[med-svn] [gatk] 01/03: Imported Upstream version 3.5+dfsg

Andreas Tille tille at debian.org
Thu Nov 26 18:54:31 UTC 2015


This is an automated email from the git hooks/post-receive script.

tille pushed a commit to branch master
in repository gatk.

commit de4ae38c2aaaf44202aaac75a486e87afae3caeb
Author: Andreas Tille <tille at debian.org>
Date:   Thu Nov 26 19:52:42 2015 +0100

    Imported Upstream version 3.5+dfsg
---
 ant-bridge.sh                                      |    28 +-
 licensing/private_license.txt                      |     2 +-
 licensing/protected_license.txt                    |     2 +-
 licensing/public_license.txt                       |     2 +-
 pom.xml                                            |   150 +-
 public/VectorPairHMM/pom.xml                       |     2 +-
 public/VectorPairHMM/src/main/c++/Sandbox.java     |    10 +-
 public/VectorPairHMM/src/main/c++/utils.cc         |    26 +-
 public/doc/Ant_Help.tex                            |     9 -
 public/doc/GATK_Coding_Standards.pdf               |   Bin 133046 -> 0 bytes
 public/doc/GATK_Coding_Standards.tex               |   288 -
 public/doc/README                                  |    89 +-
 public/external-example/pom.xml                    |    51 +-
 .../java/org/mycompany/app/MyExampleWalker.java    |     8 +-
 .../app/MyExampleWalkerIntegrationTest.java        |     6 +-
 .../org/mycompany/app/MyExampleWalkerUnitTest.java |     2 +-
 public/gatk-engine/pom.xml                         |    41 +-
 .../gatk/engine/CommandLineExecutable.java         |   229 +
 .../gatk/engine/CommandLineGATK.java               |   370 +
 .../broadinstitute/gatk/engine/GATKVCFUtils.java   |   422 +
 .../gatk/engine/GenomeAnalysisEngine.java          |  1325 ++
 .../broadinstitute/gatk/engine/ReadMetrics.java    |     2 +-
 .../broadinstitute/gatk/engine/ReadProperties.java |   197 +
 .../broadinstitute/gatk/engine/SampleUtils.java    |   258 +
 .../broadinstitute/gatk/engine/WalkerManager.java  |   451 +
 .../gatk/engine/alignment/Aligner.java             |    74 +
 .../gatk/engine/alignment/Alignment.java           |   246 +
 .../gatk/engine/alignment/bwa/BWAAligner.java      |    63 +
 .../engine/alignment/bwa/BWAConfiguration.java     |    79 +
 .../gatk/engine/alignment/bwa/BWTFiles.java        |   259 +
 .../alignment/bwa/java/AlignerTestHarness.java     |   189 +
 .../alignment/bwa/java/AlignmentMatchSequence.java |   175 +
 .../engine/alignment/bwa/java/AlignmentState.java  |    38 +
 .../engine/alignment/bwa/java/BWAAlignment.java    |   215 +
 .../engine/alignment/bwa/java/BWAJavaAligner.java  |   418 +
 .../gatk/engine/alignment/bwa/java/LowerBound.java |   113 +
 .../gatk/engine/alignment/package-info.java        |    26 +
 .../engine/alignment/reference/bwt/AMBWriter.java  |    93 +
 .../engine/alignment/reference/bwt/ANNWriter.java  |   120 +
 .../gatk/engine/alignment/reference/bwt/BWT.java   |   197 +
 .../engine/alignment/reference/bwt/BWTReader.java  |   114 +
 .../bwt/BWTSupplementaryFileGenerator.java         |    85 +
 .../engine/alignment/reference/bwt/BWTWriter.java  |    96 +
 .../gatk/engine/alignment/reference/bwt/Bases.java |   133 +
 .../engine/alignment/reference/bwt/Counts.java     |   176 +
 .../reference/bwt/CreateBWTFromReference.java      |   200 +
 .../alignment/reference/bwt/SequenceBlock.java     |    66 +
 .../alignment/reference/bwt/SuffixArray.java       |   183 +
 .../alignment/reference/bwt/SuffixArrayReader.java |   110 +
 .../alignment/reference/bwt/SuffixArrayWriter.java |    92 +
 .../reference/packing/BasePackedInputStream.java   |   120 +
 .../reference/packing/BasePackedOutputStream.java  |   165 +
 .../reference/packing/CreatePACFromReference.java  |    64 +
 .../alignment/reference/packing/PackUtils.java     |   160 +
 .../packing/UnsignedIntPackedInputStream.java      |   129 +
 .../packing/UnsignedIntPackedOutputStream.java     |   121 +
 .../engine/arguments/DbsnpArgumentCollection.java  |    46 +
 .../engine/arguments/GATKArgumentCollection.java   |   680 +
 ...ndardVariantContextInputArgumentCollection.java |    48 +
 .../gatk/engine/crypt/CryptUtils.java              |   391 +
 .../broadinstitute/gatk/engine/crypt/GATKKey.java  |   350 +
 .../gatk/engine/datasources/package-info.java      |    26 +
 .../engine/datasources/providers/AllLocusView.java |   169 +
 .../datasources/providers/CoveredLocusView.java    |    63 +
 .../IntervalOverlappingRODsFromStream.java         |   168 +
 .../providers/IntervalReferenceOrderedView.java    |   182 +
 .../providers/InvalidPositionException.java        |    46 +
 .../datasources/providers/LocusReferenceView.java  |   249 +
 .../providers/LocusShardDataProvider.java          |   100 +
 .../engine/datasources/providers/LocusView.java    |   219 +
 .../providers/ManagingReferenceOrderedView.java    |   116 +
 .../providers/RODMetaDataContainer.java            |    83 +
 .../providers/ReadBasedReferenceOrderedView.java   |    60 +
 .../datasources/providers/ReadReferenceView.java   |   102 +
 .../providers/ReadShardDataProvider.java           |    82 +
 .../engine/datasources/providers/ReadView.java     |    88 +
 .../providers/ReferenceOrderedView.java            |    33 +
 .../datasources/providers/ReferenceView.java       |   131 +
 .../engine/datasources/providers/RodLocusView.java |   196 +
 .../datasources/providers/ShardDataProvider.java   |   197 +
 .../gatk/engine/datasources/providers/View.java    |    55 +
 .../engine/datasources/providers/package-info.java |    26 +
 .../reads/ActiveRegionShardBalancer.java           |    85 +
 .../engine/datasources/reads/BAMAccessPlan.java    |   170 +
 .../gatk/engine/datasources/reads/BAMSchedule.java |   531 +
 .../engine/datasources/reads/BAMScheduler.java     |   321 +
 .../reads/BGZFBlockLoadingDispatcher.java          |    86 +
 .../engine/datasources/reads/BlockInputStream.java |   451 +
 .../gatk/engine/datasources/reads/BlockLoader.java |   189 +
 .../engine/datasources/reads/FileHandleCache.java  |   229 +
 .../gatk/engine/datasources/reads/FilePointer.java |   437 +
 .../engine/datasources/reads/GATKBAMIndex.java     |   469 +
 .../engine/datasources/reads/GATKBAMIndexData.java |   121 +
 .../reads/IntervalOverlapFilteringIterator.java    |   205 +
 .../engine/datasources/reads/IntervalSharder.java  |    93 +
 .../gatk/engine/datasources/reads/LocusShard.java  |    61 +
 .../datasources/reads/LocusShardBalancer.java      |    58 +
 .../gatk/engine/datasources/reads/ReadShard.java   |   271 +
 .../datasources/reads/ReadShardBalancer.java       |   231 +
 .../engine/datasources/reads/SAMDataSource.java    |  1236 ++
 .../gatk/engine/datasources/reads/Shard.java       |   254 +
 .../engine/datasources/reads/ShardBalancer.java    |    49 +
 .../engine/datasources/reads/package-info.java     |    26 +
 .../datasources/reads/utilities/BAMFileStat.java   |   185 +
 .../datasources/reads/utilities/BAMTagRenamer.java |   100 +
 .../reads/utilities/FindLargeShards.java           |   192 +
 .../reads/utilities/PrintBAMRegion.java            |   113 +
 .../reads/utilities/PrintBGZFBounds.java           |   137 +
 .../reads/utilities/UnzipSingleBlock.java          |    89 +
 .../datasources/reads/utilities/package-info.java  |    26 +
 .../datasources/reference/ReferenceDataSource.java |   166 +
 .../engine/datasources/reference/package-info.java |    26 +
 .../engine/datasources/rmd/DataStreamSegment.java  |    32 +
 .../gatk/engine/datasources/rmd/EntireStream.java  |    32 +
 .../datasources/rmd/MappedStreamSegment.java       |    48 +
 .../datasources/rmd/ReferenceOrderedDataPool.java  |   153 +
 .../rmd/ReferenceOrderedDataSource.java            |   257 +
 .../gatk/engine/datasources/rmd/ResourcePool.java  |   188 +
 .../gatk/engine/datasources/rmd/package-info.java  |    26 +
 .../gatk/engine/executive/Accumulator.java         |   211 +
 .../executive/HierarchicalMicroScheduler.java      |   495 +
 .../executive/HierarchicalMicroSchedulerMBean.java |    86 +
 .../engine/executive/LinearMicroScheduler.java     |   130 +
 .../gatk/engine/executive/MicroScheduler.java      |   463 +
 .../gatk/engine/executive/MicroSchedulerMBean.java |    37 +
 .../gatk/engine/executive/OutputMergeTask.java     |   102 +
 .../gatk/engine/executive/ReduceTree.java          |   187 +
 .../gatk/engine/executive/ShardTraverser.java      |   163 +
 .../gatk/engine/executive/TreeReducer.java         |   127 +
 .../gatk/engine/executive/WindowMaker.java         |   218 +
 .../gatk/engine/executive/package-info.java        |    26 +
 .../gatk/engine/filters/BAQReadTransformer.java    |    75 +
 .../gatk/engine/filters/BadCigarFilter.java        |   141 +
 .../gatk/engine/filters/BadMateFilter.java         |    74 +
 .../engine/filters/CountingFilteringIterator.java  |   150 +
 .../gatk/engine/filters/DisableableReadFilter.java |    35 +
 .../gatk/engine/filters/DuplicateReadFilter.java   |    91 +
 .../filters/FailsVendorQualityCheckFilter.java     |    43 +
 .../gatk/engine/filters/FilterManager.java         |   106 +
 .../gatk/engine/filters/LibraryReadFilter.java     |    65 +
 .../gatk/engine/filters/MalformedReadFilter.java   |   277 +
 .../gatk/engine/filters/MappingQualityFilter.java  |    62 +
 .../filters/MappingQualityUnavailableFilter.java   |    58 +
 .../engine/filters/MappingQualityZeroFilter.java   |    56 +
 .../gatk/engine/filters/MateSameStrandFilter.java  |    65 +
 .../gatk/engine/filters/MaxInsertSizeFilter.java   |    59 +
 .../engine/filters/MissingReadGroupFilter.java     |    55 +
 .../engine/filters/NDNCigarReadTransformer.java    |   123 +
 .../filters/NoOriginalQualityScoresFilter.java     |    54 +
 .../engine/filters/NotPrimaryAlignmentFilter.java  |    56 +
 .../gatk/engine/filters/OverclippedReadFilter.java |    80 +
 .../gatk/engine/filters/Platform454Filter.java     |    57 +
 .../gatk/engine/filters/PlatformFilter.java        |    65 +
 .../gatk/engine/filters/PlatformUnitFilter.java    |    90 +
 .../engine/filters/PlatformUnitFilterHelper.java   |    87 +
 .../gatk/engine/filters/ReadFilter.java            |    60 +
 .../engine/filters/ReadGroupBlackListFilter.java   |   133 +
 .../gatk/engine/filters/ReadLengthFilter.java      |    63 +
 .../gatk/engine/filters/ReadNameFilter.java        |    59 +
 .../gatk/engine/filters/ReadStrandFilter.java      |    62 +
 .../filters/ReassignMappingQualityFilter.java      |    88 +
 .../filters/ReassignOneMappingQualityFilter.java   |    90 +
 .../gatk/engine/filters/SampleFilter.java          |    61 +
 .../gatk/engine/filters/SingleReadGroupFilter.java |    63 +
 .../gatk/engine/filters/UnmappedReadFilter.java    |    56 +
 .../gatk/engine/filters/package-info.java          |    26 +
 .../gatk/engine/io/BySampleSAMFileWriter.java      |    70 +
 .../gatk/engine/io/DirectOutputTracker.java        |    48 +
 .../gatk/engine/io/FastqFileWriter.java            |    77 +
 .../gatk/engine/io/NWaySAMFileWriter.java          |   255 +
 .../gatk/engine/io/OutputTracker.java              |   193 +
 .../gatk/engine/io/ThreadGroupOutputTracker.java   |   170 +
 .../engine/io/storage/OutputStreamStorage.java     |   144 +
 .../engine/io/storage/SAMFileWriterStorage.java    |   172 +
 .../gatk/engine/io/storage/Storage.java            |    45 +
 .../gatk/engine/io/storage/StorageFactory.java     |    92 +
 .../io/storage/VariantContextWriterStorage.java    |   247 +
 .../stubs/OutputStreamArgumentTypeDescriptor.java  |   134 +
 .../gatk/engine/io/stubs/OutputStreamStub.java     |   142 +
 .../stubs/SAMFileWriterArgumentTypeDescriptor.java |   106 +
 .../gatk/engine/io/stubs/SAMFileWriterStub.java    |   373 +
 .../io/stubs/SAMReaderArgumentTypeDescriptor.java  |    77 +
 .../broadinstitute/gatk/engine/io/stubs/Stub.java  |    69 +
 .../io/stubs/VCFWriterArgumentTypeDescriptor.java  |   138 +
 .../engine/io/stubs/VariantContextWriterStub.java  |   313 +
 .../gatk/engine/iterators/BoundedReadIterator.java |   160 +
 .../gatk/engine/iterators/GenomeLocusIterator.java |   100 +
 .../gatk/engine/iterators/IterableIterator.java    |    40 +
 .../MalformedBAMErrorReformatingIterator.java      |    69 +
 .../MisencodedBaseQualityReadTransformer.java      |    94 +
 .../gatk/engine/iterators/NullSAMIterator.java     |    58 +
 .../gatk/engine/iterators/PeekingIterator.java     |    65 +
 .../engine/iterators/PositionTrackingIterator.java |   106 +
 .../gatk/engine/iterators/RNAReadTransformer.java  |    37 +
 .../engine/iterators/ReadFormattingIterator.java   |   141 +
 .../gatk/engine/iterators/ReadTransformer.java     |   205 +
 .../engine/iterators/ReadTransformersMode.java     |    53 +
 .../engine/iterators/ReadTransformingIterator.java |    68 +
 .../engine/iterators/VerifyingSamIterator.java     |    90 +
 .../gatk/engine/iterators/package-info.java        |    26 +
 .../broadinstitute/gatk/engine/package-info.java   |     2 +-
 .../gatk/engine/phonehome/GATKRunReport.java       |   786 +
 .../engine/phonehome/GATKRunReportException.java   |    99 +
 .../gatk/engine/recalibration/BQSRArgumentSet.java |   100 +
 .../gatk/engine/recalibration/BQSRMode.java        |    55 +
 .../resourcemanagement/ThreadAllocation.java       |   116 +
 .../gatk/engine/samples/Affection.java             |    47 +
 .../broadinstitute/gatk/engine/samples/Gender.java |    35 +
 .../gatk/engine/samples/MendelianViolation.java    |   461 +
 .../gatk/engine/samples/PedReader.java             |   311 +
 .../engine/samples/PedigreeValidationType.java     |    42 +
 .../broadinstitute/gatk/engine/samples/Sample.java |   261 +
 .../gatk/engine/samples/SampleDB.java              |   338 +
 .../gatk/engine/samples/SampleDBBuilder.java       |   161 +
 .../broadinstitute/gatk/engine/samples/Trio.java   |    70 +
 .../traversals/ArtificialReadsTraversal.java       |   142 +
 .../engine/traversals/TAROrderedReadCache.java     |   168 +
 .../gatk/engine/traversals/TraversalEngine.java    |   124 +
 .../engine/traversals/TraverseActiveRegions.java   |   719 +
 .../gatk/engine/traversals/TraverseDuplicates.java |   205 +
 .../gatk/engine/traversals/TraverseLociNano.java   |   304 +
 .../gatk/engine/traversals/TraverseReadPairs.java  |   129 +
 .../gatk/engine/traversals/TraverseReadsNano.java  |   256 +
 .../gatk/engine/traversals/package-info.java       |    26 +
 .../walkers/ActiveRegionTraversalParameters.java   |    97 +
 .../gatk/engine/walkers/ActiveRegionWalker.java    |   196 +
 .../broadinstitute/gatk/engine/walkers/Allows.java |    51 +
 .../gatk/engine/walkers/Attribution.java           |    39 +
 .../gatk/engine/walkers/BAQMode.java               |    56 +
 .../org/broadinstitute/gatk/engine/walkers/By.java |    53 +
 .../gatk/engine/walkers/DataSource.java            |    58 +
 .../gatk/engine/walkers/DisabledReadFilters.java   |    41 +
 .../gatk/engine/walkers/Downsample.java            |    47 +
 .../gatk/engine/walkers/DuplicateWalker.java       |    57 +
 .../gatk/engine/walkers/FailMethod.java            |    63 +
 .../gatk/engine/walkers/LocusWalker.java           |    58 +
 .../gatk/engine/walkers/NanoSchedulable.java       |    34 +
 .../gatk/engine/walkers/PartitionBy.java           |    40 +
 .../gatk/engine/walkers/PartitionType.java         |    61 +
 .../broadinstitute/gatk/engine/walkers/RMD.java    |    56 +
 .../gatk/engine/walkers/ReadFilters.java           |    45 +
 .../gatk/engine/walkers/ReadPairWalker.java        |    63 +
 .../gatk/engine/walkers/ReadWalker.java            |    55 +
 .../gatk/engine/walkers/RefWalker.java             |    39 +
 .../gatk/engine/walkers/Reference.java             |    47 +
 .../gatk/engine/walkers/RemoveProgramRecords.java  |    46 +
 .../gatk/engine/walkers/Requires.java              |    52 +
 .../gatk/engine/walkers/RodWalker.java             |    39 +
 .../gatk/engine/walkers/TreeReducible.java         |    49 +
 .../broadinstitute/gatk/engine/walkers/Walker.java |   210 +
 .../gatk/engine/walkers/WalkerName.java            |    42 +
 .../broadinstitute/gatk/engine/walkers/Window.java |    57 +
 .../src/main/resources/GATK_public.key             |   Bin
 .../engine/phonehome/resources/GATK_AWS_access.key |   Bin
 .../engine/phonehome/resources/GATK_AWS_secret.key |   Bin
 .../gatk/engine}/recalibration/BQSR.R              |     0
 .../gatk/engine/CommandLineGATKUnitTest.java       |    68 +
 .../gatk/engine/EngineFeaturesIntegrationTest.java |   793 +
 .../gatk/engine/GATKVCFUtilsUnitTest.java          |   159 +
 .../gatk/engine/GenomeAnalysisEngineUnitTest.java  |   272 +
 .../gatk/engine/InstantiableWalker.java            |    37 +
 .../gatk/engine/MaxRuntimeIntegrationTest.java     |   151 +
 .../gatk/engine/ReadMetricsUnitTest.java           |   372 +
 .../gatk/engine/SampleUtilsUnitTest.java           |    49 +
 .../gatk/engine/UninstantiableWalker.java          |    37 +
 .../gatk/engine/WalkerManagerUnitTest.java         |    61 +
 .../gatk/engine/arguments/CramIntegrationTest.java |    90 +
 .../engine/arguments/IntervalIntegrationTest.java  |   314 +
 .../arguments/InvalidArgumentIntegrationTest.java  |    55 +
 .../engine/arguments/LoggingIntegrationTest.java   |   117 +
 .../gatk/engine/crypt/CryptUtilsUnitTest.java      |   200 +
 .../gatk/engine/crypt/GATKKeyIntegrationTest.java  |   157 +
 .../gatk/engine/crypt/GATKKeyUnitTest.java         |   129 +
 .../providers/AllLocusViewUnitTest.java            |    90 +
 .../providers/CoveredLocusViewUnitTest.java        |   102 +
 .../IntervalReferenceOrderedViewUnitTest.java      |   366 +
 .../providers/LocusReferenceViewUnitTest.java      |   143 +
 .../datasources/providers/LocusViewTemplate.java   |   405 +
 .../providers/ReadReferenceViewUnitTest.java       |   160 +
 .../providers/ReferenceOrderedViewUnitTest.java    |   157 +
 .../providers/ReferenceViewTemplate.java           |   122 +
 .../providers/ShardDataProviderUnitTest.java       |   152 +
 .../reads/ActiveRegionShardBalancerUnitTest.java   |   103 +
 .../datasources/reads/DownsamplerBenchmark.java    |    94 +
 .../datasources/reads/FilePointerUnitTest.java     |   130 +
 .../datasources/reads/GATKBAMIndexUnitTest.java    |   113 +
 .../datasources/reads/GATKWalkerBenchmark.java     |   156 +
 .../IntervalOverlapFilteringIteratorUnitTest.java  |   150 +
 .../engine/datasources/reads/MockLocusShard.java   |    49 +
 .../datasources/reads/PicardBaselineBenchmark.java |   101 +
 .../datasources/reads/ReadProcessingBenchmark.java |    83 +
 .../reads/ReadShardBalancerUnitTest.java           |   197 +
 .../datasources/reads/SAMDataSourceUnitTest.java   |   268 +
 .../datasources/reads/SAMReaderIDUnitTest.java     |    50 +
 .../reads/SeekableBufferedStreamUnitTest.java      |   104 +
 .../reads/TheoreticalMinimaBenchmark.java          |   114 +
 .../ReferenceDataSourceIntegrationTest.java        |    75 +
 .../rmd/ReferenceOrderedDataPoolUnitTest.java      |   208 +
 .../rmd/ReferenceOrderedQueryDataPoolUnitTest.java |    89 +
 .../downsampling/DownsamplingIntegrationTest.java  |    45 +
 .../DownsamplingReadsIteratorUnitTest.java         |   141 +
 .../FractionalDownsamplerUnitTest.java             |   160 +
 .../downsampling/LevelingDownsamplerUnitTest.java  |   165 +
 ...PerSampleDownsamplingReadsIteratorUnitTest.java |   302 +
 ...edArtificialSingleSampleReadStreamAnalyzer.java |   127 +
 .../downsampling/ReservoirDownsamplerUnitTest.java |   133 +
 .../SimplePositionalDownsamplerUnitTest.java       |   333 +
 .../gatk/engine/executive/ReduceTreeUnitTest.java  |   254 +
 .../AllowNCigarMalformedReadFilterUnitTest.java    |    77 +
 .../engine/filters/BadCigarFilterUnitTest.java     |    97 +
 .../filters/BadReadGroupsIntegrationTest.java      |    52 +
 .../filters/MalformedReadFilterUnitTest.java       |   246 +
 .../filters/NDNCigarReadTransformerUnitTest.java   |    70 +
 .../filters/OverclippedReadFilterUnitTest.java     |   105 +
 .../gatk/engine/filters/ReadFilterTest.java        |   373 +
 .../filters/ReadGroupBlackListFilterUnitTest.java  |   247 +
 .../filters/UnsafeMalformedReadFilterUnitTest.java |    50 +
 .../gatk/engine/io/OutputTrackerUnitTest.java      |    84 +
 .../io/stubs/ArgumentTypeDescriptorUnitTest.java   |   233 +
 .../iterators/BoundedReadIteratorUnitTest.java     |   144 +
 .../iterators/GATKSAMIteratorAdapterUnitTest.java  |   179 +
 .../iterators/MisencodedBaseQualityUnitTest.java   |    99 +
 .../iterators/ReadFormattingIteratorUnitTest.java  |    52 +
 .../iterators/VerifyingSamIteratorUnitTest.java    |   129 +
 .../engine/phonehome/GATKRunReportUnitTest.java    |   358 +
 .../gatk/engine/samples/PedReaderUnitTest.java     |   354 +
 .../gatk/engine/samples/SampleDBUnitTest.java      |   272 +
 .../gatk/engine/samples/SampleUnitTest.java        |    89 +
 .../engine/traversals/DummyActiveRegionWalker.java |   116 +
 .../traversals/TAROrderedReadCacheUnitTest.java    |   111 +
 .../traversals/TraverseActiveRegionsUnitTest.java  |   680 +
 .../traversals/TraverseDuplicatesUnitTest.java     |   162 +
 .../engine/traversals/TraverseReadsUnitTest.java   |   167 +
 .../gatk/engine/walkers/TestCountLociWalker.java   |    58 +
 .../gatk/engine/walkers/TestCountReadsWalker.java  |    59 +
 .../engine/walkers/TestErrorThrowingWalker.java    |    70 +
 .../gatk/engine/walkers/TestPrintReadsWalker.java  |    76 +
 .../engine/walkers/TestPrintVariantsWalker.java    |    99 +
 .../gatk/engine/walkers/WalkerTest.java            |   457 +
 .../gatk-engine/src/test/resources/exampleBAM.bam  |   Bin 3635 -> 0 bytes
 .../src/test/resources/exampleBAM.bam.bai          |   Bin 232 -> 0 bytes
 .../src/test/resources/exampleBAM.simple.bai       |   Bin 232 -> 0 bytes
 .../src/test/resources/exampleBAM.simple.bam       |   Bin 3595 -> 0 bytes
 .../gatk-engine/src/test/resources/exampleNORG.bam |   Bin 3586 -> 0 bytes
 .../src/test/resources/exampleNORG.bam.bai         |   Bin 232 -> 0 bytes
 public/gatk-queue-extensions-generator/pom.xml     |     4 +-
 .../extensions/gatk/ArgumentDefinitionField.java   |     4 +-
 .../gatk/queue/extensions/gatk/ArgumentField.java  |     2 +-
 .../extensions/gatk/GATKExtensionsGenerator.java   |    13 +-
 .../queue/extensions/gatk/ReadFilterField.java     |     2 +-
 public/gatk-queue-extensions-public/pom.xml        |     8 +-
 .../CNV/ONLY_GENOTYPE_xhmmCNVpipeline.scala        |   103 +
 .../gatk/queue/qscripts/CNV/xhmmCNVpipeline.scala  |   133 +-
 .../gatk/queue/qscripts/GATKResourcesBundle.scala  |     5 +-
 .../queue/qscripts/examples/ExampleCountLoci.scala |     2 +-
 .../qscripts/examples/ExampleCountReads.scala      |     2 +-
 .../qscripts/examples/ExampleCustomWalker.scala    |     2 +-
 .../qscripts/examples/ExamplePrintReads.scala      |     6 +-
 .../qscripts/examples/ExampleReadFilter.scala      |     2 +-
 .../examples/ExampleRetryMemoryLimit.scala         |     2 +-
 .../gatk/queue/qscripts/examples/HelloWorld.scala  |     2 +-
 .../gatk/queue/qscripts/lib/ChunkVCF.scala         |     2 +-
 .../gatk/queue/qscripts/lib/Vcf2Table.scala        |     2 +-
 .../gatk/queue/qscripts/lib/VcfToPed.scala         |     2 +-
 .../gatk/queue/extensions/cancer/MuTect.scala      |     5 +-
 .../queue/extensions/gatk/BamGatherFunction.scala  |     2 +-
 .../extensions/gatk/CatVariantsGatherer.scala      |     2 +-
 .../extensions/gatk/ContigScatterFunction.scala    |     4 +-
 .../gatk/DistributedScatterFunction.scala          |     2 +-
 .../gatk/queue/extensions/gatk/DoC/package.scala   |     5 +-
 .../gatk/queue/extensions/gatk/GATKIntervals.scala |     4 +-
 .../extensions/gatk/GATKScatterFunction.scala      |     4 +-
 .../extensions/gatk/IntervalScatterFunction.scala  |     2 +-
 .../extensions/gatk/LocusScatterFunction.scala     |     2 +-
 .../extensions/gatk/ReadScatterFunction.scala      |     2 +-
 .../gatk/queue/extensions/gatk/TaggedFile.scala    |     2 +-
 .../queue/extensions/gatk/VcfGatherFunction.scala  |     2 +-
 .../gatk/WriteFlankingIntervalsFunction.scala      |     2 +-
 .../gatk/queue/extensions/gatk/XHMM/package.scala  |   156 +
 .../extensions/picard/AddOrReplaceReadGroups.scala |     2 +-
 .../extensions/picard/CalculateHsMetrics.scala     |     8 +-
 .../extensions/picard/CollectGcBiasMetrics.scala   |     2 +-
 .../extensions/picard/CollectMultipleMetrics.scala |     2 +-
 .../extensions/picard/CollectWgsMetrics.scala      |    70 +
 .../gatk/queue/extensions/picard/FastqToSam.scala  |     2 +-
 .../queue/extensions/picard/MarkDuplicates.scala   |     6 +-
 .../queue/extensions/picard/MergeSamFiles.scala    |     2 +-
 .../extensions/picard/PicardBamFunction.scala      |     2 +-
 .../extensions/picard/PicardMetricsFunction.scala  |     2 +-
 .../gatk/queue/extensions/picard/ReorderSam.scala  |     2 +-
 .../gatk/queue/extensions/picard/RevertSam.scala   |     2 +-
 .../gatk/queue/extensions/picard/SamToFastq.scala  |     2 +-
 .../gatk/queue/extensions/picard/SortSam.scala     |     2 +-
 .../queue/extensions/picard/ValidateSamFile.scala  |     2 +-
 .../samtools/SamtoolsCommandLineFunction.scala     |     2 +-
 .../samtools/SamtoolsIndexFunction.scala           |     2 +-
 .../samtools/SamtoolsMergeFunction.scala           |     2 +-
 .../gatk/queue/extensions/snpeff/SnpEff.scala      |     2 +-
 .../extensions/gatk/GATKIntervalsUnitTest.scala    |     2 +-
 .../extensions/gatk/QueueFeaturesQueueTest.scala   |    63 +
 .../examples/ExampleCountLociQueueTest.scala       |     2 +-
 .../examples/ExampleCountReadsQueueTest.scala      |     2 +-
 .../examples/ExamplePrintReadsQueueTest.scala      |     2 +-
 .../examples/ExampleReadFilterQueueTest.scala      |     2 +-
 .../ExampleRetryMemoryLimitQueueTest.scala         |     2 +-
 .../pipeline/examples/HelloWorldQueueTest.scala    |    27 +-
 public/gatk-queue/pom.xml                          |     6 +-
 .../broadinstitute/gatk/queue/QueueVersion.java    |     2 +-
 .../broadinstitute/gatk/queue/package-info.java    |     2 +-
 .../broadinstitute/gatk/queue/QCommandLine.scala   |    29 +-
 .../broadinstitute/gatk/queue/QCommandPlugin.scala |     2 +-
 .../org/broadinstitute/gatk/queue/QException.scala |     2 +-
 .../org/broadinstitute/gatk/queue/QScript.scala    |     9 +-
 .../broadinstitute/gatk/queue/QScriptManager.scala |     2 +-
 .../org/broadinstitute/gatk/queue/QSettings.scala  |     7 +-
 .../gatk/queue/engine/CommandLineJobManager.scala  |     2 +-
 .../gatk/queue/engine/CommandLineJobRunner.scala   |     2 +-
 .../queue/engine/CommandLinePluginManager.scala    |     2 +-
 .../gatk/queue/engine/FunctionEdge.scala           |     2 +-
 .../gatk/queue/engine/InProcessJobManager.scala    |     2 +-
 .../gatk/queue/engine/InProcessRunner.scala        |     2 +-
 .../gatk/queue/engine/JobManager.scala             |     2 +-
 .../gatk/queue/engine/JobRunInfo.scala             |     2 +-
 .../gatk/queue/engine/JobRunner.scala              |     2 +-
 .../gatk/queue/engine/MappingEdge.scala            |     2 +-
 .../broadinstitute/gatk/queue/engine/QEdge.scala   |     2 +-
 .../broadinstitute/gatk/queue/engine/QGraph.scala  |    29 +-
 .../gatk/queue/engine/QGraphSettings.scala         |    14 +-
 .../broadinstitute/gatk/queue/engine/QNode.scala   |     2 +-
 .../gatk/queue/engine/QStatusMessenger.scala       |     2 +-
 .../gatk/queue/engine/RunnerStatus.scala           |     2 +-
 .../gatk/queue/engine/drmaa/DrmaaJobManager.scala  |     2 +-
 .../gatk/queue/engine/drmaa/DrmaaJobRunner.scala   |     2 +-
 .../engine/gridengine/GridEngineJobManager.scala   |     2 +-
 .../engine/gridengine/GridEngineJobRunner.scala    |    14 +-
 .../gatk/queue/engine/lsf/Lsf706JobManager.scala   |     2 +-
 .../gatk/queue/engine/lsf/Lsf706JobRunner.scala    |     2 +-
 .../parallelshell/ParallelShellJobManager.scala    |    70 +
 .../parallelshell/ParallelShellJobRunner.scala     |   151 +
 .../ThreadSafeProcessController.scala              |   106 +
 .../engine/pbsengine/PbsEngineJobManager.scala     |     2 +-
 .../engine/pbsengine/PbsEngineJobRunner.scala      |     2 +-
 .../gatk/queue/engine/shell/ShellJobManager.scala  |     2 +-
 .../gatk/queue/engine/shell/ShellJobRunner.scala   |     2 +-
 .../gatk/queue/function/CommandLineFunction.scala  |     2 +-
 .../gatk/queue/function/InProcessFunction.scala    |     2 +-
 .../queue/function/JavaCommandLineFunction.scala   |     2 +-
 .../gatk/queue/function/ListWriterFunction.scala   |     2 +-
 .../gatk/queue/function/QFunction.scala            |     2 +-
 .../gatk/queue/function/RetryMemoryLimit.scala     |     2 +-
 .../function/scattergather/CloneFunction.scala     |     2 +-
 .../scattergather/ConcatenateLogsFunction.scala    |     2 +-
 .../function/scattergather/GatherFunction.scala    |     2 +-
 .../function/scattergather/GathererFunction.scala  |     2 +-
 .../function/scattergather/ScatterFunction.scala   |     2 +-
 .../scattergather/ScatterGatherableFunction.scala  |     2 +-
 .../scattergather/SimpleTextGatherFunction.scala   |     2 +-
 .../library/clf/vcf/VCFExtractIntervals.scala      |     2 +-
 .../queue/library/clf/vcf/VCFExtractSamples.scala  |     2 +-
 .../gatk/queue/library/ipf/SortByRef.scala         |     2 +-
 .../library/ipf/vcf/VCFExtractIntervals.scala      |     2 +-
 .../queue/library/ipf/vcf/VCFExtractSamples.scala  |     2 +-
 .../queue/library/ipf/vcf/VCFExtractSites.scala    |     2 +-
 .../queue/library/ipf/vcf/VCFSimpleMerge.scala     |     2 +-
 .../gatk/queue/util/ClassFieldCache.scala          |     2 +-
 .../gatk/queue/util/CollectionUtils.scala          |     2 +-
 .../gatk/queue/util/EmailMessage.scala             |     2 +-
 .../gatk/queue/util/EmailSettings.scala            |     2 +-
 .../broadinstitute/gatk/queue/util/Logging.scala   |     2 +-
 .../queue/util/PrimitiveOptionConversions.scala    |     2 +-
 .../gatk/queue/util/QJobReport.scala               |     4 +-
 .../gatk/queue/util/QJobsReporter.scala            |     5 +-
 .../gatk/queue/util/QScriptUtils.scala             |    23 +-
 .../gatk/queue/util/ReflectionUtils.scala          |     2 +-
 .../gatk/queue/util/RemoteFile.scala               |     2 +-
 .../gatk/queue/util/RemoteFileConverter.scala      |     2 +-
 .../org/broadinstitute/gatk/queue/util/Retry.scala |     2 +-
 .../gatk/queue/util/RetryException.scala           |     2 +-
 .../util/ScalaCompoundArgumentTypeDescriptor.scala |     2 +-
 .../gatk/queue/util/ShellUtils.scala               |     2 +-
 .../gatk/queue/util/StringFileConversions.scala    |     2 +-
 .../gatk/queue/util/SystemUtils.scala              |     2 +-
 .../gatk/queue/util/TextFormatUtils.scala          |     2 +-
 .../gatk/queue/util/VCF_BAM_utilities.scala        |     2 +-
 .../function/CommandLineFunctionUnitTest.scala     |     2 +-
 .../gatk/queue/pipeline/QueueTest.scala            |     8 +-
 .../gatk/queue/pipeline/QueueTestEvalSpec.scala    |     2 +-
 .../gatk/queue/pipeline/QueueTestSpec.scala        |     2 +-
 .../gatk/queue/util/ShellUtilsUnitTest.scala       |     2 +-
 .../queue/util/StringFileConversionsUnitTest.scala |     2 +-
 .../gatk/queue/util/SystemUtilsUnitTest.scala      |     2 +-
 public/gatk-root/pom.xml                           |    35 +-
 public/gatk-tools-public/pom.xml                   |    32 +-
 .../main/java/htsjdk/samtools/GATKBAMFileSpan.java |   308 -
 .../src/main/java/htsjdk/samtools/GATKBin.java     |   135 -
 .../src/main/java/htsjdk/samtools/GATKChunk.java   |   116 -
 .../java/htsjdk/samtools/PicardNamespaceUtils.java |    40 -
 .../gatk/engine/CommandLineExecutable.java         |   229 -
 .../gatk/engine/CommandLineGATK.java               |   385 -
 .../gatk/engine/GenomeAnalysisEngine.java          |  1280 --
 .../broadinstitute/gatk/engine/ReadProperties.java |   198 -
 .../broadinstitute/gatk/engine/WalkerManager.java  |   431 -
 .../gatk/engine/alignment/Aligner.java             |    74 -
 .../gatk/engine/alignment/Alignment.java           |   246 -
 .../gatk/engine/alignment/bwa/BWAAligner.java      |    63 -
 .../engine/alignment/bwa/BWAConfiguration.java     |    79 -
 .../gatk/engine/alignment/bwa/BWTFiles.java        |   259 -
 .../alignment/bwa/java/AlignerTestHarness.java     |   189 -
 .../alignment/bwa/java/AlignmentMatchSequence.java |   175 -
 .../engine/alignment/bwa/java/AlignmentState.java  |    38 -
 .../engine/alignment/bwa/java/BWAAlignment.java    |   215 -
 .../engine/alignment/bwa/java/BWAJavaAligner.java  |   418 -
 .../gatk/engine/alignment/bwa/java/LowerBound.java |   113 -
 .../gatk/engine/alignment/package-info.java        |    26 -
 .../engine/alignment/reference/bwt/AMBWriter.java  |    93 -
 .../engine/alignment/reference/bwt/ANNWriter.java  |   120 -
 .../gatk/engine/alignment/reference/bwt/BWT.java   |   197 -
 .../engine/alignment/reference/bwt/BWTReader.java  |   114 -
 .../bwt/BWTSupplementaryFileGenerator.java         |    85 -
 .../engine/alignment/reference/bwt/BWTWriter.java  |    96 -
 .../gatk/engine/alignment/reference/bwt/Bases.java |   133 -
 .../engine/alignment/reference/bwt/Counts.java     |   176 -
 .../reference/bwt/CreateBWTFromReference.java      |   200 -
 .../alignment/reference/bwt/SequenceBlock.java     |    66 -
 .../alignment/reference/bwt/SuffixArray.java       |   183 -
 .../alignment/reference/bwt/SuffixArrayReader.java |   110 -
 .../alignment/reference/bwt/SuffixArrayWriter.java |    92 -
 .../reference/packing/BasePackedInputStream.java   |   120 -
 .../reference/packing/BasePackedOutputStream.java  |   165 -
 .../reference/packing/CreatePACFromReference.java  |    64 -
 .../alignment/reference/packing/PackUtils.java     |   160 -
 .../packing/UnsignedIntPackedInputStream.java      |   129 -
 .../packing/UnsignedIntPackedOutputStream.java     |   121 -
 .../engine/arguments/DbsnpArgumentCollection.java  |    46 -
 .../engine/arguments/GATKArgumentCollection.java   |   628 -
 ...ndardVariantContextInputArgumentCollection.java |    48 -
 .../gatk/engine/arguments/ValidationExclusion.java |    67 -
 .../gatk/engine/contexts/AlignmentContext.java     |   154 -
 .../engine/contexts/AlignmentContextUtils.java     |   150 -
 .../gatk/engine/contexts/ReferenceContext.java     |   217 -
 .../gatk/engine/datasources/package-info.java      |    26 -
 .../engine/datasources/providers/AllLocusView.java |   169 -
 .../datasources/providers/CoveredLocusView.java    |    63 -
 .../IntervalOverlappingRODsFromStream.java         |   168 -
 .../providers/IntervalReferenceOrderedView.java    |   184 -
 .../providers/InvalidPositionException.java        |    46 -
 .../datasources/providers/LocusReferenceView.java  |   236 -
 .../providers/LocusShardDataProvider.java          |   100 -
 .../engine/datasources/providers/LocusView.java    |   220 -
 .../providers/ManagingReferenceOrderedView.java    |   117 -
 .../providers/RODMetaDataContainer.java            |    83 -
 .../providers/ReadBasedReferenceOrderedView.java   |    69 -
 .../datasources/providers/ReadReferenceView.java   |   102 -
 .../providers/ReadShardDataProvider.java           |    82 -
 .../engine/datasources/providers/ReadView.java     |    88 -
 .../providers/ReferenceOrderedView.java            |    33 -
 .../datasources/providers/ReferenceView.java       |   131 -
 .../engine/datasources/providers/RodLocusView.java |   197 -
 .../datasources/providers/ShardDataProvider.java   |   197 -
 .../gatk/engine/datasources/providers/View.java    |    55 -
 .../engine/datasources/providers/package-info.java |    26 -
 .../reads/ActiveRegionShardBalancer.java           |    85 -
 .../engine/datasources/reads/BAMAccessPlan.java    |   170 -
 .../gatk/engine/datasources/reads/BAMSchedule.java |   530 -
 .../engine/datasources/reads/BAMScheduler.java     |   320 -
 .../reads/BGZFBlockLoadingDispatcher.java          |    86 -
 .../engine/datasources/reads/BlockInputStream.java |   450 -
 .../gatk/engine/datasources/reads/BlockLoader.java |   189 -
 .../engine/datasources/reads/FileHandleCache.java  |   232 -
 .../gatk/engine/datasources/reads/FilePointer.java |   436 -
 .../engine/datasources/reads/GATKBAMIndex.java     |   468 -
 .../engine/datasources/reads/GATKBAMIndexData.java |   121 -
 .../reads/IntervalOverlapFilteringIterator.java    |   205 -
 .../engine/datasources/reads/IntervalSharder.java  |    93 -
 .../gatk/engine/datasources/reads/LocusShard.java  |    60 -
 .../datasources/reads/LocusShardBalancer.java      |    58 -
 .../gatk/engine/datasources/reads/ReadShard.java   |   271 -
 .../datasources/reads/ReadShardBalancer.java       |   231 -
 .../engine/datasources/reads/SAMDataSource.java    |  1179 --
 .../gatk/engine/datasources/reads/SAMReaderID.java |   125 -
 .../gatk/engine/datasources/reads/Shard.java       |   253 -
 .../engine/datasources/reads/ShardBalancer.java    |    49 -
 .../engine/datasources/reads/package-info.java     |    26 -
 .../datasources/reads/utilities/BAMFileStat.java   |   185 -
 .../datasources/reads/utilities/BAMTagRenamer.java |   100 -
 .../reads/utilities/FindLargeShards.java           |   192 -
 .../reads/utilities/PrintBAMRegion.java            |   113 -
 .../reads/utilities/PrintBGZFBounds.java           |   137 -
 .../reads/utilities/UnzipSingleBlock.java          |    89 -
 .../datasources/reads/utilities/package-info.java  |    26 -
 .../datasources/reference/ReferenceDataSource.java |   199 -
 .../engine/datasources/reference/package-info.java |    26 -
 .../engine/datasources/rmd/DataStreamSegment.java  |    32 -
 .../gatk/engine/datasources/rmd/EntireStream.java  |    32 -
 .../datasources/rmd/MappedStreamSegment.java       |    48 -
 .../datasources/rmd/ReferenceOrderedDataPool.java  |   153 -
 .../rmd/ReferenceOrderedDataSource.java            |   256 -
 .../gatk/engine/datasources/rmd/ResourcePool.java  |   188 -
 .../gatk/engine/datasources/rmd/package-info.java  |    26 -
 .../AlleleBiasedDownsamplingUtils.java             |   369 -
 .../gatk/engine/downsampling/DownsampleType.java   |    39 -
 .../gatk/engine/downsampling/Downsampler.java      |   161 -
 .../engine/downsampling/DownsamplingMethod.java    |   142 -
 .../downsampling/DownsamplingReadsIterator.java    |   116 -
 .../engine/downsampling/DownsamplingUtils.java     |   107 -
 .../engine/downsampling/FractionalDownsampler.java |   129 -
 .../downsampling/FractionalDownsamplerFactory.java |    46 -
 .../engine/downsampling/LevelingDownsampler.java   |   242 -
 .../downsampling/PassThroughDownsampler.java       |   111 -
 .../PerSampleDownsamplingReadsIterator.java        |   207 -
 .../gatk/engine/downsampling/ReadsDownsampler.java |    56 -
 .../downsampling/ReadsDownsamplerFactory.java      |    38 -
 .../engine/downsampling/ReservoirDownsampler.java  |   219 -
 .../downsampling/ReservoirDownsamplerFactory.java  |    46 -
 .../downsampling/SimplePositionalDownsampler.java  |   171 -
 .../SimplePositionalDownsamplerFactory.java        |    46 -
 .../gatk/engine/executive/Accumulator.java         |   211 -
 .../executive/HierarchicalMicroScheduler.java      |   495 -
 .../executive/HierarchicalMicroSchedulerMBean.java |    86 -
 .../engine/executive/LinearMicroScheduler.java     |   130 -
 .../gatk/engine/executive/MicroScheduler.java      |   463 -
 .../gatk/engine/executive/MicroSchedulerMBean.java |    37 -
 .../gatk/engine/executive/OutputMergeTask.java     |   102 -
 .../gatk/engine/executive/ReduceTree.java          |   187 -
 .../gatk/engine/executive/ShardTraverser.java      |   163 -
 .../gatk/engine/executive/TreeReducer.java         |   127 -
 .../gatk/engine/executive/WindowMaker.java         |   217 -
 .../gatk/engine/executive/package-info.java        |    26 -
 .../gatk/engine/filters/BadCigarFilter.java        |   122 -
 .../gatk/engine/filters/BadMateFilter.java         |    47 -
 .../engine/filters/CountingFilteringIterator.java  |   150 -
 .../gatk/engine/filters/DuplicateReadFilter.java   |    66 -
 .../filters/FailsVendorQualityCheckFilter.java     |    41 -
 .../gatk/engine/filters/FilterManager.java         |    95 -
 .../gatk/engine/filters/LibraryReadFilter.java     |    49 -
 .../gatk/engine/filters/MalformedReadFilter.java   |   260 -
 .../gatk/engine/filters/MappingQualityFilter.java  |    46 -
 .../filters/MappingQualityUnavailableFilter.java   |    43 -
 .../engine/filters/MappingQualityZeroFilter.java   |    42 -
 .../gatk/engine/filters/MateSameStrandFilter.java  |    42 -
 .../gatk/engine/filters/MaxInsertSizeFilter.java   |    44 -
 .../engine/filters/MissingReadGroupFilter.java     |    41 -
 .../engine/filters/NDNCigarReadTransformer.java    |   118 -
 .../filters/NoOriginalQualityScoresFilter.java     |    65 -
 .../engine/filters/NotPrimaryAlignmentFilter.java  |    41 -
 .../gatk/engine/filters/Platform454Filter.java     |    43 -
 .../gatk/engine/filters/PlatformFilter.java        |    49 -
 .../gatk/engine/filters/PlatformUnitFilter.java    |    86 -
 .../engine/filters/PlatformUnitFilterHelper.java   |    87 -
 .../gatk/engine/filters/ReadFilter.java            |    60 -
 .../engine/filters/ReadGroupBlackListFilter.java   |   120 -
 .../gatk/engine/filters/ReadLengthFilter.java      |    48 -
 .../gatk/engine/filters/ReadNameFilter.java        |    44 -
 .../gatk/engine/filters/ReadStrandFilter.java      |    46 -
 .../filters/ReassignMappingQualityFilter.java      |    86 -
 .../filters/ReassignOneMappingQualityFilter.java   |    87 -
 .../gatk/engine/filters/SampleFilter.java          |    45 -
 .../gatk/engine/filters/SingleReadGroupFilter.java |    48 -
 .../gatk/engine/filters/UnmappedReadFilter.java    |    41 -
 .../gatk/engine/filters/package-info.java          |    26 -
 .../gatk/engine/io/DirectOutputTracker.java        |    48 -
 .../gatk/engine/io/FastqFileWriter.java            |    77 -
 .../gatk/engine/io/GATKSAMFileWriter.java          |    56 -
 .../gatk/engine/io/OutputTracker.java              |   178 -
 .../gatk/engine/io/ThreadGroupOutputTracker.java   |   170 -
 .../engine/io/storage/OutputStreamStorage.java     |   144 -
 .../engine/io/storage/SAMFileWriterStorage.java    |   157 -
 .../gatk/engine/io/storage/Storage.java            |    45 -
 .../gatk/engine/io/storage/StorageFactory.java     |    92 -
 .../io/storage/VariantContextWriterStorage.java    |   228 -
 .../stubs/OutputStreamArgumentTypeDescriptor.java  |   134 -
 .../gatk/engine/io/stubs/OutputStreamStub.java     |   142 -
 .../stubs/SAMFileReaderArgumentTypeDescriptor.java |    77 -
 .../stubs/SAMFileWriterArgumentTypeDescriptor.java |   106 -
 .../gatk/engine/io/stubs/SAMFileWriterStub.java    |   336 -
 .../broadinstitute/gatk/engine/io/stubs/Stub.java  |    69 -
 .../io/stubs/VCFWriterArgumentTypeDescriptor.java  |   148 -
 .../engine/io/stubs/VariantContextWriterStub.java  |   301 -
 .../gatk/engine/iterators/BoundedReadIterator.java |   159 -
 .../gatk/engine/iterators/GATKSAMIterator.java     |    56 -
 .../engine/iterators/GATKSAMIteratorAdapter.java   |   136 -
 .../engine/iterators/GATKSAMRecordIterator.java    |    57 -
 .../gatk/engine/iterators/GenomeLocusIterator.java |   100 -
 .../gatk/engine/iterators/IterableIterator.java    |    40 -
 .../MalformedBAMErrorReformatingIterator.java      |    69 -
 .../gatk/engine/iterators/NullSAMIterator.java     |    57 -
 .../gatk/engine/iterators/PeekingIterator.java     |    65 -
 .../engine/iterators/PositionTrackingIterator.java |   105 -
 .../gatk/engine/iterators/PushbackIterator.java    |    82 -
 .../gatk/engine/iterators/RNAReadTransformer.java  |    37 -
 .../engine/iterators/ReadFormattingIterator.java   |   140 -
 .../gatk/engine/iterators/ReadTransformer.java     |   205 -
 .../engine/iterators/ReadTransformersMode.java     |    53 -
 .../engine/iterators/VerifyingSamIterator.java     |    90 -
 .../gatk/engine/iterators/package-info.java        |    26 -
 .../gatk/engine/phonehome/GATKRunReport.java       |   786 -
 .../engine/phonehome/GATKRunReportException.java   |    99 -
 .../gatk/engine/refdata/RODRecordListImpl.java     |   129 -
 .../gatk/engine/refdata/RefMetaDataTracker.java    |   497 -
 .../refdata/ReferenceDependentFeatureCodec.java    |    43 -
 .../gatk/engine/refdata/ReferenceOrderedDatum.java |    66 -
 .../gatk/engine/refdata/SeekableRODIterator.java   |   412 -
 .../engine/refdata/VariantContextAdaptors.java     |   399 -
 .../gatk/engine/refdata/package-info.java          |    26 -
 .../gatk/engine/refdata/tracks/FeatureManager.java |   280 -
 .../refdata/tracks/IndexDictionaryUtils.java       |   114 -
 .../gatk/engine/refdata/tracks/RMDTrack.java       |   147 -
 .../engine/refdata/tracks/RMDTrackBuilder.java     |   430 -
 .../utils/FeatureToGATKFeatureIterator.java        |    74 -
 .../engine/refdata/utils/FlashBackIterator.java    |   221 -
 .../gatk/engine/refdata/utils/GATKFeature.java     |   109 -
 .../utils/LocationAwareSeekableRODIterator.java    |    49 -
 .../gatk/engine/refdata/utils/RMDTriplet.java      |    92 -
 .../gatk/engine/refdata/utils/RODRecordList.java   |    45 -
 .../gatk/engine/report/GATKReport.java             |   376 -
 .../gatk/engine/report/GATKReportColumn.java       |   147 -
 .../gatk/engine/report/GATKReportColumnFormat.java |    63 -
 .../gatk/engine/report/GATKReportDataType.java     |   236 -
 .../gatk/engine/report/GATKReportGatherer.java     |    62 -
 .../gatk/engine/report/GATKReportTable.java        |   779 -
 .../gatk/engine/report/GATKReportVersion.java      |   101 -
 .../resourcemanagement/ThreadAllocation.java       |   116 -
 .../gatk/engine/samples/Affection.java             |    47 -
 .../broadinstitute/gatk/engine/samples/Gender.java |    35 -
 .../gatk/engine/samples/PedReader.java             |   311 -
 .../engine/samples/PedigreeValidationType.java     |    42 -
 .../broadinstitute/gatk/engine/samples/Sample.java |   259 -
 .../gatk/engine/samples/SampleDB.java              |   338 -
 .../gatk/engine/samples/SampleDBBuilder.java       |   161 -
 .../broadinstitute/gatk/engine/samples/Trio.java   |    70 -
 .../engine/traversals/TAROrderedReadCache.java     |   168 -
 .../gatk/engine/traversals/TraversalEngine.java    |   124 -
 .../engine/traversals/TraverseActiveRegions.java   |   719 -
 .../gatk/engine/traversals/TraverseDuplicates.java |   205 -
 .../gatk/engine/traversals/TraverseLociNano.java   |   304 -
 .../gatk/engine/traversals/TraverseReadPairs.java  |   129 -
 .../gatk/engine/traversals/TraverseReadsNano.java  |   256 -
 .../gatk/engine/traversals/package-info.java       |    26 -
 .../walkers/ActiveRegionTraversalParameters.java   |    97 -
 .../gatk/engine/walkers/ActiveRegionWalker.java    |   196 -
 .../broadinstitute/gatk/engine/walkers/Allows.java |    51 -
 .../gatk/engine/walkers/Attribution.java           |    39 -
 .../gatk/engine/walkers/BAQMode.java               |    56 -
 .../org/broadinstitute/gatk/engine/walkers/By.java |    53 -
 .../gatk/engine/walkers/DataSource.java            |    58 -
 .../gatk/engine/walkers/Downsample.java            |    47 -
 .../gatk/engine/walkers/DuplicateWalker.java       |    57 -
 .../gatk/engine/walkers/LocusWalker.java           |    58 -
 .../gatk/engine/walkers/Multiplex.java             |    44 -
 .../gatk/engine/walkers/Multiplexer.java           |    52 -
 .../gatk/engine/walkers/NanoSchedulable.java       |    34 -
 .../gatk/engine/walkers/PartitionBy.java           |    39 -
 .../gatk/engine/walkers/PartitionType.java         |    61 -
 .../broadinstitute/gatk/engine/walkers/RMD.java    |    56 -
 .../gatk/engine/walkers/ReadFilters.java           |    45 -
 .../gatk/engine/walkers/ReadPairWalker.java        |    63 -
 .../gatk/engine/walkers/ReadWalker.java            |    55 -
 .../gatk/engine/walkers/RefWalker.java             |    39 -
 .../gatk/engine/walkers/Reference.java             |    47 -
 .../gatk/engine/walkers/RemoveProgramRecords.java  |    46 -
 .../gatk/engine/walkers/Requires.java              |    52 -
 .../gatk/engine/walkers/RodWalker.java             |    39 -
 .../gatk/engine/walkers/TreeReducible.java         |    49 -
 .../broadinstitute/gatk/engine/walkers/Walker.java |   177 -
 .../gatk/engine/walkers/WalkerName.java            |    42 -
 .../broadinstitute/gatk/engine/walkers/Window.java |    57 -
 .../walkers/diffengine/BAMDiffableReader.java      |   119 -
 .../engine/walkers/diffengine/DiffElement.java     |   125 -
 .../gatk/engine/walkers/diffengine/DiffEngine.java |   437 -
 .../gatk/engine/walkers/diffengine/DiffNode.java   |   249 -
 .../engine/walkers/diffengine/DiffObjects.java     |   276 -
 .../gatk/engine/walkers/diffengine/DiffValue.java  |    90 -
 .../engine/walkers/diffengine/DiffableReader.java  |    66 -
 .../gatk/engine/walkers/diffengine/Difference.java |   137 -
 .../diffengine/GATKReportDiffableReader.java       |   104 -
 .../walkers/diffengine/VCFDiffableReader.java      |   145 -
 .../org/broadinstitute/gatk/tools/CatVariants.java |    95 +-
 .../broadinstitute/gatk/tools/ListAnnotations.java |    85 -
 .../tools/walkers/annotator/AlleleBalance.java     |    28 +-
 .../walkers/annotator/AlleleBalanceBySample.java   |    20 +-
 .../annotator/AlleleSpecificAnnotationData.java    |    96 +
 .../gatk/tools/walkers/annotator/BaseCounts.java   |    17 +-
 .../annotator/ChromosomeCountConstants.java        |    44 -
 .../walkers/annotator/CompressedDataList.java      |   117 +
 .../annotator/FractionInformativeReads.java        |   115 +
 .../gatk/tools/walkers/annotator/LowMQ.java        |    20 +-
 .../annotator/MappingQualityZeroBySample.java      |    20 +-
 .../gatk/tools/walkers/annotator/NBaseCount.java   |    21 +-
 .../walkers/annotator/ReducibleAnnotationData.java |   105 +
 .../gatk/tools/walkers/annotator/SnpEff.java       |    78 +-
 .../gatk/tools/walkers/annotator/SnpEffUtil.java   |     2 +-
 .../tools/walkers/annotator/VariantAnnotator.java  |   105 +-
 .../walkers/annotator/VariantAnnotatorEngine.java  |   320 +-
 .../walkers/annotator/VariantOverlapAnnotator.java |     4 +-
 .../interfaces/AS_StandardAnnotation.java          |    31 +
 .../interfaces/ActiveRegionBasedAnnotation.java    |     2 +-
 .../annotator/interfaces/AnnotationHelpUtils.java  |    56 +
 .../interfaces/AnnotationInterfaceManager.java     |     4 +-
 .../annotator/interfaces/AnnotationType.java       |     2 +-
 .../annotator/interfaces/AnnotatorCompatible.java  |     2 +-
 .../interfaces/ExperimentalAnnotation.java         |     2 +-
 .../annotator/interfaces/GenotypeAnnotation.java   |     8 +-
 .../annotator/interfaces/InfoFieldAnnotation.java  |    23 +-
 .../annotator/interfaces/ReducibleAnnotation.java  |    88 +
 .../interfaces/RodRequiringAnnotation.java         |     2 +-
 .../annotator/interfaces/StandardAnnotation.java   |     2 +-
 .../interfaces/StandardSomaticAnnotation.java      |    28 +
 .../annotator/interfaces/StandardUGAnnotation.java |    28 +
 .../interfaces/VariantAnnotatorAnnotation.java     |     2 +-
 .../interfaces/WorkInProgressAnnotation.java       |     2 +-
 .../tools/walkers/beagle/BeagleOutputToVCF.java    |   392 -
 .../tools/walkers/beagle/ProduceBeagleInput.java   |   463 -
 .../walkers/beagle/VariantsToBeagleUnphased.java   |   184 -
 .../gatk/tools/walkers/coverage/CallableLoci.java  |    56 +-
 .../walkers/coverage/CompareCallableLoci.java      |    38 +-
 .../gatk/tools/walkers/coverage/CoverageUtils.java |    23 +-
 .../tools/walkers/coverage/DepthOfCoverage.java    |   101 +-
 .../walkers/coverage/DepthOfCoverageStats.java     |     2 +-
 .../gatk/tools/walkers/coverage/DoCOutputType.java |     2 +-
 .../walkers/coverage/GCContentByInterval.java      |    16 +-
 .../diagnostics/CoveredByNSamplesSites.java        |   154 -
 .../walkers/diagnostics/ErrorRatePerCycle.java     |    25 +-
 .../walkers/diagnostics/ReadGroupProperties.java   |    36 +-
 .../diagnostics/ReadLengthDistribution.java        |    27 +-
 .../gatk/tools/walkers/diffengine/DiffObjects.java |   279 +
 .../tools/walkers/examples/GATKDocsExample.java    |     8 +-
 .../tools/walkers/examples/GATKPaperGenotyper.java |     8 +-
 .../fasta/FastaAlternateReferenceMaker.java        |    88 +-
 .../tools/walkers/fasta/FastaReferenceMaker.java   |    42 +-
 .../gatk/tools/walkers/fasta/FastaSequence.java    |     2 +-
 .../gatk/tools/walkers/fasta/FastaStats.java       |    14 +-
 .../gatk/tools/walkers/filters/ClusteredSnps.java  |     2 +-
 .../tools/walkers/filters/FiltrationContext.java   |     4 +-
 .../walkers/filters/FiltrationContextWindow.java   |     2 +-
 .../tools/walkers/filters/VariantFiltration.java   |   167 +-
 .../gatk/tools/walkers/genotyper/AlleleList.java   |    41 -
 .../walkers/genotyper/AlleleListPermutation.java   |    35 -
 .../tools/walkers/genotyper/AlleleListUtils.java   |   334 -
 .../tools/walkers/genotyper/IndexedAlleleList.java |    95 -
 .../tools/walkers/genotyper/IndexedSampleList.java |    96 -
 .../gatk/tools/walkers/genotyper/SampleList.java   |    42 -
 .../tools/walkers/genotyper/SampleListUtils.java   |   224 -
 .../haplotypecaller/HCMappingQualityFilter.java    |    24 +-
 .../gatk/tools/walkers/help/WalkerDoclet.java      |    64 +
 .../walkers/help/WalkerDocumentationHandler.java   |   375 +
 .../gatk/tools/walkers/package-info.java           |     2 +-
 .../gatk/tools/walkers/qc/CheckPileup.java         |    14 +-
 .../gatk/tools/walkers/qc/CountBases.java          |    17 +-
 .../gatk/tools/walkers/qc/CountIntervals.java      |    20 +-
 .../gatk/tools/walkers/qc/CountLoci.java           |    18 +-
 .../gatk/tools/walkers/qc/CountMales.java          |    19 +-
 .../gatk/tools/walkers/qc/CountRODs.java           |    24 +-
 .../gatk/tools/walkers/qc/CountRODsByRef.java      |    20 +-
 .../gatk/tools/walkers/qc/CountReadEvents.java     |    19 +-
 .../gatk/tools/walkers/qc/CountReads.java          |    21 +-
 .../gatk/tools/walkers/qc/CountTerminusEvent.java  |    18 +-
 .../gatk/tools/walkers/qc/DocumentationTest.java   |     8 +-
 .../gatk/tools/walkers/qc/ErrorThrowing.java       |    43 +-
 .../gatk/tools/walkers/qc/FlagStat.java            |    16 +-
 .../gatk/tools/walkers/qc/Pileup.java              |    22 +-
 .../gatk/tools/walkers/qc/PrintRODs.java           |    15 +-
 .../gatk/tools/walkers/qc/QCRef.java               |    19 +-
 .../gatk/tools/walkers/qc/ReadClippingStats.java   |    30 +-
 .../gatk/tools/walkers/qc/RodSystemValidation.java |    10 +-
 .../gatk/tools/walkers/readutils/ClipReads.java    |    10 +-
 .../gatk/tools/walkers/readutils/PrintReads.java   |    51 +-
 .../walkers/readutils/ReadAdaptorTrimmer.java      |   395 -
 .../gatk/tools/walkers/readutils/SplitSamFile.java |    39 +-
 .../gatk/tools/walkers/rnaseq/ASEReadCounter.java  |   311 +
 .../tools/walkers/varianteval/VariantEval.java     |   144 +-
 .../varianteval/VariantEvalReportWriter.java       |     6 +-
 .../varianteval/evaluators/CompOverlap.java        |     8 +-
 .../varianteval/evaluators/CountVariants.java      |     8 +-
 .../evaluators/IndelLengthHistogram.java           |     8 +-
 .../varianteval/evaluators/IndelSummary.java       |     8 +-
 .../evaluators/MendelianViolationEvaluator.java    |    12 +-
 .../varianteval/evaluators/MetricsCollection.java  |    67 +
 .../evaluators/MultiallelicSummary.java            |     8 +-
 .../varianteval/evaluators/PrintMissingComp.java   |    16 +-
 .../varianteval/evaluators/StandardEval.java       |     2 +-
 .../evaluators/ThetaVariantEvaluator.java          |     8 +-
 .../evaluators/TiTvVariantEvaluator.java           |     8 +-
 .../varianteval/evaluators/ValidationReport.java   |     8 +-
 .../varianteval/evaluators/VariantEvaluator.java   |     8 +-
 .../varianteval/evaluators/VariantSummary.java     |    10 +-
 .../varianteval/stratifications/AlleleCount.java   |    13 +-
 .../stratifications/AlleleFrequency.java           |     8 +-
 .../varianteval/stratifications/CompRod.java       |     8 +-
 .../varianteval/stratifications/Contig.java        |     8 +-
 .../walkers/varianteval/stratifications/CpG.java   |     8 +-
 .../varianteval/stratifications/Degeneracy.java    |     8 +-
 .../stratifications/DynamicStratification.java     |     2 +-
 .../varianteval/stratifications/EvalRod.java       |     8 +-
 .../varianteval/stratifications/Family.java        |    55 +
 .../varianteval/stratifications/Filter.java        |     8 +-
 .../stratifications/FunctionalClass.java           |     8 +-
 .../varianteval/stratifications/IndelSize.java     |     8 +-
 .../stratifications/IntervalStratification.java    |    10 +-
 .../stratifications/JexlExpression.java            |     8 +-
 .../varianteval/stratifications/Novelty.java       |     8 +-
 .../varianteval/stratifications/OneBPIndel.java    |     8 +-
 .../stratifications/RequiredStratification.java    |     2 +-
 .../varianteval/stratifications/Sample.java        |     8 +-
 .../stratifications/SnpEffPositionModifier.java    |     9 +-
 .../stratifications/StandardStratification.java    |     2 +-
 .../varianteval/stratifications/TandemRepeat.java  |     8 +-
 .../stratifications/VariantStratifier.java         |     8 +-
 .../varianteval/stratifications/VariantType.java   |     8 +-
 .../stratifications/manager/StratNode.java         |     2 +-
 .../stratifications/manager/StratNodeIterator.java |     2 +-
 .../manager/StratificationManager.java             |     2 +-
 .../stratifications/manager/Stratifier.java        |     2 +-
 .../tools/walkers/varianteval/util/Analysis.java   |     2 +-
 .../varianteval/util/AnalysisModuleScanner.java    |     2 +-
 .../tools/walkers/varianteval/util/DataPoint.java  |     2 +-
 .../varianteval/util/EvaluationContext.java        |     8 +-
 .../tools/walkers/varianteval/util/Molten.java     |     2 +-
 .../varianteval/util/SortableJexlVCMatchExp.java   |     2 +-
 .../walkers/varianteval/util/VariantEvalUtils.java |    36 +-
 .../variantrecalibration/VQSRCalibrationCurve.java |     2 +-
 .../walkers/variantutils/CombineVariants.java      |   187 +-
 .../walkers/variantutils/ConcordanceMetrics.java   |    90 +-
 .../walkers/variantutils/FilterLiftedVariants.java |   136 -
 .../walkers/variantutils/GenotypeConcordance.java  |    36 +-
 .../variantutils/LeftAlignAndTrimVariants.java     |   156 +-
 .../walkers/variantutils/LiftoverVariants.java     |   179 -
 .../variantutils/RandomlySplitVariants.java        |    44 +-
 .../tools/walkers/variantutils/SelectHeaders.java  |    51 +-
 .../tools/walkers/variantutils/SelectVariants.java |   819 +-
 .../walkers/variantutils/ValidateVariants.java     |    68 +-
 .../variantutils/VariantValidationAssessor.java    |   304 -
 .../variantutils/VariantsToAllelicPrimitives.java  |    42 +-
 .../walkers/variantutils/VariantsToBinaryPed.java  |   103 +-
 .../walkers/variantutils/VariantsToTable.java      |    38 +-
 .../tools/walkers/variantutils/VariantsToVCF.java  |    34 +-
 .../gatk/utils/AutoFormattingTime.java             |   185 -
 .../org/broadinstitute/gatk/utils/BaseUtils.java   |   672 -
 .../org/broadinstitute/gatk/utils/BitSetUtils.java |   134 -
 .../gatk/utils/ContigComparator.java               |    80 -
 .../gatk/utils/DeprecatedToolChecks.java           |    96 -
 .../broadinstitute/gatk/utils/GenomeLocParser.java |   622 -
 .../gatk/utils/GenomeLocSortedSet.java             |   476 -
 .../broadinstitute/gatk/utils/HeapSizeMonitor.java |   107 -
 .../org/broadinstitute/gatk/utils/IndelUtils.java  |   262 -
 .../org/broadinstitute/gatk/utils/LRUCache.java    |    45 -
 .../utils/MRUCachingSAMSequenceDictionary.java     |   186 -
 .../broadinstitute/gatk/utils/MannWhitneyU.java    |   508 -
 .../org/broadinstitute/gatk/utils/MathUtils.java   |  1690 --
 .../java/org/broadinstitute/gatk/utils/Median.java |    94 -
 .../gatk/utils/MendelianViolation.java             |   460 -
 .../gatk/utils/MultiThreadedErrorTracker.java      |   105 -
 .../org/broadinstitute/gatk/utils/NGSPlatform.java |   136 -
 .../org/broadinstitute/gatk/utils/PathUtils.java   |   195 -
 .../broadinstitute/gatk/utils/QualityUtils.java    |   397 -
 .../gatk/utils/R/RScriptExecutor.java              |   191 -
 .../gatk/utils/R/RScriptExecutorException.java     |    34 -
 .../gatk/utils/R/RScriptLibrary.java               |    66 -
 .../org/broadinstitute/gatk/utils/R/RUtils.java    |    91 -
 .../org/broadinstitute/gatk/utils/SampleUtils.java |   290 -
 .../gatk/utils/SequenceDictionaryUtils.java        |   527 -
 .../gatk/utils/UnvalidatingGenomeLoc.java          |    50 -
 .../java/org/broadinstitute/gatk/utils/Utils.java  |  1186 --
 .../gatk/utils/activeregion/ActiveRegion.java      |   500 -
 .../utils/activeregion/ActiveRegionReadState.java  |    40 -
 .../gatk/utils/activeregion/ActivityProfile.java   |   520 -
 .../utils/activeregion/ActivityProfileState.java   |   112 -
 .../activeregion/BandPassActivityProfile.java      |   194 -
 .../gatk/utils/analysis/AminoAcid.java             |   114 -
 .../gatk/utils/analysis/AminoAcidTable.java        |    94 -
 .../gatk/utils/analysis/AminoAcidUtils.java        |    77 -
 .../org/broadinstitute/gatk/utils/baq/BAQ.java     |   713 -
 .../gatk/utils/baq/BAQReadTransformer.java         |    74 -
 .../gatk/utils/baq/ReadTransformingIterator.java   |    69 -
 .../gatk/utils/classloader/JVMUtils.java           |   309 -
 .../gatk/utils/classloader/PluginManager.java      |   355 -
 .../utils/classloader/ProtectedPackageSource.java  |    28 -
 .../utils/classloader/PublicPackageSource.java     |    28 -
 .../gatk/utils/clipping/ClippingOp.java            |   617 -
 .../utils/clipping/ClippingRepresentation.java     |    63 -
 .../gatk/utils/clipping/ReadClipper.java           |   568 -
 .../gatk/utils/codecs/beagle/BeagleCodec.java      |   276 -
 .../gatk/utils/codecs/beagle/BeagleFeature.java    |   111 -
 .../gatk/utils/codecs/hapmap/RawHapMapCodec.java   |   125 -
 .../gatk/utils/codecs/hapmap/RawHapMapFeature.java |   196 -
 .../gatk/utils/codecs/refseq/RefSeqCodec.java      |   171 -
 .../gatk/utils/codecs/refseq/RefSeqFeature.java    |   323 -
 .../gatk/utils/codecs/refseq/Transcript.java       |    78 -
 .../utils/codecs/sampileup/SAMPileupCodec.java     |   354 -
 .../utils/codecs/sampileup/SAMPileupFeature.java   |   276 -
 .../gatk/utils/codecs/samread/SAMReadCodec.java    |   123 -
 .../gatk/utils/codecs/samread/SAMReadFeature.java  |   199 -
 .../gatk/utils/codecs/table/BedTableCodec.java     |    59 -
 .../gatk/utils/codecs/table/TableCodec.java        |   126 -
 .../gatk/utils/codecs/table/TableFeature.java      |    99 -
 .../gatk/utils/collections/DefaultHashMap.java     |    56 -
 .../gatk/utils/collections/ExpandingArrayList.java |    69 -
 .../gatk/utils/collections/IndexedSet.java         |   342 -
 .../collections/LoggingNestedIntegerArray.java     |   120 -
 .../gatk/utils/collections/NestedIntegerArray.java |   221 -
 .../gatk/utils/collections/Pair.java               |    93 -
 .../gatk/utils/collections/Permutation.java        |   103 -
 .../gatk/utils/collections/PrimitivePair.java      |   200 -
 .../gatk/utils/collections/RODMergingIterator.java |   160 -
 .../gatk/utils/commandline/Advanced.java           |    41 -
 .../gatk/utils/commandline/Argument.java           |   125 -
 .../gatk/utils/commandline/ArgumentCollection.java |    45 -
 .../gatk/utils/commandline/ArgumentDefinition.java |   297 -
 .../utils/commandline/ArgumentDefinitionGroup.java |    99 -
 .../utils/commandline/ArgumentDefinitions.java     |   195 -
 .../gatk/utils/commandline/ArgumentException.java  |    38 -
 .../gatk/utils/commandline/ArgumentIOType.java     |    52 -
 .../gatk/utils/commandline/ArgumentMatch.java      |   294 -
 .../utils/commandline/ArgumentMatchFileValue.java  |    52 -
 .../gatk/utils/commandline/ArgumentMatchSite.java  |    77 -
 .../utils/commandline/ArgumentMatchSource.java     |    97 -
 .../utils/commandline/ArgumentMatchSourceType.java |    33 -
 .../commandline/ArgumentMatchStringValue.java      |    49 -
 .../gatk/utils/commandline/ArgumentMatchValue.java |    43 -
 .../gatk/utils/commandline/ArgumentMatches.java    |   211 -
 .../gatk/utils/commandline/ArgumentSource.java     |   243 -
 .../utils/commandline/ArgumentTypeDescriptor.java  |  1030 -
 .../gatk/utils/commandline/ClassType.java          |    40 -
 .../gatk/utils/commandline/CommandLineProgram.java |   447 -
 .../gatk/utils/commandline/CommandLineUtils.java   |   192 -
 .../commandline/EnumerationArgumentDefault.java    |    65 -
 .../gatk/utils/commandline/Gather.java             |    41 -
 .../gatk/utils/commandline/Gatherer.java           |    47 -
 .../gatk/utils/commandline/Hidden.java             |    41 -
 .../gatk/utils/commandline/Input.java              |    83 -
 .../commandline/IntervalArgumentCollection.java    |    88 -
 .../gatk/utils/commandline/IntervalBinding.java    |   106 -
 .../commandline/MissingArgumentValueException.java |    50 -
 .../gatk/utils/commandline/Output.java             |    90 -
 .../gatk/utils/commandline/ParsedArgs.java         |    38 -
 .../gatk/utils/commandline/ParsedListArgs.java     |    55 -
 .../gatk/utils/commandline/ParsingEngine.java      |   829 -
 .../commandline/ParsingEngineArgumentFiles.java    |    55 -
 .../commandline/ParsingEngineArgumentProvider.java |    37 -
 .../gatk/utils/commandline/ParsingMethod.java      |   127 -
 .../gatk/utils/commandline/RodBinding.java         |   197 -
 .../utils/commandline/RodBindingCollection.java    |    89 -
 .../gatk/utils/commandline/Tags.java               |   112 -
 .../gatk/utils/commandline/package-info.java       |    26 -
 .../gatk/utils/crypt/CryptUtils.java               |   391 -
 .../broadinstitute/gatk/utils/crypt/GATKKey.java   |   350 -
 .../gatk/utils/duplicates/DupUtils.java            |   142 -
 .../gatk/utils/duplicates/DuplicateComp.java       |    66 -
 .../DynamicClassResolutionException.java           |    54 -
 .../gatk/utils/exceptions/UserException.java       |   485 -
 .../gatk/utils/fasta/ArtificialFastaUtils.java     |   154 -
 .../fasta/CachingIndexedFastaSequenceFile.java     |   311 -
 .../gatk/utils/fasta/package-info.java             |    26 -
 .../gatk/utils/file/FSLockWithShared.java          |   293 -
 .../gatk/utils/fragments/FragmentCollection.java   |    67 -
 .../gatk/utils/fragments/FragmentUtils.java        |   377 -
 .../gatk/utils/genotyper/DiploidGenotype.java      |   125 -
 .../gatk/utils/genotyper/MostLikelyAllele.java     |   134 -
 .../genotyper/PerReadAlleleLikelihoodMap.java      |   413 -
 .../gatk/utils/genotyper/ReadLikelihoods.java      |  1587 --
 .../gatk/utils/haplotype/EventMap.java             |   423 -
 .../gatk/utils/haplotype/Haplotype.java            |   343 -
 .../utils/haplotype/HaplotypeBaseComparator.java   |    42 -
 .../utils/haplotype/HaplotypeScoreComparator.java  |    39 -
 .../haplotype/HaplotypeSizeAndBaseComparator.java  |    47 -
 .../gatk/utils/help/ApplicationDetails.java        |    95 -
 .../gatk/utils/help/DocletUtils.java               |    76 -
 .../gatk/utils/help/DocumentedGATKFeature.java     |    50 -
 .../utils/help/DocumentedGATKFeatureHandler.java   |    99 -
 .../utils/help/DocumentedGATKFeatureObject.java    |    61 -
 .../gatk/utils/help/ForumAPIUtils.java             |   173 -
 .../gatk/utils/help/ForumDiscussion.java           |    84 -
 .../gatk/utils/help/GATKDocUtils.java              |    71 -
 .../gatk/utils/help/GATKDocWorkUnit.java           |   127 -
 .../broadinstitute/gatk/utils/help/GATKDoclet.java |   576 -
 .../gatk/utils/help/GSONArgument.java              |    83 -
 .../gatk/utils/help/GSONWorkUnit.java              |    86 -
 .../utils/help/GenericDocumentationHandler.java    |  1008 -
 .../gatk/utils/help/HelpConstants.java             |    83 -
 .../gatk/utils/help/HelpFormatter.java             |   336 -
 .../broadinstitute/gatk/utils/help/HelpUtils.java  |    64 -
 .../utils/help/ResourceBundleExtractorDoclet.java  |   228 -
 .../gatk/utils/instrumentation/Sizeof.java         |   146 -
 .../gatk/utils/interval/IntervalMergingRule.java   |    35 -
 .../gatk/utils/interval/IntervalSetRule.java       |    36 -
 .../gatk/utils/interval/IntervalUtils.java         |   890 -
 .../gatk/utils/io/FileExtension.java               |    37 -
 .../utils/io/HardThresholdingOutputStream.java     |    56 -
 .../org/broadinstitute/gatk/utils/io/IOUtils.java  |   575 -
 .../org/broadinstitute/gatk/utils/io/Resource.java |    91 -
 .../gatk/utils/jna/clibrary/JNAUtils.java          |    59 -
 .../gatk/utils/jna/clibrary/LibC.java              |   200 -
 .../gatk/utils/jna/drmaa/v1_0/JnaJobInfo.java      |   101 -
 .../gatk/utils/jna/drmaa/v1_0/JnaJobTemplate.java  |   316 -
 .../gatk/utils/jna/drmaa/v1_0/JnaSession.java      |   461 -
 .../utils/jna/drmaa/v1_0/JnaSessionFactory.java    |    40 -
 .../gatk/utils/jna/drmaa/v1_0/LibDrmaa.java        |   723 -
 .../gatk/utils/jna/lsf/v7_0_6/LibBat.java          | 20014 -------------------
 .../gatk/utils/jna/lsf/v7_0_6/LibLsf.java          |  1780 --
 .../utils/locusiterator/AlignmentStateMachine.java |   370 -
 .../utils/locusiterator/LIBSDownsamplingInfo.java  |    51 -
 .../gatk/utils/locusiterator/LIBSPerformance.java  |   198 -
 .../gatk/utils/locusiterator/LocusIterator.java    |    62 -
 .../utils/locusiterator/LocusIteratorByState.java  |   454 -
 .../locusiterator/PerSampleReadStateManager.java   |   261 -
 .../gatk/utils/locusiterator/ReadStateManager.java |   289 -
 .../utils/locusiterator/SamplePartitioner.java     |   172 -
 .../gatk/utils/nanoScheduler/EOFMarkedValue.java   |   105 -
 .../gatk/utils/nanoScheduler/InputProducer.java    |   217 -
 .../gatk/utils/nanoScheduler/MapResult.java        |    75 -
 .../gatk/utils/nanoScheduler/MapResultsQueue.java  |   116 -
 .../gatk/utils/nanoScheduler/NSMapFunction.java    |    44 -
 .../utils/nanoScheduler/NSProgressFunction.java    |    37 -
 .../gatk/utils/nanoScheduler/NSReduceFunction.java |    43 -
 .../gatk/utils/nanoScheduler/NanoScheduler.java    |   494 -
 .../gatk/utils/nanoScheduler/Reducer.java          |   169 -
 .../gatk/utils/pairhmm/BatchPairHMM.java           |    41 -
 .../gatk/utils/pairhmm/Log10PairHMM.java           |   220 -
 .../gatk/utils/pairhmm/N2MemoryPairHMM.java        |    98 -
 .../broadinstitute/gatk/utils/pairhmm/PairHMM.java |   357 -
 .../gatk/utils/pairhmm/PairHMMModel.java           |   435 -
 .../gatk/utils/pairhmm/PairHMMReadyHaplotypes.java |   182 -
 .../utils/pileup/MergingPileupElementIterator.java |    76 -
 .../gatk/utils/pileup/PileupElement.java           |   539 -
 .../gatk/utils/pileup/PileupElementFilter.java     |    36 -
 .../gatk/utils/pileup/PileupElementTracker.java    |   154 -
 .../gatk/utils/pileup/ReadBackedPileup.java        |   295 -
 .../gatk/utils/pileup/ReadBackedPileupImpl.java    |  1040 -
 .../gatk/utils/progressmeter/ProgressMeter.java    |   465 -
 .../utils/progressmeter/ProgressMeterDaemon.java   |   111 -
 .../utils/progressmeter/ProgressMeterData.java     |    79 -
 .../gatk/utils/recalibration/BQSRArgumentSet.java  |    85 -
 .../gatk/utils/recalibration/BQSRMode.java         |    55 -
 .../gatk/utils/recalibration/EventType.java        |    72 -
 .../gatk/utils/runtime/CapturedStreamOutput.java   |   134 -
 .../gatk/utils/runtime/InputStreamSettings.java    |   116 -
 .../gatk/utils/runtime/OutputStreamSettings.java   |   127 -
 .../gatk/utils/runtime/ProcessController.java      |   387 -
 .../gatk/utils/runtime/ProcessOutput.java          |    57 -
 .../gatk/utils/runtime/ProcessSettings.java        |   140 -
 .../gatk/utils/runtime/RuntimeUtils.java           |    77 -
 .../gatk/utils/runtime/StreamLocation.java         |    33 -
 .../gatk/utils/runtime/StreamOutput.java           |    69 -
 .../gatk/utils/sam/AlignmentStartComparator.java   |    50 -
 .../sam/AlignmentStartWithNoTiesComparator.java    |    73 -
 .../gatk/utils/sam/AlignmentUtils.java             |  1337 --
 .../gatk/utils/sam/ArtificialBAMBuilder.java       |   242 -
 .../utils/sam/ArtificialGATKSAMFileWriter.java     |   130 -
 .../utils/sam/ArtificialMultiSampleReadStream.java |    87 -
 .../utils/sam/ArtificialPatternedSAMIterator.java  |   172 -
 .../gatk/utils/sam/ArtificialReadsTraversal.java   |   140 -
 .../gatk/utils/sam/ArtificialSAMFileReader.java    |   156 -
 .../gatk/utils/sam/ArtificialSAMIterator.java      |   212 -
 .../gatk/utils/sam/ArtificialSAMQueryIterator.java |   259 -
 .../gatk/utils/sam/ArtificialSAMUtils.java         |   484 -
 .../sam/ArtificialSingleSampleReadStream.java      |   213 -
 .../ArtificialSingleSampleReadStreamAnalyzer.java  |   282 -
 .../gatk/utils/sam/BySampleSAMFileWriter.java      |    70 -
 .../broadinstitute/gatk/utils/sam/CigarUtils.java  |   273 -
 .../gatk/utils/sam/GATKSAMReadGroupRecord.java     |   116 -
 .../gatk/utils/sam/GATKSAMRecord.java              |   631 -
 .../gatk/utils/sam/GATKSamRecordFactory.java       |    75 -
 .../sam/MisencodedBaseQualityReadTransformer.java  |    94 -
 .../gatk/utils/sam/NWaySAMFileWriter.java          |   185 -
 .../ReadUnclippedStartWithNoTiesComparator.java    |    73 -
 .../broadinstitute/gatk/utils/sam/ReadUtils.java   |   964 -
 .../gatk/utils/sam/SAMFileReaderBuilder.java       |    84 -
 .../gatk/utils/sam/SimplifyingSAMFileWriter.java   |    86 -
 .../gatk/utils/sam/package-info.java               |    26 -
 .../GlobalEdgeGreedySWPairwiseAlignment.java       |   208 -
 .../gatk/utils/smithwaterman/Parameters.java       |    62 -
 .../utils/smithwaterman/SWPairwiseAlignment.java   |   599 -
 .../smithwaterman/SWPairwiseAlignmentMain.java     |   221 -
 .../gatk/utils/smithwaterman/SWParameterSet.java   |    51 -
 .../gatk/utils/smithwaterman/SmithWaterman.java    |    57 -
 .../gatk/utils/text/ListFileUtils.java             |   344 -
 .../gatk/utils/text/TextFormattingUtils.java       |   172 -
 .../broadinstitute/gatk/utils/text/XReadLines.java |   208 -
 .../EfficiencyMonitoringThreadFactory.java         |   160 -
 .../gatk/utils/threading/NamedThreadFactory.java   |    51 -
 .../utils/threading/ThreadEfficiencyMonitor.java   |   232 -
 .../gatk/utils/threading/ThreadLocalArray.java     |    65 -
 .../gatk/utils/threading/ThreadPoolMonitor.java    |    77 -
 .../gatk/utils/threading/package-info.java         |    26 -
 .../gatk/utils/variant/GATKVCFIndexType.java       |    39 -
 .../gatk/utils/variant/GATKVCFUtils.java           |   316 -
 .../utils/variant/GATKVariantContextUtils.java     |  1960 --
 .../gatk/utils/variant/HomoSapiensConstants.java   |    51 -
 .../gatk/utils/wiggle/WiggleHeader.java            |    56 -
 .../gatk/utils/wiggle/WiggleWriter.java            |   117 -
 .../walkers/variantrecalibration/plot_Tranches.R   |    93 -
 .../htsjdk/samtools/GATKBAMFileSpanUnitTest.java   |   254 -
 .../java/htsjdk/samtools/GATKChunkUnitTest.java    |    71 -
 .../gatk/engine/CommandLineGATKUnitTest.java       |    68 -
 .../gatk/engine/EngineFeaturesIntegrationTest.java |   736 -
 .../gatk/engine/GenomeAnalysisEngineUnitTest.java  |   273 -
 .../gatk/engine/MaxRuntimeIntegrationTest.java     |   151 -
 .../gatk/engine/ReadMetricsUnitTest.java           |   371 -
 .../gatk/engine/WalkerManagerUnitTest.java         |    71 -
 .../providers/AllLocusViewUnitTest.java            |    90 -
 .../providers/CoveredLocusViewUnitTest.java        |   103 -
 .../IntervalReferenceOrderedViewUnitTest.java      |   366 -
 .../providers/LocusReferenceViewUnitTest.java      |   143 -
 .../datasources/providers/LocusViewTemplate.java   |   405 -
 .../providers/ReadReferenceViewUnitTest.java       |   160 -
 .../providers/ReferenceOrderedViewUnitTest.java    |   157 -
 .../providers/ReferenceViewTemplate.java           |   122 -
 .../providers/ShardDataProviderUnitTest.java       |   152 -
 .../reads/ActiveRegionShardBalancerUnitTest.java   |   102 -
 .../datasources/reads/DownsamplerBenchmark.java    |    94 -
 .../datasources/reads/FilePointerUnitTest.java     |   129 -
 .../datasources/reads/GATKBAMIndexUnitTest.java    |   108 -
 .../datasources/reads/GATKWalkerBenchmark.java     |   141 -
 .../IntervalOverlapFilteringIteratorUnitTest.java  |   150 -
 .../engine/datasources/reads/MockLocusShard.java   |    51 -
 .../datasources/reads/PicardBaselineBenchmark.java |   101 -
 .../datasources/reads/ReadProcessingBenchmark.java |    83 -
 .../reads/ReadShardBalancerUnitTest.java           |   195 -
 .../datasources/reads/SAMDataSourceUnitTest.java   |   253 -
 .../datasources/reads/SAMReaderIDUnitTest.java     |    49 -
 .../reads/SeekableBufferedStreamUnitTest.java      |   101 -
 .../reads/TheoreticalMinimaBenchmark.java          |   114 -
 .../ReferenceDataSourceIntegrationTest.java        |    75 -
 .../rmd/ReferenceOrderedDataPoolUnitTest.java      |   208 -
 .../rmd/ReferenceOrderedQueryDataPoolUnitTest.java |    89 -
 .../AlleleBiasedDownsamplingUtilsUnitTest.java     |   219 -
 .../downsampling/DownsamplingIntegrationTest.java  |    44 -
 .../DownsamplingReadsIteratorUnitTest.java         |   139 -
 .../FractionalDownsamplerUnitTest.java             |   158 -
 .../downsampling/LevelingDownsamplerUnitTest.java  |   163 -
 ...PerSampleDownsamplingReadsIteratorUnitTest.java |   299 -
 ...edArtificialSingleSampleReadStreamAnalyzer.java |   127 -
 .../downsampling/ReservoirDownsamplerUnitTest.java |   131 -
 .../SimplePositionalDownsamplerUnitTest.java       |   331 -
 .../gatk/engine/executive/ReduceTreeUnitTest.java  |   254 -
 .../AllowNCigarMalformedReadFilterUnitTest.java    |    77 -
 .../engine/filters/BadCigarFilterUnitTest.java     |    91 -
 .../filters/BadReadGroupsIntegrationTest.java      |    52 -
 .../filters/MalformedReadFilterUnitTest.java       |   246 -
 .../filters/NDNCigarReadTransformerUnitTest.java   |    70 -
 .../gatk/engine/filters/ReadFilterTest.java        |   370 -
 .../filters/ReadGroupBlackListFilterUnitTest.java  |   247 -
 .../filters/UnsafeMalformedReadFilterUnitTest.java |    50 -
 .../gatk/engine/io/OutputTrackerUnitTest.java      |    84 -
 .../iterators/BoundedReadIteratorUnitTest.java     |   145 -
 .../iterators/GATKSAMIteratorAdapterUnitTest.java  |   176 -
 .../iterators/ReadFormattingIteratorUnitTest.java  |    50 -
 .../iterators/VerifyingSamIteratorUnitTest.java    |   128 -
 .../engine/phonehome/GATKRunReportUnitTest.java    |   310 -
 .../engine/refdata/RefMetaDataTrackerUnitTest.java |   290 -
 .../refdata/tracks/FeatureManagerUnitTest.java     |   163 -
 .../refdata/tracks/RMDTrackBuilderUnitTest.java    |   190 -
 .../utils/CheckableCloseableTribbleIterator.java   |    90 -
 .../FeatureToGATKFeatureIteratorUnitTest.java      |    61 -
 .../refdata/utils/FlashBackIteratorUnitTest.java   |   364 -
 .../engine/refdata/utils/TestFeatureReader.java    |    53 -
 .../engine/refdata/utils/TestRMDTrackBuilder.java  |    71 -
 .../gatk/engine/report/GATKReportUnitTest.java     |   285 -
 .../gatk/engine/samples/PedReaderUnitTest.java     |   354 -
 .../gatk/engine/samples/SampleDBUnitTest.java      |   251 -
 .../gatk/engine/samples/SampleUnitTest.java        |    89 -
 .../engine/traversals/DummyActiveRegionWalker.java |   116 -
 .../traversals/TAROrderedReadCacheUnitTest.java    |   111 -
 .../traversals/TraverseActiveRegionsUnitTest.java  |   679 -
 .../traversals/TraverseDuplicatesUnitTest.java     |   162 -
 .../engine/traversals/TraverseReadsUnitTest.java   |   166 -
 .../gatk/engine/walkers/WalkerTest.java            |   455 -
 .../gatk/tools/CatVariantsIntegrationTest.java     |   197 +-
 .../gatk/tools/walkers/BAQIntegrationTest.java     |     6 +-
 .../CNV/SymbolicAllelesIntegrationTest.java        |     6 +-
 .../annotator/CompressedDataListUnitTest.java      |   118 +
 .../walkers/annotator/SnpEffUtilUnitTest.java      |     2 +-
 .../coverage/CallableLociIntegrationTest.java      |     2 +-
 .../CompareCallableLociWalkerIntegrationTest.java  |     2 +-
 .../DepthOfCoverageB36IntegrationTest.java         |     2 +-
 .../coverage/DepthOfCoverageIntegrationTest.java   |    35 +-
 .../diffengine/DiffObjectsIntegrationTest.java     |    76 +
 .../walkers/filters/VariantFiltrationUnitTest.java |   107 +
 .../walkers/qc/CheckPileupIntegrationTest.java     |     2 +-
 .../gatk/tools/walkers/qc/CountReadsUnitTest.java  |     2 +-
 .../qc/DictionaryConsistencyIntegrationTest.java   |     2 +-
 .../tools/walkers/qc/FlagStatIntegrationTest.java  |     2 +-
 .../walkers/qc/PileupWalkerIntegrationTest.java    |     2 +-
 .../readutils/ClipReadsWalkersIntegrationTest.java |    28 +-
 .../readutils/PrintReadsIntegrationTest.java       |    34 +-
 .../readutils/PrintReadsLargeScaleTest.java        |     2 +-
 .../walkers/readutils/PrintReadsUnitTest.java      |     6 +-
 .../ReadAdaptorTrimmerIntegrationTest.java         |    60 -
 .../readutils/SplitSamFileIntegrationTest.java     |    60 +
 .../rnaseq/ASEReadCounterIntegrationTest.java      |   112 +
 .../variantutils/FilterLiftedVariantsUnitTest.java |    54 -
 .../variantutils/SelectVariantsUnitTest.java       |    30 +-
 .../walkers/variantutils/VCFIntegrationTest.java   |   395 +
 .../gatk/utils/AutoFormattingTimeUnitTest.java     |   118 -
 .../org/broadinstitute/gatk/utils/BaseTest.java    |   568 -
 .../gatk/utils/BaseUtilsUnitTest.java              |   179 -
 .../gatk/utils/BitSetUtilsUnitTest.java            |    85 -
 .../gatk/utils/ExampleToCopyUnitTest.java          |   241 -
 .../gatk/utils/GATKTextReporter.java               |    41 -
 .../gatk/utils/GenomeLocParserBenchmark.java       |    81 -
 .../gatk/utils/GenomeLocParserUnitTest.java        |   509 -
 .../gatk/utils/GenomeLocSortedSetUnitTest.java     |   405 -
 .../gatk/utils/GenomeLocUnitTest.java              |   386 -
 .../java/org/broadinstitute/gatk/utils/MD5DB.java  |   312 -
 .../org/broadinstitute/gatk/utils/MD5Mismatch.java |    67 -
 .../MRUCachingSAMSequencingDictionaryUnitTest.java |    97 -
 .../org/broadinstitute/gatk/utils/MWUnitTest.java  |   131 -
 .../gatk/utils/MathUtilsUnitTest.java              |   913 -
 .../broadinstitute/gatk/utils/MedianUnitTest.java  |   115 -
 .../gatk/utils/NGSPlatformUnitTest.java            |   167 -
 .../gatk/utils/PathUtilsUnitTest.java              |    65 -
 .../gatk/utils/QualityUtilsUnitTest.java           |   189 -
 .../gatk/utils/R/RScriptExecutorUnitTest.java      |   110 -
 .../gatk/utils/R/RScriptLibraryUnitTest.java       |    47 -
 .../gatk/utils/R/RUtilsUnitTest.java               |    65 -
 .../gatk/utils/SampleUtilsUnitTest.java            |    52 -
 .../utils/SequenceDictionaryUtilsUnitTest.java     |   241 -
 .../gatk/utils/SimpleTimerUnitTest.java            |   179 -
 .../gatk/utils/TestNGTestTransformer.java          |    62 -
 .../broadinstitute/gatk/utils/UtilsUnitTest.java   |   363 -
 .../utils/activeregion/ActiveRegionUnitTest.java   |   395 -
 .../activeregion/ActivityProfileStateUnitTest.java |    92 -
 .../activeregion/ActivityProfileUnitTest.java      |   491 -
 .../BandPassActivityProfileUnitTest.java           |   339 -
 .../broadinstitute/gatk/utils/baq/BAQUnitTest.java |   257 -
 .../gatk/utils/classloader/JVMUtilsUnitTest.java   |    75 -
 .../gatk/utils/clipping/ReadClipperTestUtils.java  |   144 -
 .../gatk/utils/clipping/ReadClipperUnitTest.java   |   421 -
 .../gatk/utils/codecs/hapmap/HapMapUnitTest.java   |   164 -
 .../utils/collections/DefaultHashMapUnitTest.java  |   159 -
 .../collections/ExpandingArrayListUnitTest.java    |   177 -
 .../commandline/ArgumentMatchSiteUnitTest.java     |    80 -
 .../commandline/ArgumentMatchSourceUnitTest.java   |    99 -
 .../ArgumentTypeDescriptorUnitTest.java            |   233 -
 .../InvalidArgumentIntegrationTest.java            |    66 -
 .../utils/commandline/LoggingIntegrationTest.java  |   117 -
 .../utils/commandline/ParsingEngineUnitTest.java   |  1140 --
 .../commandline/RodBindingCollectionUnitTest.java  |   133 -
 .../gatk/utils/commandline/RodBindingUnitTest.java |    82 -
 .../gatk/utils/crypt/CryptUtilsUnitTest.java       |   199 -
 .../gatk/utils/crypt/GATKKeyIntegrationTest.java   |   157 -
 .../gatk/utils/crypt/GATKKeyUnitTest.java          |   129 -
 .../CachingIndexedFastaSequenceFileUnitTest.java   |   264 -
 .../gatk/utils/file/FSLockWithSharedUnitTest.java  |    60 -
 .../utils/fragments/FragmentUtilsBenchmark.java    |    81 -
 .../utils/fragments/FragmentUtilsUnitTest.java     |   390 -
 .../gatk/utils/haplotype/EventMapUnitTest.java     |   203 -
 .../gatk/utils/haplotype/HaplotypeUnitTest.java    |   249 -
 .../utils/interval/IntervalIntegrationTest.java    |   304 -
 .../gatk/utils/interval/IntervalUtilsUnitTest.java |  1110 -
 .../gatk/utils/io/IOUtilsUnitTest.java             |   326 -
 .../gatk/utils/jna/clibrary/LibCUnitTest.java      |    70 -
 .../utils/jna/drmaa/v1_0/JnaSessionQueueTest.java  |   165 -
 .../utils/jna/drmaa/v1_0/LibDrmaaQueueTest.java    |   257 -
 .../gatk/utils/jna/lsf/v7_0_6/LibBatQueueTest.java |   162 -
 .../AlignmentStateMachineUnitTest.java             |   110 -
 .../gatk/utils/locusiterator/LIBS_position.java    |   155 -
 .../locusiterator/LocusIteratorBenchmark.java      |   142 -
 .../LocusIteratorByStateBaseTest.java              |   252 -
 .../LocusIteratorByStateUnitTest.java              |   753 -
 .../PerSampleReadStateManagerUnitTest.java         |   188 -
 .../utils/nanoScheduler/InputProducerUnitTest.java |    94 -
 .../utils/nanoScheduler/MapResultUnitTest.java     |    65 -
 .../utils/nanoScheduler/NanoSchedulerUnitTest.java |   343 -
 .../gatk/utils/nanoScheduler/ReducerUnitTest.java  |   236 -
 .../gatk/utils/pileup/PileupElementUnitTest.java   |   189 -
 .../utils/pileup/ReadBackedPileupUnitTest.java     |   328 -
 .../progressmeter/ProgressMeterDaemonUnitTest.java |   121 -
 .../progressmeter/ProgressMeterDataUnitTest.java   |    86 -
 .../utils/recalibration/EventTypeUnitTest.java     |    61 -
 .../utils/report/ReportMarshallerUnitTest.java     |    64 -
 .../utils/runtime/ProcessControllerUnitTest.java   |   518 -
 .../gatk/utils/runtime/RuntimeUtilsUnitTest.java   |    42 -
 .../gatk/utils/sam/AlignmentUtilsUnitTest.java     |  1044 -
 .../utils/sam/ArtificialBAMBuilderUnitTest.java    |   121 -
 .../ArtificialPatternedSAMIteratorUnitTest.java    |   122 -
 .../utils/sam/ArtificialSAMFileWriterUnitTest.java |   120 -
 .../sam/ArtificialSAMQueryIteratorUnitTest.java    |   138 -
 .../gatk/utils/sam/ArtificialSAMUtilsUnitTest.java |   108 -
 .../ArtificialSingleSampleReadStreamUnitTest.java  |   186 -
 .../gatk/utils/sam/GATKSAMRecordUnitTest.java      |    78 -
 .../utils/sam/MisencodedBaseQualityUnitTest.java   |    96 -
 .../gatk/utils/sam/ReadUtilsUnitTest.java          |   340 -
 .../smithwaterman/SmithWatermanBenchmark.java      |    87 -
 .../gatk/utils/text/ListFileUtilsUnitTest.java     |   159 -
 .../utils/text/TextFormattingUtilsUnitTest.java    |    89 -
 .../EfficiencyMonitoringThreadFactoryUnitTest.java |   189 -
 .../utils/threading/ThreadPoolMonitorUnitTest.java |    64 -
 .../gatk/utils/variant/GATKVCFUtilsUnitTest.java   |   138 -
 .../variant/GATKVariantContextUtilsUnitTest.java   |  1612 --
 .../gatk/utils/variant/VCFIntegrationTest.java     |   377 -
 .../utils/variant/VariantContextBenchmark.java     |   377 -
 public/gatk-utils/pom.xml                          |    46 +-
 .../src/main/assembly/example-resources.xml        |     0
 .../gatk/utils/help/log4j.properties               |     7 -
 .../main/java/htsjdk/samtools/GATKBAMFileSpan.java |   308 +
 .../src/main/java/htsjdk/samtools/GATKBin.java     |   146 +
 .../src/main/java/htsjdk/samtools/GATKChunk.java   |   116 +
 .../java/htsjdk/samtools/PicardNamespaceUtils.java |    40 +
 .../gatk/utils/AutoFormattingTime.java             |   185 +
 .../org/broadinstitute/gatk/utils/BaseUtils.java   |   671 +
 .../org/broadinstitute/gatk/utils/BitSetUtils.java |   134 +
 .../gatk/utils/ContigComparator.java               |    80 +
 .../gatk/utils/DeprecatedToolChecks.java           |   107 +
 .../org/broadinstitute/gatk/utils/GenomeLoc.java   |     3 +-
 .../broadinstitute/gatk/utils/GenomeLocParser.java |   622 +
 .../gatk/utils/GenomeLocSortedSet.java             |   476 +
 .../gatk/utils/HasGenomeLocation.java              |     2 +-
 .../broadinstitute/gatk/utils/HeapSizeMonitor.java |   107 +
 .../org/broadinstitute/gatk/utils/IndelUtils.java  |   262 +
 .../org/broadinstitute/gatk/utils/LRUCache.java    |    45 +
 .../utils/MRUCachingSAMSequenceDictionary.java     |   186 +
 .../broadinstitute/gatk/utils/MannWhitneyU.java    |   507 +
 .../org/broadinstitute/gatk/utils/MathUtils.java   |  1689 ++
 .../java/org/broadinstitute/gatk/utils/Median.java |    94 +
 .../gatk/utils/MultiThreadedErrorTracker.java      |   105 +
 .../org/broadinstitute/gatk/utils/NGSPlatform.java |   146 +
 .../org/broadinstitute/gatk/utils/PathUtils.java   |   195 +
 .../broadinstitute/gatk/utils/QualityUtils.java    |   397 +
 .../gatk/utils/R/RScriptExecutor.java              |   191 +
 .../gatk/utils/R/RScriptExecutorException.java     |    34 +
 .../gatk/utils/R/RScriptLibrary.java               |    66 +
 .../org/broadinstitute/gatk/utils/R/RUtils.java    |    91 +
 .../gatk/utils/SequenceDictionaryUtils.java        |   540 +
 .../gatk/utils/SequencerFlowClass.java             |    38 +
 .../org/broadinstitute/gatk/utils/SimpleTimer.java |     2 +-
 .../gatk/utils/UnvalidatingGenomeLoc.java          |    50 +
 .../java/org/broadinstitute/gatk/utils/Utils.java  |  1174 ++
 .../gatk/utils/ValidationExclusion.java            |    71 +
 .../gatk/utils/activeregion/ActiveRegion.java      |   500 +
 .../utils/activeregion/ActiveRegionReadState.java  |    40 +
 .../gatk/utils/activeregion/ActivityProfile.java   |   520 +
 .../utils/activeregion/ActivityProfileState.java   |   112 +
 .../activeregion/BandPassActivityProfile.java      |   194 +
 .../gatk/utils/analysis/AminoAcid.java             |   114 +
 .../gatk/utils/analysis/AminoAcidTable.java        |    94 +
 .../gatk/utils/analysis/AminoAcidUtils.java        |    77 +
 .../org/broadinstitute/gatk/utils/baq/BAQ.java     |   713 +
 .../gatk/utils/classloader/JVMUtils.java           |   309 +
 .../gatk/utils/classloader/PluginManager.java      |   356 +
 .../utils/classloader/ProtectedPackageSource.java  |    28 +
 .../utils/classloader/PublicPackageSource.java     |    28 +
 .../gatk/utils/clipping/ClippingOp.java            |   617 +
 .../utils/clipping/ClippingRepresentation.java     |    63 +
 .../gatk/utils/clipping/ReadClipper.java           |   568 +
 .../gatk/utils/codecs/beagle/BeagleCodec.java      |   287 +
 .../gatk/utils/codecs/beagle/BeagleFeature.java    |   118 +
 .../gatk/utils/codecs/hapmap/RawHapMapCodec.java   |   135 +
 .../gatk/utils/codecs/hapmap/RawHapMapFeature.java |   204 +
 .../gatk/utils/codecs/refseq/RefSeqCodec.java      |   181 +
 .../gatk/utils/codecs/refseq/RefSeqFeature.java    |   331 +
 .../gatk/utils/codecs/refseq/Transcript.java       |    78 +
 .../utils/codecs/sampileup/SAMPileupCodec.java     |   365 +
 .../utils/codecs/sampileup/SAMPileupFeature.java   |   284 +
 .../gatk/utils/codecs/samread/SAMReadCodec.java    |   134 +
 .../gatk/utils/codecs/samread/SAMReadFeature.java  |   207 +
 .../gatk/utils/codecs/table/BedTableCodec.java     |    58 +
 .../gatk/utils/codecs/table/TableCodec.java        |   136 +
 .../gatk/utils/codecs/table/TableFeature.java      |   104 +
 .../gatk/utils/collections/DefaultHashMap.java     |    56 +
 .../gatk/utils/collections/ExpandingArrayList.java |    69 +
 .../gatk/utils/collections/IndexedSet.java         |   342 +
 .../collections/LoggingNestedIntegerArray.java     |   120 +
 .../gatk/utils/collections/NestedIntegerArray.java |   221 +
 .../gatk/utils/collections/Pair.java               |    93 +
 .../gatk/utils/collections/Permutation.java        |   103 +
 .../gatk/utils/collections/PrimitivePair.java      |   200 +
 .../gatk/utils/collections/RODMergingIterator.java |   160 +
 .../gatk/utils/commandline/Advanced.java           |    41 +
 .../gatk/utils/commandline/Argument.java           |   125 +
 .../gatk/utils/commandline/ArgumentCollection.java |    45 +
 .../gatk/utils/commandline/ArgumentDefinition.java |   297 +
 .../utils/commandline/ArgumentDefinitionGroup.java |    99 +
 .../utils/commandline/ArgumentDefinitions.java     |   195 +
 .../gatk/utils/commandline/ArgumentException.java  |    38 +
 .../gatk/utils/commandline/ArgumentIOType.java     |    52 +
 .../gatk/utils/commandline/ArgumentMatch.java      |   292 +
 .../utils/commandline/ArgumentMatchFileValue.java  |    52 +
 .../gatk/utils/commandline/ArgumentMatchSite.java  |    77 +
 .../utils/commandline/ArgumentMatchSource.java     |    97 +
 .../utils/commandline/ArgumentMatchSourceType.java |    33 +
 .../commandline/ArgumentMatchStringValue.java      |    49 +
 .../gatk/utils/commandline/ArgumentMatchValue.java |    43 +
 .../gatk/utils/commandline/ArgumentMatches.java    |   209 +
 .../gatk/utils/commandline/ArgumentSource.java     |   243 +
 .../utils/commandline/ArgumentTypeDescriptor.java  |  1038 +
 .../gatk/utils/commandline/ClassType.java          |    40 +
 .../gatk/utils/commandline/CommandLineProgram.java |   460 +
 .../gatk/utils/commandline/CommandLineUtils.java   |   192 +
 .../commandline/EnumerationArgumentDefault.java    |    65 +
 .../gatk/utils/commandline/Gather.java             |    41 +
 .../gatk/utils/commandline/Gatherer.java           |    47 +
 .../gatk/utils/commandline/Hidden.java             |    41 +
 .../gatk/utils/commandline/Input.java              |    83 +
 .../commandline/IntervalArgumentCollection.java    |    88 +
 .../gatk/utils/commandline/IntervalBinding.java    |   101 +
 .../commandline/MissingArgumentValueException.java |    50 +
 .../gatk/utils/commandline/Multiplex.java          |    44 +
 .../gatk/utils/commandline/Multiplexer.java        |    52 +
 .../gatk/utils/commandline/Output.java             |    90 +
 .../gatk/utils/commandline/ParsedArgs.java         |    38 +
 .../gatk/utils/commandline/ParsedListArgs.java     |    55 +
 .../gatk/utils/commandline/ParsingEngine.java      |   829 +
 .../commandline/ParsingEngineArgumentFiles.java    |    55 +
 .../commandline/ParsingEngineArgumentProvider.java |    37 +
 .../gatk/utils/commandline/ParsingMethod.java      |   127 +
 .../gatk/utils/commandline/RodBinding.java         |   197 +
 .../utils/commandline/RodBindingCollection.java    |    89 +
 .../gatk/utils/commandline/Tags.java               |   112 +
 .../gatk/utils/commandline/package-info.java       |    26 +
 .../gatk/utils/contexts/AlignmentContext.java      |   154 +
 .../gatk/utils/contexts/AlignmentContextUtils.java |   150 +
 .../gatk/utils/contexts/ReferenceContext.java      |   217 +
 .../gatk/utils/diffengine/BAMDiffableReader.java   |   119 +
 .../gatk/utils/diffengine/DiffElement.java         |   125 +
 .../gatk/utils/diffengine/DiffEngine.java          |   437 +
 .../gatk/utils/diffengine/DiffNode.java            |   249 +
 .../gatk/utils/diffengine/DiffValue.java           |    90 +
 .../gatk/utils/diffengine/DiffableReader.java      |    66 +
 .../gatk/utils/diffengine/Difference.java          |   137 +
 .../utils/diffengine/GATKReportDiffableReader.java |   104 +
 .../gatk/utils/diffengine/VCFDiffableReader.java   |   145 +
 .../AlleleBiasedDownsamplingUtils.java             |   369 +
 .../gatk/utils/downsampling/DownsampleType.java    |    39 +
 .../gatk/utils/downsampling/Downsampler.java       |   161 +
 .../utils/downsampling/DownsamplingMethod.java     |   121 +
 .../downsampling/DownsamplingReadsIterator.java    |   116 +
 .../gatk/utils/downsampling/DownsamplingUtils.java |   107 +
 .../utils/downsampling/FractionalDownsampler.java  |   129 +
 .../downsampling/FractionalDownsamplerFactory.java |    46 +
 .../utils/downsampling/LevelingDownsampler.java    |   242 +
 .../utils/downsampling/PassThroughDownsampler.java |   111 +
 .../PerSampleDownsamplingReadsIterator.java        |   207 +
 .../gatk/utils/downsampling/ReadsDownsampler.java  |    56 +
 .../downsampling/ReadsDownsamplerFactory.java      |    38 +
 .../utils/downsampling/ReservoirDownsampler.java   |   219 +
 .../downsampling/ReservoirDownsamplerFactory.java  |    46 +
 .../downsampling/SimplePositionalDownsampler.java  |   171 +
 .../SimplePositionalDownsamplerFactory.java        |    46 +
 .../gatk/utils/duplicates/DupUtils.java            |   142 +
 .../gatk/utils/duplicates/DuplicateComp.java       |    66 +
 .../DynamicClassResolutionException.java           |    54 +
 .../gatk/utils/exceptions/GATKException.java       |     2 +-
 .../utils/exceptions/ReviewedGATKException.java    |     2 +-
 .../gatk/utils/exceptions/UserException.java       |   490 +
 .../gatk/utils/fasta/ArtificialFastaUtils.java     |   154 +
 .../fasta/CachingIndexedFastaSequenceFile.java     |   370 +
 .../gatk/utils/fasta/package-info.java             |    26 +
 .../gatk/utils/file/FSLockWithShared.java          |   293 +
 .../gatk/utils/fragments/FragmentCollection.java   |    67 +
 .../gatk/utils/fragments/FragmentUtils.java        |   377 +
 .../gatk/utils/genotyper/AlleleList.java           |    41 +
 .../utils/genotyper/AlleleListPermutation.java     |    35 +
 .../gatk/utils/genotyper/AlleleListUtils.java      |   334 +
 .../gatk/utils/genotyper/DiploidGenotype.java      |   125 +
 .../gatk/utils/genotyper/IndexedAlleleList.java    |    95 +
 .../gatk/utils/genotyper/IndexedSampleList.java    |    96 +
 .../gatk/utils/genotyper/MostLikelyAllele.java     |   134 +
 .../genotyper/PerReadAlleleLikelihoodMap.java      |   417 +
 .../gatk/utils/genotyper/ReadLikelihoods.java      |  1586 ++
 .../gatk/utils/genotyper/SampleList.java           |    42 +
 .../gatk/utils/genotyper/SampleListUtils.java      |   224 +
 .../gatk/utils/haplotype/EventMap.java             |   423 +
 .../gatk/utils/haplotype/Haplotype.java            |   343 +
 .../utils/haplotype/HaplotypeBaseComparator.java   |    42 +
 .../utils/haplotype/HaplotypeScoreComparator.java  |    39 +
 .../haplotype/HaplotypeSizeAndBaseComparator.java  |    47 +
 .../gatk/utils/help/ApplicationDetails.java        |    95 +
 .../gatk/utils/help/DocletUtils.java               |    80 +
 .../gatk/utils/help/DocumentedGATKFeature.java     |    50 +
 .../utils/help/DocumentedGATKFeatureHandler.java   |    99 +
 .../utils/help/DocumentedGATKFeatureObject.java    |    61 +
 .../gatk/utils/help/ForumAPIUtils.java             |   173 +
 .../gatk/utils/help/ForumDiscussion.java           |    84 +
 .../gatk/utils/help/GATKDocUtils.java              |    75 +
 .../gatk/utils/help/GATKDocWorkUnit.java           |   127 +
 .../broadinstitute/gatk/utils/help/GATKDoclet.java |   580 +
 .../gatk/utils/help/GSONArgument.java              |    83 +
 .../gatk/utils/help/GSONWorkUnit.java              |    86 +
 .../utils/help/GenericDocumentationHandler.java    |   722 +
 .../gatk/utils/help/HelpConstants.java             |    82 +
 .../gatk/utils/help/HelpFormatter.java             |   336 +
 .../utils/help/ResourceBundleExtractorDoclet.java  |   281 +
 .../gatk/utils/instrumentation/Sizeof.java         |   146 +
 .../gatk/utils/interval/IntervalMergingRule.java   |    35 +
 .../gatk/utils/interval/IntervalSetRule.java       |    36 +
 .../gatk/utils/interval/IntervalUtils.java         |   895 +
 .../gatk/utils/io/FileExtension.java               |    37 +
 .../utils/io/HardThresholdingOutputStream.java     |    56 +
 .../org/broadinstitute/gatk/utils/io/IOUtils.java  |   575 +
 .../gatk/utils/io/ReferenceBacked.java             |    33 +
 .../org/broadinstitute/gatk/utils/io/Resource.java |   142 +
 .../gatk/utils/iterators/GATKSAMIterator.java      |    56 +
 .../utils/iterators/GATKSAMIteratorAdapter.java    |   136 +
 .../gatk/utils/iterators/PushbackIterator.java     |    82 +
 .../gatk/utils/jna/clibrary/JNAUtils.java          |    59 +
 .../gatk/utils/jna/clibrary/LibC.java              |   200 +
 .../gatk/utils/jna/drmaa/v1_0/JnaJobInfo.java      |   101 +
 .../gatk/utils/jna/drmaa/v1_0/JnaJobTemplate.java  |   316 +
 .../gatk/utils/jna/drmaa/v1_0/JnaSession.java      |   461 +
 .../utils/jna/drmaa/v1_0/JnaSessionFactory.java    |    40 +
 .../gatk/utils/jna/drmaa/v1_0/LibDrmaa.java        |   723 +
 .../gatk/utils/jna/lsf/v7_0_6/LibBat.java          | 20014 +++++++++++++++++++
 .../gatk/utils/jna/lsf/v7_0_6/LibLsf.java          |  1780 ++
 .../utils/locusiterator/AlignmentStateMachine.java |   372 +
 .../utils/locusiterator/LIBSDownsamplingInfo.java  |    51 +
 .../gatk/utils/locusiterator/LIBSPerformance.java  |   191 +
 .../gatk/utils/locusiterator/LocusIterator.java    |    62 +
 .../utils/locusiterator/LocusIteratorByState.java  |   457 +
 .../locusiterator/PerSampleReadStateManager.java   |   261 +
 .../gatk/utils/locusiterator/ReadStateManager.java |   289 +
 .../utils/locusiterator/SamplePartitioner.java     |   172 +
 .../gatk/utils/nanoScheduler/EOFMarkedValue.java   |   105 +
 .../gatk/utils/nanoScheduler/InputProducer.java    |   217 +
 .../gatk/utils/nanoScheduler/MapResult.java        |    75 +
 .../gatk/utils/nanoScheduler/MapResultsQueue.java  |   116 +
 .../gatk/utils/nanoScheduler/NSMapFunction.java    |    44 +
 .../utils/nanoScheduler/NSProgressFunction.java    |    37 +
 .../gatk/utils/nanoScheduler/NSReduceFunction.java |    43 +
 .../gatk/utils/nanoScheduler/NanoScheduler.java    |   494 +
 .../gatk/utils/nanoScheduler/Reducer.java          |   169 +
 .../broadinstitute/gatk/utils/package-info.java    |     2 +-
 .../gatk/utils/pairhmm/BatchPairHMM.java           |    41 +
 .../gatk/utils/pairhmm/Log10PairHMM.java           |   220 +
 .../gatk/utils/pairhmm/N2MemoryPairHMM.java        |    98 +
 .../broadinstitute/gatk/utils/pairhmm/PairHMM.java |   390 +
 .../gatk/utils/pairhmm/PairHMMModel.java           |   435 +
 .../gatk/utils/pairhmm/PairHMMReadyHaplotypes.java |   182 +
 .../utils/pileup/MergingPileupElementIterator.java |    76 +
 .../gatk/utils/pileup/PileupElement.java           |   539 +
 .../gatk/utils/pileup/PileupElementFilter.java     |    36 +
 .../gatk/utils/pileup/PileupElementTracker.java    |   154 +
 .../gatk/utils/pileup/ReadBackedPileup.java        |   295 +
 .../gatk/utils/pileup/ReadBackedPileupImpl.java    |  1040 +
 .../org/broadinstitute/gatk/utils/pileup2/Notes    |     0
 .../gatk/utils/progressmeter/ProgressMeter.java    |   465 +
 .../utils/progressmeter/ProgressMeterDaemon.java   |   111 +
 .../utils/progressmeter/ProgressMeterData.java     |    79 +
 .../gatk/utils/recalibration/EventType.java        |    72 +
 .../gatk/utils/refdata/RODRecordListImpl.java      |   129 +
 .../gatk/utils/refdata/RefMetaDataTracker.java     |   497 +
 .../refdata/ReferenceDependentFeatureCodec.java    |    42 +
 .../gatk/utils/refdata/ReferenceOrderedDatum.java  |    66 +
 .../gatk/utils/refdata/SeekableRODIterator.java    |   412 +
 .../gatk/utils/refdata/VariantContextAdaptors.java |   265 +
 .../gatk/utils/refdata/package-info.java           |    26 +
 .../gatk/utils/refdata/tracks/FeatureManager.java  |   280 +
 .../utils/refdata/tracks/IndexDictionaryUtils.java |   114 +
 .../gatk/utils/refdata/tracks/RMDTrack.java        |   147 +
 .../gatk/utils/refdata/tracks/RMDTrackBuilder.java |   469 +
 .../utils/FeatureToGATKFeatureIterator.java        |    74 +
 .../utils/refdata/utils/FlashBackIterator.java     |   221 +
 .../gatk/utils/refdata/utils/GATKFeature.java      |   114 +
 .../utils/LocationAwareSeekableRODIterator.java    |    49 +
 .../gatk/utils/refdata/utils/RMDTriplet.java       |    92 +
 .../gatk/utils/refdata/utils/RODRecordList.java    |    45 +
 .../gatk/utils/report/GATKReport.java              |   376 +
 .../gatk/utils/report/GATKReportColumn.java        |   147 +
 .../gatk/utils/report/GATKReportColumnFormat.java  |    63 +
 .../gatk/utils/report/GATKReportDataType.java      |   236 +
 .../gatk/utils/report/GATKReportGatherer.java      |    62 +
 .../gatk/utils/report/GATKReportTable.java         |   779 +
 .../gatk/utils/report/GATKReportVersion.java       |   100 +
 .../gatk/utils/runtime/CapturedStreamOutput.java   |   134 +
 .../gatk/utils/runtime/InputStreamSettings.java    |   116 +
 .../gatk/utils/runtime/OutputStreamSettings.java   |   127 +
 .../gatk/utils/runtime/ProcessController.java      |   387 +
 .../gatk/utils/runtime/ProcessOutput.java          |    57 +
 .../gatk/utils/runtime/ProcessSettings.java        |   140 +
 .../gatk/utils/runtime/RuntimeUtils.java           |    77 +
 .../gatk/utils/runtime/StreamLocation.java         |    33 +
 .../gatk/utils/runtime/StreamOutput.java           |    69 +
 .../gatk/utils/sam/AlignmentStartComparator.java   |    50 +
 .../sam/AlignmentStartWithNoTiesComparator.java    |    73 +
 .../gatk/utils/sam/AlignmentUtils.java             |  1339 ++
 .../gatk/utils/sam/ArtificialBAMBuilder.java       |   242 +
 .../utils/sam/ArtificialGATKSAMFileWriter.java     |   129 +
 .../utils/sam/ArtificialMultiSampleReadStream.java |    87 +
 .../utils/sam/ArtificialPatternedSAMIterator.java  |   172 +
 .../gatk/utils/sam/ArtificialSAMFileReader.java    |   155 +
 .../gatk/utils/sam/ArtificialSAMIterator.java      |   212 +
 .../gatk/utils/sam/ArtificialSAMQueryIterator.java |   259 +
 .../gatk/utils/sam/ArtificialSAMUtils.java         |   484 +
 .../sam/ArtificialSingleSampleReadStream.java      |   213 +
 .../ArtificialSingleSampleReadStreamAnalyzer.java  |   282 +
 .../broadinstitute/gatk/utils/sam/CigarUtils.java  |   273 +
 .../gatk/utils/sam/GATKSAMFileWriter.java          |    56 +
 .../gatk/utils/sam/GATKSAMReadGroupRecord.java     |   116 +
 .../gatk/utils/sam/GATKSAMRecord.java              |   623 +
 .../gatk/utils/sam/GATKSAMRecordIterator.java      |    64 +
 .../ReadUnclippedStartWithNoTiesComparator.java    |    73 +
 .../broadinstitute/gatk/utils/sam/ReadUtils.java   |   957 +
 .../gatk/utils/sam/SAMReaderBuilder.java           |   102 +
 .../broadinstitute/gatk/utils/sam/SAMReaderID.java |   134 +
 .../gatk/utils/sam/SimplifyingSAMFileWriter.java   |    86 +
 .../gatk/utils/sam/package-info.java               |    26 +
 .../GlobalEdgeGreedySWPairwiseAlignment.java       |   208 +
 .../gatk/utils/smithwaterman/Parameters.java       |    62 +
 .../utils/smithwaterman/SWPairwiseAlignment.java   |   599 +
 .../smithwaterman/SWPairwiseAlignmentMain.java     |   221 +
 .../gatk/utils/smithwaterman/SWParameterSet.java   |    51 +
 .../gatk/utils/smithwaterman/SmithWaterman.java    |    57 +
 .../gatk/utils/text/ListFileUtils.java             |   344 +
 .../gatk/utils/text/TextFormattingUtils.java       |   182 +
 .../broadinstitute/gatk/utils/text/XReadLines.java |   208 +
 .../EfficiencyMonitoringThreadFactory.java         |   160 +
 .../gatk/utils/threading/NamedThreadFactory.java   |    51 +
 .../utils/threading/ThreadEfficiencyMonitor.java   |   232 +
 .../gatk/utils/threading/ThreadLocalArray.java     |    65 +
 .../gatk/utils/threading/ThreadPoolMonitor.java    |    77 +
 .../gatk/utils/threading/package-info.java         |    26 +
 .../utils/variant/ChromosomeCountConstants.java    |    44 +
 .../gatk/utils/variant/GATKVCFConstants.java       |   175 +
 .../gatk/utils/variant/GATKVCFHeaderLines.java     |   200 +
 .../gatk/utils/variant/GATKVCFIndexType.java       |    39 +
 .../utils/variant/GATKVariantContextUtils.java     |  2124 ++
 .../gatk/utils/variant/HomoSapiensConstants.java   |    51 +
 .../gatk/utils/variant/VCIterable.java             |    92 +
 .../gatk/utils/wiggle/WiggleHeader.java            |    56 +
 .../gatk/utils/wiggle/WiggleWriter.java            |   117 +
 .../htsjdk/samtools/GATKBAMFileSpanUnitTest.java   |   254 +
 .../java/htsjdk/samtools/GATKChunkUnitTest.java    |    71 +
 .../gatk/utils/AutoFormattingTimeUnitTest.java     |   118 +
 .../org/broadinstitute/gatk/utils/BaseTest.java    |   564 +
 .../gatk/utils/BaseUtilsUnitTest.java              |   177 +
 .../gatk/utils/BitSetUtilsUnitTest.java            |    84 +
 .../gatk/utils/ExampleToCopyUnitTest.java          |   239 +
 .../gatk/utils/GATKTextReporter.java               |    41 +
 .../gatk/utils/GenomeLocParserBenchmark.java       |    81 +
 .../gatk/utils/GenomeLocParserUnitTest.java        |   509 +
 .../gatk/utils/GenomeLocSortedSetUnitTest.java     |   405 +
 .../gatk/utils/GenomeLocUnitTest.java              |   386 +
 .../java/org/broadinstitute/gatk/utils/MD5DB.java  |   312 +
 .../org/broadinstitute/gatk/utils/MD5Mismatch.java |    67 +
 .../MRUCachingSAMSequencingDictionaryUnitTest.java |    97 +
 .../org/broadinstitute/gatk/utils/MWUnitTest.java  |   131 +
 .../gatk/utils/MathUtilsUnitTest.java              |   913 +
 .../broadinstitute/gatk/utils/MedianUnitTest.java  |   115 +
 .../gatk/utils/NGSPlatformUnitTest.java            |   167 +
 .../gatk/utils/PathUtilsUnitTest.java              |    65 +
 .../gatk/utils/QualityUtilsUnitTest.java           |   189 +
 .../gatk/utils/R/RScriptExecutorUnitTest.java      |   110 +
 .../gatk/utils/R/RScriptLibraryUnitTest.java       |    47 +
 .../gatk/utils/R/RUtilsUnitTest.java               |    65 +
 .../utils/SequenceDictionaryUtilsUnitTest.java     |   239 +
 .../gatk/utils/SimpleTimerUnitTest.java            |   179 +
 .../gatk/utils/TestNGTestTransformer.java          |    62 +
 .../broadinstitute/gatk/utils/UtilsUnitTest.java   |   362 +
 .../utils/activeregion/ActiveRegionUnitTest.java   |   395 +
 .../activeregion/ActivityProfileStateUnitTest.java |    92 +
 .../activeregion/ActivityProfileUnitTest.java      |   491 +
 .../BandPassActivityProfileUnitTest.java           |   339 +
 .../broadinstitute/gatk/utils/baq/BAQUnitTest.java |   257 +
 .../gatk/utils/classloader/JVMUtilsUnitTest.java   |    75 +
 .../gatk/utils/clipping/ReadClipperTestUtils.java  |   162 +
 .../gatk/utils/clipping/ReadClipperUnitTest.java   |   421 +
 .../utils/codecs/beagle/BeagleCodecUnitTest.java   |    42 +
 .../gatk/utils/codecs/hapmap/HapMapUnitTest.java   |   174 +
 .../utils/codecs/refseq/RefSeqCodecUnitTest.java   |    42 +
 .../codecs/sampileup/SAMPileupCodecUnitTest.java   |    42 +
 .../utils/codecs/samread/SAMReadCodecUnitTest.java |    42 +
 .../utils/codecs/table/TableCodecUnitTest.java     |    42 +
 .../utils/collections/DefaultHashMapUnitTest.java  |   159 +
 .../collections/ExpandingArrayListUnitTest.java    |   177 +
 .../commandline/ArgumentMatchSiteUnitTest.java     |    80 +
 .../commandline/ArgumentMatchSourceUnitTest.java   |    99 +
 .../utils/commandline/ParsingEngineUnitTest.java   |  1140 ++
 .../commandline/RodBindingCollectionUnitTest.java  |   133 +
 .../gatk/utils/commandline/RodBindingUnitTest.java |    82 +
 .../AlleleBiasedDownsamplingUtilsUnitTest.java     |   219 +
 .../CachingIndexedFastaSequenceFileUnitTest.java   |   264 +
 .../gatk/utils/file/FSLockWithSharedUnitTest.java  |    60 +
 .../utils/fragments/FragmentUtilsBenchmark.java    |    81 +
 .../utils/fragments/FragmentUtilsUnitTest.java     |   390 +
 .../gatk/utils/haplotype/EventMapUnitTest.java     |   203 +
 .../gatk/utils/haplotype/HaplotypeUnitTest.java    |   249 +
 .../gatk/utils/interval/IntervalUtilsUnitTest.java |  1114 ++
 .../gatk/utils/io/IOUtilsUnitTest.java             |   326 +
 .../gatk/utils/jna/clibrary/LibCUnitTest.java      |    70 +
 .../utils/jna/drmaa/v1_0/JnaSessionQueueTest.java  |   165 +
 .../utils/jna/drmaa/v1_0/LibDrmaaQueueTest.java    |   257 +
 .../gatk/utils/jna/lsf/v7_0_6/LibBatQueueTest.java |   162 +
 .../AlignmentStateMachineUnitTest.java             |   116 +
 .../gatk/utils/locusiterator/LIBS_position.java    |   155 +
 .../locusiterator/LocusIteratorBenchmark.java      |   142 +
 .../LocusIteratorByStateBaseTest.java              |   232 +
 .../LocusIteratorByStateUnitTest.java              |   743 +
 .../PerSampleReadStateManagerUnitTest.java         |   188 +
 .../utils/nanoScheduler/InputProducerUnitTest.java |    94 +
 .../utils/nanoScheduler/MapResultUnitTest.java     |    65 +
 .../utils/nanoScheduler/NanoSchedulerUnitTest.java |   343 +
 .../gatk/utils/nanoScheduler/ReducerUnitTest.java  |   236 +
 .../gatk/utils/pileup/PileupElementUnitTest.java   |   189 +
 .../utils/pileup/ReadBackedPileupUnitTest.java     |   328 +
 .../progressmeter/ProgressMeterDaemonUnitTest.java |   121 +
 .../progressmeter/ProgressMeterDataUnitTest.java   |    86 +
 .../utils/recalibration/EventTypeUnitTest.java     |    61 +
 .../utils/refdata/RefMetaDataTrackerUnitTest.java  |   290 +
 .../refdata/tracks/FeatureManagerUnitTest.java     |   163 +
 .../refdata/tracks/RMDTrackBuilderUnitTest.java    |   190 +
 .../utils/CheckableCloseableTribbleIterator.java   |    90 +
 .../FeatureToGATKFeatureIteratorUnitTest.java      |    61 +
 .../refdata/utils/FlashBackIteratorUnitTest.java   |   369 +
 .../utils/refdata/utils/TestFeatureReader.java     |    53 +
 .../utils/refdata/utils/TestRMDTrackBuilder.java   |    70 +
 .../gatk/utils/report/GATKReportUnitTest.java      |   289 +
 .../utils/report/ReportMarshallerUnitTest.java     |    64 +
 .../utils/runtime/ProcessControllerUnitTest.java   |   518 +
 .../gatk/utils/runtime/RuntimeUtilsUnitTest.java   |    42 +
 .../gatk/utils/sam/AlignmentUtilsUnitTest.java     |  1045 +
 .../utils/sam/ArtificialBAMBuilderUnitTest.java    |   121 +
 .../ArtificialPatternedSAMIteratorUnitTest.java    |   122 +
 .../utils/sam/ArtificialSAMFileWriterUnitTest.java |   120 +
 .../sam/ArtificialSAMQueryIteratorUnitTest.java    |   138 +
 .../gatk/utils/sam/ArtificialSAMUtilsUnitTest.java |   108 +
 .../ArtificialSingleSampleReadStreamUnitTest.java  |   186 +
 .../gatk/utils/sam/GATKSAMRecordUnitTest.java      |    78 +
 .../gatk/utils/sam/ReadUtilsUnitTest.java          |   339 +
 .../smithwaterman/SmithWatermanBenchmark.java      |    87 +
 .../gatk/utils/text/ListFileUtilsUnitTest.java     |   154 +
 .../utils/text/TextFormattingUtilsUnitTest.java    |    89 +
 .../EfficiencyMonitoringThreadFactoryUnitTest.java |   189 +
 .../utils/threading/ThreadPoolMonitorUnitTest.java |    64 +
 .../variant/GATKVariantContextUtilsUnitTest.java   |  1665 ++
 .../utils/variant/VariantContextBenchmark.java     |   377 +
 .../gatk-utils/src/test/resources/exampleBAM.bam   |   Bin 0 -> 3609 bytes
 .../src/test/resources/exampleBAM.bam.bai          |   Bin 0 -> 232 bytes
 .../src/test/resources/exampleBAM.simple.bai       |   Bin 0 -> 232 bytes
 .../src/test/resources/exampleBAM.simple.bam       |   Bin 0 -> 3602 bytes
 .../test/resources/exampleBAM_with_unmapped.bai    |   Bin 0 -> 232 bytes
 .../test/resources/exampleBAM_with_unmapped.bam    |   Bin 0 -> 3575 bytes
 .../test/resources/exampleCRAM-nobai-nocrai.cram   |   Bin 0 -> 13353 bytes
 .../test/resources/exampleCRAM-nobai-withcrai.cram |   Bin 0 -> 13353 bytes
 .../resources/exampleCRAM-nobai-withcrai.cram.crai |   Bin 0 -> 47 bytes
 .../gatk-utils/src/test/resources/exampleCRAM.cram |   Bin 0 -> 5281 bytes
 .../src/test/resources/exampleCRAM.cram.bai        |   Bin 0 -> 136 bytes
 .../src/test/resources/exampleCRAM.cram.crai       |   Bin 0 -> 44 bytes
 .../src/test/resources/exampleDBSNP.vcf            |     0
 .../src/test/resources/exampleDBSNP.vcf.idx        |   Bin
 .../src/test/resources/exampleFASTA-3contigs.fasta |     0
 .../src/test/resources/exampleFASTA-combined.fasta |     0
 .../src/test/resources/exampleFASTA-windows.fasta  |     0
 .../src/test/resources/exampleFASTA.dict           |     0
 .../src/test/resources/exampleFASTA.fasta          |     0
 .../src/test/resources/exampleFASTA.fasta.amb      |     0
 .../src/test/resources/exampleFASTA.fasta.ann      |     0
 .../src/test/resources/exampleFASTA.fasta.bwt      |   Bin
 .../src/test/resources/exampleFASTA.fasta.fai      |     0
 .../src/test/resources/exampleFASTA.fasta.pac      |   Bin
 .../src/test/resources/exampleFASTA.fasta.rbwt     |   Bin
 .../src/test/resources/exampleFASTA.fasta.rpac     |   Bin
 .../src/test/resources/exampleFASTA.fasta.rsa      |   Bin
 .../src/test/resources/exampleFASTA.fasta.sa       |   Bin
 .../src/test/resources/exampleGATKReport.eval      |     0
 .../src/test/resources/exampleGATKReportv1.tbl     |     0
 .../src/test/resources/exampleGATKReportv2.tbl     |     0
 .../src/test/resources/exampleGRP.grp              |     0
 .../src/test/resources/exampleINTERVAL.intervals   |     0
 .../gatk-utils/src/test/resources/exampleNORG.bam  |   Bin 0 -> 3612 bytes
 .../src/test/resources/exampleNORG.bam.bai         |   Bin 0 -> 232 bytes
 .../test/resources/forAlleleFractionSimulation.vcf |     0
 .../resources/forAlleleFractionSimulation.vcf.idx  |   Bin
 .../src/test/resources/forLongInsert.vcf           |     0
 .../src/test/resources/forLongInsert.vcf.idx       |   Bin
 .../src/test/resources/forSimulation.vcf           |     0
 .../src/test/resources/forSimulation.vcf.idx       |   Bin
 .../src/test/resources/testProperties.properties   |     0
 .../src/test/resources/testfile.sam                |     0
 public/gsalib/pom.xml                              |    10 +-
 public/gsalib/src/R/DESCRIPTION                    |    15 +-
 public/gsalib/src/R/NAMESPACE                      |     3 +-
 public/gsalib/src/R/R/gsa.error.R                  |    12 -
 public/gsalib/src/R/R/gsa.getargs.R                |   116 -
 public/gsalib/src/R/R/gsa.message.R                |     3 -
 public/gsalib/src/R/R/gsa.plot.venn.R              |    50 -
 public/gsalib/src/R/R/gsa.read.eval.R              |    83 -
 public/gsalib/src/R/R/gsa.read.gatkreport.R        |     2 +-
 public/gsalib/src/R/R/gsa.read.squidmetrics.R      |    28 -
 public/gsalib/src/R/R/gsa.read.vcf.R               |    23 -
 .../gsalib/src/R/R/gsa.reshape.concordance.table.R |    20 +
 public/gsalib/src/R/R/gsa.variantqc.utils.R        |   246 -
 public/gsalib/src/R/R/gsa.warn.R                   |     3 -
 public/gsalib/src/R/Read-and-delete-me             |     9 -
 .../src/R/inst/extdata/test_gatkreport.table       |    20 +
 .../src/R/inst/extdata/test_genconcord.table       |    30 +
 public/gsalib/src/R/man/gsa.error.Rd               |    49 -
 public/gsalib/src/R/man/gsa.getargs.Rd             |    57 -
 public/gsalib/src/R/man/gsa.message.Rd             |    44 -
 public/gsalib/src/R/man/gsa.plot.venn.Rd           |    75 -
 public/gsalib/src/R/man/gsa.read.eval.Rd           |   111 -
 public/gsalib/src/R/man/gsa.read.gatkreport.Rd     |    37 +-
 public/gsalib/src/R/man/gsa.read.gatkreportv0.Rd   |    26 +
 public/gsalib/src/R/man/gsa.read.gatkreportv1.Rd   |    26 +
 public/gsalib/src/R/man/gsa.read.squidmetrics.Rd   |    48 -
 public/gsalib/src/R/man/gsa.read.vcf.Rd            |    53 -
 .../src/R/man/gsa.reshape.concordance.table.Rd     |    48 +
 public/gsalib/src/R/man/gsa.warn.Rd                |    46 -
 public/gsalib/src/R/man/gsalib-internal.Rd         |     7 +
 public/gsalib/src/R/man/gsalib-package.Rd          |    70 +-
 public/gsalib/src/R/man/test_gatkreport.table.Rd   |    13 +
 public/gsalib/src/R/man/test_genconcord.table.Rd   |    13 +
 public/package-tests/pom.xml                       |    12 +-
 public/perl/liftOverVCF.pl                         |    83 -
 public/perl/sortByRef.pl                           |   127 -
 public/pom.xml                                     |     2 +-
 .../cofoja/cofoja/1.0-r139/cofoja-1.0-r139.jar     |   Bin 378205 -> 0 bytes
 .../cofoja/cofoja/1.0-r139/cofoja-1.0-r139.pom     |     9 -
 .../cofoja/1.2-20140817/cofoja-1.2-20140817.jar    |   Bin 0 -> 216297 bytes
 .../cofoja/1.2-20140817/cofoja-1.2-20140817.pom    |    89 +
 .../picard/picard/1.120.1579/picard-1.120.1579.jar |   Bin 1380569 -> 0 bytes
 .../picard/picard/1.120.1579/picard-1.120.1579.pom |    34 -
 .../htsjdk/1.120.1620/htsjdk-1.120.1620.jar        |   Bin 2185840 -> 0 bytes
 .../htsjdk/1.120.1620/htsjdk-1.120.1620.pom        |    27 -
 public/src/main/assembly/binary-dist.xml           |    22 +
 .../main/scripts/shell/check_utils_engine_tools.sh |    25 +
 settings/helpTemplates/generic.index.template.html |     2 +-
 settings/helpTemplates/generic.template.html       |    13 +-
 1916 files changed, 167568 insertions(+), 164559 deletions(-)

diff --git a/ant-bridge.sh b/ant-bridge.sh
index a2f6865..af94fa4 100755
--- a/ant-bridge.sh
+++ b/ant-bridge.sh
@@ -1,6 +1,7 @@
 #!/bin/sh
 
-mvn_args="verify"
+default_args="verify '-Ddisable.shadepackage'"
+mvn_args="${default_args}"
 mvn_properties=
 mvn_clean=
 unknown_args=
@@ -44,22 +45,23 @@ for arg in "${@}" ; do
         fi
 
     else
-        if [[ "${arg}" != "dist" && "${mvn_args}" != "" && "${mvn_args}" != "verify" ]] ; then
+        if [[ "${arg}" != "dist" && "${mvn_args}" != "" && "${mvn_args}" != "${default_args}" ]] ; then
             echo "Sorry, this script does not currently support mixing targets." >&2
             exit 1
 
         elif [[ "${arg}" == "dist" ]] ; then
-            mvn_args="verify"
+            mvn_args="${default_args}"
 
         elif [[ "${arg}" == "gatk" ]] ; then
-            mvn_args="verify '-P!queue'"
+            mvn_args="${default_args} '-P!queue'"
 
         elif [[ "${arg}" == "test.compile" ]] ; then
             mvn_args="test-compile"
 
         elif [[ "${arg}" == "gatkdocs" ]] ; then
             local_repo="sitetemprepo"
-            mvn_args="install -Dmaven.repo.local=${local_repo} -Ddisable.queue && mvn site -Dmaven.repo.local=${local_repo} -Ddisable.queue"
+            mvn_args="install -Dmaven.repo.local=${local_repo} '-P!queue' && mvn site -Dmaven.repo.local=${local_repo} '-P!queue'"
+            mvn_pkg_args=
 
         elif [[ "${arg}" == "package.gatk.full" ]] ; then
             mvn_args="package '-P!private,!queue'"
@@ -75,11 +77,11 @@ for arg in "${@}" ; do
 
 #        elif [[ "${arg}" == "release.gatk.full" ]] ; then
 #            mvn_args="package '-P!private,!queue'"
-#            post_script=" && private/src/main/scripts/shell/copy_release.sh public/gatk-package/target/GenomeAnalysisTK-*.tar.bz2"
+#            post_script=" && private/src/main/scripts/shell/copy_release.sh protected/gatk-package-distribution/target/GenomeAnalysisTK-*.tar.bz2"
 
 #        elif [[ "${arg}" == "release.queue.full" ]] ; then
 #            mvn_args="package '-P!private'"
-#            post_script=" && private/src/main/scripts/shell/copy_release.sh public/queue-package/target/Queue-*.tar.bz2"
+#            post_script=" && private/src/main/scripts/shell/copy_release.sh protected/gatk-queue-package-distribution/target/Queue-*.tar.bz2"
 
         elif [[ "${arg}" == "build-picard-private" ]] ; then
             mvn_args="mvn install -f private/picard-maven/pom.xml"
@@ -113,7 +115,7 @@ for arg in "${@}" ; do
             mvn_args="${mvn_args} -Dgatk.queuetests.run=true"
 
         elif [[ "${arg}" == "committests" ]] ; then
-            mvn_args="verify -Dgatk.committests.skipped=false"
+            mvn_args="${default_args} -Dgatk.committests.skipped=false"
 
         elif [[ "${arg}" == "test" ]] ; then
             mvn_args="test -Dgatk.unittests.skipped=false"
@@ -122,19 +124,19 @@ for arg in "${@}" ; do
             mvn_args="test -Dgatk.unittests.skipped=false"
 
         elif [[ "${arg}" == "integrationtest" ]] ; then
-            mvn_args="verify -Dgatk.integrationtests.skipped=false"
+            mvn_args="${default_args} -Dgatk.integrationtests.skipped=false"
 
         elif [[ "${arg}" == "largescaletest" ]] ; then
-            mvn_args="verify -Dgatk.largescaletests.skipped=false"
+            mvn_args="${default_args} -Dgatk.largescaletests.skipped=false"
 
         elif [[ "${arg}" == "knowledgebasetest" ]] ; then
-            mvn_args="verify -Dgatk.knowledgebasetests.skipped=false"
+            mvn_args="${default_args} -Dgatk.knowledgebasetests.skipped=false"
 
         elif [[ "${arg}" == "queuetest" ]] ; then
-            mvn_args="verify -Dgatk.queuetests.skipped=false"
+            mvn_args="${default_args} -Dgatk.queuetests.skipped=false"
 
         elif [[ "${arg}" == "queuetestrun" ]] ; then
-            mvn_args="verify -Dgatk.queuetests.skipped=false -Dgatk.queuetests.run=true"
+            mvn_args="${default_args} -Dgatk.queuetests.skipped=false -Dgatk.queuetests.run=true"
 
         elif [[ "${arg}" == "fasttest" ]] ; then
             mvn_args="verify -Dgatk.committests.skipped=false -pl private/gatk-tools-private -am -Dresource.bundle.skip=true"
diff --git a/licensing/private_license.txt b/licensing/private_license.txt
index a9d3904..bbb8dcf 100644
--- a/licensing/private_license.txt
+++ b/licensing/private_license.txt
@@ -24,7 +24,7 @@ LICENSEE expressly acknowledges that the PROGRAM contains an embedded automatic
 
 4. OWNERSHIP OF INTELLECTUAL PROPERTY
 LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication.
-Copyright 2012-2014 Broad Institute, Inc.
+Copyright 2012-2015 Broad Institute, Inc.
 Notice of attribution: The GATK3 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc.
 LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as states above for attribution purposes.
 
diff --git a/licensing/protected_license.txt b/licensing/protected_license.txt
index a9d3904..bbb8dcf 100644
--- a/licensing/protected_license.txt
+++ b/licensing/protected_license.txt
@@ -24,7 +24,7 @@ LICENSEE expressly acknowledges that the PROGRAM contains an embedded automatic
 
 4. OWNERSHIP OF INTELLECTUAL PROPERTY
 LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication.
-Copyright 2012-2014 Broad Institute, Inc.
+Copyright 2012-2015 Broad Institute, Inc.
 Notice of attribution: The GATK3 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc.
 LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as states above for attribution purposes.
 
diff --git a/licensing/public_license.txt b/licensing/public_license.txt
index 648ec8f..c53c5b3 100644
--- a/licensing/public_license.txt
+++ b/licensing/public_license.txt
@@ -1,4 +1,4 @@
-Copyright (c) 2012 The Broad Institute
+Copyright 2012-2015 Broad Institute, Inc.
 
 Permission is hereby granted, free of charge, to any person
 obtaining a copy of this software and associated documentation
diff --git a/pom.xml b/pom.xml
index 8488cf8..afd3bc2 100644
--- a/pom.xml
+++ b/pom.xml
@@ -13,7 +13,7 @@
     <parent>
         <groupId>org.broadinstitute.gatk</groupId>
         <artifactId>gatk-root</artifactId>
-        <version>3.3</version>
+        <version>3.5</version>
         <relativePath>public/gatk-root</relativePath>
     </parent>
 
@@ -32,11 +32,15 @@
         <resource.bundle.skip>false</resource.bundle.skip>
         <!-- TODO: Need a better a way to say "don't include hidden" by default -->
         <gatkdocs.include.hidden>-build-timestamp "${maven.build.timestamp}"</gatkdocs.include.hidden>
+        <gatk.shell.directory>${gatk.basedir}/public/src/main/scripts/shell</gatk.shell.directory>
+        <gatk.assembly.directory>${gatk.basedir}/public/src/main/assembly</gatk.assembly.directory>
 
         <!--
         Phases of the build that may be disabled to speed up compilation.
         -->
         <gatk.jar.phase>package</gatk.jar.phase>
+        <gatk.unpack.phase>prepare-package</gatk.unpack.phase>
+        <gatk.shade.phase>package</gatk.shade.phase>
         <gatk.generate-resources.phase>generate-resources</gatk.generate-resources.phase>
         <gatk.process-resources.phase>process-resources</gatk.process-resources.phase>
         <gatk.process-test-resources.phase>process-test-resources</gatk.process-test-resources.phase>
@@ -65,6 +69,16 @@
         <gatk.serialqueuetests.skipped>${gatk.serialcommittests.skipped}</gatk.serialqueuetests.skipped>
         <gatk.seriallargescaletests.skipped>true</gatk.seriallargescaletests.skipped>
         <gatk.serialknowledgebasetests.skipped>true</gatk.serialknowledgebasetests.skipped>
+
+        <!-- Full path to write the executable MANIFEST.MF only jars, 10 seconds to create at 4KB each, and the accompanying lib directory -->
+        <gatk.executable.directory>${gatk.basedir}/target/executable</gatk.executable.directory>
+        <!-- Full path to write (symlinks to) the full shaded package jars, 1+ minute to create at 12MB+ each -->
+        <gatk.package.directory>${gatk.basedir}/target/package</gatk.package.directory>
+        <!--
+        Full path to write symlink to either an executable MANIFEST.MF only jar - OR - a fully shaded package jar.
+        NOTE: MANIFEST.MF only jars MUST be accompanied by the lib folder, or they will not run.
+        -->
+        <gatk.shortcut.directory>${gatk.basedir}/target</gatk.shortcut.directory>
     </properties>
 
     <dependencies>
@@ -138,9 +152,20 @@
                                 <excludes>${resource.bundle.path}</excludes>
                             </configuration>
                         </execution>
+                        <execution>
+                            <id>executable-jar-lib</id>
+                            <goals>
+                                <goal>copy-dependencies</goal>
+                            </goals>
+                            <phase>none</phase>
+                            <configuration>
+                                <outputDirectory>${gatk.executable.directory}/lib</outputDirectory>
+                                <includeScope>runtime</includeScope>
+                                <useBaseVersion>false</useBaseVersion>
+                            </configuration>
+                        </execution>
                     </executions>
                 </plugin>
-                <!-- TODO: Change the ResourceBundleExtractorDoclet to not require log4j.properties file -->
                 <plugin>
                     <groupId>org.apache.maven.plugins</groupId>
                     <artifactId>maven-resources-plugin</artifactId>
@@ -159,25 +184,6 @@
                             </goals>
                             <phase>${gatk.process-test-resources.phase}</phase>
                         </execution>
-                        <execution>
-                            <id>copy-resource-bundle-log4j</id>
-                            <goals>
-                                <goal>copy-resources</goal>
-                            </goals>
-                            <phase>none</phase>
-                            <configuration>
-                                <!--
-                                Just before running the resource bundle generation, copy a simple log4j
-                                config file to the apidocs execution directory, so that logging prints out.
-                                -->
-                                <outputDirectory>${project.reporting.outputDirectory}/apidocs</outputDirectory>
-                                <resources>
-                                    <resource>
-                                        <directory>${gatk.basedir}/gatk-utils/src/main/config/org/broadinstitute/gatk/utils/help</directory>
-                                    </resource>
-                                </resources>
-                            </configuration>
-                        </execution>
                     </executions>
                 </plugin>
                 <plugin>
@@ -198,8 +204,7 @@
                                 <docletPath>${project.build.outputDirectory}</docletPath>
                                 <docletArtifact>
                                     <groupId>${project.groupId}</groupId>
-                                    <!-- TODO: THIS IS SUPPOSED TO BE GATK-UTILS! -->
-                                    <artifactId>gatk-tools-public</artifactId>
+                                    <artifactId>${project.artifactId}</artifactId>
                                     <version>${project.version}</version>
                                 </docletArtifact>
                                 <maxmemory>2g</maxmemory>
@@ -320,9 +325,39 @@
                     <artifactId>maven-jar-plugin</artifactId>
                     <executions>
                         <execution>
+                            <id>executable-jar</id>
+                            <goals>
+                                <goal>jar</goal>
+                            </goals>
+                            <phase>none</phase>
+                            <configuration>
+                                <classesDirectory>${project.build.outputDirectory}/ignored_by_executable_jar</classesDirectory>
+                                <outputDirectory>${gatk.executable.directory}</outputDirectory>
+                                <finalName>${gatk.binary-dist.name}</finalName>
+                                <archive>
+                                    <manifest>
+                                        <mainClass>${app.main.class}</mainClass>
+                                        <addClasspath>true</addClasspath>
+                                        <classpathPrefix>lib/</classpathPrefix>
+                                    </manifest>
+                                </archive>
+                            </configuration>
+                        </execution>
+                        <execution>
                             <id>default-jar</id>
                             <phase>${gatk.jar.phase}</phase>
                         </execution>
+                        <!--
+                        Maven keeps executing default-jar first, even if it's listed AFTER the executable-jar.
+                        So while packaging: run executable-jar, disable default-jar, then run unshaded-default-jar.
+                        -->
+                        <execution>
+                            <id>unshaded-default-jar</id>
+                            <goals>
+                                <goal>jar</goal>
+                            </goals>
+                            <phase>none</phase>
+                        </execution>
                         <execution>
                             <id>test-jar</id>
                             <goals>
@@ -341,13 +376,14 @@
                     <artifactId>maven-shade-plugin</artifactId>
                     <executions>
                         <execution>
-                            <id>gatk-executable</id>
+                            <id>package-jar</id>
                             <goals>
                                 <goal>shade</goal>
                             </goals>
                             <phase>none</phase>
                             <configuration>
                                 <minimizeJar>true</minimizeJar>
+                                <createDependencyReducedPom>false</createDependencyReducedPom>
                                 <artifactSet>
                                     <excludes>
                                         <exclude>org.broadinstitute.gatk:gsalib:tar.gz:*</exclude>
@@ -405,7 +441,7 @@
                             <phase>none</phase>
                             <configuration>
                                 <descriptors>
-                                    <descriptor>src/main/assembly/binary-dist.xml</descriptor>
+                                    <descriptor>${gatk.assembly.directory}/binary-dist.xml</descriptor>
                                 </descriptors>
                             </configuration>
                         </execution>
@@ -437,7 +473,22 @@
                             </configuration>
                         </execution>
                         <execution>
-                            <id>link-binary-jar</id>
+                            <id>link-executable-jar</id>
+                            <goals>
+                                <goal>link</goal>
+                            </goals>
+                            <phase>none</phase>
+                            <configuration>
+                                <links>
+                                    <link>
+                                        <dst>${gatk.shortcut.directory}/${gatk.binary-dist.name}.${project.packaging}</dst>
+                                        <src>${gatk.executable.directory}/${gatk.binary-dist.name}.${project.packaging}</src>
+                                    </link>
+                                </links>
+                            </configuration>
+                        </execution>
+                        <execution>
+                            <id>link-package-jar</id>
                             <goals>
                                 <goal>link</goal>
                             </goals>
@@ -445,7 +496,11 @@
                             <configuration>
                                 <links>
                                     <link>
-                                        <dst>${gatk.basedir}/target/${gatk.binary-dist.name}.${project.packaging}</dst>
+                                        <dst>${gatk.package.directory}/${gatk.binary-dist.name}.${project.packaging}</dst>
+                                        <src>${project.build.directory}/${project.build.finalName}.${project.packaging}</src>
+                                    </link>
+                                    <link>
+                                        <dst>${gatk.shortcut.directory}/${gatk.binary-dist.name}.${project.packaging}</dst>
                                         <src>${project.build.directory}/${project.build.finalName}.${project.packaging}</src>
                                     </link>
                                 </links>
@@ -626,6 +681,21 @@
                 <executions>
                     <execution>
                         <!--
+                        TODO: Separate maven modules into separate git repos?
+                        Until then, keep devs from accidentally mixing utils/engine/tools.
+                        -->
+                        <id>check-utils-engine-tools</id>
+                        <goals>
+                            <goal>exec</goal>
+                        </goals>
+                        <phase>process-sources</phase>
+                        <inherited>false</inherited>
+                        <configuration>
+                            <executable>${gatk.shell.directory}/check_utils_engine_tools.sh</executable>
+                        </configuration>
+                    </execution>
+                    <execution>
+                        <!--
                         TODO: Remove after 3.3+ release.
                         Until then, will clean out symbolic links from users who download public/protected.
                         Perhaps leave the script even longer.
@@ -637,7 +707,7 @@
                         <phase>process-test-resources</phase>
                         <inherited>false</inherited>
                         <configuration>
-                            <executable>${gatk.basedir}/public/src/main/scripts/shell/delete_maven_links.sh</executable>
+                            <executable>${gatk.shell.directory}/delete_maven_links.sh</executable>
                         </configuration>
                     </execution>
                 </executions>
@@ -689,7 +759,7 @@
                         <!-- Only generate the GATK Docs across the parent aggregation, not the children too. -->
                         <inherited>false</inherited>
                         <configuration>
-                            <doclet>org.broadinstitute.gatk.utils.help.GATKDoclet</doclet>
+                            <doclet>org.broadinstitute.gatk.tools.walkers.help.WalkerDoclet</doclet>
                             <docletArtifact>
                                 <groupId>${project.groupId}</groupId>
                                 <artifactId>gatk-package-distribution</artifactId>
@@ -733,6 +803,26 @@
             </modules>
         </profile>
 
+        <!-- Optionally do not shade/package jars -->
+        <!--
+        NOTE: Profile id "fast" comes from comments in PR #771.
+        The name is meant to be memorable, but is highly non-specific. Users are forewarned that
+        behavior of this profile, or the identifier itself, may be heavily modified in the future.
+        Hardcode usage in non-VCS controlled scripts at your own risk.
+        -->
+        <profile>
+            <id>fast</id>
+            <activation>
+                <property>
+                    <name>disable.shadepackage</name>
+                </property>
+            </activation>
+            <properties>
+                <gatk.unpack.phase>none</gatk.unpack.phase>
+                <gatk.shade.phase>none</gatk.shade.phase>
+            </properties>
+        </profile>
+
         <!-- Collection of properties for use during package testing -->
         <profile>
             <id>packagetests-enabled</id>
@@ -746,6 +836,8 @@
                 <maven.javadoc.skip>true</maven.javadoc.skip>
                 <gatk.generate-gatk-extensions.skipped>true</gatk.generate-gatk-extensions.skipped>
                 <gatk.jar.phase>none</gatk.jar.phase>
+                <gatk.unpack.phase>none</gatk.unpack.phase>
+                <gatk.shade.phase>none</gatk.shade.phase>
                 <gatk.generate-resources.phase>none</gatk.generate-resources.phase>
                 <gatk.process-resources.phase>none</gatk.process-resources.phase>
                 <gatk.process-test-resources.phase>none</gatk.process-test-resources.phase>
diff --git a/public/VectorPairHMM/pom.xml b/public/VectorPairHMM/pom.xml
index ca08087..b2d2319 100644
--- a/public/VectorPairHMM/pom.xml
+++ b/public/VectorPairHMM/pom.xml
@@ -5,7 +5,7 @@
     <parent>
         <groupId>org.broadinstitute.gatk</groupId>
         <artifactId>gatk-root</artifactId>
-        <version>3.3</version>
+        <version>3.5</version>
         <relativePath>../../public/gatk-root</relativePath>
     </parent>
 
diff --git a/public/VectorPairHMM/src/main/c++/Sandbox.java b/public/VectorPairHMM/src/main/c++/Sandbox.java
index 605c5f5..99c91d2 100644
--- a/public/VectorPairHMM/src/main/c++/Sandbox.java
+++ b/public/VectorPairHMM/src/main/c++/Sandbox.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -69,7 +69,7 @@ public class Sandbox {
      * change per JVM session
      * @param readDataHolderClass class type of JNIReadDataHolderClass
      * @param haplotypeDataHolderClass class type of JNIHaplotypeDataHolderClass
-     * @param mask mask is a 64 bit integer identical to the one received from jniGetMachineType(). Users can disable usage of some hardware features by zeroing some bits in the mask
+     * @param mask 64 bit integer identical to the one received from jniGetMachineType(). Users can disable usage of some hardware features by zeroing bits in the mask
      * */
     private native void jniInitializeClassFieldsAndMachineMask(Class<?> readDataHolderClass, Class<?> haplotypeDataHolderClass, long mask);
 
@@ -156,7 +156,7 @@ public class Sandbox {
     public native void jniClose();
     public void close()
     {
-        System.out.println("Time spent in setup for JNI call : "+(setupTime*1e-9)+" compute time : "+(computeTime*1e-9));
+        System.err.println("Time spent in setup for JNI call : " + (setupTime * 1e-9) + " compute time : " + (computeTime * 1e-9));
         jniClose();
     }
 
@@ -170,8 +170,8 @@ public class Sandbox {
       }
       catch(FileNotFoundException e)
       {
-        System.err.println("File "+filename+" cannot be found/read");
-        return;
+          System.err.println("File "+filename + " cannot be found/read");
+          return;
       }
       int idx = 0;
       int numReads = 0;
diff --git a/public/VectorPairHMM/src/main/c++/utils.cc b/public/VectorPairHMM/src/main/c++/utils.cc
index 3b0ce35..89bd975 100644
--- a/public/VectorPairHMM/src/main/c++/utils.cc
+++ b/public/VectorPairHMM/src/main/c++/utils.cc
@@ -154,20 +154,20 @@ void initialize_function_pointers(uint64_t mask)
   //mask = (1 << SSE41_CUSTOM_IDX);
   if(is_avx_supported() && (mask & (1<< AVX_CUSTOM_IDX)))
   {
-    cout << "Using AVX accelerated implementation of PairHMM\n";
+    cerr << "Using AVX accelerated implementation of PairHMM\n";
     g_compute_full_prob_float = compute_full_prob_avxs<float>;
     g_compute_full_prob_double = compute_full_prob_avxd<double>;
   }
   else
     if(is_sse41_supported() && (mask & ((1<< SSE41_CUSTOM_IDX) | (1<<SSE42_CUSTOM_IDX))))
     {
-      cout << "Using SSE4.1 accelerated implementation of PairHMM\n";
+      cerr << "Using SSE4.1 accelerated implementation of PairHMM\n";
       g_compute_full_prob_float = compute_full_prob_sses<float>;
       g_compute_full_prob_double = compute_full_prob_ssed<double>;
     }
     else
     {
-      cout << "Using un-vectorized C++ implementation of PairHMM\n";
+      cerr << "Using un-vectorized C++ implementation of PairHMM\n";
       g_compute_full_prob_float = compute_full_prob<float>;
       g_compute_full_prob_double = compute_full_prob<double>;
     }
@@ -300,16 +300,16 @@ void tokenize(std::ifstream& fptr, std::vector<std::string>& tokens)
       {
 	myVec.push_back(tmp);
 	++i;
-	//std::cout <<tmp <<"#";
+	//std::cerr <<tmp <<"#";
       }
       tmp = "";
     }
-    //std::cout << "\n";
+    //std::cerr << "\n";
     if(myVec.size() > 0)
       break;
   }
   tokens.clear();
-  //std::cout << "Why "<<myVec.size()<<"\n";
+  //std::cerr << "Why "<<myVec.size()<<"\n";
   tokens.resize(myVec.size());
   for(i=0;i<(int)myVec.size();++i)
     tokens[i] = myVec[i];
@@ -334,7 +334,7 @@ int read_mod_testcase(ifstream& fptr, testcase* tc, bool reformat)
   tc->i = new char[tc->rslen];
   tc->d = new char[tc->rslen];
   tc->c = new char[tc->rslen];
-  //cout << "Lengths "<<tc->haplen <<" "<<tc->rslen<<"\n";
+  //cerr << "Lengths "<<tc->haplen <<" "<<tc->rslen<<"\n";
   memcpy(tc->rs, tokens[1].c_str(),tokens[1].size());
   assert(tokens.size() == (size_t)(2 + 4*(tc->rslen)));
   //assert(tc->rslen < MROWS);
@@ -522,7 +522,7 @@ void do_compute(char* filename, bool use_old_read_testcase, unsigned chunk_size,
           double rel_error = (baseline_result != 0) ? fabs(abs_error/baseline_result) : 0;
           if(abs_error > 1e-5 && rel_error > 1e-5)
           {
-            cout << std::scientific << baseline_result << " "<<results_vec[i]<<"\n";
+            cerr << std::scientific << baseline_result << " "<<results_vec[i]<<"\n";
             all_ok = false;
           }
         }
@@ -547,14 +547,14 @@ void do_compute(char* filename, bool use_old_read_testcase, unsigned chunk_size,
 #endif
   if(all_ok)
   {
-    cout << "All output values within acceptable error\n";
-    cout << "Baseline double precision compute time "<<baseline_compute_time*1e-9<<"\n";
+    cerr << "All output values within acceptable error\n";
+    cerr << "Baseline double precision compute time "<<baseline_compute_time*1e-9<<"\n";
   }
-  cout << "Num testcase "<<num_testcases<< " num double invocations "<<num_double_calls<<"\n";
-  cout << "Vector compute time "<< vector_compute_time*1e-9 << "\n";
+  cerr << "Num testcase "<<num_testcases<< " num double invocations "<<num_double_calls<<"\n";
+  cerr << "Vector compute time "<< vector_compute_time*1e-9 << "\n";
 #ifdef USE_PAPI
   for(unsigned i=0;i<NUM_PAPI_COUNTERS;++i)
-    cout << eventnames[i] << " : "<<accum_values[i]<<"\n";
+    cerr << eventnames[i] << " : "<<accum_values[i]<<"\n";
 #endif
 #ifdef PRINT_PER_INTERVAL_TIMINGS
   times_fptr.close();
diff --git a/public/doc/Ant_Help.tex b/public/doc/Ant_Help.tex
deleted file mode 100644
index e119843..0000000
--- a/public/doc/Ant_Help.tex
+++ /dev/null
@@ -1,9 +0,0 @@
-\begin{description}
-  \item[compile] Compiles all java code in the source tree.  Places generated classes in the build directory.
-  \item[dist] Generates jar files, suitable for running via java -jar {YOUR\_JAR}.  Places resulting jars in the dist subdirectory.
-  \item[resolve] Resolves third-party dependencies.  Downloads all third-party dependencies to the lib directory.
-  \item[javadoc] Generates javadoc for the source tree.  Places javadoc in the javadoc directory.
-  \item[clean] Removes artifacts from old compilations / distributions.
-\end{description}
-View all available ant targets by running 'ant -projecthelp' in the directory containing build.xml.
-
diff --git a/public/doc/GATK_Coding_Standards.pdf b/public/doc/GATK_Coding_Standards.pdf
deleted file mode 100644
index 81148bd..0000000
Binary files a/public/doc/GATK_Coding_Standards.pdf and /dev/null differ
diff --git a/public/doc/GATK_Coding_Standards.tex b/public/doc/GATK_Coding_Standards.tex
deleted file mode 100644
index 42b6830..0000000
--- a/public/doc/GATK_Coding_Standards.tex
+++ /dev/null
@@ -1,288 +0,0 @@
-\documentclass[9pt]{report}
-\usepackage{fullpage}
-\usepackage{listings}
-\begin{document}
-
-\title{Genome Analysis Toolkit Coding Standards for Java}
-\author{Genome Analysis Software Engineering}
-\date{\today}
-\maketitle
-\tableofcontents
-
-\chapter{Overview}
-
-\section{Credit}
-The majority of text in this document is verbatim from the production informatics coding standard document, produced by Ted Sharpe and the production
-informatics team here at the Broad Institute.  They deserve all the credit for the insights in this document, and if you feel so inclined please share statements
-of gratitude with them.
-
-\section{Summary}
-This document is an attempt to describe a brief and minimal set of standards for Java code for use by production informatics projects at the Broad.  The goal of the standards is to allow programmers to be innovative and expressive, while allowing their peers a vague hope of maintaining and extending their output.  This document describes a set of arbitrary standards for naming and documentation based on industry-standard practices, and a set of �good practices� guidelines intended to imp [...]
-\pagebreak
-\section{Cheat Sheet}
-\begin{table}[htdp]
-\caption{default}
-\begin{center}
-\begin{tabular}{l p{4.5cm} r}
-Type & Style & Example \\ \hline
-Function names & uppercase words, with the first word lowercase & \texttt{convertToParser} \\
-Package names & lowercase, with specific functional encapsulation for each class  & \texttt{org.broadinstitute.sting.gatk}\\
-Variable names & uppercase words, with the first word lowercase & \texttt{resetCounter} \\
-Class names & uppercase words representing a noun & \texttt{TraversalEngine} \\
-Interface names & the same as class names, no 'i' before the name  & \texttt{GenomeEngine} \\
-Tab length & 4 spaces &  \\
-
-\end{tabular}
-\end{center}
-\label{default}
-\end{table}%
-
-
-
-\chapter{Naming}
-Choosing names is among the more arduous tasks in programming.  There is constant contention between creativity�finding the mot juste�and following fashion so as to more easily allow names to be guessed; and a tug between being concise�if only to save typing�and being verbose so as to provide greater explicitness.  Expending the time necessary to create a really good name conflicts with the programmer�s appropriate desire to get on with the job and get something done.
-Unfortunately, naming has a great impact on the maintainability of the code, and despite its being a somewhat fussy and fusty topic, a few guidelines are appropriate.  If the standards document is well crafted it will free the programmer from some of the arbitrary decision-making that, when inconsistent, detracts from the intelligibility of the code, while allowing the programmer to focus on the semantics of the name.
-
-\section{Package Naming}
-Package names should be all lower case, and should begin with: \\ \\ 
-\texttt{org.broadinstitute.sting}\\ \\
-To this add the product name (e.g., basecaller), and subdivide packages according to functionality below that.  Put all classes in a package (to allow for more easily understood dependency trees, and to allow reuse via a CLASSPATH of reasonable length).\\
-
-\lstset{language=Java, caption=Good Package Names, frame=leftline, label=PackageNames,basicstyle=\small}
-\lstset{tabsize=4}
-\begin{lstlisting}
-
-package org.broadinstitute.gatk.tools.walkers.basecalller.basecallerEngine; // Good
-package org.broadinstitute.gatk.tools.walkers.baseCallingStuff;			  // Bad
-package myBaseCaller;								  		// Unacceptable
-
-\end{lstlisting}
-
-
-\subsection{Good practices}  Try to organize packages so that the dependency tree isn�t total spaghetti.  Ideally, there should be a hierarchy among the packages so that, for example, the web support classes are in one package, and don�t know anything about database access, and the database access classes are in another package, and don�t know anything about web support.  Something that knows about both (a package that supports servlet development, for example) should be in a separate pa [...]
-
-\section{Interface Naming}
-Interfaces require no special naming convention to distinguish them from classes.  (See the Class naming conventions below.)  The presumption is that most of the types that you are passing around are, in fact, abstract types that don�t lock you into a particular implementation:  elaborate decoration of interface names is therefore unnecessary and undesirable.  Distinguish the implementation class names instead (since they should be repeated far less often throughout the code than the nam [...]
-\lstset{language=Java, caption=Good Interface Names, frame=leftline, label=InterfaceNames,basicstyle=\small}
-\lstset{tabsize=4}
-\begin{lstlisting}
-
-interface DatabaseBinder		// OK
-interface ITagFactory		// not as good
-
-\end{lstlisting}
-
-
-Some people like to reserve �able or �ible names for interfaces (e.g., Cloneable, Comparable, or Runnable).  This is especially useful for mix-in interfaces, where the interface describes a capability that can be shared among otherwise disparate types of objects.  Not all interfaces fall into this pattern, however, and you needn�t be concerned if your interface seems to want to have a noun as its name, rather than forcing it into the verb-able mold.
-\subsection{Good practices}
- Expending the time necessary to abstract common behavior from a set of related classes is probably the single most important thing you can do to improve the extensibility and adaptability of your code.  Use interfaces to describe the common behavior so that you don�t force your clients to use particular implementations.
-In all cases, the methods of the interface describe the complete repertoire of behaviors necessary to be an object of the type named by the interface.  So check your name to see if it describes something possessing the interface�s set of behaviors, and check your methods to see if that�s what something by that name should be able to do.  Ruthlessly eliminate any inconsistencies.
-
-\section{Class Naming} 
-Naming:  Use mixed-case names, capitalizing the first character of each word.  Avoid overly short, and overly common names, and overly long, verbose names.  If there is doubt about whether the components of a name are separate words, use less capitalization rather than more.  (E.g, Timezone, not TimeZone.  Barcode, not BarCode.)  You are aiming to capitalize each concept, not each morpheme.  A primary interface name is a good prefix for a class name.  A package name is not a good prefix. [...]
-Don�t abbreviate any of the words in a class name.  It�s just too hard to remember, and too likely to introduce inconsistency.  You may, however, use very common acronyms as if they were a word.  For example, StructuredQueryLanguageHelper would be quite ridiculous�SQL is a common acronym, and the class should be called SqlHelper (note the lower case ql).
-Class names must be nouns.
-\lstset{language=Java, caption=Good Class Names, frame=leftline, label=ClassNames,basicstyle=\small}
-\lstset{tabsize=4}
-\begin{lstlisting}
-
-class FormModule		// OK
-class SOExecutor		// not very good � obscure abbreviations
-class State				// not good � too vague, ambiguous, and common
-
-\end{lstlisting}
-
-\subsection{Good practices}
-A class should model a clear concept.  If you can�t explain the concept behind a class in a sentence or two, there is probably something wrong.  (And if your sentence or two seems to require extensive use of the word �or� you can be certain that you have a problem.)
-
-\section{Field and Variable Naming}
-Use mixed case names, capitalizing the first character of each word except the first.  Constants may be all upper case, using underscores to separate words.
-Idiosyncratic abbreviations are acceptable for variable and field names, because their scope is very limited.  (This is because you won�t be using any public fields:  see below.)
-Public constants should follow the semantic rules already specified for classes:  not too short, not too long, no cryptic abbreviations.  Remember that your clients will qualify the name of the constant with the class name, so overly common words are not so much of a problem as for class names.  
-
-\subsection{Good practices}
- No non-final public fields.  Ever.  Period.  You know why.  (But just to be tediously explicit, we�ll go on a bit about it.)  Just when you think you�ve got a totally passive data-bearing object that might just as well be a C-language struct as a Java class, and you reckon you�ll just make its fields public, you�ll figure out that it needs behavior.  It needs to be persistent, and has to keep track of whether it�s been dirtied or not.  Or it needs a new field that is dependent in some w [...]
-Sacks of data are so boring�objects long to have behavior.
-Note that public members of private, nested classes aren�t really public at all, and so they�re perfectly fine.  (This allows you to create struct-like objects for internal use within a class.)
-Be careful about static objects (whether public or private, final or not):  they get created when the class is loaded and there is no easy way to handle complex dependencies among them that might require some particular loading order.  It�s best to keep them very simple�for example, creating tiny immutable static objects to simulate C++-style enumerations is completely appropriate.
-Experience has shown that distinguishing the names of instance fields, class (i.e., static) fields, and constants from each other and from temporaries (i.e., stack frame variables, whether declared in a local block or passed as arguments) is enormously helpful in quickly comprehending a method:  It�s good to know at a glance how the code is affecting the state of the object (instance fields) or of all the objects in the class (class fields) without having to study the entire class to lea [...]
-Overly short variable names, especially one-character names, and names that are common words, or parts of common words, are a problem for programmers who use editors with a limited understanding of Java�s syntax.  Using iii as a for-loop index instead of i doesn�t really take much extra time, and it makes it a great deal easier to find all the places where the variable is used.
-\lstset{language=Java, caption=Good Variable Names, frame=leftline, label=VarNames,basicstyle=\small}
-\lstset{tabsize=4}
-\begin{lstlisting}
-
-private static Section gHeader;	// class member
-private URL mMyURL;				// instance member
-String wfQName;					// local � ugly, but who cares?
-for ( int i = 0; i < k; ++i )	// Please, don�t.
-int iii;						// Thank you.
-\end{lstlisting}
-
-
-\section{Method Naming}  
-Use mixed case names, capitalizing the first character of each word except for the first.  The first, uncapitalized word should be a verb.  Uncomplicated, local fetchers and modifiers of independent state ought to use get and set as their first word.  Getters for boolean values can use is, has, or can instead of get.  (Conversely, a method that causes the lights to dim over a several square block area should not be called getFoo.  Merely to name it fetchFoo instead appropriately hints th [...]
-It�s especially important that you don�t use obscure abbreviations or leave the vowels out of words in method names:  they�re too hard to remember.  Common acronyms, treated as if they were words, are fine.  It�s quite helpful if the name of the method is pronounceable, and spelled as it�s pronounced.
-\subsection{Good practices}
-Methods that have more lines than can fit on the screen all at once are much harder to understand than those that don�t.  Ditto for methods that have numerous lines that must be wrapped due to their length.  Some suggest that each method can have at most one looping control structure.  That doesn�t always work in practice, but you get the gist.
-Following strict structured programming rules is usually an aid to comprehension.  (This might not be true if doing so requires you to use a complicated set of flags to control the flow.)  If you feel that a continue, labelled break, or early return is less confusing than the structured alternatives, provide a comment to call attention to the easily overlooked, one- or two-token statement that breaks the rules of structured programming.
-
-\lstset{language=Java, caption=Good Method Names, frame=leftline, label=MethodNames,basicstyle=\small}
-\lstset{tabsize=4}
-\begin{lstlisting}
-
-while ( itr.hasNext() )
-{
-	if ( "foo".equals((String)itr.next()) )
-	{
-		return;	// HEY!  There�s an early return, here.
-	}
-}
-
-\end{lstlisting}
-
-A method should do one job.  And, as stated earlier for classes, you ought to be able to describe that job in a crisp sentence or two.  After you�ve done so, type it in as javadoc.
-Take switches and long series of if / else if blocks as a warning sign that polymorphism might have served you better.
-Methods with more than a very few parameters are difficult to use.  If you have a method with more than three or four parameters you might consider passing an object instead.  If you have more than seven, you definitely need to do something else.
-There is a delicate balance to strike between sanity-checking every argument passed to every method and never testing your inputs at all.  Too much checking chews into performance, too little means the code will not be robust.  Here are two criteria for achieving a happy medium:
-Arguments that are not used directly by the method, but simply passed through to lower layers need not be checked.  Arguments that are used directly by the method ought to be checked, especially if the consequence of not checking them will be something completely uninformative like a NullPointerException.  You cannot assume that it will be easy to produce a stack trace to home in on what code is producing the exception, and so the exception�s message must serve that purpose.
-The second criterion is to ask whether the consequences of passing bad arguments will be felt immediately or whether doing so will deploy a delayed-action time bomb.  For example, passing a null object that gets saved as a part of the object�s state (even if it�s not used directly within the method that received it) may put a na�ve object into an unstable state that may not reveal itself until much later in the program�s execution.  This can be the very devil to debug.  Your goal is to m [...]
-
-
-
-\chapter{Layout}
-\section{File Template}
-Begin each file with the following bit of rote material.  Just cut and paste it into your IDE so that it becomes the first few lines of every source file:
-\lstset{language=Java, caption=A Good Code Header, frame=leftline, label=Header,basicstyle=\small}
-\lstset{tabsize=4}
-\begin{lstlisting}
-/*
- * $Id$
- * BROAD INSTITUTE SOFTWARE COPYRIGHT NOTICE AND AGREEMENT
- * Software and documentation are copyright 2005 by the Broad Institute.
- * All rights are reserved.
- *
- * Users acknowledge that this software is supplied without any warranty or support.
- * The Broad Institute is not responsible for its use, misuse, or
- * functionality.
- */
-\end{lstlisting}
-
-\section{Documentation}
-
-All classes must have the following, minimum level of javadoc:
-\lstset{language=Java, caption=Javadoc Class Style, frame=leftline, label=GoodJavadoc,basicstyle=\small}
-\lstset{tabsize=4}
-\begin{lstlisting}
-/**
- * One crisp, informative sentence or noun phrase that explains
- * the concept modeled by the class.
- *
- * This class is [<em>not</em>] thread safe [because it is immutable].
- *
- * @author	I. M. Coder
- * @version	$Revision$
- */
-class CrispConcept
-{
-    public static final String ID = "$Id$";
-\end{lstlisting}
-
-
-You are encouraged to provide as much explanatory material as you feel is helpful following that first, summary sentence.  Information on algorithms and other information that will help a client make appropriate use of the class is particularly welcome.  (But see Stateful Interfaces, and Rules for Use, below.)
-Tell us whether your class is thread-safe or not.  Thread safety due to immutability is particularly well worth mentioning.  (It may warn a maintenance programmer off adding the set methods that you apparently forgot to provide.)
-All methods must have the following, minimum level of javadoc:
-\lstset{language=Java, caption=Good Method Javadoc, frame=leftline, label=MethodJavadoc,basicstyle=\small}
-\lstset{tabsize=4}
-\begin{lstlisting}
-/**
- * One crisp, informative sentence or noun phrase that explains
- * what the method does.
- *
- * @param	parm1	Parm1 selects the widget to be frobnicated.  Cannot be null.
- * @param	parm2	Parm2 specifies the type of frobnication to apply.
- * @return	The frobnicated widget.
- * @throws	FrobnicationException Thrown if widget isn�t frobnicable.
- */
-\end{lstlisting}
-For each parameter of reference type, tell us whether the reference may be null.
-\section{Imports}
-There is a balance between trying to maintain lengthy lists of classes imported one-by-one on the one hand, and importing many packages wholesale using an asterisk on the other.  It shouldn�t often be a big problem:  there are exceptions, but a class that makes use of dozens of other classes may be trying to tell you that it needs some redesign.  The suggestion is to import classes explicitly from packages we have written�especially those under active development.  Also import explicitly [...]
-\section{Order of Class Members}
-Classes should be laid out consistently.  You may put the member fields at either the bottom or the top, but you must not sprinkle them throughout.
-There is an argument that a class ought to be ordered with its public constants and constructors at the very top, its public methods next, and its internal stuff last, since that concentrates at the very top what a programmer needs to know to make use of the class.  This isn�t obligatory, but you may wish to give it some consideration.
-Nested classes go at the very bottom, after everything else.
-\section{Make �em Pretty}
-Use four space tabs.  Don�t omit braces around single statements.  Line things up so that it�s clear what things are on the same level.  Give us enough white space to make it pretty.
-
-
-\chapter{Design Considerations}
-If you�re doing things right (and the DoD doesn�t) design isn�t a distinct phase that ends when coding begins.  Ideally, you�ll develop a comprehensive, top-down design before you begin coding.  This may take more than one napkin.  Even so, you�ll face many decisions about implementation details that are not completely specified by the overall design.  In other words, it�s inevitable that you�ll be doing design while you code.  What follows are some coding standards for you to consider w [...]
-\section{Encapsulation}
-With very rare exception, all fields�both instance members and static members�should be private.  Protected and default (i.e., package-scoped) access is like public access, only less so.  (Making a change in such a field still requires you to locate and analyze use of the field in indefinitely many files:  for public fields you need to look everywhere, for protected fields you need only scan everything that extends you, and for default-access fields, you need only scan everything in the  [...]
-Always use the most restrictive permission consistent with the design of your class.  Don�t make all of your internal methods protected in the vain hope that someday, some extending class might need to tweak your internal state, and you�ll make it easy.
-Similarly, don�t provide a getter and setter for every piece of your internal state:  the goal is to meet the contract of the interfaces you implement, and to hide the details of how you do it.  Even the most legitimately passive of objects�the model objects you�ve just hauled out of the relational database�will likely have private, internal state that should not be exposed directly to clients.  (Check out all the hidden state in EJB entity beans, for example.)
-\section{Is-A versus Has-A}
-A subclass and its superclass have an Is-A (or specialization-generalization) relationship.  A class and a component of that class have a Has-A (or containment) relationship.  If you have a crisp, clear idea of what kind of things two classes represent, then simply saying to yourself �Thing A is a (special kind of a) Thing B�, and �Thing A has a Thing B (as one of its parts)�, will often make it clear what the relationship should be:  one of the two sentences may sound very odd.  �A Dog  [...]
-\section{Redundant data}
-If you have only one copy of a given datum, it will be either right or wrong, but it won�t be inconsistent with other data.  Guaranteeing consistency is a great deal more complex than guaranteeing accuracy.  Checking for and maintaining consistency among multiple copies of a datum often robs you of the efficiency you hoped to gain by the denormalization; not checking for and not maintaining consistency is a very frequent source of hard-to-fix bugs, and weird, unreliable program behavior.
-\section{Exceptions}
-Exceptions handle, well, exceptional conditions.  Properly used, they provide a last-gasp attempt to allow a robust program to clean-up and recover from catastrophic situations.  Exceptions should not be thrown frequently, certainly not as a part of the normal, expected flow of a program.  Do not use them as a nifty hack for implementing non-local transfer of control.  (Exceptions are far more expensive than normal returns, so the performance wizards won�t be tempted to do this, anyway.  [...]
-RuntimeExceptions are for even more rare, more catastrophic situations from which recovery is unlikely, at best.
-In code which will be called by general clients outside your package, catch and re-throw exceptions from the lower layers of code on which you depend to give a more package-oriented explanation of the bad thing that happened.  However, preserve information when you do this:  Wrap the original exception in a new exception that supplements rather than replaces the original message.  And respond to all flavors of printStackTrace with the nested exception�s stack trace (i.e., delegate these  [...]
-Never create exceptions with null messages.
-\section{Recovering External Resources}
-Java frees you from having to worry about memory as a resource.  (Well, it reduces the worry, anyway.)  Therefore you should have oodles of time left over to make certain that you free other resources when you�re done with them.  Two key external resources that you must make certain to release are open streams (which chew up a precious operating system file handle), and database connections (which chew up precious DBMS memory).  The only really reliable way to make certain that these res [...]
-Try to avoid designs that require a class to maintain an open stream.  One technique is to use an event-driven model to turn the file processing upside down:  you can still have a nicely modular and reusable class while processing the file within the scope of a single block by using Listeners.
-\section{Stateful Interfaces and Rules for Use}
-Good interfaces are concise, comprehensive, and orthogonal.  Concise means that there are no superfluous operations that don�t seem to fit the underlying abstraction, and that there is one good way of accomplishing a given end, not a variety of ways from which you must choose.  Comprehensive means that everything you might need to do in manipulating the object has been provided for.  And orthogonal means that each method does something independent, and that any method can be called at an [...]
-Poor interfaces are cluttered with Rules for Use.  If you are lucky, these rules are made explicit in documentation:  Be sure to call this method before calling that one, but never call this method if you�ve ever called that one, and do, please, remember to call this one when you�re all done.  Needless to say, these interfaces are very difficult for clients to use correctly.
-One particularly common form of this blight is the Stateful Interface:  the object has a lifecycle, and certain methods are appropriate only when the object is in some particular phase of the lifecycle.  For a simple example, read the javadoc for the java.sql.CallableStatement class.  It describes how you must call registerOutputParameter before calling execute, how, for maximum portability, you should not call getMoreResults if you have called any of the getOutputParameter methods, and  [...]
-If you can�t seem to work out how to avoid Rules for Use on your interfaces, you must at the very least make sure that each method call detects misuse, and throws an appropriate exception or does something other than trash your internal state.  Every public method, if it affects object state at all, must transform the object from one valid, consistent state to another valid, consistent state.
-If you need some ego-incentive to motivate you, consider this:  Hard-to-use, hard-to-understand, hard-to-maintain code is quickly replaced after you cease to maintain it.  What kind of legacy is that?
-Sometimes you can see several distinct patterns of use among the clients of an interface:  they might be telling you that you need to re-factor the interface into two separate interfaces.
-\section{Multithreading}
-Making your implementations thread-safe is a enormously complex issue.  Unfortunately, almost all of us are doing some work in multi-threaded environments (writing servlets, for example), and it's an issue that we are forced to confront.
-If you haven�t examined the issue for one of the classes you�ve implemented (either because you don�t anticipate its being used in a multi-threaded environment, or because the whole thing makes your brain ache), please provide a javadoc comment for the class indicating that it is not thread-safe.  If you�re not sure, it�s not safe!
-Don�t just synchronize every method.  Synchronization is far too expensive to use carelessly.  (Less so than it used to be, but still expensive.  And it doesn�t resolve all multi-threading issues, anyway.)
-One way of beginning to address the issue of thread-safety is to understand what doesn�t need any special thread-safety code, and try to produce as much of that as you can.  Here are a couple of quick tips.
-Objects that can be seen only by a single thread are immune from the issue:  If the only references to an object are from local variables�that is, if a reference to the object is never stored in an instance or static field�it will be visible only from the thread that creates it.
-Immutable objects are automatically thread-safe.  If you can�t change it, you can�t see it in an inconsistent, intermediate state.
-\section{Canonical methods}
-Most simple classes either do or are.  In the EJB environment session beans are the do classes, and entity beans are the are classes.  In the model-view-controller paradigm, model objects are the are objects, controllers are the do objects, and views are mostly are, but typically also have a little bit of do flavor.  So, you see, it does depend on what your definition of is is.
-The point of the distinction is that most of the are classes, those often immutable little bags of independent state, usually need to override equals and hashCode to behave properly.  You must implement these basic object operations in each of your passive, data-bearing classes; you may wish (or need) to implement them in the others.
-The ares are typically more useful when Comparable�implementing that interface allows you to put them into sorted Collections�so you ought to consider that next.
-Being Cloneable and Serializable usually come for free (no code to write), so throw those into the mix, too, unless there�s a compelling reason not to.  An example of a reason not to might be that you need to maintain uniqueness at a level of abstraction higher than object identity�you don�t want to allow clients to make copies.
-\section{Performance}
-Your overall design�your selection of algorithms and data structures, for example�has a far greater impact on performance than any little hacks you can apply while implementing the code.  So design for performance, and implement for clarity.
-Nonetheless, the java compiler that most of us use can use a little help in doing optimization.  Don�t expect order-of-magnitude performance gains�you�ll get those by designing away I/O, replacing searches with hashes, etc.�these are percentage point tweaks.
-Move invariant code out of loops.  For example, many for loops can calculate their terminating condition once, before the loop starts, rather than at each iteration through the loop.
-\lstset{language=Java, caption=Improvements to loops, frame=leftline, label=loopInprovements,basicstyle=\small}
-\lstset{tabsize=4}
-\begin{lstlisting}
-for ( int iii = 0; iii < str.length(); ++iii )	// bad
-
-int nnn = str.length();
-for ( int iii = 0; iii < nnn; ++iii )		// good
-\end{lstlisting}
-Strength reduction:  do a simple calculation to update the value of some variable using the value it has from a previous trip through a loop, rather than from scratch each time.
-\lstset{language=Java, caption=Good looping practices, frame=leftline, label=loopPractices,basicstyle=\small}
-\lstset{tabsize=4}
-\begin{lstlisting}
-for ( int iii = 0; iii < nnn; ++iii )		// bad
-{
-	double val = pow( 2., iii );
-	. . .
-}
-
-double val = 1.;
-for ( int iii = 0; iii < nnn; ++iii )		// good
-{
-	. . .
-	val *= 2.;
-}
-\end{lstlisting}
-Avoid some performance dogs in the SDK:  Use the underlying Stream classes rather than Readers when appropriate.  Use the newer, non-synchronized collection classes rather than Vector and Hashtable.
-Penalties
-Code that fails to follow these guidelines will be posted around the MIT campus, along with the author�s email address and an urgent request for comments.
-
-\end{document}
\ No newline at end of file
diff --git a/public/doc/README b/public/doc/README
index e70ced0..a6b5ca0 100644
--- a/public/doc/README
+++ b/public/doc/README
@@ -1,86 +1,3 @@
-The Genome Analysis Toolkit (GATK) 
-Copyright (c) 2009 The Broad Institute 
-
-Overview 
--------- 
-The Genome Analysis Toolkit (GATK) is a structured programming
-framework designed to enable rapid development of efficient and robust
-analysis tools for next-generation DNA sequencers.  The GATK solves
-the data management challenge by separating data access patterns from
-analysis algorithms, using the functional programming philosophy of
-Map/Reduce.  Consequently, the GATK is structured into data traversals
-and data walkers that interact through a programming contract in which
-the traversal provides a series of units of data to the walker, and
-the walker consumes each datum to generate an output for each datum.
-Because many tools to analyze next-generation sequencing data access
-the data in a very similar way, the GATK can provide a small but
-nearly comprehensive set of traversal types that satisfying the data
-access needs of the majority of analysis tools.  For example,
-traversals "by each sequencer read" and "by every read covering
-each locus in a genome" are common throughout many tools such as
-counting reads, building base quality histograms, reporting average
-coverage of the genome, and calling SNPs.  The small number of these
-traversals, shared among many tools enables the core GATK development
-team to optimize such traversals for correctness, stability, CPU
-performance, memory footprint, and in many cases to even automatically
-parallelize calculations.  Moreover, since the traversal engine
-encapsulates the complexity of efficiently accessing the
-next-generation sequencing data, researchers and developers are free
-to focus on their specific analysis algorithms.  This not only vastly
-improves productivity of the developers, who can quickly write new
-analyses, but also results in tools that are efficient and robust and
-can benefit from improvement to a common data management engine.
-
-Capabilities 
------------- 
-The GenomeAnalysisTK development environment is currently provided as
-a platform-independent Java programming language library.  The core
-system works with the nascent standard Sequence Alignment/Map (SAM)
-format to represent reads using a production-quality SAM library
-developed at the Broad.  The system can access a variety of metadata
-files such as dbSNP, Hapmap, RefSeq as well as work with genotype and
-SNP files in GLF, Geli, and other common formats.  The core system
-handles read data from Illumina/Solexa, SOLiD, and Roche/454.  The
-current GATK engine can process all of the 1000 genomes data
-representing ~5Tb of data from these three technologies produced from
-multiple sequencing centers and aligned to the human reference genome
-with multiple aligners.  The GATK currently provides traversals by
-each read (ByRead traversal), by all reads covering each locus in the
-genome (ByLoci traversal), and by all reads within pre-specified
-intervals on the genome (ByWindow traversal).
-
-Dependencies
-------------
-The GATK relies on a Java 6-compatible JRE.  At the time of this writing,
-the GATK team tests with Sun JRE version 1.6.0_12-b04.  Additionally, the
-GATK requires as inputs a sorted, indexed BAM file containing aligned reads 
-and a fasta-format reference with associated dictionary file (.dict)and 
-index (.fasta.fai).  
-
-Instructions for preparing input files are available here:
-
-http://www.broadinstitute.org/gatk/guide/article?id=1204
-
-The bundled 'resources' directory  contains an example BAM and fasta.
-
-Getting Started
----------------
-The GATK is distributed with a few standard analyses, including PrintReads,
-Pileup, and DepthOfCoverage.  More information on the included walkers is
-available here:
-
-http://www.broadinstitute.org/gatk/gatkdocs
-
-To print the reads of the included sample data, untar the package into
-the GenomeAnalysisTK directory and run the following command:
-
-java -jar GenomeAnalysisTK/GenomeAnalysisTK.jar \
-     -T PrintReads \
-     -R GenomeAnalysisTK/resources/exampleFASTA.fasta \
-     -I GenomeAnalysisTK/resources/exampleBAM.bam
-
-Support
--------
-Documentation for the GATK is available at http://www.broadinstitute.org/gatk/guide.
-For help using the GATK, developing analyses with the GATK, bug reports, 
-or feature requests, please visit our support forum at http://gatkforums.broadinstitute.org/
+The Genome Analysis Toolkit
+============
+See http://www.broadinstitute.org/gatk/
\ No newline at end of file
diff --git a/public/external-example/pom.xml b/public/external-example/pom.xml
index 5065805..bdd5dbd 100644
--- a/public/external-example/pom.xml
+++ b/public/external-example/pom.xml
@@ -9,7 +9,7 @@
     <name>External Example</name>
 
     <properties>
-        <gatk.version>3.3</gatk.version>
+        <gatk.version>3.5</gatk.version>
         <!--
         gatk.basedir property must point to your checkout of GATK/GATK until we can get all the
         dependencies out of the committed gatk repo and into central.
@@ -28,7 +28,7 @@
         <gatk.integrationtests.skipped>${gatk.committests.skipped}</gatk.integrationtests.skipped>
 
         <!-- This flag is used by the package tests to disable re-shading -->
-        <gatk.unpack.phase>process-resources</gatk.unpack.phase>
+        <gatk.unpack.phase>prepare-package</gatk.unpack.phase>
         <gatk.shade.phase>package</gatk.shade.phase>
     </properties>
 
@@ -49,7 +49,15 @@
 
         <dependency>
             <groupId>org.broadinstitute.gatk</groupId>
-            <artifactId>gatk-tools-public</artifactId>
+            <artifactId>gatk-utils</artifactId>
+            <version>${gatk.version}</version>
+            <type>test-jar</type>
+            <scope>test</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>org.broadinstitute.gatk</groupId>
+            <artifactId>gatk-engine</artifactId>
             <version>${gatk.version}</version>
             <type>test-jar</type>
             <scope>test</scope>
@@ -82,7 +90,7 @@
                             <artifactItems>
                                 <artifactItem>
                                     <groupId>org.broadinstitute.gatk</groupId>
-                                    <artifactId>gatk-engine</artifactId>
+                                    <artifactId>gatk-utils</artifactId>
                                     <version>${gatk.version}</version>
                                     <classifier>example-resources</classifier>
                                     <type>tar.bz2</type>
@@ -111,10 +119,9 @@
                             <!-- Required as doclet uses reflection to access classes for documentation, instead of source java-->
                             <docletPath>${project.build.outputDirectory}</docletPath>
                             <docletArtifact>
-                                <groupId>org.broadinstitute.gatk</groupId>
-                                <!-- TODO: THIS IS SUPPOSED TO BE GATK-UTILS! -->
-                                <artifactId>gatk-tools-public</artifactId>
-                                <version>${gatk.version}</version>
+                                <groupId>${project.groupId}</groupId>
+                                <artifactId>${project.artifactId}</artifactId>
+                                <version>${project.version}</version>
                             </docletArtifact>
                             <maxmemory>2g</maxmemory>
                             <useStandardDocletOptions>false</useStandardDocletOptions>
@@ -138,6 +145,7 @@
                         </goals>
                         <configuration>
                             <minimizeJar>true</minimizeJar>
+                            <createDependencyReducedPom>false</createDependencyReducedPom>
                             <!-- Explicitly include classes loaded via reflection from artifacts below -->
                             <filters>
                                 <filter>
@@ -147,7 +155,7 @@
                                     </includes>
                                 </filter>
                                 <filter>
-                                    <artifact>samtools:htsjdk</artifact>
+                                    <artifact>com.github.samtools:htsjdk</artifact>
                                     <includes>
                                         <include>**</include>
                                     </includes>
@@ -252,7 +260,32 @@
         </plugins>
     </build>
 
+    <reporting>
+        <excludeDefaults>true</excludeDefaults>
+    </reporting>
+
+    <!-- These profiles are used by the GATK build in a multi-module setting. You do NOT need these profiles. -->
     <profiles>
+        <!-- Optionally do not shade/package jars -->
+        <!--
+        NOTE: Profile id "fast" comes from comments in PR #771.
+        The name is meant to be memorable, but is highly non-specific. Users are forewarned that
+        behavior of this profile, or the identifier itself, may be heavily modified in the future.
+        Hardcode usage in non-VCS controlled scripts at your own risk.
+        -->
+        <profile>
+            <id>fast</id>
+            <activation>
+                <property>
+                    <name>disable.shadepackage</name>
+                </property>
+            </activation>
+            <properties>
+                <gatk.unpack.phase>none</gatk.unpack.phase>
+                <gatk.shade.phase>none</gatk.shade.phase>
+            </properties>
+        </profile>
+        <!-- Collection of properties for use during package testing -->
         <profile>
             <id>packagetests-enabled</id>
             <activation>
diff --git a/public/external-example/src/main/java/org/mycompany/app/MyExampleWalker.java b/public/external-example/src/main/java/org/mycompany/app/MyExampleWalker.java
index 1834c4a..c6188df 100644
--- a/public/external-example/src/main/java/org/mycompany/app/MyExampleWalker.java
+++ b/public/external-example/src/main/java/org/mycompany/app/MyExampleWalker.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -26,9 +26,9 @@
 package org.mycompany.app;
 
 import org.broadinstitute.gatk.utils.commandline.Output;
-import org.broadinstitute.gatk.engine.contexts.AlignmentContext;
-import org.broadinstitute.gatk.engine.contexts.ReferenceContext;
-import org.broadinstitute.gatk.engine.refdata.RefMetaDataTracker;
+import org.broadinstitute.gatk.utils.contexts.AlignmentContext;
+import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
+import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
 import org.broadinstitute.gatk.engine.walkers.LocusWalker;
 
 import java.io.PrintStream;
diff --git a/public/external-example/src/test/java/org/mycompany/app/MyExampleWalkerIntegrationTest.java b/public/external-example/src/test/java/org/mycompany/app/MyExampleWalkerIntegrationTest.java
index ee625a4..9f866bf 100644
--- a/public/external-example/src/test/java/org/mycompany/app/MyExampleWalkerIntegrationTest.java
+++ b/public/external-example/src/test/java/org/mycompany/app/MyExampleWalkerIntegrationTest.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -46,9 +46,13 @@ public class MyExampleWalkerIntegrationTest extends WalkerTest {
     }
 
     private File getResource(String path) throws URISyntaxException {
+        return new File(publicTestDir, path);
+        /*
+        TODO: Enable proper resource extraction from the test jars. For now just use the publicTestDir path.
         URL resourceUrl = getClass().getResource(path);
         if (resourceUrl == null)
             throw new MissingResourceException("Resource not found: " + path, getClass().getSimpleName(), path);
         return new File(resourceUrl.toURI());
+         */
     }
 }
diff --git a/public/external-example/src/test/java/org/mycompany/app/MyExampleWalkerUnitTest.java b/public/external-example/src/test/java/org/mycompany/app/MyExampleWalkerUnitTest.java
index 56335f1..3c8ca6c 100644
--- a/public/external-example/src/test/java/org/mycompany/app/MyExampleWalkerUnitTest.java
+++ b/public/external-example/src/test/java/org/mycompany/app/MyExampleWalkerUnitTest.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-engine/pom.xml b/public/gatk-engine/pom.xml
index 15ba06e..79ec377 100644
--- a/public/gatk-engine/pom.xml
+++ b/public/gatk-engine/pom.xml
@@ -5,7 +5,7 @@
     <parent>
         <groupId>org.broadinstitute.gatk</groupId>
         <artifactId>gatk-aggregator</artifactId>
-        <version>3.3</version>
+        <version>3.5</version>
         <relativePath>../..</relativePath>
     </parent>
 
@@ -24,6 +24,22 @@
             <artifactId>gatk-utils</artifactId>
             <version>${project.version}</version>
         </dependency>
+        <dependency>
+            <groupId>net.java.dev.jets3t</groupId>
+            <artifactId>jets3t</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.simpleframework</groupId>
+            <artifactId>simple-xml</artifactId>
+        </dependency>
+
+        <dependency>
+            <groupId>${project.groupId}</groupId>
+            <artifactId>gatk-utils</artifactId>
+            <version>${project.version}</version>
+            <type>test-jar</type>
+            <scope>test</scope>
+        </dependency>
 
         <dependency>
             <groupId>com.google.caliper</groupId>
@@ -36,28 +52,6 @@
         <plugins>
             <plugin>
                 <groupId>org.apache.maven.plugins</groupId>
-                <artifactId>maven-assembly-plugin</artifactId>
-                <executions>
-                    <execution>
-                        <id>example-resources</id>
-                        <phase>${gatk.generate-resources.phase}</phase>
-                    </execution>
-                </executions>
-            </plugin>
-            <plugin>
-                <groupId>org.apache.maven.plugins</groupId>
-                <artifactId>maven-resources-plugin</artifactId>
-                <executions>
-                    <execution>
-                        <id>copy-resource-bundle-log4j</id>
-                        <phase>prepare-package</phase>
-                    </execution>
-                </executions>
-            </plugin>
-            <!--
-            TODO: Refactor ResourceBundleExtractorDoclet.isWalker() and move the RBED to utils.
-            <plugin>
-                <groupId>org.apache.maven.plugins</groupId>
                 <artifactId>maven-javadoc-plugin</artifactId>
                 <executions>
                     <execution>
@@ -66,7 +60,6 @@
                     </execution>
                 </executions>
             </plugin>
-            -->
             <plugin>
                 <groupId>org.apache.maven.plugins</groupId>
                 <artifactId>maven-invoker-plugin</artifactId>
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/CommandLineExecutable.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/CommandLineExecutable.java
new file mode 100644
index 0000000..ceb2eda
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/CommandLineExecutable.java
@@ -0,0 +1,229 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine;
+
+import org.apache.log4j.Logger;
+import org.broadinstitute.gatk.utils.commandline.ArgumentTypeDescriptor;
+import org.broadinstitute.gatk.utils.commandline.CommandLineProgram;
+import org.broadinstitute.gatk.engine.arguments.GATKArgumentCollection;
+import org.broadinstitute.gatk.utils.sam.SAMReaderID;
+import org.broadinstitute.gatk.engine.filters.ReadFilter;
+import org.broadinstitute.gatk.engine.io.stubs.OutputStreamArgumentTypeDescriptor;
+import org.broadinstitute.gatk.engine.io.stubs.SAMFileWriterArgumentTypeDescriptor;
+import org.broadinstitute.gatk.engine.io.stubs.VCFWriterArgumentTypeDescriptor;
+import org.broadinstitute.gatk.engine.phonehome.GATKRunReport;
+import org.broadinstitute.gatk.utils.refdata.utils.RMDTriplet;
+import org.broadinstitute.gatk.engine.walkers.Walker;
+import org.broadinstitute.gatk.engine.crypt.CryptUtils;
+import org.broadinstitute.gatk.engine.crypt.GATKKey;
+import org.broadinstitute.gatk.utils.exceptions.UserException;
+import org.broadinstitute.gatk.utils.text.ListFileUtils;
+
+import java.security.PublicKey;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+
+/**
+ * @author aaron
+ */
+public abstract class CommandLineExecutable extends CommandLineProgram {
+    /**
+     * The actual engine which performs the analysis.
+     */
+    protected GenomeAnalysisEngine engine = new GenomeAnalysisEngine();
+
+    // get the analysis name
+    public abstract String getAnalysisName();
+
+    /**
+     * Gets the GATK argument bundle.
+     * @return A structure consisting of whatever arguments should be used to initialize the GATK engine.
+     */
+    protected abstract GATKArgumentCollection getArgumentCollection();
+
+    /**
+     * A list of all the arguments initially used as sources.
+     */
+    private final Collection<Object> argumentSources = new ArrayList<Object>();
+
+    protected static Logger logger = Logger.getLogger(CommandLineExecutable.class);
+
+    /**
+     * this is the function that the inheriting class can expect to have called
+     * when the command line system has initialized.
+     *
+     * @return the return code to exit the program with
+     */
+    protected int execute() throws Exception {
+        engine.setParser(parser);
+        argumentSources.add(this);
+
+        Walker<?,?> walker = engine.getWalkerByName(getAnalysisName());
+
+        try {
+            // Make sure a valid GATK user key is present, if required.
+            authorizeGATKRun();
+
+            engine.setArguments(getArgumentCollection());
+
+            // File lists can require a bit of additional expansion.  Set these explicitly by the engine. 
+            final Collection<SAMReaderID> bamFileList=ListFileUtils.unpackBAMFileList(getArgumentCollection().samFiles,parser);
+            engine.setSAMFileIDs(bamFileList);
+            if(getArgumentCollection().showFullBamList){
+                logger.info(String.format("Adding the following input SAM Files: %s",bamFileList.toString()));
+            }
+
+            engine.setWalker(walker);
+            walker.setToolkit(engine);
+
+            Collection<ReadFilter> filters = engine.createFilters();
+            engine.setFilters(filters);
+
+            // load the arguments into the walker / filters.
+            // TODO: The fact that this extra load call exists here when all the parsing happens at the engine
+            // TODO: level indicates that we're doing something wrong.  Turn this around so that the GATK can drive
+            // TODO: argument processing.
+            loadArgumentsIntoObject(walker);
+            argumentSources.add(walker);
+
+            Collection<RMDTriplet> rodBindings = ListFileUtils.unpackRODBindings(parser.getRodBindings(), parser);
+            engine.setReferenceMetaDataFiles(rodBindings);
+
+            for (ReadFilter filter: filters) {
+                loadArgumentsIntoObject(filter);
+                argumentSources.add(filter);
+            }
+
+            engine.execute();
+            generateGATKRunReport(walker);
+        } catch ( Exception e ) {
+            generateGATKRunReport(walker, e);
+            throw e;
+        }
+
+        // always return 0
+        return 0;
+    }
+
+    /**
+     * Authorizes this run of the GATK by checking for a valid GATK user key, if required.
+     * Currently, a key is required only if running with the -et NO_ET or -et STDOUT options.
+     */
+    private void authorizeGATKRun() {
+        if ( getArgumentCollection().phoneHomeType == GATKRunReport.PhoneHomeOption.NO_ET ||
+             getArgumentCollection().phoneHomeType == GATKRunReport.PhoneHomeOption.STDOUT ) {
+            if ( getArgumentCollection().gatkKeyFile == null ) {
+                throw new UserException("Running with the -et NO_ET or -et STDOUT option requires a GATK Key file. " +
+                                        "Please see " + UserException.PHONE_HOME_DOCS_URL +
+                                        " for more information and instructions on how to obtain a key.");
+            }
+            else {
+                PublicKey gatkPublicKey = CryptUtils.loadGATKDistributedPublicKey();
+                GATKKey gatkUserKey = new GATKKey(gatkPublicKey, getArgumentCollection().gatkKeyFile);
+
+                if ( ! gatkUserKey.isValid() ) {
+                    throw new UserException.KeySignatureVerificationException(getArgumentCollection().gatkKeyFile);
+                }
+            }
+        }
+    }
+
+    /**
+     * Generate the GATK run report for this walker using the current GATKEngine, if -et is enabled.
+     * This report will be written to either STDOUT or to the run repository, depending on the options
+     * for -et.
+     *
+     * @param e the exception, can be null if no exception occurred
+     */
+    private void generateGATKRunReport(Walker<?,?> walker, Exception e) {
+        if ( getArgumentCollection().phoneHomeType != GATKRunReport.PhoneHomeOption.NO_ET ) {
+            GATKRunReport report = new GATKRunReport(walker, e, engine, getArgumentCollection().phoneHomeType );
+            report.postReport(getArgumentCollection().phoneHomeType);
+        }
+    }
+
+    /**
+     * Convenience method for fully parameterized generateGATKRunReport when an exception has
+     * not occurred
+     *
+     * @param walker
+     */
+    private void generateGATKRunReport(Walker<?,?> walker) {
+        generateGATKRunReport(walker, null);
+    }
+
+    /**
+     * Subclasses of CommandLinePrograms can provide their own types of command-line arguments.
+     * @return A collection of type descriptors generating implementation-dependent placeholders.
+     */
+    protected Collection<ArgumentTypeDescriptor> getArgumentTypeDescriptors() {
+        return Arrays.asList( new VCFWriterArgumentTypeDescriptor(engine,System.out,argumentSources),
+                              new SAMFileWriterArgumentTypeDescriptor(engine,System.out),
+                              new OutputStreamArgumentTypeDescriptor(engine,System.out) );
+    }
+
+    /**
+     * GATK can add arguments dynamically based on analysis type.
+     *
+     * @return true
+     */
+    @Override
+    protected boolean canAddArgumentsDynamically() {
+        return true;
+    }
+
+    /**
+     * GATK provides the walker as an argument source.
+     * @return List of walkers to load dynamically.
+     */
+    @Override
+    protected Class[] getArgumentSources() {
+        // No walker info?  No plugins.
+        if (getAnalysisName() == null) return new Class[] {};
+
+        Collection<Class> argumentSources = new ArrayList<Class>();
+
+        Walker walker = engine.getWalkerByName(getAnalysisName());
+        engine.setArguments(getArgumentCollection());
+        engine.setWalker(walker);
+        walker.setToolkit(engine);
+        argumentSources.add(walker.getClass());
+
+        Collection<ReadFilter> filters = engine.createFilters();
+        for(ReadFilter filter: filters)
+            argumentSources.add(filter.getClass());
+
+        Class[] argumentSourcesAsArray = new Class[argumentSources.size()];
+        return argumentSources.toArray(argumentSourcesAsArray);
+    }
+
+    @Override
+    protected String getArgumentSourceName( Class argumentSource ) {
+        return engine.getWalkerName((Class<Walker>)argumentSource);
+    }
+
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/CommandLineGATK.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/CommandLineGATK.java
new file mode 100644
index 0000000..9f6c16e
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/CommandLineGATK.java
@@ -0,0 +1,370 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine;
+
+import htsjdk.samtools.SAMException;
+import htsjdk.tribble.TribbleException;
+import org.broadinstitute.gatk.utils.commandline.Argument;
+import org.broadinstitute.gatk.utils.commandline.ArgumentCollection;
+import org.broadinstitute.gatk.utils.commandline.CommandLineProgram;
+import org.broadinstitute.gatk.engine.arguments.GATKArgumentCollection;
+import org.broadinstitute.gatk.utils.refdata.tracks.FeatureManager;
+import org.broadinstitute.gatk.engine.walkers.Attribution;
+import org.broadinstitute.gatk.engine.walkers.Walker;
+import org.broadinstitute.gatk.utils.exceptions.UserException;
+import org.broadinstitute.gatk.utils.help.*;
+import org.broadinstitute.gatk.utils.text.TextFormattingUtils;
+
+import java.util.*;
+
+/**
+ * All command line parameters accepted by all tools in the GATK.
+ *
+ * <h3>Info for end users</h3>
+ *
+ * <p>This is a list of options and parameters that are generally available to all tools in the GATK.</p>
+ *
+ * <p>There may be a few restrictions, which are indicated in individual argument descriptions. For example the -BQSR
+ * argument is only meant to be used with a subset of tools, and the -pedigree argument will only be effectively used
+ * by a subset of tools as well. Some arguments conflict with others, and some conversely are dependent on others. This
+ * is all indicated in the detailed argument descriptions, so be sure to read those in their entirety rather than just
+ * skimming the one-line summary in the table.</p>
+ *
+ * <h3>Info for developers</h3>
+ *
+ * <p>This class is the GATK engine itself, which manages map/reduce data access and runs walkers.</p>
+ *
+ * <p>We run command line GATK programs using this class. It gets the command line args, parses them, and hands the
+ * gatk all the parsed out information. Pretty much anything dealing with the underlying system should go here;
+ * the GATK engine should deal with any data related information.</p>
+ */
+ at DocumentedGATKFeature(groupName = HelpConstants.DOCS_CAT_ENGINE)
+public class CommandLineGATK extends CommandLineExecutable {
+    /**
+     * A complete list of tools (sometimes also called walkers because they "walk" through the data to perform analyses)
+     * is available in the online documentation.
+     */
+    @Argument(fullName = "analysis_type", shortName = "T", doc = "Name of the tool to run")
+    private String analysisName = null;
+
+    // our argument collection, the collection of command line args we accept
+    @ArgumentCollection
+    private GATKArgumentCollection argCollection = new GATKArgumentCollection();
+
+    /**
+     * Get pleasing info about the GATK.
+     *
+     * @return A list of Strings that contain pleasant info about the GATK.
+     */
+    @Override
+    protected ApplicationDetails getApplicationDetails() {
+        return new ApplicationDetails(createApplicationHeader(),
+                getAttribution(),
+                ApplicationDetails.createDefaultRunningInstructions(getClass()),
+                getAdditionalHelp());
+    }
+
+    @Override
+    public String getAnalysisName() {
+        return analysisName;
+    }
+
+    @Override
+    protected GATKArgumentCollection getArgumentCollection() {
+        return argCollection;
+    }
+
+    /**
+     * Required main method implementation.
+     */
+    public static void main(String[] argv) {
+        try {
+            CommandLineGATK instance = new CommandLineGATK();
+            start(instance, argv);
+            System.exit(CommandLineProgram.result); // todo -- this is a painful hack
+        } catch (UserException e) {
+            exitSystemWithUserError(e);
+        } catch (TribbleException e) {
+            // We can generate Tribble Exceptions in weird places when e.g. VCF genotype fields are
+            //   lazy loaded, so they aren't caught elsewhere and made into User Exceptions
+            exitSystemWithUserError(e);
+        } catch (SAMException e) {
+            checkForMaskedUserErrors(e);
+            exitSystemWithSamError(e);
+        } catch (OutOfMemoryError e) {
+            exitSystemWithUserError(new UserException.NotEnoughMemory());
+        } catch (Throwable t) {
+            checkForMaskedUserErrors(t);
+            exitSystemWithError(t);
+        }
+    }
+
+    public static final String PICARD_TEXT_SAM_FILE_ERROR_1 = "Cannot use index file with textual SAM file";
+    public static final String PICARD_TEXT_SAM_FILE_ERROR_2 = "Cannot retrieve file pointers within SAM text files";
+    public static final String NO_SPACE_LEFT_ON_DEVICE_ERROR = "No space left on device";
+    public static final String DISK_QUOTA_EXCEEDED_ERROR = "Disk quota exceeded";
+
+    private static void checkForMaskedUserErrors(final Throwable t) {
+        // masked out of memory error
+        if ( t instanceof OutOfMemoryError )
+            exitSystemWithUserError(new UserException.NotEnoughMemory());
+        // masked user error
+        if ( t instanceof UserException || t instanceof TribbleException )
+            exitSystemWithUserError(new UserException(t.getMessage()));
+
+        // no message means no masked error
+        final String message = t.getMessage();
+        if ( message == null )
+            return;
+
+        // too many open files error
+        if ( message.contains("Too many open files") )
+            exitSystemWithUserError(new UserException.TooManyOpenFiles());
+
+        // malformed BAM looks like a SAM file
+        if ( message.contains(PICARD_TEXT_SAM_FILE_ERROR_1) || message.contains(PICARD_TEXT_SAM_FILE_ERROR_2) )
+            exitSystemWithSamError(t);
+
+        // can't close tribble index when writing
+        if ( message.contains("Unable to close index for") )
+            exitSystemWithUserError(new UserException(t.getCause() == null ? message : t.getCause().getMessage()));
+
+        // disk is full
+        if ( message.contains(NO_SPACE_LEFT_ON_DEVICE_ERROR) || message.contains(DISK_QUOTA_EXCEEDED_ERROR) )
+            exitSystemWithUserError(new UserException.NoSpaceOnDevice());
+
+        // masked error wrapped in another one
+        if ( t.getCause() != null )
+            checkForMaskedUserErrors(t.getCause());
+    }
+
+    /**
+     * Creates the a short blurb about the GATK, copyright info, and where to get documentation.
+     *
+     * @return The application header.
+     */
+    public static List<String> createApplicationHeader() {
+        List<String> header = new ArrayList<String>();
+        header.add(String.format("The Genome Analysis Toolkit (GATK) v%s, Compiled %s",getVersionNumber(), getBuildTime()));
+        header.add("Copyright (c) 2010 The Broad Institute");
+        header.add("For support and documentation go to " + HelpConstants.BASE_GATK_URL);
+        return header;
+    }
+
+    /**
+     * If the user supplied any additional attribution, return it here.
+     * @return Additional attribution if supplied by the user.  Empty (non-null) list otherwise.
+     */
+    private List<String> getAttribution() {
+        List<String> attributionLines = new ArrayList<String>();
+
+        // If no analysis name is present, fill in extra help on the walkers.
+        WalkerManager walkerManager = engine.getWalkerManager();
+        String analysisName = getAnalysisName();
+        if(analysisName != null && walkerManager.exists(analysisName)) {
+            Class<? extends Walker> walkerType = walkerManager.getWalkerClassByName(analysisName);
+            if(walkerType.isAnnotationPresent(Attribution.class))
+                attributionLines.addAll(Arrays.asList(walkerType.getAnnotation(Attribution.class).value()));
+        }
+        return attributionLines;
+    }
+
+    /**
+     * Retrieves additional information about GATK walkers.
+     * the code in HelpFormatter and supply it as a helper to this method.
+     *
+     * @return A string summarizing the walkers available in this distribution.
+     */
+    private String getAdditionalHelp() {
+        String additionalHelp;
+
+        // If no analysis name is present, fill in extra help on the walkers.
+        WalkerManager walkerManager = engine.getWalkerManager();
+        if(analysisName != null && walkerManager.exists(analysisName))
+            additionalHelp = getWalkerHelp(walkerManager.getWalkerClassByName(analysisName));
+        else
+            additionalHelp = getAllWalkerHelp();
+
+        return additionalHelp;
+    }
+
+    private static final int PACKAGE_INDENT = 1;
+    private static final int WALKER_INDENT = 3;
+    private static final String FIELD_SEPARATOR = "  ";
+
+    private String getWalkerHelp(Class<? extends Walker> walkerType) {
+        // Construct a help string to output details on this walker.
+        StringBuilder additionalHelp = new StringBuilder();
+        Formatter formatter = new Formatter(additionalHelp);
+
+        formatter.format("Available Reference Ordered Data types:%n");
+        formatter.format(new FeatureManager().userFriendlyListOfAvailableFeatures());
+        formatter.format("%n");
+
+        formatter.format("For a full description of this walker, see its GATKdocs at:%n");
+        formatter.format("%s%n", GATKDocUtils.helpLinksToGATKDocs(walkerType));
+
+        return additionalHelp.toString();
+    }
+
+    /**
+     * Load in additional help information about all available walkers.
+     * @return A string representation of the additional help.
+     */
+    private String getAllWalkerHelp() {
+        // Construct a help string to output available walkers.
+        StringBuilder additionalHelp = new StringBuilder();
+        Formatter formatter = new Formatter(additionalHelp);
+
+        // Get the list of walker names from the walker manager.
+        WalkerManager walkerManager = engine.getWalkerManager();
+
+        // Build a list sorted by walker display name.  As this information is collected, keep track of the longest
+        // package / walker name for later formatting.
+        SortedSet<HelpEntry> helpText = new TreeSet<HelpEntry>(new HelpEntryComparator());
+        
+        int longestPackageName = 0;
+        int longestWalkerName = 0;
+        for(Map.Entry<String,Collection<Class<? extends Walker>>> walkersByPackage: walkerManager.getWalkerNamesByPackage(true).entrySet()) {
+            // Get the display name.
+            String packageName = walkersByPackage.getKey();
+            String packageDisplayName = walkerManager.getPackageDisplayName(walkersByPackage.getKey());
+            String packageHelpText = walkerManager.getPackageSummaryText(packageName);
+
+            // Compute statistics about which names is longest.
+            longestPackageName = Math.max(longestPackageName,packageDisplayName.length());
+
+            SortedSet<HelpEntry> walkersInPackage = new TreeSet<HelpEntry>(new HelpEntryComparator());
+            for(Class<? extends Walker> walkerType: walkersByPackage.getValue()) {
+                String walkerName = walkerType.getName();
+                String walkerDisplayName = walkerManager.getName(walkerType);
+                String walkerHelpText = walkerManager.getWalkerSummaryText(walkerType);                
+
+                longestWalkerName = Math.max(longestWalkerName,walkerManager.getName(walkerType).length());
+
+                walkersInPackage.add(new HelpEntry(walkerName,walkerDisplayName,walkerHelpText));
+            }
+
+            // Dump the walkers into the sorted set.
+            helpText.add(new HelpEntry(packageName,packageDisplayName,packageHelpText,Collections.unmodifiableSortedSet(walkersInPackage)));
+        }
+
+        final int headerWidth = Math.max(longestPackageName+PACKAGE_INDENT,longestWalkerName+WALKER_INDENT);
+
+
+        for(HelpEntry packageHelp: helpText) {
+            printDescriptorLine(formatter,PACKAGE_INDENT,packageHelp.displayName,headerWidth,FIELD_SEPARATOR,packageHelp.summary,TextFormattingUtils.DEFAULT_LINE_WIDTH);
+            
+            for(HelpEntry walkerHelp: packageHelp.children)
+                printDescriptorLine(formatter,WALKER_INDENT,walkerHelp.displayName,headerWidth,FIELD_SEPARATOR,walkerHelp.summary,TextFormattingUtils.DEFAULT_LINE_WIDTH);
+
+            // Print a blank line between sets of walkers.
+            printDescriptorLine(formatter,0,"",headerWidth,FIELD_SEPARATOR,"", TextFormattingUtils.DEFAULT_LINE_WIDTH);
+        }
+
+        return additionalHelp.toString();
+    }
+
+    private void printDescriptorLine(Formatter formatter,
+                                     int headerIndentWidth,
+                                     String header,
+                                     int headerWidth,
+                                     String fieldSeparator,
+                                     String description,
+                                     int lineWidth) {
+        final int headerPaddingWidth = headerWidth - header.length() - headerIndentWidth;
+        final int descriptionWidth = lineWidth - fieldSeparator.length() - headerWidth;
+        List<String> wordWrappedText = TextFormattingUtils.wordWrap(description,descriptionWidth);
+
+        String headerIndentFormatString  = headerIndentWidth  > 0 ? "%" + headerIndentWidth  + "s" : "%s";
+        String headerPaddingFormatString = headerPaddingWidth > 0 ? "%" + headerPaddingWidth + "s" : "%s";
+        String headerWidthFormatString   = headerWidth        > 0 ? "%" + headerWidth        + "s" : "%s";
+
+        // Output description line.
+        formatter.format(headerIndentFormatString + "%s" + headerPaddingFormatString + "%s%s%n",
+                "", header, "", fieldSeparator, wordWrappedText.size()>0?wordWrappedText.get(0):"");
+        for(int i = 1; i < wordWrappedText.size(); i++)
+            formatter.format(headerWidthFormatString + "%s%s%n", "", fieldSeparator, wordWrappedText.get(i));
+    }
+
+}
+
+/**
+ * Represents a given help entry; contains a display name, a summary and optionally some children.
+ */
+class HelpEntry {
+    public final String uid;
+    public final String displayName;
+    public final String summary;
+    public final SortedSet<HelpEntry> children;
+
+    /**
+     * Create a new help entry with the given display name, summary and children.
+     * @param uid a unique identifier.  Usually, the java package.
+     * @param displayName display name for this help entry.
+     * @param summary summary for this help entry.
+     * @param children children for this help entry.
+     */
+    public HelpEntry(String uid, String displayName, String summary, SortedSet<HelpEntry> children)  {
+        this.uid = uid;
+        this.displayName = displayName;
+        this.summary = summary;
+        this.children = children;
+    }
+
+    /**
+     * Create a new help entry with the given display name, summary and children.
+     * @param uid a unique identifier.  Usually, the java package.
+     * @param displayName display name for this help entry.
+     * @param summary summary for this help entry.
+     */
+    public HelpEntry(String uid, String displayName, String summary) {
+        this(uid,displayName,summary,null);
+    }
+
+}
+
+/**
+ * Compare two help entries by display name.
+ */
+class HelpEntryComparator implements Comparator<HelpEntry> {
+    private static TextFormattingUtils.CaseInsensitiveComparator textComparator = new TextFormattingUtils.CaseInsensitiveComparator();
+
+    /**
+     * Compares the order of lhs to rhs, not taking case into account.
+     * @param lhs First object to compare.
+     * @param rhs Second object to compare.
+     * @return 0 if objects are identical; -1 if lhs is before rhs, 1 if rhs is before lhs.  Nulls are treated as after everything else.
+     */
+    public int compare(HelpEntry lhs, HelpEntry rhs) {
+        if(lhs == null && rhs == null) return 0;
+        if(lhs == null || lhs.displayName.equals("")) return 1;
+        if(rhs == null || rhs.displayName.equals("")) return -1;
+        return lhs.displayName.equals(rhs.displayName) ? textComparator.compare(lhs.uid,rhs.uid) : textComparator.compare(lhs.displayName,rhs.displayName);
+    }
+
+
+}
\ No newline at end of file
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/GATKVCFUtils.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/GATKVCFUtils.java
new file mode 100644
index 0000000..c1daca1
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/GATKVCFUtils.java
@@ -0,0 +1,422 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine;
+
+import htsjdk.samtools.SAMSequenceDictionary;
+import htsjdk.tribble.index.*;
+import htsjdk.tribble.index.interval.IntervalTreeIndex;
+import htsjdk.tribble.index.linear.LinearIndex;
+import org.apache.log4j.Logger;
+import htsjdk.tribble.Feature;
+import htsjdk.tribble.index.interval.IntervalIndexCreator;
+import htsjdk.tribble.index.linear.LinearIndexCreator;
+import htsjdk.tribble.index.tabix.TabixFormat;
+import htsjdk.tribble.index.tabix.TabixIndexCreator;
+import htsjdk.tribble.readers.LineIterator;
+import htsjdk.tribble.readers.PositionalBufferedStream;
+import org.broadinstitute.gatk.utils.commandline.ArgumentTypeDescriptor;
+import org.broadinstitute.gatk.utils.commandline.RodBinding;
+import org.broadinstitute.gatk.engine.datasources.rmd.ReferenceOrderedDataSource;
+import org.broadinstitute.gatk.utils.collections.Pair;
+import htsjdk.variant.variantcontext.VariantContext;
+import htsjdk.variant.vcf.*;
+import org.broadinstitute.gatk.utils.variant.GATKVCFIndexType;
+
+import java.io.*;
+import java.lang.reflect.Field;
+import java.util.*;
+
+
+/**
+ * A set of GATK-specific static utility methods for common operations on VCF files/records.
+ */
+public class GATKVCFUtils {
+
    /**
     * Constructor access disallowed...static utility methods only!
     */
    private GATKVCFUtils() { }

    // Shared logger for all static utility methods in this class.
    public static final Logger logger = Logger.getLogger(GATKVCFUtils.class);
    // Base key for the GATK command-line header lines written into output VCF headers.
    public final static String GATK_COMMAND_LINE_KEY = "GATKCommandLine";

    // Default index settings for plain (non-GVCF) VCF output.
    public final static GATKVCFIndexType DEFAULT_INDEX_TYPE = GATKVCFIndexType.DYNAMIC_SEEK;  // by default, optimize for seek time.  All indices prior to Nov 2013 used this type.
    public final static Integer DEFAULT_INDEX_PARAMETER = -1;           // the default DYNAMIC_SEEK does not use a parameter
    // Default index settings for GVCF output, as determined experimentally Nov-Dec 2013.
    public final static GATKVCFIndexType DEFAULT_GVCF_INDEX_TYPE = GATKVCFIndexType.LINEAR;
    public final static Integer DEFAULT_GVCF_INDEX_PARAMETER = 128000;

    // GVCF file extensions (plain and block-compressed), used to auto-select GVCF index defaults.
    public final static String GVCF_EXT = "g.vcf";
    public final static String GVCF_GZ_EXT = "g.vcf.gz";

    // Message for using the deprecated --variant_index_type or --variant_index_parameter arguments.
    public final static String DEPRECATED_INDEX_ARGS_MSG = "Naming your output file using the .g.vcf extension will automatically set the appropriate values " +
            " for --variant_index_type and --variant_index_parameter";
+
+    /**
+     * Gets the appropriately formatted header for a VCF file describing this GATK run
+     *
+     * @param header the existing VCFHeader that we will be adding this command line argument header line to.  Existing
+     *               command line argument header lines will be used to generate a unique header line key.
+     * @param engine the GATK engine that holds the walker name, GATK version, and other information
+     * @param argumentSources contains information on the argument values provided to the GATK for converting to a
+     *                        command line string.  Should be provided from the data in the parsing engine.  Can be
+     *                        empty in which case the command line will be the empty string.
+     * @return VCF header line describing this run of the GATK.
+     */
+    public static VCFHeaderLine getCommandLineArgumentHeaderLine(final VCFHeader header, final GenomeAnalysisEngine engine, final Collection<Object> argumentSources) {
+        if ( engine == null ) throw new IllegalArgumentException("engine cannot be null");
+        if ( argumentSources == null ) throw new IllegalArgumentException("argumentSources cannot be null");
+
+        final Map<String, String> attributes = new LinkedHashMap<>();
+        attributes.put("ID", engine.getWalkerName());
+        attributes.put("Version", CommandLineGATK.getVersionNumber());
+        final Date date = new Date();
+        attributes.put("Date", date.toString());
+        attributes.put("Epoch", Long.toString(date.getTime()));
+        attributes.put("CommandLineOptions", engine.createApproximateCommandLineArgumentString(argumentSources.toArray()));
+
+        // in case the walker name contains space, remove any spaces
+        String key = getCommandLineKey(header, engine.getWalkerName().replaceAll("\\s", ""));
+        return new VCFSimpleHeaderLine(key, attributes);
+    }
+
+    // create a unique command line argument header line key.  This method will look for existing
+    // keys using the same walker name and append a count after it to make it unique.
+    private static String getCommandLineKey(final VCFHeader header, final String walkerName) {
+        final Iterator<VCFHeaderLine> existingMetaDataIterator = header.getMetaDataInInputOrder().iterator();
+
+        // the command line argument keys are in the format GATK_COMMAND_LINE_KEY.(walker name)
+        final String searchKey = String.format("%s.%s", GATK_COMMAND_LINE_KEY, walkerName);
+
+        int commandLineKeyCount = 0;
+        VCFHeaderLine line;
+        while ( existingMetaDataIterator.hasNext() ) {
+            line = existingMetaDataIterator.next();
+            // if we find another key that starts with the same text as the walker
+            if ( line.getKey().startsWith(searchKey) )
+                commandLineKeyCount++;
+        }
+
+        // if there are no existing keys with this same walker name, then just return the
+        // GATK_COMMAND_LINE_KEY.(walker name) format
+        if ( commandLineKeyCount == 0 )
+            return searchKey;
+        // otherwise append the count associated with this new command (existing + 1)
+        else
+            return String.format("%s.%d", searchKey, commandLineKeyCount+1);
+    }
+
+    public static <T extends Feature> Map<String, VCFHeader> getVCFHeadersFromRods(GenomeAnalysisEngine toolkit, List<RodBinding<T>> rodBindings) {
+        // Collect the eval rod names
+        final Set<String> names = new TreeSet<String>();
+        for ( final RodBinding<T> evalRod : rodBindings )
+            names.add(evalRod.getName());
+        return getVCFHeadersFromRods(toolkit, names);
+    }
+
+    public static Map<String, VCFHeader> getVCFHeadersFromRods(GenomeAnalysisEngine toolkit) {
+        return getVCFHeadersFromRods(toolkit, (Collection<String>)null);
+    }
+
+    public static Map<String, VCFHeader> getVCFHeadersFromRods(GenomeAnalysisEngine toolkit, Collection<String> rodNames) {
+        Map<String, VCFHeader> data = new HashMap<String, VCFHeader>();
+
+        // iterate to get all of the sample names
+        List<ReferenceOrderedDataSource> dataSources = toolkit.getRodDataSources();
+        for ( ReferenceOrderedDataSource source : dataSources ) {
+            // ignore the rod if it's not in our list
+            if ( rodNames != null && !rodNames.contains(source.getName()) )
+                continue;
+
+            if ( source.getHeader() != null && source.getHeader() instanceof VCFHeader )
+                data.put(source.getName(), (VCFHeader)source.getHeader());
+        }
+
+        return data;
+    }
+
+    public static Map<String,VCFHeader> getVCFHeadersFromRodPrefix(GenomeAnalysisEngine toolkit,String prefix) {
+        Map<String, VCFHeader> data = new HashMap<String, VCFHeader>();
+
+        // iterate to get all of the sample names
+        List<ReferenceOrderedDataSource> dataSources = toolkit.getRodDataSources();
+        for ( ReferenceOrderedDataSource source : dataSources ) {
+            // ignore the rod if lacks the prefix
+            if ( ! source.getName().startsWith(prefix) )
+                continue;
+
+            if ( source.getHeader() != null && source.getHeader() instanceof VCFHeader )
+                data.put(source.getName(), (VCFHeader)source.getHeader());
+        }
+
+        return data;
+    }
+
+    /**
+     * Gets the header fields from all VCF rods input by the user
+     *
+     * @param toolkit    GATK engine
+     *
+     * @return a set of all fields
+     */
+    public static Set<VCFHeaderLine> getHeaderFields(GenomeAnalysisEngine toolkit) {
+        return getHeaderFields(toolkit, null);
+    }
+
+    /**
+     * Gets the header fields from all VCF rods input by the user
+     *
+     * @param toolkit    GATK engine
+     * @param rodNames   names of rods to use, or null if we should use all possible ones
+     *
+     * @return a set of all fields
+     */
+    public static Set<VCFHeaderLine> getHeaderFields(GenomeAnalysisEngine toolkit, Collection<String> rodNames) {
+
+        // keep a map of sample name to occurrences encountered
+        TreeSet<VCFHeaderLine> fields = new TreeSet<VCFHeaderLine>();
+
+        // iterate to get all of the sample names
+        List<ReferenceOrderedDataSource> dataSources = toolkit.getRodDataSources();
+        for ( ReferenceOrderedDataSource source : dataSources ) {
+            // ignore the rod if it's not in our list
+            if ( rodNames != null && !rodNames.contains(source.getName()) )
+                continue;
+
+            if ( source.getRecordType().equals(VariantContext.class)) {
+                VCFHeader header = (VCFHeader)source.getHeader();
+                if ( header != null )
+                    fields.addAll(header.getMetaDataInSortedOrder());
+            }
+        }
+
+        return fields;
+    }
+
+    /**
+     * Add / replace the contig header lines in the VCFHeader with the information in the GATK engine
+     *
+     * @param header the header to update
+     * @param engine the GATK engine containing command line arguments and the master sequence dictionary
+     */
+    public static VCFHeader withUpdatedContigs(final VCFHeader header, final GenomeAnalysisEngine engine) {
+        return VCFUtils.withUpdatedContigs(header, engine.getArguments().referenceFile, engine.getMasterSequenceDictionary());
+    }
+
    /**
     * Create and return an IndexCreator, without a sequence dictionary.
     *
     * @param type requested index type (ignored for block-compressed outputs, which always get a Tabix index)
     * @param parameter index parameter (bin width for LINEAR, features-per-bin for INTERVAL; unused by DYNAMIC types)
     * @param outFile the output file being indexed
     * @return a configured IndexCreator
     */
    public static IndexCreator getIndexCreator(GATKVCFIndexType type, int parameter, File outFile) {
        return getIndexCreator(type, parameter, outFile, null);
    }
+
+    /**
+     * Create and return an IndexCreator
+     * @param type
+     * @param parameter
+     * @param outFile
+     * @param sequenceDictionary
+     * @return
+     */
+    public static IndexCreator getIndexCreator(GATKVCFIndexType type, int parameter, File outFile, SAMSequenceDictionary sequenceDictionary) {
+        if (ArgumentTypeDescriptor.isCompressed(outFile.toString())) {
+            if (type != GATKVCFUtils.DEFAULT_INDEX_TYPE || parameter != GATKVCFUtils.DEFAULT_INDEX_PARAMETER)
+                logger.warn("Creating Tabix index for " + outFile + ", ignoring user-specified index type and parameter");
+
+            if (sequenceDictionary == null)
+                return new TabixIndexCreator(TabixFormat.VCF);
+            else
+                return new TabixIndexCreator(sequenceDictionary, TabixFormat.VCF);
+        }
+
+        IndexCreator idxCreator;
+        switch (type) {
+            case DYNAMIC_SEEK: idxCreator = new DynamicIndexCreator(outFile, IndexFactory.IndexBalanceApproach.FOR_SEEK_TIME); break;
+            case DYNAMIC_SIZE: idxCreator = new DynamicIndexCreator(outFile, IndexFactory.IndexBalanceApproach.FOR_SIZE); break;
+            case LINEAR: idxCreator = new LinearIndexCreator(outFile, parameter); break;
+            case INTERVAL: idxCreator = new IntervalIndexCreator(outFile, parameter); break;
+            default: throw new IllegalArgumentException("Unknown IndexCreator type: " + type);
+        }
+
+        return idxCreator;
+    }
+
+    /**
+     * Read all of the VCF records from source into memory, returning the header and the VariantContexts
+     *
+     * SHOULD ONLY BE USED FOR UNIT/INTEGRATION TESTING PURPOSES!
+     *
+     * @param source the file to read, must be in VCF4 format
+     * @return
+     * @throws java.io.IOException
+     */
+    public static Pair<VCFHeader, List<VariantContext>> readVCF(final File source) throws IOException {
+        // read in the features
+        final List<VariantContext> vcs = new ArrayList<VariantContext>();
+        final VCFCodec codec = new VCFCodec();
+        PositionalBufferedStream pbs = new PositionalBufferedStream(new FileInputStream(source));
+        final LineIterator vcfSource = codec.makeSourceFromStream(pbs);
+        try {
+            final VCFHeader vcfHeader = (VCFHeader) codec.readActualHeader(vcfSource);
+
+            while (vcfSource.hasNext()) {
+                final VariantContext vc = codec.decode(vcfSource);
+                if ( vc != null )
+                    vcs.add(vc);
+            }
+
+            return new Pair<VCFHeader, List<VariantContext>>(vcfHeader, vcs);
+        } finally {
+            codec.close(vcfSource);
+        }
+    }
+
+    /**
+     * Check if the two indices are equivalent.
+     *
+     * Two indices are considered equivalent when their version, indexed file,
+     * indexed file size, indexed file MD5, and flags all match.
+     *
+     * @param thisIndex index
+     * @param otherIndex index
+     * @return true if indices are equivalent, false otherwise.
+     */
+    public static boolean equivalentAbstractIndices(AbstractIndex thisIndex, AbstractIndex otherIndex){
+        // compare each piece of index metadata in turn, bailing out on the first mismatch
+        if (thisIndex.getVersion() != otherIndex.getVersion())
+            return false;
+        if (!thisIndex.getIndexedFile().equals(otherIndex.getIndexedFile()))
+            return false;
+        if (thisIndex.getIndexedFileSize() != otherIndex.getIndexedFileSize())
+            return false;
+        if (!thisIndex.getIndexedFileMD5().equals(otherIndex.getIndexedFileMD5()))
+            return false;
+        return thisIndex.getFlags() == otherIndex.getFlags();
+    }
+
+    /**
+     * Check if the two linear indices are equivalent for a chromosome.
+     *
+     * @param thisIndex index
+     * @param otherIndex index
+     * @param chr chromosome
+     * @return true if indices are equivalent, false otherwise.
+     * @throws NoSuchFieldException if index does not exist for a chromosome
+     * @throws IllegalAccessException if index does not exist for a chromosome
+     */
+    public static boolean equivalentLinearIndices(LinearIndex thisIndex, LinearIndex otherIndex, String chr) throws NoSuchFieldException, IllegalAccessException {
+        final htsjdk.tribble.index.linear.LinearIndex.ChrIndex lhs = (htsjdk.tribble.index.linear.LinearIndex.ChrIndex) getChrIndex(thisIndex, chr);
+        final htsjdk.tribble.index.linear.LinearIndex.ChrIndex rhs = (htsjdk.tribble.index.linear.LinearIndex.ChrIndex) getChrIndex(otherIndex, chr);
+
+        if (!lhs.getName().equals(rhs.getName()))
+            return false;
+        //if (lhs.getTotalSize() != rhs.getTotalSize()) return false;      TODO: why does this differ?
+        if (lhs.getNFeatures() != rhs.getNFeatures())
+            return false;
+        return lhs.getNBlocks() == rhs.getNBlocks();
+    }
+
+    /**
+     * Check if the two interval indices are equivalent for a chromosome.
+     *
+     * @param thisIndex interval index
+     * @param otherIndex interval index
+     * @param chr chromosome
+     * @return true if indices are equivalent, false otherwise.
+     * @throws NoSuchFieldException if index does not exist for a chromosome
+     * @throws IllegalAccessException if index does not exist for a chromosome
+     */
+    public static boolean equivalentIntervalIndices(IntervalTreeIndex thisIndex, IntervalTreeIndex otherIndex, String chr) throws NoSuchFieldException, IllegalAccessException {
+        final htsjdk.tribble.index.interval.IntervalTreeIndex.ChrIndex lhs = (htsjdk.tribble.index.interval.IntervalTreeIndex.ChrIndex) getChrIndex(thisIndex, chr);
+        final htsjdk.tribble.index.interval.IntervalTreeIndex.ChrIndex rhs = (htsjdk.tribble.index.interval.IntervalTreeIndex.ChrIndex) getChrIndex(otherIndex, chr);
+
+        // TODO: compare trees? Currently only the chromosome names are compared.
+        return lhs.getName().equals(rhs.getName());
+    }
+
+    /**
+     * Get index for a chromosome.
+     *
+     * Uses reflection to reach into AbstractIndex's private "chrIndices" field,
+     * since htsjdk does not expose the per-chromosome index map publicly.
+     * FOR TEST-SUPPORT USE ONLY: breaks if htsjdk renames the field.
+     *
+     * @param index index
+     * @param chr chromosome
+     * @return index for the chromosome, or null if no index is recorded for chr
+     * @throws NoSuchFieldException if the private "chrIndices" field is missing (htsjdk layout changed)
+     * @throws IllegalAccessException if the field cannot be made accessible
+     */
+    @SuppressWarnings("unchecked")  // reflective read of a generic field cannot be checked; key/value types come from htsjdk
+    public static ChrIndex getChrIndex(AbstractIndex index, String chr) throws NoSuchFieldException, IllegalAccessException {
+        final Field f = AbstractIndex.class.getDeclaredField("chrIndices");
+        f.setAccessible(true);
+        final LinkedHashMap<String, ChrIndex> chrIndices = (LinkedHashMap<String, ChrIndex>) f.get(index);
+        return chrIndices.get(chr);
+    }
+
+    /**
+     * Make an IndexCreator
+     *
+     * @param variantIndexType variant indexing strategy
+     * @param variantIndexParameter variant indexing parameter
+     * @param outputFile output variant file
+     * @param sequenceDictionary collection of SAM sequence records
+     * @return IndexCreator
+     */
+    public static IndexCreator makeIndexCreator(final GATKVCFIndexType variantIndexType, final int variantIndexParameter, final File outputFile, final SAMSequenceDictionary sequenceDictionary) {
+        /*
+        * If the (deprecated) index arguments were explicitly set, honor them but log a warning.
+        * Otherwise, if the output file has a GVCF extension (.g.vcf or .g.vcf.gz), use the default GVCF indexing.
+        * Otherwise, use the default index type and parameter.
+        */
+        GATKVCFIndexType indexType = DEFAULT_INDEX_TYPE;
+        int indexParameter = DEFAULT_INDEX_PARAMETER;
+        if (usingNonDefaultIndexingArguments(variantIndexType, variantIndexParameter)) {
+            indexType = variantIndexType;
+            indexParameter = variantIndexParameter;
+            logger.warn(DEPRECATED_INDEX_ARGS_MSG);
+        } else if (outputFile.getName().endsWith("."  + GVCF_EXT) || outputFile.getName().endsWith("."  + GVCF_GZ_EXT)) {
+            indexType = DEFAULT_GVCF_INDEX_TYPE;
+            indexParameter = DEFAULT_GVCF_INDEX_PARAMETER;
+        }
+
+        return getIndexCreator(indexType, indexParameter, outputFile, sequenceDictionary);
+    }
+
+    /**
+     * Check if not using the default indexing arguments' values.
+     *
+     * @param variantIndexType variant indexing strategy
+     * @param variantIndexParameter variant indexing parameter
+     * @return true if the index type or parameter are not the default values, false otherwise
+     */
+    public static boolean usingNonDefaultIndexingArguments(final GATKVCFIndexType variantIndexType, final int variantIndexParameter) {
+        // true unless both values match the engine-wide defaults
+        final boolean allDefaults = variantIndexType == GATKVCFUtils.DEFAULT_INDEX_TYPE
+                && variantIndexParameter == GATKVCFUtils.DEFAULT_INDEX_PARAMETER;
+        return !allDefaults;
+    }
+
+    /**
+     * Check if using the GVCF indexing arguments' values.
+     *
+     * @param variantIndexType variant indexing strategy
+     * @param variantIndexParameter variant indexing parameter
+     * @return true if the index type and parameter are the default GVCF values, false otherwise
+     */
+    public static boolean usingGVCFIndexingArguments(final GATKVCFIndexType variantIndexType, final int variantIndexParameter) {
+        final boolean typeMatches = variantIndexType == GATKVCFUtils.DEFAULT_GVCF_INDEX_TYPE;
+        final boolean parameterMatches = variantIndexParameter == GATKVCFUtils.DEFAULT_GVCF_INDEX_PARAMETER;
+        return typeMatches && parameterMatches;
+    }
+}
\ No newline at end of file
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/GenomeAnalysisEngine.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/GenomeAnalysisEngine.java
new file mode 100644
index 0000000..be2bf61
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/GenomeAnalysisEngine.java
@@ -0,0 +1,1325 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine;
+
+import com.google.java.contract.Ensures;
+import htsjdk.samtools.SAMFileHeader;
+import htsjdk.samtools.SAMRecord;
+import htsjdk.samtools.SAMSequenceDictionary;
+import htsjdk.samtools.reference.IndexedFastaSequenceFile;
+import htsjdk.samtools.reference.ReferenceSequenceFile;
+import htsjdk.variant.vcf.VCFConstants;
+import org.apache.log4j.Logger;
+import org.broadinstitute.gatk.engine.arguments.GATKArgumentCollection;
+import org.broadinstitute.gatk.engine.filters.DisableableReadFilter;
+import org.broadinstitute.gatk.utils.downsampling.DownsampleType;
+import org.broadinstitute.gatk.utils.ValidationExclusion;
+import org.broadinstitute.gatk.engine.datasources.reads.*;
+import org.broadinstitute.gatk.engine.datasources.reference.ReferenceDataSource;
+import org.broadinstitute.gatk.engine.datasources.rmd.ReferenceOrderedDataSource;
+import org.broadinstitute.gatk.utils.downsampling.DownsamplingMethod;
+import org.broadinstitute.gatk.engine.executive.MicroScheduler;
+import org.broadinstitute.gatk.engine.filters.FilterManager;
+import org.broadinstitute.gatk.engine.filters.ReadFilter;
+import org.broadinstitute.gatk.engine.filters.ReadGroupBlackListFilter;
+import org.broadinstitute.gatk.engine.io.OutputTracker;
+import org.broadinstitute.gatk.engine.io.stubs.Stub;
+import org.broadinstitute.gatk.engine.iterators.ReadTransformer;
+import org.broadinstitute.gatk.engine.iterators.ReadTransformersMode;
+import org.broadinstitute.gatk.engine.phonehome.GATKRunReport;
+import org.broadinstitute.gatk.utils.io.ReferenceBacked;
+import org.broadinstitute.gatk.utils.refdata.tracks.IndexDictionaryUtils;
+import org.broadinstitute.gatk.utils.refdata.tracks.RMDTrackBuilder;
+import org.broadinstitute.gatk.utils.refdata.utils.RMDTriplet;
+import org.broadinstitute.gatk.engine.resourcemanagement.ThreadAllocation;
+import org.broadinstitute.gatk.engine.samples.SampleDB;
+import org.broadinstitute.gatk.engine.samples.SampleDBBuilder;
+import org.broadinstitute.gatk.engine.walkers.*;
+import org.broadinstitute.gatk.utils.genotyper.IndexedSampleList;
+import org.broadinstitute.gatk.utils.genotyper.SampleList;
+import org.broadinstitute.gatk.utils.*;
+import org.broadinstitute.gatk.utils.classloader.PluginManager;
+import org.broadinstitute.gatk.utils.commandline.*;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+import org.broadinstitute.gatk.utils.exceptions.UserException;
+import org.broadinstitute.gatk.utils.interval.IntervalUtils;
+import org.broadinstitute.gatk.utils.progressmeter.ProgressMeter;
+import org.broadinstitute.gatk.engine.recalibration.BQSRArgumentSet;
+import org.broadinstitute.gatk.utils.sam.ReadUtils;
+import org.broadinstitute.gatk.utils.sam.SAMReaderID;
+import org.broadinstitute.gatk.utils.text.XReadLines;
+import org.broadinstitute.gatk.utils.threading.ThreadEfficiencyMonitor;
+
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.util.*;
+import java.util.concurrent.TimeUnit;
+
+import static org.broadinstitute.gatk.utils.DeprecatedToolChecks.getWalkerDeprecationInfo;
+import static org.broadinstitute.gatk.utils.DeprecatedToolChecks.isDeprecatedWalker;
+
+/**
+ * A GenomeAnalysisEngine that runs a specified walker.
+ */
+public class GenomeAnalysisEngine {
+    /**
+     * our log, which we want to capture anything from this class
+     */
+    private static Logger logger = Logger.getLogger(GenomeAnalysisEngine.class);
+    public static final long NO_RUNTIME_LIMIT = -1;
+
+    /**
+     * The GATK command-line argument parsing code.
+     */
+    private ParsingEngine parsingEngine;
+
+    /**
+     * The genomeLocParser can create and parse GenomeLocs.
+     */
+    private GenomeLocParser genomeLocParser;
+
+    /**
+     * Accessor for sharded read data.
+     */
+    private SAMDataSource readsDataSource = null;
+
+    /**
+     * Accessor for sharded reference data.
+     */
+    private ReferenceDataSource referenceDataSource = null;
+
+    /**
+     * Accessor for sample metadata
+     */
+    private SampleDB sampleDB = new SampleDB();
+
+    /**
+     * Accessor for sharded reference-ordered data.
+     */
+    private List<ReferenceOrderedDataSource> rodDataSources;
+
+    // our argument collection
+    private GATKArgumentCollection argCollection;
+
+    /**
+     * Collection of intervals used by the engine.
+     */
+    private GenomeLocSortedSet intervals = null;
+
+    /**
+     * Explicitly assign the interval set to use for this traversal (for unit testing purposes).
+     * Overwrites any interval set computed during engine initialization; may be null.
+     * @param intervals set of intervals to use for this traversal
+     */
+    public void setIntervals( GenomeLocSortedSet intervals ) {
+        this.intervals = intervals;
+    }
+
+    /**
+     * Collection of inputs used by the engine.
+     */
+    private Map<ArgumentSource, Object> inputs = new HashMap<ArgumentSource, Object>();
+
+    /**
+     * Collection of outputs used by the engine.
+     */
+    private Collection<Stub<?>> outputs = new ArrayList<Stub<?>>();
+
+    /**
+     * Collection of the filters applied to the input data.
+     */
+    private Collection<ReadFilter> filters;
+
+    /**
+     * Collection of the read transformers applied to the reads
+     */
+    private List<ReadTransformer> readTransformers;
+
+    /**
+     * Controls the allocation of threads between CPU vs IO.
+     */
+    private ThreadAllocation threadAllocation;
+
+    private ReadMetrics cumulativeMetrics = null;
+
+    /**
+     * A currently hacky unique name for this GATK instance
+     */
+    private String myName = "GATK_" + Math.abs(Utils.getRandomGenerator().nextInt());
+
+    /**
+     * our walker manager
+     */
+    private final WalkerManager walkerManager = new WalkerManager();
+
+    private Walker<?, ?> walker;
+
+    /**
+     * Set the walker this engine will run.
+     * @param walker the walker to run; must be non-null by the time execute() is called
+     */
+    public void setWalker(Walker<?, ?> walker) {
+        this.walker = walker;
+    }
+
+    /**
+     * The short name of the current GATK walker as a string.
+     * Requires that setWalker() has already been called (otherwise walker is null and this throws NPE).
+     * @return a non-null String
+     */
+    public String getWalkerName() {
+        return getWalkerName(walker.getClass());
+    }
+
+    /**
+     * A processed collection of SAM reader identifiers.
+     */
+    private Collection<SAMReaderID> samReaderIDs = Collections.emptyList();
+
+    /**
+     * Set the SAM/BAM files over which to traverse.
+     * @param samReaderIDs Collection of ids to use during this traversal.
+     */
+    public void setSAMFileIDs(Collection<SAMReaderID> samReaderIDs) {
+        this.samReaderIDs = samReaderIDs;
+    }
+
+    /**
+     * Collection of reference metadata files over which to traverse.
+     */
+    private Collection<RMDTriplet> referenceMetaDataFiles;
+
+    /**
+     * The threading efficiency monitor we use in the GATK to monitor our efficiency.
+     *
+     * May be null if one isn't active, or hasn't be initialized yet
+     */
+    private ThreadEfficiencyMonitor threadEfficiencyMonitor = null;
+
+    /**
+     * The global progress meter we are using to track our progress through the genome
+     */
+    private ProgressMeter progressMeter = null;
+
+    /**
+     * Set the reference metadata files to use for this traversal.
+     * @param referenceMetaDataFiles Collection of files and descriptors over which to traverse.
+     */
+    public void setReferenceMetaDataFiles(Collection<RMDTriplet> referenceMetaDataFiles) {
+        this.referenceMetaDataFiles = referenceMetaDataFiles;
+    }
+
+    /**
+     * The maximum runtime of this engine, in nanoseconds, set during engine initialization
+     * from the GATKArgumentCollection command line value
+     */
+    private long runtimeLimitInNanoseconds = -1;
+
+    /**
+     *  Base Quality Score Recalibration helper object
+     */
+    private BQSRArgumentSet bqsrArgumentSet = null;
+    public BQSRArgumentSet getBQSRArgumentSet() { return bqsrArgumentSet; }
+    public boolean hasBQSRArgumentSet() { return bqsrArgumentSet != null; }
+    public void setBaseRecalibration(final GATKArgumentCollection args) {
+        bqsrArgumentSet = new BQSRArgumentSet(args);
+    }
+
+    /**
+     * Actually run the GATK with the specified walker.
+     *
+     * Performs the full engine setup in a fixed order (argument/walker validation,
+     * runtime limits, thread allocation, data sources, intervals, sample DB, output
+     * streams) and then hands the walker to a MicroScheduler for the traversal itself.
+     *
+     * @return the value of this traversal.
+     */
+    public Object execute() {
+        // first thing is to make sure the AWS keys can be decrypted
+        GATKRunReport.checkAWSAreValid();
+
+        //HeapSizeMonitor monitor = new HeapSizeMonitor();
+        //monitor.start();
+        setStartTime(new java.util.Date());
+
+        final GATKArgumentCollection args = this.getArguments();
+
+        // validate our parameters
+        if (args == null) {
+            throw new ReviewedGATKException("The GATKArgumentCollection passed to GenomeAnalysisEngine cannot be null.");
+        }
+
+        // validate the walker
+        if (this.walker == null)
+            throw new ReviewedGATKException("The walker passed to GenomeAnalysisEngine cannot be null.");
+
+        // check that active region walkers do not use the downsampling to coverage argument
+        checkDownSamplingToCoverage();
+        
+        if (args.nonDeterministicRandomSeed)
+            Utils.resetRandomGenerator(System.currentTimeMillis());
+
+        // if the user specified an input BQSR recalibration table then enable on the fly recalibration
+        if (args.BQSR_RECAL_FILE != null)
+            setBaseRecalibration(args);
+
+        // setup the runtime limits
+        setupRuntimeLimits(args);
+
+        // Determine how the threads should be divided between CPU vs. IO.
+        determineThreadAllocation();
+
+        // Prepare the data for traversal.
+        initializeDataSources();
+
+        // initialize and validate the interval list
+        initializeIntervals();
+        validateSuppliedIntervals();
+
+        // check to make sure that all sequence dictionaries are compatible with the reference's sequence dictionary
+        validateDataSourcesAgainstReference(readsDataSource, referenceDataSource.getReference(), rodDataSources);
+
+        // initialize sampleDB
+        initializeSampleDB();
+
+        // our microscheduler, which is in charge of running everything
+        MicroScheduler microScheduler = createMicroscheduler();
+        threadEfficiencyMonitor = microScheduler.getThreadEfficiencyMonitor();
+
+        // create temp directories as necessary
+        initializeTempDirectory();
+
+        // create the output streams
+        initializeOutputStreams(microScheduler.getOutputTracker());
+
+        // Initializing the shard iterator / BAM schedule might take some time, so let the user know vaguely what's going on
+        logger.info("Preparing for traversal" +
+                    (readsDataSource.getReaderIDs().size() > 0 ? String.format(" over %d BAM files", readsDataSource.getReaderIDs().size()) : ""));
+        Iterable<Shard> shardStrategy = getShardStrategy(readsDataSource,microScheduler.getReference(),intervals);
+        logger.info("Done preparing for traversal");
+
+        // execute the microscheduler, storing the results
+        return microScheduler.execute(this.walker, shardStrategy);
+
+        // NOTE(review): everything below is unreachable remnants of an old heap-monitoring
+        // experiment, left commented out in the original.
+        //monitor.stop();
+        //logger.info(String.format("Maximum heap size consumed: %d",monitor.getMaxMemoryUsed()));
+
+        //return result;
+    }
+
+    /**
+     * Retrieves an instance of the walker based on the walker name.
+     *
+     * @param walkerName Name of the walker.  Must not be null.  If the walker cannot be instantiated, an exception will be thrown.
+     * @return An instance of the walker.
+     * @throws UserException.DeprecatedWalker if instantiation fails and the name matches a
+     *         known-deprecated walker, so the user gets migration guidance instead of a generic error
+     * @throws UserException if the walker cannot be created for any other reason
+     */
+    public Walker<?, ?> getWalkerByName(String walkerName) {
+        try {
+            return walkerManager.createByName(walkerName);
+        } catch ( UserException e ) {
+            // swap in a more informative exception for walkers that were removed/renamed
+            if ( isDeprecatedWalker(walkerName) ) {
+                e = new UserException.DeprecatedWalker(walkerName, getWalkerDeprecationInfo(walkerName));
+            }
+            throw e;
+        }
+    }
+
+    /**
+     * Gets the name of a given walker type, delegating the lookup to the walker manager.
+     * @param walkerType Type of walker.
+     * @return Name of the walker.
+     */
+    public String getWalkerName(Class<? extends Walker> walkerType) {
+        return walkerManager.getName(walkerType);
+    }
+
+    /**
+     * A unique (randomly generated) name for this GATK instance.
+     * @return the instance name, of the form "GATK_<number>"
+     */
+    public String getName() {
+        return myName;
+    }
+
+    /**
+     * Gets a list of the filters to associate with the given walker.  Will NOT initialize the engine with these filters;
+     * the caller must handle that directly.
+     *
+     * Ordering: user-requested filters first, then the walker's default filters.  This ordering is
+     * critically important if users need to apply filters that fix up reads that would be removed by
+     * default walker filters.  Finally, user-disabled filters are removed; only filters implementing
+     * DisableableReadFilter may be disabled.
+     *
+     * @return An unmodifiable collection of the active filters.
+     * @throws IllegalStateException if the user asks to disable a filter that is not disableable
+     */
+    public Collection<ReadFilter> createFilters() {
+        final List<ReadFilter> filters = new LinkedList<>();
+
+        // First add the user requested filters
+        if (this.getArguments().readGroupBlackList != null && !this.getArguments().readGroupBlackList.isEmpty())
+            filters.add(new ReadGroupBlackListFilter(this.getArguments().readGroupBlackList));
+        for(final String filterName: this.getArguments().readFilters)
+            filters.add(this.getFilterManager().createByName(filterName));
+
+        // now add the walker default filters.  This ordering is critically important if
+        // users need to apply filters that fix up reads that would be removed by default walker filters
+        filters.addAll(WalkerManager.getReadFilters(walker,this.getFilterManager()));
+
+        // disable user-specified read filters, if allowed
+        for(final String filterName: this.getArguments().disabledReadFilters) {
+            final ReadFilter filterToDisable = this.getFilterManager().createByName(filterName);
+            if (! (filterToDisable instanceof DisableableReadFilter))
+                throw new IllegalStateException(filterToDisable + " cannot be disabled");
+
+            // remove in place via the iterator -- safe during iteration, and avoids
+            // copying the whole list once per disabled filter
+            for (final Iterator<ReadFilter> it = filters.iterator(); it.hasNext(); ) {
+                if (it.next().getClass() == filterToDisable.getClass())
+                    it.remove();
+            }
+        }
+
+        return Collections.unmodifiableList(filters);
+    }
+
+    /**
+     * Discovers, initializes, and installs the active read transformers on this engine.
+     * Note: despite the plugin discovery returning a list, nothing is returned here --
+     * the result is stored via setReadTransformers(), which also sorts and validates it.
+     *
+     * @param walker the walker we need to apply read transformers to
+     */
+    public void initializeReadTransformers(final Walker walker) {
+        // keep a list of the active read transformers sorted based on priority ordering
+        List<ReadTransformer> activeTransformers = new ArrayList<ReadTransformer>();
+
+        // a walker-level @ReadTransformersMode annotation may force when transformers are applied
+        final ReadTransformersMode overrideMode = WalkerManager.getWalkerAnnotation(walker, ReadTransformersMode.class);
+        final ReadTransformer.ApplicationTime overrideTime = overrideMode != null ? overrideMode.ApplicationTime() : null;
+
+        final PluginManager<ReadTransformer> pluginManager = new PluginManager<ReadTransformer>(ReadTransformer.class);
+
+        // instantiate every known transformer, initialize it, and keep only the enabled ones
+        for ( final ReadTransformer transformer : pluginManager.createAllTypes() ) {
+            transformer.initialize(overrideTime, this, walker);
+            if ( transformer.enabled() )
+                activeTransformers.add(transformer);
+        }
+
+        setReadTransformers(activeTransformers);
+    }
+
+    public List<ReadTransformer> getReadTransformers() {
+        return readTransformers;
+    }
+
+    /**
+     * Sanity checks that incompatible read transformers are not active together (and throws an exception if they are).
+     * At most one MUST_BE_FIRST and at most one MUST_BE_LAST transformer may be active.
+     *
+     * @param readTransformers   the active read transformers; must not be null
+     * @throws IllegalArgumentException if readTransformers is null
+     * @throws UserException.IncompatibleReadFiltersException if two transformers claim the same
+     *         ordering slot (note: the exception type says "filters" but is used here for transformers)
+     */
+    protected void checkActiveReadTransformers(final List<ReadTransformer> readTransformers) {
+        if ( readTransformers == null )
+            throw new IllegalArgumentException("read transformers cannot be null");
+
+        ReadTransformer sawMustBeFirst = null;
+        ReadTransformer sawMustBeLast  = null;
+
+        for ( final ReadTransformer r : readTransformers ) {
+            if ( r.getOrderingConstraint() == ReadTransformer.OrderingConstraint.MUST_BE_FIRST ) {
+                if ( sawMustBeFirst != null )
+                    throw new UserException.IncompatibleReadFiltersException(sawMustBeFirst.toString(), r.toString());
+                sawMustBeFirst = r;
+            } else if ( r.getOrderingConstraint() == ReadTransformer.OrderingConstraint.MUST_BE_LAST ) {
+                if ( sawMustBeLast != null )
+                    throw new UserException.IncompatibleReadFiltersException(sawMustBeLast.toString(), r.toString());
+                sawMustBeLast = r;
+            }
+        }
+    }
+
+    /**
+     * Install the given read transformers on this engine, after sorting them into
+     * priority order and validating their ordering constraints.
+     * Note: sorts the caller's list in place.
+     *
+     * @param readTransformers the transformers to install; must not be null
+     * @throws ReviewedGATKException if readTransformers is null
+     */
+    protected void setReadTransformers(final List<ReadTransformer> readTransformers) {
+        if ( readTransformers == null )
+            throw new ReviewedGATKException("read transformers cannot be null");
+
+        // sort them in priority order
+        Collections.sort(readTransformers, new ReadTransformer.ReadTransformerComparator());
+
+        // make sure we don't have an invalid set of active read transformers
+        checkActiveReadTransformers(readTransformers);
+
+        this.readTransformers = readTransformers;
+    }
+
+    /**
+     * Parse out the thread allocation from the given command-line argument.
+     */
+    private void determineThreadAllocation() {
+        if ( argCollection.numberOfDataThreads < 1 ) throw new UserException.BadArgumentValue("num_threads", "cannot be less than 1, but saw " + argCollection.numberOfDataThreads);
+        if ( argCollection.numberOfCPUThreadsPerDataThread < 1 ) throw new UserException.BadArgumentValue("num_cpu_threads", "cannot be less than 1, but saw " + argCollection.numberOfCPUThreadsPerDataThread);
+        if ( argCollection.numberOfIOThreads < 0 ) throw new UserException.BadArgumentValue("num_io_threads", "cannot be less than 0, but saw " + argCollection.numberOfIOThreads);
+
+        this.threadAllocation = new ThreadAllocation(argCollection.numberOfDataThreads,
+                argCollection.numberOfCPUThreadsPerDataThread,
+                argCollection.numberOfIOThreads,
+                argCollection.monitorThreadEfficiency);
+    }
+
+    /**
+     * @return the total number of threads in use by this engine, or 1 if the
+     *         thread allocation has not yet been determined
+     */
+    public int getTotalNumberOfThreads() {
+        if ( this.threadAllocation == null )
+            return 1;
+        return threadAllocation.getTotalNumThreads();
+    }
+
+    /**
+     * Allow subclasses and others within this package direct access to the walker manager.
+     * @return The walker manager used by this package.
+     */
+    protected WalkerManager getWalkerManager() {
+        return walkerManager;
+    }
+    
+    /**
+     * setup a microscheduler
+     *
+     * @return a new microscheduler
+     */
+    private MicroScheduler createMicroscheduler() {
+        // Temporarily require all walkers to have a reference, even if that reference is not conceptually necessary.
+        if ((walker instanceof ReadWalker || walker instanceof DuplicateWalker || walker instanceof ReadPairWalker) &&
+                this.getArguments().referenceFile == null) {
+            throw new UserException.CommandLineException("Read-based traversals require a reference file but none was given");
+        }
+
+        return MicroScheduler.create(this,walker,this.getReadsDataSource(),this.getReferenceDataSource().getReference(),this.getRodDataSources(),threadAllocation);
+    }
+
+    protected DownsamplingMethod getDownsamplingMethod() {
+        GATKArgumentCollection argCollection = this.getArguments();
+
+        DownsamplingMethod commandLineMethod = argCollection.getDownsamplingMethod();
+        DownsamplingMethod walkerMethod = WalkerManager.getDownsamplingMethod(walker);
+
+        DownsamplingMethod method = commandLineMethod != null ? commandLineMethod : walkerMethod;
+        checkCompatibilityWithWalker(method, walker);
+        return method;
+    }
+
+    /**
+     * Validate that the chosen downsampling method is legal for the given walker.
+     * Refactored from DownsamplingMethod.
+     *
+     * Locus-based traversals (LocusWalker/ActiveRegionWalker) may not combine the
+     * ALL_READS method with a coverage target, and any coverage target they do use
+     * must meet the minimum safe value to avoid downsampling artifacts.
+     *
+     * @param method the downsampling method to check; its type and toCoverage are read
+     * @param walker the walker the method will be applied to
+     * @throws UserException if the combination is unsupported
+     */
+    private static void checkCompatibilityWithWalker( DownsamplingMethod method, Walker walker ) {
+        // Refactored from DownsamplingMethod
+        final DownsampleType type = method.type;
+        final Integer toCoverage = method.toCoverage;
+        final boolean isLocusTraversal = walker instanceof LocusWalker || walker instanceof ActiveRegionWalker;
+
+        if ( isLocusTraversal && type == DownsampleType.ALL_READS && toCoverage != null ) {
+            throw new UserException("Downsampling to coverage with the ALL_READS method for locus-based traversals (eg., LocusWalkers) is not currently supported (though it is supported for ReadWalkers).");
+        }
+
+        // For locus traversals, ensure that the dcov value (if present) is not problematically low
+        if ( isLocusTraversal && type != DownsampleType.NONE && toCoverage != null &&
+                toCoverage < DownsamplingMethod.MINIMUM_SAFE_COVERAGE_TARGET_FOR_LOCUS_BASED_TRAVERSALS ) {
+            throw new UserException(String.format("Locus-based traversals (ie., Locus and ActiveRegion walkers) require " +
+                            "a minimum -dcov value of %d when downsampling to coverage. Values less " +
+                            "than this can produce problematic downsampling artifacts while providing " +
+                            "only insignificant improvements in memory usage in most cases.",
+                    DownsamplingMethod.MINIMUM_SAFE_COVERAGE_TARGET_FOR_LOCUS_BASED_TRAVERSALS));
+        }
+    }
+
+    protected void setDownsamplingMethod(DownsamplingMethod method) {
+        argCollection.setDownsamplingMethod(method);
+    }
+
+    protected boolean includeReadsWithDeletionAtLoci() {
+        return walker.includeReadsWithDeletionAtLoci();
+    }
+
+    /**
+     * Verifies that the supplied set of reads files mesh with what the walker says it requires;
+     * also makes sure that list of SAM files specified on the command line is not empty and contains
+     * no duplicates.
+     *
+     * @throws ArgumentException if reads are required but absent, or supplied but not allowed
+     * @throws UserException if the unpacked input list contains no BAM files, or contains duplicates
+     */
+    protected void validateSuppliedReads() {
+        final GATKArgumentCollection arguments = this.getArguments();
+        // primitive boolean: there is no tri-state here, so the boxed Boolean was unnecessary
+        final boolean samFilesArePresent = (arguments.samFiles != null && !arguments.samFiles.isEmpty());
+
+        // Check what the walker says is required against what was provided on the command line.
+        if (WalkerManager.isRequired(walker, DataSource.READS) && !samFilesArePresent)
+            throw new ArgumentException("Walker requires reads but none were provided.");
+
+        // Check what the walker says is allowed against what was provided on the command line.
+        if (samFilesArePresent && !WalkerManager.isAllowed(walker, DataSource.READS))
+            throw new ArgumentException("Walker does not allow reads but reads were provided.");
+
+        //Make sure SAM list specified by the user (if necessary) is not empty
+        if(WalkerManager.isRequired(walker, DataSource.READS) && samFilesArePresent && samReaderIDs.isEmpty() ) {
+            throw new UserException("The list of input files does not contain any BAM files.");
+        }
+
+        // Make sure no SAM files were specified multiple times by the user.
+        checkForDuplicateSamFiles();
+    }
+
+    /**
+     * Check that active region walkers do not use the downsampling to coverage argument
+     *
+     * @throws UserException if an active region walker is using the -dcov or --downsample_to_coverage downsampling arguments
+     */
+    private void checkDownSamplingToCoverage() {
+        if (argCollection.downsampleCoverage != null && walker instanceof ActiveRegionWalker) {
+            throw new UserException.CommandLineException("Cannot use -dcov or --downsample_to_coverage for ActiveRegionWalkers, use another downsampling argument");
+        }
+    }
+
+    /**
+     * Checks whether there are SAM files that appear multiple times in the fully unpacked list of
+     * SAM files (samReaderIDs). If there are, throws an ArgumentException listing the files in question.
+     */
+    protected void checkForDuplicateSamFiles() {
+        Set<SAMReaderID> encounteredSamFiles = new HashSet<SAMReaderID>();
+        Set<String> duplicateSamFiles = new LinkedHashSet<String>();
+
+        for ( SAMReaderID samFile : samReaderIDs ) {
+            if ( encounteredSamFiles.contains(samFile) ) {
+                duplicateSamFiles.add(samFile.getSamFilePath());
+            }
+            else {
+                encounteredSamFiles.add(samFile);
+            }
+        }
+
+        if ( duplicateSamFiles.size() > 0 ) {
+            throw new UserException("The following BAM files appear multiple times in the list of input files: " +
+                                    duplicateSamFiles + " BAM files may be specified at most once.");
+        }
+
+    }
+
+    /**
+     * Verifies that the supplied reference file meshes with what the walker says it requires:
+     * a reference must always be present, and must not be given to a walker that disallows one.
+     *
+     * @throws ArgumentException if no reference was provided, or one was provided to a walker that forbids it.
+     */
+    protected void validateSuppliedReference() {
+        GATKArgumentCollection arguments = this.getArguments();
+        // Check what the walker says is required against what was provided on the command line.
+        // TODO: Temporarily disabling WalkerManager.isRequired check on the reference because the reference is always required.
+        if (/*WalkerManager.isRequired(walker, DataSource.REFERENCE) &&*/ arguments.referenceFile == null)
+            throw new ArgumentException("Walker requires a reference but none was provided.");
+
+        // Check what the walker says is allowed against what was provided on the command line.
+        if (arguments.referenceFile != null && !WalkerManager.isAllowed(walker, DataSource.REFERENCE))
+            throw new ArgumentException("Walker does not allow a reference but one was provided.");
+    }
+
+    /**
+     * Validates the user-supplied intervals against the selected walker type and warns
+     * when interval filtering produced an empty set.
+     *
+     * @throws ArgumentException if '-L unmapped' was supplied to a non-read walker.
+     */
+    protected void validateSuppliedIntervals() {
+        // Only read walkers support '-L unmapped' intervals.  Trap and validate any other instances of -L unmapped.
+        if(!(walker instanceof ReadWalker)) {
+            // This local is scoped to the if-block; the emptiness check further down reads the 'intervals' field instead.
+            GenomeLocSortedSet intervals = getIntervals();
+            if(intervals != null && getIntervals().contains(GenomeLoc.UNMAPPED))
+                throw new ArgumentException("Interval list specifies unmapped region.  Only read walkers may include the unmapped region.");
+        }
+
+        // If intervals is non-null and empty at this point, it means that the list of intervals to process
+        // was filtered down to an empty set (eg., the user specified something like -L chr1 -XL chr1). Since
+        // this was very likely unintentional, the user should be informed of this. Note that this is different
+        // from the case where intervals == null, which indicates that there were no interval arguments.
+        if ( intervals != null && intervals.isEmpty() ) {
+            logger.warn("The given combination of -L and -XL options results in an empty set.  No intervals to process.");
+        }
+
+        // TODO: add a check for ActiveRegion walkers to prevent users from passing an entire contig/chromosome
+    }
+
+    /**
+     * Get the sharding strategy given a driving data source.
+     *
+     * Sharding is read-driven whenever the reads data source is non-empty; otherwise shards
+     * are cut over the reference (or over the supplied intervals).
+     *
+     * @param readsDataSource data source for the reads; dereferenced unconditionally below, so must be non-null.
+     * @param drivingDataSource Data on which to shard.
+     * @param intervals intervals to process, or null when no interval arguments were supplied.
+     * @return the sharding strategy
+     */
+    protected Iterable<Shard> getShardStrategy(SAMDataSource readsDataSource, ReferenceSequenceFile drivingDataSource, GenomeLocSortedSet intervals) {
+        // NOTE(review): the null guard here suggests readsDataSource may be null, but the
+        // isEmpty() call below would then NPE -- confirm callers never pass null.
+        // (An unused DownsamplingMethod local that was previously computed here has been removed.)
+        ValidationExclusion exclusions = (readsDataSource != null ? readsDataSource.getReadsInfo().getValidationExclusionList() : null);
+        ReferenceDataSource referenceDataSource = this.getReferenceDataSource();
+
+        // If reads are present, assume that accessing the reads is always the dominant factor and shard based on that supposition.
+        if(!readsDataSource.isEmpty()) {
+            if(!readsDataSource.hasIndex() && !exclusions.contains(ValidationExclusion.TYPE.ALLOW_UNINDEXED_BAM))
+                throw new UserException.CommandLineException("Cannot process the provided BAM/CRAM file(s) because they were not indexed.  The GATK does offer limited processing of unindexed BAM/CRAMs in --unsafe mode, but this feature is unsupported -- use it at your own risk!");
+            if(!readsDataSource.hasIndex() && intervals != null && !argCollection.allowIntervalsWithUnindexedBAM)
+                throw new UserException.CommandLineException("Cannot perform interval processing when reads are present but no index is available.");
+
+            // Locus and active-region traversals require coordinate-sorted input.
+            if(walker instanceof LocusWalker) {
+                if (readsDataSource.getSortOrder() != SAMFileHeader.SortOrder.coordinate)
+                    throw new UserException.MissortedBAM(SAMFileHeader.SortOrder.coordinate, "Locus walkers can only traverse coordinate-sorted data.  Please resort your input BAM file(s) or set the Sort Order tag in the header appropriately.");
+                if(intervals == null)
+                    return readsDataSource.createShardIteratorOverMappedReads(new LocusShardBalancer());
+                else
+                    return readsDataSource.createShardIteratorOverIntervals(intervals,new LocusShardBalancer());
+            }
+            else if(walker instanceof ActiveRegionWalker) {
+                if (readsDataSource.getSortOrder() != SAMFileHeader.SortOrder.coordinate)
+                    throw new UserException.MissortedBAM(SAMFileHeader.SortOrder.coordinate, "Active region walkers can only traverse coordinate-sorted data.  Please resort your input BAM file(s) or set the Sort Order tag in the header appropriately.");
+                if(intervals == null)
+                    return readsDataSource.createShardIteratorOverMappedReads(new ActiveRegionShardBalancer());
+                else
+                    // Active region walkers may extend the supplied intervals before sharding.
+                    return readsDataSource.createShardIteratorOverIntervals(((ActiveRegionWalker)walker).extendIntervals(intervals, this.genomeLocParser, this.getReferenceDataSource().getReference()), new ActiveRegionShardBalancer());
+            }
+            else if(walker instanceof ReadWalker || walker instanceof ReadPairWalker || walker instanceof DuplicateWalker) {
+                // Apply special validation to read pair walkers.
+                if(walker instanceof ReadPairWalker) {
+                    if(readsDataSource.getSortOrder() != SAMFileHeader.SortOrder.queryname)
+                        throw new UserException.MissortedBAM(SAMFileHeader.SortOrder.queryname, "Read pair walkers are exceptions in that they cannot be run on coordinate-sorted BAMs but instead require query name-sorted files.  You will need to resort your input BAM file in query name order to use this walker.");
+                    if(intervals != null && !intervals.isEmpty())
+                        throw new UserException.CommandLineException("Pairs traversal cannot be used in conjunction with intervals.");
+                }
+
+                if(intervals == null)
+                    return readsDataSource.createShardIteratorOverAllReads(new ReadShardBalancer());
+                else
+                    return readsDataSource.createShardIteratorOverIntervals(intervals, new ReadShardBalancer());
+            }
+            else
+                throw new ReviewedGATKException("Unable to determine walker type for walker " + walker.getClass().getName());
+        }
+        else {
+            // TODO -- Determine what the ideal shard size should be here.  Matt suggested that a multiple of 16K might work well
+            // TODO --  (because of how VCF indexes work), but my empirical experience has been simply that the larger the shard
+            // TODO --  size the more efficient the traversal (at least for RODWalkers).  Keeping the previous values for now.  [EB]
+            final int SHARD_SIZE = walker instanceof RodWalker ? 1000000 : 100000;
+            if(intervals == null)
+                return referenceDataSource.createShardsOverEntireReference(readsDataSource,genomeLocParser,SHARD_SIZE);
+            else
+                return referenceDataSource.createShardsOverIntervals(readsDataSource,intervals,SHARD_SIZE);
+        }
+    }
+
+    /**
+     * Whether reference-ordered data sources should be built with the flashback flag enabled;
+     * the result is forwarded to each ReferenceOrderedDataSource in getReferenceOrderedDataSources().
+     *
+     * @return true only when the current walker is a read walker.
+     */
+    protected boolean flashbackData() {
+        return walker instanceof ReadWalker;
+    }
+
+    /**
+     * Create the temp directory if it doesn't exist.
+     */
+    private void initializeTempDirectory() {
+        File tempDir = new File(System.getProperty("java.io.tmpdir"));
+        if (!tempDir.exists() && !tempDir.mkdirs())
+            throw new UserException.BadTmpDir("Unable to create directory");
+    }
+
+    /**
+     * Initialize the output streams as specified by the user.
+     *
+     * @param outputTracker the tracker supplying the initialization data.
+     */
+    private void initializeOutputStreams(final OutputTracker outputTracker) {
+        for (final Map.Entry<ArgumentSource, Object> input : getInputs().entrySet()) {
+            setReferenceFile(input.getValue());
+            outputTracker.addInput(input.getKey(), input.getValue());
+        }
+        for (final Stub<?> stub : getOutputs()) {
+            setReferenceFile(stub);
+            stub.processArguments(argCollection);
+            outputTracker.addOutput(stub);
+        }
+
+        outputTracker.prepareWalker(walker, getArguments().strictnessLevel);
+    }
+
+    private void setReferenceFile(final Object object) {
+        if (object instanceof ReferenceBacked) {
+            ((ReferenceBacked)object).setReferenceFile(argCollection.referenceFile);
+        }
+    }
+
+    /**
+     * @return the reference data source in use by this engine.
+     */
+    public ReferenceDataSource getReferenceDataSource() {
+        return referenceDataSource;
+    }
+
+    /**
+     * @return the GenomeLocParser built from the reference (see setReferenceDataSource).
+     */
+    public GenomeLocParser getGenomeLocParser() {
+        return genomeLocParser;
+    }
+
+    /**
+     * Manage lists of filters.
+     */
+    private final FilterManager filterManager = new FilterManager();
+
+    private Date startTime = null; // the start time for execution
+
+    /**
+     * Sets the command-line parsing engine used for tag lookup and approximate
+     * command-line recreation.
+     *
+     * @param parsingEngine the parsing engine to use.
+     */
+    public void setParser(ParsingEngine parsingEngine) {
+        this.parsingEngine = parsingEngine;
+    }
+
+    /**
+     * Explicitly set the GenomeLocParser, for unit testing.
+     * Normal initialization derives the parser from the reference in setReferenceDataSource().
+     * @param genomeLocParser GenomeLocParser to use.
+     */
+    public void setGenomeLocParser(GenomeLocParser genomeLocParser) {
+        this.genomeLocParser = genomeLocParser;
+    }
+
+    /**
+     * Sets the start time when the execute() function was last called
+     * @param startTime the start time when the execute() function was last called
+     */
+    protected void setStartTime(Date startTime) {
+        this.startTime = startTime;
+    }
+
+    /**
+     * @return the start time when the execute() function was last called
+     */
+    public Date getStartTime() {
+        return startTime;
+    }
+
+    /**
+     * Setup the intervals to be processed.
+     * Parses the user's interval arguments against the reference and stores the
+     * result in the 'intervals' field (null when no interval arguments were given).
+     */
+    protected void initializeIntervals() {
+        intervals = IntervalUtils.parseIntervalArguments(this.referenceDataSource.getReference(), argCollection.intervalArguments);
+    }
+
+    /**
+     * Add additional, externally managed IO streams for inputs.
+     *
+     * @param argumentSource Field into which to inject the value.
+     * @param value          Instance to inject.
+     */
+    public void addInput(ArgumentSource argumentSource, Object value) {
+        inputs.put(argumentSource, value);
+    }
+
+    /**
+     * Add additional, externally managed IO streams for output.
+     *
+     * @param stub Instance to inject.
+     */
+    public void addOutput(Stub<?> stub) {
+        outputs.add(stub);
+    }
+
+    /**
+     * Returns the tag associated with a given command-line argument.
+     * Delegates to the parsing engine supplied via setParser().
+     * @param key Object for which to inspect the tag.
+     * @return Tags object associated with the given key, or an empty Tag structure if none are present. 
+     */
+    public Tags getTags(Object key)  {
+        return parsingEngine.getTags(key);
+    }
+
+    /**
+     * Initializes the reads and reference-ordered data sources.  The ordering below
+     * matters: the reference must be set up before the reads data source (which needs
+     * the reference and the configured read transformers), and both before the rods.
+     */
+    protected void initializeDataSources() {
+        logger.info("Strictness is " + argCollection.strictnessLevel);
+
+        validateSuppliedReference();
+        setReferenceDataSource(argCollection.referenceFile);
+
+        validateSuppliedReads();
+        initializeReadTransformers(walker);
+
+        // On-the-fly sample renaming is optional; a null map disables it downstream.
+        final Map<String, String> sampleRenameMap = argCollection.sampleRenameMappingFile != null ?
+                                                    loadSampleRenameMap(argCollection.sampleRenameMappingFile) :
+                                                    null;
+
+        readsDataSource = createReadsDataSource(argCollection,genomeLocParser,referenceDataSource.getReference(), sampleRenameMap);
+
+        // Give each read filter a chance to configure itself against the fully built engine.
+        for (ReadFilter filter : filters)
+            filter.initialize(this);
+
+        // set the sequence dictionary of all of Tribble tracks to the sequence dictionary of our reference
+        rodDataSources = getReferenceOrderedDataSources(referenceMetaDataFiles,referenceDataSource.getReference().getSequenceDictionary(),
+                                                        genomeLocParser,argCollection.unsafe,sampleRenameMap);
+    }
+
+    /**
+     * Purely for testing purposes.  Do not use unless you absolutely positively know what you are doing (or
+     * need to absolutely positively kill everyone in the room)
+     * @param dataSource the reads data source to install, replacing any previously initialized one.
+     */
+    public void setReadsDataSource(final SAMDataSource dataSource) {
+        this.readsDataSource = dataSource;
+    }
+
+    /**
+     * Entry-point function to initialize the samples database from input data and pedigree arguments.
+     * Samples are accumulated from the SAM header, the rods, then pedigree files/strings,
+     * and the finalized database is stored in the 'sampleDB' field.
+     */
+    private void initializeSampleDB() {
+        SampleDBBuilder sampleDBBuilder = new SampleDBBuilder(this, argCollection.pedigreeValidationType);
+        sampleDBBuilder.addSamplesFromSAMHeader(getSAMFileHeader());
+        sampleDBBuilder.addSamplesFromSampleNames(SampleUtils.getUniqueSamplesFromRods(this));
+        sampleDBBuilder.addSamplesFromPedigreeFiles(argCollection.pedigreeFiles);
+        sampleDBBuilder.addSamplesFromPedigreeStrings(argCollection.pedigreeStrings);
+        sampleDB = sampleDBBuilder.getFinalSampleDB();
+    }
+
+    /**
+     * Gets a unique identifier for the reader sourcing this read.
+     * @param read Read to examine.
+     * @return A unique identifier for the source file of this read.  Exception if not found.
+     */
+    public SAMReaderID getReaderIDForRead(final SAMRecord read) {
+        return getReadsDataSource().getReaderID(read);
+    }
+
+    /**
+     * Gets the source file for this read.
+     * @param id Unique identifier determining which input file to use.
+     * @return The source filename for this read.
+     */
+    public File getSourceFileForReaderID(final SAMReaderID id) {
+        return getReadsDataSource().getSAMFile(id);
+    }
+
+    /**
+     * Now that all files are open, validate the sequence dictionaries of the reads vs. the reference vs. the reference ordered data (if available).
+     *
+     * @param reads     Reads data source.
+     * @param reference Reference data source.
+     * @param rods      a collection of the reference ordered data tracks; may be null or empty.
+     */
+    private void validateDataSourcesAgainstReference(SAMDataSource reads, ReferenceSequenceFile reference, Collection<ReferenceOrderedDataSource> rods) {
+        // Nothing to validate without a reference, or when there are neither reads nor rods.
+        if ((reads.isEmpty() && (rods == null || rods.isEmpty())) || reference == null )
+            return;
+
+        // Compile a set of sequence names that exist in the reference file.
+        SAMSequenceDictionary referenceDictionary = reference.getSequenceDictionary();
+
+        if (!reads.isEmpty()) {
+            // Compile a set of sequence names that exist in the BAM files.
+            SAMSequenceDictionary readsDictionary = reads.getHeader().getSequenceDictionary();
+
+            if (readsDictionary.isEmpty()) {
+                logger.info("Reads file is unmapped. Skipping validation against reference.");
+                return;
+            }
+
+            // compare the reads to the reference
+            SequenceDictionaryUtils.validateDictionaries(logger, getArguments().unsafe, "reads", readsDictionary,
+                                                         "reference", referenceDictionary, true, intervals);
+        }
+
+        // Guard against a null rods collection: the early return above only catches null
+        // rods when the reads are also empty, so this loop previously threw an NPE when
+        // reads were present but rods was null.
+        if (rods != null) {
+            for (ReferenceOrderedDataSource rod : rods)
+                IndexDictionaryUtils.validateTrackSequenceDictionary(rod.getName(), rod.getSequenceDictionary(), referenceDictionary, getArguments().unsafe);
+        }
+    }
+
+    /**
+     * Gets a data source for the given set of reads.
+     *
+     * @param argCollection arguments
+     * @param genomeLocParser parser
+     * @param refReader reader
+     * @param sampleRenameMap map of input file path to new sample name for on-the-fly
+     *                        sample renaming, or null to disable renaming.
+     * @return A data source for the given set of reads.
+     */
+    private SAMDataSource createReadsDataSource(final GATKArgumentCollection argCollection, final GenomeLocParser genomeLocParser,
+                                                final IndexedFastaSequenceFile refReader, final Map<String, String> sampleRenameMap) {
+        DownsamplingMethod downsamplingMethod = getDownsamplingMethod();
+
+        // Synchronize the method back into the collection so that it shows up when
+        // interrogating for the downsampling method during command line recreation.
+        setDownsamplingMethod(downsamplingMethod);
+
+        logger.info(downsamplingMethod);
+
+        if (argCollection.removeProgramRecords && argCollection.keepProgramRecords)
+            throw new UserException.BadArgumentValue("rpr / kpr", "Cannot enable both options");
+
+        // Program records are stripped when requested explicitly or via the walker's annotation...
+        boolean removeProgramRecords = argCollection.removeProgramRecords || walker.getClass().isAnnotationPresent(RemoveProgramRecords.class);
+
+        // ...but an explicit keep request overrides the walker's annotation.
+        if (argCollection.keepProgramRecords)
+            removeProgramRecords = false;
+
+        final boolean keepReadsInLIBS = walker instanceof ActiveRegionWalker;
+
+        return new SAMDataSource(
+                argCollection.referenceFile,
+                samReaderIDs,
+                threadAllocation,
+                argCollection.numberOfBAMFileHandles,
+                genomeLocParser,
+                argCollection.useOriginalBaseQualities,
+                argCollection.strictnessLevel,
+                argCollection.readBufferSize,
+                downsamplingMethod,
+                new ValidationExclusion(Arrays.asList(argCollection.unsafe)),
+                filters,
+                readTransformers,
+                includeReadsWithDeletionAtLoci(),
+                argCollection.defaultBaseQualities,
+                removeProgramRecords,
+                keepReadsInLIBS,
+                sampleRenameMap,
+                argCollection.intervalArguments.intervalMerging);
+    }
+
+    /**
+     * Loads a user-provided sample rename map file for use in on-the-fly sample renaming into an in-memory
+     * HashMap. This file must consist of lines with two whitespace-separated fields, the second of which
+     * may contain whitespace:
+     *
+     * absolute_path_to_file    new_sample_name
+     *
+     * The engine will verify that each file contains data from only one sample when the on-the-fly sample
+     * renaming feature is being used. Note that this feature works only with bam and vcf files.
+     *
+     * @param sampleRenameMapFile sample rename map file from which to load data
+     * @return a HashMap containing the contents of the map file, with the keys being the input file paths and
+     *         the values being the new sample names.
+     * @throws UserException.MalformedFile on a malformed line, relative path, illegal sample name, or duplicate file entry.
+     * @throws UserException.CouldNotReadInputFile if the file cannot be opened.
+     */
+    protected Map<String, String> loadSampleRenameMap( final File sampleRenameMapFile ) {
+        logger.info("Renaming samples from input files on-the-fly using mapping file " + sampleRenameMapFile.getAbsolutePath());
+
+        // Rough initial-capacity guess: roughly 50 bytes per mapping line.
+        final Map<String, String> sampleRenameMap = new HashMap<>((int)sampleRenameMapFile.length() / 50);
+
+        // NOTE(review): the XReadLines instance is never closed here -- confirm whether it
+        // holds an open file handle that should be released.
+        try {
+            for ( final String line : new XReadLines(sampleRenameMapFile) ) {
+                // Split into at most 2 tokens so the new sample name may itself contain whitespace.
+                final String[] tokens = line.split("\\s+", 2);
+
+                if ( tokens.length != 2 ) {
+                    throw new UserException.MalformedFile(sampleRenameMapFile,
+                                                          String.format("Encountered a line with %s fields instead of the required 2 fields. Line was: %s",
+                                                                        tokens.length, line));
+                }
+
+                final File inputFile = new File(tokens[0]);
+                final String newSampleName = tokens[1].trim();
+
+                // The sample name ends up in VCF output, so it may not contain the VCF field separator.
+                if (newSampleName.contains(VCFConstants.FIELD_SEPARATOR)) {
+                    throw new UserException.MalformedFile(sampleRenameMapFile, String.format(
+                            "Encountered illegal sample name; sample names may not include the VCF field delimiter (%s).  Sample name: %s; line: %s",
+                            VCFConstants.FIELD_SEPARATOR,
+                            newSampleName,
+                            line
+                    ));
+                }
+
+                if ( ! inputFile.isAbsolute() ) {
+                    throw new UserException.MalformedFile(sampleRenameMapFile, "Input file path not absolute at line: " + line);
+                }
+
+                final String inputFilePath = inputFile.getAbsolutePath();
+
+                if ( sampleRenameMap.containsKey(inputFilePath) ) {
+                    throw new UserException.MalformedFile(sampleRenameMapFile,
+                                                          String.format("Input file %s appears more than once", inputFilePath));
+                }
+
+                sampleRenameMap.put(inputFilePath, newSampleName);
+            }
+        }
+        catch ( FileNotFoundException e ) {
+            throw new UserException.CouldNotReadInputFile(sampleRenameMapFile, e);
+        }
+
+        return sampleRenameMap;
+    }
+
+
+    /**
+     * Opens a reference sequence file paired with an index.  Only public for testing purposes
+     *
+     * Also rebuilds the engine's GenomeLocParser from the newly opened reference.
+     *
+     * @param refFile Handle to a reference sequence file.  Non-null.
+     */
+    public void setReferenceDataSource(File refFile) {
+        this.referenceDataSource = new ReferenceDataSource(refFile);
+        genomeLocParser = new GenomeLocParser(referenceDataSource.getReference());
+    }
+
+    /**
+     * Open the reference-ordered data sources.
+     *
+     * @param referenceMetaDataFiles collection of RMD descriptors to load and validate.
+     * @param sequenceDictionary GATK-wide sequence dictionary to use for validation.
+     * @param genomeLocParser to use when creating and validating GenomeLocs.
+     * @param validationExclusionType potentially indicate which validations to include / exclude.
+     * @param sampleRenameMap map of file -> new sample name used when doing on-the-fly sample renaming
+     *
+     * @return A list of reference-ordered data sources.
+     */
+    private List<ReferenceOrderedDataSource> getReferenceOrderedDataSources(final Collection<RMDTriplet> referenceMetaDataFiles,
+                                                                            final SAMSequenceDictionary sequenceDictionary,
+                                                                            final GenomeLocParser genomeLocParser,
+                                                                            final ValidationExclusion.TYPE validationExclusionType,
+                                                                            final Map<String, String> sampleRenameMap) {
+        // A single track builder is shared across all descriptors.
+        final RMDTrackBuilder builder = new RMDTrackBuilder(sequenceDictionary,genomeLocParser, validationExclusionType,
+                                                            getArguments().disableAutoIndexCreationAndLockingWhenReadingRods,
+                                                            sampleRenameMap);
+
+        final List<ReferenceOrderedDataSource> dataSources = new ArrayList<ReferenceOrderedDataSource>();
+        for (RMDTriplet fileDescriptor : referenceMetaDataFiles)
+            dataSources.add(new ReferenceOrderedDataSource(fileDescriptor,
+                                                           builder,
+                                                           sequenceDictionary,
+                                                           genomeLocParser,
+                                                           flashbackData()));
+
+        return dataSources;
+    }
+
+    /**
+     * Returns the SAM File Header from the input reads' data source file
+     * @return the SAM File Header from the input reads' data source file
+     */
+    public SAMFileHeader getSAMFileHeader() {
+        return readsDataSource.getHeader();
+    }
+
+    /**
+     * @return true when the unsafe options allow lenient VCF processing.
+     */
+    public boolean lenientVCFProcessing() {
+        return ValidationExclusion.lenientVCFProcessing(argCollection.unsafe);
+    }
+
+    /**
+     * Returns the unmerged SAM file header for an individual reader.
+     * @param reader The reader.
+     * @return Header for that reader or null if not available.
+     */
+    public SAMFileHeader getSAMFileHeader(SAMReaderID reader) {
+        if (readsDataSource == null) {
+            // No reads data source has been initialized; no header to report.
+            return null;
+        }
+        return readsDataSource.getHeader(reader);
+    }
+
+    /**
+     * Returns an ordered list of the unmerged SAM file headers known to this engine.
+     * @return list of header for each input SAM file, in command line order
+     */
+    public List<SAMFileHeader> getSAMFileHeaders() {
+        // Hoist the data source lookup out of the loop rather than calling
+        // getReadsDataSource() twice per iteration.
+        final SAMDataSource dataSource = getReadsDataSource();
+        final List<SAMFileHeader> headers = new ArrayList<SAMFileHeader>();
+        for ( final SAMReaderID id : dataSource.getReaderIDs() ) {
+            headers.add(dataSource.getHeader(id));
+        }
+        return headers;
+    }
+
+    /**
+     * Gets the master sequence dictionary for this GATK engine instance
+     * @return a never-null dictionary listing all of the contigs known to this engine instance
+     */
+    public SAMSequenceDictionary getMasterSequenceDictionary() {
+        return getReferenceDataSource().getReference().getSequenceDictionary();
+    }
+
+    /**
+     * Returns data source object encapsulating all essential info and handlers used to traverse
+     * reads; header merger, individual file readers etc can be accessed through the returned data source object.
+     *
+     * @return the reads data source
+     */
+    public SAMDataSource getReadsDataSource() {
+        return this.readsDataSource;
+    }
+
+    /**
+     * Sets the collection of GATK main application arguments.
+     *
+     * @param argCollection the GATK argument collection
+     */
+    public void setArguments(GATKArgumentCollection argCollection) {
+        this.argCollection = argCollection;
+    }
+
+    /**
+     * Gets the collection of GATK main application arguments.
+     *
+     * @return the GATK argument collection
+     */
+    public GATKArgumentCollection getArguments() {
+        return this.argCollection;
+    }
+
+    /**
+     * Get the list of intervals passed to the engine.
+     * @return List of intervals, or null if no intervals are in use
+     */
+    public GenomeLocSortedSet getIntervals() {
+        return this.intervals;
+    }
+
+    /**
+     * Get the list of regions of the genome being processed.  If the user
+     * requested specific intervals, return those, otherwise return regions
+     * corresponding to the entire genome.  Never returns null.
+     *
+     * @return a non-null set of intervals being processed
+     */
+    @Ensures("result != null")
+    public GenomeLocSortedSet getRegionsOfGenomeBeingProcessed() {
+        if ( getIntervals() == null )
+            // if we don't have any intervals defined, create intervals from the reference itself
+            return GenomeLocSortedSet.createSetFromSequenceDictionary(getReferenceDataSource().getReference().getSequenceDictionary());
+        else
+            return getIntervals();
+    }
+
+    /**
+     * Gets the list of filters employed by this engine.
+     * @return Collection of filters (actual instances) used by this engine.
+     */
+    public Collection<ReadFilter> getFilters() {
+        return this.filters;
+    }
+
+    /**
+     * Sets the list of filters employed by this engine.
+     * @param filters Collection of filters (actual instances) used by this engine.
+     */
+    public void setFilters(Collection<ReadFilter> filters) {
+        this.filters = filters;
+    }
+
+    /**
+     * Gets the filter manager for this engine.
+     * @return filter manager for this engine.
+     */
+    protected FilterManager getFilterManager() {
+        return filterManager;
+    }
+
+    /**
+     * Gets the input sources for this engine.
+     * @return input sources for this engine.
+     */
+    protected Map<ArgumentSource, Object> getInputs() {
+        return inputs;
+    }
+
+    /**
+     * Gets the output stubs for this engine.
+     * @return output stubs for this engine.
+     */
+    protected Collection<Stub<?>> getOutputs() {
+        return outputs;
+    }
+
+    /**
+     * Returns data source objects encapsulating all rod data;
+     * individual rods can be accessed through the returned data source objects.
+     *
+     * @return the rods data sources, never {@code null}.
+     */
+    public List<ReferenceOrderedDataSource> getRodDataSources() {
+        return this.rodDataSources;
+    }
+
+    /**
+     * Gets cumulative metrics about the entire run to this point.
+     * Returns a clone of this snapshot in time.
+     * @return cumulative metrics about the entire run at this point.  ReadMetrics object is a unique instance and is
+     *         owned by the caller; the caller can do with the object what they wish.
+     */
+    public ReadMetrics getCumulativeMetrics() {
+        // todo -- probably shouldn't be lazy
+        // NOTE(review): this lazy initialization is unsynchronized -- confirm the method
+        // is only reached from a single thread.
+        if ( cumulativeMetrics == null )
+            cumulativeMetrics = readsDataSource == null ? new ReadMetrics() : readsDataSource.getCumulativeReadMetrics();
+        return cumulativeMetrics;
+    }
+
+    /**
+     * Return the global ThreadEfficiencyMonitor, if there is one
+     *
+     * @return the monitor, or null if none is active
+     */
+    public ThreadEfficiencyMonitor getThreadEfficiencyMonitor() {
+        return threadEfficiencyMonitor;
+    }
+
+    // -------------------------------------------------------------------------------------
+    //
+    // code for working with Samples database
+    //
+    // -------------------------------------------------------------------------------------
+
+    /**
+     * @return the samples database built by initializeSampleDB().
+     */
+    public SampleDB getSampleDB() {
+        return this.sampleDB;
+    }
+
+    /**
+     * Reconstructs an approximate map of the command-line arguments via the parsing engine.
+     * @param argumentProviders objects to interrogate for argument values.
+     * @return map of argument name to approximate value.
+     */
+    public Map<String,String> getApproximateCommandLineArguments(Object... argumentProviders) {
+        return CommandLineUtils.getApproximateCommandLineArguments(parsingEngine,argumentProviders);
+    }
+
+    /**
+     * Reconstructs an approximate command-line string via the parsing engine.
+     * @param argumentProviders objects to interrogate for argument values.
+     * @return approximate command-line string.
+     */
+    public String createApproximateCommandLineArgumentString(Object... argumentProviders) {
+        return CommandLineUtils.createApproximateCommandLineArgumentString(parsingEngine,argumentProviders);
+    }
+
+    // -------------------------------------------------------------------------------------
+    //
+    // code for working with progress meter
+    //
+    // -------------------------------------------------------------------------------------
+
+    /**
+     * Register the global progress meter with this engine
+     *
+     * Calling this function more than once will result in an IllegalStateException
+     *
+     * @param meter a non-null progress meter
+     * @throws IllegalArgumentException if meter is null
+     * @throws IllegalStateException if a meter was already registered
+     */
+    public void registerProgressMeter(final ProgressMeter meter) {
+        if ( meter == null ) throw new IllegalArgumentException("Meter cannot be null");
+        if ( progressMeter != null ) throw new IllegalStateException("Progress meter already set");
+
+        progressMeter = meter;
+    }
+
+    /**
+     * Get the progress meter being used by this engine.  May be null if no meter has been registered yet
+     * @return a potentially null pointer to the progress meter
+     */
+    public ProgressMeter getProgressMeter() {
+        return progressMeter;
+    }
+
+    /**
+     * Does the current runtime in unit exceed the runtime limit, if one has been provided?
+     *
+     * @return false if no limit was requested or if runtime <= the limit, true otherwise
+     */
+    public boolean exceedsRuntimeLimit() {
+        if ( progressMeter == null )
+            // not yet initialized or not set because of testing
+            return false;
+
+        if ( getArguments().maxRuntime == NO_RUNTIME_LIMIT )
+            return false;
+        else {  
+            final long runtime = progressMeter.getRuntimeInNanosecondsUpdatedPeriodically();
+            // Sanity check: the meter should never report a negative runtime.
+            if ( runtime < 0 ) throw new IllegalArgumentException("runtime must be >= 0 but got " + runtime);
+            final long maxRuntimeNano = getRuntimeLimitInNanoseconds();
+            return runtime > maxRuntimeNano;
+        }
+    }
+
+    /**
+     * @return the runtime limit in nanoseconds, or -1 if no limit was specified
+     */
+    public long getRuntimeLimitInNanoseconds() {
+        return runtimeLimitInNanoseconds;
+    }
+
+    /**
+     * Setup the runtime limits for this engine, updating the runtimeLimitInNanoseconds
+     * as appropriate
+     *
+     * @param args the GATKArgumentCollection to retrieve our runtime limits from
+     * @throws UserException.BadArgumentValue if maxRuntime is negative and not NO_RUNTIME_LIMIT
+     */
+    private void setupRuntimeLimits(final GATKArgumentCollection args) {
+        if ( args.maxRuntime == NO_RUNTIME_LIMIT )
+            runtimeLimitInNanoseconds = -1;
+        else if (args.maxRuntime < 0 )
+            throw new UserException.BadArgumentValue("maxRuntime", "must be >= 0 or == -1 (meaning no limit) but received negative value " + args.maxRuntime);
+        else {
+            // Convert the user-supplied limit (in args.maxRuntimeUnits) to nanoseconds.
+            runtimeLimitInNanoseconds = TimeUnit.NANOSECONDS.convert(args.maxRuntime, args.maxRuntimeUnits);
+        }
+    }
+
+    /**
+     * Returns the sample list including all samples.
+     * @return never {@code null}.
+     */
+    public SampleList getSampleList() {
+        return new IndexedSampleList(getSampleDB().getSampleNames());
+    }
+
+    /**
+     * Returns the sample list including samples in read inputs.
+     * @return never {@code null}.
+     */
+    public SampleList getReadSampleList() {
+        return new IndexedSampleList(ReadUtils.getSAMFileSamples(getSAMFileHeader()));
+    }
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/ReadMetrics.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/ReadMetrics.java
index 0f00bd6..f391b45 100644
--- a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/ReadMetrics.java
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/ReadMetrics.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/ReadProperties.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/ReadProperties.java
new file mode 100644
index 0000000..5e4a355
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/ReadProperties.java
@@ -0,0 +1,197 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine;
+
+import htsjdk.samtools.SAMFileHeader;
+import htsjdk.samtools.ValidationStringency;
+import org.broadinstitute.gatk.utils.ValidationExclusion;
+import org.broadinstitute.gatk.utils.sam.SAMReaderID;
+import org.broadinstitute.gatk.utils.downsampling.DownsamplingMethod;
+import org.broadinstitute.gatk.engine.filters.ReadFilter;
+import org.broadinstitute.gatk.engine.iterators.ReadTransformer;
+
+import java.util.Collection;
+import java.util.List;
+/**
+ * User: hanna
+ * Date: May 14, 2009
+ * Time: 4:06:26 PM
+ * BROAD INSTITUTE SOFTWARE COPYRIGHT NOTICE AND AGREEMENT
+ * Software and documentation are copyright 2005 by the Broad Institute.
+ * All rights are reserved.
+ *
+ * Users acknowledge that this software is supplied without any warranty or support.
+ * The Broad Institute is not responsible for its use, misuse, or
+ * functionality.
+ */
+
+/**
+ * A data structure containing information about the reads data sources as well as
+ * information about how they should be downsampled, sorted, and filtered.
+ */
+public class ReadProperties {
+    private final Collection<SAMReaderID> readers;
+    private final SAMFileHeader header;
+    private final SAMFileHeader.SortOrder sortOrder;
+    private final ValidationStringency validationStringency;
+    private final DownsamplingMethod downsamplingMethod;
+    private final ValidationExclusion exclusionList;
+    private final Collection<ReadFilter> supplementalFilters;
+    private final List<ReadTransformer> readTransformers;
+    private final boolean keepUniqueReadListInLIBS;
+    private final boolean includeReadsWithDeletionAtLoci;
+    private final boolean useOriginalBaseQualities;
+    private final byte defaultBaseQualities;
+
+    /**
+     * Return true if the walker wants to see reads that contain deletions when looking at locus pileups
+     * 
+     * @return true if reads containing deletions should be included in locus pileups
+     */
+    public boolean includeReadsWithDeletionAtLoci() {
+        return includeReadsWithDeletionAtLoci;
+    }
+
+    public boolean keepUniqueReadListInLIBS() {
+        return keepUniqueReadListInLIBS;
+    }
+
+    /**
+     * Gets a list of the files acting as sources of reads.
+     * @return A list of files storing reads data.
+     */
+    public Collection<SAMReaderID> getSAMReaderIDs() {
+        return readers;
+    }
+
+    /**
+     * Gets the sam file header
+     * @return the sam file header
+     */
+    public SAMFileHeader getHeader() {
+        return header;
+    }
+
+    /**
+     * Gets the sort order of the reads
+     * @return the sort order of the reads
+     */
+    public SAMFileHeader.SortOrder getSortOrder() {
+        return sortOrder;
+    }
+
+    /**
+     * How strict should validation be?
+     * @return Stringency of validation.
+     */
+    public ValidationStringency getValidationStringency() {
+        return validationStringency;
+    }
+
+    /**
+     * Gets the method and parameters used when downsampling reads.
+     * @return The method used when downsampling reads.
+     */
+    public DownsamplingMethod getDownsamplingMethod() {
+        return downsamplingMethod;
+    }
+
+    /**
+     * Gets the set of validation checks that are allowed to be skipped.
+     * @return The validation exclusion list.
+     */
+    public ValidationExclusion getValidationExclusionList() {
+        return exclusionList;
+    }
+
+    public Collection<ReadFilter> getSupplementalFilters() {
+        return supplementalFilters;
+    }
+
+
+    public List<ReadTransformer> getReadTransformers() {
+        return readTransformers;
+    }
+
+    /**
+     * Return whether to use original base qualities.
+     * @return Whether to use original base qualities.
+     */
+    public boolean useOriginalBaseQualities() {
+        return useOriginalBaseQualities;
+    }
+
+    /**
+     * @return Default base quality value to fill reads missing base quality information.
+     */
+    public byte defaultBaseQualities() {
+        return defaultBaseQualities;
+    }
+
+    /**
+     * Extract the command-line arguments having to do with reads input
+     * files and store them in an easy-to-work-with package.  Constructor
+     * is package protected.
+     * @param samFiles list of reads files.
+     * @param header sam file header.
+     * @param useOriginalBaseQualities True if original base qualities should be used.
+     * @param strictness Stringency of reads file parsing.
+     * @param downsamplingMethod Method for downsampling reads at a given locus.
+     * @param exclusionList what safety checks we're willing to let slide
+     * @param supplementalFilters additional filters to dynamically apply.
+     * @param includeReadsWithDeletionAtLoci if 'true', the base pileups sent to the walker's map() method
+     *         will explicitly list reads with deletion over the current reference base; otherwise, only observed
+     *        bases will be seen in the pileups, and the deletions will be skipped silently.
+     * @param defaultBaseQualities if the reads have incomplete quality scores, set them all to defaultBaseQuality.
+     * @param keepUniqueReadListInLIBS If true, we will tell LocusIteratorByState to track the unique reads it sees
+     *                                 This is really useful for ActiveRegionTraversals
+     */
+    public ReadProperties( Collection<SAMReaderID> samFiles,
+           SAMFileHeader header,
+           SAMFileHeader.SortOrder sortOrder,
+           boolean useOriginalBaseQualities,
+           ValidationStringency strictness,
+           DownsamplingMethod downsamplingMethod,
+           ValidationExclusion exclusionList,
+           Collection<ReadFilter> supplementalFilters,
+           List<ReadTransformer> readTransformers,
+           boolean includeReadsWithDeletionAtLoci,
+           byte defaultBaseQualities,
+           final boolean keepUniqueReadListInLIBS) {
+        this.readers = samFiles;
+        this.header = header;
+        this.sortOrder = sortOrder;
+        this.validationStringency = strictness;
+        this.downsamplingMethod = downsamplingMethod == null ? DownsamplingMethod.NONE : downsamplingMethod;
+        this.exclusionList = exclusionList == null ? new ValidationExclusion() : exclusionList;
+        this.supplementalFilters = supplementalFilters;
+        this.readTransformers = readTransformers;
+        this.includeReadsWithDeletionAtLoci = includeReadsWithDeletionAtLoci;
+        this.useOriginalBaseQualities = useOriginalBaseQualities;
+        this.defaultBaseQualities = defaultBaseQualities;
+        this.keepUniqueReadListInLIBS = keepUniqueReadListInLIBS;
+    }
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/SampleUtils.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/SampleUtils.java
new file mode 100644
index 0000000..838a4ae
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/SampleUtils.java
@@ -0,0 +1,258 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine;
+
+import org.broadinstitute.gatk.utils.exceptions.UserException;
+import org.broadinstitute.gatk.utils.variant.GATKVariantContextUtils;
+import org.broadinstitute.gatk.utils.collections.Pair;
+import htsjdk.variant.vcf.VCFHeader;
+import org.broadinstitute.gatk.utils.text.ListFileUtils;
+import org.broadinstitute.gatk.utils.text.XReadLines;
+
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.util.*;
+
+
+/**
+ * SampleUtils is a static class (no instantiation allowed!) with some utility methods for
+ * working with sample names.
+ *
+ * @author ebanks
+ */
+public class SampleUtils {
+    /**
+     * Private constructor.  No instantiating this class!
+     */
+    private SampleUtils() {}
+
+    /**
+     * Gets all of the unique sample names from all VCF rods input by the user
+     *
+     * @param toolkit    GATK engine
+     *
+     * @return the set of unique samples
+     */
+    public static Set<String> getUniqueSamplesFromRods(GenomeAnalysisEngine toolkit) {
+        return getUniqueSamplesFromRods(toolkit, null);
+    }
+
+    /**
+     * Gets all of the unique sample names from the set of provided VCF rod names input by the user
+     *
+     * @param toolkit    GATK engine
+     * @param rodNames   list of rods to use; if null, uses all VCF rods
+     *
+     * @return the set of unique samples
+     */
+    public static Set<String> getUniqueSamplesFromRods(GenomeAnalysisEngine toolkit, Collection<String> rodNames) {
+        Set<String> samples = new LinkedHashSet<>();
+
+        for ( VCFHeader header : GATKVCFUtils.getVCFHeadersFromRods(toolkit, rodNames).values() )
+            samples.addAll(header.getGenotypeSamples());
+
+        return samples;
+    }
+
+    public static Set<String> getRodNamesWithVCFHeader(GenomeAnalysisEngine toolkit, Collection<String> rodNames) {
+        return GATKVCFUtils.getVCFHeadersFromRods(toolkit, rodNames).keySet();
+    }
+
+    public static Set<String> getSampleListWithVCFHeader(GenomeAnalysisEngine toolkit, Collection<String> rodNames) {
+        return getSampleList(GATKVCFUtils.getVCFHeadersFromRods(toolkit, rodNames));
+    }
+
+    public static Set<String> getSampleList(Map<String, VCFHeader> headers) {
+        return getSampleList(headers, GATKVariantContextUtils.GenotypeMergeType.PRIORITIZE);
+    }
+
+    public static Set<String> getSampleList(Map<String, VCFHeader> headers, GATKVariantContextUtils.GenotypeMergeType mergeOption) {
+        Set<String> samples = new TreeSet<String>();
+        for ( Map.Entry<String, VCFHeader> val : headers.entrySet() ) {
+            VCFHeader header = val.getValue();
+            for ( String sample : header.getGenotypeSamples() ) {
+                samples.add(GATKVariantContextUtils.mergedSampleName(val.getKey(), sample, mergeOption == GATKVariantContextUtils.GenotypeMergeType.UNIQUIFY));
+            }
+        }
+
+        return samples;
+    }
+
+
+    /**
+     *
+     * @param VCF_Headers
+     * @return false if any sample name is duplicated across the VCF headers, true otherwise
+     */
+    public static boolean verifyUniqueSamplesNames(Map<String, VCFHeader> VCF_Headers) {
+        Set<String> samples = new HashSet<String>();
+        for ( Map.Entry<String, VCFHeader> val : VCF_Headers.entrySet() ) {
+            VCFHeader header = val.getValue();
+            for ( String sample : header.getGenotypeSamples() ) {
+                if (samples.contains(sample)){
+
+                    return false;
+                }
+                samples.add(sample);
+            }
+        }
+
+        return true;
+    }
+
+    /**
+     * Gets the sample names from all VCF rods input by the user and uniquifies them if there is overlap
+     * (e.g. sampleX.1, sampleX.2, ...)
+     * When finished, samples contains the uniquified sample names and rodNamesToSampleNames contains a mapping
+     * from rod/sample pairs to the new uniquified names
+     *
+     * @param toolkit    GATK engine
+     * @param samples    set to store the sample names
+     * @param rodNamesToSampleNames mapping of rod/sample pairs to new uniquified sample names
+     */
+    public static void getUniquifiedSamplesFromRods(GenomeAnalysisEngine toolkit, Set<String> samples, Map<Pair<String, String>, String> rodNamesToSampleNames) {
+
+        // keep a map of sample name to occurrences encountered
+        HashMap<String, Integer> sampleOverlapMap = new HashMap<String, Integer>();
+
+        // iterate to get all of the sample names
+
+        for ( Map.Entry<String, VCFHeader> pair : GATKVCFUtils.getVCFHeadersFromRods(toolkit).entrySet() ) {
+            for ( String sample : pair.getValue().getGenotypeSamples() )
+                addUniqueSample(samples, sampleOverlapMap, rodNamesToSampleNames, sample, pair.getKey());
+        }
+    }
+
+    private static void addUniqueSample(Set<String> samples, Map<String, Integer> sampleOverlapMap, Map<Pair<String, String>, String> rodNamesToSampleNames, String newSample, String rodName) {
+
+        // how many occurrences have we seen so far?
+        Integer occurrences = sampleOverlapMap.get(newSample);
+
+        // if this is the first one, just add it to the list of samples
+        if ( occurrences == null ) {
+            samples.add(newSample);
+            rodNamesToSampleNames.put(new Pair<String, String>(rodName, newSample), newSample);
+            sampleOverlapMap.put(newSample, 1);
+        }
+
+        // if it's already been seen multiple times, give it a unique suffix and increment the value
+        else if ( occurrences >= 2 ) {
+            String uniqueName = newSample + "." + rodName;
+            samples.add(uniqueName);
+            rodNamesToSampleNames.put(new Pair<String, String>(rodName, newSample), uniqueName);
+            sampleOverlapMap.put(newSample, occurrences + 1);
+        }
+
+        // if this is the second occurrence of the sample name, uniquify both of them
+        else { // occurrences == 1, i.e. this is the second time we've seen this sample name
+
+            // remove the 1st occurrence, uniquify it, and add it back
+            samples.remove(newSample);
+            String uniqueName1 = null;
+            for ( Map.Entry<Pair<String, String>, String> entry : rodNamesToSampleNames.entrySet() ) {
+                if ( entry.getValue().equals(newSample) ) {
+                    uniqueName1 = newSample + "." + entry.getKey().first;
+                    entry.setValue(uniqueName1);
+                    break;
+                }
+            }
+            samples.add(uniqueName1);
+
+            // add the second one
+            String uniqueName2 = newSample + "." + rodName;
+            samples.add(uniqueName2);
+            rodNamesToSampleNames.put(new Pair<String, String>(rodName, newSample), uniqueName2);
+
+            sampleOverlapMap.put(newSample, 2);
+        }
+
+    }
+
+    /**
+     * Returns a new set of samples, containing a final list of samples expanded from sampleArgs
+     *
+     * Each element E of sampleArgs can either be a literal sample name or a file.  For each E,
+     * we try to read a file named E from disk, and if possible all lines from that file are expanded
+     * into unique sample names.
+     *
+     * @param sampleArgs args
+     * @return samples
+     */
+    public static Set<String> getSamplesFromCommandLineInput(Collection<String> sampleArgs) {
+        if (sampleArgs != null) {
+            return ListFileUtils.unpackSet(sampleArgs);
+        }
+
+        return new HashSet<String>();
+    }
+
+    public static Set<String> getSamplesFromCommandLineInput(Collection<String> vcfSamples, Collection<String> sampleExpressions) {
+        Set<String> samples = ListFileUtils.unpackSet(vcfSamples);
+        if (sampleExpressions == null) {
+            return samples;
+        } else {
+            return ListFileUtils.includeMatching(samples, sampleExpressions, false);
+        }
+    }
+
+    /**
+     * Given a collection of samples and a collection of regular expressions, generates the set of samples that match each expression
+     * @param originalSamples list of samples to select samples from
+     * @param sampleExpressions list of expressions to use for matching samples
+     * @return the set of samples from originalSamples that satisfy at least one of the expressions in sampleExpressions
+     */
+    public static Collection<String> matchSamplesExpressions (Collection<String> originalSamples, Collection<String> sampleExpressions) {
+        // Now, check the expressions that weren't used in the previous step, and use them as if they're regular expressions
+        Set<String> samples = new HashSet<String>();
+        if (sampleExpressions != null) {
+            samples.addAll(ListFileUtils.includeMatching(originalSamples, sampleExpressions, false));
+        }
+        return samples;
+    }
+
+    /**
+     * Given a list of files with sample names it reads all files and creates a list of unique samples from all these files.
+     * @param files list of files with sample names in
+     * @return a collection of unique samples from all files
+     */
+    public static Collection<String> getSamplesFromFiles (Collection<File> files) {
+        Set<String> samplesFromFiles = new HashSet<String>();
+        if (files != null) {
+            for (File file : files) {
+                try {
+                    XReadLines reader = new XReadLines(file);
+                    List<String> lines = reader.readLines();
+                    for (String line : lines) {
+                        samplesFromFiles.add(line);
+                    }
+                } catch (FileNotFoundException e) {
+                    throw new UserException.CouldNotReadInputFile(file, e);
+                }
+            }
+        }
+        return samplesFromFiles;
+    }
+}
\ No newline at end of file
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/WalkerManager.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/WalkerManager.java
new file mode 100644
index 0000000..c0008c3
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/WalkerManager.java
@@ -0,0 +1,451 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine;
+
+import org.broadinstitute.gatk.engine.filters.DisableableReadFilter;
+import org.broadinstitute.gatk.engine.walkers.*;
+import org.broadinstitute.gatk.utils.commandline.Hidden;
+import org.broadinstitute.gatk.engine.datasources.rmd.ReferenceOrderedDataSource;
+import org.broadinstitute.gatk.utils.downsampling.DownsampleType;
+import org.broadinstitute.gatk.utils.downsampling.DownsamplingMethod;
+import org.broadinstitute.gatk.engine.filters.FilterManager;
+import org.broadinstitute.gatk.engine.filters.ReadFilter;
+import org.broadinstitute.gatk.engine.iterators.ReadTransformer;
+import org.broadinstitute.gatk.utils.classloader.PluginManager;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+import org.broadinstitute.gatk.utils.exceptions.UserException;
+import org.broadinstitute.gatk.utils.help.ResourceBundleExtractorDoclet;
+import org.broadinstitute.gatk.utils.text.TextFormattingUtils;
+
+import java.lang.annotation.Annotation;
+import java.util.*;
+
+/**
+ * Plugin manager that also provides various utilities for inspecting Walkers.
+ */
+public class WalkerManager extends PluginManager<Walker> {
+
+    /**
+     * A collection of help text for walkers and their enclosing packages.
+     */
+    private ResourceBundle helpText;
+
+    public WalkerManager() {
+        super(Walker.class,"walker","");
+        helpText = TextFormattingUtils.GATK_RESOURCE_BUNDLE;
+    }
+
+    /**
+     * Get the list of walkers currently available to the GATK, organized
+     * by package.
+     * @param visibleWalkersOnly If true, return only the walker names that aren't hidden.
+     * @return Names of currently available walkers.
+     */
+    public Map<String,Collection<Class<? extends Walker>>> getWalkerNamesByPackage(boolean visibleWalkersOnly) {
+        Map<String,Collection<Class<? extends Walker>>> walkersByPackage = new HashMap<String,Collection<Class<? extends Walker>>>();
+        for(Class<? extends Walker> walker: getPlugins()) {
+            if(visibleWalkersOnly && isHidden(walker))
+                continue;
+
+            // Extract the name for the package; if the walker is in the unnamed package, use the empty string
+            String walkerPackage = walker.getPackage() != null ? walker.getPackage().getName() : "";
+            if(!walkersByPackage.containsKey(walkerPackage))
+                walkersByPackage.put(walkerPackage,new ArrayList<Class<? extends Walker>>());
+            walkersByPackage.get(walkerPackage).add(walker);
+        }
+        return Collections.unmodifiableMap(walkersByPackage);
+    }
+
+    /**
+     * Gets the display name for a given package.
+     * @param packageName Fully qualified package name.
+     * @return A suitable display name for the package.
+     */
+    public String getPackageDisplayName(String packageName) {
+        // ...try to compute the override from the text of the package name, while accounting for
+        // unpackaged walkers.
+        String displayName = packageName.substring(packageName.lastIndexOf('.')+1);
+        if (displayName.trim().equals("")) displayName = "<unpackaged>";
+        return displayName;
+    }
+
+    /**
+     * Gets the help text associated with a given package name.
+     * @param packageName Package for which to search for help text.
+     * @return Package help text, or "" if none exists.
+     */
+    public String getPackageSummaryText(String packageName) {
+        String key = String.format("%s.%s",packageName, ResourceBundleExtractorDoclet.SUMMARY_TAGLET_NAME);
+        if(!helpText.containsKey(key))
+            return "";
+        return helpText.getString(key);
+    }
+
+    /**
+     * Gets the summary help text associated with a given walker type.
+     * @param walkerType Type of walker for which to search for help text.
+     * @return Walker summary description, or "" if none exists.
+     */
+    public String getWalkerSummaryText(Class<? extends Walker> walkerType) {
+        String walkerSummary = String.format("%s.%s",walkerType.getName(), ResourceBundleExtractorDoclet.SUMMARY_TAGLET_NAME);
+        if(!helpText.containsKey(walkerSummary))
+            return "";
+        return helpText.getString(walkerSummary);
+    }
+
+    /**
+     * Gets the summary help text associated with a given walker type.
+     * @param walker Walker for which to search for help text.
+     * @return Walker summary description, or "" if none exists.
+     */
+    public String getWalkerSummaryText(Walker walker) {
+        return getWalkerSummaryText(walker.getClass());
+    }
+
+    /**
+     * Gets the descriptive help text associated with a given walker type.
+     * @param walkerType Type of walker for which to search for help text.
+     * @return Walker full description, or "" if none exists.
+     */
+    public String getWalkerDescriptionText(Class<? extends Walker> walkerType) {
+        String walkerDescription = String.format("%s.%s",walkerType.getName(), ResourceBundleExtractorDoclet.DESCRIPTION_TAGLET_NAME);
+        if(!helpText.containsKey(walkerDescription))
+            return "";
+        return helpText.getString(walkerDescription);
+    }
+
+    /**
+     * Gets the descriptive help text associated with a given walker type.
+     * @param walker Walker for which to search for help text.
+     * @return Walker full description, or "" if none exists.
+     */
+    public String getWalkerDescriptionText(Walker walker) {
+        return getWalkerDescriptionText(walker.getClass());
+    }
+
+    /**
+     * Retrieves the walker class given a walker name.
+     * @param walkerName Name of the walker.
+     * @return Class representing the walker.
+     */
+    public Class<? extends Walker> getWalkerClassByName(String walkerName) {
+        return getPluginsByName().get(walkerName);
+    }
+
+    /**
+     * Rather than use the default exception, return a MalformedWalkerArgumentsException.
+     * @param errorMessage error message from formatErrorMessage()
+     * @return - A MalformedWalkerArgumentsException with errorMessage
+     */
+    @Override
+    protected UserException createMalformedArgumentException(final String errorMessage) {
+        return new UserException.MalformedWalkerArgumentsException(errorMessage);
+    }
+
+    /**
+     * Gets the data source for the provided walker.
+     * @param walkerClass The class of the walker.
+     * @return Which type of data source to traverse over...reads or reference?
+     */
+    public static DataSource getWalkerDataSource(Class<? extends Walker> walkerClass) {
+        By byDataSource = walkerClass.getAnnotation(By.class);
+        if( byDataSource == null )
+            throw new ReviewedGATKException("Unable to find By annotation for walker class " + walkerClass.getName());
+        return byDataSource.value();
+    }
+
+    /**
+     * Gets the data source for the provided walker.
+     * @param walker The walker.
+     * @return Which type of data source to traverse over...reads or reference?
+     */
+    public static DataSource getWalkerDataSource(Walker walker) {
+        return getWalkerDataSource(walker.getClass());
+    }
+
+    /**
+     * Get a list of RODs allowed by the walker.
+     * @param walkerClass Class of the walker to query.
+     * @return The list of allowed reference meta data.
+     */
+    public static List<RMD> getAllowsMetaData(Class<? extends Walker> walkerClass) {
+        return Collections.<RMD>emptyList();
+    }
+
+    /**
+     * Determine whether the given walker supports the given data source.
+     * @param walkerClass Class of the walker to query.
+     * @param dataSource Source to check for.
+     * @return True if the walker allows this data type.  False otherwise.
+     */
+    public static boolean isAllowed(Class<? extends Walker> walkerClass, DataSource dataSource) {
+        Allows allowsDataSource = getWalkerAllowed(walkerClass);
+
+        // Allows is less restrictive than requires.  If an allows
+        // clause is not specified, any kind of data is allowed.
+        if( allowsDataSource == null )
+            return true;
+
+        return Arrays.asList(allowsDataSource.value()).contains(dataSource);
+    }
+
+    /**
+     * Determine whether the given walker supports the given data source.
+     * @param walker Walker to query.
+     * @param dataSource Source to check for.
+     * @return True if the walker allows this data type.  False otherwise.
+     */
+    public static boolean isAllowed(Walker walker, DataSource dataSource) {
+        return isAllowed(walker.getClass(), dataSource);
+    }
+
+    /**
+     * Determine whether the given walker supports the given reference ordered data.
+     * @param walkerClass Class of the walker to query.
+     * @param rod Source to check.
+     * @return True if the walker allows this reference ordered data.  False otherwise.
+     */
+    public static boolean isAllowed(Class<? extends Walker> walkerClass, ReferenceOrderedDataSource rod) {
+        return true;
+    }
+
+    /**
+     * Determine whether the given walker supports the given reference ordered data.
+     * @param walker Walker to query.
+     * @param rod Source to check.
+     * @return True if the walker allows this reference ordered data.  False otherwise.
+     */
+    public static boolean isAllowed(Walker walker, ReferenceOrderedDataSource rod) {
+        return isAllowed(walker.getClass(), rod);
+    }
+
+    /**
+     * Determine whether the given walker requires the given data source.
+     * @param walkerClass Class of the walker to query.
+     * @param dataSource Source to check for.
+     * @return True if the walker requires this data type.  False otherwise.
+     */
+    public static boolean isRequired(Class<? extends Walker> walkerClass, DataSource dataSource) {
+        Requires requiresDataSource = getWalkerRequirements(walkerClass);
+        return Arrays.asList(requiresDataSource.value()).contains(dataSource);
+    }
+
+    /**
+     * Determine whether the given walker requires the given data source.
+     * @param walker Walker to query.
+     * @param dataSource Source to check for.
+     * @return True if the walker requires this data type.  False otherwise.
+     */
+    public static boolean isRequired(Walker walker, DataSource dataSource) {
+        return isRequired(walker.getClass(), dataSource);
+    }
+
+    /**
+     * Get a list of RODs required by the walker.
+     * @param walkerClass Class of the walker to query.
+     * @return The list of required reference meta data.
+     */
+    public static List<RMD> getRequiredMetaData(Class<? extends Walker> walkerClass) {
+        return Collections.emptyList();
+    }
+
+    /**
+     * Get a list of RODs required by the walker.
+     * @param walker Walker to query.
+     * @return The list of required reference meta data.
+     */
+    public static List<RMD> getRequiredMetaData(Walker walker) {
+        // Convenience overload: delegates to the class-based variant.
+        return getRequiredMetaData(walker.getClass());
+    }
+
+    /**
+     * Reports whether this walker type is hidden -- in other words, whether it carries the
+     * @Hidden annotation and should therefore be omitted from the help output.
+     * @param walkerType Class to test for visibility.
+     * @return True if the walker should be hidden.  False otherwise.
+     */
+    public static boolean isHidden(Class<? extends Walker> walkerType) {
+        return walkerType.isAnnotationPresent(Hidden.class);    
+    }
+
+    /**
+     * Extracts filters that the walker has requested be run on the dataset.
+     * @param walkerClass Class of the walker to inspect for filtering requests.
+     * @param filterManager Manages the creation of filters.
+     * @return A list of filters to apply to the reads; empty if the walker requests none.
+     */
+    public static List<ReadFilter> getReadFilters(Class<? extends Walker> walkerClass, FilterManager filterManager) {
+        List<ReadFilter> filters = new ArrayList<ReadFilter>();
+        // Instantiate one filter per type declared via annotations on the walker's class hierarchy.
+        for(Class<? extends ReadFilter> filterType: getReadFilterTypes(walkerClass))
+            filters.add(filterManager.createFilterByType(filterType));
+        return filters;
+    }
+
+    /**
+     * Extracts filters that the walker has requested be run on the dataset.
+     * @param walker Walker to inspect for filtering requests.
+     * @param filterManager Manages the creation of filters.
+     * @return A list of filters to apply to the reads; empty if the walker requests none.
+     */
+    public static List<ReadFilter> getReadFilters(Walker walker, FilterManager filterManager) {
+        // Convenience overload: delegates to the class-based variant.
+        return getReadFilters(walker.getClass(), filterManager);
+    }
+
+    /**
+     * Gets the type of downsampling method requested by the walker.  If an alternative
+     * downsampling method is specified on the command-line, the command-line version will
+     * be used instead.
+     * @param walker The walker to interrogate.
+     * @return The downsampling method, as specified by the walker.  Null if none exists.
+     */
+    public static DownsamplingMethod getDownsamplingMethod( Walker walker ) {
+        // Convenience overload: delegates to the class-based variant.
+        return getDownsamplingMethod(walker.getClass());
+    }
+
+    /**
+     * Gets the type of downsampling method requested by the walker.  If an alternative
+     * downsampling method is specified on the command-line, the command-line version will
+     * be used instead.
+     * @param walkerClass The class of the walker to interrogate.
+     * @return The downsampling method, as specified by the walker's @Downsample annotation.
+     *         Null if the annotation is absent.
+     */
+    public static DownsamplingMethod getDownsamplingMethod( Class<? extends Walker> walkerClass ) {
+        DownsamplingMethod downsamplingMethod = null;
+
+        if( walkerClass.isAnnotationPresent(Downsample.class) ) {
+            Downsample downsampleParameters = walkerClass.getAnnotation(Downsample.class);
+            DownsampleType type = downsampleParameters.by();
+            // Negative annotation values act as "unset" sentinels and are mapped to null here.
+            Integer toCoverage = downsampleParameters.toCoverage() >= 0 ? downsampleParameters.toCoverage() : null;
+            Double toFraction = downsampleParameters.toFraction() >= 0.0d ? downsampleParameters.toFraction() : null;
+            downsamplingMethod = new DownsamplingMethod(type, toCoverage, toFraction);
+        }
+
+        return downsamplingMethod;
+    }
+
+    /**
+     * Fetch an arbitrary annotation from the walker's class.
+     * @param walker Walker instance to query.
+     * @param clazz Annotation type to look up.
+     * @param <T> The annotation type.
+     * @return The annotation instance, or null if the walker's class is not so annotated.
+     */
+    public static <T extends Annotation> T getWalkerAnnotation(final Walker walker, final Class<T> clazz) {
+        return walker.getClass().getAnnotation(clazz);
+    }
+
+    /**
+     * Get the point in the pipeline at which BAQ should be applied for this walker,
+     * as declared by its @BAQMode annotation.
+     * NOTE(review): this throws NullPointerException if the walker class lacks @BAQMode --
+     * confirm callers only invoke it for annotated walkers (or that a base class supplies it).
+     * @param walker Walker to query.
+     * @return The BAQ application time declared on the walker.
+     */
+    public static ReadTransformer.ApplicationTime getBAQApplicationTime(Walker walker) {
+        return walker.getClass().getAnnotation(BAQMode.class).ApplicationTime();
+    }    
+
+    /**
+     * Create a name for this type of walker.
+     *
+     * @param walkerType The type of walker.
+     * @return A name for this type of walker.
+     */
+    @Override
+    public String getName(Class walkerType) {
+        String walkerName = "";
+
+        // Prefer an explicit @WalkerName annotation (trimmed); otherwise fall back to the
+        // superclass's default naming scheme.
+        if (walkerType.getAnnotation(WalkerName.class) != null)
+            walkerName = ((WalkerName)walkerType.getAnnotation(WalkerName.class)).value().trim();
+        else
+            walkerName = super.getName(walkerType);
+
+        return walkerName;
+    }
+
+    /**
+     * Utility to get the requires attribute from the walker.
+     * Throws an exception if requirements are missing.
+     * @param walkerClass Class of the walker to query for required data.
+     * @return Required data attribute.
+     * @throws ReviewedGATKException if the walker class carries no @Requires annotation.
+     */
+    private static Requires getWalkerRequirements(Class<? extends Walker> walkerClass) {
+        Requires requiresDataSource = walkerClass.getAnnotation(Requires.class);
+        if( requiresDataSource == null )
+            throw new ReviewedGATKException( "Unable to find data types required by walker class " + walkerClass.getName());
+        return requiresDataSource;
+    }
+
+    /**
+     * Utility to get the requires attribute from the walker.
+     * Throws an exception if requirements are missing.
+     * @param walker Walker to query for required data.
+     * @return Required data attribute.
+     * @throws ReviewedGATKException if the walker's class carries no @Requires annotation.
+     */
+    private static Requires getWalkerRequirements(Walker walker) {
+        // Convenience overload: delegates to the class-based variant.
+        return getWalkerRequirements(walker.getClass());
+    }
+
+    /**
+     * Utility to get the allowed attribute (@Allows annotation) from the walker.
+     * @param walkerClass Class of the walker to query for allowed data.
+     * @return Allowed data attribute.  Null if the @Allows annotation isn't present.
+     */
+    private static Allows getWalkerAllowed(Class<? extends Walker> walkerClass) {
+        Allows allowsDataSource = walkerClass.getAnnotation(Allows.class);
+        return allowsDataSource;
+    }
+
+    /**
+     * Utility to get the allowed attribute (@Allows annotation) from the walker.
+     * @param walker Walker to query for allowed data.
+     * @return Allowed data attribute.  Null if the @Allows annotation isn't present.
+     */
+    private static Allows getWalkerAllowed(Walker walker) {
+        // Convenience overload: delegates to the class-based variant.
+        return getWalkerAllowed(walker.getClass());
+    }
+
+    /**
+     * Gets the list of filtering classes specified as walker annotations.
+     * Walks the inheritance chain from the given class up to Object, at each level adding
+     * filters declared via @ReadFilters and then removing filters declared via
+     * @DisabledReadFilters at that same level.
+     * NOTE(review): because removal happens level-by-level, a filter disabled in a subclass
+     * can be re-added by a superclass's @ReadFilters encountered later in the walk -- confirm
+     * this ordering is intended.
+     * @param walkerClass Class of the walker to inspect.
+     * @return An array of types extending from SamRecordFilter.  Will never be null.
+     */
+    public static Collection<Class<? extends ReadFilter>> getReadFilterTypes(Class<?> walkerClass) {
+        List<Class<? extends ReadFilter>> filterTypes = new ArrayList<Class<? extends ReadFilter>>();
+        while(walkerClass != null) {
+            // Add the read filters in the ReadFilters annotation
+            if(walkerClass.isAnnotationPresent(ReadFilters.class)) {
+                for ( Class c : walkerClass.getAnnotation(ReadFilters.class).value() ) {
+                    if( !filterTypes.contains(c) )
+                        filterTypes.add(c);
+                }
+            }
+            // Remove read filters in the DisabledReadFilters annotation
+            if(walkerClass.isAnnotationPresent(DisabledReadFilters.class)) {
+                for ( Class c : walkerClass.getAnnotation(DisabledReadFilters.class).value() ) {
+                    if ( filterTypes.contains(c) )
+                        filterTypes.remove(c);
+                }
+            }
+            walkerClass = walkerClass.getSuperclass();
+        }
+        return filterTypes;
+    }
+
+    /**
+     * Gets the list of filtering classes specified as walker annotations.
+     * @param walker The walker to inspect.
+     * @return An array of types extending from SamRecordFilter.  Will never be null.
+     */
+    public static Collection<Class<? extends ReadFilter>> getReadFilterTypes(Walker walker) {
+        // Convenience overload: delegates to the class-based variant.
+        return getReadFilterTypes(walker.getClass());
+    }
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/alignment/Aligner.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/alignment/Aligner.java
new file mode 100644
index 0000000..eb403c6
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/alignment/Aligner.java
@@ -0,0 +1,74 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.alignment;
+
+import htsjdk.samtools.SAMFileHeader;
+import htsjdk.samtools.SAMRecord;
+
+/**
+ * Create perfect alignments from the read to the genome represented by the given BWT / suffix array. 
+ *
+ * @author mhanna
+ * @version 0.1
+ */
+public interface Aligner {
+    /**
+     * Close this instance of the BWA pointer and delete its resources.
+     */
+    public void close();    
+
+    /**
+     * Allow the aligner to choose one alignment randomly from the pile of best alignments.
+     * @param bases Bases to align.
+     * @return An alignment chosen from among the best-scoring alignments.
+     */
+    public Alignment getBestAlignment(final byte[] bases);
+
+    /**
+     * Align the read to the reference.
+     * @param read Read to align.
+     * @param header Optional header to drop in place.
+     * @return The aligned read.
+     */
+    public SAMRecord align(final SAMRecord read, final SAMFileHeader header);
+
+    /**
+     * Get an iterator of alignments, batched by mapping quality.
+     * @param bases List of bases.
+     * @return Iterator to alignments.
+     */
+    public Iterable<Alignment[]> getAllAlignments(final byte[] bases);
+
+    /**
+     * Get an iterator of aligned reads, batched by mapping quality.
+     * @param read Read to align.
+     * @param newHeader Optional new header to use when aligning the read.
+     *                  NOTE(review): original text read "If present, it must be null", which is
+     *                  self-contradictory; presumably a non-null header is applied to the
+     *                  returned reads -- confirm against the implementations.
+     * @return Iterator to alignments.
+     */
+    public Iterable<SAMRecord[]> alignAll(final SAMRecord read, final SAMFileHeader newHeader);
+}
+
+
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/alignment/Alignment.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/alignment/Alignment.java
new file mode 100644
index 0000000..2b2c6b1
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/alignment/Alignment.java
@@ -0,0 +1,246 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.alignment;
+
+import htsjdk.samtools.*;
+import org.broadinstitute.gatk.utils.BaseUtils;
+import org.broadinstitute.gatk.utils.Utils;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+
+/**
+ * Represents an alignment of a read to a site in the reference genome.
+ *
+ * @author mhanna
+ * @version 0.1
+ */
+public class Alignment {
+    // Index of the reference contig to which this read aligned.
+    protected int contigIndex;
+    // 1-based position within the contig where the alignment starts.
+    protected long alignmentStart;
+    // True if the read aligned to the reverse strand.
+    protected boolean negativeStrand;
+    // Mapping quality assigned by the aligner.
+    protected int mappingQuality;
+
+    // Parallel arrays: cigarOperators[i] applies for cigarLengths[i] bases.
+    protected char[] cigarOperators;
+    protected int[] cigarLengths;
+
+    // Edit distance (NM tag) and mismatching-position string (MD tag).
+    protected int editDistance;
+    protected String mismatchingPositions;
+
+    protected int numMismatches;
+    protected int numGapOpens;
+    protected int numGapExtensions;
+    protected int bestCount;
+    protected int secondBestCount;
+
+    /**
+     * Gets the index of the contig to which this read aligned.
+     * @return the index of the contig.
+     */
+    public int getContigIndex() { return contigIndex; }
+
+    /**
+     * Gets the starting position for the given alignment.
+     * @return Starting position.
+     */
+    public long getAlignmentStart() { return alignmentStart; }
+
+    /**
+     * Is the given alignment on the reverse strand?
+     * @return True if the alignment is on the reverse strand.
+     */
+    public boolean isNegativeStrand() { return negativeStrand; }
+
+    /**
+     * Gets the mapping quality of this alignment.
+     * @return The mapping quality.
+     */
+    public int getMappingQuality() { return mappingQuality; }
+
+    /**
+     * Gets the edit distance; will eventually end up in the NM SAM tag
+     * if this alignment makes it that far.
+     * @return The edit distance.
+     */
+    public int getEditDistance() { return editDistance; }
+
+    /**
+     * A string representation of which positions mismatch; contents of MD tag.
+     * @return String representation of mismatching positions.
+     */
+    public String getMismatchingPositions() { return mismatchingPositions; }
+    
+    /**
+     * Gets the number of mismatches in the read.
+     * @return Number of mismatches.
+     */
+    public int getNumMismatches() { return numMismatches; }
+
+    /**
+     * Get the number of gap opens.
+     * @return Number of gap opens.
+     */
+    public int getNumGapOpens() { return numGapOpens; }
+
+    /**
+     * Get the number of gap extensions.
+     * @return Number of gap extensions.
+     */
+    public int getNumGapExtensions() { return numGapExtensions; }
+
+    /**
+     * Get the number of best alignments.
+     * @return Number of top scoring alignments.
+     */
+    public int getBestCount() { return bestCount; }
+
+    /**
+     * Get the number of second best alignments.
+     * @return Number of second best scoring alignments.
+     */
+    public int getSecondBestCount() { return secondBestCount; }
+
+    /**
+     * Gets the cigar for this alignment, built from the parallel operator/length arrays.
+     * @return sam-jdk formatted alignment.
+     */
+    public Cigar getCigar() {
+        Cigar cigar = new Cigar();
+        for(int i = 0; i < cigarOperators.length; i++) {
+            CigarOperator operator = CigarOperator.characterToEnum(cigarOperators[i]);
+            cigar.add(new CigarElement(cigarLengths[i],operator));
+        }
+        return cigar;
+    }
+
+    /**
+     * Temporarily implement getCigarString() for debugging; the TextCigarCodec is unfortunately
+     * package-protected.
+     * @return The cigar in SAM text form, or "*" if the cigar is empty.
+     */
+    public String getCigarString() {
+        Cigar cigar = getCigar();
+        if(cigar.isEmpty()) return "*";
+
+        StringBuilder cigarString = new StringBuilder();
+        for(CigarElement element: cigar.getCigarElements()) {
+            cigarString.append(element.getLength());
+            cigarString.append(element.getOperator());
+        }
+        return cigarString.toString();
+    }
+
+    /**
+     * Stub for inheritance.
+     */
+    public Alignment() {}    
+
+    /**
+     * Create a new alignment object.
+     * @param contigIndex The contig to which this read aligned.
+     * @param alignmentStart The point within the contig to which this read aligned.
+     * @param negativeStrand Forward or reverse alignment of the given read.
+     * @param mappingQuality How good does BWA think this mapping is?
+     * @param cigarOperators The ordered operators in the cigar string.
+     * @param cigarLengths The lengths to which each operator applies.
+     * @param editDistance The edit distance (cumulative) of the read.
+     * @param mismatchingPositions String representation of which bases in the read mismatch.
+     * @param numMismatches Number of total mismatches in the read.
+     * @param numGapOpens Number of gap opens in the read.
+     * @param numGapExtensions Number of gap extensions in the read.
+     * @param bestCount Number of best alignments in the read.
+     * @param secondBestCount Number of second best alignments in the read.
+     */
+    public Alignment(int contigIndex,
+                     int alignmentStart,
+                     boolean negativeStrand,
+                     int mappingQuality,
+                     char[] cigarOperators,
+                     int[] cigarLengths,
+                     int editDistance,
+                     String mismatchingPositions,
+                     int numMismatches,
+                     int numGapOpens,
+                     int numGapExtensions,
+                     int bestCount,
+                     int secondBestCount) {
+        this.contigIndex = contigIndex;
+        this.alignmentStart = alignmentStart;
+        this.negativeStrand = negativeStrand;
+        this.mappingQuality = mappingQuality;
+        this.cigarOperators = cigarOperators;
+        this.cigarLengths = cigarLengths;
+        this.editDistance = editDistance;
+        this.mismatchingPositions = mismatchingPositions;
+        this.numMismatches = numMismatches;
+        this.numGapOpens = numGapOpens;
+        this.numGapExtensions = numGapExtensions;
+        this.bestCount = bestCount;
+        this.secondBestCount = secondBestCount;
+    }
+
+    /**
+     * Creates a read directly from an alignment.
+     * @param alignment The alignment to convert to a read.  If null, the read is returned
+     *                  with its alignment placement stripped (unmapped).
+     * @param unmappedRead Source of the unmapped read.  Should have bases, quality scores, and flags.
+     * @param newSAMHeader The new SAM header to use in creating this read.  Can be null; if so,
+     *                     the read retains the header (and sequence dictionary) it was cloned with.
+     * @return A mapped alignment.
+     */
+    public static SAMRecord convertToRead(Alignment alignment, SAMRecord unmappedRead, SAMFileHeader newSAMHeader) {
+        SAMRecord read;
+        try {
+            read = (SAMRecord)unmappedRead.clone();
+        }
+        catch(CloneNotSupportedException ex) {
+            // NOTE(review): the caught exception is discarded; consider chaining it as the cause.
+            throw new ReviewedGATKException("Unable to create aligned read from template.");
+        }
+
+        if(newSAMHeader != null)
+            read.setHeader(newSAMHeader);
+
+        // If we're realigning a previously aligned record, strip out the placement of the alignment.
+        read.setReferenceName(SAMRecord.NO_ALIGNMENT_REFERENCE_NAME);
+        read.setAlignmentStart(SAMRecord.NO_ALIGNMENT_START);
+        read.setMateReferenceName(SAMRecord.NO_ALIGNMENT_REFERENCE_NAME);
+        read.setMateAlignmentStart(SAMRecord.NO_ALIGNMENT_START);        
+
+        if(alignment != null) {
+            read.setReadUnmappedFlag(false);
+            read.setReferenceIndex(alignment.getContigIndex());
+            read.setAlignmentStart((int)alignment.getAlignmentStart());
+            read.setReadNegativeStrandFlag(alignment.isNegativeStrand());
+            read.setMappingQuality(alignment.getMappingQuality());
+            read.setCigar(alignment.getCigar());
+            // Negative-strand alignments store the reverse complement of the bases and reversed quals.
+            if(alignment.isNegativeStrand()) {
+                read.setReadBases(BaseUtils.simpleReverseComplement(read.getReadBases()));
+                read.setBaseQualities(Utils.reverse(read.getBaseQualities()));
+            }
+            read.setAttribute("NM",alignment.getEditDistance());
+            read.setAttribute("MD",alignment.getMismatchingPositions());
+        }
+
+        return read;
+    }    
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/alignment/bwa/BWAAligner.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/alignment/bwa/BWAAligner.java
new file mode 100644
index 0000000..d9924c7
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/alignment/bwa/BWAAligner.java
@@ -0,0 +1,63 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.alignment.bwa;
+
+import org.broadinstitute.gatk.engine.alignment.Aligner;
+
+/**
+ * Align reads using BWA.
+ *
+ * @author mhanna
+ * @version 0.1
+ */
+public abstract class BWAAligner implements Aligner {
+    /**
+     * The supporting files used by BWA.  Protected so subclasses can access the index files.
+     */
+    protected BWTFiles bwtFiles;
+
+    /**
+     * The current configuration for the BWA aligner.
+     */
+    protected BWAConfiguration configuration;
+
+    /**
+     * Create a new BWAAligner.  Purpose of this call is to ensure that all BWA constructors accept the correct
+     * parameters.
+     * @param bwtFiles The many files representing BWTs persisted to disk.
+     * @param configuration Configuration parameters for the alignment.
+     */
+    public BWAAligner(BWTFiles bwtFiles, BWAConfiguration configuration) {
+        this.bwtFiles = bwtFiles;
+        this.configuration = configuration;
+    }
+
+    /**
+     * Update the configuration passed to the BWA aligner.
+     * @param configuration New configuration to set.
+     */    
+    public abstract void updateConfiguration(BWAConfiguration configuration);
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/alignment/bwa/BWAConfiguration.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/alignment/bwa/BWAConfiguration.java
new file mode 100644
index 0000000..54e63e4
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/alignment/bwa/BWAConfiguration.java
@@ -0,0 +1,79 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.alignment.bwa;
+
+/**
+ * Configuration for the BWA/C aligner.
+ *
+ * NOTE(review): fields are public and mutable; a null value presumably means
+ * "defer to BWA's built-in default" -- confirm against the native binding.
+ *
+ * @author mhanna
+ * @version 0.1
+ */
+public class BWAConfiguration {
+    /**
+     * The maximum edit distance used by BWA.
+     */
+    public Float maximumEditDistance = null;
+
+    /**
+     * How many gap opens are acceptable within this alignment?
+     */
+    public Integer maximumGapOpens = null;
+
+    /**
+     * How many gap extensions are acceptable within this alignment?
+     */
+    public Integer maximumGapExtensions = null;
+
+    /**
+     * Do we disallow indels within a certain range from the start / end?
+     */
+    public Integer disallowIndelWithinRange = null;
+
+    /**
+     * What is the scoring penalty for a mismatch?
+     */
+    public Integer mismatchPenalty = null;
+
+    /**
+     * What is the scoring penalty for a gap open?
+     */
+    public Integer gapOpenPenalty = null;
+
+    /**
+     * What is the scoring penalty for a gap extension?
+     */
+    public Integer gapExtensionPenalty = null;
+
+    /**
+     * Enter bwa's 'non-stop' mode (equivalent to bwa aln -N parameter).
+     */
+    public Boolean nonStopMode = false;
+
+    /**
+     * Set the max queue size that bwa will use when searching for matches (equivalent to bwa aln -m parameter).
+     */
+    public Integer maxEntriesInQueue = null;
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/alignment/bwa/BWTFiles.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/alignment/bwa/BWTFiles.java
new file mode 100644
index 0000000..f5a337b
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/alignment/bwa/BWTFiles.java
@@ -0,0 +1,259 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.alignment.bwa;
+
+import htsjdk.samtools.SAMSequenceDictionary;
+import htsjdk.samtools.SAMSequenceRecord;
+import htsjdk.samtools.util.StringUtil;
+import org.broadinstitute.gatk.engine.alignment.reference.bwt.*;
+import org.broadinstitute.gatk.engine.alignment.reference.packing.PackUtils;
+import org.broadinstitute.gatk.utils.Utils;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+
+import java.io.File;
+import java.io.IOException;
+
+/**
+ * Support files for BWT.
+ *
+ * @author mhanna
+ * @version 0.1
+ */
+public class BWTFiles {
+    /**
+     * ANN (?) file name.
+     */
+    public final File annFile;
+
+    /**
+     * AMB (?) file name.
+     */
+    public final File ambFile;
+
+    /**
+     * Packed reference sequence file.
+     */
+    public final File pacFile;
+
+    /**
+     * Reverse of packed reference sequence file.
+     */
+    public final File rpacFile;
+
+    /**
+     * Forward BWT file.
+     */
+    public final File forwardBWTFile;
+
+    /**
+     * Forward suffix array file.
+     */
+    public final File forwardSAFile;
+
+    /**
+     * Reverse BWT file.
+     */
+    public final File reverseBWTFile;
+
+    /**
+     * Reverse suffix array file.
+     */
+    public final File reverseSAFile;
+
+    /**
+     * Were these files autogenerated on the fly?
+     */
+    public final boolean autogenerated;
+
+    /**
+     * Create a new BWA configuration file using the given prefix.
+     * Each support file is located by appending BWA's standard extension to the prefix.
+     * @param prefix Prefix to use when creating the configuration.  Must not be null.
+     * @throws ReviewedGATKException if prefix is null.
+     */
+    public BWTFiles(String prefix) {
+        if(prefix == null)
+            throw new ReviewedGATKException("Prefix must not be null.");
+        annFile = new File(prefix + ".ann");
+        ambFile = new File(prefix + ".amb");
+        pacFile = new File(prefix + ".pac");
+        rpacFile = new File(prefix + ".rpac");
+        forwardBWTFile = new File(prefix + ".bwt");
+        forwardSAFile = new File(prefix + ".sa");
+        reverseBWTFile = new File(prefix + ".rbwt");
+        reverseSAFile = new File(prefix + ".rsa");
+        // User-supplied files; close() will not delete them.
+        autogenerated = false;
+    }
+
+    /**
+     * Hand-create a new BWTFiles object, specifying a unique file object for each type.
+     * Used for autogenerated temporary file sets; marks them for deletion on close().
+     * @param annFile ANN (alternate dictionary) file.
+     * @param ambFile AMB (holes) files.
+     * @param pacFile Packed representation of the forward reference sequence.
+     * @param forwardBWTFile BWT representation of the forward reference sequence.
+     * @param forwardSAFile SA representation of the forward reference sequence.
+     * @param rpacFile Packed representation of the reversed reference sequence.
+     * @param reverseBWTFile BWT representation of the reversed reference sequence.
+     * @param reverseSAFile SA representation of the reversed reference sequence.
+     */
+    private BWTFiles(File annFile,
+                     File ambFile,
+                     File pacFile,
+                     File forwardBWTFile,
+                     File forwardSAFile,
+                     File rpacFile,
+                     File reverseBWTFile,
+                     File reverseSAFile) {
+        this.annFile = annFile;
+        this.ambFile = ambFile;
+        this.pacFile = pacFile;
+        this.forwardBWTFile = forwardBWTFile;
+        this.forwardSAFile = forwardSAFile;
+        this.rpacFile = rpacFile;
+        this.reverseBWTFile = reverseBWTFile;
+        this.reverseSAFile = reverseSAFile;        
+        // Temporary files created on the fly; close() will delete them.
+        autogenerated = true;
+    }
+
+    /**
+     * Close out this files object, in the process deleting any temporary files
+     * that were created.
+     * @throws ReviewedGATKException if any autogenerated file could not be deleted.
+     */
+    public void close() {
+        if(autogenerated) {
+            // Accumulate delete results with &= so every file is attempted even if one fails.
+            boolean success = true;
+            success = annFile.delete();
+            success &= ambFile.delete();
+            success &= pacFile.delete();
+            success &= forwardBWTFile.delete();
+            success &= forwardSAFile.delete();
+            success &= rpacFile.delete();
+            success &= reverseBWTFile.delete();
+            success &= reverseSAFile.delete();
+
+            if(!success)
+                throw new ReviewedGATKException("Unable to clean up autogenerated representation");
+        }
+    }
+
+    /**
+     * Create a new set of BWT files from the given reference sequence.  The
+     * input is copied, normalized (upper-cased; only A/C/G/T permitted), then
+     * encoded in both forward and reversed orientation into temporary files.
+     * The temporary files are registered for deletion on JVM exit and are also
+     * deleted eagerly by {@link #close()}.
+     * @param referenceSequence Sequence from which to build metadata.
+     * @return A new object representing encoded representations of each sequence.
+     * @throws ReviewedGATKException if the sequence contains unsupported bases
+     *         or the temporary files cannot be written.
+     */
+    public static BWTFiles createFromReferenceSequence(byte[] referenceSequence) {
+        // Normalize a defensive copy; normalization mutates the array in place.
+        byte[] normalizedReferenceSequence = new byte[referenceSequence.length];
+        System.arraycopy(referenceSequence,0,normalizedReferenceSequence,0,referenceSequence.length);
+        normalizeReferenceSequence(normalizedReferenceSequence);
+
+        File annFile,ambFile,pacFile,bwtFile,saFile,rpacFile,rbwtFile,rsaFile;
+        try {
+            // Write the ann and amb for this reference sequence.
+            annFile = File.createTempFile("bwt",".ann");
+            ambFile = File.createTempFile("bwt",".amb");
+
+            SAMSequenceDictionary dictionary = new SAMSequenceDictionary();
+            dictionary.addSequence(new SAMSequenceRecord("autogenerated",normalizedReferenceSequence.length));
+
+            ANNWriter annWriter = new ANNWriter(annFile);
+            annWriter.write(dictionary);
+            annWriter.close();
+
+            AMBWriter ambWriter = new AMBWriter(ambFile);
+            ambWriter.writeEmpty(dictionary);
+            ambWriter.close();
+
+            // Write the encoded files for the forward version of this reference sequence.
+            pacFile = File.createTempFile("bwt",".pac");
+            bwtFile = File.createTempFile("bwt",".bwt");
+            saFile = File.createTempFile("bwt",".sa");
+
+            writeEncodedReferenceSequence(normalizedReferenceSequence,pacFile,bwtFile,saFile);
+
+            // Write the encoded files for the reverse version of this reference sequence.
+            byte[] reverseReferenceSequence = Utils.reverse(normalizedReferenceSequence);
+
+            rpacFile = File.createTempFile("bwt",".rpac");
+            rbwtFile = File.createTempFile("bwt",".rbwt");
+            rsaFile = File.createTempFile("bwt",".rsa");
+
+            writeEncodedReferenceSequence(reverseReferenceSequence,rpacFile,rbwtFile,rsaFile);
+        }
+        catch(IOException ex) {
+            // Attach the underlying I/O failure so the error is diagnosable.
+            throw new ReviewedGATKException("Unable to write autogenerated reference sequence to temporary files",ex);
+        }
+
+        // Make sure that, at the very least, all temporary files are deleted on exit.
+        annFile.deleteOnExit();
+        ambFile.deleteOnExit();
+        pacFile.deleteOnExit();
+        bwtFile.deleteOnExit();
+        saFile.deleteOnExit();
+        rpacFile.deleteOnExit();
+        rbwtFile.deleteOnExit();
+        rsaFile.deleteOnExit();
+
+        return new BWTFiles(annFile,ambFile,pacFile,bwtFile,saFile,rpacFile,rbwtFile,rsaFile);
+    }
+
+    /**
+     * Write the encoded form of the reference sequence.  In the case of BWA, the encoded reference
+     * sequence is the reference itself in PAC format, the BWT, and the suffix array.
+     * @param referenceSequence The reference sequence to encode.
+     * @param pacFile Target for the PAC-encoded reference.
+     * @param bwtFile Target for the BWT representation of the reference.
+     * @param suffixArrayFile Target for the suffix array encoding of the reference.
+     * @throws java.io.IOException In case of issues writing to the file.
+     */
+    private static void writeEncodedReferenceSequence(byte[] referenceSequence,
+                                               File pacFile,
+                                               File bwtFile,
+                                               File suffixArrayFile) throws IOException {
+        PackUtils.writeReferenceSequence(pacFile,referenceSequence);
+
+        BWT bwt = BWT.createFromReferenceSequence(referenceSequence);
+        BWTWriter bwtWriter = new BWTWriter(bwtFile);
+        bwtWriter.write(bwt);
+        bwtWriter.close();
+
+        SuffixArray suffixArray = SuffixArray.createFromReferenceSequence(referenceSequence);
+        SuffixArrayWriter suffixArrayWriter = new SuffixArrayWriter(suffixArrayFile);
+        suffixArrayWriter.write(suffixArray);
+        suffixArrayWriter.close();
+    }
+
+    /**
+     * Convert the given reference sequence into a form suitable for building into
+     * on-the-fly sequences.
+     * @param referenceSequence The reference sequence to normalize.
+     * @throws org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException if normalized sequence cannot be generated.
+     */
+    private static void normalizeReferenceSequence(byte[] referenceSequence) {
+        StringUtil.toUpperCase(referenceSequence);
+        for(byte base: referenceSequence) {
+            if(base != 'A' && base != 'C' && base != 'G' && base != 'T')
+                throw new ReviewedGATKException(String.format("Base type %c is not supported when building references on-the-fly",(char)base));
+        }
+    }
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/alignment/bwa/java/AlignerTestHarness.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/alignment/bwa/java/AlignerTestHarness.java
new file mode 100644
index 0000000..50981a3
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/alignment/bwa/java/AlignerTestHarness.java
@@ -0,0 +1,189 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.alignment.bwa.java;
+
+import htsjdk.samtools.reference.IndexedFastaSequenceFile;
+import htsjdk.samtools.*;
+import org.broadinstitute.gatk.engine.alignment.Aligner;
+import org.broadinstitute.gatk.engine.alignment.Alignment;
+import org.broadinstitute.gatk.utils.BaseUtils;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+
+import java.io.File;
+import java.io.FileNotFoundException;
+
+/**
+ * A test harness to ensure that the perfect aligner works.
+ *
+ * @author mhanna
+ * @version 0.1
+ */
+public class AlignerTestHarness {
+    /**
+     * Entry point.  Expects exactly six arguments -- the reference FASTA, the
+     * BWT / reverse-BWT / suffix-array / reverse-suffix-array index files, and
+     * the BAM of reads to re-align -- and exits with status 1 otherwise.
+     */
+    public static void main( String argv[] ) throws FileNotFoundException {
+        if( argv.length != 6 ) {
+            System.out.println("PerfectAlignerTestHarness <fasta> <bwt> <rbwt> <sa> <rsa> <bam>");
+            System.exit(1);
+        }
+
+        File referenceFile = new File(argv[0]);
+        File bwtFile = new File(argv[1]);
+        File rbwtFile = new File(argv[2]);
+        File suffixArrayFile = new File(argv[3]);
+        File reverseSuffixArrayFile = new File(argv[4]);
+        File bamFile = new File(argv[5]);
+
+        align(referenceFile,bwtFile,rbwtFile,suffixArrayFile,reverseSuffixArrayFile,bamFile);
+    }
+
+    /**
+     * Re-align every read in the BAM with the Java BWA aligner and compare each
+     * result against the read's recorded alignment position and strand, printing
+     * diagnostics for reads that fail to align or align elsewhere.  Work is
+     * capped at 200,000 reads; progress is printed every 1,000 reads.
+     */
+    private static void align(File referenceFile, File bwtFile, File rbwtFile, File suffixArrayFile, File reverseSuffixArrayFile, File bamFile) throws FileNotFoundException {
+        Aligner aligner = new BWAJavaAligner(bwtFile,rbwtFile,suffixArrayFile,reverseSuffixArrayFile);
+        int count = 0;
+
+        // NOTE(review): reader is never closed.  Tolerable in a one-shot test
+        // harness, but a try/finally close would be cleaner.
+        SAMFileReader reader = new SAMFileReader(bamFile);
+        reader.setValidationStringency(ValidationStringency.SILENT);
+
+        int mismatches = 0;
+        int failures = 0;
+
+        for(SAMRecord read: reader) {
+            count++;
+            if( count > 200000 ) break;
+            //if( count < 366000 ) continue;
+            //if( count > 2 ) break;
+            //if( !read.getReadName().endsWith("SL-XBC:1:82:506:404#0") )
+            //    continue;
+            //if( !read.getReadName().endsWith("SL-XBC:1:36:30:1926#0") )
+            //    continue;
+            //if( !read.getReadName().endsWith("SL-XBC:1:60:1342:1340#0") )
+            //    continue;
+
+            // Strip the read's existing alignment so the aligner starts from scratch.
+            SAMRecord alignmentCleaned = null;
+            try {
+                alignmentCleaned = (SAMRecord)read.clone();
+            }
+            catch( CloneNotSupportedException ex ) {
+                throw new ReviewedGATKException("SAMRecord clone not supported", ex);
+            }
+
+            // Restore original base order: stored bases of negative-strand reads
+            // are reverse-complemented relative to the sequencer output.
+            if( alignmentCleaned.getReadNegativeStrandFlag() )
+                alignmentCleaned.setReadBases(BaseUtils.simpleReverseComplement(alignmentCleaned.getReadBases()));
+
+            alignmentCleaned.setReferenceIndex(SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX);
+            alignmentCleaned.setAlignmentStart(SAMRecord.NO_ALIGNMENT_START);
+            alignmentCleaned.setMappingQuality(SAMRecord.NO_MAPPING_QUALITY);
+            alignmentCleaned.setCigarString(SAMRecord.NO_ALIGNMENT_CIGAR);
+
+            // Clear everything except flags pertaining to pairing and set 'unmapped' status to true.
+            // (0x00A1 keeps the paired / mate-reverse-strand / second-of-pair bits;
+            // 0x000C sets read-unmapped and mate-unmapped -- per the SAM FLAG spec.)
+            alignmentCleaned.setFlags(alignmentCleaned.getFlags() & 0x00A1 | 0x000C);
+
+            // NOTE(review): this Iterable is iterated up to three times below
+            // (hasNext probe, search loop, diagnostic loop) -- assumes the
+            // aligner returns a re-iterable collection; confirm against
+            // Aligner.getAllAlignments().
+            Iterable<Alignment[]> alignments = aligner.getAllAlignments(alignmentCleaned.getReadBases());
+            if(!alignments.iterator().hasNext() ) {
+                //throw new GATKException(String.format("Unable to align read %s to reference; count = %d",read.getReadName(),count));
+                System.out.printf("Unable to align read %s to reference; count = %d%n",read.getReadName(),count);
+                failures++;
+            }
+
+            // Search for an alignment matching the read's recorded strand and start.
+            Alignment foundAlignment = null;
+            for(Alignment[] alignmentsOfQuality: alignments) {
+                for(Alignment alignment: alignmentsOfQuality) {
+                    if( read.getReadNegativeStrandFlag() != alignment.isNegativeStrand() )
+                        continue;
+                    if( read.getAlignmentStart() != alignment.getAlignmentStart() )
+                        continue;
+
+                    foundAlignment = alignment;                    
+                }
+            }
+
+            if( foundAlignment != null ) {
+                //System.out.printf("%s: Aligned read to reference at position %d with %d mismatches, %d gap opens, and %d gap extensions.%n", read.getReadName(), foundAlignment.getAlignmentStart(), foundAlignment.getMismatches(), foundAlignment.getGapOpens(), foundAlignment.getGapExtensions());
+            }
+            else {
+                System.out.printf("Error aligning read %s%n", read.getReadName());
+
+                mismatches++;
+
+                // NOTE(review): a new IndexedFastaSequenceFile is opened for every
+                // mismatching read and never closed; hoisting one instance out of
+                // the loop would avoid repeated opens.
+                IndexedFastaSequenceFile reference = new IndexedFastaSequenceFile(referenceFile);
+
+                System.out.printf("read          = %s, position = %d, negative strand = %b%n", formatBasesBasedOnCigar(read.getReadString(),read.getCigar(),CigarOperator.DELETION),
+                                                                                               read.getAlignmentStart(),
+                                                                                               read.getReadNegativeStrandFlag());
+                // Deletions consume reference bases, so widen the reference window accordingly.
+                int numDeletions = numDeletionsInCigar(read.getCigar());
+                String expectedRef = new String(reference.getSubsequenceAt(reference.getSequenceDictionary().getSequences().get(0).getSequenceName(),read.getAlignmentStart(),read.getAlignmentStart()+read.getReadLength()+numDeletions-1).getBases());
+                System.out.printf("expected ref  = %s%n", formatBasesBasedOnCigar(expectedRef,read.getCigar(),CigarOperator.INSERTION));
+
+                for(Alignment[] alignmentsOfQuality: alignments) {
+                    for(Alignment alignment: alignmentsOfQuality) {
+                        System.out.println();
+
+                        Cigar cigar = ((BWAAlignment)alignment).getCigar();
+
+                        System.out.printf("read          = %s%n", formatBasesBasedOnCigar(read.getReadString(),cigar,CigarOperator.DELETION));
+
+                        int deletionCount = ((BWAAlignment)alignment).getNumberOfBasesMatchingState(AlignmentState.DELETION);
+                        String alignedRef = new String(reference.getSubsequenceAt(reference.getSequenceDictionary().getSequences().get(0).getSequenceName(),alignment.getAlignmentStart(),alignment.getAlignmentStart()+read.getReadLength()+deletionCount-1).getBases());
+                        System.out.printf("actual ref    = %s, position = %d, negative strand = %b%n", formatBasesBasedOnCigar(alignedRef,cigar,CigarOperator.INSERTION),
+                                alignment.getAlignmentStart(),
+                                alignment.isNegativeStrand());
+                    }
+                }
+
+                //throw new GATKException(String.format("Read %s was placed at incorrect location; count = %d%n",read.getReadName(),count));                
+            }
+
+
+            if( count % 1000 == 0 )
+                System.out.printf("%d reads examined.%n",count);                
+        }
+
+        System.out.printf("%d reads examined; %d mismatches; %d failures.%n",count,mismatches,failures);
+    }
+
+    /**
+     * Render the given bases against a CIGAR, emitting one space per base whose
+     * operator equals {@code toBlank} and the literal base for every other
+     * operator, so read and reference strings line up column-for-column.
+     */
+    private static String formatBasesBasedOnCigar( String bases, Cigar cigar, CigarOperator toBlank ) {
+        StringBuilder formatted = new StringBuilder();
+        int readIndex = 0;
+        for(CigarElement cigarElement: cigar.getCigarElements()) {
+            if(cigarElement.getOperator() == toBlank) {
+                int number = cigarElement.getLength();
+                while( number-- > 0 ) formatted.append(' ');
+            }
+            else {
+                int number = cigarElement.getLength();
+                while( number-- > 0 ) formatted.append(bases.charAt(readIndex++));
+            }
+        }
+        return formatted.toString();
+    }
+
+    /**
+     * Total the lengths of all deletion elements in the given CIGAR.
+     */
+    private static int numDeletionsInCigar( Cigar cigar ) {
+        int numDeletions = 0;
+        for(CigarElement cigarElement: cigar.getCigarElements()) {
+            if(cigarElement.getOperator() == CigarOperator.DELETION)
+                numDeletions += cigarElement.getLength();
+        }
+        return numDeletions;
+    }
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/alignment/bwa/java/AlignmentMatchSequence.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/alignment/bwa/java/AlignmentMatchSequence.java
new file mode 100644
index 0000000..f3ba0e4
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/alignment/bwa/java/AlignmentMatchSequence.java
@@ -0,0 +1,175 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.alignment.bwa.java;
+
+import htsjdk.samtools.Cigar;
+import htsjdk.samtools.CigarElement;
+import htsjdk.samtools.CigarOperator;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+
+import java.util.ArrayDeque;
+import java.util.Deque;
+import java.util.Iterator;
+
+/**
+ * Represents a sequence of alignment states (match/mismatch, insertion,
+ * deletion) accumulated while aligning a read, stored run-length encoded
+ * in the order the states were observed.
+ *
+ * @author mhanna
+ * @version 0.1
+ */
+public class AlignmentMatchSequence implements Cloneable {
+    /**
+     * Stores the particular match entries in the order they occur.
+     */
+    private Deque<AlignmentMatchSequenceEntry> entries = new ArrayDeque<AlignmentMatchSequenceEntry>();
+
+    /**
+     * Clone the given match sequence.
+     * @return A deep copy of the current match sequence.
+     */
+    public AlignmentMatchSequence clone() {
+        AlignmentMatchSequence copy = null;
+        try {
+            copy = (AlignmentMatchSequence)super.clone();
+        }
+        catch( CloneNotSupportedException ex ) {
+            // Should be impossible (the class implements Cloneable); keep the
+            // cause attached in case it ever fires.
+            throw new ReviewedGATKException("Unable to clone AlignmentMatchSequence.",ex);
+        }
+
+        // Deep-copy the entry deque so the clone can be mutated independently.
+        copy.entries = new ArrayDeque<AlignmentMatchSequenceEntry>();
+        for( AlignmentMatchSequenceEntry entry: entries )
+            copy.entries.add(entry.clone());
+
+        return copy;
+    }
+
+    /**
+     * Convert this state sequence to a CIGAR, one element per run of identical states.
+     * @param negativeStrand If true, emit the entries in reverse order.
+     * @return CIGAR equivalent of the accumulated states.
+     */
+    public Cigar convertToCigar(boolean negativeStrand) {
+        Cigar cigar = new Cigar();
+        Iterator<AlignmentMatchSequenceEntry> iterator = negativeStrand ? entries.descendingIterator() : entries.iterator();
+        while( iterator.hasNext() ) {
+            AlignmentMatchSequenceEntry entry = iterator.next();
+            CigarOperator operator;
+            switch( entry.getAlignmentState() ) {
+                case MATCH_MISMATCH: operator = CigarOperator.MATCH_OR_MISMATCH; break;
+                case INSERTION: operator = CigarOperator.INSERTION; break;
+                case DELETION: operator = CigarOperator.DELETION; break;
+                default: throw new ReviewedGATKException("convertToCigar: cannot process state: " + entry.getAlignmentState());
+            }
+            cigar.add( new CigarElement(entry.count,operator) );
+        }
+        return cigar;
+    }
+
+    /**
+     * Add a new alignment of the given state.
+     * @param state State to add to the sequence.
+     */
+    public void addNext( AlignmentState state ) {
+        AlignmentMatchSequenceEntry last = entries.peekLast();
+        // If the last entry is the same as this one, increment it.  Otherwise, add a new entry.
+        if( last != null && last.getAlignmentState() == state )
+            last.increment();
+        else
+            entries.add(new AlignmentMatchSequenceEntry(state));
+    }
+
+    /**
+     * Gets the current state of this alignment (what's the state of the last base?)
+     * @return State of the most recently aligned base; MATCH_MISMATCH when empty.
+     */
+    public AlignmentState getCurrentState() {
+        if( entries.isEmpty() )
+            return AlignmentState.MATCH_MISMATCH;
+        return entries.peekLast().getAlignmentState();
+    }
+
+    /**
+     * How many bases in the read match the given state.
+     * @param state State to test.
+     * @return number of bases which match that state.
+     */
+    public int getNumberOfBasesMatchingState(AlignmentState state) {
+        int matches = 0;
+        for( AlignmentMatchSequenceEntry entry: entries ) {
+            if( entry.getAlignmentState() == state )
+                matches += entry.count;
+        }
+        return matches;
+    }
+
+    /**
+     * Stores an individual run-length-encoded match sequence entry.
+     * Static nested class: entries never reference the enclosing sequence.
+     */
+    private static class AlignmentMatchSequenceEntry implements Cloneable {
+        /**
+         * The state of the alignment throughout a given point in the sequence.
+         */
+        private final AlignmentState alignmentState;
+
+        /**
+         * The number of bases having this particular state.
+         */
+        private int count;
+
+        /**
+         * Create a new sequence entry with the given state and a count of one.
+         * @param alignmentState The state that this sequence should contain.
+         */
+        AlignmentMatchSequenceEntry( AlignmentState alignmentState ) {
+            this.alignmentState = alignmentState;
+            this.count = 1;
+        }
+
+        /**
+         * Clone the given match sequence entry.
+         * @return A copy of the current match sequence entry.
+         */
+        public AlignmentMatchSequenceEntry clone() {
+            try {
+                return (AlignmentMatchSequenceEntry)super.clone();
+            }
+            catch( CloneNotSupportedException ex ) {
+                // Should be impossible; preserve the cause regardless.
+                throw new ReviewedGATKException("Unable to clone AlignmentMatchSequenceEntry.",ex);
+            }
+        }
+
+        /**
+         * Retrieves the current state of the alignment.
+         * @return The state of the current sequence.
+         */
+        AlignmentState getAlignmentState() {
+            return alignmentState;
+        }
+
+        /**
+         * Increment the count of alignments having this particular state.
+         */
+        void increment() {
+            count++;
+        }
+    }
+}
+
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/alignment/bwa/java/AlignmentState.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/alignment/bwa/java/AlignmentState.java
new file mode 100644
index 0000000..4331b96
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/alignment/bwa/java/AlignmentState.java
@@ -0,0 +1,38 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.alignment.bwa.java;
+
+/**
+ * The state of a given base in the alignment.
+ *
+ * @author mhanna
+ * @version 0.1
+ */
+public enum AlignmentState {
+    /** Base aligns to a reference base (matching or mismatching); becomes CigarOperator.MATCH_OR_MISMATCH. */
+    MATCH_MISMATCH,
+    /** Base inserted relative to the reference; becomes CigarOperator.INSERTION. */
+    INSERTION,
+    /** Base deleted relative to the reference; becomes CigarOperator.DELETION. */
+    DELETION
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/alignment/bwa/java/BWAAlignment.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/alignment/bwa/java/BWAAlignment.java
new file mode 100644
index 0000000..34be0f8
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/alignment/bwa/java/BWAAlignment.java
@@ -0,0 +1,215 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.alignment.bwa.java;
+
+import htsjdk.samtools.Cigar;
+import org.broadinstitute.gatk.engine.alignment.Alignment;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+
+/**
+ * An alignment object to be used incrementally as the BWA aligner
+ * inspects the read.
+ *
+ * @author mhanna
+ * @version 0.1
+ */
+public class BWAAlignment extends Alignment implements Cloneable {
+    /**
+     * Track the number of alignments that have been created.
+     * NOTE(review): incremented without synchronization; not safe if alignments
+     * are created from multiple threads -- confirm single-threaded use.
+     */
+    private static long numCreated;
+
+    /**
+     * Which number alignment is this?  Used as a creation-order tiebreaker in compareTo().
+     */
+    private long creationNumber;
+
+    /**
+     * The aligner performing the alignments.
+     */
+    protected BWAJavaAligner aligner;
+
+    /**
+     * The sequence of matches/mismatches/insertions/deletions.
+     */
+    private AlignmentMatchSequence alignmentMatchSequence = new AlignmentMatchSequence();
+
+    /**
+     * Working variable.  How many bases have been matched at this point.
+     */
+    protected int position;
+
+    /**
+     * Working variable.  How many mismatches have been encountered at this point.
+     */
+    private int mismatches;
+
+    /**
+     * Number of gap opens in alignment.
+     */
+    private int gapOpens;
+
+    /**
+     * Number of gap extensions in alignment.
+     */
+    private int gapExtensions;
+
+    /**
+     * Working variable.  The lower bound of the alignment within the BWT.
+     */
+    protected long loBound;
+
+    /**
+     * Working variable.  The upper bound of the alignment within the BWT.
+     */
+    protected long hiBound;
+
+    /**
+     * Record where this alignment starts within the reference.
+     * @param position Alignment start coordinate.
+     */
+    protected void setAlignmentStart(long position) {
+        this.alignmentStart = position;
+    }
+
+    /**
+     * Record which strand this alignment maps to.
+     * @param negativeStrand true if the read aligns to the negative strand.
+     */
+    protected void setNegativeStrand(boolean negativeStrand) {
+        this.negativeStrand = negativeStrand;
+    }
+
+    /**
+     * Cache the score; recomputed by updateScore() whenever a penalty counter changes.
+     */
+    private int score;
+
+    /**
+     * Render the accumulated state sequence as a CIGAR, reversed for
+     * negative-strand alignments.
+     * @return CIGAR for this alignment.
+     */
+    public Cigar getCigar() {
+        return alignmentMatchSequence.convertToCigar(isNegativeStrand());
+    }
+
+    /**
+     * Gets the current state of this alignment (state of the last base viewed)..
+     * @return Current state of the alignment.
+     */
+    public AlignmentState getCurrentState() {
+        return alignmentMatchSequence.getCurrentState();
+    }
+
+    /**
+     * Adds the given state to the current alignment.
+     * @param state State to add to the given alignment.
+     */
+    public void addState( AlignmentState state ) {
+        alignmentMatchSequence.addNext(state);
+    }
+
+    /**
+     * Gets the BWA score of this alignment.
+     * @return BWA-style scores.  0 is best.
+     */
+    public int getScore() {
+        return score;
+    }
+
+    public int getMismatches() { return mismatches; }
+    public int getGapOpens() { return gapOpens; }
+    public int getGapExtensions() { return gapExtensions; }
+
+    /** Count one more mismatch and refresh the cached score. */
+    public void incrementMismatches() {
+        this.mismatches++;
+        updateScore();
+    }
+
+    /** Count one more gap open and refresh the cached score. */
+    public void incrementGapOpens() {
+        this.gapOpens++;
+        updateScore();
+    }
+
+    /** Count one more gap extension and refresh the cached score. */
+    public void incrementGapExtensions() {
+        this.gapExtensions++;
+        updateScore();
+    }
+
+    /**
+     * Updates the score based on new information about matches / mismatches.
+     */
+    private void updateScore() {
+        score = mismatches*aligner.MISMATCH_PENALTY + gapOpens*aligner.GAP_OPEN_PENALTY + gapExtensions*aligner.GAP_EXTENSION_PENALTY;
+    }
+
+    /**
+     * Create a new alignment with the given parent aligner.
+     * @param aligner Aligner being used.
+     */
+    public BWAAlignment( BWAJavaAligner aligner ) {
+        this.aligner = aligner;
+        this.creationNumber = numCreated++;
+    }
+
+    /**
+     * Clone the alignment.  The clone receives its own creation number and a
+     * deep copy of the match sequence.
+     * @return New instance of the alignment.
+     */
+    public BWAAlignment clone() {
+        BWAAlignment newAlignment = null;
+        try {
+            newAlignment = (BWAAlignment)super.clone();
+        }
+        catch( CloneNotSupportedException ex ) {
+            // Should be impossible (the class implements Cloneable); keep the cause.
+            throw new ReviewedGATKException("Unable to clone BWAAlignment.",ex);
+        }
+        newAlignment.creationNumber = numCreated++;
+        newAlignment.alignmentMatchSequence = alignmentMatchSequence.clone();
+
+        return newAlignment;
+    }
+
+    /**
+     * How many bases in the read match the given state.
+     * @param state State to test.
+     * @return number of bases which match that state.
+     */
+    public int getNumberOfBasesMatchingState(AlignmentState state) {
+        return alignmentMatchSequence.getNumberOfBasesMatchingState(state);
+    }
+
+    /**
+     * Compare this alignment to another alignment: primarily by score (lower is
+     * better, sorts first), ties broken by creation order -- note the inversion:
+     * the LATER-created of two equally-scored alignments sorts first.
+     * NOTE(review): this ordering is likely not consistent with equals(); confirm
+     * before using BWAAlignment in sorted sets or maps.
+     * @param rhs Other alignment to which to compare.
+     * @return negative if this sorts before other, 0 if equal, positive if after.
+     */
+    public int compareTo(Alignment rhs) {
+        BWAAlignment other = (BWAAlignment)rhs;
+
+        // If the scores are different, disambiguate using the score.
+        if(score != other.score)
+            return score > other.score ? 1 : -1;
+
+        // Otherwise, use the order in which the elements were created.
+        if(creationNumber != other.creationNumber)
+            return creationNumber > other.creationNumber ? -1 : 1;
+
+        return 0;
+    }
+
+    public String toString() {
+        return String.format("position: %d, strand: %b, state: %s, mismatches: %d, gap opens: %d, gap extensions: %d, loBound: %d, hiBound: %d, score: %d, creationNumber: %d", position, negativeStrand, alignmentMatchSequence.getCurrentState(), mismatches, gapOpens, gapExtensions, loBound, hiBound, getScore(), creationNumber);
+    }
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/alignment/bwa/java/BWAJavaAligner.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/alignment/bwa/java/BWAJavaAligner.java
new file mode 100644
index 0000000..40dfc03
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/alignment/bwa/java/BWAJavaAligner.java
@@ -0,0 +1,418 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.alignment.bwa.java;
+
+import htsjdk.samtools.SAMFileHeader;
+import htsjdk.samtools.SAMRecord;
+import org.broadinstitute.gatk.engine.alignment.Alignment;
+import org.broadinstitute.gatk.engine.alignment.bwa.BWAAligner;
+import org.broadinstitute.gatk.engine.alignment.bwa.BWAConfiguration;
+import org.broadinstitute.gatk.engine.alignment.reference.bwt.*;
+import org.broadinstitute.gatk.utils.BaseUtils;
+import org.broadinstitute.gatk.utils.Utils;
+
+import java.io.File;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.PriorityQueue;
+
+/**
+ * Create imperfect alignments from the read to the genome represented by the given BWT / suffix array. 
+ *
+ * @author mhanna
+ * @version 0.1
+ */
+public class BWAJavaAligner extends BWAAligner {
+    /**
+     * BWT in the forward direction.
+     */
+    private BWT forwardBWT;
+
+    /**
+     * BWT in the reverse direction.
+     */
+    private BWT reverseBWT;
+
+    /**
+     * Suffix array in the forward direction.
+     */
+    private SuffixArray forwardSuffixArray;
+
+    /**
+     * Suffix array in the reverse direction.
+     */
+    private SuffixArray reverseSuffixArray;
+
+    /**
+     * Maximum edit distance (-n option from original BWA).
+     */
+    private final int MAXIMUM_EDIT_DISTANCE = 4;
+
+    /**
+     * Maximum number of gap opens (-o option from original BWA).
+     */
+    private final int MAXIMUM_GAP_OPENS = 1;
+
+    /**
+     * Maximum number of gap extensions (-e option from original BWA).
+     */
+    private final int MAXIMUM_GAP_EXTENSIONS = 6;
+
+    /**
+     * Penalty for straight mismatches (-M option from original BWA).
+     */
+    public final int MISMATCH_PENALTY = 3;
+
+    /**
+     * Penalty for gap opens (-O option from original BWA).
+     */
+    public final int GAP_OPEN_PENALTY = 11;
+
+    /**
+     * Penalty for gap extensions (-E option from original BWA).
+     */
+    public final int GAP_EXTENSION_PENALTY = 4;
+
+    /**
+     * Skip the ends of indels.
+     */
+    public final int INDEL_END_SKIP = 5;
+
+    public BWAJavaAligner( File forwardBWTFile, File reverseBWTFile, File forwardSuffixArrayFile, File reverseSuffixArrayFile ) {
+        super(null,null);
+        forwardBWT = new BWTReader(forwardBWTFile).read();
+        reverseBWT = new BWTReader(reverseBWTFile).read();
+        forwardSuffixArray = new SuffixArrayReader(forwardSuffixArrayFile,forwardBWT).read();
+        reverseSuffixArray = new SuffixArrayReader(reverseSuffixArrayFile,reverseBWT).read();
+    }
+
+    /**
+     * Close this instance of the aligner.  Currently unsupported; always throws UnsupportedOperationException.
+     */
+    @Override
+    public void close()  {
+        throw new UnsupportedOperationException("BWA aligner can't currently be closed.");
+    }
+
+    /**
+     * Update the current parameters of this aligner.
+     * @param configuration New configuration to set.
+     */
+    public void updateConfiguration(BWAConfiguration configuration) {
+        throw new UnsupportedOperationException("Configuration of the BWA aligner can't currently be changed.");
+    }
+
+    /**
+     * Allow the aligner to choose one alignment randomly from the pile of best alignments.
+     * @param bases Bases to align.
+     * @return An alignment chosen from the pile of best alignments.
+     */
+    public Alignment getBestAlignment(final byte[] bases) { throw new UnsupportedOperationException("BWAJavaAligner does not yet support the standard Aligner interface."); }
+
+    /**
+     * Align the read to the reference.
+     * @param read Read to align.
+     * @param header Optional header to drop in place.
+     * @return A list of the alignments.
+     */
+    public SAMRecord align(final SAMRecord read, final SAMFileHeader header) { throw new UnsupportedOperationException("BWAJavaAligner does not yet support the standard Aligner interface."); }
+
+    /**
+     * Get an iterator of alignments, batched by mapping quality.
+     * @param bases List of bases.
+     * @return Iterator to alignments.
+     */
+    public Iterable<Alignment[]> getAllAlignments(final byte[] bases) { throw new UnsupportedOperationException("BWAJavaAligner does not yet support the standard Aligner interface."); }
+
+    /**
+     * Get an iterator of aligned reads, batched by mapping quality.
+     * @param read Read to align.
+     * @param newHeader Optional new header to use when aligning the read.  If present, it must be null.
+     * @return Iterator to alignments.
+     */
+    public Iterable<SAMRecord[]> alignAll(final SAMRecord read, final SAMFileHeader newHeader) { throw new UnsupportedOperationException("BWAJavaAligner does not yet support the standard Aligner interface."); }
+
+
+    public List<Alignment> align( SAMRecord read ) {
+        List<Alignment> successfulMatches = new ArrayList<Alignment>();
+
+        Byte[] uncomplementedBases = normalizeBases(read.getReadBases());
+        Byte[] complementedBases = normalizeBases(Utils.reverse(BaseUtils.simpleReverseComplement(read.getReadBases())));
+
+        List<LowerBound> forwardLowerBounds = LowerBound.create(uncomplementedBases,forwardBWT);
+        List<LowerBound> reverseLowerBounds = LowerBound.create(complementedBases,reverseBWT);
+
+        // Seed the best score with any score that won't overflow on comparison.
+        int bestScore = Integer.MAX_VALUE - MISMATCH_PENALTY;
+        int bestDiff = MAXIMUM_EDIT_DISTANCE+1;
+        int maxDiff = MAXIMUM_EDIT_DISTANCE;
+
+        PriorityQueue<BWAAlignment> alignments = new PriorityQueue<BWAAlignment>();
+
+        // Create a fictional initial alignment, with the position just off the end of the read, and the limits
+        // set as the entire BWT.
+        alignments.add(createSeedAlignment(reverseBWT));
+        alignments.add(createSeedAlignment(forwardBWT));
+
+        while(!alignments.isEmpty()) {
+            BWAAlignment alignment = alignments.remove();
+
+            // From bwtgap.c in the original BWA; if the score is worse than the best score plus the mismatch penalty, move on.
+            if( alignment.getScore() > bestScore + MISMATCH_PENALTY )
+                break;
+
+            Byte[] bases = alignment.isNegativeStrand() ? complementedBases : uncomplementedBases;
+            BWT bwt = alignment.isNegativeStrand() ? forwardBWT : reverseBWT;
+            List<LowerBound> lowerBounds = alignment.isNegativeStrand() ? reverseLowerBounds : forwardLowerBounds;
+
+            // if z < D(i) then return {}
+            int mismatches = maxDiff - alignment.getMismatches() - alignment.getGapOpens() - alignment.getGapExtensions();
+            if( alignment.position < lowerBounds.size()-1 && mismatches < lowerBounds.get(alignment.position+1).value )
+                continue;
+
+            if(mismatches == 0) {
+                exactMatch(alignment,bases,bwt);
+                if(alignment.loBound > alignment.hiBound)
+                    continue;
+            }
+
+            // Found a valid alignment; store it and move on.
+            if(alignment.position >= read.getReadLength()-1) {
+                for(long bwtIndex = alignment.loBound; bwtIndex <= alignment.hiBound; bwtIndex++) {
+                    BWAAlignment finalAlignment = alignment.clone();
+
+                    if( finalAlignment.isNegativeStrand() )
+                        finalAlignment.setAlignmentStart(forwardSuffixArray.get(bwtIndex) + 1);
+                    else {
+                        int sizeAlongReference = read.getReadLength() -
+                                finalAlignment.getNumberOfBasesMatchingState(AlignmentState.INSERTION) +
+                                finalAlignment.getNumberOfBasesMatchingState(AlignmentState.DELETION);
+                        finalAlignment.setAlignmentStart(reverseBWT.length() - reverseSuffixArray.get(bwtIndex) - sizeAlongReference + 1);
+                    }
+
+                    successfulMatches.add(finalAlignment);
+
+                    bestScore = Math.min(finalAlignment.getScore(),bestScore);
+                    bestDiff = Math.min(finalAlignment.getMismatches()+finalAlignment.getGapOpens()+finalAlignment.getGapExtensions(),bestDiff);
+                    maxDiff = bestDiff + 1;
+                }
+
+                continue;
+            }
+
+            //System.out.printf("Processing alignments; queue size = %d, alignment = %s, bound = %d, base = %s%n", alignments.size(), alignment, lowerBounds.get(alignment.position+1).value, alignment.position >= 0 ? (char)bases[alignment.position].byteValue() : "");
+            /*
+            System.out.printf("#1\t[%d,%d,%d,%c]\t[%d,%d,%d]\t[%d,%d]\t[%d,%d]%n",alignments.size(),
+                                                        alignment.negativeStrand?1:0,
+                                                        bases.length-alignment.position-1,
+                                                        alignment.getCurrentState().toString().charAt(0),
+                                                        alignment.getMismatches(),
+                                                        alignment.getGapOpens(),
+                                                        alignment.getGapExtensions(),
+                                                        lowerBounds.get(alignment.position+1).value,
+                                                        lowerBounds.get(alignment.position+1).width,
+                                                        alignment.loBound,
+                                                        alignment.hiBound);
+                                                        */
+
+            // Temporary -- look ahead to see if the next alignment is bounded.
+            boolean allowDifferences = mismatches > 0;
+            boolean allowMismatches = mismatches > 0;
+
+            if( allowDifferences &&
+                alignment.position+1 >= INDEL_END_SKIP-1+alignment.getGapOpens()+alignment.getGapExtensions() &&
+                read.getReadLength()-1-(alignment.position+1) >= INDEL_END_SKIP+alignment.getGapOpens()+alignment.getGapExtensions() ) {
+                if( alignment.getCurrentState() == AlignmentState.MATCH_MISMATCH ) {
+                    if( alignment.getGapOpens() < MAXIMUM_GAP_OPENS ) {
+                        // Add a potential insertion extension.
+                        BWAAlignment insertionAlignment = createInsertionAlignment(alignment);
+                        insertionAlignment.incrementGapOpens();
+                        alignments.add(insertionAlignment);
+
+                        // Add a potential deletion by marking a deletion and augmenting the position.
+                        List<BWAAlignment> deletionAlignments = createDeletionAlignments(bwt,alignment);
+                        for( BWAAlignment deletionAlignment: deletionAlignments )
+                            deletionAlignment.incrementGapOpens();
+                        alignments.addAll(deletionAlignments);
+                    }
+                }
+                else if( alignment.getCurrentState() == AlignmentState.INSERTION ) {
+                    if( alignment.getGapExtensions() < MAXIMUM_GAP_EXTENSIONS && mismatches > 0 ) {
+                        // Add a potential insertion extension.
+                        BWAAlignment insertionAlignment = createInsertionAlignment(alignment);
+                        insertionAlignment.incrementGapExtensions();
+                        alignments.add(insertionAlignment);
+                    }
+                }
+                else if( alignment.getCurrentState() == AlignmentState.DELETION ) {
+                    if( alignment.getGapExtensions() < MAXIMUM_GAP_EXTENSIONS && mismatches > 0 ) {
+                        // Add a potential deletion by marking a deletion and augmenting the position.
+                        List<BWAAlignment> deletionAlignments = createDeletionAlignments(bwt,alignment);
+                        for( BWAAlignment deletionAlignment: deletionAlignments )
+                            deletionAlignment.incrementGapExtensions();
+                        alignments.addAll(deletionAlignments);
+                    }
+                }
+            }
+
+            // Mismatches
+            alignments.addAll(createMatchedAlignments(bwt,alignment,bases,allowDifferences&&allowMismatches));
+        }
+
+        return successfulMatches;
+    }
+
+    /**
+     * Create a seed alignment to use as a starting point when traversing.
+     * @param bwt source BWT.
+     * @return Seed alignment.
+     */
+    private BWAAlignment createSeedAlignment(BWT bwt) {
+        BWAAlignment seed = new BWAAlignment(this);
+        seed.setNegativeStrand(bwt == forwardBWT);
+        seed.position = -1;
+        seed.loBound = 0;
+        seed.hiBound = bwt.length();
+        return seed;
+    }
+
+    /**
+     * Creates new alignments representing direct matches / mismatches.
+     * @param bwt Source BWT with which to work.
+     * @param alignment Alignment for the previous position.
+     * @param bases The bases in the read.
+     * @param allowMismatch Should mismatching bases be allowed?
+     * @return List of new alignments representing the valid extensions of this position; empty if none are valid.
+     */
+    private List<BWAAlignment> createMatchedAlignments( BWT bwt, BWAAlignment alignment, Byte[] bases, boolean allowMismatch ) {
+        List<BWAAlignment> newAlignments = new ArrayList<BWAAlignment>();
+
+        List<Byte> baseChoices = new ArrayList<Byte>();
+        Byte thisBase = bases[alignment.position+1];
+
+        if( allowMismatch )
+            baseChoices.addAll(Bases.allOf());
+        else
+            baseChoices.add(thisBase);
+
+        if( thisBase != null ) {
+            // Keep rotating the current base to the last position until we've hit the current base.
+            for( ;; ) {
+                baseChoices.add(baseChoices.remove(0));
+                if( thisBase.equals(baseChoices.get(baseChoices.size()-1)) )
+                    break;
+
+            }
+        }
+
+        for(byte base: baseChoices) {
+            BWAAlignment newAlignment = alignment.clone();
+
+            newAlignment.loBound = bwt.counts(base) + bwt.occurrences(base,alignment.loBound-1) + 1;
+            newAlignment.hiBound = bwt.counts(base) + bwt.occurrences(base,alignment.hiBound);
+
+            // If this alignment is invalid (empty BWT interval), skip it.
+            if( newAlignment.loBound > newAlignment.hiBound )
+                continue;
+
+            newAlignment.position++;
+            newAlignment.addState(AlignmentState.MATCH_MISMATCH);
+            if( bases[newAlignment.position] == null || base != bases[newAlignment.position] )
+                newAlignment.incrementMismatches();
+
+            newAlignments.add(newAlignment);
+        }
+
+        return newAlignments;
+    }
+
+    /**
+     * Create a new alignment representing an insertion at this point in the read.
+     * @param alignment Alignment from which to derive the insertion.
+     * @return New alignment reflecting the insertion.
+     */
+    private BWAAlignment createInsertionAlignment( BWAAlignment alignment ) {
+        // Add a potential insertion extension.
+        BWAAlignment newAlignment = alignment.clone();
+        newAlignment.position++;
+        newAlignment.addState(AlignmentState.INSERTION);
+        return newAlignment;
+    }
+
+    /**
+     * Create new alignments representing a deletion at this point in the read.
+     * @param bwt source BWT for inferring deletion info.
+     * @param alignment Alignment from which to derive the deletion.
+     * @return New alignments reflecting all possible deletions.
+     */
+    private List<BWAAlignment> createDeletionAlignments( BWT bwt, BWAAlignment alignment) {
+        List<BWAAlignment> newAlignments = new ArrayList<BWAAlignment>();
+        for(byte base: Bases.instance) {
+            BWAAlignment newAlignment = alignment.clone();
+
+            newAlignment.loBound = bwt.counts(base) + bwt.occurrences(base,alignment.loBound-1) + 1;
+            newAlignment.hiBound = bwt.counts(base) + bwt.occurrences(base,alignment.hiBound);
+
+            // If this alignment is invalid (empty BWT interval), skip it.
+            if( newAlignment.loBound > newAlignment.hiBound )
+                continue;
+
+            newAlignment.addState(AlignmentState.DELETION);
+
+            newAlignments.add(newAlignment);
+        }
+
+        return newAlignments;
+    }
+
+    /**
+     * Exactly match the given alignment against the given BWT.
+     * @param alignment Alignment to match.
+     * @param bases Bases to use.
+     * @param bwt BWT to use.
+     */
+    private void exactMatch( BWAAlignment alignment, Byte[] bases, BWT bwt ) {
+        while( ++alignment.position < bases.length ) {
+            byte base = bases[alignment.position];
+            alignment.loBound = bwt.counts(base) + bwt.occurrences(base,alignment.loBound-1) + 1;
+            alignment.hiBound = bwt.counts(base) + bwt.occurrences(base,alignment.hiBound);
+            if( alignment.loBound > alignment.hiBound )
+                return;
+        }
+    }
+
+    /**
+     * Make each base into A/C/G/T or null if unknown.
+     * @param bases Base string to normalize.
+     * @return Array of normalized bases.
+     */
+    private Byte[] normalizeBases( byte[] bases ) {
+        Byte[] normalBases = new Byte[bases.length];
+        for(int i = 0; i < bases.length; i++)
+            normalBases[i] = Bases.fromASCII(bases[i]);
+        return normalBases;
+    }
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/alignment/bwa/java/LowerBound.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/alignment/bwa/java/LowerBound.java
new file mode 100644
index 0000000..04e165e
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/alignment/bwa/java/LowerBound.java
@@ -0,0 +1,113 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.alignment.bwa.java;
+
+import org.broadinstitute.gatk.engine.alignment.reference.bwt.BWT;
+
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * At any point along the given read, what is a good lower bound for the
+ * total number of differences?
+ *
+ * @author mhanna
+ * @version 0.1
+ */
+public class LowerBound {
+    /**
+     * Lower bound of the suffix array.
+     */
+    public final long loIndex;
+
+    /**
+     * Upper bound of the suffix array.
+     */
+    public final long hiIndex;
+
+    /**
+     * Width of the bwt from loIndex -> hiIndex, inclusive.
+     */
+    public final long width;
+
+    /**
+     * The lower bound at the given point.
+     */
+    public final int value;
+
+    /**
+     * Create a new lower bound with the given value.
+     * @param loIndex The lower bound of the BWT.
+     * @param hiIndex The upper bound of the BWT.
+     * @param value Value for the lower bound at this site.
+     */
+    private LowerBound(long loIndex, long hiIndex, int value) {
+        this.loIndex = loIndex;
+        this.hiIndex = hiIndex;
+        this.width = hiIndex - loIndex + 1;
+        this.value = value;
+    }
+
+    /**
+     * Create a non-optimal bound according to the algorithm specified in Figure 3 of the BWA paper.
+     * @param bases Bases of the read to use when creating a new BWT.
+     * @param bwt BWT to check against.
+     * @return A list of lower bounds at every point in the reference.
+     *
+     */
+    public static List<LowerBound> create(Byte[] bases, BWT bwt) {
+        List<LowerBound> bounds = new ArrayList<LowerBound>();
+
+        long loIndex = 0, hiIndex = bwt.length();
+        int mismatches = 0;
+        for( int i = bases.length-1; i >= 0; i-- ) {
+            Byte base = bases[i];
+
+            // Ignore non-ACGT bases.
+            if( base != null ) {
+                loIndex = bwt.counts(base) + bwt.occurrences(base,loIndex-1) + 1;
+                hiIndex = bwt.counts(base) + bwt.occurrences(base,hiIndex);            
+            }
+
+            if( base == null || loIndex > hiIndex ) {
+                loIndex = 0;
+                hiIndex = bwt.length();
+                mismatches++;
+            }
+            bounds.add(0,new LowerBound(loIndex,hiIndex,mismatches));
+        }
+
+        return bounds;
+    }
+
+    /**
+     * Create a string representation of this bound.
+     * @return String version of this bound.
+     */
+    public String toString() {
+        return String.format("LowerBound: w = %d, value = %d",width,value);
+    }
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/alignment/package-info.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/alignment/package-info.java
new file mode 100644
index 0000000..0daa83e
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/alignment/package-info.java
@@ -0,0 +1,26 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.alignment;
\ No newline at end of file
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/alignment/reference/bwt/AMBWriter.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/alignment/reference/bwt/AMBWriter.java
new file mode 100644
index 0000000..90e6b97
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/alignment/reference/bwt/AMBWriter.java
@@ -0,0 +1,93 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.alignment.reference.bwt;
+
+import htsjdk.samtools.SAMSequenceDictionary;
+import htsjdk.samtools.SAMSequenceRecord;
+
+import java.io.File;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.io.PrintStream;
+
+/**
+ * Writes .amb files - a file indicating where 'holes' (indeterminate bases)
+ * exist in the contig.  Currently, only empty, placeholder AMBs are supported.
+ *
+ * @author mhanna
+ * @version 0.1
+ */
+public class AMBWriter {
+    /**
+     * Number of holes is fixed at zero.
+     */
+    private static final int NUM_HOLES = 0;
+
+    /**
+     * Output stream to which AMB data is written.
+     */
+    private final PrintStream out;
+
+    /**
+     * Create a new AMBWriter targeting the given file.
+     * @param file file into which AMB data should be written.
+     * @throws java.io.IOException if there is a problem opening the output file.
+     */
+    public AMBWriter(File file) throws IOException {
+        out = new PrintStream(file);
+    }
+
+    /**
+     * Create a new AMBWriter targeting the given OutputStream.
+     * @param stream Stream into which AMB data should be written.
+     */
+    public AMBWriter(OutputStream stream)  {
+        out = new PrintStream(stream);
+    }
+
+    /**
+     * Write the contents of the given dictionary into the AMB file.
+     * Assumes that there are no holes in the dictionary.
+     * @param dictionary Dictionary to write.
+     */
+    public void writeEmpty(SAMSequenceDictionary dictionary) {
+        long genomeLength = 0L;
+        for(SAMSequenceRecord sequence: dictionary.getSequences())
+            genomeLength += sequence.getSequenceLength();
+
+        int sequences = dictionary.getSequences().size();
+
+        // Write the header
+        out.printf("%d %d %d%n",genomeLength,sequences,NUM_HOLES);
+    }
+
+    /**
+     * Close the given output stream.
+     */
+    public void close() {
+        out.close();
+    }
+}
\ No newline at end of file
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/alignment/reference/bwt/ANNWriter.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/alignment/reference/bwt/ANNWriter.java
new file mode 100644
index 0000000..7619d41
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/alignment/reference/bwt/ANNWriter.java
@@ -0,0 +1,120 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.alignment.reference.bwt;
+
+import htsjdk.samtools.SAMSequenceDictionary;
+import htsjdk.samtools.SAMSequenceRecord;
+
+import java.io.File;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.io.PrintStream;
+
+/**
+ * Writes .ann files - an alternate sequence dictionary format
+ * used by BWA/C.  For best results, the input sequence dictionary
+ * should be created with Picard's CreateSequenceDictionary.jar,
+ * TRUNCATE_NAMES_AT_WHITESPACE=false.
+ *
+ * @author mhanna
+ * @version 0.1
+ */
+public class ANNWriter {
+    /**
+     * BWA uses a fixed seed of 11, written into every file.
+     */
+    private static final int BNS_SEED = 11;
+
+    /**
+     * A seemingly unused value that appears in every contig in the ANN.
+     */
+    private static final int GI = 0;
+
+    /**
+     * Output stream to which ANN data is written.
+     */
+    private final PrintStream out;
+
+    /**
+     * Create a new ANNWriter targeting the given file.
+     * @param file file into which ANN data should be written.
+     * @throws IOException if there is a problem opening the output file.
+     */
+    public ANNWriter(File file) throws IOException {
+        out = new PrintStream(file);
+    }
+
+    /**
+     * Create a new ANNWriter targeting the given OutputStream.
+     * @param stream Stream into which ANN data should be written.
+     */
+    public ANNWriter(OutputStream stream)  {
+        out = new PrintStream(stream);
+    }
+
+    /**
+     * Write the contents of the given dictionary into the ANN file.
+     * Assumes that no ambs (blocks of indeterminate base) are present in the dictionary.
+     * @param dictionary Dictionary to write.
+     */
+    public void write(SAMSequenceDictionary dictionary) {
+        long genomeLength = 0L;
+        for(SAMSequenceRecord sequence: dictionary.getSequences())
+            genomeLength += sequence.getSequenceLength();
+        
+        int sequences = dictionary.getSequences().size();
+
+        // Write the header
+        out.printf("%d %d %d%n",genomeLength,sequences,BNS_SEED);
+
+        for(SAMSequenceRecord sequence: dictionary.getSequences()) {
+            String fullSequenceName = sequence.getSequenceName();
+            String trimmedSequenceName = fullSequenceName;
+            String sequenceComment = "(null)";
+
+            long offset = 0;
+
+            // Separate the sequence name from the sequence comment, based on BWA's definition.
+            // BWA's definition appears to accept a zero-length contig name, so mimic that behavior.
+            if(fullSequenceName.indexOf(' ') >= 0) {
+                trimmedSequenceName = fullSequenceName.substring(0,fullSequenceName.indexOf(' '));
+                sequenceComment = fullSequenceName.substring(fullSequenceName.indexOf(' ')+1);
+            }
+
+            // Write the sequence GI (?), name, and comment.
+            out.printf("%d %s %s%n",GI,trimmedSequenceName,sequenceComment);
+            // Write the sequence offset, length, and ambs (currently fixed at 0).
+            out.printf("%d %d %d%n",offset,sequence.getSequenceLength(),0);
+        }
+    }
+
+    /**
+     * Close the given output stream.
+     */
+    public void close() {
+        out.close();
+    }
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/alignment/reference/bwt/BWT.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/alignment/reference/bwt/BWT.java
new file mode 100644
index 0000000..dedb71d
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/alignment/reference/bwt/BWT.java
@@ -0,0 +1,197 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.alignment.reference.bwt;
+
+import org.broadinstitute.gatk.engine.alignment.reference.packing.PackUtils;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+
+/**
+ * Represents the Burrows-Wheeler Transform of a reference sequence.
+ *
+ * @author mhanna
+ * @version 0.1
+ */
+public class BWT {
+    /**
+     * Write an occurrence table after every SEQUENCE_BLOCK_SIZE bases.
+     * For this implementation to behave correctly, SEQUENCE_BLOCK_SIZE % 8 == 0
+     */
+    public static final int SEQUENCE_BLOCK_SIZE = 128;
+
+    /**
+     * The inverse SA, used as a placeholder for determining where the special EOL character sits.
+     */
+    protected final long inverseSA0;
+
+    /**
+     * Cumulative counts for the entire BWT.
+     */
+    protected final Counts counts;
+
+    /**
+     * The individual sequence blocks, modelling how they appear on disk.
+     */
+    protected final SequenceBlock[] sequenceBlocks;
+
+    /**
+     * Creates a new BWT with the given inverse SA, counts, and sequence (in ASCII).
+     * @param inverseSA0 Inverse SA entry for the first element.  Will be missing from the BWT sequence.
+     * @param counts Cumulative count of bases, in A,C,G,T order.
+     * @param sequenceBlocks The full BWT sequence, sans the '$'.
+     */
+    public BWT( long inverseSA0, Counts counts, SequenceBlock[] sequenceBlocks ) {
+        this.inverseSA0 = inverseSA0;
+        this.counts = counts;
+        this.sequenceBlocks = sequenceBlocks;
+    }
+
+    /**
+     * Creates a new BWT with the given inverse SA, occurrences, and sequence (in ASCII).
+     * @param inverseSA0 Inverse SA entry for the first element.  Will be missing from the BWT sequence.
+     * @param counts Count of bases, in A,C,G,T order.
+     * @param sequence The full BWT sequence, sans the '$'.
+     */
+    public BWT( long inverseSA0, Counts counts, byte[] sequence ) {
+        this(inverseSA0,counts,generateSequenceBlocks(sequence));
+    }
+
+    /**
+     * Extract the full sequence from the list of blocks.
+     * @return The full BWT string as a byte array.
+     */
+    public byte[] getSequence() {
+        // NOTE(review): the int cast caps the BWT at Integer.MAX_VALUE bases -- confirm
+        // no caller builds a larger transform.
+        byte[] sequence = new byte[(int)counts.getTotal()];
+        for( SequenceBlock block: sequenceBlocks )
+            System.arraycopy(block.sequence,0,sequence,block.sequenceStart,block.sequenceLength);
+        return sequence;
+    }
+
+    /**
+     * Get the total counts of bases lexicographically smaller than the given base, for Ferragina and Manzini's search.
+     * @param base The base.
+     * @return Total counts for all bases lexicographically smaller than this base.
+     */
+    public long counts(byte base) {
+        return counts.getCumulative(base);
+    }
+
+    /**
+     * Counts the occurrences of the given base at or before the given index in the BWT
+     * (the "Occ" function of Ferragina and Manzini's search).
+     * @param base The base.
+     * @param index The position to search within the BWT.
+     * @return Number of occurrences of the base in the BWT, up to and including index.
+     */
+    public long occurrences(byte base,long index) {
+        SequenceBlock block = getSequenceBlock(index);
+        int position = getSequencePosition(index);
+        // Start from the block's precomputed occurrence snapshot, then scan the rest of the block.
+        long accumulator = block.occurrences.get(base);
+        for(int i = 0; i <= position; i++) {
+            if(base == block.sequence[i])
+                accumulator++;
+        }
+        return accumulator;
+    }
+
+    /**
+     * The number of bases in the BWT as a whole.
+     * @return Number of bases.
+     */
+    public long length() {
+        return counts.getTotal();
+    }
+
+    /**
+     * Create a new BWT from the given reference sequence.
+     * @param referenceSequence Sequence from which to derive the BWT.
+     * @return reference sequence-derived BWT.
+     */
+    public static BWT createFromReferenceSequence(byte[] referenceSequence) {
+        SuffixArray suffixArray = SuffixArray.createFromReferenceSequence(referenceSequence);
+
+        byte[] bwt = new byte[(int)suffixArray.length()-1];
+        int bwtIndex = 0;
+        for(long suffixArrayIndex = 0; suffixArrayIndex < suffixArray.length(); suffixArrayIndex++) {
+            // Skip the suffix starting at position 0: its BWT character would be the
+            // virtual '$' terminator, which is omitted from the stored sequence.
+            if(suffixArray.get(suffixArrayIndex) == 0)
+                continue;
+            bwt[bwtIndex++] = referenceSequence[(int)suffixArray.get(suffixArrayIndex)-1];
+        }
+
+        return new BWT(suffixArray.inverseSA0,suffixArray.occurrences,bwt);
+    }
+
+    /**
+     * Gets the base at a given position in the BWT.
+     * @param index The index to use.
+     * @return The base at that location.
+     * @throws ReviewedGATKException if index is the position occupied by the virtual EOL character.
+     */
+    protected byte getBase(long index) {
+        if(index == inverseSA0)
+            throw new ReviewedGATKException(String.format("Base at index %d does not have a text representation",index));
+
+        SequenceBlock block = getSequenceBlock(index);
+        int position = getSequencePosition(index);
+        return block.sequence[position];
+    }
+
+    private SequenceBlock getSequenceBlock(long index) {
+        // If the index is above the SA-1[0], remap it to the appropriate coordinate space.
+        // (The stored sequence omits the EOL entry, so indices past it shift down by one.)
+        if(index > inverseSA0) index--;
+        return sequenceBlocks[(int)(index/SEQUENCE_BLOCK_SIZE)];
+    }
+
+    private int getSequencePosition(long index) {
+        // If the index is above the SA-1[0], remap it to the appropriate coordinate space.
+        if(index > inverseSA0) index--;
+        return (int)(index%SEQUENCE_BLOCK_SIZE);
+    }
+
+    /**
+     * Create a set of sequence blocks from one long sequence.
+     * @param sequence Sequence from which to derive blocks.
+     * @return Array of sequence blocks containing data from the sequence.
+     */
+    private static SequenceBlock[] generateSequenceBlocks( byte[] sequence ) {
+        // Running occurrence counts; each block stores a snapshot taken at its start.
+        Counts occurrences = new Counts();
+
+        int numSequenceBlocks = PackUtils.numberOfPartitions(sequence.length,SEQUENCE_BLOCK_SIZE);
+        SequenceBlock[] sequenceBlocks = new SequenceBlock[numSequenceBlocks];
+
+        for( int block = 0; block < numSequenceBlocks; block++ ) {
+            int blockStart = block*SEQUENCE_BLOCK_SIZE;
+            // The final block may be shorter than SEQUENCE_BLOCK_SIZE.
+            int blockLength = Math.min(SEQUENCE_BLOCK_SIZE, sequence.length-blockStart);
+            byte[] subsequence = new byte[blockLength];
+
+            System.arraycopy(sequence,blockStart,subsequence,0,blockLength);
+
+            sequenceBlocks[block] = new SequenceBlock(blockStart,blockLength,occurrences.clone(),subsequence);
+
+            for( byte base: subsequence )
+                occurrences.increment(base);
+        }
+
+        return sequenceBlocks;
+    }
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/alignment/reference/bwt/BWTReader.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/alignment/reference/bwt/BWTReader.java
new file mode 100644
index 0000000..956f1e9
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/alignment/reference/bwt/BWTReader.java
@@ -0,0 +1,114 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.alignment.reference.bwt;
+
+import org.broadinstitute.gatk.engine.alignment.reference.packing.BasePackedInputStream;
+import org.broadinstitute.gatk.engine.alignment.reference.packing.PackUtils;
+import org.broadinstitute.gatk.engine.alignment.reference.packing.UnsignedIntPackedInputStream;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.nio.ByteOrder;
+/**
+ * Reads a BWT from a given file.
+ *
+ * @author mhanna
+ * @version 0.1
+ */
+public class BWTReader {
+    /**
+     * Input stream from which to read BWT data.
+     */
+    private FileInputStream inputStream;
+
+    /**
+     * Create a new BWT reader.
+     * @param inputFile File in which the BWT is stored.
+     * @throws ReviewedGATKException if the file cannot be opened.
+     */
+    public BWTReader( File inputFile ) {
+        try {
+            this.inputStream = new FileInputStream(inputFile);
+        }
+        catch( FileNotFoundException ex ) {
+            throw new ReviewedGATKException("Unable to open input file", ex);
+        }
+    }
+
+    /**
+     * Read a BWT from the input stream.
+     * Layout read (little-endian): inverse SA0, the cumulative base counts, then for
+     * each sequence block an occurrence table followed by the packed bases.
+     * @return The BWT stored in the input stream.
+     * @throws ReviewedGATKException if the stream cannot be read.
+     */
+    public BWT read() {
+        UnsignedIntPackedInputStream uintPackedInputStream = new UnsignedIntPackedInputStream(inputStream, ByteOrder.LITTLE_ENDIAN);
+        // NOTE(review): raw type on the left-hand side; could be BasePackedInputStream<Integer> to match the constructor.
+        BasePackedInputStream basePackedInputStream = new BasePackedInputStream<Integer>(Integer.class, inputStream, ByteOrder.LITTLE_ENDIAN);
+
+        long inverseSA0;
+        long[] count;
+        SequenceBlock[] sequenceBlocks;
+
+        try {
+            inverseSA0 = uintPackedInputStream.read();
+            count = new long[PackUtils.ALPHABET_SIZE];
+            uintPackedInputStream.read(count);
+
+            // The final cumulative count entry is the total number of bases in the BWT.
+            long bwtSize = count[PackUtils.ALPHABET_SIZE-1];
+            sequenceBlocks = new SequenceBlock[PackUtils.numberOfPartitions(bwtSize,BWT.SEQUENCE_BLOCK_SIZE)];
+            
+            for( int block = 0; block < sequenceBlocks.length; block++ ) {
+                int sequenceStart = block* BWT.SEQUENCE_BLOCK_SIZE;
+                // The final block may be shorter than SEQUENCE_BLOCK_SIZE.
+                int sequenceLength = (int)Math.min(BWT.SEQUENCE_BLOCK_SIZE,bwtSize-sequenceStart);
+
+                long[] occurrences = new long[PackUtils.ALPHABET_SIZE];
+                byte[] bwt = new byte[sequenceLength];
+
+                uintPackedInputStream.read(occurrences);
+                basePackedInputStream.read(bwt);
+
+                sequenceBlocks[block] = new SequenceBlock(sequenceStart,sequenceLength,new Counts(occurrences,false),bwt);
+            }
+        }
+        catch( IOException ex ) {
+            throw new ReviewedGATKException("Unable to read BWT from input stream.", ex);
+        }
+
+        return new BWT(inverseSA0, new Counts(count,true), sequenceBlocks);
+    }
+
+    /**
+     * Close the input stream.
+     * @throws ReviewedGATKException if the stream cannot be closed.
+     */
+    public void close() {
+        try {
+            inputStream.close();
+        }
+        catch( IOException ex ) {
+            throw new ReviewedGATKException("Unable to close input file", ex);
+        }
+    }
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/alignment/reference/bwt/BWTSupplementaryFileGenerator.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/alignment/reference/bwt/BWTSupplementaryFileGenerator.java
new file mode 100644
index 0000000..551cf1a
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/alignment/reference/bwt/BWTSupplementaryFileGenerator.java
@@ -0,0 +1,85 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.alignment.reference.bwt;
+
+import htsjdk.samtools.reference.ReferenceSequenceFile;
+import htsjdk.samtools.reference.ReferenceSequenceFileFactory;
+import htsjdk.samtools.SAMSequenceDictionary;
+
+import java.io.File;
+import java.io.IOException;
+
+/**
+ * Generate BWA supplementary files (.ann, .amb) from the command line.
+ *
+ * Expects three arguments: the input FASTA, the output file, and the output type (ANN or AMB).
+ *
+ * @author mhanna
+ * @version 0.1
+ */
+public class BWTSupplementaryFileGenerator {
+    /** Supported supplementary file types: .ann (annotations) and .amb (ambiguous-base blocks). */
+    enum SupplementaryFileType { ANN, AMB } 
+
+    public static void main(String[] args) throws IOException {
+        if(args.length < 3)
+            usage("Incorrect number of arguments supplied");
+
+        File fastaFile = new File(args[0]);
+        File outputFile = new File(args[1]);
+        SupplementaryFileType outputType = null;
+        try {
+            outputType = Enum.valueOf(SupplementaryFileType.class,args[2]);
+        }
+        catch(IllegalArgumentException ex) {
+            usage("Invalid output type: " + args[2]);
+        }
+
+        ReferenceSequenceFile sequenceFile = ReferenceSequenceFileFactory.getReferenceSequenceFile(fastaFile);
+        SAMSequenceDictionary dictionary = sequenceFile.getSequenceDictionary();
+
+        // usage() calls System.exit, so outputType is guaranteed non-null past this point.
+        switch(outputType) {
+            case ANN:
+                ANNWriter annWriter = new ANNWriter(outputFile);
+                annWriter.write(dictionary);
+                annWriter.close();
+                break;
+            case AMB:
+                AMBWriter ambWriter = new AMBWriter(outputFile);
+                ambWriter.writeEmpty(dictionary);
+                ambWriter.close();
+                break;
+            default:
+                usage("Unsupported output type: " + outputType);
+        }
+    }
+
+    /**
+     * Print the given message plus usage information, then exit with status 1.
+     * This method never returns normally.
+     */
+    private static void usage(String message) {
+        System.err.println(message);
+        System.err.println("Usage: BWTSupplementaryFileGenerator <fasta> <output file> <output type>");
+        System.exit(1);
+    }
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/alignment/reference/bwt/BWTWriter.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/alignment/reference/bwt/BWTWriter.java
new file mode 100644
index 0000000..636412d
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/alignment/reference/bwt/BWTWriter.java
@@ -0,0 +1,96 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.alignment.reference.bwt;
+
+import org.broadinstitute.gatk.engine.alignment.reference.packing.BasePackedOutputStream;
+import org.broadinstitute.gatk.engine.alignment.reference.packing.UnsignedIntPackedOutputStream;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+
+import java.io.*;
+import java.nio.ByteOrder;
+
+/**
+ * Writes an in-memory BWT to an outputstream.
+ *
+ * @author mhanna
+ * @version 0.1
+ */
+public class BWTWriter {
+    /**
+     * Input stream from which to read BWT data.
+     */
+    private final OutputStream outputStream;
+
+    /**
+     * Create a new BWT writer.
+     * @param outputFile File in which the BWT is stored.
+     */
+    public BWTWriter( File outputFile ) {
+        try {
+            this.outputStream = new BufferedOutputStream(new FileOutputStream(outputFile));
+        }
+        catch( FileNotFoundException ex ) {
+            throw new ReviewedGATKException("Unable to open output file", ex);
+        }
+    }
+
+    /**
+     * Write a BWT to the output stream.
+     * @param bwt Transform to be written to the output stream.
+     */
+    public void write( BWT bwt ) {
+        UnsignedIntPackedOutputStream intPackedOutputStream = new UnsignedIntPackedOutputStream(outputStream, ByteOrder.LITTLE_ENDIAN);
+        BasePackedOutputStream basePackedOutputStream = new BasePackedOutputStream<Integer>(Integer.class, outputStream, ByteOrder.LITTLE_ENDIAN);
+
+        try {
+            intPackedOutputStream.write(bwt.inverseSA0);
+            intPackedOutputStream.write(bwt.counts.toArray(true));
+
+            for( SequenceBlock block: bwt.sequenceBlocks ) {
+                intPackedOutputStream.write(block.occurrences.toArray(false));
+                basePackedOutputStream.write(block.sequence);
+            }
+
+            // The last block is the last set of counts in the structure.
+            intPackedOutputStream.write(bwt.counts.toArray(false));
+        }
+        catch( IOException ex ) {
+            throw new ReviewedGATKException("Unable to read BWT from input stream.", ex);
+        }
+    }
+
+    /**
+     * Close the input stream.
+     */
+    public void close() {
+        try {
+            outputStream.close();
+        }
+        catch( IOException ex ) {
+            throw new ReviewedGATKException("Unable to close input file", ex);
+        }
+    }
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/alignment/reference/bwt/Bases.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/alignment/reference/bwt/Bases.java
new file mode 100644
index 0000000..d6fa7bd
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/alignment/reference/bwt/Bases.java
@@ -0,0 +1,133 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.alignment.reference.bwt;
+
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+
+import java.util.*;
+
+/**
+ * Enhanced enum representation of a base (A, C, G, T) and its packed value.
+ *
+ * @author mhanna
+ * @version 0.1
+ */
+public class Bases implements Iterable<Byte>
+{
+    public static final byte A = 'A';
+    public static final byte C = 'C';
+    public static final byte G = 'G';
+    public static final byte T = 'T';
+
+    /** Shared instance; iterate over it to visit the bases in pack order. */
+    public static final Bases instance = new Bases();
+
+    /** All known bases, in pack order (A,C,G,T). */
+    private static final List<Byte> allBases;
+
+    /**
+     * Representation of the base broken down by packed value.
+     * A LinkedHashMap guarantees that iteration over values() follows insertion
+     * (pack) order; callers such as Counts iterate this class and rely on the
+     * A,C,G,T ordering, which a plain HashMap does not guarantee.
+     */
+    private static final Map<Integer,Byte> basesByPack = new LinkedHashMap<Integer,Byte>();
+
+    static {
+        List<Byte> bases = new ArrayList<Byte>();
+        bases.add(A);
+        bases.add(C);
+        bases.add(G);
+        bases.add(T);
+        allBases = Collections.unmodifiableList(bases);
+
+        for(int i = 0; i < allBases.size(); i++)
+            basesByPack.put(i,allBases.get(i));
+    }
+
+    /**
+     * Private constructor: this class is a stateless singleton accessed via {@link #instance}.
+     */
+    private Bases() {
+    }
+
+    /**
+     * Return all possible bases.
+     * @return Byte representation of all bases.
+     */
+    public static Collection<Byte> allOf() {
+        return allBases;
+    }
+
+    /**
+     * Gets the number of known bases.
+     * @return The number of known bases.
+     */
+    public static int size() {
+        return allBases.size();
+    }
+
+    /**
+     * Gets an iterator over all known base types, in pack order.
+     * @return Iterator over all known bases.
+     */
+    public Iterator<Byte> iterator() {
+        return basesByPack.values().iterator();
+    }
+
+    /**
+     * Get the given base from the packed representation.
+     * @param pack Packed representation.
+     * @return base.
+     */
+    public static byte fromPack( int pack ) { return basesByPack.get(pack); }
+
+    /**
+     * Convert the given base to its packed value.
+     * Linear scan over a four-entry map; effectively constant time.
+     * @param ascii ASCII representation of the base.
+     * @return Packed value.
+     * @throws ReviewedGATKException if the base is not one of A,C,G,T.
+     */
+    public static int toPack( byte ascii )
+    {
+        for( Map.Entry<Integer,Byte> entry: basesByPack.entrySet() ) {
+            if( entry.getValue().equals(ascii) )
+                return entry.getKey();
+        }
+        throw new ReviewedGATKException(String.format("Base %c is an invalid base to pack", (char)ascii));
+    }
+
+    /**
+     * Convert the ASCII representation of a base to its 'normalized' representation.
+     * @param base The base itself.
+     * @return The byte, if present.  Null if unknown.
+     */
+    public static Byte fromASCII( byte base ) {
+        Byte found = null;
+        for( Byte normalized: allBases ) {
+            if( normalized.equals(base) ) {
+                found = normalized;
+                break;
+            }
+        }
+        return found;
+    }
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/alignment/reference/bwt/Counts.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/alignment/reference/bwt/Counts.java
new file mode 100644
index 0000000..66ff7e3
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/alignment/reference/bwt/Counts.java
@@ -0,0 +1,176 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.alignment.reference.bwt;
+
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * Counts of how many bases of each type have been seen.
+ *
+ * Maintains both per-base counts and cumulative counts (the number of bases
+ * lexicographically smaller than a given base), keyed by ASCII base value.
+ *
+ * @author mhanna
+ * @version 0.1
+ */
+public class Counts implements Cloneable {
+    /**
+     * Internal representation of counts, broken down by ASCII value.
+     */
+    private Map<Byte,Long> counts = new HashMap<Byte,Long>();
+
+    /**
+     * Internal representation of cumulative counts, broken down by ASCII value.
+     */
+    private Map<Byte,Long> cumulativeCounts = new HashMap<Byte,Long>();
+
+    /**
+     * Create an empty Counts object with values A=0,C=0,G=0,T=0.
+     */
+    public Counts()
+    {
+        for(byte base: Bases.instance) {
+            counts.put(base,0L);
+            cumulativeCounts.put(base,0L);
+        }
+    }
+
+    /**
+     * Create a counts data structure with the given initial values.
+     * @param data Count data, broken down by base.
+     * @param cumulative Whether the counts are cumulative, (count_G=numA+numC+numG,for example).
+     */
+    public Counts( long[] data, boolean cumulative ) {
+        if(cumulative) {
+            // Cumulative input: successive differences recover the per-base counts,
+            // and the running prior is exactly the cumulative (smaller-than) count.
+            long priorCount = 0;
+            for(byte base: Bases.instance) {
+                long count = data[Bases.toPack(base)];
+                counts.put(base,count-priorCount);
+                cumulativeCounts.put(base,priorCount);
+                priorCount = count;
+            }
+        }
+        else {
+            long priorCount = 0;
+            for(byte base: Bases.instance) {
+                long count = data[Bases.toPack(base)];
+                counts.put(base,count);
+                cumulativeCounts.put(base,priorCount);
+                priorCount += count;
+            }
+        }
+    }
+
+    /**
+     * Convert to an array for persistence.
+     * @param cumulative Use a cumulative representation.
+     * @return Array of count values.
+     */
+    public long[] toArray(boolean cumulative) {
+        long[] countArray = new long[counts.size()];
+        if(cumulative) {
+            // Cumulative form: skip the first base (its cumulative count is always 0)
+            // and append the grand total as the final entry.
+            int index = 0;
+            boolean first = true;
+            for(byte base: Bases.instance) {
+                if(first) {
+                    first = false;
+                    continue;
+                }
+                countArray[index++] = getCumulative(base);
+            }
+            countArray[countArray.length-1] = getTotal();
+        }
+        else {
+            int index = 0;
+            for(byte base: Bases.instance)
+                countArray[index++] = counts.get(base);
+        }
+        return countArray;
+    }
+
+    /**
+     * Create a unique copy of the current object.
+     * @return A duplicate of this object.
+     */
+    public Counts clone() {
+        Counts other;
+        try {
+            other = (Counts)super.clone();
+        }
+        catch(CloneNotSupportedException ex) {
+            throw new ReviewedGATKException("Unable to clone counts object", ex);
+        }
+        // Deep-copy the maps so the clone's counters evolve independently of this object's.
+        other.counts = new HashMap<Byte,Long>(counts);
+        other.cumulativeCounts = new HashMap<Byte,Long>(cumulativeCounts);
+        return other;
+    }
+
+    /**
+     * Increment the number of bases seen at the given location.
+     * @param base Base to increment.
+     */
+    public void increment(byte base) {
+        counts.put(base,counts.get(base)+1);
+        // Every base lexicographically greater than the incremented base gains one
+        // to its cumulative (smaller-than) count.
+        boolean increment = false;
+        for(byte cumulative: Bases.instance) {
+            if(increment) cumulativeCounts.put(cumulative,cumulativeCounts.get(cumulative)+1);
+            increment |= (cumulative == base);
+        }
+    }
+
+    /**
+     * Gets a count of the number of bases seen at a given location.
+     * Note that counts in this case are not cumulative (counts for A,C,G,T
+     * are independent).
+     * @param base Base for which to query counts.
+     * @return Number of bases of this type seen.
+     */
+    public long get(byte base) {
+        return counts.get(base);
+    }
+
+    /**
+     * Gets a count of the number of bases seen before this base.
+     * Note that counts in this case are cumulative.
+     * @param base Base for which to query counts.
+     * @return Number of bases of this type seen.
+     */
+    public long getCumulative(byte base) {
+        return cumulativeCounts.get(base);
+    }
+
+    /**
+     * How many total bases are represented by this count structure?
+     * @return Total bases represented.
+     */
+    public long getTotal() {
+        // Accumulate in a long: per-base counts are longs and a full genome can exceed
+        // Integer.MAX_VALUE bases; the previous int accumulator could silently overflow.
+        long accumulator = 0;
+        for(byte base: Bases.instance) {
+            accumulator += get(base);
+        }
+        return accumulator;
+    }
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/alignment/reference/bwt/CreateBWTFromReference.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/alignment/reference/bwt/CreateBWTFromReference.java
new file mode 100644
index 0000000..b9775b2
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/alignment/reference/bwt/CreateBWTFromReference.java
@@ -0,0 +1,200 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.alignment.reference.bwt;
+
+import htsjdk.samtools.reference.ReferenceSequence;
+import htsjdk.samtools.reference.ReferenceSequenceFile;
+import htsjdk.samtools.reference.ReferenceSequenceFileFactory;
+import org.broadinstitute.gatk.engine.alignment.reference.packing.PackUtils;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+
+import java.io.File;
+import java.io.IOException;
+
+/**
+ * Command-line utility that builds a BWT and suffix array (plus their
+ * reverse-sequence counterparts) from the first sequence of a FASTA
+ * reference, and validates the results against pre-existing .bwt and .sa
+ * files alongside the input.
+ *
+ * @author mhanna
+ * @version 0.1
+ */
+public class CreateBWTFromReference {
+    /**
+     * Load the bases of the first sequence in the given FASTA file.
+     * Later sequences in the file are ignored.
+     * @param inputFile FASTA file to read.
+     * @return Bases of the first sequence.
+     */
+    private byte[] loadReference( File inputFile ) {
+        // Read in the first sequence in the input file
+        ReferenceSequenceFile reference = ReferenceSequenceFileFactory.getReferenceSequenceFile(inputFile);
+        ReferenceSequence sequence = reference.nextSequence();
+        return sequence.getBases();
+    }
+
+    /**
+     * Load the bases of the first sequence in the given FASTA file, reversed.
+     * NOTE(review): this relies on getBases() returning the live internal
+     * array so that PackUtils.reverse mutates it in place; if getBases()
+     * ever returns a copy, the reversal is lost -- confirm against htsjdk.
+     * @param inputFile FASTA file to read.
+     * @return Reversed bases of the first sequence.
+     */
+    private byte[] loadReverseReference( File inputFile ) {
+        ReferenceSequenceFile reference = ReferenceSequenceFileFactory.getReferenceSequenceFile(inputFile);
+        ReferenceSequence sequence = reference.nextSequence();
+        PackUtils.reverse(sequence.getBases());
+        return sequence.getBases();
+    }
+
+    /**
+     * Tally how many times each base occurs in the sequence.
+     * @param sequence Bases to count.
+     * @return Per-base occurrence counts.
+     */
+    private Counts countOccurrences( byte[] sequence ) {
+        Counts occurrences = new Counts();
+        for( byte base: sequence )
+            occurrences.increment(base);
+        return occurrences;
+    }
+
+    /**
+     * Build the raw suffix array entries for the given sequence.
+     * @param sequence Reference bases.
+     * @return Suffix array entries, one per suffix (including the empty suffix).
+     */
+    private long[] createSuffixArray( byte[] sequence ) {
+        return SuffixArray.createFromReferenceSequence(sequence).sequence;
+    }
+
+    /**
+     * Compute the inverse suffix array: inverse[sa[i]] = i.
+     * @param suffixArray Suffix array to invert.
+     * @return The inverse permutation.
+     */
+    private long[] invertSuffixArray( long[] suffixArray ) {
+        long[] inverseSuffixArray = new long[suffixArray.length];
+        for( int i = 0; i < suffixArray.length; i++ )
+            inverseSuffixArray[(int)suffixArray[i]] = i;
+        return inverseSuffixArray;
+    }
+
+    /**
+     * Build a compressed suffix array: csa[i] = inverse[sa[i]+1].
+     * NOTE(review): only referenced from the commented-out diagnostics in
+     * main() (which pass long[] arguments and would not compile as-is), and
+     * sa[i]+1 can index one past the end of inverseSuffixArray -- confirm
+     * both points before resurrecting this code.
+     */
+    private long[] createCompressedSuffixArray( int[] suffixArray, int[] inverseSuffixArray ) {
+        long[] compressedSuffixArray = new long[suffixArray.length];
+        compressedSuffixArray[0] = inverseSuffixArray[0];
+        for( int i = 1; i < suffixArray.length; i++ )
+            compressedSuffixArray[i] = inverseSuffixArray[suffixArray[i]+1];
+        return compressedSuffixArray;
+    }
+
+    /**
+     * Invert a compressed suffix array: inverse[csa[i]] = i.
+     * NOTE(review): also only used by the commented-out diagnostics in main().
+     */
+    private long[] createInversedCompressedSuffixArray( int[] compressedSuffixArray ) {
+        long[] inverseCompressedSuffixArray = new long[compressedSuffixArray.length];
+        for( int i = 0; i < compressedSuffixArray.length; i++ )
+            inverseCompressedSuffixArray[compressedSuffixArray[i]] = i;
+        return inverseCompressedSuffixArray;
+    }
+
+    /**
+     * Entry point: builds BWT/suffix-array structures for a reference,
+     * writes the forward and reverse BWTs, then cross-checks the in-memory
+     * results against pre-existing "input.bwt" and "input.sa" files.
+     * Throws ReviewedGATKException on any mismatch.
+     * @param argv input FASTA plus four output file names; see usage message.
+     * @throws IOException if any of the files cannot be read or written.
+     */
+    public static void main( String argv[] ) throws IOException {
+        if( argv.length != 5 ) {
+            System.out.println("USAGE: CreateBWTFromReference <input>.fasta <output bwt> <output rbwt> <output sa> <output rsa>");
+            return;
+        }
+
+        String inputFileName = argv[0];
+        File inputFile = new File(inputFileName);
+
+        String bwtFileName = argv[1];
+        File bwtFile = new File(bwtFileName);
+
+        String rbwtFileName = argv[2];
+        File rbwtFile = new File(rbwtFileName);
+
+        // NOTE(review): saFile/rsaFile are only consumed by the commented-out
+        // SuffixArrayWriter section below; they are currently unused.
+        String saFileName = argv[3];
+        File saFile = new File(saFileName);
+
+        String rsaFileName = argv[4];
+        File rsaFile = new File(rsaFileName);
+
+        CreateBWTFromReference creator = new CreateBWTFromReference();
+
+        byte[] sequence = creator.loadReference(inputFile);
+        byte[] reverseSequence = creator.loadReverseReference(inputFile);
+
+        // Count the occurrences of each given base.
+        Counts occurrences = creator.countOccurrences(sequence);
+        // NOTE(review): the label says "Occurrences" but getCumulative()
+        // reports cumulative counts (bases seen *before* each base) -- confirm
+        // which diagnostic was intended.
+        System.out.printf("Occurrences: a=%d, c=%d, g=%d, t=%d%n",occurrences.getCumulative(Bases.A),
+                                                                  occurrences.getCumulative(Bases.C),
+                                                                  occurrences.getCumulative(Bases.G),
+                                                                  occurrences.getCumulative(Bases.T));
+
+        // Generate the suffix array and print diagnostics.
+        long[] suffixArrayData = creator.createSuffixArray(sequence);
+        long[] reverseSuffixArrayData = creator.createSuffixArray(reverseSequence);
+
+        // Invert the suffix array and print diagnostics.
+        long[] inverseSuffixArray = creator.invertSuffixArray(suffixArrayData);
+        long[] reverseInverseSuffixArray = creator.invertSuffixArray(reverseSuffixArrayData);
+
+        SuffixArray suffixArray = new SuffixArray( inverseSuffixArray[0], occurrences, suffixArrayData );
+        SuffixArray reverseSuffixArray = new SuffixArray( reverseInverseSuffixArray[0], occurrences, reverseSuffixArrayData );
+
+        /*
+        // Create the data structure for the compressed suffix array and print diagnostics.
+        int[] compressedSuffixArray = creator.createCompressedSuffixArray(suffixArray.sequence,inverseSuffixArray);
+        int reconstructedInverseSA = compressedSuffixArray[0];
+        for( int i = 0; i < 8; i++ ) {
+            System.out.printf("compressedSuffixArray[%d] = %d (SA-1[%d] = %d)%n", i, compressedSuffixArray[i], i, reconstructedInverseSA);
+            reconstructedInverseSA = compressedSuffixArray[reconstructedInverseSA];
+        }
+
+        // Create the data structure for the inverse compressed suffix array and print diagnostics.
+        int[] inverseCompressedSuffixArray = creator.createInversedCompressedSuffixArray(compressedSuffixArray);
+        for( int i = 0; i < 8; i++ ) {
+            System.out.printf("inverseCompressedSuffixArray[%d] = %d%n", i, inverseCompressedSuffixArray[i]);
+        }
+        */
+
+        // Create the BWT.
+        BWT bwt = BWT.createFromReferenceSequence(sequence);
+        BWT reverseBWT = BWT.createFromReferenceSequence(reverseSequence);
+
+        byte[] bwtSequence = bwt.getSequence();
+        System.out.printf("BWT: %s... (length = %d)%n", new String(bwtSequence,0,80),bwt.length());
+
+        BWTWriter bwtWriter = new BWTWriter(bwtFile);
+        bwtWriter.write(bwt);
+        bwtWriter.close();
+
+        BWTWriter reverseBWTWriter = new BWTWriter(rbwtFile);
+        reverseBWTWriter.write(reverseBWT);
+        reverseBWTWriter.close();
+
+        /*
+        SuffixArrayWriter saWriter = new SuffixArrayWriter(saFile);
+        saWriter.write(suffixArray);
+        saWriter.close();
+
+        SuffixArrayWriter reverseSAWriter = new SuffixArrayWriter(rsaFile);
+        reverseSAWriter.write(reverseSuffixArray);
+        reverseSAWriter.close();
+        */
+
+        // NOTE(review): existingBWTReader and existingSuffixArrayReader below
+        // are never closed; acceptable for a one-shot tool's process exit, but
+        // worth closing explicitly.
+        File existingBWTFile = new File(inputFileName+".bwt");
+        BWTReader existingBWTReader = new BWTReader(existingBWTFile);
+        BWT existingBWT = existingBWTReader.read();
+
+        byte[] existingBWTSequence = existingBWT.getSequence();
+        System.out.printf("Existing BWT: %s... (length = %d)%n",new String(existingBWTSequence,0,80),existingBWT.length());
+
+        // Byte-for-byte comparison of the freshly built BWT with the existing one.
+        for( int i = 0; i < bwt.length(); i++ ) {
+            if( bwtSequence[i] != existingBWTSequence[i] )
+                throw new ReviewedGATKException("BWT mismatch at " + i);
+        }
+
+        File existingSAFile = new File(inputFileName+".sa");
+        SuffixArrayReader existingSuffixArrayReader = new SuffixArrayReader(existingSAFile,existingBWT);
+        SuffixArray existingSuffixArray = existingSuffixArrayReader.read();
+
+        // Entry-by-entry comparison of the freshly built suffix array.
+        for(int i = 0; i < suffixArray.length(); i++) {
+            if( i % 10000 == 0 )
+                System.out.printf("Validating suffix array entry %d%n", i);
+            if( suffixArray.get(i) != existingSuffixArray.get(i) )
+                throw new ReviewedGATKException(String.format("Suffix array mismatch at %d; SA is %d; should be %d",i,existingSuffixArray.get(i),suffixArray.get(i)));
+        }
+    }
+
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/alignment/reference/bwt/SequenceBlock.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/alignment/reference/bwt/SequenceBlock.java
new file mode 100644
index 0000000..f4d1302
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/alignment/reference/bwt/SequenceBlock.java
@@ -0,0 +1,66 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.alignment.reference.bwt;
+
+/**
+ * Models a block of bases within the BWT.  A plain immutable holder: all
+ * fields are final.  Note that the occurrence counts and the sequence array
+ * are stored without defensive copies -- presumably for performance -- so
+ * callers must not mutate them after construction (TODO confirm).
+ */
+public class SequenceBlock {
+    /**
+     * Start position of this sequence within the BWT.
+     */
+    public final int sequenceStart;
+
+    /**
+     * Length of this sequence within the BWT.
+     */
+    public final int sequenceLength;
+
+
+    /**
+     * Occurrences of each letter up to this sequence block.
+     */
+    public final Counts occurrences;
+
+    /**
+     * Sequence for this segment.
+     */
+    public final byte[] sequence;
+
+    /**
+     * Create a new block within this BWT.
+     * @param sequenceStart Starting position of this sequence within the BWT.
+     * @param sequenceLength Length of this sequence.
+     * @param occurrences How many of each base has been seen before this sequence began.
+     * @param sequence The actual sequence from the BWT.
+     */
+    public SequenceBlock( int sequenceStart, int sequenceLength, Counts occurrences, byte[] sequence ) {
+        this.sequenceStart = sequenceStart;
+        this.sequenceLength = sequenceLength;
+        this.occurrences = occurrences;
+        this.sequence = sequence;
+    }
+}
\ No newline at end of file
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/alignment/reference/bwt/SuffixArray.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/alignment/reference/bwt/SuffixArray.java
new file mode 100644
index 0000000..f052ee3
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/alignment/reference/bwt/SuffixArray.java
@@ -0,0 +1,183 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.alignment.reference.bwt;
+
+import htsjdk.samtools.util.StringUtil;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+
+import java.util.Comparator;
+import java.util.TreeSet;
+
+/**
+ * An in-memory representation of a suffix array, optionally stored at a
+ * sampling interval with missing entries reconstructed from a BWT.
+ *
+ * @author mhanna
+ * @version 0.1
+ */
+public class SuffixArray {
+    /**
+     * Index within this suffix array of the first suffix (the inverse SA
+     * entry for position 0).
+     */
+    public final long inverseSA0;
+
+    /**
+     * Cumulative number of occurrences of A,C,G,T, as supplied at construction.
+     */
+    public final Counts occurrences;
+
+    /**
+     * The elements of the sequence actually stored in memory.
+     */
+    protected final long[] sequence;
+
+    /**
+     * How often are individual elements in the sequence actually stored
+     * in memory, as opposed to being calculated on the fly?
+     */
+    protected final int sequenceInterval;
+
+    /**
+     * The BWT used to calculate missing portions of the sequence.
+     */
+    protected final BWT bwt;
+
+    /**
+     * Creates a fully-materialized suffix array: every entry is stored
+     * (interval 1), so no BWT is required.
+     * @param inverseSA0 Inverse SA entry for the first element.
+     * @param occurrences Cumulative number of occurrences of A,C,G,T, in order.
+     * @param sequence The full suffix array.
+     */
+    public SuffixArray(long inverseSA0, Counts occurrences, long[] sequence) {
+        this(inverseSA0,occurrences,sequence,1,null);
+    }
+
+    /**
+     * Creates a new sequence array with the given inverse SA, occurrences, and values.
+     * @param inverseSA0 Inverse SA entry for the first element.
+     * @param occurrences Cumulative number of occurrences of A,C,G,T, in order.
+     * @param sequence The stored suffix array entries (every sequenceInterval-th one).
+     * @param sequenceInterval How frequently is the sequence interval stored.
+     * @param bwt bwt used to infer the remaining entries in the BWT.
+     * @throws ReviewedGATKException if a sampling interval other than 1 is
+     *         requested without a BWT to reconstruct the missing entries.
+     */
+    public SuffixArray(long inverseSA0, Counts occurrences, long[] sequence, int sequenceInterval, BWT bwt) {
+        this.inverseSA0 = inverseSA0;
+        this.occurrences = occurrences;
+        this.sequence = sequence;
+        this.sequenceInterval = sequenceInterval;
+        this.bwt = bwt;
+
+        if(sequenceInterval != 1 && bwt == null)
+            throw new ReviewedGATKException("A BWT must be provided if the sequence interval is not 1");
+    }
+
+    /**
+     * Retrieves the length of the sequence array.  When a BWT is present the
+     * logical length is bwt.length()+1 (the BWT omits the empty suffix).
+     * @return Length of the suffix array.
+     */
+    public long length() {
+        if( bwt != null )
+            return bwt.length()+1;
+        else
+            return sequence.length;
+    }
+
+    /**
+     * Get the suffix array value at a given index.  For indices that are not
+     * stored (index % sequenceInterval != 0), walks backwards through the BWT
+     * (LF-mapping) until a stored entry is reached, then compensates by the
+     * number of steps taken.
+     * @param index Index at which to retrieve the suffix array value.
+     * @return The suffix array value at that entry.
+     */
+    public long get(long index) {
+        int iterations = 0;
+        while(index%sequenceInterval != 0) {
+            // The inverseSA0 ('$') doesn't have a usable ASCII representation; it must be treated as a special case.
+            if(index == inverseSA0)
+                index = 0;
+            else {
+                byte base = bwt.getBase(index);
+                index = bwt.counts(base) + bwt.occurrences(base,index);
+            }
+            iterations++;
+        }
+        return (sequence[(int)(index/sequenceInterval)]+iterations) % length();
+    }
+
+    /**
+     * Create a suffix array from a given reference sequence.
+     * @param sequence The reference sequence to use when building the suffix array.
+     * @return a constructed suffix array.
+     */
+    public static SuffixArray createFromReferenceSequence(byte[] sequence) {
+        // The builder for the suffix array.  Use an integer in this case because
+        // Java arrays can only hold an integer.
+        TreeSet<Integer> suffixArrayBuilder = new TreeSet<Integer>(new SuffixArrayComparator(sequence));
+
+        Counts occurrences = new Counts();
+        for( byte base: sequence )
+            occurrences.increment(base);
+
+        // Build out the suffix array using a custom comparator.  The index
+        // sequence.length represents the empty suffix; all suffixes have
+        // distinct lengths, so none collapse in the TreeSet.
+        for( int i = 0; i <= sequence.length; i++ )
+            suffixArrayBuilder.add(i);
+
+        // Copy the suffix array into an array.
+        long[] suffixArray = new long[suffixArrayBuilder.size()];
+        int i = 0;
+        for( Integer element: suffixArrayBuilder )
+            suffixArray[i++] = element;
+
+        // Find the first element in the inverse suffix array.
+        long inverseSA0 = -1;
+        for(i = 0; i < suffixArray.length; i++) {
+            if(suffixArray[i] == 0) {
+                // Exactly one entry holds 0; no need to scan further.
+                inverseSA0 = i;
+                break;
+            }
+        }
+        if(inverseSA0 < 0)
+            throw new ReviewedGATKException("Unable to find first inverse SA entry in generated suffix array.");
+
+        return new SuffixArray(inverseSA0,occurrences,suffixArray);
+    }
+
+    /**
+     * Compares two suffixes of the given sequence.  Orders whichever suffix
+     * appears first in lexicographic order ahead of the other.
+     */
+    private static class SuffixArrayComparator implements Comparator<Integer> {
+        /**
+         * The data source for all suffix comparisons.
+         */
+        private final String sequence;
+
+        /**
+         * Create a new comparator.
+         * @param sequence Reference sequence to use as basis for comparison.
+         */
+        public SuffixArrayComparator( byte[] sequence ) {
+            // Processing the suffix array tends to be easier as a string.
+            this.sequence = StringUtil.bytesToString(sequence);
+        }
+
+        /**
+         * Compare the two given suffixes.  Criteria for comparison is the
+         * lexicographic order of the two substrings sequence[lhs:], sequence[rhs:].
+         * Compares characters in place rather than materializing the two
+         * suffixes with substring(), which copied O(n) characters per
+         * comparison and caused quadratic allocation while building the array.
+         * @param lhs Left-hand side of comparison.
+         * @param rhs Right-hand side of comparison.
+         * @return How the suffixes starting at lhs, rhs compare.
+         */
+        public int compare( Integer lhs, Integer rhs ) {
+            final int length = sequence.length();
+            int i = lhs, j = rhs;
+            // Walk the two suffixes in lockstep; the first differing character decides.
+            while( i < length && j < length ) {
+                final char lhsBase = sequence.charAt(i);
+                final char rhsBase = sequence.charAt(j);
+                if( lhsBase != rhsBase )
+                    return lhsBase - rhsBase;
+                i++;
+                j++;
+            }
+            // One suffix is a prefix of the other; the shorter suffix sorts
+            // first, matching String.compareTo's length tie-break.
+            return (length - lhs) - (length - rhs);
+        }
+    }
+
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/alignment/reference/bwt/SuffixArrayReader.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/alignment/reference/bwt/SuffixArrayReader.java
new file mode 100644
index 0000000..9bc95e1
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/alignment/reference/bwt/SuffixArrayReader.java
@@ -0,0 +1,110 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.alignment.reference.bwt;
+
+import org.broadinstitute.gatk.engine.alignment.reference.packing.PackUtils;
+import org.broadinstitute.gatk.engine.alignment.reference.packing.UnsignedIntPackedInputStream;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.nio.ByteOrder;
+
+/**
+ * A reader for suffix arrays in permanent storage.
+ *
+ * @author mhanna
+ * @version 0.1
+ */
+public class SuffixArrayReader {
+    /**
+     * Input stream from which to read suffix array data.
+     */
+    private FileInputStream inputStream;
+
+    /**
+     * BWT to use to fill in missing data.
+     */
+    private BWT bwt;
+
+    /**
+     * Create a new suffix array reader.
+     * @param inputFile File in which the suffix array is stored.
+     * @param bwt BWT to use when filling in missing data.
+     * @throws ReviewedGATKException if the file cannot be opened.
+     */
+    public SuffixArrayReader(File inputFile, BWT bwt) {
+        try {
+            this.inputStream = new FileInputStream(inputFile);
+        }
+        catch( FileNotFoundException ex ) {
+            // Include the offending path so the failure is actionable.
+            throw new ReviewedGATKException("Unable to open input file: " + inputFile, ex);
+        }
+        this.bwt = bwt;
+    }
+
+    /**
+     * Read a suffix array from the input stream.
+     * Layout (little-endian unsigned ints): inverse SA[0], the per-base
+     * occurrence table, the storage interval, then the stored SA entries.
+     * NOTE(review): SuffixArrayWriter emits an extra "length" word between
+     * the interval and the entries which this reader never consumes --
+     * confirm which on-disk format (bwa's .sa?) this is meant to match.
+     * @return The suffix array stored in the input stream.
+     * @throws ReviewedGATKException if the stream cannot be read.
+     */
+    public SuffixArray read() {
+        UnsignedIntPackedInputStream uintPackedInputStream = new UnsignedIntPackedInputStream(inputStream, ByteOrder.LITTLE_ENDIAN);
+
+        long inverseSA0;
+        long[] occurrences;
+        long[] suffixArray;
+        int suffixArrayInterval;
+
+        try {
+            inverseSA0 = uintPackedInputStream.read();
+            occurrences = new long[PackUtils.ALPHABET_SIZE];
+            uintPackedInputStream.read(occurrences);
+            // Throw away the suffix array size in bytes and use the occurrences table directly.
+            suffixArrayInterval = (int)uintPackedInputStream.read();
+            // Number of stored entries = ceil(total bases / interval).
+            suffixArray = new long[(int)((occurrences[occurrences.length-1]+suffixArrayInterval-1)/suffixArrayInterval)];
+            uintPackedInputStream.read(suffixArray);
+        }
+        catch( IOException ex ) {
+            throw new ReviewedGATKException("Unable to read suffix array from input stream.", ex);
+        }
+
+        return new SuffixArray(inverseSA0, new Counts(occurrences,true), suffixArray, suffixArrayInterval, bwt);
+    }
+
+
+    /**
+     * Close the input stream.
+     * @throws ReviewedGATKException if the stream cannot be closed.
+     */
+    public void close() {
+        try {
+            inputStream.close();
+        }
+        catch( IOException ex ) {
+            throw new ReviewedGATKException("Unable to close input file", ex);
+        }
+    }
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/alignment/reference/bwt/SuffixArrayWriter.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/alignment/reference/bwt/SuffixArrayWriter.java
new file mode 100644
index 0000000..b98da8c
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/alignment/reference/bwt/SuffixArrayWriter.java
@@ -0,0 +1,92 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.alignment.reference.bwt;
+
+import org.broadinstitute.gatk.engine.alignment.reference.packing.UnsignedIntPackedOutputStream;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+
+import java.io.*;
+import java.nio.ByteOrder;
+
+/**
+ * A writer for suffix arrays in permanent storage.
+ *
+ * @author mhanna
+ * @version 0.1
+ */
+public class SuffixArrayWriter {
+    /**
+     * Output stream to which to write suffix array data.
+     */
+    private OutputStream outputStream;
+
+    /**
+     * Create a new suffix array writer.
+     * @param outputFile File in which the suffix array is to be stored.
+     * @throws ReviewedGATKException if the file cannot be opened for writing.
+     */
+    public SuffixArrayWriter( File outputFile ) {
+        try {
+            this.outputStream = new BufferedOutputStream(new FileOutputStream(outputFile));
+        }
+        catch( FileNotFoundException ex ) {
+            // Include the offending path so the failure is actionable.
+            throw new ReviewedGATKException("Unable to open output file: " + outputFile, ex);
+        }
+    }
+
+    /**
+     * Write a suffix array to the output stream.
+     * Layout (little-endian unsigned ints): inverse SA[0], the occurrence
+     * table, the storage interval (always 1 here), the number of entries,
+     * then every stored entry except the leading one.
+     * @param suffixArray suffix array to write.
+     * @throws ReviewedGATKException if the stream cannot be written.
+     */
+    public void write(SuffixArray suffixArray) {
+        UnsignedIntPackedOutputStream uintPackedOutputStream = new UnsignedIntPackedOutputStream(outputStream, ByteOrder.LITTLE_ENDIAN);
+
+        try {
+            uintPackedOutputStream.write(suffixArray.inverseSA0);
+            uintPackedOutputStream.write(suffixArray.occurrences.toArray(true));
+            // How frequently the suffix array entry is placed.
+            uintPackedOutputStream.write(1);
+            // Length of the suffix array.
+            uintPackedOutputStream.write(suffixArray.length()-1);
+            // The leading entry (index 0) is deliberately not written.
+            uintPackedOutputStream.write(suffixArray.sequence,1,suffixArray.sequence.length-1);
+        }
+        catch( IOException ex ) {
+            throw new ReviewedGATKException("Unable to write suffix array to output stream.", ex);
+        }
+    }
+
+
+    /**
+     * Flush and close the output stream.
+     * @throws ReviewedGATKException if the stream cannot be closed.
+     */
+    public void close() {
+        try {
+            outputStream.close();
+        }
+        catch( IOException ex ) {
+            throw new ReviewedGATKException("Unable to close output file", ex);
+        }
+    }
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/alignment/reference/packing/BasePackedInputStream.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/alignment/reference/packing/BasePackedInputStream.java
new file mode 100644
index 0000000..f94c351
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/alignment/reference/packing/BasePackedInputStream.java
@@ -0,0 +1,120 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.alignment.reference.packing;
+
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+import java.nio.channels.FileChannel;
+
+/**
+ * Reads bases packed two bits apiece into 32-bit words from an input file.
+ *
+ * @author mhanna
+ * @version 0.1
+ */
+public class BasePackedInputStream<T> {
+    /**
+     * Type of object to unpack.  Only Integer (32-bit packed words) is supported.
+     */
+    private final Class<T> type;
+
+    /**
+     * Ultimate source for packed bases.
+     */
+    private final FileInputStream targetInputStream;
+
+    /**
+     * Channel source for packed bases.
+     */
+    private final FileChannel targetInputChannel;
+
+    /**
+     * Byte order in which the packed words are stored on disk.
+     */
+    private final ByteOrder byteOrder;
+
+    /**
+     * How many bases are in a given packed word.
+     */
+    private final int basesPerPackedWord = PackUtils.bitsInType(Integer.class)/PackUtils.BITS_PER_BASE;
+
+    /**
+     * How many bytes in an integer?
+     */
+    private final int bytesPerInteger = PackUtils.bitsInType(Integer.class)/PackUtils.BITS_PER_BYTE;
+
+
+    /**
+     * Open the given file for packed-base reading.
+     * @param type Type of packed word; must be Integer.class.
+     * @param inputFile File to read.
+     * @param byteOrder Byte order of the packed words on disk.
+     * @throws FileNotFoundException if the file cannot be opened.
+     */
+    public BasePackedInputStream( Class<T> type, File inputFile, ByteOrder byteOrder ) throws FileNotFoundException {
+        this(type,new FileInputStream(inputFile),byteOrder);
+    }
+
+    /**
+     * Wrap the given stream for packed-base reading.
+     * @param type Type of packed word; must be Integer.class.
+     * @param inputStream Stream to read; its channel is used for I/O.
+     * @param byteOrder Byte order of the packed words on disk.
+     * @throws ReviewedGATKException if a type other than Integer is requested.
+     */
+    public BasePackedInputStream( Class<T> type, FileInputStream inputStream, ByteOrder byteOrder ) {
+        if( type != Integer.class )
+            throw new ReviewedGATKException("Only bases packed into 32-bit words are currently supported by this input stream.  Type specified: " + type.getName());
+        this.type = type;
+        this.targetInputStream = inputStream;
+        this.targetInputChannel = inputStream.getChannel();
+        this.byteOrder = byteOrder;
+    }
+
+    /**
+     * Read the entire contents of the input stream.
+     * @param bwt array into which bases should be read.
+     * @throws IOException if an I/O error occurs.
+     */
+    public void read(byte[] bwt) throws IOException {
+        read(bwt,0,bwt.length);
+    }
+
+    /**
+     * Read the next <code>length</code> bases into the bwt array, starting at the given offset.
+     * @param bwt array holding the given data.
+     * @param offset target position in the bases array into which bytes should be written.
+     * @param length number of bases to read from the stream.
+     * @throws IOException if an I/O error occurs.
+     */
+    public void read(byte[] bwt, int offset, int length) throws IOException {
+        // Size the read buffer by the number of bases requested, not by the
+        // capacity of the target array: the previous bwt.length-based sizing
+        // over-read the channel whenever length < bwt.length.
+        int bufferWidth = ((length+basesPerPackedWord-1)/basesPerPackedWord)*bytesPerInteger;
+        ByteBuffer buffer = ByteBuffer.allocate(bufferWidth).order(byteOrder);
+        targetInputChannel.read(buffer);
+        // NOTE(review): advancing the channel by buffer.remaining() (the
+        // unfilled tail after a short read) skips bytes the read never
+        // delivered -- confirm this is the intended EOF handling.
+        targetInputChannel.position(targetInputChannel.position()+buffer.remaining());
+        buffer.flip();
+
+        int packedWord = 0;
+        int i = 0;
+        while(i < length) {
+            // Pull in the next 32-bit word every basesPerPackedWord bases.
+            if(i % basesPerPackedWord == 0) packedWord = buffer.getInt();
+            // Bases are packed most-significant-first within each word.
+            int position = basesPerPackedWord - i%basesPerPackedWord - 1;
+            bwt[offset+i++] = PackUtils.unpackBase((byte)((packedWord >> position*PackUtils.BITS_PER_BASE) & 0x3));
+        }
+    }
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/alignment/reference/packing/BasePackedOutputStream.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/alignment/reference/packing/BasePackedOutputStream.java
new file mode 100644
index 0000000..e14ce62
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/alignment/reference/packing/BasePackedOutputStream.java
@@ -0,0 +1,165 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.alignment.reference.packing;
+
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+
+import java.io.*;
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+
+/**
+ * A general-purpose stream for writing 2-bit-packed bases (A/C/G/T) into
+ * words of a caller-selected integral type (Integer or Byte).
+ *
+ * @author mhanna
+ * @version 0.1
+ */
+public class BasePackedOutputStream<T> {
+    /**
+     * Type of object to pack.
+     */
+    private final Class<T> type;
+
+    /**
+     * How many bases can be stored in the given data structure?
+     */
+    private final int basesPerType;
+
+    /**
+     * Ultimate target for the packed bases.
+     */
+    private final OutputStream targetOutputStream;
+
+    /**
+     * A fixed-size buffer for word-packed data.
+     */
+    private final ByteBuffer buffer;
+
+    /**
+     * Write packed bases to the given output file, buffered.
+     * @param type Type of data to pack bases into.
+     * @param outputFile File to which packed bases should be written.
+     * @param byteOrder Switch between big endian / little endian when reading / writing files.
+     * @throws FileNotFoundException if the output file cannot be opened.
+     */
+    public BasePackedOutputStream( Class<T> type, File outputFile, ByteOrder byteOrder ) throws FileNotFoundException {
+        this(type,new BufferedOutputStream(new FileOutputStream(outputFile)),byteOrder);
+    }
+
+    /**
+     * Write packed bases to the given output stream.
+     * @param type Type of data to pack bases into.
+     * @param outputStream Output stream to which to write packed bases.
+     * @param byteOrder Switch between big endian / little endian when reading / writing files.
+     */
+    public BasePackedOutputStream( Class<T> type, OutputStream outputStream, ByteOrder byteOrder) {
+        this.targetOutputStream = outputStream;
+        this.type = type;
+        basesPerType = PackUtils.bitsInType(type)/PackUtils.BITS_PER_BASE;
+        // One packed word: basesPerType bases at BITS_PER_BASE bits each,
+        // converted to bytes.  (The previous basesPerType/ALPHABET_SIZE
+        // expression gave the same value only because ALPHABET_SIZE happens
+        // to equal BITS_PER_BYTE/BITS_PER_BASE; this form states the intent.)
+        this.buffer = ByteBuffer.allocate(basesPerType*PackUtils.BITS_PER_BASE/PackUtils.BITS_PER_BYTE).order(byteOrder);
+    }
+
+    /**
+     * Writes the given base to the output stream.  Will write only this base; no packing will be performed.
+     * @param base ASCII value of the single base to write.
+     * @throws IOException if an I/O error occurs.
+     */
+    public void write( int base ) throws IOException {
+        write( new byte[] { (byte)base } );
+    }
+
+    /**
+     * Writes an array of bases to the target output stream.
+     * @param bases List of bases to write.
+     * @throws IOException if an I/O error occurs.
+     */
+    public void write( byte[] bases ) throws IOException {
+        write(bases,0,bases.length);
+    }
+
+    /**
+     * Writes a subset of the array of bases to the output stream.  Note that
+     * any partially-filled final word is flushed at the end of each call, so
+     * packing does not continue across successive write() invocations.
+     * @param bases List of bases to write.
+     * @param offset site at which to start writing.
+     * @param length number of bases to write.
+     * @throws IOException if an I/O error occurs.
+     */
+    public void write( byte[] bases, int offset, int length ) throws IOException {
+        int packedBases = 0;
+        int positionInPack = 0;
+
+        for( int base = offset; base < offset+length; base++ ) {
+            packedBases = packBase(bases[base], packedBases, positionInPack);
+
+            // Increment the packed counter.  If all possible bases have been squeezed into this word, write it out.
+            positionInPack = ++positionInPack % basesPerType;
+            if( positionInPack == 0 ) {
+                writePackedBases(packedBases);
+                packedBases = 0;
+            }
+        }
+
+        // Flush any partially-packed trailing word.
+        if( positionInPack > 0 )
+            writePackedBases(packedBases);
+    }
+
+    /**
+     * Flush the contents of the OutputStream to disk.
+     * @throws IOException if an I/O error occurs.
+     */
+    public void flush() throws IOException {
+        targetOutputStream.flush();
+    }
+
+    /**
+     * Closes the given output stream.
+     * @throws IOException if an I/O error occurs.
+     */
+    public void close() throws IOException {
+        targetOutputStream.close();
+    }
+
+    /**
+     * Pack the given base into the basepack.
+     * @param base The base to pack.
+     * @param basePack Target for the pack operation.
+     * @param position Position within the pack to which to add the base.
+     * @return The packed integer.
+     */
+    private int packBase( byte base, int basePack, int position ) {
+        // Bases are stored most-significant-first; shift by BITS_PER_BASE per
+        // slot rather than a hardcoded 2, for consistency with PackUtils.
+        basePack |= (PackUtils.packBase(base) << PackUtils.BITS_PER_BASE*(basesPerType-position-1));
+        return basePack;
+    }
+
+    /**
+     * Write the given packed base structure to the output file.
+     * @param packedBases Packed bases to write.
+     * @throws IOException on error writing to the file.
+     */
+    private void writePackedBases(int packedBases) throws IOException {
+        buffer.rewind();
+        if( type == Integer.class )
+            buffer.putInt(packedBases);
+        else if( type == Byte.class )
+            buffer.put((byte)packedBases);
+        else
+            throw new ReviewedGATKException("Cannot pack bases into type " + type.getName());
+        targetOutputStream.write(buffer.array());
+    }
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/alignment/reference/packing/CreatePACFromReference.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/alignment/reference/packing/CreatePACFromReference.java
new file mode 100644
index 0000000..057342c
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/alignment/reference/packing/CreatePACFromReference.java
@@ -0,0 +1,64 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.alignment.reference.packing;
+
+import htsjdk.samtools.reference.ReferenceSequence;
+import htsjdk.samtools.reference.ReferenceSequenceFile;
+import htsjdk.samtools.reference.ReferenceSequenceFileFactory;
+
+import java.io.File;
+import java.io.IOException;
+
+/**
+ * Generate a .PAC file from a given reference.
+ *
+ * @author hanna
+ * @version 0.1
+ */
+
+public class CreatePACFromReference {
+    public static void main( String argv[] ) throws IOException {
+        if( argv.length != 3 ) {
+            System.out.println("USAGE: CreatePACFromReference <input>.fasta <output pac> <output rpac>");
+            return;
+        }
+
+        // Read in the first sequence in the input file
+        String inputFileName = argv[0];
+        File inputFile = new File(inputFileName);
+        ReferenceSequenceFile reference = ReferenceSequenceFileFactory.getReferenceSequenceFile(inputFile);
+        ReferenceSequence sequence = reference.nextSequence();
+
+        // Target file for output
+        PackUtils.writeReferenceSequence( new File(argv[1]), sequence.getBases() );
+
+        // Reverse the bases in the reference
+        PackUtils.reverse(sequence.getBases());
+
+        // Target file for output
+        PackUtils.writeReferenceSequence( new File(argv[2]), sequence.getBases() );
+    }
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/alignment/reference/packing/PackUtils.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/alignment/reference/packing/PackUtils.java
new file mode 100644
index 0000000..4b64c03
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/alignment/reference/packing/PackUtils.java
@@ -0,0 +1,160 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.alignment.reference.packing;
+
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.nio.ByteOrder;
+
+/**
+ * Utilities designed for packing / unpacking bases.
+ *
+ * @author mhanna
+ * @version 0.1
+ */
+public class PackUtils {
+    /**
+     * How many possible bases can be encoded?
+     */
+    public static final int ALPHABET_SIZE = 4;
+
+    /**
+     * How many bits does it take to store a single base?
+     */
+    public static final int BITS_PER_BASE = (int)(Math.log(ALPHABET_SIZE)/Math.log(2));
+
+    /**
+     * How many bits fit into a single byte?
+     */
+    public static final int BITS_PER_BYTE = 8;
+
+    /**
+     * Writes a reference sequence to a PAC file.  The output stream is closed
+     * even if the write fails partway through.
+     * @param outputFile Filename for the PAC file.
+     * @param referenceSequence Reference sequence to write.
+     * @throws IOException If there's a problem writing to the output file.
+     */
+    public static void writeReferenceSequence( File outputFile, byte[] referenceSequence ) throws IOException {
+        OutputStream outputStream = new FileOutputStream(outputFile);
+        try {
+            BasePackedOutputStream<Byte> basePackedOutputStream = new BasePackedOutputStream<Byte>(Byte.class, outputStream, ByteOrder.BIG_ENDIAN);
+            basePackedOutputStream.write(referenceSequence);
+
+            // Trailer byte: number of bases occupying the final, possibly
+            // partial, packed byte.
+            outputStream.write(referenceSequence.length%PackUtils.ALPHABET_SIZE);
+        }
+        finally {
+            outputStream.close();
+        }
+    }
+
+
+    /**
+     * How many bits can a given type hold?
+     * @param type Type to test; must declare integral MAX_VALUE / MIN_VALUE fields (e.g. Byte, Integer).
+     * @return Number of bits that the given type can hold.
+     */
+    public static int bitsInType( Class<?> type ) {
+        try {
+            long typeSize = type.getField("MAX_VALUE").getLong(null) - type.getField("MIN_VALUE").getLong(null)+1;
+            long intTypeSize = (long)Integer.MAX_VALUE - (long)Integer.MIN_VALUE + 1;
+            if( typeSize > intTypeSize )
+                throw new ReviewedGATKException("Cannot determine number of bits available in type: " + type.getName());
+            // typeSize is an exact power of two, so counting trailing zeros
+            // yields the bit width exactly, avoiding any reliance on
+            // floating-point log() rounding.
+            return Long.numberOfTrailingZeros(typeSize);
+        }
+        catch( NoSuchFieldException ex ) {
+            throw new ReviewedGATKException("Cannot determine number of bits available in type: " + type.getName(),ex);
+        }
+        catch( IllegalAccessException ex ) {
+            throw new ReviewedGATKException("Cannot determine number of bits available in type: " + type.getName(),ex);
+        }
+    }
+
+    /**
+     * Gets the two-bit representation of a base.  A=00b, C=01b, G=10b, T=11b.
+     * NOTE(review): lowercase and ambiguous bases (e.g. 'N') are rejected with
+     * an exception -- confirm inputs are uppercased ACGT upstream.
+     * @param base ASCII value for the base to pack.
+     * @return A byte from 0-3 indicating the base's packed value.
+     */
+    public static byte packBase(byte base) {
+        switch( base ) {
+            case 'A':
+                return 0;
+            case 'C':
+                return 1;
+            case 'G':
+                return 2;
+            case 'T':
+                return 3;
+            default:
+                throw new ReviewedGATKException("Unknown base type: " + base);
+        }
+    }
+
+    /**
+     * Converts a two-bit representation of a base into an ASCII representation of a base.
+     * @param pack Byte from 0-3 indicating which base is represented.
+     * @return An ASCII value representing the packed base.
+     */
+    public static byte unpackBase(byte pack) {
+        switch( pack ) {
+            case 0:
+                return 'A';
+            case 1:
+                return 'C';
+            case 2:
+                return 'G';
+            case 3:
+                return 'T';
+            default:
+                throw new ReviewedGATKException("Unknown pack type: " + pack);
+        }
+    }
+
+    /**
+     * Reverses an unpacked sequence of bases in place.
+     * @param bases bases to reverse.
+     */
+    public static void reverse( byte[] bases ) {
+        for( int i = 0, j = bases.length-1; i < j; i++, j-- ) {
+            byte temp = bases[j];
+            bases[j] = bases[i];
+            bases[i] = temp;
+        }
+    }
+
+    /**
+     * Given a structure of size <code>size</code> that should be split
+     * into <code>partitionSize</code> partitions, how many partitions should
+     * be created?  Size of last partition will be &lt;= partitionSize.
+     * @param size Total size of the data structure.
+     * @param partitionSize Size of an individual partition.
+     * @return Number of partitions that would be created.
+     */
+    public static int numberOfPartitions( long size, long partitionSize ) {
+        return (int)((size+partitionSize-1) / partitionSize);
+    }
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/alignment/reference/packing/UnsignedIntPackedInputStream.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/alignment/reference/packing/UnsignedIntPackedInputStream.java
new file mode 100644
index 0000000..5eed18d
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/alignment/reference/packing/UnsignedIntPackedInputStream.java
@@ -0,0 +1,129 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.alignment.reference.packing;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+import java.nio.channels.FileChannel;
+
+/**
+ * Reads a stream of unsigned 32-bit integers from a binary file, returning
+ * each as a Java long (Java has no unsigned int type; values are masked to
+ * 0..2^32-1).
+ *
+ * @author mhanna
+ * @version 0.1
+ */
+public class UnsignedIntPackedInputStream {
+    /**
+     * Ultimate source for the packed integer data.
+     */
+    private final FileInputStream targetInputStream;
+
+    /**
+     * Target channel from which to pull file data.
+     */
+    private final FileChannel targetInputChannel;
+
+    /**
+     * The byte order in which integer input data appears.
+     */
+    private final ByteOrder byteOrder;
+
+    /**
+     * How many bytes are required to store an integer?
+     */
+    private final int bytesPerInteger = PackUtils.bitsInType(Integer.class)/PackUtils.BITS_PER_BYTE;
+
+    /**
+     * Create a new UnsignedIntPackedInputStream, reading from the given file.
+     * @param inputFile target input file.
+     * @param byteOrder Endianness to use when reading the list of integers.
+     * @throws java.io.IOException if an I/O error occurs.
+     */
+    public UnsignedIntPackedInputStream(File inputFile, ByteOrder byteOrder) throws IOException {
+        this(new FileInputStream(inputFile),byteOrder);
+    }
+
+    /**
+     * Read ints from the given InputStream.
+     * @param inputStream Input stream from which to read ints.
+     * @param byteOrder Endianness to use when reading the list of integers.
+     */
+    public UnsignedIntPackedInputStream(FileInputStream inputStream, ByteOrder byteOrder) {
+        this.targetInputStream = inputStream;
+        this.targetInputChannel = inputStream.getChannel();
+        this.byteOrder = byteOrder;
+    }
+
+    /**
+     * Read a single datum from the input stream.
+     * @return The next input datum in the stream, as an unsigned 32-bit value.
+     * @throws IOException if an I/O error occurs.
+     */
+    public long read() throws IOException {
+        long[] data = new long[1];
+        read(data);
+        return data[0];
+    }
+
+    /**
+     * Fill the entire array with data from the input stream.
+     * @param data placeholder for input data.
+     * @throws IOException if an I/O error occurs.
+     */
+    public void read( long[] data ) throws IOException {
+        read( data, 0, data.length );
+    }
+
+    /**
+     * Read the data from the input stream, starting at the given offset.
+     * @param data placeholder for input data.
+     * @param offset place in the array to start reading in data.
+     * @param length number of ints to read in. 
+     * @throws IOException if an I/O error occurs.
+     */
+    public void read( long[] data, int offset, int length ) throws IOException {
+        ByteBuffer readBuffer = ByteBuffer.allocate(bytesPerInteger*length).order(byteOrder);
+
+        // Positional read: does not advance the channel, so advance it
+        // manually afterwards by the number of bytes actually read
+        // (remaining() after flip() equals the bytes pulled into the buffer).
+        targetInputChannel.read(readBuffer,targetInputChannel.position());
+        readBuffer.flip();
+        targetInputChannel.position(targetInputChannel.position()+readBuffer.remaining());
+
+        int i = 0;
+        while(i < length)
+            // Mask reinterprets the signed int as an unsigned 32-bit value.
+            data[offset+i++] = readBuffer.getInt() & 0xFFFFFFFFL;
+    }
+
+    /**
+     * Closes the underlying input stream.
+     * @throws IOException if an I/O error occurs.
+     */
+    public void close() throws IOException {
+        targetInputStream.close();
+    }
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/alignment/reference/packing/UnsignedIntPackedOutputStream.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/alignment/reference/packing/UnsignedIntPackedOutputStream.java
new file mode 100644
index 0000000..b9d94b6
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/alignment/reference/packing/UnsignedIntPackedOutputStream.java
@@ -0,0 +1,121 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.alignment.reference.packing;
+
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+
+/**
+ * Writes an list of integers to the output file.
+ *
+ * @author mhanna
+ * @version 0.1
+ */
+public class UnsignedIntPackedOutputStream {
+    /**
+     * Ultimate target for the occurrence array.
+     */
+    private final OutputStream targetOutputStream;
+
+    /**
+     * A fixed-size buffer for int-packed data.
+     */
+    private final ByteBuffer buffer;
+
+    /**
+     * Create a new PackedIntOutputStream, writing to the given target file.
+     * @param outputFile target output file.
+     * @param byteOrder Endianness to use when writing a list of integers.
+     * @throws IOException if an I/O error occurs.
+     */
+    public UnsignedIntPackedOutputStream(File outputFile, ByteOrder byteOrder) throws IOException {
+        this(new FileOutputStream(outputFile),byteOrder);
+    }
+
+    /**
+     * Write packed ints to the given OutputStream.
+     * @param outputStream Output stream to which to write packed ints.
+     * @param byteOrder Endianness to use when writing a list of integers.
+     */
+    public UnsignedIntPackedOutputStream(OutputStream outputStream, ByteOrder byteOrder) {
+        this.targetOutputStream = outputStream;
+        buffer = ByteBuffer.allocate(PackUtils.bitsInType(Integer.class)/PackUtils.BITS_PER_BYTE).order(byteOrder);
+    }
+
+    /**
+     * Write the data to the output stream.
+     * @param datum datum to write. 
+     * @throws IOException if an I/O error occurs.
+     */
+    public void write( long datum ) throws IOException {
+        buffer.rewind();
+        buffer.putInt((int)datum);
+        targetOutputStream.write(buffer.array());
+    }
+
+    /**
+     * Write the data to the output stream.
+     * @param data data to write.  occurrences.length must match alphabet size.
+     * @throws IOException if an I/O error occurs.
+     */
+    public void write( long[] data ) throws IOException {
+        for(long datum: data)
+            write(datum);
+    }
+
+    /**
+     * Write the given chunk of data to the input stream.
+     * @param data data to write.
+     * @param offset position at which to start.
+     * @param length number of ints to write.
+     * @throws IOException if an I/O error occurs.
+     */
+    public void write( long[] data, int offset, int length ) throws IOException {
+        for( int i = offset; i < offset+length; i++ )
+            write(data[i]);
+    }
+
+    /**
+     * Flush the contents of the OutputStream to disk.
+     * @throws IOException if an I/O error occurs.
+     */
+    public void flush() throws IOException {
+        targetOutputStream.flush();
+    }    
+
+    /**
+     * Closes the given output stream.
+     * @throws IOException if an I/O error occurs.
+     */
+    public void close() throws IOException {
+        targetOutputStream.close();
+    }
+
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/arguments/DbsnpArgumentCollection.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/arguments/DbsnpArgumentCollection.java
new file mode 100644
index 0000000..39247f5
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/arguments/DbsnpArgumentCollection.java
@@ -0,0 +1,46 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.arguments;
+
+
+import org.broadinstitute.gatk.utils.commandline.Input;
+import org.broadinstitute.gatk.utils.commandline.RodBinding;
+import htsjdk.variant.variantcontext.VariantContext;
+
+/**
+ * Standard argument collection for supplying a dbSNP track to a tool.
+ * The single field is bound to the --dbsnp / -D command-line argument
+ * (optional) via the @Input annotation.
+ *
+ * @author ebanks
+ * @version 1.0
+ */
+public class DbsnpArgumentCollection {
+
+    /**
+     * A dbSNP VCF file.  Optional; when absent the binding is simply unbound.
+     */
+    @Input(fullName="dbsnp", shortName = "D", doc="dbSNP file", required=false)
+    public RodBinding<VariantContext> dbsnp;
+
+}
+
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/arguments/GATKArgumentCollection.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/arguments/GATKArgumentCollection.java
new file mode 100644
index 0000000..0601034
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/arguments/GATKArgumentCollection.java
@@ -0,0 +1,680 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.arguments;
+
+import htsjdk.samtools.ValidationStringency;
+import org.broadinstitute.gatk.utils.ValidationExclusion;
+import org.broadinstitute.gatk.utils.commandline.*;
+import org.broadinstitute.gatk.engine.GenomeAnalysisEngine;
+import org.broadinstitute.gatk.utils.downsampling.DownsampleType;
+import org.broadinstitute.gatk.utils.downsampling.DownsamplingMethod;
+import org.broadinstitute.gatk.engine.phonehome.GATKRunReport;
+import org.broadinstitute.gatk.engine.samples.PedigreeValidationType;
+import org.broadinstitute.gatk.utils.QualityUtils;
+import org.broadinstitute.gatk.utils.baq.BAQ;
+import org.broadinstitute.gatk.utils.variant.GATKVCFIndexType;
+import org.broadinstitute.gatk.engine.GATKVCFUtils;
+
+import java.io.File;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import java.util.concurrent.TimeUnit;
+
+/**
+ * @author aaron
+ * @version 1.0
+ */
+public class GATKArgumentCollection {
+
+    // the default value of the stop of the expanded window
+    public static final int DEFAULT_REFERENCE_WINDOW_STOP = 0;
+
+    /** the constructor */
+    public GATKArgumentCollection() {
+    }
+
+    // parameters and their defaults
+    /**
+     * An input file containing sequence data mapped to a reference, in BAM or CRAM format, or a text file containing a
+     * list of input files (with extension .list). Note that the GATK requires an accompanying .bai index for each BAM
+     * or CRAM file. Please see our online documentation for more details on input formatting requirements.
+     */
+    @Input(fullName = "input_file", shortName = "I", doc = "Input file containing sequence data (BAM or CRAM)", required = false)
+    public List<String> samFiles = new ArrayList<>();
+
+    @Advanced
+    @Argument(fullName = "showFullBamList",doc="Emit a log entry (level INFO) containing the full list of sequence data files to be included in the analysis (including files inside .bam.list or .cram.list files).")
+    public Boolean showFullBamList = false;
+
+    @Advanced
+    @Argument(fullName = "read_buffer_size", shortName = "rbs", doc="Number of reads per SAM file to buffer in memory", required = false, minValue = 0)
+    public Integer readBufferSize = null;
+
+    // --------------------------------------------------------------------------------------------------------------
+    //
+    // GATKRunReport options
+    //
+    // --------------------------------------------------------------------------------------------------------------
+
+    /**
+     * By default, GATK generates a run report that is uploaded to a cloud-based service. This report contains basic
+     * statistics about the run (which tool was used, whether the run was successful etc.) that help us for debugging
+     * and development. Up to version 3.3-0 the run report contains a record of the username and hostname associated
+     * with the run, but it does **NOT** contain any information that could be used to identify patient data.
+     * Nevertheless, if your data is subject to stringent confidentiality clauses (no outside communication) or if your
+     * run environment is not connected to the internet, you can disable the reporting system by setting this option to
+     * "NO_ET". You will also need to request a key using the online request form on our website (see FAQs).
+     */
+    @Argument(fullName = "phone_home", shortName = "et", doc="Run reporting mode", required = false)
+    public GATKRunReport.PhoneHomeOption phoneHomeType = GATKRunReport.PhoneHomeOption.AWS;
+    /**
+     * Please see the "phone_home" argument above and the online documentation FAQs for more details on the key system
+     * and how to request a key.
+     */
+    @Argument(fullName = "gatk_key", shortName = "K", doc="GATK key file required to run with -et NO_ET", required = false)
+    public File gatkKeyFile = null;
+
+    /**
+     * The GATKRunReport supports tagging GATK runs with an arbitrary tag that can be
+     * used to group together runs during later analysis (as of GATK 2.2) .  One use of this capability is to tag
+     * runs as GATK performance tests, so that the performance of the GATK over time can be assessed from the logs
+     * directly.
+     *
+     * Note that the tags do not conform to any ontology, so you are free to use any tags that you might find
+     * meaningful.
+     */
+    @Argument(fullName = "tag", shortName = "tag", doc="Tag to identify this GATK run as part of a group of runs", required = false)
+    public String tag = "NA";
+
+    // --------------------------------------------------------------------------------------------------------------
+    //
+    // General features
+    //
+    // --------------------------------------------------------------------------------------------------------------
+
+    /**
+     * Reads that fail the specified filters will not be used in the analysis. Multiple filters can be specified separately,
+     * e.g. you can do -rf MalformedRead -rf BadCigar and so on. Available read filters are listed in the online tool
+     * documentation. Note that the read name format is e.g. MalformedReadFilter, but at the command line the filter
+     * name should be given without the Filter suffix; e.g. -rf MalformedRead (NOT -rf MalformedReadFilter, which is not
+     * recognized by the program). Note also that some read filters are applied by default for some analysis tools; this
+     * is specified in each tool's documentation. The default filters can only be disabled if they are DisableableReadFilters.
+     */
+    @Argument(fullName = "read_filter", shortName = "rf", doc = "Filters to apply to reads before analysis", required = false)
+    public final List<String> readFilters = new ArrayList<>();
+
+    @Argument(fullName = "disable_read_filter", shortName = "drf", doc = "Read filters to disable", required = false)
+    public final List<String> disabledReadFilters = new ArrayList<>();
+
+    @ArgumentCollection
+    public IntervalArgumentCollection intervalArguments = new IntervalArgumentCollection();
+    /**
+     * The reference genome against which the sequence data was mapped. The GATK requires an index file and a dictionary
+     * file accompanying the reference (please see the online documentation FAQs for more details on these files). Although
+     * this argument is indicated as being optional, almost all GATK tools require a reference in order to run.
+     * Note also that while GATK can in theory process genomes from any organism with any number of chromosomes or contigs,
+     * it is not designed to process draft genome assemblies and performance will decrease as the number of contigs in
+     * the reference increases. We strongly discourage the use of unfinished genome assemblies containing more than a few
+     * hundred contigs. Contig numbers in the thousands will most probably cause memory-related crashes.
+     */
+    @Input(fullName = "reference_sequence", shortName = "R", doc = "Reference sequence file", required = false)
+    public File referenceFile = null;
+    /**
+     * If this flag is enabled, the random numbers generated will be different in every run, causing GATK to behave non-deterministically.
+     */
+    @Argument(fullName = "nonDeterministicRandomSeed", shortName = "ndrs", doc = "Use a non-deterministic random seed", required = false)
+    public boolean nonDeterministicRandomSeed = false;
+    /**
+     * To be used in the testing framework where dynamic parallelism can result in differing numbers of calls to the random generator.
+     */
+    @Hidden
+    @Argument(fullName = "disableDithering",doc="Completely eliminates randomized dithering from rank sum tests.")
+    public boolean disableDithering = false;
+    /**
+     * This will truncate the run but without exiting with a failure. By default the value is interpreted in minutes, but this can be changed with the maxRuntimeUnits argument.
+     */
+    @Argument(fullName = "maxRuntime", shortName = "maxRuntime", doc="Stop execution cleanly as soon as maxRuntime has been reached", required = false)
+    public long maxRuntime = GenomeAnalysisEngine.NO_RUNTIME_LIMIT;
+
+    @Argument(fullName = "maxRuntimeUnits", shortName = "maxRuntimeUnits", doc="Unit of time used by maxRuntime", required = false)
+    public TimeUnit maxRuntimeUnits = TimeUnit.MINUTES;
+
+    // --------------------------------------------------------------------------------------------------------------
+    //
+    // Downsampling Arguments
+    //
+    // --------------------------------------------------------------------------------------------------------------
+    /**
+     * There are several ways to downsample reads, i.e. to remove reads from the pile of reads that will be used for analysis.
+     * See the documentation of the individual downsampling options for details on how they work. Note that many GATK tools
+     * specify a default downsampling type and target, but this behavior can be overridden from the command line using the
+     * downsampling arguments.
+     */
+    @Argument(fullName = "downsampling_type", shortName="dt", doc="Type of read downsampling to employ at a given locus", required = false)
+    public DownsampleType downsamplingType = null;
+    /**
+     * Reads will be downsampled so the specified fraction remains; e.g. if you specify -dfrac 0.25, three-quarters of
+     * the reads will be removed, and the remaining one quarter will be used in the analysis. This method of downsampling
+     * is truly unbiased and random. It is typically used to simulate the effect of generating different amounts of
+     * sequence data for a given sample. For example, you can use this in a pilot experiment to evaluate how much target
+     * coverage you need to aim for in order to obtain enough coverage in all loci of interest.
+     */
+    @Argument(fullName = "downsample_to_fraction", shortName = "dfrac", doc = "Fraction of reads to downsample to", required = false, minValue = 0.0, maxValue = 1.0)
+    public Double downsampleFraction = null;
+
+    /**
+     * The principle of this downsampling type is to downsample reads to a given capping threshold coverage. Its purpose is to
+     * get rid of excessive coverage, because above a certain depth, having additional data is not informative and imposes
+     * unreasonable computational costs. The downsampling process takes two different forms depending on the type of
+     * analysis it is used with.
+     *
+     * For locus-based traversals (LocusWalkers like UnifiedGenotyper and ActiveRegionWalkers like HaplotypeCaller),
+     * downsample_to_coverage controls the maximum depth of coverage at each locus. For read-based traversals
+     * (ReadWalkers like BaseRecalibrator), it controls the maximum number of reads sharing the same alignment start
+     * position. For ReadWalkers you will typically need to use much lower dcov values than you would with LocusWalkers
+     * to see an effect. Note that this downsampling option does not produce an unbiased random sampling from all available
+     * reads at each locus: instead, the primary goal of the to-coverage downsampler is to maintain an even representation
+     * of reads from all alignment start positions when removing excess coverage. For a truly unbiased random sampling of
+     * reads, use -dfrac instead. Also note that the coverage target is an approximate goal that is not guaranteed to be
+     * met exactly: the downsampling algorithm will under some circumstances retain slightly more or less coverage than
+     * requested.
+     */
+    @Argument(fullName = "downsample_to_coverage", shortName = "dcov",
+              doc = "Target coverage threshold for downsampling to coverage",
+              required = false, minValue = 0)
+    public Integer downsampleCoverage = null;
+
+    /**
+     * Gets the downsampling method explicitly specified by the user.
+     *
+     * NOTE(review): contrary to the earlier wording, this does NOT fall back to
+     * any default: when none of the three downsampling arguments (-dt, -dfrac,
+     * -dcov) were supplied, it returns null and the caller is expected to apply
+     * the tool's own default downsampling method.
+     *
+     * @return the user-specified downsampling method, or null if the user did
+     *         not supply any downsampling arguments
+     */
+    public DownsamplingMethod getDownsamplingMethod() {
+        if ( downsamplingType == null && downsampleFraction == null && downsampleCoverage == null )
+            return null;
+
+        return new DownsamplingMethod(downsamplingType, downsampleCoverage, downsampleFraction);
+    }
+
+    /**
+     * Sets the downsampling method stored in the argument collection so that it
+     * is read back out when interrogating the command line arguments.
+     *
+     * @param method the downsampling mechanism to store; must not be null
+     * @throws IllegalArgumentException if method is null
+     */
+    public void setDownsamplingMethod(DownsamplingMethod method) {
+        if (method == null)
+            throw new IllegalArgumentException("method is null");
+
+        downsamplingType = method.type;
+        downsampleCoverage = method.toCoverage;
+        downsampleFraction = method.toFraction;
+    }
+
+    // --------------------------------------------------------------------------------------------------------------
+    //
+    // BAQ arguments
+    //
+    // --------------------------------------------------------------------------------------------------------------
+    @Advanced
+    @Argument(fullName = "baq", shortName="baq", doc="Type of BAQ calculation to apply in the engine", required = false)
+    public BAQ.CalculationMode BAQMode = BAQ.CalculationMode.OFF;
+    /**
+     *  Phred-scaled gap open penalty for BAQ calculation. Although the default value is 40, a value of 30 may be better for whole genome call sets.
+     */
+    @Advanced
+    @Argument(fullName = "baqGapOpenPenalty", shortName="baqGOP", doc="BAQ gap open penalty", required = false, minValue = 0)
+    public double BAQGOP = BAQ.DEFAULT_GOP;
+
+    // --------------------------------------------------------------------------------------------------------------
+    //
+    // refactor NDN cigar string arguments
+    //
+    // --------------------------------------------------------------------------------------------------------------
+    /**
+     * Some RNAseq aligners that use a known transcriptome resource (such as TopHat2) produce NDN elements in read CIGARS
+     * when a small exon is entirely deleted during transcription, which ends up looking like [exon1]NDN[exon3]. These
+     * rarely happen, but when they do they cause GATK to fail with an error. Setting this flag tells the GATK to
+     * reduce "NDN" to a simpler CIGAR representation with one N element (with total length of the three refactored
+     * elements). From the point of view of variant calling, there is no meaningful difference between the two
+     * representations.
+     */
+    @Argument(fullName = "refactor_NDN_cigar_string", shortName = "fixNDN", doc = "Reduce NDN elements in CIGAR string", required = false)
+    public boolean REFACTOR_NDN_CIGAR_READS = false;
+
+    // --------------------------------------------------------------------------------------------------------------
+    //
+    // quality encoding checking arguments
+    //
+    // --------------------------------------------------------------------------------------------------------------
+
+    /**
+     * By default the GATK assumes that base quality scores start at Q0 == ASCII 33 according to the SAM specification.
+     * However, encoding in some datasets (especially older Illumina ones) starts at Q64. This argument will fix the
+     * encodings on the fly (as the data is read in) by subtracting 31 from every quality score. Note that this argument should
+     * NEVER be used by default; you should only use it when you have confirmed that the quality scores in your data are
+     * not in the correct encoding.
+     */
+    @Argument(fullName = "fix_misencoded_quality_scores", shortName="fixMisencodedQuals", doc="Fix mis-encoded base quality scores", required = false)
+    public boolean FIX_MISENCODED_QUALS = false;
+    /**
+     * This flag tells GATK to ignore warnings when encountering base qualities that are too high and that seemingly
+     * indicate a problem with the base quality encoding of the BAM or CRAM file. You should only use this if you really
+     * know what you are doing; otherwise you could seriously mess up your data and ruin your analysis.
+     */
+    @Argument(fullName = "allow_potentially_misencoded_quality_scores", shortName="allowPotentiallyMisencodedQuals", doc="Ignore warnings about base quality score encoding", required = false)
+    public boolean ALLOW_POTENTIALLY_MISENCODED_QUALS = false;
+    /**
+     * This flag tells GATK to use the original base qualities (that were in the data before BQSR/recalibration) which
+     * are stored in the OQ tag, if they are present, rather than use the post-recalibration quality scores. If no OQ
+     * tag is present for a read, the standard qual score will be used.
+     */
+    @Argument(fullName="useOriginalQualities", shortName = "OQ", doc = "Use the base quality scores from the OQ tag", required=false)
+    public Boolean useOriginalBaseQualities = false;
+    /**
+     * If reads are missing some or all base quality scores, this value will be used for all base quality scores.
+     * By default this is set to -1 to disable default base quality assignment.
+     */
+    @Argument(fullName="defaultBaseQualities", shortName = "DBQ", doc = "Assign a default base quality", required=false, minValue = 0, maxValue = Byte.MAX_VALUE)
+    public byte defaultBaseQualities = -1;
+
+    // --------------------------------------------------------------------------------------------------------------
+    //
+    // performance log arguments
+    //
+    // --------------------------------------------------------------------------------------------------------------
+
+    /**
+     * The file name for the GATK performance log output, or null if you don't want to generate the
+     * detailed performance logging table.  This table is suitable for importing into R or any
+     * other analysis software that can read tsv files.
+     */
+    @Argument(fullName = "performanceLog", shortName="PF", doc="Write GATK runtime performance log to this file", required = false)
+    public File performanceLog = null;
+
+    // --------------------------------------------------------------------------------------------------------------
+    //
+    // BQSR arguments
+    //
+    // --------------------------------------------------------------------------------------------------------------
+
+    /**
+     * Enables on-the-fly recalibration of base qualities, intended primarily for use with BaseRecalibrator and PrintReads
+     * (see Best Practices workflow documentation). The covariates tables are produced by the BaseRecalibrator tool.
+     * Please be aware that you should only run recalibration with the covariates file created on the same input BAM(s)
+     * or CRAM(s).
+     */
+    @Input(fullName="BQSR", shortName="BQSR", required=false, doc="Input covariates table file for on-the-fly base quality score recalibration")
+    public File BQSR_RECAL_FILE = null;
+
+    /**
+     * Turns on the base quantization module. It requires a recalibration report (-BQSR).
+     *
+     * A value of 0 here means "do not quantize".
+     * Any value greater than zero will be used to recalculate the quantization using that many levels.
+     * Negative values mean that we should quantize using the recalibration report's quantization level.
+     */
+    @Argument(fullName="quantize_quals", shortName = "qq", doc = "Quantize quality scores to a given number of levels (with -BQSR)", required=false)
+    public int quantizationLevels = 0;
+
+    /**
+     * Static quantized quals are entirely separate from the quantize_quals option which uses dynamic binning.
+     * The two types of binning should not be used together.
+     */
+    @Advanced
+    @Argument(fullName="static_quantized_quals", shortName = "SQQ", doc = "Use static quantized quality scores to a given number of levels (with -BQSR)", required=false,  exclusiveOf = "quantize_quals", minValue = QualityUtils.MIN_USABLE_Q_SCORE, maxValue = QualityUtils.MAX_QUAL)
+    public List<Integer> staticQuantizationQuals = null;
+
+    /**
+     * Round down quantized only works with the static_quantized_quals option, and should not be used with
+     * the dynamic binning option provided by quantize_quals.  When roundDown = false, rounding is done in
+     * probability space to the nearest bin.  When roundDown = true, the value is rounded to the nearest bin
+     * that is smaller than the current bin.
+     */
+    @Hidden
+    @Argument(fullName="round_down_quantized", shortName = "RDQ", doc = "Round quals down to nearest quantized qual", required=false, exclusiveOf="quantize_quals")
+    public boolean roundDown = false;
+
+    /**
+     * Turns off printing of the base insertion and base deletion tags when using the -BQSR argument. Only the base substitution qualities will be produced.
+     */
+    @Argument(fullName="disable_indel_quals", shortName = "DIQ", doc = "Disable printing of base insertion and deletion tags (with -BQSR)", required=false)
+    public boolean disableIndelQuals = false;
+
+    /**
+     * By default, the OQ tag in not emitted when using the -BQSR argument. Use this flag to include OQ tags in the output BAM or CRAM file.
+     * Note that this may result in significant file size increase.
+     */
+    @Argument(fullName="emit_original_quals", shortName = "EOQ", doc = "Emit the OQ tag with the original base qualities (with -BQSR)", required=false)
+    public boolean emitOriginalQuals = false;
+
+    /**
+     * This flag tells GATK not to modify quality scores less than this value. Instead they will be written out unmodified in the recalibrated BAM or CRAM file.
+     * In general it's unsafe to change quality scores below 6, since base callers use these values to indicate random or bad bases.
+     * For example, Illumina writes Q2 bases when the machine has really gone wrong. This would be fine in and of itself,
+     * but when you select a subset of these reads based on their ability to align to the reference and their dinucleotide effect,
+     * your Q2 bin can be elevated to Q8 or Q10, leading to issues downstream.
+     */
+    @Advanced
+    @Argument(fullName = "preserve_qscores_less_than", shortName = "preserveQ", doc = "Don't recalibrate bases with quality scores less than this threshold (with -BQSR)", required = false, minValue = 0, minRecommendedValue = QualityUtils.MIN_USABLE_Q_SCORE)
+    public int PRESERVE_QSCORES_LESS_THAN = QualityUtils.MIN_USABLE_Q_SCORE;
+    /**
+     * If specified, this value will be used as the prior for all mismatch quality scores instead of the actual reported quality score.
+     */
+    @Advanced
+    @Argument(fullName = "globalQScorePrior", shortName = "globalQScorePrior", doc = "Global Qscore Bayesian prior to use for BQSR", required = false)
+    public double globalQScorePrior = -1.0;
+
+
+    // --------------------------------------------------------------------------------------------------------------
+    //
+    // Other utility arguments
+    //
+    // --------------------------------------------------------------------------------------------------------------
+
+    /**
+     * Keep in mind that if you set this to LENIENT, we may refuse to provide you with support if anything goes wrong.
+     */
+    @Argument(fullName = "validation_strictness", shortName = "S", doc = "How strict should we be with validation", required = false)
+    public ValidationStringency strictnessLevel = ValidationStringency.SILENT;
+    /**
+     * Some tools keep program records in the SAM header by default. Use this argument to override that behavior and discard program records for the SAM header. Does not work on CRAM files.
+     */
+    @Argument(fullName = "remove_program_records", shortName = "rpr", doc = "Remove program records from the SAM header", required = false)
+    public boolean removeProgramRecords = false;
+    /**
+     * Some tools discard program records from the SAM header by default. Use this argument to override that behavior and keep program records in the SAM header.
+     */
+    @Argument(fullName = "keep_program_records", shortName = "kpr", doc = "Keep program records in the SAM header", required = false)
+    public boolean keepProgramRecords = false;
+
+    /**
+     * On-the-fly sample renaming works only with single-sample BAM, CRAM, and VCF files. Each line of the mapping file
+     * must contain the absolute path to a BAM, CRAM, or VCF file, followed by whitespace, followed by the new sample
+     * name for that BAM, CRAM, or VCF file. The sample name may contain non-tab whitespace, but leading or trailing
+     * whitespace will be ignored. The engine will verify at runtime that each BAM/CRAM/VCF targeted for sample
+     * renaming has only a single sample specified in its header (though, in the case of BAM/CRAM files, there may be
+     * multiple read groups for that sample).
+     */
+    @Advanced
+    @Argument(fullName = "sample_rename_mapping_file", shortName = "sample_rename_mapping_file", doc = "Rename sample IDs on-the-fly at runtime using the provided mapping file", required = false)
+    public File sampleRenameMappingFile = null;
+
+    /**
+     * For expert users only who know what they are doing. We do not support usage of this argument, so we may refuse to help you if you use it and something goes wrong. The one exception to this rule is ALLOW_N_CIGAR_READS, which is necessary for RNAseq analysis.
+     */
+    @Advanced
+    @Argument(fullName = "unsafe", shortName = "U", doc = "Enable unsafe operations: nothing will be checked at runtime", required = false)
+    public ValidationExclusion.TYPE unsafe;
+    /**
+     * Not recommended for general use. Disables both auto-generation of index files and index file locking
+     * when reading VCFs and other rods and an index isn't present or is out-of-date. The file locking necessary for auto index
+     * generation to work safely is prone to random failures/hangs on certain platforms, which makes it desirable to disable it
+     * for situations like test suite runs where the indices are already known to exist, however this option is unsafe in general
+     * because it allows reading from index files without first acquiring a lock.
+     */
+    @Advanced
+    @Argument(fullName = "disable_auto_index_creation_and_locking_when_reading_rods", shortName = "disable_auto_index_creation_and_locking_when_reading_rods",
+              doc = "Disable both auto-generation of index files and index file locking",
+              required = false)
+    public boolean disableAutoIndexCreationAndLockingWhenReadingRods = false;
+
+    @Hidden
+    @Argument(fullName = "no_cmdline_in_header", shortName = "no_cmdline_in_header", doc = "Don't output the usual VCF header tag with the command line. FOR DEBUGGING PURPOSES ONLY. This option is required in order to pass integration tests.",
+              required = false)
+    public boolean disableCommandLineInVCF = false;
+
+    @Argument(fullName = "sites_only", shortName = "sites_only", doc = "Just output sites without genotypes (i.e. only the first 8 columns of the VCF)",
+              required = false)
+    public boolean sitesOnlyVCF = false;
+
+    /**
+     * <p>The VCF specification permits missing records to be dropped from the end of FORMAT fields, so long as GT is always output.
+     * This option prevents GATK from performing that trimming.</p>
+     *
+     * <p>For example, given a FORMAT of <pre>GT:AD:DP:PL</pre>, GATK will by default emit <pre>./.</pre> for a variant with
+     * no reads present (ie, the AD, DP, and PL fields are trimmed).  If you specify -writeFullFormat, this record
+     * would be emitted as <pre>./.:.:.:.</pre></p>
+     */
+    @Argument(fullName = "never_trim_vcf_format_field", shortName = "writeFullFormat", doc = "Always output all the records in VCF FORMAT fields, even if some are missing",
+              required = false)
+    public boolean neverTrimVCFFormatField = false;
+
+    @Hidden
+    @Argument(fullName = "bcf", shortName = "bcf", doc = "Force BCF output, regardless of the file's extension",
+              required = false)
+    public boolean forceBCFOutput = false;
+
+    @Advanced
+    @Argument(fullName = "bam_compression", shortName = "compress", doc = "Compression level to use for writing BAM files (0 - 9, higher is more compressed)",
+              minValue = 0, maxValue = 9, required = false)
+    public Integer bamCompression = null;
+
+    @Advanced
+    @Argument(fullName = "simplifyBAM", shortName = "simplifyBAM",
+              doc = "If provided, output BAM/CRAM files will be simplified to include just key reads for downstream variation discovery analyses (removing duplicates, PF-, non-primary reads), as well stripping all extended tags from the kept reads except the read group identifier",
+              required = false)
+    public boolean simplifyBAM = false;
+
+    // When set, do not write the companion .bai/.crai index next to each output BAM/CRAM.
+    @Advanced
+    @Argument(fullName = "disable_bam_indexing", doc = "Turn off on-the-fly creation of indices for output BAM/CRAM files.",
+            required = false)
+    public boolean disableBAMIndexing = false;
+
+    // When set, write an .md5 checksum file alongside each output BAM.
+    @Argument(fullName = "generate_md5", doc = "Enable on-the-fly creation of md5s for output BAM files.",
+            required = false)
+    public boolean enableBAMmd5 = false;
+
+    // --------------------------------------------------------------------------------------------------------------
+    //
+    // Multi-threading arguments
+    //
+    // --------------------------------------------------------------------------------------------------------------
+
+    /**
+     * Data threads contains N cpu threads per data thread, and act as completely data parallel processing, increasing
+     * the memory usage of GATK by M data threads. Data threads generally scale extremely effectively, up to 24 cores.
+     * See online documentation FAQs for more information.
+     */
+    // NOTE(review): declared as boxed Integer while -nct below is a primitive int — presumably
+    // historical; confirm nothing relies on a null value before normalizing the types.
+    @Argument(fullName = "num_threads", shortName = "nt", doc = "Number of data threads to allocate to this analysis", required = false, minValue = 1)
+    public Integer numberOfDataThreads = 1;
+
+    /**
+     * Each CPU thread operates the map cycle independently, but may run into earlier scaling problems with IO than
+     * data threads. Has the benefit of not requiring X times as much memory per thread as data threads do, but rather
+     * only a constant overhead. See online documentation FAQs for more information.
+     */
+    @Argument(fullName="num_cpu_threads_per_data_thread", shortName = "nct", doc="Number of CPU threads to allocate per data thread", required = false, minValue = 1)
+    public int numberOfCPUThreadsPerDataThread = 1;
+
+    // Hidden/experimental: dedicated BAM IO threads; 0 disables the feature.
+    @Argument(fullName="num_io_threads", shortName = "nit", doc="Number of given threads to allocate to BAM IO", required = false, minValue = 0)
+    @Hidden
+    public int numberOfIOThreads = 0;
+
+    /**
+     * Enable GATK to monitor its own threading efficiency, at an itsy-bitsy tiny
+     * cost (< 0.1%) in runtime because of turning on the JavaBean.  This is largely for
+     * debugging purposes. Note that this argument is not compatible with -nt, it only works with -nct.
+     */
+    @Argument(fullName = "monitorThreadEfficiency", shortName = "mte", doc = "Enable threading efficiency monitoring", required = false)
+    public Boolean monitorThreadEfficiency = false;
+
+    // NOTE(review): null means "no explicit cap"; presumably only consulted when -nit IO
+    // threads are in use — confirm against the engine's IO setup code.
+    @Argument(fullName = "num_bam_file_handles", shortName = "bfh", doc="When using IO threads, total number of BAM file handles to keep open simultaneously", required=false, minValue = 1)
+    @Hidden
+    public Integer numberOfBAMFileHandles = null;
+    /**
+     * This will filter out read groups matching <TAG>:<STRING> (e.g. SM:sample1) or a .txt file containing the filter strings one per line.
+     */
+    @Input(fullName = "read_group_black_list", shortName="rgbl", doc="Exclude read groups based on tags", required = false)
+    public List<String> readGroupBlackList = null;
+
+    // --------------------------------------------------------------------------------------------------------------
+    //
+    // PED (pedigree) support
+    //
+    // --------------------------------------------------------------------------------------------------------------
+
+    /**
+     * <p>Reads PED file-formatted tabular text files describing meta-data about the samples being
+     * processed in the GATK.</p>
+     *
+     * <ul>
+     *  <li>see <a href="http://www.broadinstitute.org/mpg/tagger/faq.html">http://www.broadinstitute.org/mpg/tagger/faq.html</a></li>
+     *  <li>see <a href="http://pngu.mgh.harvard.edu/~purcell/plink/data.shtml#ped">http://pngu.mgh.harvard.edu/~purcell/plink/data.shtml#ped</a></li>
+     * </ul>
+     *
+     * <p>The PED file is a white-space (space or tab) delimited file: the first six columns are mandatory:</p>
+     *
+     * <ul>
+     *  <li>Family ID</li>
+     *  <li>Individual ID</li>
+     *  <li>Paternal ID</li>
+     *  <li>Maternal ID</li>
+     *  <li>Sex (1=male; 2=female; other=unknown)</li>
+     *  <li>Phenotype</li>
+     * </ul>
+     *
+     *  <p>The IDs are alphanumeric: the combination of family and individual ID should uniquely identify a person.
+     *  A PED file must have 1 and only 1 phenotype in the sixth column. The phenotype can be either a
+     *  quantitative trait or an affection status column: GATK will automatically detect which type
+     *  (i.e. based on whether a value other than 0, 1, 2 or the missing genotype code is observed).</p>
+     *
+     *  <p>If an individual's sex is unknown, then any character other than 1 or 2 can be used.</p>
+     *
+     *  <p>You can add a comment to a PED or MAP file by starting the line with a # character. The rest of that
+     *  line will be ignored. Therefore, do not start any family IDs with this character.</p>
+     *
+     *  <p>Affection status should be coded:</p>
+     *
+     * <ul>
+     *  <li>-9 missing</li>
+     *   <li>0 missing</li>
+     *   <li>1 unaffected</li>
+     *   <li>2 affected</li>
+     * </ul>
+     *
+     * <p>If any value outside of -9,0,1,2 is detected, the phenotype values are
+     * interpreted as string phenotype values.  In this case -9 uniquely
+     * represents the missing value.</p>
+     *
+     * <p>Genotypes (column 7 onwards) cannot be specified to the GATK.</p>
+     *
+     * <p>For example, here are two individuals (one row = one person):</p>
+     *
+     * <pre>
+     *   FAM001  1  0 0  1  2
+     *   FAM001  2  0 0  1  2
+     * </pre>
+     *
+     * <p>Each -ped argument can be tagged with NO_FAMILY_ID, NO_PARENTS, NO_SEX, NO_PHENOTYPE to
+     * tell the GATK PED parser that the corresponding fields are missing from the ped file.</p>
+     *
+     * <p>Note that most GATK walkers do not use pedigree information.  Walkers that require pedigree
+     * data should clearly indicate so in their arguments and will throw errors if required pedigree
+     * information is missing.</p>
+     */
+    @Argument(fullName="pedigree", shortName = "ped", doc="Pedigree files for samples",required=false)
+    public List<File> pedigreeFiles = Collections.emptyList();
+
+    /**
+     * Inline PED records (see -ped argument).  Each -pedString STRING can contain one or more
+     * valid PED records (see -ped) separated by semi-colons.  Supports all tags for each pedString
+     * as -ped supports.
+     */
+    @Argument(fullName="pedigreeString", shortName = "pedString", doc="Pedigree string for samples",required=false)
+    public List<String> pedigreeStrings = Collections.emptyList();
+
+    /**
+     * How strict should we be in parsing the PED files?
+     */
+    @Argument(fullName="pedigreeValidationType", shortName = "pedValidationType", doc="Validation strictness for pedigree information",required=false)
+    public PedigreeValidationType pedigreeValidationType = PedigreeValidationType.STRICT;
+
+    // --------------------------------------------------------------------------------------------------------------
+    //
+    // BAM indexing and sharding arguments
+    //
+    // --------------------------------------------------------------------------------------------------------------
+    /**
+     * NO INTEGRATION TESTS are available.  Use at your own risk.
+     */
+    // Hidden escape hatch: lets interval traversal proceed without a BAM/CRAM index.
+    @Argument(fullName="allow_intervals_with_unindexed_bam",doc="Allow interval processing with an unsupported BAM/CRAM",required=false)
+    @Hidden
+    public boolean allowIntervalsWithUnindexedBAM = false;
+
+    // --------------------------------------------------------------------------------------------------------------
+    //
+    // testing BCF2
+    //
+    // --------------------------------------------------------------------------------------------------------------
+    /**
+     * If provided, whenever we create a VCFWriter we will also write out a BCF file alongside it, for testing purposes.
+     */
+    @Argument(fullName="generateShadowBCF",shortName = "generateShadowBCF",doc="Write a BCF copy of the output VCF",required=false)
+    @Hidden
+    public boolean generateShadowBCF = false;
+    // TODO -- remove all code tagged with TODO -- remove me when argument generateShadowBCF is removed
+
+    // --------------------------------------------------------------------------------------------------------------
+    //
+    // VCF/BCF index parameters
+    //
+    // --------------------------------------------------------------------------------------------------------------
+
+    /**
+     * Specify the Tribble indexing strategy to use for VCFs.
+     *
+     * LINEAR creates a LinearIndex with bins of equal width, specified by the Bin Width parameter
+     * INTERVAL creates an IntervalTreeIndex with bins with an equal amount of features, specified by the Features Per Bin parameter
+     * DYNAMIC_SEEK attempts to optimize for minimal seek time by choosing an appropriate strategy and parameter (user-supplied parameter is ignored)
+     * DYNAMIC_SIZE attempts to optimize for minimal index size by choosing an appropriate strategy and parameter (user-supplied parameter is ignored)
+     *
+     * This argument is deprecated, using the output file ".g.vcf" extension will automatically set the appropriate value
+     */
+    // Deprecated per the Javadoc above; kept for backward compatibility with existing command lines.
+    @Argument(fullName="variant_index_type",shortName = "variant_index_type",doc="Type of IndexCreator to use for VCF/BCF indices",required=false)
+    @Advanced
+    public GATKVCFIndexType variant_index_type = GATKVCFUtils.DEFAULT_INDEX_TYPE;
+    /**
+     * This is either the bin width or the number of features per bin, depending on the indexing strategy
+     *
+     * This argument is deprecated, using the output file ".g.vcf" extension will automatically set the appropriate value
+     */
+    @Argument(fullName="variant_index_parameter",shortName = "variant_index_parameter",doc="Parameter to pass to the VCF/BCF IndexCreator",required=false)
+    @Advanced
+    public int variant_index_parameter = GATKVCFUtils.DEFAULT_INDEX_PARAMETER;
+
+    // --------------------------------------------------------------------------------------------------------------
+    //
+    // Window arguments
+    //
+    // -------------------------------------------------------------------------------------------------------------
+    /**
+     * Stop of the expanded window for which the reference context should be provided, relative to the locus.
+     */
+    // NOTE(review): DEFAULT_REFERENCE_WINDOW_STOP is declared elsewhere in this class (outside this hunk).
+    @Argument(fullName = "reference_window_stop", shortName = "ref_win_stop", doc = "Reference window stop", minValue = 0, required = false)
+    @Advanced
+    public int reference_window_stop = DEFAULT_REFERENCE_WINDOW_STOP;
+}
+
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/arguments/StandardVariantContextInputArgumentCollection.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/arguments/StandardVariantContextInputArgumentCollection.java
new file mode 100644
index 0000000..ff8da29
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/arguments/StandardVariantContextInputArgumentCollection.java
@@ -0,0 +1,48 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.arguments;
+
+
+import org.broadinstitute.gatk.utils.commandline.Input;
+import org.broadinstitute.gatk.utils.commandline.RodBinding;
+import htsjdk.variant.variantcontext.VariantContext;
+
+/**
+ * @author ebanks
+ * @version 1.0
+ */
+public class StandardVariantContextInputArgumentCollection {
+
+    /**
+     * Variants from this VCF file are used by this tool as input.
+     * The file must at least contain the standard VCF header lines, but
+     * can be empty (i.e., no variants are contained in the file).
+     */
+    // required=true: argument parsing fails if no -V/--variant binding is supplied.
+    @Input(fullName="variant", shortName = "V", doc="Input VCF file", required=true)
+    public RodBinding<VariantContext> variants;
+
+}
+
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/crypt/CryptUtils.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/crypt/CryptUtils.java
new file mode 100644
index 0000000..d7696d6
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/crypt/CryptUtils.java
@@ -0,0 +1,391 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.crypt;
+
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+import org.broadinstitute.gatk.utils.io.IOUtils;
+
+import javax.crypto.Cipher;
+import java.io.File;
+import java.io.InputStream;
+import java.security.*;
+import java.security.spec.InvalidKeySpecException;
+import java.security.spec.KeySpec;
+import java.security.spec.PKCS8EncodedKeySpec;
+import java.security.spec.X509EncodedKeySpec;
+import java.util.Arrays;
+
+/**
+ * A set of cryptographic utility methods and constants.
+ *
+ * Contains methods to:
+ *
+ * -Create a public/private key pair
+ * -Read and write public/private keys to/from files/streams
+ * -Load the GATK master private/public keys
+ * -Encrypt/decrypt data
+ *
+ * Also contains constants that control the cryptographic defaults
+ * throughout the GATK.
+ *
+ * @author David Roazen
+ */
+public class CryptUtils {
+
+    // ---------------------------------------------------------------------------------
+    // Constants (these control the default cryptographic settings throughout the GATK):
+    // ---------------------------------------------------------------------------------
+
+    /**
+     * Default key length in bits of newly-created keys. 2048 bits provides a good balance between
+     * security and speed.
+     */
+    public static final int DEFAULT_KEY_LENGTH = 2048;
+
+    /**
+     * Default encryption algorithm to use, when none is specified.
+     */
+    public static final String DEFAULT_ENCRYPTION_ALGORITHM = "RSA";
+
+    /**
+     * Default random-number generation algorithm to use, when none is specified.
+     *
+     * NOTE(review): "SHA1PRNG" is a legacy JDK algorithm name; the no-arg
+     * {@code new SecureRandom()} is generally preferred today — confirm no
+     * reproducibility/compatibility constraint depends on this name before changing it.
+     */
+    public static final String DEFAULT_RANDOM_NUMBER_GENERATION_ALGORITHM = "SHA1PRNG";
+
+    /**
+     * Name of the public key file distributed with the GATK. This file is packaged
+     * into the GATK jar, and we use the system ClassLoader to find it.
+     */
+    public static final String GATK_DISTRIBUTED_PUBLIC_KEY_FILE_NAME = "GATK_public.key";
+
+    /**
+     * Location of the master copy of the GATK private key.
+     */
+    public static final String GATK_MASTER_PRIVATE_KEY_FILE = "/humgen/gsa-hpprojects/GATK/data/gatk_master_keys/GATK_private.key";
+
+    /**
+     * Location of the master copy of the GATK public key. This file should always be the same as
+     * the public key file distributed with the GATK (and there are automated tests to ensure that it is).
+     */
+    public static final String GATK_MASTER_PUBLIC_KEY_FILE =  "/humgen/gsa-hpprojects/GATK/data/gatk_master_keys/GATK_public.key";
+
+    /**
+     * Directory where generated GATK user keys are stored. See the GATKKey class for more information.
+     */
+    public static final String GATK_USER_KEY_DIRECTORY =      "/humgen/gsa-hpprojects/GATK/data/gatk_user_keys/";
+
+
+    // -----------------------
+    // Utility Methods:
+    // -----------------------
+
+    /**
+     * Generate a new public/private key pair using the default encryption settings defined above.
+     *
+     * @return A new public/private key pair created using the default settings
+     */
+    public static KeyPair generateKeyPair() {
+        return generateKeyPair(DEFAULT_KEY_LENGTH, DEFAULT_ENCRYPTION_ALGORITHM, DEFAULT_RANDOM_NUMBER_GENERATION_ALGORITHM);
+    }
+
+    /**
+     * Generate a new public/private key pair using custom encryption settings.
+     *
+     * @param keyLength Length of the key in bits
+     * @param encryptionAlgorithm Encryption algorithm to use
+     * @param randNumberAlgorithm Random-number generation algorithm to use
+     * @return A new public/private key pair, created according to the specified parameters
+     */
+    public static KeyPair generateKeyPair( int keyLength, String encryptionAlgorithm, String randNumberAlgorithm ) {
+        try {
+            KeyPairGenerator keyGen = KeyPairGenerator.getInstance(encryptionAlgorithm);
+            SecureRandom randomnessSource = createRandomnessSource(randNumberAlgorithm);
+
+            keyGen.initialize(keyLength, randomnessSource);
+            return keyGen.generateKeyPair();
+        }
+        catch ( NoSuchAlgorithmException e ) {
+            throw new ReviewedGATKException(String.format("Could not find an implementation of the requested encryption algorithm %s", encryptionAlgorithm), e);
+        }
+        // Broad catch-all so any provider failure surfaces as a GATK exception with its cause attached.
+        catch ( Exception e ) {
+            throw new ReviewedGATKException("Error while generating key pair", e);
+        }
+    }
+
+    /**
+     * Create a source of randomness using the default random-number generation algorithm.
+     *
+     * @return A randomness source that uses the default algorithm
+     */
+    public static SecureRandom createRandomnessSource() {
+        return createRandomnessSource(DEFAULT_RANDOM_NUMBER_GENERATION_ALGORITHM);
+    }
+
+    /**
+     * Create a source of randomness using a custom random-number generation algorithm.
+     *
+     * @param randAlgorithm The random-number generation algorithm to use
+     * @return A randomness sources that uses the specified algorithm
+     */
+    public static SecureRandom createRandomnessSource ( String randAlgorithm ) {
+        try {
+            return SecureRandom.getInstance(randAlgorithm);
+        }
+        catch ( NoSuchAlgorithmException e ) {
+            throw new ReviewedGATKException(String.format("Could not find an implementation of the requested random-number generation algorithm %s", randAlgorithm), e);
+        }
+    }
+
+    /**
+     * Writes a public/private key pair to disk
+     *
+     * @param keyPair The key pair we're writing to disk
+     * @param privateKeyFile Location to write the private key
+     * @param publicKeyFile Location to write the public key
+     */
+    public static void writeKeyPair ( KeyPair keyPair, File privateKeyFile, File publicKeyFile ) {
+        writeKey(keyPair.getPrivate(), privateKeyFile);
+        writeKey(keyPair.getPublic(), publicKeyFile);
+    }
+
+    /**
+     * Writes an arbitrary key to disk
+     *
+     * Keys are written in their encoded form: X.509 for public keys,
+     * PKCS #8 for private keys (see Key.getEncoded()).
+     *
+     * @param key The key to write
+     * @param destination Location to write the key to
+     */
+    public static void writeKey ( Key key, File destination ) {
+        IOUtils.writeByteArrayToFile(key.getEncoded(), destination);
+    }
+
+    /**
+     * Reads in a public key created using the default encryption algorithm from a file.
+     *
+     * @param source File containing the public key
+     * @return The public key read
+     */
+    public static PublicKey readPublicKey ( File source ) {
+        return decodePublicKey(IOUtils.readFileIntoByteArray(source), DEFAULT_ENCRYPTION_ALGORITHM);
+    }
+
+    /**
+     * Reads in a public key created using the default encryption algorithm from a stream.
+     *
+     * @param source Stream attached to the public key
+     * @return The public key read
+     */
+    public static PublicKey readPublicKey ( InputStream source ) {
+        return decodePublicKey(IOUtils.readStreamIntoByteArray(source), DEFAULT_ENCRYPTION_ALGORITHM);
+    }
+
+    /**
+     * Decodes the raw bytes of a public key into a usable object.
+     *
+     * @param rawKey The encoded bytes of a public key as read from, eg., a file. The
+     *               key must be in the standard X.509 format for a public key.
+     * @param encryptionAlgorithm The encryption algorithm used to create the public key
+     * @return The public key as a usable object
+     */
+    public static PublicKey decodePublicKey ( byte[] rawKey, String encryptionAlgorithm ) {
+        try {
+            KeySpec keySpec = new X509EncodedKeySpec(rawKey);
+            KeyFactory keyFactory = KeyFactory.getInstance(encryptionAlgorithm);
+            return keyFactory.generatePublic(keySpec);
+        }
+        catch ( NoSuchAlgorithmException e ) {
+            throw new ReviewedGATKException(String.format("Could not find an implementation of the requested encryption algorithm %s", encryptionAlgorithm), e);
+        }
+        catch ( InvalidKeySpecException e ) {
+            throw new ReviewedGATKException("Unable to use X.509 key specification to decode the given key", e);
+        }
+    }
+
+    /**
+     * Reads in a private key created using the default encryption algorithm from a file.
+     *
+     * @param source File containing the private key
+     * @return The private key read
+     */
+    public static PrivateKey readPrivateKey ( File source ) {
+        return decodePrivateKey(IOUtils.readFileIntoByteArray(source), DEFAULT_ENCRYPTION_ALGORITHM);
+    }
+
+    /**
+     * Reads in a private key created using the default encryption algorithm from a stream.
+     *
+     * @param source Stream attached to the private key
+     * @return The private key read
+     */
+    public static PrivateKey readPrivateKey ( InputStream source ) {
+        return decodePrivateKey(IOUtils.readStreamIntoByteArray(source), DEFAULT_ENCRYPTION_ALGORITHM);
+    }
+
+    /**
+     * Decodes the raw bytes of a private key into a usable object.
+     *
+     * @param rawKey The encoded bytes of a private key as read from, eg., a file. The
+     *               key must be in the standard PKCS #8 format for a private key.
+     * @param encryptionAlgorithm The encryption algorithm used to create the private key
+     * @return The private key as a usable object
+     */
+    public static PrivateKey decodePrivateKey ( byte[] rawKey, String encryptionAlgorithm ) {
+        try {
+            KeySpec keySpec = new PKCS8EncodedKeySpec(rawKey);
+            KeyFactory keyFactory = KeyFactory.getInstance(encryptionAlgorithm);
+            return keyFactory.generatePrivate(keySpec);
+        }
+        catch ( NoSuchAlgorithmException e ) {
+            throw new ReviewedGATKException(String.format("Could not find an implementation of the requested encryption algorithm %s", encryptionAlgorithm), e);
+        }
+        catch ( InvalidKeySpecException e ) {
+            throw new ReviewedGATKException("Unable to use the PKCS #8 key specification to decode the given key", e);
+        }
+    }
+
+    /**
+     * Loads the copy of the GATK public key that is distributed with the GATK. Uses the system
+     * ClassLoader to locate the public key file, which should be stored at the root of the GATK
+     * jar file.
+     *
+     * @return The GATK public key as a usable object
+     */
+    public static PublicKey loadGATKDistributedPublicKey() {
+        InputStream publicKeyInputStream = ClassLoader.getSystemResourceAsStream(GATK_DISTRIBUTED_PUBLIC_KEY_FILE_NAME);
+
+        if ( publicKeyInputStream == null ) {
+            throw new ReviewedGATKException(String.format("Could not locate the GATK public key %s in the classpath",
+                                                           GATK_DISTRIBUTED_PUBLIC_KEY_FILE_NAME));
+        }
+
+        return readPublicKey(publicKeyInputStream);
+    }
+
+    /**
+     * Loads the master copy of the GATK private key. You must have the appropriate UNIX permissions
+     * to do this!
+     *
+     * @return The GATK master private key as a usable object
+     */
+    public static PrivateKey loadGATKMasterPrivateKey() {
+        return readPrivateKey(new File(GATK_MASTER_PRIVATE_KEY_FILE));
+    }
+
+    /**
+     * Loads the master copy of the GATK public key. This should always be the same as the
+     * public key distributed with the GATK returned by loadGATKDistributedPublicKey().
+     *
+     * @return The GATK master public key as a usable object
+     */
+    public static PublicKey loadGATKMasterPublicKey() {
+        return readPublicKey(new File(GATK_MASTER_PUBLIC_KEY_FILE));
+    }
+
+    /**
+     * Encrypts the given data using the key provided.
+     *
+     * @param data The data to encrypt, as a byte array
+     * @param encryptKey The key with which to encrypt the data
+     * @return The encrypted version of the provided data
+     */
+    public static byte[] encryptData ( byte[] data, Key encryptKey ) {
+        return transformDataUsingCipher(data, encryptKey, Cipher.ENCRYPT_MODE);
+    }
+
+    /**
+     * Decrypts the given data using the key provided.
+     *
+     * @param encryptedData Data to decrypt, as a byte array
+     * @param decryptKey The key with which to decrypt the data
+     * @return The decrypted version of the provided data
+     */
+    public static byte[] decryptData ( byte[] encryptedData, Key decryptKey ) {
+        return transformDataUsingCipher(encryptedData, decryptKey, Cipher.DECRYPT_MODE);
+    }
+
+    /**
+     * Helper method for encryption/decryption that takes data and processes it using
+     * the given key
+     *
+     * @param data Data to encrypt/decrypt
+     * @param key Key to use to encrypt/decrypt the data
+     * @param cipherMode Specifies whether we are encrypting or decrypting
+     * @return The encrypted/decrypted data
+     */
+    private static byte[] transformDataUsingCipher ( byte[] data, Key key, int cipherMode ) {
+        try {
+            // No mode/padding is specified, so the provider default transformation is used
+            // (for RSA this is typically ECB/PKCS1Padding) — NOTE(review): confirm that the
+            // default is acceptable for all keys passed through here.
+            Cipher cipher = Cipher.getInstance(key.getAlgorithm());
+            cipher.init(cipherMode, key);
+            return cipher.doFinal(data);
+        }
+        catch ( NoSuchAlgorithmException e ) {
+            throw new ReviewedGATKException(String.format("Could not find an implementation of the requested algorithm %s",
+                                             key.getAlgorithm()), e);
+        }
+        catch ( InvalidKeyException e ) {
+            throw new ReviewedGATKException("Key is invalid", e);
+        }
+        // NOTE(review): message says "encryption" but this path is also taken in DECRYPT_MODE.
+        catch ( GeneralSecurityException e ) {
+            throw new ReviewedGATKException("Error during encryption", e);
+        }
+    }
+
+    /**
+     * Tests whether the public/private keys provided can each decrypt data encrypted by
+     * the other key -- ie., tests whether these two keys are part of the same public/private
+     * key pair.
+     *
+     * Encrypting with a private key here is a raw cipher operation used purely as a
+     * pairing check; it is not a substitute for the Signature API.
+     *
+     * @param privateKey The private key to test
+     * @param publicKey The public key to test
+     * @return True if the keys are part of the same key pair and can decrypt each other's
+     *         encrypted data, otherwise false.
+     */
+    public static boolean keysDecryptEachOther ( PrivateKey privateKey, PublicKey publicKey ) {
+        byte[] plainText = "Test PlainText".getBytes();
+
+        byte[] dataEncryptedUsingPrivateKey = CryptUtils.encryptData(plainText, privateKey);
+        byte[] dataEncryptedUsingPublicKey = CryptUtils.encryptData(plainText, publicKey);
+
+        byte[] privateKeyDataDecryptedWithPublicKey = CryptUtils.decryptData(dataEncryptedUsingPrivateKey, publicKey);
+        byte[] publicKeyDataDecryptedWithPrivateKey = CryptUtils.decryptData(dataEncryptedUsingPublicKey, privateKey);
+
+        // Make sure we actually transformed the data during encryption:
+        if ( Arrays.equals(plainText, dataEncryptedUsingPrivateKey) ||
+             Arrays.equals(plainText, dataEncryptedUsingPublicKey) ||
+             Arrays.equals(dataEncryptedUsingPrivateKey, dataEncryptedUsingPublicKey) ) {
+            return false;
+        }
+
+        // Make sure that we were able to recreate the original plaintext using
+        // both the public key on the private-key-encrypted data and the private
+        // key on the public-key-encrypted data:
+        if ( ! Arrays.equals(plainText, privateKeyDataDecryptedWithPublicKey) ||
+             ! Arrays.equals(plainText, publicKeyDataDecryptedWithPrivateKey) ) {
+            return false;
+        }
+
+        return true;
+    }
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/crypt/GATKKey.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/crypt/GATKKey.java
new file mode 100644
index 0000000..3260476
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/crypt/GATKKey.java
@@ -0,0 +1,350 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.crypt;
+
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+import org.broadinstitute.gatk.utils.exceptions.UserException;
+import org.broadinstitute.gatk.utils.io.IOUtils;
+
+import java.io.*;
+import java.security.*;
+import java.util.zip.GZIPInputStream;
+import java.util.zip.GZIPOutputStream;
+
+/**
+ * Class to represent a GATK user key.
+ *
+ * A GATK user key contains an email address and a cryptographic signature.
+ * The signature is the SHA-1 hash of the email address encrypted using
+ * the GATK master private key. The GATK master public key (distributed
+ * with the GATK) is used to decrypt the signature and validate the key
+ * at the start of each GATK run that requires a key.
+ *
+ * Keys are cryptographically secure in that valid keys definitely come
+ * from us and cannot be fabricated, however nothing prevents keys from
+ * being shared between users.
+ *
+ * GATK user keys have the following on-disk format:
+ *
+ *     GZIP Container:
+ *         Email address
+ *         NUL byte (delimiter)
+ *         Cryptographic Signature (encrypted SHA-1 hash of email address)
+ *
+ * The key data is wrapped within a GZIP container to placate over-zealous
+ * email filters (since keys must often be emailed) and also to provide an
+ * additional integrity check via the built-in GZIP CRC.
+ *
+ * @author David Roazen
+ */
+public class GATKKey {
+
+    /**
+     * Private key used to sign the GATK key. Required only when creating a new
+     * key from scratch, not when loading an existing key from disk.
+     */
+    private PrivateKey privateKey;
+
+    /**
+     * Public key used to validate the GATK key.
+     */
+    private PublicKey publicKey;
+
+    /**
+     * The user's email address, stored within the key and signed.
+     */
+    private String emailAddress;
+
+    /**
+     * The cryptographic signature of the email address. By default, this is
+     * the SHA-1 hash of the email address encrypted using the RSA algorithm.
+     */
+    private byte[] signature;
+
+    /**
+     * The combination of hash/encryption algorithms to use to generate the signature.
+     * By default this is "SHA1withRSA"
+     */
+    private String signingAlgorithm;
+
+    /**
+     * Default hash/encryption algorithms to use to sign the key.
+     */
+    public static final String DEFAULT_SIGNING_ALGORITHM = "SHA1withRSA";
+
+    /**
+     * Byte value used to separate the email address from its signature in the key file.
+     */
+    public static final byte GATK_KEY_SECTIONAL_DELIMITER = 0;
+
+
+    // -----------------------
+    // Constructors:
+    // -----------------------
+
+    /**
+     * Constructor to create a new GATK key from scratch using an email address
+     * and public/private key pair. The private key is used for signing, and the
+     * public key is used to validate the newly-created key.
+     *
+     * @param privateKey Private key used to sign the new GATK key
+     * @param publicKey Public key used to validate the new GATK key
+     * @param emailAddress The user's email address, which we will store in the key and sign
+     */
+    public GATKKey ( PrivateKey privateKey, PublicKey publicKey, String emailAddress ) {
+        this(privateKey, publicKey, emailAddress, DEFAULT_SIGNING_ALGORITHM);
+    }
+
+    /**
+     * Constructor to create a new GATK key from scratch using an email address
+     * and public/private key pair, and additionally specify the signing algorithm
+     * to use. The private key is used for signing, and the public key is used to
+     * validate the newly-created key.
+     *
+     * @param privateKey Private key used to sign the new GATK key
+     * @param publicKey Public key used to validate the new GATK key
+     * @param emailAddress The user's email address, which we will store in the key and sign
+     * @param signingAlgorithm The combination of hash and encryption algorithms to use to sign the key
+     */
+    public GATKKey ( PrivateKey privateKey, PublicKey publicKey, String emailAddress, String signingAlgorithm ) {
+        if ( privateKey == null || publicKey == null || emailAddress == null || emailAddress.length() == 0 || signingAlgorithm == null ) {
+            throw new ReviewedGATKException("Cannot construct GATKKey using null/empty arguments");
+        }
+
+        this.privateKey = privateKey;
+        this.publicKey = publicKey;
+        this.emailAddress = emailAddress;
+        this.signingAlgorithm = signingAlgorithm;
+
+        validateEmailAddress();
+        generateSignature();
+
+        if ( ! isValid() ) {
+            throw new ReviewedGATKException("Newly-generated GATK key fails validation -- this should never happen!");
+        }
+    }
+
+    /**
+     * Constructor to load an existing GATK key from a file.
+     *
+     * During loading, the key file is checked for integrity, but not cryptographic
+     * validity (which must be done through a subsequent call to isValid()).
+     *
+     * @param publicKey Public key that will be used to validate the loaded GATK key
+     *                  in subsequent calls to isValid()
+     * @param keyFile File containing the GATK key to load
+     */
+    public GATKKey ( PublicKey publicKey, File keyFile ) {
+        this(publicKey, keyFile, DEFAULT_SIGNING_ALGORITHM);
+    }
+
+    /**
+     * Constructor to load an existing GATK key from a file, and additionally specify
+     * the signing algorithm used to sign the key being loaded.
+     *
+     * During loading, the key file is checked for integrity, but not cryptographic
+     * validity (which must be done through a subsequent call to isValid()).
+     *
+     * @param publicKey Public key that will be used to validate the loaded GATK key
+     *                  in subsequent calls to isValid()
+     * @param keyFile File containing the GATK key to load
+     * @param signingAlgorithm The combination of hash and encryption algorithms used to sign the key
+     */
+    public GATKKey ( PublicKey publicKey, File keyFile, String signingAlgorithm ) {
+        if ( publicKey == null || keyFile == null || signingAlgorithm == null ) {
+            throw new ReviewedGATKException("Cannot construct GATKKey using null arguments");
+        }
+
+        this.publicKey = publicKey;
+        this.signingAlgorithm = signingAlgorithm;
+
+        readKey(keyFile);
+    }
+
+    // -----------------------
+    // Public API Methods:
+    // -----------------------
+
+    /**
+     * Writes out this key to a file in the format described at the top of this class,
+     * encapsulating the key within a GZIP container.
+     *
+     * @param destination File to write the key to
+     */
+    public void writeKey ( File destination ) {
+        try {
+            byte[] keyBytes = marshalKeyData();
+            IOUtils.writeByteArrayToStream(keyBytes, new GZIPOutputStream(new FileOutputStream(destination)));
+        }
+        catch ( IOException e ) {
+            throw new UserException.CouldNotCreateOutputFile(destination, e);
+        }
+    }
+
+    /**
+     * Checks whether the signature of this key is cryptographically valid (ie., can be
+     * decrypted by the public key to produce a valid SHA-1 hash of the email address
+     * in the key).
+     *
+     * @return True if the key's signature passes validation, otherwise false
+     */
+    public boolean isValid() {
+        try {
+            Signature sig = Signature.getInstance(signingAlgorithm);
+            sig.initVerify(publicKey);
+            sig.update(emailAddress.getBytes());
+            return sig.verify(signature);
+        }
+        catch ( NoSuchAlgorithmException e ) {
+            throw new ReviewedGATKException(String.format("Signing algorithm %s not found", signingAlgorithm), e);
+        }
+        catch ( InvalidKeyException e ) {
+            // If the GATK public key is invalid, it's likely our problem, not the user's:
+            throw new ReviewedGATKException(String.format("Public key %s is invalid", publicKey), e);
+        }
+        catch ( SignatureException e ) {
+            throw new UserException.UnreadableKeyException("Signature is invalid or signing algorithm was unable to process the input data", e);
+        }
+    }
+
+    // -----------------------
+    // Private Helper Methods:
+    // -----------------------
+
+    /**
+     * Helper method that creates a signature for this key using the combination of
+     * hash/encryption algorithms specified at construction time.
+     */
+    private void generateSignature() {
+        try {
+            Signature sig = Signature.getInstance(signingAlgorithm);
+            sig.initSign(privateKey, CryptUtils.createRandomnessSource());
+            sig.update(emailAddress.getBytes());
+            signature = sig.sign();
+        }
+        catch ( NoSuchAlgorithmException e ) {
+            throw new ReviewedGATKException(String.format("Signing algorithm %s not found", signingAlgorithm), e);
+        }
+        catch ( InvalidKeyException e ) {
+            throw new ReviewedGATKException(String.format("Private key %s is invalid", privateKey), e);
+        }
+        catch ( SignatureException e ) {
+            throw new ReviewedGATKException(String.format("Error creating signature for email address %s", emailAddress), e);
+        }
+    }
+
+    /**
+     * Helper method that reads in a GATK key from a file. Should not be called directly --
+     * use the appropriate constructor above.
+     *
+     * @param source File to read the key from
+     */
+    private void readKey ( File source ) {
+        try {
+            byte[] keyBytes = IOUtils.readStreamIntoByteArray(new GZIPInputStream(new FileInputStream(source)));
+
+            // As a sanity check, compare the number of bytes read to the uncompressed file size
+            // stored in the GZIP ISIZE field. If they don't match, the key must be corrupt:
+            if ( keyBytes.length != IOUtils.getGZIPFileUncompressedSize(source) ) {
+                throw new UserException.UnreadableKeyException("Number of bytes read does not match the uncompressed size specified in the GZIP ISIZE field");
+            }
+
+            unmarshalKeyData(keyBytes);
+        }
+        catch ( FileNotFoundException e ) {
+            throw new UserException.CouldNotReadInputFile(source, e);
+        }
+        catch ( IOException e ) {
+            throw new UserException.UnreadableKeyException(source, e);
+        }
+        catch ( UserException.CouldNotReadInputFile e ) {
+            throw new UserException.UnreadableKeyException(source, e);
+        }
+    }
+
+    /**
+     * Helper method that assembles the email address and signature into a format
+     * suitable for writing to disk.
+     *
+     * @return The aggregated key data, ready to be written to disk
+     */
+    private byte[] marshalKeyData() {
+        byte[] emailAddressBytes = emailAddress.getBytes();
+        byte[] assembledKey = new byte[emailAddressBytes.length + 1 + signature.length];
+
+        System.arraycopy(emailAddressBytes, 0, assembledKey, 0, emailAddressBytes.length);
+        assembledKey[emailAddressBytes.length] = GATK_KEY_SECTIONAL_DELIMITER;
+        System.arraycopy(signature, 0, assembledKey, emailAddressBytes.length + 1, signature.length);
+
+        return assembledKey;
+    }
+
+    /**
+     * Helper method that parses the raw key data from disk into its component
+     * email address and signature. Performs some basic validation in the process.
+     *
+     * @param keyBytes The raw, uncompressed key data read from disk
+     */
+    private void unmarshalKeyData ( byte[] keyBytes ) {
+        int delimiterPosition = -1;
+
+        for ( int i = 0; i < keyBytes.length; i++ ) {
+            if ( keyBytes[i] == GATK_KEY_SECTIONAL_DELIMITER ) {
+                delimiterPosition = i;
+                break;
+            }
+        }
+
+        if ( delimiterPosition == -1 ) {
+            throw new UserException.UnreadableKeyException("Malformed GATK key contains no sectional delimiter");
+        }
+        else if ( delimiterPosition == 0 ) {
+            throw new UserException.UnreadableKeyException("Malformed GATK key contains no email address");
+        }
+        else if ( delimiterPosition == keyBytes.length - 1 ) {
+            throw new UserException.UnreadableKeyException("Malformed GATK key contains no signature");
+        }
+
+        byte[] emailAddressBytes = new byte[delimiterPosition];
+        System.arraycopy(keyBytes, 0, emailAddressBytes, 0, delimiterPosition);
+        emailAddress = new String(emailAddressBytes);
+
+        signature = new byte[keyBytes.length - delimiterPosition - 1];
+        System.arraycopy(keyBytes, delimiterPosition + 1, signature, 0, keyBytes.length - delimiterPosition - 1);
+    }
+
+    /**
+     * Helper method that ensures that the user's email address does not contain the NUL byte, which we
+     * reserve as a delimiter within each key file.
+     */
+    private void validateEmailAddress() {
+        for ( byte b : emailAddress.getBytes() ) {
+            if ( b == GATK_KEY_SECTIONAL_DELIMITER ) {
+                throw new UserException(String.format("Email address must not contain a byte with value %d", GATK_KEY_SECTIONAL_DELIMITER));
+            }
+        }
+    }
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/package-info.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/package-info.java
new file mode 100644
index 0000000..71a1af2
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/package-info.java
@@ -0,0 +1,26 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.datasources;
\ No newline at end of file
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/providers/AllLocusView.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/providers/AllLocusView.java
new file mode 100644
index 0000000..865a72b
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/providers/AllLocusView.java
@@ -0,0 +1,169 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.datasources.providers;
+
+import org.broadinstitute.gatk.utils.contexts.AlignmentContext;
+import org.broadinstitute.gatk.engine.iterators.GenomeLocusIterator;
+import org.broadinstitute.gatk.utils.GenomeLoc;
+import org.broadinstitute.gatk.utils.pileup.ReadBackedPileupImpl;
+import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
+
+import java.util.Collections;
+import java.util.List;
+import java.util.NoSuchElementException;
+/**
+ * User: hanna
+ * Date: May 13, 2009
+ * Time: 3:32:30 PM
+ * BROAD INSTITUTE SOFTWARE COPYRIGHT NOTICE AND AGREEMENT
+ * Software and documentation are copyright 2005 by the Broad Institute.
+ * All rights are reserved.
+ *
+ * Users acknowledge that this software is supplied without any warranty or support.
+ * The Broad Institute is not responsible for its use, misuse, or
+ * functionality.
+ */
+
+/**
+ * A LocusView over which the user can iterate.
+ */
+
+public class AllLocusView extends LocusView {
+    private GenomeLocusIterator locusIterator;
+
+    /**
+     * Gets the next position in the view: next call to next() will jump there.
+     * Note that both nextPosition and nextLocus are PRE-read and cached.
+     */
+    private GenomeLoc nextPosition = null;
+
+    /**
+     * What's the next available context?
+     */
+    private AlignmentContext nextLocus = null;
+
+    /**
+     * Signal not to advance the iterator because we're currently sitting at the next element.
+     */
+    private boolean atNextElement = false;
+
+    /**
+     * Create a new queue of locus contexts.
+     *
+     * @param provider
+     */
+    public AllLocusView(LocusShardDataProvider provider) {
+        super(provider);
+        // Seed the state tracking members with the first possible seek position and the first possible locus context.
+        locusIterator = new GenomeLocusIterator(genomeLocParser, provider.getLocus());
+    }
+
+    public boolean hasNext() {
+        advance();
+        return nextPosition != null;
+    }
+
+    public AlignmentContext next() {
+        advance();
+
+        if (nextPosition == null)
+            throw new NoSuchElementException("No next is available in the all locus view");
+
+        // Flag to the iterator that no data is waiting in the queue to be processed.
+        atNextElement = false;
+
+        AlignmentContext currentLocus;
+
+        // If actual data is present, return it.  Otherwise, return empty data.
+        if (nextLocus != null && nextLocus.getLocation().equals(nextPosition))
+            currentLocus = nextLocus;
+        else
+            currentLocus = createEmptyLocus(nextPosition);
+
+        return currentLocus;
+    }
+
+    private void advance() {
+        // Already at the next element?  Don't move forward.
+        if (atNextElement)
+            return;
+
+        // Out of elements?
+        if (nextPosition == null && !locusIterator.hasNext())
+            return;
+
+        // If nextLocus has been consumed, clear it out to make room for the next incoming locus.
+        if (nextPosition != null && nextLocus != null && !nextLocus.getLocation().isPast(nextPosition)) {
+            nextLocus = null;
+
+            // Determine the next locus. The trick is that we may have more than one alignment context at the same
+            // reference position (regular base pileup, then extended pileup). If next alignment context (that we just pre-read)
+            // is still at the current position, we do not increment current position and wait for next call to next() to return
+            // that context. If we know that next context is past the current position, we are done with current
+            // position
+            if (hasNextLocus()) {
+                nextLocus = nextLocus();
+                if (nextPosition.equals(nextLocus.getLocation())) {
+                    atNextElement = true;
+                    return;
+                }
+            }
+        }
+
+        // No elements left in queue?  Clear out the position state tracker and return.
+        if (!locusIterator.hasNext()) {
+            nextPosition = null;
+            return;
+        }
+
+        // Actually fill the next position.
+        nextPosition = locusIterator.next();
+        atNextElement = true;
+
+        // Crank the iterator to (if possible) or past the next context.  Be careful not to hold a reference to nextLocus
+        // while using the hasNextLocus() / nextLocus() machinery; this will cause us to use more memory than is optimal. 
+        while (nextLocus == null || nextLocus.getLocation().isBefore(nextPosition)) {
+            nextLocus = null;
+            if (!hasNextLocus())
+                break;
+            nextLocus = nextLocus();
+        }
+    }
+
+    /**
+     * Creates a blank locus context at the specified location.
+     *
+     * @param site Site at which to create the blank locus context.
+     * @return empty context.
+     */
+    private final static List<GATKSAMRecord> EMPTY_PILEUP_READS = Collections.emptyList();
+    private final static List<Integer> EMPTY_PILEUP_OFFSETS = Collections.emptyList();
+    private final static List<Boolean> EMPTY_DELETION_STATUS = Collections.emptyList();
+
+    private AlignmentContext createEmptyLocus(GenomeLoc site) {
+        return new AlignmentContext(site, new ReadBackedPileupImpl(site, EMPTY_PILEUP_READS, EMPTY_PILEUP_OFFSETS));
+    }
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/providers/CoveredLocusView.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/providers/CoveredLocusView.java
new file mode 100644
index 0000000..6d961a3
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/providers/CoveredLocusView.java
@@ -0,0 +1,63 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.datasources.providers;
+
+import org.broadinstitute.gatk.utils.contexts.AlignmentContext;
+/**
+ * User: hanna
+ * Date: May 12, 2009
+ * Time: 11:24:42 AM
+ * BROAD INSTITUTE SOFTWARE COPYRIGHT NOTICE AND AGREEMENT
+ * Software and documentation are copyright 2005 by the Broad Institute.
+ * All rights are reserved.
+ *
+ * Users acknowledge that this software is supplied without any warranty or support.
+ * The Broad Institute is not responsible for its use, misuse, or
+ * functionality.
+ */
+
+/**
+ * A queue of locus contexts.  Provides unidirectional seek.  Stripped down
+ * implementation of java.util.Queue interface.
+ */
+
+public class CoveredLocusView extends LocusView {
+    /**
+     * Create a new queue of locus contexts.
+     * @param provider
+     */
+    public CoveredLocusView(LocusShardDataProvider provider) {
+        super(provider);
+    }
+
+    public boolean hasNext() {
+        return hasNextLocus();
+    }
+
+    public AlignmentContext next() {
+        return nextLocus();
+    }
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/providers/IntervalOverlappingRODsFromStream.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/providers/IntervalOverlappingRODsFromStream.java
new file mode 100644
index 0000000..1eb58a1
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/providers/IntervalOverlappingRODsFromStream.java
@@ -0,0 +1,168 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.datasources.providers;
+
+import com.google.java.contract.Ensures;
+import com.google.java.contract.Requires;
+import htsjdk.samtools.util.PeekableIterator;
+import org.broadinstitute.gatk.utils.refdata.RODRecordListImpl;
+import org.broadinstitute.gatk.utils.refdata.utils.GATKFeature;
+import org.broadinstitute.gatk.utils.refdata.utils.RODRecordList;
+import org.broadinstitute.gatk.utils.GenomeLoc;
+
+import java.util.Collection;
+import java.util.LinkedList;
+import java.util.ListIterator;
+
+/**
+ * Key algorithmic helper for ReadBasedReferenceOrderedData
+ *
+ * Takes a single iterator of features, and provides a single capability that returns
+ * the list of RODs that overlap an interval.  Allows sequential getOverlapping calls
+ * from intervals provided that these intervals always have increasing getStart() values.
+ *
+ */
+class IntervalOverlappingRODsFromStream {
+    /**
+     * Only held for QC purposes
+     */
+    GenomeLoc lastQuery = null;
+
+    private final String name;
+    private final LinkedList<GATKFeature> currentFeatures = new LinkedList<GATKFeature>();
+    private final PeekableIterator<RODRecordList> futureFeatures;
+
+    /**
+     * Create a new IntervalOverlappingRODsFromStream that reads elements from futureFeatures and
+     * returns RODRecordLists having name
+     *
+     * @param name
+     * @param futureFeatures
+     */
+    IntervalOverlappingRODsFromStream(final String name, final PeekableIterator<RODRecordList> futureFeatures) {
+        if ( futureFeatures == null ) throw new IllegalArgumentException("futureFeatures cannot be null");
+
+        this.name = name;
+        this.futureFeatures = futureFeatures;
+    }
+
+    /**
+     * Get the list of RODs overlapping loc from this stream of RODs.
+     *
+     * @param loc the interval to query
+     * @return a non-null RODRecordList containing the overlapping RODs, which may be empty
+     */
+    @Ensures({"overlaps(loc, result)",
+            "! futureFeatures.hasNext() || futureFeatures.peek().getLocation().isPast(loc)",
+            "result != null"})
+    public RODRecordList getOverlapping(final GenomeLoc loc) {
+        if ( lastQuery != null && loc.getStart() < lastQuery.getStart() )
+            throw new IllegalArgumentException(String.format("BUG: query interval (%s) starts before the previous interval %s", loc, lastQuery));
+
+        readOverlappingFutureFeatures(loc);
+        return new RODRecordListImpl(name, subsetToOverlapping(loc, currentFeatures), loc);
+    }
+
+
+    /**
+     * For contract assurance.  Checks that all bindings in loc overlap
+     *
+     * @param loc
+     * @param bindings
+     * @return
+     */
+    @Requires({"loc != null", "bindings != null"})
+    private boolean overlaps(final GenomeLoc loc, final RODRecordList bindings) {
+        for ( final GATKFeature feature : bindings )
+            if ( ! feature.getLocation().overlapsP(loc) )
+                return false;
+        return true;
+    }
+
+    /**
+     * Subset the features in all to those that overlap with loc
+     *
+     * The current features list contains everything read that cannot be thrown away yet, but not
+     * everything in there necessarily overlaps with loc.  Subset to just those that do overlap
+     *
+     * @param loc the location that features must overlap
+     * @param all the list of all features
+     * @return a subset of all that overlaps with loc
+     */
+    @Requires({"loc != null", "all != null"})
+    @Ensures("result.size() <= all.size()")
+    private Collection<GATKFeature> subsetToOverlapping(final GenomeLoc loc, final Collection<GATKFeature> all) {
+        final LinkedList<GATKFeature> overlapping = new LinkedList<GATKFeature>();
+        for ( final GATKFeature feature : all )
+            if ( feature.getLocation().overlapsP(loc) )
+                overlapping.add(feature);
+        return overlapping;
+    }
+
+    /**
+     * Update function.  Remove all elements of currentFeatures that end before loc
+     *
+     * Must be called by clients periodically when they know they will never ask for data before
+     * loc, so that the running cache of RODs doesn't grow out of control.
+     *
+     * @param loc the location to use
+     */
+    @Requires("loc != null")
+    @Ensures("currentFeatures.size() <= old(currentFeatures.size())")
+    public void trimCurrentFeaturesToLoc(final GenomeLoc loc) {
+        final ListIterator<GATKFeature> it = currentFeatures.listIterator();
+        while ( it.hasNext() ) {
+            final GATKFeature feature = it.next();
+            if ( feature.getLocation().isBefore(loc) )
+                it.remove();
+        }
+    }
+
+    /**
+     * Update function: Read all elements from futureFeatures that overlap with loc
+     *
+     * Stops at the first element that is past loc (leaving it in the stream), or when the stream empties
+     *
+     * @param loc
+     */
+    @Requires("loc != null")
+    @Ensures("currentFeatures.size() >= old(currentFeatures.size())")
+    private void readOverlappingFutureFeatures(final GenomeLoc loc) {
+        while ( futureFeatures.hasNext() ) {
+            final GenomeLoc nextLoc = futureFeatures.peek().getLocation();
+            if ( nextLoc.isBefore(loc) ) {
+                futureFeatures.next(); // next rod element is before loc, throw it away and keep looking
+            } else if ( nextLoc.isPast(loc) ) {
+                break; // next element is past loc, stop looking but don't pop it
+            } else if ( nextLoc.overlapsP(loc) ) {
+                // add overlapping elements to our current features, removing from stream
+                for ( final GATKFeature feature : futureFeatures.next() ) {
+                    currentFeatures.add(feature);
+                }
+            }
+        }
+    }
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/providers/IntervalReferenceOrderedView.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/providers/IntervalReferenceOrderedView.java
new file mode 100644
index 0000000..659a2c7
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/providers/IntervalReferenceOrderedView.java
@@ -0,0 +1,182 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.datasources.providers;
+
+import htsjdk.samtools.util.PeekableIterator;
+import org.broadinstitute.gatk.engine.datasources.rmd.ReferenceOrderedDataSource;
+import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
+import org.broadinstitute.gatk.utils.refdata.utils.LocationAwareSeekableRODIterator;
+import org.broadinstitute.gatk.utils.refdata.utils.RODRecordList;
+import org.broadinstitute.gatk.utils.GenomeLoc;
+import org.broadinstitute.gatk.utils.GenomeLocParser;
+
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.List;
+
+/**
+ * a ROD view that allows for requests for RODs that overlap intervals on the genome to produce a RefMetaDataTracker
+ */
+public class IntervalReferenceOrderedView implements ReferenceOrderedView {
+    /** a list of the RMDDataState (location->iterators), one entry per ROD data source */
+    private final List<RMDDataState> states = new ArrayList<>(1);
+
+    /**
+     * Used to get genome locs for reads
+     */
+    protected final GenomeLocParser genomeLocParser;
+
+    /**
+     * The total extent of all reads in this span.  We create iterators from our RODs
+     * from the start of this span, to the end.
+     *
+     * May be null when the provider has no reference ordered data; in that case
+     * states is empty and the states.isEmpty() checks below prevent dereferencing it.
+     */
+    private final GenomeLoc shardSpan;
+
+    /**
+     * Create a new IntervalReferenceOrderedView taking data from provider and capable of
+     * servicing ROD overlap requests within the genomic interval span
+     *
+     * @param provider a ShardDataProvider to give us data; must not be null
+     * @param span a GenomeLoc span, or null indicating take the entire genome; may only
+     *             be null when the provider has no reference ordered data
+     */
+    public IntervalReferenceOrderedView(final ShardDataProvider provider, final GenomeLoc span) {
+        if ( provider == null ) throw new IllegalArgumentException("provider cannot be null");
+        if ( provider.hasReferenceOrderedData() && span == null ) throw new IllegalArgumentException("span cannot be null when provider has reference ordered data");
+
+        this.genomeLocParser = provider.getGenomeLocParser();
+        this.shardSpan = span;
+        provider.register(this);
+
+        // conditional to optimize the case where we don't have any ROD data
+        if ( provider.hasReferenceOrderedData() && ! shardSpan.isUnmapped() ) {
+            for (final ReferenceOrderedDataSource dataSource : provider.getReferenceOrderedData())
+                states.add(new RMDDataState(dataSource, dataSource.seek(shardSpan)));
+        }
+    }
+
+    /**
+     * Testing constructor: builds the view directly from named feature iterators,
+     * bypassing the ShardDataProvider machinery.
+     */
+    protected IntervalReferenceOrderedView(final GenomeLocParser genomeLocParser,
+                                           final GenomeLoc shardSpan,
+                                           final List<String> names,
+                                           final List<PeekableIterator<RODRecordList>> featureSources) {
+        this.genomeLocParser = genomeLocParser;
+        this.shardSpan = shardSpan;
+        for ( int i = 0; i < names.size(); i++ )
+            states.add(new RMDDataState(names.get(i), featureSources.get(i)));
+    }
+
+    /**
+     * This view cannot coexist with the ManagingReferenceOrderedView.
+     * @return a list of the view classes that conflict with this one
+     */
+    public Collection<Class<? extends View>> getConflictingViews() {
+        List<Class<? extends View>> classes = new ArrayList<>();
+        classes.add(ManagingReferenceOrderedView.class);
+        return classes;
+    }
+
+    /**
+     * Get a RefMetaDataTracker containing bindings for all RODs overlapping the start position of loc
+     * @param loc a GenomeLoc of size == 1
+     * @return a non-null RefMetaDataTracker
+     */
+    @Override
+    public RefMetaDataTracker getReferenceOrderedDataAtLocus(GenomeLoc loc) {
+        if ( loc == null ) throw new IllegalArgumentException("loc cannot be null");
+        if ( loc.size() != 1 ) throw new IllegalArgumentException("GenomeLoc must have size == 1 but got " + loc);
+        return getReferenceOrderedDataForInterval(loc);
+    }
+
+    /**
+     * Get a RefMetaDataTracker containing bindings for all RODs overlapping interval
+     *
+     * @param interval a non-null interval
+     * @return a non-null RefMetaDataTracker
+     */
+    public RefMetaDataTracker getReferenceOrderedDataForInterval(final GenomeLoc interval) {
+        if ( interval == null ) throw new IllegalArgumentException("Interval cannot be null");
+
+        if ( states.isEmpty() || shardSpan.isUnmapped() ) // optimization for no bindings (common for read walkers)
+            return RefMetaDataTracker.EMPTY_TRACKER;
+        else {
+            final List<RODRecordList> bindings = new ArrayList<>(states.size());
+            for ( final RMDDataState state : states )
+                bindings.add(state.stream.getOverlapping(interval));
+            return new RefMetaDataTracker(bindings);
+        }
+    }
+
+    /**
+     * Trim down all of the ROD managers so that they only hold ROD bindings with start >= startOfDataToKeep.getStart()
+     *
+     * @param startOfDataToKeep a non-null genome loc
+     */
+    public void trimCurrentFeaturesToLoc(final GenomeLoc startOfDataToKeep) {
+        if ( startOfDataToKeep == null ) throw new IllegalArgumentException("startOfDataToKeep cannot be null");
+
+        for ( final RMDDataState state : states )
+            state.stream.trimCurrentFeaturesToLoc(startOfDataToKeep);
+    }
+
+    /**
+     * Closes the current view.
+     */
+    public void close() {
+        for (final RMDDataState state : states)
+            state.close();
+
+        // Clear out the existing data so that post-close() accesses to this data will fail-fast.
+        states.clear();
+    }
+
+    /**
+     * Models the traversal state of a given ROD lane.
+     */
+    private static class RMDDataState {
+        public final ReferenceOrderedDataSource dataSource;
+        public final IntervalOverlappingRODsFromStream stream;
+        private final LocationAwareSeekableRODIterator iterator;
+
+        public RMDDataState(ReferenceOrderedDataSource dataSource, LocationAwareSeekableRODIterator iterator) {
+            this.dataSource = dataSource;
+            this.iterator = iterator;
+            this.stream = new IntervalOverlappingRODsFromStream(dataSource.getName(), new PeekableIterator<>(iterator));
+        }
+
+        /**
+         * For testing: dataSource and iterator are left null, which makes close() a no-op.
+         */
+        public RMDDataState(final String name, final PeekableIterator<RODRecordList> iterator) {
+            this.dataSource = null;
+            this.iterator = null;
+            this.stream = new IntervalOverlappingRODsFromStream(name, new PeekableIterator<>(iterator));
+        }
+
+        public void close() {
+            if ( dataSource != null )
+                dataSource.close( iterator );
+        }
+    }
+}
+
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/providers/InvalidPositionException.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/providers/InvalidPositionException.java
new file mode 100644
index 0000000..b00d1c6
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/providers/InvalidPositionException.java
@@ -0,0 +1,46 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.datasources.providers;
+
+/**
+ * Created by IntelliJ IDEA.
+ * User: hanna
+ * Date: Apr 16, 2009
+ * Time: 4:11:40 PM
+ *
+ * Thrown to indicate invalid positions passed to the providers.
+ * Extend from RuntimeException to make it easier on our walker writers; don't make
+ * them catch every exception that comes their way.
+ */
+public class InvalidPositionException extends RuntimeException {
+    /**
+     * Creates an exception describing an invalid position.
+     *
+     * @param message detail message describing the invalid position
+     */
+    public InvalidPositionException(String message) {
+        super(message);
+    }
+
+    /**
+     * Creates an exception describing an invalid position, preserving the
+     * underlying cause.
+     *
+     * @param message   detail message describing the invalid position
+     * @param throwable the underlying cause of this exception
+     */
+    public InvalidPositionException(String message, Throwable throwable) {
+        super(message, throwable);
+    }
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/providers/LocusReferenceView.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/providers/LocusReferenceView.java
new file mode 100644
index 0000000..ede909e
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/providers/LocusReferenceView.java
@@ -0,0 +1,249 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.datasources.providers;
+
+import htsjdk.samtools.reference.ReferenceSequence;
+import org.broadinstitute.gatk.engine.arguments.GATKArgumentCollection;
+import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
+import org.broadinstitute.gatk.engine.walkers.Reference;
+import org.broadinstitute.gatk.engine.walkers.Walker;
+import org.broadinstitute.gatk.engine.walkers.Window;
+import org.broadinstitute.gatk.utils.GenomeLoc;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+/*
+ * Copyright (c) 2009 The Broad Institute
+ *
+ * Permission is hereby granted, free of charge, to any person
+ * obtaining a copy of this software and associated documentation
+ * files (the "Software"), to deal in the Software without
+ * restriction, including without limitation the rights to use,
+ * copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the
+ * Software is furnished to do so, subject to the following
+ * conditions:
+ *
+ * The above copyright notice and this permission notice shall be
+ * included in all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+ * OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+ * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+ * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+ * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+ * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+ * OTHER DEALINGS IN THE SOFTWARE.
+ */
+
+/**
+ * Provides access to the portion of the reference covering a single locus.
+ */
+public class LocusReferenceView extends ReferenceView {
+    /**
+     * Bound the reference view to make sure all accesses are within the shard.
+     * May be null (when the provider has no locus), indicating an unbounded view.
+     */
+    private GenomeLoc bounds;
+
+    /**
+     * Start of the expanded window for which the reference context should be provided,
+     * relative to the locus in question.  Non-positive in the walker constructor path
+     * (window.start() > 0 is rejected there).
+     */
+    private final int windowStart;
+
+
+    /**
+     * Stop of the expanded window for which the reference context should be provided,
+     * relative to the locus in question.
+     */
+    private final int windowStop;
+
+    /**
+     * Track the reference sequence and the last point accessed.  Used to
+     * track state when traversing over the reference.
+     */
+    private ReferenceSequence referenceSequence;
+
+    /**
+     * Create a LocusReferenceView given no other contextual information about
+     * the walkers, etc.  The window is degenerate (start == stop == 0).
+     * @param provider  source for locus data.
+     */
+    public LocusReferenceView( LocusShardDataProvider provider ) {
+        super(provider);
+        initializeBounds(provider);
+        windowStart = windowStop = 0;
+        initializeReferenceSequence(bounds);
+    }
+
+    /**
+     * Create a new locus reference view whose window is derived from the walker's
+     * {@link Reference} annotation and/or its reference_window_stop argument.
+     * @param walker input walker
+     * @param provider source for locus data.
+     */
+    public LocusReferenceView( Walker walker, LocusShardDataProvider provider ) {
+        super(provider);
+        initializeBounds(provider);
+
+        // Retrieve information about the window being accessed.
+        if( walker.getClass().isAnnotationPresent(Reference.class) ) {
+            Window window = walker.getClass().getAnnotation(Reference.class).window();
+
+            if( window.start() > 0 ) throw new ReviewedGATKException( "Reference window starts after current locus" );
+            if( window.stop() < 0 ) throw new ReviewedGATKException( "Reference window ends before current locus" );
+
+            windowStart = window.start();
+
+            if ( walker.getArguments() == null ){
+                windowStop = window.stop();
+            } else {
+                // Use reference arguments if set, otherwise use the annotation
+                windowStop = walker.getArguments().reference_window_stop != GATKArgumentCollection.DEFAULT_REFERENCE_WINDOW_STOP ?
+                        walker.getArguments().reference_window_stop : window.stop();
+            }
+        }
+        else {
+            windowStart = 0;
+            if ( walker.getArguments() == null ){
+                windowStop = 0;
+            } else {
+                windowStop = walker.getArguments().reference_window_stop;
+            }
+        }
+
+        // Pre-fetch the reference bases covering the expanded shard span, if bounded.
+        if(bounds != null) {
+            int expandedStart = getWindowStart( bounds );
+            int expandedStop  = getWindowStop( bounds );
+            initializeReferenceSequence(genomeLocParser.createGenomeLoc(bounds.getContig(), bounds.getContigIndex(), expandedStart, expandedStop));
+        }
+    }
+
+    /**
+     * Initialize the bounds of this shard, trimming the bounds so that they match the reference.
+     * Leaves bounds null when the provider has no locus.
+     * @param provider Provider covering the appropriate locus.
+     */
+    private void initializeBounds(LocusShardDataProvider provider) {
+        if(provider.getLocus() != null) {
+            int sequenceLength = reference.getSequenceDictionary().getSequence(provider.getLocus().getContig()).getSequenceLength();
+            bounds = genomeLocParser.createGenomeLoc(provider.getLocus().getContig(),
+                    Math.max(provider.getLocus().getStart(),1),
+                    Math.min(provider.getLocus().getStop(),sequenceLength));
+        }
+        else
+            bounds = null;
+    }
+
+    /**
+     * Initialize reference sequence data using the given locus.
+     * @param locus the region whose reference bases should be cached
+     */
+    private void initializeReferenceSequence( GenomeLoc locus ) {
+        this.referenceSequence = reference.getSubsequenceAt( locus.getContig(), locus.getStart(), locus.getStop() );
+    }
+
+    /**
+     * Clip the given loc so it lies within the window-expanded shard bounds.
+     * Assumes bounds is non-null — callers check this. TODO confirm for subclasses.
+     */
+    protected GenomeLoc trimToBounds(GenomeLoc l) {
+        int expandedStart = getWindowStart( bounds );
+        int expandedStop  = getWindowStop( bounds );
+        if ( l.getStart() < expandedStart ) l = genomeLocParser.setStart(l, expandedStart);
+        if ( l.getStop() > expandedStop  ) l = genomeLocParser.setStop(l, expandedStop);
+        return l;
+    }
+
+    /**
+     * Lazily serves bases out of the cached referenceSequence buffer, starting at
+     * offset refStart (0-based into the buffer) and spanning len bases.
+     */
+    public class Provider implements ReferenceContext.ReferenceContextRefProvider {
+        int refStart, len;
+
+        public Provider( int refStart, int len ) {
+            this.refStart = refStart;
+            this.len = len;
+        }
+
+        public byte[] getBases() {
+            byte[] bases = new byte[len];
+            System.arraycopy(referenceSequence.getBases(), refStart, bases, 0, len);
+            return bases;
+        }
+    }
+
+    /**
+     * Gets the reference context associated with this particular point or extended interval on the genome.
+     * @param genomeLoc Region for which to retrieve the base(s). If region spans beyond contig end or beyond current bounds, it will be trimmed down.
+     * @return The base at the position represented by this genomeLoc.
+     */
+    public ReferenceContext getReferenceContext( GenomeLoc genomeLoc ) {
+        GenomeLoc window = genomeLocParser.createGenomeLoc( genomeLoc.getContig(), genomeLoc.getContigIndex(),
+                getWindowStart(genomeLoc), getWindowStop(genomeLoc) );
+
+        int refStart = -1;
+        if (bounds != null) {
+            // Bounded view: offset is relative to the pre-fetched expanded shard buffer.
+            window = trimToBounds(window);
+            refStart = (int)(window.getStart() - getWindowStart(bounds));
+        }
+        else {
+            // Unbounded view: lazily (re)load the whole contig when it changes,
+            // then index into it with a 0-based offset.
+            if(referenceSequence == null || referenceSequence.getContigIndex() != genomeLoc.getContigIndex())
+                referenceSequence = reference.getSequence(genomeLoc.getContig());
+            refStart = (int)window.getStart()-1;
+        }
+
+        int len = (int)window.size();
+        return new ReferenceContext( genomeLocParser, genomeLoc, window, new Provider(refStart, len));
+    }
+
+    /**
+     * Allow the user to pull reference info from any arbitrary region of the reference.
+     * @param genomeLoc The locus.
+     * @return A list of the bases starting at the start of the locus (inclusive) and ending
+     *         at the end of the locus (inclusive).
+     */
+    public byte[] getReferenceBases( GenomeLoc genomeLoc ) {
+        return super.getReferenceBases(genomeLoc);
+    }
+
+    /**
+     * Gets the start of the expanded window, bounded if necessary by the contig.
+     * @param locus The locus to expand.
+     * @return The start coordinate of the expanded window.
+     */
+    private int getWindowStart( GenomeLoc locus ) {
+        // If the locus is not within the bounds of the contig it allegedly maps to, expand only as much as we can.
+        if(locus.getStart() < 1) return 1;
+        return Math.max( locus.getStart() + windowStart, 1 );
+    }
+
+    /**
+     * Gets the stop of the expanded window, bounded if necessary by the contig.
+     * @param locus The locus to expand.
+     * @return The stop coordinate of the expanded window.
+     */
+    private int getWindowStop( GenomeLoc locus ) {
+        // If the locus is not within the bounds of the contig it allegedly maps to, expand only as much as we can.
+        int sequenceLength = reference.getSequenceDictionary().getSequence(locus.getContig()).getSequenceLength();
+        if(locus.getStop() > sequenceLength) return sequenceLength;
+        return Math.min( locus.getStop() + windowStop, sequenceLength );
+    }
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/providers/LocusShardDataProvider.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/providers/LocusShardDataProvider.java
new file mode 100644
index 0000000..bcb148b
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/providers/LocusShardDataProvider.java
@@ -0,0 +1,100 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.datasources.providers;
+
+import htsjdk.samtools.reference.IndexedFastaSequenceFile;
+import org.broadinstitute.gatk.engine.ReadProperties;
+import org.broadinstitute.gatk.engine.datasources.reads.Shard;
+import org.broadinstitute.gatk.engine.datasources.rmd.ReferenceOrderedDataSource;
+import org.broadinstitute.gatk.utils.locusiterator.LocusIterator;
+import org.broadinstitute.gatk.utils.GenomeLoc;
+import org.broadinstitute.gatk.utils.GenomeLocParser;
+
+import java.util.Collection;
+
+/**
+ * Presents data sharded by locus to the traversal engine.
+ *
+ * @author mhanna
+ * @version 0.1
+ */
+public class LocusShardDataProvider extends ShardDataProvider {
+    /**
+     * Information about the source of the read data.
+     */
+    private final ReadProperties sourceInfo;
+
+    /**
+     * The particular locus for which data is provided.  Should be contained within shard.getGenomeLocs().
+     */
+    private final GenomeLoc locus;
+
+    /**
+     * Iterator over the locus data in this shard.
+     */
+    private final LocusIterator locusIterator;
+
+    /**
+     * Create a data provider for the shard given the reads and reference.
+     * @param shard The chunk of data over which traversals happen.
+     * @param sourceInfo Information about the source of the read data.
+     * @param genomeLocParser Parser used to create genome locs.
+     * @param locus The particular locus for which data is provided; should be contained within shard.getGenomeLocs().
+     * @param locusIterator Iterator over the locus data in this shard.
+     * @param reference A getter for a section of the reference.
+     * @param rods The reference-ordered data sources for this traversal.
+     */
+    public LocusShardDataProvider(Shard shard, ReadProperties sourceInfo, GenomeLocParser genomeLocParser, GenomeLoc locus, LocusIterator locusIterator, IndexedFastaSequenceFile reference, Collection<ReferenceOrderedDataSource> rods) {
+        super(shard,genomeLocParser,reference,rods);
+        this.sourceInfo = sourceInfo;
+        this.locus = locus;
+        this.locusIterator = locusIterator;
+    }
+
+    /**
+     * Returns information about the source of the reads.
+     * @return Info about the source of the reads.
+     */
+    public ReadProperties getSourceInfo() {
+        return sourceInfo;
+    }
+
+    /**
+     * Gets the locus associated with this shard data provider.
+     * @return The locus.
+     */
+    public GenomeLoc getLocus() {
+        return locus;
+    }
+
+    /**
+     * Gets the iterator over the locus data bound by this shard.
+     * @return An iterator over the locus data in this shard.
+     */
+    public LocusIterator getLocusIterator() {
+        return locusIterator;
+    }
+
+    // NOTE(review): a close() override that only delegated to super.close() was
+    // removed; the inherited implementation is behaviorally identical.
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/providers/LocusView.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/providers/LocusView.java
new file mode 100644
index 0000000..78262b5
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/providers/LocusView.java
@@ -0,0 +1,219 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.datasources.providers;
+
+import org.broadinstitute.gatk.engine.ReadProperties;
+import org.broadinstitute.gatk.utils.contexts.AlignmentContext;
+import org.broadinstitute.gatk.utils.locusiterator.LocusIterator;
+import org.broadinstitute.gatk.utils.GenomeLoc;
+import org.broadinstitute.gatk.utils.GenomeLocParser;
+import org.broadinstitute.gatk.utils.locusiterator.LocusIteratorByState;
+
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.NoSuchElementException;
+
+/**
+ * User: hanna
+ * Date: May 13, 2009
+ * Time: 3:30:16 PM
+ * BROAD INSTITUTE SOFTWARE COPYRIGHT NOTICE AND AGREEMENT
+ * Software and documentation are copyright 2005 by the Broad Institute.
+ * All rights are reserved.
+ *
+ * Users acknowledge that this software is supplied without any warranty or support.
+ * The Broad Institute is not responsible for its use, misuse, or
+ * functionality.
+ */
+
+/**
+ * The two goals of the LocusView are as follows:
+ * 1) To provide a 'trigger track' iteration interface so that TraverseLoci can easily switch
+ *    between iterating over all bases in a region, only covered bases in a region covered by
+ *    reads, only bases in a region covered by RODs, or any other sort of trigger track
+ *    implementation one can think of.
+ * 2) To manage the copious number of iterators that have to be jointly pulled through the
+ *    genome to make a locus traversal function.
+ */
+public abstract class LocusView extends LocusIterator implements View {
+    /**
+     * The locus bounding this view.  Null after close(), and may be null when the
+     * provider supplies no locus.
+     */
+    protected GenomeLoc locus;
+
+    /**
+     * The GenomeLocParser, used to create new genome locs.
+     */
+    protected GenomeLocParser genomeLocParser;
+
+    /**
+     * Source info for this view.  Informs the class about downsampling requirements.
+     * Held only so close() can release it; not otherwise consulted in this class.
+     */
+    private ReadProperties sourceInfo;
+
+    /**
+     * The actual locus context iterator.
+     */
+    private LocusIterator loci;
+
+    /**
+     * The next locus context from the iterator.  Lazy loaded: if nextLocus is null and advance() doesn't
+     * populate it, the iterator is exhausted.  If populated, this is the value that should be returned by
+     * next(). 
+     */
+    private AlignmentContext nextLocus = null;
+
+    public LocusView(LocusShardDataProvider provider) {
+        this.locus = provider.getLocus();
+        
+        this.sourceInfo = provider.getSourceInfo();
+        this.genomeLocParser = provider.getGenomeLocParser();
+        this.loci = provider.getLocusIterator();
+
+        // Prime nextLocus so hasNext()/next() are immediately usable.
+        advance();
+
+        provider.register(this);
+    }
+
+    /**
+     * Only one view of the locus is supported at any given time.
+     * @return A list consisting of all other locus views.
+     */
+    public Collection<Class<? extends View>> getConflictingViews() {
+        return Arrays.<Class<? extends View>>asList(LocusView.class,ReadView.class);
+    }
+
+    /**
+     * Close this view.
+     */
+    public void close() {
+        // Set everything to null with the hope of failing fast.
+        locus = null;
+        sourceInfo = null;
+        loci = null;
+
+        super.close();
+    }
+
+    /**
+     * Is there another covered locus context bounded by this view.
+     * @return True if another covered locus context exists.  False otherwise.
+     */
+    public abstract boolean hasNext();
+
+    /**
+     * Returns the next covered locus context in the shard.
+     * @return Next covered locus context in the shard.
+     * @throws NoSuchElementException if no such element exists.
+     */
+    public abstract AlignmentContext next();
+
+    /**
+     * Unsupported.
+     * @throws UnsupportedOperationException always.
+     */
+    public void remove() {
+        throw new UnsupportedOperationException("Unable to remove elements from this queue.");
+    }
+
+    /**
+     * Is there another locus context bounded by this shard.
+     * @return True if another locus context is bounded by this shard.
+     */
+    protected boolean hasNextLocus() {
+        advance();
+        return nextLocus != null;
+    }
+
+    /**
+     * Get the next locus context bounded by this shard.
+     * @return Next locus context bounded by this shard.
+     * @throws NoSuchElementException if the next element is missing.
+     */
+    protected AlignmentContext nextLocus() {
+        advance();
+        if(nextLocus == null)
+            throw new NoSuchElementException("No more elements remain in locus context queue.");
+
+        // Cache the current and apply filtering.
+        AlignmentContext current = nextLocus;
+
+        // Indicate that the next operation will need to advance.
+        nextLocus = null;
+        
+        return current;
+    }
+
+    /**
+     * Seed the nextLocus variable with the contents of the next locus (if one exists).
+     * No-op when an unclaimed locus is already cached; leaves nextLocus null when the
+     * underlying iterator is exhausted or no remaining locus falls within the shard.
+     */
+    private void advance() {
+        // Already an unclaimed locus present
+        if(nextLocus != null)
+            return;
+
+        if( !loci.hasNext() ) {
+            nextLocus = null;
+            return;
+        }
+
+        nextLocus = loci.next();
+
+        // If the location of this shard is available, trim the data stream to match the shard.
+        // TODO: Much of this functionality is being replaced by the WindowMaker.
+        if(locus != null) {
+            // Iterate through any elements not contained within this shard.
+            while( nextLocus != null && !isContainedInShard(nextLocus.getLocation()) && loci.hasNext() )
+                nextLocus = loci.next();
+
+            // If nothing in the shard was found, indicate that by setting nextLocus to null.
+            if( nextLocus != null && !isContainedInShard(nextLocus.getLocation()) )
+                nextLocus = null;
+        }
+    }
+
+    /**
+     * Is this location contained in the given shard.
+     * @param location Location to check.
+     * @return True if the given location is contained within the shard.  False otherwise.
+     */
+    private boolean isContainedInShard(GenomeLoc location) {
+        return locus.containsP(location);
+    }
+
+    /**
+     * {@inheritDoc}
+     *
+     * Since this class has an actual LIBS, this function will never throw an exception
+     *
+     * @return the LocusIteratorByState used by this view to get pileups
+     */
+    @Override
+    public LocusIteratorByState getLIBS() {
+        return loci.getLIBS();
+    }
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/providers/ManagingReferenceOrderedView.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/providers/ManagingReferenceOrderedView.java
new file mode 100644
index 0000000..bb5fcdd
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/providers/ManagingReferenceOrderedView.java
@@ -0,0 +1,116 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.datasources.providers;
+
+import org.broadinstitute.gatk.engine.datasources.rmd.ReferenceOrderedDataSource;
+import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
+import org.broadinstitute.gatk.utils.refdata.utils.LocationAwareSeekableRODIterator;
+import org.broadinstitute.gatk.utils.refdata.utils.RODRecordList;
+import org.broadinstitute.gatk.utils.GenomeLoc;
+
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.List;
+/**
+ * User: hanna
+ * Date: May 21, 2009
+ * Time: 2:49:17 PM
+ * BROAD INSTITUTE SOFTWARE COPYRIGHT NOTICE AND AGREEMENT
+ * Software and documentation are copyright 2005 by the Broad Institute.
+ * All rights are reserved.
+ *
+ * Users acknowledge that this software is supplied without any warranty or support.
+ * The Broad Institute is not responsible for its use, misuse, or
+ * functionality.
+ */
+
+/**
+ * A view into the reference-ordered data (ROD) in the provider.
+ *
+ * On construction, every registered ROD data source is seeked to the provider's locus
+ * and the resulting iterator is tracked alongside its source.
+ */
+public class ManagingReferenceOrderedView implements ReferenceOrderedView {
+    /**
+     * The data sources along with their current states (each source paired with an
+     * iterator seeked to the provider's locus).  Set to null by close() so that
+     * post-close accesses fail fast.
+     */
+    private List<ReferenceOrderedDataState> states = new ArrayList<ReferenceOrderedDataState>();
+
+    /**
+     * Create a new view of reference-ordered data: seek each ROD data source to the
+     * provider's locus and register this view with the provider for lifecycle callbacks.
+     * @param provider supplies the ROD data sources and the locus to seek to.
+     */
+    public ManagingReferenceOrderedView( LocusShardDataProvider provider ) {
+        for( ReferenceOrderedDataSource dataSource: provider.getReferenceOrderedData() )
+            states.add(new ReferenceOrderedDataState(dataSource, dataSource.seek(provider.getLocus())));
+
+        provider.register(this);
+    }
+
+    /**
+     * This view does not conflict with any other view type.
+     * @return An empty collection.
+     */
+    @Override
+    public Collection<Class<? extends View>> getConflictingViews() { return Collections.emptyList(); }
+
+    /**
+     * Gets an object which can track the reference-ordered data at every locus.
+     * @param loc Locus at which to track.
+     * @return A tracker containing information about this locus.
+     */
+    @Override
+    public RefMetaDataTracker getReferenceOrderedDataAtLocus( GenomeLoc loc ) {
+        if ( states.isEmpty() )
+            return RefMetaDataTracker.EMPTY_TRACKER;
+
+        final List<RODRecordList> bindings = new ArrayList<RODRecordList>(states.size());
+        for ( ReferenceOrderedDataState state: states )
+            // todo -- warning, I removed the reference to the name from states
+            bindings.add( state.iterator.seekForward(loc) );
+
+        return new RefMetaDataTracker(bindings);
+    }
+
+    /**
+     * Closes the current view, closing each seeked iterator through its owning data source.
+     */
+    @Override
+    public void close() {
+        for( ReferenceOrderedDataState state: states )
+            state.dataSource.close( state.iterator );
+
+        // Clear out the existing data so that post-close() accesses to this data will fail-fast.
+        states = null;
+    }
+}
+
+/**
+ * Models the traversal state of a given ROD lane: the data source a lane draws from,
+ * paired with the iterator currently positioned within that source.
+ */
+class ReferenceOrderedDataState {
+    /** Source this lane draws reference-ordered data from. */
+    public final ReferenceOrderedDataSource dataSource;
+    /** Iterator currently walking the source. */
+    public final LocationAwareSeekableRODIterator iterator;
+
+    public ReferenceOrderedDataState( final ReferenceOrderedDataSource source, final LocationAwareSeekableRODIterator sourceIterator ) {
+        this.dataSource = source;
+        this.iterator = sourceIterator;
+    }
+}
\ No newline at end of file
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/providers/RODMetaDataContainer.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/providers/RODMetaDataContainer.java
new file mode 100644
index 0000000..4198985
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/providers/RODMetaDataContainer.java
@@ -0,0 +1,83 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.datasources.providers;
+
+import org.broadinstitute.gatk.utils.refdata.utils.GATKFeature;
+import org.broadinstitute.gatk.utils.collections.Pair;
+
+import java.util.*;
+
+
+/**
+ * 
+ * @author aaron 
+ * 
+ * Class RODMetaDataContainer
+ *
+ * stores both the name and the class for each ROD.  This class assumes that:
+ *
+ * -Names must be unique
+ * -Classes are allowed to have duplicates
+ *
+ * This class encapsulates the ref data associations, and provides lookup by name and by
+ * class type.
+ *
+ */
+public class RODMetaDataContainer {
+    // we only allow non-duplicate ROD names, a HashMap is fine
+    private final HashMap<String, GATKFeature> nameMap = new HashMap<String, GATKFeature>();
+
+    // we do allow duplicate class entries, so we need to store pairs of data
+    private final List<Pair<Class, GATKFeature>> classMap = new ArrayList<Pair<Class, GATKFeature>>();
+
+    public void addEntry(GATKFeature data) {
+        nameMap.put(data.getName(),data);
+        classMap.add(new Pair<Class, GATKFeature>(data.getClass(),data));
+    }
+
+    public Collection<GATKFeature> getSet(String name) {
+        if (name == null) return getSet();
+        Set<GATKFeature> set = new HashSet<GATKFeature>();
+        if (nameMap.containsKey(name)) set.add(nameMap.get(name));
+        return set;
+    }
+
+    /**
+     * get the feature contents of this container; the unfiltered set without their name association
+     * @return
+     */
+    public Collection<GATKFeature> getSet() {
+        return new ArrayList<GATKFeature>(nameMap.values());
+    }
+
+    // the brute force (n) search ended up being faster than sorting and binary search in all but the most extreme cases (thousands of RODs at a location).
+    public Collection<GATKFeature> getSet(Class cls) {
+        Collection<GATKFeature> ret = new ArrayList<GATKFeature>();
+        for (Pair<Class, GATKFeature> pair: classMap)
+            if (pair.first.equals(cls)) ret.add(pair.second);
+        return ret;
+    }
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/providers/ReadBasedReferenceOrderedView.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/providers/ReadBasedReferenceOrderedView.java
new file mode 100644
index 0000000..ae555c3
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/providers/ReadBasedReferenceOrderedView.java
@@ -0,0 +1,60 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.datasources.providers;
+
+import com.google.java.contract.Ensures;
+import com.google.java.contract.Requires;
+import htsjdk.samtools.SAMRecord;
+import org.broadinstitute.gatk.engine.datasources.reads.ReadShard;
+import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
+import org.broadinstitute.gatk.utils.GenomeLoc;
+
+/** a ROD view for reads. This provides the Read traversals a way of getting a RefMetaDataTracker */
+public class ReadBasedReferenceOrderedView extends IntervalReferenceOrderedView {
+    public ReadBasedReferenceOrderedView(final ShardDataProvider provider) {
+        super(provider, spanForProvider(provider));
+    }
+
+    /**
+     * Determine the interval the parent view should cover: the span of the shard's
+     * reads, or null when the provider carries no reference-ordered data.
+     * @param provider the shard data provider being wrapped.
+     * @return the reads span of the provider's shard, or null.
+     */
+    private static GenomeLoc spanForProvider(final ShardDataProvider provider) {
+        if (!provider.hasReferenceOrderedData())
+            return null;
+        return ((ReadShard) provider.getShard()).getReadsSpan();
+    }
+
+    /**
+     * create a RefMetaDataTracker given the current read
+     *
+     * @param rec the read
+     *
+     * @return a RefMetaDataTracker for the read, from which you can get ROD -> read alignments
+     */
+    @Requires("rec != null")
+    @Ensures("result != null")
+    public RefMetaDataTracker getReferenceOrderedDataForRead(final SAMRecord rec) {
+        if (!rec.getReadUnmappedFlag()) {
+            final GenomeLoc readSpan = genomeLocParser.createGenomeLoc(rec);
+            trimCurrentFeaturesToLoc(readSpan);
+            return getReferenceOrderedDataForInterval(readSpan);
+        }
+        // Unmapped reads have no position, hence no overlapping reference-ordered data.
+        return RefMetaDataTracker.EMPTY_TRACKER;
+    }
+}
+
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/providers/ReadReferenceView.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/providers/ReadReferenceView.java
new file mode 100644
index 0000000..6687ee6
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/providers/ReadReferenceView.java
@@ -0,0 +1,102 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.datasources.providers;
+
+import htsjdk.samtools.SAMRecord;
+import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
+import org.broadinstitute.gatk.utils.GenomeLoc;
+/*
+ * Copyright (c) 2009 The Broad Institute
+ *
+ * Permission is hereby granted, free of charge, to any person
+ * obtaining a copy of this software and associated documentation
+ * files (the "Software"), to deal in the Software without
+ * restriction, including without limitation the rights to use,
+ * copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the
+ * Software is furnished to do so, subject to the following
+ * conditions:
+ *
+ * The above copyright notice and this permission notice shall be
+ * included in all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+ * OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+ * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+ * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+ * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+ * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+ * OTHER DEALINGS IN THE SOFTWARE.
+ */
+
+/**
+ * User: hanna
+ * Date: May 22, 2009
+ * Time: 12:36:14 PM
+ *
+ */
+
+/** Provides access to the reference sequence underlying a single read. */
+
+public class ReadReferenceView extends ReferenceView {
+    /**
+     * Create a view of the reference with respect to a single read.
+     *
+     * @param provider shard data provider backing this view.
+     */
+    public ReadReferenceView( ShardDataProvider provider ) {
+        super(provider);
+    }
+
+    /**
+     * Wrap the given locus in a lazy provider of reference bases.
+     * @param genomeLoc span whose bases should be fetched on demand.
+     * @return a provider that fetches the bases when asked.
+     */
+    protected ReferenceContext.ReferenceContextRefProvider getReferenceBasesProvider( GenomeLoc genomeLoc ) {
+        return new Provider(genomeLoc);
+    }
+
+    /** Lazily hands out the reference bases spanning a fixed locus. */
+    public class Provider implements ReferenceContext.ReferenceContextRefProvider {
+        GenomeLoc loc;
+
+        public Provider( GenomeLoc loc ) {
+            this.loc = loc;
+        }
+
+        public byte[] getBases() {
+            // Defer the actual fetch to the enclosing view.
+            return getReferenceBases(loc);
+        }
+    }
+
+    /**
+     * Return a reference context appropriate for the span of read
+     *
+     * @param read the mapped read to test
+     * @return reference context covering exactly the read's alignment span.
+     */
+    public ReferenceContext getReferenceContext( final SAMRecord read ) {
+        final GenomeLoc span = genomeLocParser.createGenomeLoc(read);
+        return new ReferenceContext( genomeLocParser, span, span, getReferenceBasesProvider(span) );
+    }
+
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/providers/ReadShardDataProvider.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/providers/ReadShardDataProvider.java
new file mode 100644
index 0000000..541787f
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/providers/ReadShardDataProvider.java
@@ -0,0 +1,82 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.datasources.providers;
+
+import htsjdk.samtools.reference.IndexedFastaSequenceFile;
+import org.broadinstitute.gatk.engine.datasources.reads.Shard;
+import org.broadinstitute.gatk.engine.datasources.rmd.ReferenceOrderedDataSource;
+import org.broadinstitute.gatk.utils.iterators.GATKSAMIterator;
+import org.broadinstitute.gatk.utils.GenomeLocParser;
+
+import java.util.Collection;
+
+/**
+ * Present data sharded by read to a traversal engine.
+ *
+ * @author mhanna
+ * @version 0.1
+ */
+public class ReadShardDataProvider extends ShardDataProvider {
+    /**
+     * The raw collection of reads backing this shard; may be null when no reads were supplied.
+     */
+    private final GATKSAMIterator reads;
+
+    /**
+     * Create a data provider for the shard given the reads and reference.
+     * @param shard The chunk of data over which traversals happen.
+     * @param genomeLocParser parser handed to the base provider for building GenomeLocs.
+     * @param reads iterator over the shard's reads; may be null.
+     * @param reference A getter for a section of the reference.
+     * @param rods reference-ordered data sources attached to this traversal.
+     */
+    public ReadShardDataProvider(Shard shard, GenomeLocParser genomeLocParser, GATKSAMIterator reads, IndexedFastaSequenceFile reference, Collection<ReferenceOrderedDataSource> rods) {
+        super(shard,genomeLocParser,reference,rods);
+        this.reads = reads;
+    }
+
+    /**
+     * Can this data source provide reads?
+     * @return True if reads are available, false otherwise.
+     */
+    public boolean hasReads() {
+        return (reads != null);
+    }
+
+    /**
+     * Gets an iterator over all the reads bound by this shard.
+     * @return An iterator over all reads in this shard.
+     */
+    public GATKSAMIterator getReadIterator() {
+        return reads;
+    }
+
+    /**
+     * Close the base provider's resources first, then the read iterator if one exists.
+     */
+    @Override
+    public void close() {
+        super.close();
+        if (reads != null) {
+            reads.close();
+        }
+    }
+
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/providers/ReadView.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/providers/ReadView.java
new file mode 100644
index 0000000..5c6bf31
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/providers/ReadView.java
@@ -0,0 +1,88 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.datasources.providers;
+
+import htsjdk.samtools.SAMRecord;
+import org.broadinstitute.gatk.utils.iterators.GATKSAMIterator;
+
+import java.util.Arrays;
+import java.util.Collection;
+/**
+ * User: hanna
+ * Date: May 22, 2009
+ * Time: 12:06:54 PM
+ * BROAD INSTITUTE SOFTWARE COPYRIGHT NOTICE AND AGREEMENT
+ * Software and documentation are copyright 2005 by the Broad Institute.
+ * All rights are reserved.
+ *
+ * Users acknowledge that this software is supplied without any warranty or support.
+ * The Broad Institute is not responsible for its use, misuse, or
+ * functionality.
+ */
+
+/**
+ * A view into the reads that a provider can provide.
+ */
+public class ReadView implements View, Iterable<SAMRecord> {
+    /**
+     * The iterator into the reads supplied by this provider; nulled on close for fast failure.
+     */
+    private GATKSAMIterator reads;
+
+    /**
+     * Create a new view of the reads given the current data set.
+     * @param provider Source for the data.
+     */
+    public ReadView( ReadShardDataProvider provider ) {
+        this.reads = provider.getReadIterator();
+    }
+
+    /**
+     * Other reads and loci conflict with this view.
+     * @return Collection holding the read and locus view classes.
+     */
+    public Collection<Class<? extends View>> getConflictingViews() {
+        return Arrays.<Class<? extends View>>asList(ReadView.class, LocusView.class);
+    }
+
+    /**
+     * Close the view over these reads.  Note that this method closes just
+     * the view into the reads, not the reads themselves -- the provider
+     * owns and closes the underlying iterator.
+     */
+    public void close() {
+        // Drop the pointer only; the provider is responsible for closing it.
+        reads = null;
+    }
+
+    /**
+     * Gets an iterator into the reads supplied by this provider.
+     * @return Iterator into the reads that this provider covers.
+     */
+    public GATKSAMIterator iterator() {
+        return reads;
+    }
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/providers/ReferenceOrderedView.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/providers/ReferenceOrderedView.java
new file mode 100644
index 0000000..93a2e0a
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/providers/ReferenceOrderedView.java
@@ -0,0 +1,33 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.datasources.providers;
+
+import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
+import org.broadinstitute.gatk.utils.GenomeLoc;
+
+/**
+ * A view that can supply the reference-ordered data (ROD) bindings overlapping a locus.
+ */
+public interface ReferenceOrderedView extends View {
+    /**
+     * Gets a tracker over the reference-ordered data at the given locus.
+     * @param loc locus to query.
+     * @return tracker containing the ROD bindings at that locus.
+     */
+    RefMetaDataTracker getReferenceOrderedDataAtLocus( GenomeLoc loc );
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/providers/ReferenceView.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/providers/ReferenceView.java
new file mode 100644
index 0000000..870d5ba
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/providers/ReferenceView.java
@@ -0,0 +1,131 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.datasources.providers;
+
+import htsjdk.samtools.reference.IndexedFastaSequenceFile;
+import htsjdk.samtools.reference.ReferenceSequence;
+import htsjdk.samtools.SAMRecord;
+import htsjdk.samtools.SAMSequenceRecord;
+import org.broadinstitute.gatk.utils.GenomeLoc;
+import org.broadinstitute.gatk.utils.GenomeLocParser;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+/**
+ * User: hanna
+ * Date: May 22, 2009
+ * Time: 12:19:17 PM
+ * BROAD INSTITUTE SOFTWARE COPYRIGHT NOTICE AND AGREEMENT
+ * Software and documentation are copyright 2005 by the Broad Institute.
+ * All rights are reserved.
+ *
+ * Users acknowledge that this software is supplied without any warranty or support.
+ * The Broad Institute is not responsible for its use, misuse, or
+ * functionality.
+ */
+
+/**
+ * A view into the reference backing this shard.
+ */
+public class ReferenceView implements View {
+    /**
+     * The parser, used to create and parse GenomeLocs.
+     */
+    protected final GenomeLocParser genomeLocParser;
+
+    /**
+     * The source of reference data.  Nulled out by close() so post-close use fails fast.
+     */
+    protected IndexedFastaSequenceFile reference = null;
+
+    /**
+     * Create a new ReferenceView backed by the provider's parser and indexed reference.
+     * @param provider source of the GenomeLoc parser and the reference file.
+     */
+    public ReferenceView( ShardDataProvider provider ) {
+        this.genomeLocParser = provider.getGenomeLocParser();
+        this.reference = provider.getReference();
+    }
+
+    /**
+     * Reference views don't conflict with anything else.
+     * @return Empty list.
+     */
+    public Collection<Class<? extends View>> getConflictingViews() { return Collections.emptyList(); }
+
+    /**
+     * Deinitialize pointers for fast fail.  Someone else will handle file management.
+     */
+    public void close() {
+        reference = null;
+    }
+
+    // Maximum number of 'X' padding bases available for loci overhanging the end of a
+    // contig; see getReferenceBases(GenomeLoc) below.
+    final static int BUFFER = 10000;
+    // Shared, read-only buffer of 'X' bases used to pad past-the-end requests.
+    final static byte[] Xs = new byte[BUFFER];
+    static {
+        Arrays.fill(Xs, (byte)'X');
+    }
+
+    /**
+     * Fetch the reference bases spanning the given read's alignment.
+     * @param read a mapped read; its alignment span defines the locus.
+     * @return bases over the read's span, 'X'-padded if the span overhangs the contig end.
+     */
+    protected byte[] getReferenceBases( SAMRecord read ) {
+        return getReferenceBases(genomeLocParser.createGenomeLoc(read));
+
+    }
+
+    /**
+     * Allow the user to pull reference info from any arbitrary region of the reference.
+     * If parts of the reference don't exist, mark them in the char array with 'X'es.
+     * @param genomeLoc The locus.
+     * @return A list of the bases starting at the start of the locus (inclusive) and ending
+     *         at the end of the locus (inclusive).
+     */
+    protected byte[] getReferenceBases( GenomeLoc genomeLoc ) {
+        SAMSequenceRecord sequenceInfo = reference.getSequenceDictionary().getSequence(genomeLoc.getContig());
+
+        long start = genomeLoc.getStart();
+        // Clamp the stop to the contig end; any overhang is padded with 'X's below.
+        long stop = Math.min( genomeLoc.getStop(), sequenceInfo.getSequenceLength() );
+
+        // Read with no aligned bases?  Return an empty array.
+        // NOTE(review): if start lies past the contig end the span goes negative, and
+        // getSubsequenceAt below would be handed an invalid range -- presumably callers
+        // never request such a locus; confirm upstream.
+        if(stop - start + 1 == 0)
+            return new byte[0];
+
+        ReferenceSequence subsequence = reference.getSubsequenceAt(genomeLoc.getContig(), start, stop);
+
+        // Number of requested bases past the contig end; append that many 'X's.
+        int overhang = (int)(genomeLoc.getStop() - stop);
+        if ( overhang > 0 ) {
+            if ( overhang > BUFFER ) // todo -- this is a bit dangerous
+                throw new ReviewedGATKException("Insufficient buffer size for Xs overhanging genome -- expand BUFFER");
+            byte[] all = new byte[subsequence.getBases().length + overhang];
+            System.arraycopy(subsequence.getBases(), 0, all, 0, subsequence.getBases().length);
+            System.arraycopy(Xs, 0, all, subsequence.getBases().length, overhang);
+            return all;
+        } else {
+            // fast path
+            return subsequence.getBases();
+        }
+    }
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/providers/RodLocusView.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/providers/RodLocusView.java
new file mode 100644
index 0000000..269cb6f
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/providers/RodLocusView.java
@@ -0,0 +1,196 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.datasources.providers;
+
+import org.broadinstitute.gatk.utils.contexts.AlignmentContext;
+import org.broadinstitute.gatk.engine.datasources.rmd.ReferenceOrderedDataSource;
+import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
+import org.broadinstitute.gatk.utils.refdata.utils.LocationAwareSeekableRODIterator;
+import org.broadinstitute.gatk.utils.refdata.utils.RODRecordList;
+import org.broadinstitute.gatk.utils.GenomeLoc;
+import org.broadinstitute.gatk.utils.collections.RODMergingIterator;
+import org.broadinstitute.gatk.utils.pileup.ReadBackedPileupImpl;
+
+import java.util.*;
+
+/**
+ * A view into the reference-ordered data in the provider.
+ */
+public class RodLocusView extends LocusView implements ReferenceOrderedView {
+    /**
+     * Merged, location-ordered queue of the ROD records remaining in this shard.
+     */
+    private RODMergingIterator rodQueue = null;
+
+    // Tracks spanning the locus most recently returned by next(); refreshed there.
+    Collection<RODRecordList> allTracksHere;
+
+    // Location of the last locus returned by next(), or null before the first call.
+    GenomeLoc lastLoc = null;
+    // The special "interval" ROD for this shard, if one was supplied; held out of rodQueue.
+    RODRecordList interval = null;
+
+    /**
+     * The data sources along with their current states.
+     */
+    private List<ReferenceOrderedDataState> states = new ArrayList<ReferenceOrderedDataState>();    
+
+    /**
+     * Enable debugging output -- todo remove me
+     */
+    final static boolean DEBUG = false;
+
+    // Name under which the engine registers the traversal-interval ROD.
+    final static String INTERVAL_ROD_NAME = "interval";
+
+    /**
+     * Create a new view of reference-ordered data.
+     *
+     * @param provider source of the shard's locus, its ROD data sources, and the GenomeLoc parser
+     */
+    public RodLocusView( LocusShardDataProvider provider ) {
+        super(provider);
+
+        GenomeLoc loc = provider.getLocus();
+
+        List< Iterator<RODRecordList> > iterators = new LinkedList< Iterator<RODRecordList> >();
+        for( ReferenceOrderedDataSource dataSource: provider.getReferenceOrderedData() ) {
+            if ( DEBUG ) System.out.printf("Shard is %s%n", provider.getLocus());
+
+            // grab the ROD iterator from the data source, and compute the first location in this shard, forwarding
+            // the iterator to immediately before it, so that it can be added to the merging iterator primed for
+            // next() to return the first real ROD in this shard
+            LocationAwareSeekableRODIterator it = dataSource.seek(provider.getLocus());
+            it.seekForward(genomeLocParser.createGenomeLoc(loc.getContig(), loc.getStart()-1));
+
+            states.add(new ReferenceOrderedDataState(dataSource,it));            
+
+            // we need to special case the interval so we don't always think there's a rod at the first location
+            if ( dataSource.getName().equals(INTERVAL_ROD_NAME) ) {
+                if ( interval != null )
+                    // NOTE(review): the message says "local variable", but interval is a field of this class.
+                    throw new RuntimeException("BUG: interval local variable already assigned " + interval);
+                interval = it.next();
+            } else {
+                iterators.add( it );
+            }
+        }
+
+        rodQueue = new RODMergingIterator(iterators);
+    }
+
+    /**
+     * Builds a tracker over the tracks spanning the current locus.
+     *
+     * NOTE(review): this re-adds {@code interval} to {@code allTracksHere} on every
+     * invocation, so calling it twice for the same locus duplicates the interval entry,
+     * and calling it before the first next() will NPE on the null collection -- confirm
+     * callers invoke it at most once per next().
+     */
+    @Override
+    public RefMetaDataTracker getReferenceOrderedDataAtLocus( GenomeLoc loc ) {
+        // special case the interval again -- add it into the ROD
+        if ( interval != null ) { allTracksHere.add(interval); }
+        return new RefMetaDataTracker(allTracksHere);
+    }
+
+    // True iff another ROD-covered locus remains within this shard's bounds.
+    public boolean hasNext() {
+        if ( ! rodQueue.hasNext() )
+            return false;
+        else {
+            return ! rodQueue.peekLocation().isPast(locus);
+        }
+    }
+
+    /**
+     * Returns the next covered locus context in the shard.
+     * @return Next covered locus context in the shard.
+     * @throws NoSuchElementException if no such element exists.
+     */
+    public AlignmentContext next() {
+        if ( DEBUG ) System.out.printf("In RodLocusView.next()...%n");
+        RODRecordList datum = rodQueue.next();
+        if ( DEBUG ) System.out.printf("In RodLocusView.next(); datum = %s...%n", datum.getLocation());
+
+        if ( DEBUG ) System.out.printf("In RodLocusView.next(): creating tracker...%n");
+
+        // Collect every track overlapping this datum, then build a single-base locus
+        // at the datum's start position.
+        allTracksHere = getSpanningTracks(datum);
+        GenomeLoc rodSite = datum.getLocation();
+        GenomeLoc site = genomeLocParser.createGenomeLoc( rodSite.getContig(), rodSite.getStart(), rodSite.getStart());
+
+        if ( DEBUG ) System.out.printf("rodLocusView.next() is at %s%n", site);
+
+        // calculate the number of skipped bases, and update lastLoc so we can do that again in the next()
+        long skippedBases = getSkippedBases( rodSite );
+        lastLoc = site;
+        return new AlignmentContext(site, new ReadBackedPileupImpl(site), skippedBases);
+    }
+
+    // All queued tracks whose location is <= the marker's, i.e. those spanning it.
+    private Collection<RODRecordList> getSpanningTracks(RODRecordList marker) {
+        return rodQueue.allElementsLTE(marker);
+    }
+
+    /**
+     * Returns the number of reference bases that have been skipped:
+     *
+     * 1 -- since the last processed location if we have one
+     * 2 -- from the beginning of the shard if this is the first loc
+     * 3 -- from the last location to the current position
+     *
+     * @param currentPos position whose distance from the previous locus is measured
+     * @return number of skipped bases (never negative)
+     */
+    private long getSkippedBases( GenomeLoc currentPos ) {
+        // When lastLoc == null nothing in this interval has been seen yet, so the base at
+        // the shard start counts as skipped too -- hence comparing against start - 1.
+        Integer compStop = lastLoc == null ? locus.getStart() - 1 : lastLoc.getStop();
+        long skippedBases = currentPos.getStart() - compStop  - 1;
+
+        if ( skippedBases < -1 ) { // minus 1 value is ok
+            throw new RuntimeException(String.format("BUG: skipped bases=%d is < 0: cur=%s vs. last=%s, shard=%s",
+                    skippedBases, currentPos, lastLoc, locus));
+        }
+        return Math.max(skippedBases, 0);
+    }
+
+    /**
+     * Get the location one after the last position we will traverse through
+     * @return single-position locus at shard stop + 1 on the shard's contig
+     */
+    public GenomeLoc getLocOneBeyondShard() {
+        return genomeLocParser.createGenomeLoc(locus.getContig(),locus.getStop()+1);
+    }
+
+    /**
+     * How many bases are we skipping from the current location to the end of the interval / shard
+     * if we have no more elements
+     *
+     * @return number of bases between the last returned locus and the end of the shard
+     */
+    public long getLastSkippedBases() {
+        if ( hasNext() )
+            throw new RuntimeException("BUG: getLastSkippedBases called when there are elements remaining.");
+
+        return getSkippedBases(getLocOneBeyondShard());
+    }
+
+    /**
+     * Closes the current view.
+     */
+    public void close() {
+        for( ReferenceOrderedDataState state: states )
+            state.dataSource.close( state.iterator );
+
+        rodQueue = null;
+        allTracksHere = null;
+    }
+}
\ No newline at end of file
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/providers/ShardDataProvider.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/providers/ShardDataProvider.java
new file mode 100644
index 0000000..99ad5a5
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/providers/ShardDataProvider.java
@@ -0,0 +1,197 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.datasources.providers;
+
+import htsjdk.samtools.reference.IndexedFastaSequenceFile;
+import org.broadinstitute.gatk.engine.datasources.reads.Shard;
+import org.broadinstitute.gatk.engine.datasources.rmd.ReferenceOrderedDataSource;
+import org.broadinstitute.gatk.utils.GenomeLocParser;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.List;
+/**
+ * User: hanna
+ * Date: May 8, 2009
+ * Time: 3:09:57 PM
+ * BROAD INSTITUTE SOFTWARE COPYRIGHT NOTICE AND AGREEMENT
+ * Software and documentation are copyright 2005 by the Broad Institute.
+ * All rights are reserved.
+ *
+ * Users acknowledge that this software is supplied without any warranty or support.
+ * The Broad Institute is not responsible for its use, misuse, or
+ * functionality.
+ */
+
+/**
+ * An umbrella class that examines the data passed to the microscheduler and
+ * tries to assemble as much as possible with it. 
+ */
+public abstract class ShardDataProvider {
+    /**
+     * An ArrayList of all the views that are examining this data.
+     */
+    private List<View> registeredViews = new ArrayList<View>();
+
+    /**
+     * The shard over which we're providing data.
+     */
+    private final Shard shard;
+
+    /**
+     * The parser, used to create and build new GenomeLocs.
+     */
+    private final GenomeLocParser genomeLocParser;
+
+    /**
+     * Provider of reference data for this particular shard.
+     */
+    private final IndexedFastaSequenceFile reference;
+
+    /**
+     * Sources of reference-ordered data.
+     */
+    private final Collection<ReferenceOrderedDataSource> referenceOrderedData;
+
+    /**
+     * Returns the GenomeLocParser associated with this traversal.
+     * @return The associated parser.
+     */
+    public GenomeLocParser getGenomeLocParser() {
+        return genomeLocParser;
+    }
+
+    /**
+     * Retrieves the shard associated with this data provider.
+     * @return The shard associated with this data provider.
+     */
+    public Shard getShard() {
+        return shard;
+    }
+
+    /**
+     * Can this data source provide reference information?
+     * @return True if possible, false otherwise.
+     */
+    public boolean hasReference() {
+        return reference != null;
+    }
+
+
+    /**
+     * Gets a pointer into the given indexed fasta sequence file.
+     * @return The indexed fasta sequence file; null when no reference was supplied.
+     */
+    IndexedFastaSequenceFile getReference() {
+        return reference;        
+    }
+
+    /**
+     * Gets a window into the reference-ordered data.  Package protected so that only
+     * views can access it.
+     * @return List of reference-ordered data sources.
+     */
+    Collection<ReferenceOrderedDataSource> getReferenceOrderedData() {
+        return referenceOrderedData;        
+    }
+
+    /**
+     * @return true if reference ordered data will be provided by this shard
+     */
+    public boolean hasReferenceOrderedData() {
+        // NOTE(review): NPEs when constructed via the test constructor (rods == null) -- confirm
+        // production code always supplies a (possibly empty) collection.
+        return ! getReferenceOrderedData().isEmpty();
+    }
+
+    /**
+     * Create a data provider for the shard given the reads and reference.
+     * @param shard The chunk of data over which traversals happen.
+     * @param genomeLocParser Parser used to create and build GenomeLocs.
+     * @param reference A getter for a section of the reference; may be null (see hasReference()).
+     * @param rods The sources of reference-ordered data.
+     */
+    public ShardDataProvider(Shard shard,GenomeLocParser genomeLocParser,IndexedFastaSequenceFile reference,Collection<ReferenceOrderedDataSource> rods) {
+        this.shard = shard;
+        this.genomeLocParser = genomeLocParser;
+        this.reference = reference;
+        this.referenceOrderedData = rods;
+    }
+
+    /**
+     * Skeletal, package protected constructor for unit tests which require a ShardDataProvider.
+     * Leaves both the reference and the ROD sources null.
+     * @param shard the shard
+     */
+    ShardDataProvider(Shard shard,GenomeLocParser genomeLocParser) {
+        this(shard,genomeLocParser,null,null);
+    }
+
+    /**
+     * Register this view with the shard provider, and make sure it has no conflicts with any other views.
+     * Conflicts are checked in both directions: existing views against the newcomer, and vice versa.
+     * @param view The new view.
+     */
+    void register( View view ) {
+        // Check all registered classes to see whether a conflict exists.
+        for( View registeredView: registeredViews ) {
+            Collection<Class<? extends View>> conflicts = registeredView.getConflictingViews();
+            for( Class<? extends View> conflict: conflicts ) {
+                if( conflict.isInstance(view) )
+                    throw new ReviewedGATKException(String.format("Tried to register two conflicting views: %s and %s",
+                                                           registeredView.getClass().getSimpleName(),
+                                                           view.getClass().getSimpleName()));
+            }
+        }
+
+        // Check whether this class has any objection to any other classes.
+        for( Class<? extends View> conflict: view.getConflictingViews() ) {
+            for( View registeredView: registeredViews ) {
+                if( conflict.isInstance(registeredView) )
+                    throw new ReviewedGATKException(String.format("Tried to register two conflicting views: %s and %s",
+                                                           registeredView.getClass().getSimpleName(),
+                                                           view.getClass().getSimpleName()));
+            }
+        }
+
+        this.registeredViews.add(view);
+    }
+
+    /**
+     * Retire this shard.  Closes all registered views, then the shard itself.
+     */
+    public void close() {
+        for( View view: registeredViews )
+            view.close();
+
+        // Explicitly purge registered views to ensure that we don't end up with circular references
+        // to views, which can in turn hold state.
+        registeredViews.clear();
+
+        if(shard != null)
+            shard.close();
+    }
+
+    @Override
+    public String toString() {
+        return shard.toString();
+    }
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/providers/View.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/providers/View.java
new file mode 100644
index 0000000..1611778
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/providers/View.java
@@ -0,0 +1,55 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.datasources.providers;
+
+import java.util.Collection;
+/**
+ * User: hanna
+ * Date: May 21, 2009
+ * Time: 3:14:56 PM
+ * BROAD INSTITUTE SOFTWARE COPYRIGHT NOTICE AND AGREEMENT
+ * Software and documentation are copyright 2005 by the Broad Institute.
+ * All rights are reserved.
+ *
+ * Users acknowledge that this software is supplied without any warranty or support.
+ * The Broad Institute is not responsible for its use, misuse, or
+ * functionality.
+ */
+
+/**
+ * Represents a view into given data.
+ */
+public interface View {
+    /**
+     * Gets a list of all types of views which can conflict with this view.
+     * @return collection of view classes that must not be registered alongside this one.
+     */
+    public Collection<Class<? extends View>> getConflictingViews();
+
+    /**
+     * Inform this view that the data provided to it no longer exists.
+     */
+    public void close();
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/providers/package-info.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/providers/package-info.java
new file mode 100644
index 0000000..db2aabe
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/providers/package-info.java
@@ -0,0 +1,26 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.datasources.providers;
\ No newline at end of file
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/ActiveRegionShardBalancer.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/ActiveRegionShardBalancer.java
new file mode 100644
index 0000000..cc3fcd1
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/ActiveRegionShardBalancer.java
@@ -0,0 +1,85 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.datasources.reads;
+
+import java.util.Iterator;
+import java.util.LinkedList;
+import java.util.List;
+
+/**
+ * ActiveRegionShardBalancer
+ *
+ * Merges all of the file pointer information for a single contig index into a single
+ * combined shard.  The purpose of doing this is to ensure that the HaplotypeCaller, which
+ * doesn't support TreeReduction by construction, gets all of the data on a single
+ * contig together so that the NanoSchedule runs efficiently
+ */
+public class ActiveRegionShardBalancer extends ShardBalancer {
+    /**
+     * Convert iterators of file pointers into balanced iterators of shards.
+     * @return An iterator over balanced shards.
+     */
+    public Iterator<Shard> iterator() {
+        return new Iterator<Shard>() {
+            public boolean hasNext() {
+                return filePointers.hasNext();
+            }
+
+            // Each next() drains all contiguous FilePointers for one contig into a single LocusShard.
+            public Shard next() {
+                FilePointer current = getCombinedFilePointersOnSingleContig();
+
+                // FilePointers have already been combined as necessary at the IntervalSharder level. No
+                // need to do so again here.
+
+                return new LocusShard(parser,readsDataSource,current.getLocations(),current.fileSpans);
+            }
+
+            public void remove() {
+                throw new UnsupportedOperationException("Unable to remove from shard balancing iterator");
+            }
+        };
+    }
+
+    /**
+     * Combine all of the file pointers in the filePointers iterator into a single combined
+     * FilePointer that spans all of the file pointers on a single contig
+     * @return a non-null FilePointer
+     */
+    private FilePointer getCombinedFilePointersOnSingleContig() {
+        FilePointer current = filePointers.next();
+
+        final List<FilePointer> toCombine = new LinkedList<>();
+        toCombine.add(current);
+
+        // Keep absorbing pointers while they share mapped/unmapped status and -- for mapped
+        // regions -- the same contig index; all unmapped pointers are grouped together.
+        while ( filePointers.hasNext() &&
+                current.isRegionUnmapped == filePointers.peek().isRegionUnmapped &&
+                (current.getContigIndex() == filePointers.peek().getContigIndex() || current.isRegionUnmapped) ) {
+            toCombine.add(filePointers.next());
+        }
+
+        return FilePointer.union(toCombine, parser);
+    }
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/BAMAccessPlan.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/BAMAccessPlan.java
new file mode 100644
index 0000000..259c77b
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/BAMAccessPlan.java
@@ -0,0 +1,170 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.datasources.reads;
+
+import htsjdk.samtools.util.PeekableIterator;
+import htsjdk.samtools.GATKBAMFileSpan;
+import htsjdk.samtools.GATKChunk;
+import htsjdk.samtools.util.BlockCompressedFilePointerUtil;
+import org.broadinstitute.gatk.utils.sam.SAMReaderID;
+
+import java.util.LinkedList;
+import java.util.List;
+
+/**
+* Access plan for a single BAM file: tracks the ordered list of file spans (chunks)
+* to be read and the next block address to visit within them.
+*
+* User: mhanna
+* Date: 10/14/11
+* Time: 10:47 PM
+*/
+class BAMAccessPlan {
+    // Identity of the BAM reader this plan serves.
+    private final SAMReaderID reader;
+    // Stream from which the plan's blocks are read.
+    private final BlockInputStream inputStream;
+
+    // The ordered file spans (chunks) to visit, and a peekable cursor over them.
+    private final List<GATKChunk> positions;
+    private PeekableIterator<GATKChunk> positionIterator;
+
+    /**
+     * Stores the next block address to read, or -1 if no such block is available.
+     */
+    private long nextBlockAddress;
+
+
+    BAMAccessPlan(final SAMReaderID reader, final BlockInputStream inputStream, GATKBAMFileSpan fileSpan) {
+        this.reader = reader;
+        this.inputStream = inputStream;
+
+        this.positions = fileSpan.getGATKChunks();
+        initialize();
+    }
+
+    public SAMReaderID getReader() {
+        return reader;
+    }
+
+    public BlockInputStream getInputStream() {
+        return inputStream;
+    }
+
+    /**
+     * Retrieves the next block address to be read.
+     * @return Next block address to be read.
+     */
+    public long getBlockAddress() {
+        return nextBlockAddress;
+    }
+
+    /**
+     * Retrieves the first offset of interest in the block returned by getBlockAddress().
+     * @return First block of interest in this segment.
+     */
+    public int getFirstOffsetInBlock() {
+        // Only the very first block of a chunk starts at a nonzero offset.
+        return (nextBlockAddress == positionIterator.peek().getBlockStart()) ? positionIterator.peek().getBlockOffsetStart() : 0;
+    }
+
+    /**
+     * Gets the spans overlapping the given block; used to copy the contents of the block into the circular buffer.
+     * @param blockAddress Block address for which to search.
+     * @param filePosition Block address at which to terminate the last chunk if the last chunk goes beyond this span.
+     * @return list of chunks containing that block.
+     */
+    public List<GATKChunk> getSpansOverlappingBlock(long blockAddress, long filePosition) {
+        List<GATKChunk> spansOverlapping = new LinkedList<GATKChunk>();
+        // While the position iterator overlaps the given block, pull out spans to report.
+        while(positionIterator.hasNext() && positionIterator.peek().getBlockStart() <= blockAddress) {
+            // Create a span over as much of the block as is covered by this chunk.
+            int blockOffsetStart = (blockAddress == positionIterator.peek().getBlockStart()) ? positionIterator.peek().getBlockOffsetStart() : 0;
+
+            // Calculate the end of this span.  If the span extends past this block, cap it using the current file position.
+            long blockEnd;
+            int blockOffsetEnd;
+            if(blockAddress < positionIterator.peek().getBlockEnd()) {
+                blockEnd = filePosition;
+                blockOffsetEnd = 0;
+            }
+            else {
+                blockEnd = positionIterator.peek().getBlockEnd();
+                blockOffsetEnd = positionIterator.peek().getBlockOffsetEnd();
+            }
+
+            // NOTE(review): newChunk is used only for the start<=end test; an identical
+            // GATKChunk is then constructed again for the list -- newChunk could be reused.
+            GATKChunk newChunk = new GATKChunk(blockAddress,blockOffsetStart,blockEnd,blockOffsetEnd);
+
+            if(newChunk.getChunkStart() <= newChunk.getChunkEnd())
+                spansOverlapping.add(new GATKChunk(blockAddress,blockOffsetStart,blockEnd,blockOffsetEnd));
+
+            // If the value currently stored in the position iterator ends past the current block, we must be done.  Abort.
+            if(!positionIterator.hasNext() ||  positionIterator.peek().getBlockEnd() > blockAddress)
+                break;
+
+            // If the position iterator ends before the block ends, pull the position iterator forward.
+            if(positionIterator.peek().getBlockEnd() <= blockAddress)
+                positionIterator.next();
+        }
+
+        return spansOverlapping;
+    }
+
+    // Rewinds the plan to the first chunk, as if freshly constructed.
+    public void reset() {
+        initialize();
+    }
+
+    /**
+     * Resets the SAM reader position to its original state.
+     */
+    private void initialize() {
+        this.positionIterator = new PeekableIterator<GATKChunk>(positions.iterator());
+        if(positionIterator.hasNext())
+            nextBlockAddress = positionIterator.peek().getBlockStart();
+        else
+            nextBlockAddress = -1;
+    }
+
+    /**
+     * Advances the current position to the next block to read, given the current position in the file.
+     * @param filePosition The current position within the file.
+     */
+    void advancePosition(final long filePosition) {
+        nextBlockAddress = BlockCompressedFilePointerUtil.getBlockAddress(filePosition);
+
+        // Check the current file position against the iterator; if the iterator is before the current file position,
+        // draw the iterator forward.  Remember when performing the check that coordinates are half-open!
+        while(positionIterator.hasNext() && isFilePositionPastEndOfChunk(filePosition,positionIterator.peek()))
+            positionIterator.next();
+
+        // If the block iterator has shot past the file pointer, bring the file pointer flush with the start of the current block.
+        if(positionIterator.hasNext() && filePosition < positionIterator.peek().getChunkStart())
+            nextBlockAddress = positionIterator.peek().getBlockStart();
+
+        // If we've shot off the end of the block pointer, notify consumers that iteration is complete.
+        if(!positionIterator.hasNext())
+            nextBlockAddress = -1;
+    }
+
+    // Half-open comparison: the chunk end itself is not part of the chunk.
+    private boolean isFilePositionPastEndOfChunk(final long filePosition, final GATKChunk chunk) {
+        return filePosition >= chunk.getChunkEnd();
+    }
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/BAMSchedule.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/BAMSchedule.java
new file mode 100644
index 0000000..71db602
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/BAMSchedule.java
@@ -0,0 +1,531 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.datasources.reads;
+
+import htsjdk.samtools.util.PeekableIterator;
+import htsjdk.samtools.Bin;
+import htsjdk.samtools.GATKBAMFileSpan;
+import htsjdk.samtools.GATKChunk;
+import htsjdk.samtools.util.CloseableIterator;
+import org.broadinstitute.gatk.utils.GenomeLoc;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+import org.broadinstitute.gatk.utils.exceptions.GATKException;
+import org.broadinstitute.gatk.utils.exceptions.UserException;
+import org.broadinstitute.gatk.utils.sam.SAMReaderID;
+
+import java.io.File;
+import java.io.IOException;
+import java.io.RandomAccessFile;
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+import java.nio.channels.FileChannel;
+import java.util.*;
+
+/**
+ * Writes schedules for a single BAM file to a target output file.
+ */
+public class BAMSchedule implements CloseableIterator<BAMScheduleEntry> {
+    /**
+     * File in which to store schedule data.
+     */
+    private File scheduleFile;
+
+    /**
+     * File channel for the schedule file.
+     */
+    private FileChannel scheduleFileChannel;
+
+    /**
+     * The definitive, sorted list of reader IDs.  Order is important here: the order
+     * in which the reader IDs are presented here maps to the order in which they appear in the file.
+     */
+    private final List<SAMReaderID> readerIDs = new ArrayList<SAMReaderID>();
+
+    /**
+     * Iterators over the schedule.  Stored in the same order as readerIDs, above.
+     */
+    private final List<PeekableIterator<BAMScheduleEntry>> scheduleIterators = new ArrayList<PeekableIterator<BAMScheduleEntry>>();
+
+    /**
+     * Next schedule entry to be returned.  Null if no additional entries are present.
+     */
+    private BAMScheduleEntry nextScheduleEntry;
+
+    /**
+     * Reference sequence for which to write the schedule.
+     */
+    private final int referenceSequence;
+
+    /**
+     * Sizes of ints and longs in bytes.
+     */
+    private static final int INT_SIZE_IN_BYTES = Integer.SIZE / 8;
+    private static final int LONG_SIZE_IN_BYTES = Long.SIZE / 8;
+
+    /**
+     * Create a new BAM schedule based on the given index.  Writes one binary schedule
+     * record per overlapping lowest-level bin per reader to a temporary file, then sets up
+     * per-reader iterators over those records.
+     * @param dataSource The SAM data source to use.
+     * @param intervals List of intervals for which to build the schedule; must be non-empty.
+     *                  All intervals are assumed to lie on the same contig as the first -- TODO confirm.
+     */
+    public BAMSchedule(final SAMDataSource dataSource, final List<GenomeLoc> intervals) {
+        if(intervals.isEmpty())
+            throw new ReviewedGATKException("Tried to write schedule for empty interval list.");
+
+        referenceSequence = dataSource.getHeader().getSequence(intervals.get(0).getContig()).getSequenceIndex();
+
+        createScheduleFile();
+
+        readerIDs.addAll(dataSource.getReaderIDs());
+
+        for(final SAMReaderID reader: readerIDs) {
+            final GATKBAMIndex index = dataSource.getIndex(reader);
+            final GATKBAMIndexData indexData = index.readReferenceSequence(referenceSequence);
+
+            int currentBinInLowestLevel = GATKBAMIndex.getFirstBinInLevel(GATKBAMIndex.getNumIndexLevels()-1);
+            Iterator<GenomeLoc> locusIterator = intervals.iterator();
+            GenomeLoc currentLocus = locusIterator.next();
+
+            final long readerStartOffset = position();
+
+            // Largest chunk count seen for any bin; sizes the reusable chunk buffer in the iterator below.
+            int maxChunkCount = 0;
+
+            while(currentBinInLowestLevel < GATKBAMIndex.MAX_BINS && currentLocus != null) {
+                final Bin bin = new Bin(referenceSequence,currentBinInLowestLevel);
+                final int binStart = index.getFirstLocusInBin(bin);
+                final int binStop = index.getLastLocusInBin(bin);
+
+                // If required, pull bin iterator ahead to the point of the next GenomeLoc.
+                if(binStop < currentLocus.getStart()) {
+                    currentBinInLowestLevel++;
+                    continue;
+                }
+
+                // At this point, the bin stop is guaranteed to be >= the start of the locus.
+                // If the bins have gone past the current locus, update the current locus if at all possible.
+                if(binStart > currentLocus.getStop()) {
+                    currentLocus = locusIterator.hasNext() ? locusIterator.next() : null;
+                    continue;
+                }
+
+                // Code at this point knows that the current bin is neither before nor after the current locus,
+                // so it must overlap.  Add this region to the filesystem.
+                final GATKBAMFileSpan fileSpan = indexData.getSpanOverlapping(bin);
+
+                if(!fileSpan.isEmpty()) {
+                    // File format is binary in little endian; start of region, end of region, num chunks, then the chunks themselves.
+                    ByteBuffer buffer = allocateByteBuffer(2*INT_SIZE_IN_BYTES + INT_SIZE_IN_BYTES + fileSpan.getGATKChunks().size()*LONG_SIZE_IN_BYTES*2);
+                    buffer.putInt(binStart);
+                    buffer.putInt(binStop);
+                    buffer.putInt(fileSpan.getGATKChunks().size());
+                    for(GATKChunk chunk: fileSpan.getGATKChunks()) {
+                        buffer.putLong(chunk.getChunkStart());
+                        buffer.putLong(chunk.getChunkEnd());
+                    }
+                    maxChunkCount = Math.max(maxChunkCount,fileSpan.getGATKChunks().size());
+
+                    // Prepare buffer for writing
+                    buffer.flip();
+
+                    // And write.
+                    write(buffer);
+                }
+
+                currentBinInLowestLevel++;
+            }
+
+            final long readerStopOffset = position();
+
+            scheduleIterators.add(new PeekableIterator<BAMScheduleEntry>(new BAMScheduleIterator(reader,readerStartOffset,readerStopOffset,maxChunkCount)));
+
+            // Iterator initialization might move the file pointer.  Make sure it gets reset back to where it was before iterator initialization.
+            position(readerStopOffset);
+        }
+
+        advance();
+    }
+
+    /**
+     * Determine whether more ScheduleEntries are present in the iterator.
+     * @return True if another schedule entry is available; false otherwise.
+     */
+    @Override
+    public boolean hasNext() {
+        return nextScheduleEntry != null;
+    }
+
+    /**
+     * Retrieve the next schedule entry in the list.
+     * @return next schedule entry in the queue.
+     */
+    @Override
+    public BAMScheduleEntry next() {
+        BAMScheduleEntry currentScheduleEntry = nextScheduleEntry;
+        advance();
+        return currentScheduleEntry;
+    }
+
+    /**
+     * Close down and delete the file.
+     */
+    @Override
+    public void close() {
+        try {
+            scheduleFileChannel.close();
+        }
+        catch(IOException ex) {
+            throw makeIOFailureException(true, "Unable to close schedule file.", ex);
+        }
+    }
+
+    /**
+     * Convenience routine for creating UserExceptions
+     * @param wasWriting True if the failure occurred while writing the schedule file; false if while reading it.
+     * @param message Human-readable description of the failure.
+     * @param e Underlying cause of the failure, or null if there is none.
+     * @return An appropriate CouldNotCreateOutputFile (writing) or CouldNotReadInputFile (reading) exception;
+     *         the caller is responsible for throwing it.
+     */
+    private final GATKException makeIOFailureException(final boolean wasWriting, final String message, final Exception e) {
+        if ( wasWriting ) {
+            if ( e == null )
+                return new UserException.CouldNotCreateOutputFile(scheduleFile, message);
+            else
+                return new UserException.CouldNotCreateOutputFile(scheduleFile, message, e);
+        } else {
+            if ( e == null )
+                return new UserException.CouldNotReadInputFile(scheduleFile, message);
+            else
+                return new UserException.CouldNotReadInputFile(scheduleFile, message, e);
+        }
+    }
+
+    /**
+     * Advance to the next schedule entry.  Merges the lowest-starting entries across all
+     * reader iterators into a single BAMScheduleEntry; readers without data for that region
+     * contribute an empty file span.
+     */
+    private void advance() {
+        nextScheduleEntry = null;
+
+        BitSet selectedIterators = new BitSet(readerIDs.size());
+        int currentStart = Integer.MAX_VALUE;
+        int currentStop = Integer.MAX_VALUE;
+
+        // Select every iterator whose next element is the lowest element in the list.
+        for(int reader = 0; reader < scheduleIterators.size(); reader++) {
+            PeekableIterator<BAMScheduleEntry> scheduleIterator = scheduleIterators.get(reader);
+            if(!scheduleIterator.hasNext())
+                continue;
+
+            // If the iterator starts after this one, skip over it.
+            if(scheduleIterator.peek().start > currentStart)
+                continue;
+
+            // If the iterator starts at the same point as this one, add it to the list.
+            if(scheduleIterator.peek().start == currentStart) {
+                selectedIterators.set(reader);
+                currentStop = Math.min(scheduleIterator.peek().stop,currentStop);
+                continue;
+            }
+
+            // If the iterator is less than anything seen before it, purge the selections and make this one current.
+            if(scheduleIterator.peek().start < currentStart) {
+                selectedIterators.clear();
+                selectedIterators.set(reader);
+                currentStart = scheduleIterator.peek().start;
+                currentStop = scheduleIterator.peek().stop;
+            }
+        }
+
+        // Out of iterators?  Abort early.
+        if(selectedIterators.isEmpty())
+            return;
+
+        // Create the target schedule entry
+        BAMScheduleEntry mergedScheduleEntry = new BAMScheduleEntry(currentStart,currentStop);
+
+        // For each schedule entry with data, load the data into the merged schedule.
+        for (int reader = selectedIterators.nextSetBit(0); reader >= 0; reader = selectedIterators.nextSetBit(reader+1)) {
+            PeekableIterator<BAMScheduleEntry> scheduleIterator = scheduleIterators.get(reader);
+            BAMScheduleEntry individualScheduleEntry = scheduleIterator.peek();
+            mergedScheduleEntry.mergeInto(individualScheduleEntry);
+
+            // If the schedule iterator ends after this entry, consume it.
+            if(individualScheduleEntry.stop <= currentStop)
+                scheduleIterator.next();
+        }
+
+        // For each schedule entry without data, add a blank entry.
+        for (int reader = selectedIterators.nextClearBit(0); reader < readerIDs.size(); reader = selectedIterators.nextClearBit(reader+1)) {
+            mergedScheduleEntry.addFileSpan(readerIDs.get(reader),new GATKBAMFileSpan());
+        }
+
+        nextScheduleEntry = mergedScheduleEntry;
+    }
+
+    /**
+     * Not supported.
+     * @throws UnsupportedOperationException always.
+     */
+    @Override
+    public void remove() { throw new UnsupportedOperationException("Unable to remove from a schedule iterator."); }
+
+    /**
+     * Create a new schedule file, containing schedule information for all BAM files being dynamically merged.
+     */
+    private void createScheduleFile() {
+        try {
+            scheduleFile = File.createTempFile("bamschedule."+referenceSequence,null);
+            scheduleFileChannel = new RandomAccessFile(scheduleFile,"rw").getChannel();
+        }
+        catch(IOException ex) {
+            throw new UserException("Unable to create a temporary BAM schedule file.  Please make sure Java can write to the default temp directory or use -Djava.io.tmpdir= to instruct it to use a different temp directory instead.",ex);
+        }
+        scheduleFile.deleteOnExit();
+
+    }
+
+    /**
+     * Creates a new byte buffer of the given size.
+     * @param size the size of buffer to allocate.
+     * @return Newly allocated byte buffer, little-endian to match the on-disk schedule format.
+     */
+    private ByteBuffer allocateByteBuffer(final int size) {
+        ByteBuffer buffer = ByteBuffer.allocate(size);
+        buffer.order(ByteOrder.LITTLE_ENDIAN);
+        return buffer;
+    }
+
+    /**
+     * Reads the contents at the current position on disk into the given buffer.
+     * @param buffer buffer to fill.
+     * @return number of bytes read, as reported by the underlying channel.
+     */
+    private int read(final ByteBuffer buffer) {
+        try {
+            return scheduleFileChannel.read(buffer);
+        }
+        catch(IOException ex) {
+            throw makeIOFailureException(false, "Unable to read data from BAM schedule file.", ex);
+        }
+    }
+
+    /**
+     * Writes the full contents of the given buffer to the schedule file at the current position.
+     * @param buffer buffer to drain; an exception is raised if any bytes remain unwritten.
+     */
+    private void write(final ByteBuffer buffer) {
+        try {
+            scheduleFileChannel.write(buffer);
+            if(buffer.remaining() > 0)
+                throw makeIOFailureException(true, "Unable to write entire buffer to file.", null);
+        }
+        catch(IOException ex) {
+            throw makeIOFailureException(true, "Unable to write data to BAM schedule file.", ex);
+        }
+    }
+
+    /**
+     * Reads the current position from the file channel.
+     * @return Current position within file channel.
+     */
+    private long position() {
+        try {
+            return scheduleFileChannel.position();
+        }
+        catch(IOException ex) {
+            throw makeIOFailureException(false, "Unable to retrieve position of BAM schedule file.", ex);
+        }
+    }
+
+    /**
+     * Reposition the file channel to the specified offset wrt the start of the file.
+     * @param position The position.
+     */
+    private void position(final long position) {
+        try {
+            scheduleFileChannel.position(position);
+        }
+        catch(IOException ex) {
+            throw makeIOFailureException(false, "Unable to position BAM schedule file.",ex);
+        }
+    }
+
+    /**
+     * An iterator over the schedule for a single BAM file.
+     */
+    private class BAMScheduleIterator implements Iterator<BAMScheduleEntry> {
+        /**
+         * ID of the reader associated with the given schedule.
+         */
+        private final SAMReaderID reader;
+
+        /**
+         * Current position in the file.
+         */
+        private long currentPosition;
+
+        /**
+         * Stopping file position of last bin in file for this reader, exclusive.
+         */
+        private final long stopPosition;
+
+        /**
+         * Byte buffer used to store BAM header info.
+         */
+        private final ByteBuffer binHeader;
+
+        /**
+         * Byte buffer used to store chunk data.
+         */
+        private final ByteBuffer chunkData;
+
+        public BAMScheduleIterator(final SAMReaderID reader, final long startPosition, final long stopPosition, final int maxChunkCount) {
+            this.reader = reader;
+            this.currentPosition = startPosition;
+            this.stopPosition = stopPosition;
+            binHeader = allocateByteBuffer(INT_SIZE_IN_BYTES*3);
+            chunkData = allocateByteBuffer(maxChunkCount*LONG_SIZE_IN_BYTES*2);
+        }
+
+        @Override
+        public boolean hasNext() {
+            return currentPosition < stopPosition;
+        }
+
+        @Override
+        public BAMScheduleEntry next() {
+            position(currentPosition);
+
+            // Read data.
+            int binHeaderBytesRead = read(binHeader);
+
+            // Make sure we read in a complete bin header:
+            if ( binHeaderBytesRead < INT_SIZE_IN_BYTES * 3 ) {
+                throw new ReviewedGATKException(String.format("Unable to read a complete bin header from BAM schedule file %s for BAM file %s. " +
+                                                               "The BAM schedule file is likely incomplete/corrupt.",
+                                                               scheduleFile.getAbsolutePath(), reader.getSamFilePath()));
+            }
+
+            // Decode contents.
+            binHeader.flip();
+            final int start = binHeader.getInt();
+            final int stop = binHeader.getInt();
+            final int numChunks = binHeader.getInt();
+
+            // Prepare bin buffer for next read.
+            binHeader.flip();
+
+            // Prepare a target buffer for chunks.
+            GATKChunk[] chunks = new GATKChunk[numChunks];
+
+            // Read all chunk data.
+            chunkData.limit(numChunks*LONG_SIZE_IN_BYTES*2);
+            long bytesRead = read(chunkData);
+            if(bytesRead != numChunks*LONG_SIZE_IN_BYTES*2)
+                throw new ReviewedGATKException("Unable to read all chunks from file");
+
+            // Prepare for reading.
+            chunkData.flip();
+
+            for(int i = 0; i < numChunks; i++)
+                chunks[i] = new GATKChunk(chunkData.getLong(),chunkData.getLong());
+
+            // Prepare chunk buffer for next read.
+            chunkData.flip();
+
+            BAMScheduleEntry nextScheduleEntry = new BAMScheduleEntry(start,stop);
+            nextScheduleEntry.addFileSpan(reader,new GATKBAMFileSpan(chunks));
+
+            // Record the file offset just past this entry, so the next call to next() resumes from here.
+            currentPosition = position();
+
+            return nextScheduleEntry;
+        }
+
+        /**
+         * Not supported.
+         */
+        @Override
+        public void remove() {
+            throw new UnsupportedOperationException("Unable to remove from a BAMScheduleIterator");
+        }
+
+    }
+}
+
+/**
+ * A single proto-shard to be processed.
+ */
+class BAMScheduleEntry {
+    /**
+     * Starting position for the genomic entry.
+     */
+    public final int start;
+
+    /**
+     * Ending position for the genomic entry.
+     */
+    public final int stop;
+
+    /**
+     * The spans representing the given region.
+     */
+    public final Map<SAMReaderID,GATKBAMFileSpan> fileSpans = new HashMap<SAMReaderID,GATKBAMFileSpan>();
+
+    BAMScheduleEntry(final int start, final int stop) {
+        this.start = start;
+        this.stop = stop;
+    }
+
+    /**
+     * Add a new file span to this schedule.
+     * @param reader Reader associated with the span.
+     * @param fileSpan Blocks to read in the given reader.
+     */
+    public void addFileSpan(final SAMReaderID reader, final GATKBAMFileSpan fileSpan) {
+        fileSpans.put(reader,fileSpan);
+    }
+
+    /**
+     * A naive merge operation.  Merge the fileSpans in other into this, blowing up if conflicts are
+     * detected. Completely ignores merging start and stop.
+     * @param other Other schedule entry to merge into this one.
+     */
+    public void mergeInto(final BAMScheduleEntry other) {
+        final int thisSize = fileSpans.size();
+        final int otherSize = other.fileSpans.size();
+        fileSpans.putAll(other.fileSpans);
+        // If the merged map is smaller than the sum of the parts, at least one reader appeared in both entries.
+        if(fileSpans.size() != thisSize+otherSize)
+            throw new ReviewedGATKException("Unable to handle overlaps when merging BAM schedule entries.");
+    }
+
+    /**
+     * Returns true if the location of this bin tree is before the given position.
+     * @param locus Locus to test.
+     * @return True if this bin sits completely before the given locus; false otherwise.
+     */
+    public boolean isBefore(final GenomeLoc locus) {
+        return stop < locus.getStart();
+    }
+
+    /**
+     * Checks overlap between this bin tree and other bin trees.
+     * @param position the position over which to detect overlap.
+     * @return True if the segment overlaps.  False otherwise.
+     */
+    public boolean overlaps(final GenomeLoc position) {
+        return !(position.getStop() < start || position.getStart() > stop);
+    }
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/BAMScheduler.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/BAMScheduler.java
new file mode 100644
index 0000000..0474779
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/BAMScheduler.java
@@ -0,0 +1,321 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.datasources.reads;
+
+import htsjdk.samtools.util.PeekableIterator;
+import htsjdk.samtools.GATKBAMFileSpan;
+import htsjdk.samtools.GATKChunk;
+import htsjdk.samtools.SAMSequenceRecord;
+import org.broadinstitute.gatk.utils.GenomeLoc;
+import org.broadinstitute.gatk.utils.GenomeLocParser;
+import org.broadinstitute.gatk.utils.GenomeLocSortedSet;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+import org.broadinstitute.gatk.utils.exceptions.UserException;
+import org.broadinstitute.gatk.utils.interval.IntervalMergingRule;
+import org.broadinstitute.gatk.utils.sam.ReadUtils;
+import org.broadinstitute.gatk.utils.sam.SAMReaderID;
+
+import java.util.*;
+
+/**
+ * Assign intervals to the most appropriate blocks, keeping as little as possible in memory at once.
+ */
+public class BAMScheduler implements Iterator<FilePointer> {
+    private final SAMDataSource dataSource;
+
+    private final Map<SAMReaderID,GATKBAMIndex> indexFiles = new HashMap<SAMReaderID,GATKBAMIndex>();
+
+    private FilePointer nextFilePointer = null;
+
+    private GenomeLocSortedSet loci;
+    private PeekableIterator<GenomeLoc> locusIterator;
+    private GenomeLoc currentLocus;
+    private IntervalMergingRule intervalMergingRule;
+
+    /**
+     * Creates BAMScheduler using contigs from the given BAM data source.
+     *
+     * @param dataSource    BAM source
+     * @return non-null BAM scheduler
+     */
+    public static BAMScheduler createOverMappedReads(final SAMDataSource dataSource) {
+        final BAMScheduler scheduler = new BAMScheduler(dataSource, IntervalMergingRule.ALL);
+        final GenomeLocSortedSet intervals = GenomeLocSortedSet.createSetFromSequenceDictionary(dataSource.getHeader().getSequenceDictionary());
+        scheduler.populateFilteredIntervalList(intervals);
+        return scheduler;
+    }
+
+    /**
+     * Creates a BAMScheduler over all reads in the data source, represented by a single
+     * file pointer spanning each file end-to-end.
+     *
+     * @param dataSource    BAM source
+     * @param parser        parser used to construct the (empty) interval set
+     * @return non-null BAM scheduler
+     */
+    public static BAMScheduler createOverAllReads(final SAMDataSource dataSource, final GenomeLocParser parser) {
+        BAMScheduler scheduler = new BAMScheduler(dataSource, IntervalMergingRule.ALL);
+        scheduler.populateUnfilteredIntervalList(parser);
+        return scheduler;
+    }
+
+    /**
+     * Creates a BAMScheduler restricted to the given set of intervals.
+     *
+     * @param dataSource    BAM source
+     * @param mergeRule     rule used when merging adjacent intervals into shards
+     * @param loci          intervals to iterate over
+     * @return non-null BAM scheduler
+     */
+    public static BAMScheduler createOverIntervals(final SAMDataSource dataSource, final IntervalMergingRule mergeRule, final GenomeLocSortedSet loci) {
+        BAMScheduler scheduler = new BAMScheduler(dataSource, mergeRule);
+        scheduler.populateFilteredIntervalList(loci);
+        return scheduler;
+    }
+
+
+    /**
+     * Creates a scheduler over the given data source, caching the index for each reader that has one.
+     * @param dataSource Source of BAM data.
+     * @param mergeRule Rule used when merging adjacent intervals into shards.
+     */
+    private BAMScheduler(final SAMDataSource dataSource, final IntervalMergingRule mergeRule) {
+        this.dataSource = dataSource;
+        this.intervalMergingRule = mergeRule;
+        for(SAMReaderID reader: dataSource.getReaderIDs()) {
+            GATKBAMIndex index = dataSource.getIndex(reader);
+            if(index != null)
+                indexFiles.put(reader,index);   // reuse the index already fetched rather than looking it up a second time
+        }
+    }
+
+    /**
+     * The consumer has asked for a bounded set of locations.  Prepare an iterator over those locations.
+     * @param loci The list of locations to search and iterate over.
+     */
+    private void populateFilteredIntervalList(final GenomeLocSortedSet loci) {
+        this.loci = loci;
+        if(!indexFiles.isEmpty()) {
+            // If index data is available, start up the iterator.
+            locusIterator = new PeekableIterator<GenomeLoc>(loci.iterator());
+            if(locusIterator.hasNext())
+                currentLocus = locusIterator.next();
+            advance();
+        }
+        else {
+            // Otherwise, seed the iterator with a single file pointer over the entire region.
+            nextFilePointer = generatePointerOverEntireFileset();
+            for(GenomeLoc locus: loci)
+                nextFilePointer.addLocation(locus);
+            // Leave the locus iterator empty so advance() terminates immediately after the monolithic pointer is consumed.
+            locusIterator = new PeekableIterator<GenomeLoc>(Collections.<GenomeLoc>emptyList().iterator());
+        }
+    }
+
+    /**
+     * The consumer has provided null, meaning to iterate over all available data.  Create a file pointer stretching
+     * from just before the start of the region to the end of the region.
+     * @param parser Parser used to construct the (empty) sorted interval set.
+     */
+    private void populateUnfilteredIntervalList(final GenomeLocParser parser) {
+        this.loci = new GenomeLocSortedSet(parser);
+        locusIterator = new PeekableIterator<GenomeLoc>(Collections.<GenomeLoc>emptyList().iterator());
+        nextFilePointer = generatePointerOverEntireFileset();
+    }
+
+    /**
+     * Generate a span that runs from the end of the BAM header to the end of the file.
+     * @return A file pointer over the specified region.
+     */
+    private FilePointer generatePointerOverEntireFileset() {
+        FilePointer filePointer = new FilePointer(intervalMergingRule);
+
+        // This is a "monolithic" FilePointer representing all regions in all files we will ever visit, and is
+        // the only FilePointer we will create. This allows us to have this FilePointer represent regions from
+        // multiple contigs
+        filePointer.setIsMonolithic(true);
+
+        Map<SAMReaderID,GATKBAMFileSpan> currentPosition;
+
+        currentPosition = dataSource.getInitialReaderPositions();
+
+        // For each reader, span from the first chunk's start (just past the header) to the end of the file.
+        for(SAMReaderID reader: dataSource.getReaderIDs())
+            filePointer.addFileSpans(reader,createSpanToEndOfFile(currentPosition.get(reader).getGATKChunks().get(0).getChunkStart()));
+        return filePointer;
+    }
+
+    /**
+     * Determines whether another file pointer is available.
+     * @return True if a call to next() will succeed; false otherwise.
+     */
+    @Override
+    public boolean hasNext() {
+        return nextFilePointer != null;
+    }
+
+    /**
+     * Returns the next file pointer in the schedule and advances the iterator.
+     * @return The next FilePointer; never null.
+     * @throws NoSuchElementException if no further file pointers are available.
+     */
+    @Override
+    public FilePointer next() {
+        if(!hasNext())
+            throw new NoSuchElementException("No next element available in interval sharder");
+        FilePointer currentFilePointer = nextFilePointer;
+        nextFilePointer = null;
+        advance();
+
+        return currentFilePointer;
+    }
+
+    /**
+     * Not supported.
+     * @throws UnsupportedOperationException always.
+     */
+    @Override
+    public void remove() {
+        throw new UnsupportedOperationException("Unable to remove FilePointers from an IntervalSharder");
+    }
+
+    /**
+     * Computes the next file pointer, if any, and stores it in nextFilePointer.  Walks the locus
+     * iterator forward, intersecting each interval with the regions covered by the BAM schedule,
+     * splitting intervals that are only partially covered.  Leaves nextFilePointer null when the
+     * locus iterator is exhausted.
+     */
+    private void advance() {
+        if(loci.isEmpty())
+            return;
+
+        while(nextFilePointer == null && currentLocus != null) {
+            // special case handling of the unmapped shard.
+            if(currentLocus == GenomeLoc.UNMAPPED) {
+                nextFilePointer = new FilePointer(intervalMergingRule, GenomeLoc.UNMAPPED);
+                // Unmapped reads sit past the last linear bin of each index; span from there to end of file.
+                for(SAMReaderID id: dataSource.getReaderIDs())
+                    nextFilePointer.addFileSpans(id,createSpanToEndOfFile(indexFiles.get(id).getStartOfLastLinearBin()));
+                currentLocus = null;
+                continue;
+            }
+
+            nextFilePointer = new FilePointer(intervalMergingRule);
+
+            int coveredRegionStart = 1;
+            int coveredRegionStop = Integer.MAX_VALUE;
+            GenomeLoc coveredRegion = null;
+
+            BAMScheduleEntry scheduleEntry = getNextOverlappingBAMScheduleEntry(currentLocus);
+
+            // No overlapping data at all.
+            if(scheduleEntry != null) {
+                coveredRegionStart = Math.max(coveredRegionStart,scheduleEntry.start);
+                coveredRegionStop = Math.min(coveredRegionStop,scheduleEntry.stop);
+                coveredRegion = loci.getGenomeLocParser().createGenomeLoc(currentLocus.getContig(),coveredRegionStart,coveredRegionStop);
+
+                nextFilePointer.addFileSpans(scheduleEntry.fileSpans);
+            }
+            else {
+                // Always create a file span, whether there was covered data or not.  If there was no covered data, then the binTree is empty.
+                for(SAMReaderID reader: indexFiles.keySet())
+                    nextFilePointer.addFileSpans(reader,new GATKBAMFileSpan());
+            }
+
+            // Early exit if no bins were found.
+            if(coveredRegion == null) {
+                // for debugging only: maximum split is 16384.                
+                nextFilePointer.addLocation(currentLocus);
+                currentLocus = locusIterator.hasNext() ? locusIterator.next() : null;
+                continue;
+            }
+
+            // Early exit if only part of the first interval was found.
+            if(currentLocus.startsBefore(coveredRegion)) {
+                // Emit the uncovered prefix (capped at 16384 bases) as its own shard and keep the remainder.
+                int splitPoint = Math.min(coveredRegion.getStart()-currentLocus.getStart(),16384)+currentLocus.getStart();
+                GenomeLoc[] splitContigs = currentLocus.split(splitPoint);
+                nextFilePointer.addLocation(splitContigs[0]);
+                currentLocus = splitContigs[1];
+                continue;
+            }
+
+            // Define the initial range of the file pointer, aka the region where the locus currently being processed intersects the BAM list.
+            GenomeLoc initialLocation = currentLocus.intersect(coveredRegion);
+            nextFilePointer.addLocation(initialLocation);
+
+            // See whether the BAM regions discovered overlap the next set of intervals in the interval list.  If so, include every overlapping interval.
+            if(!nextFilePointer.locations.isEmpty()) {
+                while(locusIterator.hasNext() && locusIterator.peek().overlapsP(coveredRegion)) {
+                    currentLocus = locusIterator.next();
+                    nextFilePointer.addLocation(currentLocus.intersect(coveredRegion));
+                }
+
+                // Chop off the uncovered portion of the locus.  Since we know that the covered region overlaps the current locus,
+                  // we can simplify the interval creation process to the end of the covered region to the stop of the given interval.
+                if(coveredRegionStop < currentLocus.getStop())
+                    currentLocus = loci.getGenomeLocParser().createGenomeLoc(currentLocus.getContig(),coveredRegionStop+1,currentLocus.getStop());
+                else if(locusIterator.hasNext())
+                    currentLocus = locusIterator.next();
+                else
+                    currentLocus = null;
+            }
+
+        }
+    }
+
+    
+    /**
+     * Contig index of the last reference sequence for which a BAM schedule was
+     * built (assigned from the merged BAM header); null until the first schedule
+     * is created.
+     */
+    private Integer lastReferenceSequenceLoaded = null;
+
+    /**
+     * The stateful iterator used to progress through the genome.
+     */
+    private PeekableIterator<BAMScheduleEntry> bamScheduleIterator = null;
+
+    /**
+     * Clean up underlying BAMSchedule file handles.
+     * Safe to call before any schedule has been created (iterator still null).
+     */
+    public void close() {
+        if(bamScheduleIterator != null)
+            bamScheduleIterator.close();
+    }
+
+    /**
+     * Get the next overlapping tree of bins associated with the given BAM file.
+     * Rebuilds the per-contig schedule whenever the locus moves to a new contig.
+     * @param currentLocus The actual locus for which to check overlap.
+     * @return The next schedule entry overlapping the given locus, or null if no
+     *         schedule entry at or past the locus overlaps it.
+     */
+    private BAMScheduleEntry getNextOverlappingBAMScheduleEntry(final GenomeLoc currentLocus) {
+        // Make sure that we consult the BAM header to ensure that we're using the correct contig index for this contig name.
+        // This will ensure that if the two sets of contigs don't quite match (b36 male vs female ref, hg19 Epstein-Barr), then
+        // we'll be using the correct contig index for the BAMs.
+        // TODO: Warning: assumes all BAMs use the same sequence dictionary!  Get around this with contig aliasing.
+        SAMSequenceRecord currentContigSequenceRecord = dataSource.getHeader().getSequence(currentLocus.getContig());
+        if ( currentContigSequenceRecord == null ) {
+            throw new UserException(String.format("Contig %s not present in sequence dictionary for merged BAM header: %s",
+                                                  currentLocus.getContig(),
+                                                  ReadUtils.prettyPrintSequenceRecords(dataSource.getHeader().getSequenceDictionary())));
+        }
+
+        final int currentContigIndex = currentContigSequenceRecord.getSequenceIndex();
+
+        // Stale reference sequence or first invocation.  (Re)create the binTreeIterator.
+        if(lastReferenceSequenceLoaded == null || lastReferenceSequenceLoaded != currentContigIndex) {
+            if(bamScheduleIterator != null)
+                bamScheduleIterator.close();
+            lastReferenceSequenceLoaded = currentContigIndex;
+
+            // Naive algorithm: find all elements in current contig for proper schedule creation.
+            List<GenomeLoc> lociInContig = new LinkedList<GenomeLoc>();
+            for(GenomeLoc locus: loci) {
+                // A mapped locus whose contig is absent from the BAM header indicates a reference mismatch.
+                if (!GenomeLoc.isUnmapped(locus) && dataSource.getHeader().getSequence(locus.getContig()) == null)
+                    throw new ReviewedGATKException("BAM file(s) do not have the contig: " + locus.getContig() + ". You are probably using a different reference than the one this file was aligned with");
+
+                if (!GenomeLoc.isUnmapped(locus) && dataSource.getHeader().getSequence(locus.getContig()).getSequenceIndex() == lastReferenceSequenceLoaded)
+                    lociInContig.add(locus);
+            }
+
+            bamScheduleIterator = new PeekableIterator<BAMScheduleEntry>(new BAMSchedule(dataSource,lociInContig));
+        }
+
+        if(!bamScheduleIterator.hasNext())
+            return null;
+
+        // Peek the iterator along until finding the first binTree at or following the current locus.
+        BAMScheduleEntry bamScheduleEntry = bamScheduleIterator.peek();
+        while(bamScheduleEntry != null && bamScheduleEntry.isBefore(currentLocus)) {
+            bamScheduleIterator.next();
+            bamScheduleEntry = bamScheduleIterator.hasNext() ? bamScheduleIterator.peek() : null;
+        }
+
+        return (bamScheduleEntry != null && bamScheduleEntry.overlaps(currentLocus)) ? bamScheduleEntry : null;
+    }
+
+    /**
+     * Create a span from the given start point to the end of the file.
+     * @param startOfRegion Start of the region, in encoded coordinates (block start << 16 | block offset).
+     * @return A file span from the given point to the end of the file.
+     */
+    private GATKBAMFileSpan createSpanToEndOfFile(final long startOfRegion) {
+      return new GATKBAMFileSpan(new GATKChunk(startOfRegion,Long.MAX_VALUE));
+    }
+
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/BGZFBlockLoadingDispatcher.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/BGZFBlockLoadingDispatcher.java
new file mode 100644
index 0000000..acb9311
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/BGZFBlockLoadingDispatcher.java
@@ -0,0 +1,86 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.datasources.reads;
+
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+
+import java.util.LinkedList;
+import java.util.Queue;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+
+/**
+ * Preloads BGZF blocks in preparation for unzipping and data processing.
+ * Thread-safety: the input queue is guarded by its own monitor; producers call
+ * queueBlockLoad(), the worker blocks in claimNextWorkRequest().
+ * TODO: Right now, the block loader has all threads blocked waiting for a work request.  Ultimately this should
+ * TODO: be replaced with a central thread management strategy.
+ */
+public class BGZFBlockLoadingDispatcher {
+    /**
+     * The file handle cache, used when allocating blocks from the dispatcher.
+     */
+    private final FileHandleCache fileHandleCache;
+
+    /** Pool executing the block-loading worker. */
+    private final ExecutorService threadPool;
+
+    /** Pending block-load requests; guarded by its own monitor. */
+    private final Queue<BAMAccessPlan> inputQueue;
+
+    public BGZFBlockLoadingDispatcher(final int numThreads, final int numFileHandles) {
+        threadPool = Executors.newFixedThreadPool(numThreads);
+        fileHandleCache = new FileHandleCache(numFileHandles);
+        inputQueue = new LinkedList<BAMAccessPlan>();
+
+        // NOTE(review): the pool is sized for numThreads, but only a single
+        // BlockLoader task is ever submitted (with decompression enabled) --
+        // confirm whether additional loaders were intended.
+        threadPool.execute(new BlockLoader(this,fileHandleCache,true));
+    }
+
+    /**
+     * Initiates a request for a new block load.
+     * @param readerPosition Position at which to load.
+     */
+    void queueBlockLoad(final BAMAccessPlan readerPosition) {
+        synchronized(inputQueue) {
+            inputQueue.add(readerPosition);
+            inputQueue.notify();
+        }
+    }
+
+    /**
+     * Claims the next work request from the queue, blocking until one becomes
+     * available.  Never returns null.
+     * @return The next work request.
+     */
+    BAMAccessPlan claimNextWorkRequest() {
+        synchronized(inputQueue) {
+            while(inputQueue.isEmpty()) {
+                try {
+                    inputQueue.wait();
+                }
+                catch(InterruptedException ex) {
+                    throw new ReviewedGATKException("Interrupt occurred waiting for next block reader work item");
+                }
+            }
+            return inputQueue.poll();
+        }
+    }
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/BlockInputStream.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/BlockInputStream.java
new file mode 100644
index 0000000..84c55e4
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/BlockInputStream.java
@@ -0,0 +1,451 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.datasources.reads;
+
+import htsjdk.samtools.GATKBAMFileSpan;
+import htsjdk.samtools.GATKChunk;
+import htsjdk.samtools.util.BlockCompressedInputStream;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+import org.broadinstitute.gatk.utils.sam.SAMReaderID;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+import java.util.Arrays;
+import java.util.LinkedList;
+import java.util.List;
+
+/**
+ * Presents decompressed blocks to the SAMFileReader.
+ */
+public class BlockInputStream extends InputStream {
+    /**
+     * Mechanism for triggering block loads.
+     */
+    private final BGZFBlockLoadingDispatcher dispatcher;
+
+    /**
+     * The reader whose data is supplied by this input stream.
+     */
+    private final SAMReaderID reader;
+
+    /**
+     * Length of the underlying BAM file, in bytes.
+     */
+    private final long length;
+
+    /**
+     * The latest error reported by an asynchronous block load; rethrown on the
+     * reader thread by checkForErrors().
+     */
+    private Throwable error;
+
+    /**
+     * Current access plan describing where in the file data is being read from.
+     */
+    private BAMAccessPlan accessPlan;
+
+    /**
+     * Buffer of block data delivered by the block loader; outside the lock it is
+     * always positioned for reading ([position,limit) = readable data).
+     */
+    private final ByteBuffer buffer;
+
+    /**
+     * Offsets of the given blocks in the buffer; parallel to blockPositions.
+     */
+    private LinkedList<Integer> blockOffsets = new LinkedList<Integer>();
+
+    /**
+     * Source (encoded file pointer) positions of the given blocks in the buffer.
+     */
+    private LinkedList<Long> blockPositions = new LinkedList<Long>();
+
+    /**
+     * Provides a lock to wait for more data to arrive.
+     */
+    private final Object lock = new Object();
+
+    /**
+     * An input stream to use when comparing data back to what it should look like;
+     * null unless validation mode was requested at construction.
+     */
+    private final BlockCompressedInputStream validatingInputStream;
+
+    /**
+     * Create a new block presenting input stream with a dedicated buffer.
+     * @param dispatcher the block loading messenger.
+     * @param reader the reader for which to load data.
+     * @param validate validates the contents read into the buffer against the contents of a Picard BlockCompressedInputStream.
+     */
+    BlockInputStream(final BGZFBlockLoadingDispatcher dispatcher, final SAMReaderID reader, final boolean validate) {
+        this.reader = reader;
+        this.length = reader.getSamFile().length();
+
+        // 64K buffer: large enough to hold a maximal BGZF block (presumably; confirm against loader's buffer sizing).
+        buffer = ByteBuffer.wrap(new byte[64*1024]);
+        buffer.order(ByteOrder.LITTLE_ENDIAN);
+
+        // The state of the buffer assumes that the range of data written into the buffer appears in the range
+        // [position,limit), while extra capacity exists in the range [limit,capacity)
+        buffer.limit(0);
+
+        this.dispatcher = dispatcher;
+        // TODO: Kill the region when all we want to do is start at the beginning of the stream and run to the end of the stream.
+        this.accessPlan = new BAMAccessPlan(reader,this,new GATKBAMFileSpan(new GATKChunk(0,Long.MAX_VALUE)));
+
+        // The block offsets / block positions guarantee that the ending offset/position in the data structure maps to
+        // the point in the file just following the last read.  These two arrays should never be empty; initializing
+        // to 0 to match the position above.
+        this.blockOffsets.add(0);
+        this.blockPositions.add(0L);
+
+        try {
+            if(validate) {
+                System.out.printf("BlockInputStream %s: BGZF block validation mode activated%n",this);
+                validatingInputStream = new BlockCompressedInputStream(reader.getSamFile());
+                // A bug in ValidatingInputStream means that calling getFilePointer() immediately after initialization will result in an NPE.
+                // Poke the stream to start reading data.
+                validatingInputStream.available();
+            }
+            else
+                validatingInputStream = null;
+        }
+        catch(IOException ex) {
+            throw new ReviewedGATKException("Unable to validate against Picard input stream",ex);
+        }
+    }
+
+    /**
+     * @return Length of the underlying BAM file, in bytes.
+     */
+    public long length() {
+        return length;
+    }
+
+    /**
+     * Compute the encoded BGZF file pointer corresponding to the current read
+     * position, by locating the buffered block containing position() and adding
+     * the within-block offset to that block's recorded source position.
+     * @return Encoded (block address << 16 | offset) virtual file pointer.
+     */
+    public long getFilePointer() {
+        long filePointer;
+        synchronized(lock) {
+            // Find the current block within the input stream.
+            int blockIndex;
+            for(blockIndex = 0; blockIndex+1 < blockOffsets.size() && buffer.position() > blockOffsets.get(blockIndex+1); blockIndex++)
+                ;
+            filePointer = blockPositions.get(blockIndex) + (buffer.position()-blockOffsets.get(blockIndex));
+        }
+
+//        if(validatingInputStream != null && filePointer != validatingInputStream.getFilePointer())
+//            throw new ReviewedGATKException(String.format("Position of input stream is invalid; expected (block address, block offset) = (%d,%d), got (%d,%d)",
+//                    BlockCompressedFilePointerUtil.getBlockAddress(validatingInputStream.getFilePointer()),BlockCompressedFilePointerUtil.getBlockOffset(validatingInputStream.getFilePointer()),
+//                    BlockCompressedFilePointerUtil.getBlockAddress(filePointer),BlockCompressedFilePointerUtil.getBlockOffset(filePointer)));
+
+        return filePointer;
+    }
+
+    /**
+     * Reset the access plan and empty the data buffer and block bookkeeping.
+     */
+    private void clearBuffers() {
+        this.accessPlan.reset();
+
+        // Buffer semantics say that outside of a lock, buffer should always be prepared for reading.
+        // Indicate no data to be read.
+        buffer.clear();
+        buffer.limit(0);
+
+        // Clear everything except the last block offset / position.
+        // NOTE(review): blockOffsets is reset to [0] while blockPositions retains its
+        // last element -- the two lists are intentionally(?) asymmetric here; confirm.
+        blockOffsets.clear();
+        blockOffsets.add(0);
+        while(blockPositions.size() > 1)
+            blockPositions.removeFirst();
+    }
+
+    /**
+     * @return True if the current access plan points past the end of the file
+     *         (or at a negative/sentinel block address); false otherwise.
+     */
+    public boolean eof() {
+        synchronized(lock) {
+            // TODO: Handle multiple empty BGZF blocks at end of the file.
+            return accessPlan != null && (accessPlan.getBlockAddress() < 0 || accessPlan.getBlockAddress() >= length);
+        }
+    }
+
+    /**
+     * Submits a new access plan for the given dataset and seeks to the given point.
+     * Discards all buffered data and, unless already at EOF, blocks until the
+     * first buffer fill for the new plan completes.
+     * @param accessPlan The next seek point for BAM data in this reader.
+     */
+    public void submitAccessPlan(final BAMAccessPlan accessPlan) {
+        //System.out.printf("Thread %s: submitting access plan for block at position: %d%n",Thread.currentThread().getId(),position.getBlockAddress());
+        this.accessPlan = accessPlan;
+        accessPlan.reset();
+
+        clearBuffers();
+
+        // Pull the iterator past any oddball chunks at the beginning of the shard (chunkEnd < chunkStart, empty chunks, etc).
+        // TODO: Don't pass these empty chunks in.
+        accessPlan.advancePosition(makeFilePointer(accessPlan.getBlockAddress(),0));
+
+        if(accessPlan.getBlockAddress() >= 0) {
+            waitForBufferFill();
+        }
+
+        // Keep the validating stream (if any) synchronized with the new seek point.
+        if(validatingInputStream != null) {
+            try {
+                validatingInputStream.seek(makeFilePointer(accessPlan.getBlockAddress(),0));
+            }
+            catch(IOException ex) {
+                throw new ReviewedGATKException("Unable to validate against Picard input stream",ex);
+            }
+        }
+
+    }
+
+
+    /**
+     * Reclaim buffer space by discarding blocks that have been fully consumed,
+     * shifting the remaining bytes to the front of the buffer and rebasing the
+     * recorded block offsets accordingly.  Must be called with the lock held.
+     */
+    private void compactBuffer() {
+        // Compact buffer to maximize storage space.
+        int bytesToRemove = 0;
+
+        // Look ahead to see if we can compact away the first blocks in the series.
+        while(blockOffsets.size() > 1 && buffer.position() >= blockOffsets.get(1)) {
+            blockOffsets.remove();
+            blockPositions.remove();
+            bytesToRemove = blockOffsets.peek();
+        }
+
+        // If we end up with an empty block at the end of the series, compact this as well.
+        if(buffer.remaining() == 0 && blockOffsets.size() > 1 && buffer.position() >= blockOffsets.peek()) {
+            bytesToRemove += buffer.position();
+            blockOffsets.remove();
+            blockPositions.remove();
+        }
+
+        int finalBufferStart = buffer.position() - bytesToRemove;
+        int finalBufferSize = buffer.remaining();
+
+        // Position the buffer to remove the unneeded data, and compact it away.
+        buffer.position(bytesToRemove);
+        buffer.compact();
+
+        // Reset the limits for reading.
+        buffer.position(finalBufferStart);
+        buffer.limit(finalBufferStart+finalBufferSize);
+
+        // Shift everything in the offset buffer down to accommodate the bytes removed from the buffer.
+        for(int i = 0; i < blockOffsets.size(); i++)
+            blockOffsets.set(i,blockOffsets.get(i)-bytesToRemove);
+    }
+
+    /**
+     * Push contents of incomingBuffer into the end of this buffer.
+     * MUST be called from a thread that is NOT the reader thread.
+     * Any exception raised here is recorded via reportException() for rethrow on
+     * the reader thread rather than propagated.
+     * @param incomingBuffer The data being pushed into this input stream.
+     * @param accessPlan target access plan for the data.
+     * @param filePosition the current position of the file pointer
+     */
+    public void copyIntoBuffer(final ByteBuffer incomingBuffer, final BAMAccessPlan accessPlan, final long filePosition) {
+        synchronized(lock) {
+            try {
+                // In validation mode, re-read the same block via htsjdk and require byte equality.
+                if(validatingInputStream != null) {
+                    byte[] validBytes = new byte[incomingBuffer.remaining()];
+
+                    byte[] currentBytes = new byte[incomingBuffer.remaining()];
+                    int pos = incomingBuffer.position();
+                    int lim = incomingBuffer.limit();
+                    incomingBuffer.get(currentBytes);
+
+                    incomingBuffer.limit(lim);
+                    incomingBuffer.position(pos);
+
+                    long currentFilePointer = validatingInputStream.getFilePointer();
+                    validatingInputStream.seek(makeFilePointer(accessPlan.getBlockAddress(), 0));
+                    validatingInputStream.read(validBytes);
+                    validatingInputStream.seek(currentFilePointer);
+
+                    if(!Arrays.equals(validBytes,currentBytes))
+                        throw new ReviewedGATKException(String.format("Bytes being inserted into BlockInputStream %s are incorrect",this));
+                }
+
+                compactBuffer();
+                // Open up the buffer for more reading.
+                buffer.limit(buffer.capacity());
+
+                // Get the spans overlapping this particular block...
+                List<GATKChunk> spansOverlapping = accessPlan.getSpansOverlappingBlock(accessPlan.getBlockAddress(),filePosition);
+
+                // ...and advance the block
+                this.accessPlan = accessPlan;
+                accessPlan.advancePosition(makeFilePointer(filePosition, 0));
+
+                // NOTE(review): single wait() rather than a condition loop -- a spurious
+                // wakeup before space is freed would overflow the put() below (the
+                // resulting exception is caught and reported).  Also, an interrupt here
+                // lands in the generic catch without restoring interrupt status; confirm.
+                if(buffer.remaining() < incomingBuffer.remaining())
+                    lock.wait();
+
+                final int bytesInIncomingBuffer = incomingBuffer.limit();
+
+                for(GATKChunk spanOverlapping: spansOverlapping) {
+                    // Clear out the endcap tracking state and add in the starting position for this transfer.
+                    blockOffsets.removeLast();
+                    blockOffsets.add(buffer.position());
+                    blockPositions.removeLast();
+                    blockPositions.add(spanOverlapping.getChunkStart());
+
+                    // Stream the buffer into the data stream.
+                    incomingBuffer.limit((spanOverlapping.getBlockEnd() > spanOverlapping.getBlockStart()) ? bytesInIncomingBuffer : spanOverlapping.getBlockOffsetEnd());
+                    incomingBuffer.position(spanOverlapping.getBlockOffsetStart());
+                    buffer.put(incomingBuffer);
+
+                    // Add the endcap for this transfer.
+                    blockOffsets.add(buffer.position());
+                    blockPositions.add(spanOverlapping.getChunkEnd());
+                }
+
+                // Set up the buffer for reading.
+                buffer.flip();
+
+                lock.notify();
+            }
+            catch(Exception ex) {
+                reportException(ex);
+                lock.notify();
+            }
+        }
+    }
+
+    /**
+     * Record an error raised on a loader thread and wake the reader thread so it
+     * can rethrow it via checkForErrors().
+     * @param t the error to report; never null.
+     */
+    void reportException(Throwable t) {
+        synchronized(lock) {
+            this.error = t;
+            lock.notify();
+        }
+    }
+
+    /**
+     * Rethrow, on the reader thread, any error previously reported by an
+     * asynchronous block load, preserving the original stack trace.
+     */
+    private void checkForErrors() {
+        synchronized(lock) {
+            if(error != null) {
+                ReviewedGATKException toThrow = new ReviewedGATKException(String.format("Thread %s, BlockInputStream %s: Unable to retrieve BAM data from disk",Thread.currentThread().getId(),this),error);
+                toThrow.setStackTrace(error.getStackTrace());
+                throw toThrow;
+            }
+        }
+    }
+
+    /**
+     * Reads the next byte of data from the input stream.
+     * @return Next byte of data, from 0->255, as an int, or -1 at end of stream.
+     */
+    @Override
+    public int read() {
+        byte[] singleByte = new byte[1];
+        // read(byte[]) returns -1 when no byte could be read; propagate that as
+        // EOF per the InputStream contract instead of returning stale array contents.
+        if(read(singleByte) <= 0)
+            return -1;
+        // Mask to 0-255: without it, bytes >= 0x80 sign-extend to negative ints,
+        // which callers would misinterpret as end-of-stream.
+        return singleByte[0] & 0xFF;
+    }
+
+    /**
+     * Fills the given byte array to the extent possible, delegating to
+     * {@link #read(byte[], int, int)} over the whole array.
+     * @param bytes byte array to be filled.
+     * @return The number of bytes actually read, or -1 if no bytes were available.
+     */
+    @Override
+    public int read(byte[] bytes) {
+        return read(bytes,0,bytes.length);
+    }
+
+    /**
+     * Read up to {@code length} bytes into {@code bytes} starting at {@code offset},
+     * triggering asynchronous buffer fills as the internal buffer drains.
+     * NOTE(review): a zero-length request returns -1, whereas the InputStream
+     * contract specifies 0 for length == 0 -- confirm whether callers depend on this.
+     * @param bytes destination array.
+     * @param offset first index in {@code bytes} to write.
+     * @param length maximum number of bytes to read.
+     * @return The number of bytes actually read, or -1 if none could be read.
+     */
+    @Override
+    public int read(byte[] bytes, final int offset, final int length) {
+        int remaining = length;
+        synchronized(lock) {
+            while(remaining > 0) {
+                // Check for error conditions during last read.
+                checkForErrors();
+
+                // If completely out of space, queue up another buffer fill.
+                waitForBufferFill();
+
+                // Couldn't manage to load any data at all; abort and return what's available.
+                if(buffer.remaining() == 0)
+                    break;
+
+                int numBytesToCopy = Math.min(buffer.remaining(),remaining);
+                buffer.get(bytes,length-remaining+offset,numBytesToCopy);
+                remaining -= numBytesToCopy;
+
+                //if(remaining > 0)
+                //    System.out.printf("Thread %s: read the first %d bytes of a %d byte request%n",Thread.currentThread().getId(),length-remaining,length);
+                // TODO: Assert that we don't copy across a block boundary
+            }
+
+            // Notify any waiting threads that some of the contents of the buffer were removed.
+            if(length-remaining > 0)
+                lock.notify();
+        }
+
+//        if(validatingInputStream != null) {
+//            byte[] validBytes = new byte[length];
+//            try {
+//                validatingInputStream.read(validBytes,offset,length);
+//                for(int i = offset; i < offset+length; i++) {
+//                    if(bytes[i] != validBytes[i])
+//                        throw new ReviewedGATKException(String.format("Thread %s: blockInputStream %s attempting to return wrong set of bytes; mismatch at offset %d",Thread.currentThread().getId(),this,i));
+//                }
+//            }
+//            catch(IOException ex) {
+//                throw new ReviewedGATKException("Unable to validate against Picard input stream",ex);
+//            }
+//        }
+
+        // If any data was copied into the buffer, return the amount of data copied.
+        if(remaining < length)
+            return length - remaining;
+
+        // Otherwise, return -1.
+        return -1;
+    }
+
+    /**
+     * Close the validating stream, if validation mode is active.  The shared
+     * data buffer itself requires no cleanup.
+     */
+    public void close() {
+        if(validatingInputStream != null) {
+            try {
+                validatingInputStream.close();
+            }
+            catch(IOException ex) {
+                throw new ReviewedGATKException("Unable to validate against Picard input stream",ex);
+            }
+        }
+    }
+
+    /**
+     * @return Path of the BAM file backing this stream.
+     */
+    public String getSource() {
+        return reader.getSamFilePath();
+    }
+
+    /**
+     * If the buffer is exhausted and EOF has not been reached, queue a block load
+     * with the dispatcher and block until a loader thread signals the lock.
+     * NOTE(review): the single wait() is not wrapped in a condition loop, so a
+     * spurious wakeup could return before data has actually arrived -- confirm.
+     */
+    private void waitForBufferFill() {
+        synchronized(lock) {
+            if(buffer.remaining() == 0 && !eof()) {
+                //System.out.printf("Thread %s is waiting for a buffer fill from position %d to buffer %s%n",Thread.currentThread().getId(),position.getBlockAddress(),this);
+                dispatcher.queueBlockLoad(accessPlan);
+                try {
+                    lock.wait();
+                }
+                catch(InterruptedException ex) {
+                    throw new ReviewedGATKException("Interrupt occurred waiting for buffer to fill",ex);
+                }
+            }
+        }
+    }
+
+    /**
+     * Create an encoded BAM file pointer given the address of a BGZF block and an offset.
+     * The block offset occupies the low 16 bits; the block address the high 48 bits.
+     * @param blockAddress Physical address on disk of a BGZF block.
+     * @param blockOffset Offset into the uncompressed data stored in the BGZF block.
+     * @return 64-bit pointer encoded according to the BAM spec.
+     */
+    public static long makeFilePointer(final long blockAddress, final int blockOffset) {
+        return blockAddress << 16 | blockOffset;
+    }
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/BlockLoader.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/BlockLoader.java
new file mode 100644
index 0000000..b26297d
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/BlockLoader.java
@@ -0,0 +1,189 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.datasources.reads;
+
+import htsjdk.samtools.util.BlockCompressedStreamConstants;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+import java.nio.channels.FileChannel;
+import java.util.zip.DataFormatException;
+import java.util.zip.Inflater;
+
+/**
+ * An engine for loading blocks.
+ */
+class BlockLoader implements Runnable {
+    /**
+     * Coordinates the input queue.
+     */
+    private BGZFBlockLoadingDispatcher dispatcher;
+
+    /**
+     * A cache from which to retrieve open file handles.
+     */
+    private final FileHandleCache fileHandleCache;
+
+    /**
+     * Whether asynchronous decompression should happen.
+     */
+    private final boolean decompress;
+
+    /**
+     * A direct input buffer for incoming data from disk; reused across reads,
+     * so this loader must not service two blocks concurrently.
+     */
+    private final ByteBuffer inputBuffer;
+
+    /**
+     * Create a block loader worker.
+     * @param dispatcher source of block-load work requests.
+     * @param fileHandleCache cache of open BAM file handles.
+     * @param decompress whether to inflate blocks before delivery.
+     */
+    public BlockLoader(final BGZFBlockLoadingDispatcher dispatcher, final FileHandleCache fileHandleCache, final boolean decompress) {
+        this.dispatcher = dispatcher;
+        this.fileHandleCache = fileHandleCache;
+        this.decompress = decompress;
+
+        // Sized for a full 64K BGZF block plus a trailing empty-gzip-block terminator.
+        this.inputBuffer = ByteBuffer.allocateDirect(64*1024 + BlockCompressedStreamConstants.EMPTY_GZIP_BLOCK.length);
+        inputBuffer.order(ByteOrder.LITTLE_ENDIAN);
+    }
+
+    /**
+     * Worker loop: repeatedly claim a load request, read (and optionally inflate)
+     * the corresponding BGZF block, and hand the bytes to the requesting
+     * BlockInputStream.  Loops forever by design; failures are reported back to
+     * the requesting stream rather than killing the worker thread.
+     */
+    public void run() {
+        for(;;) {
+            BAMAccessPlan accessPlan = null;
+            try {
+                accessPlan = dispatcher.claimNextWorkRequest();
+                FileInputStream inputStream = fileHandleCache.claimFileInputStream(accessPlan.getReader());
+
+                //long blockAddress = readerPosition.getBlockAddress();
+                //System.out.printf("Thread %s: BlockLoader: copying bytes from %s at position %d into %s%n",Thread.currentThread().getId(),inputStream,blockAddress,readerPosition.getInputStream());
+
+                ByteBuffer compressedBlock = readBGZFBlock(inputStream,accessPlan.getBlockAddress());
+                long nextBlockAddress = position(inputStream);
+                fileHandleCache.releaseFileInputStream(accessPlan.getReader(),inputStream);
+
+                ByteBuffer block = decompress ? decompressBGZFBlock(compressedBlock) : compressedBlock;
+                // bytesCopied feeds only the commented-out diagnostic below.
+                int bytesCopied = block.remaining();
+
+                BlockInputStream bamInputStream = accessPlan.getInputStream();
+                bamInputStream.copyIntoBuffer(block,accessPlan,nextBlockAddress);
+
+                //System.out.printf("Thread %s: BlockLoader: copied %d bytes from %s at position %d into %s%n",Thread.currentThread().getId(),bytesCopied,inputStream,blockAddress,readerPosition.getInputStream());
+            }
+            catch(Throwable error) {
+                // Route the failure to the stream whose data was being loaded, if known.
+                if(accessPlan != null && accessPlan.getInputStream() != null)
+                    accessPlan.getInputStream().reportException(error);
+            }
+        }
+
+    }
+
+    /**
+     * Read one BGZF block (header plus compressed payload) starting at the given
+     * file offset, skipping any blocks whose uncompressed size is zero until EOF.
+     * NOTE(review): return values of channel.read() are unchecked; a short read
+     * surfaces only via the header-length guard below -- confirm acceptable.
+     * @param inputStream stream whose channel is positioned and read.
+     * @param blockAddress physical on-disk offset of the BGZF block.
+     * @return the shared inputBuffer, flipped and ready for reading.
+     * @throws IOException if the underlying channel access fails.
+     */
+    private ByteBuffer readBGZFBlock(final FileInputStream inputStream, final long blockAddress) throws IOException {
+        FileChannel channel = inputStream.getChannel();
+
+        // Read the block header
+        channel.position(blockAddress);
+
+        int uncompressedDataSize = 0;
+        int bufferSize = 0;
+
+        do {
+            inputBuffer.clear();
+            inputBuffer.limit(BlockCompressedStreamConstants.BLOCK_HEADER_LENGTH);
+            channel.read(inputBuffer);
+
+            // Flip the header into reading mode so the gzip magic numbers and the
+            // block-size field can be examined.
+            inputBuffer.flip();
+            if(inputBuffer.remaining() != BlockCompressedStreamConstants.BLOCK_HEADER_LENGTH)
+                throw new ReviewedGATKException("BUG: unable to read a the complete block header in one pass.");
+
+            // Verify that the file was read at a valid point.
+            if(unpackUByte8(inputBuffer,0) != BlockCompressedStreamConstants.GZIP_ID1 ||
+                    unpackUByte8(inputBuffer,1) != BlockCompressedStreamConstants.GZIP_ID2 ||
+                    unpackUByte8(inputBuffer,3) != BlockCompressedStreamConstants.GZIP_FLG ||
+                    unpackUInt16(inputBuffer,10) != BlockCompressedStreamConstants.GZIP_XLEN ||
+                    unpackUByte8(inputBuffer,12) != BlockCompressedStreamConstants.BGZF_ID1 ||
+                    unpackUByte8(inputBuffer,13) != BlockCompressedStreamConstants.BGZF_ID2) {
+                throw new ReviewedGATKException("BUG: Started reading compressed block at incorrect position");
+            }
+
+            inputBuffer.position(BlockCompressedStreamConstants.BLOCK_LENGTH_OFFSET);
+            // BSIZE stores (total block size - 1); +1 recovers the true block length.
+            bufferSize = unpackUInt16(inputBuffer,BlockCompressedStreamConstants.BLOCK_LENGTH_OFFSET)+1;
+
+            // Adjust buffer limits and finish reading the block.  Also read the next header, just in case there's a 0-byte block.
+            inputBuffer.limit(bufferSize);
+            inputBuffer.position(BlockCompressedStreamConstants.BLOCK_HEADER_LENGTH);
+            channel.read(inputBuffer);
+
+            // Check the uncompressed length.  If 0 and not at EOF, we'll want to check the next block.
+            uncompressedDataSize = inputBuffer.getInt(inputBuffer.limit()-4);
+            //System.out.printf("Uncompressed block size of the current block (at position %d) is %d%n",channel.position()-inputBuffer.limit(),uncompressedDataSize);
+        }
+        while(uncompressedDataSize == 0 && channel.position() < channel.size());
+
+        // Prepare the buffer for reading.
+        inputBuffer.flip();
+
+        return inputBuffer;
+    }
+
+    private ByteBuffer decompressBGZFBlock(final ByteBuffer bgzfBlock) throws DataFormatException {
+        final int compressedBufferSize = bgzfBlock.remaining();
+
+        // Determine the uncompressed buffer size (
+        bgzfBlock.position(bgzfBlock.limit()-4);
+        int uncompressedBufferSize = bgzfBlock.getInt();
+        byte[] uncompressedContent = new byte[uncompressedBufferSize];
+
+        // Bound the CDATA section of the buffer.
+        bgzfBlock.limit(compressedBufferSize-BlockCompressedStreamConstants.BLOCK_FOOTER_LENGTH);
+        bgzfBlock.position(BlockCompressedStreamConstants.BLOCK_HEADER_LENGTH);
+        byte[] compressedContent = new byte[bgzfBlock.remaining()];
+        ByteBuffer.wrap(compressedContent).put(bgzfBlock);
+
+        // Decompress the buffer.
+        final Inflater inflater = new Inflater(true);
+        inflater.setInput(compressedContent);
+        int bytesUncompressed = inflater.inflate(uncompressedContent);
+        if(bytesUncompressed != uncompressedBufferSize)
+            throw new ReviewedGATKException("Error decompressing block");
+
+        return ByteBuffer.wrap(uncompressedContent);
+    }
+
+    private long position(final FileInputStream inputStream) throws IOException {
+        return inputStream.getChannel().position();
+    }
+
+    private int unpackUByte8(final ByteBuffer buffer,final int position) {
+        return buffer.get(position) & 0xFF;
+    }
+
+    private int unpackUInt16(final ByteBuffer buffer,final int position) {
+        // Read out the size of the full BGZF block into a two bit short container, then 'or' that
+        // value into an int buffer to transfer the bitwise contents into an int.
+        return buffer.getShort(position) & 0xFFFF;
+    }
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/FileHandleCache.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/FileHandleCache.java
new file mode 100644
index 0000000..1765b7b
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/FileHandleCache.java
@@ -0,0 +1,229 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.datasources.reads;
+
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+import org.broadinstitute.gatk.utils.exceptions.GATKException;
+import org.broadinstitute.gatk.utils.sam.SAMReaderID;
+
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.LinkedHashMap;
+import java.util.Map;
+
+/**
+ * Caches frequently used  file handles.  Right now, caches only a single file handle.
+ * TODO: Generalize to support arbitrary file handle caches.
+ */
+public class FileHandleCache {
+    /**
+     * The underlying data structure storing file handles.
+     */
+    private final FileHandleStorage fileHandleStorage;
+
+    /**
+     * How many file handles should be kept open at once.
+     */
+    private final int cacheSize;
+
+    /**
+     * A uniquifier: assign a unique ID to every instance of a file handle.
+     */
+    private final Map<SAMReaderID,Integer> keyCounter = new HashMap<SAMReaderID,Integer>();
+
+    /**
+     * A shared lock, private so that outside users cannot notify it.
+     */
+    private final Object lock = new Object();
+
+    /**
+     * Indicates how many file handles are outstanding at this point.
+     */
+    private int numOutstandingFileHandles = 0;
+
+    /**
+     * Create a new file handle cache of the given cache size.
+     * @param cacheSize how many readers to hold open at once.
+     */
+    public FileHandleCache(final int cacheSize) {
+        this.cacheSize = cacheSize;
+        fileHandleStorage = new FileHandleStorage();
+    }
+
+    /**
+     * Retrieves or opens a file handle for the given reader ID.
+     * @param key The ke
+     * @return A file input stream from the cache, if available, or otherwise newly opened.
+     */
+    public FileInputStream claimFileInputStream(final SAMReaderID key) {
+        synchronized(lock) {
+            FileInputStream inputStream = findExistingEntry(key);
+            if(inputStream == null) {
+                try {
+                    // If the cache is maxed out, wait for another file handle to emerge.
+                    if(numOutstandingFileHandles >= cacheSize)
+                        lock.wait();
+                }
+                catch(InterruptedException ex) {
+                    throw new ReviewedGATKException("Interrupted while waiting for a file handle");
+                }
+                inputStream = openInputStream(key);
+            }
+            numOutstandingFileHandles++;
+
+            //System.out.printf("Handing input stream %s to thread %s%n",inputStream,Thread.currentThread().getId());
+            return inputStream;
+        }
+    }
+
+    /**
+     * Releases the current reader and returns it to the cache.
+     * @param key The reader.
+     * @param inputStream The stream being used.
+     */
+    public void releaseFileInputStream(final SAMReaderID key, final FileInputStream inputStream) {
+        synchronized(lock) {
+            numOutstandingFileHandles--;
+            UniqueKey newID = allocateKey(key);
+            fileHandleStorage.put(newID,inputStream);
+            // Let any listeners know that another file handle has become available.
+            lock.notify();
+        }
+    }
+
+    /**
+     * Finds an existing entry in the storage mechanism.
+     * @param key Reader.
+     * @return a cached stream, if available.  Otherwise,
+     */
+    private FileInputStream findExistingEntry(final SAMReaderID key) {
+        int existingHandles = getMostRecentUniquifier(key);
+
+        // See if any of the keys currently exist in the repository.
+        for(int i = 0; i <= existingHandles; i++) {
+            UniqueKey uniqueKey = new UniqueKey(key,i);
+            if(fileHandleStorage.containsKey(uniqueKey))
+                return fileHandleStorage.remove(uniqueKey);
+        }
+
+        return null;
+    }
+
+    /**
+     * Gets the most recent uniquifier used for the given reader.
+     * @param reader Reader for which to determine uniqueness.
+     * @return
+     */
+    private int getMostRecentUniquifier(final SAMReaderID reader) {
+        if(keyCounter.containsKey(reader))
+            return keyCounter.get(reader);
+        else return -1;
+    }
+
+    private UniqueKey allocateKey(final SAMReaderID reader) {
+        int uniquifier = getMostRecentUniquifier(reader)+1;
+        keyCounter.put(reader,uniquifier);
+        return new UniqueKey(reader,uniquifier);
+    }
+
+    private FileInputStream openInputStream(final SAMReaderID reader) {
+        try {
+            return new FileInputStream(reader.getSamFilePath());
+        }
+        catch(IOException ex) {
+            throw new GATKException("Unable to open input file");
+        }
+    }
+
+    private void closeInputStream(final FileInputStream inputStream) {
+        try {
+            inputStream.close();
+        }
+        catch(IOException ex) {
+            throw new GATKException("Unable to open input file");
+        }
+    }
+
+    /**
+     * Actually contains the file handles, purging them as they get too old.
+     */
+    private class FileHandleStorage extends LinkedHashMap<UniqueKey,FileInputStream> {
+        /**
+         * Remove the oldest entry
+         * @param entry Entry to consider removing.
+         * @return True if the cache size has been exceeded.  False otherwise.
+         */
+        @Override
+        protected boolean removeEldestEntry(Map.Entry<UniqueKey,FileInputStream> entry) {
+            synchronized (lock) {
+                if(size() > cacheSize) {
+                    keyCounter.put(entry.getKey().key,keyCounter.get(entry.getKey().key)-1);
+                    closeInputStream(entry.getValue());
+
+                    return true;
+                }
+            }
+            return false;
+        }
+    }
+
+    /**
+     * Uniquifies a key by adding a numerical uniquifier.
+     */
+    private class UniqueKey {
+        /**
+         * The file handle's key.
+         */
+        private final SAMReaderID key;
+
+        /**
+         * A uniquifier, so that multiple of the same reader can exist in the cache.
+         */
+        private final int uniqueID;
+
+        public UniqueKey(final SAMReaderID reader, final int uniqueID) {
+            this.key = reader;
+            this.uniqueID = uniqueID;
+        }
+
+        @Override
+        public boolean equals(Object other) {
+            if(!(other instanceof UniqueKey))
+                return false;
+            UniqueKey otherUniqueKey = (UniqueKey)other;
+            return key.equals(otherUniqueKey.key) && this.uniqueID == otherUniqueKey.uniqueID;
+        }
+
+        @Override
+        public int hashCode() {
+            return key.hashCode();
+        }
+    }
+
+
+
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/FilePointer.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/FilePointer.java
new file mode 100644
index 0000000..78b6eab
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/FilePointer.java
@@ -0,0 +1,437 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.datasources.reads;
+
+import htsjdk.samtools.util.PeekableIterator;
+import htsjdk.samtools.GATKBAMFileSpan;
+import htsjdk.samtools.GATKChunk;
+import htsjdk.samtools.SAMFileSpan;
+import htsjdk.samtools.SAMRecord;
+import org.broadinstitute.gatk.utils.GenomeLoc;
+import org.broadinstitute.gatk.utils.GenomeLocParser;
+import org.broadinstitute.gatk.utils.Utils;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+import org.broadinstitute.gatk.utils.interval.IntervalMergingRule;
+import org.broadinstitute.gatk.utils.interval.IntervalUtils;
+import org.broadinstitute.gatk.utils.sam.SAMReaderID;
+
+import java.util.*;
+
+/**
+ * Represents a small section of a BAM file, and every associated interval.
+ */
+public class FilePointer {
+    protected final SortedMap<SAMReaderID,SAMFileSpan> fileSpans = new TreeMap<SAMReaderID,SAMFileSpan>();
+    protected final List<GenomeLoc> locations = new ArrayList<GenomeLoc>();
+    protected final IntervalMergingRule intervalMergingRule;
+
+    /**
+     * Does this file pointer point into an unmapped region?
+     */
+    protected final boolean isRegionUnmapped;
+
+    /**
+     * Is this FilePointer "monolithic"? That is, does it represent all regions in all files that we will
+     * ever visit during this GATK run? If this is set to true, the engine will expect to see only this
+     * one FilePointer during the entire run, and this FilePointer will be allowed to contain intervals
+     * from more than one contig.
+     */
+    private boolean isMonolithic = false;
+
+    /**
+     * Index of the contig covered by this FilePointer. Only meaningful for non-monolithic, mapped FilePointers
+     */
+    private Integer contigIndex = null;
+
+
+    public FilePointer( final IntervalMergingRule mergeRule, final List<GenomeLoc> locations ) {
+        this.intervalMergingRule = mergeRule;
+        this.locations.addAll(locations);
+        this.isRegionUnmapped = checkUnmappedStatus();
+
+        validateAllLocations();
+        if ( locations.size() > 0 ) {
+            contigIndex = locations.get(0).getContigIndex();
+        }
+    }
+
+    public FilePointer( final IntervalMergingRule mergeRule, final GenomeLoc... locations ) {
+        this(mergeRule, Arrays.asList(locations));
+    }
+
+    public FilePointer( final Map<SAMReaderID,SAMFileSpan> fileSpans, final IntervalMergingRule mergeRule, final List<GenomeLoc> locations ) {
+        this(mergeRule, locations);
+        this.fileSpans.putAll(fileSpans);
+    }
+
+    private boolean checkUnmappedStatus() {
+        boolean foundMapped = false, foundUnmapped = false;
+
+        for( GenomeLoc location: locations ) {
+            if ( GenomeLoc.isUnmapped(location) )
+                foundUnmapped = true;
+            else
+                foundMapped = true;
+        }
+        if ( foundMapped && foundUnmapped )
+            throw new ReviewedGATKException("BUG: File pointers cannot be mixed mapped/unmapped.");
+
+        return foundUnmapped;
+    }
+
+    private void validateAllLocations() {
+        // Unmapped and monolithic FilePointers are exempted from the one-contig-only restriction
+        if ( isRegionUnmapped || isMonolithic ) {
+            return;
+        }
+
+        Integer previousContigIndex = null;
+
+        for ( GenomeLoc location : locations ) {
+            if ( previousContigIndex != null && previousContigIndex != location.getContigIndex() ) {
+                throw new ReviewedGATKException("Non-monolithic file pointers must contain intervals from at most one contig");
+            }
+
+            previousContigIndex = location.getContigIndex();
+        }
+    }
+
+    private void validateLocation( GenomeLoc location ) {
+        if ( isRegionUnmapped != GenomeLoc.isUnmapped(location) ) {
+            throw new ReviewedGATKException("BUG: File pointers cannot be mixed mapped/unmapped.");
+        }
+        if ( ! isRegionUnmapped && ! isMonolithic && contigIndex != null && contigIndex != location.getContigIndex() ) {
+            throw new ReviewedGATKException("Non-monolithic file pointers must contain intervals from at most one contig");
+        }
+    }
+
+    /**
+     * Returns an immutable view of this FilePointer's file spans
+     *
+     * @return an immutable view of this FilePointer's file spans
+     */
+    public Map<SAMReaderID, SAMFileSpan> getFileSpans() {
+        return Collections.unmodifiableMap(fileSpans);
+    }
+
+    /**
+     * Returns an immutable variant of the list of locations.
+     * @return
+     */
+    public List<GenomeLoc> getLocations() {
+        return Collections.unmodifiableList(locations);
+    }
+
+    /**
+     * Returns the index of the contig into which this FilePointer points (a FilePointer can represent
+     * regions in at most one contig).
+     *
+     * @return the index of the contig into which this FilePointer points
+     */
+    public int getContigIndex() {
+        return locations.size() > 0 ? locations.get(0).getContigIndex() : SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX;
+    }
+
+    /**
+     * Returns the IntervalMergingRule used by this FilePointer to merge adjacent locations
+     *
+     * @return the IntervalMergingRule used by this FilePointer (never null)
+     */
+    public IntervalMergingRule getIntervalMergingRule() {
+        return intervalMergingRule;
+    }
+
+    /**
+     * Is this FilePointer "monolithic"? That is, does it represent all regions in all files that we will
+     * ever visit during this GATK run? If this is set to true, the engine will expect to see only this
+     * one FilePointer during the entire run, and this FilePointer will be allowed to contain intervals
+     * from more than one contig.
+     *
+     * @return true if this FP is a monolithic FP representing all regions in all files, otherwise false
+     */
+    public boolean isMonolithic() {
+        return isMonolithic;
+    }
+
+    /**
+     * Set this FP's "monolithic" status to true or false. An FP is monolithic if it represents all
+     * regions in all files that we will ever visit, and is the only FP we will ever create. A monolithic
+     * FP may contain intervals from more than one contig.
+     *
+     * @param isMonolithic set this FP's monolithic status to this value
+     */
+    public void setIsMonolithic( boolean isMonolithic ) {
+        this.isMonolithic = isMonolithic;
+    }
+
+    @Override
+    public boolean equals(final Object other) {
+        if(!(other instanceof FilePointer))
+            return false;
+        FilePointer otherFilePointer = (FilePointer)other;
+
+        // intervals
+        if(this.locations.size() != otherFilePointer.locations.size())
+            return false;
+        for(int i = 0; i < locations.size(); i++) {
+            if(!this.locations.get(i).equals(otherFilePointer.locations.get(i)))
+                return false;
+        }
+
+        // fileSpans
+        if(this.fileSpans.size() != otherFilePointer.fileSpans.size())
+            return false;
+        Iterator<Map.Entry<SAMReaderID,SAMFileSpan>> thisEntries = this.fileSpans.entrySet().iterator();
+        Iterator<Map.Entry<SAMReaderID,SAMFileSpan>> otherEntries = otherFilePointer.fileSpans.entrySet().iterator();
+        while(thisEntries.hasNext() || otherEntries.hasNext()) {
+            if(!thisEntries.next().equals(otherEntries.next()))
+                return false;
+        }
+        
+        return true;
+    }
+
+    public void addLocation(final GenomeLoc location) {
+        validateLocation(location);
+
+        this.locations.add(location);
+        if ( contigIndex == null ) {
+            contigIndex = location.getContigIndex();
+        }
+    }
+
+    public void addFileSpans(final SAMReaderID id, final SAMFileSpan fileSpan) {
+        this.fileSpans.put(id,fileSpan);
+    }
+
+    public void addFileSpans(final Map<SAMReaderID, GATKBAMFileSpan> fileSpans) {
+        this.fileSpans.putAll(fileSpans);
+    }
+
+
+    /**
+     * Computes the size of this file span, in uncompressed bytes.
+     * @return Size of the file span.
+     */
+    public long size() {
+        long size = 0L;
+        for(SAMFileSpan fileSpan: fileSpans.values())
+            size += ((GATKBAMFileSpan)fileSpan).size();
+        return size;
+    }
+
+    /**
+     * Returns the difference in size between two filespans.
+     * @param other Other filespan against which to measure.
+     * @return The difference in size between the two file pointers.
+     */
+    public long minus(final FilePointer other) {
+        long difference = 0;
+        PeekableIterator<Map.Entry<SAMReaderID,SAMFileSpan>> thisIterator = new PeekableIterator<Map.Entry<SAMReaderID,SAMFileSpan>>(this.fileSpans.entrySet().iterator());
+        PeekableIterator<Map.Entry<SAMReaderID,SAMFileSpan>> otherIterator = new PeekableIterator<Map.Entry<SAMReaderID,SAMFileSpan>>(other.fileSpans.entrySet().iterator());
+
+        while(thisIterator.hasNext()) {
+            // If there are no elements left in the 'other' iterator, spin out this iterator.
+            if(!otherIterator.hasNext()) {
+                GATKBAMFileSpan nextSpan = (GATKBAMFileSpan)thisIterator.next().getValue();
+                difference += nextSpan.size();
+                continue;
+            }
+
+            // Otherwise, compare the latest value.
+            int compareValue = thisIterator.peek().getKey().compareTo(otherIterator.peek().getKey());
+
+            if(compareValue < 0) {
+                // This before other.
+                difference += ((GATKBAMFileSpan)thisIterator.next().getValue()).size();
+            }
+            else if(compareValue > 0) {
+                // Other before this.
+                difference += ((GATKBAMFileSpan)otherIterator.next().getValue()).size();
+            }
+            else {
+                // equality; difference the values.
+                GATKBAMFileSpan thisRegion = (GATKBAMFileSpan)thisIterator.next().getValue();
+                GATKBAMFileSpan otherRegion = (GATKBAMFileSpan)otherIterator.next().getValue();
+                difference += Math.abs(thisRegion.minus(otherRegion).size());
+            }
+        }
+        return difference;
+    }
+
+    /**
+     * Combines two file pointers into one.
+     * @param parser The genomelocparser to use when manipulating intervals.
+     * @param other File pointer to combine into this one.
+     * @return A completely new file pointer that is the combination of the two.
+     */
+    public FilePointer combine(final GenomeLocParser parser, final FilePointer other) {
+        FilePointer combined = new FilePointer(intervalMergingRule);
+
+        List<GenomeLoc> intervals = new ArrayList<GenomeLoc>();
+        intervals.addAll(locations);
+        intervals.addAll(other.locations);
+        for(GenomeLoc interval: IntervalUtils.sortAndMergeIntervals(parser,intervals,intervalMergingRule))
+            combined.addLocation(interval);
+
+        PeekableIterator<Map.Entry<SAMReaderID,SAMFileSpan>> thisIterator = new PeekableIterator<Map.Entry<SAMReaderID,SAMFileSpan>>(this.fileSpans.entrySet().iterator());
+        PeekableIterator<Map.Entry<SAMReaderID,SAMFileSpan>> otherIterator = new PeekableIterator<Map.Entry<SAMReaderID,SAMFileSpan>>(other.fileSpans.entrySet().iterator());
+
+        while(thisIterator.hasNext() || otherIterator.hasNext()) {
+            int compareValue;
+            if(!otherIterator.hasNext()) {
+                compareValue = -1;
+            }
+            else if(!thisIterator.hasNext())
+                compareValue = 1;
+            else
+                compareValue = thisIterator.peek().getKey().compareTo(otherIterator.peek().getKey());
+
+            // This before other.
+            if(compareValue < 0)
+                mergeElementsInto(combined,thisIterator);
+            // Other before this.
+            else if(compareValue > 0)
+                mergeElementsInto(combined,otherIterator);
+            // equality; union the values.
+            else
+                mergeElementsInto(combined,thisIterator,otherIterator);
+        }
+        return combined;
+    }
+
+    /**
+     * Roll the next element in the iterator into the combined entry.
+     * @param combined Entry into which to roll the next element.
+     * @param iterators Sources of next elements.
+     */
+    private void mergeElementsInto(final FilePointer combined, Iterator<Map.Entry<SAMReaderID,SAMFileSpan>>... iterators) {
+        if(iterators.length == 0)
+            throw new ReviewedGATKException("Tried to add zero elements to an existing file pointer.");
+        Map.Entry<SAMReaderID,SAMFileSpan> initialElement = iterators[0].next();
+        GATKBAMFileSpan fileSpan = (GATKBAMFileSpan)initialElement.getValue();
+        for(int i = 1; i < iterators.length; i++)
+            fileSpan = fileSpan.union((GATKBAMFileSpan)iterators[i].next().getValue());
+        combined.addFileSpans(initialElement.getKey(),fileSpan);
+    }
+
+    /**
+     * Efficiently generate the union of the n FilePointers passed in. Much more efficient than
+     * combining two FilePointers at a time using the combine() method above.
+     *
+     * IMPORTANT: the FilePointers to be unioned must either all represent regions on the
+     * same contig, or all be unmapped, since we cannot create FilePointers with a mix of
+     * contigs or with mixed mapped/unmapped regions.
+     *
+     * @param filePointers the FilePointers to union
+     * @param parser our GenomeLocParser
+     * @return the union of the FilePointers passed in
+     */
+    public static FilePointer union( List<FilePointer> filePointers, GenomeLocParser parser ) {
+        if ( filePointers == null || filePointers.isEmpty() ) {
+            return new FilePointer(IntervalMergingRule.ALL);
+        }
+
+        Map<SAMReaderID, List<GATKChunk>> fileChunks = new HashMap<SAMReaderID, List<GATKChunk>>();
+        List<GenomeLoc> locations = new ArrayList<GenomeLoc>();
+        IntervalMergingRule mergeRule = filePointers.get(0).getIntervalMergingRule();
+
+        // First extract all intervals and file chunks from the FilePointers into unsorted, unmerged collections
+        for ( FilePointer filePointer : filePointers ) {
+            locations.addAll(filePointer.getLocations());
+            if (mergeRule != filePointer.getIntervalMergingRule())
+                throw new ReviewedGATKException("All FilePointers in FilePointer.union() must have use the same IntervalMergeRule");
+
+            for ( Map.Entry<SAMReaderID, SAMFileSpan> fileSpanEntry : filePointer.getFileSpans().entrySet() ) {
+                GATKBAMFileSpan fileSpan = (GATKBAMFileSpan)fileSpanEntry.getValue();
+
+                if ( fileChunks.containsKey(fileSpanEntry.getKey()) ) {
+                    fileChunks.get(fileSpanEntry.getKey()).addAll(fileSpan.getGATKChunks());
+                }
+                else {
+                    fileChunks.put(fileSpanEntry.getKey(), fileSpan.getGATKChunks());
+                }
+            }
+        }
+
+        // Now sort and merge the intervals
+        List<GenomeLoc> sortedMergedLocations = new ArrayList<GenomeLoc>();
+        sortedMergedLocations.addAll(IntervalUtils.sortAndMergeIntervals(parser, locations, mergeRule));
+
+        // For each BAM file, convert from an unsorted, unmerged list of chunks to a GATKBAMFileSpan containing
+        // the sorted, merged union of the chunks for that file
+        Map<SAMReaderID, SAMFileSpan> mergedFileSpans = new HashMap<SAMReaderID, SAMFileSpan>(fileChunks.size());
+        for ( Map.Entry<SAMReaderID, List<GATKChunk>> fileChunksEntry : fileChunks.entrySet() ) {
+            List<GATKChunk> unmergedChunks = fileChunksEntry.getValue();
+            mergedFileSpans.put(fileChunksEntry.getKey(),
+                                (new GATKBAMFileSpan(unmergedChunks.toArray(new GATKChunk[unmergedChunks.size()]))).union(new GATKBAMFileSpan()));
+        }
+
+        return new FilePointer(mergedFileSpans, mergeRule, sortedMergedLocations);
+    }
+
+    /**
+     * Returns true if any of the file spans in this FilePointer overlap their counterparts in
+     * the other FilePointer. "Overlap" is defined as having an overlapping extent (the region
+     * from the start of the first chunk to the end of the last chunk).
+     *
+     * @param other the FilePointer against which to check overlap with this FilePointer
+     * @return true if any file spans overlap their counterparts in other, otherwise false
+     */
+    public boolean hasFileSpansOverlappingWith( FilePointer other ) {
+        for ( Map.Entry<SAMReaderID, SAMFileSpan> thisFilePointerEntry : fileSpans.entrySet() ) {
+            GATKBAMFileSpan thisFileSpan = new GATKBAMFileSpan(thisFilePointerEntry.getValue());
+
+            SAMFileSpan otherEntry = other.fileSpans.get(thisFilePointerEntry.getKey());
+            if ( otherEntry == null ) {
+                continue;  // no counterpart for this file span in other
+            }
+            GATKBAMFileSpan otherFileSpan = new GATKBAMFileSpan(otherEntry);
+
+            if ( thisFileSpan.getExtent().overlaps(otherFileSpan.getExtent()) ) {
+                return true;
+            }
+        }
+
+        return false;
+    }
+
+    @Override
+    public String toString() {
+        StringBuilder builder = new StringBuilder();
+        builder.append("FilePointer:\n");
+        builder.append("\tlocations = {");
+        builder.append(Utils.join(";",locations));
+        builder.append("}\n\tregions = \n");
+        for(Map.Entry<SAMReaderID,SAMFileSpan> entry: fileSpans.entrySet()) {
+            builder.append(entry.getKey());
+            builder.append("= {");
+            builder.append(entry.getValue());
+            builder.append("}");
+        }
+        return builder.toString();
+    }
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/GATKBAMIndex.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/GATKBAMIndex.java
new file mode 100644
index 0000000..b1d54d2
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/GATKBAMIndex.java
@@ -0,0 +1,469 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.datasources.reads;
+
+import htsjdk.samtools.*;
+import htsjdk.samtools.seekablestream.SeekableBufferedStream;
+import htsjdk.samtools.seekablestream.SeekableFileStream;
+import htsjdk.samtools.seekablestream.SeekableStream;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+import org.broadinstitute.gatk.utils.exceptions.UserException;
+
+import java.io.File;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
+/**
+ * A basic interface for querying BAM indices.
+ * Very much not thread-safe.
+ *
+ * @author mhanna
+ * @version 0.1
+ */
+public class GATKBAMIndex {
+    /**
+     * BAM index file magic number.
+     */
+    private static final byte[] BAM_INDEX_MAGIC = "BAI\1".getBytes();
+
+    /**
+     * Reports the total amount of genomic data that any bin can index.
+     */
+    protected static final int BIN_GENOMIC_SPAN = 512*1024*1024;
+
+    /**
+     * What is the starting bin for each level?
+     */
+    private static final int[] LEVEL_STARTS = {0,1,9,73,585,4681};
+
+    /**
+     * Reports the maximum number of bins that can appear in a BAM file.
+     */
+    public static final int MAX_BINS = 37450;   // =(8^6-1)/7+1
+
+    private final SAMSequenceDictionary sequenceDictionary;
+    private final File mFile;
+
+    //TODO: figure out a good value for this buffer size
+    private static final int BUFFERED_STREAM_BUFFER_SIZE = 8192;
+
+    /**
+     * Number of sequences stored in this index.
+     */
+    private final int sequenceCount;
+
+    /**
+     * A cache of the starting positions of the sequences.
+     */
+    private final long[] sequenceStartCache;
+
+    private SeekableFileStream fileStream;
+    private SeekableStream baiStream;
+    private SeekableBufferedStream bufferedStream;
+    private long fileLength;
+
+    /**
+     * Opens the given BAM index (BAI) file, validates its magic number, reads the
+     * sequence count, and initializes the per-sequence start-position cache.
+     * The file is closed again before the constructor returns; subsequent queries
+     * reopen it on demand.
+     *
+     * @param file the BAM index file on disk.
+     * @param sequenceDictionary sequence dictionary used to adapt non-BAI indices.
+     * @throws ReviewedGATKException if the file does not start with the BAI magic number.
+     */
+    public GATKBAMIndex(final File file, final SAMSequenceDictionary sequenceDictionary) {
+        mFile = file;
+        this.sequenceDictionary = sequenceDictionary;
+
+        // Open the file stream.
+        openIndexFile();
+
+        // Verify the magic number.
+        seek(0);
+        final byte[] buffer = readBytes(4);
+        if (!Arrays.equals(buffer, BAM_INDEX_MAGIC)) {
+            throw new ReviewedGATKException("Invalid file header in BAM index " + mFile +
+                                       ": " + new String(buffer));
+        }
+
+        seek(4);
+
+        sequenceCount = readInteger();
+
+        // Create a cache of the starting position of each sequence.  Initialize it to -1.
+        // (Loop starts at 1 because element 0 is seeded with the real position below.)
+        sequenceStartCache = new long[sequenceCount];
+        for(int i = 1; i < sequenceCount; i++)
+            sequenceStartCache[i] = -1;
+
+        // Seed the first element in the array with the current position.
+        if(sequenceCount > 0)
+            sequenceStartCache[0] = position();
+
+        closeIndexFile();
+    }
+
+    /**
+     * Reads the full index data (binning index + linear index) for a single
+     * reference sequence.  Opens the index file, skips to the requested
+     * sequence, parses every bin and its chunk list, then the linear index,
+     * and closes the file again.
+     *
+     * @param referenceSequence 0-based reference sequence id to load.
+     * @return the parsed index data for that reference.
+     * @throws ReviewedGATKException if referenceSequence is out of range.
+     */
+    public GATKBAMIndexData readReferenceSequence(final int referenceSequence) {
+        openIndexFile();
+
+        if (referenceSequence >= sequenceCount)
+            throw new ReviewedGATKException("Invalid sequence number " + referenceSequence + " in index file " + mFile);
+
+        skipToSequence(referenceSequence);
+
+        int binCount = readInteger();
+        List<GATKBin> bins = new ArrayList<>();
+        for (int binNumber = 0; binNumber < binCount; binNumber++) {
+            final int indexBin = readInteger();
+            final int nChunks = readInteger();
+
+            // Each chunk is a (begin, end) pair of virtual file offsets -- two longs.
+            List<GATKChunk> chunks = new ArrayList<>(nChunks);
+            long[] rawChunkData = readLongs(nChunks*2);
+            for (int ci = 0; ci < nChunks; ci++) {
+                final long chunkBegin = rawChunkData[ci*2];
+                final long chunkEnd = rawChunkData[ci*2+1];
+                chunks.add(new GATKChunk(chunkBegin, chunkEnd));
+            }
+            GATKBin bin = new GATKBin(referenceSequence, indexBin);
+            bin.setChunkList(chunks.toArray(new GATKChunk[chunks.size()]));
+            // Grow the sparse bin list (indexed by bin number) with nulls as needed.
+            while(indexBin >= bins.size())
+                bins.add(null);
+            bins.set(indexBin,bin);
+        }
+
+        final int nLinearBins = readInteger();
+        long[] linearIndexEntries = readLongs(nLinearBins);
+
+        LinearIndex linearIndex = new LinearIndex(referenceSequence,0,linearIndexEntries);
+
+        closeIndexFile();
+
+        return new GATKBAMIndexData(this,referenceSequence,bins,linearIndex);
+    }
+
+    /**
+     * Get the number of levels employed by this index.
+     * @return Number of levels in this index.
+     */
+    public static int getNumIndexLevels() {
+        return LEVEL_STARTS.length;
+    }
+
+    /**
+     * Gets the first bin in the given level.
+     * @param levelNumber Level number.  0-based.
+     * @return The first bin in this level.
+     */
+    public static int getFirstBinInLevel(final int levelNumber) {
+        return LEVEL_STARTS[levelNumber];
+    }
+
+    /**
+     * Gets the number of bins in the given level.
+     * @param levelNumber Level number.  0-based.
+     * @return The size (number of possible bins) of the given level.
+     */
+    public int getLevelSize(final int levelNumber) {
+        if(levelNumber == getNumIndexLevels()-1)
+            return MAX_BINS-LEVEL_STARTS[levelNumber]-1;
+        else
+            return LEVEL_STARTS[levelNumber+1]-LEVEL_STARTS[levelNumber];
+    }
+
+    /**
+     * Gets the level associated with the given bin number.
+     * @param bin The bin  for which to determine the level.
+     * @return the level associated with the given bin number.
+     */
+    public int getLevelForBin(final Bin bin) {
+        GATKBin gatkBin = new GATKBin(bin);
+        if(gatkBin.getBinNumber() >= MAX_BINS)
+            throw new ReviewedGATKException("Tried to get level for invalid bin in index file " + mFile);
+        for(int i = getNumIndexLevels()-1; i >= 0; i--) {
+            if(gatkBin.getBinNumber() >= LEVEL_STARTS[i])
+                return i;
+        }
+        throw new ReviewedGATKException("Unable to find correct bin for bin " + bin + " in index file " + mFile);
+    }
+
+    /**
+     * Gets the first locus that this bin can index into.
+     * @param bin The bin to test.
+     * @return The last position that the given bin can represent.
+     */
+    public int getFirstLocusInBin(final Bin bin) {
+        final int level = getLevelForBin(bin);
+        final int levelStart = LEVEL_STARTS[level];
+        final int levelSize = ((level==getNumIndexLevels()-1) ? MAX_BINS-1 : LEVEL_STARTS[level+1]) - levelStart;
+        return (new GATKBin(bin).getBinNumber() - levelStart)*(BIN_GENOMIC_SPAN /levelSize)+1;
+    }
+
+    /**
+     * Gets the last locus that this bin can index into.
+     * @param bin The bin to test.
+     * @return The last position that the given bin can represent.
+     */
+    public int getLastLocusInBin(final Bin bin) {
+        final int level = getLevelForBin(bin);
+        final int levelStart = LEVEL_STARTS[level];
+        final int levelSize = ((level==getNumIndexLevels()-1) ? MAX_BINS-1 : LEVEL_STARTS[level+1]) - levelStart;
+        return (new GATKBin(bin).getBinNumber()-levelStart+1)*(BIN_GENOMIC_SPAN /levelSize);
+    }
+
+    /**
+     * Use to get close to the unmapped reads at the end of a BAM file.
+     * @return The file offset of the first record in the last linear bin, or -1
+     * if there are no elements in linear bins (i.e. no mapped reads).
+     */
+    public long getStartOfLastLinearBin() {
+        openIndexFile();
+
+        seek(4);
+
+        final int sequenceCount = readInteger();
+        // Because no reads may align to the last sequence in the sequence dictionary,
+        // grab the last element of the linear index for each sequence, and return
+        // the last one from the last sequence that has one.
+        long lastLinearIndexPointer = -1;
+        for (int i = 0; i < sequenceCount; i++) {
+            // System.out.println("# Sequence TID: " + i);
+            final int nBins = readInteger();
+            // System.out.println("# nBins: " + nBins);
+            for (int j1 = 0; j1 < nBins; j1++) {
+                // Skip bin #
+                skipBytes(4);
+                final int nChunks = readInteger();
+                // Skip chunks
+                skipBytes(16 * nChunks);
+            }
+            final int nLinearBins = readInteger();
+            if (nLinearBins > 0) {
+                // Skip to last element of list of linear bins
+                skipBytes(8 * (nLinearBins - 1));
+                lastLinearIndexPointer = readLongs(1)[0];
+            }
+        }
+
+        closeIndexFile();
+
+        return lastLinearIndexPointer;
+    }
+
+    /**
+     * Gets the possible number of bins for a given reference sequence.
+     * @return How many bins could possibly be used according to this indexing scheme to index a single contig.
+     */
+    protected int getMaxAddressibleGenomicLocation() {
+        return BIN_GENOMIC_SPAN;
+    }
+
+    /**
+     * Positions the stream at the start of the given reference sequence's index data,
+     * memoizing the start offset of every sequence passed over along the way so that
+     * later queries can jump directly to the closest known position.
+     *
+     * @param referenceSequence 0-based reference sequence id to skip to.
+     */
+    protected void skipToSequence(final int referenceSequence) {
+        // Find the offset in the file of the last sequence whose position has been determined.  Start here
+        // when searching the sequence for the next value to read.  (Note that sequenceStartCache[0] will always
+        // be present, so no extra stopping condition is necessary.)
+        int sequenceIndex = referenceSequence;
+        while(sequenceStartCache[sequenceIndex] == -1)
+            sequenceIndex--;
+
+        // Advance to the most recently found position.
+        seek(sequenceStartCache[sequenceIndex]);
+
+        // Walk forward sequence by sequence, caching each start offset as we go.
+        for (int i = sequenceIndex; i < referenceSequence; i++) {
+            sequenceStartCache[i] = position();
+            // System.out.println("# Sequence TID: " + i);
+            final int nBins = readInteger();
+            // System.out.println("# nBins: " + nBins);
+            for (int j = 0; j < nBins; j++) {
+                /* final int bin = */
+                readInteger();
+                final int nChunks = readInteger();
+                // System.out.println("# bin[" + j + "] = " + bin + ", nChunks = " + nChunks);
+                skipBytes(16 * nChunks);
+            }
+            final int nLinearBins = readInteger();
+            // System.out.println("# nLinearBins: " + nLinearBins);
+            skipBytes(8 * nLinearBins);
+
+        }
+
+        sequenceStartCache[referenceSequence] = position();
+    }
+
+
+
+    /**
+     * Opens the index file and wires up the stream stack:
+     * raw file stream -> BAI adapter stream -> buffered stream, caching the stream length.
+     *
+     * NOTE(review): per its name, SamIndexes.asBaiSeekableStreamOrNull may return null;
+     * a null here would surface later as an NPE inside SeekableBufferedStream -- confirm
+     * that only BAI-convertible index files ever reach this class.
+     */
+    private void openIndexFile() {
+        try {
+            fileStream = new SeekableFileStream(mFile);
+            baiStream = SamIndexes.asBaiSeekableStreamOrNull(fileStream, sequenceDictionary);
+            bufferedStream = new SeekableBufferedStream(baiStream, BUFFERED_STREAM_BUFFER_SIZE);
+            fileLength=bufferedStream.length();
+        }
+        catch (IOException exc) {
+            throw new ReviewedGATKException("Unable to open index file (" + exc.getMessage() +")" + mFile, exc);
+        }
+    }
+
+    /**
+     * Closes the stream stack opened by openIndexFile() and resets the cached length.
+     *
+     * Each stream is closed in its own try/finally step so that a failure closing one
+     * stream does not leak the others (previously, an exception from
+     * bufferedStream.close() abandoned baiStream and fileStream).
+     *
+     * @throws ReviewedGATKException if any underlying close fails.
+     */
+    private void closeIndexFile() {
+        try {
+            try {
+                bufferedStream.close();
+            }
+            finally {
+                try {
+                    baiStream.close();
+                }
+                finally {
+                    fileStream.close();
+                }
+            }
+            fileLength = -1;
+        }
+        catch (IOException exc) {
+            throw new ReviewedGATKException("Unable to close index file " + mFile, exc);
+        }
+    }
+
+    private static final int INT_SIZE_IN_BYTES = Integer.SIZE / 8;
+    private static final int LONG_SIZE_IN_BYTES = Long.SIZE / 8;
+
+    private byte[] readBytes(int count) {
+        ByteBuffer buffer = getBuffer(count);
+        read(buffer);
+        buffer.flip();
+        byte[] contents = new byte[count];
+        buffer.get(contents);
+        return contents;
+    }
+
+    private int readInteger() {
+        ByteBuffer buffer = getBuffer(INT_SIZE_IN_BYTES);
+        read(buffer);
+        buffer.flip();
+        return buffer.getInt();
+    }
+
+    /**
+     * Reads an array of <count> longs from the file channel, returning the results as an array.
+     * @param count Number of longs to read.
+     * @return An array of longs.  Size of array should match count.
+     */
+    private long[] readLongs(final int count) {
+        ByteBuffer buffer = getBuffer(count*LONG_SIZE_IN_BYTES);
+        read(buffer);
+        buffer.flip();
+        long[] result = new long[count];
+        for(int i = 0; i < count; i++)
+            result[i] = buffer.getLong();
+        return result;
+    }
+
+    /**
+     * Fills the supplied buffer with exactly buffer.limit() bytes from the buffered stream.
+     *
+     * The reader has a rigid expectation: if fewer bytes remain in the file than requested,
+     * the index must be truncated or corrupt, so a UserException.MalformedFile is raised
+     * telling the user to re-index.
+     *
+     * @param buffer destination buffer; its limit defines how many bytes must be read.
+     * @throws UserException.MalformedFile if the file ends before the requested bytes.
+     * @throws ReviewedGATKException on an underlying I/O failure (cause preserved).
+     */
+    private void read(final ByteBuffer buffer) {
+        final int bytesRequested = buffer.limit();
+        if (bytesRequested == 0)
+            return;
+
+        try {
+            // SeekableBufferedStream cannot read directly into a ByteBuffer, so we read
+            // into byteArray and copy the result into the ByteBuffer afterwards.
+
+            // Check up front that enough data remains; if not, the index
+            // must be truncated or otherwise corrupt:
+            if(bytesRequested > fileLength - bufferedStream.position()){
+                throw new UserException.MalformedFile(mFile, String.format("Premature end-of-file while reading BAM index file %s. " +
+                        "It's likely that this file is truncated or corrupt -- " +
+                        "Please try re-indexing the corresponding BAM file.",
+                        mFile));
+            }
+
+            final int bytesRead = bufferedStream.read(byteArray, 0, bytesRequested);
+
+            // Encountering EOF (-1) or an empty read here also indicates the index
+            // must be truncated or otherwise corrupt:
+            if (bytesRead <= 0) {
+                throw new UserException.MalformedFile(mFile, String.format("Premature end-of-file while reading BAM index file %s. " +
+                        "It's likely that this file is truncated or corrupt -- " +
+                        "Please try re-indexing the corresponding BAM file.",
+                        mFile));
+            }
+
+            if(bytesRead != bytesRequested)
+                throw new RuntimeException("Read amount different from requested amount. This should not happen.");
+
+            buffer.put(byteArray, 0, bytesRequested);
+        }
+        catch(IOException ex) {
+            // Preserve the underlying cause (previously dropped).
+            throw new ReviewedGATKException("Index: unable to read bytes from index file " + mFile, ex);
+        }
+    }
+
+
+    /**
+     * A reusable buffer for use by this index generator.
+     * TODO: Should this be a SoftReference?
+     */
+    private ByteBuffer buffer = null;
+
+    //BufferedStream don't read into ByteBuffers, so we need this temporary array
+    private byte[] byteArray=null;
+    private ByteBuffer getBuffer(final int size) {
+        if(buffer == null || buffer.capacity() < size) {
+            // Allocate a new byte buffer.  For now, make it indirect to make sure it winds up on the heap for easier debugging.
+            buffer = ByteBuffer.allocate(size);
+            byteArray = new byte[size];
+            buffer.order(ByteOrder.LITTLE_ENDIAN);
+        }
+        buffer.clear();
+        buffer.limit(size);
+        return buffer;
+    }
+
+    private void skipBytes(final int count) {
+        try {
+
+            //try to skip forward the requested amount.
+            long skipped =  bufferedStream.skip(count);
+
+            if( skipped != count ) { //if not managed to skip the requested amount
+                throw new ReviewedGATKException("Index: unable to reposition file channel of index file " + mFile);
+            }
+        }
+        catch(IOException ex) {
+            throw new ReviewedGATKException("Index: unable to reposition file channel of index file " + mFile);
+        }
+    }
+
+    /**
+     * Repositions the buffered stream to the given absolute offset in the index file.
+     *
+     * @param position absolute byte offset within the index file.
+     * @throws ReviewedGATKException if the underlying stream cannot be repositioned
+     *         (I/O cause preserved).
+     */
+    private void seek(final long position) {
+        try {
+            // To seek a new position, reposition the buffered stream directly.
+            bufferedStream.seek(position);
+        }
+        catch(IOException ex) {
+            // Preserve the cause; also fixes the message typo "reposition of file channel".
+            throw new ReviewedGATKException("Index: unable to reposition file channel of index file " + mFile, ex);
+        }
+    }
+
+    /**
+     * Retrieve the position from the current file channel.
+     * @return position of the current file channel.
+     */
+    private long position() {
+        try {
+            return bufferedStream.position();
+        }
+        catch (IOException exc) {
+            throw new ReviewedGATKException("Unable to read position from index file " + mFile, exc);
+        }
+    }
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/GATKBAMIndexData.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/GATKBAMIndexData.java
new file mode 100644
index 0000000..80d5ba0
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/GATKBAMIndexData.java
@@ -0,0 +1,121 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.datasources.reads;
+
+import htsjdk.samtools.*;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+
+/**
+ * Stores and processes a single reference worth of GATK data.
+ */
+public class GATKBAMIndexData {
+    private final GATKBAMIndex index;
+    private final int referenceSequence;
+    private final List<GATKBin> bins;
+    private final LinearIndex linearIndex;
+
+    public GATKBAMIndexData(final GATKBAMIndex index, final int referenceSequence, final List<GATKBin> bins, final LinearIndex linearIndex) {
+        this.index = index;
+        this.referenceSequence = referenceSequence;
+        this.bins = bins;
+        this.linearIndex = linearIndex;
+    }
+
+    public int getReferenceSequence() {
+        return referenceSequence;
+    }
+
+    /**
+     * Perform an overlapping query of all bins bounding the given location.
+     * @param bin The bin over which to perform an overlapping query.
+     * @return The file pointers
+     */
+    /**
+     * Perform an overlapping query of all bins bounding the given location.
+     * Collects the chunks of the given bin plus every ancestor bin at coarser
+     * levels that covers the same locus, then prunes/coalesces them using the
+     * linear index.
+     *
+     * @param bin The bin over which to perform an overlapping query; may be null.
+     * @return The file pointers covering the bin, or null if bin is null.
+     */
+    public GATKBAMFileSpan getSpanOverlapping(final Bin bin) {
+        if(bin == null)
+            return null;
+
+        GATKBin gatkBin = new GATKBin(bin);
+
+        final int binLevel = index.getLevelForBin(bin);
+        final int firstLocusInBin = index.getFirstLocusInBin(bin);
+
+        // Add the specified bin to the tree if it exists.
+        List<GATKBin> binTree = new ArrayList<GATKBin>();
+        if(gatkBin.getBinNumber() < bins.size() && bins.get(gatkBin.getBinNumber()) != null)
+            binTree.add(bins.get(gatkBin.getBinNumber()));
+
+        // Walk up through the coarser levels, adding each ancestor bin that covers this locus.
+        int currentBinLevel = binLevel;
+        while(--currentBinLevel >= 0) {
+            final int binStart = index.getFirstBinInLevel(currentBinLevel);
+            final int binWidth = index.getMaxAddressibleGenomicLocation()/index.getLevelSize(currentBinLevel);
+            final int binNumber = firstLocusInBin/binWidth + binStart;
+            if(binNumber < bins.size() && bins.get(binNumber) != null)
+                binTree.add(bins.get(binNumber));
+        }
+
+        // Gather a defensive copy of every chunk from every covering bin.
+        List<GATKChunk> chunkList = new ArrayList<GATKChunk>();
+        for(GATKBin coveringBin: binTree) {
+            for(GATKChunk chunk: coveringBin.getChunkList())
+                chunkList.add(chunk.clone());
+        }
+
+        // Use the linear index to drop chunks that end before the first possibly-relevant offset.
+        final int start = index.getFirstLocusInBin(bin);
+        chunkList = optimizeChunkList(chunkList,linearIndex.getMinimumOffset(start));
+        return new GATKBAMFileSpan(chunkList.toArray(new GATKChunk[chunkList.size()]));
+    }
+
+    /**
+     * Sorts the chunk list, drops chunks that end at or before minimumOffset
+     * (linear-index optimization), and coalesces overlapping or adjacent chunks
+     * into single spans to minimize seeks.
+     *
+     * Note: coalescing mutates the retained chunks in place (setChunkEnd), which is
+     * safe because getSpanOverlapping passes clones.
+     *
+     * @param chunks candidate chunks (will be sorted in place).
+     * @param minimumOffset smallest virtual file offset that could contain relevant reads.
+     * @return the pruned, coalesced chunk list.
+     */
+    private List<GATKChunk> optimizeChunkList(final List<GATKChunk> chunks, final long minimumOffset) {
+        GATKChunk lastChunk = null;
+        Collections.sort(chunks);
+        final List<GATKChunk> result = new ArrayList<GATKChunk>();
+        for (final GATKChunk chunk : chunks) {
+            if (chunk.getChunkEnd() <= minimumOffset) {
+                continue;               // linear index optimization
+            }
+            if (result.isEmpty()) {
+                result.add(chunk);
+                lastChunk = chunk;
+                continue;
+            }
+            // Coalesce chunks that are in adjacent file blocks.
+            // This is a performance optimization.
+            if (!lastChunk.overlaps(chunk) && !lastChunk.isAdjacentTo(chunk)) {
+                result.add(chunk);
+                lastChunk = chunk;
+            } else {
+                // Extend the previous chunk rather than adding a new one.
+                if (chunk.getChunkEnd() > lastChunk.getChunkEnd()) {
+                    lastChunk.setChunkEnd(chunk.getChunkEnd());
+                }
+            }
+        }
+        return result;
+    }
+
+
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/IntervalOverlapFilteringIterator.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/IntervalOverlapFilteringIterator.java
new file mode 100644
index 0000000..f992cf1
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/IntervalOverlapFilteringIterator.java
@@ -0,0 +1,205 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.datasources.reads;
+
+import htsjdk.samtools.SAMRecord;
+import htsjdk.samtools.util.CloseableIterator;
+import org.broadinstitute.gatk.utils.GenomeLoc;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+import org.broadinstitute.gatk.utils.sam.AlignmentUtils;
+
+import java.util.List;
+import java.util.NoSuchElementException;
+
+/**
+ * High efficiency filtering iterator designed to filter out reads only included
+ * in the query results due to the granularity of the BAM index.
+ *
+ * Built into the BAM index is a notion of 16kbase granularity -- an index query for
+ * two regions contained within a 16kbase chunk (say, chr1:5-10 and chr1:11-20) will
+ * return exactly the same regions within the BAM file.  This iterator is optimized
+ * to subtract out reads which do not at all overlap the interval list passed to the
+ * constructor.
+ *
+ * Example:
+ * interval list: chr20:6-10
+ * Reads that would pass through the filter: chr20:6-10, chr20:1-15, chr20:1-7, chr20:8-15.
+ * Reads that would be discarded by the filter: chr20:1-5, chr20:11-15.
+ */
+class IntervalOverlapFilteringIterator implements CloseableIterator<SAMRecord> {
+    /**
+     * The wrapped iterator.
+     */
+    private CloseableIterator<SAMRecord> iterator;
+
+    /**
+     * The next read, queued up and ready to go.
+     */
+    private SAMRecord nextRead;
+
+    /**
+     * Rather than using the straight genomic bounds, use filter out only mapped reads.
+     */
+    private boolean keepOnlyUnmappedReads;
+
+    /**
+     * Custom representation of interval bounds.
+     * Makes it simpler to track current position.
+     */
+    private int[] intervalContigIndices;
+    private int[] intervalStarts;
+    private int[] intervalEnds;
+
+    /**
+     * Position within the interval list.
+     */
+    private int currentBound = 0;
+
+    /**
+     * Wraps the given iterator, filtering out reads that do not overlap the supplied intervals.
+     *
+     * The interval list must be homogeneous: either all mapped intervals or all
+     * unmapped ("-L unmapped") intervals; mixing the two is rejected.
+     *
+     * @param iterator the underlying read iterator to filter.
+     * @param intervals the intervals against which reads are checked for overlap.
+     * @throws ReviewedGATKException if the interval list mixes mapped and unmapped intervals.
+     */
+    public IntervalOverlapFilteringIterator(CloseableIterator<SAMRecord> iterator, List<GenomeLoc> intervals) {
+        this.iterator = iterator;
+
+        // Look at the interval list to detect whether we should worry about unmapped reads.
+        // If we find a mix of mapped/unmapped intervals, throw an exception.
+        boolean foundMappedIntervals = false;
+        for(GenomeLoc location: intervals) {
+            if(! GenomeLoc.isUnmapped(location))
+                foundMappedIntervals = true;
+            keepOnlyUnmappedReads |= GenomeLoc.isUnmapped(location);
+        }
+
+        if(foundMappedIntervals) {
+            if(keepOnlyUnmappedReads)
+                throw new ReviewedGATKException("Tried to apply IntervalOverlapFilteringIterator to a mix of mapped and unmapped intervals.  Please apply this filter to only mapped or only unmapped reads");
+            // Flatten the intervals into parallel primitive arrays for cheap per-read bounds checks.
+            this.intervalContigIndices = new int[intervals.size()];
+            this.intervalStarts = new int[intervals.size()];
+            this.intervalEnds = new int[intervals.size()];
+            int i = 0;
+            for(GenomeLoc interval: intervals) {
+                intervalContigIndices[i] = interval.getContigIndex();
+                intervalStarts[i] = interval.getStart();
+                intervalEnds[i] = interval.getStop();
+                i++;
+            }
+        }
+
+        // Prime nextRead so hasNext()/next() work immediately.
+        advance();
+    }
+
+    public boolean hasNext() {
+        return nextRead != null;
+    }
+
+    public SAMRecord next() {
+        if(nextRead == null)
+            throw new NoSuchElementException("No more reads left in this iterator.");
+        SAMRecord currentRead = nextRead;
+        advance();
+        return currentRead;
+    }
+
+    public void remove() {
+        throw new UnsupportedOperationException("Cannot remove from an IntervalOverlapFilteringIterator");
+    }
+
+
+    public void close() {
+        iterator.close();
+    }
+
+    /**
+     * Advances to the next read overlapping the intervals (or the next -L UNMAPPED
+     * read when keepOnlyUnmappedReads is set), leaving it in nextRead; nextRead is
+     * null when the wrapped iterator is exhausted or no more intervals remain.
+     *
+     * NOTE(review): when the interval list is empty, foundMappedIntervals stays false
+     * in the constructor, so intervalStarts is null and the loop condition below would
+     * NPE -- confirm callers never pass an empty interval list.
+     */
+    private void advance() {
+        nextRead = null;
+
+        if(!iterator.hasNext())
+            return;
+
+        SAMRecord candidateRead = iterator.next();
+        while(nextRead == null && (keepOnlyUnmappedReads || currentBound < intervalStarts.length)) {
+            if(!keepOnlyUnmappedReads) {
+                // Mapped read filter; check against GenomeLoc-derived bounds.
+                if(readEndsOnOrAfterStartingBound(candidateRead)) {
+                    // This read ends after the current interval begins.
+                    // Promising, but this read must be checked against the ending bound.
+                    if(readStartsOnOrBeforeEndingBound(candidateRead)) {
+                        // Yes, this read is within both bounds.  This must be our next read.
+                        nextRead = candidateRead;
+                        break;
+                    }
+                    else {
+                        // Oops, we're past the end bound.  Increment the current bound and try again.
+                        currentBound++;
+                        continue;
+                    }
+                }
+            }
+            else {
+                // Found a -L UNMAPPED read. NOTE: this is different than just being flagged as unmapped! We're done.
+                if(AlignmentUtils.isReadGenomeLocUnmapped(candidateRead)) {
+                    nextRead = candidateRead;
+                    break;
+                }
+            }
+
+            // No more reads available.  Stop the search.
+            if(!iterator.hasNext())
+                break;
+
+            // No reasonable read found; advance the iterator.
+            candidateRead = iterator.next();
+        }
+    }
+
+    /**
+     * Check whether the read ends on or after the start of the current bound.  If the read
+     * is unmapped but placed, its reported end may be distorted, so for such reads rely
+     * only on the alignment start.
+     * @param read The read to position-check.
+     * @return True if the read ends on or after the start of the current bound.  False otherwise.
+     */
+    private boolean readEndsOnOrAfterStartingBound(final SAMRecord read) {
+        return
+                // Read ends on a later contig, or...
+                read.getReferenceIndex() > intervalContigIndices[currentBound] ||
+                        // Read ends on this contig...
+                        (read.getReferenceIndex() == intervalContigIndices[currentBound] &&
+                                // either on or after this interval's start, or...
+                                (read.getAlignmentEnd() >= intervalStarts[currentBound] ||
+                                        // read is unmapped but positioned and alignment start is on or after this start point.
+                                        (read.getReadUnmappedFlag() && read.getAlignmentStart() >= intervalStarts[currentBound])));
+    }
+
+    /**
+     * Check whether the read starts on or before the end of the current bound.
+     * @param read The read to position-check.
+     * @return True if the read starts on or before the end of the current bound.  False otherwise.
+     */
+    private boolean readStartsOnOrBeforeEndingBound(final SAMRecord read) {
+        return
+                // Read starts on a prior contig, or...
+                read.getReferenceIndex() < intervalContigIndices[currentBound] ||
+                        // Read starts on this contig and the alignment start is registered before this end point.
+                        (read.getReferenceIndex() == intervalContigIndices[currentBound] && read.getAlignmentStart() <= intervalEnds[currentBound]);
+    }
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/IntervalSharder.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/IntervalSharder.java
new file mode 100644
index 0000000..fd5b73b
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/IntervalSharder.java
@@ -0,0 +1,93 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.datasources.reads;
+
+import htsjdk.samtools.util.PeekableIterator;
+import org.broadinstitute.gatk.utils.GenomeLocParser;
+import org.broadinstitute.gatk.utils.GenomeLocSortedSet;
+import org.broadinstitute.gatk.utils.interval.IntervalMergingRule;
+
+import java.util.Iterator;
+
+/**
+ * Handles the process of aggregating BAM intervals into individual shards.
+ * TODO: The task performed by IntervalSharder is now better performed by LocusShardBalancer.  Merge BAMScheduler and IntervalSharder.
+ */
+public class IntervalSharder implements Iterator<FilePointer> {
+    /**
+     * The iterator actually laying out the data for BAM scheduling.
+     */
+    private final PeekableIterator<FilePointer> wrappedIterator;
+
+    /**
+     * The parser, for interval manipulation.
+     */
+    private final GenomeLocParser parser;
+
+    /**
+     * Create a sharder spanning all reads in the data source, mapped and unmapped.
+     * @param dataSource Source of the reads to shard.
+     * @param parser The parser, for interval manipulation.
+     * @return A sharder over the entire data source.
+     */
+    public static IntervalSharder shardOverAllReads(final SAMDataSource dataSource, final GenomeLocParser parser) {
+        return new IntervalSharder(BAMScheduler.createOverAllReads(dataSource,parser),parser);
+    }
+
+    /**
+     * Create a sharder spanning only the mapped reads in the data source.
+     * @param dataSource Source of the reads to shard.
+     * @param parser The parser, for interval manipulation.
+     * @return A sharder over the mapped portion of the data source.
+     */
+    public static IntervalSharder shardOverMappedReads(final SAMDataSource dataSource, final GenomeLocParser parser) {
+        return new IntervalSharder(BAMScheduler.createOverMappedReads(dataSource),parser);
+    }
+
+    /**
+     * Create a sharder restricted to a given set of intervals.
+     * @param dataSource Source of the reads to shard.
+     * @param loci Intervals over which to shard; also supplies the parser.
+     * @param intervalMergeRule Rule governing how adjacent/overlapping intervals are merged.
+     * @return A sharder over the requested intervals.
+     */
+    public static IntervalSharder shardOverIntervals(final SAMDataSource dataSource, final GenomeLocSortedSet loci, final IntervalMergingRule intervalMergeRule) {
+        return new IntervalSharder(BAMScheduler.createOverIntervals(dataSource,intervalMergeRule,loci),loci.getGenomeLocParser());
+    }
+
+    /**
+     * Private constructor; use the static shardOver* factories above.
+     * @param scheduler Scheduler producing the raw, granular file pointers.
+     * @param parser The parser, for interval manipulation.
+     */
+    private IntervalSharder(final BAMScheduler scheduler, final GenomeLocParser parser) {
+        wrappedIterator = new PeekableIterator<FilePointer>(scheduler);
+        this.parser = parser;
+    }
+    /** Close the wrapped scheduler iterator, releasing any resources it holds. */
+    public void close() {
+      wrappedIterator.close();
+    }
+
+    /** @return True if at least one more file pointer is available. */
+    public boolean hasNext() {
+        return wrappedIterator.hasNext();
+    }
+
+    /**
+     * Accumulate shards where there's no additional cost to processing the next shard in the sequence.
+     * @return The next file pointer to process.
+     */
+    public FilePointer next() {
+        FilePointer current = wrappedIterator.next();
+
+        // Keep combining subsequent file pointers while they share mapped/unmapped status,
+        // lie on the same contig (or are all unmapped), and FilePointer.minus reports zero
+        // distance between them.
+        while ( wrappedIterator.hasNext() &&
+                current.isRegionUnmapped == wrappedIterator.peek().isRegionUnmapped &&
+                (current.getContigIndex() == wrappedIterator.peek().getContigIndex() || current.isRegionUnmapped) &&
+                current.minus(wrappedIterator.peek()) == 0 ) {
+
+            current = current.combine(parser,wrappedIterator.next());
+        }
+
+        return current;
+    }
+
+    /** Removal is not supported by this iterator. */
+    public void remove() { throw new UnsupportedOperationException("Unable to remove from an interval sharder."); }
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/LocusShard.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/LocusShard.java
new file mode 100644
index 0000000..d84aa5d
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/LocusShard.java
@@ -0,0 +1,61 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.datasources.reads;
+
+import htsjdk.samtools.SAMFileSpan;
+import org.broadinstitute.gatk.utils.GenomeLoc;
+import org.broadinstitute.gatk.utils.GenomeLocParser;
+import org.broadinstitute.gatk.utils.Utils;
+import org.broadinstitute.gatk.utils.sam.SAMReaderID;
+
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Handles locus shards of BAM information.
+ * @author aaron
+ * @version 1.0
+ * @date Apr 7, 2009
+ */
+public class LocusShard extends Shard {
+    /**
+     * Create a new locus shard, divided by index.
+     * @param parser The parser, for interval manipulation.
+     * @param dataSource The reads data source backing this shard.
+     * @param intervals List of intervals to process.
+     * @param fileSpans File spans associated with that interval.
+     */
+    public LocusShard(GenomeLocParser parser, SAMDataSource dataSource, List<GenomeLoc> intervals, Map<SAMReaderID,SAMFileSpan> fileSpans) {
+        super(parser, ShardType.LOCUS, intervals, dataSource, fileSpans, false);
+    }
+
+    /**
+     * String representation of this shard.
+     * @return A string representation of the boundaries of this shard, joined by ';'.
+     */
+    @Override
+    public String toString() {
+        return Utils.join(";",getGenomeLocs());
+    }
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/LocusShardBalancer.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/LocusShardBalancer.java
new file mode 100644
index 0000000..cf4286d
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/LocusShardBalancer.java
@@ -0,0 +1,58 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.datasources.reads;
+
+import java.util.Iterator;
+
+/**
+ * Batch granular file pointers into potentially larger shards.
+ */
+public class LocusShardBalancer extends ShardBalancer {
+    /**
+     * Convert iterators of file pointers into balanced iterators of shards.
+     * Each incoming FilePointer maps one-to-one onto a LocusShard; no further
+     * combining is performed at this level.
+     * @return An iterator over balanced shards.
+     */
+    public Iterator<Shard> iterator() {
+        return new Iterator<Shard>() {
+            /** @return True if another file pointer (and therefore another shard) remains. */
+            public boolean hasNext() {
+                return filePointers.hasNext();
+            }
+
+            /** @return The next file pointer, wrapped as a locus shard. */
+            public Shard next() {
+                FilePointer current = filePointers.next();
+
+                // FilePointers have already been combined as necessary at the IntervalSharder level. No
+                // need to do so again here.
+
+                return new LocusShard(parser,readsDataSource,current.getLocations(),current.fileSpans);
+            }
+
+            /** Removal is not supported by this iterator. */
+            public void remove() {
+                throw new UnsupportedOperationException("Unable to remove from shard balancing iterator");
+            }
+        };
+    }
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/ReadShard.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/ReadShard.java
new file mode 100644
index 0000000..399e4bc
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/ReadShard.java
@@ -0,0 +1,271 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.datasources.reads;
+
+import htsjdk.samtools.util.PeekableIterator;
+import htsjdk.samtools.*;
+import org.broadinstitute.gatk.utils.iterators.GATKSAMIterator;
+import org.broadinstitute.gatk.utils.iterators.GATKSAMIteratorAdapter;
+import org.broadinstitute.gatk.utils.GenomeLoc;
+import org.broadinstitute.gatk.utils.GenomeLocParser;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+import org.broadinstitute.gatk.utils.sam.SAMReaderID;
+
+import java.util.*;
+
+/**
+ *
+ * User: aaron
+ * Date: Apr 10, 2009
+ * Time: 5:03:13 PM
+ *
+ * The Broad Institute
+ * SOFTWARE COPYRIGHT NOTICE AGREEMENT 
+ * This software and its documentation are copyright 2009 by the
+ * Broad Institute/Massachusetts Institute of Technology. All rights are reserved.
+ *
+ * This software is supplied without any warranty or guaranteed support whatsoever. Neither
+ * the Broad Institute nor MIT can be responsible for its use, misuse, or functionality.
+ *
+ */
+
+/**
+ * Expresses a shard of read data in block format.
+ *
+ * @author mhanna
+ * @version 0.1
+ */
+public class ReadShard extends Shard {
+
+    /**
+     * Default read shard buffer size
+     */
+    public static final int DEFAULT_MAX_READS = 10000;
+
+    /**
+     * What is the maximum number of reads per BAM file which should go into a read shard.
+     *
+     * TODO: this non-final static variable should either be made final or turned into an
+     * TODO: instance variable somewhere -- as both static and mutable it wreaks havoc
+     * TODO: with tests that use multiple instances of SAMDataSource (since SAMDataSource
+     * TODO: changes this value)
+     */
+    public static int MAX_READS = DEFAULT_MAX_READS;
+
+    /**
+     * The reads making up this shard.
+     */
+    private final Collection<SAMRecord> reads = new ArrayList<SAMRecord>(MAX_READS);
+
+    /**
+     * Create a new read shard.
+     * @param parser The parser, for interval manipulation.
+     * @param readsDataSource The reads data source backing this shard.
+     * @param fileSpans File spans associated with this shard, one per reader.
+     * @param loci List of intervals this shard covers.
+     * @param isUnmapped True if this shard covers only the unmapped-read region.
+     */
+    public ReadShard(GenomeLocParser parser, SAMDataSource readsDataSource, Map<SAMReaderID,SAMFileSpan> fileSpans, List<GenomeLoc> loci, boolean isUnmapped) {
+        super(parser, ShardType.READ, loci, readsDataSource, fileSpans, isUnmapped);
+    }
+
+    /**
+     * Sets the maximum number of reads buffered in a read shard.  Implemented as a weirdly static interface
+     * until we know what effect tuning this parameter has.
+     *
+     * TODO: this mutable static interface is awful and breaks tests -- need to refactor
+     *
+     * @param bufferSize New maximum number
+     */
+    static void setReadBufferSize(final int bufferSize) {
+        MAX_READS = bufferSize;
+    }
+
+    /**
+     * What read buffer size are we using?
+     *
+     * @return The current maximum number of reads buffered per shard (MAX_READS).
+     */
+    public static int getReadBufferSize() {
+        return MAX_READS;
+    }
+
+    /**
+     * Returns true if this shard is meant to buffer reads, rather
+     * than just holding pointers to their locations.
+     * @return True if this shard can buffer reads.  False otherwise.
+     */
+    public boolean buffersReads() {
+        return true;
+    }
+
+    /**
+     * Returns true if the read buffer is currently empty.
+     * @return True if this shard's buffer holds no reads.  False otherwise.
+     */
+    public boolean isBufferEmpty() {
+        return reads.size() == 0;
+    }
+
+    /**
+     * Returns true if the read buffer is currently full.
+     * NOTE(review): the strict '>' means the buffer only reports full once it holds
+     * MAX_READS + 1 reads -- presumably deliberate slack for the paired-read stuffing
+     * described in addRead(); confirm before changing.
+     * @return True if this shard's buffer is full (and the shard can buffer reads).
+     */
+    public boolean isBufferFull() {
+        return reads.size() > ReadShard.MAX_READS;
+    }
+
+    /**
+     * Adds a read to the read buffer.
+     * @param read Add a read to the internal shard buffer.
+     */
+    public void addRead(SAMRecord read) {
+        // DO NOT validate that the buffer is full.  Paired read sharding will occasionally have to stuff another
+        // read or two into the buffer.
+        reads.add(read);
+    }
+
+    /**
+     * Fills this shard's buffer with reads from the iterator passed in.
+     * Reads are drawn until the buffer fills or a read on a different contig is seen;
+     * additional reads are then pulled to keep same-start (coordinate-sorted input) or
+     * same-name (queryname-sorted input) groups together within one shard.
+     *
+     * @param readIter Iterator from which to draw the reads to fill the shard
+     */
+    @Override
+    public void fill( PeekableIterator<SAMRecord> readIter ) {
+        if( ! buffersReads() )
+            throw new ReviewedGATKException("Attempting to fill a non-buffering shard.");
+
+        SAMFileHeader.SortOrder sortOrder = getReadProperties().getSortOrder();
+        SAMRecord read = null;
+
+        while( ! isBufferFull() && readIter.hasNext() ) {
+            final SAMRecord nextRead = readIter.peek();
+            if ( read == null || (nextRead.getReferenceIndex().equals(read.getReferenceIndex())) ) {
+                // only add reads to the shard if they are on the same contig
+                read = readIter.next();
+                addRead(read);
+            } else {
+                break;
+            }
+        }
+
+        // If the reads are sorted in coordinate order, ensure that all reads
+        // having the same alignment start become part of the same shard, to allow
+        // downsampling to work better across shard boundaries. Note that because our
+        // read stream has already been fed through the positional downsampler, which
+        // ensures that at each alignment start position there are no more than dcov
+        // reads, we're in no danger of accidentally creating a disproportionately huge
+        // shard
+        if ( sortOrder == SAMFileHeader.SortOrder.coordinate ) {
+            while ( readIter.hasNext() ) {
+                SAMRecord additionalRead = readIter.peek();
+
+                // Stop filling the shard as soon as we encounter a read having a different
+                // alignment start or contig from the last read added in the earlier loop
+                // above, or an unmapped read
+                if ( read == null ||
+                     additionalRead.getReadUnmappedFlag() ||
+                     ! additionalRead.getReferenceIndex().equals(read.getReferenceIndex()) ||
+                     additionalRead.getAlignmentStart() != read.getAlignmentStart() ) {
+                    break;
+                }
+
+                addRead(readIter.next());
+            }
+        }
+
+        // If the reads are sorted in queryname order, ensure that all reads
+        // having the same queryname become part of the same shard.
+        if( sortOrder == SAMFileHeader.SortOrder.queryname ) {
+            while( readIter.hasNext() ) {
+                SAMRecord nextRead = readIter.peek();
+                if( read == null || ! read.getReadName().equals(nextRead.getReadName()) )
+                    break;
+                addRead(readIter.next());
+            }
+        }
+    }
+
+    /**
+     * Creates an iterator over reads stored in this shard's read cache.
+     * @return A GATKSAMIterator over the buffered reads.
+     */
+    public GATKSAMIterator iterator() {
+        return GATKSAMIteratorAdapter.adapt(reads.iterator());
+    }
+
+    /**
+     * String representation of this shard.
+     * @return A string representation of the boundaries of this shard
+     *         (each reader ID with its file span).
+     */
+    @Override
+    public String toString() {
+        StringBuilder sb = new StringBuilder();
+        for(Map.Entry<SAMReaderID,SAMFileSpan> entry: getFileSpans().entrySet()) {
+            sb.append(entry.getKey());
+            sb.append(": ");
+            sb.append(entry.getValue());
+            sb.append(' ');
+        }
+        return sb.toString();
+    }
+
+    /**
+     * Get the full span from the start of the left most read to the end of the right most one
+     *
+     * Note this may be different than the getLocation() of the shard, as this reflects the
+     * targeted span, not the actual span of reads
+     *
+     * @return the genome loc representing the span of these reads on the genome
+     */
+    public GenomeLoc getReadsSpan() {
+        if ( isUnmapped() || super.getGenomeLocs() == null || reads.isEmpty() )
+            return super.getLocation();
+        else {
+            int start = Integer.MAX_VALUE;
+            int stop = Integer.MIN_VALUE;
+            String contig = null;
+            boolean foundMapped = false;
+
+            for ( final SAMRecord read : reads ) {
+                if ( contig != null && ! read.getReferenceName().equals(contig) )
+                    throw new ReviewedGATKException("ReadShard contains reads spanning contig boundaries, which is no longer allowed. "
+                            + "First contig is " + contig + " next read was " + read.getReferenceName() );
+                contig = read.getReferenceName();
+
+                // Even if this shard as a *whole* is not "unmapped", we can still encounter *individual* unmapped mates
+                // of mapped reads within this shard's buffer. In fact, if we're very unlucky with shard boundaries,
+                // this shard might consist *only* of unmapped mates! We need to refrain from using the alignment
+                // starts/stops of these unmapped mates, and detect the case where the shard has been filled *only*
+                // with unmapped mates.
+                if ( ! read.getReadUnmappedFlag() ) {
+                    foundMapped = true;
+                    if ( read.getAlignmentStart() < start ) start = read.getAlignmentStart();
+                    if ( read.getAlignmentEnd() > stop ) stop = read.getAlignmentEnd();
+                }
+            }
+
+            assert contig != null;
+
+            if ( ! foundMapped || contig.equals("*") ) // all reads are unmapped
+                return GenomeLoc.UNMAPPED;
+            else
+                return parser.createGenomeLoc(contig, start, stop);
+        }
+    }
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/ReadShardBalancer.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/ReadShardBalancer.java
new file mode 100644
index 0000000..5b52c8f
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/ReadShardBalancer.java
@@ -0,0 +1,231 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.datasources.reads;
+
+import htsjdk.samtools.util.PeekableIterator;
+import htsjdk.samtools.SAMRecord;
+import org.apache.log4j.Logger;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+
+import java.util.*;
+
+/**
+ * Convert from an unbalanced iterator over FilePointers to a balanced iterator over Shards.
+ *
+ * When processing FilePointers, our strategy is to aggregate all FilePointers for each contig
+ * together into one monolithic FilePointer, create one persistent set of read iterators over
+ * that monolithic FilePointer, and repeatedly use that persistent set of read iterators to
+ * fill read shards with reads.
+ *
+ * This strategy has several important advantages:
+ *
+ * 1. We avoid issues with file span overlap. FilePointers that are more granular than a whole
+ *    contig will have regions that overlap with other FilePointers on the same contig, due
+ *    to the limited granularity of BAM index data. By creating only one FilePointer per contig,
+ *    we avoid having to track how much of each file region we've visited (as we did in the
+ *    former implementation), we avoid expensive non-sequential access patterns in the files,
+ *    and we avoid having to repeatedly re-create our iterator chain for every small region
+ *    of interest.
+ *
+ * 2. We avoid boundary issues with the engine-level downsampling. Since we create a single
+ *    persistent set of read iterators (which include the downsampling iterator(s)) per contig,
+ *    the downsampling process is never interrupted by FilePointer or Shard boundaries, and never
+ *    loses crucial state information while downsampling within a contig.
+ *
+ * TODO: There is also at least one important disadvantage:
+ *
+ * 1. We load more BAM index data into memory at once, and this work is done upfront before processing
+ *    the next contig, creating a delay before traversal of each contig. This delay may be
+ *    compensated for by the gains listed in #1 above, and we may be no worse off overall in
+ *    terms of total runtime, but we need to verify this empirically.
+ *
+ * @author David Roazen
+ */
+public class ReadShardBalancer extends ShardBalancer {
+
+    private static Logger logger = Logger.getLogger(ReadShardBalancer.class);
+
+    /**
+     * Convert iterators of file pointers into balanced iterators of shards.
+     * @return An iterator over balanced shards.
+     */
+    public Iterator<Shard> iterator() {
+        return new Iterator<Shard>() {
+            /**
+             * The cached shard to be returned next.  Prefetched in the peekable iterator style.
+             */
+            private Shard nextShard = null;
+
+            /**
+             * The file pointer currently being processed.
+             */
+            private FilePointer currentContigFilePointer = null;
+
+            /**
+             * Iterator over the reads from the current contig's file pointer. The same iterator will be
+             * used to fill all shards associated with a given file pointer
+             */
+            private PeekableIterator<SAMRecord> currentContigReadsIterator = null;
+
+            /**
+             * How many FilePointers have we pulled from the filePointers iterator?
+             */
+            private int totalFilePointersConsumed = 0;
+
+            /**
+             * Have we encountered a monolithic FilePointer?
+             */
+            private boolean encounteredMonolithicFilePointer = false;
+
+            // Instance initializer: prime the first contig's file pointer and prefetch
+            // the first shard before any hasNext()/next() call.
+            {
+                createNextContigFilePointer();
+                advance();
+            }
+
+            /** @return True if a prefetched shard is available. */
+            public boolean hasNext() {
+                return nextShard != null;
+            }
+
+            /**
+             * Return the prefetched shard and prefetch the next one.
+             * @return The next shard.
+             * @throws NoSuchElementException if no shard remains.
+             */
+            public Shard next() {
+                if ( ! hasNext() )
+                    throw new NoSuchElementException("No next read shard available");
+                Shard currentShard = nextShard;
+                advance();
+                return currentShard;
+            }
+
+            /**
+             * Prefetch the next non-empty shard into nextShard, advancing to the next
+             * contig's file pointer whenever the current reads iterator is exhausted.
+             * Leaves nextShard null when no more data remains.
+             */
+            private void advance() {
+                nextShard = null;
+
+                // May need multiple iterations to fill the next shard if all reads in current file spans get filtered/downsampled away
+                while ( nextShard == null && currentContigFilePointer != null ) {
+
+                    // If we've exhausted the current file pointer of reads, move to the next file pointer (if there is one):
+                    if ( currentContigReadsIterator != null && ! currentContigReadsIterator.hasNext() ) {
+
+                        // Close the old, exhausted chain of iterators to release resources
+                        currentContigReadsIterator.close();
+
+                        // Advance to the FilePointer for the next contig
+                        createNextContigFilePointer();
+
+                        // We'll need to create a fresh iterator for this file pointer when we create the first
+                        // shard for it below.
+                        currentContigReadsIterator = null;
+                    }
+
+                    // At this point our currentContigReadsIterator may be null or non-null depending on whether or not
+                    // this is our first shard for this file pointer.
+                    if ( currentContigFilePointer != null ) {
+                        Shard shard = new ReadShard(parser,readsDataSource, currentContigFilePointer.fileSpans, currentContigFilePointer.locations, currentContigFilePointer.isRegionUnmapped);
+
+                        // Create a new reads iterator only when we've just advanced to the file pointer for the next
+                        // contig. It's essential that the iterators persist across all shards that share the same contig
+                        // to allow the downsampling to work properly.
+                        if ( currentContigReadsIterator == null ) {
+                            currentContigReadsIterator = new PeekableIterator<SAMRecord>(readsDataSource.getIterator(shard));
+                        }
+
+                        if ( currentContigReadsIterator.hasNext() ) {
+                            shard.fill(currentContigReadsIterator);
+                            nextShard = shard;
+                        }
+                    }
+                }
+            }
+
+            /**
+             * Aggregate all FilePointers for the next contig together into one monolithic FilePointer
+             * to avoid boundary issues with visiting the same file regions more than once (since more
+             * granular FilePointers will have regions that overlap with other nearby FilePointers due
+             * to the nature of BAM indices).
+             *
+             * By creating one persistent set of iterators per contig we also avoid boundary artifacts
+             * in the engine-level downsampling.
+             *
+             * Sets currentContigFilePointer to the aggregated pointer, or to null when the
+             * filePointers iterator is exhausted.
+             *
+             * TODO: This FilePointer aggregation should ideally be done at the BAMSchedule level for
+             * TODO: read traversals, as there's little point in the BAMSchedule emitting extremely
+             * TODO: granular FilePointers if we're just going to union them. The BAMSchedule should
+             * TODO: emit one FilePointer per contig for read traversals (but, crucially, NOT for
+             * TODO: locus traversals).
+             */
+            private void createNextContigFilePointer() {
+                currentContigFilePointer = null;
+                List<FilePointer> nextContigFilePointers = new ArrayList<FilePointer>();
+
+                if ( filePointers.hasNext() ) {
+                    logger.info("Loading BAM index data");
+                }
+
+                while ( filePointers.hasNext() ) {
+
+                    // Make sure that if we see a monolithic FilePointer (representing all regions in all files) that
+                    // it is the ONLY FilePointer we ever encounter
+                    if ( encounteredMonolithicFilePointer ) {
+                        throw new ReviewedGATKException("Bug: encountered additional FilePointers after encountering a monolithic FilePointer");
+                    }
+                    if ( filePointers.peek().isMonolithic() ) {
+                        if ( totalFilePointersConsumed > 0 ) {
+                            throw new ReviewedGATKException("Bug: encountered additional FilePointers before encountering a monolithic FilePointer");
+                        }
+                        encounteredMonolithicFilePointer = true;
+                        logger.debug(String.format("Encountered monolithic FilePointer: %s", filePointers.peek()));
+                    }
+
+                    // If this is the first FP we've seen, or we're dealing with mapped regions and the next FP is on the
+                    // same contig as previous FPs, or all our FPs are unmapped, add the next FP to the list of FPs to merge
+                    if ( nextContigFilePointers.isEmpty() ||
+                             (! nextContigFilePointers.get(0).isRegionUnmapped && ! filePointers.peek().isRegionUnmapped &&
+                             nextContigFilePointers.get(0).getContigIndex() == filePointers.peek().getContigIndex()) ||
+                                 (nextContigFilePointers.get(0).isRegionUnmapped && filePointers.peek().isRegionUnmapped) ) {
+
+                        nextContigFilePointers.add(filePointers.next());
+                        totalFilePointersConsumed++;
+                    }
+                    else {
+                        break; // next FilePointer is on a different contig or has different mapped/unmapped status,
+                               // save it for next time
+                    }
+                }
+
+                if ( ! nextContigFilePointers.isEmpty() ) {
+                    currentContigFilePointer = FilePointer.union(nextContigFilePointers, parser);
+                }
+
+                if ( currentContigFilePointer != null ) {
+                    logger.info("Done loading BAM index data");
+                    logger.debug(String.format("Next FilePointer: %s", currentContigFilePointer));
+                }
+            }
+
+            /** Removal is not supported by this iterator. */
+            public void remove() {
+                throw new UnsupportedOperationException("Unable to remove from shard balancing iterator");
+            }
+        };
+    }
+
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/SAMDataSource.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/SAMDataSource.java
new file mode 100644
index 0000000..c97201b
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/SAMDataSource.java
@@ -0,0 +1,1236 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.datasources.reads;
+
+import htsjdk.samtools.MergingSamRecordIterator;
+import htsjdk.samtools.SamFileHeaderMerger;
+import htsjdk.samtools.*;
+import htsjdk.samtools.reference.ReferenceSequenceFileFactory;
+import htsjdk.samtools.util.CloseableIterator;
+import htsjdk.samtools.util.CloserUtil;
+import htsjdk.samtools.util.RuntimeIOException;
+import org.apache.log4j.Logger;
+import org.broadinstitute.gatk.engine.ReadMetrics;
+import org.broadinstitute.gatk.engine.ReadProperties;
+import org.broadinstitute.gatk.utils.ValidationExclusion;
+import org.broadinstitute.gatk.engine.filters.CountingFilteringIterator;
+import org.broadinstitute.gatk.engine.filters.ReadFilter;
+import org.broadinstitute.gatk.engine.iterators.*;
+import org.broadinstitute.gatk.engine.resourcemanagement.ThreadAllocation;
+import org.broadinstitute.gatk.utils.GenomeLocParser;
+import org.broadinstitute.gatk.utils.GenomeLocSortedSet;
+import org.broadinstitute.gatk.utils.SimpleTimer;
+import org.broadinstitute.gatk.engine.iterators.ReadTransformingIterator;
+import org.broadinstitute.gatk.utils.downsampling.*;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+import org.broadinstitute.gatk.utils.exceptions.UserException;
+import org.broadinstitute.gatk.utils.interval.IntervalMergingRule;
+import org.broadinstitute.gatk.utils.iterators.GATKSAMIterator;
+import org.broadinstitute.gatk.utils.iterators.GATKSAMIteratorAdapter;
+import org.broadinstitute.gatk.utils.sam.GATKSAMReadGroupRecord;
+import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
+import org.broadinstitute.gatk.utils.sam.GATKSAMRecordIterator;
+import org.broadinstitute.gatk.utils.sam.SAMReaderID;
+
+import java.io.File;
+import java.util.*;
+import java.util.concurrent.Callable;
+
+/**
+ * Data source for SAM/BAM reads: manages a pool of SAM readers, merges their
+ * headers and read groups, and converts shards to SAM iterators over the
+ * specified region.
+ * <p/>
+ * User: aaron
+ * Date: Mar 26, 2009
+ * Time: 2:36:16 PM
+ */
+public class SAMDataSource {
+    /** Reference file */
+    private final File referenceFile;
+
+    /** Backing support for reads. */
+    protected final ReadProperties readProperties;
+
+    /**
+     * Runtime metrics of reads filtered, etc.
+     */
+    private final ReadMetrics readMetrics;
+
+    /**
+     * Tools for parsing GenomeLocs, for verifying BAM ordering against general ordering.
+     */
+    protected final GenomeLocParser genomeLocParser;
+
+    /**
+     * Identifiers for the readers driving this data source.
+     */
+    private final Collection<SAMReaderID> readerIDs;
+
+    /**
+     * How strict are the readers driving this data source.
+     */
+    private final ValidationStringency validationStringency;
+
+    /**
+     * Do we want to remove the program records from this data source?
+     */
+    private final boolean removeProgramRecords;
+
+    /**
+     * Store BAM indices for each reader present.
+     */
+    private final Map<SAMReaderID,GATKBAMIndex> bamIndices = new HashMap<SAMReaderID,GATKBAMIndex>();
+
+    /**
+     * The merged header.
+     */
+    private final SAMFileHeader mergedHeader;
+
+    /**
+     * The constituent headers of the unmerged files.
+     */
+    private final Map<SAMReaderID,SAMFileHeader> headers = new HashMap<SAMReaderID,SAMFileHeader>();
+
+    /**
+     * The sort order of the BAM files.  Files without a sort order tag are assumed to be
+     * in coordinate order.
+     */
+    private SAMFileHeader.SortOrder sortOrder = null;
+
+    /**
+     * Whether the read groups in overlapping files collide.
+     */
+    private final boolean hasReadGroupCollisions;
+
+    /**
+     * Maps the SAM readers' merged read group ids to their original ids. Since merged read group ids
+     * are always unique, we can simply use a map here, no need to stratify by reader.
+     */
+    private final ReadGroupMapping mergedToOriginalReadGroupMappings = new ReadGroupMapping();
+
+    /**
+     * Maps the SAM readers' original read group ids to their revised ids. This mapping must be stratified
+     * by readers, since there can be readgroup id collision: different bam files (readers) can list the
+     * same read group id, which will be disambiguated when these input streams are merged.
+     */
+    private final Map<SAMReaderID,ReadGroupMapping> originalToMergedReadGroupMappings = new HashMap<SAMReaderID,ReadGroupMapping>();
+
+    /**
+     * Mapping from input file path to new sample name. Used only when doing on-the-fly sample renaming.
+     */
+    private Map<String, String> sampleRenameMap = null;
+
+    /** our log, which we want to capture anything from this class */
+    private static Logger logger = Logger.getLogger(SAMDataSource.class);
+
+    /**
+     * A collection of readers driving the merging process.
+     */
+    private final SAMResourcePool resourcePool;
+
+    /**
+     * Asynchronously loads BGZF blocks.
+     */
+    private final BGZFBlockLoadingDispatcher dispatcher;
+
+    /**
+     * How are threads allocated.
+     */
+    private final ThreadAllocation threadAllocation;
+
+    /**
+     * How are adjacent intervals merged by the sharder?
+     */
+    private final IntervalMergingRule intervalMergingRule;
+
+    /**
+     * Static set of unsupported programs that create bam files.
+     * The key is the PG record ID and the value is the name of the tool that created it
+     */
+    private static Map<String, String> unsupportedPGs = new HashMap<>();
+    static {
+        unsupportedPGs.put("GATK ReduceReads", "ReduceReads");
+    }
+
+    /**
+     * Create a new SAM data source given the supplied read metadata, using permissive
+     * defaults: strict validation, no downsampling, no supplemental filters, no read
+     * transformers, and no sample renaming.
+     *
+     * For testing purposes
+     *
+     * @param referenceFile reference file; may be null, in which case the sequence
+     *                      dictionary is taken from the merged BAM header.
+     * @param samFiles list of reads files.
+     * @param threadAllocation how threads are allocated (I/O vs. compute).
+     * @param numFileHandles file-handle budget for asynchronous I/O, or null for the default.
+     * @param genomeLocParser tools for parsing GenomeLocs.
+     */
+    public SAMDataSource(final File referenceFile, final Collection<SAMReaderID> samFiles,
+                         final ThreadAllocation threadAllocation, final Integer numFileHandles,
+                         final GenomeLocParser genomeLocParser) {
+        this(
+                referenceFile,
+                samFiles,
+                threadAllocation,
+                numFileHandles,
+                genomeLocParser,
+                false,                          // useOriginalBaseQualities
+                ValidationStringency.STRICT,
+                null,                           // readBufferSize: use the built-in default
+                null,                           // downsamplingMethod: none
+                new ValidationExclusion(),
+                new ArrayList<ReadFilter>(),
+                false);                         // includeReadsWithDeletionAtLoci
+    }
+
+    /**
+     * See complete constructor.  Does not enable BAQ by default.
+     *
+     * Delegates with no read transformers, default base qualities disabled, program
+     * records retained, reads not kept in LIBS, no sample renaming, and interval
+     * merging rule ALL.
+     *
+     * For testing purposes
+     */
+    public SAMDataSource(
+            final File referenceFile,
+            Collection<SAMReaderID> samFiles,
+            ThreadAllocation threadAllocation,
+            Integer numFileHandles,
+            GenomeLocParser genomeLocParser,
+            boolean useOriginalBaseQualities,
+            ValidationStringency strictness,
+            Integer readBufferSize,
+            DownsamplingMethod downsamplingMethod,
+            ValidationExclusion exclusionList,
+            Collection<ReadFilter> supplementalFilters,
+            boolean includeReadsWithDeletionAtLoci) {
+        this(   referenceFile,
+                samFiles,
+                threadAllocation,
+                numFileHandles,
+                genomeLocParser,
+                useOriginalBaseQualities,
+                strictness,
+                readBufferSize,
+                downsamplingMethod,
+                exclusionList,
+                supplementalFilters,
+                Collections.<ReadTransformer>emptyList(),
+                includeReadsWithDeletionAtLoci,
+                (byte) -1,                      // defaultBaseQualities: disabled
+                false,                          // removeProgramRecords
+                false,                          // keepReadsInLIBS
+                null,                           // sampleRenameMap: no renaming
+                IntervalMergingRule.ALL);
+    }
+
+    /**
+     * Create a new SAM data source given the supplied read metadata.
+     * @param referenceFile reference file.
+     * @param samFiles list of reads files.
+     * @param threadAllocation how threads are allocated (I/O vs. compute).
+     * @param numFileHandles file-handle budget for asynchronous I/O, or null for the default.
+     * @param genomeLocParser tools for parsing GenomeLocs.
+     * @param useOriginalBaseQualities True if original base qualities should be used.
+     * @param strictness Stringency of reads file parsing.
+     * @param readBufferSize Number of reads to hold in memory per BAM.
+     * @param downsamplingMethod Method for downsampling reads at a given locus.
+     * @param exclusionList what safety checks we're willing to let slide
+     * @param supplementalFilters additional filters to dynamically apply.
+     * @param readTransformers read transformers to apply to the read stream.
+     * @param includeReadsWithDeletionAtLoci if 'true', the base pileups sent to the walker's map() method
+     *         will explicitly list reads with deletion over the current reference base; otherwise, only observed
+     *        bases will be seen in the pileups, and the deletions will be skipped silently.
+     * @param defaultBaseQualities if the reads have incomplete quality scores, set them all to defaultBaseQuality.
+     * @param removeProgramRecords if true, program records are stripped from the merged header.
+     * @param keepReadsInLIBS should we keep a unique list of reads in LIBS?
+     * @param sampleRenameMap Map of BAM file to new sample ID used during on-the-fly runtime sample renaming.
+     *                        Will be null if we're not doing sample renaming.
+     * @param intervalMergingRule how are adjacent intervals merged by the sharder
+     */
+    public SAMDataSource(
+            final File referenceFile,
+            Collection<SAMReaderID> samFiles,
+            ThreadAllocation threadAllocation,
+            Integer numFileHandles,
+            GenomeLocParser genomeLocParser,
+            boolean useOriginalBaseQualities,
+            ValidationStringency strictness,
+            Integer readBufferSize,
+            DownsamplingMethod downsamplingMethod,
+            ValidationExclusion exclusionList,
+            Collection<ReadFilter> supplementalFilters,
+            List<ReadTransformer> readTransformers,
+            boolean includeReadsWithDeletionAtLoci,
+            byte defaultBaseQualities,
+            boolean removeProgramRecords,
+            final boolean keepReadsInLIBS,
+            final Map<String, String> sampleRenameMap,
+            final IntervalMergingRule intervalMergingRule) {
+
+        this.referenceFile = referenceFile;
+        this.readMetrics = new ReadMetrics();
+        this.genomeLocParser = genomeLocParser;
+        this.intervalMergingRule = intervalMergingRule;
+
+        readerIDs = samFiles;
+
+        this.threadAllocation = threadAllocation;
+        // TODO: Consider a borrowed-thread dispatcher implementation.
+        if(this.threadAllocation.getNumIOThreads() > 0) {
+            logger.info("Running in asynchronous I/O mode; number of threads = " + this.threadAllocation.getNumIOThreads());
+            dispatcher = new BGZFBlockLoadingDispatcher(this.threadAllocation.getNumIOThreads(), numFileHandles != null ? numFileHandles : 1);
+        }
+        else
+            dispatcher = null;
+
+        validationStringency = strictness;
+        this.removeProgramRecords = removeProgramRecords;
+        if(readBufferSize != null)
+            ReadShard.setReadBufferSize(readBufferSize);   // TODO: use of non-final static variable here is just awful, especially for parallel tests
+        else {
+            // Choose a sensible default for the read buffer size.
+            // Previously we picked 100000 reads per BAM per shard with a max cap of 250K reads in memory at once.
+            // Now we are simply setting it to 100K reads
+            ReadShard.setReadBufferSize(100000);
+        }
+
+        this.sampleRenameMap = sampleRenameMap;
+
+        // Check up front that every input file is readable, validate headers, and determine
+        // a consistent sort order across all inputs.
+        resourcePool = new SAMResourcePool(Integer.MAX_VALUE);
+        SAMReaders readers = resourcePool.getAvailableReaders();
+
+        // Determine the sort order.
+        for(SAMReaderID readerID: readerIDs) {
+            if (! readerID.getSamFile().canRead() )
+                throw new UserException.CouldNotReadInputFile(readerID.getSamFile(),"file is not present or user does not have appropriate permissions.  " +
+                        "Please check that the file is present and readable and try again.");
+
+            // Get the sort order, forcing it to coordinate if unsorted.
+            SamReader reader = readers.getReader(readerID);
+            SAMFileHeader header = reader.getFileHeader();
+
+            headers.put(readerID,header);
+
+            if ( header.getReadGroups().isEmpty() ) {
+                throw new UserException.MalformedBAM(readers.getReaderID(reader).getSamFile(),
+                        "SAM file doesn't have any read groups defined in the header.  The GATK no longer supports SAM files without read groups");
+            }
+
+            SAMFileHeader.SortOrder sortOrder = header.getSortOrder() != SAMFileHeader.SortOrder.unsorted ? header.getSortOrder() : SAMFileHeader.SortOrder.coordinate;
+
+            // Validate that all input files are sorted in the same order.
+            if(this.sortOrder != null && this.sortOrder != sortOrder)
+                throw new UserException.MissortedBAM(String.format("Attempted to process mixed of files sorted as %s and %s.",this.sortOrder,sortOrder));
+
+            // Update the sort order.
+            this.sortOrder = sortOrder;
+        }
+
+        mergedHeader = readers.getMergedHeader();
+        hasReadGroupCollisions = readers.hasReadGroupCollisions();
+
+        readProperties = new ReadProperties(
+                samFiles,
+                mergedHeader,
+                sortOrder,
+                useOriginalBaseQualities,
+                strictness,
+                downsamplingMethod,
+                exclusionList,
+                supplementalFilters,
+                readTransformers,
+                includeReadsWithDeletionAtLoci,
+                defaultBaseQualities,
+                keepReadsInLIBS);
+
+        // cache the read group id (original) -> read group id (merged)
+        // and read group id (merged) -> read group id (original) mappings.
+        // When there are no collisions, both directions are identity mappings.
+        for(SAMReaderID id: readerIDs) {
+            SamReader reader = readers.getReader(id);
+
+            ReadGroupMapping mappingToMerged = new ReadGroupMapping();
+
+            List<SAMReadGroupRecord> readGroups = reader.getFileHeader().getReadGroups();
+            for(SAMReadGroupRecord readGroup: readGroups) {
+                if(hasReadGroupCollisions) {
+                    mappingToMerged.put(readGroup.getReadGroupId(),readers.getReadGroupId(id,readGroup.getReadGroupId()));
+                    mergedToOriginalReadGroupMappings.put(readers.getReadGroupId(id,readGroup.getReadGroupId()),readGroup.getReadGroupId());
+                } else {
+                    mappingToMerged.put(readGroup.getReadGroupId(),readGroup.getReadGroupId());
+                    mergedToOriginalReadGroupMappings.put(readGroup.getReadGroupId(),readGroup.getReadGroupId());
+                }
+            }
+
+            originalToMergedReadGroupMappings.put(id,mappingToMerged);
+        }
+
+        // Prefer the reference's sequence dictionary when a reference is supplied;
+        // otherwise fall back to the merged BAM header's dictionary.
+        final SAMSequenceDictionary samSequenceDictionary;
+        if (referenceFile == null) {
+            samSequenceDictionary = mergedHeader.getSequenceDictionary();
+        } else {
+            samSequenceDictionary = ReferenceSequenceFileFactory.
+                    getReferenceSequenceFile(referenceFile).
+                    getSequenceDictionary();
+        }
+
+        // Preload a BAM index for each reader whose index file can be located;
+        // readers without an index file simply get no entry in bamIndices.
+        for(SAMReaderID id: readerIDs) {
+            File indexFile = findIndexFile(id.getSamFile());
+            if(indexFile != null)
+                bamIndices.put(id,new GATKBAMIndex(indexFile, samSequenceDictionary));
+        }
+
+        resourcePool.releaseReaders(readers);
+    }
+
+    /**
+     * Checks whether the provided SAM header if from a reduced bam file.
+     *
+     * Walks the header's @PG records and rejects files produced by any tool listed
+     * in the static {@code unsupportedPGs} map (currently only "GATK ReduceReads").
+     *
+     * NOTE(review): this method is not invoked anywhere in the visible portion of
+     * this class — confirm that a caller exists, otherwise unsupported BAMs are
+     * silently accepted.
+     *
+     * @param header the SAM header for a given file
+     * @throws UserException if the header is from a reduced bam
+     */
+    private void checkForUnsupportedBamFile(final SAMFileHeader header) {
+        for ( final SAMProgramRecord PGrecord : header.getProgramRecords() ) {
+            if ( unsupportedPGs.containsKey(PGrecord.getId()) )
+                throw new UserException("The GATK no longer supports running off of BAMs produced by " + unsupportedPGs.get(PGrecord.getId()));
+        }
+    }
+
+    /**
+     * Closes every reader in one set of pooled readers.
+     *
+     * NOTE(review): the readers obtained here are never released back to the
+     * resource pool, and only this single SAMReaders set is closed — presumably
+     * acceptable because this runs at shutdown; confirm no other reader sets
+     * remain open in the pool.
+     */
+    public void close() {
+        SAMReaders readers = resourcePool.getAvailableReaders();
+        for(SAMReaderID readerID: readerIDs) {
+            SamReader reader = readers.getReader(readerID);
+            CloserUtil.close(reader);
+        }
+    }
+
+    /**
+     * Returns Reads data structure containing information about the reads data sources placed in this pool as well as
+     * information about how they are downsampled, sorted, and filtered
+     * @return the read properties backing this data source; never null (assigned in the constructor).
+     */
+    public ReadProperties getReadsInfo() { return readProperties; }
+
+    /**
+     * Checks to see whether any reads files are supplying data.
+     * @return True if no reads files are supplying data to the traversal; false otherwise.
+     */
+    public boolean isEmpty() {
+        // isEmpty() expresses the intent directly instead of comparing size() to 0.
+        return readProperties.getSAMReaderIDs().isEmpty();
+    }
+
+    /**
+     * Gets the SAM file associated with a given reader ID.
+     * @param id The reader for which to retrieve the source file.
+     * @return the file actually associated with the id.
+     */
+    public File getSAMFile(SAMReaderID id) {
+        return id.getSamFile();
+    }
+
+    /**
+     * Returns readers used by this data source.
+     * Note: returns the live collection supplied at construction, not a copy.
+     * @return A list of SAM reader IDs.
+     */
+    public Collection<SAMReaderID> getReaderIDs() {
+        return readerIDs;
+    }
+
+    /**
+     * Retrieves the id of the reader which built the given read.
+     * NOTE(review): assumes read.getFileSource() is non-null; reads produced by this
+     * data source carry a file source (see BAMCodecIterator.advance) — confirm for
+     * reads obtained elsewhere.
+     * @param read The read to test.
+     * @return ID of the reader.
+     */
+    public SAMReaderID getReaderID(SAMRecord read) {
+        return resourcePool.getReaderID(read.getFileSource().getReader());
+    }
+
+    /**
+     * Gets the merged header from the SAM file.
+     * @return The merged header.
+     */
+    public SAMFileHeader getHeader() {
+        return mergedHeader;
+    }
+
+    /**
+     * Gets the original (unmerged) header for the given reader, as cached at construction.
+     * @param id ID of the reader.
+     * @return That reader's header, or null if the id is not one of this data source's inputs.
+     */
+    public SAMFileHeader getHeader(SAMReaderID id) {
+        return headers.get(id);
+    }
+
+    /**
+     * Gets the revised read group id mapped to this 'original' read group id.
+     * Note: throws NullPointerException if the reader is not one of this data
+     * source's inputs (the per-reader mapping lookup returns null).
+     * @param reader for which to grab a read group.
+     * @param originalReadGroupId ID of the original read group.
+     * @return Merged read group ID.
+     */
+    public String getReadGroupId(final SAMReaderID reader, final String originalReadGroupId) {
+        return originalToMergedReadGroupMappings.get(reader).get(originalReadGroupId);
+    }
+
+    /**
+     * Gets the original read group id (as it was specified in the original input bam file) that maps onto
+     * this 'merged' read group id.
+     * @param mergedReadGroupId 'merged' ID of the read group (as it is presented by the read received from merged input stream).
+     * @return Original read group ID, or null if the merged id is unknown.
+     */
+    public String getOriginalReadGroupId(final String mergedReadGroupId) {
+        return mergedToOriginalReadGroupMappings.get(mergedReadGroupId);
+    }
+
+    /**
+     * Gets the index for a particular reader.  Indices are loaded eagerly during construction;
+     * this is a simple lookup.
+     * @param id Id of the reader.
+     * @return The index, or null if no index file was found for this reader.
+     */
+    public GATKBAMIndex getIndex(final SAMReaderID id) {
+        return bamIndices.get(id);
+    }
+
+    /**
+     * Return true if the index for a particular reader exists.
+     * @param id Id of the reader.
+     * @return True if the index exists.
+     */
+    public boolean hasIndex(final SAMReaderID id) {
+        return bamIndices.containsKey(id);
+    }
+
+    /**
+     * True if all readers that require an index for SAMFileSpan creation have an index.
+     * @return True if every reader driving this data source has a loaded index; false otherwise.
+     */
+    public boolean hasIndex() {
+        // Indices are available overall only when no single reader is missing one.
+        for (final SAMReaderID id : readerIDs) {
+            if (!bamIndices.containsKey(id))
+                return false;
+        }
+        return true;
+    }
+
+    /**
+     * Returns true if the reader caches its SAMFileHeader for each iterator.
+     * Detection is by file extension only: CRAM readers do not cache, all others do.
+     * @return true if this reader caches its SAMFileHeader for each iterator.
+     */
+    private boolean isIteratorSAMFileHeaderCached(final SAMReaderID readerID) {
+        // example: https://github.com/samtools/htsjdk/blob/ee4308ede60962f3ab4275473ac384724b471149/src/java/htsjdk/samtools/CRAMFileReader.java#L183
+        return !readerID.getSamFile().getName().toLowerCase().endsWith(SamReader.Type.CRAM_TYPE.fileExtension());
+    }
+
+    /**
+     * Retrieves the sort order of the readers.  Unsorted inputs were coerced to
+     * coordinate order during construction.
+     * @return Sort order.  Can be unsorted, coordinate order, or query name order.
+     */
+    public SAMFileHeader.SortOrder getSortOrder() {
+        return sortOrder;
+    }
+
+    /**
+     * Gets the cumulative read metrics for shards already processed.
+     * @return Cumulative read metrics (the live object, not a copy).
+     */
+    public ReadMetrics getCumulativeReadMetrics() {
+        // don't return a clone here because the engine uses a pointer to this object
+        return readMetrics;
+    }
+
+    /**
+     * Incorporate the given read metrics into the cumulative read metrics.
+     * @param readMetrics The 'incremental' read metrics, to be incorporated into the cumulative metrics.
+     */
+    public void incorporateReadMetrics(final ReadMetrics readMetrics) {
+        this.readMetrics.incrementMetrics(readMetrics);
+    }
+
+    /**
+     * Gets a read iterator over the given shard: either the shard's own buffered
+     * reads (when the shard buffers reads) or a fresh iterator over the shard's
+     * file spans.
+     * @param shard The shard specifying the data limits.
+     * @return An iterator over the reads in the shard.
+     */
+    public GATKSAMIterator seek(Shard shard) {
+        if(shard.buffersReads()) {
+            return shard.iterator();
+        }
+        else {
+            return getIterator(shard);
+        }
+    }
+
+    /**
+     * Gets the reader associated with the given read, by linear scan over the reader IDs.
+     * @param readers Available readers.
+     * @param read The read whose originating reader is sought.
+     * @return ID of the reader whose underlying SamReader produced the read.
+     * @throws ReviewedGATKException if no reader matches the read's file source.
+     */
+    private SAMReaderID getReaderID(SAMReaders readers, SAMRecord read) {
+        for(SAMReaderID id: getReaderIDs()) {
+            if(readers.getReader(id) == read.getFileSource().getReader())
+                return id;
+        }
+        throw new ReviewedGATKException("Unable to find id for reader associated with read " + read.getReadName());
+    }
+
+    /**
+     * Get the initial reader positions across all BAM files
+     *
+     * Borrows a reader set from the pool, records each reader's span of reads,
+     * and releases the readers before returning.
+     *
+     * @return the start positions of the first chunk of reads for all BAM files
+     */
+    protected Map<SAMReaderID, GATKBAMFileSpan> getInitialReaderPositions() {
+        Map<SAMReaderID, GATKBAMFileSpan> initialPositions = new HashMap<SAMReaderID, GATKBAMFileSpan>();
+        SAMReaders readers = resourcePool.getAvailableReaders();
+
+        for ( SAMReaderID id: getReaderIDs() ) {
+            final GATKBAMFileSpan span = new GATKBAMFileSpan(readers.getReader(id).indexing().getFilePointerSpanningReads());
+            initialPositions.put(id, span);
+        }
+
+        resourcePool.releaseReaders(readers);
+        return initialPositions;
+    }
+
+    /**
+     * Get an iterator over the data types specified in the shard.
+     * Read-order verification is enabled only for ReadShards.
+     *
+     * @param shard The shard specifying the data limits.
+     * @return An iterator over the selected data.
+     */
+    protected GATKSAMIterator getIterator( Shard shard ) {
+        return getIterator(resourcePool.getAvailableReaders(), shard, shard instanceof ReadShard);
+    }
+
+    /**
+     * Get an iterator over the data types specified in the shard.
+     *
+     * Builds one per-file iterator (asynchronous BGZF path when I/O threads are
+     * enabled, otherwise a plain indexed iterator), wraps each with formatting /
+     * error-reformatting / interval-filtering decorators, merges them, and finally
+     * applies the filtering, downsampling, and verification decorators.
+     *
+     * @param readers Readers from which to load data.
+     * @param shard The shard specifying the data limits.
+     * @param enableVerification True to verify.  For compatibility with old sharding strategy.
+     * @return An iterator over the selected data.
+     */
+    private GATKSAMIterator getIterator(SAMReaders readers, Shard shard, boolean enableVerification) {
+        // Set up merging to dynamically merge together multiple BAMs.
+        Map<SamReader,CloseableIterator<SAMRecord>> iteratorMap = new HashMap<>();
+
+        for(SAMReaderID id: getReaderIDs()) {
+            CloseableIterator<SAMRecord> iterator;
+
+            // TODO: null used to be the signal for unmapped, but we've replaced that with a simple index query for the last bin.
+            // TODO: Kill this check once we've proven that the design elements are gone.
+            if(shard.getFileSpans().get(id) == null)
+                throw new ReviewedGATKException("SAMDataSource: received null location for reader " + id + ", but null locations are no longer supported.");
+
+            try {
+                if(threadAllocation.getNumIOThreads() > 0) {
+                    // Asynchronous path: decode BAM records straight off the shared BlockInputStream.
+                    // TODO: need to add friendly error if -nit is used with non BAM. Later, possibly add this capability with CRAM when htsjdk supports CRAM file spans are supported.
+                    BlockInputStream inputStream = readers.getInputStream(id);
+                    inputStream.submitAccessPlan(new BAMAccessPlan(id, inputStream, (GATKBAMFileSpan) shard.getFileSpans().get(id)));
+                    BAMRecordCodec codec = new BAMRecordCodec(getHeader(id));
+                    codec.setInputStream(inputStream);
+                    iterator = new BAMCodecIterator(inputStream,readers.getReader(id),codec);
+                }
+                else {
+                    // Synchronous path: let htsjdk iterate directly over the shard's file span.
+                    final SamReader reader = readers.getReader(id);
+                    iterator = ((SamReader.Indexing)reader).iterator(shard.getFileSpans().get(id));
+                }
+            } catch ( RuntimeException e ) { // we need to catch RuntimeExceptions here because the Picard code is throwing them (among SAMFormatExceptions) sometimes
+                throw new UserException.MalformedBAM(id.getSamFile(), e.getMessage());
+            }
+
+            // At the moment, too many other classes to change for GATKSAMRecordIterator converter.
+            // Force the compiler to just let the conversion happen, since generics are erased anyway.
+            iterator = (CloseableIterator<SAMRecord>)(Object)new GATKSAMRecordIterator(iterator);
+            iterator = new MalformedBAMErrorReformatingIterator(id.getSamFile(), iterator);
+            if(shard.getGenomeLocs().size() > 0)
+                iterator = new IntervalOverlapFilteringIterator(iterator,shard.getGenomeLocs());
+
+            iteratorMap.put(readers.getReader(id), iterator);
+        }
+
+        MergingSamRecordIterator mergingIterator = readers.createMergingIterator(iteratorMap);
+
+        // The readMetrics object being passed in should be that of this dataSource and NOT the shard: the dataSource's
+        // metrics is intended to keep track of the reads seen (and hence passed to the CountingFilteringIterator when
+        // we apply the decorators), whereas the shard's metrics is used to keep track the "records" seen.
+        return applyDecoratingIterators(readMetrics,
+                enableVerification,
+                readProperties.useOriginalBaseQualities(),
+                new ReleasingIterator(readers, GATKSAMIteratorAdapter.adapt(mergingIterator)),
+                readProperties.getValidationExclusionList().contains(ValidationExclusion.TYPE.NO_READ_ORDER_VERIFICATION),
+                readProperties.getSupplementalFilters(),
+                readProperties.getReadTransformers(),
+                readProperties.defaultBaseQualities(),
+                shard instanceof LocusShard);
+    }
+
+    /**
+     * Iterator that decodes BAM records directly from a BlockInputStream via a
+     * BAMRecordCodec, used on the asynchronous I/O path.  Reads one record ahead
+     * so hasNext() is a simple null check; each decoded read is tagged with a file
+     * source spanning the bytes it was decoded from.
+     */
+    private class BAMCodecIterator implements CloseableIterator<SAMRecord> {
+        private final BlockInputStream inputStream;
+        private final SamReader reader;
+        private final BAMRecordCodec codec;
+        // The read-ahead buffer; null once the stream is exhausted.
+        private SAMRecord nextRead;
+
+        private BAMCodecIterator(final BlockInputStream inputStream, final SamReader reader, final BAMRecordCodec codec) {
+            this.inputStream = inputStream;
+            this.reader = reader;
+            this.codec = codec;
+            advance();
+        }
+
+        public boolean hasNext() {
+            return nextRead != null;
+        }
+
+        public SAMRecord next() {
+            if(!hasNext())
+                throw new NoSuchElementException("Unable to retrieve next record from BAMCodecIterator; input stream is empty");
+            SAMRecord currentRead = nextRead;
+            advance();
+            return currentRead;
+        }
+
+        public void close() {
+            // NO-OP.
+        }
+
+        public void remove() {
+            throw new UnsupportedOperationException("Unable to remove from BAMCodecIterator");
+        }
+
+        // Decode the next record and record the byte span it occupied so the read
+        // can be attributed back to its reader and file region.
+        private void advance() {
+            final long startCoordinate = inputStream.getFilePointer();
+            nextRead = codec.decode();
+            final long stopCoordinate = inputStream.getFilePointer();
+
+            if(reader != null && nextRead != null)
+                PicardNamespaceUtils.setFileSource(nextRead, new SAMFileSource(reader, new GATKBAMFileSpan(new GATKChunk(startCoordinate, stopCoordinate))));
+        }
+    }
+
+    /**
+     * Filter reads based on user-specified criteria.
+     *
+     * @param readMetrics metrics to track when using this iterator.
+     * @param enableVerification Verify the order of reads.
+     * @param useOriginalBaseQualities True if original base qualities should be used.
+     * @param wrappedIterator the raw data source.
+     * @param noValidationOfReadOrder Another trigger for the verifying iterator?  TODO: look into this.
+     * @param supplementalFilters additional filters to apply to the reads.
+     * @param defaultBaseQualities if the reads have incomplete quality scores, set them all to defaultBaseQuality.
+     * @param isLocusBasedTraversal true if we're dealing with a read stream from a LocusShard
+     * @return An iterator wrapped with filters reflecting the passed-in parameters.  Will not be null.
+     */
+    protected GATKSAMIterator applyDecoratingIterators(ReadMetrics readMetrics,
+                                                        boolean enableVerification,
+                                                        boolean useOriginalBaseQualities,
+                                                        GATKSAMIterator wrappedIterator,
+                                                        Boolean noValidationOfReadOrder,
+                                                        Collection<ReadFilter> supplementalFilters,
+                                                        List<ReadTransformer> readTransformers,
+                                                        byte defaultBaseQualities,
+                                                        boolean isLocusBasedTraversal ) {
+
+        // Always apply the ReadFormattingIterator before both ReadFilters and ReadTransformers. At a minimum,
+        // this will consolidate the cigar strings into canonical form. This has to be done before the read
+        // filtering, because not all read filters will behave correctly with things like zero-length cigar
+        // elements. If useOriginalBaseQualities is true or defaultBaseQualities >= 0, this iterator will also
+        // modify the base qualities.
+        wrappedIterator = new ReadFormattingIterator(wrappedIterator, useOriginalBaseQualities, defaultBaseQualities);
+
+        // Read Filters: these are applied BEFORE downsampling, so that we downsample within the set of reads
+        // that actually survive filtering. Otherwise we could get much less coverage than requested.
+        wrappedIterator = GATKSAMIteratorAdapter.adapt(new CountingFilteringIterator(readMetrics,wrappedIterator,supplementalFilters));
+
+        // Downsampling:
+
+        // For locus traversals where we're downsampling to coverage by sample, assume that the downsamplers
+        // will be invoked downstream from us in LocusIteratorByState. This improves performance by avoiding
+        // splitting/re-assembly of the read stream at this stage, and also allows for partial downsampling
+        // of individual reads.
+        boolean assumeDownstreamLIBSDownsampling = isLocusBasedTraversal &&
+                                                   readProperties.getDownsamplingMethod().type == DownsampleType.BY_SAMPLE &&
+                                                   readProperties.getDownsamplingMethod().toCoverage != null;
+
+        // Apply downsampling iterators here only in cases where we know that LocusIteratorByState won't be
+        // doing any downsampling downstream of us
+        if ( ! assumeDownstreamLIBSDownsampling ) {
+            wrappedIterator = applyDownsamplingIterator(wrappedIterator);
+        }
+
+        // unless they've said not to validate read ordering (!noValidationOfReadOrder) and we've enabled verification,
+        // verify the read ordering by applying a sort order iterator
+        if (!noValidationOfReadOrder && enableVerification)
+            wrappedIterator = new VerifyingSamIterator(wrappedIterator);
+
+        // Read transformers: these are applied last, so that we don't bother transforming reads that get discarded
+        // by the read filters or downsampler.
+        for ( final ReadTransformer readTransformer : readTransformers ) {
+            if ( readTransformer.enabled() && readTransformer.getApplicationTime() == ReadTransformer.ApplicationTime.ON_INPUT )
+                wrappedIterator = new ReadTransformingIterator(wrappedIterator, readTransformer);
+        }
+
+        return wrappedIterator;
+    }
+
+    /**
+     * Wraps the given iterator with whichever downsampling iterator the configured
+     * DownsamplingMethod calls for, or returns it untouched when no downsampling applies.
+     * @param wrappedIterator the raw read stream to (possibly) downsample.
+     * @return an iterator honoring the configured downsampling method; never null.
+     */
+    protected GATKSAMIterator applyDownsamplingIterator( GATKSAMIterator wrappedIterator ) {
+        // Guard clause: nothing to do when downsampling is unset or explicitly NONE.
+        if ( readProperties.getDownsamplingMethod() == null ||
+             readProperties.getDownsamplingMethod().type == DownsampleType.NONE )
+            return wrappedIterator;
+
+        if ( readProperties.getDownsamplingMethod().toFraction != null ) {
+            // Downsampling to a fraction of reads is statistically identical whether applied per
+            // sample or to the merged stream, so we skip the costly per-sample split/re-assembly
+            // and ALWAYS run a single FractionalDownsampler over the entire stream -- even when
+            // BY_SAMPLE downsampling was requested.
+            return new DownsamplingReadsIterator(wrappedIterator,
+                                                 new FractionalDownsampler<SAMRecord>(readProperties.getDownsamplingMethod().toFraction));
+        }
+
+        if ( readProperties.getDownsamplingMethod().toCoverage != null ) {
+            // Downsampling to coverage IS sample-sensitive: BY_SAMPLE must split the stream so each
+            // sample is downsampled independently, while ALL_READS works on the merged stream.
+            switch ( readProperties.getDownsamplingMethod().type ) {
+                case BY_SAMPLE:
+                    return new PerSampleDownsamplingReadsIterator(wrappedIterator,
+                                                                  new SimplePositionalDownsamplerFactory<SAMRecord>(readProperties.getDownsamplingMethod().toCoverage));
+                case ALL_READS:
+                    return new DownsamplingReadsIterator(wrappedIterator,
+                                                         new SimplePositionalDownsampler<SAMRecord>(readProperties.getDownsamplingMethod().toCoverage));
+                default:
+                    break;  // fall through to the untouched-iterator return below
+            }
+        }
+
+        return wrappedIterator;
+    }
+
+
+    /**
+     * A small checkout/checkin pool of SAMReaders sets, bounded by maxEntries.
+     */
+    private class SAMResourcePool {
+        /**
+         * How many entries can be cached in this resource pool?
+         */
+        private final int maxEntries;
+
+        /**
+         * Every reader set ever created by this pool, whether in service or not.
+         */
+        private final List<SAMReaders> allResources = new ArrayList<SAMReaders>();
+
+        /**
+         * Reader sets not currently checked out.
+         */
+        private final List<SAMReaders> availableResources = new ArrayList<SAMReaders>();
+
+        public SAMResourcePool(final int maxEntries) {
+            this.maxEntries = maxEntries;
+        }
+
+        /**
+         * Check out a set of readers to use for a query, creating a fresh set if none are free.
+         * When complete, callers must hand the set back via releaseReaders().
+         * @return a reader set reserved for the caller's exclusive use.
+         */
+        public synchronized SAMReaders getAvailableReaders() {
+            if (availableResources.isEmpty())
+                createNewResource();
+            // Take the first free set; remove(0) both fetches and unregisters it in one step.
+            return availableResources.remove(0);
+        }
+
+        /**
+         * Return a previously checked-out reader set to the pool.
+         * @param readers the set to release; must have been created by this pool.
+         */
+        public synchronized void releaseReaders(SAMReaders readers) {
+            if (!allResources.contains(readers))
+                throw new ReviewedGATKException("Tried to return readers from the pool that didn't originate in the pool.");
+            availableResources.add(readers);
+        }
+
+        /**
+         * Gets the reader id for the given reader, searching every set in the pool.
+         * @param reader Reader for which to determine the id.
+         * @return id of the given reader.
+         */
+        protected synchronized SAMReaderID getReaderID(SamReader reader) {
+            for (final SAMReaders readers : allResources) {
+                final SAMReaderID id = readers.getReaderID(reader);
+                if (id != null)
+                    return id;
+            }
+            throw new ReviewedGATKException("No such reader id is available");
+        }
+
+        /**
+         * Create, register, and make available one new reader set.
+         */
+        private synchronized void createNewResource() {
+            // NOTE(review): '>' allows maxEntries+1 sets to exist; confirm whether '>=' was intended.
+            if (allResources.size() > maxEntries)
+                throw new ReviewedGATKException("Cannot create a new resource pool.  All resources are in use.");
+            final SAMReaders readers = new SAMReaders(readerIDs, validationStringency, removeProgramRecords);
+            allResources.add(readers);
+            availableResources.add(readers);
+        }
+
+    }
+
+    /**
+     * A collection of readers derived from a reads metadata structure.
+     * One instance opens every BAM file in the dataset, optionally renames samples on the fly,
+     * and exposes the header merged across all of its files.  Instances are created and recycled
+     * by the enclosing SAMResourcePool.
+     */
+    private class SAMReaders implements Iterable<SamReader> {
+        /**
+         * Cached representation of the merged header used to generate a merging iterator.
+         */
+        private final SamFileHeaderMerger headerMerger;
+
+        /**
+         * Internal storage for a map of id -> reader.  A LinkedHashMap preserves the order in
+         * which the BAM files were supplied.
+         */
+        private final Map<SAMReaderID,SamReader> readers = new LinkedHashMap<>();
+
+        /**
+         * The input streams backing each reader, keyed by reader id.  Only populated when
+         * dedicated I/O threads are enabled (threadAllocation.getNumIOThreads() > 0).
+         */
+        private final Map<SAMReaderID,BlockInputStream> inputStreams = new LinkedHashMap<SAMReaderID,BlockInputStream>();
+
+        /**
+         * Derive a new set of readers from the Reads metadata.
+         * @param readerIDs reads to load.
+         * TODO: validationStringency is not used here
+         * @param validationStringency validation stringency.
+         * @param removeProgramRecords indicate whether to clear program records from the readers
+         */
+        public SAMReaders(Collection<SAMReaderID> readerIDs, ValidationStringency validationStringency, boolean removeProgramRecords) {
+            final int totalNumberOfFiles = readerIDs.size();
+            int readerNumber = 1;
+            final SimpleTimer timer = new SimpleTimer().start();
+
+            if ( totalNumberOfFiles > 0 ) logger.info("Initializing SAMRecords in serial");
+            // Log a throughput line every tickSize files initialized.
+            final int tickSize = 50;
+            int nExecutedTotal = 0;
+            long lastTick = timer.currentTime();
+            for(final SAMReaderID readerID: readerIDs) {
+                // Open the reader synchronously: call() is invoked directly here, not dispatched
+                // to an executor.
+                final ReaderInitializer init = new ReaderInitializer(readerID).call();
+
+                checkForUnsupportedBamFile(init.reader.getFileHeader());
+
+                if (removeProgramRecords && isIteratorSAMFileHeaderCached(readerID)) {
+                    // Only works when the SamReader implementation caches its header.
+                    // Some implementations (ex: CRAM) rewrite the new underlying file header in reader.getIterator().
+                    // Later, when MergingSamRecordIterator goes to check the headers with .contains()/.equals(),
+                    // it will error out complaining it can't find the unmodified version of the header.
+                    init.reader.getFileHeader().setProgramRecords(new ArrayList<SAMProgramRecord>());
+                }
+
+                if (threadAllocation.getNumIOThreads() > 0) {
+                    inputStreams.put(init.readerID, init.blockInputStream); // get from initializer
+                }
+
+                logger.debug(String.format("Processing file (%d of %d) %s...", readerNumber++, totalNumberOfFiles,  readerID.getSamFile()));
+                readers.put(init.readerID,init.reader);
+                if ( ++nExecutedTotal % tickSize == 0) {
+                    double tickInSec = (timer.currentTime() - lastTick) / 1000.0;
+                    printReaderPerformance(nExecutedTotal, tickSize, totalNumberOfFiles, timer, tickInSec);
+                    lastTick = timer.currentTime();
+                }
+            }
+
+            if ( totalNumberOfFiles > 0 ) logger.info(String.format("Done initializing BAM readers: total time %.2f", timer.getElapsedTime()));
+
+            Collection<SAMFileHeader> headers = new LinkedList<SAMFileHeader>();
+
+            // Examine the bam headers, perform any requested sample renaming on them, and add
+            // them to the list of headers to pass to the Picard SamFileHeaderMerger:
+            for ( final Map.Entry<SAMReaderID, SamReader> readerEntry : readers.entrySet() ) {
+                final SAMReaderID readerID = readerEntry.getKey();
+                final SamReader reader = readerEntry.getValue();
+                final SAMFileHeader header = reader.getFileHeader();
+
+                // The remappedSampleName will be null if either no on-the-fly sample renaming was requested,
+                // or the user's sample rename map file didn't contain an entry for this bam file:
+                final String remappedSampleName = sampleRenameMap != null ? sampleRenameMap.get(readerID.getSamFilePath()) : null;
+
+                // If we've been asked to rename the sample for this bam file, do so now. We'll check to
+                // make sure this bam only contains reads from one sample before proceeding.
+                //
+                // IMPORTANT: relies on the fact that the Picard SamFileHeaderMerger makes a copy of
+                //            the existing read group attributes (including sample name) when merging
+                //            headers, regardless of whether there are read group collisions or not.
+                if ( remappedSampleName != null ) {
+                    remapSampleName(readerID, header, remappedSampleName);
+                }
+
+                headers.add(header);
+            }
+
+            headerMerger = new SamFileHeaderMerger(SAMFileHeader.SortOrder.coordinate,headers,true);
+
+            // update all read groups to GATKSAMRecordReadGroups
+            final List<SAMReadGroupRecord> gatkReadGroups = new LinkedList<SAMReadGroupRecord>();
+            for ( final SAMReadGroupRecord rg : headerMerger.getMergedHeader().getReadGroups() ) {
+                gatkReadGroups.add(new GATKSAMReadGroupRecord(rg));
+            }
+            headerMerger.getMergedHeader().setReadGroups(gatkReadGroups);
+        }
+
+        /**
+         * Changes the sample name in the read groups for the provided bam file header to match the
+         * remappedSampleName. Blows up with a UserException if the header contains more than one
+         * sample name.
+         *
+         * @param readerID ID for the bam file from which the provided header came from
+         * @param header The bam file header. Will be modified by this call.
+         * @param remappedSampleName New sample name to replace the existing sample attribute in the
+         *                           read groups for the header.
+         */
+        private void remapSampleName( final SAMReaderID readerID, final SAMFileHeader header, final String remappedSampleName ) {
+            String firstEncounteredSample = null;
+
+            for ( final SAMReadGroupRecord readGroup : header.getReadGroups() ) {
+                final String thisReadGroupSample = readGroup.getSample();
+
+                if ( thisReadGroupSample == null ) {
+                    throw new UserException(String.format("On-the fly sample renaming was requested for bam file %s, however this " +
+                                                          "bam file contains a read group (id: %s) with a null sample attribute",
+                                                          readerID.getSamFilePath(), readGroup.getId()));
+                }
+                else if ( firstEncounteredSample == null ) {
+                    firstEncounteredSample = thisReadGroupSample;
+                }
+                else if ( ! firstEncounteredSample.equals(thisReadGroupSample) ) {
+                    throw new UserException(String.format("On-the-fly sample renaming was requested for bam file %s, " +
+                                                          "however this bam file contains reads from more than one sample " +
+                                                          "(encountered samples %s and %s in the bam header). The GATK requires that " +
+                                                          "all bams for which on-the-fly sample renaming is requested " +
+                                                          "contain reads from only a single sample per bam.",
+                                                          readerID.getSamFilePath(), firstEncounteredSample, thisReadGroupSample));
+                }
+
+                readGroup.setSample(remappedSampleName);
+            }
+        }
+
+        /**
+         * Logs a progress line summarizing reader-initialization throughput and the estimated
+         * time remaining for the files not yet processed.
+         */
+        final private void printReaderPerformance(final int nExecutedTotal,
+                                                  final int nExecutedInTick,
+                                                  final int totalNumberOfFiles,
+                                                  final SimpleTimer timer,
+                                                  final double tickDurationInSec) {
+            final int pendingSize = totalNumberOfFiles - nExecutedTotal;
+            final double totalTimeInSeconds = timer.getElapsedTime();
+            final double nTasksPerSecond = nExecutedTotal / (1.0*totalTimeInSeconds);
+            final int nRemaining = pendingSize;
+            final double estTimeToComplete = pendingSize / nTasksPerSecond;
+            logger.info(String.format("Init %d BAMs in last %.2f s, %d of %d in %.2f s / %.2f m (%.2f tasks/s).  %d remaining with est. completion in %.2f s / %.2f m",
+                    nExecutedInTick, tickDurationInSec,
+                    nExecutedTotal, totalNumberOfFiles, totalTimeInSeconds, totalTimeInSeconds / 60, nTasksPerSecond,
+                    nRemaining, estTimeToComplete, estTimeToComplete / 60));
+        }
+
+        /**
+         * Return the header derived from the merging of these BAM files.
+         * @return the merged header.
+         */
+        public SAMFileHeader getMergedHeader() {
+            return headerMerger.getMergedHeader();
+        }
+
+        /**
+         * Do multiple read groups collide in this dataset?
+         * @return True if multiple read groups collide; false otherwise.
+         */
+        public boolean hasReadGroupCollisions() {
+            return headerMerger.hasReadGroupCollisions();
+        }
+
+        /**
+         * Get the newly mapped read group ID for the given read group.
+         * @param readerID Reader for which to discern the transformed ID.
+         * @param originalReadGroupID Original read group.
+         * @return Remapped read group.
+         */
+        public String getReadGroupId(final SAMReaderID readerID, final String originalReadGroupID) {
+            SAMFileHeader header = readers.get(readerID).getFileHeader();
+            return headerMerger.getReadGroupId(header,originalReadGroupID);
+        }
+
+        /**
+         * Creates a new merging iterator from the given map, using the cached merged header.
+         * @param iteratorMap A map of readers to iterators.
+         * @return An iterator which will merge those individual iterators.
+         */
+        public MergingSamRecordIterator createMergingIterator(final Map<SamReader,CloseableIterator<SAMRecord>> iteratorMap) {
+            return new MergingSamRecordIterator(headerMerger,iteratorMap,true);
+        }
+
+        /**
+         * Retrieve the reader from the data structure.
+         * @param id The ID of the reader to retrieve.
+         * @return the reader associated with the given id.
+         */
+        public SamReader getReader(SAMReaderID id) {
+            if(!readers.containsKey(id))
+                throw new NoSuchElementException("No reader is associated with id " + id);
+            return readers.get(id);
+        }
+
+        /**
+         * Retrieve the input stream backing a reader.
+         * @param id The ID of the reader to retrieve.
+         * @return the input stream associated with the given id, or null when I/O threads
+         *         are not in use (the map is only populated in that mode).
+         */
+        public BlockInputStream getInputStream(final SAMReaderID id) {
+            return inputStreams.get(id);
+        }
+
+        /**
+         * Searches for the reader id of this reader.  Matches by object identity, not equals().
+         * @param reader Reader for which to search.
+         * @return The id associated the given reader, or null if the reader is not present in this collection.
+         */
+        protected SAMReaderID getReaderID(SamReader reader) {
+            for(Map.Entry<SAMReaderID,SamReader> entry: readers.entrySet()) {
+                if(reader == entry.getValue())
+                    return entry.getKey();
+            }
+            // Not found? return null.
+            return null;
+        }
+
+        /**
+         * Returns an iterator over all readers in this structure.
+         * @return An iterator over readers.
+         */
+        public Iterator<SamReader> iterator() {
+            return readers.values().iterator();
+        }
+
+        /**
+         * Returns whether any readers are present in this structure.
+         * @return true if no readers have been loaded; false otherwise.
+         */
+        public boolean isEmpty() {
+            return readers.isEmpty();
+        }
+    }
+
+    /**
+     * Opens a single BAM reader (and, when I/O threads are enabled, its backing
+     * BlockInputStream), translating low-level htsjdk failures into GATK UserExceptions.
+     * Implements Callable so initialization could be dispatched to an executor, though the
+     * surrounding code invokes call() directly.
+     */
+    class ReaderInitializer implements Callable<ReaderInitializer> {
+        // ID of the BAM file this initializer will open.
+        final SAMReaderID readerID;
+        // Backing stream for the reader; remains null unless I/O threads are enabled.
+        BlockInputStream blockInputStream = null;
+        // The opened reader; populated by call().
+        SamReader reader;
+
+        public ReaderInitializer(final SAMReaderID readerID) {
+            this.readerID = readerID;
+        }
+
+        /**
+         * Opens the reader, converting I/O and format errors into UserExceptions.
+         * @return this initializer, with reader (and possibly blockInputStream) populated.
+         */
+        public ReaderInitializer call() {
+            try {
+                if (threadAllocation.getNumIOThreads() > 0)
+                    blockInputStream = new BlockInputStream(dispatcher,readerID,false);
+                reader = SamReaderFactory.makeDefault()
+                        .referenceSequence(referenceFile)
+                        .validationStringency(validationStringency)
+                        .setOption(SamReaderFactory.Option.EAGERLY_DECODE, false)
+                        .setOption(SamReaderFactory.Option.INCLUDE_SOURCE_IN_RECORDS, true)
+                        .open(readerID.getSamFile());
+
+            } catch ( RuntimeIOException e ) {
+                throw new UserException.CouldNotReadInputFile(readerID.getSamFile(), e);
+            } catch ( SAMFormatException e ) {
+                throw new UserException.MalformedBAM(readerID.getSamFile(), e.getMessage());
+            }
+            // Picard is throwing a RuntimeException here when BAMs are malformed with bad headers (and so look like SAM files).
+            // Let's keep this separate from the SAMFormatException (which ultimately derives from RuntimeException) case,
+            // just in case we want to change this behavior later.
+            catch ( RuntimeException e ) {
+                throw new UserException.MalformedBAM(readerID.getSamFile(), e.getMessage());
+            }
+            return this;
+        }
+    }
+
+    /**
+     * An iterator that, when closed, both closes the iterator it delegates to and returns
+     * the SAMReaders set it was drawing from back to the shared resource pool.
+     */
+    private class ReleasingIterator implements GATKSAMIterator {
+        /**
+         * The reader set supplying the data; handed back to resourcePool on close().
+         */
+        private final SAMReaders resource;
+
+        /**
+         * The underlying record iterator being delegated to.
+         */
+        private final GATKSAMIterator wrappedIterator;
+
+        public ReleasingIterator(SAMReaders resource, GATKSAMIterator wrapped) {
+            this.resource = resource;
+            this.wrappedIterator = wrapped;
+        }
+
+        @Override
+        public ReleasingIterator iterator() {
+            return this;
+        }
+
+        @Override
+        public void remove() {
+            throw new UnsupportedOperationException("Can't remove from a GATKSAMIterator");
+        }
+
+        @Override
+        public void close() {
+            // Close the delegate first, then hand the readers back so they can be reused.
+            wrappedIterator.close();
+            resourcePool.releaseReaders(resource);
+        }
+
+        @Override
+        public boolean hasNext() {
+            return wrappedIterator.hasNext();
+        }
+
+        @Override
+        public SAMRecord next() {
+            return wrappedIterator.next();
+        }
+    }
+
+    /**
+     * Maps read groups in the original SAMFileReaders to read groups in the merged header
+     * (original read group id -> remapped read group id).
+     */
+    private class ReadGroupMapping extends HashMap<String,String> {}
+
+    /**
+     * Locates the index file alongside the given BAM, if present.
+     * @param dataFile the BAM file whose companion index should be located.
+     * @return a File pointing at the index if one exists next to the BAM; null otherwise.
+     */
+    private File findIndexFile(File dataFile) {
+        return SamFiles.findIndex(dataFile);
+    }
+
+    /**
+     * Creates a BAM schedule over every read -- mapped and unmapped -- in all BAM files.
+     * The outgoing stream will be as granular as possible given our current knowledge of
+     * the best ways to split up BAM files.
+     * @param shardBalancer balancer that groups granular file pointers into shards.
+     * @return the initialized shard balancer, iterable over all reads in all BAM files.
+     */
+    public Iterable<Shard> createShardIteratorOverAllReads(final ShardBalancer shardBalancer) {
+        shardBalancer.initialize(this, IntervalSharder.shardOverAllReads(this, genomeLocParser), genomeLocParser);
+        return shardBalancer;
+    }
+
+    /**
+     * Creates a BAM schedule over all mapped reads in the BAM file, where a 'mapped' read
+     * is any read that has been assigned a position on a reference contig.
+     *
+     * @param   shardBalancer  shard balancer object
+     * @return non-null initialized version of the shard balancer
+     */
+    public Iterable<Shard> createShardIteratorOverMappedReads(final ShardBalancer shardBalancer) {
+        shardBalancer.initialize(this, IntervalSharder.shardOverMappedReads(this, genomeLocParser), genomeLocParser);
+        return shardBalancer;
+    }
+
+    /**
+     * Create a schedule for processing the initialized BAM file using the given interval list.
+     * The returned schedule should be as granular as possible.
+     * @param intervals the intervals for which to create the schedule; must not be null.
+     * @param shardBalancer balancer that groups granular file pointers into shards.
+     * @return a granular iterator over file pointers covering the requested intervals.
+     */
+    public Iterable<Shard> createShardIteratorOverIntervals(final GenomeLocSortedSet intervals, final ShardBalancer shardBalancer) {
+        if (intervals == null)
+            throw new ReviewedGATKException("Unable to create schedule from intervals; no intervals were provided.");
+        // 'this' is the enclosing SAMDataSource instance (the original spelled it SAMDataSource.this).
+        shardBalancer.initialize(this, IntervalSharder.shardOverIntervals(this, intervals, intervalMergingRule), genomeLocParser);
+        return shardBalancer;
+    }
+}
+
+
+
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/Shard.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/Shard.java
new file mode 100644
index 0000000..a8f46fa
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/Shard.java
@@ -0,0 +1,254 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.datasources.reads;
+
+import htsjdk.samtools.util.PeekableIterator;
+import htsjdk.samtools.SAMFileSpan;
+import htsjdk.samtools.SAMRecord;
+import org.broadinstitute.gatk.engine.ReadMetrics;
+import org.broadinstitute.gatk.engine.ReadProperties;
+import org.broadinstitute.gatk.utils.iterators.GATKSAMIterator;
+import org.broadinstitute.gatk.utils.GenomeLoc;
+import org.broadinstitute.gatk.utils.GenomeLocParser;
+import org.broadinstitute.gatk.utils.HasGenomeLocation;
+import org.broadinstitute.gatk.utils.sam.SAMReaderID;
+
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
+/**
+ *
+ * User: aaron
+ * Date: Apr 10, 2009
+ * Time: 5:00:27 PM
+ *
+ * The Broad Institute
+ * SOFTWARE COPYRIGHT NOTICE AGREEMENT 
+ * This software and its documentation are copyright 2009 by the
+ * Broad Institute/Massachusetts Institute of Technology. All rights are reserved.
+ *
+ * This software is supplied without any warranty or guaranteed support whatsoever. Neither
+ * the Broad Institute nor MIT can be responsible for its use, misuse, or functionality.
+ *
+ */
+
+/**
+ * The base abstract class for shards: a unit of work spanning one or more genomic
+ * intervals, backed by file spans into the underlying BAM files.
+ *
+ * @author aaron
+ * @version 1.0
+ * @date Apr 10, 2009
+ */
+public abstract class Shard implements HasGenomeLocation {
+    public enum ShardType {
+        READ, LOCUS
+    }
+
+    protected final GenomeLocParser parser; // incredibly annoying!
+
+    /**
+     * What type of shard is this?  Read or locus?
+     */
+    protected final ShardType shardType;
+
+    /**
+     * Locations covered by this shard.  May be null, indicating the entire genome.
+     */
+    protected final List<GenomeLoc> locs;
+
+    /**
+     * Whether the current location is unmapped.
+     */
+    private final boolean isUnmapped;
+
+    /**
+     * Reads data, if applicable.
+     */
+    private final SAMDataSource readsDataSource;
+
+    /**
+     * The data backing the next chunks to deliver to the traversal engine.
+     */
+    private final Map<SAMReaderID,SAMFileSpan> fileSpans;
+
+    /**
+     * Lazy-calculated span of all of the genome locs in this shard.
+     */
+    private GenomeLoc spanningLocation = null;
+
+    /**
+     * Statistics about which reads in this shards were used and which were filtered away.
+     */
+    protected final ReadMetrics readMetrics = new ReadMetrics();
+
+    /**
+     * Whether this shard points to an unmapped region.
+     * Some shard types cannot conceptually be unmapped (e.g. LocusShards); for those,
+     * isUnmapped should always return false.
+     * @return True if this shard is unmapped.  False otherwise.
+     */
+    public boolean isUnmapped() {
+        return isUnmapped;
+    }
+
+    public Shard(GenomeLocParser parser,
+                 ShardType shardType,
+                 List<GenomeLoc> locs,
+                 SAMDataSource readsDataSource,
+                 Map<SAMReaderID,SAMFileSpan> fileSpans,
+                 boolean isUnmapped) {
+        this.locs = locs;
+        this.parser = parser;
+        this.shardType = shardType;
+        this.readsDataSource = readsDataSource;
+        this.fileSpans = fileSpans;
+        this.isUnmapped = isUnmapped;
+    }
+
+    /**
+     * If isUnmapped is true, than getGenomeLocs by
+     * definition will return a singleton list with a GenomeLoc.UNMAPPED
+     *
+     * Can return null, indicating that the entire genome is covered.
+     *
+     * @return the genome location represented by this shard
+     */
+    public List<GenomeLoc> getGenomeLocs() {
+        return locs;
+    }
+
+    /**
+     * Get the list of chunks delimiting this shard.
+     * @return an unmodifiable view of the chunks that contain data for this shard.
+     */
+    public Map<SAMReaderID,SAMFileSpan> getFileSpans() {
+        return Collections.unmodifiableMap(fileSpans);
+    }
+
+    /**
+     * Returns the span of the genomeLocs comprising this shard.
+     * @return a GenomeLoc that starts at the first position in getGenomeLocs() and stops at the
+     *    stop of the last position in getGenomeLocs(); GenomeLoc.WHOLE_GENOME if getGenomeLocs()
+     *    is null; the unmapped marker itself if any loc is unmapped.
+     * @throws IllegalStateException if this shard has an empty (non-null) interval list.
+     */
+    public GenomeLoc getLocation() {
+        if ( spanningLocation == null ) {
+            if ( getGenomeLocs() == null )
+                spanningLocation = GenomeLoc.WHOLE_GENOME;
+            else if ( getGenomeLocs().isEmpty() ) {
+                // BUGFIX: the original code called getGenomeLocs().get(0) in this branch, which can
+                // only throw IndexOutOfBoundsException on an empty list.  Fail with a clear message.
+                throw new IllegalStateException("Cannot compute the spanning location of a shard with no intervals");
+            } else {
+                int start = Integer.MAX_VALUE;
+                int stop = Integer.MIN_VALUE;
+                String contig = null;
+
+                for ( GenomeLoc loc : getGenomeLocs() ) {
+                    if ( GenomeLoc.isUnmapped(loc) )
+                        // special case the unmapped region marker, just abort out
+                        return loc;
+                    contig = loc.getContig();
+                    if ( loc.getStart() < start ) start = loc.getStart();
+                    if ( loc.getStop() > stop ) stop = loc.getStop();
+                }
+
+                spanningLocation = parser.createGenomeLoc(contig, start, stop);
+            }
+        }
+
+        return spanningLocation;
+    }
+
+
+    /**
+     * what kind of shard do we return
+     * @return ShardType, indicating the type
+     */
+    public ShardType getShardType() {
+        return shardType;
+    }
+
+    /**
+     * Does any releasing / aggregation required when the shard is through being processed.
+     */
+    public void close() {
+        readsDataSource.incorporateReadMetrics(readMetrics);
+    }
+
+    /**
+     * Gets key read validation and filtering properties.
+     * @return set of read properties associated with this shard.
+     */
+    public ReadProperties getReadProperties() {
+        return readsDataSource.getReadsInfo();
+    }
+
+    /**
+     * Gets the runtime metrics associated with this shard.
+     * Retrieves a storage space of metrics about number of reads included, filtered, etc.
+     * @return Storage space for metrics.
+     */
+    public ReadMetrics getReadMetrics() {
+        return readMetrics;
+    }
+
+    /**
+     * Returns true if this shard is meant to buffer reads, rather
+     * than just holding pointers to their locations.
+     * @return True if this shard can buffer reads.  False otherwise.
+     */
+    public boolean buffersReads() { return false; }
+
+    /**
+     * Returns true if the read buffer is currently empty.
+     * @return True if this shard's buffer is empty (and the shard can buffer reads).
+     */
+    public boolean isBufferEmpty() { throw new UnsupportedOperationException("This shard does not buffer reads."); }
+
+    /**
+     * Returns true if the read buffer is currently full.
+     * @return True if this shard's buffer is full (and the shard can buffer reads).
+     */
+    public boolean isBufferFull() { throw new UnsupportedOperationException("This shard does not buffer reads."); }
+
+    /**
+     * Adds a read to the read buffer.
+     * @param read Add a read to the internal shard buffer.
+     */
+    public void addRead(SAMRecord read) { throw new UnsupportedOperationException("This shard does not buffer reads."); }
+
+    /**
+     * Fills the shard with reads. Can only do this with shards that buffer reads
+     * @param readIter Iterator from which to draw the reads to fill the shard
+     */
+    public void fill( PeekableIterator<SAMRecord> readIter ) { throw new UnsupportedOperationException("This shard does not buffer reads."); }
+
+    /**
+     * Gets the iterator over the elements cached in the shard.
+     * @return an iterator over the buffered reads; only supported by buffering shard types.
+     */
+    public GATKSAMIterator iterator() { throw new UnsupportedOperationException("This shard does not buffer reads."); }
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/ShardBalancer.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/ShardBalancer.java
new file mode 100644
index 0000000..fee842d
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/ShardBalancer.java
@@ -0,0 +1,49 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.datasources.reads;
+
+import htsjdk.samtools.util.PeekableIterator;
+import org.broadinstitute.gatk.utils.GenomeLocParser;
+
+import java.util.Iterator;
+
+/**
+ * Balances maximally granular file pointers into shards of reasonable size.
+ */
+public abstract class ShardBalancer implements Iterable<Shard> {
+    protected SAMDataSource readsDataSource;
+    protected PeekableIterator<FilePointer> filePointers;
+    protected GenomeLocParser parser;
+
+    public void initialize(final SAMDataSource readsDataSource, final Iterator<FilePointer> filePointers, final GenomeLocParser parser) {
+        this.readsDataSource = readsDataSource;
+        this.filePointers = new PeekableIterator<FilePointer>(filePointers);
+        this.parser = parser;
+    }
+    public void close() {
+      this.filePointers.close();
+    }
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/package-info.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/package-info.java
new file mode 100644
index 0000000..399737b
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/package-info.java
@@ -0,0 +1,26 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.datasources.reads;
\ No newline at end of file
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/utilities/BAMFileStat.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/utilities/BAMFileStat.java
new file mode 100644
index 0000000..69ba13c
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/utilities/BAMFileStat.java
@@ -0,0 +1,185 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.datasources.reads.utilities;
+
+import htsjdk.samtools.BAMIndex;
+import htsjdk.samtools.SAMFileReader;
+import htsjdk.samtools.ValidationStringency;
+import org.broadinstitute.gatk.utils.commandline.Argument;
+import org.broadinstitute.gatk.utils.commandline.CommandLineProgram;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+import org.broadinstitute.gatk.utils.instrumentation.Sizeof;
+
+import java.io.File;
+import java.lang.reflect.Field;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Command-line utility for inspecting BAM files: displays the bin and chunk
+ * structure of a cached BAM index (the BGZF block inspector is disabled).
+ * @author mhanna
+ * @version 0.1
+ */
+public class BAMFileStat extends CommandLineProgram {
+    public enum CommandType { ShowBlocks, ShowIndex }
+
+    @Argument(doc="Which operation to run.",required=true)
+    private CommandType command;
+
+    @Argument(doc="The BAM file to inspect.",required=true)
+    private String bamFileName;
+
+    @Argument(doc="The range to inspect.",required=false)
+    private String range;
+
+    public int execute() {
+        switch(command) {
+            case ShowBlocks:
+                throw new ReviewedGATKException("The BAM block inspector has been disabled.");
+            case ShowIndex:
+                showIndexBins(new File(bamFileName),range);
+                break;
+        }
+        return 0;
+    }
+
+    /**
+     * Required main method implementation.
+     * @param argv Command-line arguments.
+     */
+    public static void main(String[] argv) {
+        try {
+            BAMFileStat instance = new BAMFileStat();
+            start(instance, argv);
+            System.exit(CommandLineProgram.result);
+        } catch (Exception e) {
+            exitSystemWithError(e);
+        }
+    }
+
+    private void showIndexBins(File bamFile,String contigName) {
+        SAMFileReader reader;
+        BAMIndex index;
+
+        reader = new SAMFileReader(bamFile);
+        reader.setValidationStringency(ValidationStringency.SILENT);
+        reader.enableIndexCaching(true);
+        index = reader.getIndex();
+
+        reader.queryOverlapping(contigName,1,reader.getFileHeader().getSequence(contigName).getSequenceLength()).close();
+
+        int numBins = 0;
+        int numChunks = 0;
+        int numLinearIndexEntries = 0;
+
+        try {
+            Field[] fields = index.getClass().getDeclaredFields();
+            for(Field field: fields) {
+                if(field.getName().equals("mLastReferenceRetrieved")) {
+                    field.setAccessible(true);
+                    Integer lastReferenceRetrieved = (Integer)field.get(index);
+                    System.out.printf("Last reference retrieved: %d%n", lastReferenceRetrieved);
+                }
+
+                if(field.getName().equals("mQueriesByReference")) {
+                    field.setAccessible(true);
+                    Map<Integer,Object> cachedQueries = (Map<Integer,Object>)field.get(index);
+
+                    for(Object bamIndexContent: cachedQueries.values()) {
+                        List<Object> bins = null;
+                        Map<Object,Object> binToChunkMap = null;
+                        Object linearIndex = null;
+
+                        Field[] indexContentFields = bamIndexContent.getClass().getDeclaredFields();
+                        for(Field indexContentField: indexContentFields) {
+                            if(indexContentField.getName().equals("mReferenceSequence")) {
+                                indexContentField.setAccessible(true);
+                                System.out.printf("Reference sequence: %d%n", indexContentField.getInt(bamIndexContent));
+                            }
+
+                            if(indexContentField.getName().equals("mBins")) {
+                                indexContentField.setAccessible(true);
+                                bins = (List<Object>)indexContentField.get(bamIndexContent);
+                            }
+
+                            if(indexContentField.getName().equals("mBinToChunks")) {
+                                indexContentField.setAccessible(true);
+                                binToChunkMap = (Map<Object,Object>)indexContentField.get(bamIndexContent);
+                            }
+
+                            if(indexContentField.getName().equals("mLinearIndex")) {
+                                indexContentField.setAccessible(true);
+                                linearIndex = indexContentField.get(bamIndexContent);
+                            }
+                        }
+
+                        numBins = bins.size();
+                        for(Object bin: bins) {
+                            int binNumber;
+
+                            Field[] binFields = bin.getClass().getDeclaredFields();
+                            for(Field binField: binFields) {
+                                if(binField.getName().equals("binNumber")) {
+                                    binField.setAccessible(true);
+                                    binNumber = binField.getInt(bin);
+                                    List<Object> chunks = (List<Object>)binToChunkMap.get(bin);
+                                    System.out.printf("\tBin: %d, number of chunks: %d%n",binNumber,chunks.size());
+                                    for(Object chunk: chunks)
+                                        System.out.printf("\t\tChunk: %s%n",chunk);
+                                    numChunks += chunks.size();
+                                }
+                            }
+                        }
+
+                        Field[] linearIndexFields = linearIndex.getClass().getDeclaredFields();
+                        for(Field linearIndexField: linearIndexFields) {
+                            if(linearIndexField.getName().equals("mIndexEntries")) {
+                                linearIndexField.setAccessible(true);
+                                long[] linearIndexEntries = (long[])linearIndexField.get(linearIndex);
+                                System.out.printf("\t\tIndex entries: %d", linearIndexEntries.length);
+                                for(long indexEntry: linearIndexEntries)
+                                    System.out.printf("%d,",indexEntry);
+                                System.out.printf("%n");
+                                numLinearIndexEntries = linearIndexEntries.length;
+                            }
+                        }
+                    }
+                }
+            }
+        }
+        catch(IllegalAccessException ex) {
+            throw new ReviewedGATKException("Unable to examine cached index",ex);
+        }
+
+        System.out.printf("%nOverall: %d bins, %d chunks, %d linear index entries",numBins,numChunks,numLinearIndexEntries);
+        if(Sizeof.isEnabled())
+            System.out.printf(", total index size in bytes: %d",Sizeof.getObjectGraphSize(index));
+        System.out.println();
+
+        reader.close();
+    }
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/utilities/BAMTagRenamer.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/utilities/BAMTagRenamer.java
new file mode 100644
index 0000000..5c601ef
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/utilities/BAMTagRenamer.java
@@ -0,0 +1,100 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.datasources.reads.utilities;
+
+import htsjdk.samtools.SAMFileReader;
+import htsjdk.samtools.SAMFileWriter;
+import htsjdk.samtools.SAMFileWriterFactory;
+import htsjdk.samtools.SAMRecord;
+import org.broadinstitute.gatk.utils.commandline.Argument;
+import org.broadinstitute.gatk.utils.commandline.CommandLineProgram;
+
+import java.io.File;
+
+/**
+ * A simple utility written directly in Picard that will rename tags
+ * from one name to another.
+ *
+ * @author hanna
+ * @version 0.1
+ */
+
+public class BAMTagRenamer extends CommandLineProgram {
+    @Argument(fullName="input",shortName="I",doc="Input file to process",required=true)
+    private File input = null;
+
+    @Argument(fullName="output",shortName="O",doc="Output file to create",required=true)
+    private File output = null;
+
+    @Argument(fullName="bam_compression",shortName="compress",doc="Compression level to use when writing the BAM file.",required=false)
+    private int compressionLevel = 5;
+
+    @Argument(fullName="original_tag_name",shortName="otn",doc="Tag name to be replaced.",required=true)
+    private String sourceTagName = null;
+
+    @Argument(fullName="replacement_tag_name",shortName="rtn",doc="Tag name to be used as a replacement.",required=true)
+    private String targetTagName = null;
+
+    public int execute() {
+        long readsWritten = 0;
+        long readsAltered = 0;
+
+        SAMFileReader reader = new SAMFileReader(input);
+        SAMFileWriter writer = new SAMFileWriterFactory().makeBAMWriter(reader.getFileHeader(),true,output,compressionLevel);
+
+        for(SAMRecord read: reader) {
+            Object value = read.getAttribute(sourceTagName);
+            if(value != null) {
+                read.setAttribute(sourceTagName,null);
+                read.setAttribute(targetTagName,value);
+                readsAltered++;
+            }
+            writer.addAlignment(read);
+            readsWritten++;
+            if(readsWritten % 1000000 == 0)
+                System.out.printf("%d reads written.  %d tag names updated from %s to %s.%n",readsWritten,readsAltered,sourceTagName,targetTagName);
+        }
+
+        writer.close();
+        System.out.printf("%d reads written.  %d tag names updated from %s to %s.%n",readsWritten,readsAltered,sourceTagName,targetTagName);        
+
+        return 0;
+    }
+
+    /**
+     * Required main method implementation.
+     */
+    public static void main(String[] argv) {
+        BAMTagRenamer instance = new BAMTagRenamer();
+        try {
+            start(instance, argv);
+        } catch (Exception e) {
+            throw new RuntimeException(e);
+        }
+
+        System.exit(CommandLineProgram.result);
+    }
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/utilities/FindLargeShards.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/utilities/FindLargeShards.java
new file mode 100644
index 0000000..5aaba79
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/utilities/FindLargeShards.java
@@ -0,0 +1,192 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.datasources.reads.utilities;
+
+import htsjdk.samtools.reference.IndexedFastaSequenceFile;
+import org.apache.log4j.Logger;
+import org.broadinstitute.gatk.utils.commandline.CommandLineProgram;
+import org.broadinstitute.gatk.utils.commandline.Input;
+import org.broadinstitute.gatk.utils.commandline.Output;
+import org.broadinstitute.gatk.engine.datasources.reads.FilePointer;
+import org.broadinstitute.gatk.engine.datasources.reads.IntervalSharder;
+import org.broadinstitute.gatk.engine.datasources.reads.SAMDataSource;
+import org.broadinstitute.gatk.utils.sam.SAMReaderID;
+import org.broadinstitute.gatk.engine.resourcemanagement.ThreadAllocation;
+import org.broadinstitute.gatk.utils.GenomeLoc;
+import org.broadinstitute.gatk.utils.GenomeLocParser;
+import org.broadinstitute.gatk.utils.GenomeLocSortedSet;
+import org.broadinstitute.gatk.utils.interval.IntervalMergingRule;
+import org.broadinstitute.gatk.utils.interval.IntervalUtils;
+import org.broadinstitute.gatk.utils.text.ListFileUtils;
+
+import java.io.File;
+import java.io.IOException;
+import java.io.PrintStream;
+import java.math.BigInteger;
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * Traverses a region in a dataset looking for outliers.
+ */
+public class FindLargeShards extends CommandLineProgram {
+    private static Logger logger = Logger.getLogger(FindLargeShards.class);
+
+    @Input(fullName = "input_file", shortName = "I", doc = "SAM or BAM file(s)", required = false)
+    public List<String> samFiles = new ArrayList<String>();
+
+    @Input(fullName = "reference_sequence", shortName = "R", doc = "Reference sequence file", required = false)
+    public File referenceFile = null;
+
+    @Input(fullName = "intervals", shortName = "L", doc = "A list of genomic intervals over which to operate. Can be explicitly specified on the command line or in a file.",required=false)
+    public List<String> intervals = null;
+
+    @Output(required=false)
+    public PrintStream out = System.out;
+
+    /**
+     * The square of the sum of all uncompressed data.  Based on the BAM spec, the size of this could be
+     * up to (2^64)^2.
+     */
+    private BigInteger sumOfSquares = BigInteger.valueOf(0);
+
+    /**
+     * The running sum of all uncompressed data.  Based on the BAM spec, the BAM must be less than Long.MAX_LONG
+     * when compressed -- in other words, the sum of the sizes of all BGZF blocks must be < 2^64.
+     */
+    private BigInteger sum = BigInteger.valueOf(0);
+
+    /**
+     * The number of shards viewed.
+     */
+    private long numberOfShards;
+
+
+    @Override
+    public int execute() throws IOException {
+        // initialize reference
+        IndexedFastaSequenceFile refReader = new IndexedFastaSequenceFile(referenceFile);
+        GenomeLocParser genomeLocParser = new GenomeLocParser(refReader);        
+
+        // initialize reads
+        List<SAMReaderID> bamReaders = ListFileUtils.unpackBAMFileList(samFiles,parser);
+        SAMDataSource dataSource = new SAMDataSource(referenceFile, bamReaders, new ThreadAllocation(), null, genomeLocParser);
+
+        // intervals
+        final GenomeLocSortedSet intervalSortedSet;
+        if ( intervals != null )
+            intervalSortedSet = IntervalUtils.sortAndMergeIntervals(genomeLocParser, IntervalUtils.parseIntervalArguments(genomeLocParser, intervals), IntervalMergingRule.ALL);
+        else
+            intervalSortedSet = GenomeLocSortedSet.createSetFromSequenceDictionary(refReader.getSequenceDictionary());
+
+        logger.info(String.format("PROGRESS: Calculating mean and variance: Contig\tRegion.Start\tRegion.Stop\tSize"));        
+
+        IntervalSharder sharder = IntervalSharder.shardOverIntervals(dataSource,intervalSortedSet,IntervalMergingRule.ALL);
+        while(sharder.hasNext()) {
+            FilePointer filePointer = sharder.next();
+
+            // Size of the file pointer.
+            final long size = filePointer.size();            
+
+            BigInteger bigSize = BigInteger.valueOf(size);
+            sumOfSquares = sumOfSquares.add(bigSize.pow(2));
+            sum = sum.add(bigSize);
+            numberOfShards++;
+
+            if(numberOfShards % 1000 == 0) {
+                GenomeLoc boundingRegion = getBoundingRegion(filePointer,genomeLocParser);
+                logger.info(String.format("PROGRESS: Calculating mean and variance: %s\t%d\t%d\t%d",boundingRegion.getContig(),boundingRegion.getStart(),boundingRegion.getStop(),size));
+            }
+
+        }
+
+        // Print out the stddev: (sum(x^2) - (1/N)*sum(x)^2)/N
+        long mean = sum.divide(BigInteger.valueOf(numberOfShards)).longValue();
+        long stddev = (long)(Math.sqrt(sumOfSquares.subtract(sum.pow(2).divide(BigInteger.valueOf(numberOfShards))).divide(BigInteger.valueOf(numberOfShards)).doubleValue()));
+        logger.info(String.format("Number of shards: %d; mean uncompressed size = %d; stddev uncompressed size  = %d%n",numberOfShards,mean,stddev));
+
+        // Crank through the shards again, this time reporting on the shards significantly larger than the mean.
+        long threshold = mean + stddev*5;
+        logger.warn(String.format("PROGRESS: Searching for large shards: Contig\tRegion.Start\tRegion.Stop\tSize"));
+        out.printf("Contig\tRegion.Start\tRegion.Stop\tSize%n");
+
+        sharder =  IntervalSharder.shardOverIntervals(dataSource,intervalSortedSet,IntervalMergingRule.ALL);
+        while(sharder.hasNext()) {
+            FilePointer filePointer = sharder.next();
+
+            // Bounding region.
+            GenomeLoc boundingRegion = getBoundingRegion(filePointer,genomeLocParser);
+
+            // Size of the file pointer.
+            final long size = filePointer.size();            
+
+            numberOfShards++;
+
+            if(filePointer.size() <= threshold) {
+                if(numberOfShards % 1000 == 0) 
+                    logger.info(String.format("PROGRESS: Searching for large shards: %s\t%d\t%d\t%d",boundingRegion.getContig(),boundingRegion.getStart(),boundingRegion.getStop(),size));
+                continue;
+            }
+
+            out.printf("%s\t%d\t%d\t%d%n",boundingRegion.getContig(),boundingRegion.getStart(),boundingRegion.getStop(),size);
+        }
+
+        return 0;
+    }
+
+    private GenomeLoc getBoundingRegion(final FilePointer filePointer, final GenomeLocParser genomeLocParser) {
+        List<GenomeLoc> regions = filePointer.getLocations();
+
+        // The region contained by this FilePointer.
+        final String contig = regions.get(0).getContig();
+        final int start = regions.get(0).getStart();
+        final int stop = regions.get(regions.size()-1).getStop();
+
+        return genomeLocParser.createGenomeLoc(contig,start,stop);
+    }
+
+    /**
+     * Required main method implementation.
+     * @param argv Command-line argument text.
+     * @throws Exception on error.
+     */
+    public static void main(String[] argv) throws Exception {
+        int returnCode = 0;
+        try {
+            FindLargeShards instance = new FindLargeShards();
+            start(instance, argv);
+            returnCode = 0;
+        }
+        catch(Exception ex) {
+            returnCode = 1;
+            ex.printStackTrace();
+            throw ex;
+        }
+        finally {
+            System.exit(returnCode);
+        }
+    }    
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/utilities/PrintBAMRegion.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/utilities/PrintBAMRegion.java
new file mode 100644
index 0000000..9545412
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/utilities/PrintBAMRegion.java
@@ -0,0 +1,113 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.datasources.reads.utilities;
+
+import htsjdk.samtools.*;
+import org.broadinstitute.gatk.utils.commandline.Argument;
+import org.broadinstitute.gatk.utils.commandline.CommandLineProgram;
+import org.broadinstitute.gatk.utils.exceptions.UserException;
+
+import java.io.File;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+/**
+ * Prints all reads that fall within a given region of a BAM file, where the
+ * region is specified in BGZF chunk format (mmmm:nn-xxxx:yy).
+ *
+ * @author mhanna
+ * @version 0.1
+ */
+public class PrintBAMRegion extends CommandLineProgram {
+    @Argument(fullName="input",shortName="I",doc="Input file to process",required=true)
+    private File input = null;
+
+    @Argument(fullName="region",shortName="R",doc="BAM region to process, in chunk format (mmmm:nn-xxxx:yy)",required=true)
+    private String region;
+
+    private static final long MIN_BLOCK_SIZE = 0;
+    private static final long MAX_BLOCK_SIZE = (long)Math.pow(2,48)-1;
+    private static final int MIN_OFFSET_SIZE = 0;
+    private static final int MAX_OFFSET_SIZE = (int)Math.pow(2,16)-1;
+
+    public int execute() {
+        SAMFileReader reader = new SAMFileReader(input);
+        reader.setValidationStringency(ValidationStringency.SILENT);
+
+        Pattern regionPattern = Pattern.compile("(\\d+):(\\d+)-(\\d+):(\\d+)");
+        Matcher matcher = regionPattern.matcher(region);
+        if(!matcher.matches())
+            throw new UserException("BAM region to process must be in chunk format (mmmm:nn-xxxx:yy)");
+
+        long firstBlock = Long.parseLong(matcher.group(1));
+        int firstOffset = Integer.parseInt(matcher.group(2));
+        long lastBlock = Long.parseLong(matcher.group(3));
+        int lastOffset = Integer.parseInt(matcher.group(4));
+
+        if(firstBlock < MIN_BLOCK_SIZE || firstBlock > MAX_BLOCK_SIZE)
+            throw new UserException(String.format("First block is invalid; must be between %d and %d; actually is %d",MIN_BLOCK_SIZE,MAX_BLOCK_SIZE,firstBlock));
+        if(lastBlock < MIN_BLOCK_SIZE || lastBlock > MAX_BLOCK_SIZE)
+            throw new UserException(String.format("Last block is invalid; must be between %d and %d; actually is %d",MIN_BLOCK_SIZE,MAX_BLOCK_SIZE,lastBlock));
+        if(firstOffset < MIN_OFFSET_SIZE || firstOffset > MAX_OFFSET_SIZE)
+            throw new UserException(String.format("First offset is invalid; must be between %d and %d; actually is %d",MIN_OFFSET_SIZE,MAX_OFFSET_SIZE,firstOffset));
+        if(lastOffset < MIN_OFFSET_SIZE || lastOffset > MAX_OFFSET_SIZE)
+            throw new UserException(String.format("Last offset is invalid; must be between %d and %d; actually is %d",MIN_OFFSET_SIZE,MAX_OFFSET_SIZE,lastOffset));
+
+        GATKChunk chunk = new GATKChunk(firstBlock<<16 | firstOffset,lastBlock<<16 | lastOffset);
+        GATKBAMFileSpan fileSpan = new GATKBAMFileSpan(chunk);
+
+        SAMRecordIterator iterator = reader.iterator(fileSpan);
+        long readCount = 0;
+        while(iterator.hasNext()) {
+            System.out.printf("%s%n",iterator.next().format());
+            readCount++;
+        }
+        System.out.printf("%d reads shown.",readCount);
+
+        iterator.close();
+        reader.close();
+
+        return 0;
+    }
+
+
+    /**
+     * Required main method implementation.
+     * @param argv Command-line argument text.
+     * @throws Exception on error.
+     */
+    public static void main(String[] argv) throws Exception {
+        try {
+            PrintBAMRegion instance = new PrintBAMRegion();
+            start(instance, argv);
+            System.exit(0);
+        }
+        catch(Exception ex) {
+            ex.printStackTrace();
+            System.exit(1);
+        }
+    }
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/utilities/PrintBGZFBounds.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/utilities/PrintBGZFBounds.java
new file mode 100644
index 0000000..01299ae
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/utilities/PrintBGZFBounds.java
@@ -0,0 +1,137 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.datasources.reads.utilities;
+
+import org.broadinstitute.gatk.utils.commandline.Argument;
+import org.broadinstitute.gatk.utils.commandline.CommandLineProgram;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+
+/**
+ * Calculates the bounds of each BGZF block in a BGZF-compressed file (such as a BAM
+ * or BAM index), along with summary compressed/uncompressed size statistics.
+ */
+public class PrintBGZFBounds extends CommandLineProgram {
+    @Argument(fullName="input",shortName="I",doc="Input bai file to process",required=true)
+    private File input = null;
+
+    private final int BYTE_SIZE_IN_BYTES = Byte.SIZE / 8;
+    private final int INT_SIZE_IN_BYTES = Integer.SIZE / 8;
+    private final int SHORT_SIZE_IN_BYTES = INT_SIZE_IN_BYTES / 2;
+
+    /**
+     * ID1 + ID2 + CM + FLG + MTIME + XFL + OS + XLEN.
+     */
+    private final int HEADER_SIZE = BYTE_SIZE_IN_BYTES*4+INT_SIZE_IN_BYTES+BYTE_SIZE_IN_BYTES*2+SHORT_SIZE_IN_BYTES + BYTE_SIZE_IN_BYTES*2 + SHORT_SIZE_IN_BYTES*2;;
+
+    /**
+     * CRC32 + ISIZE
+     */
+    private final int FOOTER_SIZE = INT_SIZE_IN_BYTES*2;
+
+    @Override
+    public int execute() throws IOException {
+        FileInputStream fis = new FileInputStream(input);
+        ByteBuffer headerBuffer = allocateBuffer(HEADER_SIZE);
+        ByteBuffer footerBuffer = allocateBuffer(FOOTER_SIZE);
+
+        float compressedSize = 0;
+        float uncompressedSize = 0;
+        long totalBlocks = 0;
+
+        //SAMFileReader reader = new SAMFileReader(input);
+
+        while(true) {
+            final long blockStart = fis.getChannel().position();
+
+            int totalRead = fis.getChannel().read(headerBuffer);
+            if(totalRead <= 0)
+                break;
+            headerBuffer.flip();
+
+            // Read out header information, including subfield IDs.
+            headerBuffer.position(headerBuffer.capacity()-BYTE_SIZE_IN_BYTES*2);
+            final int cDataSize = headerBuffer.getShort()-HEADER_SIZE-FOOTER_SIZE+1;
+            compressedSize += cDataSize;
+
+            // Skip past body.
+            fis.getChannel().position(fis.getChannel().position()+cDataSize);
+
+            // Read the footer
+            fis.getChannel().read(footerBuffer);
+            footerBuffer.flip();
+
+            // Retrieve the uncompressed size from the footer.
+            footerBuffer.position(footerBuffer.capacity()-INT_SIZE_IN_BYTES);
+            uncompressedSize += footerBuffer.getInt();
+
+            // Reset buffers for subsequent reads.
+            headerBuffer.flip();
+            footerBuffer.flip();
+
+            totalBlocks++;
+
+            final long blockStop = fis.getChannel().position() - 1;
+
+            System.out.printf("BGZF block %d: [%d-%d]%n",totalBlocks,blockStart,blockStop);
+        }
+
+        System.out.printf("SUCCESS!  Average compressed block size = %f, average uncompressed size = %f, compressed/uncompressed ratio: %f%n",compressedSize/totalBlocks,uncompressedSize/totalBlocks,compressedSize/uncompressedSize);
+
+        return 0;
+    }
+
+    private ByteBuffer allocateBuffer(final int size) {
+        ByteBuffer buffer = ByteBuffer.allocate(size);
+        buffer.order(ByteOrder.LITTLE_ENDIAN);
+        return buffer;
+    }
+
+    /**
+     * Required main method implementation.
+     * @param argv Command-line argument text.
+     * @throws Exception on error.
+     */
+    public static void main(String[] argv) throws Exception {
+        int returnCode = 0;
+        try {
+            PrintBGZFBounds instance = new PrintBGZFBounds();
+            start(instance, argv);
+            returnCode = 0;
+        }
+        catch(Exception ex) {
+            returnCode = 1;
+            ex.printStackTrace();
+            throw ex;
+        }
+        finally {
+            System.exit(returnCode);
+        }
+    }
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/utilities/UnzipSingleBlock.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/utilities/UnzipSingleBlock.java
new file mode 100644
index 0000000..399ea38
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/utilities/UnzipSingleBlock.java
@@ -0,0 +1,89 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.datasources.reads.utilities;
+
+import htsjdk.samtools.util.BlockGunzipper;
+import org.broadinstitute.gatk.utils.commandline.CommandLineProgram;
+import org.broadinstitute.gatk.utils.commandline.Input;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.lang.reflect.InvocationTargetException;
+import java.lang.reflect.Method;
+
+/**
+ * Test decompression of a single BGZF block.
+ */
+public class UnzipSingleBlock extends CommandLineProgram {
+    @Input(fullName = "block_file", shortName = "b", doc = "block file over which to test unzipping", required = true)
+    private File blockFile;
+
+    @Input(fullName = "compressed_block_size", shortName = "cbs", doc = "size of compressed block", required = true)
+    private int compressedBufferSize;
+
+    public int execute() throws IOException, NoSuchMethodException, IllegalAccessException, InvocationTargetException {
+        byte[] compressedBuffer = new byte[(int)blockFile.length()];
+        byte[] uncompressedBuffer = new byte[65536];
+
+        FileInputStream fis = new FileInputStream(blockFile);
+        fis.read(compressedBuffer);
+        fis.close();
+
+        BlockGunzipper gunzipper = new BlockGunzipper();
+        gunzipper.setCheckCrcs(true);
+        Method unzipBlock = BlockGunzipper.class.getDeclaredMethod("unzipBlock",byte[].class,byte[].class,Integer.TYPE);
+        unzipBlock.setAccessible(true);
+
+        unzipBlock.invoke(gunzipper,uncompressedBuffer,compressedBuffer,compressedBufferSize);
+
+        System.out.printf("SUCCESS!%n");
+
+        return 0;
+    }
+
+    /**
+     * Required main method implementation.
+     * @param argv Command-line argument text.
+     * @throws Exception on error.
+     */
+    public static void main(String[] argv) throws Exception {
+        int returnCode = 0;
+        try {
+            UnzipSingleBlock instance = new UnzipSingleBlock();
+            start(instance, argv);
+            returnCode = 0;
+        }
+        catch(Exception ex) {
+            returnCode = 1;
+            ex.printStackTrace();
+            throw ex;
+        }
+        finally {
+            System.exit(returnCode);
+        }
+    }
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/utilities/package-info.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/utilities/package-info.java
new file mode 100644
index 0000000..e50a770
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/utilities/package-info.java
@@ -0,0 +1,26 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.datasources.reads.utilities;
\ No newline at end of file
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/reference/ReferenceDataSource.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/reference/ReferenceDataSource.java
new file mode 100644
index 0000000..21d1c7c
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/reference/ReferenceDataSource.java
@@ -0,0 +1,166 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.datasources.reference;
+
+import htsjdk.samtools.reference.IndexedFastaSequenceFile;
+import htsjdk.samtools.SAMSequenceRecord;
+import org.broadinstitute.gatk.engine.datasources.reads.LocusShard;
+import org.broadinstitute.gatk.engine.datasources.reads.SAMDataSource;
+import org.broadinstitute.gatk.engine.datasources.reads.Shard;
+import org.broadinstitute.gatk.utils.GenomeLoc;
+import org.broadinstitute.gatk.utils.GenomeLocParser;
+import org.broadinstitute.gatk.utils.GenomeLocSortedSet;
+import org.broadinstitute.gatk.utils.exceptions.UserException;
+import org.broadinstitute.gatk.utils.fasta.CachingIndexedFastaSequenceFile;
+
+import java.io.File;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+
+/**
+ * Loads reference data from fasta file
+ * Looks for fai and dict files, and tries to create them if they don't exist
+ */
+public class ReferenceDataSource {
+    private IndexedFastaSequenceFile reference;
+
+    /** our log, which we want to capture anything from this class */
+    protected static final org.apache.log4j.Logger logger = org.apache.log4j.Logger.getLogger(ReferenceDataSource.class);
+
+    /**
+     * Create reference data source from fasta file
+     * @param fastaFile Fasta file to be used as reference
+     */
+    public ReferenceDataSource(File fastaFile) {
+        reference = CachingIndexedFastaSequenceFile.checkAndCreate(fastaFile);
+    }
+
+    /**
+     * Get indexed fasta file
+     * @return IndexedFastaSequenceFile that was created from file
+     */
+    public IndexedFastaSequenceFile getReference() {
+        return this.reference;
+    }
+
+    /**
+     * Creates an iterator for processing the entire reference.
+     * @param readsDataSource the reads datasource to embed in the locus shard.
+     * @param parser used to generate/regenerate intervals.  TODO: decouple the creation of the shards themselves from the creation of the driving iterator so that datasources need not be passed to datasources.
+     * @param maxShardSize The maximum shard size which can be used to create this list.
+     * @return Creates a schedule for performing a traversal over the entire reference.
+     */
+    public Iterable<Shard> createShardsOverEntireReference(final SAMDataSource readsDataSource, final GenomeLocParser parser, final int maxShardSize) {
+        List<Shard> shards = new ArrayList<Shard>();
+        for(SAMSequenceRecord refSequenceRecord: reference.getSequenceDictionary().getSequences()) {
+            for(int shardStart = 1; shardStart <= refSequenceRecord.getSequenceLength(); shardStart += maxShardSize) {
+                final int shardStop = Math.min(shardStart+maxShardSize-1, refSequenceRecord.getSequenceLength());
+                shards.add(new LocusShard(parser,
+                        readsDataSource,
+                        Collections.singletonList(parser.createGenomeLoc(refSequenceRecord.getSequenceName(),shardStart,shardStop)),
+                        null));
+            }
+        }
+        return shards;
+    }
+
+
+    public Iterable<Shard> createShardsOverIntervals(final SAMDataSource readsDataSource, final GenomeLocSortedSet intervals, final int maxShardSize) {
+        List<Shard> shards = new ArrayList<Shard>();
+
+        for(GenomeLoc interval: intervals) {
+            while(interval.size() > maxShardSize) {
+                shards.add(new LocusShard(intervals.getGenomeLocParser(),
+                        readsDataSource,
+                        Collections.singletonList(intervals.getGenomeLocParser().createGenomeLoc(interval.getContig(),interval.getStart(),interval.getStart()+maxShardSize-1)),
+                        null));
+                interval = intervals.getGenomeLocParser().createGenomeLoc(interval.getContig(),interval.getStart()+maxShardSize,interval.getStop());
+            }
+            shards.add(new LocusShard(intervals.getGenomeLocParser(),
+                    readsDataSource,
+                    Collections.singletonList(interval),
+                    null));
+        }
+
+        return shards;
+    }
+
+
+    /**
+     * Creates an iterator for processing the entire reference.
+     * @param readsDataSource  the reads datasource to embed in the locus shard.  TODO: decouple the creation of the shards themselves from the creation of the driving iterator so that datasources need not be passed to datasources.
+     * @param intervals        the list of intervals to use when processing the reference.
+     * @param targetShardSize  the suggested - and maximum - shard size which can be used to create this list; we will merge intervals greedily so that we generate shards up to but not greater than the target size.
+     * @return Creates a schedule for performing a traversal over the entire reference.
+     */
+/*
+    public Iterable<Shard> createShardsOverIntervals(final SAMDataSource readsDataSource, final GenomeLocSortedSet intervals, final int targetShardSize) {
+        final List<Shard> shards = new ArrayList<Shard>();
+        final GenomeLocParser parser = intervals.getGenomeLocParser();
+        LinkedList<GenomeLoc> currentIntervals = new LinkedList<GenomeLoc>();
+
+        for(GenomeLoc interval: intervals) {
+            // if the next interval is too big, we can safely shard currentInterval and then break down this one
+            if (interval.size() > targetShardSize) {
+                if (!currentIntervals.isEmpty())
+                    shards.add(createShardFromInterval(currentIntervals, readsDataSource, parser));
+                while(interval.size() > targetShardSize) {
+                    final GenomeLoc partialInterval = parser.createGenomeLoc(interval.getContig(), interval.getStart(), interval.getStart()+targetShardSize-1);
+                    shards.add(createShardFromInterval(Collections.singletonList(partialInterval), readsDataSource, parser));
+                    interval = parser.createGenomeLoc(interval.getContig(), interval.getStart() + targetShardSize, interval.getStop());
+                }
+                currentIntervals = new LinkedList<GenomeLoc>();
+                currentIntervals.add(interval);
+            }
+            // otherwise, we need to check whether we can merge this interval with currentInterval (and either shard currentInterval or merge accordingly)
+            else {
+                if (currentIntervals.isEmpty()) {
+                    currentIntervals.add(interval);
+                }
+                else {
+                    if (currentIntervals.getLast().compareContigs(interval) != 0 || interval.getStop() - currentIntervals.getLast().getStart() + 1 > targetShardSize) {
+                        shards.add(createShardFromInterval(currentIntervals, readsDataSource, parser));
+                        currentIntervals = new LinkedList<GenomeLoc>();
+                    }
+                    currentIntervals.add(interval);
+                }
+            }
+        }
+        if (!currentIntervals.isEmpty())
+            shards.add(createShardFromInterval(currentIntervals, readsDataSource, parser));
+        return shards;
+    }
+
+    private static Shard createShardFromInterval(final List<GenomeLoc> intervals, final SAMDataSource readsDataSource, final GenomeLocParser parser) {
+        //logger.debug("Adding shard " + interval);
+        return new LocusShard(parser,
+                readsDataSource,
+                intervals,
+                null);
+    }
+*/
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/reference/package-info.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/reference/package-info.java
new file mode 100644
index 0000000..77bea89
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/reference/package-info.java
@@ -0,0 +1,26 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.datasources.reference;
\ No newline at end of file
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/rmd/DataStreamSegment.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/rmd/DataStreamSegment.java
new file mode 100644
index 0000000..c12933c
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/rmd/DataStreamSegment.java
@@ -0,0 +1,32 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.datasources.rmd;
+
+/**
+ * Marker interface that represents an arbitrary consecutive segment within a data stream.
+ * Implementations tag what portion of the underlying data source a request covers
+ * (a mapped genomic region, or the entire stream); the interface declares no methods.
+ */
+interface DataStreamSegment {
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/rmd/EntireStream.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/rmd/EntireStream.java
new file mode 100644
index 0000000..389f2c6
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/rmd/EntireStream.java
@@ -0,0 +1,32 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.datasources.rmd;
+
+/**
+ * Models the entire stream of data.
+ * A stateless {@link DataStreamSegment} used when a request spans the whole data
+ * source rather than a specific mapped region.
+ */
+class EntireStream implements DataStreamSegment {
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/rmd/MappedStreamSegment.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/rmd/MappedStreamSegment.java
new file mode 100644
index 0000000..c490481
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/rmd/MappedStreamSegment.java
@@ -0,0 +1,48 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.datasources.rmd;
+
+import org.broadinstitute.gatk.utils.GenomeLoc;
+import org.broadinstitute.gatk.utils.HasGenomeLocation;
+
+/**
+ * Models a mapped position within a stream of GATK input data.
+ */
+class MappedStreamSegment implements DataStreamSegment, HasGenomeLocation {
+    public final GenomeLoc locus;
+
+    /**
+     * Retrieves the first location covered by a mapped stream segment.
+     * @return Location of the first base in this segment.
+     */
+    public GenomeLoc getLocation() {
+        return locus;
+    }
+
+    public MappedStreamSegment(GenomeLoc locus) {
+        this.locus = locus;
+    }
+}
\ No newline at end of file
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/rmd/ReferenceOrderedDataPool.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/rmd/ReferenceOrderedDataPool.java
new file mode 100644
index 0000000..3ed14bb
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/rmd/ReferenceOrderedDataPool.java
@@ -0,0 +1,153 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.datasources.rmd;
+
+import htsjdk.samtools.SAMSequenceDictionary;
+import org.broadinstitute.gatk.utils.refdata.SeekableRODIterator;
+import org.broadinstitute.gatk.utils.refdata.tracks.RMDTrack;
+import org.broadinstitute.gatk.utils.refdata.tracks.RMDTrackBuilder;
+import org.broadinstitute.gatk.utils.refdata.utils.FlashBackIterator;
+import org.broadinstitute.gatk.utils.refdata.utils.LocationAwareSeekableRODIterator;
+import org.broadinstitute.gatk.utils.refdata.utils.RMDTriplet;
+import org.broadinstitute.gatk.utils.GenomeLoc;
+import org.broadinstitute.gatk.utils.GenomeLocParser;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+
+import java.util.List;
+
+/**
+ * A pool of reference-ordered data iterators.
+ * Hands out LocationAwareSeekableRODIterators over a single RMD/ROD file, preferring to
+ * reuse an existing iterator that is positioned at or before the requested segment.
+ */
+class ReferenceOrderedDataPool extends ResourcePool<LocationAwareSeekableRODIterator, LocationAwareSeekableRODIterator> {
+    // the reference-ordered data itself.
+    private final RMDTriplet fileDescriptor;
+
+    // our tribble track builder
+    private final RMDTrackBuilder builder;
+
+    /**
+     * The header from this RMD, if present.
+     */
+    private final Object header;
+
+    /**
+     * The sequence dictionary from this ROD.  If no sequence dictionary is present, this dictionary will be the same as the reference's.
+     */
+    private final SAMSequenceDictionary sequenceDictionary;
+
+    // When true, new iterators are wrapped in FlashBackIterator so they can rewind to earlier positions.
+    boolean flashbackData = false;
+    // NOTE(review): this constructor invokes createNewResource(), an overridable method, before
+    // construction completes -- safe as used here, but fragile if this class is ever subclassed.
+    public ReferenceOrderedDataPool(RMDTriplet fileDescriptor,RMDTrackBuilder builder,SAMSequenceDictionary sequenceDictionary, GenomeLocParser genomeLocParser,boolean flashbackData) {
+        super(sequenceDictionary,genomeLocParser);
+        this.fileDescriptor = fileDescriptor;
+        this.builder = builder;
+        this.flashbackData = flashbackData;
+
+        // prepopulate one RMDTrack
+        LocationAwareSeekableRODIterator iterator = createNewResource();
+        this.addNewResource(iterator);
+
+        // Pull the proper header and sequence dictionary from the prepopulated track.
+        this.header = iterator.getHeader();
+        this.sequenceDictionary = iterator.getSequenceDictionary();
+    }
+
+    /**
+     * Gets the header used by this resource pool.
+     * @return Header used by this resource pool.
+     */
+    public Object getHeader() {
+        return header;
+    }
+
+    /**
+     * Gets the sequence dictionary built into the ROD index file.
+     * @return Sequence dictionary from the index file.
+     */
+    public SAMSequenceDictionary getSequenceDictionary() {
+        return sequenceDictionary;
+    }
+
+    /**
+     * Create a new iterator from the existing reference-ordered data.  This new iterator is expected
+     * to be completely independent of any other iterator.
+     * The streaming ROD interface supports only a single live iterator.
+     * @return The newly created resource.
+     */
+    public LocationAwareSeekableRODIterator createNewResource() {
+        if(numIterators() > 0)
+            throw new ReviewedGATKException("BUG: Tried to create multiple iterators over streaming ROD interface");
+        RMDTrack track = builder.createInstanceOfTrack(fileDescriptor);
+        LocationAwareSeekableRODIterator iter = new SeekableRODIterator(track.getHeader(),track.getSequenceDictionary(),referenceSequenceDictionary,genomeLocParser,track.getIterator());
+        return (flashbackData) ? new FlashBackIterator(iter) : iter;
+    }
+
+    /**
+     * Finds the best existing ROD iterator from the pool.  In this case, the best existing ROD is defined as
+     * the first one encountered that is at or before the given position.
+     * @param segment {@inheritDoc}
+     * @param resources {@inheritDoc}
+     * @return {@inheritDoc}
+     */
+    public LocationAwareSeekableRODIterator selectBestExistingResource( DataStreamSegment segment, List<LocationAwareSeekableRODIterator> resources ) {
+        if(segment instanceof MappedStreamSegment) {
+            GenomeLoc position = ((MappedStreamSegment)segment).getLocation();
+
+            for( LocationAwareSeekableRODIterator RODIterator : resources ) {
+
+                // An unstarted iterator, or one strictly before the target, can seek forward to it.
+                if( (RODIterator.position() == null && RODIterator.hasNext()) ||
+                    (RODIterator.position() != null && RODIterator.position().isBefore(position)) )
+                    return RODIterator;
+                // A FlashBackIterator that has already passed the target may be rewound instead.
+                if (RODIterator.position() != null && RODIterator instanceof FlashBackIterator && ((FlashBackIterator)RODIterator).canFlashBackTo(position)) {
+                    ((FlashBackIterator)RODIterator).flashBackTo(position);
+                    return RODIterator;
+                }
+
+            }
+            return null;
+        }
+        else if(segment instanceof EntireStream) {
+            // Asking for a segment over the entire stream, so by definition, there is no best existing resource.
+            // Force the system to create a new one.
+            return null;
+        }
+        else {
+            throw new ReviewedGATKException("Unable to find a ROD iterator for segments of type " + segment.getClass());
+        }
+    }
+
+    /**
+     * In this case, the iterator is the resource.  Pass it through.
+     */
+    public LocationAwareSeekableRODIterator createIteratorFromResource( DataStreamSegment segment, LocationAwareSeekableRODIterator resource ) {
+        return resource;
+    }
+
+    /**
+     * Releases the resource's buffers; only FlashBackIterator holds any, other iterators need no cleanup.
+     */
+    public void closeResource( LocationAwareSeekableRODIterator resource ) {
+        if (resource instanceof FlashBackIterator) ((FlashBackIterator)resource).close();
+    }
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/rmd/ReferenceOrderedDataSource.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/rmd/ReferenceOrderedDataSource.java
new file mode 100644
index 0000000..1dd8e8f
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/rmd/ReferenceOrderedDataSource.java
@@ -0,0 +1,257 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.datasources.rmd;
+
+import htsjdk.samtools.SAMSequenceDictionary;
+import org.broadinstitute.gatk.utils.commandline.Tags;
+import org.broadinstitute.gatk.utils.refdata.SeekableRODIterator;
+import org.broadinstitute.gatk.utils.refdata.tracks.RMDTrack;
+import org.broadinstitute.gatk.utils.refdata.tracks.RMDTrackBuilder;
+import org.broadinstitute.gatk.utils.refdata.utils.LocationAwareSeekableRODIterator;
+import org.broadinstitute.gatk.utils.refdata.utils.RMDTriplet;
+import org.broadinstitute.gatk.utils.GenomeLoc;
+import org.broadinstitute.gatk.utils.GenomeLocParser;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+import org.broadinstitute.gatk.utils.exceptions.UserException;
+
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.lang.reflect.Type;
+import java.util.List;
+
+/**
+ * A data source which provides a single type of reference-ordered data (ROD),
+ * wrapping a pool of iterators so that multiple consumers can traverse the
+ * same track concurrently, each with its own iterator.
+ */
+public class ReferenceOrderedDataSource {
+    /**
+     * The descriptor (name/type/file triplet) of the reference-ordered data itself.
+     */
+    private final RMDTriplet fileDescriptor;
+
+    /**
+     * The header associated with this ROD (e.g. a VCF header), if any.
+     */
+    private final Object header;
+
+    /**
+     * The sequence dictionary associated with this RMD, as reported by the
+     * underlying iterator pool.
+     */
+    private final SAMSequenceDictionary sequenceDictionary;
+
+    /**
+     * The builder to use when constructing new reference-ordered data readers.
+     */
+    private final RMDTrackBuilder builder;
+
+    /**
+     * A pool of iterators for navigating through the genome.
+     */
+    private final ResourcePool<?,LocationAwareSeekableRODIterator> iteratorPool;
+
+    /**
+     * Create a new reference-ordered data source.
+     *
+     * @param fileDescriptor              descriptor of the ROD to open.
+     * @param builder                     factory for constructing readers over the ROD.
+     * @param referenceSequenceDictionary sequence dictionary of the reference.
+     * @param genomeLocParser             parser for building GenomeLocs.
+     * @param flashbackData               whether stream-based iterators should support
+     *                                    rewinding ("flashback"); only used for STREAM storage.
+     */
+    public ReferenceOrderedDataSource(RMDTriplet fileDescriptor,
+                                      RMDTrackBuilder builder,
+                                      SAMSequenceDictionary referenceSequenceDictionary,
+                                      GenomeLocParser genomeLocParser,
+                                      boolean flashbackData ) {
+        this.fileDescriptor = fileDescriptor;
+        this.builder = builder;
+
+        // TODO: Unify the two blocks of code below by creating a ReferenceOrderedDataPool base class of a coherent type (not RMDTrack for one and SeekableIterator for the other).
+        // Non-STREAM storage can be queried randomly, so it uses the query-based pool;
+        // STREAM storage uses the sequential pool with optional flashback support.
+        if (fileDescriptor.getStorageType() != RMDTriplet.RMDStorageType.STREAM) {
+            iteratorPool = new ReferenceOrderedQueryDataPool(fileDescriptor,
+                                                             builder,
+                                                             referenceSequenceDictionary,
+                                                             genomeLocParser);
+            this.header = ((ReferenceOrderedQueryDataPool)iteratorPool).getHeader();
+            this.sequenceDictionary = ((ReferenceOrderedQueryDataPool)iteratorPool).getSequenceDictionary();
+        }
+        else {
+            iteratorPool = new ReferenceOrderedDataPool(fileDescriptor,
+                                                        builder,
+                                                        referenceSequenceDictionary,
+                                                        genomeLocParser,
+                                                        flashbackData);
+            this.header = ((ReferenceOrderedDataPool)iteratorPool).getHeader();
+            this.sequenceDictionary = ((ReferenceOrderedDataPool)iteratorPool).getSequenceDictionary();
+        }
+    }
+
+    /**
+     * Return the name of the underlying reference-ordered data.
+     * @return Name of the underlying rod.
+     */
+    public String getName() {
+        return fileDescriptor.getName();
+    }
+
+    /**
+     * @return the codec class registered for this track's file type.
+     */
+    public Class getType() {
+        return builder.getFeatureManager().getByTriplet(fileDescriptor).getCodecClass();
+    }
+
+    /**
+     * @return the feature (record) class produced by this track's codec.
+     */
+    public Class getRecordType() {
+        return builder.getFeatureManager().getByTriplet(fileDescriptor).getFeatureClass();
+    }
+
+    /**
+     * @return the on-disk file backing this ROD.
+     */
+    public File getFile() {
+        return new File(fileDescriptor.getFile());
+    }
+
+    /**
+     * @return the header pulled from the underlying pool, or null if none.
+     */
+    public Object getHeader() {
+        return header;
+    }
+
+    /**
+     * @return the command-line tags attached to this ROD binding.
+     */
+    public Tags getTags() {
+        return fileDescriptor.getTags();
+    }
+    
+    /**
+     * Look up a single tag value by key.
+     * @param key tag key to look up.
+     * @return the tag's value, or null if the key is absent.
+     */
+    public String getTagValue( final String key ) {
+        return fileDescriptor.getTags().getValue( key );
+    }
+
+
+    /**
+     * Retrieves the sequence dictionary created by this ROD.
+     * @return the sequence dictionary; never null (falls back to the pool's choice).
+     */
+    public SAMSequenceDictionary getSequenceDictionary() {
+        return sequenceDictionary;
+    }
+
+    /**
+     * helper function for determining if we are the same track based on name and record type
+     *
+     * @param name the name to match
+     * @param type the type to match
+     *
+     * @return true on a match, false if the name or type is different
+     */
+    public boolean matchesNameAndRecordType(String name, Type type) {
+        // NOTE(review): type.getClass() and getType().getClass() both yield the runtime
+        // class of the Type/Class object itself (typically Class.class), not the types
+        // being compared, so this isAssignableFrom check looks degenerate (near-always
+        // true).  Presumably the intent was to compare the types directly -- verify
+        // against callers before changing.
+        return (name.equals(fileDescriptor.getName()) && (type.getClass().isAssignableFrom(getType().getClass())));
+    }
+
+    /**
+     * Seek to the specified position and return an iterator through the data.
+     *
+     * @param loc GenomeLoc that points to the selected position; a null loc
+     *            requests an iterator over the entire stream.
+     *
+     * @return Iterator through the data.
+     */
+    public LocationAwareSeekableRODIterator seek(GenomeLoc loc) {
+        DataStreamSegment dataStreamSegment = loc != null ? new MappedStreamSegment(loc) : new EntireStream();
+        return iteratorPool.iterator(dataStreamSegment);
+    }
+
+
+    /**
+     * Close the specified iterator, returning it to the pool.
+     * @param iterator Iterator to close.
+     */
+    public void close( LocationAwareSeekableRODIterator iterator ) {
+        iteratorPool.release(iterator);
+    }
+
+}
+
+/**
+ * A data pool for the new query based RODs: each pooled resource is an open
+ * RMDTrack from which region-limited (or whole-file) iterators are created.
+ */
+class ReferenceOrderedQueryDataPool extends ResourcePool<RMDTrack,LocationAwareSeekableRODIterator> {
+    // the reference-ordered data itself.
+    private final RMDTriplet fileDescriptor;
+
+    // our tribble track builder
+    private final RMDTrackBuilder builder;
+
+    /**
+     * The header from this RMD, if present.
+     */
+    private final Object header;
+
+    /**
+     * The sequence dictionary from this ROD.  If no sequence dictionary is present, this dictionary will be the same as the reference's.
+     */
+    private final SAMSequenceDictionary sequenceDictionary;
+
+    public ReferenceOrderedQueryDataPool(RMDTriplet fileDescriptor, RMDTrackBuilder builder, SAMSequenceDictionary referenceSequenceDictionary, GenomeLocParser genomeLocParser) {
+        super(referenceSequenceDictionary,genomeLocParser);
+        this.fileDescriptor = fileDescriptor;
+        this.builder = builder;
+
+        // prepopulate one RMDTrack
+        RMDTrack track = builder.createInstanceOfTrack(fileDescriptor);
+        this.addNewResource(track);
+
+        // Pull the proper header and sequence dictionary from the prepopulated track.
+        this.header = track.getHeader();
+        this.sequenceDictionary = track.getSequenceDictionary();
+    }
+
+    public Object getHeader() {
+        return header;
+    }
+
+    public SAMSequenceDictionary getSequenceDictionary() {
+        return sequenceDictionary;
+    }
+
+    /** Open a fresh track over the same file; each track has its own file handle. */
+    @Override
+    protected RMDTrack createNewResource() {
+        return builder.createInstanceOfTrack(fileDescriptor);
+    }
+
+    /** Any open track can serve any query, so just take the first non-null one. */
+    @Override
+    protected RMDTrack selectBestExistingResource(DataStreamSegment segment, List<RMDTrack> availableResources) {
+        for (RMDTrack reader : availableResources)
+            if (reader != null) return reader;
+        return null;
+    }
+
+    /**
+     * Build an iterator over the track: a region-limited query for mapped
+     * segments, otherwise a whole-file iterator.  I/O and runtime failures from
+     * the underlying track are rewrapped as GATK exceptions.
+     */
+    @Override
+    protected LocationAwareSeekableRODIterator createIteratorFromResource(DataStreamSegment position, RMDTrack track) {
+        try {
+            if (position instanceof MappedStreamSegment) {
+                GenomeLoc pos = ((MappedStreamSegment) position).locus;
+                return new SeekableRODIterator(header,sequenceDictionary,referenceSequenceDictionary,genomeLocParser,track.query(pos));
+            } else {
+                return new SeekableRODIterator(header,sequenceDictionary,referenceSequenceDictionary,genomeLocParser,track.getIterator());
+            }
+        } catch (FileNotFoundException e) {
+            throw new UserException.CouldNotReadInputFile(fileDescriptor.getName(), "it could not be found");
+        } catch (IOException | RuntimeException e) {
+            throw new ReviewedGATKException("Unable to create iterator for rod named " + fileDescriptor.getName(),e);
+        }
+
+    }
+
+    @Override
+    protected void closeResource(RMDTrack track) {
+        track.close();
+    }
+}
\ No newline at end of file
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/rmd/ResourcePool.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/rmd/ResourcePool.java
new file mode 100644
index 0000000..213ab28
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/rmd/ResourcePool.java
@@ -0,0 +1,188 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.datasources.rmd;
+
+import htsjdk.samtools.SAMSequenceDictionary;
+import htsjdk.samtools.util.CloseableIterator;
+import org.broadinstitute.gatk.utils.GenomeLocParser;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+
+import java.util.*;
+
+/**
+ * A pool of open resources, all of which can create a closeable iterator.
+ * Subclasses define how resources are created, selected, adapted into
+ * iterators, and closed.  Pool bookkeeping is synchronized on this.
+ */
+abstract class ResourcePool <T,I extends CloseableIterator> {
+    /**
+     * Sequence dictionary.
+     */
+    protected final SAMSequenceDictionary referenceSequenceDictionary;
+
+    /**
+     * Builder/parser for GenomeLocs.
+     */
+    protected final GenomeLocParser genomeLocParser;
+
+    /**
+     * All resources ever created by this pool, in or out of service.
+     */
+    private List<T> allResources = new ArrayList<T>();
+
+    /**
+     * All resources that are not currently in service.
+     */
+    private List<T> availableResources = new ArrayList<T>();
+
+    /**
+     * Which resource each outstanding iterator was created from, so release()
+     * can return the right resource to the pool.
+     */
+    private Map<I,T> resourceAssignments = new HashMap<I,T>();
+
+    protected ResourcePool(SAMSequenceDictionary referenceSequenceDictionary,GenomeLocParser genomeLocParser) {
+        this.referenceSequenceDictionary = referenceSequenceDictionary;
+        this.genomeLocParser = genomeLocParser;
+    }
+
+    /**
+     * Get an iterator whose position is before the specified location.  Create a new one if none exists.
+     * @param segment Target position for the iterator.
+     * @return An iterator that can traverse the selected region.  Should be able to iterate concurrently with other
+     *         iterators from this pool.
+     */
+    public I iterator( DataStreamSegment segment ) {
+        // Grab the first iterator in the list whose position is before the requested position.
+        T selectedResource = null;
+        synchronized (this) {
+            selectedResource = selectBestExistingResource(segment, availableResources);
+
+            // No iterator found?  Create another.  It is expected that
+            // each iterator created will have its own file handle.
+            if (selectedResource == null) {
+                selectedResource = createNewResource();
+                addNewResource(selectedResource);
+            }
+
+            // Remove the iterator from the list of available iterators.
+            availableResources.remove(selectedResource);
+        }
+
+        // Iterator creation may do I/O, so it happens outside the lock.
+        I iterator = createIteratorFromResource(segment, selectedResource);
+
+        // also protect the resource assignment
+        synchronized (this) {
+            // Make a note of this assignment for proper releasing later.
+            resourceAssignments.put(iterator, selectedResource);
+        }
+
+        return iterator;
+    }
+
+    /**
+     * Release the lock on the given iterator, returning it to the pool.
+     * @param iterator Iterator to return to the pool.
+     */
+    public void release( I iterator ) {
+        synchronized(this) {
+            // Find and remove the resource from the list of allocated resources.
+            // NOTE(review): the get() is redundant -- remove() returns the same
+            // mapped value, so 'resource' and 'obj' are always identical here.
+            T resource = resourceAssignments.get( iterator );
+            Object obj = resourceAssignments.remove(iterator);
+
+            // Close the iterator.
+            iterator.close();
+
+            // make sure we actually removed the assignment
+            if (obj == null)
+                    throw new ReviewedGATKException("Failed to remove resource assignment; target key had no associated value in the resource assignment map");
+            // Return the resource to the pool.
+            if( !allResources.contains(resource) )
+                throw new ReviewedGATKException("Iterator does not belong to the given pool.");
+            availableResources.add(resource);
+        }
+    }
+
+    /**
+     * Add a resource to the list of available resources.  Useful if derived classes
+     * want to seed the pool with a set of resources at a given time (like at initialization).
+     * @param resource The new resource to add.
+     */
+    protected void addNewResource( T resource ) {
+        synchronized(this) {
+            allResources.add(resource);
+            availableResources.add(resource);
+        }
+    }
+
+    /**
+     * If no appropriate resources are found in the pool, the system can create a new resource.
+     * Delegate the creation of the resource to the subclass.
+     * @return The new resource created.
+     */
+    protected abstract T createNewResource();
+
+    /**
+     * Find the most appropriate resource to acquire the specified data.
+     * @param segment The data over which the resource is required.
+     * @param availableResources A list of candidate resources to evaluate.
+     * @return The best choice of the availableResources, or null if no resource meets the criteria.
+     */
+    protected abstract T selectBestExistingResource( DataStreamSegment segment, List<T> availableResources );
+
+    /**
+     * Create an iterator over the specified resource.
+     * @param position The bounds of iteration.  The first element of the iterator through the last element should all
+     *                 be in the range described by position.
+     * @param resource The resource from which to derive the iterator.
+     * @return A new iterator over the given data.
+     */
+    protected abstract I createIteratorFromResource( DataStreamSegment position, T resource );
+
+    /**
+     * Retire this resource from service.
+     * @param resource The resource to retire.
+     */
+    protected abstract void closeResource(T resource);
+
+    /**
+     * Operating stats...get the number of total iterators.  Package-protected
+     * for unit testing.
+     * @return An integer number of total iterators.
+     */
+    int numIterators() {
+        return allResources.size();
+    }
+
+    /**
+     * Operating stats...get the number of available iterators.  Package-protected
+     * for unit testing.
+     * @return An integer number of available iterators.
+     */
+    int numAvailableIterators() {
+        return availableResources.size();
+    }
+
+}
+
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/rmd/package-info.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/rmd/package-info.java
new file mode 100644
index 0000000..402087d
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/datasources/rmd/package-info.java
@@ -0,0 +1,26 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.datasources.rmd;
\ No newline at end of file
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/executive/Accumulator.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/executive/Accumulator.java
new file mode 100644
index 0000000..8d166e1
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/executive/Accumulator.java
@@ -0,0 +1,211 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.executive;
+
+import org.broadinstitute.gatk.engine.GenomeAnalysisEngine;
+import org.broadinstitute.gatk.engine.datasources.providers.LocusShardDataProvider;
+import org.broadinstitute.gatk.engine.datasources.providers.ShardDataProvider;
+import org.broadinstitute.gatk.engine.walkers.Walker;
+import org.broadinstitute.gatk.utils.GenomeLoc;
+import org.broadinstitute.gatk.utils.GenomeLocSortedSet;
+import org.broadinstitute.gatk.utils.collections.Pair;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.List;
+/**
+ * Manages the accumulation of reduce results across shards of a traversal:
+ * either as a single running reduce (StandardAccumulator) or independently
+ * per interval (IntervalAccumulator) when the walker reduces by interval.
+ */
+
+public abstract class Accumulator {
+    /**
+     * The walker for which to accumulate.
+     */
+    protected final Walker walker;
+
+    /**
+     * Create a new Accumulator.  Forbid outside classes from performing this operation.
+     * @param walker the walker whose results are being accumulated.
+     */
+    protected Accumulator( Walker walker ) {
+        this.walker = walker;
+    }
+
+    /**
+     * Creates an accumulator suitable for accumulating results of the given walker.
+     * @param walker Walker for which to build an accumulator.
+     * @return Accumulator suitable for this walker.
+     */
+    public static Accumulator create( GenomeAnalysisEngine engine, Walker walker ) {
+        if( walker.isReduceByInterval() && engine.getIntervals() != null)
+            return new IntervalAccumulator( walker, engine.getIntervals() );
+        else
+            return new StandardAccumulator( walker );
+    }
+
+    /**
+     * Gets the appropriate reduce initializer for this accumulator.
+     * @return Traversal reduce init to feed into traversal engine. 
+     */
+    public abstract Object getReduceInit();
+
+    /**
+     * Roll this traversal result into the given accumulator.
+     * @param provider the shard the result was computed over.
+     * @param result Result of the most recent accumulation.
+     */
+    public abstract void accumulate( ShardDataProvider provider, Object result );
+
+    /**
+     * Finishes off the traversal.  Submits accumulated results to
+     * the walker and returns them.
+     * TODO: Its a bit funky to delegate the finishing of the traversal
+     *       to an accumulator, but we're doing it for type safety so the
+     *       right Walker override gets called.  Clean this up.
+     * @return Final result of accumulation.
+     */
+    public abstract Object finishTraversal();
+
+    /**
+     * Accumulates in the 'standard' fashion; basically funnels
+     * the reduce result back into the reduce init and relies on
+     * the user-supplied reduce to handle the accumulation.
+     */
+    private static class StandardAccumulator extends Accumulator {
+        // Running reduce value; null until the first accumulate() call.
+        private Object accumulator = null;
+        // True once reduceInit() has been handed out, so it is called only once.
+        private boolean initialized = false;
+
+        protected StandardAccumulator( Walker walker ) {
+            super(walker);
+        }
+
+        /**
+         * Standard accumulator returns reduceInit first, then the
+         * results of the previous accumulation. 
+         */
+        public Object getReduceInit() {
+            if( !initialized ) {
+                initialized = true;
+                return walker.reduceInit();
+            }
+            else
+                return accumulator;
+        }
+
+        /**
+         * The result of the accumulator in a non-intervals walker
+         * already takes the accumulation into account.  return the result. 
+         */
+        public void accumulate( ShardDataProvider provider, Object result ) { this.accumulator = result; }
+
+        /**
+         * The result of the traversal is the final accumulated reduce value.
+         */
+        public Object finishTraversal() {
+            walker.onTraversalDone(getReduceInit());  // must call getReduceInit to ensure that we get the accumulator value or the reduceInit value
+            return this.accumulator;
+        }
+    }
+
+    /**
+     * An interval-based accumulator.  Treats each reduce result independently,
+     * and aggregates those results into a single list.
+     */
+    private static class IntervalAccumulator extends Accumulator {
+        /**
+         * True if a new interval is being started.  This flag is used to
+         * ensure that reduceInit() is not called unnecessarily.
+         */
+        private boolean startingNewInterval = true;
+
+        /**
+         * An iterator through all intervals in the series.
+         */
+        private final Iterator<GenomeLoc> intervalIterator;
+
+        /**
+         * For which interval is the accumulator currently accumulating?
+         */
+        private GenomeLoc currentInterval = null;
+
+        /**
+         * The actual mapping of interval to accumulated result.
+         */
+        private final List<Pair<GenomeLoc,Object>> intervalAccumulator = new ArrayList<Pair<GenomeLoc,Object>>();
+
+        /**
+         * Holds the next value to be passed in as the reduce result.
+         */
+        private Object nextReduceInit = null;
+
+        protected IntervalAccumulator(Walker walker, GenomeLocSortedSet intervals) {
+            super(walker);
+            this.intervalIterator = intervals.iterator();
+            if(intervalIterator.hasNext()) currentInterval = intervalIterator.next();
+        }
+
+        /**
+         * Interval accumulator always feeds reduceInit into every new traversal.
+         */
+        public Object getReduceInit() {
+            if(startingNewInterval) {
+                startingNewInterval = false;
+                nextReduceInit = walker.reduceInit();
+            }
+            return nextReduceInit;
+        }
+
+        /**
+         * Create a holder for interval results if none exists.  Add the result to the holder.
+         */
+        public void accumulate( ShardDataProvider provider, Object result ) {
+            if(!(provider instanceof LocusShardDataProvider))
+                throw new ReviewedGATKException("Unable to reduce by interval on reads traversals at this time.");
+
+            GenomeLoc location = ((LocusShardDataProvider)provider).getLocus();
+
+            // Pull the interval iterator ahead to the interval overlapping this shard fragment.
+            while((currentInterval == null || currentInterval.isBefore(location)) && intervalIterator.hasNext())
+                currentInterval = intervalIterator.next();
+
+            // Commit the result only when this shard fragment ends exactly at the
+            // interval's stop (the interval is presumably complete); otherwise carry
+            // the partial result forward as the next reduce init.
+            if(currentInterval != null && currentInterval.getContig().equals(location.getContig()) && currentInterval.getStop() == location.getStop()) {
+                intervalAccumulator.add(new Pair<GenomeLoc,Object>(currentInterval,result));
+                startingNewInterval = true;
+            }
+            else
+                nextReduceInit = result;
+        }
+
+        /**
+         * The result of the traversal is the list of accumulated intervals.
+         */
+        public Object finishTraversal() {
+            walker.onTraversalDone(intervalAccumulator);
+            return intervalAccumulator;
+        }
+    }
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/executive/HierarchicalMicroScheduler.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/executive/HierarchicalMicroScheduler.java
new file mode 100644
index 0000000..0f01f76
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/executive/HierarchicalMicroScheduler.java
@@ -0,0 +1,495 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.executive;
+
+import htsjdk.samtools.reference.IndexedFastaSequenceFile;
+import htsjdk.tribble.TribbleException;
+import org.broadinstitute.gatk.engine.GenomeAnalysisEngine;
+import org.broadinstitute.gatk.engine.datasources.reads.SAMDataSource;
+import org.broadinstitute.gatk.engine.datasources.reads.Shard;
+import org.broadinstitute.gatk.engine.datasources.rmd.ReferenceOrderedDataSource;
+import org.broadinstitute.gatk.engine.io.OutputTracker;
+import org.broadinstitute.gatk.engine.io.ThreadGroupOutputTracker;
+import org.broadinstitute.gatk.engine.resourcemanagement.ThreadAllocation;
+import org.broadinstitute.gatk.engine.walkers.TreeReducible;
+import org.broadinstitute.gatk.engine.walkers.Walker;
+import org.broadinstitute.gatk.utils.MultiThreadedErrorTracker;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+import org.broadinstitute.gatk.utils.exceptions.UserException;
+import org.broadinstitute.gatk.utils.threading.ThreadPoolMonitor;
+
+import java.util.Collection;
+import java.util.Iterator;
+import java.util.LinkedList;
+import java.util.Queue;
+import java.util.concurrent.*;
+
+/**
+ * A microscheduler that schedules shards according to a tree-like structure.
+ * Requires a special walker tagged with a 'TreeReducible' interface.
+ */
+public class HierarchicalMicroScheduler extends MicroScheduler implements HierarchicalMicroSchedulerMBean, ReduceTree.TreeReduceNotifier {
+    /**
+     * How many outstanding output merges are allowed before the scheduler stops
+     * allowing new processes and starts merging flat-out.
+     */
+    private static final int MAX_OUTSTANDING_OUTPUT_MERGES = 50;
+
+    /** Manage currently running threads. */
+    private ExecutorService threadPool;
+
+    /**
+     * A thread local output tracker for managing output per-thread.
+     */
+    private ThreadGroupOutputTracker outputTracker = new ThreadGroupOutputTracker();
+
+    private final Queue<TreeReduceTask> reduceTasks = new LinkedList<TreeReduceTask>();
+
+    /**
+     * An exception that's occurred in this traversal.  If null, no exception has occurred.
+     */
+    final MultiThreadedErrorTracker errorTracker = new MultiThreadedErrorTracker();
+
+    /**
+     * Queue of incoming shards.
+     */
+    private Iterator<Shard> traversalTasks;
+
+    /**
+     * Keep a queue of shard traversals, and constantly monitor it to see what output
+     * merge tasks remain.
+     * TODO: Integrate this into the reduce tree.
+     */
+    private final Queue<ShardTraverser> outputMergeTasks = new LinkedList<ShardTraverser>();
+
+    /** How many shard traversals have run to date? */
+    private int totalCompletedTraversals = 0;
+
+    /** What is the total time spent traversing shards? */
+    private long totalShardTraverseTime = 0;
+
+    /** What is the total time spent tree reducing shard output? */
+    private long totalTreeReduceTime = 0;
+
+    /** How many tree reduces have been completed? */
+    private long totalCompletedTreeReduces = 0;
+
+    /** What is the total time spent merging output? */
+    private long totalOutputMergeTime = 0;
+
+    /**
+     * Create a new hierarchical microscheduler to process the given reads and reference.
+     *
+     * @param engine           the GenomeAnalysisEngine this scheduler belongs to.
+     * @param walker           the walker used to process the dataset.
+     * @param reads            Reads file(s) to process.
+     * @param reference        Reference for driving the traversal.
+     * @param rods             Reference-ordered data sources for the traversal.
+     * @param threadAllocation How should we apply multi-threaded execution?
+     */
+    protected HierarchicalMicroScheduler(final GenomeAnalysisEngine engine,
+                                         final Walker walker,
+                                         final SAMDataSource reads,
+                                         final IndexedFastaSequenceFile reference,
+                                         final Collection<ReferenceOrderedDataSource> rods,
+                                         final ThreadAllocation threadAllocation) {
+        super(engine, walker, reads, reference, rods, threadAllocation);
+
+        // Only the data-thread count is honored; efficiency monitoring is
+        // unsupported for -nt and rejected up front.
+        final int nThreadsToUse = threadAllocation.getNumDataThreads();
+        if ( threadAllocation.monitorThreadEfficiency() ) {
+            throw new UserException.BadArgumentValue("nt", "Cannot monitor thread efficiency with -nt, sorry");
+        }
+
+        this.threadPool = Executors.newFixedThreadPool(nThreadsToUse, new UniqueThreadGroupThreadFactory());
+    }
+
+    /**
+     * Thread factory that places every new thread into its own, uniquely
+     * named thread group ("HMS-group-&lt;n&gt;").  The per-group isolation is
+     * what allows the ThreadGroupOutputTracker to associate outputs with
+     * individual worker threads.
+     */
+    private static class UniqueThreadGroupThreadFactory implements ThreadFactory {
+        int counter = 0;
+
+        @Override
+        public Thread newThread(Runnable runnable) {
+            final String groupName = "HMS-group-" + counter++;
+            return new Thread(new ThreadGroup(groupName), runnable);
+        }
+    }
+
+    /**
+     * Run the given walker over the supplied shards, scheduling shard
+     * traversals and tree reduces across the data thread pool, and return the
+     * final, fully reduced result.
+     *
+     * @param walker        Walker to run; must implement TreeReducible.
+     * @param shardStrategy Source of shards to traverse.
+     * @return Final result obtained from the root of the reduce tree.
+     */
+    public Object execute( Walker walker, Iterable<Shard> shardStrategy ) {
+        super.startingExecution();
+
+        // Fast fail for walkers not supporting TreeReducible interface.
+        if (!( walker instanceof TreeReducible ))
+            throw new IllegalArgumentException("The GATK can currently run in parallel only with TreeReducible walkers");
+
+        this.traversalTasks = shardStrategy.iterator();
+
+        final ReduceTree reduceTree = new ReduceTree(this);
+        initializeWalker(walker);
+
+        // Main scheduling loop: runs until all traversals and reduces are done,
+        // or execution is aborted / a worker-thread error is recorded.
+        while (! abortExecution() && (isShardTraversePending() || isTreeReducePending())) {
+            // Check for errors during execution.
+            errorTracker.throwErrorIfPending();
+
+            // Too many files sitting around taking up space?  Merge them.
+            if (isMergeLimitExceeded())
+                mergeExistingOutput(false);
+
+            // Wait for the next slot in the queue to become free.
+            waitForFreeQueueSlot();
+
+            // Pick the next most appropriate task and run it.  In the interest of
+            // memory conservation, hierarchical reduces always run before traversals.
+            if (isTreeReduceReady())
+                queueNextTreeReduce(walker);
+            else if (isShardTraversePending())
+                queueNextShardTraverse(walker, reduceTree);
+        }
+
+        errorTracker.throwErrorIfPending();
+
+        threadPool.shutdown();
+
+        // Merge any lingering output files.  If these files aren't ready,
+        // sit around and wait for them, then merge them.
+        mergeExistingOutput(true);
+
+        Object result = null;
+        try {
+            result = reduceTree.getResult().get();
+            notifyTraversalDone(walker,result);
+        } catch (ReviewedGATKException ex) {
+            throw ex;
+        } catch ( ExecutionException ex ) {
+            // the thread died and we are failing to get the result, rethrow it as a runtime exception
+            throw notifyOfTraversalError(ex.getCause());
+        } catch (Exception ex) {
+            throw new ReviewedGATKException("Unable to retrieve result", ex);
+        }
+
+        // do final cleanup operations
+        outputTracker.close();
+        cleanup();
+        executionIsDone();
+
+        return result;
+    }
+
+    /**
+     * Run the initialize method of the walker.  Ensure that any calls
+     * to the output stream will bypass thread local storage and write
+     * directly to the output file.
+     * @param walker Walker to initialize.
+     */
+    protected void initializeWalker(Walker walker) {
+        outputTracker.bypassThreadLocalStorage(true);
+        try {
+            walker.initialize();
+        }
+        finally {
+            outputTracker.bypassThreadLocalStorage(false);
+        }
+    }
+
+    /**
+     * Run the onTraversalDone method of the walker with the final traversal
+     * result.  Ensure that any calls to the output stream will bypass thread
+     * local storage and write directly to the output file.
+     * @param walker Walker to notify.
+     * @param result Final traversal result handed to onTraversalDone.
+     */
+    protected void notifyTraversalDone(Walker walker, Object result) {
+        outputTracker.bypassThreadLocalStorage(true);
+        try {
+            walker.onTraversalDone(result);
+        }
+        finally {
+            outputTracker.bypassThreadLocalStorage(false);
+        }
+    }
+
+    /**
+     * {@inheritDoc}
+     */
+    public OutputTracker getOutputTracker() {
+        return outputTracker;
+    }
+
+    /**
+     * Returns true if there are unscheduled shard traversals waiting to run.
+     *
+     * @return true if a shard traversal is waiting; false otherwise.
+     */
+    protected boolean isShardTraversePending() {
+        return traversalTasks.hasNext();
+    }
+
+    /**
+     * Returns true if there are tree reduces that can be run without
+     * blocking.
+     *
+     * @return true if a tree reduce is ready; false otherwise.
+     */
+    protected boolean isTreeReduceReady() {
+        if (reduceTasks.size() == 0)
+            return false;
+        return reduceTasks.peek().isReadyForReduce();
+    }
+
+    /**
+     * Returns true if there are tree reduces that need to be run before
+     * the computation is complete.  Returns true if any entries are in the queue,
+     * blocked or otherwise.
+     *
+     * @return true if a tree reduce is pending; false otherwise.
+     */
+    protected boolean isTreeReducePending() {
+        return reduceTasks.size() > 0;
+    }
+
+    /**
+     * Returns whether the maximum number of files is sitting in the temp directory
+     * waiting to be merged back in.
+     *
+     * @return True if the merging needs to take priority.  False otherwise.
+     */
+    protected boolean isMergeLimitExceeded() {
+        // NOTE(review): a previous revision also counted the leading run of
+        // *completed* merge tasks into a local ('pendingTasks') that was never
+        // read; that dead code has been removed.  The behavior in effect -- and
+        // preserved here -- compares the total queue size, complete or not.  If
+        // the completed-task count was the intended threshold, the comparison
+        // should use that count instead; confirm upstream before changing.
+        return outputMergeTasks.size() >= MAX_OUTSTANDING_OUTPUT_MERGES;
+    }
+
+    /**
+     * Merging all output that's sitting ready in the OutputMerger queue into
+     * the final data streams.
+     *
+     * @param wait If true, block on each queued traversal until it completes
+     *             and merge everything; if false, merge only the leading run
+     *             of already-completed tasks.
+     */
+    protected void mergeExistingOutput( boolean wait ) {
+        long startTime = System.currentTimeMillis();
+
+//        logger.warn("MergingExistingOutput");
+//        printOutputMergeTasks();
+
+        // Create a list of the merge tasks that will be performed in this run of the mergeExistingOutput().
+        Queue<ShardTraverser> mergeTasksInSession = new LinkedList<ShardTraverser>();
+        while( !outputMergeTasks.isEmpty() ) {
+            ShardTraverser traverser = outputMergeTasks.peek();
+
+            // If the next traversal isn't done and we're not supposed to wait, we've found our working set.  Continue.
+            if( !traverser.isComplete() && !wait )
+                break;
+
+            outputMergeTasks.remove();
+            mergeTasksInSession.add(traverser);
+        }
+
+//        logger.warn("Selected things to merge:");
+//        printOutputMergeTasks(mergeTasksInSession);
+
+        // Actually run through, merging the tasks in the working queue.
+        for( ShardTraverser traverser: mergeTasksInSession ) {
+            //logger.warn("*** Merging " + traverser.getIntervalsString());
+            if( !traverser.isComplete() )
+                traverser.waitForComplete();
+
+            OutputMergeTask mergeTask = traverser.getOutputMergeTask();
+            if( mergeTask != null ) {
+                try {
+                    mergeTask.merge();
+                }
+                catch(TribbleException ex) {
+                    // Specifically catch Tribble I/O exceptions and rethrow them as Reviewed.  We don't expect
+                    // any issues here because we created the Tribble output file mere moments ago and expect it to
+                    // be completely valid.
+                    throw new ReviewedGATKException("Unable to merge temporary Tribble output file.",ex);
+                }
+            }
+        }
+
+        long endTime = System.currentTimeMillis();
+
+        totalOutputMergeTime += ( endTime - startTime );
+    }
+
+    /**
+     * Queues the next traversal of a walker from the traversal tasks queue.
+     *
+     * @param walker     Walker to apply to the dataset.
+     * @param reduceTree Tree of reduces to which to add this shard traverse.
+     * @throws IllegalStateException if no pending traversals remain.
+     */
+    protected void queueNextShardTraverse( Walker walker, ReduceTree reduceTree ) {
+        if (!traversalTasks.hasNext())
+            throw new IllegalStateException("Cannot traverse; no pending traversals exist.");
+
+        final Shard shard = traversalTasks.next();
+
+        // todo -- add ownership claim here
+
+        final ShardTraverser traverser = new ShardTraverser(this, walker, shard, outputTracker);
+
+        final Future traverseResult = threadPool.submit(traverser);
+
+        // Add this traverse result to the reduce tree.  The reduce tree will call a callback to throw its entries on the queue.
+        reduceTree.addEntry(traverseResult);
+        outputMergeTasks.add(traverser);
+
+//        logger.warn("adding merge task");
+//        printOutputMergeTasks();
+
+        // No more data?  Let the reduce tree know so it can finish processing what it's got.
+        if (!isShardTraversePending())
+            reduceTree.complete();
+    }
+
+    /** Debug helper: dump the whole output merge queue to the log.  Only
+     *  referenced from commented-out diagnostics in this class. */
+    private synchronized void printOutputMergeTasks() {
+        printOutputMergeTasks(outputMergeTasks);
+    }
+
+    /** Debug helper: log each queued merge task and whether its traversal is complete. */
+    private synchronized void printOutputMergeTasks(final Queue<ShardTraverser> tasks) {
+        logger.info("Output merge tasks " + tasks.size());
+        for ( final ShardTraverser traverser : tasks )
+            logger.info(String.format("\t%s: complete? %b", traverser.getIntervalsString(), traverser.isComplete()));
+    }
+
+    /** Pulls the next reduce off the queue, binds the walker to it, and submits it to the thread pool. */
+    protected void queueNextTreeReduce( Walker walker ) {
+        if (reduceTasks.isEmpty())
+            throw new IllegalStateException("Cannot reduce; no pending reduces exist.");
+
+        // Head of the queue is always the next reduce to run.
+        final TreeReduceTask nextReduce = reduceTasks.remove();
+        nextReduce.setWalker((TreeReducible) walker);
+        threadPool.submit(nextReduce);
+    }
+
+    /** Blocks until a free slot appears in the thread queue. */
+    protected void waitForFreeQueueSlot() {
+        // Submit a monitor task and wait on it; watch() presumably returns once
+        // a pool worker has picked the monitor up, i.e. once a thread was free
+        // to run it -- TODO confirm against ThreadPoolMonitor's contract.
+        final ThreadPoolMonitor monitor = new ThreadPoolMonitor();
+        synchronized (monitor) {
+            threadPool.submit(monitor);
+            monitor.watch();
+        }
+    }
+
+    /**
+     * Callback for adding reduce tasks to the run queue.
+     *
+     * @param lhs Future supplying the left-hand input of the reduce.
+     * @param rhs Future supplying the right-hand input of the reduce.
+     * @return A new, composite future of the result of this reduce.
+     */
+    public Future notifyReduce( final Future lhs, final Future rhs ) {
+        final TreeReduceTask reducer = new TreeReduceTask(new TreeReducer(this, lhs, rhs));
+        reduceTasks.add(reducer);
+        return reducer;
+    }
+
+    /**
+     * Allows other threads to notify of an error during traversal.
+     *
+     * @param error The throwable raised on a worker thread.
+     * @return A RuntimeException suitable for rethrowing on the scheduling thread.
+     */
+    protected synchronized RuntimeException notifyOfTraversalError(Throwable error) {
+        return errorTracker.notifyOfError(error);
+    }
+
+    /** A small wrapper class that provides the TreeReducer interface along with the FutureTask semantics. */
+    private class TreeReduceTask extends FutureTask {
+        final private TreeReducer treeReducer;
+
+        public TreeReduceTask( TreeReducer treeReducer ) {
+            super(treeReducer);
+            this.treeReducer = treeReducer;
+        }
+
+        // Late-binds the walker whose treeReduce() the wrapped reducer will run.
+        public void setWalker( TreeReducible walker ) {
+            treeReducer.setWalker(walker);
+        }
+
+        // Delegates to the wrapped reducer's readiness check.
+        public boolean isReadyForReduce() {
+            return treeReducer.isReadyForReduce();
+        }
+    }
+
+    /**
+     * Used by the ShardTraverser to report time consumed traversing a given shard.
+     * Synchronized: worker threads report concurrently with one another.
+     *
+     * @param shardTraversalTime Elapsed time traversing a given shard.
+     */
+    synchronized void reportShardTraverseTime( long shardTraversalTime ) {
+        totalShardTraverseTime += shardTraversalTime;
+        totalCompletedTraversals++;
+    }
+
+    /**
+     * Used by the TreeReducer to report time consumed reducing two shards.
+     * Synchronized: worker threads report concurrently with one another.
+     *
+     * @param treeReduceTime Elapsed time reducing two shards.
+     */
+    synchronized void reportTreeReduceTime( long treeReduceTime ) {
+        totalTreeReduceTime += treeReduceTime;
+        totalCompletedTreeReduces++;
+
+    }
+
+    /** {@inheritDoc} */
+    public int getNumberOfTasksInReduceQueue() {
+        return reduceTasks.size();
+    }
+
+    /** {@inheritDoc} */
+    public int getNumberOfTasksInIOQueue() {
+        synchronized( outputMergeTasks ) {
+            return outputMergeTasks.size();
+        }
+    }
+
+    /** {@inheritDoc} */
+    public long getTotalShardTraverseTimeMillis() {
+        return totalShardTraverseTime;
+    }
+
+    /** {@inheritDoc} */
+    public long getAvgShardTraverseTimeMillis() {
+        if (totalCompletedTraversals == 0)
+            return 0;
+        return totalShardTraverseTime / totalCompletedTraversals;
+    }
+
+    /** {@inheritDoc} */
+    public long getTotalTreeReduceTimeMillis() {
+        return totalTreeReduceTime;
+    }
+
+    /** {@inheritDoc} */
+    public long getAvgTreeReduceTimeMillis() {
+        if (totalCompletedTreeReduces == 0)
+            return 0;
+        return totalTreeReduceTime / totalCompletedTreeReduces;
+    }
+
+    /** {@inheritDoc} */
+    public long getTotalOutputMergeTimeMillis() {
+        return totalOutputMergeTime;
+    }
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/executive/HierarchicalMicroSchedulerMBean.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/executive/HierarchicalMicroSchedulerMBean.java
new file mode 100644
index 0000000..b0fd074
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/executive/HierarchicalMicroSchedulerMBean.java
@@ -0,0 +1,86 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.executive;
+/**
+ * User: hanna
+ * Date: May 29, 2009
+ * Time: 4:05:27 PM
+ * BROAD INSTITUTE SOFTWARE COPYRIGHT NOTICE AND AGREEMENT
+ * Software and documentation are copyright 2005 by the Broad Institute.
+ * All rights are reserved.
+ *
+ * Users acknowledge that this software is supplied without any warranty or support.
+ * The Broad Institute is not responsible for its use, misuse, or
+ * functionality.
+ */
+
+/**
+ * An interface for retrieving runtime statistics about how the hierarchical
+ * microscheduler is behaving.
+ */
+public interface HierarchicalMicroSchedulerMBean {
+    /**
+     * How many tree reduces are waiting in the tree reduce queue?
+     * @return Total number of reduces waiting in the tree reduce queue.
+     */
+    public int getNumberOfTasksInReduceQueue();
+
+    /**
+     * How many pending I/O combining tasks are waiting in the queue?
+     * @return Total number of I/O tasks waiting in the I/O queue.
+     */
+    public int getNumberOfTasksInIOQueue();
+
+    /**
+     * What is the total time spent running traversals?
+     * @return Total time spent traversing shards; 0 if none have been traversed.
+     */
+    public long getTotalShardTraverseTimeMillis();
+
+    /**
+     * What is the average time spent running traversals?
+     * @return Average time spent traversing shards; 0 if none have been traversed.
+     */
+    public long getAvgShardTraverseTimeMillis();
+
+    /**
+     * What is the total time spent merging output?
+     * @return Total time, in milliseconds, spent merging output files.
+     */
+    public long getTotalOutputMergeTimeMillis();
+
+    /**
+     * What is the total time spent running tree reduces?
+     * @return Total time spent running tree reduces; 0 if none have been run.
+     */
+    public long getTotalTreeReduceTimeMillis();
+
+    /**
+     * What is the average time spent running tree reduces?
+     * @return Average time spent running tree reduces; 0 if none have been run.
+     */
+    public long getAvgTreeReduceTimeMillis();
+}
+
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/executive/LinearMicroScheduler.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/executive/LinearMicroScheduler.java
new file mode 100644
index 0000000..04516da
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/executive/LinearMicroScheduler.java
@@ -0,0 +1,130 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.executive;
+
+import htsjdk.samtools.reference.IndexedFastaSequenceFile;
+import org.broadinstitute.gatk.engine.GenomeAnalysisEngine;
+import org.broadinstitute.gatk.engine.datasources.providers.LocusShardDataProvider;
+import org.broadinstitute.gatk.engine.datasources.providers.ReadShardDataProvider;
+import org.broadinstitute.gatk.engine.datasources.providers.ShardDataProvider;
+import org.broadinstitute.gatk.engine.datasources.reads.SAMDataSource;
+import org.broadinstitute.gatk.engine.datasources.reads.Shard;
+import org.broadinstitute.gatk.engine.datasources.rmd.ReferenceOrderedDataSource;
+import org.broadinstitute.gatk.engine.io.DirectOutputTracker;
+import org.broadinstitute.gatk.engine.io.OutputTracker;
+import org.broadinstitute.gatk.engine.resourcemanagement.ThreadAllocation;
+import org.broadinstitute.gatk.engine.traversals.TraversalEngine;
+import org.broadinstitute.gatk.engine.walkers.Walker;
+import org.broadinstitute.gatk.utils.sam.ReadUtils;
+import org.broadinstitute.gatk.utils.threading.ThreadEfficiencyMonitor;
+
+import java.util.Collection;
+
+
+/** A micro-scheduling manager for single-threaded execution of a traversal. */
+public class LinearMicroScheduler extends MicroScheduler {
+
+    /**
+     * A direct output tracker for directly managing output.
+     */
+    private DirectOutputTracker outputTracker = new DirectOutputTracker();
+
+    /**
+     * Create a new linear microscheduler to process the given reads and reference.
+     *
+     * @param engine    the GenomeAnalysisEngine this scheduler belongs to.
+     * @param walker    Walker for the traversal.
+     * @param reads     Reads file(s) to process.
+     * @param reference Reference for driving the traversal.
+     * @param rods      Reference-ordered data.
+     * @param threadAllocation Thread allocation; only the efficiency-monitoring flag is consulted here.
+     */
+    protected LinearMicroScheduler(final GenomeAnalysisEngine engine,
+                                   final Walker walker,
+                                   final SAMDataSource reads,
+                                   final IndexedFastaSequenceFile reference,
+                                   final Collection<ReferenceOrderedDataSource> rods,
+                                   final ThreadAllocation threadAllocation) {
+        super(engine, walker, reads, reference, rods, threadAllocation);
+
+        if ( threadAllocation.monitorThreadEfficiency() )
+            setThreadEfficiencyMonitor(new ThreadEfficiencyMonitor());
+    }
+
+    /**
+     * Run this traversal over the specified subsection of the dataset.
+     *
+     * @param walker    Computation to perform over dataset.
+     * @param shardStrategy A strategy for sharding the data.
+     * @return The Accumulator holding the traversal results.  NOTE(review):
+     *         unlike HierarchicalMicroScheduler, this returns the accumulator
+     *         itself rather than the value of finishTraversal(); confirm
+     *         callers expect the Accumulator before changing this.
+     */
+    public Object execute(Walker walker, Iterable<Shard> shardStrategy) {
+        super.startingExecution();
+        walker.initialize();
+        Accumulator accumulator = Accumulator.create(engine,walker);
+
+        boolean done = walker.isDone();
+
+        final TraversalEngine traversalEngine = borrowTraversalEngine(this);
+        for (Shard shard : shardStrategy ) {
+            if ( abortExecution() || done || shard == null ) // we ran out of shards that aren't owned
+                break;
+
+            if(shard.getShardType() == Shard.ShardType.LOCUS) {
+                // Locus shards are further decomposed into per-interval windows.
+                WindowMaker windowMaker = new WindowMaker(shard, engine.getGenomeLocParser(),
+                        getReadIterator(shard), shard.getGenomeLocs(), ReadUtils.getSAMFileSamples(engine.getSAMFileHeader()));
+                for(WindowMaker.WindowMakerIterator iterator: windowMaker) {
+                    ShardDataProvider dataProvider = new LocusShardDataProvider(shard,iterator.getSourceInfo(),engine.getGenomeLocParser(),iterator.getLocus(),iterator,reference,rods);
+                    Object result = traversalEngine.traverse(walker, dataProvider, accumulator.getReduceInit());
+                    accumulator.accumulate(dataProvider,result);
+                    dataProvider.close();
+                    if ( walker.isDone() ) break;
+                }
+                windowMaker.close();
+            }
+            else {
+                ShardDataProvider dataProvider = new ReadShardDataProvider(shard,engine.getGenomeLocParser(),getReadIterator(shard),reference,rods);
+                Object result = traversalEngine.traverse(walker, dataProvider, accumulator.getReduceInit());
+                accumulator.accumulate(dataProvider,result);
+                dataProvider.close();
+            }
+
+            done = walker.isDone();
+        }
+
+        // Notify the walker that the traversal is finished.  (The return value
+        // of finishTraversal() was previously stored in an unused local; an
+        // unused 'counter' local has also been removed.)
+        accumulator.finishTraversal();
+
+        outputTracker.close();
+        returnTraversalEngine(this, traversalEngine);
+        cleanup();
+        executionIsDone();
+
+        return accumulator;
+    }
+
+    /**
+     * {@inheritDoc}
+     */
+    public OutputTracker getOutputTracker() { return outputTracker; }
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/executive/MicroScheduler.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/executive/MicroScheduler.java
new file mode 100644
index 0000000..27e0859
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/executive/MicroScheduler.java
@@ -0,0 +1,463 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.executive;
+
+import com.google.java.contract.Ensures;
+import htsjdk.samtools.reference.IndexedFastaSequenceFile;
+import org.apache.log4j.Logger;
+import org.broadinstitute.gatk.engine.GenomeAnalysisEngine;
+import org.broadinstitute.gatk.engine.ReadMetrics;
+import org.broadinstitute.gatk.engine.datasources.reads.SAMDataSource;
+import org.broadinstitute.gatk.engine.datasources.reads.Shard;
+import org.broadinstitute.gatk.engine.datasources.rmd.ReferenceOrderedDataSource;
+import org.broadinstitute.gatk.engine.io.OutputTracker;
+import org.broadinstitute.gatk.engine.iterators.NullSAMIterator;
+import org.broadinstitute.gatk.utils.iterators.GATKSAMIterator;
+import org.broadinstitute.gatk.engine.resourcemanagement.ThreadAllocation;
+import org.broadinstitute.gatk.engine.traversals.*;
+import org.broadinstitute.gatk.engine.walkers.*;
+import org.broadinstitute.gatk.utils.AutoFormattingTime;
+import org.broadinstitute.gatk.utils.MathUtils;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+import org.broadinstitute.gatk.utils.exceptions.UserException;
+import org.broadinstitute.gatk.utils.progressmeter.ProgressMeter;
+import org.broadinstitute.gatk.utils.threading.ThreadEfficiencyMonitor;
+
+import javax.management.JMException;
+import javax.management.MBeanServer;
+import javax.management.ObjectName;
+import java.io.File;
+import java.lang.management.ManagementFactory;
+import java.util.*;
+
+
+/**
+ * Created by IntelliJ IDEA.
+ * User: mhanna
+ * Date: Apr 26, 2009
+ * Time: 12:37:23 PM
+ *
+ * General base class for all scheduling algorithms
+ * Shards and schedules data in manageable chunks.
+ *
+ * Creates N TraversalEngines for each data thread for the MicroScheduler.  This is necessary
+ * because in the HMS case you have multiple threads executing a traversal engine independently, and
+ * these engines may need to create separate resources for efficiency or implementation reasons.  For example,
+ * the nanoScheduler creates threads to implement the traversal, and this creation is instance specific.
+ * So each HMS thread needs to have its own distinct copy of the traversal engine if it wants to have
+ * N data threads x M nano threads => N * M threads total.  These are borrowed from this microscheduler
+ * and returned when done.  Also allows us to track all created traversal engines so this microscheduler
+ * can properly shut them all down when the scheduling is done.
+ *
+ */
+public abstract class MicroScheduler implements MicroSchedulerMBean {
+    protected static final Logger logger = Logger.getLogger(MicroScheduler.class);
+
+    /**
+     * The list of all Traversal engines we've created in this micro scheduler
+     */
+    final List<TraversalEngine> allCreatedTraversalEngines = new LinkedList<TraversalEngine>();
+
+    /**
+     * All available engines.  Engines are borrowed and returned when a subclass is actually
+     * going to execute the engine on some data.  This allows us to have N copies for
+     * N data parallel executions, but without the dangerous code of having local
+     * ThreadLocal variables.
+     */
+    final LinkedList<TraversalEngine> availableTraversalEngines = new LinkedList<TraversalEngine>();
+
+    /**
+     * Engines that have been allocated to a key already.
+     */
+    final HashMap<Object, TraversalEngine> allocatedTraversalEngines = new HashMap<Object, TraversalEngine>();
+
+    /**
+     * Counts the number of instances of the class that are currently alive.
+     * Used only to build a unique JMX ObjectName per instance; incremented in the
+     * constructor (assumed single-threaded construction -- TODO confirm).
+     */
+    private static int instanceNumber = 0;
+
+    /**
+     * The engine invoking this scheduler.
+     */
+    protected final GenomeAnalysisEngine engine;
+
+    protected final IndexedFastaSequenceFile reference;
+
+    private final SAMDataSource reads;
+    protected final Collection<ReferenceOrderedDataSource> rods;
+
+    private final MBeanServer mBeanServer;
+    private final ObjectName mBeanName;
+
+    /**
+     * Threading efficiency monitor for tracking the resource utilization of the GATK
+     *
+     * may be null
+     */
+    ThreadEfficiencyMonitor threadEfficiencyMonitor = null;
+
+    /**
+     * MicroScheduler factory function.  Create a microscheduler appropriate for reducing the
+     * selected walker.
+     *
+     * @param engine        the engine invoking this scheduler
+     * @param walker        Which walker to use.
+     * @param reads         the informations associated with the reads
+     * @param reference     the reference file
+     * @param rods          the rods to include in the traversal
+     * @param threadAllocation Number of threads to utilize.
+     *
+     * @return The best-fit microscheduler.
+     */
+    public static MicroScheduler create(GenomeAnalysisEngine engine, Walker walker, SAMDataSource reads, IndexedFastaSequenceFile reference, Collection<ReferenceOrderedDataSource> rods, ThreadAllocation threadAllocation) {
+        if ( threadAllocation.isRunningInParallelMode() ) {
+            logger.info(String.format("Running the GATK in parallel mode with %d total threads, " +
+                    "%d CPU thread(s) for each of %d data thread(s), of %d processors available on this machine",
+                    threadAllocation.getTotalNumThreads(),
+                    threadAllocation.getNumCPUThreadsPerDataThread(),
+                    threadAllocation.getNumDataThreads(),
+                    Runtime.getRuntime().availableProcessors()));
+            if ( threadAllocation.getTotalNumThreads() > Runtime.getRuntime().availableProcessors() )
+                logger.warn(String.format("Number of requested GATK threads %d is more than the number of " +
+                        "available processors on this machine %d", threadAllocation.getTotalNumThreads(),
+                        Runtime.getRuntime().availableProcessors()));
+        }
+
+        if ( threadAllocation.getNumDataThreads() > 1 ) {
+            if (walker.isReduceByInterval())
+                throw new UserException.BadArgumentValue("nt", String.format("This run of %s is set up to aggregate results by interval.  Due to a current limitation of the GATK, analyses of this type do not currently support parallel execution.  Please run your analysis without the -nt option or check if this tool has an option to disable per-interval calculations.", engine.getWalkerName(walker.getClass())));
+
+            if ( ! (walker instanceof TreeReducible) ) {
+                throw badNT("nt", engine, walker);
+            }
+        }
+
+        if ( threadAllocation.getNumCPUThreadsPerDataThread() > 1 && ! (walker instanceof NanoSchedulable) ) {
+            throw badNT("nct", engine, walker);
+        }
+
+        // Data-parallel (-nt) runs get the hierarchical scheduler; single data thread runs linearly.
+        if ( threadAllocation.getNumDataThreads() > 1 ) {
+            return new HierarchicalMicroScheduler(engine, walker, reads, reference, rods, threadAllocation);
+        } else {
+            return new LinearMicroScheduler(engine, walker, reads, reference, rods, threadAllocation);
+        }
+    }
+
+    /**
+     * Builds the exception reporting that the walker does not support the given parallelism option.
+     *
+     * Note: returns (rather than throws) the exception so that callers can write
+     * {@code throw badNT(...)}, keeping the control flow explicit at the call site.
+     * Previously this method threw internally, making its declared return value unreachable.
+     *
+     * @param parallelArg the offending command-line argument ("nt" or "nct")
+     * @param engine      the engine, used to resolve the walker's display name
+     * @param walker      the walker that does not support this kind of parallelism
+     * @return a non-null UserException describing the problem, for the caller to throw
+     */
+    private static UserException badNT(final String parallelArg, final GenomeAnalysisEngine engine, final Walker walker) {
+        return new UserException.BadArgumentValue(parallelArg,
+                String.format("The analysis %s currently does not support parallel execution with %s.  " +
+                        "Please run your analysis without the %s option.", engine.getWalkerName(walker.getClass()), parallelArg, parallelArg));
+    }
+
+    /**
+     * Create a microscheduler given the reads and reference.
+     *
+     * @param engine  the engine invoking this scheduler
+     * @param walker  the walker to execute with
+     * @param reads   The reads.
+     * @param reference The reference.
+     * @param rods    the rods to include in the traversal
+     * @param threadAllocation the allocation of threads to use in the underlying traversal
+     */
+    protected MicroScheduler(final GenomeAnalysisEngine engine,
+                             final Walker walker,
+                             final SAMDataSource reads,
+                             final IndexedFastaSequenceFile reference,
+                             final Collection<ReferenceOrderedDataSource> rods,
+                             final ThreadAllocation threadAllocation) {
+        this.engine = engine;
+        this.reads = reads;
+        this.reference = reference;
+        this.rods = rods;
+
+        final File progressLogFile = engine.getArguments() == null ? null : engine.getArguments().performanceLog;
+
+        // Creates uninitialized TraversalEngines appropriate for walker and threadAllocation,
+        // and adds it to the list of created engines for later shutdown.
+        for ( int i = 0; i < threadAllocation.getNumDataThreads(); i++ ) {
+            final TraversalEngine traversalEngine = createTraversalEngine(walker, threadAllocation);
+            allCreatedTraversalEngines.add(traversalEngine);
+            availableTraversalEngines.add(traversalEngine);
+        }
+
+        // Create the progress meter, and register it with the analysis engine
+        engine.registerProgressMeter(new ProgressMeter(progressLogFile,
+                availableTraversalEngines.peek().getTraversalUnits(),
+                engine.getRegionsOfGenomeBeingProcessed()));
+
+        // Now that we have a progress meter, go through and initialize the traversal engines
+        for ( final TraversalEngine traversalEngine : allCreatedTraversalEngines )
+            traversalEngine.initialize(engine, walker, engine.getProgressMeter());
+
+        // JMX does not allow multiple instances with the same ObjectName to be registered with the same platform MXBean.
+        // To get around this limitation and since we have no job identifier at this point, register a simple counter that
+        // will count the number of instances of this object that have been created in this JVM.
+        int thisInstance = instanceNumber++;
+        mBeanServer = ManagementFactory.getPlatformMBeanServer();
+        try {
+            mBeanName = new ObjectName("org.broadinstitute.gatk.engine.executive:type=MicroScheduler,instanceNumber="+thisInstance);
+            mBeanServer.registerMBean(this, mBeanName);
+        }
+        catch (JMException ex) {
+            throw new ReviewedGATKException("Unable to register microscheduler with JMX", ex);
+        }
+    }
+
+    /**
+     * Really make us a traversal engine of the appropriate type for walker and thread allocation
+     *
+     * @return a non-null uninitialized traversal engine
+     */
+    @Ensures("result != null")
+    private TraversalEngine createTraversalEngine(final Walker walker, final ThreadAllocation threadAllocation) {
+        if (walker instanceof ReadWalker) {
+            return new TraverseReadsNano(threadAllocation.getNumCPUThreadsPerDataThread());
+        } else if (walker instanceof LocusWalker) {
+            return new TraverseLociNano(threadAllocation.getNumCPUThreadsPerDataThread());
+        } else if (walker instanceof DuplicateWalker) {
+            return new TraverseDuplicates();
+        } else if (walker instanceof ReadPairWalker) {
+            return new TraverseReadPairs();
+        } else if (walker instanceof ActiveRegionWalker) {
+            return new TraverseActiveRegions(threadAllocation.getNumCPUThreadsPerDataThread());
+        } else {
+            throw new UnsupportedOperationException("Unable to determine traversal type, the walker is an unknown type.");
+        }
+    }
+
+
+    /**
+     * Return the ThreadEfficiencyMonitor we are using to track our resource utilization, if there is one
+     *
+     * @return the monitor, or null if none is active
+     */
+    public ThreadEfficiencyMonitor getThreadEfficiencyMonitor() {
+        return threadEfficiencyMonitor;
+    }
+
+    /**
+     * Inform this Microscheduler to use the efficiency monitor used to create threads in subclasses
+     *
+     * @param threadEfficiencyMonitor the monitor to use; may be null to disable monitoring
+     */
+    public void setThreadEfficiencyMonitor(final ThreadEfficiencyMonitor threadEfficiencyMonitor) {
+        this.threadEfficiencyMonitor = threadEfficiencyMonitor;
+    }
+
+    /**
+     * Should we stop all execution work and exit gracefully?
+     *
+     * Returns true in the case where some external signal or time limit has been received, indicating
+     * that this GATK shouldn't continue executing.  This isn't a kill signal, it is really a "shutdown
+     * gracefully at the next opportunity" signal.  Concrete implementations of the MicroScheduler
+     * examine this value as often as reasonable and, if it returns true, stop what they are doing
+     * at the next available opportunity, shutdown their resources, call notify done, and return.
+     *
+     * @return true if we should abort execution, or false otherwise
+     */
+    protected boolean abortExecution() {
+        final boolean abort = engine.exceedsRuntimeLimit();
+        if ( abort ) {
+            final AutoFormattingTime aft = new AutoFormattingTime(engine.getRuntimeLimitInNanoseconds(), -1, 4);
+            logger.info("Aborting execution (cleanly) because the runtime has exceeded the requested maximum " + aft);
+        }
+        return abort;
+    }
+
+    /**
+     * Walks a walker over the given list of intervals.
+     *
+     * @param walker        Computation to perform over dataset.
+     * @param shardStrategy A strategy for sharding the data.
+     *
+     * @return the return type of the walker
+     */
+    public abstract Object execute(Walker walker, Iterable<Shard> shardStrategy);
+
+    /**
+     * Tells this MicroScheduler that the execution of one of the subclass of this object as started
+     *
+     * Must be called when the implementation of execute actually starts up
+     *
+     * Currently only starts the progress meter timer running, but other start up activities could be incorporated
+     */
+    protected void startingExecution() {
+        engine.getProgressMeter().start();
+    }
+
+    /**
+     * Retrieves the object responsible for tracking and managing output.
+     * @return An output tracker, for loading data in and extracting results.  Will not be null.
+     */
+    public abstract OutputTracker getOutputTracker();
+
+    /**
+     * Gets an iterator over the given reads, which will iterate over the reads in the given shard.
+     * @param shard the shard to use when querying reads.
+     * @return an iterator over the reads specified in the shard.
+     */
+    protected GATKSAMIterator getReadIterator(Shard shard) {
+        return (!reads.isEmpty()) ? reads.seek(shard) : new NullSAMIterator();
+    }
+
+    /**
+     * Must be called by subclasses when execute is done
+     */
+    protected void executionIsDone() {
+        engine.getProgressMeter().notifyDone(engine.getCumulativeMetrics().getNumIterations());
+        printReadFilteringStats();
+        shutdownTraversalEngines();
+
+        // Print out the threading efficiency of this HMS, if state monitoring is enabled
+        if ( threadEfficiencyMonitor != null ) {
+            // include the master thread information
+            threadEfficiencyMonitor.threadIsDone(Thread.currentThread());
+            threadEfficiencyMonitor.printUsageInformation(logger);
+        }
+    }
+
+    /**
+     * Shutdown all of the created engines, and clear the list of created engines, dropping
+     * pointers to the traversal engines
+     */
+    public synchronized void shutdownTraversalEngines() {
+        for ( final TraversalEngine te : allCreatedTraversalEngines)
+            te.shutdown();
+
+        allCreatedTraversalEngines.clear();
+        availableTraversalEngines.clear();
+    }
+
+    /**
+     * Prints out information about number of reads observed and filtering, if any reads were used in the traversal
+     *
+     * Looks like:
+     *
+     * INFO  10:40:47,370 MicroScheduler - 22 reads were filtered out during traversal out of 101 total (21.78%)
+     * INFO  10:40:47,370 MicroScheduler -   -> 1 reads (0.99% of total) failing BadMateFilter
+     * INFO  10:40:47,370 MicroScheduler -   -> 20 reads (19.80% of total) failing DuplicateReadFilter
+     * INFO  10:40:47,370 MicroScheduler -   -> 1 reads (0.99% of total) failing FailsVendorQualityCheckFilter
+     */
+    private void printReadFilteringStats() {
+        final ReadMetrics cumulativeMetrics = engine.getCumulativeMetrics();
+        if ( cumulativeMetrics.getNumReadsSeen() > 0 ) {
+            // count up the number of skipped reads by summing over all filters
+            long nSkippedReads = 0L;
+            for ( final long countsByFilter : cumulativeMetrics.getCountsByFilter().values())
+                nSkippedReads += countsByFilter;
+
+            logger.info(String.format("%d reads were filtered out during the traversal out of approximately %d total reads (%.2f%%)",
+                    nSkippedReads,
+                    cumulativeMetrics.getNumReadsSeen(),
+                    100.0 * MathUtils.ratio(nSkippedReads, cumulativeMetrics.getNumReadsSeen())));
+
+            for ( final Map.Entry<String, Long> filterCounts : cumulativeMetrics.getCountsByFilter().entrySet() ) {
+                long count = filterCounts.getValue();
+                logger.info(String.format("  -> %d reads (%.2f%% of total) failing %s",
+                        count, 100.0 * MathUtils.ratio(count,cumulativeMetrics.getNumReadsSeen()), filterCounts.getKey()));
+            }
+        }
+    }
+
+    /**
+     * Gets the engine that created this microscheduler.
+     * @return The engine owning this microscheduler.
+     */
+    public GenomeAnalysisEngine getEngine() { return engine; }
+
+    /**
+     * Returns data source maintained by this scheduler
+     * @return the SAM data source this scheduler reads from
+     */
+    public SAMDataSource getSAMDataSource() { return reads; }
+
+    /**
+     * Returns the reference maintained by this scheduler.
+     * @return The reference maintained by this scheduler.
+     */
+    public IndexedFastaSequenceFile getReference() { return reference; }
+
+    protected void cleanup() {
+        try {
+            mBeanServer.unregisterMBean(mBeanName);
+        }
+        catch (JMException ex) {
+            throw new ReviewedGATKException("Unable to unregister microscheduler with JMX", ex);
+        }
+    }
+
+    /**
+     * Returns a traversal engine suitable for use, associated with key
+     *
+     * Key is an arbitrary object that is used to retrieve the same traversal
+     * engine over and over.  This can be important in the case where the
+     * traversal engine has data associated with it in some other context,
+     * and we need to ensure that the context always sees the same traversal
+     * engine.  This happens in the HierarchicalMicroScheduler, where you want
+     * a thread executing traversals to retrieve the same engine each time,
+     * as outputs are tracked w.r.t. that engine.
+     *
+     * If no engine is associated with key yet, pops the next available engine
+     * from the available ones maintained by this
+     * microscheduler.  Note that it's a runtime error to pop a traversal engine
+     * from this scheduler if there are none available.  Callers that
+     * once pop'd an engine for use must return it with returnTraversalEngine
+     *
+     * @param key the key to associate with this engine
+     * @return a non-null TraversalEngine suitable for execution in this scheduler
+     */
+    @Ensures("result != null")
+    protected synchronized TraversalEngine borrowTraversalEngine(final Object key) {
+        if ( key == null ) throw new IllegalArgumentException("key cannot be null");
+
+        final TraversalEngine engine = allocatedTraversalEngines.get(key);
+        if ( engine == null ) {
+            if ( availableTraversalEngines.isEmpty() )
+                throw new IllegalStateException("no traversal engines were available");
+            allocatedTraversalEngines.put(key, availableTraversalEngines.pop());
+            return allocatedTraversalEngines.get(key);
+        } else {
+            return engine;
+        }
+    }
+
+    /**
+     * Return a borrowed traversal engine to this MicroScheduler, for later use
+     * in another traversal execution
+     *
+     * @param key the key used to id the engine, provided to the borrowTraversalEngine function
+     * @param traversalEngine the borrowed traversal engine.  Must have been previously borrowed.
+     */
+    protected synchronized void returnTraversalEngine(final Object key, final TraversalEngine traversalEngine) {
+        if ( traversalEngine == null )
+            throw new IllegalArgumentException("Attempting to push a null traversal engine");
+        if ( ! allCreatedTraversalEngines.contains(traversalEngine) )
+            throw new IllegalArgumentException("Attempting to push a traversal engine not created by this MicroScheduler: " + engine);
+        if ( ! allocatedTraversalEngines.containsKey(key) )
+            throw new IllegalArgumentException("No traversal engine was ever checked out with key " + key);
+
+        // note there's nothing to actually do here, but a function implementation
+        // might want to do something
+    }
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/executive/MicroSchedulerMBean.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/executive/MicroSchedulerMBean.java
new file mode 100644
index 0000000..06fcfef
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/executive/MicroSchedulerMBean.java
@@ -0,0 +1,37 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.executive;
+
+/**
+ * JMX management interface implemented by {@link MicroScheduler}.
+ *
+ * Each MicroScheduler registers itself with the platform MBean server under a
+ * unique instance number; this interface defines the attributes and operations
+ * exposed through that registration.
+ */
+public interface MicroSchedulerMBean {
+    // intentionally empty: no attributes or operations are currently exposed via JMX
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/executive/OutputMergeTask.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/executive/OutputMergeTask.java
new file mode 100644
index 0000000..846657a
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/executive/OutputMergeTask.java
@@ -0,0 +1,102 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.executive;
+
+import org.broadinstitute.gatk.engine.io.storage.Storage;
+
+import java.util.ArrayList;
+import java.util.Collection;
+
+/**
+ * User: hanna
+ * Date: Apr 30, 2009
+ * Time: 4:04:38 PM
+ * BROAD INSTITUTE SOFTWARE COPYRIGHT NOTICE AND AGREEMENT
+ * Software and documentation are copyright 2005 by the Broad Institute.
+ * All rights are reserved.
+ *
+ * Users acknowledge that this software is supplied without any warranty or support.
+ * The Broad Institute is not responsible for its use, misuse, or
+ * functionality.
+ */
+
+/**
+ * Accumulates a set of (target stream, temporary storage) pairs and, on request,
+ * merges the contents of each piece of temporary storage into its target stream.
+ * Registration via addMergeOperation() is not synchronized; merge() is
+ * synchronized so it can be safely triggered after registration completes.
+ */
+public class OutputMergeTask {
+    /**
+     * The output streams which should be written to.
+     */
+    private final Collection<MergeOperation<?>> mergeOperations = new ArrayList<MergeOperation<?>>();
+
+    /**
+     * Add a new merge operation to this merge task.
+     * @param targetStream Target for stream output.
+     * @param temporaryStorage Temporary storage.
+     * @param <StreamType> Type of the output stream.
+     */
+    public <StreamType> void addMergeOperation( StreamType targetStream, Storage<StreamType> temporaryStorage ) {
+        mergeOperations.add( new MergeOperation<StreamType>(targetStream,temporaryStorage) );
+    }
+
+    /**
+     * Merge data from temporary storage into the registered target streams.
+     */
+    public synchronized void merge() {
+        // Use a wildcard (not a raw type) and delegate to a generic helper so the
+        // compiler can prove targetStream and temporaryStorage share a StreamType.
+        for( MergeOperation<?> mergeOperation: mergeOperations )
+            mergeSingle(mergeOperation);
+    }
+
+    /**
+     * Captures the wildcard of a single merge operation, tying the storage and
+     * its target stream to the same StreamType, then performs the merge.
+     * @param mergeOperation the operation to execute.
+     * @param <StreamType> Type of the output stream.
+     */
+    private static <StreamType> void mergeSingle( MergeOperation<StreamType> mergeOperation ) {
+        mergeOperation.temporaryStorage.mergeInto(mergeOperation.targetStream);
+    }
+
+    /**
+     * Represents a single file needed to be merged.  Declared static because it
+     * never uses the enclosing instance; this avoids carrying a hidden reference
+     * to the OutputMergeTask in every operation.
+     * @param <StreamType> Type of the file to be merged.
+     */
+    private static class MergeOperation<StreamType> {
+        /**
+         * Destination for the temporary file's output.
+         */
+        public final StreamType targetStream;
+
+        /**
+         * Temporary storage location for the file.
+         */
+        public final Storage<StreamType> temporaryStorage;
+
+        /**
+         * Create a new merge file object with the given output stream and storage placeholder.
+         * @param targetStream Target for temporary data.
+         * @param temporaryStorage The temporary data itself.
+         */
+        public MergeOperation( StreamType targetStream, Storage<StreamType> temporaryStorage ) {
+            this.targetStream = targetStream;
+            this.temporaryStorage = temporaryStorage;
+        }
+    }
+}
+
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/executive/ReduceTree.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/executive/ReduceTree.java
new file mode 100644
index 0000000..b1ab50b
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/executive/ReduceTree.java
@@ -0,0 +1,187 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.executive;
+
+import java.util.ArrayList;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Queue;
+import java.util.concurrent.Future;
+/**
+ * User: hanna
+ * Date: Apr 28, 2009
+ * Time: 11:09:29 AM
+ * BROAD INSTITUTE SOFTWARE COPYRIGHT NOTICE AND AGREEMENT
+ * Software and documentation are copyright 2005 by the Broad Institute.
+ * All rights are reserved.
+ *
+ * Users acknowledge that this software is supplied without any warranty or support.
+ * The Broad Institute is not responsible for its use, misuse, or
+ * functionality.
+ */
+
+/**
+ * A tree for organizing reduce results and detecting when enough dependencies
+ * are resolved for a reduce to be scheduled.  The tree can trigger a callback
+ * whenever it believes a reduce operation is pending.
+ *
+ * Not thread-safe.  All calls should be made sequentially from the same thread.
+ */
+public class ReduceTree {
+    /**
+     * Data structure for the tree.  Each entry in the outer list represents a level
+     * of the tree, and each entry in the inner queues represent nodes in that level.
+     *
+     * Whenever a reduce can happen, the entries to be reduced are pulled out of
+     * their slots in level n of the tree and the composite entry is added to level n+1.
+     */
+    private List<Queue<Future>> treeNodes = new ArrayList<Queue<Future>>();
+
+    /**
+     * The entire contents have been added to the tree.  Completely schedule the reductions.
+     */
+    private boolean treeComplete = false;
+
+    /**
+     * Called to indicate that all data required to perform a given reduce has been scheduled.
+     */
+    private TreeReduceNotifier treeReduceNotifier = null;
+
+    /**
+     * Creates a ReduceTree.
+     * @param notifier A callback indicating that all data required to perform a given reduce has been scheduled.
+     */
+    public ReduceTree( TreeReduceNotifier notifier ) {
+        this.treeReduceNotifier = notifier;
+    }
+
+    /**
+     * A callback indicating that all computations have been scheduled to complete the given reduce.
+     */
+    public interface TreeReduceNotifier {
+        /**
+         * Indicates that a reduce is ready to happen.
+         * @param lhs Left-hand side of the tree reduce.
+         * @param rhs Right-hand side of the tree reduce.
+         * @return The future result of the computation reduce(lhs,rhs)
+         */
+        Future notifyReduce( Future lhs, Future rhs );
+    }
+
+    /**
+     * Add an entry to the list of data to be reduced.  The results of entry.get() will
+     * be scheduled for reduction with neighboring elements.
+     * @param entry Entry to be paired with other elements.
+     */
+    public void addEntry( Future entry ) {
+        addNodeAtLevel( entry, 0 );
+    }
+
+    /**
+     * Signal to the ReduceTree that all possible data has been added and it should reduce
+     * as much as is possible.
+     */
+    public void complete() {
+        treeComplete = true;
+        reduce();
+    }
+
+    /**
+     * Gets the placeholder for the final result of the tree reduce.
+     * @return Future whose get() method will return the final result.  Null if nothing has been added.
+     */
+    public Future getResult() {
+        if( !treeComplete )
+            throw new IllegalStateException( "Cannot get the final result for an incomplete tree.");
+
+        // If nothing has been added to the tree, return null.
+        if( treeNodes.size() == 0 )
+            return null;
+
+        // Assert that there aren't any pending computations that were forgotten along the way.
+        // NOTE(review): the bound stops at size()-2, so the second-to-last level is never
+        // inspected for leftovers — confirm whether size()-1 was intended.
+        for( int i = 0; i < treeNodes.size() - 2; i++ ) {
+            if( treeNodes.get(i).size() > 0 )
+                throw new IllegalStateException( "Some inner reduces were missed along the way.");
+        }
+
+        Queue<Future> lastLevel = treeNodes.get(treeNodes.size() - 1);
+
+        // Assert that there's only one reduce left at the last level.
+        if( lastLevel.size() != 1 )
+            throw new IllegalStateException( "Invalid number of entries at the tip of the tree: " + lastLevel.size() );
+
+        // Get the placeholder for the last result.
+        return lastLevel.element();
+    }
+
+    /**
+     * Recursively collapse the tree wherever possible.
+     */
+    protected void reduce() {
+        reduce( 0 );
+    }
+
+    /**
+     * Recursively collapse the tree, starting at the specified level.
+     * @param level Level at which to start reducing.
+     */
+    private void reduce( int level ) {
+        // base case for recursion.
+        if( treeNodes.size() <= level )
+            return;
+
+        Queue<Future> treeLevel = treeNodes.get(level);
+
+        // Pair up neighboring nodes and schedule a reduce for each pair; the
+        // composite result lands one level up.
+        while( treeLevel.size() >= 2 ) {
+            addNodeAtLevel( treeReduceNotifier.notifyReduce( treeLevel.remove(), treeLevel.remove() ), level + 1 );
+        }
+
+        // Once the tree is complete, a lone leftover node is promoted unchanged
+        // to the next level so it can pair up there.
+        if( treeLevel.size() == 1 && treeComplete && !isDeepestLevel(level) ) {
+            Future element = treeLevel.remove();
+            addNodeAtLevel( element, level + 1 );
+        }
+
+        reduce( level + 1 );
+    }
+
+    /** @return True if the given level is the last (deepest) level currently in the tree. */
+    private boolean isDeepestLevel( int level ) {
+        return level == (treeNodes.size() - 1);
+    }
+
+    /**
+     * Add the given node to the tree at the corresponding level.  Create the level
+     * if it doesn't exist.
+     * @param node Node to add.  Must not be null.
+     * @param level Level number at which to add.  0-based index into treeNodes list.
+     */
+    protected void addNodeAtLevel( Future node, int level ) {
+        while( treeNodes.size() <= level )
+            treeNodes.add( new LinkedList<Future>() );
+        treeNodes.get(level).add(node);
+        reduce(level);
+    }
+
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/executive/ShardTraverser.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/executive/ShardTraverser.java
new file mode 100644
index 0000000..5753b0f
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/executive/ShardTraverser.java
@@ -0,0 +1,163 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.executive;
+
+import org.apache.log4j.Logger;
+import org.broadinstitute.gatk.engine.datasources.providers.LocusShardDataProvider;
+import org.broadinstitute.gatk.engine.datasources.providers.ShardDataProvider;
+import org.broadinstitute.gatk.engine.datasources.reads.Shard;
+import org.broadinstitute.gatk.engine.io.ThreadGroupOutputTracker;
+import org.broadinstitute.gatk.engine.traversals.TraversalEngine;
+import org.broadinstitute.gatk.engine.walkers.Walker;
+import org.broadinstitute.gatk.utils.Utils;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+
+import java.util.concurrent.Callable;
+/**
+ * User: hanna
+ * Date: Apr 29, 2009
+ * Time: 4:40:38 PM
+ * BROAD INSTITUTE SOFTWARE COPYRIGHT NOTICE AND AGREEMENT
+ * Software and documentation are copyright 2005 by the Broad Institute.
+ * All rights are reserved.
+ *
+ * Users acknowledge that this software is supplied without any warranty or support.
+ * The Broad Institute is not responsible for its use, misuse, or
+ * functionality.
+ */
+/**
+ * Carries the walker over a given shard, in a callable interface.
+ */
+public class ShardTraverser implements Callable {
+    final private HierarchicalMicroScheduler microScheduler;
+    final private Walker walker;
+    final private Shard shard;
+    final private ThreadGroupOutputTracker outputTracker;
+    // Set at the end of a successful call(); null until the traversal closes its storage.
+    private OutputMergeTask outputMergeTask;
+
+    /** our log, which we want to capture anything from this class */
+    final protected static Logger logger = Logger.getLogger(ShardTraverser.class);
+
+    /**
+     * Is this traversal complete?  All access is guarded by synchronized(this).
+     */
+    private boolean complete = false;
+
+    public ShardTraverser( HierarchicalMicroScheduler microScheduler,
+                           Walker walker,
+                           Shard shard,
+                           ThreadGroupOutputTracker outputTracker) {
+        this.microScheduler = microScheduler;
+        this.walker = walker;
+        this.shard = shard;
+        this.outputTracker = outputTracker;
+    }
+
+    /**
+     * Runs the walker over every window of this shard and returns the reduce result.
+     * On error, notifies the scheduler and rethrows; in all cases marks the traversal
+     * complete and wakes any thread blocked in waitForComplete().
+     */
+    public Object call() {
+        final Object traversalEngineKey = Thread.currentThread();
+        final TraversalEngine traversalEngine = microScheduler.borrowTraversalEngine(traversalEngineKey);
+
+        try {
+            final long startTime = System.currentTimeMillis();
+
+            // this is CRITICAL -- initializes output maps in this master thread,
+            // so that any subthreads created by the traversal itself can access this map
+            outputTracker.initializeStorage();
+
+            Object accumulator = walker.reduceInit();
+            final WindowMaker windowMaker = new WindowMaker(shard,microScheduler.getEngine().getGenomeLocParser(),
+                    microScheduler.getReadIterator(shard),
+                    shard.getGenomeLocs(),
+                    microScheduler.engine.getSampleDB().getSampleNames()); // todo: microScheduler.engine is protected - is it okay to user it here?
+
+            for(WindowMaker.WindowMakerIterator iterator: windowMaker) {
+                final ShardDataProvider dataProvider = new LocusShardDataProvider(shard,iterator.getSourceInfo(),microScheduler.getEngine().getGenomeLocParser(),iterator.getLocus(),iterator,microScheduler.reference,microScheduler.rods);
+                accumulator = traversalEngine.traverse(walker, dataProvider, accumulator);
+                dataProvider.close();
+            }
+
+            windowMaker.close();
+            outputMergeTask = outputTracker.closeStorage();
+
+            final long endTime = System.currentTimeMillis();
+
+            microScheduler.reportShardTraverseTime(endTime-startTime);
+
+            return accumulator;
+        } catch(Throwable t) {
+            // Notify that an exception has occurred and rethrow it.
+            throw microScheduler.notifyOfTraversalError(t);
+        } finally {
+            synchronized(this) {
+                // Completion is signalled even on failure so waiters are always released.
+                complete = true;
+                microScheduler.returnTraversalEngine(traversalEngineKey, traversalEngine);
+                notifyAll();
+            }
+        }
+    }
+
+    /**
+     * Return a human readable string describing the intervals this traverser is operating on
+     * @return Comma-separated list of this shard's genome locations.
+     */
+    public String getIntervalsString() {
+        return Utils.join(",", shard.getGenomeLocs());
+    }
+
+    /**
+     * Has this traversal completed?
+     * @return True if completed, false otherwise.
+     */
+    public boolean isComplete() {
+        synchronized(this) {
+            return complete;
+        }
+    }
+
+    /**
+     * Blocks until this traversal has completed, successfully or otherwise.
+     * NOTE(review): wait() is not wrapped in a while(!complete) loop, so a
+     * spurious wakeup would return before completion — confirm whether that matters here.
+     */
+    public void waitForComplete() {
+        try {
+            synchronized(this) {
+                if( isComplete() )
+                    return;
+                wait();
+            }
+        }
+        catch( InterruptedException ex ) {
+            throw new ReviewedGATKException("Interrupted while waiting for more output to be finalized.",ex);
+        }
+    }
+
+    /**
+     * Gets the output merge task associated with the given shard.
+     * @return OutputMergeTask if one exists; null if nothing needs to be merged.
+     */
+    public OutputMergeTask getOutputMergeTask() {
+        return outputMergeTask;
+    }
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/executive/TreeReducer.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/executive/TreeReducer.java
new file mode 100644
index 0000000..67a5fab
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/executive/TreeReducer.java
@@ -0,0 +1,127 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.executive;
+
+import org.broadinstitute.gatk.engine.walkers.TreeReducible;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+
+import java.util.concurrent.Callable;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.Future;
+/**
+ * User: hanna
+ * Date: Apr 29, 2009
+ * Time: 4:47:35 PM
+ * BROAD INSTITUTE SOFTWARE COPYRIGHT NOTICE AND AGREEMENT
+ * Software and documentation are copyright 2005 by the Broad Institute.
+ * All rights are reserved.
+ *
+ * Users acknowledge that this software is supplied without any warranty or support.
+ * The Broad Institute is not responsible for its use, misuse, or
+ * functionality.
+ */
+
+/**
+ * Represents a future reduce...a reduce that will be ready at some point in the future.
+ * Provides services for indicating when all data is prepared for the reduce, and a callable
+ * interface to force the reduce.
+ */
+public class TreeReducer implements Callable {
+    final private HierarchicalMicroScheduler microScheduler;
+    private TreeReducible walker;
+    // Both futures are released (nulled) after call() so their results can be collected.
+    private Future lhs;
+    private Future rhs;
+
+    /**
+     * Create a full tree reduce.  Combine these two results using an unspecified walker at some point in the future.
+     * @param microScheduler The parent hierarchical microscheduler for this reducer.
+     * @param lhs Left-hand side of the reduce.
+     * @param rhs Right-hand side of the reduce.
+     */
+    public TreeReducer( HierarchicalMicroScheduler microScheduler, Future lhs, Future rhs ) {
+        this.microScheduler = microScheduler;
+        this.lhs = lhs;
+        this.rhs = rhs;
+    }
+
+    /**
+     * Provide a walker for the future reduce.
+     * @param walker walker to use when performing the reduce.
+     */
+    public void setWalker( TreeReducible walker ) {
+        this.walker = walker;
+    }
+
+    /**
+     * Is the data ready for reduce?  True if lhs and rhs have already been resolved.
+     * @return True if data is ready and waiting, false otherwise.
+     */
+    public boolean isReadyForReduce() {
+        if( lhs == null )
+            throw new IllegalStateException(String.format("Insufficient data on which to reduce; lhs = %s, rhs = %s", lhs, rhs) );
+
+        return lhs.isDone() && (rhs == null || rhs.isDone());
+    }
+
+    /**
+     * Returns the value of the reduce.  If not isReadyForReduce(), this call will block until all entries become ready.
+     * @return Result of the reduce.
+     */
+    public Object call() {
+        Object result;
+
+        final long startTime = System.currentTimeMillis();
+
+        try {
+            if( lhs == null )
+                result = null;
+                // todo -- what the hell is this above line?  Shouldn't it be the two below?
+//            if( lhs == null )
+//                throw new IllegalStateException(String.format("Insufficient data on which to reduce; lhs = %s, rhs = %s", lhs, rhs) );
+            else
+                // NOTE(review): assumes rhs is non-null here, although isReadyForReduce()
+                // tolerates rhs == null; a single-sided reduce would NPE — confirm
+                // callers always supply both futures.
+                result = walker.treeReduce( lhs.get(), rhs.get() );
+        }
+        catch( InterruptedException ex ) {
+            microScheduler.notifyOfTraversalError(ex);
+            throw new ReviewedGATKException("Hierarchical reduce interrupted", ex);
+        }
+        catch( ExecutionException ex ) {
+            microScheduler.notifyOfTraversalError(ex);
+            throw new ReviewedGATKException("Hierarchical reduce failed", ex);
+        }
+
+        final long endTime = System.currentTimeMillis();
+
+        // Constituent bits of this tree reduce are no longer required.  Throw them away.
+        this.lhs = null;
+        this.rhs = null;
+
+        microScheduler.reportTreeReduceTime( endTime - startTime );
+
+        return result;
+    }
+}
+
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/executive/WindowMaker.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/executive/WindowMaker.java
new file mode 100644
index 0000000..e2f870d
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/executive/WindowMaker.java
@@ -0,0 +1,218 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.executive;
+
+import htsjdk.samtools.util.PeekableIterator;
+import org.broadinstitute.gatk.engine.ReadProperties;
+import org.broadinstitute.gatk.utils.contexts.AlignmentContext;
+import org.broadinstitute.gatk.engine.datasources.reads.Shard;
+import org.broadinstitute.gatk.utils.sam.GATKSAMRecordIterator;
+import org.broadinstitute.gatk.utils.iterators.GATKSAMIterator;
+import org.broadinstitute.gatk.utils.GenomeLoc;
+import org.broadinstitute.gatk.utils.GenomeLocParser;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+import org.broadinstitute.gatk.utils.locusiterator.LocusIterator;
+import org.broadinstitute.gatk.utils.locusiterator.LocusIteratorByState;
+
+import java.util.Collection;
+import java.util.Iterator;
+import java.util.List;
+import java.util.NoSuchElementException;
+
+/**
+ * Transforms an iterator of reads which overlap the given interval list into an iterator of covered single-base loci
+ * completely contained within the interval list.  To do this, it creates a LocusIteratorByState which will emit a single-bp
+ * locus for every base covered by the read iterator, then uses the WindowMakerIterator.advance() to filter down that stream of
+ * loci to only those covered by the given interval list.
+ *
+ * Example:
+ * Incoming stream of reads: A:chr20:1-5, B:chr20:2-6, C:chr20:2-7, D:chr20:3-8, E:chr20:5-10
+ * Incoming intervals: chr20:3-7
+ *
+ * Locus iterator by state will produce the following stream of data:
+ *  chr20:1 {A}, chr20:2 {A,B,C}, chr20:3 {A,B,C,D}, chr20:4 {A,B,C,D}, chr20:5 {A,B,C,D,E},
+ *  chr20:6 {B,C,D,E}, chr20:7 {C,D,E}, chr20:8 {D,E}, chr20:9 {E}, chr20:10 {E}
+ *
+ * WindowMakerIterator will then filter the incoming stream, emitting the following stream:
+ *  chr20:3 {A,B,C,D}, chr20:4 {A,B,C,D}, chr20:5 {A,B,C,D,E}, chr20:6 {B,C,D,E}, chr20:7 {C,D,E}
+ *
+ * @author mhanna
+ * @version 0.1
+ */
+public class WindowMaker implements Iterable<WindowMaker.WindowMakerIterator>, Iterator<WindowMaker.WindowMakerIterator> {
+    /**
+     * Source information for iteration.
+     */
+    private final ReadProperties sourceInfo;
+
+    /**
+     * Hold the read iterator so that it can be closed later.
+     */
+    private final GATKSAMRecordIterator readIterator;
+
+    /**
+     * The data source for reads.  Will probably come directly from the BAM file.
+     */
+    private final PeekableIterator<AlignmentContext> sourceIterator;
+
+    /**
+     * Stores the sequence of intervals that the windowmaker should be tracking.
+     * Null when no intervals were supplied (monolithic sharding).
+     */
+    private final PeekableIterator<GenomeLoc> intervalIterator;
+
+    /**
+     * In the case of monolithic sharding, this flag records whether the only shard has been generated.
+     */
+    private boolean shardGenerated = false;
+
+    /**
+     * The alignment context to return from this shard's iterator.  Lazy implementation: the iterator will not find the
+     * currentAlignmentContext until absolutely required to do so.   If currentAlignmentContext is null and advance()
+     * doesn't populate it, no more elements are available.  If currentAlignmentContext is non-null, currentAlignmentContext
+     * should be returned by next().
+     */
+    private AlignmentContext currentAlignmentContext;
+
+    /**
+     * The underlying per-base locus iterator feeding sourceIterator.
+     */
+    private final LocusIteratorByState libs;
+
+    /**
+     * Create a new window maker with the given iterator as a data source, covering
+     * the given intervals.
+     * @param shard Shard whose read properties configure iteration.
+     * @param genomeLocParser Parser for genomic locations.
+     * @param iterator The data source for this window.
+     * @param intervals The set of intervals over which to traverse.
+     * @param sampleNames The complete set of sample names in the reads in shard
+     */
+    public WindowMaker(Shard shard, GenomeLocParser genomeLocParser, GATKSAMIterator iterator, List<GenomeLoc> intervals, Collection<String> sampleNames) {
+        this.sourceInfo = shard.getReadProperties();
+        this.readIterator = new GATKSAMRecordIterator(iterator);
+
+        this.libs = new LocusIteratorByState(readIterator,
+                sourceInfo.getDownsamplingMethod(), sourceInfo.includeReadsWithDeletionAtLoci(),
+                sourceInfo.keepUniqueReadListInLIBS(), genomeLocParser,sampleNames);
+        this.sourceIterator = new PeekableIterator<AlignmentContext>(libs);
+
+        // An empty interval list signals monolithic (single-window) sharding.
+        this.intervalIterator = intervals.size()>0 ? new PeekableIterator<GenomeLoc>(intervals.iterator()) : null;
+    }
+
+    /**
+     * Convenience constructor for reads lacking read groups: uses the placeholder sample list.
+     */
+    public WindowMaker(Shard shard, GenomeLocParser genomeLocParser, GATKSAMIterator iterator, List<GenomeLoc> intervals ) {
+        this(shard, genomeLocParser, iterator, intervals, LocusIteratorByState.sampleListForSAMWithoutReadGroups());
+    }
+
+    public Iterator<WindowMakerIterator> iterator() {
+        return this;
+    }
+
+    public boolean hasNext() {
+        // Either more intervals remain, or the single monolithic window has not yet been handed out.
+        return (intervalIterator != null && intervalIterator.hasNext()) || !shardGenerated;
+    }
+
+    public WindowMakerIterator next() {
+        shardGenerated = true;
+        // A null locus means "no filter": the iterator will emit everything LIBS provides.
+        return new WindowMakerIterator(intervalIterator != null ? intervalIterator.next() : null);
+    }
+
+    public void remove() {
+        throw new UnsupportedOperationException("Cannot remove from a window maker.");
+    }
+
+    public void close() {
+        this.readIterator.close();
+    }
+
+    public class WindowMakerIterator extends LocusIterator {
+        /**
+         * The locus for which this iterator is currently returning reads.
+         */
+        private final GenomeLoc locus;
+
+        public WindowMakerIterator(GenomeLoc locus) {
+            this.locus = locus;
+            advance();
+        }
+
+        public ReadProperties getSourceInfo() {
+            return sourceInfo;
+        }
+
+        public GenomeLoc getLocus() {
+            return locus;
+        }
+
+        public WindowMakerIterator iterator() {
+            return this;
+        }
+
+        public boolean hasNext() {
+            advance();
+            return currentAlignmentContext != null;
+        }
+
+        public AlignmentContext next() {
+            if(!hasNext()) throw new NoSuchElementException("WindowMakerIterator is out of elements for this interval.");
+
+            // Consume this alignment context.
+            AlignmentContext toReturn = currentAlignmentContext;
+            currentAlignmentContext = null;
+
+            // Return the current element.
+            return toReturn;
+        }
+
+        private void advance() {
+            // Need to find the next element that is not past shard boundaries.  If we travel past the edge of
+            // shard boundaries, stop and let the next interval pick it up.
+            while(currentAlignmentContext == null && sourceIterator.hasNext()) {
+                // Advance the iterator and try again.
+                AlignmentContext candidateAlignmentContext = sourceIterator.peek();
+
+                if(locus == null) {
+                    // No filter present.  Return everything that LocusIteratorByState provides us.
+                    currentAlignmentContext = sourceIterator.next();
+                }
+                else if(locus.isPast(candidateAlignmentContext.getLocation()))
+                    // Found a locus before the current window; claim this alignment context and throw it away.
+                    sourceIterator.next();
+                else if(locus.containsP(candidateAlignmentContext.getLocation())) {
+                    // Found a locus within the current window; claim this alignment context and call it the next entry.
+                    currentAlignmentContext = sourceIterator.next();
+                }
+                else if(locus.isBefore(candidateAlignmentContext.getLocation())) {
+                    // Whoops.  Skipped passed the end of the region.  Iteration for this window is complete.  Do
+                    // not claim this alignment context in case it is part of the next shard.
+                    break;
+                }
+                else
+                    throw new ReviewedGATKException("BUG: filtering locus does not contain, is not before, and is not past the given alignment context");
+            }
+        }
+
+        @Override
+        public LocusIteratorByState getLIBS() {
+            return libs;
+        }
+    }
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/executive/package-info.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/executive/package-info.java
new file mode 100644
index 0000000..400f028
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/executive/package-info.java
@@ -0,0 +1,26 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.executive;
\ No newline at end of file
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/filters/BAQReadTransformer.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/filters/BAQReadTransformer.java
new file mode 100644
index 0000000..c3ff6ae
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/filters/BAQReadTransformer.java
@@ -0,0 +1,75 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.filters;
+
+import htsjdk.samtools.reference.IndexedFastaSequenceFile;
+import org.broadinstitute.gatk.engine.GenomeAnalysisEngine;
+import org.broadinstitute.gatk.engine.WalkerManager;
+import org.broadinstitute.gatk.engine.iterators.ReadTransformer;
+import org.broadinstitute.gatk.engine.walkers.BAQMode;
+import org.broadinstitute.gatk.engine.walkers.Walker;
+import org.broadinstitute.gatk.utils.baq.BAQ;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+import org.broadinstitute.gatk.utils.exceptions.UserException;
+import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
+
+/**
+ * Applies Heng's BAQ calculation to a stream of incoming reads
+ */
+public class BAQReadTransformer extends ReadTransformer {
+    private BAQ baqHMM;
+    private IndexedFastaSequenceFile refReader;
+    private BAQ.CalculationMode cmode;
+    private BAQ.QualityMode qmode;
+
+    @Override
+    public ApplicationTime initializeSub(final GenomeAnalysisEngine engine, final Walker walker) {
+        final BAQMode mode = WalkerManager.getWalkerAnnotation(walker, BAQMode.class);
+        this.refReader = engine.getReferenceDataSource().getReference();
+        this.cmode = engine.getArguments().BAQMode;
+        this.qmode = mode.QualityMode();
+        baqHMM = new BAQ(engine.getArguments().BAQGOP);
+
+        if ( qmode == BAQ.QualityMode.DONT_MODIFY )
+            throw new ReviewedGATKException("BUG: shouldn't create BAQ transformer with quality mode DONT_MODIFY");
+
+        if ( mode.ApplicationTime() == ReadTransformer.ApplicationTime.FORBIDDEN && enabled() )
+            throw new UserException.BadArgumentValue("baq", "Walker cannot accept BAQ'd base qualities, and yet BAQ mode " + cmode + " was requested.");
+
+        return mode.ApplicationTime();
+    }
+
+    @Override
+    public boolean enabled() {
+        return cmode != BAQ.CalculationMode.OFF;
+    }
+
+    @Override
+    public GATKSAMRecord apply(final GATKSAMRecord read) {
+        baqHMM.baqRead(read, refReader, cmode, qmode);
+        return read;
+    }
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/filters/BadCigarFilter.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/filters/BadCigarFilter.java
new file mode 100644
index 0000000..d4f6cb8
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/filters/BadCigarFilter.java
@@ -0,0 +1,141 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.filters;
+
+import htsjdk.samtools.Cigar;
+import htsjdk.samtools.CigarElement;
+import htsjdk.samtools.CigarOperator;
+import htsjdk.samtools.SAMRecord;
+
+import java.util.Iterator;
+
+/**
+ * Filter out reads with wonky CIGAR strings
+ *
+ * <p>This read filter will filter out the following cases:</p>
+ * <ul>
+ *     <li>different length and cigar length</li>
+ *     <li>Hard/Soft clips in the middle of the cigar</li>
+ *     <li>starting with deletions (with or without preceding clips)</li>
+ *     <li>ending in deletions (with or without follow-up clips)</li>
+ *     <li>fully hard or soft clipped</li>
+ *     <li>consecutive indels in the cigar (II, DD, ID or DI)</li>
+ * </ul>
+ *
+ * <h3>Usage example</h3>
+ *
+ * <h4>Enable the bad cigar filter</h4>
+ * <pre>
+ *     java -jar GenomeAnalysisTk.jar \
+ *         -T ToolName \
+ *         -R reference.fasta \
+ *         -I input.bam \
+ *         -o output.file \
+ *         -rf BadCigar
+ * </pre>
+ *
+ * @author ebanks
+ * @version 0.1
+ */
+
public class BadCigarFilter extends ReadFilter {

    /**
     * Determines whether the read's CIGAR is structurally invalid.
     *
     * @param rec the read to inspect
     * @return true if the CIGAR is malformed and the read should be filtered out
     */
    public boolean filterOut(final SAMRecord rec) {
        final Cigar c = rec.getCigar();

        // if there is no Cigar then it can't be bad
        if( c.isEmpty() ) {
            return false;
        }

        // Read and its CIGAR not the same length
        if ( rec.getReadLength() != c.getReadLength() ) {
            return true;
        }

        Iterator<CigarElement> elementIterator = c.getCigarElements().iterator();

        // Skip over the leading clips (hard and/or soft). On exit, firstOp holds the
        // first non-clip operator, or the last clip if the read is nothing but clips.
        CigarOperator firstOp = CigarOperator.H;
        while (elementIterator.hasNext() && (firstOp == CigarOperator.H || firstOp == CigarOperator.S)) {
            CigarOperator op = elementIterator.next().getOperator();

            // No reads with Hard/Soft clips in the middle of the cigar
            // (a hard clip following a soft clip at the start is considered mid-cigar)
            if (firstOp != CigarOperator.H && op == CigarOperator.H) {
                    return true;
            }
            firstOp = op;
        }

        // No reads starting with deletions (with or without preceding clips)
        if (firstOp == CigarOperator.D) {
            return true;
        }

        // becomes true once an operator that consumes read bases is seen outside the clips
        boolean hasMeaningfulElements = (firstOp != CigarOperator.H && firstOp != CigarOperator.S);
        boolean previousElementWasIndel = firstOp == CigarOperator.I;
        CigarOperator lastOp = firstOp;      // last non-clip operator seen so far
        CigarOperator previousOp = firstOp;  // operator from the previous iteration (clips included)

        // Walk the remaining elements, tracking clip placement and consecutive indels.
        while (elementIterator.hasNext()) {
            CigarOperator op = elementIterator.next().getOperator();

            if (op != CigarOperator.S && op != CigarOperator.H) {

                // No reads with Hard/Soft clips in the middle of the cigar
                if (previousOp == CigarOperator.S || previousOp == CigarOperator.H)
                    return true;

                lastOp = op;

                if (!hasMeaningfulElements && op.consumesReadBases()) {
                    hasMeaningfulElements = true;
                }

                if (op == CigarOperator.I || op == CigarOperator.D) {

                    // No reads that have consecutive indels in the cigar (II, DD, ID or DI)
                    if (previousElementWasIndel) {
                        return true;
                    }
                    previousElementWasIndel = true;
                }
                else {
                    previousElementWasIndel = false;
                }
            }
            // No reads with Hard/Soft clips in the middle of the cigar
            else if (op == CigarOperator.S && previousOp == CigarOperator.H) {
                return true;
            }

            previousOp = op;
        }

        // No reads ending in deletions (with or without follow-up clips)
        // No reads that are fully hard or soft clipped
        return lastOp == CigarOperator.D || !hasMeaningfulElements;
    }
}
\ No newline at end of file
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/filters/BadMateFilter.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/filters/BadMateFilter.java
new file mode 100644
index 0000000..317322d
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/filters/BadMateFilter.java
@@ -0,0 +1,74 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.filters;
+
+import htsjdk.samtools.SAMRecord;
+
+/**
+ * Filter out reads whose mate maps to a different contig
+ *
+ * <p>This filter is intended to ensure that only reads that are likely to be mapped in the right place, and therefore
+ * to be informative, will be used in analysis. If mates in a pair are mapping to different contigs, it is likely that
+ * at least one of them is in the wrong place. One exception is you are using a draft genome assembly in which the
+ * chromosomes are fragmented into many contigs; then you may legitimately have reads that are correctly mapped but are
+ * on different contigs than their mate. This read filter can be disabled from the command line using the -drf argument.
+ * </p>
+ *
+ * <h4>Enable the bad mate filter</h4>
+ * <pre>
+ *     java -jar GenomeAnalysisTk.jar \
+ *         -T ToolName \
+ *         -R reference.fasta \
+ *         -I input.bam \
+ *         -o output.file \
+ *         -rf BadMate
+ * </pre>
+ *
+ * <h4>Disable the bad mate filter</h4>
+ * <pre>
+ *     java -jar GenomeAnalysisTk.jar \
+ *         -T ToolName \
+ *         -R reference.fasta \
+ *         -I input.bam \
+ *         -o output.file \
+ *         <b>-drf</b> BadMate
+ * </pre>
+ *
+ * @author ebanks
+ * @version 0.1
+ */
+
+public class BadMateFilter extends DisableableReadFilter {
+
+    public boolean filterOut(final SAMRecord rec) {
+        return hasBadMate(rec);
+    }
+
+    public static boolean hasBadMate(final SAMRecord rec) {
+        return (rec.getReadPairedFlag() && !rec.getMateUnmappedFlag() && !rec.getReferenceIndex().equals(rec.getMateReferenceIndex()));
+    }
+
+}
\ No newline at end of file
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/filters/CountingFilteringIterator.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/filters/CountingFilteringIterator.java
new file mode 100644
index 0000000..de49247
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/filters/CountingFilteringIterator.java
@@ -0,0 +1,150 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.filters;
+
+import htsjdk.samtools.filter.SamRecordFilter;
+import htsjdk.samtools.SAMRecord;
+import htsjdk.samtools.util.CloseableIterator;
+import htsjdk.samtools.util.CloserUtil;
+import org.broadinstitute.gatk.engine.ReadMetrics;
+
+import java.util.*;
+
+/**
+ * Filtering Iterator which takes a filter and an iterator and iterates
+ * through only those records which are not rejected by the filter.
+ * @author Mark DePristo
+ */
public class CountingFilteringIterator implements CloseableIterator<SAMRecord> {
    // shared metrics object, updated in bulk only when this iterator is closed
    private final ReadMetrics globalRuntimeMetrics;
    // iterator-local metrics, accumulated without any cross-thread visibility concerns
    private final ReadMetrics privateRuntimeMetrics;
    // the backing source of (unfiltered) reads
    private final Iterator<SAMRecord> iterator;
    // counting wrappers around the caller-supplied filters, applied in order
    private final List<CountingReadFilter> filters = new ArrayList<>();
    // next record that passed all filters, or null when the backing iterator is exhausted
    private SAMRecord next = null;

    // wrapper around ReadFilters to count the number of filtered reads
    private final class CountingReadFilter extends ReadFilter {
        protected final ReadFilter readFilter;
        protected long counter = 0L;

        public CountingReadFilter(final ReadFilter readFilter) {
            this.readFilter = readFilter;
        }

        // Delegates to the wrapped filter, counting each read it rejects.
        // NOTE(review): only this single-read overload is counted; confirm that no
        // caller exercises a paired-read filterOut variant on this path.
        @Override
        public boolean filterOut(final SAMRecord record) {
            final boolean result = readFilter.filterOut(record);
            if ( result )
                counter++;
            return result;
        }
    }

    /**
     * Constructor
     *
     * @param metrics   metrics to accumulate on the nature of filtered reads;
     *                  updated only when close() is called
     * @param iterator  the backing iterator
     * @param filters   the filters to apply, in order
     */
    public CountingFilteringIterator(ReadMetrics metrics, Iterator<SAMRecord> iterator, Collection<ReadFilter> filters) {
        this.globalRuntimeMetrics = metrics;
        privateRuntimeMetrics = new ReadMetrics();
        this.iterator = iterator;
        for ( final ReadFilter filter : filters )
            this.filters.add(new CountingReadFilter(filter));
        // prime the pump so hasNext()/next() can be stateless lookups
        next = getNextRecord();
    }

    /**
     * Returns true if the iteration has more elements.
     *
     * @return  true if the iteration has more elements.  Otherwise returns false.
     */
    public boolean hasNext() {
        return next != null;
    }

    /**
     * Returns the next element in the iteration.
     *
     * @return  the next element in the iteration
     * @throws java.util.NoSuchElementException
     */
    public SAMRecord next() {
        if (next == null) {
            throw new NoSuchElementException("Iterator has no more elements.");
        }
        final SAMRecord result = next;
        next = getNextRecord();
        return result;
    }

    /**
     * Required method for Iterator API.
     *
     * @throws UnsupportedOperationException
     */
    public void remove() {
        throw new UnsupportedOperationException("Remove() not supported by CountingFilteringIterator");
    }

    // Closes the backing iterator and only then publishes the per-filter rejection
    // counts (keyed by the wrapped filter's simple class name) into the shared metrics.
    public void close() {
        CloserUtil.close(iterator);

        for ( final CountingReadFilter filter : filters )
            privateRuntimeMetrics.setFilterCount(filter.readFilter.getClass().getSimpleName(), filter.counter);
        // update the global metrics with all the data we collected here
        globalRuntimeMetrics.incrementMetrics(privateRuntimeMetrics);
    }

    /**
     * Gets the next record from the underlying iterator that passes the filter
     *
     * @return SAMRecord    the next filter-passing record, or null when exhausted
     */
    private SAMRecord getNextRecord() {
        while (iterator.hasNext()) {
            SAMRecord record = iterator.next();

            // update only the private copy of the metrics so that we don't need to worry about race conditions
            // that can arise when trying to update the global copy; it was agreed that this is the cleanest solution.
            privateRuntimeMetrics.incrementNumReadsSeen();

            // first filter to reject the read wins; later filters never see it
            boolean filtered = false;
            for(SamRecordFilter filter: filters) {
                if(filter.filterOut(record)) {
                    filtered = true;
                    break;
                }
            }

            if(!filtered) return record;
        }

        return null;
    }
}
\ No newline at end of file
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/filters/DisableableReadFilter.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/filters/DisableableReadFilter.java
new file mode 100644
index 0000000..2898ca1
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/filters/DisableableReadFilter.java
@@ -0,0 +1,35 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.filters;
+
+import org.broadinstitute.gatk.utils.help.DocumentedGATKFeature;
+import org.broadinstitute.gatk.utils.help.HelpConstants;
+
+ at DocumentedGATKFeature(
+        groupName = HelpConstants.DOCS_CAT_RF,
+        summary = "A ReadFilter which can be disabled by using the --disable_read_filter parameter" )
+public abstract class DisableableReadFilter extends ReadFilter {
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/filters/DuplicateReadFilter.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/filters/DuplicateReadFilter.java
new file mode 100644
index 0000000..bb1d7e8
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/filters/DuplicateReadFilter.java
@@ -0,0 +1,91 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.filters;
+
+import htsjdk.samtools.SAMRecord;
+
+/*
+ * Copyright (c) 2009 The Broad Institute
+ *
+ * Permission is hereby granted, free of charge, to any person
+ * obtaining a copy of this software and associated documentation
+ * files (the "Software"), to deal in the Software without
+ * restriction, including without limitation the rights to use,
+ * copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the
+ * Software is furnished to do so, subject to the following
+ * conditions:
+ *
+ * The above copyright notice and this permission notice shall be
+ * included in all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+ * OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+ * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+ * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+ * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+ * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+ * OTHER DEALINGS IN THE SOFTWARE.
+ */
+
+/**
+ * Filter out duplicate reads
+ *
+ * <p>This filter recognizes the SAM flag set by MarkDuplicates. It can be disabled from the command line if needed
+ * using the -drf argument.</p>
+ *
+ * <h3>Usage examples</h3>
+ *
+ * <h4>Enable the duplicate read filter</h4>
+ * <pre>
+ *     java -jar GenomeAnalysisTk.jar \
+ *         -T ToolName \
+ *         -R reference.fasta \
+ *         -I input.bam \
+ *         -o output.file \
+ *         -rf DuplicateRead
+ * </pre>
+ *
+ * <h4>Disable the duplicate read filter</h4>
+ * <pre>
+ *     java -jar GenomeAnalysisTk.jar \
+ *         -T ToolName \
+ *         -R reference.fasta \
+ *         -I input.bam \
+ *         -o output.file \
+ *         <b>-drf</b> DuplicateRead
+ * </pre>
+ *
+ * @author rpoplin
+ * @since Dec 9, 2009
+ */
+
+public class DuplicateReadFilter extends DisableableReadFilter {
+    public boolean filterOut( final SAMRecord read ) {
+        return read.getDuplicateReadFlag();
+    }
+}
\ No newline at end of file
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/filters/FailsVendorQualityCheckFilter.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/filters/FailsVendorQualityCheckFilter.java
new file mode 100644
index 0000000..1834be5
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/filters/FailsVendorQualityCheckFilter.java
@@ -0,0 +1,43 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.filters;
+
+import htsjdk.samtools.SAMRecord;
+
+/**
+ * Filter out reads that fail the vendor quality check
+ *
+ * <p>This filter recognizes the SAM flag corresponding to the vendor quality check.</p>
+ *
+ * @author rpoplin
+ * @since Jul 19, 2010
+ */
+
+public class FailsVendorQualityCheckFilter extends ReadFilter {
+    public boolean filterOut( final SAMRecord read ) {
+        return read.getReadFailsVendorQualityCheckFlag();
+    }
+}
\ No newline at end of file
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/filters/FilterManager.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/filters/FilterManager.java
new file mode 100644
index 0000000..b0dcae5
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/filters/FilterManager.java
@@ -0,0 +1,106 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.filters;
+
+import org.broadinstitute.gatk.utils.classloader.PluginManager;
+import org.broadinstitute.gatk.utils.exceptions.UserException;
+import org.broadinstitute.gatk.utils.help.GATKDocUtils;
+import org.broadinstitute.gatk.utils.help.HelpConstants;
+
+import java.util.Collection;
+import java.util.List;
+
+/**
+ * Manage filters and filter options.  Any requests for basic filtering classes
+ * should ultimately be made through this class.
+ *
+ * @author mhanna
+ * @version 0.1
+ */
+public class FilterManager extends PluginManager<ReadFilter> {
+    public FilterManager() {
+        super(ReadFilter.class,"filter","Filter");
+    }
+
+    /**
+     * Instantiate a filter of the given type.  Along the way, scream bloody murder if
+     * the filter is not available.
+     * @param filterType The type of the filter
+     * @return The filter
+     */
+    public ReadFilter createFilterByType(Class<? extends ReadFilter> filterType) {
+        return this.createByName(getName(filterType));
+    }
+
+    public Collection<Class<? extends ReadFilter>> getValues() {
+        return this.getPlugins();
+    }
+
+    /**
+     * Rather than use the default error message, print out a list of read filters as well.
+     * @param pluginCategory - string, the category of the plugin (e.g. read filter)
+     * @param pluginName - string, what we were trying to match (but failed to)
+     * @return - A wall of text with the default message, followed by a listing of available read filters
+     */
+    @Override
+    protected String formatErrorMessage(String pluginCategory, String pluginName) {
+        List<Class<? extends ReadFilter>> availableFilters = this.getPluginsImplementing(ReadFilter.class);
+
+
+        return String.format("Read filter %s not found. Available read filters:%n%n%s%n%n%s",pluginName,
+                userFriendlyListofReadFilters(availableFilters),
+                "Please consult the GATK Documentation (" + HelpConstants.GATK_DOCS_URL + ") for more information.");
+    }
+
+    /**
+     * Rather than use the default exception, return a MalformedReadFilterException.
+     * @param errorMessage error message from formatErrorMessage()
+     * @return - A MalformedReadFilterException with errorMessage
+     */
+    @Override
+    protected UserException createMalformedArgumentException(final String errorMessage) {
+        return new UserException.MalformedReadFilterException(errorMessage);
+    }
+
+    private String userFriendlyListofReadFilters(List<Class<? extends ReadFilter>> filters) {
+        final String headName = "FilterName", headDoc = "Documentation";
+        int longestNameLength = -1;
+        for ( Class < ? extends ReadFilter> filter : filters ) {
+            longestNameLength = Math.max(longestNameLength,this.getName(filter).length());
+        }
+        String format = "   %"+longestNameLength+"s        %s%n";
+
+        StringBuilder listBuilder = new StringBuilder();
+        listBuilder.append(String.format(format,headName,headDoc));
+        for ( Class<? extends ReadFilter> filter : filters ) {
+            String helpLink = GATKDocUtils.helpLinksToGATKDocs(filter);
+            String filterName = this.getName(filter);
+            listBuilder.append(String.format(format,filterName,helpLink));
+        }
+
+        return listBuilder.toString();
+    }
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/filters/LibraryReadFilter.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/filters/LibraryReadFilter.java
new file mode 100644
index 0000000..e56dc27
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/filters/LibraryReadFilter.java
@@ -0,0 +1,65 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.filters;
+
+import htsjdk.samtools.SAMReadGroupRecord;
+import htsjdk.samtools.SAMRecord;
+import org.broadinstitute.gatk.utils.commandline.Argument;
+import org.broadinstitute.gatk.engine.filters.ReadFilter;
+
+/**
+ * Only use reads from the specified library
+ *
+ * <p>This filter is useful for running on only a subset of the data as identified by a read group property.
+ * In the case of the library filter, the goal is usually to run quality control checks on a particular library.</p>
+ *
+ * <h3>Usage example</h3>
+ *
+ * <h4>Enable the library read filter</h4>
+ * <pre>
+ *     java -jar GenomeAnalysisTk.jar \
+ *         -T ToolName \
+ *         -R reference.fasta \
+ *         -I input.bam \
+ *         -o output.file \
+ *         -rf LibraryRead \
+ *         -library library_name
+ * </pre>
+ *
+ * @author kcibul
+ * @since Aug 15, 2012
+ *
+ */
+
+public class LibraryReadFilter extends ReadFilter {
+    @Argument(fullName = "library", shortName = "library", doc="The name of the library to keep, filtering out all others", required=true)
+    private String LIBRARY_TO_KEEP = null;
+
+    public boolean filterOut( final SAMRecord read ) {
+        final SAMReadGroupRecord readGroup = read.getReadGroup();
+        return ( readGroup == null || readGroup.getLibrary() == null || !readGroup.getLibrary().equals( LIBRARY_TO_KEEP ) );
+    }
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/filters/MalformedReadFilter.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/filters/MalformedReadFilter.java
new file mode 100644
index 0000000..4e26716
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/filters/MalformedReadFilter.java
@@ -0,0 +1,277 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.filters;
+
+import htsjdk.samtools.*;
+import org.broadinstitute.gatk.utils.commandline.Argument;
+import org.broadinstitute.gatk.engine.GenomeAnalysisEngine;
+import org.broadinstitute.gatk.engine.ReadProperties;
+import org.broadinstitute.gatk.utils.ValidationExclusion;
+import org.broadinstitute.gatk.engine.datasources.reads.SAMDataSource;
+import org.broadinstitute.gatk.utils.exceptions.UserException;
+
+/**
+ * Filter out malformed reads
+ *
+ * <p>This filter is applied automatically by all GATK tools in order to protect them from crashing on reads that are
+ * grossly malformed. There are a few issues (such as the absence of sequence bases) that will cause the run to fail with an
+ * error, but these cases can be preempted by setting flags that cause the problem reads to also be filtered.</p>
+ *
+ * <h3>Usage example</h3>
+ *
+ * <h4>Set the malformed read filter to filter out reads that have no sequence bases</h4>
+ * <pre>
+ *     java -jar GenomeAnalysisTk.jar \
+ *         -T ToolName \
+ *         -R reference.fasta \
+ *         -I input.bam \
+ *         -o output.file \
+ *         -filterNoBases
+ * </pre>
+ *
+ * <p>Note that the MalformedRead filter itself does not need to be specified in the command line because it is set
+ * automatically.</p>
+ *
+ * @author mhanna
+ * @version 0.1
+ */
+public class MalformedReadFilter extends ReadFilter {
+
+
+    private static final String FILTER_READS_WITH_N_CIGAR_ARGUMENT_FULL_NAME = "filter_reads_with_N_cigar" ;
+
+    private SAMFileHeader header;
+
+    @Argument(fullName = FILTER_READS_WITH_N_CIGAR_ARGUMENT_FULL_NAME, shortName = "filterRNC", doc = "Filter out reads with CIGAR containing the N operator, instead of failing with an error", required = false)
+    boolean filterReadsWithNCigar = false;
+
+
+    @Argument(fullName = "filter_mismatching_base_and_quals", shortName = "filterMBQ", doc = "Filter out reads with mismatching numbers of bases and base qualities, instead of failing with an error", required = false)
+    boolean filterMismatchingBaseAndQuals = false;
+
+    @Argument(fullName = "filter_bases_not_stored", shortName = "filterNoBases", doc = "Filter out reads with no stored bases (i.e. '*' where the sequence should be), instead of failing with an error", required = false)
+    boolean filterBasesNotStored = false;
+
+    /**
+     * Indicates the applicable validation exclusions
+     */
+    private boolean allowNCigars;
+
+    @Override
+    public void initialize(final GenomeAnalysisEngine engine) {
+        header = engine.getSAMFileHeader();
+        ValidationExclusion validationExclusions = null;
+        final SAMDataSource rds = engine.getReadsDataSource();
+        if (rds != null) {
+          final ReadProperties rps = rds.getReadsInfo();
+          if (rps != null) {
+            validationExclusions = rps.getValidationExclusionList();
+          }
+        }
+        if (validationExclusions == null) {
+            allowNCigars = false;
+        } else {
+            allowNCigars = validationExclusions.contains(ValidationExclusion.TYPE.ALLOW_N_CIGAR_READS);
+        }
+    }
+
+    public boolean filterOut(final SAMRecord read) {
+        // slowly changing the behavior to blow up first and filtering out if a parameter is explicitly provided
+        return  !checkInvalidAlignmentStart(read) ||
+                !checkInvalidAlignmentEnd(read) ||
+                !checkAlignmentDisagreesWithHeader(this.header,read) ||
+                !checkHasReadGroup(read) ||
+                !checkMismatchingBasesAndQuals(read, filterMismatchingBaseAndQuals) ||
+                !checkCigarDisagreesWithAlignment(read) ||
+                !checkSeqStored(read, filterBasesNotStored) ||
+                !checkCigarIsSupported(read,filterReadsWithNCigar,allowNCigars);
+    }
+
+    private static boolean checkHasReadGroup(final SAMRecord read) {
+        if ( read.getReadGroup() == null ) {
+            // there are 2 possibilities: either the RG tag is missing or it is not defined in the header
+            final String rgID = (String)read.getAttribute(SAMTagUtil.getSingleton().RG);
+            if ( rgID == null )
+                throw new UserException.ReadMissingReadGroup(read);
+            throw new UserException.ReadHasUndefinedReadGroup(read, rgID);
+        }
+        return true;
+    }
+
+    /**
+     * Check for the case in which the alignment start is inconsistent with the read unmapped flag.
+     * @param read The read to validate.
+     * @return true if read start is valid, false otherwise.
+     */
+    private static boolean checkInvalidAlignmentStart(final SAMRecord read ) {
+        // read is not flagged as 'unmapped', but alignment start is NO_ALIGNMENT_START
+        if( !read.getReadUnmappedFlag() && read.getAlignmentStart() == SAMRecord.NO_ALIGNMENT_START )
+            return false;
+        // Read is not flagged as 'unmapped', but alignment start is -1
+        if( !read.getReadUnmappedFlag() && read.getAlignmentStart() == -1 )
+            return false;
+        return true;
+    }
+
+    /**
+     * Check for invalid end of alignments.
+     * @param read The read to validate.
+     * @return true if read end is valid, false otherwise.
+     */
+    private static boolean checkInvalidAlignmentEnd(final SAMRecord read ) {
+        // Alignment aligns to negative number of bases in the reference.
+        if( !read.getReadUnmappedFlag() && read.getAlignmentEnd() != -1 && (read.getAlignmentEnd()-read.getAlignmentStart()+1)<0 )
+            return false;
+        return true;
+    }
+
+    /**
+     * Check to ensure that the alignment makes sense based on the contents of the header.
+     * @param header The SAM file header.
+     * @param read The read to verify.
+     * @return true if alignment agrees with header, false othrewise.
+     */
+    private static boolean checkAlignmentDisagreesWithHeader(final SAMFileHeader header, final SAMRecord read ) {
+        // Read is aligned to nonexistent contig
+        if( read.getReferenceIndex() == SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX && read.getAlignmentStart() != SAMRecord.NO_ALIGNMENT_START )
+            return false;
+        final SAMSequenceRecord contigHeader = header.getSequence( read.getReferenceIndex() );
+        // Read is aligned to a point after the end of the contig
+        if( !read.getReadUnmappedFlag() && read.getAlignmentStart() > contigHeader.getSequenceLength() )
+            return false;
+        return true;
+    }
+
+    /**
+     * Check for inconsistencies between the cigar string and the
+     * @param read The read to validate.
+     * @return true if cigar agrees with alignment, false otherwise.
+     */
+    private static boolean checkCigarDisagreesWithAlignment(final SAMRecord read) {
+        // Read has a valid alignment start, but the CIGAR string is empty
+        if( !read.getReadUnmappedFlag() &&
+            read.getAlignmentStart() != -1 &&
+            read.getAlignmentStart() != SAMRecord.NO_ALIGNMENT_START &&
+            read.getAlignmentBlocks().size() < 0 )
+            return false;
+        return true;
+    }
+
+    /**
+     * Check for unsupported CIGAR operators.
+     * Currently the N operator is not supported.
+     * @param read The read to validate.
+     * @param filterReadsWithNCigar whether the offending read should just
+     *                              be silently filtered or not.
+     * @param allowNCigars whether reads that contain N operators in their CIGARs
+     *                     can be processed or an exception should be thrown instead.
+     * @throws UserException.UnsupportedCigarOperatorException
+     *   if {@link #filterReadsWithNCigar} is <code>false</code> and
+     *   the input read has some unsupported operation.
+     * @return <code>true</code> if the read CIGAR operations are
+     * fully supported, otherwise <code>false</code>, as long as
+     * no exception has been thrown.
+     */
+    private static boolean checkCigarIsSupported(final SAMRecord read, final boolean filterReadsWithNCigar, final boolean allowNCigars) {
+        if( containsNOperator(read)) {
+            if (! filterReadsWithNCigar && !allowNCigars) {
+                throw new UserException.UnsupportedCigarOperatorException(
+                        CigarOperator.N,read,
+                        "Perhaps you are"
+                        + " trying to use RNA-Seq data?"
+                        + " While we are currently actively working to"
+                        + " support this data type unfortunately the"
+                        + " GATK cannot be used with this data in its"
+                        + " current form. You have the option of either"
+                        + " filtering out all reads with operator "
+                        + CigarOperator.N + " in their CIGAR string"
+                        + " (please add --"
+                        +  FILTER_READS_WITH_N_CIGAR_ARGUMENT_FULL_NAME
+                        + " to your command line) or"
+                        + " assume the risk of processing those reads as they"
+                        + " are including the pertinent unsafe flag (please add -U"
+                        + ' ' + ValidationExclusion.TYPE.ALLOW_N_CIGAR_READS
+                        + " to your command line). Notice however that if you were"
+                        + " to choose the latter, an unspecified subset of the"
+                        + " analytical outputs of an unspecified subset of the tools"
+                        + " will become unpredictable. Consequently the GATK team"
+                        + " might well not be able to provide you with the usual support"
+                        + " with any issue regarding any output");
+            }
+            return ! filterReadsWithNCigar;
+        }
+        return true;
+    }
+
+    private static boolean containsNOperator(final SAMRecord read) {
+        final Cigar cigar = read.getCigar();
+        if (cigar == null)   {
+            return false;
+        }
+        for (final CigarElement ce : cigar.getCigarElements()) {
+            if (ce.getOperator() == CigarOperator.N) {
+                return true;
+            }
+        }
+        return false;
+    }
+
+    /**
+     * Check if the read has the same number of bases and base qualities
+     * @param read the read to validate
+     * @return true if they have the same number. False otherwise.
+     */
+    private static boolean checkMismatchingBasesAndQuals(final SAMRecord read, final boolean filterMismatchingBaseAndQuals) {
+        final boolean result;
+        if (read.getReadLength() == read.getBaseQualities().length)
+            result = true;
+        else if (filterMismatchingBaseAndQuals)
+            result = false;
+        else
+            throw new UserException.MalformedBAM(read,
+                    String.format("BAM file has a read with mismatching number of bases and base qualities. Offender: %s [%d bases] [%d quals].%s",
+                            read.getReadName(), read.getReadLength(), read.getBaseQualities().length,
+                            read.getBaseQualities().length == 0 ? " You can use --defaultBaseQualities to assign a default base quality for all reads, but this can be dangerous in you don't know what you are doing." : ""));
+
+        return result;
+    }
+
+    /**
+     * Check if the read has its base sequence stored
+     * @param read the read to validate
+     * @return true if the sequence is stored and false otherwise ("*" in the SEQ field).
+     */
+    protected static boolean checkSeqStored(final SAMRecord read, final boolean filterBasesNotStored) {
+
+        if ( read.getReadBases() != SAMRecord.NULL_SEQUENCE )
+            return true;
+
+        if ( filterBasesNotStored )
+            return false;
+
+        throw new UserException.MalformedBAM(read, String.format("the BAM file has a read with no stored bases (i.e. it uses '*') which is not supported in the GATK; see the --filter_bases_not_stored argument. Offender: %s", read.getReadName()));
+    }
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/filters/MappingQualityFilter.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/filters/MappingQualityFilter.java
new file mode 100644
index 0000000..85dda8e
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/filters/MappingQualityFilter.java
@@ -0,0 +1,62 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.filters;
+
+import htsjdk.samtools.SAMRecord;
+import org.broadinstitute.gatk.utils.commandline.Argument;
+
+/**
+ * Filter out reads with low mapping qualities
+ *
+ * <p>This filter is intended to ensure that only reads that are likely
+ * to be mapped in the right place, and therefore to be informative, will be used in analysis.</p>
+ *
+ * <h3>Usage example</h3>
+ *
+ * <h4>Set the mapping quality filter to filter out reads that have MAPQ < 15</h4>
+ * <pre>
+ *     java -jar GenomeAnalysisTk.jar \
+ *         -T HaplotypeCaller \
+ *         -R reference.fasta \
+ *         -I input.bam \
+ *         -o output.vcf \
+ *         -rf MappingQuality \
+ *         -mmq 15
+ * </pre>
+ *
+ * @author ebanks
+ * @version 0.1
+ */
+
+public class MappingQualityFilter extends ReadFilter {
+
+    @Argument(fullName = "min_mapping_quality_score", shortName = "mmq", doc = "Minimum read mapping quality required to consider a read for calling", required = false)
+    public int MIN_MAPPING_QUALTY_SCORE = 10;
+
+    public boolean filterOut(SAMRecord rec) {
+        return (rec.getMappingQuality() < MIN_MAPPING_QUALTY_SCORE);
+    }
+}
\ No newline at end of file
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/filters/MappingQualityUnavailableFilter.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/filters/MappingQualityUnavailableFilter.java
new file mode 100644
index 0000000..f006a3c
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/filters/MappingQualityUnavailableFilter.java
@@ -0,0 +1,58 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.filters;
+
+import htsjdk.samtools.SAMRecord;
+import org.broadinstitute.gatk.utils.QualityUtils;
+
+/**
+ * Filter out reads with no mapping quality information
+ *
+ *
+ * <p>This filter is intended to ensure that only reads that are likely
+ * to be mapped in the right place, and therefore to be informative, will be used in analysis.</p>
+ *
+ * <h3>Usage example</h3>
+ *
+ * <pre>
+ *     java -jar GenomeAnalysisTk.jar \
+ *         -T ToolName \
+ *         -R reference.fasta \
+ *         -I input.bam \
+ *         -o output.file \
+ *         -rf MappingQualityUnavailable
+ * </pre>
+ *
+ * @author ebanks
+ * @version 0.1
+ */
+
+public class MappingQualityUnavailableFilter extends ReadFilter {
+    public boolean filterOut(SAMRecord rec) {
+        return (rec.getMappingQuality() == QualityUtils.MAPPING_QUALITY_UNAVAILABLE);
+    }
+}
+
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/filters/MappingQualityZeroFilter.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/filters/MappingQualityZeroFilter.java
new file mode 100644
index 0000000..46f044b
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/filters/MappingQualityZeroFilter.java
@@ -0,0 +1,56 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.filters;
+
+import htsjdk.samtools.SAMRecord;
+
+/**
+ * Filter out reads with mapping quality zero
+ *
+ * <p>This filter is intended to ensure that only reads that are likely
+ * to be mapped in the right place, and therefore to be informative, will be used in analysis.</p>
+ *
+ * <h3>Usage example</h3>
+ *
+ * <pre>
+ *     java -jar GenomeAnalysisTk.jar \
+ *         -T ToolName \
+ *         -R reference.fasta \
+ *         -I input.bam \
+ *         -o output.file \
+ *         -rf MappingQualityZero
+ * </pre>
+ *
+ * @author hanna
+ * @version 0.1
+ */
+
+public class MappingQualityZeroFilter extends ReadFilter {
+    public boolean filterOut(SAMRecord rec) {
+        return (rec.getMappingQuality() == 0);
+    }
+}
+
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/filters/MateSameStrandFilter.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/filters/MateSameStrandFilter.java
new file mode 100644
index 0000000..a8a02da
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/filters/MateSameStrandFilter.java
@@ -0,0 +1,65 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.filters;
+
+import htsjdk.samtools.SAMRecord;
+
+/**
+ * Filter out reads with bad pairing (and related) properties
+ *
+ * <p>This filter is intended to ensure that only reads that are likely
+ * to be mapped in the right place, and therefore to be informative, will be used in analysis.
+ * The following cases will be filtered out:
+ * </p>
+ * <ul>
+ *     <li>is not paired</li>
+ *     <li>mate is unmapped</li>
+ *     <li>is duplicate</li>
+ *     <li>fails vendor quality check</li>
+ *     <li>both mate and read are in the same strand orientation</li>
+ * </ul>
+ *
+ * <h3>Usage example</h3>
+ *
+ * <pre>
+ *     java -jar GenomeAnalysisTk.jar \
+ *         -T ToolName \
+ *         -R reference.fasta \
+ *         -I input.bam \
+ *         -o output.file \
+ *         -rf MateSameStrand
+ * </pre>
+ *
+ * @author chartl
+ * @since 5/18/11
+ */
+public class MateSameStrandFilter extends ReadFilter {
+
+    public boolean filterOut(SAMRecord read) {
+        return (! read.getReadPairedFlag() ) || read.getMateUnmappedFlag() || read.getDuplicateReadFlag() ||
+                read.getReadFailsVendorQualityCheckFlag() || (read.getMateNegativeStrandFlag() == read.getReadNegativeStrandFlag());
+    }
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/filters/MaxInsertSizeFilter.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/filters/MaxInsertSizeFilter.java
new file mode 100644
index 0000000..0407d85
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/filters/MaxInsertSizeFilter.java
@@ -0,0 +1,59 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.filters;
+
+import htsjdk.samtools.SAMRecord;
+import org.broadinstitute.gatk.utils.commandline.Argument;
+
+/**
+ * Filter out reads that exceed a given insert size
+ *
+ * <p>This filter is intended to ensure that only reads that are likely
+ * to be mapped in the right place, and therefore to be informative, will be used in analysis.</p>
+ *
+ * <h3>Usage example</h3>
+ *
+ * <pre>
+ *     java -jar GenomeAnalysisTk.jar \
+ *         -T ToolName \
+ *         -R reference.fasta \
+ *         -I input.bam \
+ *         -o output.file \
+ *         -rf MaxInsertSize \
+ *         -maxInsert 10000
+ * </pre>
+ *
+ * @author chartl
+ * @since 5/2/11
+ */
+public class MaxInsertSizeFilter extends ReadFilter {
+    @Argument(fullName = "maxInsertSize", shortName = "maxInsert", doc="Insert size cutoff", required=false)
+    private int maxInsertSize = 1000000;
+
+    public boolean filterOut(SAMRecord record) {
+        return (record.getReadPairedFlag() && (record.getInferredInsertSize() > maxInsertSize || record.getInferredInsertSize() < -1*maxInsertSize));
+    }
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/filters/MissingReadGroupFilter.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/filters/MissingReadGroupFilter.java
new file mode 100644
index 0000000..7acc34e
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/filters/MissingReadGroupFilter.java
@@ -0,0 +1,55 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.filters;
+
+import htsjdk.samtools.SAMRecord;
+
+/**
+ * Filter out reads without read group information
+ *
+ * <p>Many GATK tools are dependent on having read group information in order to operate correctly. This filter excludes
+ * any reads that have not been appropriately identified. </p>
+ *
+ * <h3>Usage example</h3>
+ *
+ * <pre>
+ *     java -jar GenomeAnalysisTk.jar \
+ *         -T ToolName \
+ *         -R reference.fasta \
+ *         -I input.bam \
+ *         -o output.file \
+ *         -rf MissingReadGroup
+ * </pre>
+ *
+ * @author ebanks
+ * @version 0.1
+ */
+
+public class MissingReadGroupFilter extends ReadFilter {
+    public boolean filterOut(SAMRecord rec) {
+        return rec.getReadGroup() == null;
+    }
+}
\ No newline at end of file
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/filters/NDNCigarReadTransformer.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/filters/NDNCigarReadTransformer.java
new file mode 100644
index 0000000..aa449a1
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/filters/NDNCigarReadTransformer.java
@@ -0,0 +1,123 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.filters;
+
+import htsjdk.samtools.Cigar;
+import htsjdk.samtools.CigarElement;
+import htsjdk.samtools.CigarOperator;
+import htsjdk.samtools.SAMRecord;
+import org.broadinstitute.gatk.engine.GenomeAnalysisEngine;
+import org.broadinstitute.gatk.engine.iterators.RNAReadTransformer;
+import org.broadinstitute.gatk.engine.iterators.ReadTransformer;
+import org.broadinstitute.gatk.engine.walkers.Walker;
+import org.broadinstitute.gatk.utils.exceptions.UserException;
+import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
+
+/**
+ * Reduce NDN cigar elements to one N element.
+ *
+ * <p>This read transformer will refactor cigar strings that contain N-D-N elements to one N element (with total length
+ * of the three refactored elements). The engine parameter that activate this read transformer is
+ * `--refactor_NDN_cigar_string` / `-fixNDN`</p>
+ *
+ * <h3>Rationale</h3>
+ * <p>Some RNAseq aligners that use a known transcriptome resource (such as TopHat2) produce NDN elements in read CIGARS
+ * when a small exon is entirely deleted during transcription, which ends up looking like [exon1]NDN[exon3]. Currently
+ * we consider that the internal N-D-N motif is illegal and we error out when we encounter it. By refactoring the cigar string of
+ * those specific reads, this read transformer allows users of TopHat and other tools to circumvent this problem without
+ * affecting the rest of their dataset. From the point of view of variant calling, there is no meaningful difference between
+ * the two representations.</p>
+ *
+ * <h3>Developer notes</h3>
+ * <ul>
+ *     <li>Any walker that needs to apply this functionality should apply that read transformer in its map function, since it won't be activated by the GATK engine.</li>
+ * </ul>
+ *
+ *
+ * @author ami
+ * @since 04/22/14
+ */
+
+public class NDNCigarReadTransformer extends RNAReadTransformer {
+
+    private boolean refactorReads;
+
+    @Override
+    public ApplicationTime initializeSub(final GenomeAnalysisEngine engine, final Walker walker) {
+        refactorReads = engine.getArguments().REFACTOR_NDN_CIGAR_READS;
+
+        return ApplicationTime.HANDLED_IN_WALKER;   //  NOTE: any walker that need that functionality should apply that read transformer in its map function, since it won't be activated by the GATK engine.
+    }
+
+    @Override
+    public GATKSAMRecord apply(final GATKSAMRecord read) {
+        if(read == null)
+            throw new UserException.BadInput("try to transform a null GATKSAMRecord");
+        final Cigar originalCigar = read.getCigar();
+        if (originalCigar.isValid(read.getReadName(),-1) != null)
+            throw new UserException.BadInput("try to transform a read with non-valid cigar string: readName: "+read.getReadName()+" Cigar String: "+originalCigar);
+        read.setCigar(refactorNDNtoN(originalCigar));
+        return read;
+    }
+
+    @Override
+    public boolean enabled() {
+        return refactorReads;
+    }
+
+
+
+    protected Cigar refactorNDNtoN(final Cigar originalCigar) {
+        final Cigar refactoredCigar = new Cigar();
+        final int cigarLength = originalCigar.numCigarElements();
+        for(int i = 0; i < cigarLength; i++){
+            final CigarElement element = originalCigar.getCigarElement(i);
+            if(element.getOperator() == CigarOperator.N && thereAreAtLeast2MoreElements(i,cigarLength)){
+                final CigarElement nextElement = originalCigar.getCigarElement(i+1);
+                final CigarElement nextNextElement = originalCigar.getCigarElement(i+2);
+
+                // if it is N-D-N replace with N (with the total length) otherwise just add the first N.
+                if(nextElement.getOperator() == CigarOperator.D && nextNextElement.getOperator() == CigarOperator.N){
+                    final int threeElementsLength = element.getLength() + nextElement.getLength() + nextNextElement.getLength();
+                    final CigarElement refactoredElement = new CigarElement(threeElementsLength,CigarOperator.N);
+                    refactoredCigar.add(refactoredElement);
+                    i += 2; //skip the elements that were refactored
+                }
+                else
+                    refactoredCigar.add(element);  // add only the first N
+            }
+            else
+                refactoredCigar.add(element);  // add any non-N element
+        }
+        return refactoredCigar;
+    }
+
+    private boolean thereAreAtLeast2MoreElements(final int index, final int cigarLength){
+        return index < cigarLength - 2;
+    }
+
+}
+
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/filters/NoOriginalQualityScoresFilter.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/filters/NoOriginalQualityScoresFilter.java
new file mode 100644
index 0000000..482a541
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/filters/NoOriginalQualityScoresFilter.java
@@ -0,0 +1,54 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.filters;
+
+import htsjdk.samtools.SAMRecord;
+
+
+/**
+ * Filter out reads that do not have an original quality quality score (OQ) tag
+ *
+ * <p>The OQ tag can be added during the base recalibration process in order to preserve original information.</p>
+ *
+ * <h3>Usage example</h3>
+ *
+ * <pre>
+ *     java -jar GenomeAnalysisTk.jar \
+ *         -T ToolName \
+ *         -R reference.fasta \
+ *         -I input.bam \
+ *         -o output.file \
+ *         -rf NoOriginalQualityScores
+ * </pre>
+ *
+ * @author rpoplin
+ * @since Nov 19, 2009
+ */
+public class NoOriginalQualityScoresFilter extends ReadFilter {
+    public boolean filterOut( final SAMRecord read ) {
+        return (read.getAttribute("OQ") == null);
+    }
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/filters/NotPrimaryAlignmentFilter.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/filters/NotPrimaryAlignmentFilter.java
new file mode 100644
index 0000000..841a139
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/filters/NotPrimaryAlignmentFilter.java
@@ -0,0 +1,56 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.filters;
+
+import htsjdk.samtools.SAMRecord;
+
+/**
+ * Filter out reads that are secondary alignments
+ *
+ * <p>This filter recognizes the SAM flag that identifies secondary alignments (ie not the best alignment).
+ * It is intended to ensure that only reads that are likely to be mapped in the right place, and therefore to be
+ * informative, will be used in analysis.</p>
+ *
+ * <h3>Usage example</h3>
+ *
+ * <pre>
+ *     java -jar GenomeAnalysisTk.jar \
+ *         -T ToolName \
+ *         -R reference.fasta \
+ *         -I input.bam \
+ *         -o output.file \
+ *         -rf NotPrimaryAlignment
+ * </pre>
+ *
+ * @author rpoplin
+ * @since Dec 9, 2009
+ */
+
+public class NotPrimaryAlignmentFilter extends ReadFilter {
+    public boolean filterOut( final SAMRecord read ) {
+        return read.getNotPrimaryAlignmentFlag();
+    }
+}
\ No newline at end of file
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/filters/OverclippedReadFilter.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/filters/OverclippedReadFilter.java
new file mode 100644
index 0000000..fe96f9f
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/filters/OverclippedReadFilter.java
@@ -0,0 +1,80 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.filters;
+
+import htsjdk.samtools.*;
+import org.broadinstitute.gatk.utils.commandline.Argument;
+import org.broadinstitute.gatk.engine.GenomeAnalysisEngine;
+import org.broadinstitute.gatk.engine.ReadProperties;
+import org.broadinstitute.gatk.utils.ValidationExclusion;
+import org.broadinstitute.gatk.engine.datasources.reads.SAMDataSource;
+import org.broadinstitute.gatk.utils.exceptions.UserException;
+
+/**
+ * Filter out reads that are over-soft-clipped
+ *
+ * <p>
+ *     This filter is intended to filter out reads that are potentially from foreign organisms.
+ *     From experience with sequencing of human DNA we have found cases of contamination by bacterial
+ *     organisms; the symptoms of such contamination are a class of reads with only a small number
+ *     of aligned bases and additionally many soft-clipped bases.  This filter is intended
+ *     to remove such reads. Consecutive soft-clipped blocks are treated as a single block
+ * </p>
+ *
+ */
+public class OverclippedReadFilter extends ReadFilter {
+
+    @Argument(fullName = "filter_is_too_short_value", shortName = "filterTooShort", doc = "Value for which reads with less than this number of aligned bases is considered too short", required = false)
+    int tooShort = 30;
+
+    @Argument(fullName = "do_not_require_softclips_both_ends", shortName = "NoRequireSCBothEnds", doc = "Allow a read to be filtered out based on having only 1 soft-clipped block. By default, both ends must have a soft-clipped block, setting this flag requires only 1 soft-clipped block.", required = false)
+    Boolean doNotRequireSoftclipsOnBothEnds = false;
+
+
+    public boolean filterOut(final SAMRecord read) {
+        int alignedLength = 0;
+        int softClipBlocks = 0;
+        int minSoftClipBlocks = doNotRequireSoftclipsOnBothEnds ? 1 : 2;
+        CigarOperator lastOperator = null;
+
+        for ( final CigarElement element : read.getCigar().getCigarElements() ) {
+            if ( element.getOperator() == CigarOperator.S ) {
+                //Treat consecutive S blocks as a single one
+                if(lastOperator != CigarOperator.S){
+                    softClipBlocks += 1;
+                }
+
+            } else if ( element.getOperator().consumesReadBases() ) {   // M, I, X, and EQ (S was already accounted for above)
+                alignedLength += element.getLength();
+            }
+            lastOperator = element.getOperator();
+        }
+
+        return(alignedLength < tooShort && softClipBlocks >= minSoftClipBlocks);
+
+    }
+
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/filters/Platform454Filter.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/filters/Platform454Filter.java
new file mode 100644
index 0000000..22822dd
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/filters/Platform454Filter.java
@@ -0,0 +1,57 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.filters;
+
+import htsjdk.samtools.SAMRecord;
+import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
+import org.broadinstitute.gatk.utils.sam.ReadUtils;
+
+/**
+ * Filter out reads produced by 454 technology
+ *
+ * <p>Reads produced by 454 technology should not be processed by the GATK's indel realignment tools. This filter is
+ * applied by those tools to enforce that rule.</p>
+ *
+ * <h3>Usage example</h3>
+ *
+ * <pre>
+ *     java -jar GenomeAnalysisTk.jar \
+ *         -T ToolName \
+ *         -R reference.fasta \
+ *         -I input.bam \
+ *         -o output.file \
+ *         -rf Platform454
+ * </pre>
+ *
+ * @author ebanks
+ * @version 0.1
+ */
+
+public class Platform454Filter extends ReadFilter {
+    public boolean filterOut(SAMRecord rec) {
+        return (ReadUtils.is454Read((GATKSAMRecord)rec));
+    }
+}
\ No newline at end of file
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/filters/PlatformFilter.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/filters/PlatformFilter.java
new file mode 100644
index 0000000..8c7fcc6
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/filters/PlatformFilter.java
@@ -0,0 +1,65 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.filters;
+
+import htsjdk.samtools.SAMRecord;
+import org.broadinstitute.gatk.utils.commandline.Argument;
+import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
+import org.broadinstitute.gatk.utils.sam.ReadUtils;
+
+/**
+ * Filter out reads that were generated by a specific sequencing platform
+ *
+ * <p>This filter is useful for running on only a subset of the data as identified by a read group property.
+ * In the case of the platform filter, the goal is usually to blacklist certain sequencing technologies at certain processing steps
+ * if we know there is an incompatibility problem (like 454 and indel realignment, which is special-cased).</p>
+ *
+ * <h3>Usage example</h3>
+ *
+ * <pre>
+ *     java -jar GenomeAnalysisTk.jar \
+ *         -T ToolName \
+ *         -R reference.fasta \
+ *         -I input.bam \
+ *         -o output.file \
+ *         -rf Platform \
+ *         -PLFilterName platform_name
+ * </pre>
+ *
+ * @author ebanks
+ * @version 0.1
+ */
+public class PlatformFilter extends ReadFilter {
+    @Argument(fullName = "PLFilterName", shortName = "PLFilterName", doc="Discard reads with RG:PL attribute containing this string", required=false)
+    protected String[] PLFilterNames;
+
+    public boolean filterOut(SAMRecord rec) {
+        for ( String name : PLFilterNames )
+            if ( ReadUtils.isPlatformRead((GATKSAMRecord)rec, name.toUpperCase() ))
+                return true;
+        return false;
+    }
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/filters/PlatformUnitFilter.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/filters/PlatformUnitFilter.java
new file mode 100644
index 0000000..c00d521
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/filters/PlatformUnitFilter.java
@@ -0,0 +1,90 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.filters;
+
+import htsjdk.samtools.SAMReadGroupRecord;
+import htsjdk.samtools.SAMRecord;
+import org.broadinstitute.gatk.utils.exceptions.UserException;
+
+import java.util.HashSet;
+import java.util.Set;
+
+/**
+ * Filter out reads with blacklisted platform unit tags
+ *
+ * <p>This filter is useful for running on only a subset of the data as identified by a read group property.
+ * In the case of the platform unit filter, the goal is usually to blacklist certain runs if we know there was a problem with
+ * a particular sequencing machine.</p>
+ *
+ * @author asivache
+ * @since Sep 21, 2009
+ */
+public class PlatformUnitFilter extends ReadFilter {
+    // a hack: use static in order to be able to fill it with the data from command line at runtime
+    static private Set<String> blackListedLanes = new HashSet<String>();
+
+    public boolean filterOut(SAMRecord samRecord) {
+
+        if ( blackListedLanes.size() == 0 ) return false; // no filters set, nothing to do
+
+        Object pu_attr = samRecord.getAttribute("PU");
+
+        if ( pu_attr == null ) {
+            // no platform unit in the record, go get from read group
+            SAMReadGroupRecord rgr = samRecord.getReadGroup();
+            if ( rgr == null ) throw new UserException.MalformedBAM(samRecord, "Read " + samRecord.getReadName() +" has NO associated read group record");
+            pu_attr = rgr.getAttribute("PU") ;
+        }
+        if ( pu_attr == null ) return false; // could not get PU, forget about the filtering...
+        return blackListedLanes.contains((String)pu_attr);
+    }
+
+    /**
+     * The argument is interpreted as a comma-separated list of lanes (platform units) to be filtered
+     * out. All the specified names will be registered with the filter and filterOut(r) for any SAMRecord r
+     * belonging to one of the specified lanes will thereafter return true.
+     * The names can be surrounded by additional spaces, the latters will be trimmed by this method.
+     * This method can be called multiple times to add more lanes. Re-registering the same lane again is safe.
+     * @param arg
+     */
+    public static void setBlackListedLanes(String arg) {
+        String[] lanes = arg.split(",");
+        for ( int i = 0; i < lanes.length ; i++ ) {
+            blackListedLanes.add(lanes[i].trim());
+        }
+    }
+
+    /**
+     * Adds a single name of a lane (platform unit) to be filtered out by this filter. The name can be surrounded
+     * by spaces, the latters will be trimmed out. This method can be called multiple times to add more lanes.
+     * Re-registering the same lane again is safe.
+     * @param arg
+     */
+    public static void addBlackListedLane(String arg) {
+        blackListedLanes.add(arg.trim());
+    }
+
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/filters/PlatformUnitFilterHelper.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/filters/PlatformUnitFilterHelper.java
new file mode 100644
index 0000000..d92ffe7
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/filters/PlatformUnitFilterHelper.java
@@ -0,0 +1,87 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.filters;
+
+import org.broadinstitute.gatk.utils.exceptions.UserException;
+import org.broadinstitute.gatk.utils.text.XReadLines;
+
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.util.regex.Pattern;
+
+/**
+ * This is a utility class, its sole purpose is to populate PlatformUnitFilter with data. When a command line argument
+ * (@Argument) of the type PlatformUnitFilterHelper is declared in an application (walker), its constuctor
+ * PlatformUnitFilterHelper(String) automatically called by the argument system will parse its String argument
+ * and set up static fields of PlatformUnitFilter object.
+ *
+ * The String argument can be either a name of existing file, or a list of comma-separated lane (Platform Unit) names.
+ * First, the constructor will check if a file with specified name exists. If it does, then it is assumed that each line
+ * in the file contains one name of a lane (Platfor Unit) to filter out. If such file does not exist, then the argument is
+ * interpreted as a comma-separated list. Blank spaces around lane names are allowed in both cases and will be trimmed out.
+ *
+ * In other words, all it takes to request filtering out reads from specific lane(s) is
+ *
+ * 1) declare filter usage in the walker
+ *
+ * @ReadFilters({PlatformUnitFilter.class,...})
+ *
+ * 2) specify the argument that will take the list of lanes to filter:
+ *
+ * @Argument(fullName="filterLanes", shortName="FL", doc="all specified lanes will be ignored", required=false)
+ *   PlatformUnitFilterHelper dummy;
+ *
+ * After that, the walker can be invoked with "--filterLanes 302UBAAXX090508.8,302YAAAXX090427.8" argument.
+ *
+ * Created by IntelliJ IDEA.
+ * User: asivache
+ * Date: Sep 22, 2009
+ * Time: 11:11:48 AM
+ * To change this template use File | Settings | File Templates.
+ */
+public class PlatformUnitFilterHelper {
+    final public static Pattern EMPTYLINE_PATTERN = Pattern.compile("^\\s*$");
+
+    public PlatformUnitFilterHelper(String arg) {
+         File f = new File(arg);
+
+         if ( f.exists() ) {
+             try {
+                 XReadLines reader = new XReadLines(f);
+                 for ( String line : reader ) {
+                     if ( EMPTYLINE_PATTERN.matcher(line).matches() ) continue; // skip empty lines
+                     PlatformUnitFilter.addBlackListedLane(line); // PlatformUnitFilter will trim the line as needed
+                 }
+             } catch ( FileNotFoundException e) { throw new UserException.CouldNotReadInputFile(f, e); } // this should NEVER happen
+             return;
+         }
+
+        // no such file, must be a comma-separated list:
+
+        PlatformUnitFilter.setBlackListedLanes(arg); // PlatformUnitFilter will split on commas and trim as needed
+
+    }
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/filters/ReadFilter.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/filters/ReadFilter.java
new file mode 100644
index 0000000..1720fdc
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/filters/ReadFilter.java
@@ -0,0 +1,60 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.filters;
+
+import htsjdk.samtools.filter.SamRecordFilter;
+import htsjdk.samtools.SAMRecord;
+import org.broadinstitute.gatk.engine.GenomeAnalysisEngine;
+import org.broadinstitute.gatk.utils.help.DocumentedGATKFeature;
+import org.broadinstitute.gatk.utils.help.HelpConstants;
+
+/**
+ * A SamRecordFilter that also depends on the header.
+ */
+ at DocumentedGATKFeature(
+        groupName = HelpConstants.DOCS_CAT_RF,
+        summary = "GATK Engine arguments that filter or transfer incoming SAM/BAM data files" )
+public abstract class ReadFilter implements SamRecordFilter {
+    /**
+     * Sets the header for use by this filter.
+     * @param engine the engine.
+     */
+    public void initialize(GenomeAnalysisEngine engine) {}
+
+
+    /**
+     * Determines whether a pair of SAMRecord matches this filter
+     *
+     * @param first  the first SAMRecord to evaluate
+     * @param second the second SAMRecord to evaluate
+     *
+     * @return true if the SAMRecords matches the filter, otherwise false
+     * @throws UnsupportedOperationException when paired filter not implemented
+     */
+    public boolean filterOut(final SAMRecord first, final SAMRecord second) {
+        throw new UnsupportedOperationException("Paired filter not implemented: " + this.getClass());
+    }
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/filters/ReadGroupBlackListFilter.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/filters/ReadGroupBlackListFilter.java
new file mode 100644
index 0000000..7c57b0d
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/filters/ReadGroupBlackListFilter.java
@@ -0,0 +1,133 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.filters;
+
+import htsjdk.samtools.SAMReadGroupRecord;
+import htsjdk.samtools.SAMRecord;
+import org.broadinstitute.gatk.utils.exceptions.UserException;
+import org.broadinstitute.gatk.utils.text.XReadLines;
+
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.util.*;
+import java.util.Map.Entry;
+
+/**
+ * Filter out reads matching a read group tag value
+ *
+ * <p>This filter is useful for running on only a subset of the data as identified by a read group property,
+ * using expression matching against the read group tags.</p>
+ *
+ * <h3>Usage example</h3>
+ *
+ * <h4>Set the read group filter to blacklist read groups that have the PU tag "1000G-mpimg-080821-1_1"</h4>
+ * <pre>
+ *     java -jar GenomeAnalysisTk.jar \
+ *         -T ToolName \
+ *         -R reference.fasta \
+ *         -I input.bam \
+ *         -o output.file \
+ *         -rf ReadGroupBlackList \
+ *         -rgbl PU:1000G-mpimg-080821-1_1
+ * </pre>
+ */
+public class ReadGroupBlackListFilter extends ReadFilter {
+    // Tag -> blacklisted values, built once in the constructor and then only read.
+    private Set<Entry<String, Collection<String>>> filterEntries;
+
+    /**
+     * Builds the filter from a list of blacklist entries. Each entry is either
+     * "<TAG>:<SUBSTRING>" or the path of a .list/.txt file containing such entries
+     * (one per line; blank lines and lines starting with '#' are ignored).
+     *
+     * @param blackLists the raw entries and/or file names to load
+     */
+    public ReadGroupBlackListFilter(List<String> blackLists) {
+        Map<String, Collection<String>> filters = new TreeMap<String, Collection<String>>();
+        for (String blackList : blackLists)
+            addFilter(filters, blackList, null, 0);
+        this.filterEntries = filters.entrySet();
+    }
+
+    /**
+     * @param samRecord the read to evaluate
+     * @return true if the read's read group matches any blacklisted tag value
+     */
+    public boolean filterOut(SAMRecord samRecord) {
+        for (Entry<String, Collection<String>> filterEntry : filterEntries) {
+            String attributeType = filterEntry.getKey();
+
+            SAMReadGroupRecord samReadGroupRecord = samRecord.getReadGroup();
+            if (samReadGroupRecord != null) {
+                Object attribute;
+                // "ID" and "RG" both refer to the read group id rather than a generic tag
+                if ("ID".equals(attributeType) || "RG".equals(attributeType))
+                    attribute = samReadGroupRecord.getId();
+                else
+                    attribute = samReadGroupRecord.getAttribute(attributeType);
+                if (attribute != null && filterEntry.getValue().contains(attribute))
+                    return true;
+            }
+        }
+
+        return false;
+    }
+
+    /**
+     * Parses one blacklist entry into the filters map, recursing into files.
+     *
+     * @param filters       accumulating tag -> values map
+     * @param filter        the entry: "<TAG>:<SUBSTRING>" or a .list/.txt file name
+     * @param parentFile    the file this entry came from, or null for command-line entries
+     * @param parentLineNum the line number within parentFile (for error messages)
+     * @throws UserException if the entry is malformed or a referenced file cannot be read
+     */
+    private void addFilter(Map<String, Collection<String>> filters, String filter, File parentFile, int parentLineNum) {
+        if (filter.toLowerCase().endsWith(".list") || filter.toLowerCase().endsWith(".txt")) {
+            File file = new File(filter);
+            XReadLines lines = null;
+            try {
+                int lineNum = 0;
+                lines = new XReadLines(file);
+                for (String line : lines) {
+                    lineNum++;
+
+                    if (line.trim().length() == 0)
+                        continue;
+
+                    if (line.startsWith("#"))
+                        continue;
+
+                    addFilter(filters, line, file, lineNum);
+                }
+            } catch (FileNotFoundException e) {
+                String message = "Error loading black list: " + file.getAbsolutePath();
+                if (parentFile != null) {
+                    message += ", " + parentFile.getAbsolutePath() + ":" + parentLineNum;
+                }
+                throw new UserException(message);
+            } finally {
+                // The original code leaked the underlying reader; close it best-effort.
+                if (lines != null) {
+                    try {
+                        lines.close();
+                    } catch (Exception e) {
+                        // a failure to close does not affect the parsed result
+                    }
+                }
+            }
+        } else {
+            String[] filterEntry = filter.split(":", 2);
+
+            String message = null;
+            if (filterEntry.length != 2) {
+                message = "Invalid read group filter: " + filter;
+            } else if (filterEntry[0].length() != 2) {
+                message = "Tag is not two characters: " + filter;
+            }
+
+            if (message != null) {
+                if (parentFile != null) {
+                    message += ", " + parentFile.getAbsolutePath() + ":" + parentLineNum;
+                }
+                message += ", format is <TAG>:<SUBSTRING>";
+                throw new UserException(message);
+            }
+
+            if (!filters.containsKey(filterEntry[0]))
+                filters.put(filterEntry[0], new TreeSet<String>());
+            filters.get(filterEntry[0]).add(filterEntry[1]);
+        }
+    }
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/filters/ReadLengthFilter.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/filters/ReadLengthFilter.java
new file mode 100644
index 0000000..b0fc108
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/filters/ReadLengthFilter.java
@@ -0,0 +1,63 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.filters;
+
+import htsjdk.samtools.SAMRecord;
+import org.broadinstitute.gatk.utils.commandline.Argument;
+
+/**
+ * Filter out reads based on length
+ *
+ * <p>This filter is useful for running on only reads that are longer (or shorter) than the given threshold sizes. </p>
+ *
+ * <h3>Usage example</h3>
+ *
+ * <pre>
+ *     java -jar GenomeAnalysisTk.jar \
+ *         -T ToolName \
+ *         -R reference.fasta \
+ *         -I input.bam \
+ *         -o output.file \
+ *         -rf ReadLength \
+ *         -minRead 50 \
+ *         -maxRead 101
+ * </pre>
+ *
+ * @author mhanna
+ * @version 0.1
+ */
+public class ReadLengthFilter extends ReadFilter {
+    @Argument(fullName = "maxReadLength", shortName = "maxRead", doc="Discard reads with length greater than the specified value", required=true)
+    private int maxReadLength;
+
+    @Argument(fullName = "minReadLength", shortName = "minRead", doc="Discard reads with length shorter than the specified value", required=true)
+    private int minReadLength = 1;
+
+    /**
+     * @param read the read to evaluate
+     * @return true when the read's length falls outside [minReadLength, maxReadLength]
+     */
+    public boolean filterOut(SAMRecord read) {
+        final int length = read.getReadLength();
+        return length > maxReadLength || length < minReadLength;
+    }
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/filters/ReadNameFilter.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/filters/ReadNameFilter.java
new file mode 100644
index 0000000..94548ab
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/filters/ReadNameFilter.java
@@ -0,0 +1,59 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.filters;
+
+import htsjdk.samtools.SAMRecord;
+import org.broadinstitute.gatk.utils.commandline.Argument;
+
+/**
+ * Only use reads with this read name
+ *
+ * <p>This filter is useful for isolating a particular read, pair of reads, or set of alignments for a given read
+ * when troubleshooting issues where the error message provided a culprit name.</p>
+ *
+ * <h3>Usage example</h3>
+ *
+ * <pre>
+ *     java -jar GenomeAnalysisTk.jar \
+ *         -T ToolName \
+ *         -R reference.fasta \
+ *         -I input.bam \
+ *         -o output.file \
+ *         -rf ReadName \
+ *         -rn read_name
+ * </pre>
+ *
+ * @author chartl
+ * @since 9/19/11
+ */
+public class ReadNameFilter extends ReadFilter {
+    @Argument(fullName = "readName", shortName = "rn", doc="Read name to whitelist", required=true)
+    private String readName;
+
+    /**
+     * @param rec the read to evaluate
+     * @return true (discard) unless the read's name equals the whitelisted name
+     */
+    public boolean filterOut(final SAMRecord rec) {
+        final String name = rec.getReadName();
+        return !name.equals(readName);
+    }
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/filters/ReadStrandFilter.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/filters/ReadStrandFilter.java
new file mode 100644
index 0000000..6cfcdac
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/filters/ReadStrandFilter.java
@@ -0,0 +1,62 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.filters;
+
+import htsjdk.samtools.SAMRecord;
+import org.broadinstitute.gatk.utils.commandline.Argument;
+
+/**
+ * Filter out reads based on strand orientation
+ *
+ * <p>This filter is useful for isolating reads from only forward or reverse strands. By default, it filters out reads
+ * from the negative (reverse) strand. This logic can be reversed by using the -filterPositive flag.</p>
+ *
+ * <h3>Usage example</h3>
+ *
+ * <h4>Set the read strand filter to filter out positive (forward) strand reads</h4>
+ * <pre>
+ *     java -jar GenomeAnalysisTk.jar \
+ *         -T ToolName \
+ *         -R reference.fasta \
+ *         -I input.bam \
+ *         -o output.file \
+ *         -rf ReadStrand \
+ *         -filterPositive
+ * </pre>
+ *
+ * @author chartl
+ * @version 0.1
+ */
+public class ReadStrandFilter extends ReadFilter {
+    @Argument(fullName = "filterPositive", shortName = "fp", doc="Discard reads on the forward strand",required=false)
+    boolean filterForward = false;
+
+    /**
+     * @param read the read to evaluate
+     * @return true when the read lies on the discarded strand: reverse-strand
+     *         reads by default, forward-strand reads when filterPositive is set
+     */
+    public boolean filterOut(SAMRecord read) {
+        final boolean onReverseStrand = read.getReadNegativeStrandFlag();
+        return onReverseStrand != filterForward;
+    }
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/filters/ReassignMappingQualityFilter.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/filters/ReassignMappingQualityFilter.java
new file mode 100644
index 0000000..dffd866
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/filters/ReassignMappingQualityFilter.java
@@ -0,0 +1,88 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.filters;
+
+import htsjdk.samtools.SAMRecord;
+import org.broadinstitute.gatk.utils.commandline.Argument;
+
+/**
+ * Set the mapping quality of all reads to a given value.
+ *
+ *  <p>
+ *     If a BAM file contains erroneous or missing mapping qualities (MAPQ), this read transformer will set all your
+ *     mapping qualities to a given value (see arguments list for default value).
+ *  </p>
+ *
+ * <h3>See also</h3>
+ *
+ * <p>ReassignOneMappingQualityFilter: reassigns a single MAPQ value, as opposed to all those found in the BAM file.</p>
+ *
+ * <h3>Caveats</h3>
+ *
+ * <p>Note that due to the order of operations involved in applying filters, it is possible that other read filters
+ * (determined either at command-line or internally by the tool you are using) will be applied to your data before
+ * this read transformation can be applied. If one of those other filters acts on the read mapping quality (MAPQ),
+ * then you may not obtain the expected results. Unfortunately it is currently not possible to change the order of
+ * operations from command line. To avoid the problem, we recommend applying this filter separately from any other
+ * analysis, using PrintReads.</p>
+ *
+ *
+ * <h3>Input</h3>
+ *  <p>
+ *	    BAM file(s)
+ *  </p>
+ *
+ * <h3>Output</h3>
+ *  <p>
+ *      BAM file(s) with the mapping qualities of all reads reassigned to the specified value
+ *  </p>
+ *
+ * <h3>Usage example</h3>
+ *  <pre>
+ *  java -jar GenomeAnalysisTK.jar \
+ *      -T PrintReads \
+ *      -R reference.fasta \
+ *      -I input.bam \
+ *      -o output.file \
+ *      -rf ReassignMappingQuality \
+ *      -DMQ 35
+ *  </pre>
+ *
+ * @author carneiro
+ * @since 8/8/11
+ */
+
+public class ReassignMappingQualityFilter extends ReadFilter {
+
+    @Argument(fullName = "default_mapping_quality", shortName = "DMQ", doc = "Default read mapping quality to assign to all reads", required = false)
+    public int defaultMappingQuality = 60;
+
+    // NOTE(review): although implemented as a ReadFilter, this is effectively a
+    // transformer: it overwrites every read's MAPQ in place and never removes a read.
+    public boolean filterOut(SAMRecord rec) {
+        rec.setMappingQuality(defaultMappingQuality);
+        return false; // always keep the read; only its MAPQ was changed
+    }
+}
+
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/filters/ReassignOneMappingQualityFilter.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/filters/ReassignOneMappingQualityFilter.java
new file mode 100644
index 0000000..0a4eeaf
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/filters/ReassignOneMappingQualityFilter.java
@@ -0,0 +1,90 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.filters;
+
+import htsjdk.samtools.SAMRecord;
+import org.broadinstitute.gatk.utils.commandline.Argument;
+
+/**
+ * Set the mapping quality of reads with a given value to another given value.
+ *
+ *  <p>
+ *     This read transformer will change a certain read mapping quality to a different value without affecting reads that
+ *     have other mapping qualities. This is intended primarily for users of RNA-Seq data handling programs such
+ *     as TopHat, which use MAPQ = 255 to designate uniquely aligned reads. According to convention, 255 normally
+ *     designates "unknown" quality, and most GATK tools automatically ignore such reads. By reassigning a different
+ *     mapping quality to those specific reads, users of TopHat and other tools can circumvent this problem without
+ *     affecting the rest of their dataset.
+ *  </p>
+ *
+ *  <p>
+ *     This differs from the ReassignMappingQuality filter by its selectivity -- only one mapping quality is targeted.
+ *     ReassignMappingQuality will change ALL mapping qualities to a single one, and is typically used for datasets
+ *     that have no assigned mapping qualities.
+ *  </p>
+ *
+ * <h3>Input</h3>
+ *  <p>
+ *	    BAM file(s)
+ *  </p>
+ *
+ *
+ * <h3>Output</h3>
+ *  <p>
+ *      BAM file(s) with one read mapping quality selectively reassigned as desired
+ *  </p>
+ *
+ * <h3>Usage example</h3>
+ *  <pre>
+ *    java -jar GenomeAnalysisTK.jar \
+ *      -T PrintReads \
+ *      -R reference.fasta \
+ *      -I input.bam \
+ *      -o output.file \
+ *      -rf ReassignOneMappingQuality \
+ *      -RMQF 255 \
+ *      -RMQT 60
+ *  </pre>
+ *
+ * @author vdauwera
+ * @since 2/19/13
+ */
+
+public class ReassignOneMappingQualityFilter extends ReadFilter {
+
+    @Argument(fullName = "reassign_mapping_quality_from", shortName = "RMQF", doc = "Original mapping quality", required = false)
+    public int reassignMappingQualityFrom = 255;
+
+    @Argument(fullName = "reassign_mapping_quality_to", shortName = "RMQT", doc = "Desired mapping quality", required = false)
+    public int reassignMappingQualityTo = 60;
+
+    /**
+     * Rewrites the MAPQ of any read whose quality equals reassignMappingQualityFrom;
+     * never removes a read from the stream.
+     *
+     * @param rec the read to (possibly) modify in place
+     * @return false always -- this transformer keeps every read
+     */
+    public boolean filterOut(SAMRecord rec) {
+        if (rec.getMappingQuality() == reassignMappingQualityFrom) {
+            rec.setMappingQuality(reassignMappingQualityTo);
+        }
+        return false;
+    }
+}
+
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/filters/SampleFilter.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/filters/SampleFilter.java
new file mode 100644
index 0000000..df582ab
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/filters/SampleFilter.java
@@ -0,0 +1,61 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.filters;
+
+import htsjdk.samtools.SAMReadGroupRecord;
+import htsjdk.samtools.SAMRecord;
+import org.broadinstitute.gatk.utils.commandline.Argument;
+
+import java.util.Set;
+
+/**
+ * Only use reads belonging to a specific sample
+ *
+ * <p>This filter is useful for isolating data from one particular sample in a multisample file.</p>
+ *
+ * <h3>Usage example</h3>
+ *
+ * <h4>Use only reads from the sample named NA12878</h4>
+ * <pre>
+ *     java -jar GenomeAnalysisTk.jar \
+ *         -T ToolName \
+ *         -R reference.fasta \
+ *         -I input.bam \
+ *         -o output.file \
+ *         -rf Sample \
+ *         -goodSM NA12878
+ * </pre>
+ *
+ */
+public class SampleFilter extends ReadFilter {
+    // Typed as Set<String> (the original used a raw Set); sample names are strings.
+    @Argument(fullName = "sample_to_keep", shortName = "goodSM", doc="The name of the sample(s) to keep, filtering out all others", required=true)
+    private Set<String> SAMPLES_TO_KEEP = null;
+
+    /**
+     * @param read the read to evaluate
+     * @return true (discard) unless the read has a read group whose sample is in SAMPLES_TO_KEEP
+     */
+    public boolean filterOut( final SAMRecord read ) {
+        final SAMReadGroupRecord readGroup = read.getReadGroup();
+        return !( readGroup != null && SAMPLES_TO_KEEP.contains(readGroup.getSample()) );
+    }
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/filters/SingleReadGroupFilter.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/filters/SingleReadGroupFilter.java
new file mode 100644
index 0000000..b5f7e44
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/filters/SingleReadGroupFilter.java
@@ -0,0 +1,63 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.filters;
+
+import htsjdk.samtools.SAMReadGroupRecord;
+import htsjdk.samtools.SAMRecord;
+import org.broadinstitute.gatk.utils.commandline.Argument;
+
+/**
+ * Only use reads from the specified read group
+ *
+ * <p>This filter is useful for isolating data from one particular read group (usually a single lane).</p>
+ *
+ * <h3>Usage example</h3>
+ *
+ * <h4>Use only reads from the read group with ID "read_group_1"</h4>
+ * <pre>
+ *     java -jar GenomeAnalysisTk.jar \
+ *         -T ToolName \
+ *         -R reference.fasta \
+ *         -I input.bam \
+ *         -o output.file \
+ *         -rf SingleReadGroup \
+ *         -goodRG read_group_1
+ * </pre>
+ *
+ * @author rpoplin
+ * @since Nov 27, 2009
+ *
+ */
+
+public class SingleReadGroupFilter extends ReadFilter {
+    @Argument(fullName = "read_group_to_keep", shortName = "goodRG", doc="The name of the read group to keep, filtering out all others", required=true)
+    private String READ_GROUP_TO_KEEP = null;
+
+    /**
+     * @param read the read to evaluate
+     * @return true (discard) unless the read belongs to the kept read group
+     */
+    public boolean filterOut( final SAMRecord read ) {
+        final SAMReadGroupRecord readGroup = read.getReadGroup();
+        if ( readGroup == null )
+            return true;
+        return !readGroup.getReadGroupId().equals( READ_GROUP_TO_KEEP );
+    }
+}
\ No newline at end of file
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/filters/UnmappedReadFilter.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/filters/UnmappedReadFilter.java
new file mode 100644
index 0000000..e448068
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/filters/UnmappedReadFilter.java
@@ -0,0 +1,56 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.filters;
+
+import htsjdk.samtools.SAMRecord;
+
+/**
+ * Filter out unmapped reads
+ *
+ *
+ * <p>This filter recognizes the SAM flag corresponding to being unmapped. It is intended to ensure that only
+ * reads that are likely to be mapped in the right place, and therefore to be informative, will be used in analysis.</p>
+ *
+ * <h3>Usage example</h3>
+ *
+ * <pre>
+ *     java -jar GenomeAnalysisTk.jar \
+ *         -T ToolName \
+ *         -R reference.fasta \
+ *         -I input.bam \
+ *         -o output.file \
+ *         -rf UnmappedRead
+ * </pre>
+ *
+ * @author rpoplin
+ * @since Dec 9, 2009
+ */
+
+public class UnmappedReadFilter extends ReadFilter {
+    /**
+     * @param read the read to evaluate
+     * @return true when the read is flagged unmapped or carries no alignment start
+     */
+    public boolean filterOut( final SAMRecord read ) {
+        if ( read.getReadUnmappedFlag() )
+            return true;
+        return read.getAlignmentStart() == SAMRecord.NO_ALIGNMENT_START;
+    }
+}
\ No newline at end of file
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/filters/package-info.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/filters/package-info.java
new file mode 100644
index 0000000..c5ba1d9
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/filters/package-info.java
@@ -0,0 +1,26 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.filters;
\ No newline at end of file
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/io/BySampleSAMFileWriter.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/io/BySampleSAMFileWriter.java
new file mode 100644
index 0000000..15b6f2d
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/io/BySampleSAMFileWriter.java
@@ -0,0 +1,70 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.io;
+
+import htsjdk.samtools.SAMFileHeader;
+import htsjdk.samtools.SAMProgramRecord;
+import htsjdk.samtools.SAMReadGroupRecord;
+import htsjdk.samtools.SAMRecord;
+import org.broadinstitute.gatk.engine.GenomeAnalysisEngine;
+import org.broadinstitute.gatk.utils.sam.SAMReaderID;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * Created by IntelliJ IDEA.
+ * User: carneiro
+ * Date: Nov 13
+ */
+public class BySampleSAMFileWriter extends NWaySAMFileWriter {
+
+    private final Map<String, SAMReaderID> sampleToWriterMap;
+
+    public BySampleSAMFileWriter(GenomeAnalysisEngine toolkit, String ext, SAMFileHeader.SortOrder order, boolean presorted, boolean indexOnTheFly, boolean generateMD5, SAMProgramRecord pRecord, boolean keep_records) {
+        super(toolkit, ext, order, presorted, indexOnTheFly, generateMD5, pRecord, keep_records);
+
+        sampleToWriterMap = new HashMap<String, SAMReaderID>(toolkit.getSAMFileHeader().getReadGroups().size() * 2);
+
+        for (SAMReaderID readerID : toolkit.getReadsDataSource().getReaderIDs()) {
+            for (SAMReadGroupRecord rg : toolkit.getReadsDataSource().getHeader(readerID).getReadGroups()) {
+                String sample = rg.getSample();
+                if (sampleToWriterMap.containsKey(sample) && sampleToWriterMap.get(sample) != readerID) {
+                    throw new ReviewedGATKException("The same sample appears in multiple files, this input cannot be multiplexed using the BySampleSAMFileWriter, try NWaySAMFileWriter instead.");
+                }
+                else {
+                    sampleToWriterMap.put(sample, readerID);
+                }
+            }
+        }
+    }
+
+    @Override
+    public void addAlignment(SAMRecord samRecord) {
+        super.addAlignment(samRecord, sampleToWriterMap.get(samRecord.getReadGroup().getSample()));
+    }
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/io/DirectOutputTracker.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/io/DirectOutputTracker.java
new file mode 100644
index 0000000..d5c6695
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/io/DirectOutputTracker.java
@@ -0,0 +1,48 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.io;
+
+import org.broadinstitute.gatk.engine.io.storage.Storage;
+import org.broadinstitute.gatk.engine.io.storage.StorageFactory;
+import org.broadinstitute.gatk.engine.io.stubs.Stub;
+
+/**
+ * Maps creation of storage directly to output streams in parent.
+ *
+ * @author mhanna
+ * @version 0.1
+ */
+public class DirectOutputTracker extends OutputTracker {
+    public <T> T getStorage( Stub<T> stub ) {
+        Storage target = outputs.get(stub);
+        if( target == null ) {
+            target = StorageFactory.createStorage(stub);
+            outputs.put(stub, target);
+        }
+        return (T)target;
+    }
+
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/io/FastqFileWriter.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/io/FastqFileWriter.java
new file mode 100644
index 0000000..ec9b5ee
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/io/FastqFileWriter.java
@@ -0,0 +1,77 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.io;
+
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
+import org.broadinstitute.gatk.utils.sam.ReadUtils;
+
+import java.io.FileNotFoundException;
+import java.io.PrintStream;
+
+/**
+ * User: carneiro
+ * Date: 1/27/13
+ * Time: 12:54 AM
+ */
+public class FastqFileWriter {
+    private PrintStream output;
+
+    public FastqFileWriter(String filename) {
+        try {
+            this.output = new PrintStream(filename);
+        } catch (FileNotFoundException e) {
+            throw new ReviewedGATKException("Can't open file " + filename);
+        }
+    }
+
+    public void addAlignment(GATKSAMRecord read) {
+        output.println("@" + read.getReadName());
+
+        if (read.getReadNegativeStrandFlag()) {
+            output.println(ReadUtils.getBasesReverseComplement(read));
+            output.println("+");
+            output.println(ReadUtils.convertReadQualToString(invertQuals(read.getBaseQualities())));
+        } else {
+            output.println(ReadUtils.convertReadBasesToString(read));
+            output.println("+");
+            output.println(ReadUtils.convertReadQualToString(read));
+        }
+    }
+
+    public void close() {
+        this.output.close();
+    }
+
+    private byte[] invertQuals (byte[] quals) {
+        final int l = quals.length;
+        byte[] invertedQuals = new byte[l];
+        for (int i=0; i<l; i++) {
+            invertedQuals[l-1-i] = quals[i];
+        }
+        return invertedQuals;
+    }
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/io/NWaySAMFileWriter.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/io/NWaySAMFileWriter.java
new file mode 100644
index 0000000..87a8b66
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/io/NWaySAMFileWriter.java
@@ -0,0 +1,255 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.io;
+
+import htsjdk.samtools.*;
+import htsjdk.samtools.util.ProgressLoggerInterface;
+import org.broadinstitute.gatk.engine.GenomeAnalysisEngine;
+import org.broadinstitute.gatk.utils.commandline.CommandLineProgram;
+import org.broadinstitute.gatk.utils.sam.SAMReaderID;
+import org.broadinstitute.gatk.utils.exceptions.GATKException;
+import org.broadinstitute.gatk.utils.exceptions.UserException;
+import org.broadinstitute.gatk.utils.sam.GATKSAMFileWriter;
+
+import java.io.File;
+import java.util.*;
+
/**
 * A SAMFileWriter that writes "n-way-out": it maintains one underlying writer
 * per input SAM/BAM reader registered with the GATK engine, and routes each
 * record back to the output file corresponding to the input it was read from.
 *
 * Created by IntelliJ IDEA.
 * User: asivache
 * Date: May 31, 2011
 * Time: 3:52:49 PM
 */
public class NWaySAMFileWriter implements SAMFileWriter {

    // One underlying writer per input reader; populated by setupByReader().
    private Map<SAMReaderID,SAMFileWriter> writerMap = null;
    // Sort-order promise recorded at construction (also passed through to setupByReader).
    // NOTE(review): this field is not read again within this class's visible code.
    private boolean presorted ;
    // Engine handle used to look up headers, readers, and the reader ID of each record.
    GenomeAnalysisEngine toolkit;
    // NOTE(review): set from the constructor but never read in this class's visible code —
    // presumably consulted elsewhere (subclasses/reflection); confirm before removing.
    boolean KEEP_ALL_PG_RECORDS = false;

    /**
     * Creates a writer whose per-input output file names are taken from an explicit
     * input-name -> output-name map.
     */
    public NWaySAMFileWriter(GenomeAnalysisEngine toolkit, Map<String,String> in2out, SAMFileHeader.SortOrder order,
                             boolean presorted, boolean indexOnTheFly, boolean generateMD5, SAMProgramRecord pRecord, boolean keep_records) {
        this.presorted = presorted;
        this.toolkit = toolkit;
        this.KEEP_ALL_PG_RECORDS = keep_records;
        writerMap = new HashMap<SAMReaderID,SAMFileWriter>();
        setupByReader(toolkit,in2out,order, presorted, indexOnTheFly, generateMD5, pRecord);
    }

    /**
     * Creates a writer whose per-input output file names are derived from each input
     * name by replacing its .sam/.bam extension with {@code ext}.
     */
    public NWaySAMFileWriter(GenomeAnalysisEngine toolkit, String ext, SAMFileHeader.SortOrder order,
                              boolean presorted, boolean indexOnTheFly , boolean generateMD5, SAMProgramRecord pRecord, boolean keep_records) {
        this.presorted = presorted;
        this.toolkit = toolkit;
        this.KEEP_ALL_PG_RECORDS = keep_records;
        writerMap = new HashMap<SAMReaderID,SAMFileWriter>();
        setupByReader(toolkit,ext,order, presorted, indexOnTheFly, generateMD5, pRecord);
    }

    /** Convenience constructor: no @PG record, do not keep all @PG records. */
    public NWaySAMFileWriter(GenomeAnalysisEngine toolkit, Map<String,String> in2out, SAMFileHeader.SortOrder order,
                             boolean presorted, boolean indexOnTheFly, boolean generateMD5) {
        this(toolkit, in2out, order, presorted, indexOnTheFly, generateMD5, null,false);
    }

    /** Convenience constructor: no @PG record, do not keep all @PG records. */
    public NWaySAMFileWriter(GenomeAnalysisEngine toolkit, String ext, SAMFileHeader.SortOrder order,
                              boolean presorted, boolean indexOnTheFly , boolean generateMD5) {
        this(toolkit, ext, order, presorted, indexOnTheFly, generateMD5, null,false);
    }

    /**
     * Creates a program record for the program, adds it to the list of program records (@PG tags) in the bam file and sets
     * up the writer with the header and presorted status.
     *
     * Existing @PG records whose IDs start with the new record's ID are dropped
     * (treated as prior runs of the same tool).  If {@code programRecord} is null
     * the cloned header's program records are left untouched.
     *
     * @param originalHeader      original header
     * @param programRecord       the program record for this program
     */
    public static SAMFileHeader setupWriter(final SAMFileHeader originalHeader, final SAMProgramRecord programRecord) {
        final SAMFileHeader header = originalHeader.clone();
        final List<SAMProgramRecord> oldRecords = header.getProgramRecords();
        final List<SAMProgramRecord> newRecords = new ArrayList<SAMProgramRecord>(oldRecords.size()+1);
        for ( SAMProgramRecord record : oldRecords )
            if ( (programRecord != null && !record.getId().startsWith(programRecord.getId())))
                newRecords.add(record);

        if (programRecord != null) {
            newRecords.add(programRecord);
            header.setProgramRecords(newRecords);
        }
        return header;
    }

    /**
    * Creates a program record for the program, adds it to the list of program records (@PG tags) in the bam file and returns
    * the new header to be added to the BAM writer.
    *
    * @param toolkit             the engine
    * @param walker              the walker object (so we can extract the command line)
    * @param PROGRAM_RECORD_NAME the name for the PG tag
    * @return a pre-filled header for the bam writer
    */
    public static SAMFileHeader setupWriter(final GenomeAnalysisEngine toolkit, final SAMFileHeader originalHeader, final Object walker, final String PROGRAM_RECORD_NAME) {
        final SAMProgramRecord programRecord = createProgramRecord(toolkit, walker, PROGRAM_RECORD_NAME);
        return setupWriter(originalHeader, programRecord);
    }

    /**
     * Creates a program record for the program, adds it to the list of program records (@PG tags) in the bam file and sets
     * up the writer with the header and presorted status.
     *
     * @param writer              BAM file writer
     * @param toolkit             the engine
     * @param preSorted           whether or not the writer can assume reads are going to be added are already sorted
     * @param walker              the walker object (so we can extract the command line)
     * @param PROGRAM_RECORD_NAME the name for the PG tag
     */
    public static void setupWriter(GATKSAMFileWriter writer, GenomeAnalysisEngine toolkit, SAMFileHeader originalHeader, boolean preSorted, Object walker, String PROGRAM_RECORD_NAME) {
        SAMFileHeader header = setupWriter(toolkit, originalHeader, walker, PROGRAM_RECORD_NAME);
        writer.writeHeader(header);
        writer.setPresorted(preSorted);
    }

    /**
     * Creates a program record (@PG) tag
     *
     * @param toolkit             the engine
     * @param walker              the walker object (so we can extract the command line)
     * @param PROGRAM_RECORD_NAME the name for the PG tag
     * @return a program record for the tool
     */
    public static SAMProgramRecord createProgramRecord(GenomeAnalysisEngine toolkit, Object walker, String PROGRAM_RECORD_NAME) {
        final SAMProgramRecord programRecord = new SAMProgramRecord(PROGRAM_RECORD_NAME);
        try {
            programRecord.setProgramVersion(CommandLineProgram.getVersionNumber());
        } catch (MissingResourceException e) {
            // couldn't care less if the resource is missing...
        }
        programRecord.setCommandLine(toolkit.createApproximateCommandLineArgumentString(toolkit, walker));
        return programRecord;
    }

    /**
     * Instantiates multiple underlying SAM writes, one per input SAM reader registered with GATK engine (those will be retrieved
     * from <code>toolkit</code>). The <code>in2out</code> map must contain an entry for each input filename and map it
     * onto a unique output file name.
     * @param toolkit
     * @param in2out
     */
    public void setupByReader(GenomeAnalysisEngine toolkit, Map<String,String> in2out, SAMFileHeader.SortOrder order,
                              boolean presorted, boolean indexOnTheFly, boolean generateMD5, SAMProgramRecord pRecord) {
        if ( in2out==null ) throw new GATKException("input-output bam filename map for n-way-out writing is NULL");
        for ( SAMReaderID rid : toolkit.getReadsDataSource().getReaderIDs() ) {

            String fName = toolkit.getReadsDataSource().getSAMFile(rid).getName();

            String outName;
            if ( ! in2out.containsKey(fName) )
                    throw new UserException.BadInput("Input-output bam filename map does not contain an entry for the input file "+fName);
            outName = in2out.get(fName);

            // A reader appearing twice means the map file listed the same input more than once.
            if ( writerMap.containsKey( rid ) )
                throw new GATKException("nWayOut mode: Reader id for input sam file "+fName+" is already registered; "+
                        "map file likely contains multiple entries for this input file");

            addWriter(rid,outName, order, presorted, indexOnTheFly, generateMD5, pRecord);
        }

    }

    /**
     * Instantiates multiple underlying SAM writes, one per input SAM reader registered with GATK engine (those will be retrieved
     * from <code>toolkit</code>). The output file names will be generated automatically by stripping ".sam" or ".bam" off the
     * input file name and adding ext instead (e.g. ".cleaned.bam").
     * onto a unique output file name.
     * @param toolkit
     * @param ext
     */
    public void setupByReader(GenomeAnalysisEngine toolkit, String ext, SAMFileHeader.SortOrder order,
                              boolean presorted, boolean indexOnTheFly, boolean generateMD5, SAMProgramRecord pRecord) {
        for ( SAMReaderID rid : toolkit.getReadsDataSource().getReaderIDs() ) {

            String fName = toolkit.getReadsDataSource().getSAMFile(rid).getName();

            String outName;
            int pos ;
            // Case-insensitive extension match; the substring below still uses the original casing.
            if ( fName.toUpperCase().endsWith(".BAM") ) pos = fName.toUpperCase().lastIndexOf(".BAM");
            else {
                if ( fName.toUpperCase().endsWith(".SAM") ) pos = fName.toUpperCase().lastIndexOf(".SAM");
                else throw new UserException.BadInput("Input file name "+fName+" does not end with .sam or .bam");
            }
            String prefix = fName.substring(0,pos);
            outName = prefix+ext;

            if ( writerMap.containsKey( rid ) )
                throw new GATKException("nWayOut mode: Reader id for input sam file "+fName+" is already registered");
            addWriter(rid,outName, order, presorted, indexOnTheFly, generateMD5, pRecord);
        }

    }

    // Creates and registers a single underlying writer for the given reader ID,
    // stamping the (possibly null) program record into a cloned per-input header.
    private void addWriter(SAMReaderID id , String outName, SAMFileHeader.SortOrder order, boolean presorted,
                           boolean indexOnTheFly, boolean generateMD5, SAMProgramRecord programRecord) {
        File f = new File(outName);
        SAMFileHeader header = setupWriter(toolkit.getSAMFileHeader(id), programRecord);
        SAMFileWriterFactory factory = new SAMFileWriterFactory();
        factory.setCreateIndex(indexOnTheFly);
        factory.setCreateMd5File(generateMD5);
        SAMFileWriter sw = factory.makeSAMOrBAMWriter(header, presorted, f);
        writerMap.put(id,sw);
    }

    /** Returns all underlying per-input writers. */
    public Collection<SAMFileWriter> getWriters() {
        return writerMap.values();
    }

    /**
     * Routes the record to the writer of the input file it was read from,
     * restoring the original read-group ID (the engine may have remapped it).
     */
    public void addAlignment(SAMRecord samRecord) {
        final SAMReaderID id = toolkit.getReaderIDForRead(samRecord);
        String rg = samRecord.getStringAttribute("RG");
        if ( rg != null ) {
            String rg_orig = toolkit.getReadsDataSource().getOriginalReadGroupId(rg);
            samRecord.setAttribute("RG",rg_orig);
        }
        addAlignment(samRecord, id);
    }

    /** Writes the record via the underlying writer registered for the given reader ID. */
    public void addAlignment(SAMRecord samRecord, SAMReaderID readerID) {
        writerMap.get(readerID).addAlignment(samRecord);
    }

    /** Returns the engine's merged header (not any single per-input header). */
    public SAMFileHeader getFileHeader() {
        return toolkit.getSAMFileHeader();
    }

    /** Closes every underlying writer. */
    public void close() {
        for ( SAMFileWriter w : writerMap.values() ) w.close();
    }

    @Override
    public void setProgressLogger(final ProgressLoggerInterface logger) {
        for (final SAMFileWriter writer: writerMap.values()) {
            writer.setProgressLogger(logger);
        }
    }
}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/io/OutputTracker.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/io/OutputTracker.java
new file mode 100644
index 0000000..693d709
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/io/OutputTracker.java
@@ -0,0 +1,193 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.io;
+
+import htsjdk.samtools.ValidationStringency;
+import org.broadinstitute.gatk.utils.io.ReferenceBacked;
+import org.broadinstitute.gatk.utils.commandline.ArgumentSource;
+import org.broadinstitute.gatk.engine.io.storage.Storage;
+import org.broadinstitute.gatk.engine.io.storage.StorageFactory;
+import org.broadinstitute.gatk.engine.io.stubs.OutputStreamStub;
+import org.broadinstitute.gatk.engine.io.stubs.Stub;
+import org.broadinstitute.gatk.engine.walkers.Walker;
+import org.broadinstitute.gatk.utils.classloader.JVMUtils;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+import org.broadinstitute.gatk.utils.exceptions.UserException;
+import org.broadinstitute.gatk.utils.io.IOUtils;
+import org.broadinstitute.gatk.utils.sam.SAMReaderBuilder;
+
+import java.io.File;
+import java.io.OutputStream;
+import java.lang.reflect.Field;
+import java.util.HashMap;
+import java.util.Map;
+
/**
 * Manages the output and err streams that are created specifically for walker
 * output.
 *
 * Concrete subclasses decide how storage is created for each stub (e.g. directly
 * or per thread group).
 */
public abstract class OutputTracker implements ReferenceBacked {
    /**
     * The reference file.
     */
    private File referenceFile;

    /**
     * The streams to which walker users should be reading directly.
     */
    protected Map<ArgumentSource, Object> inputs = new HashMap<ArgumentSource,Object>();

    /**
     * The streams to which walker users should be writing directly.
     * A null value means the storage for that stub has not been created yet.
     */
    protected Map<Stub,Storage> outputs = new HashMap<Stub,Storage>();

    /**
     * Special-purpose stub.  Provides a connection to output streams.
     */
    protected OutputStreamStub outStub = null;

    /**
     * Special-purpose stream.  Provides a connection to error streams.
     */
    protected OutputStreamStub errStub = null;

    /**
     * Gets the output storage associated with a given stub.
     * @param stub The stub for which to find / create the right output stream.
     * @param <T> Type of the stream to create.
     * @return Storage object with a facade of type T.
     */
    public abstract <T> T getStorage( Stub<T> stub );

    @Override
    public File getReferenceFile() {
        return referenceFile;
    }

    @Override
    public void setReferenceFile(final File referenceFile) {
        this.referenceFile = referenceFile;
    }

    /**
     * Injects each registered input object into its walker field via reflection,
     * finishing any deferred SAMReaderBuilder construction along the way.
     */
    public void prepareWalker( Walker walker, ValidationStringency strictnessLevel ) {
        for( Map.Entry<ArgumentSource,Object> io: inputs.entrySet() ) {
            ArgumentSource targetField = io.getKey();
            Object targetValue = io.getValue();

            // Ghastly hack: reaches in and finishes building out the SAMFileReader.
            // TODO: Generalize this, and move it to its own initialization step.
            if( targetValue instanceof SAMReaderBuilder) {
                SAMReaderBuilder builder = (SAMReaderBuilder)targetValue;
                builder.setValidationStringency(strictnessLevel);
                targetValue = builder.build();
            }

            JVMUtils.setFieldValue( targetField.field, walker, targetValue );
        }
    }

    /**
     * Provide a mechanism for injecting supplemental streams for external management.
     * @param argumentSource source Class / field into which to inject this stream.
     * @param stub Stream to manage.
     */
    public void addInput( ArgumentSource argumentSource, Object stub ) {
        inputs.put(argumentSource,stub);
    }

    /**
     * Provide a mechanism for injecting supplemental streams for external management.
     * Storage is created lazily (registered here with a null storage).
     * @param stub Stream to manage.
     */
    public <T> void addOutput(Stub<T> stub) {
        addOutput(stub,null);
    }

    /**
     * Provide a mechanism for injecting supplemental streams for external management.
     * Registers this tracker with the stub, records the (possibly null) storage,
     * and fails fast if the stub's output path is not writable.
     * @param stub Stream to manage.
     */
    public <T> void addOutput(Stub<T> stub, Storage<T> storage) {
        stub.register(this);
        outputs.put(stub,storage);
        validateOutputPath(stub);
    }

    /**
     * Close down all existing output streams.
     */
    public void close() {
        for( Stub stub: outputs.keySet() ) {
            // If the stream hasn't yet been created, create it so that there's at least an empty file present.
            // (getTargetStream only re-puts an existing key, so iterating keySet here is safe —
            // no structural modification, no ConcurrentModificationException.)
            if( outputs.get(stub) == null )
                getTargetStream(stub);

            // Close down the storage.
            outputs.get(stub).close();
        }
    }

    /**
     * Collects the target stream for this data.
     * @param stub The stub for this stream.
     * @param <T> type of stub.
     * @return An instantiated file into which data can be written.
     */
    protected <T> T getTargetStream( Stub<T> stub ) {
        if( !outputs.containsKey(stub) )
            throw new ReviewedGATKException("OutputTracker was not notified that this stub exists: " + stub);
        Storage<T> storage = outputs.get(stub);
        if( storage == null ) {
            storage = StorageFactory.createStorage(stub);
            outputs.put(stub,storage);
        }
        // Unchecked by design: Storage<T> is created from Stub<T>, so the facade type matches.
        return (T)storage;
    }

    /**
     * Ensures that the File associated with this stub (if any) is in a writable location
     * @param stub
     */
    protected <T> void validateOutputPath(final Stub<T> stub) {
        // Special files (e.g. /dev/null, stdout) are exempt from the directory checks.
        if (stub.getOutputFile() != null && !(IOUtils.isSpecialFile(stub.getOutputFile()))) {
            final File parentDir = stub.getOutputFile().getAbsoluteFile().getParentFile();
            if (! (parentDir.canWrite() && parentDir.canExecute()))
                throw new UserException.CouldNotCreateOutputFile(stub.getOutputFile(),
                        "either the containing directory doesn't exist or it isn't writable");
        }
    }

    /**
     * Install an OutputStreamStub into the given fieldName of the given walker.
     * NOTE(review): not referenced within this class — presumably called by
     * subclasses or kept for future use; confirm before removing.
     * @param walker Walker into which to inject the field name.
     * @param fieldName Name of the field into which to inject the stub.
     */
    private void installStub( Walker walker, String fieldName, OutputStream outputStream ) {
        Field field = JVMUtils.findField( walker.getClass(), fieldName );
        JVMUtils.setFieldValue( field, walker, outputStream );
    }    
}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/io/ThreadGroupOutputTracker.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/io/ThreadGroupOutputTracker.java
new file mode 100644
index 0000000..70a94a6
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/io/ThreadGroupOutputTracker.java
@@ -0,0 +1,170 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.io;
+
+import org.broadinstitute.gatk.engine.executive.OutputMergeTask;
+import org.broadinstitute.gatk.engine.io.storage.Storage;
+import org.broadinstitute.gatk.engine.io.storage.StorageFactory;
+import org.broadinstitute.gatk.engine.io.stubs.Stub;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+import org.broadinstitute.gatk.utils.exceptions.UserException;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * An output tracker that can either track its output per-thread or directly.
+ *
+ * This output tracker doesn't use thread local values, but rather looks up the
+ * storage map via the thread's group.  This is necessary in the case where
+ * there's a master thread that creates the output map, and spawns subthreads
+ * that actually do work.  As long as those subthreads are spawned in the
+ * thread group of the master thread, this tracker will properly find the
+ * storage map associated with the master thread in the group, and return
+ * the map to all subthreads.
+ *
+ * @author mhanna, depristo
+ * @version 0.2
+ */
+public class ThreadGroupOutputTracker extends OutputTracker {
+    /**
+     * A map from the thread group of the master thread to the storage map
+     * from Stub to Storage objects
+     */
+    private Map<ThreadGroup, Map<Stub, Storage>> threadsToStorage = new HashMap<ThreadGroup, Map<Stub, Storage>>();
+
+    /**
+     * A total hack.  If bypass = true, bypass thread local storage and write directly
+     * to the target file.  Used to handle output during initialize() and onTraversalDone().
+     */
+    private boolean bypass = false;
+    public void bypassThreadLocalStorage(boolean bypass) {
+        this.bypass = bypass;
+    }
+
+    /**
+     * Initialize the storage map for this thread.
+     *
+     * Checks if there's a storage map bound to this thread's group, and if
+     * not, initializes the map for it.  This map is then
+     * populated with stub -> storage bindings according to the
+     * superclass's outputs map.
+     *
+     * Must be called within the master thread to create a map associated with
+     * the master thread ID.
+     */
+    public synchronized void initializeStorage() {
+        final ThreadGroup group = Thread.currentThread().getThreadGroup();
+        Map<Stub,Storage> threadLocalOutputStreams = threadsToStorage.get(group);
+
+        if( threadLocalOutputStreams == null ) {
+            threadLocalOutputStreams = new HashMap<Stub,Storage>();
+            threadsToStorage.put( group, threadLocalOutputStreams );
+        }
+
+        for ( final Stub stub : outputs.keySet() ) {
+            final Storage target = StorageFactory.createStorage(stub, createTempFile(stub));
+            threadLocalOutputStreams.put(stub, target);
+        }
+    }
+
+    @Override
+    public <T> T getStorage( final Stub<T> stub ) {
+        Storage target;
+
+        if (bypass) {
+            target = outputs.get(stub);
+            if( target == null ) {
+                target = StorageFactory.createStorage(stub);
+                outputs.put(stub, target);
+            }
+        }
+        else {
+            final Map<Stub,Storage> threadLocalOutputStreams = findStorage(Thread.currentThread());
+            target = threadLocalOutputStreams.get(stub);
+
+            // make sure something hasn't gone wrong, and we somehow find a map that doesn't include our stub
+            if ( target == null )
+                throw new ReviewedGATKException("target isn't supposed to be null for " + Thread.currentThread()
+                        + " id " + Thread.currentThread().getId() + " map is " + threadLocalOutputStreams);
+        }
+
+        return (T)target;
+    }
+
+
+    private synchronized Map<Stub,Storage> findStorage(final Thread thread) {
+        final Map<Stub, Storage> map = threadsToStorage.get(thread.getThreadGroup());
+
+        if ( map != null ) {
+            return map;
+        } else {
+            // something is terribly wrong, we have a storage lookup for a thread that doesn't have
+            // any map data associated with it!
+            throw new ReviewedGATKException("Couldn't find storage map associated with thread " + thread + " in group " + thread.getThreadGroup());
+        }
+    }
+
+    /**
+     * Close down any existing temporary files which have been opened.
+     */
+    public synchronized OutputMergeTask closeStorage() {
+        final Map<Stub,Storage> threadLocalOutputStreams = findStorage(Thread.currentThread());
+
+        if( threadLocalOutputStreams == null || threadLocalOutputStreams.isEmpty() )
+            return null;
+
+        final OutputMergeTask outputMergeTask = new OutputMergeTask();
+        for( Map.Entry<Stub,Storage> entry: threadLocalOutputStreams.entrySet() ) {
+            final Stub stub = entry.getKey();
+            final Storage storageEntry = entry.getValue();
+
+            storageEntry.close();
+            outputMergeTask.addMergeOperation(getTargetStream(stub), storageEntry);
+        }
+
+//        logger.info("Closing " + Thread.currentThread().getId() + " => " + threadLocalOutputStreams);
+        threadLocalOutputStreams.clear();
+
+        return outputMergeTask;
+    }
+
+    /**
+     * Creates a temporary file for a stub of the given type.
+     * @param stub Stub for which to create a temporary file.
+     * @param <T> Type of the stub to accept.
+     * @return A temp file, or throws an exception if the temp file cannot be created.
+     */
+    private <T> File createTempFile( Stub<T> stub ) {
+        try {
+            return File.createTempFile( stub.getClass().getName(), null );
+        } catch( IOException ex ) {
+            throw new UserException.BadTmpDir("Unable to create temporary file for stub: " + stub.getClass().getName() );
+        }
+    }
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/io/storage/OutputStreamStorage.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/io/storage/OutputStreamStorage.java
new file mode 100644
index 0000000..7ed538b
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/io/storage/OutputStreamStorage.java
@@ -0,0 +1,144 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.io.storage;
+
+import org.broadinstitute.gatk.engine.io.stubs.OutputStreamStub;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+import org.broadinstitute.gatk.utils.exceptions.UserException;
+
+import java.io.*;
+import java.nio.channels.Channels;
+import java.nio.channels.FileChannel;
+import java.nio.channels.WritableByteChannel;
+
+public class OutputStreamStorage extends OutputStream implements Storage<OutputStream> {
+    /**
+     * File to which data will temporarily be written.
+     */
+    private final File file;
+
+    /**
+     * Stream to which data in this shard will be written.
+     */
+    private final OutputStream outputStream;
+
+    /**
+     * Create a new storage area with the given stub.
+     * @param stub
+     */
+    public OutputStreamStorage( OutputStreamStub stub ) {
+        if( stub.getOutputFile() != null ) {
+            this.file = stub.getOutputFile();
+            this.outputStream = initializeOutputStream(stub.getOutputFile());
+        }
+        else if( stub.getOutputStream() != null ) {
+            this.file = null;
+            this.outputStream = stub.getOutputStream();           
+        }
+        else
+            throw new ReviewedGATKException("Not enough information to create storage for an OutputStream; need either a file or an existing output stream");
+    }
+
+    public OutputStreamStorage( OutputStreamStub stub, File file ) {
+        this.file = file;
+        this.outputStream = initializeOutputStream(file);
+    }
+
+    private OutputStream initializeOutputStream( File file ) {
+        try {
+            return new FileOutputStream( file );
+        }
+        catch(FileNotFoundException ex) {
+            throw new UserException.CouldNotCreateOutputFile(file, "Unable to open output stream for file", ex);
+        }
+    }
+
+    /**
+     * {@inheritDoc}
+     */
+    public void flush() throws IOException {
+        outputStream.flush();
+    }
+
+    /**
+     * {@inheritDoc}
+     */
+    public void close() {
+        // Don't close System.out or System.err; this'll cause trouble
+        // with subsequent code running in this VM.
+        if( outputStream == System.out || outputStream == System.err )
+            return;
+        
+        try {
+            outputStream.close();
+        }
+        catch( IOException ex ) {
+            throw new UserException.CouldNotCreateOutputFile(file, "Unable to close output stream", ex );
+        }
+    }
+
+    /**
+     * {@inheritDoc}
+     */
+    public void write( byte[] b ) throws IOException {
+        outputStream.write(b);
+    }
+
+    /**
+     * {@inheritDoc}
+     */
+    public void write( byte[] b, int off, int len ) throws IOException {
+        outputStream.write(b, off, len);
+    }
+
+    /**
+     * {@inheritDoc}
+     */
+    public void write( int b ) throws IOException {
+        outputStream.write(b);
+    }
+
+
+    public void mergeInto( OutputStream targetStream ) {
+        FileInputStream sourceStream = null;
+        try {
+            sourceStream = new FileInputStream( file );
+            FileChannel sourceChannel = sourceStream.getChannel();
+
+            WritableByteChannel targetChannel = Channels.newChannel( targetStream );
+            sourceChannel.transferTo( 0, sourceChannel.size(), targetChannel );
+
+            sourceStream.close();
+            file.delete();
+        }
+        catch( FileNotFoundException ex ) {
+            throw new UserException.CouldNotReadInputFile(file, "Unable to open input stream for file", ex);
+        }
+        catch( IOException ex ) {
+            throw new UserException.CouldNotReadInputFile(file, "Unable to transfer contents of file", ex);
+        }
+    }
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/io/storage/SAMFileWriterStorage.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/io/storage/SAMFileWriterStorage.java
new file mode 100644
index 0000000..108b1f0
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/io/storage/SAMFileWriterStorage.java
@@ -0,0 +1,172 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.io.storage;
+
+import htsjdk.samtools.*;
+import htsjdk.samtools.util.CloseableIterator;
+import htsjdk.samtools.util.ProgressLoggerInterface;
+import htsjdk.samtools.util.RuntimeIOException;
+import org.apache.log4j.Logger;
+import org.broadinstitute.gatk.engine.io.stubs.SAMFileWriterStub;
+import org.broadinstitute.gatk.utils.exceptions.UserException;
+import org.broadinstitute.gatk.utils.sam.SimplifyingSAMFileWriter;
+
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.lang.reflect.InvocationTargetException;
+import java.lang.reflect.Method;
+
+/**
+ * Provides temporary storage for SAMFileWriters.
+ *
+ * @author mhanna
+ * @version 0.1
+ */
+public class SAMFileWriterStorage implements SAMFileWriter, Storage<SAMFileWriter> {
+    private final File file;
+    private File referenceFasta;
+    private SAMFileWriter writer;
+
+    private static Logger logger = Logger.getLogger(SAMFileWriterStorage.class);
+
+    public SAMFileWriterStorage( SAMFileWriterStub stub ) {
+        this(stub,stub.getOutputFile());
+    }
+
+    public SAMFileWriterStorage( SAMFileWriterStub stub, File file ) {
+        this.referenceFasta = stub.getReferenceFile();
+        this.file = file;
+        SAMFileWriterFactory factory = new SAMFileWriterFactory();
+        // Enable automatic index creation for pre-sorted BAMs.
+        if (stub.getFileHeader().getSortOrder().equals(SAMFileHeader.SortOrder.coordinate) && stub.getIndexOnTheFly())
+            factory.setCreateIndex(true);
+        if (stub.getGenerateMD5())
+            factory.setCreateMd5File(true);
+        // Adjust max records in RAM.
+        // TODO -- this doesn't actually work because of a bug in Picard; do not use until fixed
+        if(stub.getMaxRecordsInRam() != null)
+            factory.setMaxRecordsInRam(stub.getMaxRecordsInRam());
+
+        if(stub.getOutputFile() != null) {
+            try {
+                if (stub.getOutputFile().getName().toLowerCase().endsWith(".cram")) {
+                    this.writer = createCRAMWriter(factory, stub.getFileHeader(), file, this.referenceFasta);
+                } else {
+                    this.writer = createBAMWriter(factory,stub.getFileHeader(),stub.isPresorted(),file,stub.getCompressionLevel());
+                }
+            } catch(RuntimeIOException ex) {
+                throw new UserException.CouldNotCreateOutputFile(file,"file could not be created",ex);
+            }
+        }
+        else if(stub.getOutputStream() != null){
+            this.writer = factory.makeSAMWriter( stub.getFileHeader(), stub.isPresorted(), stub.getOutputStream());
+        }
+        else
+            throw new UserException("Unable to write to SAM file; neither a target file nor a stream has been specified");
+
+        // if we want to send the BAM file through the simplifying writer, wrap it here
+        if ( stub.simplifyBAM() ) {
+            this.writer = new SimplifyingSAMFileWriter(this.writer);
+        }
+    }
+
+    public SAMFileHeader getFileHeader() {
+        return writer.getFileHeader();
+    }
+
+    public void addAlignment( SAMRecord read ) {
+        writer.addAlignment(read);
+    }
+
+    public void close() {
+        try {
+            writer.close();
+        } catch (RuntimeIOException e) {
+            throw new UserException.ErrorWritingBamFile(e.getMessage());
+        }
+    }
+
+    public void mergeInto( SAMFileWriter targetStream ) {
+        SAMFileReader reader = new SAMFileReader( file );
+        try {
+            CloseableIterator<SAMRecord> iterator = reader.iterator();
+            while( iterator.hasNext() )
+                targetStream.addAlignment( iterator.next() );
+            iterator.close();
+        }
+        finally {
+            reader.close();
+            file.delete();
+        }
+    }
+
+    private SAMFileWriter createCRAMWriter(final SAMFileWriterFactory factory,
+                                           final SAMFileHeader header,
+                                           final File file,
+                                           final File referenceFasta) {
+        return factory.makeCRAMWriter(header, file, referenceFasta);
+    }
+
+    private SAMFileWriter createBAMWriter(final SAMFileWriterFactory factory,
+                                 final SAMFileHeader header,
+                                 final boolean presorted,
+                                 final File outputFile,
+                                 final Integer compressionLevel) {
+        SAMFileWriter writer;
+        if(compressionLevel != null)
+            writer = factory.makeBAMWriter(header, presorted, outputFile, compressionLevel);
+        else
+            writer = factory.makeBAMWriter(header, presorted, outputFile);
+
+        // mhanna - 1 Mar 2011 - temporary hack until Picard generates an index file for empty BAMs --
+        //                     - do a pre-initialization of the BAM file.
+        try {
+            Method prepareToWriteAlignmentsMethod = writer.getClass().getDeclaredMethod("prepareToWriteAlignments");
+            if(prepareToWriteAlignmentsMethod != null) {
+                prepareToWriteAlignmentsMethod.setAccessible(true);
+                prepareToWriteAlignmentsMethod.invoke(writer);
+            }
+        }
+        catch(NoSuchMethodException ex) {
+            logger.info("Unable to call prepareToWriteAlignments method; this should be reviewed when Picard is updated.");
+        }
+        catch(IllegalAccessException ex) {
+            logger.info("Unable to access prepareToWriteAlignments method; this should be reviewed when Picard is updated.");
+        }
+        catch(InvocationTargetException ex) {
+            logger.info("Unable to invoke prepareToWriteAlignments method; this should be reviewed when Picard is updated.");
+        }
+
+        return writer;
+    }
+
+    @Override
+    public void setProgressLogger(final ProgressLoggerInterface logger) {
+        writer.setProgressLogger(logger);
+    }
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/io/storage/Storage.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/io/storage/Storage.java
new file mode 100644
index 0000000..e285484
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/io/storage/Storage.java
@@ -0,0 +1,45 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.io.storage;
+
+/**
+ * An interface representing the temporary storage of data.
+ *
+ * @author mhanna
+ * @version 0.1
+ */
+public interface Storage<StreamType> {
+    /**
+     * Writing to the temporary storage is done.  Close down the file.
+     */
+    public void close();
+
+    /**
+     * Merges the stream backing up this temporary storage into the target.
+     * @param target Target stream for the temporary storage.  May not be null.
+     */
+    public void mergeInto( StreamType target );
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/io/storage/StorageFactory.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/io/storage/StorageFactory.java
new file mode 100644
index 0000000..ac79c36
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/io/storage/StorageFactory.java
@@ -0,0 +1,92 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.io.storage;
+
+import org.broadinstitute.gatk.engine.io.stubs.OutputStreamStub;
+import org.broadinstitute.gatk.engine.io.stubs.SAMFileWriterStub;
+import org.broadinstitute.gatk.engine.io.stubs.Stub;
+import org.broadinstitute.gatk.engine.io.stubs.VariantContextWriterStub;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+
+import java.io.File;
+
+/**
+ * Construct storage of the required type.
+ *
+ * @author mhanna
+ * @version 0.1
+ */
+public class StorageFactory {
+    /**
+     * Disable storage factory construction.
+     */
+    private StorageFactory() {}
+
+    /**
+     * Gets the output storage associated with a given stub.
+     * @param stub The stub for which to find / create the right output stream.
+     * @param <T> Type of the stream to create.
+     * @return Storage object with a facade of type T.
+     */
+    public static <T> Storage<T> createStorage( Stub<T> stub ) {
+        return createStorage( stub, null );
+    }
+
+    /**
+     * Gets the output storage associated with a given stub.
+     * @param stub The stub for which to find / create the right output stream.
+     * @param file The filename to which to write the file.
+     * @param <T> Type of the stream to create.
+     * @return Storage object with a facade of type T.
+     */
+     public static <T> Storage<T> createStorage( Stub<T> stub, File file ) {
+        Storage storage;
+
+        if(stub instanceof OutputStreamStub) {
+            if( file != null )
+                storage = new OutputStreamStorage((OutputStreamStub)stub,file);
+            else
+                storage = new OutputStreamStorage((OutputStreamStub)stub);
+        }
+        else if(stub instanceof SAMFileWriterStub) {
+            if( file != null )
+                storage = new SAMFileWriterStorage((SAMFileWriterStub)stub,file);
+            else
+                storage = new SAMFileWriterStorage((SAMFileWriterStub)stub);
+        }
+        else if(stub instanceof VariantContextWriterStub) {
+            VariantContextWriterStub vcfWriterStub = (VariantContextWriterStub)stub;
+            if( file != null )
+                storage = new VariantContextWriterStorage(vcfWriterStub,file);
+            else
+                storage = new VariantContextWriterStorage(vcfWriterStub);
+        }
+        else
+            throw new ReviewedGATKException("Unsupported stub type: " + stub.getClass().getName());
+
+        return storage;
+    }
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/io/storage/VariantContextWriterStorage.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/io/storage/VariantContextWriterStorage.java
new file mode 100644
index 0000000..480c9ac
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/io/storage/VariantContextWriterStorage.java
@@ -0,0 +1,247 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.io.storage;
+
+import htsjdk.samtools.util.BlockCompressedOutputStream;
+import org.apache.log4j.Logger;
+import htsjdk.tribble.AbstractFeatureReader;
+import htsjdk.tribble.Feature;
+import htsjdk.tribble.FeatureCodec;
+import org.broadinstitute.gatk.engine.io.stubs.VariantContextWriterStub;
+import org.broadinstitute.gatk.utils.refdata.tracks.FeatureManager;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+import org.broadinstitute.gatk.utils.exceptions.UserException;
+import htsjdk.variant.bcf2.BCF2Utils;
+import htsjdk.variant.variantcontext.VariantContext;
+import htsjdk.variant.variantcontext.writer.Options;
+import htsjdk.variant.variantcontext.writer.VariantContextWriter;
+import htsjdk.variant.variantcontext.writer.VariantContextWriterFactory;
+import htsjdk.variant.vcf.VCFHeader;
+
+import java.io.*;
+import java.util.Arrays;
+import java.util.EnumSet;
+import java.util.List;
+
+/**
+ * Provides temporary and permanent storage for genotypes in VCF format.
+ *
+ * @author mhanna
+ * @version 0.1
+ */
+public class VariantContextWriterStorage implements Storage<VariantContextWriterStorage>, VariantContextWriter {
+    /**
+     * our log, which we want to capture anything from this class
+     */
+    private static final Logger logger = Logger.getLogger(VariantContextWriterStorage.class);
+
+    /** Buffer size, in bytes, for buffered writes to an uncompressed output file. */
+    private final static int BUFFER_SIZE = 1048576;
+
+    // File being written, or null when writing directly to a pre-existing stream.
+    protected final File file;
+    // Raw byte stream underlying the writer.
+    protected OutputStream stream;
+    // Writer that renders VariantContext records into the stream.
+    protected final VariantContextWriter writer;
+    // Set once close() has run; guards against writes after close.
+    boolean closed = false;
+
+    /**
+     * Constructs an object which will write directly into the output file provided by the stub.
+     * Intentionally delaying the writing of the header -- this should be filled in by the walker.
+     *
+     * Respects the isCompressed() request in stub, so if isCompressed() is true then this
+     * will create a storage output that dumps output to a BlockCompressedOutputStream.
+     *
+     * @param stub Stub to use when constructing the output file.
+     */
+    public VariantContextWriterStorage(VariantContextWriterStub stub)  {
+        if ( stub.getOutputFile() != null ) {
+            this.file = stub.getOutputFile();
+            writer = vcfWriterToFile(stub,stub.getOutputFile(),true,true);
+        }
+        else if ( stub.getOutputStream() != null ) {
+            this.file = null;
+            this.stream = stub.getOutputStream();
+            writer = VariantContextWriterFactory.create(stream,
+                    stub.getMasterSequenceDictionary(), stub.getWriterOptions(false));
+        }
+        else
+            throw new ReviewedGATKException("Unable to create target to which to write; storage was provided with neither a file nor a stream.");
+    }
+
+    /**
+     * Constructs an object which will redirect into a different file.
+     *
+     * Note that this function does not respect the isCompressed() request from the stub, in order
+     * to ensure that tmp. files can be read back in by the Tribble system, and merged with the mergeInto function.
+     *
+     * @param stub Stub to use when synthesizing file / header info.
+     * @param tempFile File into which to direct the output data.
+     */
+    public VariantContextWriterStorage(VariantContextWriterStub stub, File tempFile) {
+        this.file = tempFile;
+        this.writer = vcfWriterToFile(stub, file, false, false);
+        writer.writeHeader(stub.getVCFHeader());
+    }
+
+    /**
+     * common initialization routine for multiple constructors
+     * @param stub Stub to use when constructing the output file.
+     * @param file Target file into which to write VCF records.
+     * @param indexOnTheFly true to index the file on the fly.  NOTE: will be forced to false for compressed files.
+     * @param allowCompressed if false, we won't compress the output, even if the stub requests it.  Critical
+     *                        for creating temp. output files that will be subsequently merged, as these do not
+     *                        support compressed output
+     * @return A VCF writer for use with this class
+     */
+    private VariantContextWriter vcfWriterToFile(final VariantContextWriterStub stub,
+                                                 final File file,
+                                                 final boolean indexOnTheFly,
+                                                 final boolean allowCompressed) {
+        try {
+            // we cannot merge compressed outputs, so don't compress if allowCompressed is false,
+            // which is the case when we have a temporary output file for later merging
+            if ( allowCompressed && stub.isCompressed() )
+                stream = new BlockCompressedOutputStream(file);
+            else
+                stream = new PrintStream(new BufferedOutputStream(new FileOutputStream(file), BUFFER_SIZE));
+        }
+        catch(IOException ex) {
+            throw new UserException.CouldNotCreateOutputFile(file, "Unable to open target output stream", ex);
+        }
+
+        EnumSet<Options> options = stub.getWriterOptions(indexOnTheFly);
+        VariantContextWriter writer = VariantContextWriterFactory.create(file, this.stream, stub.getMasterSequenceDictionary(), stub.getIndexCreator(), options);
+
+        // if the stub says to test BCF, create a secondary writer to BCF and an 2 way out writer to send to both
+        // TODO -- remove me when argument generateShadowBCF is removed
+        if ( stub.alsoWriteBCFForTest() && ! VariantContextWriterFactory.isBCFOutput(file, options)) {
+            final File bcfFile = BCF2Utils.shadowBCF(file);
+            if ( bcfFile != null ) {
+                FileOutputStream bcfStream;
+                try {
+                    bcfStream = new FileOutputStream(bcfFile);
+                } catch (FileNotFoundException e) {
+                    throw new RuntimeException(bcfFile + ": Unable to create BCF writer", e);
+                }
+
+                VariantContextWriter bcfWriter = VariantContextWriterFactory.create(bcfFile, bcfStream, stub.getMasterSequenceDictionary(), stub.getIndexCreator(), options);
+                writer = new TestWriter(writer, bcfWriter);
+            }
+        }
+
+        return writer;
+    }
+
+    /**
+     * Check the return from PrintStream.checkError() if underlying stream is a java.io.PrintStream
+     * @return true if PrintStream.checkError() returned true, false otherwise
+     */
+    @Override
+    public boolean checkError(){
+        if ( stream instanceof PrintStream )
+            return ((PrintStream) stream).checkError();
+        return false;
+    }
+
+    /**
+     * Fan-out writer that forwards every call to a fixed set of delegate writers.
+     * Exists only to support the generateShadowBCF test argument (see vcfWriterToFile).
+     */
+    private final static class TestWriter implements VariantContextWriter {
+        final List<VariantContextWriter> writers;
+
+        private TestWriter(final VariantContextWriter ... writers) {
+            this.writers = Arrays.asList(writers);
+        }
+
+        @Override
+        public void writeHeader(final VCFHeader header) {
+            for ( final VariantContextWriter writer : writers ) writer.writeHeader(header);
+        }
+
+        @Override
+        public void close() {
+            for ( final VariantContextWriter writer : writers ) writer.close();
+        }
+
+        @Override
+        public void add(final VariantContext vc) {
+            for ( final VariantContextWriter writer : writers ) writer.add(vc);
+        }
+
+        /**
+         * Check the return from PrintStream.checkError() if underlying stream for a java.io.PrintStream
+         * @return false, no error since the underlying stream is not a java.io.PrintStream
+         */
+        @Override
+        public boolean checkError(){
+            return false;
+        }
+    }
+
+    /**
+     * Append a single variant record; the storage must not have been closed.
+     * @param vc record to write.
+     */
+    @Override
+    public void add(VariantContext vc) {
+        if ( closed ) throw new ReviewedGATKException("Attempting to write to a closed VariantContextWriterStorage " + vc.getStart() + " storage=" + this);
+        writer.add(vc);
+    }
+
+    /**
+     * initialize this VCF header
+     *
+     * @param header  the header
+     */
+    @Override
+    public void writeHeader(VCFHeader header) {
+        writer.writeHeader(header);
+    }
+
+    /**
+     * Close the VCF storage object.
+     */
+    @Override
+    public void close() {
+        writer.close();
+        closed = true;
+    }
+
+    /**
+     * Re-reads this (closed) temporary storage with Tribble and appends every record
+     * to the target's writer, deleting the temporary file on success.
+     * @param target storage that receives the merged records.
+     */
+    public void mergeInto(VariantContextWriterStorage target) {
+        try {
+            if ( ! closed )
+                throw new ReviewedGATKException("Writer not closed, but we are merging into the file!");
+            final String targetFilePath = target.file != null ? target.file.getAbsolutePath() : "/dev/stdin";
+            logger.debug(String.format("Merging VariantContextWriterStorage from %s into %s", file.getAbsolutePath(), targetFilePath));
+
+            // use the feature manager to determine the right codec for the tmp file
+            // that way we don't assume it's a specific type
+            final FeatureManager.FeatureDescriptor fd = new FeatureManager().getByFiletype(file);
+            if ( fd == null )
+                throw new UserException.LocalParallelizationProblem(file);
+
+            final FeatureCodec codec = fd.getCodec();
+            final AbstractFeatureReader<Feature, ?> source = AbstractFeatureReader.getFeatureReader(file.getAbsolutePath(), codec, false);
+
+            try {
+                for ( final Feature vc : source.iterator() ) {
+                    target.writer.add((VariantContext) vc);
+                }
+            } finally {
+                // always release the reader, even if converting/adding a record throws
+                source.close();
+            }
+
+            file.delete(); // this should be last to aid in debugging when the process fails
+        } catch (IOException e) {
+            throw new UserException.CouldNotReadInputFile(file, "Error reading file in VCFWriterStorage: ", e);
+        }
+    }
+
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/io/stubs/OutputStreamArgumentTypeDescriptor.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/io/stubs/OutputStreamArgumentTypeDescriptor.java
new file mode 100644
index 0000000..dd1d691
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/io/stubs/OutputStreamArgumentTypeDescriptor.java
@@ -0,0 +1,134 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.io.stubs;
+
+import org.broadinstitute.gatk.utils.commandline.*;
+import org.broadinstitute.gatk.engine.GenomeAnalysisEngine;
+import org.broadinstitute.gatk.utils.exceptions.DynamicClassResolutionException;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+
+import java.io.File;
+import java.io.OutputStream;
+import java.lang.reflect.Constructor;
+import java.lang.reflect.Type;
+
+/**
+ * Insert an OutputStreamStub instead of a full-fledged concrete OutputStream implementations.
+ */
+public class OutputStreamArgumentTypeDescriptor extends ArgumentTypeDescriptor {
+    /**
+     * The engine into which output stubs should be fed.
+     */
+    private final GenomeAnalysisEngine engine;
+
+    /**
+     * The default output stream to which output is routed when the user
+     * supplies no explicit target (typically stdout).
+     */
+    private final OutputStream defaultOutputStream;
+
+    /**
+     * Create a new OutputStream argument, notifying the given engine when that argument has been created.
+     * @param engine Engine to add output stream output to.
+     * @param defaultOutputStream Default target for output.
+     */
+    public OutputStreamArgumentTypeDescriptor(GenomeAnalysisEngine engine,OutputStream defaultOutputStream) {
+        this.engine = engine;
+        this.defaultOutputStream = defaultOutputStream;
+    }
+
+    @Override
+    public boolean supports( Class type ) {
+        // A type is supported iff it exposes a single-argument OutputStream constructor.
+        return getConstructorForClass(type) != null;
+    }
+
+    @Override
+    public boolean createsTypeDefault(ArgumentSource source) {
+        return !source.isRequired() && source.defaultsToStdout();
+    }
+
+    @Override
+    public String typeDefaultDocString(ArgumentSource source) {
+        return "stdout";
+    }
+
+    @Override
+    public Object createTypeDefault(ParsingEngine parsingEngine,ArgumentSource source, Type type) {
+        if(source.isRequired() || !source.defaultsToStdout())
+            throw new ReviewedGATKException("BUG: tried to create type default for argument type descriptor that can't support a type default.");
+        OutputStreamStub stub = new OutputStreamStub(defaultOutputStream);
+        engine.addOutput(stub);
+        // Reduce a possibly-parameterized declaration to its raw class before reflective
+        // construction, consistent with parse() below.
+        return createInstanceOfClass(makeRawTypeIfNecessary(type),stub);
+    }
+
+    @Override
+    public Object parse( ParsingEngine parsingEngine, ArgumentSource source, Type type, ArgumentMatches matches )  {
+        ArgumentDefinition definition = createDefaultArgumentDefinition(source);
+        String fileName = getArgumentValue( definition, matches ).asString();
+
+        // This parser has been passed a null filename and the GATK is not responsible for creating a type default for the object;
+        // therefore, the user must have failed to specify a type default
+        if(fileName == null && source.isRequired())
+            throw new MissingArgumentValueException(definition);
+
+        OutputStreamStub stub = new OutputStreamStub(new File(fileName));
+
+        engine.addOutput(stub);
+
+        Object result = createInstanceOfClass(makeRawTypeIfNecessary(type),stub);
+        // WARNING: Side effects required by engine!
+        parsingEngine.addTags(result,getArgumentTags(matches));
+
+        return result;
+    }
+
+    /**
+     * Retrieves the constructor for an object that takes exactly one argument: an output stream.
+     * @param type Type for which to go constructor spelunking.
+     * @return Constructor, if available.  Null, if not.
+     */
+    private Constructor<OutputStream> getConstructorForClass( Class type ) {
+        try {
+            return type.getConstructor( OutputStream.class );
+        }
+        catch( NoSuchMethodException ex ) {
+            return null;
+        }
+    }
+
+    /**
+     * Create a new instance of a class accepting a single OutputStream constructor argument.
+     * @param type Type of object to create.
+     * @param outputStream stream to pass to the new instance's constructor.
+     * @return A new instance of the OutputStream-accepting class.
+     */
+    private Object createInstanceOfClass(Class type,OutputStream outputStream) {
+        try {
+            return getConstructorForClass(type).newInstance(outputStream);
+        } catch (Exception e) {
+            throw new DynamicClassResolutionException(type, e);
+        }
+    }
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/io/stubs/OutputStreamStub.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/io/stubs/OutputStreamStub.java
new file mode 100644
index 0000000..5c82747
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/io/stubs/OutputStreamStub.java
@@ -0,0 +1,142 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.io.stubs;
+
+import org.broadinstitute.gatk.engine.arguments.GATKArgumentCollection;
+import org.broadinstitute.gatk.engine.io.OutputTracker;
+
+import java.io.File;
+import java.io.IOException;
+import java.io.OutputStream;
+
+/**
+ * A stub for routing and management of anything backed by an OutputStream.
+ *
+ * @author mhanna
+ * @version 0.1
+ */
+public class OutputStreamStub extends OutputStream implements Stub<OutputStream> {
+    /**
+     * The file that this stub should write to.  Should be passed along to
+     * whatever happens to create storage for this stub.  Might be null, if
+     * this stub connects directly to an existing stream.
+     */
+    private final File targetFile;
+
+    /**
+     * The stream that this stub should write to.  Should be passed along to
+     * whatever happens to create storage for this stub.  Might be null, if
+     * this stub connects directly to an existing file.
+     */
+    private final OutputStream targetStream;
+
+    /**
+     * Connects this stub with an external stream capable of serving the
+     * requests of the consumer of this stub.
+     */
+    private OutputTracker outputTracker = null;
+
+    /**
+     * Specify that this target output stream should write to the given file.
+     * @param targetFile Target file to which to write.  Should not be null.
+     */
+    public OutputStreamStub( File targetFile ) {
+        this.targetFile = targetFile;
+        this.targetStream = null;
+    }
+
+    /**
+     * Specify that this target output stream should write to the given stream.
+     * @param targetStream Target stream to which to write.  Should not be null.
+     */
+    public OutputStreamStub( OutputStream targetStream ) {
+        this.targetFile = null;
+        this.targetStream = targetStream;
+    }
+
+
+    /**
+     * Return the target file to which this data should be written.
+     * @return Target file.  No sanity checking will have been performed by the file object.
+     */
+    public File getOutputFile() {
+        return targetFile;
+    }
+
+    /**
+     * Return the target stream to which this data should be written.
+     * @return Target stream.  No sanity checking will have been performed by the file object.
+     */
+    public OutputStream getOutputStream() {
+        return targetStream;
+    }
+
+    /**
+     * Registers the given streamConnector with this stub.
+     * @param outputTracker The connector used to provide an appropriate stream.
+     */
+    public void register( OutputTracker outputTracker ) {
+        this.outputTracker = outputTracker;
+    }
+
+    @Override
+    public void processArguments( final GATKArgumentCollection argumentCollection ) {}
+
+    /**
+     * {@inheritDoc}
+     */
+    @Override
+    public void flush() throws IOException {
+        outputTracker.getStorage(this).flush();
+    }
+
+    /**
+     * {@inheritDoc}
+     */
+    @Override
+    public void close() throws IOException {
+        outputTracker.getStorage(this).close();
+    }
+
+    /**
+     * {@inheritDoc}
+     */
+    @Override
+    public void write( byte[] b ) throws IOException {
+        outputTracker.getStorage(this).write(b);
+    }
+
+    /**
+     * {@inheritDoc}
+     */
+    @Override
+    public void write( byte[] b, int off, int len ) throws IOException {
+        outputTracker.getStorage(this).write(b, off, len);
+    }
+
+    /**
+     * {@inheritDoc}
+     */
+    @Override
+    public void write( int b ) throws IOException {
+        outputTracker.getStorage(this).write(b);
+    }
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/io/stubs/SAMFileWriterArgumentTypeDescriptor.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/io/stubs/SAMFileWriterArgumentTypeDescriptor.java
new file mode 100644
index 0000000..27b59b2
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/io/stubs/SAMFileWriterArgumentTypeDescriptor.java
@@ -0,0 +1,106 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.io.stubs;
+
+import htsjdk.samtools.SAMFileWriter;
+import org.broadinstitute.gatk.utils.commandline.*;
+import org.broadinstitute.gatk.engine.GenomeAnalysisEngine;
+import org.broadinstitute.gatk.utils.sam.GATKSAMFileWriter;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+
+import java.io.OutputStream;
+import java.lang.reflect.Type;
+
+/**
+ * Insert a SAMFileWriterStub instead of a full-fledged concrete SAMFileWriter implementation.
+ */
+public class SAMFileWriterArgumentTypeDescriptor extends ArgumentTypeDescriptor {
+
+    /**
+     * The engine into which output stubs should be fed.
+     */
+    private final GenomeAnalysisEngine engine;
+
+    /**
+     * The default location to which data should be written if the user specifies no such location.
+     */
+    private final OutputStream defaultOutputStream;
+
+    /**
+     * Create a new SAMFileWriter argument, notifying the given engine when that argument has been created.
+     * @param engine Engine to add SAMFileWriter output to.
+     * @param defaultOutputStream the target for the data
+     */
+    public SAMFileWriterArgumentTypeDescriptor( GenomeAnalysisEngine engine, OutputStream defaultOutputStream ) {
+        this.engine = engine;
+        this.defaultOutputStream = defaultOutputStream;
+    }
+
+    @Override
+    public boolean supports( Class type ) {
+        return SAMFileWriter.class.equals(type) || GATKSAMFileWriter.class.equals(type);
+    }
+
+    @Override
+    public boolean createsTypeDefault(ArgumentSource source) {
+        return !source.isRequired() && source.defaultsToStdout();
+    }
+
+    @Override
+    public String typeDefaultDocString(ArgumentSource source) {
+        return "stdout";
+    }
+
+    @Override
+    public Object createTypeDefault(ParsingEngine parsingEngine,ArgumentSource source, Type type) {
+        if(source.isRequired() || !source.defaultsToStdout())
+            throw new ReviewedGATKException("BUG: tried to create type default for argument type descriptor that can't support a type default.");
+        SAMFileWriterStub stub = new SAMFileWriterStub(engine,defaultOutputStream);
+        engine.addOutput(stub);
+        return stub;
+    }
+
+    @Override
+    public Object parse( ParsingEngine parsingEngine, ArgumentSource source, Type type, ArgumentMatches matches )  {
+        ArgumentDefinition bamArgumentDefinition = createDefaultArgumentDefinition(source);
+        ArgumentMatchValue writerFileName = getArgumentValue( bamArgumentDefinition, matches );
+
+        SAMFileWriterStub stub = null;
+
+        if (writerFileName != null &&  writerFileName.asFile() != null ) {
+            stub = new SAMFileWriterStub(engine, writerFileName.asFile());
+
+            // WARNING: Side effects required by engine!
+            parsingEngine.addTags(stub,getArgumentTags(matches));
+            engine.addOutput(stub);
+        }
+
+        // NOTE(review): unlike OutputStreamArgumentTypeDescriptor.parse(), a missing value
+        // here yields null instead of MissingArgumentValueException -- confirm intentional.
+        return stub;
+    }
+
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/io/stubs/SAMFileWriterStub.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/io/stubs/SAMFileWriterStub.java
new file mode 100644
index 0000000..3dca0e8
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/io/stubs/SAMFileWriterStub.java
@@ -0,0 +1,373 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.io.stubs;
+
+import htsjdk.samtools.SAMFileHeader;
+import htsjdk.samtools.SAMFileWriter;
+import htsjdk.samtools.SAMRecord;
+import htsjdk.samtools.util.ProgressLoggerInterface;
+import org.broadinstitute.gatk.engine.GenomeAnalysisEngine;
+import org.broadinstitute.gatk.engine.arguments.GATKArgumentCollection;
+import org.broadinstitute.gatk.engine.io.OutputTracker;
+import org.broadinstitute.gatk.utils.io.ReferenceBacked;
+import org.broadinstitute.gatk.utils.sam.GATKSAMFileWriter;
+import org.broadinstitute.gatk.engine.iterators.ReadTransformer;
+import org.broadinstitute.gatk.utils.baq.BAQ;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+import org.broadinstitute.gatk.utils.exceptions.UserException;
+import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
+
+import java.io.File;
+import java.io.OutputStream;
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * A stub for routing and management of SAM file reading and writing.
+ *
+ * @author mhanna
+ * @version 0.1
+ */
+public class SAMFileWriterStub implements Stub<SAMFileWriter>, GATKSAMFileWriter, ReferenceBacked {
+    /**
+     * Engine to use for collecting attributes for the output SAM file.
+     */
+    private final GenomeAnalysisEngine engine;
+
+    /**
+     * A header supplied by the user that overrides the merged header from the input BAM.
+     */
+    private SAMFileHeader headerOverride = null;
+
+    /**
+     * The sam file that this stub should write to.  Should be passed along to
+     * whatever happens to create the StreamConnector.
+     */
+    private final File samFile;
+
+    /**
+     * The reference file for stub.
+     */
+    private File referenceFile;
+
+    /**
+     * The target output stream, to be used in place of the SAM file.
+     */
+    private final OutputStream samOutputStream;
+
+    /**
+     * The compression level to apply when writing this file.
+     */
+    private Integer compressionLevel = null;
+
+    /**
+     * Should the GATK index the output BAM on-the-fly?
+     */
+    private boolean indexOnTheFly = false;
+
+    /**
+     * Should the GATK generate an md5 for the output BAM?
+     */
+    private boolean generateMD5 = false;
+
+    /**
+     * Should this BAM be presorted?
+     */
+    private boolean presorted = true;
+
+    /**
+     * How many records should the BAM writer store in RAM while
+     * sorting the BAM on-the-fly?
+     */
+    private Integer maxRecordsInRam = null;
+
+    /**
+     * Connects this stub with an external stream capable of serving the
+     * requests of the consumer of this stub.
+     */
+    private OutputTracker outputTracker = null;
+
+    /**
+     * Has the write started?  If so, throw an exception if someone tries to
+     * change write parameters to the file (compression level, presorted flag,
+     * header, etc).
+     */
+    private boolean writeStarted = false;
+
+
+    /**
+     * HMM for BAQ, if needed
+     */
+    BAQ baqHMM = new BAQ();
+
+    /**
+     * Should we simplify the BAM file while writing it out?
+     */
+    private boolean simplifyBAM = false;
+
+    private List<ReadTransformer> onOutputReadTransformers = null;
+
+    /**
+     * Create a new stub given the requested SAM file and compression level.
+     * @param engine source of header data, maybe other data about input files.
+     * @param samFile SAM file to (ultimately) create.
+     */
+    public SAMFileWriterStub( GenomeAnalysisEngine engine, File samFile ) {
+        this(engine, samFile, null);
+    }
+
+    /**
+     * Create a new stub given the requested SAM file and compression level.
+     * @param engine source of header data, maybe other data about input files.
+     * @param stream Output stream to which data should be written.
+     */
+    public SAMFileWriterStub( GenomeAnalysisEngine engine, OutputStream stream ) {
+        this(engine, null, stream);
+    }
+
+    private SAMFileWriterStub(final GenomeAnalysisEngine engine, final File samFile, final OutputStream stream) {
+        this.engine = engine;
+        this.samFile = samFile;
+        this.samOutputStream = stream;
+    }
+
+    /**
+     * Creates a SAMFileWriter using all of the features currently set in the engine (command line arguments, ReadTransformers, etc)
+     * @param file the filename to write to
+     * @param engine the engine
+     * @return a SAMFileWriter with the correct options set
+     */
+    public static SAMFileWriter createSAMFileWriter(final String file, final GenomeAnalysisEngine engine) {
+        final SAMFileWriterStub output = new SAMFileWriterStub(engine, new File(file));
+        output.processArguments(engine.getArguments());
+        return output;
+    }
+
+    /**
+     *  As {@link #createSAMFileWriter(String, org.broadinstitute.gatk.engine.GenomeAnalysisEngine)}, but also sets the header
+     */
+    public static SAMFileWriter createSAMFileWriter(final String file, final GenomeAnalysisEngine engine, final SAMFileHeader header) {
+        final SAMFileWriterStub output = (SAMFileWriterStub) createSAMFileWriter(file, engine);
+        output.writeHeader(header);
+        return output;
+    }
+
+    /**
+     * Retrieves the SAM file to (ultimately) be created.
+     * @return The SAM file.  Must not be null.
+     */
+    public File getOutputFile() {
+        return samFile;
+    }
+
+    public boolean simplifyBAM() {
+        return simplifyBAM;
+    }
+
+    public void setSimplifyBAM(boolean v) {
+        simplifyBAM = v;
+    }
+
+    public OutputStream getOutputStream() {
+        return samOutputStream;
+    }
+
+    @Override
+    public File getReferenceFile() {
+        return referenceFile;
+    }
+
+    @Override
+    public void setReferenceFile(final File referenceFile) {
+        this.referenceFile = referenceFile;
+    }
+
+    /**
+     * Retrieves the header to use when creating the new SAM file.
+     * @return header to use when creating the new SAM file.
+     */
+    public SAMFileHeader getFileHeader() {
+        return headerOverride != null ? headerOverride : engine.getSAMFileHeader();
+    }
+
+    /**
+     * Retrieves the desired compression level for the output file.
+     * @return The current compression level.  Could be null if the user doesn't care.
+     */
+    public Integer getCompressionLevel() {
+        return compressionLevel;
+    }
+
+    /**
+     * Sets the desired compression level.
+     * @param compressionLevel The suggested compression level.
+     */
+    public void setCompressionLevel( Integer compressionLevel ) {
+        if(writeStarted)
+            throw new ReviewedGATKException("Attempted to change the compression level of a file with alignments already in it.");
+        this.compressionLevel = compressionLevel;
+    }
+
+    /**
+     * Gets whether to index this output stream on-the-fly.
+     * @return True means create an index.  False means skip index creation.
+     */
+    public Boolean getIndexOnTheFly() {
+        return indexOnTheFly;
+    }
+
+    /**
+     * Controls whether to index this output stream on-the-fly.
+     * @param indexOnTheFly True means create an index.  False means skip index creation.
+     */
+    public void setIndexOnTheFly( boolean indexOnTheFly ) {
+        if(writeStarted)
+            throw new UserException("Attempted to index a BAM on the fly of a file with alignments already in it.");
+        this.indexOnTheFly = indexOnTheFly;
+    }
+
+    /**
+     * Gets whether to generate an md5 on-the-fly for this BAM.
+     * @return True generates the md5.  False means skip md5 generation.
+     */
+    public Boolean getGenerateMD5() {
+        return generateMD5;
+    }
+
+    /**
+     * Sets whether to generate an md5 on-the-fly for this BAM.
+     * @param generateMD5   True generates the md5.  False means skip md5 generation.
+     */
+    public void setGenerateMD5(boolean generateMD5) {
+        if(writeStarted)
+            throw new UserException("Attempted to turn on md5 generation for BAM file with alignments already in it.");        
+        this.generateMD5 = generateMD5;
+    }
+
+    /**
+     * Whether the BAM file to create is actually presorted.
+     * @return True if the BAM file is presorted.  False otherwise.
+     */
+    public boolean isPresorted() {
+        return this.presorted;
+    }
+
+    /**
+     * Set Whether the BAM file to create is actually presorted.
+     * @param presorted True if the BAM file is presorted.  False otherwise.
+     */
+    public void setPresorted(boolean presorted) {
+        if(writeStarted)
+            throw new ReviewedGATKException("Attempted to change the presorted state of a file with alignments already in it.");
+        this.presorted = presorted;
+    }
+
+    /**
+     * Get the maximum number of reads to hold in RAM when sorting a BAM on-the-fly.
+     * @return Max records in RAM, or null if unset.
+     */
+    public Integer getMaxRecordsInRam() {
+        return this.maxRecordsInRam;
+    }
+
+    /**
+     * Sets the maximum number of reads to hold in RAM when sorting a BAM on-the-fly.
+     * @param maxRecordsInRam Max number of records in RAM.
+     */
+    public void setMaxRecordsInRam(int maxRecordsInRam) {
+        if(writeStarted)
+            throw new ReviewedGATKException("Attempted to change the max records in RAM of a file with alignments already in it.");
+        this.maxRecordsInRam = maxRecordsInRam;
+    }
+
+    /**
+     * Registers the given streamConnector with this stub.
+     * @param outputTracker The connector used to provide an appropriate stream.
+     */
+    public void register( OutputTracker outputTracker ) {
+        this.outputTracker = outputTracker;
+    }
+
+    @Override
+    public void processArguments( final GATKArgumentCollection argumentCollection ) {
+        if (argumentCollection.bamCompression != null)
+            setCompressionLevel(argumentCollection.bamCompression);
+        setGenerateMD5(argumentCollection.enableBAMmd5);
+        setIndexOnTheFly(!argumentCollection.disableBAMIndexing);
+        setSimplifyBAM(argumentCollection.simplifyBAM);
+
+    }
+
+    /**
+     * Use the given header as the target for this writer.
+     * @param header The header to write.
+     */
+    public void writeHeader(SAMFileHeader header) {
+        if(writeStarted)
+            throw new ReviewedGATKException("Attempted to change the header of a file with alignments already in it.");
+        this.headerOverride = header;
+    }
+
+    private void initializeReadTransformers() {
+        this.onOutputReadTransformers = new ArrayList<>(engine.getReadTransformers().size());
+        for ( final ReadTransformer transformer : engine.getReadTransformers() ) {
+            if ( transformer.getApplicationTime() == ReadTransformer.ApplicationTime.ON_OUTPUT )
+                onOutputReadTransformers.add(transformer);
+        }
+    }
+
+    /**
+     * {@inheritDoc}
+     */
+    public void addAlignment( final SAMRecord readIn ) {
+        if ( onOutputReadTransformers == null )
+            initializeReadTransformers();
+
+        GATKSAMRecord workingRead = (GATKSAMRecord)readIn;
+
+        // run on output read transformers
+        for ( final ReadTransformer transform : onOutputReadTransformers )
+            workingRead = transform.apply(workingRead);
+
+        writeStarted = true;
+        outputTracker.getStorage(this).addAlignment(workingRead);
+    }
+
+    /**
+     * {@inheritDoc}
+     */
+    public void close() {
+        outputTracker.getStorage(this).close();    
+    }
+
+    /**
+     * @throws java.lang.UnsupportedOperationException No progress logging in this implementation.
+     */
+    @Override
+    public void setProgressLogger(final ProgressLoggerInterface logger) {
+        throw new UnsupportedOperationException("Progress logging not supported");
+    }
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/io/stubs/SAMReaderArgumentTypeDescriptor.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/io/stubs/SAMReaderArgumentTypeDescriptor.java
new file mode 100644
index 0000000..8b9b621
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/io/stubs/SAMReaderArgumentTypeDescriptor.java
@@ -0,0 +1,77 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.io.stubs;
+
+import htsjdk.samtools.SAMFileReader;
+import org.broadinstitute.gatk.utils.commandline.*;
+import org.broadinstitute.gatk.engine.GenomeAnalysisEngine;
+import org.broadinstitute.gatk.utils.exceptions.UserException;
+import org.broadinstitute.gatk.utils.sam.SAMReaderBuilder;
+
+import java.lang.reflect.Type;
+
+/**
+ * Describe how to parse SAMReaders.
+ */
+public class SAMReaderArgumentTypeDescriptor extends ArgumentTypeDescriptor {
+    /**
+     * The engine into which input reader builders should be fed.
+     */
+    private final GenomeAnalysisEngine engine;
+
+    /**
+     * Create a new SAMFileReader argument, notifying the given engine when that argument has been created.
+     * @param engine engine to notify of new inputs; must not be null.
+     */
+    public SAMReaderArgumentTypeDescriptor(GenomeAnalysisEngine engine) {
+        this.engine = engine;
+    }
+
+    @Override
+    public boolean supports( Class type ) {
+        return SAMFileReader.class.isAssignableFrom(type);
+    }
+
+    @Override
+    public Object parse( ParsingEngine parsingEngine, ArgumentSource source, Type type, ArgumentMatches matches ) {
+        SAMReaderBuilder builder = new SAMReaderBuilder();
+
+        ArgumentMatchValue readerFileName = getArgumentValue( createDefaultArgumentDefinition(source), matches );
+
+        if( readerFileName == null )
+            throw new UserException.CommandLineException("A SAM/BAM input argument was specified, but no file name was supplied with it.");
+
+        builder.setSAMFile(readerFileName.asFile());
+
+        // WARNING: Side effect required by engine: register the input builder so the engine can construct the reader.
+        engine.addInput(source, builder);
+
+        // MASSIVE KLUDGE!  SAMFileReader is tricky to implement and we don't yet have a stub.  Return null, then
+        // let the output tracker load it in.
+        // TODO: Add a stub for SAMReader.
+        return null;
+    }
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/io/stubs/Stub.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/io/stubs/Stub.java
new file mode 100644
index 0000000..a8b8c3f
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/io/stubs/Stub.java
@@ -0,0 +1,69 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.io.stubs;
+
+import org.broadinstitute.gatk.engine.arguments.GATKArgumentCollection;
+import org.broadinstitute.gatk.engine.io.OutputTracker;
+
+import java.io.File;
+import java.io.OutputStream;
+
+/**
+ * A stub used for managing IO. Acts as a proxy for IO streams
+ * not yet created or streams that need significant external
+ * management.
+ *
+ * @author mhanna
+ * @version 0.1
+ */
+public interface Stub<StreamType> {
+    /**
+     * Provides a facility to register this stream with the given
+     * OutputTracker.  The stub should route each output method
+     * to the specified tracker.
+     * @param outputTracker The tracker used to provide an appropriate stream.
+     */
+    public void register( OutputTracker outputTracker );
+
+    /**
+     * Provides a mechanism for uniformly processing command-line arguments
+     * that are important for file processing.  For example, this method
+     * might pass on the compression value specified by the user to
+     * a SAMFileWriter
+     * @param argumentCollection The arguments to be processed
+     */
+    public void processArguments( final GATKArgumentCollection argumentCollection );
+
+    /**
+     * Returns the OutputStream represented by this stub or null if not available.
+     */
+    public OutputStream getOutputStream();
+
+    /**
+     * Returns the File represented by this stub or null if not available.
+     */
+    public File getOutputFile();
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/io/stubs/VCFWriterArgumentTypeDescriptor.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/io/stubs/VCFWriterArgumentTypeDescriptor.java
new file mode 100644
index 0000000..4219930
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/io/stubs/VCFWriterArgumentTypeDescriptor.java
@@ -0,0 +1,138 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.io.stubs;
+
+import org.broadinstitute.gatk.utils.commandline.*;
+import org.broadinstitute.gatk.engine.GenomeAnalysisEngine;
+import htsjdk.variant.variantcontext.writer.VariantContextWriter;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+
+import java.io.File;
+import java.io.OutputStream;
+import java.lang.reflect.Type;
+import java.util.Collection;
+
+/**
+ * Injects new command-line arguments into the system providing support for the genotype writer.
+ *
+ * @author mhanna
+ * @version 0.1
+ */
+public class VCFWriterArgumentTypeDescriptor extends ArgumentTypeDescriptor {
+
+    /**
+     * The engine into which output stubs should be fed.
+     */
+    private final GenomeAnalysisEngine engine;
+
+    /**
+      * The default location to which data should be written if the user specifies no such location.
+      */
+    private final OutputStream defaultOutputStream;
+
+    /**
+     * The sources into which arguments were injected.
+     */
+    private final Collection<Object> argumentSources;
+
+    /**
+     * Create a new GenotypeWriter argument, notifying the given engine when that argument has been created.
+     * @param engine the engine to be notified.
+     * @param defaultOutputStream the default output stream to be written to if nothing else is specified.
+     * @param argumentSources sources from which command-line arguments should be derived.
+     */
+    public VCFWriterArgumentTypeDescriptor(GenomeAnalysisEngine engine, OutputStream defaultOutputStream, Collection<Object> argumentSources) {
+        this.engine = engine;
+        this.defaultOutputStream = defaultOutputStream;
+        this.argumentSources = argumentSources;
+    }
+
+    /**
+     * Reports whether this ArgumentTypeDescriptor supports the given type.
+     * @param type The type to check.
+     * @return True if the argument is a GenotypeWriter.
+     */
+    @Override
+    public boolean supports( Class type ) {
+        return VariantContextWriter.class.equals(type);
+    }
+
+    /**
+     * This descriptor creates a type default (stdout) only for optional arguments that default to stdout.
+     * @return true if the argument is optional and defaults to stdout; false otherwise.
+     */
+    @Override
+    public boolean createsTypeDefault(ArgumentSource source) {
+        return !source.isRequired() && source.defaultsToStdout();
+    }
+
+    @Override
+    public String typeDefaultDocString(ArgumentSource source) {
+        return "stdout";
+    }
+
+    @Override
+    public Object createTypeDefault(ParsingEngine parsingEngine, ArgumentSource source, Type type) {
+        if(source.isRequired() || !source.defaultsToStdout())
+            throw new ReviewedGATKException("BUG: tried to create type default for argument type descriptor that can't support a type default.");        
+        VariantContextWriterStub stub = new VariantContextWriterStub(engine, defaultOutputStream, argumentSources);
+        engine.addOutput(stub);
+        return stub;
+    }
+
+    /**
+     * Convert the given argument matches into a single object suitable for feeding into the ArgumentSource.
+     * @param source Source for this argument.
+     * @param type not used
+     * @param matches Matches that match with this argument.
+     * @return Transform from the matches into the associated argument.
+     */
+    @Override
+    public Object parse( ParsingEngine parsingEngine, ArgumentSource source, Type type, ArgumentMatches matches )  {
+        ArgumentDefinition defaultArgumentDefinition = createDefaultArgumentDefinition(source);
+        // Get the filename for the genotype file, if it exists.  If not, we'll need to send output to out.
+        ArgumentMatchValue writerFileName = getArgumentValue(defaultArgumentDefinition,matches);
+        File writerFile = writerFileName != null ? writerFileName.asFile() : null;
+
+        // This parser has been passed a null filename and the GATK is not responsible for creating a type default for the object;
+        // therefore, the user must have failed to specify a type default
+        if(writerFile == null && source.isRequired())
+            throw new MissingArgumentValueException(defaultArgumentDefinition);
+
+        // Create a stub for the given object.
+        final VariantContextWriterStub stub = (writerFile != null)
+                ? new VariantContextWriterStub(engine, writerFile, argumentSources)
+                : new VariantContextWriterStub(engine, defaultOutputStream, argumentSources);
+
+        stub.setCompressed(isCompressed(writerFileName == null ? null: writerFileName.asString()));
+
+        // WARNING: Side effects required by engine!
+        parsingEngine.addTags(stub,getArgumentTags(matches));
+        engine.addOutput(stub);
+
+        return stub;
+    }
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/io/stubs/VariantContextWriterStub.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/io/stubs/VariantContextWriterStub.java
new file mode 100644
index 0000000..a35c978
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/io/stubs/VariantContextWriterStub.java
@@ -0,0 +1,313 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.io.stubs;
+
+import htsjdk.samtools.SAMSequenceDictionary;
+import htsjdk.tribble.index.IndexCreator;
+import org.broadinstitute.gatk.engine.GenomeAnalysisEngine;
+import org.broadinstitute.gatk.engine.arguments.GATKArgumentCollection;
+import org.broadinstitute.gatk.engine.io.OutputTracker;
+import org.broadinstitute.gatk.engine.GATKVCFUtils;
+import htsjdk.variant.variantcontext.VariantContext;
+import htsjdk.variant.variantcontext.writer.Options;
+import htsjdk.variant.variantcontext.writer.VariantContextWriter;
+import htsjdk.variant.variantcontext.writer.VariantContextWriterFactory;
+import htsjdk.variant.vcf.VCFHeader;
+import htsjdk.variant.vcf.VCFHeaderLine;
+
+import java.io.File;
+import java.io.OutputStream;
+import java.io.PrintStream;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.EnumSet;
+import java.util.List;
+
+/**
+ * A stub for routing and management of genotype reading and writing.
+ *
+ * @author ebanks
+ * @version 0.1
+ */
+public class VariantContextWriterStub implements Stub<VariantContextWriter>, VariantContextWriter {
+    public final static boolean UPDATE_CONTIG_HEADERS = true;
+
+    /**
+     * The engine, central to the GATK's processing.
+     */
+    private final GenomeAnalysisEngine engine;
+
+    /**
+     * The file that this stub should write to.  Should be mutually
+     * exclusive with genotypeStream.
+     */
+    private final File genotypeFile;
+
+    /**
+     * The output stream to which stub data should be written.  Will be
+     * mutually exclusive with genotypeFile.
+     */
+    private final PrintStream genotypeStream;
+
+    /**
+     * A hack: push the argument sources into the VCF header so that the VCF header
+     * can rebuild the command-line arguments.
+     */
+    private final Collection<Object> argumentSources;
+
+    /**
+     * Which IndexCreator to use
+     */
+    private final IndexCreator indexCreator;
+
+    /**
+     * The cached VCF header (initialized to null)
+     */
+    private VCFHeader vcfHeader = null;
+
+    /**
+     * Should we emit a compressed output stream?
+     */
+    private boolean isCompressed = false;
+
+    /**
+     * Should the header be written out?  A hidden argument.
+     */
+    private boolean skipWritingCommandLineHeader = false;
+
+    /**
+     * Should we not write genotypes even when provided?
+     */
+    private boolean doNotWriteGenotypes = false;
+
+    /**
+     * Should we force BCF writing regardless of the file extension?
+     */
+    private boolean forceBCF = false;
+
+    /**
+     * Should we write all of the fields in the FORMAT field, even if missing fields could be trimmed?
+     */
+    private boolean writeFullFormatField = false;
+
+    /**
+     * Connects this stub with an external stream capable of serving the
+     * requests of the consumer of this stub.
+     */
+    protected OutputTracker outputTracker = null;
+
+    /**
+     * Create a new stub given the requested file.
+     *
+     * @param engine engine.
+     * @param genotypeFile  file to (ultimately) create.
+     * @param argumentSources sources.
+     */
+    public VariantContextWriterStub(GenomeAnalysisEngine engine, File genotypeFile, Collection<Object> argumentSources) {
+        this.engine = engine;
+        this.genotypeFile = genotypeFile;
+        this.genotypeStream = null;
+
+        this.indexCreator = GATKVCFUtils.makeIndexCreator(engine.getArguments().variant_index_type, engine.getArguments().variant_index_parameter,
+                genotypeFile, null);
+        this.argumentSources = argumentSources;
+    }
+
+    /**
+     * Create a new stub given the requested file.
+     *
+     * @param engine engine.
+     * @param genotypeStream  stream to (ultimately) write.
+     * @param argumentSources sources.
+     */
+    public VariantContextWriterStub(GenomeAnalysisEngine engine, OutputStream genotypeStream, Collection<Object> argumentSources) {
+        this.engine = engine;
+        this.genotypeFile = null;
+        this.genotypeStream = new PrintStream(genotypeStream);
+        this.indexCreator = null;
+        this.argumentSources = argumentSources;
+    }
+
+    /**
+     * Retrieves the file to (ultimately) be created.
+     * @return The file.  Can be null if genotypeStream is not.
+     */
+    public File getOutputFile() {
+        return genotypeFile;
+    }
+
+    /**
+     * Retrieves the output stream to which to (ultimately) write.
+     * @return The stream.  Can be null if genotypeFile is not.
+     */
+    public OutputStream getOutputStream() {
+        return genotypeStream;
+    }
+
+    /**
+     * @return whether this stub's output is flagged as compressed.
+     */
+    public boolean isCompressed() {
+        return isCompressed;
+    }
+
+    /**
+     * Flags this stub's output as compressed (or not).
+     */
+    public void setCompressed(final boolean compressed) {
+        isCompressed = compressed;
+    }
+
+    /**
+     * Sets whether the GATK command-line header line should be omitted from the written header.
+     */
+    public void setSkipWritingCommandLineHeader(final boolean skipWritingCommandLineHeader) {
+        this.skipWritingCommandLineHeader = skipWritingCommandLineHeader;
+    }
+
+    /**
+     * Sets whether genotypes should be dropped from records (sites-only output).
+     */
+    public void setDoNotWriteGenotypes(final boolean doNotWriteGenotypes) {
+        this.doNotWriteGenotypes = doNotWriteGenotypes;
+    }
+
+    /**
+     * Sets whether BCF output should be forced regardless of the file extension.
+     */
+    public void setForceBCF(final boolean forceBCF) {
+        this.forceBCF = forceBCF;
+    }
+
+    /**
+     * Sets whether all FORMAT fields should be written even when trimmable.
+     */
+    public void setWriteFullFormatField(final boolean writeFullFormatField) {
+        this.writeFullFormatField = writeFullFormatField;
+    }
+
+    /**
+     * @return the index creator for file-based output; null for stream-based stubs.
+     */
+    public IndexCreator getIndexCreator() {
+        return indexCreator;
+    }
+
+    /**
+     * Gets the master sequence dictionary from the engine associated with this stub.
+     * @see GenomeAnalysisEngine#getMasterSequenceDictionary()
+     * @return the master sequence dictionary from the engine associated with this stub
+     */
+    public SAMSequenceDictionary getMasterSequenceDictionary() {
+        return engine.getMasterSequenceDictionary();
+    }
+
+    /**
+     * Convenience overload: writer options without on-the-fly indexing.
+     * @return the options implied by this stub's current flags.
+     */
+    public EnumSet<Options> getWriterOptions() {
+        return getWriterOptions(false);
+    }
+
+    /**
+     * Assembles the set of writer options implied by this stub's flags and the engine state.
+     *
+     * @param indexOnTheFly whether the writer should build an index while writing.
+     * @return the (possibly empty) set of options for the writer factory.
+     */
+    public EnumSet<Options> getWriterOptions(boolean indexOnTheFly) {
+        final EnumSet<Options> options = EnumSet.noneOf(Options.class);
+
+        if ( doNotWriteGenotypes )
+            options.add(Options.DO_NOT_WRITE_GENOTYPES);
+        if ( engine.lenientVCFProcessing() )
+            options.add(Options.ALLOW_MISSING_FIELDS_IN_HEADER);
+        if ( indexOnTheFly )
+            options.add(Options.INDEX_ON_THE_FLY);
+        if ( writeFullFormatField )
+            options.add(Options.WRITE_FULL_FORMAT_FIELD);
+
+        // BCF is forced either explicitly or when the target file carries a BCF extension.
+        final boolean bcfRequested = forceBCF ||
+                (getOutputFile() != null && VariantContextWriterFactory.isBCFOutput(getOutputFile()));
+        if ( bcfRequested )
+            options.add(Options.FORCE_BCF);
+
+        return options;
+    }
+
+    /**
+     * Retrieves the header to use when creating the new file.
+     * @return header to use when creating the new file; null until writeHeader() is called.
+     */
+    public VCFHeader getVCFHeader() {
+        return vcfHeader;
+    }
+
+    /**
+     * Registers the given streamConnector with this stub.  Must be called before
+     * writeHeader()/add()/close(), which all delegate through the tracker.
+     * @param outputTracker The connector used to provide an appropriate stream.
+     */
+    public void register( OutputTracker outputTracker ) {
+        this.outputTracker = outputTracker;
+    }
+
+    /**
+     * Applies the engine-level VCF output arguments to this stub: sites-only output,
+     * command-line header suppression, forced BCF, and untrimmed FORMAT fields.
+     */
+    @Override
+    public void processArguments( final GATKArgumentCollection argumentCollection ) {
+        setDoNotWriteGenotypes(argumentCollection.sitesOnlyVCF);
+        setSkipWritingCommandLineHeader(argumentCollection.disableCommandLineInVCF);
+        setForceBCF(argumentCollection.forceBCFOutput);
+        setWriteFullFormatField(argumentCollection.neverTrimVCFFormatField);
+    }
+
+    /**
+     * Records the header, optionally augments it with the GATK command-line header line
+     * and updated contig lines, then forwards it to the registered storage.
+     *
+     * @param header header to write; cached in vcfHeader for later retrieval.
+     */
+    @Override
+    public void writeHeader(VCFHeader header) {
+        vcfHeader = header;
+
+        if ( header.isWriteEngineHeaders() ) {
+            // skip writing the command line header if requested
+            if ( ! skipWritingCommandLineHeader && header.isWriteCommandLine() ) {
+                // Always add the header line, as the current format allows multiple entries
+                final VCFHeaderLine commandLineArgHeaderLine = GATKVCFUtils.getCommandLineArgumentHeaderLine(vcfHeader, engine, argumentSources);
+                vcfHeader.addMetaDataLine(commandLineArgHeaderLine);
+            }
+
+            if ( UPDATE_CONTIG_HEADERS )
+                vcfHeader = GATKVCFUtils.withUpdatedContigs(vcfHeader, engine);
+        }
+
+        // Delegate the (possibly augmented) header to the tracker-provided storage.
+        outputTracker.getStorage(this).writeHeader(vcfHeader);
+    }
+
+    /**
+     * {@inheritDoc}
+     *
+     * Forwards the variant to the storage registered via the output tracker.
+     */
+    public void add(VariantContext vc) {
+        outputTracker.getStorage(this).add(vc);
+    }
+
+    /**
+     * {@inheritDoc}
+     *
+     * Closes the tracked storage, flushing any buffered output.
+     */
+    @Override
+    public void close() {
+        outputTracker.getStorage(this).close();
+    }
+
+    /**
+     * Gets a string representation of this object: the absolute output path, or
+     * "(Stream)" when this stub writes to a stream rather than a file.
+     * @return a string representation of this object.
+     */
+    @Override
+    public String toString() {
+        final File output = getOutputFile();
+        if ( output == null )
+            return "(Stream)";
+        return output.getAbsolutePath();
+    }
+
+    /**
+     * Should we also write a BCF file alongside our VCF file for testing
+     *
+     * TODO -- remove me when argument generateShadowBCF is removed
+     *
+     * @return true only when running single-threaded, writing uncompressed output
+     *         to disk, and the generateShadowBCF argument was supplied
+     */
+    public boolean alsoWriteBCFForTest() {
+        return engine.getArguments().numberOfDataThreads == 1 && // only works single threaded
+                ! isCompressed() && // for non-compressed outputs
+                getOutputFile() != null && // that are going to disk
+                engine.getArguments().generateShadowBCF; // and we actually want to do it
+    }
+
+    /**
+     * Check the return from PrintStream.checkError() if underlying stream for a java.io.PrintStream
+     * @return true if PrintStream.checkError() returned true, false otherwise
+     *         (including when this stub is file-backed and has no stream at all)
+     */
+    public boolean checkError(){
+        // Guard against NPE: the file-based constructor leaves genotypeStream null.
+        return genotypeStream != null && genotypeStream.checkError();
+    }
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/iterators/BoundedReadIterator.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/iterators/BoundedReadIterator.java
new file mode 100644
index 0000000..7ab11e3
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/iterators/BoundedReadIterator.java
@@ -0,0 +1,160 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.iterators;
+
+import htsjdk.samtools.MergingSamRecordIterator;
+import htsjdk.samtools.SAMFileHeader;
+import htsjdk.samtools.SAMRecord;
+import org.broadinstitute.gatk.utils.iterators.GATKSAMIterator;
+
+import java.util.Iterator;
+
+/*
+ * Copyright (c) 2009 The Broad Institute
+ *
+ * Permission is hereby granted, free of charge, to any person
+ * obtaining a copy of this software and associated documentation
+ * files (the "Software"), to deal in the Software without
+ * restriction, including without limitation the rights to use,
+ * copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the
+ * Software is furnished to do so, subject to the following
+ * conditions:
+ *
+ * The above copyright notice and this permission notice shall be
+ * included in all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+ * OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+ * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+ * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+ * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+ * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+ * OTHER DEALINGS IN THE SOFTWARE.
+ */
+
+
+/**
+ * @author aaron
+ * @version 1.0
+ * @date Apr 14, 2009
+ * <p/>
+ * Class BoundedReadIterator
+ * <p/>
+ * This class implements a read iterator that is bounded by the number of reads
+ * it will produce over the iteration.
+ */
+public class BoundedReadIterator implements GATKSAMIterator {
+
+    // the maximum number of reads this iterator will produce (the bound)
+    final private long readCount;
+    // how many reads have been pulled from the underlying iterator so far
+    private long currentCount = 0;
+
+    // the iterator we want to decorate
+    private final GATKSAMIterator iterator;
+
+    // our unmapped read flag; when true, hitting an unmapped read (alignment start == 0)
+    // terminates iteration in hasNext()
+    private boolean doNotUseThatUnmappedReadPile = false;
+
+    /**
+     * The next read that we've buffered.  Null indicates that there's
+     * nothing in the buffer (not that there isn't a next read).
+     */
+    private SAMRecord record = null;
+
+    /**
+     * constructor
+     * @param iter the underlying read iterator to decorate
+     * @param readCount maximum number of reads to produce
+     */
+    public BoundedReadIterator(GATKSAMIterator iter, long readCount) {
+        this.iterator = iter;
+        this.readCount = readCount;
+    }
+
+    // NOTE(review): the stored value is NOT negated — useUnmappedReads(true) sets the
+    // "do not use" flag to true, which makes hasNext() stop at unmapped reads.  This
+    // looks inverted relative to the method name; confirm intended semantics with callers.
+    public void useUnmappedReads(boolean useThem) {
+        this.doNotUseThatUnmappedReadPile = useThem;
+    }
+
+    public SAMFileHeader getHeader() {
+        // todo: this is bad, we need an interface out there for samrecords that supports getting the header,
+        // regardless of the merging
+        if (iterator instanceof MergingSamRecordIterator)
+            return ((MergingSamRecordIterator)iterator).getMergedHeader();
+        else
+            return null;
+    }
+
+    /**
+     * Do we have a next? If the iterator has a read and we're not over the read
+     * count, then yes
+     *
+     * NOTE(review): when an unmapped read is rejected below, it stays buffered in
+     * 'record', so a subsequent hasNext() call would return true for the very read
+     * that was just filtered — confirm whether the buffer should be cleared there.
+     * @return true if a read is buffered or available within the bound
+     */
+    public boolean hasNext() {
+        if( record != null )
+            return true;
+
+        if (iterator.hasNext() && currentCount < readCount) {
+            record = iterator.next();
+            ++currentCount;
+            if (record.getAlignmentStart() == 0 && doNotUseThatUnmappedReadPile) {
+                return false;
+            }
+            return true;
+        } else {
+            return false;
+        }
+    }
+
+    /**
+     * get the next SAMRecord
+     * @return SAMRecord representing the next read; null when nothing is buffered
+     *         (i.e. when hasNext() was not called first)
+     */
+    public SAMRecord next() {
+        // hand out the buffered read and clear the buffer
+        SAMRecord cached = record;
+        record = null;
+        return cached;
+    }
+
+    /**
+     * this is unsupported on SAMRecord iterators
+     */
+    public void remove() {
+        throw new UnsupportedOperationException("You cannot use an iterator to remove a SAMRecord");
+    }
+
+    /**
+     * close the iterator
+     */
+    public void close() {
+        iterator.close();
+    }
+
+    public Iterator<SAMRecord> iterator() {
+        return this;
+    }
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/iterators/GenomeLocusIterator.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/iterators/GenomeLocusIterator.java
new file mode 100644
index 0000000..3a21d5a
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/iterators/GenomeLocusIterator.java
@@ -0,0 +1,100 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.iterators;
+
+import org.broadinstitute.gatk.utils.GenomeLoc;
+import org.broadinstitute.gatk.utils.GenomeLocParser;
+
+import java.util.Iterator;
+import java.util.NoSuchElementException;
+/**
+ * User: hanna
+ * Date: May 12, 2009
+ * Time: 10:52:47 AM
+ * BROAD INSTITUTE SOFTWARE COPYRIGHT NOTICE AND AGREEMENT
+ * Software and documentation are copyright 2005 by the Broad Institute.
+ * All rights are reserved.
+ *
+ * Users acknowledge that this software is supplied without any warranty or support.
+ * The Broad Institute is not responsible for its use, misuse, or
+ * functionality.
+ */
+
+/**
+ * Iterates through all of the loci provided in the reference.
+ */
+public class GenomeLocusIterator implements Iterator<GenomeLoc> {
+    /**
+     * Builds the successive single-base loci handed out by this iterator.
+     */
+    private GenomeLocParser parser;
+
+    /**
+     * The entire region over which we're iterating.
+     */
+    private GenomeLoc completeLocus;
+
+    /**
+     * The current position in the traversal.
+     */
+    private GenomeLoc currentLocus;
+
+    /**
+     * Creates an iterator over every single-base locus in the given region,
+     * starting at the region's first base.
+     * @param parser factory used to build each single-base locus.
+     * @param completeLocus the full region to traverse.
+     */
+    public GenomeLocusIterator( GenomeLocParser parser, GenomeLoc completeLocus ) {
+        this.parser = parser;
+        this.completeLocus = completeLocus;
+        this.currentLocus = parser.createGenomeLoc(completeLocus.getContig(), completeLocus.getStart());
+    }
+
+    /**
+     * Is the iterator still within the locus?
+     * @return True if the iterator has more elements.  False otherwise.
+     */
+    public boolean hasNext() {
+        final boolean pastEnd = currentLocus.isPast(completeLocus);
+        return !pastEnd;
+    }
+
+    /**
+     * Get the next single-base locus context bounded by the iterator.
+     * @return GenomeLoc representing the next single-base locus context.
+     */
+    public GenomeLoc next() {
+        if ( !hasNext() )
+            throw new NoSuchElementException("No elements remaining in bounded reference region.");
+        final GenomeLoc result = currentLocus;
+        currentLocus = parser.incPos(currentLocus);
+        return result;
+    }
+
+    /**
+     * Removal is unsupported: this iterator is a read-only view of the region.
+     */
+    public void remove() {
+        throw new UnsupportedOperationException( "ReferenceLocusIterator is read-only" );
+    }
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/iterators/IterableIterator.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/iterators/IterableIterator.java
new file mode 100644
index 0000000..7ef375b
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/iterators/IterableIterator.java
@@ -0,0 +1,40 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.iterators;
+
+import java.util.Iterator;
+
+/**
+ * Adapts a one-shot {@link Iterator} to the {@link Iterable} interface so it can be
+ * used in a for-each loop.  The wrapped iterator is returned directly, so this
+ * Iterable can only be traversed once.
+ *
+ * @param <T> element type of the underlying iterator
+ */
+public class IterableIterator<T> implements Iterable<T> {
+    // The wrapped iterator; final since this adapter is a fixed, single-use view.
+    private final Iterator<T> iter;
+
+    /**
+     * @param iter the iterator to expose as an Iterable; should not be consumed elsewhere.
+     */
+    public IterableIterator(Iterator<T> iter) {
+        this.iter = iter;
+    }
+
+    /**
+     * @return the underlying iterator itself (not a fresh one), so iteration is single-use.
+     */
+    public Iterator<T> iterator() {
+        return iter;
+    }
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/iterators/MalformedBAMErrorReformatingIterator.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/iterators/MalformedBAMErrorReformatingIterator.java
new file mode 100644
index 0000000..d04ddaa
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/iterators/MalformedBAMErrorReformatingIterator.java
@@ -0,0 +1,69 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.iterators;
+
+import htsjdk.samtools.SAMRecord;
+import htsjdk.samtools.util.CloseableIterator;
+import org.broadinstitute.gatk.utils.exceptions.UserException;
+
+import java.io.File;
+import java.util.Iterator;
+
+/**
+ * Traps BAM formatting errors in underlying iterator and rethrows meaningful GATK UserExceptions
+ */
+public class MalformedBAMErrorReformatingIterator implements CloseableIterator<SAMRecord> {
+    File source;
+    CloseableIterator<SAMRecord> it;
+
+    public MalformedBAMErrorReformatingIterator(final File source, final CloseableIterator<SAMRecord> it) {
+        this.it = it;
+        this.source = source;
+    }
+
+    public boolean hasNext() {
+        try {
+            return this.it.hasNext();
+        } catch ( RuntimeException e ) { // we need to catch RuntimeExceptions here because the Picard code is throwing them (among SAMFormatExceptions) sometimes
+            throw new UserException.MalformedBAM(source, e.getMessage());
+        }
+    }
+
+    public SAMRecord next() {
+        try {
+            return it.next();
+        } catch ( RuntimeException e ) { // we need to catch RuntimeExceptions here because the Picard code is throwing them (among SAMFormatExceptions) sometimes
+            throw new UserException.MalformedBAM(source, e.getMessage());
+        }
+    }
+
+    public void remove() {
+        throw new UnsupportedOperationException("Can not remove records from a SAM file via an iterator!");
+    }
+
+    public void close() { it.close(); }
+    public Iterator<SAMRecord> iterator() { return this; }
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/iterators/MisencodedBaseQualityReadTransformer.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/iterators/MisencodedBaseQualityReadTransformer.java
new file mode 100644
index 0000000..f7c4080
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/iterators/MisencodedBaseQualityReadTransformer.java
@@ -0,0 +1,94 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.iterators;
+
+import org.broadinstitute.gatk.engine.GenomeAnalysisEngine;
+import org.broadinstitute.gatk.engine.walkers.Walker;
+import org.broadinstitute.gatk.utils.QualityUtils;
+import org.broadinstitute.gatk.utils.exceptions.UserException;
+import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
+
+/**
+ * Checks for and errors out (or fixes if requested) when it detects reads with base qualities that are not encoded with
+ * phred-scaled quality scores.  Q0 == ASCII 33 according to the SAM specification, whereas Illumina encoding starts at
+ * Q64.  The idea here is simple: if we are asked to fix the scores then we just subtract 31 from every quality score.
+ * Otherwise, we randomly sample reads (for efficiency) and error out if we encounter a qual that's too high.
+ */
+public class MisencodedBaseQualityReadTransformer extends ReadTransformer {
+
+    private static final int samplingFrequency = 1000;  // sample 1 read for every 1000 encountered
+    private static final int encodingFixValue = 31;  // Illumina_64 - PHRED_33
+
+    // true when neither fixing nor checking is requested; apply() is skipped entirely
+    private boolean disabled;
+    // true when quals should be rescaled rather than merely spot-checked
+    private boolean fixQuals;
+    // NOTE(review): shared mutable static state — not thread-safe, so with multiple
+    // threads the 1-in-1000 sampling interval is only approximate; confirm acceptable.
+    protected static int currentReadCounter = 0;
+
+    /**
+     * Configures fix-vs-check mode from the engine arguments.
+     * @return ON_INPUT: this transform is applied as reads are read in.
+     */
+    @Override
+    public ApplicationTime initializeSub(final GenomeAnalysisEngine engine, final Walker walker) {
+        fixQuals = engine.getArguments().FIX_MISENCODED_QUALS;
+        // checking is disabled only when not fixing AND the user explicitly allowed misencoded quals
+        disabled = !fixQuals && engine.getArguments().ALLOW_POTENTIALLY_MISENCODED_QUALS;
+
+        return ReadTransformer.ApplicationTime.ON_INPUT;
+    }
+
+    @Override
+    public boolean enabled() {
+        return !disabled;
+    }
+
+    /**
+     * Either rescales the read's qualities in place (fix mode) or spot-checks them
+     * for suspiciously high values (check mode).
+     */
+    @Override
+    public GATKSAMRecord apply(final GATKSAMRecord read) {
+        if ( fixQuals )
+            return fixMisencodedQuals(read);
+
+        checkForMisencodedQuals(read);
+        return read;
+    }
+
+    /**
+     * Subtracts the Illumina-64 offset from every base quality of the read.
+     * Throws UserException.BadInput if any rescaled quality goes negative, which
+     * indicates the read was already phred-33 encoded (a mixture we cannot repair).
+     */
+    protected static GATKSAMRecord fixMisencodedQuals(final GATKSAMRecord read) {
+        final byte[] quals = read.getBaseQualities();
+        for ( int i = 0; i < quals.length; i++ ) {
+            quals[i] -= encodingFixValue;
+            if ( quals[i] < 0 )
+                throw new UserException.BadInput("while fixing mis-encoded base qualities we encountered a read that was correctly encoded; we cannot handle such a mixture of reads so unfortunately the BAM must be fixed with some other tool");
+        }
+        read.setBaseQualities(quals);
+        return read;
+    }
+
+    /**
+     * Examines roughly one read per samplingFrequency calls and errors out if any
+     * base quality exceeds the maximum reasonable phred-33 score.
+     */
+    protected static void checkForMisencodedQuals(final GATKSAMRecord read) {
+        // sample reads randomly for checking
+        if ( ++currentReadCounter >= samplingFrequency ) {
+            currentReadCounter = 0;
+
+            final byte[] quals = read.getBaseQualities();
+            for ( final byte qual : quals ) {
+                if ( qual > QualityUtils.MAX_REASONABLE_Q_SCORE )
+                    throw new UserException.MisencodedBAM(read, "we encountered an extremely high quality score of " + (int)qual);
+            }
+        }
+    }
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/iterators/NullSAMIterator.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/iterators/NullSAMIterator.java
new file mode 100644
index 0000000..743847e
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/iterators/NullSAMIterator.java
@@ -0,0 +1,58 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.iterators;
+
+import htsjdk.samtools.SAMRecord;
+import org.broadinstitute.gatk.utils.iterators.GATKSAMIterator;
+
+import java.util.Iterator;
+import java.util.NoSuchElementException;
+/**
+ * User: hanna
+ * Date: May 19, 2009
+ * Time: 6:47:16 PM
+ * BROAD INSTITUTE SOFTWARE COPYRIGHT NOTICE AND AGREEMENT
+ * Software and documentation are copyright 2005 by the Broad Institute.
+ * All rights are reserved.
+ *
+ * Users acknowledge that this software is supplied without any warranty or support.
+ * The Broad Institute is not responsible for its use, misuse, or
+ * functionality.
+ */
+
+/**
+ * A placeholder for an iterator with no data.
+ */
+public class NullSAMIterator implements GATKSAMIterator {
+    public NullSAMIterator() {}
+
+    public Iterator<SAMRecord> iterator() { return this; }
+    public void close() { /* NO-OP */ }
+
+    public boolean hasNext() { return false; }
+    public SAMRecord next() { throw new NoSuchElementException("No next element is available."); }
+    public void remove() { throw new UnsupportedOperationException("Cannot remove from a GATKSAMIterator"); }
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/iterators/PeekingIterator.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/iterators/PeekingIterator.java
new file mode 100644
index 0000000..8db59fb
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/iterators/PeekingIterator.java
@@ -0,0 +1,65 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.iterators;
+
+
+/*
+ * Copyright (c) 2009 The Broad Institute
+ *
+ * Permission is hereby granted, free of charge, to any person
+ * obtaining a copy of this software and associated documentation
+ * files (the "Software"), to deal in the Software without
+ * restriction, including without limitation the rights to use,
+ * copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the
+ * Software is furnished to do so, subject to the following
+ * conditions:
+ *
+ * The above copyright notice and this permission notice shall be
+ * included in all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+ * OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+ * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+ * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+ * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+ * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+ * OTHER DEALINGS IN THE SOFTWARE.
+ */
+
+/**
+ * @author aaron
+ *         <p/>
+ *         Class PeekingIterator
+ *         <p/>
+ *         a peekable interface, that requires a peek() method
+ */
+public interface PeekingIterator<T> {
+
+    /** @return the next value without consuming it, so a later next()/peek() still sees it */
+    public T peek();
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/iterators/PositionTrackingIterator.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/iterators/PositionTrackingIterator.java
new file mode 100644
index 0000000..dfeb739
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/iterators/PositionTrackingIterator.java
@@ -0,0 +1,106 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.iterators;
+
+import htsjdk.samtools.SAMRecord;
+import htsjdk.samtools.util.CloseableIterator;
+import org.broadinstitute.gatk.utils.iterators.GATKSAMIterator;
+
+/**
+ * Iterates through a list of elements, tracking the number of elements it has seen.
+ * @author hanna
+ * @version 0.1
+ */
+public class PositionTrackingIterator implements GATKSAMIterator {
+    /**
+     * The iterator being tracked; all iteration calls are delegated to it.
+     */
+    private final CloseableIterator<SAMRecord> iterator;
+
+    /**
+     * Current position within the tracked iterator: the index of the read that
+     * the next call to next() will return.
+     */
+    private long position;
+
+    /**
+     * Create a new iterator wrapping the given iterator, assuming that the reader is
+     * <code>position</code> reads into the sequence.
+     * @param iterator Iterator to wrap.
+     * @param position Non-negative position where the iterator currently sits.
+     */
+    public PositionTrackingIterator(CloseableIterator<SAMRecord> iterator, long position) {
+        this.iterator = iterator;
+        this.position = position;
+    }
+
+    /**
+     * Retrieves the current position of the iterator.  The 'current position' of the iterator is defined as
+     * the coordinate of the read that will be returned if next() is called.
+     * @return The current position of the iterator.
+     */
+    public long getPosition() {
+        return position;
+    }
+
+    /**
+     * {@inheritDoc}
+     */
+    public boolean hasNext() {
+        return iterator.hasNext();
+    }
+
+    /**
+     * Try to get the next read in the list.  If a next read is available, increment the position.
+     * @return next read in the list, if available.
+     */
+    public SAMRecord next() {
+        try {
+            return iterator.next();
+        }
+        finally {
+            // increment even if next() throws, mirroring how far the underlying iterator advanced
+            position++;
+        }
+    }
+
+    /**
+     * {@inheritDoc}
+     */
+    public GATKSAMIterator iterator() {
+        return this;
+    }
+
+    /**
+     * {@inheritDoc}
+     */
+    public void close() {
+        iterator.close();
+    }
+
+    /**
+     * {@inheritDoc}
+     */
+    public void remove() { throw new UnsupportedOperationException("Cannot remove from a GATKSAMIterator"); }
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/iterators/RNAReadTransformer.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/iterators/RNAReadTransformer.java
new file mode 100644
index 0000000..2b92758
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/iterators/RNAReadTransformer.java
@@ -0,0 +1,37 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.iterators;
+
+/**
+ *
+ * A baseclass for RNAseq read transformer
+ *
+ * @author ami
+ * @since 4/28/14.
+ */
+public abstract class RNAReadTransformer extends ReadTransformer{
+    public boolean isRNAReadTransformer(){return true;}
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/iterators/ReadFormattingIterator.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/iterators/ReadFormattingIterator.java
new file mode 100644
index 0000000..223c2fa
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/iterators/ReadFormattingIterator.java
@@ -0,0 +1,141 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.iterators;
+
+import htsjdk.samtools.SAMRecord;
+import org.apache.log4j.Logger;
+import org.broadinstitute.gatk.utils.iterators.GATKSAMIterator;
+import org.broadinstitute.gatk.utils.sam.AlignmentUtils;
+
+/**
+ * An iterator which does post-processing of a read, including potentially wrapping
+ * the read in something with a compatible interface or replacing the read entirely.
+ *
+ * @author mhanna
+ * @version 0.1
+ */
+public class ReadFormattingIterator implements GATKSAMIterator {
+    /**
+     * Logger.
+     */
+    final protected static Logger logger = Logger.getLogger(ReadFormattingIterator.class);
+
+    /**
+     * Iterator to which all iteration requests are delegated.
+     */
+    private final GATKSAMIterator wrappedIterator;
+
+    /**
+     * True if original base qualities should be used.
+     */
+    private final boolean useOriginalBaseQualities;
+
+    /**
+     * Non-negative if there is a default base quality value to fill in the reads with;
+     * a negative value disables the fill-in.
+     */
+    private final byte defaultBaseQualities;
+
+    /**
+     * Decorate the given iterator inside a ReadFormattingIterator.
+     * @param wrappedIterator iterator
+     * @param useOriginalBaseQualities true if original base qualities should be used
+     * @param defaultBaseQualities if non-negative and the reads have incomplete quality scores, set them all to this value.
+     */
+    public ReadFormattingIterator(GATKSAMIterator wrappedIterator, boolean useOriginalBaseQualities, byte defaultBaseQualities) {
+        this.wrappedIterator = wrappedIterator;
+        this.useOriginalBaseQualities = useOriginalBaseQualities;
+        this.defaultBaseQualities = defaultBaseQualities;
+    }
+
+    /**
+     * Convenience function for use in foreach loops.  Dangerous because it does not actually
+     * reset the iterator.
+     * @return An iterator through the current data stream.
+     */
+    public GATKSAMIterator iterator() {
+        // NOTE: this iterator doesn't perform any kind of reset operation; it just returns itself.
+        //       can we do something better?  Do we really have to provide support for the Iterable interface?
+        return this;
+    }
+
+    /**
+     * Close this iterator.
+     */
+    public void close() {
+        wrappedIterator.close();
+    }
+
+    /**
+     * Does the iterator contain more values?
+     * @return True if there are more left to return, false otherwise.
+     */
+    public boolean hasNext() {
+        return wrappedIterator.hasNext();
+    }
+
+    /**
+     * Get the next value in the sequence.
+     * @return Next value in the sequence.  By convention, a NoSuchElementException should be thrown if
+     *         no next exists.
+     */
+    public SAMRecord next() {
+        final SAMRecord rec = wrappedIterator.next();
+
+        // Always consolidate the cigar string into canonical form, collapsing zero-length / repeated cigar elements.
+        // Downstream code (like LocusIteratorByState) cannot necessarily handle non-consolidated cigar strings.
+        rec.setCigar(AlignmentUtils.consolidateCigar(rec.getCigar()));
+
+        // if we are using default quals, check if we need them, and add if necessary.
+        // 1. we need if reads are lacking or have incomplete quality scores
+        // 2. we add if defaultBaseQualities has a non-negative value
+        if (defaultBaseQualities >= 0) {
+            final byte[] reads = rec.getReadBases();
+            final byte[] quals = rec.getBaseQualities();
+            if (quals == null || quals.length < reads.length) {
+                final byte[] newQuals = new byte[reads.length];
+                java.util.Arrays.fill(newQuals, defaultBaseQualities);
+                rec.setBaseQualities(newQuals);
+            }
+        }
+
+        // if we are using original quals, set them now if they are present in the record
+        if ( useOriginalBaseQualities ) {
+            final byte[] originalQuals = rec.getOriginalBaseQualities();
+            if ( originalQuals != null )
+                rec.setBaseQualities(originalQuals);
+        }
+
+        return rec;
+    }
+
+    /**
+     * Remove the current element from the list.  Unsupported in this wrapper.
+     */
+    public void remove() { throw new UnsupportedOperationException("Cannot remove from a ReadFormattingIterator"); }
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/iterators/ReadTransformer.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/iterators/ReadTransformer.java
new file mode 100644
index 0000000..575aebf
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/iterators/ReadTransformer.java
@@ -0,0 +1,205 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.iterators;
+
+import com.google.java.contract.Ensures;
+import com.google.java.contract.Requires;
+import org.broadinstitute.gatk.engine.GenomeAnalysisEngine;
+import org.broadinstitute.gatk.engine.walkers.Walker;
+import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
+
+import java.util.Comparator;
+
+/**
+ * Baseclass used to describe a read transformer like BAQ and BQSR
+ *
+ * Read transformers are plugable infrastructure that modify read state
+ * either on input, on output, or within walkers themselves.
+ *
+ * The function apply() is called on each read seen by the GATK (after passing
+ * all ReadFilters) and it can do as it sees fit (without modifying the alignment)
+ * to the read to change qualities, add tags, etc.
+ *
+ * Initialize is called once right before the GATK traversal begins providing
+ * the ReadTransformer with the ability to collect and initialize data from the
+ * engine.
+ *
+ * Note that all ReadTransformers within the classpath are created and initialized.  If one
+ * shouldn't be run, it should look at the command line options of the engine and override
+ * enabled() accordingly.
+ *
+ * @since 8/31/12
+ * @author depristo
+ */
+abstract public class ReadTransformer {
+    /**
+     * When should this read transform be applied?  Set by initialize() from
+     * initializeSub()'s result, unless overrideTime takes precedence.
+     */
+    private ApplicationTime applicationTime;
+
+    /**
+     * Keep track of whether we've been initialized already, and ensure it's not called more than once.
+     */
+    private boolean initialized = false;
+
+    protected ReadTransformer() {}
+
+    /**
+     * @return the ordering constraint for the given read transformer; DO_NOT_CARE unless overridden
+     */
+    public OrderingConstraint getOrderingConstraint() { return OrderingConstraint.DO_NOT_CARE; }
+
+    /**
+     * Master initialization routine.  Called to setup a ReadTransform, using its overloaded initializeSub routine.
+     *
+     * @param overrideTime if not null, we will run this ReadTransform at the time provided, regardless of the timing of this read transformer itself
+     * @param engine the engine, for initializing values
+     * @param walker the walker we intend to run
+     */
+    @Requires({"initialized == false", "engine != null", "walker != null"})
+    @Ensures("initialized == true")
+    public final void initialize(final ApplicationTime overrideTime, final GenomeAnalysisEngine engine, final Walker walker) {
+        if ( engine == null ) throw new IllegalArgumentException("engine cannot be null");
+        if ( walker == null ) throw new IllegalArgumentException("walker cannot be null");
+
+        // the subclass picks the default timing; an explicit override always wins
+        this.applicationTime = initializeSub(engine, walker);
+        if ( overrideTime != null ) this.applicationTime = overrideTime;
+        initialized = true;
+    }
+
+    /**
+     * Subclasses must override this to initialize themselves
+     *
+     * @param engine the engine, for initializing values
+     * @param walker the walker we intend to run
+     * @return the point of time we'd like this read transform to be run
+     */
+    @Requires({"engine != null", "walker != null"})
+    @Ensures("result != null")
+    protected abstract ApplicationTime initializeSub(final GenomeAnalysisEngine engine, final Walker walker);
+
+    /**
+     * Should this ReadTransformer be activated?  Called after initialize, which allows this
+     * read transformer to look at its arguments and decide if it should be active.  All
+     * ReadTransformers must override this, as by default they are not enabled.
+     *
+     * @return true if this ReadTransformer should be used on the read stream
+     */
+    public boolean enabled() {
+        return false;
+    }
+
+    /**
+     * Has this transformer been initialized?
+     *
+     * @return true if it has
+     */
+    public final boolean isInitialized() {
+        return initialized;
+    }
+
+    /**
+     * When should we apply this read transformer?
+     *
+     * @return the application time established by initialize(); null if initialize() has not yet run
+     */
+    public final ApplicationTime getApplicationTime() {
+        return applicationTime;
+    }
+
+    /**
+     * Primary interface function for a read transform to actually do some work
+     *
+     * The function apply() is called on each read seen by the GATK (after passing
+     * all ReadFilters) and it can do as it sees fit (without modifying the alignment)
+     * to the read to change qualities, add tags, etc.
+     *
+     * @param read the read to transform
+     * @return the transformed read
+     */
+    @Requires("read != null")
+    @Ensures("result != null")
+    abstract public GATKSAMRecord apply(final GATKSAMRecord read);
+
+    @Override
+    public String toString() {
+        return getClass().getSimpleName();
+    }
+
+    /**
+     * When should a read transformer be applied?
+     */
+    public static enum ApplicationTime {
+        /**
+         * Walker does not tolerate this read transformer
+         */
+        FORBIDDEN,
+
+        /**
+         * apply the transformation to the incoming reads, the default
+         */
+        ON_INPUT,
+
+        /**
+         * apply the transformation to the outgoing read stream
+         */
+        ON_OUTPUT,
+
+        /**
+         * the walker will deal with the calculation itself
+         */
+        HANDLED_IN_WALKER
+    }
+
+    /**
+     * This enum specifies the constraints that the given read transformer has relative to any other read transformers being used
+     */
+    public enum OrderingConstraint {
+        /**
+         * If 2 read transformers are both active and MUST_BE_FIRST, then an error will be generated
+         */
+        MUST_BE_FIRST,
+
+        /**
+         * No constraints on the ordering for this read transformer
+         */
+        DO_NOT_CARE,
+
+        /**
+         * If 2 read transformers are both active and MUST_BE_LAST, then an error will be generated
+         */
+        MUST_BE_LAST
+    }
+
+    /** Orders transformers so MUST_BE_FIRST sorts before DO_NOT_CARE, which sorts before MUST_BE_LAST. */
+    public static class ReadTransformerComparator implements Comparator<ReadTransformer> {
+
+        public int compare(final ReadTransformer r1, final ReadTransformer r2) {
+            if ( r1.getOrderingConstraint() == r2.getOrderingConstraint() )
+                return 0;
+            return ( r1.getOrderingConstraint() == OrderingConstraint.MUST_BE_FIRST || r2.getOrderingConstraint() == OrderingConstraint.MUST_BE_LAST ) ? -1 : 1;
+        }
+    }
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/iterators/ReadTransformersMode.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/iterators/ReadTransformersMode.java
new file mode 100644
index 0000000..fc7e773
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/iterators/ReadTransformersMode.java
@@ -0,0 +1,53 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.iterators;
+
+import java.lang.annotation.*;
+
+/**
+ * User: hanna
+ * Date: May 14, 2009
+ * Time: 1:51:22 PM
+ * BROAD INSTITUTE SOFTWARE COPYRIGHT NOTICE AND AGREEMENT
+ * Software and documentation are copyright 2005 by the Broad Institute.
+ * All rights are reserved.
+ *
+ * Users acknowledge that this software is supplied without any warranty or support.
+ * The Broad Institute is not responsible for its use, misuse, or
+ * functionality.
+ */
+
+/**
+ * Allows the walker to indicate what type of data it wants to consume.
+ */
+
+@Documented
+@Inherited
+@Retention(RetentionPolicy.RUNTIME)
+@Target(ElementType.TYPE)
+public @interface ReadTransformersMode {
+    /** The point in the pipeline at which the read transformer should run; defaults to ON_INPUT. */
+    public abstract ReadTransformer.ApplicationTime ApplicationTime() default ReadTransformer.ApplicationTime.ON_INPUT;
+}
\ No newline at end of file
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/iterators/ReadTransformingIterator.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/iterators/ReadTransformingIterator.java
new file mode 100644
index 0000000..e3d676b
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/iterators/ReadTransformingIterator.java
@@ -0,0 +1,68 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.iterators;
+
+import com.google.java.contract.Ensures;
+import com.google.java.contract.Requires;
+import htsjdk.samtools.SAMRecord;
+import org.broadinstitute.gatk.utils.iterators.GATKSAMIterator;
+import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
+
+import java.util.Iterator;
+
+/**
+ * Iterator that applies a ReadTransformer to a stream of reads
+ */
+public class ReadTransformingIterator implements GATKSAMIterator {
+    private final GATKSAMIterator it;
+    private final ReadTransformer transformer;
+
+    /**
+     * Wraps the given iterator so that every read it yields is passed through the transformer.
+     */
+    @Requires({"it != null", "transformer != null", "transformer.isInitialized()"})
+    public ReadTransformingIterator(final GATKSAMIterator it, final ReadTransformer transformer) {
+        if ( ! transformer.isInitialized() )
+            throw new IllegalStateException("Creating a read transformer stream for an uninitialized read transformer: " + transformer);
+        if ( transformer.getApplicationTime() == ReadTransformer.ApplicationTime.FORBIDDEN )
+            throw new IllegalStateException("Creating a read transformer stream for a forbidden transformer " + transformer);
+
+        this.it = it;
+        this.transformer = transformer;
+    }
+
+    /** Pulls the next read from the underlying iterator and returns its transformed form. */
+    @Requires("hasNext()")
+    @Ensures("result != null")
+    public SAMRecord next() {
+        return transformer.apply((GATKSAMRecord) it.next());
+    }
+
+    public boolean hasNext() {
+        return it.hasNext();
+    }
+
+    public void remove() {
+        throw new UnsupportedOperationException("Can not remove records from a SAM file via an iterator!");
+    }
+
+    public void close() {
+        it.close();
+    }
+
+    public Iterator<SAMRecord> iterator() {
+        return this;
+    }
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/iterators/VerifyingSamIterator.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/iterators/VerifyingSamIterator.java
new file mode 100644
index 0000000..6c0004c
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/iterators/VerifyingSamIterator.java
@@ -0,0 +1,90 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.iterators;
+
+import htsjdk.samtools.SAMRecord;
+import org.broadinstitute.gatk.utils.exceptions.UserException;
+import org.broadinstitute.gatk.utils.iterators.GATKSAMIterator;
+
+import java.util.Iterator;
+
+/**
+ * Verifies that the incoming stream of reads is correctly sorted
+ */
+public class VerifyingSamIterator implements GATKSAMIterator {
+    /** Underlying iterator whose stream is being verified. */
+    GATKSAMIterator it;
+    /** Most recent mapped read seen; null until one is observed. */
+    SAMRecord last = null;
+    /** Whether sort-order checking is performed. */
+    boolean checkOrderP = true;
+
+    public VerifyingSamIterator(GATKSAMIterator it) {
+        this.it = it;
+    }
+
+    public boolean hasNext() { return this.it.hasNext(); }
+
+    /**
+     * Returns the next read, verifying it is not out of order relative to the last mapped read.
+     * @throws UserException.MissortedBAM if the stream is out of order
+     * @throws UserException.MalformedBAM if a mapped read has inconsistent mapping information
+     */
+    public SAMRecord next() {
+
+        SAMRecord cur = it.next();
+        if ( last != null )
+            verifyRecord(last, cur);
+        // only mapped reads participate in order checking
+        if ( ! cur.getReadUnmappedFlag() )
+            last = cur;
+        return cur;
+    }
+
+    private void verifyRecord( final SAMRecord last, final SAMRecord cur ) {
+        if ( checkOrderP && isOutOfOrder(last, cur) ) {
+            this.last = null;
+            throw new UserException.MissortedBAM(String.format("reads are out of order:%nlast:%n%s%ncurrent:%n%s%n", last.format(), cur.format()) );
+        }
+    }
+
+    private boolean isOutOfOrder( final SAMRecord last, final SAMRecord cur ) {
+        if ( last == null || cur.getReadUnmappedFlag() )
+            return false;
+        else {
+            if(last.getReferenceIndex() == SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX || last.getAlignmentStart() == SAMRecord.NO_ALIGNMENT_START)
+                throw new UserException.MalformedBAM(last,String.format("read %s has inconsistent mapping information.",last.format()));
+            // fix: report cur (not last) as the offending read for cur's inconsistency
+            if(cur.getReferenceIndex() == SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX || cur.getAlignmentStart() == SAMRecord.NO_ALIGNMENT_START)
+                throw new UserException.MalformedBAM(cur,String.format("read %s has inconsistent mapping information.",cur.format()));
+
+            return (last.getReferenceIndex() > cur.getReferenceIndex()) ||
+                    (last.getReferenceIndex().equals(cur.getReferenceIndex()) &&
+                            last.getAlignmentStart() > cur.getAlignmentStart());
+        }
+    }
+
+    public void remove() {
+        throw new UnsupportedOperationException("Can not remove records from a SAM file via an iterator!");
+    }
+
+    public void close() {
+        it.close();
+    }
+
+    public Iterator<SAMRecord> iterator() {
+        return this;
+    }
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/iterators/package-info.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/iterators/package-info.java
new file mode 100644
index 0000000..e450d45
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/iterators/package-info.java
@@ -0,0 +1,26 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.iterators;
\ No newline at end of file
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/package-info.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/package-info.java
index bd34a17..51722e7 100644
--- a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/package-info.java
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/package-info.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/phonehome/GATKRunReport.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/phonehome/GATKRunReport.java
new file mode 100644
index 0000000..d81c270
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/phonehome/GATKRunReport.java
@@ -0,0 +1,786 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.phonehome;
+
+import com.google.java.contract.Ensures;
+import com.google.java.contract.Requires;
+import org.apache.log4j.Level;
+import org.apache.log4j.Logger;
+import org.broadinstitute.gatk.engine.CommandLineGATK;
+import org.broadinstitute.gatk.engine.GenomeAnalysisEngine;
+import org.broadinstitute.gatk.engine.walkers.Walker;
+import org.broadinstitute.gatk.utils.Utils;
+import org.broadinstitute.gatk.engine.crypt.CryptUtils;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+import org.broadinstitute.gatk.utils.io.IOUtils;
+import org.broadinstitute.gatk.utils.io.Resource;
+import org.broadinstitute.gatk.utils.threading.ThreadEfficiencyMonitor;
+import org.jets3t.service.S3Service;
+import org.jets3t.service.S3ServiceException;
+import org.jets3t.service.impl.rest.httpclient.RestS3Service;
+import org.jets3t.service.model.S3Object;
+import org.jets3t.service.security.AWSCredentials;
+import org.simpleframework.xml.Element;
+import org.simpleframework.xml.Serializer;
+import org.simpleframework.xml.core.Persister;
+
+import java.io.*;
+import java.security.NoSuchAlgorithmException;
+import java.security.PublicKey;
+import java.text.DateFormat;
+import java.text.SimpleDateFormat;
+import java.util.Arrays;
+import java.util.Date;
+import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.zip.GZIPInputStream;
+import java.util.zip.GZIPOutputStream;
+
+
+/**
+ * A detailed description of a GATK run, and error if applicable.  Simply create a GATKRunReport
+ * with the constructor, providing the walker that was run and the fully instantiated GenomeAnalysisEngine
+ * <b>after the run finishes</b> and the GATKRunReport will collect all of the report information
+ * into this object.  Call postReport to write out the report, as an XML document, to either STDOUT,
+ * a file (in which case the output is gzipped), or with no arguments the report will be posted to the
+ * GATK run report database.
+ *
+ * @author depristo
+ * @since 2010
+ */
+public class GATKRunReport {
+    protected static final String REPORT_BUCKET_NAME = "broad.gsa.gatk.run.reports";
+    protected static final String TEST_REPORT_BUCKET_NAME = "broad.gsa.gatk.run.reports.test";
+    protected final static String AWS_ACCESS_KEY_MD5 = "34d4a26eb2062b3f06e833b28f9a38c6";
+    protected final static String AWS_SECRET_KEY_MD5 = "83f2332eec99ef1d7425d5dc5d4b514a";
+
+    private static final DateFormat DATE_FORMAT = new SimpleDateFormat("yyyy/MM/dd HH.mm.ss");
+
+    /**
+     * our log
+     */
+    protected static final Logger logger = Logger.getLogger(GATKRunReport.class);
+
+    /**
+     * Default value for the number of milliseconds before an S3 put operation is timed-out.
+     * Can be overridden via a constructor argument.
+     */
+    private static final long S3_DEFAULT_PUT_TIME_OUT_IN_MILLISECONDS = 30 * 1000;
+
+    /**
+     * Number of milliseconds before an S3 put operation is timed-out.
+     */
+    private long s3PutTimeOutInMilliseconds = S3_DEFAULT_PUT_TIME_OUT_IN_MILLISECONDS;
+
+    // -----------------------------------------------------------------
+    // elements captured for the report
+    // -----------------------------------------------------------------
+
+    @Element(required = false, name = "id")
+    private String id;
+
+    @Element(required = false, name = "exception")
+    private GATKRunReportException mException;
+
+    @Element(required = true, name = "start-time")
+    private String startTime = "ND";
+
+    @Element(required = true, name = "end-time")
+    private String endTime;
+
+    @Element(required = true, name = "run-time")
+    private long runTime = 0;
+
+    @Element(required = true, name = "walker-name")
+    private String walkerName;
+
+    @Element(required = true, name = "svn-version")
+    private String svnVersion;
+
+    @Element(required = true, name = "total-memory")
+    private long totalMemory;
+
+    @Element(required = true, name = "max-memory")
+    private long maxMemory;
+
+    @Element(required = true, name = "user-name")
+    private String userName;
+
+    @Element(required = true, name = "host-name")
+    private String hostName;
+
+    @Element(required = true, name = "java")
+    private String javaVersion;
+
+    @Element(required = true, name = "machine")
+    private String machine;
+
+    @Element(required = true, name = "iterations")
+    private long nIterations;
+
+    @Element(required = true, name = "tag")
+    private String tag;
+
+    @Element(required = true, name = "num-threads")
+    private int numThreads;
+    @Element(required = true, name = "percent-time-running")
+    private String percentTimeRunning;
+    @Element(required = true, name = "percent-time-waiting")
+    private String percentTimeWaiting;
+    @Element(required = true, name = "percent-time-blocking")
+    private String percentTimeBlocking;
+    @Element(required = true, name = "percent-time-waiting-for-io")
+    private String percentTimeWaitingForIO;
+
+    /** The error message, if one occurred, or null if none did */
+    public String errorMessage = null;
+    /** The error that occurred, if one did, or null if none did */
+    public Throwable errorThrown = null;
+
+    /**
+     * How should the GATK report its usage?
+     */
+    public enum PhoneHomeOption {
+        /** Disable phone home */
+        NO_ET,
+        /** Forces the report to go to S3 */
+        AWS,
+        /** Force output to STDOUT.  For debugging only */
+        STDOUT
+    }
+
+    /**
+     * To allow us to deserialize reports from XML
+     */
+    private GATKRunReport() { }
+
+    /**
+     * Read a GATKRunReport from its serialized XML representation
+     * @param stream an input stream containing a serialized XML report
+     * @return a reconstituted GATKRunReport read from the stream
+     * @throws Exception if parsing fails for any reason
+     */
+    @Ensures("result != null")
+    protected static GATKRunReport deserializeReport(final InputStream stream) throws Exception {
+        final Serializer serializer = new Persister();
+        return serializer.read(GATKRunReport.class, stream);
+    }
+
+    /**
+     * Create a new GATKRunReport from a report on S3
+     *
+     * Assumes that s3Object has already been written to S3, and this function merely
+     * fetches it from S3 and deserializes it.  The access keys must have permission to
+     * GetObject from S3.
+     *
+     * @param downloaderAccessKey AWS access key with permission to GetObject from bucketName
+     * @param downloaderSecretKey AWS secret key with permission to GetObject from bucketName
+     * @param bucketName the name of the bucket holding the report
+     * @param s3Object the s3Object we wrote to S3 in bucketName that we want to get back and decode
+     * @return a deserialized report derived from s3://bucketName/s3Object.getName()
+     * @throws Exception
+     */
+    @Ensures("result != null")
+    protected static GATKRunReport deserializeReport(final String downloaderAccessKey,
+                                                     final String downloaderSecretKey,
+                                                     final String bucketName,
+                                                     final S3Object s3Object) throws Exception {
+        final S3Service s3Service = initializeAWSService(downloaderAccessKey, downloaderSecretKey);
+
+        // Retrieve the whole data object we created previously
+        final S3Object objectComplete = s3Service.getObject(bucketName, s3Object.getName());
+
+        // Read the data from the object's DataInputStream using a loop, and print it out.
+        return deserializeReport(new GZIPInputStream(objectComplete.getDataInputStream()));
+    }
+
+    /**
+     * Create a new RunReport and populate all of the fields with values from the walker and engine.
+     * Allows the S3 put timeout to be explicitly set.
+     *
+     * @param walker the GATK walker that we ran
+     * @param e the exception caused by running this walker, or null if we completed successfully
+     * @param engine the GAE we used to run the walker, so we can fetch runtime, args, etc
+     * @param type the GATK phone home setting
+     * @param s3PutTimeOutInMilliseconds number of milliseconds to wait before timing out an S3 put operation
+     */
+    public GATKRunReport(final Walker<?,?> walker, final Exception e, final GenomeAnalysisEngine engine, final PhoneHomeOption type,
+                         final long s3PutTimeOutInMilliseconds) {
+        this(walker, e, engine, type);
+        this.s3PutTimeOutInMilliseconds = s3PutTimeOutInMilliseconds;
+    }
+
+    /**
+     * Create a new RunReport and populate all of the fields with values from the walker and engine.
+     * Leaves the S3 put timeout set to the default value of S3_DEFAULT_PUT_TIME_OUT_IN_MILLISECONDS.
+     *
+     * @param walker the GATK walker that we ran
+     * @param e the exception caused by running this walker, or null if we completed successfully
+     * @param engine the GAE we used to run the walker, so we can fetch runtime, args, etc
+     * @param type the GATK phone home setting
+     */
+    public GATKRunReport(final Walker<?,?> walker, final Exception e, final GenomeAnalysisEngine engine, final PhoneHomeOption type) {
+        if ( type == PhoneHomeOption.NO_ET )
+            throw new ReviewedGATKException("Trying to create a run report when type is NO_ET!");
+
+        logger.debug("Aggregating data for run report");
+
+        // what did we run?
+        id = org.apache.commons.lang.RandomStringUtils.randomAlphanumeric(32);
+        walkerName = engine.getWalkerName(walker.getClass());
+        svnVersion = CommandLineGATK.getVersionNumber();
+
+        // runtime performance metrics
+        Date end = new java.util.Date();
+        endTime = DATE_FORMAT.format(end);
+        if ( engine.getStartTime() != null ) { // made it this far during initialization
+            startTime = DATE_FORMAT.format(engine.getStartTime());
+            runTime = (end.getTime() - engine.getStartTime().getTime()) / 1000L; // difference in seconds
+        }
+
+        // deal with memory usage
+        Runtime.getRuntime().gc(); // call GC so totalMemory is ~ used memory
+        maxMemory = Runtime.getRuntime().maxMemory();
+        totalMemory = Runtime.getRuntime().totalMemory();
+
+        // we can only do some operations if an error hasn't occurred
+        if ( engine.getCumulativeMetrics() != null ) {
+            // it's possible we aborted so early that these data structures aren't initialized
+            nIterations = engine.getCumulativeMetrics().getNumIterations();
+        }
+
+        tag = engine.getArguments().tag;
+
+        // user and hostname -- information about the runner of the GATK
+        userName = System.getProperty("user.name");
+        hostName = Utils.resolveHostname();
+
+        // basic java information
+        javaVersion = Utils.join("-", Arrays.asList(System.getProperty("java.vendor"), System.getProperty("java.version")));
+        machine = Utils.join("-", Arrays.asList(System.getProperty("os.name"), System.getProperty("os.arch")));
+
+        // if there was an exception, capture it
+        this.mException = e == null ? null : new GATKRunReportException(e);
+
+        numThreads = engine.getTotalNumberOfThreads();
+        percentTimeRunning = getThreadEfficiencyPercent(engine, ThreadEfficiencyMonitor.State.USER_CPU);
+        percentTimeBlocking = getThreadEfficiencyPercent(engine, ThreadEfficiencyMonitor.State.BLOCKING);
+        percentTimeWaiting = getThreadEfficiencyPercent(engine, ThreadEfficiencyMonitor.State.WAITING);
+        percentTimeWaitingForIO = getThreadEfficiencyPercent(engine, ThreadEfficiencyMonitor.State.WAITING_FOR_IO);
+    }
+
+    /**
+     * Get the random alpha-numeric ID of this GATKRunReport
+     * @return a non-null string ID
+     */
+    @Ensures("result != null")
+    public String getID() {
+        return id;
+    }
+
+    /**
+     * Return a string representing the percent of time the GATK spent in state, if possible.  Otherwise return NA
+     *
+     * @param engine the GATK engine whose threading efficiency info we will use
+     * @param state the state whose occupancy we wish to know
+     * @return a string representation of the percent occupancy of state, or NA if not possible
+     */
+    @Requires({"engine != null", "state != null"})
+    @Ensures("result != null")
+    private String getThreadEfficiencyPercent(final GenomeAnalysisEngine engine, final ThreadEfficiencyMonitor.State state) {
+        final ThreadEfficiencyMonitor tem = engine.getThreadEfficiencyMonitor();
+        return tem == null ? "NA" : String.format("%.2f", tem.getStatePercent(state));
+    }
+
+    /**
+     * Get a filename (no path) appropriate for this report
+     *
+     * @return a non-null string filename
+     */
+    @Ensures("result != null")
+    protected String getReportFileName() {
+        return getID() + ".report.xml.gz";
+    }
+
+    // ---------------------------------------------------------------------------
+    //
+    // Main public interface method for posting reports
+    //
+    // ---------------------------------------------------------------------------
+
+    /**
+     * Post this GATK report to the destination implied by the PhoneHomeOption type
+     *
+     * Guaranteed to never throw an exception (exception noted below) and to return
+     * within a reasonable time (~10 seconds) regardless of whether the report was written successfully.
+     *
+     * @throws IllegalArgumentException if type == null
+     * @param type the type of phoning home we want to do
+     * @return true if a report was successfully written, false otherwise
+     */
+    public boolean postReport(final PhoneHomeOption type) {
+        if ( type == null ) throw new IllegalArgumentException("type cannot be null");
+
+        logger.debug("Posting report of type " + type);
+        switch (type) {
+            case NO_ET: // don't do anything
+                return false;
+            case AWS:
+                wentToAWS = true;
+                return postReportToAWSS3() != null;
+            case STDOUT:
+                return postReportToStream(System.out);
+            default:
+                exceptDuringRunReport("BUG: unexpected PhoneHomeOption ");
+                return false;
+        }
+    }
+
+    // ---------------------------------------------------------------------------
+    //
+    // Code for sending reports to local files
+    //
+    // ---------------------------------------------------------------------------
+
+    /**
+     * Write an XML representation of this report to the stream, throwing a GATKException if the marshalling
+     * fails for any reason.
+     *
+     * @param stream an output stream to write the report to
+     */
+    @Requires("stream != null")
+    protected boolean postReportToStream(final OutputStream stream) {
+        final Serializer serializer = new Persister();
+        try {
+            serializer.write(this, stream);
+            return true;
+        } catch (Exception e) {
+            return false;
+        }
+    }
+
+    // ---------------------------------------------------------------------------
+    //
+    // Code for sending reports to s3
+    //
+    // ---------------------------------------------------------------------------
+
+    /**
+     * Get the name of the S3 bucket where we should upload this report
+     *
+     * @return the string name of the s3 bucket
+     */
+    @Ensures("result != null")
+    protected String getS3ReportBucket() {
+        return s3ReportBucket;
+    }
+
+    /**
+     * Decrypts encrypted AWS key from encryptedKeySource
+     * @param encryptedKeySource a file containing an encrypted AWS key
+     * @return a decrypted AWS key as a String
+     */
+    @Ensures("result != null")
+    public static String decryptAWSKey(final File encryptedKeySource) throws FileNotFoundException {
+        if ( encryptedKeySource == null ) throw new IllegalArgumentException("encryptedKeySource cannot be null");
+        return decryptAWSKey(new FileInputStream(encryptedKeySource));
+    }
+
+    /**
+     * @see #decryptAWSKey(java.io.File) but with input from an inputstream
+     */
+    @Requires("encryptedKeySource != null")
+    @Ensures("result != null")
+    private static String decryptAWSKey(final InputStream encryptedKeySource) {
+        final PublicKey key = CryptUtils.loadGATKDistributedPublicKey();
+        final byte[] fromDisk = IOUtils.readStreamIntoByteArray(encryptedKeySource);
+        final byte[] decrypted = CryptUtils.decryptData(fromDisk, key);
+        return new String(decrypted);
+    }
+
+    /**
+     * Get the decrypted AWS key stored in the resource directory under name
+     * @param name the name of the file containing the needed AWS key
+     * @return a non-null decrypted AWS key as a String
+     */
+    @Requires("name != null")
+    @Ensures("result != null")
+    private static String getAWSKey(final String name) {
+        final Resource resource = new Resource(name, GATKRunReport.class);
+        return decryptAWSKey(resource.getResourceContentsAsStream());
+    }
+
+    /**
+     * Get the AWS access key for the GATK user
+     * @return a non-null AWS access key for the GATK user
+     */
+    @Ensures("result != null")
+    protected static String getAWSUploadAccessKey() {
+        return getAWSKey("resources/GATK_AWS_access.key");
+    }
+
+    /**
+     * Get the AWS secret key for the GATK user
+     * @return a non-null AWS secret key for the GATK user
+     */
+    @Ensures("result != null")
+    protected static String getAWSUploadSecretKey() {
+        return getAWSKey("resources/GATK_AWS_secret.key");
+    }
+
+    /**
+     * Check that the AWS keys can be decrypted and are what we expect them to be
+     *
+     * @throws ReviewedGATKException if anything goes wrong
+     */
+    public static void checkAWSAreValid() {
+        try {
+            final String accessKeyMD5 = Utils.calcMD5(getAWSUploadAccessKey());
+            final String secretKeyMD5 = Utils.calcMD5(getAWSUploadSecretKey());
+
+            if ( ! AWS_ACCESS_KEY_MD5.equals(accessKeyMD5) ) {
+                throw new ReviewedGATKException("Invalid AWS access key found, expected MD5 " + AWS_ACCESS_KEY_MD5 + " but got " + accessKeyMD5);
+            }
+            if ( ! AWS_SECRET_KEY_MD5.equals(secretKeyMD5) ) {
+                throw new ReviewedGATKException("Invalid AWS secret key found, expected MD5 " + AWS_SECRET_KEY_MD5 + " but got " + secretKeyMD5);
+            }
+
+        } catch ( Exception e ) {
+            throw new ReviewedGATKException("Couldn't decrypt AWS keys, something is wrong with the GATK distribution");
+        }
+    }
+
+    /**
+     * Get an initialized S3Service for use in communicating with AWS/s3
+     *
+     * @param awsAccessKey our AWS access key to use
+     * @param awsSecretKey our AWS secret key to use
+     * @return an initialized S3Service object that can be immediately used to interact with S3
+     * @throws S3ServiceException
+     */
+    @Requires({"awsAccessKey != null", "awsSecretKey != null"})
+    @Ensures("result != null")
+    protected static S3Service initializeAWSService(final String awsAccessKey, final String awsSecretKey) throws S3ServiceException {
+        // To communicate with S3, create a class that implements an S3Service. We will use the REST/HTTP
+        // implementation based on HttpClient, as this is the most robust implementation provided with JetS3t.
+        final AWSCredentials awsCredentials = new AWSCredentials(awsAccessKey, awsSecretKey);
+        return new RestS3Service(awsCredentials);
+    }
+
+    /**
+     * A runnable that pushes this GATKReport up to s3.
+     *
+     * Should be run in a separate thread so we can time it out if something is taking too long
+     */
+    private class S3PutRunnable implements Runnable {
+        /** Was the upload operation successful? */
+        public final AtomicBoolean isSuccess;
+        /** The name of this report */
+        private final String filename;
+        /** The contents of this report */
+        private final byte[] contents;
+
+        /** The s3Object that we created to upload, or null if it failed */
+        public S3Object s3Object = null;
+
+        @Requires({"filename != null", "contents != null"})
+        public S3PutRunnable(final String filename, final byte[] contents){
+            this.isSuccess = new AtomicBoolean();
+            this.filename = filename;
+            this.contents = contents;
+        }
+
+        public void run() {
+            try {
+                switch ( awsMode ) {
+                    case FAIL_WITH_EXCEPTION:
+                        throw new IllegalStateException("We are throwing an exception for testing purposes");
+                    case TIMEOUT:
+                        try {
+                            Thread.sleep(s3PutTimeOutInMilliseconds * 100);
+                        } catch ( InterruptedException e ) {
+                            // supposed to be empty
+                        }
+                        break;
+                    case NORMAL:
+                        // IAM GATK user credentials -- only right is to PutObject into broad.gsa.gatk.run.reports bucket
+                        final S3Service s3Service = initializeAWSService(getAWSUploadAccessKey(), getAWSUploadSecretKey());
+
+                        // Create an S3Object based on a file, with Content-Length set automatically and
+                        // Content-Type set based on the file's extension (using the Mimetypes utility class)
+                        final S3Object fileObject = new S3Object(filename, contents);
+                        //logger.info("Created S3Object" + fileObject);
+                        //logger.info("Uploading " + localFile + " to AWS bucket");
+                        s3Object = s3Service.putObject(getS3ReportBucket(), fileObject);
+                        isSuccess.set(true);
+                        break;
+                    default:
+                        throw new IllegalStateException("Unexpected AWS exception");
+                }
+            } catch ( S3ServiceException e ) {
+                exceptDuringRunReport("S3 exception occurred", e);
+            } catch ( NoSuchAlgorithmException e ) {
+                exceptDuringRunReport("Couldn't calculate MD5", e);
+            } catch ( IOException e ) {
+                exceptDuringRunReport("Couldn't read report file", e);
+            } catch ( Exception e ) {
+                exceptDuringRunReport("An unexpected exception occurred during posting", e);
+            }
+        }
+    }
+
+    /**
+     * Post this GATK report to the AWS s3 GATK_Run_Report log
+     *
+     * @return the s3Object pointing to our pushed report, or null if we failed to push
+     */
+    protected S3Object postReportToAWSS3() {
+        // modifying example code from http://jets3t.s3.amazonaws.com/toolkit/code-samples.html
+        this.hostName = Utils.resolveHostname(); // we want to fill in the host name
+        final String key = getReportFileName();
+        logger.debug("Generating GATK report to AWS S3 with key " + key);
+
+        try {
+            // create a byte output stream so we can capture the output as a byte[]
+            final ByteArrayOutputStream byteStream = new ByteArrayOutputStream(8096);
+            final OutputStream outputStream = new GZIPOutputStream(byteStream);
+            postReportToStream(outputStream);
+            outputStream.close();
+            final byte[] report = byteStream.toByteArray();
+
+            // stop us from printing the annoying, and meaningless, mime types warning
+            final Logger mimeTypeLogger = Logger.getLogger(org.jets3t.service.utils.Mimetypes.class);
+            mimeTypeLogger.setLevel(Level.FATAL);
+
+            // Set the S3 upload on its own thread with timeout:
+            final S3PutRunnable s3run = new S3PutRunnable(key,report);
+            final Thread s3thread = new Thread(s3run);
+            s3thread.setDaemon(true);
+            s3thread.setName("S3Put-Thread");
+            s3thread.start();
+
+            s3thread.join(s3PutTimeOutInMilliseconds);
+
+            if(s3thread.isAlive()){
+                s3thread.interrupt();
+                exceptDuringRunReport("Run statistics report upload to AWS S3 timed-out");
+            } else if(s3run.isSuccess.get()) {
+                logger.info("Uploaded run statistics report to AWS S3");
+                logger.debug("Uploaded to AWS: " + s3run.s3Object);
+                return s3run.s3Object;
+            } else {
+                // an exception occurred, the thread should have already invoked the exceptDuringRunReport function
+            }
+        } catch ( IOException e ) {
+            exceptDuringRunReport("Couldn't read report file", e);
+        } catch ( InterruptedException e) {
+            exceptDuringRunReport("Run statistics report upload interrupted", e);
+        }
+
+        return null;
+    }
+
+    // ---------------------------------------------------------------------------
+    //
+    // Error handling code
+    //
+    // ---------------------------------------------------------------------------
+
+    /**
+     * Note that an exception occurred during creating or writing this report
+     * @param msg the message to print
+     * @param e the exception that occurred
+     */
+    @Ensures("exceptionOccurredDuringPost()")
+    private void exceptDuringRunReport(final String msg, final Throwable e) {
+        this.errorMessage = msg;
+        this.errorThrown = e;
+        logger.debug("A problem occurred during GATK run reporting [*** everything is fine, but no report could be generated; please do not post this to the support forum ***].  Message is: " + msg + ".  Error message is: " + e.getMessage());
+    }
+
+    /**
+     * Note that an exception occurred during creating or writing this report
+     * @param msg the message to print
+     */
+    @Ensures("exceptionOccurredDuringPost()")
+    private void exceptDuringRunReport(final String msg) {
+        this.errorMessage = msg;
+        logger.debug("A problem occurred during GATK run reporting [*** everything is fine, but no report could be generated; please do not post this to the support forum ***].  Message is " + msg);
+    }
+
+    /**
+     * Did an error occur during the posting of this run report?
+     * @return true if so, false if not
+     */
+    public boolean exceptionOccurredDuringPost() {
+        return getErrorMessage() != null;
+    }
+
+    /**
+     * If an error occurred during posting of this report, retrieve the message of the error that occurred, or null if
+     * no error occurred
+     * @return a string describing the error that occurred, or null if none did
+     */
+    public String getErrorMessage() {
+        return errorMessage;
+    }
+
+    /**
+     * Get the throwable that caused the exception during posting of this message, or null if none was available
+     *
+     * Note that getting a null value from this function does not imply that no error occurred.  Some
+     * errors that occurred may not have generated a throwable.
+     *
+     * @return the Throwable that caused the error, or null if no error occurred or was not caused by a throwable
+     */
+    public Throwable getErrorThrown() {
+        return errorThrown;
+    }
+
+    /**
+     * Helper method to format the exception that occurred during posting, or a string saying none occurred
+     * @return a non-null string
+     */
+    @Ensures("result != null")
+    protected String formatError() {
+        return exceptionOccurredDuringPost()
+                ? String.format("Exception message=%s with cause=%s", getErrorMessage(), getErrorThrown())
+                : "No exception occurred";
+    }
+
+    // ---------------------------------------------------------------------------
+    //
+    // Equals and hashcode -- purely for comparing reports for testing
+    //
+    // ---------------------------------------------------------------------------
+
+    @Override
+    public boolean equals(Object o) {
+        if (this == o) return true;
+        if (o == null || getClass() != o.getClass()) return false;
+
+        GATKRunReport that = (GATKRunReport) o;
+
+        if (maxMemory != that.maxMemory) return false;
+        if (nIterations != that.nIterations) return false;
+        if (numThreads != that.numThreads) return false;
+        if (runTime != that.runTime) return false;
+        if (totalMemory != that.totalMemory) return false;
+        if (endTime != null ? !endTime.equals(that.endTime) : that.endTime != null) return false;
+        if (hostName != null ? !hostName.equals(that.hostName) : that.hostName != null) return false;
+        if (id != null ? !id.equals(that.id) : that.id != null) return false;
+        if (javaVersion != null ? !javaVersion.equals(that.javaVersion) : that.javaVersion != null) return false;
+        if (mException != null ? !mException.equals(that.mException) : that.mException != null) return false;
+        if (machine != null ? !machine.equals(that.machine) : that.machine != null) return false;
+        if (percentTimeBlocking != null ? !percentTimeBlocking.equals(that.percentTimeBlocking) : that.percentTimeBlocking != null)
+            return false;
+        if (percentTimeRunning != null ? !percentTimeRunning.equals(that.percentTimeRunning) : that.percentTimeRunning != null)
+            return false;
+        if (percentTimeWaiting != null ? !percentTimeWaiting.equals(that.percentTimeWaiting) : that.percentTimeWaiting != null)
+            return false;
+        if (percentTimeWaitingForIO != null ? !percentTimeWaitingForIO.equals(that.percentTimeWaitingForIO) : that.percentTimeWaitingForIO != null)
+            return false;
+        if (startTime != null ? !startTime.equals(that.startTime) : that.startTime != null) return false;
+        if (svnVersion != null ? !svnVersion.equals(that.svnVersion) : that.svnVersion != null) return false;
+        if (tag != null ? !tag.equals(that.tag) : that.tag != null) return false;
+        if (userName != null ? !userName.equals(that.userName) : that.userName != null) return false;
+        if (walkerName != null ? !walkerName.equals(that.walkerName) : that.walkerName != null) return false;
+
+        return true;
+    }
+
+    @Override
+    public int hashCode() {
+        int result = id != null ? id.hashCode() : 0;
+        result = 31 * result + (mException != null ? mException.hashCode() : 0);
+        result = 31 * result + (startTime != null ? startTime.hashCode() : 0);
+        result = 31 * result + (endTime != null ? endTime.hashCode() : 0);
+        result = 31 * result + (int) (runTime ^ (runTime >>> 32));
+        result = 31 * result + (walkerName != null ? walkerName.hashCode() : 0);
+        result = 31 * result + (svnVersion != null ? svnVersion.hashCode() : 0);
+        result = 31 * result + (int) (totalMemory ^ (totalMemory >>> 32));
+        result = 31 * result + (int) (maxMemory ^ (maxMemory >>> 32));
+        result = 31 * result + (userName != null ? userName.hashCode() : 0);
+        result = 31 * result + (hostName != null ? hostName.hashCode() : 0);
+        result = 31 * result + (javaVersion != null ? javaVersion.hashCode() : 0);
+        result = 31 * result + (machine != null ? machine.hashCode() : 0);
+        result = 31 * result + (int) (nIterations ^ (nIterations >>> 32));
+        result = 31 * result + (tag != null ? tag.hashCode() : 0);
+        result = 31 * result + numThreads;
+        result = 31 * result + (percentTimeRunning != null ? percentTimeRunning.hashCode() : 0);
+        result = 31 * result + (percentTimeWaiting != null ? percentTimeWaiting.hashCode() : 0);
+        result = 31 * result + (percentTimeBlocking != null ? percentTimeBlocking.hashCode() : 0);
+        result = 31 * result + (percentTimeWaitingForIO != null ? percentTimeWaitingForIO.hashCode() : 0);
+        return result;
+    }
+
+    // ---------------------------------------------------------------------------
+    //
+    // Code specifically for testing the GATKRunReport
+    //
+    // ---------------------------------------------------------------------------
+
+    /**
+     * Enum specifying how the S3 uploader should behave.  Must be normal by default.  Purely for testing purposes
+     */
+    protected enum AWSMode {
+        NORMAL, // write normally to AWS
+        FAIL_WITH_EXCEPTION, // artificially fail during writing
+        TIMEOUT // sleep, so we time out
+    }
+    /** Our AWS mode */
+    private AWSMode awsMode = AWSMode.NORMAL;
+    /** The bucket were we send the GATK report on AWS/s3 */
+    private String s3ReportBucket = REPORT_BUCKET_NAME;
+    /** Did we send the report to AWS? */
+    private boolean wentToAWS = false;
+
+    /**
+     * Send the report to the AWS test bucket -- for testing only
+     */
+    protected void sendAWSToTestBucket() {
+        s3ReportBucket = TEST_REPORT_BUCKET_NAME;
+    }
+
+    /**
+     * Has the report been written to AWS?
+     *
+     * Does not imply anything about the success of the send, just that it was attempted
+     *
+     * @return true if the report has been sent to AWS, false otherwise
+     */
+    protected boolean wentToAWS() {
+        return wentToAWS;
+    }
+
+    /**
+     * Purely for testing purposes.  Tells the AWS uploader whether to actually upload or simulate errors
+     * @param mode what we want to do
+     */
+    @Requires("mode != null")
+    protected void setAwsMode(final AWSMode mode) {
+        this.awsMode = mode;
+    }
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/phonehome/GATKRunReportException.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/phonehome/GATKRunReportException.java
new file mode 100644
index 0000000..4615882
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/phonehome/GATKRunReportException.java
@@ -0,0 +1,99 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.phonehome;
+
+import org.broadinstitute.gatk.utils.exceptions.UserException;
+import org.simpleframework.xml.Element;
+import org.simpleframework.xml.ElementList;
+
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
+
+/**
+ * A helper class for formatting in XML the throwable chain starting at e.
+ */
+class GATKRunReportException {
+    @Element(required = false, name = "message")
+    String message = null;
+
+    @ElementList(required = false, name = "stacktrace")
+    final List<String> stackTrace = new ArrayList<String>();
+
+    @Element(required = false, name = "cause")
+    GATKRunReportException cause = null;
+
+    @Element(required = false, name = "is-user-exception")
+    Boolean isUserException;
+
+    @Element(required = false, name = "exception-class")
+    Class exceptionClass;
+
+    /**
+     * Allow us to deserialize from XML
+     */
+    public GATKRunReportException() { }
+
+    public GATKRunReportException(Throwable e) {
+        message = e.getMessage();
+        exceptionClass = e.getClass();
+        isUserException = e instanceof UserException;
+        for (StackTraceElement element : e.getStackTrace()) {
+            stackTrace.add(element.toString());
+        }
+
+        if ( e.getCause() != null ) {
+            cause = new GATKRunReportException(e.getCause());
+        }
+    }
+
+    @Override
+    public boolean equals(Object o) {
+        if (this == o) return true;
+        if (o == null || getClass() != o.getClass()) return false;
+
+        GATKRunReportException that = (GATKRunReportException) o;
+
+        if (cause != null ? !cause.equals(that.cause) : that.cause != null) return false;
+        if (exceptionClass != null ? !exceptionClass.equals(that.exceptionClass) : that.exceptionClass != null)
+            return false;
+        if (isUserException != null ? !isUserException.equals(that.isUserException) : that.isUserException != null)
+            return false;
+        if (message != null ? !message.equals(that.message) : that.message != null) return false;
+        if (stackTrace != null ? !stackTrace.equals(that.stackTrace) : that.stackTrace != null) return false;
+
+        return true;
+    }
+
+    @Override
+    public int hashCode() {
+        int result = message != null ? message.hashCode() : 0;
+        result = 31 * result + (stackTrace != null ? stackTrace.hashCode() : 0);
+        result = 31 * result + (cause != null ? cause.hashCode() : 0);
+        result = 31 * result + (isUserException != null ? isUserException.hashCode() : 0);
+        result = 31 * result + (exceptionClass != null ? exceptionClass.hashCode() : 0);
+        return result;
+    }
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/recalibration/BQSRArgumentSet.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/recalibration/BQSRArgumentSet.java
new file mode 100644
index 0000000..4914511
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/recalibration/BQSRArgumentSet.java
@@ -0,0 +1,100 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.recalibration;
+
+import org.broadinstitute.gatk.engine.arguments.GATKArgumentCollection;
+
+import java.io.File;
+import java.util.List;
+
+public class BQSRArgumentSet {
+    // declare public, STL-style for easier and more efficient access:
+    private File BQSR_RECAL_FILE;
+    private int quantizationLevels;
+    private List<Integer> staticQuantizedQuals;
+    private boolean roundDown;
+    private boolean disableIndelQuals;
+    private boolean emitOriginalQuals;
+    private int PRESERVE_QSCORES_LESS_THAN;
+    private double globalQScorePrior;
+
+    public BQSRArgumentSet(final GATKArgumentCollection args) {
+        this.BQSR_RECAL_FILE = args.BQSR_RECAL_FILE;
+        this.quantizationLevels = args.quantizationLevels;
+        this.staticQuantizedQuals = args.staticQuantizationQuals;
+        this.roundDown = args.roundDown;
+        this.disableIndelQuals = args.disableIndelQuals;
+        this.emitOriginalQuals = args.emitOriginalQuals;
+        this.PRESERVE_QSCORES_LESS_THAN = args.PRESERVE_QSCORES_LESS_THAN;
+        this.globalQScorePrior = args.globalQScorePrior;
+    }
+
+    public File getRecalFile() { return BQSR_RECAL_FILE; }
+
+    public int getQuantizationLevels() { return quantizationLevels; }
+
+    public List<Integer> getStaticQuantizedQuals() {return staticQuantizedQuals; }
+
+    public boolean getRoundDown() {return roundDown; }
+
+    public boolean shouldDisableIndelQuals() { return disableIndelQuals; }
+
+    public boolean shouldEmitOriginalQuals() { return emitOriginalQuals; }
+
+    public int getPreserveQscoresLessThan() { return PRESERVE_QSCORES_LESS_THAN; }
+
+    public double getGlobalQScorePrior() { return globalQScorePrior; }
+
+    public void setRecalFile(final File BQSR_RECAL_FILE) {
+        this.BQSR_RECAL_FILE = BQSR_RECAL_FILE;
+    }
+
+    public void setQuantizationLevels(final int quantizationLevels) {
+        this.quantizationLevels = quantizationLevels;
+    }
+
+    public void setStaticQuantizedQuals(final List<Integer> staticQuantizedQuals) { this.staticQuantizedQuals = staticQuantizedQuals; }
+
+    public void setRoundDown(final boolean roundDown) {
+        this.roundDown = roundDown;
+    }
+
+    public void setDisableIndelQuals(final boolean disableIndelQuals) {
+        this.disableIndelQuals = disableIndelQuals;
+    }
+
+    public void setEmitOriginalQuals(final boolean emitOriginalQuals) {
+        this.emitOriginalQuals = emitOriginalQuals;
+    }
+
+    public void setPreserveQscoresLessThan(final int PRESERVE_QSCORES_LESS_THAN) {
+        this.PRESERVE_QSCORES_LESS_THAN = PRESERVE_QSCORES_LESS_THAN;
+    }
+
+    public void setGlobalQScorePrior(final double globalQScorePrior) {
+        this.globalQScorePrior = globalQScorePrior;
+    }
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/recalibration/BQSRMode.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/recalibration/BQSRMode.java
new file mode 100644
index 0000000..13e97f8
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/recalibration/BQSRMode.java
@@ -0,0 +1,55 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.recalibration;
+
+import org.broadinstitute.gatk.engine.iterators.ReadTransformer;
+
+import java.lang.annotation.*;
+
+/**
+ * User: hanna
+ * Date: May 14, 2009
+ * Time: 1:51:22 PM
+ * BROAD INSTITUTE SOFTWARE COPYRIGHT NOTICE AND AGREEMENT
+ * Software and documentation are copyright 2005 by the Broad Institute.
+ * All rights are reserved.
+ *
+ * Users acknowledge that this software is supplied without any warranty or support.
+ * The Broad Institute is not responsible for its use, misuse, or
+ * functionality.
+ */
+
+/**
+ * Allows the walker to indicate what type of data it wants to consume.
+ */
+
+ at Documented
+ at Inherited
+ at Retention(RetentionPolicy.RUNTIME)
+ at Target(ElementType.TYPE)
+public @interface BQSRMode {
+    public abstract ReadTransformer.ApplicationTime ApplicationTime() default ReadTransformer.ApplicationTime.ON_INPUT;
+}
\ No newline at end of file
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/resourcemanagement/ThreadAllocation.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/resourcemanagement/ThreadAllocation.java
new file mode 100644
index 0000000..8676476
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/resourcemanagement/ThreadAllocation.java
@@ -0,0 +1,116 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.resourcemanagement;
+
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+
+/**
+ * Models how threads are distributed between various components of the GATK.
+ */
+public class ThreadAllocation {
+    /**
+     * The number of CPU threads to be used by the GATK.
+     */
+    private final int numDataThreads;
+
+    /**
+     * The number of CPU threads per data thread for GATK processing
+     */
+    private final int numCPUThreadsPerDataThread;
+
+    /**
+     * Number of threads to devote exclusively to IO.  Default is 0.
+     */
+    private final int numIOThreads;
+
+    /**
+     * Should we monitor thread efficiency?
+     */
+    private final boolean monitorEfficiency;
+
+    public int getNumDataThreads() {
+        return numDataThreads;
+    }
+
+    public int getNumCPUThreadsPerDataThread() {
+        return numCPUThreadsPerDataThread;
+    }
+
+    public int getNumIOThreads() {
+        return numIOThreads;
+    }
+
+    public boolean monitorThreadEfficiency() {
+        return monitorEfficiency;
+    }
+
+    /**
+     * Are we running in parallel mode?
+     *
+     * @return true if any parallel processing is enabled
+     */
+    public boolean isRunningInParallelMode() {
+        return getTotalNumThreads() > 1;
+    }
+
+    /**
+     * What is the total number of threads in use by the GATK?
+     *
+     * @return the sum of all thread allocations in this object
+     */
+    public int getTotalNumThreads() {
+        return getNumDataThreads() * getNumCPUThreadsPerDataThread() + getNumIOThreads();
+    }
+
+    /**
+     * Construct the default thread allocation.
+     */
+    public ThreadAllocation() {
+        this(1, 1, 0, false);
+    }
+
+    /**
+     * Set up the thread allocation.  Default allocation is 1 CPU thread, 0 IO threads.
+     * (0 IO threads means that no threads are devoted exclusively to IO; they're inline on the CPU thread).
+     * @param numDataThreads Total number of threads allocated to the traversal.
+     * @param numCPUThreadsPerDataThread The number of CPU threads per data thread to allocate
+     * @param numIOThreads Total number of threads allocated exclusively to IO.
+     * @param monitorEfficiency should we monitor threading efficiency in the GATK?
+     */
+    public ThreadAllocation(final int numDataThreads,
+                            final int numCPUThreadsPerDataThread,
+                            final int numIOThreads,
+                            final boolean monitorEfficiency) {
+        if ( numDataThreads < 1 ) throw new ReviewedGATKException("numDataThreads cannot be less than 1, but saw " + numDataThreads);
+        if ( numCPUThreadsPerDataThread < 1 ) throw new ReviewedGATKException("numCPUThreadsPerDataThread cannot be less than 1, but saw " + numCPUThreadsPerDataThread);
+        if ( numIOThreads < 0 ) throw new ReviewedGATKException("numIOThreads cannot be less than 0, but saw " + numIOThreads);
+
+        this.numDataThreads = numDataThreads;
+        this.numCPUThreadsPerDataThread = numCPUThreadsPerDataThread;
+        this.numIOThreads = numIOThreads;
+        this.monitorEfficiency = monitorEfficiency;
+    }
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/samples/Affection.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/samples/Affection.java
new file mode 100644
index 0000000..2dac6e1
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/samples/Affection.java
@@ -0,0 +1,47 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.samples;
+
/**
 * Categorical sample trait for association and analysis
 *
 * Samples can have unknown status, be affected or unaffected by the
 * categorical trait, or they can be marked as actually having an
 * other trait value (stored in an associated value in the Sample class)
 *
 * @author Mark DePristo
 * @since Sept. 2011
 */
public enum Affection {
    /** Status is unknown */
    UNKNOWN,
    /** Suffers from the disease */
    AFFECTED,
    /** Unaffected by the disease */
    UNAFFECTED,
    /** An "other" trait: value of the trait is stored elsewhere (in the Sample class) and is an arbitrary string */
    OTHER
}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/samples/Gender.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/samples/Gender.java
new file mode 100644
index 0000000..fd721ed
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/samples/Gender.java
@@ -0,0 +1,35 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.samples;
+
/**
* ENUM of possible human genders: male, female, or unknown
*/
public enum Gender {
    /** Sample is male */
    MALE,
    /** Sample is female */
    FEMALE,
    /** Gender was not specified or could not be determined */
    UNKNOWN
}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/samples/MendelianViolation.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/samples/MendelianViolation.java
new file mode 100644
index 0000000..0e37cb4
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/samples/MendelianViolation.java
@@ -0,0 +1,461 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.samples;
+
+import org.broadinstitute.gatk.engine.samples.Sample;
+import htsjdk.variant.variantcontext.Genotype;
+import htsjdk.variant.variantcontext.GenotypeType;
+import htsjdk.variant.variantcontext.VariantContext;
+import org.broadinstitute.gatk.utils.MathUtils;
+
+import java.util.*;
+
+/**
+ * User: carneiro / lfran
+ * Date: 3/9/11
+ * Time: 12:38 PM
+ *
+ * Class for the identification and tracking of mendelian violation. It can be used in 2 distinct ways:
+ * - Either using an instance of the MendelianViolation class to track mendelian violations for each of the families while
+ * walking over the variants
+ * - Or using the static methods to directly get information about mendelian violation in a family at a given locus
+ *
+ */
+public class MendelianViolation {
+    //List of families with violations
+    private List<String> violationFamilies;
+
+    //Call information
+    private int nocall = 0;
+    private int familyCalled = 0;
+    private int varFamilyCalled = 0;
+    private int lowQual = 0;
+
+    private boolean allCalledOnly = true;
+
+    //Stores occurrences of inheritance
+    private EnumMap<GenotypeType, EnumMap<GenotypeType,EnumMap<GenotypeType,Integer>>> inheritance;
+
+    private int violations_total=0;
+
+    private double minGenotypeQuality;
+
+    private boolean abortOnSampleNotFound;
+
+    //Number of families with genotype information for all members
+    public int getFamilyCalledCount(){
+        return familyCalled;
+    }
+
+    //Number of families with genotype information for all members
+    public int getVarFamilyCalledCount(){
+        return varFamilyCalled;
+    }
+
+    //Number of families missing genotypes for one or more of their members
+    public int getFamilyNoCallCount(){
+        return nocall;
+    }
+
+    //Number of families with genotypes below the set quality threshold
+    public int getFamilyLowQualsCount(){
+        return lowQual;
+    }
+
+    public int getViolationsCount(){
+        return violations_total;
+    }
+
+    //Count of alt alleles inherited from het parents (no violation)
+    public int getParentHetInheritedVar(){
+        return getParentsHetHetInheritedVar() + getParentsRefHetInheritedVar() + getParentsVarHetInheritedVar();
+    }
+
+    //Count of ref alleles inherited from het parents (no violation)
+    public int getParentHetInheritedRef(){
+        return getParentsHetHetInheritedRef() + getParentsRefHetInheritedRef() + getParentsVarHetInheritedRef();
+    }
+
    // The getters below read the nested 'inheritance' map, indexed by three
    // GenotypeType keys in sequence.  NOTE(review): presumably the order is
    // parent1 -> parent2 -> child (the pairs below are summed over both parent
    // orders) — confirm against where the map is populated.

    //Count of HomRef/HomRef/HomRef trios
    public int getRefRefRef(){
        return inheritance.get(GenotypeType.HOM_REF).get(GenotypeType.HOM_REF).get(GenotypeType.HOM_REF);
    }

    //Count of HomVar/HomVar/HomVar trios
    public int getVarVarVar(){
        return inheritance.get(GenotypeType.HOM_VAR).get(GenotypeType.HOM_VAR).get(GenotypeType.HOM_VAR);
    }

    //Count of HomRef/HomVar/Het trios (summed over both parent orders)
    public int getRefVarHet(){
        return inheritance.get(GenotypeType.HOM_REF).get(GenotypeType.HOM_VAR).get(GenotypeType.HET) +
                inheritance.get(GenotypeType.HOM_VAR).get(GenotypeType.HOM_REF).get(GenotypeType.HET);
    }

    //Count of Het/Het/Het trios
    public int getHetHetHet(){
        return inheritance.get(GenotypeType.HET).get(GenotypeType.HET).get(GenotypeType.HET);
    }

    //Count of Het/Het/HomRef trios
    public int getHetHetHomRef(){
        return inheritance.get(GenotypeType.HET).get(GenotypeType.HET).get(GenotypeType.HOM_REF);
    }

    //Count of Het/Het/HomVar trios
    public int getHetHetHomVar(){
        return inheritance.get(GenotypeType.HET).get(GenotypeType.HET).get(GenotypeType.HOM_VAR);
    }

    //Count of ref alleles inherited from Het/Het parents (no violation);
    //a HomRef child received two such alleles, hence the factor of 2
    public int getParentsHetHetInheritedRef(){
        return inheritance.get(GenotypeType.HET).get(GenotypeType.HET).get(GenotypeType.HET)
               + 2*inheritance.get(GenotypeType.HET).get(GenotypeType.HET).get(GenotypeType.HOM_REF);
    }

    //Count of var alleles inherited from Het/Het parents (no violation);
    //a HomVar child received two such alleles, hence the factor of 2
    public int getParentsHetHetInheritedVar(){
        return inheritance.get(GenotypeType.HET).get(GenotypeType.HET).get(GenotypeType.HET)
               + 2*inheritance.get(GenotypeType.HET).get(GenotypeType.HET).get(GenotypeType.HOM_VAR);
    }

    //Count of ref alleles inherited from HomRef/Het parents (no violation)
    public int getParentsRefHetInheritedRef(){
        return inheritance.get(GenotypeType.HOM_REF).get(GenotypeType.HET).get(GenotypeType.HOM_REF)
               + inheritance.get(GenotypeType.HET).get(GenotypeType.HOM_REF).get(GenotypeType.HOM_REF);
    }

    //Count of var alleles inherited from HomRef/Het parents (no violation)
    public int getParentsRefHetInheritedVar(){
        return inheritance.get(GenotypeType.HOM_REF).get(GenotypeType.HET).get(GenotypeType.HET)
               + inheritance.get(GenotypeType.HET).get(GenotypeType.HOM_REF).get(GenotypeType.HET);
    }

    //Count of ref alleles inherited from HomVar/Het parents (no violation)
    public int getParentsVarHetInheritedRef(){
        return inheritance.get(GenotypeType.HOM_VAR).get(GenotypeType.HET).get(GenotypeType.HET)
               + inheritance.get(GenotypeType.HET).get(GenotypeType.HOM_VAR).get(GenotypeType.HET);
    }

    //Count of var alleles inherited from HomVar/Het parents (no violation)
    public int getParentsVarHetInheritedVar(){
        return inheritance.get(GenotypeType.HOM_VAR).get(GenotypeType.HET).get(GenotypeType.HOM_VAR)
               + inheritance.get(GenotypeType.HET).get(GenotypeType.HOM_VAR).get(GenotypeType.HOM_VAR);
    }
+
    // Getters below count trios whose child genotype is impossible given the
    // parental genotypes (Mendelian violations).  Mixed-parent cases are
    // summed over both parent orders.

    //Count of violations of the type HOM_REF/HOM_REF -> HOM_VAR
    public int getParentsRefRefChildVar(){
        return inheritance.get(GenotypeType.HOM_REF).get(GenotypeType.HOM_REF).get(GenotypeType.HOM_VAR);
    }

    //Count of violations of the type HOM_REF/HOM_REF -> HET
    public int getParentsRefRefChildHet(){
        return inheritance.get(GenotypeType.HOM_REF).get(GenotypeType.HOM_REF).get(GenotypeType.HET);
    }

    //Count of violations of the type HOM_REF/HET -> HOM_VAR
    public int getParentsRefHetChildVar(){
        return inheritance.get(GenotypeType.HOM_REF).get(GenotypeType.HET).get(GenotypeType.HOM_VAR)
                + inheritance.get(GenotypeType.HET).get(GenotypeType.HOM_REF).get(GenotypeType.HOM_VAR);
    }

    //Count of violations of the type HOM_REF/HOM_VAR -> HOM_VAR
    public int getParentsRefVarChildVar(){
        return inheritance.get(GenotypeType.HOM_REF).get(GenotypeType.HOM_VAR).get(GenotypeType.HOM_VAR)
                + inheritance.get(GenotypeType.HOM_VAR).get(GenotypeType.HOM_REF).get(GenotypeType.HOM_VAR);
    }

    //Count of violations of the type HOM_REF/HOM_VAR -> HOM_REF
    public int getParentsRefVarChildRef(){
        return inheritance.get(GenotypeType.HOM_REF).get(GenotypeType.HOM_VAR).get(GenotypeType.HOM_REF)
                + inheritance.get(GenotypeType.HOM_VAR).get(GenotypeType.HOM_REF).get(GenotypeType.HOM_REF);
    }

    //Count of violations of the type HOM_VAR/HET -> HOM_REF
    public int getParentsVarHetChildRef(){
        return inheritance.get(GenotypeType.HET).get(GenotypeType.HOM_VAR).get(GenotypeType.HOM_REF)
                + inheritance.get(GenotypeType.HOM_VAR).get(GenotypeType.HET).get(GenotypeType.HOM_REF);
    }

    //Count of violations of the type HOM_VAR/HOM_VAR -> HOM_REF
    public int getParentsVarVarChildRef(){
        return inheritance.get(GenotypeType.HOM_VAR).get(GenotypeType.HOM_VAR).get(GenotypeType.HOM_REF);
    }

    //Count of violations of the type HOM_VAR/HOM_VAR -> HET
    public int getParentsVarVarChildHet(){
        return inheritance.get(GenotypeType.HOM_VAR).get(GenotypeType.HOM_VAR).get(GenotypeType.HET);
    }


    //Count of violations of the type HOM_VAR/? -> HOM_REF (aggregate over the other parent's genotype)
    public int getParentVarChildRef(){
        return getParentsRefVarChildRef() + getParentsVarHetChildRef() +getParentsVarVarChildRef();
    }

    //Count of violations of the type HOM_REF/? -> HOM_VAR (aggregate over the other parent's genotype)
    public int getParentRefChildVar(){
        return getParentsRefVarChildVar() + getParentsRefHetChildVar() +getParentsRefRefChildVar();
    }
+
+    //Returns a String containing all trios where a Mendelian violation was observed.
+    //The String is formatted "mom1+dad1=child1,mom2+dad2=child2,..."
+    public String getViolationFamiliesString(){
+        if(violationFamilies.isEmpty())
+            return "";
+
+        Iterator<String> it = violationFamilies.iterator();
+        String violationFams = it.next();
+        while(it.hasNext()){
+            violationFams += ","+it.next();
+        }
+        return violationFams;
+    }
+
    //Returns the trios in violation at the last evaluated site.
    //NOTE(review): exposes the internal mutable list, which is cleared on every
    //call to countViolations/isViolation — callers should copy it if they need
    //to keep the contents.
    public List<String> getViolationFamilies(){
        return violationFamilies;
    }
+
+    static final int[] mvOffsets = new int[] { 1,2,5,6,8,11,15,18,20,21,24,25 };
+    static final int[] nonMVOffsets = new int[]{ 0,3,4,7,9,10,12,13,14,16,17,19,22,23,26 };
+
    //Minimum phred-scaled genotype quality required for a genotype to be considered
    public double getMinGenotypeQuality() {
        return minGenotypeQuality;
    }
+
+   /**
+     * Constructor
+     * @param minGenotypeQualityP - the minimum phred scaled genotype quality score necessary to asses mendelian violation
+     *
+     */
+    public MendelianViolation(double minGenotypeQualityP) {
+        this(minGenotypeQualityP,true);
+    }
+
+    /**
+     * Constructor
+     * @param minGenotypeQualityP - the minimum phred scaled genotype quality score necessary to asses mendelian violation
+     * @param abortOnSampleNotFound - Whether to stop execution if a family is passed but no relevant genotypes are found. If false, then the family is ignored.
+     */
+    public MendelianViolation(double minGenotypeQualityP, boolean abortOnSampleNotFound) {
+        minGenotypeQuality = minGenotypeQualityP;
+        this.abortOnSampleNotFound = abortOnSampleNotFound;
+        violationFamilies = new ArrayList<String>();
+        createInheritanceMap();
+    }
+
+    /**
+     * Constructor
+     * @param minGenotypeQualityP - the minimum phred scaled genotype quality score necessary to assess mendelian violation
+     * @param abortOnSampleNotFound - Whether to stop execution if a family is passed but no relevant genotypes are found. If false, then the family is ignored.
+     * @param completeTriosOnly - whether only complete trios are considered or parent/child pairs are too.
+     */
+    public MendelianViolation(double minGenotypeQualityP, boolean abortOnSampleNotFound, boolean completeTriosOnly) {
+        minGenotypeQuality = minGenotypeQualityP;
+        this.abortOnSampleNotFound = abortOnSampleNotFound;
+        violationFamilies = new ArrayList<String>();
+        createInheritanceMap();
+        // When true, any trio with an uncalled member is counted as a no-call and skipped
+        allCalledOnly = completeTriosOnly;
+    }
+
+    /**
+     * Counts Mendelian violations across all given families at this site.
+     * Resets all per-site state (no-call / low-quality / family counters, the list of
+     * violating families, and the inheritance count table) before evaluating.
+     *
+     * @param families the families to be checked for Mendelian violations
+     * @param vc the variant context to extract the genotypes and alleles for mom, dad and child.
+     * @return the total number of Mendelian violations observed at the site
+     */
+    public int countViolations(Map<String, Set<Sample>> families, VariantContext vc){
+
+        //Reset counts
+        nocall = 0;
+        lowQual = 0;
+        familyCalled = 0;
+        varFamilyCalled = 0;
+        violations_total=0;
+        violationFamilies.clear();
+        clearInheritanceMap();
+
+        for(Set<Sample> family : families.values()){
+            Iterator<Sample> sampleIterator = family.iterator();
+            Sample sample;
+            while(sampleIterator.hasNext()){
+                sample = sampleIterator.next();
+                // Only samples with at least one known parent anchor a trio evaluation
+                if(sample.getParents().size() > 0)
+                    updateViolations(sample.getFamilyID(),sample.getMaternalID(), sample.getPaternalID(), sample.getID() ,vc);
+            }
+        }
+        return violations_total;
+    }
+
+    /**
+     * Tests a single explicit trio for a Mendelian violation at this site.
+     * Performs the same per-site counter reset as countViolations() as a side effect.
+     *
+     * @param mother the mother's sample
+     * @param father the father's sample
+     * @param child  the child's sample
+     * @param vc     the variant context to extract the trio's genotypes from
+     * @return true if the trio's genotypes constitute a Mendelian violation
+     */
+    public boolean isViolation(Sample mother, Sample father, Sample child, VariantContext vc){
+
+        //Reset counts
+        nocall = 0;
+        lowQual = 0;
+        familyCalled = 0;
+        varFamilyCalled = 0;
+        violations_total=0;
+        violationFamilies.clear();
+        clearInheritanceMap();
+        updateViolations(mother.getFamilyID(),mother.getID(),father.getID(),child.getID(),vc);
+        return violations_total>0;
+    }
+
+
+    /**
+     * Evaluates one trio at the given variant context and updates the per-site
+     * counters (nocall / lowQual / familyCalled / varFamilyCalled / violations_total),
+     * the list of violating family IDs, and the mom x dad x child genotype-type counts.
+     *
+     * @param familyId the family being evaluated (recorded when a violation is found)
+     * @param motherId sample ID of the mother
+     * @param fatherId sample ID of the father
+     * @param childId  sample ID of the child
+     * @param vc       the variant context supplying the three genotypes
+     * @throws IllegalArgumentException if any genotype is missing and abortOnSampleNotFound is set
+     */
+    private void updateViolations(String familyId, String motherId, String fatherId, String childId, VariantContext vc){
+
+            int count;
+            Genotype gMom = vc.getGenotype(motherId);
+            Genotype gDad = vc.getGenotype(fatherId);
+            Genotype gChild = vc.getGenotype(childId);
+
+            if (gMom == null || gDad == null || gChild == null){
+                if(abortOnSampleNotFound)
+                    // Fixed message labels: the last value is the child's ID, not the family's
+                    throw new IllegalArgumentException(String.format("Variant %s:%d: Missing genotypes for family %s: mom=%s dad=%s child=%s", vc.getChr(), vc.getStart(), familyId, motherId, fatherId, childId));
+                else
+                    return;
+            }
+            //Count No calls
+            if(allCalledOnly && (!gMom.isCalled() || !gDad.isCalled() || !gChild.isCalled())){
+                nocall++;
+            }
+            // NOTE: && binds tighter than ||, so this reads (both parents uncalled) OR (child
+            // uncalled); a single uncalled parent can still be assessed by isViolation() below.
+            else if (!gMom.isCalled() && !gDad.isCalled() || !gChild.isCalled()){
+                nocall++;
+            }
+            //Count lowQual. Note that if min quality is set to 0, even values with no quality associated are returned
+            else if (minGenotypeQuality>0 && (gMom.getPhredScaledQual()   < minGenotypeQuality ||
+                gDad.getPhredScaledQual()   < minGenotypeQuality ||
+                gChild.getPhredScaledQual() < minGenotypeQuality )) {
+                lowQual++;
+            }
+            else{
+                //Count all families per loci called
+                familyCalled++;
+                //If the family is all homref, not too interesting
+                if(!(gMom.isHomRef() && gDad.isHomRef() && gChild.isHomRef()))
+                {
+                    varFamilyCalled++;
+                    if(isViolation(gMom, gDad, gChild)){
+                        violationFamilies.add(familyId);
+                        violations_total++;
+                    }
+                }
+                // Tally this trio's genotype-type combination in the inheritance table
+                count = inheritance.get(gMom.getType()).get(gDad.getType()).get(gChild.getType());
+                inheritance.get(gMom.getType()).get(gDad.getType()).put(gChild.getType(),count+1);
+
+            }
+    }
+
+    /**
+     * Evaluate the genotypes of mom, dad, and child to detect Mendelian violations
+     *
+     * @param gMom the mother's genotype
+     * @param gDad the father's genotype
+     * @param gChild the child's genotype
+     * @return true if the three genotypes represent a Mendelian violation; false otherwise
+     */
+    public static boolean isViolation(final Genotype gMom, final Genotype gDad, final Genotype gChild) {
+        // One parent is a no-call: only the obligate violation (other parent hom-ref
+        // with hom-var child, or vice versa) is detectable.
+        if(!gMom.isCalled()){
+            return (gDad.isHomRef() && gChild.isHomVar()) || (gDad.isHomVar() && gChild.isHomRef());
+        }
+        else if(!gDad.isCalled()){
+            return (gMom.isHomRef() && gChild.isHomVar()) || (gMom.isHomVar() && gChild.isHomRef());
+        }
+        //Both parents have genotype information
+        // Consistent iff the child's two alleles can be assigned one to each parent (either
+        // pairing); assumes diploid genotypes with two alleles -- TODO confirm for non-diploid calls
+        return !(gMom.getAlleles().contains(gChild.getAlleles().get(0)) && gDad.getAlleles().contains(gChild.getAlleles().get(1)) ||
+            gMom.getAlleles().contains(gChild.getAlleles().get(1)) && gDad.getAlleles().contains(gChild.getAlleles().get(0)));
+    }
+
+    /**
+     * Allocates the three-level mom -> dad -> child EnumMap of genotype-type
+     * counts, initializing every cell to zero.
+     */
+    private void createInheritanceMap(){
+
+        inheritance = new EnumMap<GenotypeType,EnumMap<GenotypeType,EnumMap<GenotypeType,Integer>>>(GenotypeType.class);
+        for(GenotypeType mType : GenotypeType.values()){
+            inheritance.put(mType, new EnumMap<GenotypeType,EnumMap<GenotypeType,Integer>>(GenotypeType.class));
+            for(GenotypeType dType : GenotypeType.values()){
+                inheritance.get(mType).put(dType, new EnumMap<GenotypeType,Integer>(GenotypeType.class));
+                for(GenotypeType cType : GenotypeType.values()){
+                    inheritance.get(mType).get(dType).put(cType, 0);
+                }
+            }
+        }
+
+    }
+
+    /**
+     * Resets every cell of the inheritance count table to zero without
+     * reallocating the nested maps.
+     */
+    private void clearInheritanceMap(){
+        for(GenotypeType mType : GenotypeType.values()){
+            for(GenotypeType dType : GenotypeType.values()){
+                for(GenotypeType cType : GenotypeType.values()){
+                    inheritance.get(mType).get(dType).put(cType, 0);
+                }
+            }
+        }
+    }
+
+    /**
+     * Computes the log10 likelihood ratio of a Mendelian violation for the trio at this site:
+     * log10-sum of the 12 violating mom/dad/child genotype configurations minus the
+     * log10-sum of the 15 consistent configurations (see mvOffsets / nonMVOffsets).
+     * NOTE(review): assumes all three samples are present in vc with genotype
+     * likelihoods -- NPE otherwise; confirm callers guarantee this.
+     *
+     * @return the likelihood ratio for a mendelian violation
+     */
+    public double violationLikelihoodRatio(VariantContext vc, String motherId, String fatherId, String childId) {
+        double[] logLikAssignments = new double[27];
+        // the matrix to set up is
+        // MOM   DAD    CHILD
+        //                    |-  AA
+        //   AA     AA    |    AB
+        //                    |-   BB
+        //                    |- AA
+        //  AA     AB     |   AB
+        //                    |- BB
+        // etc. The leaves are counted as 0-11 for MVs and 0-14 for non-MVs
+        double[] momGL = vc.getGenotype(motherId).getLikelihoods().getAsVector();
+        double[] dadGL = vc.getGenotype(fatherId).getLikelihoods().getAsVector();
+        double[] childGL = vc.getGenotype(childId).getLikelihoods().getAsVector();
+        int offset = 0;
+        // Flatten the 3x3x3 table: index = 9*mom + 3*dad + child (AA=0, AB=1, BB=2)
+        for ( int oMom = 0; oMom < 3; oMom++ ) {
+            for ( int oDad = 0; oDad < 3; oDad++ ) {
+                for ( int oChild = 0; oChild < 3; oChild ++ ) {
+                    logLikAssignments[offset++] = momGL[oMom] + dadGL[oDad] + childGL[oChild];
+                }
+            }
+        }
+        double[] mvLiks = new double[12];
+        double[] nonMVLiks = new double[15];
+        for ( int i = 0; i < 12; i ++ ) {
+            mvLiks[i] = logLikAssignments[mvOffsets[i]];
+        }
+
+        for ( int i = 0; i < 15; i++) {
+            nonMVLiks[i] = logLikAssignments[nonMVOffsets[i]];
+        }
+
+        return MathUtils.log10sumLog10(mvLiks) - MathUtils.log10sumLog10(nonMVLiks);
+    }
+
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/samples/PedReader.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/samples/PedReader.java
new file mode 100644
index 0000000..0bbc3c8
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/samples/PedReader.java
@@ -0,0 +1,311 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.samples;
+
+import org.apache.log4j.Logger;
+import org.broadinstitute.gatk.utils.MathUtils;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+import org.broadinstitute.gatk.utils.exceptions.UserException;
+import org.broadinstitute.gatk.utils.text.XReadLines;
+
+import java.io.*;
+import java.util.*;
+
+/**
+ * Reads PED file-formatted tabular text files
+ *
+ * See http://www.broadinstitute.org/mpg/tagger/faq.html
+ * See http://pngu.mgh.harvard.edu/~purcell/plink/data.shtml#ped
+ *
+ * The "ped" file format refers to the widely-used format for linkage pedigree data.
+ * Each line describes a single (diploid) individual in the following format:
+ *
+ *      family_ID individual_ID father_ID mother_ID gender phenotype genotype_1 genotype_2 ...
+ *
+ * If your data lacks pedigree information (for example, unrelated case/control individuals),
+ * set the father_ID and mother_ID to 0. sex denotes the individual's gender with 1=male and 2=female.
+ * phenotype refers to the affected status (for association studies) where 0=unknown, 1=unaffected, 2=affected.
+ * Finally, each genotype is written as two (=diploid) integer numbers (separated by whitespace),
+ * where 1=A, 2=C, 3=G, 4=T. No header lines are allowed and all columns must be separated by whitespace.
+ * Check out the information at the PLINK website on the "ped" file format.
+ *
+ * The PED file is a white-space (space or tab) delimited file: the first six columns are mandatory:
+ *  Family ID
+ *  Individual ID
+ *  Paternal ID
+ *  Maternal ID
+ *  Sex (1=male; 2=female; other=unknown)
+ *  Phenotype
+ *
+ *  The IDs are alphanumeric: the combination of family and individual ID should uniquely identify a person.
+ *  A PED file must have 1 and only 1 phenotype in the sixth column. The phenotype can be either a
+ *  quantitative trait or an affection status column: PLINK will automatically detect which type
+ *  (i.e. based on whether a value other than 0, 1, 2 or the missing genotype code is observed).
+ *  Note that the GATK actually supports arbitrary values for quantitative trait -- not just doubles --
+ *  and are actually representing these values as strings instead of doubles
+ *
+ *  NOTE Quantitative traits with decimal points must be coded with a period/full-stop character and
+ *  not a comma, i.e. 2.394 not 2,394
+ *
+ *  If an individual's sex is unknown, then any character other than 1 or 2 can be used.
+ *  When new files are created (PED, FAM, or other which contain sex) then the original coding will be
+ *  preserved. However, these individuals will be dropped from any analyses (i.e. phenotype set to missing also)
+ *  and an error message will arise if an analysis that uses family information is requested and an
+ *  individual of 'unknown' sex is specified as a father or mother.
+ *
+ *
+ *  HINT You can add a comment to a PED or MAP file by starting the line with a # character. The rest of that
+ *  line will be ignored. Do not start any family IDs with this character therefore.
+ *
+ *  Affection status, by default, should be coded:
+ *  -9 missing
+ *   0 missing
+ *   1 unaffected
+ *   2 affected
+ *
+ * If your file is coded 0/1 to represent unaffected/affected, then use the --1 flag:
+ * plink --file mydata --1 which will specify a disease phenotype coded:
+ *
+ *  -9 missing
+ *  0 unaffected
+ *  1 affected
+ *
+ * The missing phenotype value for quantitative traits is, by default, -9 (this can also be used for
+ * disease traits as well as 0). It can be reset by including the --missing-phenotype option:
+ *
+ * Genotypes (column 7 onwards) should also be white-space delimited; they can be any character
+ * (e.g. 1,2,3,4 or A,C,G,T or anything else) except 0 which is, by default, the missing genotype
+ * character. All markers should be biallelic. All SNPs (whether haploid or not) must have two
+ * alleles specified. Either both alleles should be missing (i.e. 0) or neither.
+ *
+ * No header row should be given. For example, here are two individuals typed for 3 SNPs (one row = one person):
+ *
+ *   FAM001  1  0 0  1  2  A A  G G  A C
+ *   FAM001  2  0 0  1  2  A A  A G  0 0
+ *   ...
+ *
+ * Note that the GATK does not support genotypes in a PED file.
+ *
+ * @author Mark DePristo
+ * @since 2011
+ */
+/**
+ * Parses PED-formatted pedigree records into {@link Sample} objects and registers
+ * them (plus any implicitly-referenced parents) in the supplied {@link SampleDB}.
+ */
+public class PedReader {
+    private static Logger logger = Logger.getLogger(PedReader.class);
+    // Phenotype values treated as categorical affection status; any other observed
+    // value switches the whole file to quantitative-trait interpretation.
+    final static private Set<String> CATEGORICAL_TRAIT_VALUES = new HashSet<String>(Arrays.asList("-9", "0", "1", "2"));
+    final static private String commentMarker = "#";
+
+    /**
+     * An enum that specifies which, if any, of the standard PED fields are
+     * missing from the input records.  For example, suppose we have the full record:
+     *
+     * "fam1 kid dad mom 1 2"
+     *
+     * indicating a male affected child.  This can be parsed with the -ped x.ped argument
+     * to the GATK.  Suppose we only have:
+     *
+     * "fam1 kid 1"
+     *
+     * we can parse the reduced version of this record with -ped:NO_PARENTS,NO_PHENOTYPE x.ped
+     */
+    public enum MissingPedField {
+        /**
+         * The PED records do not have the first (FAMILY_ID) argument.  The family id
+         * will be set to null / empty.
+         */
+        NO_FAMILY_ID,
+
+        /**
+         * The PED records do not have either the paternal or maternal IDs, so
+         * the corresponding IDs are set to null.
+         */
+        NO_PARENTS,
+
+        /**
+         * The PED records do not have the GENDER field, so the sex of each
+         * sample will be set to UNKNOWN.
+         */
+        NO_SEX,
+
+        /**
+         * The PED records do not have the PHENOTYPE field, so the phenotype
+         * of each sample will be set to UNKNOWN.
+         */
+        NO_PHENOTYPE
+    }
+
+    /** The six standard PED columns, in file order. */
+    protected enum Field {
+        FAMILY_ID, INDIVIDUAL_ID, PATERNAL_ID, MATERNAL_ID, GENDER, PHENOTYPE
+    }
+
+    // phenotype
+    private final static String MISSING_VALUE1 = "-9";
+    private final static String MISSING_VALUE2 = "0";
+    private final static String PHENOTYPE_UNAFFECTED = "1";
+    private final static String PHENOTYPE_AFFECTED = "2";
+
+    // Sex
+    private final static String SEX_MALE = "1";
+    private final static String SEX_FEMALE = "2";
+    // other=unknown
+
+    public PedReader() { }
+
+    /**
+     * Parses a PED file on disk.
+     * NOTE(review): the FileReader uses the platform default charset and is never
+     * closed here -- confirm this is acceptable for callers.
+     *
+     * @param source the PED file to read
+     * @param missingFields which standard columns are absent from this file's records
+     * @param sampleDB database to register parsed samples into
+     * @return the samples parsed (including implicitly-created parents)
+     */
+    public final List<Sample> parse(File source, EnumSet<MissingPedField> missingFields, SampleDB sampleDB) throws FileNotFoundException  {
+        logger.info("Reading PED file " + source + " with missing fields: " + missingFields);
+        return parse(new FileReader(source), missingFields, sampleDB);
+    }
+
+    /**
+     * Parses an inline PED string in which semicolons separate records
+     * (each ';' is replaced with a platform line separator before parsing).
+     */
+    public final List<Sample> parse(final String source, EnumSet<MissingPedField> missingFields, SampleDB sampleDB) {
+        logger.warn("Reading PED string: \"" + source + "\" with missing fields: " + missingFields);
+        return parse(new StringReader(source.replace(";", String.format("%n"))), missingFields, sampleDB);
+    }
+
+    /**
+     * Parses PED records from a reader: determines column offsets from the set of
+     * missing fields, decides whether the phenotype column is categorical or
+     * quantitative, builds a Sample per record, and adds implicit parent samples.
+     *
+     * @param reader source of PED lines (comment lines starting with '#' and blank lines are skipped)
+     * @param missingFields which standard columns are absent
+     * @param sampleDB database to register parsed samples into
+     * @return the samples parsed, in file order, followed by any implicit parents
+     */
+    public final List<Sample> parse(Reader reader, EnumSet<MissingPedField> missingFields, SampleDB sampleDB) {
+        final List<String> lines = new XReadLines(reader).readLines();
+
+        // What are the record offsets? (-1 marks a column that is absent from this file)
+        final int familyPos = missingFields.contains(MissingPedField.NO_FAMILY_ID) ? -1 : 0;
+        final int samplePos = familyPos + 1;
+        final int paternalPos = missingFields.contains(MissingPedField.NO_PARENTS) ? -1 : samplePos + 1;
+        final int maternalPos = missingFields.contains(MissingPedField.NO_PARENTS) ? -1 : paternalPos + 1;
+        final int sexPos = missingFields.contains(MissingPedField.NO_SEX) ? -1 : Math.max(maternalPos, samplePos) + 1;
+        final int phenotypePos = missingFields.contains(MissingPedField.NO_PHENOTYPE) ? -1 : Math.max(sexPos, Math.max(maternalPos, samplePos)) + 1;
+        final int nExpectedFields = MathUtils.arrayMaxInt(Arrays.asList(samplePos, paternalPos, maternalPos, sexPos, phenotypePos)) + 1;
+
+        // go through once and determine properties
+        // NOTE(review): lineNo only advances for parsed lines, so reported line
+        // numbers do not account for skipped comment/blank lines.
+        int lineNo = 1;
+        boolean isQT = false;
+        final List<String[]> splits = new ArrayList<String[]>(lines.size());
+        for ( final String line : lines ) {
+            if ( line.startsWith(commentMarker)) continue;
+            if ( line.trim().equals("") ) continue;
+
+            final String[] parts = line.split("\\s+");
+
+            if ( parts.length != nExpectedFields )
+                throw new UserException.MalformedFile(reader.toString(), "Bad PED line " + lineNo + ": wrong number of fields");
+
+            if ( phenotypePos != -1 ) {
+                // One non-categorical value anywhere makes the whole file quantitative-trait
+                isQT = isQT || ! CATEGORICAL_TRAIT_VALUES.contains(parts[phenotypePos]);
+            }
+
+            splits.add(parts);
+            lineNo++;
+        }
+        logger.info("Phenotype is other? " + isQT);
+
+        // now go through and parse each record
+        lineNo = 1;
+        final List<Sample> samples = new ArrayList<Sample>(splits.size());
+        for ( final String[] parts : splits ) {
+            String familyID = null, individualID, paternalID = null, maternalID = null;
+            Gender sex = Gender.UNKNOWN;
+            String quantitativePhenotype = Sample.UNSET_QT;
+            Affection affection = Affection.UNKNOWN;
+
+            if ( familyPos != -1 ) familyID = maybeMissing(parts[familyPos]);
+            individualID = parts[samplePos];
+            if ( paternalPos != -1 ) paternalID = maybeMissing(parts[paternalPos]);
+            if ( maternalPos != -1 ) maternalID = maybeMissing(parts[maternalPos]);
+
+            if ( sexPos != -1 ) {
+                if ( parts[sexPos].equals(SEX_MALE) ) sex = Gender.MALE;
+                else if ( parts[sexPos].equals(SEX_FEMALE) ) sex = Gender.FEMALE;
+                else sex = Gender.UNKNOWN;
+            }
+
+            if ( phenotypePos != -1 ) {
+                if ( isQT ) {
+                    // Quantitative trait: only -9 means missing; anything else is kept verbatim
+                    if ( parts[phenotypePos].equals(MISSING_VALUE1) )
+                        affection = Affection.UNKNOWN;
+                    else {
+                        affection = Affection.OTHER;
+                        quantitativePhenotype = parts[phenotypePos];
+                    }
+                } else {
+                    if ( parts[phenotypePos].equals(MISSING_VALUE1) ) affection = Affection.UNKNOWN;
+                    else if ( parts[phenotypePos].equals(MISSING_VALUE2) ) affection = Affection.UNKNOWN;
+                    else if ( parts[phenotypePos].equals(PHENOTYPE_UNAFFECTED) ) affection = Affection.UNAFFECTED;
+                    else if ( parts[phenotypePos].equals(PHENOTYPE_AFFECTED) ) affection = Affection.AFFECTED;
+                    else throw new ReviewedGATKException("Unexpected phenotype type " + parts[phenotypePos] + " at line " + lineNo);
+                }
+            }
+
+            final Sample s = new Sample(individualID, sampleDB, familyID, paternalID, maternalID, sex, affection, quantitativePhenotype);
+            samples.add(s);
+            sampleDB.addSample(s);
+            lineNo++;
+        }
+
+        // Create placeholder samples for parents referenced but not themselves listed
+        for ( final Sample sample : new ArrayList<Sample>(samples) ) {
+            Sample dad = maybeAddImplicitSample(sampleDB, sample.getPaternalID(), sample.getFamilyID(), Gender.MALE);
+            if ( dad != null ) samples.add(dad);
+
+            Sample mom = maybeAddImplicitSample(sampleDB, sample.getMaternalID(), sample.getFamilyID(), Gender.FEMALE);
+            if ( mom != null ) samples.add(mom);
+        }
+
+        return samples;
+    }
+
+    /** @return null if the token is a PED missing-value marker ("-9" or "0"), otherwise the token itself */
+    private final static String maybeMissing(final String string) {
+        if ( string.equals(MISSING_VALUE1) || string.equals(MISSING_VALUE2) )
+            return null;
+        else
+            return string;
+    }
+
+    /**
+     * Registers a minimal placeholder sample for a referenced-but-unlisted parent.
+     *
+     * @return the newly created sample, or null if id is null or already known
+     */
+    private final Sample maybeAddImplicitSample(SampleDB sampleDB, final String id, final String familyID, final Gender gender) {
+        if ( id != null && sampleDB.getSample(id) == null ) {
+            Sample s = new Sample(id, sampleDB, familyID, null, null, gender, Affection.UNKNOWN, Sample.UNSET_QT);
+            sampleDB.addSample(s);
+            return s;
+        } else
+            return null;
+    }
+
+    /**
+     * Parses a list of tags from the command line, assuming it comes from the GATK Engine
+     * tags, and returns the corresponding EnumSet.
+     *
+     * @param arg the actual engine arg, used for the UserException if there's an error
+     * @param tags a list of string tags that should be converted to the MissingPedField value
+     * @return the set of MissingPedField values named by tags
+     */
+    public static final EnumSet<MissingPedField> parseMissingFieldTags(final Object arg, final List<String> tags) {
+        final EnumSet<MissingPedField> missingFields = EnumSet.noneOf(MissingPedField.class);
+
+        for ( final String tag : tags ) {
+            try {
+                missingFields.add(MissingPedField.valueOf(tag));
+            } catch ( IllegalArgumentException e ) {
+                // Arrays.toString: values() alone would print the array reference, not its contents
+                throw new UserException.BadArgumentValue(arg.toString(), "Unknown tag " + tag + " allowed values are " + Arrays.toString(MissingPedField.values()));
+            }
+        }
+
+        return missingFields;
+    }
+}
\ No newline at end of file
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/samples/PedigreeValidationType.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/samples/PedigreeValidationType.java
new file mode 100644
index 0000000..1a373e1
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/samples/PedigreeValidationType.java
@@ -0,0 +1,42 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.samples;
+
+/**
+* How strictly the pedigree file's sample set must match the samples present
+* in the VCF/BAM inputs.
+*/
+public enum PedigreeValidationType {
+    /**
+     * Requires that, if a pedigree file is provided, all samples in the VCF or BAM files
+     * have a corresponding entry in the pedigree file(s).
+     */
+    STRICT,
+
+    /**
+     * Do not enforce any overlap between the VCF/BAM samples and the pedigree data
+     * */
+    SILENT
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/samples/Sample.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/samples/Sample.java
new file mode 100644
index 0000000..41cc0b2
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/samples/Sample.java
@@ -0,0 +1,261 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.samples;
+
+
+import org.broadinstitute.gatk.utils.exceptions.UserException;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * An individual in a pedigree database: a sample ID plus optional family and
+ * parental links, gender, and affection / quantitative-phenotype status.
+ * Parent lookups ({@link #getMother()}, {@link #getFather()}) are resolved
+ * through the owning {@link SampleDB}.
+ */
+public class Sample implements Comparable<Sample> {
+    final private String familyID, paternalID, maternalID;
+    final private Gender gender;
+    final private String otherPhenotype;
+    final private Affection affection;
+    final private String ID;
+    final private SampleDB infoDB;
+    // Extra, non-standard properties.  Never populated in the visible code; kept
+    // for equals()/toString() compatibility.
+    final private Map<String, Object> properties = new HashMap<String, Object>();
+
+    // Sentinel meaning "no quantitative trait value set"
+    public final static String UNSET_QT = null;
+
+    public Sample(final String ID, final SampleDB infoDB,
+                  final String familyID, final String paternalID, final String maternalID,
+                  final Gender gender, final Affection affection, final String otherPhenotype) {
+        this.familyID = familyID;
+        this.paternalID = paternalID;
+        this.maternalID = maternalID;
+        this.gender = gender;
+        this.otherPhenotype = otherPhenotype;
+        this.affection = affection;
+        this.ID = ID;
+        this.infoDB = infoDB;
+    }
+
+    protected Sample(final String ID,
+                     final String familyID, final String paternalID, final String maternalID,
+                     final Gender gender, final Affection affection, final String otherPhenotype) {
+        this(ID, null, familyID, paternalID, maternalID, gender, affection, otherPhenotype);
+    }
+
+    protected Sample(final String ID,
+                     final String familyID, final String paternalID, final String maternalID,
+                     final Gender gender, final Affection affection) {
+        this(ID, null, familyID, paternalID, maternalID, gender, affection, UNSET_QT);
+    }
+
+
+    public Sample(final String ID, final SampleDB infoDB,
+                  final String familyID, final String paternalID, final String maternalID, final Gender gender) {
+        this(ID, infoDB, familyID, paternalID, maternalID, gender, Affection.UNKNOWN, UNSET_QT);
+    }
+
+    public Sample(final String ID, final SampleDB infoDB, final Affection affection, final String otherPhenotype) {
+        this(ID, infoDB, null, null, null, Gender.UNKNOWN, affection, otherPhenotype);
+    }
+
+    public Sample(String id, SampleDB infoDB) {
+        this(id, infoDB, null, null, null,
+                Gender.UNKNOWN, Affection.UNKNOWN, UNSET_QT);
+    }
+
+    // -------------------------------------------------------------------------------------
+    //
+    // standard property getters
+    //
+    // -------------------------------------------------------------------------------------
+
+    public String getID() {
+        return ID;
+    }
+
+    public String getFamilyID() {
+        return familyID;
+    }
+
+    public String getPaternalID() {
+        return paternalID;
+    }
+
+    public String getMaternalID() {
+        return maternalID;
+    }
+
+    public Affection getAffection() {
+        return affection;
+    }
+
+    public boolean hasOtherPhenotype() {
+        return affection == Affection.OTHER;
+    }
+
+    public String getOtherPhenotype() {
+        return otherPhenotype;
+    }
+
+    /**
+     * Get the sample's mother
+     * @return sample object with relationship mother, if exists, or null
+     */
+    public Sample getMother() {
+        return infoDB.getSample(maternalID);
+    }
+
+    /**
+     * Get the sample's father
+     * @return sample object with relationship father, if exists, or null
+     */
+    public Sample getFather() {
+        return infoDB.getSample(paternalID);
+    }
+
+    /**
+     * @return the known parents of this sample (0, 1, or 2 entries; mother first when present)
+     */
+    public ArrayList<Sample> getParents(){
+        ArrayList<Sample> parents = new ArrayList<Sample>(2);
+        Sample parent = getMother();
+        if(parent != null)
+            parents.add(parent);
+        parent = getFather();
+        if(parent != null)
+            parents.add(parent);
+        return parents;
+    }
+
+    /**
+     * @return the gender of the sample (UNKNOWN when unspecified)
+     */
+    public Gender getGender() {
+        return gender;
+    }
+
+    @Override
+    public int compareTo(final Sample sample) {
+        return ID.compareTo(sample.getID());
+    }
+
+    @Override
+    public String toString() {
+        return String.format("Sample %s fam=%s dad=%s mom=%s gender=%s affection=%s qt=%s props=%s",
+                getID(), getFamilyID(), getPaternalID(), getMaternalID(), getGender(), getAffection(),
+                getOtherPhenotype(), properties);
+    }
+
+    // Hashing on ID alone is consistent with equals(): equal samples must (among
+    // other fields) share the same ID.
+    @Override
+    public int hashCode() {
+        return ID.hashCode();
+    }
+
+    @Override
+    public boolean equals(final Object o) {
+        if(o == null)
+            return false;
+        if(o instanceof Sample) {
+            Sample otherSample = (Sample)o;
+            return ID.equals(otherSample.ID) &&
+                    equalOrNull(familyID, otherSample.familyID) &&
+                    equalOrNull(paternalID, otherSample.paternalID) &&
+                    equalOrNull(maternalID, otherSample.maternalID) &&
+                    equalOrNull(gender, otherSample.gender) &&
+                    equalOrNull(otherPhenotype, otherSample.otherPhenotype) &&
+                    equalOrNull(affection, otherSample.affection) &&
+                    equalOrNull(properties, otherSample.properties);
+        }
+        return false;
+    }
+
+    // Null-safe equality: both null, or both non-null and equal.
+    private final static boolean equalOrNull(final Object o1, final Object o2) {
+        if ( o1 == null )
+            return o2 == null;
+        else
+            return o2 == null ? false : o1.equals(o2);
+    }
+
+    /**
+     * Merges one field from two records describing the same sample.  Empty or
+     * missing values yield to real values; two conflicting real values are an error.
+     *
+     * @param name sample ID, used in the error message
+     * @param field field name, used in the error message
+     * @param o1 the previously-seen value
+     * @param o2 the newly-read value
+     * @param emptyValue the value meaning "unset" for this field (may be null)
+     * @return the merged value
+     * @throws UserException if o1 and o2 are both set and inconsistent
+     */
+    private final static <T> T mergeValues(final String name, final String field, final T o1, final T o2, final T emptyValue) {
+        if ( o1 == null || o1.equals(emptyValue) ) {
+            // o1 is unset: take o2 (which may itself be null/unset)
+            return o2;
+        } else {
+            if ( o2 == null || o2.equals(emptyValue) )
+                return o1; // keep o1, since it's a real value
+            else {
+                // both o1 and o2 have a value
+                if ( o1 instanceof String && o1.equals(o2) )
+                    return o1;
+                else if ( o1 == o2 )
+                    return o1;
+                else
+                    throw new UserException("Inconsistent values detected for " + name + " for field " + field + " value1 " + o1 + " value2 " + o2);
+            }
+        }
+    }
+
+    /**
+     * Combines two records describing the same sample, field by field.
+     *
+     * @param prev the previously-seen record
+     * @param next the newly-read record
+     * @return next if the records are identical, otherwise a newly merged Sample
+     * @throws UserException if any field has two conflicting non-empty values
+     */
+    public final static Sample mergeSamples(final Sample prev, final Sample next) {
+        if ( prev.equals(next) )
+            return next;
+        else {
+            return new Sample(prev.getID(), prev.infoDB,
+                    mergeValues(prev.getID(), "Family_ID", prev.getFamilyID(), next.getFamilyID(), null),
+                    mergeValues(prev.getID(), "Paternal_ID", prev.getPaternalID(), next.getPaternalID(), null),
+                    // Fixed typo: this field was previously mislabeled "Material_ID" in error messages
+                    mergeValues(prev.getID(), "Maternal_ID", prev.getMaternalID(), next.getMaternalID(), null),
+                    mergeValues(prev.getID(), "Gender", prev.getGender(), next.getGender(), Gender.UNKNOWN),
+                    mergeValues(prev.getID(), "Affection", prev.getAffection(), next.getAffection(), Affection.UNKNOWN),
+                    mergeValues(prev.getID(), "OtherPhenotype", prev.getOtherPhenotype(), next.getOtherPhenotype(), UNSET_QT));
+        }
+    }
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/samples/SampleDB.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/samples/SampleDB.java
new file mode 100644
index 0000000..c0502da
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/samples/SampleDB.java
@@ -0,0 +1,338 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.samples;
+
+import htsjdk.samtools.SAMReadGroupRecord;
+import htsjdk.samtools.SAMRecord;
+import org.broadinstitute.gatk.utils.exceptions.GATKException;
+import htsjdk.variant.variantcontext.Genotype;
+
+import java.util.*;
+
+/**
+ *
+ */
+public class SampleDB {
+    /**
+     * This is where Sample objects are stored. Samples are usually accessed by their ID, which is unique, so
+     * this is stored as a HashMap.
+     */
+    private final HashMap<String, Sample> samples = new HashMap<String, Sample>();
+
+    /**
+     * Constructor takes both a SAM header and sample files because the two must be integrated.
+     */
+    public SampleDB() {
+
+    }
+
+    /**
+     * Protected function to add a single sample to the database
+     *
+     * @param sample to be added
+     */
+    protected SampleDB addSample(Sample sample) {
+        Sample prev = samples.get(sample.getID());
+        if ( prev != null )
+            sample = Sample.mergeSamples(prev, sample);
+        samples.put(sample.getID(), sample);
+        return this;
+    }
+
+    // --------------------------------------------------------------------------------
+    //
+    // Functions for getting a sample from the DB
+    //
+    // --------------------------------------------------------------------------------
+
+    /**
+     * Get a sample by its ID
+     * If an alias is passed in, return the main sample object 
+     * @param id
+     * @return sample Object with this ID, or null if this does not exist
+     */
+    public Sample getSample(String id) {
+        return samples.get(id);
+    }
+
+    /**
+     *
+     * @param read
+     * @return sample Object with this ID, or null if this does not exist
+     */
+    public Sample getSample(final SAMRecord read) {
+        return getSample(read.getReadGroup());
+    }
+
+    /**
+     *
+     * @param rg
+     * @return sample Object with this ID, or null if this does not exist
+     */
+    public Sample getSample(final SAMReadGroupRecord rg) {
+        return getSample(rg.getSample());
+    }
+
+    /**
+     * @param g Genotype
+     * @return sample Object with this ID, or null if this does not exist
+     */
+    public Sample getSample(final Genotype g) {
+        return getSample(g.getSampleName());
+    }
+
+    // --------------------------------------------------------------------------------
+    //
+    // Functions for accessing samples in the DB
+    //
+    // --------------------------------------------------------------------------------
+
+    /**
+     * Get number of sample objects
+     * @return size of samples map
+     */
+    public int sampleCount() {
+        return samples.size();
+    }
+
+    public Set<Sample> getSamples() {
+        return new LinkedHashSet<>(samples.values());
+    }
+
+    public Collection<String> getSampleNames() {
+        return Collections.unmodifiableCollection(samples.keySet());
+    }
+
+
+    /**
+     * Takes a collection of sample names and returns their corresponding sample objects
+     * Note that, since a set is returned, if you pass in a list with duplicates names there will not be any duplicates in the returned set
+     * @param sampleNameList Set of sample names
+     * @return Corresponding set of samples
+     */
+    public Set<Sample> getSamples(Collection<String> sampleNameList) {
+        HashSet<Sample> samples = new HashSet<Sample>();
+        for (String name : sampleNameList) {
+            try {
+                samples.add(getSample(name));
+            }
+            catch (Exception e) {
+                throw new GATKException("Could not get sample with the following ID: " + name, e);
+            }
+        }
+        return samples;
+    }
+
+    // --------------------------------------------------------------------------------
+    //
+    // Higher level pedigree functions
+    //
+    // --------------------------------------------------------------------------------
+
+    /**
+     * Returns a sorted set of the family IDs in all samples (excluding null ids)
+     * @return
+     */
+    public final Set<String> getFamilyIDs() {
+        return getFamilies().keySet();
+    }
+
+    /**
+     * Returns a map from family ID -> set of family members for all samples with
+     * non-null family ids
+     *
+     * @return
+     */
+    public final Map<String, Set<Sample>> getFamilies() {
+        return getFamilies(null);
+    }
+
+    /**
+     * Returns a map from family ID -> set of family members for all samples in sampleIds with
+     * non-null family ids
+     *
+     * @param sampleIds - all samples to include. If null is passed then all samples are returned.
+     * @return
+     */
+    public final Map<String, Set<Sample>> getFamilies(Collection<String> sampleIds) {
+        final Map<String, Set<Sample>> families = new TreeMap<String, Set<Sample>>();
+
+        for ( final Sample sample : samples.values() ) {
+            if(sampleIds == null || sampleIds.contains(sample.getID())){
+                final String famID = sample.getFamilyID();
+                if ( famID != null ) {
+                    if ( ! families.containsKey(famID) )
+                        families.put(famID, new TreeSet<Sample>());
+                    families.get(famID).add(sample);
+                }
+            }
+        }
+        return families;
+    }
+
+    /**
+     * Returns all the trios present in the sample database. The strictOneChild parameter determines
+     * whether multiple children of the same parents resolve to multiple trios, or are excluded
+     * @param strictOneChild - exclude pedigrees with >1 child for parental pair
+     * @return - all of the mother+father=child triplets, subject to strictOneChild
+     */
+    public final Set<Trio> getTrios(boolean strictOneChild) {
+        Set<Trio> trioSet = new HashSet<Trio>();
+        for ( String familyString : getFamilyIDs() ) {
+            Set<Sample> family = getFamily(familyString);
+            for ( Sample sample : family) {
+                if ( sample.getParents().size() == 2 ) {
+                    Trio trio = new Trio(sample.getMother(),sample.getFather(),sample);
+                    trioSet.add(trio);
+                }
+            }
+        }
+
+        if ( strictOneChild )
+            trioSet = removeTriosWithSameParents(trioSet);
+
+        return trioSet;
+    }
+
+    /**
+     * Returns all the trios present in the db. See getTrios(boolean strictOneChild)
+     * @return all the trios present in the samples db.
+     */
+    public final Set<Trio> getTrios() {
+        return getTrios(false);
+    }
+
+    /**
+     * Subsets a set of trios to only those with nonmatching founders. If two (or more) trio objects have
+     * the same mother and father, then both (all) are removed from the returned set.
+     * @param trios - a set of Trio objects
+     * @return those subset of Trio objects in the input set with nonmatching founders
+     */
+    private Set<Trio> removeTriosWithSameParents(final Set<Trio> trios) {
+        Set<Trio> filteredTrios = new HashSet<Trio>();
+        filteredTrios.addAll(trios);
+        Set<Trio> triosWithSameParents = new HashSet<Trio>();
+        for ( Trio referenceTrio : filteredTrios ) {
+            for ( Trio compareTrio : filteredTrios ) {
+                if ( referenceTrio != compareTrio &&
+                     referenceTrio.getFather().equals(compareTrio.getFather()) &&
+                     referenceTrio.getMother().equals(compareTrio.getMother()) ) {
+                    triosWithSameParents.add(referenceTrio);
+                    triosWithSameParents.add(compareTrio);
+                }
+            }
+        }
+        filteredTrios.removeAll(triosWithSameParents);
+        return filteredTrios;
+    }
+
+    /**
+     * Returns the set of all children that have both of their parents.
+     * Note that if a family is composed of more than 1 child, each child is
+     * returned.
+     * @return - all the children that have both of their parents
+     * @deprecated - getTrios() replaces this function
+     */
+    @Deprecated
+    public final Set<Sample> getChildrenWithParents(){
+        return getChildrenWithParents(false);
+    }
+
+    /**
+     * Returns the set of all children that have both of their parents.
+     * Note that if triosOnly = false, a family is composed of more than 1 child, each child is
+     * returned.
+     *
+     * This method can be used wherever trios are needed
+     *
+     * @param triosOnly - if set to true, only strict trios are returned
+     * @return - all the children that have both of their parents
+     * @deprecated - getTrios(boolean strict) replaces this function
+     * @bug -- does not work for extracting multiple generations of trios, e.g.
+     * ..........Mom1------Dad1
+     * ................|
+     * ..............Child1--------Mom2
+     * .......................|
+     * .....................Child2
+     */
+    @Deprecated
+    public final Set<Sample> getChildrenWithParents(boolean triosOnly) {
+
+        Map<String, Set<Sample>> families = getFamilies();
+        final Set<Sample> childrenWithParents = new HashSet<Sample>();
+        Iterator<Sample> sampleIterator;
+
+        for ( Set<Sample> familyMembers: families.values() ) {
+            if(triosOnly && familyMembers.size() != 3)
+                continue;
+
+            sampleIterator = familyMembers.iterator();
+            Sample sample;
+            while(sampleIterator.hasNext()){
+                sample = sampleIterator.next();
+                if(sample.getParents().size() == 2 && familyMembers.containsAll(sample.getParents()))
+                    childrenWithParents.add(sample);
+            }
+
+        }
+        return childrenWithParents;
+    }
+
+    /**
+     * Return all samples with a given family ID
+     * @param familyId
+     * @return
+     */
+    public Set<Sample> getFamily(String familyId) {
+        return getFamilies().get(familyId);
+    }
+
+    /**
+     * Returns all children of a given sample
+     * See note on the efficiency of getFamily() - since this depends on getFamily() it's also not efficient
+     * @param sample
+     * @return
+     */
+    public Set<Sample> getChildren(Sample sample) {
+        final HashSet<Sample> children = new HashSet<Sample>();
+        for ( final Sample familyMember : getFamily(sample.getFamilyID())) {
+            if ( familyMember.getMother() == sample || familyMember.getFather() == sample ) {
+                children.add(familyMember);
+            }
+        }
+        return children;
+    }
+
+    public Set<String> getFounderIds(){
+        Set<String> founders = new HashSet<String>();
+        for(Sample sample : getSamples()){
+            if(sample.getParents().size()<1)
+                founders.add(sample.getID());
+
+        }
+        return founders;
+    }
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/samples/SampleDBBuilder.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/samples/SampleDBBuilder.java
new file mode 100644
index 0000000..2b5427f
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/samples/SampleDBBuilder.java
@@ -0,0 +1,161 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.samples;
+
+import htsjdk.samtools.SAMFileHeader;
+import org.broadinstitute.gatk.engine.GenomeAnalysisEngine;
+import org.broadinstitute.gatk.utils.exceptions.UserException;
+import org.broadinstitute.gatk.utils.sam.ReadUtils;
+
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.util.*;
+
+/**
+ *
+ */
+public class SampleDBBuilder {
+    PedigreeValidationType validationStrictness;
+    final SampleDB sampleDB = new SampleDB();
+    final GenomeAnalysisEngine engine;
+
+    Set<Sample> samplesFromDataSources = new HashSet<Sample>();
+    Set<Sample> samplesFromPedigrees = new HashSet<Sample>();
+
+    /** for testing only */
+    protected SampleDBBuilder(PedigreeValidationType validationStrictness) {
+        engine = null;
+        this.validationStrictness = validationStrictness;
+    }
+
+    /**
+     * Constructor takes both a SAM header and sample files because the two must be integrated.
+     */
+    public SampleDBBuilder(GenomeAnalysisEngine engine, PedigreeValidationType validationStrictness) {
+        this.engine = engine;
+        this.validationStrictness = validationStrictness;
+    }
+
+    /**
+     * Hallucinates sample objects for all the samples in the SAM file and stores them
+     */
+    public SampleDBBuilder addSamplesFromSAMHeader(final SAMFileHeader header) {
+        addSamplesFromSampleNames(ReadUtils.getSAMFileSamples(header));
+        return this;
+    }
+
+    public SampleDBBuilder addSamplesFromSampleNames(final Collection<String> sampleNames) {
+        for (final String sampleName : sampleNames) {
+            if (sampleDB.getSample(sampleName) == null) {
+                final Sample newSample = new Sample(sampleName, sampleDB);
+                sampleDB.addSample(newSample);
+                samplesFromDataSources.add(newSample); // keep track of data source samples
+            }
+        }
+        return this;
+    }
+
+    public SampleDBBuilder addSamplesFromPedigreeFiles(final List<File> pedigreeFiles) {
+        for (final File pedFile : pedigreeFiles) {
+            Collection<Sample> samples = addSamplesFromPedigreeArgument(pedFile);
+            samplesFromPedigrees.addAll(samples);
+        }
+
+        return this;
+    }
+
+    public SampleDBBuilder addSamplesFromPedigreeStrings(final List<String> pedigreeStrings) {
+        for (final String pedString : pedigreeStrings) {
+            Collection<Sample> samples = addSamplesFromPedigreeArgument(pedString);
+            samplesFromPedigrees.addAll(samples);
+        }
+
+        return this;
+    }
+
+    /**
+     * Parse one sample file and integrate it with samples that are already there
+     * Fail quickly if we find any errors in the file
+     */
+    private Collection<Sample> addSamplesFromPedigreeArgument(File sampleFile) {
+        final PedReader reader = new PedReader();
+
+        try {
+            return reader.parse(sampleFile, getMissingFields(sampleFile), sampleDB);
+        } catch ( FileNotFoundException e ) {
+            throw new UserException.CouldNotReadInputFile(sampleFile, e);
+        }
+    }
+
+    private Collection<Sample> addSamplesFromPedigreeArgument(final String string) {
+        final PedReader reader = new PedReader();
+        return reader.parse(string, getMissingFields(string), sampleDB);
+    }
+
+    public SampleDB getFinalSampleDB() {
+        validate();
+        return sampleDB;
+    }
+
+    public EnumSet<PedReader.MissingPedField> getMissingFields(final Object engineArg) {
+        if ( engine == null )
+            return EnumSet.noneOf(PedReader.MissingPedField.class);
+        else {
+            final List<String> posTags = engine.getTags(engineArg).getPositionalTags();
+            return PedReader.parseMissingFieldTags(engineArg, posTags);
+        }
+    }
+
+    // --------------------------------------------------------------------------------
+    //
+    // Validation
+    //
+    // --------------------------------------------------------------------------------
+
+    protected final void validate() {
+        validatePedigreeIDUniqueness();
+        if ( validationStrictness != PedigreeValidationType.SILENT ) {
+            // check that samples in data sources are all annotated, if anything is annotated
+            if ( ! samplesFromPedigrees.isEmpty() && ! samplesFromDataSources.isEmpty() ) {
+                final Set<String> sampleNamesFromPedigrees = new HashSet<String>();
+                for ( final Sample pSample : samplesFromPedigrees )
+                    sampleNamesFromPedigrees.add(pSample.getID());
+
+                for ( final Sample dsSample : samplesFromDataSources )
+                    if ( ! sampleNamesFromPedigrees.contains(dsSample.getID()) )
+                        throw new UserException("Sample " + dsSample.getID() + " found in data sources but not in pedigree files with STRICT pedigree validation");
+            }
+        }
+    }
+
+    private void validatePedigreeIDUniqueness() {
+        Set<String> pedigreeIDs = new HashSet<String>();
+        for ( Sample sample : samplesFromPedigrees ) {
+            pedigreeIDs.add(sample.getID());
+        }
+        assert pedigreeIDs.size() == samplesFromPedigrees.size() : "The number of sample IDs extracted from the pedigree does not equal the number of samples in the pedigree. Is a sample associated with multiple families?";
+    }
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/samples/Trio.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/samples/Trio.java
new file mode 100644
index 0000000..3bbf7bf
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/samples/Trio.java
@@ -0,0 +1,70 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.samples;
+
+/**
+ * A class for imposing a trio structure on three samples; a common paradigm
+ *
+ * todo -- there should probably be an interface or abstract class "Pedigree" that generalizes the notion of
+ *      -- imposing structure on samples. But given how complex pedigrees can quickly become, it's not
+ *      -- clear the best way to do this.
+ */
+public class Trio {
+    private Sample mother;
+    private Sample father;
+    private Sample child;
+
+    public Trio(Sample mom, Sample dad, Sample spawn) {
+        assert mom.getID().equals(spawn.getMaternalID()) && dad.getID().equals(spawn.getPaternalID()) : "Samples passed to trio constructor do not form a trio";
+        mother = mom;
+        father = dad;
+        child = spawn;
+    }
+
+    public Sample getMother() {
+        return mother;
+    }
+
+    public String getMaternalID() {
+        return mother.getID();
+    }
+
+    public Sample getFather() {
+        return father;
+    }
+
+    public String getPaternalID() {
+        return father.getID();
+    }
+
+    public Sample getChild() {
+        return child;
+    }
+
+    public String getChildID() {
+        return child.getID();
+    }
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/traversals/ArtificialReadsTraversal.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/traversals/ArtificialReadsTraversal.java
new file mode 100644
index 0000000..91ba863
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/traversals/ArtificialReadsTraversal.java
@@ -0,0 +1,142 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.traversals;
+
+import htsjdk.samtools.SAMFileHeader;
+import htsjdk.samtools.SAMRecord;
+import org.apache.log4j.Logger;
+import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
+import org.broadinstitute.gatk.engine.datasources.providers.ShardDataProvider;
+import org.broadinstitute.gatk.engine.walkers.ReadWalker;
+import org.broadinstitute.gatk.engine.walkers.Walker;
+import org.broadinstitute.gatk.utils.sam.ArtificialPatternedSAMIterator;
+import org.broadinstitute.gatk.utils.sam.ArtificialSAMUtils;
+import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
+
+
+/*
+ * Copyright (c) 2009 The Broad Institute
+ *
+ * Permission is hereby granted, free of charge, to any person
+ * obtaining a copy of this software and associated documentation
+ * files (the "Software"), to deal in the Software without
+ * restriction, including without limitation the rights to use,
+ * copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the
+ * Software is furnished to do so, subject to the following
+ * conditions:
+ *
+ * The above copyright notice and this permission notice shall be
+ * included in all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+ * OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+ * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+ * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+ * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+ * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+ * OTHER DEALINGS IN THE SOFTWARE.
+ */
+
+/**
+ * @author aaron
+ *
+ * this class acts as a fake reads traversal engine for testing out reads based traversals.
+ */
+public class ArtificialReadsTraversal<M,T> extends TraversalEngine<M,T,Walker<M,T>,ShardDataProvider> {
+
+    public int startingChr = 1;
+    public int endingChr = 5;
+    public int readsPerChr = 100;
+    public int unMappedReads = 1000;
+    private int DEFAULT_READ_LENGTH = ArtificialSAMUtils.DEFAULT_READ_LENGTH;
+    private ArtificialPatternedSAMIterator iter;
+    /** our log, which we want to capture anything from this class */
+    protected static Logger logger = Logger.getLogger(ArtificialReadsTraversal.class);
+
+    /** Creates a new, uninitialized ArtificialReadsTraversal */
+    public ArtificialReadsTraversal() {
+    }
+
+    // what read ordering are we using
+    private ArtificialPatternedSAMIterator.PATTERN readOrder = ArtificialPatternedSAMIterator.PATTERN.IN_ORDER_READS;
+
+
+    /**
+     * set the read ordering of the reads given to the walker
+     *
+     * @param readOrdering
+     */
+    public void setReadOrder( ArtificialPatternedSAMIterator.PATTERN readOrdering ) {
+        readOrder = readOrdering;
+    }
+
+    @Override
+    public String getTraversalUnits() {
+        return "reads";
+    }
+
+    /**
+     * Traverse by reads, given the data and the walker
+     *
+     * @param walker       the walker to traverse with
+     * @param dataProvider the provider of the reads data
+     * @param sum          the value of type T, specified by the walker, to feed to the walkers reduce function
+     *
+     * @return the reduce variable of the read walker
+     */
+    public T traverse( Walker<M, T> walker,
+                       ShardDataProvider dataProvider,
+                       T sum ) {
+
+        if (!( walker instanceof ReadWalker ))
+            throw new IllegalArgumentException("Walker isn't a read walker!");
+
+        ReadWalker<M, T> readWalker = (ReadWalker<M, T>) walker;
+        SAMFileHeader header = ArtificialSAMUtils.createArtificialSamHeader(( endingChr - startingChr ) + 1, startingChr, readsPerChr + DEFAULT_READ_LENGTH);
+        iter = new ArtificialPatternedSAMIterator(this.startingChr,
+                this.endingChr,
+                this.readsPerChr,
+                this.unMappedReads,
+                header,
+                this.readOrder);
+
+        // while we still have more reads
+        for (SAMRecord read : iter) {
+
+            // an array of characters that represent the reference
+            ReferenceContext refSeq = null;
+
+            final boolean keepMeP = readWalker.filter(refSeq, (GATKSAMRecord) read);
+            if (keepMeP) {
+                M x = readWalker.map(refSeq, (GATKSAMRecord) read, null);  // TODO: fix me at some point, it would be nice to fake out ROD data too
+                sum = readWalker.reduce(x, sum);
+            }
+        }
+        return sum;
+    }
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/traversals/TAROrderedReadCache.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/traversals/TAROrderedReadCache.java
new file mode 100644
index 0000000..858a557
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/traversals/TAROrderedReadCache.java
@@ -0,0 +1,168 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.traversals;
+
+import org.broadinstitute.gatk.utils.downsampling.Downsampler;
+import org.broadinstitute.gatk.utils.downsampling.ReservoirDownsampler;
+import org.broadinstitute.gatk.utils.sam.AlignmentStartComparator;
+import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+
+/**
+ * Subsystem to track a list of all reads currently live in the TraverseActiveRegions system,
+ * while limiting the total number of reads to a maximum capacity.
+ *
+ * Reads are kept verbatim, in the order they were added, while the count stays at or below
+ * the capacity.  The first time the capacity is exceeded, a ReservoirDownsampler takes over
+ * and discards reads at random to enforce the bound.
+ *
+ * User: depristo
+ * Date: 4/7/13
+ * Time: 11:23 AM
+ */
+public class TAROrderedReadCache {
+    /** Cap on the initial ArrayList allocation so a huge capacity doesn't preallocate a huge array. */
+    private static final int MAX_INITIAL_CACHE_SIZE = 10000;
+
+    private final int maxCapacity;
+
+    // Invariant: exactly one of these two is non-null at any moment.  sortedReads holds the
+    // reads while we are at or under capacity; reservoir takes over once we exceed it.
+    private ArrayList<GATKSAMRecord> sortedReads;
+    private Downsampler<GATKSAMRecord> reservoir;
+
+    /**
+     * Create a new empty ReadCache
+     * @param maxCapacity the max capacity of the read cache.
+     */
+    public TAROrderedReadCache( final int maxCapacity ) {
+        if ( maxCapacity < 0 ) throw new IllegalArgumentException("maxCapacity must be >= 0 but got " + maxCapacity);
+        this.maxCapacity = maxCapacity;
+
+        resetUndownsampledCache();
+        this.reservoir = null;
+    }
+
+    /**
+     * (Re)allocate the plain cache used while we hold no more than maxCapacity reads.
+     */
+    private void resetUndownsampledCache() {
+        sortedReads = new ArrayList<>(Math.min(maxCapacity + 1, MAX_INITIAL_CACHE_SIZE));
+    }
+
+    /**
+     * Hand every cached read to a fresh ReservoirDownsampler and use it from this point on.
+     * Called when the plain cache overflows and we need to start discarding reads.  Because
+     * the ReservoirDownsampler doesn't preserve relative ordering, pop operations become
+     * expensive afterwards, as they require an O(n log n) sort.
+     */
+    private void switchToDownsampling() {
+        reservoir = new ReservoirDownsampler<>(maxCapacity, false);
+        reservoir.submit(sortedReads);
+        sortedReads = null; // cheaper than the O(n) clear() method
+    }
+
+    /**
+     * What's the maximum number of reads we'll store in the cache?
+     * @return the capacity bound, >= 0
+     */
+    public int getMaxCapacity() {
+        return maxCapacity;
+    }
+
+    /**
+     * Add a single read to this cache.  Assumed to be in sorted order w.r.t. the previously added reads
+     * @param read a read to add
+     */
+    public void add( final GATKSAMRecord read ) {
+        if ( read == null ) throw new IllegalArgumentException("Read cannot be null");
+
+        if ( reservoir != null ) {
+            reservoir.submit(read);
+            return;
+        }
+
+        sortedReads.add(read);
+        if ( sortedReads.size() > maxCapacity ) {
+            // No more room in the plain cache -- time to start downsampling
+            switchToDownsampling();
+        }
+    }
+
+    /**
+     * Add a collection of reads to this cache.  Assumed to be in sorted order w.r.t. the previously added reads and each other
+     * @param reads a collection of reads to add
+     */
+    public void addAll( final List<GATKSAMRecord> reads ) {
+        if ( reads == null ) throw new IllegalArgumentException("Reads cannot be null");
+        for ( final GATKSAMRecord read : reads ) {
+            add(read);
+        }
+    }
+
+    /**
+     * How many reads are currently in the cache?
+     * @return a non-negative integer
+     */
+    public int size() {
+        if ( reservoir != null ) {
+            return reservoir.size();
+        }
+        return sortedReads.size();
+    }
+
+    /**
+     * How many reads were discarded since the last call to popCurrentReads
+     *
+     * @return number of items discarded during downsampling since last pop operation
+     */
+    public int getNumDiscarded() {
+        if ( reservoir == null ) {
+            return 0;
+        }
+        return reservoir.getNumberOfDiscardedItems();
+    }
+
+    /**
+     * Removes all reads currently in the cache, and returns them in sorted order (w.r.t. alignmentStart)
+     *
+     * Flushes this cache, so after this call the cache will contain no reads, and we'll be in the same
+     * initial state as the constructor would put us in, with a non-null plain cache and a null
+     * downsampler.
+     *
+     * @return a list of GATKSAMRecords in this cache
+     */
+    public List<GATKSAMRecord> popCurrentReads() {
+        final List<GATKSAMRecord> popped;
+
+        if ( reservoir != null ) {
+            // The reservoir doesn't guarantee relative ordering, so restore it here before
+            // handing the reads back; we then revert to the plain cache until we overflow again.
+            popped = reservoir.consumeFinalizedItems();  // avoid making a copy here
+            Collections.sort(popped, new AlignmentStartComparator());
+            reservoir = null;
+        }
+        else {
+            popped = sortedReads;  // no copy needed, since we allocate a fresh cache below
+        }
+
+        resetUndownsampledCache();
+        return popped;
+    }
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/traversals/TraversalEngine.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/traversals/TraversalEngine.java
new file mode 100644
index 0000000..3671ded
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/traversals/TraversalEngine.java
@@ -0,0 +1,124 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.traversals;
+
+import org.apache.log4j.Logger;
+import org.broadinstitute.gatk.engine.GenomeAnalysisEngine;
+import org.broadinstitute.gatk.engine.ReadMetrics;
+import org.broadinstitute.gatk.engine.datasources.providers.ShardDataProvider;
+import org.broadinstitute.gatk.engine.walkers.Walker;
+import org.broadinstitute.gatk.utils.GenomeLoc;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+import org.broadinstitute.gatk.utils.progressmeter.ProgressMeter;
+
+public abstract class TraversalEngine<M,T,WalkerType extends Walker<M,T>,ProviderType extends ShardDataProvider> {
+    /** our log, which we want to capture anything from this class */
+    protected static final Logger logger = Logger.getLogger(TraversalEngine.class);
+
+    /** The engine driving this traversal; assigned in initialize() and read by subclasses. */
+    protected GenomeAnalysisEngine engine;
+
+    /** Optional meter for progress reporting; when null, printProgress() does nothing. */
+    private ProgressMeter meter;
+
+    // ----------------------------------------------------------------------------------------------------
+    //
+    // ABSTRACT METHODS
+    //
+    // ----------------------------------------------------------------------------------------------------
+
+    /**
+     * Gets the named traversal type associated with the given traversal, such as loci, reads, etc.
+     *
+     * @return A user-friendly name for the given traversal type.
+     */
+    public abstract String getTraversalUnits();
+
+    /**
+     * Run the walker over one shard's worth of data, folding each map result into sum.
+     *
+     * This method must be implemented by all traversal engines.
+     *
+     * @param walker       the walker to run with
+     * @param dataProvider the data provider that generates data given the shard
+     * @param sum          the accumulator
+     *
+     * @return an object of the reduce type
+     */
+    public abstract T traverse(WalkerType walker,
+                               ProviderType dataProvider,
+                               T sum);
+
+    /**
+     * Initialize the traversal engine.  After this point traversals can be run over the data
+     *
+     * @param engine        GenomeAnalysisEngine for this traversal; must not be null
+     * @param walker        the walker we'll traverse with (not used here; available to overrides)
+     * @param progressMeter An optional (null == optional) meter to track our progress
+     */
+    public void initialize(final GenomeAnalysisEngine engine, final Walker walker, final ProgressMeter progressMeter) {
+        if ( engine == null ) {
+            throw new ReviewedGATKException("BUG: GenomeAnalysisEngine cannot be null!");
+        }
+
+        this.engine = engine;
+        this.meter = progressMeter;
+    }
+
+    /**
+     * For testing only.  Does not initialize the progress meter
+     *
+     * @param engine the engine for this traversal
+     * @param walker the walker for this traversal
+     */
+    protected void initialize(final GenomeAnalysisEngine engine, final Walker walker) {
+        initialize(engine, walker, null);
+    }
+
+    /**
+     * Called by the MicroScheduler when all work is done and the GATK is shutting down.
+     *
+     * To be used by subclasses that need to free up resources (such as threads)
+     */
+    public void shutdown() {
+        // by default there's nothing to do
+    }
+
+    /**
+     * Update the cumulative traversal metrics according to the data in this shard
+     *
+     * @param singleTraverseMetrics read metrics object containing the information about a single shard's worth
+     *                              of data processing
+     */
+    public void updateCumulativeMetrics(final ReadMetrics singleTraverseMetrics) {
+        engine.getCumulativeMetrics().incrementMetrics(singleTraverseMetrics);
+    }
+
+    /**
+     * Forward request to notifyOfProgress
+     *
+     * Assumes that one cycle has been completed
+     *
+     * @param loc  the location
+     */
+    public void printProgress(final GenomeLoc loc) {
+        if ( meter == null ) {
+            return;
+        }
+        meter.notifyOfProgress(loc, engine.getCumulativeMetrics().getNumIterations());
+    }
+}
+
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/traversals/TraverseActiveRegions.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/traversals/TraverseActiveRegions.java
new file mode 100644
index 0000000..9eb732c
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/traversals/TraverseActiveRegions.java
@@ -0,0 +1,719 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.traversals;
+
+import com.google.java.contract.Ensures;
+import com.google.java.contract.Requires;
+import org.apache.log4j.Logger;
+import org.broadinstitute.gatk.engine.GenomeAnalysisEngine;
+import org.broadinstitute.gatk.utils.contexts.AlignmentContext;
+import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
+import org.broadinstitute.gatk.engine.datasources.providers.*;
+import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
+import org.broadinstitute.gatk.engine.walkers.ActiveRegionTraversalParameters;
+import org.broadinstitute.gatk.engine.walkers.ActiveRegionWalker;
+import org.broadinstitute.gatk.engine.walkers.Walker;
+import org.broadinstitute.gatk.utils.GenomeLoc;
+import org.broadinstitute.gatk.utils.Utils;
+import org.broadinstitute.gatk.utils.activeregion.ActiveRegion;
+import org.broadinstitute.gatk.utils.activeregion.ActivityProfile;
+import org.broadinstitute.gatk.utils.activeregion.ActivityProfileState;
+import org.broadinstitute.gatk.utils.activeregion.BandPassActivityProfile;
+import org.broadinstitute.gatk.utils.nanoScheduler.NSMapFunction;
+import org.broadinstitute.gatk.utils.nanoScheduler.NSProgressFunction;
+import org.broadinstitute.gatk.utils.nanoScheduler.NSReduceFunction;
+import org.broadinstitute.gatk.utils.nanoScheduler.NanoScheduler;
+import org.broadinstitute.gatk.utils.progressmeter.ProgressMeter;
+import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
+import org.broadinstitute.gatk.utils.sam.ReadUtils;
+
+import java.io.PrintStream;
+import java.util.Collection;
+import java.util.Iterator;
+import java.util.LinkedList;
+import java.util.List;
+
+/**
+ * Implement active region traversal
+ *
+ * User: depristo
+ * Date: 1/9/13
+ * Time: 4:45 PM
+ *
+ * Live region:
+ *
+ *   The ART tracks a thing called the live region.  The live region is a position on a specific contig
+ *   of the alignment start of the last read we processed during this traversal.  Because the
+ *   read stream is sorted, future reads must occur in the live region.  Therefore the dead region
+ *   (everything to the left of the live boundary) cannot have any more read data.  The live / dead
+ *   regions are used to decide when we can safely call map on active regions, as only active regions
+ *   contained completely within the dead region (including extensions) have a complete set of read data
+ *   in the collected read list.  All of the data related to the live region is captured by the local
+ *   variable spanOfLastReadSeen
+ *
+ */
+public final class TraverseActiveRegions<M, T> extends TraversalEngine<M,T,ActiveRegionWalker<M,T>,LocusShardDataProvider> {
+    private final static boolean DEBUG = false;
+    protected final static Logger logger = Logger.getLogger(TraversalEngine.class);
+    protected final static boolean LOG_READ_CARRYING = false;
+
+    // set by the traversal
+    private boolean walkerHasPresetRegions = false;
+    private int activeRegionExtension = -1;  // -1 until initialize() reads walker/annotation values
+    private int maxRegionSize = -1;
+    private int minRegionSize = -1;
+
+    // regions awaiting read collection / processing, in coordinate order
+    private final LinkedList<ActiveRegion> workQueue = new LinkedList<>();
+
+    // capacity-bounded cache of reads collected so far for pending regions
+    private TAROrderedReadCache myReads = null;
+
+    private GenomeLoc lastRegionProcessed = null;  // last window handled, used to detect contig changes
+    private GenomeLoc spanOfLastReadSeen = null;   // the "live region" boundary (see class comment)
+    private ActivityProfile activityProfile = null;
+    int maxReadsInMemory = 0;                      // high-water mark of myReads.size()
+    ActiveRegionWalker<M, T> walker;
+
+    final NanoScheduler<MapData, M, T> nanoScheduler;
+
+    /**
+     * Data to use in the ActiveRegionWalker.map function produced by the NanoScheduler input iterator
+     *
+     * A simple pair of a finalized active region and the reference ordered data (ROD)
+     * tracker covering its span.
+     */
+    private static class MapData {
+        public ActiveRegion activeRegion;   // the region ready to be mapped
+        public RefMetaDataTracker tracker;  // ROD data spanning the region
+
+        private MapData(ActiveRegion activeRegion, RefMetaDataTracker tracker) {
+            this.activeRegion = activeRegion;
+            this.tracker = tracker;
+        }
+    }
+
+    /**
+     * Create a single threaded active region traverser
+     *
+     * Equivalent to {@code TraverseActiveRegions(1)}.
+     */
+    public TraverseActiveRegions() {
+        this(1);
+    }
+
+    /**
+     * Create an active region traverser that uses nThreads for getting its work done
+     * @param nThreads number of threads for the underlying NanoScheduler
+     */
+    public TraverseActiveRegions(final int nThreads) {
+        nanoScheduler = new NanoScheduler<>(nThreads);
+        // Report progress each time the scheduler finishes work for a region
+        nanoScheduler.setProgressFunction(new NSProgressFunction<MapData>() {
+            @Override
+            public void progress(MapData lastActiveRegion) {
+                if ( lastActiveRegion != null )
+                    // note, need to use getStopLocation so we don't give an interval to ProgressMeterDaemon
+                    printProgress(lastActiveRegion.activeRegion.getLocation().getStopLocation());
+            }
+        });
+    }
+
+    /**
+     * Have the debugging output streams been initialized already?
+     *
+     * We have to do lazy initialization because when the initialize() function is called
+     * the streams aren't yet initialized in the GATK walker.
+     */
+    // NOTE(review): nothing in the visible portion of this class reads or writes this flag --
+    // confirm whether it is still needed or is vestigial
+    private boolean streamsInitialized = false;
+
+    @Override
+    public void initialize(GenomeAnalysisEngine engine, Walker walker, ProgressMeter progressMeter) {
+        super.initialize(engine, walker, progressMeter);
+
+        this.walker = (ActiveRegionWalker<M,T>)walker;
+        // Requesting extended reads without non-primary reads is an inconsistent configuration
+        if ( this.walker.wantsExtendedReads() && ! this.walker.wantsNonPrimaryReads() ) {
+            throw new IllegalArgumentException("Active region walker " + this.walker + " requested extended events but not " +
+                    "non-primary reads, an inconsistent state.  Please modify the walker");
+        }
+
+        // Region sizing: walker-level overrides (non-null fields) win; otherwise fall back to
+        // the @ActiveRegionTraversalParameters annotation on the walker class
+        ActiveRegionTraversalParameters annotation = walker.getClass().getAnnotation(ActiveRegionTraversalParameters.class);
+        this.activeRegionExtension = this.walker.activeRegionExtension == null ? annotation.extension() : this.walker.activeRegionExtension;
+        this.maxRegionSize = this.walker.activeRegionMaxSize == null ? annotation.maxRegion() : this.walker.activeRegionMaxSize;
+        this.minRegionSize = annotation.minRegion();
+        final double bandPassSigma = this.walker.bandPassSigma == null ? annotation.bandPassSigma() : this.walker.bandPassSigma;
+        walkerHasPresetRegions = this.walker.hasPresetActiveRegions();
+
+        activityProfile = new BandPassActivityProfile(engine.getGenomeLocParser(), engine.getIntervals(), this.walker.maxProbPropagationDistance, this.walker.activeProbThreshold,
+                BandPassActivityProfile.MAX_FILTER_SIZE, bandPassSigma);
+
+        // Bound the read cache by the smaller of (per-sample cap * number of samples) and the absolute total cap
+        final int maxReadsAcrossSamples = annotation.maxReadsToHoldInMemoryPerSample() * ReadUtils.getSAMFileSamples(engine.getSAMFileHeader()).size();
+        final int maxReadsToHoldInMemory = Math.min(maxReadsAcrossSamples, annotation.maxReadsToHoldTotal());
+        myReads = new TAROrderedReadCache(maxReadsToHoldInMemory);
+    }
+
+    // -------------------------------------------------------------------------------------
+    //
+    // Utility functions
+    //
+    // -------------------------------------------------------------------------------------
+
+    /**
+     * Load in the preset regions for contig into workQueue
+     *
+     * Should be called before starting to process work on contig
+     *
+     * Can only be called when walkerHasPresetRegions is true or an IllegalStateException will be thrown
+     *
+     * @param contig the contig we are about to process
+     */
+    protected void loadPresetRegionsForContigToWorkQueue(final String contig) {
+        if ( ! walkerHasPresetRegions ) {
+            throw new IllegalStateException("only appropriate to call when walker has preset regions");
+        }
+
+        // Queue one pre-marked-active region per preset interval overlapping this contig
+        final GenomeLoc wholeContig = engine.getGenomeLocParser().createOverEntireContig(contig);
+        for ( final GenomeLoc presetLoc : this.walker.getPresetActiveRegions().getOverlapping(wholeContig) ) {
+            workQueue.add(new ActiveRegion(presetLoc, null, true, engine.getGenomeLocParser(), getActiveRegionExtension()));
+        }
+    }
+
+    /** @return the number of bases by which each active region is extended on each side */
+    protected int getActiveRegionExtension() {
+        return activeRegionExtension;
+    }
+
+    /** @return the maximum size, in bp, an active region is allowed to grow to */
+    protected int getMaxRegionSize() {
+        return maxRegionSize;
+    }
+
+    /** @return the minimum size, in bp, of an active region */
+    protected int getMinRegionSize() {
+        return minRegionSize;
+    }
+
+    /** Progress for this traversal is reported in units of active regions. */
+    @Override
+    public String getTraversalUnits() {
+        return "active regions";
+    }
+
+    /** Short human-readable name of this traversal, used in logs. */
+    @Override
+    public String toString() {
+        return "TraverseActiveRegions";
+    }
+
+    /**
+     * Is the loc outside of the intervals being requested for processing by the GATK?
+     *
+     * @param loc the location to test
+     * @return true only when engine intervals are defined and loc overlaps none of them
+     */
+    protected boolean outsideEngineIntervals(final GenomeLoc loc) {
+        if ( engine.getIntervals() == null ) {
+            return false;  // no interval restriction was requested, so nothing is "outside"
+        }
+        return ! engine.getIntervals().overlaps(loc);
+    }
+
+    // -------------------------------------------------------------------------------------
+    //
+    // Actual traverse function
+    //
+    // -------------------------------------------------------------------------------------
+
+    /**
+     * Did read appear in the last shard?
+     *
+     * When we transition across shard boundaries we see duplicate reads because
+     * each shard contains the reads that *overlap* the shard.  So if we just finished
+     * shard 1-1000 and are now in 1001-2000 we'll see duplicate reads from 1001
+     * that overlapped 1-1000.  This function tests read to determine if we would have
+     * seen it before by asking if read.getAlignmentStart() is less than the
+     * stop position of the last seen read at the start of the traversal.  The reason
+     * we need to use the location of the last read at the start of the traversal
+     * is that we update the lastRead during the traversal, and we only want to filter
+     * out reads whose start is before the last read of the previous shard, not the
+     * current shard.
+     *
+     * NOTE(review): the code below actually compares against getStart() of the last seen
+     * read's span (inclusively, via {@code <=}), not its stop position as described above --
+     * confirm which of the two the implementation is meant to use.
+     *
+     * @param locOfLastReadAtTraversalStart the location of the last read seen at the start of the traversal
+     * @param read the read we want to test if it's already been seen in the last shard
+     * @return true if read would have appeared in the last shard, false otherwise
+     */
+    @Requires({"read != null"})
+    private boolean appearedInLastShard(final GenomeLoc locOfLastReadAtTraversalStart, final GATKSAMRecord read) {
+        if ( locOfLastReadAtTraversalStart == null )
+            // we're in the first shard, so obviously the answer is no
+            return false;
+        else {
+            // otherwise check to see if the alignment occurred in the previous shard
+            return read.getAlignmentStart() <= locOfLastReadAtTraversalStart.getStart()
+                    // we're on the same contig
+                    && read.getReferenceIndex() == locOfLastReadAtTraversalStart.getContigIndex();
+        }
+
+    }
+
+    /**
+     * Traverse the active regions of one shard, feeding finalized regions through the
+     * NanoScheduler to the walker's map/reduce functions.
+     *
+     * @param walker       the active region walker to run
+     * @param dataProvider the locus shard data provider for this shard
+     * @param sum          the accumulated reduce value so far
+     * @return the reduce value after processing every region finalized within this shard
+     */
+    @Override
+    public T traverse( final ActiveRegionWalker<M,T> walker,
+                       final LocusShardDataProvider dataProvider,
+                       T sum) {
+        if ( LOG_READ_CARRYING || logger.isDebugEnabled() ) {
+            logger.info(String.format("TraverseActiveRegions.traverse: Shard is %s", dataProvider));
+        }
+
+        nanoScheduler.setDebug(false);
+
+        // Hand the scheduler a lazy source of finalized regions plus the walker's map/reduce adapters
+        final Iterator<MapData> regionSource = new ActiveRegionIterator(dataProvider);
+        return nanoScheduler.execute(regionSource, new TraverseActiveRegionMap(), sum, new TraverseActiveRegionReduce());
+    }
+
+    private class ActiveRegionIterator implements Iterator<MapData> {
+        private final LocusShardDataProvider dataProvider;
+        private LinkedList<MapData> readyActiveRegions = new LinkedList<>();
+        private boolean done = false;
+        private final LocusView locusView;
+        private final LocusReferenceView referenceView;
+        private final GenomeLoc locOfLastReadAtTraversalStart;
+        private final IntervalReferenceOrderedView referenceOrderedDataView;
+        private final GenomeLoc currentWindow;
+        private final boolean processRemainingActiveRegions;
+
+        public ActiveRegionIterator( final LocusShardDataProvider dataProvider ) {
+            this.dataProvider = dataProvider;
+            locusView = new AllLocusView(dataProvider);
+            referenceView = new LocusReferenceView( walker, dataProvider );
+
+            // The data shard may carry a number of locations to process (due to being indexed together).
+            // This value is just the interval we are processing within the entire provider
+            currentWindow = dataProvider.getLocus();
+            final int currentWindowPos = dataProvider.getShard().getGenomeLocs().indexOf(currentWindow);
+            if ( currentWindowPos == -1 ) throw new IllegalStateException("Data provider " + dataProvider + " didn't have our current window in it " + currentWindow);
+            processRemainingActiveRegions = currentWindowPos == dataProvider.getShard().getGenomeLocs().size() - 1;
+
+            // the rodSpan covers all of the bases in the activity profile, including all of the bases
+            // through the current window interval.  This is because we may issue a query to get data for an
+            // active region spanning before the current interval as far back as the start of the current profile,
+            // if we have pending work to do that finalizes in this interval.
+            final GenomeLoc rodSpan = activityProfile.getSpan() == null ? currentWindow : activityProfile.getSpan().endpointSpan(currentWindow);
+            if ( ! dataProvider.getShard().getLocation().containsP(rodSpan) ) throw new IllegalStateException("Rod span " + rodSpan + " isn't contained within the data shard " + dataProvider.getShard().getLocation() + ", meaning we wouldn't get all of the data we need");
+            referenceOrderedDataView = new IntervalReferenceOrderedView( dataProvider, rodSpan );
+
+            // We keep processing while the next reference location is within the interval
+            locOfLastReadAtTraversalStart = spanOfLastSeenRead();
+
+            // load in the workQueue the present regions that span the current contig, if it's different from the last one
+            if ( walkerHasPresetRegions && ( lastRegionProcessed == null || ! currentWindow.onSameContig(lastRegionProcessed)) ) {
+                loadPresetRegionsForContigToWorkQueue(currentWindow.getContig());
+            }
+
+            // remember the last region we processed for sanity checking later
+            lastRegionProcessed = currentWindow;
+        }
+
+        @Override public void remove() { throw new UnsupportedOperationException("Cannot remove from ActiveRegionIterator"); }
+
+        @Override
+        public MapData next() {
+            return readyActiveRegions.pop();
+        }
+        @Override
+        public boolean hasNext() {
+            if ( engine.exceedsRuntimeLimit() ) // too much time has been dedicated to doing work, just stop
+                 return false;
+            if ( ! readyActiveRegions.isEmpty() )
+                return true;
+            if ( done )
+                return false;
+            else {
+
+                while( locusView.hasNext() ) {
+                    final AlignmentContext locus = locusView.next();
+                    final GenomeLoc location = locus.getLocation();
+
+                    rememberLastLocusLocation(location);
+
+                    // get all of the new reads that appear in the current pileup, and them to our list of reads
+                    // provided we haven't seen them before
+                    final Collection<GATKSAMRecord> reads = locusView.getLIBS().transferReadsFromAllPreviousPileups();
+                    for( final GATKSAMRecord read : reads ) {
+                        // note that ActiveRegionShards span entire contigs, so this check is in some
+                        // sense no longer necessary, as any read that appeared in the last shard would now
+                        // by definition be on a different contig.  However, the logic here doesn't hurt anything
+                        // and makes us robust should we decided to provide shards that don't fully span
+                        // contigs at some point in the future
+                        if ( ! appearedInLastShard(locOfLastReadAtTraversalStart, read) ) {
+                            rememberLastReadLocation(read);
+                            myReads.add(read);
+                        }
+                    }
+
+                    // skip this location -- it's not part of our engine intervals
+                    if ( outsideEngineIntervals(location) )
+                        continue;
+
+                    // we've move across some interval boundary, restart profile
+                    final boolean flushProfile = ! activityProfile.isEmpty()
+                            && ( activityProfile.getContigIndex() != location.getContigIndex()
+                            || location.getStart() != activityProfile.getStop() + 1);
+                    final List<MapData> newActiveRegions = prepActiveRegionsForProcessing(walker, flushProfile, false, referenceOrderedDataView);
+
+                    dataProvider.getShard().getReadMetrics().incrementNumIterations();
+
+                    // create reference context. Note that if we have a pileup of "extended events", the context will
+                    // hold the (longest) stretch of deleted reference bases (if deletions are present in the pileup).
+                    final ReferenceContext refContext = referenceView.getReferenceContext(location);
+
+                    // Iterate forward to get all reference ordered data covering this location
+                    final RefMetaDataTracker tracker = referenceOrderedDataView.getReferenceOrderedDataAtLocus(locus.getLocation());
+
+                    // Call the walkers isActive function for this locus and add them to the list to be integrated later
+                    addIsActiveResult(walker, tracker, refContext, locus);
+
+                    maxReadsInMemory = Math.max(myReads.size(), maxReadsInMemory);
+                    printProgress(location);
+
+                    if ( ! newActiveRegions.isEmpty() ) {
+                        readyActiveRegions.addAll(newActiveRegions);
+                        if ( DEBUG )
+                            for ( final MapData region : newActiveRegions )
+                                logger.info("Adding region to queue for processing " + region.activeRegion);
+                        return true;
+                    }
+                }
+
+                if ( processRemainingActiveRegions ) {
+                    // we've run out of stuff to process, and since shards now span entire contig boundaries
+                    // we should finalize our regions.  This allows us to continue to use our referenceOrderedDataView
+                    // which would otherwise be shutdown.  Only followed when the microschedule says that we're
+                    // inside of the last window in the current shard
+                    readyActiveRegions.addAll(prepActiveRegionsForProcessing(walker, true, true, referenceOrderedDataView));
+                }
+
+                return ! readyActiveRegions.isEmpty();
+            }
+        }
+    }
+
+    // -------------------------------------------------------------------------------------
+    //
+    // Functions to manage and interact with the live / dead zone
+    //
+    // -------------------------------------------------------------------------------------
+
+    /**
+     * Update the live region to reflect that the last read we've seen in the traversal is read
+     *
+     * Requires that sequential calls always be provided reads in coordinate sorted order
+     *
+     * @param read the last read we've seen during the traversal
+     * @throws IllegalStateException if read starts before the previously remembered read,
+     *         i.e., if the coordinate-sorted-order contract is violated
+     */
+    @Requires({"read != null"})
+    protected void rememberLastReadLocation(final GATKSAMRecord read) {
+        final GenomeLoc currentLocation = engine.getGenomeLocParser().createGenomeLoc(read);
+        if ( spanOfLastReadSeen == null )
+            spanOfLastReadSeen = currentLocation;
+        else {
+            // enforce the coordinate-sorted contract: a later call may never hand us an earlier read
+            if ( currentLocation.isBefore(spanOfLastReadSeen) )
+                throw new IllegalStateException("Updating last read seen in the traversal with read " + read + " with span " + currentLocation + " but this occurs before the previously seen read " + spanOfLastReadSeen);
+            spanOfLastReadSeen = currentLocation;
+        }
+    }
+
+    /**
+     * Update the live region to reflect that we've reached locus
+     *
+     * This function is complementary to #rememberLastReadLocation: without it, if we don't have any
+     * reads for a long time (e.g., there's no coverage) we would keep active regions around far
+     * longer than necessary.
+     *
+     * Only updates the span if it's beyond the last seen
+     *
+     * @param currentLocation the current location we've processed on the genome
+     */
+    protected void rememberLastLocusLocation(final GenomeLoc currentLocation) {
+        if ( spanOfLastReadSeen == null )
+            spanOfLastReadSeen = currentLocation;
+        else {
+            // unlike the read-based update, silently ignore locations at or before the current span
+            if ( currentLocation.isPast(spanOfLastReadSeen) )
+                spanOfLastReadSeen = currentLocation;
+        }
+    }
+
+
+    /**
+     * Get a GenomeLoc indicating the start (heading to the right) of the live ART region.
+     *
+     * @return the left-most position of the live region on the genome, or null if no read or
+     *         locus has been remembered yet (callers must handle the null case)
+     */
+    protected GenomeLoc spanOfLastSeenRead() {
+        return spanOfLastReadSeen;
+    }
+
+    /**
+     * Is the active region completely within the traversal's dead zone?
+     *
+     * @param region the region we want to test
+     * @return true if the extended location of region is completely within the current dead zone, false otherwise
+     * @throws IllegalStateException if region sits on a contig after the last seen read, which
+     *         should be impossible given the coordinate-sorted traversal contract
+     */
+    protected boolean regionCompletelyWithinDeadZone(final ActiveRegion region) {
+        // no reads seen yet => no dead zone exists yet
+        if ( spanOfLastSeenRead() == null )
+            return false;
+
+        final int contigCmp = region.getExtendedLoc().compareContigs(spanOfLastSeenRead());
+        if ( contigCmp > 0 )
+            throw new IllegalStateException("Active region " + region + " on a contig after last seen read " + spanOfLastSeenRead());
+        else {
+            // dead if on an earlier contig, or on the same contig but entirely before the live span
+            return contigCmp < 0 || region.getExtendedLoc().getStop() < spanOfLastSeenRead().getStart();
+        }
+    }
+
+    /**
+     * Is the read dead?  That is, can it no longer be in any future active region, and therefore can be discarded?
+     *
+     * read: start |--------> stop ------ stop + extension
+     * region:                      start |-----------------| end
+     *
+     * Since the regions are coming in order, read could potentially be contained in a future interval if
+     * stop + activeRegionExtension >= end.  If, on the other hand, stop + extension is < the end
+     * of this region, then we can discard it, since any future region could only include reads
+     * up to end + 1 - extension.
+     *
+     * Note that this function doesn't care about the dead zone.  We're assuming that by
+     * actually calling this function with an active region that region is already in the dead zone,
+     * so checking that the read is in the dead zone doesn't make sense.
+     *
+     * @param read the read we're testing
+     * @param activeRegion the current active region
+     * @return true if the read is dead, false otherwise
+     */
+    @Requires({"read != null", "activeRegion != null"})
+    private boolean readCannotOccurInAnyMoreActiveRegions(final GATKSAMRecord read, final ActiveRegion activeRegion) {
+        // dead if on an earlier contig, or on the same contig with no possible overlap of any future region
+        return read.getReferenceIndex() < activeRegion.getLocation().getContigIndex() ||
+                ( read.getReferenceIndex() == activeRegion.getLocation().getContigIndex()
+                        && read.getAlignmentEnd() + getActiveRegionExtension() < activeRegion.getLocation().getStop() );
+    }
+
+    // -------------------------------------------------------------------------------------
+    //
+    // Functions to write out activity profiles and active regions
+    //
+    // -------------------------------------------------------------------------------------
+
+    /**
+     * Initialize the debugging output streams (activity profile and active regions), if not done so already
+     *
+     * Idempotent: guarded by the streamsInitialized flag, so headers are written at most once
+     * per stream regardless of how many times this is called.
+     */
+    @Ensures("streamsInitialized == true")
+    private void initializeOutputStreamsIfNecessary() {
+        if ( ! streamsInitialized ) {
+            streamsInitialized = true;
+            if ( walker.activityProfileOutStream != null ) {
+                printIGVFormatHeader(walker.activityProfileOutStream, "line", "ActivityProfile");
+            }
+
+            if ( walker.activeRegionOutStream != null ) {
+                printIGVFormatHeader(walker.activeRegionOutStream, "line", "ActiveRegions");
+            }
+        }
+    }
+
+    /**
+     * Helper function to write an IGV-format track header line and column header line to out
+     *
+     * http://www.broadinstitute.org/software/igv/IGV
+     *
+     * @param out a non-null PrintStream where we'll write our header
+     * @param graphType the type of graph to show in IGV for this track
+     * @param columns the column names for this IGV track
+     */
+    @Requires({
+            "out != null",
+            "graphType != null",
+            "columns.length > 0"
+    })
+    private void printIGVFormatHeader(final PrintStream out, final String graphType, final String ... columns ) {
+        out.printf("#track graphType=%s%n", graphType);
+        out.printf("Chromosome\tStart\tEnd\tFeature\t%s%n", Utils.join("\t", columns));
+
+    }
+
+    /**
+     * Helper function to write out a IGV formatted line to out, at loc, with values
+     *
+     * http://www.broadinstitute.org/software/igv/IGV
+     *
+     * @param out a non-null PrintStream where we'll write our line
+     * @param loc the location of values
+     * @param featureName string name of this feature (see IGV format)
+     * @param values the floating point values to associate with loc and feature name in out
+     */
+    @Requires({
+            "out != null",
+            "loc != null",
+            "values.length > 0"
+    })
+    private void printIGVFormatRow(final PrintStream out, final GenomeLoc loc, final String featureName, final double ... values) {
+        // note that start and stop are 0 based, but the stop is exclusive so we don't subtract 1
+        out.printf("%s\t%d\t%d\t%s", loc.getContig(), loc.getStart() - 1, loc.getStop(), featureName);
+        // each value becomes one tab-separated column, formatted with 5 decimal places
+        for ( final double value : values )
+            out.print(String.format("\t%.5f", value));
+        out.println();
+    }
+
+    /**
+     * Write out activity profile information, if requested by the walker
+     *
+     * No-op when the walker did not request an activity profile output stream.
+     *
+     * @param states the states in the current activity profile
+     */
+    @Requires("states != null")
+    private void writeActivityProfile(final List<ActivityProfileState> states) {
+        if ( walker.activityProfileOutStream != null ) {
+            initializeOutputStreamsIfNecessary();
+            for ( final ActivityProfileState state : states ) {
+                // clamp the probability to 1.0 for display purposes
+                printIGVFormatRow(walker.activityProfileOutStream, state.getLoc(), "state", Math.min(state.isActiveProb, 1.0));
+            }
+        }
+    }
+
+    /**
+     * Write out each active region to the walker activeRegionOutStream
+     *
+     * No-op when the walker did not request an active region output stream.  Writes two IGV rows
+     * per region: a zero-valued marker at the region start, then the region itself with value
+     * +1.0 (active) or -1.0 (inactive).
+     *
+     * @param region the region we're currently operating on
+     */
+    @Requires("region != null")
+    private void writeActiveRegion(final ActiveRegion region) {
+        if( walker.activeRegionOutStream != null ) {
+            initializeOutputStreamsIfNecessary();
+            printIGVFormatRow(walker.activeRegionOutStream, region.getLocation().getStartLocation(),
+                    "end-marker", 0.0);
+            printIGVFormatRow(walker.activeRegionOutStream, region.getLocation(),
+                    "size=" + region.getLocation().size(), region.isActive() ? 1.0 : -1.0);
+        }
+    }
+
+
+    // -------------------------------------------------------------------------------------
+    //
+    // Functions to process active regions that are ready for map / reduce calls
+    //
+    // -------------------------------------------------------------------------------------
+
+    /**
+     * Invoke the walker isActive function, and incorporate its result into the activity profile
+     *
+     * @param walker the walker we're running
+     * @param tracker the ref meta data tracker to pass on to the isActive function of walker
+     * @param refContext the refContext to pass on to the isActive function of walker
+     * @param locus the AlignmentContext to pass on to the isActive function of walker
+     */
+    private void addIsActiveResult(final ActiveRegionWalker<M, T> walker,
+                                   final RefMetaDataTracker tracker, final ReferenceContext refContext,
+                                   final AlignmentContext locus) {
+        // must be called, even if we won't use the result, to satisfy walker contract
+        final ActivityProfileState state = walker.isActive( tracker, refContext, locus );
+        // forceActive overrides whatever probability the walker computed
+        if ( walker.forceActive) state.isActiveProb = 1.0;
+        // with preset regions the activity profile is not used, so don't accumulate states
+        if ( ! walkerHasPresetRegions ) {
+            activityProfile.add(state);
+        }
+    }
+
+    /**
+     * Take the individual isActive calls and integrate them into contiguous active regions and
+     * add these blocks of work to the work queue:
+     * band-pass filter the list of isActive probabilities and turn into active regions.
+     *
+     * @param walker the walker we're running
+     * @param flushActivityProfile if true, force the activity profile to emit all pending regions
+     * @param forceAllRegionsToBeActive if true, pop every queued region regardless of the dead zone
+     * @param referenceOrderedDataView source of ROD data for each popped region
+     * @return the (possibly empty) list of regions that are ready for map/reduce processing
+     */
+    private List<MapData> prepActiveRegionsForProcessing(final ActiveRegionWalker<M, T> walker,
+                                                              final boolean flushActivityProfile,
+                                                              final boolean forceAllRegionsToBeActive,
+                                                              final IntervalReferenceOrderedView referenceOrderedDataView) {
+        if ( ! walkerHasPresetRegions ) {
+            // We don't have preset regions, so we get our regions from the activity profile
+            final Collection<ActiveRegion> activeRegions = activityProfile.popReadyActiveRegions(getActiveRegionExtension(), getMinRegionSize(), getMaxRegionSize(), flushActivityProfile);
+            workQueue.addAll(activeRegions);
+            if ( ! activeRegions.isEmpty() && logger.isDebugEnabled() ) logger.debug("Integrated " + activityProfile.size() + " isActive calls into " + activeRegions.size() + " regions." );
+        }
+
+        // Since we've traversed sufficiently past this point (or this contig!) in the workQueue we can unload those regions and process them
+        final LinkedList<MapData> readyRegions = new LinkedList<>();
+        while( workQueue.peek() != null ) {
+            final ActiveRegion activeRegion = workQueue.peek();
+            if ( forceAllRegionsToBeActive || regionCompletelyWithinDeadZone(activeRegion) ) {
+                writeActivityProfile(activeRegion.getSupportingStates());
+                writeActiveRegion(activeRegion);
+                readyRegions.add(prepActiveRegionForProcessing(workQueue.remove(), walker, referenceOrderedDataView));
+            } else {
+                // the queue is ordered, so the first region still in the live zone ends the scan
+                break;
+            }
+        }
+
+        return readyRegions;
+
+    }
+
+    /**
+     * Fill a single popped active region with its reads and ROD data, producing a MapData unit of work.
+     *
+     * Pops all currently-held reads, assigns each overlapping read to the region (extended reads
+     * too, if the walker wants them), and puts back only the reads that may still belong to a
+     * future region.  Also trims the ROD view past this region, so this must be called on
+     * regions in coordinate order.
+     *
+     * @param activeRegion the region to prepare; its read list is populated as a side effect
+     * @param walker the walker we're running (consulted for non-primary / extended read policy)
+     * @param referenceOrderedDataView source of ROD data covering the region
+     * @return the MapData pairing activeRegion with its RefMetaDataTracker
+     */
+    private MapData prepActiveRegionForProcessing(final ActiveRegion activeRegion,
+                                                  final ActiveRegionWalker<M, T> walker,
+                                                  final IntervalReferenceOrderedView referenceOrderedDataView) {
+        final List<GATKSAMRecord> stillLive = new LinkedList<>();
+        for ( final GATKSAMRecord read : myReads.popCurrentReads() ) {
+            // killed == true means the read will NOT be returned to myReads for future regions
+            boolean killed = false;
+            final GenomeLoc readLoc = this.engine.getGenomeLocParser().createGenomeLoc( read );
+
+            if( activeRegion.getLocation().overlapsP( readLoc ) ) {
+                activeRegion.add(read);
+
+                // a primary-overlap read is consumed here unless the walker wants it again elsewhere
+                if ( ! walker.wantsNonPrimaryReads() ) {
+                    killed = true;
+                }
+            } else if( walker.wantsExtendedReads() && activeRegion.getExtendedLoc().overlapsP( readLoc )) {
+                activeRegion.add( read );
+            }
+
+            // if the read hasn't already been killed, check if it cannot occur in any more active regions, and maybe kill it
+            if ( ! killed && readCannotOccurInAnyMoreActiveRegions(read, activeRegion) ) {
+                killed = true;
+            }
+
+            // keep track of all of the still live active regions
+            if ( ! killed ) stillLive.add(read);
+        }
+        // return the surviving reads for consideration by subsequent regions
+        myReads.addAll(stillLive);
+
+        if ( logger.isDebugEnabled() ) {
+            logger.debug(">> Map call with " + activeRegion.getReads().size() + " " + (activeRegion.isActive() ? "active" : "inactive") + " reads @ " + activeRegion.getLocation() + " with full extent: " + activeRegion.getReadSpanLoc());
+        }
+
+        if ( LOG_READ_CARRYING )
+            logger.info(String.format("Processing region %20s span=%3d active?=%5b with %4d reads.  Overall max reads carried is %s",
+                    activeRegion.getLocation(), activeRegion.getLocation().size(), activeRegion.isActive(), activeRegion.size(), maxReadsInMemory));
+
+        // prepare the RefMetaDataTracker information
+        final GenomeLoc loc = activeRegion.getLocation();
+        // get all of the RODs that cover the active region (without extension)
+        final RefMetaDataTracker tracker = referenceOrderedDataView.getReferenceOrderedDataForInterval(loc);
+        // trim away all of the features that occurred before this location, as we will not need them in the future
+        referenceOrderedDataView.trimCurrentFeaturesToLoc(loc);
+
+        return new MapData(activeRegion, tracker);
+    }
+
+    /** Nano-scheduler map adapter: delegates one MapData unit of work to the walker's map function. */
+    private class TraverseActiveRegionMap implements NSMapFunction<MapData, M> {
+        @Override
+        public M apply(final MapData mapData) {
+            if ( DEBUG ) logger.info("Executing walker.map for " + mapData.activeRegion + " in thread " + Thread.currentThread().getName());
+            return walker.map(mapData.activeRegion, mapData.tracker);
+        }
+    }
+
+    /** Nano-scheduler reduce adapter: folds one map result into the running sum via the walker's reduce. */
+    private class TraverseActiveRegionReduce implements NSReduceFunction<M, T> {
+        @Override
+        public T apply(M one, T sum) {
+            return walker.reduce(one, sum);
+        }
+    }
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/traversals/TraverseDuplicates.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/traversals/TraverseDuplicates.java
new file mode 100644
index 0000000..41738a0
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/traversals/TraverseDuplicates.java
@@ -0,0 +1,205 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.traversals;
+
+import htsjdk.samtools.SAMRecord;
+import org.apache.log4j.Logger;
+import org.broadinstitute.gatk.utils.contexts.AlignmentContext;
+import org.broadinstitute.gatk.engine.datasources.providers.ReadShardDataProvider;
+import org.broadinstitute.gatk.engine.datasources.providers.ReadView;
+import org.broadinstitute.gatk.utils.iterators.PushbackIterator;
+import org.broadinstitute.gatk.engine.walkers.DuplicateWalker;
+import org.broadinstitute.gatk.utils.GenomeLoc;
+import org.broadinstitute.gatk.utils.pileup.ReadBackedPileupImpl;
+import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
+
+import java.util.*;
+
+/**
+ * @author Mark DePristo
+ * @version 0.1
+ *          <p/>
+ *          Class TraverseDuplicates
+ *          <p/>
+ *          This class handles traversing lists of duplicate reads in the new shardable style
+ */
+public class TraverseDuplicates<M,T> extends TraversalEngine<M,T,DuplicateWalker<M,T>,ReadShardDataProvider> {
+    /** our log, which we want to capture anything from this class */
+    protected static Logger logger = Logger.getLogger(TraverseDuplicates.class);
+
+    /** Turn this to true to enable logger.debug output */
+    private final boolean DEBUG = false;
+
+    @Override
+    public String getTraversalUnits() {
+        return "dups";
+    }
+
+    /**
+     * Collect read plus every subsequent read from iter that starts at the same position.
+     *
+     * The first read with a different start is pushed back onto iter so it is not lost.
+     *
+     * @param read the first read at the locus
+     * @param iter pushback iterator supplying the remaining reads, in coordinate-sorted order
+     * @return the list of reads (including read itself) sharing read's start position
+     */
+    private List<GATKSAMRecord> readsAtLoc(final GATKSAMRecord read, PushbackIterator<SAMRecord> iter) {
+        GenomeLoc site = engine.getGenomeLocParser().createGenomeLoc(read);
+        ArrayList<GATKSAMRecord> l = new ArrayList<GATKSAMRecord>();
+
+        l.add(read);
+        for (SAMRecord read2 : iter) {
+            GenomeLoc site2 = engine.getGenomeLocParser().createGenomeLoc(read2);
+
+            // the next read starts at a different position, so it belongs to a later locus
+            if (site2.getStart() != site.getStart()) {
+                iter.pushback(read2);
+                break;
+            } else {
+                l.add((GATKSAMRecord) read2);
+            }
+        }
+
+        return l;
+    }
+
+    /**
+     * Creates a set of lists of reads, where each list contains reads from the same underlying molecule according
+     * to their duplicate flag and their (and mate, if applicable) start/end positions.
+     *
+     * @param reads the list of reads to split into unique molecular samples
+     * @return a set of read lists, one per inferred molecule, preserving insertion order
+     */
+    protected Set<List<GATKSAMRecord>> uniqueReadSets(List<GATKSAMRecord> reads) {
+        Set<List<GATKSAMRecord>> readSets = new LinkedHashSet<List<GATKSAMRecord>>();
+
+        // for each read, find duplicates, and either add the read to its duplicate list or start a new one
+        for ( GATKSAMRecord read : reads ) {
+            List<GATKSAMRecord> readSet = findDuplicateReads(read, readSets);
+
+            if ( readSet == null ) {
+                readSets.add(new ArrayList<GATKSAMRecord>(Arrays.asList(read)));    // copy so I can add to the list
+            } else {
+                readSet.add(read);
+            }
+        }
+
+        return readSets;
+    }
+
+    /**
+     * Find duplicate reads for read in the set of unique reads.  This is effectively a duplicate marking algorithm,
+     * but it relies for safety's sake on the file itself being marked by a true duplicate marking algorithm.  Pair
+     * and single-end read aware.
+     *
+     * @param read the read we are trying to place into an existing duplicate set
+     * @param readSets the duplicate sets built so far; the first read of each set is its key
+     * @return The list of duplicate reads that read is a member of, or null if it's the only one of its kind
+     */
+    protected List<GATKSAMRecord> findDuplicateReads(GATKSAMRecord read, Set<List<GATKSAMRecord>> readSets ) {
+        if ( read.getReadPairedFlag() ) {
+            // paired
+            final GenomeLoc readMateLoc = engine.getGenomeLocParser().createGenomeLoc(read.getMateReferenceName(), read.getMateAlignmentStart(), read.getMateAlignmentStart());
+
+            for (List<GATKSAMRecord> reads : readSets) {
+                GATKSAMRecord key = reads.get(0);
+
+                // read and key start at the same place, key is also paired, and at least one of
+                // the two reads is flagged as a duplicate
+                if ( read.getAlignmentStart() == key.getAlignmentStart() && key.getReadPairedFlag() && ( key.getDuplicateReadFlag() || read.getDuplicateReadFlag() ) ) {
+                    // at least one has to be marked as a duplicate
+                    final GenomeLoc keyMateLoc = engine.getGenomeLocParser().createGenomeLoc(key.getMateReferenceName(), key.getMateAlignmentStart(), key.getMateAlignmentStart());
+                    if ( readMateLoc.compareTo(keyMateLoc) == 0 ) {
+                        // we are at the same position as the dup and have the same mate pos, it's a dup
+                        if (DEBUG) logger.debug(String.format("  => Adding read to dups list: %s %d %s vs. %s", read, reads.size(), readMateLoc, keyMateLoc));
+                        return reads;
+                    }
+                }
+            }
+        } else {
+            // unpaired: duplicates must share start position and read length, and at least one
+            // of the pair must carry the duplicate flag
+            for (List<GATKSAMRecord> reads : readSets) {
+                GATKSAMRecord key = reads.get(0);
+                boolean v = (! key.getReadPairedFlag()) && read.getAlignmentStart() == key.getAlignmentStart() && ( key.getDuplicateReadFlag() || read.getDuplicateReadFlag() ) && read.getReadLength() == key.getReadLength();
+                //System.out.printf("%s %s %b %b %d %d %d %d => %b%n",
+                //        read.getReadPairedFlag(), key.getReadPairedFlag(), read.getDuplicateReadFlag(), key.getDuplicateReadFlag(),
+                //        read.getAlignmentStart(), key.getAlignmentStart(), read.getReadLength(), key.getReadLength(), v);
+                if ( v ) {
+                    //System.out.printf("Returning reads...%n");
+                    return reads;
+                }
+            }
+        }
+
+        return null;
+    }
+
+    // --------------------------------------------------------------------------------------------------------------
+    //
+    // new style interface to the system
+    //
+    // --------------------------------------------------------------------------------------------------------------
+
+    /**
+     * Traverse by reads, given the data and the walker
+     *
+     * @param walker the walker to execute over
+     * @param dataProvider the source of reads (and read metrics) for this shard
+     * @param sum    of type T, the return from the walker
+     *
+     * @return the result type T, the product of all the reduce calls
+     */
+    public T traverse(DuplicateWalker<M, T> walker,
+                      ReadShardDataProvider dataProvider,
+                      T sum) {
+        PushbackIterator<SAMRecord> iter = new PushbackIterator<SAMRecord>(new ReadView(dataProvider).iterator());
+
+        /**
+         * while we still have more reads:
+         * ok, here's the idea.  We get all the reads that start at the same position in the genome
+         * We then split the list of reads into sublists of reads:
+         *   -> those with the same mate pair position, for paired reads
+         *   -> those flagged as unpaired and duplicated but having the same start and end
+         */
+        boolean done = walker.isDone();
+        for (SAMRecord read : iter) {
+            if ( done ) break;
+            // get the genome loc from the read
+            GenomeLoc site = engine.getGenomeLocParser().createGenomeLoc(read);
+
+            Set<List<GATKSAMRecord>> readSets = uniqueReadSets(readsAtLoc((GATKSAMRecord) read, iter));
+            if ( DEBUG ) logger.debug(String.format("*** TraverseDuplicates.traverse at %s with %d read sets", site, readSets.size()));
+
+            // Jump forward in the reference to this locus location
+            AlignmentContext locus = new AlignmentContext(site, new ReadBackedPileupImpl(site));
+
+            // update the number of duplicate sets we've seen
+            dataProvider.getShard().getReadMetrics().incrementNumIterations();
+
+            // actually call filter and map, accumulating sum
+            final boolean keepMeP = walker.filter(site, locus, readSets);
+            if (keepMeP) {
+                M x = walker.map(site, locus, readSets);
+                sum = walker.reduce(x, sum);
+            }
+
+            printProgress(site.getStopLocation());
+            done = walker.isDone();
+        }
+
+        return sum;
+    }
+}
\ No newline at end of file
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/traversals/TraverseLociNano.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/traversals/TraverseLociNano.java
new file mode 100644
index 0000000..0c2676b
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/traversals/TraverseLociNano.java
@@ -0,0 +1,304 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.traversals;
+
+import org.broadinstitute.gatk.engine.WalkerManager;
+import org.broadinstitute.gatk.utils.contexts.AlignmentContext;
+import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
+import org.broadinstitute.gatk.engine.datasources.providers.*;
+import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
+import org.broadinstitute.gatk.engine.walkers.DataSource;
+import org.broadinstitute.gatk.engine.walkers.LocusWalker;
+import org.broadinstitute.gatk.engine.walkers.Walker;
+import org.broadinstitute.gatk.utils.GenomeLoc;
+import org.broadinstitute.gatk.utils.nanoScheduler.NSMapFunction;
+import org.broadinstitute.gatk.utils.nanoScheduler.NSProgressFunction;
+import org.broadinstitute.gatk.utils.nanoScheduler.NSReduceFunction;
+import org.broadinstitute.gatk.utils.nanoScheduler.NanoScheduler;
+import org.broadinstitute.gatk.utils.pileup.ReadBackedPileupImpl;
+
+import java.util.Iterator;
+
+/**
+ * A simple solution to iterating over all reference positions over a series of genomic locations.
+ *
+ * This is the nano-scheduled locus traversal: the per-locus inputs (pileup, reference
+ * context, and RODs) are packaged into MapData objects and fed through a NanoScheduler,
+ * which can run the walker's map calls on multiple threads within a single shard.
+ */
+public class TraverseLociNano<M,T> extends TraversalEngine<M,T,LocusWalker<M,T>,LocusShardDataProvider> {
+    /** when true, enables NanoScheduler debugging output */
+    private static final boolean DEBUG = false;
+
+    /** schedules the per-locus map/reduce calls, potentially across multiple threads */
+    final NanoScheduler<MapData, MapResult, T> nanoScheduler;
+
+    /**
+     * Create a new locus traversal driven by a NanoScheduler with nThreads threads.
+     * @param nThreads number of threads for the underlying NanoScheduler
+     */
+    public TraverseLociNano(int nThreads) {
+        nanoScheduler = new NanoScheduler<MapData, MapResult, T>(nThreads);
+        nanoScheduler.setProgressFunction(new TraverseLociProgress());
+    }
+
+    @Override
+    public final String getTraversalUnits() {
+        return "sites";
+    }
+
+    /** Holds the reduce result of a traversal together with the number of map iterations performed */
+    protected static class TraverseResults<T> {
+        final int numIterations;
+        final T reduceResult;
+
+        public TraverseResults(int numIterations, T reduceResult) {
+            this.numIterations = numIterations;
+            this.reduceResult = reduceResult;
+        }
+    }
+
+    /**
+     * Traverse by loci over all of the data in dataProvider, applying the walker's
+     * filter/map/reduce at each locus.
+     *
+     * @param walker the locus walker to execute
+     * @param dataProvider the source of locus, reference, and ROD data for this shard
+     * @param sum the accumulated reduce value coming into this shard
+     * @return the reduce value after processing this shard
+     */
+    @Override
+    public T traverse( LocusWalker<M,T> walker,
+                       LocusShardDataProvider dataProvider,
+                       T sum) {
+        logger.debug(String.format("TraverseLoci.traverse: Shard is %s", dataProvider));
+
+        final LocusView locusView = getLocusView( walker, dataProvider );
+
+        if ( locusView.hasNext() ) { // trivial optimization to avoid unnecessary processing when there's nothing here at all
+            //ReferenceOrderedView referenceOrderedDataView = new ReferenceOrderedView( dataProvider );
+            ReferenceOrderedView referenceOrderedDataView = null;
+            if ( WalkerManager.getWalkerDataSource(walker) != DataSource.REFERENCE_ORDERED_DATA )
+                referenceOrderedDataView = new ManagingReferenceOrderedView( dataProvider );
+            else
+                referenceOrderedDataView = (RodLocusView)locusView;
+
+            final LocusReferenceView referenceView = new LocusReferenceView( walker, dataProvider );
+
+            final TraverseResults<T> result = traverse( walker, locusView, referenceView, referenceOrderedDataView, sum );
+            sum = result.reduceResult;
+            dataProvider.getShard().getReadMetrics().incrementNumIterations(result.numIterations);
+        }
+
+        // We have a final map call to execute here to clean up the skipped bases from the
+        // last position in the ROD to that in the interval
+        if ( WalkerManager.getWalkerDataSource(walker) == DataSource.REFERENCE_ORDERED_DATA && ! walker.isDone() ) {
+            // only do this if the walker isn't done!
+            final RodLocusView rodLocusView = (RodLocusView)locusView;
+            final long nSkipped = rodLocusView.getLastSkippedBases();
+            if ( nSkipped > 0 ) {
+                final GenomeLoc site = rodLocusView.getLocOneBeyondShard();
+                final AlignmentContext ac = new AlignmentContext(site, new ReadBackedPileupImpl(site), nSkipped);
+                // note: tracker and ref are passed as null for this synthetic "skipped bases" call
+                final M x = walker.map(null, null, ac);
+                sum = walker.reduce(x, sum);
+            }
+        }
+
+        return sum;
+    }
+
+    /**
+     * Gets the best view of loci for this walker given the available data.  The view will function as a 'trigger track'
+     * of sorts, providing a consistent interface so that TraverseLoci doesn't need to be reimplemented for any new datatype
+     * that comes along.
+     * @param walker walker to interrogate.
+     * @param dataProvider Data with which to drive the locus view.
+     * @return A view of the locus data, where one iteration of the locus view maps to one iteration of the traversal.
+     */
+    private LocusView getLocusView( Walker<M,T> walker, LocusShardDataProvider dataProvider ) {
+        final DataSource dataSource = WalkerManager.getWalkerDataSource(walker);
+        if( dataSource == DataSource.READS )
+            return new CoveredLocusView(dataProvider);
+        else if( dataSource == DataSource.REFERENCE ) //|| ! GenomeAnalysisEngine.instance.getArguments().enableRodWalkers )
+            return new AllLocusView(dataProvider);
+        else if( dataSource == DataSource.REFERENCE_ORDERED_DATA )
+            return new RodLocusView(dataProvider);
+        else
+            throw new UnsupportedOperationException("Unsupported traversal type: " + dataSource);
+    }
+
+    /**
+     * Run the locus map/reduce itself via the NanoScheduler.
+     *
+     * @param walker the walker whose filter/map/reduce are applied at each locus
+     * @param locusView provides one AlignmentContext per iteration
+     * @param referenceView provides the reference context at each locus
+     * @param referenceOrderedDataView provides the ROD tracker at each locus
+     * @param sum the incoming reduce value
+     * @return the reduce result plus the number of map iterations executed
+     */
+    protected TraverseResults<T> traverse(final LocusWalker<M, T> walker,
+                                          final LocusView locusView,
+                                          final LocusReferenceView referenceView,
+                                          final ReferenceOrderedView referenceOrderedDataView,
+                                          final T sum) {
+        nanoScheduler.setDebug(DEBUG);
+        final TraverseLociMap myMap = new TraverseLociMap(walker);
+        final TraverseLociReduce myReduce = new TraverseLociReduce(walker);
+
+        final MapDataIterator inputIterator = new MapDataIterator(locusView, referenceView, referenceOrderedDataView);
+        final T result = nanoScheduler.execute(inputIterator, myMap, sum, myReduce);
+
+        return new TraverseResults<T>(inputIterator.numIterations, result);
+    }
+
+    /**
+     * Create iterator that provides inputs for all map calls into MapData, to be provided
+     * to NanoScheduler for Map/Reduce
+     */
+    private class MapDataIterator implements Iterator<MapData> {
+        final LocusView locusView;
+        final LocusReferenceView referenceView;
+        final ReferenceOrderedView referenceOrderedDataView;
+        // number of MapData objects handed out so far; reported in TraverseResults
+        int numIterations = 0;
+
+        private MapDataIterator(LocusView locusView, LocusReferenceView referenceView, ReferenceOrderedView referenceOrderedDataView) {
+            this.locusView = locusView;
+            this.referenceView = referenceView;
+            this.referenceOrderedDataView = referenceOrderedDataView;
+        }
+
+        @Override
+        public boolean hasNext() {
+            // stop early if the engine's runtime limit has been exceeded
+            return locusView.hasNext() && ! engine.exceedsRuntimeLimit();
+        }
+
+        @Override
+        public MapData next() {
+            final AlignmentContext locus = locusView.next();
+            final GenomeLoc location = locus.getLocation();
+
+            //logger.info("Pulling data from MapDataIterator at " + location);
+
+            // create reference context. Note that if we have a pileup of "extended events", the context will
+            // hold the (longest) stretch of deleted reference bases (if deletions are present in the pileup).
+            final ReferenceContext refContext = referenceView.getReferenceContext(location);
+
+            // Iterate forward to get all reference ordered data covering this location
+            final RefMetaDataTracker tracker = referenceOrderedDataView.getReferenceOrderedDataAtLocus(location);
+
+            numIterations++;
+            return new MapData(locus, refContext,  tracker);
+        }
+
+        @Override
+        public void remove() {
+            throw new UnsupportedOperationException("Cannot remove elements from MapDataIterator");
+        }
+    }
+
+    /** Shut down the underlying NanoScheduler and release its threads */
+    @Override
+    public void shutdown() {
+        nanoScheduler.shutdown();
+    }
+
+    /**
+     * The input data needed for each map call: the alignment context (pileup),
+     * the reference context, and the ROD tracker
+     */
+    private class MapData {
+        final AlignmentContext alignmentContext;
+        final ReferenceContext refContext;
+        final RefMetaDataTracker tracker;
+
+        private MapData(final AlignmentContext alignmentContext, ReferenceContext refContext, RefMetaDataTracker tracker) {
+            this.alignmentContext = alignmentContext;
+            this.refContext = refContext;
+            this.tracker = tracker;
+        }
+
+        @Override
+        public String toString() {
+            return "MapData " + alignmentContext.getLocation();
+        }
+    }
+
+    /**
+     * Contains the results of a map call, indicating whether the call was good, filtered, or done
+     */
+    private class MapResult {
+        final M value;
+        final boolean reduceMe;
+
+        /**
+         * Create a MapResult with value that should be reduced
+         *
+         * @param value the value to reduce
+         */
+        private MapResult(final M value) {
+            this.value = value;
+            this.reduceMe = true;
+        }
+
+        /**
+         * Create a MapResult that shouldn't be reduced
+         */
+        private MapResult() {
+            this.value = null;
+            this.reduceMe = false;
+        }
+    }
+
+    /**
+     * A static object that tells reduce that the result of map should be skipped (filtered or done)
+     */
+    private final MapResult SKIP_REDUCE = new MapResult();
+
+    /**
+     * MapFunction for TraverseLociNano meeting NanoScheduler interface requirements
+     *
+     * Applies walker.map to MapData, returning a MapResult object containing the result
+     */
+    private class TraverseLociMap implements NSMapFunction<MapData, MapResult> {
+        final LocusWalker<M,T> walker;
+
+        private TraverseLociMap(LocusWalker<M, T> walker) {
+            this.walker = walker;
+        }
+
+        @Override
+        public MapResult apply(final MapData data) {
+            if ( ! walker.isDone() ) {
+                final boolean keepMeP = walker.filter(data.tracker, data.refContext, data.alignmentContext);
+                if (keepMeP) {
+                    final M x = walker.map(data.tracker, data.refContext, data.alignmentContext);
+                    return new MapResult(x);
+                }
+            }
+            return SKIP_REDUCE;
+        }
+    }
+
+    /**
+     * NSReduceFunction for TraverseLociNano meeting NanoScheduler interface requirements
+     *
+     * Takes a MapResult object and applies the walkers reduce function to each map result, when applicable
+     */
+    private class TraverseLociReduce implements NSReduceFunction<MapResult, T> {
+        final LocusWalker<M,T> walker;
+
+        private TraverseLociReduce(LocusWalker<M, T> walker) {
+            this.walker = walker;
+        }
+
+        @Override
+        public T apply(MapResult one, T sum) {
+            if ( one.reduceMe )
+                // only run reduce on values that aren't DONE or FAILED
+                return walker.reduce(one.value, sum);
+            else
+                return sum;
+        }
+    }
+
+    /** Progress callback: reports the location of the last locus processed to the progress meter */
+    private class TraverseLociProgress implements NSProgressFunction<MapData> {
+        @Override
+        public void progress(MapData lastProcessedMap) {
+            if (lastProcessedMap.alignmentContext != null)
+                printProgress(lastProcessedMap.alignmentContext.getLocation());
+        }
+    }
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/traversals/TraverseReadPairs.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/traversals/TraverseReadPairs.java
new file mode 100644
index 0000000..4387436
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/traversals/TraverseReadPairs.java
@@ -0,0 +1,129 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.traversals;
+
+import htsjdk.samtools.SAMRecord;
+import htsjdk.samtools.SAMRecordCoordinateComparator;
+import org.apache.log4j.Logger;
+import org.broadinstitute.gatk.engine.datasources.providers.ReadShardDataProvider;
+import org.broadinstitute.gatk.engine.datasources.providers.ReadView;
+import org.broadinstitute.gatk.engine.datasources.reads.Shard;
+import org.broadinstitute.gatk.engine.walkers.DataSource;
+import org.broadinstitute.gatk.engine.walkers.ReadPairWalker;
+import org.broadinstitute.gatk.engine.walkers.Requires;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+
+/**
+ * Traverse over a collection of read pairs, assuming that a given shard will contain all pairs.
+ *
+ * Consecutive reads sharing the same read name are accumulated into a single list
+ * ("pair"), which is then passed to the walker's filter/map/reduce as one unit.
+ *
+ * @author mhanna
+ * @version 0.1
+ */
+ at Requires({DataSource.REFERENCE})
+public class TraverseReadPairs<M,T> extends TraversalEngine<M,T, ReadPairWalker<M,T>,ReadShardDataProvider> {
+
+    /** our log, which we want to capture anything from this class */
+    protected static final Logger logger = Logger.getLogger(TraverseReadPairs.class);
+
+    @Override
+    public String getTraversalUnits() {
+        return "read pairs";
+    }
+
+    /**
+     * Traverse by reads, given the data and the walker
+     *
+     * @param walker the walker to execute over
+     * @param dataProvider the source of read data for this shard; must contain reads
+     * @param sum    of type T, the return from the walker
+     *
+     * @return the result type T, the product of all the reduce calls
+     */
+    public T traverse(ReadPairWalker<M, T> walker,
+                      ReadShardDataProvider dataProvider,
+                      T sum) {
+        // NOTE(review): message says "TraverseReadsPairs" (extra 's') while the class is TraverseReadPairs
+        logger.debug(String.format("TraverseReadsPairs.traverse Covered dataset is %s", dataProvider));
+
+        if( !dataProvider.hasReads() )
+            throw new IllegalArgumentException("Unable to traverse reads; no read data is available.");
+
+        ReadView reads = new ReadView(dataProvider);
+        List<SAMRecord> pairs = new ArrayList<SAMRecord>();
+
+        boolean done = walker.isDone();
+        for(SAMRecord read: reads) {
+            if ( done ) break;
+            dataProvider.getShard().getReadMetrics().incrementNumReadsSeen();
+
+            if(pairs.size() == 0 || pairs.get(0).getReadName().equals(read.getReadName())) {
+                // If this read name is the same as the last, accumulate it.
+                pairs.add(read);
+            }
+            else {
+                // Otherwise, walk over the accumulated list, then start fresh with the new read.
+                sum = walkOverPairs(walker,dataProvider.getShard(),pairs,sum);
+                pairs.clear();
+                pairs.add(read);
+
+                // no single locus corresponds to a pair; presumably printProgress accepts null -- verify
+                printProgress(null);
+            }
+
+            done = walker.isDone();
+        }
+
+        // If any data was left in the queue, process it.
+        if(pairs.size() > 0)
+            sum = walkOverPairs(walker,dataProvider.getShard(),pairs,sum);
+
+        return sum;
+    }
+
+    /**
+     * Filter / map / reduce over a single pair.
+     * @param walker The walker.
+     * @param shard The shard currently being processed.
+     * @param reads The reads in the pair.
+     * @param sum The accumulator.
+     * @return The accumulator after application of the given read pairing.
+     */
+    private T walkOverPairs(ReadPairWalker<M,T> walker, Shard shard, List<SAMRecord> reads, T sum) {
+        // update the number of pairs (iterations) we've processed
+        shard.getReadMetrics().incrementNumIterations();
+
+        // Sort the reads present in coordinate order.
+        Collections.sort(reads,new SAMRecordCoordinateComparator());
+
+        final boolean keepMeP = walker.filter(reads);
+        if (keepMeP) {
+            M x = walker.map(reads);
+            sum = walker.reduce(x, sum);
+        }
+
+        return sum;
+    }
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/traversals/TraverseReadsNano.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/traversals/TraverseReadsNano.java
new file mode 100644
index 0000000..ccc8159
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/traversals/TraverseReadsNano.java
@@ -0,0 +1,256 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.traversals;
+
+import htsjdk.samtools.SAMRecord;
+import org.apache.log4j.Logger;
+import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
+import org.broadinstitute.gatk.engine.datasources.providers.ReadBasedReferenceOrderedView;
+import org.broadinstitute.gatk.engine.datasources.providers.ReadReferenceView;
+import org.broadinstitute.gatk.engine.datasources.providers.ReadShardDataProvider;
+import org.broadinstitute.gatk.engine.datasources.providers.ReadView;
+import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
+import org.broadinstitute.gatk.engine.walkers.ReadWalker;
+import org.broadinstitute.gatk.utils.nanoScheduler.NSMapFunction;
+import org.broadinstitute.gatk.utils.nanoScheduler.NSProgressFunction;
+import org.broadinstitute.gatk.utils.nanoScheduler.NSReduceFunction;
+import org.broadinstitute.gatk.utils.nanoScheduler.NanoScheduler;
+import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
+
+import java.util.Iterator;
+import java.util.LinkedList;
+
+/**
+ * A nano-scheduling version of TraverseReads.
+ *
+ * Implements the traversal of a walker that accepts individual reads, the reference, and
+ * RODs per map call.  Directly supports shared memory parallelism via NanoScheduler
+ *
+ * @author depristo
+ * @version 1.0
+ * @date 9/2/2012
+ */
+public class TraverseReadsNano<M,T> extends TraversalEngine<M,T,ReadWalker<M,T>,ReadShardDataProvider> {
+    /** if true, all map inputs for a shard are collected up front rather than streamed lazily */
+    private final static boolean PRE_READ_ALL_MAP_DATA = true;
+    /** our log, which we want to capture anything from this class */
+    protected static final Logger logger = Logger.getLogger(TraverseReadsNano.class);
+    /** when true, enables NanoScheduler debugging output */
+    private static final boolean DEBUG = false;
+    /** schedules the per-read map/reduce calls, potentially across multiple threads */
+    final NanoScheduler<MapData, MapResult, T> nanoScheduler;
+
+    /**
+     * Create a new read traversal driven by a NanoScheduler with nThreads threads.
+     * @param nThreads number of threads for the underlying NanoScheduler
+     */
+    public TraverseReadsNano(int nThreads) {
+        nanoScheduler = new NanoScheduler<MapData, MapResult, T>(nThreads);
+        nanoScheduler.setProgressFunction(new NSProgressFunction<MapData>() {
+            @Override
+            public void progress(MapData lastProcessedMap) {
+                if ( lastProcessedMap.refContext != null )
+                    // note, need to use getStopLocation so we don't give an interval to ProgressMeterDaemon
+                    printProgress(lastProcessedMap.refContext.getLocus().getStopLocation());
+            }
+        });
+    }
+
+    @Override
+    public String getTraversalUnits() {
+        return "reads";
+    }
+
+    /**
+     * Traverse by reads, given the data and the walker
+     *
+     * @param walker the walker to traverse with
+     * @param dataProvider the provider of the reads data; must contain reads
+     * @param sum the value of type T, specified by the walker, to feed to the walkers reduce function
+     * @return the reduce variable of the read walker
+     */
+    public T traverse(ReadWalker<M,T> walker,
+                      ReadShardDataProvider dataProvider,
+                      T sum) {
+        if ( logger.isDebugEnabled() )
+            logger.debug(String.format("TraverseReadsNano.traverse Covered dataset is %s", dataProvider));
+
+        if( !dataProvider.hasReads() )
+            throw new IllegalArgumentException("Unable to traverse reads; no read data is available.");
+
+        nanoScheduler.setDebug(DEBUG);
+        final TraverseReadsMap myMap = new TraverseReadsMap(walker);
+        final TraverseReadsReduce myReduce = new TraverseReadsReduce(walker);
+
+        final Iterator<MapData> aggregatedInputs = aggregateMapData(dataProvider);
+        final T result = nanoScheduler.execute(aggregatedInputs, myMap, sum, myReduce);
+
+        return result;
+    }
+
+    /**
+     * Aggregate all of the inputs for all map calls into MapData, to be provided
+     * to NanoScheduler for Map/Reduce
+     *
+     * When PRE_READ_ALL_MAP_DATA is set, the entire shard is materialized into a list
+     * before scheduling; otherwise the lazy iterator is returned directly.
+     *
+     * @param dataProvider the source of our data
+     * @return a linked list of MapData objects holding the read, ref, and ROD info for every map/reduce
+     *          should execute
+     */
+    private Iterator<MapData> aggregateMapData(final ReadShardDataProvider dataProvider) {
+        final Iterator<MapData> it = makeDataIterator(dataProvider);
+        if ( PRE_READ_ALL_MAP_DATA ) {
+            final LinkedList<MapData> l = new LinkedList<MapData>();
+            while ( it.hasNext() ) l.add(it.next());
+            return l.iterator();
+        } else {
+            return it;
+        }
+    }
+
+
+    /**
+     * Create a lazy iterator packaging each read with its reference context and ROD tracker.
+     *
+     * @param dataProvider the source of reads, reference, and ROD data for this shard
+     * @return an iterator over MapData, one per read, stopping early if the engine runtime limit is exceeded
+     */
+    private Iterator<MapData> makeDataIterator(final ReadShardDataProvider dataProvider) {
+        return new Iterator<MapData> ()  {
+            final ReadView reads = new ReadView(dataProvider);
+            final ReadReferenceView reference = new ReadReferenceView(dataProvider);
+            final ReadBasedReferenceOrderedView rodView = new ReadBasedReferenceOrderedView(dataProvider);
+            final Iterator<SAMRecord> readIterator = reads.iterator();
+
+            @Override public boolean hasNext() { return ! engine.exceedsRuntimeLimit() && readIterator.hasNext(); }
+
+            @Override
+            public MapData next() {
+                final SAMRecord read = readIterator.next();
+                // NOTE(review): mappedness is checked two different ways below
+                // (getReadUnmappedFlag vs. getReferenceIndex >= 0) -- presumably
+                // equivalent for these inputs, but worth confirming
+                final ReferenceContext refContext = ! read.getReadUnmappedFlag()
+                        ? reference.getReferenceContext(read)
+                        : null;
+
+                // if the read is mapped, create a metadata tracker
+                final RefMetaDataTracker tracker = read.getReferenceIndex() >= 0
+                        ? rodView.getReferenceOrderedDataForRead(read)
+                        : null;
+
+                // update the number of reads we've seen
+                dataProvider.getShard().getReadMetrics().incrementNumIterations();
+
+                return new MapData((GATKSAMRecord)read, refContext, tracker);
+            }
+
+            @Override public void remove() {
+                throw new UnsupportedOperationException("Remove not supported");
+            }
+        };
+    }
+
+    /** Shut down the underlying NanoScheduler and release its threads */
+    @Override
+    public void shutdown() {
+        nanoScheduler.shutdown();
+    }
+
+    /**
+     * The input data needed for each map call.  The read, the reference, and the RODs
+     */
+    private class MapData {
+        final GATKSAMRecord read;
+        final ReferenceContext refContext;
+        final RefMetaDataTracker tracker;
+
+        private MapData(GATKSAMRecord read, ReferenceContext refContext, RefMetaDataTracker tracker) {
+            this.read = read;
+            this.refContext = refContext;
+            this.tracker = tracker;
+        }
+    }
+
+    /**
+     * Contains the results of a map call, indicating whether the call was good, filtered, or done
+     */
+    private class MapResult {
+        final M value;
+        final boolean reduceMe;
+
+        /**
+         * Create a MapResult with value that should be reduced
+         *
+         * @param value the value to reduce
+         */
+        private MapResult(final M value) {
+            this.value = value;
+            this.reduceMe = true;
+        }
+
+        /**
+         * Create a MapResult that shouldn't be reduced
+         */
+        private MapResult() {
+            this.value = null;
+            this.reduceMe = false;
+        }
+    }
+
+    /**
+     * A static object that tells reduce that the result of map should be skipped (filtered or done)
+     */
+    private final MapResult SKIP_REDUCE = new MapResult();
+
+    /**
+     * MapFunction for TraverseReads meeting NanoScheduler interface requirements
+     *
+     * Applies walker.map to MapData, returning a MapResult object containing the result
+     */
+    private class TraverseReadsMap implements NSMapFunction<MapData, MapResult> {
+        final ReadWalker<M,T> walker;
+
+        private TraverseReadsMap(ReadWalker<M, T> walker) {
+            this.walker = walker;
+        }
+
+        @Override
+        public MapResult apply(final MapData data) {
+            if ( ! walker.isDone() ) {
+                final boolean keepMeP = walker.filter(data.refContext, data.read);
+                if (keepMeP)
+                    return new MapResult(walker.map(data.refContext, data.read, data.tracker));
+            }
+
+            return SKIP_REDUCE;
+        }
+    }
+
+    /**
+     * NSReduceFunction for TraverseReads meeting NanoScheduler interface requirements
+     *
+     * Takes a MapResult object and applies the walkers reduce function to each map result, when applicable
+     */
+    private class TraverseReadsReduce implements NSReduceFunction<MapResult, T> {
+        final ReadWalker<M,T> walker;
+
+        private TraverseReadsReduce(ReadWalker<M, T> walker) {
+            this.walker = walker;
+        }
+
+        @Override
+        public T apply(MapResult one, T sum) {
+            if ( one.reduceMe )
+                // only run reduce on values that aren't DONE or FAILED
+                return walker.reduce(one.value, sum);
+            else
+                return sum;
+        }
+    }
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/traversals/package-info.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/traversals/package-info.java
new file mode 100644
index 0000000..b203e39
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/traversals/package-info.java
@@ -0,0 +1,26 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.traversals;
\ No newline at end of file
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/walkers/ActiveRegionTraversalParameters.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/walkers/ActiveRegionTraversalParameters.java
new file mode 100644
index 0000000..f017cfb
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/walkers/ActiveRegionTraversalParameters.java
@@ -0,0 +1,97 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.walkers;
+
+import org.broadinstitute.gatk.utils.activeregion.BandPassActivityProfile;
+
+import java.lang.annotation.Documented;
+import java.lang.annotation.Inherited;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+
+/**
+ * Describes the parameters that this walker requires of the active region traversal
+ *
+ * User: rpoplin
+ * Date: 1/18/12
+ */
+ at Documented
+ at Inherited
+ at Retention(RetentionPolicy.RUNTIME)
+
+public @interface ActiveRegionTraversalParameters {
+    /**
+     * How far to either side of the active region itself should we include reads?
+     *
+     * That is, if the active region is 10 bp wide, and extension is 5, ART will provide
+     * the walker with active regions 10 bp, with 5 bp of extension on either side, and
+     * all reads that cover the 20 bp of the region + extension.
+     *
+     * @return the size of the active region extension we'd like
+     */
+    public int extension() default 0;
+
+    /**
+     * The minimum number of bp for an active region, when we need to chop it up into pieces because
+     * it's become too big.  This only comes into effect when there's literally no good place to chop
+     * that does make the region smaller than this value.
+     *
+     * @return the min size in bp of regions
+     */
+    public int minRegion() default 50;
+
+    /**
+     * The maximum size in bp of active regions wanted by this walker
+     *
+     * Active regions larger than this value are automatically cut up by ART into smaller
+     * regions of size <= this value.
+     *
+     * @return the max size in bp of regions
+     */
+    public int maxRegion() default 1500;
+
+    /**
+     * The variance value for the Gaussian kernel of the band pass filter employed by ART
+     * @return the breadth of the band pass gaussian kernel we want for our traversal
+     */
+    public double bandPassSigma() default BandPassActivityProfile.DEFAULT_SIGMA;
+
+    /**
+     * What is the maximum number of reads we're willing to hold in memory per sample
+     * during the traversal?  This limits our exposure to unusually large amounts
+     * of coverage in the engine.
+     * @return the maximum number of reads we're willing to hold in memory
+     */
+    public int maxReadsToHoldInMemoryPerSample() default 30000;
+
+    /**
+     * No matter what the per sample value says, we will never hold more than this
+     * number of reads in memory at any time.  Provides an upper bound on the total number
+     * of reads in the case where we have a lot of samples.
+     * @return the maximum number of reads to hold in memory
+     */
+    public int maxReadsToHoldTotal() default 10000000;
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/walkers/ActiveRegionWalker.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/walkers/ActiveRegionWalker.java
new file mode 100644
index 0000000..3e92ff7
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/walkers/ActiveRegionWalker.java
@@ -0,0 +1,196 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.walkers;
+
+import com.google.java.contract.Ensures;
+import htsjdk.samtools.reference.IndexedFastaSequenceFile;
+import htsjdk.tribble.Feature;
+import org.broadinstitute.gatk.utils.commandline.*;
+import org.broadinstitute.gatk.utils.contexts.AlignmentContext;
+import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
+import org.broadinstitute.gatk.utils.downsampling.DownsampleType;
+import org.broadinstitute.gatk.engine.filters.*;
+import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
+import org.broadinstitute.gatk.utils.GenomeLoc;
+import org.broadinstitute.gatk.utils.GenomeLocParser;
+import org.broadinstitute.gatk.utils.GenomeLocSortedSet;
+import org.broadinstitute.gatk.utils.activeregion.ActiveRegion;
+import org.broadinstitute.gatk.utils.activeregion.ActiveRegionReadState;
+import org.broadinstitute.gatk.utils.activeregion.ActivityProfileState;
+import org.broadinstitute.gatk.utils.interval.IntervalMergingRule;
+import org.broadinstitute.gatk.utils.interval.IntervalSetRule;
+import org.broadinstitute.gatk.utils.interval.IntervalUtils;
+
+import java.io.PrintStream;
+import java.util.*;
+
+/**
+ * Base class for all the Active Region Walkers.
+ * User: rpoplin
+ * Date: 12/7/11
+ */
+
+ at By(DataSource.READS)
+ at Requires({DataSource.READS, DataSource.REFERENCE})
+ at PartitionBy(PartitionType.READ)
+ at ActiveRegionTraversalParameters(extension=50,maxRegion=1500)
+ at ReadFilters({UnmappedReadFilter.class, NotPrimaryAlignmentFilter.class, DuplicateReadFilter.class, FailsVendorQualityCheckFilter.class, MappingQualityUnavailableFilter.class})
+ at Downsample(by = DownsampleType.BY_SAMPLE, toCoverage = 1000)
+ at RemoveProgramRecords
+public abstract class ActiveRegionWalker<MapType, ReduceType> extends Walker<MapType, ReduceType> {
+    /**
+     * If provided, this walker will write out its activity profile (per bp probabilities of being active)
+     * to this file in the IGV formatted TAB deliminated output:
+     *
+     * http://www.broadinstitute.org/software/igv/IGV
+     *
+     * Intended to make debugging the activity profile calculations easier
+     */
+    @Output(fullName="activityProfileOut", shortName="APO", doc="Output the raw activity profile results in IGV format", required = false, defaultToStdout = false)
+    public PrintStream activityProfileOutStream = null;
+
+    /**
+     * If provided, this walker will write out its active and inactive regions
+     * to this file in the IGV formatted TAB deliminated output:
+     *
+     * http://www.broadinstitute.org/software/igv/IGV
+     *
+     * Intended to make debugging the active region calculations easier
+     */
+    @Output(fullName="activeRegionOut", shortName="ARO", doc="Output the active region to this IGV formatted file", required = false, defaultToStdout = false)
+    public PrintStream activeRegionOutStream = null;
+
+    @Advanced
+    @Input(fullName="activeRegionIn", shortName="AR", doc="Use this interval list file as the active regions to process", required = false)
+    protected List<IntervalBinding<Feature>> activeRegionBindings = null;
+
+    @Advanced
+    @Argument(fullName="activeRegionExtension", shortName="activeRegionExtension", doc="The active region extension; if not provided defaults to Walker annotated default", required = false)
+    public Integer activeRegionExtension = null;
+
+    /**
+     * For the active region walker to treat all bases as active.  Useful for debugging when you want to force something like
+     * the HaplotypeCaller to process a specific interval you provide the GATK
+     */
+    @Advanced
+    @Argument(fullName="forceActive", shortName="forceActive", doc="If provided, all bases will be tagged as active", required = false)
+    public boolean forceActive = false;
+
+    @Advanced
+    @Argument(fullName="activeRegionMaxSize", shortName="activeRegionMaxSize", doc="The active region maximum size; if not provided defaults to Walker annotated default", required = false)
+    public Integer activeRegionMaxSize = null;
+
+    @Advanced
+    @Argument(fullName="bandPassSigma", shortName="bandPassSigma", doc="The sigma of the band pass filter Gaussian kernel; if not provided defaults to Walker annotated default", required = false)
+    public Double bandPassSigma = null;
+
+    /*
+     * For active region limits in ActivityProfile
+*   */
+    @Hidden
+    @Argument(fullName = "maxProbPropagationDistance", shortName = "maxProbPropDist", minValue = 0, doc="Region probability propagation distance beyond it's maximum size.", required = false)
+    public Integer maxProbPropagationDistance = 50;
+
+    @Advanced
+    @Argument(fullName = "activeProbabilityThreshold", shortName = "ActProbThresh", minValue = 0.0, maxValue = 1.0, doc="Threshold for the probability of a profile state being active.", required = false)
+    public Double activeProbThreshold = 0.002;
+
+    private GenomeLocSortedSet presetActiveRegions = null;
+
+    @Override
+    public void initialize() {
+        if( activeRegionBindings == null ) { return; }
+        List<GenomeLoc> allIntervals = new ArrayList<GenomeLoc>(0);
+        for ( IntervalBinding intervalBinding : activeRegionBindings ) {
+            List<GenomeLoc> intervals = intervalBinding.getIntervals(this.getToolkit().getGenomeLocParser());
+
+            if ( intervals.isEmpty() ) {
+                logger.warn("The interval file " + intervalBinding.getSource() + " contains no intervals that could be parsed.");
+            }
+
+            allIntervals = IntervalUtils.mergeListsBySetOperator(intervals, allIntervals, IntervalSetRule.UNION);
+        }
+
+        presetActiveRegions = IntervalUtils.sortAndMergeIntervals(this.getToolkit().getGenomeLocParser(), allIntervals, IntervalMergingRule.ALL);
+    }
+
+    /**
+     * Does this walker want us to use a set of preset action regions instead of dynamically using the result of isActive?
+     * @return true if yes, false if no
+     */
+    public boolean hasPresetActiveRegions() {
+        return presetActiveRegions != null;
+    }
+
+    /**
+     * Get the set of preset active regions, or null if none were provided
+     * @return a set of genome locs specifying fixed active regions requested by the walker, or null if none exist
+     */
+    public GenomeLocSortedSet getPresetActiveRegions() {
+        return presetActiveRegions;
+    }
+
+    // Do we actually want to operate on the context?
+    public boolean filter(final RefMetaDataTracker tracker, final ReferenceContext ref, final AlignmentContext context) {
+        return true;    // We are keeping all the reads
+    }
+
+    public EnumSet<ActiveRegionReadState> desiredReadStates() {
+        return EnumSet.of(ActiveRegionReadState.PRIMARY);
+    }
+
+    public final boolean wantsNonPrimaryReads() {
+        return desiredReadStates().contains(ActiveRegionReadState.NONPRIMARY);
+    }
+
+    public boolean wantsExtendedReads() {
+        return desiredReadStates().contains(ActiveRegionReadState.EXTENDED);
+    }
+
+    public boolean wantsUnmappedReads() {
+        return desiredReadStates().contains(ActiveRegionReadState.UNMAPPED);
+    }
+
+    // Determine probability of active status over the AlignmentContext
+    @Ensures({"result.isActiveProb >= 0.0", "result.isActiveProb <= 1.0"})
+    public abstract ActivityProfileState isActive(final RefMetaDataTracker tracker, final ReferenceContext ref, final AlignmentContext context);
+
+    // Map over the ActiveRegion
+    public abstract MapType map(final ActiveRegion activeRegion, final RefMetaDataTracker metaDataTracker);
+
+    public final GenomeLocSortedSet extendIntervals( final GenomeLocSortedSet intervals, final GenomeLocParser genomeLocParser, IndexedFastaSequenceFile reference ) {
+        final int activeRegionExtension = this.getClass().getAnnotation(ActiveRegionTraversalParameters.class).extension();
+        final List<GenomeLoc> allIntervals = new ArrayList<GenomeLoc>();
+        for( final GenomeLoc interval : intervals.toList() ) {
+            final int start = Math.max( 1, interval.getStart() - activeRegionExtension );
+            final int stop = Math.min( reference.getSequenceDictionary().getSequence(interval.getContig()).getSequenceLength(), interval.getStop() + activeRegionExtension );
+            allIntervals.add( genomeLocParser.createGenomeLoc(interval.getContig(), start, stop) );
+        }
+        return IntervalUtils.sortAndMergeIntervals(genomeLocParser, allIntervals, IntervalMergingRule.ALL);
+    }
+
+
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/walkers/Allows.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/walkers/Allows.java
new file mode 100644
index 0000000..ef2251e
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/walkers/Allows.java
@@ -0,0 +1,51 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.walkers;
+
+import java.lang.annotation.*;
+/**
+ * User: hanna
+ * Date: May 19, 2009
+ * Time: 10:05:01 AM
+ * BROAD INSTITUTE SOFTWARE COPYRIGHT NOTICE AND AGREEMENT
+ * Software and documentation are copyright 2005 by the Broad Institute.
+ * All rights are reserved.
+ *
+ * Users acknowledge that this software is supplied without any warranty or support.
+ * The Broad Institute is not responsible for its use, misuse, or
+ * functionality.
+ */
+
+/**
+ * Determines what data sources are allowed by a given walker.
+ */
+ at Documented
+ at Inherited
+ at Retention(RetentionPolicy.RUNTIME)
+ at Target(ElementType.TYPE)
+public @interface Allows {
+    DataSource[] value();
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/walkers/Attribution.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/walkers/Attribution.java
new file mode 100644
index 0000000..613b6b2
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/walkers/Attribution.java
@@ -0,0 +1,39 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.walkers;
+
+import java.lang.annotation.*;
+
/**
 * Allow users to provide attribution text that will appear prominently in the log output.
 */
@Documented
@Inherited
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.TYPE)
public @interface Attribution {
    /** @return the attribution lines to display in the log output */
    String[] value();
}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/walkers/BAQMode.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/walkers/BAQMode.java
new file mode 100644
index 0000000..8a500c1
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/walkers/BAQMode.java
@@ -0,0 +1,56 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.walkers;
+
+import org.broadinstitute.gatk.engine.iterators.ReadTransformer;
+
+import java.lang.annotation.*;
+
+/**
+ * User: hanna
+ * Date: May 14, 2009
+ * Time: 1:51:22 PM
+ * BROAD INSTITUTE SOFTWARE COPYRIGHT NOTICE AND AGREEMENT
+ * Software and documentation are copyright 2005 by the Broad Institute.
+ * All rights are reserved.
+ *
+ * Users acknowledge that this software is supplied without any warranty or support.
+ * The Broad Institute is not responsible for its use, misuse, or
+ * functionality.
+ */
+
+/**
+ * Allows the walker to indicate what type of data it wants to consume.
+ */
+
+ at Documented
+ at Inherited
+ at Retention(RetentionPolicy.RUNTIME)
+ at Target(ElementType.TYPE)
+public @interface BAQMode {
+    public abstract org.broadinstitute.gatk.utils.baq.BAQ.QualityMode QualityMode() default org.broadinstitute.gatk.utils.baq.BAQ.QualityMode.OVERWRITE_QUALS;
+    public abstract ReadTransformer.ApplicationTime ApplicationTime() default ReadTransformer.ApplicationTime.ON_INPUT;
+}
\ No newline at end of file
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/walkers/By.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/walkers/By.java
new file mode 100644
index 0000000..903f5bd
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/walkers/By.java
@@ -0,0 +1,53 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.walkers;
+
+import java.lang.annotation.*;
+/**
+ * User: hanna
+ * Date: May 14, 2009
+ * Time: 1:51:22 PM
+ * BROAD INSTITUTE SOFTWARE COPYRIGHT NOTICE AND AGREEMENT
+ * Software and documentation are copyright 2005 by the Broad Institute.
+ * All rights are reserved.
+ *
+ * Users acknowledge that this software is supplied without any warranty or support.
+ * The Broad Institute is not responsible for its use, misuse, or
+ * functionality.
+ */
+
+/**
+ * Allows the walker to indicate what type of data it wants to consume.
+ */
+
+ at Documented
+ at Inherited
+ at Retention(RetentionPolicy.RUNTIME)
+ at Target(ElementType.TYPE)
+public @interface By {
+    DataSource value();
+}
+
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/walkers/DataSource.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/walkers/DataSource.java
new file mode 100644
index 0000000..e6deb76
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/walkers/DataSource.java
@@ -0,0 +1,58 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.walkers;
/**
 * Allow user to choose between a number of different data sources.
 *
 * User: hanna
 * Date: May 14, 2009
 */
public enum DataSource {
    /**
     * Does this walker require read (BAM) data to work?
     */
    READS,

    /**
     * Does this walker require reference data to work?
     */
    REFERENCE,

    /**
     * Does this walker require reference-ordered data (VCF) to work?
     */
    REFERENCE_ORDERED_DATA
}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/walkers/DisabledReadFilters.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/walkers/DisabledReadFilters.java
new file mode 100644
index 0000000..5708e26
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/walkers/DisabledReadFilters.java
@@ -0,0 +1,41 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.walkers;
+
+import htsjdk.samtools.filter.SamRecordFilter;
+
+import java.lang.annotation.*;
+
+/**
+ * An annotation to describe which inherited ReadFilters to disable
+ */
+ at Documented
+ at Inherited
+ at Retention(RetentionPolicy.RUNTIME)
+ at Target(ElementType.TYPE)
+public @interface DisabledReadFilters {
+    public Class<? extends SamRecordFilter>[] value() default {};
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/walkers/Downsample.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/walkers/Downsample.java
new file mode 100644
index 0000000..1992998
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/walkers/Downsample.java
@@ -0,0 +1,47 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.walkers;
+
+import org.broadinstitute.gatk.utils.downsampling.DownsampleType;
+
+import java.lang.annotation.*;
+
+/**
+ * Specifies a method for downsampling the reads passed to a given
+ * walker based on the input from that walker.
+ *
+ * @author hanna
+ * @version 0.1
+ */
+ at Documented
+ at Inherited
+ at Retention(RetentionPolicy.RUNTIME)
+ at Target(ElementType.TYPE)
+public @interface Downsample {
+    DownsampleType by();
+    int toCoverage() default -1;
+    double toFraction() default -1.0F;
+}
\ No newline at end of file
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/walkers/DuplicateWalker.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/walkers/DuplicateWalker.java
new file mode 100644
index 0000000..46691e4
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/walkers/DuplicateWalker.java
@@ -0,0 +1,57 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.walkers;
+
+import org.broadinstitute.gatk.utils.contexts.AlignmentContext;
+import org.broadinstitute.gatk.engine.filters.NotPrimaryAlignmentFilter;
+import org.broadinstitute.gatk.engine.filters.UnmappedReadFilter;
+import org.broadinstitute.gatk.utils.GenomeLoc;
+import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
+
+import java.util.List;
+import java.util.Set;
+
+/**
+ * Created by IntelliJ IDEA.
+ * User: mdepristo
+ * Date: Feb 22, 2009
+ * Time: 2:52:28 PM
+ * To change this template use File | Settings | File Templates.
+ */
+ at Requires({DataSource.READS,DataSource.REFERENCE})
+ at ReadFilters({UnmappedReadFilter.class,NotPrimaryAlignmentFilter.class})
+public abstract class DuplicateWalker<MapType, ReduceType> extends Walker<MapType, ReduceType> {
+    // Do we actually want to operate on the context?
+    public boolean filter(GenomeLoc loc, AlignmentContext context, Set<List<GATKSAMRecord>> readSets ) {
+        return true;    // We are keeping all the reads
+    }
+
+    public abstract MapType map(GenomeLoc loc, AlignmentContext context, Set<List<GATKSAMRecord>> readSets );
+
+    // Given result of map function
+    public abstract ReduceType reduceInit();
+    public abstract ReduceType reduce(MapType value, ReduceType sum);
+}
\ No newline at end of file
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/walkers/FailMethod.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/walkers/FailMethod.java
new file mode 100644
index 0000000..a2d12dc
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/walkers/FailMethod.java
@@ -0,0 +1,63 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.walkers;
+
+import htsjdk.samtools.SAMException;
+import org.broadinstitute.gatk.engine.CommandLineGATK;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+import org.broadinstitute.gatk.utils.exceptions.UserException;
+
+public enum FailMethod {
+      MAP,
+      REDUCE,
+      TREE_REDUCE;
+
+    /**
+     * Used by QC walkers to test that engine throws appropriate errors.
+     * Split from the walker in ErrorThrowing.java.
+     * @param exceptionToThrow Exception type to throw.
+     */
+    public static void fail(final String exceptionToThrow) {
+        switch (exceptionToThrow) {
+            case "UserException":
+                throw new UserException("UserException");
+            case "NullPointerException":
+                throw new NullPointerException();
+            case "ReviewedGATKException":
+                throw new ReviewedGATKException("ReviewedGATKException");
+            case "SamError1":
+                throw new RuntimeException(CommandLineGATK.PICARD_TEXT_SAM_FILE_ERROR_1);
+            case "SamError2":
+                throw new RuntimeException(CommandLineGATK.PICARD_TEXT_SAM_FILE_ERROR_2);
+            case "NoSpace1":
+                throw new htsjdk.samtools.util.RuntimeIOException(new java.io.IOException("No space left on device java.io.FileOutputStream.writeBytes(Native Method)"));
+            case "NoSpace2":
+                throw new SAMException("Exception writing BAM index file", new java.io.IOException("No space left on device java.io.FileOutputStream.writeBytes(Native Method)"));
+            default:
+                throw new UserException.BadArgumentValue("exception", "exception isn't a recognized value " + exceptionToThrow);
+        }
+    }
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/walkers/LocusWalker.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/walkers/LocusWalker.java
new file mode 100644
index 0000000..66f34b3
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/walkers/LocusWalker.java
@@ -0,0 +1,58 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.walkers;
+
+import org.broadinstitute.gatk.utils.contexts.AlignmentContext;
+import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
+import org.broadinstitute.gatk.utils.downsampling.DownsampleType;
+import org.broadinstitute.gatk.engine.filters.DuplicateReadFilter;
+import org.broadinstitute.gatk.engine.filters.FailsVendorQualityCheckFilter;
+import org.broadinstitute.gatk.engine.filters.NotPrimaryAlignmentFilter;
+import org.broadinstitute.gatk.engine.filters.UnmappedReadFilter;
+import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
+
+/**
+ * Created by IntelliJ IDEA.
+ * User: mdepristo
+ * Date: Feb 22, 2009
+ * Time: 2:52:28 PM
+ * To change this template use File | Settings | File Templates.
+ */
+@By(DataSource.READS)
+@Requires({DataSource.READS,DataSource.REFERENCE})
+@PartitionBy(PartitionType.LOCUS)
+@ReadFilters({UnmappedReadFilter.class,NotPrimaryAlignmentFilter.class,DuplicateReadFilter.class,FailsVendorQualityCheckFilter.class})
+@Downsample(by = DownsampleType.BY_SAMPLE, toCoverage = 1000)
+@RemoveProgramRecords
+public abstract class LocusWalker<MapType, ReduceType> extends Walker<MapType, ReduceType> {
+    // Do we actually want to operate on the context?
+    public boolean filter(RefMetaDataTracker tracker, ReferenceContext ref, AlignmentContext context) {
+        return true;    // We are keeping all the reads
+    }
+
+    // Map over the org.broadinstitute.gatk.engine.contexts.AlignmentContext
+    public abstract MapType map(RefMetaDataTracker tracker, ReferenceContext ref, AlignmentContext context);
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/walkers/NanoSchedulable.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/walkers/NanoSchedulable.java
new file mode 100644
index 0000000..329aaac
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/walkers/NanoSchedulable.java
@@ -0,0 +1,34 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.walkers;
+
+/**
+ * Root parallelism interface.  Walkers that implement this
+ * declare that their map function is thread-safe and so multiple
+ * map calls can be run in parallel in the same JVM instance.
+ */
+public interface NanoSchedulable {
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/walkers/PartitionBy.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/walkers/PartitionBy.java
new file mode 100644
index 0000000..5078d4e
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/walkers/PartitionBy.java
@@ -0,0 +1,40 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.walkers;
+
+import java.lang.annotation.*;
+
+/**
+ * Allows the walker to indicate how to partition data it wants to consume.
+ */
+@Documented
+@Inherited
+@Retention(RetentionPolicy.RUNTIME)
+@Target(ElementType.TYPE)
+public @interface PartitionBy {
+    PartitionType value();
+    boolean includeUnmapped() default false;
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/walkers/PartitionType.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/walkers/PartitionType.java
new file mode 100644
index 0000000..eb6b604
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/walkers/PartitionType.java
@@ -0,0 +1,61 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.walkers;
+
+/**
+ * Defines the ways walkers inputs can be partitioned before
+ * being passed to multiple instances of the walker.
+ */
+public enum PartitionType {
+    /**
+     * Do not partition the walker inputs.
+     */
+    NONE,
+
+    /**
+     * The walker inputs can be chunked down to individual
+     * reads.
+     */
+    READ,
+
+    /**
+     * The walker inputs can be chunked down to the
+     * per-locus level.
+     */
+    LOCUS,
+
+    /**
+     * The walker inputs should be processed as complete
+     * intervals defined -L or the reference contigs.
+     */
+    INTERVAL,
+
+    /**
+     * The walker inputs should always be processed as complete
+     * contigs, even if there are multiple intervals per contig.
+     */
+    CONTIG
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/walkers/RMD.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/walkers/RMD.java
new file mode 100644
index 0000000..8a221cb
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/walkers/RMD.java
@@ -0,0 +1,56 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.walkers;
+
+import htsjdk.tribble.Feature;
+
+import java.lang.annotation.Documented;
+import java.lang.annotation.Inherited;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+/**
+ * User: hanna
+ * Date: May 19, 2009
+ * Time: 1:34:15 PM
+ * BROAD INSTITUTE SOFTWARE COPYRIGHT NOTICE AND AGREEMENT
+ * Software and documentation are copyright 2005 by the Broad Institute.
+ * All rights are reserved.
+ *
+ * Users acknowledge that this software is supplied without any warranty or support.
+ * The Broad Institute is not responsible for its use, misuse, or
+ * functionality.
+ */
+
+/**
+ * A data type representing reference-ordered data.
+ */
+@Documented
+@Inherited
+@Retention(RetentionPolicy.RUNTIME)
+public @interface RMD {
+    String name();    
+    Class type() default Feature.class;
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/walkers/ReadFilters.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/walkers/ReadFilters.java
new file mode 100644
index 0000000..3438232
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/walkers/ReadFilters.java
@@ -0,0 +1,45 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.walkers;
+
+import htsjdk.samtools.filter.SamRecordFilter;
+import org.broadinstitute.gatk.utils.help.DocumentedGATKFeature;
+
+import java.lang.annotation.*;
+
+/**
+ * An annotation to describe what kind of data will be filtered out.
+ *
+ * @author hanna
+ * @version 0.1
+ */
+@Documented
+@Inherited
+@Retention(RetentionPolicy.RUNTIME)
+@Target(ElementType.TYPE)
+public @interface ReadFilters {
+    public Class<? extends SamRecordFilter>[] value() default {};
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/walkers/ReadPairWalker.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/walkers/ReadPairWalker.java
new file mode 100644
index 0000000..9b8e3be
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/walkers/ReadPairWalker.java
@@ -0,0 +1,63 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.walkers;
+
+import htsjdk.samtools.SAMRecord;
+
+import java.util.Collection;
+
+/**
+ * Walks over all pairs/collections of reads in a BAM file sorted by
+ * read name.
+ *
+ * @author mhanna
+ * @version 0.1
+ */
+@Requires({DataSource.READS})
+public abstract class ReadPairWalker<MapType,ReduceType> extends Walker<MapType,ReduceType> {
+
+    /**
+     * Optionally filters out read pairs.
+     * @param reads collections of all reads with the same read name.
+     * @return True to process the reads with map/reduce; false otherwise.
+     */
+    public boolean filter(Collection<SAMRecord> reads) {
+        // Keep all pairs by default.
+        return true;
+    }
+
+    /**
+     * Maps a read pair to a given reduce of type MapType.  Semantics determined by subclasser.
+     * @param reads Collection of reads having the same name.
+     * @return Semantics defined by implementer.
+     */
+    public abstract MapType map(Collection<SAMRecord> reads);
+
+    // Given result of map function
+    public abstract ReduceType reduceInit();
+    public abstract ReduceType reduce(MapType value, ReduceType sum);
+
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/walkers/ReadWalker.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/walkers/ReadWalker.java
new file mode 100644
index 0000000..8173131
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/walkers/ReadWalker.java
@@ -0,0 +1,55 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.walkers;
+
+import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
+import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
+import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
+
+/**
+ * Created by IntelliJ IDEA.
+ * User: mdepristo
+ * Date: Feb 22, 2009
+ * Time: 2:52:28 PM
+ * To change this template use File | Settings | File Templates.
+ */
+@Requires({DataSource.READS, DataSource.REFERENCE})
+@PartitionBy(value = PartitionType.READ, includeUnmapped = true)
+public abstract class ReadWalker<MapType, ReduceType> extends Walker<MapType, ReduceType> {
+    public boolean requiresOrderedReads() { return false; }
+    
+    // Do we actually want to operate on the context?
+    /** Must return true for reads that need to be processed. Reads, for which this method return false will
+     * be skipped by the engine and never passed to the walker.
+     */
+    public boolean filter(ReferenceContext ref, GATKSAMRecord read) {
+        // We are keeping all the reads
+        return true;
+    }
+
+    // Map over the org.broadinstitute.gatk.engine.contexts.AlignmentContext
+    public abstract MapType map(ReferenceContext ref, GATKSAMRecord read, RefMetaDataTracker metaDataTracker);
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/walkers/RefWalker.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/walkers/RefWalker.java
new file mode 100644
index 0000000..0535aca
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/walkers/RefWalker.java
@@ -0,0 +1,39 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.walkers;
+
+/**
+ * Created by IntelliJ IDEA.
+ * User: mdepristo
+ * Date: Feb 22, 2009
+ * Time: 2:52:28 PM
+ * To change this template use File | Settings | File Templates.
+ */
+@By(DataSource.REFERENCE)
+@Requires({DataSource.REFERENCE})
+@Allows(DataSource.REFERENCE)
+public abstract class RefWalker<MapType, ReduceType> extends LocusWalker<MapType, ReduceType> {
+}
\ No newline at end of file
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/walkers/Reference.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/walkers/Reference.java
new file mode 100644
index 0000000..c3cee86
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/walkers/Reference.java
@@ -0,0 +1,47 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.walkers;
+
+import java.lang.annotation.*;
+
+/**
+ * Describes presentation, capabilities, and limitations of the reference
+ * provided to the GATK.
+ *
+ * @author mhanna
+ * @version 0.1
+ */
+@Documented
+@Inherited
+@Retention(RetentionPolicy.RUNTIME)
+@Target(ElementType.TYPE)
+public @interface Reference {
+    /**
+     * Specifies the window expansion for the current walker.
+     * @return The window to which the reference should be expanded.  Defaults to [0,0] (no expansion).
+     */
+    public Window window() default @Window;
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/walkers/RemoveProgramRecords.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/walkers/RemoveProgramRecords.java
new file mode 100644
index 0000000..8f6f5e3
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/walkers/RemoveProgramRecords.java
@@ -0,0 +1,46 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.walkers;
+
+/**
+ * Created with IntelliJ IDEA.
+ * User: thibault
+ * Date: 8/2/12
+ * Time: 1:58 PM
+ * To change this template use File | Settings | File Templates.
+ */
+
+import java.lang.annotation.*;
+
+/**
+ * Indicates that program records should be removed from SAM headers by default for this walker
+ */
+@Documented
+@Inherited
+@Retention(RetentionPolicy.RUNTIME)
+@Target(ElementType.TYPE)
+public @interface RemoveProgramRecords {
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/walkers/Requires.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/walkers/Requires.java
new file mode 100644
index 0000000..9dec898
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/walkers/Requires.java
@@ -0,0 +1,52 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.walkers;
+
+import java.lang.annotation.*;
+/**
+ * User: hanna
+ * Date: May 19, 2009
+ * Time: 10:06:47 AM
+ * BROAD INSTITUTE SOFTWARE COPYRIGHT NOTICE AND AGREEMENT
+ * Software and documentation are copyright 2005 by the Broad Institute.
+ * All rights are reserved.
+ *
+ * Users acknowledge that this software is supplied without any warranty or support.
+ * The Broad Institute is not responsible for its use, misuse, or
+ * functionality.
+ */
+
+/**
+ * Determines what data sources are mandated by a given walker.
+ */
+@Documented
+@Inherited
+@Retention(RetentionPolicy.RUNTIME)
+@Target(ElementType.TYPE)
+public @interface Requires {
+    DataSource[] value();
+    RMD[] referenceMetaData() default {};
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/walkers/RodWalker.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/walkers/RodWalker.java
new file mode 100644
index 0000000..ef94e7e
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/walkers/RodWalker.java
@@ -0,0 +1,39 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.walkers;
+
+/**
+ * Created by IntelliJ IDEA.
+ * User: mdepristo
+ * Date: Feb 22, 2009
+ * Time: 2:52:28 PM
+ * To change this template use File | Settings | File Templates.
+ */
+@By(DataSource.REFERENCE_ORDERED_DATA)
+@Requires({DataSource.REFERENCE, DataSource.REFERENCE_ORDERED_DATA})
+@Allows({DataSource.REFERENCE, DataSource.REFERENCE_ORDERED_DATA})
+public abstract class RodWalker<MapType, ReduceType> extends LocusWalker<MapType, ReduceType> {
+}
\ No newline at end of file
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/walkers/TreeReducible.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/walkers/TreeReducible.java
new file mode 100644
index 0000000..a041148
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/walkers/TreeReducible.java
@@ -0,0 +1,49 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.walkers;
+
/**
 * Contract for walkers whose reduce results can be combined pairwise: the
 * reduce outputs of two adjacent shards of the data may be merged into a
 * single composite result, and composite results may themselves be merged
 * with one another, allowing the engine to assemble the final answer as a
 * tree of merges rather than a strictly sequential fold.
 */
public interface TreeReducible<ReduceType> {
    /**
     * Merges two partial reduce results into one composite result
     * (a 'reduce of reduces').
     *
     * @param lhs reduce result covering the left-most (earlier) portion of the data.
     * @param rhs reduce result covering the right-most (later) portion of the data.
     * @return the combined reduce result.
     */
    ReduceType treeReduce(ReduceType lhs, ReduceType rhs);
}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/walkers/Walker.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/walkers/Walker.java
new file mode 100644
index 0000000..9d08422
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/walkers/Walker.java
@@ -0,0 +1,210 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.walkers;
+
+import htsjdk.samtools.SAMSequenceDictionary;
+import org.apache.log4j.Logger;
+import org.broadinstitute.gatk.engine.CommandLineGATK;
+import org.broadinstitute.gatk.engine.GenomeAnalysisEngine;
+import org.broadinstitute.gatk.utils.downsampling.DownsampleType;
+import org.broadinstitute.gatk.engine.filters.BadCigarFilter;
+import org.broadinstitute.gatk.engine.filters.MalformedReadFilter;
+import org.broadinstitute.gatk.engine.iterators.ReadTransformer;
+import org.broadinstitute.gatk.engine.samples.Sample;
+import org.broadinstitute.gatk.engine.samples.SampleDB;
+import org.broadinstitute.gatk.utils.GenomeLoc;
+import org.broadinstitute.gatk.utils.baq.BAQ;
+import org.broadinstitute.gatk.utils.collections.Pair;
+import org.broadinstitute.gatk.utils.help.DocumentedGATKFeature;
+import org.broadinstitute.gatk.engine.recalibration.BQSRMode;
+import org.broadinstitute.gatk.engine.arguments.GATKArgumentCollection;
+
+import java.util.List;
+
+/**
+ * Created by IntelliJ IDEA.
+ * User: hanna
+ * Date: Mar 17, 2009
+ * Time: 1:53:31 PM
+ * To change this template use File | Settings | File Templates.
+ */
+ at ReadFilters({MalformedReadFilter.class,BadCigarFilter.class})
+ at PartitionBy(PartitionType.NONE)
+ at Downsample(by = DownsampleType.NONE)
+ at BAQMode(QualityMode = BAQ.QualityMode.OVERWRITE_QUALS, ApplicationTime = ReadTransformer.ApplicationTime.ON_INPUT)
+ at BQSRMode(ApplicationTime = ReadTransformer.ApplicationTime.ON_INPUT)
+ at DocumentedGATKFeature(groupName = "Uncategorized", extraDocs = {CommandLineGATK.class})
+public abstract class Walker<MapType, ReduceType> {
+    final protected static Logger logger = Logger.getLogger(Walker.class);
+    private GenomeAnalysisEngine toolkit;
+
+    protected Walker() {
+    }
+
+    /**
+     * Set the toolkit, for peering into internal structures that can't
+     * otherwise be read.
+     * @param toolkit The genome analysis toolkit.
+     */
+    public void setToolkit(GenomeAnalysisEngine toolkit) {
+        this.toolkit = toolkit;
+    }
+
+    /**
+     * Retrieve the toolkit, for peering into internal structures that can't
+     * otherwise be read.  Use sparingly, and discuss uses with software engineering
+     * team.
+     * @return The genome analysis toolkit.
+     */
+    protected GenomeAnalysisEngine getToolkit() {
+        return toolkit;
+    }
+
+    /**
+     * Gets the master sequence dictionary for this walker
+     * @link GenomeAnalysisEngine.getMasterSequenceDictionary
+     * @return the master sequence dictionary or null if no genome analysis toolkit.
+     */
+    protected SAMSequenceDictionary getMasterSequenceDictionary() {
+        if ( toolkit == null )
+            return null;
+        else
+            return toolkit.getMasterSequenceDictionary();
+    }
+
+    /**
+     * Gets the GATK argument collection
+     * @link GenomeAnalysisEngine.getArguments
+     * @return the GATK argument collection or null if no genome analysis toolkit.
+     */
+    public GATKArgumentCollection getArguments(){
+        if ( toolkit == null )
+            return null;
+        else
+            return toolkit.getArguments();
+    }
+
+    /**
+     * Gets the GATK samples database
+     * @link GenomeAnalysisEngine.getSampleDB
+     * @return the GATK samples database or null if no genome analysis toolkit.
+     */
+    public SampleDB getSampleDB() {
+        if ( toolkit == null )
+            return null;
+        else
+            return toolkit.getSampleDB();
+    }
+
+    /**
+     * Gets a sample from the GATK samples database
+     * @param id the sample ID
+     * @return the sample from the GATK samples database or null if no genome analysis toolkit or samples database.
+     */
+    protected Sample getSample(final String id) {
+        if ( getSampleDB() == null )
+            return null;
+        else
+            return getSampleDB().getSample(id);
+    }
+
+    /**
+     * (conceptual static) method that states whether you want to see reads piling up at a locus
+     * that contain a deletion at the locus.
+     *
+     * ref:   ATCTGA
+     * read1: ATCTGA
+     * read2: AT--GA
+     *
+     * Normally, the locus iterator only returns a list of read1 at this locus at position 3, but
+     * if this function returns true, then the system will return (read1, read2) with offsets
+     * of (3, -1).  The -1 offset indicates a deletion in the read.
+     *
+     * @return false if you don't want to see deletions, or true if you do
+     */
+    public boolean includeReadsWithDeletionAtLoci() { 
+        return false;
+    }
+
+    public void initialize() { }
+
+    /**
+     * A function for overloading in subclasses providing a mechanism to abort early from a walker.
+     *
+     * If this ever returns true, then the Traversal engine will stop executing map calls
+     * and start the process of shutting down the walker in an orderly fashion.
+     * @return
+     */
+    public boolean isDone() {
+        return false;
+    }
+
+    /**
+     * Provide an initial value for reduce computations.
+     * @return Initial value of reduce.
+     */
+    public abstract ReduceType reduceInit();
+
+    /**
+     * Reduces a single map with the accumulator provided as the ReduceType.
+     * @param value result of the map.
+     * @param sum accumulator for the reduce.
+     * @return accumulator with result of the map taken into account.
+     */
+    public abstract ReduceType reduce(MapType value, ReduceType sum);    
+
+    public void onTraversalDone(ReduceType result) {
+        logger.info("[REDUCE RESULT] Traversal result is: " + result);
+    }
+
+    /**
+     * General interval reduce routine called after all of the traversals are done
+     * @param results interval reduce results
+     */
+    public void onTraversalDone(List<Pair<GenomeLoc, ReduceType>> results) {
+        for ( Pair<GenomeLoc, ReduceType> result : results ) {
+            logger.info(String.format("[INTERVAL REDUCE RESULT] at %s ", result.getFirst()));
+            this.onTraversalDone(result.getSecond());
+        }
+    }
+
+    /**
+     * Return true if your walker wants to reduce each interval separately.  Default is false.
+     *
+     * If you set this flag, several things will happen.
+     *
+     * The system will invoke reduceInit() once for each interval being processed, starting a fresh reduce
+     * Reduce will accumulate normally at each map unit in the interval
+     * However, onTraversalDone(reduce) will be called after each interval is processed.
+     * The system will call onTraversalDone( GenomeLoc -> reduce ), after all reductions are done,
+     *   which is overloaded here to call onTraversalDone(reduce) for each location
+     *
+     * @return true if your walker wants to reduce each interval separately.
+     */
+    public boolean isReduceByInterval() {
+        return false;
+    }
+}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/walkers/WalkerName.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/walkers/WalkerName.java
new file mode 100644
index 0000000..2c8dca9
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/walkers/WalkerName.java
@@ -0,0 +1,42 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.walkers;
+
+import java.lang.annotation.*;
+
/**
 * Gives a walker class an explicit name, overriding whatever name would
 * otherwise be derived from the class itself (presumably consumed by the
 * walker registry -- confirm against WalkerManager).
 *
 * NOTE: the meta-annotations were restored from mail-archiver garbling
 * (" at Documented" etc. in place of "@Documented").
 */
@Documented
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.TYPE)
public @interface WalkerName {
    /** @return the walker's name; defaults to the empty string (no override). */
    String value() default "";
}
diff --git a/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/walkers/Window.java b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/walkers/Window.java
new file mode 100644
index 0000000..f613607
--- /dev/null
+++ b/public/gatk-engine/src/main/java/org/broadinstitute/gatk/engine/walkers/Window.java
@@ -0,0 +1,57 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.walkers;
+
+import java.lang.annotation.Documented;
+import java.lang.annotation.Inherited;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+
/**
 * Describes the size of the window into the genome.  Has differing semantics based on
 * the data this annotation is used to describe.
 *
 * NOTE: the meta-annotations were restored from mail-archiver garbling
 * (" at Documented" etc. in place of "@Documented").
 *
 * @author mhanna
 * @version 0.1
 */
@Documented
@Inherited
@Retention(RetentionPolicy.RUNTIME)
public @interface Window {
    /**
     * Controls where the window should start relative to the locus
     * currently being processed.
     * @return start point; default is 0, indicating 'supply only the reference base at the current locus'.
     */
    int start() default 0;

    /**
     * Controls where the window should stop relative to the locus
     * currently being processed.
     * @return stop point; default is 0, indicating 'supply only the reference base at the current locus'.
     */
    int stop() default 0;
}
diff --git a/public/gatk-tools-public/src/main/resources/GATK_public.key b/public/gatk-engine/src/main/resources/GATK_public.key
similarity index 100%
rename from public/gatk-tools-public/src/main/resources/GATK_public.key
rename to public/gatk-engine/src/main/resources/GATK_public.key
diff --git a/public/gatk-tools-public/src/main/resources/org/broadinstitute/gatk/engine/phonehome/resources/GATK_AWS_access.key b/public/gatk-engine/src/main/resources/org/broadinstitute/gatk/engine/phonehome/resources/GATK_AWS_access.key
similarity index 100%
rename from public/gatk-tools-public/src/main/resources/org/broadinstitute/gatk/engine/phonehome/resources/GATK_AWS_access.key
rename to public/gatk-engine/src/main/resources/org/broadinstitute/gatk/engine/phonehome/resources/GATK_AWS_access.key
diff --git a/public/gatk-tools-public/src/main/resources/org/broadinstitute/gatk/engine/phonehome/resources/GATK_AWS_secret.key b/public/gatk-engine/src/main/resources/org/broadinstitute/gatk/engine/phonehome/resources/GATK_AWS_secret.key
similarity index 100%
rename from public/gatk-tools-public/src/main/resources/org/broadinstitute/gatk/engine/phonehome/resources/GATK_AWS_secret.key
rename to public/gatk-engine/src/main/resources/org/broadinstitute/gatk/engine/phonehome/resources/GATK_AWS_secret.key
diff --git a/public/gatk-tools-public/src/main/resources/org/broadinstitute/gatk/utils/recalibration/BQSR.R b/public/gatk-engine/src/main/resources/org/broadinstitute/gatk/engine/recalibration/BQSR.R
similarity index 100%
rename from public/gatk-tools-public/src/main/resources/org/broadinstitute/gatk/utils/recalibration/BQSR.R
rename to public/gatk-engine/src/main/resources/org/broadinstitute/gatk/engine/recalibration/BQSR.R
diff --git a/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/CommandLineGATKUnitTest.java b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/CommandLineGATKUnitTest.java
new file mode 100644
index 0000000..a5ddd06
--- /dev/null
+++ b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/CommandLineGATKUnitTest.java
@@ -0,0 +1,68 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine;
+
+import htsjdk.samtools.SAMFileReader;
+import org.broadinstitute.gatk.utils.BaseTest;
+import org.testng.Assert;
+import org.testng.annotations.Test;
+
+import java.io.File;
+
+/**
+ * @author Eric Banks
+ * @since 7/18/12
+ */
+public class CommandLineGATKUnitTest extends BaseTest {
+
+    @Test(enabled = true)
+    public void testSamTextFileError1() {
+        final File samFile = new File(publicTestDir + "testfile.sam");
+        final File indexFile = new File(publicTestDir + "HiSeq.1mb.1RG.bai");
+        try {
+            final SAMFileReader reader = new SAMFileReader(samFile, indexFile, false);
+
+            // we shouldn't get here
+            Assert.fail("We should have exceptioned out when trying to create a reader with an index for a textual SAM file");
+        } catch (RuntimeException e) {
+            Assert.assertTrue(e.getMessage().indexOf(CommandLineGATK.PICARD_TEXT_SAM_FILE_ERROR_1) != -1);
+        }
+    }
+
+    @Test(enabled = true)
+    public void testSamTextFileError2() {
+        File samFile = new File(publicTestDir + "testfile.sam");
+        try {
+            final SAMFileReader reader = new SAMFileReader(samFile);
+            reader.getFilePointerSpanningReads();
+
+            // we shouldn't get here
+            Assert.fail("We should have exceptioned out when trying to call getFilePointerSpanningReads() for a textual SAM file");
+        } catch (RuntimeException e) {
+            Assert.assertTrue(e.getMessage().indexOf(CommandLineGATK.PICARD_TEXT_SAM_FILE_ERROR_2) != -1);
+        }
+    }
+}
diff --git a/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/EngineFeaturesIntegrationTest.java b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/EngineFeaturesIntegrationTest.java
new file mode 100644
index 0000000..9f5d881
--- /dev/null
+++ b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/EngineFeaturesIntegrationTest.java
@@ -0,0 +1,793 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine;
+
+import htsjdk.samtools.*;
+import htsjdk.tribble.readers.LineIterator;
+import org.broadinstitute.gatk.engine.walkers.*;
+import org.broadinstitute.gatk.utils.commandline.*;
+import org.broadinstitute.gatk.utils.contexts.AlignmentContext;
+import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
+import org.broadinstitute.gatk.engine.filters.MappingQualityUnavailableFilter;
+import org.broadinstitute.gatk.engine.filters.DuplicateReadFilter;
+import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
+import org.broadinstitute.gatk.utils.collections.Pair;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+import org.broadinstitute.gatk.utils.exceptions.UserException;
+import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
+import htsjdk.variant.variantcontext.VariantContext;
+import htsjdk.variant.vcf.VCFCodec;
+import htsjdk.variant.vcf.VCFHeader;
+import htsjdk.variant.vcf.VCFHeaderLine;
+import org.broadinstitute.gatk.utils.variant.VCIterable;
+import org.testng.Assert;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+import java.io.*;
+import java.util.*;
+
+/**
+ *
+ */
+public class EngineFeaturesIntegrationTest extends WalkerTest {
+    private void testBadRODBindingInput(String type, String name, Class c) {
+        WalkerTestSpec spec = new WalkerTestSpec("-T TestPrintVariantsWalker -L 1:1 --variant:variant," + type + " "
+                + b37dbSNP132 + " -R " + b37KGReference + " -o %s",
+                1, c);
+        executeTest(name, spec);
+    }
+
+    @Test() private void testBadRODBindingInputType1() {
+        testBadRODBindingInput("beagle", "BEAGLE input to VCF expecting walker", UserException.BadArgumentValue.class);
+    }
+
+    @Test() private void testBadRODBindingInputType3() {
+        testBadRODBindingInput("bed", "Bed input to VCF expecting walker", UserException.BadArgumentValue.class);
+    }
+
+    @Test() private void testBadRODBindingInputTypeUnknownType() {
+        testBadRODBindingInput("bedXXX", "Unknown input to VCF expecting walker", UserException.UnknownTribbleType.class);
+    }
+
+    private void testMissingFile(String name, String missingBinding) {
+        WalkerTestSpec spec = new WalkerTestSpec(missingBinding + " -R " + b37KGReference + " -o %s",
+                1, UserException.CouldNotReadInputFile.class);
+        executeTest(name, spec);
+    }
+
+    @Test() private void testMissingBAMnt1() {
+        testMissingFile("missing BAM", "-T TestPrintReadsWalker -I missing.bam -nt 1");
+    }
+    @Test() private void testMissingBAMnt4() {
+        testMissingFile("missing BAM", "-T TestPrintReadsWalker -I missing.bam -nt 4");
+    }
+    @Test() private void testMissingVCF() {
+        testMissingFile("missing VCF", "-T TestPrintVariantsWalker -V missing.vcf");
+    }
+    @Test() private void testMissingInterval() {
+        testMissingFile("missing interval", "-T TestPrintReadsWalker -L missing.interval_list -I " + b37GoodBAM);
+    }
+
+
+    // --------------------------------------------------------------------------------
+    //
+    // Test that our exceptions are coming back as we expect
+    //
+    // --------------------------------------------------------------------------------
+
+    private class EngineErrorHandlingTestProvider extends TestDataProvider {
+        final Class expectedException;
+        final String args;
+        final int iterationsToTest;
+
+        public EngineErrorHandlingTestProvider(Class exceptedException, final String args) {
+            super(EngineErrorHandlingTestProvider.class);
+            this.expectedException = exceptedException;
+            this.args = args;
+            this.iterationsToTest = args.equals("") ? 1 : 10;
+            setName(String.format("Engine error handling: expected %s with args %s", exceptedException, args));
+        }
+    }
+
    /**
     * Builds the cross product of fail methods (excluding TREE_REDUCE) x
     * exception classes x threading arguments for the error-handling tests.
     * Providers register themselves as a constructor side effect -- presumably
     * via the TestDataProvider superclass; getTests() then collects them.
     */
    @DataProvider(name = "EngineErrorHandlingTestProvider")
    public Object[][] makeEngineErrorHandlingTestProvider() {
        for ( final FailMethod failMethod : FailMethod.values() ) {
            if ( failMethod == FailMethod.TREE_REDUCE )
                continue; // cannot reliably throw errors in TREE_REDUCE

            final String failArg = " -fail " + failMethod.name();
            // One provider per (exception class, threading mode) combination.
            for ( final String args : Arrays.asList("", " -nt 2", " -nct 2") ) {
                new EngineErrorHandlingTestProvider(NullPointerException.class, failArg + args);
                new EngineErrorHandlingTestProvider(UserException.class, failArg + args);
                new EngineErrorHandlingTestProvider(ReviewedGATKException.class, failArg + args);
            }
        }

        return EngineErrorHandlingTestProvider.getTests(EngineErrorHandlingTestProvider.class);
    }
+
    //
    // Loop over errors to throw, make sure they are the errors we get back from the engine, regardless of NT type
    //
    @Test(enabled = true, dataProvider = "EngineErrorHandlingTestProvider", timeOut = 60 * 1000 )
    public void testEngineErrorHandlingTestProvider(final EngineErrorHandlingTestProvider cfg) {
        // Threaded configs repeat 10x (see iterationsToTest), presumably to
        // increase the chance of surfacing nondeterministic failures.
        for ( int i = 0; i < cfg.iterationsToTest; i++ ) {
            final String root = "-T TestErrorThrowingWalker -R " + exampleFASTA;
            // -E tells the test walker which exception type to throw.
            final String args = root + cfg.args + " -E " + cfg.expectedException.getSimpleName();
            // 0 expected outputs; the run must fail with cfg.expectedException.
            WalkerTestSpec spec = new WalkerTestSpec(args, 0, cfg.expectedException);

            executeTest(cfg.toString(), spec);
        }
    }
+
+    // --------------------------------------------------------------------------------
+    //
+    // Test that read filters are being applied in the order we expect
+    //
+    // --------------------------------------------------------------------------------
+
    /**
     * Trivial read-counting walker used to verify filter application order:
     * it declares MappingQualityUnavailableFilter and DuplicateReadFilter as
     * walker-level filters, then disables DuplicateReadFilter again via the
     * DisabledReadFilters annotation.  Emits the surviving-read count.
     */
    @ReadFilters({MappingQualityUnavailableFilter.class, DuplicateReadFilter.class})
    @DisabledReadFilters({DuplicateReadFilter.class})
    public static class DummyReadWalkerWithFilters extends ReadWalker<Integer, Integer> {
        @Output
        PrintStream out;

        // Counts 1 for every read that survives filtering.
        @Override
        public Integer map(ReferenceContext ref, GATKSAMRecord read, RefMetaDataTracker metaDataTracker) {
            return 1;
        }

        @Override
        public Integer reduceInit() {
            return 0;
        }

        @Override
        public Integer reduce(Integer value, Integer sum) {
            return value + sum;
        }

        // Writes the final read count to the -o output stream.
        @Override
        public void onTraversalDone(Integer result) {
            out.println(result);
        }
    }
+
    // A user-supplied -rf filter must run before the walker's own filters;
    // the md5 pins the expected count output.
    @Test(enabled = true)
    public void testUserReadFilterAppliedBeforeWalker() {
        WalkerTestSpec spec = new WalkerTestSpec("-R " + b37KGReference + " -I " + privateTestDir + "allMAPQ255.bam"
                + " -T DummyReadWalkerWithFilters -o %s -L MT -rf ReassignMappingQuality",
                1, Arrays.asList("ecf27a776cdfc771defab1c5d19de9ab"));
        executeTest("testUserReadFilterAppliedBeforeWalker", spec);
    }

    // Disabling an already-disabled walker-level filter via -drf is a no-op
    // beyond the annotation; the md5 pins the expected output.
    @Test(enabled = true)
    public void testUserReadFilterDisabledAppliedBeforeWalker() {
        WalkerTestSpec spec = new WalkerTestSpec("-R " + b37KGReference + " -I " + privateTestDir + "allMAPQ255.bam"
                + " -T DummyReadWalkerWithFilters -o %s -L MT -drf DuplicateRead",
                1, Arrays.asList("897316929176464ebc9ad085f31e7284"));
        executeTest("testUserReadFilterDisabledAppliedBeforeWalker", spec);
    }

    // -drf for a filter the walker never enabled must fail -- presumably the
    // engine rejects disabling a filter that is not in effect (confirm).
    @Test( enabled = true, expectedExceptions = RuntimeException.class )
    public void testUserReadFilterDisabledAppliedBeforeWalkerException() {
        WalkerTestSpec spec = new WalkerTestSpec("-R " + b37KGReference + " -I " + privateTestDir + "allMAPQ255.bam"
                + " -T DummyReadWalkerWithFilters -o %s -L MT -drf ReassignMappingQuality",
                1, Arrays.asList(""));
        executeTest("testUserReadFilterDisabledAppliedBeforeWalkerException", spec);
    }
+
    // Out-of-range -compress values must be rejected with a UserException.
    @Test
    public void testNegativeCompress() {
        testBadCompressArgument(-1);
    }

    @Test
    public void testTooBigCompress() {
        testBadCompressArgument(100);
    }

    /**
     * Runs TestPrintReadsWalker with the given -compress level and asserts
     * the engine fails with a UserException (valid zlib levels are 0-9 --
     * presumably what the engine enforces; confirm).
     */
    private void testBadCompressArgument(final int compress) {
        WalkerTestSpec spec = new WalkerTestSpec("-T TestPrintReadsWalker -R " + b37KGReference + " -I " + privateTestDir + "NA12878.1_10mb_2_10mb.bam -o %s -compress " + compress,
                1, UserException.class);
        executeTest("badCompress " + compress, spec);
    }
+
    // --------------------------------------------------------------------------------
    //
    // Test that the VCF version key is what we expect
    //
    // --------------------------------------------------------------------------------
    @Test(enabled = true)
    public void testGATKVersionInVCF() throws Exception {
        // Arrays.asList("") -- no md5 comparison; we inspect the header ourselves below.
        WalkerTestSpec spec = new WalkerTestSpec("-T TestPrintVariantsWalker -R " + b37KGReference +
                " -V " + privateTestDir + "NA12878.WGS.b37.chr20.firstMB.vcf"
                + " -o %s -L 20:61098",
                1, Arrays.asList(""));
        spec.disableShadowBCF();
        final File vcf = executeTest("testGATKVersionInVCF", spec).first.get(0);
        final VCFCodec codec = new VCFCodec();
        // NOTE(review): the FileInputStream is never closed -- tolerable in a
        // test, but try-with-resources would be cleaner.
        final VCFHeader header = (VCFHeader) codec.readActualHeader(codec.makeSourceFromStream(new FileInputStream(vcf)));

        // go through the metadata headers and look for ones that start with the GATK_COMMAND_LINE_KEY
        VCFHeaderLine versionLine = null;
        for ( final VCFHeaderLine headerLine : header.getMetaDataInInputOrder()) {
           if(headerLine.getKey().startsWith(GATKVCFUtils.GATK_COMMAND_LINE_KEY)) {
               versionLine = headerLine;
               break;
           }
        }
        // The command-line header must exist and name the walker that produced the file.
        Assert.assertNotNull(versionLine);
        Assert.assertTrue(versionLine.toString().contains("TestPrintVariantsWalker"));
    }
+
    // Re-processing a VCF that already carries a GATK command-line header
    // (from HaplotypeCaller) must preserve it and add exactly one new entry
    // for this walker.
    @Test(enabled = true)
    public void testMultipleGATKVersionsInVCF() throws Exception {
        WalkerTestSpec spec = new WalkerTestSpec("-T TestPrintVariantsWalker -R " + b37KGReference +
                " -V " + privateTestDir + "gatkCommandLineInHeader.vcf"
                + " -o %s",
                1, Arrays.asList(""));
        spec.disableShadowBCF();
        final File vcf = executeTest("testMultipleGATKVersionsInVCF", spec).first.get(0);
        final VCFCodec codec = new VCFCodec();
        final VCFHeader header = (VCFHeader) codec.readActualHeader(codec.makeSourceFromStream(new FileInputStream(vcf)));

        boolean foundHC = false;
        boolean foundPV = false;
        for ( final VCFHeaderLine line : header.getMetaDataInInputOrder() ) {
            if ( line.getKey().startsWith(GATKVCFUtils.GATK_COMMAND_LINE_KEY) ) {
                // assertFalse guards against duplicated header entries.
                if ( line.toString().contains("HaplotypeCaller") ) {
                    Assert.assertFalse(foundHC);
                    foundHC = true;
                }
                if ( line.toString().contains("TestPrintVariantsWalker") ) {
                    Assert.assertFalse(foundPV);
                    foundPV = true;
                }
            }
        }

        Assert.assertTrue(foundHC, "Didn't find HaplotypeCaller command line header field");
        Assert.assertTrue(foundPV, "Didn't find TestPrintVariantsWalker command line header field");
    }
+
+    // Verifies that re-running the SAME walker over a VCF that already contains its
+    // command-line header adds a second, ".2"-suffixed key rather than replacing it.
+    @Test(enabled = true)
+    public void testMultipleGATKVersionsSameWalkerInVCF() throws Exception {
+        WalkerTestSpec spec = new WalkerTestSpec("-T TestPrintVariantsWalker -R " + b37KGReference +
+                " -V " + privateTestDir + "gatkCommandLineExistsInHeader.vcf"
+                + " -o %s",
+                1, Arrays.asList(""));
+        spec.disableShadowBCF();
+        final File vcf = executeTest("testMultipleGATKVersionsSameWalkerInVCF", spec).first.get(0);
+        final VCFCodec codec = new VCFCodec();
+        final VCFHeader header;
+        // try-with-resources: close the stream even if header parsing throws
+        try ( final FileInputStream vcfStream = new FileInputStream(vcf) ) {
+            header = (VCFHeader) codec.readActualHeader(codec.makeSourceFromStream(vcfStream));
+        }
+
+        boolean foundFirstWalker  = false;
+        boolean foundSecondWalker = false;
+        for ( final VCFHeaderLine line : header.getMetaDataInInputOrder() ) {
+            if ( line.getKey().startsWith(GATKVCFUtils.GATK_COMMAND_LINE_KEY) ) {
+                // check if we found the second walker command line header field key
+                if ( line.getKey().contains("TestPrintVariantsWalker.2") ) {
+                    Assert.assertFalse(foundSecondWalker);
+                    foundSecondWalker = true;
+                }
+                // otherwise if this is not the second walker command but contains the same
+                // walker name, then it is the first occurrence.  If we somehow got more than
+                // two occurrences of this walker, the Assert.assertFalse(foundFirstWalker);
+                // will catch this
+                else if ( line.getKey().contains("TestPrintVariantsWalker") ) {
+                    Assert.assertFalse(foundFirstWalker);
+                    foundFirstWalker = true;
+                }
+            }
+        }
+
+        Assert.assertTrue(foundFirstWalker, "Didn't find TestPrintVariantsWalker command line header field");
+        Assert.assertTrue(foundSecondWalker, "Didn't find (second) TestPrintVariantsWalker command line header field");
+    }
+
+    // --------------------------------------------------------------------------------
+    //
+    // Test that defaultBaseQualities actually works
+    //
+    // --------------------------------------------------------------------------------
+
+    // Builds a spec running TestPrintReadsWalker over a bam with missing base
+    // qualities, optionally supplying --defaultBaseQualities; a null value omits
+    // the argument entirely (used to exercise the failure path).
+    public WalkerTestSpec testDefaultBaseQualities(final Integer value, final String md5) {
+        // No leading "/" before the file name: privateTestDir already ends with a
+        // separator (every other concatenation in this file relies on that).
+        return new WalkerTestSpec("-T TestPrintReadsWalker -R " + b37KGReference + " -I " + privateTestDir + "baseQualitiesToFix.bam -o %s"
+                + (value != null ? " --defaultBaseQualities " + value : ""),
+                1, Arrays.asList(md5));
+    }
+
+    @Test()
+    public void testDefaultBaseQualities20() {
+        // A default base quality of 20 must reproduce the expected output md5
+        final WalkerTestSpec spec = testDefaultBaseQualities(20, "90a450f74554bbd2cc3a9e0f9de68e26");
+        executeTest("testDefaultBaseQualities20", spec);
+    }
+
+    @Test()
+    public void testDefaultBaseQualities30() {
+        // A default base quality of 30 must reproduce the expected output md5
+        final WalkerTestSpec spec = testDefaultBaseQualities(30, "ec11db4173ce3b8e43997f00dab5ae26");
+        executeTest("testDefaultBaseQualities30", spec);
+    }
+
+    // Without --defaultBaseQualities, processing a bam whose reads lack base
+    // qualities is expected to fail.
+    @Test(expectedExceptions = Exception.class)
+    public void testDefaultBaseQualitiesNoneProvided() {
+        executeTest("testDefaultBaseQualitiesNoneProvided", testDefaultBaseQualities(null, ""));
+    }
+
+    // --------------------------------------------------------------------------------
+    //
+    // Test engine-level cigar consolidation
+    //
+    // --------------------------------------------------------------------------------
+
+    // Verifies that the engine consolidates zero-length / adjacent cigar elements.
+    @Test
+    public void testGATKEngineConsolidatesCigars() {
+        final WalkerTestSpec spec = new WalkerTestSpec(" -T TestPrintReadsWalker" +
+                                                       " -R " + b37KGReference +
+                                                       " -I " + privateTestDir + "zero_length_cigar_elements.bam" +
+                                                       " -o %s",
+                                                       1, Arrays.asList(""));  // No MD5s; we only want to check the cigar
+
+        final File outputBam = executeTest("testGATKEngineConsolidatesCigars", spec).first.get(0);
+        final SAMFileReader reader = new SAMFileReader(outputBam);
+        final SAMRecord read;
+        try {
+            reader.setValidationStringency(ValidationStringency.SILENT);
+            read = reader.iterator().next();
+        } finally {
+            // close the reader even if the output bam is empty or unreadable
+            reader.close();
+        }
+
+        // Original cigar was 0M3M0M8M. Check that it's been consolidated after running through the GATK engine:
+        Assert.assertEquals(read.getCigarString(), "11M", "Cigar 0M3M0M8M not consolidated correctly by the engine");
+    }
+
+    // --------------------------------------------------------------------------------
+    //
+    // Test on-the-fly sample renaming
+    //
+    // --------------------------------------------------------------------------------
+
+    // On-the-fly sample renaming test case: one single-sample bam with multiple read groups
+    @Test
+    public void testOnTheFlySampleRenamingWithSingleBamFile() throws IOException {
+        final File sampleRenameMapFile = createTestSampleRenameMapFile(
+                Arrays.asList(privateTestDir + "CEUTrio.HiSeq.WGS.b37.NA12878.HEADERONLY.bam  myNewSampleName"));
+
+        final WalkerTestSpec spec = new WalkerTestSpec(" -T TestPrintReadsWalker" +
+                                                       " -R " + b37KGReference +
+                                                       " -I " + privateTestDir + "CEUTrio.HiSeq.WGS.b37.NA12878.HEADERONLY.bam" +
+                                                       " --sample_rename_mapping_file " + sampleRenameMapFile.getAbsolutePath() +
+                                                       " -o %s",
+                                                       1, Arrays.asList(""));  // No MD5s; we only want to check the read groups
+
+        final File outputBam = executeTest("testOnTheFlySampleRenamingWithSingleBamFile", spec).first.get(0);
+        final SAMFileReader reader = new SAMFileReader(outputBam);
+        try {
+            // Every read group in the output must carry the renamed sample
+            for ( final SAMReadGroupRecord readGroup : reader.getFileHeader().getReadGroups() ) {
+                Assert.assertEquals(readGroup.getSample(), "myNewSampleName", String.format("Sample for read group %s not renamed correctly", readGroup.getId()));
+            }
+        } finally {
+            // close the reader even if an assertion fails
+            reader.close();
+        }
+    }
+
+    // On-the-fly sample renaming test case: three single-sample bams with multiple read groups per bam
+    @Test
+    public void testOnTheFlySampleRenamingWithMultipleBamFiles() throws IOException {
+        final File sampleRenameMapFile = createTestSampleRenameMapFile(
+                Arrays.asList(privateTestDir + "CEUTrio.HiSeq.WGS.b37.NA12878.HEADERONLY.bam  newSampleFor12878",
+                              privateTestDir + "CEUTrio.HiSeq.WGS.b37.NA12891.HEADERONLY.bam  newSampleFor12891",
+                              privateTestDir + "CEUTrio.HiSeq.WGS.b37.NA12892.HEADERONLY.bam  newSampleFor12892"));
+
+        // Build the expected read group id -> new sample name map from the input bam headers
+        final Map<String, String> readGroupToNewSampleMap = new HashMap<>();
+        for ( String inputBamID : Arrays.asList("12878", "12891", "12892") ) {
+            final File inputBam = new File(privateTestDir + String.format("CEUTrio.HiSeq.WGS.b37.NA%s.HEADERONLY.bam", inputBamID));
+            final SAMFileReader inputBamReader = new SAMFileReader(inputBam);
+            try {
+                final String newSampleName = String.format("newSampleFor%s", inputBamID);
+                for ( final SAMReadGroupRecord readGroup : inputBamReader.getFileHeader().getReadGroups() ) {
+                    readGroupToNewSampleMap.put(readGroup.getId(), newSampleName);
+                }
+            } finally {
+                // close the reader even if reading the header fails
+                inputBamReader.close();
+            }
+        }
+
+        final WalkerTestSpec spec = new WalkerTestSpec(" -T TestPrintReadsWalker" +
+                                                       " -R " + b37KGReference +
+                                                       " -I " + privateTestDir + "CEUTrio.HiSeq.WGS.b37.NA12878.HEADERONLY.bam" +
+                                                       " -I " + privateTestDir + "CEUTrio.HiSeq.WGS.b37.NA12891.HEADERONLY.bam" +
+                                                       " -I " + privateTestDir + "CEUTrio.HiSeq.WGS.b37.NA12892.HEADERONLY.bam" +
+                                                       " --sample_rename_mapping_file " + sampleRenameMapFile.getAbsolutePath() +
+                                                       " -o %s",
+                                                       1, Arrays.asList(""));  // No MD5s; we only want to check the read groups
+
+        final File outputBam = executeTest("testOnTheFlySampleRenamingWithMultipleBamFiles", spec).first.get(0);
+        final SAMFileReader outputBamReader = new SAMFileReader(outputBam);
+        try {
+            int totalReadGroupsSeen = 0;
+            for ( final SAMReadGroupRecord readGroup : outputBamReader.getFileHeader().getReadGroups() ) {
+                Assert.assertEquals(readGroup.getSample(), readGroupToNewSampleMap.get(readGroup.getId()),
+                                    String.format("Wrong sample for read group %s after on-the-fly renaming", readGroup.getId()));
+                totalReadGroupsSeen++;
+            }
+
+            Assert.assertEquals(totalReadGroupsSeen, readGroupToNewSampleMap.size(), "Wrong number of read groups encountered in output bam file");
+        } finally {
+            // close the reader even if an assertion fails
+            outputBamReader.close();
+        }
+    }
+
+    // On-the-fly sample renaming test case: three single-sample bams with multiple read groups per bam,
+    //                                       performing renaming in only SOME of the bams
+    @Test
+    public void testOnTheFlySampleRenamingWithMultipleBamFilesPartialRename() throws IOException {
+        // Rename samples for NA12878 and NA12892, but not for NA12891
+        final File sampleRenameMapFile = createTestSampleRenameMapFile(
+                Arrays.asList(privateTestDir + "CEUTrio.HiSeq.WGS.b37.NA12878.HEADERONLY.bam  newSampleFor12878",
+                              privateTestDir + "CEUTrio.HiSeq.WGS.b37.NA12892.HEADERONLY.bam  newSampleFor12892"));
+
+        // Build the expected read group id -> sample name map from the input bam headers
+        final Map<String, String> readGroupToNewSampleMap = new HashMap<>();
+        for ( String inputBamID : Arrays.asList("12878", "12891", "12892") ) {
+            final File inputBam = new File(privateTestDir + String.format("CEUTrio.HiSeq.WGS.b37.NA%s.HEADERONLY.bam", inputBamID));
+            final SAMFileReader inputBamReader = new SAMFileReader(inputBam);
+
+            // Special-case NA12891, which we're not renaming:
+            final String newSampleName = inputBamID.equals("12891") ? "NA12891" : String.format("newSampleFor%s", inputBamID);
+
+            for ( final SAMReadGroupRecord readGroup : inputBamReader.getFileHeader().getReadGroups() ) {
+                readGroupToNewSampleMap.put(readGroup.getId(), newSampleName);
+            }
+            inputBamReader.close();
+        }
+
+        final WalkerTestSpec spec = new WalkerTestSpec(" -T TestPrintReadsWalker" +
+                                                       " -R " + b37KGReference +
+                                                       " -I " + privateTestDir + "CEUTrio.HiSeq.WGS.b37.NA12878.HEADERONLY.bam" +
+                                                       " -I " + privateTestDir + "CEUTrio.HiSeq.WGS.b37.NA12891.HEADERONLY.bam" +
+                                                       " -I " + privateTestDir + "CEUTrio.HiSeq.WGS.b37.NA12892.HEADERONLY.bam" +
+                                                       " --sample_rename_mapping_file " + sampleRenameMapFile.getAbsolutePath() +
+                                                       " -o %s",
+                                                       1, Arrays.asList(""));  // No MD5s; we only want to check the read groups
+
+        final File outputBam = executeTest("testOnTheFlySampleRenamingWithMultipleBamFilesPartialRename", spec).first.get(0);
+        final SAMFileReader outputBamReader = new SAMFileReader(outputBam);
+
+        // Renamed bams must show their new samples; NA12891's groups keep the original sample
+        int totalReadGroupsSeen = 0;
+        for ( final SAMReadGroupRecord readGroup : outputBamReader.getFileHeader().getReadGroups() ) {
+            Assert.assertEquals(readGroup.getSample(), readGroupToNewSampleMap.get(readGroup.getId()),
+                                String.format("Wrong sample for read group %s after on-the-fly renaming", readGroup.getId()));
+            totalReadGroupsSeen++;
+        }
+
+        Assert.assertEquals(totalReadGroupsSeen, readGroupToNewSampleMap.size(), "Wrong number of read groups encountered in output bam file");
+
+        outputBamReader.close();
+    }
+
+    // On-the-fly sample renaming test case: two single-sample bams with read group collisions
+    @Test
+    public void testOnTheFlySampleRenamingWithReadGroupCollisions() throws IOException {
+        final File sampleRenameMapFile = createTestSampleRenameMapFile(
+                Arrays.asList(privateTestDir + "CEUTrio.HiSeq.WGS.b37.NA12878.HEADERONLY.bam  newSampleFor12878",
+                              privateTestDir + "CEUTrio.HiSeq.WGS.b37.READ_GROUP_COLLISIONS_WITH_NA12878.HEADERONLY.bam  newSampleForNot12878"));
+
+        // Record NA12878's read group ids so we can tell the two inputs apart in the output
+        final Set<String> na12878ReadGroups = new HashSet<>();
+        final SAMFileReader inputBamReader = new SAMFileReader(new File(privateTestDir + "CEUTrio.HiSeq.WGS.b37.NA12878.HEADERONLY.bam"));
+        for ( final SAMReadGroupRecord readGroup : inputBamReader.getFileHeader().getReadGroups() ) {
+            na12878ReadGroups.add(readGroup.getId());
+        }
+        inputBamReader.close();
+
+        final WalkerTestSpec spec = new WalkerTestSpec(" -T TestPrintReadsWalker" +
+                                                       " -R " + b37KGReference +
+                                                       " -I " + privateTestDir + "CEUTrio.HiSeq.WGS.b37.NA12878.HEADERONLY.bam" +
+                                                       " -I " + privateTestDir + "CEUTrio.HiSeq.WGS.b37.READ_GROUP_COLLISIONS_WITH_NA12878.HEADERONLY.bam" +
+                                                       " --sample_rename_mapping_file " + sampleRenameMapFile.getAbsolutePath() +
+                                                       " -o %s",
+                                                       1, Arrays.asList(""));  // No MD5s; we only want to check the read groups
+
+        final File outputBam = executeTest("testOnTheFlySampleRenamingWithReadGroupCollisions", spec).first.get(0);
+        final SAMFileReader outputBamReader = new SAMFileReader(outputBam);
+
+        int totalReadGroupsSeen = 0;
+        for ( final SAMReadGroupRecord readGroup : outputBamReader.getFileHeader().getReadGroups() ) {
+            // Read groups originating from NA12878 get one new sample, colliders get the other
+            String expectedSampleName = "";
+            if ( na12878ReadGroups.contains(readGroup.getId()) ) {
+                expectedSampleName = "newSampleFor12878";
+            }
+            else {
+                expectedSampleName = "newSampleForNot12878";
+            }
+
+            Assert.assertEquals(readGroup.getSample(), expectedSampleName,
+                                String.format("Wrong sample for read group %s after on-the-fly renaming", readGroup.getId()));
+            totalReadGroupsSeen++;
+        }
+
+        // NOTE(review): assumes the collision bam has exactly as many read groups as NA12878's
+        Assert.assertEquals(totalReadGroupsSeen, na12878ReadGroups.size() * 2, "Wrong number of read groups encountered in output bam file");
+
+        outputBamReader.close();
+    }
+
+    // On-the-fly sample renaming test case: a multi-sample bam (this should generate a UserException)
+    // Renaming is only defined for single-sample inputs, so a merged bam must be rejected.
+    @Test
+    public void testOnTheFlySampleRenamingWithMultiSampleBam() throws IOException {
+        final File sampleRenameMapFile = createTestSampleRenameMapFile(
+                Arrays.asList(privateTestDir + "CEUTrio.HiSeq.WGS.b37.MERGED.HEADERONLY.bam  myNewSampleName"));
+
+        final WalkerTestSpec spec = new WalkerTestSpec(" -T TestPrintReadsWalker" +
+                                                       " -R " + b37KGReference +
+                                                       " -I " + privateTestDir + "CEUTrio.HiSeq.WGS.b37.MERGED.HEADERONLY.bam" +
+                                                       " --sample_rename_mapping_file " + sampleRenameMapFile.getAbsolutePath() +
+                                                       " -o %s",
+                                                       1,
+                                                       UserException.class); // expecting a UserException here
+
+        executeTest("testOnTheFlySampleRenamingWithMultiSampleBam", spec);
+    }
+
+    // On-the-fly sample renaming test case: ensure that walkers can see the remapped sample names in individual reads
+    // Uses the custom OnTheFlySampleRenamingVerifyingTestWalker, which throws if any
+    // read's read-group sample differs from --newSampleName.
+    @Test
+    public void testOnTheFlySampleRenamingVerifyWalkerSeesNewSamplesInReads() throws IOException {
+        final File sampleRenameMapFile = createTestSampleRenameMapFile(
+                Arrays.asList(privateTestDir + "NA12878.HiSeq.b37.chr20.10_11mb.bam  myNewSampleName"));
+
+        final WalkerTestSpec spec = new WalkerTestSpec(" -T OnTheFlySampleRenamingVerifyingTestWalker" +
+                                                       " -R " + b37KGReference +
+                                                       " -I " + privateTestDir + "NA12878.HiSeq.b37.chr20.10_11mb.bam" +
+                                                       " --sample_rename_mapping_file " + sampleRenameMapFile.getAbsolutePath() +
+                                                       " --newSampleName myNewSampleName" +
+                                                       " -L 20:10000000-10001000",
+                                                       1, Arrays.asList(""));
+
+        // Test is a success if our custom walker doesn't throw an exception
+        executeTest("testOnTheFlySampleRenamingVerifyWalkerSeesNewSamplesInReads", spec);
+    }
+
+    // On-the-fly sample renaming for VCF input: a single-sample VCF should have its
+    // sample renamed in both the header and the records of the output.
+    @Test
+    public void testOnTheFlySampleRenamingSingleSampleVCF() throws IOException {
+        final File sampleRenameMapFile = createTestSampleRenameMapFile(
+                Arrays.asList(privateTestDir + "NA12878.WGS.b37.chr20.firstMB.vcf  newSampleForNA12878"));
+
+        final WalkerTestSpec spec = new WalkerTestSpec(" -T TestPrintVariantsWalker" +
+                " -R " + b37KGReference +
+                " -V " + privateTestDir + "NA12878.WGS.b37.chr20.firstMB.vcf" +
+                " --sample_rename_mapping_file " + sampleRenameMapFile.getAbsolutePath() +
+                " -o %s",
+                1,
+                Arrays.asList("")); // No MD5s -- we will inspect the output file manually
+
+        final File outputVCF = executeTest("testOnTheFlySampleRenamingSingleSampleVCF", spec).first.get(0);
+        verifySampleRenaming(outputVCF, "newSampleForNA12878");
+    }
+
+    // Asserts that outputVCF carries newSampleName as its single sample, both in the
+    // header's genotype sample list and on each of (at most) the first 10 records.
+    private void verifySampleRenaming( final File outputVCF, final String newSampleName ) throws IOException {
+        final Pair<VCFHeader, VCIterable<LineIterator>> headerAndVCIter = VCIterable.readAllVCs(outputVCF, new VCFCodec());
+        final VCFHeader header = headerAndVCIter.getFirst();
+        final VCIterable<LineIterator> iter = headerAndVCIter.getSecond();
+
+        // Verify that sample renaming occurred at both the header and record levels (checking only the first 10 records):
+
+        Assert.assertEquals(header.getGenotypeSamples().size(), 1, "Wrong number of samples in output vcf header");
+        Assert.assertEquals(header.getGenotypeSamples().get(0), newSampleName, "Wrong sample name in output vcf header");
+
+        int recordCount = 0;
+        while ( iter.hasNext() && recordCount < 10 ) {
+            final VariantContext vcfRecord = iter.next();
+            Assert.assertEquals(vcfRecord.getSampleNames().size(), 1, "Wrong number of samples in output vcf record");
+            Assert.assertEquals(vcfRecord.getSampleNames().iterator().next(), newSampleName, "Wrong sample name in output vcf record");
+            recordCount++;
+        }
+    }
+
+    // Ensure ROD walkers see the renamed sample in individual VCF records; the custom
+    // OnTheFlySampleRenamingVerifyingRodWalker throws if --expectedSampleName doesn't match.
+    @Test
+    public void testOnTheFlySampleRenamingVerifyWalkerSeesNewSamplesInVCFRecords() throws Exception {
+        final File sampleRenameMapFile = createTestSampleRenameMapFile(
+                Arrays.asList(privateTestDir + "samplerenametest_single_sample_gvcf.vcf    FOOSAMPLE"));
+
+        final WalkerTestSpec spec = new WalkerTestSpec(" -T OnTheFlySampleRenamingVerifyingRodWalker" +
+                " -R " + hg19Reference +
+                " -V " + privateTestDir + "samplerenametest_single_sample_gvcf.vcf" +
+                " --sample_rename_mapping_file " + sampleRenameMapFile.getAbsolutePath() +
+                " --expectedSampleName FOOSAMPLE" +
+                " -o %s",
+                1,
+                Arrays.asList("")); // No MD5s -- custom walker will throw an exception if there's a problem
+
+        executeTest("testOnTheFlySampleRenamingVerifyWalkerSeesNewSamplesInVCFRecords", spec);
+    }
+
+    // Renaming is only defined for single-sample VCFs: a multi-sample input must be rejected.
+    @Test
+    public void testOnTheFlySampleRenamingMultiSampleVCF() throws Exception {
+        final File sampleRenameMapFile = createTestSampleRenameMapFile(
+                Arrays.asList(privateTestDir + "vcf/vcfWithGenotypes.vcf  badSample"));
+
+        final WalkerTestSpec spec = new WalkerTestSpec(" -T TestPrintVariantsWalker" +
+                " -R " + b37KGReference +
+                " -V " + privateTestDir + "vcf/vcfWithGenotypes.vcf" +
+                " --sample_rename_mapping_file " + sampleRenameMapFile.getAbsolutePath() +
+                " -o %s",
+                1,
+                UserException.class); // expecting a UserException here
+
+        executeTest("testOnTheFlySampleRenamingMultiSampleVCF", spec);
+    }
+
+    // A sites-only VCF (no genotypes, hence no samples to rename) must also be rejected.
+    @Test
+    public void testOnTheFlySampleRenamingSitesOnlyVCF() throws Exception {
+        final File sampleRenameMapFile = createTestSampleRenameMapFile(
+                Arrays.asList(privateTestDir + "vcf/vcfWithoutGenotypes.vcf  badSample"));
+
+        final WalkerTestSpec spec = new WalkerTestSpec(" -T TestPrintVariantsWalker" +
+                " -R " + b37KGReference +
+                " -V " + privateTestDir + "vcf/vcfWithoutGenotypes.vcf" +
+                " --sample_rename_mapping_file " + sampleRenameMapFile.getAbsolutePath() +
+                " -o %s",
+                1,
+                UserException.class); // expecting a UserException here
+
+        executeTest("testOnTheFlySampleRenamingSitesOnlyVCF", spec);
+    }
+
+    private File createTestSampleRenameMapFile( final List<String> contents ) throws IOException {
+        final File mapFile = createTempFile("TestSampleRenameMapFile", ".tmp");
+        final PrintWriter writer = new PrintWriter(mapFile);
+
+        for ( final String line : contents ) {
+            writer.println(line);
+        }
+        writer.close();
+
+        return mapFile;
+    }
+
+    // Test walker that fails fast (IllegalStateException) if any read's read-group
+    // sample does not match the --newSampleName argument.  Used to verify that
+    // on-the-fly sample renaming is visible to walkers at the read level.
+    public static class OnTheFlySampleRenamingVerifyingTestWalker extends ReadWalker<Integer, Integer> {
+        @Argument(fullName = "newSampleName", shortName = "newSampleName", doc = "", required = true)
+        String newSampleName = null;
+
+        public Integer map(ReferenceContext ref, GATKSAMRecord read, RefMetaDataTracker metaDataTracker) {
+            if ( ! newSampleName.equals(read.getReadGroup().getSample()) ) {
+                throw new IllegalStateException(String.format("Encountered read with the wrong sample name. Expected %s found %s",
+                                                              newSampleName, read.getReadGroup().getSample()));
+            }
+
+            return 1;
+        }
+
+        // Trivial reduce: count the reads seen
+        public Integer reduceInit() { return 0; }
+        public Integer reduce(Integer value, Integer sum) { return value + sum; }
+    }
+
+    // Test ROD walker that fails fast (IllegalStateException) if any VCF record does
+    // not contain exactly one sample matching the --expectedSampleName argument.
+    // Used to verify that on-the-fly sample renaming is visible in VCF records.
+    public static class OnTheFlySampleRenamingVerifyingRodWalker extends RodWalker<Integer, Integer> {
+        @Argument(fullName = "expectedSampleName", shortName = "expectedSampleName", doc = "", required = true)
+        String expectedSampleName = null;
+
+        @Output
+        PrintStream out;
+
+        @Input(fullName="variant", shortName = "V", doc="Input VCF file", required=true)
+        public RodBinding<VariantContext> variants;
+
+        public Integer map( RefMetaDataTracker tracker, ReferenceContext ref, AlignmentContext context ) {
+            // tracker is null when there is no ROD data at this locus
+            if ( tracker == null ) {
+                return 0;
+            }
+
+            for ( final VariantContext vc : tracker.getValues(variants, context.getLocation()) ) {
+                if ( vc.getSampleNames().size() != 1 ) {
+                    throw new IllegalStateException("Encountered a vcf record with num samples != 1");
+                }
+
+                final String actualSampleName = vc.getSampleNames().iterator().next();
+                if ( ! expectedSampleName.equals(actualSampleName)) {
+                    throw new IllegalStateException(String.format("Encountered vcf record with wrong sample name. Expected %s found %s",
+                                                                  expectedSampleName, actualSampleName));
+                }
+            }
+
+            return 1;
+        }
+
+        // Trivial reduce: count the loci with matching records
+        public Integer reduceInit() {
+            return 0;
+        }
+
+        public Integer reduce(Integer counter, Integer sum) {
+            return counter + sum;
+        }
+    }
+
+    // --------------------------------------------------------------------------------
+    //
+    // Test output file-specific options
+    //
+    // --------------------------------------------------------------------------------
+
+    //Returns the output file
+    // Runs TestPrintReadsWalker with the given extra args (--no_pg_tag keeps the
+    // output deterministic) and checks the output bam against the given md5.
+    private File testBAMFeatures(final String args, final String md5) {
+        WalkerTestSpec spec = new WalkerTestSpec("-T TestPrintReadsWalker -R " + b37KGReference +
+                " -I " + privateTestDir + "NA20313.highCoverageRegion.bam"
+                + " --no_pg_tag -o %s " + args,
+                1, Arrays.asList(".bam"), Arrays.asList(md5));
+        return executeTest("testBAMFeatures: "+args, spec).first.get(0);
+    }
+
+    // Exercises output-bam options: compression levels, -simplifyBAM,
+    // --generate_md5, and --disable_bam_indexing.
+    @Test
+    public void testSAMWriterFeatures() {
+        testBAMFeatures("-compress 0", "49228d4f5b14c4cfed4a09372eb71139");
+        testBAMFeatures("-compress 9", "bc61a1b2b53a2ec7c63b533fa2f8701b");
+        testBAMFeatures("-simplifyBAM", "a1127bab46674b165496b79bb9fa7964");
+
+        //Validate MD5
+        final String expectedMD5 = "c58b9114fc15b53655f2c03c819c29fd";
+        final File md5Target = testBAMFeatures("--generate_md5", expectedMD5);
+        final File md5File = new File(md5Target.getAbsoluteFile() + ".md5");
+        md5File.deleteOnExit();
+        Assert.assertTrue(md5File.exists(), "MD5 wasn't created");
+        // try-with-resources: close the reader even if readLine throws
+        try ( final BufferedReader md5Reader = new BufferedReader(new FileReader(md5File)) ) {
+            final String md5 = md5Reader.readLine();
+            Assert.assertEquals(md5, expectedMD5, "Generated MD5 doesn't match expected");
+        } catch (IOException e) {
+            Assert.fail("Can't parse MD5 file", e);
+        }
+
+        //Validate that index isn't created
+        final String unindexedBAM = testBAMFeatures("--disable_bam_indexing", expectedMD5).getAbsolutePath();
+        Assert.assertTrue(!(new File(unindexedBAM+".bai").exists()) &&
+                          !(new File(unindexedBAM.replace(".bam", ".bai")).exists()),
+                          "BAM index was created even though it was disabled");
+    }
+
+    @DataProvider(name = "vcfFeaturesData")
+    public Object[][] getVCFFeaturesData() {
+        // Each row: { extra command-line argument, expected output md5 }
+        final Object[][] testCases = {
+                {"--sites_only", "6ef742ee6d9bcbc7b23f928c0e8a1d0e"},
+                {"--bcf", "285549ca1a719a09fa95cfa129520621"}
+        };
+        return testCases;
+    }
+
+    // Exercises output-vcf options (--sites_only, --bcf) against known md5s;
+    // --no_cmdline_in_header keeps the output deterministic.
+    @Test(dataProvider = "vcfFeaturesData")
+    public void testVCFFeatures(final String args, final String md5) {
+        WalkerTestSpec spec = new WalkerTestSpec("-T TestPrintVariantsWalker -R " + b37KGReference +
+                " -V " + privateTestDir + "CEUtrioTest.vcf"
+                + " --no_cmdline_in_header -o %s " + args,
+                1, Arrays.asList(md5));
+        executeTest("testVCFFeatures: "+args, spec);
+    }
+
+    @DataProvider(name = "vcfFormatHandlingData")
+    public Object[][] getVCFFormatHandlingData() {
+        // Each row: { writeFullFormat flag, expected output md5 }
+        final Object[][] testCases = new Object[2][];
+        testCases[0] = new Object[]{true, "870f39e19ec89c8a09f7eca0f5c4bcb9"};
+        testCases[1] = new Object[]{false, "baf9a1755d3b4e0ed25b03233e99ca91"};
+        return testCases;
+    }
+
+    // Verifies FORMAT-field trimming: with -writeFullFormat the full FORMAT block is
+    // emitted, without it trailing missing fields are trimmed.
+    @Test(dataProvider = "vcfFormatHandlingData")
+    public void testVCFFormatHandling(final boolean writeFullFormat, final String md5) {
+        WalkerTestSpec spec = new WalkerTestSpec("-T TestPrintVariantsWalker -R " + b37KGReference +
+                " -V " + privateTestDir + "ILLUMINA.wex.broad_phase2_baseline.20111114.both.exome.genotypes.1000.vcf"
+                + " --no_cmdline_in_header -o %s "
+                + " --fullyDecode " // Required: without --fullyDecode the FORMAT fields are emitted unchanged, bypassing the trimming under test
+                + (writeFullFormat ? "-writeFullFormat" : "") ,
+                1, Arrays.asList(md5));
+        executeTest("testVCFFormatHandling: "+(writeFullFormat ? "Untrimmed" : "Trimmed"), spec);
+    }
+}
diff --git a/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/GATKVCFUtilsUnitTest.java b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/GATKVCFUtilsUnitTest.java
new file mode 100644
index 0000000..439c7e5
--- /dev/null
+++ b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/GATKVCFUtilsUnitTest.java
@@ -0,0 +1,159 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine;
+
+import htsjdk.tribble.index.DynamicIndexCreator;
+import htsjdk.tribble.index.IndexCreator;
+import htsjdk.tribble.index.interval.IntervalIndexCreator;
+import htsjdk.tribble.index.linear.LinearIndexCreator;
+import org.broadinstitute.gatk.utils.BaseTest;
+import org.broadinstitute.gatk.utils.contexts.AlignmentContext;
+import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
+import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
+import org.broadinstitute.gatk.engine.walkers.RodWalker;
+import org.broadinstitute.gatk.engine.walkers.Walker;
+import htsjdk.variant.vcf.VCFHeader;
+import htsjdk.variant.vcf.VCFHeaderLine;
+import org.broadinstitute.gatk.utils.variant.GATKVCFIndexType;
+import org.testng.Assert;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+import java.io.File;
+import java.lang.reflect.Method;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.Set;
+
+// Unit tests for GATKVCFUtils covering:
+//  (1) the "##GATKCommandLine.<walker>" header-line key convention, including the
+//      ".<n>" suffix appended when the same walker contributes a second line, and
+//  (2) selection of the correct tribble IndexCreator for each GATKVCFIndexType.
+public class GATKVCFUtilsUnitTest extends BaseTest {
+    // Minimal concrete RodWalker; exists only so an engine has a walker (and
+    // walker name) to embed in the generated header line.
+    public static class VCFHeaderTestWalker extends RodWalker<Integer, Integer> {
+        public Integer map(RefMetaDataTracker tracker, ReferenceContext ref, AlignmentContext context) { return null; }
+        public Integer reduceInit() { return 0; }
+        public Integer reduce(Integer value, Integer sum) { return value + sum; }
+    }
+
+    // A second walker type, so two engines produce header lines with distinct keys.
+    public static class VCFHeaderTest2Walker extends VCFHeaderTestWalker {}
+
+    @Test
+    public void testAddingVCFHeaderInfo() {
+        final VCFHeader header = new VCFHeader();
+
+        final Walker walker1 = new VCFHeaderTestWalker();
+        final Walker walker2 = new VCFHeaderTest2Walker();
+
+        final GenomeAnalysisEngine testEngine1 = new GenomeAnalysisEngine();
+        testEngine1.setWalker(walker1);
+
+        final GenomeAnalysisEngine testEngine2 = new GenomeAnalysisEngine();
+        testEngine2.setWalker(walker2);
+
+        // NOTE(review): raw Collections.EMPTY_LIST triggers an unchecked warning;
+        // Collections.emptyList() would be the type-safe equivalent.
+        final VCFHeaderLine line1 = GATKVCFUtils.getCommandLineArgumentHeaderLine(header, testEngine1, Collections.EMPTY_LIST);
+        logger.warn(line1);
+        Assert.assertNotNull(line1);
+        // assert the key matches the expected format (GATKVCFUtils.GATK_COMMAND_LINE_KEY).(walker name)
+        final String expectedLine1Key = String.format("%s.%s", GATKVCFUtils.GATK_COMMAND_LINE_KEY, testEngine1.getWalkerName());
+        Assert.assertEquals(line1.getKey(), expectedLine1Key);
+
+        // the line's value must carry all of the standard provenance fields
+        for (final String field : Arrays.asList("Version", "ID", "Date", "CommandLineOptions"))
+            Assert.assertTrue(line1.toString().contains(field), "Couldn't find field " + field + " in " + line1.getValue());
+        Assert.assertTrue(line1.toString().contains("ID=" + testEngine1.getWalkerName()));
+
+        final VCFHeaderLine line2 = GATKVCFUtils.getCommandLineArgumentHeaderLine(header, testEngine2, Collections.EMPTY_LIST);
+        logger.warn(line2);
+
+
+        // both lines can coexist in the header since their keys differ by walker name
+        header.addMetaDataLine(line1);
+        final Set<VCFHeaderLine> lines1 = header.getMetaDataInInputOrder();
+        Assert.assertTrue(lines1.contains(line1));
+
+        header.addMetaDataLine(line2);
+        final Set<VCFHeaderLine> lines2 = header.getMetaDataInInputOrder();
+        Assert.assertTrue(lines2.contains(line1));
+        Assert.assertTrue(lines2.contains(line2));
+
+        // create a new header line using the same engine as used by line 1
+        final VCFHeaderLine line3 = GATKVCFUtils.getCommandLineArgumentHeaderLine(header, testEngine1, Collections.EMPTY_LIST);
+        logger.warn(line3);
+
+        // ensure convention followed by getCommandLineArgumentHeaderLine is to append ".(number of duplicate engine runs)"
+        // line3 uses the same walker as line1, whereas line2 uses a different walker.  line3 is the second occurrence of walker1
+        // so a ".2" gets appended afterwards
+        final String expectedLine3Key = String.format("%s.%s.2", GATKVCFUtils.GATK_COMMAND_LINE_KEY, testEngine1.getWalkerName());
+        Assert.assertEquals(line3.getKey(), expectedLine3Key);
+
+        header.addMetaDataLine(line3);
+
+        // all three lines (including the de-duplicated key) survive in the header
+        final Set<VCFHeaderLine> lines3 = header.getMetaDataInInputOrder();
+        Assert.assertTrue(lines3.contains(line1));
+        Assert.assertTrue(lines3.contains(line2));
+        Assert.assertTrue(lines3.contains(line3));
+    }
+
+    // One test case for testGetIndexCreator: an index type + parameter, the
+    // IndexCreator class it should map to, and (optionally) the dimension the
+    // created index must report, read reflectively via dimensionGetterName.
+    private class IndexCreatorTest extends TestDataProvider {
+        private final GATKVCFIndexType type;
+        private final int parameter;
+        private final Class expectedClass;
+        private final Integer expectedDimension;
+        private final Method dimensionGetter;
+
+        private IndexCreatorTest(GATKVCFIndexType type, int parameter, Class expectedClass, Integer expectedDimension,
+                                 String dimensionGetterName) {
+            super(IndexCreatorTest.class);
+
+            this.type = type;
+            this.parameter = parameter;
+            this.expectedClass = expectedClass;
+            this.expectedDimension = expectedDimension;
+            try {
+                // Conditional matches testGetIndexCreator's if-statement
+                this.dimensionGetter = this.expectedDimension == null ? null : expectedClass.getDeclaredMethod(dimensionGetterName);
+            } catch (NoSuchMethodException e) {
+                throw new RuntimeException(e);
+            }
+        }
+    }
+
+    @DataProvider(name = "indexCreator")
+    public Object[][] indexCreatorData() {
+        // Instances self-register with TestDataProvider; collected by getTests() below.
+        new IndexCreatorTest(GATKVCFIndexType.DYNAMIC_SEEK, 0, DynamicIndexCreator.class, null, null);
+        new IndexCreatorTest(GATKVCFIndexType.DYNAMIC_SIZE, 0, DynamicIndexCreator.class, null, null);
+        new IndexCreatorTest(GATKVCFIndexType.LINEAR, 100, LinearIndexCreator.class, 100, "getBinSize");
+        new IndexCreatorTest(GATKVCFIndexType.INTERVAL, 200, IntervalIndexCreator.class, 200, "getFeaturesPerInterval");
+
+        return IndexCreatorTest.getTests(IndexCreatorTest.class);
+    }
+
+    @Test(dataProvider = "indexCreator")
+    public void testGetIndexCreator(IndexCreatorTest spec) throws Exception{
+        File dummy = new File("");
+        IndexCreator ic = GATKVCFUtils.getIndexCreator(spec.type, spec.parameter, dummy);
+        Assert.assertEquals(ic.getClass(), spec.expectedClass, "Wrong IndexCreator type");
+        if (spec.expectedDimension != null) {
+            // read the creator's dimension (bin size / features-per-interval) reflectively
+            Integer dimension = (int)spec.dimensionGetter.invoke(ic);
+            Assert.assertEquals(dimension, spec.expectedDimension, "Wrong dimension");
+        }
+    }
+}
\ No newline at end of file
diff --git a/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/GenomeAnalysisEngineUnitTest.java b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/GenomeAnalysisEngineUnitTest.java
new file mode 100644
index 0000000..79fdb92
--- /dev/null
+++ b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/GenomeAnalysisEngineUnitTest.java
@@ -0,0 +1,272 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine;
+
+import org.broadinstitute.gatk.engine.walkers.TestCountReadsWalker;
+import org.broadinstitute.gatk.utils.BaseTest;
+import org.broadinstitute.gatk.utils.commandline.Tags;
+import org.broadinstitute.gatk.engine.arguments.GATKArgumentCollection;
+import org.broadinstitute.gatk.utils.sam.SAMReaderID;
+import org.broadinstitute.gatk.engine.iterators.ReadTransformer;
+import org.broadinstitute.gatk.engine.walkers.Walker;
+import org.broadinstitute.gatk.utils.GenomeLocParser;
+import org.broadinstitute.gatk.utils.GenomeLocSortedSet;
+import org.broadinstitute.gatk.utils.exceptions.UserException;
+import org.broadinstitute.gatk.utils.sam.ArtificialSAMUtils;
+import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
+import org.testng.Assert;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+import java.io.File;
+import java.io.IOException;
+import java.io.PrintWriter;
+import java.util.*;
+
+/**
+ * Tests selected functionality in the GenomeAnalysisEngine class
+ */
+public class GenomeAnalysisEngineUnitTest extends BaseTest {
+
+    // An engine whose walker requires reads must reject an empty .list of SAM files.
+    @Test(expectedExceptions=UserException.class)
+    public void testEmptySamFileListHandling() throws Exception {
+        GenomeAnalysisEngine testEngine = new GenomeAnalysisEngine();
+        testEngine.setWalker(new TestCountReadsWalker()); //generalizable to any walker requiring reads
+
+        //supply command line args so validateSuppliedReads() knows whether reads were passed in
+        GATKArgumentCollection testArgs = new GATKArgumentCollection();
+        testArgs.samFiles.add("empty.list");
+        testEngine.setArguments(testArgs);
+
+        //represents the empty list of samFiles read in from empty.list by CommandLineExecutable
+        Collection<SAMReaderID> samFiles = new ArrayList<SAMReaderID>();
+
+        testEngine.setSAMFileIDs(samFiles);
+        testEngine.validateSuppliedReads();
+    }
+
+    // The same BAM supplied twice must be rejected as a duplicate.
+    @Test(expectedExceptions=UserException.class)
+    public void testDuplicateSamFileHandlingSingleDuplicate() throws Exception {
+        GenomeAnalysisEngine testEngine = new GenomeAnalysisEngine();
+
+        Collection<SAMReaderID> samFiles = new ArrayList<SAMReaderID>();
+        samFiles.add(new SAMReaderID(new File(publicTestDir + "exampleBAM.bam"), new Tags()));
+        samFiles.add(new SAMReaderID(new File(publicTestDir + "exampleBAM.bam"), new Tags()));
+
+        testEngine.setSAMFileIDs(samFiles);
+        testEngine.checkForDuplicateSamFiles();
+    }
+
+    // Multiple distinct BAMs, each duplicated, must also be rejected.
+    @Test(expectedExceptions=UserException.class)
+    public void testDuplicateSamFileHandlingMultipleDuplicates() throws Exception {
+        GenomeAnalysisEngine testEngine = new GenomeAnalysisEngine();
+
+        Collection<SAMReaderID> samFiles = new ArrayList<SAMReaderID>();
+        samFiles.add(new SAMReaderID(new File(publicTestDir + "exampleBAM.bam"), new Tags()));
+        samFiles.add(new SAMReaderID(new File(publicTestDir + "exampleNORG.bam"), new Tags()));
+        samFiles.add(new SAMReaderID(new File(publicTestDir + "exampleBAM.bam"), new Tags()));
+        samFiles.add(new SAMReaderID(new File(publicTestDir + "exampleNORG.bam"), new Tags()));
+
+        testEngine.setSAMFileIDs(samFiles);
+        testEngine.checkForDuplicateSamFiles();
+    }
+
+    // Duplicate detection must see through relative-vs-absolute path differences
+    // for the same underlying file.
+    @Test(expectedExceptions=UserException.class)
+    public void testDuplicateSamFileHandlingAbsoluteVsRelativePath() {
+        GenomeAnalysisEngine testEngine = new GenomeAnalysisEngine();
+
+        final File relativePathToBAMFile = new File(publicTestDir + "exampleBAM.bam");
+        final File absolutePathToBAMFile = new File(relativePathToBAMFile.getAbsolutePath());
+        Collection<SAMReaderID> samFiles = new ArrayList<SAMReaderID>();
+        samFiles.add(new SAMReaderID(relativePathToBAMFile, new Tags()));
+        samFiles.add(new SAMReaderID(absolutePathToBAMFile, new Tags()));
+
+        testEngine.setSAMFileIDs(samFiles);
+        testEngine.checkForDuplicateSamFiles();
+    }
+
+    // An empty (but non-null) interval set must be accepted without error.
+    @Test
+    public void testEmptyIntervalSetHandling() throws Exception {
+        GenomeLocParser genomeLocParser = new GenomeLocParser(ArtificialSAMUtils.createArtificialSamHeader(1, 1, 1000).getSequenceDictionary());
+
+        GenomeAnalysisEngine testEngine = new GenomeAnalysisEngine();
+
+        testEngine.setWalker(new TestCountReadsWalker());
+        testEngine.setIntervals(new GenomeLocSortedSet(genomeLocParser));
+
+        testEngine.validateSuppliedIntervals();
+    }
+
+    // Well-formed map lines: absolute BAM path, whitespace, then a sample name that
+    // may itself contain internal whitespace; trailing whitespace is stripped.
+    @Test
+    public void testLoadWellFormedSampleRenameMapFile() throws IOException {
+        final File mapFile = createTestSampleRenameMapFile(Arrays.asList("/foo/bar/first.bam    newSample1",
+                                                                         "/foo/bar/second.bam        newSample2",
+                                                                         "/foo/bar2/third.bam newSample3",
+                                                                         "/foo/bar2/fourth.bam new sample    4",
+                                                                         "/foo/bar2/fifth.bam     new   sample     5    "));
+        final GenomeAnalysisEngine engine = new GenomeAnalysisEngine();
+        final Map<String, String> renameMap = engine.loadSampleRenameMap(mapFile);
+
+        Assert.assertEquals(renameMap.size(), 5, "Sample rename map was wrong size after loading from file");
+
+        // alternating (key, value) pairs consumed two at a time below
+        final Iterator<String> expectedResultsIterator = Arrays.asList(
+                        "/foo/bar/first.bam",   "newSample1",
+                        "/foo/bar/second.bam",  "newSample2",
+                        "/foo/bar2/third.bam",  "newSample3",
+                        "/foo/bar2/fourth.bam", "new sample    4",
+                        "/foo/bar2/fifth.bam",  "new   sample     5"
+        ).iterator();
+        while ( expectedResultsIterator.hasNext() ) {
+            final String expectedKey = expectedResultsIterator.next();
+            final String expectedValue = expectedResultsIterator.next();
+
+            Assert.assertNotNull(renameMap.get(expectedKey), String.format("Entry for %s not found in sample rename map", expectedKey));
+            Assert.assertEquals(renameMap.get(expectedKey), expectedValue, "Wrong value in sample rename map for " + expectedKey);
+        }
+    }
+
+    @DataProvider(name = "MalformedSampleRenameMapFileDataProvider")
+    public Object[][] generateMalformedSampleRenameMapFiles() throws IOException {
+        final List<Object[]> tests = new ArrayList<Object[]>();
+
+        tests.add(new Object[]{"testLoadSampleRenameMapFileNonExistentFile",
+                               new File("/foo/bar/nonexistent")});
+        tests.add(new Object[]{"testLoadSampleRenameMapFileMalformedLine",
+                               createTestSampleRenameMapFile(Arrays.asList("/path/to/foo.bam"))});
+        tests.add(new Object[]{"testLoadSampleRenameMapFileNonAbsoluteBamPath",
+                               createTestSampleRenameMapFile(Arrays.asList("relative/path/to/foo.bam newSample"))});
+        tests.add(new Object[]{"testLoadSampleRenameMapFileDuplicateBamPath",
+                               createTestSampleRenameMapFile(Arrays.asList("/path/to/dupe.bam newSample1",
+                                                                           "/path/to/dupe.bam newSample2"))});
+        tests.add(new Object[]{"testLoadSampleRenameMapFileTabInSampleName",
+                               createTestSampleRenameMapFile(Arrays.asList("/path/to/stuff.bam some wonky\tsample   "))});
+        return tests.toArray(new Object[][]{});
+    }
+
+    @Test(dataProvider = "MalformedSampleRenameMapFileDataProvider", expectedExceptions = UserException.class)
+    public void testLoadMalformedSampleRenameMapFile( final String testName, final File mapFile ) {
+        logger.info("Executing test " + testName);
+
+        final GenomeAnalysisEngine engine = new GenomeAnalysisEngine();
+        // return value intentionally ignored; every case must throw UserException
+        engine.loadSampleRenameMap(mapFile);
+    }
+
+    // Writes one line per entry to a temp file and returns it.
+    private File createTestSampleRenameMapFile( final List<String> contents ) throws IOException {
+        final File mapFile = createTempFile("TestSampleRenameMapFile", ".tmp");
+        final PrintWriter writer = new PrintWriter(mapFile);
+
+        // close in a finally block so the writer is not leaked if println throws
+        try {
+            for ( final String line : contents ) {
+                writer.println(line);
+            }
+        } finally {
+            writer.close();
+        }
+
+        return mapFile;
+    }
+
+    ///////////////////////////////////////////////////
+    // Test the ReadTransformer ordering enforcement //
+    ///////////////////////////////////////////////////
+
+    public static class TestReadTransformer extends ReadTransformer {
+
+        private OrderingConstraint orderingConstraint = OrderingConstraint.DO_NOT_CARE;
+        private boolean enabled;
+
+        protected TestReadTransformer(final OrderingConstraint orderingConstraint) {
+            this.orderingConstraint = orderingConstraint;
+            enabled = true;
+        }
+
+        // need this because PackageUtils will pick up this class as a possible ReadTransformer
+        protected TestReadTransformer() {
+            enabled = false;
+        }
+
+        @Override
+        public OrderingConstraint getOrderingConstraint() { return orderingConstraint; }
+
+        @Override
+        public ApplicationTime initializeSub(final GenomeAnalysisEngine engine, final Walker walker) { return ApplicationTime.HANDLED_IN_WALKER; }
+
+        @Override
+        public boolean enabled() { return enabled; }
+
+        @Override
+        public GATKSAMRecord apply(final GATKSAMRecord read) { return read; }
+
+    }
+
+    // Exhaustive cross product of three ordering constraints.
+    @DataProvider(name = "ReadTransformerData")
+    public Object[][] makeReadTransformerData() {
+        List<Object[]> tests = new ArrayList<Object[]>();
+
+        for ( final ReadTransformer.OrderingConstraint orderingConstraint1 : ReadTransformer.OrderingConstraint.values() ) {
+            for ( final ReadTransformer.OrderingConstraint orderingConstraint2 : ReadTransformer.OrderingConstraint.values() ) {
+                for ( final ReadTransformer.OrderingConstraint orderingConstraint3 : ReadTransformer.OrderingConstraint.values() ) {
+                    tests.add(new Object[]{orderingConstraint1, orderingConstraint2, orderingConstraint3});
+                }
+            }
+        }
+
+        return tests.toArray(new Object[][]{});
+    }
+
+    // setReadTransformers must throw iff more than one transformer demands to be
+    // first (or last); otherwise MUST_BE_FIRST ends up at index 0 and MUST_BE_LAST
+    // at the end of the returned list.
+    @Test(dataProvider = "ReadTransformerData")
+    public void testReadTransformer(final ReadTransformer.OrderingConstraint oc1, final ReadTransformer.OrderingConstraint oc2, final ReadTransformer.OrderingConstraint oc3) {
+
+        final GenomeAnalysisEngine testEngine = new GenomeAnalysisEngine();
+        final List<ReadTransformer> readTransformers = new ArrayList<ReadTransformer>(3);
+        readTransformers.add(new TestReadTransformer(oc1));
+        readTransformers.add(new TestReadTransformer(oc2));
+        readTransformers.add(new TestReadTransformer(oc3));
+
+        final boolean shouldThrowException = numWithConstraint(ReadTransformer.OrderingConstraint.MUST_BE_FIRST, oc1, oc2, oc3) > 1 ||
+                numWithConstraint(ReadTransformer.OrderingConstraint.MUST_BE_LAST, oc1, oc2, oc3) > 1;
+
+        try {
+            testEngine.setReadTransformers(readTransformers);
+
+            Assert.assertFalse(shouldThrowException);
+            Assert.assertEquals(testEngine.getReadTransformers().size(), 3);
+
+            Assert.assertTrue(testEngine.getReadTransformers().get(1).getOrderingConstraint() != ReadTransformer.OrderingConstraint.MUST_BE_FIRST);
+            Assert.assertTrue(testEngine.getReadTransformers().get(2).getOrderingConstraint() != ReadTransformer.OrderingConstraint.MUST_BE_FIRST);
+            Assert.assertTrue(testEngine.getReadTransformers().get(0).getOrderingConstraint() != ReadTransformer.OrderingConstraint.MUST_BE_LAST);
+            Assert.assertTrue(testEngine.getReadTransformers().get(1).getOrderingConstraint() != ReadTransformer.OrderingConstraint.MUST_BE_LAST);
+        } catch (UserException.IncompatibleReadFiltersException e) {
+            Assert.assertTrue(shouldThrowException);
+        }
+    }
+
+    // Counts how many of the given constraints equal the target constraint.
+    private int numWithConstraint(final ReadTransformer.OrderingConstraint target, final ReadTransformer.OrderingConstraint... constraints ) {
+        int count = 0;
+        for ( final ReadTransformer.OrderingConstraint constraint : constraints ) {
+            if ( constraint == target )
+                count++;
+        }
+        return count;
+    }
+}
diff --git a/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/InstantiableWalker.java b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/InstantiableWalker.java
new file mode 100644
index 0000000..7258e4d
--- /dev/null
+++ b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/InstantiableWalker.java
@@ -0,0 +1,37 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine;
+
+import org.broadinstitute.gatk.engine.walkers.Walker;
+import org.broadinstitute.gatk.utils.commandline.Hidden;
+
+// NOTE(review): the annotation line was mangled by the mailing-list archiver
+// into " at Hidden", which breaks the patch; restored to the intended @Hidden.
+@Hidden
+public class InstantiableWalker extends Walker<Integer,Long> {
+    // Public constructor will generate instantiable message
+    public InstantiableWalker() {}
+    public Long reduceInit() { return 0L; }
+    // Trivial reduce: the accumulated value is irrelevant for instantiation tests.
+    public Long reduce(Integer value, Long accum) { return 0L; }
+}
diff --git a/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/MaxRuntimeIntegrationTest.java b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/MaxRuntimeIntegrationTest.java
new file mode 100644
index 0000000..9ae7259
--- /dev/null
+++ b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/MaxRuntimeIntegrationTest.java
@@ -0,0 +1,151 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine;
+
+import org.broadinstitute.gatk.engine.walkers.WalkerTest;
+import org.broadinstitute.gatk.utils.commandline.Argument;
+import org.broadinstitute.gatk.utils.commandline.Output;
+import org.broadinstitute.gatk.utils.contexts.AlignmentContext;
+import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
+import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
+import org.broadinstitute.gatk.engine.walkers.LocusWalker;
+import org.broadinstitute.gatk.utils.SimpleTimer;
+import org.testng.Assert;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.FileReader;
+import java.io.PrintStream;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;
+import java.util.concurrent.TimeUnit;
+
+/**
+ * Integration tests verifying that the engine's -maxRuntime / -maxRuntimeUnits
+ * limits stop traversal within the requested time budget.
+ */
+public class MaxRuntimeIntegrationTest extends WalkerTest {
+    // Locus walker that sleeps at every locus, so total traversal time scales
+    // with sleepTime and the engine's max-runtime cutoff can be exercised.
+    public static class SleepingWalker extends LocusWalker<Integer, Integer> {
+        @Output PrintStream out;
+
+        @Argument(fullName="sleepTime",shortName="sleepTime",doc="x", required=false)
+        public int sleepTime = 100;
+
+        @Override
+        public Integer map(RefMetaDataTracker tracker, ReferenceContext ref, AlignmentContext context) {
+            try {
+                Thread.sleep(sleepTime);
+            } catch (InterruptedException e) {
+                // restore the interrupt flag instead of silently swallowing it,
+                // so an engine-initiated interrupt is not lost
+                Thread.currentThread().interrupt();
+            }
+            return 1;
+        }
+
+        @Override public Integer reduceInit() { return 0; }
+        @Override public Integer reduce(Integer value, Integer sum) { return sum + value; }
+
+        @Override
+        public void onTraversalDone(Integer result) {
+            out.println(result);
+        }
+    }
+
+    // Slack added on top of the requested max runtime to absorb JVM/engine startup.
+    private static final long STARTUP_TIME = TimeUnit.NANOSECONDS.convert(60, TimeUnit.SECONDS);
+
+    private class MaxRuntimeTestProvider extends TestDataProvider {
+        final long maxRuntime;
+        final TimeUnit unit;
+
+        public MaxRuntimeTestProvider(final long maxRuntime, final TimeUnit unit) {
+            super(MaxRuntimeTestProvider.class);
+            this.maxRuntime = maxRuntime;
+            this.unit = unit;
+            setName(String.format("Max runtime test : %d of %s", maxRuntime, unit));
+        }
+
+        // Upper bound on acceptable wall-clock time: requested runtime plus startup slack.
+        public long expectedMaxRuntimeNano() {
+            return TimeUnit.NANOSECONDS.convert(maxRuntime, unit) + STARTUP_TIME;
+        }
+    }
+
+    @DataProvider(name = "MaxRuntimeProvider")
+    public Object[][] makeMaxRuntimeProvider() {
+        // 30 seconds expressed in each supported unit; providers self-register
+        for ( final TimeUnit requestedUnits : Arrays.asList(TimeUnit.NANOSECONDS, TimeUnit.MILLISECONDS, TimeUnit.SECONDS, TimeUnit.MINUTES) )
+            new MaxRuntimeTestProvider(requestedUnits.convert(30, TimeUnit.SECONDS), requestedUnits);
+
+        return MaxRuntimeTestProvider.getTests(MaxRuntimeTestProvider.class);
+    }
+
+    //
+    // Run a traversal with -maxRuntime and assert the engine stopped within the
+    // requested budget (plus startup slack), for each time unit.
+    //
+    @Test(enabled = true, dataProvider = "MaxRuntimeProvider", timeOut = 120 * 1000)
+    public void testMaxRuntime(final MaxRuntimeTestProvider cfg) {
+        WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec(
+                "-T TestPrintReadsWalker -R " + hg18Reference
+                        + " -I " + validationDataLocation + "NA12878.WEx.downsampled20x.bam -o /dev/null"
+                        + " -maxRuntime " + cfg.maxRuntime + " -maxRuntimeUnits " + cfg.unit, 0,
+                Collections.<String>emptyList());
+        final SimpleTimer timer = new SimpleTimer().start();
+        executeTest("Max runtime " + cfg, spec);
+        final long actualRuntimeNano = timer.getElapsedTimeNano();
+
+        Assert.assertTrue(actualRuntimeNano < cfg.expectedMaxRuntimeNano(),
+                "Actual runtime " + TimeUnit.SECONDS.convert(actualRuntimeNano, TimeUnit.NANOSECONDS)
+                        + " exceeded max. tolerated runtime " + TimeUnit.SECONDS.convert(cfg.expectedMaxRuntimeNano(), TimeUnit.NANOSECONDS)
+                        + " given requested runtime " + cfg.maxRuntime + " " + cfg.unit);
+    }
+
+    @DataProvider(name = "SubshardProvider")
+    public Object[][] makeSubshardProvider() {
+        List<Object[]> tests = new ArrayList<Object[]>();
+
+        // this functionality can be adapted to provide input data for whatever you might want in your data
+        tests.add(new Object[]{10});
+        tests.add(new Object[]{100});
+        tests.add(new Object[]{500});
+        tests.add(new Object[]{1000});
+        tests.add(new Object[]{2000});
+
+        return tests.toArray(new Object[][]{});
+    }
+
+    @Test(enabled = true, dataProvider = "SubshardProvider", timeOut = 120 * 1000)
+    public void testSubshardTimeout(final int sleepTime) throws Exception {
+        final int maxRuntime = 5000;
+
+        WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec(
+                "-T SleepingWalker -R " + b37KGReference
+                        + " -I " + privateTestDir + "NA12878.100kb.BQSRv2.example.bam -o %s"
+                        + " -maxRuntime " + maxRuntime + " -maxRuntimeUnits MILLISECONDS -sleepTime " + sleepTime, 1,
+                Collections.singletonList(""));
+        final File result = executeTest("Subshard max runtime ", spec).getFirst().get(0);
+
+        // read the cycle count written by SleepingWalker.onTraversalDone, closing the reader
+        // (the original leaked the BufferedReader)
+        final BufferedReader reader = new BufferedReader(new FileReader(result));
+        final int cycle;
+        try {
+            cycle = Integer.parseInt(reader.readLine());
+        } finally {
+            reader.close();
+        }
+
+        // divide in floating point before ceil; the original's integer division
+        // truncated first, making Math.ceil a no-op
+        final int maxCycles = (int)Math.ceil((maxRuntime * 5.0) / sleepTime);
+        logger.warn(String.format("Max cycles %d saw %d in file %s with sleepTime %d and maxRuntime %d", maxCycles, cycle, result, sleepTime, maxRuntime));
+        Assert.assertTrue(cycle < maxCycles, "Too many cycles seen -- saw " + cycle + " in file " + result + " but max should have been " + maxCycles);
+    }
+}
\ No newline at end of file
diff --git a/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/ReadMetricsUnitTest.java b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/ReadMetricsUnitTest.java
new file mode 100644
index 0000000..6a1c340
--- /dev/null
+++ b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/ReadMetricsUnitTest.java
@@ -0,0 +1,372 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine;
+
+import htsjdk.samtools.reference.IndexedFastaSequenceFile;
+import htsjdk.samtools.*;
+import org.broadinstitute.gatk.utils.BaseTest;
+import org.broadinstitute.gatk.utils.commandline.Tags;
+import org.broadinstitute.gatk.utils.ValidationExclusion;
+import org.broadinstitute.gatk.utils.contexts.AlignmentContext;
+import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
+import org.broadinstitute.gatk.engine.datasources.providers.LocusShardDataProvider;
+import org.broadinstitute.gatk.engine.datasources.providers.ReadShardDataProvider;
+import org.broadinstitute.gatk.engine.datasources.reads.*;
+import org.broadinstitute.gatk.engine.datasources.rmd.ReferenceOrderedDataSource;
+import org.broadinstitute.gatk.engine.executive.WindowMaker;
+import org.broadinstitute.gatk.engine.filters.ReadFilter;
+import org.broadinstitute.gatk.engine.iterators.ReadTransformer;
+import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
+import org.broadinstitute.gatk.engine.resourcemanagement.ThreadAllocation;
+import org.broadinstitute.gatk.engine.traversals.*;
+import org.broadinstitute.gatk.engine.walkers.*;
+import org.broadinstitute.gatk.utils.GenomeLoc;
+import org.broadinstitute.gatk.utils.GenomeLocParser;
+import org.broadinstitute.gatk.utils.GenomeLocSortedSet;
+import org.broadinstitute.gatk.utils.activeregion.ActiveRegion;
+import org.broadinstitute.gatk.utils.activeregion.ActivityProfileState;
+import org.broadinstitute.gatk.utils.fasta.CachingIndexedFastaSequenceFile;
+import org.broadinstitute.gatk.utils.interval.IntervalMergingRule;
+import org.broadinstitute.gatk.utils.sam.*;
+import org.testng.Assert;
+import org.testng.annotations.BeforeClass;
+import org.testng.annotations.Test;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.*;
+
+public class ReadMetricsUnitTest extends BaseTest {
+
+    @Test
+    public void testReadsSeenDoNotOverflowInt() {
+
+        final ReadMetrics metrics = new ReadMetrics();
+
+        final long moreThanMaxInt = ((long)Integer.MAX_VALUE) + 1L;
+
+        for ( long i = 0L; i < moreThanMaxInt; i++ ) {
+            metrics.incrementNumReadsSeen();
+        }
+
+        Assert.assertEquals(metrics.getNumReadsSeen(), moreThanMaxInt);
+        Assert.assertTrue(metrics.getNumReadsSeen() > (long) Integer.MAX_VALUE);
+
+        logger.warn(String.format("%d %d %d", Integer.MAX_VALUE, moreThanMaxInt, Long.MAX_VALUE));
+    }
+
+
+    // Test the accuracy of the read metrics
+
+    private File referenceFile;
+    private IndexedFastaSequenceFile reference;
+    private SAMSequenceDictionary dictionary;
+    private SAMFileHeader header;
+    private GATKSAMReadGroupRecord readGroup;
+    private GenomeLocParser genomeLocParser;
+    private File testBAM;
+
+    private static final int numReadsPerContig = 250000;
+    private static final List<String> contigs = Arrays.asList("1", "2", "3");
+
+    @BeforeClass
+    private void init() throws IOException {
+        referenceFile = new File(b37KGReference);
+        reference = new CachingIndexedFastaSequenceFile(referenceFile);
+        dictionary = reference.getSequenceDictionary();
+        genomeLocParser = new GenomeLocParser(dictionary);
+        header = ArtificialSAMUtils.createDefaultReadGroup(new SAMFileHeader(), "test", "test");
+        header.setSequenceDictionary(dictionary);
+        header.setSortOrder(SAMFileHeader.SortOrder.coordinate);
+        readGroup = new GATKSAMReadGroupRecord(header.getReadGroup("test"));
+
+        final List<GATKSAMRecord> reads = new ArrayList<>();
+        for ( final String contig : contigs ) {
+            for ( int i = 1; i <= numReadsPerContig; i++ ) {
+                reads.add(buildSAMRecord("read" + contig + "_" + i, contig, i));
+            }
+        }
+
+        createBAM(reads);
+    }
+
+    private void createBAM(final List<GATKSAMRecord> reads) throws IOException {
+        testBAM = createTempFile("TraverseActiveRegionsUnitTest", ".bam");
+
+        SAMFileWriter out = new SAMFileWriterFactory().setCreateIndex(true).makeBAMWriter(reads.get(0).getHeader(), true, testBAM);
+        for (GATKSAMRecord read : reads ) {
+            out.addAlignment(read);
+        }
+        out.close();
+
+        new File(testBAM.getAbsolutePath().replace(".bam", ".bai")).deleteOnExit();
+        new File(testBAM.getAbsolutePath() + ".bai").deleteOnExit();
+    }
+
+    // copied from LocusViewTemplate
+    protected GATKSAMRecord buildSAMRecord(final String readName, final String contig, final int alignmentStart) {
+        GATKSAMRecord record = new GATKSAMRecord(header);
+
+        record.setReadName(readName);
+        record.setReferenceIndex(dictionary.getSequenceIndex(contig));
+        record.setAlignmentStart(alignmentStart);
+
+        record.setCigarString("1M");
+        record.setReadString("A");
+        record.setBaseQualityString("A");
+        record.setReadGroup(readGroup);
+
+        return record;
+    }
+
+    @Test
+    public void testCountsFromReadTraversal() {
+        final GenomeAnalysisEngine engine = new GenomeAnalysisEngine();
+        engine.setGenomeLocParser(genomeLocParser);
+
+        final Collection<SAMReaderID> samFiles = new ArrayList<>();
+        final SAMReaderID readerID = new SAMReaderID(testBAM, new Tags());
+        samFiles.add(readerID);
+
+        final SAMDataSource dataSource = new SAMDataSource(referenceFile, samFiles, new ThreadAllocation(), null, genomeLocParser,
+                false,
+                ValidationStringency.STRICT,
+                null,
+                null,
+                new ValidationExclusion(),
+                new ArrayList<ReadFilter>(),
+                new ArrayList<ReadTransformer>(),
+                false, (byte)30, false, true, null, IntervalMergingRule.ALL);
+
+        engine.setReadsDataSource(dataSource);
+
+        final TraverseReadsNano traverseReadsNano = new TraverseReadsNano(1);
+        final DummyReadWalker walker = new DummyReadWalker();
+        traverseReadsNano.initialize(engine, walker, null);
+
+        for ( final Shard shard : dataSource.createShardIteratorOverAllReads(new ReadShardBalancer()) ) {
+            final ReadShardDataProvider dataProvider = new ReadShardDataProvider(shard, engine.getGenomeLocParser(), dataSource.seek(shard), reference, new ArrayList<ReferenceOrderedDataSource>());
+            traverseReadsNano.traverse(walker, dataProvider, 0);
+            dataProvider.close();
+        }
+
+        Assert.assertEquals(engine.getCumulativeMetrics().getNumReadsSeen(), contigs.size() * numReadsPerContig);
+        Assert.assertEquals(engine.getCumulativeMetrics().getNumIterations(), contigs.size() * numReadsPerContig);
+    }
+
+    @Test
+    public void testCountsFromLocusTraversal() {
+        final GenomeAnalysisEngine engine = new GenomeAnalysisEngine();
+        engine.setGenomeLocParser(genomeLocParser);
+
+        final Collection<SAMReaderID> samFiles = new ArrayList<>();
+        final SAMReaderID readerID = new SAMReaderID(testBAM, new Tags());
+        samFiles.add(readerID);
+
+        final SAMDataSource dataSource = new SAMDataSource(referenceFile, samFiles, new ThreadAllocation(), null, genomeLocParser,
+                false,
+                ValidationStringency.STRICT,
+                null,
+                null,
+                new ValidationExclusion(),
+                new ArrayList<ReadFilter>(),
+                new ArrayList<ReadTransformer>(),
+                false, (byte)30, false, true, null, IntervalMergingRule.ALL);
+
+        engine.setReadsDataSource(dataSource);
+        final Set<String> samples = ReadUtils.getSAMFileSamples(dataSource.getHeader());
+
+        final TraverseLociNano traverseLociNano = new TraverseLociNano(1);
+        final DummyLocusWalker walker = new DummyLocusWalker();
+        traverseLociNano.initialize(engine, walker, null);
+
+        for ( final Shard shard : dataSource.createShardIteratorOverAllReads(new LocusShardBalancer()) ) {
+            final WindowMaker windowMaker = new WindowMaker(shard, genomeLocParser, dataSource.seek(shard), shard.getGenomeLocs(), samples);
+            for ( WindowMaker.WindowMakerIterator window : windowMaker ) {
+                final LocusShardDataProvider dataProvider = new LocusShardDataProvider(shard, shard.getReadProperties(), genomeLocParser, window.getLocus(), window, reference, new ArrayList<ReferenceOrderedDataSource>());
+                traverseLociNano.traverse(walker, dataProvider, 0);
+                dataProvider.close();
+            }
+            windowMaker.close();
+        }
+
+        //dataSource.close();
+        Assert.assertEquals(engine.getCumulativeMetrics().getNumReadsSeen(), contigs.size() * numReadsPerContig);
+        Assert.assertEquals(engine.getCumulativeMetrics().getNumIterations(), contigs.size() * numReadsPerContig);
+    }
+
+    @Test
+    public void testCountsFromActiveRegionTraversal() {
+        final GenomeAnalysisEngine engine = new GenomeAnalysisEngine();
+        engine.setGenomeLocParser(genomeLocParser);
+
+        final Collection<SAMReaderID> samFiles = new ArrayList<>();
+        final SAMReaderID readerID = new SAMReaderID(testBAM, new Tags());
+        samFiles.add(readerID);
+
+        final SAMDataSource dataSource = new SAMDataSource(referenceFile, samFiles, new ThreadAllocation(), null, genomeLocParser,
+                false,
+                ValidationStringency.STRICT,
+                null,
+                null,
+                new ValidationExclusion(),
+                new ArrayList<ReadFilter>(),
+                new ArrayList<ReadTransformer>(),
+                false, (byte)30, false, true, null, IntervalMergingRule.ALL);
+
+        engine.setReadsDataSource(dataSource);
+        final Set<String> samples = ReadUtils.getSAMFileSamples(dataSource.getHeader());
+
+        final List<GenomeLoc> intervals = new ArrayList<>(contigs.size());
+        for ( final String contig : contigs )
+            intervals.add(genomeLocParser.createGenomeLoc(contig, 1, numReadsPerContig));
+
+        final TraverseActiveRegions traverseActiveRegions = new TraverseActiveRegions();
+        final DummyActiveRegionWalker walker = new DummyActiveRegionWalker();
+        traverseActiveRegions.initialize(engine, walker, null);
+
+        for ( final Shard shard : dataSource.createShardIteratorOverIntervals(new GenomeLocSortedSet(genomeLocParser, intervals), new ActiveRegionShardBalancer()) ) {
+            final WindowMaker windowMaker = new WindowMaker(shard, genomeLocParser, dataSource.seek(shard), shard.getGenomeLocs(), samples);
+            for ( WindowMaker.WindowMakerIterator window : windowMaker ) {
+                final LocusShardDataProvider dataProvider = new LocusShardDataProvider(shard, shard.getReadProperties(), genomeLocParser, window.getLocus(), window, reference, new ArrayList<ReferenceOrderedDataSource>());
+                traverseActiveRegions.traverse(walker, dataProvider, 0);
+                dataProvider.close();
+            }
+            windowMaker.close();
+        }
+
+        Assert.assertEquals(engine.getCumulativeMetrics().getNumReadsSeen(), contigs.size() * numReadsPerContig);
+        Assert.assertEquals(engine.getCumulativeMetrics().getNumIterations(), contigs.size() * numReadsPerContig);
+    }
+
+    @Test
+    public void testFilteredCounts() {
+        final GenomeAnalysisEngine engine = new GenomeAnalysisEngine();
+        engine.setGenomeLocParser(genomeLocParser);
+
+        final Collection<SAMReaderID> samFiles = new ArrayList<>();
+        final SAMReaderID readerID = new SAMReaderID(testBAM, new Tags());
+        samFiles.add(readerID);
+
+        final List<ReadFilter> filters = new ArrayList<>();
+        filters.add(new EveryTenthReadFilter());
+
+        final SAMDataSource dataSource = new SAMDataSource(referenceFile, samFiles, new ThreadAllocation(), null, genomeLocParser,
+                false,
+                ValidationStringency.STRICT,
+                null,
+                null,
+                new ValidationExclusion(),
+                filters,
+                new ArrayList<ReadTransformer>(),
+                false, (byte)30, false, true, null, IntervalMergingRule.ALL);
+
+        engine.setReadsDataSource(dataSource);
+
+        final TraverseReadsNano traverseReadsNano = new TraverseReadsNano(1);
+        final DummyReadWalker walker = new DummyReadWalker();
+        traverseReadsNano.initialize(engine, walker, null);
+
+        for ( final Shard shard : dataSource.createShardIteratorOverAllReads(new ReadShardBalancer()) ) {
+            final ReadShardDataProvider dataProvider = new ReadShardDataProvider(shard, engine.getGenomeLocParser(), dataSource.seek(shard), reference, new ArrayList<ReferenceOrderedDataSource>());
+            traverseReadsNano.traverse(walker, dataProvider, 0);
+            dataProvider.close();
+        }
+
+        Assert.assertEquals((long)engine.getCumulativeMetrics().getCountsByFilter().get(EveryTenthReadFilter.class.getSimpleName()), contigs.size() * numReadsPerContig / 10);
+    }
+
+    class DummyLocusWalker extends LocusWalker<Integer, Integer> {
+        @Override
+        public Integer map(RefMetaDataTracker tracker, ReferenceContext ref, AlignmentContext context) {
+            return 0;
+        }
+
+        @Override
+        public Integer reduceInit() {
+            return 0;
+        }
+
+        @Override
+        public Integer reduce(Integer value, Integer sum) {
+            return 0;
+        }
+    }
+
+    class DummyReadWalker extends ReadWalker<Integer, Integer> {
+        @Override
+        public Integer map(ReferenceContext ref, GATKSAMRecord read, RefMetaDataTracker metaDataTracker) {
+            return 0;
+        }
+
+        @Override
+        public Integer reduceInit() {
+            return 0;
+        }
+
+        @Override
+        public Integer reduce(Integer value, Integer sum) {
+            return 0;
+        }
+    }
+
+    class DummyActiveRegionWalker extends ActiveRegionWalker<Integer, Integer> {
+        @Override
+        public ActivityProfileState isActive(RefMetaDataTracker tracker, ReferenceContext ref, AlignmentContext context) {
+            return new ActivityProfileState(ref.getLocus(), 0.0);
+        }
+
+        @Override
+        public Integer map(ActiveRegion activeRegion, RefMetaDataTracker metaDataTracker) {
+            return 0;
+        }
+
+        @Override
+        public Integer reduceInit() {
+            return 0;
+        }
+
+        @Override
+        public Integer reduce(Integer value, Integer sum) {
+            return 0;
+        }
+    }
+
+    private final class EveryTenthReadFilter extends ReadFilter {
+
+        private int myCounter = 0;
+
+        @Override
+        public boolean filterOut(final SAMRecord record) {
+            if ( ++myCounter == 10 ) {
+                myCounter = 0;
+                return true;
+            }
+
+            return false;
+        }
+    }
+}
\ No newline at end of file
diff --git a/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/SampleUtilsUnitTest.java b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/SampleUtilsUnitTest.java
new file mode 100644
index 0000000..972816c
--- /dev/null
+++ b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/SampleUtilsUnitTest.java
@@ -0,0 +1,49 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine;
+
+import org.broadinstitute.gatk.engine.SampleUtils;
+import org.broadinstitute.gatk.utils.BaseTest;
+import org.broadinstitute.gatk.utils.exceptions.UserException;
+import org.testng.annotations.Test;
+
+import java.io.File;
+import java.util.*;
+
+/**
+ * Testing framework for sample utilities class.
+ *
+ * @author gauthier
+ */
+
+public class SampleUtilsUnitTest extends BaseTest {
+    @Test(expectedExceptions=UserException.class)
+    public void testBadSampleFiles() throws Exception {
+        Set<File> sampleFiles = new HashSet<File>(0);
+        sampleFiles.add(new File("fileNotHere.samples"));
+        Collection<String> samplesFromFile = SampleUtils.getSamplesFromFiles(sampleFiles);
+    }
+}
diff --git a/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/UninstantiableWalker.java b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/UninstantiableWalker.java
new file mode 100644
index 0000000..32fcee3
--- /dev/null
+++ b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/UninstantiableWalker.java
@@ -0,0 +1,37 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine;
+
+import org.broadinstitute.gatk.engine.walkers.Walker;
+import org.broadinstitute.gatk.utils.commandline.Hidden;
+
+ at Hidden
+public class UninstantiableWalker extends Walker<Integer,Long> {
+    // Private constructor will generate uninstantiable message
+    private UninstantiableWalker() {}
+    public Long reduceInit() { return 0L; }
+    public Long reduce(Integer value, Long accum) { return 0L; }
+}
diff --git a/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/WalkerManagerUnitTest.java b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/WalkerManagerUnitTest.java
new file mode 100644
index 0000000..5173dcb
--- /dev/null
+++ b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/WalkerManagerUnitTest.java
@@ -0,0 +1,61 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine;
+
+import org.broadinstitute.gatk.engine.walkers.Walker;
+import org.broadinstitute.gatk.utils.exceptions.DynamicClassResolutionException;
+import org.broadinstitute.gatk.utils.exceptions.UserException;
+import org.testng.Assert;
+import org.testng.annotations.BeforeClass;
+import org.testng.annotations.Test;
+
+/**
+ * Tests basic functionality of the walker manager.
+ */
+public class WalkerManagerUnitTest {
+    private static WalkerManager walkerManager;
+
+    @BeforeClass
+    public void setUp() {
+        walkerManager = new WalkerManager();
+    }
+
+    @Test
+    public void testPresentWalker() {
+        Walker instantiableWalker = walkerManager.createByName("InstantiableWalker");
+        Assert.assertEquals(InstantiableWalker.class, instantiableWalker.getClass());
+    }
+
+    @Test(expectedExceptions=UserException.class)
+    public void testAbsentWalker() {
+        walkerManager.createByName("Missing");
+    }
+
+    @Test(expectedExceptions=DynamicClassResolutionException.class)
+    public void testUninstantiableWalker() {
+        walkerManager.createByName("UninstantiableWalker");
+    }
+}
diff --git a/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/arguments/CramIntegrationTest.java b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/arguments/CramIntegrationTest.java
new file mode 100644
index 0000000..31565a6
--- /dev/null
+++ b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/arguments/CramIntegrationTest.java
@@ -0,0 +1,90 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.arguments;
+
+import org.broadinstitute.gatk.engine.walkers.WalkerTest;
+import org.broadinstitute.gatk.utils.exceptions.UserException;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+import java.util.Collections;
+
+/**
+ * Test the GATK core CRAM parsing mechanism.
+ */
+public class CramIntegrationTest extends WalkerTest {
+    @DataProvider(name="cramData")
+    public Object[][] getCRAMData() {
+        return new Object[][] {
+                {"PrintReads", "exampleBAM.bam", "", "cram", "97470174cd313a4d200b2a96ffd73e99"},
+                {"PrintReads", "exampleCRAM.cram", "", "cram", "424c725c4ffe7215e358ecf5abd5e5e8"},
+                {"PrintReads", "exampleCRAM.cram", "", "bam", "247805098718dd74b8a871796424d359"},
+                {"PrintReads", "exampleCRAM.cram", " -L chr1:200", "bam", "a5b26631cd89f86f6184bcac7bc9c9ca"},
+                {"CountLoci", "exampleCRAM.cram", "", "txt", "ade93df31a6150321c1067e749cae9be"},
+                {"CountLoci", "exampleCRAM.cram", " -L chr1:200", "txt", "b026324c6904b2a9cb4b88d6d61c81d1"},
+                {"CountReads", "exampleCRAM.cram", "", "txt", "4fbafd6948b6529caa2b78e476359875"},
+                {"CountReads", "exampleCRAM.cram", " -L chr1:200", "txt", "b026324c6904b2a9cb4b88d6d61c81d1"},
+                {"PrintReads", "exampleCRAM.cram", " -L chr1:200 -L chr1:89597", "bam", "24dbd14b60220461f47ec5517962cb7f"},
+                {"CountLoci", "exampleCRAM.cram", " -L chr1:200 -L chr1:89597", "txt", "26ab0db90d72e28ad0ba1e22ee510510"},
+                {"CountReads", "exampleCRAM.cram", " -L chr1:200 -L chr1:89597", "txt", "6d7fce9fee471194aa8b5b6e47267f03"},
+                {"PrintReads", "exampleCRAM-nobai-withcrai.cram", " -L chr1:200 -L chr1:89597", "bam", "84bee5063d8fa0d07e7c3ff7e825ae3a"},
+                {"CountLoci", "exampleCRAM-nobai-withcrai.cram", " -L chr1:200 -L chr1:89597", "txt", "26ab0db90d72e28ad0ba1e22ee510510"},
+                {"CountReads", "exampleCRAM-nobai-withcrai.cram", " -L chr1:200 -L chr1:89597", "txt", "6d7fce9fee471194aa8b5b6e47267f03"},
+        };
+    }
+
+    @Test(dataProvider = "cramData")
+    public void testCram(String walker, String input, String args, String ext, String md5) {
+        WalkerTestSpec spec = new WalkerTestSpec(
+                " -T Test" + walker + "Walker" +
+                    " -I " + publicTestDir + input +
+                    " -R " + exampleFASTA +
+                    args +
+                    " -o %s",
+                1, // just one output file
+                Collections.singletonList(ext),
+                Collections.singletonList(md5));
+        executeTest(String.format("testCram %s %s -> %s: %s", walker, input, ext, args), spec);
+    }
+
+    @DataProvider(name = "cramNoIndexData")
+    public Object[][] getCramNoIndexData() {
+        return new Object[][]{
+                {"exampleCRAM-nobai-nocrai.cram"},
+        };
+    }
+
+    @Test(dataProvider = "cramNoIndexData")
+    public void testCramNoIndex(String input) {
+        WalkerTestSpec spec = new WalkerTestSpec(
+                " -T TestPrintReadsWalker" +
+                        " -I " + publicTestDir + input +
+                        " -R " + exampleFASTA,
+                0,
+                UserException.class);
+        executeTest(String.format("testCramNoIndex %s", input), spec);
+    }
+}
diff --git a/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/arguments/IntervalIntegrationTest.java b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/arguments/IntervalIntegrationTest.java
new file mode 100644
index 0000000..6de2f96
--- /dev/null
+++ b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/arguments/IntervalIntegrationTest.java
@@ -0,0 +1,314 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.arguments;
+
+import org.broadinstitute.gatk.engine.walkers.WalkerTest;
+import org.testng.annotations.Test;
+
+import java.io.File;
+import java.util.Collections;
+
+/**
+ * Test the GATK core interval parsing mechanism.
+ */
+public class IntervalIntegrationTest extends WalkerTest {
+    // No -L argument: traversal should implicitly cover the whole reference.
+    @Test(enabled = true)
+    public void testAllImplicitIntervalParsing() {
+        String md5 = "7821db9e14d4f8e07029ff1959cd5a99";
+        WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec(
+                "-T TestCountLociWalker" +
+                        " -I " + validationDataLocation + "OV-0930.normal.chunk.bam" +
+                        " -R " + hg18Reference +
+                        " -o %s",
+                        1, // just one output file
+                        Collections.singletonList(md5));
+        executeTest("testAllIntervalsImplicit",spec);
+    }
+
+    // "-L unmapped" selects only the unmapped reads of a BAM.
+    @Test
+    public void testUnmappedReadInclusion() {
+        WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec(
+                "-T TestPrintReadsWalker" +
+                        " -I " + validationDataLocation + "MV1994.bam" +
+                        " -R " + validationDataLocation + "Escherichia_coli_K12_MG1655.fasta" +
+                        " -L unmapped" +
+                        " -U",
+                        0, // no %s-substituted outputs; expected files are registered below as aux files
+                        Collections.<String>emptyList());
+
+        // our base file
+        File baseOutputFile = createTempFile("testUnmappedReadInclusion",".bam");
+        spec.setOutputFileLocation(baseOutputFile);
+        spec.addAuxFile("c66bb2c3c5382e2acff09b2b359562bb",createTempFileFromBase(baseOutputFile.getAbsolutePath()));
+        spec.addAuxFile("fadcdf88597b9609c5f2a17f4c6eb455", createTempFileFromBase(baseOutputFile.getAbsolutePath().substring(0,baseOutputFile.getAbsolutePath().indexOf(".bam"))+".bai"));
+
+        executeTest("testUnmappedReadInclusion",spec);
+    }
+
+    // Several single-position -L intervals plus "-L unmapped", this time on a CRAM input.
+    @Test
+    public void testMultipleIntervalInclusionOnCRAM() {
+        WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec(
+                "-T TestPrintReadsWalker" +
+                        " -I " + validationDataLocation + "MV1994.cram" +
+                        " -R " + validationDataLocation + "Escherichia_coli_K12_MG1655.fasta" +
+                        " -L Escherichia_coli_K12:11000" +
+                        " -L Escherichia_coli_K12:12000" +
+                        " -L Escherichia_coli_K12:13000" +
+                        " -L Escherichia_coli_K12:14000" +
+                        " -L Escherichia_coli_K12:15000" +
+                        " -L Escherichia_coli_K12:16000" +
+                        " -L Escherichia_coli_K12:17000" +
+                        " -L unmapped",
+                0, // no %s-substituted outputs; expected files are registered below as aux files
+                Collections.<String>emptyList());
+
+        // our base file (NOTE(review): temp-file prefix reuses "testUnmappedReadInclusion" — likely copy-paste)
+        File baseOutputFile = createTempFile("testUnmappedReadInclusion", ".cram");
+        spec.setOutputFileLocation(baseOutputFile);
+        spec.addAuxFile("0f11cc035455cd68fb388e33aaf5feff", createTempFileFromBase(baseOutputFile.getAbsolutePath()));
+        spec.addAuxFile("ebbe6e311b6bb240554ec96ed9809216", createTempFileFromBase(baseOutputFile.getAbsolutePath() + ".bai"));
+
+        executeTest("testUnmappedReadInclusionCRAM", spec);
+    }
+
+    // A mapped interval and "-L unmapped" combined in one run.
+    @Test
+    public void testMixedMappedAndUnmapped() {
+        WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec(
+                "-T TestPrintReadsWalker" +
+                        " -I " + validationDataLocation + "MV1994.bam" +
+                        " -R " + validationDataLocation + "Escherichia_coli_K12_MG1655.fasta" +
+                        " -L Escherichia_coli_K12:4630000-4639675" +
+                        " -L unmapped" +
+                        " -U",
+                        0, // no %s-substituted outputs; expected files are registered below as aux files
+                        Collections.<String>emptyList());
+
+        // our base file
+        File baseOutputFile = createTempFile("testUnmappedReadInclusion",".bam");
+        spec.setOutputFileLocation(baseOutputFile);
+        spec.addAuxFile("c64cff3ed376bc8f2977078dbdac4518",createTempFileFromBase(baseOutputFile.getAbsolutePath()));
+        spec.addAuxFile("fa90ff91ac0cc689c71a3460a3530b8b", createTempFileFromBase(baseOutputFile.getAbsolutePath().substring(0,baseOutputFile.getAbsolutePath().indexOf(".bam"))+".bai"));
+
+        // NOTE(review): label reuses "testUnmappedReadInclusion" rather than this test's own name
+        executeTest("testUnmappedReadInclusion",spec);
+    }
+
+
+    // Disabled: "-XL unmapped" exclusion of the unmapped reads.
+    @Test(enabled = false)
+    public void testUnmappedReadExclusion() {
+        WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec(
+                "-T TestPrintReadsWalker" +
+                        " -I " + validationDataLocation + "MV1994.bam" +
+                        " -R " + validationDataLocation + "Escherichia_coli_K12_MG1655.fasta" +
+                        " -XL unmapped" +
+                        " -U",
+                        0, // no %s-substituted outputs; expected files are registered below as aux files
+                        Collections.<String>emptyList());
+
+        // our base file
+        File baseOutputFile = createTempFile("testUnmappedReadExclusion",".bam");
+        spec.setOutputFileLocation(baseOutputFile);
+        spec.addAuxFile("80887ba488e53dabd9596ff93070ae75",createTempFileFromBase(baseOutputFile.getAbsolutePath()));
+        spec.addAuxFile("b341d808ecc33217f37c0c0cde2a3e2f", createTempFileFromBase(baseOutputFile.getAbsolutePath().substring(0,baseOutputFile.getAbsolutePath().indexOf(".bam"))+".bai"));
+
+        executeTest("testUnmappedReadExclusion",spec);
+    }
+
+    // Intervals supplied through a single VCF file.
+    @Test(enabled = true)
+    public void testIntervalParsingFromFile() {
+        String md5 = "48a24b70a0b376535542b996af517398";
+        WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec(
+                "-T TestCountLociWalker" +
+                        " -I " + validationDataLocation + "OV-0930.normal.chunk.bam" +
+                        " -R " + hg18Reference +
+                        " -o %s" +
+                        " -L " + validationDataLocation + "intervalTest.1.vcf",
+                        1, // just one output file
+                        Collections.singletonList(md5));
+        executeTest("testIntervalParsingFromFile", spec);
+    }
+
+    // Two -L VCF interval files merged together.
+    @Test(enabled = true)
+    public void testIntervalMergingFromFiles() {
+        String md5 = "9ae0ea9e3c9c6e1b9b6252c8395efdc1";
+        WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec(
+                "-T TestCountLociWalker" +
+                        " -I " + validationDataLocation + "OV-0930.normal.chunk.bam" +
+                        " -R " + hg18Reference +
+                        " -o %s" +
+                        " -L " + validationDataLocation + "intervalTest.1.vcf" +
+                        " -L " + validationDataLocation + "intervalTest.2.vcf",
+                        1, // just one output file
+                        Collections.singletonList(md5));
+        executeTest("testIntervalMergingFromFiles", spec);
+    }
+
+    // One -L include file with a -XL exclude file subtracted from it.
+    @Test(enabled = true)
+    public void testIntervalExclusionsFromFiles() {
+        String md5 = "26ab0db90d72e28ad0ba1e22ee510510";
+        WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec(
+                "-T TestCountLociWalker" +
+                        " -I " + validationDataLocation + "OV-0930.normal.chunk.bam" +
+                        " -R " + hg18Reference +
+                        " -o %s" +
+                        " -L " + validationDataLocation + "intervalTest.1.vcf" +
+                        " -XL " + validationDataLocation + "intervalTest.2.vcf",
+                        1, // just one output file
+                        Collections.singletonList(md5));
+        executeTest("testIntervalExclusionsFromFiles", spec);
+    }
+
+    // A file-based -L merged with an explicit command-line interval string.
+    @Test(enabled = true)
+    public void testMixedIntervalMerging() {
+        String md5 = "7c5aba41f53293b712fd86d08ed5b36e";
+        WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec(
+                "-T TestCountLociWalker" +
+                        " -I " + validationDataLocation + "OV-0930.normal.chunk.bam" +
+                        " -R " + hg18Reference +
+                        " -o %s" +
+                        " -L " + validationDataLocation + "intervalTest.1.vcf" +
+                        " -L chr1:1677524-1677528",
+                        1, // just one output file
+                        Collections.singletonList(md5));
+        executeTest("testMixedIntervalMerging", spec);
+    }
+
+    // Intervals supplied through a BED file.
+    @Test(enabled = true)
+    public void testBed() {
+        String md5 = "cf4278314ef8e4b996e1b798d8eb92cf";
+        WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec(
+                "-T TestCountLociWalker" +
+                        " -I " + validationDataLocation + "OV-0930.normal.chunk.bam" +
+                        " -R " + hg18Reference +
+                        " -o %s" +
+                        " -L " + validationDataLocation + "intervalTest.bed",
+                        1, // just one output file
+                        Collections.singletonList(md5));
+        executeTest("testBed", spec);
+    }
+
+    // Intervals from a VCF containing more complex records (intervalTest.3.vcf).
+    @Test(enabled = true)
+    public void testComplexVCF() {
+        String md5 = "166d77ac1b46a1ec38aa35ab7e628ab5";
+        WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec(
+                "-T TestCountLociWalker" +
+                        " -I " + validationDataLocation + "OV-0930.normal.chunk.bam" +
+                        " -R " + hg18Reference +
+                        " -o %s" +
+                        " -L " + validationDataLocation + "intervalTest.3.vcf",
+                1, // just one output file
+                Collections.singletonList(md5));
+        executeTest("testComplexVCF", spec);
+    }
+
+    // Same VCF intervals but widened by --interval_padding 2.
+    @Test(enabled = true)
+    public void testComplexVCFWithPadding() {
+        String md5 = "649ee93d50739c656e94ec88a32c7ffe";
+        WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec(
+                "-T TestCountLociWalker" +
+                        " --interval_padding 2" +
+                        " -I " + validationDataLocation + "OV-0930.normal.chunk.bam" +
+                        " -R " + hg18Reference +
+                        " -o %s" +
+                        " -L " + validationDataLocation + "intervalTest.3.vcf",
+                1, // just one output file
+                Collections.singletonList(md5));
+        executeTest("testComplexVCFWithPadding", spec);
+    }
+
+    // -L include VCF with the complex VCF excluded via -XL.
+    @Test(enabled = true)
+    public void testMergingWithComplexVCF() {
+        String md5 = "6d7fce9fee471194aa8b5b6e47267f03";
+        WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec(
+                "-T TestCountLociWalker" +
+                        " -I " + validationDataLocation + "OV-0930.normal.chunk.bam" +
+                        " -R " + hg18Reference +
+                        " -o %s" +
+                        " -L " + validationDataLocation + "intervalTest.1.vcf" +
+                        " -XL " + validationDataLocation + "intervalTest.3.vcf",
+                        1, // just one output file
+                        Collections.singletonList(md5));
+        executeTest("testMergingWithComplexVCF", spec);
+    }
+
+    // An empty interval VCF should still run (and produce an empty result).
+    @Test(enabled = true)
+    public void testEmptyVCF() {
+        String md5 = "897316929176464ebc9ad085f31e7284";
+        WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec(
+                "-T TestCountLociWalker" +
+                        " -I " + validationDataLocation + "OV-0930.normal.chunk.bam" +
+                        " -R " + hg18Reference +
+                        " -o %s" +
+                        " -L " + validationDataLocation + "intervalTest.empty.vcf",
+                        1, // just one output file
+                        Collections.singletonList(md5));
+        executeTest("testEmptyVCFWarning", spec);
+    }
+
+    // Including and excluding the same file leaves nothing; MD5 matches the empty-VCF case above.
+    @Test(enabled = true)
+    public void testIncludeExcludeIsTheSame() {
+        String md5 = "897316929176464ebc9ad085f31e7284";
+        WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec(
+                "-T TestCountLociWalker" +
+                        " -I " + validationDataLocation + "OV-0930.normal.chunk.bam" +
+                        " -R " + hg18Reference +
+                        " -o %s" +
+                        " -L " + validationDataLocation + "intervalTest.1.vcf" +
+                        " -XL " + validationDataLocation + "intervalTest.1.vcf",
+                        1, // just one output file
+                        Collections.singletonList(md5));
+        executeTest("testIncludeExcludeIsTheSame", spec);
+    }
+
+    // Intervals taken from a VCF containing symbolic alleles.
+    @Test(enabled = true)
+    public void testSymbolicAlleles() {
+        String md5 = "52745056d2fd5904857bbd4984c08098";
+        WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec(
+                "-T TestCountLociWalker" +
+                        " -I " + validationDataLocation + "NA12878.chrom1.SLX.SRP000032.2009_06.bam" +
+                        " -R " + b36KGReference +
+                        " -o %s" +
+                        " -L " + privateTestDir + "symbolic_alleles_1.vcf",
+                1, // just one output file
+                Collections.singletonList(md5));
+        executeTest("testSymbolicAlleles", spec);
+    }
+
+    // -isr INTERSECTION of a lexicographically sorted BED with a whole-contig interval.
+    @Test
+    public void testIntersectionOfLexicographicallySortedIntervals() {
+        final String md5 = "18be9375e5a753f766616a51eb6131f0";
+        WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec(
+                " -T TestCountLociWalker" +
+                " -I " + privateTestDir + "NA12878.4.snippet.bam" +
+                " -R " + b37KGReference +
+                " -L " + privateTestDir + "lexicographicallySortedIntervals.bed" +
+                " -L 4" +
+                " -isr INTERSECTION" +
+                " -o %s",
+                1, // just one output file
+                Collections.singletonList(md5));
+        executeTest("testIntersectionOfLexicographicallySortedIntervals", spec);
+    }
+}
diff --git a/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/arguments/InvalidArgumentIntegrationTest.java b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/arguments/InvalidArgumentIntegrationTest.java
new file mode 100644
index 0000000..985c344
--- /dev/null
+++ b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/arguments/InvalidArgumentIntegrationTest.java
@@ -0,0 +1,55 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.arguments;
+
+import org.broadinstitute.gatk.engine.walkers.WalkerTest;
+import org.broadinstitute.gatk.utils.exceptions.UserException;
+import org.testng.annotations.Test;
+
+public class InvalidArgumentIntegrationTest extends WalkerTest {
+    // An unknown -rf read filter name must fail with MalformedReadFilterException.
+    @Test
+    public void testUnknownReadFilter() {
+        executeTest("UnknownReadFilter",
+                new WalkerTest.WalkerTestSpec(
+                        " -T TestPrintReadsWalker" +
+                        " -R " + exampleFASTA +
+                        " -I " + publicTestDir + "exampleBAM.bam" +
+                        " -o %s" +
+                        " -rf TestUnknownReadFilter",
+                1, UserException.MalformedReadFilterException.class));
+    }
+
+    // An unknown -T walker name must fail with MalformedWalkerArgumentsException.
+    @Test
+    public void testMalformedWalkerArgs() {
+        executeTest("MalformedWalkerArgs",
+                new WalkerTest.WalkerTestSpec(
+                        " -T UnknownWalkerName" +
+                        " -R " + exampleFASTA +
+                        " -I " + publicTestDir + "exampleBAM.bam" +
+                        " -o %s",
+                1, UserException.MalformedWalkerArgumentsException.class));
+    }
+}
diff --git a/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/arguments/LoggingIntegrationTest.java b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/arguments/LoggingIntegrationTest.java
new file mode 100644
index 0000000..abb5fb6
--- /dev/null
+++ b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/arguments/LoggingIntegrationTest.java
@@ -0,0 +1,117 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.arguments;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.Arrays;
+
+import org.apache.commons.lang.StringUtils;
+import org.apache.log4j.Level;
+
+import org.testng.Assert;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+import org.broadinstitute.gatk.utils.BaseTest;
+import org.broadinstitute.gatk.utils.MD5DB;
+import org.broadinstitute.gatk.utils.MD5Mismatch;
+import org.broadinstitute.gatk.engine.CommandLineGATK;
+import org.broadinstitute.gatk.utils.runtime.*;
+
+/**
+ * Verifies that GATK output written via the -o argument is byte-identical
+ * (same MD5) to output piped to stdout, under different logging configurations.
+ */
+public class LoggingIntegrationTest {
+    private final MD5DB md5db = new MD5DB();
+
+    /** One logging configuration (level + optional explicit -log file) to exercise. */
+    private class LoggingTestProvider extends BaseTest.TestDataProvider {
+
+        // Command line shared by both the -o and the stdout-redirect invocations.
+        private final String baseCmdLine;
+
+        private final Level logLevel;
+        private final String logFileStr;
+        public final File argumentOutputFile;
+        public final File pipedOutputFile;
+
+        private LoggingTestProvider(final Level logLevel, final boolean explicitLogfile) throws IOException {
+            // super(...) registers this instance so TestDataProvider.getTests() can find it
+            super(LoggingTestProvider.class);
+
+            // TODO: a better command line that exercises log levels besides INFO
+            this.baseCmdLine = String.format("java -cp %s %s -T TestPrintVariantsWalker -R %s -V %s -L 1:1000000-2000000 --no_cmdline_in_header",
+                    StringUtils.join(RuntimeUtils.getAbsoluteClassPaths(), File.pathSeparatorChar),
+                    CommandLineGATK.class.getCanonicalName(), BaseTest.b37KGReference, BaseTest.b37_NA12878_OMNI);
+
+            this.logLevel = logLevel;
+            this.logFileStr = explicitLogfile ? " -log " + BaseTest.createTempFile(logLevel.toString(), "log") : "";
+            this.argumentOutputFile = BaseTest.createTempFile(logLevel.toString(), "vcf");
+            this.pipedOutputFile = BaseTest.createTempFile(logLevel.toString(), "vcf");
+        }
+
+        /**
+         * Full command line; appends "-o <file>" only when the caller is NOT
+         * planning to capture stdout (redirectStdout == false).
+         */
+        public final String getCmdLine(boolean redirectStdout) {
+            String command = String.format("%s -l %s %s", baseCmdLine, logLevel, logFileStr);
+            return redirectStdout ? command : command + " -o " + argumentOutputFile;
+        }
+
+        public String toString() {
+            return String.format("LoggingTestProvider logLevel=%s", logLevel);
+        }
+    }
+
+    @DataProvider(name = "LoggingTest")
+    public Object[][] makeLoggingTestProvider() throws IOException {
+        // Constructing the providers registers them as a side effect (see super call above).
+        for (Boolean explicitLogFile : Arrays.asList(true, false)) {
+            // TODO: enable other logging levels when tests for those exist
+            new LoggingTestProvider(Level.DEBUG, explicitLogFile);
+        }
+
+        return LoggingTestProvider.getTests(LoggingTestProvider.class);
+    }
+
+    /**
+     * test that using an output argument produces the same output as stdout
+     */
+    @Test(dataProvider = "LoggingTest")
+    public void testStdoutEquivalence(final LoggingTestProvider cfg) throws IOException {
+
+        ProcessController pc = ProcessController.getThreadLocal();
+
+        // run 1: output written via the -o argument
+
+        ProcessSettings ps = new ProcessSettings(cfg.getCmdLine(false).split("\\s+"));
+        pc.execAndCheck(ps);
+        String output_argument_md5 = md5db.calculateFileMD5(cfg.argumentOutputFile);
+
+        // run 2: same command, output captured from stdout
+
+        ps = new ProcessSettings(cfg.getCmdLine(true).split("\\s+"));
+        ps.setStdoutSettings(new OutputStreamSettings(cfg.pipedOutputFile));
+        pc.execAndCheck(ps);
+
+        // the stdout capture must have the same MD5 as the -o file
+        MD5DB.MD5Match result = md5db.testFileMD5("LoggingIntegrationTest", "LoggingIntegrationTest", cfg.pipedOutputFile, output_argument_md5, false);
+        if(result.failed) {
+            final MD5Mismatch failure = new MD5Mismatch(result.actualMD5, result.expectedMD5, result.diffEngineOutput);
+            Assert.fail(failure.toString());
+        }
+    }
+}
\ No newline at end of file
diff --git a/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/crypt/CryptUtilsUnitTest.java b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/crypt/CryptUtilsUnitTest.java
new file mode 100644
index 0000000..51d4c24
--- /dev/null
+++ b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/crypt/CryptUtilsUnitTest.java
@@ -0,0 +1,200 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.crypt;
+
+import org.broadinstitute.gatk.engine.crypt.CryptUtils;
+import org.broadinstitute.gatk.utils.BaseTest;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+import org.broadinstitute.gatk.utils.exceptions.UserException;
+import org.testng.SkipException;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+import org.testng.Assert;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.security.Key;
+import java.security.KeyPair;
+import java.security.PrivateKey;
+import java.security.PublicKey;
+import java.util.Arrays;
+
+/**
+ * Unit tests for CryptUtils: key-pair generation, round-tripping keys through
+ * files/streams, encode/decode, and loading the GATK master/distributed keys.
+ */
+public class CryptUtilsUnitTest extends BaseTest {
+
+    @Test
+    public void testGenerateValidKeyPairWithDefaultSettings() {
+        KeyPair keyPair = CryptUtils.generateKeyPair();
+        Assert.assertTrue(CryptUtils.keysDecryptEachOther(keyPair.getPrivate(), keyPair.getPublic()));
+    }
+
+    // Each row breaks exactly one of: key length, encryption algorithm, RNG algorithm.
+    @DataProvider( name = "InvalidKeyPairSettings" )
+    public Object[][] invalidKeyPairSettingsDataProvider() {
+        return new Object[][] {
+            { -1, CryptUtils.DEFAULT_ENCRYPTION_ALGORITHM, CryptUtils.DEFAULT_RANDOM_NUMBER_GENERATION_ALGORITHM},
+            { CryptUtils.DEFAULT_KEY_LENGTH, "Made-up algorithm", CryptUtils.DEFAULT_RANDOM_NUMBER_GENERATION_ALGORITHM},
+            { CryptUtils.DEFAULT_KEY_LENGTH, CryptUtils.DEFAULT_ENCRYPTION_ALGORITHM, "Made-up algorithm"}
+        };
+    }
+
+    // The local is unused on purpose: only the expected exception matters here.
+    @Test( dataProvider = "InvalidKeyPairSettings", expectedExceptions = ReviewedGATKException.class )
+    public void testGenerateKeyPairWithInvalidSettings( int keyLength, String encryptionAlgorithm, String randomNumberGenerationAlgorithm ) {
+        KeyPair keyPair = CryptUtils.generateKeyPair(keyLength, encryptionAlgorithm, randomNumberGenerationAlgorithm);
+    }
+
+    @Test
+    public void testGATKMasterKeyPairMutualDecryption() {
+        // Skip (rather than fail) when the master private key exists but is unreadable.
+        if ( gatkPrivateKeyExistsButReadPermissionDenied() ) {
+            throw new SkipException(String.format("Skipping test %s because we do not have permission to read the GATK private key",
+                                    "testGATKMasterKeyPairMutualDecryption"));
+        }
+
+        Assert.assertTrue(CryptUtils.keysDecryptEachOther(CryptUtils.loadGATKMasterPrivateKey(), CryptUtils.loadGATKMasterPublicKey()));
+    }
+
+    @Test
+    public void testGATKMasterPrivateKeyWithDistributedPublicKeyMutualDecryption() {
+        if ( gatkPrivateKeyExistsButReadPermissionDenied() ) {
+            throw new SkipException(String.format("Skipping test %s because we do not have permission to read the GATK private key",
+                                    "testGATKMasterPrivateKeyWithDistributedPublicKeyMutualDecryption"));
+        }
+
+        Assert.assertTrue(CryptUtils.keysDecryptEachOther(CryptUtils.loadGATKMasterPrivateKey(), CryptUtils.loadGATKDistributedPublicKey()));
+    }
+
+    // Round-trip a whole key pair through a pair of files.
+    @Test
+    public void testKeyPairWriteThenRead() {
+        KeyPair keyPair = CryptUtils.generateKeyPair();
+        File privateKeyFile = createTempFile("testKeyPairWriteThenRead_private", "key");
+        File publicKeyFile = createTempFile("testKeyPairWriteThenRead_public", "key");
+
+        CryptUtils.writeKeyPair(keyPair, privateKeyFile, publicKeyFile);
+
+        assertKeysAreEqual(keyPair.getPrivate(), CryptUtils.readPrivateKey(privateKeyFile));
+        assertKeysAreEqual(keyPair.getPublic(), CryptUtils.readPublicKey(publicKeyFile));
+    }
+
+    @Test
+    public void testPublicKeyWriteThenReadFromFile() {
+        File keyFile = createTempFile("testPublicKeyWriteThenReadFromFile", "key");
+        PublicKey publicKey = CryptUtils.generateKeyPair().getPublic();
+
+        CryptUtils.writeKey(publicKey, keyFile);
+
+        assertKeysAreEqual(publicKey, CryptUtils.readPublicKey(keyFile));
+    }
+
+    @Test
+    public void testPublicKeyWriteThenReadFromStream() throws IOException {
+        File keyFile = createTempFile("testPublicKeyWriteThenReadFromStream", "key");
+        PublicKey publicKey = CryptUtils.generateKeyPair().getPublic();
+
+        CryptUtils.writeKey(publicKey, keyFile);
+
+        // NOTE(review): the FileInputStream is never closed — harmless for a temp
+        // file in a test, but try-with-resources would be cleaner.
+        assertKeysAreEqual(publicKey, CryptUtils.readPublicKey(new FileInputStream(keyFile)));
+    }
+
+    @Test
+    public void testPrivateKeyWriteThenReadFromFile() {
+        File keyFile = createTempFile("testPrivateKeyWriteThenReadFromFile", "key");
+        PrivateKey privateKey = CryptUtils.generateKeyPair().getPrivate();
+
+        CryptUtils.writeKey(privateKey, keyFile);
+
+        assertKeysAreEqual(privateKey, CryptUtils.readPrivateKey(keyFile));
+    }
+
+    @Test
+    public void testPrivateKeyWriteThenReadFromStream() throws IOException {
+        File keyFile = createTempFile("testPrivateKeyWriteThenReadFromStream", "key");
+        PrivateKey privateKey = CryptUtils.generateKeyPair().getPrivate();
+
+        CryptUtils.writeKey(privateKey, keyFile);
+
+        // NOTE(review): FileInputStream left unclosed here as well.
+        assertKeysAreEqual(privateKey, CryptUtils.readPrivateKey(new FileInputStream(keyFile)));
+    }
+
+    @Test( expectedExceptions = UserException.CouldNotReadInputFile.class )
+    public void testReadNonExistentPublicKey() {
+        File nonExistentFile = new File("jdshgkdfhg.key");
+        Assert.assertFalse(nonExistentFile.exists());
+
+        CryptUtils.readPublicKey(nonExistentFile);
+    }
+
+    @Test( expectedExceptions = UserException.CouldNotReadInputFile.class )
+    public void testReadNonExistentPrivateKey() {
+        File nonExistentFile = new File("jdshgkdfhg.key");
+        Assert.assertFalse(nonExistentFile.exists());
+
+        CryptUtils.readPrivateKey(nonExistentFile);
+    }
+
+    // Encode a key to bytes and decode it back; the result must match exactly.
+    @Test
+    public void testDecodePublicKey() {
+        PublicKey originalKey = CryptUtils.generateKeyPair().getPublic();
+        PublicKey decodedKey = CryptUtils.decodePublicKey(originalKey.getEncoded(), CryptUtils.DEFAULT_ENCRYPTION_ALGORITHM);
+        assertKeysAreEqual(originalKey, decodedKey);
+    }
+
+    @Test
+    public void testDecodePrivateKey() {
+        PrivateKey originalKey = CryptUtils.generateKeyPair().getPrivate();
+        PrivateKey decodedKey = CryptUtils.decodePrivateKey(originalKey.getEncoded(), CryptUtils.DEFAULT_ENCRYPTION_ALGORITHM);
+        assertKeysAreEqual(originalKey, decodedKey);
+    }
+
+    // Smoke test: loading must simply not throw; the local is unused on purpose.
+    @Test
+    public void testLoadGATKMasterPrivateKey() {
+        if ( gatkPrivateKeyExistsButReadPermissionDenied() ) {
+            throw new SkipException(String.format("Skipping test %s because we do not have permission to read the GATK private key",
+                                    "testLoadGATKMasterPrivateKey"));
+        }
+
+        PrivateKey gatkMasterPrivateKey = CryptUtils.loadGATKMasterPrivateKey();
+    }
+
+    // Smoke test: loading must simply not throw.
+    @Test
+    public void testLoadGATKMasterPublicKey() {
+        PublicKey gatkMasterPublicKey = CryptUtils.loadGATKMasterPublicKey();
+    }
+
+    // Smoke test: loading must simply not throw.
+    @Test
+    public void testLoadGATKDistributedPublicKey() {
+        PublicKey gatkDistributedPublicKey = CryptUtils.loadGATKDistributedPublicKey();
+    }
+
+    /** Two keys are "equal" when encoding, algorithm, and format all match. */
+    private void assertKeysAreEqual( Key originalKey, Key keyFromDisk ) {
+        Assert.assertTrue(Arrays.equals(originalKey.getEncoded(), keyFromDisk.getEncoded()));
+        Assert.assertEquals(originalKey.getAlgorithm(), keyFromDisk.getAlgorithm());
+        Assert.assertEquals(originalKey.getFormat(), keyFromDisk.getFormat());
+    }
+
+    /** True when the GATK master private key file exists but is not readable by us. */
+    private boolean gatkPrivateKeyExistsButReadPermissionDenied() {
+        File gatkPrivateKey = new File(CryptUtils.GATK_MASTER_PRIVATE_KEY_FILE);
+        return gatkPrivateKey.exists() && ! gatkPrivateKey.canRead();
+    }
+}
diff --git a/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/crypt/GATKKeyIntegrationTest.java b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/crypt/GATKKeyIntegrationTest.java
new file mode 100644
index 0000000..2d2ce2b
--- /dev/null
+++ b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/crypt/GATKKeyIntegrationTest.java
@@ -0,0 +1,157 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.crypt;
+
+import org.broadinstitute.gatk.engine.walkers.WalkerTest;
+import org.broadinstitute.gatk.engine.phonehome.GATKRunReport;
+import org.broadinstitute.gatk.utils.exceptions.UserException;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+import java.util.Arrays;
+
+public class GATKKeyIntegrationTest extends WalkerTest {
+
+    public static final String BASE_COMMAND = String.format("-T TestPrintReadsWalker -R %s -I %s -o %%s",
+                                                            publicTestDir + "exampleFASTA.fasta",
+                                                            publicTestDir + "exampleBAM.bam");
+    public static final String MD5_UPON_SUCCESSFUL_RUN = "462656ec9632f8c21ee534d35093c3f8";
+
+
+    private void runGATKKeyTest ( String testName, String etArg, String keyArg, Class expectedException, String md5 ) {
+        String command = BASE_COMMAND + String.format(" %s %s", etArg, keyArg);
+
+        WalkerTestSpec spec = expectedException != null ?
+                              new WalkerTestSpec(command, 1, expectedException) :
+                              new WalkerTestSpec(command, 1, Arrays.asList(md5));
+
+        spec.disableImplicitArgs(); // Turn off automatic inclusion of -et/-K args by WalkerTest
+        executeTest(testName, spec);
+    }
+
+    @Test
+    public void testValidKeyNoET() {
+        runGATKKeyTest("testValidKeyNoET",
+                       "-et " + GATKRunReport.PhoneHomeOption.NO_ET,
+                       "-K " + keysDataLocation + "valid.key",
+                       null,
+                       MD5_UPON_SUCCESSFUL_RUN);
+    }
+
+    @Test
+    public void testValidKeyETStdout() {
+        runGATKKeyTest("testValidKeyETStdout",
+                       "-et " + GATKRunReport.PhoneHomeOption.STDOUT,
+                       "-K " + keysDataLocation + "valid.key",
+                       null,
+                       MD5_UPON_SUCCESSFUL_RUN);
+    }
+
+    @Test
+    public void testValidKeyETStandard() {
+        runGATKKeyTest("testValidKeyETStandard",
+                       "",
+                       "-K " + keysDataLocation + "valid.key",
+                       null,
+                       MD5_UPON_SUCCESSFUL_RUN);
+    }
+
+    @Test
+    public void testNoKeyNoET() {
+        runGATKKeyTest("testNoKeyNoET",
+                       "-et " + GATKRunReport.PhoneHomeOption.NO_ET,
+                       "",
+                       UserException.class,
+                       null);
+    }
+
+    @Test
+    public void testNoKeyETStdout() {
+        runGATKKeyTest("testNoKeyETStdout",
+                       "-et " + GATKRunReport.PhoneHomeOption.STDOUT,
+                       "",
+                       UserException.class,
+                       null);
+    }
+
+    @Test
+    public void testNoKeyETStandard() {
+        runGATKKeyTest("testNoKeyETStandard",
+                       "",
+                       "",
+                       null,
+                       MD5_UPON_SUCCESSFUL_RUN);
+    }
+
+    @Test
+    public void testRevokedKey() {
+        runGATKKeyTest("testRevokedKey",
+                       "-et " + GATKRunReport.PhoneHomeOption.NO_ET,
+                       "-K " + keysDataLocation + "revoked.key",
+                       UserException.KeySignatureVerificationException.class,
+                       null);
+    }
+
+    @DataProvider(name = "CorruptKeyTestData")
+    public Object[][] corruptKeyDataProvider() {
+        return new Object[][] {
+            { "corrupt_empty.key",                  UserException.UnreadableKeyException.class },
+            { "corrupt_single_byte_file.key",       UserException.UnreadableKeyException.class },
+            { "corrupt_random_contents.key",        UserException.UnreadableKeyException.class },
+            { "corrupt_single_byte_deletion.key",   UserException.UnreadableKeyException.class },
+            { "corrupt_single_byte_insertion.key",  UserException.UnreadableKeyException.class },
+            { "corrupt_single_byte_change.key",     UserException.UnreadableKeyException.class },
+            { "corrupt_multi_byte_deletion.key",    UserException.UnreadableKeyException.class },
+            { "corrupt_multi_byte_insertion.key",   UserException.UnreadableKeyException.class },
+            { "corrupt_multi_byte_change.key",      UserException.UnreadableKeyException.class },
+            { "corrupt_bad_isize_field.key",        UserException.UnreadableKeyException.class },
+            { "corrupt_bad_crc.key",                UserException.UnreadableKeyException.class },
+            { "corrupt_no_email_address.key",       UserException.UnreadableKeyException.class },
+            { "corrupt_no_sectional_delimiter.key", UserException.UnreadableKeyException.class },
+            { "corrupt_no_signature.key",           UserException.UnreadableKeyException.class },
+            { "corrupt_bad_signature.key",          UserException.KeySignatureVerificationException.class },
+            { "corrupt_non_gzipped_valid_key.key",  UserException.UnreadableKeyException.class }
+        };
+    }
+
+    @Test(dataProvider = "CorruptKeyTestData")
+    public void testCorruptKey ( String corruptKeyName, Class expectedException ) {
+        runGATKKeyTest(String.format("testCorruptKey (%s)", corruptKeyName),
+                       "-et " + GATKRunReport.PhoneHomeOption.NO_ET,
+                       "-K " + keysDataLocation + corruptKeyName,
+                       expectedException,
+                       null);
+    }
+
+    @Test
+    public void testCorruptButNonRequiredKey() {
+        runGATKKeyTest("testCorruptButNonRequiredKey",
+                       "",
+                       "-K " + keysDataLocation + "corrupt_random_contents.key",
+                       null,
+                       MD5_UPON_SUCCESSFUL_RUN);
+    }
+}
diff --git a/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/crypt/GATKKeyUnitTest.java b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/crypt/GATKKeyUnitTest.java
new file mode 100644
index 0000000..2d15795
--- /dev/null
+++ b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/crypt/GATKKeyUnitTest.java
@@ -0,0 +1,129 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.crypt;
+
+import org.broadinstitute.gatk.utils.BaseTest;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+import org.broadinstitute.gatk.utils.exceptions.UserException;
+import org.testng.SkipException;
+import org.testng.annotations.Test;
+import org.testng.Assert;
+
+import java.io.File;
+import java.security.KeyPair;
+import java.security.PrivateKey;
+import java.security.PublicKey;
+
+public class GATKKeyUnitTest extends BaseTest {
+
+    @Test
+    public void testCreateGATKKeyUsingMasterKeyPair() {
+        if ( gatkPrivateKeyExistsButReadPermissionDenied() ) {
+            throw new SkipException(String.format("Skipping test %s because we do not have permission to read the GATK private key",
+                                    "testCreateGATKKeyUsingMasterKeyPair"));
+        }
+
+        PrivateKey masterPrivateKey = CryptUtils.loadGATKMasterPrivateKey();
+        PublicKey masterPublicKey = CryptUtils.loadGATKMasterPublicKey();
+
+        // We should be able to create a valid GATKKey using our master key pair:
+        GATKKey key = new GATKKey(masterPrivateKey, masterPublicKey, "foo at bar.com");
+        Assert.assertTrue(key.isValid());
+    }
+
+    @Test
+    public void testCreateGATKKeyUsingMasterPrivateKeyAndDistributedPublicKey() {
+        if ( gatkPrivateKeyExistsButReadPermissionDenied() ) {
+            throw new SkipException(String.format("Skipping test %s because we do not have permission to read the GATK private key",
+                                    "testCreateGATKKeyUsingMasterPrivateKeyAndDistributedPublicKey"));
+        }
+
+        PrivateKey masterPrivateKey = CryptUtils.loadGATKMasterPrivateKey();
+        PublicKey distributedPublicKey = CryptUtils.loadGATKDistributedPublicKey();
+
+        // We should also be able to create a valid GATKKey using our master private
+        // key and the public key we distribute with the GATK:
+        GATKKey key = new GATKKey(masterPrivateKey, distributedPublicKey, "foo at bar.com");
+        Assert.assertTrue(key.isValid());
+    }
+
+    @Test( expectedExceptions = ReviewedGATKException.class )
+    public void testKeyPairMismatch() {
+        KeyPair firstKeyPair = CryptUtils.generateKeyPair();
+        KeyPair secondKeyPair = CryptUtils.generateKeyPair();
+
+        // Attempting to create a GATK Key with private and public keys that aren't part of the
+        // same key pair should immediately trigger a validation failure:
+        GATKKey key = new GATKKey(firstKeyPair.getPrivate(), secondKeyPair.getPublic(), "foo at bar.com");
+    }
+
+    @Test( expectedExceptions = ReviewedGATKException.class )
+    public void testEncryptionAlgorithmMismatch() {
+        KeyPair keyPair = CryptUtils.generateKeyPair(CryptUtils.DEFAULT_KEY_LENGTH, "DSA", CryptUtils.DEFAULT_RANDOM_NUMBER_GENERATION_ALGORITHM);
+
+        // Attempting to use a DSA private key to create an RSA signature should throw an error:
+        GATKKey key = new GATKKey(keyPair.getPrivate(), keyPair.getPublic(), "foo at bar.com", "SHA1withRSA");
+    }
+
+    @Test( expectedExceptions = UserException.class )
+    public void testInvalidEmailAddress() {
+        String emailAddressWithNulByte = new String(new byte[] { 0 });
+        KeyPair keyPair = CryptUtils.generateKeyPair();
+
+        // Email addresses cannot contain the NUL byte, since it's used as a sectional delimiter in the key file:
+        GATKKey key = new GATKKey(keyPair.getPrivate(), keyPair.getPublic(), emailAddressWithNulByte);
+    }
+
+    @Test
+    public void testCreateGATKKeyFromValidKeyFile() {
+        GATKKey key = new GATKKey(CryptUtils.loadGATKDistributedPublicKey(), new File(keysDataLocation + "valid.key"));
+        Assert.assertTrue(key.isValid());
+    }
+
+    @Test( expectedExceptions = UserException.UnreadableKeyException.class )
+    public void testCreateGATKKeyFromCorruptKeyFile() {
+        GATKKey key = new GATKKey(CryptUtils.loadGATKDistributedPublicKey(), new File(keysDataLocation + "corrupt_random_contents.key"));
+    }
+
+    @Test
+    public void testCreateGATKKeyFromRevokedKeyFile() {
+        GATKKey key = new GATKKey(CryptUtils.loadGATKDistributedPublicKey(), new File(keysDataLocation + "revoked.key"));
+        Assert.assertFalse(key.isValid());
+    }
+
+    @Test( expectedExceptions = UserException.CouldNotReadInputFile.class )
+    public void testCreateGATKKeyFromNonExistentFile() {
+        File nonExistentFile = new File("ghfdkgsdhg.key");
+        Assert.assertFalse(nonExistentFile.exists());
+
+        GATKKey key = new GATKKey(CryptUtils.loadGATKDistributedPublicKey(), nonExistentFile);
+    }
+
+    private boolean gatkPrivateKeyExistsButReadPermissionDenied() {
+        File gatkPrivateKey = new File(CryptUtils.GATK_MASTER_PRIVATE_KEY_FILE);
+        return gatkPrivateKey.exists() && ! gatkPrivateKey.canRead();
+    }
+}
diff --git a/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/datasources/providers/AllLocusViewUnitTest.java b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/datasources/providers/AllLocusViewUnitTest.java
new file mode 100644
index 0000000..02751f6
--- /dev/null
+++ b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/datasources/providers/AllLocusViewUnitTest.java
@@ -0,0 +1,90 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.datasources.providers;
+
+
+import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
+import org.testng.Assert;
+import org.broadinstitute.gatk.utils.GenomeLoc;
+import org.broadinstitute.gatk.utils.contexts.AlignmentContext;
+
+import java.util.List;
+/**
+ * User: hanna
+ * Date: May 12, 2009
+ * Time: 2:34:46 PM
+ * BROAD INSTITUTE SOFTWARE COPYRIGHT NOTICE AND AGREEMENT
+ * Software and documentation are copyright 2005 by the Broad Institute.
+ * All rights are reserved.
+ *
+ * Users acknowledge that this software is supplied without any warranty or support.
+ * The Broad Institute is not responsible for its use, misuse, or
+ * functionality.
+ */
+
+/**
+ * Test the view of all loci.
+ */
+public class AllLocusViewUnitTest extends LocusViewTemplate {
+
+    @Override
+    protected LocusView createView(LocusShardDataProvider provider) {
+        return new AllLocusView(provider);
+    }
+
+    /**
+     * Test the reads according to an independently derived context.
+     * @param view
+     * @param range
+     * @param reads
+     */
+    @Override
+    protected void testReadsInContext( LocusView view, List<GenomeLoc> range, List<GATKSAMRecord> reads ) {
+        AllLocusView allLocusView = (AllLocusView)view;
+
+        // TODO: Should skip over loci not in the given range.
+        GenomeLoc firstLoc = range.get(0);
+        GenomeLoc lastLoc = range.get(range.size()-1);
+        GenomeLoc bounds = genomeLocParser.createGenomeLoc(firstLoc.getContig(),firstLoc.getStart(),lastLoc.getStop());
+
+        for( int i = bounds.getStart(); i <= bounds.getStop(); i++ ) {
+            GenomeLoc site = genomeLocParser.createGenomeLoc("chr1",i);
+            AlignmentContext locusContext = allLocusView.next();
+            Assert.assertEquals(locusContext.getLocation(), site, "Locus context location is incorrect");
+            int expectedReadsAtSite = 0;
+
+            for( GATKSAMRecord read: reads ) {
+                if(genomeLocParser.createGenomeLoc(read).containsP(locusContext.getLocation())) {
+                    Assert.assertTrue(locusContext.getReads().contains(read),"Target locus context does not contain reads");
+                    expectedReadsAtSite++;
+                }
+            }
+
+            Assert.assertEquals(locusContext.getReads().size(), expectedReadsAtSite, "Found wrong number of reads at site");
+        }
+
+    }
+}
diff --git a/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/datasources/providers/CoveredLocusViewUnitTest.java b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/datasources/providers/CoveredLocusViewUnitTest.java
new file mode 100644
index 0000000..2c45ebc
--- /dev/null
+++ b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/datasources/providers/CoveredLocusViewUnitTest.java
@@ -0,0 +1,102 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.datasources.providers;
+
+
+import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
+import org.testng.Assert;
+import org.broadinstitute.gatk.utils.GenomeLoc;
+import org.broadinstitute.gatk.utils.contexts.AlignmentContext;
+
+import java.util.List;
+/**
+ * User: hanna
+ * Date: May 12, 2009
+ * Time: 2:34:46 PM
+ * BROAD INSTITUTE SOFTWARE COPYRIGHT NOTICE AND AGREEMENT
+ * Software and documentation are copyright 2005 by the Broad Institute.
+ * All rights are reserved.
+ *
+ * Users acknowledge that this software is supplied without any warranty or support.
+ * The Broad Institute is not responsible for its use, misuse, or
+ * functionality.
+ */
+
+/**
+ * Test the CoveredLocusView.
+ */
public class CoveredLocusViewUnitTest extends LocusViewTemplate {

    /**
     * Retrieve a covered locus view over the given shard data.
     */
    @Override
    protected LocusView createView(LocusShardDataProvider provider) {
        return new CoveredLocusView(provider);
    }

    /**
     * Test the reads according to an independently derived context: for each
     * position in the shard bounds with at least one overlapping read, the
     * view must emit a context at that position containing exactly those reads.
     *
     * @param view  the view under test (must be a CoveredLocusView)
     * @param range the intervals covered by the shard
     * @param reads the reads known to be in the shard
     */
    @Override
    protected void testReadsInContext( LocusView view, List<GenomeLoc> range, List<GATKSAMRecord> reads ) {
        CoveredLocusView coveredLocusView = (CoveredLocusView)view;

        // TODO: Should skip over loci not in the given range.
        GenomeLoc firstLoc = range.get(0);
        GenomeLoc lastLoc = range.get(range.size()-1);
        GenomeLoc bounds = genomeLocParser.createGenomeLoc(firstLoc.getContig(),firstLoc.getStart(),lastLoc.getStop());

        for( int i = bounds.getStart(); i <= bounds.getStop(); i++ ) {
            // NOTE(review): the contig here is hard-coded to "chr1" while
            // bounds uses firstLoc.getContig() — confirm the test fixtures
            // always place reads on chr1.
            GenomeLoc site = genomeLocParser.createGenomeLoc("chr1",i);

            // Independently count the reads overlapping this position.
            int expectedReadsAtSite = 0;
            for( GATKSAMRecord read: reads ) {
                if( genomeLocParser.createGenomeLoc(read).containsP(site) )
                    expectedReadsAtSite++;
            }

            // A covered view only emits positions with at least one read.
            if( expectedReadsAtSite < 1 )
                continue;

            Assert.assertTrue(coveredLocusView.hasNext(),"Incorrect number of loci in view");

            AlignmentContext locusContext = coveredLocusView.next();
            Assert.assertEquals(locusContext.getLocation(), site, "Target locus context location is incorrect");
            Assert.assertEquals(locusContext.getReads().size(), expectedReadsAtSite, "Found wrong number of reads at site");

            // Every overlapping read must appear in the emitted context.
            for( GATKSAMRecord read: reads ) {
                if(genomeLocParser.createGenomeLoc(read).containsP(locusContext.getLocation()))
                    Assert.assertTrue(locusContext.getReads().contains(read),"Target locus context does not contain reads");
            }
        }

        // Once all covered positions are consumed, the view must be exhausted.
        Assert.assertFalse(coveredLocusView.hasNext(),"Iterator is not bounded at boundaries of shard");
    }
}
diff --git a/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/datasources/providers/IntervalReferenceOrderedViewUnitTest.java b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/datasources/providers/IntervalReferenceOrderedViewUnitTest.java
new file mode 100644
index 0000000..27936b5
--- /dev/null
+++ b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/datasources/providers/IntervalReferenceOrderedViewUnitTest.java
@@ -0,0 +1,366 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.datasources.providers;
+
+import htsjdk.samtools.util.PeekableIterator;
+import htsjdk.samtools.SAMFileHeader;
+import htsjdk.tribble.SimpleFeature;
+import htsjdk.tribble.Feature;
+import org.broadinstitute.gatk.utils.BaseTest;
+import org.broadinstitute.gatk.utils.commandline.RodBinding;
+import org.broadinstitute.gatk.utils.refdata.RODRecordListImpl;
+import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
+import org.broadinstitute.gatk.utils.refdata.utils.GATKFeature;
+import org.broadinstitute.gatk.utils.refdata.utils.RODRecordList;
+import org.broadinstitute.gatk.utils.GenomeLoc;
+import org.broadinstitute.gatk.utils.GenomeLocParser;
+import org.broadinstitute.gatk.utils.Utils;
+import org.broadinstitute.gatk.utils.sam.ArtificialSAMUtils;
+import org.testng.Assert;
+import org.testng.annotations.BeforeClass;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+import java.util.*;
+
+/**
+ * @author depristo
+ */
+public class IntervalReferenceOrderedViewUnitTest extends BaseTest {
+    private static int startingChr = 1;
+    private static int endingChr = 2;
+    private static int readCount = 100;
+    private static int DEFAULT_READ_LENGTH = ArtificialSAMUtils.DEFAULT_READ_LENGTH;
+    private static String contig;
+    private static SAMFileHeader header;
+
+    private GenomeLocParser genomeLocParser;
+
    /**
     * Builds an artificial SAM header before any test runs and derives the
     * contig name and GenomeLocParser from it, then registers all test cases.
     */
    @BeforeClass
    public void beforeClass() {
        // Header covers the configured chromosome range, sized so all reads fit.
        header = ArtificialSAMUtils.createArtificialSamHeader((endingChr - startingChr) + 1, startingChr, readCount + DEFAULT_READ_LENGTH);
        contig = header.getSequence(0).getSequenceName();
        genomeLocParser = new GenomeLocParser(header.getSequenceDictionary());

        initializeTests();
    }
+
    /** Orders Features by genomic position, using GenomeLoc's natural ordering. */
    private class CompareFeatures implements Comparator<Feature> {
        @Override
        public int compare(Feature o1, Feature o2) {
            return genomeLocParser.createGenomeLoc(o1).compareTo(genomeLocParser.createGenomeLoc(o2));
        }
    }
+
    /**
     * Test fixture pairing a position-sorted list of Features with one or
     * more query intervals. Instances register themselves with
     * TestDataProvider so the DataProvider below can enumerate them.
     */
    private class ReadMetaDataTrackerRODStreamTest extends TestDataProvider {
        final List<Feature> allFeatures;   // all features, sorted by genomic position
        final List<GenomeLoc> intervals;   // query intervals, sorted

        public ReadMetaDataTrackerRODStreamTest(final List<Feature> allFeatures, final GenomeLoc interval) {
            this(allFeatures, Collections.singletonList(interval));
        }

        public ReadMetaDataTrackerRODStreamTest(final List<Feature> allFeatures, final List<GenomeLoc> intervals) {
            super(ReadMetaDataTrackerRODStreamTest.class);
            // Defensive copies: both lists are sorted in place below.
            this.allFeatures = new ArrayList<Feature>(allFeatures);
            Collections.sort(this.allFeatures, new CompareFeatures());
            this.intervals = new ArrayList<GenomeLoc>(intervals);
            Collections.sort(this.intervals);
            setName(String.format("%s nFeatures %d intervals %s", getClass().getSimpleName(), allFeatures.size(),
                    intervals.size() == 1 ? intervals.get(0) : "size " + intervals.size()));
        }

        /** Wraps the feature list in a fresh ROD iterator under the given track name. */
        public PeekableIterator<RODRecordList> getIterator(final String name) {
            return new PeekableIterator<RODRecordList>(new TribbleIteratorFromCollection(name, genomeLocParser, allFeatures));
        }

        /** Independently computes the set of features overlapping the interval (the expected answer). */
        public Set<Feature> getExpectedOverlaps(final GenomeLoc interval) {
            final Set<Feature> overlapping = new HashSet<Feature>();
            for ( final Feature f : allFeatures )
                if ( genomeLocParser.createGenomeLoc(f).overlapsP(interval) )
                    overlapping.add(f);
            return overlapping;
        }
    }
+
    /**
     * Registers all test cases: a hand-picked feature set exercising point,
     * abutting, and overlapping features, plus variants of that set with
     * fully- and partially-spanning elements, and an empty-data case.
     */
    public void initializeTests() {
        final List<Feature> handPickedFeatures = new ArrayList<Feature>();

        // Mix of point features, overlapping/abutting features, and a gap at 7-8.
        handPickedFeatures.add(new SimpleFeature(contig, 1, 1));
        handPickedFeatures.add(new SimpleFeature(contig, 2, 5));
        handPickedFeatures.add(new SimpleFeature(contig, 4, 4));
        handPickedFeatures.add(new SimpleFeature(contig, 6, 6));
        handPickedFeatures.add(new SimpleFeature(contig, 9, 10));
        handPickedFeatures.add(new SimpleFeature(contig, 10, 10));
        handPickedFeatures.add(new SimpleFeature(contig, 10, 11));
        handPickedFeatures.add(new SimpleFeature(contig, 13, 20));

        createTestsForFeatures(handPickedFeatures);

        // test in the presence of a large spanning element
        {
            List<Feature> oneLargeSpan = new ArrayList<Feature>(handPickedFeatures);
            oneLargeSpan.add(new SimpleFeature(contig, 1, 30));
            createTestsForFeatures(oneLargeSpan);
        }

        // test in the presence of a partially spanning element
        {
            List<Feature> partialSpanStart = new ArrayList<Feature>(handPickedFeatures);
            partialSpanStart.add(new SimpleFeature(contig, 1, 6));
            createTestsForFeatures(partialSpanStart);
        }

        // test in the presence of a partially spanning element at the end
        {
            List<Feature> partialSpanEnd = new ArrayList<Feature>(handPickedFeatures);
            partialSpanEnd.add(new SimpleFeature(contig, 10, 30));
            createTestsForFeatures(partialSpanEnd);
        }

        // no data at all
        final GenomeLoc loc = genomeLocParser.createGenomeLoc(contig, 5, 5);
        new ReadMetaDataTrackerRODStreamTest(Collections.<Feature>emptyList(), loc);
    }
+
+    // --------------------------------------------------------------------------------
+    //
+    // tests for the lower level IntervalOverlappingRODsFromStream
+    //
+    // --------------------------------------------------------------------------------
+
    /** Exposes every ReadMetaDataTrackerRODStreamTest instance registered during initializeTests(). */
    @DataProvider(name = "ReadMetaDataTrackerRODStreamTest")
    public Object[][] createReadMetaDataTrackerRODStreamTest() {
        return ReadMetaDataTrackerRODStreamTest.getTests(ReadMetaDataTrackerRODStreamTest.class);
    }
+
    /**
     * Builds a single GenomeLoc on {@code contig} covering the given locations.
     * NOTE(review): both accumulators are seeded with 1 rather than
     * Integer.MAX_VALUE/MIN_VALUE, so the returned span always includes
     * position 1 even when every location starts later — confirm this
     * clamping is intended before reusing this helper elsewhere.
     */
    private GenomeLoc span(final List<GenomeLoc> features) {
        int featuresStart = 1; for ( final GenomeLoc f : features ) featuresStart = Math.min(featuresStart, f.getStart());
        int featuresStop = 1; for ( final GenomeLoc f : features ) featuresStop = Math.max(featuresStop, f.getStop());
        return genomeLocParser.createGenomeLoc(contig, featuresStart, featuresStop);
    }
+
    /**
     * Registers a family of test cases for the given feature list: query
     * intervals of several widths tiled across the features' span, plus
     * intervals aligned to, just missing, and just spanning each feature,
     * and finally one multi-interval case combining all of them.
     * NOTE(review): featuresStart/featuresStop are seeded with 1 rather than
     * Integer.MAX_VALUE/MIN_VALUE, so tiling always begins at position 1
     * regardless of where the first feature starts — confirm intended (all
     * current fixtures include a feature at position 1).
     */
    private void createTestsForFeatures(final List<Feature> features) {
        int featuresStart = 1; for ( final Feature f : features ) featuresStart = Math.min(featuresStart, f.getStart());
        int featuresStop = 1; for ( final Feature f : features ) featuresStop = Math.max(featuresStop, f.getEnd());

        for ( final int size : Arrays.asList(1, 5, 10, 100) ) {
            final List<GenomeLoc> allIntervals = new ArrayList<GenomeLoc>();
            // regularly spaced intervals of width `size` across the span
            for ( int start = featuresStart; start < featuresStop; start++) {
                final GenomeLoc loc = genomeLocParser.createGenomeLoc(contig, start, start + size - 1);
                allIntervals.add(loc);
                new ReadMetaDataTrackerRODStreamTest(features, loc);
            }

            // starting and stopping at every feature
            for ( final Feature f : features ) {
                // just at the feature
                allIntervals.add(genomeLocParser.createGenomeLoc(contig, f.getStart(), f.getEnd()));
                new ReadMetaDataTrackerRODStreamTest(features, allIntervals.get(allIntervals.size() - 1));

                // up to end
                allIntervals.add(genomeLocParser.createGenomeLoc(contig, f.getStart() - 1, f.getEnd()));
                new ReadMetaDataTrackerRODStreamTest(features, allIntervals.get(allIntervals.size() - 1));

                // missing by 1
                allIntervals.add(genomeLocParser.createGenomeLoc(contig, f.getStart() + 1, f.getEnd() + 1));
                new ReadMetaDataTrackerRODStreamTest(features, allIntervals.get(allIntervals.size() - 1));

                // just spanning
                allIntervals.add(genomeLocParser.createGenomeLoc(contig, f.getStart() - 1, f.getEnd() + 1));
                new ReadMetaDataTrackerRODStreamTest(features, allIntervals.get(allIntervals.size() - 1));
            }

            // one multi-interval case combining every interval built above
            new ReadMetaDataTrackerRODStreamTest(features, allIntervals);
        }
    }
+
+    @Test(enabled = true, dataProvider = "ReadMetaDataTrackerRODStreamTest")
+    public void runReadMetaDataTrackerRODStreamTest_singleQuery(final ReadMetaDataTrackerRODStreamTest data) {
+        if ( data.intervals.size() == 1 ) {
+            final String name = "testName";
+            final PeekableIterator<RODRecordList> iterator = data.getIterator(name);
+            final IntervalOverlappingRODsFromStream stream = new IntervalOverlappingRODsFromStream(name, iterator);
+            testRODStream(data, stream, Collections.singletonList(data.intervals.get(0)));
+        }
+    }
+
+    @Test(enabled = true, dataProvider = "ReadMetaDataTrackerRODStreamTest", dependsOnMethods = "runReadMetaDataTrackerRODStreamTest_singleQuery")
+    public void runReadMetaDataTrackerRODStreamTest_multipleQueries(final ReadMetaDataTrackerRODStreamTest data) {
+        if ( data.intervals.size() > 1 ) {
+            final String name = "testName";
+            final PeekableIterator<RODRecordList> iterator = data.getIterator(name);
+            final IntervalOverlappingRODsFromStream stream = new IntervalOverlappingRODsFromStream(name, iterator);
+            testRODStream(data, stream, data.intervals);
+        }
+    }
+
+    private void testRODStream(final ReadMetaDataTrackerRODStreamTest test, final IntervalOverlappingRODsFromStream stream, final List<GenomeLoc> intervals) {
+        for ( final GenomeLoc interval : intervals ) {
+            final RODRecordList query = stream.getOverlapping(interval);
+            final HashSet<Feature> queryFeatures = new HashSet<Feature>();
+            for ( final GATKFeature f : query ) queryFeatures.add((Feature)f.getUnderlyingObject());
+            final Set<Feature> overlaps = test.getExpectedOverlaps(interval);
+
+            Assert.assertEquals(queryFeatures.size(), overlaps.size(), "IntervalOverlappingRODsFromStream didn't return the expected set of overlapping features." +
+                    " Expected size = " + overlaps.size() + " but saw " + queryFeatures.size());
+
+            BaseTest.assertEqualsSet(queryFeatures, overlaps, "IntervalOverlappingRODsFromStream didn't return the expected set of overlapping features." +
+                    " Expected = " + Utils.join(",", overlaps) + " but saw " + Utils.join(",", queryFeatures));
+        }
+    }
+
+    // --------------------------------------------------------------------------------
+    //
+    // tests for the higher level tracker itself
+    //
+    // --------------------------------------------------------------------------------
+
+    @DataProvider(name = "ReadMetaDataTrackerTests")
+    public Object[][] createTrackerTests() {
+        List<Object[]> tests = new ArrayList<Object[]>();
+
+        final Object[][] singleTests = ReadMetaDataTrackerRODStreamTest.getTests(ReadMetaDataTrackerRODStreamTest.class);
+        final List<ReadMetaDataTrackerRODStreamTest> multiSiteTests = new ArrayList<ReadMetaDataTrackerRODStreamTest>();
+        for ( final Object[] singleTest : singleTests ) {
+            if ( ((ReadMetaDataTrackerRODStreamTest)singleTest[0]).intervals.size() > 1 )
+                multiSiteTests.add((ReadMetaDataTrackerRODStreamTest)singleTest[0]);
+        }
+
+        for ( final boolean testStateless : Arrays.asList(true, false) ) {
+            // all pairwise tests
+            for ( List<ReadMetaDataTrackerRODStreamTest> singleTest : Utils.makePermutations(multiSiteTests, 2, false)) {
+                tests.add(new Object[]{singleTest, testStateless});
+            }
+
+            // all 3 way pairwise tests
+            //for ( List<ReadMetaDataTrackerRODStreamTest> singleTest : Utils.makePermutations(multiSiteTests, 3, false)) {
+            //    tests.add(new Object[]{singleTest, testStateless});
+            //}
+        }
+
+        logger.warn("Creating " + tests.size() + " tests for ReadMetaDataTrackerTests");
+        return tests.toArray(new Object[][]{});
+    }
+
    /**
     * End-to-end test of IntervalReferenceOrderedView: builds one iterator and one
     * RodBinding per input stream fixture, constructs trackers over the union of all
     * fixtures' intervals, and checks every binding returns the expected features.
     *
     * @param RODs          the per-track stream test fixtures to combine
     * @param testStateless if true, each tracker is validated immediately as it is
     *                      created; if false, all trackers are collected first and
     *                      validated afterwards (mimics nano-scheduling usage)
     */
    @Test(enabled = true, dataProvider = "ReadMetaDataTrackerTests", dependsOnMethods = "runReadMetaDataTrackerRODStreamTest_multipleQueries")
    public void runReadMetaDataTrackerTest(final List<ReadMetaDataTrackerRODStreamTest> RODs, final boolean testStateless) {
        final List<String> names = new ArrayList<String>();
        final List<PeekableIterator<RODRecordList>> iterators = new ArrayList<PeekableIterator<RODRecordList>>();
        final List<GenomeLoc> intervals = new ArrayList<GenomeLoc>();
        final List<RodBinding<Feature>> rodBindings = new ArrayList<RodBinding<Feature>>();

        // one uniquely named binding + iterator per input track; the query
        // intervals are the union of all tracks' intervals
        for ( int i = 0; i < RODs.size(); i++ ) {
            final RodBinding<Feature> rodBinding = new RodBinding<Feature>(Feature.class, "name"+i);
            rodBindings.add(rodBinding);
            final String name = rodBinding.getName();
            names.add(name);
            iterators.add(RODs.get(i).getIterator(name));
            intervals.addAll(RODs.get(i).intervals);
        }

        Collections.sort(intervals);
        final GenomeLoc span = span(intervals);  // span() is a helper elsewhere in this class
        final IntervalReferenceOrderedView view = new IntervalReferenceOrderedView(genomeLocParser, span, names, iterators);

        if ( testStateless ) {
            // test each tracker is well formed, as each is created
            for ( final GenomeLoc interval : intervals ) {
                final RefMetaDataTracker tracker = view.getReferenceOrderedDataForInterval(interval);
                testMetaDataTrackerBindings(tracker, interval, RODs, rodBindings);
            }
        } else {
            // tests all trackers are correct after reading them into an array;
            // this checks that the trackers can be safely stored away and analyzed later (critical for nano-scheduling)
            final List<RefMetaDataTracker> trackers = new ArrayList<RefMetaDataTracker>();
            for ( final GenomeLoc interval : intervals ) {
                final RefMetaDataTracker tracker = view.getReferenceOrderedDataForInterval(interval);
                trackers.add(tracker);
            }

            for ( int i = 0; i < trackers.size(); i++) {
                testMetaDataTrackerBindings(trackers.get(i), intervals.get(i), RODs, rodBindings);
            }
        }
    }
+
+    private void testMetaDataTrackerBindings(final RefMetaDataTracker tracker,
+                                             final GenomeLoc interval,
+                                             final List<ReadMetaDataTrackerRODStreamTest> RODs,
+                                             final List<RodBinding<Feature>> rodBindings) {
+        for ( int i = 0; i < RODs.size(); i++ ) {
+            final ReadMetaDataTrackerRODStreamTest test = RODs.get(i);
+            final List<Feature> queryFeaturesList = tracker.getValues(rodBindings.get(i));
+            final Set<Feature> queryFeatures = new HashSet<Feature>(queryFeaturesList);
+            final Set<Feature> overlaps = test.getExpectedOverlaps(interval);
+
+            Assert.assertEquals(queryFeatures.size(), overlaps.size(), "IntervalOverlappingRODsFromStream didn't return the expected set of overlapping features." +
+                    " Expected size = " + overlaps.size() + " but saw " + queryFeatures.size());
+
+            BaseTest.assertEqualsSet(queryFeatures, overlaps, "IntervalOverlappingRODsFromStream didn't return the expected set of overlapping features." +
+                    " Expected = " + Utils.join(",", overlaps) + " but saw " + Utils.join(",", queryFeatures));
+        }
+    }
+
+    static class TribbleIteratorFromCollection implements Iterator<RODRecordList> {
+        // current location
+        private final String name;
+        final Queue<GATKFeature> gatkFeatures;
+
+        public TribbleIteratorFromCollection(final String name, final GenomeLocParser genomeLocParser, final List<Feature> features) {
+            this.name = name;
+
+            this.gatkFeatures = new LinkedList<GATKFeature>();
+            for ( final Feature f : features )
+                gatkFeatures.add(new GATKFeature.TribbleGATKFeature(genomeLocParser, f, name));
+        }
+
+        @Override
+        public boolean hasNext() {
+            return ! gatkFeatures.isEmpty();
+        }
+
+        @Override
+        public RODRecordList next() {
+            final GATKFeature first = gatkFeatures.poll();
+            final Collection<GATKFeature> myFeatures = new LinkedList<GATKFeature>();
+            myFeatures.add(first);
+            while ( gatkFeatures.peek() != null && gatkFeatures.peek().getLocation().getStart() == first.getStart() )
+                myFeatures.add(gatkFeatures.poll());
+
+            GenomeLoc loc = first.getLocation();
+            for ( final GATKFeature feature : myFeatures )
+                loc = loc.merge(feature.getLocation());
+
+            return new RODRecordListImpl(name, myFeatures, loc); // is this safe?
+        }
+
+        @Override public void remove() { throw new IllegalStateException("GRRR"); }
+    }
+}
+
+
diff --git a/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/datasources/providers/LocusReferenceViewUnitTest.java b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/datasources/providers/LocusReferenceViewUnitTest.java
new file mode 100644
index 0000000..4115b97
--- /dev/null
+++ b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/datasources/providers/LocusReferenceViewUnitTest.java
@@ -0,0 +1,143 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.datasources.providers;
+
+import org.broadinstitute.gatk.engine.datasources.reads.Shard;
+import org.testng.Assert;
+import org.testng.annotations.Test;
+
+import org.broadinstitute.gatk.utils.GenomeLoc;
+import org.broadinstitute.gatk.engine.datasources.reads.MockLocusShard;
+import org.broadinstitute.gatk.engine.iterators.GenomeLocusIterator;
+import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
+
+import htsjdk.samtools.reference.ReferenceSequence;
+import htsjdk.samtools.util.StringUtil;
+
+import java.util.Collections;
+/*
+ * Copyright (c) 2009 The Broad Institute
+ *
+ * Permission is hereby granted, free of charge, to any person
+ * obtaining a copy of this software and associated documentation
+ * files (the "Software"), to deal in the Software without
+ * restriction, including without limitation the rights to use,
+ * copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the
+ * Software is furnished to do so, subject to the following
+ * conditions:
+ *
+ * The above copyright notice and this permission notice shall be
+ * included in all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+ * OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+ * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+ * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+ * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+ * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+ * OTHER DEALINGS IN THE SOFTWARE.
+ */
+
+/** Tests for viewing the reference from the perspective of a locus. */
+
+public class LocusReferenceViewUnitTest extends ReferenceViewTemplate {
+
+//
+//    /** Multiple-base pair queries should generate exceptions. */
+//    @Test(expectedExceptions=InvalidPositionException.class)
+//    public void testSingleBPFailure() {
+//        Shard shard = new LocusShard(GenomeLocParser.createGenomeLoc(0, 1, 50));
+//
+//        ShardDataProvider dataProvider = new ShardDataProvider(shard, null, sequenceFile, null);
+//        LocusReferenceView view = new LocusReferenceView(dataProvider);
+//
+//        view.getReferenceContext(shard.getGenomeLoc()).getBase();
+//    }
+
+    @Test
+    public void testOverlappingReferenceBases() {
+        Shard shard = new MockLocusShard(genomeLocParser,Collections.singletonList(genomeLocParser.createGenomeLoc(sequenceFile.getSequenceDictionary().getSequence(0).getSequenceName(),
+                                                                                                                   sequenceFile.getSequence("chrM").length() - 10,
+                                                                                                                   sequenceFile.getSequence("chrM").length())));
+        LocusShardDataProvider dataProvider = new LocusShardDataProvider(shard, null, genomeLocParser, shard.getGenomeLocs().get(0), null, sequenceFile, null);
+        LocusReferenceView view = new LocusReferenceView(dataProvider);
+
+        byte[] results = view.getReferenceBases(genomeLocParser.createGenomeLoc(sequenceFile.getSequenceDictionary().getSequence(0).getSequenceName(),
+                                                                                sequenceFile.getSequence("chrM").length() - 10,
+                                                                                sequenceFile.getSequence("chrM").length() + 9));
+        System.out.printf("results are %s%n", new String(results));
+        Assert.assertEquals(results.length, 20);
+        for (int x = 0; x < results.length; x++) {
+            if (x <= 10) Assert.assertTrue(results[x] != 'X');
+            else Assert.assertTrue(results[x] == 'X');
+        }
+    }
+
+
+    /** Queries outside the bounds of the shard should result in reference context window trimmed at the shard boundary. */
+    @Test
+    public void testBoundsFailure() {
+        Shard shard = new MockLocusShard(genomeLocParser,Collections.singletonList(genomeLocParser.createGenomeLoc(sequenceFile.getSequenceDictionary().getSequence(0).getSequenceName(), 1, 50)));
+
+        LocusShardDataProvider dataProvider = new LocusShardDataProvider(shard, null, genomeLocParser, shard.getGenomeLocs().get(0), null, sequenceFile, null);
+        LocusReferenceView view = new LocusReferenceView(dataProvider);
+
+        GenomeLoc locus = genomeLocParser.createGenomeLoc(sequenceFile.getSequenceDictionary().getSequence(0).getSequenceName(), 50, 51);
+
+        ReferenceContext rc = view.getReferenceContext(locus);
+        Assert.assertTrue(rc.getLocus().equals(locus));
+        Assert.assertTrue(rc.getWindow().equals(genomeLocParser.createGenomeLoc(sequenceFile.getSequenceDictionary().getSequence(0).getSequenceName(),50)));
+        Assert.assertTrue(rc.getBases().length == 1);
+    }
+
+
+    /**
+     * Compares the contents of the fasta and view at a specified location.
+     *
+     * @param loc
+     */
+    protected void validateLocation( GenomeLoc loc ) {
+        Shard shard = new MockLocusShard(genomeLocParser,Collections.singletonList(loc));
+        GenomeLocusIterator shardIterator = new GenomeLocusIterator(genomeLocParser,loc);
+
+        LocusShardDataProvider dataProvider = new LocusShardDataProvider(shard, null, genomeLocParser, loc, null, sequenceFile, null);
+        LocusReferenceView view = new LocusReferenceView(dataProvider);
+
+        while (shardIterator.hasNext()) {
+            GenomeLoc locus = shardIterator.next();
+
+            ReferenceSequence expectedAsSeq = sequenceFile.getSubsequenceAt(locus.getContig(), locus.getStart(), locus.getStop());
+            char expected = Character.toUpperCase(StringUtil.bytesToString(expectedAsSeq.getBases()).charAt(0));
+            char actual = view.getReferenceContext(locus).getBaseAsChar();
+
+            Assert.assertEquals(actual, expected, String.format("Value of base at position %s in shard %s does not match expected", locus.toString(), shard.getGenomeLocs())
+            );
+        }
+    }
+
+}
diff --git a/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/datasources/providers/LocusViewTemplate.java b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/datasources/providers/LocusViewTemplate.java
new file mode 100644
index 0000000..fc7857d
--- /dev/null
+++ b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/datasources/providers/LocusViewTemplate.java
@@ -0,0 +1,405 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.datasources.providers;
+
+import htsjdk.samtools.reference.ReferenceSequence;
+import htsjdk.samtools.reference.ReferenceSequenceFile;
+import htsjdk.samtools.*;
+import org.broadinstitute.gatk.utils.BaseTest;
+import org.broadinstitute.gatk.engine.datasources.reads.MockLocusShard;
+import org.broadinstitute.gatk.utils.sam.SAMReaderID;
+import org.broadinstitute.gatk.engine.datasources.reads.Shard;
+import org.broadinstitute.gatk.engine.executive.WindowMaker;
+import org.broadinstitute.gatk.engine.datasources.reads.LocusShard;
+import org.broadinstitute.gatk.engine.datasources.reads.SAMDataSource;
+import org.broadinstitute.gatk.utils.iterators.GATKSAMIterator;
+import org.broadinstitute.gatk.engine.resourcemanagement.ThreadAllocation;
+import org.broadinstitute.gatk.utils.GenomeLoc;
+import org.broadinstitute.gatk.utils.GenomeLocParser;
+import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
+import org.testng.annotations.BeforeClass;
+import org.testng.annotations.Test;
+
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.util.*;
+/**
+ * User: hanna
+ * Date: May 13, 2009
+ * Time: 4:29:08 PM
+ * BROAD INSTITUTE SOFTWARE COPYRIGHT NOTICE AND AGREEMENT
+ * Software and documentation are copyright 2005 by the Broad Institute.
+ * All rights are reserved.
+ *
+ * Users acknowledge that this software is supplied without any warranty or support.
+ * The Broad Institute is not responsible for its use, misuse, or
+ * functionality.
+ */
+
+/** Base support for testing variants of the LocusView family of classes. */
+
+public abstract class LocusViewTemplate extends BaseTest {
+    protected static ReferenceSequenceFile sequenceSourceFile = null;
+    protected GenomeLocParser genomeLocParser = null;
+
    /**
     * Builds the fake reference sequence file and the GenomeLocParser used by
     * every test in this template.  Runs once before any test method.
     *
     * @throws FileNotFoundException if the fake reference cannot be created
     */
    @BeforeClass
    public void setupGenomeLoc() throws FileNotFoundException {
        sequenceSourceFile = fakeReferenceSequenceFile();
        genomeLocParser = new GenomeLocParser(sequenceSourceFile);
    }
+
    /** A shard with no reads at all must yield contexts containing no reads. */
    @Test
    public void emptyAlignmentContextTest() {
        // empty read source
        SAMRecordIterator iterator = new SAMRecordIterator();

        GenomeLoc shardBounds = genomeLocParser.createGenomeLoc("chr1", 1, 5);
        // Unlike the other tests, this uses a real LocusShard backed by an empty SAMDataSource.
        Shard shard = new LocusShard(genomeLocParser, new SAMDataSource(null,Collections.<SAMReaderID>emptyList(),new ThreadAllocation(),null,genomeLocParser),Collections.singletonList(shardBounds),Collections.<SAMReaderID,SAMFileSpan>emptyMap());
        WindowMaker windowMaker = new WindowMaker(shard,genomeLocParser,iterator,shard.getGenomeLocs());
        WindowMaker.WindowMakerIterator window = windowMaker.next();
        LocusShardDataProvider dataProvider = new LocusShardDataProvider(shard, null, genomeLocParser, window.getLocus(), window, null, null);

        LocusView view = createView(dataProvider);

        // no reads supplied -> no reads expected anywhere in the shard
        testReadsInContext(view, shard.getGenomeLocs(), Collections.<GATKSAMRecord>emptyList());
    }
+
+    @Test
+    public void singleReadTest() {
+        GATKSAMRecord read = buildSAMRecord("read1","chr1", 1, 5);
+        SAMRecordIterator iterator = new SAMRecordIterator(read);
+
+        GenomeLoc shardBounds = genomeLocParser.createGenomeLoc("chr1", 1, 5);
+        Shard shard = new MockLocusShard(genomeLocParser,Collections.singletonList(shardBounds));
+        WindowMaker windowMaker = new WindowMaker(shard,genomeLocParser,iterator,shard.getGenomeLocs());
+        WindowMaker.WindowMakerIterator window = windowMaker.next();
+        LocusShardDataProvider dataProvider = new LocusShardDataProvider(shard, window.getSourceInfo(), genomeLocParser, window.getLocus(), window, null, null);
+
+        LocusView view = createView(dataProvider);
+
+        testReadsInContext(view, shard.getGenomeLocs(), Collections.singletonList(read));
+    }
+
+    @Test
+    public void readCoveringFirstPartTest() {
+        GATKSAMRecord read = buildSAMRecord("read1","chr1", 1, 5);
+        SAMRecordIterator iterator = new SAMRecordIterator(read);
+
+        Shard shard = new MockLocusShard(genomeLocParser,Collections.singletonList(genomeLocParser.createGenomeLoc("chr1", 1, 10)));
+        WindowMaker windowMaker = new WindowMaker(shard,genomeLocParser,iterator,shard.getGenomeLocs());
+        WindowMaker.WindowMakerIterator window = windowMaker.next();
+        LocusShardDataProvider dataProvider = new LocusShardDataProvider(shard, window.getSourceInfo(), genomeLocParser, window.getLocus(), window, null, null);
+        LocusView view = createView(dataProvider);
+
+        testReadsInContext(view, shard.getGenomeLocs(), Collections.singletonList(read));
+    }
+
+    @Test
+    public void readCoveringLastPartTest() {
+        GATKSAMRecord read = buildSAMRecord("read1","chr1", 6, 10);
+        SAMRecordIterator iterator = new SAMRecordIterator(read);
+
+        Shard shard = new MockLocusShard(genomeLocParser,Collections.singletonList(genomeLocParser.createGenomeLoc("chr1", 1, 10)));
+        WindowMaker windowMaker = new WindowMaker(shard,genomeLocParser,iterator,shard.getGenomeLocs());
+        WindowMaker.WindowMakerIterator window = windowMaker.next();
+        LocusShardDataProvider dataProvider = new LocusShardDataProvider(shard, window.getSourceInfo(), genomeLocParser, window.getLocus(), window, null, null);
+        LocusView view = createView(dataProvider);
+
+        testReadsInContext(view, shard.getGenomeLocs(), Collections.singletonList(read));
+    }
+
+    @Test
+    public void readCoveringMiddleTest() {
+        GATKSAMRecord read = buildSAMRecord("read1","chr1", 3, 7);
+        SAMRecordIterator iterator = new SAMRecordIterator(read);
+
+        Shard shard = new MockLocusShard(genomeLocParser,Collections.singletonList(genomeLocParser.createGenomeLoc("chr1", 1, 10)));
+        WindowMaker windowMaker = new WindowMaker(shard,genomeLocParser,iterator,shard.getGenomeLocs());
+        WindowMaker.WindowMakerIterator window = windowMaker.next();
+        LocusShardDataProvider dataProvider = new LocusShardDataProvider(shard, window.getSourceInfo(), genomeLocParser, window.getLocus(), window, null, null);
+        LocusView view = createView(dataProvider);
+
+        testReadsInContext(view, shard.getGenomeLocs(), Collections.singletonList(read));
+    }
+
+    @Test
+    public void readAndLocusOverlapAtLastBase() {
+        GATKSAMRecord read = buildSAMRecord("read1","chr1", 1, 5);
+        SAMRecordIterator iterator = new SAMRecordIterator(read);
+
+        Shard shard = new MockLocusShard(genomeLocParser,Collections.singletonList(genomeLocParser.createGenomeLoc("chr1", 5, 5)));
+        WindowMaker windowMaker = new WindowMaker(shard,genomeLocParser,iterator,shard.getGenomeLocs());
+        WindowMaker.WindowMakerIterator window = windowMaker.next();
+        LocusShardDataProvider dataProvider = new LocusShardDataProvider(shard, window.getSourceInfo(), genomeLocParser, window.getLocus(), window, null, null);
+        LocusView view = createView(dataProvider);
+
+        testReadsInContext(view, shard.getGenomeLocs(), Collections.singletonList(read));
+    }
+
+    @Test
+    public void readOverlappingStartTest() {
+        GATKSAMRecord read = buildSAMRecord("read1","chr1", 1, 10);
+        SAMRecordIterator iterator = new SAMRecordIterator(read);
+
+        Shard shard = new MockLocusShard(genomeLocParser,Collections.singletonList(genomeLocParser.createGenomeLoc("chr1", 6, 15)));
+        WindowMaker windowMaker = new WindowMaker(shard,genomeLocParser,iterator,shard.getGenomeLocs());
+        WindowMaker.WindowMakerIterator window = windowMaker.next();
+        LocusShardDataProvider dataProvider = new LocusShardDataProvider(shard, window.getSourceInfo(), genomeLocParser, window.getLocus(), window, null, null);
+        LocusView view = createView(dataProvider);
+
+        testReadsInContext(view, shard.getGenomeLocs(), Collections.singletonList(read));
+    }
+
+    @Test
+    public void readOverlappingEndTest() {
+        GATKSAMRecord read = buildSAMRecord("read1","chr1", 6, 15);
+        SAMRecordIterator iterator = new SAMRecordIterator(read);
+
+        Shard shard = new MockLocusShard(genomeLocParser,Collections.singletonList(genomeLocParser.createGenomeLoc("chr1", 1, 10)));
+        WindowMaker windowMaker = new WindowMaker(shard,genomeLocParser,iterator,shard.getGenomeLocs());
+        WindowMaker.WindowMakerIterator window = windowMaker.next();
+        LocusShardDataProvider dataProvider = new LocusShardDataProvider(shard, window.getSourceInfo(), genomeLocParser, window.getLocus(), window, null, null);
+        LocusView view = createView(dataProvider);
+
+        testReadsInContext(view, shard.getGenomeLocs(), Collections.singletonList(read));
+    }
+
+    @Test
+    public void readsSpanningTest() {
+        GATKSAMRecord read1 = buildSAMRecord("read1","chr1", 1, 5);
+        GATKSAMRecord read2 = buildSAMRecord("read2","chr1", 6, 10);
+        SAMRecordIterator iterator = new SAMRecordIterator(read1, read2);
+
+        Shard shard = new MockLocusShard(genomeLocParser,Collections.singletonList(genomeLocParser.createGenomeLoc("chr1", 1, 10)));
+        WindowMaker windowMaker = new WindowMaker(shard,genomeLocParser,iterator,shard.getGenomeLocs());
+        WindowMaker.WindowMakerIterator window = windowMaker.next();
+        LocusShardDataProvider dataProvider = new LocusShardDataProvider(shard, window.getSourceInfo(), genomeLocParser, window.getLocus(), window, null, null);
+        LocusView view = createView(dataProvider);
+
+        List<GATKSAMRecord> expectedReads = new ArrayList<GATKSAMRecord>();
+        Collections.addAll(expectedReads, read1, read2);
+        testReadsInContext(view, shard.getGenomeLocs(), expectedReads);
+    }
+
+    @Test
+    public void duplicateReadsTest() {
+        GATKSAMRecord read1 = buildSAMRecord("read1","chr1", 1, 5);
+        GATKSAMRecord read2 = buildSAMRecord("read2","chr1", 1, 5);
+        GATKSAMRecord read3 = buildSAMRecord("read3","chr1", 6, 10);
+        GATKSAMRecord read4 = buildSAMRecord("read4","chr1", 6, 10);
+        SAMRecordIterator iterator = new SAMRecordIterator(read1, read2, read3, read4);
+
+        Shard shard = new MockLocusShard(genomeLocParser,Collections.singletonList(genomeLocParser.createGenomeLoc("chr1", 1, 10)));
+        WindowMaker windowMaker = new WindowMaker(shard,genomeLocParser,iterator,shard.getGenomeLocs());
+        WindowMaker.WindowMakerIterator window = windowMaker.next();
+        LocusShardDataProvider dataProvider = new LocusShardDataProvider(shard, window.getSourceInfo(), genomeLocParser, window.getLocus(), window, null, null);
+        LocusView view = createView(dataProvider);
+
+        List<GATKSAMRecord> expectedReads = new ArrayList<GATKSAMRecord>();
+        Collections.addAll(expectedReads, read1, read2, read3, read4);
+        testReadsInContext(view, shard.getGenomeLocs(), expectedReads);
+    }
+
+    @Test
+    public void cascadingReadsWithinBoundsTest() {
+        GATKSAMRecord read1 = buildSAMRecord("read1","chr1", 2, 6);
+        GATKSAMRecord read2 = buildSAMRecord("read2","chr1", 3, 7);
+        GATKSAMRecord read3 = buildSAMRecord("read3","chr1", 4, 8);
+        GATKSAMRecord read4 = buildSAMRecord("read4","chr1", 5, 9);
+        SAMRecordIterator iterator = new SAMRecordIterator(read1, read2, read3, read4);
+
+        Shard shard = new MockLocusShard(genomeLocParser,Collections.singletonList(genomeLocParser.createGenomeLoc("chr1", 1, 10)));
+        WindowMaker windowMaker = new WindowMaker(shard,genomeLocParser,iterator,shard.getGenomeLocs());
+        WindowMaker.WindowMakerIterator window = windowMaker.next();
+        LocusShardDataProvider dataProvider = new LocusShardDataProvider(shard, window.getSourceInfo(), genomeLocParser, window.getLocus(), window, null, null);
+        LocusView view = createView(dataProvider);
+
+        List<GATKSAMRecord> expectedReads = new ArrayList<GATKSAMRecord>();
+        Collections.addAll(expectedReads, read1, read2, read3, read4);
+        testReadsInContext(view, shard.getGenomeLocs(), expectedReads);
+    }
+
+    @Test
+    public void cascadingReadsAtBoundsTest() {
+        GATKSAMRecord read1 = buildSAMRecord("read1","chr1", 1, 5);
+        GATKSAMRecord read2 = buildSAMRecord("read2","chr1", 2, 6);
+        GATKSAMRecord read3 = buildSAMRecord("read3","chr1", 3, 7);
+        GATKSAMRecord read4 = buildSAMRecord("read4","chr1", 4, 8);
+        GATKSAMRecord read5 = buildSAMRecord("read5","chr1", 5, 9);
+        GATKSAMRecord read6 = buildSAMRecord("read6","chr1", 6, 10);
+        SAMRecordIterator iterator = new SAMRecordIterator(read1, read2, read3, read4, read5, read6);
+
+        Shard shard = new MockLocusShard(genomeLocParser,Collections.singletonList(genomeLocParser.createGenomeLoc("chr1", 1, 10)));
+        WindowMaker windowMaker = new WindowMaker(shard,genomeLocParser,iterator,shard.getGenomeLocs());
+        WindowMaker.WindowMakerIterator window = windowMaker.next();
+        LocusShardDataProvider dataProvider = new LocusShardDataProvider(shard, window.getSourceInfo(), genomeLocParser, window.getLocus(), window, null, null);
+        LocusView view = createView(dataProvider);
+
+        List<GATKSAMRecord> expectedReads = new ArrayList<GATKSAMRecord>();
+        Collections.addAll(expectedReads, read1, read2, read3, read4, read5, read6);
+        testReadsInContext(view, shard.getGenomeLocs(), expectedReads);
+    }
+
+    /**
+     * Verifies that reads which merely overlap the shard boundaries (hanging off
+     * either edge of shard chr1:6-15) are still presented by the locus view.
+     * Reads 1-5 start before the shard; reads 10-12 end after it.
+     */
+    @Test
+    public void cascadingReadsOverlappingBoundsTest() {
+        GATKSAMRecord read01 = buildSAMRecord("read1","chr1", 1, 5);
+        GATKSAMRecord read02 = buildSAMRecord("read2","chr1", 2, 6);
+        GATKSAMRecord read03 = buildSAMRecord("read3","chr1", 3, 7);
+        GATKSAMRecord read04 = buildSAMRecord("read4","chr1", 4, 8);
+        GATKSAMRecord read05 = buildSAMRecord("read5","chr1", 5, 9);
+        GATKSAMRecord read06 = buildSAMRecord("read6","chr1", 6, 10);
+        GATKSAMRecord read07 = buildSAMRecord("read7","chr1", 7, 11);
+        GATKSAMRecord read08 = buildSAMRecord("read8","chr1", 8, 12);
+        GATKSAMRecord read09 = buildSAMRecord("read9","chr1", 9, 13);
+        GATKSAMRecord read10 = buildSAMRecord("read10","chr1", 10, 14);
+        GATKSAMRecord read11 = buildSAMRecord("read11","chr1", 11, 15);
+        GATKSAMRecord read12 = buildSAMRecord("read12","chr1", 12, 16);
+        SAMRecordIterator iterator = new SAMRecordIterator(read01, read02, read03, read04, read05, read06,
+                                                           read07, read08, read09, read10, read11, read12);
+
+        // Shard chr1:6-15 is strictly interior to the span of the reads (1-16).
+        Shard shard = new MockLocusShard(genomeLocParser,Collections.singletonList(genomeLocParser.createGenomeLoc("chr1", 6, 15)));
+        WindowMaker windowMaker = new WindowMaker(shard,genomeLocParser,iterator,shard.getGenomeLocs());
+        WindowMaker.WindowMakerIterator window = windowMaker.next();
+        LocusShardDataProvider dataProvider = new LocusShardDataProvider(shard, window.getSourceInfo(), genomeLocParser, window.getLocus(), window, null, null);
+        LocusView view = createView(dataProvider);
+
+        // All twelve reads overlap the shard somewhere, so all are expected.
+        List<GATKSAMRecord> expectedReads = new ArrayList<GATKSAMRecord>();
+        Collections.addAll(expectedReads, read01, read02, read03, read04, read05, read06,
+                           read07, read08, read09, read10, read11, read12);
+        testReadsInContext(view, shard.getGenomeLocs(), expectedReads);
+    }
+
+    /**
+     * Creates a view of the type required for testing.
+     *
+     * @param provider Shard data provider the view should be constructed over.
+     * @return The correct view to test.
+     */
+    protected abstract LocusView createView(LocusShardDataProvider provider);
+
+    /**
+     * Test the reads according to an independently derived context.
+     *
+     * @param view   The view under test, freshly created over the shard.
+     * @param bounds The genomic intervals covered by the shard.
+     * @param reads  The reads the view is expected to present.
+     */
+    protected abstract void testReadsInContext(LocusView view, List<GenomeLoc> bounds, List<GATKSAMRecord> reads);
+
+    /**
+     * Fake a reference sequence file.  Essentially, seek a header with a bunch of dummy data.
+     * (NOTE(review): "seek" is presumably a typo for "seed" — the fake supplies only a
+     * sequence dictionary; every sequence accessor throws.)
+     *
+     * @return A 'fake' reference sequence file exposing a single 1Mb contig named "chr1".
+     */
+    private static ReferenceSequenceFile fakeReferenceSequenceFile() {
+        return new ReferenceSequenceFile() {
+            public SAMSequenceDictionary getSequenceDictionary() {
+                // Single dummy contig; length chosen large enough for any test locus.
+                SAMSequenceRecord sequenceRecord = new SAMSequenceRecord("chr1", 1000000);
+                SAMSequenceDictionary dictionary = new SAMSequenceDictionary(Collections.singletonList(sequenceRecord));
+                return dictionary;
+            }
+
+            public boolean isIndexed() { return false; }
+
+            // No actual bases are backed by this fake; all sequence getters throw.
+            public ReferenceSequence nextSequence() {
+                throw new UnsupportedOperationException("Fake implementation doesn't support a getter");
+            }
+
+            public ReferenceSequence getSequence( String contig ) {
+                throw new UnsupportedOperationException("Fake implementation doesn't support a getter");
+            }
+
+            public ReferenceSequence getSubsequenceAt( String contig, long start, long stop ) {
+                throw new UnsupportedOperationException("Fake implementation doesn't support a getter");
+            }
+
+            // Nothing to rewind; intentionally a no-op.
+            public void reset() {
+                return;
+            }
+
+            public void close() throws IOException {
+            }
+        };
+    }
+
+    /**
+     * Build a SAM record featuring the absolute minimum required dataset.
+     *
+     * @param readName       Name to assign to the read.
+     * @param contig         Contig to populate.
+     * @param alignmentStart start of alignment (1-based, inclusive)
+     * @param alignmentEnd   end of alignment (1-based, inclusive)
+     *
+     * @return New SAM Record
+     */
+    protected GATKSAMRecord buildSAMRecord(String readName, String contig, int alignmentStart, int alignmentEnd) {
+        SAMFileHeader header = new SAMFileHeader();
+        header.setSequenceDictionary(sequenceSourceFile.getSequenceDictionary());
+
+        GATKSAMRecord record = new GATKSAMRecord(header);
+
+        record.setReadName(readName);
+        record.setReferenceIndex(sequenceSourceFile.getSequenceDictionary().getSequenceIndex(contig));
+        record.setAlignmentStart(alignmentStart);
+        // Single CIGAR element: all-match over the full alignment span.
+        Cigar cigar = new Cigar();
+        int len = alignmentEnd - alignmentStart + 1;
+        cigar.add(new CigarElement(len, CigarOperator.M));
+        record.setCigar(cigar);
+        // Bases/qualities are zero-filled placeholders; tests only need lengths to agree.
+        record.setReadBases(new byte[len]);
+        record.setBaseQualities(new byte[len]);
+        return record;
+    }
+
+    /** A simple iterator which iterates over a list of reads. */
+    protected class SAMRecordIterator implements GATKSAMIterator {
+        // Iterator over a private copy of the supplied reads.
+        private Iterator<SAMRecord> backingIterator = null;
+
+        /**
+         * Wraps the given reads, in order, in a read-only iterator.
+         * @param reads reads to iterate over; the varargs array is copied.
+         */
+        public SAMRecordIterator(SAMRecord... reads) {
+            List<SAMRecord> backingList = new ArrayList<SAMRecord>();
+            backingList.addAll(Arrays.asList(reads));
+            backingIterator = backingList.iterator();
+        }
+
+        public boolean hasNext() {
+            return backingIterator.hasNext();
+        }
+
+        public SAMRecord next() {
+            return backingIterator.next();
+        }
+
+        // Allows this iterator to be used directly in for-each loops.
+        public Iterator<SAMRecord> iterator() {
+            return this;
+        }
+
+        public void close() {
+            // NO-OP.
+        }
+
+        public void remove() {
+            throw new UnsupportedOperationException("Can't remove from a read-only iterator");
+        }
+    }
+
+
+}
diff --git a/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/datasources/providers/ReadReferenceViewUnitTest.java b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/datasources/providers/ReadReferenceViewUnitTest.java
new file mode 100644
index 0000000..342c9ca
--- /dev/null
+++ b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/datasources/providers/ReadReferenceViewUnitTest.java
@@ -0,0 +1,160 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.datasources.providers;
+
+import org.testng.Assert;
+import org.broadinstitute.gatk.utils.GenomeLoc;
+
+import org.testng.annotations.Test;
+
+import htsjdk.samtools.*;
+import htsjdk.samtools.reference.ReferenceSequence;
+
+/*
+ * Copyright (c) 2009 The Broad Institute
+ *
+ * Permission is hereby granted, free of charge, to any person
+ * obtaining a copy of this software and associated documentation
+ * files (the "Software"), to deal in the Software without
+ * restriction, including without limitation the rights to use,
+ * copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the
+ * Software is furnished to do so, subject to the following
+ * conditions:
+ *
+ * The above copyright notice and this permission notice shall be
+ * included in all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+ * OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+ * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+ * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+ * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+ * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+ * OTHER DEALINGS IN THE SOFTWARE.
+ */
+
+/**
+ * User: hanna
+ * Date: May 27, 2009
+ * Time: 1:04:27 PM
+ *
+ */
+
+/**
+ * Test reading the reference for a given read.
+ */
+
+public class ReadReferenceViewUnitTest extends ReferenceViewTemplate {
+
+
+    /**
+     * tests that the ReadReferenceView correctly generates X's when a read overhangs the
+     * end of a contig
+     */
+    @Test
+    public void testOverhangingRead() {
+        // Exercise no overhang, partial overhang, and near-total overhang of a 25bp read.
+        testOverhangingGivenSize(25,0);
+        testOverhangingGivenSize(25,12);
+        testOverhangingGivenSize(25,24);
+    }
+
+
+    /**
+     * a private method, that tests getting the read sequence for reads that overlap the end of the
+     * contig
+     * @param readLength the length of the read
+     * @param overlap the amount of overlap past the end of the contig
+     */
+    private void testOverhangingGivenSize(int readLength, int overlap) {
+        // Use the last contig in the dictionary so the read can run off its end.
+        SAMSequenceRecord selectedContig = sequenceFile.getSequenceDictionary().getSequences().get(sequenceFile.getSequenceDictionary().getSequences().size()-1);
+        final long contigStart = selectedContig.getSequenceLength() - (readLength - overlap - 1);
+        final long contigStop = selectedContig.getSequenceLength() + overlap;
+
+        ReadShardDataProvider dataProvider = new ReadShardDataProvider(null,genomeLocParser,null,sequenceFile,null);
+        ReadReferenceView view = new ReadReferenceView(dataProvider);
+
+        SAMRecord rec = buildSAMRecord(selectedContig.getSequenceName(),(int)contigStart,(int)contigStop);
+        // Expected reference covers only the in-bounds portion of the read.
+        ReferenceSequence expectedAsSeq = sequenceFile.getSubsequenceAt(selectedContig.getSequenceName(),(int)contigStart,selectedContig.getSequenceLength());
+        byte[] expected = expectedAsSeq.getBases();
+        byte[] actual = view.getReferenceBases(rec);
+
+        Assert.assertEquals((readLength - overlap), expected.length);
+        Assert.assertEquals(readLength, actual.length);
+        // In-bounds bases must be real reference bases; overhanging bases must be 'X'.
+        int xRange = 0;
+        for (; xRange < (readLength - overlap); xRange++) {
+            Assert.assertTrue(actual[xRange] != 'X');
+        }
+        for (; xRange < actual.length; xRange++) {
+            Assert.assertTrue(actual[xRange] == 'X');
+        }
+    }
+
+
+    /**
+     * Compares the contents of the fasta and view at a specified location.
+     * @param loc the location to validate
+     */
+    protected void validateLocation( GenomeLoc loc ) {
+        SAMRecord read = buildSAMRecord( loc.getContig(), (int)loc.getStart(), (int)loc.getStop() );
+
+        ReadShardDataProvider dataProvider = new ReadShardDataProvider(null,genomeLocParser,null,sequenceFile,null);
+        ReadReferenceView view = new ReadReferenceView(dataProvider);
+
+        ReferenceSequence expectedAsSeq = sequenceFile.getSubsequenceAt(loc.getContig(),loc.getStart(),loc.getStop());
+        byte[] expected = expectedAsSeq.getBases();
+        byte[] actual = view.getReferenceBases(read);
+
+        // Fixed a broken assertion message ("Base array at  in shard %s"): the
+        // placeholder was misplaced, producing a garbled double-spaced message.
+        org.testng.Assert.assertEquals(actual,expected,String.format("Base array at %s does not match expected",loc.toString()));
+    }
+
+
+    /**
+     * Build a SAM record featuring the absolute minimum required dataset.
+     * TODO: Blatantly copied from LocusViewTemplate.  Refactor these into a set of tools.
+     * @param contig Contig to populate.
+     * @param alignmentStart start of alignment
+     * @param alignmentEnd end of alignment
+     * @return New SAM Record
+     */
+    protected SAMRecord buildSAMRecord( String contig, int alignmentStart, int alignmentEnd ) {
+        SAMFileHeader header = new SAMFileHeader();
+        header.setSequenceDictionary(sequenceFile.getSequenceDictionary());
+
+        SAMRecord record = new SAMRecord(header);
+
+        record.setReferenceIndex(sequenceFile.getSequenceDictionary().getSequenceIndex(contig));
+        record.setAlignmentStart(alignmentStart);
+        // All-match CIGAR spanning the full alignment; no bases/qualities needed here.
+        Cigar cigar = new Cigar();
+        cigar.add(new CigarElement(alignmentEnd-alignmentStart+1, CigarOperator.M));
+        record.setCigar(cigar);
+        return record;
+    }
+
+
+}
diff --git a/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/datasources/providers/ReferenceOrderedViewUnitTest.java b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/datasources/providers/ReferenceOrderedViewUnitTest.java
new file mode 100644
index 0000000..ec26ba2
--- /dev/null
+++ b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/datasources/providers/ReferenceOrderedViewUnitTest.java
@@ -0,0 +1,157 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.datasources.providers;
+
+import htsjdk.tribble.Feature;
+import org.broadinstitute.gatk.utils.commandline.RodBinding;
+import org.broadinstitute.gatk.utils.commandline.Tags;
+import org.broadinstitute.gatk.engine.datasources.reads.MockLocusShard;
+import org.broadinstitute.gatk.engine.datasources.rmd.ReferenceOrderedDataSource;
+import org.broadinstitute.gatk.utils.refdata.tracks.RMDTrackBuilder;
+import org.broadinstitute.gatk.utils.refdata.utils.RMDTriplet;
+import org.testng.Assert;
+import org.broadinstitute.gatk.utils.BaseTest;
+import org.broadinstitute.gatk.engine.datasources.reads.Shard;
+import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
+import org.broadinstitute.gatk.utils.codecs.table.TableFeature;
+import org.broadinstitute.gatk.utils.refdata.utils.RMDTriplet.RMDStorageType;
+import org.broadinstitute.gatk.utils.GenomeLocParser;
+import org.broadinstitute.gatk.utils.fasta.CachingIndexedFastaSequenceFile;
+
+import org.testng.annotations.BeforeClass;
+import org.testng.annotations.Test;
+
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.util.Arrays;
+import java.util.Collections;
+
+import htsjdk.samtools.reference.IndexedFastaSequenceFile;
+/**
+ * User: hanna
+ * Date: May 27, 2009
+ * Time: 3:07:23 PM
+ * BROAD INSTITUTE SOFTWARE COPYRIGHT NOTICE AND AGREEMENT
+ * Software and documentation are copyright 2005 by the Broad Institute.
+ * All rights are reserved.
+ *
+ * Users acknowledge that this software is supplied without any warranty or support.
+ * The Broad Institute is not responsible for its use, misuse, or
+ * functionality.
+ */
+
+/**
+ * Test the transparent view into the reference-ordered data.  At the moment, just do some basic bindings and make
+ * sure the data comes through correctly.
+ */
+public class ReferenceOrderedViewUnitTest extends BaseTest {
+    /**
+     * Sequence file.
+     */
+    private static IndexedFastaSequenceFile seq;
+    // Parser constructed over the hg18 reference's dictionary.
+    private GenomeLocParser genomeLocParser;
+
+    /**
+     * our track builder
+     */
+    RMDTrackBuilder builder = null;
+
+    /**
+     * One-time setup: open the hg18 reference and build the shared track builder.
+     */
+    @BeforeClass
+    public void init() throws FileNotFoundException {
+        // sequence
+        seq = new CachingIndexedFastaSequenceFile(new File(hg18Reference));
+        genomeLocParser = new GenomeLocParser(seq);
+        // disable auto-index creation/locking in the RMDTrackBuilder for tests
+        builder = new RMDTrackBuilder(seq.getSequenceDictionary(),genomeLocParser,null,true,null);
+    }
+
+    /**
+     * Make sure binding to an empty list produces an empty tracker.
+     */
+    @Test
+    public void testNoBindings() {
+        Shard shard = new MockLocusShard(genomeLocParser,Collections.singletonList(genomeLocParser.createGenomeLoc("chrM",1,30)));
+        LocusShardDataProvider provider = new LocusShardDataProvider(shard, null, genomeLocParser, shard.getGenomeLocs().get(0), null, seq, Collections.<ReferenceOrderedDataSource>emptyList());
+        ReferenceOrderedView view = new ManagingReferenceOrderedView( provider );
+
+        RefMetaDataTracker tracker = view.getReferenceOrderedDataAtLocus(genomeLocParser.createGenomeLoc("chrM",10));
+        Assert.assertEquals(tracker.getValues(Feature.class).size(), 0, "The tracker should not have produced any data");
+    }
+
+    /**
+     * Test a single ROD binding.
+     */
+    @Test
+    public void testSingleBinding() {
+        String fileName = privateTestDir + "TabularDataTest.dat";
+        RMDTriplet triplet = new RMDTriplet("tableTest","Table",fileName,RMDStorageType.FILE,new Tags());
+        ReferenceOrderedDataSource dataSource = new ReferenceOrderedDataSource(triplet,builder,seq.getSequenceDictionary(),genomeLocParser,false);
+
+        Shard shard = new MockLocusShard(genomeLocParser,Collections.singletonList(genomeLocParser.createGenomeLoc("chrM",1,30)));
+
+        LocusShardDataProvider provider = new LocusShardDataProvider(shard, null, genomeLocParser, shard.getGenomeLocs().get(0), null, seq, Collections.singletonList(dataSource));
+        ReferenceOrderedView view = new ManagingReferenceOrderedView( provider );
+
+        // Query a locus inside the shard and verify the table row that comes back.
+        RefMetaDataTracker tracker = view.getReferenceOrderedDataAtLocus(genomeLocParser.createGenomeLoc("chrM",20));
+        TableFeature datum = tracker.getFirstValue(new RodBinding<TableFeature>(TableFeature.class, "tableTest"));
+
+        Assert.assertEquals(datum.get("COL1"),"C","datum parameter for COL1 is incorrect");
+        Assert.assertEquals(datum.get("COL2"),"D","datum parameter for COL2 is incorrect");
+        Assert.assertEquals(datum.get("COL3"),"E","datum parameter for COL3 is incorrect");
+    }
+
+    /**
+     * Make sure multiple bindings are visible from the view.
+     */
+    @Test
+    public void testMultipleBinding() {
+        // Bind the same tabular file twice under two different track names.
+        File file = new File(privateTestDir + "TabularDataTest.dat");
+
+        RMDTriplet testTriplet1 = new RMDTriplet("tableTest1","Table",file.getAbsolutePath(),RMDStorageType.FILE,new Tags());
+        ReferenceOrderedDataSource dataSource1 = new ReferenceOrderedDataSource(testTriplet1,builder,seq.getSequenceDictionary(),genomeLocParser,false);
+
+        RMDTriplet testTriplet2 = new RMDTriplet("tableTest2","Table",file.getAbsolutePath(),RMDStorageType.FILE,new Tags());
+        ReferenceOrderedDataSource dataSource2 = new ReferenceOrderedDataSource(testTriplet2,builder,seq.getSequenceDictionary(),genomeLocParser,false);
+
+        Shard shard = new MockLocusShard(genomeLocParser,Collections.singletonList(genomeLocParser.createGenomeLoc("chrM",1,30)));
+
+        LocusShardDataProvider provider = new LocusShardDataProvider(shard, null, genomeLocParser, shard.getGenomeLocs().get(0), null, seq, Arrays.asList(dataSource1,dataSource2));
+        ReferenceOrderedView view = new ManagingReferenceOrderedView( provider );
+
+        // Both bindings should yield the same row at the queried locus.
+        RefMetaDataTracker tracker = view.getReferenceOrderedDataAtLocus(genomeLocParser.createGenomeLoc("chrM",20));
+        TableFeature datum1 = tracker.getFirstValue(new RodBinding<TableFeature>(TableFeature.class, "tableTest1"));
+
+        Assert.assertEquals(datum1.get("COL1"),"C","datum1 parameter for COL1 is incorrect");
+        Assert.assertEquals(datum1.get("COL2"),"D","datum1 parameter for COL2 is incorrect");
+        Assert.assertEquals(datum1.get("COL3"),"E","datum1 parameter for COL3 is incorrect");
+
+        TableFeature datum2 = tracker.getFirstValue(new RodBinding<TableFeature>(TableFeature.class, "tableTest2"));
+
+        Assert.assertEquals(datum2.get("COL1"),"C","datum2 parameter for COL1 is incorrect");
+        Assert.assertEquals(datum2.get("COL2"),"D","datum2 parameter for COL2 is incorrect");
+        Assert.assertEquals(datum2.get("COL3"),"E","datum2 parameter for COL3 is incorrect");
+    }
+}
diff --git a/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/datasources/providers/ReferenceViewTemplate.java b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/datasources/providers/ReferenceViewTemplate.java
new file mode 100644
index 0000000..68ecabc
--- /dev/null
+++ b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/datasources/providers/ReferenceViewTemplate.java
@@ -0,0 +1,122 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.datasources.providers;
+
+import htsjdk.samtools.SAMSequenceRecord;
+import htsjdk.samtools.reference.IndexedFastaSequenceFile;
+import org.broadinstitute.gatk.utils.BaseTest;
+import org.broadinstitute.gatk.utils.GenomeLoc;
+import org.broadinstitute.gatk.utils.GenomeLocParser;
+import org.broadinstitute.gatk.utils.fasta.CachingIndexedFastaSequenceFile;
+import org.testng.annotations.BeforeClass;
+import org.testng.annotations.Test;
+
+import java.io.File;
+import java.io.FileNotFoundException;
+/**
+ * User: hanna
+ * Date: May 27, 2009
+ * Time: 1:12:35 PM
+ * BROAD INSTITUTE SOFTWARE COPYRIGHT NOTICE AND AGREEMENT
+ * Software and documentation are copyright 2005 by the Broad Institute.
+ * All rights are reserved.
+ *
+ * Users acknowledge that this software is supplied without any warranty or support.
+ * The Broad Institute is not responsible for its use, misuse, or
+ * functionality.
+ */
+
+/**
+ * Template for testing reference views (ReadReferenceView and LocusReferenceView).
+ */
+
+public abstract class ReferenceViewTemplate extends BaseTest {
+    /**
+     * The fasta, for comparison.
+     */
+    protected IndexedFastaSequenceFile sequenceFile = null;
+    // Parser built from the fasta's sequence dictionary.
+    protected GenomeLocParser genomeLocParser = null;
+
+    //
+    // The bulk of sequence retrieval is tested by IndexedFastaSequenceFile, but we'll run a few spot
+    // checks here to make sure that data is flowing through the LocusReferenceView.
+
+    /**
+     * Initialize the fasta.
+     */
+    @BeforeClass
+    public void initialize() throws FileNotFoundException {
+        sequenceFile = new CachingIndexedFastaSequenceFile( new File(hg18Reference) );
+        genomeLocParser = new GenomeLocParser(sequenceFile);
+    }
+
+    /**
+     * Test the initial fasta location.
+     */
+    @Test
+    public void testReferenceStart() {
+        // First 25 bases of the first contig.
+        validateLocation( genomeLocParser.createGenomeLoc(sequenceFile.getSequenceDictionary().getSequence(0).getSequenceName(),1,25) );
+    }
+
+    /**
+     * Test the end of a contig.
+     */
+    @Test
+    public void testReferenceEnd() {
+        // Test the last 25 bases of the last contig in the dictionary.
+        SAMSequenceRecord selectedContig = sequenceFile.getSequenceDictionary().getSequences().get(sequenceFile.getSequenceDictionary().getSequences().size()-1);
+        final int contigStart = selectedContig.getSequenceLength() - 24;
+        final int contigStop = selectedContig.getSequenceLength();
+        validateLocation( genomeLocParser.createGenomeLoc(selectedContig.getSequenceName(),contigStart,contigStop) );
+    }
+
+    /**
+     * Test the start of the middle contig.
+     */
+    @Test
+    public void testContigStart() {
+        // Test the first 25 bases of the middle contig.
+        int contigPosition = sequenceFile.getSequenceDictionary().getSequences().size()/2;
+        SAMSequenceRecord selectedContig = sequenceFile.getSequenceDictionary().getSequences().get(contigPosition);
+        validateLocation( genomeLocParser.createGenomeLoc(selectedContig.getSequenceName(),1,25) );
+    }
+
+
+    /**
+     * Test the end of the middle contig.
+     */
+    @Test
+    public void testContigEnd() {
+        // Test the last 25 bases of the middle contig.
+        int contigPosition = sequenceFile.getSequenceDictionary().getSequences().size()/2;
+        SAMSequenceRecord selectedContig = sequenceFile.getSequenceDictionary().getSequences().get(contigPosition);
+        final int contigStart = selectedContig.getSequenceLength() - 24;
+        final int contigStop = selectedContig.getSequenceLength();
+        validateLocation( genomeLocParser.createGenomeLoc(selectedContig.getSequenceName(),contigStart,contigStop) );
+    }
+
+    /**
+     * Compares the fasta and the view under test at the given location.
+     * @param loc the location to validate
+     */
+    protected abstract void validateLocation( GenomeLoc loc );
+}
diff --git a/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/datasources/providers/ShardDataProviderUnitTest.java b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/datasources/providers/ShardDataProviderUnitTest.java
new file mode 100644
index 0000000..3ada244
--- /dev/null
+++ b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/datasources/providers/ShardDataProviderUnitTest.java
@@ -0,0 +1,152 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.datasources.providers;
+
+import org.testng.Assert;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+import org.testng.annotations.BeforeMethod;
+
+
+import org.testng.annotations.Test;
+import org.broadinstitute.gatk.utils.BaseTest;
+
+import java.util.Collection;
+import java.util.Collections;
+import java.util.Arrays;
+/**
+ * User: hanna
+ * Date: May 27, 2009
+ * Time: 1:56:02 PM
+ * BROAD INSTITUTE SOFTWARE COPYRIGHT NOTICE AND AGREEMENT
+ * Software and documentation are copyright 2005 by the Broad Institute.
+ * All rights are reserved.
+ *
+ * Users acknowledge that this software is supplied without any warranty or support.
+ * The Broad Institute is not responsible for its use, misuse, or
+ * functionality.
+ */
+
+/**
+ * Test basic functionality of the shard data provider.
+ */
+
+public class ShardDataProviderUnitTest extends BaseTest {
+    /**
+     * Provider to test.  Should be recreated for every test.
+     */
+    private ShardDataProvider provider = null;
+
+    /**
+     * Builds a fresh, empty provider before each test method.
+     */
+    @BeforeMethod
+    public void createProvider() {
+        provider = new LocusShardDataProvider( null,null,null,null,null,null,null );
+    }
+
+    /**
+     * Test whether views are closed when the provider closes.
+     */
+    @Test
+    public void testClose() {
+        TestView testView = new TestView( provider );
+        Assert.assertFalse(testView.closed,"View is currently closed but should be open");
+
+        provider.close();
+        Assert.assertTrue(testView.closed,"View is currently open but should be closed");
+    }
+
+    /**
+     * Test whether multiple of the same view can be registered and all get a close method.
+     */
+    @Test
+    public void testMultipleClose() {
+        Collection<TestView> testViews = Arrays.asList(new TestView(provider),new TestView(provider));
+        for( TestView testView: testViews )
+            Assert.assertFalse(testView.closed,"View is currently closed but should be open");
+
+        provider.close();
+        for( TestView testView: testViews )
+            Assert.assertTrue(testView.closed,"View is currently open but should be closed");
+    }
+
+    /**
+     * Try adding a view which conflicts with some other view that's already been registered.
+     * The locals exist only for their registration side effect in the constructor.
+     */
+    @Test(expectedExceptions= ReviewedGATKException.class)
+    public void testAddViewWithExistingConflict() {
+        View initial = new ConflictingTestView( provider );
+        View conflictsWithInitial = new TestView( provider );
+    }
+
+    /**
+     * Try adding a view which has a conflict with a previously registered view.
+     * The locals exist only for their registration side effect in the constructor.
+     */
+    @Test(expectedExceptions= ReviewedGATKException.class)
+    public void testAddViewWithNewConflict() {
+        View conflictsWithInitial = new TestView( provider );
+        View initial = new ConflictingTestView( provider );
+    }
+
+    /**
+     * A simple view for testing interactions between views attached to the ShardDataProvider.
+     */
+    private class TestView implements View {
+        /**
+         * Is the test view currently closed.
+         */
+        private boolean closed = false;
+
+        /**
+         * Create a new test view wrapping the given provider.
+         * @param provider provider with which to register this view.
+         */
+        public TestView( ShardDataProvider provider ) {
+            provider.register(this);            
+        }
+
+        /**
+         * Gets conflicting views.  In this case, none conflict.
+         * @return an empty collection; this view conflicts with nothing.
+         */
+        public Collection<Class<? extends View>> getConflictingViews() { return Collections.emptyList(); }
+
+        /**
+         * Close this view.
+         */
+        public void close() { this.closed = true; }
+    }
+
+    /**
+     * Another view that conflicts with the one above.
+     */
+    private class ConflictingTestView implements View {
+        public ConflictingTestView( ShardDataProvider provider ) { provider.register(this); }
+
+        // Declares a one-way conflict against TestView.
+        public Collection<Class<? extends View>> getConflictingViews() {
+            return Collections.<Class<? extends View>>singleton(TestView.class);
+        }
+
+        public void close() {}
+    }
+}
diff --git a/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/datasources/reads/ActiveRegionShardBalancerUnitTest.java b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/datasources/reads/ActiveRegionShardBalancerUnitTest.java
new file mode 100644
index 0000000..afa7001
--- /dev/null
+++ b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/datasources/reads/ActiveRegionShardBalancerUnitTest.java
@@ -0,0 +1,103 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.datasources.reads;
+
+import htsjdk.samtools.SAMFileHeader;
+import htsjdk.samtools.SAMFileSpan;
+import htsjdk.samtools.SAMSequenceRecord;
+import org.broadinstitute.gatk.utils.BaseTest;
+import org.broadinstitute.gatk.utils.GenomeLoc;
+import org.broadinstitute.gatk.utils.GenomeLocParser;
+import org.broadinstitute.gatk.utils.interval.IntervalMergingRule;
+import org.broadinstitute.gatk.utils.sam.ArtificialSAMUtils;
+import org.broadinstitute.gatk.utils.sam.SAMReaderID;
+import org.testng.Assert;
+import org.testng.annotations.BeforeClass;
+import org.testng.annotations.Test;
+
+import java.io.FileNotFoundException;
+import java.util.*;
+
+public class ActiveRegionShardBalancerUnitTest extends BaseTest {
+    // Parser over the artificial sequence dictionary built in setup().
+    private GenomeLocParser genomeLocParser;
+    // Deliberately left null: the balancer is exercised here without a real reads data source.
+    protected SAMDataSource readsDataSource;
+
+    @BeforeClass
+    public void setup() throws FileNotFoundException {
+        // Artificial dictionary: 10 contigs, starting at index 0, each 10000 bases long.
+        final SAMFileHeader header = ArtificialSAMUtils.createArtificialSamHeader(10, 0, 10000);
+        genomeLocParser = new GenomeLocParser(header.getSequenceDictionary());
+        readsDataSource = null;
+    }
+
+    @Test
+    public void testMergingManyContigs() {
+        // All contigs at once: expect one merged shard per contig.
+        executeTest(genomeLocParser.getContigs().getSequences());
+    }
+
+    @Test
+    public void testMergingAllPointersOnSingleContig() {
+        // A single contig's pointers must collapse into exactly one shard.
+        executeTest(Arrays.asList(genomeLocParser.getContigs().getSequences().get(1)));
+    }
+
+    @Test
+    public void testMergingMultipleDiscontinuousContigs() {
+        // Non-adjacent contigs must still produce one shard each, never a merged pair.
+        final List<SAMSequenceRecord> all = genomeLocParser.getContigs().getSequences();
+        executeTest(Arrays.asList(all.get(1), all.get(3)));
+    }
+
+    /**
+     * Tiles each contig with adjacent 10bp FilePointers and verifies that the balancer
+     * merges them into exactly one shard spanning the whole contig.
+     * @param records the contigs to tile with file pointers.
+     */
+    private void executeTest(final Collection<SAMSequenceRecord> records) {
+        final ActiveRegionShardBalancer balancer = new ActiveRegionShardBalancer();
+
+        // One expected merged interval set per contig, in the same order pointers are created.
+        final List<Set<GenomeLoc>> expectedLocs = new LinkedList<>();
+        final List<FilePointer> pointers = new LinkedList<>();
+
+        for ( final SAMSequenceRecord record : records ) {
+            final int size = 10;
+            int end = 0;
+            for ( int i = 0; i < record.getSequenceLength(); i += size) {
+                final int myEnd = i + size - 1;
+                // Remember the last tile's stop so the expected merged interval can be built below.
+                end = myEnd;
+                final GenomeLoc loc = genomeLocParser.createGenomeLoc(record.getSequenceName(), i, myEnd);
+                // No backing file spans are needed to exercise the merging logic.
+                final Map<SAMReaderID, SAMFileSpan> fileSpans = Collections.emptyMap();
+                final FilePointer fp = new FilePointer(fileSpans, IntervalMergingRule.ALL, Collections.singletonList(loc));
+                pointers.add(fp);
+            }
+            // The balancer should collapse all of this contig's pointers into a single interval.
+            expectedLocs.add(Collections.singleton(genomeLocParser.createGenomeLoc(record.getSequenceName(), 0, end)));
+        }
+
+        balancer.initialize(readsDataSource, pointers.iterator(), genomeLocParser);
+
+        int i = 0;
+        int nShardsFound = 0;
+        for ( final Shard shard : balancer ) {
+            nShardsFound++;
+            Assert.assertEquals(new HashSet<>(shard.getGenomeLocs()), expectedLocs.get(i++));
+        }
+        Assert.assertEquals(nShardsFound, records.size(), "Didn't find exactly one shard for each contig in the sequence dictionary");
+    }
+}
diff --git a/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/datasources/reads/DownsamplerBenchmark.java b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/datasources/reads/DownsamplerBenchmark.java
new file mode 100644
index 0000000..553ca99
--- /dev/null
+++ b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/datasources/reads/DownsamplerBenchmark.java
@@ -0,0 +1,94 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.datasources.reads;
+
+import com.google.caliper.Param;
+import org.broadinstitute.gatk.engine.WalkerManager;
+import org.broadinstitute.gatk.utils.downsampling.DownsamplingMethod;
+import org.broadinstitute.gatk.engine.walkers.LocusWalker;
+
+/**
+ * Caliper benchmark skeleton intended to measure the cost of read downsampling.
+ *
+ * @author mhanna
+ * @since Apr 22, 2011
+ */
+public class DownsamplerBenchmark extends ReadProcessingBenchmark {
+    // Caliper-injected path to the BAM file to benchmark.
+    @Param
+    private String bamFile;
+
+    // Caliper-injected cap on the number of reads to process.
+    @Param
+    private Integer maxReads;
+
+    @Override
+    public String getBAMFile() { return bamFile; }
+
+    @Override
+    public Integer getMaxReads() { return maxReads; }
+
+    // Caliper-injected downsampling strategy; only referenced by the disabled
+    // benchmark body below.
+    @Param
+    private Downsampling downsampling;
+
+//    public void timeDownsampling(int reps) {
+//        for(int i = 0; i < reps; i++) {
+//            SAMFileReader reader = new SAMFileReader(inputFile);
+//            ReadProperties readProperties = new ReadProperties(Collections.<SAMReaderID>singletonList(new SAMReaderID(inputFile,new Tags())),
+//                    reader.getFileHeader(),
+//                    SAMFileHeader.SortOrder.coordinate,
+//                    false,
+//                    SAMFileReader.ValidationStringency.SILENT,
+//                    downsampling.create(),
+//                    new ValidationExclusion(Collections.singletonList(ValidationExclusion.TYPE.ALL)),
+//                    Collections.<ReadFilter>emptyList(),
+//                    Collections.<ReadTransformer>emptyList(),
+//                    false,
+//                    (byte)0,
+//                    false);
+//
+//            GenomeLocParser genomeLocParser = new GenomeLocParser(reader.getFileHeader().getSequenceDictionary());
+//            // Filter unmapped reads.  TODO: is this always strictly necessary?  Who in the GATK normally filters these out?
+//            Iterator<SAMRecord> readIterator = new FilteringIterator(reader.iterator(),new UnmappedReadFilter());
+//            LegacyLocusIteratorByState locusIteratorByState = new LegacyLocusIteratorByState(readIterator,readProperties,genomeLocParser, LegacyLocusIteratorByState.sampleListForSAMWithoutReadGroups());
+//            while(locusIteratorByState.hasNext()) {
+//                locusIteratorByState.next().getLocation();
+//            }
+//            reader.close();
+//        }
+//    }
+
+    // NOTE(review): with timeDownsampling commented out this benchmark currently measures
+    // nothing — confirm whether the body above should be ported to the current API or deleted.
+
+    /** Downsampling strategies the benchmark can be parameterized over. */
+    private enum Downsampling {
+        NONE {
+            @Override
+            DownsamplingMethod create() { return DownsamplingMethod.NONE; }
+        },
+        PER_SAMPLE {
+            // Use whatever default downsampling a LocusWalker declares.
+            @Override
+            DownsamplingMethod create() { return WalkerManager.getDownsamplingMethod(LocusWalker.class); }
+        };
+        abstract DownsamplingMethod create();
+    }
+}
diff --git a/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/datasources/reads/FilePointerUnitTest.java b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/datasources/reads/FilePointerUnitTest.java
new file mode 100644
index 0000000..7c016c3
--- /dev/null
+++ b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/datasources/reads/FilePointerUnitTest.java
@@ -0,0 +1,130 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.datasources.reads;
+
+import htsjdk.samtools.reference.IndexedFastaSequenceFile;
+import htsjdk.samtools.GATKBAMFileSpan;
+import htsjdk.samtools.GATKChunk;
+import org.broadinstitute.gatk.utils.BaseTest;
+import org.broadinstitute.gatk.utils.commandline.Tags;
+import org.broadinstitute.gatk.utils.GenomeLocParser;
+import org.broadinstitute.gatk.utils.fasta.CachingIndexedFastaSequenceFile;
+import org.broadinstitute.gatk.utils.interval.IntervalMergingRule;
+import org.broadinstitute.gatk.utils.sam.SAMReaderID;
+import org.testng.Assert;
+import org.testng.annotations.BeforeMethod;
+import org.testng.annotations.Test;
+
+import java.io.File;
+import java.io.FileNotFoundException;
+
+/**
+ * Unit tests for {@link FilePointer#combine}, covering disjoint, overlapping,
+ * and one-sided interval/file-span combinations under both interval merging rules.
+ */
+public class FilePointerUnitTest extends BaseTest {
+    // Reference sequence backing the GenomeLocParser; rebuilt before every test.
+    private IndexedFastaSequenceFile seq;
+    private GenomeLocParser genomeLocParser;
+    // Shared dummy reader identity used for every file span added in these tests.
+    private SAMReaderID readerID = new SAMReaderID("samFile",new Tags());
+
+    /**
+     * This function does the setup of our parser, before each method call.
+     * <p/>
+     * Called before every test case method.
+     */
+    @BeforeMethod
+    public void doForEachTest() throws FileNotFoundException {
+        // Build the parser from the hg18 reference's sequence dictionary.
+        seq = new CachingIndexedFastaSequenceFile(new File(hg18Reference));
+        genomeLocParser = new GenomeLocParser(seq.getSequenceDictionary());
+    }
+
+    /**
+     * Combining pointers over adjacent-but-disjoint intervals: under ALL the intervals
+     * merge into one; under OVERLAPPING_ONLY they stay separate but share combined spans.
+     */
+    @Test
+    public void testFilePointerCombineDisjoint() {
+        FilePointer one = new FilePointer(IntervalMergingRule.ALL, genomeLocParser.createGenomeLoc("chr1",1,5));
+        one.addFileSpans(readerID,new GATKBAMFileSpan(new GATKChunk(0,1)));
+        FilePointer two = new FilePointer(IntervalMergingRule.ALL, genomeLocParser.createGenomeLoc("chr1",6,10));
+        two.addFileSpans(readerID,new GATKBAMFileSpan(new GATKChunk(1,2)));
+
+        FilePointer result = new FilePointer(IntervalMergingRule.ALL, genomeLocParser.createGenomeLoc("chr1",1,10));
+        result.addFileSpans(readerID,new GATKBAMFileSpan(new GATKChunk(0,2)));
+
+        // combine() must be symmetric in its arguments.
+        Assert.assertEquals(one.combine(genomeLocParser,two),result,"Combination of two file pointers is incorrect");
+        Assert.assertEquals(two.combine(genomeLocParser,one),result,"Combination of two file pointers is incorrect");
+
+        //Now test that adjacent (but disjoint) intervals are properly handled with OVERLAPPING_ONLY
+        one = new FilePointer(IntervalMergingRule.OVERLAPPING_ONLY, genomeLocParser.createGenomeLoc("chr1",1,5));
+        one.addFileSpans(readerID,new GATKBAMFileSpan(new GATKChunk(0,1)));
+        two = new FilePointer(IntervalMergingRule.OVERLAPPING_ONLY, genomeLocParser.createGenomeLoc("chr1",6,10));
+        two.addFileSpans(readerID,new GATKBAMFileSpan(new GATKChunk(1,2)));
+
+        // OVERLAPPING_ONLY keeps the two adjacent intervals distinct in the combined pointer.
+        result = new FilePointer(IntervalMergingRule.OVERLAPPING_ONLY,
+                genomeLocParser.createGenomeLoc("chr1",1,5),
+                genomeLocParser.createGenomeLoc("chr1",6,10));
+        result.addFileSpans(readerID,new GATKBAMFileSpan(new GATKChunk(0,2)));
+
+        Assert.assertEquals(one.combine(genomeLocParser,two),result,"Combination of two file pointers is incorrect");
+        Assert.assertEquals(two.combine(genomeLocParser,one),result,"Combination of two file pointers is incorrect");
+    }
+
+    /**
+     * Combining pointers over genuinely overlapping intervals: both merging rules
+     * must collapse them into a single covering interval with merged spans.
+     */
+    @Test
+    public void testFilePointerCombineJoint() {
+        FilePointer one = new FilePointer(IntervalMergingRule.ALL, genomeLocParser.createGenomeLoc("chr1",1,5));
+        one.addFileSpans(readerID,new GATKBAMFileSpan(new GATKChunk(0,2)));
+        FilePointer two = new FilePointer(IntervalMergingRule.ALL, genomeLocParser.createGenomeLoc("chr1",2,6));
+        two.addFileSpans(readerID,new GATKBAMFileSpan(new GATKChunk(1,3)));
+
+        FilePointer result = new FilePointer(IntervalMergingRule.ALL, genomeLocParser.createGenomeLoc("chr1",1,6));
+        result.addFileSpans(readerID,new GATKBAMFileSpan(new GATKChunk(0,3)));
+
+        Assert.assertEquals(one.combine(genomeLocParser,two),result,"Combination of two file pointers is incorrect");
+        Assert.assertEquals(two.combine(genomeLocParser,one),result,"Combination of two file pointers is incorrect");
+
+        //Repeat the tests for OVERLAPPING_ONLY
+        one = new FilePointer(IntervalMergingRule.OVERLAPPING_ONLY, genomeLocParser.createGenomeLoc("chr1",1,5));
+        one.addFileSpans(readerID,new GATKBAMFileSpan(new GATKChunk(0,2)));
+        two = new FilePointer(IntervalMergingRule.OVERLAPPING_ONLY, genomeLocParser.createGenomeLoc("chr1",2,6));
+        two.addFileSpans(readerID,new GATKBAMFileSpan(new GATKChunk(1,3)));
+
+        result = new FilePointer(IntervalMergingRule.OVERLAPPING_ONLY, genomeLocParser.createGenomeLoc("chr1",1,6));
+        result.addFileSpans(readerID,new GATKBAMFileSpan(new GATKChunk(0,3)));
+
+        Assert.assertEquals(one.combine(genomeLocParser,two),result,"Combination of two file pointers is incorrect");
+        Assert.assertEquals(two.combine(genomeLocParser,one),result,"Combination of two file pointers is incorrect");
+    }
+
+    /**
+     * Combining a populated pointer with one that has no file spans: the result keeps
+     * the populated pointer's spans over the union of the intervals.
+     */
+    @Test
+    public void testFilePointerCombineOneSided() {
+        FilePointer filePointer = new FilePointer(IntervalMergingRule.ALL, genomeLocParser.createGenomeLoc("chr1",1,5));
+        filePointer.addFileSpans(readerID,new GATKBAMFileSpan(new GATKChunk(0,1)));
+        FilePointer empty = new FilePointer(IntervalMergingRule.ALL, genomeLocParser.createGenomeLoc("chr1",6,10));
+        // Do not add file spans to empty result
+
+        FilePointer result = new FilePointer(IntervalMergingRule.ALL, genomeLocParser.createGenomeLoc("chr1",1,10));
+        result.addFileSpans(readerID,new GATKBAMFileSpan(new GATKChunk(0,1)));
+        Assert.assertEquals(filePointer.combine(genomeLocParser,empty),result,"Combination of two file pointers is incorrect");
+        Assert.assertEquals(empty.combine(genomeLocParser,filePointer),result,"Combination of two file pointers is incorrect");
+    }
+}
diff --git a/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/datasources/reads/GATKBAMIndexUnitTest.java b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/datasources/reads/GATKBAMIndexUnitTest.java
new file mode 100644
index 0000000..13f3569
--- /dev/null
+++ b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/datasources/reads/GATKBAMIndexUnitTest.java
@@ -0,0 +1,113 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.datasources.reads;
+
+import htsjdk.samtools.SAMFileReader;
+import htsjdk.samtools.SAMSequenceDictionary;
+import org.broadinstitute.gatk.utils.BaseTest;
+import org.broadinstitute.gatk.utils.exceptions.UserException;
+import org.testng.Assert;
+import org.testng.annotations.BeforeClass;
+import org.testng.annotations.Test;
+
+import java.io.File;
+import java.io.FileNotFoundException;
+
+/**
+ * Test basic functionality in the GATK's implementation of the BAM index classes.
+ */
+public class GATKBAMIndexUnitTest extends BaseTest {
+    // BAM whose companion .bai index is the subject of these tests.
+    private static File bamFile = new File(validationDataLocation+"MV1994.selected.bam");
+
+    /**
+     * Index file forming the source of all unit tests.
+     */
+    private static File bamIndexFile = new File(validationDataLocation+"MV1994.selected.bam.bai");
+
+    /**
+     * Storage for the index itself.
+     */
+    private GATKBAMIndex bamIndex;
+
+    /**
+     * Sequences.
+     */
+    private SAMSequenceDictionary sequenceDictionary;
+
+
+    @BeforeClass
+    public void init() throws FileNotFoundException {
+        // NOTE(review): SAMFileReader is deprecated in recent htsjdk in favor of
+        // SamReaderFactory — confirm before any htsjdk upgrade.
+        SAMFileReader reader = new SAMFileReader(bamFile);
+        this.sequenceDictionary = reader.getFileHeader().getSequenceDictionary();
+        reader.close();
+
+        bamIndex = new GATKBAMIndex(bamIndexFile, sequenceDictionary);
+    }
+
+    @Test
+    public void testNumberAndSizeOfIndexLevels() {
+        // The correct values for this test are pulled directly from the
+        // SAM Format Specification v1.3-r882, Section 4.1.1, last paragraph.
+        Assert.assertEquals(GATKBAMIndex.getNumIndexLevels(),6,"Incorrect number of levels in BAM index");
+
+        // Each level's size is (last bin - first bin + 1), spelled out to mirror the spec.
+        // Level 0
+        Assert.assertEquals(GATKBAMIndex.getFirstBinInLevel(0),0);
+        Assert.assertEquals(bamIndex.getLevelSize(0),1);
+
+        // Level 1
+        Assert.assertEquals(GATKBAMIndex.getFirstBinInLevel(1),1);
+        Assert.assertEquals(bamIndex.getLevelSize(1),8-1+1);
+
+        // Level 2
+        Assert.assertEquals(GATKBAMIndex.getFirstBinInLevel(2),9);
+        Assert.assertEquals(bamIndex.getLevelSize(2),72-9+1);
+
+        // Level 3
+        Assert.assertEquals(GATKBAMIndex.getFirstBinInLevel(3),73);
+        Assert.assertEquals(bamIndex.getLevelSize(3),584-73+1);
+
+        // Level 4
+        Assert.assertEquals(GATKBAMIndex.getFirstBinInLevel(4),585);
+        Assert.assertEquals(bamIndex.getLevelSize(4),4680-585+1);
+
+        // Level 5
+        Assert.assertEquals(GATKBAMIndex.getFirstBinInLevel(5),4681);
+        Assert.assertEquals(bamIndex.getLevelSize(5),37448-4681+1);
+    }
+
+    // A .bai truncated exactly on a word boundary must be reported as malformed, not crash.
+    @Test( expectedExceptions = UserException.MalformedFile.class )
+    public void testDetectTruncatedBamIndexWordBoundary() {
+        GATKBAMIndex index = new GATKBAMIndex(new File(privateTestDir + "truncated_at_word_boundary.bai"), sequenceDictionary);
+        index.readReferenceSequence(0);
+    }
+
+    // Same check for truncation mid-word.
+    @Test( expectedExceptions = UserException.MalformedFile.class )
+    public void testDetectTruncatedBamIndexNonWordBoundary() {
+        GATKBAMIndex index = new GATKBAMIndex(new File(privateTestDir + "truncated_at_non_word_boundary.bai"), sequenceDictionary);
+        index.readReferenceSequence(0);
+    }
+
+}
diff --git a/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/datasources/reads/GATKWalkerBenchmark.java b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/datasources/reads/GATKWalkerBenchmark.java
new file mode 100644
index 0000000..1a42dd0
--- /dev/null
+++ b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/datasources/reads/GATKWalkerBenchmark.java
@@ -0,0 +1,156 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.datasources.reads;
+
+import com.google.caliper.Param;
+import org.broadinstitute.gatk.engine.walkers.*;
+import org.broadinstitute.gatk.utils.commandline.Output;
+import org.broadinstitute.gatk.utils.commandline.Tags;
+import org.broadinstitute.gatk.engine.GenomeAnalysisEngine;
+import org.broadinstitute.gatk.engine.arguments.GATKArgumentCollection;
+import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
+import org.broadinstitute.gatk.engine.filters.ReadFilter;
+import org.broadinstitute.gatk.engine.filters.UnmappedReadFilter;
+import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
+import org.broadinstitute.gatk.utils.refdata.utils.RMDTriplet;
+import org.broadinstitute.gatk.utils.classloader.JVMUtils;
+import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
+import org.broadinstitute.gatk.utils.sam.SAMReaderID;
+
+import java.io.File;
+import java.io.PrintStream;
+import java.util.Collections;
+
+/**
+ * Caliper benchmark that drives a full GenomeAnalysisEngine run with a selectable
+ * walker, measuring end-to-end traversal performance over a BAM file.
+ *
+ * @author mhanna
+ * @since Feb 25, 2011
+ */
+public class GATKWalkerBenchmark extends ReadProcessingBenchmark {
+    // Caliper-injected path to the BAM file to benchmark.
+    @Param
+    private String bamFile;
+
+    // Caliper-injected cap on the number of reads to process.
+    @Param
+    private Integer maxReads;
+
+    // Caliper-injected path to the reference FASTA.
+    @Param
+    private String referenceFile;
+
+    // Caliper-injected choice of walker to drive through the engine.
+    @Param
+    private WalkerType walkerType;
+
+    @Override
+    public String getBAMFile() { return bamFile; }
+
+    @Override
+    public Integer getMaxReads() { return maxReads; }
+
+    // Currently defers entirely to the superclass; kept as a hook for
+    // benchmark-specific setup.
+    @Override
+    public void setUp() {
+        super.setUp();
+    }
+
+    /**
+     * Benchmark body: builds a fresh GenomeAnalysisEngine per repetition and runs
+     * the selected walker end-to-end over the input BAM.
+     * @param reps number of repetitions requested by Caliper.
+     */
+    public void timeWalkerPerformance(final int reps) {
+        for(int i = 0; i < reps; i++) {
+            GenomeAnalysisEngine engine = new GenomeAnalysisEngine();
+
+            // Establish the argument collection
+            GATKArgumentCollection argCollection = new GATKArgumentCollection();
+            argCollection.referenceFile = new File(referenceFile);
+            argCollection.samFiles = Collections.singletonList(inputFile.getAbsolutePath());
+
+            engine.setArguments(argCollection);
+            // Bugs in the engine mean that this has to be set twice.
+            engine.setSAMFileIDs(Collections.singletonList(new SAMReaderID(inputFile,new Tags())));
+            engine.setFilters(Collections.<ReadFilter>singletonList(new UnmappedReadFilter()));
+            engine.setReferenceMetaDataFiles(Collections.<RMDTriplet>emptyList());
+
+            // Create the walker
+            engine.setWalker(walkerType.create());
+
+            engine.execute();
+        }
+    }
+
+    /** The walkers this benchmark knows how to construct. */
+    private enum WalkerType {
+        COUNT_READS {
+            @Override
+            Walker create() { return new CountReadsPerformanceWalker(); }
+        },
+        COUNT_BASES_IN_READ {
+            @Override
+            Walker create() { return new CountBasesInReadPerformanceWalker(); }
+        },
+        COUNT_LOCI {
+            @Override
+            Walker create() {
+                CountLociPerformanceWalker walker = new CountLociPerformanceWalker();
+                // Inject System.out into the walker's @Output field via reflection,
+                // since no engine output system is wired up in this benchmark.
+                JVMUtils.setFieldValue(JVMUtils.findField(CountLociPerformanceWalker.class,"out"),walker,System.out);
+                return walker;
+            }
+        };
+        abstract Walker create();
+    }
+}
+
+/** Counts loci and prints the total; stands in for the production CountLoci tool. */
+class CountLociPerformanceWalker extends TestCountLociWalker {
+    // NOTE: Added this output during porting. Previous version of test was reaching out of engine
+    // and into production o.b.g.tools.walkers.qc.CountLoci.
+    @Output
+    PrintStream out;
+
+    @Override
+    public void onTraversalDone(Long result) {
+        // Emit the final locus count once traversal completes.
+        out.println(result);
+    }
+}
+
+/** Trivial subclass exposing TestCountReadsWalker under a benchmark-specific name. */
+class CountReadsPerformanceWalker extends TestCountReadsWalker {
+}
+
+/**
+ * Read walker that tallies A/C/G/T occurrences across all reads it visits.
+ * map() contributes 1 per read; reduce() sums those contributions.
+ */
+class CountBasesInReadPerformanceWalker extends ReadWalker<Integer,Long> {
+    // Per-base tallies accumulated across every read seen by map().
+    private long aCount;
+    private long cCount;
+    private long gCount;
+    private long tCount;
+
+    public Integer map(ReferenceContext ref, GATKSAMRecord read, RefMetaDataTracker tracker) {
+        final byte[] bases = read.getReadBases();
+        for (int idx = 0; idx < bases.length; idx++) {
+            final byte base = bases[idx];
+            // Only the four canonical uppercase bases are counted; anything else is ignored.
+            if (base == 'A')      aCount++;
+            else if (base == 'C') cCount++;
+            else if (base == 'G') gCount++;
+            else if (base == 'T') tCount++;
+        }
+        return 1;
+    }
+
+    public Long reduceInit() { return 0L; }
+    public Long reduce(Integer value, Long accum) { return accum + value; }
+}
diff --git a/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/datasources/reads/IntervalOverlapFilteringIteratorUnitTest.java b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/datasources/reads/IntervalOverlapFilteringIteratorUnitTest.java
new file mode 100644
index 0000000..817fcc1
--- /dev/null
+++ b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/datasources/reads/IntervalOverlapFilteringIteratorUnitTest.java
@@ -0,0 +1,150 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.datasources.reads;
+
+import htsjdk.samtools.SAMFileHeader;
+import htsjdk.samtools.SAMRecord;
+import htsjdk.samtools.SAMSequenceRecord;
+import org.broadinstitute.gatk.utils.GenomeLoc;
+import org.broadinstitute.gatk.utils.GenomeLocParser;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+import org.broadinstitute.gatk.utils.sam.ArtificialSAMUtils;
+import org.testng.Assert;
+import org.testng.annotations.BeforeClass;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
+/**
+ * Tests that IntervalOverlapFilteringIterator keeps exactly the reads overlapping the
+ * requested intervals, including edge cases around unmapped and oddly-flagged reads.
+ */
+public class IntervalOverlapFilteringIteratorUnitTest {
+
+    private SAMFileHeader header;
+    private GenomeLoc firstContig;
+    private GenomeLoc secondContig;
+
+    /** Basic aligned and mapped read. */
+    private SAMRecord readMapped;
+
+    /** Read with no contig specified in the read, -L UNMAPPED */
+    private SAMRecord readNoReference;
+
+    /** This read has a start position, but is flagged that it's not mapped. */
+    private SAMRecord readUnmappedFlag;
+
+    /** This read is from the second contig. */
+    private SAMRecord readSecondContig;
+
+    /** This read says it's aligned, but actually has an unknown start. */
+    private SAMRecord readUnknownStart;
+
+    /** The above reads in the order one would expect to find them in a sorted BAM. */
+    private List<SAMRecord> testReads;
+
+    @BeforeClass
+    public void init() {
+        // Artificial header: 3 contigs, twice the default read length each.
+        header = ArtificialSAMUtils.createArtificialSamHeader(3, 1, ArtificialSAMUtils.DEFAULT_READ_LENGTH * 2);
+        GenomeLocParser genomeLocParser = new GenomeLocParser(header.getSequenceDictionary());
+        SAMSequenceRecord record;
+
+        // Intervals spanning the full length of the first two contigs.
+        record = header.getSequence(0);
+        firstContig = genomeLocParser.createGenomeLoc(record.getSequenceName(), 1, record.getSequenceLength());
+        record = header.getSequence(1);
+        secondContig = genomeLocParser.createGenomeLoc(record.getSequenceName(), 1, record.getSequenceLength());
+
+        readMapped = createMappedRead("mapped", 1);
+
+        readUnmappedFlag = createMappedRead("unmappedFlagged", 2);
+        readUnmappedFlag.setReadUnmappedFlag(true);
+
+        readSecondContig = createMappedRead("secondContig", 3);
+        readSecondContig.setReferenceName(secondContig.getContig());
+
+        /* This read says it's aligned, but to a contig not in the header. */
+        SAMRecord readUnknownContig = createMappedRead("unknownContig", 4);
+        readUnknownContig.setReferenceName("unknownContig");
+
+        readUnknownStart = createMappedRead("unknownStart", 1);
+        readUnknownStart.setAlignmentStart(SAMRecord.NO_ALIGNMENT_START);
+
+        readNoReference = createUnmappedRead("unmappedNoReference");
+
+        // Order mimics a coordinate-sorted BAM: mapped reads first, fully unmapped last.
+        testReads = new ArrayList<SAMRecord>();
+        testReads.add(readMapped);
+        testReads.add(readUnmappedFlag);
+        testReads.add(readUnknownStart);
+        testReads.add(readSecondContig);
+        testReads.add(readUnknownContig);
+        testReads.add(readNoReference);
+    }
+
+    /**
+     * Each row: the intervals to filter by, and the reads expected to survive filtering.
+     * Note the unknown-contig read never appears in any expected list — presumably the
+     * iterator always drops reads aligned to contigs absent from the intervals.
+     */
+    @DataProvider(name = "filteringIteratorTestData")
+    public Object[][] getFilteringIteratorTestData() {
+        return new Object[][] {
+                new Object[] {Arrays.asList(firstContig), Arrays.asList(readMapped, readUnmappedFlag, readUnknownStart)},
+                new Object[] {Arrays.asList(GenomeLoc.UNMAPPED), Arrays.asList(readNoReference)},
+                new Object[] {Arrays.asList(firstContig, secondContig), Arrays.asList(readMapped, readUnmappedFlag, readUnknownStart, readSecondContig)}
+        };
+    }
+
+    @Test(dataProvider = "filteringIteratorTestData")
+    public void testFilteringIterator(List<GenomeLoc> locs, List<SAMRecord> expected) {
+        IntervalOverlapFilteringIterator filterIter = new IntervalOverlapFilteringIterator(
+                ArtificialSAMUtils.createReadIterator(testReads), locs);
+
+        // Drain the iterator and compare the surviving reads, in order.
+        List<SAMRecord> actual = new ArrayList<SAMRecord>();
+        while (filterIter.hasNext()) {
+            actual.add(filterIter.next());
+        }
+        Assert.assertEquals(actual, expected);
+    }
+
+    // Mixing mapped intervals with the UNMAPPED sentinel is unsupported and must throw
+    // at construction time.
+    @Test(expectedExceptions = ReviewedGATKException.class)
+    public void testMappedAndUnmapped() {
+        new IntervalOverlapFilteringIterator(
+                ArtificialSAMUtils.createReadIterator(testReads),
+                Arrays.asList(firstContig, GenomeLoc.UNMAPPED));
+    }
+
+    /** Builds a read with no reference index and no alignment start (fully unmapped). */
+    private SAMRecord createUnmappedRead(String name) {
+        return ArtificialSAMUtils.createArtificialRead(
+                header,
+                name,
+                SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX,
+                SAMRecord.NO_ALIGNMENT_START,
+                ArtificialSAMUtils.DEFAULT_READ_LENGTH);
+    }
+
+    /** Builds a read aligned to the first contig at the given start. */
+    private SAMRecord createMappedRead(String name, int start) {
+        return ArtificialSAMUtils.createArtificialRead(
+                header,
+                name,
+                0,
+                start,
+                ArtificialSAMUtils.DEFAULT_READ_LENGTH);
+    }
+}
diff --git a/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/datasources/reads/MockLocusShard.java b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/datasources/reads/MockLocusShard.java
new file mode 100644
index 0000000..d006fca
--- /dev/null
+++ b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/datasources/reads/MockLocusShard.java
@@ -0,0 +1,49 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.datasources.reads;
+
+import org.broadinstitute.gatk.utils.sam.SAMReaderID;
+import org.broadinstitute.gatk.engine.resourcemanagement.ThreadAllocation;
+import org.broadinstitute.gatk.utils.GenomeLoc;
+import org.broadinstitute.gatk.utils.GenomeLocParser;
+
+import java.util.List;
+import java.util.Collections;
+
+/**
+ * A mock locus shard, usable for infrastructure that requires a shard to behave properly.
+ *
+ * @author mhanna
+ * @version 0.1
+ */
+public class MockLocusShard extends LocusShard {
+    public MockLocusShard(final GenomeLocParser genomeLocParser,final List<GenomeLoc> intervals) {
+        super(  genomeLocParser,
+                new SAMDataSource(null, Collections.<SAMReaderID>emptyList(),new ThreadAllocation(),null,genomeLocParser),
+                intervals,
+                null);
+    }
+}
diff --git a/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/datasources/reads/PicardBaselineBenchmark.java b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/datasources/reads/PicardBaselineBenchmark.java
new file mode 100644
index 0000000..90c7559
--- /dev/null
+++ b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/datasources/reads/PicardBaselineBenchmark.java
@@ -0,0 +1,101 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.datasources.reads;
+
+import com.google.caliper.Param;
+import com.google.caliper.SimpleBenchmark;
+import htsjdk.samtools.util.SamLocusIterator;
+import htsjdk.samtools.SAMFileReader;
+import htsjdk.samtools.SAMRecord;
+import htsjdk.samtools.util.CloseableIterator;
+
+import java.io.File;
+import java.util.Iterator;
+
+/**
+ * Caliper benchmark measuring baseline Picard/htsjdk read-processing
+ * performance (raw BAM decompression, OQ tag extraction, and
+ * SamLocusIterator traversal) for comparison against GATK's own engine.
+ *
+ * @author mhanna
+ */
+public class PicardBaselineBenchmark extends ReadProcessingBenchmark {
+    @Param
+    private String bamFile;
+
+    @Param
+    private Integer maxReads;
+
+    @Override
+    public String getBAMFile() { return bamFile; }
+
+    @Override
+    public Integer getMaxReads() { return maxReads; }
+    
+    public void timeDecompressBamFile(int reps) {
+        for(int i = 0; i < reps; i++) {
+            SAMFileReader reader = new SAMFileReader(inputFile);
+            CloseableIterator<SAMRecord> iterator = reader.iterator();
+            while(iterator.hasNext())
+                iterator.next();
+            iterator.close();
+            reader.close();
+        }
+    }
+
+    public void timeExtractTag(int reps) {
+        for(int i = 0; i < reps; i++) {
+            SAMFileReader reader = new SAMFileReader(inputFile);
+            CloseableIterator<SAMRecord> iterator = reader.iterator();
+            while(iterator.hasNext()) {
+                SAMRecord read = iterator.next();
+                read.getAttribute("OQ");
+            }
+            iterator.close();
+            reader.close();
+        }
+    }
+
+    public void timeSamLocusIterator(int reps) {
+        for(int i = 0; i < reps; i++) {
+            SAMFileReader reader = new SAMFileReader(inputFile);
+            long loci = 0;
+
+            SamLocusIterator samLocusIterator = new SamLocusIterator(reader);
+            samLocusIterator.setEmitUncoveredLoci(false);
+            Iterator<SamLocusIterator.LocusInfo> workhorseIterator = samLocusIterator.iterator();
+
+            while(workhorseIterator.hasNext()) {
+                SamLocusIterator.LocusInfo locusInfo = workhorseIterator.next();
+                // Use the value of locusInfo to avoid optimization.
+                if(locusInfo != null) loci++;
+            }
+            System.out.printf("Total loci = %d%n",loci);
+
+            reader.close();
+        }
+    }
+}
diff --git a/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/datasources/reads/ReadProcessingBenchmark.java b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/datasources/reads/ReadProcessingBenchmark.java
new file mode 100644
index 0000000..a8973e1
--- /dev/null
+++ b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/datasources/reads/ReadProcessingBenchmark.java
@@ -0,0 +1,83 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.datasources.reads;
+
+import com.google.caliper.Param;
+import com.google.caliper.SimpleBenchmark;
+import htsjdk.samtools.SAMFileReader;
+import htsjdk.samtools.SAMFileWriter;
+import htsjdk.samtools.SAMFileWriterFactory;
+import htsjdk.samtools.SAMRecord;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+
+import java.io.File;
+import java.io.IOException;
+
+/**
+ * Base class for Caliper read-processing benchmarks.  setUp() copies the
+ * first getMaxReads() reads of getBAMFile() into an indexed temporary BAM
+ * that subclasses benchmark against; tearDown() deletes it afterwards.
+ *
+ * @author mhanna
+ */
+public abstract class ReadProcessingBenchmark extends SimpleBenchmark {
+    protected abstract String getBAMFile();
+    protected abstract Integer getMaxReads();
+
+    protected File inputFile;
+
+    @Override
+    public void setUp() {
+        SAMFileReader fullInputFile = new SAMFileReader(new File(getBAMFile()));
+
+        File tempFile = null;
+        try {
+            tempFile = File.createTempFile("testfile_"+getMaxReads(),".bam");
+        }
+        catch(IOException ex) {
+            throw new ReviewedGATKException("Unable to create temporary BAM",ex);
+        }
+        SAMFileWriterFactory factory = new SAMFileWriterFactory();
+        factory.setCreateIndex(true);
+        SAMFileWriter writer = factory.makeBAMWriter(fullInputFile.getFileHeader(),true,tempFile);
+
+        long numReads = 0;
+        for(SAMRecord read: fullInputFile) {
+            if(numReads++ >= getMaxReads())
+                break;
+            writer.addAlignment(read);
+        }
+
+        writer.close();
+
+        inputFile = tempFile;
+    }
+
+    @Override
+    public void tearDown() {
+        inputFile.delete();
+    }
+}
diff --git a/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/datasources/reads/ReadShardBalancerUnitTest.java b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/datasources/reads/ReadShardBalancerUnitTest.java
new file mode 100644
index 0000000..20a99f7
--- /dev/null
+++ b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/datasources/reads/ReadShardBalancerUnitTest.java
@@ -0,0 +1,197 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.datasources.reads;
+
+import htsjdk.samtools.*;
+import org.broadinstitute.gatk.utils.ValidationExclusion;
+import org.broadinstitute.gatk.utils.downsampling.DownsampleType;
+import org.broadinstitute.gatk.utils.downsampling.DownsamplingMethod;
+import org.broadinstitute.gatk.engine.filters.ReadFilter;
+import org.broadinstitute.gatk.engine.resourcemanagement.ThreadAllocation;
+import org.broadinstitute.gatk.utils.BaseTest;
+import org.broadinstitute.gatk.utils.GenomeLocParser;
+import org.broadinstitute.gatk.utils.commandline.Tags;
+import org.broadinstitute.gatk.utils.sam.ArtificialSAMUtils;
+import org.broadinstitute.gatk.utils.sam.ArtificialSingleSampleReadStream;
+import org.broadinstitute.gatk.utils.sam.SAMReaderID;
+import org.testng.Assert;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+import java.io.File;
+import java.util.ArrayList;
+import java.util.Arrays;
+
+public class ReadShardBalancerUnitTest extends BaseTest {
+
+    /**
+     * Tests to ensure that ReadShardBalancer works as expected and does not place shard boundaries
+     * at inappropriate places, such as within an alignment start position
+     */
+    private static class ReadShardBalancerTest extends TestDataProvider {
+        private int numContigs;
+        private int numStacksPerContig;
+        private int stackSize;
+        private int numUnmappedReads;
+        private DownsamplingMethod downsamplingMethod;
+        private int expectedReadCount;
+
+        private SAMFileHeader header;
+        private SAMReaderID testBAM;
+
+        public ReadShardBalancerTest( int numContigs,
+                                      int numStacksPerContig,
+                                      int stackSize,
+                                      int numUnmappedReads,
+                                      int downsamplingTargetCoverage ) {
+            super(ReadShardBalancerTest.class);
+
+            this.numContigs = numContigs;
+            this.numStacksPerContig = numStacksPerContig;
+            this.stackSize = stackSize;
+            this.numUnmappedReads = numUnmappedReads;
+
+            this.downsamplingMethod = new DownsamplingMethod(DownsampleType.BY_SAMPLE, downsamplingTargetCoverage, null);
+            this.expectedReadCount = Math.min(stackSize, downsamplingTargetCoverage) * numStacksPerContig * numContigs + numUnmappedReads;
+
+            setName(String.format("%s: numContigs=%d numStacksPerContig=%d stackSize=%d numUnmappedReads=%d downsamplingTargetCoverage=%d",
+                                  getClass().getSimpleName(), numContigs, numStacksPerContig, stackSize, numUnmappedReads, downsamplingTargetCoverage));
+        }
+
+        public void run() {
+            createTestBAM();
+
+            SAMDataSource dataSource = new SAMDataSource(null, // Reference not used in this test.
+                                                         Arrays.asList(testBAM),
+                                                         new ThreadAllocation(),
+                                                         null,
+                                                         new GenomeLocParser(header.getSequenceDictionary()),
+                                                         false,
+                                                         ValidationStringency.SILENT,
+                                                         ReadShard.DEFAULT_MAX_READS,  // reset ReadShard.MAX_READS to ReadShard.DEFAULT_MAX_READS for each test
+                                                         downsamplingMethod,
+                                                         new ValidationExclusion(),
+                                                         new ArrayList<ReadFilter>(),
+                                                         false);
+
+            Iterable<Shard> shardIterator = dataSource.createShardIteratorOverAllReads(new ReadShardBalancer());
+
+            SAMRecord readAtEndOfLastShard = null;
+            int totalReadsSeen = 0;
+
+            for ( Shard shard : shardIterator ) {
+                int numContigsThisShard = 0;
+                SAMRecord lastRead = null;
+
+                for ( SAMRecord read : shard.iterator() ) {
+                    totalReadsSeen++;
+
+                    if ( lastRead == null ) {
+                        numContigsThisShard = 1;
+                    }
+                    else if ( ! read.getReadUnmappedFlag() && ! lastRead.getReferenceIndex().equals(read.getReferenceIndex()) ) {
+                        numContigsThisShard++;
+                    }
+
+                    // If the last read from the previous shard is not unmapped, we have to make sure
+                    // that no reads in this shard start at the same position
+                    if ( readAtEndOfLastShard != null && ! readAtEndOfLastShard.getReadUnmappedFlag() ) {
+                        Assert.assertFalse(readAtEndOfLastShard.getReferenceIndex().equals(read.getReferenceIndex()) &&
+                                           readAtEndOfLastShard.getAlignmentStart() == read.getAlignmentStart(),
+                                           String.format("Reads from alignment start position %d:%d are split across multiple shards",
+                                                         read.getReferenceIndex(), read.getAlignmentStart()));
+                    }
+
+                    lastRead = read;
+                }
+
+                // There should never be reads from more than 1 contig in a shard (ignoring unmapped reads)
+                Assert.assertTrue(numContigsThisShard == 1, "found a shard with reads from multiple contigs");
+
+                readAtEndOfLastShard = lastRead;
+            }
+
+            Assert.assertEquals(totalReadsSeen, expectedReadCount, "did not encounter the expected number of reads");
+        }
+
+        private void createTestBAM() {
+            header = ArtificialSAMUtils.createArtificialSamHeader(numContigs, 1, 100000);
+            SAMReadGroupRecord readGroup = new SAMReadGroupRecord("foo");
+            readGroup.setSample("testSample");
+            header.addReadGroup(readGroup);
+            ArtificialSingleSampleReadStream artificialReads = new ArtificialSingleSampleReadStream(header,
+                                                                                                    "foo",
+                                                                                                    numContigs,
+                                                                                                    numStacksPerContig,
+                                                                                                    stackSize,
+                                                                                                    stackSize,
+                                                                                                    1,
+                                                                                                    100,
+                                                                                                    50,
+                                                                                                    150,
+                                                                                                    numUnmappedReads);
+
+            final File testBAMFile = createTempFile("SAMDataSourceFillShardBoundaryTest", ".bam");
+
+            SAMFileWriter bamWriter = new SAMFileWriterFactory().setCreateIndex(true).makeBAMWriter(header, true, testBAMFile);
+            for ( SAMRecord read : artificialReads ) {
+                bamWriter.addAlignment(read);
+            }
+            bamWriter.close();
+
+            testBAM =  new SAMReaderID(testBAMFile, new Tags());
+
+            new File(testBAM.getSamFilePath().replace(".bam", ".bai")).deleteOnExit();
+            new File(testBAM.getSamFilePath() + ".bai").deleteOnExit();
+        }
+    }
+
+    @DataProvider(name = "ReadShardBalancerTestDataProvider")
+    public Object[][] createReadShardBalancerTests() {
+        for ( int numContigs = 1; numContigs <= 3; numContigs++ ) {
+            for ( int numStacksPerContig : Arrays.asList(1, 2, 4) ) {
+                // Use crucial read shard boundary values as the stack sizes
+                for ( int stackSize : Arrays.asList(ReadShard.DEFAULT_MAX_READS / 2, ReadShard.DEFAULT_MAX_READS / 2 + 10, ReadShard.DEFAULT_MAX_READS, ReadShard.DEFAULT_MAX_READS - 1, ReadShard.DEFAULT_MAX_READS + 1, ReadShard.DEFAULT_MAX_READS * 2) ) {
+                    for ( int numUnmappedReads : Arrays.asList(0, ReadShard.DEFAULT_MAX_READS / 2, ReadShard.DEFAULT_MAX_READS * 2) ) {
+                        // The first value will result in no downsampling at all, the others in some downsampling
+                        for ( int downsamplingTargetCoverage : Arrays.asList(ReadShard.DEFAULT_MAX_READS * 10, ReadShard.DEFAULT_MAX_READS, ReadShard.DEFAULT_MAX_READS / 2) ) {
+                            new ReadShardBalancerTest(numContigs, numStacksPerContig, stackSize, numUnmappedReads, downsamplingTargetCoverage);
+                        }
+                    }
+                }
+            }
+        }
+
+        return ReadShardBalancerTest.getTests(ReadShardBalancerTest.class);
+    }
+
+    @Test(dataProvider = "ReadShardBalancerTestDataProvider")
+    public void runReadShardBalancerTest( ReadShardBalancerTest test ) {
+        logger.warn("Running test: " + test);
+
+        test.run();
+    }
+}
diff --git a/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/datasources/reads/SAMDataSourceUnitTest.java b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/datasources/reads/SAMDataSourceUnitTest.java
new file mode 100644
index 0000000..16f43f9
--- /dev/null
+++ b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/datasources/reads/SAMDataSourceUnitTest.java
@@ -0,0 +1,268 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.datasources.reads;
+
+import htsjdk.samtools.reference.IndexedFastaSequenceFile;
+import htsjdk.samtools.*;
+import org.broadinstitute.gatk.utils.BaseTest;
+import org.broadinstitute.gatk.utils.commandline.Tags;
+import org.broadinstitute.gatk.utils.ValidationExclusion;
+import org.broadinstitute.gatk.engine.filters.ReadFilter;
+import org.broadinstitute.gatk.engine.iterators.ReadTransformer;
+import org.broadinstitute.gatk.utils.iterators.GATKSAMIterator;
+import org.broadinstitute.gatk.engine.resourcemanagement.ThreadAllocation;
+import org.broadinstitute.gatk.utils.GenomeLoc;
+import org.broadinstitute.gatk.utils.GenomeLocParser;
+import org.broadinstitute.gatk.utils.exceptions.UserException;
+import org.broadinstitute.gatk.utils.fasta.CachingIndexedFastaSequenceFile;
+import org.broadinstitute.gatk.utils.interval.IntervalMergingRule;
+import org.broadinstitute.gatk.utils.sam.SAMReaderID;
+import org.testng.annotations.AfterMethod;
+import org.testng.annotations.BeforeMethod;
+import org.testng.annotations.Test;
+
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+
+import static org.testng.Assert.*;
+
+/**
+ * <p/>
+ * Class SAMDataSourceUnitTest
+ * <p/>
+ * Unit tests of the simple SAM/BAM data source (SAMDataSource).
+ */
+public class SAMDataSourceUnitTest extends BaseTest {
+
+    // TODO: These legacy tests should really be replaced with a more comprehensive suite of tests for SAMDataSource
+
+    private List<SAMReaderID> readers;
+    private File referenceFile;
+    private IndexedFastaSequenceFile seq;
+    private GenomeLocParser genomeLocParser;
+
+    /**
+     * This function does the setup of our parser, before each method call.
+     * <p/>
+     * Called before every test case method.
+     */
+    @BeforeMethod
+    public void doForEachTest() throws FileNotFoundException {
+        readers = new ArrayList<SAMReaderID>();
+
+        // sequence
+        referenceFile = new File(b36KGReference);
+        seq = new CachingIndexedFastaSequenceFile(referenceFile);
+        genomeLocParser = new GenomeLocParser(seq.getSequenceDictionary());
+    }
+
+    /**
+     * Tears down the test fixture after each call.
+     * <p/>
+     * Called after every test case method.
+     */
+    @AfterMethod
+    public void undoForEachTest() {
+        seq = null;
+        readers.clear();
+    }
+
+
+    /** Test out that we can shard the file and iterate over every read */
+    @Test
+    public void testLinearBreakIterateAll() {
+        logger.warn("Executing testLinearBreakIterateAll");
+
+        // setup the data
+        readers.add(new SAMReaderID(new File(validationDataLocation+"/NA12878.chrom6.SLX.SRP000032.2009_06.selected.bam"),new Tags()));
+
+        // the sharding strat.
+        SAMDataSource data = new SAMDataSource(
+                referenceFile,
+                readers,
+                new ThreadAllocation(),
+                null,
+                genomeLocParser,
+                false,
+                ValidationStringency.SILENT,
+                null,
+                null,
+                new ValidationExclusion(),
+                new ArrayList<ReadFilter>(),
+                false);
+
+        Iterable<Shard> strat = data.createShardIteratorOverMappedReads(new LocusShardBalancer());
+        int count = 0;
+
+        try {
+            for (Shard sh : strat) {
+                int readCount = 0;
+                count++;
+
+                GenomeLoc firstLocus = sh.getGenomeLocs().get(0), lastLocus = sh.getGenomeLocs().get(sh.getGenomeLocs().size()-1);
+                logger.debug("Start : " + firstLocus.getStart() + " stop : " + lastLocus.getStop() + " contig " + firstLocus.getContig());
+                logger.debug("count = " + count);
+                GATKSAMIterator datum = data.seek(sh);
+
+                // for the first couple of shards make sure we can see the reads
+                if (count < 5) {
+                    for (SAMRecord r : datum) {
+                    }
+                    readCount++;
+                }
+                datum.close();
+
+                // if we're over 100 shards, break out
+                if (count > 100) {
+                    break;
+                }
+            }
+        }
+        catch (UserException.CouldNotReadInputFile e) {
+            e.printStackTrace();  // Log the unexpected read failure for diagnosis before failing the test.
+            fail("testLinearBreakIterateAll: We Should get a UserException.CouldNotReadInputFile exception");
+        }
+    }
+
+    /** Test that we clear program records when requested */
+    @Test
+    public void testRemoveProgramRecords() {
+        logger.warn("Executing testRemoveProgramRecords");
+
+        // setup the data
+        readers.add(new SAMReaderID(new File(b37GoodBAM),new Tags()));
+
+        // use defaults
+        SAMDataSource data = new SAMDataSource(
+                referenceFile,
+                readers,
+                new ThreadAllocation(),
+                null,
+                genomeLocParser,
+                false,
+                ValidationStringency.SILENT,
+                null,
+                null,
+                new ValidationExclusion(),
+                new ArrayList<ReadFilter>(),
+                false);
+
+        List<SAMProgramRecord> defaultProgramRecords = data.getHeader().getProgramRecords();
+        assertTrue(defaultProgramRecords.size() != 0, "testRemoveProgramRecords: No program records found when using default constructor");
+
+        boolean removeProgramRecords = false;
+        data = new SAMDataSource(
+                referenceFile,
+                readers,
+                new ThreadAllocation(),
+                null,
+                genomeLocParser,
+                false,
+                ValidationStringency.SILENT,
+                null,
+                null,
+                new ValidationExclusion(),
+                new ArrayList<ReadFilter>(),
+                Collections.<ReadTransformer>emptyList(),
+                false,
+                (byte) -1,
+                removeProgramRecords,
+                false,
+                null, IntervalMergingRule.ALL);
+
+        List<SAMProgramRecord> dontRemoveProgramRecords = data.getHeader().getProgramRecords();
+        assertEquals(dontRemoveProgramRecords, defaultProgramRecords, "testRemoveProgramRecords: default program records differ from removeProgramRecords = false");
+
+        removeProgramRecords = true;
+        data = new SAMDataSource(
+                referenceFile,
+                readers,
+                new ThreadAllocation(),
+                null,
+                genomeLocParser,
+                false,
+                ValidationStringency.SILENT,
+                null,
+                null,
+                new ValidationExclusion(),
+                new ArrayList<ReadFilter>(),
+                Collections.<ReadTransformer>emptyList(),
+                false,
+                (byte) -1,
+                removeProgramRecords,
+                false,
+                null, IntervalMergingRule.ALL);
+
+        List<SAMProgramRecord> doRemoveProgramRecords = data.getHeader().getProgramRecords();
+        assertTrue(doRemoveProgramRecords.isEmpty(), "testRemoveProgramRecords: program records not cleared when removeProgramRecords = true");
+    }
+
+    @Test(expectedExceptions = UserException.class)
+    public void testFailOnReducedReads() {
+        readers.add(new SAMReaderID(new File(privateTestDir + "old.reduced.bam"), new Tags()));
+
+        SAMDataSource data = new SAMDataSource(
+                referenceFile,
+                readers,
+                new ThreadAllocation(),
+                null,
+                genomeLocParser,
+                false,
+                ValidationStringency.SILENT,
+                null,
+                null,
+                new ValidationExclusion(),
+                new ArrayList<ReadFilter>(),
+                false);
+    }
+
+    @Test(expectedExceptions = UserException.class)
+    public void testFailOnReducedReadsRemovingProgramRecords() {
+        readers.add(new SAMReaderID(new File(privateTestDir + "old.reduced.bam"), new Tags()));
+
+        SAMDataSource data = new SAMDataSource(
+                referenceFile,
+                readers,
+                new ThreadAllocation(),
+                null,
+                genomeLocParser,
+                false,
+                ValidationStringency.SILENT,
+                null,
+                null,
+                new ValidationExclusion(),
+                new ArrayList<ReadFilter>(),
+                Collections.<ReadTransformer>emptyList(),
+                false,
+                (byte) -1,
+                true,
+                false,
+                null, IntervalMergingRule.ALL);
+    }
+}
diff --git a/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/datasources/reads/SAMReaderIDUnitTest.java b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/datasources/reads/SAMReaderIDUnitTest.java
new file mode 100644
index 0000000..b8bf0ea
--- /dev/null
+++ b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/datasources/reads/SAMReaderIDUnitTest.java
@@ -0,0 +1,50 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.datasources.reads;
+
+import org.broadinstitute.gatk.utils.BaseTest;
+import org.broadinstitute.gatk.utils.commandline.Tags;
+import org.broadinstitute.gatk.utils.sam.SAMReaderID;
+import org.testng.Assert;
+import org.testng.annotations.Test;
+
+import java.io.File;
+
+public class SAMReaderIDUnitTest extends BaseTest {
+
+    @Test
+    public void testSAMReaderIDHashingAndEquality() {
+        // Two SAMReaderIDs that refer to the same BAM -- one through a relative
+        // path, one through the equivalent absolute path -- must be equal per
+        // equals() and must produce identical hash codes.
+        final File relativeFile = new File(publicTestDir + "exampleBAM.bam");
+        final File absoluteFile = new File(relativeFile.getAbsolutePath());
+
+        final SAMReaderID idFromRelativePath = new SAMReaderID(relativeFile, new Tags());
+        final SAMReaderID idFromAbsolutePath = new SAMReaderID(absoluteFile, new Tags());
+
+        Assert.assertEquals(idFromRelativePath, idFromAbsolutePath, "Absolute-path and relative-path SAMReaderIDs not equal according to equals()");
+        Assert.assertEquals(idFromRelativePath.hashCode(), idFromAbsolutePath.hashCode(), "Absolute-path and relative-path SAMReaderIDs have different hash codes");
+    }
+}
diff --git a/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/datasources/reads/SeekableBufferedStreamUnitTest.java b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/datasources/reads/SeekableBufferedStreamUnitTest.java
new file mode 100644
index 0000000..201d9c4
--- /dev/null
+++ b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/datasources/reads/SeekableBufferedStreamUnitTest.java
@@ -0,0 +1,104 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.datasources.reads;
+
+import htsjdk.samtools.seekablestream.SeekableBufferedStream;
+import htsjdk.samtools.seekablestream.SeekableFileStream;
+import org.broadinstitute.gatk.utils.BaseTest;
+import org.testng.Assert;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+import java.io.File;
+import java.io.IOException;
+
+/**
+ * Test basic functionality in SeekableBufferedStream.
+ */
+public class SeekableBufferedStreamUnitTest extends BaseTest {
+    private static File InputFile = new File(validationDataLocation + "megabyteZeros.dat");
+
+    final private int BUFFERED_STREAM_BUFFER_SIZE = 100;
+    private byte buffer[] = new byte[BUFFERED_STREAM_BUFFER_SIZE * 10];
+
+
+    @DataProvider(name = "BasicArgumentsDivisible")
+    public Integer[][] DivisableReads() {
+        return new Integer[][]{{1}, {4}, {5}, {10}, {20}, {50}, {100}};
+    }
+
+    @DataProvider(name = "BasicArgumentsIndivisibleAndSmall")
+    public Integer[][] InDivisableReadsSmall() {
+        return new Integer[][]{{3}, {11}, {31}, {51}, {77}, {99}};
+    }
+
+    @DataProvider(name = "BasicArgumentsIndivisibleYetLarge")
+    public Integer[][] InDivisableReadsLarge() {
+        return new Integer[][]{{101}, {151}, {205}, {251}, {301}};
+    }
+
+
+    private void testReadsLength(int length) throws IOException {
+        final int READ_SIZE=100000; //file is 10^6, so make this smaller to be safe.
+
+        SeekableFileStream fileStream = new SeekableFileStream(InputFile);
+        SeekableBufferedStream bufferedStream = new SeekableBufferedStream(fileStream, BUFFERED_STREAM_BUFFER_SIZE);
+
+        for (int i = 0; i < READ_SIZE / length; ++i) {
+            Assert.assertEquals(bufferedStream.read(buffer, 0, length), length);
+        }
+
+    }
+
+    // These tests fail because SeekableBuffered stream may return _less_ than the amount you are asking for.
+    // make sure that you wrap reads with while-loops.  If these test start failing (meaning that the reads work properly,
+    // the layer of protection built into GATKBamIndex can be removed.
+    //
+    // pdexheimer, Jan 2015 - SeekableBufferedStream no longer returns less than the expected amount.
+    // Renaming testIndivisableSmallReadsFAIL to testIndivisableSmallReadsPASS and removing the expected exception
+    // If this bug regresses, the while loop will need to be re-introduced into GATKBamIndex.read()
+
+    @Test(dataProvider = "BasicArgumentsIndivisibleAndSmall", enabled = true)
+    public void testIndivisableSmallReadsPASS(Integer readLength) throws IOException {
+        testReadsLength(readLength);
+    }
+
+    //Evidently, if you ask for a read length that's larger than the inernal buffer,
+    //SeekableBufferedStreamdoes something else and gives you what you asked for
+
+    @Test(dataProvider = "BasicArgumentsIndivisibleYetLarge", enabled = true)
+    public void testIndivisableLargeReadsPASS(Integer readLength) throws IOException {
+        testReadsLength(readLength);
+    }
+
+    // if the readlength divides the buffer, there are no failures
+    @Test(dataProvider = "BasicArgumentsDivisible", enabled = true)
+    public void testDivisableReadsPASS(Integer readLength) throws IOException {
+        testReadsLength(readLength);
+    }
+
+
+}
diff --git a/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/datasources/reads/TheoreticalMinimaBenchmark.java b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/datasources/reads/TheoreticalMinimaBenchmark.java
new file mode 100644
index 0000000..039ca4d
--- /dev/null
+++ b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/datasources/reads/TheoreticalMinimaBenchmark.java
@@ -0,0 +1,114 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.datasources.reads;
+
+import com.google.caliper.Param;
+import htsjdk.samtools.Cigar;
+import htsjdk.samtools.CigarElement;
+import htsjdk.samtools.SAMFileReader;
+import htsjdk.samtools.SAMRecord;
+import htsjdk.samtools.util.CloseableIterator;
+
+import java.io.File;
+
+/**
+ * Created by IntelliJ IDEA.
+ * User: mhanna
+ * Date: Apr 22, 2011
+ * Time: 4:01:23 PM
+ * To change this template use File | Settings | File Templates.
+ */
+/**
+ * Caliper benchmark measuring the theoretical minimum cost of iterating over
+ * every base / every CIGAR element of a BAM file via the raw SAMFileReader,
+ * i.e. without any GATK engine overhead.
+ */
+public class TheoreticalMinimaBenchmark extends ReadProcessingBenchmark {
+    @Param
+    private String bamFile;
+
+    @Param
+    private Integer maxReads;
+
+    @Override
+    public String getBAMFile() { return bamFile; }
+
+    @Override
+    public Integer getMaxReads() { return maxReads; }
+
+    /** Times a pass that touches every base of every read, tallying A/C/G/T counts. */
+    public void timeIterateOverEachBase(int reps) {
+        // Use print, not printf: inputFile may contain '%' characters which would
+        // be misinterpreted as format specifiers by printf.
+        System.out.print("Processing " + inputFile);
+        for(int i = 0; i < reps; i++) {
+            SAMFileReader reader = new SAMFileReader(inputFile);
+            CloseableIterator<SAMRecord> iterator = reader.iterator();
+            try {
+                long As=0,Cs=0,Gs=0,Ts=0;
+                while(iterator.hasNext()) {
+                    SAMRecord read = iterator.next();
+                    for(byte base: read.getReadBases()) {
+                        switch(base) {
+                            case 'A': As++; break;
+                            case 'C': Cs++; break;
+                            case 'G': Gs++; break;
+                            case 'T': Ts++; break;
+                        }
+                    }
+                }
+                System.out.printf("As = %d; Cs = %d; Gs = %d; Ts = %d; total = %d%n",As,Cs,Gs,Ts,As+Cs+Gs+Ts);
+            }
+            finally {
+                // Close even if iteration throws, so repeated reps don't leak readers.
+                iterator.close();
+                reader.close();
+            }
+        }
+    }
+
+    /** Times a pass that expands every CIGAR element one position at a time. */
+    public void timeIterateOverCigarString(int reps) {
+        for(int i = 0; i < reps; i++) {
+            long matchMismatches = 0;
+            long insertions = 0;
+            long deletions = 0;
+            long others = 0;
+
+            SAMFileReader reader = new SAMFileReader(inputFile);
+            CloseableIterator<SAMRecord> iterator = reader.iterator();
+            try {
+                while(iterator.hasNext()) {
+                    SAMRecord read = iterator.next();
+
+                    Cigar cigar = read.getCigar();
+                    for(CigarElement cigarElement: cigar.getCigarElements()) {
+                        int elementSize = cigarElement.getLength();
+                        while(elementSize > 0) {
+                            switch(cigarElement.getOperator()) {
+                                case M: case EQ: case X: matchMismatches++; break;
+                                case I: insertions++; break;
+                                case D: deletions++; break;
+                                default: others++; break;
+                            }
+                            elementSize--;
+                        }
+                    }
+                }
+                System.out.printf("Ms = %d; Is = %d; Ds = %d; others = %d; total = %d%n",matchMismatches,insertions,deletions,others,matchMismatches+insertions+deletions+others);
+            }
+            finally {
+                iterator.close();
+                reader.close();
+            }
+        }
+    }
+
+}
diff --git a/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/datasources/reference/ReferenceDataSourceIntegrationTest.java b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/datasources/reference/ReferenceDataSourceIntegrationTest.java
new file mode 100644
index 0000000..36cbb58
--- /dev/null
+++ b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/datasources/reference/ReferenceDataSourceIntegrationTest.java
@@ -0,0 +1,75 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.datasources.reference;
+
+import org.broadinstitute.gatk.engine.walkers.WalkerTest;
+import org.broadinstitute.gatk.utils.exceptions.UserException;
+import org.testng.annotations.Test;
+import org.testng.Assert;
+
+import java.io.File;
+import java.io.IOException;
+
+public class ReferenceDataSourceIntegrationTest extends WalkerTest {
+
+    @Test
+    public void testReferenceWithMissingFaiFile() throws IOException {
+        // A reference with a .dict companion but no .fai index must be rejected
+        // with MissingReferenceFaiFile before any traversal starts.
+        final File fastaStub = createTempFile("dummy", ".fasta");
+        final File sequenceDict = new File(fastaStub.getAbsolutePath().replace(".fasta", ".dict"));
+        sequenceDict.deleteOnExit();
+        Assert.assertTrue(sequenceDict.createNewFile());
+
+        final String commandLine =
+            " -T TestPrintReadsWalker" +
+            " -R " + fastaStub.getAbsolutePath() +
+            " -I " + privateTestDir + "NA12878.4.snippet.bam" +
+            " -o %s";
+
+        executeTest("testReferenceWithMissingFaiFile",
+                    new WalkerTestSpec(commandLine, 1, UserException.MissingReferenceFaiFile.class));
+    }
+
+    @Test
+    public void testReferenceWithMissingDictFile() throws IOException {
+        // A reference with a .fai index but no .dict companion must be rejected
+        // with MissingReferenceDictFile before any traversal starts.
+        final File fastaStub = createTempFile("dummy", ".fasta");
+        final File faiIndex = new File(fastaStub.getAbsolutePath() + ".fai");
+        faiIndex.deleteOnExit();
+        Assert.assertTrue(faiIndex.createNewFile());
+
+        final String commandLine =
+            " -T TestPrintReadsWalker" +
+            " -R " + fastaStub.getAbsolutePath() +
+            " -I " + privateTestDir + "NA12878.4.snippet.bam" +
+            " -o %s";
+
+        executeTest("testReferenceWithMissingDictFile",
+                    new WalkerTestSpec(commandLine, 1, UserException.MissingReferenceDictFile.class));
+    }
+}
diff --git a/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/datasources/rmd/ReferenceOrderedDataPoolUnitTest.java b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/datasources/rmd/ReferenceOrderedDataPoolUnitTest.java
new file mode 100644
index 0000000..15bc5b7
--- /dev/null
+++ b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/datasources/rmd/ReferenceOrderedDataPoolUnitTest.java
@@ -0,0 +1,208 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.datasources.rmd;
+
+import org.broadinstitute.gatk.utils.commandline.Tags;
+import org.broadinstitute.gatk.utils.refdata.tracks.RMDTrackBuilder;
+import org.testng.Assert;
+import org.broadinstitute.gatk.utils.BaseTest;
+import org.broadinstitute.gatk.utils.codecs.table.TableFeature;
+import org.broadinstitute.gatk.utils.refdata.utils.LocationAwareSeekableRODIterator;
+import org.broadinstitute.gatk.utils.refdata.utils.RMDTriplet;
+import org.broadinstitute.gatk.utils.refdata.utils.RMDTriplet.RMDStorageType;
+import org.broadinstitute.gatk.utils.GenomeLoc;
+import org.broadinstitute.gatk.utils.GenomeLocParser;
+import org.broadinstitute.gatk.utils.fasta.CachingIndexedFastaSequenceFile;
+
+import static org.testng.Assert.assertTrue;
+import org.testng.annotations.BeforeMethod;
+import org.testng.annotations.BeforeClass;
+import org.testng.annotations.Test;
+
+import java.io.File;
+import java.io.FileNotFoundException;
+
+import htsjdk.samtools.reference.IndexedFastaSequenceFile;
+/**
+ * User: hanna
+ * Date: May 21, 2009
+ * Time: 11:03:04 AM
+ * BROAD INSTITUTE SOFTWARE COPYRIGHT NOTICE AND AGREEMENT
+ * Software and documentation are copyright 2005 by the Broad Institute.
+ * All rights are reserved.
+ *
+ * Users acknowledge that this software is supplied without any warranty or support.
+ * The Broad Institute is not responsible for its use, misuse, or
+ * functionality.
+ */
+
+/**
+ * Test the contents and number of iterators in the pool.
+ */
+
+public class ReferenceOrderedDataPoolUnitTest extends BaseTest {
+
+    // ROD descriptor and track builder; rebuilt before each test for a fresh pool.
+    private RMDTriplet triplet = null;
+    private RMDTrackBuilder builder = null;
+
+    // Reference sequence and parser; initialized once for the whole class.
+    private IndexedFastaSequenceFile seq;
+    private GenomeLocParser genomeLocParser;
+
+    // Three single-base chrM sites covered by the tabular test data file.
+    private GenomeLoc testSite1;
+    private GenomeLoc testSite2;
+    private GenomeLoc testSite3;
+
+    private GenomeLoc testInterval1; // an interval matching testSite1 -> testSite2 for queries
+    private GenomeLoc testInterval2; // an interval matching testSite2 -> testSite3 for queries
+
+
+    /**
+     * Loads the hg18 reference once and precomputes the chrM sites and
+     * intervals shared by all tests in this class.
+     */
+    @BeforeClass
+    public void init() throws FileNotFoundException {
+        seq = new CachingIndexedFastaSequenceFile(new File(hg18Reference));
+        genomeLocParser = new GenomeLocParser(seq);
+
+        testSite1 = genomeLocParser.createGenomeLoc("chrM",10);
+        testSite2 = genomeLocParser.createGenomeLoc("chrM",20);
+        testSite3 = genomeLocParser.createGenomeLoc("chrM",30);
+        testInterval1 = genomeLocParser.createGenomeLoc("chrM",10,20);
+        testInterval2 = genomeLocParser.createGenomeLoc("chrM",20,30);
+    }
+
+    /**
+     * Rebuilds the triplet and builder before every test so each test starts
+     * with an empty iterator pool.
+     */
+    @BeforeMethod
+    public void setUp() {
+        String fileName = privateTestDir + "TabularDataTest.dat";
+
+        triplet = new RMDTriplet("tableTest","Table",fileName,RMDStorageType.FILE,new Tags());
+        // disable auto-index creation/locking in the RMDTrackBuilder for tests
+        builder = new RMDTrackBuilder(seq.getSequenceDictionary(),genomeLocParser,null,true,null);
+    }
+
+    /**
+     * Checking out a single iterator should create exactly one iterator in the
+     * pool; releasing it should make it available again.
+     */
+    @Test
+    public void testCreateSingleIterator() {
+        ResourcePool iteratorPool = new ReferenceOrderedDataPool(triplet,builder,seq.getSequenceDictionary(),genomeLocParser,false);
+        LocationAwareSeekableRODIterator iterator = (LocationAwareSeekableRODIterator)iteratorPool.iterator( new MappedStreamSegment(testSite1) );
+
+        // One iterator exists and it is checked out (none available).
+        Assert.assertEquals(iteratorPool.numIterators(), 1, "Number of iterators in the pool is incorrect");
+        Assert.assertEquals(iteratorPool.numAvailableIterators(), 0, "Number of available iterators in the pool is incorrect");
+
+        TableFeature datum = (TableFeature)iterator.next().get(0).getUnderlyingObject();
+
+        // First row of the tabular test data: site1 -> (A, B, C).
+        assertTrue(datum.getLocation().equals(testSite1));
+        assertTrue(datum.get("COL1").equals("A"));
+        assertTrue(datum.get("COL2").equals("B"));
+        assertTrue(datum.get("COL3").equals("C"));
+
+        iteratorPool.release(iterator);
+
+        // After release the single iterator is back in the available set.
+        Assert.assertEquals(iteratorPool.numIterators(), 1, "Number of iterators in the pool is incorrect");
+        Assert.assertEquals(iteratorPool.numAvailableIterators(), 1, "Number of available iterators in the pool is incorrect");
+    }
+
+    /**
+     * Two concurrent checkouts should yield two independent iterators whose
+     * contents remain correct under out-of-order, interleaved access.
+     */
+    @Test
+    public void testCreateMultipleIterators() {
+        ReferenceOrderedQueryDataPool iteratorPool = new ReferenceOrderedQueryDataPool(triplet,builder,seq.getSequenceDictionary(),genomeLocParser);
+        LocationAwareSeekableRODIterator iterator1 = iteratorPool.iterator( new MappedStreamSegment(testInterval1) );
+
+        // Create a new iterator at position 2.
+        LocationAwareSeekableRODIterator iterator2 = iteratorPool.iterator( new MappedStreamSegment(testInterval2) );
+
+        Assert.assertEquals(iteratorPool.numIterators(), 2, "Number of iterators in the pool is incorrect");
+        Assert.assertEquals(iteratorPool.numAvailableIterators(), 0, "Number of available iterators in the pool is incorrect");
+
+        // Test out-of-order access: first iterator2, then iterator1.
+        // Ugh...first call to a region needs to be a seek.
+        TableFeature datum = (TableFeature)iterator2.seekForward(testSite2).get(0).getUnderlyingObject();
+        assertTrue(datum.getLocation().equals(testSite2));
+        assertTrue(datum.get("COL1").equals("C"));
+        assertTrue(datum.get("COL2").equals("D"));
+        assertTrue(datum.get("COL3").equals("E"));
+
+        datum = (TableFeature)iterator1.next().get(0).getUnderlyingObject();
+        assertTrue(datum.getLocation().equals(testSite1));
+        assertTrue(datum.get("COL1").equals("A"));
+        assertTrue(datum.get("COL2").equals("B"));
+        assertTrue(datum.get("COL3").equals("C"));
+
+        // Advance iterator2, and make sure both iterator's contents are still correct.
+        datum = (TableFeature)iterator2.next().get(0).getUnderlyingObject();
+        assertTrue(datum.getLocation().equals(testSite3));
+        assertTrue(datum.get("COL1").equals("F"));
+        assertTrue(datum.get("COL2").equals("G"));
+        assertTrue(datum.get("COL3").equals("H"));
+
+        datum = (TableFeature)iterator1.next().get(0).getUnderlyingObject();
+        assertTrue(datum.getLocation().equals(testSite2));
+        assertTrue(datum.get("COL1").equals("C"));
+        assertTrue(datum.get("COL2").equals("D"));
+        assertTrue(datum.get("COL3").equals("E"));
+
+        // Cleanup, and make sure the number of iterators dies appropriately.
+        iteratorPool.release(iterator1);
+
+        Assert.assertEquals(iteratorPool.numIterators(), 2, "Number of iterators in the pool is incorrect");
+        Assert.assertEquals(iteratorPool.numAvailableIterators(), 1, "Number of available iterators in the pool is incorrect");
+
+        iteratorPool.release(iterator2);
+
+        Assert.assertEquals(iteratorPool.numIterators(), 2, "Number of iterators in the pool is incorrect");
+        Assert.assertEquals(iteratorPool.numAvailableIterators(), 2, "Number of available iterators in the pool is incorrect");
+    }
+
+    /**
+     * A released iterator should be reused by a subsequent checkout at a later
+     * position rather than a new iterator being created.
+     */
+    @Test
+    public void testIteratorConservation() {
+        ReferenceOrderedDataPool iteratorPool = new ReferenceOrderedDataPool(triplet,builder,seq.getSequenceDictionary(),genomeLocParser,false);
+        LocationAwareSeekableRODIterator iterator = iteratorPool.iterator( new MappedStreamSegment(testSite1) );
+
+        Assert.assertEquals(iteratorPool.numIterators(), 1, "Number of iterators in the pool is incorrect");
+        Assert.assertEquals(iteratorPool.numAvailableIterators(), 0, "Number of available iterators in the pool is incorrect");
+
+        TableFeature datum = (TableFeature)iterator.next().get(0).getUnderlyingObject();
+        assertTrue(datum.getLocation().equals(testSite1));
+        assertTrue(datum.get("COL1").equals("A"));
+        assertTrue(datum.get("COL2").equals("B"));
+        assertTrue(datum.get("COL3").equals("C"));
+
+        iteratorPool.release(iterator);
+
+        // Create another iterator after the current iterator.
+        iterator = iteratorPool.iterator( new MappedStreamSegment(testSite3) );
+
+        // Make sure that the previously acquired iterator was reused.
+        Assert.assertEquals(iteratorPool.numIterators(), 1, "Number of iterators in the pool is incorrect");
+        Assert.assertEquals(iteratorPool.numAvailableIterators(), 0, "Number of available iterators in the pool is incorrect");
+
+        datum = (TableFeature)iterator.seekForward(testSite3).get(0).getUnderlyingObject();
+        assertTrue(datum.getLocation().equals(testSite3));
+        assertTrue(datum.get("COL1").equals("F"));
+        assertTrue(datum.get("COL2").equals("G"));
+        assertTrue(datum.get("COL3").equals("H"));
+
+        iteratorPool.release(iterator);
+
+        Assert.assertEquals(iteratorPool.numIterators(), 1, "Number of iterators in the pool is incorrect");
+        Assert.assertEquals(iteratorPool.numAvailableIterators(), 1, "Number of available iterators in the pool is incorrect");
+    }
+}
diff --git a/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/datasources/rmd/ReferenceOrderedQueryDataPoolUnitTest.java b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/datasources/rmd/ReferenceOrderedQueryDataPoolUnitTest.java
new file mode 100644
index 0000000..0f6f9e3
--- /dev/null
+++ b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/datasources/rmd/ReferenceOrderedQueryDataPoolUnitTest.java
@@ -0,0 +1,89 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.datasources.rmd;
+
+import htsjdk.samtools.reference.IndexedFastaSequenceFile;
+import htsjdk.tribble.Feature;
+import org.broadinstitute.gatk.utils.BaseTest;
+import org.broadinstitute.gatk.utils.commandline.Tags;
+import org.broadinstitute.gatk.utils.refdata.utils.*;
+import org.broadinstitute.gatk.utils.GenomeLoc;
+import org.broadinstitute.gatk.utils.GenomeLocParser;
+import org.broadinstitute.gatk.utils.fasta.CachingIndexedFastaSequenceFile;
+import org.testng.Assert;
+import org.testng.annotations.Test;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.List;
+
+public class ReferenceOrderedQueryDataPoolUnitTest extends BaseTest{
+    /**
+     * Verifies that releasing a ROD iterator back to the query data pool closes
+     * the underlying tribble iterators, and that this holds across repeated
+     * acquire/iterate/release cycles.
+     */
+    @Test
+    public void testCloseFilePointers() throws IOException {
+        // Build up query parameters
+        File file = new File(BaseTest.privateTestDir + "NA12878.hg19.example1.vcf");
+        RMDTriplet triplet = new RMDTriplet("test", "VCF", file.getAbsolutePath(), RMDTriplet.RMDStorageType.FILE, new Tags());
+        IndexedFastaSequenceFile seq = new CachingIndexedFastaSequenceFile(new File(BaseTest.hg19Reference));
+        GenomeLocParser parser = new GenomeLocParser(seq);
+        GenomeLoc loc = parser.createGenomeLoc("20", 1, 100000);
+        // TestRMDTrackBuilder produces CheckableCloseableTribbleIterators so we
+        // can observe open/closed state from the test.
+        TestRMDTrackBuilder builder = new TestRMDTrackBuilder(seq.getSequenceDictionary(), parser);
+
+        // Create the query data pool
+        ReferenceOrderedQueryDataPool pool = new ReferenceOrderedQueryDataPool(triplet, builder, seq.getSequenceDictionary(), parser);
+
+        // Repeat the full lifecycle three times to catch state leaking between cycles.
+        for (int i = 0; i < 3; i++) {
+            // Ensure our tribble iterators are closed.
+            CheckableCloseableTribbleIterator.clearThreadIterators();
+            Assert.assertTrue(CheckableCloseableTribbleIterator.getThreadIterators().isEmpty(), "Tribble iterators list was not cleared.");
+
+            // Request the rodIterator
+            LocationAwareSeekableRODIterator rodIterator = pool.iterator(new MappedStreamSegment(loc));
+
+            // Run normal iteration over rodIterator
+            Assert.assertTrue(rodIterator.hasNext(), "Rod iterator does not have a next value.");
+            GenomeLoc rodIteratorLocation = rodIterator.next().getLocation();
+            Assert.assertEquals(rodIteratorLocation.getContig(), "20", "Instead of chr 20 rod iterator was at location " + rodIteratorLocation);
+
+            // Check that the underlying tribbleIterators are still open.
+            List<CheckableCloseableTribbleIterator<? extends Feature>> tribbleIterators = CheckableCloseableTribbleIterator.getThreadIterators();
+            Assert.assertFalse(tribbleIterators.isEmpty(), "Tribble iterators list is empty");
+            for (CheckableCloseableTribbleIterator<? extends Feature> tribbleIterator: tribbleIterators) {
+                Assert.assertFalse(tribbleIterator.isClosed(), "Tribble iterator is closed but should be still open.");
+            }
+
+            // Releasing the rodIterator should close the underlying tribbleIterator.
+            pool.release(rodIterator);
+
+            // Check that the underlying tribbleIterators are now closed.
+            for (CheckableCloseableTribbleIterator<? extends Feature> tribbleIterator: tribbleIterators) {
+                Assert.assertTrue(tribbleIterator.isClosed(), "Tribble iterator is open but should be now closed.");
+            }
+        }
+
+        // Extra cleanup.
+        CheckableCloseableTribbleIterator.clearThreadIterators();
+    }
+}
diff --git a/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/downsampling/DownsamplingIntegrationTest.java b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/downsampling/DownsamplingIntegrationTest.java
new file mode 100644
index 0000000..8dac72f
--- /dev/null
+++ b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/downsampling/DownsamplingIntegrationTest.java
@@ -0,0 +1,45 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.downsampling;
+
+import org.broadinstitute.gatk.engine.walkers.WalkerTest;
+import org.broadinstitute.gatk.utils.downsampling.DownsamplingMethod;
+import org.broadinstitute.gatk.utils.exceptions.UserException;
+import org.testng.annotations.Test;
+
+public class DownsamplingIntegrationTest extends WalkerTest {
+
+    @Test
+    public void testDetectLowDcovValueWithLocusTraversal() {
+        final WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec(
+            "-T TestCountLociWalker -R " + publicTestDir + "exampleFASTA.fasta -I " + publicTestDir + "exampleBAM.bam -o %s " +
+            "-dcov " + (DownsamplingMethod.MINIMUM_SAFE_COVERAGE_TARGET_FOR_LOCUS_BASED_TRAVERSALS - 1),
+            1,
+            UserException.class
+        );
+        executeTest("testDetectLowDcovValueWithLocusTraversal", spec);
+    }
+}
diff --git a/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/downsampling/DownsamplingReadsIteratorUnitTest.java b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/downsampling/DownsamplingReadsIteratorUnitTest.java
new file mode 100644
index 0000000..4fbb02a
--- /dev/null
+++ b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/downsampling/DownsamplingReadsIteratorUnitTest.java
@@ -0,0 +1,141 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.downsampling;
+
+import htsjdk.samtools.SAMFileHeader;
+import htsjdk.samtools.SAMReadGroupRecord;
+import htsjdk.samtools.SAMRecord;
+import org.broadinstitute.gatk.utils.BaseTest;
+import org.broadinstitute.gatk.utils.Utils;
+import org.broadinstitute.gatk.utils.downsampling.DownsamplingReadsIterator;
+import org.broadinstitute.gatk.utils.downsampling.SimplePositionalDownsampler;
+import org.broadinstitute.gatk.utils.sam.ArtificialSAMUtils;
+import org.broadinstitute.gatk.utils.sam.ArtificialSingleSampleReadStream;
+import org.broadinstitute.gatk.utils.sam.ArtificialSingleSampleReadStreamAnalyzer;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+import java.util.Arrays;
+
+public class DownsamplingReadsIteratorUnitTest extends BaseTest {
+
+    private static class DownsamplingReadsIteratorTest extends TestDataProvider {
+        private DownsamplingReadsIterator downsamplingIter;
+        private int targetCoverage;
+        private ArtificialSingleSampleReadStream stream;
+        private ArtificialSingleSampleReadStreamAnalyzer streamAnalyzer;
+
+        public DownsamplingReadsIteratorTest( ArtificialSingleSampleReadStream stream, int targetCoverage ) {
+            super(DownsamplingReadsIteratorTest.class);
+
+            this.stream = stream;
+            this.targetCoverage = targetCoverage;
+
+            setName(String.format("%s: targetCoverage=%d numContigs=%d stacksPerContig=%d readsPerStack=%d-%d distanceBetweenStacks=%d-%d readLength=%d-%d unmappedReads=%d",
+                    getClass().getSimpleName(),
+                    targetCoverage,
+                    stream.getNumContigs(),
+                    stream.getNumStacksPerContig(),
+                    stream.getMinReadsPerStack(),
+                    stream.getMaxReadsPerStack(),
+                    stream.getMinDistanceBetweenStacks(),
+                    stream.getMaxDistanceBetweenStacks(),
+                    stream.getMinReadLength(),
+                    stream.getMaxReadLength(),
+                    stream.getNumUnmappedReads()));
+        }
+
+        public void run() {
+            streamAnalyzer = new PositionallyDownsampledArtificialSingleSampleReadStreamAnalyzer(stream, targetCoverage);
+            downsamplingIter = new DownsamplingReadsIterator(stream.getGATKSAMIterator(), new SimplePositionalDownsampler<SAMRecord>(targetCoverage));
+
+            streamAnalyzer.analyze(downsamplingIter);
+
+            // Check whether the observed properties of the downsampled stream are what they should be
+            streamAnalyzer.validate();
+
+            // Allow memory used by this test to be reclaimed
+            stream = null;
+            streamAnalyzer = null;
+            downsamplingIter = null;
+        }
+    }
+
+    @DataProvider(name = "DownsamplingReadsIteratorTestDataProvider")
+    public Object[][] createDownsamplingReadsIteratorTests() {
+        SAMFileHeader header = ArtificialSAMUtils.createArtificialSamHeader(5, 1, 10000);
+        String readGroupID = "testReadGroup";
+        SAMReadGroupRecord readGroup = new SAMReadGroupRecord(readGroupID);
+        readGroup.setSample("testSample");
+        header.addReadGroup(readGroup);
+
+        // Values that don't vary across tests
+        int targetCoverage = 10;
+        int minReadLength = 50;
+        int maxReadLength = 100;
+        int minDistanceBetweenStacks = 1;
+        int maxDistanceBetweenStacks = maxReadLength + 1;
+
+        Utils.resetRandomGenerator();
+
+        // brute force testing!
+        for ( int numContigs : Arrays.asList(1, 2, 5) ) {
+            for ( int stacksPerContig : Arrays.asList(1, 2, 10) ) {
+                for ( int minReadsPerStack : Arrays.asList(1, targetCoverage / 2, targetCoverage, targetCoverage - 1, targetCoverage + 1, targetCoverage * 2) ) {
+                    for ( int maxReadsPerStack : Arrays.asList(1, targetCoverage / 2, targetCoverage, targetCoverage - 1, targetCoverage + 1, targetCoverage * 2) ) {
+                        for ( int numUnmappedReads : Arrays.asList(0, 1, targetCoverage, targetCoverage * 2) ) {
+                            // Only interested in sane read stream configurations here
+                            if ( minReadsPerStack <= maxReadsPerStack ) {
+                                new DownsamplingReadsIteratorTest(new ArtificialSingleSampleReadStream(header,
+                                                                                                       readGroupID,
+                                                                                                       numContigs,
+                                                                                                       stacksPerContig,
+                                                                                                       minReadsPerStack,
+                                                                                                       maxReadsPerStack,
+                                                                                                       minDistanceBetweenStacks,
+                                                                                                       maxDistanceBetweenStacks,
+                                                                                                       minReadLength,
+                                                                                                       maxReadLength,
+                                                                                                       numUnmappedReads),
+                                                                  targetCoverage);
+                            }
+                        }
+                    }
+                }
+            }
+        }
+
+        return DownsamplingReadsIteratorTest.getTests(DownsamplingReadsIteratorTest.class);
+    }
+
+    @Test(dataProvider = "DownsamplingReadsIteratorTestDataProvider")
+    public void runDownsamplingReadsIteratorTest( DownsamplingReadsIteratorTest test ) {
+        logger.warn("Running test: " + test);
+
+        Utils.resetRandomGenerator();
+        test.run();
+    }
+}
diff --git a/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/downsampling/FractionalDownsamplerUnitTest.java b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/downsampling/FractionalDownsamplerUnitTest.java
new file mode 100644
index 0000000..af7e571
--- /dev/null
+++ b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/downsampling/FractionalDownsamplerUnitTest.java
@@ -0,0 +1,160 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.downsampling;
+
+import htsjdk.samtools.SAMFileHeader;
+import htsjdk.samtools.SAMRecord;
+import org.broadinstitute.gatk.utils.BaseTest;
+import org.broadinstitute.gatk.utils.Utils;
+import org.broadinstitute.gatk.utils.downsampling.FractionalDownsampler;
+import org.broadinstitute.gatk.utils.downsampling.ReadsDownsampler;
+import org.broadinstitute.gatk.utils.sam.ArtificialSAMUtils;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+import org.testng.Assert;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.List;
+
+public class FractionalDownsamplerUnitTest extends BaseTest {
+
+    private static class FractionalDownsamplerTest extends TestDataProvider {
+        double fraction;
+        int totalReads;
+        int expectedMinNumReadsAfterDownsampling;
+        int expectedMaxNumReadsAfterDownsampling;
+        int expectedMinDiscardedItems;
+        int expectedMaxDiscardedItems;
+
+        private static final double EXPECTED_ACCURACY = 0.05; // should be accurate to within +/- this percent
+
+        public FractionalDownsamplerTest( double fraction, int totalReads ) {
+            super(FractionalDownsamplerTest.class);
+
+            this.fraction = fraction;
+            this.totalReads = totalReads;
+
+            calculateExpectations();
+
+            setName(String.format("%s: fraction=%.2f totalReads=%d expectedMinNumReadsAfterDownsampling=%d expectedMaxNumReadsAfterDownsampling=%d",
+                    getClass().getSimpleName(), fraction, totalReads, expectedMinNumReadsAfterDownsampling, expectedMaxNumReadsAfterDownsampling));
+        }
+
+        private void calculateExpectations() {
+            // Require an exact match in the 0% and 100% cases
+            if ( fraction == 0.0 ) {
+                expectedMinNumReadsAfterDownsampling = expectedMaxNumReadsAfterDownsampling = 0;
+                expectedMinDiscardedItems = expectedMaxDiscardedItems = totalReads;
+            }
+            else if ( fraction == 1.0 ) {
+                expectedMinNumReadsAfterDownsampling = expectedMaxNumReadsAfterDownsampling = totalReads;
+                expectedMinDiscardedItems = expectedMaxDiscardedItems = 0;
+            }
+            else {
+                expectedMinNumReadsAfterDownsampling = Math.max((int)((fraction - EXPECTED_ACCURACY) * totalReads), 0);
+                expectedMaxNumReadsAfterDownsampling = Math.min((int) ((fraction + EXPECTED_ACCURACY) * totalReads), totalReads);
+                expectedMinDiscardedItems = totalReads - expectedMaxNumReadsAfterDownsampling;
+                expectedMaxDiscardedItems = totalReads - expectedMinNumReadsAfterDownsampling;
+            }
+        }
+
+        public Collection<SAMRecord> createReads() {
+            Collection<SAMRecord> reads = new ArrayList<SAMRecord>(totalReads);
+
+            SAMFileHeader header = ArtificialSAMUtils.createArtificialSamHeader(1, 1, 1000000);
+            reads.addAll(ArtificialSAMUtils.createStackOfIdenticalArtificialReads(totalReads, header, "foo", 0, 1, 100));
+
+            return reads;
+        }
+    }
+
+    @DataProvider(name = "FractionalDownsamplerTestDataProvider")
+    public Object[][] createFractionalDownsamplerTestData() {
+        for ( double fraction : Arrays.asList(0.0, 0.25, 0.5, 0.75, 1.0) ) {
+            for ( int totalReads : Arrays.asList(0, 1000, 10000) ) {
+                new FractionalDownsamplerTest(fraction, totalReads);
+            }
+        }
+
+        return FractionalDownsamplerTest.getTests(FractionalDownsamplerTest.class);
+    }
+
+    @Test(dataProvider = "FractionalDownsamplerTestDataProvider")
+    public void runFractionalDownsamplerTest( FractionalDownsamplerTest test ) {
+        logger.warn("Running test: " + test);
+
+        Utils.resetRandomGenerator();
+
+        ReadsDownsampler<SAMRecord> downsampler = new FractionalDownsampler<SAMRecord>(test.fraction);
+
+        downsampler.submit(test.createReads());
+
+        if ( test.totalReads > 0 ) {
+            if ( test.fraction > FractionalDownsamplerTest.EXPECTED_ACCURACY ) {
+                Assert.assertTrue(downsampler.hasFinalizedItems());
+                Assert.assertTrue(downsampler.peekFinalized() != null);
+            }
+            Assert.assertFalse(downsampler.hasPendingItems());
+            Assert.assertTrue(downsampler.peekPending() == null);
+        }
+        else {
+            Assert.assertFalse(downsampler.hasFinalizedItems() || downsampler.hasPendingItems());
+            Assert.assertTrue(downsampler.peekFinalized() == null && downsampler.peekPending() == null);
+        }
+
+        downsampler.signalEndOfInput();
+
+        if ( test.totalReads > 0 ) {
+            if ( test.fraction > FractionalDownsamplerTest.EXPECTED_ACCURACY ) {
+                Assert.assertTrue(downsampler.hasFinalizedItems());
+                Assert.assertTrue(downsampler.peekFinalized() != null);
+            }
+            Assert.assertFalse(downsampler.hasPendingItems());
+            Assert.assertTrue(downsampler.peekPending() == null);
+        }
+        else {
+            Assert.assertFalse(downsampler.hasFinalizedItems() || downsampler.hasPendingItems());
+            Assert.assertTrue(downsampler.peekFinalized() == null && downsampler.peekPending() == null);
+        }
+
+        List<SAMRecord> downsampledReads = downsampler.consumeFinalizedItems();
+        Assert.assertFalse(downsampler.hasFinalizedItems() || downsampler.hasPendingItems());
+        Assert.assertTrue(downsampler.peekFinalized() == null && downsampler.peekPending() == null);
+
+        Assert.assertTrue(downsampledReads.size() >= test.expectedMinNumReadsAfterDownsampling &&
+                          downsampledReads.size() <= test.expectedMaxNumReadsAfterDownsampling);
+
+        Assert.assertTrue(downsampler.getNumberOfDiscardedItems() >= test.expectedMinDiscardedItems &&
+                          downsampler.getNumberOfDiscardedItems() <= test.expectedMaxDiscardedItems);
+
+        Assert.assertEquals(downsampler.getNumberOfDiscardedItems(), test.totalReads - downsampledReads.size());
+
+        downsampler.resetStats();
+        Assert.assertEquals(downsampler.getNumberOfDiscardedItems(), 0);
+    }
+}
diff --git a/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/downsampling/LevelingDownsamplerUnitTest.java b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/downsampling/LevelingDownsamplerUnitTest.java
new file mode 100644
index 0000000..e3f2fee
--- /dev/null
+++ b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/downsampling/LevelingDownsamplerUnitTest.java
@@ -0,0 +1,165 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.downsampling;
+
+import org.broadinstitute.gatk.utils.BaseTest;
+import org.broadinstitute.gatk.utils.Utils;
+import org.broadinstitute.gatk.utils.downsampling.Downsampler;
+import org.broadinstitute.gatk.utils.downsampling.LevelingDownsampler;
+import org.testng.annotations.Test;
+import org.testng.annotations.DataProvider;
+import org.testng.Assert;
+
+import java.util.*;
+
+public class LevelingDownsamplerUnitTest extends BaseTest {
+
+    private static class LevelingDownsamplerUniformStacksTest extends TestDataProvider {
+        public enum DataStructure { LINKED_LIST, ARRAY_LIST }
+
+        int targetSize;
+        int numStacks;
+        int stackSize;
+        DataStructure dataStructure;
+        int expectedSize;
+
+        public LevelingDownsamplerUniformStacksTest( int targetSize, int numStacks, int stackSize, DataStructure dataStructure ) {
+            super(LevelingDownsamplerUniformStacksTest.class);
+
+            this.targetSize = targetSize;
+            this.numStacks = numStacks;
+            this.stackSize = stackSize;
+            this.dataStructure = dataStructure;
+            expectedSize = calculateExpectedDownsampledStackSize();
+
+            setName(String.format("%s: targetSize=%d numStacks=%d stackSize=%d dataStructure=%s expectedSize=%d",
+                    getClass().getSimpleName(), targetSize, numStacks, stackSize, dataStructure, expectedSize));
+        }
+
+        public Collection<List<Object>> createStacks() {
+            Collection<List<Object>> stacks = new ArrayList<List<Object>>();
+
+            for ( int i = 1; i <= numStacks; i++ ) {
+                List<Object> stack = dataStructure == DataStructure.LINKED_LIST ? new LinkedList<Object>() : new ArrayList<Object>();
+
+                for ( int j = 1; j <= stackSize; j++ ) {
+                    stack.add(new Object());
+                }
+
+                stacks.add(stack);
+            }
+
+            return stacks;
+        }
+
+        private int calculateExpectedDownsampledStackSize() {
+            int numItemsToRemove = numStacks * stackSize - targetSize;
+
+            if ( numStacks == 0 ) {
+                return 0;
+            }
+            else if ( numItemsToRemove <= 0 ) {
+                return stackSize;
+            }
+
+            return Math.max(1, stackSize - (numItemsToRemove / numStacks));
+        }
+    }
+
+    @DataProvider(name = "UniformStacksDataProvider")
+    public Object[][] createUniformStacksTestData() {
+        for ( int targetSize = 1; targetSize <= 10000; targetSize *= 10 ) {
+            for ( int numStacks = 0; numStacks <= 10; numStacks++ ) {
+                for ( int stackSize = 1; stackSize <= 1000; stackSize *= 10 ) {
+                    for ( LevelingDownsamplerUniformStacksTest.DataStructure dataStructure : LevelingDownsamplerUniformStacksTest.DataStructure.values() ) {
+                        new LevelingDownsamplerUniformStacksTest(targetSize, numStacks, stackSize, dataStructure);
+                    }
+                }
+            }
+        }
+
+        return LevelingDownsamplerUniformStacksTest.getTests(LevelingDownsamplerUniformStacksTest.class);
+    }
+
+    @Test( dataProvider = "UniformStacksDataProvider" )
+    public void testLevelingDownsamplerWithUniformStacks( LevelingDownsamplerUniformStacksTest test ) {
+        logger.warn("Running test: " + test);
+
+        Utils.resetRandomGenerator();
+
+        Downsampler<List<Object>> downsampler = new LevelingDownsampler<List<Object>, Object>(test.targetSize);
+
+        downsampler.submit(test.createStacks());
+
+        if ( test.numStacks > 0 ) {
+            Assert.assertFalse(downsampler.hasFinalizedItems());
+            Assert.assertTrue(downsampler.peekFinalized() == null);
+            Assert.assertTrue(downsampler.hasPendingItems());
+            Assert.assertTrue(downsampler.peekPending() != null);
+        }
+        else {
+            Assert.assertFalse(downsampler.hasFinalizedItems() || downsampler.hasPendingItems());
+            Assert.assertTrue(downsampler.peekFinalized() == null && downsampler.peekPending() == null);
+        }
+
+        downsampler.signalEndOfInput();
+
+        if ( test.numStacks > 0 ) {
+            Assert.assertTrue(downsampler.hasFinalizedItems());
+            Assert.assertTrue(downsampler.peekFinalized() != null);
+            Assert.assertFalse(downsampler.hasPendingItems());
+            Assert.assertTrue(downsampler.peekPending() == null);
+        }
+        else {
+            Assert.assertFalse(downsampler.hasFinalizedItems() || downsampler.hasPendingItems());
+            Assert.assertTrue(downsampler.peekFinalized() == null && downsampler.peekPending() == null);
+        }
+
+        final int sizeFromDownsampler = downsampler.size();
+        List<List<Object>> downsampledStacks = downsampler.consumeFinalizedItems();
+        Assert.assertFalse(downsampler.hasFinalizedItems() || downsampler.hasPendingItems());
+        Assert.assertTrue(downsampler.peekFinalized() == null && downsampler.peekPending() == null);
+
+        Assert.assertEquals(downsampledStacks.size(), test.numStacks);
+
+        int totalRemainingItems = 0;
+        for ( List<Object> stack : downsampledStacks ) {
+            Assert.assertTrue(Math.abs(stack.size() - test.expectedSize) <= 1);
+            totalRemainingItems += stack.size();
+        }
+
+        Assert.assertEquals(sizeFromDownsampler, totalRemainingItems);
+        int numItemsReportedDiscarded = downsampler.getNumberOfDiscardedItems();
+        int numItemsActuallyDiscarded = test.numStacks * test.stackSize - totalRemainingItems;
+
+        Assert.assertEquals(numItemsReportedDiscarded, numItemsActuallyDiscarded);
+
+        downsampler.resetStats();
+        Assert.assertEquals(downsampler.getNumberOfDiscardedItems(), 0);
+
+        Assert.assertTrue(totalRemainingItems <= Math.max(test.targetSize, test.numStacks));
+    }
+}
diff --git a/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/downsampling/PerSampleDownsamplingReadsIteratorUnitTest.java b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/downsampling/PerSampleDownsamplingReadsIteratorUnitTest.java
new file mode 100644
index 0000000..2193078
--- /dev/null
+++ b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/downsampling/PerSampleDownsamplingReadsIteratorUnitTest.java
@@ -0,0 +1,302 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.downsampling;
+
+import htsjdk.samtools.SAMFileHeader;
+import htsjdk.samtools.SAMReadGroupRecord;
+import htsjdk.samtools.SAMRecord;
+import org.broadinstitute.gatk.utils.BaseTest;
+import org.broadinstitute.gatk.utils.Utils;
+import org.broadinstitute.gatk.utils.downsampling.PerSampleDownsamplingReadsIterator;
+import org.broadinstitute.gatk.utils.downsampling.ReadsDownsamplerFactory;
+import org.broadinstitute.gatk.utils.downsampling.SimplePositionalDownsamplerFactory;
+import org.broadinstitute.gatk.utils.iterators.GATKSAMIterator;
+import org.broadinstitute.gatk.engine.iterators.VerifyingSamIterator;
+import org.broadinstitute.gatk.utils.MathUtils;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+import org.broadinstitute.gatk.utils.sam.ArtificialMultiSampleReadStream;
+import org.broadinstitute.gatk.utils.sam.ArtificialSAMUtils;
+import org.broadinstitute.gatk.utils.sam.ArtificialSingleSampleReadStream;
+import org.broadinstitute.gatk.utils.sam.ArtificialSingleSampleReadStreamAnalyzer;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+import java.util.*;
+
+/**
+ * Unit tests for PerSampleDownsamplingReadsIterator.
+ *
+ * Each test case builds an artificial multi-sample read stream (random contig counts,
+ * stack spacing, stack depth, and unmapped-read counts per sample), pushes the merged
+ * stream through a per-sample positional downsampling iterator, and then validates the
+ * downsampled output for every sample independently via a per-sample stream analyzer.
+ */
+public class PerSampleDownsamplingReadsIteratorUnitTest extends BaseTest {
+
+    // One fully-specified test configuration: stream-shape parameters plus the
+    // downsampler factory/target to apply. Registered via the TestDataProvider base class.
+    private static class PerSampleDownsamplingReadsIteratorTest extends TestDataProvider {
+
+        // TODO: tests should distinguish between variance across samples and variance within a sample
+
+        // Spacing (in bases) between consecutive read stacks; min == max means uniform spacing.
+        private enum StreamDensity {
+            SPARSE         (MAX_READ_LENGTH,     MAX_READ_LENGTH * 2),
+            DENSE          (1,                   MIN_READ_LENGTH),
+            MIXED          (1,                   MAX_READ_LENGTH * 2),
+            UNIFORM_DENSE  (1,                   1),
+            UNIFORM_SPARSE (MAX_READ_LENGTH * 2, MAX_READ_LENGTH * 2);
+
+            int minDistanceBetweenStacks;
+            int maxDistanceBetweenStacks;
+
+            StreamDensity( int minDistanceBetweenStacks, int maxDistanceBetweenStacks ) {
+                this.minDistanceBetweenStacks = minDistanceBetweenStacks;
+                this.maxDistanceBetweenStacks = maxDistanceBetweenStacks;
+            }
+
+            public String toString() {
+                return String.format("StreamDensity:%d-%d", minDistanceBetweenStacks, maxDistanceBetweenStacks);
+            }
+        }
+
+        // Number of reads starting at each stack position; min == max means uniform depth.
+        private enum StreamStackDepth {
+            NON_UNIFORM_LOW   (1,  5),
+            NON_UNIFORM_HIGH  (15, 20),
+            NON_UNIFORM_MIXED (1,  20),
+            UNIFORM_SINGLE    (1,  1),
+            UNIFORM_LOW       (2,  2),
+            UNIFORM_HIGH      (20, 20),
+            UNIFORM_MEDIUM    (10, 10);   // should set target coverage to this value for testing
+
+            int minReadsPerStack;
+            int maxReadsPerStack;
+
+            StreamStackDepth( int minReadsPerStack, int maxReadsPerStack ) {
+                this.minReadsPerStack = minReadsPerStack;
+                this.maxReadsPerStack = maxReadsPerStack;
+            }
+
+            // True when every stack has exactly the same number of reads.
+            public boolean isUniform() {
+                return minReadsPerStack == maxReadsPerStack;
+            }
+
+            public String toString() {
+                return String.format("StreamStackDepth:%d-%d", minReadsPerStack, maxReadsPerStack);
+            }
+        }
+
+        // Number of read stacks placed on each contig; min == max means uniform count.
+        private enum StreamStacksPerContig {
+            UNIFORM(20, 20),
+            NON_UNIFORM(1, 30);
+
+            int minStacksPerContig;
+            int maxStacksPerContig;
+
+            StreamStacksPerContig( int minStacksPerContig, int maxStacksPerContig ) {
+                this.minStacksPerContig = minStacksPerContig;
+                this.maxStacksPerContig = maxStacksPerContig;
+            }
+
+            // True when every contig gets exactly the same number of stacks.
+            public boolean isUniform() {
+                return minStacksPerContig == maxStacksPerContig;
+            }
+
+            public String toString() {
+                return String.format("StreamStacksPerContig:%d-%d", minStacksPerContig, maxStacksPerContig);
+            }
+        }
+
+        // Not interested in testing multiple ranges for the read lengths, as none of our current
+        // downsamplers are affected by read length
+        private static final int MIN_READ_LENGTH = 50;
+        private static final int MAX_READ_LENGTH = 150;
+
+        // Downsampler configuration applied to every sample's stream.
+        private ReadsDownsamplerFactory<SAMRecord> downsamplerFactory;
+        private int targetCoverage;
+        // Stream-shape parameters (per-sample values are drawn randomly within these bounds).
+        private int numSamples;
+        private int minContigs;
+        private int maxContigs;
+        private StreamDensity streamDensity;
+        private StreamStackDepth streamStackDepth;
+        private StreamStacksPerContig streamStacksPerContig;
+        // Probability that a given sample's stream gets unmappedReadsCount unmapped reads appended.
+        private double unmappedReadsFraction;
+        private int unmappedReadsCount;
+        // When true, the downsampled output is additionally wrapped in a sort-order verifier.
+        private boolean verifySortedness;
+
+        // State built by createReadStreams(); nulled out in run() so test memory can be reclaimed.
+        private ArtificialMultiSampleReadStream mergedReadStream;
+        private Map<String, ArtificialSingleSampleReadStream> perSampleArtificialReadStreams;
+        private Map<String, ArtificialSingleSampleReadStreamAnalyzer> perSampleStreamAnalyzers;
+        private SAMFileHeader header;
+
+        public PerSampleDownsamplingReadsIteratorTest( ReadsDownsamplerFactory<SAMRecord> downsamplerFactory,
+                                                       int targetCoverage,
+                                                       int numSamples,
+                                                       int minContigs,
+                                                       int maxContigs,
+                                                       StreamDensity streamDensity,
+                                                       StreamStackDepth streamStackDepth,
+                                                       StreamStacksPerContig streamStacksPerContig,
+                                                       double unmappedReadsFraction,
+                                                       int unmappedReadsCount,
+                                                       boolean verifySortedness ) {
+            super(PerSampleDownsamplingReadsIteratorTest.class);
+
+            this.downsamplerFactory = downsamplerFactory;
+            this.targetCoverage = targetCoverage;
+            this.numSamples = numSamples;
+            this.minContigs = minContigs;
+            this.maxContigs = maxContigs;
+            this.streamDensity = streamDensity;
+            this.streamStackDepth = streamStackDepth;
+            this.streamStacksPerContig = streamStacksPerContig;
+            this.unmappedReadsFraction = unmappedReadsFraction;
+            this.unmappedReadsCount = unmappedReadsCount;
+            this.verifySortedness = verifySortedness;
+
+            // Note: stream construction consumes random numbers, so callers reset the
+            // global random generator before building the test set (see the data provider).
+            header = createHeader();
+            createReadStreams();
+
+            setName(String.format("%s: targetCoverage=%d numSamples=%d minContigs=%d maxContigs=%d %s %s %s unmappedReadsFraction=%.2f unmappedReadsCount=%d verifySortedness=%b",
+                    getClass().getSimpleName(), targetCoverage, numSamples, minContigs, maxContigs, streamDensity, streamStackDepth, streamStacksPerContig, unmappedReadsFraction, unmappedReadsCount, verifySortedness));
+        }
+
+        // Builds a SAM header with maxContigs contigs (sized to fit the densest possible
+        // stream plus slack) and one read group / sample name pair per sample.
+        private SAMFileHeader createHeader() {
+            SAMFileHeader header = ArtificialSAMUtils.createArtificialSamHeader(maxContigs, 1, (streamDensity.maxDistanceBetweenStacks + MAX_READ_LENGTH) * streamStacksPerContig.maxStacksPerContig + 100000);
+            List<String> readGroups = new ArrayList<String>(numSamples);
+            List<String> sampleNames = new ArrayList<String>(numSamples);
+
+            for ( int i = 0; i < numSamples; i++ ) {
+                readGroups.add("ReadGroup" + i);
+                sampleNames.add("Sample" + i);
+            }
+
+            return ArtificialSAMUtils.createEnumeratedReadGroups(header, readGroups, sampleNames);
+        }
+
+        // Creates one artificial read stream per read group (with per-sample randomized
+        // contig count, stacks per contig, and unmapped-read count), a matching
+        // positional-downsampling analyzer for each, and the merged multi-sample stream.
+        private void createReadStreams() {
+            perSampleArtificialReadStreams = new HashMap<String, ArtificialSingleSampleReadStream>(numSamples);
+            perSampleStreamAnalyzers = new HashMap<String, ArtificialSingleSampleReadStreamAnalyzer>(numSamples);
+
+            for (SAMReadGroupRecord readGroup : header.getReadGroups() ) {
+                String readGroupID = readGroup.getReadGroupId();
+                String sampleName = readGroup.getSample();
+
+                int thisSampleNumContigs = MathUtils.randomIntegerInRange(minContigs, maxContigs);
+                int thisSampleStacksPerContig = MathUtils.randomIntegerInRange(streamStacksPerContig.minStacksPerContig, streamStacksPerContig.maxStacksPerContig);
+
+                // This sample gets unmapped reads with probability unmappedReadsFraction (all or nothing).
+                int thisSampleNumUnmappedReads = Utils.getRandomGenerator().nextDouble() < unmappedReadsFraction ? unmappedReadsCount : 0;
+
+                ArtificialSingleSampleReadStream thisSampleStream = new ArtificialSingleSampleReadStream(header,
+                                                                                                         readGroupID,
+                                                                                                         thisSampleNumContigs,
+                                                                                                         thisSampleStacksPerContig,
+                                                                                                         streamStackDepth.minReadsPerStack,
+                                                                                                         streamStackDepth.maxReadsPerStack,
+                                                                                                         streamDensity.minDistanceBetweenStacks,
+                                                                                                         streamDensity.maxDistanceBetweenStacks,
+                                                                                                         MIN_READ_LENGTH,
+                                                                                                         MAX_READ_LENGTH,
+                                                                                                         thisSampleNumUnmappedReads);
+                perSampleArtificialReadStreams.put(sampleName, thisSampleStream);
+                perSampleStreamAnalyzers.put(sampleName, new PositionallyDownsampledArtificialSingleSampleReadStreamAnalyzer(thisSampleStream, targetCoverage));
+            }
+
+            mergedReadStream = new ArtificialMultiSampleReadStream(perSampleArtificialReadStreams.values());
+        }
+
+        // Executes the test: drains the downsampling iterator, routes every emitted read
+        // to its sample's analyzer, then asks each analyzer to validate its stream.
+        public void run() {
+            GATKSAMIterator downsamplingIter = new PerSampleDownsamplingReadsIterator(mergedReadStream.getGATKSAMIterator(), downsamplerFactory);
+
+            if ( verifySortedness ) {
+                // Wrapper iterator that checks sort order as reads pass through it.
+                downsamplingIter = new VerifyingSamIterator(downsamplingIter);
+            }
+
+            while ( downsamplingIter.hasNext() ) {
+                SAMRecord read = downsamplingIter.next();
+                String sampleName = read.getReadGroup() != null ? read.getReadGroup().getSample() : null;
+
+                ArtificialSingleSampleReadStreamAnalyzer analyzer = perSampleStreamAnalyzers.get(sampleName);
+                if ( analyzer != null ) {
+                    analyzer.update(read);
+                }
+                else {
+                    // Every read must belong to one of the samples we created streams for.
+                    throw new ReviewedGATKException("bug: stream analyzer for sample " + sampleName + " not found");
+                }
+            }
+
+            for ( Map.Entry<String, ArtificialSingleSampleReadStreamAnalyzer> analyzerEntry : perSampleStreamAnalyzers.entrySet() ) {
+                ArtificialSingleSampleReadStreamAnalyzer analyzer = analyzerEntry.getValue();
+                analyzer.finalizeStats();
+
+                // Validate the downsampled read stream for each sample individually
+                analyzer.validate();
+            }
+
+            // Allow memory used by this test to be reclaimed:
+            mergedReadStream = null;
+            perSampleArtificialReadStreams = null;
+            perSampleStreamAnalyzers = null;
+        }
+    }
+
+    /**
+     * Builds the full cartesian product of test configurations. Resets the global random
+     * generator first so the randomized stream shapes are reproducible across runs.
+     */
+    @DataProvider(name = "PerSampleDownsamplingReadsIteratorTestDataProvider")
+    public Object[][] createPerSampleDownsamplingReadsIteratorTests() {
+
+        Utils.resetRandomGenerator();
+
+        // Some values don't vary across tests
+        // Target coverage is pinned to the UNIFORM_MEDIUM stack depth (see that enum's comment),
+        // so uniform-medium streams are downsampled exactly to their stack size.
+        int targetCoverage = PerSampleDownsamplingReadsIteratorTest.StreamStackDepth.UNIFORM_MEDIUM.minReadsPerStack;
+        ReadsDownsamplerFactory<SAMRecord> downsamplerFactory = new SimplePositionalDownsamplerFactory<SAMRecord>(targetCoverage);
+        int maxContigs = 3;
+        boolean verifySortedness = true;
+
+        for ( int numSamples : Arrays.asList(1, 2, 10) ) {
+            for ( int minContigs = 1; minContigs <= maxContigs; minContigs++ ) {
+                for ( PerSampleDownsamplingReadsIteratorTest.StreamDensity streamDensity : PerSampleDownsamplingReadsIteratorTest.StreamDensity.values() ) {
+                    for ( PerSampleDownsamplingReadsIteratorTest.StreamStackDepth streamStackDepth : PerSampleDownsamplingReadsIteratorTest.StreamStackDepth.values() ) {
+                        for (PerSampleDownsamplingReadsIteratorTest.StreamStacksPerContig streamStacksPerContig : PerSampleDownsamplingReadsIteratorTest.StreamStacksPerContig.values() ) {
+                            for ( double unmappedReadsFraction : Arrays.asList(0.0, 1.0, 0.5) ) {
+                                for ( int unmappedReadsCount : Arrays.asList(1, 50) ) {
+                                    // Constructor registers the test with the TestDataProvider registry.
+                                    new PerSampleDownsamplingReadsIteratorTest(downsamplerFactory,
+                                                                               targetCoverage,
+                                                                               numSamples,
+                                                                               minContigs,
+                                                                               maxContigs,
+                                                                               streamDensity,
+                                                                               streamStackDepth,
+                                                                               streamStacksPerContig,
+                                                                               unmappedReadsFraction,
+                                                                               unmappedReadsCount,
+                                                                               verifySortedness);
+                                }
+                            }
+                        }
+                    }
+                }
+            }
+        }
+
+        return PerSampleDownsamplingReadsIteratorTest.getTests(PerSampleDownsamplingReadsIteratorTest.class);
+    }
+
+    @Test(dataProvider = "PerSampleDownsamplingReadsIteratorTestDataProvider")
+    public void runPerSampleDownsamplingReadsIteratorTest( PerSampleDownsamplingReadsIteratorTest test ) {
+        logger.warn("Running test: " + test);
+
+        // Reset again so each test case's iteration draws from a known random state,
+        // independent of data-provider construction order.
+        Utils.resetRandomGenerator();
+        test.run();
+    }
+}
diff --git a/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/downsampling/PositionallyDownsampledArtificialSingleSampleReadStreamAnalyzer.java b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/downsampling/PositionallyDownsampledArtificialSingleSampleReadStreamAnalyzer.java
new file mode 100644
index 0000000..d23fb1b
--- /dev/null
+++ b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/downsampling/PositionallyDownsampledArtificialSingleSampleReadStreamAnalyzer.java
@@ -0,0 +1,127 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.downsampling;
+
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+import org.broadinstitute.gatk.utils.sam.ArtificialSingleSampleReadStream;
+import org.broadinstitute.gatk.utils.sam.ArtificialSingleSampleReadStreamAnalyzer;
+
+/**
+ * Class for analyzing an artificial read stream that has been positionally downsampled, and verifying
+ * that the downsampling was done correctly without changing the stream in unexpected ways.
+ *
+ * @author David Roazen
+ */
+public class PositionallyDownsampledArtificialSingleSampleReadStreamAnalyzer extends ArtificialSingleSampleReadStreamAnalyzer {
+    // Per-stack read cap that the positional downsampler was configured with; stacks in the
+    // downsampled stream must not exceed this value.
+    private int targetCoverage;
+
+    /**
+     * @param originalStream the (pre-downsampling) artificial stream whose properties we validate against
+     * @param targetCoverage the per-position target the downsampler was configured with
+     */
+    public PositionallyDownsampledArtificialSingleSampleReadStreamAnalyzer( ArtificialSingleSampleReadStream originalStream, int targetCoverage ) {
+        super(originalStream);
+        this.targetCoverage = targetCoverage;
+    }
+
+    /**
+     * Overridden validate() method that checks for the effects of positional downsampling in addition to checking
+     * for whether the original properties of the stream not affected by downsampling have been preserved
+     */
+    @Override
+    public void validate() {
+        // Degenerate case: stream configured to contain no reads at all (no mapped stacks and no unmapped reads).
+        if ( (originalStream.getNumContigs() == 0 || originalStream.getNumStacksPerContig() == 0) && originalStream.getNumUnmappedReads() == 0 ) {
+            if ( totalReads != 0 ) {
+                throw new ReviewedGATKException("got reads from the stream, but the stream was configured to have 0 reads");
+            }
+            return;  // no further validation needed for the 0-reads case
+        }
+        else if ( totalReads == 0 ) {
+            throw new ReviewedGATKException("got no reads from the stream, but the stream was configured to have > 0 reads");
+        }
+
+        if ( ! allSamplesMatch ) {
+            throw new ReviewedGATKException("some reads had the wrong sample");
+        }
+
+        // Downsampling must not add or remove whole contigs or stacks -- only reads within stacks.
+        if ( numContigs != originalStream.getNumContigs() ) {
+            throw new ReviewedGATKException("number of contigs not correct");
+        }
+
+        if ( stacksPerContig.size() != originalStream.getNumContigs() ) {
+            throw new ReviewedGATKException(String.format("bug in analyzer code: calculated sizes for %d contigs even though there were only %d contigs",
+                                                           stacksPerContig.size(), originalStream.getNumContigs()));
+        }
+
+        for ( int contigStackCount : stacksPerContig ) {
+            if ( contigStackCount != originalStream.getNumStacksPerContig() ) {
+                throw new ReviewedGATKException("contig had incorrect number of stacks");
+            }
+        }
+
+        if ( originalStream.getNumStacksPerContig() > 0 ) {
+
+            // Check for the effects of positional downsampling:
+            // A stack smaller than targetCoverage is untouched; a larger one is cut down to targetCoverage.
+            int stackMinimumAfterDownsampling = Math.min(targetCoverage, originalStream.getMinReadsPerStack());
+            int stackMaximumAfterDownsampling = targetCoverage;
+
+            if ( minReadsPerStack < stackMinimumAfterDownsampling ) {
+                throw new ReviewedGATKException("stack had fewer than the minimum number of reads after downsampling");
+            }
+            if ( maxReadsPerStack > stackMaximumAfterDownsampling ) {
+                throw new ReviewedGATKException("stack had more than the maximum number of reads after downsampling");
+            }
+        }
+        else if ( minReadsPerStack != null || maxReadsPerStack != null ) {
+            throw new ReviewedGATKException("bug in analyzer code: reads per stack was calculated even though 0 stacks per contig was specified");
+        }
+
+        // Inter-stack distances can only be observed with >= 2 stacks per contig.
+        if ( originalStream.getNumStacksPerContig() > 1 ) {
+            if ( minDistanceBetweenStacks < originalStream.getMinDistanceBetweenStacks() ) {
+                throw new ReviewedGATKException("stacks were separated by less than the minimum distance");
+            }
+            if ( maxDistanceBetweenStacks > originalStream.getMaxDistanceBetweenStacks() ) {
+                throw new ReviewedGATKException("stacks were separated by more than the maximum distance");
+            }
+        }
+        else if ( minDistanceBetweenStacks != null || maxDistanceBetweenStacks != null ) {
+            throw new ReviewedGATKException("bug in analyzer code: distance between stacks was calculated even though numStacksPerContig was <= 1");
+        }
+
+        if ( minReadLength < originalStream.getMinReadLength() ) {
+            throw new ReviewedGATKException("read was shorter than the minimum allowed length");
+        }
+        if ( maxReadLength > originalStream.getMaxReadLength() ) {
+            throw new ReviewedGATKException("read was longer than the maximum allowed length");
+        }
+
+        // Unmapped reads are expected to survive positional downsampling untouched.
+        if ( numUnmappedReads != originalStream.getNumUnmappedReads() ) {
+            throw new ReviewedGATKException(String.format("wrong number of unmapped reads: requested %d but saw %d",
+                                                           originalStream.getNumUnmappedReads(), numUnmappedReads));
+        }
+
+        if ( (originalStream.getNumContigs() == 0 || originalStream.getNumStacksPerContig() == 0) &&
+             numUnmappedReads != totalReads ) {
+            throw new ReviewedGATKException("stream should have consisted only of unmapped reads, but saw some mapped reads");
+        }
+    }
+}
diff --git a/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/downsampling/ReservoirDownsamplerUnitTest.java b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/downsampling/ReservoirDownsamplerUnitTest.java
new file mode 100644
index 0000000..87e2ac8
--- /dev/null
+++ b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/downsampling/ReservoirDownsamplerUnitTest.java
@@ -0,0 +1,133 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.downsampling;
+
+import htsjdk.samtools.SAMFileHeader;
+import htsjdk.samtools.SAMRecord;
+import org.broadinstitute.gatk.utils.BaseTest;
+import org.broadinstitute.gatk.utils.Utils;
+import org.broadinstitute.gatk.utils.downsampling.ReadsDownsampler;
+import org.broadinstitute.gatk.utils.downsampling.ReservoirDownsampler;
+import org.broadinstitute.gatk.utils.sam.ArtificialSAMUtils;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+import org.testng.Assert;
+
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.List;
+
+/**
+ * Unit tests for ReservoirDownsampler: submits a stack of identical artificial reads to a
+ * reservoir of a given size and checks the retained/discarded counts and the downsampler's
+ * finalized/pending state transitions.
+ */
+public class ReservoirDownsamplerUnitTest extends BaseTest {
+
+    // One test case: reservoir capacity plus total input size, with the derived expectations.
+    private static class ReservoirDownsamplerTest extends TestDataProvider {
+        int reservoirSize;
+        int totalReads;
+        int expectedNumReadsAfterDownsampling;
+        int expectedNumDiscardedItems;
+
+        public ReservoirDownsamplerTest( int reservoirSize, int totalReads ) {
+            super(ReservoirDownsamplerTest.class);
+
+            this.reservoirSize = reservoirSize;
+            this.totalReads = totalReads;
+
+            // A reservoir keeps at most reservoirSize items; anything beyond that is discarded.
+            expectedNumReadsAfterDownsampling = Math.min(reservoirSize, totalReads);
+            expectedNumDiscardedItems = totalReads <= reservoirSize ? 0 : totalReads - reservoirSize;
+
+            setName(String.format("%s: reservoirSize=%d totalReads=%d expectedNumReadsAfterDownsampling=%d expectedNumDiscardedItems=%d",
+                    getClass().getSimpleName(), reservoirSize, totalReads, expectedNumReadsAfterDownsampling, expectedNumDiscardedItems));
+        }
+
+        // Creates totalReads identical single-position reads on a one-contig header.
+        public Collection<SAMRecord> createReads() {
+            Collection<SAMRecord> reads = new ArrayList<SAMRecord>(totalReads);
+
+            SAMFileHeader header = ArtificialSAMUtils.createArtificialSamHeader(1, 1, 1000000);
+            reads.addAll(ArtificialSAMUtils.createStackOfIdenticalArtificialReads(totalReads, header, "foo", 0, 1, 100));
+
+            return reads;
+        }
+    }
+
+    // Crosses reservoir sizes 1..10000 (powers of 10) with read counts 0 and 1..10000 (powers of 10).
+    @DataProvider(name = "ReservoirDownsamplerTestDataProvider")
+    public Object[][] createReservoirDownsamplerTestData() {
+        for ( int reservoirSize = 1; reservoirSize <= 10000; reservoirSize *= 10 ) {
+            new ReservoirDownsamplerTest(reservoirSize, 0);
+            for ( int totalReads = 1; totalReads <= 10000; totalReads *= 10 ) {
+                new ReservoirDownsamplerTest(reservoirSize, totalReads);
+            }
+        }
+
+        return ReservoirDownsamplerTest.getTests(ReservoirDownsamplerTest.class);
+    }
+
+    @Test(dataProvider = "ReservoirDownsamplerTestDataProvider")
+    public void testReservoirDownsampler( ReservoirDownsamplerTest test ) {
+        logger.warn("Running test: " + test);
+
+        // Reservoir selection is randomized; reset the generator for reproducibility.
+        Utils.resetRandomGenerator();
+
+        ReadsDownsampler<SAMRecord> downsampler = new ReservoirDownsampler<SAMRecord>(test.reservoirSize);
+
+        downsampler.submit(test.createReads());
+
+        // After submit(): reads (if any) are already finalized, never pending.
+        // NOTE(review): assertTrue(x == null)/assertTrue(x != null) would read better as
+        // Assert.assertNull/assertNotNull -- left as-is here.
+        if ( test.totalReads > 0 ) {
+            Assert.assertTrue(downsampler.hasFinalizedItems());
+            Assert.assertTrue(downsampler.peekFinalized() != null);
+            Assert.assertFalse(downsampler.hasPendingItems());
+            Assert.assertTrue(downsampler.peekPending() == null);
+        }
+        else {
+            Assert.assertFalse(downsampler.hasFinalizedItems() || downsampler.hasPendingItems());
+            Assert.assertTrue(downsampler.peekFinalized() == null && downsampler.peekPending() == null);
+        }
+
+        downsampler.signalEndOfInput();
+
+        // End-of-input must not change the finalized/pending state for this downsampler.
+        if ( test.totalReads > 0 ) {
+            Assert.assertTrue(downsampler.hasFinalizedItems());
+            Assert.assertTrue(downsampler.peekFinalized() != null);
+            Assert.assertFalse(downsampler.hasPendingItems());
+            Assert.assertTrue(downsampler.peekPending() == null);
+        }
+        else {
+            Assert.assertFalse(downsampler.hasFinalizedItems() || downsampler.hasPendingItems());
+            Assert.assertTrue(downsampler.peekFinalized() == null && downsampler.peekPending() == null);
+        }
+
+        // Consuming drains the downsampler completely and yields exactly the reservoir contents.
+        Assert.assertEquals(downsampler.size(), test.expectedNumReadsAfterDownsampling);
+        List<SAMRecord> downsampledReads = downsampler.consumeFinalizedItems();
+        Assert.assertFalse(downsampler.hasFinalizedItems() || downsampler.hasPendingItems());
+        Assert.assertTrue(downsampler.peekFinalized() == null && downsampler.peekPending() == null);
+
+        Assert.assertEquals(downsampledReads.size(), test.expectedNumReadsAfterDownsampling);
+
+        // Discard accounting must agree with both the downsampler's own count and the arithmetic.
+        Assert.assertEquals(downsampler.getNumberOfDiscardedItems(), test.expectedNumDiscardedItems);
+        Assert.assertEquals(test.totalReads - downsampledReads.size(), test.expectedNumDiscardedItems);
+
+        downsampler.resetStats();
+        Assert.assertEquals(downsampler.getNumberOfDiscardedItems(), 0);
+    }
+}
diff --git a/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/downsampling/SimplePositionalDownsamplerUnitTest.java b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/downsampling/SimplePositionalDownsamplerUnitTest.java
new file mode 100644
index 0000000..47a1179
--- /dev/null
+++ b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/downsampling/SimplePositionalDownsamplerUnitTest.java
@@ -0,0 +1,333 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.downsampling;
+
+import htsjdk.samtools.SAMFileHeader;
+import htsjdk.samtools.SAMRecord;
+import org.broadinstitute.gatk.utils.BaseTest;
+import org.broadinstitute.gatk.utils.Utils;
+import org.broadinstitute.gatk.utils.downsampling.ReadsDownsampler;
+import org.broadinstitute.gatk.utils.downsampling.SimplePositionalDownsampler;
+import org.broadinstitute.gatk.utils.sam.ArtificialSAMUtils;
+import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+import org.testng.Assert;
+
+import java.util.*;
+
+/**
+ * Unit tests for the SimplePositionalDownsampler, which caps the number of reads
+ * sharing an alignment start position at a configured target coverage, while
+ * passing unmapped reads through untouched.
+ */
+public class SimplePositionalDownsamplerUnitTest extends BaseTest {
+
+    private static class SimplePositionalDownsamplerTest extends TestDataProvider {
+        int targetCoverage;                // maximum reads the downsampler may keep per start position
+        int numStacks;                     // number of distinct alignment-start stacks created
+        List<Integer> stackSizes;          // initial size of each stack
+        List<Integer> expectedStackSizes;  // size each stack should have after downsampling
+        boolean multipleContigs;           // if true, the second half of the stacks is placed on a second contig
+        int totalInitialReads;             // sum of all entries in stackSizes
+
+        public SimplePositionalDownsamplerTest( int targetCoverage, List<Integer> stackSizes, boolean multipleContigs ) {
+            super(SimplePositionalDownsamplerTest.class);
+
+            this.targetCoverage = targetCoverage;
+            this.numStacks = stackSizes.size();
+            this.stackSizes = stackSizes;
+            this.multipleContigs = multipleContigs;
+
+            calculateExpectedDownsampledStackSizes();
+
+            totalInitialReads = 0;
+            for ( Integer stackSize : stackSizes ) {
+                totalInitialReads += stackSize;
+            }
+
+            setName(String.format("%s: targetCoverage=%d numStacks=%d stackSizes=%s expectedSizes=%s multipleContigs=%b",
+                    getClass().getSimpleName(), targetCoverage, numStacks, stackSizes, expectedStackSizes, multipleContigs));
+        }
+
+        /**
+         * Creates the artificial reads for this test case: one stack of identical reads
+         * per entry in stackSizes, with alignment starts 10 bases apart. When
+         * multipleContigs is set, stacks in the second half move to reference index 1.
+         */
+        public Collection<SAMRecord> createReads() {
+            Collection<SAMRecord> reads = new ArrayList<SAMRecord>();
+            SAMFileHeader header = multipleContigs ?
+                                   ArtificialSAMUtils.createArtificialSamHeader(2, 1, 1000000) :
+                                   ArtificialSAMUtils.createArtificialSamHeader(1, 1, 1000000);
+
+            int refIndex = 0;
+            int alignmentStart = 1;
+            int readLength = 100;
+
+            for ( int i = 0; i < numStacks; i++ ) {
+                if ( multipleContigs && refIndex == 0 && i >= numStacks / 2 ) {
+                    refIndex++;
+                }
+
+                reads.addAll(ArtificialSAMUtils.createStackOfIdenticalArtificialReads(stackSizes.get(i), header, "foo",
+                                                                                      refIndex, alignmentStart, readLength));
+
+                alignmentStart += 10;
+            }
+
+            return reads;
+        }
+
+        // Each stack is expected to be truncated to targetCoverage reads, or left
+        // untouched if it is already at or below the target.
+        private void calculateExpectedDownsampledStackSizes() {
+            expectedStackSizes = new ArrayList<Integer>(numStacks);
+
+            for ( Integer stackSize : stackSizes ) {
+                int expectedSize = targetCoverage >= stackSize ? stackSize : targetCoverage;
+                expectedStackSizes.add(expectedSize);
+            }
+        }
+    }
+
+    /**
+     * Generates test cases across target coverages 1..10000 (powers of 10),
+     * one or two contigs, and 0..10 stacks of random sizes in [1, 2 * targetCoverage].
+     */
+    @DataProvider(name = "SimplePositionalDownsamplerTestDataProvider")
+    public Object[][] createSimplePositionalDownsamplerTestData() {
+        Utils.resetRandomGenerator();
+
+        for ( int targetCoverage = 1; targetCoverage <= 10000; targetCoverage *= 10 ) {
+            for ( int contigs = 1; contigs <= 2; contigs++ ) {
+                for ( int numStacks = 0; numStacks <= 10; numStacks++ ) {
+                    List<Integer> stackSizes = new ArrayList<Integer>(numStacks);
+                    for ( int stack = 1; stack <= numStacks; stack++ ) {
+                        stackSizes.add(Utils.getRandomGenerator().nextInt(targetCoverage * 2) + 1);
+                    }
+                    new SimplePositionalDownsamplerTest(targetCoverage, stackSizes, contigs > 1);
+                }
+            }
+        }
+
+        return SimplePositionalDownsamplerTest.getTests(SimplePositionalDownsamplerTest.class);
+    }
+
+    /**
+     * End-to-end check of the downsampler state machine: verifies the
+     * finalized/pending bookkeeping after submit() and signalEndOfInput(),
+     * and that every surviving stack has its expected post-downsampling size.
+     */
+    @Test( dataProvider = "SimplePositionalDownsamplerTestDataProvider" )
+    public void testSimplePositionalDownsampler( SimplePositionalDownsamplerTest test ) {
+        logger.warn("Running test: " + test);
+
+        Utils.resetRandomGenerator();
+
+        ReadsDownsampler<SAMRecord> downsampler = new SimplePositionalDownsampler<SAMRecord>(test.targetCoverage);
+
+        downsampler.submit(test.createReads());
+
+        // With more than one stack, all but the last stack are finalized; the last is still pending.
+        if ( test.numStacks > 1 ) {
+            Assert.assertTrue(downsampler.hasFinalizedItems());
+            Assert.assertTrue(downsampler.peekFinalized() != null);
+            Assert.assertTrue(downsampler.hasPendingItems());
+            Assert.assertTrue(downsampler.peekPending() != null);
+        }
+        else if ( test.numStacks == 1 ) {
+            Assert.assertFalse(downsampler.hasFinalizedItems());
+            Assert.assertTrue(downsampler.peekFinalized() == null);
+            Assert.assertTrue(downsampler.hasPendingItems());
+            Assert.assertTrue(downsampler.peekPending() != null);
+        }
+        else {
+            Assert.assertFalse(downsampler.hasFinalizedItems() || downsampler.hasPendingItems());
+            Assert.assertTrue(downsampler.peekFinalized() == null && downsampler.peekPending() == null);
+        }
+
+        downsampler.signalEndOfInput();
+
+        // After end of input, everything must be finalized and nothing pending.
+        if ( test.numStacks > 0 ) {
+            Assert.assertTrue(downsampler.hasFinalizedItems());
+            Assert.assertTrue(downsampler.peekFinalized() != null);
+            Assert.assertFalse(downsampler.hasPendingItems());
+            Assert.assertTrue(downsampler.peekPending() == null);
+        }
+        else {
+            Assert.assertFalse(downsampler.hasFinalizedItems() || downsampler.hasPendingItems());
+            Assert.assertTrue(downsampler.peekFinalized() == null && downsampler.peekPending() == null);
+        }
+
+        List<SAMRecord> downsampledReads = downsampler.consumeFinalizedItems();
+        Assert.assertFalse(downsampler.hasFinalizedItems() || downsampler.hasPendingItems());
+        Assert.assertTrue(downsampler.peekFinalized() == null && downsampler.peekPending() == null);
+
+        if ( test.numStacks == 0 ) {
+            Assert.assertTrue(downsampledReads.isEmpty());
+        }
+        else {
+            List<Integer> downsampledStackSizes = getDownsampledStackSizesAndVerifySortedness(downsampledReads);
+
+            Assert.assertEquals(downsampledStackSizes.size(), test.numStacks);
+            Assert.assertEquals(downsampledStackSizes, test.expectedStackSizes);
+
+            // The downsampler's discard accounting must match the actual number of eliminated reads.
+            int numReadsActuallyEliminated = test.totalInitialReads - downsampledReads.size();
+            int numReadsReportedEliminated = downsampler.getNumberOfDiscardedItems();
+            Assert.assertEquals(numReadsActuallyEliminated, numReadsReportedEliminated);
+        }
+
+        downsampler.resetStats();
+        Assert.assertEquals(downsampler.getNumberOfDiscardedItems(), 0);
+    }
+
+    /**
+     * Walks the downsampled reads in order, returning the size of each positional
+     * stack encountered, and fails the calling test if the reads are not sorted
+     * by (reference index, alignment start).
+     */
+    private List<Integer> getDownsampledStackSizesAndVerifySortedness( List<SAMRecord> downsampledReads ) {
+        List<Integer> stackSizes = new ArrayList<Integer>();
+
+        if ( downsampledReads.isEmpty() ) {
+            return stackSizes;
+        }
+
+        Iterator<SAMRecord> iter = downsampledReads.iterator();
+        Assert.assertTrue(iter.hasNext());
+
+        SAMRecord previousRead = iter.next();
+        int currentStackSize = 1;
+
+        while ( iter.hasNext() ) {
+            SAMRecord currentRead = iter.next();
+
+            // Compare by reference index first, then by alignment start within the same
+            // contig. The previous ||-based formulation could misclassify an out-of-order
+            // read (smaller reference index but larger alignment start) as a new stack
+            // instead of failing the sortedness check.
+            int contigComparison = Integer.compare(currentRead.getReferenceIndex(), previousRead.getReferenceIndex());
+
+            if ( contigComparison < 0 || (contigComparison == 0 && currentRead.getAlignmentStart() < previousRead.getAlignmentStart()) ) {
+                Assert.fail(String.format("Reads are out of order: %s %s", previousRead, currentRead));
+            }
+            else if ( contigComparison > 0 || currentRead.getAlignmentStart() > previousRead.getAlignmentStart() ) {
+                stackSizes.add(currentStackSize);
+                currentStackSize = 1;
+            }
+            else {
+                currentStackSize++;
+            }
+
+            previousRead = currentRead;
+        }
+
+        stackSizes.add(currentStackSize);
+        return stackSizes;
+    }
+
+    /**
+     * signalNoMoreReadsBefore() with a read past the pending stack's position
+     * must finalize that stack without discarding anything (stack is under the target).
+     */
+    @Test
+    public void testSimplePositionalDownsamplerSignalNoMoreReadsBefore() {
+        ReadsDownsampler<SAMRecord> downsampler = new SimplePositionalDownsampler<SAMRecord>(1000);
+
+        SAMFileHeader header = ArtificialSAMUtils.createArtificialSamHeader(1, 1, 1000000);
+
+        Collection<SAMRecord> readStack = new ArrayList<SAMRecord>();
+        readStack.addAll(ArtificialSAMUtils.createStackOfIdenticalArtificialReads(50, header, "foo", 0, 1, 100));
+        downsampler.submit(readStack);
+
+        Assert.assertFalse(downsampler.hasFinalizedItems());
+        Assert.assertTrue(downsampler.peekFinalized() == null);
+        Assert.assertTrue(downsampler.hasPendingItems());
+        Assert.assertTrue(downsampler.peekPending() != null);
+
+        SAMRecord laterRead = ArtificialSAMUtils.createArtificialRead(header, "foo", 0, 2, 100);
+        downsampler.signalNoMoreReadsBefore(laterRead);
+
+        Assert.assertTrue(downsampler.hasFinalizedItems());
+        Assert.assertTrue(downsampler.peekFinalized() != null);
+        Assert.assertFalse(downsampler.hasPendingItems());
+        Assert.assertTrue(downsampler.peekPending() == null);
+
+        List<SAMRecord> downsampledReads = downsampler.consumeFinalizedItems();
+
+        Assert.assertEquals(downsampledReads.size(), readStack.size());
+    }
+
+    /**
+     * A stack of unmapped reads larger than the target coverage must survive intact.
+     */
+    @Test
+    public void testBasicUnmappedReadsSupport() {
+        ReadsDownsampler<SAMRecord> downsampler = new SimplePositionalDownsampler<SAMRecord>(100);
+
+        SAMFileHeader header = ArtificialSAMUtils.createArtificialSamHeader(1, 1, 1000000);
+
+        Collection<SAMRecord> readStack = new ArrayList<SAMRecord>();
+        readStack.addAll(ArtificialSAMUtils.createStackOfIdenticalArtificialReads(200, header, "foo", SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX,
+                                                                                  SAMRecord.NO_ALIGNMENT_START, 100));
+        for ( SAMRecord read : readStack ) {
+            Assert.assertTrue(read.getReadUnmappedFlag());
+        }
+
+        downsampler.submit(readStack);
+        downsampler.signalEndOfInput();
+
+        List<SAMRecord> downsampledReads = downsampler.consumeFinalizedItems();
+
+        // Unmapped reads should not get downsampled at all by the SimplePositionalDownsampler
+        Assert.assertEquals(downsampledReads.size(), readStack.size());
+
+        for ( SAMRecord read: downsampledReads ) {
+            Assert.assertTrue(read.getReadUnmappedFlag());
+        }
+    }
+
+    /**
+     * Mapped reads must be downsampled to the target (200 -> 100) while the
+     * trailing unmapped reads pass through untouched, preserving their order
+     * after the mapped reads.
+     */
+    @Test
+    public void testMixedMappedAndUnmappedReadsSupport() {
+        ReadsDownsampler<SAMRecord> downsampler = new SimplePositionalDownsampler<SAMRecord>(100);
+
+        SAMFileHeader header = ArtificialSAMUtils.createArtificialSamHeader(1, 1, 1000000);
+
+        Collection<SAMRecord> mappedReadStack = new ArrayList<SAMRecord>();
+        mappedReadStack.addAll(ArtificialSAMUtils.createStackOfIdenticalArtificialReads(200, header, "foo", 0, 1, 100));
+        for ( SAMRecord read : mappedReadStack ) {
+            Assert.assertFalse(read.getReadUnmappedFlag());
+        }
+
+        Collection<SAMRecord> unmappedReadStack = new ArrayList<SAMRecord>();
+        unmappedReadStack.addAll(ArtificialSAMUtils.createStackOfIdenticalArtificialReads(200, header, "foo", SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX,
+                                                                                          SAMRecord.NO_ALIGNMENT_START, 100));
+        for ( SAMRecord read : unmappedReadStack ) {
+            Assert.assertTrue(read.getReadUnmappedFlag());
+        }
+
+        downsampler.submit(mappedReadStack);
+        downsampler.submit(unmappedReadStack);
+        downsampler.signalEndOfInput();
+
+        List<SAMRecord> downsampledReads = downsampler.consumeFinalizedItems();
+
+        // Unmapped reads should not get downsampled at all by the SimplePositionalDownsampler
+        Assert.assertEquals(downsampledReads.size(), 300);
+        Assert.assertEquals(downsampler.getNumberOfDiscardedItems(), 100);
+
+        // The first 100 survivors are the downsampled mapped reads; the rest are unmapped.
+        int count = 1;
+        for ( SAMRecord read: downsampledReads ) {
+            if ( count <= 100 ) {
+                Assert.assertFalse(read.getReadUnmappedFlag());
+            }
+            else {
+                Assert.assertTrue(read.getReadUnmappedFlag());
+            }
+
+            count++;
+        }
+    }
+
+    /**
+     * The downsampler must also accept GATKSAMRecord (a SAMRecord subclass);
+     * 10 reads under a target of 1000 must all survive.
+     */
+    @Test
+    public void testGATKSAMRecordSupport() {
+        ReadsDownsampler<GATKSAMRecord> downsampler = new SimplePositionalDownsampler<GATKSAMRecord>(1000);
+
+        SAMFileHeader header = ArtificialSAMUtils.createArtificialSamHeader(1, 1, 1000000);
+
+        List<GATKSAMRecord> reads = new ArrayList<GATKSAMRecord>();
+        for ( int i = 0; i < 10; i++ ) {
+            reads.add(ArtificialSAMUtils.createArtificialRead(header, "foo", 0, 10, 20 * i + 10));
+        }
+
+        downsampler.submit(reads);
+        downsampler.signalEndOfInput();
+        List<GATKSAMRecord> downsampledReads = downsampler.consumeFinalizedItems();
+
+        Assert.assertEquals(downsampledReads.size(), 10);
+    }
+}
diff --git a/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/executive/ReduceTreeUnitTest.java b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/executive/ReduceTreeUnitTest.java
new file mode 100644
index 0000000..44432cf
--- /dev/null
+++ b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/executive/ReduceTreeUnitTest.java
@@ -0,0 +1,254 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.executive;
+
+
+import org.testng.Assert;
+import org.testng.annotations.AfterMethod;
+import org.testng.annotations.Test;
+import org.testng.annotations.BeforeMethod;
+
+import org.broadinstitute.gatk.utils.BaseTest;
+
+import java.util.concurrent.Callable;
+import java.util.concurrent.Future;
+import java.util.concurrent.FutureTask;
+import java.util.concurrent.ExecutionException;
+import java.util.List;
+import java.util.ArrayList;
+/**
+ * User: hanna
+ * Date: Apr 29, 2009
+ * Time: 10:40:49 AM
+ * BROAD INSTITUTE SOFTWARE COPYRIGHT NOTICE AND AGREEMENT
+ * Software and documentation are copyright 2005 by the Broad Institute.
+ * All rights are reserved.
+ *
+ * Users acknowledge that this software is supplied without any warranty or support.
+ * The Broad Institute is not responsible for its use, misuse, or
+ * functionality.
+ */
+
+/**
+ * Make sure the reduce tree organizes reduces in the correct way.
+ */
+
+public class ReduceTreeUnitTest extends BaseTest implements ReduceTree.TreeReduceNotifier {
+
+    /**
+     * The tree indicating reduce order.
+     */
+    private ReduceTree reduceTree = null;
+
+    /**
+     * Records the argument lists handed to notifyReduce(), in the order the
+     * tree performed its pairwise reduces; inspected by the tests below.
+     */
+    private List<List<Integer>> reduces = new ArrayList<List<Integer>>();
+
+    // Fresh tree per test; this class itself receives the reduce notifications.
+    @BeforeMethod
+    public void createTree() {
+        reduceTree = new ReduceTree( this );
+    }
+
+    @AfterMethod
+    public void destroyTree() {
+        reduceTree = null;
+        reduces.clear();
+    }
+
+    // A completed tree with no entries must yield a null result.
+    @Test
+    public void testNoValueReduce()
+        throws InterruptedException, ExecutionException {
+        reduceTree.complete();
+        Assert.assertEquals(reduceTree.getResult(), null, "Single-value reduce failed");
+    }
+
+    // A single entry is passed through unchanged (no pairwise reduce happens).
+    @Test
+    public void testSingleValueReduce()
+            throws InterruptedException, ExecutionException {
+        reduceTree.addEntry( getReduceTestEntry(1) );
+        reduceTree.complete();
+        Assert.assertEquals(reduceTree.getResult().get(), 1, "Single-value reduce failed");
+    }
+
+    // Asking for the result before complete() must throw IllegalStateException.
+    @Test(expectedExceptions=IllegalStateException.class)
+    public void testIncompleteReduce()
+            throws InterruptedException, ExecutionException {
+        reduceTree.addEntry( getReduceTestEntry(1) );
+        reduceTree.getResult().get();
+    }
+
+    // Two entries reduce into a single pair; both the final result and the
+    // single intermediate reduce are checked.
+    @Test
+    public void testDualValueReduce()
+        throws InterruptedException, ExecutionException {
+        reduceTree.addEntry( getReduceTestEntry(1) );
+        reduceTree.addEntry( getReduceTestEntry(2) );
+        reduceTree.complete();
+
+        List<Integer> expected = new ArrayList<Integer>();
+        expected.add( 1 );
+        expected.add( 2 );
+
+        // Test the result
+        Assert.assertEquals(reduceTree.getResult().get(), expected, "Dual-value reduce failed");
+
+        // Test the intermediate steps
+        Assert.assertEquals(reduces.size(), 1, "Size of incoming tree reduces incorrect");
+        Assert.assertEquals(reduces.get(0), expected, "Incoming tree reduce incorrect");
+    }
+
+    // Three entries: (1,2) reduce eagerly as a pair; the dangling 3 is only
+    // folded in when complete() is called.
+    @Test
+    public void testThreeValueReduce()
+        throws InterruptedException, ExecutionException {
+        List<Integer> firstExpected = new ArrayList<Integer>();
+        firstExpected.add(1);
+        firstExpected.add(2);
+
+        List<Integer> finalExpected = new ArrayList<Integer>();
+        finalExpected.addAll( firstExpected );
+        finalExpected.add(3);
+
+        reduceTree.addEntry( getReduceTestEntry(1) );
+
+        Assert.assertEquals(reduces.size(), 0, "Reduce queue should be empty after entering a single element");
+
+        reduceTree.addEntry( getReduceTestEntry(2) );
+
+        Assert.assertEquals(reduces.size(), 1, "Reduce queue should have one element after two entries");
+        Assert.assertEquals(reduces.get(0), firstExpected, "Reduce queue element is incorrect after two entries");
+
+        reduceTree.addEntry( getReduceTestEntry(3) );
+
+        Assert.assertEquals(reduces.size(), 1, "Reduce queue should have one element after three entries");
+        Assert.assertEquals(reduces.get(0), firstExpected, "Reduce queue element is incorrect after three entries");
+
+        reduceTree.complete();
+
+        // Test the result
+        Assert.assertEquals(reduceTree.getResult().get(), finalExpected, "Three value reduce failed");
+
+        Assert.assertEquals(reduces.size(), 2, "Reduce queue should have two elements after three entries (complete)");
+        Assert.assertEquals(reduces.get(0), firstExpected, "Reduce queue element is incorrect after three entries");
+        Assert.assertEquals(reduces.get(1), finalExpected, "Reduce queue element is incorrect after three entries");
+    }
+
+    // Four entries: (1,2) and (3,4) reduce as pairs, then the fourth entry
+    // triggers the top-level reduce of the two pair results -- all before complete().
+    @Test
+    public void testFourValueReduce()
+        throws InterruptedException, ExecutionException {
+        List<Integer> lhsExpected = new ArrayList<Integer>();
+        lhsExpected.add(1);
+        lhsExpected.add(2);
+
+        List<Integer> rhsExpected = new ArrayList<Integer>();
+        rhsExpected.add(3);
+        rhsExpected.add(4);
+
+        List<Integer> finalExpected = new ArrayList<Integer>();
+        finalExpected.addAll(lhsExpected);
+        finalExpected.addAll(rhsExpected);
+
+        reduceTree.addEntry( getReduceTestEntry(1) );
+
+        Assert.assertEquals(reduces.size(), 0, "Reduce queue should be empty after entering a single element");
+
+        reduceTree.addEntry( getReduceTestEntry(2) );
+
+        Assert.assertEquals(reduces.size(), 1, "Reduce queue should have one element after two entries");
+        Assert.assertEquals(reduces.get(0), lhsExpected, "Reduce queue element is incorrect after two entries");
+
+        reduceTree.addEntry( getReduceTestEntry(3) );
+
+        Assert.assertEquals(reduces.size(), 1, "Reduce queue should have one element after three entries");
+        Assert.assertEquals(reduces.get(0), lhsExpected, "Reduce queue element is incorrect after three entries");
+
+        reduceTree.addEntry( getReduceTestEntry(4) );
+
+        Assert.assertEquals(reduces.size(), 3, "Reduce queue should have three elements after four entries");
+        Assert.assertEquals(reduces.get(0), lhsExpected, "Reduce queue element 0 is incorrect after three entries");
+        Assert.assertEquals(reduces.get(1), rhsExpected, "Reduce queue element 1 is incorrect after three entries");
+        Assert.assertEquals(reduces.get(2), finalExpected, "Reduce queue element 2 is incorrect after three entries");
+
+        reduceTree.complete();
+
+                // Test the result
+        Assert.assertEquals(reduceTree.getResult().get(), finalExpected, "Four-valued reduce failed");
+
+        // Test the working tree
+        Assert.assertEquals(reduces.size(), 3, "Didn't see correct number of reduces");
+        Assert.assertEquals(reduces.get(0), lhsExpected, "lhs of four value reduce failed");
+        Assert.assertEquals(reduces.get(1), rhsExpected, "rhs of four value reduce failed");
+        Assert.assertEquals(reduces.get(2), finalExpected, "final value four value reduce failed");
+    }
+
+
+    // Wraps a value in an already-completed Future (raw type imposed by the
+    // ReduceTree API) so getResult().get() never blocks.
+    private Future getReduceTestEntry( Object value ) {
+        // Create a task and run it, assuring that the tests won't block on a get.
+        FutureTask task = new FutureTask( new ReduceTestEntry( value ) );
+        task.run();
+        return task;
+    }
+
+    // TreeReduceNotifier callback: flattens lhs and rhs (each an Integer or a
+    // List of Integers) into one list, records it for inspection, and returns
+    // it as the reduced Future. rhs may be null for an odd dangling entry.
+    public Future notifyReduce( Future lhs, Future rhs )  {
+        List<Integer> reduce = new ArrayList<Integer>();
+
+        try {
+            if( lhs == null && rhs == null )
+                throw new IllegalStateException("lhs and rhs are null");
+
+            if( lhs.get() instanceof List )
+                reduce.addAll((List)lhs.get());
+            else
+                reduce.add((Integer)lhs.get());
+
+            if( rhs != null ) {
+                if( rhs.get() instanceof List )
+                    reduce.addAll((List)rhs.get());
+                else
+                    reduce.add((Integer)rhs.get());
+            }
+        }
+        catch( Exception ex ) {
+            // just rethrow any exceptions
+            throw new RuntimeException(ex);
+        }
+
+        reduces.add( reduce );
+
+        return getReduceTestEntry( reduce );
+    }
+
+    // Trivial Callable that simply returns the value it was constructed with.
+    private class ReduceTestEntry implements Callable {
+        private Object data;
+
+        public ReduceTestEntry( Object data ) {
+            this.data = data;
+        }
+
+        public Object call() {
+            return data;
+        }
+    }
+}
diff --git a/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/filters/AllowNCigarMalformedReadFilterUnitTest.java b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/filters/AllowNCigarMalformedReadFilterUnitTest.java
new file mode 100644
index 0000000..c0e48a9
--- /dev/null
+++ b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/filters/AllowNCigarMalformedReadFilterUnitTest.java
@@ -0,0 +1,77 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.filters;
+
+
+import htsjdk.samtools.SAMRecord;
+import org.broadinstitute.gatk.utils.ValidationExclusion;
+import org.testng.Assert;
+import org.testng.annotations.Test;
+
+import java.util.Collections;
+
+
+/**
+ * Tests for the {@link MalformedReadFilter} when the unsafe flag
+ * {@link ValidationExclusion.TYPE#ALLOW_N_CIGAR_READS} is set.
+ *
+ * @author Valentin Ruano-Rubio
+ * @since 6/6/13
+ */
+public class AllowNCigarMalformedReadFilterUnitTest extends MalformedReadFilterUnitTest {
+
+    /** Runs the parent test suite with the ALLOW_N_CIGAR_READS unsafe exclusion active. */
+    @Override
+    protected ValidationExclusion composeValidationExclusion() {
+        return new ValidationExclusion(Collections.singletonList(ValidationExclusion.TYPE.ALLOW_N_CIGAR_READS));
+    }
+
+    /** With the exclusion active, reads whose cigars contain N must not be filtered out. */
+    @Test(enabled = true,
+            dataProvider= "UnsupportedCigarOperatorDataProvider")
+    @CigarOperatorTest(CigarOperatorTest.Outcome.IGNORE)
+    public void testCigarNOperatorFilterIgnore(final String cigarString) {
+        final MalformedReadFilter readFilter = buildMalformedReadFilter(false);
+        final SAMRecord readWithNCigar = buildSAMRecord(cigarString);
+        Assert.assertFalse(readFilter.filterOut(readWithNCigar),
+                "filters out N containing Cigar when it should ignore the fact");
+    }
+
+    /** Deliberately disabled: the parent's exception-expecting case does not apply here. */
+    @Test(enabled = false)
+    @Override
+    public void testCigarNOperatorFilterException(final String cigarString) {
+        // Intentionally empty -- deactivates the parent class's test case.
+    }
+}
diff --git a/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/filters/BadCigarFilterUnitTest.java b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/filters/BadCigarFilterUnitTest.java
new file mode 100644
index 0000000..109c64c
--- /dev/null
+++ b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/filters/BadCigarFilterUnitTest.java
@@ -0,0 +1,97 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.filters;
+
+import htsjdk.samtools.Cigar;
+import org.broadinstitute.gatk.utils.clipping.ReadClipperTestUtils;
+import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
+import org.testng.Assert;
+import org.testng.annotations.BeforeClass;
+import org.testng.annotations.Test;
+
+import java.util.List;
+
+/**
+ * Checks that the Bad Cigar filter works for all kinds of wonky cigars
+ *
+ * @author Mauricio Carneiro
+ * @since 3/20/12
+ */
+/**
+ * Verifies that the BadCigarFilter rejects structurally invalid cigars
+ * (leading/trailing deletions, adjacent indels, mid-read clips) while
+ * accepting every well-formed cigar.
+ *
+ * @author Mauricio Carneiro
+ * @since 3/20/12
+ */
+public class BadCigarFilterUnitTest {
+
+    // Every entry here is malformed and must be rejected by the filter.
+    public static final String[] BAD_CIGAR_LIST = {
+            "2D4M",               // starting with multiple deletions
+            "4M2D",               // ending with multiple deletions
+            "3M1I1D",             // adjacent indels AND ends in deletion
+            "1M1I1D2M",           // adjacent indels I->D
+            "1M1D2I1M",           // adjacent indels D->I
+            "1M1I2M1D",           // ends in single deletion with insertion in the middle
+            "4M1D",               // ends in single deletion
+            "1D4M",               // starts with single deletion
+            "2M1D1D2M",           // adjacent D's
+            "1M1I1I1M",           // adjacent I's
+            "1H1D4M",             // starting with deletion after H
+            "1S1D3M",             // starting with deletion after S
+            "1H1S1D3M",           // starting with deletion after HS
+            "4M1D1H",             // ending with deletion before H
+            "3M1D1S",             // ending with deletion before S
+            "3M1D1S1H",           // ending with deletion before HS
+            "10M2H10M",           // H in the middle
+            "10M2S10M",           // S in the middle
+            "1H1S10M2S10M1S1H",    // deceiving S in the middle
+            "1H1S10M2H10M1S1H"    // deceiving H in the middle
+    };
+
+    BadCigarFilter filter;
+
+    @BeforeClass
+    public void init() {
+        filter = new BadCigarFilter();
+    }
+
+    /** Every cigar in BAD_CIGAR_LIST must be filtered out. */
+    @Test(enabled = true)
+    public void testWonkyCigars() {
+        for ( final String badCigar : BAD_CIGAR_LIST ) {
+            final GATKSAMRecord wonkyRead = ReadClipperTestUtils.makeReadFromCigar(badCigar, 0);
+            Assert.assertTrue(filter.filterOut(wonkyRead), wonkyRead.getCigarString());
+        }
+    }
+
+    /** A read whose length disagrees with its cigar must also be filtered out. */
+    @Test(enabled = true)
+    public void testReadCigarLengthMismatch() {
+        final GATKSAMRecord mismatchedRead = ReadClipperTestUtils.makeReadFromCigar("4M", 1);
+        Assert.assertTrue(filter.filterOut(mismatchedRead), mismatchedRead.getCigarString());
+    }
+
+    /** None of the systematically generated well-formed cigars may be filtered out. */
+    @Test(enabled = true)
+    public void testGoodCigars() {
+        for ( final Cigar goodCigar : ReadClipperTestUtils.generateCigarList(10) ) {
+            final GATKSAMRecord goodRead = ReadClipperTestUtils.makeReadFromCigar(goodCigar, 0);
+            Assert.assertFalse(filter.filterOut(goodRead), goodRead.getCigarString());
+        }
+    }
+}
diff --git a/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/filters/BadReadGroupsIntegrationTest.java b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/filters/BadReadGroupsIntegrationTest.java
new file mode 100644
index 0000000..9f21233
--- /dev/null
+++ b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/filters/BadReadGroupsIntegrationTest.java
@@ -0,0 +1,52 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.filters;
+
+import org.broadinstitute.gatk.engine.walkers.WalkerTest;
+import org.broadinstitute.gatk.utils.exceptions.UserException;
+import org.testng.annotations.Test;
+
+
+public class BadReadGroupsIntegrationTest extends WalkerTest {
+
+    @Test
+    public void testMissingReadGroup() {
+        WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec(
+                "-T TestPrintReadsWalker -R " + hg18Reference + " -I " + privateTestDir + "missingReadGroup.bam -o /dev/null",
+                0,
+                UserException.ReadMissingReadGroup.class);
+        executeTest("test Missing Read Group", spec);
+    }
+
+    @Test
+    public void testUndefinedReadGroup() {
+        WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec(
+                "-T TestPrintReadsWalker -R " + hg18Reference + " -I " + privateTestDir + "undefinedReadGroup.bam -o /dev/null",
+                0,
+                UserException.ReadHasUndefinedReadGroup.class);
+        executeTest("test Undefined Read Group", spec);
+    }
+}
diff --git a/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/filters/MalformedReadFilterUnitTest.java b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/filters/MalformedReadFilterUnitTest.java
new file mode 100644
index 0000000..aec5990
--- /dev/null
+++ b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/filters/MalformedReadFilterUnitTest.java
@@ -0,0 +1,246 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.filters;
+
+
+import htsjdk.samtools.Cigar;
+import htsjdk.samtools.SAMFileHeader;
+import htsjdk.samtools.SAMRecord;
+import htsjdk.samtools.TextCigarCodec;
+import org.broadinstitute.gatk.engine.GenomeAnalysisEngine;
+import org.broadinstitute.gatk.utils.ValidationExclusion;
+import org.broadinstitute.gatk.engine.datasources.reads.SAMDataSource;
+import org.broadinstitute.gatk.utils.sam.ArtificialSAMUtils;
+import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
+import org.testng.Assert;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+import org.broadinstitute.gatk.utils.exceptions.UserException;
+import org.broadinstitute.gatk.utils.exceptions.UserException.UnsupportedCigarOperatorException;
+
+import java.lang.annotation.*;
+import java.lang.reflect.Method;
+import java.util.*;
+
+
+/**
+ * Tests for the MalformedReadFilter
+ *
+ * @author Eric Banks
+ * @since 3/14/13
+ */
+public class MalformedReadFilterUnitTest extends ReadFilterTest {
+
+    //////////////////////////////////////
+    // Test the checkSeqStored() method //
+    //////////////////////////////////////
+
+    @Test(enabled = true)
+    public void testCheckSeqStored () {
+
+        final GATKSAMRecord goodRead = ArtificialSAMUtils.createArtificialRead(new byte[]{(byte)'A'}, new byte[]{(byte)'A'}, "1M");
+        final GATKSAMRecord badRead = ArtificialSAMUtils.createArtificialRead(new byte[]{}, new byte[]{}, "1M");
+        badRead.setReadString("*");
+
+        Assert.assertTrue(MalformedReadFilter.checkSeqStored(goodRead, true));
+        Assert.assertFalse(MalformedReadFilter.checkSeqStored(badRead, true));
+
+        try {
+            MalformedReadFilter.checkSeqStored(badRead, false);
+            Assert.assertTrue(false, "We should have exceptioned out in the previous line");
+        } catch (UserException e) { }
+    }
+
+    @Test(enabled = true, dataProvider= "UnsupportedCigarOperatorDataProvider")
+    @CigarOperatorTest(CigarOperatorTest.Outcome.FILTER)
+    public void testCigarNOperatorFilterTruePositive(String cigarString) {
+
+       final MalformedReadFilter filter = buildMalformedReadFilter(true);
+       final SAMRecord nContainingCigarRead = buildSAMRecord(cigarString);
+       Assert.assertTrue(filter.filterOut(nContainingCigarRead),
+                  " Did not filter out an N-containing CIGAR read");
+    }
+
+    @Test(enabled = true, dataProvider= "UnsupportedCigarOperatorDataProvider")
+    @CigarOperatorTest(CigarOperatorTest.Outcome.ACCEPT)
+    public void testCigarNOperatorFilterTrueNegative(String cigarString) {
+
+        final MalformedReadFilter filter = buildMalformedReadFilter(true);
+        final SAMRecord nonNContainingCigarRead = buildSAMRecord(cigarString);
+        Assert.assertFalse(filter.filterOut(nonNContainingCigarRead),
+                    " Filtered out a non-N containing CIGAR read");
+    }
+
+    @Test(enabled = true,
+            expectedExceptions = UnsupportedCigarOperatorException.class,
+            dataProvider= "UnsupportedCigarOperatorDataProvider")
+    @CigarOperatorTest(CigarOperatorTest.Outcome.EXCEPTION)
+    public void testCigarNOperatorFilterException(final String cigarString) {
+
+        final MalformedReadFilter filter = buildMalformedReadFilter(false);
+        final SAMRecord nContainingCigarRead = buildSAMRecord(cigarString);
+
+        filter.filterOut(nContainingCigarRead);
+    }
+
+    @Test(enabled = true, dataProvider="UnsupportedCigarOperatorDataProvider")
+    @CigarOperatorTest(CigarOperatorTest.Outcome.ACCEPT)
+    public void testCigarNOperatorFilterControl(final String cigarString) {
+
+        final MalformedReadFilter filter = buildMalformedReadFilter(false);
+        final SAMRecord nonNContainingCigarRead = buildSAMRecord(cigarString);
+
+        Assert.assertFalse(filter.filterOut(nonNContainingCigarRead));
+    }
+
+    protected SAMRecord buildSAMRecord(final String cigarString) {
+        final Cigar nContainingCigar = TextCigarCodec.decode(cigarString);
+        return  this.createRead(nContainingCigar, 1, 0, 10);
+    }
+
+    protected MalformedReadFilter buildMalformedReadFilter(final boolean filterRNO) {
+        return buildMalformedReadFiter(filterRNO,new ValidationExclusion.TYPE[] {});
+    }
+
+    protected MalformedReadFilter buildMalformedReadFiter(boolean filterRNO, final ValidationExclusion.TYPE... excl) {
+        final ValidationExclusion ve = new ValidationExclusion(Arrays.asList(excl));
+
+        final MalformedReadFilter filter = new MalformedReadFilter();
+
+        final SAMFileHeader h = getHeader();
+        final SAMDataSource ds =  getDataSource();
+
+        final GenomeAnalysisEngine gae = new GenomeAnalysisEngine() {
+            @Override
+            public SAMFileHeader getSAMFileHeader() {
+                return h;
+            }
+
+            @Override
+            public SAMDataSource getReadsDataSource() {
+                return ds;
+            }
+        };
+        filter.initialize(gae);
+        filter.filterReadsWithNCigar = filterRNO;
+        return filter;
+    }
+
+    @Retention(RetentionPolicy.RUNTIME)
+    @Target(ElementType.METHOD)
+    @Inherited
+    protected @interface CigarOperatorTest {
+
+        enum Outcome {
+            ANY,ACCEPT,FILTER,EXCEPTION,IGNORE;
+
+            public boolean appliesTo (String cigar) {
+                boolean hasN = cigar.indexOf('N') != -1;
+                switch (this) {
+                    case ANY: return true;
+                    case ACCEPT: return !hasN;
+                    case IGNORE: return hasN;
+                    case FILTER:
+                    case EXCEPTION:
+                    default:
+                        return hasN;
+
+                }
+            }
+        }
+
+        Outcome value() default Outcome.ANY;
+    }
+
+    /**
+     * Cigar test data for the unsupported operator tests.
+     * Each element of this array is the CIGAR string for one test case; whether a
+     * given case should be filtered, accepted, or raise an exception is determined by
+     * the {@link CigarOperatorTest} annotation on the consuming test method.
+     */
+    private static final String[] TEST_CIGARS =  {
+       "101M10D20I10M",
+       "6M14N5M",
+       "1N",
+       "101M",
+       "110N",
+       "2N4M",
+       "4M2N",
+       "3M1I1M",
+       "1M2I2M",
+       "1M10N1I1M",
+       "1M1I1D",
+       "11N12M1I34M12N"
+    };
+
+    @DataProvider(name= "UnsupportedCigarOperatorDataProvider")
+    public Iterator<Object[]> unsupportedOperatorDataProvider(final Method testMethod) {
+        final CigarOperatorTest a = resolveCigarOperatorTestAnnotation(testMethod);
+        final List<Object[]> result = new LinkedList<Object[]>();
+        for (final String cigarString : TEST_CIGARS) {
+            if (a == null || a.value().appliesTo(cigarString)) {
+                result.add(new Object[] { cigarString });
+            }
+        }
+        return result.iterator();
+    }
+
+    /**
+     * Gets the most specific {@link CigarOperatorTest} annotation for the
+     * signature of the test method provided.
+     * <p/>
+     * This in-house implementation is required due to the fact that method
+     * annotations do not have inheritance.
+     *
+     * @param m targeted test method.
+     * @return <code>null</code> if there is no {@link CigarOperatorTest}
+     * annotation in this or overridden methods.
+     */
+    private CigarOperatorTest resolveCigarOperatorTestAnnotation(final Method m) {
+       CigarOperatorTest res = m.getAnnotation(CigarOperatorTest.class);
+       if (res != null) {
+           return res;
+       }
+       Class<?> c = this.getClass();
+       Class<?> p = c.getSuperclass();
+       while (p != null && p != Object.class) {
+           try {
+             final Method met = p.getDeclaredMethod(m.getName(),
+                     m.getParameterTypes());
+             res = met.getAnnotation(CigarOperatorTest.class);
+             if (res != null) {
+                 break;
+             }
+           } catch (NoSuchMethodException e) {
+             // Its ok; nothing to do here, just keep looking.
+           }
+           c = p;
+           p = c.getSuperclass();
+       }
+       return res;
+    }
+
+}
diff --git a/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/filters/NDNCigarReadTransformerUnitTest.java b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/filters/NDNCigarReadTransformerUnitTest.java
new file mode 100644
index 0000000..6a35f0b
--- /dev/null
+++ b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/filters/NDNCigarReadTransformerUnitTest.java
@@ -0,0 +1,70 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.filters;
+
+import htsjdk.samtools.Cigar;
+import org.broadinstitute.gatk.utils.sam.CigarUtils;
+import org.testng.Assert;
+import org.testng.annotations.BeforeClass;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+/**
+ * @author ami
+ * @since 04/22/14
+ */
+public class NDNCigarReadTransformerUnitTest {
+
+
+    @DataProvider(name = "filteringIteratorTestData")
+    public String[][] getFilteringIteratorTestData() {
+        return new String[][] {
+                {"1M1N1N1M","1M1N1N1M"},           // NN elements
+                {"1M1N1D4M","1M1N1D4M"},           // ND
+                {"1M1N3M","1M1N3M"},               // N
+                {"1M1N2I1N3M","1M1N2I1N3M"},       // NIN
+                {"1M1N3D2N1M","1M6N1M"},
+                {"1M2N2D2N1M1D3N1D1N1M2H","1M6N1M1D5N1M2H"},
+                {"1H2S1M1N3D2N1M","1H2S1M6N1M"},
+                {"10M628N2D203N90M","10M833N90M"}
+        };
+    }
+
+    NDNCigarReadTransformer filter;
+
+    @BeforeClass
+    public void init() {
+        filter = new NDNCigarReadTransformer();
+    }
+
+    @Test(dataProvider = "filteringIteratorTestData")
+    public void testCigarRefactoring (final String originalCigarString, final String expectedString) {
+        Cigar originalCigar = CigarUtils.cigarFromString(originalCigarString);
+        String actualString = filter.refactorNDNtoN(originalCigar).toString();
+        Assert.assertEquals(actualString, expectedString, "cigar string "+ originalCigarString+" should become: "+expectedString+" but got: "+actualString);
+    }
+
+}
diff --git a/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/filters/OverclippedReadFilterUnitTest.java b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/filters/OverclippedReadFilterUnitTest.java
new file mode 100644
index 0000000..2e28ef0
--- /dev/null
+++ b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/filters/OverclippedReadFilterUnitTest.java
@@ -0,0 +1,105 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.filters;
+
+
+import htsjdk.samtools.Cigar;
+import htsjdk.samtools.SAMRecord;
+import htsjdk.samtools.TextCigarCodec;
+import org.testng.Assert;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+import java.util.*;
+
+
+/**
+ * Tests for the OverclippedReadFilter
+ */
+public class OverclippedReadFilterUnitTest extends ReadFilterTest {
+
+    @Test(enabled = true, dataProvider= "OverclippedDataProvider")
+    public void testOverclippedFilter(final String cigarString, boolean doNotRequireSoftclipsOnBothEnds, final boolean expectedResult) {
+
+        final OverclippedReadFilter filter = new OverclippedReadFilter();
+        filter.doNotRequireSoftclipsOnBothEnds = doNotRequireSoftclipsOnBothEnds;
+        final SAMRecord read = buildSAMRecord(cigarString);
+        Assert.assertEquals(filter.filterOut(read), expectedResult, cigarString);
+    }
+
+    private SAMRecord buildSAMRecord(final String cigarString) {
+        final Cigar cigar = TextCigarCodec.decode(cigarString);
+        return this.createRead(cigar, 1, 0, 10);
+    }
+
+    @DataProvider(name= "OverclippedDataProvider")
+    public Iterator<Object[]> overclippedDataProvider() {
+        final List<Object[]> result = new LinkedList<Object[]>();
+
+        result.add(new Object[] { "1S10M1S", false, true });
+        result.add(new Object[] { "1S10X1S", false, true });
+        result.add(new Object[] { "1H1S10M1S1H", false, true });
+        result.add(new Object[] { "1S40M1S", false, false});
+        result.add(new Object[] { "1S40X1S", false, false });
+        result.add(new Object[] { "1H10M1S", false, false});
+        result.add(new Object[] { "1S10M1H", false, false});
+
+        result.add(new Object[] { "10M1S", false, false});
+        result.add(new Object[] { "1S10M", false, false});
+
+        result.add(new Object[] { "10M1S", true, true});
+        result.add(new Object[] { "1S10M", true, true});
+
+        result.add(new Object[] { "1S10M10D10M1S", false, true });
+        result.add(new Object[] { "1S1M40I1S", false, false });
+
+        result.add(new Object[] { "1S10I1S", false, true });
+        result.add(new Object[] { "1S40I1S", false, false });
+        result.add(new Object[] { "1S40I1S", true, false });
+
+        result.add(new Object[] { "25S40I25M", true, false });
+
+        //Read is too short once soft-clipping removed
+        result.add(new Object[] { "25S25M", true, true });
+        result.add(new Object[] { "25S25X", true, true });
+        result.add(new Object[] { "25S25H", true, true });
+        result.add(new Object[] { "25S25H", false, false });
+
+        result.add(new Object[] { "25S25M25S", false, true });
+        result.add(new Object[] { "25M25S", true, true });
+        result.add(new Object[] { "25S25M", true, true });
+
+        result.add(new Object[] { "25S35S", true, true });
+
+        //Read long enough even with soft clipping removed
+        result.add(new Object[] { "25S35M25S", true, false });
+        result.add(new Object[] { "35M25S", true, false });
+        result.add(new Object[] { "25S35M", true, false });
+
+        return result.iterator();
+    }
+
+}
diff --git a/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/filters/ReadFilterTest.java b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/filters/ReadFilterTest.java
new file mode 100644
index 0000000..9c3e390
--- /dev/null
+++ b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/filters/ReadFilterTest.java
@@ -0,0 +1,373 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.filters;
+
+import htsjdk.samtools.*;
+import org.broadinstitute.gatk.utils.BaseTest;
+import org.broadinstitute.gatk.utils.ValidationExclusion;
+import org.broadinstitute.gatk.engine.datasources.reads.SAMDataSource;
+import org.broadinstitute.gatk.utils.sam.SAMReaderID;
+import org.broadinstitute.gatk.utils.downsampling.DownsamplingMethod;
+import org.broadinstitute.gatk.engine.resourcemanagement.ThreadAllocation;
+import org.broadinstitute.gatk.utils.GenomeLocParser;
+import org.broadinstitute.gatk.utils.sam.ArtificialSAMUtils;
+import org.testng.annotations.AfterClass;
+import org.testng.annotations.BeforeClass;
+
+import java.io.File;
+import java.util.*;
+
+/**
+ * Class ReadFilterTest
+ * <p/>
+ * This is the base test class for read filter test classes.  All read
+ * filter test cases should extend from this
+ * class; it sets ups a header mock up to test read filtering.
+ *
+ * Feel free to override non-final method to modify the behavior
+ * (i.e. change how read group id are formatted, or complete a header).
+ *
+ * <p/>
+ * You can statically determine the number of read groups involved
+ * in the test by calling {@link #ReadFilterTest(int)} in your constructor.
+ * <p/>
+ *
+ * Notice that the same header object is shared by all tests and
+ * it is initialized by TestNG (calling {@link #beforeClass()}).
+ *
+ * @author Valentin Ruano Rubio
+ * @date May 23, 2013
+ */
+public class ReadFilterTest extends BaseTest {
+
+    private static final int DEFAULT_READ_GROUP_COUNT = 5;
+    private static final int DEFAULT_READER_COUNT = 1;
+    private static final String DEFAULT_READ_GROUP_PREFIX = "ReadGroup";
+    private static final String DEFAULT_PLATFORM_UNIT_PREFIX = "Lane";
+    private static final String DEFAULT_SAMPLE_NAME_PREFIX = "Sample";
+    private static final String DEFAULT_PLATFORM_PREFIX = "Platform";
+    private static final int DEFAULT_CHROMOSOME_COUNT = 1;
+    private static final int DEFAULT_CHROMOSOME_START_INDEX = 1;
+    private static final int DEFAULT_CHROMOSOME_SIZE = 1000;
+    private static final String DEFAULT_SAM_FILE_FORMAT = "readfile-%3d.bam";
+
+    private final int groupCount;
+
+    private  SAMFileHeader header;
+
+    private SAMDataSource dataSource;
+
+    /**
+     * Constructs a new read-filter test providing the number of read
+     * groups in the file.
+     *
+     * @param groupCount number of read-group in the fictional SAM file,
+     *                   must be equal or greater than 1.
+     */
+    protected ReadFilterTest(final int groupCount) {
+        if (groupCount < 1) {
+            throw new IllegalArgumentException(
+                    "the read group count must at least be 1");
+        }
+        this.groupCount = groupCount;
+    }
+
+
+    /**
+     * Gets the data source.
+     *
+     * @throws IllegalStateException if the data source was not initialized
+     *          invoking {@link #beforeClass()}
+     * @return never <code>null</code>
+     */
+    protected final SAMDataSource getDataSource() {
+        checkDataSourceExists();
+        return dataSource;
+    }
+
+    /**
+     * Returns the mock-up SAM file header for testing.
+     *
+     * @throws IllegalStateException if the header was not initialized
+     *          invoking {@link #beforeClass()}
+     * @return never <code>null</code>
+     */
+    protected final SAMFileHeader getHeader() {
+        checkHeaderExists();
+        return header;
+    }
+
+    /**
+     * Construct a read filter test with the default number of groups
+     *  ({@link #DEFAULT_READ_GROUP_COUNT}.
+     */
+    public ReadFilterTest() {
+        this(DEFAULT_READ_GROUP_COUNT);
+    }
+
+    /**
+     * Return the number of read groups involved in the test
+     * @return <code>1</code> or greater.
+     */
+    protected final int getReadGroupCount() {
+        return groupCount;
+    }
+
+    /**
+     * Composes the Id for the read group given its index.
+     *
+     * This methods must return a unique distinct ID for each possible index and
+     * it must be the same value each time it is invoked.
+     *
+     * @param index the index of the targeted read group in the range
+     *              [1,{@link #getReadGroupCount()}]
+     * @return never <code>null</code> and must be unique to each possible
+     *         read group index.
+     */
+    protected String composeReadGroupId(final int index) {
+        checkReadGroupIndex(index);
+        return DEFAULT_READ_GROUP_PREFIX + index;
+    }
+
+    /**
+     * Composes the Platform name for the read group given its index.
+     *
+     * This method must always return the same value give an index.
+     *
+     * @param index the index of the targeted read group in the range
+     *              [1,{@link #getReadGroupCount()}]
+     * @return never <code>null</code>.
+     */
+    protected String composePlatformName(final int index) {
+        checkReadGroupIndex(index);
+        return DEFAULT_PLATFORM_PREFIX + (((index-1)%2)+1);
+    }
+
+
+    /**
+     * Composes the Platform unit name for the read group given its index.
+     *
+     * @param index the index of the targeted read group in the range
+     *              [1,{@link #getReadGroupCount()}]
+     * @return never <code>null</code>.
+     */
+    protected String composePlatformUnitName(final int index) {
+        checkReadGroupIndex(index);
+        return DEFAULT_PLATFORM_UNIT_PREFIX + (((index-1)%3)+1);
+    }
+
+
+
+    /**
+     * Checks the correctness of a given read group index.
+     *
+     * A correct index is any value in the range [1,{@link #getReadGroupCount()}].
+     *
+     * @param index the target index.
+     * @throws IllegalArgumentException if the input index is not correct.
+     */
+    protected final void checkReadGroupIndex(final int index) {
+        checkIndex(index,groupCount,"read group");
+    }
+
+
+    private void checkIndex(final int index, final int max, CharSequence name) {
+        if (index < 1 || index > max) {
+            throw new IllegalArgumentException(
+                    name + " index ("
+                    + index
+                    + ") is out of bounds [1," + max + "]");
+        }
+    }
+
+
+    /**
+     * Checks whether the header was initialized.
+     *
+     * @throws IllegalStateException if the header was not yet initialized.
+     */
+    protected final void checkHeaderExists() {
+        if (header == null) {
+            throw new IllegalStateException(
+                    "header has not been initialized;"
+                    + " beforeClass() was not invoked");
+        }
+    }
+
+    /**
+     * Checks whether the data source was initialized.
+     *
+     * @throws IllegalStateException if the data source was not yet initialized.
+     */
+    protected final void checkDataSourceExists() {
+        if (dataSource == null) {
+            throw new IllegalStateException(
+                    "data source has not been initialized;"
+                            + " beforeClass() was not invoked");
+        }
+    }
+
+    /**
+     * Returns the ID for a read group given its index.
+     *
+     * @param index the index of the targeted read group in the range
+     *              [1,{@link #getReadGroupCount()}]
+     * @return never <code>null</code> and must be unique to each
+     *              possible read group index.
+     */
+    protected final String getReadGroupId(final int index) {
+        checkReadGroupIndex(index);
+        return getHeader().getReadGroups().get(index - 1).getReadGroupId();
+    }
+
+    /**
+     * Returns the platform name for a read group given its index.
+     *
+     * @param group the index of the targeted read group in the range
+     *              [1,{@link #getReadGroupCount()}]
+     * @return never <code>null</code>.
+     */
+    protected final String getPlatformName(final int group) {
+        checkReadGroupIndex(group);
+        return getHeader().getReadGroups().get(group - 1).getPlatform();
+    }
+
+    /**
+     * Returns the platform unit for a read group given its index.
+     *
+     * @param group the index of the targeted read group in the range
+     *              [1,{@link #getReadGroupCount()}]
+     * @return never <code>null</code>.
+     */
+    protected final String getPlatformUnit(final int group) {
+        checkReadGroupIndex(group);
+        return getHeader().getReadGroups().get(group - 1).getPlatformUnit();
+    }
+
+
+    /**
+     * Composes the mock up SAM file header.
+     *
+     * It must return an equivalent (equal) value each time it is invoked.
+     *
+     * @return never <code>null</code>.
+     */
+    protected SAMFileHeader composeHeader() {
+
+        return ArtificialSAMUtils.createArtificialSamHeader(
+                DEFAULT_CHROMOSOME_COUNT, DEFAULT_CHROMOSOME_START_INDEX,
+                DEFAULT_CHROMOSOME_SIZE);
+    }
+
+    @BeforeClass
+    public void beforeClass() {
+
+        header = composeHeader();
+        dataSource = composeDataSource();
+        final List<String> readGroupIDs = new ArrayList<String>();
+        final List<String> sampleNames = new ArrayList<String>();
+
+        for (int i = 1; i <= getReadGroupCount(); i++) {
+            final String readGroupId = composeReadGroupId(i);
+            readGroupIDs.add(readGroupId);
+            sampleNames.add(readGroupId);
+        }
+
+        ArtificialSAMUtils.createEnumeratedReadGroups(
+                header, readGroupIDs, sampleNames);
+
+        for (int i = 1; i <= getReadGroupCount(); i++) {
+            final String readGroupId = readGroupIDs.get(i-1);
+            final SAMReadGroupRecord groupRecord = header.getReadGroup(readGroupId);
+            groupRecord.setAttribute("PL", composePlatformName(i));
+            groupRecord.setAttribute("PU", composePlatformUnitName(i));
+        }
+
+    }
+
+    protected ValidationExclusion composeValidationExclusion() {
+        return new ValidationExclusion();
+    }
+
+    protected SAMDataSource composeDataSource() {
+        checkHeaderExists();
+        final File referenceFile = null; // Not used in this test.
+        final Set<SAMReaderID> readerIDs = new HashSet<>(1);
+        final ThreadAllocation ta = new ThreadAllocation();
+        final Integer numFileHandles = 1; // I believe that any value would do but need to confirm.
+        final boolean useOriginalBaseQualities = true;
+        final ValidationStringency strictness = ValidationStringency.LENIENT;
+        final Integer readBufferSize = 1; // not relevant.
+        final DownsamplingMethod downsamplingMethod = DownsamplingMethod.NONE;
+        final ValidationExclusion exclusionList = composeValidationExclusion();
+        final Collection<ReadFilter> supplementalFilters = Collections.EMPTY_SET;
+        final boolean includeReadsWithDeletionAtLoci = true;
+
+        final GenomeLocParser glp = new GenomeLocParser(header.getSequenceDictionary());
+        final SAMDataSource res = new SAMDataSource(
+                referenceFile,
+                readerIDs,
+                ta,
+                numFileHandles,
+                glp,
+                useOriginalBaseQualities,
+                strictness,
+                readBufferSize,
+                downsamplingMethod,
+                exclusionList,
+                supplementalFilters,
+                includeReadsWithDeletionAtLoci);
+
+        return res;
+    }
+
+    @AfterClass
+    public void afterClass() {
+        header = null;
+        dataSource = null;
+    }
+
+    /**
+     * Creates a read record.
+     *
+     * @param cigar the new record CIGAR.
+     * @param group the new record group index that must be in the range \
+     *              [1,{@link #getReadGroupCount()}]
+     * @param reference the reference sequence index (0-based)
+     * @param start the start position of the read alignment in the reference
+     *              (1-based)
+     * @return never <code>null</code>
+     */
+    protected SAMRecord createRead(final Cigar cigar, final int group, final int reference, final int start) {
+        final SAMRecord record = ArtificialSAMUtils.createArtificialRead(cigar);
+        record.setHeader(getHeader());
+        record.setAlignmentStart(start);
+        record.setReferenceIndex(reference);
+        record.setAttribute(SAMTag.RG.toString(), getReadGroupId(group));
+        return record;
+
+    }
+}
diff --git a/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/filters/ReadGroupBlackListFilterUnitTest.java b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/filters/ReadGroupBlackListFilterUnitTest.java
new file mode 100644
index 0000000..8f11220
--- /dev/null
+++ b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/filters/ReadGroupBlackListFilterUnitTest.java
@@ -0,0 +1,247 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.filters;
+
+import org.testng.Assert;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+import org.broadinstitute.gatk.utils.sam.ArtificialSAMUtils;
+import org.testng.annotations.Test;
+
+import htsjdk.samtools.SAMRecord;
+import htsjdk.samtools.SAMReadGroupRecord;
+
+import java.util.List;
+import java.util.ArrayList;
+import java.util.Collections;
+
+/**
+ * Unit tests for {@link ReadGroupBlackListFilter}, covering blacklist entries
+ * given directly as {@code TAG:value} pairs, entries loaded from a blacklist
+ * file, and entries loaded from a file that lists other blacklist files.
+ */
+public class ReadGroupBlackListFilterUnitTest extends ReadFilterTest {
+
+    /** An entry that is neither a valid TAG:value pair nor an existing file must be rejected. */
+    @Test(expectedExceptions=ReviewedGATKException.class)
+    public void testBadFilter() {
+        List<String> badFilters = Collections.singletonList("bad");
+        new ReadGroupBlackListFilter(badFilters);
+    }
+
+    /** An entry with an unsupported tag must be rejected. */
+    @Test(expectedExceptions=ReviewedGATKException.class)
+    public void testBadFilterTag() {
+        List<String> badFilters = Collections.singletonList("bad:filter");
+        new ReadGroupBlackListFilter(badFilters);
+    }
+
+    /** An entry pointing to a non-existent blacklist file must be rejected. */
+    @Test(expectedExceptions=ReviewedGATKException.class)
+    public void testBadFilterFile() {
+        List<String> badFilters = Collections.singletonList("/foo/bar/rgbl.txt");
+        new ReadGroupBlackListFilter(badFilters);
+    }
+
+    /** Blacklisting one RG id filters exactly the read in that group. */
+    @Test
+    public void testFilterReadGroup() {
+        SAMRecord filteredRecord = ArtificialSAMUtils.createArtificialRead(getHeader(), "readUno", 0, 1, 20);
+        filteredRecord.setAttribute("RG", getReadGroupId(1));
+
+        SAMRecord unfilteredRecord = ArtificialSAMUtils.createArtificialRead(getHeader(), "readDos", 0, 2, 20);
+        unfilteredRecord.setAttribute("RG", getReadGroupId(2));
+
+        List<String> filterList = new ArrayList<String>();
+        filterList.add("RG:" + getReadGroupId(1));
+
+        ReadGroupBlackListFilter filter = new ReadGroupBlackListFilter(filterList);
+        Assert.assertTrue(filter.filterOut(filteredRecord));
+        Assert.assertFalse(filter.filterOut(unfilteredRecord));
+    }
+
+    /** Blacklisting one platform unit filters exactly the read whose group carries it. */
+    @Test
+    public void testFilterPlatformUnit() {
+        SAMRecord filteredRecord = ArtificialSAMUtils.createArtificialRead(getHeader(), "readUno", 0, 1, 20);
+        filteredRecord.setAttribute("RG", getReadGroupId(1));
+
+        SAMRecord unfilteredRecord = ArtificialSAMUtils.createArtificialRead(getHeader(), "readDos", 0, 2, 20);
+        unfilteredRecord.setAttribute("RG", getReadGroupId(2));
+
+        List<String> filterList = new ArrayList<String>();
+        filterList.add("PU:" + getPlatformUnit(1));
+
+        ReadGroupBlackListFilter filter = new ReadGroupBlackListFilter(filterList);
+        Assert.assertTrue(filter.filterOut(filteredRecord));
+        Assert.assertFalse(filter.filterOut(unfilteredRecord));
+    }
+
+    /**
+     * Creates {@code recordsPerGroup} artificial records for every read group
+     * in the test header, each tagged with its group's RG id and placed at
+     * consecutive alignment starts.
+     *
+     * @param recordsPerGroup number of records to create per read group
+     * @return never {@code null}
+     */
+    private List<SAMRecord> buildRecords(final int recordsPerGroup) {
+        List<SAMRecord> records = new ArrayList<SAMRecord>();
+        int alignmentStart = 0;
+        for (int x = 1; x <= getReadGroupCount(); x++) {
+            SAMReadGroupRecord groupRecord = getHeader().getReadGroup(getReadGroupId(x));
+            for (int y = 1; y <= recordsPerGroup; y++) {
+                SAMRecord record = ArtificialSAMUtils.createArtificialRead(getHeader(), "readUno", 0, ++alignmentStart, 20);
+                record.setAttribute("RG", groupRecord.getReadGroupId());
+                records.add(record);
+            }
+        }
+        return records;
+    }
+
+    /** Blacklisting two read groups by RG id removes exactly the reads in those groups. */
+    @Test
+    public void testFilterOutByReadGroup() {
+        int recordsPerGroup = 3;
+        List<SAMRecord> records = buildRecords(recordsPerGroup);
+
+        List<String> filterList = new ArrayList<String>();
+        filterList.add("RG:" + getReadGroupId(1));
+        filterList.add("RG:" + getReadGroupId(3));
+
+        ReadGroupBlackListFilter filter = new ReadGroupBlackListFilter(filterList);
+        int filtered = 0;
+        int unfiltered = 0;
+        for (SAMRecord record : records) {
+            String readGroupName = record.getReadGroup().getReadGroupId();
+            if (filter.filterOut(record)) {
+                if (!filterList.contains("RG:" + readGroupName))
+                    Assert.fail("Read group " + readGroupName + " was filtered");
+                filtered++;
+            } else {
+                if (filterList.contains("RG:" + readGroupName))
+                    Assert.fail("Read group " + readGroupName + " was not filtered");
+                unfiltered++;
+            }
+        }
+
+        // Two groups were blacklisted; all reads in the remaining groups pass.
+        int filteredExpected = recordsPerGroup * 2;
+        int unfilteredExpected = recordsPerGroup * (getReadGroupCount() - 2);
+        Assert.assertEquals(filtered, filteredExpected, "Filtered");
+        Assert.assertEquals(unfiltered, unfilteredExpected, "Unfiltered");
+    }
+
+    /** Blacklisting by platform unit removes the reads of every group sharing that PU. */
+    @Test
+    public void testFilterOutByAttribute() {
+        int recordsPerGroup = 3;
+        List<SAMRecord> records = buildRecords(recordsPerGroup);
+
+        List<String> filterList = new ArrayList<String>();
+        filterList.add("PU:" + getPlatformUnit(1));
+
+        ReadGroupBlackListFilter filter = new ReadGroupBlackListFilter(filterList);
+        int filtered = 0;
+        int unfiltered = 0;
+        for (SAMRecord record : records) {
+            String platformUnit = (String) record.getReadGroup().getAttribute("PU");
+            if (filter.filterOut(record)) {
+                if (!filterList.contains("PU:" + platformUnit))
+                    Assert.fail("Platform unit " + platformUnit + " was filtered");
+                filtered++;
+            } else {
+                if (filterList.contains("PU:" + platformUnit))
+                    Assert.fail("Platform unit " + platformUnit + " was not filtered");
+                unfiltered++;
+            }
+        }
+
+        // NOTE(review): 6 filtered / 9 unfiltered implies two read groups share
+        // platform unit 1 in the test header -- confirm against ReadFilterTest.
+        int filteredExpected = 6;
+        int unfilteredExpected = 9;
+        Assert.assertEquals(filtered, filteredExpected, "Filtered");
+        Assert.assertEquals(unfiltered, unfilteredExpected, "Unfiltered");
+    }
+
+    /**
+     * Shared check for file-based blacklists: applies the filter built from
+     * {@code blackListEntry} and verifies that exactly the reads belonging to
+     * ReadGroup3 and ReadGroup4 (the groups named in the test files) are removed.
+     *
+     * @param blackListEntry path of the blacklist (or blacklist-of-blacklists) file
+     * @param recordsPerGroup number of records created per read group
+     */
+    private void checkFileBlackList(final String blackListEntry, final int recordsPerGroup) {
+        List<SAMRecord> records = buildRecords(recordsPerGroup);
+
+        List<String> filterList = new ArrayList<String>();
+        filterList.add(blackListEntry);
+
+        ReadGroupBlackListFilter filter = new ReadGroupBlackListFilter(filterList);
+        int filtered = 0;
+        int unfiltered = 0;
+        for (SAMRecord record : records) {
+            String readGroup = record.getReadGroup().getReadGroupId();
+            if (filter.filterOut(record)) {
+                if (!("ReadGroup3".equals(readGroup) || "ReadGroup4".equals(readGroup)))
+                    Assert.fail("Read group " + readGroup + " was filtered");
+                filtered++;
+            } else {
+                if ("ReadGroup3".equals(readGroup) || "ReadGroup4".equals(readGroup))
+                    Assert.fail("Read group " + readGroup + " was not filtered");
+                unfiltered++;
+            }
+        }
+
+        Assert.assertEquals(filtered, recordsPerGroup * 2, "Filtered");
+        Assert.assertEquals(unfiltered, recordsPerGroup * (getReadGroupCount() - 2), "Unfiltered");
+    }
+
+    /** A blacklist file naming ReadGroup3 and ReadGroup4 filters exactly those groups. */
+    @Test
+    public void testFilterOutByFile() {
+        checkFileBlackList(privateTestDir + "readgroupblacklisttest.txt", 3);
+    }
+
+    /** A file listing other blacklist files behaves the same as a direct blacklist. */
+    @Test
+    public void testFilterOutByListFile() {
+        checkFileBlackList(privateTestDir + "readgroupblacklisttestlist.txt", 3);
+    }
+}
diff --git a/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/filters/UnsafeMalformedReadFilterUnitTest.java b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/filters/UnsafeMalformedReadFilterUnitTest.java
new file mode 100644
index 0000000..38b1cb7
--- /dev/null
+++ b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/filters/UnsafeMalformedReadFilterUnitTest.java
@@ -0,0 +1,50 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.filters;
+
+
+import org.broadinstitute.gatk.utils.ValidationExclusion;
+
+import java.util.Collections;
+
+
+/**
+ * Exercises the {@link MalformedReadFilter} behaviour inherited from
+ * {@link AllowNCigarMalformedReadFilterUnitTest}, but with every validation
+ * check disabled through the unsafe {@link ValidationExclusion.TYPE#ALL} flag.
+ *
+ * @author Valentin Ruano-Rubio
+ * @since 6/6/13
+ */
+public class UnsafeMalformedReadFilterUnitTest extends AllowNCigarMalformedReadFilterUnitTest {
+
+    @Override
+    protected ValidationExclusion composeValidationExclusion() {
+        // Build the exclusion from a one-element list holding the ALL marker,
+        // which switches off every validation check.
+        final java.util.List<ValidationExclusion.TYPE> exclusions =
+                Collections.singletonList(ValidationExclusion.TYPE.ALL);
+        return new ValidationExclusion(exclusions);
+    }
+}
diff --git a/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/io/OutputTrackerUnitTest.java b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/io/OutputTrackerUnitTest.java
new file mode 100644
index 0000000..1e00065
--- /dev/null
+++ b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/io/OutputTrackerUnitTest.java
@@ -0,0 +1,84 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.io;
+
+import org.broadinstitute.gatk.engine.io.stubs.OutputStreamStub;
+import org.broadinstitute.gatk.utils.BaseTest;
+import org.broadinstitute.gatk.utils.exceptions.UserException;
+import org.testng.annotations.AfterClass;
+import org.testng.annotations.BeforeClass;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+import java.io.File;
+
+/**
+ * Tests for {@code OutputTracker.validateOutputPath}, covering writable,
+ * unwritable and non-traversable output locations.
+ */
+public class OutputTrackerUnitTest extends BaseTest {
+
+    private final OutputTracker tracker = new DirectOutputTracker();
+    private File unwriteableDir = null;
+    private File untraversableDir = null;
+
+    @BeforeClass
+    public void createDirectories() {
+        // NOTE(review): mkdir()/setWritable()/setExecutable() results are
+        // ignored; when run as root the permission changes have no effect and
+        // the "bad path" cases may spuriously succeed -- confirm on CI.
+        unwriteableDir = new File("unwriteable");
+        unwriteableDir.deleteOnExit();
+        unwriteableDir.mkdir();
+        unwriteableDir.setWritable(false);
+
+        untraversableDir = new File("untraversable");
+        untraversableDir.deleteOnExit();
+        untraversableDir.mkdir();
+        untraversableDir.setExecutable(false);
+    }
+
+    @AfterClass(alwaysRun = true)
+    public void restorePermissions() {
+        // Restore the permissions stripped above so that deleteOnExit() can
+        // actually remove the directories when the JVM terminates; otherwise
+        // each run leaks a read-only directory into the working directory.
+        if (unwriteableDir != null)
+            unwriteableDir.setWritable(true);
+        if (untraversableDir != null)
+            untraversableDir.setExecutable(true);
+    }
+
+    /** Output paths that must be rejected: missing, unwritable or non-traversable parents. */
+    @DataProvider(name = "BadOutputPaths")
+    public Object[][] makeBadOutputPaths() {
+        return new Object[][] {new String[] {"thisDirectoryDoesNotExist/stub.txt"},
+                new String[] {"/thisDirectoryDoesNotExist/dummy.txt"},
+                new String[] {unwriteableDir.getAbsolutePath()+"/dummy.txt"},
+                new String[] {untraversableDir.getAbsolutePath()+"/dummy.txt"}};
+    }
+
+    /** Output paths that must be accepted. */
+    @DataProvider(name = "GoodOutputPaths")
+    public Object[][] makeGoodOutputPaths() {
+        return new Object[][] {new String[] {publicTestDir+"stub.txt"},
+                new String[] {"dummy.txt"}};
+    }
+
+    @Test(dataProvider = "BadOutputPaths", expectedExceptions = UserException.CouldNotCreateOutputFile.class)
+    public void testInvalidOutputPath(final String path) {
+        tracker.validateOutputPath(new OutputStreamStub(new File(path)));
+    }
+
+    @Test(dataProvider = "GoodOutputPaths")
+    public void testValidOutputPath(final String path) {
+        tracker.validateOutputPath(new OutputStreamStub(new File(path)));
+    }
+
+    /** A stream-backed stub has no file, so validation must let it through. */
+    @Test
+    public void testOutputPathWithNullFile() {
+        tracker.validateOutputPath(new OutputStreamStub(System.out));
+    }
+}
diff --git a/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/io/stubs/ArgumentTypeDescriptorUnitTest.java b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/io/stubs/ArgumentTypeDescriptorUnitTest.java
new file mode 100644
index 0000000..ecffcfc
--- /dev/null
+++ b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/io/stubs/ArgumentTypeDescriptorUnitTest.java
@@ -0,0 +1,233 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.io.stubs;
+
+import htsjdk.variant.variantcontext.VariantContext;
+import it.unimi.dsi.fastutil.objects.ObjectArrayList;
+import htsjdk.samtools.SAMFileWriter;
+import org.broadinstitute.gatk.utils.BaseTest;
+import org.broadinstitute.gatk.engine.GenomeAnalysisEngine;
+import org.broadinstitute.gatk.utils.commandline.*;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+import htsjdk.variant.variantcontext.writer.VariantContextWriter;
+import org.testng.Assert;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+import java.io.File;
+import java.io.IOException;
+import java.io.PrintStream;
+import java.util.Arrays;
+import java.util.Collection;
+
+
+/**
+ * Tests for the custom {@link ArgumentTypeDescriptor}s used by the engine's
+ * output stubs (SAM writers, VCF writers and plain output streams), plus the
+ * rod-bindings-collection parsing helper.
+ */
+public class ArgumentTypeDescriptorUnitTest extends BaseTest {
+
+    ////////////////////////////////////////////////////////////////////
+    // This section tests the functionality of the @Output annotation //
+    ////////////////////////////////////////////////////////////////////
+
+    // Minimal command-line program exposing exactly the three output-stub
+    // type descriptors under test.
+    private class ATDTestCommandLineProgram extends CommandLineProgram {
+        public int execute() { return 0; }
+
+        @Override
+        public Collection<ArgumentTypeDescriptor> getArgumentTypeDescriptors() {
+            final GenomeAnalysisEngine engine = new GenomeAnalysisEngine();
+            return Arrays.asList( new SAMFileWriterArgumentTypeDescriptor(engine, System.out),
+                    new OutputStreamArgumentTypeDescriptor(engine, System.out),
+                    new VCFWriterArgumentTypeDescriptor(engine, System.out, null));
+        }
+
+        // Base class for the argument-source fixtures below; each subclass
+        // declares a single @Output field with a different combination of
+        // field type, required flag and defaultToStdout flag.
+        protected abstract class ATDTestOutputArgumentSource {
+            public abstract Object getOut();
+        }
+
+        // required = true (defaultToStdout irrelevant when required)
+        protected class OutputRequiredSamArgumentSource extends ATDTestOutputArgumentSource {
+            @Output(shortName="o", doc="output file", required = true)
+            public SAMFileWriter out;
+            public Object getOut() { return out; }
+        }
+
+        protected class OutputRequiredVcfArgumentSource extends ATDTestOutputArgumentSource {
+            @Output(shortName="o", doc="output file", required = true)
+            public VariantContextWriter out;
+            public Object getOut() { return out; }
+        }
+
+        protected class OutputRequiredStreamArgumentSource extends ATDTestOutputArgumentSource {
+            @Output(shortName="o", doc="output file", required = true)
+            public PrintStream out;
+            public Object getOut() { return out; }
+        }
+
+        // required = false, defaultToStdout = false: absent argument leaves the field null
+        protected class OutputNotRequiredNoDefaultSamArgumentSource extends ATDTestOutputArgumentSource {
+            @Output(shortName="o", doc="output file", required = false, defaultToStdout = false)
+            public SAMFileWriter out;
+            public Object getOut() { return out; }
+        }
+
+        protected class OutputNotRequiredNoDefaultVcfArgumentSource extends ATDTestOutputArgumentSource {
+            @Output(shortName="o", doc="output file", required = false, defaultToStdout = false)
+            public VariantContextWriter out;
+            public Object getOut() { return out; }
+        }
+
+        protected class OutputNotRequiredNoDefaultStreamArgumentSource extends ATDTestOutputArgumentSource {
+            @Output(shortName="o", doc="output file", required = false, defaultToStdout = false)
+            public PrintStream out;
+            public Object getOut() { return out; }
+        }
+
+        // required = false with the default defaultToStdout: absent argument yields a stdout stub
+        protected class OutputNotRequiredSamArgumentSource extends ATDTestOutputArgumentSource {
+            @Output(shortName="o", doc="output file", required = false)
+            public SAMFileWriter out;
+            public Object getOut() { return out; }
+        }
+
+        protected class OutputNotRequiredVcfArgumentSource extends ATDTestOutputArgumentSource {
+            @Output(shortName="o", doc="output file", required = false)
+            public VariantContextWriter out;
+            public Object getOut() { return out; }
+        }
+
+        protected class OutputNotRequiredStreamArgumentSource extends ATDTestOutputArgumentSource {
+            @Output(shortName="o", doc="output file", required = false)
+            public PrintStream out;
+            public Object getOut() { return out; }
+        }
+    }
+
+    // Each test case is {argumentSource, required, hasDefault, provided}:
+    // every fixture is exercised both with and without the argument supplied.
+    @DataProvider(name = "OutputProvider")
+    public Object[][] OutputProvider() {
+
+        ObjectArrayList<Object[]> tests = new ObjectArrayList<Object[]>();
+
+        final ATDTestCommandLineProgram clp = new ATDTestCommandLineProgram();
+
+        for ( final Object obj : Arrays.asList(clp.new OutputRequiredSamArgumentSource(), clp.new OutputRequiredVcfArgumentSource(), clp.new OutputRequiredStreamArgumentSource()) ) {
+            for ( final boolean provided : Arrays.asList(true, false) ) {
+                tests.add(new Object[]{obj, true, true, provided});
+            }
+        }
+
+        for ( final Object obj : Arrays.asList(clp.new OutputNotRequiredSamArgumentSource(), clp.new OutputNotRequiredVcfArgumentSource(), clp.new OutputNotRequiredStreamArgumentSource()) ) {
+            for ( final boolean provided : Arrays.asList(true, false) ) {
+                tests.add(new Object[]{obj, false, true, provided});
+            }
+        }
+
+        for ( final Object obj : Arrays.asList(clp.new OutputNotRequiredNoDefaultSamArgumentSource(), clp.new OutputNotRequiredNoDefaultVcfArgumentSource(), clp.new OutputNotRequiredNoDefaultStreamArgumentSource()) ) {
+            for ( final boolean provided : Arrays.asList(true, false) ) {
+                tests.add(new Object[]{obj, false, false, provided});
+            }
+        }
+
+        return tests.toArray(new Object[][]{});
+    }
+
+    // Verifies that parsing populates (or leaves null) the @Output field
+    // according to the required / hasDefault / provided combination.
+    @Test(dataProvider = "OutputProvider")
+    public void testOutput(final ATDTestCommandLineProgram.ATDTestOutputArgumentSource argumentSource, final boolean required, final boolean hasDefault, final boolean provided) {
+
+        final ParsingEngine parser = new ParsingEngine(new ATDTestCommandLineProgram());
+        parser.addArgumentSource(argumentSource.getClass());
+        parser.parse(provided ? new String[] {"out", "foo"} : new String[] {});
+
+        try {
+            parser.loadArgumentsIntoObject(argumentSource);
+
+            // Not provided, and either required or no stdout default: field stays null.
+            if ( !provided && (required || !hasDefault) )
+                Assert.assertEquals(argumentSource.getOut(), null);
+            // Not provided but defaulted: some stub must have been injected.
+            else if ( !provided )
+                Assert.assertNotEquals(argumentSource.getOut(), null);
+            else if ( argumentSource.getOut() == null || !(argumentSource.getOut() instanceof SAMFileWriterStub) ) // can't test this one case
+                Assert.assertEquals(!provided, outputIsStdout(argumentSource.getOut()));
+
+        } catch (Exception e) {
+            throw new ReviewedGATKException(e.getMessage());
+        }
+    }
+
+    // Verifies that a .list file of VCF paths parses into a single-element
+    // RodBindingCollection, with and without a trailing blank line.
+    @Test
+    public void testRodBindingsCollection() {
+
+        final ParsingEngine parser = new ParsingEngine(new ATDTestCommandLineProgram());
+
+        //A list file containing a single VCF
+        final File listFile = createTempListFile("oneVCF", privateTestDir + "empty.vcf");
+
+        try {
+            Object result = ArgumentTypeDescriptor.getRodBindingsCollection(listFile,
+                    parser,
+                    VariantContext.class,
+                    "variant",
+                    new Tags(),
+                    "variantTest");
+            if (!(result instanceof RodBindingCollection))
+                throw new ReviewedGATKException("getRodBindingsCollection did not return a RodBindingCollection");
+            RodBindingCollection<?> rbc = (RodBindingCollection) result;
+
+            Assert.assertEquals(rbc.getType(), VariantContext.class);
+            Assert.assertEquals(rbc.getRodBindings().size(), 1);
+
+        } catch (IOException e) {
+            throw new ReviewedGATKException(e.getMessage(), e);
+        }
+
+        //The same file, now with an extra blank line
+        final File listFileWithBlank = createTempListFile("oneVCFwithBlankLine", privateTestDir + "empty.vcf", "");
+        try {
+            Object result = ArgumentTypeDescriptor.getRodBindingsCollection(listFileWithBlank,
+                    parser,
+                    VariantContext.class,
+                    "variant",
+                    new Tags(),
+                    "variantTest");
+            if (!(result instanceof RodBindingCollection))
+                throw new ReviewedGATKException("getRodBindingsCollection did not return a RodBindingCollection");
+            RodBindingCollection<?> rbc = (RodBindingCollection) result;
+
+            Assert.assertEquals(rbc.getType(), VariantContext.class);
+            Assert.assertEquals(rbc.getRodBindings().size(), 1);
+
+        } catch (IOException e) {
+            throw new ReviewedGATKException(e.getMessage(), e);
+        }
+    }
+
+    // Returns true when the given stub appears to write to System.out.
+    // NOTE(review): the SAMFileWriterStub branch compares with != while the
+    // other branches use == -- this looks inconsistent, but testOutput above
+    // explicitly skips the SAMFileWriterStub case; confirm the intended
+    // semantics before relying on this helper for SAM writer stubs.
+    private static boolean outputIsStdout(final Object out) {
+        if ( out == null ) {
+            return false;
+        } else if ( out instanceof SAMFileWriterStub ) {
+            return ((SAMFileWriterStub)out).getOutputStream() != System.out;
+        } else if ( out instanceof VariantContextWriterStub ) {
+            return ((VariantContextWriterStub)out).getOutputStream() == System.out;
+        } else if ( out instanceof OutputStreamStub ) {
+            return ((OutputStreamStub)out).getOutputStream() == System.out;
+        }
+        return false;
+    }
+
+}
\ No newline at end of file
diff --git a/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/iterators/BoundedReadIteratorUnitTest.java b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/iterators/BoundedReadIteratorUnitTest.java
new file mode 100644
index 0000000..3c5d84c
--- /dev/null
+++ b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/iterators/BoundedReadIteratorUnitTest.java
@@ -0,0 +1,144 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.iterators;
+
+import static org.testng.Assert.fail;
+
+import htsjdk.samtools.SAMFileHeader;
+import htsjdk.samtools.SAMRecord;
+import htsjdk.samtools.reference.ReferenceSequenceFile;
+import org.broadinstitute.gatk.utils.BaseTest;
+import org.broadinstitute.gatk.utils.iterators.GATKSAMIterator;
+import org.testng.Assert;
+
+import org.broadinstitute.gatk.utils.sam.ArtificialSAMUtils;
+
+import org.testng.annotations.BeforeMethod;
+
+import org.testng.annotations.Test;
+
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.List;
+
+
+
+/*
+ * Copyright (c) 2009 The Broad Institute
+ *
+ * Permission is hereby granted, free of charge, to any person
+ * obtaining a copy of this software and associated documentation
+ * files (the "Software"), to deal in the Software without
+ * restriction, including without limitation the rights to use,
+ * copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the
+ * Software is furnished to do so, subject to the following
+ * conditions:
+ *
+ * The above copyright notice and this permission notice shall be
+ * included in all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+ * OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+ * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+ * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+ * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+ * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+ * OTHER DEALINGS IN THE SOFTWARE.
+ */
+
+/**
+ * Tests for {@link BoundedReadIterator}: verifies that iteration stops after
+ * the configured number of reads even though the underlying iterator
+ * ({@code testIterator} below) never runs out.
+ *
+ * @author aaron
+ * @since Apr 14, 2009
+ */
+public class BoundedReadIteratorUnitTest extends BaseTest {
+
+    // The unused per-test fixture fields (file list / reference sequence) and
+    // the @BeforeMethod that populated them were removed: nothing read them.
+
+    /** Bounding an endless iterator must yield exactly the requested count. */
+    @Test
+    public void testBounding() {
+        logger.warn("Executing testBounding");
+        // total reads expected back from the bounded iterator
+        final int expected = 20;
+        // wrap an iterator that never ends; the bound is the only terminator
+        BoundedReadIterator iter = new BoundedReadIterator(new testIterator(), expected);
+
+        int count = 0;
+        for (SAMRecord rec : iter) {
+            count++;
+        }
+
+        Assert.assertEquals(count, expected);
+    }
+}
+
+/**
+ * Endless {@link GATKSAMIterator} used as input for the bounded-iterator
+ * test: {@link #hasNext()} always returns {@code true}, so any termination
+ * must come from the wrapping {@code BoundedReadIterator}.
+ */
+class testIterator implements GATKSAMIterator {
+    // Minimal artificial header (1 read group, 1 contig of 2000 bases).
+    SAMFileHeader header;
+    testIterator() {
+        header = ArtificialSAMUtils.createArtificialSamHeader(1,1,2000);
+    }
+
+    // Nothing to release; required by the closeable-iterator contract.
+    public void close() {
+
+    }
+
+    // Intentionally always true -- the stream never ends.
+    public boolean hasNext() {
+        return true;
+    }
+
+    // Returns a fresh artificial read aligned to contig 0 at position 1.
+    public SAMRecord next() {
+        return ArtificialSAMUtils.createArtificialRead(header,"blah",0,1,100);
+    }
+
+    // Removal is a no-op; this iterator produces synthetic reads only.
+    public void remove() {
+    }
+
+    // The iterator is its own Iterable, as the GATKSAMIterator idiom expects.
+    public Iterator<SAMRecord> iterator() {
+        return this;
+    }
+}
diff --git a/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/iterators/GATKSAMIteratorAdapterUnitTest.java b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/iterators/GATKSAMIteratorAdapterUnitTest.java
new file mode 100644
index 0000000..9f2589d
--- /dev/null
+++ b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/iterators/GATKSAMIteratorAdapterUnitTest.java
@@ -0,0 +1,179 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.iterators;
+
+import htsjdk.samtools.SAMRecord;
+import htsjdk.samtools.util.CloseableIterator;
+import org.broadinstitute.gatk.utils.BaseTest;
+import static org.testng.Assert.assertEquals;
+
+import org.broadinstitute.gatk.utils.iterators.GATKSAMIterator;
+import org.broadinstitute.gatk.utils.iterators.GATKSAMIteratorAdapter;
+import org.testng.annotations.Test;
+
+import java.util.Iterator;
+
+/**
+ *
+ * User: aaron
+ * Date: May 13, 2009
+ * Time: 6:58:21 PM
+ *
+ * The Broad Institute
+ * SOFTWARE COPYRIGHT NOTICE AGREEMENT 
+ * This software and its documentation are copyright 2009 by the
+ * Broad Institute/Massachusetts Institute of Technology. All rights are reserved.
+ *
+ * This software is supplied without any warranty or guaranteed support whatsoever. Neither
+ * the Broad Institute nor MIT can be responsible for its use, misuse, or functionality.
+ *
+ */
+
+
+/**
+ * @author aaron
+ * @version 1.0
+ * @date May 13, 2009
+ * <p/>
+ * Class GATKSAMIteratorTest
+ * <p/>
+ * Tests the GATKSAMIteratorAdapter class.
+ */
+/**
+ * Tests that GATKSAMIteratorAdapter.adapt() correctly wraps both plain Iterators
+ * and CloseableIterators, preserving element count and propagating close().
+ */
+public class GATKSAMIteratorAdapterUnitTest extends BaseTest {
+
+    /** Plain (non-closeable) iterator yielding exactly 100 null records. */
+    class MyTestIterator implements Iterator<SAMRecord> {
+
+        public int count = 0;
+
+        public MyTestIterator() {
+            count = 0;
+        }
+
+        // NOTE: advancement happens in hasNext(), not next(); the tests below always
+        // pair the two calls, so the 100-element budget is consumed one per iteration.
+        public boolean hasNext() {
+            if (count < 100) {
+                ++count;
+                return true;
+            } else {
+                return false;
+            }
+        }
+
+        public SAMRecord next() {
+            return null;
+        }
+
+        public void remove() {
+            throw new UnsupportedOperationException("Unsupported");
+        }
+    }
+
+    /** CloseableIterator variant; close() sets count to -1 so tests can observe the call. */
+    class MyTestCloseableIterator implements CloseableIterator<SAMRecord> {
+        public int count = 0;
+
+        public MyTestCloseableIterator() {
+            count = 0;
+        }
+
+        public boolean hasNext() {
+            if (count < 100) {
+                ++count;
+                return true;
+            } else {
+                return false;
+            }
+        }
+
+        public SAMRecord next() {
+            return null;
+        }
+
+        public void remove() {
+            throw new UnsupportedOperationException("Unsupported");
+        }
+
+        public void close() {
+            // sentinel value checked by testCloseOnCloseableIterator
+            count = -1;
+        }
+    }
+
+
+    /** Adapting a plain Iterator must preserve the element count. */
+    @Test
+    public void testNormalIterator() {
+        final int COUNT = 100;
+        MyTestIterator it = new MyTestIterator();
+
+        GATKSAMIterator samIt = GATKSAMIteratorAdapter.adapt(it);
+        int countCheck = 0;
+        while (samIt.hasNext()) {
+            samIt.next();
+            ++countCheck;
+        }
+
+        // was asserted twice in a row; the duplicate added no coverage and was removed
+        assertEquals(countCheck, COUNT);
+    }
+
+    /** Adapting a CloseableIterator must preserve the element count. */
+    @Test
+    public void testCloseableIterator() {
+        final int COUNT = 100;
+
+        MyTestCloseableIterator it = new MyTestCloseableIterator();
+
+        GATKSAMIterator samIt = GATKSAMIteratorAdapter.adapt(it);
+
+        int countCheck = 0;
+        while (samIt.hasNext()) {
+            samIt.next();
+            ++countCheck;
+        }
+
+        assertEquals(countCheck, COUNT);
+    }
+
+    /** close() on the adapted iterator must be forwarded to the underlying CloseableIterator. */
+    @Test
+    public void testCloseOnCloseableIterator() {
+        final int COUNT = 100;
+
+        MyTestCloseableIterator it = new MyTestCloseableIterator();
+        
+        GATKSAMIterator samIt = GATKSAMIteratorAdapter.adapt(it);
+
+
+        int countCheck = 0;
+        while (samIt.hasNext()) {
+            samIt.next();
+            ++countCheck;
+        }
+
+        assertEquals(countCheck, COUNT);
+
+        // check to see that the count get's set to -1
+        samIt.close();
+        assertEquals(it.count, -1);
+    }
+}
diff --git a/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/iterators/MisencodedBaseQualityUnitTest.java b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/iterators/MisencodedBaseQualityUnitTest.java
new file mode 100644
index 0000000..d743d00
--- /dev/null
+++ b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/iterators/MisencodedBaseQualityUnitTest.java
@@ -0,0 +1,99 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.iterators;
+
+
+import htsjdk.samtools.SAMFileHeader;
+import org.broadinstitute.gatk.engine.iterators.MisencodedBaseQualityReadTransformer;
+import org.broadinstitute.gatk.utils.BaseTest;
+import org.broadinstitute.gatk.utils.exceptions.UserException;
+import org.broadinstitute.gatk.utils.sam.ArtificialSAMUtils;
+import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
+import org.testng.Assert;
+import org.testng.annotations.BeforeMethod;
+import org.testng.annotations.Test;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
+/**
+ * Basic unit test for misencoded quals
+ */
+/**
+ * Basic unit test for misencoded quals: good (Phred+33-range) qualities pass the check,
+ * bad (Phred+64-style) qualities trigger a UserException, and fixing shifts quals down by 31.
+ */
+public class MisencodedBaseQualityUnitTest extends BaseTest {
+
+    private static final String readBases = "AAAAAAAAAA";
+    // "bad" quals sit in the Phred+64 range; note fixedQuals[i] == badQuals[i] - 31
+    private static final byte[] badQuals = { 59, 60, 62, 63, 64, 61, 62, 58, 57, 56 };
+    private static final byte[] goodQuals = { 60, 60, 60, 60, 60, 60, 60, 60, 60, 60 };
+    private static final byte[] fixedQuals = { 28, 29, 31, 32, 33, 30, 31, 27, 26, 25 };
+    private SAMFileHeader header;
+
+    @BeforeMethod
+    public void before() {
+        // reset the read counter so that we are deterministic
+        MisencodedBaseQualityReadTransformer.currentReadCounter = 0;
+        header = ArtificialSAMUtils.createArtificialSamHeader(1, 1, 1000);
+    }
+
+    /**
+     * Builds a 10M read with either the good or the bad quality array.
+     * Copies the static arrays so a test cannot mutate shared fixture state.
+     */
+    private GATKSAMRecord createRead(final boolean useGoodBases) {
+        GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(header, "foo", 0, 10, readBases.getBytes(),
+                                                                     useGoodBases ? Arrays.copyOf(goodQuals, goodQuals.length) :
+                                                                                    Arrays.copyOf(badQuals, badQuals.length));
+        read.setCigarString("10M");
+        return read;
+    }
+
+    @Test(enabled = true)
+    public void testGoodQuals() {
+        final List<GATKSAMRecord> reads = new ArrayList<GATKSAMRecord>(10000);
+        for ( int i = 0; i < 10000; i++ )
+            reads.add(createRead(true));
+
+        testEncoding(reads);
+    }
+
+    @Test(enabled = true, expectedExceptions = {UserException.class})
+    public void testBadQualsThrowsError() {
+        final List<GATKSAMRecord> reads = new ArrayList<GATKSAMRecord>(10000);
+        for ( int i = 0; i < 10000; i++ )
+            reads.add(createRead(false));
+
+        testEncoding(reads);
+    }
+
+    @Test(enabled = true)
+    public void testFixBadQuals() {
+        final GATKSAMRecord read = createRead(false);
+        final GATKSAMRecord fixedRead = MisencodedBaseQualityReadTransformer.fixMisencodedQuals(read);
+        // TestNG's assertEquals takes (actual, expected); the original call had them swapped,
+        // which produces misleading failure messages.
+        for ( int i = 0; i < fixedQuals.length; i++ )
+            Assert.assertEquals(fixedRead.getBaseQualities()[i], fixedQuals[i]);
+    }
+
+    /** Runs the misencoding check over every read; throws UserException on bad encodings. */
+    private void testEncoding(final List<GATKSAMRecord> reads) {
+        for ( final GATKSAMRecord read : reads )
+            MisencodedBaseQualityReadTransformer.checkForMisencodedQuals(read);
+    }
+}
\ No newline at end of file
diff --git a/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/iterators/ReadFormattingIteratorUnitTest.java b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/iterators/ReadFormattingIteratorUnitTest.java
new file mode 100644
index 0000000..a931bcb
--- /dev/null
+++ b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/iterators/ReadFormattingIteratorUnitTest.java
@@ -0,0 +1,52 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.iterators;
+
+import htsjdk.samtools.*;
+import org.broadinstitute.gatk.utils.BaseTest;
+import org.broadinstitute.gatk.utils.iterators.GATKSAMIterator;
+import org.broadinstitute.gatk.utils.iterators.GATKSAMIteratorAdapter;
+import org.broadinstitute.gatk.utils.sam.ArtificialSAMUtils;
+import org.testng.Assert;
+import org.testng.annotations.Test;
+
+import java.util.Arrays;
+
+
+public class ReadFormattingIteratorUnitTest extends BaseTest {
+
+    /**
+     * A cigar containing zero-length elements ("3M0M5M0M") must come out of the
+     * ReadFormattingIterator consolidated into a single 8M element.
+     */
+    @Test
+    public void testIteratorConsolidatesCigars() {
+        // Build a read whose cigar still contains redundant zero-length blocks.
+        final Cigar rawCigar = TextCigarCodec.decode("3M0M5M0M");
+        final SAMRecord rawRead = ArtificialSAMUtils.createArtificialRead(rawCigar);
+
+        // Push the single read through the formatting iterator and grab the result.
+        final GATKSAMIterator source = GATKSAMIteratorAdapter.adapt(Arrays.asList(rawRead).iterator());
+        final ReadFormattingIterator formatter = new ReadFormattingIterator(source, false, (byte)-1);
+        final SAMRecord formattedRead = formatter.next();
+
+        Assert.assertEquals(formattedRead.getCigarString(), "8M", "Cigar 3M0M5M0M not consolidated correctly by ReadFormattingIterator");
+    }
+}
diff --git a/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/iterators/VerifyingSamIteratorUnitTest.java b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/iterators/VerifyingSamIteratorUnitTest.java
new file mode 100644
index 0000000..4e3c205
--- /dev/null
+++ b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/iterators/VerifyingSamIteratorUnitTest.java
@@ -0,0 +1,129 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.iterators;
+
+import htsjdk.samtools.SAMFileHeader;
+import htsjdk.samtools.SAMRecord;
+import htsjdk.samtools.SAMSequenceDictionary;
+import htsjdk.samtools.SAMSequenceRecord;
+import org.broadinstitute.gatk.utils.exceptions.UserException;
+import org.broadinstitute.gatk.utils.iterators.GATKSAMIteratorAdapter;
+import org.broadinstitute.gatk.utils.sam.ArtificialSAMUtils;
+import org.testng.Assert;
+import org.testng.annotations.BeforeClass;
+import org.testng.annotations.Test;
+
+import java.util.Arrays;
+import java.util.List;
+
+/**
+ * Unit tests for VerifyingSamIterator: correctly sorted reads (within and across
+ * contigs) pass through unchanged, while missorted or malformed reads raise
+ * the corresponding UserException on next().
+ *
+ * User: mhanna
+ * Date: Mar 2, 2011
+ * Time: 9:48:10 PM
+ */
+public class VerifyingSamIteratorUnitTest {
+    private SAMFileHeader samFileHeader;
+
+    /** Builds a two-contig header ("1" and "2", 500bp each) shared by all tests. */
+    @BeforeClass
+    public void init() {
+        SAMSequenceDictionary sequenceDictionary = new SAMSequenceDictionary();
+        sequenceDictionary.addSequence(new SAMSequenceRecord("1",500));
+        sequenceDictionary.addSequence(new SAMSequenceRecord("2",500));
+
+        samFileHeader = new SAMFileHeader();
+        samFileHeader.setSequenceDictionary(sequenceDictionary);
+    }
+
+    /** Two reads in ascending position on the same contig must pass through in order. */
+    @Test
+    public void testSortedReadsBasic() {
+        SAMRecord read1 = ArtificialSAMUtils.createArtificialRead(samFileHeader,"read1",getContig(0).getSequenceIndex(),1,10);
+        SAMRecord read2 = ArtificialSAMUtils.createArtificialRead(samFileHeader,"read2",getContig(0).getSequenceIndex(),2,10);
+        List<SAMRecord> reads = Arrays.asList(read1,read2);
+
+        VerifyingSamIterator iterator = new VerifyingSamIterator(GATKSAMIteratorAdapter.adapt(reads.iterator()));
+
+        Assert.assertTrue(iterator.hasNext(),"Insufficient reads");
+        Assert.assertSame(iterator.next(),read1,"Incorrect read in read 1 position");
+        Assert.assertTrue(iterator.hasNext(),"Insufficient reads");
+        Assert.assertSame(iterator.next(),read2,"Incorrect read in read 2 position");
+        Assert.assertFalse(iterator.hasNext(),"Too many reads in iterator");
+    }
+
+    /** A later contig with a smaller start position is still correctly sorted. */
+    @Test
+    public void testSortedReadsAcrossContigs() {
+        SAMRecord read1 = ArtificialSAMUtils.createArtificialRead(samFileHeader,"read1",getContig(0).getSequenceIndex(),2,10);
+        SAMRecord read2 = ArtificialSAMUtils.createArtificialRead(samFileHeader,"read2",getContig(1).getSequenceIndex(),1,10);
+        List<SAMRecord> reads = Arrays.asList(read1,read2);
+
+        VerifyingSamIterator iterator = new VerifyingSamIterator(GATKSAMIteratorAdapter.adapt(reads.iterator()));
+
+        Assert.assertTrue(iterator.hasNext(),"Insufficient reads");
+        Assert.assertSame(iterator.next(),read1,"Incorrect read in read 1 position");
+        Assert.assertTrue(iterator.hasNext(),"Insufficient reads");
+        Assert.assertSame(iterator.next(),read2,"Incorrect read in read 2 position");
+        Assert.assertFalse(iterator.hasNext(),"Too many reads in iterator");
+    }
+
+    /** Decreasing position on the same contig must raise MissortedBAM on the second next(). */
+    @Test(expectedExceptions=UserException.MissortedBAM.class)
+    public void testImproperlySortedReads() {
+        SAMRecord read1 = ArtificialSAMUtils.createArtificialRead(samFileHeader,"read1",getContig(0).getSequenceIndex(),2,10);
+        SAMRecord read2 = ArtificialSAMUtils.createArtificialRead(samFileHeader,"read2",getContig(0).getSequenceIndex(),1,10);
+        List<SAMRecord> reads = Arrays.asList(read1,read2);
+
+        VerifyingSamIterator iterator = new VerifyingSamIterator(GATKSAMIteratorAdapter.adapt(reads.iterator()));
+
+        Assert.assertTrue(iterator.hasNext(),"Insufficient reads");
+        Assert.assertSame(iterator.next(),read1,"Incorrect read in read 1 position");
+        Assert.assertTrue(iterator.hasNext(),"Insufficient reads");
+
+        // Should trigger MissortedBAM exception.
+        iterator.next();
+    }
+
+    /** A read with no reference index but an alignment start must raise MalformedBAM. */
+    @Test(expectedExceptions=UserException.MalformedBAM.class)
+    public void testInvalidAlignment() {
+        // Create an invalid alignment state.
+        // NOTE(review): both reads are named "read1" — looks like a copy/paste slip;
+        // read names do not appear to affect this test's outcome — confirm.
+        SAMRecord read1 = ArtificialSAMUtils.createArtificialRead(samFileHeader,"read1",getContig(0).getSequenceIndex(),1,10);
+        SAMRecord read2 = ArtificialSAMUtils.createArtificialRead(samFileHeader,"read1",getContig(0).getSequenceIndex(),2,10);
+        read1.setReferenceIndex(SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX);
+        List<SAMRecord> reads = Arrays.asList(read1,read2);
+
+        VerifyingSamIterator iterator = new VerifyingSamIterator(GATKSAMIteratorAdapter.adapt(reads.iterator()));
+
+        Assert.assertTrue(iterator.hasNext(),"Insufficient reads");
+        Assert.assertSame(iterator.next(),read1,"Incorrect read in read 1 position");
+        Assert.assertTrue(iterator.hasNext(),"Insufficient reads");
+
+        // Should trigger MalformedBAM exception.
+        iterator.next();
+    }
+
+    /** Convenience lookup of a contig record from the shared header by index. */
+    private SAMSequenceRecord getContig(final int contigIndex) {
+        return samFileHeader.getSequence(contigIndex);            
+    }
+}
diff --git a/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/phonehome/GATKRunReportUnitTest.java b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/phonehome/GATKRunReportUnitTest.java
new file mode 100644
index 0000000..bcfb60c
--- /dev/null
+++ b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/phonehome/GATKRunReportUnitTest.java
@@ -0,0 +1,358 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.phonehome;
+
+import org.broadinstitute.gatk.engine.walkers.*;
+import org.broadinstitute.gatk.utils.BaseTest;
+import org.broadinstitute.gatk.engine.GenomeAnalysisEngine;
+import org.broadinstitute.gatk.engine.arguments.GATKArgumentCollection;
+import org.broadinstitute.gatk.utils.contexts.AlignmentContext;
+import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
+import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
+import org.broadinstitute.gatk.utils.Utils;
+import org.broadinstitute.gatk.utils.activeregion.ActiveRegion;
+import org.broadinstitute.gatk.utils.activeregion.ActivityProfileState;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+import org.broadinstitute.gatk.utils.exceptions.UserException;
+import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
+import org.jets3t.service.S3Service;
+import org.jets3t.service.S3ServiceException;
+import org.jets3t.service.ServiceException;
+import org.jets3t.service.model.S3Object;
+import org.testng.Assert;
+import org.testng.annotations.BeforeClass;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.FileInputStream;
+import java.io.InputStream;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Properties;
+
+public class GATKRunReportUnitTest extends BaseTest {
+    private final static boolean DEBUG = false;
+    private static final long S3_PUT_TIMEOUT_IN_MILLISECONDS_FOR_TESTING = 30 * 1000;
+    private static final String AWS_DOWNLOADER_CREDENTIALS_PROPERTIES_FILE = privateTestDir + "phonehome/awsDownloaderCredentials.properties";
+
+    private Walker walker;
+    private Exception exception;
+    private GenomeAnalysisEngine engine;
+    private String downloaderAccessKey;
+    private String downloaderSecretKey;
+
+    /**
+     * One-time fixture setup: a dummy walker/exception/engine for report construction,
+     * plus AWS downloader credentials loaded from the private test properties file.
+     */
+    @BeforeClass
+    public void setup() throws Exception {
+        walker = new RunReportDummyReadWalker();
+        exception = new IllegalArgumentException("javaException");
+        engine = new GenomeAnalysisEngine();
+        engine.setArguments(new GATKArgumentCollection());
+
+        final Properties awsProperties = new Properties();
+        // Properties.load does not close its input; close the stream ourselves.
+        try (FileInputStream credentialsStream = new FileInputStream(AWS_DOWNLOADER_CREDENTIALS_PROPERTIES_FILE)) {
+            awsProperties.load(credentialsStream);
+        }
+        downloaderAccessKey = awsProperties.getProperty("accessKey");
+        downloaderSecretKey = awsProperties.getProperty("secretKey");
+    }
+
+    /** Sanity check that the embedded AWS upload keys pass GATKRunReport's validation. */
+    @Test(enabled = ! DEBUG)
+    public void testAWSKeysAreValid() {
+        // throws an exception if they aren't
+        GATKRunReport.checkAWSAreValid();
+    }
+
+    /** The bundled AWS upload access key must hash to its expected MD5. */
+    @Test(enabled = ! DEBUG)
+    public void testAccessKey() throws Exception {
+        testAWSKey(GATKRunReport.getAWSUploadAccessKey(), GATKRunReport.AWS_ACCESS_KEY_MD5);
+    }
+
+    /** The bundled AWS upload secret key must hash to its expected MD5. */
+    @Test(enabled = ! DEBUG)
+    public void testSecretKey() throws Exception {
+        testAWSKey(GATKRunReport.getAWSUploadSecretKey(), GATKRunReport.AWS_SECRET_KEY_MD5);
+    }
+
+    /**
+     * Asserts that the given AWS key matches its expected MD5 hash.
+     * Renamed the parameter from 'accessKey' to 'key': this helper is also
+     * called with the secret key, so the old name was misleading.
+     */
+    private void testAWSKey(final String key, final String expectedMD5) throws Exception {
+        Assert.assertNotNull(key, "AccessKey should not be null");
+        final String actualmd5 = Utils.calcMD5(key);
+        Assert.assertEquals(actualmd5, expectedMD5);
+    }
+
+    /**
+     * Data provider: the cartesian product of four walker types and four exception
+     * states (including none), each paired with a minimally-configured engine.
+     */
+    @DataProvider(name = "GATKReportCreationTest")
+    public Object[][] makeGATKReportCreationTest() {
+        List<Object[]> tests = new ArrayList<Object[]>();
+
+        final Walker readWalker = new RunReportDummyReadWalker();
+        final Walker lociWalker = new RunReportDummyLocusWalker();
+        final Walker rodWalker = new RunReportDummyRodWalker();
+        final Walker artWalker = new RunReportDummyActiveRegionWalker();
+
+        final Exception noException = null;
+        final Exception javaException = new IllegalArgumentException("javaException");
+        final Exception stingException = new ReviewedGATKException("GATKException");
+        final Exception userException = new UserException("userException");
+
+        final GenomeAnalysisEngine engine = new GenomeAnalysisEngine();
+        engine.setArguments(new GATKArgumentCollection());
+
+        for ( final Walker walker : Arrays.asList(readWalker, lociWalker, rodWalker, artWalker) ) {
+            for ( final Exception exception : Arrays.asList(noException,  javaException, stingException, userException) ) {
+                tests.add(new Object[]{walker, exception, engine});
+            }
+        }
+
+        return tests.toArray(new Object[][]{});
+    }
+
+    /**
+     * Round-trip check: serialize a report to an in-memory stream, deserialize it,
+     * and assert the two reports are equal with no error state recorded.
+     */
+    @Test(enabled = !DEBUG, dataProvider = "GATKReportCreationTest")
+    public void testGATKReportCreationReadingAndWriting(final Walker walker, final Exception exception, final GenomeAnalysisEngine engine) throws Exception {
+        final GATKRunReport report = new GATKRunReport(walker, exception, engine, GATKRunReport.PhoneHomeOption.STDOUT);
+        final ByteArrayOutputStream captureStream = new ByteArrayOutputStream();
+        final boolean succeeded = report.postReportToStream(captureStream);
+        Assert.assertTrue(succeeded, "Failed to write report to stream");
+        Assert.assertFalse(report.exceptionOccurredDuringPost(), "Post succeeded but report says it failed");
+        Assert.assertNull(report.getErrorMessage(), "Post succeeded but there was an error message");
+        Assert.assertNull(report.getErrorThrown(), "Post succeeded but there was an error message");
+        final InputStream readStream = new ByteArrayInputStream(captureStream.toByteArray());
+
+        GATKRunReport deserialized = null;
+        try {
+            deserialized = GATKRunReport.deserializeReport(readStream);
+        } catch ( Exception e ) {
+            // include the raw serialized text in the failure for debuggability
+            final String reportString = new String(captureStream.toByteArray());
+            Assert.fail("Failed to deserialize GATK report " + reportString + " with exception " + e);
+        }
+
+        if ( deserialized != null )
+            Assert.assertEquals(report, deserialized);
+    }
+
+    /** Data provider: each AWSMode value as a separate test case. */
+    @DataProvider(name = "GATKAWSReportMode")
+    public Object[][] makeGATKAWSReportMode() {
+        List<Object[]> tests = new ArrayList<Object[]>();
+
+        for ( final GATKRunReport.AWSMode mode : GATKRunReport.AWSMode.values() ) {
+            tests.add(new Object[]{mode});
+        }
+
+        return tests.toArray(new Object[][]{});
+    }
+
+    // Will fail with timeout if AWS time out isn't working
+    // Will fail with exception if AWS doesn't protect itself from errors
+    @Test(enabled = ! DEBUG, dataProvider = "GATKAWSReportMode", timeOut = S3_PUT_TIMEOUT_IN_MILLISECONDS_FOR_TESTING * 2)
+    public void testAWS(final GATKRunReport.AWSMode awsMode) {
+        logger.warn("Starting testAWS mode=" + awsMode);
+
+        // Use a shorter timeout than usual when we're testing GATKRunReport.AWSMode.TIMEOUT
+        // NOTE(review): 30 * 1000 equals S3_PUT_TIMEOUT_IN_MILLISECONDS_FOR_TESTING, so this
+        // is not actually shorter — confirm whether a smaller value was intended.
+        final long thisTestS3Timeout = awsMode == GATKRunReport.AWSMode.TIMEOUT ? 30 * 1000 : S3_PUT_TIMEOUT_IN_MILLISECONDS_FOR_TESTING;
+        final GATKRunReport report = new GATKRunReport(walker, exception, engine, GATKRunReport.PhoneHomeOption.AWS, thisTestS3Timeout);
+        report.sendAWSToTestBucket();
+        report.setAwsMode(awsMode);
+        final S3Object s3Object = report.postReportToAWSS3();
+
+        if ( awsMode == GATKRunReport.AWSMode.NORMAL ) {
+            // NORMAL mode: the upload must succeed and round-trip through S3
+            Assert.assertNotNull(s3Object, "Upload to AWS failed, s3Object was null. error was " + report.formatError());
+            Assert.assertFalse(report.exceptionOccurredDuringPost(), "The upload should have succeeded but the report says it didn't.  Error was " + report.formatError());
+            Assert.assertNull(report.getErrorMessage(), "Report succeeded but an error message was found");
+            Assert.assertNull(report.getErrorThrown(), "Report succeeded but an thrown error was found");
+            try {
+                final GATKRunReport deserialized = GATKRunReport.deserializeReport(downloaderAccessKey, downloaderSecretKey, report.getS3ReportBucket(), s3Object);
+                Assert.assertEquals(report, deserialized);
+                deleteFromS3(report);
+            } catch ( Exception e ) {
+                Assert.fail("Failed to read, deserialize, or delete GATK report " + s3Object.getName() + " with exception " + e);
+            }
+        } else {
+            // TIMEOUT / failure modes: the upload must fail gracefully with error state recorded
+            Assert.assertNull(s3Object, "AWS upload should have failed for mode " + awsMode + " but got non-null s3 object back " + s3Object + " error was " + report.formatError());
+            Assert.assertTrue(report.exceptionOccurredDuringPost(), "S3 object was null but the report says that the upload succeeded");
+            Assert.assertNotNull(report.getErrorMessage(), "Report succeeded but an error message wasn't found");
+            if ( awsMode == GATKRunReport.AWSMode.FAIL_WITH_EXCEPTION )
+                Assert.assertNotNull(report.getErrorThrown());
+        }
+    }
+
+    /** Removes the uploaded report object from S3 using the downloader credentials. */
+    private void deleteFromS3(final GATKRunReport report) throws Exception {
+        final S3Service s3Service = GATKRunReport.initializeAWSService(downloaderAccessKey, downloaderSecretKey);
+        // Delete the report object we uploaded previously
+        s3Service.deleteObject(report.getS3ReportBucket(), report.getReportFileName());
+    }
+
+    /** Data provider: each PhoneHomeOption value as a separate test case. */
+    @DataProvider(name = "PostReportByType")
+    public Object[][] makePostReportByType() {
+        List<Object[]> tests = new ArrayList<Object[]>();
+
+        for ( final GATKRunReport.PhoneHomeOption et : GATKRunReport.PhoneHomeOption.values() ) {
+            tests.add(new Object[]{et});
+        }
+
+        return tests.toArray(new Object[][]{});
+    }
+
+    /**
+     * postReport(type) must be a no-op for NO_ET, write to stdout for STDOUT,
+     * and otherwise upload to AWS (verified and then cleaned up).
+     */
+    @Test(enabled = ! DEBUG, dataProvider = "PostReportByType", timeOut = S3_PUT_TIMEOUT_IN_MILLISECONDS_FOR_TESTING * 2)
+    public void testPostReportByType(final GATKRunReport.PhoneHomeOption type) {
+        final GATKRunReport report = new GATKRunReport(walker, exception, engine, GATKRunReport.PhoneHomeOption.AWS, S3_PUT_TIMEOUT_IN_MILLISECONDS_FOR_TESTING);
+        Assert.assertFalse(report.exceptionOccurredDuringPost(), "An exception occurred during posting the report");
+        final boolean succeeded = report.postReport(type);
+
+        if ( type == GATKRunReport.PhoneHomeOption.NO_ET )
+            Assert.assertFalse(succeeded, "NO_ET option shouldn't write a report");
+        else {
+            Assert.assertTrue(succeeded, "Any non NO_ET option should succeed in writing a report");
+
+            if ( type == GATKRunReport.PhoneHomeOption.STDOUT ) {
+                // nothing to do
+            } else {
+                // must have gone to AWS
+                try {
+                    Assert.assertTrue(report.wentToAWS(), "The report should have gone to AWS but the report says it wasn't");
+                    deleteFromS3(report);
+                } catch ( Exception e ) {
+                    Assert.fail("Failed delete GATK report " + report.getReportFileName() + " with exception " + e);
+                }
+            }
+        }
+    }
+
+    /** A single S3 operation expected to fail with AccessDenied under upload-only credentials. */
+    public interface S3Op {
+        // 'public' is implicit (and redundant) on interface methods; omitted per convention.
+        void apply() throws ServiceException;
+    }
+
+    // Will fail with timeout if AWS time out isn't working
+    // Will fail with exception if AWS doesn't protect itself from errors
+    /**
+     * After a successful upload, the embedded (public) upload credentials must be
+     * write-only: listing buckets/objects, creating buckets, and deleting objects
+     * must all be rejected with AccessDenied.
+     */
+    @Test(timeOut = S3_PUT_TIMEOUT_IN_MILLISECONDS_FOR_TESTING * 2)
+    public void testAWSPublicKeyHasAccessControls() throws Exception {
+        final GATKRunReport report = new GATKRunReport(walker, exception, engine, GATKRunReport.PhoneHomeOption.AWS, S3_PUT_TIMEOUT_IN_MILLISECONDS_FOR_TESTING);
+        report.sendAWSToTestBucket();
+        final S3Object s3Object = report.postReportToAWSS3();
+        Assert.assertNotNull(s3Object, "Upload to AWS failed, s3Object was null. error was " + report.formatError());
+
+        // create a service with the public key, and make sure it cannot list or delete
+        final S3Service s3Service = GATKRunReport.initializeAWSService(GATKRunReport.getAWSUploadAccessKey(), GATKRunReport.getAWSUploadSecretKey());
+        assertOperationNotAllowed("listAllBuckets", new S3Op() {
+            @Override
+            public void apply() throws S3ServiceException {
+                s3Service.listAllBuckets();
+            }
+        });
+        assertOperationNotAllowed("listBucket", new S3Op() {
+            @Override
+            public void apply() throws S3ServiceException { s3Service.listObjects(report.getS3ReportBucket()); }
+        });
+        assertOperationNotAllowed("createBucket", new S3Op() {
+            @Override
+            public void apply() throws S3ServiceException { s3Service.createBucket("ShouldNotCreate"); }
+        });
+        assertOperationNotAllowed("deleteObject", new S3Op() {
+            @Override
+            public void apply() throws ServiceException { s3Service.deleteObject(report.getS3ReportBucket(), report.getReportFileName()); }
+        });
+    }
+
+    /**
+     * Asserts that executing {@code op} fails with an S3 "AccessDenied" error code.
+     * Fails the test if the operation succeeds or is rejected for any other reason.
+     *
+     * @param name human-readable operation name used in the failure message
+     * @param op   the S3 operation expected to be denied
+     */
+    private void assertOperationNotAllowed(final String name, final S3Op op) {
+        try {
+            op.apply();
+            // only gets here if the operation was successful
+            Assert.fail("Operation " + name + " ran successfully but we expected it to fail");
+        } catch ( ServiceException e ) {
+            // the public upload credentials must be rejected with AccessDenied specifically
+            Assert.assertEquals(e.getErrorCode(), "AccessDenied");
+        }
+    }
+
+    /** Inert ReadWalker stub: exists only so a walker instance can be handed to GATKRunReport; every callback returns 0. */
+    class RunReportDummyReadWalker extends ReadWalker<Integer, Integer> {
+        @Override
+        public Integer map(ReferenceContext ref, GATKSAMRecord read, RefMetaDataTracker metaDataTracker) {
+            return 0;
+        }
+
+        @Override
+        public Integer reduceInit() {
+            return 0;
+        }
+
+        @Override
+        public Integer reduce(Integer value, Integer sum) {
+            return 0;
+        }
+    }
+
+    /** Inert LocusWalker stub: exists only so a walker instance can be handed to GATKRunReport; every callback returns 0. */
+    class RunReportDummyLocusWalker extends LocusWalker<Integer, Integer> {
+        @Override
+        public Integer map(RefMetaDataTracker tracker, ReferenceContext ref, AlignmentContext context) {
+            return 0;
+        }
+
+        @Override
+        public Integer reduceInit() {
+            return 0;
+        }
+
+        @Override
+        public Integer reduce(Integer value, Integer sum) {
+            return 0;
+        }
+    }
+
+    /** Inert RodWalker stub: exists only so a walker instance can be handed to GATKRunReport; every callback returns 0. */
+    class RunReportDummyRodWalker extends RodWalker<Integer, Integer> {
+        @Override
+        public Integer map(RefMetaDataTracker tracker, ReferenceContext ref, AlignmentContext context) {
+            return 0;
+        }
+
+        @Override
+        public Integer reduceInit() {
+            return 0;
+        }
+
+        @Override
+        public Integer reduce(Integer value, Integer sum) {
+            return 0;
+        }
+    }
+
+    /** Inert ActiveRegionWalker stub: reports zero activity at every locus and returns 0 from every callback. */
+    class RunReportDummyActiveRegionWalker extends ActiveRegionWalker<Integer, Integer> {
+        @Override
+        public ActivityProfileState isActive(RefMetaDataTracker tracker, ReferenceContext ref, AlignmentContext context) {
+            // probability 0.0 == "never active"
+            return new ActivityProfileState(ref.getLocus(), 0.0);
+        }
+
+        @Override
+        public Integer map(ActiveRegion activeRegion, RefMetaDataTracker metaDataTracker) {
+            return 0;
+        }
+
+        @Override
+        public Integer reduceInit() {
+            return 0;
+        }
+
+        @Override
+        public Integer reduce(Integer value, Integer sum) {
+            return 0;
+        }
+    }
+}
diff --git a/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/samples/PedReaderUnitTest.java b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/samples/PedReaderUnitTest.java
new file mode 100644
index 0000000..b059f8d
--- /dev/null
+++ b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/samples/PedReaderUnitTest.java
@@ -0,0 +1,354 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.samples;
+
+import org.apache.log4j.Logger;
+import org.broadinstitute.gatk.utils.BaseTest;
+import org.broadinstitute.gatk.utils.Utils;
+import org.broadinstitute.gatk.utils.exceptions.UserException;
+import org.testng.Assert;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+import java.io.StringReader;
+import java.util.*;
+
+/**
+ * UnitTest for PedReader
+ *
+ * @author Mark DePristo
+ * @since 2011
+ */
+public class PedReaderUnitTest extends BaseTest {
+    private static Logger logger = Logger.getLogger(PedReaderUnitTest.class);
+
+    private class PedReaderTest extends TestDataProvider {
+        public String fileContents;
+        public List<Sample> expectedSamples;
+        EnumSet<PedReader.MissingPedField> missing;
+
+        private PedReaderTest(final String name, final List<Sample> expectedSamples, final String fileContents) {
+            super(PedReaderTest.class, name);
+            this.fileContents = fileContents;
+            this.expectedSamples = expectedSamples;
+        }
+    }
+
+//     Family ID
+//     Individual ID
+//     Paternal ID
+//     Maternal ID
+//     Sex (1=male; 2=female; other=unknown)
+//     Phenotype
+//
+//     -9 missing
+//     0 missing
+//     1 unaffected
+//     2 affected
+
+    @DataProvider(name = "readerTest")
+    public Object[][] createPEDFiles() {
+        new PedReaderTest("singleRecordMale",
+                Arrays.asList(new Sample("kid", "fam1", null, null, Gender.MALE, Affection.UNAFFECTED)),
+                "fam1 kid 0 0 1 1");
+
+        new PedReaderTest("singleRecordFemale",
+                Arrays.asList(new Sample("kid", "fam1", null, null, Gender.FEMALE, Affection.UNAFFECTED)),
+                "fam1 kid 0 0 2 1");
+
+        new PedReaderTest("singleRecordMissingGender",
+                Arrays.asList(new Sample("kid", "fam1", null, null, Gender.UNKNOWN, Affection.UNKNOWN)),
+                "fam1 kid 0 0 0 0");
+
+        // Affection
+        new PedReaderTest("singleRecordAffected",
+                Arrays.asList(new Sample("kid", "fam1", null, null, Gender.MALE, Affection.AFFECTED)),
+                "fam1 kid 0 0 1 2");
+
+        new PedReaderTest("singleRecordUnaffected",
+                Arrays.asList(new Sample("kid", "fam1", null, null, Gender.MALE, Affection.UNAFFECTED)),
+                "fam1 kid 0 0 1 1");
+
+        new PedReaderTest("singleRecordMissingAffection-9",
+                Arrays.asList(new Sample("kid", "fam1", null, null, Gender.MALE, Affection.UNKNOWN)),
+                "fam1 kid 0 0 1 -9");
+
+        new PedReaderTest("singleRecordMissingAffection0",
+                Arrays.asList(new Sample("kid", "fam1", null, null, Gender.MALE, Affection.UNKNOWN)),
+                "fam1 kid 0 0 1 0");
+
+        new PedReaderTest("multipleUnrelated",
+                Arrays.asList(
+                        new Sample("s1", "fam1", null, null, Gender.MALE,   Affection.UNAFFECTED),
+                        new Sample("s2", "fam2", null, null, Gender.FEMALE, Affection.AFFECTED)),
+                String.format("%s%n%s",
+                        "fam1 s1 0 0 1 1",
+                        "fam2 s2 0 0 2 2"));
+
+        new PedReaderTest("multipleUnrelatedExtraLine",
+                Arrays.asList(
+                        new Sample("s1", "fam1", null, null, Gender.MALE,   Affection.UNAFFECTED),
+                        new Sample("s2", "fam2", null, null, Gender.FEMALE, Affection.AFFECTED)),
+                String.format("%s%n%s%n  %n", // note extra newlines and whitespace
+                        "fam1 s1 0 0 1 1",
+                        "fam2 s2 0 0 2 2"));
+
+        new PedReaderTest("explicitTrio",
+                Arrays.asList(
+                        new Sample("kid", "fam1", "dad", "mom", Gender.MALE,   Affection.AFFECTED),
+                        new Sample("dad", "fam1", null, null,   Gender.MALE,   Affection.UNAFFECTED),
+                        new Sample("mom", "fam1", null, null,   Gender.FEMALE, Affection.AFFECTED)),
+                String.format("%s%n%s%n%s",
+                        "fam1 kid dad mom 1 2",
+                        "fam1 dad 0   0   1 1",
+                        "fam1 mom 0   0   2 2"));
+
+        new PedReaderTest("implicitTrio",
+                Arrays.asList(
+                        new Sample("kid", "fam1", "dad", "mom", Gender.MALE,   Affection.AFFECTED),
+                        new Sample("dad", "fam1", null, null,   Gender.MALE,   Affection.UNKNOWN),
+                        new Sample("mom", "fam1", null, null,   Gender.FEMALE, Affection.UNKNOWN)),
+                "fam1 kid dad mom 1 2");
+
+        new PedReaderTest("partialTrio",
+                Arrays.asList(
+                        new Sample("kid", "fam1", "dad", "mom", Gender.MALE,   Affection.AFFECTED),
+                        new Sample("dad", "fam1", null, null,   Gender.MALE,   Affection.UNAFFECTED),
+                        new Sample("mom", "fam1", null, null,   Gender.FEMALE, Affection.UNKNOWN)),
+                String.format("%s%n%s",
+                        "fam1 kid dad mom 1 2",
+                        "fam1 dad 0   0   1 1"));
+
+        new PedReaderTest("bigPedigree",
+                Arrays.asList(
+                        new Sample("kid", "fam1", "dad",       "mom",      Gender.MALE,   Affection.AFFECTED),
+                        new Sample("dad", "fam1", "granddad1", "grandma1", Gender.MALE,   Affection.UNAFFECTED),
+                        new Sample("granddad1", "fam1", null, null,        Gender.MALE,   Affection.UNKNOWN),
+                        new Sample("grandma1",  "fam1", null, null,        Gender.FEMALE,   Affection.UNKNOWN),
+                        new Sample("mom", "fam1", "granddad2", "grandma2", Gender.FEMALE, Affection.AFFECTED),
+                        new Sample("granddad2", "fam1", null, null,        Gender.MALE,   Affection.UNKNOWN),
+                        new Sample("grandma2",  "fam1", null, null,        Gender.FEMALE,   Affection.UNKNOWN)),
+                String.format("%s%n%s%n%s",
+                        "fam1 kid dad       mom      1 2",
+                        "fam1 dad granddad1 grandma1 1 1",
+                        "fam1 mom granddad2 grandma2 2 2"));
+
+        // Quantitative trait
+        new PedReaderTest("OtherPhenotype",
+                Arrays.asList(
+                        new Sample("s1", "fam1", null, null, Gender.MALE,   Affection.OTHER, "1"),
+                        new Sample("s2", "fam2", null, null, Gender.FEMALE, Affection.OTHER, "10.0")),
+                String.format("%s%n%s",
+                        "fam1 s1 0 0 1 1",
+                        "fam2 s2 0 0 2 10.0"));
+
+        new PedReaderTest("OtherPhenotypeWithMissing",
+                Arrays.asList(
+                        new Sample("s1", "fam1", null, null, Gender.MALE,   Affection.UNKNOWN, Sample.UNSET_QT),
+                        new Sample("s2", "fam2", null, null, Gender.FEMALE, Affection.OTHER, "10.0")),
+                String.format("%s%n%s",
+                        "fam1 s1 0 0 1 -9",
+                        "fam2 s2 0 0 2 10.0"));
+
+        new PedReaderTest("OtherPhenotypeOnlyInts",
+                Arrays.asList(
+                        new Sample("s1", "fam1", null, null, Gender.MALE,   Affection.OTHER, "1"),
+                        new Sample("s2", "fam2", null, null, Gender.FEMALE, Affection.OTHER, "10")),
+                String.format("%s%n%s",
+                        "fam1 s1 0 0 1 1",
+                        "fam2 s2 0 0 2 10"));
+
+        return PedReaderTest.getTests(PedReaderTest.class);
+    }
+
+    private static final void runTest(PedReaderTest test, String myFileContents, EnumSet<PedReader.MissingPedField> missing) {
+        logger.warn("Test " + test);
+        PedReader reader = new PedReader();
+        SampleDB sampleDB = new SampleDB();
+        List<Sample> readSamples = reader.parse(myFileContents, missing, sampleDB);
+        Assert.assertEquals(new HashSet<Sample>(test.expectedSamples), new HashSet<Sample>(readSamples));
+    }
+
+    @Test(enabled = true, dataProvider = "readerTest")
+    public void testPedReader(PedReaderTest test) {
+        runTest(test, test.fileContents, EnumSet.noneOf(PedReader.MissingPedField.class));
+    }
+
+    @Test(enabled = true, dataProvider = "readerTest")
+    public void testPedReaderWithComments(PedReaderTest test) {
+        runTest(test, String.format("#comment%n%s", test.fileContents), EnumSet.noneOf(PedReader.MissingPedField.class));
+    }
+
+    @Test(enabled = true, dataProvider = "readerTest")
+    public void testPedReaderWithSemicolons(PedReaderTest test) {
+        runTest(test,
+                test.fileContents.replace(String.format("%n"), ";"),
+                EnumSet.noneOf(PedReader.MissingPedField.class));
+    }
+
+    // -----------------------------------------------------------------
+    // missing format field tests
+    // -----------------------------------------------------------------
+
+    private class PedReaderTestMissing extends TestDataProvider {
+        public EnumSet<PedReader.MissingPedField> missingDesc;
+        public EnumSet<PedReader.Field> missingFields;
+        public final String fileContents;
+        public Sample expected;
+
+
+        private PedReaderTestMissing(final String name, final String fileContents,
+                                     EnumSet<PedReader.MissingPedField> missingDesc,
+                                     EnumSet<PedReader.Field> missingFields,
+                                     final Sample expected) {
+            super(PedReaderTestMissing.class, name);
+            this.fileContents = fileContents;
+            this.missingDesc = missingDesc;
+            this.missingFields = missingFields;
+            this.expected = expected;
+        }
+    }
+
+    @DataProvider(name = "readerTestMissing")
+    public Object[][] createPEDFilesWithMissing() {
+        new PedReaderTestMissing("missingFam",
+                "fam1 kid dad mom 1 2",
+                EnumSet.of(PedReader.MissingPedField.NO_FAMILY_ID),
+                EnumSet.of(PedReader.Field.FAMILY_ID),
+                new Sample("kid", null, "dad", "mom", Gender.MALE, Affection.AFFECTED));
+
+        new PedReaderTestMissing("missingParents",
+                "fam1 kid dad mom 1 2",
+                EnumSet.of(PedReader.MissingPedField.NO_PARENTS),
+                EnumSet.of(PedReader.Field.PATERNAL_ID, PedReader.Field.MATERNAL_ID),
+                new Sample("kid", "fam1", null, null, Gender.MALE, Affection.AFFECTED));
+
+        new PedReaderTestMissing("missingSex",
+                "fam1 kid dad mom 1 2",
+                EnumSet.of(PedReader.MissingPedField.NO_SEX),
+                EnumSet.of(PedReader.Field.GENDER),
+                new Sample("kid", "fam1", "dad", "mom", Gender.UNKNOWN, Affection.AFFECTED));
+
+        new PedReaderTestMissing("missingPhenotype",
+                "fam1 kid dad mom 1 2",
+                EnumSet.of(PedReader.MissingPedField.NO_PHENOTYPE),
+                EnumSet.of(PedReader.Field.PHENOTYPE),
+                new Sample("kid", "fam1", "dad", "mom", Gender.MALE, Affection.UNKNOWN));
+
+        new PedReaderTestMissing("missingEverythingButGender",
+                "fam1 kid dad mom 1 2",
+                EnumSet.of(PedReader.MissingPedField.NO_PHENOTYPE, PedReader.MissingPedField.NO_PARENTS, PedReader.MissingPedField.NO_FAMILY_ID),
+                EnumSet.of(PedReader.Field.FAMILY_ID, PedReader.Field.PATERNAL_ID, PedReader.Field.MATERNAL_ID, PedReader.Field.PHENOTYPE),
+                new Sample("kid", null, null, null, Gender.MALE, Affection.UNKNOWN));
+
+
+        return PedReaderTestMissing.getTests(PedReaderTestMissing.class);
+    }
+
+    @Test(enabled = true, dataProvider = "readerTestMissing")
+    public void testPedReaderWithMissing(PedReaderTestMissing test) {
+        final String contents = sliceContents(test.missingFields, test.fileContents);
+        logger.warn("Test " + test);
+        PedReader reader = new PedReader();
+        SampleDB sampleDB = new SampleDB();
+        reader.parse(new StringReader(contents), test.missingDesc, sampleDB);
+        final Sample missingSample = sampleDB.getSample("kid");
+        Assert.assertEquals(test.expected, missingSample, "Missing field value not expected value for " + test);
+    }
+
+    private final static String sliceContents(EnumSet<PedReader.Field> missingFieldsSet, String full) {
+        List<String> parts = new ArrayList<String>(Arrays.asList(full.split("\\s+")));
+        final List<PedReader.Field> missingFields = new ArrayList<PedReader.Field>(missingFieldsSet);
+        Collections.reverse(missingFields);
+        for ( PedReader.Field field : missingFields )
+            parts.remove(field.ordinal());
+        return Utils.join("\t", parts);
+    }
+
+    // -----------------------------------------------------------------
+    // parsing tags
+    // -----------------------------------------------------------------
+
+    private class PedReaderTestTagParsing extends TestDataProvider {
+        public EnumSet<PedReader.MissingPedField> expected;
+        public final List<String> tags;
+
+        private PedReaderTestTagParsing(final List<String> tags, EnumSet<PedReader.MissingPedField> missingDesc) {
+            super(PedReaderTestTagParsing.class);
+            this.tags = tags;
+            this.expected = missingDesc;
+        }
+    }
+
+    @DataProvider(name = "readerTestTagParsing")
+    public Object[][] createReaderTestTagParsing() {
+        new PedReaderTestTagParsing(
+                Collections.<String>emptyList(),
+                EnumSet.noneOf(PedReader.MissingPedField.class));
+
+        new PedReaderTestTagParsing(
+                Arrays.asList("NO_FAMILY_ID"),
+                EnumSet.of(PedReader.MissingPedField.NO_FAMILY_ID));
+
+        new PedReaderTestTagParsing(
+                Arrays.asList("NO_PARENTS"),
+                EnumSet.of(PedReader.MissingPedField.NO_PARENTS));
+
+        new PedReaderTestTagParsing(
+                Arrays.asList("NO_PHENOTYPE"),
+                EnumSet.of(PedReader.MissingPedField.NO_PHENOTYPE));
+
+        new PedReaderTestTagParsing(
+                Arrays.asList("NO_SEX"),
+                EnumSet.of(PedReader.MissingPedField.NO_SEX));
+
+        new PedReaderTestTagParsing(
+                Arrays.asList("NO_SEX", "NO_PHENOTYPE"),
+                EnumSet.of(PedReader.MissingPedField.NO_SEX, PedReader.MissingPedField.NO_PHENOTYPE));
+
+        new PedReaderTestTagParsing(
+                Arrays.asList("NO_SEX", "NO_PHENOTYPE", "NO_PARENTS"),
+                EnumSet.of(PedReader.MissingPedField.NO_SEX, PedReader.MissingPedField.NO_PHENOTYPE, PedReader.MissingPedField.NO_PARENTS));
+
+        return PedReaderTestTagParsing.getTests(PedReaderTestTagParsing.class);
+    }
+
+    @Test(enabled = true, dataProvider = "readerTestTagParsing")
+    public void testPedReaderTagParsing(PedReaderTestTagParsing test) {
+        EnumSet<PedReader.MissingPedField> parsed = PedReader.parseMissingFieldTags("test", test.tags);
+        Assert.assertEquals(test.expected, parsed, "Failed to properly parse tags " + test.tags);
+    }
+
+    @Test(enabled = true, expectedExceptions = UserException.class)
+    public void testPedReaderTagParsing1() {
+        EnumSet<PedReader.MissingPedField> parsed = PedReader.parseMissingFieldTags("test", Arrays.asList("XXX"));
+    }
+
+    @Test(enabled = true, expectedExceptions = UserException.class)
+    public void testPedReaderTagParsing2() {
+        EnumSet<PedReader.MissingPedField> parsed = PedReader.parseMissingFieldTags("test", Arrays.asList("NO_SEX", "XXX"));
+    }
+}
\ No newline at end of file
diff --git a/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/samples/SampleDBUnitTest.java b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/samples/SampleDBUnitTest.java
new file mode 100644
index 0000000..d6d1ed2
--- /dev/null
+++ b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/samples/SampleDBUnitTest.java
@@ -0,0 +1,272 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.samples;
+
+import htsjdk.samtools.SAMFileHeader;
+import org.broadinstitute.gatk.utils.BaseTest;
+import org.broadinstitute.gatk.utils.exceptions.UserException;
+import org.broadinstitute.gatk.utils.sam.ArtificialSAMUtils;
+import org.testng.Assert;
+import org.testng.annotations.BeforeMethod;
+import org.testng.annotations.Test;
+
+import java.io.File;
+import java.util.*;
+
+/**
+ * Created by IntelliJ IDEA.
+ * User: brett
+ * Date: Sep 9, 2010
+ * Time: 8:21:00 AM
+ */
+public class SampleDBUnitTest extends BaseTest {
+    private static SampleDBBuilder builder;
+    // all the test sample files are located here
+    private File testPED = new File(privateTestDir +  "testtrio.ped");
+
+    private static final Set<Sample> testPEDSamples = new HashSet<Sample>(Arrays.asList(
+            new Sample("kid", "fam1", "dad", "mom", Gender.MALE,   Affection.AFFECTED),
+            new Sample("dad", "fam1", null, null,   Gender.MALE,   Affection.UNAFFECTED),
+            new Sample("mom", "fam1", null, null,   Gender.FEMALE, Affection.AFFECTED)));
+
+    private static final Set<Sample> testPEDFamilyF2 = new HashSet<Sample>(Arrays.asList(
+            new Sample("s2", "fam2", "d2", "m2", Gender.FEMALE, Affection.AFFECTED),
+            new Sample("d2", "fam2", null, null, Gender.MALE, Affection.UNKNOWN),
+            new Sample("m2", "fam2", null, null, Gender.FEMALE, Affection.UNKNOWN)
+            ));
+
+    private static final Set<Sample> testPEDFamilyF3 = new HashSet<Sample>(Arrays.asList(
+            new Sample("s1", "fam3", "d1", "m1", Gender.FEMALE, Affection.AFFECTED),
+            new Sample("d1", "fam3", null, null, Gender.MALE, Affection.UNKNOWN),
+            new Sample("m1", "fam3", null, null, Gender.FEMALE, Affection.UNKNOWN)
+            ));
+
+    private static final Set<Sample> testSAMSamples = new HashSet<Sample>(Arrays.asList(
+            new Sample("kid", null, null, null, Gender.UNKNOWN,   Affection.UNKNOWN),
+            new Sample("mom", null, null, null, Gender.UNKNOWN,   Affection.UNKNOWN),
+            new Sample("dad", null, null, null, Gender.UNKNOWN,   Affection.UNKNOWN)));
+
+    private static final HashMap<String, Set<Sample>> testGetFamilies = new HashMap<String,Set<Sample>>();
+    static {
+        testGetFamilies.put("fam1", testPEDSamples);
+        testGetFamilies.put("fam2", testPEDFamilyF2);
+        testGetFamilies.put("fam3", testPEDFamilyF3);
+    }
+
+    private static final Set<Sample> testKidsWithParentsFamilies2 = new HashSet<Sample>(Arrays.asList(
+            new Sample("kid", "fam1", "dad", "mom", Gender.MALE,   Affection.AFFECTED),
+            new Sample("kid3", "fam5", "dad2", "mom2", Gender.MALE,   Affection.AFFECTED),
+            new Sample("kid2", "fam5", "dad2", "mom2", Gender.MALE,   Affection.AFFECTED)));
+
+    private static final HashSet<String> testGetPartialFamiliesIds =   new HashSet<String>(Arrays.asList("kid","s1"));
+    private static final HashMap<String, Set<Sample>> testGetPartialFamilies = new HashMap<String,Set<Sample>>();
+    static {
+        testGetPartialFamilies.put("fam1", new HashSet<Sample>(Arrays.asList(new Sample("kid", "fam1", "dad", "mom", Gender.MALE,   Affection.AFFECTED))));
+        testGetPartialFamilies.put("fam3", new HashSet<Sample>(Arrays.asList(new Sample("s1", "fam3", "d1", "m1", Gender.FEMALE, Affection.AFFECTED))));
+    }
+
+    private static final String testPEDString =
+            String.format("%s%n%s%n%s",
+                    "fam1 kid dad mom 1 2",
+                    "fam1 dad 0   0   1 1",
+                    "fam1 mom 0   0   2 2");
+
+    private static final String testPEDMultipleFamilies =
+            String.format("%s%n%s%n%s%n%s%n%s",
+                    "fam1 kid dad mom 1 2",
+                    "fam1 dad 0   0   1 1",
+                    "fam1 mom 0   0   2 2",
+                    "fam3 s1  d1  m1  2 2",
+                    "fam2 s2  d2  m2  2 2");
+
+    private static final String testPEDMultipleFamilies2 =
+            String.format("%s%n%s%n%s%n%s%n%s%n%s%n%s%n%s%n%s",
+                    "fam1 kid dad mom 1 2",
+                    "fam1 dad 0   0   1 1",
+                    "fam1 mom 0   0   2 2",
+                    "fam4 kid4 dad4 0 1 2",
+                    "fam4 dad4 0   0   1 1",
+                    "fam5 kid2 dad2 mom2 1 2",
+                    "fam5 kid3 dad2 mom2 1 2",
+                    "fam5 dad2 0   0   1 1",
+                    "fam5 mom2 0   0   2 2");
+
+    private static final String testPEDStringInconsistentGender =
+            "fam1 kid 0   0   2 2";
+
+    private static final String testPEDStringConsistent =
+            "fam1 kid dad   mom   1 2";
+
+    private static final Set<Sample> testPEDSamplesAsSet =
+            new HashSet<Sample>(testPEDSamples);
+
+
+    @BeforeMethod
+    public void before() {
+        builder = new SampleDBBuilder(PedigreeValidationType.STRICT);
+    }
+
+    @Test()
+    public void loadPEDFile() {
+        final SampleDB db = builder.addSamplesFromPedigreeFiles(Arrays.asList(testPED))
+                                   .getFinalSampleDB();
+        Assert.assertEquals(testPEDSamplesAsSet, db.getSamples());
+    }
+
+    @Test()
+    public void loadPEDString() {
+        final SampleDB db = builder.addSamplesFromPedigreeStrings(Arrays.asList(testPEDString))
+                             .getFinalSampleDB();
+        Assert.assertEquals(testPEDSamplesAsSet, db.getSamples());
+    }
+
+    private static final void addSAMHeader() {
+        final SAMFileHeader header = ArtificialSAMUtils.createArtificialSamHeader(1, 1, 10);
+        ArtificialSAMUtils.createEnumeratedReadGroups(header, Arrays.asList("1", "2", "3"),
+                Arrays.asList("kid", "mom", "dad"));
+        builder.addSamplesFromSAMHeader(header);
+    }
+
+    @Test()
+    public void loadSAMHeader() {
+        addSAMHeader();
+        final SampleDB db = builder.getFinalSampleDB();
+        Assert.assertEquals(testSAMSamples, db.getSamples());
+    }
+
+    @Test()
+    public void loadSAMHeaderPlusPED() {
+        addSAMHeader();
+        final SampleDB db = builder.addSamplesFromPedigreeFiles(Arrays.asList(testPED))
+                                   .getFinalSampleDB();
+        Assert.assertEquals(testPEDSamples, db.getSamples());
+    }
+
+    @Test()
+    public void loadDuplicateData() {
+        final SampleDB db = builder.addSamplesFromPedigreeFiles(Arrays.asList(testPED))
+                                   .addSamplesFromPedigreeFiles(Arrays.asList(testPED))
+                                   .getFinalSampleDB();
+        Assert.assertEquals(testPEDSamples, db.getSamples());
+    }
+
+    @Test(expectedExceptions = UserException.class)
+    public void loadNonExistentFile() {
+        final SampleDB db = builder.addSamplesFromPedigreeFiles(Arrays.asList(new File("non-existence-file.txt")))
+                           .getFinalSampleDB();
+        Assert.assertEquals(testSAMSamples, db.getSamples());
+    }
+
+    @Test(expectedExceptions = UserException.class)
+    public void loadInconsistentData() {
+        builder = new SampleDBBuilder(PedigreeValidationType.STRICT)
+                      .addSamplesFromPedigreeFiles(Arrays.asList(testPED))
+                      .addSamplesFromPedigreeStrings(Arrays.asList(testPEDStringInconsistentGender));
+        builder.getFinalSampleDB();
+    }
+
+    @Test
+    public void loadConsistentData() {
+        // build a temporary DB and get the resulting sample to use for test result comparison
+        final Sample baseKidSample = new SampleDBBuilder(PedigreeValidationType.STRICT)
+                                        .addSamplesFromPedigreeStrings(Arrays.asList(testPEDStringConsistent))
+                                        .getFinalSampleDB()
+                                        .getSample("kid");
+
+        // build a sample DB and then merge in the consistent test string
+        final SampleDB finalDB = new SampleDBBuilder(PedigreeValidationType.STRICT)
+                                     .addSamplesFromPedigreeFiles(Arrays.asList(testPED))
+                                     .addSamplesFromPedigreeStrings(Arrays.asList(testPEDStringConsistent))
+                                     .getFinalSampleDB();
+
+        Assert.assertEquals(finalDB.getSamples().size(), 3);
+        Assert.assertTrue(finalDB.getSample("kid").equals(baseKidSample));
+    }
+
+    @Test(expectedExceptions = UserException.class)
+    public void sampleInSAMHeaderNotInSamplesDB() {
+        addSAMHeader();
+        builder.addSamplesFromPedigreeStrings(Arrays.asList(testPEDStringInconsistentGender))
+               .getFinalSampleDB();
+    }
+
+    @Test()
+    public void getFamilyIDs() {
+        final SampleDB db = builder.addSamplesFromPedigreeStrings(Arrays.asList(testPEDMultipleFamilies))
+                                   .getFinalSampleDB();
+        Assert.assertEquals(db.getFamilyIDs(), new TreeSet<String>(Arrays.asList("fam1", "fam2", "fam3")));
+    }
+
+    @Test()
+    public void getFamily() {
+        final SampleDB db = builder.addSamplesFromPedigreeStrings(Arrays.asList(testPEDMultipleFamilies))
+                                   .getFinalSampleDB();
+        Assert.assertEquals(db.getFamily("fam1"), testPEDSamplesAsSet);
+    }
+
+    @Test()
+    public void getFamilies(){
+        final SampleDB db = builder.addSamplesFromPedigreeStrings(Arrays.asList(testPEDMultipleFamilies))
+                                   .getFinalSampleDB();
+        Assert.assertEquals(db.getFamilies(),testGetFamilies);
+        Assert.assertEquals(db.getFamilies(null),testGetFamilies);
+        Assert.assertEquals(db.getFamilies(testGetPartialFamiliesIds),testGetPartialFamilies);
+    }
+
+    @Test()
+    public void testGetChildrenWithParents()
+    {
+        final SampleDB db = builder.addSamplesFromPedigreeStrings(Arrays.asList(testPEDMultipleFamilies2))
+                                   .getFinalSampleDB();
+        Assert.assertEquals(db.getChildrenWithParents(), testKidsWithParentsFamilies2);
+        Assert.assertEquals(db.getChildrenWithParents(false), testKidsWithParentsFamilies2);
+        Assert.assertEquals(db.getChildrenWithParents(true), new HashSet<Sample>(Arrays.asList(new Sample("kid", "fam1", "dad", "mom", Gender.MALE,   Affection.AFFECTED))));
+    }
+
+    @Test()
+    public void testGetFounderIds(){
+        final SampleDB db = builder.addSamplesFromPedigreeStrings(Arrays.asList(testPEDMultipleFamilies2))
+                                   .getFinalSampleDB();
+        Assert.assertEquals(db.getFounderIds(), new HashSet<String>(Arrays.asList("dad","mom","dad2","mom2","dad4")));
+    }
+
+    @Test()
+    public void loadFamilyIDs() {
+        final SampleDB db = builder.addSamplesFromPedigreeStrings(Arrays.asList(testPEDMultipleFamilies))
+                                   .getFinalSampleDB();
+        final Map<String, Set<Sample>> families = db.getFamilies();
+        Assert.assertEquals(families.size(), 3);
+        Assert.assertEquals(families.keySet(), new TreeSet<String>(Arrays.asList("fam1", "fam2", "fam3")));
+
+        for ( final String famID : families.keySet() ) {
+            final Set<Sample> fam = families.get(famID);
+            Assert.assertEquals(fam.size(), 3);
+            for ( final Sample sample : fam ) {
+                Assert.assertEquals(sample.getFamilyID(), famID);
+            }
+        }
+    }
+}
diff --git a/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/samples/SampleUnitTest.java b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/samples/SampleUnitTest.java
new file mode 100644
index 0000000..4e63d5d
--- /dev/null
+++ b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/samples/SampleUnitTest.java
@@ -0,0 +1,89 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.samples;
+
+import org.broadinstitute.gatk.utils.BaseTest;
+import org.testng.Assert;
+import org.testng.annotations.BeforeClass;
+import org.testng.annotations.Test;
+
+/**
+ *
+ */
+/**
+ * Unit tests for Sample: pedigree getters (family/paternal/maternal IDs),
+ * affection status, other-phenotype values, and gender.
+ *
+ * NOTE(review): TestNG's Assert.assertEquals signature is (actual, expected);
+ * the calls below pass (expected, actual). Equality checks behave the same
+ * either way, but failure messages would read reversed.
+ */
+public class SampleUnitTest extends BaseTest {
+    SampleDB db;
+    // trio: fam1A is the child of father fam1B and mother fam1C
+    static Sample fam1A, fam1B, fam1C;
+    // samples with no pedigree or phenotype information
+    static Sample s1, s2;
+    // one sample per Affection value, plus OTHER with a quantitative ("1.0")
+    // and a categorical ("CEU") other-phenotype
+    static Sample trait1, trait2, trait3, trait4, trait5;
+
+    @BeforeClass
+    public void init() {
+        db = new SampleDB();
+
+        fam1A = new Sample("1A", db, "fam1", "1B", "1C", Gender.UNKNOWN);
+        fam1B = new Sample("1B", db, "fam1", null, null, Gender.MALE);
+        fam1C = new Sample("1C", db, "fam1", null, null, Gender.FEMALE);
+
+        s1 = new Sample("s1", db);
+        s2 = new Sample("s2", db);
+
+        trait1 = new Sample("t1", db, Affection.AFFECTED, Sample.UNSET_QT);
+        trait2 = new Sample("t2", db, Affection.UNAFFECTED, Sample.UNSET_QT);
+        trait3 = new Sample("t3", db, Affection.UNKNOWN, Sample.UNSET_QT);
+        trait4 = new Sample("t4", db, Affection.OTHER, "1.0");
+        // fix: was a copy-paste duplicate of ID "t4"; trait5 now has its own ID
+        trait5 = new Sample("t5", db, Affection.OTHER, "CEU");
+    }
+
+    /**
+     * Basic getters: IDs, pedigree links, affection and other-phenotype values.
+     */
+    @Test()
+    public void normalGettersTest() {
+        Assert.assertEquals("1A", fam1A.getID());
+        Assert.assertEquals("fam1", fam1A.getFamilyID());
+        Assert.assertEquals("1B", fam1A.getPaternalID());
+        Assert.assertEquals("1C", fam1A.getMaternalID());
+        // founders have no recorded parents
+        Assert.assertEquals(null, fam1B.getPaternalID());
+        Assert.assertEquals(null, fam1B.getMaternalID());
+
+        Assert.assertEquals(Affection.AFFECTED, trait1.getAffection());
+        Assert.assertEquals(Sample.UNSET_QT, trait1.getOtherPhenotype());
+        Assert.assertEquals(Affection.UNAFFECTED, trait2.getAffection());
+        Assert.assertEquals(Sample.UNSET_QT, trait2.getOtherPhenotype());
+        Assert.assertEquals(Affection.UNKNOWN, trait3.getAffection());
+        Assert.assertEquals(Sample.UNSET_QT, trait3.getOtherPhenotype());
+        Assert.assertEquals(Affection.OTHER, trait4.getAffection());
+        // OTHER affection carries the free-form phenotype string through unchanged
+        Assert.assertEquals("1.0", trait4.getOtherPhenotype());
+        Assert.assertEquals("CEU", trait5.getOtherPhenotype());
+    }
+
+    /**
+     * Gender getter returns the value each sample was constructed with.
+     */
+    @Test()
+    public void testGenders() {
+        Assert.assertTrue(fam1A.getGender() == Gender.UNKNOWN);
+        Assert.assertTrue(fam1B.getGender() == Gender.MALE);
+        Assert.assertTrue(fam1C.getGender() == Gender.FEMALE);
+    }
+}
diff --git a/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/traversals/DummyActiveRegionWalker.java b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/traversals/DummyActiveRegionWalker.java
new file mode 100644
index 0000000..e17ff3e
--- /dev/null
+++ b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/traversals/DummyActiveRegionWalker.java
@@ -0,0 +1,116 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.traversals;
+
+import org.broadinstitute.gatk.utils.contexts.AlignmentContext;
+import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
+import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
+import org.broadinstitute.gatk.engine.walkers.ActiveRegionWalker;
+import org.broadinstitute.gatk.utils.GenomeLoc;
+import org.broadinstitute.gatk.utils.GenomeLocSortedSet;
+import org.broadinstitute.gatk.utils.activeregion.ActiveRegion;
+import org.broadinstitute.gatk.utils.activeregion.ActiveRegionReadState;
+import org.broadinstitute.gatk.utils.activeregion.ActivityProfileState;
+
+import java.util.*;
+
+/**
+ * ActiveRegionWalker for unit testing
+ *
+ * User: depristo
+ * Date: 1/15/13
+ * Time: 1:28 PM
+ */
+class DummyActiveRegionWalker extends ActiveRegionWalker<Integer, Integer> {
+    // constant activity probability returned by isActive() for covered loci
+    private final double prob;
+    // read states requested from the traversal engine; defaults to the superclass's set
+    private EnumSet<ActiveRegionReadState> states = super.desiredReadStates();
+    // preset active intervals, exposed only when declareHavingPresetRegions is true
+    private GenomeLocSortedSet activeRegions = null;
+
+    // recording fields inspected by the unit tests after a traversal:
+    protected List<GenomeLoc> isActiveCalls = new ArrayList<GenomeLoc>();
+    protected Map<GenomeLoc, ActiveRegion> mappedActiveRegions = new LinkedHashMap<GenomeLoc, ActiveRegion>();
+    private boolean declareHavingPresetRegions = false;
+
+    // default walker: every locus is fully active (probability 1.0)
+    public DummyActiveRegionWalker() {
+        this(1.0);
+    }
+
+    // walker reporting a fixed activity probability everywhere
+    public DummyActiveRegionWalker(double constProb) {
+        this.prob = constProb;
+    }
+
+    // walker with preset regions AND an explicit set of desired read states
+    public DummyActiveRegionWalker(GenomeLocSortedSet activeRegions, EnumSet<ActiveRegionReadState> wantStates, final boolean declareHavingPresetRegions) {
+        this(activeRegions, declareHavingPresetRegions);
+        this.states = wantStates;
+    }
+
+    // walker active (prob 1.0) only inside the given regions
+    public DummyActiveRegionWalker(GenomeLocSortedSet activeRegions, final boolean declareHavingPresetRegions) {
+        this(1.0);
+        this.activeRegions = activeRegions;
+        this.declareHavingPresetRegions = declareHavingPresetRegions;
+    }
+
+    public void setStates(EnumSet<ActiveRegionReadState> states) {
+        this.states = states;
+    }
+
+    @Override
+    public boolean hasPresetActiveRegions() {
+        return declareHavingPresetRegions;
+    }
+
+    @Override
+    public GenomeLocSortedSet getPresetActiveRegions() {
+        // only hand out the preset regions when we declared that we have them
+        return declareHavingPresetRegions ? activeRegions : null;
+    }
+
+    @Override
+    public EnumSet<ActiveRegionReadState> desiredReadStates() {
+        return states;
+    }
+
+    @Override
+    public ActivityProfileState isActive(RefMetaDataTracker tracker, ReferenceContext ref, AlignmentContext context) {
+        // record every locus queried so tests can verify full interval coverage
+        isActiveCalls.add(ref.getLocus());
+        // active with probability `prob` inside activeRegions (or everywhere if none set)
+        final double p = activeRegions == null || activeRegions.overlaps(ref.getLocus()) ? prob : 0.0;
+        return new ActivityProfileState(ref.getLocus(), p);
+    }
+
+    @Override
+    public Integer map(ActiveRegion activeRegion, RefMetaDataTracker metaDataTracker) {
+        // record each region handed to map(), keyed by its location, for test inspection
+        mappedActiveRegions.put(activeRegion.getLocation(), activeRegion);
+        return 0;
+    }
+
+    @Override
+    public Integer reduceInit() {
+        return 0;
+    }
+
+    @Override
+    public Integer reduce(Integer value, Integer sum) {
+        // reduction result is unused by the tests
+        return 0;
+    }
+}
diff --git a/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/traversals/TAROrderedReadCacheUnitTest.java b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/traversals/TAROrderedReadCacheUnitTest.java
new file mode 100644
index 0000000..88421ab
--- /dev/null
+++ b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/traversals/TAROrderedReadCacheUnitTest.java
@@ -0,0 +1,111 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.traversals;
+
+import htsjdk.samtools.reference.IndexedFastaSequenceFile;
+import org.broadinstitute.gatk.utils.BaseTest;
+import org.broadinstitute.gatk.utils.fasta.CachingIndexedFastaSequenceFile;
+import org.broadinstitute.gatk.utils.sam.ArtificialBAMBuilder;
+import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
+import org.testng.Assert;
+import org.testng.annotations.BeforeClass;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
+public class TAROrderedReadCacheUnitTest extends BaseTest {
+    // reference sequence used to build the artificial BAM reads
+    private IndexedFastaSequenceFile seq;
+
+    @BeforeClass
+    public void setup() throws FileNotFoundException {
+        // sequence
+        seq = new CachingIndexedFastaSequenceFile(new File(b37KGReference));
+    }
+
+    // Cartesian product of reads-per-locus x loci x cache capacity x add mode
+    @DataProvider(name = "ReadCacheTestData")
+    public Object[][] makeReadCacheTestData() {
+        List<Object[]> tests = new ArrayList<Object[]>();
+
+        for ( final int nReadsPerLocus : Arrays.asList(0, 1, 10, 100) ) {
+            for ( final int nLoci : Arrays.asList(1, 10, 100) ) {
+                for ( final int max : Arrays.asList(10, 50, 1000) ) {
+                    for ( final boolean addAllAtOnce : Arrays.asList(true, false) ) {
+                        tests.add(new Object[]{nReadsPerLocus, nLoci, max, addAllAtOnce});
+                    }
+                }
+            }
+        }
+
+        return tests.toArray(new Object[][]{});
+    }
+
+    // Verifies the cache keeps at most `max` reads, counts discards, resets its
+    // stats after popCurrentReads(), and returns reads in coordinate order.
+    @Test(dataProvider = "ReadCacheTestData")
+    public void testReadCache(final int nReadsPerLocus, final int nLoci, final int max, final boolean addAllAtOnce) {
+        final TAROrderedReadCache cache = new TAROrderedReadCache(max);
+
+        // freshly built cache: configured capacity, nothing stored or discarded yet
+        Assert.assertEquals(cache.getMaxCapacity(), max);
+        Assert.assertEquals(cache.getNumDiscarded(), 0);
+        Assert.assertEquals(cache.size(), 0);
+
+        final ArtificialBAMBuilder bamBuilder = new ArtificialBAMBuilder(seq, nReadsPerLocus, nLoci);
+        final List<GATKSAMRecord> reads = bamBuilder.makeReads();
+
+        // exercise both bulk and one-at-a-time insertion paths
+        if ( addAllAtOnce ) {
+            cache.addAll(reads);
+        } else {
+            for ( final GATKSAMRecord read : reads ) {
+                cache.add(read);
+            }
+        }
+
+        final int nTotalReads = reads.size();
+        final int nExpectedToKeep = Math.min(nTotalReads, max);
+        final int nExpectedToDiscard = nTotalReads - nExpectedToKeep;
+        Assert.assertEquals(cache.getNumDiscarded(), nExpectedToDiscard, "wrong number of reads discarded");
+        Assert.assertEquals(cache.size(), nExpectedToKeep, "wrong number of reads kept");
+
+        final List<GATKSAMRecord> cacheReads = cache.popCurrentReads();
+        // popping drains the cache and resets the discard counter
+        Assert.assertEquals(cache.size(), 0, "Should be no reads left");
+        Assert.assertEquals(cache.getNumDiscarded(), 0, "should have reset stats");
+        Assert.assertEquals(cacheReads.size(), nExpectedToKeep, "should have 1 read for every read we expected to keep");
+
+        verifySortednessOfReads(cacheReads);
+    }
+
+    // Asserts reads are in non-decreasing alignment-start order.
+    private void verifySortednessOfReads( final List<GATKSAMRecord> reads) {
+        int lastStart = -1;
+        for ( GATKSAMRecord read : reads ) {
+            Assert.assertTrue(lastStart <= read.getAlignmentStart(), "Reads should be sorted but weren't.  Found read with start " + read.getAlignmentStart() + " while last was " + lastStart);
+            lastStart = read.getAlignmentStart();
+        }
+    }
+}
diff --git a/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/traversals/TraverseActiveRegionsUnitTest.java b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/traversals/TraverseActiveRegionsUnitTest.java
new file mode 100644
index 0000000..c69774b
--- /dev/null
+++ b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/traversals/TraverseActiveRegionsUnitTest.java
@@ -0,0 +1,680 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.traversals;
+
+import com.google.java.contract.PreconditionError;
+import htsjdk.samtools.*;
+import org.broadinstitute.gatk.utils.commandline.Tags;
+import org.broadinstitute.gatk.utils.ValidationExclusion;
+import org.broadinstitute.gatk.engine.datasources.reads.*;
+import org.broadinstitute.gatk.engine.filters.ReadFilter;
+import org.broadinstitute.gatk.engine.iterators.ReadTransformer;
+import org.broadinstitute.gatk.engine.resourcemanagement.ThreadAllocation;
+import org.broadinstitute.gatk.engine.walkers.Walker;
+import org.broadinstitute.gatk.utils.GenomeLocSortedSet;
+import org.broadinstitute.gatk.utils.activeregion.ActiveRegionReadState;
+import org.broadinstitute.gatk.utils.interval.IntervalMergingRule;
+import org.broadinstitute.gatk.utils.interval.IntervalUtils;
+import org.broadinstitute.gatk.utils.sam.*;
+import htsjdk.samtools.reference.IndexedFastaSequenceFile;
+import org.broadinstitute.gatk.utils.BaseTest;
+import org.broadinstitute.gatk.engine.GenomeAnalysisEngine;
+import org.broadinstitute.gatk.engine.datasources.providers.LocusShardDataProvider;
+import org.broadinstitute.gatk.engine.datasources.rmd.ReferenceOrderedDataSource;
+import org.broadinstitute.gatk.engine.executive.WindowMaker;
+import org.broadinstitute.gatk.utils.GenomeLoc;
+import org.broadinstitute.gatk.utils.GenomeLocParser;
+import org.broadinstitute.gatk.utils.activeregion.ActiveRegion;
+import org.broadinstitute.gatk.utils.fasta.CachingIndexedFastaSequenceFile;
+import org.testng.Assert;
+import org.testng.annotations.BeforeClass;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+
+import java.io.File;
+import java.io.IOException;
+import java.util.*;
+
+/**
+ * Created with IntelliJ IDEA.
+ * User: thibault
+ * Date: 11/13/12
+ * Time: 2:47 PM
+ *
+ * Test the Active Region Traversal Contract
+ * http://iwww.broadinstitute.org/gsa/wiki/index.php/Active_Region_Traversal_Contract
+ */
+public class TraverseActiveRegionsUnitTest extends BaseTest {
+    private final static boolean ENFORCE_CONTRACTS = false;
+    private final static boolean DEBUG = false;
+
+    // Supplies the traversal engine(s) under test; currently a single TraverseActiveRegions.
+    @DataProvider(name = "TraversalEngineProvider")
+    public Object[][] makeTraversals() {
+        final List<Object[]> traversals = new LinkedList<Object[]>();
+        traversals.add(new Object[]{new TraverseActiveRegions<>()});
+        return traversals.toArray(new Object[][]{});
+    }
+
+    private File referenceFile;
+    private IndexedFastaSequenceFile reference;
+    private SAMSequenceDictionary dictionary;
+    private GenomeLocParser genomeLocParser;
+
+    private List<GenomeLoc> intervals;
+
+    private File testBAM;
+
+    @BeforeClass
+    // Builds the shared fixture: hg19 reference/parser, the analysis intervals,
+    // and a coordinate-sorted test BAM of reads placed at named boundary cases.
+    private void init() throws IOException {
+        //reference = new CachingIndexedFastaSequenceFile(new File("/Users/depristo/Desktop/broadLocal/localData/human_g1k_v37.fasta")); // hg19Reference));
+        referenceFile = new File(hg19Reference);
+        reference = new CachingIndexedFastaSequenceFile(referenceFile);
+        dictionary = reference.getSequenceDictionary();
+        genomeLocParser = new GenomeLocParser(dictionary);
+
+        // TODO: reads with indels
+        // TODO: reads which span many regions
+        // TODO: reads which are partially between intervals (in/outside extension)
+        // TODO: duplicate reads
+        // TODO: read at the end of a contig
+        // TODO: reads which are completely outside intervals but within extension
+        // TODO: test the extension itself
+        // TODO: unmapped reads
+
+        // adjacent/overlapping intervals are merged below, so the 1:10-20 interval
+        // folds into 1:1-999
+        intervals = new ArrayList<GenomeLoc>();
+        intervals.add(genomeLocParser.createGenomeLoc("1", 10, 20));
+        intervals.add(genomeLocParser.createGenomeLoc("1", 1, 999));
+        intervals.add(genomeLocParser.createGenomeLoc("1", 1000, 1999));
+        intervals.add(genomeLocParser.createGenomeLoc("1", 2000, 2999));
+        intervals.add(genomeLocParser.createGenomeLoc("1", 10000, 20000));
+        intervals.add(genomeLocParser.createGenomeLoc("2", 1, 100));
+        intervals.add(genomeLocParser.createGenomeLoc("20", 10000, 10100));
+        intervals = IntervalUtils.sortAndMergeIntervals(genomeLocParser, intervals, IntervalMergingRule.OVERLAPPING_ONLY).toList();
+
+        // read names describe the interval/shard-boundary case each one exercises
+        List<GATKSAMRecord> reads = new ArrayList<GATKSAMRecord>();
+        reads.add(buildSAMRecord("simple", "1", 100, 200));
+        reads.add(buildSAMRecord("overlap_equal", "1", 10, 20));
+        reads.add(buildSAMRecord("overlap_unequal", "1", 10, 21));
+        reads.add(buildSAMRecord("boundary_equal", "1", 1990, 2009));
+        reads.add(buildSAMRecord("boundary_unequal", "1", 1990, 2008));
+        reads.add(buildSAMRecord("boundary_1_pre", "1", 1950, 2000));
+        reads.add(buildSAMRecord("boundary_1_post", "1", 1999, 2050));
+        reads.add(buildSAMRecord("extended_and_np", "1", 990, 1990));
+        reads.add(buildSAMRecord("outside_intervals", "1", 5000, 6000));
+        reads.add(buildSAMRecord("shard_boundary_1_pre", "1", 16300, 16385));
+        reads.add(buildSAMRecord("shard_boundary_1_post", "1", 16384, 16400));
+        reads.add(buildSAMRecord("shard_boundary_equal", "1", 16355, 16414));
+        reads.add(buildSAMRecord("simple20", "20", 10025, 10075));
+
+        createBAM(reads);
+    }
+
+    // Writes the reads, coordinate-sorted, to an indexed temp BAM (stored in testBAM)
+    // and registers both possible index-file names for deletion on exit.
+    private void createBAM(List<GATKSAMRecord> reads) throws IOException {
+        testBAM = createTempFile("TraverseActiveRegionsUnitTest", ".bam");
+
+        SAMFileWriter out = new SAMFileWriterFactory().setCreateIndex(true).makeBAMWriter(reads.get(0).getHeader(), true, testBAM);
+        for (GATKSAMRecord read : ReadUtils.sortReadsByCoordinate(reads)) {
+            out.addAlignment(read);
+        }
+        out.close();
+
+        // index may be written as either foo.bai or foo.bam.bai; clean up both
+        new File(testBAM.getAbsolutePath().replace(".bam", ".bai")).deleteOnExit();
+        new File(testBAM.getAbsolutePath() + ".bai").deleteOnExit();
+    }
+
+    @Test(enabled = true && ! DEBUG, dataProvider = "TraversalEngineProvider")
+    // Contract check: isActive() must be called for every base of the analysis intervals.
+    public void testAllBasesSeen(TraverseActiveRegions t) {
+        DummyActiveRegionWalker walker = new DummyActiveRegionWalker();
+
+        List<GenomeLoc> activeIntervals = getIsActiveIntervals(t, walker, intervals);
+        // Contract: Every genome position in the analysis interval(s) is processed by the walker's isActive() call
+        verifyEqualIntervals(intervals, activeIntervals);
+    }
+
+    // Runs a full traversal over the intervals and returns, in call order, every
+    // locus the walker's isActive() was invoked on (collected by the dummy walker).
+    private List<GenomeLoc> getIsActiveIntervals(final TraverseActiveRegions t, DummyActiveRegionWalker walker, List<GenomeLoc> intervals) {
+        List<GenomeLoc> activeIntervals = new ArrayList<GenomeLoc>();
+        for (LocusShardDataProvider dataProvider : createDataProviders(t, walker, intervals, testBAM)) {
+            t.traverse(walker, dataProvider, 0);
+            activeIntervals.addAll(walker.isActiveCalls);
+        }
+
+        return activeIntervals;
+    }
+
+    // isActive() probabilities below 0 must violate the cofoja precondition contract.
+    @Test (enabled = ENFORCE_CONTRACTS, dataProvider = "TraversalEngineProvider", expectedExceptions = PreconditionError.class)
+    public void testIsActiveRangeLow (TraverseActiveRegions t) {
+        DummyActiveRegionWalker walker = new DummyActiveRegionWalker(-0.1);
+        getActiveRegions(t, walker, intervals).values();
+    }
+
+    // isActive() probabilities above 1 must violate the cofoja precondition contract.
+    @Test (enabled = ENFORCE_CONTRACTS, dataProvider = "TraversalEngineProvider", expectedExceptions = PreconditionError.class)
+    public void testIsActiveRangeHigh (TraverseActiveRegions t) {
+        DummyActiveRegionWalker walker = new DummyActiveRegionWalker(1.1);
+        getActiveRegions(t, walker, intervals).values();
+    }
+
+    @Test(enabled = true && ! DEBUG, dataProvider = "TraversalEngineProvider")
+    // With all intervals preset as active, the produced regions must tile the
+    // intervals exactly (see verifyActiveRegionCoverage for the contract details).
+    public void testActiveRegionCoverage(TraverseActiveRegions t) {
+        DummyActiveRegionWalker walker = new DummyActiveRegionWalker(new GenomeLocSortedSet(genomeLocParser, intervals), true);
+
+        Collection<ActiveRegion> activeRegions = getActiveRegions(t, walker, intervals).values();
+        verifyActiveRegionCoverage(intervals, activeRegions);
+    }
+
+    // Asserts the active-region contract over the given intervals:
+    //  - regions do not overlap each other,
+    //  - every interval base lies in exactly one region and no region base lies
+    //    outside the intervals,
+    //  - every interval start/stop coincides with some region start/stop.
+    private void verifyActiveRegionCoverage(List<GenomeLoc> intervals, Collection<ActiveRegion> activeRegions) {
+        List<GenomeLoc> intervalStarts = new ArrayList<GenomeLoc>();
+        List<GenomeLoc> intervalStops = new ArrayList<GenomeLoc>();
+
+        for (GenomeLoc interval : intervals) {
+            intervalStarts.add(interval.getStartLocation());
+            intervalStops.add(interval.getStopLocation());
+        }
+
+        // maps each single-base locus to the (unique) region covering it
+        Map<GenomeLoc, ActiveRegion> baseRegionMap = new HashMap<GenomeLoc, ActiveRegion>();
+
+        for (ActiveRegion activeRegion : activeRegions) {
+            for (GenomeLoc activeLoc : toSingleBaseLocs(activeRegion.getLocation())) {
+                // Contract: Regions do not overlap
+                Assert.assertFalse(baseRegionMap.containsKey(activeLoc), "Genome location " + activeLoc + " is assigned to more than one region");
+                baseRegionMap.put(activeLoc, activeRegion);
+            }
+
+            // tick off interval boundaries matched by this region's boundaries
+            GenomeLoc start = activeRegion.getLocation().getStartLocation();
+            if (intervalStarts.contains(start))
+                intervalStarts.remove(start);
+
+            GenomeLoc stop = activeRegion.getLocation().getStopLocation();
+            if (intervalStops.contains(stop))
+                intervalStops.remove(stop);
+        }
+
+        for (GenomeLoc baseLoc : toSingleBaseLocs(intervals)) {
+            // Contract: Each location in the interval(s) is in exactly one region
+            // Contract: The total set of regions exactly matches the analysis interval(s)
+            Assert.assertTrue(baseRegionMap.containsKey(baseLoc), "Genome location " + baseLoc + " is not assigned to any region");
+            baseRegionMap.remove(baseLoc);
+        }
+
+        // Contract: The total set of regions exactly matches the analysis interval(s)
+        Assert.assertEquals(baseRegionMap.size(), 0, "Active regions contain base(s) outside of the given intervals");
+
+        // Contract: All explicit interval boundaries must also be region boundaries
+        Assert.assertEquals(intervalStarts.size(), 0, "Interval start location does not match an active region start location");
+        Assert.assertEquals(intervalStops.size(), 0, "Interval stop location does not match an active region stop location");
+    }
+
+    @Test(enabled = true && ! DEBUG, dataProvider = "TraversalEngineProvider")
+    // Contract check: a region's extended span must be clipped to [1, contig length].
+    public void testActiveRegionExtensionOnContig(TraverseActiveRegions t) {
+        DummyActiveRegionWalker walker = new DummyActiveRegionWalker();
+
+        Collection<ActiveRegion> activeRegions = getActiveRegions(t, walker, intervals).values();
+        for (ActiveRegion activeRegion : activeRegions) {
+            GenomeLoc loc = activeRegion.getExtendedLoc();
+
+            // Contract: active region extensions must stay on the contig
+            Assert.assertTrue(loc.getStart() > 0, "Active region extension begins at location " + loc.getStart() + ", past the left end of the contig");
+            int refLen = dictionary.getSequence(loc.getContigIndex()).getSequenceLength();
+            Assert.assertTrue(loc.getStop() <= refLen, "Active region extension ends at location " + loc.getStop() + ", past the right end of the contig");
+        }
+    }
+
+    @Test(enabled = true && !DEBUG, dataProvider = "TraversalEngineProvider")
+    // With only the PRIMARY read state requested, each read must appear in at most
+    // one region — the region of maximum overlap (earlier region wins ties).
+    public void testPrimaryReadMapping(TraverseActiveRegions t) {
+        DummyActiveRegionWalker walker = new DummyActiveRegionWalker(new GenomeLocSortedSet(genomeLocParser, intervals),
+                EnumSet.of(ActiveRegionReadState.PRIMARY),
+                true);
+
+        // Contract: Each read has the Primary state in a single region (or none)
+        // This is the region of maximum overlap for the read (earlier if tied)
+
+        // simple: Primary in 1:1-999
+        // overlap_equal: Primary in 1:1-999
+        // overlap_unequal: Primary in 1:1-999
+        // boundary_equal: Primary in 1:1000-1999, Non-Primary in 1:2000-2999
+        // boundary_unequal: Primary in 1:1000-1999, Non-Primary in 1:2000-2999
+        // boundary_1_pre: Primary in 1:1000-1999, Non-Primary in 1:2000-2999
+        // boundary_1_post: Primary in 1:1000-1999, Non-Primary in 1:2000-2999
+        // extended_and_np: Primary in 1:1-999, Non-Primary in 1:1000-1999, Extended in 1:2000-2999
+        // outside_intervals: none
+        // shard_boundary_1_pre: Primary in 1:14908-16384, Non-Primary in 1:16385-16927
+        // shard_boundary_1_post: Primary in 1:14908-16384, Non-Primary in 1:16385-16927
+        // shard_boundary_equal: Primary in 1:14908-16384, Non-Primary in 1:16385-16927
+        // simple20: Primary in 20:10000-10100
+
+        Map<GenomeLoc, ActiveRegion> activeRegions = getActiveRegions(t, walker, intervals);
+        ActiveRegion region;
+
+        region = activeRegions.get(genomeLocParser.createGenomeLoc("1", 1, 999));
+        verifyReadMapping(region, "simple", "overlap_equal", "overlap_unequal", "extended_and_np");
+
+        region = activeRegions.get(genomeLocParser.createGenomeLoc("1", 1000, 1999));
+        verifyReadMapping(region, "boundary_unequal", "boundary_1_pre", "boundary_equal", "boundary_1_post");
+
+        // all reads overlapping 1:2000-2999 have their primary region earlier, so none here
+        region = activeRegions.get(genomeLocParser.createGenomeLoc("1", 2000, 2999));
+        verifyReadMapping(region);
+
+        region = activeRegions.get(genomeLocParser.createGenomeLoc("1", 10000, 20000));
+        verifyReadMapping(region, "shard_boundary_1_pre", "shard_boundary_1_post", "shard_boundary_equal");
+
+        region = activeRegions.get(genomeLocParser.createGenomeLoc("20", 10000, 10100));
+        verifyReadMapping(region, "simple20");
+    }
+
+    @Test(enabled = true && ! DEBUG, dataProvider = "TraversalEngineProvider")
+    // With PRIMARY + NONPRIMARY requested, a read also appears in every other
+    // region it overlaps, in addition to its single primary region.
+    public void testNonPrimaryReadMapping(TraverseActiveRegions t) {
+        DummyActiveRegionWalker walker = new DummyActiveRegionWalker(new GenomeLocSortedSet(genomeLocParser, intervals),
+                EnumSet.of(ActiveRegionReadState.PRIMARY, ActiveRegionReadState.NONPRIMARY),
+                true);
+
+        // Contract: Each read has the Primary state in a single region (or none)
+        // This is the region of maximum overlap for the read (earlier if tied)
+
+        // Contract: Each read has the Non-Primary state in all other regions it overlaps
+
+        // simple: Primary in 1:1-999
+        // overlap_equal: Primary in 1:1-999
+        // overlap_unequal: Primary in 1:1-999
+        // boundary_equal: Primary in 1:1000-1999, Non-Primary in 1:2000-2999
+        // boundary_unequal: Primary in 1:1000-1999, Non-Primary in 1:2000-2999
+        // boundary_1_pre: Primary in 1:1000-1999, Non-Primary in 1:2000-2999
+        // boundary_1_post: Primary in 1:1000-1999, Non-Primary in 1:2000-2999
+        // extended_and_np: Primary in 1:1-999, Non-Primary in 1:1000-1999, Extended in 1:2000-2999
+        // outside_intervals: none
+        // shard_boundary_1_pre: Primary in 1:14908-16384, Non-Primary in 1:16385-16927
+        // shard_boundary_1_post: Primary in 1:14908-16384, Non-Primary in 1:16385-16927
+        // shard_boundary_equal: Primary in 1:14908-16384, Non-Primary in 1:16385-16927
+        // simple20: Primary in 20:10000-10100
+
+        Map<GenomeLoc, ActiveRegion> activeRegions = getActiveRegions(t, walker, intervals);
+        ActiveRegion region;
+
+        region = activeRegions.get(genomeLocParser.createGenomeLoc("1", 1, 999));
+        verifyReadMapping(region, "simple", "overlap_equal", "overlap_unequal", "extended_and_np");
+
+        region = activeRegions.get(genomeLocParser.createGenomeLoc("1", 1000, 1999));
+        verifyReadMapping(region, "boundary_equal", "boundary_unequal", "extended_and_np", "boundary_1_pre", "boundary_1_post");
+
+        region = activeRegions.get(genomeLocParser.createGenomeLoc("1", 2000, 2999));
+        verifyReadMapping(region, "boundary_equal", "boundary_unequal", "boundary_1_pre", "boundary_1_post");
+
+        region = activeRegions.get(genomeLocParser.createGenomeLoc("1", 10000, 20000));
+        verifyReadMapping(region, "shard_boundary_1_pre", "shard_boundary_1_post", "shard_boundary_equal");
+
+        region = activeRegions.get(genomeLocParser.createGenomeLoc("20", 10000, 10100));
+        verifyReadMapping(region, "simple20");
+    }
+
+    @Test(enabled = true && ! DEBUG, dataProvider = "TraversalEngineProvider")
+    public void testExtendedReadMapping(TraverseActiveRegions t) {
+        DummyActiveRegionWalker walker = new DummyActiveRegionWalker(new GenomeLocSortedSet(genomeLocParser, intervals),
+                EnumSet.of(ActiveRegionReadState.PRIMARY, ActiveRegionReadState.NONPRIMARY, ActiveRegionReadState.EXTENDED),
+                true);
+
+        // Contract: Each read has the Primary state in a single region (or none)
+        // This is the region of maximum overlap for the read (earlier if tied)
+
+        // Contract: Each read has the Non-Primary state in all other regions it overlaps
+        // Contract: Each read has the Extended state in regions where it only overlaps if the region is extended
+
+        // simple: Primary in 1:1-999
+        // overlap_equal: Primary in 1:1-999
+        // overlap_unequal: Primary in 1:1-999
+        // boundary_equal: Non-Primary in 1:1000-1999, Primary in 1:2000-2999
+        // boundary_unequal: Primary in 1:1000-1999, Non-Primary in 1:2000-2999
+        // boundary_1_pre: Primary in 1:1000-1999, Non-Primary in 1:2000-2999
+        // boundary_1_post: Non-Primary in 1:1000-1999, Primary in 1:2000-2999
+        // extended_and_np: Non-Primary in 1:1-999, Primary in 1:1000-1999, Extended in 1:2000-2999
+        // outside_intervals: none
+        // shard_boundary_1_pre: Primary in 1:14908-16384, Non-Primary in 1:16385-16927
+        // shard_boundary_1_post: Non-Primary in 1:14908-16384, Primary in 1:16385-16927
+        // shard_boundary_equal: Non-Primary in 1:14908-16384, Primary in 1:16385-16927
+        // simple20: Primary in 20:10000-10100
+
+        Map<GenomeLoc, ActiveRegion> activeRegions = getActiveRegions(t, walker, intervals);
+        ActiveRegion region;
+
+        region = activeRegions.get(genomeLocParser.createGenomeLoc("1", 1, 999));
+        verifyReadMapping(region, "simple", "overlap_equal", "overlap_unequal", "extended_and_np");
+
+        region = activeRegions.get(genomeLocParser.createGenomeLoc("1", 1000, 1999));
+        verifyReadMapping(region, "boundary_equal", "boundary_unequal", "extended_and_np", "boundary_1_pre", "boundary_1_post");
+
+        region = activeRegions.get(genomeLocParser.createGenomeLoc("1", 2000, 2999));
+        verifyReadMapping(region, "boundary_equal", "boundary_unequal", "extended_and_np", "boundary_1_pre", "boundary_1_post");
+
+        region = activeRegions.get(genomeLocParser.createGenomeLoc("1", 10000, 20000));
+        verifyReadMapping(region, "shard_boundary_1_pre", "shard_boundary_1_post", "shard_boundary_equal");
+
+        region = activeRegions.get(genomeLocParser.createGenomeLoc("20", 10000, 10100));
+        verifyReadMapping(region, "simple20");
+    }
+
+    @Test(enabled = true && ! DEBUG, dataProvider = "TraversalEngineProvider")
+    public void testUnmappedReads(TraverseActiveRegions t) {
+        // TODO
+    }
+
+    private void verifyReadMapping(ActiveRegion region, String... reads) {
+        Assert.assertNotNull(region, "Region was unexpectedly null");
+        final Set<String> regionReads = new HashSet<String>();
+        for (SAMRecord read : region.getReads()) {
+            Assert.assertFalse(regionReads.contains(read.getReadName()), "Duplicate reads detected in region " + region + " read " + read.getReadName());
+            regionReads.add(read.getReadName());
+        }
+
+        Collection<String> wantReads = new ArrayList<String>(Arrays.asList(reads));
+        for (SAMRecord read : region.getReads()) {
+            String regionReadName = read.getReadName();
+            Assert.assertTrue(wantReads.contains(regionReadName), "Read " + regionReadName + " incorrectly assigned to active region " + region);
+            wantReads.remove(regionReadName);
+        }
+
+        Assert.assertTrue(wantReads.isEmpty(), "Reads missing in active region " + region + ", wanted " + (wantReads.isEmpty() ? "" : wantReads.iterator().next()));
+    }
+
+    private Map<GenomeLoc, ActiveRegion> getActiveRegions(TraverseActiveRegions t, DummyActiveRegionWalker walker, List<GenomeLoc> intervals) {
+        return getActiveRegions(t, walker, intervals, testBAM);
+    }
+
+    private Map<GenomeLoc, ActiveRegion> getActiveRegions(TraverseActiveRegions t, DummyActiveRegionWalker walker, List<GenomeLoc> intervals, final File bam) {
+        for (LocusShardDataProvider dataProvider : createDataProviders(t, walker, intervals, bam))
+            t.traverse(walker, dataProvider, 0);
+
+        return walker.mappedActiveRegions;
+    }
+
+    private Collection<GenomeLoc> toSingleBaseLocs(GenomeLoc interval) {
+        List<GenomeLoc> bases = new ArrayList<GenomeLoc>();
+        if (interval.size() == 1)
+            bases.add(interval);
+        else {
+            for (int location = interval.getStart(); location <= interval.getStop(); location++)
+                bases.add(genomeLocParser.createGenomeLoc(interval.getContig(), location, location));
+        }
+
+        return bases;
+    }
+
+    private Collection<GenomeLoc> toSingleBaseLocs(List<GenomeLoc> intervals) {
+        Set<GenomeLoc> bases = new TreeSet<GenomeLoc>();    // for sorting and uniqueness
+        for (GenomeLoc interval : intervals)
+            bases.addAll(toSingleBaseLocs(interval));
+
+        return bases;
+    }
+
+    private void verifyEqualIntervals(List<GenomeLoc> aIntervals, List<GenomeLoc> bIntervals) {
+        Collection<GenomeLoc> aBases = toSingleBaseLocs(aIntervals);
+        Collection<GenomeLoc> bBases = toSingleBaseLocs(bIntervals);
+
+        Assert.assertTrue(aBases.size() == bBases.size(), "Interval lists have a differing number of bases: " + aBases.size() + " vs. " + bBases.size());
+
+        Iterator<GenomeLoc> aIter = aBases.iterator();
+        Iterator<GenomeLoc> bIter = bBases.iterator();
+        while (aIter.hasNext() && bIter.hasNext()) {
+            GenomeLoc aLoc = aIter.next();
+            GenomeLoc bLoc = bIter.next();
+            Assert.assertTrue(aLoc.equals(bLoc), "Interval locations do not match: " + aLoc + " vs. " + bLoc);
+        }
+    }
+
+    // copied from LocusViewTemplate
+    protected GATKSAMRecord buildSAMRecord(String readName, String contig, int alignmentStart, int alignmentEnd) {
+        SAMFileHeader header = ArtificialSAMUtils.createDefaultReadGroup(new SAMFileHeader(), "test", "test");
+        header.setSequenceDictionary(dictionary);
+        header.setSortOrder(SAMFileHeader.SortOrder.coordinate);
+        GATKSAMRecord record = new GATKSAMRecord(header);
+
+        record.setReadName(readName);
+        record.setReferenceIndex(dictionary.getSequenceIndex(contig));
+        record.setAlignmentStart(alignmentStart);
+
+        Cigar cigar = new Cigar();
+        int len = alignmentEnd - alignmentStart + 1;
+        cigar.add(new CigarElement(len, CigarOperator.M));
+        record.setCigar(cigar);
+        record.setReadString(new String(new char[len]).replace("\0", "A"));
+        record.setBaseQualities(new byte[len]);
+        record.setReadGroup(new GATKSAMReadGroupRecord(header.getReadGroup("test")));
+
+        return record;
+    }
+
+    private List<LocusShardDataProvider> createDataProviders(TraverseActiveRegions traverseActiveRegions, final Walker walker, List<GenomeLoc> intervals, File bamFile) {
+        GenomeAnalysisEngine engine = new GenomeAnalysisEngine();
+        engine.setGenomeLocParser(genomeLocParser);
+
+        Collection<SAMReaderID> samFiles = new ArrayList<SAMReaderID>();
+        SAMReaderID readerID = new SAMReaderID(bamFile, new Tags());
+        samFiles.add(readerID);
+
+        SAMDataSource dataSource = new SAMDataSource(referenceFile, samFiles, new ThreadAllocation(), null, genomeLocParser,
+                false,
+                ValidationStringency.STRICT,
+                null,
+                null,
+                new ValidationExclusion(),
+                new ArrayList<ReadFilter>(),
+                new ArrayList<ReadTransformer>(),
+                false, (byte)30, false, true, null, IntervalMergingRule.ALL);
+
+        engine.setReadsDataSource(dataSource);
+        final Set<String> samples = ReadUtils.getSAMFileSamples(dataSource.getHeader());
+
+        traverseActiveRegions.initialize(engine, walker);
+        List<LocusShardDataProvider> providers = new ArrayList<LocusShardDataProvider>();
+        for (Shard shard : dataSource.createShardIteratorOverIntervals(new GenomeLocSortedSet(genomeLocParser, intervals), new ActiveRegionShardBalancer())) {
+            for (WindowMaker.WindowMakerIterator window : new WindowMaker(shard, genomeLocParser, dataSource.seek(shard), shard.getGenomeLocs(), samples)) {
+                providers.add(new LocusShardDataProvider(shard, shard.getReadProperties(), genomeLocParser, window.getLocus(), window, reference, new ArrayList<ReferenceOrderedDataSource>()));
+            }
+        }
+
+        return providers;
+    }
+
+    // ---------------------------------------------------------------------------------------------------------
+    //
+    // Combinatorial tests to ensure reads are going into the right regions
+    //
+    // ---------------------------------------------------------------------------------------------------------
+
+    @DataProvider(name = "CombinatorialARTTilingProvider")
+    public Object[][] makeCombinatorialARTTilingProvider() {
+        final List<Object[]> tests = new LinkedList<Object[]>();
+
+        final List<Integer> starts = Arrays.asList(
+                1, // very start of the chromosome
+                ArtificialBAMBuilder.BAM_SHARD_SIZE - 100, // right before the shard boundary
+                ArtificialBAMBuilder.BAM_SHARD_SIZE + 100 // right after the shard boundary
+        );
+
+        final List<EnumSet<ActiveRegionReadState>> allReadStates = Arrays.asList(
+                EnumSet.of(ActiveRegionReadState.PRIMARY),
+                EnumSet.of(ActiveRegionReadState.PRIMARY, ActiveRegionReadState.NONPRIMARY),
+                EnumSet.of(ActiveRegionReadState.PRIMARY, ActiveRegionReadState.NONPRIMARY, ActiveRegionReadState.EXTENDED)
+        );
+
+        final int maxTests = Integer.MAX_VALUE;
+        int nTests = 0;
+        for ( final int readLength : Arrays.asList(100) ) {
+            for ( final int skips : Arrays.asList(0, 10) ) {
+                for ( final int start : starts ) {
+                    for ( final int nReadsPerLocus : Arrays.asList(1, 2) ) {
+                        for ( final int nLoci : Arrays.asList(1, 1000) ) {
+                            final ArtificialBAMBuilder bamBuilder = new ArtificialBAMBuilder(reference, nReadsPerLocus, nLoci);
+                            bamBuilder.setReadLength(readLength);
+                            bamBuilder.setSkipNLoci(skips);
+                            bamBuilder.setAlignmentStart(start);
+                            for ( EnumSet<ActiveRegionReadState> readStates : allReadStates ) {
+                                for ( final GenomeLocSortedSet activeRegions : enumerateActiveRegions(bamBuilder.getAlignmentStart(), bamBuilder.getAlignmentEnd())) {
+                                    nTests++;
+                                    if ( nTests < maxTests ) // && nTests == 1238 )
+                                        tests.add(new Object[]{new TraverseActiveRegions<>(), nTests, activeRegions, readStates, bamBuilder});
+                                }
+                            }
+                        }
+                    }
+                }
+            }
+        }
+
+        return tests.toArray(new Object[][]{});
+    }
+
+    private Collection<GenomeLocSortedSet> enumerateActiveRegions(final int start, final int stop) {
+        // should basically cut up entire region into equal sized chunks, of
+        // size 10, 20, 50, 100, etc, alternating skipping pieces so they are inactive
+        // Need to make sure we include some edge cases:
+        final List<GenomeLocSortedSet> activeRegions = new LinkedList<GenomeLocSortedSet>();
+
+        for ( final int stepSize : Arrays.asList(11, 29, 53, 97) ) {
+            for ( final boolean startWithActive : Arrays.asList(true, false) ) {
+                activeRegions.add(makeActiveRegionMask(start, stop, stepSize,  startWithActive));
+            }
+        }
+
+        // active region is the whole interval
+        activeRegions.add(new GenomeLocSortedSet(genomeLocParser, genomeLocParser.createGenomeLoc("1", start, stop)));
+
+        // active region extends up to the end of the data, but doesn't include start
+        activeRegions.add(new GenomeLocSortedSet(genomeLocParser, genomeLocParser.createGenomeLoc("1", start+10, stop)));
+
+        return activeRegions;
+    }
+
+    private GenomeLocSortedSet makeActiveRegionMask(final int start, final int stop, final int stepSize, final boolean startWithActive) {
+        final GenomeLocSortedSet active = new GenomeLocSortedSet(genomeLocParser);
+
+        boolean includeRegion = startWithActive;
+        for ( int left = start; left < stop; left += stepSize) {
+            final int right = left + stepSize;
+            final GenomeLoc region = genomeLocParser.createGenomeLoc("1", left, right);
+            if ( includeRegion )
+                active.add(region);
+            includeRegion = ! includeRegion;
+        }
+
+        return active;
+    }
+
+
+    @Test(enabled = true && ! DEBUG, dataProvider = "CombinatorialARTTilingProvider")
+    public void testARTReadsInActiveRegions(final TraverseActiveRegions<Integer, Integer> traversal, final int id, final GenomeLocSortedSet activeRegions, final EnumSet<ActiveRegionReadState> readStates, final ArtificialBAMBuilder bamBuilder) {
+        logger.warn("Running testARTReadsInActiveRegions id=" + id + " locs " + activeRegions + " against bam " + bamBuilder);
+        final List<GenomeLoc> intervals = Arrays.asList(
+                genomeLocParser.createGenomeLoc("1", bamBuilder.getAlignmentStart(), bamBuilder.getAlignmentEnd())
+        );
+
+        final DummyActiveRegionWalker walker = new DummyActiveRegionWalker(activeRegions, false);
+        walker.setStates(readStates);
+
+        final Map<GenomeLoc, ActiveRegion> activeRegionsMap = getActiveRegions(traversal, walker, intervals, bamBuilder.makeTemporarilyBAMFile());
+
+        final Set<String> alreadySeenReads = new HashSet<String>(); // for use with the primary / non-primary
+        for ( final ActiveRegion region : activeRegionsMap.values() ) {
+            final Set<String> readNamesInRegion = readNamesInRegion(region);
+            int nReadsExpectedInRegion = 0;
+            for ( final GATKSAMRecord read : bamBuilder.makeReads() ) {
+                final GenomeLoc readLoc = genomeLocParser.createGenomeLoc(read);
+
+                boolean shouldBeInRegion = readStates.contains(ActiveRegionReadState.EXTENDED)
+                        ? region.getExtendedLoc().overlapsP(readLoc)
+                        : region.getLocation().overlapsP(readLoc);
+
+                if ( ! readStates.contains(ActiveRegionReadState.NONPRIMARY) ) {
+                    if ( alreadySeenReads.contains(read.getReadName()) )
+                        shouldBeInRegion = false;
+                    else if ( shouldBeInRegion )
+                        alreadySeenReads.add(read.getReadName());
+                }
+
+                String msg = readNamesInRegion.contains(read.getReadName()) == shouldBeInRegion ? "" : "Region " + region +
+                        " failed contains read check: read " + read + " with span " + readLoc + " should be in region is " + shouldBeInRegion + " but I got the opposite";
+                Assert.assertEquals(readNamesInRegion.contains(read.getReadName()), shouldBeInRegion, msg);
+
+                nReadsExpectedInRegion += shouldBeInRegion ? 1 : 0;
+            }
+
+            Assert.assertEquals(region.size(), nReadsExpectedInRegion, "There are more reads in active region " + region + "than expected");
+        }
+    }
+
+    private Set<String> readNamesInRegion(final ActiveRegion region) {
+        final Set<String> readNames = new LinkedHashSet<String>(region.getReads().size());
+        for ( final SAMRecord read : region.getReads() )
+            readNames.add(read.getReadName());
+        return readNames;
+    }
+
+    // ---------------------------------------------------------------------------------------------------------
+    //
+    // Make sure all insertion reads are properly included in the active regions
+    //
+    // ---------------------------------------------------------------------------------------------------------
+
+    @Test(dataProvider = "TraversalEngineProvider", enabled = true && ! DEBUG)
+    public void ensureAllInsertionReadsAreInActiveRegions(final TraverseActiveRegions<Integer, Integer> traversal) {
+
+        final int readLength = 10;
+        final int start = 20;
+        final int nReadsPerLocus = 10;
+        final int nLoci = 3;
+
+        final ArtificialBAMBuilder bamBuilder = new ArtificialBAMBuilder(reference, nReadsPerLocus, nLoci);
+        bamBuilder.setReadLength(readLength);
+        bamBuilder.setAlignmentStart(start);
+
+        // note that the alignment start must be +1: the read's all-insertion ("I") CIGAR places its end 1 bp before the start, which would otherwise leave it outside the region
+        GATKSAMRecord allI = ArtificialSAMUtils.createArtificialRead(bamBuilder.getHeader(),"allI",0,start+1,readLength);
+        allI.setCigarString(readLength + "I");
+        allI.setReadGroup(new GATKSAMReadGroupRecord(bamBuilder.getHeader().getReadGroups().get(0)));
+
+        bamBuilder.addReads(allI);
+
+        final GenomeLocSortedSet activeRegions = new GenomeLocSortedSet(bamBuilder.getGenomeLocParser());
+        activeRegions.add(bamBuilder.getGenomeLocParser().createGenomeLoc("1", 10, 30));
+        final List<GenomeLoc> intervals = Arrays.asList(
+                genomeLocParser.createGenomeLoc("1", bamBuilder.getAlignmentStart(), bamBuilder.getAlignmentEnd())
+        );
+
+        final DummyActiveRegionWalker walker = new DummyActiveRegionWalker(activeRegions, false);
+
+        final Map<GenomeLoc, ActiveRegion> activeRegionsMap = getActiveRegions(traversal, walker, intervals, bamBuilder.makeTemporarilyBAMFile());
+
+        final ActiveRegion region = activeRegionsMap.values().iterator().next();
+        int nReadsExpectedInRegion = 0;
+
+        final Set<String> readNamesInRegion = readNamesInRegion(region);
+        for ( final GATKSAMRecord read : bamBuilder.makeReads() ) {
+            Assert.assertTrue(readNamesInRegion.contains(read.getReadName()),
+                    "Region " + region + " should contain read " + read + " with cigar " + read.getCigarString() + " but it wasn't");
+            nReadsExpectedInRegion++;
+        }
+
+        Assert.assertEquals(region.size(), nReadsExpectedInRegion, "There are more reads in active region " + region + "than expected");
+    }
+}
diff --git a/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/traversals/TraverseDuplicatesUnitTest.java b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/traversals/TraverseDuplicatesUnitTest.java
new file mode 100644
index 0000000..10f6801
--- /dev/null
+++ b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/traversals/TraverseDuplicatesUnitTest.java
@@ -0,0 +1,162 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.traversals;
+
+import htsjdk.samtools.SAMFileHeader;
+import htsjdk.samtools.SAMRecord;
+import org.broadinstitute.gatk.engine.GenomeAnalysisEngine;
+import org.testng.Assert;
+import org.broadinstitute.gatk.utils.BaseTest;
+import org.broadinstitute.gatk.utils.GenomeLocParser;
+import org.broadinstitute.gatk.utils.sam.ArtificialSAMUtils;
+
+import org.testng.annotations.BeforeMethod;
+
+import org.testng.annotations.Test;
+
+import java.io.File;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Set;
+
+
+/**
+ * @author aaron
+ *         <p/>
+ *         Class TraverseDuplicatesUnitTest
+ *         <p/>
+ *         test the meat of the traverse duplicates.
+ */
+public class TraverseDuplicatesUnitTest extends BaseTest {
+
+    private TraverseDuplicates obj = new TraverseDuplicates();
+    private SAMFileHeader header;
+    private GenomeLocParser genomeLocParser;
+    private GenomeAnalysisEngine engine;
+    private File refFile = new File(validationDataLocation + "Homo_sapiens_assembly17.fasta");
+
+
+    @BeforeMethod
+    public void doBefore() {
+        header = ArtificialSAMUtils.createArtificialSamHeader(1, 1, 1000);
+        genomeLocParser =new GenomeLocParser(header.getSequenceDictionary());
+
+        engine = new GenomeAnalysisEngine();
+        engine.setReferenceDataSource(refFile);
+        engine.setGenomeLocParser(genomeLocParser);
+        
+        obj.initialize(engine, null);
+    }
+
+    @Test
+    public void testAllDuplicatesNoPairs() {
+        List<SAMRecord> list = new ArrayList<SAMRecord>();
+        for (int x = 0; x < 10; x++) {
+            SAMRecord read = ArtificialSAMUtils.createArtificialRead(header, "SWEET_READ" + x, 0, 1, 100);
+            read.setDuplicateReadFlag(true);
+            list.add(read);
+        }
+        Set<List<SAMRecord>> myPairings = obj.uniqueReadSets(list);
+        Assert.assertEquals(myPairings.size(), 1);
+        Assert.assertEquals(myPairings.iterator().next().size(), 10); // dup's
+    }
+
+    @Test
+    public void testNoDuplicatesNoPairs() {
+        List<SAMRecord> list = new ArrayList<SAMRecord>();
+        for (int x = 0; x < 10; x++) {
+            SAMRecord read = ArtificialSAMUtils.createArtificialRead(header, "SWEET_READ" + x, 0, 1, 100);
+            read.setDuplicateReadFlag(false);
+            list.add(read);
+        }
+
+        Set<List<SAMRecord>> myPairing = obj.uniqueReadSets(list);
+        Assert.assertEquals(myPairing.size(), 10); // unique
+    }
+
+    @Test
+    public void testFiftyFiftyNoPairs() {
+        List<SAMRecord> list = new ArrayList<SAMRecord>();
+        for (int x = 0; x < 5; x++) {
+            SAMRecord read = ArtificialSAMUtils.createArtificialRead(header, "SWEET_READ" + x, 0, 1, 100);
+            read.setDuplicateReadFlag(true);
+            list.add(read);
+        }
+        for (int x = 10; x < 15; x++)
+            list.add(ArtificialSAMUtils.createArtificialRead(header, String.valueOf(x), 0, x, 100));
+
+        Set<List<SAMRecord>> myPairing = obj.uniqueReadSets(list);
+        Assert.assertEquals(myPairing.size(), 6);  // unique
+    }
+
+    @Test
+    public void testAllDuplicatesAllPairs() {
+        List<SAMRecord> list = new ArrayList<SAMRecord>();
+        for (int x = 0; x < 10; x++) {
+            SAMRecord read = ArtificialSAMUtils.createArtificialRead(header, "SWEET_READ"+ x, 0, 1, 100);
+            read.setDuplicateReadFlag(true);
+            read.setMateAlignmentStart(100);
+            read.setMateReferenceIndex(0);
+            read.setReadPairedFlag(true);
+            list.add(read);
+        }
+
+        Set<List<SAMRecord>> myPairing = obj.uniqueReadSets(list);
+        Assert.assertEquals(myPairing.size(), 1);  // unique
+    }
+
+    @Test
+    public void testNoDuplicatesAllPairs() {
+        List<SAMRecord> list = new ArrayList<SAMRecord>();
+        for (int x = 0; x < 10; x++) {
+            SAMRecord read = ArtificialSAMUtils.createArtificialRead(header, "SWEET_READ"+ x, 0, 1, 100);
+            if (x == 0) read.setDuplicateReadFlag(true); // one is a dup but (next line)
+            read.setMateAlignmentStart(100); // they all have a shared start and mate start so they're dup's
+            read.setMateReferenceIndex(0);
+            read.setReadPairedFlag(true);
+            list.add(read);
+        }
+
+        Set<List<SAMRecord>> myPairing = obj.uniqueReadSets(list);
+        Assert.assertEquals(myPairing.size(), 1);  // unique
+    }
+
+    @Test
+    public void testAllDuplicatesAllPairsDifferentPairedEnd() {
+        List<SAMRecord> list = new ArrayList<SAMRecord>();
+        for (int x = 0; x < 10; x++) {
+            SAMRecord read = ArtificialSAMUtils.createArtificialRead(header, "SWEET_READ" + x, 0, 1, 100);
+            if (x == 0) read.setDuplicateReadFlag(true); // one is a dup
+            read.setMateAlignmentStart(100 + x);
+            read.setMateReferenceIndex(0);
+            read.setReadPairedFlag(true);
+            list.add(read);
+        }
+
+        Set<List<SAMRecord>> myPairing = obj.uniqueReadSets(list);
+        Assert.assertEquals(myPairing.size(), 10);  // unique
+    }
+}
diff --git a/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/traversals/TraverseReadsUnitTest.java b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/traversals/TraverseReadsUnitTest.java
new file mode 100644
index 0000000..099d9e2
--- /dev/null
+++ b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/traversals/TraverseReadsUnitTest.java
@@ -0,0 +1,167 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.traversals;
+
+import htsjdk.samtools.reference.IndexedFastaSequenceFile;
+import htsjdk.samtools.reference.ReferenceSequenceFile;
+import org.broadinstitute.gatk.engine.walkers.TestCountReadsWalker;
+import org.broadinstitute.gatk.utils.BaseTest;
+import org.broadinstitute.gatk.utils.commandline.Tags;
+import org.broadinstitute.gatk.engine.GenomeAnalysisEngine;
+import org.broadinstitute.gatk.engine.datasources.providers.ReadShardDataProvider;
+import org.broadinstitute.gatk.engine.datasources.reads.*;
+import org.broadinstitute.gatk.engine.datasources.rmd.ReferenceOrderedDataSource;
+import org.broadinstitute.gatk.engine.resourcemanagement.ThreadAllocation;
+import org.broadinstitute.gatk.engine.walkers.ReadWalker;
+import org.broadinstitute.gatk.utils.GenomeLocParser;
+import org.broadinstitute.gatk.utils.exceptions.UserException;
+import org.broadinstitute.gatk.utils.fasta.CachingIndexedFastaSequenceFile;
+import org.broadinstitute.gatk.utils.sam.SAMReaderID;
+import org.testng.annotations.BeforeClass;
+import org.testng.annotations.BeforeMethod;
+import org.testng.annotations.Test;
+
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.FileOutputStream;
+import java.io.PrintStream;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+
+import static org.testng.Assert.fail;
+
+/**
+ *
+ * User: aaron
+ * Date: Apr 24, 2009
+ * Time: 3:42:16 PM
+ *
+ * The Broad Institute
+ * SOFTWARE COPYRIGHT NOTICE AGREEMENT 
+ * This software and its documentation are copyright 2009 by the
+ * Broad Institute/Massachusetts Institute of Technology. All rights are reserved.
+ *
+ * This software is supplied without any warranty or guaranteed support whatsoever. Neither
+ * the Broad Institute nor MIT can be responsible for its use, misuse, or functionality.
+ *
+ */
+
+
+/**
+ * @author aaron
+ * @version 1.0
+ * @date Apr 24, 2009
+ * <p/>
+ * Class TraverseReadsUnitTest
+ * <p/>
+ * test traversing reads
+ */
+public class TraverseReadsUnitTest extends BaseTest {
+
+    private ReferenceSequenceFile seq;
+    private SAMReaderID bam = new SAMReaderID(new File(validationDataLocation + "index_test.bam"),new Tags()); // TCGA-06-0188.aligned.duplicates_marked.bam");
+    private File refFile = new File(validationDataLocation + "Homo_sapiens_assembly17.fasta");
+    private List<SAMReaderID> bamList;
+    private ReadWalker countReadWalker;
+    private File output;
+    private TraverseReadsNano traversalEngine = null;
+
+    private IndexedFastaSequenceFile ref = null;
+    private GenomeLocParser genomeLocParser = null;
+    private GenomeAnalysisEngine engine = null;
+
+    @BeforeClass
+    public void doOnce() {
+        try {
+            ref = new CachingIndexedFastaSequenceFile(refFile);
+        }
+        catch(FileNotFoundException ex) {
+            throw new UserException.CouldNotReadInputFile(refFile,ex);
+        }
+        genomeLocParser = new GenomeLocParser(ref);
+
+        engine = new GenomeAnalysisEngine();
+        engine.setReferenceDataSource(refFile);
+        engine.setGenomeLocParser(genomeLocParser);
+    }
+
+    /**
+     * This function does the setup of our parser, before each method call.
+     * <p/>
+     * Called before every test case method.
+     */
+    @BeforeMethod
+    public void doForEachTest() {
+        output = new File("testOut.txt");
+        FileOutputStream out = null;
+        PrintStream ps; // declare a print stream object
+
+        try {
+            out = new FileOutputStream(output);
+        } catch (FileNotFoundException e) {
+            e.printStackTrace();  // print for diagnostics, then fail the test below
+            fail("Couldn't open the output file");
+        }
+
+        bamList = new ArrayList<SAMReaderID>();
+        bamList.add(bam);
+        countReadWalker = new TestCountReadsWalker();
+        
+        traversalEngine = new TraverseReadsNano(1);
+        traversalEngine.initialize(engine, countReadWalker);
+    }
+
+    /** Test out that we can shard the file and iterate over every read */
+    @Test
+    public void testUnmappedReadCount() {
+        SAMDataSource dataSource = new SAMDataSource(refFile, bamList,new ThreadAllocation(),null,genomeLocParser);
+        Iterable<Shard> shardStrategy = dataSource.createShardIteratorOverAllReads(new ReadShardBalancer());
+
+        countReadWalker.initialize();
+        Object accumulator = countReadWalker.reduceInit();
+
+        for(Shard shard: shardStrategy) {
+            if (shard == null) {
+                fail("Shard == null");
+            }
+
+            ReadShardDataProvider dataProvider = new ReadShardDataProvider(shard,genomeLocParser,dataSource.seek(shard),null, Collections.<ReferenceOrderedDataSource>emptyList());
+            accumulator = traversalEngine.traverse(countReadWalker, dataProvider, accumulator);
+            dataProvider.close();
+        }
+
+        countReadWalker.onTraversalDone(accumulator);
+
+        if (!(accumulator instanceof Long)) {
+            fail("Count read walker should return a Long.");
+        }
+        if (!accumulator.equals(new Long(10000))) {
+            fail("there should be 10000 mapped reads in the index file, there was " + (accumulator));
+        }
+    }
+
+}
diff --git a/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/walkers/TestCountLociWalker.java b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/walkers/TestCountLociWalker.java
new file mode 100644
index 0000000..8bf4533
--- /dev/null
+++ b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/walkers/TestCountLociWalker.java
@@ -0,0 +1,58 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.walkers;
+
+import org.broadinstitute.gatk.utils.commandline.Output;
+import org.broadinstitute.gatk.utils.contexts.AlignmentContext;
+import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
+import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
+
+import java.io.PrintStream;
+
+/**
+ * Minimal locus walker used by engine tests: contributes 1 per visited locus
+ * and accumulates the grand total, printing it when traversal finishes.
+ */
+public class TestCountLociWalker extends LocusWalker<Integer,Long> {
+    @Output
+    private PrintStream out;
+
+    /** Every locus visited counts exactly once. */
+    @Override
+    public Integer map(final RefMetaDataTracker tracker, final ReferenceContext ref, final AlignmentContext context) {
+        return 1;
+    }
+
+    /** The running total starts at zero. */
+    @Override
+    public Long reduceInit() {
+        return 0L;
+    }
+
+    /** Fold one per-locus contribution into the running total. */
+    @Override
+    public Long reduce(final Integer value, final Long sum) {
+        return sum + value;
+    }
+
+    /** Emit the final locus count to the configured output stream. */
+    @Override
+    public void onTraversalDone(final Long result) {
+        out.println(result);
+    }
+}
diff --git a/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/walkers/TestCountReadsWalker.java b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/walkers/TestCountReadsWalker.java
new file mode 100644
index 0000000..7362c4d
--- /dev/null
+++ b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/walkers/TestCountReadsWalker.java
@@ -0,0 +1,59 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.walkers;
+
+import org.broadinstitute.gatk.utils.commandline.Output;
+import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
+import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
+import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
+
+import java.io.PrintStream;
+
+/**
+ * Minimal read walker used by engine tests: contributes 1 per read and
+ * accumulates the total count, printing it at the end when an output
+ * stream has been bound.
+ */
+public class TestCountReadsWalker extends ReadWalker<Integer,Long> {
+    @Output
+    PrintStream out;
+
+    /** Every read visited counts exactly once. */
+    @Override
+    public Integer map(final ReferenceContext ref, final GATKSAMRecord read, final RefMetaDataTracker metaDataTracker) {
+        return 1;
+    }
+
+    /** The running total starts at zero. */
+    @Override
+    public Long reduceInit() {
+        return 0L;
+    }
+
+    /** Fold one per-read contribution into the running total. */
+    @Override
+    public Long reduce(final Integer value, final Long sum) {
+        return sum + value;
+    }
+
+    /** Emit the final read count, unless no output stream was injected (direct engine-test use). */
+    @Override
+    public void onTraversalDone(final Long result) {
+        if (out != null) {
+            out.println(result);
+        }
+    }
+}
diff --git a/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/walkers/TestErrorThrowingWalker.java b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/walkers/TestErrorThrowingWalker.java
new file mode 100644
index 0000000..d5308a3
--- /dev/null
+++ b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/walkers/TestErrorThrowingWalker.java
@@ -0,0 +1,70 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.walkers;
+
+import org.broadinstitute.gatk.utils.commandline.Argument;
+import org.broadinstitute.gatk.utils.commandline.Input;
+import org.broadinstitute.gatk.utils.contexts.AlignmentContext;
+import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
+import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
+
+/**
+ * Failure-injection walker for engine tests: throws a caller-specified exception
+ * class from a chosen traversal phase (map, reduce, or tree-reduce) so tests can
+ * verify that the engine surfaces errors correctly from each phase.
+ */
+public class TestErrorThrowingWalker extends RefWalker<Integer, Integer> implements TreeReducible<Integer>, NanoSchedulable {
+    // Fully-qualified class name of the exception FailMethod.fail() will instantiate and throw.
+    @Input(fullName = "exception", shortName = "E", doc = "Java class of exception to throw", required = true)
+    public String exceptionToThrow;
+
+    // Which traversal phase should blow up; defaults to failing in map().
+    @Argument(fullName = "failMethod", shortName = "fail", doc = "Determines which method to fail in", required = false)
+    public FailMethod failMethod = FailMethod.MAP;
+
+    /** Throws the configured exception when failing in MAP mode; otherwise returns 0. */
+    @Override
+    public Integer map(final RefMetaDataTracker tracker, final ReferenceContext ref, final AlignmentContext context) {
+        if (ref == null) // only throw exception when we are in proper map, not special map(null) call
+            return null;
+
+        if (failMethod == FailMethod.MAP)
+            FailMethod.fail(exceptionToThrow);
+
+        return 0;
+    }
+
+    @Override
+    public Integer reduceInit() {
+        return 0;
+    }
+
+    /**
+     * Throws in REDUCE mode, but only for real map results (null marks the special
+     * map(null) call, which must not trigger the failure).
+     */
+    @Override
+    public Integer reduce(final Integer value, final Integer sum) {
+        if (value != null && failMethod == FailMethod.REDUCE)
+            FailMethod.fail(exceptionToThrow);
+        return sum;
+    }
+
+    /**
+     * Throws in TREE_REDUCE mode. Deliberately ignores lhs — this walker only
+     * exercises failure propagation, not a meaningful reduction.
+     */
+    @Override
+    public Integer treeReduce(final Integer lhs, final Integer rhs) {
+        if (failMethod == FailMethod.TREE_REDUCE)
+            FailMethod.fail(exceptionToThrow);
+        return rhs;
+    }
+}
diff --git a/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/walkers/TestPrintReadsWalker.java b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/walkers/TestPrintReadsWalker.java
new file mode 100644
index 0000000..e3c852e
--- /dev/null
+++ b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/walkers/TestPrintReadsWalker.java
@@ -0,0 +1,76 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.walkers;
+
+import htsjdk.samtools.SAMFileHeader;
+import htsjdk.samtools.SAMFileWriter;
+import org.broadinstitute.gatk.engine.GenomeAnalysisEngine;
+import org.broadinstitute.gatk.engine.io.NWaySAMFileWriter;
+import org.broadinstitute.gatk.utils.commandline.Argument;
+import org.broadinstitute.gatk.utils.commandline.Output;
+import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
+import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
+import org.broadinstitute.gatk.utils.sam.GATKSAMFileWriter;
+import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
+
+/**
+ * Test copy of the PrintReads walker: passes every read through unchanged to a
+ * SAM/BAM writer, so engine tests can compare emitted output against inputs.
+ */
+public class TestPrintReadsWalker extends ReadWalker<GATKSAMRecord, SAMFileWriter> implements NanoSchedulable {
+    @Output
+    private GATKSAMFileWriter out;
+
+    // When set, suppresses the @PG program-record header line (see initialize()).
+    @Argument(fullName = "no_pg_tag", shortName = "npt", doc ="", required = false)
+    public boolean NO_PG_TAG = false;
+
+    /**
+     * Set up the output writer: clone the input header and either route through
+     * NWaySAMFileWriter (which adds a @PG record) or write the header directly.
+     * NOTE(review): the @PG record is only added when a BQSR recalibration file is
+     * in use — presumably mirroring the real PrintReads behavior; confirm.
+     */
+    @Override
+    public void initialize() {
+        // All for the no_pg_tag. Should this be in the engine and not in the walker?
+        final GenomeAnalysisEngine toolkit = getToolkit();
+        final SAMFileHeader outputHeader = toolkit.getSAMFileHeader().clone();
+        final String PROGRAM_RECORD_NAME = "GATK PrintReads";
+        final boolean preSorted = true;
+        if (toolkit.getArguments().BQSR_RECAL_FILE != null && !NO_PG_TAG ) {
+            NWaySAMFileWriter.setupWriter(out, toolkit, outputHeader, preSorted, this, PROGRAM_RECORD_NAME);
+        } else {
+            out.writeHeader(outputHeader);
+            out.setPresorted(preSorted);
+        }
+    }
+
+    /** Identity map: every read is passed through unchanged. */
+    @Override
+    public GATKSAMRecord map(final ReferenceContext ref, final GATKSAMRecord read, final RefMetaDataTracker metaDataTracker) {
+        return read;
+    }
+
+    /** The accumulator is simply the output writer itself. */
+    @Override
+    public SAMFileWriter reduceInit() {
+        return out;
+    }
+
+    /** Write each mapped read to the output and carry the writer forward. */
+    @Override
+    public SAMFileWriter reduce(final GATKSAMRecord read, final SAMFileWriter output) {
+        output.addAlignment(read);
+        return output;
+    }
+}
diff --git a/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/walkers/TestPrintVariantsWalker.java b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/walkers/TestPrintVariantsWalker.java
new file mode 100644
index 0000000..89d630d
--- /dev/null
+++ b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/walkers/TestPrintVariantsWalker.java
@@ -0,0 +1,99 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.walkers;
+
+import htsjdk.variant.variantcontext.VariantContext;
+import htsjdk.variant.variantcontext.writer.VariantContextWriter;
+import htsjdk.variant.vcf.*;
+import org.broadinstitute.gatk.engine.GATKVCFUtils;
+import org.broadinstitute.gatk.engine.SampleUtils;
+import org.broadinstitute.gatk.engine.arguments.StandardVariantContextInputArgumentCollection;
+import org.broadinstitute.gatk.utils.commandline.Argument;
+import org.broadinstitute.gatk.utils.commandline.ArgumentCollection;
+import org.broadinstitute.gatk.utils.commandline.Output;
+import org.broadinstitute.gatk.utils.contexts.AlignmentContext;
+import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
+import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
+import org.broadinstitute.gatk.utils.variant.ChromosomeCountConstants;
+import org.broadinstitute.gatk.utils.variant.GATKVariantContextUtils;
+
+import java.util.*;
+
+/**
+ * Test copy of a variants pass-through walker: re-emits every VariantContext from
+ * the input RODs to a VCF writer (optionally fully decoded), counting the loci at
+ * which at least one variant was seen.
+ */
+public class TestPrintVariantsWalker extends RodWalker<Integer, Integer> implements TreeReducible<Integer> {
+    @ArgumentCollection
+    private StandardVariantContextInputArgumentCollection variantCollection = new StandardVariantContextInputArgumentCollection();
+
+    @Argument(fullName = "fullyDecode", doc = "If true, the incoming VariantContext will be fully decoded", required = false)
+    private boolean fullyDecode = false;
+
+    @Output
+    private VariantContextWriter vcfWriter = null;
+
+    // Header of each input VCF rod, keyed by rod name; needed for full decoding in map().
+    private Map<String, VCFHeader> vcfRods = null;
+
+    /**
+     * Build and write the output VCF header: merge all input headers, require unique
+     * sample names across inputs, and add chromosome-count and DP header lines.
+     */
+    @Override
+    public void initialize() {
+        vcfRods = GATKVCFUtils.getVCFHeadersFromRods(getToolkit());
+        final Set<String> samples = SampleUtils.getSampleList(vcfRods, GATKVariantContextUtils.GenotypeMergeType.REQUIRE_UNIQUE);
+        final Set<VCFHeaderLine> headerLines = VCFUtils.smartMergeHeaders(vcfRods.values(), true);
+        headerLines.addAll(Arrays.asList(ChromosomeCountConstants.descriptions));
+        headerLines.add(VCFStandardHeaderLines.getInfoLine(VCFConstants.DEPTH_KEY));
+        final VCFHeader vcfHeader = new VCFHeader(headerLines, samples);
+        vcfWriter.writeHeader(vcfHeader);
+    }
+
+    /**
+     * Re-emit every variant at this locus; returns 1 if any variant was written
+     * here, 0 otherwise (so the reduction counts variant-bearing loci).
+     */
+    @Override
+    public Integer map(final RefMetaDataTracker tracker, final ReferenceContext ref, final AlignmentContext context) {
+        if (tracker == null)
+            return 0;
+        final Collection<VariantContext> vcs = tracker.getValues(variantCollection.variants, context.getLocation());
+        for (VariantContext vc : vcs) {
+            if (fullyDecode)
+                vc = vc.fullyDecode(vcfRods.get(vc.getSource()), getToolkit().lenientVCFProcessing());
+            vcfWriter.add(vc);
+        }
+        return vcs.isEmpty() ? 0 : 1;
+    }
+
+    @Override
+    public Integer reduceInit() {
+        return 0;
+    }
+
+    /** Sum per-locus indicators into the running count. */
+    @Override
+    public Integer reduce(final Integer counter, final Integer sum) {
+        return counter + sum;
+    }
+
+    /** Tree reduction is just an ordinary sum of partial counts. */
+    @Override
+    public Integer treeReduce(final Integer lhs, final Integer rhs) {
+        return reduce(lhs, rhs);
+    }
+
+    /** Nothing to report; the VCF writer is closed by the engine. */
+    @Override
+    public void onTraversalDone(final Integer sum) {
+    }
+}
diff --git a/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/walkers/WalkerTest.java b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/walkers/WalkerTest.java
new file mode 100644
index 0000000..e19f6c8
--- /dev/null
+++ b/public/gatk-engine/src/test/java/org/broadinstitute/gatk/engine/walkers/WalkerTest.java
@@ -0,0 +1,457 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.engine.walkers;
+
+import htsjdk.tribble.Tribble;
+import htsjdk.tribble.index.Index;
+import htsjdk.tribble.index.IndexFactory;
+import htsjdk.variant.bcf2.BCF2Utils;
+import htsjdk.variant.vcf.VCFCodec;
+import org.apache.commons.lang.StringUtils;
+import org.broadinstitute.gatk.engine.CommandLineExecutable;
+import org.broadinstitute.gatk.engine.CommandLineGATK;
+import org.broadinstitute.gatk.engine.crypt.CryptUtils;
+import org.broadinstitute.gatk.engine.phonehome.GATKRunReport;
+import org.broadinstitute.gatk.utils.BaseTest;
+import org.broadinstitute.gatk.utils.MD5DB;
+import org.broadinstitute.gatk.utils.MD5Mismatch;
+import org.broadinstitute.gatk.utils.Utils;
+import org.broadinstitute.gatk.utils.classloader.JVMUtils;
+import org.broadinstitute.gatk.utils.collections.Pair;
+import org.broadinstitute.gatk.utils.exceptions.GATKException;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+import org.testng.Assert;
+import org.testng.annotations.AfterSuite;
+import org.testng.annotations.BeforeMethod;
+
+import java.io.ByteArrayOutputStream;
+import java.io.File;
+import java.io.PrintStream;
+import java.text.SimpleDateFormat;
+import java.util.*;
+
+public class WalkerTest extends BaseTest {
+    public static final String gatkKeyFile = CryptUtils.GATK_USER_KEY_DIRECTORY + "gsamembers_broadinstitute.org.key";
+
+    private static final boolean GENERATE_SHADOW_BCF = true;
+    private static final boolean ENABLE_PHONE_HOME_FOR_TESTS = false;
+    private static final boolean ENABLE_ON_THE_FLY_CHECK_FOR_VCF_INDEX = false;
+    private static final boolean ENABLE_AUTO_INDEX_CREATION_AND_LOCKING_FOR_TESTS = false;
+
+    private static MD5DB md5DB = new MD5DB();
+
+    /** Reset shared global state (the GATK random generator) before each test method for reproducibility. */
+    @BeforeMethod
+    public void initializeWalkerTests() {
+        logger.debug("Initializing walker tests");
+        Utils.resetRandomGenerator();
+    }
+
+    /** Flush and close the shared MD5 database once the whole test suite has finished. */
+    @AfterSuite
+    public void finalizeWalkerTests() {
+        logger.debug("Finalizing walker tests");
+        md5DB.close();
+    }
+
+    /** @return the suite-wide MD5 database used to record and compare expected test outputs */
+    public static MD5DB getMd5DB() {
+        return md5DB;
+    }
+
+    /**
+     * If a shadow BCF was generated alongside {@code resultFile}, assert that it
+     * encodes the same records as the VCF; any exception during comparison fails the test.
+     *
+     * @param name       test name, used in the failure message
+     * @param resultFile the VCF output whose shadow BCF (if any) is checked
+     */
+    public void validateOutputBCFIfPossible(final String name, final File resultFile) {
+        final File bcfFile = BCF2Utils.shadowBCF(resultFile);
+        if ( bcfFile != null && bcfFile.exists() ) {
+            logger.warn("Checking shadow BCF output file " + bcfFile + " against VCF file " + resultFile);
+            try {
+                assertVCFandBCFFilesAreTheSame(resultFile, bcfFile);
+                logger.warn("  Shadow BCF PASSED!");
+            } catch ( Exception e ) {
+                Assert.fail("Exception received reading shadow BCFFile " + bcfFile + " for test " + name, e);
+            }
+        }
+    }
+
+    /**
+     * Verify an on-the-fly Tribble index written next to {@code resultFile} by
+     * re-indexing the file and comparing against the index found on disk.
+     * No-op unless ENABLE_ON_THE_FLY_CHECK_FOR_VCF_INDEX is set and an index exists.
+     *
+     * @param name       test name, used in messages
+     * @param resultFile the output file whose companion index is validated
+     */
+    public void validateOutputIndex(final String name, final File resultFile) {
+        if ( !ENABLE_ON_THE_FLY_CHECK_FOR_VCF_INDEX )
+            return;
+
+        final File indexFile = Tribble.indexFile(resultFile);
+        //System.out.println("Putative index file is " + indexFile);
+        if ( indexFile.exists() ) {
+            // We re-create the index with a VCFCodec below, so only VCF outputs can be
+            // validated; blow up on anything else rather than silently mis-decoding it.
+            // Bug fix: the original condition lacked the negation, rejecting exactly the
+            // VCF files this code is able to validate.
+            if ( !resultFile.getAbsolutePath().contains(".vcf") ) {
+                throw new GATKException("Found an index created for file " + resultFile + " but we can only validate VCF files.  Extend this code!");
+            }
+
+            System.out.println("Verifying on-the-fly index " + indexFile + " for test " + name + " using file " + resultFile);
+            final Index indexFromOutputFile = IndexFactory.createDynamicIndex(resultFile, new VCFCodec());
+            final Index dynamicIndex = IndexFactory.loadIndex(indexFile.getAbsolutePath());
+
+            if ( ! indexFromOutputFile.equalsIgnoreProperties(dynamicIndex) ) {
+                Assert.fail(String.format("Index on disk from indexing on the fly not equal to the index created after the run completed.  FileIndex %s vs. on-the-fly %s%n",
+                        indexFromOutputFile.getProperties(),
+                        dynamicIndex.getProperties()));
+            }
+        }
+    }
+
+    /**
+     * Compare each result file's MD5 against its expected value via the MD5 database,
+     * additionally validating shadow BCFs and on-the-fly indexes for passing files.
+     * Collects all mismatches and fails once with a combined report.
+     *
+     * @param testName      name of the running test
+     * @param testClassName simple name of the test class
+     * @param resultFiles   produced output files, parallel to expectedMD5s
+     * @param expectedMD5s  expected digests, parallel to resultFiles
+     * @return the expected MD5s of all files that matched
+     */
+    public List<String> assertMatchingMD5s(final String testName, final String testClassName, List<File> resultFiles, List<String> expectedMD5s) {
+        List<String> md5s = new ArrayList<String>();
+        List<MD5DB.MD5Match> fails = new ArrayList<MD5DB.MD5Match>();
+
+        for (int i = 0; i < resultFiles.size(); i++) {
+            MD5DB.MD5Match result = getMd5DB().testFileMD5(testName, testClassName, resultFiles.get(i), expectedMD5s.get(i), parameterize());
+            validateOutputBCFIfPossible(testName, resultFiles.get(i));
+            if ( ! result.failed ) {
+                // Only validate the index for files whose MD5 already matched.
+                validateOutputIndex(testName, resultFiles.get(i));
+                md5s.add(result.expectedMD5);
+            } else {
+                fails.add(result);
+            }
+        }
+
+        if ( ! fails.isEmpty() ) {
+            // Aggregate every mismatch into a single MD5Mismatch so one run reports all failures.
+            List<String> actuals = new ArrayList<String>();
+            List<String> expecteds = new ArrayList<String>();
+            List<String> diffEngineOutputs = new ArrayList<String>();
+
+            for ( final MD5DB.MD5Match fail : fails ) {
+                actuals.add(fail.actualMD5);
+                expecteds.add(fail.expectedMD5);
+                diffEngineOutputs.add(fail.diffEngineOutput);
+                logger.warn("Fail: " + fail.failMessage);
+            }
+
+            final MD5Mismatch failure = new MD5Mismatch(actuals, expecteds, diffEngineOutputs);
+            Assert.fail(failure.toString());
+        }
+
+        return md5s;
+    }
+
+    /**
+     * Join the given tokens into a single space-separated command line.
+     *
+     * @param arguments individual command-line tokens
+     * @return the tokens joined by single spaces (empty string for no arguments)
+     */
+    public String buildCommandLine(String... arguments) {
+        // StringBuilder avoids the O(n^2) cost of repeated String concatenation in a loop.
+        final StringBuilder cmdline = new StringBuilder();
+
+        for ( int argIndex = 0; argIndex < arguments.length; argIndex++ ) {
+            if ( argIndex > 0 ) {
+                cmdline.append(' ');
+            }
+            cmdline.append(arguments[argIndex]);
+        }
+
+        return cmdline.toString();
+    }
+
+    /**
+     * Specification for one walker test run: the command line, the expected MD5s of
+     * its output files (or an expected exception), and bookkeeping for auxiliary files.
+     */
+    public class WalkerTestSpec {
+        // Walker command line for this test. Arguments implicitly included in all Walker
+        // command lines (phone-home, shadow BCF, index locking) are appended by
+        // getArgsWithImplicitArgs() unless disabled via disableImplicitArgs() below.
+        String args = "";
+        // Number of output files; NOTE(review): the md5-based constructors overwrite this
+        // with md5s.size(), ignoring the caller-supplied value — confirm intent.
+        int nOutputFiles = -1;
+        // Expected MD5 per output file ("" means "don't check that file").
+        List<String> md5s = null;
+        // Optional file extension per output file (defaults to ".tmp").
+        List<String> exts = null;
+        // When non-null, the run is expected to throw this exception instead of producing output.
+        Class expectedException = null;
+        boolean includeImplicitArgs = true;
+        boolean includeShadowBCF = true;
+
+        // Name of the test class that created this test case
+        private Class testClass;
+
+        // the default output path for the integration test
+        private File outputFileLocation = null;
+
+        // Extra expected outputs keyed by their expected MD5 sum.
+        protected Map<String, File> auxillaryFiles = new HashMap<String, File>();
+
+        public WalkerTestSpec(String args, List<String> md5s) {
+            this(args, -1, md5s);
+        }
+
+        // NOTE(review): nOutputFiles is ignored here; the field is set from md5s.size().
+        public WalkerTestSpec(String args, int nOutputFiles, List<String> md5s) {
+            this.args = args;
+            this.nOutputFiles = md5s.size();
+            this.md5s = md5s;
+            this.testClass = getCallingTestClass();
+        }
+
+        public WalkerTestSpec(String args, List<String> exts, List<String> md5s) {
+            this(args, -1, exts, md5s);
+        }
+
+        // NOTE(review): nOutputFiles is ignored here as well; see above.
+        public WalkerTestSpec(String args, int nOutputFiles, List<String> exts, List<String> md5s) {
+            this.args = args;
+            this.nOutputFiles = md5s.size();
+            this.md5s = md5s;
+            this.exts = exts;
+            this.testClass = getCallingTestClass();
+        }
+
+        // @Test(expectedExceptions) doesn't work in integration tests, so use this instead
+        public WalkerTestSpec(String args, int nOutputFiles, Class expectedException) {
+            this.args = args;
+            this.nOutputFiles = nOutputFiles;
+            this.expectedException = expectedException;
+            this.testClass = getCallingTestClass();
+        }
+
+        /** Walk the call stack to find which test class constructed this spec. */
+        private Class getCallingTestClass() {
+            return JVMUtils.getCallingClass(getClass());
+        }
+
+        public String getTestClassName() {
+            return testClass.getSimpleName();
+        }
+
+        /** @return the user args plus the standard implicit flags, unless implicit args are disabled */
+        public String getArgsWithImplicitArgs() {
+            String args = this.args;
+            if ( includeImplicitArgs ) {
+                args = args + (ENABLE_PHONE_HOME_FOR_TESTS ?
+                        String.format(" -et %s ", GATKRunReport.PhoneHomeOption.AWS) :
+                        String.format(" -et %s -K %s ", GATKRunReport.PhoneHomeOption.NO_ET, gatkKeyFile));
+                if ( includeShadowBCF && GENERATE_SHADOW_BCF )
+                    args = args + " --generateShadowBCF ";
+                if ( ! ENABLE_AUTO_INDEX_CREATION_AND_LOCKING_FOR_TESTS )
+                    args = args + " --disable_auto_index_creation_and_locking_when_reading_rods ";
+            }
+
+            return args;
+        }
+
+        /**
+         * In the case where the input VCF files are malformed and cannot be fixed
+         * this function tells the engine to not try to generate a shadow BCF
+         * which will ultimately blow up...
+         */
+        public void disableShadowBCF() { this.includeShadowBCF = false; }
+        public void setOutputFileLocation(File outputFileLocation) {
+            this.outputFileLocation = outputFileLocation;
+        }        
+
+        protected File getOutputFileLocation() {
+            return outputFileLocation;
+        }
+        
+        public boolean expectsException() {
+            return expectedException != null;
+        }
+
+        /** @throws ReviewedGATKException if this spec does not expect an exception */
+        public Class getExpectedException() {
+            if ( ! expectsException() ) throw new ReviewedGATKException("Tried to get expection for walker test that doesn't expect one")
;
+            return expectedException;
+        }
+
+        /** Register an additional expected output file with its expected MD5 sum. */
+        public void addAuxFile(String expectededMD5sum, File outputfile) {
+            auxillaryFiles.put(expectededMD5sum, outputfile);
+        }
+
+        public void disableImplicitArgs() {
+            includeImplicitArgs = false;
+        }
+    }
+
+    /** Hook for subclasses: return true to mark MD5 results as parameterized in the MD5 database. */
+    protected boolean parameterize() {
+        return false;
+    }
+
+    /** Which parallelism modes a test should be exercised under: -nt (tree-reduce), -nct (nano-scheduling), or both. */
+    public enum ParallelTestType {
+        TREE_REDUCIBLE,
+        NANO_SCHEDULED,
+        BOTH
+    }
+
+    /**
+     * Run a walker test under the requested flavors of parallelism: tree-reducible
+     * tests get -nt at 1 and 4 threads, nano-scheduled tests get -nct at 1 and 4,
+     * and BOTH runs both sets.
+     */
+    protected Pair<List<File>, List<String>> executeTestParallel(final String name, WalkerTestSpec spec, ParallelTestType testType) {
+        final boolean treeReducible = testType == ParallelTestType.TREE_REDUCIBLE || testType == ParallelTestType.BOTH;
+        final boolean nanoScheduled = testType == ParallelTestType.NANO_SCHEDULED || testType == ParallelTestType.BOTH;
+
+        final List<Integer> dataThreadCounts = treeReducible ? Arrays.asList(1, 4) : Collections.<Integer>emptyList();
+        final List<Integer> cpuThreadCounts  = nanoScheduled ? Arrays.asList(1, 4) : Collections.<Integer>emptyList();
+
+        return executeTest(name, spec, dataThreadCounts, cpuThreadCounts);
+    }
+
+    /** Convenience overload: run the parallel test in TREE_REDUCIBLE (-nt) mode only. */
+    protected Pair<List<File>, List<String>> executeTestParallel(final String name, WalkerTestSpec spec) {
+        return executeTestParallel(name, spec, ParallelTestType.TREE_REDUCIBLE);
+    }
+
+    protected Pair<List<File>, List<String>> executeTest(final String name, WalkerTestSpec spec, List<Integer> ntThreads, List<Integer> cpuThreads) {
+        String originalArgs = spec.args;
+        Pair<List<File>, List<String>> results = null;
+
+        boolean ran1 = false;
+        for ( int nt : ntThreads ) {
+            String extra = nt == 1 ? "" : (" -nt " + nt);
+            ran1 = ran1 || nt == 1;
+            spec.args = originalArgs + extra;
+            results = executeTest(name + "-nt-" + nt, spec);
+        }
+
+        for ( int nct : cpuThreads ) {
+            if ( nct != 1 ) {
+                String extra = " -nct " + nct;
+                spec.args = originalArgs + extra;
+                results = executeTest(name + "-cnt-" + nct, spec);
+            }
+        }
+
+        return results;
+    }
+
+    /**
+     * Materialize the spec into a concrete run: create one temp file per expected
+     * output, substitute them into the arg format string, and dispatch either to the
+     * expected-exception path or the MD5-checking path.
+     *
+     * @return the produced files and their matched MD5s, or null for exception tests
+     */
+    protected Pair<List<File>, List<String>> executeTest(final String name, WalkerTestSpec spec) {
+        List<File> tmpFiles = new ArrayList<File>();
+        for (int i = 0; i < spec.nOutputFiles; i++) {
+            String ext = spec.exts == null ? ".tmp" : "." + spec.exts.get(i);
+            File fl = createTempFile(String.format("walktest.tmp_param.%d", i), ext);
+
+            // Cleanup any potential shadow BCFs (and their indexes) on exit too, if we're generating them
+            if ( spec.includeShadowBCF && GENERATE_SHADOW_BCF ) {
+                final File potentalShadowBCFFile = BCF2Utils.shadowBCF(fl);
+                potentalShadowBCFFile.deleteOnExit();
+                new File(potentalShadowBCFFile.getAbsolutePath() + Tribble.STANDARD_INDEX_EXTENSION).deleteOnExit();
+            }
+
+            tmpFiles.add(fl);
+        }
+
+        // The spec's args act as a format string whose %s placeholders receive the temp file paths.
+        final String args = String.format(spec.getArgsWithImplicitArgs(), tmpFiles.toArray());
+        System.out.println(Utils.dupString('-', 80));
+
+        if ( spec.expectsException() ) {
+            // this branch handles the case where we are testing that a walker will fail as expected
+            return executeTest(name, spec.getTestClassName(), spec.getOutputFileLocation(), null, tmpFiles, args, spec.getExpectedException());
+        } else {
+            List<String> md5s = new LinkedList<String>();
+            md5s.addAll(spec.md5s);
+
+            // check to see if they included any auxiliary files, if so add them to the list and set them to be deleted on exit
+            for (String md5 : spec.auxillaryFiles.keySet()) {
+                md5s.add(md5);
+                final File auxFile = spec.auxillaryFiles.get(md5);
+                auxFile.deleteOnExit();
+                tmpFiles.add(auxFile);
+            }
+            return executeTest(name, spec.getTestClassName(), spec.getOutputFileLocation(), md5s, tmpFiles, args, null);
+        }
+    }
+
+    /**
+     * Sanity-check the expected MD5 strings before use: each must be either the empty
+     * string (meaning "don't check this file") or a 32-character alphanumeric hex digest.
+     *
+     * @param name test name, included in error messages
+     * @param md5s expected digests to validate
+     * @throws IllegalArgumentException if an md5 is null, non-alphanumeric, or the wrong length
+     */
+    private void qcMD5s(String name, List<String> md5s) {
+        final int expectedMD5Length = 32; // length of a hex-encoded MD5 digest (replaces the opaque example-string trick)
+        for (String md5 : md5s) {
+            if ( md5 == null )
+                throw new IllegalArgumentException("Null MD5 found in test " + name);
+            if ( md5.isEmpty() ) // empty means "no check requested" — ok
+                continue;
+            if ( ! StringUtils.isAlphanumeric(md5) )
+                throw new IllegalArgumentException("MD5 contains non-alphanumeric characters test " + name + " md5=" + md5);
+            if ( md5.length() != expectedMD5Length )
+                throw new IllegalArgumentException("Non-empty MD5 of unexpected number of characters test " + name + " md5=" + md5);
+        }
+    }
+
+
+    /**
+     * execute the test, given the following:
+     * @param testName     the name of the test
+     * @param testClassName the name of the class that contains the test
+     * @param md5s     the list of md5s
+     * @param tmpFiles the temp file corresponding to the md5 list
+     * @param args     the argument list
+     * @param expectedException the expected exception or null
+     * @return a pair of file and string lists
+     */
+    private Pair<List<File>, List<String>> executeTest(String testName, String testClassName, File outputFileLocation, List<String> md5s, List<File> tmpFiles, String args, Class expectedException) {
+        if ( md5s != null ) qcMD5s(testName, md5s);
+
+        if (outputFileLocation != null)
+            args += " -o " + outputFileLocation.getAbsolutePath();
+        executeTest(testName, testClassName, args, expectedException);
+
+        if ( expectedException != null ) {
+            return null;
+        } else {
+            // we need to check MD5s
+            return new Pair<List<File>, List<String>>(tmpFiles, assertMatchingMD5s(testName, testClassName, tmpFiles, md5s));
+        }
+    }
+    
+    /**
+     * execute the test, given the following:
+     * @param testName      the name of the test
+     * @param testClassName the name of the class that contains the test
+     * @param args          the argument list
+     * @param expectedException the expected exception or null
+     */
+    private void executeTest(String testName, String testClassName, String args, Class expectedException) {
+        CommandLineGATK instance = new CommandLineGATK();
+        String[] command = Utils.escapeExpressions(args);
+        // run the executable
+        boolean gotAnException = false;
+        try {
+            final String now = new SimpleDateFormat("HH:mm:ss").format(new Date());
+            final String cmdline = Utils.join(" ",command);
+            System.out.println(String.format("[%s] Executing test %s:%s with GATK arguments: %s", now, testClassName, testName, cmdline));
+            // also write the command line to the HTML log for convenient follow-up
+            // do the replaceAll so paths become relative to the current
+            BaseTest.log(cmdline.replaceAll(publicTestDirRoot, "").replaceAll(privateTestDirRoot, ""));
+            CommandLineExecutable.start(instance, command);
+        } catch (Exception e) {
+            gotAnException = true;
+            if ( expectedException != null ) {
+                // we expect an exception
+                //System.out.println(String.format("Wanted exception %s, saw %s", expectedException, e.getClass()));
+                if ( expectedException.isInstance(e) ) {
+                    // it's the type we expected
+                    //System.out.println(String.format("  => %s PASSED", name));
+                } else {
+                    final String message = String.format("Test %s:%s expected exception %s but instead got %s with error message %s",
+                            testClassName, testName, expectedException, e.getClass(), e.getMessage());
+                    if ( e.getCause() != null ) {
+                        final ByteArrayOutputStream baos = new ByteArrayOutputStream();
+                        final PrintStream ps = new PrintStream(baos);
+                        e.getCause().printStackTrace(ps);
+                        BaseTest.log(message);
+                        BaseTest.log(baos.toString());
+                    }
+                    Assert.fail(message);
+                }
+            } else {
+                // we didn't expect an exception but we got one :-(
+                throw new RuntimeException(e);
+            }
+        }
+
+        // catch failures from the integration test
+        if ( expectedException != null ) {
+            if ( ! gotAnException )
+                // we expected an exception but didn't see it
+                Assert.fail(String.format("Test %s:%s expected exception %s but none was thrown", testClassName, testName, expectedException.toString()));
+        } else {
+            if ( CommandLineExecutable.result != 0) {
+                throw new RuntimeException("Error running the GATK with arguments: " + args);
+            }
+        }
+    }
+
+
+    /**
+     * Wrap the given path in a File handle registered for deletion on JVM exit.
+     *
+     * NOTE(review): despite the name, this does not create a new unique temp file;
+     * it only wraps the existing path and calls deleteOnExit — confirm callers
+     * expect that behavior.
+     *
+     * @param name path of the file to register for deletion
+     * @return the File handle for {@code name}
+     */
+    protected File createTempFileFromBase(final String name) {
+        File fl = new File(name);
+        fl.deleteOnExit();
+        return fl;
+    }
+}
diff --git a/public/gatk-engine/src/test/resources/exampleBAM.bam b/public/gatk-engine/src/test/resources/exampleBAM.bam
deleted file mode 100644
index 319dd1a..0000000
Binary files a/public/gatk-engine/src/test/resources/exampleBAM.bam and /dev/null differ
diff --git a/public/gatk-engine/src/test/resources/exampleBAM.bam.bai b/public/gatk-engine/src/test/resources/exampleBAM.bam.bai
deleted file mode 100644
index 052ac61..0000000
Binary files a/public/gatk-engine/src/test/resources/exampleBAM.bam.bai and /dev/null differ
diff --git a/public/gatk-engine/src/test/resources/exampleBAM.simple.bai b/public/gatk-engine/src/test/resources/exampleBAM.simple.bai
deleted file mode 100644
index 2d8268b..0000000
Binary files a/public/gatk-engine/src/test/resources/exampleBAM.simple.bai and /dev/null differ
diff --git a/public/gatk-engine/src/test/resources/exampleBAM.simple.bam b/public/gatk-engine/src/test/resources/exampleBAM.simple.bam
deleted file mode 100644
index c3eb7ae..0000000
Binary files a/public/gatk-engine/src/test/resources/exampleBAM.simple.bam and /dev/null differ
diff --git a/public/gatk-engine/src/test/resources/exampleNORG.bam b/public/gatk-engine/src/test/resources/exampleNORG.bam
deleted file mode 100644
index f59219f..0000000
Binary files a/public/gatk-engine/src/test/resources/exampleNORG.bam and /dev/null differ
diff --git a/public/gatk-engine/src/test/resources/exampleNORG.bam.bai b/public/gatk-engine/src/test/resources/exampleNORG.bam.bai
deleted file mode 100644
index 26cfe74..0000000
Binary files a/public/gatk-engine/src/test/resources/exampleNORG.bam.bai and /dev/null differ
diff --git a/public/gatk-queue-extensions-generator/pom.xml b/public/gatk-queue-extensions-generator/pom.xml
index 296ca52..7735d65 100644
--- a/public/gatk-queue-extensions-generator/pom.xml
+++ b/public/gatk-queue-extensions-generator/pom.xml
@@ -5,7 +5,7 @@
     <parent>
         <groupId>org.broadinstitute.gatk</groupId>
         <artifactId>gatk-aggregator</artifactId>
-        <version>3.3</version>
+        <version>3.5</version>
         <relativePath>../..</relativePath>
     </parent>
 
@@ -21,7 +21,7 @@
     <dependencies>
         <dependency>
             <groupId>${project.groupId}</groupId>
-            <artifactId>gatk-tools-public</artifactId>
+            <artifactId>gatk-engine</artifactId>
             <version>${project.version}</version>
         </dependency>
     </dependencies>
diff --git a/public/gatk-queue-extensions-generator/src/main/java/org/broadinstitute/gatk/queue/extensions/gatk/ArgumentDefinitionField.java b/public/gatk-queue-extensions-generator/src/main/java/org/broadinstitute/gatk/queue/extensions/gatk/ArgumentDefinitionField.java
index 1e9e5cc..849d994 100644
--- a/public/gatk-queue-extensions-generator/src/main/java/org/broadinstitute/gatk/queue/extensions/gatk/ArgumentDefinitionField.java
+++ b/public/gatk-queue-extensions-generator/src/main/java/org/broadinstitute/gatk/queue/extensions/gatk/ArgumentDefinitionField.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -507,7 +507,7 @@ public abstract class ArgumentDefinitionField extends ArgumentField {
         @Override protected String getFreezeFields() {
             return String.format(
                     ("if (%2$s != null && !org.broadinstitute.gatk.utils.io.IOUtils.isSpecialFile(%2$s))%n" +
-                            "  if (!org.broadinstitute.gatk.engine.io.stubs.VCFWriterArgumentTypeDescriptor.isCompressed(%2$s.getPath))%n" +
+                            "  if (!org.broadinstitute.gatk.utils.commandline.ArgumentTypeDescriptor.isCompressed(%2$s.getPath))%n" +
                             "    %1$s = new File(%2$s.getPath + \"%3$s\")%n"),
                     auxFieldName, originalFieldName, Tribble.STANDARD_INDEX_EXTENSION);
         }
diff --git a/public/gatk-queue-extensions-generator/src/main/java/org/broadinstitute/gatk/queue/extensions/gatk/ArgumentField.java b/public/gatk-queue-extensions-generator/src/main/java/org/broadinstitute/gatk/queue/extensions/gatk/ArgumentField.java
index 9c93efd..d4e0aea 100644
--- a/public/gatk-queue-extensions-generator/src/main/java/org/broadinstitute/gatk/queue/extensions/gatk/ArgumentField.java
+++ b/public/gatk-queue-extensions-generator/src/main/java/org/broadinstitute/gatk/queue/extensions/gatk/ArgumentField.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-queue-extensions-generator/src/main/java/org/broadinstitute/gatk/queue/extensions/gatk/GATKExtensionsGenerator.java b/public/gatk-queue-extensions-generator/src/main/java/org/broadinstitute/gatk/queue/extensions/gatk/GATKExtensionsGenerator.java
index d125e3d..90adfc2 100644
--- a/public/gatk-queue-extensions-generator/src/main/java/org/broadinstitute/gatk/queue/extensions/gatk/GATKExtensionsGenerator.java
+++ b/public/gatk-queue-extensions-generator/src/main/java/org/broadinstitute/gatk/queue/extensions/gatk/GATKExtensionsGenerator.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -161,6 +161,8 @@ public class GATKExtensionsGenerator extends CommandLineProgram {
                                     if (scatterClass != null) {
                                         isScatter = true;
                                         constructor += String.format("scatterClass = classOf[%s]%n", scatterClass);
+                                        final boolean includeUnmapped = getUnmappedInclusion(walkerType);
+                                        constructor += String.format("setupScatterFunction = { case scatter: GATKScatterFunction => scatter.includeUnmapped = %b }%n", includeUnmapped);
                                     }
 
                                     writeClass(GATK_EXTENSIONS_PACKAGE_NAME + "." + clpClassName, walkerName,
@@ -227,6 +229,15 @@ public class GATKExtensionsGenerator extends CommandLineProgram {
     }
 
     /**
+     * Should the scatter function for this walker include unmapped reads?
+     * @param walkerType The walker
+     * @return True if unmapped reads should be processed by this walker
+     */
+    private boolean getUnmappedInclusion(Class<? extends Walker> walkerType) {
+        // NOTE(review): assumes every walker type carries a PartitionBy annotation;
+        // Class.getAnnotation returns null otherwise, which would NPE here — confirm the invariant
+        return walkerType.getAnnotation(PartitionBy.class).includeUnmapped();
+    }
+
+    /**
      * Writes a dynamically generated scala wrapper for a class.
      * @param baseClass The class to extend from.
      * @param className The class name to generate.
diff --git a/public/gatk-queue-extensions-generator/src/main/java/org/broadinstitute/gatk/queue/extensions/gatk/ReadFilterField.java b/public/gatk-queue-extensions-generator/src/main/java/org/broadinstitute/gatk/queue/extensions/gatk/ReadFilterField.java
index b23f1aa..581aef0 100644
--- a/public/gatk-queue-extensions-generator/src/main/java/org/broadinstitute/gatk/queue/extensions/gatk/ReadFilterField.java
+++ b/public/gatk-queue-extensions-generator/src/main/java/org/broadinstitute/gatk/queue/extensions/gatk/ReadFilterField.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-queue-extensions-public/pom.xml b/public/gatk-queue-extensions-public/pom.xml
index 4d46ce1..55c9158 100644
--- a/public/gatk-queue-extensions-public/pom.xml
+++ b/public/gatk-queue-extensions-public/pom.xml
@@ -5,7 +5,7 @@
     <parent>
         <groupId>org.broadinstitute.gatk</groupId>
         <artifactId>gatk-aggregator</artifactId>
-        <version>3.3</version>
+        <version>3.5</version>
         <relativePath>../..</relativePath>
     </parent>
 
@@ -39,6 +39,10 @@
             <groupId>log4j</groupId>
             <artifactId>log4j</artifactId>
         </dependency>
+        <dependency>
+            <groupId>com.github.broadinstitute</groupId>
+            <artifactId>picard</artifactId>
+        </dependency>
         <!--
         Extensions generator dependency only applies to the exec:exec,
         not the artifact, but don't know another way to include
@@ -64,7 +68,7 @@
         -->
         <dependency>
             <groupId>${project.groupId}</groupId>
-            <artifactId>gatk-tools-public</artifactId>
+            <artifactId>gatk-utils</artifactId>
             <version>${project.version}</version>
             <type>test-jar</type>
             <scope>test</scope>
diff --git a/public/gatk-queue-extensions-public/src/main/qscripts/org/broadinstitute/gatk/queue/qscripts/CNV/ONLY_GENOTYPE_xhmmCNVpipeline.scala b/public/gatk-queue-extensions-public/src/main/qscripts/org/broadinstitute/gatk/queue/qscripts/CNV/ONLY_GENOTYPE_xhmmCNVpipeline.scala
new file mode 100644
index 0000000..c2ff1c4
--- /dev/null
+++ b/public/gatk-queue-extensions-public/src/main/qscripts/org/broadinstitute/gatk/queue/qscripts/CNV/ONLY_GENOTYPE_xhmmCNVpipeline.scala
@@ -0,0 +1,103 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.queue.qscripts.CNV
+
+import org.broadinstitute.gatk.queue.extensions.gatk._
+import org.broadinstitute.gatk.queue.QScript
+import org.broadinstitute.gatk.queue.util.VCF_BAM_utilities
+import org.broadinstitute.gatk.queue.extensions.gatk.DoC._
+import org.broadinstitute.gatk.utils.commandline._
+import java.io.{File, PrintStream, PrintWriter}
+import org.broadinstitute.gatk.utils.text.XReadLines
+import collection.JavaConversions._
+import org.broadinstitute.gatk.tools.walkers.coverage.CoverageUtils
+import org.broadinstitute.gatk.queue.function.scattergather.{CloneFunction, ScatterFunction, GatherFunction, ScatterGatherableFunction}
+import org.broadinstitute.gatk.queue.function.{CommandLineFunction, InProcessFunction}
+import org.broadinstitute.gatk.utils.io.IOUtils
+
+/**
+ * QScript that runs only the genotyping stage of the xhmm CNV pipeline:
+ * it parses the input bam list, builds per-sample DoC groups, and genotypes
+ * each supplied interval-list region against the pre-computed, filtered
+ * Z-score and original read-depth matrices.
+ */
+class ONLY_GENOTYPE_xhmmCNVpipeline extends QScript {
+  qscript =>
+
+  @Input(doc = "bam input, as as a list of .bam files, or a list of bam files with sample IDs to be used ( as specified at https://www.broadinstitute.org/gatk/gatkdocs/org_broadinstitute_sting_gatk_CommandLineGATK.html#--sample_rename_mapping_file )", shortName = "I", required = true)
+  var bams: File = _
+
+  @Input(doc = "xhmm executable file", shortName = "xhmmExec", required = true)
+  var xhmmExec: File = _
+
+  @Input(shortName = "R", doc = "ref", required = true)
+  var referenceFile: File = _
+
+  @Argument(doc = "Samples to run together for DoC, CNV discovery, and CNV genotyping.  By default is set to 1 [one job per sample].", shortName = "samplesPerJob", required = false)
+  var samplesPerJob = 1
+
+  @Output(doc = "Base name for files to output", shortName = "o", required = true)
+  var outputBase: File = _
+
+  @Argument(shortName = "xhmmParams", doc = "xhmm model parameters file", required = true)
+  var xhmmParamsArg: File = _
+
+  @Argument(shortName = "genotypeParams", doc = "xhmm command-line parameters for genotyping step", required = false)
+  var genotypeCommandLineParams: String = ""
+
+  @Argument(shortName = "addGenotypeRegions", doc = "Additional interval list files to be genotyped", required = false)
+  var addGenotypeRegions: List[File] = List[File]()
+
+  @Argument(shortName = "longJobQueue", doc = "Job queue to run the 'long-running' commands", required = false)
+  var longJobQueue: String = ""
+
+  @Argument(shortName = "filteredZscored", doc = "File of PCA-normalized read depths, after filtering and Z-score calculation", required = true)
+  var filteredZscored: File = _
+
+  @Argument(shortName = "originalSameFiltered", doc = "File of original read depths, using same filters (samples and targets) as Z-score matrix [filteredZscored argument]", required = true)
+  var originalSameFiltered: File = _
+
+
+  // Mixin that routes a command to the optional long-running job queue, when one was given.
+  trait LongRunTime extends CommandLineFunction {
+    if (longJobQueue != "")
+      this.jobQueue = longJobQueue
+  }
+
+  def script = {
+    // parse the bam list (optionally carrying explicit sample-ID mappings)
+    val parseMixedInputBamList = parseBamListWithOptionalSampleMappings(bams)
+
+    val processMixedInputBamList = new ProcessBamListWithOptionalSampleMappings(parseMixedInputBamList, outputBase.getPath)
+    add(processMixedInputBamList)
+
+    val samples: List[String] = parseMixedInputBamList.sampleToBams.keys.toList
+    Console.out.printf("Samples are %s%n", samples)
+
+    // group samples so each genotyping job covers samplesPerJob samples
+    val groups: List[Group] = buildDoCgroups(samples, parseMixedInputBamList.sampleToBams, samplesPerJob, outputBase)
+    for (group <- groups) {
+      Console.out.printf("Group is %s%n", group)
+    }
+
+    // one GenotypeCNVs job per extra interval list, with output named after the region file
+    for (regionsFile <- addGenotypeRegions) {
+    	val genotypeRegions = new GenotypeCNVs(filteredZscored, regionsFile, originalSameFiltered, new File(outputBase.getParent + "/" + regionsFile.getName.replace(".interval_list", "") + "." + outputBase.getName), xhmmParamsArg, referenceFile, genotypeCommandLineParams, xhmmExec, groups) with LongRunTime
+	add(genotypeRegions)
+    }
+  }
+
+}
diff --git a/public/gatk-queue-extensions-public/src/main/qscripts/org/broadinstitute/gatk/queue/qscripts/CNV/xhmmCNVpipeline.scala b/public/gatk-queue-extensions-public/src/main/qscripts/org/broadinstitute/gatk/queue/qscripts/CNV/xhmmCNVpipeline.scala
index 7d25668..fd7698f 100644
--- a/public/gatk-queue-extensions-public/src/main/qscripts/org/broadinstitute/gatk/queue/qscripts/CNV/xhmmCNVpipeline.scala
+++ b/public/gatk-queue-extensions-public/src/main/qscripts/org/broadinstitute/gatk/queue/qscripts/CNV/xhmmCNVpipeline.scala
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -134,6 +134,9 @@ class xhmmCNVpipeline extends QScript {
   @Argument(shortName = "subsegmentGenotypeThreshold", doc = "If genotypeSubsegments, this is the default genotype quality threshold for the sub-segments", required = false)
   var subsegmentGenotypeThreshold = 20.0
 
+  @Argument(shortName = "addGenotypeRegions", doc = "Additional interval list files to be genotyped", required = false)
+  var addGenotypeRegions: List[File] = List[File]()
+
   @Argument(shortName = "longJobQueue", doc = "Job queue to run the 'long-running' commands", required = false)
   var longJobQueue: String = ""
 
@@ -314,41 +317,7 @@ class xhmmCNVpipeline extends QScript {
     val discover = new DiscoverCNVs(filterZscore.filteredZscored, filterOriginal.sameFiltered)
     add(discover)
 
-
-    abstract class BaseGenotypeCNVs(inputParam: File, xcnv: File, origRDParam: File, outName: String) extends SamplesScatterable(xhmmExec, groups) with LongRunTime {
-      @Input(doc = "")
-      val input = inputParam
-
-      @Input(doc = "")
-      val xhmmParams = xhmmParamsArg
-
-      @Input(doc = "")
-      val origRD = origRDParam
-
-      @Input(doc = "")
-      val inXcnv = xcnv
-
-      @Output
-      @Gather(classOf[MergeVCFsGatherFunction])
-      val vcf: File = new File(outName)
-
-      override def commandLine =
-        xhmmExec + " --genotype" +
-          " -p " + xhmmParams +
-          " -r " + input +
-          " -g " + inXcnv +
-          " -F " + referenceFile +
-          " -R " + origRD +
-          " -v " +  vcf +
-          " " + genotypeCommandLineParams +
-          " " + addCommand
-    }
-
-    class GenotypeCNVs(inputParam: File, xcnv: File, origRDParam: File) extends BaseGenotypeCNVs(inputParam, xcnv, origRDParam, outputBase.getPath + ".vcf") {
-      override def description = "Genotypes discovered CNVs in all samples: " + commandLine
-    }
-
-    class GenotypeCNVandSubsegments(inputParam: File, xcnv: File, origRDParam: File) extends BaseGenotypeCNVs(inputParam, xcnv, origRDParam, outputBase.getPath + ".subsegments.vcf") {
+    class GenotypeCNVandSubsegments(inputParam: File, xcnv: File, origRDParam: File, xhmmParamsArg: File, referenceFile: File, genotypeCommandLineParams: String, xhmmExec: File, groups: List[Group]) extends BaseGenotypeCNVs(inputParam, xcnv, origRDParam, outputBase.getPath + ".subsegments.vcf", xhmmParamsArg, referenceFile, genotypeCommandLineParams, xhmmExec, groups) {
       override def commandLine =
         super.commandLine +
         " --subsegments" +
@@ -358,13 +327,19 @@ class xhmmCNVpipeline extends QScript {
       override def description = "Genotypes discovered CNVs (and their sub-segments, of up to " + maxTargetsInSubsegment + " targets) in all samples: " + commandLine
     }
 
-    val genotype = new GenotypeCNVs(filterZscore.filteredZscored, discover.xcnv, filterOriginal.sameFiltered)
+    val genotype = new GenotypeCNVs(filterZscore.filteredZscored, discover.xcnv, filterOriginal.sameFiltered, outputBase, xhmmParamsArg, referenceFile, genotypeCommandLineParams, xhmmExec, groups) with LongRunTime
     add(genotype)
 
     if (genotypeSubsegments) {
-      val genotypeSegs = new GenotypeCNVandSubsegments(filterZscore.filteredZscored, discover.xcnv, filterOriginal.sameFiltered)
+      val genotypeSegs = new GenotypeCNVandSubsegments(filterZscore.filteredZscored, discover.xcnv, filterOriginal.sameFiltered, xhmmParamsArg, referenceFile, genotypeCommandLineParams, xhmmExec, groups) with LongRunTime
       add(genotypeSegs)
     }
+
+    addGenotypeRegions :+= prepTargets.out
+    for (regionsFile <- addGenotypeRegions) {
+    	val genotypeRegions = new GenotypeCNVs(filterZscore.filteredZscored, regionsFile, filterOriginal.sameFiltered, new File(outputBase.getParent + "/" + regionsFile.getName.replace(".interval_list", "") + "." + outputBase.getName), xhmmParamsArg, referenceFile, genotypeCommandLineParams, xhmmExec, groups) with LongRunTime
+	add(genotypeRegions)
+    }
   }
 
   class ExcludeTargetsBasedOnValue(locus_valueIn : File, outSuffix : String, minVal : Double, maxVal : Double) extends InProcessFunction {
@@ -531,85 +506,3 @@ class xhmmCNVpipeline extends QScript {
     override def description = "Filters original read-depth data to be the same as filtered, normalized data: " + command
   }
 }
-
-
-abstract class SamplesScatterable(val xhmmExec: File, val groups: List[Group]) extends ScatterGatherableFunction with CommandLineFunction {
-  this.scatterCount = groups.size
-  this.scatterClass = classOf[SamplesScatterFunction]
-
-  @Input(doc = "", required=false)
-  var keepSampleIDs: Option[String] = None
-
-  def addCommand = if (keepSampleIDs.isDefined) ("--keepSampleIDs " + keepSampleIDs.get) else ""
-}
-
-class SamplesScatterFunction extends ScatterFunction with InProcessFunction {
-  protected var groups: List[Group] = _
-  override def scatterCount = groups.size
-
-  @Output(doc="Scatter function outputs")
-  var scatterSamples: Seq[File] = Nil
-
-  override def init() {
-    this.groups = this.originalFunction.asInstanceOf[SamplesScatterable].groups
-  }
-
-  override def bindCloneInputs(cloneFunction: CloneFunction, index: Int) {
-    val scatterPart = IOUtils.absolute(cloneFunction.commandDirectory, "keepSampleIDs.txt")
-    cloneFunction.setFieldValue("keepSampleIDs", Some(scatterPart))
-    this.scatterSamples :+= scatterPart
-  }
-
-  override def run() {
-    if (groups.size != this.scatterSamples.size)
-      throw new Exception("Internal inconsistency error in scattering jobs")
-
-    (groups, this.scatterSamples).zipped foreach {
-      (group, sampsFile) => {
-        val sampsWriter = new PrintWriter(new PrintStream(sampsFile))
-
-        for (samp <- group.samples) {
-          try {
-            sampsWriter.printf("%s%n", samp)
-          }
-          catch {
-            case e: Exception => throw e
-          }
-        }
-        sampsWriter.close
-      }
-    }
-  }
-}
-
-trait MergeVCFs extends CommandLineFunction {
-  var xhmmExec: File = _
-
-  @Input(doc = "")
-  var inputVCFs: List[File] = Nil
-
-  @Output
-  var mergedVCF: File = null
-
-  override def commandLine =
-    xhmmExec + " --mergeVCFs" +
-      inputVCFs.map(input => " --mergeVCF " + input).reduceLeft(_ + "" + _) +
-      " -v " + mergedVCF
-
-  override def description = "Combines VCF outputs for multiple samples (at same loci): " + commandLine
-}
-
-class MergeVCFsGatherFunction extends MergeVCFs with GatherFunction {
-  override def freezeFieldValues() {
-    super.freezeFieldValues()
-
-    this.xhmmExec = originalFunction.asInstanceOf[SamplesScatterable].xhmmExec
-
-    this.inputVCFs = this.gatherParts.toList
-    this.mergedVCF = this.originalOutput
-  }
-}
-
-class DummyGatherFunction extends InProcessFunction with GatherFunction {
-  override def run() {}
-}
diff --git a/public/gatk-queue-extensions-public/src/main/qscripts/org/broadinstitute/gatk/queue/qscripts/GATKResourcesBundle.scala b/public/gatk-queue-extensions-public/src/main/qscripts/org/broadinstitute/gatk/queue/qscripts/GATKResourcesBundle.scala
index d89e605..ef6baff 100644
--- a/public/gatk-queue-extensions-public/src/main/qscripts/org/broadinstitute/gatk/queue/qscripts/GATKResourcesBundle.scala
+++ b/public/gatk-queue-extensions-public/src/main/qscripts/org/broadinstitute/gatk/queue/qscripts/GATKResourcesBundle.scala
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -167,6 +167,9 @@ class GATKResourcesBundle extends QScript {
     addResource(new Resource("/humgen/gsa-hpprojects/GATK/data/Comparisons/Unvalidated/GoldStandardIndel/gold.standard.indel.MillsAnd1000G.b37.vcf",
       "Mills_and_1000G_gold_standard.indels", b37, true, false))
 
+    addResource(new Resource("/humgen/gsa-hpprojects/GATK/data/Comparisons/Validated/Affymetrix_Axiom/Axiom_Exome_Plus.sites_only.all_populations.poly.vcf",
+      "Axiom_Exome_Plus.sites_only.all_populations.poly", b37, true, false))
+
     //
     // CEU trio (NA12878,NA12891,NA12892) best practices results
     //
diff --git a/public/gatk-queue-extensions-public/src/main/qscripts/org/broadinstitute/gatk/queue/qscripts/examples/ExampleCountLoci.scala b/public/gatk-queue-extensions-public/src/main/qscripts/org/broadinstitute/gatk/queue/qscripts/examples/ExampleCountLoci.scala
index e988a42..0900b36 100644
--- a/public/gatk-queue-extensions-public/src/main/qscripts/org/broadinstitute/gatk/queue/qscripts/examples/ExampleCountLoci.scala
+++ b/public/gatk-queue-extensions-public/src/main/qscripts/org/broadinstitute/gatk/queue/qscripts/examples/ExampleCountLoci.scala
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-queue-extensions-public/src/main/qscripts/org/broadinstitute/gatk/queue/qscripts/examples/ExampleCountReads.scala b/public/gatk-queue-extensions-public/src/main/qscripts/org/broadinstitute/gatk/queue/qscripts/examples/ExampleCountReads.scala
index 55711c0..65da438 100644
--- a/public/gatk-queue-extensions-public/src/main/qscripts/org/broadinstitute/gatk/queue/qscripts/examples/ExampleCountReads.scala
+++ b/public/gatk-queue-extensions-public/src/main/qscripts/org/broadinstitute/gatk/queue/qscripts/examples/ExampleCountReads.scala
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-queue-extensions-public/src/main/qscripts/org/broadinstitute/gatk/queue/qscripts/examples/ExampleCustomWalker.scala b/public/gatk-queue-extensions-public/src/main/qscripts/org/broadinstitute/gatk/queue/qscripts/examples/ExampleCustomWalker.scala
index e38ba3e..9826494 100644
--- a/public/gatk-queue-extensions-public/src/main/qscripts/org/broadinstitute/gatk/queue/qscripts/examples/ExampleCustomWalker.scala
+++ b/public/gatk-queue-extensions-public/src/main/qscripts/org/broadinstitute/gatk/queue/qscripts/examples/ExampleCustomWalker.scala
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-queue-extensions-public/src/main/qscripts/org/broadinstitute/gatk/queue/qscripts/examples/ExamplePrintReads.scala b/public/gatk-queue-extensions-public/src/main/qscripts/org/broadinstitute/gatk/queue/qscripts/examples/ExamplePrintReads.scala
index 440b6f2..8d300f9 100644
--- a/public/gatk-queue-extensions-public/src/main/qscripts/org/broadinstitute/gatk/queue/qscripts/examples/ExamplePrintReads.scala
+++ b/public/gatk-queue-extensions-public/src/main/qscripts/org/broadinstitute/gatk/queue/qscripts/examples/ExamplePrintReads.scala
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -41,6 +41,9 @@ class ExamplePrintReads extends QScript {
   @Output(doc="Bam output", shortName="out")
   var outFile: File = _
 
+  @Argument(doc="One or more genomic intervals over which to operate", shortName="L", required=false)
+  var intervals: Seq[String] = Nil
+
   def script() {
     val printReads = new PrintReads
     printReads.reference_sequence = referenceFile
@@ -48,6 +51,7 @@ class ExamplePrintReads extends QScript {
     printReads.scatterCount = 3
     printReads.input_file :+= bamFile
     printReads.out = outFile
+    printReads.intervalsString = intervals
     add(printReads)
   }
 }
diff --git a/public/gatk-queue-extensions-public/src/main/qscripts/org/broadinstitute/gatk/queue/qscripts/examples/ExampleReadFilter.scala b/public/gatk-queue-extensions-public/src/main/qscripts/org/broadinstitute/gatk/queue/qscripts/examples/ExampleReadFilter.scala
index f736406..20798a3 100644
--- a/public/gatk-queue-extensions-public/src/main/qscripts/org/broadinstitute/gatk/queue/qscripts/examples/ExampleReadFilter.scala
+++ b/public/gatk-queue-extensions-public/src/main/qscripts/org/broadinstitute/gatk/queue/qscripts/examples/ExampleReadFilter.scala
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-queue-extensions-public/src/main/qscripts/org/broadinstitute/gatk/queue/qscripts/examples/ExampleRetryMemoryLimit.scala b/public/gatk-queue-extensions-public/src/main/qscripts/org/broadinstitute/gatk/queue/qscripts/examples/ExampleRetryMemoryLimit.scala
index 71e0094..34a2556 100644
--- a/public/gatk-queue-extensions-public/src/main/qscripts/org/broadinstitute/gatk/queue/qscripts/examples/ExampleRetryMemoryLimit.scala
+++ b/public/gatk-queue-extensions-public/src/main/qscripts/org/broadinstitute/gatk/queue/qscripts/examples/ExampleRetryMemoryLimit.scala
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-queue-extensions-public/src/main/qscripts/org/broadinstitute/gatk/queue/qscripts/examples/HelloWorld.scala b/public/gatk-queue-extensions-public/src/main/qscripts/org/broadinstitute/gatk/queue/qscripts/examples/HelloWorld.scala
index c095169..7564da9 100644
--- a/public/gatk-queue-extensions-public/src/main/qscripts/org/broadinstitute/gatk/queue/qscripts/examples/HelloWorld.scala
+++ b/public/gatk-queue-extensions-public/src/main/qscripts/org/broadinstitute/gatk/queue/qscripts/examples/HelloWorld.scala
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-queue-extensions-public/src/main/qscripts/org/broadinstitute/gatk/queue/qscripts/lib/ChunkVCF.scala b/public/gatk-queue-extensions-public/src/main/qscripts/org/broadinstitute/gatk/queue/qscripts/lib/ChunkVCF.scala
index ab687c0..2cf5b76 100644
--- a/public/gatk-queue-extensions-public/src/main/qscripts/org/broadinstitute/gatk/queue/qscripts/lib/ChunkVCF.scala
+++ b/public/gatk-queue-extensions-public/src/main/qscripts/org/broadinstitute/gatk/queue/qscripts/lib/ChunkVCF.scala
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-queue-extensions-public/src/main/qscripts/org/broadinstitute/gatk/queue/qscripts/lib/Vcf2Table.scala b/public/gatk-queue-extensions-public/src/main/qscripts/org/broadinstitute/gatk/queue/qscripts/lib/Vcf2Table.scala
index eb57d2e..aaa071d 100755
--- a/public/gatk-queue-extensions-public/src/main/qscripts/org/broadinstitute/gatk/queue/qscripts/lib/Vcf2Table.scala
+++ b/public/gatk-queue-extensions-public/src/main/qscripts/org/broadinstitute/gatk/queue/qscripts/lib/Vcf2Table.scala
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-queue-extensions-public/src/main/qscripts/org/broadinstitute/gatk/queue/qscripts/lib/VcfToPed.scala b/public/gatk-queue-extensions-public/src/main/qscripts/org/broadinstitute/gatk/queue/qscripts/lib/VcfToPed.scala
index 962d495..85bb70f 100644
--- a/public/gatk-queue-extensions-public/src/main/qscripts/org/broadinstitute/gatk/queue/qscripts/lib/VcfToPed.scala
+++ b/public/gatk-queue-extensions-public/src/main/qscripts/org/broadinstitute/gatk/queue/qscripts/lib/VcfToPed.scala
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/cancer/MuTect.scala b/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/cancer/MuTect.scala
index 36031d9..d8ade79 100644
--- a/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/cancer/MuTect.scala
+++ b/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/cancer/MuTect.scala
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -32,6 +32,7 @@ import org.broadinstitute.gatk.utils.commandline.Input
 import org.broadinstitute.gatk.utils.commandline.Output
 import org.broadinstitute.gatk.queue.function.scattergather.ScatterGatherableFunction
 import org.broadinstitute.gatk.queue.extensions.gatk.{TaggedFile, VcfGatherFunction, LocusScatterFunction}
+import org.broadinstitute.gatk.utils.commandline.ArgumentTypeDescriptor.isCompressed
 
 class MuTect extends org.broadinstitute.gatk.queue.extensions.gatk.CommandLineGATK with ScatterGatherableFunction {
   analysisName = "MuTect"
@@ -409,7 +410,7 @@ class MuTect extends org.broadinstitute.gatk.queue.extensions.gatk.CommandLineGA
   override def freezeFieldValues() {
     super.freezeFieldValues()
     if (vcf != null && !org.broadinstitute.gatk.utils.io.IOUtils.isSpecialFile(vcf))
-      if (!org.broadinstitute.gatk.engine.io.stubs.VCFWriterArgumentTypeDescriptor.isCompressed(vcf.getPath))
+      if (!org.broadinstitute.gatk.utils.commandline.ArgumentTypeDescriptor.isCompressed(vcf.getPath))
         vcfIndex = new File(vcf.getPath + ".idx")
     dbsnpIndexes ++= dbsnp.filter(orig => orig != null).map(orig => new File(orig.getPath + ".idx"))
     cosmicIndexes ++= cosmic.filter(orig => orig != null).map(orig => new File(orig.getPath + ".idx"))
diff --git a/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/gatk/BamGatherFunction.scala b/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/gatk/BamGatherFunction.scala
index 5388510..e502bb7 100644
--- a/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/gatk/BamGatherFunction.scala
+++ b/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/gatk/BamGatherFunction.scala
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/gatk/CatVariantsGatherer.scala b/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/gatk/CatVariantsGatherer.scala
index fd91e53..e111e19 100644
--- a/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/gatk/CatVariantsGatherer.scala
+++ b/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/gatk/CatVariantsGatherer.scala
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/gatk/ContigScatterFunction.scala b/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/gatk/ContigScatterFunction.scala
index 7eff645..d7d79bc 100644
--- a/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/gatk/ContigScatterFunction.scala
+++ b/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/gatk/ContigScatterFunction.scala
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -33,8 +33,6 @@ import org.broadinstitute.gatk.queue.function.InProcessFunction
  * Splits intervals by contig instead of evenly.
  */
 class ContigScatterFunction extends GATKScatterFunction with InProcessFunction {
-  // Include unmapped reads by default.
-  this.includeUnmapped = true
 
   override def scatterCount = if (intervalFilesExist) super.scatterCount min this.maxIntervals else super.scatterCount
 
diff --git a/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/gatk/DistributedScatterFunction.scala b/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/gatk/DistributedScatterFunction.scala
index f4ad993..49d51ef 100644
--- a/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/gatk/DistributedScatterFunction.scala
+++ b/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/gatk/DistributedScatterFunction.scala
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/gatk/DoC/package.scala b/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/gatk/DoC/package.scala
index fd54be6..4df0ac7 100644
--- a/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/gatk/DoC/package.scala
+++ b/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/gatk/DoC/package.scala
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -27,7 +27,6 @@ package org.broadinstitute.gatk.queue.extensions.gatk
 
 import java.io.{PrintStream, PrintWriter, File}
 import org.broadinstitute.gatk.queue.function.scattergather.ScatterGatherableFunction
-import org.broadinstitute.gatk.engine.downsampling.DownsampleType
 import org.broadinstitute.gatk.utils.commandline.{Input, Gather, Output}
 import org.broadinstitute.gatk.queue.function.{InProcessFunction, CommandLineFunction}
 import org.broadinstitute.gatk.tools.walkers.coverage.CoverageUtils
@@ -35,11 +34,13 @@ import scala.collection.JavaConversions._
 import scala.Some
 import org.broadinstitute.gatk.utils.text.XReadLines
 import org.broadinstitute.gatk.queue.util.VCF_BAM_utilities
+import org.broadinstitute.gatk.utils.downsampling.DownsampleType
 
 // Minimal refactor from a package object to a file full of classes/objects
 // due to ongoing bugs with inner classes/objects in package objects:
 //   https://issues.scala-lang.org/browse/SI-4344
 //   https://issues.scala-lang.org/browse/SI-5954
+
   class DoC(val bams: List[File], val DoC_output: File, val countType: CoverageUtils.CountPileupType, val MAX_DEPTH: Int, val minMappingQuality: Int, val minBaseQuality: Int, val scatterCountInput: Int, val START_BIN: Int, val NUM_BINS: Int, val minCoverageCalcs: Seq[Int], val sampleRenameMappingFile: Option[File] = None) extends CommandLineGATK with ScatterGatherableFunction {
     val DOC_OUTPUT_SUFFIX: String = ".sample_interval_summary"
 
diff --git a/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/gatk/GATKIntervals.scala b/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/gatk/GATKIntervals.scala
index 0e568b6..72e7395 100644
--- a/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/gatk/GATKIntervals.scala
+++ b/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/gatk/GATKIntervals.scala
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -59,7 +59,7 @@ case class GATKIntervals(reference: File, intervals: Seq[File], intervalsString:
       this.excludeIntervalsString.map(GATKIntervals.createBinding(_, "excludeIntervalsString"))
 
     IntervalUtils.parseIntervalBindings(
-      referenceDataSource,
+      referenceDataSource.getReference,
       includeIntervalBindings,
       intervalSetRule, intervalMergingRule, intervalPadding.getOrElse(0),
       excludeIntervalBindings).toList
diff --git a/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/gatk/GATKScatterFunction.scala b/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/gatk/GATKScatterFunction.scala
index 01075c3..afe4655 100644
--- a/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/gatk/GATKScatterFunction.scala
+++ b/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/gatk/GATKScatterFunction.scala
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -48,7 +48,7 @@ trait GATKScatterFunction extends ScatterFunction {
   protected var originalGATK: CommandLineGATK = _
 
   /** Whether the last scatter job should also include any unmapped reads. */
-  protected var includeUnmapped: Boolean = _
+  var includeUnmapped: Boolean = _
 
   override def init() {
     this.originalGATK = this.originalFunction.asInstanceOf[CommandLineGATK]
diff --git a/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/gatk/IntervalScatterFunction.scala b/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/gatk/IntervalScatterFunction.scala
index 99454d4..8fd7622 100644
--- a/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/gatk/IntervalScatterFunction.scala
+++ b/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/gatk/IntervalScatterFunction.scala
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/gatk/LocusScatterFunction.scala b/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/gatk/LocusScatterFunction.scala
index 5d71d35..7983f56 100644
--- a/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/gatk/LocusScatterFunction.scala
+++ b/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/gatk/LocusScatterFunction.scala
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/gatk/ReadScatterFunction.scala b/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/gatk/ReadScatterFunction.scala
index 01e9eed..4a7deea 100644
--- a/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/gatk/ReadScatterFunction.scala
+++ b/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/gatk/ReadScatterFunction.scala
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/gatk/TaggedFile.scala b/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/gatk/TaggedFile.scala
index e161209..967a00d 100644
--- a/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/gatk/TaggedFile.scala
+++ b/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/gatk/TaggedFile.scala
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/gatk/VcfGatherFunction.scala b/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/gatk/VcfGatherFunction.scala
index 68664c3..6ba815b 100644
--- a/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/gatk/VcfGatherFunction.scala
+++ b/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/gatk/VcfGatherFunction.scala
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/gatk/WriteFlankingIntervalsFunction.scala b/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/gatk/WriteFlankingIntervalsFunction.scala
index 290eff9..0f89fe3 100644
--- a/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/gatk/WriteFlankingIntervalsFunction.scala
+++ b/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/gatk/WriteFlankingIntervalsFunction.scala
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/gatk/XHMM/package.scala b/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/gatk/XHMM/package.scala
new file mode 100644
index 0000000..153cf4b
--- /dev/null
+++ b/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/gatk/XHMM/package.scala
@@ -0,0 +1,156 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.queue.extensions.gatk
+
+import org.broadinstitute.gatk.queue.extensions.gatk._
+import org.broadinstitute.gatk.queue.QScript
+import org.broadinstitute.gatk.queue.extensions.gatk.DoC._
+import org.broadinstitute.gatk.utils.commandline._
+import java.io.{File, PrintStream, PrintWriter}
+import collection.JavaConversions._
+import org.broadinstitute.gatk.queue.function.scattergather.{CloneFunction, ScatterFunction, GatherFunction, ScatterGatherableFunction}
+import org.broadinstitute.gatk.queue.function.{CommandLineFunction, InProcessFunction}
+import org.broadinstitute.gatk.utils.io.IOUtils
+
+// Minimal refactor from a package object to a file full of classes/objects
+// due to ongoing bugs with inner classes/objects in package objects:
+//   https://issues.scala-lang.org/browse/SI-4344
+//   https://issues.scala-lang.org/browse/SI-5954
+
+    abstract class BaseGenotypeCNVs(inputParam: File, xcnv: File, origRDParam: File, outName: String, xhmmParamsArg: File, referenceFile: File, genotypeCommandLineParams: String, xhmmExec: File, groups: List[Group]) extends SamplesScatterable(xhmmExec, groups) {
+      @Input(doc = "")
+      val input = inputParam
+
+      @Input(doc = "")
+      val xhmmParams = xhmmParamsArg
+
+      @Input(doc = "")
+      val origRD = origRDParam
+
+      @Input(doc = "")
+      val inXcnv = xcnv
+
+      @Output
+      @Gather(classOf[MergeVCFsGatherFunction])
+      val vcf: File = new File(outName)
+
+      override def commandLine =
+        xhmmExec + " --genotype" +
+          " -p " + xhmmParams +
+          " -r " + input +
+          " -g " + inXcnv +
+          " -F " + referenceFile +
+          " -R " + origRD +
+          " -v " +  vcf +
+          " " + genotypeCommandLineParams +
+          " " + addCommand
+    }
+
+    class GenotypeCNVs(inputParam: File, xcnv: File, origRDParam: File, genotypeOutputBase: File, xhmmParamsArg: File, referenceFile: File, genotypeCommandLineParams: String, xhmmExec: File, groups: List[Group]) extends BaseGenotypeCNVs(inputParam, xcnv, origRDParam, genotypeOutputBase.getPath + ".vcf", xhmmParamsArg, referenceFile, genotypeCommandLineParams, xhmmExec, groups) {
+      override def description = "Genotypes CNV regions in all samples: " + commandLine
+    }
+
+
+abstract class SamplesScatterable(val xhmmExec: File, val groups: List[Group]) extends ScatterGatherableFunction with CommandLineFunction {
+  this.scatterCount = groups.size
+  this.scatterClass = classOf[SamplesScatterFunction]
+
+  @Input(doc = "", required=false)
+  var keepSampleIDs: Option[String] = None
+
+  def addCommand = if (keepSampleIDs.isDefined) ("--keepSampleIDs " + keepSampleIDs.get) else ""
+}
+
+class SamplesScatterFunction extends ScatterFunction with InProcessFunction {
+  protected var groups: List[Group] = _
+  override def scatterCount = groups.size
+
+  @Output(doc="Scatter function outputs")
+  var scatterSamples: Seq[File] = Nil
+
+  override def init() {
+    this.groups = this.originalFunction.asInstanceOf[SamplesScatterable].groups
+  }
+
+  override def bindCloneInputs(cloneFunction: CloneFunction, index: Int) {
+    val scatterPart = IOUtils.absolute(cloneFunction.commandDirectory, "keepSampleIDs.txt")
+    cloneFunction.setFieldValue("keepSampleIDs", Some(scatterPart))
+    this.scatterSamples :+= scatterPart
+  }
+
+  override def run() {
+    if (groups.size != this.scatterSamples.size)
+      throw new Exception("Internal inconsistency error in scattering jobs")
+
+    (groups, this.scatterSamples).zipped foreach {
+      (group, sampsFile) => {
+        val sampsWriter = new PrintWriter(new PrintStream(sampsFile))
+
+        for (samp <- group.samples) {
+          try {
+            sampsWriter.printf("%s%n", samp)
+          }
+          catch {
+            case e: Exception => throw e
+          }
+        }
+        sampsWriter.close
+      }
+    }
+  }
+}
+
+trait MergeVCFs extends CommandLineFunction {
+  var xhmmExec: File = _
+
+  @Input(doc = "")
+  var inputVCFs: List[File] = Nil
+
+  @Output
+  var mergedVCF: File = null
+
+  override def commandLine =
+    xhmmExec + " --mergeVCFs" +
+      inputVCFs.map(input => " --mergeVCF " + input).reduceLeft(_ + "" + _) +
+      " -v " + mergedVCF
+
+  override def description = "Combines VCF outputs for multiple samples (at same loci): " + commandLine
+}
+
+class MergeVCFsGatherFunction extends MergeVCFs with GatherFunction {
+  override def freezeFieldValues() {
+    super.freezeFieldValues()
+
+    this.xhmmExec = originalFunction.asInstanceOf[SamplesScatterable].xhmmExec
+
+    this.inputVCFs = this.gatherParts.toList
+    this.mergedVCF = this.originalOutput
+  }
+}
+
+class DummyGatherFunction extends InProcessFunction with GatherFunction {
+  override def run() {}
+}
diff --git a/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/picard/AddOrReplaceReadGroups.scala b/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/picard/AddOrReplaceReadGroups.scala
index 6b70d2f..e02863f 100644
--- a/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/picard/AddOrReplaceReadGroups.scala
+++ b/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/picard/AddOrReplaceReadGroups.scala
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/picard/CalculateHsMetrics.scala b/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/picard/CalculateHsMetrics.scala
index 68c4ca7..fb6e4d5 100644
--- a/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/picard/CalculateHsMetrics.scala
+++ b/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/picard/CalculateHsMetrics.scala
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -58,11 +58,15 @@ class CalculateHsMetrics extends org.broadinstitute.gatk.queue.function.JavaComm
   @Argument(doc="The level(s) at which to accumulate metrics. Possible values: {ALL_READS, SAMPLE, LIBRARY, READ_GROUP} This option may be specified 0 or more times.", shortName = "level", fullName = "metric_accumulation_level", required = false)
   var level: Seq[picard.analysis.MetricAccumulationLevel] = Seq(MetricAccumulationLevel.SAMPLE)
 
+  @Argument(doc="Optional file to output per-target coverage", shortName = "coverage", fullName = "per_target_coverage", required = false)
+  var perTargetCoverage: File = _
+
   override def inputBams = input
   override def outputFile = output
   override def commandLine = super.commandLine +
     required("BAIT_INTERVALS=" + baits) +
     required("TARGET_INTERVALS=" + targets) +
     required("REFERENCE_SEQUENCE=" + reference) +
-    repeat("METRIC_ACCUMULATION_LEVEL=", level, spaceSeparated=false, escape=true, format="%s")
+    repeat("METRIC_ACCUMULATION_LEVEL=", level, spaceSeparated=false, escape=true, format="%s") +
+    optional("PER_TARGET_COVERAGE=", perTargetCoverage, spaceSeparated = false)
 }
diff --git a/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/picard/CollectGcBiasMetrics.scala b/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/picard/CollectGcBiasMetrics.scala
index f0d2300..f36d14a 100644
--- a/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/picard/CollectGcBiasMetrics.scala
+++ b/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/picard/CollectGcBiasMetrics.scala
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/picard/CollectMultipleMetrics.scala b/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/picard/CollectMultipleMetrics.scala
index 1507366..022179b 100644
--- a/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/picard/CollectMultipleMetrics.scala
+++ b/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/picard/CollectMultipleMetrics.scala
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/picard/CollectWgsMetrics.scala b/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/picard/CollectWgsMetrics.scala
new file mode 100644
index 0000000..923ce00
--- /dev/null
+++ b/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/picard/CollectWgsMetrics.scala
@@ -0,0 +1,70 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.queue.extensions.picard
+
+import org.broadinstitute.gatk.utils.commandline.{Argument, Output, Input}
+import java.io.File
+
+class CollectWgsMetrics extends org.broadinstitute.gatk.queue.function.JavaCommandLineFunction with PicardMetricsFunction {
+  analysisName = "CollectWgsMetrics"
+  javaMainClass = "picard.analysis.CollectWgsMetrics"
+
+  @Input(doc = "The input SAM or BAM files to analyze", shortName = "i", fullName = "input_bam_files", required = true)
+  var input: Seq[File] = Nil
+
+  @Output(doc = "The output file to write statistics to", shortName = "o", fullName = "output_file", required = true)
+  var output: File = _
+
+  @Argument(doc = "Reference file", shortName = "r", fullName = "reference", required = true)
+  var reference: File = _
+
+  @Argument(doc = "Minimum mapping quality for a read to contribute coverage.", shortName = "mq", fullName = "minimum_mapping_quality", required = false)
+  var mq: Integer = _
+
+  @Argument(doc = "Minimum base quality for a base to contribute coverage.", shortName = "q", fullName = "minimum_base_quality", required = false)
+  var q: Integer = _
+
+  @Argument(doc = "Treat bases with coverage exceeding this value as if they had coverage at this value.", shortName = "cap", fullName = "coverage_cap", required = false)
+  var cap: Integer = _
+
+  @Argument(doc = "For debugging purposes, stop after processing this many genomic bases.", fullName = "stop_after", required = false)
+  var stopAfter: Long = _
+
+  @Argument(doc = "Determines whether to include the base quality histogram in the metrics file.", fullName = "include_bq_histogram", required = false)
+  var includeBQHistogram: Boolean = _
+
+  override def inputBams = input
+
+  override def outputFile = output
+
+  override def commandLine = super.commandLine +
+    required("REFERENCE_SEQUENCE=" + reference) +
+    optional("MQ=", mq, spaceSeparated = false) +
+    optional("Q=", q, spaceSeparated = false) +
+    optional("CAP=", cap, spaceSeparated = false) +
+    optional("STOP_AFTER=", stopAfter, spaceSeparated = false) +
+    optional("INCLUDE_BQ_HISTOGRAM=", includeBQHistogram, spaceSeparated = false)
+}
\ No newline at end of file
diff --git a/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/picard/FastqToSam.scala b/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/picard/FastqToSam.scala
index 44e10af..24b88b1 100644
--- a/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/picard/FastqToSam.scala
+++ b/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/picard/FastqToSam.scala
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/picard/MarkDuplicates.scala b/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/picard/MarkDuplicates.scala
index 66460b6..94b88c8 100644
--- a/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/picard/MarkDuplicates.scala
+++ b/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/picard/MarkDuplicates.scala
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -37,7 +37,7 @@ import java.io.File
  */
 class MarkDuplicates extends org.broadinstitute.gatk.queue.function.JavaCommandLineFunction with PicardBamFunction {
   analysisName = "MarkDuplicates"
-  javaMainClass = "picard.sam.MarkDuplicates"
+  javaMainClass = "picard.sam.markduplicates.MarkDuplicates"
 
   @Input(doc="The input SAM or BAM files to analyze.  Must be coordinate sorted.", shortName = "input", fullName = "input_bam_files", required = true)
   var input: Seq[File] = Nil
@@ -55,7 +55,7 @@ class MarkDuplicates extends org.broadinstitute.gatk.queue.function.JavaCommandL
   var REMOVE_DUPLICATES: Boolean = false
 
   @Argument(doc = "Maximum number of file handles to keep open when spilling read ends to disk.  Set this number a little lower than the per-process maximum number of file that may be open.  This number can be found by executing the 'ulimit -n' command on a Unix system.", shortName = "max_file_handles", fullName ="max_file_handles_for_read_ends_maps", required=false)
-  var MAX_FILE_HANDLES_FOR_READ_ENDS_MAP: Int = -1;
+  var MAX_FILE_HANDLES_FOR_READ_ENDS_MAP: Int = -1
 
   @Argument(doc = "This number, plus the maximum RAM available to the JVM, determine the memory footprint used by some of the sorting collections.  If you are running out of memory, try reducing this number.", shortName = "sorting_ratio", fullName = "sorting_collection_size_ratio", required = false)
   var SORTING_COLLECTION_SIZE_RATIO: Double = -1
diff --git a/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/picard/MergeSamFiles.scala b/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/picard/MergeSamFiles.scala
index 1b25a8e..e0d07b2 100644
--- a/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/picard/MergeSamFiles.scala
+++ b/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/picard/MergeSamFiles.scala
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/picard/PicardBamFunction.scala b/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/picard/PicardBamFunction.scala
index 80a60c5..2be93f0 100644
--- a/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/picard/PicardBamFunction.scala
+++ b/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/picard/PicardBamFunction.scala
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/picard/PicardMetricsFunction.scala b/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/picard/PicardMetricsFunction.scala
index b9885a9..2b0ef1a 100644
--- a/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/picard/PicardMetricsFunction.scala
+++ b/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/picard/PicardMetricsFunction.scala
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/picard/ReorderSam.scala b/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/picard/ReorderSam.scala
index 1813694..482a6b5 100644
--- a/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/picard/ReorderSam.scala
+++ b/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/picard/ReorderSam.scala
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/picard/RevertSam.scala b/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/picard/RevertSam.scala
index 2012b54..d79ed96 100644
--- a/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/picard/RevertSam.scala
+++ b/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/picard/RevertSam.scala
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/picard/SamToFastq.scala b/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/picard/SamToFastq.scala
index e5624fc..7a65d5e 100644
--- a/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/picard/SamToFastq.scala
+++ b/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/picard/SamToFastq.scala
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/picard/SortSam.scala b/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/picard/SortSam.scala
index 847ed92..663186e 100644
--- a/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/picard/SortSam.scala
+++ b/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/picard/SortSam.scala
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/picard/ValidateSamFile.scala b/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/picard/ValidateSamFile.scala
index e9ad097..11f5135 100644
--- a/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/picard/ValidateSamFile.scala
+++ b/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/picard/ValidateSamFile.scala
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/samtools/SamtoolsCommandLineFunction.scala b/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/samtools/SamtoolsCommandLineFunction.scala
index a7e6030..54400a0 100644
--- a/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/samtools/SamtoolsCommandLineFunction.scala
+++ b/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/samtools/SamtoolsCommandLineFunction.scala
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/samtools/SamtoolsIndexFunction.scala b/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/samtools/SamtoolsIndexFunction.scala
index cb55c06..86a5a12 100644
--- a/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/samtools/SamtoolsIndexFunction.scala
+++ b/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/samtools/SamtoolsIndexFunction.scala
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/samtools/SamtoolsMergeFunction.scala b/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/samtools/SamtoolsMergeFunction.scala
index 4d7c0c6..a3a60b6 100644
--- a/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/samtools/SamtoolsMergeFunction.scala
+++ b/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/samtools/SamtoolsMergeFunction.scala
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/snpeff/SnpEff.scala b/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/snpeff/SnpEff.scala
index ae316bc..fb8b8b8 100644
--- a/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/snpeff/SnpEff.scala
+++ b/public/gatk-queue-extensions-public/src/main/scala/org/broadinstitute/gatk/queue/extensions/snpeff/SnpEff.scala
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-queue-extensions-public/src/test/scala/org/broadinstitute/gatk/queue/extensions/gatk/GATKIntervalsUnitTest.scala b/public/gatk-queue-extensions-public/src/test/scala/org/broadinstitute/gatk/queue/extensions/gatk/GATKIntervalsUnitTest.scala
index f2ec8a9..bc7f85c 100644
--- a/public/gatk-queue-extensions-public/src/test/scala/org/broadinstitute/gatk/queue/extensions/gatk/GATKIntervalsUnitTest.scala
+++ b/public/gatk-queue-extensions-public/src/test/scala/org/broadinstitute/gatk/queue/extensions/gatk/GATKIntervalsUnitTest.scala
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-queue-extensions-public/src/test/scala/org/broadinstitute/gatk/queue/extensions/gatk/QueueFeaturesQueueTest.scala b/public/gatk-queue-extensions-public/src/test/scala/org/broadinstitute/gatk/queue/extensions/gatk/QueueFeaturesQueueTest.scala
new file mode 100644
index 0000000..0103822
--- /dev/null
+++ b/public/gatk-queue-extensions-public/src/test/scala/org/broadinstitute/gatk/queue/extensions/gatk/QueueFeaturesQueueTest.scala
@@ -0,0 +1,63 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.queue.extensions.gatk
+
+import org.broadinstitute.gatk.queue.pipeline.{QueueTest, QueueTestSpec}
+import org.broadinstitute.gatk.utils.BaseTest
+import org.testng.annotations.Test
+
+class QueueFeaturesQueueTest {
+
+  @Test(timeOut=36000000)
+  def testIncludeUnmapped(): Unit = {
+
+    //First case: When no intervals are specified, unmapped reads should be included
+    val testOut = "withunmapped.bam"
+    val spec = new QueueTestSpec
+    spec.name = "includeUnmapped"
+    spec.args = Array(
+      " -S " + QueueTest.publicQScriptsPackageDir + "examples/ExamplePrintReads.scala",
+      " -R " + BaseTest.publicTestDir + "exampleFASTA.fasta",
+      " -I " + BaseTest.publicTestDir + "exampleBAM_with_unmapped.bam",
+      " -out " + testOut).mkString
+    spec.fileMD5s += testOut -> "3134a6c732d7f235373095586bc7d470"
+    QueueTest.executeTest(spec)
+
+    //Second case: When intervals are explicitly provided, unmapped reads should not be included
+    val testOut2 = "withoutunmapped.bam"
+    val spec2 = new QueueTestSpec
+    spec2.name = "excludeUnmapped"
+    spec2.args = Array(
+      " -S " + QueueTest.publicQScriptsPackageDir + "examples/ExamplePrintReads.scala",
+      " -R " + BaseTest.publicTestDir + "exampleFASTA.fasta",
+      " -I " + BaseTest.publicTestDir + "exampleBAM_with_unmapped.bam",
+      " -L chr1",
+      " -out " + testOut2).mkString
+    spec2.fileMD5s += testOut2 -> "aa33e589879c4baf6a470d22da76d885"
+    QueueTest.executeTest(spec2)
+  }
+
+}
diff --git a/public/gatk-queue-extensions-public/src/test/scala/org/broadinstitute/gatk/queue/pipeline/examples/ExampleCountLociQueueTest.scala b/public/gatk-queue-extensions-public/src/test/scala/org/broadinstitute/gatk/queue/pipeline/examples/ExampleCountLociQueueTest.scala
index f6cc746..5e34740 100644
--- a/public/gatk-queue-extensions-public/src/test/scala/org/broadinstitute/gatk/queue/pipeline/examples/ExampleCountLociQueueTest.scala
+++ b/public/gatk-queue-extensions-public/src/test/scala/org/broadinstitute/gatk/queue/pipeline/examples/ExampleCountLociQueueTest.scala
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-queue-extensions-public/src/test/scala/org/broadinstitute/gatk/queue/pipeline/examples/ExampleCountReadsQueueTest.scala b/public/gatk-queue-extensions-public/src/test/scala/org/broadinstitute/gatk/queue/pipeline/examples/ExampleCountReadsQueueTest.scala
index e79ea8a..189a88c 100644
--- a/public/gatk-queue-extensions-public/src/test/scala/org/broadinstitute/gatk/queue/pipeline/examples/ExampleCountReadsQueueTest.scala
+++ b/public/gatk-queue-extensions-public/src/test/scala/org/broadinstitute/gatk/queue/pipeline/examples/ExampleCountReadsQueueTest.scala
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-queue-extensions-public/src/test/scala/org/broadinstitute/gatk/queue/pipeline/examples/ExamplePrintReadsQueueTest.scala b/public/gatk-queue-extensions-public/src/test/scala/org/broadinstitute/gatk/queue/pipeline/examples/ExamplePrintReadsQueueTest.scala
index fcaf0d7..4829db7 100644
--- a/public/gatk-queue-extensions-public/src/test/scala/org/broadinstitute/gatk/queue/pipeline/examples/ExamplePrintReadsQueueTest.scala
+++ b/public/gatk-queue-extensions-public/src/test/scala/org/broadinstitute/gatk/queue/pipeline/examples/ExamplePrintReadsQueueTest.scala
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-queue-extensions-public/src/test/scala/org/broadinstitute/gatk/queue/pipeline/examples/ExampleReadFilterQueueTest.scala b/public/gatk-queue-extensions-public/src/test/scala/org/broadinstitute/gatk/queue/pipeline/examples/ExampleReadFilterQueueTest.scala
index af70174..39b74dc 100644
--- a/public/gatk-queue-extensions-public/src/test/scala/org/broadinstitute/gatk/queue/pipeline/examples/ExampleReadFilterQueueTest.scala
+++ b/public/gatk-queue-extensions-public/src/test/scala/org/broadinstitute/gatk/queue/pipeline/examples/ExampleReadFilterQueueTest.scala
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-queue-extensions-public/src/test/scala/org/broadinstitute/gatk/queue/pipeline/examples/ExampleRetryMemoryLimitQueueTest.scala b/public/gatk-queue-extensions-public/src/test/scala/org/broadinstitute/gatk/queue/pipeline/examples/ExampleRetryMemoryLimitQueueTest.scala
index dddccaa..46f630b 100644
--- a/public/gatk-queue-extensions-public/src/test/scala/org/broadinstitute/gatk/queue/pipeline/examples/ExampleRetryMemoryLimitQueueTest.scala
+++ b/public/gatk-queue-extensions-public/src/test/scala/org/broadinstitute/gatk/queue/pipeline/examples/ExampleRetryMemoryLimitQueueTest.scala
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-queue-extensions-public/src/test/scala/org/broadinstitute/gatk/queue/pipeline/examples/HelloWorldQueueTest.scala b/public/gatk-queue-extensions-public/src/test/scala/org/broadinstitute/gatk/queue/pipeline/examples/HelloWorldQueueTest.scala
index 093050a..1e51d75 100644
--- a/public/gatk-queue-extensions-public/src/test/scala/org/broadinstitute/gatk/queue/pipeline/examples/HelloWorldQueueTest.scala
+++ b/public/gatk-queue-extensions-public/src/test/scala/org/broadinstitute/gatk/queue/pipeline/examples/HelloWorldQueueTest.scala
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -68,7 +68,7 @@ class HelloWorldQueueTest {
     QueueTest.executeTest(spec)
   }
 
-  @Test(timeOut=36000000)
+  @Test(enabled=false, timeOut=36000000)
   def testHelloWorldWithLsfResource() {
     val spec = new QueueTestSpec
     spec.name = "HelloWorldWithLsfResource"
@@ -78,7 +78,7 @@ class HelloWorldQueueTest {
     QueueTest.executeTest(spec)
   }
 
-  @Test(timeOut=36000000)
+  @Test(enabled=false, timeOut=36000000)
   def testHelloWorldWithLsfResourceAndMemoryLimit() {
     val spec = new QueueTestSpec
     spec.name = "HelloWorldWithLsfResourceAndMemoryLimit"
@@ -88,7 +88,7 @@ class HelloWorldQueueTest {
     QueueTest.executeTest(spec)
   }
 
-  @Test(timeOut=36000000)
+  @Test(enabled=false, timeOut=36000000)
   def testHelloWorldWithLsfEnvironment() {
     val spec = new QueueTestSpec
     spec.name = "HelloWorldWithLsfEnvironment"
@@ -149,4 +149,23 @@ class HelloWorldQueueTest {
     spec.expectedFilePaths = Seq("pipelineLogDir/HelloWorld-1.out")
     QueueTest.executeTest(spec)
   }
+
+  @Test(timeOut=36000000)
+  def testHelloWorldParallelShell() {
+    val spec = new QueueTestSpec
+    spec.name = "HelloWorldWithLogDirectory"
+    spec.args = "-S " + QueueTest.publicQScriptsPackageDir + "examples/HelloWorld.scala"
+    spec.jobRunners = Seq("ParallelShell")
+    QueueTest.executeTest(spec)
+  }
+
+  @Test(timeOut=36000000)
+  def testHelloWorldParallelShellMaxConcurrentRun() {
+    val spec = new QueueTestSpec
+    spec.name = "HelloWorldWithLogDirectory"
+    spec.args = "-S " + QueueTest.publicQScriptsPackageDir + "examples/HelloWorld.scala" +
+      " -maxConcurrentRun 10"
+    spec.jobRunners = Seq("ParallelShell")
+    QueueTest.executeTest(spec)
+  }
 }
diff --git a/public/gatk-queue/pom.xml b/public/gatk-queue/pom.xml
index 05ce207..e56ad94 100644
--- a/public/gatk-queue/pom.xml
+++ b/public/gatk-queue/pom.xml
@@ -5,7 +5,7 @@
     <parent>
         <groupId>org.broadinstitute.gatk</groupId>
         <artifactId>gatk-aggregator</artifactId>
-        <version>3.3</version>
+        <version>3.5</version>
         <relativePath>../..</relativePath>
     </parent>
 
@@ -21,7 +21,7 @@
     <dependencies>
         <dependency>
             <groupId>${project.groupId}</groupId>
-            <artifactId>gatk-tools-public</artifactId>
+            <artifactId>gatk-utils</artifactId>
             <version>${project.version}</version>
         </dependency>
         <dependency>
@@ -47,7 +47,7 @@
 
         <dependency>
             <groupId>${project.groupId}</groupId>
-            <artifactId>gatk-tools-public</artifactId>
+            <artifactId>gatk-utils</artifactId>
             <version>${project.version}</version>
             <type>test-jar</type>
             <scope>test</scope>
diff --git a/public/gatk-queue/src/main/java/org/broadinstitute/gatk/queue/QueueVersion.java b/public/gatk-queue/src/main/java/org/broadinstitute/gatk/queue/QueueVersion.java
index e5e9dcb..f3d5a7d 100644
--- a/public/gatk-queue/src/main/java/org/broadinstitute/gatk/queue/QueueVersion.java
+++ b/public/gatk-queue/src/main/java/org/broadinstitute/gatk/queue/QueueVersion.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-queue/src/main/java/org/broadinstitute/gatk/queue/package-info.java b/public/gatk-queue/src/main/java/org/broadinstitute/gatk/queue/package-info.java
index 755b696..24722a0 100644
--- a/public/gatk-queue/src/main/java/org/broadinstitute/gatk/queue/package-info.java
+++ b/public/gatk-queue/src/main/java/org/broadinstitute/gatk/queue/package-info.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/QCommandLine.scala b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/QCommandLine.scala
index 297e10b..7994ae9 100644
--- a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/QCommandLine.scala
+++ b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/QCommandLine.scala
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -34,8 +34,7 @@ import org.broadinstitute.gatk.utils.classloader.PluginManager
 import org.broadinstitute.gatk.utils.exceptions.UserException
 import org.broadinstitute.gatk.utils.io.IOUtils
 import org.broadinstitute.gatk.utils.help.ApplicationDetails
-import java.util.{ResourceBundle, Arrays}
-import org.broadinstitute.gatk.utils.text.TextFormattingUtils
+import java.util.Arrays
 import org.apache.commons.io.FilenameUtils
 
 /**
@@ -260,33 +259,11 @@ class QCommandLine extends CommandLineProgram with Logging {
   }
 
   private def createQueueHeader() : Seq[String] = {
-    Seq(String.format("Queue v%s, Compiled %s", getQueueVersion, getBuildTimestamp),
+    Seq(String.format("Queue v%s, Compiled %s", CommandLineProgram.getVersionNumber, CommandLineProgram.getBuildTime),
          "Copyright (c) 2012 The Broad Institute",
          "For support and documentation go to http://www.broadinstitute.org/gatk")
   }
 
-  private def getQueueVersion : String = {
-    val stingResources : ResourceBundle = TextFormattingUtils.loadResourceBundle("GATKText")
-
-    if ( stingResources.containsKey("org.broadinstitute.gatk.queue.QueueVersion.version") ) {
-      stingResources.getString("org.broadinstitute.gatk.queue.QueueVersion.version")
-    }
-    else {
-      "<unknown>"
-    }
-  }
-
-  private def getBuildTimestamp : String = {
-    val stingResources : ResourceBundle = TextFormattingUtils.loadResourceBundle("GATKText")
-
-    if ( stingResources.containsKey("build.timestamp") ) {
-      stingResources.getString("build.timestamp")
-    }
-    else {
-      "<unknown>"
-    }
-  }
-
   def shutdown() = {
     shuttingDown = true
     qGraph.shutdown()
diff --git a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/QCommandPlugin.scala b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/QCommandPlugin.scala
index 6df8b3c..1e22d7e 100644
--- a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/QCommandPlugin.scala
+++ b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/QCommandPlugin.scala
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/QException.scala b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/QException.scala
index 1ae41e9..80ffe55 100644
--- a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/QException.scala
+++ b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/QException.scala
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/QScript.scala b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/QScript.scala
index f010445..541da93 100644
--- a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/QScript.scala
+++ b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/QScript.scala
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -82,8 +82,7 @@ trait QScript extends Logging with PrimitiveOptionConversions with StringFileCon
    * @param newExtension New extension to append.
    * @return new File with the new extension in the current directory.
    */
-  protected def swapExt(file: File, oldExtension: String, newExtension: String) =
-    new File(file.getName.stripSuffix(oldExtension) + newExtension)
+  protected def swapExt(file: File, oldExtension: String, newExtension: String) = QScriptUtils.swapExt(file, oldExtension, newExtension)
 
   /**
    * Exchanges the extension on a file.
@@ -93,8 +92,7 @@ trait QScript extends Logging with PrimitiveOptionConversions with StringFileCon
    * @param newExtension New extension to append.
    * @return new File with the new extension in dir.
    */
-  protected def swapExt(dir: File, file: File, oldExtension: String, newExtension: String) =
-    new File(dir, file.getName.stripSuffix(oldExtension) + newExtension)
+  protected def swapExt(dir: File, file: File, oldExtension: String, newExtension: String) = QScriptUtils.swapExt(dir, file, oldExtension, newExtension)
 
   /**
    * Adds one or more command line functions to be run.
@@ -181,4 +179,5 @@ object QScript {
   def resetAddOrder() {
     addOrder = 0
   }
+
 }
diff --git a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/QScriptManager.scala b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/QScriptManager.scala
index 8df12c2..8ffee1e 100644
--- a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/QScriptManager.scala
+++ b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/QScriptManager.scala
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/QSettings.scala b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/QSettings.scala
index 86457fb..b95e158 100644
--- a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/QSettings.scala
+++ b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/QSettings.scala
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -98,4 +98,9 @@ class QSettings {
   
   @Argument(fullName="log_directory", shortName="logDir", doc="Directory to write log files into.", required=false)
   var logDirectory: File = _
+
+  /**
+   * If set, use Broad-specific cluster settings in the GridEngine job runner. Activated via the -qsub-broad argument in QGraphSettings.
+   */
+  var useBroadClusterSettings: Boolean = false
 }
diff --git a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/engine/CommandLineJobManager.scala b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/engine/CommandLineJobManager.scala
index a3b004c..ab1fcbb 100644
--- a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/engine/CommandLineJobManager.scala
+++ b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/engine/CommandLineJobManager.scala
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/engine/CommandLineJobRunner.scala b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/engine/CommandLineJobRunner.scala
index e5c2594..941126f 100644
--- a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/engine/CommandLineJobRunner.scala
+++ b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/engine/CommandLineJobRunner.scala
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/engine/CommandLinePluginManager.scala b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/engine/CommandLinePluginManager.scala
index 3931c5f..37c50d0 100644
--- a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/engine/CommandLinePluginManager.scala
+++ b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/engine/CommandLinePluginManager.scala
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/engine/FunctionEdge.scala b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/engine/FunctionEdge.scala
index 1b02f5d..9a567c6 100644
--- a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/engine/FunctionEdge.scala
+++ b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/engine/FunctionEdge.scala
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/engine/InProcessJobManager.scala b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/engine/InProcessJobManager.scala
index aa7d069..351b50c 100644
--- a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/engine/InProcessJobManager.scala
+++ b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/engine/InProcessJobManager.scala
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/engine/InProcessRunner.scala b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/engine/InProcessRunner.scala
index bb8896d..b65a695 100644
--- a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/engine/InProcessRunner.scala
+++ b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/engine/InProcessRunner.scala
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/engine/JobManager.scala b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/engine/JobManager.scala
index e8fb1f0..16bc39c 100644
--- a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/engine/JobManager.scala
+++ b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/engine/JobManager.scala
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/engine/JobRunInfo.scala b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/engine/JobRunInfo.scala
index 20a536e..2671329 100644
--- a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/engine/JobRunInfo.scala
+++ b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/engine/JobRunInfo.scala
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/engine/JobRunner.scala b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/engine/JobRunner.scala
index d7e4868..eca27e9 100644
--- a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/engine/JobRunner.scala
+++ b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/engine/JobRunner.scala
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/engine/MappingEdge.scala b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/engine/MappingEdge.scala
index af51602..e2b2256 100644
--- a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/engine/MappingEdge.scala
+++ b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/engine/MappingEdge.scala
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/engine/QEdge.scala b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/engine/QEdge.scala
index d3f2f4b..6718dbb 100644
--- a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/engine/QEdge.scala
+++ b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/engine/QEdge.scala
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/engine/QGraph.scala b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/engine/QGraph.scala
index 7d09bf5..a34b9e6 100644
--- a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/engine/QGraph.scala
+++ b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/engine/QGraph.scala
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -395,12 +395,18 @@ class QGraph extends Logging {
    */
   private def runJobs() {
     try {
-      if (settings.bsub)
+      if (settings.bsub) {
         settings.jobRunner = "Lsf706"
-      else if (settings.qsub)
+      }
+      else if (settings.qsub || settings.qsubBroad) {
         settings.jobRunner = "GridEngine"
-      else if (settings.jobRunner == null)
+        if ( settings.qsubBroad ) {
+          settings.qSettings.useBroadClusterSettings = true
+        }
+      }
+      else if (settings.jobRunner == null) {
         settings.jobRunner = "Shell"
+      }
       commandLineManager = commandLinePluginManager.createByName(settings.jobRunner)
 
       for (mgr <- managers) {
@@ -428,7 +434,20 @@ class QGraph extends Logging {
         var doneJobs = Set.empty[FunctionEdge]
         var failedJobs = Set.empty[FunctionEdge]
 
-        while (running && readyJobs.size > 0 && !readyRunningCheck(lastRunningCheck)) {
+        def startJobs: Boolean = {
+
+          def canRunMoreConcurrentJobs: Boolean =
+            if(settings.maximumNumberOfConcurrentJobs.isDefined)
+              runningJobs.size + startedJobs.size < settings.maximumNumberOfConcurrentJobs.get
+            else
+              true
+
+          running && readyJobs.size > 0 &&
+            !readyRunningCheck(lastRunningCheck) &&
+            canRunMoreConcurrentJobs
+        }
+
+        while (startJobs) {
           val edge = readyJobs.head
           edge.runner = newRunner(edge.function)
           edge.start()
diff --git a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/engine/QGraphSettings.scala b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/engine/QGraphSettings.scala
index 49dace9..bc06868 100644
--- a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/engine/QGraphSettings.scala
+++ b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/engine/QGraphSettings.scala
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -28,7 +28,7 @@ package org.broadinstitute.gatk.queue.engine
 import java.io.File
 import org.broadinstitute.gatk.queue.QSettings
 import org.broadinstitute.gatk.queue.util.{EmailSettings, SystemUtils}
-import org.broadinstitute.gatk.utils.commandline.{Advanced, ArgumentCollection, Argument}
+import org.broadinstitute.gatk.utils.commandline.{ClassType, Advanced, ArgumentCollection, Argument}
 
 /**
  * Command line options for a QGraph.
@@ -46,6 +46,9 @@ class QGraphSettings {
   @Argument(fullName="qsub", shortName="qsub", doc="Equivalent to -jobRunner GridEngine", required=false)
   var qsub = false
 
+  @Argument(fullName="qsub-broad", shortName="qsub-broad", doc="Equivalent to -qsub, but uses GridEngine parameters specific to the Broad GridEngine cluster", required=false)
+  var qsubBroad = false
+
   @Argument(fullName="status",shortName="status",doc="Get status of jobs for the qscript",required=false)
   var getStatus = false
 
@@ -74,9 +77,14 @@ class QGraphSettings {
   var jobReportFile: String = _
 
   @Advanced
-  @Argument(fullName="disableJobReport", shortName="disabpleJobReport", doc="If provided, we will not create a job report", required=false)
+  @Argument(fullName="disableJobReport", shortName="disableJobReport", doc="If provided, we will not create a job report", required=false)
   var disableJobReport: Boolean = false
 
+  @Advanced
+  @ClassType(classOf[Int])
+  @Argument(fullName="maximumNumberOfJobsToRunConcurrently", shortName="maxConcurrentRun", doc="The maximum number of jobs to start at any given time. (Default is no limit)", required=false)
+  var maximumNumberOfConcurrentJobs: Option[Int] = None
+
   @ArgumentCollection
   val emailSettings = new EmailSettings
 
diff --git a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/engine/QNode.scala b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/engine/QNode.scala
index 5751a72..82d22c2 100644
--- a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/engine/QNode.scala
+++ b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/engine/QNode.scala
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/engine/QStatusMessenger.scala b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/engine/QStatusMessenger.scala
index 14e4082..912d378 100644
--- a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/engine/QStatusMessenger.scala
+++ b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/engine/QStatusMessenger.scala
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/engine/RunnerStatus.scala b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/engine/RunnerStatus.scala
index 93c9fde..5d5ac04 100644
--- a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/engine/RunnerStatus.scala
+++ b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/engine/RunnerStatus.scala
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/engine/drmaa/DrmaaJobManager.scala b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/engine/drmaa/DrmaaJobManager.scala
index 02cf34e..fedf091 100644
--- a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/engine/drmaa/DrmaaJobManager.scala
+++ b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/engine/drmaa/DrmaaJobManager.scala
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/engine/drmaa/DrmaaJobRunner.scala b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/engine/drmaa/DrmaaJobRunner.scala
index aa19bfa..ea4fafe 100644
--- a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/engine/drmaa/DrmaaJobRunner.scala
+++ b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/engine/drmaa/DrmaaJobRunner.scala
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/engine/gridengine/GridEngineJobManager.scala b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/engine/gridengine/GridEngineJobManager.scala
index eb60cb3..7b8e523 100644
--- a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/engine/gridengine/GridEngineJobManager.scala
+++ b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/engine/gridengine/GridEngineJobManager.scala
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/engine/gridengine/GridEngineJobRunner.scala b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/engine/gridengine/GridEngineJobRunner.scala
index b21f43b..b5b0121 100644
--- a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/engine/gridengine/GridEngineJobRunner.scala
+++ b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/engine/gridengine/GridEngineJobRunner.scala
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -58,8 +58,14 @@ class GridEngineJobRunner(session: Session, function: CommandLineFunction) exten
       nativeSpec += " -l %s=%dM".format(function.qSettings.residentRequestParameter, function.residentRequest.map(_ * 1024).get.ceil.toInt)
 
     // If the resident set size limit is defined specify the memory limit
-    if (function.residentLimit.isDefined)
-      nativeSpec += " -l h_rss=%dM".format(function.residentLimit.map(_ * 1024).get.ceil.toInt)
+    if (function.residentLimit.isDefined) {
+      var memoryLimitParameter : String = "h_rss"
+      if (function.qSettings.useBroadClusterSettings) {
+        memoryLimitParameter = "h_vmem"
+      }
+
+      nativeSpec += " -l %s=%dM".format(memoryLimitParameter, function.residentLimit.map(_ * 1024).get.ceil.toInt)
+    }
 
     // If more than 1 core is requested, set the proper request
     // if we aren't being jerks and just stealing cores (previous behavior)
@@ -82,7 +88,7 @@ class GridEngineJobRunner(session: Session, function: CommandLineFunction) exten
     if (priority.isDefined)
       nativeSpec += " -p " + priority.get
 
-    logger.debug("Native spec is: %s".format(nativeSpec))
+    logger.info("Native spec is: %s".format(nativeSpec))
     (nativeSpec + " " + super.functionNativeSpec).trim()
   }
 }
diff --git a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/engine/lsf/Lsf706JobManager.scala b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/engine/lsf/Lsf706JobManager.scala
index dbe2536..0b5a5ce 100644
--- a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/engine/lsf/Lsf706JobManager.scala
+++ b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/engine/lsf/Lsf706JobManager.scala
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/engine/lsf/Lsf706JobRunner.scala b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/engine/lsf/Lsf706JobRunner.scala
index eeb82a3..d35c9c0 100644
--- a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/engine/lsf/Lsf706JobRunner.scala
+++ b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/engine/lsf/Lsf706JobRunner.scala
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/engine/parallelshell/ParallelShellJobManager.scala b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/engine/parallelshell/ParallelShellJobManager.scala
new file mode 100644
index 0000000..c200538
--- /dev/null
+++ b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/engine/parallelshell/ParallelShellJobManager.scala
@@ -0,0 +1,70 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.queue.engine.parallelshell
+
+import org.broadinstitute.gatk.queue.function.CommandLineFunction
+import org.broadinstitute.gatk.queue.engine.CommandLineJobManager
+
+/**
+ * Runs multiple jobs locally without blocking.
+ * Use this with care as it might not be the most efficient way to run things.
+ * However, for some scenarios, such as running multiple single threaded
+ * programs concurrently it can be quite useful.
+ * 
+ * All this code is based on the normal shell runner in GATK Queue and all 
+ * credits for everything except the concurrency part goes to the GATK team.
+ * 
+ * @author Johan Dahlberg
+ *
+ */
+class ParallelShellJobManager extends CommandLineJobManager[ParallelShellJobRunner] {
+
+  def runnerType = classOf[ParallelShellJobRunner]
+
+  /**
+   * Create new ParallelShellJobRunner
+   * @param function Function for the runner.
+   * @return a new ParallelShellJobRunner instance
+   */
+  def create(function: CommandLineFunction) =
+    new ParallelShellJobRunner(function)
+
+  /**
+   * Update the status of the specified jobrunners.
+   * @param runners Runners to update.
+   * @return runners which were updated.
+   */
+  override def updateStatus(
+    runners: Set[ParallelShellJobRunner]): Set[ParallelShellJobRunner] =
+    runners.filter { runner => runner.updateJobStatus() }
+
+  /**
+   * Stop the specified runners.
+   * @param runners Runners to stop.
+   */
+  override def tryStop(runners: Set[ParallelShellJobRunner]) =
+    runners.foreach(_.tryStop())
+}
diff --git a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/engine/parallelshell/ParallelShellJobRunner.scala b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/engine/parallelshell/ParallelShellJobRunner.scala
new file mode 100644
index 0000000..8afb80a
--- /dev/null
+++ b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/engine/parallelshell/ParallelShellJobRunner.scala
@@ -0,0 +1,151 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.queue.engine.parallelshell
+
+import org.broadinstitute.gatk.queue.function.CommandLineFunction
+import org.broadinstitute.gatk.queue.engine.{ RunnerStatus, CommandLineJobRunner }
+import java.util.Date
+import org.broadinstitute.gatk.utils.Utils
+import org.broadinstitute.gatk.utils.runtime.{ ProcessSettings, OutputStreamSettings }
+import scala.concurrent._
+import ExecutionContext.Implicits.global
+import scala.util.{ Success, Failure }
+import org.broadinstitute.gatk.queue.util.Logging
+
+/**
+ * Runs multiple jobs locally without blocking.
+ * Use this with care as it might not be the most efficient way to run things.
+ * However, for some scenarios, such as running multiple single threaded
+ * programs concurrently it can be quite useful.
+ *
+ * All this code is based on the normal shell runner in GATK Queue and all
+ * credits for everything except the concurrency part goes to the GATK team.
+ *
+ * @author Johan Dahlberg - 20150611
+ *
+ * @param function Command to run.
+ */
+class ParallelShellJobRunner(val function: CommandLineFunction) extends CommandLineJobRunner with Logging {
+
+  // Controller on the thread that started the job
+  val controller: ThreadSafeProcessController = new ThreadSafeProcessController()
+
+  // Once the application exits this promise will be fulfilled.
+  val finalExitStatus = Promise[Int]()
+
+  /**
+   * Runs the function on the local shell.
+   */
+  def start() {
+    val commandLine = Array("sh", jobScript.getAbsolutePath)
+    val stdoutSettings = new OutputStreamSettings
+    val stderrSettings = new OutputStreamSettings
+    val mergeError = function.jobErrorFile == null
+
+    stdoutSettings.setOutputFile(function.jobOutputFile, true)
+    if (function.jobErrorFile != null)
+      stderrSettings.setOutputFile(function.jobErrorFile, true)
+
+    if (logger.isDebugEnabled) {
+      stdoutSettings.printStandard(true)
+      stderrSettings.printStandard(true)
+    }
+
+    val processSettings = new ProcessSettings(
+      commandLine, mergeError, function.commandDirectory, null,
+      null, stdoutSettings, stderrSettings)
+
+    updateJobRun(processSettings)
+
+    getRunInfo.startTime = new Date()
+    getRunInfo.exechosts = Utils.resolveHostname()
+    updateStatus(RunnerStatus.RUNNING)
+
+    // Run the command line process in a future.
+    val executedFuture =
+      future { controller.exec(processSettings) }
+
+    // Register a callback on the completion of the future, making sure that
+    // the status of the job is updated accordingly. 
+    executedFuture.onComplete {
+      case Success(exitStatus) =>
+        logger.debug(commandLine.mkString(" ") + " :: Got return on exit status in future: " + exitStatus)
+        finalExitStatus.success(exitStatus)
+        getRunInfo.doneTime = new Date()
+        exitStatusUpdateJobRunnerStatus(exitStatus)
+      case Failure(throwable) =>
+        logger.debug(
+          "Failed in return from run with: " +
+            throwable.getClass.getCanonicalName + " :: " +
+            throwable.getMessage)
+        finalExitStatus.failure(throwable)
+        getRunInfo.doneTime = new Date()
+        updateStatus(RunnerStatus.FAILED)
+    }
+  }
+
+  /**
+   * Possibly invoked from a shutdown thread, find and
+   * stop the controller from the originating thread
+   */
+  def tryStop() = {
+    try {
+      controller.tryDestroy()
+    } catch {
+      case e: Exception =>
+        logger.error("Unable to kill shell job: " + function.description, e)
+    }
+  }
+
+  /**
+   * Update the status of the runner based on the exit status
+   * of the process.
+   */
+  def exitStatusUpdateJobRunnerStatus(exitStatus: Int): Unit = {
+    exitStatus match {
+      case 0 => updateStatus(RunnerStatus.DONE)
+      case _ => updateStatus(RunnerStatus.FAILED)
+    }
+  }
+
+  /**
+   * Attempts to get the status of a job by looking at if the finalExitStatus
+   * promise has completed or not.
+   * @return whether the jobRunner has updated its status or not.
+   */
+  def updateJobStatus(): Boolean = {
+    if (finalExitStatus.isCompleted) {
+      val completedExitStatus = finalExitStatus.future.value.get.get
+      exitStatusUpdateJobRunnerStatus(completedExitStatus)
+      true
+    } else {
+      // Make sure the status is updated here, otherwise Queue will think
+      // it's lost control over the job and kill it after 5 minutes.
+      updateStatus(status)
+      false
+    }
+  }
+}
diff --git a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/engine/parallelshell/ThreadSafeProcessController.scala b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/engine/parallelshell/ThreadSafeProcessController.scala
new file mode 100644
index 0000000..4bf3f99
--- /dev/null
+++ b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/engine/parallelshell/ThreadSafeProcessController.scala
@@ -0,0 +1,106 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.queue.engine.parallelshell
+
+import java.io.PrintWriter
+
+import org.broadinstitute.gatk.queue.util.Logging
+import org.broadinstitute.gatk.utils.runtime.ProcessSettings
+import scala.sys.process._
+
+/**
+ * Wraps a process started via `scala.sys.process` so that it can be
+ * launched from one thread and destroyed from another (e.g. a shutdown
+ * hook), keeping a handle to the running process in between.
+ */
+class ThreadSafeProcessController extends Logging {
+
+  // The currently running process, if any. Set by exec(), read by tryDestroy().
+  private var process: Option[Process] = None
+
+  /**
+   * Construct a process logger writing the stdout and stderr of the
+   * process controlled by this instance to the files specified in
+   * the provided ProcessSettings instance.
+   * @param processSettings specifying which files to write to
+   * @return a process logger which can be used by `scala.sys.process`
+   */
+  private def getProcessLogger(processSettings: ProcessSettings): ProcessLogger = {
+
+    val stdOutFile = processSettings.getStdoutSettings.getOutputFile
+    val stdErrFile = processSettings.getStderrSettings.getOutputFile
+
+    val stdOutPrintWriter = new PrintWriter(stdOutFile)
+
+    // When no separate stderr file is configured, reuse the stdout writer
+    // rather than opening a second PrintWriter on the same file: two
+    // independent writers on one file truncate it on open and overwrite
+    // each other's output, losing lines.
+    val stdErrPrintWriter =
+      if (stdErrFile != null) new PrintWriter(stdErrFile)
+      else stdOutPrintWriter
+
+    // Flush after every line so output is visible while the job is running.
+    def printToWriter(printWriter: PrintWriter)(line: String): Unit = {
+      printWriter.println(line)
+      printWriter.flush()
+    }
+
+    // NOTE(review): the writers are never closed; every line is flushed so
+    // no output is lost, but the file handles live until GC — confirm this
+    // is acceptable for long-running Queue pipelines.
+    ProcessLogger(
+      printToWriter(stdOutPrintWriter) _,
+      printToWriter(stdErrPrintWriter) _
+    )
+  }
+
+  /**
+   * Execute the process specified in process settings, blocking until
+   * it terminates.
+   * @param processSettings specifying the commandline to run.
+   * @return the exit status of the process.
+   */
+  def exec(processSettings: ProcessSettings): Int = {
+    val commandLine: ProcessBuilder = processSettings.getCommand.mkString(" ")
+    logger.debug("Trying to start process: " + commandLine)
+    process = Some(commandLine.run(getProcessLogger(processSettings)))
+    // exitValue() blocks until the process finishes.
+    process.get.exitValue()
+  }
+
+  /**
+   * Attempt to destroy the underlying process.
+   * @throws IllegalStateException if no process has been started yet.
+   */
+  def tryDestroy(): Unit = {
+    logger.debug("Trying to kill process")
+    process.getOrElse {
+      throw new IllegalStateException("Tried to kill unstarted job.")
+    }.destroy()
+  }
+
+}
diff --git a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/engine/pbsengine/PbsEngineJobManager.scala b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/engine/pbsengine/PbsEngineJobManager.scala
index fb68231..31dfabe 100644
--- a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/engine/pbsengine/PbsEngineJobManager.scala
+++ b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/engine/pbsengine/PbsEngineJobManager.scala
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/engine/pbsengine/PbsEngineJobRunner.scala b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/engine/pbsengine/PbsEngineJobRunner.scala
index 092152f..5fa0d5b 100644
--- a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/engine/pbsengine/PbsEngineJobRunner.scala
+++ b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/engine/pbsengine/PbsEngineJobRunner.scala
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/engine/shell/ShellJobManager.scala b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/engine/shell/ShellJobManager.scala
index 5645590..e97f69e 100644
--- a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/engine/shell/ShellJobManager.scala
+++ b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/engine/shell/ShellJobManager.scala
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/engine/shell/ShellJobRunner.scala b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/engine/shell/ShellJobRunner.scala
index 327be22..1918676 100644
--- a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/engine/shell/ShellJobRunner.scala
+++ b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/engine/shell/ShellJobRunner.scala
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/function/CommandLineFunction.scala b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/function/CommandLineFunction.scala
index e1cb7d0..998d4e5 100644
--- a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/function/CommandLineFunction.scala
+++ b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/function/CommandLineFunction.scala
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/function/InProcessFunction.scala b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/function/InProcessFunction.scala
index 5525eeb..a7d1363 100644
--- a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/function/InProcessFunction.scala
+++ b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/function/InProcessFunction.scala
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/function/JavaCommandLineFunction.scala b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/function/JavaCommandLineFunction.scala
index 80027a0..61aa433 100644
--- a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/function/JavaCommandLineFunction.scala
+++ b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/function/JavaCommandLineFunction.scala
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/function/ListWriterFunction.scala b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/function/ListWriterFunction.scala
index c7450b1..33aa1f4 100644
--- a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/function/ListWriterFunction.scala
+++ b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/function/ListWriterFunction.scala
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/function/QFunction.scala b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/function/QFunction.scala
index f7e2671..358b8c3 100644
--- a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/function/QFunction.scala
+++ b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/function/QFunction.scala
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/function/RetryMemoryLimit.scala b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/function/RetryMemoryLimit.scala
index 9202c2a..1799c9a 100644
--- a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/function/RetryMemoryLimit.scala
+++ b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/function/RetryMemoryLimit.scala
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/function/scattergather/CloneFunction.scala b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/function/scattergather/CloneFunction.scala
index ccc9dcf..5eb581a 100644
--- a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/function/scattergather/CloneFunction.scala
+++ b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/function/scattergather/CloneFunction.scala
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/function/scattergather/ConcatenateLogsFunction.scala b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/function/scattergather/ConcatenateLogsFunction.scala
index b97cee1..35429c7 100644
--- a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/function/scattergather/ConcatenateLogsFunction.scala
+++ b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/function/scattergather/ConcatenateLogsFunction.scala
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/function/scattergather/GatherFunction.scala b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/function/scattergather/GatherFunction.scala
index 7044265..a94e86d 100644
--- a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/function/scattergather/GatherFunction.scala
+++ b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/function/scattergather/GatherFunction.scala
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/function/scattergather/GathererFunction.scala b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/function/scattergather/GathererFunction.scala
index 4fcc19f..f560452 100644
--- a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/function/scattergather/GathererFunction.scala
+++ b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/function/scattergather/GathererFunction.scala
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/function/scattergather/ScatterFunction.scala b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/function/scattergather/ScatterFunction.scala
index 29f8c41..e115667 100644
--- a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/function/scattergather/ScatterFunction.scala
+++ b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/function/scattergather/ScatterFunction.scala
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/function/scattergather/ScatterGatherableFunction.scala b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/function/scattergather/ScatterGatherableFunction.scala
index 50ad3cf..6bb865b 100644
--- a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/function/scattergather/ScatterGatherableFunction.scala
+++ b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/function/scattergather/ScatterGatherableFunction.scala
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/function/scattergather/SimpleTextGatherFunction.scala b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/function/scattergather/SimpleTextGatherFunction.scala
index 2c6aa58..3068723 100644
--- a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/function/scattergather/SimpleTextGatherFunction.scala
+++ b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/function/scattergather/SimpleTextGatherFunction.scala
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/library/clf/vcf/VCFExtractIntervals.scala b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/library/clf/vcf/VCFExtractIntervals.scala
index e3b936f..4910e1d 100644
--- a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/library/clf/vcf/VCFExtractIntervals.scala
+++ b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/library/clf/vcf/VCFExtractIntervals.scala
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/library/clf/vcf/VCFExtractSamples.scala b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/library/clf/vcf/VCFExtractSamples.scala
index 799061a..cd15b51 100644
--- a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/library/clf/vcf/VCFExtractSamples.scala
+++ b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/library/clf/vcf/VCFExtractSamples.scala
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/library/ipf/SortByRef.scala b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/library/ipf/SortByRef.scala
index 0ee38e9..6131f16 100644
--- a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/library/ipf/SortByRef.scala
+++ b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/library/ipf/SortByRef.scala
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/library/ipf/vcf/VCFExtractIntervals.scala b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/library/ipf/vcf/VCFExtractIntervals.scala
index 8abcf5b..1cba5f2 100644
--- a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/library/ipf/vcf/VCFExtractIntervals.scala
+++ b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/library/ipf/vcf/VCFExtractIntervals.scala
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/library/ipf/vcf/VCFExtractSamples.scala b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/library/ipf/vcf/VCFExtractSamples.scala
index 99571e9..f2cc999 100644
--- a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/library/ipf/vcf/VCFExtractSamples.scala
+++ b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/library/ipf/vcf/VCFExtractSamples.scala
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/library/ipf/vcf/VCFExtractSites.scala b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/library/ipf/vcf/VCFExtractSites.scala
index 5bd25bf..31b43c2 100644
--- a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/library/ipf/vcf/VCFExtractSites.scala
+++ b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/library/ipf/vcf/VCFExtractSites.scala
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/library/ipf/vcf/VCFSimpleMerge.scala b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/library/ipf/vcf/VCFSimpleMerge.scala
index b3093c8..3c4e025 100644
--- a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/library/ipf/vcf/VCFSimpleMerge.scala
+++ b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/library/ipf/vcf/VCFSimpleMerge.scala
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/util/ClassFieldCache.scala b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/util/ClassFieldCache.scala
index 82b8ca5..2775845 100644
--- a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/util/ClassFieldCache.scala
+++ b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/util/ClassFieldCache.scala
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/util/CollectionUtils.scala b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/util/CollectionUtils.scala
index 5c0be04..e77ef47 100644
--- a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/util/CollectionUtils.scala
+++ b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/util/CollectionUtils.scala
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/util/EmailMessage.scala b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/util/EmailMessage.scala
index e38183a..e8d2f22 100644
--- a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/util/EmailMessage.scala
+++ b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/util/EmailMessage.scala
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/util/EmailSettings.scala b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/util/EmailSettings.scala
index d315410..8ab5a13 100644
--- a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/util/EmailSettings.scala
+++ b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/util/EmailSettings.scala
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/util/Logging.scala b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/util/Logging.scala
index 3a83a2f..a9b12d1 100644
--- a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/util/Logging.scala
+++ b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/util/Logging.scala
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/util/PrimitiveOptionConversions.scala b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/util/PrimitiveOptionConversions.scala
index edaf802..66a7242 100644
--- a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/util/PrimitiveOptionConversions.scala
+++ b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/util/PrimitiveOptionConversions.scala
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/util/QJobReport.scala b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/util/QJobReport.scala
index be5a17f..fd86d4d 100644
--- a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/util/QJobReport.scala
+++ b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/util/QJobReport.scala
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -26,8 +26,8 @@
 package org.broadinstitute.gatk.queue.util
 
 import org.broadinstitute.gatk.queue.function.QFunction
-import org.broadinstitute.gatk.engine.report.GATKReportTable
 import org.broadinstitute.gatk.queue.engine.JobRunInfo
+import org.broadinstitute.gatk.utils.report.GATKReportTable
 
 /**
  * A mixin to add Job info to the class
diff --git a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/util/QJobsReporter.scala b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/util/QJobsReporter.scala
index b3b0b33..7b6dde6 100644
--- a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/util/QJobsReporter.scala
+++ b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/util/QJobsReporter.scala
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -30,9 +30,10 @@ import org.broadinstitute.gatk.utils.io.{Resource}
 import org.broadinstitute.gatk.queue.engine.{JobRunInfo, QGraph}
 import org.broadinstitute.gatk.queue.function.QFunction
 import org.broadinstitute.gatk.utils.R.{RScriptLibrary, RScriptExecutor}
-import org.broadinstitute.gatk.engine.report.{GATKReportTable, GATKReport}
+import org.broadinstitute.gatk.utils.report.GATKReportTable
 import org.broadinstitute.gatk.utils.exceptions.UserException
 import org.apache.commons.io.{FileUtils, IOUtils}
+import org.broadinstitute.gatk.utils.report.{GATKReportTable, GATKReport}
 
 /**
  * Writes out RunInfo to a GATKReport
diff --git a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/util/QScriptUtils.scala b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/util/QScriptUtils.scala
index 3fbce22..5627a46 100644
--- a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/util/QScriptUtils.scala
+++ b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/util/QScriptUtils.scala
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -42,6 +42,27 @@ import collection.JavaConversions._
 object QScriptUtils {
 
   /**
+   * Exchanges the extension on a file.
+   * @param file File to look for the extension.
+   * @param oldExtension Old extension to strip off, if present.
+   * @param newExtension New extension to append.
+   * @return new File with the new extension in the current directory.
+   */
+  def swapExt(file: File, oldExtension: String, newExtension: String) = {
+    val swappedName = file.getName.stripSuffix(oldExtension) + newExtension
+    new File(swappedName)
+  }
+
+  /**
+   * Exchanges the extension on a file.
+   * @param dir New directory for the file.
+   * @param file File to look for the extension.
+   * @param oldExtension Old extension to strip off, if present.
+   * @param newExtension New extension to append.
+   * @return new File with the new extension in dir.
+   */
+  def swapExt(dir: File, file: File, oldExtension: String, newExtension: String) = {
+    val swappedName = file.getName.stripSuffix(oldExtension) + newExtension
+    new File(dir, swappedName)
+  }
+
+  /**
    * Takes a bam list file and produces a scala sequence with each file allowing the bam list
    * to have empty lines and comment lines (lines starting with #).
    */
diff --git a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/util/ReflectionUtils.scala b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/util/ReflectionUtils.scala
index d172e97..c13f088 100644
--- a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/util/ReflectionUtils.scala
+++ b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/util/ReflectionUtils.scala
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/util/RemoteFile.scala b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/util/RemoteFile.scala
index 1410813..5ae4f76 100644
--- a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/util/RemoteFile.scala
+++ b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/util/RemoteFile.scala
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/util/RemoteFileConverter.scala b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/util/RemoteFileConverter.scala
index b38b27a..8f984f4 100644
--- a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/util/RemoteFileConverter.scala
+++ b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/util/RemoteFileConverter.scala
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/util/Retry.scala b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/util/Retry.scala
index 8d1c1cf..4f23f59 100644
--- a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/util/Retry.scala
+++ b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/util/Retry.scala
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/util/RetryException.scala b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/util/RetryException.scala
index ffd69cc..eb0d96b 100644
--- a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/util/RetryException.scala
+++ b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/util/RetryException.scala
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/util/ScalaCompoundArgumentTypeDescriptor.scala b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/util/ScalaCompoundArgumentTypeDescriptor.scala
index 80ab068..c6cf39c 100644
--- a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/util/ScalaCompoundArgumentTypeDescriptor.scala
+++ b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/util/ScalaCompoundArgumentTypeDescriptor.scala
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/util/ShellUtils.scala b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/util/ShellUtils.scala
index 02d767a..ff23690 100644
--- a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/util/ShellUtils.scala
+++ b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/util/ShellUtils.scala
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/util/StringFileConversions.scala b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/util/StringFileConversions.scala
index ee21380..f6e5931 100644
--- a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/util/StringFileConversions.scala
+++ b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/util/StringFileConversions.scala
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/util/SystemUtils.scala b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/util/SystemUtils.scala
index cf85372..440c155 100644
--- a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/util/SystemUtils.scala
+++ b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/util/SystemUtils.scala
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/util/TextFormatUtils.scala b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/util/TextFormatUtils.scala
index c776dd5..cea1b77 100644
--- a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/util/TextFormatUtils.scala
+++ b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/util/TextFormatUtils.scala
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/util/VCF_BAM_utilities.scala b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/util/VCF_BAM_utilities.scala
index 9de4b28..f722f66 100644
--- a/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/util/VCF_BAM_utilities.scala
+++ b/public/gatk-queue/src/main/scala/org/broadinstitute/gatk/queue/util/VCF_BAM_utilities.scala
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-queue/src/test/scala/org/broadinstitute/gatk/queue/function/CommandLineFunctionUnitTest.scala b/public/gatk-queue/src/test/scala/org/broadinstitute/gatk/queue/function/CommandLineFunctionUnitTest.scala
index 351fb71..ab7ca28 100644
--- a/public/gatk-queue/src/test/scala/org/broadinstitute/gatk/queue/function/CommandLineFunctionUnitTest.scala
+++ b/public/gatk-queue/src/test/scala/org/broadinstitute/gatk/queue/function/CommandLineFunctionUnitTest.scala
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-queue/src/test/scala/org/broadinstitute/gatk/queue/pipeline/QueueTest.scala b/public/gatk-queue/src/test/scala/org/broadinstitute/gatk/queue/pipeline/QueueTest.scala
index 62ac8e1..ed1c1c7 100644
--- a/public/gatk-queue/src/test/scala/org/broadinstitute/gatk/queue/pipeline/QueueTest.scala
+++ b/public/gatk-queue/src/test/scala/org/broadinstitute/gatk/queue/pipeline/QueueTest.scala
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -35,9 +35,9 @@ import org.broadinstitute.gatk.utils.MD5DB
 import org.broadinstitute.gatk.queue.{QScript, QCommandLine}
 import org.broadinstitute.gatk.queue.util.Logging
 import java.io.{FilenameFilter, File}
-import org.broadinstitute.gatk.engine.report.GATKReport
 import org.apache.commons.io.FileUtils
 import org.apache.commons.io.filefilter.WildcardFileFilter
+import org.broadinstitute.gatk.utils.report.GATKReport
 
 object QueueTest extends BaseTest with Logging {
 
@@ -53,12 +53,12 @@ object QueueTest extends BaseTest with Logging {
   /**
    * All the job runners configured to run QueueTests at The Broad.
    */
-  final val allJobRunners = Seq("Lsf706", "GridEngine", "Shell")
+  final val allJobRunners = Seq("GridEngine", "Shell", "ParallelShell")
 
   /**
    * The default job runners to run.
    */
-  final val defaultJobRunners = Seq("Lsf706", "GridEngine")
+  final val defaultJobRunners = Seq("GridEngine")
 
   /**
    * Returns the top level output path to this test.
diff --git a/public/gatk-queue/src/test/scala/org/broadinstitute/gatk/queue/pipeline/QueueTestEvalSpec.scala b/public/gatk-queue/src/test/scala/org/broadinstitute/gatk/queue/pipeline/QueueTestEvalSpec.scala
index 764449c..e0df4d9 100644
--- a/public/gatk-queue/src/test/scala/org/broadinstitute/gatk/queue/pipeline/QueueTestEvalSpec.scala
+++ b/public/gatk-queue/src/test/scala/org/broadinstitute/gatk/queue/pipeline/QueueTestEvalSpec.scala
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-queue/src/test/scala/org/broadinstitute/gatk/queue/pipeline/QueueTestSpec.scala b/public/gatk-queue/src/test/scala/org/broadinstitute/gatk/queue/pipeline/QueueTestSpec.scala
index 4fdcb08..ac1802e 100644
--- a/public/gatk-queue/src/test/scala/org/broadinstitute/gatk/queue/pipeline/QueueTestSpec.scala
+++ b/public/gatk-queue/src/test/scala/org/broadinstitute/gatk/queue/pipeline/QueueTestSpec.scala
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-queue/src/test/scala/org/broadinstitute/gatk/queue/util/ShellUtilsUnitTest.scala b/public/gatk-queue/src/test/scala/org/broadinstitute/gatk/queue/util/ShellUtilsUnitTest.scala
index 944cc85..3f3acb5 100644
--- a/public/gatk-queue/src/test/scala/org/broadinstitute/gatk/queue/util/ShellUtilsUnitTest.scala
+++ b/public/gatk-queue/src/test/scala/org/broadinstitute/gatk/queue/util/ShellUtilsUnitTest.scala
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-queue/src/test/scala/org/broadinstitute/gatk/queue/util/StringFileConversionsUnitTest.scala b/public/gatk-queue/src/test/scala/org/broadinstitute/gatk/queue/util/StringFileConversionsUnitTest.scala
index 160e040..090ba86 100644
--- a/public/gatk-queue/src/test/scala/org/broadinstitute/gatk/queue/util/StringFileConversionsUnitTest.scala
+++ b/public/gatk-queue/src/test/scala/org/broadinstitute/gatk/queue/util/StringFileConversionsUnitTest.scala
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-queue/src/test/scala/org/broadinstitute/gatk/queue/util/SystemUtilsUnitTest.scala b/public/gatk-queue/src/test/scala/org/broadinstitute/gatk/queue/util/SystemUtilsUnitTest.scala
index 2fd78e3..e0950ce 100644
--- a/public/gatk-queue/src/test/scala/org/broadinstitute/gatk/queue/util/SystemUtilsUnitTest.scala
+++ b/public/gatk-queue/src/test/scala/org/broadinstitute/gatk/queue/util/SystemUtilsUnitTest.scala
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-root/pom.xml b/public/gatk-root/pom.xml
index c9f2517..085c33b 100644
--- a/public/gatk-root/pom.xml
+++ b/public/gatk-root/pom.xml
@@ -12,7 +12,7 @@
 
     <groupId>org.broadinstitute.gatk</groupId>
     <artifactId>gatk-root</artifactId>
-    <version>3.3</version>
+    <version>3.5</version>
     <packaging>pom</packaging>
     <name>GATK Root</name>
 
@@ -44,8 +44,8 @@
         <test.listeners>org.testng.reporters.FailedReporter,org.testng.reporters.JUnitXMLReporter,org.broadinstitute.gatk.utils.TestNGTestTransformer,org.broadinstitute.gatk.utils.GATKTextReporter,org.uncommons.reportng.HTMLReporter</test.listeners>
 
         <!-- Version numbers for picard and htsjdk -->
-        <htsjdk.version>1.120.1620</htsjdk.version>
-        <picard.version>1.120.1579</picard.version>
+        <htsjdk.version>1.141</htsjdk.version>
+        <picard.version>1.141</picard.version>
     </properties>
 
     <!-- Dependency configuration (versions, etc.) -->
@@ -64,10 +64,10 @@
             <dependency>
                 <groupId>com.google.code.cofoja</groupId>
                 <artifactId>cofoja</artifactId>
-                <version>1.0-r139</version>
+                <version>1.2-20140817</version>
             </dependency>
             <dependency>
-                <groupId>samtools</groupId>
+                <groupId>com.github.samtools</groupId>
                 <artifactId>htsjdk</artifactId>
                 <version>${htsjdk.version}</version>
                 <exclusions>
@@ -82,7 +82,7 @@
                 </exclusions>
             </dependency>
             <dependency>
-                <groupId>picard</groupId>
+                <groupId>com.github.broadinstitute</groupId>
                 <artifactId>picard</artifactId>
                 <version>${picard.version}</version>
             </dependency>
@@ -130,12 +130,18 @@
                 <artifactId>reflections</artifactId>
                 <version>0.9.9-RC1</version>
             </dependency>
+            <!-- slf4j bindings must only be at the package level: http://www.slf4j.org/manual.html -->
             <dependency>
                 <groupId>org.slf4j</groupId>
-                <artifactId>slf4j-log4j12</artifactId>
+                <artifactId>slf4j-api</artifactId>
                 <version>1.6.1</version>
             </dependency>
             <dependency>
+                <groupId>org.slf4j</groupId>
+                <artifactId>slf4j-log4j12</artifactId>
+                <version>1.7.5</version>
+            </dependency>
+            <dependency>
                 <groupId>gov.nist.math</groupId>
                 <artifactId>jama</artifactId>
                 <version>1.0.2</version>
@@ -404,6 +410,7 @@
                             <gatkdir>${gatk.basedir}</gatkdir>
                             <clover.pertest.coverage>diff</clover.pertest.coverage>
                             <java.awt.headless>true</java.awt.headless>
+                            <queuetest.run>${gatk.queuetests.run}</queuetest.run>
                             <java.io.tmpdir>${java.io.tmpdir}</java.io.tmpdir>
                         </systemPropertyVariables>
                     </configuration>
@@ -458,8 +465,7 @@
                             <gatkdir>${gatk.basedir}</gatkdir>
                             <clover.pertest.coverage>diff</clover.pertest.coverage>
                             <java.awt.headless>true</java.awt.headless>
-                            <!-- TODO: Fix BaseTest to not error out if this property is missing. -->
-                            <pipeline.run>${gatk.queuetests.run}</pipeline.run>
+                            <queuetest.run>${gatk.queuetests.run}</queuetest.run>
                             <java.io.tmpdir>${java.io.tmpdir}</java.io.tmpdir>
                         </systemPropertyVariables>
                     </configuration>
@@ -712,6 +718,17 @@
             <name>GATK Public Local Repository</name>
             <url>file:${gatk.basedir}/public/repo</url>
         </repository>
+        <repository>
+            <releases>
+                <enabled>false</enabled>
+            </releases>
+            <snapshots>
+                <enabled>true</enabled>
+            </snapshots>
+            <id>broad.artifactory.snapshots</id>
+            <name>Broad Institute Artifactory SNAPSHOTs</name>
+            <url>https://artifactory.broadinstitute.org/artifactory/libs-snapshot</url>
+        </repository>
     </repositories>
 
 </project>
diff --git a/public/gatk-tools-public/pom.xml b/public/gatk-tools-public/pom.xml
index 5386b32..ad14fa5 100644
--- a/public/gatk-tools-public/pom.xml
+++ b/public/gatk-tools-public/pom.xml
@@ -5,7 +5,7 @@
     <parent>
         <groupId>org.broadinstitute.gatk</groupId>
         <artifactId>gatk-aggregator</artifactId>
-        <version>3.3</version>
+        <version>3.5</version>
         <relativePath>../..</relativePath>
     </parent>
 
@@ -24,6 +24,26 @@
             <artifactId>gatk-engine</artifactId>
             <version>${project.version}</version>
         </dependency>
+        <dependency>
+            <groupId>org.apache.commons</groupId>
+            <artifactId>commons-jexl</artifactId>
+        </dependency>
+
+        <dependency>
+            <groupId>${project.groupId}</groupId>
+            <artifactId>gatk-utils</artifactId>
+            <version>${project.version}</version>
+            <type>test-jar</type>
+            <scope>test</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>${project.groupId}</groupId>
+            <artifactId>gatk-engine</artifactId>
+            <version>${project.version}</version>
+            <type>test-jar</type>
+            <scope>test</scope>
+        </dependency>
 
         <dependency>
             <groupId>com.google.caliper</groupId>
@@ -36,16 +56,6 @@
         <plugins>
             <plugin>
                 <groupId>org.apache.maven.plugins</groupId>
-                <artifactId>maven-resources-plugin</artifactId>
-                <executions>
-                    <execution>
-                        <id>copy-resource-bundle-log4j</id>
-                        <phase>prepare-package</phase>
-                    </execution>
-                </executions>
-            </plugin>
-            <plugin>
-                <groupId>org.apache.maven.plugins</groupId>
                 <artifactId>maven-javadoc-plugin</artifactId>
                 <executions>
                     <execution>
diff --git a/public/gatk-tools-public/src/main/java/htsjdk/samtools/GATKBAMFileSpan.java b/public/gatk-tools-public/src/main/java/htsjdk/samtools/GATKBAMFileSpan.java
deleted file mode 100644
index c2a5e80..0000000
--- a/public/gatk-tools-public/src/main/java/htsjdk/samtools/GATKBAMFileSpan.java
+++ /dev/null
@@ -1,308 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package htsjdk.samtools;
-
-import htsjdk.samtools.util.PeekableIterator;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.Iterator;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.Queue;
-
-/**
- * A temporary solution to work around Java access rights issues:
- * override BAMFileSpan and make it public.
- * TODO: Eliminate once we determine the final fate of the BAM index reading code.
- */
-public class GATKBAMFileSpan extends BAMFileSpan {
-    /**
-     * Create a new empty list of chunks.
-     */
-    public GATKBAMFileSpan() {
-        super();
-    }
-
-    /**
-     * Create a new GATKBAMFileSpan from an existing BAMFileSpan.
-     * @param sourceFileSpan
-     */
-    public GATKBAMFileSpan(SAMFileSpan sourceFileSpan) {
-        if(!(sourceFileSpan instanceof BAMFileSpan))
-            throw new SAMException("Unable to create GATKBAMFileSpan from a SAMFileSpan. Please submit a BAMFileSpan instead");
-        BAMFileSpan sourceBAMFileSpan = (BAMFileSpan)sourceFileSpan;
-        for(Chunk chunk: sourceBAMFileSpan.getChunks())
-            add(chunk instanceof GATKChunk ? chunk : new GATKChunk(chunk));
-    }
-
-    /**
-     * Convenience constructor to construct a BAM file span from
-     * a single chunk.
-     * @param chunk Chunk to use as the sole region in this span.
-     */
-    public GATKBAMFileSpan(final Chunk chunk) {
-        super(chunk);
-    }
-
-    /**
-     * Create a new chunk list from the given list of chunks.
-     * @param chunks Constituent chunks.
-     */
-    public GATKBAMFileSpan(final GATKChunk[] chunks) {
-        super(Arrays.<Chunk>asList(chunks));
-    }
-
-    @Override
-    public boolean equals(final Object other) {
-        if(!(other instanceof BAMFileSpan))
-            return false;
-
-        List<Chunk> theseChunks = getChunks();
-        List<Chunk> otherChunks = ((BAMFileSpan)other).getChunks();
-
-        if(theseChunks.size() != otherChunks.size())
-            return false;
-        for(int i = 0; i < theseChunks.size(); i++) {
-            if(!theseChunks.get(i).equals(otherChunks.get(i)))
-                return false;
-        }
-
-        return true;
-    }
-
-    /**
-     * Gets the constituent chunks stored in this span.
-     * @return An unmodifiable list of chunks.
-     */
-    public List<GATKChunk> getGATKChunks() {
-        List<GATKChunk> gatkChunks = new ArrayList<GATKChunk>();
-        for(Chunk chunk: getChunks())
-            gatkChunks.add(new GATKChunk(chunk));
-        return gatkChunks;
-    }
-
-    public String toString() {
-        StringBuilder builder = new StringBuilder();
-        for(GATKChunk chunk: getGATKChunks())
-            builder.append(String.format("%s;",chunk));
-        return builder.toString();
-    }
-
-    /**
-     * Returns an approximation of the number of uncompressed bytes in this
-     * file span.
-     * @return Approximation of uncompressed bytes in filespan.
-     */
-    public long size() {
-        long size = 0L;
-        for(GATKChunk chunk: getGATKChunks())
-            size += chunk.size();
-        return size;
-    }
-
-    /**
-     * Get a GATKChunk representing the "extent" of this file span, from the start of the first
-     * chunk to the end of the last chunk.The chunks list must be sorted in order to use this method.
-     *
-     * @return a GATKChunk representing the extent of this file span, or a GATKChunk representing
-     *         a span of size 0 if there are no chunks
-     */
-    public GATKChunk getExtent() {
-        validateSorted();   // TODO: defensive measure: may be unnecessary
-
-        List<Chunk> chunks = getChunks();
-        if ( chunks.isEmpty() ) {
-            return new GATKChunk(0L, 0L);
-        }
-
-        return new GATKChunk(chunks.get(0).getChunkStart(), chunks.get(chunks.size() - 1).getChunkEnd());
-    }
-
-    /**
-     * Validates the list of chunks to ensure that they appear in sorted order.
-     */
-    private void validateSorted() {
-        List<Chunk> chunks = getChunks();
-        for ( int i = 1; i < chunks.size(); i++ ) {
-            if ( chunks.get(i).getChunkStart() < chunks.get(i-1).getChunkEnd() ) {
-                throw new ReviewedGATKException(String.format("Chunk list is unsorted; chunk %s is before chunk %s", chunks.get(i-1), chunks.get(i)));
-
-            }
-        }
-    }
-
-    /**
-     * Computes the union of two FileSpans.
-     * @param other FileSpan to union with this one.
-     * @return A file span that's been unioned.
-     */
-    public GATKBAMFileSpan union(final GATKBAMFileSpan other) {
-        // No data?  Return an empty file span.
-        if(getGATKChunks().size() == 0 && other.getGATKChunks().size() == 0)
-            return new GATKBAMFileSpan();
-
-        LinkedList<GATKChunk> unmergedUnion = new LinkedList<GATKChunk>();
-        unmergedUnion.addAll(getGATKChunks());
-        unmergedUnion.addAll(other.getGATKChunks());
-        Collections.sort(unmergedUnion);
-
-        List<GATKChunk> mergedUnion = new ArrayList<GATKChunk>();
-        GATKChunk currentChunk = unmergedUnion.remove();
-        while(!unmergedUnion.isEmpty()) {
-
-            // While the current chunk can be merged with the next chunk:
-            while( ! unmergedUnion.isEmpty() &&
-                   (currentChunk.overlaps(unmergedUnion.peek()) || currentChunk.isAdjacentTo(unmergedUnion.peek())) ) {
-
-                // Merge the current chunk with the next chunk:
-                GATKChunk nextChunk = unmergedUnion.remove();
-                currentChunk = currentChunk.merge(nextChunk);
-            }
-            // Add the accumulated range.
-            mergedUnion.add(currentChunk);
-            currentChunk = !unmergedUnion.isEmpty() ? unmergedUnion.remove() : null;
-        }
-
-        // At end of the loop above, the last chunk will be contained in currentChunk and will not yet have been added.  Add it.
-        if(currentChunk !=null)
-            mergedUnion.add(currentChunk);
-
-        return new GATKBAMFileSpan(mergedUnion.toArray(new GATKChunk[mergedUnion.size()]));
-    }
-
-    /**
-     * Intersects two BAM file spans.
-     * @param other File span to intersect with this one.
-     * @return The intersected BAM file span.
-     */
-    public GATKBAMFileSpan intersection(final GATKBAMFileSpan other) {
-        Iterator<GATKChunk> thisIterator = getGATKChunks().iterator();
-        Iterator<GATKChunk> otherIterator = other.getGATKChunks().iterator();
-
-        if(!thisIterator.hasNext() || !otherIterator.hasNext())
-            return new GATKBAMFileSpan();
-
-        GATKChunk thisChunk = thisIterator.next();
-        GATKChunk otherChunk = otherIterator.next();
-
-        List<GATKChunk> intersected = new ArrayList<GATKChunk>();
-
-        while(thisChunk != null && otherChunk != null) {
-            // If this iterator is before other, skip this ahead.
-            if(thisChunk.getChunkEnd() <= otherChunk.getChunkStart()) {
-                thisChunk = thisIterator.hasNext() ? thisIterator.next() : null;
-                continue;
-            }
-
-            // If other iterator is before this, skip other ahead.
-            if(thisChunk.getChunkStart() >= otherChunk.getChunkEnd()) {
-                otherChunk = otherIterator.hasNext() ? otherIterator.next() : null;
-                continue;
-            }
-
-            // If these two chunks overlap, pull out intersection of data and truncated current chunks to point after
-            // the intersection (or next chunk if no such overlap exists).
-            if(thisChunk.overlaps(otherChunk)) {
-                // Determine the chunk constraints
-                GATKChunk firstChunk = thisChunk.getChunkStart() < otherChunk.getChunkStart() ? thisChunk : otherChunk;
-                GATKChunk secondChunk = thisChunk==firstChunk ? otherChunk : thisChunk;
-                GATKChunk intersectedChunk = new GATKChunk(secondChunk.getChunkStart(),Math.min(firstChunk.getChunkEnd(),secondChunk.getChunkEnd()));
-                intersected.add(intersectedChunk);
-
-                if(thisChunk.getChunkEnd() > intersectedChunk.getChunkEnd())
-                    thisChunk = new GATKChunk(intersectedChunk.getChunkEnd(),thisChunk.getChunkEnd());
-                else
-                    thisChunk = thisIterator.hasNext() ? thisIterator.next() : null;
-
-                if(otherChunk.getChunkEnd() > intersectedChunk.getChunkEnd())
-                    otherChunk = new GATKChunk(intersectedChunk.getChunkEnd(),otherChunk.getChunkEnd());
-                else
-                    otherChunk = otherIterator.hasNext() ? otherIterator.next() : null;
-            }
-
-        }
-
-        return new GATKBAMFileSpan(intersected.toArray(new GATKChunk[intersected.size()]));
-    }
-
-    /**
-     * Substracts other file span from this file span.
-     * @param other File span to strike out.
-     * @return This file span minuse the other file span.
-     */
-
-    public GATKBAMFileSpan minus(final GATKBAMFileSpan other) {
-        Iterator<GATKChunk> thisIterator = getGATKChunks().iterator();
-        Iterator<GATKChunk> otherIterator = other.getGATKChunks().iterator();
-
-        if(!thisIterator.hasNext() || !otherIterator.hasNext())
-            return this;
-
-        GATKChunk thisChunk = thisIterator.next();
-        GATKChunk otherChunk = otherIterator.next();
-
-        List<GATKChunk> subtracted = new ArrayList<GATKChunk>();
-
-        while(thisChunk != null && otherChunk != null) {
-            // If this iterator is before the other, add this to the subtracted list and forge ahead.
-            if(thisChunk.getChunkEnd() <= otherChunk.getChunkStart()) {
-                subtracted.add(thisChunk);
-                thisChunk = thisIterator.hasNext() ? thisIterator.next() : null;
-                continue;
-            }
-
-            // If other iterator is before this, skip other ahead.
-            if(thisChunk.getChunkStart() >= otherChunk.getChunkEnd()) {
-                otherChunk = otherIterator.hasNext() ? otherIterator.next() : null;
-                continue;
-            }
-
-            // If these two chunks overlap, pull out intersection of data and truncated current chunks to point after
-            // the intersection (or next chunk if no such overlap exists).
-            if(thisChunk.overlaps(otherChunk)) {
-                // Add in any sort of prefix that this chunk might have over the other.
-                if(thisChunk.getChunkStart() < otherChunk.getChunkStart())
-                    subtracted.add(new GATKChunk(thisChunk.getChunkStart(),otherChunk.getChunkStart()));
-
-                if(thisChunk.getChunkEnd() > otherChunk.getChunkEnd())
-                    thisChunk = new GATKChunk(otherChunk.getChunkEnd(),thisChunk.getChunkEnd());
-                else
-                    thisChunk = thisIterator.hasNext() ? thisIterator.next() : null;
-            }
-        }
-
-        // Finish up any remaining contents of this that didn't make it into the subtracted array.
-        if(thisChunk != null)
-            subtracted.add(thisChunk);
-        while(thisIterator.hasNext())
-            subtracted.add(thisIterator.next());
-
-        return new GATKBAMFileSpan(subtracted.toArray(new GATKChunk[subtracted.size()]));
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/htsjdk/samtools/GATKBin.java b/public/gatk-tools-public/src/main/java/htsjdk/samtools/GATKBin.java
deleted file mode 100644
index d1e689d..0000000
--- a/public/gatk-tools-public/src/main/java/htsjdk/samtools/GATKBin.java
+++ /dev/null
@@ -1,135 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package htsjdk.samtools;
-
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.List;
-
-/**
- * A temporary solution to work around Java access rights issues:
- * override GATKBin and make it public.
- * TODO: Eliminate once we determine the final fate of the BAM index reading code.
- */
-public class GATKBin implements Comparable<GATKBin> {
-    /**
-     * The reference sequence associated with this bin.
-     */
-    private final int referenceSequence;
-
-    /**
-     * The number of this bin within the BAM file.
-     */
-    private final int binNumber;
-
-    /**
-     * The chunks associated with this bin.
-     */
-    private GATKChunk[] chunkList;
-
-    public GATKBin(Bin bin) {
-        this(bin.getReferenceSequence(),bin.getBinNumber());
-    }
-
-    public GATKBin(final int referenceSequence, final int binNumber) {
-        this.referenceSequence = referenceSequence;
-        this.binNumber = binNumber;
-    }
-
-    public int getReferenceSequence() {
-        return referenceSequence;
-    }
-
-    public int getBinNumber() {
-        return binNumber;
-    }
-
-    /**
-     * Convert this GATKBin to a normal bin, for processing with the standard BAM query interface.
-     * @return
-     */
-    public Bin toBin() {
-        return new Bin(referenceSequence,binNumber);
-    }
-
-    /**
-     * See whether two bins are equal.  If the ref seq and the bin number
-     * are equal, assume equality of the chunk list.
-     * @param other The other Bin to which to compare this.
-     * @return True if the two bins are equal.  False otherwise.
-     */
-    @Override
-    public boolean equals(Object other) {
-        if(other == null) return false;
-        if(!(other instanceof GATKBin)) return false;
-
-        GATKBin otherBin = (GATKBin)other;
-        return this.referenceSequence == otherBin.referenceSequence && this.binNumber == otherBin.binNumber;
-    }
-
-    /**
-     * Compute a unique hash code for the given reference sequence and bin number.
-     * @return A unique hash code.
-     */
-    @Override
-    public int hashCode() {
-        return ((Integer)referenceSequence).hashCode() ^ ((Integer)binNumber).hashCode();
-    }
-
-    /**
-     * Compare two bins to see what ordering they should appear in.
-     * @param other Other bin to which this bin should be compared.
-     * @return -1 if this < other, 0 if this == other, 1 if this > other.
-     */
-    public int compareTo(GATKBin other) {
-        if(other == null)
-            throw new ClassCastException("Cannot compare to a null object");
-
-        // Check the reference sequences first.
-        if(this.referenceSequence != other.referenceSequence)
-            return referenceSequence - other.referenceSequence;
-
-        // Then check the bin ordering.
-        return binNumber - other.binNumber;
-    }
-
-    /**
-     * Sets the chunks associated with this bin
-     */
-    public void setChunkList(GATKChunk[] list){
-        chunkList = list;
-    }
-
-    /**
-     * Gets the list of chunks associated with this bin.
-     * @return the chunks in this bin.  If no chunks are associated, an empty list will be returned.
-     */
-    public GATKChunk[] getChunkList(){
-        if(chunkList == null)
-            return new GATKChunk[0];
-        return chunkList;
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/htsjdk/samtools/GATKChunk.java b/public/gatk-tools-public/src/main/java/htsjdk/samtools/GATKChunk.java
deleted file mode 100644
index aed7aec..0000000
--- a/public/gatk-tools-public/src/main/java/htsjdk/samtools/GATKChunk.java
+++ /dev/null
@@ -1,116 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package htsjdk.samtools;
-
-/**
- * A temporary solution to work around Java access rights issues:
- * override chunk and make it public.
- * TODO: Eliminate once we determine the final fate of the BAM index reading code.
- */
-public class GATKChunk extends Chunk {
-    /**
-     * The average ratio of compressed block size / uncompressed block size, computed empirically
-     * using the output of org.broadinstitute.gatk.engine.datasources.reads.utilities.PrintBGZFBounds.
-     */
-    private static final double AVERAGE_BAM_COMPRESSION_RATIO = 0.39;
-
-    public GATKChunk(final long start, final long stop) {
-        super(start,stop);
-    }
-
-    public GATKChunk(final long blockStart, final int blockOffsetStart, final long blockEnd, final int blockOffsetEnd) {
-        super(blockStart << 16 | blockOffsetStart,blockEnd << 16 | blockOffsetEnd);
-    }
-
-    public GATKChunk(final Chunk chunk) {
-        super(chunk.getChunkStart(),chunk.getChunkEnd());
-    }
-
-    @Override
-    public GATKChunk clone() {
-        return new GATKChunk(getChunkStart(),getChunkEnd());
-    }
-
-    @Override
-    public long getChunkStart() {
-        return super.getChunkStart();
-    }
-
-    @Override
-    public void setChunkStart(final long value) {
-        super.setChunkStart(value);
-    }
-
-    @Override
-    public long getChunkEnd() {
-        return super.getChunkEnd();
-    }
-
-    @Override
-    public void setChunkEnd(final long value) {
-        super.setChunkEnd(value);
-    }
-
-    public long getBlockStart() {
-        return getChunkStart() >>> 16;
-    }
-
-    public int getBlockOffsetStart() {
-        return (int)(getChunkStart() & 0xFFFF);
-    }
-
-    public long getBlockEnd() {
-        return getChunkEnd() >>> 16;
-    }
-
-    public int getBlockOffsetEnd() {
-        return ((int)getChunkEnd() & 0xFFFF);
-    }
-
-    /**
-     * Computes an approximation of the uncompressed size of the
-     * chunk, in bytes.  Can be used to determine relative weights
-     * of chunk size.
-     * @return An approximation of the chunk size in bytes.
-     */
-    public long size() {
-        final long chunkSpan = Math.round(((getChunkEnd()>>16)-(getChunkStart()>>16))/AVERAGE_BAM_COMPRESSION_RATIO);
-        final int offsetSpan = (int)((getChunkEnd()&0xFFFF)-(getChunkStart()&0xFFFF));
-        return chunkSpan + offsetSpan;
-    }
-
-    /**
-     * Merges two chunks together. The caller is responsible for testing whether the
-     * chunks overlap/are adjacent before calling this method!
-     *
-     * @param other the chunk to merge with this chunk
-     * @return a new chunk representing the union of the two chunks (provided the chunks were
-     *         overlapping/adjacent)
-     */
-    public GATKChunk merge ( GATKChunk other ) {
-        return new GATKChunk(Math.min(getChunkStart(), other.getChunkStart()), Math.max(getChunkEnd(), other.getChunkEnd()));
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/htsjdk/samtools/PicardNamespaceUtils.java b/public/gatk-tools-public/src/main/java/htsjdk/samtools/PicardNamespaceUtils.java
deleted file mode 100644
index 00f65e5..0000000
--- a/public/gatk-tools-public/src/main/java/htsjdk/samtools/PicardNamespaceUtils.java
+++ /dev/null
@@ -1,40 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package htsjdk.samtools;
-
-/**
- * Utils that insist on being in the same package as Picard.
- */
-public class PicardNamespaceUtils {
-    /**
-     * Private constructor only.  Do not instantiate.
-     */
-    private PicardNamespaceUtils() {}
-
-    public static void setFileSource(final SAMRecord read, final SAMFileSource fileSource) {
-        read.setFileSource(fileSource);
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/CommandLineExecutable.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/CommandLineExecutable.java
deleted file mode 100644
index b8221bb..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/CommandLineExecutable.java
+++ /dev/null
@@ -1,229 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine;
-
-import org.apache.log4j.Logger;
-import org.broadinstitute.gatk.utils.commandline.ArgumentTypeDescriptor;
-import org.broadinstitute.gatk.utils.commandline.CommandLineProgram;
-import org.broadinstitute.gatk.engine.arguments.GATKArgumentCollection;
-import org.broadinstitute.gatk.engine.datasources.reads.SAMReaderID;
-import org.broadinstitute.gatk.engine.filters.ReadFilter;
-import org.broadinstitute.gatk.engine.io.stubs.OutputStreamArgumentTypeDescriptor;
-import org.broadinstitute.gatk.engine.io.stubs.SAMFileWriterArgumentTypeDescriptor;
-import org.broadinstitute.gatk.engine.io.stubs.VCFWriterArgumentTypeDescriptor;
-import org.broadinstitute.gatk.engine.phonehome.GATKRunReport;
-import org.broadinstitute.gatk.engine.refdata.utils.RMDTriplet;
-import org.broadinstitute.gatk.engine.walkers.Walker;
-import org.broadinstitute.gatk.utils.crypt.CryptUtils;
-import org.broadinstitute.gatk.utils.crypt.GATKKey;
-import org.broadinstitute.gatk.utils.exceptions.UserException;
-import org.broadinstitute.gatk.utils.text.ListFileUtils;
-
-import java.security.PublicKey;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collection;
-
-/**
- * @author aaron
- */
-public abstract class CommandLineExecutable extends CommandLineProgram {
-    /**
-     * The actual engine which performs the analysis.
-     */
-    protected GenomeAnalysisEngine engine = new GenomeAnalysisEngine();
-
-    // get the analysis name
-    public abstract String getAnalysisName();
-
-    /**
-     * Gets the GATK argument bundle.
-     * @return A structure consisting of whatever arguments should be used to initialize the GATK engine.
-     */
-    protected abstract GATKArgumentCollection getArgumentCollection();
-
-    /**
-     * A list of all the arguments initially used as sources.
-     */
-    private final Collection<Object> argumentSources = new ArrayList<Object>();
-
-    protected static Logger logger = Logger.getLogger(CommandLineExecutable.class);
-
-    /**
-     * this is the function that the inheriting class can expect to have called
-     * when the command line system has initialized.
-     *
-     * @return the return code to exit the program with
-     */
-    protected int execute() throws Exception {
-        engine.setParser(parser);
-        argumentSources.add(this);
-
-        Walker<?,?> walker = engine.getWalkerByName(getAnalysisName());
-
-        try {
-            // Make sure a valid GATK user key is present, if required.
-            authorizeGATKRun();
-
-            engine.setArguments(getArgumentCollection());
-
-            // File lists can require a bit of additional expansion.  Set these explicitly by the engine. 
-            final Collection<SAMReaderID> bamFileList=ListFileUtils.unpackBAMFileList(getArgumentCollection().samFiles,parser);
-            engine.setSAMFileIDs(bamFileList);
-            if(getArgumentCollection().showFullBamList){
-                logger.info(String.format("Adding the following input SAM Files: %s",bamFileList.toString()));
-            }
-
-            engine.setWalker(walker);
-            walker.setToolkit(engine);
-
-            Collection<ReadFilter> filters = engine.createFilters();
-            engine.setFilters(filters);
-
-            // load the arguments into the walker / filters.
-            // TODO: The fact that this extra load call exists here when all the parsing happens at the engine
-            // TODO: level indicates that we're doing something wrong.  Turn this around so that the GATK can drive
-            // TODO: argument processing.
-            loadArgumentsIntoObject(walker);
-            argumentSources.add(walker);
-
-            Collection<RMDTriplet> rodBindings = ListFileUtils.unpackRODBindings(parser.getRodBindings(), parser);
-            engine.setReferenceMetaDataFiles(rodBindings);
-
-            for (ReadFilter filter: filters) {
-                loadArgumentsIntoObject(filter);
-                argumentSources.add(filter);
-            }
-
-            engine.execute();
-            generateGATKRunReport(walker);
-        } catch ( Exception e ) {
-            generateGATKRunReport(walker, e);
-            throw e;
-        }
-
-        // always return 0
-        return 0;
-    }
-
-    /**
-     * Authorizes this run of the GATK by checking for a valid GATK user key, if required.
-     * Currently, a key is required only if running with the -et NO_ET or -et STDOUT options.
-     */
-    private void authorizeGATKRun() {
-        if ( getArgumentCollection().phoneHomeType == GATKRunReport.PhoneHomeOption.NO_ET ||
-             getArgumentCollection().phoneHomeType == GATKRunReport.PhoneHomeOption.STDOUT ) {
-            if ( getArgumentCollection().gatkKeyFile == null ) {
-                throw new UserException("Running with the -et NO_ET or -et STDOUT option requires a GATK Key file. " +
-                                        "Please see " + UserException.PHONE_HOME_DOCS_URL +
-                                        " for more information and instructions on how to obtain a key.");
-            }
-            else {
-                PublicKey gatkPublicKey = CryptUtils.loadGATKDistributedPublicKey();
-                GATKKey gatkUserKey = new GATKKey(gatkPublicKey, getArgumentCollection().gatkKeyFile);
-
-                if ( ! gatkUserKey.isValid() ) {
-                    throw new UserException.KeySignatureVerificationException(getArgumentCollection().gatkKeyFile);
-                }
-            }
-        }
-    }
-
-    /**
-     * Generate the GATK run report for this walker using the current GATKEngine, if -et is enabled.
-     * This report will be written to either STDOUT or to the run repository, depending on the options
-     * for -et.
-     *
-     * @param e the exception, can be null if no exception occurred
-     */
-    private void generateGATKRunReport(Walker<?,?> walker, Exception e) {
-        if ( getArgumentCollection().phoneHomeType != GATKRunReport.PhoneHomeOption.NO_ET ) {
-            GATKRunReport report = new GATKRunReport(walker, e, engine, getArgumentCollection().phoneHomeType );
-            report.postReport(getArgumentCollection().phoneHomeType);
-        }
-    }
-
-    /**
-     * Convenience method for fully parameterized generateGATKRunReport when an exception has
-     * not occurred
-     *
-     * @param walker
-     */
-    private void generateGATKRunReport(Walker<?,?> walker) {
-        generateGATKRunReport(walker, null);
-    }
-
-    /**
-     * Subclasses of CommandLinePrograms can provide their own types of command-line arguments.
-     * @return A collection of type descriptors generating implementation-dependent placeholders.
-     */
-    protected Collection<ArgumentTypeDescriptor> getArgumentTypeDescriptors() {
-        return Arrays.asList( new VCFWriterArgumentTypeDescriptor(engine,System.out,argumentSources),
-                              new SAMFileWriterArgumentTypeDescriptor(engine,System.out),
-                              new OutputStreamArgumentTypeDescriptor(engine,System.out) );
-    }
-
-    /**
-     * GATK can add arguments dynamically based on analysis type.
-     *
-     * @return true
-     */
-    @Override
-    protected boolean canAddArgumentsDynamically() {
-        return true;
-    }
-
-    /**
-     * GATK provides the walker as an argument source.
-     * @return List of walkers to load dynamically.
-     */
-    @Override
-    protected Class[] getArgumentSources() {
-        // No walker info?  No plugins.
-        if (getAnalysisName() == null) return new Class[] {};
-
-        Collection<Class> argumentSources = new ArrayList<Class>();
-
-        Walker walker = engine.getWalkerByName(getAnalysisName());
-        engine.setArguments(getArgumentCollection());
-        engine.setWalker(walker);
-        walker.setToolkit(engine);
-        argumentSources.add(walker.getClass());
-
-        Collection<ReadFilter> filters = engine.createFilters();
-        for(ReadFilter filter: filters)
-            argumentSources.add(filter.getClass());
-
-        Class[] argumentSourcesAsArray = new Class[argumentSources.size()];
-        return argumentSources.toArray(argumentSourcesAsArray);
-    }
-
-    @Override
-    protected String getArgumentSourceName( Class argumentSource ) {
-        return engine.getWalkerName((Class<Walker>)argumentSource);
-    }
-
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/CommandLineGATK.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/CommandLineGATK.java
deleted file mode 100644
index f88c413..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/CommandLineGATK.java
+++ /dev/null
@@ -1,385 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine;
-
-import picard.PicardException;
-import htsjdk.samtools.SAMException;
-import htsjdk.tribble.TribbleException;
-import org.broadinstitute.gatk.utils.commandline.Argument;
-import org.broadinstitute.gatk.utils.commandline.ArgumentCollection;
-import org.broadinstitute.gatk.utils.commandline.CommandLineProgram;
-import org.broadinstitute.gatk.engine.arguments.GATKArgumentCollection;
-import org.broadinstitute.gatk.engine.refdata.tracks.FeatureManager;
-import org.broadinstitute.gatk.engine.walkers.Attribution;
-import org.broadinstitute.gatk.engine.walkers.Walker;
-import org.broadinstitute.gatk.utils.exceptions.UserException;
-import org.broadinstitute.gatk.utils.help.*;
-import org.broadinstitute.gatk.utils.text.TextFormattingUtils;
-
-import java.util.*;
-
-/**
- * All command line parameters accepted by all tools in the GATK.
- *
- * <h3>Info for general users</h3>
- *
- * <p>This is a list of options and parameters that are generally available to all tools in the GATK.</p>
- *
- * <p>There may be a few restrictions, which are indicated in individual argument descriptions. For example the -BQSR
- * argument is only meant to be used with a subset of tools, and the -pedigree argument will only be effectively used
- * by a subset of tools as well. Some arguments conflict with others, and some conversely are dependent on others. This
- * is all indicated in the detailed argument descriptions, so be sure to read those in their entirety rather than just
- * skimming the one-line summaey in the table.</p>
- *
- * <h3>Info for developers</h3>
- *
- * <p>This class is the GATK engine itself, which manages map/reduce data access and runs walkers.</p>
- *
- * <p>We run command line GATK programs using this class. It gets the command line args, parses them, and hands the
- * gatk all the parsed out information. Pretty much anything dealing with the underlying system should go here;
- * the GATK engine should deal with any data related information.</p>
- */
- at DocumentedGATKFeature(groupName = HelpConstants.DOCS_CAT_ENGINE)
-public class CommandLineGATK extends CommandLineExecutable {
-    /**
-     * A complete list of tools (sometimes also called walkers because they "walk" through the data to perform analyses)
-     * is available in the online documentation.
-     */
-    @Argument(fullName = "analysis_type", shortName = "T", doc = "Name of the tool to run")
-    private String analysisName = null;
-
-    // our argument collection, the collection of command line args we accept
-    @ArgumentCollection
-    private GATKArgumentCollection argCollection = new GATKArgumentCollection();
-
-    /**
-     * Get pleasing info about the GATK.
-     *
-     * @return A list of Strings that contain pleasant info about the GATK.
-     */
-    @Override
-    protected ApplicationDetails getApplicationDetails() {
-        return new ApplicationDetails(createApplicationHeader(),
-                getAttribution(),
-                ApplicationDetails.createDefaultRunningInstructions(getClass()),
-                getAdditionalHelp());
-    }
-
-    @Override
-    public String getAnalysisName() {
-        return analysisName;
-    }
-
-    @Override
-    protected GATKArgumentCollection getArgumentCollection() {
-        return argCollection;
-    }
-
-    /**
-     * Required main method implementation.
-     */
-    public static void main(String[] argv) {
-        try {
-            CommandLineGATK instance = new CommandLineGATK();
-            start(instance, argv);
-            System.exit(CommandLineProgram.result); // todo -- this is a painful hack
-        } catch (UserException e) {
-            exitSystemWithUserError(e);
-        } catch (TribbleException e) {
-            // We can generate Tribble Exceptions in weird places when e.g. VCF genotype fields are
-            //   lazy loaded, so they aren't caught elsewhere and made into User Exceptions
-            exitSystemWithUserError(e);
-        } catch(PicardException e) {
-            // TODO: Should Picard exceptions be, in general, UserExceptions or ReviewedGATKExceptions?
-            exitSystemWithError(e);
-        } catch (SAMException e) {
-            checkForMaskedUserErrors(e);
-            exitSystemWithSamError(e);
-        } catch (OutOfMemoryError e) {
-            exitSystemWithUserError(new UserException.NotEnoughMemory());
-        } catch (Throwable t) {
-            checkForMaskedUserErrors(t);
-            exitSystemWithError(t);
-        }
-    }
-
-    public static final String PICARD_TEXT_SAM_FILE_ERROR_1 = "Cannot use index file with textual SAM file";
-    public static final String PICARD_TEXT_SAM_FILE_ERROR_2 = "Cannot retrieve file pointers within SAM text files";
-    public static final String NO_SPACE_LEFT_ON_DEVICE_ERROR = "No space left on device";
-    public static final String DISK_QUOTA_EXCEEDED_ERROR = "Disk quota exceeded";
-
-    private static void checkForMaskedUserErrors(final Throwable t) {
-        // masked out of memory error
-        if ( t instanceof OutOfMemoryError )
-            exitSystemWithUserError(new UserException.NotEnoughMemory());
-        // masked user error
-        if ( t instanceof UserException || t instanceof TribbleException )
-            exitSystemWithUserError(new UserException(t.getMessage()));
-
-        // no message means no masked error
-        final String message = t.getMessage();
-        if ( message == null )
-            return;
-
-        // too many open files error
-        if ( message.contains("Too many open files") )
-            exitSystemWithUserError(new UserException.TooManyOpenFiles());
-
-        // malformed BAM looks like a SAM file
-        if ( message.contains(PICARD_TEXT_SAM_FILE_ERROR_1) || message.contains(PICARD_TEXT_SAM_FILE_ERROR_2) )
-            exitSystemWithSamError(t);
-
-        // can't close tribble index when writing
-        if ( message.contains("Unable to close index for") )
-            exitSystemWithUserError(new UserException(t.getCause() == null ? message : t.getCause().getMessage()));
-
-        // disk is full
-        if ( message.contains(NO_SPACE_LEFT_ON_DEVICE_ERROR) || message.contains(DISK_QUOTA_EXCEEDED_ERROR) )
-            exitSystemWithUserError(new UserException.NoSpaceOnDevice());
-
-        // masked error wrapped in another one
-        if ( t.getCause() != null )
-            checkForMaskedUserErrors(t.getCause());
-    }
-
-    /**
-     * Creates the a short blurb about the GATK, copyright info, and where to get documentation.
-     *
-     * @return The application header.
-     */
-    public static List<String> createApplicationHeader() {
-        List<String> header = new ArrayList<String>();
-        header.add(String.format("The Genome Analysis Toolkit (GATK) v%s, Compiled %s",getVersionNumber(), getBuildTime()));
-        header.add("Copyright (c) 2010 The Broad Institute");
-        header.add("For support and documentation go to " + HelpConstants.BASE_GATK_URL);
-        return header;
-    }
-
-    public static String getVersionNumber() {
-        ResourceBundle headerInfo = TextFormattingUtils.loadResourceBundle("GATKText");
-        return headerInfo.containsKey("org.broadinstitute.gatk.tools.version") ? headerInfo.getString("org.broadinstitute.gatk.tools.version") : "<unknown>";
-    }
-
-    public static String getBuildTime() {
-        ResourceBundle headerInfo = TextFormattingUtils.loadResourceBundle("GATKText");
-        return headerInfo.containsKey("build.timestamp") ? headerInfo.getString("build.timestamp") : "<unknown>";
-    }
-
-    /**
-     * If the user supplied any additional attribution, return it here.
-     * @return Additional attribution if supplied by the user.  Empty (non-null) list otherwise.
-     */
-    private List<String> getAttribution() {
-        List<String> attributionLines = new ArrayList<String>();
-
-        // If no analysis name is present, fill in extra help on the walkers.
-        WalkerManager walkerManager = engine.getWalkerManager();
-        String analysisName = getAnalysisName();
-        if(analysisName != null && walkerManager.exists(analysisName)) {
-            Class<? extends Walker> walkerType = walkerManager.getWalkerClassByName(analysisName);
-            if(walkerType.isAnnotationPresent(Attribution.class))
-                attributionLines.addAll(Arrays.asList(walkerType.getAnnotation(Attribution.class).value()));
-        }
-        return attributionLines;
-    }
-
-    /**
-     * Retrieves additional information about GATK walkers.
-     * the code in HelpFormatter and supply it as a helper to this method.
-     *
-     * @return A string summarizing the walkers available in this distribution.
-     */
-    private String getAdditionalHelp() {
-        String additionalHelp;
-
-        // If no analysis name is present, fill in extra help on the walkers.
-        WalkerManager walkerManager = engine.getWalkerManager();
-        String analysisName = getAnalysisName();
-        if(analysisName != null && walkerManager.exists(getAnalysisName()))
-            additionalHelp = getWalkerHelp(walkerManager.getWalkerClassByName(getAnalysisName()));
-        else
-            additionalHelp = getAllWalkerHelp();
-
-        return additionalHelp;
-    }
-
-    private static final int PACKAGE_INDENT = 1;
-    private static final int WALKER_INDENT = 3;
-    private static final String FIELD_SEPARATOR = "  ";
-
-    private String getWalkerHelp(Class<? extends Walker> walkerType) {
-        // Construct a help string to output details on this walker.
-        StringBuilder additionalHelp = new StringBuilder();
-        Formatter formatter = new Formatter(additionalHelp);
-
-        formatter.format("Available Reference Ordered Data types:%n");
-        formatter.format(new FeatureManager().userFriendlyListOfAvailableFeatures());
-        formatter.format("%n");
-
-        formatter.format("For a full description of this walker, see its GATKdocs at:%n");
-        formatter.format("%s%n", GATKDocUtils.helpLinksToGATKDocs(walkerType));
-
-        return additionalHelp.toString();
-    }
-
-    /**
-     * Load in additional help information about all available walkers.
-     * @return A string representation of the additional help.
-     */
-    private String getAllWalkerHelp() {
-        // Construct a help string to output available walkers.
-        StringBuilder additionalHelp = new StringBuilder();
-        Formatter formatter = new Formatter(additionalHelp);
-
-        // Get the list of walker names from the walker manager.
-        WalkerManager walkerManager = engine.getWalkerManager();
-
-        // Build a list sorted by walker display name.  As this information is collected, keep track of the longest
-        // package / walker name for later formatting.
-        SortedSet<HelpEntry> helpText = new TreeSet<HelpEntry>(new HelpEntryComparator());
-        
-        int longestPackageName = 0;
-        int longestWalkerName = 0;
-        for(Map.Entry<String,Collection<Class<? extends Walker>>> walkersByPackage: walkerManager.getWalkerNamesByPackage(true).entrySet()) {
-            // Get the display name.
-            String packageName = walkersByPackage.getKey();
-            String packageDisplayName = walkerManager.getPackageDisplayName(walkersByPackage.getKey());
-            String packageHelpText = walkerManager.getPackageSummaryText(packageName);
-
-            // Compute statistics about which names is longest.
-            longestPackageName = Math.max(longestPackageName,packageDisplayName.length());
-
-            SortedSet<HelpEntry> walkersInPackage = new TreeSet<HelpEntry>(new HelpEntryComparator());
-            for(Class<? extends Walker> walkerType: walkersByPackage.getValue()) {
-                String walkerName = walkerType.getName();
-                String walkerDisplayName = walkerManager.getName(walkerType);
-                String walkerHelpText = walkerManager.getWalkerSummaryText(walkerType);                
-
-                longestWalkerName = Math.max(longestWalkerName,walkerManager.getName(walkerType).length());
-
-                walkersInPackage.add(new HelpEntry(walkerName,walkerDisplayName,walkerHelpText));
-            }
-
-            // Dump the walkers into the sorted set.
-            helpText.add(new HelpEntry(packageName,packageDisplayName,packageHelpText,Collections.unmodifiableSortedSet(walkersInPackage)));
-        }
-
-        final int headerWidth = Math.max(longestPackageName+PACKAGE_INDENT,longestWalkerName+WALKER_INDENT);
-
-
-        for(HelpEntry packageHelp: helpText) {
-            printDescriptorLine(formatter,PACKAGE_INDENT,packageHelp.displayName,headerWidth,FIELD_SEPARATOR,packageHelp.summary,TextFormattingUtils.DEFAULT_LINE_WIDTH);
-            
-            for(HelpEntry walkerHelp: packageHelp.children)
-                printDescriptorLine(formatter,WALKER_INDENT,walkerHelp.displayName,headerWidth,FIELD_SEPARATOR,walkerHelp.summary,TextFormattingUtils.DEFAULT_LINE_WIDTH);
-
-            // Print a blank line between sets of walkers.
-            printDescriptorLine(formatter,0,"",headerWidth,FIELD_SEPARATOR,"", TextFormattingUtils.DEFAULT_LINE_WIDTH);
-        }
-
-        return additionalHelp.toString();
-    }
-
-    private void printDescriptorLine(Formatter formatter,
-                                     int headerIndentWidth,
-                                     String header,
-                                     int headerWidth,
-                                     String fieldSeparator,
-                                     String description,
-                                     int lineWidth) {
-        final int headerPaddingWidth = headerWidth - header.length() - headerIndentWidth;
-        final int descriptionWidth = lineWidth - fieldSeparator.length() - headerWidth;
-        List<String> wordWrappedText = TextFormattingUtils.wordWrap(description,descriptionWidth);
-
-        String headerIndentFormatString  = headerIndentWidth  > 0 ? "%" + headerIndentWidth  + "s" : "%s";
-        String headerPaddingFormatString = headerPaddingWidth > 0 ? "%" + headerPaddingWidth + "s" : "%s";
-        String headerWidthFormatString   = headerWidth        > 0 ? "%" + headerWidth        + "s" : "%s";
-
-        // Output description line.
-        formatter.format(headerIndentFormatString + "%s" + headerPaddingFormatString + "%s%s%n",
-                "", header, "", fieldSeparator, wordWrappedText.size()>0?wordWrappedText.get(0):"");
-        for(int i = 1; i < wordWrappedText.size(); i++)
-            formatter.format(headerWidthFormatString + "%s%s%n", "", fieldSeparator, wordWrappedText.get(i));
-    }
-
-}
-
-/**
- * Represents a given help entry; contains a display name, a summary and optionally some children.
- */
-class HelpEntry {
-    public final String uid;
-    public final String displayName;
-    public final String summary;
-    public final SortedSet<HelpEntry> children;
-
-    /**
-     * Create a new help entry with the given display name, summary and children.
-     * @param uid a unique identifier.  Usually, the java package.
-     * @param displayName display name for this help entry.
-     * @param summary summary for this help entry.
-     * @param children children for this help entry.
-     */
-    public HelpEntry(String uid, String displayName, String summary, SortedSet<HelpEntry> children)  {
-        this.uid = uid;
-        this.displayName = displayName;
-        this.summary = summary;
-        this.children = children;
-    }
-
-    /**
-     * Create a new help entry with the given display name, summary and children.
-     * @param uid a unique identifier.  Usually, the java package.
-     * @param displayName display name for this help entry.
-     * @param summary summary for this help entry.
-     */
-    public HelpEntry(String uid, String displayName, String summary) {
-        this(uid,displayName,summary,null);
-    }
-
-}
-
-/**
- * Compare two help entries by display name.
- */
-class HelpEntryComparator implements Comparator<HelpEntry> {
-    private static TextFormattingUtils.CaseInsensitiveComparator textComparator = new TextFormattingUtils.CaseInsensitiveComparator();
-
-    /**
-     * Compares the order of lhs to rhs, not taking case into account.
-     * @param lhs First object to compare.
-     * @param rhs Second object to compare.
-     * @return 0 if objects are identical; -1 if lhs is before rhs, 1 if rhs is before lhs.  Nulls are treated as after everything else.
-     */
-    public int compare(HelpEntry lhs, HelpEntry rhs) {
-        if(lhs == null && rhs == null) return 0;
-        if(lhs == null || lhs.displayName.equals("")) return 1;
-        if(rhs == null || rhs.displayName.equals("")) return -1;
-        return lhs.displayName.equals(rhs.displayName) ? textComparator.compare(lhs.uid,rhs.uid) : textComparator.compare(lhs.displayName,rhs.displayName);
-    }
-
-
-}
\ No newline at end of file
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/GenomeAnalysisEngine.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/GenomeAnalysisEngine.java
deleted file mode 100644
index abb6993..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/GenomeAnalysisEngine.java
+++ /dev/null
@@ -1,1280 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine;
-
-import com.google.java.contract.Ensures;
-import htsjdk.samtools.SAMFileHeader;
-import htsjdk.samtools.SAMRecord;
-import htsjdk.samtools.SAMSequenceDictionary;
-import htsjdk.samtools.reference.IndexedFastaSequenceFile;
-import htsjdk.samtools.reference.ReferenceSequenceFile;
-import htsjdk.variant.vcf.VCFConstants;
-import org.apache.log4j.Logger;
-import org.broadinstitute.gatk.engine.arguments.GATKArgumentCollection;
-import org.broadinstitute.gatk.engine.arguments.ValidationExclusion;
-import org.broadinstitute.gatk.engine.datasources.reads.*;
-import org.broadinstitute.gatk.engine.datasources.reference.ReferenceDataSource;
-import org.broadinstitute.gatk.engine.datasources.rmd.ReferenceOrderedDataSource;
-import org.broadinstitute.gatk.engine.downsampling.DownsamplingMethod;
-import org.broadinstitute.gatk.engine.executive.MicroScheduler;
-import org.broadinstitute.gatk.engine.filters.FilterManager;
-import org.broadinstitute.gatk.engine.filters.ReadFilter;
-import org.broadinstitute.gatk.engine.filters.ReadGroupBlackListFilter;
-import org.broadinstitute.gatk.engine.io.OutputTracker;
-import org.broadinstitute.gatk.engine.io.stubs.SAMFileWriterStub;
-import org.broadinstitute.gatk.engine.io.stubs.Stub;
-import org.broadinstitute.gatk.engine.io.stubs.VariantContextWriterStub;
-import org.broadinstitute.gatk.engine.iterators.ReadTransformer;
-import org.broadinstitute.gatk.engine.iterators.ReadTransformersMode;
-import org.broadinstitute.gatk.engine.phonehome.GATKRunReport;
-import org.broadinstitute.gatk.engine.refdata.tracks.IndexDictionaryUtils;
-import org.broadinstitute.gatk.engine.refdata.tracks.RMDTrackBuilder;
-import org.broadinstitute.gatk.engine.refdata.utils.RMDTriplet;
-import org.broadinstitute.gatk.engine.resourcemanagement.ThreadAllocation;
-import org.broadinstitute.gatk.engine.samples.SampleDB;
-import org.broadinstitute.gatk.engine.samples.SampleDBBuilder;
-import org.broadinstitute.gatk.engine.walkers.*;
-import org.broadinstitute.gatk.tools.walkers.genotyper.IndexedSampleList;
-import org.broadinstitute.gatk.tools.walkers.genotyper.SampleList;
-import org.broadinstitute.gatk.utils.*;
-import org.broadinstitute.gatk.utils.classloader.PluginManager;
-import org.broadinstitute.gatk.utils.commandline.*;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-import org.broadinstitute.gatk.utils.exceptions.UserException;
-import org.broadinstitute.gatk.utils.interval.IntervalUtils;
-import org.broadinstitute.gatk.utils.progressmeter.ProgressMeter;
-import org.broadinstitute.gatk.utils.recalibration.BQSRArgumentSet;
-import org.broadinstitute.gatk.utils.sam.ReadUtils;
-import org.broadinstitute.gatk.utils.text.XReadLines;
-import org.broadinstitute.gatk.utils.threading.ThreadEfficiencyMonitor;
-
-import java.io.File;
-import java.io.FileNotFoundException;
-import java.util.*;
-import java.util.concurrent.TimeUnit;
-
-import static org.broadinstitute.gatk.utils.DeprecatedToolChecks.getWalkerDeprecationInfo;
-import static org.broadinstitute.gatk.utils.DeprecatedToolChecks.isDeprecatedWalker;
-
-/**
- * A GenomeAnalysisEngine that runs a specified walker.
- */
-public class GenomeAnalysisEngine {
-    /**
-     * our log, which we want to capture anything from this class
-     */
-    private static Logger logger = Logger.getLogger(GenomeAnalysisEngine.class);
-    public static final long NO_RUNTIME_LIMIT = -1;
-
-    /**
-     * The GATK command-line argument parsing code.
-     */
-    private ParsingEngine parsingEngine;
-
-    /**
-     * The genomeLocParser can create and parse GenomeLocs.
-     */
-    private GenomeLocParser genomeLocParser;
-
-    /**
-     * Accessor for sharded read data.
-     */
-    private SAMDataSource readsDataSource = null;
-
-    /**
-     * Accessor for sharded reference data.
-     */
-    private ReferenceDataSource referenceDataSource = null;
-
-    /**
-     * Accessor for sample metadata
-     */
-    private SampleDB sampleDB = new SampleDB();
-
-    /**
-     * Accessor for sharded reference-ordered data.
-     */
-    private List<ReferenceOrderedDataSource> rodDataSources;
-
-    // our argument collection
-    private GATKArgumentCollection argCollection;
-
-    /**
-     * Collection of intervals used by the engine.
-     */
-    private GenomeLocSortedSet intervals = null;
-
-    /**
-     * Explicitly assign the interval set to use for this traversal (for unit testing purposes)
-     * @param intervals set of intervals to use for this traversal
-     */
-    public void setIntervals( GenomeLocSortedSet intervals ) {
-        this.intervals = intervals;
-    }
-
-    /**
-     * Collection of inputs used by the engine.
-     */
-    private Map<ArgumentSource, Object> inputs = new HashMap<ArgumentSource, Object>();
-
-    /**
-     * Collection of outputs used by the engine.
-     */
-    private Collection<Stub<?>> outputs = new ArrayList<Stub<?>>();
-
-    /**
-     * Collection of the filters applied to the input data.
-     */
-    private Collection<ReadFilter> filters;
-
-    /**
-     * Collection of the read transformers applied to the reads
-     */
-    private List<ReadTransformer> readTransformers;
-
-    /**
-     * Controls the allocation of threads between CPU vs IO.
-     */
-    private ThreadAllocation threadAllocation;
-
-    private ReadMetrics cumulativeMetrics = null;
-
-    /**
-     * A currently hacky unique name for this GATK instance
-     */
-    private String myName = "GATK_" + Math.abs(getRandomGenerator().nextInt());
-
-    /**
-     * our walker manager
-     */
-    private final WalkerManager walkerManager = new WalkerManager();
-
-    private Walker<?, ?> walker;
-
-    public void setWalker(Walker<?, ?> walker) {
-        this.walker = walker;
-    }
-
-    /**
-     * The short name of the current GATK walker as a string
-     * @return a non-null String
-     */
-    public String getWalkerName() {
-        return getWalkerName(walker.getClass());
-    }
-
-    /**
-     * A processed collection of SAM reader identifiers.
-     */
-    private Collection<SAMReaderID> samReaderIDs = Collections.emptyList();
-
-    /**
-     * Set the SAM/BAM files over which to traverse.
-     * @param samReaderIDs Collection of ids to use during this traversal.
-     */
-    public void setSAMFileIDs(Collection<SAMReaderID> samReaderIDs) {
-        this.samReaderIDs = samReaderIDs;
-    }
-
-    /**
-     * Collection of reference metadata files over which to traverse.
-     */
-    private Collection<RMDTriplet> referenceMetaDataFiles;
-
-    /**
-     * The threading efficiency monitor we use in the GATK to monitor our efficiency.
-     *
-     * May be null if one isn't active, or hasn't be initialized yet
-     */
-    private ThreadEfficiencyMonitor threadEfficiencyMonitor = null;
-
-    /**
-     * The global progress meter we are using to track our progress through the genome
-     */
-    private ProgressMeter progressMeter = null;
-
-    /**
-     * Set the reference metadata files to use for this traversal.
-     * @param referenceMetaDataFiles Collection of files and descriptors over which to traverse.
-     */
-    public void setReferenceMetaDataFiles(Collection<RMDTriplet> referenceMetaDataFiles) {
-        this.referenceMetaDataFiles = referenceMetaDataFiles;
-    }
-
-    /**
-     * The maximum runtime of this engine, in nanoseconds, set during engine initialization
-     * from the GATKArgumentCollection command line value
-     */
-    private long runtimeLimitInNanoseconds = -1;
-
-    /**
-     *  Static random number generator and seed.
-     */
-    private static final long GATK_RANDOM_SEED = 47382911L;
-    private static Random randomGenerator = new Random(GATK_RANDOM_SEED);
-    public static Random getRandomGenerator() { return randomGenerator; }
-    public static void resetRandomGenerator() { randomGenerator.setSeed(GATK_RANDOM_SEED); }
-    public static void resetRandomGenerator(long seed) { randomGenerator.setSeed(seed); }
-
-    /**
-     *  Base Quality Score Recalibration helper object
-     */
-    private BQSRArgumentSet bqsrArgumentSet = null;
-    public BQSRArgumentSet getBQSRArgumentSet() { return bqsrArgumentSet; }
-    public boolean hasBQSRArgumentSet() { return bqsrArgumentSet != null; }
-    public void setBaseRecalibration(final GATKArgumentCollection args) {
-        bqsrArgumentSet = new BQSRArgumentSet(args);
-    }
-
-    /**
-     * Actually run the GATK with the specified walker.
-     *
-     * @return the value of this traversal.
-     */
-    public Object execute() {
-        // first thing is to make sure the AWS keys can be decrypted
-        GATKRunReport.checkAWSAreValid();
-
-        //HeapSizeMonitor monitor = new HeapSizeMonitor();
-        //monitor.start();
-        setStartTime(new java.util.Date());
-
-        final GATKArgumentCollection args = this.getArguments();
-
-        // validate our parameters
-        if (args == null) {
-            throw new ReviewedGATKException("The GATKArgumentCollection passed to GenomeAnalysisEngine can not be null.");
-        }
-
-        // validate our parameters              
-        if (this.walker == null)
-            throw new ReviewedGATKException("The walker passed to GenomeAnalysisEngine can not be null.");
-
-        if (args.nonDeterministicRandomSeed)
-            resetRandomGenerator(System.currentTimeMillis());
-
-        // if the use specified an input BQSR recalibration table then enable on the fly recalibration
-        if (args.BQSR_RECAL_FILE != null)
-            setBaseRecalibration(args);
-
-        // setup the runtime limits
-        setupRuntimeLimits(args);
-
-        // Determine how the threads should be divided between CPU vs. IO.
-        determineThreadAllocation();
-
-        // Prepare the data for traversal.
-        initializeDataSources();
-
-        // initialize and validate the interval list
-        initializeIntervals();
-        validateSuppliedIntervals();
-
-        // check to make sure that all sequence dictionaries are compatible with the reference's sequence dictionary
-        validateDataSourcesAgainstReference(readsDataSource, referenceDataSource.getReference(), rodDataSources);
-
-        // initialize sampleDB
-        initializeSampleDB();
-
-        // our microscheduler, which is in charge of running everything
-        MicroScheduler microScheduler = createMicroscheduler();
-        threadEfficiencyMonitor = microScheduler.getThreadEfficiencyMonitor();
-
-        // create temp directories as necessary
-        initializeTempDirectory();
-
-        // create the output streams
-        initializeOutputStreams(microScheduler.getOutputTracker());
-
-        // Initializing the shard iterator / BAM schedule might take some time, so let the user know vaguely what's going on
-        logger.info("Preparing for traversal" +
-                    (readsDataSource.getReaderIDs().size() > 0 ? String.format(" over %d BAM files", readsDataSource.getReaderIDs().size()) : ""));
-        Iterable<Shard> shardStrategy = getShardStrategy(readsDataSource,microScheduler.getReference(),intervals);
-        logger.info("Done preparing for traversal");
-
-        // execute the microscheduler, storing the results
-        return microScheduler.execute(this.walker, shardStrategy);
-
-        //monitor.stop();
-        //logger.info(String.format("Maximum heap size consumed: %d",monitor.getMaxMemoryUsed()));
-
-        //return result;
-    }
-
-    /**
-     * Retrieves an instance of the walker based on the walker name.
-     *
-     * @param walkerName Name of the walker.  Must not be null.  If the walker cannot be instantiated, an exception will be thrown.
-     * @return An instance of the walker.
-     */
-    public Walker<?, ?> getWalkerByName(String walkerName) {
-        try {
-            return walkerManager.createByName(walkerName);
-        } catch ( UserException e ) {
-            if ( isDeprecatedWalker(walkerName) ) {
-                e = new UserException.DeprecatedWalker(walkerName, getWalkerDeprecationInfo(walkerName));
-            }
-            throw e;
-        }
-    }
-
-    /**
-     * Gets the name of a given walker type.
-     * @param walkerType Type of walker.
-     * @return Name of the walker.
-     */
-    public String getWalkerName(Class<? extends Walker> walkerType) {
-        return walkerManager.getName(walkerType);
-    }
-
-    public String getName() {
-        return myName;
-    }
-
-    /**
-     * Gets a list of the filters to associate with the given walker.  Will NOT initialize the engine with this filters;
-     * the caller must handle that directly.
-     * @return A collection of available filters.
-     */
-    public Collection<ReadFilter> createFilters() {
-        final List<ReadFilter> filters = new LinkedList<>();
-
-        // First add the user requested filters
-        if (this.getArguments().readGroupBlackList != null && this.getArguments().readGroupBlackList.size() > 0)
-            filters.add(new ReadGroupBlackListFilter(this.getArguments().readGroupBlackList));
-        for(final String filterName: this.getArguments().readFilters)
-            filters.add(this.getFilterManager().createByName(filterName));
-
-        // now add the walker default filters.  This ordering is critical important if
-        // users need to apply filters that fix up reads that would be removed by default walker filters
-        filters.addAll(WalkerManager.getReadFilters(walker,this.getFilterManager()));
-
-        return Collections.unmodifiableList(filters);
-    }
-
-    /**
-     * Returns a list of active, initialized read transformers
-     *
-     * @param walker the walker we need to apply read transformers too
-     */
-    public void initializeReadTransformers(final Walker walker) {
-        // keep a list of the active read transformers sorted based on priority ordering
-        List<ReadTransformer> activeTransformers = new ArrayList<ReadTransformer>();
-
-        final ReadTransformersMode overrideMode = WalkerManager.getWalkerAnnotation(walker, ReadTransformersMode.class);
-        final ReadTransformer.ApplicationTime overrideTime = overrideMode != null ? overrideMode.ApplicationTime() : null;
-
-        final PluginManager<ReadTransformer> pluginManager = new PluginManager<ReadTransformer>(ReadTransformer.class);
-
-        for ( final ReadTransformer transformer : pluginManager.createAllTypes() ) {
-            transformer.initialize(overrideTime, this, walker);
-            if ( transformer.enabled() )
-                activeTransformers.add(transformer);
-        }
-
-        setReadTransformers(activeTransformers);
-    }
-
-    public List<ReadTransformer> getReadTransformers() {
-        return readTransformers;
-    }
-
-    /*
-     * Sanity checks that incompatible read transformers are not active together (and throws an exception if they are).
-     *
-     * @param readTransformers   the active read transformers
-     */
-    protected void checkActiveReadTransformers(final List<ReadTransformer> readTransformers) {
-        if ( readTransformers == null )
-            throw new IllegalArgumentException("read transformers cannot be null");
-
-        ReadTransformer sawMustBeFirst = null;
-        ReadTransformer sawMustBeLast  = null;
-
-        for ( final ReadTransformer r : readTransformers ) {
-            if ( r.getOrderingConstraint() == ReadTransformer.OrderingConstraint.MUST_BE_FIRST ) {
-                if ( sawMustBeFirst != null )
-                    throw new UserException.IncompatibleReadFiltersException(sawMustBeFirst.toString(), r.toString());
-                sawMustBeFirst = r;
-            } else if ( r.getOrderingConstraint() == ReadTransformer.OrderingConstraint.MUST_BE_LAST ) {
-                if ( sawMustBeLast != null )
-                    throw new UserException.IncompatibleReadFiltersException(sawMustBeLast.toString(), r.toString());
-                sawMustBeLast = r;
-            }
-        }
-    }
-
-    protected void setReadTransformers(final List<ReadTransformer> readTransformers) {
-        if ( readTransformers == null )
-            throw new ReviewedGATKException("read transformers cannot be null");
-
-        // sort them in priority order
-        Collections.sort(readTransformers, new ReadTransformer.ReadTransformerComparator());
-
-        // make sure we don't have an invalid set of active read transformers
-        checkActiveReadTransformers(readTransformers);
-
-        this.readTransformers = readTransformers;
-    }
-
-    /**
-     * Parse out the thread allocation from the given command-line argument.
-     */
-    private void determineThreadAllocation() {
-        if ( argCollection.numberOfDataThreads < 1 ) throw new UserException.BadArgumentValue("num_threads", "cannot be less than 1, but saw " + argCollection.numberOfDataThreads);
-        if ( argCollection.numberOfCPUThreadsPerDataThread < 1 ) throw new UserException.BadArgumentValue("num_cpu_threads", "cannot be less than 1, but saw " + argCollection.numberOfCPUThreadsPerDataThread);
-        if ( argCollection.numberOfIOThreads < 0 ) throw new UserException.BadArgumentValue("num_io_threads", "cannot be less than 0, but saw " + argCollection.numberOfIOThreads);
-
-        this.threadAllocation = new ThreadAllocation(argCollection.numberOfDataThreads,
-                argCollection.numberOfCPUThreadsPerDataThread,
-                argCollection.numberOfIOThreads,
-                argCollection.monitorThreadEfficiency);
-    }
-
-    public int getTotalNumberOfThreads() {
-        return this.threadAllocation == null ? 1 : threadAllocation.getTotalNumThreads();
-    }
-
-
-
-    /**
-     * Allow subclasses and others within this package direct access to the walker manager.
-     * @return The walker manager used by this package.
-     */
-    protected WalkerManager getWalkerManager() {
-        return walkerManager;
-    }
-    
-    /**
-     * setup a microscheduler
-     *
-     * @return a new microscheduler
-     */
-    private MicroScheduler createMicroscheduler() {
-        // Temporarily require all walkers to have a reference, even if that reference is not conceptually necessary.
-        if ((walker instanceof ReadWalker || walker instanceof DuplicateWalker || walker instanceof ReadPairWalker) &&
-                this.getArguments().referenceFile == null) {
-            throw new UserException.CommandLineException("Read-based traversals require a reference file but none was given");
-        }
-
-        return MicroScheduler.create(this,walker,this.getReadsDataSource(),this.getReferenceDataSource().getReference(),this.getRodDataSources(),threadAllocation);
-    }
-
-    protected DownsamplingMethod getDownsamplingMethod() {
-        GATKArgumentCollection argCollection = this.getArguments();
-
-        DownsamplingMethod commandLineMethod = argCollection.getDownsamplingMethod();
-        DownsamplingMethod walkerMethod = WalkerManager.getDownsamplingMethod(walker);
-
-        DownsamplingMethod method = commandLineMethod != null ? commandLineMethod : walkerMethod;
-        method.checkCompatibilityWithWalker(walker);
-        return method;
-    }
-
-    protected void setDownsamplingMethod(DownsamplingMethod method) {
-        argCollection.setDownsamplingMethod(method);
-    }
-
-    protected boolean includeReadsWithDeletionAtLoci() {
-        return walker.includeReadsWithDeletionAtLoci();
-    }
-
-    /**
-     * Verifies that the supplied set of reads files mesh with what the walker says it requires;
-     * also makes sure that list of SAM files specified on the command line is not empty and contains
-     * no duplicates.
-     */
-    protected void validateSuppliedReads() {
-        GATKArgumentCollection arguments = this.getArguments();
-        final Boolean samFilesArePresent = (arguments.samFiles != null && !arguments.samFiles.isEmpty());
-
-        // Check what the walker says is required against what was provided on the command line.
-        if (WalkerManager.isRequired(walker, DataSource.READS) && !samFilesArePresent)
-            throw new ArgumentException("Walker requires reads but none were provided.");
-
-        // Check what the walker says is allowed against what was provided on the command line.
-        if (samFilesArePresent && !WalkerManager.isAllowed(walker, DataSource.READS))
-            throw new ArgumentException("Walker does not allow reads but reads were provided.");
-
-        //Make sure SAM list specified by the user (if necessary) is not empty
-        if(WalkerManager.isRequired(walker, DataSource.READS) && samFilesArePresent && samReaderIDs.isEmpty() ) {
-            throw new UserException("The list of input files does not contain any BAM files.");
-        }
-
-        // Make sure no SAM files were specified multiple times by the user.
-        checkForDuplicateSamFiles();
-    }
-
-    /**
-     * Checks whether there are SAM files that appear multiple times in the fully unpacked list of
-     * SAM files (samReaderIDs). If there are, throws an ArgumentException listing the files in question.
-     */
-    protected void checkForDuplicateSamFiles() {
-        Set<SAMReaderID> encounteredSamFiles = new HashSet<SAMReaderID>();
-        Set<String> duplicateSamFiles = new LinkedHashSet<String>();
-
-        for ( SAMReaderID samFile : samReaderIDs ) {
-            if ( encounteredSamFiles.contains(samFile) ) {
-                duplicateSamFiles.add(samFile.getSamFilePath());
-            }
-            else {
-                encounteredSamFiles.add(samFile);
-            }
-        }
-
-        if ( duplicateSamFiles.size() > 0 ) {
-            throw new UserException("The following BAM files appear multiple times in the list of input files: " +
-                                    duplicateSamFiles + " BAM files may be specified at most once.");
-        }
-
-    }
-
-    /**
-     * Verifies that the supplied reference file mesh with what the walker says it requires.
-     */
-    protected void validateSuppliedReference() {
-        GATKArgumentCollection arguments = this.getArguments();
-        // Check what the walker says is required against what was provided on the command line.
-        // TODO: Temporarily disabling WalkerManager.isRequired check on the reference because the reference is always required.
-        if (/*WalkerManager.isRequired(walker, DataSource.REFERENCE) &&*/ arguments.referenceFile == null)
-            throw new ArgumentException("Walker requires a reference but none was provided.");
-
-        // Check what the walker says is allowed against what was provided on the command line.
-        if (arguments.referenceFile != null && !WalkerManager.isAllowed(walker, DataSource.REFERENCE))
-            throw new ArgumentException("Walker does not allow a reference but one was provided.");
-    }
-
-    protected void validateSuppliedIntervals() {
-        // Only read walkers support '-L unmapped' intervals.  Trap and validate any other instances of -L unmapped.
-        if(!(walker instanceof ReadWalker)) {
-            GenomeLocSortedSet intervals = getIntervals();
-            if(intervals != null && getIntervals().contains(GenomeLoc.UNMAPPED))
-                throw new ArgumentException("Interval list specifies unmapped region.  Only read walkers may include the unmapped region.");
-        }
-
-        // If intervals is non-null and empty at this point, it means that the list of intervals to process
-        // was filtered down to an empty set (eg., the user specified something like -L chr1 -XL chr1). Since
-        // this was very likely unintentional, the user should be informed of this. Note that this is different
-        // from the case where intervals == null, which indicates that there were no interval arguments.
-        if ( intervals != null && intervals.isEmpty() ) {
-            logger.warn("The given combination of -L and -XL options results in an empty set.  No intervals to process.");
-        }
-
-        // TODO: add a check for ActiveRegion walkers to prevent users from passing an entire contig/chromosome
-    }
-
-    /**
-     * Get the sharding strategy given a driving data source.
-     *
-     * @param readsDataSource readsDataSource
-     * @param drivingDataSource Data on which to shard.
-     * @param intervals intervals
-     * @return the sharding strategy
-     */
-    protected Iterable<Shard> getShardStrategy(SAMDataSource readsDataSource, ReferenceSequenceFile drivingDataSource, GenomeLocSortedSet intervals) {
-        ValidationExclusion exclusions = (readsDataSource != null ? readsDataSource.getReadsInfo().getValidationExclusionList() : null);
-        DownsamplingMethod downsamplingMethod = readsDataSource != null ? readsDataSource.getReadsInfo().getDownsamplingMethod() : null;
-        ReferenceDataSource referenceDataSource = this.getReferenceDataSource();
-
-        // If reads are present, assume that accessing the reads is always the dominant factor and shard based on that supposition.
-        if(!readsDataSource.isEmpty()) {
-            if(!readsDataSource.hasIndex() && !exclusions.contains(ValidationExclusion.TYPE.ALLOW_UNINDEXED_BAM))
-                throw new UserException.CommandLineException("Cannot process the provided BAM file(s) because they were not indexed.  The GATK does offer limited processing of unindexed BAMs in --unsafe mode, but this GATK feature is currently unsupported.");
-            if(!readsDataSource.hasIndex() && intervals != null && !argCollection.allowIntervalsWithUnindexedBAM)
-                throw new UserException.CommandLineException("Cannot perform interval processing when reads are present but no index is available.");
-
-            if(walker instanceof LocusWalker) {
-                if (readsDataSource.getSortOrder() != SAMFileHeader.SortOrder.coordinate)
-                    throw new UserException.MissortedBAM(SAMFileHeader.SortOrder.coordinate, "Locus walkers can only traverse coordinate-sorted data.  Please resort your input BAM file(s) or set the Sort Order tag in the header appropriately.");
-                if(intervals == null)
-                    return readsDataSource.createShardIteratorOverMappedReads(new LocusShardBalancer());
-                else
-                    return readsDataSource.createShardIteratorOverIntervals(intervals,new LocusShardBalancer());
-            } 
-            else if(walker instanceof ActiveRegionWalker) {
-                if (readsDataSource.getSortOrder() != SAMFileHeader.SortOrder.coordinate)
-                    throw new UserException.MissortedBAM(SAMFileHeader.SortOrder.coordinate, "Active region walkers can only traverse coordinate-sorted data.  Please resort your input BAM file(s) or set the Sort Order tag in the header appropriately.");
-                if(intervals == null)
-                    return readsDataSource.createShardIteratorOverMappedReads(new ActiveRegionShardBalancer());
-                else
-                    return readsDataSource.createShardIteratorOverIntervals(((ActiveRegionWalker)walker).extendIntervals(intervals, this.genomeLocParser, this.getReferenceDataSource().getReference()), new ActiveRegionShardBalancer());
-            } 
-            else if(walker instanceof ReadWalker || walker instanceof ReadPairWalker || walker instanceof DuplicateWalker) {
-                // Apply special validation to read pair walkers.
-                if(walker instanceof ReadPairWalker) {
-                    if(readsDataSource.getSortOrder() != SAMFileHeader.SortOrder.queryname)
-                        throw new UserException.MissortedBAM(SAMFileHeader.SortOrder.queryname, "Read pair walkers are exceptions in that they cannot be run on coordinate-sorted BAMs but instead require query name-sorted files.  You will need to resort your input BAM file in query name order to use this walker.");
-                    if(intervals != null && !intervals.isEmpty())
-                        throw new UserException.CommandLineException("Pairs traversal cannot be used in conjunction with intervals.");
-                }
-
-                if(intervals == null)
-                    return readsDataSource.createShardIteratorOverAllReads(new ReadShardBalancer());
-                else
-                    return readsDataSource.createShardIteratorOverIntervals(intervals, new ReadShardBalancer());
-            }
-            else
-                throw new ReviewedGATKException("Unable to determine walker type for walker " + walker.getClass().getName());
-        }
-        else {
-            // TODO -- Determine what the ideal shard size should be here.  Matt suggested that a multiple of 16K might work well
-            // TODO --  (because of how VCF indexes work), but my empirical experience has been simply that the larger the shard
-            // TODO --  size the more efficient the traversal (at least for RODWalkers).  Keeping the previous values for now.  [EB]
-            final int SHARD_SIZE = walker instanceof RodWalker ? 1000000 : 100000;
-            if(intervals == null)
-                return referenceDataSource.createShardsOverEntireReference(readsDataSource,genomeLocParser,SHARD_SIZE);
-            else
-                return referenceDataSource.createShardsOverIntervals(readsDataSource,intervals,SHARD_SIZE);
-        }
-    }
-
-    protected boolean flashbackData() {
-        return walker instanceof ReadWalker;
-    }
-
-    /**
-     * Create the temp directory if it doesn't exist.
-     */
-    private void initializeTempDirectory() {
-        File tempDir = new File(System.getProperty("java.io.tmpdir"));
-        if (!tempDir.exists() && !tempDir.mkdirs())
-            throw new UserException.BadTmpDir("Unable to create directory");
-    }
-
-    /**
-     * Initialize the output streams as specified by the user.
-     *
-     * @param outputTracker the tracker supplying the initialization data.
-     */
-    private void initializeOutputStreams(final OutputTracker outputTracker) {
-        for (final Map.Entry<ArgumentSource, Object> input : getInputs().entrySet())
-            outputTracker.addInput(input.getKey(), input.getValue());
-        for (final Stub<?> stub : getOutputs()) {
-            stub.processArguments(argCollection);
-            outputTracker.addOutput(stub);
-        }
-
-        outputTracker.prepareWalker(walker, getArguments().strictnessLevel);
-    }
-
-    public ReferenceDataSource getReferenceDataSource() {
-        return referenceDataSource;
-    }
-
-    public GenomeLocParser getGenomeLocParser() {
-        return genomeLocParser;
-    }
-
-    /**
-     * Manage lists of filters.
-     */
-    private final FilterManager filterManager = new FilterManager();
-
-    private Date startTime = null; // the start time for execution
-
-    public void setParser(ParsingEngine parsingEngine) {
-        this.parsingEngine = parsingEngine;
-    }
-
-    /**
-     * Explicitly set the GenomeLocParser, for unit testing.
-     * @param genomeLocParser GenomeLocParser to use.
-     */
-    public void setGenomeLocParser(GenomeLocParser genomeLocParser) {
-        this.genomeLocParser = genomeLocParser;
-    }
-
-    /**
-     * Sets the start time when the execute() function was last called
-     * @param startTime the start time when the execute() function was last called
-     */
-    protected void setStartTime(Date startTime) {
-        this.startTime = startTime;
-    }
-
-    /**
-     * @return the start time when the execute() function was last called
-     */
-    public Date getStartTime() {
-        return startTime;
-    }
-
-    /**
-     * Setup the intervals to be processed
-     */
-    protected void initializeIntervals() {
-        intervals = IntervalUtils.parseIntervalArguments(this.referenceDataSource, argCollection.intervalArguments);
-    }
-
-    /**
-     * Add additional, externally managed IO streams for inputs.
-     *
-     * @param argumentSource Field into which to inject the value.
-     * @param value          Instance to inject.
-     */
-    public void addInput(ArgumentSource argumentSource, Object value) {
-        inputs.put(argumentSource, value);
-    }
-
-    /**
-     * Add additional, externally managed IO streams for output.
-     *
-     * @param stub Instance to inject.
-     */
-    public void addOutput(Stub<?> stub) {
-        outputs.add(stub);
-    }
-
-    /**
-     * Returns the tag associated with a given command-line argument.
-     * @param key Object for which to inspect the tag.
-     * @return Tags object associated with the given key, or an empty Tag structure if none are present. 
-     */
-    public Tags getTags(Object key)  {
-        return parsingEngine.getTags(key);
-    }
-
-    protected void initializeDataSources() {
-        logger.info("Strictness is " + argCollection.strictnessLevel);
-
-        validateSuppliedReference();
-        setReferenceDataSource(argCollection.referenceFile);
-
-        validateSuppliedReads();
-        initializeReadTransformers(walker);
-
-        final Map<String, String> sampleRenameMap = argCollection.sampleRenameMappingFile != null ?
-                                                    loadSampleRenameMap(argCollection.sampleRenameMappingFile) :
-                                                    null;
-
-        readsDataSource = createReadsDataSource(argCollection,genomeLocParser,referenceDataSource.getReference(), sampleRenameMap);
-
-        for (ReadFilter filter : filters)
-            filter.initialize(this);
-
-        // set the sequence dictionary of all of Tribble tracks to the sequence dictionary of our reference
-        rodDataSources = getReferenceOrderedDataSources(referenceMetaDataFiles,referenceDataSource.getReference().getSequenceDictionary(),
-                                                        genomeLocParser,argCollection.unsafe,sampleRenameMap);
-    }
-
-    /**
-     * Purely for testing purposes.  Do not use unless you absolutely positively know what you are doing (or
-     * need to absolutely positively kill everyone in the room)
-     * @param dataSource
-     */
-    public void setReadsDataSource(final SAMDataSource dataSource) {
-        this.readsDataSource = dataSource;
-    }
-
-    /**
-     * Entry-point function to initialize the samples database from input data and pedigree arguments
-     */
-    private void initializeSampleDB() {
-        SampleDBBuilder sampleDBBuilder = new SampleDBBuilder(this, argCollection.pedigreeValidationType);
-        sampleDBBuilder.addSamplesFromSAMHeader(getSAMFileHeader());
-        sampleDBBuilder.addSamplesFromSampleNames(SampleUtils.getUniqueSamplesFromRods(this));
-        sampleDBBuilder.addSamplesFromPedigreeFiles(argCollection.pedigreeFiles);
-        sampleDBBuilder.addSamplesFromPedigreeStrings(argCollection.pedigreeStrings);
-        sampleDB = sampleDBBuilder.getFinalSampleDB();
-    }
-
-    /**
-     * Gets a unique identifier for the reader sourcing this read.
-     * @param read Read to examine.
-     * @return A unique identifier for the source file of this read.  Exception if not found.
-     */
-    public SAMReaderID getReaderIDForRead(final SAMRecord read) {
-        return getReadsDataSource().getReaderID(read);
-    }
-
-    /**
-     * Gets the source file for this read.
-     * @param id Unique identifier determining which input file to use.
-     * @return The source filename for this read.
-     */
-    public File getSourceFileForReaderID(final SAMReaderID id) {
-        return getReadsDataSource().getSAMFile(id);
-    }
-
-    /**
-     * Now that all files are open, validate the sequence dictionaries of the reads vs. the reference vrs the reference ordered data (if available).
-     *
-     * @param reads     Reads data source.
-     * @param reference Reference data source.
-     * @param rods    a collection of the reference ordered data tracks
-     */
-    private void validateDataSourcesAgainstReference(SAMDataSource reads, ReferenceSequenceFile reference, Collection<ReferenceOrderedDataSource> rods) {
-        if ((reads.isEmpty() && (rods == null || rods.isEmpty())) || reference == null )
-            return;
-
-        // Compile a set of sequence names that exist in the reference file.
-        SAMSequenceDictionary referenceDictionary = reference.getSequenceDictionary();
-
-        if (!reads.isEmpty()) {
-            // Compile a set of sequence names that exist in the BAM files.
-            SAMSequenceDictionary readsDictionary = reads.getHeader().getSequenceDictionary();
-
-            if (readsDictionary.size() == 0) {
-                logger.info("Reads file is unmapped.  Skipping validation against reference.");
-                return;
-            }
-
-            // compare the reads to the reference
-            SequenceDictionaryUtils.validateDictionaries(logger, getArguments().unsafe, "reads", readsDictionary,
-                                                         "reference", referenceDictionary, true, intervals);
-        }
-
-        for (ReferenceOrderedDataSource rod : rods)
-            IndexDictionaryUtils.validateTrackSequenceDictionary(rod.getName(), rod.getSequenceDictionary(), referenceDictionary, getArguments().unsafe);
-    }
-
-    /**
-     * Gets a data source for the given set of reads.
-     *
-     * @param argCollection arguments
-     * @param genomeLocParser parser
-     * @param refReader reader
-     * @return A data source for the given set of reads.
-     */
-    private SAMDataSource createReadsDataSource(final GATKArgumentCollection argCollection, final GenomeLocParser genomeLocParser,
-                                                final IndexedFastaSequenceFile refReader, final Map<String, String> sampleRenameMap) {
-        DownsamplingMethod downsamplingMethod = getDownsamplingMethod();
-
-        // Synchronize the method back into the collection so that it shows up when
-        // interrogating for the downsampling method during command line recreation.
-        setDownsamplingMethod(downsamplingMethod);
-
-        logger.info(downsamplingMethod);
-
-        if (argCollection.removeProgramRecords && argCollection.keepProgramRecords)
-            throw new UserException.BadArgumentValue("rpr / kpr", "Cannot enable both options");
-
-        boolean removeProgramRecords = argCollection.removeProgramRecords || walker.getClass().isAnnotationPresent(RemoveProgramRecords.class);
-
-        if (argCollection.keepProgramRecords)
-            removeProgramRecords = false;
-
-        final boolean keepReadsInLIBS = walker instanceof ActiveRegionWalker;
-
-        return new SAMDataSource(
-                samReaderIDs,
-                threadAllocation,
-                argCollection.numberOfBAMFileHandles,
-                genomeLocParser,
-                argCollection.useOriginalBaseQualities,
-                argCollection.strictnessLevel,
-                argCollection.readBufferSize,
-                downsamplingMethod,
-                new ValidationExclusion(Arrays.asList(argCollection.unsafe)),
-                filters,
-                readTransformers,
-                includeReadsWithDeletionAtLoci(),
-                argCollection.defaultBaseQualities,
-                removeProgramRecords,
-                keepReadsInLIBS,
-                sampleRenameMap,
-                argCollection.intervalArguments.intervalMerging);
-    }
-
-    /**
-     * Loads a user-provided sample rename map file for use in on-the-fly sample renaming into an in-memory
-     * HashMap. This file must consist of lines with two whitespace-separated fields, the second of which
-     * may contain whitespace:
-     *
-     * absolute_path_to_file    new_sample_name
-     *
-     * The engine will verify that each file contains data from only one sample when the on-the-fly sample
-     * renaming feature is being used. Note that this feature works only with bam and vcf files.
-     *
-     * @param sampleRenameMapFile sample rename map file from which to load data
-     * @return a HashMap containing the contents of the map file, with the keys being the input file paths and
-     *         the values being the new sample names.
-     */
-    protected Map<String, String> loadSampleRenameMap( final File sampleRenameMapFile ) {
-        logger.info("Renaming samples from input files on-the-fly using mapping file " + sampleRenameMapFile.getAbsolutePath());
-
-        final Map<String, String> sampleRenameMap = new HashMap<>((int)sampleRenameMapFile.length() / 50);
-
-        try {
-            for ( final String line : new XReadLines(sampleRenameMapFile) ) {
-                final String[] tokens = line.split("\\s+", 2);
-
-                if ( tokens.length != 2 ) {
-                    throw new UserException.MalformedFile(sampleRenameMapFile,
-                                                          String.format("Encountered a line with %s fields instead of the required 2 fields. Line was: %s",
-                                                                        tokens.length, line));
-                }
-
-                final File inputFile = new File(tokens[0]);
-                final String newSampleName = tokens[1].trim();
-
-                if (newSampleName.contains(VCFConstants.FIELD_SEPARATOR)) {
-                    throw new UserException.MalformedFile(sampleRenameMapFile, String.format(
-                            "Encountered illegal sample name; sample names may not include the VCF field delimiter (%s).  Sample name: %s; line: %s",
-                            VCFConstants.FIELD_SEPARATOR,
-                            newSampleName,
-                            line
-                    ));
-                }
-
-                if ( ! inputFile.isAbsolute() ) {
-                    throw new UserException.MalformedFile(sampleRenameMapFile, "Input file path not absolute at line: " + line);
-                }
-
-                final String inputFilePath = inputFile.getAbsolutePath();
-
-                if ( sampleRenameMap.containsKey(inputFilePath) ) {
-                    throw new UserException.MalformedFile(sampleRenameMapFile,
-                                                          String.format("Input file %s appears more than once", inputFilePath));
-                }
-
-                sampleRenameMap.put(inputFilePath, newSampleName);
-            }
-        }
-        catch ( FileNotFoundException e ) {
-            throw new UserException.CouldNotReadInputFile(sampleRenameMapFile, e);
-        }
-
-        return sampleRenameMap;
-    }
-
-
-    /**
-     * Opens a reference sequence file paired with an index.  Only public for testing purposes
-     *
-     * @param refFile Handle to a reference sequence file.  Non-null.
-     */
-    public void setReferenceDataSource(File refFile) {
-        this.referenceDataSource = new ReferenceDataSource(refFile);
-        genomeLocParser = new GenomeLocParser(referenceDataSource.getReference());
-    }
-
-    /**
-     * Open the reference-ordered data sources.
-     *
-     * @param referenceMetaDataFiles collection of RMD descriptors to load and validate.
-     * @param sequenceDictionary GATK-wide sequnce dictionary to use for validation.
-     * @param genomeLocParser to use when creating and validating GenomeLocs.
-     * @param validationExclusionType potentially indicate which validations to include / exclude.
-     * @param sampleRenameMap map of file -> new sample name used when doing on-the-fly sample renaming
-     *
-     * @return A list of reference-ordered data sources.
-     */
-    private List<ReferenceOrderedDataSource> getReferenceOrderedDataSources(final Collection<RMDTriplet> referenceMetaDataFiles,
-                                                                            final SAMSequenceDictionary sequenceDictionary,
-                                                                            final GenomeLocParser genomeLocParser,
-                                                                            final ValidationExclusion.TYPE validationExclusionType,
-                                                                            final Map<String, String> sampleRenameMap) {
-        final RMDTrackBuilder builder = new RMDTrackBuilder(sequenceDictionary,genomeLocParser, validationExclusionType,
-                                                            getArguments().disableAutoIndexCreationAndLockingWhenReadingRods,
-                                                            sampleRenameMap);
-
-        final List<ReferenceOrderedDataSource> dataSources = new ArrayList<ReferenceOrderedDataSource>();
-        for (RMDTriplet fileDescriptor : referenceMetaDataFiles)
-            dataSources.add(new ReferenceOrderedDataSource(fileDescriptor,
-                                                           builder,
-                                                           sequenceDictionary,
-                                                           genomeLocParser,
-                                                           flashbackData()));
-
-        return dataSources;
-    }
-
-    /**
-     * Returns the SAM File Header from the input reads' data source file
-     * @return the SAM File Header from the input reads' data source file
-     */
-    public SAMFileHeader getSAMFileHeader() {
-        return readsDataSource.getHeader();
-    }
-
-    public boolean lenientVCFProcessing() {
-        return lenientVCFProcessing(argCollection.unsafe);
-    }
-
-    public static boolean lenientVCFProcessing(final ValidationExclusion.TYPE val) {
-        return val == ValidationExclusion.TYPE.ALL
-                || val == ValidationExclusion.TYPE.LENIENT_VCF_PROCESSING;
-    }
-
-    /**
-     * Returns the unmerged SAM file header for an individual reader.
-     * @param reader The reader.
-     * @return Header for that reader or null if not available.
-     */
-    public SAMFileHeader getSAMFileHeader(SAMReaderID reader) {
-        return readsDataSource == null ? null : readsDataSource.getHeader(reader);
-    }
-
-    /**
-     * Returns an ordered list of the unmerged SAM file headers known to this engine.
-     * @return list of header for each input SAM file, in command line order
-     */
-    public List<SAMFileHeader> getSAMFileHeaders() {
-        final List<SAMFileHeader> headers = new ArrayList<SAMFileHeader>();
-        for ( final SAMReaderID id : getReadsDataSource().getReaderIDs() ) {
-            headers.add(getReadsDataSource().getHeader(id));
-        }
-        return headers;
-    }
-
-    /**
-     * Gets the master sequence dictionary for this GATK engine instance
-     * @return a never-null dictionary listing all of the contigs known to this engine instance
-     */
-    public SAMSequenceDictionary getMasterSequenceDictionary() {
-        return getReferenceDataSource().getReference().getSequenceDictionary();
-    }
-
-    /**
-     * Returns data source object encapsulating all essential info and handlers used to traverse
-     * reads; header merger, individual file readers etc can be accessed through the returned data source object.
-     *
-     * @return the reads data source
-     */
-    public SAMDataSource getReadsDataSource() {
-        return this.readsDataSource;
-    }
-
-    /**
-     * Sets the collection of GATK main application arguments.
-     *
-     * @param argCollection the GATK argument collection
-     */
-    public void setArguments(GATKArgumentCollection argCollection) {
-        this.argCollection = argCollection;
-    }
-
-    /**
-     * Gets the collection of GATK main application arguments.
-     *
-     * @return the GATK argument collection
-     */
-    public GATKArgumentCollection getArguments() {
-        return this.argCollection;
-    }
-
-    /**
-     * Get the list of intervals passed to the engine.
-     * @return List of intervals, or null if no intervals are in use
-     */
-    public GenomeLocSortedSet getIntervals() {
-        return this.intervals;
-    }
-
-    /**
-     * Get the list of regions of the genome being processed.  If the user
-     * requested specific intervals, return those, otherwise return regions
-     * corresponding to the entire genome.  Never returns null.
-     *
-     * @return a non-null set of intervals being processed
-     */
-    @Ensures("result != null")
-    public GenomeLocSortedSet getRegionsOfGenomeBeingProcessed() {
-        if ( getIntervals() == null )
-            // if we don't have any intervals defined, create intervals from the reference itself
-            return GenomeLocSortedSet.createSetFromSequenceDictionary(getReferenceDataSource().getReference().getSequenceDictionary());
-        else
-            return getIntervals();
-    }
-
-    /**
-     * Gets the list of filters employed by this engine.
-     * @return Collection of filters (actual instances) used by this engine.
-     */
-    public Collection<ReadFilter> getFilters() {
-        return this.filters;
-    }
-
-    /**
-     * Sets the list of filters employed by this engine.
-     * @param filters Collection of filters (actual instances) used by this engine.
-     */
-    public void setFilters(Collection<ReadFilter> filters) {
-        this.filters = filters;
-    }
-
-    /**
-     * Gets the filter manager for this engine.
-     * @return filter manager for this engine.
-     */
-    protected FilterManager getFilterManager() {
-        return filterManager;
-    }
-
-    /**
-     * Gets the input sources for this engine.
-     * @return input sources for this engine.
-     */
-    protected Map<ArgumentSource, Object> getInputs() {
-        return inputs;
-    }
-
-    /**
-     * Gets the output stubs for this engine.
-     * @return output stubs for this engine.
-     */
-    protected Collection<Stub<?>> getOutputs() {
-        return outputs;
-    }
-
-    /**
-     * Returns data source objects encapsulating all rod data;
-     * individual rods can be accessed through the returned data source objects.
-     *
-     * @return the rods data sources, never {@code null}.
-     */
-    public List<ReferenceOrderedDataSource> getRodDataSources() {
-        return this.rodDataSources;
-    }
-
-    /**
-     * Gets cumulative metrics about the entire run to this point.
-     * Returns a clone of this snapshot in time.
-     * @return cumulative metrics about the entire run at this point.  ReadMetrics object is a unique instance and is
-     *         owned by the caller; the caller can do with the object what they wish.
-     */
-    public ReadMetrics getCumulativeMetrics() {
-        // todo -- probably shouldn't be lazy
-        if ( cumulativeMetrics == null )
-            cumulativeMetrics = readsDataSource == null ? new ReadMetrics() : readsDataSource.getCumulativeReadMetrics();
-        return cumulativeMetrics;
-    }
-
-    /**
-     * Return the global ThreadEfficiencyMonitor, if there is one
-     *
-     * @return the monitor, or null if none is active
-     */
-    public ThreadEfficiencyMonitor getThreadEfficiencyMonitor() {
-        return threadEfficiencyMonitor;
-    }
-
-    // -------------------------------------------------------------------------------------
-    //
-    // code for working with Samples database
-    //
-    // -------------------------------------------------------------------------------------
-
-    public SampleDB getSampleDB() {
-        return this.sampleDB;
-    }
-
-    public Map<String,String> getApproximateCommandLineArguments(Object... argumentProviders) {
-        return CommandLineUtils.getApproximateCommandLineArguments(parsingEngine,argumentProviders);
-    }
-
-    public String createApproximateCommandLineArgumentString(Object... argumentProviders) {
-        return CommandLineUtils.createApproximateCommandLineArgumentString(parsingEngine,argumentProviders);
-    }
-
-    // -------------------------------------------------------------------------------------
-    //
-    // code for working with progress meter
-    //
-    // -------------------------------------------------------------------------------------
-
-    /**
-     * Register the global progress meter with this engine
-     *
-     * Calling this function more than once will result in an IllegalStateException
-     *
-     * @param meter a non-null progress meter
-     */
-    public void registerProgressMeter(final ProgressMeter meter) {
-        if ( meter == null ) throw new IllegalArgumentException("Meter cannot be null");
-        if ( progressMeter != null ) throw new IllegalStateException("Progress meter already set");
-
-        progressMeter = meter;
-    }
-
-    /**
-     * Get the progress meter being used by this engine.  May be null if no meter has been registered yet
-     * @return a potentially null pointer to the progress meter
-     */
-    public ProgressMeter getProgressMeter() {
-        return progressMeter;
-    }
-
-    /**
-     * Does the current runtime in unit exceed the runtime limit, if one has been provided?
-     *
-     * @return false if not limit was requested or if runtime <= the limit, true otherwise
-     */
-    public boolean exceedsRuntimeLimit() {
-        if ( progressMeter == null )
-            // not yet initialized or not set because of testing
-            return false;
-
-        if ( getArguments().maxRuntime == NO_RUNTIME_LIMIT )
-            return false;
-        else {  
-            final long runtime = progressMeter.getRuntimeInNanosecondsUpdatedPeriodically();
-            if ( runtime < 0 ) throw new IllegalArgumentException("runtime must be >= 0 but got " + runtime);
-            final long maxRuntimeNano = getRuntimeLimitInNanoseconds();
-            return runtime > maxRuntimeNano;
-        }
-    }
-
-    /**
-     * @return the runtime limit in nanoseconds, or -1 if no limit was specified
-     */
-    public long getRuntimeLimitInNanoseconds() {
-        return runtimeLimitInNanoseconds;
-    }
-
-    /**
-     * Setup the runtime limits for this engine, updating the runtimeLimitInNanoseconds
-     * as appropriate
-     *
-     * @param args the GATKArgumentCollection to retrieve our runtime limits from
-     */
-    private void setupRuntimeLimits(final GATKArgumentCollection args) {
-        if ( args.maxRuntime == NO_RUNTIME_LIMIT )
-            runtimeLimitInNanoseconds = -1;
-        else if (args.maxRuntime < 0 )
-            throw new UserException.BadArgumentValue("maxRuntime", "must be >= 0 or == -1 (meaning no limit) but received negative value " + args.maxRuntime);
-        else {
-            runtimeLimitInNanoseconds = TimeUnit.NANOSECONDS.convert(args.maxRuntime, args.maxRuntimeUnits);
-        }
-    }
-
-    /**
-     * Returns the sample list including all samples.
-     * @return never {@code null}.
-     */
-    public SampleList getSampleList() {
-        return new IndexedSampleList(getSampleDB().getSampleNames());
-    }
-
-    /**
-     * Returns the sample list including samples in read inputs.
-     * @return never {@code null}.
-     */
-    public SampleList getReadSampleList() {
-        return new IndexedSampleList(SampleUtils.getSAMFileSamples(getSAMFileHeader()));
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/ReadProperties.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/ReadProperties.java
deleted file mode 100644
index 6ee9ad3..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/ReadProperties.java
+++ /dev/null
@@ -1,198 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine;
-
-import htsjdk.samtools.SAMFileHeader;
-import htsjdk.samtools.SAMFileReader;
-import htsjdk.samtools.ValidationStringency;
-import org.broadinstitute.gatk.engine.arguments.ValidationExclusion;
-import org.broadinstitute.gatk.engine.datasources.reads.SAMReaderID;
-import org.broadinstitute.gatk.engine.downsampling.DownsamplingMethod;
-import org.broadinstitute.gatk.engine.filters.ReadFilter;
-import org.broadinstitute.gatk.engine.iterators.ReadTransformer;
-
-import java.util.Collection;
-import java.util.List;
-/**
- * User: hanna
- * Date: May 14, 2009
- * Time: 4:06:26 PM
- * BROAD INSTITUTE SOFTWARE COPYRIGHT NOTICE AND AGREEMENT
- * Software and documentation are copyright 2005 by the Broad Institute.
- * All rights are reserved.
- *
- * Users acknowledge that this software is supplied without any warranty or support.
- * The Broad Institute is not responsible for its use, misuse, or
- * functionality.
- */
-
-/**
- * A data structure containing information about the reads data sources as well as
- * information about how they should be downsampled, sorted, and filtered.
- */
-public class ReadProperties {
-    private final Collection<SAMReaderID> readers;
-    private final SAMFileHeader header;
-    private final SAMFileHeader.SortOrder sortOrder;
-    private final ValidationStringency validationStringency;
-    private final DownsamplingMethod downsamplingMethod;
-    private final ValidationExclusion exclusionList;
-    private final Collection<ReadFilter> supplementalFilters;
-    private final List<ReadTransformer> readTransformers;
-    private final boolean keepUniqueReadListInLIBS;
-    private final boolean includeReadsWithDeletionAtLoci;
-    private final boolean useOriginalBaseQualities;
-    private final byte defaultBaseQualities;
-
-    /**
-     * Return true if the walker wants to see reads that contain deletions when looking at locus pileups
-     * 
-     * @return
-     */
-    public boolean includeReadsWithDeletionAtLoci() {
-        return includeReadsWithDeletionAtLoci;
-    }
-
-    public boolean keepUniqueReadListInLIBS() {
-        return keepUniqueReadListInLIBS;
-    }
-
-    /**
-     * Gets a list of the files acting as sources of reads.
-     * @return A list of files storing reads data.
-     */
-    public Collection<SAMReaderID> getSAMReaderIDs() {
-        return readers;
-    }
-
-    /**
-     * Gets the sam file header
-     * @return the sam file header
-     */
-    public SAMFileHeader getHeader() {
-        return header;
-    }
-
-    /**
-     * Gets the sort order of the reads
-     * @return the sort order of the reads
-     */
-    public SAMFileHeader.SortOrder getSortOrder() {
-        return sortOrder;
-    }
-
-    /**
-     * How strict should validation be?
-     * @return Stringency of validation.
-     */
-    public ValidationStringency getValidationStringency() {
-        return validationStringency;
-    }
-
-    /**
-     * Gets the method and parameters used when downsampling reads.
-     * @return Downsample fraction.
-     */
-    public DownsamplingMethod getDownsamplingMethod() {
-        return downsamplingMethod;
-    }
-
-    /**
-     * Return whether to 'verify' the reads as we pass through them.
-     * @return Whether to verify the reads.
-     */
-    public ValidationExclusion getValidationExclusionList() {
-        return exclusionList;
-    }
-
-    public Collection<ReadFilter> getSupplementalFilters() {
-        return supplementalFilters;
-    }
-
-
-    public List<ReadTransformer> getReadTransformers() {
-        return readTransformers;
-    }
-
-    /**
-     * Return whether to use original base qualities.
-     * @return Whether to use original base qualities.
-     */
-    public boolean useOriginalBaseQualities() {
-        return useOriginalBaseQualities;
-    }
-
-    /**
-     * @return Default base quality value to fill reads missing base quality information.
-     */
-    public byte defaultBaseQualities() {
-        return defaultBaseQualities;
-    }
-
-    /**
-     * Extract the command-line arguments having to do with reads input
-     * files and store them in an easy-to-work-with package.  Constructor
-     * is package protected.
-     * @param samFiles list of reads files.
-     * @param header sam file header.
-     * @param useOriginalBaseQualities True if original base qualities should be used.
-     * @param strictness Stringency of reads file parsing.
-     * @param downsamplingMethod Method for downsampling reads at a given locus.
-     * @param exclusionList what safety checks we're willing to let slide
-     * @param supplementalFilters additional filters to dynamically apply.
-     * @param includeReadsWithDeletionAtLoci if 'true', the base pileups sent to the walker's map() method
-     *         will explicitly list reads with deletion over the current reference base; otherwise, only observed
-     *        bases will be seen in the pileups, and the deletions will be skipped silently.
-     * @param defaultBaseQualities if the reads have incomplete quality scores, set them all to defaultBaseQuality.
-     * @param keepUniqueReadListInLIBS If true, we will tell LocusIteratorByState to track the unique reads it sees
-     *                                 This is really useful for ActiveRegionTraversals
-     */
-    public ReadProperties( Collection<SAMReaderID> samFiles,
-           SAMFileHeader header,
-           SAMFileHeader.SortOrder sortOrder,
-           boolean useOriginalBaseQualities,
-           ValidationStringency strictness,
-           DownsamplingMethod downsamplingMethod,
-           ValidationExclusion exclusionList,
-           Collection<ReadFilter> supplementalFilters,
-           List<ReadTransformer> readTransformers,
-           boolean includeReadsWithDeletionAtLoci,
-           byte defaultBaseQualities,
-           final boolean keepUniqueReadListInLIBS) {
-        this.readers = samFiles;
-        this.header = header;
-        this.sortOrder = sortOrder;
-        this.validationStringency = strictness;
-        this.downsamplingMethod = downsamplingMethod == null ? DownsamplingMethod.NONE : downsamplingMethod;
-        this.exclusionList = exclusionList == null ? new ValidationExclusion() : exclusionList;
-        this.supplementalFilters = supplementalFilters;
-        this.readTransformers = readTransformers;
-        this.includeReadsWithDeletionAtLoci = includeReadsWithDeletionAtLoci;
-        this.useOriginalBaseQualities = useOriginalBaseQualities;
-        this.defaultBaseQualities = defaultBaseQualities;
-        this.keepUniqueReadListInLIBS = keepUniqueReadListInLIBS;
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/WalkerManager.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/WalkerManager.java
deleted file mode 100644
index fb9d489..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/WalkerManager.java
+++ /dev/null
@@ -1,431 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine;
-
-import org.broadinstitute.gatk.engine.walkers.*;
-import org.broadinstitute.gatk.utils.commandline.Hidden;
-import org.broadinstitute.gatk.engine.datasources.rmd.ReferenceOrderedDataSource;
-import org.broadinstitute.gatk.engine.downsampling.DownsampleType;
-import org.broadinstitute.gatk.engine.downsampling.DownsamplingMethod;
-import org.broadinstitute.gatk.engine.filters.FilterManager;
-import org.broadinstitute.gatk.engine.filters.ReadFilter;
-import org.broadinstitute.gatk.engine.iterators.ReadTransformer;
-import org.broadinstitute.gatk.utils.classloader.PluginManager;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-import org.broadinstitute.gatk.utils.help.ResourceBundleExtractorDoclet;
-import org.broadinstitute.gatk.utils.text.TextFormattingUtils;
-
-import java.lang.annotation.Annotation;
-import java.util.*;
-
-/**
- * Plugin manager that also provides various utilities for inspecting Walkers.
- */
-public class WalkerManager extends PluginManager<Walker> {
-
-    /**
-     * A collection of help text for walkers and their enclosing packages.
-     */
-    private ResourceBundle helpText;
-
-    public WalkerManager() {
-        super(Walker.class,"walker","");
-        helpText = TextFormattingUtils.loadResourceBundle("GATKText");
-    }
-
-    /**
-     * Get the list of walkers currently available to the GATK, organized
-     * by package.
-     * @param visibleWalkersOnly If true, return only the walker names that aren't hidden.
-     * @return Names of currently available walkers.
-     */
-    public Map<String,Collection<Class<? extends Walker>>> getWalkerNamesByPackage(boolean visibleWalkersOnly) {
-        Map<String,Collection<Class<? extends Walker>>> walkersByPackage = new HashMap<String,Collection<Class<? extends Walker>>>();
-        for(Class<? extends Walker> walker: getPlugins()) {
-            if(visibleWalkersOnly && isHidden(walker))
-                continue;
-
-            // Extract the name for the package; if the walker is in the unnamed package, use the empty string
-            String walkerPackage = walker.getPackage() != null ? walker.getPackage().getName() : "";
-            if(!walkersByPackage.containsKey(walkerPackage))
-                walkersByPackage.put(walkerPackage,new ArrayList<Class<? extends Walker>>());
-            walkersByPackage.get(walkerPackage).add(walker);
-        }
-        return Collections.unmodifiableMap(walkersByPackage);
-    }
-
-    /**
-     * Gets the display name for a given package.
-     * @param packageName Fully qualified package name.
-     * @return A suitable display name for the package.
-     */
-    public String getPackageDisplayName(String packageName) {
-        // ...try to compute the override from the text of the package name, while accounting for
-        // unpackaged walkers.
-        String displayName = packageName.substring(packageName.lastIndexOf('.')+1);
-        if (displayName.trim().equals("")) displayName = "<unpackaged>";
-        return displayName;
-    }
-
-    /**
-     * Gets the help text associated with a given package name.
-     * @param packageName Package for which to search for help text.
-     * @return Package help text, or "" if none exists.
-     */
-    public String getPackageSummaryText(String packageName) {
-        String key = String.format("%s.%s",packageName, ResourceBundleExtractorDoclet.SUMMARY_TAGLET_NAME);
-        if(!helpText.containsKey(key))
-            return "";
-        return helpText.getString(key);
-    }
-
-    /**
-     * Gets the summary help text associated with a given walker type.
-     * @param walkerType Type of walker for which to search for help text.
-     * @return Walker summary description, or "" if none exists.
-     */
-    public String getWalkerSummaryText(Class<? extends Walker> walkerType) {
-        String walkerSummary = String.format("%s.%s",walkerType.getName(), ResourceBundleExtractorDoclet.SUMMARY_TAGLET_NAME);
-        if(!helpText.containsKey(walkerSummary))
-            return "";
-        return helpText.getString(walkerSummary);
-    }
-
-    /**
-     * Gets the summary help text associated with a given walker type.
-     * @param walker Walker for which to search for help text.
-     * @return Walker summary description, or "" if none exists.
-     */
-    public String getWalkerSummaryText(Walker walker) {
-        return getWalkerSummaryText(walker.getClass());
-    }
-
-    /**
-     * Gets the descriptive help text associated with a given walker type.
-     * @param walkerType Type of walker for which to search for help text.
-     * @return Walker full description, or "" if none exists.
-     */
-    public String getWalkerDescriptionText(Class<? extends Walker> walkerType) {
-        String walkerDescription = String.format("%s.%s",walkerType.getName(), ResourceBundleExtractorDoclet.DESCRIPTION_TAGLET_NAME);
-        if(!helpText.containsKey(walkerDescription))
-            return "";
-        return helpText.getString(walkerDescription);
-    }
-
-    /**
-     * Gets the descriptive help text associated with a given walker type.
-     * @param walker Walker for which to search for help text.
-     * @return Walker full description, or "" if none exists.
-     */
-    public String getWalkerDescriptionText(Walker walker) {
-        return getWalkerDescriptionText(walker.getClass());
-    }
-
-    /**
-     * Retrieves the walker class given a walker name.
-     * @param walkerName Name of the walker.
-     * @return Class representing the walker.
-     */
-    public Class<? extends Walker> getWalkerClassByName(String walkerName) {
-        return getPluginsByName().get(walkerName);
-    }
-
-    /**
-     * Gets the data source for the provided walker.
-     * @param walkerClass The class of the walker.
-     * @return Which type of data source to traverse over...reads or reference?
-     */
-    public static DataSource getWalkerDataSource(Class<? extends Walker> walkerClass) {
-        By byDataSource = walkerClass.getAnnotation(By.class);
-        if( byDataSource == null )
-            throw new ReviewedGATKException("Unable to find By annotation for walker class " + walkerClass.getName());
-        return byDataSource.value();
-    }
-
-    /**
-     * Gets the data source for the provided walker.
-     * @param walker The walker.
-     * @return Which type of data source to traverse over...reads or reference?
-     */
-    public static DataSource getWalkerDataSource(Walker walker) {
-        return getWalkerDataSource(walker.getClass());
-    }
-
-    /**
-     * Get a list of RODs allowed by the walker.
-     * @param walkerClass Class of the walker to query.
-     * @return The list of allowed reference meta data.
-     */
-    public static List<RMD> getAllowsMetaData(Class<? extends Walker> walkerClass) {
-        return Collections.<RMD>emptyList();
-    }
-
-    /**
-     * Determine whether the given walker supports the given data source.
-     * @param walkerClass Class of the walker to query.
-     * @param dataSource Source to check for .
-     * @return True if the walker forbids this data type.  False otherwise.
-     */
-    public static boolean isAllowed(Class<? extends Walker> walkerClass, DataSource dataSource) {
-        Allows allowsDataSource = getWalkerAllowed(walkerClass);
-
-        // Allows is less restrictive than requires.  If an allows
-        // clause is not specified, any kind of data is allowed.
-        if( allowsDataSource == null )
-            return true;
-
-        return Arrays.asList(allowsDataSource.value()).contains(dataSource);
-    }
-
-    /**
-     * Determine whether the given walker supports the given data source.
-     * @param walker Walker to query.
-     * @param dataSource Source to check for .
-     * @return True if the walker forbids this data type.  False otherwise.
-     */
-    public static boolean isAllowed(Walker walker, DataSource dataSource) {
-        return isAllowed(walker.getClass(), dataSource);
-    }
-
-    /**
-     * Determine whether the given walker supports the given reference ordered data.
-     * @param walkerClass Class of the walker to query.
-     * @param rod Source to check.
-     * @return True if the walker forbids this data type.  False otherwise.
-     */
-    public static boolean isAllowed(Class<? extends Walker> walkerClass, ReferenceOrderedDataSource rod) {
-        return true;
-    }
-
-    /**
-     * Determine whether the given walker supports the given reference ordered data.
-     * @param walker Walker to query.
-     * @param rod Source to check.
-     * @return True if the walker forbids this data type.  False otherwise.
-     */
-    public static boolean isAllowed(Walker walker, ReferenceOrderedDataSource rod) {
-        return isAllowed(walker.getClass(), rod);
-    }
-
-    /**
-     * Determine whether the given walker requires the given data source.
-     * @param walkerClass Class of the walker to query.
-     * @param dataSource Source to check for.
-     * @return True if the walker allows this data type.  False otherwise.
-     */
-    public static boolean isRequired(Class<? extends Walker> walkerClass, DataSource dataSource) {
-        Requires requiresDataSource = getWalkerRequirements(walkerClass);
-        return Arrays.asList(requiresDataSource.value()).contains(dataSource);
-    }
-
-    /**
-     * Determine whether the given walker requires the given data source.
-     * @param walker Walker to query.
-     * @param dataSource Source to check for.
-     * @return True if the walker allows this data type.  False otherwise.
-     */
-    public static boolean isRequired(Walker walker, DataSource dataSource) {
-        return isRequired(walker.getClass(), dataSource);
-    }
-
-    /**
-     * Get a list of RODs required by the walker.
-     * @param walkerClass Class of the walker to query.
-     * @return The list of required reference meta data.
-     */
-    public static List<RMD> getRequiredMetaData(Class<? extends Walker> walkerClass) {
-        return Collections.emptyList();
-    }
-
-    /**
-     * Get a list of RODs required by the walker.
-     * @param walker Walker to query.
-     * @return The list of required reference meta data.
-     */
-    public static List<RMD> getRequiredMetaData(Walker walker) {
-        return getRequiredMetaData(walker.getClass());
-    }
-
-    /**
-     * Reports whether this walker type is hidden -- in other words, whether it'll appear in the help output.
-     * @param walkerType Class to test for visibility.
-     * @return True if the walker should be hidden.  False otherwise.
-     */
-    public static boolean isHidden(Class<? extends Walker> walkerType) {
-        return walkerType.isAnnotationPresent(Hidden.class);    
-    }
-
-    /**
-     * Extracts filters that the walker has requested be run on the dataset.
-     * @param walkerClass Class of the walker to inspect for filtering requests.
-     * @param filterManager Manages the creation of filters.
-     * @return A non-empty list of filters to apply to the reads.
-     */
-    public static List<ReadFilter> getReadFilters(Class<? extends Walker> walkerClass, FilterManager filterManager) {
-        List<ReadFilter> filters = new ArrayList<ReadFilter>();
-        for(Class<? extends ReadFilter> filterType: getReadFilterTypes(walkerClass))
-            filters.add(filterManager.createFilterByType(filterType));
-        return filters;
-    }
-
-    /**
-     * Extracts filters that the walker has requested be run on the dataset.
-     * @param walker Walker to inspect for filtering requests.
-     * @param filterManager Manages the creation of filters.
-     * @return A non-empty list of filters to apply to the reads.
-     */
-    public static List<ReadFilter> getReadFilters(Walker walker, FilterManager filterManager) {
-        return getReadFilters(walker.getClass(), filterManager);
-    }
-
-    /**
-     * Gets the type of downsampling method requested by the walker.  If an alternative
-     * downsampling method is specified on the command-line, the command-line version will
-     * be used instead.
-     * @param walker The walker to interrogate.
-     * @return The downsampling method, as specified by the walker.  Null if none exists.
-     */
-    public static DownsamplingMethod getDownsamplingMethod( Walker walker ) {
-        return getDownsamplingMethod(walker.getClass());
-    }
-
-    /**
-     * Gets the type of downsampling method requested by the walker.  If an alternative
-     * downsampling method is specified on the command-line, the command-line version will
-     * be used instead.
-     * @param walkerClass The class of the walker to interrogate.
-     * @return The downsampling method, as specified by the walker.  Null if none exists.
-     */
-    public static DownsamplingMethod getDownsamplingMethod( Class<? extends Walker> walkerClass ) {
-        DownsamplingMethod downsamplingMethod = null;
-
-        if( walkerClass.isAnnotationPresent(Downsample.class) ) {
-            Downsample downsampleParameters = walkerClass.getAnnotation(Downsample.class);
-            DownsampleType type = downsampleParameters.by();
-            Integer toCoverage = downsampleParameters.toCoverage() >= 0 ? downsampleParameters.toCoverage() : null;
-            Double toFraction = downsampleParameters.toFraction() >= 0.0d ? downsampleParameters.toFraction() : null;
-            downsamplingMethod = new DownsamplingMethod(type, toCoverage, toFraction);
-        }
-
-        return downsamplingMethod;
-    }
-
-    public static <T extends Annotation> T getWalkerAnnotation(final Walker walker, final Class<T> clazz) {
-        return walker.getClass().getAnnotation(clazz);
-    }
-
-    public static ReadTransformer.ApplicationTime getBAQApplicationTime(Walker walker) {
-        return walker.getClass().getAnnotation(BAQMode.class).ApplicationTime();
-    }    
-
-    /**
-     * Create a name for this type of walker.
-     *
-     * @param walkerType The type of walker.
-     * @return A name for this type of walker.
-     */
-    @Override
-    public String getName(Class walkerType) {
-        String walkerName = "";
-
-        if (walkerType.getAnnotation(WalkerName.class) != null)
-            walkerName = ((WalkerName)walkerType.getAnnotation(WalkerName.class)).value().trim();
-        else
-            walkerName = super.getName(walkerType);
-
-        return walkerName;
-    }
-
-    /**
-     * Utility to get the requires attribute from the walker.
-     * Throws an exception if requirements are missing.
-     * @param walkerClass Class of the walker to query for required data.
-     * @return Required data attribute.
-     */
-    private static Requires getWalkerRequirements(Class<? extends Walker> walkerClass) {
-        Requires requiresDataSource = walkerClass.getAnnotation(Requires.class);
-        if( requiresDataSource == null )
-            throw new ReviewedGATKException( "Unable to find data types required by walker class " + walkerClass.getName());
-        return requiresDataSource;
-    }
-
-    /**
-     * Utility to get the requires attribute from the walker.
-     * Throws an exception if requirements are missing.
-     * @param walker Walker to query for required data.
-     * @return Required data attribute.
-     */
-    private static Requires getWalkerRequirements(Walker walker) {
-        return getWalkerRequirements(walker.getClass());
-    }
-
-    /**
-     * Utility to get the forbidden attribute from the walker.
-     * @param walkerClass Class of the walker to query for required data.
-     * @return Required data attribute.  Null if forbidden info isn't present.
-     */
-    private static Allows getWalkerAllowed(Class<? extends Walker> walkerClass) {
-        Allows allowsDataSource = walkerClass.getAnnotation(Allows.class);
-        return allowsDataSource;
-    }
-
-    /**
-     * Utility to get the forbidden attribute from the walker.
-     * @param walker Walker to query for required data.
-     * @return Required data attribute.  Null if forbidden info isn't present.
-     */
-    private static Allows getWalkerAllowed(Walker walker) {
-        return getWalkerAllowed(walker.getClass());
-    }
-
-    /**
-     * Gets the list of filtering classes specified as walker annotations.
-     * @param walkerClass Class of the walker to inspect.
-     * @return An array of types extending from SamRecordFilter.  Will never be null.
-     */
-    public static Collection<Class<? extends ReadFilter>> getReadFilterTypes(Class<?> walkerClass) {
-        List<Class<? extends ReadFilter>> filterTypes = new ArrayList<Class<? extends ReadFilter>>();
-        while(walkerClass != null) {
-            if(walkerClass.isAnnotationPresent(ReadFilters.class)) {
-                for ( Class c : walkerClass.getAnnotation(ReadFilters.class).value() ) {
-                    if( !filterTypes.contains(c) )
-                        filterTypes.add(c);
-                }
-            }
-            walkerClass = walkerClass.getSuperclass();
-        }
-        return filterTypes;
-    }
-
-    /**
-     * Gets the list of filtering classes specified as walker annotations.
-     * @param walker The walker to inspect.
-     * @return An array of types extending from SamRecordFilter.  Will never be null.
-     */
-    public static Collection<Class<? extends ReadFilter>> getReadFilterTypes(Walker walker) {
-        return getReadFilterTypes(walker.getClass());
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/alignment/Aligner.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/alignment/Aligner.java
deleted file mode 100644
index e9622c9..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/alignment/Aligner.java
+++ /dev/null
@@ -1,74 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.alignment;
-
-import htsjdk.samtools.SAMFileHeader;
-import htsjdk.samtools.SAMRecord;
-
-/**
- * Create perfect alignments from the read to the genome represented by the given BWT / suffix array. 
- *
- * @author mhanna
- * @version 0.1
- */
-public interface Aligner {
-    /**
-     * Close this instance of the BWA pointer and delete its resources.
-     */
-    public void close();    
-
-    /**
-     * Allow the aligner to choose one alignment randomly from the pile of best alignments.
-     * @param bases Bases to align.
-     * @return An align
-     */
-    public Alignment getBestAlignment(final byte[] bases);
-
-    /**
-     * Align the read to the reference.
-     * @param read Read to align.
-     * @param header Optional header to drop in place.
-     * @return A list of the alignments.
-     */
-    public SAMRecord align(final SAMRecord read, final SAMFileHeader header);
-
-    /**
-     * Get a iterator of alignments, batched by mapping quality.
-     * @param bases List of bases.
-     * @return Iterator to alignments.
-     */
-    public Iterable<Alignment[]> getAllAlignments(final byte[] bases);
-
-    /**
-     * Get a iterator of aligned reads, batched by mapping quality.
-     * @param read Read to align.
-     * @param newHeader Optional new header to use when aligning the read.  If present, it must be null.
-     * @return Iterator to alignments.
-     */
-    public Iterable<SAMRecord[]> alignAll(final SAMRecord read, final SAMFileHeader newHeader);
-}
-
-
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/alignment/Alignment.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/alignment/Alignment.java
deleted file mode 100644
index 02bc06f..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/alignment/Alignment.java
+++ /dev/null
@@ -1,246 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.alignment;
-
-import htsjdk.samtools.*;
-import org.broadinstitute.gatk.utils.BaseUtils;
-import org.broadinstitute.gatk.utils.Utils;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-
-/**
- * Represents an alignment of a read to a site in the reference genome.
- *
- * @author mhanna
- * @version 0.1
- */
-public class Alignment {
-    protected int contigIndex;
-    protected long alignmentStart;
-    protected boolean negativeStrand;
-    protected int mappingQuality;
-
-    protected char[] cigarOperators;
-    protected int[] cigarLengths;
-
-    protected int editDistance;
-    protected String mismatchingPositions;
-
-    protected int numMismatches;
-    protected int numGapOpens;
-    protected int numGapExtensions;
-    protected int bestCount;
-    protected int secondBestCount;
-
-    /**
-     * Gets the index of the given contig.
-     * @return the inde
-     */
-    public int getContigIndex() { return contigIndex; }
-
-    /**
-     * Gets the starting position for the given alignment.
-     * @return Starting position.
-     */
-    public long getAlignmentStart() { return alignmentStart; }
-
-    /**
-     * Is the given alignment on the reverse strand?
-     * @return True if the alignment is on the reverse strand.
-     */
-    public boolean isNegativeStrand() { return negativeStrand; }
-
-    /**
-     * Gets the score of this alignment.
-     * @return The score.
-     */
-    public int getMappingQuality() { return mappingQuality; }
-
-    /**
-     * Gets the edit distance; will eventually end up in the NM SAM tag
-     * if this alignment makes it that far.
-     * @return The edit distance.
-     */
-    public int getEditDistance() { return editDistance; }
-
-    /**
-     * A string representation of which positions mismatch; contents of MD tag.
-     * @return String representation of mismatching positions.
-     */
-    public String getMismatchingPositions() { return mismatchingPositions; }
-    
-    /**
-     * Gets the number of mismatches in the read.
-     * @return Number of mismatches.
-     */
-    public int getNumMismatches() { return numMismatches; }
-
-    /**
-     * Get the number of gap opens.
-     * @return Number of gap opens.
-     */
-    public int getNumGapOpens() { return numGapOpens; }
-
-    /**
-     * Get the number of gap extensions.
-     * @return Number of gap extensions.
-     */
-    public int getNumGapExtensions() { return numGapExtensions; }
-
-    /**
-     * Get the number of best alignments.
-     * @return Number of top scoring alignments.
-     */
-    public int getBestCount() { return bestCount; }
-
-    /**
-     * Get the number of second best alignments.
-     * @return Number of second best scoring alignments.
-     */
-    public int getSecondBestCount() { return secondBestCount; }
-
-    /**
-     * Gets the cigar for this alignment.
-     * @return sam-jdk formatted alignment.
-     */
-    public Cigar getCigar() {
-        Cigar cigar = new Cigar();
-        for(int i = 0; i < cigarOperators.length; i++) {
-            CigarOperator operator = CigarOperator.characterToEnum(cigarOperators[i]);
-            cigar.add(new CigarElement(cigarLengths[i],operator));
-        }
-        return cigar;
-    }
-
-    /**
-     * Temporarily implement getCigarString() for debugging; the TextCigarCodec is unfortunately
-     * package-protected.
-     * @return
-     */
-    public String getCigarString() {
-        Cigar cigar = getCigar();
-        if(cigar.isEmpty()) return "*";
-
-        StringBuilder cigarString = new StringBuilder();
-        for(CigarElement element: cigar.getCigarElements()) {
-            cigarString.append(element.getLength());
-            cigarString.append(element.getOperator());
-        }
-        return cigarString.toString();
-    }
-
-    /**
-     * Stub for inheritance.
-     */
-    public Alignment() {}    
-
-    /**
-     * Create a new alignment object.
-     * @param contigIndex The contig to which this read aligned.
-     * @param alignmentStart The point within the contig to which this read aligned.
-     * @param negativeStrand Forward or reverse alignment of the given read.
-     * @param mappingQuality How good does BWA think this mapping is?
-     * @param cigarOperators The ordered operators in the cigar string.
-     * @param cigarLengths The lengths to which each operator applies.
-     * @param editDistance The edit distance (cumulative) of the read.
-     * @param mismatchingPositions String representation of which bases in the read mismatch.
-     * @param numMismatches Number of total mismatches in the read.
-     * @param numGapOpens Number of gap opens in the read.
-     * @param numGapExtensions Number of gap extensions in the read.
-     * @param bestCount Number of best alignments in the read.
-     * @param secondBestCount Number of second best alignments in the read.
-     */
-    public Alignment(int contigIndex,
-                     int alignmentStart,
-                     boolean negativeStrand,
-                     int mappingQuality,
-                     char[] cigarOperators,
-                     int[] cigarLengths,
-                     int editDistance,
-                     String mismatchingPositions,
-                     int numMismatches,
-                     int numGapOpens,
-                     int numGapExtensions,
-                     int bestCount,
-                     int secondBestCount) {
-        this.contigIndex = contigIndex;
-        this.alignmentStart = alignmentStart;
-        this.negativeStrand = negativeStrand;
-        this.mappingQuality = mappingQuality;
-        this.cigarOperators = cigarOperators;
-        this.cigarLengths = cigarLengths;
-        this.editDistance = editDistance;
-        this.mismatchingPositions = mismatchingPositions;
-        this.numMismatches = numMismatches;
-        this.numGapOpens = numGapOpens;
-        this.numGapExtensions = numGapExtensions;
-        this.bestCount = bestCount;
-        this.secondBestCount = secondBestCount;
-    }
-
-    /**
-     * Creates a read directly from an alignment.
-     * @param alignment The alignment to convert to a read.
-     * @param unmappedRead Source of the unmapped read.  Should have bases, quality scores, and flags.
-     * @param newSAMHeader The new SAM header to use in creating this read.  Can be null, but if so, the sequence
-     *                     dictionary in the
-     * @return A mapped alignment.
-     */
-    public static SAMRecord convertToRead(Alignment alignment, SAMRecord unmappedRead, SAMFileHeader newSAMHeader) {
-        SAMRecord read;
-        try {
-            read = (SAMRecord)unmappedRead.clone();
-        }
-        catch(CloneNotSupportedException ex) {
-            throw new ReviewedGATKException("Unable to create aligned read from template.");
-        }
-
-        if(newSAMHeader != null)
-            read.setHeader(newSAMHeader);
-
-        // If we're realigning a previously aligned record, strip out the placement of the alignment.
-        read.setReferenceName(SAMRecord.NO_ALIGNMENT_REFERENCE_NAME);
-        read.setAlignmentStart(SAMRecord.NO_ALIGNMENT_START);
-        read.setMateReferenceName(SAMRecord.NO_ALIGNMENT_REFERENCE_NAME);
-        read.setMateAlignmentStart(SAMRecord.NO_ALIGNMENT_START);        
-
-        if(alignment != null) {
-            read.setReadUnmappedFlag(false);
-            read.setReferenceIndex(alignment.getContigIndex());
-            read.setAlignmentStart((int)alignment.getAlignmentStart());
-            read.setReadNegativeStrandFlag(alignment.isNegativeStrand());
-            read.setMappingQuality(alignment.getMappingQuality());
-            read.setCigar(alignment.getCigar());
-            if(alignment.isNegativeStrand()) {
-                read.setReadBases(BaseUtils.simpleReverseComplement(read.getReadBases()));
-                read.setBaseQualities(Utils.reverse(read.getBaseQualities()));
-            }
-            read.setAttribute("NM",alignment.getEditDistance());
-            read.setAttribute("MD",alignment.getMismatchingPositions());
-        }
-
-        return read;
-    }    
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/alignment/bwa/BWAAligner.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/alignment/bwa/BWAAligner.java
deleted file mode 100644
index 2668b8c..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/alignment/bwa/BWAAligner.java
+++ /dev/null
@@ -1,63 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.alignment.bwa;
-
-import org.broadinstitute.gatk.engine.alignment.Aligner;
-
-/**
- * Align reads using BWA.
- *
- * @author mhanna
- * @version 0.1
- */
-public abstract class BWAAligner implements Aligner {
-    /**
-     * The supporting files used by BWA.
-     */
-    protected BWTFiles bwtFiles;
-
-    /**
-     * The current configuration for the BWA aligner.
-     */
-    protected BWAConfiguration configuration;
-
-    /**
-     * Create a new BWAAligner.  Purpose of this call is to ensure that all BWA constructors accept the correct
-     * parameters.
-     * @param bwtFiles The many files representing BWTs persisted to disk.
-     * @param configuration Configuration parameters for the alignment.
-     */
-    public BWAAligner(BWTFiles bwtFiles, BWAConfiguration configuration) {
-        this.bwtFiles = bwtFiles;
-        this.configuration = configuration;
-    }
-
-    /**
-     * Update the configuration passed to the BWA aligner.
-     * @param configuration New configuration to set.
-     */    
-    public abstract void updateConfiguration(BWAConfiguration configuration);
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/alignment/bwa/BWAConfiguration.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/alignment/bwa/BWAConfiguration.java
deleted file mode 100644
index b533a0d..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/alignment/bwa/BWAConfiguration.java
+++ /dev/null
@@ -1,79 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.alignment.bwa;
-
-/**
- * Configuration for the BWA/C aligner.
- *
- * @author mhanna
- * @version 0.1
- */
-public class BWAConfiguration {
-    /**
-     * The maximum edit distance used by BWA.
-     */
-    public Float maximumEditDistance = null;
-
-    /**
-     * How many gap opens are acceptable within this alignment?
-     */
-    public Integer maximumGapOpens = null;
-
-    /**
-     * How many gap extensions are acceptable within this alignment?
-     */
-    public Integer maximumGapExtensions = null;
-
-    /**
-     * Do we disallow indels within a certain range from the start / end?
-     */
-    public Integer disallowIndelWithinRange = null;
-
-    /**
-     * What is the scoring penalty for a mismatch?
-     */
-    public Integer mismatchPenalty = null;
-
-    /**
-     * What is the scoring penalty for a gap open?
-     */
-    public Integer gapOpenPenalty = null;
-
-    /**
-     * What is the scoring penalty for a gap extension?
-     */
-    public Integer gapExtensionPenalty = null;
-
-    /**
-     * Enter bwa's 'non-stop' mode (equivalent to bwa aln -N parameter).
-     */
-    public Boolean nonStopMode = false;
-
-    /**
-     * Set the max queue size that bwa will use when searching for matches (equivalent to bwa aln -m parameter).
-     */
-    public Integer maxEntriesInQueue = null;
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/alignment/bwa/BWTFiles.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/alignment/bwa/BWTFiles.java
deleted file mode 100644
index 16cc4ad..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/alignment/bwa/BWTFiles.java
+++ /dev/null
@@ -1,259 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.alignment.bwa;
-
-import htsjdk.samtools.SAMSequenceDictionary;
-import htsjdk.samtools.SAMSequenceRecord;
-import htsjdk.samtools.util.StringUtil;
-import org.broadinstitute.gatk.engine.alignment.reference.bwt.*;
-import org.broadinstitute.gatk.engine.alignment.reference.packing.PackUtils;
-import org.broadinstitute.gatk.utils.Utils;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-
-import java.io.File;
-import java.io.IOException;
-
-/**
- * Support files for BWT.
- *
- * @author mhanna
- * @version 0.1
- */
-public class BWTFiles {
-    /**
-     * ANN (?) file name.
-     */
-    public final File annFile;
-
-    /**
-     * AMB (?) file name.
-     */
-    public final File ambFile;
-
-    /**
-     * Packed reference sequence file.
-     */
-    public final File pacFile;
-
-    /**
-     * Reverse of packed reference sequence file.
-     */
-    public final File rpacFile;
-
-    /**
-     * Forward BWT file.
-     */
-    public final File forwardBWTFile;
-
-    /**
-     * Forward suffix array file.
-     */
-    public final File forwardSAFile;
-
-    /**
-     * Reverse BWT file.
-     */
-    public final File reverseBWTFile;
-
-    /**
-     * Reverse suffix array file.
-     */
-    public final File reverseSAFile;
-
-    /**
-     * Where these files autogenerated on the fly?
-     */
-    public final boolean autogenerated;
-
-    /**
-     * Create a new BWA configuration file using the given prefix.
-     * @param prefix Prefix to use when creating the configuration.  Must not be null.
-     */
-    public BWTFiles(String prefix) {
-        if(prefix == null)
-            throw new ReviewedGATKException("Prefix must not be null.");
-        annFile = new File(prefix + ".ann");
-        ambFile = new File(prefix + ".amb");
-        pacFile = new File(prefix + ".pac");
-        rpacFile = new File(prefix + ".rpac");
-        forwardBWTFile = new File(prefix + ".bwt");
-        forwardSAFile = new File(prefix + ".sa");
-        reverseBWTFile = new File(prefix + ".rbwt");
-        reverseSAFile = new File(prefix + ".rsa");
-        autogenerated = false;
-    }
-
-    /**
-     * Hand-create a new BWTFiles object, specifying a unique file object for each type.
-     * @param annFile ANN (alternate dictionary) file.
-     * @param ambFile AMB (holes) files.
-     * @param pacFile Packed representation of the forward reference sequence.
-     * @param forwardBWTFile BWT representation of the forward reference sequence.
-     * @param forwardSAFile SA representation of the forward reference sequence.
-     * @param rpacFile Packed representation of the reversed reference sequence.
-     * @param reverseBWTFile BWT representation of the reversed reference sequence.
-     * @param reverseSAFile SA representation of the reversed reference sequence.
-     */
-    private BWTFiles(File annFile,
-                     File ambFile,
-                     File pacFile,
-                     File forwardBWTFile,
-                     File forwardSAFile,
-                     File rpacFile,
-                     File reverseBWTFile,
-                     File reverseSAFile) {
-        this.annFile = annFile;
-        this.ambFile = ambFile;
-        this.pacFile = pacFile;
-        this.forwardBWTFile = forwardBWTFile;
-        this.forwardSAFile = forwardSAFile;
-        this.rpacFile = rpacFile;
-        this.reverseBWTFile = reverseBWTFile;
-        this.reverseSAFile = reverseSAFile;        
-        autogenerated = true;
-    }
-
-    /**
-     * Close out this files object, in the process deleting any temporary filse
-     * that were created.
-     */
-    public void close() {
-        if(autogenerated) {
-            boolean success = true;
-            success = annFile.delete();
-            success &= ambFile.delete();
-            success &= pacFile.delete();
-            success &= forwardBWTFile.delete();
-            success &= forwardSAFile.delete();
-            success &= rpacFile.delete();
-            success &= reverseBWTFile.delete();
-            success &= reverseSAFile.delete();
-
-            if(!success)
-                throw new ReviewedGATKException("Unable to clean up autogenerated representation");
-        }
-    }
-
-    /**
-     * Create a new set of BWT files from the given reference sequence.
-     * @param referenceSequence Sequence from which to build metadata.
-     * @return A new object representing encoded representations of each sequence.
-     */
-    public static BWTFiles createFromReferenceSequence(byte[] referenceSequence) {
-        byte[] normalizedReferenceSequence = new byte[referenceSequence.length];
-        System.arraycopy(referenceSequence,0,normalizedReferenceSequence,0,referenceSequence.length);
-        normalizeReferenceSequence(normalizedReferenceSequence);        
-
-        File annFile,ambFile,pacFile,bwtFile,saFile,rpacFile,rbwtFile,rsaFile;
-        try {
-            // Write the ann and amb for this reference sequence.
-            annFile = File.createTempFile("bwt",".ann");
-            ambFile = File.createTempFile("bwt",".amb");
-
-            SAMSequenceDictionary dictionary = new SAMSequenceDictionary();
-            dictionary.addSequence(new SAMSequenceRecord("autogenerated",normalizedReferenceSequence.length));
-
-            ANNWriter annWriter = new ANNWriter(annFile);
-            annWriter.write(dictionary);
-            annWriter.close();
-
-            AMBWriter ambWriter = new AMBWriter(ambFile);
-            ambWriter.writeEmpty(dictionary);
-            ambWriter.close();
-
-            // Write the encoded files for the forward version of this reference sequence.
-            pacFile = File.createTempFile("bwt",".pac");
-            bwtFile = File.createTempFile("bwt",".bwt");
-            saFile = File.createTempFile("bwt",".sa");
-
-            writeEncodedReferenceSequence(normalizedReferenceSequence,pacFile,bwtFile,saFile);
-
-            // Write the encoded files for the reverse version of this reference sequence.
-            byte[] reverseReferenceSequence = Utils.reverse(normalizedReferenceSequence);
-
-            rpacFile = File.createTempFile("bwt",".rpac");
-            rbwtFile = File.createTempFile("bwt",".rbwt");
-            rsaFile = File.createTempFile("bwt",".rsa");
-
-            writeEncodedReferenceSequence(reverseReferenceSequence,rpacFile,rbwtFile,rsaFile);
-        }
-        catch(IOException ex) {
-            throw new ReviewedGATKException("Unable to write autogenerated reference sequence to temporary files");
-        }
-
-        // Make sure that, at the very least, all temporary files are deleted on exit.
-        annFile.deleteOnExit();
-        ambFile.deleteOnExit();
-        pacFile.deleteOnExit();
-        bwtFile.deleteOnExit();
-        saFile.deleteOnExit();
-        rpacFile.deleteOnExit();
-        rbwtFile.deleteOnExit();
-        rsaFile.deleteOnExit();
-
-        return new BWTFiles(annFile,ambFile,pacFile,bwtFile,saFile,rpacFile,rbwtFile,rsaFile);
-    }
-
-    /**
-     * Write the encoded form of the reference sequence.  In the case of BWA, the encoded reference
-     * sequence is the reference itself in PAC format, the BWT, and the suffix array.
-     * @param referenceSequence The reference sequence to encode.
-     * @param pacFile Target for the PAC-encoded reference.
-     * @param bwtFile Target for the BWT representation of the reference.
-     * @param suffixArrayFile Target for the suffix array encoding of the reference.
-     * @throws java.io.IOException In case of issues writing to the file.
-     */
-    private static void writeEncodedReferenceSequence(byte[] referenceSequence,
-                                               File pacFile,
-                                               File bwtFile,
-                                               File suffixArrayFile) throws IOException {
-        PackUtils.writeReferenceSequence(pacFile,referenceSequence);
-
-        BWT bwt = BWT.createFromReferenceSequence(referenceSequence);
-        BWTWriter bwtWriter = new BWTWriter(bwtFile);
-        bwtWriter.write(bwt);
-        bwtWriter.close();
-
-        SuffixArray suffixArray = SuffixArray.createFromReferenceSequence(referenceSequence);
-        SuffixArrayWriter suffixArrayWriter = new SuffixArrayWriter(suffixArrayFile);
-        suffixArrayWriter.write(suffixArray);
-        suffixArrayWriter.close();
-    }
-
-    /**
-     * Convert the given reference sequence into a form suitable for building into
-     * on-the-fly sequences.
-     * @param referenceSequence The reference sequence to normalize.
-     * @throws org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException if normalized sequence cannot be generated.
-     */
-    private static void normalizeReferenceSequence(byte[] referenceSequence) {
-        StringUtil.toUpperCase(referenceSequence);
-        for(byte base: referenceSequence) {
-            if(base != 'A' && base != 'C' && base != 'G' && base != 'T')
-                throw new ReviewedGATKException(String.format("Base type %c is not supported when building references on-the-fly",(char)base));
-        }
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/alignment/bwa/java/AlignerTestHarness.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/alignment/bwa/java/AlignerTestHarness.java
deleted file mode 100644
index 91e41e5..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/alignment/bwa/java/AlignerTestHarness.java
+++ /dev/null
@@ -1,189 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.alignment.bwa.java;
-
-import htsjdk.samtools.reference.IndexedFastaSequenceFile;
-import htsjdk.samtools.*;
-import org.broadinstitute.gatk.engine.alignment.Aligner;
-import org.broadinstitute.gatk.engine.alignment.Alignment;
-import org.broadinstitute.gatk.utils.BaseUtils;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-
-import java.io.File;
-import java.io.FileNotFoundException;
-
-/**
- * A test harness to ensure that the perfect aligner works.
- *
- * @author mhanna
- * @version 0.1
- */
-public class AlignerTestHarness {
-    public static void main( String argv[] ) throws FileNotFoundException {
-        if( argv.length != 6 ) {
-            System.out.println("PerfectAlignerTestHarness <fasta> <bwt> <rbwt> <sa> <rsa> <bam>");
-            System.exit(1);
-        }
-
-        File referenceFile = new File(argv[0]);
-        File bwtFile = new File(argv[1]);
-        File rbwtFile = new File(argv[2]);
-        File suffixArrayFile = new File(argv[3]);
-        File reverseSuffixArrayFile = new File(argv[4]);
-        File bamFile = new File(argv[5]);
-
-        align(referenceFile,bwtFile,rbwtFile,suffixArrayFile,reverseSuffixArrayFile,bamFile);
-    }
-
-    private static void align(File referenceFile, File bwtFile, File rbwtFile, File suffixArrayFile, File reverseSuffixArrayFile, File bamFile) throws FileNotFoundException {
-        Aligner aligner = new BWAJavaAligner(bwtFile,rbwtFile,suffixArrayFile,reverseSuffixArrayFile);
-        int count = 0;
-
-        SAMFileReader reader = new SAMFileReader(bamFile);
-        reader.setValidationStringency(ValidationStringency.SILENT);
-
-        int mismatches = 0;
-        int failures = 0;
-
-        for(SAMRecord read: reader) {
-            count++;
-            if( count > 200000 ) break;
-            //if( count < 366000 ) continue;
-            //if( count > 2 ) break;
-            //if( !read.getReadName().endsWith("SL-XBC:1:82:506:404#0") )
-            //    continue;
-            //if( !read.getReadName().endsWith("SL-XBC:1:36:30:1926#0") )
-            //    continue;
-            //if( !read.getReadName().endsWith("SL-XBC:1:60:1342:1340#0") )
-            //    continue;
-
-            SAMRecord alignmentCleaned = null;
-            try {
-                alignmentCleaned = (SAMRecord)read.clone();
-            }
-            catch( CloneNotSupportedException ex ) {
-                throw new ReviewedGATKException("SAMRecord clone not supported", ex);
-            }
-
-            if( alignmentCleaned.getReadNegativeStrandFlag() )
-                alignmentCleaned.setReadBases(BaseUtils.simpleReverseComplement(alignmentCleaned.getReadBases()));
-
-            alignmentCleaned.setReferenceIndex(SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX);
-            alignmentCleaned.setAlignmentStart(SAMRecord.NO_ALIGNMENT_START);
-            alignmentCleaned.setMappingQuality(SAMRecord.NO_MAPPING_QUALITY);
-            alignmentCleaned.setCigarString(SAMRecord.NO_ALIGNMENT_CIGAR);
-
-            // Clear everything except flags pertaining to pairing and set 'unmapped' status to true.
-            alignmentCleaned.setFlags(alignmentCleaned.getFlags() & 0x00A1 | 0x000C);
-
-            Iterable<Alignment[]> alignments = aligner.getAllAlignments(alignmentCleaned.getReadBases());
-            if(!alignments.iterator().hasNext() ) {
-                //throw new GATKException(String.format("Unable to align read %s to reference; count = %d",read.getReadName(),count));
-                System.out.printf("Unable to align read %s to reference; count = %d%n",read.getReadName(),count);
-                failures++;
-            }
-
-            Alignment foundAlignment = null;
-            for(Alignment[] alignmentsOfQuality: alignments) {
-                for(Alignment alignment: alignmentsOfQuality) {
-                    if( read.getReadNegativeStrandFlag() != alignment.isNegativeStrand() )
-                        continue;
-                    if( read.getAlignmentStart() != alignment.getAlignmentStart() )
-                        continue;
-
-                    foundAlignment = alignment;                    
-                }
-            }
-
-            if( foundAlignment != null ) {
-                //System.out.printf("%s: Aligned read to reference at position %d with %d mismatches, %d gap opens, and %d gap extensions.%n", read.getReadName(), foundAlignment.getAlignmentStart(), foundAlignment.getMismatches(), foundAlignment.getGapOpens(), foundAlignment.getGapExtensions());
-            }
-            else {
-                System.out.printf("Error aligning read %s%n", read.getReadName());
-
-                mismatches++;
-
-                IndexedFastaSequenceFile reference = new IndexedFastaSequenceFile(referenceFile);
-
-                System.out.printf("read          = %s, position = %d, negative strand = %b%n", formatBasesBasedOnCigar(read.getReadString(),read.getCigar(),CigarOperator.DELETION),
-                                                                                               read.getAlignmentStart(),
-                                                                                               read.getReadNegativeStrandFlag());
-                int numDeletions = numDeletionsInCigar(read.getCigar());
-                String expectedRef = new String(reference.getSubsequenceAt(reference.getSequenceDictionary().getSequences().get(0).getSequenceName(),read.getAlignmentStart(),read.getAlignmentStart()+read.getReadLength()+numDeletions-1).getBases());
-                System.out.printf("expected ref  = %s%n", formatBasesBasedOnCigar(expectedRef,read.getCigar(),CigarOperator.INSERTION));
-
-                for(Alignment[] alignmentsOfQuality: alignments) {
-                    for(Alignment alignment: alignmentsOfQuality) {
-                        System.out.println();
-
-                        Cigar cigar = ((BWAAlignment)alignment).getCigar();
-
-                        System.out.printf("read          = %s%n", formatBasesBasedOnCigar(read.getReadString(),cigar,CigarOperator.DELETION));
-
-                        int deletionCount = ((BWAAlignment)alignment).getNumberOfBasesMatchingState(AlignmentState.DELETION);
-                        String alignedRef = new String(reference.getSubsequenceAt(reference.getSequenceDictionary().getSequences().get(0).getSequenceName(),alignment.getAlignmentStart(),alignment.getAlignmentStart()+read.getReadLength()+deletionCount-1).getBases());
-                        System.out.printf("actual ref    = %s, position = %d, negative strand = %b%n", formatBasesBasedOnCigar(alignedRef,cigar,CigarOperator.INSERTION),
-                                alignment.getAlignmentStart(),
-                                alignment.isNegativeStrand());
-                    }
-                }
-
-                //throw new GATKException(String.format("Read %s was placed at incorrect location; count = %d%n",read.getReadName(),count));                
-            }
-
-
-            if( count % 1000 == 0 )
-                System.out.printf("%d reads examined.%n",count);                
-        }
-
-        System.out.printf("%d reads examined; %d mismatches; %d failures.%n",count,mismatches,failures);
-    }
-
-    private static String formatBasesBasedOnCigar( String bases, Cigar cigar, CigarOperator toBlank ) {
-        StringBuilder formatted = new StringBuilder();
-        int readIndex = 0;
-        for(CigarElement cigarElement: cigar.getCigarElements()) {
-            if(cigarElement.getOperator() == toBlank) {
-                int number = cigarElement.getLength();
-                while( number-- > 0 ) formatted.append(' ');
-            }
-            else {
-                int number = cigarElement.getLength();
-                while( number-- > 0 ) formatted.append(bases.charAt(readIndex++));
-            }
-        }
-        return formatted.toString();
-    }
-
-    private static int numDeletionsInCigar( Cigar cigar ) {
-        int numDeletions = 0;
-        for(CigarElement cigarElement: cigar.getCigarElements()) {
-            if(cigarElement.getOperator() == CigarOperator.DELETION)
-                numDeletions += cigarElement.getLength();
-        }
-        return numDeletions;
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/alignment/bwa/java/AlignmentMatchSequence.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/alignment/bwa/java/AlignmentMatchSequence.java
deleted file mode 100644
index f1148c6..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/alignment/bwa/java/AlignmentMatchSequence.java
+++ /dev/null
@@ -1,175 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.alignment.bwa.java;
-
-import htsjdk.samtools.Cigar;
-import htsjdk.samtools.CigarElement;
-import htsjdk.samtools.CigarOperator;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-
-import java.util.ArrayDeque;
-import java.util.Deque;
-import java.util.Iterator;
-
-/**
- * Represents a sequence of matches.
- *
- * @author mhanna
- * @version 0.1
- */
-public class AlignmentMatchSequence implements Cloneable {
-    /**
-     * Stores the particular match entries in the order they occur.
-     */
-    private Deque<AlignmentMatchSequenceEntry> entries = new ArrayDeque<AlignmentMatchSequenceEntry>();
-
-    /**
-     * Clone the given match sequence.
-     * @return A deep copy of the current match sequence.
-     */
-    public AlignmentMatchSequence clone() {
-        AlignmentMatchSequence copy = null;
-        try {
-            copy = (AlignmentMatchSequence)super.clone(); 
-        }
-        catch( CloneNotSupportedException ex ) {
-            throw new ReviewedGATKException("Unable to clone AlignmentMatchSequence.");
-        }
-
-        copy.entries = new ArrayDeque<AlignmentMatchSequenceEntry>();
-        for( AlignmentMatchSequenceEntry entry: entries )
-            copy.entries.add(entry.clone());
-
-        return copy;
-    }
-
-    public Cigar convertToCigar(boolean negativeStrand) {
-        Cigar cigar = new Cigar();
-        Iterator<AlignmentMatchSequenceEntry> iterator = negativeStrand ? entries.descendingIterator() : entries.iterator();
-        while( iterator.hasNext() ) {
-            AlignmentMatchSequenceEntry entry = iterator.next();
-            CigarOperator operator;
-            switch( entry.getAlignmentState() ) {
-                case MATCH_MISMATCH: operator = CigarOperator.MATCH_OR_MISMATCH; break;
-                case INSERTION: operator = CigarOperator.INSERTION; break;
-                case DELETION: operator = CigarOperator.DELETION; break;
-                default: throw new ReviewedGATKException("convertToCigar: cannot process state: " + entry.getAlignmentState());
-            }
-            cigar.add( new CigarElement(entry.count,operator) );
-        }
-        return cigar;
-    }
-
-    /**
-     * All a new alignment of the given state.
-     * @param state State to add to the sequence.
-     */
-    public void addNext( AlignmentState state ) {
-        AlignmentMatchSequenceEntry last = entries.peekLast();
-        // If the last entry is the same as this one, increment it.  Otherwise, add a new entry.
-        if( last != null && last.alignmentState == state )
-            last.increment();
-        else
-            entries.add(new AlignmentMatchSequenceEntry(state));
-    }
-
-    /**
-     * Gets the current state of this alignment (what's the state of the last base?)
-     * @return State of the most recently aligned base.
-     */
-    public AlignmentState getCurrentState() {
-        if( entries.size() == 0 )
-            return AlignmentState.MATCH_MISMATCH;        
-        return entries.peekLast().getAlignmentState();
-    }
-
-    /**
-     * How many bases in the read match the given state.
-     * @param state State to test.
-     * @return number of bases which match that state.
-     */
-    public int getNumberOfBasesMatchingState(AlignmentState state) {
-        int matches = 0;
-        for( AlignmentMatchSequenceEntry entry: entries ) {
-            if( entry.getAlignmentState() == state )
-                matches += entry.count;
-        }
-        return matches;
-    }
-
-    /**
-     * Stores an individual match sequence entry.
-     */
-    private class AlignmentMatchSequenceEntry implements Cloneable {
-        /**
-         * The state of the alignment throughout a given point in the sequence.
-         */
-        private final AlignmentState alignmentState;
-
-        /**
-         * The number of bases having this particular state.
-         */
-        private int count;
-
-        /**
-         * Create a new sequence entry with the given state.
-         * @param alignmentState The state that this sequence should contain.
-         */
-        AlignmentMatchSequenceEntry( AlignmentState alignmentState ) {
-            this.alignmentState = alignmentState;
-            this.count = 1;
-        }
-
-        /**
-         * Clone the given match sequence entry.
-         * @return A deep copy of the current match sequence entry.
-         */
-        public AlignmentMatchSequenceEntry clone() {
-            try {
-                return (AlignmentMatchSequenceEntry)super.clone(); 
-            }
-            catch( CloneNotSupportedException ex ) {
-                throw new ReviewedGATKException("Unable to clone AlignmentMatchSequenceEntry.");
-            }
-        }
-
-        /**
-         * Retrieves the current state of the alignment.
-         * @return The state of the current sequence.
-         */
-        AlignmentState getAlignmentState() {
-            return alignmentState;
-        }
-
-        /**
-         * Increment the count of alignments having this particular state.
-         */
-        void increment() {
-            count++;
-        }
-    }
-}
-
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/alignment/bwa/java/AlignmentState.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/alignment/bwa/java/AlignmentState.java
deleted file mode 100644
index f4ba1bb..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/alignment/bwa/java/AlignmentState.java
+++ /dev/null
@@ -1,38 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.alignment.bwa.java;
-
-/**
- * The state of a given base in the alignment.
- *
- * @author mhanna
- * @version 0.1
- */
-public enum AlignmentState {
-    MATCH_MISMATCH,
-    INSERTION,
-    DELETION
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/alignment/bwa/java/BWAAlignment.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/alignment/bwa/java/BWAAlignment.java
deleted file mode 100644
index 88ef4a5..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/alignment/bwa/java/BWAAlignment.java
+++ /dev/null
@@ -1,215 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.alignment.bwa.java;
-
-import htsjdk.samtools.Cigar;
-import org.broadinstitute.gatk.engine.alignment.Alignment;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-
-/**
- * An alignment object to be used incrementally as the BWA aligner
- * inspects the read.
- *
- * @author mhanna
- * @version 0.1
- */
-public class BWAAlignment extends Alignment implements Cloneable {
-    /**
-     * Track the number of alignments that have been created.
-     */
-    private static long numCreated;
-
-    /**
-     * Which number alignment is this?
-     */
-    private long creationNumber;
-
-    /**
-     * The aligner performing the alignments.
-     */
-    protected BWAJavaAligner aligner;
-
-    /**
-     * The sequence of matches/mismatches/insertions/deletions.
-     */
-    private AlignmentMatchSequence alignmentMatchSequence = new AlignmentMatchSequence();
-
-    /**
-     * Working variable.  How many bases have been matched at this point.
-     */
-    protected int position;
-
-    /**
-     * Working variable.  How many mismatches have been encountered at this point.
-     */
-    private int mismatches;
-
-    /**
-     * Number of gap opens in alignment.
-     */
-    private int gapOpens;
-
-    /**
-     * Number of gap extensions in alignment.
-     */
-    private int gapExtensions;
-
-    /**
-     * Working variable.  The lower bound of the alignment within the BWT.
-     */
-    protected long loBound;
-
-    /**
-     * Working variable.  The upper bound of the alignment within the BWT.
-     */
-    protected long hiBound;
-
-    protected void setAlignmentStart(long position) {
-        this.alignmentStart = position;
-    }
-
-    protected void setNegativeStrand(boolean negativeStrand) {
-        this.negativeStrand = negativeStrand;
-    }
-
-    /**
-     * Cache the score.
-     */
-    private int score;
-
-    public Cigar getCigar() {
-        return alignmentMatchSequence.convertToCigar(isNegativeStrand());
-    }
-
-    /**
-     * Gets the current state of this alignment (state of the last base viewed)..
-     * @return Current state of the alignment.
-     */
-    public AlignmentState getCurrentState() {
-        return alignmentMatchSequence.getCurrentState();
-    }
-
-    /**
-     * Adds the given state to the current alignment.
-     * @param state State to add to the given alignment.
-     */
-    public void addState( AlignmentState state ) {
-        alignmentMatchSequence.addNext(state);    
-    }
-
-    /**
-     * Gets the BWA score of this alignment.
-     * @return BWA-style scores.  0 is best.
-     */
-    public int getScore() {
-        return score;
-    }
-
-    public int getMismatches() { return mismatches; }
-    public int getGapOpens() { return gapOpens; }
-    public int getGapExtensions() { return gapExtensions; }
-
-    public void incrementMismatches() {
-        this.mismatches++;
-        updateScore();
-    }
-
-    public void incrementGapOpens() {
-        this.gapOpens++;
-        updateScore();
-    }
-
-    public void incrementGapExtensions() {
-        this.gapExtensions++;
-        updateScore();
-    }
-
-    /**
-     * Updates the score based on new information about matches / mismatches.
-     */
-    private void updateScore() {
-        score = mismatches*aligner.MISMATCH_PENALTY + gapOpens*aligner.GAP_OPEN_PENALTY + gapExtensions*aligner.GAP_EXTENSION_PENALTY;
-    }
-
-    /**
-     * Create a new alignment with the given parent aligner.
-     * @param aligner Aligner being used.
-     */
-    public BWAAlignment( BWAJavaAligner aligner ) {
-        this.aligner = aligner;
-        this.creationNumber = numCreated++;
-    }
-
-    /**
-     * Clone the alignment.
-     * @return New instance of the alignment.
-     */
-    public BWAAlignment clone() {
-        BWAAlignment newAlignment = null;
-        try {
-            newAlignment = (BWAAlignment)super.clone();
-        }
-        catch( CloneNotSupportedException ex ) {
-            throw new ReviewedGATKException("Unable to clone BWAAlignment.");
-        }
-        newAlignment.creationNumber = numCreated++;
-        newAlignment.alignmentMatchSequence = alignmentMatchSequence.clone();
-
-        return newAlignment;
-    }
-
-    /**
-     * How many bases in the read match the given state.
-     * @param state State to test.
-     * @return number of bases which match that state.
-     */
-    public int getNumberOfBasesMatchingState(AlignmentState state) {
-        return alignmentMatchSequence.getNumberOfBasesMatchingState(state);
-    }
-
-    /**
-     * Compare this alignment to another alignment.
-     * @param rhs Other alignment to which to compare.
-     * @return < 0 if this < other, == 0 if this == other, > 0 if this > other
-     */
-    public int compareTo(Alignment rhs) {
-        BWAAlignment other = (BWAAlignment)rhs;
-
-        // If the scores are different, disambiguate using the score.
-        if(score != other.score)
-            return score > other.score ? 1 : -1;
-
-        // Otherwise, use the order in which the elements were created.
-        if(creationNumber != other.creationNumber)
-            return creationNumber > other.creationNumber ? -1 : 1;
-
-        return 0;
-    }
-
-    public String toString() {
-        return String.format("position: %d, strand: %b, state: %s, mismatches: %d, gap opens: %d, gap extensions: %d, loBound: %d, hiBound: %d, score: %d, creationNumber: %d", position, negativeStrand, alignmentMatchSequence.getCurrentState(), mismatches, gapOpens, gapExtensions, loBound, hiBound, getScore(), creationNumber);
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/alignment/bwa/java/BWAJavaAligner.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/alignment/bwa/java/BWAJavaAligner.java
deleted file mode 100644
index 09a5b45..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/alignment/bwa/java/BWAJavaAligner.java
+++ /dev/null
@@ -1,418 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.alignment.bwa.java;
-
-import htsjdk.samtools.SAMFileHeader;
-import htsjdk.samtools.SAMRecord;
-import org.broadinstitute.gatk.engine.alignment.Alignment;
-import org.broadinstitute.gatk.engine.alignment.bwa.BWAAligner;
-import org.broadinstitute.gatk.engine.alignment.bwa.BWAConfiguration;
-import org.broadinstitute.gatk.engine.alignment.reference.bwt.*;
-import org.broadinstitute.gatk.utils.BaseUtils;
-import org.broadinstitute.gatk.utils.Utils;
-
-import java.io.File;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.PriorityQueue;
-
-/**
- * Create imperfect alignments from the read to the genome represented by the given BWT / suffix array. 
- *
- * @author mhanna
- * @version 0.1
- */
-public class BWAJavaAligner extends BWAAligner {
-    /**
-     * BWT in the forward direction.
-     */
-    private BWT forwardBWT;
-
-    /**
-     * BWT in the reverse direction.
-     */
-    private BWT reverseBWT;
-
-    /**
-     * Suffix array in the forward direction.
-     */
-    private SuffixArray forwardSuffixArray;
-
-    /**
-     * Suffix array in the reverse direction.
-     */
-    private SuffixArray reverseSuffixArray;
-
-    /**
-     * Maximum edit distance (-n option from original BWA).
-     */
-    private final int MAXIMUM_EDIT_DISTANCE = 4;
-
-    /**
-     * Maximum number of gap opens (-o option from original BWA).
-     */
-    private final int MAXIMUM_GAP_OPENS = 1;
-
-    /**
-     * Maximum number of gap extensions (-e option from original BWA).
-     */
-    private final int MAXIMUM_GAP_EXTENSIONS = 6;
-
-    /**
-     * Penalty for straight mismatches (-M option from original BWA).
-     */
-    public final int MISMATCH_PENALTY = 3;
-
-    /**
-     * Penalty for gap opens (-O option from original BWA).
-     */
-    public final int GAP_OPEN_PENALTY = 11;
-
-    /**
-     * Penalty for gap extensions (-E option from original BWA).
-     */
-    public final int GAP_EXTENSION_PENALTY = 4;
-
-    /**
-     * Skip the ends of indels.
-     */
-    public final int INDEL_END_SKIP = 5;
-
-    public BWAJavaAligner( File forwardBWTFile, File reverseBWTFile, File forwardSuffixArrayFile, File reverseSuffixArrayFile ) {
-        super(null,null);
-        forwardBWT = new BWTReader(forwardBWTFile).read();
-        reverseBWT = new BWTReader(reverseBWTFile).read();
-        forwardSuffixArray = new SuffixArrayReader(forwardSuffixArrayFile,forwardBWT).read();
-        reverseSuffixArray = new SuffixArrayReader(reverseSuffixArrayFile,reverseBWT).read();
-    }
-
-    /**
-     * Close this instance of the BWA pointer and delete its resources.
-     */
-    @Override
-    public void close()  {
-        throw new UnsupportedOperationException("BWA aligner can't currently be closed.");
-    }
-
-    /**
-     * Update the current parameters of this aligner.
-     * @param configuration New configuration to set.
-     */
-    public void updateConfiguration(BWAConfiguration configuration) {
-        throw new UnsupportedOperationException("Configuration of the BWA aligner can't currently be changed.");
-    }
-
-    /**
-     * Allow the aligner to choose one alignment randomly from the pile of best alignments.
-     * @param bases Bases to align.
-     * @return An align
-     */
-    public Alignment getBestAlignment(final byte[] bases) { throw new UnsupportedOperationException("BWAJavaAligner does not yet support the standard Aligner interface."); }
-
-    /**
-     * Align the read to the reference.
-     * @param read Read to align.
-     * @param header Optional header to drop in place.
-     * @return A list of the alignments.
-     */
-    public SAMRecord align(final SAMRecord read, final SAMFileHeader header) { throw new UnsupportedOperationException("BWAJavaAligner does not yet support the standard Aligner interface."); }
-
-    /**
-     * Get a iterator of alignments, batched by mapping quality.
-     * @param bases List of bases.
-     * @return Iterator to alignments.
-     */
-    public Iterable<Alignment[]> getAllAlignments(final byte[] bases) { throw new UnsupportedOperationException("BWAJavaAligner does not yet support the standard Aligner interface."); }
-
-    /**
-     * Get a iterator of aligned reads, batched by mapping quality.
-     * @param read Read to align.
-     * @param newHeader Optional new header to use when aligning the read.  If present, it must be null.
-     * @return Iterator to alignments.
-     */
-    public Iterable<SAMRecord[]> alignAll(final SAMRecord read, final SAMFileHeader newHeader) { throw new UnsupportedOperationException("BWAJavaAligner does not yet support the standard Aligner interface."); }
-
-
-    public List<Alignment> align( SAMRecord read ) {
-        List<Alignment> successfulMatches = new ArrayList<Alignment>();
-
-        Byte[] uncomplementedBases = normalizeBases(read.getReadBases());
-        Byte[] complementedBases = normalizeBases(Utils.reverse(BaseUtils.simpleReverseComplement(read.getReadBases())));
-
-        List<LowerBound> forwardLowerBounds = LowerBound.create(uncomplementedBases,forwardBWT);
-        List<LowerBound> reverseLowerBounds = LowerBound.create(complementedBases,reverseBWT);
-
-        // Seed the best score with any score that won't overflow on comparison.
-        int bestScore = Integer.MAX_VALUE - MISMATCH_PENALTY;
-        int bestDiff = MAXIMUM_EDIT_DISTANCE+1;
-        int maxDiff = MAXIMUM_EDIT_DISTANCE;
-
-        PriorityQueue<BWAAlignment> alignments = new PriorityQueue<BWAAlignment>();
-
-        // Create a fictional initial alignment, with the position just off the end of the read, and the limits
-        // set as the entire BWT.
-        alignments.add(createSeedAlignment(reverseBWT));
-        alignments.add(createSeedAlignment(forwardBWT));
-
-        while(!alignments.isEmpty()) {
-            BWAAlignment alignment = alignments.remove();
-
-            // From bwtgap.c in the original BWT; if the rank is worse than the best score + the mismatch PENALTY, move on.
-            if( alignment.getScore() > bestScore + MISMATCH_PENALTY )
-                break;
-
-            Byte[] bases = alignment.isNegativeStrand() ? complementedBases : uncomplementedBases;
-            BWT bwt = alignment.isNegativeStrand() ? forwardBWT : reverseBWT;
-            List<LowerBound> lowerBounds = alignment.isNegativeStrand() ? reverseLowerBounds : forwardLowerBounds;
-
-            // if z < D(i) then return {}
-            int mismatches = maxDiff - alignment.getMismatches() - alignment.getGapOpens() - alignment.getGapExtensions();
-            if( alignment.position < lowerBounds.size()-1 && mismatches < lowerBounds.get(alignment.position+1).value )
-                continue;
-
-            if(mismatches == 0) {
-                exactMatch(alignment,bases,bwt);
-                if(alignment.loBound > alignment.hiBound)
-                    continue;
-            }
-
-            // Found a valid alignment; store it and move on.
-            if(alignment.position >= read.getReadLength()-1) {
-                for(long bwtIndex = alignment.loBound; bwtIndex <= alignment.hiBound; bwtIndex++) {
-                    BWAAlignment finalAlignment = alignment.clone();
-
-                    if( finalAlignment.isNegativeStrand() )
-                        finalAlignment.setAlignmentStart(forwardSuffixArray.get(bwtIndex) + 1);
-                    else {
-                        int sizeAlongReference = read.getReadLength() -
-                                finalAlignment.getNumberOfBasesMatchingState(AlignmentState.INSERTION) +
-                                finalAlignment.getNumberOfBasesMatchingState(AlignmentState.DELETION);
-                        finalAlignment.setAlignmentStart(reverseBWT.length() - reverseSuffixArray.get(bwtIndex) - sizeAlongReference + 1);
-                    }
-
-                    successfulMatches.add(finalAlignment);
-
-                    bestScore = Math.min(finalAlignment.getScore(),bestScore);
-                    bestDiff = Math.min(finalAlignment.getMismatches()+finalAlignment.getGapOpens()+finalAlignment.getGapExtensions(),bestDiff);
-                    maxDiff = bestDiff + 1;
-                }
-
-                continue;
-            }
-
-            //System.out.printf("Processing alignments; queue size = %d, alignment = %s, bound = %d, base = %s%n", alignments.size(), alignment, lowerBounds.get(alignment.position+1).value, alignment.position >= 0 ? (char)bases[alignment.position].byteValue() : "");
-            /*
-            System.out.printf("#1\t[%d,%d,%d,%c]\t[%d,%d,%d]\t[%d,%d]\t[%d,%d]%n",alignments.size(),
-                                                        alignment.negativeStrand?1:0,
-                                                        bases.length-alignment.position-1,
-                                                        alignment.getCurrentState().toString().charAt(0),
-                                                        alignment.getMismatches(),
-                                                        alignment.getGapOpens(),
-                                                        alignment.getGapExtensions(),
-                                                        lowerBounds.get(alignment.position+1).value,
-                                                        lowerBounds.get(alignment.position+1).width,
-                                                        alignment.loBound,
-                                                        alignment.hiBound);
-                                                        */
-
-            // Temporary -- look ahead to see if the next alignment is bounded.
-            boolean allowDifferences = mismatches > 0;
-            boolean allowMismatches = mismatches > 0;
-
-            if( allowDifferences &&
-                alignment.position+1 >= INDEL_END_SKIP-1+alignment.getGapOpens()+alignment.getGapExtensions() &&
-                read.getReadLength()-1-(alignment.position+1) >= INDEL_END_SKIP+alignment.getGapOpens()+alignment.getGapExtensions() ) {
-                if( alignment.getCurrentState() == AlignmentState.MATCH_MISMATCH ) {
-                    if( alignment.getGapOpens() < MAXIMUM_GAP_OPENS ) {
-                        // Add a potential insertion extension.
-                        BWAAlignment insertionAlignment = createInsertionAlignment(alignment);
-                        insertionAlignment.incrementGapOpens();
-                        alignments.add(insertionAlignment);
-
-                        // Add a potential deletion by marking a deletion and augmenting the position.
-                        List<BWAAlignment> deletionAlignments = createDeletionAlignments(bwt,alignment);
-                        for( BWAAlignment deletionAlignment: deletionAlignments )
-                            deletionAlignment.incrementGapOpens();
-                        alignments.addAll(deletionAlignments);
-                    }
-                }
-                else if( alignment.getCurrentState() == AlignmentState.INSERTION ) {
-                    if( alignment.getGapExtensions() < MAXIMUM_GAP_EXTENSIONS && mismatches > 0 ) {
-                        // Add a potential insertion extension.
-                        BWAAlignment insertionAlignment = createInsertionAlignment(alignment);
-                        insertionAlignment.incrementGapExtensions();
-                        alignments.add(insertionAlignment);
-                    }
-                }
-                else if( alignment.getCurrentState() == AlignmentState.DELETION ) {
-                    if( alignment.getGapExtensions() < MAXIMUM_GAP_EXTENSIONS && mismatches > 0 ) {
-                        // Add a potential deletion by marking a deletion and augmenting the position.
-                        List<BWAAlignment> deletionAlignments = createDeletionAlignments(bwt,alignment);
-                        for( BWAAlignment deletionAlignment: deletionAlignments )
-                            deletionAlignment.incrementGapExtensions();
-                        alignments.addAll(deletionAlignments);
-                    }
-                }
-            }
-
-            // Mismatches
-            alignments.addAll(createMatchedAlignments(bwt,alignment,bases,allowDifferences&&allowMismatches));
-        }
-
-        return successfulMatches;
-    }
-
-    /**
-     * Create an seeding alignment to use as a starting point when traversing.
-     * @param bwt source BWT.
-     * @return Seed alignment.
-     */
-    private BWAAlignment createSeedAlignment(BWT bwt) {
-        BWAAlignment seed = new BWAAlignment(this);
-        seed.setNegativeStrand(bwt == forwardBWT);
-        seed.position = -1;
-        seed.loBound = 0;
-        seed.hiBound = bwt.length();
-        return seed;
-    }
-
-    /**
-     * Creates a new alignments representing direct matches / mismatches.
-     * @param bwt Source BWT with which to work.
-     * @param alignment Alignment for the previous position.
-     * @param bases The bases in the read.
-     * @param allowMismatch Should mismatching bases be allowed?
-     * @return New alignment representing this position if valid; null otherwise.
-     */
-    private List<BWAAlignment> createMatchedAlignments( BWT bwt, BWAAlignment alignment, Byte[] bases, boolean allowMismatch ) {
-        List<BWAAlignment> newAlignments = new ArrayList<BWAAlignment>();
-
-        List<Byte> baseChoices = new ArrayList<Byte>();
-        Byte thisBase = bases[alignment.position+1];
-
-        if( allowMismatch )
-            baseChoices.addAll(Bases.allOf());
-        else
-            baseChoices.add(thisBase);
-
-        if( thisBase != null ) {
-            // Keep rotating the current base to the last position until we've hit the current base.
-            for( ;; ) {
-                baseChoices.add(baseChoices.remove(0));
-                if( thisBase.equals(baseChoices.get(baseChoices.size()-1)) )
-                    break;
-
-            }
-        }
-
-        for(byte base: baseChoices) {
-            BWAAlignment newAlignment = alignment.clone();
-
-            newAlignment.loBound = bwt.counts(base) + bwt.occurrences(base,alignment.loBound-1) + 1;
-            newAlignment.hiBound = bwt.counts(base) + bwt.occurrences(base,alignment.hiBound);
-
-            // If this alignment is valid, skip it.
-            if( newAlignment.loBound > newAlignment.hiBound )
-                continue;
-
-            newAlignment.position++;
-            newAlignment.addState(AlignmentState.MATCH_MISMATCH);
-            if( bases[newAlignment.position] == null || base != bases[newAlignment.position] )
-                newAlignment.incrementMismatches();
-
-            newAlignments.add(newAlignment);
-        }
-
-        return newAlignments;
-    }
-
-    /**
-     * Create a new alignment representing an insertion at this point in the read.
-     * @param alignment Alignment from which to derive the insertion.
-     * @return New alignment reflecting the insertion.
-     */
-    private BWAAlignment createInsertionAlignment( BWAAlignment alignment ) {
-        // Add a potential insertion extension.
-        BWAAlignment newAlignment = alignment.clone();
-        newAlignment.position++;
-        newAlignment.addState(AlignmentState.INSERTION);
-        return newAlignment;
-    }
-
-    /**
-     * Create new alignments representing a deletion at this point in the read.
-     * @param bwt source BWT for inferring deletion info.
-     * @param alignment Alignment from which to derive the deletion.
-     * @return New alignments reflecting all possible deletions.
-     */
-    private List<BWAAlignment> createDeletionAlignments( BWT bwt, BWAAlignment alignment) {
-        List<BWAAlignment> newAlignments = new ArrayList<BWAAlignment>();
-        for(byte base: Bases.instance) {
-            BWAAlignment newAlignment = alignment.clone();
-
-            newAlignment.loBound = bwt.counts(base) + bwt.occurrences(base,alignment.loBound-1) + 1;
-            newAlignment.hiBound = bwt.counts(base) + bwt.occurrences(base,alignment.hiBound);
-
-            // If this alignment is valid, skip it.
-            if( newAlignment.loBound > newAlignment.hiBound )
-                continue;
-
-            newAlignment.addState(AlignmentState.DELETION);
-
-            newAlignments.add(newAlignment);
-        }
-
-        return newAlignments;
-    }
-
-    /**
-     * Exactly match the given alignment against the given BWT.
-     * @param alignment Alignment to match.
-     * @param bases Bases to use.
-     * @param bwt BWT to use.
-     */
-    private void exactMatch( BWAAlignment alignment, Byte[] bases, BWT bwt ) {
-        while( ++alignment.position < bases.length ) {
-            byte base = bases[alignment.position];
-            alignment.loBound = bwt.counts(base) + bwt.occurrences(base,alignment.loBound-1) + 1;
-            alignment.hiBound = bwt.counts(base) + bwt.occurrences(base,alignment.hiBound);
-            if( alignment.loBound > alignment.hiBound )
-                return;
-        }
-    }
-
-    /**
-     * Make each base into A/C/G/T or null if unknown.
-     * @param bases Base string to normalize.
-     * @return Array of normalized bases.
-     */
-    private Byte[] normalizeBases( byte[] bases ) {
-        Byte[] normalBases = new Byte[bases.length];
-        for(int i = 0; i < bases.length; i++)
-            normalBases[i] = Bases.fromASCII(bases[i]);
-        return normalBases;
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/alignment/bwa/java/LowerBound.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/alignment/bwa/java/LowerBound.java
deleted file mode 100644
index e5e292b..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/alignment/bwa/java/LowerBound.java
+++ /dev/null
@@ -1,113 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.alignment.bwa.java;
-
-import org.broadinstitute.gatk.engine.alignment.reference.bwt.BWT;
-
-import java.util.ArrayList;
-import java.util.List;
-
-/**
- * At any point along the given read, what is a good lower bound for the
- * total number of differences?
- *
- * @author mhanna
- * @version 0.1
- */
-public class LowerBound {
-    /**
-     * Lower bound of the suffix array.
-     */
-    public final long loIndex;
-
-    /**
-     * Upper bound of the suffix array.
-     */
-    public final long hiIndex;
-
-    /**
-     * Width of the bwt from loIndex -> hiIndex, inclusive.
-     */
-    public final long width;
-
-    /**
-     * The lower bound at the given point.
-     */
-    public final int value;
-
-    /**
-     * Create a new lower bound with the given value.
-     * @param loIndex The lower bound of the BWT.
-     * @param hiIndex The upper bound of the BWT.
-     * @param value Value for the lower bound at this site.
-     */
-    private LowerBound(long loIndex, long hiIndex, int value) {
-        this.loIndex = loIndex;
-        this.hiIndex = hiIndex;
-        this.width = hiIndex - loIndex + 1;
-        this.value = value;
-    }
-
-    /**
-     * Create a non-optimal bound according to the algorithm specified in Figure 3 of the BWA paper.
-     * @param bases Bases of the read to use when creating a new BWT.
-     * @param bwt BWT to check against.
-     * @return A list of lower bounds at every point in the reference.
-     *
-     */
-    public static List<LowerBound> create(Byte[] bases, BWT bwt) {
-        List<LowerBound> bounds = new ArrayList<LowerBound>();
-
-        long loIndex = 0, hiIndex = bwt.length();
-        int mismatches = 0;
-        for( int i = bases.length-1; i >= 0; i-- ) {
-            Byte base = bases[i];
-
-            // Ignore non-ACGT bases.
-            if( base != null ) {
-                loIndex = bwt.counts(base) + bwt.occurrences(base,loIndex-1) + 1;
-                hiIndex = bwt.counts(base) + bwt.occurrences(base,hiIndex);            
-            }
-
-            if( base == null || loIndex > hiIndex ) {
-                loIndex = 0;
-                hiIndex = bwt.length();
-                mismatches++;
-            }
-            bounds.add(0,new LowerBound(loIndex,hiIndex,mismatches));
-        }
-
-        return bounds;
-    }
-
-    /**
-     * Create a string representation of this bound.
-     * @return String version of this bound.
-     */
-    public String toString() {
-        return String.format("LowerBound: w = %d, value = %d",width,value);
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/alignment/package-info.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/alignment/package-info.java
deleted file mode 100644
index 5a6d70d..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/alignment/package-info.java
+++ /dev/null
@@ -1,26 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.alignment;
\ No newline at end of file
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/alignment/reference/bwt/AMBWriter.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/alignment/reference/bwt/AMBWriter.java
deleted file mode 100644
index b090bab..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/alignment/reference/bwt/AMBWriter.java
+++ /dev/null
@@ -1,93 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.alignment.reference.bwt;
-
-import htsjdk.samtools.SAMSequenceDictionary;
-import htsjdk.samtools.SAMSequenceRecord;
-
-import java.io.File;
-import java.io.IOException;
-import java.io.OutputStream;
-import java.io.PrintStream;
-
-/**
- * Writes .amb files - a file indicating where 'holes' (indeterminant bases)
- * exist in the contig.  Currently, only empty, placeholder AMBs are supported.
- *
- * @author mhanna
- * @version 0.1
- */
public class AMBWriter {
    /**
     * Number of holes is fixed at zero; only empty, placeholder AMB files are supported.
     */
    private static final int NUM_HOLES = 0;

    /**
     * Output stream to which AMB data is written.
     */
    private final PrintStream out;

    /**
     * Create a new AMBWriter targeting the given file.
     * @param file file into which AMB data should be written.
     * @throws java.io.IOException if there is a problem opening the output file.
     */
    public AMBWriter(File file) throws IOException {
        out = new PrintStream(file);
    }

    /**
     * Create a new AMBWriter targeting the given OutputStream.
     * @param stream Stream into which AMB data should be written.
     */
    public AMBWriter(OutputStream stream)  {
        out = new PrintStream(stream);
    }

    /**
     * Write the contents of the given dictionary into the AMB file.
     * Assumes that there are no holes in the dictionary, so only the
     * header line is emitted.
     * @param dictionary Dictionary to write.
     */
    public void writeEmpty(SAMSequenceDictionary dictionary) {
        long genomeLength = 0L;
        for(SAMSequenceRecord sequence: dictionary.getSequences())
            genomeLength += sequence.getSequenceLength();

        int sequences = dictionary.getSequences().size();

        // Write the header: total genome length, number of sequences, and the fixed hole count.
        out.printf("%d %d %d%n",genomeLength,sequences,NUM_HOLES);
    }

    /**
     * Close the underlying output stream.
     */
    public void close() {
        out.close();
    }
}
\ No newline at end of file
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/alignment/reference/bwt/ANNWriter.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/alignment/reference/bwt/ANNWriter.java
deleted file mode 100644
index 123b3ca..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/alignment/reference/bwt/ANNWriter.java
+++ /dev/null
@@ -1,120 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.alignment.reference.bwt;
-
-import htsjdk.samtools.SAMSequenceDictionary;
-import htsjdk.samtools.SAMSequenceRecord;
-
-import java.io.File;
-import java.io.IOException;
-import java.io.OutputStream;
-import java.io.PrintStream;
-
-/**
- * Writes .ann files - an alternate sequence dictionary format
- * used by BWA/C.  For best results, the input sequence dictionary
- * should be created with Picard's CreateSequenceDictionary.jar,
- * TRUNCATE_NAMES_AT_WHITESPACE=false.
- *
- * @author mhanna
- * @version 0.1
- */
public class ANNWriter {
    /**
     * BWA uses a fixed seed of 11, written into every file.
     */
    private static final int BNS_SEED = 11;

    /**
     * A seemingly unused value that appears in every contig in the ANN.
     */
    private static final int GI = 0;

    /**
     * Output stream to which ANN data is written.
     */
    private final PrintStream out;

    /**
     * Create a new ANNWriter targeting the given file.
     * @param file file into which ANN data should be written.
     * @throws IOException if there is a problem opening the output file.
     */
    public ANNWriter(File file) throws IOException {
        out = new PrintStream(file);
    }

    /**
     * Create a new ANNWriter targeting the given OutputStream.
     * @param stream Stream into which ANN data should be written.
     */
    public ANNWriter(OutputStream stream)  {
        out = new PrintStream(stream);
    }

    /**
     * Write the contents of the given dictionary into the ANN file.
     * Assumes that no ambs (blocks of indeterminate base) are present in the dictionary.
     * @param dictionary Dictionary to write.
     */
    public void write(SAMSequenceDictionary dictionary) {
        long genomeLength = 0L;
        for(SAMSequenceRecord sequence: dictionary.getSequences())
            genomeLength += sequence.getSequenceLength();

        int sequences = dictionary.getSequences().size();

        // Write the header: total genome length, sequence count, and BWA's fixed seed.
        out.printf("%d %d %d%n",genomeLength,sequences,BNS_SEED);

        for(SAMSequenceRecord sequence: dictionary.getSequences()) {
            String fullSequenceName = sequence.getSequenceName();
            String trimmedSequenceName = fullSequenceName;
            String sequenceComment = "(null)";

            // NOTE(review): offset is written as 0 for every contig; BWA's .ann format
            // appears to store a cumulative per-contig offset — confirm against BWA's
            // bns_dump output before relying on multi-contig files produced here.
            long offset = 0;

            // Separate the sequence name from the sequence comment, based on BWA's definition.
            // BWA's definition appears to accept a zero-length contig name, so mimic that behavior.
            if(fullSequenceName.indexOf(' ') >= 0) {
                trimmedSequenceName = fullSequenceName.substring(0,fullSequenceName.indexOf(' '));
                sequenceComment = fullSequenceName.substring(fullSequenceName.indexOf(' ')+1);
            }

            // Write the sequence GI (?), name, and comment.
            out.printf("%d %s %s%n",GI,trimmedSequenceName,sequenceComment);
            // Write the sequence offset, length, and ambs (currently fixed at 0).
            out.printf("%d %d %d%n",offset,sequence.getSequenceLength(),0);
        }
    }

    /**
     * Close the underlying output stream.
     */
    public void close() {
        out.close();
    }
}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/alignment/reference/bwt/BWT.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/alignment/reference/bwt/BWT.java
deleted file mode 100644
index 7d0c43b..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/alignment/reference/bwt/BWT.java
+++ /dev/null
@@ -1,197 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.alignment.reference.bwt;
-
-import org.broadinstitute.gatk.engine.alignment.reference.packing.PackUtils;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-
-/**
- * Represents the Burrows-Wheeler Transform of a reference sequence.
- *
- * @author mhanna
- * @version 0.1
- */
public class BWT {
    /**
     * Write an occurrence table after every SEQUENCE_BLOCK_SIZE bases.
     * For this implementation to behave correctly, SEQUENCE_BLOCK_SIZE % 8 == 0
     */
    public static final int SEQUENCE_BLOCK_SIZE = 128;

    /**
     * The inverse SA, used as a placeholder for determining where the special EOL character sits.
     * Indices at or above this value are shifted down by one when mapped into the stored sequence.
     */
    protected final long inverseSA0;

    /**
     * Cumulative counts for the entire BWT.
     */
    protected final Counts counts;

    /**
     * The individual sequence blocks, modelling how they appear on disk.
     * Each block carries the occurrence counts accumulated before its first base.
     */
    protected final SequenceBlock[] sequenceBlocks;

    /**
     * Creates a new BWT with the given inverse SA, counts, and sequence (in ASCII).
     * @param inverseSA0 Inverse SA entry for the first element.  Will be missing from the BWT sequence.
     * @param counts Cumulative count of bases, in A,C,G,T order.
     * @param sequenceBlocks The full BWT sequence, sans the '$'.
     */
    public BWT( long inverseSA0, Counts counts, SequenceBlock[] sequenceBlocks ) {
        this.inverseSA0 = inverseSA0;
        this.counts = counts;
        this.sequenceBlocks = sequenceBlocks;
    }

    /**
     * Creates a new BWT with the given inverse SA, occurrences, and sequence (in ASCII).
     * The sequence is partitioned into blocks of SEQUENCE_BLOCK_SIZE bases.
     * @param inverseSA0 Inverse SA entry for the first element.  Will be missing from the BWT sequence.
     * @param counts Count of bases, in A,C,G,T order.
     * @param sequence The full BWT sequence, sans the '$'.
     */
    public BWT( long inverseSA0, Counts counts, byte[] sequence ) {
        this(inverseSA0,counts,generateSequenceBlocks(sequence));
    }

    /**
     * Extract the full sequence from the list of block.
     * @return The full BWT string as a byte array.
     */
    public byte[] getSequence() {
        byte[] sequence = new byte[(int)counts.getTotal()];
        for( SequenceBlock block: sequenceBlocks )
            System.arraycopy(block.sequence,0,sequence,block.sequenceStart,block.sequenceLength);
        return sequence;
    }

    /**
     * Get the total counts of bases lexicographically smaller than the given base, for Ferragina and Manzini's search.
     * @param base The base.
     * @return Total counts for all bases lexicographically smaller than this base.
     */
    public long counts(byte base) {
        return counts.getCumulative(base);
    }

    /**
     * Count the occurrences of the given base within the BWT at positions up to and
     * including the given index, for Ferragina and Manzini's search.  Starts from the
     * occurrence table stored with the enclosing block and scans the remainder linearly.
     * @param base The base.
     * @param index The position to search within the BWT (inclusive).
     * @return Number of occurrences of the base at positions &lt;= index.
     */
    public long occurrences(byte base,long index) {
        SequenceBlock block = getSequenceBlock(index);
        int position = getSequencePosition(index);
        long accumulator = block.occurrences.get(base);
        for(int i = 0; i <= position; i++) {
            if(base == block.sequence[i])
                accumulator++;
        }
        return accumulator;
    }

    /**
     * The number of bases in the BWT as a whole.
     * @return Number of bases.
     */
    public long length() {
        return counts.getTotal();
    }

    /**
     * Create a new BWT from the given reference sequence.
     * Builds the suffix array, then emits the base preceding each suffix
     * (skipping the suffix that starts at position 0, which has no predecessor).
     * @param referenceSequence Sequence from which to derive the BWT.
     * @return reference sequence-derived BWT.
     */
    public static BWT createFromReferenceSequence(byte[] referenceSequence) {
        SuffixArray suffixArray = SuffixArray.createFromReferenceSequence(referenceSequence);

        byte[] bwt = new byte[(int)suffixArray.length()-1];
        int bwtIndex = 0;
        for(long suffixArrayIndex = 0; suffixArrayIndex < suffixArray.length(); suffixArrayIndex++) {
            if(suffixArray.get(suffixArrayIndex) == 0)
                continue;
            bwt[bwtIndex++] = referenceSequence[(int)suffixArray.get(suffixArrayIndex)-1];
        }

        return new BWT(suffixArray.inverseSA0,suffixArray.occurrences,bwt);
    }

    /**
     * Gets the base at a given position in the BWT.
     * @param index The index to use.
     * @return The base at that location.
     * @throws ReviewedGATKException if the index addresses the '$' placeholder, which has no base.
     */
    protected byte getBase(long index) {
        if(index == inverseSA0)
            throw new ReviewedGATKException(String.format("Base at index %d does not have a text representation",index));

        SequenceBlock block = getSequenceBlock(index);
        int position = getSequencePosition(index);
        return block.sequence[position];
    }

    // Locate the on-disk block containing the given BWT index.
    private SequenceBlock getSequenceBlock(long index) {
        // If the index is above the SA-1[0], remap it to the appropriate coordinate space.
        if(index > inverseSA0) index--;
        return sequenceBlocks[(int)(index/SEQUENCE_BLOCK_SIZE)];
    }

    // Locate the offset of the given BWT index within its block.
    private int getSequencePosition(long index) {
        // If the index is above the SA-1[0], remap it to the appropriate coordinate space.
        if(index > inverseSA0) index--;
        return (int)(index%SEQUENCE_BLOCK_SIZE);
    }

    /**
     * Create a set of sequence blocks from one long sequence.
     * Each block stores a snapshot of the occurrence counts accumulated
     * before its first base, enabling fast occurrences() lookups.
     * @param sequence Sequence from which to derive blocks.
     * @return Array of sequence blocks containing data from the sequence.
     */
    private static SequenceBlock[] generateSequenceBlocks( byte[] sequence ) {
        Counts occurrences = new Counts();

        int numSequenceBlocks = PackUtils.numberOfPartitions(sequence.length,SEQUENCE_BLOCK_SIZE);
        SequenceBlock[] sequenceBlocks = new SequenceBlock[numSequenceBlocks];

        for( int block = 0; block < numSequenceBlocks; block++ ) {
            int blockStart = block*SEQUENCE_BLOCK_SIZE;
            int blockLength = Math.min(SEQUENCE_BLOCK_SIZE, sequence.length-blockStart);
            byte[] subsequence = new byte[blockLength];

            System.arraycopy(sequence,blockStart,subsequence,0,blockLength);

            sequenceBlocks[block] = new SequenceBlock(blockStart,blockLength,occurrences.clone(),subsequence);

            for( byte base: subsequence )
                occurrences.increment(base);
        }

        return sequenceBlocks;
    }
}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/alignment/reference/bwt/BWTReader.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/alignment/reference/bwt/BWTReader.java
deleted file mode 100644
index 9b28e2a..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/alignment/reference/bwt/BWTReader.java
+++ /dev/null
@@ -1,114 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.alignment.reference.bwt;
-
-import org.broadinstitute.gatk.engine.alignment.reference.packing.BasePackedInputStream;
-import org.broadinstitute.gatk.engine.alignment.reference.packing.PackUtils;
-import org.broadinstitute.gatk.engine.alignment.reference.packing.UnsignedIntPackedInputStream;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileNotFoundException;
-import java.io.IOException;
-import java.nio.ByteOrder;
-/**
- * Reads a BWT from a given file.
- *
- * @author mhanna
- * @version 0.1
- */
-public class BWTReader {
-    /**
-     * Input stream from which to read BWT data.
-     */
-    private FileInputStream inputStream;
-
-    /**
-     * Create a new BWT reader.
-     * @param inputFile File in which the BWT is stored.
-     */
-    public BWTReader( File inputFile ) {
-        try {
-            this.inputStream = new FileInputStream(inputFile);
-        }
-        catch( FileNotFoundException ex ) {
-            throw new ReviewedGATKException("Unable to open input file", ex);
-        }
-    }
-
-    /**
-     * Read a BWT from the input stream.
-     * @return The BWT stored in the input stream.
-     */
-    public BWT read() {
-        UnsignedIntPackedInputStream uintPackedInputStream = new UnsignedIntPackedInputStream(inputStream, ByteOrder.LITTLE_ENDIAN);
-        BasePackedInputStream basePackedInputStream = new BasePackedInputStream<Integer>(Integer.class, inputStream, ByteOrder.LITTLE_ENDIAN);
-
-        long inverseSA0;
-        long[] count;
-        SequenceBlock[] sequenceBlocks;
-
-        try {
-            inverseSA0 = uintPackedInputStream.read();
-            count = new long[PackUtils.ALPHABET_SIZE];
-            uintPackedInputStream.read(count);
-
-            long bwtSize = count[PackUtils.ALPHABET_SIZE-1];
-            sequenceBlocks = new SequenceBlock[PackUtils.numberOfPartitions(bwtSize,BWT.SEQUENCE_BLOCK_SIZE)];
-            
-            for( int block = 0; block < sequenceBlocks.length; block++ ) {
-                int sequenceStart = block* BWT.SEQUENCE_BLOCK_SIZE;
-                int sequenceLength = (int)Math.min(BWT.SEQUENCE_BLOCK_SIZE,bwtSize-sequenceStart);
-
-                long[] occurrences = new long[PackUtils.ALPHABET_SIZE];
-                byte[] bwt = new byte[sequenceLength];
-
-                uintPackedInputStream.read(occurrences);
-                basePackedInputStream.read(bwt);
-
-                sequenceBlocks[block] = new SequenceBlock(sequenceStart,sequenceLength,new Counts(occurrences,false),bwt);
-            }
-        }
-        catch( IOException ex ) {
-            throw new ReviewedGATKException("Unable to read BWT from input stream.", ex);
-        }
-
-        return new BWT(inverseSA0, new Counts(count,true), sequenceBlocks);
-    }
-
-    /**
-     * Close the input stream.
-     */
-    public void close() {
-        try {
-            inputStream.close();
-        }
-        catch( IOException ex ) {
-            throw new ReviewedGATKException("Unable to close input file", ex);
-        }
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/alignment/reference/bwt/BWTSupplementaryFileGenerator.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/alignment/reference/bwt/BWTSupplementaryFileGenerator.java
deleted file mode 100644
index e60a78b..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/alignment/reference/bwt/BWTSupplementaryFileGenerator.java
+++ /dev/null
@@ -1,85 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.alignment.reference.bwt;
-
-import htsjdk.samtools.reference.ReferenceSequenceFile;
-import htsjdk.samtools.reference.ReferenceSequenceFileFactory;
-import htsjdk.samtools.SAMSequenceDictionary;
-
-import java.io.File;
-import java.io.IOException;
-
-/**
- * Generate BWA supplementary files (.ann, .amb) from the command line.
- *
- * @author mhanna
- * @version 0.1
- */
-public class BWTSupplementaryFileGenerator {
-    enum SupplementaryFileType { ANN, AMB } 
-
-    public static void main(String[] args) throws IOException {
-        if(args.length < 3)
-            usage("Incorrect number of arguments supplied");
-
-        File fastaFile = new File(args[0]);
-        File outputFile = new File(args[1]);
-        SupplementaryFileType outputType = null;
-        try {
-            outputType = Enum.valueOf(SupplementaryFileType.class,args[2]);
-        }
-        catch(IllegalArgumentException ex) {
-            usage("Invalid output type: " + args[2]);
-        }
-
-        ReferenceSequenceFile sequenceFile = ReferenceSequenceFileFactory.getReferenceSequenceFile(fastaFile);
-        SAMSequenceDictionary dictionary = sequenceFile.getSequenceDictionary();
-
-        switch(outputType) {
-            case ANN:
-                ANNWriter annWriter = new ANNWriter(outputFile);
-                annWriter.write(dictionary);
-                annWriter.close();
-                break;
-            case AMB:
-                AMBWriter ambWriter = new AMBWriter(outputFile);
-                ambWriter.writeEmpty(dictionary);
-                ambWriter.close();
-                break;
-            default:
-                usage("Unsupported output type: " + outputType);
-        }
-    }
-
-    /**
-     * Print usage information and exit.
-     */
-    private static void usage(String message) {
-        System.err.println(message);
-        System.err.println("Usage: BWTSupplementaryFileGenerator <fasta> <output file> <output type>");
-        System.exit(1);
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/alignment/reference/bwt/BWTWriter.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/alignment/reference/bwt/BWTWriter.java
deleted file mode 100644
index 4f2a159..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/alignment/reference/bwt/BWTWriter.java
+++ /dev/null
@@ -1,96 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.alignment.reference.bwt;
-
-import org.broadinstitute.gatk.engine.alignment.reference.packing.BasePackedOutputStream;
-import org.broadinstitute.gatk.engine.alignment.reference.packing.UnsignedIntPackedOutputStream;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-
-import java.io.*;
-import java.nio.ByteOrder;
-
-/**
- * Writes an in-memory BWT to an outputstream.
- *
- * @author mhanna
- * @version 0.1
- */
-public class BWTWriter {
-    /**
-     * Input stream from which to read BWT data.
-     */
-    private final OutputStream outputStream;
-
-    /**
-     * Create a new BWT writer.
-     * @param outputFile File in which the BWT is stored.
-     */
-    public BWTWriter( File outputFile ) {
-        try {
-            this.outputStream = new BufferedOutputStream(new FileOutputStream(outputFile));
-        }
-        catch( FileNotFoundException ex ) {
-            throw new ReviewedGATKException("Unable to open output file", ex);
-        }
-    }
-
-    /**
-     * Write a BWT to the output stream.
-     * @param bwt Transform to be written to the output stream.
-     */
-    public void write( BWT bwt ) {
-        UnsignedIntPackedOutputStream intPackedOutputStream = new UnsignedIntPackedOutputStream(outputStream, ByteOrder.LITTLE_ENDIAN);
-        BasePackedOutputStream basePackedOutputStream = new BasePackedOutputStream<Integer>(Integer.class, outputStream, ByteOrder.LITTLE_ENDIAN);
-
-        try {
-            intPackedOutputStream.write(bwt.inverseSA0);
-            intPackedOutputStream.write(bwt.counts.toArray(true));
-
-            for( SequenceBlock block: bwt.sequenceBlocks ) {
-                intPackedOutputStream.write(block.occurrences.toArray(false));
-                basePackedOutputStream.write(block.sequence);
-            }
-
-            // The last block is the last set of counts in the structure.
-            intPackedOutputStream.write(bwt.counts.toArray(false));
-        }
-        catch( IOException ex ) {
-            throw new ReviewedGATKException("Unable to read BWT from input stream.", ex);
-        }
-    }
-
-    /**
-     * Close the input stream.
-     */
-    public void close() {
-        try {
-            outputStream.close();
-        }
-        catch( IOException ex ) {
-            throw new ReviewedGATKException("Unable to close input file", ex);
-        }
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/alignment/reference/bwt/Bases.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/alignment/reference/bwt/Bases.java
deleted file mode 100644
index d0cd849..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/alignment/reference/bwt/Bases.java
+++ /dev/null
@@ -1,133 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.alignment.reference.bwt;
-
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-
-import java.util.*;
-
-/**
- * Enhanced enum representation of a base.
- *
- * @author mhanna
- * @version 0.1
- */
-public class Bases implements Iterable<Byte>
-{
-    public static final byte A = 'A';
-    public static final byte C = 'C';
-    public static final byte G = 'G';
-    public static final byte T = 'T';
-
-    public static final Bases instance = new Bases();
-
-    private static final List<Byte> allBases;
-
-    /**
-     * Representation of the base broken down by packed value.
-     */
-    private static final Map<Integer,Byte> basesByPack = new HashMap<Integer,Byte>();
-
-    static {
-        List<Byte> bases = new ArrayList<Byte>();
-        bases.add(A);
-        bases.add(C);
-        bases.add(G);
-        bases.add(T);
-        allBases = Collections.unmodifiableList(bases);
-
-        for(int i = 0; i < allBases.size(); i++)
-            basesByPack.put(i,allBases.get(i));
-    }
-
-    /**
-     * Create a new base with the given ascii representation and
-     * pack value.
-     */
-    private Bases() {
-    }
-
-    /**
-     * Return all possible bases.
-     * @return Byte representation of all bases.
-     */
-    public static Collection<Byte> allOf() {
-        return allBases;
-    }
-
-    /**
-     * Gets the number of known bases.
-     * @return The number of known bases.
-     */
-    public static int size() {
-        return allBases.size();
-    }
-
-    /**
-     * Gets an iterator over the total number of known base types.
-     * @return Iterator over all known bases.
-     */
-    public Iterator<Byte> iterator() {
-        return basesByPack.values().iterator();
-    }
-
-    /**
-     * Get the given base from the packed representation.
-     * @param pack Packed representation.
-     * @return base.
-     */
-    public static byte fromPack( int pack ) { return basesByPack.get(pack); }
-
-    /**
-     * Convert the given base to its packed value.
-     * @param ascii ASCII representation of the base.
-     * @return Packed value.
-     */
-    public static int toPack( byte ascii )
-    {
-        for( Map.Entry<Integer,Byte> entry: basesByPack.entrySet() ) {
-            if( entry.getValue().equals(ascii) )
-                return entry.getKey();
-        }
-        throw new ReviewedGATKException(String.format("Base %c is an invalid base to pack", (char)ascii));
-    }
-
-    /**
-     * Convert the ASCII representation of a base to its 'normalized' representation.
-     * @param base The base itself.
-     * @return The byte, if present.  Null if unknown.
-     */
-    public static Byte fromASCII( byte base ) {
-        Byte found = null;
-        for( Byte normalized: allBases ) {
-            if( normalized.equals(base) ) {
-                found = normalized;
-                break;
-            }
-        }
-        return found;
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/alignment/reference/bwt/Counts.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/alignment/reference/bwt/Counts.java
deleted file mode 100644
index c6684b5..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/alignment/reference/bwt/Counts.java
+++ /dev/null
@@ -1,176 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.alignment.reference.bwt;
-
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-
-import java.util.HashMap;
-import java.util.Map;
-
-/**
- * Counts of how many bases of each type have been seen.
- *
- * @author mhanna
- * @version 0.1
- */
-public class Counts implements Cloneable {
-    /**
-     * Internal representation of counts, broken down by ASCII value.
-     */
-    private Map<Byte,Long> counts = new HashMap<Byte,Long>();
-
-    /**
-     * Internal representation of cumulative counts, broken down by ASCII value.
-     */
-    private Map<Byte,Long> cumulativeCounts = new HashMap<Byte,Long>();
-
-    /**
-     * Create an empty Counts object with values A=0,C=0,G=0,T=0.
-     */
-    public Counts()
-    {
-        for(byte base: Bases.instance) {
-            counts.put(base,0L);
-            cumulativeCounts.put(base,0L);
-        }
-    }
-
-    /**
-     * Create a counts data structure with the given initial values. 
-     * @param data Count data, broken down by base.
-     * @param cumulative Whether the counts are cumulative, (count_G=numA+numC+numG,for example).
-     */
-    public Counts( long[] data, boolean cumulative ) {
-        if(cumulative) {
-            long priorCount = 0;
-            for(byte base: Bases.instance) {
-                long count = data[Bases.toPack(base)];
-                counts.put(base,count-priorCount);
-                cumulativeCounts.put(base,priorCount);
-                priorCount = count;
-            }
-        }
-        else {
-            long priorCount = 0;
-            for(byte base: Bases.instance) {
-                long count = data[Bases.toPack(base)];
-                counts.put(base,count);
-                cumulativeCounts.put(base,priorCount);
-                priorCount += count;
-            }
-        }
-    }
-
-    /**
-     * Convert to an array for persistence.
-     * @param cumulative Use a cumulative representation.
-     * @return Array of count values.
-     */
-    public long[] toArray(boolean cumulative) {
-        long[] countArray = new long[counts.size()];
-        if(cumulative) {
-            int index = 0;
-            boolean first = true;
-            for(byte base: Bases.instance) {
-                if(first) {
-                    first = false;
-                    continue;
-                }
-                countArray[index++] = getCumulative(base);
-            }
-            countArray[countArray.length-1] = getTotal();
-        }
-        else {
-            int index = 0;
-            for(byte base: Bases.instance)
-                countArray[index++] = counts.get(base);
-        }
-        return countArray;
-    }
-
-    /**
-     * Create a unique copy of the current object.
-     * @return A duplicate of this object.
-     */
-    public Counts clone() {
-        Counts other;
-        try {
-            other = (Counts)super.clone();
-        }
-        catch(CloneNotSupportedException ex) {
-            throw new ReviewedGATKException("Unable to clone counts object", ex);
-        }
-        other.counts = new HashMap<Byte,Long>(counts);
-        other.cumulativeCounts = new HashMap<Byte,Long>(cumulativeCounts);
-        return other;
-    }
-
-    /**
-     * Increment the number of bases seen at the given location.
-     * @param base Base to increment.
-     */
-    public void increment(byte base) {
-        counts.put(base,counts.get(base)+1);
-        boolean increment = false;
-        for(byte cumulative: Bases.instance) {
-            if(increment) cumulativeCounts.put(cumulative,cumulativeCounts.get(cumulative)+1);
-            increment |= (cumulative == base);
-        }
-    }
-
-    /**
-     * Gets a count of the number of bases seen at a given location.
-     * Note that counts in this case are not cumulative (counts for A,C,G,T
-     * are independent).
-     * @param base Base for which to query counts.
-     * @return Number of bases of this type seen.
-     */
-    public long get(byte base) {
-        return counts.get(base);
-    }
-
-    /**
-     * Gets a count of the number of bases seen before this base.
-     * Note that counts in this case are cumulative.
-     * @param base Base for which to query counts.
-     * @return Number of bases of this type seen.
-     */
-    public long getCumulative(byte base) {
-        return cumulativeCounts.get(base);
-    }
-
-    /**
-     * How many total bases are represented by this count structure?
-     * @return Total bases represented.
-     */
-    public long getTotal() {
-        int accumulator = 0;
-        for(byte base: Bases.instance) {
-            accumulator += get(base);    
-        }
-        return accumulator;
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/alignment/reference/bwt/CreateBWTFromReference.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/alignment/reference/bwt/CreateBWTFromReference.java
deleted file mode 100644
index baa5ebe..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/alignment/reference/bwt/CreateBWTFromReference.java
+++ /dev/null
@@ -1,200 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.alignment.reference.bwt;
-
-import htsjdk.samtools.reference.ReferenceSequence;
-import htsjdk.samtools.reference.ReferenceSequenceFile;
-import htsjdk.samtools.reference.ReferenceSequenceFileFactory;
-import org.broadinstitute.gatk.engine.alignment.reference.packing.PackUtils;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-
-import java.io.File;
-import java.io.IOException;
-
-/**
- * Create a suffix array data structure.
- *
- * @author mhanna
- * @version 0.1
- */
-public class CreateBWTFromReference {
-    private byte[] loadReference( File inputFile ) {
-        // Read in the first sequence in the input file
-        ReferenceSequenceFile reference = ReferenceSequenceFileFactory.getReferenceSequenceFile(inputFile);
-        ReferenceSequence sequence = reference.nextSequence();
-        return sequence.getBases();
-    }
-
-    private byte[] loadReverseReference( File inputFile ) {
-        ReferenceSequenceFile reference = ReferenceSequenceFileFactory.getReferenceSequenceFile(inputFile);
-        ReferenceSequence sequence = reference.nextSequence();
-        PackUtils.reverse(sequence.getBases());
-        return sequence.getBases();
-    }
-
-    private Counts countOccurrences( byte[] sequence ) {
-        Counts occurrences = new Counts();
-        for( byte base: sequence )
-            occurrences.increment(base);
-        return occurrences;
-    }
-
-    private long[] createSuffixArray( byte[] sequence ) {
-        return SuffixArray.createFromReferenceSequence(sequence).sequence;
-    }
-
-    private long[] invertSuffixArray( long[] suffixArray ) {
-        long[] inverseSuffixArray = new long[suffixArray.length];
-        for( int i = 0; i < suffixArray.length; i++ )
-            inverseSuffixArray[(int)suffixArray[i]] = i;
-        return inverseSuffixArray;
-    }
-
-    private long[] createCompressedSuffixArray( int[] suffixArray, int[] inverseSuffixArray ) {
-        long[] compressedSuffixArray = new long[suffixArray.length];
-        compressedSuffixArray[0] = inverseSuffixArray[0];
-        for( int i = 1; i < suffixArray.length; i++ )
-            compressedSuffixArray[i] = inverseSuffixArray[suffixArray[i]+1];
-        return compressedSuffixArray;
-    }
-
-    private long[] createInversedCompressedSuffixArray( int[] compressedSuffixArray ) {
-        long[] inverseCompressedSuffixArray = new long[compressedSuffixArray.length];
-        for( int i = 0; i < compressedSuffixArray.length; i++ )
-            inverseCompressedSuffixArray[compressedSuffixArray[i]] = i;
-        return inverseCompressedSuffixArray;
-    }
-
-    public static void main( String argv[] ) throws IOException {
-        if( argv.length != 5 ) {
-            System.out.println("USAGE: CreateBWTFromReference <input>.fasta <output bwt> <output rbwt> <output sa> <output rsa>");
-            return;
-        }
-
-        String inputFileName = argv[0];
-        File inputFile = new File(inputFileName);
-
-        String bwtFileName = argv[1];
-        File bwtFile = new File(bwtFileName);
-
-        String rbwtFileName = argv[2];
-        File rbwtFile = new File(rbwtFileName);
-
-        String saFileName = argv[3];
-        File saFile = new File(saFileName);
-
-        String rsaFileName = argv[4];
-        File rsaFile = new File(rsaFileName);
-
-        CreateBWTFromReference creator = new CreateBWTFromReference();
-
-        byte[] sequence = creator.loadReference(inputFile);
-        byte[] reverseSequence = creator.loadReverseReference(inputFile);
-
-        // Count the occurences of each given base.
-        Counts occurrences = creator.countOccurrences(sequence);
-        System.out.printf("Occurrences: a=%d, c=%d, g=%d, t=%d%n",occurrences.getCumulative(Bases.A),
-                                                                  occurrences.getCumulative(Bases.C),
-                                                                  occurrences.getCumulative(Bases.G),
-                                                                  occurrences.getCumulative(Bases.T));
-
-        // Generate the suffix array and print diagnostics.
-        long[] suffixArrayData = creator.createSuffixArray(sequence);
-        long[] reverseSuffixArrayData = creator.createSuffixArray(reverseSequence);
-
-        // Invert the suffix array and print diagnostics.
-        long[] inverseSuffixArray = creator.invertSuffixArray(suffixArrayData);
-        long[] reverseInverseSuffixArray = creator.invertSuffixArray(reverseSuffixArrayData);
-
-        SuffixArray suffixArray = new SuffixArray( inverseSuffixArray[0], occurrences, suffixArrayData );
-        SuffixArray reverseSuffixArray = new SuffixArray( reverseInverseSuffixArray[0], occurrences, reverseSuffixArrayData );
-
-        /*
-        // Create the data structure for the compressed suffix array and print diagnostics.
-        int[] compressedSuffixArray = creator.createCompressedSuffixArray(suffixArray.sequence,inverseSuffixArray);
-        int reconstructedInverseSA = compressedSuffixArray[0];
-        for( int i = 0; i < 8; i++ ) {
-            System.out.printf("compressedSuffixArray[%d] = %d (SA-1[%d] = %d)%n", i, compressedSuffixArray[i], i, reconstructedInverseSA);
-            reconstructedInverseSA = compressedSuffixArray[reconstructedInverseSA];
-        }
-
-        // Create the data structure for the inverse compressed suffix array and print diagnostics.
-        int[] inverseCompressedSuffixArray = creator.createInversedCompressedSuffixArray(compressedSuffixArray);
-        for( int i = 0; i < 8; i++ ) {
-            System.out.printf("inverseCompressedSuffixArray[%d] = %d%n", i, inverseCompressedSuffixArray[i]);
-        }
-        */
-
-        // Create the BWT.
-        BWT bwt = BWT.createFromReferenceSequence(sequence);
-        BWT reverseBWT = BWT.createFromReferenceSequence(reverseSequence);
-
-        byte[] bwtSequence = bwt.getSequence();
-        System.out.printf("BWT: %s... (length = %d)%n", new String(bwtSequence,0,80),bwt.length());
-
-        BWTWriter bwtWriter = new BWTWriter(bwtFile);
-        bwtWriter.write(bwt);
-        bwtWriter.close();
-
-        BWTWriter reverseBWTWriter = new BWTWriter(rbwtFile);
-        reverseBWTWriter.write(reverseBWT);
-        reverseBWTWriter.close();
-
-        /*
-        SuffixArrayWriter saWriter = new SuffixArrayWriter(saFile);
-        saWriter.write(suffixArray);
-        saWriter.close();
-
-        SuffixArrayWriter reverseSAWriter = new SuffixArrayWriter(rsaFile);
-        reverseSAWriter.write(reverseSuffixArray);
-        reverseSAWriter.close();
-        */
-
-        File existingBWTFile = new File(inputFileName+".bwt");
-        BWTReader existingBWTReader = new BWTReader(existingBWTFile);
-        BWT existingBWT = existingBWTReader.read();
-
-        byte[] existingBWTSequence = existingBWT.getSequence();
-        System.out.printf("Existing BWT: %s... (length = %d)%n",new String(existingBWTSequence,0,80),existingBWT.length());
-
-        for( int i = 0; i < bwt.length(); i++ ) {
-            if( bwtSequence[i] != existingBWTSequence[i] )
-                throw new ReviewedGATKException("BWT mismatch at " + i);
-        }
-
-        File existingSAFile = new File(inputFileName+".sa");
-        SuffixArrayReader existingSuffixArrayReader = new SuffixArrayReader(existingSAFile,existingBWT);
-        SuffixArray existingSuffixArray = existingSuffixArrayReader.read();
-
-        for(int i = 0; i < suffixArray.length(); i++) {
-            if( i % 10000 == 0 )
-                System.out.printf("Validating suffix array entry %d%n", i);
-            if( suffixArray.get(i) != existingSuffixArray.get(i) )
-                throw new ReviewedGATKException(String.format("Suffix array mismatch at %d; SA is %d; should be %d",i,existingSuffixArray.get(i),suffixArray.get(i)));
-        }
-    }
-
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/alignment/reference/bwt/SequenceBlock.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/alignment/reference/bwt/SequenceBlock.java
deleted file mode 100644
index 555e7cc..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/alignment/reference/bwt/SequenceBlock.java
+++ /dev/null
@@ -1,66 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.alignment.reference.bwt;
-
-/**
- * Models a block of bases within the BWT.
- */
-public class SequenceBlock {
-    /**
-     * Start position of this sequence within the BWT.
-     */
-    public final int sequenceStart;
-
-    /**
-     * Length of this sequence within the BWT.
-     */
-    public final int sequenceLength;
-
-
-    /**
-     * Occurrences of each letter up to this sequence block.
-     */
-    public final Counts occurrences;
-
-    /**
-     * Sequence for this segment.
-     */
-    public final byte[] sequence;
-
-    /**
-     * Create a new block within this BWT.
-     * @param sequenceStart Starting position of this sequence within the BWT.
-     * @param sequenceLength Length of this sequence.
-     * @param occurrences How many of each base has been seen before this sequence began.
-     * @param sequence The actual sequence from the BWT.
-     */
-    public SequenceBlock( int sequenceStart, int sequenceLength, Counts occurrences, byte[] sequence ) {
-        this.sequenceStart = sequenceStart;
-        this.sequenceLength = sequenceLength;
-        this.occurrences = occurrences;
-        this.sequence = sequence;
-    }
-}
\ No newline at end of file
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/alignment/reference/bwt/SuffixArray.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/alignment/reference/bwt/SuffixArray.java
deleted file mode 100644
index d1edfe5..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/alignment/reference/bwt/SuffixArray.java
+++ /dev/null
@@ -1,183 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.alignment.reference.bwt;
-
-import htsjdk.samtools.util.StringUtil;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-
-import java.util.Comparator;
-import java.util.TreeSet;
-
-/**
- * An in-memory representation of a suffix array.
- *
- * @author mhanna
- * @version 0.1
- */
-public class SuffixArray {
-    public final long inverseSA0;
-    public final Counts occurrences;
-
-    /**
-     * The elements of the sequence actually stored in memory.
-     */
-    protected final long[] sequence;
-
-    /**
-     * How often are individual elements in the sequence actually stored
-     * in memory, as opposed to being calculated on the fly?
-     */
-    protected final int sequenceInterval;
-
-    /**
-     * The BWT used to calculate missing portions of the sequence.
-     */
-    protected final BWT bwt;
-
-    public SuffixArray(long inverseSA0, Counts occurrences, long[] sequence) {
-        this(inverseSA0,occurrences,sequence,1,null);
-    }
-
-    /**
-     * Creates a new sequence array with the given inverse SA, occurrences, and values.
-     * @param inverseSA0 Inverse SA entry for the first element.
-     * @param occurrences Cumulative number of occurrences of A,C,G,T, in order.
-     * @param sequence The full suffix array.
-     * @param sequenceInterval How frequently is the sequence interval stored.
-     * @param bwt bwt used to infer the remaining entries in the BWT.
-     */
-    public SuffixArray(long inverseSA0, Counts occurrences, long[] sequence, int sequenceInterval, BWT bwt) {
-        this.inverseSA0 = inverseSA0;
-        this.occurrences = occurrences;
-        this.sequence = sequence;
-        this.sequenceInterval = sequenceInterval;
-        this.bwt = bwt;
-
-        if(sequenceInterval != 1 && bwt == null)
-            throw new ReviewedGATKException("A BWT must be provided if the sequence interval is not 1");
-    }
-
-    /**
-     * Retrieves the length of the sequence array.
-     * @return Length of the suffix array.
-     */
-    public long length() {
-        if( bwt != null )
-            return bwt.length()+1;
-        else
-            return sequence.length;
-    }
-
-    /**
-     * Get the suffix array value at a given sequence.
-     * @param index Index at which to retrieve the suffix array vaule.
-     * @return The suffix array value at that entry.
-     */
-    public long get(long index) {
-        int iterations = 0;
-        while(index%sequenceInterval != 0) {
-            // The inverseSA0 ('$') doesn't have a usable ASCII representation; it must be treated as a special case.
-            if(index == inverseSA0)
-                index = 0;
-            else {
-                byte base = bwt.getBase(index);
-                index = bwt.counts(base) + bwt.occurrences(base,index);
-            }
-            iterations++;
-        }
-        return (sequence[(int)(index/sequenceInterval)]+iterations) % length();
-    }
-
-    /**
-     * Create a suffix array from a given reference sequence.
-     * @param sequence The reference sequence to use when building the suffix array.
-     * @return a constructed suffix array.
-     */
-    public static SuffixArray createFromReferenceSequence(byte[] sequence) {
-        // The builder for the suffix array.  Use an integer in this case because
-        // Java arrays can only hold an integer.
-        TreeSet<Integer> suffixArrayBuilder = new TreeSet<Integer>(new SuffixArrayComparator(sequence));
-
-        Counts occurrences = new Counts();
-        for( byte base: sequence )
-            occurrences.increment(base);
-
-        // Build out the suffix array using a custom comparator.
-        for( int i = 0; i <= sequence.length; i++ )
-            suffixArrayBuilder.add(i);
-
-        // Copy the suffix array into an array.
-        long[] suffixArray = new long[suffixArrayBuilder.size()];
-        int i = 0;
-        for( Integer element: suffixArrayBuilder )
-            suffixArray[i++] = element;
-
-        // Find the first element in the inverse suffix array.
-        long inverseSA0 = -1;
-        for(i = 0; i < suffixArray.length; i++) {
-            if(suffixArray[i] == 0)
-                inverseSA0 = i;
-        }
-        if(inverseSA0 < 0)
-            throw new ReviewedGATKException("Unable to find first inverse SA entry in generated suffix array.");
-
-        return new SuffixArray(inverseSA0,occurrences,suffixArray);
-    }    
-
-    /**
-     * Compares two suffix arrays of the given sequence.  Will return whichever string appears
-     * first in lexicographic order.
-     */
-    private static class SuffixArrayComparator implements Comparator<Integer> {
-        /**
-         * The data source for all suffix arrays.
-         */
-        private final String sequence;
-
-        /**
-         * Create a new comparator.
-         * @param sequence Reference sequence to use as basis for comparison.
-         */
-        public SuffixArrayComparator( byte[] sequence ) {
-            // Processing the suffix array tends to be easier as a string.
-            this.sequence = StringUtil.bytesToString(sequence);
-        }
-
-        /**
-         * Compare the two given suffix arrays.  Criteria for comparison is the lexicographic order of
-         * the two substrings sequence[lhs:], sequence[rhs:].
-         * @param lhs Left-hand side of comparison.
-         * @param rhs Right-hand side of comparison.
-         * @return How the suffix arrays represented by lhs, rhs compare.
-         */
-        public int compare( Integer lhs, Integer rhs ) {
-            String lhsSuffixArray = sequence.substring(lhs);
-            String rhsSuffixArray = sequence.substring(rhs);
-            return lhsSuffixArray.compareTo(rhsSuffixArray);
-        }
-    }
-
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/alignment/reference/bwt/SuffixArrayReader.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/alignment/reference/bwt/SuffixArrayReader.java
deleted file mode 100644
index dc8cdc0..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/alignment/reference/bwt/SuffixArrayReader.java
+++ /dev/null
@@ -1,110 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.alignment.reference.bwt;
-
-import org.broadinstitute.gatk.engine.alignment.reference.packing.PackUtils;
-import org.broadinstitute.gatk.engine.alignment.reference.packing.UnsignedIntPackedInputStream;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileNotFoundException;
-import java.io.IOException;
-import java.nio.ByteOrder;
-
-/**
- * A reader for suffix arrays in permanent storage.
- *
- * @author mhanna
- * @version 0.1
- */
-public class SuffixArrayReader {
-    /**
-     * Input stream from which to read suffix array data.
-     */
-    private FileInputStream inputStream;
-
-    /**
-     * BWT to use to fill in missing data.
-     */
-    private BWT bwt;
-
-    /**
-     * Create a new suffix array reader.
-     * @param inputFile File in which the suffix array is stored.
-     * @param bwt BWT to use when filling in missing data.
-     */
-    public SuffixArrayReader(File inputFile, BWT bwt) {
-        try {
-            this.inputStream = new FileInputStream(inputFile);
-            this.bwt = bwt;
-        }
-        catch( FileNotFoundException ex ) {
-            throw new ReviewedGATKException("Unable to open input file", ex);
-        }
-    }
-
-    /**
-     * Read a suffix array from the input stream.
-     * @return The suffix array stored in the input stream.
-     */
-    public SuffixArray read() {
-        UnsignedIntPackedInputStream uintPackedInputStream = new UnsignedIntPackedInputStream(inputStream, ByteOrder.LITTLE_ENDIAN);
-
-        long inverseSA0;
-        long[] occurrences;
-        long[] suffixArray;
-        int suffixArrayInterval;
-
-        try {
-            inverseSA0 = uintPackedInputStream.read();
-            occurrences = new long[PackUtils.ALPHABET_SIZE];
-            uintPackedInputStream.read(occurrences);
-            // Throw away the suffix array size in bytes and use the occurrences table directly.
-            suffixArrayInterval = (int)uintPackedInputStream.read();
-            suffixArray = new long[(int)((occurrences[occurrences.length-1]+suffixArrayInterval-1)/suffixArrayInterval)];
-            uintPackedInputStream.read(suffixArray);
-        }
-        catch( IOException ex ) {
-            throw new ReviewedGATKException("Unable to read BWT from input stream.", ex);
-        }
-
-        return new SuffixArray(inverseSA0, new Counts(occurrences,true), suffixArray, suffixArrayInterval, bwt);
-    }
-
-
-    /**
-     * Close the input stream.
-     */
-    public void close() {
-        try {
-            inputStream.close();
-        }
-        catch( IOException ex ) {
-            throw new ReviewedGATKException("Unable to close input file", ex);
-        }
-    }    
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/alignment/reference/bwt/SuffixArrayWriter.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/alignment/reference/bwt/SuffixArrayWriter.java
deleted file mode 100644
index df152b9..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/alignment/reference/bwt/SuffixArrayWriter.java
+++ /dev/null
@@ -1,92 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.alignment.reference.bwt;
-
-import org.broadinstitute.gatk.engine.alignment.reference.packing.UnsignedIntPackedOutputStream;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-
-import java.io.*;
-import java.nio.ByteOrder;
-
-/**
- * Javadoc goes here.
- *
- * @author mhanna
- * @version 0.1
- */
-public class SuffixArrayWriter {
-    /**
-     * Input stream from which to read suffix array data.
-     */
-    private OutputStream outputStream;
-
-    /**
-     * Create a new suffix array reader.
-     * @param outputFile File in which the suffix array is stored.
-     */
-    public SuffixArrayWriter( File outputFile ) {
-        try {
-            this.outputStream = new BufferedOutputStream(new FileOutputStream(outputFile));
-        }
-        catch( FileNotFoundException ex ) {
-            throw new ReviewedGATKException("Unable to open input file", ex);
-        }
-    }
-
-    /**
-     * Write a suffix array to the output stream.
-     * @param suffixArray suffix array to write.
-     */
-    public void write(SuffixArray suffixArray) {
-        UnsignedIntPackedOutputStream uintPackedOutputStream = new UnsignedIntPackedOutputStream(outputStream, ByteOrder.LITTLE_ENDIAN);
-
-        try {
-            uintPackedOutputStream.write(suffixArray.inverseSA0);
-            uintPackedOutputStream.write(suffixArray.occurrences.toArray(true));
-            // How frequently the suffix array entry is placed.
-            uintPackedOutputStream.write(1);
-            // Length of the suffix array.
-            uintPackedOutputStream.write(suffixArray.length()-1);
-            uintPackedOutputStream.write(suffixArray.sequence,1,suffixArray.sequence.length-1);
-        }
-        catch( IOException ex ) {
-            throw new ReviewedGATKException("Unable to read BWT from input stream.", ex);
-        }
-    }
-
-
-    /**
-     * Close the input stream.
-     */
-    public void close() {
-        try {
-            outputStream.close();
-        }
-        catch( IOException ex ) {
-            throw new ReviewedGATKException("Unable to close input file", ex);
-        }
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/alignment/reference/packing/BasePackedInputStream.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/alignment/reference/packing/BasePackedInputStream.java
deleted file mode 100644
index 727a378..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/alignment/reference/packing/BasePackedInputStream.java
+++ /dev/null
@@ -1,120 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.alignment.reference.packing;
-
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileNotFoundException;
-import java.io.IOException;
-import java.nio.ByteBuffer;
-import java.nio.ByteOrder;
-import java.nio.channels.FileChannel;
-
-/**
- * Reads a packed version of the input stream.
- *
- * @author mhanna
- * @version 0.1
- */
-public class BasePackedInputStream<T> {
-    /**
-     * Type of object to unpack.
-     */
-    private final Class<T> type;
-
-    /**
-     * Ultimate source for packed bases.
-     */
-    private final FileInputStream targetInputStream;
-
-    /**
-     * Channel source for packed bases.
-     */
-    private final FileChannel targetInputChannel;
-
-    /**
-     * A fixed-size buffer for word-packed data.
-     */
-    private final ByteOrder byteOrder;
-
-    /**
-     * How many bases are in a given packed word.
-     */
-    private final int basesPerPackedWord = PackUtils.bitsInType(Integer.class)/PackUtils.BITS_PER_BASE;
-
-    /**
-     * How many bytes in an integer?
-     */
-    private final int bytesPerInteger = PackUtils.bitsInType(Integer.class)/PackUtils.BITS_PER_BYTE;
-
-
-    public BasePackedInputStream( Class<T> type, File inputFile, ByteOrder byteOrder ) throws FileNotFoundException {
-        this(type,new FileInputStream(inputFile),byteOrder);
-    }
-
-    public BasePackedInputStream( Class<T> type, FileInputStream inputStream, ByteOrder byteOrder ) {
-        if( type != Integer.class )
-            throw new ReviewedGATKException("Only bases packed into 32-bit words are currently supported by this input stream.  Type specified: " + type.getName());
-        this.type = type;
-        this.targetInputStream = inputStream;
-        this.targetInputChannel = inputStream.getChannel();
-        this.byteOrder = byteOrder;
-    }
-
-    /**
-     * Read the entire contents of the input stream.
-     * @param bwt array into which bases should be read.
-     * @throws IOException if an I/O error occurs.
-     */
-    public void read(byte[] bwt) throws IOException {
-        read(bwt,0,bwt.length);
-    }
-
-    /**
-     * Read the next <code>length</code> bases into the bwt array, starting at the given offset.
-     * @param bwt array holding the given data.
-     * @param offset target position in the bases array into which bytes should be written.
-     * @param length number of bases to read from the stream.
-     * @throws IOException if an I/O error occurs.
-     */
-    public void read(byte[] bwt, int offset, int length) throws IOException {
-        int bufferWidth = ((bwt.length+basesPerPackedWord-1)/basesPerPackedWord)*bytesPerInteger;
-        ByteBuffer buffer = ByteBuffer.allocate(bufferWidth).order(byteOrder);
-        targetInputChannel.read(buffer);
-        targetInputChannel.position(targetInputChannel.position()+buffer.remaining());
-        buffer.flip();
-
-        int packedWord = 0;
-        int i = 0;
-        while(i < length) {
-            if(i % basesPerPackedWord == 0) packedWord = buffer.getInt();
-            int position = basesPerPackedWord - i%basesPerPackedWord - 1;
-            bwt[offset+i++] = PackUtils.unpackBase((byte)((packedWord >> position*PackUtils.BITS_PER_BASE) & 0x3));            
-        }
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/alignment/reference/packing/BasePackedOutputStream.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/alignment/reference/packing/BasePackedOutputStream.java
deleted file mode 100644
index b3dbba8..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/alignment/reference/packing/BasePackedOutputStream.java
+++ /dev/null
@@ -1,165 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.alignment.reference.packing;
-
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-
-import java.io.*;
-import java.nio.ByteBuffer;
-import java.nio.ByteOrder;
-
/**
 * A general-purpose stream for writing packed bases.
 *
 * Bases are accumulated two bits apiece into a word of type {@code T}
 * (most-significant positions filled first) and flushed to the underlying
 * stream each time a word fills, plus once more for any partial final word.
 *
 * @author mhanna
 * @version 0.1
 */
public class BasePackedOutputStream<T> {
    /**
     * Type of object to pack.
     */
    private final Class<T> type;

    /**
     * How many bases can be stored in the given data structure?
     */
    private final int basesPerType;

    /**
     * Ultimate target for the packed bases.
     */
    private final OutputStream targetOutputStream;

    /**
     * A fixed-size buffer for word-packed data; one word of type T.
     */
    private final ByteBuffer buffer;

    /**
     * Write packed bases to the given output file.
     * @param type Type of data to pack bases into.
     * @param outputFile File to which to write packed bases.
     * @param byteOrder Endianness to use when writing packed words.
     * @throws FileNotFoundException if the output file cannot be opened.
     */
    public BasePackedOutputStream( Class<T> type, File outputFile, ByteOrder byteOrder ) throws FileNotFoundException {
        this(type,new BufferedOutputStream(new FileOutputStream(outputFile)),byteOrder);
    }

    /**
     * Write packed bases to the given output stream.
     * @param type Type of data to pack bases into.
     * @param outputStream Output stream to which to write packed bases.
     * @param byteOrder Switch between big endian / little endian when reading / writing files.
     */
    public BasePackedOutputStream( Class<T> type, OutputStream outputStream, ByteOrder byteOrder) {
        this.targetOutputStream = outputStream;
        this.type = type;
        basesPerType = PackUtils.bitsInType(type)/PackUtils.BITS_PER_BASE;
        // NOTE(review): basesPerType/ALPHABET_SIZE equals the byte width of T only
        // because ALPHABET_SIZE (4) happens to equal the number of bases per byte
        // (BITS_PER_BYTE/BITS_PER_BASE); the latter appears to be the quantity
        // actually intended here — confirm before changing either constant.
        this.buffer = ByteBuffer.allocate(basesPerType/PackUtils.ALPHABET_SIZE).order(byteOrder);
    }

    /**
     * Writes the given base to the output stream.  Will write only this base; no packing will be performed.
     * @param base The single base (ASCII value) to write, padded out to a full word.
     * @throws IOException if an I/O error occurs.
     */
    public void write( int base ) throws IOException {
        write( new byte[] { (byte)base } );
    }

    /**
     * Writes an array of bases to the target output stream.
     * @param bases List of bases to write.
     * @throws IOException if an I/O error occurs.
     */
    public void write( byte[] bases ) throws IOException {
        write(bases,0,bases.length);
    }

    /**
     * Writes a subset of the array of bases to the output stream.
     * @param bases List of bases to write.
     * @param offset site at which to start writing.
     * @param length number of bases to write.
     * @throws IOException if an I/O error occurs.
     */
    public void write( byte[] bases, int offset, int length ) throws IOException {
        int packedBases = 0;
        int positionInPack = 0;

        for( int base = offset; base < offset+length; base++ ) {
            packedBases = packBase(bases[base], packedBases, positionInPack);

            // Increment the packed counter.  If all possible bases have been squeezed into this byte, write it out.
            // (The modulo wraps positionInPack back to 0 when a word is full.)
            positionInPack = ++positionInPack % basesPerType;
            if( positionInPack == 0 ) {
                writePackedBases(packedBases);
                packedBases = 0;
            }
        }

        // Flush any partially-filled final word; unused low positions stay zero.
        if( positionInPack > 0 )
            writePackedBases(packedBases);
    }

    /**
     * Flush the contents of the OutputStream to disk.
     * @throws IOException if an I/O error occurs.
     */
    public void flush() throws IOException {
        targetOutputStream.flush();
    }

    /**
     * Closes the given output stream.
     * @throws IOException if an I/O error occurs.
     */
    public void close() throws IOException {
        targetOutputStream.close();
    }

    /**
     * Pack the given base into the basepack.
     * @param base The base to pack.
     * @param basePack Target for the pack operation.
     * @param position Position within the pack to which to add the base; position 0
     *                 occupies the most significant two bits of the word.
     * @return The packed integer.
     */
    private int packBase( byte base, int basePack, int position ) {
        basePack |= (PackUtils.packBase(base) << 2*(basesPerType-position-1));
        return basePack;
    }

    /**
     * Write the given packed base structure to the output file.
     * @param packedBases Packed bases to write.
     * @throws IOException on error writing to the file.
     */
    private void writePackedBases(int packedBases) throws IOException {
        buffer.rewind();
        if( type == Integer.class )
            buffer.putInt(packedBases);
        else if( type == Byte.class )
            buffer.put((byte)packedBases);
        else
            throw new ReviewedGATKException("Cannot pack bases into type " + type.getName());
        targetOutputStream.write(buffer.array());
    }
}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/alignment/reference/packing/CreatePACFromReference.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/alignment/reference/packing/CreatePACFromReference.java
deleted file mode 100644
index fac3d92..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/alignment/reference/packing/CreatePACFromReference.java
+++ /dev/null
@@ -1,64 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.alignment.reference.packing;
-
-import htsjdk.samtools.reference.ReferenceSequence;
-import htsjdk.samtools.reference.ReferenceSequenceFile;
-import htsjdk.samtools.reference.ReferenceSequenceFileFactory;
-
-import java.io.File;
-import java.io.IOException;
-
-/**
- * Generate a .PAC file from a given reference.
- *
- * @author hanna
- * @version 0.1
- */
-
-public class CreatePACFromReference {
-    public static void main( String argv[] ) throws IOException {
-        if( argv.length != 3 ) {
-            System.out.println("USAGE: CreatePACFromReference <input>.fasta <output pac> <output rpac>");
-            return;
-        }
-
-        // Read in the first sequence in the input file
-        String inputFileName = argv[0];
-        File inputFile = new File(inputFileName);
-        ReferenceSequenceFile reference = ReferenceSequenceFileFactory.getReferenceSequenceFile(inputFile);
-        ReferenceSequence sequence = reference.nextSequence();
-
-        // Target file for output
-        PackUtils.writeReferenceSequence( new File(argv[1]), sequence.getBases() );
-
-        // Reverse the bases in the reference
-        PackUtils.reverse(sequence.getBases());
-
-        // Target file for output
-        PackUtils.writeReferenceSequence( new File(argv[2]), sequence.getBases() );
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/alignment/reference/packing/PackUtils.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/alignment/reference/packing/PackUtils.java
deleted file mode 100644
index e60780a..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/alignment/reference/packing/PackUtils.java
+++ /dev/null
@@ -1,160 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.alignment.reference.packing;
-
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-
-import java.io.File;
-import java.io.FileOutputStream;
-import java.io.IOException;
-import java.io.OutputStream;
-import java.nio.ByteOrder;
-
-/**
- * Utilities designed for packing / unpacking bases.
- *
- * @author mhanna
- * @version 0.1
- */
-public class PackUtils {
-    /**
-     * How many possible bases can be encoded?
-     */
-    public static final int ALPHABET_SIZE = 4;
-
-    /**
-     * How many bits does it take to store a single base?
-     */
-    public static final int BITS_PER_BASE = (int)(Math.log(ALPHABET_SIZE)/Math.log(2));
-
-    /**
-     * How many bits fit into a single byte?
-     */
-    public static final int BITS_PER_BYTE = 8;
-
-    /**
-     * Writes a reference sequence to a PAC file.
-     * @param outputFile Filename for the PAC file.
-     * @param referenceSequence Reference sequence to write.
-     * @throws IOException If there's a problem writing to the output file.
-     */
-    public static void writeReferenceSequence( File outputFile, byte[] referenceSequence ) throws IOException {
-        OutputStream outputStream = new FileOutputStream(outputFile);
-
-        BasePackedOutputStream<Byte> basePackedOutputStream = new BasePackedOutputStream<Byte>(Byte.class, outputStream, ByteOrder.BIG_ENDIAN);
-        basePackedOutputStream.write(referenceSequence);
-
-        outputStream.write(referenceSequence.length%PackUtils.ALPHABET_SIZE);
-
-        outputStream.close();
-    }
-
-
-    /**
-     * How many bits can a given type hold?
-     * @param type Type to test.
-     * @return Number of bits that the given type can hold.
-     */
-    public static int bitsInType( Class<?> type ) {
-        try {
-            long typeSize = type.getField("MAX_VALUE").getLong(null) - type.getField("MIN_VALUE").getLong(null)+1;
-            long intTypeSize = (long)Integer.MAX_VALUE - (long)Integer.MIN_VALUE + 1;
-            if( typeSize > intTypeSize )
-                throw new ReviewedGATKException("Cannot determine number of bits available in type: " + type.getName());
-            return (int)(Math.log(typeSize)/Math.log(2));
-        }
-        catch( NoSuchFieldException ex ) {
-            throw new ReviewedGATKException("Cannot determine number of bits available in type: " + type.getName(),ex);
-        }
-        catch( IllegalAccessException ex ) {
-            throw new ReviewedGATKException("Cannot determine number of bits available in type: " + type.getName(),ex);
-        }
-    }
-
-    /**
-     * Gets the two-bit representation of a base.  A=00b, C=01b, G=10b, T=11b.
-     * @param base ASCII value for the base to pack.
-     * @return A byte from 0-3 indicating the base's packed value.
-     */
-    public static byte packBase(byte base) {
-        switch( base ) {
-            case 'A':
-                return 0;
-            case 'C':
-                return 1;
-            case 'G':
-                return 2;
-            case 'T':
-                return 3;
-            default:
-                throw new ReviewedGATKException("Unknown base type: " + base);
-        }
-    }
-
-    /**
-     * Converts a two-bit representation of a base into an ASCII representation of a base. 
-     * @param pack Byte from 0-3 indicating which base is represented.
-     * @return An ASCII value representing the packed base.
-     */
-    public static byte unpackBase(byte pack) {
-        switch( pack ) {
-            case 0:
-                return 'A';
-            case 1:
-                return 'C';
-            case 2:
-                return 'G';
-            case 3:
-                return 'T';
-            default:
-                throw new ReviewedGATKException("Unknown pack type: " + pack);
-        }
-    }
-
-    /**
-     * Reverses an unpacked sequence of bases.
-     * @param bases bases to reverse.
-     */
-    public static void reverse( byte[] bases ) {
-        for( int i = 0, j = bases.length-1; i < j; i++, j-- ) {
-            byte temp = bases[j];
-            bases[j] = bases[i];
-            bases[i] = temp;
-        }        
-    }
-
-    /**
-     * Given a structure of size <code>size</code> that should be split
-     * into <code>partitionSize</code> partitions, how many partitions should
-     * be created?  Size of last partition will be <= partitionSize.
-     * @param size Total size of the data structure.
-     * @param partitionSize Size of an individual partition.
-     * @return Number of partitions that would be created.
-     */
-    public static int numberOfPartitions( long size, long partitionSize ) {
-        return (int)((size+partitionSize-1) / partitionSize);    
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/alignment/reference/packing/UnsignedIntPackedInputStream.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/alignment/reference/packing/UnsignedIntPackedInputStream.java
deleted file mode 100644
index 0600500..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/alignment/reference/packing/UnsignedIntPackedInputStream.java
+++ /dev/null
@@ -1,129 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.alignment.reference.packing;
-
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.IOException;
-import java.nio.ByteBuffer;
-import java.nio.ByteOrder;
-import java.nio.channels.FileChannel;
-
/**
 * Reads a stream of 32-bit unsigned integers packed into a binary file,
 * widening each stored value into a non-negative Java long.
 *
 * @author mhanna
 * @version 0.1
 */
public class UnsignedIntPackedInputStream {
    /**
     * Ultimate source for the packed integer data.
     */
    private final FileInputStream targetInputStream;

    /**
     * Target channel from which to pull file data.
     */
    private final FileChannel targetInputChannel;

    /**
     * The byte order in which integer input data appears.
     */
    private final ByteOrder byteOrder;

    /**
     * How many bytes are required to store an integer?
     */
    private final int bytesPerInteger = PackUtils.bitsInType(Integer.class)/PackUtils.BITS_PER_BYTE;

    /**
     * Create a new PackedIntInputStream, reading from the given target file.
     * @param inputFile target input file.
     * @param byteOrder Endianness to use when reading a list of integers.
     * @throws java.io.IOException if an I/O error occurs.
     */
    public UnsignedIntPackedInputStream(File inputFile, ByteOrder byteOrder) throws IOException {
        this(new FileInputStream(inputFile),byteOrder);
    }

    /**
     * Read ints from the given InputStream.
     * @param inputStream Input stream from which to read ints.
     * @param byteOrder Endianness to use when reading a list of integers.
     */
    public UnsignedIntPackedInputStream(FileInputStream inputStream, ByteOrder byteOrder) {
        this.targetInputStream = inputStream;
        this.targetInputChannel = inputStream.getChannel();
        this.byteOrder = byteOrder;
    }

    /**
     * Read a single datum from the input stream.
     * @return The next input datum in the stream, as an unsigned 32-bit value in a long.
     * @throws IOException if an I/O error occurs.
     */
    public long read() throws IOException {
        long[] data = new long[1];
        read(data);
        return data[0];
    }

    /**
     * Read enough data from the input stream to fill the given array.
     * @param data placeholder for input data.
     * @throws IOException if an I/O error occurs.
     */
    public void read( long[] data ) throws IOException {
        read( data, 0, data.length );
    }

    /**
     * Read the data from the input stream, starting at the given offset.
     * @param data placeholder for input data.
     * @param offset place in the array to start reading in data.
     * @param length number of ints to read in.
     * @throws IOException if an I/O error occurs.
     */
    public void read( long[] data, int offset, int length ) throws IOException {
        ByteBuffer readBuffer = ByteBuffer.allocate(bytesPerInteger*length).order(byteOrder);

        // Positional read: read(buffer, position) does NOT advance the channel's
        // position, so the explicit bump below is what moves the cursor forward.
        targetInputChannel.read(readBuffer,targetInputChannel.position());
        readBuffer.flip();
        // After flip(), remaining() equals the number of bytes actually read.
        targetInputChannel.position(targetInputChannel.position()+readBuffer.remaining());

        int i = 0;
        while(i < length)
            // Mask with 0xFFFFFFFFL to treat the stored 32-bit value as unsigned
            // when widening to long.
            data[offset+i++] = readBuffer.getInt() & 0xFFFFFFFFL;
    }

    /**
     * Closes the underlying input stream.
     * @throws IOException if an I/O error occurs.
     */
    public void close() throws IOException {
        targetInputStream.close();
    }
}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/alignment/reference/packing/UnsignedIntPackedOutputStream.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/alignment/reference/packing/UnsignedIntPackedOutputStream.java
deleted file mode 100644
index 44c462b..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/alignment/reference/packing/UnsignedIntPackedOutputStream.java
+++ /dev/null
@@ -1,121 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.alignment.reference.packing;
-
-import java.io.File;
-import java.io.FileOutputStream;
-import java.io.IOException;
-import java.io.OutputStream;
-import java.nio.ByteBuffer;
-import java.nio.ByteOrder;
-
-/**
- * Writes an list of integers to the output file.
- *
- * @author mhanna
- * @version 0.1
- */
-public class UnsignedIntPackedOutputStream {
-    /**
-     * Ultimate target for the occurrence array.
-     */
-    private final OutputStream targetOutputStream;
-
-    /**
-     * A fixed-size buffer for int-packed data.
-     */
-    private final ByteBuffer buffer;
-
-    /**
-     * Create a new PackedIntOutputStream, writing to the given target file.
-     * @param outputFile target output file.
-     * @param byteOrder Endianness to use when writing a list of integers.
-     * @throws IOException if an I/O error occurs.
-     */
-    public UnsignedIntPackedOutputStream(File outputFile, ByteOrder byteOrder) throws IOException {
-        this(new FileOutputStream(outputFile),byteOrder);
-    }
-
-    /**
-     * Write packed ints to the given OutputStream.
-     * @param outputStream Output stream to which to write packed ints.
-     * @param byteOrder Endianness to use when writing a list of integers.
-     */
-    public UnsignedIntPackedOutputStream(OutputStream outputStream, ByteOrder byteOrder) {
-        this.targetOutputStream = outputStream;
-        buffer = ByteBuffer.allocate(PackUtils.bitsInType(Integer.class)/PackUtils.BITS_PER_BYTE).order(byteOrder);
-    }
-
-    /**
-     * Write the data to the output stream.
-     * @param datum datum to write. 
-     * @throws IOException if an I/O error occurs.
-     */
-    public void write( long datum ) throws IOException {
-        buffer.rewind();
-        buffer.putInt((int)datum);
-        targetOutputStream.write(buffer.array());
-    }
-
-    /**
-     * Write the data to the output stream.
-     * @param data data to write.  occurrences.length must match alphabet size.
-     * @throws IOException if an I/O error occurs.
-     */
-    public void write( long[] data ) throws IOException {
-        for(long datum: data)
-            write(datum);
-    }
-
-    /**
-     * Write the given chunk of data to the input stream.
-     * @param data data to write.
-     * @param offset position at which to start.
-     * @param length number of ints to write.
-     * @throws IOException if an I/O error occurs.
-     */
-    public void write( long[] data, int offset, int length ) throws IOException {
-        for( int i = offset; i < offset+length; i++ )
-            write(data[i]);
-    }
-
-    /**
-     * Flush the contents of the OutputStream to disk.
-     * @throws IOException if an I/O error occurs.
-     */
-    public void flush() throws IOException {
-        targetOutputStream.flush();
-    }    
-
-    /**
-     * Closes the given output stream.
-     * @throws IOException if an I/O error occurs.
-     */
-    public void close() throws IOException {
-        targetOutputStream.close();
-    }
-
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/arguments/DbsnpArgumentCollection.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/arguments/DbsnpArgumentCollection.java
deleted file mode 100644
index 05ebffa..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/arguments/DbsnpArgumentCollection.java
+++ /dev/null
@@ -1,46 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.arguments;
-
-
-import org.broadinstitute.gatk.utils.commandline.Input;
-import org.broadinstitute.gatk.utils.commandline.RodBinding;
-import htsjdk.variant.variantcontext.VariantContext;
-
-/**
- * @author ebanks
- * @version 1.0
- */
-public class DbsnpArgumentCollection {
-
-    /**
-     * A dbSNP VCF file.
-     */
-    @Input(fullName="dbsnp", shortName = "D", doc="dbSNP file", required=false)
-    public RodBinding<VariantContext> dbsnp;
-
-}
-
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/arguments/GATKArgumentCollection.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/arguments/GATKArgumentCollection.java
deleted file mode 100644
index 05834f7..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/arguments/GATKArgumentCollection.java
+++ /dev/null
@@ -1,628 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.arguments;
-
-import htsjdk.samtools.ValidationStringency;
-import org.broadinstitute.gatk.utils.commandline.*;
-import org.broadinstitute.gatk.engine.GenomeAnalysisEngine;
-import org.broadinstitute.gatk.engine.downsampling.DownsampleType;
-import org.broadinstitute.gatk.engine.downsampling.DownsamplingMethod;
-import org.broadinstitute.gatk.engine.phonehome.GATKRunReport;
-import org.broadinstitute.gatk.engine.samples.PedigreeValidationType;
-import org.broadinstitute.gatk.utils.QualityUtils;
-import org.broadinstitute.gatk.utils.baq.BAQ;
-import org.broadinstitute.gatk.utils.variant.GATKVCFIndexType;
-import org.broadinstitute.gatk.utils.variant.GATKVCFUtils;
-
-import java.io.File;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.List;
-import java.util.concurrent.TimeUnit;
-
-/**
- * @author aaron
- * @version 1.0
- */
-public class GATKArgumentCollection {
-
-    /** the constructor */
-    public GATKArgumentCollection() {
-    }
-
-    // parameters and their defaults
-    /**
-     * An input file containing sequence data mapped to a reference, in SAM or BAM format, or a text file containing a
-     * list of input files (with extension .list). Note that the GATK requires an accompanying index for each SAM or
-     * BAM file. Please see our online documentation for more details on input formatting requirements.
-     */
-    @Input(fullName = "input_file", shortName = "I", doc = "Input file containing sequence data (SAM or BAM)", required = false)
-    public List<String> samFiles = new ArrayList<>();
-
-    @Hidden
-    @Argument(fullName = "showFullBamList",doc="Emit a log entry (level INFO) containing the full list of sequence data files to be included in the analysis (including files inside .bam.list files).")
-    public Boolean showFullBamList = false;
-
-    @Argument(fullName = "read_buffer_size", shortName = "rbs", doc="Number of reads per SAM file to buffer in memory", required = false, minValue = 0)
-    public Integer readBufferSize = null;
-
-    // --------------------------------------------------------------------------------------------------------------
-    //
-    // GATKRunReport options
-    //
-    // --------------------------------------------------------------------------------------------------------------
-
-    /**
-     * By default, GATK generates a run report that is uploaded to a cloud-based service. This report contains basic
-     * statistics about the run (which tool was used, whether the run was successful etc.) that help us for debugging
-     * and development. Up to version 3.2-2 the run report contains a record of the username and hostname associated
-     * with the run, but it does **NOT** contain any information that could be used to identify patient data.
-     * Nevertheless, if your data is subject to stringent confidentiality clauses (no outside communication) or if your
-     * run environment is not connected to the internet, you can disable the reporting system by seeting this option to
-     * "NO_ET". You will also need to request a key using the online request form on our website (se FAQs).
-     */
-    @Argument(fullName = "phone_home", shortName = "et", doc="Run reporting mode", required = false)
-    public GATKRunReport.PhoneHomeOption phoneHomeType = GATKRunReport.PhoneHomeOption.AWS;
-    /**
-     * Please see the "phone_home" argument below and the online documentation FAQs for more details on the key system
-     * and how to request a key.
-     */
-    @Argument(fullName = "gatk_key", shortName = "K", doc="GATK key file required to run with -et NO_ET", required = false)
-    public File gatkKeyFile = null;
-
-    /**
-     * The GATKRunReport supports (as of GATK 2.2) tagging GATK runs with an arbitrary tag that can be
-     * used to group together runs during later analysis.  One use of this capability is to tag runs as GATK
-     * performance tests, so that the performance of the GATK over time can be assessed from the logs directly.
-     *
-     * Note that the tags do not conform to any ontology, so you are free to use any tags that you might find
-     * meaningful.
-     */
-    @Argument(fullName = "tag", shortName = "tag", doc="Tag to identify this GATK run as part of a group of runs", required = false)
-    public String tag = "NA";
-
-    // --------------------------------------------------------------------------------------------------------------
-    //
-    // General features
-    //
-    // --------------------------------------------------------------------------------------------------------------
-
-    /**
-     * Reads that fail the specified filters will not be used in the analysis. Multiple filters can be specified separately,
-     * e.g. you can do -rf MalformedRead -rf BadCigar and so on. Available read filters are listed in the online tool
-     * documentation. Note that the read name format is e.g. MalformedReadFilter, but at the command line the filter
-     * name should be given without the Filter suffix; e.g. -rf MalformedRead (NOT -rf MalformedReadFilter, which is not
-     * recognized by the program). Note also that some read filters are applied by default for some analysis tools; this
-     * is specified in each tool's documentation. The default filters cannot be disabled.
-     */
-    @Argument(fullName = "read_filter", shortName = "rf", doc = "Filters to apply to reads before analysis", required = false)
-    public final List<String> readFilters = new ArrayList<>();
-
-    @ArgumentCollection
-    public IntervalArgumentCollection intervalArguments = new IntervalArgumentCollection();
-    /**
-     * The reference genome against which the sequence data was mapped. The GATK requires an index file and a dictionary
-     * file accompanying the reference (please see the online documentation FAQs for more details on these files). Although
-     * this argument is indicated as being optional, almost all GATK tools require a reference in order to run.
-     * Note also that while GATK can in theory process genomes from any organism with any number of chromosomes or contigs,
-     * it is not designed to process draft genome assemblies and performance will decrease as the number of contigs in
-     * the reference increases. We strongly discourage the use of unfinished genome assemblies containing more than a few
-     * hundred contigs. Contig numbers in the thousands will most probably cause memory-related crashes.
-     */
-    @Input(fullName = "reference_sequence", shortName = "R", doc = "Reference sequence file", required = false)
-    public File referenceFile = null;
-    /**
-     * If this flag is enabled, the random numbers generated will be different in every run, causing GATK to behave non-deterministically.
-     */
-    @Argument(fullName = "nonDeterministicRandomSeed", shortName = "ndrs", doc = "Use a non-deterministic random seed", required = false)
-    public boolean nonDeterministicRandomSeed = false;
-    /**
-     * To be used in the testing framework where dynamic parallelism can result in differing numbers of calls to the random generator.
-     */
-    @Hidden
-    @Argument(fullName = "disableDithering",doc="Completely eliminates randomized dithering from rank sum tests.")
-    public boolean disableDithering = false;
-    /**
-     * This will truncate the run but without exiting with a failure. By default the value is interpreted in minutes, but this can be changed with the maxRuntimeUnits argument.
-     */
-    @Argument(fullName = "maxRuntime", shortName = "maxRuntime", doc="Stop execution cleanly as soon as maxRuntime has been reached", required = false)
-    public long maxRuntime = GenomeAnalysisEngine.NO_RUNTIME_LIMIT;
-
-    @Argument(fullName = "maxRuntimeUnits", shortName = "maxRuntimeUnits", doc="Unit of time used by maxRuntime", required = false)
-    public TimeUnit maxRuntimeUnits = TimeUnit.MINUTES;
-
-    // --------------------------------------------------------------------------------------------------------------
-    //
-    // Downsampling Arguments
-    //
-    // --------------------------------------------------------------------------------------------------------------
-    /**
-     * There are several ways to downsample reads, i.e. to removed reads from the pile of reads that will be used for analysis.
-     * See the documentation of the individual downsampling options for details on how they work. Note that Many GATK tools
-     * specify a default downsampling type and target, but this behavior can be overridden from command line using the
-     * downsampling arguments.
-     */
-    @Argument(fullName = "downsampling_type", shortName="dt", doc="Type of read downsampling to employ at a given locus", required = false)
-    public DownsampleType downsamplingType = null;
-    /**
-     * Reads will be downsampled so the specified fraction remains; e.g. if you specify -dfrac 0.25, three-quarters of
-     * the reads will be removed, and the remaining one quarter will be used in the analysis. This method of downsampling
-     * is truly unbiased and random. It is typically used to simulate the effect of generating different amounts of
-     * sequence data for a given sample. For example, you can use this in a pilot experiment to evaluate how much target
-     * coverage you need to aim for in order to obtain enough coverage in all loci of interest.
-     */
-    @Argument(fullName = "downsample_to_fraction", shortName = "dfrac", doc = "Fraction of reads to downsample to", required = false, minValue = 0.0, maxValue = 1.0)
-    public Double downsampleFraction = null;
-
-    /**
-     * The principle of this downsampling type is to downsample reads to a given capping threshold coverage. Its purpose is to
-     * get rid of excessive coverage, because above a certain depth, having additional data is not informative and imposes
-     * unreasonable computational costs. The downsampling process takes two different forms depending on the type of
-     * analysis it is used with.
-     *
-     * For locus-based traversals (LocusWalkers like UnifiedGenotyper and ActiveRegionWalkers like HaplotypeCaller),
-     * downsample_to_coverage controls the maximum depth of coverage at each locus. For read-based traversals
-     * (ReadWalkers like BaseRecalibrator), it controls the maximum number of reads sharing the same alignment start
-     * position. For ReadWalkers you will typically need to use much lower dcov values than you would with LocusWalkers
-     * to see an effect. Note that this downsampling option does not produce an unbiased random sampling from all available
-     * reads at each locus: instead, the primary goal of the to-coverage downsampler is to maintain an even representation
-     * of reads from all alignment start positions when removing excess coverage. For a truly unbiased random sampling of
-     * reads, use -dfrac instead. Also note that the coverage target is an approximate goal that is not guaranteed to be
-     * met exactly: the downsampling algorithm will under some circumstances retain slightly more or less coverage than
-     * requested.
-     */
-    @Argument(fullName = "downsample_to_coverage", shortName = "dcov",
-              doc = "Target coverage threshold for downsampling to coverage",
-              required = false, minValue = 0)
-    public Integer downsampleCoverage = null;
-
-    /**
-     * Gets the downsampling method explicitly specified by the user. If the user didn't specify
-     * a default downsampling mechanism, return the default.
-     * @return The explicitly specified downsampling mechanism, or the default if none exists.
-     */
-    public DownsamplingMethod getDownsamplingMethod() {
-        if ( downsamplingType == null && downsampleFraction == null && downsampleCoverage == null )
-            return null;
-
-        return new DownsamplingMethod(downsamplingType, downsampleCoverage, downsampleFraction);
-    }
-
-    /**
-     * Set the downsampling method stored in the argument collection so that it is read back out when interrogating the command line arguments.
-     * @param method The downsampling mechanism.
-     */
-    public void setDownsamplingMethod(DownsamplingMethod method) {
-        if (method == null)
-            throw new IllegalArgumentException("method is null");
-
-        downsamplingType = method.type;
-        downsampleCoverage = method.toCoverage;
-        downsampleFraction = method.toFraction;
-    }
-
-    // --------------------------------------------------------------------------------------------------------------
-    //
-    // BAQ arguments
-    //
-    // --------------------------------------------------------------------------------------------------------------
-    @Argument(fullName = "baq", shortName="baq", doc="Type of BAQ calculation to apply in the engine", required = false)
-    public BAQ.CalculationMode BAQMode = BAQ.CalculationMode.OFF;
-    /**
-     *  Phred-scaled gap open penalty for BAQ calculation. Although the default value is 40, a value of 30 may be better for whole genome call sets.
-     */
-    @Argument(fullName = "baqGapOpenPenalty", shortName="baqGOP", doc="BAQ gap open penalty", required = false, minValue = 0)
-    public double BAQGOP = BAQ.DEFAULT_GOP;
-
-    // --------------------------------------------------------------------------------------------------------------
-    //
-    // refactor NDN cigar string arguments
-    //
-    // --------------------------------------------------------------------------------------------------------------
-    /**
-     * This flag tells GATK to refactor cigar string with NDN elements to one element. It intended primarily for use in
-     * a RNAseq pipeline since the problem might come up when using RNAseq aligner such as Tophat2 with provided transcriptoms.
-     * You should only use this if you know that your reads have that problem.
-     */
-    @Argument(fullName = "refactor_NDN_cigar_string", shortName = "fixNDN", doc = "refactor cigar string with NDN elements to one element", required = false)
-    public boolean REFACTOR_NDN_CIGAR_READS = false;
-
-    // --------------------------------------------------------------------------------------------------------------
-    //
-    // quality encoding checking arguments
-    //
-    // --------------------------------------------------------------------------------------------------------------
-
-    /**
-     * By default the GATK assumes that base quality scores start at Q0 == ASCII 33 according to the SAM specification.
-     * However, encoding in some datasets (especially older Illumina ones) starts at Q64. This argument will fix the
-     * encodings on the fly (as the data is read in) by subtracting 31 from every quality score. Note that this argument should
-     * NEVER be used by default; you should only use it when you have confirmed that the quality scores in your data are
-     * not in the correct encoding.
-     */
-    @Argument(fullName = "fix_misencoded_quality_scores", shortName="fixMisencodedQuals", doc="Fix mis-encoded base quality scores", required = false)
-    public boolean FIX_MISENCODED_QUALS = false;
-    /**
-     * This flag tells GATK to ignore warnings when encountering base qualities that are too high and that seemingly
-     * indicate a problem with the base quality encoding of the BAM file. You should only use this if you really know
-     * what you are doing; otherwise you could seriously mess up your data and ruin your analysis.
-     */
-    @Argument(fullName = "allow_potentially_misencoded_quality_scores", shortName="allowPotentiallyMisencodedQuals", doc="Ignore warnings about base quality score encoding", required = false)
-    public boolean ALLOW_POTENTIALLY_MISENCODED_QUALS = false;
-    /**
-     * This flag tells GATK to use the original base qualities (that were in the data before BQSR/recalibration) which
-     * are stored in the OQ tag, if they are present, rather than use the post-recalibration quality scores. If no OQ
-     * tag is present for a read, the standard qual score will be used.
-     */
-    @Argument(fullName="useOriginalQualities", shortName = "OQ", doc = "Use the base quality scores from the OQ tag", required=false)
-    public Boolean useOriginalBaseQualities = false;
-    /**
-     * If reads are missing some or all base quality scores, this value will be used for all base quality scores.
-     * By default this is set to -1 to disable default base quality assignment.
-     */
-    @Argument(fullName="defaultBaseQualities", shortName = "DBQ", doc = "Assign a default base quality", required=false, minValue = 0, maxValue = Byte.MAX_VALUE)
-    public byte defaultBaseQualities = -1;
-
-    // --------------------------------------------------------------------------------------------------------------
-    //
-    // performance log arguments
-    //
-    // --------------------------------------------------------------------------------------------------------------
-
-    /**
-     * The file name for the GATK performance log output, or null if you don't want to generate the
-     * detailed performance logging table.  This table is suitable for importing into R or any
-     * other analysis software that can read tsv files.
-     */
-    @Argument(fullName = "performanceLog", shortName="PF", doc="Write GATK runtime performance log to this file", required = false)
-    public File performanceLog = null;
-
-    // --------------------------------------------------------------------------------------------------------------
-    //
-    // BQSR arguments
-    //
-    // --------------------------------------------------------------------------------------------------------------
-
-    /**
-     * Enables on-the-fly recalibrate of base qualities, intended primarily for use with BaseRecalibrator and PrintReads
-     * (see Best Practices workflow documentation). The covariates tables are produced by the BaseRecalibrator tool.
-     * Please be aware that you should only run recalibration with the covariates file created on the same input bam(s).
-     */
-    @Input(fullName="BQSR", shortName="BQSR", required=false, doc="Input covariates table file for on-the-fly base quality score recalibration")
-    public File BQSR_RECAL_FILE = null;
-
-    /**
-     * Turns on the base quantization module. It requires a recalibration report (-BQSR).
-     *
-     * A value of 0 here means "do not quantize".
-     * Any value greater than zero will be used to recalculate the quantization using that many levels.
-     * Negative values mean that we should quantize using the recalibration report's quantization level.
-     */
-    @Hidden
-    @Argument(fullName="quantize_quals", shortName = "qq", doc = "Quantize quality scores to a given number of levels (with -BQSR)", required=false)
-    public int quantizationLevels = 0;
-
-    /**
-     * Turns off printing of the base insertion and base deletion tags when using the -BQSR argument. Only the base substitution qualities will be produced.
-     */
-    @Argument(fullName="disable_indel_quals", shortName = "DIQ", doc = "Disable printing of base insertion and deletion tags (with -BQSR)", required=false)
-    public boolean disableIndelQuals = false;
-
-    /**
-     * By default, the OQ tag in not emitted when using the -BQSR argument. Use this flag to include OQ tags in the output BAM file.
-     * Note that this may results in significant file size increase.
-     */
-    @Argument(fullName="emit_original_quals", shortName = "EOQ", doc = "Emit the OQ tag with the original base qualities (with -BQSR)", required=false)
-    public boolean emitOriginalQuals = false;
-
-    /**
-     * This flag tells GATK not to modify quality scores less than this value. Instead they will be written out unmodified in the recalibrated BAM file.
-     * In general it's unsafe to change qualities scores below < 6, since base callers use these values to indicate random or bad bases.
-     * For example, Illumina writes Q2 bases when the machine has really gone wrong. This would be fine in and of itself,
-     * but when you select a subset of these reads based on their ability to align to the reference and their dinucleotide effect,
-     * your Q2 bin can be elevated to Q8 or Q10, leading to issues downstream.
-     */
-    @Argument(fullName = "preserve_qscores_less_than", shortName = "preserveQ", doc = "Don't recalibrate bases with quality scores less than this threshold (with -BQSR)", required = false, minValue = 0, minRecommendedValue = QualityUtils.MIN_USABLE_Q_SCORE)
-    public int PRESERVE_QSCORES_LESS_THAN = QualityUtils.MIN_USABLE_Q_SCORE;
-    /**
-     * If specified, this value will be used as the prior for all mismatch quality scores instead of the actual reported quality score.
-     */
-    @Argument(fullName = "globalQScorePrior", shortName = "globalQScorePrior", doc = "Global Qscore Bayesian prior to use for BQSR", required = false)
-    public double globalQScorePrior = -1.0;
-
-
-    // --------------------------------------------------------------------------------------------------------------
-    //
-    // Other utility arguments
-    //
-    // --------------------------------------------------------------------------------------------------------------
-
-    /**
-     * Keep in mind that if you set this to LENIENT, we may refuse to provide you with support if anything goes wrong.
-     */
-    @Argument(fullName = "validation_strictness", shortName = "S", doc = "How strict should we be with validation", required = false)
-    public ValidationStringency strictnessLevel = ValidationStringency.SILENT;
-    /**
-     * Some tools keep program records in the SAM header by default. Use this argument to override that behavior and discard program records for the SAM header.
-     */
-    @Argument(fullName = "remove_program_records", shortName = "rpr", doc = "Remove program records from the SAM header", required = false)
-    public boolean removeProgramRecords = false;
-    /**
-     * Some tools discard program records from the SAM header by default. Use this argument to override that behavior and keep program records in the SAM header.
-     */
-    @Argument(fullName = "keep_program_records", shortName = "kpr", doc = "Keep program records in the SAM header", required = false)
-    public boolean keepProgramRecords = false;
-
-    /**
-     * On-the-fly sample renaming works only with single-sample BAM and VCF files. Each line of the mapping file must
-     * contain the absolute path to a BAM or VCF file, followed by whitespace, followed by the new sample name for that
-     * BAM or VCF file. The sample name may contain non-tab whitespace, but leading or trailing whitespace will be 
-     * ignored. The engine will verify at runtime that each BAM/VCF targeted for sample renaming has only a single 
-     * sample specified in its header (though, in the case of BAM files, there may be multiple read groups for that 
-     * sample).
-     */
-    @Advanced
-    @Argument(fullName = "sample_rename_mapping_file", shortName = "sample_rename_mapping_file", doc = "Rename sample IDs on-the-fly at runtime using the provided mapping file", required = false)
-    public File sampleRenameMappingFile = null;
-
-    /**
-     * For expert users only who know what they are doing. We do not support usage of this argument, so we may refuse to help you if you use it and something goes wrong. The one exception to this rule is ALLOW_N_CIGAR_READS, which is necessary for RNAseq analysis.
-     */
-    @Argument(fullName = "unsafe", shortName = "U", doc = "Enable unsafe operations: nothing will be checked at runtime", required = false)
-    public ValidationExclusion.TYPE unsafe;
-    /**
-     * UNSAFE FOR GENERAL USE (FOR TEST SUITE USE ONLY). Disable both auto-generation of index files and index file locking
-     * when reading VCFs and other rods and an index isn't present or is out-of-date. The file locking necessary for auto index
-     * generation to work safely is prone to random failures/hangs on certain platforms, which makes it desirable to disable it
-     * for situations like test suite runs where the indices are already known to exist, however this option is unsafe in general
-     * because it allows reading from index files without first acquiring a lock.
-     */
-    @Hidden
-    @Advanced
-    @Argument(fullName = "disable_auto_index_creation_and_locking_when_reading_rods", shortName = "disable_auto_index_creation_and_locking_when_reading_rods",
-              doc = "Disable both auto-generation of index files and index file locking",
-              required = false)
-    public boolean disableAutoIndexCreationAndLockingWhenReadingRods = false;
-
-    @Hidden
-    @Argument(fullName = "no_cmdline_in_header", shortName = "no_cmdline_in_header", doc = "Don't output the usual VCF header tag with the command line. FOR DEBUGGING PURPOSES ONLY. This option is required in order to pass integration tests.",
-              required = false)
-    public boolean disableCommandLineInVCF = false;
-
-    @Argument(fullName = "sites_only", shortName = "sites_only", doc = "Just output sites without genotypes (i.e. only the first 8 columns of the VCF)",
-              required = false)
-    public boolean sitesOnlyVCF = false;
-
-    /**
-     * <p>The VCF specification permits missing records to be dropped from the end of FORMAT fields, so long as GT is always output.
-     * This option prevents GATK from performing that trimming.</p>
-     *
-     * <p>For example, given a FORMAT of <pre>GT:AD:DP:PL</pre>, GATK will by default emit <pre>./.</pre> for a variant with
-     * no reads present (ie, the AD, DP, and PL fields are trimmed).  If you specify -writeFullFormat, this record
-     * would be emitted as <pre>./.:.:.:.</pre></p>
-     */
-    @Argument(fullName = "never_trim_vcf_format_field", shortName = "writeFullFormat", doc = "Always output all the records in VCF FORMAT fields, even if some are missing",
-              required = false)
-    public boolean neverTrimVCFFormatField = false;
-
-    @Hidden
-    @Argument(fullName = "bcf", shortName = "bcf", doc = "Force BCF output, regardless of the file's extension",
-              required = false)
-    public boolean forceBCFOutput = false;
-
-    @Advanced
-    @Argument(fullName = "bam_compression", shortName = "compress", doc = "Compression level to use for writing BAM files (0 - 9, higher is more compressed)",
-              minValue = 0, maxValue = 9, required = false)
-    public Integer bamCompression = null;
-
-    @Advanced
-    @Argument(fullName = "simplifyBAM", shortName = "simplifyBAM",
-              doc = "If provided, output BAM files will be simplified to include just key reads for downstream variation discovery analyses (removing duplicates, PF-, non-primary reads), as well stripping all extended tags from the kept reads except the read group identifier",
-              required = false)
-    public boolean simplifyBAM = false;
-
-    @Argument(fullName = "disable_bam_indexing", doc = "Turn off on-the-fly creation of indices for output BAM files.",
-            required = false)
-    public boolean disableBAMIndexing = false;
-
-    @Argument(fullName = "generate_md5", doc = "Enable on-the-fly creation of md5s for output BAM files.",
-            required = false)
-    public boolean enableBAMmd5 = false;
-
-    // --------------------------------------------------------------------------------------------------------------
-    //
-    // Multi-threading arguments
-    //
-    // --------------------------------------------------------------------------------------------------------------
-
-    /**
-     * Data threads contains N cpu threads per data thread, and act as completely data parallel processing, increasing
-     * the memory usage of GATK by M data threads. Data threads generally scale extremely effectively, up to 24 cores.
-     * See online documentation FAQs for more information.
-     */
-    @Argument(fullName = "num_threads", shortName = "nt", doc = "Number of data threads to allocate to this analysis", required = false, minValue = 1)
-    public Integer numberOfDataThreads = 1;
-
-    /**
-     * Each CPU thread operates the map cycle independently, but may run into earlier scaling problems with IO than
-     * data threads. Has the benefit of not requiring X times as much memory per thread as data threads do, but rather
-     * only a constant overhead. See online documentation FAQs for more information.
-     */
-    @Argument(fullName="num_cpu_threads_per_data_thread", shortName = "nct", doc="Number of CPU threads to allocate per data thread", required = false, minValue = 1)
-    public int numberOfCPUThreadsPerDataThread = 1;
-
-    @Argument(fullName="num_io_threads", shortName = "nit", doc="Number of given threads to allocate to IO", required = false, minValue = 0)
-    @Hidden
-    public int numberOfIOThreads = 0;
-
-    /**
-     * Enable GATK to monitor its own threading efficiency, at an itsy-bitsy tiny
-     * cost (< 0.1%) in runtime because of turning on the JavaBean.  This is largely for
-     * debugging purposes. Note that this argument is not compatible with -nt, it only works with -nct.
-     */
-    @Argument(fullName = "monitorThreadEfficiency", shortName = "mte", doc = "Enable threading efficiency monitoring", required = false)
-    public Boolean monitorThreadEfficiency = false;
-
-    @Argument(fullName = "num_bam_file_handles", shortName = "bfh", doc="Total number of BAM file handles to keep open simultaneously", required=false, minValue = 1)
-    public Integer numberOfBAMFileHandles = null;
-    /**
-     * This will filter out read groups matching <TAG>:<STRING> (e.g. SM:sample1) or a .txt file containing the filter strings one per line.
-     */
-    @Input(fullName = "read_group_black_list", shortName="rgbl", doc="Exclude read groups based on tags", required = false)
-    public List<String> readGroupBlackList = null;
-
-    // --------------------------------------------------------------------------------------------------------------
-    //
-    // PED (pedigree) support
-    //
-    // --------------------------------------------------------------------------------------------------------------
-
-    /**
-     * <p>Reads PED file-formatted tabular text files describing meta-data about the samples being
-     * processed in the GATK.</p>
-     *
-     * <ul>
-     *  <li>see <a href="http://www.broadinstitute.org/mpg/tagger/faq.html">http://www.broadinstitute.org/mpg/tagger/faq.html</a></li>
-     *  <li>see <a href="http://pngu.mgh.harvard.edu/~purcell/plink/data.shtml#ped">http://pngu.mgh.harvard.edu/~purcell/plink/data.shtml#ped</a></li>
-     * </ul>
-     *
-     * <p>The PED file is a white-space (space or tab) delimited file: the first six columns are mandatory:</p>
-     *
-     * <ul>
-     *  <li>Family ID</li>
-     *  <li>Individual ID</li>
-     *  <li>Paternal ID</li>
-     *  <li>Maternal ID</li>
-     *  <li>Sex (1=male; 2=female; other=unknown)</li>
-     *  <li>Phenotype</li>
-     * </ul>
-     *
-     *  <p>The IDs are alphanumeric: the combination of family and individual ID should uniquely identify a person.
-     *  A PED file must have 1 and only 1 phenotype in the sixth column. The phenotype can be either a
-     *  quantitative trait or an affection status column: GATK will automatically detect which type
-     *  (i.e. based on whether a value other than 0, 1, 2 or the missing genotype code is observed).</p>
-     *
-     *  <p>If an individual's sex is unknown, then any character other than 1 or 2 can be used.</p>
-     *
-     *  <p>You can add a comment to a PED or MAP file by starting the line with a # character. The rest of that
-     *  line will be ignored. Do not start any family IDs with this character therefore.</p>
-     *
-     *  <p>Affection status should be coded:</p>
-     *
-     * <ul>
-     *  <li>-9 missing</li>
-     *   <li>0 missing</li>
-     *   <li>1 unaffected</li>
-     *   <li>2 affected</li>
-     * </ul>
-     *
-     * <p>If any value outside of -9,0,1,2 is detected than the samples are assumed
-     * to phenotype values are interpreted as string phenotype values.  In this case -9 uniquely
-     * represents the missing value.</p>
-     *
-     * <p>Genotypes (column 7 onwards) cannot be specified to the GATK.</p>
-     *
-     * <p>For example, here are two individuals (one row = one person):</p>
-     *
-     * <pre>
-     *   FAM001  1  0 0  1  2
-     *   FAM001  2  0 0  1  2
-     * </pre>
-     *
-     * <p>Each -ped argument can be tagged with NO_FAMILY_ID, NO_PARENTS, NO_SEX, NO_PHENOTYPE to
-     * tell the GATK PED parser that the corresponding fields are missing from the ped file.</p>
-     *
-     * <p>Note that most GATK walkers do not use pedigree information.  Walkers that require pedigree
-     * data should clearly indicate so in their arguments and will throw errors if required pedigree
-     * information is missing.</p>
-     */
-    @Argument(fullName="pedigree", shortName = "ped", doc="Pedigree files for samples",required=false)
-    public List<File> pedigreeFiles = Collections.emptyList();
-
-    /**
-     * Inline PED records (see -ped argument).  Each -pedString STRING can contain one or more
-     * valid PED records (see -ped) separated by semi-colons.  Supports all tags for each pedString
-     * as -ped supports
-     */
-    @Argument(fullName="pedigreeString", shortName = "pedString", doc="Pedigree string for samples",required=false)
-    public List<String> pedigreeStrings = Collections.emptyList();
-
-    /**
-     * How strict should we be in parsing the PED files?
-     */
-    @Argument(fullName="pedigreeValidationType", shortName = "pedValidationType", doc="Validation strictness for pedigree information",required=false)
-    public PedigreeValidationType pedigreeValidationType = PedigreeValidationType.STRICT;
-
-    // --------------------------------------------------------------------------------------------------------------
-    //
-    // BAM indexing and sharding arguments
-    //
-    // --------------------------------------------------------------------------------------------------------------
-    /**
-     * NO INTEGRATION TESTS are available.  Use at your own risk.
-     */
-    @Argument(fullName="allow_intervals_with_unindexed_bam",doc="Allow interval processing with an unsupported BAM",required=false)
-    @Hidden
-    public boolean allowIntervalsWithUnindexedBAM = false;
-
-    // --------------------------------------------------------------------------------------------------------------
-    //
-    // testing BCF2
-    //
-    // --------------------------------------------------------------------------------------------------------------
-    /**
-     * If provided, whenever we create a VCFWriter we will also write out a BCF file alongside it, for testing purposes.
-     */
-    @Argument(fullName="generateShadowBCF",shortName = "generateShadowBCF",doc="Write a BCF copy of the output VCF",required=false)
-    @Hidden
-    public boolean generateShadowBCF = false;
-    // TODO -- remove all code tagged with TODO -- remove me when argument generateShadowBCF is removed
-
-    // --------------------------------------------------------------------------------------------------------------
-    //
-    // VCF/BCF index parameters
-    //
-    // --------------------------------------------------------------------------------------------------------------
-
-    /**
-     * Specify the Tribble indexing strategy to use for VCFs.
-     *
-     * LINEAR creates a LinearIndex with bins of equal width, specified by the Bin Width parameter
-     * INTERVAL creates an IntervalTreeIndex with bins with an equal amount of features, specified by the Features Per Bin parameter
-     * DYNAMIC_SEEK attempts to optimize for minimal seek time by choosing an appropriate strategy and parameter (user-supplied parameter is ignored)
-     * DYNAMIC_SIZE attempts to optimize for minimal index size by choosing an appropriate strategy and parameter (user-supplied parameter is ignored)
-     */
-    @Argument(fullName="variant_index_type",shortName = "variant_index_type",doc="Type of IndexCreator to use for VCF/BCF indices",required=false)
-    @Advanced
-    public GATKVCFIndexType variant_index_type = GATKVCFUtils.DEFAULT_INDEX_TYPE;
-    /**
-     * This is either the bin width or the number of features per bin, depending on the indexing strategy
-     */
-    @Argument(fullName="variant_index_parameter",shortName = "variant_index_parameter",doc="Parameter to pass to the VCF/BCF IndexCreator",required=false)
-    @Advanced
-    public int variant_index_parameter = GATKVCFUtils.DEFAULT_INDEX_PARAMETER;
-}
-
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/arguments/StandardVariantContextInputArgumentCollection.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/arguments/StandardVariantContextInputArgumentCollection.java
deleted file mode 100644
index 331029f..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/arguments/StandardVariantContextInputArgumentCollection.java
+++ /dev/null
@@ -1,48 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.arguments;
-
-
-import org.broadinstitute.gatk.utils.commandline.Input;
-import org.broadinstitute.gatk.utils.commandline.RodBinding;
-import htsjdk.variant.variantcontext.VariantContext;
-
-/**
- * @author ebanks
- * @version 1.0
- */
-public class StandardVariantContextInputArgumentCollection {
-
-    /**
-     * Variants from this VCF file are used by this tool as input.
-     * The file must at least contain the standard VCF header lines, but
-     * can be empty (i.e., no variants are contained in the file).
-     */
-    @Input(fullName="variant", shortName = "V", doc="Input VCF file", required=true)
-    public RodBinding<VariantContext> variants;
-
-}
-
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/arguments/ValidationExclusion.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/arguments/ValidationExclusion.java
deleted file mode 100644
index ccd4fdc..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/arguments/ValidationExclusion.java
+++ /dev/null
@@ -1,67 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.arguments;
-
-import org.broadinstitute.gatk.utils.commandline.EnumerationArgumentDefault;
-
-import java.util.ArrayList;
-import java.util.List;
-
-
-public class ValidationExclusion {
-
-    // our validation options
-
-    public enum TYPE {
-        ALLOW_N_CIGAR_READS,    // ignore the presence of N operators in CIGARs: do not blow up and process reads that contain one or more N operators.
-                                // This exclusion does not have effect on reads that get filtered {@see MalformedReadFilter}.
-        ALLOW_UNINDEXED_BAM,        // allow bam files that do not have an index; we'll traverse them using monolithic shard
-        ALLOW_UNSET_BAM_SORT_ORDER, // assume that the bam is sorted, even if the SO (sort-order) flag is not set
-        NO_READ_ORDER_VERIFICATION, // do not validate that the reads are in order as we take them from the bam file
-        ALLOW_SEQ_DICT_INCOMPATIBILITY, // allow dangerous, but not fatal, sequence dictionary incompabilities
-        LENIENT_VCF_PROCESSING,         // allow non-standard values for standard VCF header lines.  Don't worry about size differences between header and values, etc.
-        @EnumerationArgumentDefault // set the ALL value to the default value, so if they specify just -U, we get the ALL
-        ALL                         // do not check for all of the above conditions, DEFAULT
-    }
-
-    // a storage for the passed in exclusions
-    List<TYPE> exclusions = new ArrayList<TYPE>();
-
-    public ValidationExclusion(List<TYPE> exclusionsList) {
-        exclusions.addAll(exclusionsList);
-    }
-
-    public ValidationExclusion() {}
-    
-    /**
-     * do we contain the exclusion specified, or were we set to ALL
-     * @param t the exclusion case to test for
-     * @return true if we contain the exclusion or if we're set to ALL, false otherwise
-     */
-    public boolean contains(TYPE t) {
-        return (exclusions.contains(TYPE.ALL) || exclusions.contains(t));
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/contexts/AlignmentContext.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/contexts/AlignmentContext.java
deleted file mode 100644
index 6ac2048..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/contexts/AlignmentContext.java
+++ /dev/null
@@ -1,154 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.contexts;
-
-import org.broadinstitute.gatk.utils.GenomeLoc;
-import org.broadinstitute.gatk.utils.HasGenomeLocation;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-import org.broadinstitute.gatk.utils.pileup.ReadBackedPileup;
-import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
-
-import java.util.List;
-
-/**
- * Useful class for forwarding on locusContext data from this iterator
- * 
- * Created by IntelliJ IDEA.
- * User: mdepristo
- * Date: Feb 22, 2009
- * Time: 3:01:34 PM
- * To change this template use File | Settings | File Templates.
- */
-public class AlignmentContext implements HasGenomeLocation {
-    protected GenomeLoc loc = null;
-    protected ReadBackedPileup basePileup = null;
-    protected boolean hasPileupBeenDownsampled;
-
-    /**
-     * The number of bases we've skipped over in the reference since the last map invocation.
-     * Only filled in by RodTraversals right now.  By default, nothing is being skipped, so skippedBases == 0.
-     */
-    private long skippedBases = 0;
-
-    public AlignmentContext(GenomeLoc loc, ReadBackedPileup basePileup) {
-        this(loc, basePileup, 0, false);
-    }
-
-    public AlignmentContext(GenomeLoc loc, ReadBackedPileup basePileup, boolean hasPileupBeenDownsampled) {
-        this(loc, basePileup, 0, hasPileupBeenDownsampled);
-    }
-
-    public AlignmentContext(GenomeLoc loc, ReadBackedPileup basePileup, long skippedBases) {
-        this(loc, basePileup, skippedBases, false);
-    }
-
-    public AlignmentContext(GenomeLoc loc, ReadBackedPileup basePileup, long skippedBases,boolean hasPileupBeenDownsampled ) {
-        if ( loc == null ) throw new ReviewedGATKException("BUG: GenomeLoc in Alignment context is null");
-        if ( basePileup == null ) throw new ReviewedGATKException("BUG: ReadBackedPileup in Alignment context is null");
-        if ( skippedBases < 0 ) throw new ReviewedGATKException("BUG: skippedBases is -1 in Alignment context");
-
-        this.loc = loc;
-        this.basePileup = basePileup;
-        this.skippedBases = skippedBases;
-        this.hasPileupBeenDownsampled = hasPileupBeenDownsampled;
-    }
-
-    /** Returns base pileup over the current genomic location. Deprectated. Use getBasePileup() to make your intentions
-     * clear.
-     * @return
-     */
-    @Deprecated
-    public ReadBackedPileup getPileup() { return basePileup; }
-
-    /** Returns base pileup over the current genomic location. May return null if this context keeps only
-     * extended event (indel) pileup.
-     * @return
-     */
-    public ReadBackedPileup getBasePileup() {
-        return basePileup;
-    }
-
-    /**
-     * Returns true if any reads have been filtered out of the pileup due to excess DoC.
-     * @return True if reads have been filtered out.  False otherwise.
-     */
-    public boolean hasPileupBeenDownsampled() { return hasPileupBeenDownsampled; }
-
-    /**
-     * get all of the reads within this context
-     * 
-     * @return
-     */
-    @Deprecated
-    //todo: unsafe and tailored for current usage only; both pileups can be null or worse, bot can be not null in theory
-    public List<GATKSAMRecord> getReads() { return ( basePileup.getReads() ); }
-
-    /**
-     * Are there any reads associated with this locus?
-     *
-     * @return
-     */
-    public boolean hasReads() {
-        return basePileup != null && basePileup.getNumberOfElements() > 0 ;
-    }
-
-    /**
-     * How many reads cover this locus?
-     * @return
-     */
-    public int size() {
-        return basePileup.getNumberOfElements();
-    }
-
-    /**
-     * get a list of the equivalent positions within in the reads at Pos
-     *
-     * @return
-     */
-    @Deprecated
-    public List<Integer> getOffsets() {
-        return basePileup.getOffsets();
-    }
-
-    public String getContig() { return getLocation().getContig(); }
-    public long getPosition() { return getLocation().getStart(); }
-    public GenomeLoc getLocation() { return loc; }
-
-    public void downsampleToCoverage(int coverage) {
-        basePileup = basePileup.getDownsampledPileup(coverage);
-        hasPileupBeenDownsampled = true;
-    }
-
-    /**
-     * Returns the number of bases we've skipped over in the reference since the last map invocation.
-     * Only filled in by RodTraversals right now.  A value of 0 indicates that no bases were skipped.
-     *
-     * @return the number of skipped bases
-     */
-    public long getSkippedBases() {
-        return skippedBases;
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/contexts/AlignmentContextUtils.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/contexts/AlignmentContextUtils.java
deleted file mode 100644
index afeb1e7..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/contexts/AlignmentContextUtils.java
+++ /dev/null
@@ -1,150 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.contexts;
-
-import htsjdk.samtools.SAMReadGroupRecord;
-import org.broadinstitute.gatk.utils.GenomeLoc;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-import org.broadinstitute.gatk.utils.exceptions.UserException;
-import org.broadinstitute.gatk.utils.pileup.*;
-
-import java.util.*;
-
-/**
- * Useful utilities for storing different AlignmentContexts
- * User: ebanks
- */
-public class AlignmentContextUtils {
-
-    // Definitions:
-    //   COMPLETE = full alignment context
-    //   FORWARD  = reads on forward strand
-    //   REVERSE  = reads on forward strand
-    //
-    public enum ReadOrientation { COMPLETE, FORWARD, REVERSE }
-
-    private AlignmentContextUtils() {
-        // cannot be instantiated
-    }
-
-    /**
-     * Returns a potentially derived subcontext containing only forward, reverse, or in fact all reads
-     * in alignment context context.
-     *
-     * @param context
-     * @param type
-     * @return
-     */
-    public static AlignmentContext stratify(AlignmentContext context, ReadOrientation type) {
-        switch(type) {
-            case COMPLETE:
-                return context;
-            case FORWARD:
-                return new AlignmentContext(context.getLocation(),context.getPileup().getPositiveStrandPileup());
-            case REVERSE:
-                return new AlignmentContext(context.getLocation(),context.getPileup().getNegativeStrandPileup());
-            default:
-                throw new ReviewedGATKException("Unable to get alignment context for type = " + type);
-        }
-    }
-
-    public static Map<String, AlignmentContext> splitContextBySampleName(AlignmentContext context) {
-        return splitContextBySampleName(context, null);
-    }
-
-    /**
-     * Splits the given AlignmentContext into a StratifiedAlignmentContext per sample, but referencd by sample name instead
-     * of sample object.
-     *
-     * @param context                the original pileup
-     *
-     * @return a Map of sample name to StratifiedAlignmentContext
-     *
-     **/
-    public static Map<String, AlignmentContext> splitContextBySampleName(AlignmentContext context, String assumedSingleSample) {
-        GenomeLoc loc = context.getLocation();
-        HashMap<String, AlignmentContext> contexts = new HashMap<String, AlignmentContext>();
-
-        for(String sample: context.getPileup().getSamples()) {
-            ReadBackedPileup pileupBySample = context.getPileup().getPileupForSample(sample);
-
-            // Don't add empty pileups to the split context.
-            if(pileupBySample.getNumberOfElements() == 0)
-                continue;
-
-            if(sample != null)
-                contexts.put(sample, new AlignmentContext(loc, pileupBySample));
-            else {
-                if(assumedSingleSample == null) {
-                    throw new UserException.ReadMissingReadGroup(pileupBySample.iterator().next().getRead());
-                }
-                contexts.put(assumedSingleSample,new AlignmentContext(loc, pileupBySample));
-            }
-        }
-
-        return contexts;
-    }
-
-    /**
-     * Splits the AlignmentContext into one context per read group
-     *
-     * @param context the original pileup
-     * @return a Map of ReadGroup to AlignmentContext, or an empty map if context has no base pileup
-     *
-     **/
-    public static Map<SAMReadGroupRecord, AlignmentContext> splitContextByReadGroup(AlignmentContext context, Collection<SAMReadGroupRecord> readGroups) {
-        HashMap<SAMReadGroupRecord, AlignmentContext> contexts = new HashMap<SAMReadGroupRecord, AlignmentContext>();
-
-        for (SAMReadGroupRecord rg : readGroups) {
-            ReadBackedPileup rgPileup = context.getBasePileup().getPileupForReadGroup(rg.getReadGroupId());
-            if ( rgPileup != null ) // there we some reads for RG
-                contexts.put(rg, new AlignmentContext(context.getLocation(), rgPileup));
-        }
-
-        return contexts;
-    }
-
-    public static Map<String, AlignmentContext> splitContextBySampleName(ReadBackedPileup pileup) {
-        return splitContextBySampleName(new AlignmentContext(pileup.getLocation(), pileup));
-    }
-
-
-    public static AlignmentContext joinContexts(Collection<AlignmentContext> contexts) {
-        // validation
-        GenomeLoc loc = contexts.iterator().next().getLocation();
-        for(AlignmentContext context: contexts) {
-            if(!loc.equals(context.getLocation()))
-                throw new ReviewedGATKException("Illegal attempt to join contexts from different genomic locations");
-        }
-
-        List<PileupElement> pe = new ArrayList<PileupElement>();
-        for(AlignmentContext context: contexts) {
-            for(PileupElement pileupElement: context.basePileup)
-                pe.add(pileupElement);
-        }
-        return new AlignmentContext(loc, new ReadBackedPileupImpl(loc,pe));
-    }
-}
\ No newline at end of file
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/contexts/ReferenceContext.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/contexts/ReferenceContext.java
deleted file mode 100644
index 201ea49..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/contexts/ReferenceContext.java
+++ /dev/null
@@ -1,217 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.contexts;
-
-import com.google.java.contract.Ensures;
-import com.google.java.contract.Requires;
-import org.broadinstitute.gatk.utils.BaseUtils;
-import org.broadinstitute.gatk.utils.GenomeLoc;
-import org.broadinstitute.gatk.utils.GenomeLocParser;
-
-/**
- * The section of the reference that overlaps with the given
- * read / locus. 
- *
- * @author hanna
- * @version 0.1
- */
-public class ReferenceContext {
-    /**
-     * Facilitates creation of new GenomeLocs.
-     */
-    final private GenomeLocParser genomeLocParser;
-
-    /**
-     * The locus.
-     */
-    final private GenomeLoc locus;
-
-    /**
-     * The window of reference information around the current locus.
-     */
-    final private GenomeLoc window;
-
-    /**
-     * The bases in the window around the current locus.  If null, then bases haven't been fetched yet.
-     * Bases are always upper cased
-     */
-    private byte[] basesCache = null;
-
-    /**
-     * Lazy loader to fetch reference bases
-     */
-    final private ReferenceContextRefProvider basesProvider;
-
-    /**
-     * Interface to create byte[] contexts for lazy loading of the reference
-     */
-    public static interface ReferenceContextRefProvider {
-        /**
-         * You must provide a routine that gets the byte[] bases that would have been passed into the
-         * ReferenceContext.  The RC will handling caching.  The value of this interface and routine is
-         * that it is only called when the bytes are actually requested by the walker, not up front.  So
-         * if the walker doesn't need the refBases for whatever reason, there's no overhead to
-         * provide them.
-         *
-         * @return
-         */
-        @Ensures({"result != null"})
-        public byte[] getBases();
-    }
-
-    private static class ForwardingProvider implements ReferenceContextRefProvider {
-        byte[] bases;
-
-        public ForwardingProvider( byte base ) {
-            this(new byte[] { base });
-        }
-
-        public ForwardingProvider( byte[] bases ) {
-            this.bases = bases;
-        }
-
-        public byte[] getBases() { return bases; }
-    }
-
-    /**
-     * Contructor for a simple, windowless reference context.
-     * @param locus locus of interest.
-     * @param base reference base at that locus.
-     */
-    @Requires({
-            "genomeLocParser != null",
-            "locus != null",
-            "locus.size() > 0"})
-    public ReferenceContext( GenomeLocParser genomeLocParser, GenomeLoc locus, byte base ) {
-        this( genomeLocParser, locus, locus, new ForwardingProvider(base) );
-    }
-
-    @Requires({
-            "genomeLocParser != null",
-            "locus != null",
-            "locus.size() > 0",
-            "window != null",
-            "window.size() > 0",
-            "bases != null && bases.length > 0"})
-    public ReferenceContext( GenomeLocParser genomeLocParser, GenomeLoc locus, GenomeLoc window, byte[] bases ) {
-        this( genomeLocParser, locus, window, new ForwardingProvider(bases) );
-    }
-
-    @Requires({
-            "genomeLocParser != null",
-            "locus != null",
-            "locus.size() > 0",
-            "window != null",
-            "window.size() > 0",
-            "basesProvider != null"})
-    public ReferenceContext( GenomeLocParser genomeLocParser, GenomeLoc locus, GenomeLoc window, ReferenceContextRefProvider basesProvider ) {
-        this.genomeLocParser = genomeLocParser;
-        this.locus = locus;
-        this.window = window;
-        this.basesProvider = basesProvider;
-    }
-
-    /**
-     * Utility function to load bases from the provider to the cache, if necessary
-     */
-    @Ensures({
-            "basesCache != null",
-            "old(basesCache) == null || old(basesCache) == basesCache"})
-    private void fetchBasesFromProvider() {
-        if ( basesCache == null ) {
-            basesCache = basesProvider.getBases();
-
-            // must be an assertion that only runs when the bases are fetch to run in a reasonable amount of time
-            assert BaseUtils.isUpperCase(basesCache);
-        }
-    }
-
-    /**
-     * @return The genome loc parser associated with this reference context
-     */
-    @Ensures("result != null")
-    public GenomeLocParser getGenomeLocParser() {
-        return genomeLocParser;
-    }
-
-    /**
-     * The locus currently being examined.
-     * @return The current locus.
-     */
-    @Ensures("result != null")
-    public GenomeLoc getLocus() {
-        return locus;
-    }
-
-    @Ensures("result != null")
-    public GenomeLoc getWindow() {
-        return window;
-    }
-
-    /**
-     * Get the base at the given locus.
-     * @return The base at the given locus from the reference.
-     */
-    public byte getBase() {
-        return getBases()[(locus.getStart() - window.getStart())];
-    }
-
-    /**
-     * All the bases in the window currently being examined.
-     * @return All bases available.  If the window is of size [0,0], the array will
-     *         contain only the base at the given locus.
-     */
-    @Ensures({"result != null", "result.length > 0"})
-    public byte[] getBases() {
-        fetchBasesFromProvider();
-        return basesCache;
-    }
-
-    /**
-     * All the bases in the window from the current base forward to the end of the window.
-     */
-    @Ensures({"result != null", "result.length > 0"})
-    public byte[] getForwardBases() {
-        final byte[] bases = getBases();
-        final int mid = locus.getStart() - window.getStart();
-        // todo -- warning of performance problem, especially if this is called over and over
-        return new String(bases).substring(mid).getBytes();
-    }
-
-    @Deprecated
-    public char getBaseAsChar() {
-        return (char)getBase();
-    }
-
-    /**
-     * Get the base at the given locus.
-     * @return The base at the given locus from the reference.
-     */
-    @Deprecated()
-    public int getBaseIndex() {
-        return BaseUtils.simpleBaseToBaseIndex(getBase());
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/package-info.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/package-info.java
deleted file mode 100644
index 680da25..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/package-info.java
+++ /dev/null
@@ -1,26 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.datasources;
\ No newline at end of file
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/providers/AllLocusView.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/providers/AllLocusView.java
deleted file mode 100644
index 56ecce2..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/providers/AllLocusView.java
+++ /dev/null
@@ -1,169 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.datasources.providers;
-
-import org.broadinstitute.gatk.engine.contexts.AlignmentContext;
-import org.broadinstitute.gatk.engine.iterators.GenomeLocusIterator;
-import org.broadinstitute.gatk.utils.GenomeLoc;
-import org.broadinstitute.gatk.utils.pileup.ReadBackedPileupImpl;
-import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
-
-import java.util.Collections;
-import java.util.List;
-import java.util.NoSuchElementException;
-/**
- * User: hanna
- * Date: May 13, 2009
- * Time: 3:32:30 PM
- * BROAD INSTITUTE SOFTWARE COPYRIGHT NOTICE AND AGREEMENT
- * Software and documentation are copyright 2005 by the Broad Institute.
- * All rights are reserved.
- *
- * Users acknowledge that this software is supplied without any warranty or support.
- * The Broad Institute is not responsible for its use, misuse, or
- * functionality.
- */
-
-/**
- * A LocusView over which the user can iterate.
- */
-
-public class AllLocusView extends LocusView {
-    private GenomeLocusIterator locusIterator;
-
-    /**
-     * Gets the next position in the view: next call to next() will jump there.
-     * Note that both nextPosition and nextLocus are PRE-read and cached.
-     */
-    private GenomeLoc nextPosition = null;
-
-    /**
-     * What's the next available context?
-     */
-    private AlignmentContext nextLocus = null;
-
-    /**
-     * Signal not to advance the iterator because we're currently sitting at the next element.
-     */
-    private boolean atNextElement = false;
-
-    /**
-     * Create a new queue of locus contexts.
-     *
-     * @param provider
-     */
-    public AllLocusView(LocusShardDataProvider provider) {
-        super(provider);
-        // Seed the state tracking members with the first possible seek position and the first possible locus context.
-        locusIterator = new GenomeLocusIterator(genomeLocParser, provider.getLocus());
-    }
-
-    public boolean hasNext() {
-        advance();
-        return nextPosition != null;
-    }
-
-    public AlignmentContext next() {
-        advance();
-
-        if (nextPosition == null)
-            throw new NoSuchElementException("No next is available in the all locus view");
-
-        // Flag to the iterator that no data is waiting in the queue to be processed.
-        atNextElement = false;
-
-        AlignmentContext currentLocus;
-
-        // If actual data is present, return it.  Otherwise, return empty data.
-        if (nextLocus != null && nextLocus.getLocation().equals(nextPosition))
-            currentLocus = nextLocus;
-        else
-            currentLocus = createEmptyLocus(nextPosition);
-
-        return currentLocus;
-    }
-
-    private void advance() {
-        // Already at the next element?  Don't move forward.
-        if (atNextElement)
-            return;
-
-        // Out of elements?
-        if (nextPosition == null && !locusIterator.hasNext())
-            return;
-
-        // If nextLocus has been consumed, clear it out to make room for the next incoming locus.
-        if (nextPosition != null && nextLocus != null && !nextLocus.getLocation().isPast(nextPosition)) {
-            nextLocus = null;
-
-            // Determine the next locus. The trick is that we may have more than one alignment context at the same
-            // reference position (regular base pileup, then extended pileup). If next alignment context (that we just pre-read)
-            // is still at the current position, we do not increment current position and wait for next call to next() to return
-            // that context. If we know that next context is past the current position, we are done with current
-            // position
-            if (hasNextLocus()) {
-                nextLocus = nextLocus();
-                if (nextPosition.equals(nextLocus.getLocation())) {
-                    atNextElement = true;
-                    return;
-                }
-            }
-        }
-
-        // No elements left in queue?  Clear out the position state tracker and return.
-        if (!locusIterator.hasNext()) {
-            nextPosition = null;
-            return;
-        }
-
-        // Actually fill the next position.
-        nextPosition = locusIterator.next();
-        atNextElement = true;
-
-        // Crank the iterator to (if possible) or past the next context.  Be careful not to hold a reference to nextLocus
-        // while using the hasNextLocus() / nextLocus() machinery; this will cause us to use more memory than is optimal. 
-        while (nextLocus == null || nextLocus.getLocation().isBefore(nextPosition)) {
-            nextLocus = null;
-            if (!hasNextLocus())
-                break;
-            nextLocus = nextLocus();
-        }
-    }
-
-    /**
-     * Creates a blank locus context at the specified location.
-     *
-     * @param site Site at which to create the blank locus context.
-     * @return empty context.
-     */
-    private final static List<GATKSAMRecord> EMPTY_PILEUP_READS = Collections.emptyList();
-    private final static List<Integer> EMPTY_PILEUP_OFFSETS = Collections.emptyList();
-    private final static List<Boolean> EMPTY_DELETION_STATUS = Collections.emptyList();
-
-    private AlignmentContext createEmptyLocus(GenomeLoc site) {
-        return new AlignmentContext(site, new ReadBackedPileupImpl(site, EMPTY_PILEUP_READS, EMPTY_PILEUP_OFFSETS));
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/providers/CoveredLocusView.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/providers/CoveredLocusView.java
deleted file mode 100644
index 900612a..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/providers/CoveredLocusView.java
+++ /dev/null
@@ -1,63 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.datasources.providers;
-
-import org.broadinstitute.gatk.engine.contexts.AlignmentContext;
-/**
- * User: hanna
- * Date: May 12, 2009
- * Time: 11:24:42 AM
- * BROAD INSTITUTE SOFTWARE COPYRIGHT NOTICE AND AGREEMENT
- * Software and documentation are copyright 2005 by the Broad Institute.
- * All rights are reserved.
- *
- * Users acknowledge that this software is supplied without any warranty or support.
- * The Broad Institute is not responsible for its use, misuse, or
- * functionality.
- */
-
-/**
- * A queue of locus contexts.  Provides unidirectional seek.  Stripped down
- * implementation of java.util.Queue interface.
- */
-
-public class CoveredLocusView extends LocusView {
-    /**
-     * Create a new queue of locus contexts.
-     * @param provider
-     */
-    public CoveredLocusView(LocusShardDataProvider provider) {
-        super(provider);
-    }
-
-    public boolean hasNext() {
-        return hasNextLocus();
-    }
-
-    public AlignmentContext next() {
-        return nextLocus();
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/providers/IntervalOverlappingRODsFromStream.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/providers/IntervalOverlappingRODsFromStream.java
deleted file mode 100644
index 9100905..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/providers/IntervalOverlappingRODsFromStream.java
+++ /dev/null
@@ -1,168 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.datasources.providers;
-
-import com.google.java.contract.Ensures;
-import com.google.java.contract.Requires;
-import htsjdk.samtools.util.PeekableIterator;
-import org.broadinstitute.gatk.engine.refdata.RODRecordListImpl;
-import org.broadinstitute.gatk.engine.refdata.utils.GATKFeature;
-import org.broadinstitute.gatk.engine.refdata.utils.RODRecordList;
-import org.broadinstitute.gatk.utils.GenomeLoc;
-
-import java.util.Collection;
-import java.util.LinkedList;
-import java.util.ListIterator;
-
-/**
- * Key algorithmic helper for ReadBasedReferenceOrderedData
- *
- * Takes a single iterator of features, and provides a single capability that returns
- * the list of RODs that overlap an interval.  Allows sequential getOverlapping calls
- * from intervals provided that these intervals always have increasing getStart() values.
- *
- */
-class IntervalOverlappingRODsFromStream {
-    /**
-     * Only held for QC purposes
-     */
-    GenomeLoc lastQuery = null;
-
-    private final String name;
-    private final LinkedList<GATKFeature> currentFeatures = new LinkedList<GATKFeature>();
-    private final PeekableIterator<RODRecordList> futureFeatures;
-
-    /**
-     * Create a new IntervalOverlappingRODsFromStream that reads elements from futureFeatures and
-     * returns RODRecordLists having name
-     *
-     * @param name
-     * @param futureFeatures
-     */
-    IntervalOverlappingRODsFromStream(final String name, final PeekableIterator<RODRecordList> futureFeatures) {
-        if ( futureFeatures == null ) throw new IllegalArgumentException("futureFeatures cannot be null");
-
-        this.name = name;
-        this.futureFeatures = futureFeatures;
-    }
-
-    /**
-     * Get the list of RODs overlapping loc from this stream of RODs.
-     *
-     * @param loc the interval to query
-     * @return a non-null RODRecordList containing the overlapping RODs, which may be empty
-     */
-    @Ensures({"overlaps(loc, result)",
-            "! futureFeatures.hasNext() || futureFeatures.peek().getLocation().isPast(loc)",
-            "result != null"})
-    public RODRecordList getOverlapping(final GenomeLoc loc) {
-        if ( lastQuery != null && loc.getStart() < lastQuery.getStart() )
-            throw new IllegalArgumentException(String.format("BUG: query interval (%s) starts before the previous interval %s", loc, lastQuery));
-
-        readOverlappingFutureFeatures(loc);
-        return new RODRecordListImpl(name, subsetToOverlapping(loc, currentFeatures), loc);
-    }
-
-
-    /**
-     * For contract assurance.  Checks that all bindings in loc overlap
-     *
-     * @param loc
-     * @param bindings
-     * @return
-     */
-    @Requires({"loc != null", "bindings != null"})
-    private boolean overlaps(final GenomeLoc loc, final RODRecordList bindings) {
-        for ( final GATKFeature feature : bindings )
-            if ( ! feature.getLocation().overlapsP(loc) )
-                return false;
-        return true;
-    }
-
-    /**
-     * Subset the features in all to those that overlap with loc
-     *
-     * The current features list contains everything read that cannot be thrown away yet, but not
-     * everything in there necessarily overlaps with loc.  Subset to just those that do overlap
-     *
-     * @param loc the location that features must overlap
-     * @param all the list of all features
-     * @return a subset of all that overlaps with loc
-     */
-    @Requires({"loc != null", "all != null"})
-    @Ensures("result.size() <= all.size()")
-    private Collection<GATKFeature> subsetToOverlapping(final GenomeLoc loc, final Collection<GATKFeature> all) {
-        final LinkedList<GATKFeature> overlapping = new LinkedList<GATKFeature>();
-        for ( final GATKFeature feature : all )
-            if ( feature.getLocation().overlapsP(loc) )
-                overlapping.add(feature);
-        return overlapping;
-    }
-
-    /**
-     * Update function.  Remove all elements of currentFeatures that end before loc
-     *
-     * Must be called by clients periodically when they know they they will never ask for data before
-     * loc, so that the running cache of RODs doesn't grow out of control.
-     *
-     * @param loc the location to use
-     */
-    @Requires("loc != null")
-    @Ensures("currentFeatures.size() <= old(currentFeatures.size())")
-    public void trimCurrentFeaturesToLoc(final GenomeLoc loc) {
-        final ListIterator<GATKFeature> it = currentFeatures.listIterator();
-        while ( it.hasNext() ) {
-            final GATKFeature feature = it.next();
-            if ( feature.getLocation().isBefore(loc) )
-                it.remove();
-        }
-    }
-
-    /**
-     * Update function: Read all elements from futureFeatures that overlap with loc
-     *
-     * Stops at the first element that starts before the end of loc, or the stream empties
-     *
-     * @param loc
-     */
-    @Requires("loc != null")
-    @Ensures("currentFeatures.size() >= old(currentFeatures.size())")
-    private void readOverlappingFutureFeatures(final GenomeLoc loc) {
-        while ( futureFeatures.hasNext() ) {
-            final GenomeLoc nextLoc = futureFeatures.peek().getLocation();
-            if ( nextLoc.isBefore(loc) ) {
-                futureFeatures.next(); // next rod element is before loc, throw it away and keep looking
-            } else if ( nextLoc.isPast(loc) ) {
-                break; // next element is past loc, stop looking but don't pop it
-            } else if ( nextLoc.overlapsP(loc) ) {
-                // add overlapping elements to our current features, removing from stream
-                for ( final GATKFeature feature : futureFeatures.next() ) {
-                    currentFeatures.add(feature);
-                }
-            }
-        }
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/providers/IntervalReferenceOrderedView.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/providers/IntervalReferenceOrderedView.java
deleted file mode 100644
index 23f4f73..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/providers/IntervalReferenceOrderedView.java
+++ /dev/null
@@ -1,184 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.datasources.providers;
-
-import htsjdk.samtools.util.PeekableIterator;
-import org.broadinstitute.gatk.engine.contexts.ReferenceContext;
-import org.broadinstitute.gatk.engine.datasources.reads.ReadShard;
-import org.broadinstitute.gatk.engine.datasources.rmd.ReferenceOrderedDataSource;
-import org.broadinstitute.gatk.engine.refdata.RefMetaDataTracker;
-import org.broadinstitute.gatk.engine.refdata.utils.LocationAwareSeekableRODIterator;
-import org.broadinstitute.gatk.engine.refdata.utils.RODRecordList;
-import org.broadinstitute.gatk.utils.GenomeLoc;
-import org.broadinstitute.gatk.utils.GenomeLocParser;
-
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.List;
-
-/**
- * a ROD view that allows for requests for RODs that overlap intervals on the genome to produce a RefMetaDataTracker
- */
-public class IntervalReferenceOrderedView implements ReferenceOrderedView {
-    /** a list of the RMDDataState (location->iterators) */
-    private final List<RMDDataState> states = new ArrayList<>(1);
-
-    /**
-     * Used to get genome locs for reads
-     */
-    protected final GenomeLocParser genomeLocParser;
-
-    /**
-     * The total extent of all reads in this span.  We create iterators from our RODs
-     * from the start of this span, to the end.
-     */
-    private final GenomeLoc shardSpan;
-
-    /**
-     * Create a new IntervalReferenceOrderedView taking data from provider and capable of
-     * servicing ROD overlap requests within the genomic interval span
-     *
-     * @param provider a ShardDataProvider to give us data
-     * @param span a GenomeLoc span, or null indicating take the entire genome
-     */
-    public IntervalReferenceOrderedView(final ShardDataProvider provider, final GenomeLoc span) {
-        if ( provider == null ) throw new IllegalArgumentException("provider cannot be null");
-        if ( provider.hasReferenceOrderedData() && span == null ) throw new IllegalArgumentException("span cannot be null when provider has reference ordered data");
-
-        this.genomeLocParser = provider.getGenomeLocParser();
-        this.shardSpan = span;
-        provider.register(this);
-
-        // conditional to optimize the case where we don't have any ROD data
-        if ( provider.hasReferenceOrderedData() && ! shardSpan.isUnmapped() ) {
-            for (final ReferenceOrderedDataSource dataSource : provider.getReferenceOrderedData())
-                states.add(new RMDDataState(dataSource, dataSource.seek(shardSpan)));
-        }
-    }
-
-    /**
-     * Testing constructor
-     */
-    protected IntervalReferenceOrderedView(final GenomeLocParser genomeLocParser,
-                                           final GenomeLoc shardSpan,
-                                           final List<String> names,
-                                           final List<PeekableIterator<RODRecordList>> featureSources) {
-        this.genomeLocParser = genomeLocParser;
-        this.shardSpan = shardSpan;
-        for ( int i = 0; i < names.size(); i++ )
-            states.add(new RMDDataState(names.get(i), featureSources.get(i)));
-    }
-
-    public Collection<Class<? extends View>> getConflictingViews() {
-        List<Class<? extends View>> classes = new ArrayList<>();
-        classes.add(ManagingReferenceOrderedView.class);
-        return classes;
-    }
-
-    /**
-     * Get a RefMetaDataTracker containing bindings for all RODs overlapping the start position of loc
-     * @param loc a GenomeLoc of size == 1
-     * @return a non-null RefMetaDataTracker
-     */
-    @Override
-    public RefMetaDataTracker getReferenceOrderedDataAtLocus(GenomeLoc loc) {
-        if ( loc == null ) throw new IllegalArgumentException("loc cannot be null");
-        if ( loc.size() != 1 ) throw new IllegalArgumentException("GenomeLoc must have size == 1 but got " + loc);
-        return getReferenceOrderedDataForInterval(loc);
-    }
-
-    /**
-     * Get a RefMetaDataTracker containing bindings for all RODs overlapping interval
-     *
-     * @param interval a non=null interval
-     * @return a non-null RefMetaDataTracker
-     */
-    public RefMetaDataTracker getReferenceOrderedDataForInterval(final GenomeLoc interval) {
-        if ( interval == null ) throw new IllegalArgumentException("Interval cannot be null");
-
-        if ( states.isEmpty() || shardSpan.isUnmapped() ) // optimization for no bindings (common for read walkers)
-            return RefMetaDataTracker.EMPTY_TRACKER;
-        else {
-            final List<RODRecordList> bindings = new ArrayList<>(states.size());
-            for ( final RMDDataState state : states )
-                bindings.add(state.stream.getOverlapping(interval));
-            return new RefMetaDataTracker(bindings);
-        }
-    }
-
-    /**
-     * Trim down all of the ROD managers so that they only hold ROD bindings wit start >= startOfDataToKeep.getStart()
-     *
-     * @param startOfDataToKeep a non-null genome loc
-     */
-    public void trimCurrentFeaturesToLoc(final GenomeLoc startOfDataToKeep) {
-        if ( startOfDataToKeep == null ) throw new IllegalArgumentException("startOfDataToKeep cannot be null");
-
-        for ( final RMDDataState state : states )
-            state.stream.trimCurrentFeaturesToLoc(startOfDataToKeep);
-    }
-
-    /**
-     * Closes the current view.
-     */
-    public void close() {
-        for (final RMDDataState state : states)
-            state.close();
-
-        // Clear out the existing data so that post-close() accesses to this data will fail-fast.
-        states.clear();
-    }
-
-    /**
-     * Models the traversal state of a given ROD lane.
-     */
-    private static class RMDDataState {
-        public final ReferenceOrderedDataSource dataSource;
-        public final IntervalOverlappingRODsFromStream stream;
-        private final LocationAwareSeekableRODIterator iterator;
-
-        public RMDDataState(ReferenceOrderedDataSource dataSource, LocationAwareSeekableRODIterator iterator) {
-            this.dataSource = dataSource;
-            this.iterator = iterator;
-            this.stream = new IntervalOverlappingRODsFromStream(dataSource.getName(), new PeekableIterator<>(iterator));
-        }
-
-        /**
-         * For testing
-         */
-        public RMDDataState(final String name, final PeekableIterator<RODRecordList> iterator) {
-            this.dataSource = null;
-            this.iterator = null;
-            this.stream = new IntervalOverlappingRODsFromStream(name, new PeekableIterator<>(iterator));
-        }
-
-        public void close() {
-            if ( dataSource != null )
-                dataSource.close( iterator );
-        }
-    }
-}
-
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/providers/InvalidPositionException.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/providers/InvalidPositionException.java
deleted file mode 100644
index 997435d..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/providers/InvalidPositionException.java
+++ /dev/null
@@ -1,46 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.datasources.providers;
-
-/**
- * Created by IntelliJ IDEA.
- * User: hanna
- * Date: Apr 16, 2009
- * Time: 4:11:40 PM
- *
- * Thrown to indicate invalid positions passed to the providers.
- * Extend from RuntimeException to make it easier on our walker writers; don't make
- * them catch every exception that comes their way.
- */
-public class InvalidPositionException extends RuntimeException {
-    public InvalidPositionException(String message) {
-        super(message);
-    }
-
-    public InvalidPositionException(String message, Throwable throwable) {
-        super(message,throwable);
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/providers/LocusReferenceView.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/providers/LocusReferenceView.java
deleted file mode 100644
index b535050..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/providers/LocusReferenceView.java
+++ /dev/null
@@ -1,236 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.datasources.providers;
-
-import htsjdk.samtools.reference.ReferenceSequence;
-import org.broadinstitute.gatk.engine.contexts.ReferenceContext;
-import org.broadinstitute.gatk.engine.walkers.Reference;
-import org.broadinstitute.gatk.engine.walkers.Walker;
-import org.broadinstitute.gatk.engine.walkers.Window;
-import org.broadinstitute.gatk.utils.GenomeLoc;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-/*
- * Copyright (c) 2009 The Broad Institute
- *
- * Permission is hereby granted, free of charge, to any person
- * obtaining a copy of this software and associated documentation
- * files (the "Software"), to deal in the Software without
- * restriction, including without limitation the rights to use,
- * copy, modify, merge, publish, distribute, sublicense, and/or sell
- * copies of the Software, and to permit persons to whom the
- * Software is furnished to do so, subject to the following
- * conditions:
- *
- * The above copyright notice and this permission notice shall be
- * included in all copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
- * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
- * OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
- * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
- * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
- * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
- * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
- * OTHER DEALINGS IN THE SOFTWARE.
- */
-
-/**
- * Provides access to the portion of the reference covering a single locus.
- */
-public class LocusReferenceView extends ReferenceView {
-    /**
-     * Bound the reference view to make sure all accesses are within the shard.
-     */
-    private GenomeLoc bounds;
-
-    /**
-     * Start of the expanded window for which the reference context should be provided,
-     * relative to the locus in question.
-     */
-    private final int windowStart;
-
-
-    /**
-     * Start of the expanded window for which the reference context should be provided,
-     * relative to the locus in question.
-     */
-    private final int windowStop;
-
-    /**
-     * Track the reference sequence and the last point accessed.  Used to
-     * track state when traversing over the reference.
-     */
-    private ReferenceSequence referenceSequence;
-
-    /**
-     * Create a LocusReferenceView given no other contextual information about
-     * the walkers, etc.
-     * @param provider  source for locus data.
-     */
-    public LocusReferenceView( LocusShardDataProvider provider ) {
-        super(provider);
-        initializeBounds(provider);
-        windowStart = windowStop = 0;
-        initializeReferenceSequence(bounds);
-    }
-
-    /**
-     * Create a new locus reference view.
-     * @param provider source for locus data.
-     */
-    public LocusReferenceView( Walker walker, LocusShardDataProvider provider ) {
-        super( provider );
-        initializeBounds(provider);
-
-        // Retrieve information about the window being accessed.
-        if( walker.getClass().isAnnotationPresent(Reference.class) ) {
-            Window window = walker.getClass().getAnnotation(Reference.class).window();
-
-            if( window.start() > 0 ) throw new ReviewedGATKException( "Reference window starts after current locus" );
-            if( window.stop() < 0 ) throw new ReviewedGATKException( "Reference window ends before current locus" );
-
-            windowStart = window.start();
-            windowStop = window.stop();
-        }
-        else {
-            windowStart = 0;
-            windowStop = 0;
-        }
-
-        if(bounds != null) {
-            int expandedStart = getWindowStart( bounds );
-            int expandedStop  = getWindowStop( bounds );
-            initializeReferenceSequence(genomeLocParser.createGenomeLoc(bounds.getContig(), bounds.getContigIndex(), expandedStart, expandedStop));
-        }
-    }
-
-    /**
-     * Initialize the bounds of this shard, trimming the bounds so that they match the reference.
-     * @param provider Provider covering the appropriate locus.
-     */
-    private void initializeBounds(LocusShardDataProvider provider) {
-        if(provider.getLocus() != null) {
-            int sequenceLength = reference.getSequenceDictionary().getSequence(provider.getLocus().getContig()).getSequenceLength();
-            bounds = genomeLocParser.createGenomeLoc(provider.getLocus().getContig(),
-                    Math.max(provider.getLocus().getStart(),1),
-                    Math.min(provider.getLocus().getStop(),sequenceLength));
-        }
-        else
-            bounds = null;
-    }
-
-    /**
-     * Initialize reference sequence data using the given locus.
-     * @param locus
-     */
-    private void initializeReferenceSequence( GenomeLoc locus ) {
-        this.referenceSequence = reference.getSubsequenceAt( locus.getContig(), locus.getStart(), locus.getStop() );
-    }
-
-    protected GenomeLoc trimToBounds(GenomeLoc l) {
-        int expandedStart = getWindowStart( bounds );
-        int expandedStop  = getWindowStop( bounds );
-        if ( l.getStart() < expandedStart ) l = genomeLocParser.setStart(l, expandedStart);
-        if ( l.getStop() > expandedStop  ) l = genomeLocParser.setStop(l, expandedStop);
-        return l;
-    }
-
-    public class Provider implements ReferenceContext.ReferenceContextRefProvider {
-        int refStart, len;
-
-        public Provider( int refStart, int len ) {
-            this.refStart = refStart;
-            this.len = len;
-        }
-
-        public byte[] getBases() {
-            //System.out.printf("Getting bases for location%n");
-            byte[] bases = new byte[len];
-            System.arraycopy(referenceSequence.getBases(), refStart, bases, 0, len);
-            return bases;
-        }
-    }
-
-    /**
-     * Gets the reference context associated with this particular point or extended interval on the genome.
-     * @param genomeLoc Region for which to retrieve the base(s). If region spans beyond contig end or beyond current bounds, it will be trimmed down.
-     * @return The base at the position represented by this genomeLoc.
-     */
-    public ReferenceContext getReferenceContext( GenomeLoc genomeLoc ) {
-        //validateLocation( genomeLoc );
-
-        GenomeLoc window = genomeLocParser.createGenomeLoc( genomeLoc.getContig(), genomeLoc.getContigIndex(),
-                getWindowStart(genomeLoc), getWindowStop(genomeLoc) );
-
-        int refStart = -1;
-        if (bounds != null) {
-            window = trimToBounds(window);
-            refStart = (int)(window.getStart() - getWindowStart(bounds));
-        }
-        else {
-            if(referenceSequence == null || referenceSequence.getContigIndex() != genomeLoc.getContigIndex())
-                referenceSequence = reference.getSequence(genomeLoc.getContig());
-            refStart = (int)window.getStart()-1;
-        }
-
-        int len = (int)window.size();
-        return new ReferenceContext( genomeLocParser, genomeLoc, window, new Provider(refStart, len));
-    }
-
-    /**
-     * Allow the user to pull reference info from any arbitrary region of the reference.
-     * @param genomeLoc The locus.
-     * @return A list of the bases starting at the start of the locus (inclusive) and ending
-     *         at the end of the locus (inclusive).
-     */
-    public byte[] getReferenceBases( GenomeLoc genomeLoc ) {
-        return super.getReferenceBases(genomeLoc);
-    }
-
-    /**
-     * Gets the start of the expanded window, bounded if necessary by the contig.
-     * @param locus The locus to expand.
-     * @return The expanded window.
-     */
-    private int getWindowStart( GenomeLoc locus ) {
-        // If the locus is not within the bounds of the contig it allegedly maps to, expand only as much as we can.
-        if(locus.getStart() < 1) return 1;
-//        if(locus.getStart() < 1) return locus.getStart();
-        return Math.max( locus.getStart() + windowStart, 1 );
-    }
-
-    /**
-     * Gets the stop of the expanded window, bounded if necessary by the contig.
-     * @param locus The locus to expand.
-     * @return The expanded window.
-     */    
-    private int getWindowStop( GenomeLoc locus ) {
-        // If the locus is not within the bounds of the contig it allegedly maps to, expand only as much as we can.
-        int sequenceLength = reference.getSequenceDictionary().getSequence(locus.getContig()).getSequenceLength();
-        if(locus.getStop() > sequenceLength) return sequenceLength;
-        return Math.min( locus.getStop() + windowStop, sequenceLength );
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/providers/LocusShardDataProvider.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/providers/LocusShardDataProvider.java
deleted file mode 100644
index 7dc589d..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/providers/LocusShardDataProvider.java
+++ /dev/null
@@ -1,100 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.datasources.providers;
-
-import htsjdk.samtools.reference.IndexedFastaSequenceFile;
-import org.broadinstitute.gatk.engine.ReadProperties;
-import org.broadinstitute.gatk.engine.datasources.reads.Shard;
-import org.broadinstitute.gatk.engine.datasources.rmd.ReferenceOrderedDataSource;
-import org.broadinstitute.gatk.utils.locusiterator.LocusIterator;
-import org.broadinstitute.gatk.utils.GenomeLoc;
-import org.broadinstitute.gatk.utils.GenomeLocParser;
-
-import java.util.Collection;
-
-/**
- * Presents data sharded by locus to the traversal engine.
- *
- * @author mhanna
- * @version 0.1
- */
-public class LocusShardDataProvider extends ShardDataProvider {
-    /**
-     * Information about the source of the read data.
-     */
-    private final ReadProperties sourceInfo;
-
-    /**
-     * The particular locus for which data is provided.  Should be contained within shard.getGenomeLocs().
-     */
-    private final GenomeLoc locus;
-
-    /**
-     * The raw collection of reads.
-     */
-    private final LocusIterator locusIterator;
-
-    /**
-     * Create a data provider for the shard given the reads and reference.
-     * @param shard The chunk of data over which traversals happen.
-     * @param reference A getter for a section of the reference.
-     */
-    public LocusShardDataProvider(Shard shard, ReadProperties sourceInfo, GenomeLocParser genomeLocParser, GenomeLoc locus, LocusIterator locusIterator, IndexedFastaSequenceFile reference, Collection<ReferenceOrderedDataSource> rods) {
-        super(shard,genomeLocParser,reference,rods);
-        this.sourceInfo = sourceInfo;
-        this.locus = locus;
-        this.locusIterator = locusIterator;
-    }
-
-    /**
-     * Returns information about the source of the reads.
-     * @return Info about the source of the reads.
-     */
-    public ReadProperties getSourceInfo() {
-        return sourceInfo;
-    }
-
-    /**
-     * Gets the locus associated with this shard data provider.
-     * @return The locus.
-     */
-    public GenomeLoc getLocus() {
-        return locus;
-    }
-
-    /**
-     * Gets an iterator over all the reads bound by this shard.
-     * @return An iterator over all reads in this shard.
-     */
-    public LocusIterator getLocusIterator() {
-        return locusIterator;
-    }        
-
-    @Override
-    public void close() {
-        super.close();
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/providers/LocusView.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/providers/LocusView.java
deleted file mode 100644
index 9bc37e5..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/providers/LocusView.java
+++ /dev/null
@@ -1,220 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.datasources.providers;
-
-import org.broadinstitute.gatk.engine.downsampling.DownsampleType;
-import org.broadinstitute.gatk.engine.ReadProperties;
-import org.broadinstitute.gatk.engine.contexts.AlignmentContext;
-import org.broadinstitute.gatk.utils.locusiterator.LocusIterator;
-import org.broadinstitute.gatk.utils.GenomeLoc;
-import org.broadinstitute.gatk.utils.GenomeLocParser;
-import org.broadinstitute.gatk.utils.locusiterator.LocusIteratorByState;
-
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.NoSuchElementException;
-
-/**
- * User: hanna
- * Date: May 13, 2009
- * Time: 3:30:16 PM
- * BROAD INSTITUTE SOFTWARE COPYRIGHT NOTICE AND AGREEMENT
- * Software and documentation are copyright 2005 by the Broad Institute.
- * All rights are reserved.
- *
- * Users acknowledge that this software is supplied without any warranty or support.
- * The Broad Institute is not responsible for its use, misuse, or
- * functionality.
- */
-
-/**
- * The two goals of the LocusView are as follows:
- * 1) To provide a 'trigger track' iteration interface so that TraverseLoci can easily switch
- *    between iterating over all bases in a region, only covered bases in a region covered by
- *    reads, only bases in a region covered by RODs, or any other sort of trigger track
- *    implementation one can think of.
- * 2) To manage the copious number of iterators that have to be jointly pulled through the
- *    genome to make a locus traversal function.
- */
-public abstract class LocusView extends LocusIterator implements View {
-    /**
-     * The locus bounding this view.
-     */
-    protected GenomeLoc locus;
-
-    /**
-     * The GenomeLocParser, used to create new genome locs.
-     */
-    protected GenomeLocParser genomeLocParser;
-
-    /**
-     * Source info for this view.  Informs the class about downsampling requirements.
-     */
-    private ReadProperties sourceInfo;
-
-    /**
-     * The actual locus context iterator.
-     */
-    private LocusIterator loci;
-
-    /**
-     * The next locus context from the iterator.  Lazy loaded: if nextLocus is null and advance() doesn't
-     * populate it, the iterator is exhausted.  If populated, this is the value that should be returned by
-     * next(). 
-     */
-    private AlignmentContext nextLocus = null;
-
-    public LocusView(LocusShardDataProvider provider) {
-        this.locus = provider.getLocus();
-        
-        this.sourceInfo = provider.getSourceInfo();
-        this.genomeLocParser = provider.getGenomeLocParser();
-        this.loci = provider.getLocusIterator();
-
-        advance();
-
-        provider.register(this);
-    }
-
-    /**
-     * Only one view of the locus is supported at any given time.
-     * @return A list consisting of all other locus views.
-     */
-    public Collection<Class<? extends View>> getConflictingViews() {
-        return Arrays.<Class<? extends View>>asList(LocusView.class,ReadView.class);
-    }
-
-    /**
-     * Close this view.
-     */
-    public void close() {
-        // Set everything to null with the hope of failing fast.
-        locus = null;
-        sourceInfo = null;
-        loci = null;
-
-        super.close();
-    }
-
-    /**
-     * Is there another covered locus context bounded by this view.
-     * @return True if another covered locus context exists.  False otherwise.
-     */
-    public abstract boolean hasNext();
-
-    /**
-     * Returns the next covered locus context in the shard.
-     * @return Next covered locus context in the shard.
-     * @throw NoSuchElementException if no such element exists.
-     */
-    public abstract AlignmentContext next();
-
-    /**
-     * Unsupported.
-     * @throw UnsupportedOperationException always.
-     */
-    public void remove() {
-        throw new UnsupportedOperationException("Unable to remove elements from this queue.");
-    }
-
-    /**
-     * Is there another locus context bounded by this shard.
-     * @return True if another locus context is bounded by this shard.
-     */
-    protected boolean hasNextLocus() {
-        advance();
-        return nextLocus != null;
-    }
-
-    /**
-     * Get the next locus context bounded by this shard.
-     * @return Next locus context bounded by this shard.
-     * @throw NoSuchElementException if the next element is missing.
-     */
-    protected AlignmentContext nextLocus() {
-        advance();
-        if(nextLocus == null)
-            throw new NoSuchElementException("No more elements remain in locus context queue.");
-
-        // Cache the current and apply filtering.
-        AlignmentContext current = nextLocus;
-
-        // Indicate that the next operation will need to advance.
-        nextLocus = null;
-        
-        return current;
-    }
-
-    /**
-     * Seed the nextLocus variable with the contents of the next locus (if one exists).
-     */
-    private void advance() {
-        // Already an unclaimed locus present
-        if(nextLocus != null)
-            return;
-
-        //System.out.printf("loci is %s%n", loci);
-        if( !loci.hasNext() ) {
-            nextLocus = null;
-            return;
-        }
-
-        nextLocus = loci.next();
-
-        // If the location of this shard is available, trim the data stream to match the shard.
-        // TODO: Much of this functionality is being replaced by the WindowMaker.
-        if(locus != null) {
-            // Iterate through any elements not contained within this shard.
-            while( nextLocus != null && !isContainedInShard(nextLocus.getLocation()) && loci.hasNext() )
-                nextLocus = loci.next();
-
-            // If nothing in the shard was found, indicate that by setting nextLocus to null.
-            if( nextLocus != null && !isContainedInShard(nextLocus.getLocation()) )
-                nextLocus = null;
-        }
-    }
-
-    /**
-     * Is this location contained in the given shard.
-     * @param location Location to check.
-     * @return True if the given location is contained within the shard.  False otherwise.
-     */
-    private boolean isContainedInShard(GenomeLoc location) {
-        return locus.containsP(location);
-    }
-
-    /**
-     * {@inheritDoc}
-     *
-     * Since this class has an actual LIBS, so this function will never throw an exception
-     *
-     * @return the LocusIteratorByState used by this view to get pileups
-     */
-    @Override
-    public LocusIteratorByState getLIBS() {
-        return loci.getLIBS();
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/providers/ManagingReferenceOrderedView.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/providers/ManagingReferenceOrderedView.java
deleted file mode 100644
index 2dd42c1..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/providers/ManagingReferenceOrderedView.java
+++ /dev/null
@@ -1,117 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.datasources.providers;
-
-import org.broadinstitute.gatk.engine.contexts.ReferenceContext;
-import org.broadinstitute.gatk.engine.datasources.rmd.ReferenceOrderedDataSource;
-import org.broadinstitute.gatk.engine.refdata.RefMetaDataTracker;
-import org.broadinstitute.gatk.engine.refdata.utils.LocationAwareSeekableRODIterator;
-import org.broadinstitute.gatk.engine.refdata.utils.RODRecordList;
-import org.broadinstitute.gatk.utils.GenomeLoc;
-
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.List;
-/**
- * User: hanna
- * Date: May 21, 2009
- * Time: 2:49:17 PM
- * BROAD INSTITUTE SOFTWARE COPYRIGHT NOTICE AND AGREEMENT
- * Software and documentation are copyright 2005 by the Broad Institute.
- * All rights are reserved.
- *
- * Users acknowledge that this software is supplied without any warranty or support.
- * The Broad Institute is not responsible for its use, misuse, or
- * functionality.
- */
-
-/**
- * A view into the reference-ordered data in the provider.
- */
-public class ManagingReferenceOrderedView implements ReferenceOrderedView {
-    /**
-     * The data sources along with their current states.
-     */
-    private List<ReferenceOrderedDataState> states = new ArrayList<ReferenceOrderedDataState>();
-
-    /**
-     * Create a new view of reference-ordered data.
-     * @param provider
-     */
-    public ManagingReferenceOrderedView( LocusShardDataProvider provider ) {
-        for( ReferenceOrderedDataSource dataSource: provider.getReferenceOrderedData() )
-            states.add(new ReferenceOrderedDataState(dataSource, dataSource.seek(provider.getLocus())));
-
-        provider.register(this);
-    }
-
-    public Collection<Class<? extends View>> getConflictingViews() { return Collections.emptyList(); }
-
-    /**
-     * Gets an object which can track the reference-ordered data at every locus.
-     * @param loc Locus at which to track.
-     * @return A tracker containing information about this locus.
-     */
-    @Override
-    public RefMetaDataTracker getReferenceOrderedDataAtLocus( GenomeLoc loc ) {
-        if ( states.isEmpty() )
-            return RefMetaDataTracker.EMPTY_TRACKER;
-        else {
-            List<RODRecordList> bindings = new ArrayList<RODRecordList>(states.size());
-
-            for ( ReferenceOrderedDataState state: states )
-                // todo -- warning, I removed the reference to the name from states
-                bindings.add( state.iterator.seekForward(loc) );
-
-            return new RefMetaDataTracker(bindings);
-        }
-    }
-
-    /**
-     * Closes the current view.
-     */
-    public void close() {
-        for( ReferenceOrderedDataState state: states )
-            state.dataSource.close( state.iterator );
-
-        // Clear out the existing data so that post-close() accesses to this data will fail-fast.
-        states = null;
-    }
-}
-
-/**
- * Models the traversal state of a given ROD lane.
- */
-class ReferenceOrderedDataState {
-    public final ReferenceOrderedDataSource dataSource;
-    public final LocationAwareSeekableRODIterator iterator;
-
-    public ReferenceOrderedDataState( ReferenceOrderedDataSource dataSource, LocationAwareSeekableRODIterator iterator ) {
-        this.dataSource = dataSource;
-        this.iterator = iterator;
-    }
-}
\ No newline at end of file
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/providers/RODMetaDataContainer.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/providers/RODMetaDataContainer.java
deleted file mode 100644
index f244e50..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/providers/RODMetaDataContainer.java
+++ /dev/null
@@ -1,83 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.datasources.providers;
-
-import org.broadinstitute.gatk.engine.refdata.utils.GATKFeature;
-import org.broadinstitute.gatk.utils.collections.Pair;
-
-import java.util.*;
-
-
-/**
- * 
- * @author aaron 
- * 
- * Class RODMetaDataContainer
- *
- * stores both the name and the class for each ROD.  This class assumes that:
- *
- * -Names must be unique
- * -Classes are allowed to have duplicates
- *
- * This class encapsulates the ref data associations, and provides lookup by name and by
- * class type.
- *
- */
-public class RODMetaDataContainer {
-    // we only allow non-duplicate ROD names, a HashMap is fine
-    private final HashMap<String, GATKFeature> nameMap = new HashMap<String, GATKFeature>();
-
-    // we do allow duplicate class entries, so we need to store pairs of data
-    private final List<Pair<Class, GATKFeature>> classMap = new ArrayList<Pair<Class, GATKFeature>>();
-
-    public void addEntry(GATKFeature data) {
-        nameMap.put(data.getName(),data);
-        classMap.add(new Pair<Class, GATKFeature>(data.getClass(),data));
-    }
-
-    public Collection<GATKFeature> getSet(String name) {
-        if (name == null) return getSet();
-        Set<GATKFeature> set = new HashSet<GATKFeature>();
-        if (nameMap.containsKey(name)) set.add(nameMap.get(name));
-        return set;
-    }
-
-    /**
-     * get the feature contents of this container; the unfiltered set without their name association
-     * @return
-     */
-    public Collection<GATKFeature> getSet() {
-        return new ArrayList<GATKFeature>(nameMap.values());
-    }
-
-    // the brute force (n) search ended up being faster than sorting and binary search in all but the most extreme cases (thousands of RODs at a location).
-    public Collection<GATKFeature> getSet(Class cls) {
-        Collection<GATKFeature> ret = new ArrayList<GATKFeature>();
-        for (Pair<Class, GATKFeature> pair: classMap)
-            if (pair.first.equals(cls)) ret.add(pair.second);
-        return ret;
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/providers/ReadBasedReferenceOrderedView.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/providers/ReadBasedReferenceOrderedView.java
deleted file mode 100644
index 1d73501..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/providers/ReadBasedReferenceOrderedView.java
+++ /dev/null
@@ -1,69 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.datasources.providers;
-
-import com.google.java.contract.Ensures;
-import com.google.java.contract.Requires;
-import htsjdk.samtools.util.PeekableIterator;
-import htsjdk.samtools.SAMRecord;
-import org.broadinstitute.gatk.engine.datasources.reads.ReadShard;
-import org.broadinstitute.gatk.engine.datasources.rmd.ReferenceOrderedDataSource;
-import org.broadinstitute.gatk.engine.refdata.RefMetaDataTracker;
-import org.broadinstitute.gatk.engine.refdata.utils.LocationAwareSeekableRODIterator;
-import org.broadinstitute.gatk.engine.refdata.utils.RODRecordList;
-import org.broadinstitute.gatk.utils.GenomeLoc;
-import org.broadinstitute.gatk.utils.GenomeLocParser;
-
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.List;
-
-/** a ROD view for reads. This provides the Read traversals a way of getting a RefMetaDataTracker */
-public class ReadBasedReferenceOrderedView extends IntervalReferenceOrderedView {
-    public ReadBasedReferenceOrderedView(final ShardDataProvider provider) {
-        super(provider, provider.hasReferenceOrderedData() ? ((ReadShard)provider.getShard()).getReadsSpan() : null);
-    }
-
-    /**
-     * create a RefMetaDataTracker given the current read
-     *
-     * @param rec the read
-     *
-     * @return a RefMetaDataTracker for the read, from which you can get ROD -> read alignments
-     */
-    @Requires("rec != null")
-    @Ensures("result != null")
-    public RefMetaDataTracker getReferenceOrderedDataForRead(final SAMRecord rec) {
-        if ( rec.getReadUnmappedFlag() )
-            return RefMetaDataTracker.EMPTY_TRACKER;
-        else {
-            final GenomeLoc readSpan = genomeLocParser.createGenomeLoc(rec);
-            trimCurrentFeaturesToLoc(readSpan);
-            return getReferenceOrderedDataForInterval(readSpan);
-        }
-    }
-}
-
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/providers/ReadReferenceView.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/providers/ReadReferenceView.java
deleted file mode 100644
index 14d5827..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/providers/ReadReferenceView.java
+++ /dev/null
@@ -1,102 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.datasources.providers;
-
-import htsjdk.samtools.SAMRecord;
-import org.broadinstitute.gatk.engine.contexts.ReferenceContext;
-import org.broadinstitute.gatk.utils.GenomeLoc;
-/*
- * Copyright (c) 2009 The Broad Institute
- *
- * Permission is hereby granted, free of charge, to any person
- * obtaining a copy of this software and associated documentation
- * files (the "Software"), to deal in the Software without
- * restriction, including without limitation the rights to use,
- * copy, modify, merge, publish, distribute, sublicense, and/or sell
- * copies of the Software, and to permit persons to whom the
- * Software is furnished to do so, subject to the following
- * conditions:
- *
- * The above copyright notice and this permission notice shall be
- * included in all copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
- * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
- * OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
- * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
- * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
- * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
- * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
- * OTHER DEALINGS IN THE SOFTWARE.
- */
-
-/**
- * User: hanna
- * Date: May 22, 2009
- * Time: 12:36:14 PM
- *
- */
-
-/** Provides access to the reference over a single read. */
-
-public class ReadReferenceView extends ReferenceView {
-    /**
-     * Create a view of the reference with respect to a single read.
-     *
-     * @param provider
-     */
-    public ReadReferenceView( ShardDataProvider provider ) {
-        super(provider);
-    }
-
-    protected ReferenceContext.ReferenceContextRefProvider getReferenceBasesProvider( GenomeLoc genomeLoc ) {
-        return new Provider(genomeLoc);
-    }
-
-    public class Provider implements ReferenceContext.ReferenceContextRefProvider {
-        GenomeLoc loc;
-
-        public Provider( GenomeLoc loc ) {
-            this.loc = loc;
-        }
-
-        public byte[] getBases() {
-            return getReferenceBases(loc);
-        }
-    }
-
-    /**
-     * Return a reference context appropriate for the span of read
-     *
-     * @param read the mapped read to test
-     * @return
-     */
-    public ReferenceContext getReferenceContext( final SAMRecord read ) {
-        GenomeLoc loc = genomeLocParser.createGenomeLoc(read);
-        return new ReferenceContext( genomeLocParser, loc, loc, getReferenceBasesProvider(loc) );
-    }
-
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/providers/ReadShardDataProvider.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/providers/ReadShardDataProvider.java
deleted file mode 100644
index 8acfad0..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/providers/ReadShardDataProvider.java
+++ /dev/null
@@ -1,82 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.datasources.providers;
-
-import htsjdk.samtools.reference.IndexedFastaSequenceFile;
-import org.broadinstitute.gatk.engine.datasources.reads.Shard;
-import org.broadinstitute.gatk.engine.datasources.rmd.ReferenceOrderedDataSource;
-import org.broadinstitute.gatk.engine.iterators.GATKSAMIterator;
-import org.broadinstitute.gatk.utils.GenomeLocParser;
-
-import java.util.Collection;
-
-/**
- * Present data sharded by read to a traversal engine.
- *
- * @author mhanna
- * @version 0.1
- */
-public class ReadShardDataProvider extends ShardDataProvider {
-    /**
-     * The raw collection of reads.
-     */
-    private final GATKSAMIterator reads;
-
-    /**
-     * Create a data provider for the shard given the reads and reference.
-     * @param shard The chunk of data over which traversals happen.
-     * @param reference A getter for a section of the reference.
-     */
-    public ReadShardDataProvider(Shard shard, GenomeLocParser genomeLocParser, GATKSAMIterator reads, IndexedFastaSequenceFile reference, Collection<ReferenceOrderedDataSource> rods) {
-        super(shard,genomeLocParser,reference,rods);
-        this.reads = reads;
-    }
-
-    /**
-     * Can this data source provide reads?
-     * @return True if reads are available, false otherwise.
-     */
-    public boolean hasReads() {
-        return reads != null;
-    }    
-
-    /**
-     * Gets an iterator over all the reads bound by this shard.
-     * @return An iterator over all reads in this shard.
-     */
-    public GATKSAMIterator getReadIterator() {
-        return reads;
-    }
-
-    @Override
-    public void close() {
-        super.close();
-        
-        if(reads != null)
-            reads.close();
-    }
-
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/providers/ReadView.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/providers/ReadView.java
deleted file mode 100644
index 160dbd5..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/providers/ReadView.java
+++ /dev/null
@@ -1,88 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.datasources.providers;
-
-import htsjdk.samtools.SAMRecord;
-import org.broadinstitute.gatk.engine.iterators.GATKSAMIterator;
-
-import java.util.Arrays;
-import java.util.Collection;
-/**
- * User: hanna
- * Date: May 22, 2009
- * Time: 12:06:54 PM
- * BROAD INSTITUTE SOFTWARE COPYRIGHT NOTICE AND AGREEMENT
- * Software and documentation are copyright 2005 by the Broad Institute.
- * All rights are reserved.
- *
- * Users acknowledge that this software is supplied without any warranty or support.
- * The Broad Institute is not responsible for its use, misuse, or
- * functionality.
- */
-
-/**
- * A view into the reads that a provider can provide. 
- */
-public class ReadView implements View, Iterable<SAMRecord> {
-    /**
-     * The iterator into the reads supplied by this provider.
-     */
-    private GATKSAMIterator reads;
-
-    /**
-     * Create a new view of the reads given the current data set.
-     * @param provider Source for the data.
-     */
-    public ReadView( ReadShardDataProvider provider ) {
-        reads = provider.getReadIterator();
-    }
-
-    /**
-     * Other reads and loci conflict with this view.
-     * @return Array of reads and loci.
-     */
-    public Collection<Class<? extends View>> getConflictingViews() {
-        return Arrays.<Class<? extends View>>asList(ReadView.class, LocusView.class);
-    }
-
-    /**
-     * Close the view over these reads.  Note that this method closes just
-     * the view into the reads, not the reads themselves.
-     */
-    public void close() {
-        // Don't close the reads.  The provider is responsible for this.
-        // Just dispose of the pointer.
-        reads = null;
-    }
-
-    /**
-     * Gets an iterator into the reads supplied by this provider.
-     * @return Iterator into the reads that this provider covers.
-     */
-    public GATKSAMIterator iterator() {
-        return reads;    
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/providers/ReferenceOrderedView.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/providers/ReferenceOrderedView.java
deleted file mode 100644
index 9f3db51..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/providers/ReferenceOrderedView.java
+++ /dev/null
@@ -1,33 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.datasources.providers;
-
-import org.broadinstitute.gatk.engine.refdata.RefMetaDataTracker;
-import org.broadinstitute.gatk.utils.GenomeLoc;
-
-public interface ReferenceOrderedView extends View {
-    RefMetaDataTracker getReferenceOrderedDataAtLocus( GenomeLoc loc );
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/providers/ReferenceView.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/providers/ReferenceView.java
deleted file mode 100644
index 2eade15..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/providers/ReferenceView.java
+++ /dev/null
@@ -1,131 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.datasources.providers;
-
-import htsjdk.samtools.reference.IndexedFastaSequenceFile;
-import htsjdk.samtools.reference.ReferenceSequence;
-import htsjdk.samtools.SAMRecord;
-import htsjdk.samtools.SAMSequenceRecord;
-import org.broadinstitute.gatk.utils.GenomeLoc;
-import org.broadinstitute.gatk.utils.GenomeLocParser;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.Collections;
-/**
- * User: hanna
- * Date: May 22, 2009
- * Time: 12:19:17 PM
- * BROAD INSTITUTE SOFTWARE COPYRIGHT NOTICE AND AGREEMENT
- * Software and documentation are copyright 2005 by the Broad Institute.
- * All rights are reserved.
- *
- * Users acknowledge that this software is supplied without any warranty or support.
- * The Broad Institute is not responsible for its use, misuse, or
- * functionality.
- */
-
-/**
- * A view into the reference backing this shard.
- */
-public class ReferenceView implements View {
-    /**
-     * The parser, used to create and parse GenomeLocs.
-     */
-    protected final GenomeLocParser genomeLocParser;
-
-    /**
-     * The source of reference data.
-     */
-    protected IndexedFastaSequenceFile reference = null;
-
-    /**
-     * Create a new ReferenceView.
-     * @param provider
-     */
-    public ReferenceView( ShardDataProvider provider ) {
-        this.genomeLocParser = provider.getGenomeLocParser();
-        this.reference = provider.getReference();
-    }
-
-    /**
-     * Reference views don't conflict with anything else.
-     * @return Empty list.
-     */
-    public Collection<Class<? extends View>> getConflictingViews() { return Collections.emptyList(); }
-
-    /**
-     * Deinitialize pointers for fast fail.  Someone else will handle file management.
-     */
-    public void close() {
-        reference = null;
-    }
-
-    /**
-     * Allow the user to pull reference info from any arbitrary region of the reference.
-     * If parts of the reference don't exist, mark them in the char array with 'X'es.
-     * @param genomeLoc The locus.
-     * @return A list of the bases starting at the start of the locus (inclusive) and ending
-     *         at the end of the locus (inclusive).
-     */
-    final static int BUFFER = 10000;
-    final static byte[] Xs = new byte[BUFFER];
-    static {
-        Arrays.fill(Xs, (byte)'X');
-    }
-
-    protected byte[] getReferenceBases( SAMRecord read ) {
-        return getReferenceBases(genomeLocParser.createGenomeLoc(read));
-
-    }
-
-    protected byte[] getReferenceBases( GenomeLoc genomeLoc ) {
-        SAMSequenceRecord sequenceInfo = reference.getSequenceDictionary().getSequence(genomeLoc.getContig());
-
-        long start = genomeLoc.getStart();
-        long stop = Math.min( genomeLoc.getStop(), sequenceInfo.getSequenceLength() );
-
-        // Read with no aligned bases?  Return an empty array.
-        if(stop - start + 1 == 0)
-            return new byte[0];
-
-        ReferenceSequence subsequence = reference.getSubsequenceAt(genomeLoc.getContig(), start, stop);
-
-        int overhang = (int)(genomeLoc.getStop() - stop);
-        if ( overhang > 0 ) {
-            if ( overhang > BUFFER ) // todo -- this is a bit dangerous
-                throw new ReviewedGATKException("Insufficient buffer size for Xs overhanging genome -- expand BUFFER");
-            byte[] all = new byte[subsequence.getBases().length + overhang];
-            System.arraycopy(subsequence.getBases(), 0, all, 0, subsequence.getBases().length);
-            System.arraycopy(Xs, 0, all, subsequence.getBases().length, overhang);
-            return all;
-        } else {
-            // fast path
-            return subsequence.getBases();
-        }
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/providers/RodLocusView.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/providers/RodLocusView.java
deleted file mode 100644
index 21cb3ef..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/providers/RodLocusView.java
+++ /dev/null
@@ -1,197 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.datasources.providers;
-
-import org.broadinstitute.gatk.engine.contexts.AlignmentContext;
-import org.broadinstitute.gatk.engine.contexts.ReferenceContext;
-import org.broadinstitute.gatk.engine.datasources.rmd.ReferenceOrderedDataSource;
-import org.broadinstitute.gatk.engine.refdata.RefMetaDataTracker;
-import org.broadinstitute.gatk.engine.refdata.utils.LocationAwareSeekableRODIterator;
-import org.broadinstitute.gatk.engine.refdata.utils.RODRecordList;
-import org.broadinstitute.gatk.utils.GenomeLoc;
-import org.broadinstitute.gatk.utils.collections.RODMergingIterator;
-import org.broadinstitute.gatk.utils.pileup.ReadBackedPileupImpl;
-
-import java.util.*;
-
-/**
- * A view into the reference-ordered data in the provider.
- */
-public class RodLocusView extends LocusView implements ReferenceOrderedView {
-    /**
-     * The data sources along with their current states.
-     */
-    private RODMergingIterator rodQueue = null;
-
-    Collection<RODRecordList> allTracksHere;
-
-    GenomeLoc lastLoc = null;
-    RODRecordList interval = null;
-
-    /**
-     * The data sources along with their current states.
-     */
-    private List<ReferenceOrderedDataState> states = new ArrayList<ReferenceOrderedDataState>();    
-
-    /**
-     * Enable debugging output -- todo remove me
-     */
-    final static boolean DEBUG = false;
-
-    final static String INTERVAL_ROD_NAME = "interval";
-
-    /**
-     * Create a new view of reference-ordered data.
-     *
-     * @param provider
-     */
-    public RodLocusView( LocusShardDataProvider provider ) {
-        super(provider);
-
-        GenomeLoc loc = provider.getLocus();
-
-        List< Iterator<RODRecordList> > iterators = new LinkedList< Iterator<RODRecordList> >();
-        for( ReferenceOrderedDataSource dataSource: provider.getReferenceOrderedData() ) {
-            if ( DEBUG ) System.out.printf("Shard is %s%n", provider.getLocus());
-
-            // grab the ROD iterator from the data source, and compute the first location in this shard, forwarding
-            // the iterator to immediately before it, so that it can be added to the merging iterator primed for
-            // next() to return the first real ROD in this shard
-            LocationAwareSeekableRODIterator it = dataSource.seek(provider.getLocus());
-            it.seekForward(genomeLocParser.createGenomeLoc(loc.getContig(), loc.getStart()-1));
-
-            states.add(new ReferenceOrderedDataState(dataSource,it));            
-
-            // we need to special case the interval so we don't always think there's a rod at the first location
-            if ( dataSource.getName().equals(INTERVAL_ROD_NAME) ) {
-                if ( interval != null )
-                    throw new RuntimeException("BUG: interval local variable already assigned " + interval);
-                interval = it.next();
-            } else {
-                iterators.add( it );
-            }
-        }
-
-        rodQueue = new RODMergingIterator(iterators);
-    }
-
-    @Override
-    public RefMetaDataTracker getReferenceOrderedDataAtLocus( GenomeLoc loc ) {
-        // special case the interval again -- add it into the ROD
-        if ( interval != null ) { allTracksHere.add(interval); }
-        return new RefMetaDataTracker(allTracksHere);
-    }
-
-    public boolean hasNext() {
-        if ( ! rodQueue.hasNext() )
-            return false;
-        else {
-            return ! rodQueue.peekLocation().isPast(locus);
-        }
-    }
-
-    /**
-     * Returns the next covered locus context in the shard.
-     * @return Next covered locus context in the shard.
-     * @throw NoSuchElementException if no such element exists.
-     */
-    public AlignmentContext next() {
-        if ( DEBUG ) System.out.printf("In RodLocusView.next()...%n");
-        RODRecordList datum = rodQueue.next();
-        if ( DEBUG ) System.out.printf("In RodLocusView.next(); datum = %s...%n", datum.getLocation());
-
-        if ( DEBUG ) System.out.printf("In RodLocusView.next(): creating tracker...%n");
-
-        allTracksHere = getSpanningTracks(datum);
-        GenomeLoc rodSite = datum.getLocation();
-        GenomeLoc site = genomeLocParser.createGenomeLoc( rodSite.getContig(), rodSite.getStart(), rodSite.getStart());
-
-        if ( DEBUG ) System.out.printf("rodLocusView.next() is at %s%n", site);
-
-        // calculate the number of skipped bases, and update lastLoc so we can do that again in the next()
-        long skippedBases = getSkippedBases( rodSite );
-        lastLoc = site;
-        return new AlignmentContext(site, new ReadBackedPileupImpl(site), skippedBases);
-    }
-
-    private Collection<RODRecordList> getSpanningTracks(RODRecordList marker) {
-        return rodQueue.allElementsLTE(marker);
-    }
-
-    /**
-     * Returns the number of reference bases that have been skipped:
-     *
-     * 1 -- since the last processed location if we have one
-     * 2 -- from the beginning of the shard if this is the first loc
-     * 3 -- from the last location to the current position
-     *
-     * @param currentPos
-     * @return
-     */
-    private long getSkippedBases( GenomeLoc currentPos ) {
-        // the minus - is because if lastLoc == null, you haven't yet seen anything in this interval, so it should also be counted as skipped
-        Integer compStop = lastLoc == null ? locus.getStart() - 1 : lastLoc.getStop();
-        long skippedBases = currentPos.getStart() - compStop  - 1;
-
-        if ( skippedBases < -1 ) { // minus 1 value is ok
-            throw new RuntimeException(String.format("BUG: skipped bases=%d is < 0: cur=%s vs. last=%s, shard=%s",
-                    skippedBases, currentPos, lastLoc, locus));
-        }
-        return Math.max(skippedBases, 0);
-    }
-
-    /**
-     * Get the location one after the last position we will traverse through
-     * @return
-     */
-    public GenomeLoc getLocOneBeyondShard() {
-        return genomeLocParser.createGenomeLoc(locus.getContig(),locus.getStop()+1);
-    }
-
-    /**
-     * How many bases are we skipping from the current location to the end of the interval / shard
-     * if we have no more elements
-     *
-     * @return
-     */
-    public long getLastSkippedBases() {
-        if ( hasNext() )
-            throw new RuntimeException("BUG: getLastSkippedBases called when there are elements remaining.");
-
-        return getSkippedBases(getLocOneBeyondShard());
-    }
-
-    /**
-     * Closes the current view.
-     */
-    public void close() {
-        for( ReferenceOrderedDataState state: states )
-            state.dataSource.close( state.iterator );
-
-        rodQueue = null;
-        allTracksHere = null;
-    }
-}
\ No newline at end of file
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/providers/ShardDataProvider.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/providers/ShardDataProvider.java
deleted file mode 100644
index a36bee5..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/providers/ShardDataProvider.java
+++ /dev/null
@@ -1,197 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.datasources.providers;
-
-import htsjdk.samtools.reference.IndexedFastaSequenceFile;
-import org.broadinstitute.gatk.engine.datasources.reads.Shard;
-import org.broadinstitute.gatk.engine.datasources.rmd.ReferenceOrderedDataSource;
-import org.broadinstitute.gatk.utils.GenomeLocParser;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.List;
-/**
- * User: hanna
- * Date: May 8, 2009
- * Time: 3:09:57 PM
- * BROAD INSTITUTE SOFTWARE COPYRIGHT NOTICE AND AGREEMENT
- * Software and documentation are copyright 2005 by the Broad Institute.
- * All rights are reserved.
- *
- * Users acknowledge that this software is supplied without any warranty or support.
- * The Broad Institute is not responsible for its use, misuse, or
- * functionality.
- */
-
-/**
- * An umbrella class that examines the data passed to the microscheduler and
- * tries to assemble as much as possible with it. 
- */
-public abstract class ShardDataProvider {
-    /**
-     * An ArrayList of all the views that are examining this data.
-     */
-    private List<View> registeredViews = new ArrayList<View>();
-
-    /**
-     * The shard over which we're providing data.
-     */
-    private final Shard shard;
-
-    /**
-     * The parser, used to create and build new GenomeLocs.
-     */
-    private final GenomeLocParser genomeLocParser;
-
-    /**
-     * Provider of reference data for this particular shard.
-     */
-    private final IndexedFastaSequenceFile reference;
-
-    /**
-     * Sources of reference-ordered data.
-     */
-    private final Collection<ReferenceOrderedDataSource> referenceOrderedData;
-
-    /**
-     * Returns the GenomeLocParser associated with this traversal.
-     * @return The associated parser.
-     */
-    public GenomeLocParser getGenomeLocParser() {
-        return genomeLocParser;
-    }
-
-    /**
-     * Retrieves the shard associated with this data provider.
-     * @return The shard associated with this data provider.
-     */
-    public Shard getShard() {
-        return shard;
-    }
-
-    /**
-     * Can this data source provide reference information?
-     * @return True if possible, false otherwise.
-     */
-    public boolean hasReference() {
-        return reference != null;
-    }
-
-
-    /**
-     * Gets a pointer into the given indexed fasta sequence file.
-     * @return The indexed fasta sequence file.
-     */
-    IndexedFastaSequenceFile getReference() {
-        return reference;        
-    }
-
-    /**
-     * Gets a window into the reference-ordered data.  Package protected so that only
-     * views can access it.
-     * @return List of reference-ordered data sources.
-     */
-    Collection<ReferenceOrderedDataSource> getReferenceOrderedData() {
-        return referenceOrderedData;        
-    }
-
-    /**
-     * @return true if reference ordered data will be provided by this shard
-     */
-    public boolean hasReferenceOrderedData() {
-        return ! getReferenceOrderedData().isEmpty();
-    }
-
-    /**
-     * Create a data provider for the shard given the reads and reference.
-     * @param shard The chunk of data over which traversals happen.
-     * @param reference A getter for a section of the reference.
-     */
-    public ShardDataProvider(Shard shard,GenomeLocParser genomeLocParser,IndexedFastaSequenceFile reference,Collection<ReferenceOrderedDataSource> rods) {
-        this.shard = shard;
-        this.genomeLocParser = genomeLocParser;
-        this.reference = reference;
-        this.referenceOrderedData = rods;
-    }
-
-    /**
-     * Skeletal, package protected constructor for unit tests which require a ShardDataProvider.
-     * @param shard the shard
-     */
-    ShardDataProvider(Shard shard,GenomeLocParser genomeLocParser) {
-        this(shard,genomeLocParser,null,null);
-    }
-
-    /**
-     * Register this view with the shard provider, and make sure it has no conflicts with any other views.
-     * @param view The new view.
-     */
-    void register( View view ) {
-        // Check all registered classes to see whether a conflict exists.
-        for( View registeredView: registeredViews ) {
-            Collection<Class<? extends View>> conflicts = registeredView.getConflictingViews();
-            for( Class<? extends View> conflict: conflicts ) {
-                if( conflict.isInstance(view) )
-                    throw new ReviewedGATKException(String.format("Tried to register two conflicting views: %s and %s",
-                                                           registeredView.getClass().getSimpleName(),
-                                                           view.getClass().getSimpleName()));
-            }
-        }
-
-        // Check whether this class has any objection to any other classes.
-        for( Class<? extends View> conflict: view.getConflictingViews() ) {
-            for( View registeredView: registeredViews ) {
-                if( conflict.isInstance(registeredView) )
-                    throw new ReviewedGATKException(String.format("Tried to register two conflicting views: %s and %s",
-                                                           registeredView.getClass().getSimpleName(),
-                                                           view.getClass().getSimpleName()));
-            }
-        }
-
-        this.registeredViews.add(view);
-    }
-
-    /**
-     * Retire this shard.
-     */
-    public void close() {
-        for( View view: registeredViews )
-            view.close();
-
-        // Explicitly purge registered views to ensure that we don't end up with circular references
-        // to views, which can in turn hold state.
-        registeredViews.clear();
-
-        if(shard != null)
-            shard.close();
-    }
-
-    @Override
-    public String toString() {
-        return shard.toString();
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/providers/View.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/providers/View.java
deleted file mode 100644
index f628bb4..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/providers/View.java
+++ /dev/null
@@ -1,55 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.datasources.providers;
-
-import java.util.Collection;
-/**
- * User: hanna
- * Date: May 21, 2009
- * Time: 3:14:56 PM
- * BROAD INSTITUTE SOFTWARE COPYRIGHT NOTICE AND AGREEMENT
- * Software and documentation are copyright 2005 by the Broad Institute.
- * All rights are reserved.
- *
- * Users acknowledge that this software is supplied without any warranty or support.
- * The Broad Institute is not responsible for its use, misuse, or
- * functionality.
- */
-
-/**
- * Represents a view into given data.
- */
-public interface View {
-    /**
-     * Gets a list of all types of views which can conflict with this view.
-     */
-    public Collection<Class<? extends View>> getConflictingViews();
-
-    /**
-     * Inform this view that the data provided to it no longer exists.
-     */
-    public void close();
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/providers/package-info.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/providers/package-info.java
deleted file mode 100644
index bc8a602..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/providers/package-info.java
+++ /dev/null
@@ -1,26 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.datasources.providers;
\ No newline at end of file
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/ActiveRegionShardBalancer.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/ActiveRegionShardBalancer.java
deleted file mode 100644
index efe6336..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/ActiveRegionShardBalancer.java
+++ /dev/null
@@ -1,85 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.datasources.reads;
-
-import java.util.Iterator;
-import java.util.LinkedList;
-import java.util.List;
-
-/**
- * ActiveRegionShardBalancer
- *
- * Merges all of the file pointer information for a single contig index into a single
- * combined shard.  The purpose of doing this is to ensure that the HaplotypeCaller, which
- * doesn't support TreeReduction by construction, gets all of the data on a single
- * contig together so the the NanoSchedule runs efficiently
- */
-public class ActiveRegionShardBalancer extends ShardBalancer {
-    /**
-     * Convert iterators of file pointers into balanced iterators of shards.
-     * @return An iterator over balanced shards.
-     */
-    public Iterator<Shard> iterator() {
-        return new Iterator<Shard>() {
-            public boolean hasNext() {
-                return filePointers.hasNext();
-            }
-
-            public Shard next() {
-                FilePointer current = getCombinedFilePointersOnSingleContig();
-
-                // FilePointers have already been combined as necessary at the IntervalSharder level. No
-                // need to do so again here.
-
-                return new LocusShard(parser,readsDataSource,current.getLocations(),current.fileSpans);
-            }
-
-            public void remove() {
-                throw new UnsupportedOperationException("Unable to remove from shard balancing iterator");
-            }
-        };
-    }
-
-    /**
-     * Combine all of the file pointers in the filePointers iterator into a single combined
-     * FilePointer that spans all of the file pointers on a single contig
-     * @return a non-null FilePointer
-     */
-    private FilePointer getCombinedFilePointersOnSingleContig() {
-        FilePointer current = filePointers.next();
-
-        final List<FilePointer> toCombine = new LinkedList<>();
-        toCombine.add(current);
-
-        while ( filePointers.hasNext() &&
-                current.isRegionUnmapped == filePointers.peek().isRegionUnmapped &&
-                (current.getContigIndex() == filePointers.peek().getContigIndex() || current.isRegionUnmapped) ) {
-            toCombine.add(filePointers.next());
-        }
-
-        return FilePointer.union(toCombine, parser);
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/BAMAccessPlan.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/BAMAccessPlan.java
deleted file mode 100644
index 1e30d6c..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/BAMAccessPlan.java
+++ /dev/null
@@ -1,170 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.datasources.reads;
-
-import htsjdk.samtools.util.PeekableIterator;
-import htsjdk.samtools.GATKBAMFileSpan;
-import htsjdk.samtools.GATKChunk;
-import htsjdk.samtools.util.BlockCompressedFilePointerUtil;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-
-import java.util.LinkedList;
-import java.util.List;
-
-/**
-* Created by IntelliJ IDEA.
-* User: mhanna
-* Date: 10/14/11
-* Time: 10:47 PM
-* To change this template use File | Settings | File Templates.
-*/
-class BAMAccessPlan {
-    private final SAMReaderID reader;
-    private final BlockInputStream inputStream;
-
-    private final List<GATKChunk> positions;
-    private PeekableIterator<GATKChunk> positionIterator;
-
-    /**
-     * Stores the next block address to read, or -1 if no such block is available.
-     */
-    private long nextBlockAddress;
-
-
-    BAMAccessPlan(final SAMReaderID reader, final BlockInputStream inputStream, GATKBAMFileSpan fileSpan) {
-        this.reader = reader;
-        this.inputStream = inputStream;
-
-        this.positions = fileSpan.getGATKChunks();
-        initialize();
-    }
-
-    public SAMReaderID getReader() {
-        return reader;
-    }
-
-    public BlockInputStream getInputStream() {
-        return inputStream;
-    }
-
-    /**
-     * Retrieves the next block address to be read.
-     * @return Next block address to be read.
-     */
-    public long getBlockAddress() {
-        return nextBlockAddress;
-    }
-
-    /**
-     * Retrieves the first offset of interest in the block returned by getBlockAddress().
-     * @return First block of interest in this segment.
-     */
-    public int getFirstOffsetInBlock() {
-        return (nextBlockAddress == positionIterator.peek().getBlockStart()) ? positionIterator.peek().getBlockOffsetStart() : 0;
-    }
-
-    /**
-     * Gets the spans overlapping the given block; used to copy the contents of the block into the circular buffer.
-     * @param blockAddress Block address for which to search.
-     * @param filePosition Block address at which to terminate the last chunk if the last chunk goes beyond this span.
-     * @return list of chunks containing that block.
-     */
-    public List<GATKChunk> getSpansOverlappingBlock(long blockAddress, long filePosition) {
-        List<GATKChunk> spansOverlapping = new LinkedList<GATKChunk>();
-        // While the position iterator overlaps the given block, pull out spans to report.
-        while(positionIterator.hasNext() && positionIterator.peek().getBlockStart() <= blockAddress) {
-            // Create a span over as much of the block as is covered by this chunk.
-            int blockOffsetStart = (blockAddress == positionIterator.peek().getBlockStart()) ? positionIterator.peek().getBlockOffsetStart() : 0;
-
-            // Calculate the end of this span.  If the span extends past this block, cap it using the current file position.
-            long blockEnd;
-            int blockOffsetEnd;
-            if(blockAddress < positionIterator.peek().getBlockEnd()) {
-                blockEnd = filePosition;
-                blockOffsetEnd = 0;
-            }
-            else {
-                blockEnd = positionIterator.peek().getBlockEnd();
-                blockOffsetEnd = positionIterator.peek().getBlockOffsetEnd();
-            }
-
-            GATKChunk newChunk = new GATKChunk(blockAddress,blockOffsetStart,blockEnd,blockOffsetEnd);
-
-            if(newChunk.getChunkStart() <= newChunk.getChunkEnd())
-                spansOverlapping.add(new GATKChunk(blockAddress,blockOffsetStart,blockEnd,blockOffsetEnd));
-
-            // If the value currently stored in the position iterator ends past the current block, we must be done.  Abort.
-            if(!positionIterator.hasNext() ||  positionIterator.peek().getBlockEnd() > blockAddress)
-                break;
-
-            // If the position iterator ends before the block ends, pull the position iterator forward.
-            if(positionIterator.peek().getBlockEnd() <= blockAddress)
-                positionIterator.next();
-        }
-
-        return spansOverlapping;
-    }
-
-    public void reset() {
-        initialize();
-    }
-
-    /**
-     * Resets the SAM reader position to its original state.
-     */
-    private void initialize() {
-        this.positionIterator = new PeekableIterator<GATKChunk>(positions.iterator());
-        if(positionIterator.hasNext())
-            nextBlockAddress = positionIterator.peek().getBlockStart();
-        else
-            nextBlockAddress = -1;
-    }
-
-    /**
-     * Advances the current position to the next block to read, given the current position in the file.
-     * @param filePosition The current position within the file.
-     */
-    void advancePosition(final long filePosition) {
-        nextBlockAddress = BlockCompressedFilePointerUtil.getBlockAddress(filePosition);
-
-        // Check the current file position against the iterator; if the iterator is before the current file position,
-        // draw the iterator forward.  Remember when performing the check that coordinates are half-open!
-        while(positionIterator.hasNext() && isFilePositionPastEndOfChunk(filePosition,positionIterator.peek()))
-            positionIterator.next();
-
-        // If the block iterator has shot past the file pointer, bring the file pointer flush with the start of the current block.
-        if(positionIterator.hasNext() && filePosition < positionIterator.peek().getChunkStart())
-            nextBlockAddress = positionIterator.peek().getBlockStart();
-
-        // If we've shot off the end of the block pointer, notify consumers that iteration is complete.
-        if(!positionIterator.hasNext())
-            nextBlockAddress = -1;
-    }
-
-    private boolean isFilePositionPastEndOfChunk(final long filePosition, final GATKChunk chunk) {
-        return filePosition >= chunk.getChunkEnd();
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/BAMSchedule.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/BAMSchedule.java
deleted file mode 100644
index a80b0a4..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/BAMSchedule.java
+++ /dev/null
@@ -1,530 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.datasources.reads;
-
-import htsjdk.samtools.util.PeekableIterator;
-import htsjdk.samtools.Bin;
-import htsjdk.samtools.GATKBAMFileSpan;
-import htsjdk.samtools.GATKChunk;
-import htsjdk.samtools.util.CloseableIterator;
-import org.broadinstitute.gatk.utils.GenomeLoc;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-import org.broadinstitute.gatk.utils.exceptions.GATKException;
-import org.broadinstitute.gatk.utils.exceptions.UserException;
-
-import java.io.File;
-import java.io.IOException;
-import java.io.RandomAccessFile;
-import java.nio.ByteBuffer;
-import java.nio.ByteOrder;
-import java.nio.channels.FileChannel;
-import java.util.*;
-
-/**
- * Writes schedules for a single BAM file to a target output file.
- */
-public class BAMSchedule implements CloseableIterator<BAMScheduleEntry> {
-    /**
-     * File in which to store schedule data.
-     */
-    private File scheduleFile;
-
-    /**
-     * File channel for the schedule file.
-     */
-    private FileChannel scheduleFileChannel;
-
-    /**
-     * The definitive, sorted list of reader IDs.  Order is important here: the order
-     * in which the reader IDs are presented here maps to the order in which they appear in the file. 
-     */
-    private final List<SAMReaderID> readerIDs = new ArrayList<SAMReaderID>();
-
-    /**
-     * Iterators over the schedule.  Stored in the same order as readerIDs, above.
-     */
-    private final List<PeekableIterator<BAMScheduleEntry>> scheduleIterators = new ArrayList<PeekableIterator<BAMScheduleEntry>>();
-
-    /**
-     * Next schedule entry to be returned.  Null if no additional entries are present.
-     */
-    private BAMScheduleEntry nextScheduleEntry;
-
-    /**
-     * Reference sequence for which to write the schedule.
-     */
-    private final int referenceSequence;
-
-    /**
-     * Sizes of ints and longs in bytes.
-     */
-    private static final int INT_SIZE_IN_BYTES = Integer.SIZE / 8;
-    private static final int LONG_SIZE_IN_BYTES = Long.SIZE / 8;    
-
-    /**
-     * Create a new BAM schedule based on the given index.
-     * @param dataSource The SAM data source to use.
-     * @param intervals List of 
-     */
-    public BAMSchedule(final SAMDataSource dataSource, final List<GenomeLoc> intervals) {
-        if(intervals.isEmpty())
-            throw new ReviewedGATKException("Tried to write schedule for empty interval list.");
-
-        referenceSequence = dataSource.getHeader().getSequence(intervals.get(0).getContig()).getSequenceIndex();
-
-        createScheduleFile();
-
-        readerIDs.addAll(dataSource.getReaderIDs());
-
-        for(final SAMReaderID reader: readerIDs) {
-            final GATKBAMIndex index = dataSource.getIndex(reader);
-            final GATKBAMIndexData indexData = index.readReferenceSequence(referenceSequence);
-
-            int currentBinInLowestLevel = GATKBAMIndex.getFirstBinInLevel(GATKBAMIndex.getNumIndexLevels()-1);
-            Iterator<GenomeLoc> locusIterator = intervals.iterator();
-            GenomeLoc currentLocus = locusIterator.next();
-
-            final long readerStartOffset = position();
-
-            int maxChunkCount = 0;
-
-            while(currentBinInLowestLevel < GATKBAMIndex.MAX_BINS && currentLocus != null) {
-                final Bin bin = new Bin(referenceSequence,currentBinInLowestLevel);
-                final int binStart = index.getFirstLocusInBin(bin);
-                final int binStop = index.getLastLocusInBin(bin);
-
-                // In required, pull bin iterator ahead to the point of the next GenomeLoc.
-                if(binStop < currentLocus.getStart()) {
-                    currentBinInLowestLevel++;
-                    continue;
-                }
-
-                // At this point, the bin stop is guaranteed to be >= the start of the locus.
-                // If the bins have gone past the current locus, update the current locus if at all possible.
-                if(binStart > currentLocus.getStop()) {
-                    currentLocus = locusIterator.hasNext() ? locusIterator.next() : null;
-                    continue;
-                }
-
-                // Code at this point knows that the current bin is neither before nor after the current locus,
-                // so it must overlap.  Add this region to the filesystem.
-                final GATKBAMFileSpan fileSpan = indexData.getSpanOverlapping(bin);
-
-                if(!fileSpan.isEmpty()) {
-                    // File format is binary in little endian; start of region, end of region, num chunks, then the chunks themselves.
-                    ByteBuffer buffer = allocateByteBuffer(2*INT_SIZE_IN_BYTES + INT_SIZE_IN_BYTES + fileSpan.getGATKChunks().size()*LONG_SIZE_IN_BYTES*2);
-                    buffer.putInt(binStart);
-                    buffer.putInt(binStop);
-                    buffer.putInt(fileSpan.getGATKChunks().size());
-                    for(GATKChunk chunk: fileSpan.getGATKChunks()) {
-                        buffer.putLong(chunk.getChunkStart());
-                        buffer.putLong(chunk.getChunkEnd());
-                    }
-                    maxChunkCount = Math.max(maxChunkCount,fileSpan.getGATKChunks().size());
-
-                    // Prepare buffer for writing
-                    buffer.flip();
-
-                    // And write.
-                    write(buffer);
-                }
-
-                currentBinInLowestLevel++;
-            }
-
-            final long readerStopOffset = position();
-
-            scheduleIterators.add(new PeekableIterator<BAMScheduleEntry>(new BAMScheduleIterator(reader,readerStartOffset,readerStopOffset,maxChunkCount)));
-
-            // Iterator initialization might move the file pointer.  Make sure it gets reset back to where it was before iterator initialization.
-            position(readerStopOffset);
-        }
-
-        advance();
-    }
-
-    /**
-     * Determine whether more ScheduleEntries are present in the iterator.
-     * @return Next schedule entry to parse.
-     */
-    @Override
-    public boolean hasNext() {
-        return nextScheduleEntry != null;    
-    }
-
-    /**
-     * Retrieve the next schedule entry in the list.
-     * @return next schedule entry in the queue.
-     */
-    @Override
-    public BAMScheduleEntry next() {
-        BAMScheduleEntry currentScheduleEntry = nextScheduleEntry;
-        advance();
-        return currentScheduleEntry;
-    }
-
-    /**
-     * Close down and delete the file.
-     */
-    @Override
-    public void close() {
-        try {
-            scheduleFileChannel.close();
-        }
-        catch(IOException ex) {
-            throw makeIOFailureException(true, "Unable to close schedule file.", ex);
-        }
-    }
-
-    /**
-     * Convenience routine for creating UserExceptions
-     * @param wasWriting
-     * @param message
-     * @param e
-     * @return
-     */
-    private final GATKException makeIOFailureException(final boolean wasWriting, final String message, final Exception e) {
-        if ( wasWriting ) {
-            if ( e == null )
-                return new UserException.CouldNotCreateOutputFile(scheduleFile, message);
-            else
-                return new UserException.CouldNotCreateOutputFile(scheduleFile, message, e);
-        } else {
-            if ( e == null )
-                return new UserException.CouldNotReadInputFile(scheduleFile, message);
-            else
-                return new UserException.CouldNotReadInputFile(scheduleFile, message, e);
-        }
-    }
-
-    /**
-     * Advance to the next schedule entry.
-     */
-    private void advance() {
-        nextScheduleEntry = null;
-
-        BitSet selectedIterators = new BitSet(readerIDs.size());
-        int currentStart = Integer.MAX_VALUE;
-        int currentStop = Integer.MAX_VALUE;
-
-        // Select every iterator whose next element is the lowest element in the list.
-        for(int reader = 0; reader < scheduleIterators.size(); reader++) {
-            PeekableIterator<BAMScheduleEntry> scheduleIterator = scheduleIterators.get(reader);
-            if(!scheduleIterator.hasNext())
-                continue;
-
-            // If the iterator starts after this one, skip over it.
-            if(scheduleIterator.peek().start > currentStart)
-                continue;
-
-            // If the iterator starts at the same point as this one, add it to the list.
-            if(scheduleIterator.peek().start == currentStart) {
-                selectedIterators.set(reader);
-                currentStop = Math.min(scheduleIterator.peek().stop,currentStop);
-                continue;
-            }
-
-            // If the iterator is less than anything seen before it, purge the selections and make this one current.
-            if(scheduleIterator.peek().start < currentStart) {
-                selectedIterators.clear();
-                selectedIterators.set(reader);
-                currentStart = scheduleIterator.peek().start;
-                currentStop = scheduleIterator.peek().stop;
-            }
-        }
-
-        // Out of iterators?  Abort early.
-        if(selectedIterators.isEmpty())
-            return;
-
-        // Create the target schedule entry
-        BAMScheduleEntry mergedScheduleEntry = new BAMScheduleEntry(currentStart,currentStop);
-
-        // For each schedule entry with data, load the data into the merged schedule.
-        for (int reader = selectedIterators.nextSetBit(0); reader >= 0; reader = selectedIterators.nextSetBit(reader+1)) {
-            PeekableIterator<BAMScheduleEntry> scheduleIterator = scheduleIterators.get(reader);
-            BAMScheduleEntry individualScheduleEntry = scheduleIterator.peek();
-            mergedScheduleEntry.mergeInto(individualScheduleEntry);
-
-            // If the schedule iterator ends after this entry, consume it.
-            if(individualScheduleEntry.stop <= currentStop)
-                scheduleIterator.next();
-        }
-
-        // For each schedule entry without data, add a blank entry.
-        for (int reader = selectedIterators.nextClearBit(0); reader < readerIDs.size(); reader = selectedIterators.nextClearBit(reader+1)) {
-            mergedScheduleEntry.addFileSpan(readerIDs.get(reader),new GATKBAMFileSpan());
-        }
-
-        nextScheduleEntry = mergedScheduleEntry;
-    }
-
-    @Override
-    public void remove() { throw new UnsupportedOperationException("Unable to remove from a schedule iterator."); }
-
-    /**
-     * Create a new schedule file, containing schedule information for all BAM files being dynamically merged.
-     */
-    private void createScheduleFile() {
-        try {
-            scheduleFile = File.createTempFile("bamschedule."+referenceSequence,null);
-            scheduleFileChannel = new RandomAccessFile(scheduleFile,"rw").getChannel();
-        }
-        catch(IOException ex) {
-            throw new UserException("Unable to create a temporary BAM schedule file.  Please make sure Java can write to the default temp directory or use -Djava.io.tmpdir= to instruct it to use a different temp directory instead.",ex);
-        }
-        scheduleFile.deleteOnExit();
-
-    }
-
-    /**
-     * Creates a new byte buffer of the given size.
-     * @param size the size of buffer to allocate.
-     * @return Newly allocated byte buffer.
-     */
-    private ByteBuffer allocateByteBuffer(final int size) {
-        ByteBuffer buffer = ByteBuffer.allocate(size);
-        buffer.order(ByteOrder.LITTLE_ENDIAN);
-        return buffer;
-    }
-
-    /**
-     * Reads the contents at the current position on disk into the given buffer.
-     * @param buffer buffer to fill.
-     */
-    private int read(final ByteBuffer buffer) {
-        try {
-            return scheduleFileChannel.read(buffer);
-        }
-        catch(IOException ex) {
-            throw makeIOFailureException(false, "Unable to read data from BAM schedule file.", ex);
-        }
-    }
-
-    private void write(final ByteBuffer buffer) {
-        try {
-            scheduleFileChannel.write(buffer);
-            if(buffer.remaining() > 0)
-                throw makeIOFailureException(true, "Unable to write entire buffer to file.", null);
-        }
-        catch(IOException ex) {
-            throw makeIOFailureException(true, "Unable to write data to BAM schedule file.", ex);
-        }
-    }
-
-    /**
-     * Reads the current position from the file channel.
-     * @return Current position within file channel.
-     */
-    private long position() {
-        try {
-            return scheduleFileChannel.position();
-        }
-        catch(IOException ex) {
-            throw makeIOFailureException(false, "Unable to retrieve position of BAM schedule file.", ex);
-        }
-    }
-
-    /**
-     * Reposition the file channel to the specified offset wrt the start of the file.
-     * @param position The position.
-     */
-    private void position(final long position) {
-        try {
-            scheduleFileChannel.position(position);
-        }
-        catch(IOException ex) {
-            throw makeIOFailureException(false, "Unable to position BAM schedule file.",ex);
-        }
-    }
-
-    /**
-     * An iterator over the schedule for a single BAM file.
-     */
-    private class BAMScheduleIterator implements Iterator<BAMScheduleEntry> {
-        /**
-         * ID of the reader associated with the given schedule.
-         */
-        private final SAMReaderID reader;
-
-        /**
-         * Current position in the file.
-         */
-        private long currentPosition;
-
-        /**
-         * Stopping file position of last bin in file for this reader, exclusive.
-         */
-        private final long stopPosition;
-
-        /**
-         * Byte buffer used to store BAM header info.
-         */
-        private final ByteBuffer binHeader;
-
-        /**
-         * Byte buffer used to store chunk data.
-         */
-        private final ByteBuffer chunkData;
-
-        public BAMScheduleIterator(final SAMReaderID reader, final long startPosition, final long stopPosition, final int maxChunkCount) {
-            this.reader = reader;
-            this.currentPosition = startPosition;
-            this.stopPosition = stopPosition;
-            binHeader = allocateByteBuffer(INT_SIZE_IN_BYTES*3);
-            chunkData = allocateByteBuffer(maxChunkCount*LONG_SIZE_IN_BYTES*2);
-        }
-
-        @Override
-        public boolean hasNext() {
-            return currentPosition < stopPosition;
-        }
-
-        @Override
-        public BAMScheduleEntry next() {
-            position(currentPosition);
-
-            // Read data.
-            int binHeaderBytesRead = read(binHeader);
-
-            // Make sure we read in a complete bin header:
-            if ( binHeaderBytesRead < INT_SIZE_IN_BYTES * 3 ) {
-                throw new ReviewedGATKException(String.format("Unable to read a complete bin header from BAM schedule file %s for BAM file %s. " +
-                                                               "The BAM schedule file is likely incomplete/corrupt.",
-                                                               scheduleFile.getAbsolutePath(), reader.getSamFilePath()));
-            }
-
-            // Decode contents.
-            binHeader.flip();
-            final int start = binHeader.getInt();
-            final int stop = binHeader.getInt();
-            final int numChunks = binHeader.getInt();
-
-            // Prepare bin buffer for next read.
-            binHeader.flip();
-
-            // Prepare a target buffer for chunks.
-            GATKChunk[] chunks = new GATKChunk[numChunks];
-
-            // Read all chunk data.
-            chunkData.limit(numChunks*LONG_SIZE_IN_BYTES*2);
-            long bytesRead = read(chunkData);
-            if(bytesRead != numChunks*LONG_SIZE_IN_BYTES*2)
-                throw new ReviewedGATKException("Unable to read all chunks from file");
-
-            // Prepare for reading.
-            chunkData.flip();
-
-            for(int i = 0; i < numChunks; i++)
-                chunks[i] = new GATKChunk(chunkData.getLong(),chunkData.getLong());
-
-            // Prepare chunk buffer for next read.
-            chunkData.flip();
-
-            BAMScheduleEntry nextScheduleEntry = new BAMScheduleEntry(start,stop);
-            nextScheduleEntry.addFileSpan(reader,new GATKBAMFileSpan(chunks));
-
-            // Reset the position of the iterator at the next contig.
-            currentPosition = position();
-
-            return nextScheduleEntry;
-        }
-
-        /**
-         * Not supported.
-         */
-        @Override
-        public void remove() {
-            throw new UnsupportedOperationException("Unable to remove from a BAMScheduleIterator");
-        }
-
-    }
-}
-
-/**
- * A single proto-shard to be processed.
- */
-class BAMScheduleEntry {
-    /**
-     * Starting position for the genomic entry.
-     */
-    public final int start;
-
-    /**
-     * Ending position for the genomic entry.
-     */
-    public final int stop;
-
-    /**
-     * The spans representing the given region.
-     */
-    public final Map<SAMReaderID,GATKBAMFileSpan> fileSpans = new HashMap<SAMReaderID,GATKBAMFileSpan>();
-
-    BAMScheduleEntry(final int start, final int stop) {
-        this.start = start;
-        this.stop = stop;
-    }
-
-    /**
-     * Add a new file span to this schedule.
-     * @param reader Reader associated with the span.
-     * @param fileSpan Blocks to read in the given reader.
-     */
-    public void addFileSpan(final SAMReaderID reader, final GATKBAMFileSpan fileSpan) {
-        fileSpans.put(reader,fileSpan);
-    }
-
-    /**
-     * A naive merge operation.  Merge the fileSpans in other into this, blowing up if conflicts are
-     * detected. Completely ignores merging start and stop.
-     * @param other Other schedule entry to merging into this one.
-     */
-    public void mergeInto(final BAMScheduleEntry other) {
-        final int thisSize = fileSpans.size();
-        final int otherSize = other.fileSpans.size();
-        fileSpans.putAll(other.fileSpans);
-        if(fileSpans.size() != thisSize+otherSize)
-            throw new ReviewedGATKException("Unable to handle overlaps when merging BAM schedule entries.");
-    }
-
-    /**
-     * Returns true if the location of this bin tree is before the given position.
-     * @param locus Locus to test.
-     * @return True if this bin sits completely before the given locus; false otherwise.
-     */
-    public boolean isBefore(final GenomeLoc locus) {
-        return stop < locus.getStart();
-    }
-
-    /**
-     * Checks overlap between this bin tree and other bin trees.
-     * @param position the position over which to detect overlap.
-     * @return True if the segment overlaps.  False otherwise.
-     */
-    public boolean overlaps(final GenomeLoc position) {
-        return !(position.getStop() < start || position.getStart() > stop);
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/BAMScheduler.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/BAMScheduler.java
deleted file mode 100644
index 1ea8d39..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/BAMScheduler.java
+++ /dev/null
@@ -1,320 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.datasources.reads;
-
-import htsjdk.samtools.util.PeekableIterator;
-import htsjdk.samtools.GATKBAMFileSpan;
-import htsjdk.samtools.GATKChunk;
-import htsjdk.samtools.SAMSequenceRecord;
-import org.broadinstitute.gatk.utils.GenomeLoc;
-import org.broadinstitute.gatk.utils.GenomeLocParser;
-import org.broadinstitute.gatk.utils.GenomeLocSortedSet;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-import org.broadinstitute.gatk.utils.exceptions.UserException;
-import org.broadinstitute.gatk.utils.interval.IntervalMergingRule;
-import org.broadinstitute.gatk.utils.sam.ReadUtils;
-
-import java.util.*;
-
-/**
- * Assign intervals to the most appropriate blocks, keeping as little as possible in memory at once.
- */
-public class BAMScheduler implements Iterator<FilePointer> {
-    private final SAMDataSource dataSource;
-
-    private final Map<SAMReaderID,GATKBAMIndex> indexFiles = new HashMap<SAMReaderID,GATKBAMIndex>();
-
-    private FilePointer nextFilePointer = null;
-
-    private GenomeLocSortedSet loci;
-    private PeekableIterator<GenomeLoc> locusIterator;
-    private GenomeLoc currentLocus;
-    private IntervalMergingRule intervalMergingRule;
-
-    /*
-     * Creates BAMScheduler using contigs from the given BAM data source.
-     *
-     * @param dataSource    BAM source
-     * @return non-null BAM scheduler
-     */
-    public static BAMScheduler createOverMappedReads(final SAMDataSource dataSource) {
-        final BAMScheduler scheduler = new BAMScheduler(dataSource, IntervalMergingRule.ALL);
-        final GenomeLocSortedSet intervals = GenomeLocSortedSet.createSetFromSequenceDictionary(dataSource.getHeader().getSequenceDictionary());
-        scheduler.populateFilteredIntervalList(intervals);
-        return scheduler;
-    }
-
-    public static BAMScheduler createOverAllReads(final SAMDataSource dataSource, final GenomeLocParser parser) {
-        BAMScheduler scheduler = new BAMScheduler(dataSource, IntervalMergingRule.ALL);
-        scheduler.populateUnfilteredIntervalList(parser);
-        return scheduler;
-    }
-
-    public static BAMScheduler createOverIntervals(final SAMDataSource dataSource, final IntervalMergingRule mergeRule, final GenomeLocSortedSet loci) {
-        BAMScheduler scheduler = new BAMScheduler(dataSource, mergeRule);
-        scheduler.populateFilteredIntervalList(loci);
-        return scheduler;
-    }
-
-
-    private BAMScheduler(final SAMDataSource dataSource, final IntervalMergingRule mergeRule) {
-        this.dataSource = dataSource;
-        this.intervalMergingRule = mergeRule;
-        for(SAMReaderID reader: dataSource.getReaderIDs()) {
-            GATKBAMIndex index = dataSource.getIndex(reader);
-            if(index != null)
-                indexFiles.put(reader,dataSource.getIndex(reader));
-        }
-    }
-
-    /**
-     * The consumer has asked for a bounded set of locations.  Prepare an iterator over those locations.
-     * @param loci The list of locations to search and iterate over.
-     */
-    private void populateFilteredIntervalList(final GenomeLocSortedSet loci) {
-        this.loci = loci;
-        if(!indexFiles.isEmpty()) {
-            // If index data is available, start up the iterator.
-            locusIterator = new PeekableIterator<GenomeLoc>(loci.iterator());
-            if(locusIterator.hasNext())
-                currentLocus = locusIterator.next();
-            advance();
-        }
-        else {
-            // Otherwise, seed the iterator with a single file pointer over the entire region.
-            nextFilePointer = generatePointerOverEntireFileset();
-            for(GenomeLoc locus: loci)
-                nextFilePointer.addLocation(locus);
-            locusIterator = new PeekableIterator<GenomeLoc>(Collections.<GenomeLoc>emptyList().iterator());
-        }
-    }
-
-    /**
-     * The consumer has provided null, meaning to iterate over all available data.  Create a file pointer stretching
-     * from just before the start of the region to the end of the region.
-     */
-    private void populateUnfilteredIntervalList(final GenomeLocParser parser) {
-        this.loci = new GenomeLocSortedSet(parser);
-        locusIterator = new PeekableIterator<GenomeLoc>(Collections.<GenomeLoc>emptyList().iterator());
-        nextFilePointer = generatePointerOverEntireFileset();
-    }
-
-    /**
-     * Generate a span that runs from the end of the BAM header to the end of the fle.
-     * @return A file pointer over the specified region.
-     */
-    private FilePointer generatePointerOverEntireFileset() {
-        FilePointer filePointer = new FilePointer(intervalMergingRule);
-
-        // This is a "monolithic" FilePointer representing all regions in all files we will ever visit, and is
-        // the only FilePointer we will create. This allows us to have this FilePointer represent regions from
-        // multiple contigs
-        filePointer.setIsMonolithic(true);
-
-        Map<SAMReaderID,GATKBAMFileSpan> currentPosition;
-
-        currentPosition = dataSource.getInitialReaderPositions();
-
-        for(SAMReaderID reader: dataSource.getReaderIDs())
-            filePointer.addFileSpans(reader,createSpanToEndOfFile(currentPosition.get(reader).getGATKChunks().get(0).getChunkStart()));
-        return filePointer;
-    }
-
-    public boolean hasNext() {
-        return nextFilePointer != null;
-    }
-
-    public FilePointer next() {
-        if(!hasNext())
-            throw new NoSuchElementException("No next element available in interval sharder");
-        FilePointer currentFilePointer = nextFilePointer;
-        nextFilePointer = null;
-        advance();
-
-        return currentFilePointer;
-    }
-
-    public void remove() {
-        throw new UnsupportedOperationException("Unable to remove FilePointers from an IntervalSharder");
-    }
-
-    private void advance() {
-        if(loci.isEmpty())
-            return;
-
-        while(nextFilePointer == null && currentLocus != null) {
-            // special case handling of the unmapped shard.
-            if(currentLocus == GenomeLoc.UNMAPPED) {
-                nextFilePointer = new FilePointer(intervalMergingRule, GenomeLoc.UNMAPPED);
-                for(SAMReaderID id: dataSource.getReaderIDs())
-                    nextFilePointer.addFileSpans(id,createSpanToEndOfFile(indexFiles.get(id).getStartOfLastLinearBin()));
-                currentLocus = null;
-                continue;
-            }
-
-            nextFilePointer = new FilePointer(intervalMergingRule);
-
-            int coveredRegionStart = 1;
-            int coveredRegionStop = Integer.MAX_VALUE;
-            GenomeLoc coveredRegion = null;
-
-            BAMScheduleEntry scheduleEntry = getNextOverlappingBAMScheduleEntry(currentLocus);
-
-            // No overlapping data at all.
-            if(scheduleEntry != null) {
-                coveredRegionStart = Math.max(coveredRegionStart,scheduleEntry.start);
-                coveredRegionStop = Math.min(coveredRegionStop,scheduleEntry.stop);
-                coveredRegion = loci.getGenomeLocParser().createGenomeLoc(currentLocus.getContig(),coveredRegionStart,coveredRegionStop);
-
-                nextFilePointer.addFileSpans(scheduleEntry.fileSpans);
-            }
-            else {
-                // Always create a file span, whether there was covered data or not.  If there was no covered data, then the binTree is empty.
-                for(SAMReaderID reader: indexFiles.keySet())
-                    nextFilePointer.addFileSpans(reader,new GATKBAMFileSpan());
-            }
-
-            // Early exit if no bins were found.
-            if(coveredRegion == null) {
-                // for debugging only: maximum split is 16384.                
-                nextFilePointer.addLocation(currentLocus);
-                currentLocus = locusIterator.hasNext() ? locusIterator.next() : null;
-                continue;
-            }
-
-            // Early exit if only part of the first interval was found.
-            if(currentLocus.startsBefore(coveredRegion)) {
-                int splitPoint = Math.min(coveredRegion.getStart()-currentLocus.getStart(),16384)+currentLocus.getStart();
-                GenomeLoc[] splitContigs = currentLocus.split(splitPoint);
-                nextFilePointer.addLocation(splitContigs[0]);
-                currentLocus = splitContigs[1];
-                continue;
-            }
-
-            // Define the initial range of the file pointer, aka the region where the locus currently being processed intersects the BAM list.
-            GenomeLoc initialLocation = currentLocus.intersect(coveredRegion);
-            nextFilePointer.addLocation(initialLocation);
-
-            // See whether the BAM regions discovered overlap the next set of intervals in the interval list.  If so, include every overlapping interval.
-            if(!nextFilePointer.locations.isEmpty()) {
-                while(locusIterator.hasNext() && locusIterator.peek().overlapsP(coveredRegion)) {
-                    currentLocus = locusIterator.next();
-                    nextFilePointer.addLocation(currentLocus.intersect(coveredRegion));
-                }
-
-                // Chop off the uncovered portion of the locus.  Since we know that the covered region overlaps the current locus,
-                  // we can simplify the interval creation process to the end of the covered region to the stop of the given interval.
-                if(coveredRegionStop < currentLocus.getStop())
-                    currentLocus = loci.getGenomeLocParser().createGenomeLoc(currentLocus.getContig(),coveredRegionStop+1,currentLocus.getStop());
-                else if(locusIterator.hasNext())
-                    currentLocus = locusIterator.next();
-                else
-                    currentLocus = null;
-            }
-
-        }
-    }
-
-    
-    /**
-     * The last reference sequence processed by this iterator.
-     */
-    private Integer lastReferenceSequenceLoaded = null;
-
-    /**
-     * The stateful iterator used to progress through the genoem.
-     */
-    private PeekableIterator<BAMScheduleEntry> bamScheduleIterator = null;
-
-    /**
-     * Clean up underlying BAMSchedule file handles.
-     */
-    public void close() {
-        if(bamScheduleIterator != null)
-            bamScheduleIterator.close();
-    }
-
-    /**
-     * Get the next overlapping tree of bins associated with the given BAM file.
-     * @param currentLocus The actual locus for which to check overlap.
-     * @return The next schedule entry overlapping with the given list of loci.
-     */
-    private BAMScheduleEntry getNextOverlappingBAMScheduleEntry(final GenomeLoc currentLocus) {
-        // Make sure that we consult the BAM header to ensure that we're using the correct contig index for this contig name.
-        // This will ensure that if the two sets of contigs don't quite match (b36 male vs female ref, hg19 Epstein-Barr), then
-        // we'll be using the correct contig index for the BAMs.
-        // TODO: Warning: assumes all BAMs use the same sequence dictionary!  Get around this with contig aliasing.
-        SAMSequenceRecord currentContigSequenceRecord = dataSource.getHeader().getSequence(currentLocus.getContig());
-        if ( currentContigSequenceRecord == null ) {
-            throw new UserException(String.format("Contig %s not present in sequence dictionary for merged BAM header: %s",
-                                                  currentLocus.getContig(),
-                                                  ReadUtils.prettyPrintSequenceRecords(dataSource.getHeader().getSequenceDictionary())));
-        }
-
-        final int currentContigIndex = currentContigSequenceRecord.getSequenceIndex();
-
-        // Stale reference sequence or first invocation.  (Re)create the binTreeIterator.
-        if(lastReferenceSequenceLoaded == null || lastReferenceSequenceLoaded != currentContigIndex) {
-            if(bamScheduleIterator != null)
-                bamScheduleIterator.close();
-            lastReferenceSequenceLoaded = currentContigIndex;
-
-            // Naive algorithm: find all elements in current contig for proper schedule creation.
-            List<GenomeLoc> lociInContig = new LinkedList<GenomeLoc>();
-            for(GenomeLoc locus: loci) {
-                if (!GenomeLoc.isUnmapped(locus) && dataSource.getHeader().getSequence(locus.getContig()) == null)
-                    throw new ReviewedGATKException("BAM file(s) do not have the contig: " + locus.getContig() + ". You are probably using a different reference than the one this file was aligned with");
-
-                if (!GenomeLoc.isUnmapped(locus) && dataSource.getHeader().getSequence(locus.getContig()).getSequenceIndex() == lastReferenceSequenceLoaded)
-                    lociInContig.add(locus);
-            }
-
-            bamScheduleIterator = new PeekableIterator<BAMScheduleEntry>(new BAMSchedule(dataSource,lociInContig));
-        }
-
-        if(!bamScheduleIterator.hasNext())
-            return null;
-
-        // Peek the iterator along until finding the first binTree at or following the current locus.
-        BAMScheduleEntry bamScheduleEntry = bamScheduleIterator.peek();
-        while(bamScheduleEntry != null && bamScheduleEntry.isBefore(currentLocus)) {
-            bamScheduleIterator.next();
-            bamScheduleEntry = bamScheduleIterator.hasNext() ? bamScheduleIterator.peek() : null;
-        }                                   
-
-        return (bamScheduleEntry != null && bamScheduleEntry.overlaps(currentLocus)) ? bamScheduleEntry : null;
-    }
-
-    /**
-     * Create a span from the given start point to the end of the file.
-     * @param startOfRegion Start of the region, in encoded coordinates (block start << 16 & block offset).
-     * @return A file span from the given point to the end of the file.
-     */
-    private GATKBAMFileSpan createSpanToEndOfFile(final long startOfRegion) {
-      return new GATKBAMFileSpan(new GATKChunk(startOfRegion,Long.MAX_VALUE));
-    }
-
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/BGZFBlockLoadingDispatcher.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/BGZFBlockLoadingDispatcher.java
deleted file mode 100644
index cc1d9e9..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/BGZFBlockLoadingDispatcher.java
+++ /dev/null
@@ -1,86 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.datasources.reads;
-
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-
-import java.util.LinkedList;
-import java.util.Queue;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Executors;
-
-/**
- * Preloads BGZF blocks in preparation for unzipping and data processing.
- * TODO: Right now, the block loader has all threads blocked waiting for a work request.  Ultimately this should
- * TODO: be replaced with a central thread management strategy.
- */
-public class BGZFBlockLoadingDispatcher {
-    /**
-     * The file handle cache, used when allocating blocks from the dispatcher.
-     */
-    private final FileHandleCache fileHandleCache;
-
-    private final ExecutorService threadPool;
-
-    private final Queue<BAMAccessPlan> inputQueue;
-
-    public BGZFBlockLoadingDispatcher(final int numThreads, final int numFileHandles) {
-        threadPool = Executors.newFixedThreadPool(numThreads);
-        fileHandleCache = new FileHandleCache(numFileHandles);
-        inputQueue = new LinkedList<BAMAccessPlan>();
-
-        threadPool.execute(new BlockLoader(this,fileHandleCache,true));
-    }
-
-    /**
-     * Initiates a request for a new block load.
-      * @param readerPosition Position at which to load.
-     */
-    void queueBlockLoad(final BAMAccessPlan readerPosition) {
-        synchronized(inputQueue) {
-            inputQueue.add(readerPosition);
-            inputQueue.notify();
-        }
-    }
-
-    /**
-     * Claims the next work request from the queue.
-     * @return The next work request, or null if none is available.
-     */
-    BAMAccessPlan claimNextWorkRequest() {
-        synchronized(inputQueue) {
-            while(inputQueue.isEmpty()) {
-                try {
-                    inputQueue.wait();
-                }
-                catch(InterruptedException ex) {
-                    throw new ReviewedGATKException("Interrupt occurred waiting for next block reader work item");
-                }
-            }
-            return inputQueue.poll();
-        }
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/BlockInputStream.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/BlockInputStream.java
deleted file mode 100644
index 11fecb6..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/BlockInputStream.java
+++ /dev/null
@@ -1,450 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.datasources.reads;
-
-import htsjdk.samtools.GATKBAMFileSpan;
-import htsjdk.samtools.GATKChunk;
-import htsjdk.samtools.util.BlockCompressedInputStream;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-
-import java.io.IOException;
-import java.io.InputStream;
-import java.nio.ByteBuffer;
-import java.nio.ByteOrder;
-import java.util.Arrays;
-import java.util.LinkedList;
-import java.util.List;
-
-/**
- * Presents decompressed blocks to the SAMFileReader.
- */
-public class BlockInputStream extends InputStream {
-    /**
-     * Mechanism for triggering block loads.
-     */
-    private final BGZFBlockLoadingDispatcher dispatcher;
-
-    /**
-     * The reader whose data is supplied by this input stream.
-     */
-    private final SAMReaderID reader;
-
-    /**
-     * Length of the input stream.
-     */
-    private final long length;
-
-    /**
-     * The latest error reported by an asynchronous block load.
-     */
-    private Throwable error;
-
-    /**
-     * Current accessPlan.
-     */
-    private BAMAccessPlan accessPlan;
-
-    /**
-     * A stream of compressed data blocks.
-     */
-    private final ByteBuffer buffer;
-
-    /**
-     * Offsets of the given blocks in the buffer.
-     */
-    private LinkedList<Integer> blockOffsets = new LinkedList<Integer>();
-
-    /**
-     * Source positions of the given blocks in the buffer.
-     */
-    private LinkedList<Long> blockPositions = new LinkedList<Long>();
-
-    /**
-     * Provides a lock to wait for more data to arrive.
-     */
-    private final Object lock = new Object();
-
-    /**
-     * An input stream to use when comparing data back to what it should look like.
-     */
-    private final BlockCompressedInputStream validatingInputStream;
-
-    /**
-     * Create a new block presenting input stream with a dedicated buffer.
-     * @param dispatcher the block loading messenger.
-     * @param reader the reader for which to load data.
-     * @param validate validates the contents read into the buffer against the contents of a Picard BlockCompressedInputStream.
-     */
-    BlockInputStream(final BGZFBlockLoadingDispatcher dispatcher, final SAMReaderID reader, final boolean validate) {
-        this.reader = reader;
-        this.length = reader.samFile.length();
-
-        buffer = ByteBuffer.wrap(new byte[64*1024]);
-        buffer.order(ByteOrder.LITTLE_ENDIAN);
-
-        // The state of the buffer assumes that the range of data written into the buffer appears in the range
-        // [position,limit), while extra capacity exists in the range [limit,capacity)
-        buffer.limit(0);
-
-        this.dispatcher = dispatcher;
-        // TODO: Kill the region when all we want to do is start at the beginning of the stream and run to the end of the stream.
-        this.accessPlan = new BAMAccessPlan(reader,this,new GATKBAMFileSpan(new GATKChunk(0,Long.MAX_VALUE)));
-
-        // The block offsets / block positions guarantee that the ending offset/position in the data structure maps to
-        // the point in the file just following the last read.  These two arrays should never be empty; initializing
-        // to 0 to match the position above.
-        this.blockOffsets.add(0);
-        this.blockPositions.add(0L);
-
-        try {
-            if(validate) {
-                System.out.printf("BlockInputStream %s: BGZF block validation mode activated%n",this);
-                validatingInputStream = new BlockCompressedInputStream(reader.samFile);
-                // A bug in ValidatingInputStream means that calling getFilePointer() immediately after initialization will result in an NPE.
-                // Poke the stream to start reading data.
-                validatingInputStream.available();
-            }
-            else
-                validatingInputStream = null;
-        }
-        catch(IOException ex) {
-            throw new ReviewedGATKException("Unable to validate against Picard input stream",ex);
-        }
-    }
-
-    public long length() {
-        return length;
-    }
-
-    public long getFilePointer() {
-        long filePointer;
-        synchronized(lock) {
-            // Find the current block within the input stream.
-            int blockIndex;
-            for(blockIndex = 0; blockIndex+1 < blockOffsets.size() && buffer.position() > blockOffsets.get(blockIndex+1); blockIndex++)
-                ;
-            filePointer = blockPositions.get(blockIndex) + (buffer.position()-blockOffsets.get(blockIndex));
-        }
-
-//        if(validatingInputStream != null && filePointer != validatingInputStream.getFilePointer())
-//            throw new ReviewedGATKException(String.format("Position of input stream is invalid; expected (block address, block offset) = (%d,%d), got (%d,%d)",
-//                    BlockCompressedFilePointerUtil.getBlockAddress(validatingInputStream.getFilePointer()),BlockCompressedFilePointerUtil.getBlockOffset(validatingInputStream.getFilePointer()),
-//                    BlockCompressedFilePointerUtil.getBlockAddress(filePointer),BlockCompressedFilePointerUtil.getBlockOffset(filePointer)));
-
-        return filePointer;
-    }
-
-    private void clearBuffers() {
-        this.accessPlan.reset();
-
-        // Buffer semantics say that outside of a lock, buffer should always be prepared for reading.
-        // Indicate no data to be read.
-        buffer.clear();
-        buffer.limit(0);
-
-        // Clear everything except the last block offset / position
-        blockOffsets.clear();
-        blockOffsets.add(0);
-        while(blockPositions.size() > 1)
-            blockPositions.removeFirst();
-    }
-
-    public boolean eof() {
-        synchronized(lock) {
-            // TODO: Handle multiple empty BGZF blocks at end of the file.
-            return accessPlan != null && (accessPlan.getBlockAddress() < 0 || accessPlan.getBlockAddress() >= length);
-        }
-    }
-
-    /**
-     * Submits a new access plan for the given dataset and seeks to the given point.
-     * @param accessPlan The next seek point for BAM data in this reader.
-     */
-    public void submitAccessPlan(final BAMAccessPlan accessPlan) {
-        //System.out.printf("Thread %s: submitting access plan for block at position: %d%n",Thread.currentThread().getId(),position.getBlockAddress());
-        this.accessPlan = accessPlan;
-        accessPlan.reset();
-
-        clearBuffers();
-
-        // Pull the iterator past any oddball chunks at the beginning of the shard (chunkEnd < chunkStart, empty chunks, etc).
-        // TODO: Don't pass these empty chunks in.
-        accessPlan.advancePosition(makeFilePointer(accessPlan.getBlockAddress(),0));
-
-        if(accessPlan.getBlockAddress() >= 0) {
-            waitForBufferFill();
-        }
-
-        if(validatingInputStream != null) {
-            try {
-                validatingInputStream.seek(makeFilePointer(accessPlan.getBlockAddress(),0));
-            }
-            catch(IOException ex) {
-                throw new ReviewedGATKException("Unable to validate against Picard input stream",ex);
-            }
-        }
-
-    }
-
-
-    private void compactBuffer() {
-        // Compact buffer to maximize storage space.
-        int bytesToRemove = 0;
-
-        // Look ahead to see if we can compact away the first blocks in the series.
-        while(blockOffsets.size() > 1 && buffer.position() >= blockOffsets.get(1)) {
-            blockOffsets.remove();
-            blockPositions.remove();
-            bytesToRemove = blockOffsets.peek();
-        }
-
-        // If we end up with an empty block at the end of the series, compact this as well.
-        if(buffer.remaining() == 0 && blockOffsets.size() > 1 && buffer.position() >= blockOffsets.peek()) {
-            bytesToRemove += buffer.position();
-            blockOffsets.remove();
-            blockPositions.remove();
-        }
-
-        int finalBufferStart = buffer.position() - bytesToRemove;
-        int finalBufferSize = buffer.remaining();
-
-        // Position the buffer to remove the unneeded data, and compact it away.
-        buffer.position(bytesToRemove);
-        buffer.compact();
-
-        // Reset the limits for reading.
-        buffer.position(finalBufferStart);
-        buffer.limit(finalBufferStart+finalBufferSize);
-
-        // Shift everything in the offset buffer down to accommodate the bytes removed from the buffer.
-        for(int i = 0; i < blockOffsets.size(); i++)
-            blockOffsets.set(i,blockOffsets.get(i)-bytesToRemove);
-    }
-
-    /**
-     * Push contents of incomingBuffer into the end of this buffer.
-     * MUST be called from a thread that is NOT the reader thread.
-     * @param incomingBuffer The data being pushed into this input stream.
-     * @param accessPlan target access plan for the data.
-     * @param filePosition the current position of the file pointer
-     */
-    public void copyIntoBuffer(final ByteBuffer incomingBuffer, final BAMAccessPlan accessPlan, final long filePosition) {
-        synchronized(lock) {
-            try {
-                if(validatingInputStream != null) {
-                    byte[] validBytes = new byte[incomingBuffer.remaining()];
-
-                    byte[] currentBytes = new byte[incomingBuffer.remaining()];
-                    int pos = incomingBuffer.position();
-                    int lim = incomingBuffer.limit();
-                    incomingBuffer.get(currentBytes);
-
-                    incomingBuffer.limit(lim);
-                    incomingBuffer.position(pos);
-
-                    long currentFilePointer = validatingInputStream.getFilePointer();
-                    validatingInputStream.seek(makeFilePointer(accessPlan.getBlockAddress(), 0));
-                    validatingInputStream.read(validBytes);
-                    validatingInputStream.seek(currentFilePointer);
-
-                    if(!Arrays.equals(validBytes,currentBytes))
-                        throw new ReviewedGATKException(String.format("Bytes being inserted into BlockInputStream %s are incorrect",this));
-                }
-
-                compactBuffer();
-                // Open up the buffer for more reading.
-                buffer.limit(buffer.capacity());
-
-                // Get the spans overlapping this particular block...
-                List<GATKChunk> spansOverlapping = accessPlan.getSpansOverlappingBlock(accessPlan.getBlockAddress(),filePosition);
-
-                // ...and advance the block
-                this.accessPlan = accessPlan;
-                accessPlan.advancePosition(makeFilePointer(filePosition, 0));
-
-                if(buffer.remaining() < incomingBuffer.remaining())
-                    lock.wait();
-
-                final int bytesInIncomingBuffer = incomingBuffer.limit();
-
-                for(GATKChunk spanOverlapping: spansOverlapping) {
-                    // Clear out the endcap tracking state and add in the starting position for this transfer.
-                    blockOffsets.removeLast();
-                    blockOffsets.add(buffer.position());
-                    blockPositions.removeLast();
-                    blockPositions.add(spanOverlapping.getChunkStart());
-
-                    // Stream the buffer into the data stream.
-                    incomingBuffer.limit((spanOverlapping.getBlockEnd() > spanOverlapping.getBlockStart()) ? bytesInIncomingBuffer : spanOverlapping.getBlockOffsetEnd());
-                    incomingBuffer.position(spanOverlapping.getBlockOffsetStart());
-                    buffer.put(incomingBuffer);
-
-                    // Add the endcap for this transfer.
-                    blockOffsets.add(buffer.position());
-                    blockPositions.add(spanOverlapping.getChunkEnd());
-                }
-
-                // Set up the buffer for reading.
-                buffer.flip();
-
-                lock.notify();
-            }
-            catch(Exception ex) {
-                reportException(ex);
-                lock.notify();
-            }
-        }
-    }
-
-    void reportException(Throwable t) {
-        synchronized(lock) {
-            this.error = t;
-            lock.notify();
-        }
-    }
-
-    private void checkForErrors() {
-        synchronized(lock) {
-            if(error != null) {
-                ReviewedGATKException toThrow = new ReviewedGATKException(String.format("Thread %s, BlockInputStream %s: Unable to retrieve BAM data from disk",Thread.currentThread().getId(),this),error);
-                toThrow.setStackTrace(error.getStackTrace());
-                throw toThrow;
-            }
-        }
-    }
-
-    /**
-     * Reads the next byte of data from the input stream.
-     * @return Next byte of data, from 0->255, as an int.
-     */
-    @Override
-    public int read() {
-        byte[] singleByte = new byte[1];
-        read(singleByte);
-        return singleByte[0];
-    }
-
-    /**
-     * Fills the given byte array to the extent possible.
-     * @param bytes byte array to be filled.
-     * @return The number of bytes actually read.
-     */
-    @Override
-    public int read(byte[] bytes) {
-        return read(bytes,0,bytes.length);
-    }
-
-    @Override
-    public int read(byte[] bytes, final int offset, final int length) {
-        int remaining = length;
-        synchronized(lock) {
-            while(remaining > 0) {
-                // Check for error conditions during last read.
-                checkForErrors();
-
-                // If completely out of space, queue up another buffer fill.
-                waitForBufferFill();
-
-                // Couldn't manage to load any data at all; abort and return what's available.
-                if(buffer.remaining() == 0)
-                    break;
-
-                int numBytesToCopy = Math.min(buffer.remaining(),remaining);
-                buffer.get(bytes,length-remaining+offset,numBytesToCopy);
-                remaining -= numBytesToCopy;
-
-                //if(remaining > 0)
-                //    System.out.printf("Thread %s: read the first %d bytes of a %d byte request%n",Thread.currentThread().getId(),length-remaining,length);
-                // TODO: Assert that we don't copy across a block boundary
-            }
-
-            // Notify any waiting threads that some of the contents of the buffer were removed.
-            if(length-remaining > 0)
-                lock.notify();
-        }
-
-//        if(validatingInputStream != null) {
-//            byte[] validBytes = new byte[length];
-//            try {
-//                validatingInputStream.read(validBytes,offset,length);
-//                for(int i = offset; i < offset+length; i++) {
-//                    if(bytes[i] != validBytes[i])
-//                        throw new ReviewedGATKException(String.format("Thread %s: blockInputStream %s attempting to return wrong set of bytes; mismatch at offset %d",Thread.currentThread().getId(),this,i));
-//                }
-//            }
-//            catch(IOException ex) {
-//                throw new ReviewedGATKException("Unable to validate against Picard input stream",ex);
-//            }
-//        }
-
-        // If any data was copied into the buffer, return the amount of data copied.
-        if(remaining < length)
-            return length - remaining;
-
-        // Otherwise, return -1.
-        return -1;
-    }
-
-    public void close() {
-        if(validatingInputStream != null) {
-            try {
-                validatingInputStream.close();
-            }
-            catch(IOException ex) {
-                throw new ReviewedGATKException("Unable to validate against Picard input stream",ex);
-            }
-        }
-    }
-
-    public String getSource() {
-        return reader.getSamFilePath();
-    }
-
-    private void waitForBufferFill() {
-        synchronized(lock) {
-            if(buffer.remaining() == 0 && !eof()) {
-                //System.out.printf("Thread %s is waiting for a buffer fill from position %d to buffer %s%n",Thread.currentThread().getId(),position.getBlockAddress(),this);
-                dispatcher.queueBlockLoad(accessPlan);
-                try {
-                    lock.wait();
-                }
-                catch(InterruptedException ex) {
-                    throw new ReviewedGATKException("Interrupt occurred waiting for buffer to fill",ex);
-                }
-            }
-        }
-    }
-
-    /**
-     * Create an encoded BAM file pointer given the address of a BGZF block and an offset.
-     * @param blockAddress Physical address on disk of a BGZF block.
-     * @param blockOffset Offset into the uncompressed data stored in the BGZF block.
-     * @return 64-bit pointer encoded according to the BAM spec.
-     */
-    public static long makeFilePointer(final long blockAddress, final int blockOffset) {
-        return blockAddress << 16 | blockOffset;
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/BlockLoader.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/BlockLoader.java
deleted file mode 100644
index 09a0cab..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/BlockLoader.java
+++ /dev/null
@@ -1,189 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.datasources.reads;
-
-import htsjdk.samtools.util.BlockCompressedStreamConstants;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-
-import java.io.FileInputStream;
-import java.io.IOException;
-import java.nio.ByteBuffer;
-import java.nio.ByteOrder;
-import java.nio.channels.FileChannel;
-import java.util.zip.DataFormatException;
-import java.util.zip.Inflater;
-
-/**
- * An engine for loading blocks.
- */
-class BlockLoader implements Runnable {
-    /**
-     * Coordinates the input queue.
-     */
-    private BGZFBlockLoadingDispatcher dispatcher;
-
-    /**
-     * A cache from which to retrieve open file handles.
-     */
-    private final FileHandleCache fileHandleCache;
-
-    /**
-     * Whether asynchronous decompression should happen.
-     */
-    private final boolean decompress;
-
-    /**
-     * An direct input buffer for incoming data from disk.
-     */
-    private final ByteBuffer inputBuffer;
-
-    public BlockLoader(final BGZFBlockLoadingDispatcher dispatcher, final FileHandleCache fileHandleCache, final boolean decompress) {
-        this.dispatcher = dispatcher;
-        this.fileHandleCache = fileHandleCache;
-        this.decompress = decompress;
-
-        this.inputBuffer = ByteBuffer.allocateDirect(64*1024 + BlockCompressedStreamConstants.EMPTY_GZIP_BLOCK.length);
-        inputBuffer.order(ByteOrder.LITTLE_ENDIAN);
-    }
-
-    public void run() {
-        for(;;) {
-            BAMAccessPlan accessPlan = null;
-            try {
-                accessPlan = dispatcher.claimNextWorkRequest();
-                FileInputStream inputStream = fileHandleCache.claimFileInputStream(accessPlan.getReader());
-
-                //long blockAddress = readerPosition.getBlockAddress();
-                //System.out.printf("Thread %s: BlockLoader: copying bytes from %s at position %d into %s%n",Thread.currentThread().getId(),inputStream,blockAddress,readerPosition.getInputStream());
-
-                ByteBuffer compressedBlock = readBGZFBlock(inputStream,accessPlan.getBlockAddress());
-                long nextBlockAddress = position(inputStream);
-                fileHandleCache.releaseFileInputStream(accessPlan.getReader(),inputStream);
-
-                ByteBuffer block = decompress ? decompressBGZFBlock(compressedBlock) : compressedBlock;
-                int bytesCopied = block.remaining();
-
-                BlockInputStream bamInputStream = accessPlan.getInputStream();
-                bamInputStream.copyIntoBuffer(block,accessPlan,nextBlockAddress);
-
-                //System.out.printf("Thread %s: BlockLoader: copied %d bytes from %s at position %d into %s%n",Thread.currentThread().getId(),bytesCopied,inputStream,blockAddress,readerPosition.getInputStream());
-            }
-            catch(Throwable error) {
-                if(accessPlan != null && accessPlan.getInputStream() != null)
-                    accessPlan.getInputStream().reportException(error);
-            }
-        }
-
-    }
-
-    private ByteBuffer readBGZFBlock(final FileInputStream inputStream, final long blockAddress) throws IOException {
-        FileChannel channel = inputStream.getChannel();
-
-        // Read the block header
-        channel.position(blockAddress);
-
-        int uncompressedDataSize = 0;
-        int bufferSize = 0;
-
-        do {
-            inputBuffer.clear();
-            inputBuffer.limit(BlockCompressedStreamConstants.BLOCK_HEADER_LENGTH);
-            channel.read(inputBuffer);
-
-            // Read out the size of the full BGZF block into a two bit short container, then 'or' that
-            // value into an int buffer to transfer the bitwise contents into an int.
-            inputBuffer.flip();
-            if(inputBuffer.remaining() != BlockCompressedStreamConstants.BLOCK_HEADER_LENGTH)
-                throw new ReviewedGATKException("BUG: unable to read a the complete block header in one pass.");
-
-            // Verify that the file was read at a valid point.
-            if(unpackUByte8(inputBuffer,0) != BlockCompressedStreamConstants.GZIP_ID1 ||
-                    unpackUByte8(inputBuffer,1) != BlockCompressedStreamConstants.GZIP_ID2 ||
-                    unpackUByte8(inputBuffer,3) != BlockCompressedStreamConstants.GZIP_FLG ||
-                    unpackUInt16(inputBuffer,10) != BlockCompressedStreamConstants.GZIP_XLEN ||
-                    unpackUByte8(inputBuffer,12) != BlockCompressedStreamConstants.BGZF_ID1 ||
-                    unpackUByte8(inputBuffer,13) != BlockCompressedStreamConstants.BGZF_ID2) {
-                throw new ReviewedGATKException("BUG: Started reading compressed block at incorrect position");
-            }
-
-            inputBuffer.position(BlockCompressedStreamConstants.BLOCK_LENGTH_OFFSET);
-            bufferSize = unpackUInt16(inputBuffer,BlockCompressedStreamConstants.BLOCK_LENGTH_OFFSET)+1;
-
-            // Adjust buffer limits and finish reading the block.  Also read the next header, just in case there's a 0-byte block.
-            inputBuffer.limit(bufferSize);
-            inputBuffer.position(BlockCompressedStreamConstants.BLOCK_HEADER_LENGTH);
-            channel.read(inputBuffer);
-
-            // Check the uncompressed length.  If 0 and not at EOF, we'll want to check the next block.
-            uncompressedDataSize = inputBuffer.getInt(inputBuffer.limit()-4);
-            //System.out.printf("Uncompressed block size of the current block (at position %d) is %d%n",channel.position()-inputBuffer.limit(),uncompressedDataSize);
-        }
-        while(uncompressedDataSize == 0 && channel.position() < channel.size());
-
-        // Prepare the buffer for reading.
-        inputBuffer.flip();
-
-        return inputBuffer;
-    }
-
-    private ByteBuffer decompressBGZFBlock(final ByteBuffer bgzfBlock) throws DataFormatException {
-        final int compressedBufferSize = bgzfBlock.remaining();
-
-        // Determine the uncompressed buffer size (
-        bgzfBlock.position(bgzfBlock.limit()-4);
-        int uncompressedBufferSize = bgzfBlock.getInt();
-        byte[] uncompressedContent = new byte[uncompressedBufferSize];
-
-        // Bound the CDATA section of the buffer.
-        bgzfBlock.limit(compressedBufferSize-BlockCompressedStreamConstants.BLOCK_FOOTER_LENGTH);
-        bgzfBlock.position(BlockCompressedStreamConstants.BLOCK_HEADER_LENGTH);
-        byte[] compressedContent = new byte[bgzfBlock.remaining()];
-        ByteBuffer.wrap(compressedContent).put(bgzfBlock);
-
-        // Decompress the buffer.
-        final Inflater inflater = new Inflater(true);
-        inflater.setInput(compressedContent);
-        int bytesUncompressed = inflater.inflate(uncompressedContent);
-        if(bytesUncompressed != uncompressedBufferSize)
-            throw new ReviewedGATKException("Error decompressing block");
-
-        return ByteBuffer.wrap(uncompressedContent);
-    }
-
-    private long position(final FileInputStream inputStream) throws IOException {
-        return inputStream.getChannel().position();
-    }
-
-    private int unpackUByte8(final ByteBuffer buffer,final int position) {
-        return buffer.get(position) & 0xFF;
-    }
-
-    private int unpackUInt16(final ByteBuffer buffer,final int position) {
-        // Read out the size of the full BGZF block into a two bit short container, then 'or' that
-        // value into an int buffer to transfer the bitwise contents into an int.
-        return buffer.getShort(position) & 0xFFFF;
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/FileHandleCache.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/FileHandleCache.java
deleted file mode 100644
index 8d5ab3b..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/FileHandleCache.java
+++ /dev/null
@@ -1,232 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.datasources.reads;
-
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-import org.broadinstitute.gatk.utils.exceptions.GATKException;
-
-import java.io.FileInputStream;
-import java.io.IOException;
-import java.util.Collection;
-import java.util.HashMap;
-import java.util.LinkedHashMap;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.Map;
-import java.util.Queue;
-
-/**
- * Caches frequently used  file handles.  Right now, caches only a single file handle.
- * TODO: Generalize to support arbitrary file handle caches.
- */
-public class FileHandleCache {
-    /**
-     * The underlying data structure storing file handles.
-     */
-    private final FileHandleStorage fileHandleStorage;
-
-    /**
-     * How many file handles should be kept open at once.
-     */
-    private final int cacheSize;
-
-    /**
-     * A uniquifier: assign a unique ID to every instance of a file handle.
-     */
-    private final Map<SAMReaderID,Integer> keyCounter = new HashMap<SAMReaderID,Integer>();
-
-    /**
-     * A shared lock, private so that outside users cannot notify it.
-     */
-    private final Object lock = new Object();
-
-    /**
-     * Indicates how many file handles are outstanding at this point.
-     */
-    private int numOutstandingFileHandles = 0;
-
-    /**
-     * Create a new file handle cache of the given cache size.
-     * @param cacheSize how many readers to hold open at once.
-     */
-    public FileHandleCache(final int cacheSize) {
-        this.cacheSize = cacheSize;
-        fileHandleStorage = new FileHandleStorage();
-    }
-
-    /**
-     * Retrieves or opens a file handle for the given reader ID.
-     * @param key The ke
-     * @return A file input stream from the cache, if available, or otherwise newly opened.
-     */
-    public FileInputStream claimFileInputStream(final SAMReaderID key) {
-        synchronized(lock) {
-            FileInputStream inputStream = findExistingEntry(key);
-            if(inputStream == null) {
-                try {
-                    // If the cache is maxed out, wait for another file handle to emerge.
-                    if(numOutstandingFileHandles >= cacheSize)
-                        lock.wait();
-                }
-                catch(InterruptedException ex) {
-                    throw new ReviewedGATKException("Interrupted while waiting for a file handle");
-                }
-                inputStream = openInputStream(key);
-            }
-            numOutstandingFileHandles++;
-
-            //System.out.printf("Handing input stream %s to thread %s%n",inputStream,Thread.currentThread().getId());
-            return inputStream;
-        }
-    }
-
-    /**
-     * Releases the current reader and returns it to the cache.
-     * @param key The reader.
-     * @param inputStream The stream being used.
-     */
-    public void releaseFileInputStream(final SAMReaderID key, final FileInputStream inputStream) {
-        synchronized(lock) {
-            numOutstandingFileHandles--;
-            UniqueKey newID = allocateKey(key);
-            fileHandleStorage.put(newID,inputStream);
-            // Let any listeners know that another file handle has become available.
-            lock.notify();
-        }
-    }
-
-    /**
-     * Finds an existing entry in the storage mechanism.
-     * @param key Reader.
-     * @return a cached stream, if available.  Otherwise,
-     */
-    private FileInputStream findExistingEntry(final SAMReaderID key) {
-        int existingHandles = getMostRecentUniquifier(key);
-
-        // See if any of the keys currently exist in the repository.
-        for(int i = 0; i <= existingHandles; i++) {
-            UniqueKey uniqueKey = new UniqueKey(key,i);
-            if(fileHandleStorage.containsKey(uniqueKey))
-                return fileHandleStorage.remove(uniqueKey);
-        }
-
-        return null;
-    }
-
-    /**
-     * Gets the most recent uniquifier used for the given reader.
-     * @param reader Reader for which to determine uniqueness.
-     * @return
-     */
-    private int getMostRecentUniquifier(final SAMReaderID reader) {
-        if(keyCounter.containsKey(reader))
-            return keyCounter.get(reader);
-        else return -1;
-    }
-
-    private UniqueKey allocateKey(final SAMReaderID reader) {
-        int uniquifier = getMostRecentUniquifier(reader)+1;
-        keyCounter.put(reader,uniquifier);
-        return new UniqueKey(reader,uniquifier);
-    }
-
-    private FileInputStream openInputStream(final SAMReaderID reader) {
-        try {
-            return new FileInputStream(reader.getSamFilePath());
-        }
-        catch(IOException ex) {
-            throw new GATKException("Unable to open input file");
-        }
-    }
-
-    private void closeInputStream(final FileInputStream inputStream) {
-        try {
-            inputStream.close();
-        }
-        catch(IOException ex) {
-            throw new GATKException("Unable to open input file");
-        }
-    }
-
-    /**
-     * Actually contains the file handles, purging them as they get too old.
-     */
-    private class FileHandleStorage extends LinkedHashMap<UniqueKey,FileInputStream> {
-        /**
-         * Remove the oldest entry
-         * @param entry Entry to consider removing.
-         * @return True if the cache size has been exceeded.  False otherwise.
-         */
-        @Override
-        protected boolean removeEldestEntry(Map.Entry<UniqueKey,FileInputStream> entry) {
-            synchronized (lock) {
-                if(size() > cacheSize) {
-                    keyCounter.put(entry.getKey().key,keyCounter.get(entry.getKey().key)-1);
-                    closeInputStream(entry.getValue());
-
-                    return true;
-                }
-            }
-            return false;
-        }
-    }
-
-    /**
-     * Uniquifies a key by adding a numerical uniquifier.
-     */
-    private class UniqueKey {
-        /**
-         * The file handle's key.
-         */
-        private final SAMReaderID key;
-
-        /**
-         * A uniquifier, so that multiple of the same reader can exist in the cache.
-         */
-        private final int uniqueID;
-
-        public UniqueKey(final SAMReaderID reader, final int uniqueID) {
-            this.key = reader;
-            this.uniqueID = uniqueID;
-        }
-
-        @Override
-        public boolean equals(Object other) {
-            if(!(other instanceof UniqueKey))
-                return false;
-            UniqueKey otherUniqueKey = (UniqueKey)other;
-            return key.equals(otherUniqueKey.key) && this.uniqueID == otherUniqueKey.uniqueID;
-        }
-
-        @Override
-        public int hashCode() {
-            return key.hashCode();
-        }
-    }
-
-
-
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/FilePointer.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/FilePointer.java
deleted file mode 100644
index 99d9def..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/FilePointer.java
+++ /dev/null
@@ -1,436 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.datasources.reads;
-
-import htsjdk.samtools.util.PeekableIterator;
-import htsjdk.samtools.GATKBAMFileSpan;
-import htsjdk.samtools.GATKChunk;
-import htsjdk.samtools.SAMFileSpan;
-import htsjdk.samtools.SAMRecord;
-import org.broadinstitute.gatk.utils.GenomeLoc;
-import org.broadinstitute.gatk.utils.GenomeLocParser;
-import org.broadinstitute.gatk.utils.Utils;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-import org.broadinstitute.gatk.utils.interval.IntervalMergingRule;
-import org.broadinstitute.gatk.utils.interval.IntervalUtils;
-
-import java.util.*;
-
-/**
- * Represents a small section of a BAM file, and every associated interval.
- */
-public class FilePointer {
-    protected final SortedMap<SAMReaderID,SAMFileSpan> fileSpans = new TreeMap<SAMReaderID,SAMFileSpan>();
-    protected final List<GenomeLoc> locations = new ArrayList<GenomeLoc>();
-    protected final IntervalMergingRule intervalMergingRule;
-
-    /**
-     * Does this file pointer point into an unmapped region?
-     */
-    protected final boolean isRegionUnmapped;
-
-    /**
-     * Is this FilePointer "monolithic"? That is, does it represent all regions in all files that we will
-     * ever visit during this GATK run? If this is set to true, the engine will expect to see only this
-     * one FilePointer during the entire run, and this FilePointer will be allowed to contain intervals
-     * from more than one contig.
-     */
-    private boolean isMonolithic = false;
-
-    /**
-     * Index of the contig covered by this FilePointer. Only meaningful for non-monolithic, mapped FilePointers
-     */
-    private Integer contigIndex = null;
-
-
-    public FilePointer( final IntervalMergingRule mergeRule, final List<GenomeLoc> locations ) {
-        this.intervalMergingRule = mergeRule;
-        this.locations.addAll(locations);
-        this.isRegionUnmapped = checkUnmappedStatus();
-
-        validateAllLocations();
-        if ( locations.size() > 0 ) {
-            contigIndex = locations.get(0).getContigIndex();
-        }
-    }
-
-    public FilePointer( final IntervalMergingRule mergeRule, final GenomeLoc... locations ) {
-        this(mergeRule, Arrays.asList(locations));
-    }
-
-    public FilePointer( final Map<SAMReaderID,SAMFileSpan> fileSpans, final IntervalMergingRule mergeRule, final List<GenomeLoc> locations ) {
-        this(mergeRule, locations);
-        this.fileSpans.putAll(fileSpans);
-    }
-
-    private boolean checkUnmappedStatus() {
-        boolean foundMapped = false, foundUnmapped = false;
-
-        for( GenomeLoc location: locations ) {
-            if ( GenomeLoc.isUnmapped(location) )
-                foundUnmapped = true;
-            else
-                foundMapped = true;
-        }
-        if ( foundMapped && foundUnmapped )
-            throw new ReviewedGATKException("BUG: File pointers cannot be mixed mapped/unmapped.");
-
-        return foundUnmapped;
-    }
-
-    private void validateAllLocations() {
-        // Unmapped and monolithic FilePointers are exempted from the one-contig-only restriction
-        if ( isRegionUnmapped || isMonolithic ) {
-            return;
-        }
-
-        Integer previousContigIndex = null;
-
-        for ( GenomeLoc location : locations ) {
-            if ( previousContigIndex != null && previousContigIndex != location.getContigIndex() ) {
-                throw new ReviewedGATKException("Non-monolithic file pointers must contain intervals from at most one contig");
-            }
-
-            previousContigIndex = location.getContigIndex();
-        }
-    }
-
-    private void validateLocation( GenomeLoc location ) {
-        if ( isRegionUnmapped != GenomeLoc.isUnmapped(location) ) {
-            throw new ReviewedGATKException("BUG: File pointers cannot be mixed mapped/unmapped.");
-        }
-        if ( ! isRegionUnmapped && ! isMonolithic && contigIndex != null && contigIndex != location.getContigIndex() ) {
-            throw new ReviewedGATKException("Non-monolithic file pointers must contain intervals from at most one contig");
-        }
-    }
-
-    /**
-     * Returns an immutable view of this FilePointer's file spans
-     *
-     * @return an immutable view of this FilePointer's file spans
-     */
-    public Map<SAMReaderID, SAMFileSpan> getFileSpans() {
-        return Collections.unmodifiableMap(fileSpans);
-    }
-
-    /**
-     * Returns an immutable variant of the list of locations.
-     * @return
-     */
-    public List<GenomeLoc> getLocations() {
-        return Collections.unmodifiableList(locations);
-    }
-
-    /**
-     * Returns the index of the contig into which this FilePointer points (a FilePointer can represent
-     * regions in at most one contig).
-     *
-     * @return the index of the contig into which this FilePointer points
-     */
-    public int getContigIndex() {
-        return locations.size() > 0 ? locations.get(0).getContigIndex() : SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX;
-    }
-
-    /**
-     * Returns the IntervalMergingRule used by this FilePointer to merge adjacent locations
-     *
-     * @return the IntervalMergingRule used by this FilePointer (never null)
-     */
-    public IntervalMergingRule getIntervalMergingRule() {
-        return intervalMergingRule;
-    }
-
-    /**
-     * Is this FilePointer "monolithic"? That is, does it represent all regions in all files that we will
-     * ever visit during this GATK run? If this is set to true, the engine will expect to see only this
-     * one FilePointer during the entire run, and this FilePointer will be allowed to contain intervals
-     * from more than one contig.
-     *
-     * @return true if this FP is a monolithic FP representing all regions in all files, otherwise false
-     */
-    public boolean isMonolithic() {
-        return isMonolithic;
-    }
-
-    /**
-     * Set this FP's "monolithic" status to true or false. An FP is monolithic if it represents all
-     * regions in all files that we will ever visit, and is the only FP we will ever create. A monolithic
-     * FP may contain intervals from more than one contig.
-     *
-     * @param isMonolithic set this FP's monolithic status to this value
-     */
-    public void setIsMonolithic( boolean isMonolithic ) {
-        this.isMonolithic = isMonolithic;
-    }
-
-    @Override
-    public boolean equals(final Object other) {
-        if(!(other instanceof FilePointer))
-            return false;
-        FilePointer otherFilePointer = (FilePointer)other;
-
-        // intervals
-        if(this.locations.size() != otherFilePointer.locations.size())
-            return false;
-        for(int i = 0; i < locations.size(); i++) {
-            if(!this.locations.get(i).equals(otherFilePointer.locations.get(i)))
-                return false;
-        }
-
-        // fileSpans
-        if(this.fileSpans.size() != otherFilePointer.fileSpans.size())
-            return false;
-        Iterator<Map.Entry<SAMReaderID,SAMFileSpan>> thisEntries = this.fileSpans.entrySet().iterator();
-        Iterator<Map.Entry<SAMReaderID,SAMFileSpan>> otherEntries = otherFilePointer.fileSpans.entrySet().iterator();
-        while(thisEntries.hasNext() || otherEntries.hasNext()) {
-            if(!thisEntries.next().equals(otherEntries.next()))
-                return false;
-        }
-        
-        return true;
-    }
-
-    public void addLocation(final GenomeLoc location) {
-        validateLocation(location);
-
-        this.locations.add(location);
-        if ( contigIndex == null ) {
-            contigIndex = location.getContigIndex();
-        }
-    }
-
-    public void addFileSpans(final SAMReaderID id, final SAMFileSpan fileSpan) {
-        this.fileSpans.put(id,fileSpan);
-    }
-
-    public void addFileSpans(final Map<SAMReaderID, GATKBAMFileSpan> fileSpans) {
-        this.fileSpans.putAll(fileSpans);
-    }
-
-
-    /**
-     * Computes the size of this file span, in uncompressed bytes.
-     * @return Size of the file span.
-     */
-    public long size() {
-        long size = 0L;
-        for(SAMFileSpan fileSpan: fileSpans.values())
-            size += ((GATKBAMFileSpan)fileSpan).size();
-        return size;
-    }
-
-    /**
-     * Returns the difference in size between two filespans.
-     * @param other Other filespan against which to measure.
-     * @return The difference in size between the two file pointers.
-     */
-    public long minus(final FilePointer other) {
-        long difference = 0;
-        PeekableIterator<Map.Entry<SAMReaderID,SAMFileSpan>> thisIterator = new PeekableIterator<Map.Entry<SAMReaderID,SAMFileSpan>>(this.fileSpans.entrySet().iterator());
-        PeekableIterator<Map.Entry<SAMReaderID,SAMFileSpan>> otherIterator = new PeekableIterator<Map.Entry<SAMReaderID,SAMFileSpan>>(other.fileSpans.entrySet().iterator());
-
-        while(thisIterator.hasNext()) {
-            // If there are no elements left in the 'other' iterator, spin out this iterator.
-            if(!otherIterator.hasNext()) {
-                GATKBAMFileSpan nextSpan = (GATKBAMFileSpan)thisIterator.next().getValue();
-                difference += nextSpan.size();
-                continue;
-            }
-
-            // Otherwise, compare the latest value.
-            int compareValue = thisIterator.peek().getKey().compareTo(otherIterator.peek().getKey());
-
-            if(compareValue < 0) {
-                // This before other.
-                difference += ((GATKBAMFileSpan)thisIterator.next().getValue()).size();
-            }
-            else if(compareValue > 0) {
-                // Other before this.
-                difference += ((GATKBAMFileSpan)otherIterator.next().getValue()).size();
-            }
-            else {
-                // equality; difference the values.
-                GATKBAMFileSpan thisRegion = (GATKBAMFileSpan)thisIterator.next().getValue();
-                GATKBAMFileSpan otherRegion = (GATKBAMFileSpan)otherIterator.next().getValue();
-                difference += Math.abs(thisRegion.minus(otherRegion).size());
-            }
-        }
-        return difference;
-    }
-
-    /**
-     * Combines two file pointers into one.
-     * @param parser The genomelocparser to use when manipulating intervals.
-     * @param other File pointer to combine into this one.
-     * @return A completely new file pointer that is the combination of the two.
-     */
-    public FilePointer combine(final GenomeLocParser parser, final FilePointer other) {
-        FilePointer combined = new FilePointer(intervalMergingRule);
-
-        List<GenomeLoc> intervals = new ArrayList<GenomeLoc>();
-        intervals.addAll(locations);
-        intervals.addAll(other.locations);
-        for(GenomeLoc interval: IntervalUtils.sortAndMergeIntervals(parser,intervals,intervalMergingRule))
-            combined.addLocation(interval);
-
-        PeekableIterator<Map.Entry<SAMReaderID,SAMFileSpan>> thisIterator = new PeekableIterator<Map.Entry<SAMReaderID,SAMFileSpan>>(this.fileSpans.entrySet().iterator());
-        PeekableIterator<Map.Entry<SAMReaderID,SAMFileSpan>> otherIterator = new PeekableIterator<Map.Entry<SAMReaderID,SAMFileSpan>>(other.fileSpans.entrySet().iterator());
-
-        while(thisIterator.hasNext() || otherIterator.hasNext()) {
-            int compareValue;
-            if(!otherIterator.hasNext()) {
-                compareValue = -1;
-            }
-            else if(!thisIterator.hasNext())
-                compareValue = 1;
-            else
-                compareValue = thisIterator.peek().getKey().compareTo(otherIterator.peek().getKey());
-
-            // This before other.
-            if(compareValue < 0)
-                mergeElementsInto(combined,thisIterator);
-            // Other before this.
-            else if(compareValue > 0)
-                mergeElementsInto(combined,otherIterator);
-            // equality; union the values.
-            else
-                mergeElementsInto(combined,thisIterator,otherIterator);
-        }
-        return combined;
-    }
-
-    /**
-     * Roll the next element in the iterator into the combined entry.
-     * @param combined Entry into which to roll the next element.
-     * @param iterators Sources of next elements.
-     */
-    private void mergeElementsInto(final FilePointer combined, Iterator<Map.Entry<SAMReaderID,SAMFileSpan>>... iterators) {
-        if(iterators.length == 0)
-            throw new ReviewedGATKException("Tried to add zero elements to an existing file pointer.");
-        Map.Entry<SAMReaderID,SAMFileSpan> initialElement = iterators[0].next();
-        GATKBAMFileSpan fileSpan = (GATKBAMFileSpan)initialElement.getValue();
-        for(int i = 1; i < iterators.length; i++)
-            fileSpan = fileSpan.union((GATKBAMFileSpan)iterators[i].next().getValue());
-        combined.addFileSpans(initialElement.getKey(),fileSpan);
-    }
-
-    /**
-     * Efficiently generate the union of the n FilePointers passed in. Much more efficient than
-     * combining two FilePointers at a time using the combine() method above.
-     *
-     * IMPORTANT: the FilePointers to be unioned must either all represent regions on the
-     * same contig, or all be unmapped, since we cannot create FilePointers with a mix of
-     * contigs or with mixed mapped/unmapped regions.
-     *
-     * @param filePointers the FilePointers to union
-     * @param parser our GenomeLocParser
-     * @return the union of the FilePointers passed in
-     */
-    public static FilePointer union( List<FilePointer> filePointers, GenomeLocParser parser ) {
-        if ( filePointers == null || filePointers.isEmpty() ) {
-            return new FilePointer(IntervalMergingRule.ALL);
-        }
-
-        Map<SAMReaderID, List<GATKChunk>> fileChunks = new HashMap<SAMReaderID, List<GATKChunk>>();
-        List<GenomeLoc> locations = new ArrayList<GenomeLoc>();
-        IntervalMergingRule mergeRule = filePointers.get(0).getIntervalMergingRule();
-
-        // First extract all intervals and file chunks from the FilePointers into unsorted, unmerged collections
-        for ( FilePointer filePointer : filePointers ) {
-            locations.addAll(filePointer.getLocations());
-            if (mergeRule != filePointer.getIntervalMergingRule())
-                throw new ReviewedGATKException("All FilePointers in FilePointer.union() must have use the same IntervalMergeRule");
-
-            for ( Map.Entry<SAMReaderID, SAMFileSpan> fileSpanEntry : filePointer.getFileSpans().entrySet() ) {
-                GATKBAMFileSpan fileSpan = (GATKBAMFileSpan)fileSpanEntry.getValue();
-
-                if ( fileChunks.containsKey(fileSpanEntry.getKey()) ) {
-                    fileChunks.get(fileSpanEntry.getKey()).addAll(fileSpan.getGATKChunks());
-                }
-                else {
-                    fileChunks.put(fileSpanEntry.getKey(), fileSpan.getGATKChunks());
-                }
-            }
-        }
-
-        // Now sort and merge the intervals
-        List<GenomeLoc> sortedMergedLocations = new ArrayList<GenomeLoc>();
-        sortedMergedLocations.addAll(IntervalUtils.sortAndMergeIntervals(parser, locations, mergeRule));
-
-        // For each BAM file, convert from an unsorted, unmerged list of chunks to a GATKBAMFileSpan containing
-        // the sorted, merged union of the chunks for that file
-        Map<SAMReaderID, SAMFileSpan> mergedFileSpans = new HashMap<SAMReaderID, SAMFileSpan>(fileChunks.size());
-        for ( Map.Entry<SAMReaderID, List<GATKChunk>> fileChunksEntry : fileChunks.entrySet() ) {
-            List<GATKChunk> unmergedChunks = fileChunksEntry.getValue();
-            mergedFileSpans.put(fileChunksEntry.getKey(),
-                                (new GATKBAMFileSpan(unmergedChunks.toArray(new GATKChunk[unmergedChunks.size()]))).union(new GATKBAMFileSpan()));
-        }
-
-        return new FilePointer(mergedFileSpans, mergeRule, sortedMergedLocations);
-    }
-
-    /**
-     * Returns true if any of the file spans in this FilePointer overlap their counterparts in
-     * the other FilePointer. "Overlap" is defined as having an overlapping extent (the region
-     * from the start of the first chunk to the end of the last chunk).
-     *
-     * @param other the FilePointer against which to check overlap with this FilePointer
-     * @return true if any file spans overlap their counterparts in other, otherwise false
-     */
-    public boolean hasFileSpansOverlappingWith( FilePointer other ) {
-        for ( Map.Entry<SAMReaderID, SAMFileSpan> thisFilePointerEntry : fileSpans.entrySet() ) {
-            GATKBAMFileSpan thisFileSpan = new GATKBAMFileSpan(thisFilePointerEntry.getValue());
-
-            SAMFileSpan otherEntry = other.fileSpans.get(thisFilePointerEntry.getKey());
-            if ( otherEntry == null ) {
-                continue;  // no counterpart for this file span in other
-            }
-            GATKBAMFileSpan otherFileSpan = new GATKBAMFileSpan(otherEntry);
-
-            if ( thisFileSpan.getExtent().overlaps(otherFileSpan.getExtent()) ) {
-                return true;
-            }
-        }
-
-        return false;
-    }
-
-    @Override
-    public String toString() {
-        StringBuilder builder = new StringBuilder();
-        builder.append("FilePointer:\n");
-        builder.append("\tlocations = {");
-        builder.append(Utils.join(";",locations));
-        builder.append("}\n\tregions = \n");
-        for(Map.Entry<SAMReaderID,SAMFileSpan> entry: fileSpans.entrySet()) {
-            builder.append(entry.getKey());
-            builder.append("= {");
-            builder.append(entry.getValue());
-            builder.append("}");
-        }
-        return builder.toString();
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/GATKBAMIndex.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/GATKBAMIndex.java
deleted file mode 100644
index 17afd58..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/GATKBAMIndex.java
+++ /dev/null
@@ -1,468 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.datasources.reads;
-
-import htsjdk.samtools.Bin;
-import htsjdk.samtools.GATKBin;
-import htsjdk.samtools.GATKChunk;
-import htsjdk.samtools.LinearIndex;
-import htsjdk.samtools.seekablestream.SeekableBufferedStream;
-import htsjdk.samtools.seekablestream.SeekableFileStream;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-import org.broadinstitute.gatk.utils.exceptions.UserException;
-
-import java.io.File;
-import java.io.IOException;
-import java.nio.ByteBuffer;
-import java.nio.ByteOrder;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-
-/**
- * A basic interface for querying BAM indices.
- * Very much not thread-safe.
- *
- * @author mhanna
- * @version 0.1
- */
-public class GATKBAMIndex {
-    /**
-     * BAM index file magic number.
-     */
-    private static final byte[] BAM_INDEX_MAGIC = "BAI\1".getBytes();
-
-    /**
-     * Reports the total amount of genomic data that any bin can index.
-     */
-    protected static final int BIN_GENOMIC_SPAN = 512*1024*1024;
-
-    /**
-     * What is the starting bin for each level?
-     */
-    private static final int[] LEVEL_STARTS = {0,1,9,73,585,4681};
-
-    /**
-     * Reports the maximum number of bins that can appear in a BAM file.
-     */
-    public static final int MAX_BINS = 37450;   // =(8^6-1)/7+1
-
-    private final File mFile;
-
-    //TODO: figure out a good value for this buffer size
-    private final int BUFFERED_STREAM_BUFFER_SIZE = 8192;
-
-    /**
-     * Number of sequences stored in this index.
-     */
-    private final int sequenceCount;
-
-    /**
-     * A cache of the starting positions of the sequences.
-     */
-    private final long[] sequenceStartCache;
-
-    private SeekableFileStream fileStream;
-    private SeekableBufferedStream bufferedStream;
-    private long fileLength;
-
-    public GATKBAMIndex(final File file) {
-        mFile = file;
-        // Open the file stream.
-        openIndexFile();
-
-        // Verify the magic number.
-        seek(0);
-        final byte[] buffer = readBytes(4);
-        if (!Arrays.equals(buffer, BAM_INDEX_MAGIC)) {
-            throw new ReviewedGATKException("Invalid file header in BAM index " + mFile +
-                                       ": " + new String(buffer));
-        }
-
-        seek(4);
-
-        sequenceCount = readInteger();
-
-        // Create a cache of the starting position of each sequence.  Initialize it to -1.
-        sequenceStartCache = new long[sequenceCount];
-        for(int i = 1; i < sequenceCount; i++)
-            sequenceStartCache[i] = -1;
-
-        // Seed the first element in the array with the current position.
-        if(sequenceCount > 0)
-            sequenceStartCache[0] = position();
-
-        closeIndexFile();
-    }
-
-    public GATKBAMIndexData readReferenceSequence(final int referenceSequence) {
-        openIndexFile();
-
-        if (referenceSequence >= sequenceCount)
-            throw new ReviewedGATKException("Invalid sequence number " + referenceSequence + " in index file " + mFile);
-
-        skipToSequence(referenceSequence);
-
-        int binCount = readInteger();
-        List<GATKBin> bins = new ArrayList<GATKBin>();
-        for (int binNumber = 0; binNumber < binCount; binNumber++) {
-            final int indexBin = readInteger();
-            final int nChunks = readInteger();
-
-            List<GATKChunk> chunks = new ArrayList<GATKChunk>(nChunks);
-            long[] rawChunkData = readLongs(nChunks*2);
-            for (int ci = 0; ci < nChunks; ci++) {
-                final long chunkBegin = rawChunkData[ci*2];
-                final long chunkEnd = rawChunkData[ci*2+1];
-                chunks.add(new GATKChunk(chunkBegin, chunkEnd));
-            }
-            GATKBin bin = new GATKBin(referenceSequence, indexBin);
-            bin.setChunkList(chunks.toArray(new GATKChunk[chunks.size()]));
-            while(indexBin >= bins.size())
-                bins.add(null);
-            bins.set(indexBin,bin);
-        }
-
-        final int nLinearBins = readInteger();
-        long[] linearIndexEntries = readLongs(nLinearBins);
-
-        LinearIndex linearIndex = new LinearIndex(referenceSequence,0,linearIndexEntries);
-
-        closeIndexFile();
-
-        return new GATKBAMIndexData(this,referenceSequence,bins,linearIndex);
-    }
-
-    /**
-     * Get the number of levels employed by this index.
-     * @return Number of levels in this index.
-     */
-    public static int getNumIndexLevels() {
-        return LEVEL_STARTS.length;
-    }
-
-    /**
-     * Gets the first bin in the given level.
-     * @param levelNumber Level number.  0-based.
-     * @return The first bin in this level.
-     */
-    public static int getFirstBinInLevel(final int levelNumber) {
-        return LEVEL_STARTS[levelNumber];
-    }
-
-    /**
-     * Gets the number of bins in the given level.
-     * @param levelNumber Level number.  0-based.
-     * @return The size (number of possible bins) of the given level.
-     */
-    public int getLevelSize(final int levelNumber) {
-        if(levelNumber == getNumIndexLevels()-1)
-            return MAX_BINS-LEVEL_STARTS[levelNumber]-1;
-        else
-            return LEVEL_STARTS[levelNumber+1]-LEVEL_STARTS[levelNumber];
-    }
-
-    /**
-     * Gets the level associated with the given bin number.
-     * @param bin The bin  for which to determine the level.
-     * @return the level associated with the given bin number.
-     */
-    public int getLevelForBin(final Bin bin) {
-        GATKBin gatkBin = new GATKBin(bin);
-        if(gatkBin.getBinNumber() >= MAX_BINS)
-            throw new ReviewedGATKException("Tried to get level for invalid bin in index file " + mFile);
-        for(int i = getNumIndexLevels()-1; i >= 0; i--) {
-            if(gatkBin.getBinNumber() >= LEVEL_STARTS[i])
-                return i;
-        }
-        throw new ReviewedGATKException("Unable to find correct bin for bin " + bin + " in index file " + mFile);
-    }
-
-    /**
-     * Gets the first locus that this bin can index into.
-     * @param bin The bin to test.
-     * @return The last position that the given bin can represent.
-     */
-    public int getFirstLocusInBin(final Bin bin) {
-        final int level = getLevelForBin(bin);
-        final int levelStart = LEVEL_STARTS[level];
-        final int levelSize = ((level==getNumIndexLevels()-1) ? MAX_BINS-1 : LEVEL_STARTS[level+1]) - levelStart;
-        return (new GATKBin(bin).getBinNumber() - levelStart)*(BIN_GENOMIC_SPAN /levelSize)+1;
-    }
-
-    /**
-     * Gets the last locus that this bin can index into.
-     * @param bin The bin to test.
-     * @return The last position that the given bin can represent.
-     */
-    public int getLastLocusInBin(final Bin bin) {
-        final int level = getLevelForBin(bin);
-        final int levelStart = LEVEL_STARTS[level];
-        final int levelSize = ((level==getNumIndexLevels()-1) ? MAX_BINS-1 : LEVEL_STARTS[level+1]) - levelStart;
-        return (new GATKBin(bin).getBinNumber()-levelStart+1)*(BIN_GENOMIC_SPAN /levelSize);
-    }
-
-    /**
-     * Use to get close to the unmapped reads at the end of a BAM file.
-     * @return The file offset of the first record in the last linear bin, or -1
-     * if there are no elements in linear bins (i.e. no mapped reads).
-     */
-    public long getStartOfLastLinearBin() {
-        openIndexFile();
-
-        seek(4);
-
-        final int sequenceCount = readInteger();
-        // Because no reads may align to the last sequence in the sequence dictionary,
-        // grab the last element of the linear index for each sequence, and return
-        // the last one from the last sequence that has one.
-        long lastLinearIndexPointer = -1;
-        for (int i = 0; i < sequenceCount; i++) {
-            // System.out.println("# Sequence TID: " + i);
-            final int nBins = readInteger();
-            // System.out.println("# nBins: " + nBins);
-            for (int j1 = 0; j1 < nBins; j1++) {
-                // Skip bin #
-                skipBytes(4);
-                final int nChunks = readInteger();
-                // Skip chunks
-                skipBytes(16 * nChunks);
-            }
-            final int nLinearBins = readInteger();
-            if (nLinearBins > 0) {
-                // Skip to last element of list of linear bins
-                skipBytes(8 * (nLinearBins - 1));
-                lastLinearIndexPointer = readLongs(1)[0];
-            }
-        }
-
-        closeIndexFile();
-
-        return lastLinearIndexPointer;
-    }
-
-    /**
-     * Gets the possible number of bins for a given reference sequence.
-     * @return How many bins could possibly be used according to this indexing scheme to index a single contig.
-     */
-    protected int getMaxAddressibleGenomicLocation() {
-        return BIN_GENOMIC_SPAN;
-    }
-
-    protected void skipToSequence(final int referenceSequence) {
-        // Find the offset in the file of the last sequence whose position has been determined.  Start here
-        // when searching the sequence for the next value to read.  (Note that sequenceStartCache[0] will always
-        // be present, so no extra stopping condition is necessary.
-        int sequenceIndex = referenceSequence;
-        while(sequenceStartCache[sequenceIndex] == -1)
-            sequenceIndex--;
-
-        // Advance to the most recently found position.
-        seek(sequenceStartCache[sequenceIndex]);
-
-        for (int i = sequenceIndex; i < referenceSequence; i++) {
-            sequenceStartCache[i] = position();
-            // System.out.println("# Sequence TID: " + i);
-            final int nBins = readInteger();
-            // System.out.println("# nBins: " + nBins);
-            for (int j = 0; j < nBins; j++) {
-                final int bin = readInteger();
-                final int nChunks = readInteger();
-                // System.out.println("# bin[" + j + "] = " + bin + ", nChunks = " + nChunks);
-                skipBytes(16 * nChunks);
-            }
-            final int nLinearBins = readInteger();
-            // System.out.println("# nLinearBins: " + nLinearBins);
-            skipBytes(8 * nLinearBins);
-
-        }
-
-        sequenceStartCache[referenceSequence] = position();
-    }
-
-
-
-    private void openIndexFile() {
-        try {
-            fileStream = new SeekableFileStream(mFile);
-            bufferedStream = new SeekableBufferedStream(fileStream,BUFFERED_STREAM_BUFFER_SIZE);
-            fileLength=bufferedStream.length();
-        }
-        catch (IOException exc) {
-            throw new ReviewedGATKException("Unable to open index file (" + exc.getMessage() +")" + mFile, exc);
-        }
-    }
-
-    private void closeIndexFile() {
-        try {
-            bufferedStream.close();
-            fileStream.close();
-            fileLength = -1;
-        }
-        catch (IOException exc) {
-            throw new ReviewedGATKException("Unable to close index file " + mFile, exc);
-        }
-    }
-
-    private static final int INT_SIZE_IN_BYTES = Integer.SIZE / 8;
-    private static final int LONG_SIZE_IN_BYTES = Long.SIZE / 8;
-
-    private byte[] readBytes(int count) {
-        ByteBuffer buffer = getBuffer(count);
-        read(buffer);
-        buffer.flip();
-        byte[] contents = new byte[count];
-        buffer.get(contents);
-        return contents;
-    }
-
-    private int readInteger() {
-        ByteBuffer buffer = getBuffer(INT_SIZE_IN_BYTES);
-        read(buffer);
-        buffer.flip();
-        return buffer.getInt();
-    }
-
-    /**
-     * Reads an array of <count> longs from the file channel, returning the results as an array.
-     * @param count Number of longs to read.
-     * @return An array of longs.  Size of array should match count.
-     */
-    private long[] readLongs(final int count) {
-        ByteBuffer buffer = getBuffer(count*LONG_SIZE_IN_BYTES);
-        read(buffer);
-        buffer.flip();
-        long[] result = new long[count];
-        for(int i = 0; i < count; i++)
-            result[i] = buffer.getLong();
-        return result;
-    }
-
-    private void read(final ByteBuffer buffer) {
-        final int bytesRequested = buffer.limit();
-
-        try {
-
-           //BufferedInputStream cannot read directly into a byte buffer, so we read into an array
-            //and put the result into the bytebuffer after the if statement.
-
-            // We have a rigid expectation here to read in exactly the number of bytes we've limited
-            // our buffer to -- if there isn't enough data in the file, the index
-            // must be truncated or otherwise corrupt:
-            if(bytesRequested > fileLength - bufferedStream.position()){
-                throw new UserException.MalformedFile(mFile, String.format("Premature end-of-file while reading BAM index file %s. " +
-                        "It's likely that this file is truncated or corrupt -- " +
-                        "Please try re-indexing the corresponding BAM file.",
-                        mFile));
-            }
-
-            int totalBytesRead = 0;
-            // This while loop must terminate since we demand that we read at least one byte from the file at each iteration
-           while (totalBytesRead < bytesRequested) {
-                //  bufferedStream.read may return less than the requested amount of byte despite
-                // not reaching the end of the file, hence the loop.
-                int bytesRead = bufferedStream.read(byteArray, totalBytesRead, bytesRequested-totalBytesRead);
-
-                // We have a rigid expectation here to read in exactly the number of bytes we've limited
-                // our buffer to -- if we encounter EOF (-1), the index
-                // must be truncated or otherwise corrupt:
-                if (bytesRead <= 0) {
-                throw new UserException.MalformedFile(mFile, String.format("Premature end-of-file while reading BAM index file %s. " +
-                                                                           "It's likely that this file is truncated or corrupt -- " +
-                                                                           "Please try re-indexing the corresponding BAM file.",
-                                                                           mFile));
-                }
-                totalBytesRead += bytesRead;
-            }
-            if(totalBytesRead != bytesRequested)
-                throw new RuntimeException("Read amount different from requested amount. This should not happen.");
-
-            buffer.put(byteArray, 0, bytesRequested);
-        }
-        catch(IOException ex) {
-            throw new ReviewedGATKException("Index: unable to read bytes from index file " + mFile);
-        }
-    }
-
-
-    /**
-     * A reusable buffer for use by this index generator.
-     * TODO: Should this be a SoftReference?
-     */
-    private ByteBuffer buffer = null;
-
-    //BufferedStream don't read into ByteBuffers, so we need this temporary array
-    private byte[] byteArray=null;
-    private ByteBuffer getBuffer(final int size) {
-        if(buffer == null || buffer.capacity() < size) {
-            // Allocate a new byte buffer.  For now, make it indirect to make sure it winds up on the heap for easier debugging.
-            buffer = ByteBuffer.allocate(size);
-            byteArray = new byte[size];
-            buffer.order(ByteOrder.LITTLE_ENDIAN);
-        }
-        buffer.clear();
-        buffer.limit(size);
-        return buffer;
-    }
-
-    private void skipBytes(final int count) {
-        try {
-
-            //try to skip forward the requested amount.
-            long skipped =  bufferedStream.skip(count);
-
-            if( skipped != count ) { //if not managed to skip the requested amount
-		throw new ReviewedGATKException("Index: unable to reposition file channel of index file " + mFile);
-            }
-        }
-        catch(IOException ex) {
-            throw new ReviewedGATKException("Index: unable to reposition file channel of index file " + mFile);
-        }
-    }
-
-    private void seek(final long position) {
-        try {
-            //to seek a new position, move the fileChannel, and reposition the bufferedStream
-            bufferedStream.seek(position);
-        }
-        catch(IOException ex) {
-            throw new ReviewedGATKException("Index: unable to reposition of file channel of index file " + mFile);
-        }
-    }
-
-    /**
-     * Retrieve the position from the current file channel.
-     * @return position of the current file channel.
-     */
-    private long position() {
-        try {
-            return bufferedStream.position();
-        }
-        catch (IOException exc) {
-            throw new ReviewedGATKException("Unable to read position from index file " + mFile, exc);
-        }
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/GATKBAMIndexData.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/GATKBAMIndexData.java
deleted file mode 100644
index f1d6203..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/GATKBAMIndexData.java
+++ /dev/null
@@ -1,121 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.datasources.reads;
-
-import htsjdk.samtools.*;
-
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.List;
-
-/**
- * Stores and processes a single reference worth of GATK data.
- */
-public class GATKBAMIndexData {
-    private final GATKBAMIndex index;
-    private final int referenceSequence;
-    private final List<GATKBin> bins;
-    private final LinearIndex linearIndex;
-
-    public GATKBAMIndexData(final GATKBAMIndex index, final int referenceSequence, final List<GATKBin> bins, final LinearIndex linearIndex) {
-        this.index = index;
-        this.referenceSequence = referenceSequence;
-        this.bins = bins;
-        this.linearIndex = linearIndex;
-    }
-
-    public int getReferenceSequence() {
-        return referenceSequence;
-    }
-
-    /**
-     * Perform an overlapping query of all bins bounding the given location.
-     * @param bin The bin over which to perform an overlapping query.
-     * @return The file pointers
-     */
-    public GATKBAMFileSpan getSpanOverlapping(final Bin bin) {
-        if(bin == null)
-            return null;
-
-        GATKBin gatkBin = new GATKBin(bin);
-
-        final int binLevel = index.getLevelForBin(bin);
-        final int firstLocusInBin = index.getFirstLocusInBin(bin);
-
-        // Add the specified bin to the tree if it exists.
-        List<GATKBin> binTree = new ArrayList<GATKBin>();
-        if(gatkBin.getBinNumber() < bins.size() && bins.get(gatkBin.getBinNumber()) != null)
-            binTree.add(bins.get(gatkBin.getBinNumber()));
-
-        int currentBinLevel = binLevel;
-        while(--currentBinLevel >= 0) {
-            final int binStart = index.getFirstBinInLevel(currentBinLevel);
-            final int binWidth = index.getMaxAddressibleGenomicLocation()/index.getLevelSize(currentBinLevel);
-            final int binNumber = firstLocusInBin/binWidth + binStart;
-            if(binNumber < bins.size() && bins.get(binNumber) != null)
-                binTree.add(bins.get(binNumber));
-        }
-
-        List<GATKChunk> chunkList = new ArrayList<GATKChunk>();
-        for(GATKBin coveringBin: binTree) {
-            for(GATKChunk chunk: coveringBin.getChunkList())
-                chunkList.add(chunk.clone());
-        }
-
-        final int start = index.getFirstLocusInBin(bin);
-        chunkList = optimizeChunkList(chunkList,linearIndex.getMinimumOffset(start));
-        return new GATKBAMFileSpan(chunkList.toArray(new GATKChunk[chunkList.size()]));
-    }
-
-    private List<GATKChunk> optimizeChunkList(final List<GATKChunk> chunks, final long minimumOffset) {
-        GATKChunk lastChunk = null;
-        Collections.sort(chunks);
-        final List<GATKChunk> result = new ArrayList<GATKChunk>();
-        for (final GATKChunk chunk : chunks) {
-            if (chunk.getChunkEnd() <= minimumOffset) {
-                continue;               // linear index optimization
-            }
-            if (result.isEmpty()) {
-                result.add(chunk);
-                lastChunk = chunk;
-                continue;
-            }
-            // Coalesce chunks that are in adjacent file blocks.
-            // This is a performance optimization.
-            if (!lastChunk.overlaps(chunk) && !lastChunk.isAdjacentTo(chunk)) {
-                result.add(chunk);
-                lastChunk = chunk;
-            } else {
-                if (chunk.getChunkEnd() > lastChunk.getChunkEnd()) {
-                    lastChunk.setChunkEnd(chunk.getChunkEnd());
-                }
-            }
-        }
-        return result;
-    }
-
-
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/IntervalOverlapFilteringIterator.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/IntervalOverlapFilteringIterator.java
deleted file mode 100644
index c272e0a..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/IntervalOverlapFilteringIterator.java
+++ /dev/null
@@ -1,205 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.datasources.reads;
-
-import htsjdk.samtools.SAMRecord;
-import htsjdk.samtools.util.CloseableIterator;
-import org.broadinstitute.gatk.utils.GenomeLoc;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-import org.broadinstitute.gatk.utils.sam.AlignmentUtils;
-
-import java.util.List;
-import java.util.NoSuchElementException;
-
-/**
- * High efficiency filtering iterator designed to filter out reads only included
- * in the query results due to the granularity of the BAM index.
- *
- * Built into the BAM index is a notion of 16kbase granularity -- an index query for
- * two regions contained within a 16kbase chunk (say, chr1:5-10 and chr1:11-20) will
- * return exactly the same regions within the BAM file.  This iterator is optimized
- * to subtract out reads which do not at all overlap the interval list passed to the
- * constructor.
- *
- * Example:
- * interval list: chr20:6-10
- * Reads that would pass through the filter: chr20:6-10, chr20:1-15, chr20:1-7, chr20:8-15.
- * Reads that would be discarded by the filter: chr20:1-5, chr20:11-15.
- */
-class IntervalOverlapFilteringIterator implements CloseableIterator<SAMRecord> {
-    /**
-     * The wrapped iterator.
-     */
-    private CloseableIterator<SAMRecord> iterator;
-
-    /**
-     * The next read, queued up and ready to go.
-     */
-    private SAMRecord nextRead;
-
-    /**
-     * Rather than using the straight genomic bounds, use filter out only mapped reads.
-     */
-    private boolean keepOnlyUnmappedReads;
-
-    /**
-     * Custom representation of interval bounds.
-     * Makes it simpler to track current position.
-     */
-    private int[] intervalContigIndices;
-    private int[] intervalStarts;
-    private int[] intervalEnds;
-
-    /**
-     * Position within the interval list.
-     */
-    private int currentBound = 0;
-
-    public IntervalOverlapFilteringIterator(CloseableIterator<SAMRecord> iterator, List<GenomeLoc> intervals) {
-        this.iterator = iterator;
-
-        // Look at the interval list to detect whether we should worry about unmapped reads.
-        // If we find a mix of mapped/unmapped intervals, throw an exception.
-        boolean foundMappedIntervals = false;
-        for(GenomeLoc location: intervals) {
-            if(! GenomeLoc.isUnmapped(location))
-                foundMappedIntervals = true;
-            keepOnlyUnmappedReads |= GenomeLoc.isUnmapped(location);
-        }
-
-
-        if(foundMappedIntervals) {
-            if(keepOnlyUnmappedReads)
-                throw new ReviewedGATKException("Tried to apply IntervalOverlapFilteringIterator to a mixed of mapped and unmapped intervals.  Please apply this filter to only mapped or only unmapped reads");
-            this.intervalContigIndices = new int[intervals.size()];
-            this.intervalStarts = new int[intervals.size()];
-            this.intervalEnds = new int[intervals.size()];
-            int i = 0;
-            for(GenomeLoc interval: intervals) {
-                intervalContigIndices[i] = interval.getContigIndex();
-                intervalStarts[i] = interval.getStart();
-                intervalEnds[i] = interval.getStop();
-                i++;
-            }
-        }
-
-        advance();
-    }
-
-    public boolean hasNext() {
-        return nextRead != null;
-    }
-
-    public SAMRecord next() {
-        if(nextRead == null)
-            throw new NoSuchElementException("No more reads left in this iterator.");
-        SAMRecord currentRead = nextRead;
-        advance();
-        return currentRead;
-    }
-
-    public void remove() {
-        throw new UnsupportedOperationException("Cannot remove from an IntervalOverlapFilteringIterator");
-    }
-
-
-    public void close() {
-        iterator.close();
-    }
-
-    private void advance() {
-        nextRead = null;
-
-        if(!iterator.hasNext())
-            return;
-
-        SAMRecord candidateRead = iterator.next();
-        while(nextRead == null && (keepOnlyUnmappedReads || currentBound < intervalStarts.length)) {
-            if(!keepOnlyUnmappedReads) {
-                // Mapped read filter; check against GenomeLoc-derived bounds.
-                if(readEndsOnOrAfterStartingBound(candidateRead)) {
-                    // This read ends after the current interval begins.
-                    // Promising, but this read must be checked against the ending bound.
-                    if(readStartsOnOrBeforeEndingBound(candidateRead)) {
-                        // Yes, this read is within both bounds.  This must be our next read.
-                        nextRead = candidateRead;
-                        break;
-                    }
-                    else {
-                        // Oops, we're past the end bound.  Increment the current bound and try again.
-                        currentBound++;
-                        continue;
-                    }
-                }
-            }
-            else {
-                // Found a -L UNMAPPED read. NOTE: this is different than just being flagged as unmapped! We're done.
-                if(AlignmentUtils.isReadGenomeLocUnmapped(candidateRead)) {
-                    nextRead = candidateRead;
-                    break;
-                }
-            }
-
-            // No more reads available.  Stop the search.
-            if(!iterator.hasNext())
-                break;
-
-            // No reasonable read found; advance the iterator.
-            candidateRead = iterator.next();
-        }
-    }
-
-    /**
-     * Check whether the read lies after the start of the current bound.  If the read is unmapped but placed, its
-     * end will be distorted, so rely only on the alignment start.
-     * @param read The read to position-check.
-     * @return True if the read starts after the current bounds.  False otherwise.
-     */
-    private boolean readEndsOnOrAfterStartingBound(final SAMRecord read) {
-        return
-                // Read ends on a later contig, or...
-                read.getReferenceIndex() > intervalContigIndices[currentBound] ||
-                        // Read ends of this contig...
-                        (read.getReferenceIndex() == intervalContigIndices[currentBound] &&
-                                // either after this location, or...
-                                (read.getAlignmentEnd() >= intervalStarts[currentBound] ||
-                                        // read is unmapped but positioned and alignment start is on or after this start point.
-                                        (read.getReadUnmappedFlag() && read.getAlignmentStart() >= intervalStarts[currentBound])));
-    }
-
-    /**
-     * Check whether the read lies before the end of the current bound.
-     * @param read The read to position-check.
-     * @return True if the read starts after the current bounds.  False otherwise.
-     */
-    private boolean readStartsOnOrBeforeEndingBound(final SAMRecord read) {
-        return
-                // Read starts on a prior contig, or...
-                read.getReferenceIndex() < intervalContigIndices[currentBound] ||
-                        // Read starts on this contig and the alignment start is registered before this end point.
-                        (read.getReferenceIndex() == intervalContigIndices[currentBound] && read.getAlignmentStart() <= intervalEnds[currentBound]);
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/IntervalSharder.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/IntervalSharder.java
deleted file mode 100644
index e355c7e..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/IntervalSharder.java
+++ /dev/null
@@ -1,93 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.datasources.reads;
-
-import htsjdk.samtools.util.PeekableIterator;
-import org.broadinstitute.gatk.utils.GenomeLocParser;
-import org.broadinstitute.gatk.utils.GenomeLocSortedSet;
-import org.broadinstitute.gatk.utils.interval.IntervalMergingRule;
-
-import java.util.Iterator;
-
-/**
- * Handles the process of aggregating BAM intervals into individual shards.
- * TODO: The task performed by IntervalSharder is now better performed by LocusShardBalancer.  Merge BAMScheduler and IntervalSharder.
- */
-public class IntervalSharder implements Iterator<FilePointer> {
-    /**
-     * The iterator actually laying out the data for BAM scheduling.
-     */
-    private final PeekableIterator<FilePointer> wrappedIterator;
-
-    /**
-     * The parser, for interval manipulation.
-     */
-    private final GenomeLocParser parser;
-
-    public static IntervalSharder shardOverAllReads(final SAMDataSource dataSource, final GenomeLocParser parser) {
-        return new IntervalSharder(BAMScheduler.createOverAllReads(dataSource,parser),parser);
-    }
-
-    public static IntervalSharder shardOverMappedReads(final SAMDataSource dataSource, final GenomeLocParser parser) {
-        return new IntervalSharder(BAMScheduler.createOverMappedReads(dataSource),parser);
-    }
-
-    public static IntervalSharder shardOverIntervals(final SAMDataSource dataSource, final GenomeLocSortedSet loci, final IntervalMergingRule intervalMergeRule) {
-        return new IntervalSharder(BAMScheduler.createOverIntervals(dataSource,intervalMergeRule,loci),loci.getGenomeLocParser());
-    }
-
-    private IntervalSharder(final BAMScheduler scheduler, final GenomeLocParser parser) {
-        wrappedIterator = new PeekableIterator<FilePointer>(scheduler);
-        this.parser = parser;
-    }
-    public void close() {
-      wrappedIterator.close();
-    }
-
-    public boolean hasNext() {
-        return wrappedIterator.hasNext();
-    }
-
-    /**
-     * Accumulate shards where there's no additional cost to processing the next shard in the sequence.
-     * @return The next file pointer to process.
-     */
-    public FilePointer next() {
-        FilePointer current = wrappedIterator.next();
-
-        while ( wrappedIterator.hasNext() &&
-                current.isRegionUnmapped == wrappedIterator.peek().isRegionUnmapped &&
-                (current.getContigIndex() == wrappedIterator.peek().getContigIndex() || current.isRegionUnmapped) &&
-                current.minus(wrappedIterator.peek()) == 0 ) {
-
-            current = current.combine(parser,wrappedIterator.next());
-        }
-
-        return current;
-    }
-
-    public void remove() { throw new UnsupportedOperationException("Unable to remove from an interval sharder."); }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/LocusShard.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/LocusShard.java
deleted file mode 100644
index 28d4faf..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/LocusShard.java
+++ /dev/null
@@ -1,60 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.datasources.reads;
-
-import htsjdk.samtools.SAMFileSpan;
-import org.broadinstitute.gatk.utils.GenomeLoc;
-import org.broadinstitute.gatk.utils.GenomeLocParser;
-import org.broadinstitute.gatk.utils.Utils;
-
-import java.util.List;
-import java.util.Map;
-
-/**
- * Handles locus shards of BAM information.
- * @author aaron
- * @version 1.0
- * @date Apr 7, 2009
- */
-public class LocusShard extends Shard {
-    /**
-     * Create a new locus shard, divided by index.
-     * @param intervals List of intervals to process.
-     * @param fileSpans File spans associated with that interval.
-     */
-    public LocusShard(GenomeLocParser parser, SAMDataSource dataSource, List<GenomeLoc> intervals, Map<SAMReaderID,SAMFileSpan> fileSpans) {
-        super(parser, ShardType.LOCUS, intervals, dataSource, fileSpans, false);
-    }
-
-    /**
-     * String representation of this shard.
-     * @return A string representation of the boundaries of this shard.
-     */
-    @Override
-    public String toString() {
-        return Utils.join(";",getGenomeLocs());
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/LocusShardBalancer.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/LocusShardBalancer.java
deleted file mode 100644
index 6fb4d48..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/LocusShardBalancer.java
+++ /dev/null
@@ -1,58 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.datasources.reads;
-
-import java.util.Iterator;
-
-/**
- * Batch granular file pointers into potentially larger shards.
- */
-public class LocusShardBalancer extends ShardBalancer {
-    /**
-     * Convert iterators of file pointers into balanced iterators of shards.
-     * @return An iterator over balanced shards.
-     */
-    public Iterator<Shard> iterator() {
-        return new Iterator<Shard>() {
-            public boolean hasNext() {
-                return filePointers.hasNext();
-            }
-
-            public Shard next() {
-                FilePointer current = filePointers.next();
-
-                // FilePointers have already been combined as necessary at the IntervalSharder level. No
-                // need to do so again here.
-
-                return new LocusShard(parser,readsDataSource,current.getLocations(),current.fileSpans);
-            }
-
-            public void remove() {
-                throw new UnsupportedOperationException("Unable to remove from shard balancing iterator");
-            }
-        };
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/ReadShard.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/ReadShard.java
deleted file mode 100644
index d4321da..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/ReadShard.java
+++ /dev/null
@@ -1,271 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.datasources.reads;
-
-import htsjdk.samtools.util.PeekableIterator;
-import htsjdk.samtools.*;
-import htsjdk.samtools.util.CloseableIterator;
-import org.broadinstitute.gatk.engine.iterators.GATKSAMIterator;
-import org.broadinstitute.gatk.engine.iterators.GATKSAMIteratorAdapter;
-import org.broadinstitute.gatk.utils.GenomeLoc;
-import org.broadinstitute.gatk.utils.GenomeLocParser;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-
-import java.util.*;
-
-/**
- *
- * User: aaron
- * Date: Apr 10, 2009
- * Time: 5:03:13 PM
- *
- * The Broad Institute
- * SOFTWARE COPYRIGHT NOTICE AGREEMENT 
- * This software and its documentation are copyright 2009 by the
- * Broad Institute/Massachusetts Institute of Technology. All rights are reserved.
- *
- * This software is supplied without any warranty or guaranteed support whatsoever. Neither
- * the Broad Institute nor MIT can be responsible for its use, misuse, or functionality.
- *
- */
-
-/**
- * Expresses a shard of read data in block format.
- *
- * @author mhanna
- * @version 0.1
- */
-public class ReadShard extends Shard {
-
-    /**
-     * Default read shard buffer size
-     */
-    public static final int DEFAULT_MAX_READS = 10000;
-
-    /**
-     * What is the maximum number of reads per BAM file which should go into a read shard.
-     *
-     * TODO: this non-final static variable should either be made final or turned into an
-     * TODO: instance variable somewhere -- as both static and mutable it wreaks havoc
-     * TODO: with tests that use multiple instances of SAMDataSource (since SAMDataSource
-     * TODO: changes this value)
-     */
-    public static int MAX_READS = DEFAULT_MAX_READS;
-
-    /**
-     * The reads making up this shard.
-     */
-    private final Collection<SAMRecord> reads = new ArrayList<SAMRecord>(MAX_READS);
-
-    public ReadShard(GenomeLocParser parser, SAMDataSource readsDataSource, Map<SAMReaderID,SAMFileSpan> fileSpans, List<GenomeLoc> loci, boolean isUnmapped) {
-        super(parser, ShardType.READ, loci, readsDataSource, fileSpans, isUnmapped);
-    }
-
-    /**
-     * Sets the maximum number of reads buffered in a read shard.  Implemented as a weirdly static interface
-     * until we know what effect tuning this parameter has.
-     *
-     * TODO: this mutable static interface is awful and breaks tests -- need to refactor
-     *
-     * @param bufferSize New maximum number
-     */
-    static void setReadBufferSize(final int bufferSize) {
-        MAX_READS = bufferSize;
-    }
-
-    /**
-     * What read buffer size are we using?
-     *
-     * @return
-     */
-    public static int getReadBufferSize() {
-        return MAX_READS;
-    }
-
-    /**
-     * Returns true if this shard is meant to buffer reads, rather
-     * than just holding pointers to their locations.
-     * @return True if this shard can buffer reads.  False otherwise.
-     */
-    public boolean buffersReads() {
-        return true;
-    }
-
-    /**
-     * Returns true if the read buffer is currently full.
-     * @return True if this shard's buffer is full (and the shard can buffer reads).
-     */
-    public boolean isBufferEmpty() {
-        return reads.size() == 0;
-    }
-
-    /**
-     * Returns true if the read buffer is currently full.
-     * @return True if this shard's buffer is full (and the shard can buffer reads).
-     */
-    public boolean isBufferFull() {
-        return reads.size() > ReadShard.MAX_READS;
-    }
-
-    /**
-     * Adds a read to the read buffer.
-     * @param read Add a read to the internal shard buffer.
-     */
-    public void addRead(SAMRecord read) {
-        // DO NOT validate that the buffer is full.  Paired read sharding will occasionally have to stuff another
-        // read or two into the buffer.
-        reads.add(read);
-    }
-
-    /**
-     * Fills this shard's buffer with reads from the iterator passed in
-     *
-     * @param readIter Iterator from which to draw the reads to fill the shard
-     */
-    @Override
-    public void fill( PeekableIterator<SAMRecord> readIter ) {
-        if( ! buffersReads() )
-            throw new ReviewedGATKException("Attempting to fill a non-buffering shard.");
-
-        SAMFileHeader.SortOrder sortOrder = getReadProperties().getSortOrder();
-        SAMRecord read = null;
-
-        while( ! isBufferFull() && readIter.hasNext() ) {
-            final SAMRecord nextRead = readIter.peek();
-            if ( read == null || (nextRead.getReferenceIndex().equals(read.getReferenceIndex())) ) {
-                // only add reads to the shard if they are on the same contig
-                read = readIter.next();
-                addRead(read);
-            } else {
-                break;
-            }
-        }
-
-        // If the reads are sorted in coordinate order, ensure that all reads
-        // having the same alignment start become part of the same shard, to allow
-        // downsampling to work better across shard boundaries. Note that because our
-        // read stream has already been fed through the positional downsampler, which
-        // ensures that at each alignment start position there are no more than dcov
-        // reads, we're in no danger of accidentally creating a disproportionately huge
-        // shard
-        if ( sortOrder == SAMFileHeader.SortOrder.coordinate ) {
-            while ( readIter.hasNext() ) {
-                SAMRecord additionalRead = readIter.peek();
-
-                // Stop filling the shard as soon as we encounter a read having a different
-                // alignment start or contig from the last read added in the earlier loop
-                // above, or an unmapped read
-                if ( read == null ||
-                     additionalRead.getReadUnmappedFlag() ||
-                     ! additionalRead.getReferenceIndex().equals(read.getReferenceIndex()) ||
-                     additionalRead.getAlignmentStart() != read.getAlignmentStart() ) {
-                    break;
-                }
-
-                addRead(readIter.next());
-            }
-        }
-
-        // If the reads are sorted in queryname order, ensure that all reads
-        // having the same queryname become part of the same shard.
-        if( sortOrder == SAMFileHeader.SortOrder.queryname ) {
-            while( readIter.hasNext() ) {
-                SAMRecord nextRead = readIter.peek();
-                if( read == null || ! read.getReadName().equals(nextRead.getReadName()) )
-                    break;
-                addRead(readIter.next());
-            }
-        }
-    }
-
-    /**
-     * Creates an iterator over reads stored in this shard's read cache.
-     * @return
-     */
-    public GATKSAMIterator iterator() {
-        return GATKSAMIteratorAdapter.adapt(reads.iterator());
-    }
-
-    /**
-     * String representation of this shard.
-     * @return A string representation of the boundaries of this shard.
-     */
-    @Override
-    public String toString() {
-        StringBuilder sb = new StringBuilder();
-        for(Map.Entry<SAMReaderID,SAMFileSpan> entry: getFileSpans().entrySet()) {
-            sb.append(entry.getKey());
-            sb.append(": ");
-            sb.append(entry.getValue());
-            sb.append(' ');
-        }
-        return sb.toString();
-    }
-
-    /**
-     * Get the full span from the start of the left most read to the end of the right most one
-     *
-     * Note this may be different than the getLocation() of the shard, as this reflects the
-     * targeted span, not the actual span of reads
-     *
-     * @return the genome loc representing the span of these reads on the genome
-     */
-    public GenomeLoc getReadsSpan() {
-        if ( isUnmapped() || super.getGenomeLocs() == null || reads.isEmpty() )
-            return super.getLocation();
-        else {
-            int start = Integer.MAX_VALUE;
-            int stop = Integer.MIN_VALUE;
-            String contig = null;
-            boolean foundMapped = false;
-
-            for ( final SAMRecord read : reads ) {
-                if ( contig != null && ! read.getReferenceName().equals(contig) )
-                    throw new ReviewedGATKException("ReadShard contains reads spanning contig boundaries, which is no longer allowed. "
-                            + "First contig is " + contig + " next read was " + read.getReferenceName() );
-                contig = read.getReferenceName();
-
-                // Even if this shard as a *whole* is not "unmapped", we can still encounter *individual* unmapped mates
-                // of mapped reads within this shard's buffer. In fact, if we're very unlucky with shard boundaries,
-                // this shard might consist *only* of unmapped mates! We need to refrain from using the alignment
-                // starts/stops of these unmapped mates, and detect the case where the shard has been filled *only*
-                // with unmapped mates.
-                if ( ! read.getReadUnmappedFlag() ) {
-                    foundMapped = true;
-                    if ( read.getAlignmentStart() < start ) start = read.getAlignmentStart();
-                    if ( read.getAlignmentEnd() > stop ) stop = read.getAlignmentEnd();
-                }
-            }
-
-            assert contig != null;
-
-            if ( ! foundMapped || contig.equals("*") ) // all reads are unmapped
-                return GenomeLoc.UNMAPPED;
-            else
-                return parser.createGenomeLoc(contig, start, stop);
-        }
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/ReadShardBalancer.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/ReadShardBalancer.java
deleted file mode 100644
index 4a27219..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/ReadShardBalancer.java
+++ /dev/null
@@ -1,231 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.datasources.reads;
-
-import htsjdk.samtools.util.PeekableIterator;
-import htsjdk.samtools.SAMRecord;
-import org.apache.log4j.Logger;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-
-import java.util.*;
-
-/**
- * Convert from an unbalanced iterator over FilePointers to a balanced iterator over Shards.
- *
- * When processing FilePointers, our strategy is to aggregate all FilePointers for each contig
- * together into one monolithic FilePointer, create one persistent set of read iterators over
- * that monolithic FilePointer, and repeatedly use that persistent set of read iterators to
- * fill read shards with reads.
- *
- * This strategy has several important advantages:
- *
- * 1. We avoid issues with file span overlap. FilePointers that are more granular than a whole
- *    contig will have regions that overlap with other FilePointers on the same contig, due
- *    to the limited granularity of BAM index data. By creating only one FilePointer per contig,
- *    we avoid having to track how much of each file region we've visited (as we did in the
- *    former implementation), we avoid expensive non-sequential access patterns in the files,
- *    and we avoid having to repeatedly re-create our iterator chain for every small region
- *    of interest.
- *
- * 2. We avoid boundary issues with the engine-level downsampling. Since we create a single
- *    persistent set of read iterators (which include the downsampling iterator(s)) per contig,
- *    the downsampling process is never interrupted by FilePointer or Shard boundaries, and never
- *    loses crucial state information while downsampling within a contig.
- *
- * TODO: There is also at least one important disadvantage:
- *
- * 1. We load more BAM index data into memory at once, and this work is done upfront before processing
- *    the next contig, creating a delay before traversal of each contig. This delay may be
- *    compensated for by the gains listed in #1 above, and we may be no worse off overall in
- *    terms of total runtime, but we need to verify this empirically.
- *
- * @author David Roazen
- */
-public class ReadShardBalancer extends ShardBalancer {
-
-    private static Logger logger = Logger.getLogger(ReadShardBalancer.class);
-
-    /**
-     * Convert iterators of file pointers into balanced iterators of shards.
-     * @return An iterator over balanced shards.
-     */
-    public Iterator<Shard> iterator() {
-        return new Iterator<Shard>() {
-            /**
-             * The cached shard to be returned next.  Prefetched in the peekable iterator style.
-             */
-            private Shard nextShard = null;
-
-            /**
-             * The file pointer currently being processed.
-             */
-            private FilePointer currentContigFilePointer = null;
-
-            /**
-             * Iterator over the reads from the current contig's file pointer. The same iterator will be
-             * used to fill all shards associated with a given file pointer
-             */
-            private PeekableIterator<SAMRecord> currentContigReadsIterator = null;
-
-            /**
-             * How many FilePointers have we pulled from the filePointers iterator?
-             */
-            private int totalFilePointersConsumed = 0;
-
-            /**
-             * Have we encountered a monolithic FilePointer?
-             */
-            private boolean encounteredMonolithicFilePointer = false;
-
-
-            {
-                createNextContigFilePointer();
-                advance();
-            }
-
-            public boolean hasNext() {
-                return nextShard != null;
-            }
-
-            public Shard next() {
-                if ( ! hasNext() )
-                    throw new NoSuchElementException("No next read shard available");
-                Shard currentShard = nextShard;
-                advance();
-                return currentShard;
-            }
-
-            private void advance() {
-                nextShard = null;
-
-                // May need multiple iterations to fill the next shard if all reads in current file spans get filtered/downsampled away
-                while ( nextShard == null && currentContigFilePointer != null ) {
-
-                    // If we've exhausted the current file pointer of reads, move to the next file pointer (if there is one):
-                    if ( currentContigReadsIterator != null && ! currentContigReadsIterator.hasNext() ) {
-
-                        // Close the old, exhausted chain of iterators to release resources
-                        currentContigReadsIterator.close();
-
-                        // Advance to the FilePointer for the next contig
-                        createNextContigFilePointer();
-
-                        // We'll need to create a fresh iterator for this file pointer when we create the first
-                        // shard for it below.
-                        currentContigReadsIterator = null;
-                    }
-
-                    // At this point our currentContigReadsIterator may be null or non-null depending on whether or not
-                    // this is our first shard for this file pointer.
-                    if ( currentContigFilePointer != null ) {
-                        Shard shard = new ReadShard(parser,readsDataSource, currentContigFilePointer.fileSpans, currentContigFilePointer.locations, currentContigFilePointer.isRegionUnmapped);
-
-                        // Create a new reads iterator only when we've just advanced to the file pointer for the next
-                        // contig. It's essential that the iterators persist across all shards that share the same contig
-                        // to allow the downsampling to work properly.
-                        if ( currentContigReadsIterator == null ) {
-                            currentContigReadsIterator = new PeekableIterator<SAMRecord>(readsDataSource.getIterator(shard));
-                        }
-
-                        if ( currentContigReadsIterator.hasNext() ) {
-                            shard.fill(currentContigReadsIterator);
-                            nextShard = shard;
-                        }
-                    }
-                }
-            }
-
-            /**
-             * Aggregate all FilePointers for the next contig together into one monolithic FilePointer
-             * to avoid boundary issues with visiting the same file regions more than once (since more
-             * granular FilePointers will have regions that overlap with other nearby FilePointers due
-             * to the nature of BAM indices).
-             *
-             * By creating one persistent set of iterators per contig we also avoid boundary artifacts
-             * in the engine-level downsampling.
-             *
-             * TODO: This FilePointer aggregation should ideally be done at the BAMSchedule level for
-             * TODO: read traversals, as there's little point in the BAMSchedule emitting extremely
-             * TODO: granular FilePointers if we're just going to union them. The BAMSchedule should
-             * TODO: emit one FilePointer per contig for read traversals (but, crucially, NOT for
-             * TODO: locus traversals).
-             */
-            private void createNextContigFilePointer() {
-                currentContigFilePointer = null;
-                List<FilePointer> nextContigFilePointers = new ArrayList<FilePointer>();
-
-                if ( filePointers.hasNext() ) {
-                    logger.info("Loading BAM index data");
-                }
-
-                while ( filePointers.hasNext() ) {
-
-                    // Make sure that if we see a monolithic FilePointer (representing all regions in all files) that
-                    // it is the ONLY FilePointer we ever encounter
-                    if ( encounteredMonolithicFilePointer ) {
-                        throw new ReviewedGATKException("Bug: encountered additional FilePointers after encountering a monolithic FilePointer");
-                    }
-                    if ( filePointers.peek().isMonolithic() ) {
-                        if ( totalFilePointersConsumed > 0 ) {
-                            throw new ReviewedGATKException("Bug: encountered additional FilePointers before encountering a monolithic FilePointer");
-                        }
-                        encounteredMonolithicFilePointer = true;
-                        logger.debug(String.format("Encountered monolithic FilePointer: %s", filePointers.peek()));
-                    }
-
-                    // If this is the first FP we've seen, or we're dealing with mapped regions and the next FP is on the
-                    // same contig as previous FPs, or all our FPs are unmapped, add the next FP to the list of FPs to merge
-                    if ( nextContigFilePointers.isEmpty() ||
-                             (! nextContigFilePointers.get(0).isRegionUnmapped && ! filePointers.peek().isRegionUnmapped &&
-                             nextContigFilePointers.get(0).getContigIndex() == filePointers.peek().getContigIndex()) ||
-                                 (nextContigFilePointers.get(0).isRegionUnmapped && filePointers.peek().isRegionUnmapped) ) {
-
-                        nextContigFilePointers.add(filePointers.next());
-                        totalFilePointersConsumed++;
-                    }
-                    else {
-                        break; // next FilePointer is on a different contig or has different mapped/unmapped status,
-                               // save it for next time
-                    }
-                }
-
-                if ( ! nextContigFilePointers.isEmpty() ) {
-                    currentContigFilePointer = FilePointer.union(nextContigFilePointers, parser);
-                }
-
-                if ( currentContigFilePointer != null ) {
-                    logger.info("Done loading BAM index data");
-                    logger.debug(String.format("Next FilePointer: %s", currentContigFilePointer));
-                }
-            }
-
-            public void remove() {
-                throw new UnsupportedOperationException("Unable to remove from shard balancing iterator");
-            }
-        };
-    }
-
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/SAMDataSource.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/SAMDataSource.java
deleted file mode 100644
index 0fc06fc..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/SAMDataSource.java
+++ /dev/null
@@ -1,1179 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.datasources.reads;
-
-import htsjdk.samtools.MergingSamRecordIterator;
-import htsjdk.samtools.SamFileHeaderMerger;
-import htsjdk.samtools.*;
-import htsjdk.samtools.util.CloseableIterator;
-import htsjdk.samtools.util.RuntimeIOException;
-import org.apache.log4j.Logger;
-import org.broadinstitute.gatk.engine.ReadMetrics;
-import org.broadinstitute.gatk.engine.ReadProperties;
-import org.broadinstitute.gatk.engine.arguments.ValidationExclusion;
-import org.broadinstitute.gatk.engine.downsampling.*;
-import org.broadinstitute.gatk.engine.filters.CountingFilteringIterator;
-import org.broadinstitute.gatk.engine.filters.ReadFilter;
-import org.broadinstitute.gatk.engine.iterators.*;
-import org.broadinstitute.gatk.engine.resourcemanagement.ThreadAllocation;
-import org.broadinstitute.gatk.utils.GenomeLocParser;
-import org.broadinstitute.gatk.utils.GenomeLocSortedSet;
-import org.broadinstitute.gatk.utils.SimpleTimer;
-import org.broadinstitute.gatk.utils.baq.ReadTransformingIterator;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-import org.broadinstitute.gatk.utils.exceptions.UserException;
-import org.broadinstitute.gatk.utils.interval.IntervalMergingRule;
-import org.broadinstitute.gatk.utils.sam.GATKSAMReadGroupRecord;
-import org.broadinstitute.gatk.utils.sam.GATKSamRecordFactory;
-
-import java.io.File;
-import java.lang.reflect.InvocationTargetException;
-import java.lang.reflect.Method;
-import java.util.*;
-import java.util.concurrent.Callable;
-
-/**
- * User: aaron
- * Date: Mar 26, 2009
- * Time: 2:36:16 PM
- * <p/>
- * Converts shards to SAM iterators over the specified region
- */
-public class SAMDataSource {
-    final private static GATKSamRecordFactory factory = new GATKSamRecordFactory();
-
-    /** Backing support for reads. */
-    protected final ReadProperties readProperties;
-
-    /**
-     * Runtime metrics of reads filtered, etc.
-     */
-    private final ReadMetrics readMetrics;
-
-    /**
-     * Tools for parsing GenomeLocs, for verifying BAM ordering against general ordering.
-     */
-    protected final GenomeLocParser genomeLocParser;
-
-    /**
-     * Identifiers for the readers driving this data source.
-     */
-    private final Collection<SAMReaderID> readerIDs;
-
-    /**
-     * How strict are the readers driving this data source.
-     */
-    private final ValidationStringency validationStringency;
-
-    /**
-     * Do we want to remove the program records from this data source?
-     */
-    private final boolean removeProgramRecords;
-
-    /**
-     * Store BAM indices for each reader present.
-     */
-    private final Map<SAMReaderID,GATKBAMIndex> bamIndices = new HashMap<SAMReaderID,GATKBAMIndex>();
-
-    /**
-     * The merged header.
-     */
-    private final SAMFileHeader mergedHeader;
-
-    /**
-     * The constituent headers of the unmerged files.
-     */
-    private final Map<SAMReaderID,SAMFileHeader> headers = new HashMap<SAMReaderID,SAMFileHeader>();
-
-    /**
-     * The sort order of the BAM files.  Files without a sort order tag are assumed to be
-     * in coordinate order.
-     */
-    private SAMFileHeader.SortOrder sortOrder = null;
-
-    /**
-     * Whether the read groups in overlapping files collide.
-     */
-    private final boolean hasReadGroupCollisions;
-
-    /**
-     * Maps the SAM readers' merged read group ids to their original ids. Since merged read group ids
-     * are always unique, we can simply use a map here, no need to stratify by reader.
-     */
-    private final ReadGroupMapping mergedToOriginalReadGroupMappings = new ReadGroupMapping();
-
-    /**
-     * Maps the SAM readers' original read group ids to their revised ids. This mapping must be stratified
-     * by readers, since there can be readgroup id collision: different bam files (readers) can list the
-     * same read group id, which will be disambiguated when these input streams are merged.
-     */
-    private final Map<SAMReaderID,ReadGroupMapping> originalToMergedReadGroupMappings = new HashMap<SAMReaderID,ReadGroupMapping>();
-
-    /**
-     * Mapping from input file path to new sample name. Used only when doing on-the-fly sample renaming.
-     */
-    private Map<String, String> sampleRenameMap = null;
-
-    /** our log, which we want to capture anything from this class */
-    private static Logger logger = Logger.getLogger(SAMDataSource.class);
-
-    /**
-     * A collection of readers driving the merging process.
-     */
-    private final SAMResourcePool resourcePool;
-
-    /**
-     * Asynchronously loads BGZF blocks.
-     */
-    private final BGZFBlockLoadingDispatcher dispatcher;
-
-    /**
-     * How are threads allocated.
-     */
-    private final ThreadAllocation threadAllocation;
-
-    /**
-     * How are adjacent intervals merged by the sharder?
-     */
-    private final IntervalMergingRule intervalMergingRule;
-
-    /**
-     * Static set of unsupported programs that create bam files.
-     * The key is the PG record ID and the value is the name of the tool that created it
-     */
-    private static Map<String, String> unsupportedPGs = new HashMap<>();
-    static {
-        unsupportedPGs.put("GATK ReduceReads", "ReduceReads");
-    }
-
-    /**
-     * Create a new SAM data source given the supplied read metadata.
-     *
-     * For testing purposes
-     *
-     * @param samFiles list of reads files.
-     */
-    public SAMDataSource(Collection<SAMReaderID> samFiles, ThreadAllocation threadAllocation, Integer numFileHandles, GenomeLocParser genomeLocParser) {
-        this(
-                samFiles,
-                threadAllocation,
-                numFileHandles,
-                genomeLocParser,
-                false,
-                ValidationStringency.STRICT,
-                null,
-                null,
-                new ValidationExclusion(),
-                new ArrayList<ReadFilter>(),
-                false);
-    }
-
-    /**
-     * See complete constructor.  Does not enable BAQ by default.
-     *
-     * For testing purposes
-     */
-    public SAMDataSource(
-            Collection<SAMReaderID> samFiles,
-            ThreadAllocation threadAllocation,
-            Integer numFileHandles,
-            GenomeLocParser genomeLocParser,
-            boolean useOriginalBaseQualities,
-            ValidationStringency strictness,
-            Integer readBufferSize,
-            DownsamplingMethod downsamplingMethod,
-            ValidationExclusion exclusionList,
-            Collection<ReadFilter> supplementalFilters,
-            boolean includeReadsWithDeletionAtLoci) {
-        this(   samFiles,
-                threadAllocation,
-                numFileHandles,
-                genomeLocParser,
-                useOriginalBaseQualities,
-                strictness,
-                readBufferSize,
-                downsamplingMethod,
-                exclusionList,
-                supplementalFilters,
-                Collections.<ReadTransformer>emptyList(),
-                includeReadsWithDeletionAtLoci,
-                (byte) -1,
-                false,
-                false,
-                null,
-                IntervalMergingRule.ALL);
-    }
-
-    /**
-     * Create a new SAM data source given the supplied read metadata.
-     * @param samFiles list of reads files.
-     * @param useOriginalBaseQualities True if original base qualities should be used.
-     * @param strictness Stringency of reads file parsing.
-     * @param readBufferSize Number of reads to hold in memory per BAM.
-     * @param downsamplingMethod Method for downsampling reads at a given locus.
-     * @param exclusionList what safety checks we're willing to let slide
-     * @param supplementalFilters additional filters to dynamically apply.
-     * @param includeReadsWithDeletionAtLoci if 'true', the base pileups sent to the walker's map() method
-     *         will explicitly list reads with deletion over the current reference base; otherwise, only observed
-     *        bases will be seen in the pileups, and the deletions will be skipped silently.
-     * @param defaultBaseQualities if the reads have incomplete quality scores, set them all to defaultBaseQuality.
-     * @param keepReadsInLIBS should we keep a unique list of reads in LIBS?
-     * @param sampleRenameMap Map of BAM file to new sample ID used during on-the-fly runtime sample renaming.
-     *                        Will be null if we're not doing sample renaming.
-     * @param intervalMergingRule how are adjacent intervals merged by the sharder
-     */
-    public SAMDataSource(
-            Collection<SAMReaderID> samFiles,
-            ThreadAllocation threadAllocation,
-            Integer numFileHandles,
-            GenomeLocParser genomeLocParser,
-            boolean useOriginalBaseQualities,
-            ValidationStringency strictness,
-            Integer readBufferSize,
-            DownsamplingMethod downsamplingMethod,
-            ValidationExclusion exclusionList,
-            Collection<ReadFilter> supplementalFilters,
-            List<ReadTransformer> readTransformers,
-            boolean includeReadsWithDeletionAtLoci,
-            byte defaultBaseQualities,
-            boolean removeProgramRecords,
-            final boolean keepReadsInLIBS,
-            final Map<String, String> sampleRenameMap,
-            final IntervalMergingRule intervalMergingRule) {
-
-        this.readMetrics = new ReadMetrics();
-        this.genomeLocParser = genomeLocParser;
-        this.intervalMergingRule = intervalMergingRule;
-
-        readerIDs = samFiles;
-
-        this.threadAllocation = threadAllocation;
-        // TODO: Consider a borrowed-thread dispatcher implementation.
-        if(this.threadAllocation.getNumIOThreads() > 0) {
-            logger.info("Running in asynchronous I/O mode; number of threads = " + this.threadAllocation.getNumIOThreads());
-            dispatcher = new BGZFBlockLoadingDispatcher(this.threadAllocation.getNumIOThreads(), numFileHandles != null ? numFileHandles : 1);
-        }
-        else
-            dispatcher = null;
-
-        validationStringency = strictness;
-        this.removeProgramRecords = removeProgramRecords;
-        if(readBufferSize != null)
-            ReadShard.setReadBufferSize(readBufferSize);   // TODO: use of non-final static variable here is just awful, especially for parallel tests
-        else {
-            // Choose a sensible default for the read buffer size.
-            // Previously we we're picked 100000 reads per BAM per shard with a max cap of 250K reads in memory at once.
-            // Now we are simply setting it to 100K reads
-            ReadShard.setReadBufferSize(100000);
-        }
-
-        this.sampleRenameMap = sampleRenameMap;
-
-        resourcePool = new SAMResourcePool(Integer.MAX_VALUE);
-        SAMReaders readers = resourcePool.getAvailableReaders();
-
-        // Determine the sort order.
-        for(SAMReaderID readerID: readerIDs) {
-            if (! readerID.samFile.canRead() )
-                throw new UserException.CouldNotReadInputFile(readerID.samFile,"file is not present or user does not have appropriate permissions.  " +
-                        "Please check that the file is present and readable and try again.");
-
-            // Get the sort order, forcing it to coordinate if unsorted.
-            SAMFileReader reader = readers.getReader(readerID);
-            SAMFileHeader header = reader.getFileHeader();
-
-            headers.put(readerID,header);
-
-            if ( header.getReadGroups().isEmpty() ) {
-                throw new UserException.MalformedBAM(readers.getReaderID(reader).samFile,
-                        "SAM file doesn't have any read groups defined in the header.  The GATK no longer supports SAM files without read groups");
-            }
-
-            SAMFileHeader.SortOrder sortOrder = header.getSortOrder() != SAMFileHeader.SortOrder.unsorted ? header.getSortOrder() : SAMFileHeader.SortOrder.coordinate;
-
-            // Validate that all input files are sorted in the same order.
-            if(this.sortOrder != null && this.sortOrder != sortOrder)
-                throw new UserException.MissortedBAM(String.format("Attempted to process mixed of files sorted as %s and %s.",this.sortOrder,sortOrder));
-
-            // Update the sort order.
-            this.sortOrder = sortOrder;
-        }
-
-        mergedHeader = readers.getMergedHeader();
-        hasReadGroupCollisions = readers.hasReadGroupCollisions();
-
-        readProperties = new ReadProperties(
-                samFiles,
-                mergedHeader,
-                sortOrder,
-                useOriginalBaseQualities,
-                strictness,
-                downsamplingMethod,
-                exclusionList,
-                supplementalFilters,
-                readTransformers,
-                includeReadsWithDeletionAtLoci,
-                defaultBaseQualities,
-                keepReadsInLIBS);
-
-        // cache the read group id (original) -> read group id (merged)
-        // and read group id (merged) -> read group id (original) mappings.
-        for(SAMReaderID id: readerIDs) {
-            SAMFileReader reader = readers.getReader(id);
-
-            ReadGroupMapping mappingToMerged = new ReadGroupMapping();
-
-            List<SAMReadGroupRecord> readGroups = reader.getFileHeader().getReadGroups();
-            for(SAMReadGroupRecord readGroup: readGroups) {
-                if(hasReadGroupCollisions) {
-                    mappingToMerged.put(readGroup.getReadGroupId(),readers.getReadGroupId(id,readGroup.getReadGroupId()));
-                    mergedToOriginalReadGroupMappings.put(readers.getReadGroupId(id,readGroup.getReadGroupId()),readGroup.getReadGroupId());
-                } else {
-                    mappingToMerged.put(readGroup.getReadGroupId(),readGroup.getReadGroupId());
-                    mergedToOriginalReadGroupMappings.put(readGroup.getReadGroupId(),readGroup.getReadGroupId());
-                }
-            }
-
-            originalToMergedReadGroupMappings.put(id,mappingToMerged);
-        }
-
-        for(SAMReaderID id: readerIDs) {
-            File indexFile = findIndexFile(id.samFile);
-            if(indexFile != null)
-                bamIndices.put(id,new GATKBAMIndex(indexFile));
-        }
-
-        resourcePool.releaseReaders(readers);
-    }
-
-    /**
-     * Checks whether the provided SAM header if from a reduced bam file.
-     * @param header the SAM header for a given file
-     * @throws UserException if the header is from a reduced bam
-     */
-    private void checkForUnsupportedBamFile(final SAMFileHeader header) {
-        for ( final SAMProgramRecord PGrecord : header.getProgramRecords() ) {
-            if ( unsupportedPGs.containsKey(PGrecord.getId()) )
-                throw new UserException("The GATK no longer supports running off of BAMs produced by " + unsupportedPGs.get(PGrecord.getId()));
-        }
-    }
-
-    public void close() {
-        SAMReaders readers = resourcePool.getAvailableReaders();
-        for(SAMReaderID readerID: readerIDs) {
-            SAMFileReader reader = readers.getReader(readerID);
-            reader.close();
-        }
-    }
-
-    /**
-     * Returns Reads data structure containing information about the reads data sources placed in this pool as well as
-     * information about how they are downsampled, sorted, and filtered
-     * @return
-     */
-    public ReadProperties getReadsInfo() { return readProperties; }
-
-    /**
-     * Checks to see whether any reads files are supplying data.
-     * @return True if no reads files are supplying data to the traversal; false otherwise.
-     */
-    public boolean isEmpty() {
-        return readProperties.getSAMReaderIDs().size() == 0;
-    }
-
-    /**
-     * Gets the SAM file associated with a given reader ID.
-     * @param id The reader for which to retrieve the source file.
-     * @return the file actually associated with the id.
-     */
-    public File getSAMFile(SAMReaderID id) {
-        return id.samFile;
-    }
-
-    /**
-     * Returns readers used by this data source.
-     * @return A list of SAM reader IDs.
-     */
-    public Collection<SAMReaderID> getReaderIDs() {
-        return readerIDs;
-    }
-
-    /**
-     * Retrieves the id of the reader which built the given read.
-     * @param read The read to test.
-     * @return ID of the reader.
-     */
-    public SAMReaderID getReaderID(SAMRecord read) {
-        return resourcePool.getReaderID(read.getFileSource().getReader());
-    }
-
-    /**
-     * Gets the merged header from the SAM file.
-     * @return The merged header.
-     */
-    public SAMFileHeader getHeader() {
-        return mergedHeader;
-    }
-
-    public SAMFileHeader getHeader(SAMReaderID id) {
-        return headers.get(id);
-    }
-
-    /**
-     * Gets the revised read group id mapped to this 'original' read group id.
-     * @param reader for which to grab a read group.
-     * @param originalReadGroupId ID of the original read group.
-     * @return Merged read group ID.
-     */
-    public String getReadGroupId(final SAMReaderID reader, final String originalReadGroupId) {
-        return originalToMergedReadGroupMappings.get(reader).get(originalReadGroupId);
-    }
-
-    /**
-     * Gets the original read group id (as it was specified in the original input bam file) that maps onto
-     * this 'merged' read group id.
-     * @param mergedReadGroupId 'merged' ID of the read group (as it is presented by the read received from merged input stream).
-     * @return Merged read group ID.
-     */
-    public String getOriginalReadGroupId(final String mergedReadGroupId) {
-        return mergedToOriginalReadGroupMappings.get(mergedReadGroupId);
-    }
-
-    /**
-     * True if all readers have an index.
-     * @return True if all readers have an index.
-     */
-    public boolean hasIndex() {
-        return readerIDs.size() == bamIndices.size();
-    }
-
-    /**
-     * Gets the index for a particular reader.  Always preloaded.
-     * @param id Id of the reader.
-     * @return The index.  Will preload the index if necessary.
-     */
-    public GATKBAMIndex getIndex(final SAMReaderID id) {
-        return bamIndices.get(id);
-    }
-
-    /**
-     * Retrieves the sort order of the readers.
-     * @return Sort order.  Can be unsorted, coordinate order, or query name order.
-     */
-    public SAMFileHeader.SortOrder getSortOrder() {
-        return sortOrder;
-    }
-
-    /**
-     * Gets the cumulative read metrics for shards already processed.
-     * @return Cumulative read metrics.
-     */
-    public ReadMetrics getCumulativeReadMetrics() {
-        // don't return a clone here because the engine uses a pointer to this object
-        return readMetrics;
-    }
-
-    /**
-     * Incorporate the given read metrics into the cumulative read metrics.
-     * @param readMetrics The 'incremental' read metrics, to be incorporated into the cumulative metrics.
-     */
-    public void incorporateReadMetrics(final ReadMetrics readMetrics) {
-        this.readMetrics.incrementMetrics(readMetrics);
-    }
-
-    public GATKSAMIterator seek(Shard shard) {
-        if(shard.buffersReads()) {
-            return shard.iterator();
-        }
-        else {
-            return getIterator(shard);
-        }
-    }
-
-    /**
-     * Gets the reader associated with the given read.
-     * @param readers Available readers.
-     * @param read
-     * @return
-     */
-    private SAMReaderID getReaderID(SAMReaders readers, SAMRecord read) {
-        for(SAMReaderID id: getReaderIDs()) {
-            if(readers.getReader(id) == read.getFileSource().getReader())
-                return id;
-        }
-        throw new ReviewedGATKException("Unable to find id for reader associated with read " + read.getReadName());
-    }
-
-    /**
-     * Get the initial reader positions across all BAM files
-     *
-     * @return the start positions of the first chunk of reads for all BAM files
-     */
-    protected Map<SAMReaderID, GATKBAMFileSpan> getInitialReaderPositions() {
-        Map<SAMReaderID, GATKBAMFileSpan> initialPositions = new HashMap<SAMReaderID, GATKBAMFileSpan>();
-        SAMReaders readers = resourcePool.getAvailableReaders();
-
-        for ( SAMReaderID id: getReaderIDs() ) {
-            initialPositions.put(id, new GATKBAMFileSpan(readers.getReader(id).getFilePointerSpanningReads()));
-        }
-
-        resourcePool.releaseReaders(readers);
-        return initialPositions;
-    }
-
-    /**
-     * Get an iterator over the data types specified in the shard.
-     *
-     * @param shard The shard specifying the data limits.
-     * @return An iterator over the selected data.
-     */
-    protected GATKSAMIterator getIterator( Shard shard ) {
-        return getIterator(resourcePool.getAvailableReaders(), shard, shard instanceof ReadShard);
-    }
-
-    /**
-     * Get an iterator over the data types specified in the shard.
-     * @param readers Readers from which to load data.
-     * @param shard The shard specifying the data limits.
-     * @param enableVerification True to verify.  For compatibility with old sharding strategy.
-     * @return An iterator over the selected data.
-     */
-    private GATKSAMIterator getIterator(SAMReaders readers, Shard shard, boolean enableVerification) {
-        // Set up merging to dynamically merge together multiple BAMs.
-        Map<SAMFileReader,CloseableIterator<SAMRecord>> iteratorMap = new HashMap<SAMFileReader,CloseableIterator<SAMRecord>>();
-
-        for(SAMReaderID id: getReaderIDs()) {
-            CloseableIterator<SAMRecord> iterator = null;
-
-            // TODO: null used to be the signal for unmapped, but we've replaced that with a simple index query for the last bin.
-            // TODO: Kill this check once we've proven that the design elements are gone.
-            if(shard.getFileSpans().get(id) == null)
-                throw new ReviewedGATKException("SAMDataSource: received null location for reader " + id + ", but null locations are no longer supported.");
-
-            try {
-                if(threadAllocation.getNumIOThreads() > 0) {
-                    BlockInputStream inputStream = readers.getInputStream(id);
-                    inputStream.submitAccessPlan(new BAMAccessPlan(id, inputStream, (GATKBAMFileSpan) shard.getFileSpans().get(id)));
-                    BAMRecordCodec codec = new BAMRecordCodec(getHeader(id),factory);
-                    codec.setInputStream(inputStream);
-                    iterator = new BAMCodecIterator(inputStream,readers.getReader(id),codec);
-                }
-                else {
-                    iterator = readers.getReader(id).iterator(shard.getFileSpans().get(id));
-                }
-            } catch ( RuntimeException e ) { // we need to catch RuntimeExceptions here because the Picard code is throwing them (among SAMFormatExceptions) sometimes
-                throw new UserException.MalformedBAM(id.samFile, e.getMessage());
-            }
-
-            iterator = new MalformedBAMErrorReformatingIterator(id.samFile, iterator);
-            if(shard.getGenomeLocs().size() > 0)
-                iterator = new IntervalOverlapFilteringIterator(iterator,shard.getGenomeLocs());
-
-            iteratorMap.put(readers.getReader(id), iterator);
-        }
-
-        MergingSamRecordIterator mergingIterator = readers.createMergingIterator(iteratorMap);
-
-        // The readMetrics object being passed in should be that of this dataSource and NOT the shard: the dataSource's
-        // metrics is intended to keep track of the reads seen (and hence passed to the CountingFilteringIterator when
-        // we apply the decorators), whereas the shard's metrics is used to keep track the "records" seen.
-        return applyDecoratingIterators(readMetrics,
-                enableVerification,
-                readProperties.useOriginalBaseQualities(),
-                new ReleasingIterator(readers,GATKSAMIteratorAdapter.adapt(mergingIterator)),
-                readProperties.getValidationExclusionList().contains(ValidationExclusion.TYPE.NO_READ_ORDER_VERIFICATION),
-                readProperties.getSupplementalFilters(),
-                readProperties.getReadTransformers(),
-                readProperties.defaultBaseQualities(),
-                shard instanceof LocusShard);
-    }
-
-    private class BAMCodecIterator implements CloseableIterator<SAMRecord> {
-        private final BlockInputStream inputStream;
-        private final SAMFileReader reader;
-        private final BAMRecordCodec codec;
-        private SAMRecord nextRead;
-
-        private BAMCodecIterator(final BlockInputStream inputStream, final SAMFileReader reader, final BAMRecordCodec codec) {
-            this.inputStream = inputStream;
-            this.reader = reader;
-            this.codec = codec;
-            advance();
-        }
-
-        public boolean hasNext() {
-            return nextRead != null;
-        }
-
-        public SAMRecord next() {
-            if(!hasNext())
-                throw new NoSuchElementException("Unable to retrieve next record from BAMCodecIterator; input stream is empty");
-            SAMRecord currentRead = nextRead;
-            advance();
-            return currentRead;
-        }
-
-        public void close() {
-            // NO-OP.
-        }
-
-        public void remove() {
-            throw new UnsupportedOperationException("Unable to remove from BAMCodecIterator");
-        }
-
-        private void advance() {
-            final long startCoordinate = inputStream.getFilePointer();
-            nextRead = codec.decode();
-            final long stopCoordinate = inputStream.getFilePointer();
-
-            if(reader != null && nextRead != null)
-                PicardNamespaceUtils.setFileSource(nextRead, new SAMFileSource(reader, new GATKBAMFileSpan(new GATKChunk(startCoordinate, stopCoordinate))));
-        }
-    }
-
-    /**
-     * Filter reads based on user-specified criteria.
-     *
-     * @param readMetrics metrics to track when using this iterator.
-     * @param enableVerification Verify the order of reads.
-     * @param useOriginalBaseQualities True if original base qualities should be used.
-     * @param wrappedIterator the raw data source.
-     * @param noValidationOfReadOrder Another trigger for the verifying iterator?  TODO: look into this.
-     * @param supplementalFilters additional filters to apply to the reads.
-     * @param defaultBaseQualities if the reads have incomplete quality scores, set them all to defaultBaseQuality.
-     * @param isLocusBasedTraversal true if we're dealing with a read stream from a LocusShard
-     * @return An iterator wrapped with filters reflecting the passed-in parameters.  Will not be null.
-     */
-    protected GATKSAMIterator applyDecoratingIterators(ReadMetrics readMetrics,
-                                                        boolean enableVerification,
-                                                        boolean useOriginalBaseQualities,
-                                                        GATKSAMIterator wrappedIterator,
-                                                        Boolean noValidationOfReadOrder,
-                                                        Collection<ReadFilter> supplementalFilters,
-                                                        List<ReadTransformer> readTransformers,
-                                                        byte defaultBaseQualities,
-                                                        boolean isLocusBasedTraversal ) {
-
-        // Always apply the ReadFormattingIterator before both ReadFilters and ReadTransformers. At a minimum,
-        // this will consolidate the cigar strings into canonical form. This has to be done before the read
-        // filtering, because not all read filters will behave correctly with things like zero-length cigar
-        // elements. If useOriginalBaseQualities is true or defaultBaseQualities >= 0, this iterator will also
-        // modify the base qualities.
-        wrappedIterator = new ReadFormattingIterator(wrappedIterator, useOriginalBaseQualities, defaultBaseQualities);
-
-        // Read Filters: these are applied BEFORE downsampling, so that we downsample within the set of reads
-        // that actually survive filtering. Otherwise we could get much less coverage than requested.
-        wrappedIterator = GATKSAMIteratorAdapter.adapt(new CountingFilteringIterator(readMetrics,wrappedIterator,supplementalFilters));
-
-        // Downsampling:
-
-        // For locus traversals where we're downsampling to coverage by sample, assume that the downsamplers
-        // will be invoked downstream from us in LocusIteratorByState. This improves performance by avoiding
-        // splitting/re-assembly of the read stream at this stage, and also allows for partial downsampling
-        // of individual reads.
-        boolean assumeDownstreamLIBSDownsampling = isLocusBasedTraversal &&
-                                                   readProperties.getDownsamplingMethod().type == DownsampleType.BY_SAMPLE &&
-                                                   readProperties.getDownsamplingMethod().toCoverage != null;
-
-        // Apply downsampling iterators here only in cases where we know that LocusIteratorByState won't be
-        // doing any downsampling downstream of us
-        if ( ! assumeDownstreamLIBSDownsampling ) {
-            wrappedIterator = applyDownsamplingIterator(wrappedIterator);
-        }
-
-        // unless they've said not to validate read ordering (!noValidationOfReadOrder) and we've enabled verification,
-        // verify the read ordering by applying a sort order iterator
-        if (!noValidationOfReadOrder && enableVerification)
-            wrappedIterator = new VerifyingSamIterator(wrappedIterator);
-
-        // Read transformers: these are applied last, so that we don't bother transforming reads that get discarded
-        // by the read filters or downsampler.
-        for ( final ReadTransformer readTransformer : readTransformers ) {
-            if ( readTransformer.enabled() && readTransformer.getApplicationTime() == ReadTransformer.ApplicationTime.ON_INPUT )
-                wrappedIterator = new ReadTransformingIterator(wrappedIterator, readTransformer);
-        }
-
-        return wrappedIterator;
-    }
-
-    protected GATKSAMIterator applyDownsamplingIterator( GATKSAMIterator wrappedIterator ) {
-        if ( readProperties.getDownsamplingMethod() == null ||
-             readProperties.getDownsamplingMethod().type == DownsampleType.NONE ) {
-            return wrappedIterator;
-        }
-
-        if ( readProperties.getDownsamplingMethod().toFraction != null ) {
-
-            // If we're downsampling to a fraction of reads, there's no point in paying the cost of
-            // splitting/re-assembling the read stream by sample to run the FractionalDownsampler on
-            // reads from each sample separately, since the result would be the same as running the
-            // FractionalDownsampler on the entire stream. So, ALWAYS use the DownsamplingReadsIterator
-            // rather than the PerSampleDownsamplingReadsIterator, even if BY_SAMPLE downsampling
-            // was requested.
-
-            return new DownsamplingReadsIterator(wrappedIterator,
-                                                 new FractionalDownsampler<SAMRecord>(readProperties.getDownsamplingMethod().toFraction));
-        }
-        else if ( readProperties.getDownsamplingMethod().toCoverage != null ) {
-
-            // If we're downsampling to coverage, we DO need to pay the cost of splitting/re-assembling
-            // the read stream to run the downsampler on the reads for each individual sample separately if
-            // BY_SAMPLE downsampling was requested.
-
-            if ( readProperties.getDownsamplingMethod().type == DownsampleType.BY_SAMPLE ) {
-                return new PerSampleDownsamplingReadsIterator(wrappedIterator,
-                                                              new SimplePositionalDownsamplerFactory<SAMRecord>(readProperties.getDownsamplingMethod().toCoverage));
-            }
-            else if ( readProperties.getDownsamplingMethod().type == DownsampleType.ALL_READS ) {
-                return new DownsamplingReadsIterator(wrappedIterator,
-                                                     new SimplePositionalDownsampler<SAMRecord>(readProperties.getDownsamplingMethod().toCoverage));
-            }
-        }
-
-        return wrappedIterator;
-    }
-
-
-    private class SAMResourcePool {
-        /**
-         * How many entries can be cached in this resource pool?
-         */
-        private final int maxEntries;
-
-        /**
-         * All iterators of this reference-ordered data.
-         */
-        private List<SAMReaders> allResources = new ArrayList<SAMReaders>();
-
-        /**
-         * All iterators that are not currently in service.
-         */
-        private List<SAMReaders> availableResources = new ArrayList<SAMReaders>();
-
-        public SAMResourcePool(final int maxEntries) {
-            this.maxEntries = maxEntries;
-        }
-
-        /**
-         * Choose a set of readers from the pool to use for this query.  When complete,
-         * @return
-         */
-        public synchronized SAMReaders getAvailableReaders() {
-            if(availableResources.size() == 0)
-                createNewResource();
-            SAMReaders readers = availableResources.get(0);
-            availableResources.remove(readers);
-            return readers;
-        }
-
-        public synchronized void releaseReaders(SAMReaders readers) {
-            if(!allResources.contains(readers))
-                throw new ReviewedGATKException("Tried to return readers from the pool that didn't originate in the pool.");
-            availableResources.add(readers);
-        }
-
-        /**
-         * Gets the reader id for the given reader.
-         * @param reader Reader for which to determine the id.
-         * @return id of the given reader.
-         */
-        protected synchronized SAMReaderID getReaderID(SamReader reader) {
-            for(SAMReaders readers: allResources) {
-                SAMReaderID id = readers.getReaderID(reader);
-                if(id != null)
-                    return id;
-            }
-            throw new ReviewedGATKException("No such reader id is available");
-        }
-
-        private synchronized void createNewResource() {
-            if(allResources.size() > maxEntries)
-                throw new ReviewedGATKException("Cannot create a new resource pool.  All resources are in use.");
-            SAMReaders readers = new SAMReaders(readerIDs, validationStringency, removeProgramRecords);
-            allResources.add(readers);
-            availableResources.add(readers);
-        }
-
-    }
-
-    /**
-     * A collection of readers derived from a reads metadata structure.
-     */
-    private class SAMReaders implements Iterable<SAMFileReader> {
-        /**
-         * Cached representation of the merged header used to generate a merging iterator.
-         */
-        private final SamFileHeaderMerger headerMerger;
-
-        /**
-         * Internal storage for a map of id -> reader.
-         */
-        private final Map<SAMReaderID,SAMFileReader> readers = new LinkedHashMap<SAMReaderID,SAMFileReader>();
-
-        /**
-         * The inptu streams backing
-         */
-        private final Map<SAMReaderID,BlockInputStream> inputStreams = new LinkedHashMap<SAMReaderID,BlockInputStream>();
-
-        /**
-         * Derive a new set of readers from the Reads metadata.
-         * @param readerIDs reads to load.
-         * TODO: validationStringency is not used here
-         * @param validationStringency validation stringency.
-         * @param removeProgramRecords indicate whether to clear program records from the readers
-         */
-        public SAMReaders(Collection<SAMReaderID> readerIDs, ValidationStringency validationStringency, boolean removeProgramRecords) {
-            final int totalNumberOfFiles = readerIDs.size();
-            int readerNumber = 1;
-            final SimpleTimer timer = new SimpleTimer().start();
-
-            if ( totalNumberOfFiles > 0 ) logger.info("Initializing SAMRecords in serial");
-            final int tickSize = 50;
-            int nExecutedTotal = 0;
-            long lastTick = timer.currentTime();
-            for(final SAMReaderID readerID: readerIDs) {
-                final ReaderInitializer init = new ReaderInitializer(readerID).call();
-
-                checkForUnsupportedBamFile(init.reader.getFileHeader());
-
-                if (removeProgramRecords) {
-                    init.reader.getFileHeader().setProgramRecords(new ArrayList<SAMProgramRecord>());
-                }
-
-                if (threadAllocation.getNumIOThreads() > 0) {
-                    inputStreams.put(init.readerID, init.blockInputStream); // get from initializer
-                }
-
-                logger.debug(String.format("Processing file (%d of %d) %s...", readerNumber++, totalNumberOfFiles,  readerID.samFile));
-                readers.put(init.readerID,init.reader);
-                if ( ++nExecutedTotal % tickSize == 0) {
-                    double tickInSec = (timer.currentTime() - lastTick) / 1000.0;
-                    printReaderPerformance(nExecutedTotal, tickSize, totalNumberOfFiles, timer, tickInSec);
-                    lastTick = timer.currentTime();
-                }
-            }
-
-            if ( totalNumberOfFiles > 0 ) logger.info(String.format("Done initializing BAM readers: total time %.2f", timer.getElapsedTime()));
-
-            Collection<SAMFileHeader> headers = new LinkedList<SAMFileHeader>();
-
-            // Examine the bam headers, perform any requested sample renaming on them, and add
-            // them to the list of headers to pass to the Picard SamFileHeaderMerger:
-            for ( final Map.Entry<SAMReaderID, SAMFileReader> readerEntry : readers.entrySet() ) {
-                final SAMReaderID readerID = readerEntry.getKey();
-                final SAMFileReader reader = readerEntry.getValue();
-                final SAMFileHeader header = reader.getFileHeader();
-
-                // The remappedSampleName will be null if either no on-the-fly sample renaming was requested,
-                // or the user's sample rename map file didn't contain an entry for this bam file:
-                final String remappedSampleName = sampleRenameMap != null ? sampleRenameMap.get(readerID.getSamFilePath()) : null;
-
-                // If we've been asked to rename the sample for this bam file, do so now. We'll check to
-                // make sure this bam only contains reads from one sample before proceeding.
-                //
-                // IMPORTANT: relies on the fact that the Picard SamFileHeaderMerger makes a copy of
-                //            the existing read group attributes (including sample name) when merging
-                //            headers, regardless of whether there are read group collisions or not.
-                if ( remappedSampleName != null ) {
-                    remapSampleName(readerID, header, remappedSampleName);
-                }
-
-                headers.add(header);
-            }
-
-            headerMerger = new SamFileHeaderMerger(SAMFileHeader.SortOrder.coordinate,headers,true);
-
-            // update all read groups to GATKSAMRecordReadGroups
-            final List<SAMReadGroupRecord> gatkReadGroups = new LinkedList<SAMReadGroupRecord>();
-            for ( final SAMReadGroupRecord rg : headerMerger.getMergedHeader().getReadGroups() ) {
-                gatkReadGroups.add(new GATKSAMReadGroupRecord(rg));
-            }
-            headerMerger.getMergedHeader().setReadGroups(gatkReadGroups);
-        }
-
-        /**
-         * Changes the sample name in the read groups for the provided bam file header to match the
-         * remappedSampleName. Blows up with a UserException if the header contains more than one
-         * sample name.
-         *
-         * @param readerID ID for the bam file from which the provided header came from
-         * @param header The bam file header. Will be modified by this call.
-         * @param remappedSampleName New sample name to replace the existing sample attribute in the
-         *                           read groups for the header.
-         */
-        private void remapSampleName( final SAMReaderID readerID, final SAMFileHeader header, final String remappedSampleName ) {
-            String firstEncounteredSample = null;
-
-            for ( final SAMReadGroupRecord readGroup : header.getReadGroups() ) {
-                final String thisReadGroupSample = readGroup.getSample();
-
-                if ( thisReadGroupSample == null ) {
-                    throw new UserException(String.format("On-the fly sample renaming was requested for bam file %s, however this " +
-                                                          "bam file contains a read group (id: %s) with a null sample attribute",
-                                                          readerID.getSamFilePath(), readGroup.getId()));
-                }
-                else if ( firstEncounteredSample == null ) {
-                    firstEncounteredSample = thisReadGroupSample;
-                }
-                else if ( ! firstEncounteredSample.equals(thisReadGroupSample) ) {
-                    throw new UserException(String.format("On-the-fly sample renaming was requested for bam file %s, " +
-                                                          "however this bam file contains reads from more than one sample " +
-                                                          "(encountered samples %s and %s in the bam header). The GATK requires that " +
-                                                          "all bams for which on-the-fly sample renaming is requested " +
-                                                          "contain reads from only a single sample per bam.",
-                                                          readerID.getSamFilePath(), firstEncounteredSample, thisReadGroupSample));
-                }
-
-                readGroup.setSample(remappedSampleName);
-            }
-        }
-
-        final private void printReaderPerformance(final int nExecutedTotal,
-                                                  final int nExecutedInTick,
-                                                  final int totalNumberOfFiles,
-                                                  final SimpleTimer timer,
-                                                  final double tickDurationInSec) {
-            final int pendingSize = totalNumberOfFiles - nExecutedTotal;
-            final double totalTimeInSeconds = timer.getElapsedTime();
-            final double nTasksPerSecond = nExecutedTotal / (1.0*totalTimeInSeconds);
-            final int nRemaining = pendingSize;
-            final double estTimeToComplete = pendingSize / nTasksPerSecond;
-            logger.info(String.format("Init %d BAMs in last %.2f s, %d of %d in %.2f s / %.2f m (%.2f tasks/s).  %d remaining with est. completion in %.2f s / %.2f m",
-                    nExecutedInTick, tickDurationInSec,
-                    nExecutedTotal, totalNumberOfFiles, totalTimeInSeconds, totalTimeInSeconds / 60, nTasksPerSecond,
-                    nRemaining, estTimeToComplete, estTimeToComplete / 60));
-        }
-
-        /**
-         * Return the header derived from the merging of these BAM files.
-         * @return the merged header.
-         */
-        public SAMFileHeader getMergedHeader() {
-            return headerMerger.getMergedHeader();
-        }
-
-        /**
-         * Do multiple read groups collide in this dataset?
-         * @return True if multiple read groups collide; false otherwis.
-         */
-        public boolean hasReadGroupCollisions() {
-            return headerMerger.hasReadGroupCollisions();
-        }
-
-        /**
-         * Get the newly mapped read group ID for the given read group.
-         * @param readerID Reader for which to discern the transformed ID.
-         * @param originalReadGroupID Original read group.
-         * @return Remapped read group.
-         */
-        public String getReadGroupId(final SAMReaderID readerID, final String originalReadGroupID) {
-            SAMFileHeader header = readers.get(readerID).getFileHeader();
-            return headerMerger.getReadGroupId(header,originalReadGroupID);
-        }
-
-        /**
-         * Creates a new merging iterator from the given map, with the given header.
-         * @param iteratorMap A map of readers to iterators.
-         * @return An iterator which will merge those individual iterators.
-         */
-        public MergingSamRecordIterator createMergingIterator(final Map<SAMFileReader,CloseableIterator<SAMRecord>> iteratorMap) {
-            return new MergingSamRecordIterator(headerMerger,iteratorMap,true);
-        }
-
-        /**
-         * Retrieve the reader from the data structure.
-         * @param id The ID of the reader to retrieve.
-         * @return the reader associated with the given id.
-         */
-        public SAMFileReader getReader(SAMReaderID id) {
-            if(!readers.containsKey(id))
-                throw new NoSuchElementException("No reader is associated with id " + id);
-            return readers.get(id);
-        }
-
-        /**
-         * Retrieve the input stream backing a reader.
-         * @param id The ID of the reader to retrieve.
-         * @return the reader associated with the given id.
-         */
-        public BlockInputStream getInputStream(final SAMReaderID id) {
-            return inputStreams.get(id);
-        }
-
-        /**
-         * Searches for the reader id of this reader.
-         * @param reader Reader for which to search.
-         * @return The id associated the given reader, or null if the reader is not present in this collection.
-         */
-        protected SAMReaderID getReaderID(SamReader reader) {
-            for(Map.Entry<SAMReaderID,SAMFileReader> entry: readers.entrySet()) {
-                if(reader == entry.getValue())
-                    return entry.getKey();
-            }
-            // Not found? return null.
-            return null;
-        }
-
-        /**
-         * Returns an iterator over all readers in this structure.
-         * @return An iterator over readers.
-         */
-        public Iterator<SAMFileReader> iterator() {
-            return readers.values().iterator();
-        }
-
-        /**
-         * Returns whether any readers are present in this structure.
-         * @return
-         */
-        public boolean isEmpty() {
-            return readers.isEmpty();
-        }
-    }
-
-    class ReaderInitializer implements Callable<ReaderInitializer> {
-        final SAMReaderID readerID;
-        BlockInputStream blockInputStream = null;
-        SAMFileReader reader;
-
-        public ReaderInitializer(final SAMReaderID readerID) {
-            this.readerID = readerID;
-        }
-
-        public ReaderInitializer call() {
-            final File indexFile = findIndexFile(readerID.samFile);
-            try {
-                if (threadAllocation.getNumIOThreads() > 0)
-                    blockInputStream = new BlockInputStream(dispatcher,readerID,false);
-                reader = new SAMFileReader(readerID.samFile,indexFile,false);
-            } catch ( RuntimeIOException e ) {
-                throw new UserException.CouldNotReadInputFile(readerID.samFile, e);
-            } catch ( SAMFormatException e ) {
-                throw new UserException.MalformedBAM(readerID.samFile, e.getMessage());
-            }
-            // Picard is throwing a RuntimeException here when BAMs are malformed with bad headers (and so look like SAM files).
-            // Let's keep this separate from the SAMFormatException (which ultimately derives from RuntimeException) case,
-            // just in case we want to change this behavior later.
-            catch ( RuntimeException e ) {
-                throw new UserException.MalformedBAM(readerID.samFile, e.getMessage());
-            }
-            reader.setSAMRecordFactory(factory);
-            reader.enableFileSource(true);
-            reader.setValidationStringency(validationStringency);
-            return this;
-        }
-    }
-
-    private class ReleasingIterator implements GATKSAMIterator {
-        /**
-         * The resource acting as the source of the data.
-         */
-        private final SAMReaders resource;
-
-        /**
-         * The iterator to wrap.
-         */
-        private final GATKSAMIterator wrappedIterator;
-
-        public ReleasingIterator(SAMReaders resource, GATKSAMIterator wrapped) {
-            this.resource = resource;
-            this.wrappedIterator = wrapped;
-        }
-
-        public ReleasingIterator iterator() {
-            return this;
-        }
-
-        public void remove() {
-            throw new UnsupportedOperationException("Can't remove from a GATKSAMIterator");
-        }
-
-        public void close() {
-            wrappedIterator.close();
-            resourcePool.releaseReaders(resource);
-        }
-
-        public boolean hasNext() {
-            return wrappedIterator.hasNext();
-        }
-
-        public SAMRecord next() {
-            return wrappedIterator.next();
-        }
-    }
-
-    /**
-     * Maps read groups in the original SAMFileReaders to read groups in
-     */
-    private class ReadGroupMapping extends HashMap<String,String> {}
-
-    /**
-     * Locates the index file alongside the given BAM, if present.
-     * @param bamFile The data file to use.
-     * @return A File object if the index file is present; null otherwise.
-     */
-    private File findIndexFile(File bamFile) {
-        return SamFiles.findIndex(bamFile);
-    }
-
-    /**
-     * Creates a BAM schedule over all reads in the BAM file, both mapped and unmapped.  The outgoing stream
-     * will be as granular as possible given our current knowledge of the best ways to split up BAM files.
-     * @return An iterator that spans all reads in all BAM files.
-     */
-    public Iterable<Shard> createShardIteratorOverAllReads(final ShardBalancer shardBalancer) {
-        shardBalancer.initialize(this,IntervalSharder.shardOverAllReads(this,genomeLocParser),genomeLocParser);
-        return shardBalancer;
-    }
-
-    /**
-     * Creates a BAM schedule over all mapped reads in the BAM file, when a 'mapped' read is defined as any
-     * read that has been assigned
-     *
-     * @param   shardBalancer  shard balancer object
-     * @return non-null initialized version of the shard balancer
-     */
-    public Iterable<Shard> createShardIteratorOverMappedReads(final ShardBalancer shardBalancer) {
-        shardBalancer.initialize(this,IntervalSharder.shardOverMappedReads(this,genomeLocParser),genomeLocParser);
-        return shardBalancer;
-    }
-
-    /**
-     * Create a schedule for processing the initialized BAM file using the given interval list.
-     * The returned schedule should be as granular as possible.
-     * @param intervals The list of intervals for which to create the schedule.
-     * @return A granular iterator over file pointers.
-     */
-    public Iterable<Shard> createShardIteratorOverIntervals(final GenomeLocSortedSet intervals,final ShardBalancer shardBalancer) {
-        if(intervals == null)
-            throw new ReviewedGATKException("Unable to create schedule from intervals; no intervals were provided.");
-        shardBalancer.initialize(this,IntervalSharder.shardOverIntervals(SAMDataSource.this,intervals,intervalMergingRule),genomeLocParser);
-        return shardBalancer;
-    }
-}
-
-
-
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/SAMReaderID.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/SAMReaderID.java
deleted file mode 100644
index ef5aaa0..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/SAMReaderID.java
+++ /dev/null
@@ -1,125 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.datasources.reads;
-
-import org.broadinstitute.gatk.utils.commandline.Tags;
-
-import java.io.File;
-
-/**
- * Uniquely identifies a SAM file reader.
- *
- * @author mhanna
- * @version 0.1
- */
-public class SAMReaderID implements Comparable {
-    /**
-     * The SAM file at the heart of this reader.  SAMReaderID
-     * currently supports only file-based readers.
-     */
-    protected final File samFile;
-
-    /**
-     * A list of tags associated with this BAM file.
-     */
-    protected final Tags tags;
-
-    /**
-     * Creates an identifier for a SAM file based on read.
-     * @param samFile The source file for SAM data.
-     * @param tags tags to use when creating a reader ID.
-     */
-    public SAMReaderID(File samFile, Tags tags) {
-        this.samFile = samFile;
-        this.tags = tags;
-    }
-
-    /**
-     * Creates an identifier for a SAM file based on read.
-     * @param samFileName The source filename for SAM data.
-     * @param tags tags to use when creating a reader ID.
-     */
-    public SAMReaderID(String samFileName, Tags tags) {
-        this(new File(samFileName),tags);        
-    }
-
-    /**
-     * Gets the absolute pathname of this SAM file
-     * @return  The absolute pathname of this reader's SAM file,
-     *          or null if this reader has no associated SAM file
-     */
-    public String getSamFilePath() {
-        if ( samFile == null ) {
-            return null;
-        }
-
-        return samFile.getAbsolutePath();
-    }
-
-    /**
-     * Gets the tags associated with the given BAM file.
-     * @return A collection of the tags associated with this file.
-     */
-    public Tags getTags() {
-        return tags;
-    }
-
-    /**
-     * Compare two IDs to see whether they're equal.
-     * @param other The other identifier.
-     * @return True iff the two readers point to the same file.
-     */
-    @Override
-    public boolean equals(Object other) {
-        if(other == null) return false;
-        if(!(other instanceof SAMReaderID)) return false;
-
-        SAMReaderID otherID = (SAMReaderID)other;
-        return this.getSamFilePath().equals(otherID.getSamFilePath());
-    }
-
-    /**
-     * Generate a hash code for this object.
-     * @return A hash code, based solely on the file name at this point.
-     */
-    @Override
-    public int hashCode() {
-        return samFile.getAbsolutePath().hashCode();
-    }
-
-    /**
-     * Best string representation for a SAM file reader is the path of the source file.
-     */
-    @Override
-    public String toString() {
-        return getSamFilePath();
-    }
-
-    @Override
-    public int compareTo(Object other) {
-        return this.samFile.getAbsolutePath().compareTo(((SAMReaderID)other).samFile.getAbsolutePath());
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/Shard.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/Shard.java
deleted file mode 100644
index cc8944c..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/Shard.java
+++ /dev/null
@@ -1,253 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.datasources.reads;
-
-import htsjdk.samtools.util.PeekableIterator;
-import htsjdk.samtools.SAMFileSpan;
-import htsjdk.samtools.SAMRecord;
-import org.broadinstitute.gatk.engine.ReadMetrics;
-import org.broadinstitute.gatk.engine.ReadProperties;
-import org.broadinstitute.gatk.engine.iterators.GATKSAMIterator;
-import org.broadinstitute.gatk.utils.GenomeLoc;
-import org.broadinstitute.gatk.utils.GenomeLocParser;
-import org.broadinstitute.gatk.utils.HasGenomeLocation;
-
-import java.util.Collections;
-import java.util.List;
-import java.util.Map;
-/**
- *
- * User: aaron
- * Date: Apr 10, 2009
- * Time: 5:00:27 PM
- *
- * The Broad Institute
- * SOFTWARE COPYRIGHT NOTICE AGREEMENT 
- * This software and its documentation are copyright 2009 by the
- * Broad Institute/Massachusetts Institute of Technology. All rights are reserved.
- *
- * This software is supplied without any warranty or guaranteed support whatsoever. Neither
- * the Broad Institute nor MIT can be responsible for its use, misuse, or functionality.
- *
- */
-
-/**
- * @author aaron
- * @version 1.0
- * @date Apr 10, 2009
- * <p/>
- * Interface Shard
- * <p/>
- * The base abstract class for shards.
- */
-public abstract class Shard implements HasGenomeLocation {
-    public enum ShardType {
-        READ, LOCUS
-    }
-
-    protected final GenomeLocParser parser; // incredibly annoying!
-
-    /**
-     * What type of shard is this?  Read or locus?
-     */
-    protected final ShardType shardType;
-
-    /**
-     * Locations.
-     */
-    protected final List<GenomeLoc> locs;
-
-    /**
-     * Whether the current location is unmapped.
-     */
-    private final boolean isUnmapped;
-
-    /**
-     * Reads data, if applicable.
-     */
-    private final SAMDataSource readsDataSource;
-
-    /**
-     * The data backing the next chunks to deliver to the traversal engine.
-     */
-    private final Map<SAMReaderID,SAMFileSpan> fileSpans;
-
-    /**
-     * Lazy-calculated span of all of the genome locs in this shard
-     */
-    private GenomeLoc spanningLocation = null;
-
-    /**
-     * Statistics about which reads in this shards were used and which were filtered away.
-     */
-    protected final ReadMetrics readMetrics = new ReadMetrics();
-
-    /**
-     * Whether this shard points to an unmapped region.
-     * Some shard types conceptually be unmapped (e.g. LocusShards).  In
-     * this case, isUnmapped should always return false.
-     * @return True if this shard is unmapped.  False otherwise.
-     */
-    public boolean isUnmapped() {
-        return isUnmapped;
-    }    
-
-    public Shard(GenomeLocParser parser,
-                 ShardType shardType,
-                 List<GenomeLoc> locs,
-                 SAMDataSource readsDataSource,
-                 Map<SAMReaderID,SAMFileSpan> fileSpans,
-                 boolean isUnmapped) {
-        this.locs = locs;
-        this.parser = parser;
-        this.shardType = shardType;
-        this.readsDataSource = readsDataSource;
-        this.fileSpans = fileSpans;
-        this.isUnmapped = isUnmapped;        
-    }
-
-    /**
-     * If isUnmapped is true, than getGenomeLocs by
-     * definition will return a singleton list with a GenomeLoc.UNMAPPED
-     *
-     * Can return null, indicating that the entire genome is covered.
-     *
-     * @return the genome location represented by this shard
-     */
-    public List<GenomeLoc> getGenomeLocs() {
-        return locs;
-    }
-
-    /**
-     * Get the list of chunks delimiting this shard.
-     * @return a list of chunks that contain data for this shard.
-     */
-    public Map<SAMReaderID,SAMFileSpan> getFileSpans() {
-        return Collections.unmodifiableMap(fileSpans);
-    }    
-
-    /**
-     * Returns the span of the genomeLocs comprising this shard
-     * @return a GenomeLoc that starts as the first position in getGenomeLocs() and stops at the stop of the last
-     *    position in getGenomeLocs()
-     */
-    public GenomeLoc getLocation() {
-        if ( spanningLocation == null ) {
-            if ( getGenomeLocs() == null )
-                spanningLocation = GenomeLoc.WHOLE_GENOME;
-            else if ( getGenomeLocs().size() == 0 ) {
-                spanningLocation = getGenomeLocs().get(0);
-            } else {
-                int start = Integer.MAX_VALUE;
-                int stop = Integer.MIN_VALUE;
-                String contig = null;
-
-                for ( GenomeLoc loc : getGenomeLocs() ) {
-                    if ( GenomeLoc.isUnmapped(loc) )
-                        // special case the unmapped region marker, just abort out
-                        return loc;
-                    contig = loc.getContig();
-                    if ( loc.getStart() < start ) start = loc.getStart();
-                    if ( loc.getStop() > stop ) stop = loc.getStop();
-                }
-
-                spanningLocation = parser.createGenomeLoc(contig, start, stop);
-            }
-        }
-
-        return spanningLocation;
-    }
-
-
-    /**
-     * what kind of shard do we return
-     * @return ShardType, indicating the type
-     */
-    public ShardType getShardType() {
-        return shardType;
-    }
-
-    /**
-     * Does any releasing / aggregation required when the shard is through being processed.
-     */
-    public void close() {
-        readsDataSource.incorporateReadMetrics(readMetrics);
-    }
-
-    /**
-     * Gets key read validation and filtering properties.
-     * @return set of read properties associated with this shard.
-     */
-    public ReadProperties getReadProperties() {
-        return readsDataSource.getReadsInfo();
-    }
-
-    /**
-     * Gets the runtime metrics associated with this shard.
-     * Retrieves a storage space of metrics about number of reads included, filtered, etc.
-     * @return Storage space for metrics.
-     */
-    public ReadMetrics getReadMetrics() {
-        return readMetrics;
-    }
-
-    /**
-     * Returns true if this shard is meant to buffer reads, rather
-     * than just holding pointers to their locations.
-     * @return True if this shard can buffer reads.  False otherwise.
-     */
-    public boolean buffersReads() { return false; }
-
-    /**
-     * Returns true if the read buffer is currently full.
-     * @return True if this shard's buffer is full (and the shard can buffer reads).
-     */
-    public boolean isBufferEmpty() { throw new UnsupportedOperationException("This shard does not buffer reads."); }
-
-    /**
-     * Returns true if the read buffer is currently full.
-     * @return True if this shard's buffer is full (and the shard can buffer reads).
-     */
-    public boolean isBufferFull() { throw new UnsupportedOperationException("This shard does not buffer reads."); }
-
-    /**
-     * Adds a read to the read buffer.
-     * @param read Add a read to the internal shard buffer.
-     */
-    public void addRead(SAMRecord read) { throw new UnsupportedOperationException("This shard does not buffer reads."); }
-
-    /**
-     * Fills the shard with reads. Can only do this with shards that buffer reads
-     * @param readIter Iterator from which to draw the reads to fill the shard
-     */
-    public void fill( PeekableIterator<SAMRecord> readIter ) { throw new UnsupportedOperationException("This shard does not buffer reads."); }
-
-    /**
-     * Gets the iterator over the elements cached in the shard.
-     * @return
-     */
-    public GATKSAMIterator iterator() { throw new UnsupportedOperationException("This shard does not buffer reads."); }    
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/ShardBalancer.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/ShardBalancer.java
deleted file mode 100644
index 237a380..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/ShardBalancer.java
+++ /dev/null
@@ -1,49 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.datasources.reads;
-
-import htsjdk.samtools.util.PeekableIterator;
-import org.broadinstitute.gatk.utils.GenomeLocParser;
-
-import java.util.Iterator;
-
-/**
- * Balances maximally granular file pointers into shards of reasonable size.
- */
-public abstract class ShardBalancer implements Iterable<Shard> {
-    protected SAMDataSource readsDataSource;
-    protected PeekableIterator<FilePointer> filePointers;
-    protected GenomeLocParser parser;
-
-    public void initialize(final SAMDataSource readsDataSource, final Iterator<FilePointer> filePointers, final GenomeLocParser parser) {
-        this.readsDataSource = readsDataSource;
-        this.filePointers = new PeekableIterator<FilePointer>(filePointers);
-        this.parser = parser;
-    }
-    public void close() {
-      this.filePointers.close();
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/package-info.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/package-info.java
deleted file mode 100644
index f3506f2..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/package-info.java
+++ /dev/null
@@ -1,26 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.datasources.reads;
\ No newline at end of file
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/utilities/BAMFileStat.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/utilities/BAMFileStat.java
deleted file mode 100644
index 95e0341..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/utilities/BAMFileStat.java
+++ /dev/null
@@ -1,185 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.datasources.reads.utilities;
-
-import htsjdk.samtools.BAMIndex;
-import htsjdk.samtools.SAMFileReader;
-import htsjdk.samtools.ValidationStringency;
-import org.broadinstitute.gatk.utils.commandline.Argument;
-import org.broadinstitute.gatk.utils.commandline.CommandLineProgram;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-import org.broadinstitute.gatk.utils.instrumentation.Sizeof;
-
-import java.io.File;
-import java.lang.reflect.Field;
-import java.util.List;
-import java.util.Map;
-
-/**
- *
- *
- * @author mhanna
- * @version 0.1
- */
-public class BAMFileStat extends CommandLineProgram {
-    public enum CommandType { ShowBlocks, ShowIndex }
-
-    @Argument(doc="Which operation to run.",required=true)
-    private CommandType command;
-
-    @Argument(doc="The BAM file to inspect.",required=true)
-    private String bamFileName;
-
-    @Argument(doc="The range to inspect.",required=false)
-    private String range;
-
-    public int execute() {
-        switch(command) {
-            case ShowBlocks:
-                throw new ReviewedGATKException("The BAM block inspector has been disabled.");
-            case ShowIndex:
-                showIndexBins(new File(bamFileName),range);
-                break;
-        }
-        return 0;
-    }
-
-    /**
-     * Required main method implementation.
-     * @param argv Command-line arguments.
-     */
-    public static void main(String[] argv) {
-        try {
-            BAMFileStat instance = new BAMFileStat();
-            start(instance, argv);
-            System.exit(CommandLineProgram.result);
-        } catch (Exception e) {
-            exitSystemWithError(e);
-        }
-    }
-
-    private void showIndexBins(File bamFile,String contigName) {
-        SAMFileReader reader;
-        BAMIndex index;
-
-        reader = new SAMFileReader(bamFile);
-        reader.setValidationStringency(ValidationStringency.SILENT);
-        reader.enableIndexCaching(true);
-        index = reader.getIndex();
-
-        reader.queryOverlapping(contigName,1,reader.getFileHeader().getSequence(contigName).getSequenceLength()).close();
-
-        int numBins = 0;
-        int numChunks = 0;
-        int numLinearIndexEntries = 0;
-
-        try {
-            Field[] fields = index.getClass().getDeclaredFields();
-            for(Field field: fields) {
-                if(field.getName().equals("mLastReferenceRetrieved")) {
-                    field.setAccessible(true);
-                    Integer lastReferenceRetrieved = (Integer)field.get(index);
-                    System.out.printf("Last reference retrieved: %d%n", lastReferenceRetrieved);
-                }
-
-                if(field.getName().equals("mQueriesByReference")) {
-                    field.setAccessible(true);
-                    Map<Integer,Object> cachedQueries = (Map<Integer,Object>)field.get(index);
-
-                    for(Object bamIndexContent: cachedQueries.values()) {
-                        List<Object> bins = null;
-                        Map<Object,Object> binToChunkMap = null;
-                        Object linearIndex = null;
-
-                        Field[] indexContentFields = bamIndexContent.getClass().getDeclaredFields();
-                        for(Field indexContentField: indexContentFields) {
-                            if(indexContentField.getName().equals("mReferenceSequence")) {
-                                indexContentField.setAccessible(true);
-                                System.out.printf("Reference sequence: %d%n", indexContentField.getInt(bamIndexContent));
-                            }
-
-                            if(indexContentField.getName().equals("mBins")) {
-                                indexContentField.setAccessible(true);
-                                bins = (List<Object>)indexContentField.get(bamIndexContent);
-                            }
-
-                            if(indexContentField.getName().equals("mBinToChunks")) {
-                                indexContentField.setAccessible(true);
-                                binToChunkMap = (Map<Object,Object>)indexContentField.get(bamIndexContent);
-                            }
-
-                            if(indexContentField.getName().equals("mLinearIndex")) {
-                                indexContentField.setAccessible(true);
-                                linearIndex = indexContentField.get(bamIndexContent);
-                            }
-                        }
-
-                        numBins = bins.size();
-                        for(Object bin: bins) {
-                            int binNumber;
-
-                            Field[] binFields = bin.getClass().getDeclaredFields();
-                            for(Field binField: binFields) {
-                                if(binField.getName().equals("binNumber")) {
-                                    binField.setAccessible(true);
-                                    binNumber = binField.getInt(bin);
-                                    List<Object> chunks = (List<Object>)binToChunkMap.get(bin);
-                                    System.out.printf("\tBin: %d, number of chunks: %d%n",binNumber,chunks.size());
-                                    for(Object chunk: chunks)
-                                        System.out.printf("\t\tChunk: %s%n",chunk);
-                                    numChunks += chunks.size();
-                                }
-                            }
-                        }
-
-                        Field[] linearIndexFields = linearIndex.getClass().getDeclaredFields();
-                        for(Field linearIndexField: linearIndexFields) {
-                            if(linearIndexField.getName().equals("mIndexEntries")) {
-                                linearIndexField.setAccessible(true);
-                                long[] linearIndexEntries = (long[])linearIndexField.get(linearIndex);
-                                System.out.printf("\t\tIndex entries: %d", linearIndexEntries.length);
-                                for(long indexEntry: linearIndexEntries)
-                                    System.out.printf("%d,",indexEntry);
-                                System.out.printf("%n");
-                                numLinearIndexEntries = linearIndexEntries.length;
-                            }
-                        }
-                    }
-                }
-            }
-        }
-        catch(IllegalAccessException ex) {
-            throw new ReviewedGATKException("Unable to examine cached index",ex);
-        }
-
-        System.out.printf("%nOverall: %d bins, %d chunks, %d linear index entries",numBins,numChunks,numLinearIndexEntries);
-        if(Sizeof.isEnabled())
-            System.out.printf(", total index size in bytes: %d",Sizeof.getObjectGraphSize(index));
-        System.out.println();
-
-        reader.close();
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/utilities/BAMTagRenamer.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/utilities/BAMTagRenamer.java
deleted file mode 100644
index bde44a0..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/utilities/BAMTagRenamer.java
+++ /dev/null
@@ -1,100 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.datasources.reads.utilities;
-
-import htsjdk.samtools.SAMFileReader;
-import htsjdk.samtools.SAMFileWriter;
-import htsjdk.samtools.SAMFileWriterFactory;
-import htsjdk.samtools.SAMRecord;
-import org.broadinstitute.gatk.utils.commandline.Argument;
-import org.broadinstitute.gatk.utils.commandline.CommandLineProgram;
-
-import java.io.File;
-
-/**
- * A simple utility written directly in Picard that will rename tags
- * from one name to another.
- *
- * @author hanna
- * @version 0.1
- */
-
-public class BAMTagRenamer extends CommandLineProgram {
-    @Argument(fullName="input",shortName="I",doc="Input file to process",required=true)
-    private File input = null;
-
-    @Argument(fullName="output",shortName="O",doc="Output file to create",required=true)
-    private File output = null;
-
-    @Argument(fullName="bam_compression",shortName="compress",doc="Compression level to use when writing the BAM file.",required=false)
-    private int compressionLevel = 5;
-
-    @Argument(fullName="original_tag_name",shortName="otn",doc="Tag name to be replaced.",required=true)
-    private String sourceTagName = null;
-
-    @Argument(fullName="replacement_tag_name",shortName="rtn",doc="Tag name to be used as a replacement.",required=true)
-    private String targetTagName = null;
-
-    public int execute() {
-        long readsWritten = 0;
-        long readsAltered = 0;
-
-        SAMFileReader reader = new SAMFileReader(input);
-        SAMFileWriter writer = new SAMFileWriterFactory().makeBAMWriter(reader.getFileHeader(),true,output,compressionLevel);
-
-        for(SAMRecord read: reader) {
-            Object value = read.getAttribute(sourceTagName);
-            if(value != null) {
-                read.setAttribute(sourceTagName,null);
-                read.setAttribute(targetTagName,value);
-                readsAltered++;
-            }
-            writer.addAlignment(read);
-            readsWritten++;
-            if(readsWritten % 1000000 == 0)
-                System.out.printf("%d reads written.  %d tag names updated from %s to %s.%n",readsWritten,readsAltered,sourceTagName,targetTagName);
-        }
-
-        writer.close();
-        System.out.printf("%d reads written.  %d tag names updated from %s to %s.%n",readsWritten,readsAltered,sourceTagName,targetTagName);        
-
-        return 0;
-    }
-
-    /**
-     * Required main method implementation.
-     */
-    public static void main(String[] argv) {
-        BAMTagRenamer instance = new BAMTagRenamer();
-        try {
-            start(instance, argv);
-        } catch (Exception e) {
-            throw new RuntimeException(e);
-        }
-
-        System.exit(CommandLineProgram.result);
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/utilities/FindLargeShards.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/utilities/FindLargeShards.java
deleted file mode 100644
index 9105b4c..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/utilities/FindLargeShards.java
+++ /dev/null
@@ -1,192 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.datasources.reads.utilities;
-
-import htsjdk.samtools.reference.IndexedFastaSequenceFile;
-import org.apache.log4j.Logger;
-import org.broadinstitute.gatk.utils.commandline.CommandLineProgram;
-import org.broadinstitute.gatk.utils.commandline.Input;
-import org.broadinstitute.gatk.utils.commandline.Output;
-import org.broadinstitute.gatk.engine.datasources.reads.FilePointer;
-import org.broadinstitute.gatk.engine.datasources.reads.IntervalSharder;
-import org.broadinstitute.gatk.engine.datasources.reads.SAMDataSource;
-import org.broadinstitute.gatk.engine.datasources.reads.SAMReaderID;
-import org.broadinstitute.gatk.engine.resourcemanagement.ThreadAllocation;
-import org.broadinstitute.gatk.utils.GenomeLoc;
-import org.broadinstitute.gatk.utils.GenomeLocParser;
-import org.broadinstitute.gatk.utils.GenomeLocSortedSet;
-import org.broadinstitute.gatk.utils.interval.IntervalMergingRule;
-import org.broadinstitute.gatk.utils.interval.IntervalUtils;
-import org.broadinstitute.gatk.utils.text.ListFileUtils;
-
-import java.io.File;
-import java.io.IOException;
-import java.io.PrintStream;
-import java.math.BigInteger;
-import java.util.ArrayList;
-import java.util.List;
-
-/**
- * Traverses a region in a dataset looking for outliers.
- */
-public class FindLargeShards extends CommandLineProgram {
-    private static Logger logger = Logger.getLogger(FindLargeShards.class);
-
-    @Input(fullName = "input_file", shortName = "I", doc = "SAM or BAM file(s)", required = false)
-    public List<String> samFiles = new ArrayList<String>();
-
-    @Input(fullName = "reference_sequence", shortName = "R", doc = "Reference sequence file", required = false)
-    public File referenceFile = null;
-
-    @Input(fullName = "intervals", shortName = "L", doc = "A list of genomic intervals over which to operate. Can be explicitly specified on the command line or in a file.",required=false)
-    public List<String> intervals = null;
-
-    @Output(required=false)
-    public PrintStream out = System.out;
-
-    /**
-     * The square of the sum of all uncompressed data.  Based on the BAM spec, the size of this could be
-     * up to (2^64)^2.
-     */
-    private BigInteger sumOfSquares = BigInteger.valueOf(0);
-
-    /**
-     * The running sum of all uncompressed data.  Based on the BAM spec, the BAM must be less than Long.MAX_LONG
-     * when compressed -- in other words, the sum of the sizes of all BGZF blocks must be < 2^64.
-     */
-    private BigInteger sum = BigInteger.valueOf(0);
-
-    /**
-     * The number of shards viewed.
-     */
-    private long numberOfShards;
-
-
-    @Override
-    public int execute() throws IOException {
-        // initialize reference
-        IndexedFastaSequenceFile refReader = new IndexedFastaSequenceFile(referenceFile);
-        GenomeLocParser genomeLocParser = new GenomeLocParser(refReader);        
-
-        // initialize reads
-        List<SAMReaderID> bamReaders = ListFileUtils.unpackBAMFileList(samFiles,parser);
-        SAMDataSource dataSource = new SAMDataSource(bamReaders,new ThreadAllocation(),null,genomeLocParser);
-
-        // intervals
-        final GenomeLocSortedSet intervalSortedSet;
-        if ( intervals != null )
-            intervalSortedSet = IntervalUtils.sortAndMergeIntervals(genomeLocParser, IntervalUtils.parseIntervalArguments(genomeLocParser, intervals), IntervalMergingRule.ALL);
-        else
-            intervalSortedSet = GenomeLocSortedSet.createSetFromSequenceDictionary(refReader.getSequenceDictionary());
-
-        logger.info(String.format("PROGRESS: Calculating mean and variance: Contig\tRegion.Start\tRegion.Stop\tSize"));        
-
-        IntervalSharder sharder = IntervalSharder.shardOverIntervals(dataSource,intervalSortedSet,IntervalMergingRule.ALL);
-        while(sharder.hasNext()) {
-            FilePointer filePointer = sharder.next();
-
-            // Size of the file pointer.
-            final long size = filePointer.size();            
-
-            BigInteger bigSize = BigInteger.valueOf(size);
-            sumOfSquares = sumOfSquares.add(bigSize.pow(2));
-            sum = sum.add(bigSize);
-            numberOfShards++;
-
-            if(numberOfShards % 1000 == 0) {
-                GenomeLoc boundingRegion = getBoundingRegion(filePointer,genomeLocParser);
-                logger.info(String.format("PROGRESS: Calculating mean and variance: %s\t%d\t%d\t%d",boundingRegion.getContig(),boundingRegion.getStart(),boundingRegion.getStop(),size));
-            }
-
-        }
-
-        // Print out the stddev: (sum(x^2) - (1/N)*sum(x)^2)/N
-        long mean = sum.divide(BigInteger.valueOf(numberOfShards)).longValue();
-        long stddev = (long)(Math.sqrt(sumOfSquares.subtract(sum.pow(2).divide(BigInteger.valueOf(numberOfShards))).divide(BigInteger.valueOf(numberOfShards)).doubleValue()));
-        logger.info(String.format("Number of shards: %d; mean uncompressed size = %d; stddev uncompressed size  = %d%n",numberOfShards,mean,stddev));
-
-        // Crank through the shards again, this time reporting on the shards significantly larger than the mean.
-        long threshold = mean + stddev*5;
-        logger.warn(String.format("PROGRESS: Searching for large shards: Contig\tRegion.Start\tRegion.Stop\tSize"));
-        out.printf("Contig\tRegion.Start\tRegion.Stop\tSize%n");
-
-        sharder =  IntervalSharder.shardOverIntervals(dataSource,intervalSortedSet,IntervalMergingRule.ALL);
-        while(sharder.hasNext()) {
-            FilePointer filePointer = sharder.next();
-
-            // Bounding region.
-            GenomeLoc boundingRegion = getBoundingRegion(filePointer,genomeLocParser);
-
-            // Size of the file pointer.
-            final long size = filePointer.size();            
-
-            numberOfShards++;
-
-            if(filePointer.size() <= threshold) {
-                if(numberOfShards % 1000 == 0) 
-                    logger.info(String.format("PROGRESS: Searching for large shards: %s\t%d\t%d\t%d",boundingRegion.getContig(),boundingRegion.getStart(),boundingRegion.getStop(),size));
-                continue;
-            }
-
-            out.printf("%s\t%d\t%d\t%d%n",boundingRegion.getContig(),boundingRegion.getStart(),boundingRegion.getStop(),size);
-        }
-
-        return 0;
-    }
-
-    private GenomeLoc getBoundingRegion(final FilePointer filePointer, final GenomeLocParser genomeLocParser) {
-        List<GenomeLoc> regions = filePointer.getLocations();
-
-        // The region contained by this FilePointer.
-        final String contig = regions.get(0).getContig();
-        final int start = regions.get(0).getStart();
-        final int stop = regions.get(regions.size()-1).getStop();
-
-        return genomeLocParser.createGenomeLoc(contig,start,stop);
-    }
-
-    /**
-     * Required main method implementation.
-     * @param argv Command-line argument text.
-     * @throws Exception on error.
-     */
-    public static void main(String[] argv) throws Exception {
-        int returnCode = 0;
-        try {
-            FindLargeShards instance = new FindLargeShards();
-            start(instance, argv);
-            returnCode = 0;
-        }
-        catch(Exception ex) {
-            returnCode = 1;
-            ex.printStackTrace();
-            throw ex;
-        }
-        finally {
-            System.exit(returnCode);
-        }
-    }    
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/utilities/PrintBAMRegion.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/utilities/PrintBAMRegion.java
deleted file mode 100644
index b0842e1..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/utilities/PrintBAMRegion.java
+++ /dev/null
@@ -1,113 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.datasources.reads.utilities;
-
-import htsjdk.samtools.*;
-import org.broadinstitute.gatk.utils.commandline.Argument;
-import org.broadinstitute.gatk.utils.commandline.CommandLineProgram;
-import org.broadinstitute.gatk.utils.exceptions.UserException;
-
-import java.io.File;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
-
-/**
- * Created by IntelliJ IDEA.
- * User: mhanna
- * Date: Feb 25, 2011
- * Time: 3:25:13 PM
- * To change this template use File | Settings | File Templates.
- */
-public class PrintBAMRegion extends CommandLineProgram {
-    @Argument(fullName="input",shortName="I",doc="Input file to process",required=true)
-    private File input = null;
-
-    @Argument(fullName="region",shortName="R",doc="BAM region to process, in chunk format (mmmm:nn-xxxx:yy)",required=true)
-    private String region;
-
-    private static final long MIN_BLOCK_SIZE = 0;
-    private static final long MAX_BLOCK_SIZE = (long)Math.pow(2,48)-1;
-    private static final int MIN_OFFSET_SIZE = 0;
-    private static final int MAX_OFFSET_SIZE = (int)Math.pow(2,16)-1;
-
-    public int execute() {
-        SAMFileReader reader = new SAMFileReader(input);
-        reader.setValidationStringency(ValidationStringency.SILENT);
-
-        Pattern regionPattern = Pattern.compile("(\\d+):(\\d+)-(\\d+):(\\d+)");
-        Matcher matcher = regionPattern.matcher(region);
-        if(!matcher.matches())
-            throw new UserException("BAM region to process must be in chunk format (mmmm:nn-xxxx:yy)");
-
-        long firstBlock = Long.parseLong(matcher.group(1));
-        int firstOffset = Integer.parseInt(matcher.group(2));
-        long lastBlock = Long.parseLong(matcher.group(3));
-        int lastOffset = Integer.parseInt(matcher.group(4));
-
-        if(firstBlock < MIN_BLOCK_SIZE || firstBlock > MAX_BLOCK_SIZE)
-            throw new UserException(String.format("First block is invalid; must be between %d and %d; actually is %d",MIN_BLOCK_SIZE,MAX_BLOCK_SIZE,firstBlock));
-        if(lastBlock < MIN_BLOCK_SIZE || lastBlock > MAX_BLOCK_SIZE)
-            throw new UserException(String.format("Last block is invalid; must be between %d and %d; actually is %d",MIN_BLOCK_SIZE,MAX_BLOCK_SIZE,lastBlock));
-        if(firstOffset < MIN_OFFSET_SIZE || firstOffset > MAX_OFFSET_SIZE)
-            throw new UserException(String.format("First offset is invalid; must be between %d and %d; actually is %d",MIN_OFFSET_SIZE,MAX_OFFSET_SIZE,firstOffset));
-        if(lastOffset < MIN_OFFSET_SIZE || lastOffset > MAX_OFFSET_SIZE)
-            throw new UserException(String.format("Last offset is invalid; must be between %d and %d; actually is %d",MIN_OFFSET_SIZE,MAX_OFFSET_SIZE,lastOffset));
-
-        GATKChunk chunk = new GATKChunk(firstBlock<<16 | firstOffset,lastBlock<<16 | lastOffset);
-        GATKBAMFileSpan fileSpan = new GATKBAMFileSpan(chunk);
-
-        SAMRecordIterator iterator = reader.iterator(fileSpan);
-        long readCount = 0;
-        while(iterator.hasNext()) {
-            System.out.printf("%s%n",iterator.next().format());
-            readCount++;
-        }
-        System.out.printf("%d reads shown.",readCount);
-
-        iterator.close();
-        reader.close();
-
-        return 0;
-    }
-
-
-    /**
-     * Required main method implementation.
-     * @param argv Command-line argument text.
-     * @throws Exception on error.
-     */
-    public static void main(String[] argv) throws Exception {
-        try {
-            PrintBAMRegion instance = new PrintBAMRegion();
-            start(instance, argv);
-            System.exit(0);
-        }
-        catch(Exception ex) {
-            ex.printStackTrace();
-            System.exit(1);
-        }
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/utilities/PrintBGZFBounds.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/utilities/PrintBGZFBounds.java
deleted file mode 100644
index 807e038..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/utilities/PrintBGZFBounds.java
+++ /dev/null
@@ -1,137 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.datasources.reads.utilities;
-
-import org.broadinstitute.gatk.utils.commandline.Argument;
-import org.broadinstitute.gatk.utils.commandline.CommandLineProgram;
-
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.IOException;
-import java.nio.ByteBuffer;
-import java.nio.ByteOrder;
-
-/**
- * Calculates the bounds of each BGZF block in a BAM index file, along with
- */
-public class PrintBGZFBounds extends CommandLineProgram {
-    @Argument(fullName="input",shortName="I",doc="Input bai file to process",required=true)
-    private File input = null;
-
-    private final int BYTE_SIZE_IN_BYTES = Byte.SIZE / 8;
-    private final int INT_SIZE_IN_BYTES = Integer.SIZE / 8;
-    private final int SHORT_SIZE_IN_BYTES = INT_SIZE_IN_BYTES / 2;
-
-    /**
-     * ID1 + ID2 + CM + FLG + MTIME + XFL + OS + XLEN.
-     */
-    private final int HEADER_SIZE = BYTE_SIZE_IN_BYTES*4+INT_SIZE_IN_BYTES+BYTE_SIZE_IN_BYTES*2+SHORT_SIZE_IN_BYTES + BYTE_SIZE_IN_BYTES*2 + SHORT_SIZE_IN_BYTES*2;;
-
-    /**
-     * CRC32 + ISIZE
-     */
-    private final int FOOTER_SIZE = INT_SIZE_IN_BYTES*2;
-
-    @Override
-    public int execute() throws IOException {
-        FileInputStream fis = new FileInputStream(input);
-        ByteBuffer headerBuffer = allocateBuffer(HEADER_SIZE);
-        ByteBuffer footerBuffer = allocateBuffer(FOOTER_SIZE);
-
-        float compressedSize = 0;
-        float uncompressedSize = 0;
-        long totalBlocks = 0;
-
-        //SAMFileReader reader = new SAMFileReader(input);
-
-        while(true) {
-            final long blockStart = fis.getChannel().position();
-
-            int totalRead = fis.getChannel().read(headerBuffer);
-            if(totalRead <= 0)
-                break;
-            headerBuffer.flip();
-
-            // Read out header information, including subfield IDs.
-            headerBuffer.position(headerBuffer.capacity()-BYTE_SIZE_IN_BYTES*2);
-            final int cDataSize = headerBuffer.getShort()-HEADER_SIZE-FOOTER_SIZE+1;
-            compressedSize += cDataSize;
-
-            // Skip past body.
-            fis.getChannel().position(fis.getChannel().position()+cDataSize);
-
-            // Read the footer
-            fis.getChannel().read(footerBuffer);
-            footerBuffer.flip();
-
-            // Retrieve the uncompressed size from the footer.
-            footerBuffer.position(footerBuffer.capacity()-INT_SIZE_IN_BYTES);
-            uncompressedSize += footerBuffer.getInt();
-
-            // Reset buffers for subsequent reads.
-            headerBuffer.flip();
-            footerBuffer.flip();
-
-            totalBlocks++;
-
-            final long blockStop = fis.getChannel().position() - 1;
-
-            System.out.printf("BGZF block %d: [%d-%d]%n",totalBlocks,blockStart,blockStop);
-        }
-
-        System.out.printf("SUCCESS!  Average compressed block size = %f, average uncompressed size = %f, compressed/uncompressed ratio: %f%n",compressedSize/totalBlocks,uncompressedSize/totalBlocks,compressedSize/uncompressedSize);
-
-        return 0;
-    }
-
-    private ByteBuffer allocateBuffer(final int size) {
-        ByteBuffer buffer = ByteBuffer.allocate(size);
-        buffer.order(ByteOrder.LITTLE_ENDIAN);
-        return buffer;
-    }
-
-    /**
-     * Required main method implementation.
-     * @param argv Command-line argument text.
-     * @throws Exception on error.
-     */
-    public static void main(String[] argv) throws Exception {
-        int returnCode = 0;
-        try {
-            PrintBGZFBounds instance = new PrintBGZFBounds();
-            start(instance, argv);
-            returnCode = 0;
-        }
-        catch(Exception ex) {
-            returnCode = 1;
-            ex.printStackTrace();
-            throw ex;
-        }
-        finally {
-            System.exit(returnCode);
-        }
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/utilities/UnzipSingleBlock.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/utilities/UnzipSingleBlock.java
deleted file mode 100644
index d65b779..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/utilities/UnzipSingleBlock.java
+++ /dev/null
@@ -1,89 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.datasources.reads.utilities;
-
-import htsjdk.samtools.util.BlockGunzipper;
-import org.broadinstitute.gatk.utils.commandline.CommandLineProgram;
-import org.broadinstitute.gatk.utils.commandline.Input;
-
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.IOException;
-import java.lang.reflect.InvocationTargetException;
-import java.lang.reflect.Method;
-
-/**
- * Test decompression of a single BGZF block.
- */
-public class UnzipSingleBlock extends CommandLineProgram {
-    @Input(fullName = "block_file", shortName = "b", doc = "block file over which to test unzipping", required = true)
-    private File blockFile;
-
-    @Input(fullName = "compressed_block_size", shortName = "cbs", doc = "size of compressed block", required = true)
-    private int compressedBufferSize;
-
-    public int execute() throws IOException, NoSuchMethodException, IllegalAccessException, InvocationTargetException {
-        byte[] compressedBuffer = new byte[(int)blockFile.length()];
-        byte[] uncompressedBuffer = new byte[65536];
-
-        FileInputStream fis = new FileInputStream(blockFile);
-        fis.read(compressedBuffer);
-        fis.close();
-
-        BlockGunzipper gunzipper = new BlockGunzipper();
-        gunzipper.setCheckCrcs(true);
-        Method unzipBlock = BlockGunzipper.class.getDeclaredMethod("unzipBlock",byte[].class,byte[].class,Integer.TYPE);
-        unzipBlock.setAccessible(true);
-
-        unzipBlock.invoke(gunzipper,uncompressedBuffer,compressedBuffer,compressedBufferSize);
-
-        System.out.printf("SUCCESS!%n");
-
-        return 0;
-    }
-
-    /**
-     * Required main method implementation.
-     * @param argv Command-line argument text.
-     * @throws Exception on error.
-     */
-    public static void main(String[] argv) throws Exception {
-        int returnCode = 0;
-        try {
-            UnzipSingleBlock instance = new UnzipSingleBlock();
-            start(instance, argv);
-            returnCode = 0;
-        }
-        catch(Exception ex) {
-            returnCode = 1;
-            ex.printStackTrace();
-            throw ex;
-        }
-        finally {
-            System.exit(returnCode);
-        }
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/utilities/package-info.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/utilities/package-info.java
deleted file mode 100644
index 65a909a..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/reads/utilities/package-info.java
+++ /dev/null
@@ -1,26 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.datasources.reads.utilities;
\ No newline at end of file
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/reference/ReferenceDataSource.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/reference/ReferenceDataSource.java
deleted file mode 100644
index 6fdbea3..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/reference/ReferenceDataSource.java
+++ /dev/null
@@ -1,199 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.datasources.reference;
-
-import htsjdk.samtools.reference.IndexedFastaSequenceFile;
-import htsjdk.samtools.SAMSequenceRecord;
-import org.broadinstitute.gatk.engine.datasources.reads.LocusShard;
-import org.broadinstitute.gatk.engine.datasources.reads.SAMDataSource;
-import org.broadinstitute.gatk.engine.datasources.reads.Shard;
-import org.broadinstitute.gatk.utils.GenomeLoc;
-import org.broadinstitute.gatk.utils.GenomeLocParser;
-import org.broadinstitute.gatk.utils.GenomeLocSortedSet;
-import org.broadinstitute.gatk.utils.exceptions.UserException;
-import org.broadinstitute.gatk.utils.fasta.CachingIndexedFastaSequenceFile;
-
-import java.io.File;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.List;
-
-/**
- * Loads reference data from fasta file
- * Looks for fai and dict files, and tries to create them if they don't exist
- */
-public class ReferenceDataSource {
-    private IndexedFastaSequenceFile reference;
-
-    /** our log, which we want to capture anything from this class */
-    protected static final org.apache.log4j.Logger logger = org.apache.log4j.Logger.getLogger(ReferenceDataSource.class);
-
-    /**
-     * Create reference data source from fasta file
-     * @param fastaFile Fasta file to be used as reference
-     */
-    public ReferenceDataSource(File fastaFile) {
-        // does the fasta file exist? check that first...
-        if (!fastaFile.exists())
-            throw new UserException("The fasta file you specified (" + fastaFile.getAbsolutePath() + ") does not exist.");
-
-        final boolean isGzipped = fastaFile.getAbsolutePath().endsWith(".gz");
-        if ( isGzipped ) {
-            throw new UserException.CannotHandleGzippedRef();
-        }
-
-        final File indexFile = new File(fastaFile.getAbsolutePath() + ".fai");
-
-        // determine the name for the dict file
-        final String fastaExt = fastaFile.getAbsolutePath().endsWith("fa") ? "\\.fa$" : "\\.fasta$";
-        final File dictFile = new File(fastaFile.getAbsolutePath().replaceAll(fastaExt, ".dict"));
-
-        // It's an error if either the fai or dict file does not exist. The user is now responsible
-        // for creating these files.
-        if (!indexFile.exists()) {
-            throw new UserException.MissingReferenceFaiFile(indexFile, fastaFile);
-        }
-        if (!dictFile.exists()) {
-            throw new UserException.MissingReferenceDictFile(dictFile, fastaFile);
-        }
-
-        // Read reference data by creating an IndexedFastaSequenceFile.
-        try {
-            reference = new CachingIndexedFastaSequenceFile(fastaFile);
-        }
-        catch (IllegalArgumentException e) {
-            throw new UserException.CouldNotReadInputFile(fastaFile, "Could not read reference sequence.  The FASTA must have either a .fasta or .fa extension", e);
-        }
-        catch (Exception e) {
-            throw new UserException.CouldNotReadInputFile(fastaFile, e);
-        }
-    }
-
-    /**
-     * Get indexed fasta file
-     * @return IndexedFastaSequenceFile that was created from file
-     */
-    public IndexedFastaSequenceFile getReference() {
-        return this.reference;
-    }
-
-    /**
-     * Creates an iterator for processing the entire reference.
-     * @param readsDataSource the reads datasource to embed in the locus shard.
-     * @param parser used to generate/regenerate intervals.  TODO: decouple the creation of the shards themselves from the creation of the driving iterator so that datasources need not be passed to datasources.
-     * @param maxShardSize The maximum shard size which can be used to create this list.
-     * @return Creates a schedule for performing a traversal over the entire reference.
-     */
-    public Iterable<Shard> createShardsOverEntireReference(final SAMDataSource readsDataSource, final GenomeLocParser parser, final int maxShardSize) {
-        List<Shard> shards = new ArrayList<Shard>();
-        for(SAMSequenceRecord refSequenceRecord: reference.getSequenceDictionary().getSequences()) {
-            for(int shardStart = 1; shardStart <= refSequenceRecord.getSequenceLength(); shardStart += maxShardSize) {
-                final int shardStop = Math.min(shardStart+maxShardSize-1, refSequenceRecord.getSequenceLength());
-                shards.add(new LocusShard(parser,
-                        readsDataSource,
-                        Collections.singletonList(parser.createGenomeLoc(refSequenceRecord.getSequenceName(),shardStart,shardStop)),
-                        null));
-            }
-        }
-        return shards;
-    }
-
-
-    public Iterable<Shard> createShardsOverIntervals(final SAMDataSource readsDataSource, final GenomeLocSortedSet intervals, final int maxShardSize) {
-        List<Shard> shards = new ArrayList<Shard>();
-
-        for(GenomeLoc interval: intervals) {
-            while(interval.size() > maxShardSize) {
-                shards.add(new LocusShard(intervals.getGenomeLocParser(),
-                        readsDataSource,
-                        Collections.singletonList(intervals.getGenomeLocParser().createGenomeLoc(interval.getContig(),interval.getStart(),interval.getStart()+maxShardSize-1)),
-                        null));
-                interval = intervals.getGenomeLocParser().createGenomeLoc(interval.getContig(),interval.getStart()+maxShardSize,interval.getStop());
-            }
-            shards.add(new LocusShard(intervals.getGenomeLocParser(),
-                    readsDataSource,
-                    Collections.singletonList(interval),
-                    null));
-        }
-
-        return shards;
-    }
-
-
-    /**
-     * Creates an iterator for processing the entire reference.
-     * @param readsDataSource  the reads datasource to embed in the locus shard.  TODO: decouple the creation of the shards themselves from the creation of the driving iterator so that datasources need not be passed to datasources.
-     * @param intervals        the list of intervals to use when processing the reference.
-     * @param targetShardSize  the suggested - and maximum - shard size which can be used to create this list; we will merge intervals greedily so that we generate shards up to but not greater than the target size.
-     * @return Creates a schedule for performing a traversal over the entire reference.
-     */
-/*
-    public Iterable<Shard> createShardsOverIntervals(final SAMDataSource readsDataSource, final GenomeLocSortedSet intervals, final int targetShardSize) {
-        final List<Shard> shards = new ArrayList<Shard>();
-        final GenomeLocParser parser = intervals.getGenomeLocParser();
-        LinkedList<GenomeLoc> currentIntervals = new LinkedList<GenomeLoc>();
-
-        for(GenomeLoc interval: intervals) {
-            // if the next interval is too big, we can safely shard currentInterval and then break down this one
-            if (interval.size() > targetShardSize) {
-                if (!currentIntervals.isEmpty())
-                    shards.add(createShardFromInterval(currentIntervals, readsDataSource, parser));
-                while(interval.size() > targetShardSize) {
-                    final GenomeLoc partialInterval = parser.createGenomeLoc(interval.getContig(), interval.getStart(), interval.getStart()+targetShardSize-1);
-                    shards.add(createShardFromInterval(Collections.singletonList(partialInterval), readsDataSource, parser));
-                    interval = parser.createGenomeLoc(interval.getContig(), interval.getStart() + targetShardSize, interval.getStop());
-                }
-                currentIntervals = new LinkedList<GenomeLoc>();
-                currentIntervals.add(interval);
-            }
-            // otherwise, we need to check whether we can merge this interval with currentInterval (and either shard currentInterval or merge accordingly)
-            else {
-                if (currentIntervals.isEmpty()) {
-                    currentIntervals.add(interval);
-                }
-                else {
-                    if (currentIntervals.getLast().compareContigs(interval) != 0 || interval.getStop() - currentIntervals.getLast().getStart() + 1 > targetShardSize) {
-                        shards.add(createShardFromInterval(currentIntervals, readsDataSource, parser));
-                        currentIntervals = new LinkedList<GenomeLoc>();
-                    }
-                    currentIntervals.add(interval);
-                }
-            }
-        }
-        if (!currentIntervals.isEmpty())
-            shards.add(createShardFromInterval(currentIntervals, readsDataSource, parser));
-        return shards;
-    }
-
-    private static Shard createShardFromInterval(final List<GenomeLoc> intervals, final SAMDataSource readsDataSource, final GenomeLocParser parser) {
-        //logger.debug("Adding shard " + interval);
-        return new LocusShard(parser,
-                readsDataSource,
-                intervals,
-                null);
-    }
-*/
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/reference/package-info.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/reference/package-info.java
deleted file mode 100644
index 581d213..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/reference/package-info.java
+++ /dev/null
@@ -1,26 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.datasources.reference;
\ No newline at end of file
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/rmd/DataStreamSegment.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/rmd/DataStreamSegment.java
deleted file mode 100644
index 2543c42..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/rmd/DataStreamSegment.java
+++ /dev/null
@@ -1,32 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.datasources.rmd;
-
-/**
- * Marker interface that represents an arbitrary consecutive segment within a data stream.
- */
-interface DataStreamSegment {
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/rmd/EntireStream.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/rmd/EntireStream.java
deleted file mode 100644
index eba5b53..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/rmd/EntireStream.java
+++ /dev/null
@@ -1,32 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.datasources.rmd;
-
-/**
- * Models the entire stream of data.
- */
-class EntireStream implements DataStreamSegment {
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/rmd/MappedStreamSegment.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/rmd/MappedStreamSegment.java
deleted file mode 100644
index 0344ff0..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/rmd/MappedStreamSegment.java
+++ /dev/null
@@ -1,48 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.datasources.rmd;
-
-import org.broadinstitute.gatk.utils.GenomeLoc;
-import org.broadinstitute.gatk.utils.HasGenomeLocation;
-
-/**
- * Models a mapped position within a stream of GATK input data.
- */
-class MappedStreamSegment implements DataStreamSegment, HasGenomeLocation {
-    public final GenomeLoc locus;
-
-    /**
-     * Retrieves the first location covered by a mapped stream segment.
-     * @return Location of the first base in this segment.
-     */
-    public GenomeLoc getLocation() {
-        return locus;
-    }
-
-    public MappedStreamSegment(GenomeLoc locus) {
-        this.locus = locus;
-    }
-}
\ No newline at end of file
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/rmd/ReferenceOrderedDataPool.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/rmd/ReferenceOrderedDataPool.java
deleted file mode 100644
index 762eb0b..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/rmd/ReferenceOrderedDataPool.java
+++ /dev/null
@@ -1,153 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.datasources.rmd;
-
-import htsjdk.samtools.SAMSequenceDictionary;
-import org.broadinstitute.gatk.engine.refdata.SeekableRODIterator;
-import org.broadinstitute.gatk.engine.refdata.tracks.RMDTrack;
-import org.broadinstitute.gatk.engine.refdata.tracks.RMDTrackBuilder;
-import org.broadinstitute.gatk.engine.refdata.utils.FlashBackIterator;
-import org.broadinstitute.gatk.engine.refdata.utils.LocationAwareSeekableRODIterator;
-import org.broadinstitute.gatk.engine.refdata.utils.RMDTriplet;
-import org.broadinstitute.gatk.utils.GenomeLoc;
-import org.broadinstitute.gatk.utils.GenomeLocParser;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-
-import java.util.List;
-
-/**
- * A pool of reference-ordered data iterators.
- */
-class ReferenceOrderedDataPool extends ResourcePool<LocationAwareSeekableRODIterator, LocationAwareSeekableRODIterator> {
-    // the reference-ordered data itself.
-    private final RMDTriplet fileDescriptor;
-
-    // our tribble track builder
-    private final RMDTrackBuilder builder;
-
-    /**
-     * The header from this RMD, if present.
-     */
-    private final Object header;
-
-    /**
-     * The sequence dictionary from this ROD.  If no sequence dictionary is present, this dictionary will be the same as the reference's.
-     */
-    private final SAMSequenceDictionary sequenceDictionary;
-
-    boolean flashbackData = false;
-    public ReferenceOrderedDataPool(RMDTriplet fileDescriptor,RMDTrackBuilder builder,SAMSequenceDictionary sequenceDictionary, GenomeLocParser genomeLocParser,boolean flashbackData) {
-        super(sequenceDictionary,genomeLocParser);
-        this.fileDescriptor = fileDescriptor;
-        this.builder = builder;
-        this.flashbackData = flashbackData;
-
-        // prepopulate one RMDTrack
-        LocationAwareSeekableRODIterator iterator = createNewResource();
-        this.addNewResource(iterator);
-
-        // Pull the proper header and sequence dictionary from the prepopulated track.
-        this.header = iterator.getHeader();
-        this.sequenceDictionary = iterator.getSequenceDictionary();
-    }
-
-    /**
-     * Gets the header used by this resource pool.
-     * @return Header used by this resource pool.
-     */
-    public Object getHeader() {
-        return header;
-    }
-
-    /**
-     * Gets the sequence dictionary built into the ROD index file.
-     * @return Sequence dictionary from the index file.
-     */
-    public SAMSequenceDictionary getSequenceDictionary() {
-        return sequenceDictionary;
-    }
-
-    /**
-     * Create a new iterator from the existing reference-ordered data.  This new iterator is expected
-     * to be completely independent of any other iterator.
-     * @return The newly created resource.
-     */
-    public LocationAwareSeekableRODIterator createNewResource() {
-        if(numIterators() > 0)
-            throw new ReviewedGATKException("BUG: Tried to create multiple iterators over streaming ROD interface");
-        RMDTrack track = builder.createInstanceOfTrack(fileDescriptor);
-        LocationAwareSeekableRODIterator iter = new SeekableRODIterator(track.getHeader(),track.getSequenceDictionary(),referenceSequenceDictionary,genomeLocParser,track.getIterator());
-        return (flashbackData) ? new FlashBackIterator(iter) : iter;
-    }
-
-    /**
-     * Finds the best existing ROD iterator from the pool.  In this case, the best existing ROD is defined as
-     * the first one encountered that is at or before the given position.
-     * @param segment @{inheritedDoc}
-     * @param resources @{inheritedDoc}
-     * @return @{inheritedDoc}
-     */
-    public LocationAwareSeekableRODIterator selectBestExistingResource( DataStreamSegment segment, List<LocationAwareSeekableRODIterator> resources ) {
-        if(segment instanceof MappedStreamSegment) {
-            GenomeLoc position = ((MappedStreamSegment)segment).getLocation();
-
-            for( LocationAwareSeekableRODIterator RODIterator : resources ) {
-
-                if( (RODIterator.position() == null && RODIterator.hasNext()) ||
-                    (RODIterator.position() != null && RODIterator.position().isBefore(position)) )
-                    return RODIterator;
-                if (RODIterator.position() != null && RODIterator instanceof FlashBackIterator && ((FlashBackIterator)RODIterator).canFlashBackTo(position)) {
-                    ((FlashBackIterator)RODIterator).flashBackTo(position);
-                    return RODIterator;
-                }
-
-            }
-            return null;
-        }
-        else if(segment instanceof EntireStream) {
-            // Asking for a segment over the entire stream, so by definition, there is no best existing resource.
-            // Force the system to create a new one.
-            return null;
-        }
-        else {
-            throw new ReviewedGATKException("Unable to find a ROD iterator for segments of type " + segment.getClass());
-        }
-    }
-
-    /**
-     * In this case, the iterator is the resource.  Pass it through.
-     */
-    public LocationAwareSeekableRODIterator createIteratorFromResource( DataStreamSegment segment, LocationAwareSeekableRODIterator resource ) {
-        return resource;
-    }
-
-    /**
-     * kill the buffers in the iterator
-     */
-    public void closeResource( LocationAwareSeekableRODIterator resource ) {
-        if (resource instanceof FlashBackIterator) ((FlashBackIterator)resource).close();
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/rmd/ReferenceOrderedDataSource.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/rmd/ReferenceOrderedDataSource.java
deleted file mode 100644
index 9d9e7c8..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/rmd/ReferenceOrderedDataSource.java
+++ /dev/null
@@ -1,256 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.datasources.rmd;
-
-import htsjdk.samtools.SAMSequenceDictionary;
-import org.broadinstitute.gatk.utils.commandline.Tags;
-import org.broadinstitute.gatk.engine.refdata.SeekableRODIterator;
-import org.broadinstitute.gatk.engine.refdata.tracks.RMDTrack;
-import org.broadinstitute.gatk.engine.refdata.tracks.RMDTrackBuilder;
-import org.broadinstitute.gatk.engine.refdata.utils.LocationAwareSeekableRODIterator;
-import org.broadinstitute.gatk.engine.refdata.utils.RMDTriplet;
-import org.broadinstitute.gatk.utils.GenomeLoc;
-import org.broadinstitute.gatk.utils.GenomeLocParser;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-import org.broadinstitute.gatk.utils.exceptions.UserException;
-
-import java.io.File;
-import java.io.FileNotFoundException;
-import java.io.IOException;
-import java.lang.reflect.Type;
-import java.util.List;
-
-/**
- * A data source which provides a single type of reference-ordered data.
- */
-public class ReferenceOrderedDataSource {
-    /**
-     * The reference-ordered data itself.
-     */
-    private final RMDTriplet fileDescriptor;
-
-    /**
-     * The header associated with this VCF, if any.
-     */
-    private final Object header;
-
-    /**
-     * The private sequence dictionary associated with this RMD.
-     */
-    private final SAMSequenceDictionary sequenceDictionary;
-
-    /**
-     * The builder to use when constructing new reference-ordered data readers.
-     */
-    private final RMDTrackBuilder builder;
-
-    /**
-     * A pool of iterators for navigating through the genome.
-     */
-    private final ResourcePool<?,LocationAwareSeekableRODIterator> iteratorPool;
-
-    /**
-     * Create a new reference-ordered data source.
-     */
-    public ReferenceOrderedDataSource(RMDTriplet fileDescriptor,
-                                      RMDTrackBuilder builder,
-                                      SAMSequenceDictionary referenceSequenceDictionary,
-                                      GenomeLocParser genomeLocParser,
-                                      boolean flashbackData ) {
-        this.fileDescriptor = fileDescriptor;
-        this.builder = builder;
-
-        // TODO: Unify the two blocks of code below by creating a ReferenceOrderedDataPool base class of a coherent type (not RMDTrack for one and SeekableIterator for the other).
-        if (fileDescriptor.getStorageType() != RMDTriplet.RMDStorageType.STREAM) {
-            iteratorPool = new ReferenceOrderedQueryDataPool(fileDescriptor,
-                                                             builder,
-                                                             referenceSequenceDictionary,
-                                                             genomeLocParser);
-            this.header = ((ReferenceOrderedQueryDataPool)iteratorPool).getHeader();
-            this.sequenceDictionary = ((ReferenceOrderedQueryDataPool)iteratorPool).getSequenceDictionary();
-        }
-        else {
-            iteratorPool = new ReferenceOrderedDataPool(fileDescriptor,
-                                                        builder,
-                                                        referenceSequenceDictionary,
-                                                        genomeLocParser,
-                                                        flashbackData);
-            this.header = ((ReferenceOrderedDataPool)iteratorPool).getHeader();
-            this.sequenceDictionary = ((ReferenceOrderedDataPool)iteratorPool).getSequenceDictionary();
-        }
-    }
-
-    /**
-     * Return the name of the underlying reference-ordered data.
-     * @return Name of the underlying rod.
-     */
-    public String getName() {
-        return fileDescriptor.getName();
-    }
-
-    public Class getType() {
-        return builder.getFeatureManager().getByTriplet(fileDescriptor).getCodecClass();
-    }
-
-    public Class getRecordType() {
-        return builder.getFeatureManager().getByTriplet(fileDescriptor).getFeatureClass();
-    }
-
-    public File getFile() {
-        return new File(fileDescriptor.getFile());
-    }
-
-    public Object getHeader() {
-        return header;
-    }
-
-    public Tags getTags() {
-        return fileDescriptor.getTags();
-    }
-    
-    public String getTagValue( final String key ) {
-        return fileDescriptor.getTags().getValue( key );
-    }
-
-
-    /**
-     * Retrieves the sequence dictionary created by this ROD.
-     * @return
-     */
-    public SAMSequenceDictionary getSequenceDictionary() {
-        return sequenceDictionary;
-    }
-
-    /**
-     * helper function for determining if we are the same track based on name and record type
-     *
-     * @param name the name to match
-     * @param type the type to match
-     *
-     * @return true on a match, false if the name or type is different
-     */
-    public boolean matchesNameAndRecordType(String name, Type type) {
-        return (name.equals(fileDescriptor.getName()) && (type.getClass().isAssignableFrom(getType().getClass())));
-    }
-
-    /**
-     * Seek to the specified position and return an iterator through the data.
-     *
-     * @param loc GenomeLoc that points to the selected position.
-     *
-     * @return Iterator through the data.
-     */
-    public LocationAwareSeekableRODIterator seek(GenomeLoc loc) {
-        DataStreamSegment dataStreamSegment = loc != null ? new MappedStreamSegment(loc) : new EntireStream();
-        return iteratorPool.iterator(dataStreamSegment);
-    }
-
-
-    /**
-     * Close the specified iterator, returning it to the pool.
-     * @param iterator Iterator to close.
-     */
-    public void close( LocationAwareSeekableRODIterator iterator ) {
-        iteratorPool.release(iterator);
-    }
-
-}
-
-/**
- * a data pool for the new query based RODs
- */
-class ReferenceOrderedQueryDataPool extends ResourcePool<RMDTrack,LocationAwareSeekableRODIterator> {
-    // the reference-ordered data itself.
-    private final RMDTriplet fileDescriptor;
-
-    // our tribble track builder
-    private final RMDTrackBuilder builder;
-
-    /**
-     * The header from this RMD, if present.
-     */
-    private final Object header;
-
-    /**
-     * The sequence dictionary from this ROD.  If no sequence dictionary is present, this dictionary will be the same as the reference's.
-     */
-    private final SAMSequenceDictionary sequenceDictionary;
-
-    public ReferenceOrderedQueryDataPool(RMDTriplet fileDescriptor, RMDTrackBuilder builder, SAMSequenceDictionary referenceSequenceDictionary, GenomeLocParser genomeLocParser) {
-        super(referenceSequenceDictionary,genomeLocParser);
-        this.fileDescriptor = fileDescriptor;
-        this.builder = builder;
-
-        // prepopulate one RMDTrack
-        RMDTrack track = builder.createInstanceOfTrack(fileDescriptor);
-        this.addNewResource(track);
-
-        // Pull the proper header and sequence dictionary from the prepopulated track.
-        this.header = track.getHeader();
-        this.sequenceDictionary = track.getSequenceDictionary();
-    }
-
-    public Object getHeader() {
-        return header;
-    }
-
-    public SAMSequenceDictionary getSequenceDictionary() {
-        return sequenceDictionary;
-    }
-
-    @Override
-    protected RMDTrack createNewResource() {
-        return builder.createInstanceOfTrack(fileDescriptor);
-    }
-
-    @Override
-    protected RMDTrack selectBestExistingResource(DataStreamSegment segment, List<RMDTrack> availableResources) {
-        for (RMDTrack reader : availableResources)
-            if (reader != null) return reader;
-        return null;
-    }
-
-    @Override
-    protected LocationAwareSeekableRODIterator createIteratorFromResource(DataStreamSegment position, RMDTrack track) {
-        try {
-            if (position instanceof MappedStreamSegment) {
-                GenomeLoc pos = ((MappedStreamSegment) position).locus;
-                return new SeekableRODIterator(header,sequenceDictionary,referenceSequenceDictionary,genomeLocParser,track.query(pos));
-            } else {
-                return new SeekableRODIterator(header,sequenceDictionary,referenceSequenceDictionary,genomeLocParser,track.getIterator());
-            }
-        } catch (FileNotFoundException e) {
-            throw new UserException.CouldNotReadInputFile(fileDescriptor.getName(), "it could not be found");
-        } catch (IOException e) {
-            throw new ReviewedGATKException("Unable to create iterator for rod named " + fileDescriptor.getName(),e);
-        }
-    }
-
-    @Override
-    protected void closeResource(RMDTrack track) {
-        track.close();
-    }
-}
\ No newline at end of file
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/rmd/ResourcePool.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/rmd/ResourcePool.java
deleted file mode 100644
index 7d6e9c0..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/rmd/ResourcePool.java
+++ /dev/null
@@ -1,188 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.datasources.rmd;
-
-import htsjdk.samtools.SAMSequenceDictionary;
-import htsjdk.samtools.util.CloseableIterator;
-import org.broadinstitute.gatk.utils.GenomeLocParser;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-
-import java.util.*;
-
-/**
- * A pool of open resources, all of which can create a closeable iterator.
- */
-abstract class ResourcePool <T,I extends CloseableIterator> {
-    /**
-     * Sequence dictionary.
-     */
-    protected final SAMSequenceDictionary referenceSequenceDictionary;
-
-    /**
-     * Builder/parser for GenomeLocs.
-     */
-    protected final GenomeLocParser genomeLocParser;
-
-    /**
-     * All iterators of this reference-ordered data.
-     */
-    private List<T> allResources = new ArrayList<T>();
-
-    /**
-     * All iterators that are not currently in service.
-     */
-    private List<T> availableResources = new ArrayList<T>();
-
-    /**
-     * Which iterators are assigned to which pools.
-     */
-    private Map<I,T> resourceAssignments = new HashMap<I,T>();
-
-    protected ResourcePool(SAMSequenceDictionary referenceSequenceDictionary,GenomeLocParser genomeLocParser) {
-        this.referenceSequenceDictionary = referenceSequenceDictionary;
-        this.genomeLocParser = genomeLocParser;
-    }
-
-    /**
-     * Get an iterator whose position is before the specified location.  Create a new one if none exists.
-     * @param segment Target position for the iterator.
-     * @return An iterator that can traverse the selected region.  Should be able to iterate concurrently with other
-     *         iterators from tihs pool.
-     */
-    public I iterator( DataStreamSegment segment ) {
-        // Grab the first iterator in the list whose position is before the requested position.
-        T selectedResource = null;
-        synchronized (this) {
-            selectedResource = selectBestExistingResource(segment, availableResources);
-
-            // No iterator found?  Create another.  It is expected that
-            // each iterator created will have its own file handle.
-            if (selectedResource == null) {
-                selectedResource = createNewResource();
-                addNewResource(selectedResource);
-            }
-
-            // Remove the iterator from the list of available iterators.
-            availableResources.remove(selectedResource);
-        }
-
-
-        I iterator = createIteratorFromResource(segment, selectedResource);
-
-        // also protect the resource assignment
-        synchronized (this) {
-            // Make a note of this assignment for proper releasing later.
-            resourceAssignments.put(iterator, selectedResource);
-        }
-
-        return iterator;
-    }
-
-    /**
-     * Release the lock on the given iterator, returning it to the pool.
-     * @param iterator Iterator to return to the pool.
-     */
-    public void release( I iterator ) {
-        synchronized(this) {
-            // Find and remove the resource from the list of allocated resources.
-            T resource = resourceAssignments.get( iterator );
-            Object obj = resourceAssignments.remove(iterator);
-
-            // Close the iterator.
-            iterator.close();
-
-            // make sure we actually removed the assignment
-            if (obj == null)
-                    throw new ReviewedGATKException("Failed to remove resource assignment; target key had no associated value in the resource assignment map");
-            // Return the resource to the pool.
-            if( !allResources.contains(resource) )
-                throw new ReviewedGATKException("Iterator does not belong to the given pool.");
-            availableResources.add(resource);
-        }
-    }
-
-    /**
-     * Add a resource to the list of available resources.  Useful if derived classes
-     * want to seed the pool with a set of at a given time (like at initialization).
-     * @param resource The new resource to add.
-     */
-    protected void addNewResource( T resource ) {
-        synchronized(this) {
-            allResources.add(resource);
-            availableResources.add(resource);
-        }
-    }
-
-    /**
-     * If no appropriate resources are found in the pool, the system can create a new resource.
-     * Delegate the creation of the resource to the subclass.
-     * @return The new resource created.
-     */
-    protected abstract T createNewResource();
-
-    /**
-     * Find the most appropriate resource to acquire the specified data.
-     * @param segment The data over which the resource is required.
-     * @param availableResources A list of candidate resources to evaluate.
-     * @return The best choice of the availableResources, or null if no resource meets the criteria.
-     */
-    protected abstract T selectBestExistingResource( DataStreamSegment segment, List<T> availableResources );
-
-    /**
-     * Create an iterator over the specified resource.
-     * @param position The bounds of iteration.  The first element of the iterator through the last element should all
-     *                 be in the range described by position.
-     * @param resource The resource from which to derive the iterator.
-     * @return A new iterator over the given data.
-     */
-    protected abstract I createIteratorFromResource( DataStreamSegment position, T resource );
-
-    /**
-     * Retire this resource from service.
-     * @param resource The resource to retire.
-     */
-    protected abstract void closeResource(T resource);
-
-    /**
-     * Operating stats...get the number of total iterators.  Package-protected
-     * for unit testing.
-     * @return An integer number of total iterators.
-     */
-    int numIterators() {
-        return allResources.size();
-    }
-
-    /**
-     * Operating stats...get the number of available iterators.  Package-protected
-     * for unit testing.
-     * @return An integer number of available iterators.
-     */
-    int numAvailableIterators() {
-        return availableResources.size();
-    }
-
-}
-
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/rmd/package-info.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/rmd/package-info.java
deleted file mode 100644
index 41b7e53..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/datasources/rmd/package-info.java
+++ /dev/null
@@ -1,26 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.datasources.rmd;
\ No newline at end of file
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/downsampling/AlleleBiasedDownsamplingUtils.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/downsampling/AlleleBiasedDownsamplingUtils.java
deleted file mode 100644
index 0bcf4ee..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/downsampling/AlleleBiasedDownsamplingUtils.java
+++ /dev/null
@@ -1,369 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.downsampling;
-
-import org.apache.log4j.Logger;
-import org.broadinstitute.gatk.utils.BaseUtils;
-import org.broadinstitute.gatk.utils.MathUtils;
-import org.broadinstitute.gatk.utils.collections.DefaultHashMap;
-import org.broadinstitute.gatk.utils.exceptions.GATKException;
-import org.broadinstitute.gatk.utils.exceptions.UserException;
-import org.broadinstitute.gatk.utils.pileup.PileupElement;
-import org.broadinstitute.gatk.utils.pileup.ReadBackedPileup;
-import org.broadinstitute.gatk.utils.pileup.ReadBackedPileupImpl;
-import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
-import org.broadinstitute.gatk.utils.text.XReadLines;
-import htsjdk.variant.variantcontext.Allele;
-
-import java.io.File;
-import java.io.IOException;
-import java.util.*;
-
-public class AlleleBiasedDownsamplingUtils {
-
-    // define this class so that we can use Java generics below
-    private final static class PileupElementList extends ArrayList<PileupElement> {}
-
-    /**
-     * Computes an allele biased version of the given pileup
-     *
-     * @param pileup                    the original pileup
-     * @param downsamplingFraction      the fraction of total reads to remove per allele
-     * @return allele biased pileup
-     */
-    public static ReadBackedPileup createAlleleBiasedBasePileup(final ReadBackedPileup pileup, final double downsamplingFraction) {
-        // special case removal of all or no reads
-        if ( downsamplingFraction <= 0.0 )
-            return pileup;
-        if ( downsamplingFraction >= 1.0 )
-            return new ReadBackedPileupImpl(pileup.getLocation(), new ArrayList<PileupElement>());
-
-        final PileupElementList[] alleleStratifiedElements = new PileupElementList[4];
-        for ( int i = 0; i < 4; i++ )
-            alleleStratifiedElements[i] = new PileupElementList();
-
-        // start by stratifying the reads by the alleles they represent at this position
-        for ( final PileupElement pe : pileup ) {
-            final int baseIndex = BaseUtils.simpleBaseToBaseIndex(pe.getBase());
-            if ( baseIndex != -1 )
-                alleleStratifiedElements[baseIndex].add(pe);
-        }
-
-        // make a listing of allele counts and calculate the total count
-        final int[] alleleCounts = calculateAlleleCounts(alleleStratifiedElements);
-        final int totalAlleleCount = (int)MathUtils.sum(alleleCounts);
-
-        // do smart down-sampling
-        final int numReadsToRemove = (int)(totalAlleleCount * downsamplingFraction); // floor
-        final int[] targetAlleleCounts = runSmartDownsampling(alleleCounts, numReadsToRemove);
-
-        final HashSet<PileupElement> readsToRemove = new HashSet<PileupElement>(numReadsToRemove);
-        for ( int i = 0; i < 4; i++ ) {
-            final PileupElementList alleleList = alleleStratifiedElements[i];
-            // if we don't need to remove any reads, then don't
-            if ( alleleCounts[i] > targetAlleleCounts[i] )
-                readsToRemove.addAll(downsampleElements(alleleList, alleleCounts[i], alleleCounts[i] - targetAlleleCounts[i]));
-        }
-
-        // we need to keep the reads sorted because the FragmentUtils code will expect them in coordinate order and will fail otherwise
-        final List<PileupElement> readsToKeep = new ArrayList<PileupElement>(totalAlleleCount - numReadsToRemove);
-        for ( final PileupElement pe : pileup ) {
-            if ( !readsToRemove.contains(pe) ) {
-                readsToKeep.add(pe);
-            }
-        }
-
-        return new ReadBackedPileupImpl(pileup.getLocation(), new ArrayList<PileupElement>(readsToKeep));
-    }
-
-    /**
-     * Calculates actual allele counts for each allele (which can be different than the list size when reduced reads are present)
-     *
-     * @param alleleStratifiedElements       pileup elements stratified by allele
-     * @return non-null int array representing allele counts
-     */
-    private static int[] calculateAlleleCounts(final PileupElementList[] alleleStratifiedElements) {
-        final int[] alleleCounts = new int[alleleStratifiedElements.length];
-        for ( int i = 0; i < alleleStratifiedElements.length; i++ ) {
-            alleleCounts[i] = alleleStratifiedElements[i].size();
-        }
-        return alleleCounts;
-    }
-
-    private static int scoreAlleleCounts(final int[] alleleCounts) {
-        if ( alleleCounts.length < 2 )
-            return 0;
-
-        // sort the counts (in ascending order)
-        final int[] alleleCountsCopy = alleleCounts.clone();
-        Arrays.sort(alleleCountsCopy);
-
-        final int maxCount = alleleCountsCopy[alleleCounts.length - 1];
-        final int nextBestCount = alleleCountsCopy[alleleCounts.length - 2];
-
-        int remainderCount = 0;
-        for ( int i = 0; i < alleleCounts.length - 2; i++ )
-            remainderCount += alleleCountsCopy[i];
-
-        // try to get the best score:
-        //    - in the het case the counts should be equal with nothing else
-        //    - in the hom case the non-max should be zero
-        return Math.min(maxCount - nextBestCount + remainderCount, Math.abs(nextBestCount + remainderCount));
-    }
-
-    /**
-     * Computes an allele biased version of the allele counts for a given pileup
-     *
-     * @param alleleCounts              the allele counts for the original pileup
-     * @param numReadsToRemove          number of total reads to remove per allele
-     * @return non-null array of new counts needed per allele
-     */
-    protected static int[] runSmartDownsampling(final int[] alleleCounts, final int numReadsToRemove) {
-        final int numAlleles = alleleCounts.length;
-
-        int maxScore = scoreAlleleCounts(alleleCounts);
-        int[] alleleCountsOfMax = alleleCounts;
-
-        final int numReadsToRemovePerAllele = numReadsToRemove / 2;
-
-        for ( int i = 0; i < numAlleles; i++ ) {
-            for ( int j = i; j < numAlleles; j++ ) {
-                final int[] newCounts = alleleCounts.clone();
-
-                // split these cases so we don't lose on the floor (since we divided by 2)
-                if ( i == j ) {
-                    newCounts[i] = Math.max(0, newCounts[i] - numReadsToRemove);
-                } else {
-                    newCounts[i] = Math.max(0, newCounts[i] - numReadsToRemovePerAllele);
-                    newCounts[j] = Math.max(0, newCounts[j] - numReadsToRemovePerAllele);
-                }
-
-                final int score = scoreAlleleCounts(newCounts);
-
-                if ( score < maxScore ) {
-                    maxScore = score;
-                    alleleCountsOfMax = newCounts;
-                }
-            }
-        }
-
-        return alleleCountsOfMax;
-    }
-
-    /**
-     * Performs allele biased down-sampling on a pileup and computes the list of elements to remove
-     *
-     * @param elements                  original list of pileup elements
-     * @param originalElementCount      original count of elements (taking reduced reads into account)
-     * @param numElementsToRemove       the number of records to remove
-     * @return the list of pileup elements TO REMOVE
-     */
-    protected static List<PileupElement> downsampleElements(final List<PileupElement> elements, final int originalElementCount, final int numElementsToRemove) {
-        // are there no elements to remove?
-        if ( numElementsToRemove == 0 )
-            return Collections.<PileupElement>emptyList();
-
-        final ArrayList<PileupElement> elementsToRemove = new ArrayList<PileupElement>(numElementsToRemove);
-
-        // should we remove all of the elements?
-        if ( numElementsToRemove >= originalElementCount ) {
-            elementsToRemove.addAll(elements);
-            return elementsToRemove;
-        }
-
-        // create a bitset describing which elements to remove
-        final BitSet itemsToRemove = new BitSet(originalElementCount);
-        for ( final Integer selectedIndex : MathUtils.sampleIndicesWithoutReplacement(originalElementCount, numElementsToRemove) ) {
-            itemsToRemove.set(selectedIndex);
-        }
-
-        int currentBitSetIndex = 0;
-        for ( final PileupElement element : elements ) {
-            if ( itemsToRemove.get(currentBitSetIndex++) ) {
-                elementsToRemove.add(element);
-            }
-        }
-
-        return elementsToRemove;
-    }
-
-    /**
-     * Computes reads to remove based on an allele biased down-sampling
-     *
-     * @param alleleReadMap             original list of records per allele
-     * @param downsamplingFraction      the fraction of total reads to remove per allele
-     * @return list of reads TO REMOVE from allele biased down-sampling
-     */
-    public static <A extends Allele> List<GATKSAMRecord> selectAlleleBiasedReads(final Map<A, List<GATKSAMRecord>> alleleReadMap, final double downsamplingFraction) {
-        int totalReads = 0;
-        for ( final List<GATKSAMRecord> reads : alleleReadMap.values() )
-            totalReads += reads.size();
-
-        int numReadsToRemove = (int)(totalReads * downsamplingFraction);
-
-        // make a listing of allele counts
-        final List<Allele> alleles = new ArrayList<Allele>(alleleReadMap.keySet());
-        alleles.remove(Allele.NO_CALL);    // ignore the no-call bin
-        final int numAlleles = alleles.size();
-
-        final int[] alleleCounts = new int[numAlleles];
-        for ( int i = 0; i < numAlleles; i++ )
-            alleleCounts[i] = alleleReadMap.get(alleles.get(i)).size();
-
-        // do smart down-sampling
-        final int[] targetAlleleCounts = runSmartDownsampling(alleleCounts, numReadsToRemove);
-
-        final List<GATKSAMRecord> readsToRemove = new ArrayList<GATKSAMRecord>(numReadsToRemove);
-        for ( int i = 0; i < numAlleles; i++ ) {
-            if ( alleleCounts[i] > targetAlleleCounts[i] ) {
-                readsToRemove.addAll(downsampleElements(alleleReadMap.get(alleles.get(i)), alleleCounts[i] - targetAlleleCounts[i]));
-            }
-        }
-
-        return readsToRemove;
-    }
-
-    /**
-     * Performs allele biased down-sampling on a pileup and computes the list of elements to remove
-     *
-     * @param reads                     original list of records
-     * @param numElementsToRemove       the number of records to remove
-     * @return the list of pileup elements TO REMOVE
-     */
-    protected static List<GATKSAMRecord> downsampleElements(final List<GATKSAMRecord> reads, final int numElementsToRemove) {
-        // are there no elements to remove?
-        if ( numElementsToRemove == 0 )
-            return Collections.<GATKSAMRecord>emptyList();
-
-        final ArrayList<GATKSAMRecord> elementsToRemove = new ArrayList<GATKSAMRecord>(numElementsToRemove);
-        final int originalElementCount = reads.size();
-
-        // should we remove all of the elements?
-        if ( numElementsToRemove >= originalElementCount ) {
-            elementsToRemove.addAll(reads);
-            return elementsToRemove;
-        }
-
-        // create a bitset describing which elements to remove
-        final BitSet itemsToRemove = new BitSet(originalElementCount);
-        for ( final Integer selectedIndex : MathUtils.sampleIndicesWithoutReplacement(originalElementCount, numElementsToRemove) ) {
-            itemsToRemove.set(selectedIndex);
-        }
-
-        int currentBitSetIndex = 0;
-        for ( final GATKSAMRecord read : reads ) {
-            if ( itemsToRemove.get(currentBitSetIndex++) )
-                elementsToRemove.add(read);
-        }
-
-        return elementsToRemove;
-    }
-
-    /**
-     * Create sample-contamination maps from file
-     *
-     * @param ContaminationFractionFile   Filename containing two columns: SampleID and Contamination
-     * @param AvailableSampleIDs          Set of Samples of interest (no reason to include every sample in file) or null to turn off checking
-     * @param logger                      for logging output
-     * @return sample-contamination Map
-     */
-
-    public static DefaultHashMap<String, Double> loadContaminationFile(File ContaminationFractionFile, final Double defaultContaminationFraction, final Set<String> AvailableSampleIDs, Logger logger) throws GATKException {
-        DefaultHashMap<String, Double> sampleContamination = new DefaultHashMap<String, Double>(defaultContaminationFraction);
-        Set<String> nonSamplesInContaminationFile = new HashSet<String>(sampleContamination.keySet());
-        try {
-
-            XReadLines reader = new XReadLines(ContaminationFractionFile, true);
-            for (String line : reader) {
-
-                if (line.length() == 0) {
-                    continue;
-                }
-
-                StringTokenizer st = new StringTokenizer(line,"\t");
-
-                String fields[] = new String[2];
-                try {
-                    fields[0] = st.nextToken();
-                    fields[1] = st.nextToken();
-                } catch(NoSuchElementException e){
-                    throw new UserException.MalformedFile("Contamination file must have exactly two, tab-delimited columns. Offending line:\n" + line);
-                }
-                if(st.hasMoreTokens()) {
-                    throw new UserException.MalformedFile("Contamination file must have exactly two, tab-delimited columns. Offending line:\n" + line);
-                }
-
-                if (fields[0].length() == 0 || fields[1].length() == 0) {
-                    throw new UserException.MalformedFile("Contamination file can not have empty strings in either column. Offending line:\n" + line);
-                }
-
-                if (sampleContamination.containsKey(fields[0])) {
-                    throw new UserException.MalformedFile("Contamination file contains duplicate entries for input name " + fields[0]);
-                }
-
-                try {
-                    final Double contamination = Double.valueOf(fields[1]);
-                    if (contamination < 0 || contamination > 1){
-                        throw new UserException.MalformedFile("Contamination file contains unacceptable contamination value (must be 0<=x<=1): " + line);
-                    }
-                    if (AvailableSampleIDs==null || AvailableSampleIDs.contains(fields[0])) {// only add samples if they are in the sampleSet (or if it is null)
-                        sampleContamination.put(fields[0], contamination);
-                    }
-                    else {
-                        nonSamplesInContaminationFile.add(fields[0]);
-                    }
-                } catch (NumberFormatException e) {
-                    throw new UserException.MalformedFile("Contamination file contains unparsable double in the second field. Offending line: " + line);
-                }
-            }
-
-
-            //output to the user info lines telling which samples are in the Contamination File
-            if (sampleContamination.size() > 0) {
-                logger.info(String.format("The following samples were found in the Contamination file and will be processed at the contamination level therein: %s", sampleContamination.keySet().toString()));
-
-                //output to the user info lines telling which samples are NOT in the Contamination File
-                if(AvailableSampleIDs!=null){
-                    Set<String> samplesNotInContaminationFile = new HashSet<String>(AvailableSampleIDs);
-                    samplesNotInContaminationFile.removeAll(sampleContamination.keySet());
-                    if (samplesNotInContaminationFile.size() > 0)
-                        logger.info(String.format("The following samples were NOT found in the Contamination file and will be processed at the default contamination level: %s", samplesNotInContaminationFile.toString()));
-                }
-            }
-
-            //output to the user Samples that do not have lines in the Contamination File
-            if (nonSamplesInContaminationFile.size() > 0) {
-                logger.info(String.format("The following entries were found in the Contamination file but were not SAMPLEIDs. They will be ignored: %s", nonSamplesInContaminationFile.toString()));
-            }
-
-            return sampleContamination;
-
-        } catch (IOException e) {
-            throw new GATKException("I/O Error while reading sample-contamination file " + ContaminationFractionFile.getName() + ": " + e.getMessage());
-        }
-
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/downsampling/DownsampleType.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/downsampling/DownsampleType.java
deleted file mode 100644
index 715ef6e..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/downsampling/DownsampleType.java
+++ /dev/null
@@ -1,39 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.downsampling;
-
-/**
- * Type of downsampling method to invoke.
- *
- * @author hanna
- * @version 0.1
- */
-
-public enum DownsampleType {
-    NONE,
-    ALL_READS,
-    BY_SAMPLE
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/downsampling/Downsampler.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/downsampling/Downsampler.java
deleted file mode 100644
index 8ab0198..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/downsampling/Downsampler.java
+++ /dev/null
@@ -1,161 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.downsampling;
-
-import java.util.Collection;
-import java.util.List;
-
-/**
- * The basic downsampler API, with no reads-specific operations.
- *
- * Downsamplers that extend this class rather than the ReadsDownsampler class can handle
- * any kind of item, however they cannot be wrapped within a DownsamplingReadsIterator or a
- * PerSampleDownsamplingReadsIterator.
- *
- * @author David Roazen
- */
-public abstract class Downsampler<T> {
-
-    /**
-     * Number of items discarded by this downsampler since the last call to resetStats()
-     */
-    protected int numDiscardedItems = 0;
-
-    /**
-     * Submit one item to the downsampler for consideration. Some downsamplers will be able to determine
-     * immediately whether the item survives the downsampling process, while others will need to see
-     * more items before making that determination.
-     *
-     * @param item the individual item to submit to the downsampler for consideration
-     */
-    public abstract void submit( final T item );
-
-    /**
-     * Submit a collection of items to the downsampler for consideration. Should be equivalent to calling
-     * submit() on each individual item in the collection.
-     *
-     * @param items the collection of items to submit to the downsampler for consideration
-     */
-    public void submit( final Collection<T> items ) {
-        if ( items == null ) {
-            throw new IllegalArgumentException("submitted items must not be null");
-        }
-
-        for ( final T item : items ) {
-            submit(item);
-        }
-    }
-
-    /**
-     * Are there items that have survived the downsampling process waiting to be retrieved?
-     *
-     * @return true if this downsampler has > 0 finalized items, otherwise false
-     */
-    public abstract boolean hasFinalizedItems();
-
-    /**
-     * Return (and *remove*) all items that have survived downsampling and are waiting to be retrieved.
-     *
-     * @return a list of all finalized items this downsampler contains, or an empty list if there are none
-     */
-    public abstract List<T> consumeFinalizedItems();
-
-    /**
-     * Are there items stored in this downsampler that it doesn't yet know whether they will
-     * ultimately survive the downsampling process?
-     *
-     * @return true if this downsampler has > 0 pending items, otherwise false
-     */
-    public abstract boolean hasPendingItems();
-
-    /**
-     * Peek at the first finalized item stored in this downsampler (or null if there are no finalized items)
-     *
-     * @return the first finalized item in this downsampler (the item is not removed from the downsampler by this call),
-     *         or null if there are none
-     */
-    public abstract T peekFinalized();
-
-    /**
-     * Peek at the first pending item stored in this downsampler (or null if there are no pending items)
-     *
-     * @return the first pending item stored in this downsampler (the item is not removed from the downsampler by this call),
-     *         or null if there are none
-     */
-    public abstract T peekPending();
-
-    /**
-     * Get the current number of items in this downsampler
-     *
-     * This should be the best estimate of the total number of elements that will come out of the downsampler
-     * were consumeFinalizedItems() to be called immediately after this call.  In other words it should
-     * be number of finalized items + estimate of number of pending items that will ultimately be included as well.
-     *
-     * @return a positive integer
-     */
-    public abstract int size();
-
-    /**
-     * Returns the number of items discarded (so far) during the downsampling process
-     *
-     * @return the number of items that have been submitted to this downsampler and discarded in the process of
-     *         downsampling
-     */
-    public int getNumberOfDiscardedItems() {
-        return numDiscardedItems;
-    }
-
-    /**
-     * Used to tell the downsampler that no more items will be submitted to it, and that it should
-     * finalize any pending items.
-     */
-    public abstract void signalEndOfInput();
-
-    /**
-     * Empty the downsampler of all finalized/pending items
-     */
-    public abstract void clearItems();
-
-    /**
-     * Reset stats in the downsampler such as the number of discarded items *without* clearing the downsampler of items
-     */
-    public void resetStats() {
-        numDiscardedItems = 0;
-    }
-
-    /**
-     * Indicates whether an item should be excluded from elimination during downsampling. By default,
-     * all items representing reduced reads are excluded from downsampling, but individual downsamplers
-     * may override if they are able to handle reduced reads correctly. Downsamplers should check
-     * the return value of this method before discarding an item.
-     *
-     * @param item The item to test
-     * @return true if the item should not be subject to elimination during downsampling, otherwise false
-     */
-    protected boolean doNotDiscardItem( final Object item ) {
-        return false;
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/downsampling/DownsamplingMethod.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/downsampling/DownsamplingMethod.java
deleted file mode 100644
index 94a3cc7..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/downsampling/DownsamplingMethod.java
+++ /dev/null
@@ -1,142 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.downsampling;
-
-import org.broadinstitute.gatk.engine.walkers.ActiveRegionWalker;
-import org.broadinstitute.gatk.engine.walkers.LocusWalker;
-import org.broadinstitute.gatk.engine.walkers.Walker;
-import org.broadinstitute.gatk.utils.exceptions.UserException;
-
-/**
- * Describes the method for downsampling reads at a given locus.
- */
-
-public class DownsamplingMethod {
-    /**
-     * Type of downsampling to perform.
-     */
-    public final DownsampleType type;
-
-    /**
-     * Actual downsampling target is specified as an integer number of reads.
-     */
-    public final Integer toCoverage;
-
-    /**
-     * Actual downsampling target is specified as a fraction of total available reads.
-     */
-    public final Double toFraction;
-
-    /**
-     * Expresses no downsampling applied at all.
-     */
-    public static final DownsamplingMethod NONE = new DownsamplingMethod(DownsampleType.NONE, null, null);
-
-    /**
-     * Default type to use if no type is specified
-     */
-    public static final DownsampleType DEFAULT_DOWNSAMPLING_TYPE = DownsampleType.BY_SAMPLE;
-
-    /**
-     * Don't allow dcov values below this threshold for locus-based traversals (ie., Locus
-     * and ActiveRegion walkers), as they can result in problematic downsampling artifacts
-     */
-    public static final int MINIMUM_SAFE_COVERAGE_TARGET_FOR_LOCUS_BASED_TRAVERSALS = 200;
-
-
-    public DownsamplingMethod( DownsampleType type, Integer toCoverage, Double toFraction ) {
-        this.type = type != null ? type : DEFAULT_DOWNSAMPLING_TYPE;
-
-        if ( type == DownsampleType.NONE ) {
-            this.toCoverage = null;
-            this.toFraction = null;
-        }
-        else {
-            this.toCoverage = toCoverage;
-            this.toFraction = toFraction;
-        }
-
-        validate();
-    }
-
-    private void validate() {
-        // Can't leave toFraction and toCoverage null unless type is NONE
-        if ( type != DownsampleType.NONE && toFraction == null && toCoverage == null )
-            throw new UserException("Must specify either toFraction or toCoverage when downsampling.");
-
-        // Fraction and coverage cannot both be specified.
-        if ( toFraction != null && toCoverage != null )
-            throw new UserException("Downsampling coverage and fraction are both specified. Please choose only one.");
-
-        // toCoverage must be > 0 when specified
-        if ( toCoverage != null && toCoverage <= 0 ) {
-            throw new UserException("toCoverage must be > 0 when downsampling to coverage");
-        }
-
-        // toFraction must be >= 0.0 and <= 1.0 when specified
-        if ( toFraction != null && (toFraction < 0.0 || toFraction > 1.0) ) {
-            throw new UserException("toFraction must be >= 0.0 and <= 1.0 when downsampling to a fraction of reads");
-        }
-    }
-
-    public void checkCompatibilityWithWalker( Walker walker ) {
-        boolean isLocusTraversal = walker instanceof LocusWalker || walker instanceof ActiveRegionWalker;
-
-        if ( isLocusTraversal && type == DownsampleType.ALL_READS && toCoverage != null ) {
-            throw new UserException("Downsampling to coverage with the ALL_READS method for locus-based traversals (eg., LocusWalkers) is not currently supported (though it is supported for ReadWalkers).");
-        }
-
-        // For locus traversals, ensure that the dcov value (if present) is not problematically low
-        if ( isLocusTraversal && type != DownsampleType.NONE && toCoverage != null &&
-             toCoverage < MINIMUM_SAFE_COVERAGE_TARGET_FOR_LOCUS_BASED_TRAVERSALS ) {
-            throw new UserException(String.format("Locus-based traversals (ie., Locus and ActiveRegion walkers) require " +
-                                                  "a minimum -dcov value of %d when downsampling to coverage. Values less " +
-                                                  "than this can produce problematic downsampling artifacts while providing " +
-                                                  "only insignificant improvements in memory usage in most cases.",
-                                                  MINIMUM_SAFE_COVERAGE_TARGET_FOR_LOCUS_BASED_TRAVERSALS));
-        }
-    }
-
-    public String toString() {
-        StringBuilder builder = new StringBuilder("Downsampling Settings: ");
-
-        if ( type == DownsampleType.NONE ) {
-            builder.append("No downsampling");
-        }
-        else {
-            builder.append(String.format("Method: %s, ", type));
-
-            if ( toCoverage != null ) {
-                builder.append(String.format("Target Coverage: %d", toCoverage));
-            }
-            else {
-                builder.append(String.format("Target Fraction: %.2f", toFraction));
-            }
-        }
-
-        return builder.toString();
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/downsampling/DownsamplingReadsIterator.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/downsampling/DownsamplingReadsIterator.java
deleted file mode 100644
index 6b398ab..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/downsampling/DownsamplingReadsIterator.java
+++ /dev/null
@@ -1,116 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.downsampling;
-
-import htsjdk.samtools.SAMRecord;
-import org.broadinstitute.gatk.engine.iterators.GATKSAMIterator;
-
-import java.util.Collection;
-import java.util.Iterator;
-import java.util.NoSuchElementException;
-
-
-/**
- * GATKSAMIterator wrapper around our generic reads downsampler interface. Converts the push-style
- * downsampler interface to a pull model.
- *
- * @author David Roazen
- */
-public class DownsamplingReadsIterator implements GATKSAMIterator {
-
-    private GATKSAMIterator nestedSAMIterator;
-    private ReadsDownsampler<SAMRecord> downsampler;
-    private Collection<SAMRecord> downsampledReadsCache;
-    private SAMRecord nextRead = null;
-    private Iterator<SAMRecord> downsampledReadsCacheIterator = null;
-
-    /**
-     * @param iter wrapped iterator from which this iterator will pull reads
-     * @param downsampler downsampler through which the reads will be fed
-     */
-    public DownsamplingReadsIterator( GATKSAMIterator iter, ReadsDownsampler<SAMRecord> downsampler ) {
-        nestedSAMIterator = iter;
-        this.downsampler = downsampler;
-
-        advanceToNextRead();
-    }
-
-    public boolean hasNext() {
-        return nextRead != null;
-    }
-
-    public SAMRecord next() {
-        if ( nextRead == null ) {
-            throw new NoSuchElementException("next() called when there are no more items");
-        }
-
-        SAMRecord toReturn = nextRead;
-        advanceToNextRead();
-
-        return toReturn;
-    }
-
-    private void advanceToNextRead() {
-        if ( ! readyToReleaseReads() && ! fillDownsampledReadsCache() ) {
-            nextRead = null;
-        }
-        else {
-            nextRead = downsampledReadsCacheIterator.next();
-        }
-    }
-
-    private boolean readyToReleaseReads() {
-        return downsampledReadsCacheIterator != null && downsampledReadsCacheIterator.hasNext();
-    }
-
-    private boolean fillDownsampledReadsCache() {
-        while ( nestedSAMIterator.hasNext() && ! downsampler.hasFinalizedItems() ) {
-            downsampler.submit(nestedSAMIterator.next());
-        }
-
-        if ( ! nestedSAMIterator.hasNext() ) {
-            downsampler.signalEndOfInput();
-        }
-
-        // use returned collection directly rather than make a copy, for speed
-        downsampledReadsCache = downsampler.consumeFinalizedItems();
-        downsampledReadsCacheIterator = downsampledReadsCache.iterator();
-
-        return downsampledReadsCacheIterator.hasNext();
-    }
-
-    public void remove() {
-        throw new UnsupportedOperationException("Can not remove records from a SAM file via an iterator!");
-    }
-
-    public void close() {
-        nestedSAMIterator.close();
-    }
-
-    public Iterator<SAMRecord> iterator() {
-        return this;
-    }
-}
\ No newline at end of file
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/downsampling/DownsamplingUtils.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/downsampling/DownsamplingUtils.java
deleted file mode 100644
index bd236c0..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/downsampling/DownsamplingUtils.java
+++ /dev/null
@@ -1,107 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.downsampling;
-
-import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
-import org.broadinstitute.gatk.utils.sam.ReadUtils;
-
-import java.util.*;
-
-/**
- * Utilities for using the downsamplers for common tasks
- *
- * User: depristo
- * Date: 3/6/13
- * Time: 4:26 PM
- */
-public class DownsamplingUtils {
-    private DownsamplingUtils() { }
-
-    /**
-     * Level the coverage of the reads in each sample to no more than downsampleTo reads, no reducing
-     * coverage at any read start to less than minReadsPerAlignmentStart
-     *
-     * This algorithm can be used to handle the situation where you have lots of coverage in some interval, and
-     * want to reduce the coverage of the big peak down without removing the many reads at the edge of this
-     * interval that are in fact good
-     *
-     * This algorithm separately operates on the reads for each sample independently.
-     *
-     * @param reads a sorted list of reads
-     * @param downsampleTo the targeted number of reads we want from reads per sample
-     * @param minReadsPerAlignmentStart don't reduce the number of reads starting at a specific alignment start
-     *                                  to below this.  That is, if this value is 2, we'll never reduce the number
-     *                                  of reads starting at a specific start site to less than 2
-     * @return a sorted list of reads
-     */
-    public static List<GATKSAMRecord> levelCoverageByPosition(final List<GATKSAMRecord> reads, final int downsampleTo, final int minReadsPerAlignmentStart) {
-        if ( reads == null ) throw new IllegalArgumentException("reads must not be null");
-
-        final List<GATKSAMRecord> downsampled = new ArrayList<GATKSAMRecord>(reads.size());
-
-        final Map<String, Map<Integer, List<GATKSAMRecord>>> readsBySampleByStart = partitionReadsBySampleAndStart(reads);
-        for ( final Map<Integer, List<GATKSAMRecord>> readsByPosMap : readsBySampleByStart.values() ) {
-            final LevelingDownsampler<List<GATKSAMRecord>, GATKSAMRecord> downsampler = new LevelingDownsampler<List<GATKSAMRecord>, GATKSAMRecord>(downsampleTo, minReadsPerAlignmentStart);
-            downsampler.submit(readsByPosMap.values());
-            downsampler.signalEndOfInput();
-            for ( final List<GATKSAMRecord> downsampledReads : downsampler.consumeFinalizedItems())
-                downsampled.addAll(downsampledReads);
-        }
-
-        return ReadUtils.sortReadsByCoordinate(downsampled);
-    }
-
-    /**
-     * Build the data structure mapping for each sample -> (position -> reads at position)
-     *
-     * Note that the map position -> reads isn't ordered in any meaningful way
-     *
-     * @param reads a list of sorted reads
-     * @return a map containing the list of reads at each start location, for each sample independently
-     */
-    private static Map<String, Map<Integer, List<GATKSAMRecord>>> partitionReadsBySampleAndStart(final List<GATKSAMRecord> reads) {
-        final Map<String, Map<Integer, List<GATKSAMRecord>>> readsBySampleByStart = new LinkedHashMap<String, Map<Integer, List<GATKSAMRecord>>>();
-
-        for ( final GATKSAMRecord read : reads ) {
-            Map<Integer, List<GATKSAMRecord>> readsByStart = readsBySampleByStart.get(read.getReadGroup().getSample());
-
-            if ( readsByStart == null ) {
-                readsByStart = new LinkedHashMap<Integer, List<GATKSAMRecord>>();
-                readsBySampleByStart.put(read.getReadGroup().getSample(), readsByStart);
-            }
-
-            List<GATKSAMRecord> readsAtStart = readsByStart.get(read.getAlignmentStart());
-            if ( readsAtStart == null ) {
-                readsAtStart = new LinkedList<GATKSAMRecord>();
-                readsByStart.put(read.getAlignmentStart(), readsAtStart);
-            }
-
-            readsAtStart.add(read);
-        }
-
-        return readsBySampleByStart;
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/downsampling/FractionalDownsampler.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/downsampling/FractionalDownsampler.java
deleted file mode 100644
index a2d613c..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/downsampling/FractionalDownsampler.java
+++ /dev/null
@@ -1,129 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.downsampling;
-
-import htsjdk.samtools.SAMRecord;
-import org.broadinstitute.gatk.engine.GenomeAnalysisEngine;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-
-import java.util.ArrayList;
-import java.util.List;
-
-/**
- * Fractional Downsampler: selects a specified fraction of the reads for inclusion.
- *
- * Since the selection is done randomly, the actual fraction of reads retained may be slightly
- * more or less than the requested fraction, depending on the total number of reads submitted.
- *
- * @author David Roazen
- */
-public class FractionalDownsampler<T extends SAMRecord> extends ReadsDownsampler<T> {
-
-    private ArrayList<T> selectedReads;
-
-    private final int cutoffForInclusion;
-
-    private static final int RANDOM_POOL_SIZE = 10000;
-
-    /**
-     * Construct a FractionalDownsampler
-     *
-     * @param fraction Fraction of reads to preserve, between 0.0 (inclusive) and 1.0 (inclusive).
-     *                 Actual number of reads preserved may differ randomly.
-     */
-    public FractionalDownsampler( final double fraction ) {
-        if ( fraction < 0.0 || fraction > 1.0 ) {
-            throw new ReviewedGATKException("Fraction of reads to include must be between 0.0 and 1.0, inclusive");
-        }
-
-        cutoffForInclusion = (int)(fraction * RANDOM_POOL_SIZE);
-        clearItems();
-        resetStats();
-    }
-
-    @Override
-    public void submit( final T newRead ) {
-        if ( GenomeAnalysisEngine.getRandomGenerator().nextInt(10000) < cutoffForInclusion || doNotDiscardItem(newRead) ) {
-            selectedReads.add(newRead);
-        }
-        else {
-            numDiscardedItems++;
-        }
-    }
-
-    @Override
-    public boolean hasFinalizedItems() {
-        return selectedReads.size() > 0;
-    }
-
-    @Override
-    public List<T> consumeFinalizedItems() {
-        // pass by reference rather than make a copy, for speed
-        List<T> downsampledItems = selectedReads;
-        clearItems();
-        return downsampledItems;
-    }
-
-    @Override
-    public boolean hasPendingItems() {
-        return false;
-    }
-
-    @Override
-    public T peekFinalized() {
-        return selectedReads.isEmpty() ? null : selectedReads.get(0);
-    }
-
-    @Override
-    public T peekPending() {
-        return null;
-    }
-
-    @Override
-    public int size() {
-        return selectedReads.size();
-    }
-
-    @Override
-    public void signalEndOfInput() {
-        // NO-OP
-    }
-
-    @Override
-    public void clearItems() {
-        selectedReads = new ArrayList<T>();
-    }
-
-    @Override
-    public boolean requiresCoordinateSortOrder() {
-        return false;
-    }
-
-    @Override
-    public void signalNoMoreReadsBefore( final T read ) {
-        // NO-OP
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/downsampling/FractionalDownsamplerFactory.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/downsampling/FractionalDownsamplerFactory.java
deleted file mode 100644
index 4ddf8dd..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/downsampling/FractionalDownsamplerFactory.java
+++ /dev/null
@@ -1,46 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.downsampling;
-
-import htsjdk.samtools.SAMRecord;
-
-/**
- * Factory for creating FractionalDownsamplers on demand
- *
- * @author David Roazen
- */
-public class FractionalDownsamplerFactory<T extends SAMRecord> implements ReadsDownsamplerFactory<T> {
-
-    private double fraction;
-
-    public FractionalDownsamplerFactory( double fraction ) {
-        this.fraction = fraction;
-    }
-
-    public ReadsDownsampler<T> newInstance() {
-        return new FractionalDownsampler<T>(fraction);
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/downsampling/LevelingDownsampler.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/downsampling/LevelingDownsampler.java
deleted file mode 100644
index 4ae7bc5..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/downsampling/LevelingDownsampler.java
+++ /dev/null
@@ -1,242 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.downsampling;
-
-import org.broadinstitute.gatk.utils.MathUtils;
-
-import java.util.*;
-
-/**
- * Leveling Downsampler: Given a set of Lists of arbitrary items and a target size, removes items from
- * the Lists in an even fashion until the total size of all Lists is <= the target size. Leveling
- * does not occur until all Lists have been submitted and signalEndOfInput() is called.
- *
- * The Lists should be LinkedLists for maximum efficiency during item removal, however other
- * kinds of Lists are also accepted (albeit at a slight performance penalty).
- *
- * Since this downsampler extends the Downsampler interface rather than the ReadsDownsampler interface,
- * the Lists need not contain reads. However this downsampler may not be wrapped within one of the
- * DownsamplingReadsIterators
- *
- * @param <T> the List type representing the stacks to be leveled
- * @param <E> the type of the elements of each List
- *
- * @author David Roazen
- */
-public class LevelingDownsampler<T extends List<E>, E> extends Downsampler<T> {
-    private final int minElementsPerStack;
-
-    private final int targetSize;
-
-    private List<T> groups;
-
-    private boolean groupsAreFinalized;
-
-    /**
-     * Construct a LevelingDownsampler
-     *
-     * Uses the default minElementsPerStack of 1
-     *
-     * @param targetSize the sum of the sizes of all individual Lists this downsampler is fed may not exceed
-     *                   this value -- if it does, items are removed from Lists evenly until the total size
-     *                   is <= this value
-     */
-    public LevelingDownsampler( final int targetSize ) {
-        this(targetSize, 1);
-    }
-
-    /**
-     * Construct a LevelingDownsampler
-     *
-     * @param targetSize the sum of the sizes of all individual Lists this downsampler is fed may not exceed
-     *                   this value -- if it does, items are removed from Lists evenly until the total size
-     *                   is <= this value
-     * @param minElementsPerStack no stack will be reduced below this size during downsampling.  That is,
-     *                            if a stack has only 3 elements and minElementsPerStack is 3, no matter what
-     *                            we'll not reduce this stack below 3.
-     */
-    public LevelingDownsampler( final int targetSize, final int minElementsPerStack ) {
-        if ( targetSize < 0 ) throw new IllegalArgumentException("targetSize must be >= 0 but got " + targetSize);
-        if ( minElementsPerStack < 0 ) throw new IllegalArgumentException("minElementsPerStack must be >= 0 but got " + minElementsPerStack);
-
-        this.targetSize = targetSize;
-        this.minElementsPerStack = minElementsPerStack;
-        clearItems();
-        resetStats();
-    }
-
-    @Override
-    public void submit( final T item ) {
-        groups.add(item);
-    }
-
-    @Override
-    public void submit( final Collection<T> items ){
-        groups.addAll(items);
-    }
-
-    @Override
-    public boolean hasFinalizedItems() {
-        return groupsAreFinalized && groups.size() > 0;
-    }
-
-    @Override
-    public List<T> consumeFinalizedItems() {
-        if ( ! hasFinalizedItems() ) {
-            return new ArrayList<T>();
-        }
-
-        // pass by reference rather than make a copy, for speed
-        final List<T> toReturn = groups;
-        clearItems();
-        return toReturn;
-    }
-
-    @Override
-    public boolean hasPendingItems() {
-        return ! groupsAreFinalized && groups.size() > 0;
-    }
-
-    @Override
-    public T peekFinalized() {
-        return hasFinalizedItems() ? groups.get(0) : null;
-    }
-
-    @Override
-    public T peekPending() {
-        return hasPendingItems() ? groups.get(0) : null;
-    }
-
-    @Override
-    public int size() {
-        int s = 0;
-        for ( final List<E> l : groups ) {
-            s += l.size();
-        }
-        return s;
-    }
-
-    @Override
-    public void signalEndOfInput() {
-        levelGroups();
-        groupsAreFinalized = true;
-    }
-
-    @Override
-    public void clearItems() {
-        groups = new ArrayList<T>();
-        groupsAreFinalized = false;
-    }
-
-    private void levelGroups() {
-        final int[] groupSizes = new int[groups.size()];
-        int totalSize = 0;
-        int currentGroupIndex = 0;
-
-        for ( final T group : groups ) {
-            groupSizes[currentGroupIndex] = group.size();
-            totalSize += groupSizes[currentGroupIndex];
-            currentGroupIndex++;
-        }
-
-        if ( totalSize <= targetSize ) {
-            return;    // no need to eliminate any items
-        }
-
-        // We will try to remove exactly this many items, however we will refuse to allow any
-        // one group to fall below size 1, and so might end up removing fewer items than this
-        int numItemsToRemove = totalSize - targetSize;
-
-        currentGroupIndex = 0;
-        int numConsecutiveUmodifiableGroups = 0;
-
-        // Continue until we've either removed all the items we wanted to, or we can't
-        // remove any more items without violating the constraint that all groups must
-        // be left with at least one item
-        while ( numItemsToRemove > 0 && numConsecutiveUmodifiableGroups < groupSizes.length ) {
-            if ( groupSizes[currentGroupIndex] > minElementsPerStack ) {
-                groupSizes[currentGroupIndex]--;
-                numItemsToRemove--;
-                numConsecutiveUmodifiableGroups = 0;
-            }
-            else {
-                numConsecutiveUmodifiableGroups++;
-            }
-
-            currentGroupIndex = (currentGroupIndex + 1) % groupSizes.length;
-        }
-
-        // Now we actually go through and reduce each group to its new count as specified in groupSizes
-        currentGroupIndex = 0;
-        for ( final T group : groups ) {
-            downsampleOneGroup(group, groupSizes[currentGroupIndex]);
-            currentGroupIndex++;
-        }
-    }
-
-    private void downsampleOneGroup( final T group, final int numItemsToKeep ) {
-        if ( numItemsToKeep >= group.size() ) {
-            return;
-        }
-
-        final BitSet itemsToKeep = new BitSet(group.size());
-        for ( Integer selectedIndex : MathUtils.sampleIndicesWithoutReplacement(group.size(), numItemsToKeep) ) {
-            itemsToKeep.set(selectedIndex);
-        }
-
-        int currentIndex = 0;
-
-        // If our group is a linked list, we can remove the desired items in a single O(n) pass with an iterator
-        if ( group instanceof LinkedList ) {
-            final Iterator<E> iter = group.iterator();
-            while ( iter.hasNext() ) {
-                final E item = iter.next();
-
-                if ( ! itemsToKeep.get(currentIndex) && ! doNotDiscardItem(item) ) {
-                    iter.remove();
-                    numDiscardedItems++;
-                }
-
-                currentIndex++;
-            }
-        }
-        // If it's not a linked list, it's more efficient to copy the desired items into a new list and back rather
-        // than suffer O(n^2) of item shifting
-        else {
-            final List<E> keptItems = new ArrayList<E>(group.size());
-
-            for ( final E item : group ) {
-                if ( itemsToKeep.get(currentIndex) || doNotDiscardItem(item) ) {
-                    keptItems.add(item);
-                }
-                currentIndex++;
-            }
-            numDiscardedItems += group.size() - keptItems.size();
-            group.clear();
-            group.addAll(keptItems);
-        }
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/downsampling/PassThroughDownsampler.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/downsampling/PassThroughDownsampler.java
deleted file mode 100644
index a5fdf24..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/downsampling/PassThroughDownsampler.java
+++ /dev/null
@@ -1,111 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.downsampling;
-
-import htsjdk.samtools.SAMRecord;
-
-import java.util.LinkedList;
-import java.util.List;
-
-/**
- * Pass-Through Downsampler: Implementation of the ReadsDownsampler interface that does no
- * downsampling whatsoever, and instead simply "passes-through" all the reads it's given.
- * Useful for situations where you want to disable downsampling, but still need to use
- * the downsampler interface.
- *
- * @author David Roazen
- */
-public class PassThroughDownsampler<T extends SAMRecord> extends ReadsDownsampler<T> {
-
-    private LinkedList<T> selectedReads;
-
-    public PassThroughDownsampler() {
-        clearItems();
-    }
-
-    @Override
-    public void submit( T newRead ) {
-        // All reads pass-through, no reads get downsampled
-        selectedReads.add(newRead);
-    }
-
-    @Override
-    public boolean hasFinalizedItems() {
-        return ! selectedReads.isEmpty();
-    }
-
-    /**
-     * Note that this list is a linked list and so doesn't support fast random access
-     * @return
-     */
-    @Override
-    public List<T> consumeFinalizedItems() {
-        // pass by reference rather than make a copy, for speed
-        final List<T> downsampledItems = selectedReads;
-        clearItems();
-        return downsampledItems;
-    }
-
-    @Override
-    public boolean hasPendingItems() {
-        return false;
-    }
-
-    @Override
-    public T peekFinalized() {
-        return selectedReads.isEmpty() ? null : selectedReads.getFirst();
-    }
-
-    @Override
-    public T peekPending() {
-        return null;
-    }
-
-    @Override
-    public int size() {
-        return selectedReads.size();
-    }
-
-    @Override
-    public void signalEndOfInput() {
-        // NO-OP
-    }
-
-    @Override
-    public void clearItems() {
-        selectedReads = new LinkedList<T>();
-    }
-
-    @Override
-    public boolean requiresCoordinateSortOrder() {
-        return false;
-    }
-
-    @Override
-    public void signalNoMoreReadsBefore( T read ) {
-        // NO-OP
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/downsampling/PerSampleDownsamplingReadsIterator.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/downsampling/PerSampleDownsamplingReadsIterator.java
deleted file mode 100644
index 118bbbb..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/downsampling/PerSampleDownsamplingReadsIterator.java
+++ /dev/null
@@ -1,207 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.downsampling;
-
-import htsjdk.samtools.SAMRecord;
-import htsjdk.samtools.SAMRecordComparator;
-import htsjdk.samtools.SAMRecordCoordinateComparator;
-import org.broadinstitute.gatk.engine.iterators.GATKSAMIterator;
-
-import java.util.*;
-
-
/**
 * GATKSAMIterator wrapper around our generic reads downsampler interface
 * that downsamples reads for each sample independently, and then re-assembles
 * the reads back into a single merged stream.
 *
 * Incoming reads are routed to a per-sample downsampler (created lazily via the
 * supplied factory); finalized reads are merged back into coordinate order
 * through a priority queue. A read is only released once no downsampler holds a
 * pending read located earlier in the sort order, preserving the global
 * ordering of the wrapped stream.
 *
 * NOTE(review): assumes the wrapped iterator yields reads in coordinate order
 * (a SAMRecordCoordinateComparator is used for merging) — confirm with callers.
 *
 * @author David Roazen
 */
public class PerSampleDownsamplingReadsIterator implements GATKSAMIterator {

    // Underlying (sorted) source of reads that this iterator wraps
    private GATKSAMIterator nestedSAMIterator;
    // Stamps out one downsampler per sample as new samples are encountered
    private ReadsDownsamplerFactory<SAMRecord> downsamplerFactory;
    // Sample name -> that sample's downsampler (key is null for reads without a read group/sample)
    private Map<String, ReadsDownsampler<SAMRecord>> perSampleDownsamplers;
    // Finalized reads from all samples, merged back into coordinate order
    private PriorityQueue<SAMRecord> orderedDownsampledReadsCache;
    // Next read to be returned by next(), or null once the stream is exhausted
    private SAMRecord nextRead = null;
    // Coordinate order used both for the merge queue and for pending-read comparisons
    private SAMRecordComparator readComparator = new SAMRecordCoordinateComparator();
    // Earliest (by coordinate) read still pending inside any downsampler; gates release from the cache
    private SAMRecord earliestPendingRead = null;
    // The downsampler currently holding earliestPendingRead (tracked so we only rescan on changes)
    private ReadsDownsampler<SAMRecord> earliestPendingDownsampler = null;

    // Initial size of our cache of finalized reads
    private static final int DOWNSAMPLED_READS_INITIAL_CACHE_SIZE = 4096;

    // The number of positional changes that can occur in the read stream before all downsamplers
    // should be informed of the current position (guards against samples with relatively sparse reads
    // getting stuck in a pending state):
    private static final int DOWNSAMPLER_POSITIONAL_UPDATE_INTERVAL = 3;   // TODO: experiment with this value

    /**
     * @param iter wrapped iterator from which this iterator will pull reads
     * @param downsamplerFactory factory used to create new downsamplers as needed
     */
    public PerSampleDownsamplingReadsIterator( GATKSAMIterator iter, ReadsDownsamplerFactory<SAMRecord> downsamplerFactory ) {
        nestedSAMIterator = iter;
        this.downsamplerFactory = downsamplerFactory;
        perSampleDownsamplers = new HashMap<String, ReadsDownsampler<SAMRecord>>();
        orderedDownsampledReadsCache = new PriorityQueue<SAMRecord>(DOWNSAMPLED_READS_INITIAL_CACHE_SIZE, readComparator);

        // eagerly position on the first read so hasNext() is valid immediately
        advanceToNextRead();
    }

    /**
     * @return true if at least one more downsampled read is available
     */
    public boolean hasNext() {
        return nextRead != null;
    }

    /**
     * @return the next downsampled read, in coordinate order
     * @throws NoSuchElementException if the stream is exhausted
     */
    public SAMRecord next() {
        if ( nextRead == null ) {
            throw new NoSuchElementException("next() called when there are no more items");
        }

        SAMRecord toReturn = nextRead;
        advanceToNextRead();

        return toReturn;
    }

    // Load nextRead with the next releasable read, pulling more reads from the
    // wrapped iterator if the cache can't yet safely release anything.
    private void advanceToNextRead() {
        if ( ! readyToReleaseReads() && ! fillDownsampledReadsCache() ) {
            nextRead = null;
        }
        else {
            nextRead = orderedDownsampledReadsCache.poll();
        }
    }

    // A cached read may be released only if no downsampler still holds a pending
    // read located before it — otherwise we could violate global sort order.
    private boolean readyToReleaseReads() {
        if ( orderedDownsampledReadsCache.isEmpty() ) {
            return false;
        }

        return earliestPendingRead == null ||
               readComparator.compare(orderedDownsampledReadsCache.peek(), earliestPendingRead) <= 0;
    }

    // Pull reads from the wrapped iterator, feeding each to its sample's downsampler,
    // until the cache head can be released (or input runs out). Returns readyToReleaseReads().
    private boolean fillDownsampledReadsCache() {
        SAMRecord prevRead = null;
        int numPositionalChanges = 0;

        // Continue submitting reads to the per-sample downsamplers until the read at the top of the priority queue
        // can be released without violating global sort order
        while ( nestedSAMIterator.hasNext() && ! readyToReleaseReads() ) {
            SAMRecord read = nestedSAMIterator.next();
            String sampleName = read.getReadGroup() != null ? read.getReadGroup().getSample() : null;

            // lazily create a downsampler the first time each sample is seen
            ReadsDownsampler<SAMRecord> thisSampleDownsampler = perSampleDownsamplers.get(sampleName);
            if ( thisSampleDownsampler == null ) {
                thisSampleDownsampler = downsamplerFactory.newInstance();
                perSampleDownsamplers.put(sampleName, thisSampleDownsampler);
            }

            thisSampleDownsampler.submit(read);
            processFinalizedAndPendingItems(thisSampleDownsampler);

            if ( prevRead != null && prevRead.getAlignmentStart() != read.getAlignmentStart() ) {
                numPositionalChanges++;
            }

            // Periodically inform all downsamplers of the current position in the read stream. This is
            // to prevent downsamplers for samples with sparser reads than others from getting stuck too
            // long in a pending state.
            if ( numPositionalChanges > 0 && numPositionalChanges % DOWNSAMPLER_POSITIONAL_UPDATE_INTERVAL == 0 ) {
                for ( ReadsDownsampler<SAMRecord> perSampleDownsampler : perSampleDownsamplers.values() ) {
                    perSampleDownsampler.signalNoMoreReadsBefore(read);
                    processFinalizedAndPendingItems(perSampleDownsampler);
                }
            }

            prevRead = read;
        }

        // end of input: flush every downsampler; nothing can be pending anymore
        if ( ! nestedSAMIterator.hasNext() ) {
            for ( ReadsDownsampler<SAMRecord> perSampleDownsampler : perSampleDownsamplers.values() ) {
                perSampleDownsampler.signalEndOfInput();
                if ( perSampleDownsampler.hasFinalizedItems() ) {
                    orderedDownsampledReadsCache.addAll(perSampleDownsampler.consumeFinalizedItems());
                }
            }
            earliestPendingRead = null;
            earliestPendingDownsampler = null;
        }

        return readyToReleaseReads();
    }

    // Maintain the (earliestPendingRead, earliestPendingDownsampler) pair after
    // currentDownsampler's state may have changed.
    private void updateEarliestPendingRead( ReadsDownsampler<SAMRecord> currentDownsampler ) {
        // If there is no recorded earliest pending read and this downsampler has pending items,
        // then this downsampler's first pending item becomes the new earliest pending read:
        if ( earliestPendingRead == null && currentDownsampler.hasPendingItems() ) {
            earliestPendingRead = currentDownsampler.peekPending();
            earliestPendingDownsampler = currentDownsampler;
        }
        // In all other cases, we only need to update the earliest pending read when the downsampler
        // associated with it experiences a change in its pending reads, since by assuming a sorted
        // read stream we're assured that each downsampler's earliest pending read will only increase
        // in genomic position over time.
        //
        // TODO: An occasional O(samples) linear search seems like a better option than keeping the downsamplers
        // TODO: sorted by earliest pending read, which would cost at least O(total_reads * (samples + log(samples))),
        // TODO: but need to verify this empirically.
        else if ( currentDownsampler == earliestPendingDownsampler &&
                  (! currentDownsampler.hasPendingItems() || readComparator.compare(currentDownsampler.peekPending(), earliestPendingRead) != 0) ) {

            // the tracked downsampler changed: rescan all downsamplers for the new minimum
            earliestPendingRead = null;
            earliestPendingDownsampler = null;
            for ( ReadsDownsampler<SAMRecord> perSampleDownsampler : perSampleDownsamplers.values() ) {
                if ( perSampleDownsampler.hasPendingItems() &&
                     (earliestPendingRead == null || readComparator.compare(perSampleDownsampler.peekPending(), earliestPendingRead) < 0) ) {

                    earliestPendingRead = perSampleDownsampler.peekPending();
                    earliestPendingDownsampler = perSampleDownsampler;
                }
            }
        }
    }

    // Drain any newly-finalized reads into the merge queue, then refresh the
    // earliest-pending-read bookkeeping for this downsampler.
    private void processFinalizedAndPendingItems( ReadsDownsampler<SAMRecord> currentDownsampler ) {
        if ( currentDownsampler.hasFinalizedItems() ) {
            orderedDownsampledReadsCache.addAll(currentDownsampler.consumeFinalizedItems());
        }
        updateEarliestPendingRead(currentDownsampler);
    }

    /**
     * @throws UnsupportedOperationException always — removal is not supported
     */
    public void remove() {
        throw new UnsupportedOperationException("Can not remove records from a SAM file via an iterator!");
    }

    /**
     * Close the wrapped iterator. Reads still buffered internally are discarded.
     */
    public void close() {
        nestedSAMIterator.close();
    }

    public Iterator<SAMRecord> iterator() {
        return this;
    }
}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/downsampling/ReadsDownsampler.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/downsampling/ReadsDownsampler.java
deleted file mode 100644
index 9263920..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/downsampling/ReadsDownsampler.java
+++ /dev/null
@@ -1,56 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.downsampling;
-
-import htsjdk.samtools.SAMRecord;
-
/**
 * An extension of the basic downsampler API with reads-specific operations
 *
 * Adds the notion of a coordinate-sorted input requirement and a way for the
 * caller to promise that no earlier reads will arrive, allowing position-aware
 * implementations to finalize pending reads early.
 *
 * @author David Roazen
 *
 * @param <T> type of read handled by this downsampler
 */
public abstract class ReadsDownsampler<T extends SAMRecord> extends Downsampler<T> {

    /**
     * Does this downsampler require that reads be fed to it in coordinate order?
     *
     * @return true if reads must be submitted to this downsampler in coordinate order, otherwise false
     */
    public abstract boolean requiresCoordinateSortOrder();

    /**
     * Tell this downsampler that no more reads located before the provided read (according to
     * the sort order of the read stream) will be fed to it.
     *
     * Allows position-aware downsamplers to finalize pending reads earlier than they would
     * otherwise be able to, particularly when doing per-sample downsampling and reads for
     * certain samples are sparser than average.
     *
     * @param read the downsampler will assume that no reads located before this read will ever
     *             be submitted to it in the future
     */
    public abstract void signalNoMoreReadsBefore( final T read );
}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/downsampling/ReadsDownsamplerFactory.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/downsampling/ReadsDownsamplerFactory.java
deleted file mode 100644
index 9ef847e..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/downsampling/ReadsDownsamplerFactory.java
+++ /dev/null
@@ -1,38 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.downsampling;
-
-import htsjdk.samtools.SAMRecord;
-
/**
 * A ReadsDownsamplerFactory can be used to create an arbitrary number of instances of a particular
 * downsampler, all sharing the same construction parameters.
 *
 * @author David Roazen
 *
 * @param <T> type of read handled by the downsamplers this factory creates
 */
public interface ReadsDownsamplerFactory<T extends SAMRecord> {
    /**
     * @return a newly constructed downsampler configured with this factory's construction parameters
     */
    public ReadsDownsampler<T> newInstance();
}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/downsampling/ReservoirDownsampler.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/downsampling/ReservoirDownsampler.java
deleted file mode 100644
index 99a0bbd..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/downsampling/ReservoirDownsampler.java
+++ /dev/null
@@ -1,219 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.downsampling;
-
-import htsjdk.samtools.SAMRecord;
-import org.broadinstitute.gatk.engine.GenomeAnalysisEngine;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-
-import java.util.*;
-
-/**
- * Reservoir Downsampler: Selects n reads out of a stream whose size is not known in advance, with
- * every read in the stream having an equal chance of being selected for inclusion.
- *
- * An implementation of "Algorithm R" from the paper "Random Sampling with a Reservoir" (Jeffrey Scott Vitter, 1985)
- *
- * @author David Roazen
- */
-public class ReservoirDownsampler<T extends SAMRecord> extends ReadsDownsampler<T> {
-
-    /**
-     * size of our reservoir -- ie., the maximum number of reads from the stream that will be retained
-     * (not including any undiscardable items)
-     */
-    private final int targetSampleSize;
-
-    /**
-     * if true, this downsampler will be optimized for the case
-     * where most of the time we won't fill up anything like the
-     * targetSampleSize elements.  If this is false, we will allocate
-     * internal buffers to targetSampleSize initially, which minimizes
-     * the cost of allocation if we often use targetSampleSize or more
-     * elements.
-     */
-    private final boolean expectFewOverflows;
-
-    /**
-     * At times this can be a linked list or an array list, depending on how we're accessing the
-     * data and whether or not we're expecting few overflows
-     */
-    private List<T> reservoir;
-
-    /**
-     * Certain items (eg., reduced reads) cannot be discarded at all during downsampling. We store
-     * these items separately so as not to impact the fair selection of items for inclusion in the
-     * reservoir. These items are returned (and cleared) along with any items in the reservoir in
-     * calls to consumeFinalizedItems().
-     */
-    private List<T> undiscardableItems;
-
-    /**
-     * Are we currently using a linked list for the reservoir?
-     */
-    private boolean isLinkedList;
-
-    /**
-     * Count of the number of reads seen that were actually eligible for discarding. Used by the reservoir downsampling
-     * algorithm to ensure that all discardable reads have an equal chance of making it into the reservoir.
-     */
-    private int totalDiscardableReadsSeen;
-
-
-    /**
-     * Construct a ReservoirDownsampler
-     *
-     * @param targetSampleSize Size of the reservoir used by this downsampler. Number of items retained
-     *                         after downsampling will be min(totalDiscardableReads, targetSampleSize) + any
-     *                         undiscardable reads (eg., reduced reads).
-     *
-     * @param expectFewOverflows if true, this downsampler will be optimized for the case
-     *                           where most of the time we won't fill up anything like the
-     *                           targetSampleSize elements.  If this is false, we will allocate
-     *                           internal buffers to targetSampleSize initially, which minimizes
-     *                           the cost of allocation if we often use targetSampleSize or more
-     *                           elements.
-     */
-    public ReservoirDownsampler ( final int targetSampleSize, final boolean expectFewOverflows ) {
-        if ( targetSampleSize <= 0 ) {
-            throw new ReviewedGATKException("Cannot do reservoir downsampling with a sample size <= 0");
-        }
-
-        this.targetSampleSize = targetSampleSize;
-        this.expectFewOverflows = expectFewOverflows;
-        clearItems();
-        resetStats();
-    }
-
-    /**
-     * Construct a ReservoirDownsampler
-     *
-     * @param targetSampleSize Size of the reservoir used by this downsampler. Number of items retained
-     *                         after downsampling will be min(totalReads, targetSampleSize)
-     */
-    public ReservoirDownsampler ( final int targetSampleSize ) {
-        this(targetSampleSize, false);
-    }
-
-    @Override
-    public void submit ( final T newRead ) {
-        if ( doNotDiscardItem(newRead) ) {
-            undiscardableItems.add(newRead);
-            return;
-        }
-
-        // Only count reads that are actually eligible for discarding for the purposes of the reservoir downsampling algorithm
-        totalDiscardableReadsSeen++;
-
-        if ( totalDiscardableReadsSeen <= targetSampleSize ) {
-            reservoir.add(newRead);
-        }
-        else {
-            if ( isLinkedList ) {
-                reservoir = new ArrayList<T>(reservoir);
-                isLinkedList = false;
-            }
-
-            final int randomSlot = GenomeAnalysisEngine.getRandomGenerator().nextInt(totalDiscardableReadsSeen);
-            if ( randomSlot < targetSampleSize ) {
-                reservoir.set(randomSlot, newRead);
-            }
-            numDiscardedItems++;
-        }
-    }
-
-    @Override
-    public boolean hasFinalizedItems() {
-        return ! reservoir.isEmpty() || ! undiscardableItems.isEmpty();
-    }
-
-    @Override
-    public List<T> consumeFinalizedItems() {
-        if ( ! hasFinalizedItems() ) {
-            // if there's nothing here, don't bother allocating a new list
-            return Collections.emptyList();
-        } else {
-            // pass reservoir by reference rather than make a copy, for speed
-            final List<T> downsampledItems = reservoir;
-            downsampledItems.addAll(undiscardableItems);
-            clearItems();
-            return downsampledItems;
-        }
-    }
-
-    @Override
-    public boolean hasPendingItems() {
-        return false;
-    }
-
-    @Override
-    public T peekFinalized() {
-        return ! reservoir.isEmpty() ? reservoir.get(0) : (! undiscardableItems.isEmpty() ? undiscardableItems.get(0) : null);
-    }
-
-    @Override
-    public T peekPending() {
-        return null;
-    }
-
-    @Override
-    public int size() {
-        return reservoir.size() + undiscardableItems.size();
-    }
-
-    @Override
-    public void signalEndOfInput() {
-        // NO-OP
-    }
-
-    /**
-     * Clear the data structures used to hold information
-     */
-    @Override
-    public void clearItems() {
-        // if we aren't expecting many overflows, allocate a linked list not an arraylist
-        reservoir = expectFewOverflows ? new LinkedList<T>() : new ArrayList<T>(targetSampleSize);
-
-        // there's no possibility of overflow with the undiscardable items, so we always use a linked list for them
-        undiscardableItems = new LinkedList<>();
-
-        // it's a linked list if we allocate one
-        isLinkedList = expectFewOverflows;
-
-        // an internal stat used by the downsampling process, so not cleared by resetStats() below
-        totalDiscardableReadsSeen = 0;
-    }
-
-    @Override
-    public boolean requiresCoordinateSortOrder() {
-        return false;
-    }
-
-    @Override
-    public void signalNoMoreReadsBefore( T read ) {
-        // NO-OP
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/downsampling/ReservoirDownsamplerFactory.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/downsampling/ReservoirDownsamplerFactory.java
deleted file mode 100644
index c825bae..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/downsampling/ReservoirDownsamplerFactory.java
+++ /dev/null
@@ -1,46 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.downsampling;
-
-import htsjdk.samtools.SAMRecord;
-
-/**
- * Factory for creating ReservoirDownsamplers on demand
- *
- * @author David Roazen
- */
-public class ReservoirDownsamplerFactory<T extends SAMRecord> implements ReadsDownsamplerFactory<T> {
-
-    private int targetSampleSize;
-
-    public ReservoirDownsamplerFactory( int targetSampleSize ) {
-        this.targetSampleSize = targetSampleSize;
-    }
-
-    public ReadsDownsampler<T> newInstance() {
-        return new ReservoirDownsampler<T>(targetSampleSize);
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/downsampling/SimplePositionalDownsampler.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/downsampling/SimplePositionalDownsampler.java
deleted file mode 100644
index af0aa54..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/downsampling/SimplePositionalDownsampler.java
+++ /dev/null
@@ -1,171 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.downsampling;
-
-import htsjdk.samtools.SAMRecord;
-
-import java.util.*;
-
/**
 * Simple Positional Downsampler: Downsample each stack of reads at each alignment start to a size <= a target coverage
 * using a Reservoir downsampler. Stores only O(target coverage) reads in memory at any given time.
 *
 * Reads sharing the current alignment start accumulate in an internal reservoir;
 * when a read past that position arrives, the reservoir's survivors are moved to
 * the finalized list and a new position begins. Requires coordinate-sorted input.
 * Unmapped reads at the end of the stream bypass downsampling entirely.
 *
 * @author David Roazen
 */
public class SimplePositionalDownsampler<T extends SAMRecord> extends ReadsDownsampler<T> {

    // Maximum number of reads retained per alignment start position
    private final int targetCoverage;

    // Downsamples the stack of reads sharing the current alignment start position
    private final ReservoirDownsampler<T> reservoir;

    // Contig index of the position currently accumulating in the reservoir
    private int currentContigIndex;

    // Alignment start of the position currently accumulating in the reservoir
    private int currentAlignmentStart;

    // False until the first read establishes a current position
    private boolean positionEstablished;

    // True once an unmapped read is reached; subsequent reads skip downsampling
    private boolean unmappedReadsReached;

    // Reads from already-completed positions, ready to be consumed
    private ArrayList<T> finalizedReads;


    /**
     * Construct a SimplePositionalDownsampler
     *
     * @param targetCoverage Maximum number of reads that may share any given alignment start position
     */
    public SimplePositionalDownsampler( final int targetCoverage ) {
        this.targetCoverage = targetCoverage;
        reservoir = new ReservoirDownsampler<T>(targetCoverage);
        finalizedReads = new ArrayList<T>();
        clearItems();
        resetStats();
    }

    /**
     * Submit a read. If the read starts a new position, the previous position's
     * reservoir is finalized first. Unmapped reads are finalized immediately.
     *
     * @param newRead read to submit; must not precede previously submitted reads
     */
    @Override
    public void submit( final T newRead ) {
        updatePositionalState(newRead);

        if ( unmappedReadsReached ) {    // don't downsample the unmapped reads at the end of the stream
            finalizedReads.add(newRead);
        }
        else {
            final int reservoirPreviouslyDiscardedItems = reservoir.getNumberOfDiscardedItems();
            // our reservoir downsampler will call doNotDiscardItem() for us to exclude items from elimination as appropriate
            reservoir.submit(newRead);
            // attribute any reads the reservoir just dropped to this downsampler's discard count
            numDiscardedItems += reservoir.getNumberOfDiscardedItems() - reservoirPreviouslyDiscardedItems;
        }
    }

    @Override
    public boolean hasFinalizedItems() {
        return finalizedReads.size() > 0;
    }

    /**
     * @return all reads from completed positions, surrendering the internal list for speed
     */
    @Override
    public List<T> consumeFinalizedItems() {
        // pass by reference rather than make a copy, for speed
        final List<T> toReturn = finalizedReads;
        finalizedReads = new ArrayList<T>();
        return toReturn;
    }

    @Override
    public boolean hasPendingItems() {
        // pending == reads still held in the reservoir for the current (incomplete) position
        return reservoir.hasFinalizedItems();
    }

    @Override
    public T peekFinalized() {
        return finalizedReads.isEmpty() ? null : finalizedReads.get(0);
    }

    @Override
    public T peekPending() {
        // the reservoir's "finalized" items are this downsampler's pending items
        return reservoir.peekFinalized();
    }

    @Override
    public int size() {
        return finalizedReads.size() + reservoir.size();
    }

    @Override
    public void signalEndOfInput() {
        // the current position can never receive more reads, so flush it
        finalizeReservoir();
    }

    @Override
    public void clearItems() {
        reservoir.clearItems();
        reservoir.resetStats();
        finalizedReads.clear();
        positionEstablished = false;
        unmappedReadsReached = false;
    }

    @Override
    public boolean requiresCoordinateSortOrder() {
        // positional batching only works if reads arrive in coordinate order
        return true;
    }

    @Override
    public void signalNoMoreReadsBefore( final T read ) {
        // treat the promise like a position change: may flush the current reservoir
        updatePositionalState(read);
    }

    // If newRead lies past the current position, flush the reservoir (completing
    // the old position) and make newRead's position current. Entering the unmapped
    // reads at the end of the stream flips unmappedReadsReached permanently.
    private void updatePositionalState( final T newRead ) {
        if ( readIsPastCurrentPosition(newRead) ) {
            if ( reservoir.hasFinalizedItems() ) {
                finalizeReservoir();
            }

            setCurrentPosition(newRead);

            if ( newRead.getReadUnmappedFlag() ) {
                unmappedReadsReached = true;
            }
        }
    }

    // Record read's contig/start as the position now accumulating in the reservoir.
    private void setCurrentPosition( final T read ) {
        currentContigIndex = read.getReferenceIndex();
        currentAlignmentStart = read.getAlignmentStart();
        positionEstablished = true;
    }

    // A read is "past" the current position if no position exists yet, it is on a
    // later contig or start, or it is the first unmapped read of the trailing block.
    private boolean readIsPastCurrentPosition( final T read ) {
        return ! positionEstablished ||
               read.getReferenceIndex() > currentContigIndex ||
               read.getAlignmentStart() > currentAlignmentStart ||
               (read.getReadUnmappedFlag() && ! unmappedReadsReached);
    }

    // Move the current position's surviving reads into the finalized list.
    private void finalizeReservoir() {
        finalizedReads.addAll(reservoir.consumeFinalizedItems());
        reservoir.resetStats();
    }
}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/downsampling/SimplePositionalDownsamplerFactory.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/downsampling/SimplePositionalDownsamplerFactory.java
deleted file mode 100644
index 3fc66ca..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/downsampling/SimplePositionalDownsamplerFactory.java
+++ /dev/null
@@ -1,46 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.downsampling;
-
-import htsjdk.samtools.SAMRecord;
-
-/**
- * Factory for creating SimplePositionalDownsamplers on demand
- *
- * @author David Roazen
- */
-public class SimplePositionalDownsamplerFactory<T extends SAMRecord> implements ReadsDownsamplerFactory<T> {
-
-    private int targetCoverage;
-
-    public SimplePositionalDownsamplerFactory( int targetCoverage ) {
-        this.targetCoverage = targetCoverage;
-    }
-
-    public ReadsDownsampler<T> newInstance() {
-        return new SimplePositionalDownsampler<T>(targetCoverage);
-    }
-}
\ No newline at end of file
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/executive/Accumulator.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/executive/Accumulator.java
deleted file mode 100644
index 9276331..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/executive/Accumulator.java
+++ /dev/null
@@ -1,211 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.executive;
-
-import org.broadinstitute.gatk.engine.GenomeAnalysisEngine;
-import org.broadinstitute.gatk.engine.datasources.providers.LocusShardDataProvider;
-import org.broadinstitute.gatk.engine.datasources.providers.ShardDataProvider;
-import org.broadinstitute.gatk.engine.walkers.Walker;
-import org.broadinstitute.gatk.utils.GenomeLoc;
-import org.broadinstitute.gatk.utils.GenomeLocSortedSet;
-import org.broadinstitute.gatk.utils.collections.Pair;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-
-import java.util.ArrayList;
-import java.util.Iterator;
-import java.util.List;
-/**
- * Manages the accumulation of traversal results produced by a walker.
- */
-
-public abstract class Accumulator {
-    /**
-     * The walker for which to accumulate.
-     */
-    protected final Walker walker;
-
-    /**
-     * Create a new Accumulator.  Forbid outside classes from performing this operation.
-     * @param walker
-     */
-    protected Accumulator( Walker walker ) {
-        this.walker = walker;
-    }
-
-    /**
-     * Creates an accumulator suitable for accumulating results of the given walker.
-     * @param walker Walker for which to build an accumulator.
-     * @return Accumulator suitable for this walker.
-     */
-    public static Accumulator create( GenomeAnalysisEngine engine, Walker walker ) {
-        if( walker.isReduceByInterval() && engine.getIntervals() != null)
-            return new IntervalAccumulator( walker, engine.getIntervals() );
-        else
-            return new StandardAccumulator( walker );
-    }
-
-    /**
-     * Gets the appropriate reduce initializer for this accumulator.
-     * @return Traversal reduce init to feed into traversal engine. 
-     */
-    public abstract Object getReduceInit();
-
-    /**
-     * Roll this traversal result into the given accumulator.
-     * @param result Result of the most recent accumulation.
-     * @return the newest accumulation of the given data.
-     */
-    public abstract void accumulate( ShardDataProvider provider, Object result );
-
-    /**
-     * Finishes off the traversal.  Submits accumulated results to
-     * the walker and returns them.
-     * TODO: Its a bit funky to delegate the finishing of the traversal
-     *       to an accumulator, but we're doing it for type safety so the
-     *       right Walker override gets called.  Clean this up.
-     * @return Final result of accumulation.
-     */
-    public abstract Object finishTraversal();
-
-    /**
-     * Accumulates in the 'standard' fashion; basically funnels
-     * the reduce result back into the reduce init and relies on
-     * the user-supplied reduce to handle the accumulation.
-     */
-    private static class StandardAccumulator extends Accumulator {
-        private Object accumulator = null;
-        private boolean initialized = false;
-
-        protected StandardAccumulator( Walker walker ) {
-            super(walker);
-        }
-
-        /**
-         * Standard accumulator returns reduceInit first, then the
-         * results of the previous accumulation. 
-         */
-        public Object getReduceInit() {
-            if( !initialized ) {
-                initialized = true;
-                return walker.reduceInit();
-            }
-            else
-                return accumulator;
-        }
-
-        /**
-         * The result of the accumulator in a non-intervals walker
-         * already takes the accumulation into account.  return the result. 
-         */
-        public void accumulate( ShardDataProvider provider, Object result ) { this.accumulator = result; }
-
-        /**
-         * The result of the traversal is the list of accumulated intervals.
-         */
-        public Object finishTraversal() {
-            walker.onTraversalDone(getReduceInit());  // must call getReduceInit to ensure that we get the accumulator value or the reduceInit value
-            return this.accumulator;
-        }
-    }
-
-    /**
-     * An interval-based accumulator.  Treats each reduce result independently,
-     * and aggregates those results into a single list.
-     */
-    private static class IntervalAccumulator extends Accumulator {
-        /**
-         * True if a new interval is being started.  This flag is used to
-         * ensure that reduceInit() is not called unnecessarily.
-         */
-        private boolean startingNewInterval = true;
-
-        /**
-         * An iterator through all intervals in the series.
-         */
-        private final Iterator<GenomeLoc> intervalIterator;
-
-        /**
-         * For which interval is the accumulator currently accumulating?
-         */
-        private GenomeLoc currentInterval = null;
-
-        /**
-         * The actual mapping of interval to accumulator.
-         */
-        private final List<Pair<GenomeLoc,Object>> intervalAccumulator = new ArrayList<Pair<GenomeLoc,Object>>();
-
-        /**
-         * Holds the next value to be passed in as the reduce result.
-         */
-        private Object nextReduceInit = null;
-
-        protected IntervalAccumulator(Walker walker, GenomeLocSortedSet intervals) {
-            super(walker);
-            this.intervalIterator = intervals.iterator();
-            if(intervalIterator.hasNext()) currentInterval = intervalIterator.next();
-        }
-
-        /**
-         * Interval accumulator always feeds reduceInit into every new traversal.
-         */
-        public Object getReduceInit() {
-            if(startingNewInterval) {
-                startingNewInterval = false;
-                nextReduceInit = walker.reduceInit();
-            }
-            return nextReduceInit;
-        }
-
-        /**
-         * Create a holder for interval results if none exists.  Add the result to the holder.
-         */
-        public void accumulate( ShardDataProvider provider, Object result ) {
-            if(!(provider instanceof LocusShardDataProvider))
-                throw new ReviewedGATKException("Unable to reduce by interval on reads traversals at this time.");
-
-            GenomeLoc location = ((LocusShardDataProvider)provider).getLocus();
-
-            // Pull the interval iterator ahead to the interval overlapping this shard fragment.
-            while((currentInterval == null || currentInterval.isBefore(location)) && intervalIterator.hasNext())
-                currentInterval = intervalIterator.next();
-
-            if(currentInterval != null && currentInterval.getContig().equals(location.getContig()) && currentInterval.getStop() == location.getStop()) {
-                intervalAccumulator.add(new Pair<GenomeLoc,Object>(currentInterval,result));
-                startingNewInterval = true;
-            }
-            else
-                nextReduceInit = result;
-        }
-
-        /**
-         * The result of the traversal is the list of accumulated intervals.
-         */
-        public Object finishTraversal() {
-            walker.onTraversalDone(intervalAccumulator);
-            return intervalAccumulator;
-        }
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/executive/HierarchicalMicroScheduler.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/executive/HierarchicalMicroScheduler.java
deleted file mode 100644
index f7e3dbc..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/executive/HierarchicalMicroScheduler.java
+++ /dev/null
@@ -1,495 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.executive;
-
-import htsjdk.samtools.reference.IndexedFastaSequenceFile;
-import htsjdk.tribble.TribbleException;
-import org.broadinstitute.gatk.engine.GenomeAnalysisEngine;
-import org.broadinstitute.gatk.engine.datasources.reads.SAMDataSource;
-import org.broadinstitute.gatk.engine.datasources.reads.Shard;
-import org.broadinstitute.gatk.engine.datasources.rmd.ReferenceOrderedDataSource;
-import org.broadinstitute.gatk.engine.io.OutputTracker;
-import org.broadinstitute.gatk.engine.io.ThreadGroupOutputTracker;
-import org.broadinstitute.gatk.engine.resourcemanagement.ThreadAllocation;
-import org.broadinstitute.gatk.engine.walkers.TreeReducible;
-import org.broadinstitute.gatk.engine.walkers.Walker;
-import org.broadinstitute.gatk.utils.MultiThreadedErrorTracker;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-import org.broadinstitute.gatk.utils.exceptions.UserException;
-import org.broadinstitute.gatk.utils.threading.ThreadPoolMonitor;
-
-import java.util.Collection;
-import java.util.Iterator;
-import java.util.LinkedList;
-import java.util.Queue;
-import java.util.concurrent.*;
-
-/**
- * A microscheduler that schedules shards according to a tree-like structure.
- * Requires a special walker tagged with a 'TreeReducible' interface.
- */
-public class HierarchicalMicroScheduler extends MicroScheduler implements HierarchicalMicroSchedulerMBean, ReduceTree.TreeReduceNotifier {
-    /**
-     * How many outstanding output merges are allowed before the scheduler stops
-     * allowing new processes and starts merging flat-out.
-     */
-    private static final int MAX_OUTSTANDING_OUTPUT_MERGES = 50;
-
-    /** Manage currently running threads. */
-    private ExecutorService threadPool;
-
-    /**
-     * A thread local output tracker for managing output per-thread.
-     */
-    private ThreadGroupOutputTracker outputTracker = new ThreadGroupOutputTracker();
-
-    private final Queue<TreeReduceTask> reduceTasks = new LinkedList<TreeReduceTask>();
-
-    /**
-     * An exception that's occurred in this traversal.  If null, no exception has occurred.
-     */
-    final MultiThreadedErrorTracker errorTracker = new MultiThreadedErrorTracker();
-
-    /**
-     * Queue of incoming shards.
-     */
-    private Iterator<Shard> traversalTasks;
-
-    /**
-     * Keep a queue of shard traversals, and constantly monitor it to see what output
-     * merge tasks remain.
-     * TODO: Integrate this into the reduce tree.
-     */
-    private final Queue<ShardTraverser> outputMergeTasks = new LinkedList<ShardTraverser>();
-
-    /** How many shard traversals have run to date? */
-    private int totalCompletedTraversals = 0;
-
-    /** What is the total time spent traversing shards? */
-    private long totalShardTraverseTime = 0;
-
-    /** What is the total time spent tree reducing shard output? */
-    private long totalTreeReduceTime = 0;
-
-    /** How many tree reduces have been completed? */
-    private long totalCompletedTreeReduces = 0;
-
-    /** What is the total time spent merging output? */
-    private long totalOutputMergeTime = 0;
-
-    /**
-     * Create a new hierarchical microscheduler to process the given reads and reference.
-     *
-     * @param walker           the walker used to process the dataset.
-     * @param reads            Reads file(s) to process.
-     * @param reference        Reference for driving the traversal.
-     * @param threadAllocation How should we apply multi-threaded execution?
-     */
-    protected HierarchicalMicroScheduler(final GenomeAnalysisEngine engine,
-                                         final Walker walker,
-                                         final SAMDataSource reads,
-                                         final IndexedFastaSequenceFile reference,
-                                         final Collection<ReferenceOrderedDataSource> rods,
-                                         final ThreadAllocation threadAllocation) {
-        super(engine, walker, reads, reference, rods, threadAllocation);
-
-        final int nThreadsToUse = threadAllocation.getNumDataThreads();
-        if ( threadAllocation.monitorThreadEfficiency() ) {
-            throw new UserException.BadArgumentValue("nt", "Cannot monitor thread efficiency with -nt, sorry");
-        }
-
-        this.threadPool = Executors.newFixedThreadPool(nThreadsToUse, new UniqueThreadGroupThreadFactory());
-    }
-
-    /**
-     * Creates threads for HMS each with a unique thread group.  Critical to
-     * track outputs via the ThreadGroupOutputTracker.
-     */
-    private static class UniqueThreadGroupThreadFactory implements ThreadFactory {
-        int counter = 0;
-
-        @Override
-        public Thread newThread(Runnable r) {
-            final ThreadGroup group = new ThreadGroup("HMS-group-" + counter++);
-            return new Thread(group, r);
-        }
-    }
-
-    public Object execute( Walker walker, Iterable<Shard> shardStrategy ) {
-        super.startingExecution();
-
-        // Fast fail for walkers not supporting TreeReducible interface.
-        if (!( walker instanceof TreeReducible ))
-            throw new IllegalArgumentException("The GATK can currently run in parallel only with TreeReducible walkers");
-
-        this.traversalTasks = shardStrategy.iterator();
-
-        final ReduceTree reduceTree = new ReduceTree(this);
-        initializeWalker(walker);
-
-        while (! abortExecution() && (isShardTraversePending() || isTreeReducePending())) {
-            // Check for errors during execution.
-            errorTracker.throwErrorIfPending();
-
-            // Too many files sitting around taking up space?  Merge them.
-            if (isMergeLimitExceeded())
-                mergeExistingOutput(false);
-
-            // Wait for the next slot in the queue to become free.
-            waitForFreeQueueSlot();
-
-            // Pick the next most appropriate task and run it.  In the interest of
-            // memory conservation, hierarchical reduces always run before traversals.
-            if (isTreeReduceReady())
-                queueNextTreeReduce(walker);
-            else if (isShardTraversePending())
-                queueNextShardTraverse(walker, reduceTree);
-        }
-
-        errorTracker.throwErrorIfPending();
-
-        threadPool.shutdown();
-
-        // Merge any lingering output files.  If these files aren't ready,
-        // sit around and wait for them, then merge them.
-        mergeExistingOutput(true);
-
-        Object result = null;
-        try {
-            result = reduceTree.getResult().get();
-            notifyTraversalDone(walker,result);
-        } catch (ReviewedGATKException ex) {
-            throw ex;
-        } catch ( ExecutionException ex ) {
-            // the thread died and we are failing to get the result, rethrow it as a runtime exception
-            throw notifyOfTraversalError(ex.getCause());
-        } catch (Exception ex) {
-            throw new ReviewedGATKException("Unable to retrieve result", ex);
-        }
-
-        // do final cleanup operations
-        outputTracker.close();
-        cleanup();
-        executionIsDone();
-
-        return result;
-    }
-
-    /**
-     * Run the initialize method of the walker.  Ensure that any calls
-     * to the output stream will bypass thread local storage and write
-     * directly to the output file.
-     * @param walker Walker to initialize.
-     */
-    protected void initializeWalker(Walker walker) {
-        outputTracker.bypassThreadLocalStorage(true);
-        try {
-            walker.initialize();
-        }
-        finally {
-            outputTracker.bypassThreadLocalStorage(false);
-        }
-    }
-
-    /**
-     * Run the onTraversalDone method of the walker.  Ensure that any calls
-     * to the output stream will bypass thread local storage and write
-     * directly to the output file.
-     * @param walker Walker to notify.
-     * @param result Final result of the completed traversal.
-     */
-    protected void notifyTraversalDone(Walker walker, Object result) {
-        outputTracker.bypassThreadLocalStorage(true);
-        try {
-            walker.onTraversalDone(result);
-        }
-        finally {
-            outputTracker.bypassThreadLocalStorage(false);
-        }
-    }
-
-    /**
-     * {@inheritDoc}
-     */
-    public OutputTracker getOutputTracker() {
-        return outputTracker;
-    }
-
-    /**
-     * Returns true if there are unscheduled shard traversal waiting to run.
-     *
-     * @return true if a shard traversal is waiting; false otherwise.
-     */
-    protected boolean isShardTraversePending() {
-        return traversalTasks.hasNext();
-    }
-
-    /**
-     * Returns true if there are tree reduces that can be run without
-     * blocking.
-     *
-     * @return true if a tree reduce is ready; false otherwise.
-     */
-    protected boolean isTreeReduceReady() {
-        if (reduceTasks.size() == 0)
-            return false;
-        return reduceTasks.peek().isReadyForReduce();
-    }
-
-    /**
-     * Returns true if there are tree reduces that need to be run before
-     * the computation is complete.  Returns true if any entries are in the queue,
-     * blocked or otherwise.
-     *
-     * @return true if a tree reduce is pending; false otherwise.
-     */
-    protected boolean isTreeReducePending() {
-        return reduceTasks.size() > 0;
-    }
-
-    /**
-     * Returns whether the maximum number of files is sitting in the temp directory
-     * waiting to be merged back in.
-     *
-     * @return True if the merging needs to take priority.  False otherwise.
-     */
-    protected boolean isMergeLimitExceeded() {
-        int pendingTasks = 0;
-        for( ShardTraverser shardTraverse: outputMergeTasks ) {
-            if( !shardTraverse.isComplete() )
-                break;
-            pendingTasks++;
-        }
-        return (outputMergeTasks.size() >= MAX_OUTSTANDING_OUTPUT_MERGES);
-    }
-
-    /**
-     * Merging all output that's sitting ready in the OutputMerger queue into
-     * the final data streams.
-     */
-    protected void mergeExistingOutput( boolean wait ) {
-        long startTime = System.currentTimeMillis();
-
-//        logger.warn("MergingExistingOutput");
-//        printOutputMergeTasks();
-
-        // Create a list of the merge tasks that will be performed in this run of the mergeExistingOutput().
-        Queue<ShardTraverser> mergeTasksInSession = new LinkedList<ShardTraverser>();
-        while( !outputMergeTasks.isEmpty() ) {
-            ShardTraverser traverser = outputMergeTasks.peek();
-
-            // If the next traversal isn't done and we're not supposed to wait, we've found our working set.  Continue.
-            if( !traverser.isComplete() && !wait )
-                break;
-
-            outputMergeTasks.remove();
-            mergeTasksInSession.add(traverser);
-        }
-
-//        logger.warn("Selected things to merge:");
-//        printOutputMergeTasks(mergeTasksInSession);
-
-        // Actually run through, merging the tasks in the working queue.
-        for( ShardTraverser traverser: mergeTasksInSession ) {
-            //logger.warn("*** Merging " + traverser.getIntervalsString());
-            if( !traverser.isComplete() )
-                traverser.waitForComplete();
-
-            OutputMergeTask mergeTask = traverser.getOutputMergeTask();
-            if( mergeTask != null ) {
-                try {
-                    mergeTask.merge();
-                }
-                catch(TribbleException ex) {
-                    // Specifically catch Tribble I/O exceptions and rethrow them as Reviewed.  We don't expect
-                    // any issues here because we created the Tribble output file mere moments ago and expect it to
-                    // be completely valid.
-                    throw new ReviewedGATKException("Unable to merge temporary Tribble output file.",ex);
-                }
-            }
-        }
-
-        long endTime = System.currentTimeMillis();
-
-        totalOutputMergeTime += ( endTime - startTime );
-    }
-
-    /**
-     * Queues the next traversal of a walker from the traversal tasks queue.
-     *
-     * @param walker     Walker to apply to the dataset.
-     * @param reduceTree Tree of reduces to which to add this shard traverse.
-     */
-    protected void queueNextShardTraverse( Walker walker, ReduceTree reduceTree ) {
-        if (!traversalTasks.hasNext())
-            throw new IllegalStateException("Cannot traverse; no pending traversals exist.");
-
-        final Shard shard = traversalTasks.next();
-
-        // todo -- add ownership claim here
-
-        final ShardTraverser traverser = new ShardTraverser(this, walker, shard, outputTracker);
-
-        final Future traverseResult = threadPool.submit(traverser);
-
-        // Add this traverse result to the reduce tree.  The reduce tree will call a callback to throw its entries on the queue.
-        reduceTree.addEntry(traverseResult);
-        outputMergeTasks.add(traverser);
-
-//        logger.warn("adding merge task");
-//        printOutputMergeTasks();
-
-        // No more data?  Let the reduce tree know so it can finish processing what it's got.
-        if (!isShardTraversePending())
-            reduceTree.complete();
-    }
-
-    private synchronized void printOutputMergeTasks() {
-        printOutputMergeTasks(outputMergeTasks);
-    }
-
-    private synchronized void printOutputMergeTasks(final Queue<ShardTraverser> tasks) {
-        logger.info("Output merge tasks " + tasks.size());
-        for ( final ShardTraverser traverser : tasks )
-            logger.info(String.format("\t%s: complete? %b", traverser.getIntervalsString(), traverser.isComplete()));
-    }
-
-    /** Pulls the next reduce from the queue and runs it. */
-    protected void queueNextTreeReduce( Walker walker ) {
-        if (reduceTasks.size() == 0)
-            throw new IllegalStateException("Cannot reduce; no pending reduces exist.");
-        final TreeReduceTask reducer = reduceTasks.remove();
-        reducer.setWalker((TreeReducible) walker);
-
-        threadPool.submit(reducer);
-    }
-
-    /** Blocks until a free slot appears in the thread queue. */
-    protected void waitForFreeQueueSlot() {
-        final ThreadPoolMonitor monitor = new ThreadPoolMonitor();
-        synchronized (monitor) {
-            threadPool.submit(monitor);
-            monitor.watch();
-        }
-    }
-
-    /**
-     * Callback for adding reduce tasks to the run queue.
-     *
-     * @return A new, composite future of the result of this reduce.
-     */
-    public Future notifyReduce( final Future lhs, final Future rhs ) {
-        final TreeReduceTask reducer = new TreeReduceTask(new TreeReducer(this, lhs, rhs));
-        reduceTasks.add(reducer);
-        return reducer;
-    }
-
-    /**
-     * Allows other threads to notify of an error during traversal.
-     */
-    protected synchronized RuntimeException notifyOfTraversalError(Throwable error) {
-        return errorTracker.notifyOfError(error);
-    }
-
-    /** A small wrapper class that provides the TreeReducer interface along with the FutureTask semantics. */
-    private class TreeReduceTask extends FutureTask {
-        final private TreeReducer treeReducer;
-
-        public TreeReduceTask( TreeReducer treeReducer ) {
-            super(treeReducer);
-            this.treeReducer = treeReducer;
-        }
-
-        public void setWalker( TreeReducible walker ) {
-            treeReducer.setWalker(walker);
-        }
-
-        public boolean isReadyForReduce() {
-            return treeReducer.isReadyForReduce();
-        }
-    }
-
-    /**
-     * Used by the ShardTraverser to report time consumed traversing a given shard.
-     *
-     * @param shardTraversalTime Elapsed time traversing a given shard.
-     */
-    synchronized void reportShardTraverseTime( long shardTraversalTime ) {
-        totalShardTraverseTime += shardTraversalTime;
-        totalCompletedTraversals++;
-    }
-
-    /**
-     * Used by the TreeReducer to report time consumed reducing two shards.
-     *
-     * @param treeReduceTime Elapsed time reducing two shards.
-     */
-    synchronized void reportTreeReduceTime( long treeReduceTime ) {
-        totalTreeReduceTime += treeReduceTime;
-        totalCompletedTreeReduces++;
-
-    }
-
-    /** {@inheritDoc} */
-    public int getNumberOfTasksInReduceQueue() {
-        return reduceTasks.size();
-    }
-
-    /** {@inheritDoc} */
-    public int getNumberOfTasksInIOQueue() {
-        synchronized( outputMergeTasks ) {
-            return outputMergeTasks.size();
-        }
-    }
-
-    /** {@inheritDoc} */
-    public long getTotalShardTraverseTimeMillis() {
-        return totalShardTraverseTime;
-    }
-
-    /** {@inheritDoc} */
-    public long getAvgShardTraverseTimeMillis() {
-        if (totalCompletedTraversals == 0)
-            return 0;
-        return totalShardTraverseTime / totalCompletedTraversals;
-    }
-
-    /** {@inheritDoc} */
-    public long getTotalTreeReduceTimeMillis() {
-        return totalTreeReduceTime;
-    }
-
-    /** {@inheritDoc} */
-    public long getAvgTreeReduceTimeMillis() {
-        if (totalCompletedTreeReduces == 0)
-            return 0;
-        return totalTreeReduceTime / totalCompletedTreeReduces;
-    }
-
-    /** {@inheritDoc} */
-    public long getTotalOutputMergeTimeMillis() {
-        return totalOutputMergeTime;
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/executive/HierarchicalMicroSchedulerMBean.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/executive/HierarchicalMicroSchedulerMBean.java
deleted file mode 100644
index 30e03c6..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/executive/HierarchicalMicroSchedulerMBean.java
+++ /dev/null
@@ -1,86 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.executive;
-/**
- * User: hanna
- * Date: May 29, 2009
- * Time: 4:05:27 PM
- * BROAD INSTITUTE SOFTWARE COPYRIGHT NOTICE AND AGREEMENT
- * Software and documentation are copyright 2005 by the Broad Institute.
- * All rights are reserved.
- *
- * Users acknowledge that this software is supplied without any warranty or support.
- * The Broad Institute is not responsible for its use, misuse, or
- * functionality.
- */
-
-/**
- * An interface for retrieving runtime statistics about how the hierarchical
- * microscheduler is behaving. 
- */
-public interface HierarchicalMicroSchedulerMBean {
-    /**
-     * How many tree reduces are waiting in the tree reduce queue?
-     * @return Total number of reduces waiting in the tree reduce queue?
-     */
-    public int getNumberOfTasksInReduceQueue();
-
-    /**
-     * How many pending I/O combining tasks are waiting in the queue?
-     * @return Total number of I/O tasks waiting in the I/O queue.
-     */
-    public int getNumberOfTasksInIOQueue();
-
-    /**
-     * What is the total time spent running traversals?
-     * @return Total time spent traversing shards; 0 if none have been traversed.
-     */
-    public long getTotalShardTraverseTimeMillis();
-
-    /**
-     * What is the average time spent running traversals?
-     * @return Average time spent traversing shards; 0 if none have been traversed.
-     */
-    public long getAvgShardTraverseTimeMillis();
-
-    /**
-     * What is the total time spent merging output?
-     */
-    public long getTotalOutputMergeTimeMillis();
-
-    /**
-     * What is the total time spent running tree reduces?
-     * @return Total time spent running tree reduces; 0 if none have been run.
-     */
-    public long getTotalTreeReduceTimeMillis();
-
-    /**
-     * What is the average time spent running tree reduces?
-     * @return Average time spent running tree reduces; 0 if none have been run.
-     */
-    public long getAvgTreeReduceTimeMillis();
-}
-
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/executive/LinearMicroScheduler.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/executive/LinearMicroScheduler.java
deleted file mode 100644
index 293bb1c..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/executive/LinearMicroScheduler.java
+++ /dev/null
@@ -1,130 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.executive;
-
-import htsjdk.samtools.reference.IndexedFastaSequenceFile;
-import org.broadinstitute.gatk.engine.GenomeAnalysisEngine;
-import org.broadinstitute.gatk.engine.datasources.providers.LocusShardDataProvider;
-import org.broadinstitute.gatk.engine.datasources.providers.ReadShardDataProvider;
-import org.broadinstitute.gatk.engine.datasources.providers.ShardDataProvider;
-import org.broadinstitute.gatk.engine.datasources.reads.SAMDataSource;
-import org.broadinstitute.gatk.engine.datasources.reads.Shard;
-import org.broadinstitute.gatk.engine.datasources.rmd.ReferenceOrderedDataSource;
-import org.broadinstitute.gatk.engine.io.DirectOutputTracker;
-import org.broadinstitute.gatk.engine.io.OutputTracker;
-import org.broadinstitute.gatk.engine.resourcemanagement.ThreadAllocation;
-import org.broadinstitute.gatk.engine.traversals.TraversalEngine;
-import org.broadinstitute.gatk.engine.walkers.Walker;
-import org.broadinstitute.gatk.utils.SampleUtils;
-import org.broadinstitute.gatk.utils.threading.ThreadEfficiencyMonitor;
-
-import java.util.Collection;
-
-
-/** A micro-scheduling manager for single-threaded execution of a traversal. */
-public class LinearMicroScheduler extends MicroScheduler {
-
-    /**
-     * A direct output tracker for directly managing output.
-     */
-    private DirectOutputTracker outputTracker = new DirectOutputTracker();
-
-    /**
-     * Create a new linear microscheduler to process the given reads and reference.
-     *
-     * @param walker    Walker for the traversal.
-     * @param reads     Reads file(s) to process.
-     * @param reference Reference for driving the traversal.
-     * @param rods      Reference-ordered data.
-     */
-    protected LinearMicroScheduler(final GenomeAnalysisEngine engine,
-                                   final Walker walker,
-                                   final SAMDataSource reads,
-                                   final IndexedFastaSequenceFile reference,
-                                   final Collection<ReferenceOrderedDataSource> rods,
-                                   final ThreadAllocation threadAllocation) {
-        super(engine, walker, reads, reference, rods, threadAllocation);
-
-        if ( threadAllocation.monitorThreadEfficiency() )
-            setThreadEfficiencyMonitor(new ThreadEfficiencyMonitor());
-    }
-
-    /**
-     * Run this traversal over the specified subsection of the dataset.
-     *
-     * @param walker    Computation to perform over dataset.
-     * @param shardStrategy A strategy for sharding the data.
-     */
-    public Object execute(Walker walker, Iterable<Shard> shardStrategy) {
-        super.startingExecution();
-        walker.initialize();
-        Accumulator accumulator = Accumulator.create(engine,walker);
-
-        boolean done = walker.isDone();
-        int counter = 0;
-
-        final TraversalEngine traversalEngine = borrowTraversalEngine(this);
-        for (Shard shard : shardStrategy ) {
-            if ( abortExecution() || done || shard == null ) // we ran out of shards that aren't owned
-                break;
-
-            if(shard.getShardType() == Shard.ShardType.LOCUS) {
-                WindowMaker windowMaker = new WindowMaker(shard, engine.getGenomeLocParser(),
-                        getReadIterator(shard), shard.getGenomeLocs(), SampleUtils.getSAMFileSamples(engine));
-                for(WindowMaker.WindowMakerIterator iterator: windowMaker) {
-                    ShardDataProvider dataProvider = new LocusShardDataProvider(shard,iterator.getSourceInfo(),engine.getGenomeLocParser(),iterator.getLocus(),iterator,reference,rods);
-                    Object result = traversalEngine.traverse(walker, dataProvider, accumulator.getReduceInit());
-                    accumulator.accumulate(dataProvider,result);
-                    dataProvider.close();
-                    if ( walker.isDone() ) break;
-                }
-                windowMaker.close();
-            }
-            else {
-                ShardDataProvider dataProvider = new ReadShardDataProvider(shard,engine.getGenomeLocParser(),getReadIterator(shard),reference,rods);
-                Object result = traversalEngine.traverse(walker, dataProvider, accumulator.getReduceInit());
-                accumulator.accumulate(dataProvider,result);
-                dataProvider.close();
-            }
-
-            done = walker.isDone();
-        }
-
-        Object result = accumulator.finishTraversal();
-
-        outputTracker.close();
-        returnTraversalEngine(this, traversalEngine);
-        cleanup();
-        executionIsDone();
-
-        return accumulator;
-    }
-
-    /**
-     * @{inheritDoc}
-     */
-    public OutputTracker getOutputTracker() { return outputTracker; }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/executive/MicroScheduler.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/executive/MicroScheduler.java
deleted file mode 100644
index e192b9a..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/executive/MicroScheduler.java
+++ /dev/null
@@ -1,463 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.executive;
-
-import com.google.java.contract.Ensures;
-import htsjdk.samtools.reference.IndexedFastaSequenceFile;
-import org.apache.log4j.Logger;
-import org.broadinstitute.gatk.engine.GenomeAnalysisEngine;
-import org.broadinstitute.gatk.engine.ReadMetrics;
-import org.broadinstitute.gatk.engine.datasources.reads.SAMDataSource;
-import org.broadinstitute.gatk.engine.datasources.reads.Shard;
-import org.broadinstitute.gatk.engine.datasources.rmd.ReferenceOrderedDataSource;
-import org.broadinstitute.gatk.engine.io.OutputTracker;
-import org.broadinstitute.gatk.engine.iterators.NullSAMIterator;
-import org.broadinstitute.gatk.engine.iterators.GATKSAMIterator;
-import org.broadinstitute.gatk.engine.resourcemanagement.ThreadAllocation;
-import org.broadinstitute.gatk.engine.traversals.*;
-import org.broadinstitute.gatk.engine.walkers.*;
-import org.broadinstitute.gatk.utils.AutoFormattingTime;
-import org.broadinstitute.gatk.utils.MathUtils;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-import org.broadinstitute.gatk.utils.exceptions.UserException;
-import org.broadinstitute.gatk.utils.progressmeter.ProgressMeter;
-import org.broadinstitute.gatk.utils.threading.ThreadEfficiencyMonitor;
-
-import javax.management.JMException;
-import javax.management.MBeanServer;
-import javax.management.ObjectName;
-import java.io.File;
-import java.lang.management.ManagementFactory;
-import java.util.*;
-
-
-/**
- * Created by IntelliJ IDEA.
- * User: mhanna
- * Date: Apr 26, 2009
- * Time: 12:37:23 PM
- *
- * General base class for all scheduling algorithms
- * Shards and schedules data in manageable chunks.
- *
- * Creates N TraversalEngines for each data thread for the MicroScheduler.  This is necessary
- * because in the HMS case you have multiple threads executing a traversal engine independently, and
- * these engines may need to create separate resources for efficiency or implementation reasons.  For example,
- * the nanoScheduler creates threads to implement the traversal, and this creation is instance specific.
- * So each HMS thread needs to have it's own distinct copy of the traversal engine if it wants to have
- * N data threads x M nano threads => N * M threads total.  These are borrowed from this microscheduler
- * and returned when done.  Also allows us to tracks all created traversal engines so this microscheduler
- * can properly shut them all down when the scheduling is done.
- *
- */
-public abstract class MicroScheduler implements MicroSchedulerMBean {
-    protected static final Logger logger = Logger.getLogger(MicroScheduler.class);
-
-    /**
-     * The list of all Traversal engines we've created in this micro scheduler
-     */
-    final List<TraversalEngine> allCreatedTraversalEngines = new LinkedList<TraversalEngine>();
-
-    /**
-     * All available engines.  Engines are borrowed and returned when a subclass is actually
-     * going to execute the engine on some data.  This allows us to have N copies for
-     * N data parallel executions, but without the dangerous code of having local
-     * ThreadLocal variables.
-     */
-    final LinkedList<TraversalEngine> availableTraversalEngines = new LinkedList<TraversalEngine>();
-
-    /**
-     * Engines that have been allocated to a key already.
-     */
-    final HashMap<Object, TraversalEngine> allocatedTraversalEngines = new HashMap<Object, TraversalEngine>();
-
-    /**
-     * Counts the number of instances of the class that are currently alive.
-     */
-    private static int instanceNumber = 0;
-
-    /**
-     * The engine invoking this scheduler.
-     */
-    protected final GenomeAnalysisEngine engine;
-
-    protected final IndexedFastaSequenceFile reference;
-
-    private final SAMDataSource reads;
-    protected final Collection<ReferenceOrderedDataSource> rods;
-
-    private final MBeanServer mBeanServer;
-    private final ObjectName mBeanName;
-
-    /**
-     * Threading efficiency monitor for tracking the resource utilization of the GATK
-     *
-     * may be null
-     */
-    ThreadEfficiencyMonitor threadEfficiencyMonitor = null;
-
-    /**
-     * MicroScheduler factory function.  Create a microscheduler appropriate for reducing the
-     * selected walker.
-     *
-     * @param walker        Which walker to use.
-     * @param reads         the informations associated with the reads
-     * @param reference     the reference file
-     * @param rods          the rods to include in the traversal
-     * @param threadAllocation Number of threads to utilize.
-     *
-     * @return The best-fit microscheduler.
-     */
-    public static MicroScheduler create(GenomeAnalysisEngine engine, Walker walker, SAMDataSource reads, IndexedFastaSequenceFile reference, Collection<ReferenceOrderedDataSource> rods, ThreadAllocation threadAllocation) {
-        if ( threadAllocation.isRunningInParallelMode() ) {
-            logger.info(String.format("Running the GATK in parallel mode with %d total threads, " +
-                    "%d CPU thread(s) for each of %d data thread(s), of %d processors available on this machine",
-                    threadAllocation.getTotalNumThreads(),
-                    threadAllocation.getNumCPUThreadsPerDataThread(),
-                    threadAllocation.getNumDataThreads(),
-                    Runtime.getRuntime().availableProcessors()));
-            if ( threadAllocation.getTotalNumThreads() > Runtime.getRuntime().availableProcessors() )
-                logger.warn(String.format("Number of requested GATK threads %d is more than the number of " +
-                        "available processors on this machine %d", threadAllocation.getTotalNumThreads(),
-                        Runtime.getRuntime().availableProcessors()));
-        }
-
-        if ( threadAllocation.getNumDataThreads() > 1 ) {
-            if (walker.isReduceByInterval())
-                throw new UserException.BadArgumentValue("nt", String.format("This run of %s is set up to aggregate results by interval.  Due to a current limitation of the GATK, analyses of this type do not currently support parallel execution.  Please run your analysis without the -nt option or check if this tool has an option to disable per-interval calculations.", engine.getWalkerName(walker.getClass())));
-
-            if ( ! (walker instanceof TreeReducible) ) {
-                throw badNT("nt", engine, walker);
-            }
-        }
-
-        if ( threadAllocation.getNumCPUThreadsPerDataThread() > 1 && ! (walker instanceof NanoSchedulable) ) {
-            throw badNT("nct", engine, walker);
-        }
-
-        if ( threadAllocation.getNumDataThreads() > 1 ) {
-            return new HierarchicalMicroScheduler(engine, walker, reads, reference, rods, threadAllocation);
-        } else {
-            return new LinearMicroScheduler(engine, walker, reads, reference, rods, threadAllocation);
-        }
-    }
-
-    private static UserException badNT(final String parallelArg, final GenomeAnalysisEngine engine, final Walker walker) {
-        throw new UserException.BadArgumentValue(parallelArg,
-                String.format("The analysis %s currently does not support parallel execution with %s.  " +
-                        "Please run your analysis without the %s option.", engine.getWalkerName(walker.getClass()), parallelArg, parallelArg));
-    }
-
-    /**
-     * Create a microscheduler given the reads and reference.
-     *
-     * @param walker  the walker to execute with
-     * @param reads   The reads.
-     * @param reference The reference.
-     * @param rods    the rods to include in the traversal
-     * @param threadAllocation the allocation of threads to use in the underlying traversal
-     */
-    protected MicroScheduler(final GenomeAnalysisEngine engine,
-                             final Walker walker,
-                             final SAMDataSource reads,
-                             final IndexedFastaSequenceFile reference,
-                             final Collection<ReferenceOrderedDataSource> rods,
-                             final ThreadAllocation threadAllocation) {
-        this.engine = engine;
-        this.reads = reads;
-        this.reference = reference;
-        this.rods = rods;
-
-        final File progressLogFile = engine.getArguments() == null ? null : engine.getArguments().performanceLog;
-
-        // Creates uninitialized TraversalEngines appropriate for walker and threadAllocation,
-        // and adds it to the list of created engines for later shutdown.
-        for ( int i = 0; i < threadAllocation.getNumDataThreads(); i++ ) {
-            final TraversalEngine traversalEngine = createTraversalEngine(walker, threadAllocation);
-            allCreatedTraversalEngines.add(traversalEngine);
-            availableTraversalEngines.add(traversalEngine);
-        }
-
-        // Create the progress meter, and register it with the analysis engine
-        engine.registerProgressMeter(new ProgressMeter(progressLogFile,
-                availableTraversalEngines.peek().getTraversalUnits(),
-                engine.getRegionsOfGenomeBeingProcessed()));
-
-        // Now that we have a progress meter, go through and initialize the traversal engines
-        for ( final TraversalEngine traversalEngine : allCreatedTraversalEngines )
-            traversalEngine.initialize(engine, walker, engine.getProgressMeter());
-
-        // JMX does not allow multiple instances with the same ObjectName to be registered with the same platform MXBean.
-        // To get around this limitation and since we have no job identifier at this point, register a simple counter that
-        // will count the number of instances of this object that have been created in this JVM.
-        int thisInstance = instanceNumber++;
-        mBeanServer = ManagementFactory.getPlatformMBeanServer();
-        try {
-            mBeanName = new ObjectName("org.broadinstitute.gatk.engine.executive:type=MicroScheduler,instanceNumber="+thisInstance);
-            mBeanServer.registerMBean(this, mBeanName);
-        }
-        catch (JMException ex) {
-            throw new ReviewedGATKException("Unable to register microscheduler with JMX", ex);
-        }
-    }
-
-    /**
-     * Really make us a traversal engine of the appropriate type for walker and thread allocation
-     *
-     * @return a non-null uninitialized traversal engine
-     */
-    @Ensures("result != null")
-    private TraversalEngine createTraversalEngine(final Walker walker, final ThreadAllocation threadAllocation) {
-        if (walker instanceof ReadWalker) {
-            return new TraverseReadsNano(threadAllocation.getNumCPUThreadsPerDataThread());
-        } else if (walker instanceof LocusWalker) {
-            return new TraverseLociNano(threadAllocation.getNumCPUThreadsPerDataThread());
-        } else if (walker instanceof DuplicateWalker) {
-            return new TraverseDuplicates();
-        } else if (walker instanceof ReadPairWalker) {
-            return new TraverseReadPairs();
-        } else if (walker instanceof ActiveRegionWalker) {
-            return new TraverseActiveRegions(threadAllocation.getNumCPUThreadsPerDataThread());
-        } else {
-            throw new UnsupportedOperationException("Unable to determine traversal type, the walker is an unknown type.");
-        }
-    }
-
-
-    /**
-     * Return the ThreadEfficiencyMonitor we are using to track our resource utilization, if there is one
-     *
-     * @return the monitor, or null if none is active
-     */
-    public ThreadEfficiencyMonitor getThreadEfficiencyMonitor() {
-        return threadEfficiencyMonitor;
-    }
-
-    /**
-     * Inform this Microscheduler to use the efficiency monitor used to create threads in subclasses
-     *
-     * @param threadEfficiencyMonitor
-     */
-    public void setThreadEfficiencyMonitor(final ThreadEfficiencyMonitor threadEfficiencyMonitor) {
-        this.threadEfficiencyMonitor = threadEfficiencyMonitor;
-    }
-
-    /**
-     * Should we stop all execution work and exit gracefully?
-     *
-     * Returns true in the case where some external signal or time limit has been received, indicating
-     * that this GATK shouldn't continue executing.  This isn't a kill signal, it is really a "shutdown
-     * gracefully at the next opportunity" signal.  Concrete implementations of the MicroScheduler
-     * examine this value as often as reasonable and, if it returns true, stop what they are doing
-     * at the next available opportunity, shutdown their resources, call notify done, and return.
-     *
-     * @return true if we should abort execution, or false otherwise
-     */
-    protected boolean abortExecution() {
-        final boolean abort = engine.exceedsRuntimeLimit();
-        if ( abort ) {
-            final AutoFormattingTime aft = new AutoFormattingTime(engine.getRuntimeLimitInNanoseconds(), -1, 4);
-            logger.info("Aborting execution (cleanly) because the runtime has exceeded the requested maximum " + aft);
-        }
-        return abort;
-    }
-
-    /**
-     * Walks a walker over the given list of intervals.
-     *
-     * @param walker        Computation to perform over dataset.
-     * @param shardStrategy A strategy for sharding the data.
-     *
-     * @return the return type of the walker
-     */
-    public abstract Object execute(Walker walker, Iterable<Shard> shardStrategy);
-
-    /**
-     * Tells this MicroScheduler that the execution of one of the subclass of this object as started
-     *
-     * Must be called when the implementation of execute actually starts up
-     *
-     * Currently only starts the progress meter timer running, but other start up activities could be incorporated
-     */
-    protected void startingExecution() {
-        engine.getProgressMeter().start();
-    }
-
-    /**
-     * Retrieves the object responsible for tracking and managing output.
-     * @return An output tracker, for loading data in and extracting results.  Will not be null.
-     */
-    public abstract OutputTracker getOutputTracker();
-
-    /**
-     * Gets the an iterator over the given reads, which will iterate over the reads in the given shard.
-     * @param shard the shard to use when querying reads.
-     * @return an iterator over the reads specified in the shard.
-     */
-    protected GATKSAMIterator getReadIterator(Shard shard) {
-        return (!reads.isEmpty()) ? reads.seek(shard) : new NullSAMIterator();
-    }
-
-    /**
-     * Must be called by subclasses when execute is done
-     */
-    protected void executionIsDone() {
-        engine.getProgressMeter().notifyDone(engine.getCumulativeMetrics().getNumIterations());
-        printReadFilteringStats();
-        shutdownTraversalEngines();
-
-        // Print out the threading efficiency of this HMS, if state monitoring is enabled
-        if ( threadEfficiencyMonitor != null ) {
-            // include the master thread information
-            threadEfficiencyMonitor.threadIsDone(Thread.currentThread());
-            threadEfficiencyMonitor.printUsageInformation(logger);
-        }
-    }
-
-    /**
-     * Shutdown all of the created engines, and clear the list of created engines, dropping
-     * pointers to the traversal engines
-     */
-    public synchronized void shutdownTraversalEngines() {
-        for ( final TraversalEngine te : allCreatedTraversalEngines)
-            te.shutdown();
-
-        allCreatedTraversalEngines.clear();
-        availableTraversalEngines.clear();
-    }
-
-    /**
-     * Prints out information about number of reads observed and filtering, if any reads were used in the traversal
-     *
-     * Looks like:
-     *
-     * INFO  10:40:47,370 MicroScheduler - 22 reads were filtered out during traversal out of 101 total (21.78%)
-     * INFO  10:40:47,370 MicroScheduler -   -> 1 reads (0.99% of total) failing BadMateFilter
-     * INFO  10:40:47,370 MicroScheduler -   -> 20 reads (19.80% of total) failing DuplicateReadFilter
-     * INFO  10:40:47,370 MicroScheduler -   -> 1 reads (0.99% of total) failing FailsVendorQualityCheckFilter
-     */
-    private void printReadFilteringStats() {
-        final ReadMetrics cumulativeMetrics = engine.getCumulativeMetrics();
-        if ( cumulativeMetrics.getNumReadsSeen() > 0 ) {
-            // count up the number of skipped reads by summing over all filters
-            long nSkippedReads = 0L;
-            for ( final long countsByFilter : cumulativeMetrics.getCountsByFilter().values())
-                nSkippedReads += countsByFilter;
-
-            logger.info(String.format("%d reads were filtered out during the traversal out of approximately %d total reads (%.2f%%)",
-                    nSkippedReads,
-                    cumulativeMetrics.getNumReadsSeen(),
-                    100.0 * MathUtils.ratio(nSkippedReads, cumulativeMetrics.getNumReadsSeen())));
-
-            for ( final Map.Entry<String, Long> filterCounts : cumulativeMetrics.getCountsByFilter().entrySet() ) {
-                long count = filterCounts.getValue();
-                logger.info(String.format("  -> %d reads (%.2f%% of total) failing %s",
-                        count, 100.0 * MathUtils.ratio(count,cumulativeMetrics.getNumReadsSeen()), filterCounts.getKey()));
-            }
-        }
-    }
-
-    /**
-     * Gets the engine that created this microscheduler.
-     * @return The engine owning this microscheduler.
-     */
-    public GenomeAnalysisEngine getEngine() { return engine; }
-
-    /**
-     * Returns data source maintained by this scheduler
-     * @return
-     */
-    public SAMDataSource getSAMDataSource() { return reads; }
-
-    /**
-     * Returns the reference maintained by this scheduler.
-     * @return The reference maintained by this scheduler.
-     */
-    public IndexedFastaSequenceFile getReference() { return reference; }
-
-    protected void cleanup() {
-        try {
-            mBeanServer.unregisterMBean(mBeanName);
-        }
-        catch (JMException ex) {
-            throw new ReviewedGATKException("Unable to unregister microscheduler with JMX", ex);
-        }
-    }
-
-    /**
-     * Returns a traversal engine suitable for use, associated with key
-     *
-     * Key is an arbitrary object that is used to retrieve the same traversal
-     * engine over and over.  This can be important in the case where the
-     * traversal engine has data associated with it in some other context,
-     * and we need to ensure that the context always sees the same traversal
-     * engine.  This happens in the HierarchicalMicroScheduler, where you want
-     * the a thread executing traversals to retrieve the same engine each time,
-     * as outputs are tracked w.r.t. that engine.
-     *
-     * If no engine is associated with key yet, pops the next available engine
-     * from the available ones maintained by this
-     * microscheduler.  Note that it's a runtime error to pop a traversal engine
-     * from this scheduler if there are none available.  Callers that
-     * once pop'd an engine for use must return it with returnTraversalEngine
-     *
-     * @param key the key to associate with this engine
-     * @return a non-null TraversalEngine suitable for execution in this scheduler
-     */
-    @Ensures("result != null")
-    protected synchronized TraversalEngine borrowTraversalEngine(final Object key) {
-        if ( key == null ) throw new IllegalArgumentException("key cannot be null");
-
-        final TraversalEngine engine = allocatedTraversalEngines.get(key);
-        if ( engine == null ) {
-            if ( availableTraversalEngines.isEmpty() )
-                throw new IllegalStateException("no traversal engines were available");
-            allocatedTraversalEngines.put(key, availableTraversalEngines.pop());
-            return allocatedTraversalEngines.get(key);
-        } else {
-            return engine;
-        }
-    }
-
-    /**
-     * Return a borrowed traversal engine to this MicroScheduler, for later use
-     * in another traversal execution
-     *
-     * @param key the key used to id the engine, provided to the borrowTraversalEngine function
-     * @param traversalEngine the borrowed traversal engine.  Must have been previously borrowed.
-     */
-    protected synchronized void returnTraversalEngine(final Object key, final TraversalEngine traversalEngine) {
-        if ( traversalEngine == null )
-            throw new IllegalArgumentException("Attempting to push a null traversal engine");
-        if ( ! allCreatedTraversalEngines.contains(traversalEngine) )
-            throw new IllegalArgumentException("Attempting to push a traversal engine not created by this MicroScheduler" + engine);
-        if ( ! allocatedTraversalEngines.containsKey(key) )
-            throw new IllegalArgumentException("No traversal engine was never checked out with key " + key);
-
-        // note there's nothing to actually do here, but a function implementation
-        // might want to do something
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/executive/MicroSchedulerMBean.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/executive/MicroSchedulerMBean.java
deleted file mode 100644
index 772fe01..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/executive/MicroSchedulerMBean.java
+++ /dev/null
@@ -1,37 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.executive;
-
-/**
- * Created by IntelliJ IDEA.
- * User: mhanna
- * Date: Jan 12, 2011
- * Time: 9:19:27 PM
- * To change this template use File | Settings | File Templates.
- */
-public interface MicroSchedulerMBean {
-    // has nothing because we don't have anything we currently track
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/executive/OutputMergeTask.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/executive/OutputMergeTask.java
deleted file mode 100644
index 4e5ef9f..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/executive/OutputMergeTask.java
+++ /dev/null
@@ -1,102 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.executive;
-
-import org.broadinstitute.gatk.engine.io.storage.Storage;
-
-import java.util.ArrayList;
-import java.util.Collection;
-
-/**
- * User: hanna
- * Date: Apr 30, 2009
- * Time: 4:04:38 PM
- * BROAD INSTITUTE SOFTWARE COPYRIGHT NOTICE AND AGREEMENT
- * Software and documentation are copyright 2005 by the Broad Institute.
- * All rights are reserved.
- *
- * Users acknowledge that this software is supplied without any warranty or support.
- * The Broad Institute is not responsible for its use, misuse, or
- * functionality.
- */
-
-/**
- * Hold pointers to the output and error streams, and state to indicate whether
- * a write is complete.  Not generally thread-safe.  Calls to isComplete()/complete()
- * can be made at any time from any thread, but complete() should be called on the
- * thread which is doing the writing. 
- */
-public class OutputMergeTask {
-    /**
-     * The output streams which should be written to.
-     */
-    private final Collection<MergeOperation<?>> mergeOperations = new ArrayList<MergeOperation<?>>();
-
-    /**
-     * Add a new merge operation to this merge task.
-     * @param targetStream Target for stream output.
-     * @param temporaryStorage Temporary storage.
-     * @param <StreamType> Type of the output stream.
-     */
-    public <StreamType> void addMergeOperation( StreamType targetStream, Storage<StreamType> temporaryStorage ) {
-        mergeOperations.add( new MergeOperation<StreamType>(targetStream,temporaryStorage) );
-    }
-
-    /**
-     * Merge data from output streams into target storage.
-     */
-    public synchronized void merge() {
-        for( MergeOperation mergeOperation: mergeOperations )
-            mergeOperation.temporaryStorage.mergeInto(mergeOperation.targetStream);
-    }
-
-    /**
-     * Represents a single file needed to be merged.
-     * @param <StreamType> Type of the file to be merged.
-     */
-    private class MergeOperation<StreamType> {
-        /**
-         * Destination for the temporary file's output.
-         */
-        public final StreamType targetStream;
-
-        /**
-         * Temporary storage location for the file.
-         */
-        public final Storage<StreamType> temporaryStorage;
-
-        /**
-         * Create a new merge file object with the given output stream and storage placeholder.
-         * @param targetStream Target for temporary data.
-         * @param temporaryStorage The temporary data itself.
-         */
-        public MergeOperation( StreamType targetStream, Storage<StreamType> temporaryStorage ) {
-            this.targetStream = targetStream;
-            this.temporaryStorage = temporaryStorage;
-        }
-    }
-}
-
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/executive/ReduceTree.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/executive/ReduceTree.java
deleted file mode 100644
index e02b846..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/executive/ReduceTree.java
+++ /dev/null
@@ -1,187 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.executive;
-
-import java.util.ArrayList;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.Queue;
-import java.util.concurrent.Future;
-/**
- * User: hanna
- * Date: Apr 28, 2009
- * Time: 11:09:29 AM
- * BROAD INSTITUTE SOFTWARE COPYRIGHT NOTICE AND AGREEMENT
- * Software and documentation are copyright 2005 by the Broad Institute.
- * All rights are reserved.
- *
- * Users acknowledge that this software is supplied without any warranty or support.
- * The Broad Institute is not responsible for its use, misuse, or
- * functionality.
- */
-
-/**
- * A tree for organizing reduce results and detecting when enough dependencies
- * are resolved for a reduce to be scheduled.  The tree can trigger a callback
- * whenever it believes a reduce operation is pending.
- *
- * Not thread-safe.  All calls should be made sequentially from the same thread.
- */
-public class ReduceTree {
-    /**
-     * Data structure for the tree.  Each entry in the outer list represents a level
-     * of the tree, and each entry in the inner queues represent nodes in that level.
-     *
-     * Whenever a reduce can happen, the entries to be reduced are pulled out of
-     * their slots in level n of the tree and the composite entry is added to level n+1.
-     */
-    private List<Queue<Future>> treeNodes = new ArrayList<Queue<Future>>();
-
-    /**
-     * The entire contents have been added to the tree.  Completely schedule the reductions.
-     */
-    private boolean treeComplete = false;
-
-    /**
-     * Called to indicate that all data required to perform a given reduce has been scheduled.
-     */
-    private TreeReduceNotifier treeReduceNotifier = null;
-
-    /**
-     * Creates a ReduceTree.
-     * @param notifier A callback indicating that all data required to perform a given reduce has been scheduled.
-     */
-    public ReduceTree( TreeReduceNotifier notifier ) {
-        this.treeReduceNotifier = notifier;
-    }
-
-    /**
-     * A callback indicating that all computations have been scheduled to complete the given reduce.
-     */
-    public interface TreeReduceNotifier {
-        /**
-         * Indicates that a reduce is ready to happen.
-         * @param lhs Left-hand side of the tree reduce.
-         * @param rhs Right-hand side of the tree reduce.
-         * @return The future result of the computation reduce(lhs,rhs)
-         */
-        Future notifyReduce( Future lhs, Future rhs );
-    }
-
-    /**
-     * Add an entry to the list of data to be reduced.  The results of entry.get() will
-     * be scheduled for reduction with neighboring elements.
-     * @param entry Entry to be paired with other elements.
-     */
-    public void addEntry( Future entry ) {
-        addNodeAtLevel( entry, 0 );
-    }
-
-    /**
-     * Signal to the ReduceTree that all possible data has been added and it should reduce
-     * as much as is possible.
-     */
-    public void complete() {
-        treeComplete = true;
-        reduce();
-    }
-
-    /**
-     * Gets the placeholder for the final result of the tree reduce.
-     * @return Future whose get() method will return the final result.  Null if nothing has been added.
-     */
-    public Future getResult() {
-        if( !treeComplete )
-            throw new IllegalStateException( "Cannot get the final result for an incomplete tree.");
-
-        // If nothing has been added to the tree, return null.
-        if( treeNodes.size() == 0 )
-            return null;
-
-        // Assert that there aren't any pending computations that were forgotten along the way.
-        for( int i = 0; i < treeNodes.size() - 2; i++ ) {
-            if( treeNodes.get(i).size() > 0 )
-                throw new IllegalStateException( "Some inner reduces were missed along the way.");
-        }
-
-        Queue<Future> lastLevel = treeNodes.get(treeNodes.size() - 1);
-
-        // Assert that there's only one reduce left at the last level.
-        if( lastLevel.size() != 1 )
-            throw new IllegalStateException( "Invalid number of entries at the tip of the tree: " + lastLevel.size() );
-
-        // Get the placeholder for the last result.
-        return lastLevel.element();
-    }
-
-    /**
-     * Recursively collapse the tree whereever possible.
-     */
-    protected void reduce() {
-        reduce( 0 );
-    }
-
-    /**
-     * Recursively collapse the tree, starting at the specified level.
-     * @param level Level at which to start reducing.
-     */
-    private void reduce( int level ) {
-        // base case for recursion.
-        if( treeNodes.size() <= level )
-            return;
-
-        Queue<Future> treeLevel = treeNodes.get(level);
-
-        while( treeLevel.size() >= 2 ) {
-            addNodeAtLevel( treeReduceNotifier.notifyReduce( treeLevel.remove(), treeLevel.remove() ), level + 1 );
-        }
-
-        if( treeLevel.size() == 1 && treeComplete && !isDeepestLevel(level) ) {
-            Future element = treeLevel.remove();
-            addNodeAtLevel( element, level + 1 );
-        }
-
-        reduce( level + 1 );
-    }
-
-    private boolean isDeepestLevel( int level ) {
-        return level == (treeNodes.size() - 1);
-    }
-
-    /**
-     * Add the given node to the tree at the corresponding level.  Create the level
-     * if it doesn't exist.
-     * @param node Node to add.  Must not be null.
-     * @param level Level number at which to add.  0-based index into treeNodes list.
-     */
-    protected void addNodeAtLevel( Future node, int level ) {
-        while( treeNodes.size() <= level )
-            treeNodes.add( new LinkedList<Future>() );
-        treeNodes.get(level).add(node);
-        reduce(level);
-    }
-
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/executive/ShardTraverser.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/executive/ShardTraverser.java
deleted file mode 100644
index 443fdf7..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/executive/ShardTraverser.java
+++ /dev/null
@@ -1,163 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.executive;
-
-import org.apache.log4j.Logger;
-import org.broadinstitute.gatk.engine.datasources.providers.LocusShardDataProvider;
-import org.broadinstitute.gatk.engine.datasources.providers.ShardDataProvider;
-import org.broadinstitute.gatk.engine.datasources.reads.Shard;
-import org.broadinstitute.gatk.engine.io.ThreadGroupOutputTracker;
-import org.broadinstitute.gatk.engine.traversals.TraversalEngine;
-import org.broadinstitute.gatk.engine.walkers.Walker;
-import org.broadinstitute.gatk.utils.Utils;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-
-import java.util.concurrent.Callable;
-/**
- * User: hanna
- * Date: Apr 29, 2009
- * Time: 4:40:38 PM
- * BROAD INSTITUTE SOFTWARE COPYRIGHT NOTICE AND AGREEMENT
- * Software and documentation are copyright 2005 by the Broad Institute.
- * All rights are reserved.
- *
- * Users acknowledge that this software is supplied without any warranty or support.
- * The Broad Institute is not responsible for its use, misuse, or
- * functionality.
- */
-/**
- * Carries the walker over a given shard, in a callable interface.
- */
-public class ShardTraverser implements Callable {
-    final private HierarchicalMicroScheduler microScheduler;
-    final private Walker walker;
-    final private Shard shard;
-    final private ThreadGroupOutputTracker outputTracker;
-    private OutputMergeTask outputMergeTask;
-
-    /** our log, which we want to capture anything from this class */
-    final protected static Logger logger = Logger.getLogger(ShardTraverser.class);
-
-    /**
-     * Is this traversal complete?
-     */
-    private boolean complete = false;
-
-    public ShardTraverser( HierarchicalMicroScheduler microScheduler,
-                           Walker walker,
-                           Shard shard,
-                           ThreadGroupOutputTracker outputTracker) {
-        this.microScheduler = microScheduler;
-        this.walker = walker;
-        this.shard = shard;
-        this.outputTracker = outputTracker;
-    }
-
-    public Object call() {
-        final Object traversalEngineKey = Thread.currentThread();
-        final TraversalEngine traversalEngine = microScheduler.borrowTraversalEngine(traversalEngineKey);
-
-        try {
-            final long startTime = System.currentTimeMillis();
-
-            // this is CRITICAL -- initializes output maps in this master thread,
-            // so that any subthreads created by the traversal itself can access this map
-            outputTracker.initializeStorage();
-
-            Object accumulator = walker.reduceInit();
-            final WindowMaker windowMaker = new WindowMaker(shard,microScheduler.getEngine().getGenomeLocParser(),
-                    microScheduler.getReadIterator(shard),
-                    shard.getGenomeLocs(),
-                    microScheduler.engine.getSampleDB().getSampleNames()); // todo: microScheduler.engine is protected - is it okay to user it here?
-
-            for(WindowMaker.WindowMakerIterator iterator: windowMaker) {
-                final ShardDataProvider dataProvider = new LocusShardDataProvider(shard,iterator.getSourceInfo(),microScheduler.getEngine().getGenomeLocParser(),iterator.getLocus(),iterator,microScheduler.reference,microScheduler.rods);
-                accumulator = traversalEngine.traverse(walker, dataProvider, accumulator);
-                dataProvider.close();
-            }
-
-            windowMaker.close();
-            outputMergeTask = outputTracker.closeStorage();
-
-            final long endTime = System.currentTimeMillis();
-
-            microScheduler.reportShardTraverseTime(endTime-startTime);
-
-            return accumulator;
-        } catch(Throwable t) {
-            // Notify that an exception has occurred and rethrow it.
-            throw microScheduler.notifyOfTraversalError(t);
-        } finally {
-            synchronized(this) {
-                complete = true;
-                microScheduler.returnTraversalEngine(traversalEngineKey, traversalEngine);
-                notifyAll();
-            }
-        }
-    }
-
-    /**
-     * Return a human readable string describing the intervals this traverser is operating on
-     * @return
-     */
-    public String getIntervalsString() {
-        return Utils.join(",", shard.getGenomeLocs());
-    }
-
-    /**
-     * Has this traversal completed?
-     * @return True if completed, false otherwise.
-     */
-    public boolean isComplete() {
-        synchronized(this) {
-            return complete;
-        }
-    }
-
-   /**
-     * Waits for any the given OutputMerger to be ready for merging.
-     */
-    public void waitForComplete() {
-        try {
-            synchronized(this) {
-                if( isComplete() )
-                    return;
-                wait();
-            }
-        }
-        catch( InterruptedException ex ) {
-            throw new ReviewedGATKException("Interrupted while waiting for more output to be finalized.",ex);
-        }
-    }
-
-    /**
-     * Gets the output merge task associated with the given shard.
-     * @return OutputMergeTask if one exists; null if nothing needs to be merged.
-     */
-    public OutputMergeTask getOutputMergeTask() {
-        return outputMergeTask;
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/executive/TreeReducer.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/executive/TreeReducer.java
deleted file mode 100644
index 270b06f..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/executive/TreeReducer.java
+++ /dev/null
@@ -1,127 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.executive;
-
-import org.broadinstitute.gatk.engine.walkers.TreeReducible;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-
-import java.util.concurrent.Callable;
-import java.util.concurrent.ExecutionException;
-import java.util.concurrent.Future;
-/**
- * User: hanna
- * Date: Apr 29, 2009
- * Time: 4:47:35 PM
- * BROAD INSTITUTE SOFTWARE COPYRIGHT NOTICE AND AGREEMENT
- * Software and documentation are copyright 2005 by the Broad Institute.
- * All rights are reserved.
- *
- * Users acknowledge that this software is supplied without any warranty or support.
- * The Broad Institute is not responsible for its use, misuse, or
- * functionality.
- */
-
-/**
- * Represents a future reduce...a reduce that will be ready at some point in the future.
- * Provides services for indicating when all data is prepared for the reduce a callable
- * interface to force the reduce.
- */
-public class TreeReducer implements Callable {
-    final private HierarchicalMicroScheduler microScheduler;
-    private TreeReducible walker;
-    private Future lhs;
-    private Future rhs;
-
-    /**
-     * Create a full tree reduce.  Combine this two results using an unspecified walker at some point in the future.
-     * @param microScheduler The parent hierarchical microscheduler for this reducer.
-     * @param lhs Left-hand side of the reduce.
-     * @param rhs Right-hand side of the reduce.
-     */
-    public TreeReducer( HierarchicalMicroScheduler microScheduler, Future lhs, Future rhs ) {
-        this.microScheduler = microScheduler;
-        this.lhs = lhs;
-        this.rhs = rhs;
-    }
-
-    /**
-     * Provide a walker for the future reduce.
-     * @param walker walker to use when performing the reduce.
-     */
-    public void setWalker( TreeReducible walker ) {
-        this.walker = walker;
-    }
-
-    /**
-     * Is the data ready for reduce?  True if lhs and rhs have already been resolved.
-     * @return True if data is ready and waiting, false otherwise.
-     */
-    public boolean isReadyForReduce() {
-        if( lhs == null )
-            throw new IllegalStateException(String.format("Insufficient data on which to reduce; lhs = %s, rhs = %s", lhs, rhs) );
-
-        return lhs.isDone() && (rhs == null || rhs.isDone());
-    }
-
-    /**
-     * Returns the value of the reduce.  If not isReadyForReduce(), this call will until all entries become ready.
-     * @return Result of the reduce.
-     */
-    public Object call() {
-        Object result;
-
-        final long startTime = System.currentTimeMillis();
-
-        try {
-            if( lhs == null )
-                result = null;
-                // todo -- what the hell is this above line?  Shouldn't it be the two below?
-//            if( lhs == null )
-//                throw new IllegalStateException(String.format("Insufficient data on which to reduce; lhs = %s, rhs = %s", lhs, rhs) );
-            else
-                result = walker.treeReduce( lhs.get(), rhs.get() );
-        }
-        catch( InterruptedException ex ) {
-            microScheduler.notifyOfTraversalError(ex);
-            throw new ReviewedGATKException("Hierarchical reduce interrupted", ex);
-        }
-        catch( ExecutionException ex ) {
-            microScheduler.notifyOfTraversalError(ex);
-            throw new ReviewedGATKException("Hierarchical reduce failed", ex);
-        }
-
-        final long endTime = System.currentTimeMillis();
-
-        // Constituent bits of this tree reduces are no longer required.  Throw them away.
-        this.lhs = null;
-        this.rhs = null;
-
-        microScheduler.reportTreeReduceTime( endTime - startTime );
-
-        return result;
-    }
-}
-
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/executive/WindowMaker.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/executive/WindowMaker.java
deleted file mode 100644
index c848329..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/executive/WindowMaker.java
+++ /dev/null
@@ -1,217 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.executive;
-
-import htsjdk.samtools.util.PeekableIterator;
-import org.broadinstitute.gatk.engine.ReadProperties;
-import org.broadinstitute.gatk.engine.contexts.AlignmentContext;
-import org.broadinstitute.gatk.engine.datasources.reads.Shard;
-import org.broadinstitute.gatk.engine.iterators.GATKSAMRecordIterator;
-import org.broadinstitute.gatk.engine.iterators.GATKSAMIterator;
-import org.broadinstitute.gatk.utils.GenomeLoc;
-import org.broadinstitute.gatk.utils.GenomeLocParser;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-import org.broadinstitute.gatk.utils.locusiterator.LocusIterator;
-import org.broadinstitute.gatk.utils.locusiterator.LocusIteratorByState;
-import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
-
-import java.util.Collection;
-import java.util.Iterator;
-import java.util.List;
-import java.util.NoSuchElementException;
-
-/**
- * Transforms an iterator of reads which overlap the given interval list into an iterator of covered single-base loci
- * completely contained within the interval list.  To do this, it creates a LocusIteratorByState which will emit a single-bp
- * locus for every base covered by the read iterator, then uses the WindowMakerIterator.advance() to filter down that stream of
- * loci to only those covered by the given interval list.
- *
- * Example:
- * Incoming stream of reads: A:chr20:1-5, B:chr20:2-6, C:chr20:2-7, D:chr20:3-8, E:chr20:5-10
- * Incoming intervals: chr20:3-7
- *
- * Locus iterator by state will produce the following stream of data:
- *  chr1:1 {A}, chr1:2 {A,B,C}, chr1:3 {A,B,C,D}, chr1:4 {A,B,C,D}, chr1:5 {A,B,C,D,E},
- *  chr1:6 {B,C,D,E}, chr1:7 {C,D,E}, chr1:8 {D,E}, chr1:9 {E}, chr1:10 {E}
- *
- * WindowMakerIterator will then filter the incoming stream, emitting the following stream:
- *  chr1:3 {A,B,C,D}, chr1:4 {A,B,C,D}, chr1:5 {A,B,C,D,E}, chr1:6 {B,C,D,E}, chr1:7 {C,D,E}
- *
- * @author mhanna
- * @version 0.1
- */
-public class WindowMaker implements Iterable<WindowMaker.WindowMakerIterator>, Iterator<WindowMaker.WindowMakerIterator> {
-    /**
-     * Source information for iteration.
-     */
-    private final ReadProperties sourceInfo;
-
-    /**
-     * Hold the read iterator so that it can be closed later.
-     */
-    private final GATKSAMRecordIterator readIterator;
-
-    /**
-     * The data source for reads.  Will probably come directly from the BAM file.
-     */
-    private final PeekableIterator<AlignmentContext> sourceIterator;
-
-    /**
-     * Stores the sequence of intervals that the windowmaker should be tracking.
-     */
-    private final PeekableIterator<GenomeLoc> intervalIterator;
-
-    /**
-     * In the case of monolithic sharding, this case returns whether the only shard has been generated.
-     */
-    private boolean shardGenerated = false;
-
-    /**
-     * The alignment context to return from this shard's iterator.  Lazy implementation: the iterator will not find the
-     * currentAlignmentContext until absolutely required to do so.   If currentAlignmentContext is null and advance()
-     * doesn't populate it, no more elements are available.  If currentAlignmentContext is non-null, currentAlignmentContext
-     * should be returned by next().
-     */
-    private AlignmentContext currentAlignmentContext;
-
-    /**
-     * Create a new window maker with the given iterator as a data source, covering
-     * the given intervals.
-     * @param iterator The data source for this window.
-     * @param intervals The set of intervals over which to traverse.
-     * @param sampleNames The complete set of sample names in the reads in shard
-     */
-
-    private final LocusIteratorByState libs;
-
-    public WindowMaker(Shard shard, GenomeLocParser genomeLocParser, GATKSAMIterator iterator, List<GenomeLoc> intervals, Collection<String> sampleNames) {
-        this.sourceInfo = shard.getReadProperties();
-        this.readIterator = new GATKSAMRecordIterator(iterator);
-
-        this.libs = new LocusIteratorByState(readIterator,sourceInfo,genomeLocParser,sampleNames);
-        this.sourceIterator = new PeekableIterator<AlignmentContext>(libs);
-
-        this.intervalIterator = intervals.size()>0 ? new PeekableIterator<GenomeLoc>(intervals.iterator()) : null;
-    }
-
-    public WindowMaker(Shard shard, GenomeLocParser genomeLocParser, GATKSAMIterator iterator, List<GenomeLoc> intervals ) {
-        this(shard, genomeLocParser, iterator, intervals, LocusIteratorByState.sampleListForSAMWithoutReadGroups());
-    }
-
-    public Iterator<WindowMakerIterator> iterator() {
-        return this;
-    }
-
-    public boolean hasNext() {
-        return (intervalIterator != null && intervalIterator.hasNext()) || !shardGenerated;
-    }
-
-    public WindowMakerIterator next() {
-        shardGenerated = true;
-        return new WindowMakerIterator(intervalIterator != null ? intervalIterator.next() : null);
-    }
-
-    public void remove() {
-        throw new UnsupportedOperationException("Cannot remove from a window maker.");
-    }
-
-    public void close() {
-        this.readIterator.close();
-    }
-
-    public class WindowMakerIterator extends LocusIterator {
-        /**
-         * The locus for which this iterator is currently returning reads.
-         */
-        private final GenomeLoc locus;
-
-        public WindowMakerIterator(GenomeLoc locus) {
-            this.locus = locus;
-            advance();
-        }
-
-        public ReadProperties getSourceInfo() {
-            return sourceInfo;
-        }
-
-        public GenomeLoc getLocus() {
-            return locus;
-        }
-
-        public WindowMakerIterator iterator() {
-            return this;
-        }
-
-        public boolean hasNext() {
-            advance();
-            return currentAlignmentContext != null;
-        }
-
-        public AlignmentContext next() {
-            if(!hasNext()) throw new NoSuchElementException("WindowMakerIterator is out of elements for this interval.");
-
-            // Consume this alignment context.
-            AlignmentContext toReturn = currentAlignmentContext;
-            currentAlignmentContext = null;
-
-            // Return the current element.
-            return toReturn;
-        }
-
-        private void advance() {
-            // Need to find the next element that is not past shard boundaries.  If we travel past the edge of
-            // shard boundaries, stop and let the next interval pick it up.
-            while(currentAlignmentContext == null && sourceIterator.hasNext()) {
-                // Advance the iterator and try again.
-                AlignmentContext candidateAlignmentContext = sourceIterator.peek();
-
-                if(locus == null) {
-                    // No filter present.  Return everything that LocusIteratorByState provides us.
-                    currentAlignmentContext = sourceIterator.next();
-                }
-                else if(locus.isPast(candidateAlignmentContext.getLocation()))
-                    // Found a locus before the current window; claim this alignment context and throw it away.
-                    sourceIterator.next();
-                else if(locus.containsP(candidateAlignmentContext.getLocation())) {
-                    // Found a locus within the current window; claim this alignment context and call it the next entry.
-                    currentAlignmentContext = sourceIterator.next();
-                }
-                else if(locus.isBefore(candidateAlignmentContext.getLocation())) {
-                    // Whoops.  Skipped passed the end of the region.  Iteration for this window is complete.  Do
-                    // not claim this alignment context in case it is part of the next shard.
-                    break;
-                }
-                else
-                    throw new ReviewedGATKException("BUG: filtering locus does not contain, is not before, and is not past the given alignment context");
-            }
-        }
-
-        @Override
-        public LocusIteratorByState getLIBS() {
-            return libs;
-        }
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/executive/package-info.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/executive/package-info.java
deleted file mode 100644
index c0d6e9d..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/executive/package-info.java
+++ /dev/null
@@ -1,26 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.executive;
\ No newline at end of file
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/filters/BadCigarFilter.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/filters/BadCigarFilter.java
deleted file mode 100644
index fce3a71..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/filters/BadCigarFilter.java
+++ /dev/null
@@ -1,122 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.filters;
-
-import htsjdk.samtools.Cigar;
-import htsjdk.samtools.CigarElement;
-import htsjdk.samtools.CigarOperator;
-import htsjdk.samtools.SAMRecord;
-
-import java.util.Iterator;
-
-/**
- * Filter out reads with wonky cigar strings.
- *
- *  - No reads with Hard/Soft clips in the middle of the cigar
- *  - No reads starting with deletions (with or without preceding clips)
- *  - No reads ending in deletions (with or without follow-up clips)
- *  - No reads that are fully hard or soft clipped
- *  - No reads that have consecutive indels in the cigar (II, DD, ID or DI)
- *
- *  ps: apparently an empty cigar is okay...
- *
- * @author ebanks
- * @version 0.1
- */
-
-public class BadCigarFilter extends ReadFilter {
-
-    public boolean filterOut(final SAMRecord rec) {
-        final Cigar c = rec.getCigar();
-
-        // if there is no Cigar then it can't be bad
-        if( c.isEmpty() ) {
-            return false;
-        }
-
-        Iterator<CigarElement> elementIterator = c.getCigarElements().iterator();
-
-        CigarOperator firstOp = CigarOperator.H;
-        while (elementIterator.hasNext() && (firstOp == CigarOperator.H || firstOp == CigarOperator.S)) {
-            CigarOperator op = elementIterator.next().getOperator();
-
-            // No reads with Hard/Soft clips in the middle of the cigar
-            if (firstOp != CigarOperator.H && op == CigarOperator.H) {
-                    return true;
-            }
-            firstOp = op;
-        }
-
-        // No reads starting with deletions (with or without preceding clips)
-        if (firstOp == CigarOperator.D) {
-            return true;
-        }
-
-        boolean hasMeaningfulElements = (firstOp != CigarOperator.H && firstOp != CigarOperator.S);
-        boolean previousElementWasIndel = firstOp == CigarOperator.I;
-        CigarOperator lastOp = firstOp;
-        CigarOperator previousOp = firstOp;
-
-        while (elementIterator.hasNext()) {
-            CigarOperator op = elementIterator.next().getOperator();
-
-            if (op != CigarOperator.S && op != CigarOperator.H) {
-
-                // No reads with Hard/Soft clips in the middle of the cigar
-                if (previousOp == CigarOperator.S || previousOp == CigarOperator.H)
-                    return true;
-
-                lastOp = op;
-
-                if (!hasMeaningfulElements && op.consumesReadBases()) {
-                    hasMeaningfulElements = true;
-                }
-
-                if (op == CigarOperator.I || op == CigarOperator.D) {
-
-                    // No reads that have consecutive indels in the cigar (II, DD, ID or DI)
-                    if (previousElementWasIndel) {
-                        return true;
-                    }
-                    previousElementWasIndel = true;
-                }
-                else {
-                    previousElementWasIndel = false;
-                }
-            }
-            // No reads with Hard/Soft clips in the middle of the cigar
-            else if (op == CigarOperator.S && previousOp == CigarOperator.H) {
-                return true;
-            }
-
-            previousOp = op;
-        }
-
-        // No reads ending in deletions (with or without follow-up clips)
-        // No reads that are fully hard or soft clipped
-        return lastOp == CigarOperator.D || !hasMeaningfulElements;
-    }
-}
\ No newline at end of file
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/filters/BadMateFilter.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/filters/BadMateFilter.java
deleted file mode 100644
index c25d8d9..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/filters/BadMateFilter.java
+++ /dev/null
@@ -1,47 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.filters;
-
-import htsjdk.samtools.SAMRecord;
-
-/**
- * Filter out reads whose mate maps to a different contig.
- *
- * @author ebanks
- * @version 0.1
- */
-
-public class BadMateFilter extends ReadFilter {
-
-    public boolean filterOut(final SAMRecord rec) {
-        return hasBadMate(rec);
-    }
-
-    public static boolean hasBadMate(final SAMRecord rec) {
-        return (rec.getReadPairedFlag() && !rec.getMateUnmappedFlag() && !rec.getReferenceIndex().equals(rec.getMateReferenceIndex()));
-    }
-
-}
\ No newline at end of file
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/filters/CountingFilteringIterator.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/filters/CountingFilteringIterator.java
deleted file mode 100644
index 8717f1f..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/filters/CountingFilteringIterator.java
+++ /dev/null
@@ -1,150 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.filters;
-
-import htsjdk.samtools.filter.SamRecordFilter;
-import htsjdk.samtools.SAMRecord;
-import htsjdk.samtools.util.CloseableIterator;
-import htsjdk.samtools.util.CloserUtil;
-import org.broadinstitute.gatk.engine.ReadMetrics;
-
-import java.util.*;
-
-/**
- * Filtering Iterator which takes a filter and an iterator and iterates
- * through only those records which are not rejected by the filter.
- * @author Mark DePristo
- */
-public class CountingFilteringIterator implements CloseableIterator<SAMRecord> {
-    private final ReadMetrics globalRuntimeMetrics;
-    private final ReadMetrics privateRuntimeMetrics;
-    private final Iterator<SAMRecord> iterator;
-    private final List<CountingReadFilter> filters = new ArrayList<>();
-    private SAMRecord next = null;
-
-    // wrapper around ReadFilters to count the number of filtered reads
-    private final class CountingReadFilter extends ReadFilter {
-        protected final ReadFilter readFilter;
-        protected long counter = 0L;
-
-        public CountingReadFilter(final ReadFilter readFilter) {
-            this.readFilter = readFilter;
-        }
-
-        @Override
-        public boolean filterOut(final SAMRecord record) {
-            final boolean result = readFilter.filterOut(record);
-            if ( result )
-                counter++;
-            return result;
-        }
-    }
-
-    /**
-     * Constructor
-     *
-     * @param metrics   metrics to accumulate on the nature of filtered reads.
-     * @param iterator  the backing iterator
-     * @param filters    the filter (which may be a FilterAggregator)
-     */
-    public CountingFilteringIterator(ReadMetrics metrics, Iterator<SAMRecord> iterator, Collection<ReadFilter> filters) {
-        this.globalRuntimeMetrics = metrics;
-        privateRuntimeMetrics = new ReadMetrics();
-        this.iterator = iterator;
-        for ( final ReadFilter filter : filters )
-            this.filters.add(new CountingReadFilter(filter));
-        next = getNextRecord();
-    }
-
-    /**
-     * Returns true if the iteration has more elements.
-     *
-     * @return  true if the iteration has more elements.  Otherwise returns false.
-     */
-    public boolean hasNext() {
-        return next != null;
-    }
-
-    /**
-     * Returns the next element in the iteration.
-     *
-     * @return  the next element in the iteration
-     * @throws java.util.NoSuchElementException
-     */
-    public SAMRecord next() {
-        if (next == null) {
-            throw new NoSuchElementException("Iterator has no more elements.");
-        }
-        final SAMRecord result = next;
-        next = getNextRecord();
-        return result;
-    }
-
-    /**
-     * Required method for Iterator API.
-     *
-     * @throws UnsupportedOperationException
-     */
-    public void remove() {
-        throw new UnsupportedOperationException("Remove() not supported by CountingFilteringIterator");
-    }
-
-    public void close() {
-        CloserUtil.close(iterator);
-
-        for ( final CountingReadFilter filter : filters )
-            privateRuntimeMetrics.setFilterCount(filter.readFilter.getClass().getSimpleName(), filter.counter);
-        // update the global metrics with all the data we collected here
-        globalRuntimeMetrics.incrementMetrics(privateRuntimeMetrics);
-    }
-
-    /**
-     * Gets the next record from the underlying iterator that passes the filter
-     *
-     * @return SAMRecord    the next filter-passing record
-     */
-    private SAMRecord getNextRecord() {
-        while (iterator.hasNext()) {
-            SAMRecord record = iterator.next();
-
-            // update only the private copy of the metrics so that we don't need to worry about race conditions
-            // that can arise when trying to update the global copy; it was agreed that this is the cleanest solution.
-            privateRuntimeMetrics.incrementNumReadsSeen();
-
-            boolean filtered = false;
-            for(SamRecordFilter filter: filters) {
-                if(filter.filterOut(record)) {
-                    filtered = true;
-                    break;
-                }
-            }
-
-            if(!filtered) return record;
-        }
-
-        return null;
-    }
-}
\ No newline at end of file
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/filters/DuplicateReadFilter.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/filters/DuplicateReadFilter.java
deleted file mode 100644
index 52861e2..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/filters/DuplicateReadFilter.java
+++ /dev/null
@@ -1,66 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.filters;
-
-import htsjdk.samtools.SAMRecord;
-
-/*
- * Copyright (c) 2009 The Broad Institute
- *
- * Permission is hereby granted, free of charge, to any person
- * obtaining a copy of this software and associated documentation
- * files (the "Software"), to deal in the Software without
- * restriction, including without limitation the rights to use,
- * copy, modify, merge, publish, distribute, sublicense, and/or sell
- * copies of the Software, and to permit persons to whom the
- * Software is furnished to do so, subject to the following
- * conditions:
- *
- * The above copyright notice and this permission notice shall be
- * included in all copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
- * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
- * OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
- * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
- * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
- * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
- * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
- * OTHER DEALINGS IN THE SOFTWARE.
- */
-
-/**
- * Filter out duplicate reads.
- *
- * @author rpoplin
- * @since Dec 9, 2009
- */
-
-public class DuplicateReadFilter extends ReadFilter {
-    public boolean filterOut( final SAMRecord read ) {
-        return read.getDuplicateReadFlag();
-    }
-}
\ No newline at end of file
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/filters/FailsVendorQualityCheckFilter.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/filters/FailsVendorQualityCheckFilter.java
deleted file mode 100644
index 2cc5e2a..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/filters/FailsVendorQualityCheckFilter.java
+++ /dev/null
@@ -1,41 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.filters;
-
-import htsjdk.samtools.SAMRecord;
-
-/**
- * Filter out reads that fail the vendor quality check.
- *
- * @author rpoplin
- * @since Jul 19, 2010
- */
-
-public class FailsVendorQualityCheckFilter extends ReadFilter {
-    public boolean filterOut( final SAMRecord read ) {
-        return read.getReadFailsVendorQualityCheckFlag();
-    }
-}
\ No newline at end of file
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/filters/FilterManager.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/filters/FilterManager.java
deleted file mode 100644
index 59c3f15..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/filters/FilterManager.java
+++ /dev/null
@@ -1,95 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.filters;
-
-import org.broadinstitute.gatk.utils.classloader.PluginManager;
-import org.broadinstitute.gatk.utils.help.GATKDocUtils;
-import org.broadinstitute.gatk.utils.help.HelpConstants;
-
-import java.util.Collection;
-import java.util.List;
-
-/**
- * Manage filters and filter options.  Any requests for basic filtering classes
- * should ultimately be made through this class.
- *
- * @author mhanna
- * @version 0.1
- */
-public class FilterManager extends PluginManager<ReadFilter> {
-    public FilterManager() {
-        super(ReadFilter.class,"filter","Filter");
-    }
-
-    /**
-     * Instantiate a filter of the given type.  Along the way, scream bloody murder if
-     * the filter is not available.
-     * @param filterType The type of the filter
-     * @return The filter
-     */
-    public ReadFilter createFilterByType(Class<? extends ReadFilter> filterType) {
-        return this.createByName(getName(filterType));
-    }
-
-    public Collection<Class<? extends ReadFilter>> getValues() {
-        return this.getPlugins();
-    }
-
-    /**
-     * Rather than use the default error message, print out a list of read filters as well.
-     * @param pluginCategory - string, the category of the plugin (e.g. read filter)
-     * @param pluginName - string, what we were trying to match (but failed to)
-     * @return - A wall of text with the default message, followed by a listing of available read filters
-     */
-    @Override
-    protected String formatErrorMessage(String pluginCategory, String pluginName) {
-        List<Class<? extends ReadFilter>> availableFilters = this.getPluginsImplementing(ReadFilter.class);
-
-
-        return String.format("Read filter %s not found. Available read filters:%n%n%s%n%n%s",pluginName,
-                userFriendlyListofReadFilters(availableFilters),
-                "Please consult the GATK Documentation (" + HelpConstants.GATK_DOCS_URL + ") for more information.");
-    }
-
-    private String userFriendlyListofReadFilters(List<Class<? extends ReadFilter>> filters) {
-        final String headName = "FilterName", headDoc = "Documentation";
-        int longestNameLength = -1;
-        for ( Class < ? extends ReadFilter> filter : filters ) {
-            longestNameLength = Math.max(longestNameLength,this.getName(filter).length());
-        }
-        String format = "   %"+longestNameLength+"s        %s%n";
-
-        StringBuilder listBuilder = new StringBuilder();
-        listBuilder.append(String.format(format,headName,headDoc));
-        for ( Class<? extends ReadFilter> filter : filters ) {
-            String helpLink = GATKDocUtils.helpLinksToGATKDocs(filter);
-            String filterName = this.getName(filter);
-            listBuilder.append(String.format(format,filterName,helpLink));
-        }
-
-        return listBuilder.toString();
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/filters/LibraryReadFilter.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/filters/LibraryReadFilter.java
deleted file mode 100644
index 8b0f076..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/filters/LibraryReadFilter.java
+++ /dev/null
@@ -1,49 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.filters;
-
-import htsjdk.samtools.SAMReadGroupRecord;
-import htsjdk.samtools.SAMRecord;
-import org.broadinstitute.gatk.utils.commandline.Argument;
-import org.broadinstitute.gatk.engine.filters.ReadFilter;
-
-/**
- * Only use reads from the specified library
- *
- * @author kcibul
- * @since Aug 15, 2012
- *
- */
-
-public class LibraryReadFilter extends ReadFilter {
-    @Argument(fullName = "library", shortName = "library", doc="The name of the library to keep, filtering out all others", required=true)
-    private String LIBRARY_TO_KEEP = null;
-
-    public boolean filterOut( final SAMRecord read ) {
-        final SAMReadGroupRecord readGroup = read.getReadGroup();
-        return ( readGroup == null || readGroup.getLibrary() == null || !readGroup.getLibrary().equals( LIBRARY_TO_KEEP ) );
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/filters/MalformedReadFilter.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/filters/MalformedReadFilter.java
deleted file mode 100644
index 1b59a06..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/filters/MalformedReadFilter.java
+++ /dev/null
@@ -1,260 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.filters;
-
-import htsjdk.samtools.*;
-import org.broadinstitute.gatk.utils.commandline.Argument;
-import org.broadinstitute.gatk.engine.GenomeAnalysisEngine;
-import org.broadinstitute.gatk.engine.ReadProperties;
-import org.broadinstitute.gatk.engine.arguments.ValidationExclusion;
-import org.broadinstitute.gatk.engine.datasources.reads.SAMDataSource;
-import org.broadinstitute.gatk.utils.exceptions.UserException;
-
-import java.util.Collections;
-
-/**
- * Filter out malformed reads.
- *
- * @author mhanna
- * @version 0.1
- */
-public class MalformedReadFilter extends ReadFilter {
-
-
-    private static final String FILTER_READS_WITH_N_CIGAR_ARGUMENT_FULL_NAME = "filter_reads_with_N_cigar" ;
-
-    private SAMFileHeader header;
-
-    @Argument(fullName = FILTER_READS_WITH_N_CIGAR_ARGUMENT_FULL_NAME, shortName = "filterRNC", doc = "filter out reads with CIGAR containing the N operator, instead of stop processing and report an error.", required = false)
-    boolean filterReadsWithNCigar = false;
-
-
-    @Argument(fullName = "filter_mismatching_base_and_quals", shortName = "filterMBQ", doc = "if a read has mismatching number of bases and base qualities, filter out the read instead of blowing up.", required = false)
-    boolean filterMismatchingBaseAndQuals = false;
-
-    @Argument(fullName = "filter_bases_not_stored", shortName = "filterNoBases", doc = "if a read has no stored bases (i.e. a '*'), filter out the read instead of blowing up.", required = false)
-    boolean filterBasesNotStored = false;
-
-    /**
-     * Indicates the applicable validation exclusions
-     */
-    private boolean allowNCigars;
-
-    @Override
-    public void initialize(final GenomeAnalysisEngine engine) {
-        header = engine.getSAMFileHeader();
-        ValidationExclusion validationExclusions = null;
-        final SAMDataSource rds = engine.getReadsDataSource();
-        if (rds != null) {
-          final ReadProperties rps = rds.getReadsInfo();
-          if (rps != null) {
-            validationExclusions = rps.getValidationExclusionList();
-          }
-        }
-        if (validationExclusions == null) {
-            allowNCigars = false;
-        } else {
-            allowNCigars = validationExclusions.contains(ValidationExclusion.TYPE.ALLOW_N_CIGAR_READS);
-        }
-    }
-
-    public boolean filterOut(final SAMRecord read) {
-        // slowly changing the behavior to blow up first and filtering out if a parameter is explicitly provided
-        return  !checkInvalidAlignmentStart(read) ||
-                !checkInvalidAlignmentEnd(read) ||
-                !checkAlignmentDisagreesWithHeader(this.header,read) ||
-                !checkHasReadGroup(read) ||
-                !checkMismatchingBasesAndQuals(read, filterMismatchingBaseAndQuals) ||
-                !checkCigarDisagreesWithAlignment(read) ||
-                !checkSeqStored(read, filterBasesNotStored) ||
-                !checkCigarIsSupported(read,filterReadsWithNCigar,allowNCigars);
-    }
-
-    private static boolean checkHasReadGroup(final SAMRecord read) {
-        if ( read.getReadGroup() == null ) {
-            // there are 2 possibilities: either the RG tag is missing or it is not defined in the header
-            final String rgID = (String)read.getAttribute(SAMTagUtil.getSingleton().RG);
-            if ( rgID == null )
-                throw new UserException.ReadMissingReadGroup(read);
-            throw new UserException.ReadHasUndefinedReadGroup(read, rgID);
-        }
-        return true;
-    }
-
-    /**
-     * Check for the case in which the alignment start is inconsistent with the read unmapped flag.
-     * @param read The read to validate.
-     * @return true if read start is valid, false otherwise.
-     */
-    private static boolean checkInvalidAlignmentStart(final SAMRecord read ) {
-        // read is not flagged as 'unmapped', but alignment start is NO_ALIGNMENT_START
-        if( !read.getReadUnmappedFlag() && read.getAlignmentStart() == SAMRecord.NO_ALIGNMENT_START )
-            return false;
-        // Read is not flagged as 'unmapped', but alignment start is -1
-        if( !read.getReadUnmappedFlag() && read.getAlignmentStart() == -1 )
-            return false;
-        return true;
-    }
-
-    /**
-     * Check for invalid end of alignments.
-     * @param read The read to validate.
-     * @return true if read end is valid, false otherwise.
-     */
-    private static boolean checkInvalidAlignmentEnd(final SAMRecord read ) {
-        // Alignment aligns to negative number of bases in the reference.
-        if( !read.getReadUnmappedFlag() && read.getAlignmentEnd() != -1 && (read.getAlignmentEnd()-read.getAlignmentStart()+1)<0 )
-            return false;
-        return true;
-    }
-
-    /**
-     * Check to ensure that the alignment makes sense based on the contents of the header.
-     * @param header The SAM file header.
-     * @param read The read to verify.
-     * @return true if alignment agrees with header, false othrewise.
-     */
-    private static boolean checkAlignmentDisagreesWithHeader(final SAMFileHeader header, final SAMRecord read ) {
-        // Read is aligned to nonexistent contig
-        if( read.getReferenceIndex() == SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX && read.getAlignmentStart() != SAMRecord.NO_ALIGNMENT_START )
-            return false;
-        final SAMSequenceRecord contigHeader = header.getSequence( read.getReferenceIndex() );
-        // Read is aligned to a point after the end of the contig
-        if( !read.getReadUnmappedFlag() && read.getAlignmentStart() > contigHeader.getSequenceLength() )
-            return false;
-        return true;
-    }
-
-    /**
-     * Check for inconsistencies between the cigar string and the
-     * @param read The read to validate.
-     * @return true if cigar agrees with alignment, false otherwise.
-     */
-    private static boolean checkCigarDisagreesWithAlignment(final SAMRecord read) {
-        // Read has a valid alignment start, but the CIGAR string is empty
-        if( !read.getReadUnmappedFlag() &&
-            read.getAlignmentStart() != -1 &&
-            read.getAlignmentStart() != SAMRecord.NO_ALIGNMENT_START &&
-            read.getAlignmentBlocks().size() < 0 )
-            return false;
-        return true;
-    }
-
-    /**
-     * Check for unsupported CIGAR operators.
-     * Currently the N operator is not supported.
-     * @param read The read to validate.
-     * @param filterReadsWithNCigar whether the offending read should just
-     *                              be silently filtered or not.
-     * @param allowNCigars whether reads that contain N operators in their CIGARs
-     *                     can be processed or an exception should be thrown instead.
-     * @throws UserException.UnsupportedCigarOperatorException
-     *   if {@link #filterReadsWithNCigar} is <code>false</code> and
-     *   the input read has some unsupported operation.
-     * @return <code>true</code> if the read CIGAR operations are
-     * fully supported, otherwise <code>false</code>, as long as
-     * no exception has been thrown.
-     */
-    private static boolean checkCigarIsSupported(final SAMRecord read, final boolean filterReadsWithNCigar, final boolean allowNCigars) {
-        if( containsNOperator(read)) {
-            if (! filterReadsWithNCigar && !allowNCigars) {
-                throw new UserException.UnsupportedCigarOperatorException(
-                        CigarOperator.N,read,
-                        "Perhaps you are"
-                        + " trying to use RNA-Seq data?"
-                        + " While we are currently actively working to"
-                        + " support this data type unfortunately the"
-                        + " GATK cannot be used with this data in its"
-                        + " current form. You have the option of either"
-                        + " filtering out all reads with operator "
-                        + CigarOperator.N + " in their CIGAR string"
-                        + " (please add --"
-                        +  FILTER_READS_WITH_N_CIGAR_ARGUMENT_FULL_NAME
-                        + " to your command line) or"
-                        + " assume the risk of processing those reads as they"
-                        + " are including the pertinent unsafe flag (please add -U"
-                        + ' ' + ValidationExclusion.TYPE.ALLOW_N_CIGAR_READS
-                        + " to your command line). Notice however that if you were"
-                        + " to choose the latter, an unspecified subset of the"
-                        + " analytical outputs of an unspecified subset of the tools"
-                        + " will become unpredictable. Consequently the GATK team"
-                        + " might well not be able to provide you with the usual support"
-                        + " with any issue regarding any output");
-            }
-            return ! filterReadsWithNCigar;
-        }
-        return true;
-    }
-
-    private static boolean containsNOperator(final SAMRecord read) {
-        final Cigar cigar = read.getCigar();
-        if (cigar == null)   {
-            return false;
-        }
-        for (final CigarElement ce : cigar.getCigarElements()) {
-            if (ce.getOperator() == CigarOperator.N) {
-                return true;
-            }
-        }
-        return false;
-    }
-
-    /**
-     * Check if the read has the same number of bases and base qualities
-     * @param read the read to validate
-     * @return true if they have the same number. False otherwise.
-     */
-    private static boolean checkMismatchingBasesAndQuals(final SAMRecord read, final boolean filterMismatchingBaseAndQuals) {
-        final boolean result;
-        if (read.getReadLength() == read.getBaseQualities().length)
-            result = true;
-        else if (filterMismatchingBaseAndQuals)
-            result = false;
-        else
-            throw new UserException.MalformedBAM(read,
-                    String.format("BAM file has a read with mismatching number of bases and base qualities. Offender: %s [%d bases] [%d quals].%s",
-                            read.getReadName(), read.getReadLength(), read.getBaseQualities().length,
-                            read.getBaseQualities().length == 0 ? " You can use --defaultBaseQualities to assign a default base quality for all reads, but this can be dangerous in you don't know what you are doing." : ""));
-
-        return result;
-    }
-
-    /**
-     * Check if the read has its base sequence stored
-     * @param read the read to validate
-     * @return true if the sequence is stored and false otherwise ("*" in the SEQ field).
-     */
-    protected static boolean checkSeqStored(final SAMRecord read, final boolean filterBasesNotStored) {
-
-        if ( read.getReadBases() != SAMRecord.NULL_SEQUENCE )
-            return true;
-
-        if ( filterBasesNotStored )
-            return false;
-
-        throw new UserException.MalformedBAM(read, String.format("the BAM file has a read with no stored bases (i.e. it uses '*') which is not supported in the GATK; see the --filter_bases_not_stored argument. Offender: %s", read.getReadName()));
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/filters/MappingQualityFilter.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/filters/MappingQualityFilter.java
deleted file mode 100644
index 67c62b9..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/filters/MappingQualityFilter.java
+++ /dev/null
@@ -1,46 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.filters;
-
-import htsjdk.samtools.SAMRecord;
-import org.broadinstitute.gatk.utils.commandline.Argument;
-
-/**
- * Filter out reads with low mapping qualities.
- *
- * @author ebanks
- * @version 0.1
- */
-
-public class MappingQualityFilter extends ReadFilter {
-
-    @Argument(fullName = "min_mapping_quality_score", shortName = "mmq", doc = "Minimum read mapping quality required to consider a read for calling", required = false)
-    public int MIN_MAPPING_QUALTY_SCORE = 10;
-
-    public boolean filterOut(SAMRecord rec) {
-        return (rec.getMappingQuality() < MIN_MAPPING_QUALTY_SCORE);
-    }
-}
\ No newline at end of file
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/filters/MappingQualityUnavailableFilter.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/filters/MappingQualityUnavailableFilter.java
deleted file mode 100644
index 05df7fb..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/filters/MappingQualityUnavailableFilter.java
+++ /dev/null
@@ -1,43 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.filters;
-
-import htsjdk.samtools.SAMRecord;
-import org.broadinstitute.gatk.utils.QualityUtils;
-
-/**
- * Filter out mapping quality zero reads.
- *
- * @author ebanks
- * @version 0.1
- */
-
-public class MappingQualityUnavailableFilter extends ReadFilter {
-    public boolean filterOut(SAMRecord rec) {
-        return (rec.getMappingQuality() == QualityUtils.MAPPING_QUALITY_UNAVAILABLE);
-    }
-}
-
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/filters/MappingQualityZeroFilter.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/filters/MappingQualityZeroFilter.java
deleted file mode 100644
index f3f7032..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/filters/MappingQualityZeroFilter.java
+++ /dev/null
@@ -1,42 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.filters;
-
-import htsjdk.samtools.SAMRecord;
-
-/**
- * Filter out mapping quality zero reads.
- *
- * @author hanna
- * @version 0.1
- */
-
-public class MappingQualityZeroFilter extends ReadFilter {
-    public boolean filterOut(SAMRecord rec) {
-        return (rec.getMappingQuality() == 0);
-    }
-}
-
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/filters/MateSameStrandFilter.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/filters/MateSameStrandFilter.java
deleted file mode 100644
index 0818f8f..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/filters/MateSameStrandFilter.java
+++ /dev/null
@@ -1,42 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.filters;
-
-import htsjdk.samtools.SAMRecord;
-
-/**
- * Filter out reads that are not paired, have their mate unmapped, are duplicates, fail vendor quality check or both mate and read are in the same strand.
- *
- * @author chartl
- * @since 5/18/11
- */
-public class MateSameStrandFilter extends ReadFilter {
-
-    public boolean filterOut(SAMRecord read) {
-        return (! read.getReadPairedFlag() ) || read.getMateUnmappedFlag() || read.getDuplicateReadFlag() ||
-                read.getReadFailsVendorQualityCheckFlag() || read.getMateNegativeStrandFlag() != read.getReadNegativeStrandFlag();
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/filters/MaxInsertSizeFilter.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/filters/MaxInsertSizeFilter.java
deleted file mode 100644
index cca05eb..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/filters/MaxInsertSizeFilter.java
+++ /dev/null
@@ -1,44 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.filters;
-
-import htsjdk.samtools.SAMRecord;
-import org.broadinstitute.gatk.utils.commandline.Argument;
-
-/**
- * Filter out reads that exceed a given max insert size
- *
- * @author chartl
- * @since 5/2/11
- */
-public class MaxInsertSizeFilter extends ReadFilter {
-    @Argument(fullName = "maxInsertSize", shortName = "maxInsert", doc="Discard reads with insert size greater than the specified value, defaults to 1000000", required=false)
-    private int maxInsertSize = 1000000;
-
-    public boolean filterOut(SAMRecord record) {
-        return (record.getReadPairedFlag() && (record.getInferredInsertSize() > maxInsertSize || record.getInferredInsertSize() < -1*maxInsertSize));
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/filters/MissingReadGroupFilter.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/filters/MissingReadGroupFilter.java
deleted file mode 100644
index 21b291b..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/filters/MissingReadGroupFilter.java
+++ /dev/null
@@ -1,41 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.filters;
-
-import htsjdk.samtools.SAMRecord;
-
-/**
- * Filter out reads without read groups.
- *
- * @author ebanks
- * @version 0.1
- */
-
-public class MissingReadGroupFilter extends ReadFilter {
-    public boolean filterOut(SAMRecord rec) {
-        return rec.getReadGroup() == null;
-    }
-}
\ No newline at end of file
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/filters/NDNCigarReadTransformer.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/filters/NDNCigarReadTransformer.java
deleted file mode 100644
index 65bf1eb..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/filters/NDNCigarReadTransformer.java
+++ /dev/null
@@ -1,118 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.filters;
-
-import htsjdk.samtools.Cigar;
-import htsjdk.samtools.CigarElement;
-import htsjdk.samtools.CigarOperator;
-import htsjdk.samtools.SAMRecord;
-import org.broadinstitute.gatk.engine.GenomeAnalysisEngine;
-import org.broadinstitute.gatk.engine.iterators.RNAReadTransformer;
-import org.broadinstitute.gatk.engine.iterators.ReadTransformer;
-import org.broadinstitute.gatk.engine.walkers.Walker;
-import org.broadinstitute.gatk.utils.exceptions.UserException;
-import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
-
-/**
- * A read transformer that refactor NDN cigar elements to one N element.
- *
- *  <p>
- *     This read transformer will refactor cigar strings that contain N-D-N elements to one N element (with total length of the three refactored elements).
- *     This is intended primarily for users of RNA-Seq data handling programs such as TopHat2.
- *     Currently we consider that the internal N-D-N motif is illegal and we error out when we encounter it. By refactoring the cigar string of
- *     those specific reads, users of TopHat and other tools can circumvent this problem without affecting the rest of their dataset.
- *
- *     NOTE: any walker that need that functionality should apply that read transformer in its map function, since it won't be activated by the GATK engine.
- *
- *     The engine parameter that activate this read transformer is --refactor_NDN_cigar_string or -fixNDN
- *  </p>
- *
- *
- *
- * @author ami
- * @since 04/22/14
- */
-
-public class NDNCigarReadTransformer extends RNAReadTransformer {
-
-    private boolean refactorReads;
-
-    @Override
-    public ApplicationTime initializeSub(final GenomeAnalysisEngine engine, final Walker walker) {
-        refactorReads = engine.getArguments().REFACTOR_NDN_CIGAR_READS;
-
-        return ApplicationTime.HANDLED_IN_WALKER;   //  NOTE: any walker that need that functionality should apply that read transformer in its map function, since it won't be activated by the GATK engine.
-    }
-
-    @Override
-    public GATKSAMRecord apply(final GATKSAMRecord read) {
-        if(read == null)
-            throw new UserException.BadInput("try to transform a null GATKSAMRecord");
-        final Cigar originalCigar = read.getCigar();
-        if (originalCigar.isValid(read.getReadName(),-1) != null)
-            throw new UserException.BadInput("try to transform a read with non-valid cigar string: readName: "+read.getReadName()+" Cigar String: "+originalCigar);
-        read.setCigar(refactorNDNtoN(originalCigar));
-        return read;
-    }
-
-    @Override
-    public boolean enabled() {
-        return refactorReads;
-    }
-
-
-
-    protected Cigar refactorNDNtoN(final Cigar originalCigar) {
-        final Cigar refactoredCigar = new Cigar();
-        final int cigarLength = originalCigar.numCigarElements();
-        for(int i = 0; i < cigarLength; i++){
-            final CigarElement element = originalCigar.getCigarElement(i);
-            if(element.getOperator() == CigarOperator.N && thereAreAtLeast2MoreElements(i,cigarLength)){
-                final CigarElement nextElement = originalCigar.getCigarElement(i+1);
-                final CigarElement nextNextElement = originalCigar.getCigarElement(i+2);
-
-                // if it is N-D-N replace with N (with the total length) otherwise just add the first N.
-                if(nextElement.getOperator() == CigarOperator.D && nextNextElement.getOperator() == CigarOperator.N){
-                    final int threeElementsLength = element.getLength() + nextElement.getLength() + nextNextElement.getLength();
-                    final CigarElement refactoredElement = new CigarElement(threeElementsLength,CigarOperator.N);
-                    refactoredCigar.add(refactoredElement);
-                    i += 2; //skip the elements that were refactored
-                }
-                else
-                    refactoredCigar.add(element);  // add only the first N
-            }
-            else
-                refactoredCigar.add(element);  // add any non-N element
-        }
-        return refactoredCigar;
-    }
-
-    private boolean thereAreAtLeast2MoreElements(final int index, final int cigarLength){
-        return index < cigarLength - 2;
-    }
-
-}
-
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/filters/NoOriginalQualityScoresFilter.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/filters/NoOriginalQualityScoresFilter.java
deleted file mode 100644
index 8297903..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/filters/NoOriginalQualityScoresFilter.java
+++ /dev/null
@@ -1,65 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.filters;
-
-import htsjdk.samtools.SAMRecord;
-
-/*
- * Copyright (c) 2009 The Broad Institute
- *
- * Permission is hereby granted, free of charge, to any person
- * obtaining a copy of this software and associated documentation
- * files (the "Software"), to deal in the Software without
- * restriction, including without limitation the rights to use,
- * copy, modify, merge, publish, distribute, sublicense, and/or sell
- * copies of the Software, and to permit persons to whom the
- * Software is furnished to do so, subject to the following
- * conditions:
- *
- * The above copyright notice and this permission notice shall be
- * included in all copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
- * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
- * OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
- * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
- * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
- * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
- * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
- * OTHER DEALINGS IN THE SOFTWARE.
- */
-
-/**
- * Filter out reads that don't have base an original quality quality score tag (usually added by BQSR)
- *
- * @author rpoplin
- * @since Nov 19, 2009
- */
-public class NoOriginalQualityScoresFilter extends ReadFilter {
-    public boolean filterOut( final SAMRecord read ) {
-        return (read.getAttribute("OQ") == null);
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/filters/NotPrimaryAlignmentFilter.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/filters/NotPrimaryAlignmentFilter.java
deleted file mode 100644
index b09e1f6..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/filters/NotPrimaryAlignmentFilter.java
+++ /dev/null
@@ -1,41 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.filters;
-
-import htsjdk.samtools.SAMRecord;
-
-/**
- * Filter out duplicate reads.
- *
- * @author rpoplin
- * @since Dec 9, 2009
- */
-
-public class NotPrimaryAlignmentFilter extends ReadFilter {
-    public boolean filterOut( final SAMRecord read ) {
-        return read.getNotPrimaryAlignmentFlag();
-    }
-}
\ No newline at end of file
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/filters/Platform454Filter.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/filters/Platform454Filter.java
deleted file mode 100644
index 79f16a5..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/filters/Platform454Filter.java
+++ /dev/null
@@ -1,43 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.filters;
-
-import htsjdk.samtools.SAMRecord;
-import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
-import org.broadinstitute.gatk.utils.sam.ReadUtils;
-
-/**
- * Filter out 454 reads.
- *
- * @author ebanks
- * @version 0.1
- */
-
-public class Platform454Filter extends ReadFilter {
-    public boolean filterOut(SAMRecord rec) {
-        return (ReadUtils.is454Read((GATKSAMRecord)rec));
-    }
-}
\ No newline at end of file
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/filters/PlatformFilter.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/filters/PlatformFilter.java
deleted file mode 100644
index 8236cc2..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/filters/PlatformFilter.java
+++ /dev/null
@@ -1,49 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.filters;
-
-import htsjdk.samtools.SAMRecord;
-import org.broadinstitute.gatk.utils.commandline.Argument;
-import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
-import org.broadinstitute.gatk.utils.sam.ReadUtils;
-
-/**
- * Filter out PL matching reads.
- *
- * @author ebanks
- * @version 0.1
- */
-public class PlatformFilter extends ReadFilter {
-    @Argument(fullName = "PLFilterName", shortName = "PLFilterName", doc="Discard reads with RG:PL attribute containing this string", required=false)
-    protected String[] PLFilterNames;
-
-    public boolean filterOut(SAMRecord rec) {
-        for ( String name : PLFilterNames )
-            if ( ReadUtils.isPlatformRead((GATKSAMRecord)rec, name.toUpperCase() ))
-                return true;
-        return false;
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/filters/PlatformUnitFilter.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/filters/PlatformUnitFilter.java
deleted file mode 100644
index 4a6781f..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/filters/PlatformUnitFilter.java
+++ /dev/null
@@ -1,86 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.filters;
-
-import htsjdk.samtools.SAMReadGroupRecord;
-import htsjdk.samtools.SAMRecord;
-import org.broadinstitute.gatk.utils.exceptions.UserException;
-
-import java.util.HashSet;
-import java.util.Set;
-
-/**
- * Filter out reads that have blacklisted platform unit tags. (See code documentation for how to create the blacklist).
- *
- * @author asivache
- * @since Sep 21, 2009
- */
-public class PlatformUnitFilter extends ReadFilter {
-    // a hack: use static in order to be able to fill it with the data from command line at runtime
-    static private Set<String> blackListedLanes = new HashSet<String>();
-
-    public boolean filterOut(SAMRecord samRecord) {
-
-        if ( blackListedLanes.size() == 0 ) return false; // no filters set, nothing to do
-
-        Object pu_attr = samRecord.getAttribute("PU");
-
-        if ( pu_attr == null ) {
-            // no platform unit in the record, go get from read group
-            SAMReadGroupRecord rgr = samRecord.getReadGroup();
-            if ( rgr == null ) throw new UserException.MalformedBAM(samRecord, "Read " + samRecord.getReadName() +" has NO associated read group record");
-            pu_attr = rgr.getAttribute("PU") ;
-        }
-        if ( pu_attr == null ) return false; // could not get PU, forget about the filtering...
-        return blackListedLanes.contains((String)pu_attr);
-    }
-
-    /**
-     * The argument is interpreted as a comma-separated list of lanes (platform units) to be filtered
-     * out. All the specified names will be registered with the filter and filterOut(r) for any SAMRecord r
-     * belonging to one of the specified lanes will thereafter return true.
-     * The names can be surrounded by additional spaces, the latters will be trimmed by this method.
-     * This method can be called multiple times to add more lanes. Re-registering the same lane again is safe.
-     * @param arg
-     */
-    public static void setBlackListedLanes(String arg) {
-        String[] lanes = arg.split(",");
-        for ( int i = 0; i < lanes.length ; i++ ) {
-            blackListedLanes.add(lanes[i].trim());
-        }
-    }
-
-    /**
-     * Adds a single name of a lane (platform unit) to be filtered out by this filter. The name can be surrounded
-     * by spaces, the latters will be trimmed out. This method can be called multiple times to add more lanes.
-     * Re-registering the same lane again is safe.
-     * @param arg
-     */
-    public static void addBlackListedLane(String arg) {
-        blackListedLanes.add(arg.trim());
-    }
-
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/filters/PlatformUnitFilterHelper.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/filters/PlatformUnitFilterHelper.java
deleted file mode 100644
index 428806d..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/filters/PlatformUnitFilterHelper.java
+++ /dev/null
@@ -1,87 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.filters;
-
-import org.broadinstitute.gatk.utils.exceptions.UserException;
-import org.broadinstitute.gatk.utils.text.XReadLines;
-
-import java.io.File;
-import java.io.FileNotFoundException;
-import java.util.regex.Pattern;
-
-/**
- * This is a utility class, its sole purpose is to populate PlatformUnitFilter with data. When a command line argument
- * (@Argument) of the type PlatformUnitFilterHelper is declared in an application (walker), its constuctor
- * PlatformUnitFilterHelper(String) automatically called by the argument system will parse its String argument
- * and set up static fields of PlatformUnitFilter object.
- *
- * The String argument can be either a name of existing file, or a list of comma-separated lane (Platform Unit) names.
- * First, the constructor will check if a file with specified name exists. If it does, then it is assumed that each line
- * in the file contains one name of a lane (Platfor Unit) to filter out. If such file does not exist, then the argument is
- * interpreted as a comma-separated list. Blank spaces around lane names are allowed in both cases and will be trimmed out.
- *
- * In other words, all it takes to request filtering out reads from specific lane(s) is
- *
- * 1) declare filter usage in the walker
- *
- * @ReadFilters({PlatformUnitFilter.class,...})
- *
- * 2) specify the argument that will take the list of lanes to filter:
- *
- * @Argument(fullName="filterLanes", shortName="FL", doc="all specified lanes will be ignored", required=false)
- *   PlatformUnitFilterHelper dummy;
- *
- * After that, the walker can be invoked with "--filterLanes 302UBAAXX090508.8,302YAAAXX090427.8" argument.
- *
- * Created by IntelliJ IDEA.
- * User: asivache
- * Date: Sep 22, 2009
- * Time: 11:11:48 AM
- * To change this template use File | Settings | File Templates.
- */
-public class PlatformUnitFilterHelper {
-    final public static Pattern EMPTYLINE_PATTERN = Pattern.compile("^\\s*$");
-
-    public PlatformUnitFilterHelper(String arg) {
-         File f = new File(arg);
-
-         if ( f.exists() ) {
-             try {
-                 XReadLines reader = new XReadLines(f);
-                 for ( String line : reader ) {
-                     if ( EMPTYLINE_PATTERN.matcher(line).matches() ) continue; // skip empty lines
-                     PlatformUnitFilter.addBlackListedLane(line); // PlatformUnitFilter will trim the line as needed
-                 }
-             } catch ( FileNotFoundException e) { throw new UserException.CouldNotReadInputFile(f, e); } // this should NEVER happen
-             return;
-         }
-
-        // no such file, must be a comma-separated list:
-
-        PlatformUnitFilter.setBlackListedLanes(arg); // PlatformUnitFilter will split on commas and trim as needed
-
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/filters/ReadFilter.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/filters/ReadFilter.java
deleted file mode 100644
index a2102a8..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/filters/ReadFilter.java
+++ /dev/null
@@ -1,60 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.filters;
-
-import htsjdk.samtools.filter.SamRecordFilter;
-import htsjdk.samtools.SAMRecord;
-import org.broadinstitute.gatk.engine.GenomeAnalysisEngine;
-import org.broadinstitute.gatk.utils.help.DocumentedGATKFeature;
-import org.broadinstitute.gatk.utils.help.HelpConstants;
-
-/**
- * A SamRecordFilter that also depends on the header.
- */
- at DocumentedGATKFeature(
-        groupName = HelpConstants.DOCS_CAT_RF,
-        summary = "GATK Engine arguments that filter or transfer incoming SAM/BAM data files" )
-public abstract class ReadFilter implements SamRecordFilter {
-    /**
-     * Sets the header for use by this filter.
-     * @param engine the engine.
-     */
-    public void initialize(GenomeAnalysisEngine engine) {}
-
-
-    /**
-     * Determines whether a pair of SAMRecord matches this filter
-     *
-     * @param first  the first SAMRecord to evaluate
-     * @param second the second SAMRecord to evaluate
-     *
-     * @return true if the SAMRecords matches the filter, otherwise false
-     * @throws UnsupportedOperationException when paired filter not implemented
-     */
-    public boolean filterOut(final SAMRecord first, final SAMRecord second) {
-        throw new UnsupportedOperationException("Paired filter not implemented: " + this.getClass());
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/filters/ReadGroupBlackListFilter.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/filters/ReadGroupBlackListFilter.java
deleted file mode 100644
index 7c6bfb0..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/filters/ReadGroupBlackListFilter.java
+++ /dev/null
@@ -1,120 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.filters;
-
-import htsjdk.samtools.SAMReadGroupRecord;
-import htsjdk.samtools.SAMRecord;
-import org.broadinstitute.gatk.utils.exceptions.UserException;
-import org.broadinstitute.gatk.utils.text.XReadLines;
-
-import java.io.File;
-import java.io.FileNotFoundException;
-import java.util.*;
-import java.util.Map.Entry;
-
-/**
- * Removes records matching the read group tag and exact match string.
- * For example, this filter value:
- *   PU:1000G-mpimg-080821-1_1
- * would filter out a read with the read group PU:1000G-mpimg-080821-1_1
- */
-public class ReadGroupBlackListFilter extends ReadFilter {
-    private Set<Entry<String, Collection<String>>> filterEntries;
-
-    public ReadGroupBlackListFilter(List<String> blackLists) {
-        Map<String, Collection<String>> filters = new TreeMap<String, Collection<String>>();
-        for (String blackList : blackLists)
-            addFilter(filters, blackList, null, 0);
-        this.filterEntries = filters.entrySet();
-    }
-
-    public boolean filterOut(SAMRecord samRecord) {
-        for (Entry<String, Collection<String>> filterEntry : filterEntries) {
-            String attributeType = filterEntry.getKey();
-
-            SAMReadGroupRecord samReadGroupRecord = samRecord.getReadGroup();
-            if (samReadGroupRecord != null) {
-                Object attribute;
-                if ("ID".equals(attributeType) || "RG".equals(attributeType))
-                    attribute = samReadGroupRecord.getId();
-                else
-                    attribute = samReadGroupRecord.getAttribute(attributeType);
-                if (attribute != null && filterEntry.getValue().contains(attribute))
-                    return true;
-            }
-        }
-
-        return false;
-    }
-
-    private void addFilter(Map<String, Collection<String>> filters, String filter, File parentFile, int parentLineNum) {
-        if (filter.toLowerCase().endsWith(".list") || filter.toLowerCase().endsWith(".txt")) {
-            File file = new File(filter);
-            try {
-                int lineNum = 0;
-                XReadLines lines = new XReadLines(file);
-                for (String line : lines) {
-                    lineNum++;
-
-                    if (line.trim().length() == 0)
-                        continue;
-
-                    if (line.startsWith("#"))
-                        continue;
-
-                    addFilter(filters, line, file, lineNum);
-                }
-            } catch (FileNotFoundException e) {
-                String message = "Error loading black list: " + file.getAbsolutePath();
-                if (parentFile != null) {
-                    message += ", " + parentFile.getAbsolutePath() + ":" + parentLineNum;
-                }
-                throw new UserException(message);
-            }
-        } else {
-            String[] filterEntry = filter.split(":", 2);
-
-            String message = null;
-            if (filterEntry.length != 2) {
-                message = "Invalid read group filter: " + filter;
-            } else if (filterEntry[0].length() != 2) {
-                message = "Tag is not two characters: " + filter;
-            }
-
-            if (message != null) {
-                if (parentFile != null) {
-                    message += ", " + parentFile.getAbsolutePath() + ":" + parentLineNum;
-                }
-                message += ", format is <TAG>:<SUBSTRING>";
-                throw new UserException(message);
-            }
-
-            if (!filters.containsKey(filterEntry[0]))
-                filters.put(filterEntry[0], new TreeSet<String>());
-            filters.get(filterEntry[0]).add(filterEntry[1]);
-        }
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/filters/ReadLengthFilter.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/filters/ReadLengthFilter.java
deleted file mode 100644
index 1e44df8..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/filters/ReadLengthFilter.java
+++ /dev/null
@@ -1,48 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.filters;
-
-import htsjdk.samtools.SAMRecord;
-import org.broadinstitute.gatk.utils.commandline.Argument;
-
-/**
- * Filters out reads whose length is >= some value or < some value.
- *
- * @author mhanna
- * @version 0.1
- */
-public class ReadLengthFilter extends ReadFilter {
-    @Argument(fullName = "maxReadLength", shortName = "maxRead", doc="Discard reads with length greater than the specified value", required=true)
-    private int maxReadLength;
-
-    @Argument(fullName = "minReadLength", shortName = "minRead", doc="Discard reads with length shorter than the specified value", required=true)
-    private int minReadLength = 1;
-    public boolean filterOut(SAMRecord read) {
-        // check the length
-        return read.getReadLength() > maxReadLength || read.getReadLength() < minReadLength;
-    }
-
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/filters/ReadNameFilter.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/filters/ReadNameFilter.java
deleted file mode 100644
index 23a5151..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/filters/ReadNameFilter.java
+++ /dev/null
@@ -1,44 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.filters;
-
-import htsjdk.samtools.SAMRecord;
-import org.broadinstitute.gatk.utils.commandline.Argument;
-
-/**
- * Filter out all reads except those with this read name
- *
- * @author chartl
- * @since 9/19/11
- */
-public class ReadNameFilter extends ReadFilter {
-     @Argument(fullName = "readName", shortName = "rn", doc="Filter out all reads except those with this read name", required=true)
-    private String readName;
-
-    public boolean filterOut(final SAMRecord rec) {
-        return ! rec.getReadName().equals(readName);
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/filters/ReadStrandFilter.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/filters/ReadStrandFilter.java
deleted file mode 100644
index fd28766..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/filters/ReadStrandFilter.java
+++ /dev/null
@@ -1,46 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.filters;
-
-import htsjdk.samtools.SAMRecord;
-import org.broadinstitute.gatk.utils.commandline.Argument;
-
-/**
- * Filters out reads whose strand is negative or positive
- *
- * @author chartl
- * @version 0.1
- */
-public class ReadStrandFilter extends ReadFilter {
-    @Argument(fullName = "filterPositive", shortName = "fp", doc="Discard reads on the forward strand",required=false)
-	boolean filterForward = false;
-    
-    public boolean filterOut(SAMRecord read) {
-        // check the length
-        return read.getReadNegativeStrandFlag() != filterForward;
-    }
-
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/filters/ReassignMappingQualityFilter.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/filters/ReassignMappingQualityFilter.java
deleted file mode 100644
index 0c8a93a..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/filters/ReassignMappingQualityFilter.java
+++ /dev/null
@@ -1,86 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.filters;
-
-import htsjdk.samtools.SAMRecord;
-import org.broadinstitute.gatk.utils.commandline.Argument;
-
-/**
- * A read filter (transformer) that sets all reads mapping quality to a given value.
- *
- *  <p>
- *     If a BAM file contains erroneous or missing mapping qualities (MAPQ), this read transformer will set all your
- *     mapping qualities to a given value (see arguments list for default value).
- *  </p>
- *
- * <h3>See also</h3>
- *
- * <p>ReassignOneMappingQualityFilter: reassigns a single MAPQ value, as opposed to all those found in the BAM file.</p>
- *
- * <h3>Caveats</h3>
- *
- * <p>Note that due to the order of operations involved in applying filters, it is possible that other read filters
- * (determined either at command-line or internally by the tool you are using) will be applied to your data before
- * this read transformation can be applied. If one of those other filters acts on the read mapping quality (MAPQ),
- * then you may not obtain the expected results. Unfortunately it is currently not possible to change the order of
- * operations from command line. To avoid the problem, we recommend applying this filter separately from any other
- * analysis, using PrintReads.</p>
- *
- *
- * <h3>Input</h3>
- *  <p>
- *	    BAM file(s)
- *  </p>
- *
- *
- * <h3>Output</h3>
- *  <p>
- *      BAM file(s) with all reads mapping qualities reassigned
- *  </p>
- *
- * <h3>Examples</h3>
- *  <pre>
- *  java -jar GenomeAnalysisTK.jar \
- *      -T PrintReads \
- *      -rf ReassignMappingQuality \
- *      -DMQ 35
- *  </pre>
- *
- * @author carneiro
- * @since 8/8/11
- */
-
-public class ReassignMappingQualityFilter extends ReadFilter {
-
-    @Argument(fullName = "default_mapping_quality", shortName = "DMQ", doc = "Default read mapping quality to assign to all reads", required = false)
-    public int defaultMappingQuality = 60;
-
-    public boolean filterOut(SAMRecord rec) {
-        rec.setMappingQuality(defaultMappingQuality);
-        return false;
-    }
-}
-
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/filters/ReassignOneMappingQualityFilter.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/filters/ReassignOneMappingQualityFilter.java
deleted file mode 100644
index f07f197..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/filters/ReassignOneMappingQualityFilter.java
+++ /dev/null
@@ -1,87 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.filters;
-
-import htsjdk.samtools.SAMRecord;
-import org.broadinstitute.gatk.utils.commandline.Argument;
-
-/**
- * A read filter (transformer) that changes a given read mapping quality to a different value.
- *
- *  <p>
- *     This read transformer will change a certain read mapping quality to a different value without affecting reads that
- *     have other mapping qualities. This is intended primarily for users of RNA-Seq data handling programs such
- *     as TopHat, which use MAPQ = 255 to designate uniquely aligned reads. According to convention, 255 normally
- *     designates "unknown" quality, and most GATK tools automatically ignore such reads. By reassigning a different
- *     mapping quality to those specific reads, users of TopHat and other tools can circumvent this problem without
- *     affecting the rest of their dataset.
- *  </p>
- *
- *  <p>
- *     This differs from the ReassignMappingQuality filter by its selectivity -- only one mapping quality is targeted.
- *     ReassignMappingQuality will change ALL mapping qualities to a single one, and is typically used for datasets
- *     that have no assigned mapping qualities.
- *  </p>
- *
- * <h3>Input</h3>
- *  <p>
- *	    BAM file(s)
- *  </p>
- *
- *
- * <h3>Output</h3>
- *  <p>
- *      BAM file(s) with one read mapping quality selectively reassigned as desired
- *  </p>
- *
- * <h3>Examples</h3>
- *  <pre>
- *    java -jar GenomeAnalysisTK.jar
- *      -T PrintReads
- *      -rf ReassignOneMappingQuality
- *      -RMQF 255
- *      -RMQT 60
- *  </pre>
- *
- * @author vdauwera
- * @since 2/19/13
- */
-
-public class ReassignOneMappingQualityFilter extends ReadFilter {
-
-    @Argument(fullName = "reassign_mapping_quality_from", shortName = "RMQF", doc = "Original mapping quality", required = false)
-    public int reassignMappingQualityFrom = 255;
-
-    @Argument(fullName = "reassign_mapping_quality_to", shortName = "RMQT", doc = "Desired mapping quality", required = false)
-    public int reassignMappingQualityTo = 60;
-
-    public boolean filterOut(SAMRecord rec) {
-        if (rec.getMappingQuality() == reassignMappingQualityFrom)
-        rec.setMappingQuality(reassignMappingQualityTo);
-        return false;
-    }
-}
-
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/filters/SampleFilter.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/filters/SampleFilter.java
deleted file mode 100644
index 2ec0112..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/filters/SampleFilter.java
+++ /dev/null
@@ -1,45 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.filters;
-
-import htsjdk.samtools.SAMReadGroupRecord;
-import htsjdk.samtools.SAMRecord;
-import org.broadinstitute.gatk.utils.commandline.Argument;
-
-import java.util.Set;
-
-/**
- * Filter out all reads except those with this sample
- */
-public class SampleFilter extends ReadFilter {
-    @Argument(fullName = "sample_to_keep", shortName = "goodSM", doc="The name of the sample(s) to keep, filtering out all others", required=true)
-    private Set SAMPLES_TO_KEEP = null;
-
-    public boolean filterOut( final SAMRecord read ) {
-        final SAMReadGroupRecord readGroup = read.getReadGroup();
-        return !( readGroup != null && SAMPLES_TO_KEEP.contains(readGroup.getSample()) );
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/filters/SingleReadGroupFilter.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/filters/SingleReadGroupFilter.java
deleted file mode 100644
index 5a9d214..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/filters/SingleReadGroupFilter.java
+++ /dev/null
@@ -1,48 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.filters;
-
-import htsjdk.samtools.SAMReadGroupRecord;
-import htsjdk.samtools.SAMRecord;
-import org.broadinstitute.gatk.utils.commandline.Argument;
-
-/**
- * Only use reads from the specified read group.
- *
- * @author rpoplin
- * @since Nov 27, 2009
- *
- */
-
-public class SingleReadGroupFilter extends ReadFilter {
-    @Argument(fullName = "read_group_to_keep", shortName = "goodRG", doc="The name of the read group to keep, filtering out all others", required=true)
-    private String READ_GROUP_TO_KEEP = null;
-
-    public boolean filterOut( final SAMRecord read ) {
-        final SAMReadGroupRecord readGroup = read.getReadGroup();
-        return !( readGroup != null && readGroup.getReadGroupId().equals( READ_GROUP_TO_KEEP ) );
-    }
-}
\ No newline at end of file
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/filters/UnmappedReadFilter.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/filters/UnmappedReadFilter.java
deleted file mode 100644
index e9cc302..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/filters/UnmappedReadFilter.java
+++ /dev/null
@@ -1,41 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.filters;
-
-import htsjdk.samtools.SAMRecord;
-
-/**
- * Filter out unmapped reads.
- *
- * @author rpoplin
- * @since Dec 9, 2009
- */
-
-public class UnmappedReadFilter extends ReadFilter {
-    public boolean filterOut( final SAMRecord read ) {
-        return read.getReadUnmappedFlag() || read.getAlignmentStart() == SAMRecord.NO_ALIGNMENT_START;
-    }
-}
\ No newline at end of file
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/filters/package-info.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/filters/package-info.java
deleted file mode 100644
index 7e36ffb..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/filters/package-info.java
+++ /dev/null
@@ -1,26 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.filters;
\ No newline at end of file
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/io/DirectOutputTracker.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/io/DirectOutputTracker.java
deleted file mode 100644
index 96c9cb2..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/io/DirectOutputTracker.java
+++ /dev/null
@@ -1,48 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.io;
-
-import org.broadinstitute.gatk.engine.io.storage.Storage;
-import org.broadinstitute.gatk.engine.io.storage.StorageFactory;
-import org.broadinstitute.gatk.engine.io.stubs.Stub;
-
-/**
- * Maps creation of storage directly to output streams in parent.
- *
- * @author mhanna
- * @version 0.1
- */
-public class DirectOutputTracker extends OutputTracker {
-    public <T> T getStorage( Stub<T> stub ) {
-        Storage target = outputs.get(stub);
-        if( target == null ) {
-            target = StorageFactory.createStorage(stub);
-            outputs.put(stub, target);
-        }
-        return (T)target;
-    }
-
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/io/FastqFileWriter.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/io/FastqFileWriter.java
deleted file mode 100644
index 772c327..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/io/FastqFileWriter.java
+++ /dev/null
@@ -1,77 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.io;
-
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
-import org.broadinstitute.gatk.utils.sam.ReadUtils;
-
-import java.io.FileNotFoundException;
-import java.io.PrintStream;
-
-/**
- * User: carneiro
- * Date: 1/27/13
- * Time: 12:54 AM
- */
-public class FastqFileWriter {
-    private PrintStream output;
-
-    public FastqFileWriter(String filename) {
-        try {
-            this.output = new PrintStream(filename);
-        } catch (FileNotFoundException e) {
-            throw new ReviewedGATKException("Can't open file " + filename);
-        }
-    }
-
-    public void addAlignment(GATKSAMRecord read) {
-        output.println("@" + read.getReadName());
-
-        if (read.getReadNegativeStrandFlag()) {
-            output.println(ReadUtils.getBasesReverseComplement(read));
-            output.println("+");
-            output.println(ReadUtils.convertReadQualToString(invertQuals(read.getBaseQualities())));
-        } else {
-            output.println(ReadUtils.convertReadBasesToString(read));
-            output.println("+");
-            output.println(ReadUtils.convertReadQualToString(read));
-        }
-    }
-
-    public void close() {
-        this.output.close();
-    }
-
-    private byte[] invertQuals (byte[] quals) {
-        final int l = quals.length;
-        byte[] invertedQuals = new byte[l];
-        for (int i=0; i<l; i++) {
-            invertedQuals[l-1-i] = quals[i];
-        }
-        return invertedQuals;
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/io/GATKSAMFileWriter.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/io/GATKSAMFileWriter.java
deleted file mode 100644
index c60aae8..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/io/GATKSAMFileWriter.java
+++ /dev/null
@@ -1,56 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.io;
-
-import htsjdk.samtools.SAMFileHeader;
-import htsjdk.samtools.SAMFileWriter;
-
-/**
- * A writer that will allow unsorted BAM files to be written
- * and sorted on-the-fly.
- *
- * @author mhanna
- * @version 0.1
- */
-public interface GATKSAMFileWriter extends SAMFileWriter {
-    /**
-     * Writes the given custom header to SAM file output.
-     * @param header The header to write.
-     */
-    public void writeHeader(SAMFileHeader header);
-
-    /**
-     * Set Whether the BAM file to create is actually presorted.
-     * @param presorted True if the BAM file is presorted.  False otherwise.
-     */    
-    public void setPresorted(boolean presorted);
-
-    /**
-     * Set how many records in RAM the BAM file stores when sorting on-the-fly.
-     * @param maxRecordsInRam Max number of records in RAM.
-     */
-    public void setMaxRecordsInRam(int maxRecordsInRam);
-}
\ No newline at end of file
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/io/OutputTracker.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/io/OutputTracker.java
deleted file mode 100644
index 8f2fbe3..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/io/OutputTracker.java
+++ /dev/null
@@ -1,178 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.io;
-
-import htsjdk.samtools.SAMFileReader;
-import htsjdk.samtools.ValidationStringency;
-import org.broadinstitute.gatk.utils.commandline.ArgumentSource;
-import org.broadinstitute.gatk.engine.io.storage.Storage;
-import org.broadinstitute.gatk.engine.io.storage.StorageFactory;
-import org.broadinstitute.gatk.engine.io.stubs.OutputStreamStub;
-import org.broadinstitute.gatk.engine.io.stubs.Stub;
-import org.broadinstitute.gatk.engine.walkers.Walker;
-import org.broadinstitute.gatk.utils.classloader.JVMUtils;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-import org.broadinstitute.gatk.utils.exceptions.UserException;
-import org.broadinstitute.gatk.utils.io.IOUtils;
-import org.broadinstitute.gatk.utils.sam.SAMFileReaderBuilder;
-
-import java.io.File;
-import java.io.OutputStream;
-import java.lang.reflect.Field;
-import java.util.HashMap;
-import java.util.Map;
-
-/**
- * Manages the output and err streams that are created specifically for walker
- * output.
- */
-public abstract class OutputTracker {
-    /**
-     * The streams to which walker users should be reading directly.
-     */
-    protected Map<ArgumentSource, Object> inputs = new HashMap<ArgumentSource,Object>();
-
-    /**
-     * The streams to which walker users should be writing directly.
-     */
-    protected Map<Stub,Storage> outputs = new HashMap<Stub,Storage>();
-
-    /**
-     * Special-purpose stub.  Provides a connection to output streams.
-     */
-    protected OutputStreamStub outStub = null;
-
-    /**
-     * Special-purpose stream.  Provides a connection to error streams.
-     */
-    protected OutputStreamStub errStub = null;
-
-    /**
-     * Gets the output storage associated with a given stub.
-     * @param stub The stub for which to find / create the right output stream.
-     * @param <T> Type of the stream to create.
-     * @return Storage object with a facade of type T.
-     */
-    public abstract <T> T getStorage( Stub<T> stub );
-
-    public void prepareWalker( Walker walker, ValidationStringency strictnessLevel ) {
-        for( Map.Entry<ArgumentSource,Object> io: inputs.entrySet() ) {
-            ArgumentSource targetField = io.getKey();
-            Object targetValue = io.getValue();
-
-            // Ghastly hack: reaches in and finishes building out the SAMFileReader.
-            // TODO: Generalize this, and move it to its own initialization step.
-            if( targetValue instanceof SAMFileReaderBuilder) {
-                SAMFileReaderBuilder builder = (SAMFileReaderBuilder)targetValue;
-                builder.setValidationStringency(strictnessLevel);
-                targetValue = builder.build();
-            }
-
-            JVMUtils.setFieldValue( targetField.field, walker, targetValue );
-        }
-    }
-
-    /**
-     * Provide a mechanism for injecting supplemental streams for external management.
-     * @param argumentSource source Class / field into which to inject this stream.
-     * @param stub Stream to manage.
-     */
-    public void addInput( ArgumentSource argumentSource, Object stub ) {
-        inputs.put(argumentSource,stub);
-    }
-
-    /**
-     * Provide a mechanism for injecting supplemental streams for external management.
-     * @param stub Stream to manage.
-     */
-    public <T> void addOutput(Stub<T> stub) {
-        addOutput(stub,null);
-    }
-
-    /**
-     * Provide a mechanism for injecting supplemental streams for external management.
-     * @param stub Stream to manage.
-     */
-    public <T> void addOutput(Stub<T> stub, Storage<T> storage) {
-        stub.register(this);
-        outputs.put(stub,storage);
-        validateOutputPath(stub);
-    }
-
-    /**
-     * Close down all existing output streams.
-     */
-    public void close() {
-        for( Stub stub: outputs.keySet() ) {
-            // If the stream hasn't yet been created, create it so that there's at least an empty file present.
-            if( outputs.get(stub) == null )
-                getTargetStream(stub);
-
-            // Close down the storage.
-            outputs.get(stub).close();
-        }
-    }
-
-    /**
-     * Collects the target stream for this data.
-     * @param stub The stub for this stream.
-     * @param <T> type of stub.
-     * @return An instantiated file into which data can be written.
-     */
-    protected <T> T getTargetStream( Stub<T> stub ) {
-        if( !outputs.containsKey(stub) )
-            throw new ReviewedGATKException("OutputTracker was not notified that this stub exists: " + stub);
-        Storage<T> storage = outputs.get(stub);
-        if( storage == null ) {
-            storage = StorageFactory.createStorage(stub);
-            outputs.put(stub,storage);
-        }
-        return (T)storage;
-    }
-
-    /**
-     * Ensures that the File associated with this stub (if any) is in a writable location
-     * @param stub
-     */
-    protected <T> void validateOutputPath(final Stub<T> stub) {
-        if (stub.getOutputFile() != null && !(IOUtils.isSpecialFile(stub.getOutputFile()))) {
-            final File parentDir = stub.getOutputFile().getAbsoluteFile().getParentFile();
-            if (! (parentDir.canWrite() && parentDir.canExecute()))
-                throw new UserException.CouldNotCreateOutputFile(stub.getOutputFile(),
-                        "either the containing directory doesn't exist or it isn't writable");
-        }
-    }
-
-    /**
-     * Install an OutputStreamStub into the given fieldName of the given walker.
-     * @param walker Walker into which to inject the field name.
-     * @param fieldName Name of the field into which to inject the stub.
-     */
-    private void installStub( Walker walker, String fieldName, OutputStream outputStream ) {
-        Field field = JVMUtils.findField( walker.getClass(), fieldName );
-        JVMUtils.setFieldValue( field, walker, outputStream );
-    }    
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/io/ThreadGroupOutputTracker.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/io/ThreadGroupOutputTracker.java
deleted file mode 100644
index fdb5fd2..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/io/ThreadGroupOutputTracker.java
+++ /dev/null
@@ -1,170 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.io;
-
-import org.broadinstitute.gatk.engine.executive.OutputMergeTask;
-import org.broadinstitute.gatk.engine.io.storage.Storage;
-import org.broadinstitute.gatk.engine.io.storage.StorageFactory;
-import org.broadinstitute.gatk.engine.io.stubs.Stub;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-import org.broadinstitute.gatk.utils.exceptions.UserException;
-
-import java.io.File;
-import java.io.IOException;
-import java.util.HashMap;
-import java.util.Map;
-
-/**
- * An output tracker that can either track its output per-thread or directly.
- *
- * This output tracker doesn't use thread local values, but rather looks up the
- * storage map via the thread's group.  This is necessary in the case where
- * there's a master thread that creates the output map, and spawns subthreads
- * that actually do work.  As long as those subthreads are spawned in the
- * thread group of the master thread, this tracker will properly find the
- * storage map associated with the master thread in the group, and return
- * the map to all subthreads.
- *
- * @author mhanna, depristo
- * @version 0.2
- */
-public class ThreadGroupOutputTracker extends OutputTracker {
-    /**
-     * A map from thread ID of the master thread to the storage map from
-     * Stub to Storage objects
-     */
-    private Map<ThreadGroup, Map<Stub, Storage>> threadsToStorage = new HashMap<ThreadGroup, Map<Stub, Storage>>();
-
-    /**
-     * A total hack.  If bypass = true, bypass thread local storage and write directly
-     * to the target file.  Used to handle output during initialize() and onTraversalDone().
-     */
-    private boolean bypass = false;
-    public void bypassThreadLocalStorage(boolean bypass) {
-        this.bypass = bypass;
-    }
-
-    /**
-     * Initialize the storage map for this thread.
-     *
-     * Checks if there's a thread local binding for this thread, and if
-     * not initializes the map for it.  This map is then
-     * populated with stub -> storage bindings according to the
-     * superclasses' outputs map.
-     *
-     * Must be called within the master thread to create a map associated with
-     * the master thread ID.
-     */
-    public synchronized void initializeStorage() {
-        final ThreadGroup group = Thread.currentThread().getThreadGroup();
-        Map<Stub,Storage> threadLocalOutputStreams = threadsToStorage.get(group);
-
-        if( threadLocalOutputStreams == null ) {
-            threadLocalOutputStreams = new HashMap<Stub,Storage>();
-            threadsToStorage.put( group, threadLocalOutputStreams );
-        }
-
-        for ( final Stub stub : outputs.keySet() ) {
-            final Storage target = StorageFactory.createStorage(stub, createTempFile(stub));
-            threadLocalOutputStreams.put(stub, target);
-        }
-    }
-
-    @Override
-    public <T> T getStorage( final Stub<T> stub ) {
-        Storage target;
-
-        if (bypass) {
-            target = outputs.get(stub);
-            if( target == null ) {
-                target = StorageFactory.createStorage(stub);
-                outputs.put(stub, target);
-            }
-        }
-        else {
-            final Map<Stub,Storage> threadLocalOutputStreams = findStorage(Thread.currentThread());
-            target = threadLocalOutputStreams.get(stub);
-
-            // make sure something hasn't gone wrong, and we somehow find a map that doesn't include our stub
-            if ( target == null )
-                throw new ReviewedGATKException("target isn't supposed to be null for " + Thread.currentThread()
-                        + " id " + Thread.currentThread().getId() + " map is " + threadLocalOutputStreams);
-        }
-
-        return (T)target;
-    }
-
-
-    private synchronized Map<Stub,Storage> findStorage(final Thread thread) {
-        final Map<Stub, Storage> map = threadsToStorage.get(thread.getThreadGroup());
-
-        if ( map != null ) {
-            return map;
-        } else {
-            // something is terribly wrong, we have a storage lookup for a thread that doesn't have
-            // any map data associated with it!
-            throw new ReviewedGATKException("Couldn't find storage map associated with thread " + thread + " in group " + thread.getThreadGroup());
-        }
-    }
-
-    /**
-     * Close down any existing temporary files which have been opened.
-     */
-    public synchronized OutputMergeTask closeStorage() {
-        final Map<Stub,Storage> threadLocalOutputStreams = findStorage(Thread.currentThread());
-
-        if( threadLocalOutputStreams == null || threadLocalOutputStreams.isEmpty() )
-            return null;
-
-        final OutputMergeTask outputMergeTask = new OutputMergeTask();
-        for( Map.Entry<Stub,Storage> entry: threadLocalOutputStreams.entrySet() ) {
-            final Stub stub = entry.getKey();
-            final Storage storageEntry = entry.getValue();
-
-            storageEntry.close();
-            outputMergeTask.addMergeOperation(getTargetStream(stub), storageEntry);
-        }
-
-//        logger.info("Closing " + Thread.currentThread().getId() + " => " + threadLocalOutputStreams);
-        threadLocalOutputStreams.clear();
-
-        return outputMergeTask;
-    }
-
-    /**
-     * Creates a temporary file for a stub of the given type.
-     * @param stub Stub for which to create a temporary file.
-     * @param <T> Type of the stub to accept.
-     * @return A temp file, or throw an exception if the temp file cannot be created.
-     */
-    private <T> File createTempFile( Stub<T> stub ) {
-        try {
-            return File.createTempFile( stub.getClass().getName(), null );
-        } catch( IOException ex ) {
-            throw new UserException.BadTmpDir("Unable to create temporary file for stub: " + stub.getClass().getName() );
-        }
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/io/storage/OutputStreamStorage.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/io/storage/OutputStreamStorage.java
deleted file mode 100644
index ac348a2..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/io/storage/OutputStreamStorage.java
+++ /dev/null
@@ -1,144 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.io.storage;
-
-import org.broadinstitute.gatk.engine.io.stubs.OutputStreamStub;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-import org.broadinstitute.gatk.utils.exceptions.UserException;
-
-import java.io.*;
-import java.nio.channels.Channels;
-import java.nio.channels.FileChannel;
-import java.nio.channels.WritableByteChannel;
-
-public class OutputStreamStorage extends OutputStream implements Storage<OutputStream> {
-    /**
-     * File to which data will temporarily be written.
-     */
-    private final File file;
-
-    /**
-     * Stream to which data in this shard will be written.
-     */
-    private final OutputStream outputStream;
-
-    /**
-     * Create a new storage area with the given stub.
-     * @param stub
-     */
-    public OutputStreamStorage( OutputStreamStub stub ) {
-        if( stub.getOutputFile() != null ) {
-            this.file = stub.getOutputFile();
-            this.outputStream = initializeOutputStream(stub.getOutputFile());
-        }
-        else if( stub.getOutputStream() != null ) {
-            this.file = null;
-            this.outputStream = stub.getOutputStream();           
-        }
-        else
-            throw new ReviewedGATKException("Not enough information to create storage for an OutputStream; need either a file or an existing output stream");
-    }
-
-    public OutputStreamStorage( OutputStreamStub stub, File file ) {
-        this.file = file;
-        this.outputStream = initializeOutputStream(file);
-    }
-
-    private OutputStream initializeOutputStream( File file ) {
-        try {
-            return new FileOutputStream( file );
-        }
-        catch(FileNotFoundException ex) {
-            throw new UserException.CouldNotCreateOutputFile(file, "Unable to open output stream for file", ex);
-        }
-    }
-
-    /**
-     * @{inheritDoc}
-     */
-    public void flush() throws IOException {
-        outputStream.flush();
-    }
-
-    /**
-     * @{inheritDoc}
-     */
-    public void close() {
-        // Don't close System.out or System.err; this'll cause trouble
-        // with subsequent code running in this VM.
-        if( outputStream == System.out || outputStream == System.err )
-            return;
-        
-        try {
-            outputStream.close();
-        }
-        catch( IOException ex ) {
-            throw new UserException.CouldNotCreateOutputFile(file, "Unable to close output stream", ex );
-        }
-    }
-
-    /**
-     * @{inheritDoc}
-     */
-    public void write( byte[] b ) throws IOException {
-        outputStream.write(b);
-    }
-
-    /**
-     * @{inheritDoc}
-     */
-    public void write( byte[] b, int off, int len ) throws IOException {
-        outputStream.write(b, off, len);
-    }
-
-    /**
-     * @{inheritDoc}
-     */
-    public void write( int b ) throws IOException {
-        outputStream.write(b);
-    }
-
-
-    public void mergeInto( OutputStream targetStream ) {
-        FileInputStream sourceStream = null;
-        try {
-            sourceStream = new FileInputStream( file );
-            FileChannel sourceChannel = sourceStream.getChannel();
-
-            WritableByteChannel targetChannel = Channels.newChannel( targetStream );
-            sourceChannel.transferTo( 0, sourceChannel.size(), targetChannel );
-
-            sourceStream.close();
-            file.delete();
-        }
-        catch( FileNotFoundException ex ) {
-            throw new UserException.CouldNotReadInputFile(file, "Unable to open input stream for file", ex);
-        }
-        catch( IOException ex ) {
-            throw new UserException.CouldNotReadInputFile(file, "Unable to transfer contents of file", ex);
-        }
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/io/storage/SAMFileWriterStorage.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/io/storage/SAMFileWriterStorage.java
deleted file mode 100644
index 3956e6e..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/io/storage/SAMFileWriterStorage.java
+++ /dev/null
@@ -1,157 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.io.storage;
-
-import htsjdk.samtools.*;
-import htsjdk.samtools.util.CloseableIterator;
-import htsjdk.samtools.util.ProgressLoggerInterface;
-import htsjdk.samtools.util.RuntimeIOException;
-import org.apache.log4j.Logger;
-import org.broadinstitute.gatk.engine.io.stubs.SAMFileWriterStub;
-import org.broadinstitute.gatk.utils.exceptions.UserException;
-import org.broadinstitute.gatk.utils.sam.SimplifyingSAMFileWriter;
-
-import java.io.File;
-import java.lang.reflect.InvocationTargetException;
-import java.lang.reflect.Method;
-
-/**
- * Provides temporary storage for SAMFileWriters.
- *
- * @author mhanna
- * @version 0.1
- */
public class SAMFileWriterStorage implements SAMFileWriter, Storage<SAMFileWriter> {
    // File backing this writer; used by mergeInto() to re-read and then delete the temp output.
    private final File file;
    private SAMFileWriter writer;

    private static Logger logger = Logger.getLogger(SAMFileWriterStorage.class);

    /**
     * Creates storage writing directly to the stub's own output file.
     * @param stub stub describing the desired SAM/BAM output.
     */
    public SAMFileWriterStorage( SAMFileWriterStub stub ) {
        this(stub,stub.getOutputFile());
    }

    /**
     * Creates storage writing to the given file in place of the stub's target.
     * Configures the htsjdk writer factory from the stub (index creation for
     * coordinate-sorted output, MD5 generation, max records in RAM), then builds
     * a file- or stream-backed writer, optionally wrapped in a simplifying writer.
     *
     * @param stub stub supplying header, sort order, compression and indexing options.
     * @param file file to write to; may be null if the stub carries an output stream instead.
     */
    public SAMFileWriterStorage( SAMFileWriterStub stub, File file ) {
        this.file = file;
        SAMFileWriterFactory factory = new SAMFileWriterFactory();
        // Enable automatic index creation for pre-sorted BAMs.
        if (stub.getFileHeader().getSortOrder().equals(SAMFileHeader.SortOrder.coordinate) && stub.getIndexOnTheFly())
            factory.setCreateIndex(true);
        if (stub.getGenerateMD5())
            factory.setCreateMd5File(true);
        // Adjust max records in RAM.
        // TODO -- this doesn't actually work because of a bug in Picard; do not use until fixed
        if(stub.getMaxRecordsInRam() != null)
            factory.setMaxRecordsInRam(stub.getMaxRecordsInRam());

        if(stub.getOutputFile() != null) {
            try {
                this.writer = createBAMWriter(factory,stub.getFileHeader(),stub.isPresorted(),file,stub.getCompressionLevel());
            }
            catch(RuntimeIOException ex) {
                throw new UserException.CouldNotCreateOutputFile(file,"file could not be created",ex);
            }
        }
        else if(stub.getOutputStream() != null){
            this.writer = factory.makeSAMWriter( stub.getFileHeader(), stub.isPresorted(), stub.getOutputStream());
        }
        else
            throw new UserException("Unable to write to SAM file; neither a target file nor a stream has been specified");

        // if we want to send the BAM file through the simplifying writer, wrap it here
        if ( stub.simplifyBAM() ) {
            this.writer = new SimplifyingSAMFileWriter(this.writer);
        }
    }

    public SAMFileHeader getFileHeader() {
        return writer.getFileHeader();
    }

    public void addAlignment( SAMRecord read ) {
        writer.addAlignment(read);
    }

    /** Closes the underlying writer, converting I/O failures into a UserException. */
    public void close() {
        try {
            writer.close();
        } catch (RuntimeIOException e) {
            throw new UserException.ErrorWritingBamFile(e.getMessage());
        }
    }

    /**
     * Copies every record from this storage's file into the target writer,
     * then closes the reader and deletes the file (even if the copy fails).
     * @param targetStream destination writer for the merged records.
     */
    public void mergeInto( SAMFileWriter targetStream ) {
        SAMFileReader reader = new SAMFileReader( file );
        try {
            CloseableIterator<SAMRecord> iterator = reader.iterator();
            while( iterator.hasNext() )
                targetStream.addAlignment( iterator.next() );
            iterator.close();
        }
        finally {
            reader.close();
            file.delete();
        }
    }

    /**
     * Creates the BAM writer, honoring an explicit compression level when one is supplied,
     * and pre-initializes it via reflection so that empty BAMs still produce an index.
     *
     * @param factory factory already configured from the stub.
     * @param header header for the new file.
     * @param presorted whether records will arrive already sorted.
     * @param outputFile destination file.
     * @param compressionLevel explicit compression level, or null for the factory default.
     * @return the constructed (and pre-initialized) writer.
     */
    private SAMFileWriter createBAMWriter(final SAMFileWriterFactory factory,
                                 final SAMFileHeader header,
                                 final boolean presorted,
                                 final File outputFile,
                                 final Integer compressionLevel) {
        SAMFileWriter writer;
        if(compressionLevel != null)
            writer = factory.makeBAMWriter(header, presorted, outputFile, compressionLevel);
        else
            writer = factory.makeBAMWriter(header, presorted, outputFile);

        // mhanna - 1 Mar 2011 - temporary hack until Picard generates an index file for empty BAMs --
        //                     - do a pre-initialization of the BAM file.
        try {
            Method prepareToWriteAlignmentsMethod = writer.getClass().getDeclaredMethod("prepareToWriteAlignments");
            // NOTE(review): getDeclaredMethod never returns null (it throws NoSuchMethodException
            // instead), so this null check appears to be dead code -- confirm before removing.
            if(prepareToWriteAlignmentsMethod != null) {
                prepareToWriteAlignmentsMethod.setAccessible(true);
                prepareToWriteAlignmentsMethod.invoke(writer);
            }
        }
        catch(NoSuchMethodException ex) {
            logger.info("Unable to call prepareToWriteAlignments method; this should be reviewed when Picard is updated.");
        }
        catch(IllegalAccessException ex) {
            logger.info("Unable to access prepareToWriteAlignments method; this should be reviewed when Picard is updated.");
        }
        catch(InvocationTargetException ex) {
            logger.info("Unable to invoke prepareToWriteAlignments method; this should be reviewed when Picard is updated.");
        }

        return writer;
    }

    @Override
    public void setProgressLogger(final ProgressLoggerInterface logger) {
        writer.setProgressLogger(logger);
    }
}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/io/storage/Storage.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/io/storage/Storage.java
deleted file mode 100644
index 363b70f..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/io/storage/Storage.java
+++ /dev/null
@@ -1,45 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.io.storage;
-
-/**
- * An interface representing the temporary storage of data.
- *
- * @author mhanna
- * @version 0.1
- */
/**
 * A temporary accumulation point for output data of a given stream type.
 * Implementations buffer data (typically in a temp file) until it is merged
 * into its final destination.
 *
 * @author mhanna
 * @version 0.1
 */
public interface Storage<StreamType> {
    /**
     * Signals that writing to the temporary storage is complete; releases the
     * underlying resource.
     */
    void close();

    /**
     * Merges the stream backing this temporary storage into the given target.
     * @param target Target stream for the temporary storage.  May not be null.
     */
    void mergeInto( StreamType target );
}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/io/storage/StorageFactory.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/io/storage/StorageFactory.java
deleted file mode 100644
index 3396006..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/io/storage/StorageFactory.java
+++ /dev/null
@@ -1,92 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.io.storage;
-
-import org.broadinstitute.gatk.engine.io.stubs.OutputStreamStub;
-import org.broadinstitute.gatk.engine.io.stubs.SAMFileWriterStub;
-import org.broadinstitute.gatk.engine.io.stubs.Stub;
-import org.broadinstitute.gatk.engine.io.stubs.VariantContextWriterStub;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-
-import java.io.File;
-
-/**
- * Construct storage of the required type.
- *
- * @author mhanna
- * @version 0.1
- */
-public class StorageFactory {
-    /**
-     * Disable storage factory construction.
-     */
-    private StorageFactory() {}
-
-    /**
-     * Gets the output storage associated with a given stub.
-     * @param stub The stub for which to find / create the right output stream.
-     * @param <T> Type of the stream to create.
-     * @return Storage object with a facade of type T.
-     */
-    public static <T> Storage<T> createStorage( Stub<T> stub ) {
-        return createStorage( stub, null );
-    }
-
-    /**
-     * Gets the output storage associated with a given stub.
-     * @param stub The stub for which to find / create the right output stream.
-     * @param file The filename to which to write the file.
-     * @param <T> Type of the stream to create.
-     * @return Storage object with a facade of type T.
-     */
-     public static <T> Storage<T> createStorage( Stub<T> stub, File file ) {
-        Storage storage;
-
-        if(stub instanceof OutputStreamStub) {
-            if( file != null )
-                storage = new OutputStreamStorage((OutputStreamStub)stub,file);
-            else
-                storage = new OutputStreamStorage((OutputStreamStub)stub);
-        }
-        else if(stub instanceof SAMFileWriterStub) {
-            if( file != null )
-                storage = new SAMFileWriterStorage((SAMFileWriterStub)stub,file);
-            else
-                storage = new SAMFileWriterStorage((SAMFileWriterStub)stub);
-        }
-        else if(stub instanceof VariantContextWriterStub) {
-            VariantContextWriterStub vcfWriterStub = (VariantContextWriterStub)stub;
-            if( file != null )
-                storage = new VariantContextWriterStorage(vcfWriterStub,file);
-            else
-                storage = new VariantContextWriterStorage(vcfWriterStub);
-        }
-        else
-            throw new ReviewedGATKException("Unsupported stub type: " + stub.getClass().getName());
-
-        return storage;
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/io/storage/VariantContextWriterStorage.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/io/storage/VariantContextWriterStorage.java
deleted file mode 100644
index c4f7769..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/io/storage/VariantContextWriterStorage.java
+++ /dev/null
@@ -1,228 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.io.storage;
-
-import htsjdk.samtools.util.BlockCompressedOutputStream;
-import org.apache.log4j.Logger;
-import htsjdk.tribble.AbstractFeatureReader;
-import htsjdk.tribble.Feature;
-import htsjdk.tribble.FeatureCodec;
-import org.broadinstitute.gatk.engine.io.stubs.VariantContextWriterStub;
-import org.broadinstitute.gatk.engine.refdata.tracks.FeatureManager;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-import org.broadinstitute.gatk.utils.exceptions.UserException;
-import htsjdk.variant.bcf2.BCF2Utils;
-import htsjdk.variant.variantcontext.VariantContext;
-import htsjdk.variant.variantcontext.writer.Options;
-import htsjdk.variant.variantcontext.writer.VariantContextWriter;
-import htsjdk.variant.variantcontext.writer.VariantContextWriterFactory;
-import htsjdk.variant.vcf.VCFHeader;
-
-import java.io.*;
-import java.util.Arrays;
-import java.util.EnumSet;
-import java.util.List;
-
-/**
- * Provides temporary and permanent storage for genotypes in VCF format.
- *
- * @author mhanna
- * @version 0.1
- */
-public class VariantContextWriterStorage implements Storage<VariantContextWriterStorage>, VariantContextWriter {
-    /**
-     * our log, which we want to capture anything from this class
-     */
-    private static Logger logger = Logger.getLogger(VariantContextWriterStorage.class);
-
-    private final static int BUFFER_SIZE = 1048576;
-
-    protected final File file;
-    protected OutputStream stream;
-    protected final VariantContextWriter writer;
-    boolean closed = false;
-
-    /**
-     * Constructs an object which will write directly into the output file provided by the stub.
-     * Intentionally delaying the writing of the header -- this should be filled in by the walker.
-     *
-     * Respects the isCompressed() request in stub, so if isCompressed() is true then this
-     * will create a storage output that dumps output to a BlockCompressedOutputStream.
-     *
-     * @param stub Stub to use when constructing the output file.
-     */
    public VariantContextWriterStorage(VariantContextWriterStub stub)  {
        if ( stub.getOutputFile() != null ) {
            // File-backed output: vcfWriterToFile honors compression and on-the-fly indexing.
            this.file = stub.getOutputFile();
            writer = vcfWriterToFile(stub,stub.getOutputFile(),true,true);
        }
        else if ( stub.getOutputStream() != null ) {
            // Stream-backed output: no file; write straight to the stub's stream.
            this.file = null;
            this.stream = stub.getOutputStream();
            writer = VariantContextWriterFactory.create(stream,
                    stub.getMasterSequenceDictionary(), stub.getWriterOptions(false));
        }
        else
            throw new ReviewedGATKException("Unable to create target to which to write; storage was provided with neither a file nor a stream.");
    }
-
-    /**
-     * Constructs an object which will redirect into a different file.
-     *
-     * Note that this function does not respect the isCompressed() request from the stub, in order
-     * to ensure that tmp. files can be read back in by the Tribble system, and merged with the mergeInto function.
-     *
-     * @param stub Stub to use when synthesizing file / header info.
-     * @param tempFile File into which to direct the output data.
-     */
-    public VariantContextWriterStorage(VariantContextWriterStub stub, File tempFile) {
-        //logger.debug("Creating temporary output file " + tempFile.getAbsolutePath() + " for VariantContext output.");
-        this.file = tempFile;
-        this.writer = vcfWriterToFile(stub, file, false, false);
-        writer.writeHeader(stub.getVCFHeader());
-    }
-
    /**
     * Common initialization routine for multiple constructors: opens the output
     * stream (block-compressed when permitted and requested) and wraps it in a
     * VariantContextWriter, optionally shadowed by a secondary BCF test writer.
     * Note: assigns the {@code stream} field as a side effect.
     *
     * @param stub Stub to use when constructing the output file.
     * @param file Target file into which to write VCF records.
     * @param indexOnTheFly true to index the file on the fly.  NOTE: will be forced to false for compressed files.
     * @param allowCompressed if false, we won't compress the output, even if the stub requests it.  Critical
     *                        for creating temp. output files that will be subsequently merged, as these do not
     *                        support compressed output
     * @return A VCF writer for use with this class
     */
    private VariantContextWriter vcfWriterToFile(final VariantContextWriterStub stub,
                                                 final File file,
                                                 final boolean indexOnTheFly,
                                                 final boolean allowCompressed) {
        try {
            // we cannot merge compressed outputs, so don't compress if allowCompressed is false,
            // which is the case when we have a temporary output file for later merging
            if ( allowCompressed && stub.isCompressed() )
                stream = new BlockCompressedOutputStream(file);
            else
                stream = new PrintStream(new BufferedOutputStream(new FileOutputStream(file), BUFFER_SIZE));
        }
        catch(IOException ex) {
            throw new UserException.CouldNotCreateOutputFile(file, "Unable to open target output stream", ex);
        }

        EnumSet<Options> options = stub.getWriterOptions(indexOnTheFly);
        VariantContextWriter writer = VariantContextWriterFactory.create(file, this.stream, stub.getMasterSequenceDictionary(), stub.getIndexCreator(), options);

        // if the stub says to test BCF, create a secondary writer to BCF and an 2 way out writer to send to both
        // TODO -- remove me when argument generateShadowBCF is removed
        if ( stub.alsoWriteBCFForTest() && ! VariantContextWriterFactory.isBCFOutput(file, options)) {
            final File bcfFile = BCF2Utils.shadowBCF(file);
            if ( bcfFile != null ) {
                FileOutputStream bcfStream;
                try {
                    bcfStream = new FileOutputStream(bcfFile);
                } catch (FileNotFoundException e) {
                    throw new RuntimeException(bcfFile + ": Unable to create BCF writer", e);
                }

                // TestWriter fans each call out to both the VCF and the shadow BCF writer.
                VariantContextWriter bcfWriter = VariantContextWriterFactory.create(bcfFile, bcfStream, stub.getMasterSequenceDictionary(), stub.getIndexCreator(), options);
                writer = new TestWriter(writer, bcfWriter);
            }
        }

        return writer;
    }
-
-    private final static class TestWriter implements VariantContextWriter {
-        final List<VariantContextWriter> writers;
-
-        private TestWriter(final VariantContextWriter ... writers) {
-            this.writers = Arrays.asList(writers);
-        }
-
-        @Override
-        public void writeHeader(final VCFHeader header) {
-            for ( final VariantContextWriter writer : writers ) writer.writeHeader(header);
-        }
-
-        @Override
-        public void close() {
-            for ( final VariantContextWriter writer : writers ) writer.close();
-        }
-
-        @Override
-        public void add(final VariantContext vc) {
-            for ( final VariantContextWriter writer : writers ) writer.add(vc);
-        }
-    }
-
-    public void add(VariantContext vc) {
-        if ( closed ) throw new ReviewedGATKException("Attempting to write to a closed VariantContextWriterStorage " + vc.getStart() + " storage=" + this);
-        writer.add(vc);
-    }
-
-    /**
-     * initialize this VCF header
-     *
-     * @param header  the header
-     */
-    public void writeHeader(VCFHeader header) {
-        writer.writeHeader(header);
-    }
-
-    /**
-     * Close the VCF storage object.
-     */
-    public void close() {
-        writer.close();
-        closed = true;
-    }
-
-    public void mergeInto(VariantContextWriterStorage target) {
-        try {
-            if ( ! closed )
-                throw new ReviewedGATKException("Writer not closed, but we are merging into the file!");
-            final String targetFilePath = target.file != null ? target.file.getAbsolutePath() : "/dev/stdin";
-            logger.debug(String.format("Merging VariantContextWriterStorage from %s into %s", file.getAbsolutePath(), targetFilePath));
-
-            // use the feature manager to determine the right codec for the tmp file
-            // that way we don't assume it's a specific type
-            final FeatureManager.FeatureDescriptor fd = new FeatureManager().getByFiletype(file);
-            if ( fd == null )
-                throw new UserException.LocalParallelizationProblem(file);
-
-            final FeatureCodec codec = fd.getCodec();
-            final AbstractFeatureReader<Feature, ?> source = AbstractFeatureReader.getFeatureReader(file.getAbsolutePath(), codec, false);
-
-            for ( final Feature vc : source.iterator() ) {
-                target.writer.add((VariantContext) vc);
-            }
-
-            source.close();
-            file.delete(); // this should be last to aid in debugging when the process fails
-        } catch (IOException e) {
-            throw new UserException.CouldNotReadInputFile(file, "Error reading file in VCFWriterStorage: ", e);
-        }
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/io/stubs/OutputStreamArgumentTypeDescriptor.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/io/stubs/OutputStreamArgumentTypeDescriptor.java
deleted file mode 100644
index 89b8b59..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/io/stubs/OutputStreamArgumentTypeDescriptor.java
+++ /dev/null
@@ -1,134 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.io.stubs;
-
-import org.broadinstitute.gatk.utils.commandline.*;
-import org.broadinstitute.gatk.engine.GenomeAnalysisEngine;
-import org.broadinstitute.gatk.utils.exceptions.DynamicClassResolutionException;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-
-import java.io.File;
-import java.io.OutputStream;
-import java.lang.reflect.Constructor;
-import java.lang.reflect.Type;
-
-/**
- * Insert an OutputStreamStub instead of a full-fledged concrete OutputStream implementations.
- */
-public class OutputStreamArgumentTypeDescriptor extends ArgumentTypeDescriptor {
-    /**
-     * The engine into which output stubs should be fed.
-     */
-    private final GenomeAnalysisEngine engine;
-
-    /**
-     * The default output stream to write to write this info if
-     */
-    private final OutputStream defaultOutputStream;
-
-    /**
-     * Create a new OutputStream argument, notifying the given engine when that argument has been created.
-     * @param engine Engine to add SAMFileWriter output to.
-     * @param defaultOutputStream Default target for output file.
-     */
-    public OutputStreamArgumentTypeDescriptor(GenomeAnalysisEngine engine,OutputStream defaultOutputStream) {
-        this.engine = engine;
-        this.defaultOutputStream = defaultOutputStream;
-    }
-
-    @Override
-    public boolean supports( Class type ) {
-        return getConstructorForClass(type) != null;
-    }
-
-    @Override
-    public boolean createsTypeDefault(ArgumentSource source) {
-        return !source.isRequired() && source.defaultsToStdout();
-    }
-
-    @Override
-    public String typeDefaultDocString(ArgumentSource source) {
-        return "stdout";
-    }
-
-    @Override
-    public Object createTypeDefault(ParsingEngine parsingEngine,ArgumentSource source, Type type) {
-        if(source.isRequired() || !source.defaultsToStdout())
-            throw new ReviewedGATKException("BUG: tried to create type default for argument type descriptor that can't support a type default.");
-        OutputStreamStub stub = new OutputStreamStub(defaultOutputStream);
-        engine.addOutput(stub);
-        return createInstanceOfClass((Class)type,stub);
-    }
-
-    @Override
-    public Object parse( ParsingEngine parsingEngine, ArgumentSource source, Type type, ArgumentMatches matches )  {
-        ArgumentDefinition definition = createDefaultArgumentDefinition(source);
-        String fileName = getArgumentValue( definition, matches ).asString();
-
-        // This parser has been passed a null filename and the GATK is not responsible for creating a type default for the object;
-        // therefore, the user must have failed to specify a type default
-        if(fileName == null && source.isRequired())
-            throw new MissingArgumentValueException(definition);
-
-        OutputStreamStub stub = new OutputStreamStub(new File(fileName));
-
-        engine.addOutput(stub);
-
-        Object result = createInstanceOfClass(makeRawTypeIfNecessary(type),stub);
-        // WARNING: Side effects required by engine!
-        parsingEngine.addTags(result,getArgumentTags(matches));
-        
-        return result;
-    }
-
-    /**
-     * Retrieves the constructor for an object that takes exactly one argument: an output stream.
-     * @param type Type for which to go constructor spelunking.
-     * @return Constructor, if available.  Null, if not.
-     */
-    private Constructor<OutputStream> getConstructorForClass( Class type ) {
-        try {
-            return type.getConstructor( OutputStream.class );
-        }
-        catch( NoSuchMethodException ex ) {
-            return null;
-        }
-    }
-
-    /**
-     * Creat a new instance of the class accepting a single outputstream constructor.
-     * @param type Type of object to create.
-     * @param outputStream resulting output stream.
-     * @return A new instance of the outputstream-derived class.
-     */
-    private Object createInstanceOfClass(Class type,OutputStream outputStream) {
-        try {
-            return getConstructorForClass(type).newInstance(outputStream);
-        } catch (Exception e) {
-            throw new DynamicClassResolutionException(type, e);
-        }
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/io/stubs/OutputStreamStub.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/io/stubs/OutputStreamStub.java
deleted file mode 100644
index 2f64dc2..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/io/stubs/OutputStreamStub.java
+++ /dev/null
@@ -1,142 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.io.stubs;
-
-import org.broadinstitute.gatk.engine.arguments.GATKArgumentCollection;
-import org.broadinstitute.gatk.engine.io.OutputTracker;
-
-import java.io.File;
-import java.io.IOException;
-import java.io.OutputStream;
-
-/**
- * A stub for routing and management of anything backed by an OutputStream.
- *
- * @author mhanna
- * @version 0.1
- */
-public class OutputStreamStub extends OutputStream implements Stub<OutputStream> {
-    /**
-     * The file that this stub should write to.  Should be passed along to
-     * whatever happens to create storage for this stub.  Might be null, if
-     * this stub connects directly to an existing stream.
-     */
-    private final File targetFile;
-
-    /**
-     * The stream that this stub should write to.  Should be passed along to
-     * whatever happens to create storage for this stub.  Might be null, if
-     * this stub connects directly to an existing stream.
-     */
-    private final OutputStream targetStream;
-    
-    /**
-     * Connects this stub with an external stream capable of serving the
-     * requests of the consumer of this stub.
-     */
-    private OutputTracker outputTracker = null;
-
-    /**
-     * Specify that this target output stream should write to the given file.
-     * @param targetFile Target file to which to write.  Should not be null.
-     */
-    public OutputStreamStub( File targetFile ) {
-        this.targetFile = targetFile;
-        this.targetStream = null;
-    }
-
-    /**
-     * Specify that this target output stream should write to the given stream.
-     * @param targetStream Target stream to which to write.  Should not be null.
-     */
-    public OutputStreamStub( OutputStream targetStream ) {
-        this.targetFile = null;
-        this.targetStream = targetStream;
-    }
-
-
-    /**
-     * Return the target file to which this data should be written.
-     * @return Target file.  No sanity checking will have been performed by the file object.
-     */
-    public File getOutputFile() {
-        return targetFile;
-    }
-
-    /**
-     * Return the target stream to which this data should be written.
-     * @return Target stream.  No sanity checking will have been performed by the file object.
-     */
-    public OutputStream getOutputStream() {
-        return targetStream;
-    }
-
-    /**
-     * Registers the given streamConnector with this stub.
-     * @param outputTracker The connector used to provide an appropriate stream.
-     */
-    public void register( OutputTracker outputTracker ) {
-        this.outputTracker = outputTracker;
-    }
-
-    @Override
-    public void processArguments( final GATKArgumentCollection argumentCollection ) {}
-
-    /**
-     * @{inheritDoc}
-     */
-    public void flush() throws IOException {
-        outputTracker.getStorage(this).flush();
-    }
-
-    /**
-     * @{inheritDoc}
-     */
-    public void close() throws IOException {
-        outputTracker.getStorage(this).close();
-    }
-
-    /**
-     * @{inheritDoc}
-     */
-    public void write( byte[] b ) throws IOException {
-        outputTracker.getStorage(this).write(b);
-    }
-
-    /**
-     * @{inheritDoc}
-     */
-    public void write( byte[] b, int off, int len ) throws IOException {
-        outputTracker.getStorage(this).write(b, off, len);
-    }
-
-    /**
-     * @{inheritDoc}
-     */
-    public void write( int b ) throws IOException {
-        outputTracker.getStorage(this).write(b);
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/io/stubs/SAMFileReaderArgumentTypeDescriptor.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/io/stubs/SAMFileReaderArgumentTypeDescriptor.java
deleted file mode 100644
index 42397cb..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/io/stubs/SAMFileReaderArgumentTypeDescriptor.java
+++ /dev/null
@@ -1,77 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.io.stubs;
-
-import htsjdk.samtools.SAMFileReader;
-import org.broadinstitute.gatk.utils.commandline.*;
-import org.broadinstitute.gatk.engine.GenomeAnalysisEngine;
-import org.broadinstitute.gatk.utils.exceptions.UserException;
-import org.broadinstitute.gatk.utils.sam.SAMFileReaderBuilder;
-
-import java.lang.reflect.Type;
-
-/**
- * Describe how to parse SAMFileReaders.
- */
-public class SAMFileReaderArgumentTypeDescriptor extends ArgumentTypeDescriptor {
-    /**
-     * The engine into which output stubs should be fed.
-     */
-    private GenomeAnalysisEngine engine;
-
-    /**
-     * Create a new SAMFileReader argument, notifying the given engine when that argument has been created.
-     * @param engine engine
-     */
-    public SAMFileReaderArgumentTypeDescriptor( GenomeAnalysisEngine engine ) {
-        this.engine = engine;
-    }
-
-    @Override
-    public boolean supports( Class type ) {
-        return SAMFileReader.class.isAssignableFrom(type);
-    }
-
-    @Override
-    public Object parse( ParsingEngine parsingEngine, ArgumentSource source, Type type, ArgumentMatches matches ) {
-        SAMFileReaderBuilder builder = new SAMFileReaderBuilder();
-
-        ArgumentMatchValue readerFileName = getArgumentValue( createDefaultArgumentDefinition(source), matches );
-
-        if( readerFileName == null )
-            throw new UserException.CommandLineException("SAM file compression was supplied, but no associated writer was supplied with it.");
-
-        builder.setSAMFile(readerFileName.asFile());
-
-        // WARNING: Skipping required side-effect because stub is impossible to generate.
-        engine.addInput(source, builder);
-
-        // MASSIVE KLUDGE!  SAMFileReader is tricky to implement and we don't yet have a stub.  Return null, then
-        // let the output tracker load it in.
-        // TODO: Add a stub for SAMFileReader.
-        return null;
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/io/stubs/SAMFileWriterArgumentTypeDescriptor.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/io/stubs/SAMFileWriterArgumentTypeDescriptor.java
deleted file mode 100644
index c454324..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/io/stubs/SAMFileWriterArgumentTypeDescriptor.java
+++ /dev/null
@@ -1,106 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.io.stubs;
-
-import htsjdk.samtools.SAMFileWriter;
-import org.broadinstitute.gatk.utils.commandline.*;
-import org.broadinstitute.gatk.engine.GenomeAnalysisEngine;
-import org.broadinstitute.gatk.engine.io.GATKSAMFileWriter;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-
-import java.io.OutputStream;
-import java.lang.reflect.Type;
-
-/**
- * Insert a SAMFileWriterStub  instead of a full-fledged concrete OutputStream implementations.
- */
-public class SAMFileWriterArgumentTypeDescriptor extends ArgumentTypeDescriptor {
-
-    /**
-     * The engine into which output stubs should be fed.
-     */
-    private final GenomeAnalysisEngine engine;
-
-    /**
-     * The default location to which data should be written if the user specifies no such location.
-     */
-    private final OutputStream defaultOutputStream;
-
-    /**
-     * Create a new SAMFileWriter argument, notifying the given engine when that argument has been created.
-     * @param engine Engine to add SAMFileWriter output to.
-     * @param defaultOutputStream the target for the data
-     */
-    public SAMFileWriterArgumentTypeDescriptor( GenomeAnalysisEngine engine, OutputStream defaultOutputStream ) {
-        this.engine = engine;
-        this.defaultOutputStream = defaultOutputStream;
-    }
-
-    @Override
-    public boolean supports( Class type ) {
-        return SAMFileWriter.class.equals(type) || GATKSAMFileWriter.class.equals(type);
-    }
-
-    @Override
-    public boolean createsTypeDefault(ArgumentSource source) {
-        return !source.isRequired() && source.defaultsToStdout();
-    }
-
-    @Override
-    public String typeDefaultDocString(ArgumentSource source) {
-        return "stdout";
-    }
-
-    @Override
-    public Object createTypeDefault(ParsingEngine parsingEngine,ArgumentSource source, Type type) {
-        if(source.isRequired() || !source.defaultsToStdout())
-            throw new ReviewedGATKException("BUG: tried to create type default for argument type descriptor that can't support a type default.");
-        SAMFileWriterStub stub = new SAMFileWriterStub(engine,defaultOutputStream);
-        engine.addOutput(stub);
-        return stub;
-    }
-
-    @Override
-    public Object parse( ParsingEngine parsingEngine, ArgumentSource source, Type type, ArgumentMatches matches )  {
-        // Extract all possible parameters that could be passed to a BAM file writer?
-        ArgumentDefinition bamArgumentDefinition = createDefaultArgumentDefinition(source);
-        ArgumentMatchValue writerFileName = getArgumentValue( bamArgumentDefinition, matches );
-
-        // Create the stub
-        SAMFileWriterStub stub = null;      // stub = new SAMFileWriterStub(engine, defaultOutputStream);
-
-        if (writerFileName != null &&  writerFileName.asFile() != null ) {
-            stub = new SAMFileWriterStub(engine, writerFileName.asFile());
-
-            // WARNING: Side effects required by engine!
-            parsingEngine.addTags(stub,getArgumentTags(matches));
-            engine.addOutput(stub);
-        }
-
-        return stub;
-    }
-
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/io/stubs/SAMFileWriterStub.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/io/stubs/SAMFileWriterStub.java
deleted file mode 100644
index cc814e9..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/io/stubs/SAMFileWriterStub.java
+++ /dev/null
@@ -1,336 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.io.stubs;
-
-import htsjdk.samtools.SAMFileHeader;
-import htsjdk.samtools.SAMFileWriter;
-import htsjdk.samtools.SAMRecord;
-import htsjdk.samtools.util.ProgressLoggerInterface;
-import org.broadinstitute.gatk.engine.GenomeAnalysisEngine;
-import org.broadinstitute.gatk.engine.arguments.GATKArgumentCollection;
-import org.broadinstitute.gatk.engine.io.OutputTracker;
-import org.broadinstitute.gatk.engine.io.GATKSAMFileWriter;
-import org.broadinstitute.gatk.engine.iterators.ReadTransformer;
-import org.broadinstitute.gatk.utils.baq.BAQ;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-import org.broadinstitute.gatk.utils.exceptions.UserException;
-import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
-
-import java.io.File;
-import java.io.OutputStream;
-import java.util.ArrayList;
-import java.util.List;
-
-/**
- * A stub for routing and management of SAM file reading and writing.
- *
- * @author mhanna
- * @version 0.1
- */
-public class SAMFileWriterStub implements Stub<SAMFileWriter>, GATKSAMFileWriter {
-    /**
-     * Engine to use for collecting attributes for the output SAM file.
-     */
-    private final GenomeAnalysisEngine engine;
-
-    /**
-     * A header supplied by the user that overrides the merged header from the input BAM.
-     */
-    private SAMFileHeader headerOverride = null;
-
-    /**
-     * The sam file that this stub should write to.  Should be passed along to
-     * whatever happens to create the StreamConnector.
-     */
-    private final File samFile;
-
-    /**
-     * The target output stream, to be used in place of the SAM file.
-     */
-    private final OutputStream samOutputStream;
-
-    /**
-     * The validation stringency to apply when reading this file.
-     */
-    private Integer compressionLevel = null;
-
-    /**
-     * Should the GATK index the output BAM on-the-fly?
-     */
-    private boolean indexOnTheFly = false;
-
-    /**
-     * Should the GATK generate an md5 for the output BAM?
-     */
-    private boolean generateMD5 = false;
-
-    /**
-     * Should this BAM be presorted?
-     */
-    private boolean presorted = true;
-
-    /**
-     * How many records should the BAM writer store in RAM while
-     * sorting the BAM on-the-fly?
-     */
-    private Integer maxRecordsInRam = null;
-
-    /**
-     * Connects this stub with an external stream capable of serving the
-     * requests of the consumer of this stub.
-     */
-    private OutputTracker outputTracker = null;
-
-    /**
-     * Has the write started?  If so, throw an exception if someone tries to
-     * change write parameters to the file (compression level, presorted flag,
-     * header, etc).
-     */
-    private boolean writeStarted = false;
-
-
-    /**
-     * HMM for BAQ, if needed
-     */
-    BAQ baqHMM = new BAQ();
-
-    /**
-     * Should we simplify the BAM file while writing it out?
-     */
-    private boolean simplifyBAM = false;
-
-    private List<ReadTransformer> onOutputReadTransformers = null;
-
-    /**
-     * Create a new stub given the requested SAM file and compression level.
-     * @param engine source of header data, maybe other data about input files.
-     * @param samFile SAM file to (ultimately) create.
-     */
-    public SAMFileWriterStub( GenomeAnalysisEngine engine, File samFile ) {
-        this(engine, samFile, null);
-    }
-
-    /**
-     * Create a new stub given the requested SAM file and compression level.
-     * @param engine source of header data, maybe other data about input files.
-     * @param stream Output stream to which data should be written.
-     */
-    public SAMFileWriterStub( GenomeAnalysisEngine engine, OutputStream stream ) {
-        this(engine, null, stream);
-    }
-
-    private SAMFileWriterStub(final GenomeAnalysisEngine engine, final File samFile, final OutputStream stream) {
-        this.engine = engine;
-        this.samFile = samFile;
-        this.samOutputStream = stream;
-    }
-
-    /**
-     * Retrieves the SAM file to (ultimately) be created.
-     * @return The SAM file.  Must not be null.
-     */
-    public File getOutputFile() {
-        return samFile;
-    }
-
-    public boolean simplifyBAM() {
-        return simplifyBAM;
-    }
-
-    public void setSimplifyBAM(boolean v) {
-        simplifyBAM = v;
-    }
-
-    public OutputStream getOutputStream() {
-        return samOutputStream;
-    }
-
-    /**
-     * Retrieves the header to use when creating the new SAM file.
-     * @return header to use when creating the new SAM file.
-     */
-    public SAMFileHeader getFileHeader() {
-        return headerOverride != null ? headerOverride : engine.getSAMFileHeader();
-    }
-
-    /**
-     * Retrieves the desired compression level for 
-     * @return The current compression level.  Could be null if the user doesn't care.
-     */
-    public Integer getCompressionLevel() {
-        return compressionLevel;
-    }
-
-    /**
-     * Sets the desired compression level.
-     * @param compressionLevel The suggested compression level.
-     */
-    public void setCompressionLevel( Integer compressionLevel ) {
-        if(writeStarted)
-            throw new ReviewedGATKException("Attempted to change the compression level of a file with alignments already in it.");
-        this.compressionLevel = compressionLevel;
-    }
-
-    /**
-     * Gets whether to index this output stream on-the-fly.
-     * @return True means create an index.  False means skip index creation.
-     */
-    public Boolean getIndexOnTheFly() {
-        return indexOnTheFly;
-    }
-
-    /**
-     * Controls whether to index this output stream on-the-fly.
-     * @param indexOnTheFly True means create an index.  False means skip index creation.
-     */
-    public void setIndexOnTheFly( boolean indexOnTheFly ) {
-        if(writeStarted)
-            throw new UserException("Attempted to index a BAM on the fly of a file with alignments already in it.");
-        this.indexOnTheFly = indexOnTheFly;
-    }
-
-    /**
-     * Gets whether to generate an md5 on-the-fly for this BAM.
-     * @return True generates the md5.  False means skip writing the file.
-     */
-    public Boolean getGenerateMD5() {
-        return generateMD5;
-    }
-
-    /**
-     * Gets whether to generate an md5 on-the-fly for this BAM.
-     * @param generateMD5   True generates the md5.  False means skip writing the file.
-     */
-    public void setGenerateMD5(boolean generateMD5) {
-        if(writeStarted)
-            throw new UserException("Attempted to turn on md5 generation for BAM file with alignments already in it.");        
-        this.generateMD5 = generateMD5;
-    }
-
-    /**
-     * Whether the BAM file to create is actually presorted.
-     * @return True if the BAM file is presorted.  False otherwise.
-     */
-    public boolean isPresorted() {
-        return this.presorted;
-    }
-
-    /**
-     * Set Whether the BAM file to create is actually presorted.
-     * @param presorted True if the BAM file is presorted.  False otherwise.
-     */
-    public void setPresorted(boolean presorted) {
-        if(writeStarted)
-            throw new ReviewedGATKException("Attempted to change the presorted state of a file with alignments already in it.");
-        this.presorted = presorted;
-    }
-
-    /**
-     * Get the maximum number of reads to hold in RAM when sorting a BAM on-the-fly.
-     * @return Max records in RAM, or null if unset.
-     */
-    public Integer getMaxRecordsInRam() {
-        return this.maxRecordsInRam;
-    }
-
-    /**
-     * Sets the maximum number of reads to hold in RAM when sorting a BAM on-the-fly.
-     * @param maxRecordsInRam Max number of records in RAM.
-     */
-    public void setMaxRecordsInRam(int maxRecordsInRam) {
-        if(writeStarted)
-            throw new ReviewedGATKException("Attempted to change the max records in RAM of a file with alignments already in it.");
-        this.maxRecordsInRam = maxRecordsInRam;
-    }
-
-    /**
-     * Registers the given streamConnector with this stub.
-     * @param outputTracker The connector used to provide an appropriate stream.
-     */
-    public void register( OutputTracker outputTracker ) {
-        this.outputTracker = outputTracker;
-    }
-
-    @Override
-    public void processArguments( final GATKArgumentCollection argumentCollection ) {
-        if (argumentCollection.bamCompression != null)
-            setCompressionLevel(argumentCollection.bamCompression);
-        setGenerateMD5(argumentCollection.enableBAMmd5);
-        setIndexOnTheFly(!argumentCollection.disableBAMIndexing);
-        setSimplifyBAM(argumentCollection.simplifyBAM);
-
-    }
-
-    /**
-     * Use the given header as the target for this writer.
-     * @param header The header to write.
-     */
-    public void writeHeader(SAMFileHeader header) {
-        if(writeStarted)
-            throw new ReviewedGATKException("Attempted to change the header of a file with alignments already in it.");
-        this.headerOverride = header;
-    }
-
-    private void initializeReadTransformers() {
-        this.onOutputReadTransformers = new ArrayList<>(engine.getReadTransformers().size());
-        for ( final ReadTransformer transformer : engine.getReadTransformers() ) {
-            if ( transformer.getApplicationTime() == ReadTransformer.ApplicationTime.ON_OUTPUT )
-                onOutputReadTransformers.add(transformer);
-        }
-    }
-
-    /**
-     * @{inheritDoc}
-     */
-    public void addAlignment( final SAMRecord readIn ) {
-        if ( onOutputReadTransformers == null )
-            initializeReadTransformers();
-
-        GATKSAMRecord workingRead = (GATKSAMRecord)readIn;
-
-        // run on output read transformers
-        for ( final ReadTransformer transform : onOutputReadTransformers )
-            workingRead = transform.apply(workingRead);
-
-        writeStarted = true;
-        outputTracker.getStorage(this).addAlignment(workingRead);
-    }
-
-    /**
-     * @{inheritDoc}
-     */
-    public void close() {
-        outputTracker.getStorage(this).close();    
-    }
-
-    /**
-     * @throws java.lang.UnsupportedOperationException No progress logging in this implementation.
-     */
-    @Override
-    public void setProgressLogger(final ProgressLoggerInterface logger) {
-        throw new UnsupportedOperationException("Progress logging not supported");
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/io/stubs/Stub.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/io/stubs/Stub.java
deleted file mode 100644
index 8a00007..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/io/stubs/Stub.java
+++ /dev/null
@@ -1,69 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.io.stubs;
-
-import org.broadinstitute.gatk.engine.arguments.GATKArgumentCollection;
-import org.broadinstitute.gatk.engine.io.OutputTracker;
-
-import java.io.File;
-import java.io.OutputStream;
-
-/**
- * A stub used for managing IO. Acts as a proxy for IO streams
- * not yet created or streams that need significant external
- * management.
- *
- * @author mhanna
- * @version 0.1
- */
-public interface Stub<StreamType> {
-    /**
-     * Provides a facility to register this stream with the given
-     * StreamConnector.  The stub should route each output method
-     * to the specified connector.
-     * @param outputTracker The connector used to provide an appropriate stream.
-     */
-    public void register( OutputTracker outputTracker );
-
-    /**
-     * Provides a mechanism for uniformly processing command-line arguments
-     * that are important for file processing.  For example, this method
-     * might pass on the compression value specified by the user to
-     * a SAMFileWriter
-     * @param argumentCollection The arguments to be processed
-     */
-    public void processArguments( final GATKArgumentCollection argumentCollection );
-
-    /**
-     * Returns the OutputStream represented by this stub or null if not available.
-     */
-    public OutputStream getOutputStream();
-
-    /**
-     * Returns the File represented by this stub or null if not available.
-     */
-    public File getOutputFile();
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/io/stubs/VCFWriterArgumentTypeDescriptor.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/io/stubs/VCFWriterArgumentTypeDescriptor.java
deleted file mode 100644
index 6861339..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/io/stubs/VCFWriterArgumentTypeDescriptor.java
+++ /dev/null
@@ -1,148 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.io.stubs;
-
-import htsjdk.tribble.AbstractFeatureReader;
-import org.broadinstitute.gatk.utils.commandline.*;
-import org.broadinstitute.gatk.engine.GenomeAnalysisEngine;
-import htsjdk.variant.variantcontext.writer.VariantContextWriter;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-
-import java.io.File;
-import java.io.OutputStream;
-import java.lang.reflect.Type;
-import java.util.Collection;
-
-/**
- * Injects new command-line arguments into the system providing support for the genotype writer.
- *
- * @author mhanna
- * @version 0.1
- */
-public class VCFWriterArgumentTypeDescriptor extends ArgumentTypeDescriptor {
-
-    /**
-     * The engine into which output stubs should be fed.
-     */
-    private final GenomeAnalysisEngine engine;
-
-    /**
-      * The default location to which data should be written if the user specifies no such location.
-      */
-    private final OutputStream defaultOutputStream;
-
-    /**
-     * The sources into which arguments were injected.
-     */
-    private final Collection<Object> argumentSources;
-
-    /**
-     * Create a new GenotypeWriter argument, notifying the given engine when that argument has been created.
-     * @param engine the engine to be notified.
-     * @param defaultOutputStream the default output stream to be written to if nothing else is specified.
-     * @param argumentSources sources from which command-line arguments should be derived.
-     */
-    public VCFWriterArgumentTypeDescriptor(GenomeAnalysisEngine engine, OutputStream defaultOutputStream, Collection<Object> argumentSources) {
-        this.engine = engine;
-        this.defaultOutputStream = defaultOutputStream;
-        this.argumentSources = argumentSources;
-    }
-
-    /**
-     * Reports whether this ArgumentTypeDescriptor supports the given type.
-     * @param type The type to check.
-     * @return True if the argument is a GenotypeWriter.
-     */
-    @Override
-    public boolean supports( Class type ) {
-        return VariantContextWriter.class.equals(type);
-    }
-
-    /**
-     * This command-line argument descriptor does want to override the provided default value.
-     * @return true always.
-     */
-    @Override
-    public boolean createsTypeDefault(ArgumentSource source) {
-        return !source.isRequired() && source.defaultsToStdout();
-    }
-
-    @Override
-    public String typeDefaultDocString(ArgumentSource source) {
-        return "stdout";
-    }
-
-    @Override
-    public Object createTypeDefault(ParsingEngine parsingEngine, ArgumentSource source, Type type) {
-        if(source.isRequired() || !source.defaultsToStdout())
-            throw new ReviewedGATKException("BUG: tried to create type default for argument type descriptor that can't support a type default.");        
-        VariantContextWriterStub stub = new VariantContextWriterStub(engine, defaultOutputStream, argumentSources);
-        engine.addOutput(stub);
-        return stub;
-    }
-
-    /**
-     * Convert the given argument matches into a single object suitable for feeding into the ArgumentSource.
-     * @param source Source for this argument.
-     * @param type not used
-     * @param matches Matches that match with this argument.
-     * @return Transform from the matches into the associated argument.
-     */
-    @Override
-    public Object parse( ParsingEngine parsingEngine, ArgumentSource source, Type type, ArgumentMatches matches )  {
-        ArgumentDefinition defaultArgumentDefinition = createDefaultArgumentDefinition(source);
-        // Get the filename for the genotype file, if it exists.  If not, we'll need to send output to out.
-        ArgumentMatchValue writerFileName = getArgumentValue(defaultArgumentDefinition,matches);
-        File writerFile = writerFileName != null ? writerFileName.asFile() : null;
-
-        // This parser has been passed a null filename and the GATK is not responsible for creating a type default for the object;
-        // therefore, the user must have failed to specify a type default
-        if(writerFile == null && source.isRequired())
-            throw new MissingArgumentValueException(defaultArgumentDefinition);
-
-        // Create a stub for the given object.
-        final VariantContextWriterStub stub = (writerFile != null)
-                ? new VariantContextWriterStub(engine, writerFile, argumentSources)
-                : new VariantContextWriterStub(engine, defaultOutputStream, argumentSources);
-
-        stub.setCompressed(isCompressed(writerFileName == null ? null: writerFileName.asString()));
-
-        // WARNING: Side effects required by engine!
-        parsingEngine.addTags(stub,getArgumentTags(matches));
-        engine.addOutput(stub);
-
-        return stub;
-    }
-
-    /**
-     * Returns true if the file will be compressed.
-     * @param writerFileName Name of the file
-     * @return true if the file will be compressed.
-     */
-    public static boolean isCompressed(String writerFileName) {
-        return writerFileName != null && AbstractFeatureReader.hasBlockCompressedExtension(writerFileName);
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/io/stubs/VariantContextWriterStub.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/io/stubs/VariantContextWriterStub.java
deleted file mode 100644
index f40ede5..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/io/stubs/VariantContextWriterStub.java
+++ /dev/null
@@ -1,301 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.io.stubs;
-
-import htsjdk.samtools.SAMSequenceDictionary;
-import htsjdk.tribble.index.IndexCreator;
-import org.broadinstitute.gatk.engine.GenomeAnalysisEngine;
-import org.broadinstitute.gatk.engine.arguments.GATKArgumentCollection;
-import org.broadinstitute.gatk.engine.io.OutputTracker;
-import org.broadinstitute.gatk.utils.variant.GATKVCFUtils;
-import htsjdk.variant.variantcontext.VariantContext;
-import htsjdk.variant.variantcontext.writer.Options;
-import htsjdk.variant.variantcontext.writer.VariantContextWriter;
-import htsjdk.variant.variantcontext.writer.VariantContextWriterFactory;
-import htsjdk.variant.vcf.VCFHeader;
-import htsjdk.variant.vcf.VCFHeaderLine;
-
-import java.io.File;
-import java.io.OutputStream;
-import java.io.PrintStream;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.EnumSet;
-import java.util.List;
-
-/**
- * A stub for routing and management of genotype reading and writing.
- *
- * @author ebanks
- * @version 0.1
- */
-public class VariantContextWriterStub implements Stub<VariantContextWriter>, VariantContextWriter {
-    public final static boolean UPDATE_CONTIG_HEADERS = true;
-
-    /**
-     * The engine, central to the GATK's processing.
-     */
-    private final GenomeAnalysisEngine engine;
-
-    /**
-     * The file that this stub should write to.  Should be mutually
-     * exclusive with genotypeStream.
-     */
-    private final File genotypeFile;
-
-    /**
-     * The output stream to which stub data should be written.  Will be
-     * mutually exclusive with genotypeFile.
-     */
-    private final PrintStream genotypeStream;
-
-    /**
-     * A hack: push the argument sources into the VCF header so that the VCF header
-     * can rebuild the command-line arguments.
-     */
-    private final Collection<Object> argumentSources;
-
-    /**
-     * Which IndexCreator to use
-     */
-    private final IndexCreator indexCreator;
-
-    /**
-     * The cached VCF header (initialized to null)
-     */
-    private VCFHeader vcfHeader = null;
-
-    /**
-     * Should we emit a compressed output stream?
-     */
-    private boolean isCompressed = false;
-
-    /**
-     * Should the header be written out?  A hidden argument.
-     */
-    private boolean skipWritingCommandLineHeader = false;
-
-    /**
-     * Should we not write genotypes even when provided?
-     */
-    private boolean doNotWriteGenotypes = false;
-
-    /**
-     * Should we force BCF writing regardless of the file extension?
-     */
-    private boolean forceBCF = false;
-
-    /**
-     * Should we write all of the fields in the FORMAT field, even if missing fields could be trimmed?
-     */
-    private boolean writeFullFormatField = false;
-
-    /**
-     * Connects this stub with an external stream capable of serving the
-     * requests of the consumer of this stub.
-     */
-    protected OutputTracker outputTracker = null;
-
-    /**
-     * Create a new stub given the requested file.
-     *
-     * @param engine engine.
-     * @param genotypeFile  file to (ultimately) create.
-     * @param argumentSources sources.
-     */
-    public VariantContextWriterStub(GenomeAnalysisEngine engine, File genotypeFile, Collection<Object> argumentSources) {
-        this.engine = engine;
-        this.genotypeFile = genotypeFile;
-        this.genotypeStream = null;
-        this.indexCreator = GATKVCFUtils.getIndexCreator(engine.getArguments().variant_index_type, engine.getArguments().variant_index_parameter, genotypeFile);
-        this.argumentSources = argumentSources;
-    }
-
-    /**
-     * Create a new stub given the requested file.
-     *
-     * @param engine engine.
-     * @param genotypeStream  stream to (ultimately) write.
-     * @param argumentSources sources.
-     */
-    public VariantContextWriterStub(GenomeAnalysisEngine engine, OutputStream genotypeStream, Collection<Object> argumentSources) {
-        this.engine = engine;
-        this.genotypeFile = null;
-        this.genotypeStream = new PrintStream(genotypeStream);
-        this.indexCreator = null;
-        this.argumentSources = argumentSources;
-    }
-
-    /**
-     * Retrieves the file to (ultimately) be created.
-     * @return The file.  Can be null if genotypeStream is not.
-     */
-    public File getOutputFile() {
-        return genotypeFile;
-    }
-
-    /**
-     * Retrieves the output stream to which to (ultimately) write.
-     * @return The file.  Can be null if genotypeFile is not.
-     */
-    public OutputStream getOutputStream() {
-        return genotypeStream;
-    }
-
-    public boolean isCompressed() {
-        return isCompressed;
-    }
-
-    public void setCompressed(final boolean compressed) {
-        isCompressed = compressed;
-    }
-
-    public void setSkipWritingCommandLineHeader(final boolean skipWritingCommandLineHeader) {
-        this.skipWritingCommandLineHeader = skipWritingCommandLineHeader;
-    }
-
-    public void setDoNotWriteGenotypes(final boolean doNotWriteGenotypes) {
-        this.doNotWriteGenotypes = doNotWriteGenotypes;
-    }
-
-    public void setForceBCF(final boolean forceBCF) {
-        this.forceBCF = forceBCF;
-    }
-
-    public void setWriteFullFormatField(final boolean writeFullFormatField) {
-        this.writeFullFormatField = writeFullFormatField;
-    }
-
-    public IndexCreator getIndexCreator() {
-        return indexCreator;
-    }
-
-    /**
-     * Gets the master sequence dictionary from the engine associated with this stub
-     * @link GenomeAnalysisEngine.getMasterSequenceDictionary
-     * @return the master sequence dictionary from the engine associated with this stub
-     */
-    public SAMSequenceDictionary getMasterSequenceDictionary() {
-        return engine.getMasterSequenceDictionary();
-    }
-
-    public EnumSet<Options> getWriterOptions() {
-        return getWriterOptions(false);
-    }
-
-    public EnumSet<Options> getWriterOptions(boolean indexOnTheFly) {
-        final List<Options> options = new ArrayList<>();
-
-        if ( doNotWriteGenotypes ) options.add(Options.DO_NOT_WRITE_GENOTYPES);
-        if ( engine.lenientVCFProcessing() ) options.add(Options.ALLOW_MISSING_FIELDS_IN_HEADER);
-        if ( indexOnTheFly) options.add(Options.INDEX_ON_THE_FLY);
-        if ( writeFullFormatField ) options.add(Options.WRITE_FULL_FORMAT_FIELD);
-
-        if ( forceBCF || (getOutputFile() != null && VariantContextWriterFactory.isBCFOutput(getOutputFile())) )
-            options.add(Options.FORCE_BCF);
-
-        return options.isEmpty() ? EnumSet.noneOf(Options.class) : EnumSet.copyOf(options);
-    }
-
-    /**
-     * Retrieves the header to use when creating the new file.
-     * @return header to use when creating the new file.
-     */
-    public VCFHeader getVCFHeader() {
-        return vcfHeader;
-    }
-
-    /**
-     * Registers the given streamConnector with this stub.
-     * @param outputTracker The connector used to provide an appropriate stream.
-     */
-    public void register( OutputTracker outputTracker ) {
-        this.outputTracker = outputTracker;
-    }
-
-    @Override
-    public void processArguments( final GATKArgumentCollection argumentCollection ) {
-        setDoNotWriteGenotypes(argumentCollection.sitesOnlyVCF);
-        setSkipWritingCommandLineHeader(argumentCollection.disableCommandLineInVCF);
-        setForceBCF(argumentCollection.forceBCFOutput);
-        setWriteFullFormatField(argumentCollection.neverTrimVCFFormatField);
-    }
-
-    public void writeHeader(VCFHeader header) {
-        vcfHeader = header;
-
-        if ( header.isWriteEngineHeaders() ) {
-            // skip writing the command line header if requested
-            if ( ! skipWritingCommandLineHeader && header.isWriteCommandLine() ) {
-                // Always add the header line, as the current format allows multiple entries
-                final VCFHeaderLine commandLineArgHeaderLine = GATKVCFUtils.getCommandLineArgumentHeaderLine(engine, argumentSources);
-                vcfHeader.addMetaDataLine(commandLineArgHeaderLine);
-            }
-
-            if ( UPDATE_CONTIG_HEADERS )
-                vcfHeader = GATKVCFUtils.withUpdatedContigs(vcfHeader, engine);
-        }
-
-        outputTracker.getStorage(this).writeHeader(vcfHeader);
-    }
-
-    /**
-     * @{inheritDoc}
-     */
-    public void add(VariantContext vc) {
-        outputTracker.getStorage(this).add(vc);
-    }
-
-    /**
-     * @{inheritDoc}
-     */
-    public void close() {
-        outputTracker.getStorage(this).close();
-    }
-
-    /**
-     * Gets a string representation of this object.
-     * @return a string representation of this object.
-     */
-    @Override
-    public String toString() {
-        return getClass().getName();
-    }
-
-    /**
-     * Should we also write a BCF file alongside our VCF file for testing
-     *
-     * TODO -- remove me when argument generateShadowBCF is removed
-     *
-     * @return
-     */
-    public boolean alsoWriteBCFForTest() {
-        return engine.getArguments().numberOfDataThreads == 1 && // only works single threaded
-                ! isCompressed() && // for non-compressed outputs
-                getOutputFile() != null && // that are going to disk
-                engine.getArguments().generateShadowBCF; // and we actually want to do it
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/iterators/BoundedReadIterator.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/iterators/BoundedReadIterator.java
deleted file mode 100644
index cb696e5..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/iterators/BoundedReadIterator.java
+++ /dev/null
@@ -1,159 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.iterators;
-
-import htsjdk.samtools.MergingSamRecordIterator;
-import htsjdk.samtools.SAMFileHeader;
-import htsjdk.samtools.SAMRecord;
-
-import java.util.Iterator;
-
-/*
- * Copyright (c) 2009 The Broad Institute
- *
- * Permission is hereby granted, free of charge, to any person
- * obtaining a copy of this software and associated documentation
- * files (the "Software"), to deal in the Software without
- * restriction, including without limitation the rights to use,
- * copy, modify, merge, publish, distribute, sublicense, and/or sell
- * copies of the Software, and to permit persons to whom the
- * Software is furnished to do so, subject to the following
- * conditions:
- *
- * The above copyright notice and this permission notice shall be
- * included in all copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
- * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
- * OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
- * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
- * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
- * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
- * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
- * OTHER DEALINGS IN THE SOFTWARE.
- */
-
-
-/**
- * @author aaron
- * @version 1.0
- * @date Apr 14, 2009
- * <p/>
- * Class BoundedReadIterator
- * <p/>
- * This class implements a read iterator that is bounded by the number of reads
- * it will produce over the iteration.
- */
-public class BoundedReadIterator implements GATKSAMIterator {
-
-    // the genome loc we're bounding
-    final private long readCount;
-    private long currentCount = 0;
-
-    // the iterator we want to decorate
-    private final GATKSAMIterator iterator;
-
-    // our unmapped read flag
-    private boolean doNotUseThatUnmappedReadPile = false;
-
-    /**
-     * The next read that we've buffered.  Null indicates that there's
-     * nothing in the buffer (not that there isn't a next read).
-     */
-    private SAMRecord record = null;
-
-    /**
-     * constructor
-     * @param iter
-     * @param readCount
-     */
-    public BoundedReadIterator(GATKSAMIterator iter, long readCount) {
-        this.iterator = iter;
-        this.readCount = readCount;
-    }
-
-    public void useUnmappedReads(boolean useThem) {
-        this.doNotUseThatUnmappedReadPile = useThem;
-    }
-
-    public SAMFileHeader getHeader() {
-        // todo: this is bad, we need an iterface out there for samrecords that supports getting the header,
-        // regardless of the merging
-        if (iterator instanceof MergingSamRecordIterator)
-            return ((MergingSamRecordIterator)iterator).getMergedHeader();
-        else
-            return null;
-    }
-
-    /**
-     * Do we have a next? If the iterator has a read and we're not over the read
-     * count, then yes
-     * @return
-     */
-    public boolean hasNext() {
-        if( record != null )
-            return true;
-
-        if (iterator.hasNext() && currentCount < readCount) {
-            record = iterator.next();
-            ++currentCount;
-            if (record.getAlignmentStart() == 0 && doNotUseThatUnmappedReadPile) {
-                return false;
-            }
-            return true;
-        } else {
-            return false;
-        }
-    }
-
-    /**
-     * get the next SAMRecord
-     * @return SAMRecord representing the next read
-     */
-    public SAMRecord next() {
-        SAMRecord cached = record;
-        record = null;
-        return cached;
-    }
-
-    /**
-     * this is unsupported on SAMRecord iterators
-     */
-    public void remove() {
-        throw new UnsupportedOperationException("You cannot use an iterator to remove a SAMRecord");
-    }
-
-    /**
-     * close the iterator
-     */
-    public void close() {
-        iterator.close();
-    }
-
-    public Iterator<SAMRecord> iterator() {
-        return this;
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/iterators/GATKSAMIterator.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/iterators/GATKSAMIterator.java
deleted file mode 100644
index 8ca5cfd..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/iterators/GATKSAMIterator.java
+++ /dev/null
@@ -1,56 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.iterators;
-
-import htsjdk.samtools.SAMRecord;
-import htsjdk.samtools.util.CloseableIterator;
-/**
- *
- * User: aaron
- * Date: May 6, 2009
- * Time: 5:30:41 PM
- *
- * The Broad Institute
- * SOFTWARE COPYRIGHT NOTICE AGREEMENT 
- * This software and its documentation are copyright 2009 by the
- * Broad Institute/Massachusetts Institute of Technology. All rights are reserved.
- *
- * This software is supplied without any warranty or guaranteed support whatsoever. Neither
- * the Broad Institute nor MIT can be responsible for its use, misuse, or functionality.
- *
- */
-
-/**
- * @author aaron
- * @version 1.0
- * @date May 6, 2009
- * <p/>
- * Interface GATKSAMIterator
- * <p/>
- * This is the standard interface for all iterators in the GATK package that iterate over SAMRecords
- */
-public interface GATKSAMIterator extends CloseableIterator<SAMRecord>, Iterable<SAMRecord> {
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/iterators/GATKSAMIteratorAdapter.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/iterators/GATKSAMIteratorAdapter.java
deleted file mode 100644
index 0dc3e62..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/iterators/GATKSAMIteratorAdapter.java
+++ /dev/null
@@ -1,136 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.iterators;
-
-import htsjdk.samtools.SAMRecord;
-import htsjdk.samtools.util.CloseableIterator;
-
-import java.util.Iterator;
-
-/**
- *
- * User: aaron
- * Date: May 13, 2009
- * Time: 6:33:15 PM
- *
- * The Broad Institute
- * SOFTWARE COPYRIGHT NOTICE AGREEMENT 
- * This software and its documentation are copyright 2009 by the
- * Broad Institute/Massachusetts Institute of Technology. All rights are reserved.
- *
- * This software is supplied without any warranty or guaranteed support whatsoever. Neither
- * the Broad Institute nor MIT can be responsible for its use, misuse, or functionality.
- *
- */
-
-
-/**
- * @author aaron
- * @version 1.0
- * @date May 13, 2009
- * <p/>
- * Class GATKSAMIteratorAdapter
- * <p/>
- * This class adapts other SAMRecord iterators to the GATKSAMIterator
- */
-public class GATKSAMIteratorAdapter {  
-
-    public static GATKSAMIterator adapt(Iterator<SAMRecord> iter) {
-        return new PrivateStringSAMIterator(iter);
-    }
-
-    public static GATKSAMIterator adapt(CloseableIterator<SAMRecord> iter) {
-        return new PrivateStringSAMCloseableIterator(iter);
-    }
-
-}
-
-
-/**
- * this class wraps iterators<SAMRecord> in a GATKSAMIterator, which means just adding the
- * methods that implement the iterable<> interface and the close() method from CloseableIterator
- */
-class PrivateStringSAMIterator implements GATKSAMIterator {
-    private Iterator<SAMRecord> iter = null;
-
-    PrivateStringSAMIterator(Iterator<SAMRecord> iter) {
-        this.iter = iter;
-    }
-
-    public void close() {
-        // do nothing, we can't close the iterator anyway.
-    }
-
-    public boolean hasNext() {
-        return iter.hasNext();
-    }
-
-    public SAMRecord next() {
-        return iter.next();
-    }
-
-    public void remove() {
-        throw new UnsupportedOperationException("GATKSAMIterator's don't allow remove()ing");
-    }
-
-    public Iterator<SAMRecord> iterator() {
-        return iter;
-    }
-}
-
-
-/**
- * this class wraps closeable iterators<SAMRecord> in a GATKSAMIterator, which means adding the
- * methods that implement the iterable<> interface.
- */
-class PrivateStringSAMCloseableIterator implements GATKSAMIterator {
-    private CloseableIterator<SAMRecord> iter = null;
-
-    PrivateStringSAMCloseableIterator(CloseableIterator<SAMRecord> iter) {
-        this.iter = iter;
-    }
-
-    public void close() {
-        iter.close();
-    }
-
-    public boolean hasNext() {
-        return iter.hasNext();
-    }
-
-    public SAMRecord next() {
-        return iter.next();
-    }
-
-    public void remove() {
-        throw new UnsupportedOperationException("GATKSAMIterator's don't allow remove()ing");
-    }
-
-    public Iterator<SAMRecord> iterator() {
-        return iter;
-    }
-}
-
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/iterators/GATKSAMRecordIterator.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/iterators/GATKSAMRecordIterator.java
deleted file mode 100644
index 6d02acd..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/iterators/GATKSAMRecordIterator.java
+++ /dev/null
@@ -1,57 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.iterators;
-
-import htsjdk.samtools.SAMRecord;
-import htsjdk.samtools.util.CloseableIterator;
-import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
-
-import java.util.Iterator;
-
-/**
- * Temporarily hack to convert SAMRecords to GATKSAMRecords
- *
- * User: depristo
- * Date: 1/11/13
- * Time: 1:19 PM
- */
-public class GATKSAMRecordIterator implements CloseableIterator<GATKSAMRecord>, Iterable<GATKSAMRecord> {
-    final CloseableIterator<SAMRecord> it;
-
-    public GATKSAMRecordIterator(final CloseableIterator<SAMRecord> it) {
-        this.it = it;
-    }
-
-    public GATKSAMRecordIterator(final GATKSAMIterator it) {
-        this.it = it;
-    }
-
-    @Override public boolean hasNext() { return it.hasNext(); }
-    @Override public GATKSAMRecord next() { return (GATKSAMRecord)it.next(); }
-    @Override public void remove() { it.remove(); }
-    @Override public void close() { it.close(); }
-    @Override public Iterator<GATKSAMRecord> iterator() { return this; }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/iterators/GenomeLocusIterator.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/iterators/GenomeLocusIterator.java
deleted file mode 100644
index c76a07e..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/iterators/GenomeLocusIterator.java
+++ /dev/null
@@ -1,100 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.iterators;
-
-import org.broadinstitute.gatk.utils.GenomeLoc;
-import org.broadinstitute.gatk.utils.GenomeLocParser;
-
-import java.util.Iterator;
-import java.util.NoSuchElementException;
-/**
- * User: hanna
- * Date: May 12, 2009
- * Time: 10:52:47 AM
- * BROAD INSTITUTE SOFTWARE COPYRIGHT NOTICE AND AGREEMENT
- * Software and documentation are copyright 2005 by the Broad Institute.
- * All rights are reserved.
- *
- * Users acknowledge that this software is supplied without any warranty or support.
- * The Broad Institute is not responsible for its use, misuse, or
- * functionality.
- */
-
-/**
- * Iterates through all of the loci provided in the reference.
- */
-public class GenomeLocusIterator implements Iterator<GenomeLoc> {
-    /**
-     * Builds individual loci.
-     */
-    private GenomeLocParser parser;
-
-    /**
-     * The entire region over which we're iterating.
-     */
-    private GenomeLoc completeLocus;
-
-    /**
-     * The current position in the traversal.
-     */
-    private GenomeLoc currentLocus;
-
-    /**
-     * Creates an iterator that can traverse over the entire
-     * reference specified in the given ShardDataProvider.
-     * @param completeLocus Data provider to use as a backing source.
-     *                 Provider must have a reference (hasReference() == true).
-     */
-    public GenomeLocusIterator( GenomeLocParser parser, GenomeLoc completeLocus ) {
-        this.parser = parser;
-        this.completeLocus = completeLocus;
-        this.currentLocus = parser.createGenomeLoc(completeLocus.getContig(),completeLocus.getStart());
-    }
-
-    /**
-     * Is the iterator still within the locus?
-     * @return True if the iterator has more elements.  False otherwise. 
-     */
-    public boolean hasNext() {
-        return !currentLocus.isPast(completeLocus);    
-    }
-
-    /**
-     * Get the next single-base locus context bounded by the iterator.
-     * @return GenomeLoc representing the next single-base locus context.
-     */
-    public GenomeLoc next() {
-        if( !hasNext() )
-            throw new NoSuchElementException("No elements remaining in bounded reference region.");
-        GenomeLoc toReturn = currentLocus;
-        currentLocus = parser.incPos(currentLocus);
-        return toReturn;
-    }
-
-    public void remove() {
-        throw new UnsupportedOperationException( "ReferenceLocusIterator is read-only" );
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/iterators/IterableIterator.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/iterators/IterableIterator.java
deleted file mode 100644
index bf8b697..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/iterators/IterableIterator.java
+++ /dev/null
@@ -1,40 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.iterators;
-
-import java.util.Iterator;
-
-public class IterableIterator<T> implements Iterable<T> {
-    private Iterator<T> iter;
-
-    public IterableIterator(Iterator<T> iter) {
-        this.iter = iter;
-    }
-
-    public Iterator<T> iterator() {
-        return iter;
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/iterators/MalformedBAMErrorReformatingIterator.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/iterators/MalformedBAMErrorReformatingIterator.java
deleted file mode 100644
index d3a6bdc..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/iterators/MalformedBAMErrorReformatingIterator.java
+++ /dev/null
@@ -1,69 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.iterators;
-
-import htsjdk.samtools.SAMRecord;
-import htsjdk.samtools.util.CloseableIterator;
-import org.broadinstitute.gatk.utils.exceptions.UserException;
-
-import java.io.File;
-import java.util.Iterator;
-
-/**
- * Traps BAM formatting errors in underlying iterator and rethrows meaningful GATK UserExceptions
- */
-public class MalformedBAMErrorReformatingIterator implements CloseableIterator<SAMRecord> {
-    File source;
-    CloseableIterator<SAMRecord> it;
-
-    public MalformedBAMErrorReformatingIterator(final File source, final CloseableIterator<SAMRecord> it) {
-        this.it = it;
-        this.source = source;
-    }
-
-    public boolean hasNext() {
-        try {
-            return this.it.hasNext();
-        } catch ( RuntimeException e ) { // we need to catch RuntimeExceptions here because the Picard code is throwing them (among SAMFormatExceptions) sometimes
-            throw new UserException.MalformedBAM(source, e.getMessage());
-        }
-    }
-
-    public SAMRecord next() {
-        try {
-            return it.next();
-        } catch ( RuntimeException e ) { // we need to catch RuntimeExceptions here because the Picard code is throwing them (among SAMFormatExceptions) sometimes
-            throw new UserException.MalformedBAM(source, e.getMessage());
-        }
-    }
-
-    public void remove() {
-        throw new UnsupportedOperationException("Can not remove records from a SAM file via an iterator!");
-    }
-
-    public void close() { it.close(); }
-    public Iterator<SAMRecord> iterator() { return this; }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/iterators/NullSAMIterator.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/iterators/NullSAMIterator.java
deleted file mode 100644
index fa130f9..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/iterators/NullSAMIterator.java
+++ /dev/null
@@ -1,57 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.iterators;
-
-import htsjdk.samtools.SAMRecord;
-
-import java.util.Iterator;
-import java.util.NoSuchElementException;
-/**
- * User: hanna
- * Date: May 19, 2009
- * Time: 6:47:16 PM
- * BROAD INSTITUTE SOFTWARE COPYRIGHT NOTICE AND AGREEMENT
- * Software and documentation are copyright 2005 by the Broad Institute.
- * All rights are reserved.
- *
- * Users acknowledge that this software is supplied without any warranty or support.
- * The Broad Institute is not responsible for its use, misuse, or
- * functionality.
- */
-
-/**
- * A placeholder for an iterator with no data.
- */
-public class NullSAMIterator implements GATKSAMIterator {
-    public NullSAMIterator() {}
-
-    public Iterator<SAMRecord> iterator() { return this; }
-    public void close() { /* NO-OP */ }
-
-    public boolean hasNext() { return false; }
-    public SAMRecord next() { throw new NoSuchElementException("No next element is available."); }
-    public void remove() { throw new UnsupportedOperationException("Cannot remove from a GATKSAMIterator"); }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/iterators/PeekingIterator.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/iterators/PeekingIterator.java
deleted file mode 100644
index f46fb0c..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/iterators/PeekingIterator.java
+++ /dev/null
@@ -1,65 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.iterators;
-
-
-/*
- * Copyright (c) 2009 The Broad Institute
- *
- * Permission is hereby granted, free of charge, to any person
- * obtaining a copy of this software and associated documentation
- * files (the "Software"), to deal in the Software without
- * restriction, including without limitation the rights to use,
- * copy, modify, merge, publish, distribute, sublicense, and/or sell
- * copies of the Software, and to permit persons to whom the
- * Software is furnished to do so, subject to the following
- * conditions:
- *
- * The above copyright notice and this permission notice shall be
- * included in all copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
- * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
- * OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
- * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
- * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
- * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
- * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
- * OTHER DEALINGS IN THE SOFTWARE.
- */
-
-/**
- * @author aaron
- *         <p/>
- *         Class PeekingIterator
- *         <p/>
- *         a peekable interface, that requires a peek() method
- */
-public interface PeekingIterator<T> {
-
-    /** @return returns a peeked value */
-    public T peek();
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/iterators/PositionTrackingIterator.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/iterators/PositionTrackingIterator.java
deleted file mode 100644
index 2eba344..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/iterators/PositionTrackingIterator.java
+++ /dev/null
@@ -1,105 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.iterators;
-
-import htsjdk.samtools.SAMRecord;
-import htsjdk.samtools.util.CloseableIterator;
-
-/**
- * Iterates through a list of elements, tracking the number of elements it has seen.
- * @author hanna
- * @version 0.1
- */
-public class PositionTrackingIterator implements GATKSAMIterator {
-    /**
-     * The iterator being tracked.
-     */
-    private CloseableIterator<SAMRecord> iterator;
-
-    /**
-     * Current position within the tracked iterator.
-     */
-    private long position;
-
-    /**
-     * Retrieves the current position of the iterator.  The 'current position' of the iterator is defined as
-     * the coordinate of the read that will be returned if next() is called.
-     * @return The current position of the iterator.
-     */
-    public long getPosition() {
-        return position;
-    }
-
-    /**
-     * Create a new iterator wrapping the given position, assuming that the reader is <code>position</code> reads
-     * into the sequence.
-     * @param iterator Iterator to wraps.
-     * @param position Non-negative position where the iterator currently sits.
-     */
-    public PositionTrackingIterator(CloseableIterator<SAMRecord> iterator, long position ) {
-        this.iterator = iterator;
-        this.position = position;
-    }
-
-    /**
-     * {@inheritDoc}
-     */
-    public boolean hasNext() {
-        return iterator.hasNext();
-    }
-
-    /**
-     * Try to get the next read in the list.  If a next read is available, increment the position.
-     * @return next read in the list, if available.
-     */
-    public SAMRecord next() {
-        try {
-            return iterator.next();
-        }
-        finally {
-            position++;
-        }
-    }
-
-    /**
-     * {@inheritDoc}
-     */
-    public GATKSAMIterator iterator() {
-        return this;
-    }
-
-    /**
-     * {@inheritDoc}
-     */
-    public void close() {
-        iterator.close();
-    }
-
-    /**
-     * {@inheritDoc}
-     */
-    public void remove() { throw new UnsupportedOperationException("Cannot remove from a GATKSAMIterator"); }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/iterators/PushbackIterator.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/iterators/PushbackIterator.java
deleted file mode 100644
index 0bb545b..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/iterators/PushbackIterator.java
+++ /dev/null
@@ -1,82 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.iterators;
-
-import java.util.Iterator;
-
-public class PushbackIterator<T> implements Iterator<T>, Iterable<T> {
-    Iterator<T> underlyingIterator;
-    T pushedElement = null;
-
-    public PushbackIterator(final Iterator<T> underlyingIterator) {
-        this.underlyingIterator = underlyingIterator;
-    }
-
-    public boolean hasNext() {
-        return pushedElement != null || underlyingIterator.hasNext();
-    }
-
-    public Iterator<T> iterator() {
-        return this;
-    }
-
-    /**
-     * Retrieves, but does not remove, the head of this iterator.
-     * @return T the next element in the iterator
-     */
-    public T element() {
-        T x = next();
-        pushback(x);
-        return x;
-    }
-
-    /**
-     * @return the next element in the iteration.
-     */
-    public T next() {
-        if (pushedElement != null) {
-            final T ret = pushedElement;
-            pushedElement = null;
-            return ret;
-        } else {
-            return underlyingIterator.next();
-        }
-    }
-
-    public void pushback(T elt) {
-        assert(pushedElement == null);
-        
-        pushedElement = elt;
-    }
-
-    public void remove() {
-        throw new UnsupportedOperationException();
-    }
-
-    public Iterator<T> getUnderlyingIterator() {
-        return underlyingIterator;
-    }
-}
\ No newline at end of file
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/iterators/RNAReadTransformer.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/iterators/RNAReadTransformer.java
deleted file mode 100644
index 4cc2a82..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/iterators/RNAReadTransformer.java
+++ /dev/null
@@ -1,37 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.iterators;
-
-/**
- *
- * A baseclass for RNAseq read transformer
- *
- * @author ami
- * @since 4/28/14.
- */
-public abstract class RNAReadTransformer extends ReadTransformer{
-    public boolean isRNAReadTransformer(){return true;}
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/iterators/ReadFormattingIterator.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/iterators/ReadFormattingIterator.java
deleted file mode 100644
index 4922279..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/iterators/ReadFormattingIterator.java
+++ /dev/null
@@ -1,140 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.iterators;
-
-import htsjdk.samtools.SAMRecord;
-import org.apache.log4j.Logger;
-import org.broadinstitute.gatk.utils.sam.AlignmentUtils;
-
-/**
- * An iterator which does post-processing of a read, including potentially wrapping
- * the read in something with a compatible interface or replacing the read entirely.
- *
- * @author mhanna
- * @version 0.1
- */
-public class ReadFormattingIterator implements GATKSAMIterator {
-    /**
-     * Logger.
-     */
-    final protected static Logger logger = Logger.getLogger(ReadFormattingIterator.class);    
-
-    /**
-     * Iterator to which to pass
-     */
-    private GATKSAMIterator wrappedIterator;
-
-    /**
-     * True if original base qualities should be used.
-     */
-    private final boolean useOriginalBaseQualities;
-
-    /**
-      * Positive if there is a default Base Quality value to fill in the reads with.
-      */
-     private final byte defaultBaseQualities;
-
-
-    /**
-     * Decorate the given iterator inside a ReadWrappingIterator.
-     * @param wrappedIterator iterator
-     * @param useOriginalBaseQualities true if original base qualities should be used
-     * @param defaultBaseQualities if the reads have incomplete quality scores, set them all to defaultBaseQuality.  
-     */
-    public ReadFormattingIterator(GATKSAMIterator wrappedIterator, boolean useOriginalBaseQualities, byte defaultBaseQualities) {
-        this.wrappedIterator = wrappedIterator;
-        this.useOriginalBaseQualities = useOriginalBaseQualities;
-        this.defaultBaseQualities = defaultBaseQualities;
-
-    }
-
-    /**
-     * Convenience function for use in foreach loops.  Dangerous because it does not actually
-     * reset the iterator.
-     * @return An iterator through the current data stream.
-     */
-    public GATKSAMIterator iterator() {
-        // NOTE: this iterator doesn't perform any kind of reset operation; it just returns itself.
-        //       can we do something better?  Do we really have to provide support for the Iterable interface?
-        return this;
-    }
-
-    /**
-     * Close this iterator.
-     */
-    public void close() {
-        wrappedIterator.close();
-    }
-
-    /**
-     * Does the iterator contain more values?
-     * @return True if there are more left to return, false otherwise.
-     */
-    public boolean hasNext() {
-        return wrappedIterator.hasNext();
-    }
-
-    /**
-     * Get the next value in the sequence.
-     * @return Next value in the sequence.  By convention, a NoSuchElementException should be thrown if
-     *         no next exists.
-     */
-    public SAMRecord next() {
-        SAMRecord rec = wrappedIterator.next();
-
-        // Always consolidate the cigar string into canonical form, collapsing zero-length / repeated cigar elements.
-        // Downstream code (like LocusIteratorByState) cannot necessarily handle non-consolidated cigar strings.
-        rec.setCigar(AlignmentUtils.consolidateCigar(rec.getCigar()));
-
-        // if we are using default quals, check if we need them, and add if necessary.
-        // 1. we need if reads are lacking or have incomplete quality scores
-        // 2. we add if defaultBaseQualities has a positive value
-        if (defaultBaseQualities >= 0) {
-            byte reads [] = rec.getReadBases();
-            byte quals [] = rec.getBaseQualities();
-            if (quals == null || quals.length < reads.length) {
-                byte new_quals [] = new byte [reads.length];
-                for (int i=0; i<reads.length; i++)
-                    new_quals[i] = defaultBaseQualities;
-                rec.setBaseQualities(new_quals);
-            }
-        }
-
-        // if we are using original quals, set them now if they are present in the record
-        if ( useOriginalBaseQualities ) {
-            byte[] originalQuals = rec.getOriginalBaseQualities();
-            if ( originalQuals != null )
-                rec.setBaseQualities(originalQuals);
-        }
-
-        return rec;
-    }
-
-    /**
-     * Remove the current element from the list.  Unsupported in this wrapper.
-     */
-    public void remove() { throw new UnsupportedOperationException("Cannot remove from a ReadWrappingIterator"); }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/iterators/ReadTransformer.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/iterators/ReadTransformer.java
deleted file mode 100644
index b7db505..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/iterators/ReadTransformer.java
+++ /dev/null
@@ -1,205 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.iterators;
-
-import com.google.java.contract.Ensures;
-import com.google.java.contract.Requires;
-import org.broadinstitute.gatk.engine.GenomeAnalysisEngine;
-import org.broadinstitute.gatk.engine.walkers.Walker;
-import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
-
-import java.util.Comparator;
-
-/**
- * Baseclass used to describe a read transformer like BAQ and BQSR
- *
- * Read transformers are pluggable infrastructure that modify read state
- * either on input, on output, or within walkers themselves.
- *
- * The function apply() is called on each read seen by the GATK (after passing
- * all ReadFilters) and it can do as it sees fit (without modifying the alignment)
- * to the read to change qualities, add tags, etc.
- *
- * Initialize is called once right before the GATK traversal begins providing
- * the ReadTransformer with the ability to collect and initialize data from the
- * engine.
- *
- * Note that all ReadTransformers within the classpath are created and initialized.  If one
- * shouldn't be run it should look at the command line options of the engine and override
- * enabled().
- *
- * @since 8/31/12
- * @author depristo
- */
-abstract public class ReadTransformer {
-    /**
-     * When should this read transform be applied?
-     */
-    private ApplicationTime applicationTime;
-
-    /**
-     * Keep track of whether we've been initialized already, and ensure it's not called more than once.
-     */
-    private boolean initialized = false;
-
-    protected ReadTransformer() {}
-
-    /*
-     * @return the ordering constraint for the given read transformer
-     */
-    public OrderingConstraint getOrderingConstraint() { return OrderingConstraint.DO_NOT_CARE; }
-
-    /**
- * Master initialization routine.  Called to set up a ReadTransform, using its overloaded initializeSub routine.
-     *
-     * @param overrideTime if not null, we will run this ReadTransform at the time provided, regardless of the timing of this read transformer itself
-     * @param engine the engine, for initializing values
-     * @param walker the walker we intend to run
-     */
-    @Requires({"initialized == false", "engine != null", "walker != null"})
-    @Ensures("initialized == true")
-    public final void initialize(final ApplicationTime overrideTime, final GenomeAnalysisEngine engine, final Walker walker) {
-        if ( engine == null ) throw new IllegalArgumentException("engine cannot be null");
-        if ( walker == null ) throw new IllegalArgumentException("walker cannot be null");
-
-        this.applicationTime = initializeSub(engine, walker);
-        if ( overrideTime != null ) this.applicationTime = overrideTime;
-        initialized = true;
-    }
-
-    /**
-     * Subclasses must override this to initialize themselves
-     *
-     * @param engine the engine, for initializing values
-     * @param walker the walker we intend to run
-     * @return the point of time we'd like this read transform to be run
-     */
-    @Requires({"engine != null", "walker != null"})
-    @Ensures("result != null")
-    protected abstract ApplicationTime initializeSub(final GenomeAnalysisEngine engine, final Walker walker);
-
-    /**
-     * Should this ReadTransformer be activated?  Called after initialize, which allows this
-     * read transformer to look at its arguments and decide if it should be active.  All
-     * ReadTransformers must override this, as by default they are not enabled.
-     *
-     * @return true if this ReadTransformer should be used on the read stream
-     */
-    public boolean enabled() {
-        return false;
-    }
-
-    /**
-     * Has this transformer been initialized?
-     *
-     * @return true if it has
-     */
-    public final boolean isInitialized() {
-        return initialized;
-    }
-
-    /**
-     * When should we apply this read transformer?
-     *
-     * @return true if yes
-     */
-    public final ApplicationTime getApplicationTime() {
-        return applicationTime;
-    }
-
-    /**
-     * Primary interface function for a read transform to actually do some work
-     *
-     * The function apply() is called on each read seen by the GATK (after passing
-     * all ReadFilters) and it can do as it sees fit (without modifying the alignment)
-     * to the read to change qualities, add tags, etc.
-     *
-     * @param read the read to transform
-     * @return the transformed read
-     */
-    @Requires("read != null")
-    @Ensures("result != null")
-    abstract public GATKSAMRecord apply(final GATKSAMRecord read);
-
-    @Override
-    public String toString() {
-        return getClass().getSimpleName();
-    }
-
-    /**
-     * When should a read transformer be applied?
-     */
-    public static enum ApplicationTime {
-        /**
-         * Walker does not tolerate this read transformer
-         */
-        FORBIDDEN,
-
-        /**
-         * apply the transformation to the incoming reads, the default
-         */
-        ON_INPUT,
-
-        /**
-         * apply the transformation to the outgoing read stream
-         */
-        ON_OUTPUT,
-
-        /**
-         * the walker will deal with the calculation itself
-         */
-        HANDLED_IN_WALKER
-    }
-
-    /*
-     * This enum specifies the constraints that the given read transformer has relative to any other read transformers being used
-     */
-    public enum OrderingConstraint {
-        /*
-         * If 2 read transformers are both active and MUST_BE_FIRST, then an error will be generated
-         */
-        MUST_BE_FIRST,
-
-        /*
-         * No constraints on the ordering for this read transformer
-         */
-        DO_NOT_CARE,
-
-        /*
-         * If 2 read transformers are both active and MUST_BE_LAST, then an error will be generated
-         */
-        MUST_BE_LAST
-    }
-
-    public static class ReadTransformerComparator implements Comparator<ReadTransformer> {
-
-        public int compare(final ReadTransformer r1, final ReadTransformer r2) {
-            if ( r1.getOrderingConstraint() == r2.getOrderingConstraint() )
-                return 0;
-            return ( r1.getOrderingConstraint() == OrderingConstraint.MUST_BE_FIRST || r2.getOrderingConstraint() == OrderingConstraint.MUST_BE_LAST ) ? -1 : 1;
-        }
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/iterators/ReadTransformersMode.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/iterators/ReadTransformersMode.java
deleted file mode 100644
index 3fa18c4..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/iterators/ReadTransformersMode.java
+++ /dev/null
@@ -1,53 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.iterators;
-
-import java.lang.annotation.*;
-
-/**
- * User: hanna
- * Date: May 14, 2009
- * Time: 1:51:22 PM
- * BROAD INSTITUTE SOFTWARE COPYRIGHT NOTICE AND AGREEMENT
- * Software and documentation are copyright 2005 by the Broad Institute.
- * All rights are reserved.
- *
- * Users acknowledge that this software is supplied without any warranty or support.
- * The Broad Institute is not responsible for its use, misuse, or
- * functionality.
- */
-
-/**
- * Allows the walker to indicate what type of data it wants to consume.
- */
-
-@Documented
-@Inherited
-@Retention(RetentionPolicy.RUNTIME)
-@Target(ElementType.TYPE)
-public @interface ReadTransformersMode {
-    public abstract ReadTransformer.ApplicationTime ApplicationTime() default ReadTransformer.ApplicationTime.ON_INPUT;
-}
\ No newline at end of file
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/iterators/VerifyingSamIterator.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/iterators/VerifyingSamIterator.java
deleted file mode 100644
index a34d4de..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/iterators/VerifyingSamIterator.java
+++ /dev/null
@@ -1,90 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.iterators;
-
-import htsjdk.samtools.SAMRecord;
-import org.broadinstitute.gatk.utils.GenomeLocParser;
-import org.broadinstitute.gatk.utils.exceptions.UserException;
-
-import java.util.Iterator;
-
-/**
- * Verifies that the incoming stream of reads is correctly sorted
- */
-public class VerifyingSamIterator implements GATKSAMIterator {
-    GATKSAMIterator it;
-    SAMRecord last = null;
-    boolean checkOrderP = true;
-
-    public VerifyingSamIterator(GATKSAMIterator it) {
-        this.it = it;
-    }
-
-    public boolean hasNext() { return this.it.hasNext(); }
-    public SAMRecord next() {
-
-        SAMRecord cur = it.next();
-        if ( last != null )
-            verifyRecord(last, cur);
-        if ( ! cur.getReadUnmappedFlag() )
-            last = cur;
-        return cur;
-    }
-
-    private void verifyRecord( final SAMRecord last, final SAMRecord cur ) {
-        if ( checkOrderP && isOutOfOrder(last, cur) ) {
-            this.last = null;
-            throw new UserException.MissortedBAM(String.format("reads are out of order:%nlast:%n%s%ncurrent:%n%s%n", last.format(), cur.format()) );
-        }
-    }
-
-    private boolean isOutOfOrder( final SAMRecord last, final SAMRecord cur ) {
-        if ( last == null || cur.getReadUnmappedFlag() )
-            return false;
-        else {
-            if(last.getReferenceIndex() == SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX || last.getAlignmentStart() == SAMRecord.NO_ALIGNMENT_START)
-                throw new UserException.MalformedBAM(last,String.format("read %s has inconsistent mapping information.",last.format()));
-            if(cur.getReferenceIndex() == SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX || cur.getAlignmentStart() == SAMRecord.NO_ALIGNMENT_START)
-                throw new UserException.MalformedBAM(last,String.format("read %s has inconsistent mapping information.",cur.format()));
-
-            return (last.getReferenceIndex() > cur.getReferenceIndex()) ||
-                    (last.getReferenceIndex().equals(cur.getReferenceIndex()) &&
-                            last.getAlignmentStart() > cur.getAlignmentStart());
-        }
-    }
-
-    public void remove() {
-        throw new UnsupportedOperationException("Can not remove records from a SAM file via an iterator!");
-    }
-
-    public void close() {
-        it.close();
-    }
-
-    public Iterator<SAMRecord> iterator() {
-        return this;
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/iterators/package-info.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/iterators/package-info.java
deleted file mode 100644
index 3387890..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/iterators/package-info.java
+++ /dev/null
@@ -1,26 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.iterators;
\ No newline at end of file
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/phonehome/GATKRunReport.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/phonehome/GATKRunReport.java
deleted file mode 100644
index f8126b9..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/phonehome/GATKRunReport.java
+++ /dev/null
@@ -1,786 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.phonehome;
-
-import com.google.java.contract.Ensures;
-import com.google.java.contract.Requires;
-import org.apache.log4j.Level;
-import org.apache.log4j.Logger;
-import org.broadinstitute.gatk.engine.CommandLineGATK;
-import org.broadinstitute.gatk.engine.GenomeAnalysisEngine;
-import org.broadinstitute.gatk.engine.walkers.Walker;
-import org.broadinstitute.gatk.utils.Utils;
-import org.broadinstitute.gatk.utils.crypt.CryptUtils;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-import org.broadinstitute.gatk.utils.io.IOUtils;
-import org.broadinstitute.gatk.utils.io.Resource;
-import org.broadinstitute.gatk.utils.threading.ThreadEfficiencyMonitor;
-import org.jets3t.service.S3Service;
-import org.jets3t.service.S3ServiceException;
-import org.jets3t.service.impl.rest.httpclient.RestS3Service;
-import org.jets3t.service.model.S3Object;
-import org.jets3t.service.security.AWSCredentials;
-import org.simpleframework.xml.Element;
-import org.simpleframework.xml.Serializer;
-import org.simpleframework.xml.core.Persister;
-
-import java.io.*;
-import java.security.NoSuchAlgorithmException;
-import java.security.PublicKey;
-import java.text.DateFormat;
-import java.text.SimpleDateFormat;
-import java.util.Arrays;
-import java.util.Date;
-import java.util.concurrent.atomic.AtomicBoolean;
-import java.util.zip.GZIPInputStream;
-import java.util.zip.GZIPOutputStream;
-
-
-/**
- * A detailed description of a GATK run, and error if applicable.  Simply create a GATKRunReport
- * with the constructor, providing the walker that was run and the fully instantiated GenomeAnalysisEngine
- * <b>after the run finishes</b> and the GATKRunReport will collect all of the report information
- * into this object.  Call postReport to write out the report, as an XML document, to either STDOUT,
- * a file (in which case the output is gzipped), or with no arguments the report will be posted to the
- * GATK run report database.
- *
- * @author depristo
- * @since 2010
- */
-public class GATKRunReport {
-    protected static final String REPORT_BUCKET_NAME = "broad.gsa.gatk.run.reports";
-    protected static final String TEST_REPORT_BUCKET_NAME = "broad.gsa.gatk.run.reports.test";
-    protected final static String AWS_ACCESS_KEY_MD5 = "34d4a26eb2062b3f06e833b28f9a38c6";
-    protected final static String AWS_SECRET_KEY_MD5 = "83f2332eec99ef1d7425d5dc5d4b514a";
-
-    private static final DateFormat DATE_FORMAT = new SimpleDateFormat("yyyy/MM/dd HH.mm.ss");
-
-    /**
-     * our log
-     */
-    protected static final Logger logger = Logger.getLogger(GATKRunReport.class);
-
-    /**
-     * Default value for the number of milliseconds before an S3 put operation is timed-out.
-     * Can be overridden via a constructor argument.
-     */
-    private static final long S3_DEFAULT_PUT_TIME_OUT_IN_MILLISECONDS = 30 * 1000;
-
-    /**
-     * Number of milliseconds before an S3 put operation is timed-out.
-     */
-    private long s3PutTimeOutInMilliseconds = S3_DEFAULT_PUT_TIME_OUT_IN_MILLISECONDS;
-
-    // -----------------------------------------------------------------
-    // elements captured for the report
-    // -----------------------------------------------------------------
-
-    @Element(required = false, name = "id")
-    private String id;
-
-    @Element(required = false, name = "exception")
-    private GATKRunReportException mException;
-
-    @Element(required = true, name = "start-time")
-    private String startTime = "ND";
-
-    @Element(required = true, name = "end-time")
-    private String endTime;
-
-    @Element(required = true, name = "run-time")
-    private long runTime = 0;
-
-    @Element(required = true, name = "walker-name")
-    private String walkerName;
-
-    @Element(required = true, name = "svn-version")
-    private String svnVersion;
-
-    @Element(required = true, name = "total-memory")
-    private long totalMemory;
-
-    @Element(required = true, name = "max-memory")
-    private long maxMemory;
-
-    @Element(required = true, name = "user-name")
-    private String userName;
-
-    @Element(required = true, name = "host-name")
-    private String hostName;
-
-    @Element(required = true, name = "java")
-    private String javaVersion;
-
-    @Element(required = true, name = "machine")
-    private String machine;
-
-    @Element(required = true, name = "iterations")
-    private long nIterations;
-
-    @Element(required = true, name = "tag")
-    private String tag;
-
-    @Element(required = true, name = "num-threads")
-    private int numThreads;
-    @Element(required = true, name = "percent-time-running")
-    private String percentTimeRunning;
-    @Element(required = true, name = "percent-time-waiting")
-    private String percentTimeWaiting;
-    @Element(required = true, name = "percent-time-blocking")
-    private String percentTimeBlocking;
-    @Element(required = true, name = "percent-time-waiting-for-io")
-    private String percentTimeWaitingForIO;
-
-    /** The error message, if one occurred, or null if none did */
-    public String errorMessage = null;
-    /** The error that occurred, if one did, or null if none did */
-    public Throwable errorThrown = null;
-
-    /**
-     * How should the GATK report its usage?
-     */
-    public enum PhoneHomeOption {
-        /** Disable phone home */
-        NO_ET,
-        /** Forces the report to go to S3 */
-        AWS,
-        /** Force output to STDOUT.  For debugging only */
-        STDOUT
-    }
-
-    /**
- * To allow us to deserialize reports from XML
-     */
-    private GATKRunReport() { }
-
-    /**
-     * Read a GATKRunReport from the serialized XML representation in String reportAsXML
-     * @param stream an input stream containing a serialized XML report
-     * @return a reconstituted GATKRunReport from reportAsXML
-     * @throws Exception if parsing fails for any reason
-     */
-    @Ensures("result != null")
-    protected static GATKRunReport deserializeReport(final InputStream stream) throws Exception {
-        final Serializer serializer = new Persister();
-        return serializer.read(GATKRunReport.class, stream);
-    }
-
-    /**
-     * Create a new GATKRunReport from a report on S3
-     *
-     * Assumes that s3Object has already been written to S3, and this function merely
-     * fetches it from S3 and deserializes it.  The access keys must have permission to
-     * GetObject from S3.
-     *
-     * @param downloaderAccessKey AWS access key with permission to GetObject from bucketName
-     * @param downloaderSecretKey AWS secret key with permission to GetObject from bucketName
-     * @param bucketName the name of the bucket holding the report
-     * @param s3Object the s3Object we wrote to S3 in bucketName that we want to get back and decode
-     * @return a deserialized report derived from s3://bucketName/s3Object.getName()
-     * @throws Exception
-     */
-    @Ensures("result != null")
-    protected static GATKRunReport deserializeReport(final String downloaderAccessKey,
-                                                     final String downloaderSecretKey,
-                                                     final String bucketName,
-                                                     final S3Object s3Object) throws Exception {
-        final S3Service s3Service = initializeAWSService(downloaderAccessKey, downloaderSecretKey);
-
-        // Retrieve the whole data object we created previously
-        final S3Object objectComplete = s3Service.getObject(bucketName, s3Object.getName());
-
-        // Read the data from the object's DataInputStream using a loop, and print it out.
-        return deserializeReport(new GZIPInputStream(objectComplete.getDataInputStream()));
-    }
-
-    /**
- * Create a new RunReport and populate all of the fields with values from the walker and engine.
-     * Allows the S3 put timeout to be explicitly set.
-     *
-     * @param walker the GATK walker that we ran
-     * @param e the exception caused by running this walker, or null if we completed successfully
-     * @param engine the GAE we used to run the walker, so we can fetch runtime, args, etc
-     * @param type the GATK phone home setting
-     * @param s3PutTimeOutInMilliseconds number of milliseconds to wait before timing out an S3 put operation
-     */
-    public GATKRunReport(final Walker<?,?> walker, final Exception e, final GenomeAnalysisEngine engine, final PhoneHomeOption type,
-                         final long s3PutTimeOutInMilliseconds) {
-        this(walker, e, engine, type);
-        this.s3PutTimeOutInMilliseconds = s3PutTimeOutInMilliseconds;
-    }
-
-    /**
- * Create a new RunReport and populate all of the fields with values from the walker and engine.
-     * Leaves the S3 put timeout set to the default value of S3_DEFAULT_PUT_TIME_OUT_IN_MILLISECONDS.
-     *
-     * @param walker the GATK walker that we ran
-     * @param e the exception caused by running this walker, or null if we completed successfully
-     * @param engine the GAE we used to run the walker, so we can fetch runtime, args, etc
-     * @param type the GATK phone home setting
-     */
-    public GATKRunReport(final Walker<?,?> walker, final Exception e, final GenomeAnalysisEngine engine, final PhoneHomeOption type) {
-        if ( type == PhoneHomeOption.NO_ET )
-            throw new ReviewedGATKException("Trying to create a run report when type is NO_ET!");
-
-        logger.debug("Aggregating data for run report");
-
-        // what did we run?
-        id = org.apache.commons.lang.RandomStringUtils.randomAlphanumeric(32);
-        walkerName = engine.getWalkerName(walker.getClass());
-        svnVersion = CommandLineGATK.getVersionNumber();
-
-        // runtime performance metrics
-        Date end = new java.util.Date();
-        endTime = DATE_FORMAT.format(end);
-        if ( engine.getStartTime() != null ) { // made it this far during initialization
-            startTime = DATE_FORMAT.format(engine.getStartTime());
-            runTime = (end.getTime() - engine.getStartTime().getTime()) / 1000L; // difference in seconds
-        }
-
-        // deal with memory usage
-        Runtime.getRuntime().gc(); // call GC so totalMemory is ~ used memory
-        maxMemory = Runtime.getRuntime().maxMemory();
-        totalMemory = Runtime.getRuntime().totalMemory();
-
-        // we can only do some operations if an error hasn't occurred
-        if ( engine.getCumulativeMetrics() != null ) {
-            // it's possible we aborted so early that these data structures arent initialized
-            nIterations = engine.getCumulativeMetrics().getNumIterations();
-        }
-
-        tag = engine.getArguments().tag;
-
-        // user and hostname -- information about the runner of the GATK
-        userName = System.getProperty("user.name");
-        hostName = Utils.resolveHostname();
-
-        // basic java information
-        javaVersion = Utils.join("-", Arrays.asList(System.getProperty("java.vendor"), System.getProperty("java.version")));
-        machine = Utils.join("-", Arrays.asList(System.getProperty("os.name"), System.getProperty("os.arch")));
-
-        // if there was an exception, capture it
-        this.mException = e == null ? null : new GATKRunReportException(e);
-
-        numThreads = engine.getTotalNumberOfThreads();
-        percentTimeRunning = getThreadEfficiencyPercent(engine, ThreadEfficiencyMonitor.State.USER_CPU);
-        percentTimeBlocking = getThreadEfficiencyPercent(engine, ThreadEfficiencyMonitor.State.BLOCKING);
-        percentTimeWaiting = getThreadEfficiencyPercent(engine, ThreadEfficiencyMonitor.State.WAITING);
-        percentTimeWaitingForIO = getThreadEfficiencyPercent(engine, ThreadEfficiencyMonitor.State.WAITING_FOR_IO);
-    }
-
-    /**
-     * Get the random alpha-numeric ID of this GATKRunReport
-     * @return a non-null string ID
-     */
-    @Ensures("result != null")
-    public String getID() {
-        return id;
-    }
-
-    /**
-     * Return a string representing the percent of time the GATK spent in state, if possible.  Otherwise return NA
-     *
-     * @param engine the GATK engine whose threading efficiency info we will use
-     * @param state the state whose occupancy we wish to know
-     * @return a string representation of the percent occupancy of state, or NA is not possible
-     */
-    @Requires({"engine != null", "state != null"})
-    @Ensures("result != null")
-    private String getThreadEfficiencyPercent(final GenomeAnalysisEngine engine, final ThreadEfficiencyMonitor.State state) {
-        final ThreadEfficiencyMonitor tem = engine.getThreadEfficiencyMonitor();
-        return tem == null ? "NA" : String.format("%.2f", tem.getStatePercent(state));
-    }
-
-    /**
-     * Get a filename (no path) appropriate for this report
-     *
-     * @return a non-null string filename
-     */
-    @Ensures("result != null")
-    protected String getReportFileName() {
-        return getID() + ".report.xml.gz";
-    }
-
-    // ---------------------------------------------------------------------------
-    //
-    // Main public interface method for posting reports
-    //
-    // ---------------------------------------------------------------------------
-
-    /**
-     * Post this GATK report to the destination implied by the PhoneHomeOption type
-     *
-     * Guaranteed to never throw an exception (exception noted below) and to return
-     * with a reasonable (~10 seconds) time regardless of successful writing of the report.
-     *
-     * @throws IllegalArgumentException if type == null
-     * @param type the type of phoning home we want to do
-     * @return true if a report was successfully written, false otherwise
-     */
-    public boolean postReport(final PhoneHomeOption type) {
-        if ( type == null ) throw new IllegalArgumentException("type cannot be null");
-
-        logger.debug("Posting report of type " + type);
-        switch (type) {
-            case NO_ET: // don't do anything
-                return false;
-            case AWS:
-                wentToAWS = true;
-                return postReportToAWSS3() != null;
-            case STDOUT:
-                return postReportToStream(System.out);
-            default:
-                exceptDuringRunReport("BUG: unexpected PhoneHomeOption ");
-                return false;
-        }
-    }
-
-    // ---------------------------------------------------------------------------
-    //
-    // Code for sending reports to local files
-    //
-    // ---------------------------------------------------------------------------
-
-    /**
-     * Write an XML representation of this report to the stream, throwing a GATKException if the marshalling
-     * fails for any reason.
-     *
-     * @param stream an output stream to write the report to
-     */
-    @Requires("stream != null")
-    protected boolean postReportToStream(final OutputStream stream) {
-        final Serializer serializer = new Persister();
-        try {
-            serializer.write(this, stream);
-            return true;
-        } catch (Exception e) {
-            return false;
-        }
-    }
-
-    // ---------------------------------------------------------------------------
-    //
-    // Code for sending reports to s3
-    //
-    // ---------------------------------------------------------------------------
-
-    /**
-     * Get the name of the S3 bucket where we should upload this report
-     *
-     * @return the string name of the s3 bucket
-     */
-    @Ensures("result != null")
-    protected String getS3ReportBucket() {
-        return s3ReportBucket;
-    }
-
-    /**
-     * Decrypts encrypted AWS key from encryptedKeySource
-     * @param encryptedKeySource a file containing an encrypted AWS key
-     * @return a decrypted AWS key as a String
-     */
-    @Ensures("result != null")
-    public static String decryptAWSKey(final File encryptedKeySource) throws FileNotFoundException {
-        if ( encryptedKeySource == null ) throw new IllegalArgumentException("encryptedKeySource cannot be null");
-        return decryptAWSKey(new FileInputStream(encryptedKeySource));
-    }
-
-    /**
-     * @see #decryptAWSKey(java.io.File) but with input from an inputstream
-     */
-    @Requires("encryptedKeySource != null")
-    @Ensures("result != null")
-    private static String decryptAWSKey(final InputStream encryptedKeySource) {
-        final PublicKey key = CryptUtils.loadGATKDistributedPublicKey();
-        final byte[] fromDisk = IOUtils.readStreamIntoByteArray(encryptedKeySource);
-        final byte[] decrypted = CryptUtils.decryptData(fromDisk, key);
-        return new String(decrypted);
-    }
-
-    /**
-     * Get the decrypted AWS key sorted in the resource directories of name
-     * @param name the name of the file containing the needed AWS key
-     * @return a non-null GATK
-     */
-    @Requires("name != null")
-    @Ensures("result != null")
-    private static String getAWSKey(final String name) {
-        final Resource resource = new Resource(name, GATKRunReport.class);
-        return decryptAWSKey(resource.getResourceContentsAsStream());
-    }
-
-    /**
-     * Get the AWS access key for the GATK user
-     * @return a non-null AWS access key for the GATK user
-     */
-    @Ensures("result != null")
-    protected static String getAWSUploadAccessKey() {
-        return getAWSKey("resources/GATK_AWS_access.key");
-    }
-
-    /**
-     * Get the AWS secret key for the GATK user
-     * @return a non-null AWS secret key for the GATK user
-     */
-    @Ensures("result != null")
-    protected static String getAWSUploadSecretKey() {
-        return getAWSKey("resources/GATK_AWS_secret.key");
-    }
-
-    /**
-     * Check that the AWS keys can be decrypted and are what we expect them to be
-     *
-     * @throws ReviewedGATKException if anything goes wrong
-     */
-    public static void checkAWSAreValid() {
-        try {
-            final String accessKeyMD5 = Utils.calcMD5(getAWSUploadAccessKey());
-            final String secretKeyMD5 = Utils.calcMD5(getAWSUploadSecretKey());
-
-            if ( ! AWS_ACCESS_KEY_MD5.equals(accessKeyMD5) ) {
-                throw new ReviewedGATKException("Invalid AWS access key found, expected MD5 " + AWS_ACCESS_KEY_MD5 + " but got " + accessKeyMD5);
-            }
-            if ( ! AWS_SECRET_KEY_MD5.equals(secretKeyMD5) ) {
-                throw new ReviewedGATKException("Invalid AWS secret key found, expected MD5 " + AWS_SECRET_KEY_MD5 + " but got " + secretKeyMD5);
-            }
-
-        } catch ( Exception e ) {
-            throw new ReviewedGATKException("Couldn't decrypt AWS keys, something is wrong with the GATK distribution");
-        }
-    }
-
-    /**
-     * Get an initialized S3Service for use in communicating with AWS/s3
-     *
-     * @param awsAccessKey our AWS access key to use
-     * @param awsSecretKey our AWS secret key to use
-     * @return an initialized S3Service object that can be immediately used to interact with S3
-     * @throws S3ServiceException
-     */
-    @Requires({"awsAccessKey != null", "awsSecretKey != null"})
-    @Ensures("result != null")
-    protected static S3Service initializeAWSService(final String awsAccessKey, final String awsSecretKey) throws S3ServiceException {
-        // To communicate with S3, create a class that implements an S3Service. We will use the REST/HTTP
-        // implementation based on HttpClient, as this is the most robust implementation provided with JetS3t.
-        final AWSCredentials awsCredentials = new AWSCredentials(awsAccessKey, awsSecretKey);
-        return new RestS3Service(awsCredentials);
-    }
-
-    /**
-     * A runnable that pushes this GATKReport up to s3.
-     *
-     * Should be run in a separate thread so we can time it out if something is taking too long
-     */
-    private class S3PutRunnable implements Runnable {
-        /** Was the upload operation successful? */
-        public final AtomicBoolean isSuccess;
-        /** The name of this report */
-        private final String filename;
-        /** The contents of this report */
-        private final byte[] contents;
-
-        /** The s3Object that we created to upload, or null if it failed */
-        public S3Object s3Object = null;
-
-        @Requires({"filename != null", "contents != null"})
-        public S3PutRunnable(final String filename, final byte[] contents){
-            this.isSuccess = new AtomicBoolean();
-            this.filename = filename;
-            this.contents = contents;
-        }
-
-        public void run() {
-            try {
-                switch ( awsMode ) {
-                    case FAIL_WITH_EXCEPTION:
-                        throw new IllegalStateException("We are throwing an exception for testing purposes");
-                    case TIMEOUT:
-                        try {
-                            Thread.sleep(s3PutTimeOutInMilliseconds * 100);
-                        } catch ( InterruptedException e ) {
-                            // supposed to be empty
-                        }
-                        break;
-                    case NORMAL:
-                        // IAM GATK user credentials -- only right is to PutObject into broad.gsa.gatk.run.reports bucket
-                        final S3Service s3Service = initializeAWSService(getAWSUploadAccessKey(), getAWSUploadSecretKey());
-
-                        // Create an S3Object based on a file, with Content-Length set automatically and
-                        // Content-Type set based on the file's extension (using the Mimetypes utility class)
-                        final S3Object fileObject = new S3Object(filename, contents);
-                        //logger.info("Created S3Object" + fileObject);
-                        //logger.info("Uploading " + localFile + " to AWS bucket");
-                        s3Object = s3Service.putObject(getS3ReportBucket(), fileObject);
-                        isSuccess.set(true);
-                        break;
-                    default:
-                        throw new IllegalStateException("Unexpected AWS exception");
-                }
-            } catch ( S3ServiceException e ) {
-                exceptDuringRunReport("S3 exception occurred", e);
-            } catch ( NoSuchAlgorithmException e ) {
-                exceptDuringRunReport("Couldn't calculate MD5", e);
-            } catch ( IOException e ) {
-                exceptDuringRunReport("Couldn't read report file", e);
-            } catch ( Exception e ) {
-                exceptDuringRunReport("An unexpected exception occurred during posting", e);
-            }
-        }
-    }
-
-    /**
-     * Post this GATK report to the AWS s3 GATK_Run_Report log
-     *
-     * @return the s3Object pointing to our pushed report, or null if we failed to push
-     */
-    protected S3Object postReportToAWSS3() {
-        // modifying example code from http://jets3t.s3.amazonaws.com/toolkit/code-samples.html
-        this.hostName = Utils.resolveHostname(); // we want to fill in the host name
-        final String key = getReportFileName();
-        logger.debug("Generating GATK report to AWS S3 with key " + key);
-
-        try {
-            // create an byte output stream so we can capture the output as a byte[]
-            final ByteArrayOutputStream byteStream = new ByteArrayOutputStream(8096);
-            final OutputStream outputStream = new GZIPOutputStream(byteStream);
-            postReportToStream(outputStream);
-            outputStream.close();
-            final byte[] report = byteStream.toByteArray();
-
-            // stop us from printing the annoying, and meaningless, mime types warning
-            final Logger mimeTypeLogger = Logger.getLogger(org.jets3t.service.utils.Mimetypes.class);
-            mimeTypeLogger.setLevel(Level.FATAL);
-
-            // Set the S3 upload on its own thread with timeout:
-            final S3PutRunnable s3run = new S3PutRunnable(key,report);
-            final Thread s3thread = new Thread(s3run);
-            s3thread.setDaemon(true);
-            s3thread.setName("S3Put-Thread");
-            s3thread.start();
-
-            s3thread.join(s3PutTimeOutInMilliseconds);
-
-            if(s3thread.isAlive()){
-                s3thread.interrupt();
-                exceptDuringRunReport("Run statistics report upload to AWS S3 timed-out");
-            } else if(s3run.isSuccess.get()) {
-                logger.info("Uploaded run statistics report to AWS S3");
-                logger.debug("Uploaded to AWS: " + s3run.s3Object);
-                return s3run.s3Object;
-            } else {
-                // an exception occurred, the thread should have already invoked the exceptDuringRunReport function
-            }
-        } catch ( IOException e ) {
-            exceptDuringRunReport("Couldn't read report file", e);
-        } catch ( InterruptedException e) {
-            exceptDuringRunReport("Run statistics report upload interrupted", e);
-        }
-
-        return null;
-    }
-
-    // ---------------------------------------------------------------------------
-    //
-    // Error handling code
-    //
-    // ---------------------------------------------------------------------------
-
-    /**
-     * Note that an exception occurred during creating or writing this report
-     * @param msg the message to print
-     * @param e the exception that occurred
-     */
-    @Ensures("exceptionOccurredDuringPost()")
-    private void exceptDuringRunReport(final String msg, final Throwable e) {
-        this.errorMessage = msg;
-        this.errorThrown = e;
-        logger.debug("A problem occurred during GATK run reporting [*** everything is fine, but no report could be generated; please do not post this to the support forum ***].  Message is: " + msg + ".  Error message is: " + e.getMessage());
-    }
-
-    /**
-     * Note that an exception occurred during creating or writing this report
-     * @param msg the message to print
-     */
-    @Ensures("exceptionOccurredDuringPost()")
-    private void exceptDuringRunReport(final String msg) {
-        this.errorMessage = msg;
-        logger.debug("A problem occurred during GATK run reporting [*** everything is fine, but no report could be generated; please do not post this to the support forum ***].  Message is " + msg);
-    }
-
-    /**
-     * Did an error occur during the posting of this run report?
-     * @return true if so, false if not
-     */
-    public boolean exceptionOccurredDuringPost() {
-        return getErrorMessage() != null;
-    }
-
-    /**
-     * If an error occurred during posting of this report, retrieve the message of the error that occurred, or null if
-     * no error occurred
-     * @return a string describing the error that occurred, or null if none did
-     */
-    public String getErrorMessage() {
-        return errorMessage;
-    }
-
-    /**
-     * Get the throwable that caused the exception during posting of this message, or null if none was available
-     *
-     * Note that getting a null valuable from this function doesn't not imply that no error occurred.  Some
-     * errors that occurred many not have generated a throwable.
-     *
-     * @return the Throwable that caused the error, or null if no error occurred or was not caused by a throwable
-     */
-    public Throwable getErrorThrown() {
-        return errorThrown;
-    }
-
-    /**
-     * Helper method to format the exception that occurred during posting, or a string saying none occurred
-     * @return a non-null string
-     */
-    @Ensures("result != null")
-    protected String formatError() {
-        return exceptionOccurredDuringPost()
-                ? String.format("Exception message=%s with cause=%s", getErrorMessage(), getErrorThrown())
-                : "No exception occurred";
-    }
-
-    // ---------------------------------------------------------------------------
-    //
-    // Equals and hashcode -- purely for comparing reports for testing
-    //
-    // ---------------------------------------------------------------------------
-
-    @Override
-    public boolean equals(Object o) {
-        if (this == o) return true;
-        if (o == null || getClass() != o.getClass()) return false;
-
-        GATKRunReport that = (GATKRunReport) o;
-
-        if (maxMemory != that.maxMemory) return false;
-        if (nIterations != that.nIterations) return false;
-        if (numThreads != that.numThreads) return false;
-        if (runTime != that.runTime) return false;
-        if (totalMemory != that.totalMemory) return false;
-        if (endTime != null ? !endTime.equals(that.endTime) : that.endTime != null) return false;
-        if (hostName != null ? !hostName.equals(that.hostName) : that.hostName != null) return false;
-        if (id != null ? !id.equals(that.id) : that.id != null) return false;
-        if (javaVersion != null ? !javaVersion.equals(that.javaVersion) : that.javaVersion != null) return false;
-        if (mException != null ? !mException.equals(that.mException) : that.mException != null) return false;
-        if (machine != null ? !machine.equals(that.machine) : that.machine != null) return false;
-        if (percentTimeBlocking != null ? !percentTimeBlocking.equals(that.percentTimeBlocking) : that.percentTimeBlocking != null)
-            return false;
-        if (percentTimeRunning != null ? !percentTimeRunning.equals(that.percentTimeRunning) : that.percentTimeRunning != null)
-            return false;
-        if (percentTimeWaiting != null ? !percentTimeWaiting.equals(that.percentTimeWaiting) : that.percentTimeWaiting != null)
-            return false;
-        if (percentTimeWaitingForIO != null ? !percentTimeWaitingForIO.equals(that.percentTimeWaitingForIO) : that.percentTimeWaitingForIO != null)
-            return false;
-        if (startTime != null ? !startTime.equals(that.startTime) : that.startTime != null) return false;
-        if (svnVersion != null ? !svnVersion.equals(that.svnVersion) : that.svnVersion != null) return false;
-        if (tag != null ? !tag.equals(that.tag) : that.tag != null) return false;
-        if (userName != null ? !userName.equals(that.userName) : that.userName != null) return false;
-        if (walkerName != null ? !walkerName.equals(that.walkerName) : that.walkerName != null) return false;
-
-        return true;
-    }
-
-    @Override
-    public int hashCode() {
-        int result = id != null ? id.hashCode() : 0;
-        result = 31 * result + (mException != null ? mException.hashCode() : 0);
-        result = 31 * result + (startTime != null ? startTime.hashCode() : 0);
-        result = 31 * result + (endTime != null ? endTime.hashCode() : 0);
-        result = 31 * result + (int) (runTime ^ (runTime >>> 32));
-        result = 31 * result + (walkerName != null ? walkerName.hashCode() : 0);
-        result = 31 * result + (svnVersion != null ? svnVersion.hashCode() : 0);
-        result = 31 * result + (int) (totalMemory ^ (totalMemory >>> 32));
-        result = 31 * result + (int) (maxMemory ^ (maxMemory >>> 32));
-        result = 31 * result + (userName != null ? userName.hashCode() : 0);
-        result = 31 * result + (hostName != null ? hostName.hashCode() : 0);
-        result = 31 * result + (javaVersion != null ? javaVersion.hashCode() : 0);
-        result = 31 * result + (machine != null ? machine.hashCode() : 0);
-        result = 31 * result + (int) (nIterations ^ (nIterations >>> 32));
-        result = 31 * result + (tag != null ? tag.hashCode() : 0);
-        result = 31 * result + numThreads;
-        result = 31 * result + (percentTimeRunning != null ? percentTimeRunning.hashCode() : 0);
-        result = 31 * result + (percentTimeWaiting != null ? percentTimeWaiting.hashCode() : 0);
-        result = 31 * result + (percentTimeBlocking != null ? percentTimeBlocking.hashCode() : 0);
-        result = 31 * result + (percentTimeWaitingForIO != null ? percentTimeWaitingForIO.hashCode() : 0);
-        return result;
-    }
-
-    // ---------------------------------------------------------------------------
-    //
-    // Code specifically for testing the GATKRunReport
-    //
-    // ---------------------------------------------------------------------------
-
-    /**
-     * Enum specifying how the S3 uploader should behave.  Must be normal by default.  Purely for testing purposes
-     */
-    protected enum AWSMode {
-        NORMAL, // write normally to AWS
-        FAIL_WITH_EXCEPTION, // artificially fail during writing
-        TIMEOUT // sleep, so we time out
-    }
-    /** Our AWS mode */
-    private AWSMode awsMode = AWSMode.NORMAL;
-    /** The bucket were we send the GATK report on AWS/s3 */
-    private String s3ReportBucket = REPORT_BUCKET_NAME;
-    /** Did we send the report to AWS? */
-    private boolean wentToAWS = false;
-
-    /**
-     * Send the report to the AWS test bucket -- for testing only
-     */
-    protected void sendAWSToTestBucket() {
-        s3ReportBucket = TEST_REPORT_BUCKET_NAME;
-    }
-
-    /**
-     * Has the report been written to AWS?
-     *
-     * Does not imply anything about the success of the send, just that it was attempted
-     *
-     * @return true if the report has been sent to AWS, false otherwise
-     */
-    protected boolean wentToAWS() {
-        return wentToAWS;
-    }
-
-    /**
-     * Purely for testing purposes.  Tells the AWS uploader whether to actually upload or simulate errors
-     * @param mode what we want to do
-     */
-    @Requires("mode != null")
-    protected void setAwsMode(final AWSMode mode) {
-        this.awsMode = mode;
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/phonehome/GATKRunReportException.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/phonehome/GATKRunReportException.java
deleted file mode 100644
index 4de344e..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/phonehome/GATKRunReportException.java
+++ /dev/null
@@ -1,99 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.phonehome;
-
-import org.broadinstitute.gatk.utils.exceptions.UserException;
-import org.simpleframework.xml.Element;
-import org.simpleframework.xml.ElementList;
-
-import java.util.ArrayList;
-import java.util.List;
-
-/**
- * A helper class for formatting in XML the throwable chain starting at e.
- */
-class GATKRunReportException {
-    @Element(required = false, name = "message")
-    String message = null;
-
-    @ElementList(required = false, name = "stacktrace")
-    final List<String> stackTrace = new ArrayList<String>();
-
-    @Element(required = false, name = "cause")
-    GATKRunReportException cause = null;
-
-    @Element(required = false, name = "is-user-exception")
-    Boolean isUserException;
-
-    @Element(required = false, name = "exception-class")
-    Class exceptionClass;
-
-    /**
-     * Allow us to deserialize from XML
-     */
-    public GATKRunReportException() { }
-
-    public GATKRunReportException(Throwable e) {
-        message = e.getMessage();
-        exceptionClass = e.getClass();
-        isUserException = e instanceof UserException;
-        for (StackTraceElement element : e.getStackTrace()) {
-            stackTrace.add(element.toString());
-        }
-
-        if ( e.getCause() != null ) {
-            cause = new GATKRunReportException(e.getCause());
-        }
-    }
-
-    @Override
-    public boolean equals(Object o) {
-        if (this == o) return true;
-        if (o == null || getClass() != o.getClass()) return false;
-
-        GATKRunReportException that = (GATKRunReportException) o;
-
-        if (cause != null ? !cause.equals(that.cause) : that.cause != null) return false;
-        if (exceptionClass != null ? !exceptionClass.equals(that.exceptionClass) : that.exceptionClass != null)
-            return false;
-        if (isUserException != null ? !isUserException.equals(that.isUserException) : that.isUserException != null)
-            return false;
-        if (message != null ? !message.equals(that.message) : that.message != null) return false;
-        if (stackTrace != null ? !stackTrace.equals(that.stackTrace) : that.stackTrace != null) return false;
-
-        return true;
-    }
-
-    @Override
-    public int hashCode() {
-        int result = message != null ? message.hashCode() : 0;
-        result = 31 * result + (stackTrace != null ? stackTrace.hashCode() : 0);
-        result = 31 * result + (cause != null ? cause.hashCode() : 0);
-        result = 31 * result + (isUserException != null ? isUserException.hashCode() : 0);
-        result = 31 * result + (exceptionClass != null ? exceptionClass.hashCode() : 0);
-        return result;
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/refdata/RODRecordListImpl.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/refdata/RODRecordListImpl.java
deleted file mode 100644
index 7296c39..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/refdata/RODRecordListImpl.java
+++ /dev/null
@@ -1,129 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.refdata;
-
-import org.broadinstitute.gatk.engine.refdata.utils.GATKFeature;
-import org.broadinstitute.gatk.engine.refdata.utils.RODRecordList;
-import org.broadinstitute.gatk.utils.GenomeLoc;
-import org.broadinstitute.gatk.utils.HasGenomeLocation;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-
-import java.util.*;
-
-/**
- * Created by IntelliJ IDEA.
- * User: asivache
- * Date: Sep 10, 2009
- * Time: 6:10:48 PM
- * To change this template use File | Settings | File Templates.
- */
-public class RODRecordListImpl extends AbstractList<GATKFeature> implements Comparable<RODRecordList>, Cloneable, RODRecordList, HasGenomeLocation {
-    private List<GATKFeature> records;
-    private GenomeLoc location = null;
-    private String name = null;
-
-    public RODRecordListImpl(String name) {
-        records = new ArrayList<GATKFeature>();
-        this.name = name;
-    }
-
-    /**
-     * Fully qualified constructor: instantiates a new GATKFeatureRecordList object with specified GATKFeature track name, location on the
-     * reference, and list of associated GATKFeatures. This is a knee-deep COPY constructor: passed name, loc, and data element
-     * objects will be referenced from the created GATKFeatureRecordList (so that changing them from outside will affect data
-     * in this object), however, the data elements will be copied into a newly
-     * allocated list, so that the 'data' collection argument can be modified afterwards without affecting the state
-     * of this record list. WARNING: this constructor is (semi-)validating: passed name and location
-     * are allowed to be nulls (although it maybe unsafe, use caution), but if they are not nulls, then passed non-null GATKFeature data
-     * elements must have same track name, and their locations must overlap with the passed 'location' argument. Null
-     * data elements or null 'data' collection argument are allowed as well.
-     * @param name the name of the track
-     * @param data the collection of features at this location
-     * @param loc the location
-     */
-    public RODRecordListImpl(String name, Collection<GATKFeature> data, GenomeLoc loc) {
-        this.records = new ArrayList<GATKFeature>(data==null?0:data.size());
-        this.name = name;
-        this.location = loc;
-        if ( data == null || data.size() == 0 ) return; // empty dataset, nothing to do
-        for ( GATKFeature r : data ) {
-            records.add(r);
-            if ( r == null ) continue;
-            if ( ! this.name.equals(r.getName() ) ) {
-                throw new ReviewedGATKException("Attempt to add GATKFeature with non-matching name "+r.getName()+" to the track "+name);
-            }
-            if ( location != null && ! location.overlapsP(r.getLocation()) ) {
-                    throw new ReviewedGATKException("Attempt to add GATKFeature that lies outside of specified interval "+location+"; offending GATKFeature:\n"+r.toString());
-            }
-        }
-    }
-
-
-    public GenomeLoc getLocation() { return location; }
-    public String getName() { return name; }
-    public Iterator<GATKFeature> iterator() { return records.iterator() ; }
-    public void clear() { records.clear(); }
-    public boolean isEmpty() { return records.isEmpty(); }
-
-    public boolean add(GATKFeature record) { add(record, false); return true;}
-
-    @Override
-    public GATKFeature get(int i) {
-        return records.get(i);
-    }
-
-    public void add(GATKFeature record, boolean allowNameMismatch) {
-        if ( record != null ) {
-            if ( ! allowNameMismatch && ! name.equals(record.getName() ) )
-                throw new ReviewedGATKException("Attempt to add GATKFeature with non-matching name "+record.getName()+" to the track "+name);
-        }
-        records.add(record);
-    }
-
-    public void add(RODRecordList records ) { add( records, false ); }
-
-    public void add(RODRecordList records, boolean allowNameMismatch) {
-        for ( GATKFeature record : records )
-            add(record, allowNameMismatch);
-    }    
-
-    public int size() { return records.size() ; }
-
-    /**
-     * Compares this object with the specified object for order.  Returns a
-     * negative integer, zero, or a positive integer as this object is less
-     * than, equal to, or greater than the specified object.
-     *
-     * @param that the object to be compared.
-     * @return a negative integer, zero, or a positive integer as this object
-     *         is less than, equal to, or greater than the specified object.
-     * @throws ClassCastException if the specified object's type prevents it
-     *                            from being compared to this object.
-     */
-    public int compareTo(RODRecordList that) {
-        return getLocation().compareTo(that.getLocation());  //To change body of implemented methods use File | Settings | File Templates.
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/refdata/RefMetaDataTracker.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/refdata/RefMetaDataTracker.java
deleted file mode 100644
index 7ccf6e5..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/refdata/RefMetaDataTracker.java
+++ /dev/null
@@ -1,497 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.refdata;
-
-import com.google.java.contract.Ensures;
-import com.google.java.contract.Requires;
-import org.apache.log4j.Logger;
-import htsjdk.tribble.Feature;
-import org.broadinstitute.gatk.utils.commandline.RodBinding;
-import org.broadinstitute.gatk.engine.refdata.utils.GATKFeature;
-import org.broadinstitute.gatk.engine.refdata.utils.RODRecordList;
-import org.broadinstitute.gatk.utils.GenomeLoc;
-import org.broadinstitute.gatk.utils.exceptions.UserException;
-
-import java.util.*;
-
-/**
- * This class represents the Reference Metadata available at a particular site in the genome.  It can be
- * used to conveniently lookup the RMDs at this site, as well just getting a list of all of the RMDs
- *
- * The standard interaction model is:
- *
- * Traversal system arrives at a site, which has a bunch of RMDs covering it
- * Traversal passes creates a tracker and passes it to the walker
- * walker calls get(rodBinding) to obtain the RMDs values at this site for the track
- * associated with rodBinding.
- *
- * Note that this is an immutable class.  Once created the underlying data structures
- * cannot be modified
- *
- * User: mdepristo
- * Date: Apr 3, 2009
- * Time: 3:05:23 PM
- */
-public class RefMetaDataTracker {
-    // TODO: this should be a list, not a bindings, actually
-    private final static RODRecordList EMPTY_ROD_RECORD_LIST = new RODRecordListImpl("EMPTY");
-
-    final Map<String, RODRecordList> bindings;
-    final protected static Logger logger = Logger.getLogger(RefMetaDataTracker.class);
-    public final static RefMetaDataTracker EMPTY_TRACKER = new RefMetaDataTracker();
-
-    // ------------------------------------------------------------------------------------------
-    //
-    //
-    // Special ENGINE interaction functions
-    //
-    //
-    // ------------------------------------------------------------------------------------------
-
-    /**
-     * Create an tracker with no bindings
-     */
-    public RefMetaDataTracker() {
-        bindings = Collections.emptyMap();
-    }
-
-    public RefMetaDataTracker(final Collection<RODRecordList> allBindings) {
-        // set up the bindings
-        if ( allBindings.isEmpty() )
-            bindings = Collections.emptyMap();
-        else {
-            final Map<String, RODRecordList> tmap = new HashMap<String, RODRecordList>(allBindings.size());
-            for ( RODRecordList rod : allBindings ) {
-                if ( rod != null && ! rod.isEmpty() )
-                    tmap.put(canonicalName(rod.getName()), rod);
-            }
-
-            // ensure that no one modifies the bindings itself
-            bindings = Collections.unmodifiableMap(tmap);
-        }
-    }
-
-    // ------------------------------------------------------------------------------------------
-    //
-    //
-    // Generic accessors
-    //
-    //
-    // ------------------------------------------------------------------------------------------
-
-    /**
-     * Gets all of the Tribble features spanning this locus, returning them as a list of specific
-     * type T extending Feature.  This function looks across all tracks to find the Features, so
-     * if you have two tracks A and B each containing 1 Feature, then getValues will return
-     * a list containing both features.
-     *
-     * Note that this function assumes that all of the bound features are instances of or
-     * subclasses of T.  A ClassCastException will occur if this isn't the case.  If you want
-     * to get all Features without any danger of such an exception use the root Tribble
-     * interface Feature.
-     *
-     * @param type The type of the underlying objects bound here
-     * @param <T> as above
-     * @return A freshly allocated list of all of the bindings, or an empty list if none are bound.
-     */
-    @Requires({"type != null"})
-    @Ensures("result != null")
-    public <T extends Feature> List<T> getValues(final Class<T> type) {
-        return addValues(bindings.keySet(), type, new ArrayList<T>(), null, false, false);
-    }
-
-    /**
-     * Provides the same functionality as @link #getValues(Class<T>) but will only include
-     * Features that start as the GenomeLoc provide onlyAtThisLoc.
-     *
-     * @param type The type of the underlying objects bound here
-     * @param onlyAtThisLoc
-     * @param <T> as above
-     * @return A freshly allocated list of all of the bindings, or an empty list if none are bound.
-     */
-    @Requires({"type != null", "onlyAtThisLoc != null"})
-    @Ensures("result != null")
-    public <T extends Feature> List<T> getValues(final Class<T> type, final GenomeLoc onlyAtThisLoc) {
-        return addValues(bindings.keySet(), type, new ArrayList<T>(), onlyAtThisLoc, true, false);
-    }
-
-    /**
-     * Uses the same logic as @link #getValues(Class) but arbitrary select one of the resulting
-     * elements of the list to return.  That is, if there would be two elements in the result of
-     * @link #getValues(Class), one of these two is selected, and which one it will be isn't
-     * specified.  Consequently, this method is only really safe if (1) you absolutely know
-     * that only one binding will meet the constraints of @link #getValues(Class) or (2)
-     * you truly don't care which of the multiple bindings available you are going to examine.
-     *
-     * If there are no bindings here, getFirstValue() return null
-     *
-     * @param type The type of the underlying objects bound here
-     * @param <T> as above
-     * @return A random single element the RODs bound here, or null if none are bound.
-     */
-    @Requires({"type != null"})
-    public <T extends Feature> T getFirstValue(final Class<T> type) {
-        return safeGetFirst(getValues(type));
-    }
-
-    /**
-     * Uses the same logic as @link #getValue(Class,GenomeLoc) to determine the list
-     * of eligible Features and @link #getFirstValue(Class) to select a single
-     * element from the interval list.
-     *
-     * @param type The type of the underlying objects bound here
-     * @param <T> as above
-     * @param onlyAtThisLoc only Features starting at this site are considered
-     * @return A random single element the RODs bound here starting at onlyAtThisLoc, or null if none are bound.
-     */
-    @Requires({"type != null", "onlyAtThisLoc != null"})
-    public <T extends Feature> T getFirstValue(final Class<T> type, final GenomeLoc onlyAtThisLoc) {
-        return safeGetFirst(getValues(type, onlyAtThisLoc));
-    }
-
-    /**
-     * Same logic as @link #getFirstValue(RodBinding, boolean) but prioritizes records from prioritizeThisLoc if available
-     *
-     * @param rodBindings Only Features coming from the tracks associated with one of rodBindings are fetched
-     * @param <T> The Tribble Feature type of the rodBinding, and consequently the type of the resulting list of Features
-     * @param prioritizeThisLoc only Features starting at this site are considered
-     * @return A freshly allocated list of all of the bindings, or an empty list if none are bound.
-     */
-    @Requires({"rodBindings != null", "prioritizeThisLoc != null"})
-    @Ensures("result != null")
-    public <T extends Feature> List<T> getPrioritizedValue(final Collection<RodBinding<T>> rodBindings, final GenomeLoc prioritizeThisLoc) {
-        final List<T> results = new ArrayList<>();
-
-        for ( final RodBinding<T> rodBinding : rodBindings ) {
-
-            // if there's a value at the prioritized location, take it
-            T value = getFirstValue(rodBinding, prioritizeThisLoc);
-
-            // otherwise, grab any one
-            if ( value == null )
-                value = getFirstValue(rodBinding);
-
-            // add if not null
-            if ( value != null )
-                results.add(value);
-        }
-
-        return results;
-    }
-
-    /**
-     * Gets all of the Tribble features bound to RodBinding spanning this locus, returning them as
-     * a list of specific type T extending Feature.
-     *
-     * Note that this function assumes that all of the bound features are instances of or
-     * subclasses of T.  A ClassCastException will occur if this isn't the case.
-     *
-     * @param rodBinding Only Features coming from the track associated with this rodBinding are fetched
-     * @param <T> The Tribble Feature type of the rodBinding, and consequently the type of the resulting list of Features
-     * @return A freshly allocated list of all of the bindings, or an empty list if none are bound.
-     */
-    @Requires({"rodBinding != null"})
-    @Ensures("result != null")
-    public <T extends Feature> List<T> getValues(final RodBinding<T> rodBinding) {
-        return addValues(rodBinding.getName(), rodBinding.getType(), new ArrayList<T>(1), getTrackDataByName(rodBinding), null, false, false);
-    }
-
-    /**
-     * Gets all of the Tribble features bound to any RodBinding in rodBindings,
-     * spanning this locus, returning them as a list of specific type T extending Feature.
-     *
-     * Note that this function assumes that all of the bound features are instances of or
-     * subclasses of T.  A ClassCastException will occur if this isn't the case.
-     *
-     * @param rodBindings Only Features coming from the tracks associated with one of rodBindings are fetched
-     * @param <T> The Tribble Feature type of the rodBinding, and consequently the type of the resulting list of Features
-     * @return A freshly allocated list of all of the bindings, or an empty list if none are bound.
-     */
-    @Requires({"rodBindings != null"})
-    @Ensures("result != null")
-    public <T extends Feature> List<T> getValues(final Collection<RodBinding<T>> rodBindings) {
-        List<T> results = new ArrayList<T>(1);
-        for ( RodBinding<T> rodBinding : rodBindings )
-            results.addAll(getValues(rodBinding));
-        return results;
-    }
-
-    /**
-     * The same logic as @link #getValues(RodBinding) but enforces that each Feature start at onlyAtThisLoc
-     *
-     * @param rodBinding Only Features coming from the track associated with this rodBinding are fetched
-     * @param <T> The Tribble Feature type of the rodBinding, and consequently the type of the resulting list of Features
-     * @param onlyAtThisLoc only Features starting at this site are considered
-     * @return A freshly allocated list of all of the bindings, or an empty list if none are bound.
-     */
-    @Requires({"rodBinding != null", "onlyAtThisLoc != null"})
-    @Ensures("result != null")
-    public <T extends Feature> List<T> getValues(final RodBinding<T> rodBinding, final GenomeLoc onlyAtThisLoc) {
-        return addValues(rodBinding.getName(), rodBinding.getType(), new ArrayList<T>(1), getTrackDataByName(rodBinding), onlyAtThisLoc, true, false);
-    }
-
-    /**
-     * The same logic as @link #getValues(List) but enforces that each Feature start at onlyAtThisLoc
-     *
-     * @param rodBindings Only Features coming from the tracks associated with one of rodBindings are fetched
-     * @param <T> The Tribble Feature type of the rodBinding, and consequently the type of the resulting list of Features
-     * @param onlyAtThisLoc only Features starting at this site are considered
-     * @return A freshly allocated list of all of the bindings, or an empty list if none are bound.
-     */
-    @Requires({"rodBindings != null", "onlyAtThisLoc != null"})
-    @Ensures("result != null")
-    public <T extends Feature> List<T> getValues(final Collection<RodBinding<T>> rodBindings, final GenomeLoc onlyAtThisLoc) {
-        List<T> results = new ArrayList<T>(1);
-        for ( RodBinding<T> rodBinding : rodBindings )
-            results.addAll(getValues(rodBinding, onlyAtThisLoc));
-        return results;
-    }
-
-    /**
-     * Uses the same logic as @getValues(RodBinding) to determine the list
-     * of eligible Features and select a single element from the resulting set
-     * of eligible features.
-     *
-     * @param rodBinding Only Features coming from the track associated with this rodBinding are fetched
-     * @param <T> as above
-     * @return A random single element the eligible Features found, or null if none are bound.
-     */
-    @Requires({"rodBinding != null"})
-    public <T extends Feature> T getFirstValue(final RodBinding<T> rodBinding) {
-        return safeGetFirst(addValues(rodBinding.getName(), rodBinding.getType(), null, getTrackDataByName(rodBinding), null, false, true));
-    }
-
-    /**
-     * Uses the same logic as @getValues(RodBinding, GenomeLoc) to determine the list
-     * of eligible Features and select a single element from the resulting set
-     * of eligible features.
-     *
-     * @param rodBinding Only Features coming from the track associated with this rodBinding are fetched
-     * @param <T> as above
-     * @param onlyAtThisLoc only Features starting at this site are considered
-     * @return A random single element the eligible Features found, or null if none are bound.
-     */
-    @Requires({"rodBinding != null", "onlyAtThisLoc != null"})
-    public <T extends Feature> T getFirstValue(final RodBinding<T> rodBinding, final GenomeLoc onlyAtThisLoc) {
-        return safeGetFirst(addValues(rodBinding.getName(), rodBinding.getType(), null, getTrackDataByName(rodBinding), onlyAtThisLoc, true, true));
-    }
-
-    /**
-     * Uses the same logic as @getValues(List) to determine the list
-     * of eligible Features and select a single element from the resulting set
-     * of eligible features.
-     *
-     * @param rodBindings Only Features coming from the tracks associated with these rodBindings are fetched
-     * @param <T> as above
-     * @return A random single element the eligible Features found, or null if none are bound.
-     */
-    @Requires({"rodBindings != null"})
-    public <T extends Feature> T getFirstValue(final Collection<RodBinding<T>> rodBindings) {
-        for ( RodBinding<T> rodBinding : rodBindings ) {
-            T val = getFirstValue(rodBinding);
-            if ( val != null )
-                return val;
-        }
-        return null;
-    }
-
-    /**
-     * Uses the same logic as @getValues(RodBinding,GenomeLoc) to determine the list
-     * of eligible Features and select a single element from the resulting set
-     * of eligible features.
-     *
-     * @param rodBindings Only Features coming from the tracks associated with these rodBindings are fetched
-     * @param <T> as above
-     * @param onlyAtThisLoc only Features starting at this site are considered
-     * @return A random single element the eligible Features found, or null if none are bound.
-     */
-    @Requires({"rodBindings != null", "onlyAtThisLoc != null"})
-    public <T extends Feature> T getFirstValue(final Collection<RodBinding<T>> rodBindings, final GenomeLoc onlyAtThisLoc) {
-        for ( RodBinding<T> rodBinding : rodBindings ) {
-            T val = getFirstValue(rodBinding, onlyAtThisLoc);
-            if ( val != null )
-                return val;
-        }
-        return null;
-    }
-
-    /**
-     * Is there a binding at this site to a ROD/track with the specified name?
-     *
-     * @param rodBinding the rod binding we want to know about
-     * @return true if any Features are bound in this tracker to rodBinding
-     */
-    @Requires({"rodBinding != null"})
-    public boolean hasValues(final RodBinding rodBinding) {
-        return bindings.containsKey(canonicalName(rodBinding.getName()));
-    }
-
-    /**
-     * Get all of the RMD tracks at the current site. Each track is returned as a single compound
-     * object (RODRecordList) that may contain multiple RMD records associated with the current site.
-     *
-     * @return List of all tracks
-     */
-    public List<RODRecordList> getBoundRodTracks() {
-        return new ArrayList<RODRecordList>(bindings.values());
-    }
-
-    /**
-     * The number of tracks with at least one value bound here
-     * @return the number of tracks with at least one bound Feature
-     */
-    public int getNTracksWithBoundFeatures() {
-        return bindings.size();
-    }
-
-    // ------------------------------------------------------------------------------------------
-    // Protected accessors using strings for unit testing
-    // ------------------------------------------------------------------------------------------
-
-    protected boolean hasValues(final String name) {
-        return bindings.containsKey(canonicalName(name));
-    }
-
-    protected <T extends Feature> List<T> getValues(final Class<T> type, final String name) {
-        return addValues(name, type, new ArrayList<T>(), getTrackDataByName(name), null, false, false);
-    }
-
-    protected <T extends Feature> List<T> getValues(final Class<T> type, final String name, final GenomeLoc onlyAtThisLoc) {
-        return addValues(name, type, new ArrayList<T>(), getTrackDataByName(name), onlyAtThisLoc, true, false);
-    }
-
-    protected <T extends Feature> T getFirstValue(final Class<T> type, final String name) {
-        return safeGetFirst(getValues(type, name));
-    }
-
-    protected <T extends Feature> T getFirstValue(final Class<T> type, final String name, final GenomeLoc onlyAtThisLoc) {
-        return safeGetFirst(getValues(type, name, onlyAtThisLoc));
-    }
-
-    // ------------------------------------------------------------------------------------------
-    //
-    //
-    // Private utility functions
-    //
-    //
-    // ------------------------------------------------------------------------------------------
-
-    /**
-     * Helper function for getFirst() operations that takes a list of <T> and
-     * returns the first element, or null if no such element exists.
-     *
-     * @param l
-     * @param <T>
-     * @return
-     */
-    @Requires({"l != null"})
-    private <T extends Feature> T safeGetFirst(final List<T> l) {
-        return l.isEmpty() ? null : l.get(0);
-    }
-
-    private <T extends Feature> List<T> addValues(final Collection<String> names,
-                                                  final Class<T> type,
-                                                  List<T> values,
-                                                  final GenomeLoc curLocation,
-                                                  final boolean requireStartHere,
-                                                  final boolean takeFirstOnly ) {
-        for ( String name : names ) {
-            RODRecordList rodList = getTrackDataByName(name); // require that the name is an exact match
-            values = addValues(name, type, values, rodList, curLocation, requireStartHere, takeFirstOnly );
-            if ( takeFirstOnly && ! values.isEmpty() )
-                break;
-        }
-
-        return values;
-    }
-
-
-
-    private <T extends Feature> List<T> addValues(final String name,
-                                                  final Class<T> type,
-                                                  List<T> values,
-                                                  final RODRecordList rodList,
-                                                  final GenomeLoc curLocation,
-                                                  final boolean requireStartHere,
-                                                  final boolean takeFirstOnly ) {
-        for ( GATKFeature rec : rodList ) {
-            if ( ! requireStartHere || rec.getLocation().getStart() == curLocation.getStart() ) {  // ok, we are going to keep this thing
-                Object obj = rec.getUnderlyingObject();
-                if (!(type.isAssignableFrom(obj.getClass())))
-                    throw new UserException.CommandLineException("Unable to cast track named " + name + " to type of " + type.toString()
-                            + " it's of type " + obj.getClass());
-
-                T objT = (T)obj;
-                if ( takeFirstOnly ) {
-                    if ( values == null )
-                        values = Arrays.asList(objT);
-                    else
-                        values.add(objT);
-
-                    break;
-                } else {
-                    if ( values == null )
-                        values = new ArrayList<T>();
-                    values.add(objT);
-                }
-            }
-        }
-
-        return values == null ? Collections.<T>emptyList() : values;
-    }
-
-    /**
-     * Finds the reference metadata track named 'name' and returns all ROD records from that track associated
-     * with the current site as a RODRecordList List object. If no data track with specified name is available,
-     * returns defaultValue wrapped as RODRecordList object. NOTE: if defaultValue is null, it will be wrapped up
-     * with track name set to 'name' and location set to null; otherwise the wrapper object will have name and
-     * location set to defaultValue.getID() and defaultValue.getLocation(), respectively (use caution,
-     * defaultValue.getLocation() may be not equal to what RODRecordList's location would be expected to be otherwise:
-     * for instance, on locus traversal, location is usually expected to be a single base we are currently looking at,
-     * regardless of the presence of "extended" RODs overlapping with that location).
-     * @param name                track name
-     * @return track data for the given rod
-     */
-    private RODRecordList getTrackDataByName(final String name) {
-        final String luName = canonicalName(name);
-        RODRecordList l = bindings.get(luName);
-        return l == null ? EMPTY_ROD_RECORD_LIST : l;
-    }
-
-    private RODRecordList getTrackDataByName(final RodBinding binding) {
-        return getTrackDataByName(binding.getName());
-    }
-
-    /**
-     * Returns the canonical name of the rod name (lowercases it)
-     * @param name the name of the rod
-     * @return canonical name of the rod
-     */
-    private String canonicalName(final String name) {
-        // todo -- remove me after switch to RodBinding syntax
-        return name.toLowerCase();
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/refdata/ReferenceDependentFeatureCodec.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/refdata/ReferenceDependentFeatureCodec.java
deleted file mode 100644
index 9bff00d..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/refdata/ReferenceDependentFeatureCodec.java
+++ /dev/null
@@ -1,43 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.refdata;
-
-import htsjdk.tribble.FeatureCodec;
-import org.broadinstitute.gatk.utils.GenomeLocParser;
-
-/**
- * An interface marking that a given Tribble feature/codec is actually dependent on context within the
- * reference, rather than having a dependency only on the contig, start, and stop of the given feature.
- * A HACK.  Tribble should contain all the information in needs to decode the unqualified position of
- * a feature.
- */
-public interface ReferenceDependentFeatureCodec {
-    /**
-     * Sets the appropriate GenomeLocParser, providing additional context when decoding larger and more variable features.
-     * @param genomeLocParser The parser to supply. 
-     */
-    public void setGenomeLocParser(GenomeLocParser genomeLocParser);
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/refdata/ReferenceOrderedDatum.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/refdata/ReferenceOrderedDatum.java
deleted file mode 100644
index 95de832..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/refdata/ReferenceOrderedDatum.java
+++ /dev/null
@@ -1,66 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.refdata;
-
-import org.broadinstitute.gatk.utils.GenomeLoc;
-import org.broadinstitute.gatk.utils.HasGenomeLocation;
-
-import java.io.File;
-import java.io.FileNotFoundException;
-import java.io.IOException;
-
-/**
- * Created by IntelliJ IDEA.
- * User: mdepristo
- * Date: Feb 27, 2009
- * Time: 10:49:47 AM
- * To change this template use File | Settings | File Templates.
- */
-public interface ReferenceOrderedDatum extends Comparable<ReferenceOrderedDatum>, HasGenomeLocation {
-    public String getName();
-    public boolean parseLine(final Object header, final String[] parts) throws IOException;
-    public String toString();
-    public String toSimpleString();
-    public String repl();
-
-    /**
-     * Used by the ROD system to determine how to split input lines
-     * @return Regex string delimiter separating fields
-     */
-    public String delimiterRegex();
-
-    public GenomeLoc getLocation();
-    public int compareTo( ReferenceOrderedDatum that );
-
-    /**
-     * Backdoor hook to read header, meta-data, etc. associated with the file.  Will be
-     * called by the ROD system before streaming starts
-     *
-     * @param source source data file on disk from which this rod stream will be pulled
-     * @return a header object that will be passed to parseLine command
-     */
-    public Object initialize(final File source) throws FileNotFoundException;
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/refdata/SeekableRODIterator.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/refdata/SeekableRODIterator.java
deleted file mode 100644
index 4126214..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/refdata/SeekableRODIterator.java
+++ /dev/null
@@ -1,412 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.refdata;
-
-import htsjdk.samtools.SAMSequenceDictionary;
-import htsjdk.samtools.util.CloseableIterator;
-import org.broadinstitute.gatk.engine.iterators.PushbackIterator;
-import org.broadinstitute.gatk.engine.refdata.utils.GATKFeature;
-import org.broadinstitute.gatk.engine.refdata.utils.LocationAwareSeekableRODIterator;
-import org.broadinstitute.gatk.engine.refdata.utils.RODRecordList;
-import org.broadinstitute.gatk.utils.GenomeLoc;
-import org.broadinstitute.gatk.utils.GenomeLocParser;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-import org.broadinstitute.gatk.utils.exceptions.UserException;
-
-import java.util.Iterator;
-import java.util.LinkedList;
-import java.util.List;
-
-/**
- * Wrapper class for iterators over ROD objects. It is assumed that the underlying iterator can only
- * perform standard next() operation, which advances it to the next ROD in the stream (i.e. reads the data file
- * line by line). This iterator 1) shifts the focus from record-based traversal to position-based traversal,
- * and 2) adds querying seekForward() method.
- *
- * Namely, this iterator's next() method advances not to the next ROD in the underlying stream, but to the next
- * genomic position covered by (at least one) ROD, and returns all RODs overlapping with that position as a RODRecordList
- * collection-like object. Similarly, when seekForward(interval) is called, this iterator skips all the RODs from the
- * underlying stream, until it reaches specified genomic interval, and returns the list of all RODs overlapping with that interval.
- *
- * NOTE: this iterator has a STATE: next() operation is not allowed after a seekForward() to a non-point (extended) interval
- * of length > 1. Such a call would leave the iterator in an inconsistent state. seekForward() can always be called after
- * either seekForward() or next() (as long as usual ordering criteria are satisfied: the query interval location can neither
- * start before the current position, nor end before the previous query end). seekForward to an interval of length 1
- * reenables next() operation. 
- *
- * Created by IntelliJ IDEA.
- * User: asivache
- * Date: Sep 10, 2009
- * Time: 6:20:46 PM
- * To change this template use File | Settings | File Templates.
- */
-public class SeekableRODIterator implements LocationAwareSeekableRODIterator {
-    /**
-     * Header for the datasource backing this iterator.
-     */
-    private final Object header;
-
-    /**
-     * The parser, used to construct new genome locs.
-     */
-    private final GenomeLocParser parser;
-
-    private final SAMSequenceDictionary sequenceDictionary;
-
-    private PushbackIterator<GATKFeature> it;
-    List<GATKFeature> records = null;  // here we will keep a pile of records overlaping with current position; when we iterate
-                               // and step out of record's scope, we purge it from the list
-    String name = null; // name of the ROD track wrapped by this iterator. Will be pulled from underlying iterator.
-
-    int curr_position = 0; // where the iterator is currently positioned on the genome
-    int max_position = 0;  // the rightmost stop position of currently loaded records
-    String curr_contig = null;   // what contig the iterator is currently on
-    boolean next_is_allowed = true; // see discussion below. next() is illegal after seek-forward queries of length > 1
-
-    // the stop position of the last query. We can query only in forward direction ("seek forward");
-    // it is not only the start position of every successive query that can not be before the start
-    // of the previous one (curr_start), but it is also illegal for a query interval to *end* before
-    // the end of previous query, otherwise we can end up in an inconsistent state
-    int curr_query_end = -1;
-
-    // EXAMPLE of inconsistency curr_query_end guards against:
-    //              record 1      record 2
-    //             ----------     -----------
-    // -------------------------------------------------- REF
-    //         ------------------------- query 1 (interval 1)
-    //               ----------  query 2 (interval 2)
-    //                     --------------- query 3
-    //
-    // If we query first for interval 1, both record 1 and record 2 will be loaded.
-    // Query for interval 2, on the other hand, should return only record 1, but after
-    // query 1 was performed, record 2 is already loaded from the file. If, on the other hand,
-    // we try to un-load it from memory, we won't be able to read it again. Hence query 2 is not
-    // allowed after query 1. Note also, that curr_query_end is not equivalent to max_position:
-    // the latter only tracks where currently loaded records end (and hence helps to re-load records);
-    // after query 1 is performed, max_position will be the end of record 2, but query 3 is still
-    // perfectly legal after query 1.
-    //
-    // IMPORTANT NOTE: it follows from the above discussion and example that next() is illegal after ANY
-    // seek-forward query EXCEPT those that are performed with length-1 intervals (queryInterval.start=queryinteval.stop).
-    // Indeed, in the example above, after, e.g., query 1 is performed, the iterator is "located" at the start
-    // of interval 1, but record1 and record 2 are already loaded. On the other hand, a subsequent call to next() would
-    // need to shift iterator's position by 1 base and return only record 1.
-    //
-    // This implementation tracks the query history and makes next() illegal after a seekforward query of length > 1,
-    // but re-enables next() again after a length-1 query.
-
-    public SeekableRODIterator(Object header,SAMSequenceDictionary rodDictionary,SAMSequenceDictionary referenceDictionary,GenomeLocParser parser,CloseableIterator<GATKFeature> it) {
-        this.header = header;
-        this.parser = parser;
-        this.sequenceDictionary = rodDictionary;
-        this.it = new PushbackIterator<GATKFeature>(it);
-        records = new LinkedList<GATKFeature>();
-        // the following is a trick: we would like the iterator to know the actual name assigned to
-        // the ROD implementing object we are working with. But the only way to do that is to
-        // get an instance of that ROD and query it for its name. Now, the only generic way we have at this point to instantiate
-        // the ROD is to make the underlying stream iterator to do it for us. So we are reading (or rather peeking into)
-        // the first line of the track data file just to get the ROD object created.
-        GATKFeature r = null;
-        if (this.it.hasNext()) r = this.it.element();
-        name = (r==null?null:r.getName());
-
-        curr_contig = referenceDictionary.getSequence(0).getSequenceName();
-    }
-
-    /**
-     * Gets the header associated with the backing input stream.
-     * @return the ROD header.
-     */
-    @Override
-    public Object getHeader() {
-        return header;
-    }
-
-    /**
-     * Gets the sequence dictionary associated with the backing input stream.
-     * @return sequence dictionary from the ROD header.
-     */
-    @Override
-    public SAMSequenceDictionary getSequenceDictionary() {
-        return sequenceDictionary;
-    }
-
-
-    /**
-     * Returns true if the data we iterate over has records associated with (any, not necessarily adjacent)
-     * genomic position farther along the reference.
-     * @return
-     */
-    public boolean hasNext() {
-
-        // if we did not walk to the very end of the interval(s) covered by currently loaded
-        // annotations (records), then we definitely have data for next genomic location
-        if ( curr_position < max_position ) return true;
-
-        // we are past currently loaded stuff; we have next if there are more lines to load:
-        return it.hasNext();
-    }
-
-    // Returns point location (i.e. genome loc of length 1) on the reference, to which this iterator will advance
-    // upon next call to next().
-    public GenomeLoc peekNextLocation() {
-        if ( curr_position + 1 <= max_position ) return parser.createGenomeLoc(curr_contig,curr_position+1);
-
-        // sorry, next reference position is not covered by the RODs we are currently holding. In this case,
-        // the location we will jump to upon next call to next() is the start of the next ROD record that we did
-        // not read yet:
-        if ( it.hasNext() ) {
-            GATKFeature r = it.element(); // peek, do not load!
-            return parser.createGenomeLoc(r.getLocation().getContig(),r.getLocation().getStart());
-        }
-        return null; // underlying iterator has no more records, there is no next location!
-    }
-
-    /** Advances iterator to the next genomic position that has ROD record(s) associated with it,
-     * and returns all the records overlapping with that position as a RODList. The location of the whole
-     * RODList object will be set to the smallest interval subsuming genomic intervals of all returned records.
-     * Note that next() is disabled (will throw an exception) after seekForward() operation with query length > 1.
-     * @return list of all RODs overlapping with the next "covered" genomic position
-     */
-     public RODRecordList next() {
-         if ( ! next_is_allowed )
-             throw new ReviewedGATKException("Illegal use of iterator: Can not advance iterator with next() after seek-forward query of length > 1");
-
-         curr_position++;
- //        curr_query_end = -1;
-
-         if ( curr_position <= max_position ) {
-
-             // we still have bases covered by at least one currently loaded record;
-             // we have to purge only subset of records, on which we moved past the end
-             purgeOutOfScopeRecords();
-         } else {
-             // ooops, we are past the end of all loaded records - kill them all at once,
-             // load next record and reinitialize by fastforwarding current position to the start of next record
-             records.clear();
-             GATKFeature r = it.next(); // if hasNext() previously returned true, we are guaranteed that this call to reader.next() is safe
-             records.add( r );
-             curr_contig = r.getLocation().getContig();
-             curr_position = r.getLocation().getStart();
-             max_position = r.getLocation().getStop();
-         }
-
-         // current position is ste and at this point 'records' only keeps those annotations, on which we did not reach the end yet
-         // (we might have reloaded records completely if it was necessary); but we are not guaranteed yet that we
-         // hold ALL the records overlapping with the current position. Time to check if we just walked into the interval(s)
-         // covered by new records, so we need to load them too:
-
-         while ( it.hasNext() ) {
-             GATKFeature r = it.element();
-             if ( r == null ) {
-                 it.next();
-                 continue;
-             }
-
-             GenomeLoc currentContig = parser.createOverEntireContig(curr_contig);
-             GenomeLoc thatContig = r.getLocation();
-
-             if ( currentContig.isPast(thatContig) )
-                 throw new UserException("LocationAwareSeekableRODIterator: contig " +r.getLocation().getContig() +
-                         " occurs out of order in track " + r.getName() );
-             if ( currentContig.isBefore(thatContig) ) break; // next record is on a higher contig, we do not need it yet...
-
-             if ( r.getLocation().getStart() < curr_position )
-                 throw new UserException("LocationAwareSeekableRODIterator: track "+r.getName() +
-                         " is out of coordinate order on contig "+r.getLocation() + " compared to " + curr_contig + ":" + curr_position);
-
-             if ( r.getLocation().getStart() > curr_position ) break; // next record starts after the current position; we do not need it yet
-
-             r = it.next(); // we got here only if we do need next record, time to load it for real
-
-             int stop = r.getLocation().getStop();
-             if ( stop < curr_position ) throw new ReviewedGATKException("DEBUG: encountered contig that should have been loaded earlier"); // this should never happen
-             if ( stop > max_position ) max_position = stop; // max_position keeps the rightmost stop position across all loaded records
-             records.add(r);
-         }
-
-         // 'records' and current position are fully updated. Last, we need to set the location of the whole track
-        // (collection of ROD records) to the genomic site we are currently looking at, and return the list
-
-         return new RODRecordListImpl(name,records, parser.createGenomeLoc(curr_contig,curr_position));
-     }
-
-    /**
-     * Removes from the underlying collection the last element returned by the
-     * iterator (optional operation).  This method can be called only once per
-     * call to <tt>next</tt>.  The behavior of an iterator is unspecified if
-     * the underlying collection is modified while the iteration is in
-     * progress in any way other than by calling this method.
-     *
-     * @throws UnsupportedOperationException if the <tt>remove</tt>
-     *                                       operation is not supported by this Iterator.
-     * @throws IllegalStateException         if the <tt>next</tt> method has not
-     *                                       yet been called, or the <tt>remove</tt> method has already
-     *                                       been called after the last call to the <tt>next</tt>
-     *                                       method.
-     */
-    public void remove() {
-        throw new UnsupportedOperationException("LocationAwareSeekableRODIterator does not implement remove() operation");
-    }
-
-
-    /**
-     * Returns the current "position" (not location!! ;) ) of this iterator. This method is used by the sharding
-     * system when it searches for available iterators in the pool that can be reused to resume traversal.
-     * When iterator is advanced using next(), current position
-     * is the same as 'location'. However, after a seekForward() query with extended interval, returned position
-     * will be set to the last position of the query interval, to disable (illegal) attempts to roll the iterator
-     * back and re-start traversal from current location.
-     * @return Current ending position of the iterator, or null if no position exists.
-     */
-    public GenomeLoc position() {
-        if ( curr_contig == null ) return null;
-        if ( curr_query_end > curr_position )  {
-            // do not attempt to reuse this iterator if the position we need it for lies before the end of last query performed
-            return parser.createGenomeLoc(curr_contig,curr_query_end,curr_query_end);
-        }
-        else {
-            return parser.createGenomeLoc(curr_contig,curr_position);
-        }
-    }
-
-    /**
-     * Seeks forward through the file until the specified interval is reached.
-     * The location object <code>interval</code> can be either a single point or an extended interval. All
-     * ROD records overlapping with the whole interval will be returned, or null if no such records exist.
-     *
-     * Query interval must start at or after the iterator's current location, or exception will be thrown.
-     *
-     * Query interval must end at or after the stop position of the previous query, if any, or an exception will
-     * be thrown: subsequent queries that end before the stop of previous ones are illegal.
-     *
-     * If seekForward() is performed to an extended (length > 1 i.e. start != stop) interval, next() operation becomes
-     * illegal (the iterator changes state). Only seekForward() calls are allowed thereafter, until a seekForward() call
-     * to a length-1 interval is performed, which re-enables next(). seekForward() queries with length-1 intervals can
-     * always be safely intermixed with next() (as long as ordering is respected and query intervals are at or after the
-     * current position).
-     *
-     * Note that in contrast to
-     * next() (which always advances current position of the iterator on the reference), this method scrolls
-     * forward ONLY if the specified interval is ahead of the current location of
-     * the iterator. However, if called again with the same 'interval' argument as before, seekForward will NOT
-     * advance, but will simply return the same ROD list as before.
-     *
-     *
-     * @param interval point-like genomic location to fastforward to.
-     * @return ROD object at (or overlapping with) the specified position, or null if no such ROD exists.
-     */
-    public RODRecordList seekForward(GenomeLoc interval) {
-
-        if ( interval.isBefore(parser.createOverEntireContig(curr_contig)) &&
-             !(interval.getStart() == 0 && interval.getStop() == 0 && interval.getContig().equals(curr_contig)) ) // This criteria is syntactic sugar for 'seek to right before curr_contig'
-            throw new ReviewedGATKException("Out of order query: query contig "+interval.getContig()+" is located before "+
-                                     "the iterator's current contig");
-        if ( interval.getContig().equals(curr_contig) ) {
-            if ( interval.getStart() < curr_position )
-                throw new ReviewedGATKException("Out of order query: query position "+interval +" is located before "+
-                        "the iterator's current position "+curr_contig + ":" + curr_position);
-            if ( interval.getStop() < curr_query_end )
-                throw new ReviewedGATKException("Unsupported querying sequence: current query interval " +
-                        interval+" ends before the end of previous query interval ("+curr_query_end+")");
-        }
-
-        curr_position = interval.getStart();
-        curr_query_end = interval.getStop();
-
-        next_is_allowed = ( curr_position == curr_query_end ); // we can call next() later only if interval length is 1
-
-        if (  interval.getContig().equals(curr_contig) &&  curr_position <= max_position ) {
-            // some of the intervals we are currently keeping do overlap with the query interval
-
-            purgeOutOfScopeRecords();
-        } else {
-            // clean up and get ready for fast-forwarding towards the requested position
-            records.clear();
-            max_position = -1;
-            curr_contig = interval.getContig();
-        }
-
-        // curr_contig and curr_position are set to where we asked to scroll to
-
-        while ( it.hasNext() ) {
-            GATKFeature r = it.next();
-            if ( r == null ) continue;
-
-            GenomeLoc currentContig = parser.createOverEntireContig(curr_contig);
-            GenomeLoc thatContig = r.getLocation();
-
-            if ( currentContig.isPast(thatContig) ) continue; // did not reach requested contig yet
-            if ( currentContig.isBefore(thatContig) ) {
-                it.pushback(r); // next record is on the higher contig, we do not need it yet...
-                break;
-            }
-
-            // we get here if we are on the requested contig:
-
-            if ( r.getLocation().getStop() < curr_position ) continue; // did not reach the requested interval yet
-
-            if ( r.getLocation().getStart() > curr_query_end ) {
-                // past the query interval
-                it.pushback(r);
-                break;
-            }
-
-            // we get here only if interval of the record r overlaps with query interval, so the record should be loaded
-            if ( r.getLocation().getStop() > max_position ) max_position = r.getLocation().getStop();
-            records.add(r);
-        }
-
-        if ( records.size() > 0 ) {
-            return new RODRecordListImpl(name,records,interval);
-        } else {
-            return null;
-        }
-
-    }
-
-    /**
-     * Removes records that end before the curr_position from the list of currently kept records. This is a
-     * convenience (private) shortcut that does not perform extensive checking. In particular, it assumes that
-     * curr_position <= max_position, as well as that we are still on the same contig.
-     */
-    private void purgeOutOfScopeRecords() {
-        Iterator<GATKFeature> i = records.iterator();
-        while ( i.hasNext() ) {
-            GATKFeature r = i.next();
-            if ( r.getLocation().getStop() < curr_position ) {
-                i.remove(); // we moved past the end of interval the record r is associated with, purge the record forever
-            }
-        }
-
-    }
-
-    @Override
-    public void close() {
-        if (this.it != null) ((CloseableIterator)this.it.getUnderlyingIterator()).close();
-    }
-
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/refdata/VariantContextAdaptors.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/refdata/VariantContextAdaptors.java
deleted file mode 100644
index 82a826c..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/refdata/VariantContextAdaptors.java
+++ /dev/null
@@ -1,399 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.refdata;
-
-import htsjdk.samtools.util.SequenceUtil;
-import htsjdk.tribble.Feature;
-import htsjdk.tribble.annotation.Strand;
-import htsjdk.tribble.dbsnp.OldDbSNPFeature;
-import htsjdk.tribble.gelitext.GeliTextFeature;
-import org.broadinstitute.gatk.engine.contexts.ReferenceContext;
-import org.broadinstitute.gatk.utils.GenomeLoc;
-import org.broadinstitute.gatk.utils.classloader.PluginManager;
-import org.broadinstitute.gatk.utils.codecs.hapmap.RawHapMapFeature;
-import org.broadinstitute.gatk.utils.variant.GATKVariantContextUtils;
-import htsjdk.variant.variantcontext.*;
-
-import java.util.*;
-
-/**
- * A terrible but temporary approach to converting objects to VariantContexts.  If you want to add a converter,
- * you need to create a adaptor object here and register a converter from your class to this object.  When tribble arrives,
- * we'll use a better approach.
- *
- * To add a new converter:
- *
- *   create a subclass of VCAdaptor, overloading the convert operator
- *   add it to the static map from input type -> converter where the input type is the object.class you want to convert
- *
- * That's it 
- *
- * @author depristo at broadinstitute.org
- */
-public class VariantContextAdaptors {
-    // --------------------------------------------------------------------------------------------------------------
-    //
-    // Generic support routines.  Do not modify
-    //
-    // --------------------------------------------------------------------------------------------------------------
-
-    private static Map<Class<? extends Feature>,VCAdaptor> adaptors = new HashMap<Class<? extends Feature>,VCAdaptor>();
-
-    static {
-        PluginManager<VCAdaptor> vcAdaptorManager = new PluginManager<VCAdaptor>(VCAdaptor.class);
-        List<VCAdaptor> adaptorInstances = vcAdaptorManager.createAllTypes();
-        for(VCAdaptor adaptor: adaptorInstances)
-            adaptors.put(adaptor.getAdaptableFeatureType(),adaptor);
-    }
-
-    public static boolean canBeConvertedToVariantContext(Object variantContainingObject) {
-        return adaptors.containsKey(variantContainingObject.getClass());
-    }
-
-    /** generic superclass */
-    public interface VCAdaptor {
-        /**
-         * Gets the type of feature that this adaptor can 'adapt' into a VariantContext.
-         * @return Type of adaptable feature.  Must be a Tribble feature class.
-         */
-        Class<? extends Feature> getAdaptableFeatureType();
-        VariantContext convert(String name, Object input, ReferenceContext ref);
-    }
-
-    public static VariantContext toVariantContext(String name, Object variantContainingObject, ReferenceContext ref) {
-        if ( ! adaptors.containsKey(variantContainingObject.getClass()) )
-            return null;
-        else {
-            return adaptors.get(variantContainingObject.getClass()).convert(name, variantContainingObject, ref);
-        }
-    }
-
-    // --------------------------------------------------------------------------------------------------------------
-    //
-    // From here below you can add adaptor classes for new rods (or other types) to convert to VC
-    //
-    // --------------------------------------------------------------------------------------------------------------
-    private static class VariantContextAdaptor implements VCAdaptor {
-        /**
-         * 'Null' adaptor; adapts variant contexts to variant contexts.
-         * @return VariantContext.
-         */
-        @Override
-        public Class<? extends Feature> getAdaptableFeatureType() { return VariantContext.class; }
-
-        // already a VC, just cast and return it
-        @Override        
-        public VariantContext convert(String name, Object input, ReferenceContext ref) {
-            return (VariantContext)input;
-        }
-    }
-
-    // --------------------------------------------------------------------------------------------------------------
-    //
-    // dbSNP to VariantContext
-    //
-    // --------------------------------------------------------------------------------------------------------------
-
-    private static class DBSnpAdaptor implements VCAdaptor {
-        private static boolean isSNP(OldDbSNPFeature feature) {
-            return feature.getVariantType().contains("single") && feature.getLocationType().contains("exact");
-        }
-
-        private static boolean isMNP(OldDbSNPFeature feature) {
-            return feature.getVariantType().contains("mnp") && feature.getLocationType().contains("range");
-        }
-
-        private static boolean isInsertion(OldDbSNPFeature feature) {
-            return feature.getVariantType().contains("insertion");
-        }
-
-        private static boolean isDeletion(OldDbSNPFeature feature) {
-            return feature.getVariantType().contains("deletion");
-        }
-
-        private static boolean isIndel(OldDbSNPFeature feature) {
-            return isInsertion(feature) || isDeletion(feature) || isComplexIndel(feature);
-        }
-
-        public static boolean isComplexIndel(OldDbSNPFeature feature) {
-            return feature.getVariantType().contains("in-del");
-        }
-
-        /**
-         * gets the alternate alleles.  This method should return all the alleles present at the location,
-         * NOT including the reference base.  This is returned as a string list with no guarantee ordering
-         * of alleles (i.e. the first alternate allele is not always going to be the allele with the greatest
-         * frequency).
-         *
-         * @return an alternate allele list
-         */
-        public static List<String> getAlternateAlleleList(OldDbSNPFeature feature) {
-            List<String> ret = new ArrayList<String>();
-            for (String allele : getAlleleList(feature))
-                if (!allele.equals(String.valueOf(feature.getNCBIRefBase()))) ret.add(allele);
-            return ret;
-        }
-
-        /**
-         * gets the alleles.  This method should return all the alleles present at the location,
-         * including the reference base.  The first allele should always be the reference allele, followed
-         * by an unordered list of alternate alleles.
-         *
-         * @return an alternate allele list
-         */
-        public static List<String> getAlleleList(OldDbSNPFeature feature) {
-            List<String> alleleList = new ArrayList<String>();
-            // add ref first
-            if ( feature.getStrand() == Strand.POSITIVE )
-                alleleList = Arrays.asList(feature.getObserved());
-            else
-                for (String str : feature.getObserved())
-                    alleleList.add(SequenceUtil.reverseComplement(str));
-            if ( alleleList.size() > 0 && alleleList.contains(feature.getNCBIRefBase())
-                    && !alleleList.get(0).equals(feature.getNCBIRefBase()) )
-                Collections.swap(alleleList, alleleList.indexOf(feature.getNCBIRefBase()), 0);
-
-            return alleleList;
-        }
-
-        /**
-         * Converts non-VCF formatted dbSNP records to VariantContext. 
-         * @return OldDbSNPFeature.
-         */
-        @Override
-        public Class<? extends Feature> getAdaptableFeatureType() { return OldDbSNPFeature.class; }
-
-        @Override        
-        public VariantContext convert(String name, Object input, ReferenceContext ref) {
-            OldDbSNPFeature dbsnp = (OldDbSNPFeature)input;
-
-            int index = dbsnp.getStart() - ref.getWindow().getStart() - 1;
-            if ( index < 0 )
-                return null; // we weren't given enough reference context to create the VariantContext
-
-            final byte refBaseForIndel = ref.getBases()[index];
-            final boolean refBaseIsDash = dbsnp.getNCBIRefBase().equals("-");
-
-            boolean addPaddingBase;
-            if ( isSNP(dbsnp) || isMNP(dbsnp) )
-                addPaddingBase = false;
-            else if ( isIndel(dbsnp) || dbsnp.getVariantType().contains("mixed") )
-                addPaddingBase = refBaseIsDash || GATKVariantContextUtils.requiresPaddingBase(stripNullDashes(getAlleleList(dbsnp)));
-            else
-                return null; // can't handle anything else
-
-            Allele refAllele;
-            if ( refBaseIsDash )
-                refAllele = Allele.create(refBaseForIndel, true);
-            else if ( ! Allele.acceptableAlleleBases(dbsnp.getNCBIRefBase()) )
-                return null;
-            else
-                refAllele = Allele.create((addPaddingBase ? (char)refBaseForIndel : "") + dbsnp.getNCBIRefBase(), true);
-
-            final List<Allele> alleles = new ArrayList<Allele>();
-            alleles.add(refAllele);
-
-            // add all of the alt alleles
-            for ( String alt : getAlternateAlleleList(dbsnp) ) {
-                if ( Allele.wouldBeNullAllele(alt.getBytes()))
-                    alt = "";
-                else if ( ! Allele.acceptableAlleleBases(alt) )
-                    return null;
-
-                alleles.add(Allele.create((addPaddingBase ? (char)refBaseForIndel : "") + alt, false));
-            }
-
-            final VariantContextBuilder builder = new VariantContextBuilder();
-            builder.source(name).id(dbsnp.getRsID());
-            builder.loc(dbsnp.getChr(), dbsnp.getStart() - (addPaddingBase ? 1 : 0), dbsnp.getEnd() - (addPaddingBase && refAllele.length() == 1 ? 1 : 0));
-            builder.alleles(alleles);
-            return builder.make();
-        }
-
-        private static List<String> stripNullDashes(final List<String> alleles) {
-            final List<String> newAlleles = new ArrayList<String>(alleles.size());
-            for ( final String allele : alleles ) {
-                if ( allele.equals("-") )
-                    newAlleles.add("");
-                else
-                    newAlleles.add(allele);
-            }
-            return newAlleles;
-        }
-    }
-
-    // --------------------------------------------------------------------------------------------------------------
-    //
-    // GELI to VariantContext
-    //
-    // --------------------------------------------------------------------------------------------------------------
-
-    private static class GeliTextAdaptor implements VCAdaptor {
-        /**
-         * Converts Geli text records to VariantContext. 
-         * @return GeliTextFeature.
-         */
-        @Override
-        public Class<? extends Feature> getAdaptableFeatureType() { return GeliTextFeature.class; }
-
-        /**
-         * convert to a Variant Context, given:
-         * @param name  the name of the ROD
-         * @param input the Rod object, in this case a RodGeliText
-         * @param ref   the reference context
-         * @return a VariantContext object
-         */
-        @Override
-        public VariantContext convert(String name, Object input, ReferenceContext ref) {
-            GeliTextFeature geli = (GeliTextFeature)input;
-            if ( ! Allele.acceptableAlleleBases(String.valueOf(geli.getRefBase())) )
-                return null;
-            Allele refAllele = Allele.create(String.valueOf(geli.getRefBase()), true);
-
-            // make sure we can convert it
-            if ( geli.getGenotype().isHet() || !geli.getGenotype().containsBase(geli.getRefBase())) {
-                // add the reference allele
-                List<Allele> alleles = new ArrayList<Allele>();
-                List<Allele> genotypeAlleles = new ArrayList<Allele>();
-                // add all of the alt alleles
-                for ( char alt : geli.getGenotype().toString().toCharArray() ) {
-                    if ( ! Allele.acceptableAlleleBases(String.valueOf(alt)) ) {
-                        return null;
-                    }
-                    Allele allele = Allele.create(String.valueOf(alt), false);
-                    if (!alleles.contains(allele) && !refAllele.basesMatch(allele.getBases())) alleles.add(allele);
-
-                    // add the allele, first checking if it's reference or not
-                    if (!refAllele.basesMatch(allele.getBases())) genotypeAlleles.add(allele);
-                    else genotypeAlleles.add(refAllele);
-                }
-
-                Map<String, Object> attributes = new HashMap<String, Object>();
-                Collection<Genotype> genotypes = new ArrayList<Genotype>();
-                Genotype call = GenotypeBuilder.create(name, genotypeAlleles);
-
-                // add the call to the genotype list, and then use this list to create a VariantContext
-                genotypes.add(call);
-                alleles.add(refAllele);
-                GenomeLoc loc = ref.getGenomeLocParser().createGenomeLoc(geli.getChr(),geli.getStart());
-                return new VariantContextBuilder(name, loc.getContig(), loc.getStart(), loc.getStop(), alleles).genotypes(genotypes).log10PError(-1 * geli.getLODBestToReference()).attributes(attributes).make();
-            } else
-                return null; // can't handle anything else
-        }
-    }
-
-    // --------------------------------------------------------------------------------------------------------------
-    //
-    // HapMap to VariantContext
-    //
-    // --------------------------------------------------------------------------------------------------------------
-
-    private static class HapMapAdaptor implements VCAdaptor {
-        /**
-         * Converts HapMap records to VariantContext. 
-         * @return HapMapFeature.
-         */
-        @Override
-        public Class<? extends Feature> getAdaptableFeatureType() { return RawHapMapFeature.class; }
-
-        /**
-         * convert to a Variant Context, given:
-         * @param name  the name of the ROD
-         * @param input the Rod object, in this case a RodGeliText
-         * @param ref   the reference context
-         * @return a VariantContext object
-         */
-        @Override        
-        public VariantContext convert(String name, Object input, ReferenceContext ref) {
-            if ( ref == null )
-                throw new UnsupportedOperationException("Conversion from HapMap to VariantContext requires a reference context");
-
-            RawHapMapFeature hapmap = (RawHapMapFeature)input;
-
-            int index = hapmap.getStart() - ref.getWindow().getStart();
-            if ( index < 0 )
-                return null; // we weren't given enough reference context to create the VariantContext
-
-            HashSet<Allele> alleles = new HashSet<Allele>();
-            Allele refSNPAllele = Allele.create(ref.getBase(), true);
-            int deletionLength = -1;
-
-            Map<String, Allele> alleleMap = hapmap.getActualAlleles();
-            // use the actual alleles, if available
-            if ( alleleMap != null ) {
-                alleles.addAll(alleleMap.values());
-                Allele deletionAllele = alleleMap.get(RawHapMapFeature.INSERTION);  // yes, use insertion here (since we want the reference bases)
-                if ( deletionAllele != null && deletionAllele.isReference() )
-                    deletionLength = deletionAllele.length();
-            } else {
-                // add the reference allele for SNPs
-                alleles.add(refSNPAllele);
-            }
-
-            // make a mapping from sample to genotype
-            String[] samples = hapmap.getSampleIDs();
-            String[] genotypeStrings = hapmap.getGenotypes();
-
-            GenotypesContext genotypes = GenotypesContext.create(samples.length);
-            for ( int i = 0; i < samples.length; i++ ) {
-                // ignore bad genotypes
-                if ( genotypeStrings[i].contains("N") )
-                    continue;
-
-                String a1 = genotypeStrings[i].substring(0,1);
-                String a2 = genotypeStrings[i].substring(1);
-                ArrayList<Allele> myAlleles = new ArrayList<Allele>(2);
-
-                // use the mapping to actual alleles, if available
-                if ( alleleMap != null ) {
-                    myAlleles.add(alleleMap.get(a1));
-                    myAlleles.add(alleleMap.get(a2));
-                } else {
-                    // ignore indels (which we can't handle without knowing the alleles)
-                    if ( genotypeStrings[i].contains("I") || genotypeStrings[i].contains("D") )
-                        continue;
-
-                    Allele allele1 = Allele.create(a1, refSNPAllele.basesMatch(a1));
-                    Allele allele2 = Allele.create(a2, refSNPAllele.basesMatch(a2));
-
-                    myAlleles.add(allele1);
-                    myAlleles.add(allele2);
-                    alleles.add(allele1);
-                    alleles.add(allele2);
-                }
-
-                Genotype g = GenotypeBuilder.create(samples[i], myAlleles);
-                genotypes.add(g);
-            }
-
-            long end = hapmap.getEnd();
-            if ( deletionLength > 0 )
-                end += (deletionLength - 1);
-            VariantContext vc = new VariantContextBuilder(name, hapmap.getChr(), hapmap.getStart(), end, alleles).id(hapmap.getName()).genotypes(genotypes).make();
-            return vc;
-       }
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/refdata/package-info.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/refdata/package-info.java
deleted file mode 100644
index e9e9714..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/refdata/package-info.java
+++ /dev/null
@@ -1,26 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.refdata;
\ No newline at end of file
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/refdata/tracks/FeatureManager.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/refdata/tracks/FeatureManager.java
deleted file mode 100644
index d466f3f..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/refdata/tracks/FeatureManager.java
+++ /dev/null
@@ -1,280 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.refdata.tracks;
-
-import com.google.java.contract.Ensures;
-import com.google.java.contract.Requires;
-import htsjdk.tribble.Feature;
-import htsjdk.tribble.FeatureCodec;
-import htsjdk.tribble.NameAwareCodec;
-import org.broadinstitute.gatk.engine.refdata.ReferenceDependentFeatureCodec;
-import org.broadinstitute.gatk.engine.refdata.utils.RMDTriplet;
-import org.broadinstitute.gatk.utils.GenomeLocParser;
-import org.broadinstitute.gatk.utils.classloader.PluginManager;
-import htsjdk.variant.vcf.AbstractVCFCodec;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-import org.broadinstitute.gatk.utils.help.GATKDocUtils;
-
-import java.io.File;
-import java.util.*;
-
-
-/**
- * Class for managing Tribble Feature readers available to the GATK.  The features
- * are dynamically determined via a PluginManager.  This class provides convenient
- * getter methods for obtaining FeatureDescriptor objects that collect all of the
- * useful information about the Tribble Codec, Feature, and name in one place.
- *
- * @author depristo
- */
-public class FeatureManager  {
-    public static class FeatureDescriptor implements Comparable<FeatureDescriptor> {
-        final String name;
-        final FeatureCodec codec;
-
-        public FeatureDescriptor(final String name, final FeatureCodec codec) {
-            this.name = name;
-            this.codec = codec;
-        }
-
-        public String getName() {
-            return name;
-        }
-        public String getSimpleFeatureName() { return getFeatureClass().getSimpleName(); }
-        public FeatureCodec getCodec() {
-            return codec;
-        }
-        public Class getCodecClass() { return codec.getClass(); }
-        public Class getFeatureClass() { return codec.getFeatureType(); }
-
-        @Override
-        public String toString() {
-            return String.format("FeatureDescriptor name=%s codec=%s feature=%s",
-                    getName(), getCodecClass().getName(), getFeatureClass().getName());
-        }
-
-        @Override
-        public int compareTo(FeatureDescriptor o) {
-            return getName().compareTo(o.getName());
-        }
-    }
-
-    private final PluginManager<FeatureCodec> pluginManager;
-    private final Collection<FeatureDescriptor> featureDescriptors = new TreeSet<FeatureDescriptor>();
-    private final boolean lenientVCFProcessing;
-
-    /**
-     * Construct a FeatureManager without a master VCF header
-     */
-    public FeatureManager() {
-        this(false);
-    }
-
-    public FeatureManager(final boolean lenientVCFProcessing) {
-        this.lenientVCFProcessing = lenientVCFProcessing;
-        pluginManager = new PluginManager<FeatureCodec>(FeatureCodec.class, "Codecs", "Codec");
-
-        for (final String rawName: pluginManager.getPluginsByName().keySet()) {
-            FeatureCodec codec = pluginManager.createByName(rawName);
-            String name = rawName.toUpperCase();
-            FeatureDescriptor featureDescriptor = new FeatureDescriptor(name, codec);
-            featureDescriptors.add(featureDescriptor);
-        }
-    }
-
-    /**
-     * Return the FeatureDescriptor whose getCodecClass().equals(codecClass).
-     *
-     * @param codecClass
-     * @return A FeatureDescriptor or null if none is found
-     */
-    @Requires("codecClass != null")
-    public FeatureDescriptor getByCodec(Class codecClass) {
-        for ( FeatureDescriptor descriptor : featureDescriptors )
-            if ( descriptor.getCodecClass().equals(codecClass) )
-                return descriptor;
-        return null;
-    }
-
-    /**
-     * Returns a collection of FeatureDescriptors that emit records of type featureClass
-     *
-     * @param featureClass
-     * @return A FeatureDescriptor or null if none is found
-     */
-    @Requires("featureClass != null")
-    public <T extends Feature> Collection<FeatureDescriptor> getByFeature(Class<T> featureClass) {
-        Set<FeatureDescriptor> consistentDescriptors = new TreeSet<FeatureDescriptor>();
-
-        if (featureClass == null)
-            throw new IllegalArgumentException("trackRecordType value is null, please pass in an actual class object");
-
-        for ( FeatureDescriptor descriptor : featureDescriptors ) {
-            if ( featureClass.isAssignableFrom(descriptor.getFeatureClass()))
-                consistentDescriptors.add(descriptor);
-        }
-        return consistentDescriptors;
-    }
-
-    /**
-     * Return the FeatureDescriptor with getID().equals(name)
-     *
-     * @param name
-     * @return A FeatureDescriptor or null if none is found
-     */
-    @Requires("name != null")
-    public FeatureDescriptor getByName(String name) {
-        for ( FeatureDescriptor descriptor : featureDescriptors )
-            if ( descriptor.getName().equalsIgnoreCase(name) )
-                return descriptor;
-        return null;
-    }
-
-    /**
-     * Returns the FeatureDescriptor that can read the contexts of File file, is one can be determined
-     *
-     * @param file
-     * @return A FeatureDescriptor or null if none is found
-     */
-    @Requires({"file != null", "file.isFile()", "file.canRead()"})
-    public FeatureDescriptor getByFiletype(File file) {
-        List<FeatureDescriptor> canParse = new ArrayList<FeatureDescriptor>();
-        for ( FeatureDescriptor descriptor : featureDescriptors )
-            if ( descriptor.getCodec().canDecode(file.getPath()) ) {
-                canParse.add(descriptor);
-            }
-
-        if ( canParse.size() == 0 )
-            return null;
-        else if ( canParse.size() > 1 )
-            throw new ReviewedGATKException("BUG: multiple feature descriptors can read file " + file + ": " + canParse);
-        else
-            return canParse.get(0);
-    }
-
-    /**
-     * Returns the FeatureDescriptor associated with the type described by triplet, or null if none is found
-     * @param triplet
-     * @return
-     */
-    @Requires("triplet != null")
-    public FeatureDescriptor getByTriplet(RMDTriplet triplet) {
-        return getByName(triplet.getType());
-    }
-
-    /**
-     * @return all of the FeatureDescriptors available to the GATK.  Never null
-     */
-    @Ensures("result != null")
-    public Collection<FeatureDescriptor> getFeatureDescriptors() {
-        return Collections.unmodifiableCollection(featureDescriptors);
-    }
-
-
-    /**
-     * Returns a list of the available tribble track names (vcf,dbsnp,etc) that we can load
-     * @return
-     */
-    @Ensures("result != null")
-    public String userFriendlyListOfAvailableFeatures() {
-        return userFriendlyListOfAvailableFeatures(Feature.class);
-    }
-
-    /**
-     * Returns a list of the available tribble track names (vcf,dbsnp,etc) that we can load
-     * restricted to only Codecs producting Features consistent with the requiredFeatureType
-     * @return
-     */
-    @Ensures("result != null")
-    public String userFriendlyListOfAvailableFeatures(Class<? extends Feature> requiredFeatureType) {
-        final String nameHeader="Name", featureHeader = "FeatureType", docHeader="Documentation";
-
-        int maxNameLen = nameHeader.length(), maxFeatureNameLen = featureHeader.length();
-        for ( final FeatureDescriptor descriptor : featureDescriptors ) {
-            if ( requiredFeatureType.isAssignableFrom(descriptor.getFeatureClass()) ) {
-                maxNameLen = Math.max(maxNameLen, descriptor.getName().length());
-                maxFeatureNameLen = Math.max(maxFeatureNameLen, descriptor.getSimpleFeatureName().length());
-            }
-        }
-
-        StringBuilder docs = new StringBuilder();
-        String format = "%" + maxNameLen + "s   %" + maxFeatureNameLen + "s   %s%n";
-        docs.append(String.format(format, nameHeader, featureHeader, docHeader));
-        for ( final FeatureDescriptor descriptor : featureDescriptors ) {
-            if ( requiredFeatureType.isAssignableFrom(descriptor.getFeatureClass()) ) {
-                final String DocURL = GATKDocUtils.helpLinksToGATKDocs(descriptor.getCodecClass());
-                final String oneDoc;
-                if ( DocURL.contains("_sting_") ) {
-                    oneDoc = String.format(format,
-                            descriptor.getName(),
-                            descriptor.getSimpleFeatureName(),
-                            DocURL);
-                } else {
-                    oneDoc = String.format(format,
-                            descriptor.getName(),
-                            descriptor.getSimpleFeatureName(),
-                            "(this is an external codec and is not documented within GATK)");
-                }
-
-                docs.append(oneDoc);
-            }
-        }
-
-        return docs.toString();
-    }
-
-    /**
-     * Create a new FeatureCodec of the type described in descriptor, assigning it the
-     * name (if possible) and providing it the genomeLocParser (where necessary)
-     *
-     * @param descriptor FeatureDescriptor of the Tribble FeatureCodec we want to create
-     * @param name the name to assign this codec
-     * @param genomeLocParser GenomeLocParser for ReferenceDependentFeatureCodecs
-     * @param remappedSampleName replacement sample name for single-sample vcfs, or null if we're not performing
-     *                           sample name remapping
-     * @return the feature codec itself
-     */
-    @Requires({"descriptor != null", "name != null", "genomeLocParser != null"})
-    @Ensures("result != null")
-    public FeatureCodec createCodec(final FeatureDescriptor descriptor, final String name, final GenomeLocParser genomeLocParser,
-                                    final String remappedSampleName) {
-        FeatureCodec codex = pluginManager.createByType(descriptor.getCodecClass());
-        if ( codex instanceof NameAwareCodec )
-            ((NameAwareCodec)codex).setName(name);
-        if ( codex instanceof ReferenceDependentFeatureCodec )
-            ((ReferenceDependentFeatureCodec)codex).setGenomeLocParser(genomeLocParser);
-        if ( codex instanceof AbstractVCFCodec ) {
-            if ( lenientVCFProcessing ) {
-                ((AbstractVCFCodec)codex).disableOnTheFlyModifications();
-            }
-            if ( remappedSampleName != null ) {
-                ((AbstractVCFCodec)codex).setRemappedSampleName(remappedSampleName);
-            }
-        }
-
-        return codex;
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/refdata/tracks/IndexDictionaryUtils.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/refdata/tracks/IndexDictionaryUtils.java
deleted file mode 100644
index 5c18d3a..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/refdata/tracks/IndexDictionaryUtils.java
+++ /dev/null
@@ -1,114 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.refdata.tracks;
-
-import htsjdk.samtools.SAMSequenceDictionary;
-import htsjdk.samtools.SAMSequenceRecord;
-import org.apache.log4j.Logger;
-import htsjdk.tribble.index.Index;
-import htsjdk.tribble.index.MutableIndex;
-import org.broadinstitute.gatk.engine.arguments.ValidationExclusion;
-import org.broadinstitute.gatk.utils.SequenceDictionaryUtils;
-
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.TreeSet;
-
-/**
- * Utilities for working with Sequence Dictionaries embedded in tribble indices
- *
- * @author Your Name
- * @since Date created
- */
-public class IndexDictionaryUtils {
-    private final static Logger logger = Logger.getLogger(IndexDictionaryUtils.class);
-
-    // a constant we use for marking sequence dictionary entries in the Tribble index property list
-    public static final String SequenceDictionaryPropertyPredicate = "DICT:";
-
-    /**
-     * get the sequence dictionary from the track, if available.  If not, make it from the contig list that is always in the index
-     * @param index the index file to use
-     * @return a SAMSequenceDictionary if available, null if unavailable
-     */
-    public static SAMSequenceDictionary getSequenceDictionaryFromProperties(Index index) {
-        SAMSequenceDictionary dict = new SAMSequenceDictionary();
-        for (Map.Entry<String,String> entry : index.getProperties().entrySet()) {
-            if (entry.getKey().startsWith(SequenceDictionaryPropertyPredicate))
-                dict.addSequence(new SAMSequenceRecord(entry.getKey().substring(SequenceDictionaryPropertyPredicate.length() , entry.getKey().length()),
-                        Integer.valueOf(entry.getValue())));
-        }
-        return dict;
-    }
-
-    /**
-     * create the sequence dictionary with the contig list; a backup approach
-     * @param index the index file to use
-     * @param dict the sequence dictionary to add contigs to
-     * @return the filled-in sequence dictionary
-     */
-    static SAMSequenceDictionary createSequenceDictionaryFromContigList(final Index index, final SAMSequenceDictionary dict) {
-        final List<String> seqNames = index.getSequenceNames();
-        if (seqNames == null) {
-            return dict;
-        }
-        for (final String name : seqNames) {
-            SAMSequenceRecord seq = new SAMSequenceRecord(name, 0);
-            dict.addSequence(seq);
-        }
-        return dict;
-    }
-
-    /**
-     *  Sets the sequence dictionary of the given index.  THE INDEX MUST BE MUTABLE (i.e. not Tabix).
-     *
-     * @param index the (mutable) index file to use
-     * @param dict  the dictionary to use
-     */
-    public static void setIndexSequenceDictionary(Index index, SAMSequenceDictionary dict) {
-        for ( SAMSequenceRecord seq : dict.getSequences() ) {
-            final String contig = IndexDictionaryUtils.SequenceDictionaryPropertyPredicate + seq.getSequenceName();
-            final String length = String.valueOf(seq.getSequenceLength());
-            ((MutableIndex)index).addProperty(contig, length);
-        }
-    }
-
-    public static void validateTrackSequenceDictionary(final String trackName,
-                                                       final SAMSequenceDictionary trackDict,
-                                                       final SAMSequenceDictionary referenceDict,
-                                                       final ValidationExclusion.TYPE validationExclusionType ) {
-        // if the sequence dictionary is empty (as well as null which means it doesn't have a dictionary), skip validation
-        if (trackDict == null || trackDict.size() == 0)
-            logger.warn("Track " + trackName + " doesn't have a sequence dictionary built in, skipping dictionary validation");
-        else {
-            Set<String> trackSequences = new TreeSet<String>();
-            for (SAMSequenceRecord dictionaryEntry : trackDict.getSequences())
-                trackSequences.add(dictionaryEntry.getSequenceName());
-            SequenceDictionaryUtils.validateDictionaries(logger, validationExclusionType, trackName, trackDict, "reference", referenceDict, false, null);
-        }
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/refdata/tracks/RMDTrack.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/refdata/tracks/RMDTrack.java
deleted file mode 100644
index 51cb8f4..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/refdata/tracks/RMDTrack.java
+++ /dev/null
@@ -1,147 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.refdata.tracks;
-
-import htsjdk.samtools.SAMSequenceDictionary;
-import htsjdk.samtools.util.CloseableIterator;
-import org.apache.log4j.Logger;
-import htsjdk.tribble.AbstractFeatureReader;
-import htsjdk.tribble.CloseableTribbleIterator;
-import htsjdk.tribble.Feature;
-import htsjdk.tribble.FeatureCodec;
-import org.broadinstitute.gatk.engine.refdata.utils.FeatureToGATKFeatureIterator;
-import org.broadinstitute.gatk.engine.refdata.utils.GATKFeature;
-import org.broadinstitute.gatk.utils.GenomeLoc;
-import org.broadinstitute.gatk.utils.GenomeLocParser;
-import org.broadinstitute.gatk.utils.exceptions.UserException;
-
-import java.io.File;
-import java.io.IOException;
-
-
-/**
- * @author aaron
- *         <p/>
- *         Class RMDTrack
- *         <p/>
- *         the basics of what a reference metadata track must contain.
- */
-public class RMDTrack {
-    private final static Logger logger = Logger.getLogger(RMDTrackBuilder.class);
-
-    // the basics of a track:
-    private final Class type;           // our type
-    private final String name;          // the name
-    private final File file;            // the associated file we create the reader from
-
-    // our feature reader - allows queries
-    private AbstractFeatureReader reader;
-
-    // our sequence dictionary, which can be null
-    private final SAMSequenceDictionary dictionary;
-
-    /**
-     * Parser to use when creating/parsing GenomeLocs.
-     */
-    private final GenomeLocParser genomeLocParser;
-
-    // our codec type
-    private final FeatureCodec codec;
-
-    public Class getType() {
-        return type;
-    }
-
-    public String getName() {
-        return name;
-    }
-
-    public File getFile() {
-        return file;
-    }
-
-    /**
-     * Create a track
-     *
-     * @param type the type of track, used for track lookup
-     * @param name the name of this specific track
-     * @param file the associated file, for reference or recreating the reader
-     * @param reader the feature reader to use as the underlying data source
-     * @param dict the sam sequence dictionary
-     * @param codec the feature codec we use to decode this type
-     */
-    public RMDTrack(Class type, String name, File file, AbstractFeatureReader reader, SAMSequenceDictionary dict, GenomeLocParser genomeLocParser, FeatureCodec codec) {
-        this.type = type;
-        this.name = name;
-        this.file = file;
-        this.reader = reader;
-        this.dictionary = dict;
-        this.genomeLocParser = genomeLocParser;
-        this.codec = codec;
-    }
-
-    /**
-     * @return how to get an iterator of the underlying data.  This is all a track has to support,
-     *         but other more advanced tracks support the query interface
-     */
-    public CloseableIterator<GATKFeature> getIterator() {
-        try {
-            return new FeatureToGATKFeatureIterator(genomeLocParser,reader.iterator(),this.getName());
-        } catch (IOException e) {
-            throw new UserException.CouldNotReadInputFile(getFile(), "Unable to read from file", e);
-        }
-    }
-
-    public CloseableIterator<GATKFeature> query(GenomeLoc interval) throws IOException {
-        CloseableTribbleIterator<Feature> iter = reader.query(interval.getContig(),interval.getStart(),interval.getStop());
-        return new FeatureToGATKFeatureIterator(genomeLocParser, iter, this.getName());
-    }
-
-    public void close() {
-        try {
-            reader.close();
-        } catch (IOException e) {
-            throw new UserException.MalformedFile("Unable to close reader " + reader.toString(),e);
-        }
-        reader = null;
-    }
-
-    /**
-     * get the sequence dictionary from the track, if available
-     * @return a SAMSequenceDictionary if available, null if unavailable
-     */
-    public SAMSequenceDictionary getSequenceDictionary() {
-        return dictionary;
-    }
-
-    public Object getHeader() {
-        return reader.getHeader();
-    }
-
-    public FeatureCodec getCodec() {
-        return codec;
-    }
-}
\ No newline at end of file
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/refdata/tracks/RMDTrackBuilder.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/refdata/tracks/RMDTrackBuilder.java
deleted file mode 100644
index dc9e967..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/refdata/tracks/RMDTrackBuilder.java
+++ /dev/null
@@ -1,430 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.refdata.tracks;
-
-import htsjdk.samtools.SAMSequenceDictionary;
-import org.apache.log4j.Logger;
-import htsjdk.tribble.AbstractFeatureReader;
-import htsjdk.tribble.FeatureCodec;
-import htsjdk.tribble.Tribble;
-import htsjdk.tribble.TribbleException;
-import htsjdk.tribble.index.Index;
-import htsjdk.tribble.index.IndexFactory;
-import htsjdk.tribble.util.LittleEndianOutputStream;
-import org.broadinstitute.gatk.utils.commandline.Tags;
-import org.broadinstitute.gatk.engine.GenomeAnalysisEngine;
-import org.broadinstitute.gatk.engine.arguments.ValidationExclusion;
-import org.broadinstitute.gatk.engine.io.stubs.VCFWriterArgumentTypeDescriptor;
-import org.broadinstitute.gatk.engine.refdata.utils.RMDTriplet;
-import org.broadinstitute.gatk.engine.refdata.utils.RMDTriplet.RMDStorageType;
-import org.broadinstitute.gatk.utils.GenomeLocParser;
-import org.broadinstitute.gatk.utils.collections.Pair;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-import org.broadinstitute.gatk.utils.exceptions.UserException;
-import org.broadinstitute.gatk.utils.file.FSLockWithShared;
-import org.broadinstitute.gatk.utils.instrumentation.Sizeof;
-
-import java.io.File;
-import java.io.FileOutputStream;
-import java.io.IOException;
-import java.util.Map;
-
-
-/**
- *
- * @author aaron
- *                                           `
- * Class RMDTrackBuilder
- *
- * This class keeps track of the available codecs, and knows how to put together a track of
- * that gets iterators from the FeatureReader using Tribble.
- *
- */
-public class RMDTrackBuilder { // extends PluginManager<FeatureCodec> {
-    /**
-     * our log, which we use to capture anything from this class
-     */
-    private final static Logger logger = Logger.getLogger(RMDTrackBuilder.class);
-
-    // private sequence dictionary we use to set our tracks with
-    private final SAMSequenceDictionary dict;
-
-    /**
-     * Private genome loc parser to use when building out new locs.
-     */
-    private final GenomeLocParser genomeLocParser;
-
-    /**
-     * Validation exclusions, for validating the sequence dictionary.
-     */
-    private ValidationExclusion.TYPE validationExclusionType;
-
-    private final FeatureManager featureManager;
-
-    // If true, do not attempt to create index files if they don't exist or are outdated, and don't
-    // make any file lock acquisition calls on the index files.
-    private final boolean disableAutoIndexCreation;
-
-    // Map of file name -> new sample name used when performing on-the-fly sample renaming
-    private final Map<String, String> sampleRenameMap;
-
-    /**
-     * Construct an RMDTrackerBuilder, allowing the user to define tracks to build after-the-fact.  This is generally
-     * used when walkers want to directly manage the ROD system for whatever reason.  Before using this constructor,
-     * please talk through your approach with the SE team.
-     * @param dict Sequence dictionary to use.
-     * @param genomeLocParser Location parser to use.
-     * @param validationExclusionType Types of validations to exclude, for sequence dictionary verification.
-     * @param disableAutoIndexCreation Do not auto-create index files, and do not use file locking when accessing index files.
-     *                                 UNSAFE in general (because it causes us not to lock index files before reading them) --
-     *                                 suitable only for test suite use.
-     * @param sampleRenameMap Map of file name -> new sample name used when performing on-the-fly sample renaming
-     */
-    public RMDTrackBuilder(final SAMSequenceDictionary dict,
-                           final GenomeLocParser genomeLocParser,
-                           final ValidationExclusion.TYPE validationExclusionType,
-                           final boolean disableAutoIndexCreation,
-                           final Map<String, String> sampleRenameMap) {
-        this.dict = dict;
-        this.validationExclusionType = validationExclusionType;
-        this.genomeLocParser = genomeLocParser;
-        this.featureManager = new FeatureManager(GenomeAnalysisEngine.lenientVCFProcessing(validationExclusionType));
-        this.disableAutoIndexCreation = disableAutoIndexCreation;
-        this.sampleRenameMap = sampleRenameMap;
-    }
-
-    /**
-     * Return the feature manager this RMDTrackBuilder is using the create tribble tracks
-     *
-     * @return
-     */
-    public FeatureManager getFeatureManager() {
-        return featureManager;
-    }
-
-    /**
-     * create a RMDTrack of the specified type
-     *
-     * @param fileDescriptor a description of the type of track to build.
-     *
-     * @return an instance of the track
-     */
-    public RMDTrack createInstanceOfTrack(RMDTriplet fileDescriptor) {
-        String name = fileDescriptor.getName();
-        File inputFile = new File(fileDescriptor.getFile());
-
-        FeatureManager.FeatureDescriptor descriptor = getFeatureManager().getByTriplet(fileDescriptor);
-        if (descriptor == null)
-            throw new UserException.BadArgumentValue("-B",fileDescriptor.getType());
-
-        // return a feature reader track
-        Pair<AbstractFeatureReader, SAMSequenceDictionary> pair;
-        if (VCFWriterArgumentTypeDescriptor.isCompressed(inputFile.toString()))
-            pair = createTabixIndexedFeatureSource(descriptor, name, inputFile);
-        else
-            pair = getFeatureSource(descriptor, name, inputFile, fileDescriptor.getStorageType());
-        if (pair == null) throw new UserException.CouldNotReadInputFile(inputFile, "Unable to make the feature reader for input file");
-        return new RMDTrack(descriptor.getCodecClass(), name, inputFile, pair.first, pair.second, genomeLocParser, createCodec(descriptor, name, inputFile));
-    }
-
-    /**
-     * Convenience method simplifying track creation.  Assume unnamed track based on a file rather than a stream.
-     * @param codecClass Type of Tribble codec class to build.
-     * @param inputFile Input file type to use.
-     * @return An RMDTrack, suitable for accessing reference metadata.
-     */
-    public RMDTrack createInstanceOfTrack(Class codecClass, File inputFile) {
-        final FeatureManager.FeatureDescriptor descriptor = getFeatureManager().getByCodec(codecClass);
-
-        if (descriptor == null)
-            throw new ReviewedGATKException("Unable to find type name for codec class " + codecClass.getName());
-
-        return createInstanceOfTrack(new RMDTriplet("anonymous",descriptor.getName(),inputFile.getAbsolutePath(),RMDStorageType.FILE,new Tags()));
-    }
-
-    /**
-     * create a feature reader, without assuming there exists an index.  This code assumes the feature
-     * reader of the appropriate type will figure out what the right index type is, and determine if it
-     * exists.
-     *
-     * @param descriptor the FeatureDescriptor describing the FeatureCodec we want to create
-     * @param name the name of the track
-     * @param inputFile the file to load
-     * @return a feature reader implementation
-     */
-    private Pair<AbstractFeatureReader, SAMSequenceDictionary> createTabixIndexedFeatureSource(FeatureManager.FeatureDescriptor descriptor, String name, File inputFile) {
-        // we might not know the index type, try loading with the default reader constructor
-        logger.debug("Attempting to load " + inputFile + " as a tabix indexed file without validating it");
-        try {
-            // getFeatureReader will detect that it's Tabix
-            return new Pair<>(AbstractFeatureReader.getFeatureReader(inputFile.getAbsolutePath(), createCodec(descriptor, name, inputFile)), null);
-        } catch (TribbleException e) {
-            throw new UserException(e.getMessage(), e);
-        }
-    }
-
-    /**
-     * add a name to the codec, if it takes one
-     * @param descriptor the class to create a codec for
-     * @param name the name to assign this codec
-     * @param inputFile input file that we will be decoding
-     * @return the feature codec itself
-     */
-    private FeatureCodec createCodec(final FeatureManager.FeatureDescriptor descriptor, final String name, final File inputFile) {
-        // The remappedSampleName will be null if either no on-the-fly sample renaming was requested,
-        // or the user's sample rename map file didn't contain an entry for this file:
-        final String remappedSampleName = sampleRenameMap != null ? sampleRenameMap.get(inputFile.getAbsolutePath()) : null;
-
-        return featureManager.createCodec(descriptor, name, genomeLocParser, remappedSampleName);
-    }
-
-    /**
-     * create a feature source object given:
-     * @param descriptor the FeatureDescriptor describing the FeatureCodec we want to create
-     * @param name the name of the codec
-     * @param inputFile the tribble file to parse
-     * @param storageType How the RMD is streamed into the input file.
-     * @return the input file as a FeatureReader
-     */
-    private Pair<AbstractFeatureReader, SAMSequenceDictionary> getFeatureSource(FeatureManager.FeatureDescriptor descriptor,
-                                                                        String name,
-                                                                        File inputFile,
-                                                                        RMDStorageType storageType) {
-        // Feature source and sequence dictionary to use as the ultimate reference
-        AbstractFeatureReader featureSource = null;
-        SAMSequenceDictionary sequenceDictionary = null;
-
-        // Detect whether or not this source should be indexed.
-        boolean canBeIndexed = (storageType == RMDStorageType.FILE);
-
-        if(canBeIndexed) {
-            try {
-                Index index = loadIndex(inputFile, createCodec(descriptor, name, inputFile));
-                try { logger.info(String.format("  Index for %s has size in bytes %d", inputFile, Sizeof.getObjectGraphSize(index))); }
-                catch (ReviewedGATKException e) { }
-
-                sequenceDictionary = IndexDictionaryUtils.getSequenceDictionaryFromProperties(index);
-
-                // if we don't have a dictionary in the Tribble file, and we've set a dictionary for this builder, set it in the file if they match
-                if (sequenceDictionary.size() == 0 && dict != null) {
-                    validateAndUpdateIndexSequenceDictionary(inputFile, index, dict);
-
-                    if ( ! disableAutoIndexCreation ) {
-                        File indexFile = Tribble.indexFile(inputFile);
-                        try { // re-write the index
-                            writeIndexToDisk(index,indexFile,new FSLockWithShared(indexFile));
-                        } catch (IOException e) {
-                            logger.warn("Unable to update index with the sequence dictionary for file " + indexFile + "; this will not affect your run of the GATK");
-                        }
-                    }
-
-                    sequenceDictionary = IndexDictionaryUtils.getSequenceDictionaryFromProperties(index);
-                }
-
-                featureSource = AbstractFeatureReader.getFeatureReader(inputFile.getAbsolutePath(), createCodec(descriptor, name, inputFile), index);
-            }
-            catch (TribbleException e) {
-                throw new UserException(e.getMessage());
-            }
-            catch (IOException e) {
-                throw new UserException("I/O error loading or writing tribble index file for " + inputFile.getAbsolutePath(), e);
-            }
-        }
-        else {
-            featureSource = AbstractFeatureReader.getFeatureReader(inputFile.getAbsolutePath(), createCodec(descriptor, name, inputFile), false);
-        }
-
-        return new Pair<AbstractFeatureReader,SAMSequenceDictionary>(featureSource,sequenceDictionary);
-    }
-
-    /**
-     * create an index for the input file
-     * @param inputFile the input file
-     * @param codec the codec to use
-     * @return a linear index for the specified type
-     * @throws IOException if we cannot write the index file
-     */
-    public synchronized Index loadIndex( final File inputFile, final FeatureCodec codec) throws IOException {
-        final File indexFile = Tribble.indexFile(inputFile);
-        final FSLockWithShared lock = new FSLockWithShared(indexFile);
-        Index idx = null;
-
-        // If the index file exists and is readable, attempt to load it from disk. We'll get null back
-        // if a problem was discovered with the index file when it was inspected, and we'll get an
-        // in-memory index back in the case where the index file could not be locked.
-        if (indexFile.canRead()) {
-            idx = disableAutoIndexCreation ? loadFromDisk(inputFile, indexFile)  // load without locking if we're in disableAutoIndexCreation mode
-                                           : attemptToLockAndLoadIndexFromDisk(inputFile, codec, indexFile, lock);
-        }
-
-        // If we have an index, it means we either loaded it from disk without issue or we created an in-memory
-        // index due to not being able to acquire a lock.
-        if (idx != null) return idx;
-
-        // We couldn't read the file, or we discovered a problem with the index file, so continue on to making a new index
-        idx = createIndexInMemory(inputFile, codec);
-        if ( ! disableAutoIndexCreation ) {
-            writeIndexToDisk(idx, indexFile, lock);
-        }
-        return idx;
-    }
-
-    /**
-     * Attempt to acquire a shared lock and then load the index from disk. Returns an in-memory index if
-     * a lock could not be obtained. Returns null if a problem was discovered with the index file when it
-     * was examined (eg., it was out-of-date).
-     *
-     * @param inputFile the input file
-     * @param codec the codec to read from
-     * @param indexFile the index file itself
-     * @param lock the lock file
-     * @return an index, or null if we couldn't load one
-     * @throws IOException if we fail for FS issues
-     */
-    protected Index attemptToLockAndLoadIndexFromDisk( final File inputFile, final FeatureCodec codec, final File indexFile, final FSLockWithShared lock ) throws IOException {
-        boolean locked = false;
-        Index idx = null;
-
-        try {
-            locked = lock.sharedLock();
-
-            if ( ! locked ) { // can't lock file
-                logger.info(String.format("Could not acquire a shared lock on index file %s, falling back to using an in-memory index for this GATK run.",
-                                          indexFile.getAbsolutePath()));
-                idx = createIndexInMemory(inputFile, codec);
-            }
-            else {
-                idx = loadFromDisk(inputFile, indexFile);
-            }
-        } finally {
-            if (locked) lock.unlock();
-        }
-        return idx;
-    }
-
-    /**
-     * load the index from disk, checking for out of date indexes and old versions (both of which are deleted)
-     * @param inputFile the input file
-     * @param indexFile the input file, plus the index extension
-     * @return an Index, or null if we're unable to load
-     */
-    protected Index loadFromDisk( final File inputFile, final File indexFile ) {
-        logger.debug("Loading Tribble index from disk for file " + inputFile);
-        Index index = IndexFactory.loadIndex(indexFile.getAbsolutePath());
-
-        // check if the file is up-to date (filestamp and version check)
-        if (index.isCurrentVersion() && indexFile.lastModified() >= inputFile.lastModified())
-            return index;
-        else if (indexFile.lastModified() < inputFile.lastModified())
-            logger.warn("Index file " + indexFile + " is out of date (index older than input file), " +
-                        (disableAutoIndexCreation ? "falling back to an in-memory index" : "deleting and updating the index file"));
-        else // we've loaded an old version of the index, we want to remove it <-- currently not used, but may re-enable
-            logger.warn("Index file " + indexFile + " is out of date (old version), " +
-                        (disableAutoIndexCreation ? "falling back to an in-memory index" : "deleting and updating the index file"));
-
-        if ( ! disableAutoIndexCreation ) {
-            boolean deleted = indexFile.delete();
-            if (!deleted) logger.warn("Index file " + indexFile + " is out of date, but could not be removed; it will not be trusted (we'll try to rebuild an in-memory copy)");
-        }
-
-        return null;
-    }
-
-
-    /**
-     * attempt to write the index to disk
-     * @param index the index to write to disk
-     * @param indexFile the index file location
-     * @param lock the locking object
-     * @throws IOException when unable to create the new index
-     */
-    private void writeIndexToDisk( final Index index, final File indexFile, final FSLockWithShared lock ) throws IOException {
-        if ( disableAutoIndexCreation ) {
-            return;
-        }
-
-        boolean locked = false;
-
-        try {
-            locked = lock.exclusiveLock();
-
-            if (locked) {
-                logger.info("Writing Tribble index to disk for file " + indexFile);
-                LittleEndianOutputStream stream = new LittleEndianOutputStream(new FileOutputStream(indexFile));
-                index.write(stream);
-                stream.close();
-            }
-            else // we can't write it to disk, just store it in memory, tell them this
-                logger.warn("Unable to write to " + indexFile + " for the index file, creating index in memory only");
-
-            try { logger.info(String.format("  Index for %s has size in bytes %d", indexFile, Sizeof.getObjectGraphSize(index))); }
-            catch ( ReviewedGATKException e) { }
-        }
-        finally {
-            if (locked) lock.unlock();
-        }
-
-    }
-
-    /**
-     * create the index in memory, given the input file and feature codec
-     * @param inputFile the input file
-     * @param codec the codec
-     * @return a LinearIndex, given the file location
-     * @throws IOException when unable to create the index in memory
-     */
-    protected Index createIndexInMemory(File inputFile, FeatureCodec codec) {
-        // this can take a while, let them know what we're doing
-        logger.debug("Creating Tribble index in memory for file " + inputFile);
-        Index idx = IndexFactory.createDynamicIndex(inputFile, codec, IndexFactory.IndexBalanceApproach.FOR_SEEK_TIME);
-        validateAndUpdateIndexSequenceDictionary(inputFile, idx, dict);
-        return idx;
-    }
-
-    /**
-     * set the sequence dictionary of the track.  This function checks that the contig listing of the underlying file is compatible.
-     * (that each contig in the index is in the sequence dictionary).
-     * @param inputFile for proper error message formatting.
-     * @param dict the sequence dictionary
-     * @param index the index file
-     */
-    public void validateAndUpdateIndexSequenceDictionary(final File inputFile, final Index index, final SAMSequenceDictionary dict) {
-        if (dict == null) throw new ReviewedGATKException("BUG: dict cannot be null");
-
-        // check that every contig in the RMD contig list is at least in the sequence dictionary we're being asked to set
-        final SAMSequenceDictionary currentDict = IndexDictionaryUtils.createSequenceDictionaryFromContigList(index, new SAMSequenceDictionary());
-        validateTrackSequenceDictionary(inputFile.getAbsolutePath(), currentDict, dict);
-
-        // actually update the dictionary in the index
-        IndexDictionaryUtils.setIndexSequenceDictionary(index, dict);
-    }
-
-    public void validateTrackSequenceDictionary(final String trackName,
-                                                final SAMSequenceDictionary trackDict,
-                                                final SAMSequenceDictionary referenceDict ) {
-        IndexDictionaryUtils.validateTrackSequenceDictionary(trackName, trackDict, referenceDict, validationExclusionType);
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/refdata/utils/FeatureToGATKFeatureIterator.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/refdata/utils/FeatureToGATKFeatureIterator.java
deleted file mode 100644
index 6fb073e..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/refdata/utils/FeatureToGATKFeatureIterator.java
+++ /dev/null
@@ -1,74 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.refdata.utils;
-
-import htsjdk.samtools.util.CloseableIterator;
-import htsjdk.tribble.CloseableTribbleIterator;
-import htsjdk.tribble.Feature;
-import org.broadinstitute.gatk.utils.GenomeLocParser;
-
-
-/**
- * 
- * @author aaron 
- * 
- * Class FeatureToGATKFeatureIterator
- *
- * a wrapper on Tribble feature iterators so that they produce GATKFeatures (which produce GenomeLocs)
- */
-public class FeatureToGATKFeatureIterator implements CloseableIterator<GATKFeature> {
-    private final GenomeLocParser genomeLocParser;
-    private final CloseableTribbleIterator<Feature> iterator;
-    private final String name;
-
-    public FeatureToGATKFeatureIterator(GenomeLocParser genomeLocParser,CloseableTribbleIterator<Feature> iter, String name) {
-        this.genomeLocParser = genomeLocParser;
-        this.name = name;
-        this.iterator = iter;
-    }
-
-    @Override
-    public boolean hasNext() {
-        return iterator.hasNext();
-    }
-
-    @Override
-    public GATKFeature next() {
-        return new GATKFeature.TribbleGATKFeature(genomeLocParser,iterator.next(),name);
-    }
-
-    @Override
-    public void remove() {
-        throw new UnsupportedOperationException("Why does Iterator have this method? We always throw an exception here");
-    }
-
-    @Override
-    public void close() {
-        // The private adapted iterator may not be passed on by the method constructing this object,
-        // leaving only this adapter to close the wrapped iterator.
-        iterator.close();
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/refdata/utils/FlashBackIterator.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/refdata/utils/FlashBackIterator.java
deleted file mode 100644
index 8fc549c..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/refdata/utils/FlashBackIterator.java
+++ /dev/null
@@ -1,221 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.refdata.utils;
-
-import htsjdk.samtools.SAMSequenceDictionary;
-import org.broadinstitute.gatk.utils.GenomeLoc;
-import org.broadinstitute.gatk.utils.HasGenomeLocation;
-
-import java.util.Comparator;
-import java.util.LinkedList;
-
-
-/**
- * 
- * @author aaron 
- * 
- * Class FlashBackIterator
- *
- * better than acid washed jeans...more like a Delorean that flies through time
- *
- * This iterator buffers a certain amount of ROD data to 'flash back' to.  This
- * is needed for using ROD's in read traversals, because between shards we sometimes
- * (actually often) need to go back to before the current iterators location and
- * get RODs that overlap the current read.
- */
-public class FlashBackIterator implements LocationAwareSeekableRODIterator {
-    private LocationAwareSeekableRODIterator iterator;
-    private LinkedList<ComparableList> pastQueue = new LinkedList<ComparableList>();
-    private LinkedList<ComparableList> aheadQueue = new LinkedList<ComparableList>();
-    private int MAX_QUEUE = 200;
-
-    /**
-     * create a flashback iterator
-     * @param iterator given a LocationAwareSeekableRODIterator
-     */
-    public FlashBackIterator(LocationAwareSeekableRODIterator iterator) {
-        this.iterator = iterator;
-    }
-
-    /**
-     * Gets the header associated with the backing input stream.
-     * @return the ROD header.
-     */
-    @Override
-    public Object getHeader() {
-        return iterator.getHeader();
-    }
-
-    /**
-     * Gets the sequence dictionary associated with the backing input stream.
-     * @return sequence dictionary from the ROD header.
-     */
-    @Override
-    public SAMSequenceDictionary getSequenceDictionary() {
-        return iterator.getSequenceDictionary();
-    }
-
-
-    /**
-     * peek at the next location
-     * @return
-     */
-    @Override
-    public GenomeLoc peekNextLocation() {
-        return (aheadQueue.size() > 0) ? aheadQueue.getFirst().getLocation() : iterator.peekNextLocation();
-    }
-
-    /**
-     * get the position of this iterator
-     * @return
-     */
-    @Override
-    public GenomeLoc position() {
-        return (aheadQueue.size() > 0) ? aheadQueue.getFirst().getLocation() : iterator.position();
-    }
-
-    /**
-     * seek forward on the iterator
-     * @param interval the interval to seek to
-     * @return a RODRecordList at that location, null otherwise
-     */
-    @Override
-    public RODRecordList seekForward(GenomeLoc interval) {
-
-        RODRecordList lt = iterator.seekForward(interval);
-        createPastRecord(lt);
-        return lt;
-    }
-
-    /**
-     * do we have a next record
-     * @return true if we have another record
-     */
-    @Override
-    public boolean hasNext() {
-        return (aheadQueue.size() > 0 ||  iterator.hasNext());
-    }
-
-    /**
-     * get the next record
-     * @return a RODRecordList
-     */
-    @Override
-    public RODRecordList next() {
-        return getNext();
-    }
-
-    /**
-     * we don't support remove
-     */
-    @Override
-    public void remove() {
-        throw new UnsupportedOperationException("We don't support remove");
-    }
-
-    /**
-     * get the next record, either from the queue or from the iterator
-     * @return a RODRecordList
-     */
-    private RODRecordList getNext() {
-        if (aheadQueue.size() > 0) {
-            RODRecordList ret = aheadQueue.getFirst().getList();
-            aheadQueue.removeFirst();
-            return ret;
-        } else {
-            RODRecordList ret = iterator.next();
-            createPastRecord(ret);
-            return ret;
-        }
-    }
-
-    private void createPastRecord(RODRecordList ret) {
-        ComparableList rec = new ComparableList(ret);
-        if (rec.getLocation() != null) pastQueue.addLast(new ComparableList(ret));
-        if (pastQueue.size() > this.MAX_QUEUE) pastQueue.removeFirst();
-    }
-
-    /**
-     * can we flash back to the specified location?
-     *
-     * @param location the location to try and flash back to
-     *
-     * @return true if we can, false otherwise
-     */
-    public boolean canFlashBackTo(GenomeLoc location) {
-        GenomeLoc farthestBack = (pastQueue.size() > 0) ? pastQueue.getFirst().getLocation() : iterator.peekNextLocation();
-        return (!farthestBack.isPast(location));
-    }
-
-    /**
-     * flashback! Throws an unsupported operation exception
-     *
-     * @param location where to flash back to
-     */
-    public void flashBackTo(GenomeLoc location) {
-        if (!canFlashBackTo(location)) throw new UnsupportedOperationException("we can't flash back to " + location);
-        if (pastQueue.size()==0) return; // the iterator can do it alone
-        while (pastQueue.size() > 0 && !pastQueue.getLast().getLocation().isBefore(location)) {
-            aheadQueue.addFirst(pastQueue.getLast());
-            pastQueue.removeLast();
-        }
-    }
-
-    public void close() {
-        this.aheadQueue.clear();
-        this.pastQueue.clear();
-    }
-}
-
-/**
- * a list that buffers the location for this rod
- */
-class ComparableList implements Comparator<ComparableList>, HasGenomeLocation {
-    private RODRecordList list;
-    private GenomeLoc location = null;
-    public ComparableList(RODRecordList list) {
-        this.list = list;
-        if (list != null && list.size() != 0)
-            location = list.getLocation();
-    }
-
-    @Override
-    public int compare(ComparableList list1, ComparableList list2) {
-        if (list1.location == null && list2.location == null)
-            return 0;
-        if (list1.location == null) return 1;
-        if (list2.location == null) return -1;
-        return (list1.location.compareTo(list2.location));
-    }
-
-    public GenomeLoc getLocation() {
-        return location;
-    }
-
-    public RODRecordList getList() {
-        return list;
-    }
-}
\ No newline at end of file
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/refdata/utils/GATKFeature.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/refdata/utils/GATKFeature.java
deleted file mode 100644
index 4d08f1b..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/refdata/utils/GATKFeature.java
+++ /dev/null
@@ -1,109 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.refdata.utils;
-
-import htsjdk.tribble.Feature;
-import org.broadinstitute.gatk.engine.refdata.ReferenceOrderedDatum;
-import org.broadinstitute.gatk.utils.GenomeLoc;
-import org.broadinstitute.gatk.utils.GenomeLocParser;
-import org.broadinstitute.gatk.utils.HasGenomeLocation;
-
-
-/**
- * 
- * @author aaron 
- * 
- * Class GATKFeature
- *
- * This wraps a Tribble feature or a RODatum so that both present the same interface: a genome loc for position and a
- * way of retrieving the track name.
- */
-public abstract class GATKFeature implements Feature, HasGenomeLocation {
-
-    public GATKFeature(String name) {
-        this.name = name;
-    }
-
-    String name;
-
-    protected void setName(String name) {
-        this.name = name;
-    }
-
-    public String getName() {
-        return name;
-    }
-
-    public abstract GenomeLoc getLocation();
-
-    // TODO: this should be a Feature
-    public abstract Object getUnderlyingObject();
-
-    /**
-     * wrapping a Tribble feature in a GATK friendly interface
-     */
-    public static class TribbleGATKFeature extends GATKFeature {
-        private final GenomeLocParser genomeLocParser;
-        private final Feature feature;
-        private GenomeLoc position = null;
-        
-        public TribbleGATKFeature(GenomeLocParser genomeLocParser,Feature f, String name) {
-            super(name);
-            this.genomeLocParser = genomeLocParser;
-            feature = f;
-        }
-        public GenomeLoc getLocation() {
-            if (position == null) position = genomeLocParser.createGenomeLoc(feature.getChr(), feature.getStart(), feature.getEnd());
-            return position;
-        }
-
-        /** Return the feature's reference sequence name, e.g. chromosome or contig */
-        @Override
-        public String getChr() {
-            return feature.getChr();
-        }
-
-        /** Return the start position in 1-based coordinates (first base is 1) */
-        @Override
-        public int getStart() {
-            return feature.getStart();
-        }
-
-        /**
-         * Return the end position following 1-based fully closed conventions.  The length of a feature is
-         * end - start + 1;
-         */
-        @Override
-        public int getEnd() {
-            return feature.getEnd();
-        }
-
-        // TODO: this should be a Feature, actually
-        public Object getUnderlyingObject() {
-            return feature;
-        }
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/refdata/utils/LocationAwareSeekableRODIterator.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/refdata/utils/LocationAwareSeekableRODIterator.java
deleted file mode 100644
index 96c60b9..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/refdata/utils/LocationAwareSeekableRODIterator.java
+++ /dev/null
@@ -1,49 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.refdata.utils;
-
-import htsjdk.samtools.SAMSequenceDictionary;
-import htsjdk.samtools.util.CloseableIterator;
-import org.broadinstitute.gatk.utils.GenomeLoc;
-
-/**
- * @author aaron
- *         <p/>
- *         Interface LocationAwareSeekableRODIterator
- *         <p/>
- *         combine iteration with a position aware interface
- */
-public interface LocationAwareSeekableRODIterator extends CloseableIterator<RODRecordList> {
-    public Object getHeader();
-
-    public SAMSequenceDictionary getSequenceDictionary();
-
-    public GenomeLoc peekNextLocation();
-
-    public GenomeLoc position();
-
-    public RODRecordList seekForward(GenomeLoc interval);    
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/refdata/utils/RMDTriplet.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/refdata/utils/RMDTriplet.java
deleted file mode 100644
index 9fa3d1e..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/refdata/utils/RMDTriplet.java
+++ /dev/null
@@ -1,92 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.refdata.utils;
-
-
-import org.broadinstitute.gatk.utils.commandline.Tags;
-
-/**
- * a helper class to manage our triplets of data for the -B command line option (name, type, file)
- * TODO: The presence of four datapoints here suggests that this class' name isn't sufficient to describe its function.  Rename.
- */
-public class RMDTriplet {
-    public enum RMDStorageType { FILE, STREAM };
-
-    private final String name;
-    private final String type;
-    private final String file;
-    private final RMDStorageType storageType;
-    private final Tags tags;
-
-    public RMDTriplet(final String name, final String type, final String file, final RMDStorageType storageType, final Tags tags) {
-        this.name = name;
-        this.type = type;
-        this.file = file;
-        this.storageType = storageType;
-        this.tags = tags;
-    }
-
-    /**
-     * Gets the name of this track.  RefMetaDataTrackers can use this identifier to retrieve data of a certain type.
-     * @return Name associated with this track.
-     */
-    public String getName() {
-        return name;
-    }
-
-    /**
-     * Gets the type of this track.  Informs the GATK how to parse this file type.
-     * @return Type associated with this track.
-     */
-    public String getType() {
-        return type;
-    }
-
-    /**
-     * Gets the filename representing this track.  Data is loaded from this file.
-     * @return Filename of the RMD.
-     */
-    public String getFile() {
-        return file;
-    }
-
-    /**
-     * The type of storage being used for this metadata track.  Right now, can be either a
-     * file type (can be indexed) or a stream type (can't be indexed).
-     * @return Storage type for this RMD 'triplet'.
-     */
-    public RMDStorageType getStorageType() {
-        return storageType;
-    }
-
-    /**
-     * Gets the key=value tags associated with this track
-     * @return Tags associated with this track.
-     */
-    public Tags getTags() {
-        return tags;
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/refdata/utils/RODRecordList.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/refdata/utils/RODRecordList.java
deleted file mode 100644
index b859edc..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/refdata/utils/RODRecordList.java
+++ /dev/null
@@ -1,45 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.refdata.utils;
-
-import org.broadinstitute.gatk.utils.GenomeLoc;
-import org.broadinstitute.gatk.utils.HasGenomeLocation;
-
-import java.util.List;
-
-
-/**
- * @author aaron
- *         <p/>
- *         Class RODRecordList
- *         <p/>
- *         make the RODRecord list an interface, so we can stub in other implementations
- *         during testing.
- */
-public interface RODRecordList extends List<GATKFeature>, Comparable<RODRecordList>, HasGenomeLocation {
-    public GenomeLoc getLocation();
-    public String getName();
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/report/GATKReport.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/report/GATKReport.java
deleted file mode 100644
index 660ea95..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/report/GATKReport.java
+++ /dev/null
@@ -1,376 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.report;
-
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-import org.broadinstitute.gatk.utils.exceptions.UserException;
-
-import java.io.*;
-import java.util.Collection;
-import java.util.List;
-import java.util.Map;
-import java.util.TreeMap;
-
-/**
- * Container class for GATK report tables
- */
-public class GATKReport {
-    public static final String GATKREPORT_HEADER_PREFIX = "#:GATKReport.";
-    public static final GATKReportVersion LATEST_REPORT_VERSION = GATKReportVersion.V1_1;
-    private static final String SEPARATOR = ":";
-    private GATKReportVersion version = LATEST_REPORT_VERSION;
-
-    private final TreeMap<String, GATKReportTable> tables = new TreeMap<String, GATKReportTable>();
-
-    /**
-     * Create a new, empty GATKReport.
-     */
-    public GATKReport() {
-    }
-
-    /**
-     * Create a new GATKReport with the contents of a GATKReport on disk.
-     *
-     * @param filename the path to the file to load
-     */
-    public GATKReport(String filename) {
-        this(new File(filename));
-    }
-
-    /**
-     * Create a new GATKReport with the contents of a GATKReport on disk.
-     *
-     * @param file the file to load
-     */
-    public GATKReport(File file) {
-        loadReport(file);
-    }
-
-    /**
-     * Create a new GATK report from GATK report tables
-     * @param tables Any number of tables that you want to add to the report
-     */
-    public GATKReport(GATKReportTable... tables) {
-        for( GATKReportTable table: tables)
-            addTable(table);
-    }
-
-    /**
-     * Load a GATKReport file from disk
-     *
-     * @param file the file to load
-     */
-    private void loadReport(File file) {
-        BufferedReader reader;
-        String reportHeader;
-        try {
-            reader = new BufferedReader(new FileReader(file));
-            reportHeader = reader.readLine();
-        } catch (FileNotFoundException e) {
-            throw new UserException.CouldNotReadInputFile(file, "it does not exist");
-        } catch (IOException e) { 
-            throw new UserException.CouldNotReadInputFile(file, e);
-        }   
-
-
-        // Read the first line for the version and number of tables.
-        version = GATKReportVersion.fromHeader(reportHeader);
-        if (version.equals(GATKReportVersion.V0_1) ||
-                version.equals(GATKReportVersion.V0_2))
-            throw new UserException("The GATK no longer supports reading legacy GATK Reports. Please use v1.0 or newer.");
-
-        int nTables = Integer.parseInt(reportHeader.split(":")[2]);
-
-        // Read each table according to the number of tables
-        for (int i = 0; i < nTables; i++) {
-            addTable(new GATKReportTable(reader, version));
-        }
-    }
-
-    /**
-     * Add a new, empty table to the report
-     *
-     * @param tableName        the name of the table
-     * @param tableDescription the description of the table
-     * @param numColumns       the number of columns in this table
-     */
-    public void addTable(final String tableName, final String tableDescription, final int numColumns) {
-        addTable(tableName, tableDescription, numColumns, GATKReportTable.TableSortingWay.DO_NOT_SORT);
-    }
-
-    /**
-     * Add a new, empty table to the report
-     *
-     * @param tableName        the name of the table
-     * @param tableDescription the description of the table
-     * @param numColumns       the number of columns in this table
-     * @param sortingWay       way to sort table
-     */
-    public void addTable(final String tableName, final String tableDescription, final int numColumns, final GATKReportTable.TableSortingWay sortingWay) {
-        GATKReportTable table = new GATKReportTable(tableName, tableDescription, numColumns, sortingWay);
-        tables.put(tableName, table);
-    }
-
-    /**
-     * Adds a table, empty or populated, to the report
-     *
-     * @param table the table to add
-     */
-    public void addTable(GATKReportTable table) {
-        tables.put(table.getTableName(), table);
-    }
-
-    public void addTables(List<GATKReportTable> gatkReportTableV2s) {
-        for ( GATKReportTable table : gatkReportTableV2s )
-            addTable(table);
-    }
-
-    /**
-     * Return true if table with a given name exists
-     *
-     * @param tableName the name of the table
-     * @return true if the table exists, false otherwise
-     */
-    public boolean hasTable(String tableName) {
-        return tables.containsKey(tableName);
-    }
-
-    /**
-     * Return a table with a given name
-     *
-     * @param tableName the name of the table
-     * @return the table object
-     */
-    public GATKReportTable getTable(String tableName) {
-        GATKReportTable table = tables.get(tableName);
-        if (table == null)
-            throw new ReviewedGATKException("Table is not in GATKReport: " + tableName);
-        return table;
-    }
-
-    /**
-     * Print all tables contained within this container to a PrintStream
-     *
-     * @param out the PrintStream to which the tables should be written
-     */
-    public void print(PrintStream out) {
-        out.println(GATKREPORT_HEADER_PREFIX + getVersion().toString() + SEPARATOR + getTables().size());
-        for (GATKReportTable table : tables.values())
-            table.write(out);
-    }
-
-    public Collection<GATKReportTable> getTables() {
-        return tables.values();
-    }
-
-    /**
-     * This is the main function in charge of gathering the reports. It checks that the reports are compatible and then
-     * calls the table gathering functions.
-     *
-     * @param input another GATKReport of the same format
-     */
-    public void concat(GATKReport input) {
-
-        if ( !isSameFormat(input) ) {
-            throw new ReviewedGATKException("Failed to combine GATKReport, format doesn't match!");
-        }
-
-        for ( Map.Entry<String, GATKReportTable> table : tables.entrySet() ) {
-            table.getValue().concat(input.getTable(table.getKey()));
-        }
-    }
-
-    public GATKReportVersion getVersion() {
-        return version;
-    }
-
-    /**
-     * Returns whether or not the two reports have the same format, from columns, to tables, to reports, and everything
-     * in between. This does not check if the data inside is the same. This is the check to see if the two reports are
-     * gatherable or reducible.
-     *
-     * @param report another GATK report
-     * @return true if the reports are gatherable
-     */
-    public boolean isSameFormat(GATKReport report) {
-        if (!version.equals(report.version)) {
-            return false;
-        }
-        if (!tables.keySet().equals(report.tables.keySet())) {
-            return false;
-        }
-        for (String tableName : tables.keySet()) {
-            if (!getTable(tableName).isSameFormat(report.getTable(tableName)))
-                return false;
-        }
-        return true;
-    }
-
-    /**
-     * Checks that the reports are exactly the same.
-     *
-     * @param report another GATK report
-     * @return true if all field in the reports, tables, and columns are equal.
-     */
-    public boolean equals(GATKReport report) {
-        if (!version.equals(report.version)) {
-            return false;
-        }
-        if (!tables.keySet().equals(report.tables.keySet())) {
-            return false;
-        }
-        for (String tableName : tables.keySet()) {
-            if (!getTable(tableName).equals(report.getTable(tableName)))
-                return false;
-        }
-        return true;
-    }
-
-    /**
-     * The constructor for a simplified GATK Report. Simplified GATK report are designed for reports that do not need
-     * the advanced functionality of a full GATK Report.
-     * <p/>
-     * A simple GATK Report consists of:
-     * <p/>
-     * - A single table
-     * - No primary key ( it is hidden )
-     * <p/>
-     * Optional:
-     * - Only untyped columns. As long as the data is an Object, it will be accepted.
-     * - Default column values being empty strings.
-     * <p/>
-     * Limitations:
-     * <p/>
-     * - A simple GATK report cannot contain multiple tables.
-     * - It cannot contain typed columns, which prevents arithmetic gathering.
-     *
-     * @param tableName The name of your simple GATK report table
-     * @param columns   The names of the columns in your table
-     * @return a simplified GATK report
-     */
-    public static GATKReport newSimpleReport(final String tableName, final String... columns) {
-        return newSimpleReportWithDescription(tableName, "A simplified GATK table report", columns);
-    }
-
-    /**
-     * @see #newSimpleReport(String, String...) but with a customized description
-     * @param tableName
-     * @param desc
-     * @param columns
-     * @return
-     */
-    public static GATKReport newSimpleReportWithDescription(final String tableName, final String desc, final String... columns) {
-        GATKReportTable table = new GATKReportTable(tableName, desc, columns.length);
-
-        for (String column : columns) {
-            table.addColumn(column, "");
-        }
-
-        GATKReport output = new GATKReport();
-        output.addTable(table);
-
-        return output;
-    }
-
-    /**
-     * The constructor for a simplified GATK Report. Simplified GATK report are designed for reports that do not need
-     * the advanced functionality of a full GATK Report.
-     * <p/>
-     * A simple GATK Report consists of:
-     * <p/>
-     * - A single table
-     * - No primary key ( it is hidden )
-     * <p/>
-     * Optional:
-     * - Only untyped columns. As long as the data is an Object, it will be accepted.
-     * - Default column values being empty strings.
-     * <p/>
-     * Limitations:
-     * <p/>
-     * - A simple GATK report cannot contain multiple tables.
-     * - It cannot contain typed columns, which prevents arithmetic gathering.
-     *
-     * @param tableName The name of your simple GATK report table
-     * @param columns   The names of the columns in your table
-     * @return a simplified GATK report
-     */
-    public static GATKReport newSimpleReport(final String tableName, final List<String> columns) {
-        GATKReportTable table = new GATKReportTable(tableName, "A simplified GATK table report", columns.size());
-
-        for (String column : columns) {
-            table.addColumn(column, "");
-        }
-
-        GATKReport output = new GATKReport();
-        output.addTable(table);
-
-        return output;
-    }
-
-    /**
-     * This method provides an efficient way to populate a simplified GATK report. This method will only work on reports
-     * that qualify as simplified GATK reports. See the newSimpleReport() constructor for more information.
-     *
-     * @param values     the row of data to be added to the table.
-     *               Note: the number of arguments must match the columns in the table.
-     */
-    public void addRow(final Object... values) {
-        // Must be a simple report
-        if ( tables.size() != 1 )
-            throw new ReviewedGATKException("Cannot write a row to a complex GATK Report");
-
-        GATKReportTable table = tables.firstEntry().getValue();
-        if ( table.getNumColumns() != values.length )
-            throw new ReviewedGATKException("The number of arguments in writeRow (" + values.length + ") must match the number of columns in the table (" + table.getNumColumns() + ")" );
-
-        final int rowIndex = table.getNumRows();
-        for ( int i = 0; i < values.length; i++ )
-            table.set(rowIndex, i, values[i]);
-    }
-
-    /**
-     * This method provides an efficient way to populate a simplified GATK report. This method will only work on reports
-     * that qualify as simplified GATK reports. See the newSimpleReport() constructor for more information.
-     *
-     * @param values     the row of data to be added to the table.
-     *               Note: the number of arguments must match the columns in the table.
-     */
-    public void addRowList(final List<Object> values) {
-        if ( tables.size() != 1 )
-            throw new ReviewedGATKException("Cannot write a row to a complex GATK Report");
-
-        GATKReportTable table = tables.firstEntry().getValue();
-        if ( table.getNumColumns() != values.size() )
-            throw new ReviewedGATKException("The number of arguments in writeRow() must match the number of columns in the table");
-
-        final int rowIndex = table.getNumRows();
-        int idx = 0;
-        for ( Object value : values ) {
-            table.set(rowIndex,idx,value);
-            idx++;
-        }
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/report/GATKReportColumn.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/report/GATKReportColumn.java
deleted file mode 100644
index ffdefff..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/report/GATKReportColumn.java
+++ /dev/null
@@ -1,147 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.report;
-
-import org.apache.commons.lang.math.NumberUtils;
-
-import java.util.Arrays;
-import java.util.Collection;
-
-/**
- * column information within a GATK report table
- */
-public class GATKReportColumn {
-    final private String columnName;
-    final private String format;
-    final private GATKReportDataType dataType;
-
-    private GATKReportColumnFormat columnFormat;
-    private GATKReportColumnFormat.Alignment alignment = GATKReportColumnFormat.Alignment.RIGHT;  // default alignment is to the right unless values added ask for a left alignment
-    private int maxWidth = 0;
-
-    /**
-     * Construct the column object, specifying the column name, default value, whether or not the column should be
-     * displayed, and the format string. This cannot be null.
-     *
-     * @param columnName   the name of the column
-     * @param format       format string
-     */
-    public GATKReportColumn(final String columnName, final String format) {
-        this.columnName = columnName;
-        this.maxWidth = columnName.length();
-        if ( format.equals("") ) {
-            this.format = "%s";
-            this.dataType = GATKReportDataType.Unknown;
-        }
-        else {
-            this.format = format;
-            this.dataType = GATKReportDataType.fromFormatString(format);
-        }
-    }
-
-    /**
-     * Get the display width for this column.  This allows the entire column to be displayed with the appropriate, fixed
-     * width.
-     *
-     * @return the format string for this column
-     */
-    public GATKReportColumnFormat getColumnFormat() {
-        if (columnFormat != null)
-            return columnFormat;
-
-        columnFormat = new GATKReportColumnFormat(maxWidth, alignment);
-        return columnFormat;
-    }
-
-    private static final Collection<String> RIGHT_ALIGN_STRINGS = Arrays.asList(
-            "null",
-            "NA",
-            String.valueOf(Double.POSITIVE_INFINITY),
-            String.valueOf(Double.NEGATIVE_INFINITY),
-            String.valueOf(Double.NaN));
-
-    /**
-     * Check if the value can be right aligned. Does not trim the values before checking if numeric since it assumes
-     * the spaces mean that the value is already padded.
-     *
-     * @param value to check
-     * @return true if the value is right-alignable
-     */
-    protected static boolean isRightAlign(final String value) {
-        return value == null || RIGHT_ALIGN_STRINGS.contains(value) || NumberUtils.isNumber(value.trim());
-    }
-
-    /**
-     * Returns a string version of the values.
-     *
-     * @param obj The object to convert to a string
-     * @return The string representation of the column
-     */
-    private String formatValue(final Object obj) {
-        String value;
-        if (obj == null) {
-            value = "null";
-        }
-        else if ( dataType.equals(GATKReportDataType.Unknown) && (obj instanceof Double || obj instanceof Float) ) {
-            value = String.format("%.8f", obj);
-        }
-        else
-            value = String.format(format, obj);
-
-        return value;
-    }
-
-    public GATKReportDataType getDataType() {
-        return dataType;
-    }
-
-    public String getColumnName() {
-        return columnName;
-    }
-
-    public String getFormat() {
-        return dataType.equals(GATKReportDataType.Unknown) ? "%s" : format;
-    }
-
-    public void updateFormatting(final Object value) {
-        if (value != null) {
-            final String formatted = formatValue(value);
-            if ( formatted.length() > 0 ) {
-                updateMaxWidth(formatted);
-                updateFormat(formatted);
-            }
-        }
-    }
-
-    private void updateMaxWidth(final String formatted) {
-        maxWidth = Math.max(formatted.length(), maxWidth);
-    }
-
-    private void updateFormat(final String formatted) {
-        if (alignment == GATKReportColumnFormat.Alignment.RIGHT)
-            alignment = isRightAlign(formatted) ? GATKReportColumnFormat.Alignment.RIGHT : GATKReportColumnFormat.Alignment.LEFT;
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/report/GATKReportColumnFormat.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/report/GATKReportColumnFormat.java
deleted file mode 100644
index 664b503..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/report/GATKReportColumnFormat.java
+++ /dev/null
@@ -1,63 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.report;
-
-/**
- * Column width and left/right alignment.
- */
-public class GATKReportColumnFormat {
-    public static enum Alignment { LEFT, RIGHT }
-    private final int width;
-    private final Alignment alignment;
-
-    public GATKReportColumnFormat(int width, Alignment alignment) {
-        this.width = width;
-        this.alignment = alignment;
-    }
-
-    public int getWidth() {
-        return width;
-    }
-
-    public Alignment getAlignment() {
-        return alignment;
-    }
-
-    public String getNameFormat() {
-        return "%-" + width + "s";
-    }
-
-    public String getValueFormat() {
-        switch (alignment) {
-            case LEFT:
-                return "%-" + width + "s";
-            case RIGHT:
-                return "%" + width + "s";
-            default:
-                throw new UnsupportedOperationException("Unknown alignment: " + alignment);
-        }
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/report/GATKReportDataType.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/report/GATKReportDataType.java
deleted file mode 100644
index acfa74f..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/report/GATKReportDataType.java
+++ /dev/null
@@ -1,236 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.report;
-
-import java.util.EnumSet;
-import java.util.HashMap;
-import java.util.Map;
-
-/**
- * The gatherable data types acceptable in a GATK report column.
- */
-public enum GATKReportDataType {
-    /**
-     * The null type should not be used.
-     */
-    Null("Null"),
-
-    /**
-     * The default value when a format string is not present
-     */
-    Unknown("Unknown"),
-
-    /**
-     * Used for boolean values. Will display as true or false in the table.
-     */
-    Boolean("%[Bb]"),
-
-    /**
-     * Used for char values. Will display as a char so use printable values!
-     */
-    Character("%[Cc]"),
-
-    /**
-     * Used for float and double values. Will output a decimal with format %.8f unless otherwise specified.
-     */
-    Decimal("%.*[EeFf]"),
-
-    /**
-     * Used for int, byte, short, and long values. Will display the full number by default.
-     */
-    Integer("%[Dd]"),
-
-    /**
-     * Used for string values. Displays the string itself.
-     */
-    String("%[Ss]");
-
-    private final String dataTypeString;
-
-    private GATKReportDataType(String dataTypeString) {
-        this.dataTypeString = dataTypeString;
-    }
-
-    private static final Map<String, GATKReportDataType> lookup = new HashMap<String, GATKReportDataType>();
-
-    static {
-        for (GATKReportDataType s : EnumSet.allOf(GATKReportDataType.class))
-            lookup.put(s.dataTypeString, s);
-    }
-
-
-    @Override
-    public String toString() {
-        return this.dataTypeString;
-    }
-
-    /**
-     * Returns a GATK report data type from the Object specified. It looks through the list of acceptable classes and
-     * returns the appropriate data type.
-     *
-     * @param object the object ot derive the data type from
-     * @return the appropriate data type
-     */
-    public static GATKReportDataType fromObject(Object object) {
-        GATKReportDataType value;
-        if (object instanceof Boolean) {
-            value = GATKReportDataType.Boolean;
-
-        } else if (object instanceof Character) {
-            value = GATKReportDataType.Character;
-
-        } else if (object instanceof Float ||
-                object instanceof Double) {
-            value = GATKReportDataType.Decimal;
-
-        } else if (object instanceof Integer ||
-                object instanceof Long ||
-                object instanceof Short ||
-                object instanceof Byte ) {
-            value = GATKReportDataType.Integer;
-
-        } else if (object instanceof String) {
-            value = GATKReportDataType.String;
-
-        } else {
-            value = GATKReportDataType.Unknown;
-            //throw new UserException("GATKReport could not convert the data object into a GATKReportDataType. Acceptable data objects are found in the documentation.");
-        }
-        return value;
-    }
-
-    /**
-     * Returns a GATK report data type from the format string specified. It uses regex matching from the enumerated
-     * Strings.
-     *
-     * @param format the format string to derive the data type from
-     * @return the appropriate data type
-     */
-    public static GATKReportDataType fromFormatString(String format) {
-        if (format.equals(""))
-            return Unknown;
-        for (GATKReportDataType type : lookup.values()) {
-            if (format.matches(type.toString()) )
-                return type;
-        }
-        return Unknown;
-    }
-
-    /**
-     * Returns the default value of the data type. It returns an object that matches the class of the data type.
-     *
-     * @return an object that matches the data type
-     */
-    public Object getDefaultValue() {
-        switch (this) {
-            case Decimal:
-                return 0.0D;
-            case Boolean:
-                return false;
-            case Character:
-                return '0';
-            case Integer:
-                return 0L;
-            case String:
-                return "";
-            default:
-                return null;
-        }
-    }
-
-    /**
-     * Checks if the two objects are equal using the appropriate test form the data types.
-     *
-     * @param a an object
-     * @param b another object to check if equal
-     * @return true - the objects are equal, false - the objects are nto equal
-     */
-    public boolean isEqual(Object a, Object b) {
-        switch (this) {
-            case Null:
-                return true;
-            case Decimal:
-            case Boolean:
-            case Integer:
-                return a.toString().equals(b.toString());
-            case Character:
-            case String:
-            default:
-                return a.equals(b);
-        }
-    }
-
-    /**
-     * Converts an input String to the appropriate type using the data type. Used for parsing loading a GATK report from
-     * file.
-     *
-     * @param obj The input string
-     * @return an object that matches the data type.
-     */
-    Object Parse(Object obj) {
-        if (obj instanceof String) {
-            String str = obj.toString();
-            switch (this) {
-                case Decimal:
-                    return Double.parseDouble(str);
-                case Boolean:
-                    return java.lang.Boolean.parseBoolean(str);
-                case Integer:
-                    return Long.parseLong(str);
-                case String:
-                    return str;
-                case Character:
-                    return str.toCharArray()[0];
-                default:
-                    return str;
-            }
-        } else
-            return null;
-    }
-
-    /**
-     * Returns a format string version of the value according to the data type.
-     *
-     * @return The printf string representation of the object according to data type.
-     */
-    public String getDefaultFormatString() {
-        switch (this) {
-            case Decimal:
-                return "%.8f";
-            case Boolean:
-                return "%b";
-            case Integer:
-                return "%d";
-            case String:
-                return "%s";
-            case Character:
-                return "%c";
-            case Null:
-            default:
-                return "%s";
-        }
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/report/GATKReportGatherer.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/report/GATKReportGatherer.java
deleted file mode 100644
index 5f7f767..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/report/GATKReportGatherer.java
+++ /dev/null
@@ -1,62 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.report;
-
-import org.broadinstitute.gatk.utils.commandline.Gatherer;
-import org.broadinstitute.gatk.utils.exceptions.UserException;
-
-import java.io.File;
-import java.io.FileNotFoundException;
-import java.io.PrintStream;
-import java.util.List;
-
-public class GATKReportGatherer extends Gatherer {
-    @Override
-    public void gather(List<File> inputs, File output) {
-        //Combines inputs GATKReport to one output
-
-        PrintStream o;
-        try {
-            o = new PrintStream(output);
-        } catch (FileNotFoundException e) {
-            throw new UserException(String.format("File %s to be output by GATKReportGatherer function was not found", output));
-        }
-
-        GATKReport current = new GATKReport();
-        boolean isFirst = true;
-        for (File input : inputs) {
-            if (isFirst) {
-                current = new GATKReport(input);
-                isFirst = false;
-            } else {
-                current.concat(new GATKReport(input));
-            }
-        }
-
-        current.print(o);
-        o.close();
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/report/GATKReportTable.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/report/GATKReportTable.java
deleted file mode 100644
index 6a1e456..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/report/GATKReportTable.java
+++ /dev/null
@@ -1,779 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.report;
-
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-import org.broadinstitute.gatk.utils.text.TextFormattingUtils;
-
-import java.io.BufferedReader;
-import java.io.IOException;
-import java.io.PrintStream;
-import java.util.*;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
-
-public class GATKReportTable {
-    /**
-     * REGEX that matches any table with an invalid name
-     */
-    public static final String INVALID_TABLE_NAME_REGEX = "[^a-zA-Z0-9_\\-\\.]";
-    private static final String GATKTABLE_HEADER_PREFIX = "#:GATKTable";
-    private static final String SEPARATOR = ":";
-    private static final String ENDLINE = ":;";
-
-    private final String tableName;
-    private final String tableDescription;
-
-    private final TableSortingWay sortingWay;
-
-    private List<Object[]> underlyingData;
-    private final List<GATKReportColumn> columnInfo;
-    private final Map<Object, Integer> columnNameToIndex;
-    private final HashMap<Object, Integer> rowIdToIndex;
-
-    private static final String COULD_NOT_READ_HEADER = "Could not read the header of this file -- ";
-    private static final String COULD_NOT_READ_COLUMN_NAMES = "Could not read the column names of this file -- ";
-    private static final String COULD_NOT_READ_DATA_LINE = "Could not read a data line of this table -- ";
-    private static final String COULD_NOT_READ_EMPTY_LINE = "Could not read the last empty line of this table -- ";
-    private static final String OLD_GATK_TABLE_VERSION = "We no longer support older versions of the GATK Tables";
-
-    private static final int INITITAL_ARRAY_SIZE = 10000;
-    private static final String NUMBER_CONVERSION_EXCEPTION = "String is a number but is not a long or a double: ";
-
-    protected enum TableDataHeaderFields {
-        COLS(2),
-        ROWS(3),
-        FORMAT_START(4);
-
-        private final int index;
-        TableDataHeaderFields(int index) { this.index = index; }
-        public int index() { return index; }
-    }
-
-    public enum TableSortingWay {
-        SORT_BY_ROW,
-        SORT_BY_COLUMN,
-        DO_NOT_SORT
-    }
-
-    protected enum TableNameHeaderFields {
-        NAME(2),
-        DESCRIPTION(3);
-
-        private final int index;
-        TableNameHeaderFields(int index) { this.index = index; }
-        public int index() { return index; }
-    }
-
-    /**
-     * Construct a new GATK report table from the reader
-     * Note that the row ID mappings are just the index -> index
-     *
-     * @param reader        the reader
-     * @param version       the GATK report version
-     */
-    public GATKReportTable(BufferedReader reader, GATKReportVersion version) {
-
-        switch ( version ) {
-            case V1_1:
-                // read in the header lines
-                final String[] tableData, tableNameData;
-                try {
-                    tableData = reader.readLine().split(SEPARATOR);
-                    tableNameData = reader.readLine().split(SEPARATOR);
-                } catch (IOException e) {
-                    throw new ReviewedGATKException(COULD_NOT_READ_HEADER + e.getMessage());
-                }
-
-                // parse the header fields
-                tableName = tableNameData[TableNameHeaderFields.NAME.index()];
-                tableDescription = (tableNameData.length <= TableNameHeaderFields.DESCRIPTION.index()) ? "" : tableNameData[TableNameHeaderFields.DESCRIPTION.index()];                                           // table may have no description! (and that's okay)
-
-                // when reading from a file, we do not re-sort the rows
-                sortingWay = TableSortingWay.DO_NOT_SORT;
-
-                // initialize the data
-                final int nColumns = Integer.parseInt(tableData[TableDataHeaderFields.COLS.index()]);
-                final int nRows = Integer.parseInt(tableData[TableDataHeaderFields.ROWS.index()]);
-                underlyingData = new ArrayList<Object[]>(nRows);
-                columnInfo = new ArrayList<GATKReportColumn>(nColumns);
-                columnNameToIndex = new HashMap<Object, Integer>(nColumns);
-
-                // when reading from a file, the row ID mapping is just the index
-                rowIdToIndex = new HashMap<Object, Integer>();
-                for ( int i = 0; i < nRows; i++ )
-                    rowIdToIndex.put(i, i);
-
-                // read the column names
-                final String columnLine;
-                try {
-                    columnLine = reader.readLine();
-                } catch (IOException e) {
-                    throw new ReviewedGATKException(COULD_NOT_READ_COLUMN_NAMES);
-                }
-
-                final List<Integer> columnStarts = TextFormattingUtils.getWordStarts(columnLine);
-                final String[] columnNames = TextFormattingUtils.splitFixedWidth(columnLine, columnStarts);
-
-                // Put in columns using the format string from the header
-                for ( int i = 0; i < nColumns; i++ ) {
-                    final String format = tableData[TableDataHeaderFields.FORMAT_START.index() + i];
-                    addColumn(columnNames[i], format);
-                }
-
-                // fill in the table
-                try {
-                    for ( int i = 0; i < nRows; i++ ) {
-                        // read a data line
-                        final String dataLine = reader.readLine();
-                        final List<String> lineSplits = Arrays.asList(TextFormattingUtils.splitFixedWidth(dataLine, columnStarts));
-
-                        underlyingData.add(new Object[nColumns]);
-                        for ( int columnIndex = 0; columnIndex < nColumns; columnIndex++ ) {
-
-                            final GATKReportDataType type = columnInfo.get(columnIndex).getDataType();
-                            final String columnName = columnNames[columnIndex];
-                            set(i, columnName, type.Parse(lineSplits.get(columnIndex)));
-
-                        }
-                    }
-                } catch (IOException e) {
-                    throw new ReviewedGATKException(COULD_NOT_READ_DATA_LINE + e.getMessage());
-                }
-
-                try {
-                    reader.readLine();
-                } catch (IOException e) {
-                    throw new ReviewedGATKException(COULD_NOT_READ_EMPTY_LINE + e.getMessage());
-                }
-            break;
-
-            default:
-                throw new ReviewedGATKException(OLD_GATK_TABLE_VERSION);
-        }
-    }
-
-    /**
-     * Construct a new GATK report table with the specified name and description
-     *
-     * @param tableName        the name of the table
-     * @param tableDescription the description of the table
-     * @param numColumns       the number of columns in this table
-     */
-    public GATKReportTable(final String tableName, final String tableDescription, final int numColumns) {
-        this(tableName, tableDescription, numColumns, TableSortingWay.SORT_BY_ROW);
-    }
-
-    /**
-     * Construct a new GATK report table with the specified name and description and whether to sort rows by the row ID.
-     *
-     * @param tableName          the name of the table
-     * @param tableDescription   the description of the table
-     * @param numColumns         the number of columns in this table
-     * @param sortingWay         in what way to sort rows (instead of the order in which they were added)
-     */
-    public GATKReportTable(final String tableName, final String tableDescription, final int numColumns, final TableSortingWay sortingWay) {
-        if ( !isValidName(tableName) ) {
-            throw new ReviewedGATKException("Attempted to set a GATKReportTable name of '" + tableName + "'.  GATKReportTable names must be purely alphanumeric - no spaces or special characters are allowed.");
-        }
-
-        if ( !isValidDescription(tableDescription) ) {
-            throw new ReviewedGATKException("Attempted to set a GATKReportTable description of '" + tableDescription + "'.  GATKReportTable descriptions must not contain newlines.");
-        }
-
-        this.tableName = tableName;
-        this.tableDescription = tableDescription;
-        this.sortingWay = sortingWay;
-
-        underlyingData = new ArrayList<Object[]>(INITITAL_ARRAY_SIZE);
-        columnInfo = new ArrayList<GATKReportColumn>(numColumns);
-        columnNameToIndex = new HashMap<Object, Integer>(numColumns);
-        rowIdToIndex = new HashMap<Object, Integer>();
-    }
-
-    /**
-     * Create a new GATKReportTable with the same structure
-     * @param tableToCopy
-     */
-    public GATKReportTable(final GATKReportTable tableToCopy, final boolean copyData) {
-        this(tableToCopy.getTableName(), tableToCopy.getTableDescription(), tableToCopy.getNumColumns(), tableToCopy.sortingWay);
-        for ( final GATKReportColumn column : tableToCopy.getColumnInfo() )
-            addColumn(column.getColumnName(), column.getFormat());
-        if ( copyData )
-            throw new IllegalArgumentException("sorry, copying data in GATKReportTable isn't supported");
-    }
-
-        /**
-        * Verifies that a table or column name has only alphanumeric characters - no spaces or special characters allowed
-        *
-        * @param name the name of the table or column
-        * @return true if the name is valid, false if otherwise
-        */
-    private boolean isValidName(String name) {
-        Pattern p = Pattern.compile(INVALID_TABLE_NAME_REGEX);
-        Matcher m = p.matcher(name);
-
-        return !m.find();
-    }
-
-    /**
-     * Verifies that a table or column name has only alphanumeric characters - no spaces or special characters allowed
-     *
-     * @param description the name of the table or column
-     * @return true if the name is valid, false if otherwise
-     */
-    private boolean isValidDescription(String description) {
-        Pattern p = Pattern.compile("\\r|\\n");
-        Matcher m = p.matcher(description);
-
-        return !m.find();
-    }
-
-    /**
-     * Add a mapping from ID to the index of a new row added to the table.
-     *
-     * @param ID                    the unique ID
-     */
-    public void addRowID(final String ID) {
-        addRowID(ID, false);
-    }
-
-    /**
-     * Add a mapping from ID to the index of a new row added to the table.
-     *
-     * @param ID                    the unique ID
-     * @param populateFirstColumn   should we automatically populate the first column with the row's ID?
-     */
-    public void addRowID(final String ID, final boolean populateFirstColumn) {
-        addRowIDMapping(ID, underlyingData.size(), populateFirstColumn);
-    }
-
-    /**
-     * Add a mapping from ID to row index.
-     *
-     * @param ID                    the unique ID
-     * @param index                 the index associated with the ID
-     */
-    public void addRowIDMapping(final String ID, final int index) {
-        addRowIDMapping(ID, index, false);
-    }
-
-    /**
-     * Add a mapping from ID to row index.
-     *
-     * @param ID                    the unique ID
-     * @param index                 the index associated with the ID
-     * @param populateFirstColumn   should we automatically populate the first column with the row's ID?
-     */
-    public void addRowIDMapping(final Object ID, final int index, final boolean populateFirstColumn) {
-        expandTo(index, false);
-        rowIdToIndex.put(ID, index);
-
-        if ( populateFirstColumn )
-            set(index, 0, ID);
-    }
-
-    /**
-     * Remove a mapping from ID to row index.
-     *
-     * @param ID   the row ID
-     */
-    public void removeRowIDMapping(final Object ID) {
-        rowIdToIndex.remove(ID);
-    }
-
-    /**
-     * Add a column to the report
-     *
-     * @param columnName   the name of the column
-     */
-    public void addColumn(String columnName) {
-        addColumn(columnName, "");
-    }
-
-    /**
-     * Add a column to the report and the format string used to display the data.
-     *
-     * @param columnName   the name of the column
-     * @param format       the format string used to display data
-     */
-    public void addColumn(String columnName, String format) {
-        columnNameToIndex.put(columnName, columnInfo.size());
-        columnInfo.add(new GATKReportColumn(columnName, format));
-    }
-
-    /**
-     * Check if the requested cell is valid and expand the table if necessary
-     *
-     * @param rowIndex    the row index
-     * @param colIndex    the column index
-     */
-    private void verifyEntry(final int rowIndex, final int colIndex) {
-        if ( rowIndex < 0 || colIndex < 0 || colIndex >= getNumColumns() )
-            throw new ReviewedGATKException("attempted to access a cell that does not exist in table '" + tableName + "'");
-    }
-
-    /**
-     * expand the underlying table if needed to include the given row index
-     *
-     * @param rowIndex        the row index
-     * @param updateRowIdMap  should we update the row ID map?
-     */
-    private void expandTo(final int rowIndex, final boolean updateRowIdMap) {
-        int currentSize = underlyingData.size();
-        if ( rowIndex >= currentSize ) {
-            final int numNewRows = rowIndex - currentSize + 1;
-            for ( int i = 0; i < numNewRows; i++ ) {
-                if ( updateRowIdMap )
-                    rowIdToIndex.put(currentSize, currentSize);
-                underlyingData.add(new Object[getNumColumns()]);
-                currentSize++;
-            }
-        }
-    }
-
-    /**
-     * Set the value for a given position in the table.
-     * If the row ID doesn't exist, it will create a new row in the table with the given ID.
-     *
-     * @param rowID        the row ID
-     * @param columnName   the name of the column
-     * @param value        the value to set
-     */
-    public void set(final Object rowID, final String columnName, final Object value) {
-        if ( !rowIdToIndex.containsKey(rowID) ) {
-            rowIdToIndex.put(rowID, underlyingData.size());
-            expandTo(underlyingData.size(), false);
-        }
-        set(rowIdToIndex.get(rowID), columnNameToIndex.get(columnName), value);
-    }
-
-    /**
-     * Set the value for a given position in the table.
-     * If the row index doesn't exist, it will create new rows in the table accordingly.
-     *
-     * @param rowIndex     the row index
-     * @param colIndex     the column index
-     * @param value        the value to set
-     */
-    public void set(final int rowIndex, final int colIndex, Object value) {
-        expandTo(rowIndex, true);
-        verifyEntry(rowIndex, colIndex);
-        GATKReportColumn column = columnInfo.get(colIndex);
-
-        // We do not accept internal null values
-        if (value == null)
-            value = "null";
-        else
-            value = fixType(value, column);
-
-        if ( column.getDataType().equals(GATKReportDataType.fromObject(value)) || column.getDataType().equals(GATKReportDataType.Unknown) ) {
-            underlyingData.get(rowIndex)[colIndex] = value;
-            column.updateFormatting(value);
-        } else {
-            throw new ReviewedGATKException(String.format("Tried to add an object of type: %s to a column of type: %s", GATKReportDataType.fromObject(value).name(), column.getDataType().name()));
-        }
-    }
-
-    /**
-     * Returns true if the table contains a row mapping with the given ID
-     *
-     * @param rowID        the row ID
-     */
-    public boolean containsRowID(final Object rowID) {
-        return rowIdToIndex.containsKey(rowID);
-    }
-
-    /**
-     * Returns the row mapping IDs
-     *
-     */
-    public Collection<Object> getRowIDs() {
-        return rowIdToIndex.keySet();
-    }
-
-    /**
-    * Increment the value for a given position in the table.
-    * Throws an exception if the value in the cell is not an integer.
-    *
-    * @param rowID        the row ID
-    * @param columnName   the name of the column
-    */
-    public void increment(final Object rowID, final String columnName) {
-        int prevValue;
-        if ( !rowIdToIndex.containsKey(rowID) ) {
-            rowIdToIndex.put(rowID, underlyingData.size());
-            underlyingData.add(new Object[getNumColumns()]);
-            prevValue = 0;
-        } else {
-            Object obj = get(rowID, columnName);
-            if ( !(obj instanceof Integer) )
-                throw new ReviewedGATKException("Attempting to increment a value in a cell that is not an integer");
-            prevValue = (Integer)obj;
-        }
-
-        set(rowIdToIndex.get(rowID), columnNameToIndex.get(columnName), prevValue + 1);
-    }
-
-    /**
-     * Returns the index of the first row matching the column values.
-     * Ex: "CountVariants", "dbsnp", "eval", "called", "all", "novel", "all"
-     *
-     * @param columnValues column values.
-     * @return The index of the first row matching the column values or -1 if no such row exists.
-     */
-    public int findRowByData(final Object... columnValues) {
-        if ( columnValues == null || columnValues.length == 0 || columnValues.length > getNumColumns() )
-            return -1;
-
-        for ( int rowIndex = 0; rowIndex < underlyingData.size(); rowIndex++ ) {
-
-            final Object[] row = underlyingData.get(rowIndex);
-
-            boolean matches = true;
-            for ( int colIndex = 0; colIndex < columnValues.length; colIndex++ ) {
-                if ( !columnValues[colIndex].equals(row[colIndex]) ) {
-                    matches = false;
-                    break;
-                }
-            }
-
-            if ( matches )
-                return rowIndex;
-        }
-
-        return -1;
-    }
-
-    private Object fixType(final Object value, final GATKReportColumn column) {
-        // Below is some code to convert a string into its appropriate type.
-
-        // todo -- Types have to be more flexible. For example, %d should accept Integers, Shorts and Bytes.
-
-        Object newValue = null;
-        if ( value instanceof String && !column.getDataType().equals(GATKReportDataType.String) ) {
-            // Integer case
-            if ( column.getDataType().equals(GATKReportDataType.Integer) ) {
-                try {
-                    newValue = Long.parseLong((String) value);
-                } catch (Exception e) {
-                    /** do nothing */
-                }
-            }
-            if ( column.getDataType().equals(GATKReportDataType.Decimal) ) {
-                try {
-                    newValue = Double.parseDouble((String) value);
-                } catch (Exception e) {
-                    /** do nothing */
-                }
-            }
-            if ( column.getDataType().equals(GATKReportDataType.Character) && ((String) value).length() == 1 ) {
-                newValue = ((String) value).charAt(0);
-            }
-        }
-
-        return  (newValue != null) ? newValue : value;
-    }
-
-    /**
-     * Get a value from the given position in the table
-     *
-     * @param rowID       the row ID
-     * @param columnName  the name of the column
-     * @return the value stored at the specified position in the table
-     */
-    public Object get(final Object rowID, final String columnName) {
-        return get(rowIdToIndex.get(rowID), columnNameToIndex.get(columnName));
-    }
-
-    /**
-     * Get a value from the given position in the table
-     *
-     * @param rowIndex       the row ID
-     * @param columnName  the name of the column
-     * @return the value stored at the specified position in the table
-     */
-    public Object get(final int rowIndex, final String columnName) {
-        return get(rowIndex, columnNameToIndex.get(columnName));
-    }
-
-    /**
-     * Get a value from the given position in the table
-     *
-     * @param rowIndex    the index of the row
-     * @param columnIndex the index of the column
-     * @return the value stored at the specified position in the table
-     */
-    public Object get(int rowIndex, int columnIndex) {
-        verifyEntry(rowIndex, columnIndex);
-        return underlyingData.get(rowIndex)[columnIndex];
-    }
-
-    /**
-     * Write the table to the PrintStream, formatted nicely to be human-readable, AWK-able, and R-friendly.
-     *
-     * @param out the PrintStream to which the table should be written
-     */
-     void write(final PrintStream out) {
-
-         /*
-          * Table header:
-          * #:GATKTable:nColumns:nRows:(DataType for each column):;
-          * #:GATKTable:TableName:Description :;
-          * key   colA  colB
-          * row1  xxxx  xxxxx
-         */
-
-         // write the table definition
-         out.printf(GATKTABLE_HEADER_PREFIX + ":%d:%d", getNumColumns(), getNumRows());
-
-         // write the formats for all the columns
-         for ( final GATKReportColumn column : columnInfo )
-             out.print(SEPARATOR + column.getFormat());
-         out.println(ENDLINE);
-
-         // write the table name & description
-         out.printf(GATKTABLE_HEADER_PREFIX + ":%s:%s\n", tableName, tableDescription);
-
-         // write the column names
-         boolean needsPadding = false;
-         for ( final GATKReportColumn column : columnInfo ) {
-             if ( needsPadding )
-                 out.printf("  ");
-             needsPadding = true;
-
-             out.printf(column.getColumnFormat().getNameFormat(), column.getColumnName());
-         }
-         out.println();
-
-         // write the table body
-         switch (sortingWay) {
-             case SORT_BY_COLUMN:
-                 Collections.sort(underlyingData, new Comparator<Object[]>() {
-                     //INVARIANT the two arrays are of the same length and corresponding elements are of the same type
-                     @Override
-                     public int compare(Object[] objectArr1, Object[] objectArr2) {
-                         final int EQUAL = 0;
-
-                         int result = EQUAL;
-
-                         int l = objectArr1.length;
-                         for (int x = 0; x < l; x++) {
-                             if (objectArr1[x] instanceof Integer) {
-                                 result = ((Integer)objectArr1[x]).compareTo((Integer)objectArr2[x]);
-                             } else if (objectArr1[x] instanceof Double) {
-                                 result = ((Double)objectArr1[x]).compareTo((Double)objectArr2[x]);
-                             } else { // default uses String comparison
-                                 result = objectArr1[x].toString().compareTo(objectArr2[x].toString());
-                             }
-                             if( result  != EQUAL) {
-                                 return result;
-                             }
-                         }
-                         return result;
-                     }
-                 });
-                 for ( final Object[] row : underlyingData )
-                     writeRow(out, row);
-                 break;
-             case SORT_BY_ROW:
-                 // make sure that there are exactly the correct number of ID mappings
-                 if ( rowIdToIndex.size() != underlyingData.size() )
-                     throw new ReviewedGATKException("There isn't a 1-to-1 mapping from row ID to index; this can happen when rows are not created consistently");
-
-                 final TreeMap<Object, Integer> sortedMap;
-                 try {
-                     sortedMap = new TreeMap<Object, Integer>(rowIdToIndex);
-                 } catch (ClassCastException e) {
-                     throw new ReviewedGATKException("Unable to sort the rows based on the row IDs because the ID Objects are of different types");
-                 }
-                 for ( final Map.Entry<Object, Integer> rowKey : sortedMap.entrySet() )
-                     writeRow(out, underlyingData.get(rowKey.getValue()));
-                 break;
-             case DO_NOT_SORT:
-                 for ( final Object[] row : underlyingData )
-                     writeRow(out, row);
-         }
-         out.println();
-     }
-
-    private void writeRow(final PrintStream out, final Object[] row) {
-        boolean needsPadding = false;
-        for ( int i = 0; i < row.length; i++ ) {
-            if ( needsPadding )
-                out.printf("  ");
-            needsPadding = true;
-
-            final Object obj = row[i];
-            final String value;
-
-            final GATKReportColumn info = columnInfo.get(i);
-
-            if ( obj == null )
-                value = "null";
-            else if ( info.getDataType().equals(GATKReportDataType.Unknown) && (obj instanceof Double || obj instanceof Float) )
-                value = String.format("%.8f", obj);
-            else
-                value = String.format(info.getFormat(), obj);
-
-            out.printf(info.getColumnFormat().getValueFormat(), value);
-        }
-
-        out.println();
-    }
-
-    public int getNumRows() {
-        return underlyingData.size();
-    }
-
-    public int getNumColumns() {
-        return columnInfo.size();
-    }
-
-    public List<GATKReportColumn> getColumnInfo() {
-        return columnInfo;
-    }
-
-    public String getTableName() {
-        return tableName;
-    }
-
-    public String getTableDescription() {
-        return tableDescription;
-    }
-
-    /**
-     * Concatenates the rows from the table to this one
-     *
-     * @param table another GATK table
-     */
-    public void concat(final GATKReportTable table) {
-        if ( !isSameFormat(table) )
-            throw new ReviewedGATKException("Error trying to concatenate tables with different formats");
-
-        // add the data
-        underlyingData.addAll(table.underlyingData);
-
-        // update the row index map
-        final int currentNumRows = getNumRows();
-        for ( Map.Entry<Object, Integer> entry : table.rowIdToIndex.entrySet() )
-            rowIdToIndex.put(entry.getKey(), entry.getValue() + currentNumRows);
-    }
-
-    /**
-     * Returns whether or not the two tables have the same format including columns and everything in between. This does
-     * not check if the data inside is the same. This is the check to see if the two tables are gatherable or
-     * reduceable
-     *
-     * @param table another GATK table
-     * @return true if the the tables are gatherable
-     */
-    public boolean isSameFormat(final GATKReportTable table) {
-        if ( !tableName.equals(table.tableName) ||
-                !tableDescription.equals(table.tableDescription) ||
-                columnInfo.size() != table.columnInfo.size() )
-            return false;
-
-        for ( int i = 0; i < columnInfo.size(); i++ ) {
-            if ( !columnInfo.get(i).getFormat().equals(table.columnInfo.get(i).getFormat()) ||
-                    !columnInfo.get(i).getColumnName().equals(table.columnInfo.get(i).getColumnName()) )
-                return false;
-        }
-
-        return true;
-    }
-
-    /**
-     * Checks that the tables are exactly the same.
-     *
-     * @param table another GATK report
-     * @return true if all field in the reports, tables, and columns are equal.
-     */
-    public boolean equals(final GATKReportTable table) {
-        if ( !isSameFormat(table) ||
-                underlyingData.size() != table.underlyingData.size() )
-            return false;
-
-        final List<Object[]> myOrderedRows = getOrderedRows();
-        final List<Object[]> otherOrderedRows = table.getOrderedRows();
-
-        for ( int i = 0; i < underlyingData.size(); i++ ) {
-            final Object[] myData = myOrderedRows.get(i);
-            final Object[] otherData = otherOrderedRows.get(i);
-            for ( int j = 0; j < myData.length; j++ ) {
-                if ( !myData[j].toString().equals(otherData[j].toString()) )       // need to deal with different typing (e.g. Long vs. Integer)
-                    return false;
-            }
-        }
-
-        return true;
-    }
-
-    private List<Object[]> getOrderedRows() {
-
-        switch (sortingWay) {
-            case SORT_BY_COLUMN:
-                Collections.sort(underlyingData, new Comparator<Object[]>() {
-                    //INVARIANT the two arrays are of the same length and corresponding elements are of the same type
-                    @Override
-                    public int compare(Object[] objectArr1, Object[] objectArr2) {
-                        final int EQUAL = 0;
-                        int result = EQUAL;
-                        int l = objectArr1.length;
-                            for (int x = 0; x < l; x++) {
-                                if (objectArr1[x] instanceof Integer) {
-                                    result = ((Integer)objectArr1[x]).compareTo((Integer)objectArr2[x]);
-                                } else if (objectArr1[x] instanceof Double) {
-                                    result = ((Double)objectArr1[x]).compareTo((Double)objectArr2[x]);
-                                } else  { // default uses String comparison
-                                    result = objectArr1[x].toString().compareTo(objectArr2[x].toString());
-                                }
-                                if( result != EQUAL) {
-                                    return result;
-                                }
-                            }
-                            return result;
-                    }
-                });
-                return underlyingData;
-            case SORT_BY_ROW:
-                final TreeMap<Object, Integer> sortedMap;
-                try {
-                    sortedMap = new TreeMap<Object, Integer>(rowIdToIndex);
-                } catch (ClassCastException e) {
-                    return underlyingData;
-                }
-
-                final List<Object[]> orderedData = new ArrayList<Object[]>(underlyingData.size());
-                for ( final int rowKey : sortedMap.values() )
-                    orderedData.add(underlyingData.get(rowKey));
-
-                return orderedData;
-            default:
-                return underlyingData;
-        }
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/report/GATKReportVersion.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/report/GATKReportVersion.java
deleted file mode 100644
index 226365b..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/report/GATKReportVersion.java
+++ /dev/null
@@ -1,101 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.report;
-
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-import org.broadinstitute.gatk.utils.exceptions.UserException;
-
-public enum GATKReportVersion {
-    /**
-     * Differences between other versions:
-     * - Does not allow spaces in cells.
-     * - Mostly fixed width but has a bug where the string width of floating point
-     * values was not measured correctly leading to columns that aren't aligned
-     */
-    V0_1("v0.1"),
-
-    /**
-     * Differences between other versions:
-     * - Spaces allowed in cells, for example in sample names with spaces in them ex: "C507/FG-CR 6".
-     * - Fixed width fixed for floating point values
-     */
-    V0_2("v0.2"),
-
-    /*
-    * Differences between v0.x
-    * - Added table and report headers
-    * - Headers changed format, include the number of tables, rows, and metadata for gathering
-    * - IS GATHERABLE
-    */
-    V1_0("v1.0"),
-
-    /*
-    * Differences between v1.0
-    * - column numbers in header reflect the actual count of columns
-    * - primary keys are never displayed
-    */
-    V1_1("v1.1");
-
-    private final String versionString;
-
-    private GATKReportVersion(String versionString) {
-        this.versionString = versionString;
-    }
-
-    @Override
-    public String toString() {
-        return versionString;
-    }
-
-    public boolean equals(GATKReportVersion that) {
-        return (versionString.equals(that.versionString));
-    }
-
-    /**
-     * Returns the GATK Report Version from the file header.
-     *
-     * @param header Header from the file starting with ##:GATKReport.v[version]
-     * @return The version as an enum.
-     */
-    public static GATKReportVersion fromHeader(String header) {
-        if ( header == null )
-            throw new UserException.BadInput("The GATK report has no version specified in the header");
-
-        if (header.startsWith("##:GATKReport.v0.1 "))
-            return GATKReportVersion.V0_1;
-
-        if (header.startsWith("##:GATKReport.v0.2 "))
-            return GATKReportVersion.V0_2;
-
-        if (header.startsWith("#:GATKReport.v1.0"))
-            return GATKReportVersion.V1_0;
-
-        if (header.startsWith("#:GATKReport.v1.1"))
-            return GATKReportVersion.V1_1;
-
-        throw new UserException.BadInput("The GATK report has an unknown/unsupported version in the header: " + header);
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/resourcemanagement/ThreadAllocation.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/resourcemanagement/ThreadAllocation.java
deleted file mode 100644
index 0344135..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/resourcemanagement/ThreadAllocation.java
+++ /dev/null
@@ -1,116 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.resourcemanagement;
-
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-
-/**
- * Models how threads are distributed between various components of the GATK.
- */
-public class ThreadAllocation {
-    /**
-     * The number of CPU threads to be used by the GATK.
-     */
-    private final int numDataThreads;
-
-    /**
-     * The number of CPU threads per data thread for GATK processing
-     */
-    private final int numCPUThreadsPerDataThread;
-
-    /**
-     * Number of threads to devote exclusively to IO.  Default is 0.
-     */
-    private final int numIOThreads;
-
-    /**
-     * Should we monitor thread efficiency?
-     */
-    private final boolean monitorEfficiency;
-
-    public int getNumDataThreads() {
-        return numDataThreads;
-    }
-
-    public int getNumCPUThreadsPerDataThread() {
-        return numCPUThreadsPerDataThread;
-    }
-
-    public int getNumIOThreads() {
-        return numIOThreads;
-    }
-
-    public boolean monitorThreadEfficiency() {
-        return monitorEfficiency;
-    }
-
-    /**
-     * Are we running in parallel mode?
-     *
-     * @return true if any parallel processing is enabled
-     */
-    public boolean isRunningInParallelMode() {
-        return getTotalNumThreads() > 1;
-    }
-
-    /**
-     * What is the total number of threads in use by the GATK?
-     *
-     * @return the sum of all thread allocations in this object
-     */
-    public int getTotalNumThreads() {
-        return getNumDataThreads() * getNumCPUThreadsPerDataThread() + getNumIOThreads();
-    }
-
-    /**
-     * Construct the default thread allocation.
-     */
-    public ThreadAllocation() {
-        this(1, 1, 0, false);
-    }
-
-    /**
-     * Set up the thread allocation.  Default allocation is 1 CPU thread, 0 IO threads.
-     * (0 IO threads means that no threads are devoted exclusively to IO; they're inline on the CPU thread).
-     * @param numDataThreads Total number of threads allocated to the traversal.
-     * @param numCPUThreadsPerDataThread The number of CPU threads per data thread to allocate
-     * @param numIOThreads Total number of threads allocated exclusively to IO.
-     * @param monitorEfficiency should we monitor threading efficiency in the GATK?
-     */
-    public ThreadAllocation(final int numDataThreads,
-                            final int numCPUThreadsPerDataThread,
-                            final int numIOThreads,
-                            final boolean monitorEfficiency) {
-        if ( numDataThreads < 1 ) throw new ReviewedGATKException("numDataThreads cannot be less than 1, but saw " + numDataThreads);
-        if ( numCPUThreadsPerDataThread < 1 ) throw new ReviewedGATKException("numCPUThreadsPerDataThread cannot be less than 1, but saw " + numCPUThreadsPerDataThread);
-        if ( numIOThreads < 0 ) throw new ReviewedGATKException("numIOThreads cannot be less than 0, but saw " + numIOThreads);
-
-        this.numDataThreads = numDataThreads;
-        this.numCPUThreadsPerDataThread = numCPUThreadsPerDataThread;
-        this.numIOThreads = numIOThreads;
-        this.monitorEfficiency = monitorEfficiency;
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/samples/Affection.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/samples/Affection.java
deleted file mode 100644
index 0e5833b..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/samples/Affection.java
+++ /dev/null
@@ -1,47 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.samples;
-
-/**
- * Categorical sample trait for association and analysis
- *
- * Samples can have unknown status, be affected or unaffected by the
- * categorical trait, or they can be marked as actually having an
- * other trait value (stored in an associated value in the Sample class)
- *
- * @author Mark DePristo
- * @since Sept. 2011
- */
-public enum Affection {
-    /** Status is unknown */
-    UNKNOWN,
-    /** Suffers from the disease */
-    AFFECTED,
-    /** Unaffected by the disease */
-    UNAFFECTED,
-    /** An "other" trait: value of the trait is stored elsewhere and is an arbitrary string */
-    OTHER
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/samples/Gender.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/samples/Gender.java
deleted file mode 100644
index 0f26bc6..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/samples/Gender.java
+++ /dev/null
@@ -1,35 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.samples;
-
-/**
-* ENUM of possible human genders: male, female, or unknown
-*/
-public enum Gender {
-    MALE,
-    FEMALE,
-    UNKNOWN
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/samples/PedReader.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/samples/PedReader.java
deleted file mode 100644
index 8946d28..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/samples/PedReader.java
+++ /dev/null
@@ -1,311 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.samples;
-
-import org.apache.log4j.Logger;
-import org.broadinstitute.gatk.utils.MathUtils;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-import org.broadinstitute.gatk.utils.exceptions.UserException;
-import org.broadinstitute.gatk.utils.text.XReadLines;
-
-import java.io.*;
-import java.util.*;
-
-/**
- * Reads PED file-formatted tabular text files
- *
- * See http://www.broadinstitute.org/mpg/tagger/faq.html
- * See http://pngu.mgh.harvard.edu/~purcell/plink/data.shtml#ped
- *
- * The "ped" file format refers to the widely-used format for linkage pedigree data.
- * Each line describes a single (diploid) individual in the following format:
- *
- *      family_ID individual_ID father_ID mother_ID gender phenotype genotype_1 genotype_2 ...
- *
- * If your data lacks pedigree information (for example, unrelated case/control individuals),
- * set the father_ID and mother_ID to 0. sex denotes the individual's gender with 1=male and 2=female.
- * phenotype refers to the affected status (for association studies) where 0=unknown, 1=unaffected, 2=affected.
- * Finally, each genotype is written as two (=diploid) integer numbers (separated by whitespace),
- * where 1=A, 2=C, 3=G, 4=T. No header lines are allowed and all columns must be separated by whitespace.
- * Check out the information at the PLINK website on the "ped" file format.
- *
- * The PED file is a white-space (space or tab) delimited file: the first six columns are mandatory:
- *  Family ID
- *  Individual ID
- *  Paternal ID
- *  Maternal ID
- *  Sex (1=male; 2=female; other=unknown)
- *  Phenotype
- *
- *  The IDs are alphanumeric: the combination of family and individual ID should uniquely identify a person.
- *  A PED file must have 1 and only 1 phenotype in the sixth column. The phenotype can be either a
- *  quantitative trait or an affection status column: PLINK will automatically detect which type
- *  (i.e. based on whether a value other than 0, 1, 2 or the missing genotype code is observed).
- *  Note that the GATK actually supports arbitrary values for quantitative trait -- not just doubles --
- *  and are actually representing these values as strings instead of doubles
- *
- *  NOTE Quantitative traits with decimal points must be coded with a period/full-stop character and
- *  not a comma, i.e. 2.394 not 2,394
- *
- *  If an individual's sex is unknown, then any character other than 1 or 2 can be used.
- *  When new files are created (PED, FAM, or other which contain sex) then the original coding will be
- *  preserved. However, these individuals will be dropped from any analyses (i.e. phenotype set to missing also)
- *  and an error message will arise if an analysis that uses family information is requested and an
- *  individual of 'unknown' sex is specified as a father or mother.
- *
- *
- *  HINT You can add a comment to a PED or MAP file by starting the line with a # character. The rest of that
- *  line will be ignored. Do not start any family IDs with this character therefore.
- *
- *  Affection status, by default, should be coded:
- *  -9 missing
- *   0 missing
- *   1 unaffected
- *   2 affected
- *
- * If your file is coded 0/1 to represent unaffected/affected, then use the --1 flag:
- * plink --file mydata --1 which will specify a disease phenotype coded:
- *
- *  -9 missing
- *  0 unaffected
- *  1 affected
- *
- * The missing phenotype value for quantitative traits is, by default, -9 (this can also be used for
- * disease traits as well as 0). It can be reset by including the --missing-phenotype option:
- *
- * Genotypes (column 7 onwards) should also be white-space delimited; they can be any character
- * (e.g. 1,2,3,4 or A,C,G,T or anything else) except 0 which is, by default, the missing genotype
- * character. All markers should be biallelic. All SNPs (whether haploid or not) must have two
- * alleles specified. Either Both alleles should be missing (i.e. 0) or neither.
- *
- * No header row should be given. For example, here are two individuals typed for 3 SNPs (one row = one person):
- *
- *   FAM001  1  0 0  1  2  A A  G G  A C
- *   FAM001  2  0 0  1  2  A A  A G  0 0
- *   ...
- *
- * Note that the GATK does not support genotypes in a PED file.
- *
- * @author Mark DePristo
- * @since 2011
- */
-public class PedReader {
-    private static Logger logger = Logger.getLogger(PedReader.class);
-    final static private Set<String> CATAGORICAL_TRAIT_VALUES = new HashSet<String>(Arrays.asList("-9", "0", "1", "2"));
-    final static private String commentMarker = "#";
-
-    /**
-     * An enum that specifies which, if any, of the standard PED fields are
-     * missing from the input records.  For example, suppose we have the full record:
-     *
-     * "fam1 kid dad mom 1 2"
-     *
-     * indicating a male affected child.  This can be parsed with the -ped x.ped argument
-     * to the GATK.  Suppose we only have:
-     *
-     * "fam1 kid 1"
-     *
-     * we can parse the reduced version of this record with -ped:NO_PARENTS,NO_PHENOTYPE x.ped
-     */
-    public enum MissingPedField {
-        /**
-         * The PED records do not have the first (FAMILY_ID) argument.  The family id
-         * will be set to null / empty.
-         */
-        NO_FAMILY_ID,
-
-        /**
-         * The PED records do not have either the paternal or maternal IDs, so
-         * the corresponding IDs are set to null.
-         */
-        NO_PARENTS,
-
-        /**
-         * The PED records do not have the GENDER field, so the sex of each
-         * sample will be set to UNKNOWN.
-         */
-        NO_SEX,
-
-        /**
-         * The PED records do not have the PHENOTYPE field, so the phenotype
-         * of each sample will be set to UNKNOWN.
-         */
-        NO_PHENOTYPE
-    }
-
-    protected enum Field {
-        FAMILY_ID, INDIVIDUAL_ID, PATERNAL_ID, MATERNAL_ID, GENDER, PHENOTYPE
-    }
-
-    // phenotype
-    private final static String MISSING_VALUE1 = "-9";
-    private final static String MISSING_VALUE2 = "0";
-    private final static String PHENOTYPE_UNAFFECTED = "1";
-    private final static String PHENOTYPE_AFFECTED = "2";
-
-    // Sex
-    private final static String SEX_MALE = "1";
-    private final static String SEX_FEMALE = "2";
-    // other=unknown
-
-    public PedReader() { }
-
-    public final List<Sample> parse(File source, EnumSet<MissingPedField> missingFields, SampleDB sampleDB) throws FileNotFoundException  {
-        logger.info("Reading PED file " + source + " with missing fields: " + missingFields);
-        return parse(new FileReader(source), missingFields, sampleDB);
-    }
-
-    public final List<Sample> parse(final String source, EnumSet<MissingPedField> missingFields, SampleDB sampleDB) {
-        logger.warn("Reading PED string: \"" + source + "\" with missing fields: " + missingFields);
-        return parse(new StringReader(source.replace(";", String.format("%n"))), missingFields, sampleDB);
-    }
-
-    public final List<Sample> parse(Reader reader, EnumSet<MissingPedField> missingFields, SampleDB sampleDB) {
-        final List<String> lines = new XReadLines(reader).readLines();
-
-        // What are the record offsets?
-        final int familyPos = missingFields.contains(MissingPedField.NO_FAMILY_ID) ? -1 : 0;
-        final int samplePos = familyPos + 1;
-        final int paternalPos = missingFields.contains(MissingPedField.NO_PARENTS) ? -1 : samplePos + 1;
-        final int maternalPos = missingFields.contains(MissingPedField.NO_PARENTS) ? -1 : paternalPos + 1;
-        final int sexPos = missingFields.contains(MissingPedField.NO_SEX) ? -1 : Math.max(maternalPos, samplePos) + 1;
-        final int phenotypePos = missingFields.contains(MissingPedField.NO_PHENOTYPE) ? -1 : Math.max(sexPos, Math.max(maternalPos, samplePos)) + 1;
-        final int nExpectedFields = MathUtils.arrayMaxInt(Arrays.asList(samplePos, paternalPos, maternalPos, sexPos, phenotypePos)) + 1;
-
-        // go through once and determine properties
-        int lineNo = 1;
-        boolean isQT = false;
-        final List<String[]> splits = new ArrayList<String[]>(lines.size());
-        for ( final String line : lines ) {
-            if ( line.startsWith(commentMarker)) continue;
-            if ( line.trim().equals("") ) continue;
-
-            final String[] parts = line.split("\\s+");
-
-            if ( parts.length != nExpectedFields )
-                throw new UserException.MalformedFile(reader.toString(), "Bad PED line " + lineNo + ": wrong number of fields");
-
-            if ( phenotypePos != -1 ) {
-                isQT = isQT || ! CATAGORICAL_TRAIT_VALUES.contains(parts[phenotypePos]);
-            }
-
-            splits.add(parts);
-            lineNo++;
-        }
-        logger.info("Phenotype is other? " + isQT);
-
-        // now go through and parse each record
-        lineNo = 1;
-        final List<Sample> samples = new ArrayList<Sample>(splits.size());
-        for ( final String[] parts : splits ) {
-            String familyID = null, individualID, paternalID = null, maternalID = null;
-            Gender sex = Gender.UNKNOWN;
-            String quantitativePhenotype = Sample.UNSET_QT;
-            Affection affection = Affection.UNKNOWN;
-
-            if ( familyPos != -1 ) familyID = maybeMissing(parts[familyPos]);
-            individualID = parts[samplePos];
-            if ( paternalPos != -1 ) paternalID = maybeMissing(parts[paternalPos]);
-            if ( maternalPos != -1 ) maternalID = maybeMissing(parts[maternalPos]);
-
-            if ( sexPos != -1 ) {
-                if ( parts[sexPos].equals(SEX_MALE) ) sex = Gender.MALE;
-                else if ( parts[sexPos].equals(SEX_FEMALE) ) sex = Gender.FEMALE;
-                else sex = Gender.UNKNOWN;
-            }
-
-            if ( phenotypePos != -1 ) {
-                if ( isQT ) {
-                    if ( parts[phenotypePos].equals(MISSING_VALUE1) )
-                        affection = Affection.UNKNOWN;
-                    else {
-                        affection = Affection.OTHER;
-                        quantitativePhenotype = parts[phenotypePos];
-                    }
-                } else {
-                    if ( parts[phenotypePos].equals(MISSING_VALUE1) ) affection = Affection.UNKNOWN;
-                    else if ( parts[phenotypePos].equals(MISSING_VALUE2) ) affection = Affection.UNKNOWN;
-                    else if ( parts[phenotypePos].equals(PHENOTYPE_UNAFFECTED) ) affection = Affection.UNAFFECTED;
-                    else if ( parts[phenotypePos].equals(PHENOTYPE_AFFECTED) ) affection = Affection.AFFECTED;
-                    else throw new ReviewedGATKException("Unexpected phenotype type " + parts[phenotypePos] + " at line " + lineNo);
-                }
-            }
-
-            final Sample s = new Sample(individualID, sampleDB, familyID, paternalID, maternalID, sex, affection, quantitativePhenotype);
-            samples.add(s);
-            sampleDB.addSample(s);
-            lineNo++;
-        }
-
-        for ( final Sample sample : new ArrayList<Sample>(samples) ) {
-            Sample dad = maybeAddImplicitSample(sampleDB, sample.getPaternalID(), sample.getFamilyID(), Gender.MALE);
-            if ( dad != null ) samples.add(dad);
-
-            Sample mom = maybeAddImplicitSample(sampleDB, sample.getMaternalID(), sample.getFamilyID(), Gender.FEMALE);
-            if ( mom != null ) samples.add(mom);
-        }
-
-        return samples;
-    }
-
-    private final static String maybeMissing(final String string) {
-        if ( string.equals(MISSING_VALUE1) || string.equals(MISSING_VALUE2) )
-            return null;
-        else
-            return string;
-    }
-
-    private final Sample maybeAddImplicitSample(SampleDB sampleDB, final String id, final String familyID, final Gender gender) {
-        if ( id != null && sampleDB.getSample(id) == null ) {
-            Sample s = new Sample(id, sampleDB, familyID, null, null, gender, Affection.UNKNOWN, Sample.UNSET_QT);
-            sampleDB.addSample(s);
-            return s;
-        } else
-            return null;
-    }
-
-    /**
-     * Parses a list of tags from the command line, assuming it comes from the GATK Engine
-     * tags, and returns the corresponding EnumSet.
-     *
-     * @param arg the actual engine arg, used for the UserException if there's an error
-     * @param tags a list of string tags that should be converted to the MissingPedField value
-     * @return
-     */
-    public static final EnumSet<MissingPedField> parseMissingFieldTags(final Object arg, final List<String> tags) {
-        final EnumSet<MissingPedField> missingFields = EnumSet.noneOf(MissingPedField.class);
-
-        for ( final String tag : tags ) {
-            try {
-                missingFields.add(MissingPedField.valueOf(tag));
-            } catch ( IllegalArgumentException e ) {
-                throw new UserException.BadArgumentValue(arg.toString(), "Unknown tag " + tag + " allowed values are " + MissingPedField.values());
-            }
-        }
-
-        return missingFields;
-    }
-}
\ No newline at end of file
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/samples/PedigreeValidationType.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/samples/PedigreeValidationType.java
deleted file mode 100644
index 14fefd2..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/samples/PedigreeValidationType.java
+++ /dev/null
@@ -1,42 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.samples;
-
-/**
-*
-*/
-public enum PedigreeValidationType {
-    /**
-     * Require if a pedigree file is provided at all samples in the VCF or BAM files have a corresponding
-     * entry in the pedigree file(s).
-     */
-    STRICT,
-
-    /**
-     * Do not enforce any overlap between the VCF/BAM samples and the pedigree data
-     * */
-    SILENT
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/samples/Sample.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/samples/Sample.java
deleted file mode 100644
index 0d60c39..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/samples/Sample.java
+++ /dev/null
@@ -1,259 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.samples;
-
-
-import org.broadinstitute.gatk.utils.exceptions.UserException;
-
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.Map;
-
-/**
- *
- */
-public class Sample implements Comparable<Sample> { // implements java.io.Serializable {
-    final private String familyID, paternalID, maternalID;
-    final private Gender gender;
-    final private String otherPhenotype;
-    final private Affection affection;
-    final private String ID;
-    final private SampleDB infoDB;
-    final private Map<String, Object> properties = new HashMap<String, Object>();
-
-    public final static String UNSET_QT = null;
-
-    public Sample(final String ID, final SampleDB infoDB,
-                  final String familyID, final String paternalID, final String maternalID,
-                  final Gender gender, final Affection affection, final String otherPhenotype) {
-        this.familyID = familyID;
-        this.paternalID = paternalID;
-        this.maternalID = maternalID;
-        this.gender = gender;
-        this.otherPhenotype = otherPhenotype;
-        this.affection = affection;
-        this.ID = ID;
-        this.infoDB = infoDB;
-    }
-
-    protected Sample(final String ID,
-                     final String familyID, final String paternalID, final String maternalID,
-                     final Gender gender, final Affection affection, final String otherPhenotype) {
-        this(ID, null, familyID, paternalID, maternalID, gender, affection, otherPhenotype);
-    }
-
-    protected Sample(final String ID,
-                     final String familyID, final String paternalID, final String maternalID,
-                     final Gender gender, final Affection affection) {
-        this(ID, null, familyID, paternalID, maternalID, gender, affection, UNSET_QT);
-    }
-
-
-    public Sample(final String ID, final SampleDB infoDB,
-                  final String familyID, final String paternalID, final String maternalID, final Gender gender) {
-        this(ID, infoDB, familyID, paternalID, maternalID, gender, Affection.UNKNOWN, UNSET_QT);
-    }
-
-    public Sample(final String ID, final SampleDB infoDB, final Affection affection, final String otherPhenotype) {
-        this(ID, infoDB, null, null, null, Gender.UNKNOWN, affection, otherPhenotype);
-    }
-
-    public Sample(String id, SampleDB infoDB) {
-        this(id, infoDB, null, null, null,
-                Gender.UNKNOWN, Affection.UNKNOWN, UNSET_QT);
-    }
-
-    // -------------------------------------------------------------------------------------
-    //
-    // standard property getters
-    //
-    // -------------------------------------------------------------------------------------
-
-    public String getID() {
-        return ID;
-    }
-
-    public String getFamilyID() {
-        return familyID;
-    }
-
-    public String getPaternalID() {
-        return paternalID;
-    }
-
-    public String getMaternalID() {
-        return maternalID;
-    }
-
-    public Affection getAffection() {
-        return affection;
-    }
-
-    public boolean hasOtherPhenotype() {
-        return affection == Affection.OTHER;
-    }
-
-    public String getOtherPhenotype() {
-        return otherPhenotype;
-    }
-
-    /**
-     * Get the sample's mother
-     * @return sample object with relationship mother, if exists, or null
-     */
-    public Sample getMother() {
-        return infoDB.getSample(maternalID);
-    }
-
-    /**
-     * Get the sample's father
-     * @return sample object with relationship father, if exists, or null
-     */
-    public Sample getFather() {
-        return infoDB.getSample(paternalID);
-    }
-
-    public ArrayList<Sample> getParents(){
-        ArrayList<Sample> parents = new ArrayList<Sample>(2);
-        Sample parent = getMother();
-        if(parent != null)
-            parents.add(parent);
-        parent = getFather();
-        if(parent != null)
-            parents.add(parent);
-        return parents;
-    }
-
-    /**
-     * Get gender of the sample
-     * @return property of key "gender" - must be of type Gender
-     */
-    public Gender getGender() {
-        return gender;
-    }
-
-    @Override
-    public int compareTo(final Sample sample) {
-        return ID.compareTo(sample.getID());
-    }
-
-    @Override
-    public String toString() {
-        return String.format("Sample %s fam=%s dad=%s mom=%s gender=%s affection=%s qt=%s props=%s",
-                getID(), getFamilyID(), getPaternalID(), getMaternalID(), getGender(), getAffection(),
-                getOtherPhenotype(), properties);
-    }
-
-//    // -------------------------------------------------------------------------------------
-//    //
-//    // code for working with additional -- none standard -- properites
-//    //
-//    // -------------------------------------------------------------------------------------
-//
-//    public Map<String, Object> getExtraProperties() {
-//        return Collections.unmodifiableMap(properties);
-//    }
-//
-//    /**
-//     * Get one property
-//     * @param key key of property
-//     * @return value of property as generic object
-//     */
-//    public Object getExtraPropertyValue(final String key) {
-//        return properties.get(key);
-//    }
-//
-//    /**
-//     *
-//     * @param key property key
-//     * @return true if sample has this property (even if its value is null)
-//     */
-//    public boolean hasExtraProperty(String key) {
-//        return properties.containsKey(key);
-//    }
-
-    @Override
-    public int hashCode() {
-        return ID.hashCode();
-    }
-
-    @Override
-    public boolean equals(final Object o) {
-        if(o == null)
-            return false;
-        if(o instanceof Sample) {
-            Sample otherSample = (Sample)o;
-            return ID.equals(otherSample.ID) &&
-                    equalOrNull(familyID, otherSample.familyID) &&
-                    equalOrNull(paternalID, otherSample.paternalID) &&
-                    equalOrNull(maternalID, otherSample.maternalID) &&
-                    equalOrNull(gender, otherSample.gender) &&
-                    equalOrNull(otherPhenotype, otherSample.otherPhenotype) &&
-                    equalOrNull(affection, otherSample.affection) &&
-                    equalOrNull(properties, otherSample.properties);
-        }
-        return false;
-    }
-
-    private final static boolean equalOrNull(final Object o1, final Object o2) {
-        if ( o1 == null )
-            return o2 == null;
-        else
-            return o2 == null ? false : o1.equals(o2);
-    }
-
-    private final static <T> T mergeValues(final String name, final String field, final T o1, final T o2, final T emptyValue) {
-        if ( o1 == null || o1.equals(emptyValue) ) {
-            // take o2 if both are null, otherwise keep o2
-            return o2 == null ? null : o2;
-        } else {
-            if ( o2 == null || o2.equals(emptyValue) )
-                return o1; // keep o1, since it's a real value
-            else {
-                // both o1 and o2 have a value
-                if ( o1 == o2 )
-                    return o1;
-                else
-                    throw new UserException("Inconsistent values detected for " + name + " for field " + field + " value1 " + o1 + " value2 " + o2);
-            }
-        }
-    }
-
-    public final static Sample mergeSamples(final Sample prev, final Sample next) {
-        if ( prev.equals(next) )
-            return next;
-        else {
-            return new Sample(prev.getID(), prev.infoDB,
-                    mergeValues(prev.getID(), "Family_ID", prev.getFamilyID(), next.getFamilyID(), null),
-                    mergeValues(prev.getID(), "Paternal_ID", prev.getPaternalID(), next.getPaternalID(), null),
-                    mergeValues(prev.getID(), "Material_ID", prev.getMaternalID(), next.getMaternalID(), null),
-                    mergeValues(prev.getID(), "Gender", prev.getGender(), next.getGender(), Gender.UNKNOWN),
-                    mergeValues(prev.getID(), "Affection", prev.getAffection(), next.getAffection(), Affection.UNKNOWN),
-                    mergeValues(prev.getID(), "OtherPhenotype", prev.getOtherPhenotype(), next.getOtherPhenotype(), UNSET_QT));
-                    //mergeValues(prev.getID(), "ExtraProperties", prev.getExtraProperties(), next.getExtraProperties(), Collections.emptyMap()));
-        }
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/samples/SampleDB.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/samples/SampleDB.java
deleted file mode 100644
index 141f01b..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/samples/SampleDB.java
+++ /dev/null
@@ -1,338 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.samples;
-
-import htsjdk.samtools.SAMReadGroupRecord;
-import htsjdk.samtools.SAMRecord;
-import org.broadinstitute.gatk.utils.exceptions.GATKException;
-import htsjdk.variant.variantcontext.Genotype;
-
-import java.util.*;
-
-/**
- *
- */
-public class SampleDB {
-    /**
-     * This is where Sample objects are stored. Samples are usually accessed by their ID, which is unique, so
-     * this is stored as a HashMap.
-     */
-    private final HashMap<String, Sample> samples = new HashMap<String, Sample>();
-
-    /**
-     * Constructor takes both a SAM header and sample files because the two must be integrated.
-     */
-    public SampleDB() {
-
-    }
-
-    /**
-     * Protected function to add a single sample to the database
-     *
-     * @param sample to be added
-     */
-    protected SampleDB addSample(Sample sample) {
-        Sample prev = samples.get(sample.getID());
-        if ( prev != null )
-            sample = Sample.mergeSamples(prev, sample);
-        samples.put(sample.getID(), sample);
-        return this;
-    }
-
-    // --------------------------------------------------------------------------------
-    //
-    // Functions for getting a sample from the DB
-    //
-    // --------------------------------------------------------------------------------
-
-    /**
-     * Get a sample by its ID
-     * If an alias is passed in, return the main sample object 
-     * @param id
-     * @return sample Object with this ID, or null if this does not exist
-     */
-    public Sample getSample(String id) {
-        return samples.get(id);
-    }
-
-    /**
-     *
-     * @param read
-     * @return sample Object with this ID, or null if this does not exist
-     */
-    public Sample getSample(final SAMRecord read) {
-        return getSample(read.getReadGroup());
-    }
-
-    /**
-     *
-     * @param rg
-     * @return sample Object with this ID, or null if this does not exist
-     */
-    public Sample getSample(final SAMReadGroupRecord rg) {
-        return getSample(rg.getSample());
-    }
-
-    /**
-     * @param g Genotype
-     * @return sample Object with this ID, or null if this does not exist
-     */
-    public Sample getSample(final Genotype g) {
-        return getSample(g.getSampleName());
-    }
-
-    // --------------------------------------------------------------------------------
-    //
-    // Functions for accessing samples in the DB
-    //
-    // --------------------------------------------------------------------------------
-
-    /**
-     * Get number of sample objects
-     * @return size of samples map
-     */
-    public int sampleCount() {
-        return samples.size();
-    }
-
-    public Set<Sample> getSamples() {
-        return new LinkedHashSet<>(samples.values());
-    }
-
-    public Collection<String> getSampleNames() {
-        return Collections.unmodifiableCollection(samples.keySet());
-    }
-
-
-    /**
-     * Takes a collection of sample names and returns their corresponding sample objects
-     * Note that, since a set is returned, if you pass in a list with duplicates names there will not be any duplicates in the returned set
-     * @param sampleNameList Set of sample names
-     * @return Corresponding set of samples
-     */
-    public Set<Sample> getSamples(Collection<String> sampleNameList) {
-        HashSet<Sample> samples = new HashSet<Sample>();
-        for (String name : sampleNameList) {
-            try {
-                samples.add(getSample(name));
-            }
-            catch (Exception e) {
-                throw new GATKException("Could not get sample with the following ID: " + name, e);
-            }
-        }
-        return samples;
-    }
-
-    // --------------------------------------------------------------------------------
-    //
-    // Higher level pedigree functions
-    //
-    // --------------------------------------------------------------------------------
-
-    /**
-     * Returns a sorted set of the family IDs in all samples (excluding null ids)
-     * @return
-     */
-    public final Set<String> getFamilyIDs() {
-        return getFamilies().keySet();
-    }
-
-    /**
-     * Returns a map from family ID -> set of family members for all samples with
-     * non-null family ids
-     *
-     * @return
-     */
-    public final Map<String, Set<Sample>> getFamilies() {
-        return getFamilies(null);
-    }
-
-    /**
-     * Returns a map from family ID -> set of family members for all samples in sampleIds with
-     * non-null family ids
-     *
-     * @param sampleIds - all samples to include. If null is passed then all samples are returned.
-     * @return
-     */
-    public final Map<String, Set<Sample>> getFamilies(Collection<String> sampleIds) {
-        final Map<String, Set<Sample>> families = new TreeMap<String, Set<Sample>>();
-
-        for ( final Sample sample : samples.values() ) {
-            if(sampleIds == null || sampleIds.contains(sample.getID())){
-                final String famID = sample.getFamilyID();
-                if ( famID != null ) {
-                    if ( ! families.containsKey(famID) )
-                        families.put(famID, new TreeSet<Sample>());
-                    families.get(famID).add(sample);
-                }
-            }
-        }
-        return families;
-    }
-
-    /**
-     * Returns all the trios present in the sample database. The strictOneChild parameter determines
-     * whether multiple children of the same parents resolve to multiple trios, or are excluded
-     * @param strictOneChild - exclude pedigrees with >1 child for parental pair
-     * @return - all of the mother+father=child triplets, subject to strictOneChild
-     */
-    public final Set<Trio> getTrios(boolean strictOneChild) {
-        Set<Trio> trioSet = new HashSet<Trio>();
-        for ( String familyString : getFamilyIDs() ) {
-            Set<Sample> family = getFamily(familyString);
-            for ( Sample sample : family) {
-                if ( sample.getParents().size() == 2 ) {
-                    Trio trio = new Trio(sample.getMother(),sample.getFather(),sample);
-                    trioSet.add(trio);
-                }
-            }
-        }
-
-        if ( strictOneChild )
-            trioSet = removeTriosWithSameParents(trioSet);
-
-        return trioSet;
-    }
-
-    /**
-     * Returns all the trios present in the db. See getTrios(boolean strictOneChild)
-     * @return all the trios present in the samples db.
-     */
-    public final Set<Trio> getTrios() {
-        return getTrios(false);
-    }
-
-    /**
-     * Subsets a set of trios to only those with nonmatching founders. If two (or more) trio objects have
-     * the same mother and father, then both (all) are removed from the returned set.
-     * @param trios - a set of Trio objects
-     * @return those subset of Trio objects in the input set with nonmatching founders
-     */
-    private Set<Trio> removeTriosWithSameParents(final Set<Trio> trios) {
-        Set<Trio> filteredTrios = new HashSet<Trio>();
-        filteredTrios.addAll(trios);
-        Set<Trio> triosWithSameParents = new HashSet<Trio>();
-        for ( Trio referenceTrio : filteredTrios ) {
-            for ( Trio compareTrio : filteredTrios ) {
-                if ( referenceTrio != compareTrio &&
-                     referenceTrio.getFather().equals(compareTrio.getFather()) &&
-                     referenceTrio.getMother().equals(compareTrio.getMother()) ) {
-                    triosWithSameParents.add(referenceTrio);
-                    triosWithSameParents.add(compareTrio);
-                }
-            }
-        }
-        filteredTrios.removeAll(triosWithSameParents);
-        return filteredTrios;
-    }
-
-    /**
-     * Returns the set of all children that have both of their parents.
-     * Note that if a family is composed of more than 1 child, each child is
-     * returned.
-     * @return - all the children that have both of their parents
-     * @deprecated - getTrios() replaces this function
-     */
-    @Deprecated
-    public final Set<Sample> getChildrenWithParents(){
-        return getChildrenWithParents(false);
-    }
-
-    /**
-     * Returns the set of all children that have both of their parents.
-     * Note that if triosOnly = false, a family is composed of more than 1 child, each child is
-     * returned.
-     *
-     * This method can be used wherever trios are needed
-     *
-     * @param triosOnly - if set to true, only strict trios are returned
-     * @return - all the children that have both of their parents
-     * @deprecated - getTrios(boolean strict) replaces this function
-     * @bug -- does not work for extracting multiple generations of trios, e.g.
-     * ..........Mom1------Dad1
-     * ................|
-     * ..............Child1--------Mom2
-     * .......................|
-     * .....................Child2
-     */
-    @Deprecated
-    public final Set<Sample> getChildrenWithParents(boolean triosOnly) {
-
-        Map<String, Set<Sample>> families = getFamilies();
-        final Set<Sample> childrenWithParents = new HashSet<Sample>();
-        Iterator<Sample> sampleIterator;
-
-        for ( Set<Sample> familyMembers: families.values() ) {
-            if(triosOnly && familyMembers.size() != 3)
-                continue;
-
-            sampleIterator = familyMembers.iterator();
-            Sample sample;
-            while(sampleIterator.hasNext()){
-                sample = sampleIterator.next();
-                if(sample.getParents().size() == 2 && familyMembers.containsAll(sample.getParents()))
-                    childrenWithParents.add(sample);
-            }
-
-        }
-        return childrenWithParents;
-    }
-
-    /**
-     * Return all samples with a given family ID
-     * @param familyId
-     * @return
-     */
-    public Set<Sample> getFamily(String familyId) {
-        return getFamilies().get(familyId);
-    }
-
-    /**
-     * Returns all children of a given sample
-     * See note on the efficiency of getFamily() - since this depends on getFamily() it's also not efficient
-     * @param sample
-     * @return
-     */
-    public Set<Sample> getChildren(Sample sample) {
-        final HashSet<Sample> children = new HashSet<Sample>();
-        for ( final Sample familyMember : getFamily(sample.getFamilyID())) {
-            if ( familyMember.getMother() == sample || familyMember.getFather() == sample ) {
-                children.add(familyMember);
-            }
-        }
-        return children;
-    }
-
-    public Set<String> getFounderIds(){
-        Set<String> founders = new HashSet<String>();
-        for(Sample sample : getSamples()){
-            if(sample.getParents().size()<1)
-                founders.add(sample.getID());
-
-        }
-        return founders;
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/samples/SampleDBBuilder.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/samples/SampleDBBuilder.java
deleted file mode 100644
index 6fdb9fa..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/samples/SampleDBBuilder.java
+++ /dev/null
@@ -1,161 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.samples;
-
-import htsjdk.samtools.SAMFileHeader;
-import org.broadinstitute.gatk.engine.GenomeAnalysisEngine;
-import org.broadinstitute.gatk.utils.SampleUtils;
-import org.broadinstitute.gatk.utils.exceptions.UserException;
-
-import java.io.File;
-import java.io.FileNotFoundException;
-import java.util.*;
-
-/**
- *
- */
-public class SampleDBBuilder {
-    PedigreeValidationType validationStrictness;
-    final SampleDB sampleDB = new SampleDB();
-    final GenomeAnalysisEngine engine;
-
-    Set<Sample> samplesFromDataSources = new HashSet<Sample>();
-    Set<Sample> samplesFromPedigrees = new HashSet<Sample>();
-
-    /** for testing only */
-    protected SampleDBBuilder(PedigreeValidationType validationStrictness) {
-        engine = null;
-        this.validationStrictness = validationStrictness;
-    }
-
-    /**
-     * Constructor takes both a SAM header and sample files because the two must be integrated.
-     */
-    public SampleDBBuilder(GenomeAnalysisEngine engine, PedigreeValidationType validationStrictness) {
-        this.engine = engine;
-        this.validationStrictness = validationStrictness;
-    }
-
-    /**
-     * Hallucinates sample objects for all the samples in the SAM file and stores them
-     */
-    public SampleDBBuilder addSamplesFromSAMHeader(final SAMFileHeader header) {
-        addSamplesFromSampleNames(SampleUtils.getSAMFileSamples(header));
-        return this;
-    }
-
-    public SampleDBBuilder addSamplesFromSampleNames(final Collection<String> sampleNames) {
-        for (final String sampleName : sampleNames) {
-            if (sampleDB.getSample(sampleName) == null) {
-                final Sample newSample = new Sample(sampleName, sampleDB);
-                sampleDB.addSample(newSample);
-                samplesFromDataSources.add(newSample); // keep track of data source samples
-            }
-        }
-        return this;
-    }
-
-    public SampleDBBuilder addSamplesFromPedigreeFiles(final List<File> pedigreeFiles) {
-        for (final File pedFile : pedigreeFiles) {
-            Collection<Sample> samples = addSamplesFromPedigreeArgument(pedFile);
-            samplesFromPedigrees.addAll(samples);
-        }
-
-        return this;
-    }
-
-    public SampleDBBuilder addSamplesFromPedigreeStrings(final List<String> pedigreeStrings) {
-        for (final String pedString : pedigreeStrings) {
-            Collection<Sample> samples = addSamplesFromPedigreeArgument(pedString);
-            samplesFromPedigrees.addAll(samples);
-        }
-
-        return this;
-    }
-
-    /**
-     * Parse one sample file and integrate it with samples that are already there
-     * Fail quickly if we find any errors in the file
-     */
-    private Collection<Sample> addSamplesFromPedigreeArgument(File sampleFile) {
-        final PedReader reader = new PedReader();
-
-        try {
-            return reader.parse(sampleFile, getMissingFields(sampleFile), sampleDB);
-        } catch ( FileNotFoundException e ) {
-            throw new UserException.CouldNotReadInputFile(sampleFile, e);
-        }
-    }
-
-    private Collection<Sample> addSamplesFromPedigreeArgument(final String string) {
-        final PedReader reader = new PedReader();
-        return reader.parse(string, getMissingFields(string), sampleDB);
-    }
-
-    public SampleDB getFinalSampleDB() {
-        validate();
-        return sampleDB;
-    }
-
-    public EnumSet<PedReader.MissingPedField> getMissingFields(final Object engineArg) {
-        if ( engine == null )
-            return EnumSet.noneOf(PedReader.MissingPedField.class);
-        else {
-            final List<String> posTags = engine.getTags(engineArg).getPositionalTags();
-            return PedReader.parseMissingFieldTags(engineArg, posTags);
-        }
-    }
-
-    // --------------------------------------------------------------------------------
-    //
-    // Validation
-    //
-    // --------------------------------------------------------------------------------
-
-    protected final void validate() {
-        validatePedigreeIDUniqueness();
-        if ( validationStrictness != PedigreeValidationType.SILENT ) {
-            // check that samples in data sources are all annotated, if anything is annotated
-            if ( ! samplesFromPedigrees.isEmpty() && ! samplesFromDataSources.isEmpty() ) {
-                final Set<String> sampleNamesFromPedigrees = new HashSet<String>();
-                for ( final Sample pSample : samplesFromPedigrees )
-                    sampleNamesFromPedigrees.add(pSample.getID());
-
-                for ( final Sample dsSample : samplesFromDataSources )
-                    if ( ! sampleNamesFromPedigrees.contains(dsSample.getID()) )
-                        throw new UserException("Sample " + dsSample.getID() + " found in data sources but not in pedigree files with STRICT pedigree validation");
-            }
-        }
-    }
-
-    private void validatePedigreeIDUniqueness() {
-        Set<String> pedigreeIDs = new HashSet<String>();
-        for ( Sample sample : samplesFromPedigrees ) {
-            pedigreeIDs.add(sample.getID());
-        }
-        assert pedigreeIDs.size() == samplesFromPedigrees.size() : "The number of sample IDs extracted from the pedigree does not equal the number of samples in the pedigree. Is a sample associated with multiple families?";
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/samples/Trio.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/samples/Trio.java
deleted file mode 100644
index b5a698b..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/samples/Trio.java
+++ /dev/null
@@ -1,70 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.samples;
-
-/**
- * A class for imposing a trio structure on three samples; a common paradigm
- *
- * todo -- there should probably be an interface or abstract class "Pedigree" that generalizes the notion of
- *      -- imposing structure on samples. But given how complex pedigrees can quickly become, it's not
- *      -- clear the best way to do this.
- */
-public class Trio {
-    private Sample mother;
-    private Sample father;
-    private Sample child;
-
-    public Trio(Sample mom, Sample dad, Sample spawn) {
-        assert mom.getID().equals(spawn.getMaternalID()) && dad.getID().equals(spawn.getPaternalID()) : "Samples passed to trio constructor do not form a trio";
-        mother = mom;
-        father = dad;
-        child = spawn;
-    }
-
-    public Sample getMother() {
-        return mother;
-    }
-
-    public String getMaternalID() {
-        return mother.getID();
-    }
-
-    public Sample getFather() {
-        return father;
-    }
-
-    public String getPaternalID() {
-        return father.getID();
-    }
-
-    public Sample getChild() {
-        return child;
-    }
-
-    public String getChildID() {
-        return child.getID();
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/traversals/TAROrderedReadCache.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/traversals/TAROrderedReadCache.java
deleted file mode 100644
index d28ea3b..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/traversals/TAROrderedReadCache.java
+++ /dev/null
@@ -1,168 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.traversals;
-
-import org.broadinstitute.gatk.engine.downsampling.Downsampler;
-import org.broadinstitute.gatk.engine.downsampling.ReservoirDownsampler;
-import org.broadinstitute.gatk.utils.sam.AlignmentStartComparator;
-import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
-
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.List;
-
-/**
- * Subsystem to track a list of all reads currently live in the TraverseActiveRegions system,
- * while limiting the total number of reads to a maximum capacity.
- *
- * User: depristo
- * Date: 4/7/13
- * Time: 11:23 AM
- */
-public class TAROrderedReadCache {
-    private final int maxCapacity;
-    private ArrayList<GATKSAMRecord> undownsampledCache;
-    private Downsampler<GATKSAMRecord> downsampler;
-
-    private static final int UNDOWNSAMPLED_CACHE_MAX_INITIAL_SIZE = 10000;
-
-    /**
-     * Create a new empty ReadCache
-     * @param maxCapacity the max capacity of the read cache.
-     */
-    public TAROrderedReadCache( final int maxCapacity ) {
-        if ( maxCapacity < 0 ) throw new IllegalArgumentException("maxCapacity must be >= 0 but got " + maxCapacity);
-        this.maxCapacity = maxCapacity;
-
-        // The one we're not currently using will always be null:
-        initializeUndownsampledCache();
-        this.downsampler = null;
-    }
-
-    /**
-     * Moves all reads over to the downsampler, causing it to be used from this point on. Should be called
-     * when the undownsampledCache fills up and we need to start discarding reads. Since the
-     * ReservoirDownsampler doesn't preserve relative ordering, pop operations become expensive
-     * after this point, as they require a O(n log n) sort.
-     */
-    private void activateDownsampler() {
-        downsampler = new ReservoirDownsampler<>(maxCapacity, false);
-        downsampler.submit(undownsampledCache);
-        undownsampledCache = null; // preferable to the O(n) clear() method
-    }
-
-    /**
-     * Allocate the undownsampled cache used when we have fewer than maxCapacity items
-     */
-    private void initializeUndownsampledCache() {
-        undownsampledCache = new ArrayList<>(Math.min(maxCapacity + 1, UNDOWNSAMPLED_CACHE_MAX_INITIAL_SIZE));
-    }
-
-    /**
-     * What's the maximum number of reads we'll store in the cache?
-     * @return a positive integer
-     */
-    public int getMaxCapacity() {
-        return maxCapacity;
-    }
-
-    /**
-     * Add a single read to this cache.  Assumed to be in sorted order w.r.t. the previously added reads
-     * @param read a read to add
-     */
-    public void add( final GATKSAMRecord read ) {
-        if ( read == null ) throw new IllegalArgumentException("Read cannot be null");
-
-        if ( downsampler != null ) {
-            downsampler.submit(read);
-        }
-        else {
-            undownsampledCache.add(read);
-
-            // No more room in the undownsampledCache? Time to start downsampling
-            if ( undownsampledCache.size() > maxCapacity ) {
-                activateDownsampler();
-            }
-        }
-    }
-
-    /**
-     * Add a collection of reads to this cache.  Assumed to be in sorted order w.r.t. the previously added reads and each other
-     * @param reads a collection of reads to add
-     */
-    public void addAll( final List<GATKSAMRecord> reads ) {
-        if ( reads == null ) throw new IllegalArgumentException("Reads cannot be null");
-        for ( final GATKSAMRecord read : reads ) {
-            add(read);
-        }
-    }
-
-    /**
-     * How many reads are currently in the cache?
-     * @return a positive integer
-     */
-    public int size() {
-        return downsampler != null ? downsampler.size() : undownsampledCache.size();
-    }
-
-    /**
-     * How many reads were discarded since the last call to popCurrentReads
-     *
-     * @return number of items discarded during downsampling since last pop operation
-     */
-    public int getNumDiscarded() {
-        return downsampler != null ? downsampler.getNumberOfDiscardedItems() : 0;
-    }
-
-    /**
-     * Removes all reads currently in the cache, and returns them in sorted order (w.r.t. alignmentStart)
-     *
-     * Flushes this cache, so after this call the cache will contain no reads, and we'll be in the same
-     * initial state as the constructor would put us in, with a non-null undownsampledCache and a null
-     * downsampler.
-     *
-     * @return a list of GATKSAMRecords in this cache
-     */
-    public List<GATKSAMRecord> popCurrentReads() {
-        final List<GATKSAMRecord> poppedReads;
-
-        if ( downsampler == null ) {
-            poppedReads = undownsampledCache;  // avoid making a copy here, since we're going to allocate a new cache
-        }
-        else {
-            // If we triggered the downsampler, we need to sort the reads before returning them,
-            // since the ReservoirDownsampler is not guaranteed to preserve relative ordering of items.
-            // After consuming the downsampled items in this call to popCurrentReads(), we switch back
-            // to using the undownsampledCache until we fill up again.
-            poppedReads = downsampler.consumeFinalizedItems();  // avoid making a copy here
-            Collections.sort(poppedReads, new AlignmentStartComparator());
-            downsampler = null;
-        }
-
-        initializeUndownsampledCache();
-        return poppedReads;
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/traversals/TraversalEngine.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/traversals/TraversalEngine.java
deleted file mode 100644
index 25abafd..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/traversals/TraversalEngine.java
+++ /dev/null
@@ -1,124 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.traversals;
-
-import org.apache.log4j.Logger;
-import org.broadinstitute.gatk.engine.GenomeAnalysisEngine;
-import org.broadinstitute.gatk.engine.ReadMetrics;
-import org.broadinstitute.gatk.engine.datasources.providers.ShardDataProvider;
-import org.broadinstitute.gatk.engine.walkers.Walker;
-import org.broadinstitute.gatk.utils.GenomeLoc;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-import org.broadinstitute.gatk.utils.progressmeter.ProgressMeter;
-
-public abstract class TraversalEngine<M,T,WalkerType extends Walker<M,T>,ProviderType extends ShardDataProvider> {
-    /** our log, which we want to capture anything from this class */
-    protected static final Logger logger = Logger.getLogger(TraversalEngine.class);
-
-    protected GenomeAnalysisEngine engine;
-    private ProgressMeter progressMeter;
-
-    // ----------------------------------------------------------------------------------------------------
-    //
-    // ABSTRACT METHODS
-    //
-    // ----------------------------------------------------------------------------------------------------
-
-    /**
-     * Gets the named traversal type associated with the given traversal, such as loci, reads, etc.
-     *
-     * @return A user-friendly name for the given traversal type.
-     */
-    public abstract String getTraversalUnits();
-
-    /**
-     * this method must be implemented by all traversal engines
-     *
-     * @param walker       the walker to run with
-     * @param dataProvider the data provider that generates data given the shard
-     * @param sum          the accumulator
-     *
-     * @return an object of the reduce type
-     */
-    public abstract T traverse(WalkerType walker,
-                               ProviderType dataProvider,
-                               T sum);
-
-    /**
-     * Initialize the traversal engine.  After this point traversals can be run over the data
-     *
-     * @param engine GenomeAnalysisEngine for this traversal
-     * @param progressMeter An optional (null == optional) meter to track our progress
-     */
-    public void initialize(final GenomeAnalysisEngine engine, final Walker walker, final ProgressMeter progressMeter) {
-        if ( engine == null )
-            throw new ReviewedGATKException("BUG: GenomeAnalysisEngine cannot be null!");
-
-        this.engine = engine;
-        this.progressMeter = progressMeter;
-    }
-
-    /**
-     * For testing only.  Does not initialize the progress meter
-     *
-     * @param engine
-     */
-    protected void initialize(final GenomeAnalysisEngine engine, final Walker walker) {
-        initialize(engine, walker, null);
-    }
-
-    /**
-     * Called by the MicroScheduler when all work is done and the GATK is shutting down.
-     *
-     * To be used by subclasses that need to free up resources (such as threads)
-     */
-    public void shutdown() {
-        // by default there's nothing to do
-    }
-
-    /**
-     * Update the cumulative traversal metrics according to the data in this shard
-     *
-     * @param singleTraverseMetrics read metrics object containing the information about a single shard's worth
-     *                              of data processing
-     */
-    public void updateCumulativeMetrics(final ReadMetrics singleTraverseMetrics) {
-        engine.getCumulativeMetrics().incrementMetrics(singleTraverseMetrics);
-    }
-
-    /**
-     * Forward request to notifyOfProgress
-     *
-     * Assumes that one cycle has been completed
-     *
-     * @param loc  the location
-     */
-    public void printProgress(final GenomeLoc loc) {
-        if ( progressMeter != null )
-            progressMeter.notifyOfProgress(loc, engine.getCumulativeMetrics().getNumIterations());
-    }
-}
-
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/traversals/TraverseActiveRegions.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/traversals/TraverseActiveRegions.java
deleted file mode 100644
index 7d93311..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/traversals/TraverseActiveRegions.java
+++ /dev/null
@@ -1,719 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.traversals;
-
-import com.google.java.contract.Ensures;
-import com.google.java.contract.Requires;
-import org.apache.log4j.Logger;
-import org.broadinstitute.gatk.engine.GenomeAnalysisEngine;
-import org.broadinstitute.gatk.engine.contexts.AlignmentContext;
-import org.broadinstitute.gatk.engine.contexts.ReferenceContext;
-import org.broadinstitute.gatk.engine.datasources.providers.*;
-import org.broadinstitute.gatk.engine.refdata.RefMetaDataTracker;
-import org.broadinstitute.gatk.engine.walkers.ActiveRegionTraversalParameters;
-import org.broadinstitute.gatk.engine.walkers.ActiveRegionWalker;
-import org.broadinstitute.gatk.engine.walkers.Walker;
-import org.broadinstitute.gatk.utils.GenomeLoc;
-import org.broadinstitute.gatk.utils.SampleUtils;
-import org.broadinstitute.gatk.utils.Utils;
-import org.broadinstitute.gatk.utils.activeregion.ActiveRegion;
-import org.broadinstitute.gatk.utils.activeregion.ActivityProfile;
-import org.broadinstitute.gatk.utils.activeregion.ActivityProfileState;
-import org.broadinstitute.gatk.utils.activeregion.BandPassActivityProfile;
-import org.broadinstitute.gatk.utils.nanoScheduler.NSMapFunction;
-import org.broadinstitute.gatk.utils.nanoScheduler.NSProgressFunction;
-import org.broadinstitute.gatk.utils.nanoScheduler.NSReduceFunction;
-import org.broadinstitute.gatk.utils.nanoScheduler.NanoScheduler;
-import org.broadinstitute.gatk.utils.progressmeter.ProgressMeter;
-import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
-
-import java.io.PrintStream;
-import java.util.Collection;
-import java.util.Iterator;
-import java.util.LinkedList;
-import java.util.List;
-
-/**
- * Implement active region traversal
- *
- * User: depristo
- * Date: 1/9/13
- * Time: 4:45 PM
- *
- * Live region:
- *
- *   The ART tracks a thing called the live region.  The live region is a position on a specific contig
- *   of the alignment start of the last read we processed during this traversal.  Because the
- *   read stream is sorted, future reads must occurs in the the live region.  Therefore the the dead region
- *   (everything to the left of the live boundary) cannot have any more read data.  The live / dead
- *   regions are used to decide when we can safely call map on active regions, as only active regions
- *   contained completely within the dead region (including extensions) have a complete set of read data
- *   in the collected read list.  All of the data related to the live region is captured by the local
- *   variable spanOfLastReadSeen
- *
- */
-public final class TraverseActiveRegions<M, T> extends TraversalEngine<M,T,ActiveRegionWalker<M,T>,LocusShardDataProvider> {
-    private final static boolean DEBUG = false;
-    protected final static Logger logger = Logger.getLogger(TraversalEngine.class);
-    protected final static boolean LOG_READ_CARRYING = false;
-
-    // set by the traversal
-    private boolean walkerHasPresetRegions = false;
-    private int activeRegionExtension = -1;
-    private int maxRegionSize = -1;
-    private int minRegionSize = -1;
-
-    private final LinkedList<ActiveRegion> workQueue = new LinkedList<>();
-
-    private TAROrderedReadCache myReads = null;
-
-    private GenomeLoc lastRegionProcessed = null;
-    private GenomeLoc spanOfLastReadSeen = null;
-    private ActivityProfile activityProfile = null;
-    int maxReadsInMemory = 0;
-    ActiveRegionWalker<M, T> walker;
-
-    final NanoScheduler<MapData, M, T> nanoScheduler;
-
-    /**
-     * Data to use in the ActiveRegionWalker.map function produced by the NanoScheduler input iterator
-     */
-    private static class MapData {
-        public ActiveRegion activeRegion;
-        public RefMetaDataTracker tracker;
-
-        private MapData(ActiveRegion activeRegion, RefMetaDataTracker tracker) {
-            this.activeRegion = activeRegion;
-            this.tracker = tracker;
-        }
-    }
-
-    /**
-     * Create a single threaded active region traverser
-     */
-    public TraverseActiveRegions() {
-        this(1);
-    }
-
-    /**
-     * Create an active region traverser that uses nThreads for getting its work done
-     * @param nThreads number of threads
-     */
-    public TraverseActiveRegions(final int nThreads) {
-        nanoScheduler = new NanoScheduler<>(nThreads);
-        nanoScheduler.setProgressFunction(new NSProgressFunction<MapData>() {
-            @Override
-            public void progress(MapData lastActiveRegion) {
-                if ( lastActiveRegion != null )
-                    // note, need to use getStopLocation so we don't give an interval to ProgressMeterDaemon
-                    printProgress(lastActiveRegion.activeRegion.getLocation().getStopLocation());
-            }
-        });
-    }
-
-    /**
-     * Have the debugging output streams been initialized already?
-     *
-     * We have to do lazy initialization because when the initialize() function is called
-     * the streams aren't yet initialized in the GATK walker.
-     */
-    private boolean streamsInitialized = false;
-
-    @Override
-    public void initialize(GenomeAnalysisEngine engine, Walker walker, ProgressMeter progressMeter) {
-        super.initialize(engine, walker, progressMeter);
-
-        this.walker = (ActiveRegionWalker<M,T>)walker;
-        if ( this.walker.wantsExtendedReads() && ! this.walker.wantsNonPrimaryReads() ) {
-            throw new IllegalArgumentException("Active region walker " + this.walker + " requested extended events but not " +
-                    "non-primary reads, an inconsistent state.  Please modify the walker");
-        }
-
-        ActiveRegionTraversalParameters annotation = walker.getClass().getAnnotation(ActiveRegionTraversalParameters.class);
-        this.activeRegionExtension = this.walker.activeRegionExtension == null ? annotation.extension() : this.walker.activeRegionExtension;
-        this.maxRegionSize = this.walker.activeRegionMaxSize == null ? annotation.maxRegion() : this.walker.activeRegionMaxSize;
-        this.minRegionSize = annotation.minRegion();
-        final double bandPassSigma = this.walker.bandPassSigma == null ? annotation.bandPassSigma() : this.walker.bandPassSigma;
-        walkerHasPresetRegions = this.walker.hasPresetActiveRegions();
-
-        activityProfile = new BandPassActivityProfile(engine.getGenomeLocParser(), engine.getIntervals(), this.walker.maxProbPropagationDistance, this.walker.activeProbThreshold,
-                BandPassActivityProfile.MAX_FILTER_SIZE, bandPassSigma);
-
-        final int maxReadsAcrossSamples = annotation.maxReadsToHoldInMemoryPerSample() * SampleUtils.getSAMFileSamples(engine).size();
-        final int maxReadsToHoldInMemory = Math.min(maxReadsAcrossSamples, annotation.maxReadsToHoldTotal());
-        myReads = new TAROrderedReadCache(maxReadsToHoldInMemory);
-    }
-
-    // -------------------------------------------------------------------------------------
-    //
-    // Utility functions
-    //
-    // -------------------------------------------------------------------------------------
-
-    /**
-     * Load in the preset regions for contig into workQueue
-     *
-     * Should be called before starting to process work on contig
-     *
-     * Can only be called when walkerHasPresetRegions is true or an IllegalStateException will be thrown
-     *
-     * @param contig the contig we are about to process
-     */
-    protected void loadPresetRegionsForContigToWorkQueue(final String contig) {
-        if ( ! walkerHasPresetRegions ) throw new IllegalStateException("only appropriate to call when walker has preset regions");
-
-        final GenomeLoc contigSpan = engine.getGenomeLocParser().createOverEntireContig(contig);
-        for ( final GenomeLoc loc : this.walker.getPresetActiveRegions().getOverlapping(contigSpan) ) {
-            workQueue.add(new ActiveRegion(loc, null, true, engine.getGenomeLocParser(), getActiveRegionExtension()));
-        }
-    }
-
-    protected int getActiveRegionExtension() {
-        return activeRegionExtension;
-    }
-
-    protected int getMaxRegionSize() {
-        return maxRegionSize;
-    }
-
-    protected int getMinRegionSize() {
-        return minRegionSize;
-    }
-
-    @Override
-    public String getTraversalUnits() {
-        return "active regions";
-    }
-
-    @Override
-    public String toString() {
-        return "TraverseActiveRegions";
-    }
-
-    /**
-     * Is the loc outside of the intervals being requested for processing by the GATK?
-     * @param loc
-     * @return
-     */
-    protected boolean outsideEngineIntervals(final GenomeLoc loc) {
-        return engine.getIntervals() != null && ! engine.getIntervals().overlaps(loc);
-    }
-
-    // -------------------------------------------------------------------------------------
-    //
-    // Actual traverse function
-    //
-    // -------------------------------------------------------------------------------------
-
-    /**
-     * Did read appear in the last shard?
-     *
-     * When we transition across shard boundaries we see duplicate reads because
-     * each shard contains the reads that *overlap* the shard.  So if we just finished
-     * shard 1-1000 and are now in 1001-2000 we'll see duplicate reads from 1001
-     * that overlapped 1-1000.  This function tests read to determine if we would have
-     * seen it before by asking if read.getAlignmentStart() is less than the
-     * stop position of the last seen read at the start of the traversal.  The reason
-     * we need to use the location of the last read at the start of the traversal
-     * is that we update the lastRead during the traversal, and we only want to filter
-     * out reads whose start is before the last read of the previous shard, not the
-     * current shard.
-     *
-     * @param locOfLastReadAtTraversalStart the location of the last read seen at the start of the traversal
-     * @param read the read we want to test if it's already been seen in the last shard
-     * @return true if read would have appeared in the last shard, false otherwise
-     */
-    @Requires({"read != null"})
-    private boolean appearedInLastShard(final GenomeLoc locOfLastReadAtTraversalStart, final GATKSAMRecord read) {
-        if ( locOfLastReadAtTraversalStart == null )
-            // we're in the first shard, so obviously the answer is no
-            return false;
-        else {
-            // otherwise check to see if the alignment occurred in the previous shard
-            return read.getAlignmentStart() <= locOfLastReadAtTraversalStart.getStart()
-                    // we're on the same contig
-                    && read.getReferenceIndex() == locOfLastReadAtTraversalStart.getContigIndex();
-        }
-
-    }
-
-    @Override
-    public T traverse( final ActiveRegionWalker<M,T> walker,
-                       final LocusShardDataProvider dataProvider,
-                       T sum) {
-        if ( LOG_READ_CARRYING || logger.isDebugEnabled() )
-            logger.info(String.format("TraverseActiveRegions.traverse: Shard is %s", dataProvider));
-
-        nanoScheduler.setDebug(false);
-        final Iterator<MapData> activeRegionIterator = new ActiveRegionIterator(dataProvider);
-        final TraverseActiveRegionMap myMap = new TraverseActiveRegionMap();
-        final TraverseActiveRegionReduce myReduce = new TraverseActiveRegionReduce();
-        final T result = nanoScheduler.execute(activeRegionIterator, myMap, sum, myReduce);
-
-        return result;
-    }
-
-    private class ActiveRegionIterator implements Iterator<MapData> {
-        private final LocusShardDataProvider dataProvider;
-        private LinkedList<MapData> readyActiveRegions = new LinkedList<>();
-        private boolean done = false;
-        private final LocusView locusView;
-        private final LocusReferenceView referenceView;
-        private final GenomeLoc locOfLastReadAtTraversalStart;
-        private final IntervalReferenceOrderedView referenceOrderedDataView;
-        private final GenomeLoc currentWindow;
-        private final boolean processRemainingActiveRegions;
-
-        public ActiveRegionIterator( final LocusShardDataProvider dataProvider ) {
-            this.dataProvider = dataProvider;
-            locusView = new AllLocusView(dataProvider);
-            referenceView = new LocusReferenceView( walker, dataProvider );
-
-            // The data shard may carry a number of locations to process (due to being indexed together).
-            // This value is just the interval we are processing within the entire provider
-            currentWindow = dataProvider.getLocus();
-            final int currentWindowPos = dataProvider.getShard().getGenomeLocs().indexOf(currentWindow);
-            if ( currentWindowPos == -1 ) throw new IllegalStateException("Data provider " + dataProvider + " didn't have our current window in it " + currentWindow);
-            processRemainingActiveRegions = currentWindowPos == dataProvider.getShard().getGenomeLocs().size() - 1;
-
-            // the rodSpan covers all of the bases in the activity profile, including all of the bases
-            // through the current window interval.  This is because we may issue a query to get data for an
-            // active region spanning before the current interval as far back as the start of the current profile,
-            // if we have pending work to do that finalizes in this interval.
-            final GenomeLoc rodSpan = activityProfile.getSpan() == null ? currentWindow : activityProfile.getSpan().endpointSpan(currentWindow);
-            if ( ! dataProvider.getShard().getLocation().containsP(rodSpan) ) throw new IllegalStateException("Rod span " + rodSpan + " isn't contained within the data shard " + dataProvider.getShard().getLocation() + ", meaning we wouldn't get all of the data we need");
-            referenceOrderedDataView = new IntervalReferenceOrderedView( dataProvider, rodSpan );
-
-            // We keep processing while the next reference location is within the interval
-            locOfLastReadAtTraversalStart = spanOfLastSeenRead();
-
-            // load in the workQueue the present regions that span the current contig, if it's different from the last one
-            if ( walkerHasPresetRegions && ( lastRegionProcessed == null || ! currentWindow.onSameContig(lastRegionProcessed)) ) {
-                loadPresetRegionsForContigToWorkQueue(currentWindow.getContig());
-            }
-
-            // remember the last region we processed for sanity checking later
-            lastRegionProcessed = currentWindow;
-        }
-
-        @Override public void remove() { throw new UnsupportedOperationException("Cannot remove from ActiveRegionIterator"); }
-
-        @Override
-        public MapData next() {
-            return readyActiveRegions.pop();
-        }
-        @Override
-        public boolean hasNext() {
-            if ( engine.exceedsRuntimeLimit() ) // too much time has been dedicated to doing work, just stop
-                 return false;
-            if ( ! readyActiveRegions.isEmpty() )
-                return true;
-            if ( done )
-                return false;
-            else {
-
-                while( locusView.hasNext() ) {
-                    final AlignmentContext locus = locusView.next();
-                    final GenomeLoc location = locus.getLocation();
-
-                    rememberLastLocusLocation(location);
-
-                    // get all of the new reads that appear in the current pileup, and them to our list of reads
-                    // provided we haven't seen them before
-                    final Collection<GATKSAMRecord> reads = locusView.getLIBS().transferReadsFromAllPreviousPileups();
-                    for( final GATKSAMRecord read : reads ) {
-                        // note that ActiveRegionShards span entire contigs, so this check is in some
-                        // sense no longer necessary, as any read that appeared in the last shard would now
-                        // by definition be on a different contig.  However, the logic here doesn't hurt anything
-                        // and makes us robust should we decided to provide shards that don't fully span
-                        // contigs at some point in the future
-                        if ( ! appearedInLastShard(locOfLastReadAtTraversalStart, read) ) {
-                            rememberLastReadLocation(read);
-                            myReads.add(read);
-                        }
-                    }
-
-                    // skip this location -- it's not part of our engine intervals
-                    if ( outsideEngineIntervals(location) )
-                        continue;
-
-                    // we've move across some interval boundary, restart profile
-                    final boolean flushProfile = ! activityProfile.isEmpty()
-                            && ( activityProfile.getContigIndex() != location.getContigIndex()
-                            || location.getStart() != activityProfile.getStop() + 1);
-                    final List<MapData> newActiveRegions = prepActiveRegionsForProcessing(walker, flushProfile, false, referenceOrderedDataView);
-
-                    dataProvider.getShard().getReadMetrics().incrementNumIterations();
-
-                    // create reference context. Note that if we have a pileup of "extended events", the context will
-                    // hold the (longest) stretch of deleted reference bases (if deletions are present in the pileup).
-                    final ReferenceContext refContext = referenceView.getReferenceContext(location);
-
-                    // Iterate forward to get all reference ordered data covering this location
-                    final RefMetaDataTracker tracker = referenceOrderedDataView.getReferenceOrderedDataAtLocus(locus.getLocation());
-
-                    // Call the walkers isActive function for this locus and add them to the list to be integrated later
-                    addIsActiveResult(walker, tracker, refContext, locus);
-
-                    maxReadsInMemory = Math.max(myReads.size(), maxReadsInMemory);
-                    printProgress(location);
-
-                    if ( ! newActiveRegions.isEmpty() ) {
-                        readyActiveRegions.addAll(newActiveRegions);
-                        if ( DEBUG )
-                            for ( final MapData region : newActiveRegions )
-                                logger.info("Adding region to queue for processing " + region.activeRegion);
-                        return true;
-                    }
-                }
-
-                if ( processRemainingActiveRegions ) {
-                    // we've run out of stuff to process, and since shards now span entire contig boundaries
-                    // we should finalized our regions.  This allows us to continue to use our referenceOrderedDataView
-                    // which would otherwise be shutdown.  Only followed when the microschedule says that we're
-                    // inside of the last window in the current shard
-                    readyActiveRegions.addAll(prepActiveRegionsForProcessing(walker, true, true, referenceOrderedDataView));
-                }
-
-                return ! readyActiveRegions.isEmpty();
-            }
-        }
-    }
-
-    // -------------------------------------------------------------------------------------
-    //
-    // Functions to manage and interact with the live / dead zone
-    //
-    // -------------------------------------------------------------------------------------
-
-    /**
-     * Update the live region to reflect that the last read we've seen in the traversal is read
-     *
-     * Requires that sequential calls always be provided reads in coordinate sorted order
-     *
-     * @param read the last read we've seen during the traversal
-     */
-    @Requires({"read != null"})
-    protected void rememberLastReadLocation(final GATKSAMRecord read) {
-        final GenomeLoc currentLocation = engine.getGenomeLocParser().createGenomeLoc(read);
-        if ( spanOfLastReadSeen == null )
-            spanOfLastReadSeen = currentLocation;
-        else {
-            if ( currentLocation.isBefore(spanOfLastReadSeen) )
-                throw new IllegalStateException("Updating last read seen in the traversal with read " + read + " with span " + currentLocation + " but this occurs before the previously seen read " + spanOfLastReadSeen);
-            spanOfLastReadSeen = currentLocation;
-        }
-    }
-
-    /**
-     * Update the live region to reflect that we've reached locus
-     *
-     * This function is complementary to #rememberLastReadLocation, but if we don't have any reads for a long
-     * time (e.g., there's no coverage) we will keep active regions around far longer than necessary.
-     *
-     * Only updates the span if it's beyond the last seen
-     *
-     * @param currentLocation the current location we've processed on the genome
-     */
-    protected void rememberLastLocusLocation(final GenomeLoc currentLocation) {
-        if ( spanOfLastReadSeen == null )
-            spanOfLastReadSeen = currentLocation;
-        else {
-            if ( currentLocation.isPast(spanOfLastReadSeen) )
-                spanOfLastReadSeen = currentLocation;
-        }
-    }
-
-
-    /**
-     * Get a GenomeLoc indicating the start (heading to the right) of the live ART region.
-     * @return the left-most position of the live region on the genome
-     */
-    protected GenomeLoc spanOfLastSeenRead() {
-        return spanOfLastReadSeen;
-    }
-
-    /**
-     * Is the active region completely within the traversal's dead zone?
-     *
-     * @param region the region we want to test
-     * @return true if the extended location of region is completely within the current dead zone, false otherwise
-     */
-    protected boolean regionCompletelyWithinDeadZone(final ActiveRegion region) {
-        if ( spanOfLastSeenRead() == null )
-            return false;
-
-        final int contigCmp = region.getExtendedLoc().compareContigs(spanOfLastSeenRead());
-        if ( contigCmp > 0 )
-            throw new IllegalStateException("Active region " + region + " on a contig after last seen read " + spanOfLastSeenRead());
-        else {
-            return contigCmp < 0 || region.getExtendedLoc().getStop() < spanOfLastSeenRead().getStart();
-        }
-    }
-
-    /**
-     * Is the read dead?  That is, can it no longer be in any future active region, and therefore can be discarded?
-     *
-     * read: start |--------> stop ------ stop + extension
-     * region:                      start |-----------------| end
-     *
-     * Since the regions are coming in order, read could potentially be contained in a future interval if
-     * stop + activeRegionExtension >= end.  If, on the other hand, stop + extension is < the end
-     * of this region, then we can discard it, since any future region could only include reads
-     * up to end + 1 - extension.
-     *
-     * Note that this function doesn't care about the dead zone.  We're assuming that by
-     * actually calling this function with an active region that region is already in the dead zone,
-     * so checking that the read is in the dead zone doesn't make sense.
-     *
-     * @param read the read we're testing
-     * @param activeRegion the current active region
-     * @return true if the read is dead, false other
-     */
-    @Requires({"read != null", "activeRegion != null"})
-    private boolean readCannotOccurInAnyMoreActiveRegions(final GATKSAMRecord read, final ActiveRegion activeRegion) {
-        return read.getReferenceIndex() < activeRegion.getLocation().getContigIndex() ||
-                ( read.getReferenceIndex() == activeRegion.getLocation().getContigIndex()
-                        && read.getAlignmentEnd() + getActiveRegionExtension() < activeRegion.getLocation().getStop() );
-    }
-
-    // -------------------------------------------------------------------------------------
-    //
-    // Functions to write out activity profiles and active regions
-    //
-    // -------------------------------------------------------------------------------------
-
-    /**
-     * Initialize the debugging output streams (activity profile and active regions), if not done so already
-     */
-    @Ensures("streamsInitialized == true")
-    private void initializeOutputStreamsIfNecessary() {
-        if ( ! streamsInitialized ) {
-            streamsInitialized = true;
-            if ( walker.activityProfileOutStream != null ) {
-                printIGVFormatHeader(walker.activityProfileOutStream, "line", "ActivityProfile");
-            }
-
-            if ( walker.activeRegionOutStream != null ) {
-                printIGVFormatHeader(walker.activeRegionOutStream, "line", "ActiveRegions");
-            }
-        }
-    }
-
-    /**
-     * Helper function to write out a IGV formatted line to out, at loc, with values
-     *
-     * http://www.broadinstitute.org/software/igv/IGV
-     *
-     * @param out a non-null PrintStream where we'll write our line
-     * @param graphType the type of graph to show in IGV for this track
-     * @param columns the column names for this IGV track
-     */
-    @Requires({
-            "out != null",
-            "graphType != null",
-            "columns.length > 0"
-    })
-    private void printIGVFormatHeader(final PrintStream out, final String graphType, final String ... columns ) {
-        out.printf("#track graphType=%s%n", graphType);
-        out.printf("Chromosome\tStart\tEnd\tFeature\t%s%n", Utils.join("\t", columns));
-
-    }
-
-    /**
-     * Helper function to write out a IGV formatted line to out, at loc, with values
-     *
-     * http://www.broadinstitute.org/software/igv/IGV
-     *
-     * @param out a non-null PrintStream where we'll write our line
-     * @param loc the location of values
-     * @param featureName string name of this feature (see IGV format)
-     * @param values the floating point values to associate with loc and feature name in out
-     */
-    @Requires({
-            "out != null",
-            "loc != null",
-            "values.length > 0"
-    })
-    private void printIGVFormatRow(final PrintStream out, final GenomeLoc loc, final String featureName, final double ... values) {
-        // note that start and stop are 0 based, but the stop is exclusive so we don't subtract 1
-        out.printf("%s\t%d\t%d\t%s", loc.getContig(), loc.getStart() - 1, loc.getStop(), featureName);
-        for ( final double value : values )
-            out.print(String.format("\t%.5f", value));
-        out.println();
-    }
-
-    /**
-     * Write out activity profile information, if requested by the walker
-     *
-     * @param states the states in the current activity profile
-     */
-    @Requires("states != null")
-    private void writeActivityProfile(final List<ActivityProfileState> states) {
-        if ( walker.activityProfileOutStream != null ) {
-            initializeOutputStreamsIfNecessary();
-            for ( final ActivityProfileState state : states ) {
-                printIGVFormatRow(walker.activityProfileOutStream, state.getLoc(), "state", Math.min(state.isActiveProb, 1.0));
-            }
-        }
-    }
-
-    /**
-     * Write out each active region to the walker activeRegionOutStream
-     *
-     * @param region the region we're currently operating on
-     */
-    @Requires("region != null")
-    private void writeActiveRegion(final ActiveRegion region) {
-        if( walker.activeRegionOutStream != null ) {
-            initializeOutputStreamsIfNecessary();
-            printIGVFormatRow(walker.activeRegionOutStream, region.getLocation().getStartLocation(),
-                    "end-marker", 0.0);
-            printIGVFormatRow(walker.activeRegionOutStream, region.getLocation(),
-                    "size=" + region.getLocation().size(), region.isActive() ? 1.0 : -1.0);
-        }
-    }
-
-
-    // -------------------------------------------------------------------------------------
-    //
-    // Functions to process active regions that are ready for map / reduce calls
-    //
-    // -------------------------------------------------------------------------------------
-
-    /**
-     * Invoke the walker isActive function, and incorporate its result into the activity profile
-     *
-     * @param walker the walker we're running
-     * @param tracker the ref meta data tracker to pass on to the isActive function of walker
-     * @param refContext the refContext to pass on to the isActive function of walker
-     * @param locus the AlignmentContext to pass on to the isActive function of walker
-     */
-    private void addIsActiveResult(final ActiveRegionWalker<M, T> walker,
-                                   final RefMetaDataTracker tracker, final ReferenceContext refContext,
-                                   final AlignmentContext locus) {
-        // must be called, even if we won't use the result, to satisfy walker contract
-        final ActivityProfileState state = walker.isActive( tracker, refContext, locus );
-        if ( walker.forceActive) state.isActiveProb = 1.0;
-        if ( ! walkerHasPresetRegions ) {
-            activityProfile.add(state);
-        }
-    }
-
-    /**
-     * Take the individual isActive calls and integrate them into contiguous active regions and
-     * add these blocks of work to the work queue
-     * band-pass filter the list of isActive probabilities and turn into active regions
-     */
-    private List<MapData> prepActiveRegionsForProcessing(final ActiveRegionWalker<M, T> walker,
-                                                              final boolean flushActivityProfile,
-                                                              final boolean forceAllRegionsToBeActive,
-                                                              final IntervalReferenceOrderedView referenceOrderedDataView) {
-        if ( ! walkerHasPresetRegions ) {
-            // We don't have preset regions, so we get our regions from the activity profile
-            final Collection<ActiveRegion> activeRegions = activityProfile.popReadyActiveRegions(getActiveRegionExtension(), getMinRegionSize(), getMaxRegionSize(), flushActivityProfile);
-            workQueue.addAll(activeRegions);
-            if ( ! activeRegions.isEmpty() && logger.isDebugEnabled() ) logger.debug("Integrated " + activityProfile.size() + " isActive calls into " + activeRegions.size() + " regions." );
-        }
-
-        // Since we've traversed sufficiently past this point (or this contig!) in the workQueue we can unload those regions and process them
-        final LinkedList<MapData> readyRegions = new LinkedList<>();
-        while( workQueue.peek() != null ) {
-            final ActiveRegion activeRegion = workQueue.peek();
-            if ( forceAllRegionsToBeActive || regionCompletelyWithinDeadZone(activeRegion) ) {
-                writeActivityProfile(activeRegion.getSupportingStates());
-                writeActiveRegion(activeRegion);
-                readyRegions.add(prepActiveRegionForProcessing(workQueue.remove(), walker, referenceOrderedDataView));
-            } else {
-                break;
-            }
-        }
-
-        return readyRegions;
-
-    }
-
-    private MapData prepActiveRegionForProcessing(final ActiveRegion activeRegion,
-                                                  final ActiveRegionWalker<M, T> walker,
-                                                  final IntervalReferenceOrderedView referenceOrderedDataView) {
-        final List<GATKSAMRecord> stillLive = new LinkedList<>();
-        for ( final GATKSAMRecord read : myReads.popCurrentReads() ) {
-            boolean killed = false;
-            final GenomeLoc readLoc = this.engine.getGenomeLocParser().createGenomeLoc( read );
-
-            if( activeRegion.getLocation().overlapsP( readLoc ) ) {
-                activeRegion.add(read);
-
-                if ( ! walker.wantsNonPrimaryReads() ) {
-                    killed = true;
-                }
-            } else if( walker.wantsExtendedReads() && activeRegion.getExtendedLoc().overlapsP( readLoc )) {
-                activeRegion.add( read );
-            }
-
-            // if the read hasn't already been killed, check if it cannot occur in any more active regions, and maybe kill it
-            if ( ! killed && readCannotOccurInAnyMoreActiveRegions(read, activeRegion) ) {
-                killed = true;
-            }
-
-            // keep track of all of the still live active regions
-            if ( ! killed ) stillLive.add(read);
-        }
-        myReads.addAll(stillLive);
-
-        if ( logger.isDebugEnabled() ) {
-            logger.debug(">> Map call with " + activeRegion.getReads().size() + " " + (activeRegion.isActive() ? "active" : "inactive") + " reads @ " + activeRegion.getLocation() + " with full extent: " + activeRegion.getReadSpanLoc());
-        }
-
-        if ( LOG_READ_CARRYING )
-            logger.info(String.format("Processing region %20s span=%3d active?=%5b with %4d reads.  Overall max reads carried is %s",
-                    activeRegion.getLocation(), activeRegion.getLocation().size(), activeRegion.isActive(), activeRegion.size(), maxReadsInMemory));
-
-        // prepare the RefMetaDataTracker information
-        final GenomeLoc loc = activeRegion.getLocation();
-        // get all of the RODs that cover the active region (without extension)
-        final RefMetaDataTracker tracker = referenceOrderedDataView.getReferenceOrderedDataForInterval(loc);
-        // trim away all of the features that occurred before this location, as we will not need them in the future
-        referenceOrderedDataView.trimCurrentFeaturesToLoc(loc);
-
-        return new MapData(activeRegion, tracker);
-    }
-
-    private class TraverseActiveRegionMap implements NSMapFunction<MapData, M> {
-        @Override
-        public M apply(final MapData mapData) {
-            if ( DEBUG ) logger.info("Executing walker.map for " + mapData.activeRegion + " in thread " + Thread.currentThread().getName());
-            return walker.map(mapData.activeRegion, mapData.tracker);
-        }
-    }
-
-    private class TraverseActiveRegionReduce implements NSReduceFunction<M, T> {
-        @Override
-        public T apply(M one, T sum) {
-            return walker.reduce(one, sum);
-        }
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/traversals/TraverseDuplicates.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/traversals/TraverseDuplicates.java
deleted file mode 100644
index 6cffe94..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/traversals/TraverseDuplicates.java
+++ /dev/null
@@ -1,205 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.traversals;
-
-import htsjdk.samtools.SAMRecord;
-import org.apache.log4j.Logger;
-import org.broadinstitute.gatk.engine.contexts.AlignmentContext;
-import org.broadinstitute.gatk.engine.datasources.providers.ReadShardDataProvider;
-import org.broadinstitute.gatk.engine.datasources.providers.ReadView;
-import org.broadinstitute.gatk.engine.iterators.PushbackIterator;
-import org.broadinstitute.gatk.engine.walkers.DuplicateWalker;
-import org.broadinstitute.gatk.utils.GenomeLoc;
-import org.broadinstitute.gatk.utils.pileup.ReadBackedPileupImpl;
-import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
-
-import java.util.*;
-
-/**
- * @author Mark DePristo
- * @version 0.1
- *          <p/>
- *          Class TraverseDuplicates
- *          <p/>
- *          This class handles traversing lists of duplicate reads in the new shardable style
- */
-public class TraverseDuplicates<M,T> extends TraversalEngine<M,T,DuplicateWalker<M,T>,ReadShardDataProvider> {
-    /** our log, which we want to capture anything from this class */
-    protected static Logger logger = Logger.getLogger(TraverseDuplicates.class);
-
-    /** Turn this to true to enable logger.debug output */
-    private final boolean DEBUG = false;
-
-    @Override
-    public String getTraversalUnits() {
-        return "dups";
-    }
-
-    private List<GATKSAMRecord> readsAtLoc(final GATKSAMRecord read, PushbackIterator<SAMRecord> iter) {
-        GenomeLoc site = engine.getGenomeLocParser().createGenomeLoc(read);
-        ArrayList<GATKSAMRecord> l = new ArrayList<GATKSAMRecord>();
-
-        l.add(read);
-        for (SAMRecord read2 : iter) {
-            GenomeLoc site2 = engine.getGenomeLocParser().createGenomeLoc(read2);
-
-            // the next read starts too late
-            if (site2.getStart() != site.getStart()) {
-                iter.pushback(read2);
-                break;
-            } else {
-                l.add((GATKSAMRecord) read2);
-            }
-        }
-
-        return l;
-    }
-
-    /**
-     * Creates a set of lists of reads, where each list contains reads from the same underlying molecule according
-     * to their duplicate flag and their (and mate, if applicable) start/end positions.
-     *
-     * @param reads the list of reads to split into unique molecular samples
-     * @return
-     */
-    protected Set<List<GATKSAMRecord>> uniqueReadSets(List<GATKSAMRecord> reads) {
-        Set<List<GATKSAMRecord>> readSets = new LinkedHashSet<List<GATKSAMRecord>>();
-
-        // for each read, find duplicates, and either add the read to its duplicate list or start a new one
-        for ( GATKSAMRecord read : reads ) {
-            List<GATKSAMRecord> readSet = findDuplicateReads(read, readSets);
-
-            if ( readSet == null ) {
-                readSets.add(new ArrayList<GATKSAMRecord>(Arrays.asList(read)));    // copy so I can add to the list
-            } else {
-                readSet.add(read);
-            }
-        }
-
-        return readSets;
-    }
-
-    /**
-     * Find duplicate reads for read in the set of unique reads.  This is effective a duplicate marking algorithm,
-     * but it relies for safety's sake on the file itself being marked by a true duplicate marking algorithm.  Pair
-     * and single-end read aware.
-     *
-     * @param read
-     * @param readSets
-     * @return The list of duplicate reads that read is a member of, or null if it's the only one of its kind
-     */
-    protected List<GATKSAMRecord> findDuplicateReads(GATKSAMRecord read, Set<List<GATKSAMRecord>> readSets ) {
-        if ( read.getReadPairedFlag() ) {
-            // paired
-            final GenomeLoc readMateLoc = engine.getGenomeLocParser().createGenomeLoc(read.getMateReferenceName(), read.getMateAlignmentStart(), read.getMateAlignmentStart());
-
-            for (List<GATKSAMRecord> reads : readSets) {
-                GATKSAMRecord key = reads.get(0);
-
-                // read and key start at the same place, and either the this read and the key
-                // share a mate location or the read is flagged as a duplicate
-                if ( read.getAlignmentStart() == key.getAlignmentStart() && key.getReadPairedFlag() && ( key.getDuplicateReadFlag() || read.getDuplicateReadFlag() ) ) {
-                    // at least one has to be marked as a duplicate
-                    final GenomeLoc keyMateLoc = engine.getGenomeLocParser().createGenomeLoc(key.getMateReferenceName(), key.getMateAlignmentStart(), key.getMateAlignmentStart());
-                    if ( readMateLoc.compareTo(keyMateLoc) == 0 ) {
-                        // we are at the same position as the dup and have the same mat pos, it's a dup
-                        if (DEBUG) logger.debug(String.format("  => Adding read to dups list: %s %d %s vs. %s", read, reads.size(), readMateLoc, keyMateLoc));
-                        return reads;
-                    }
-                }
-            }
-        } else {
-            for (List<GATKSAMRecord> reads : readSets) {
-                GATKSAMRecord key = reads.get(0);
-                boolean v = (! key.getReadPairedFlag()) && read.getAlignmentStart() == key.getAlignmentStart() && ( key.getDuplicateReadFlag() || read.getDuplicateReadFlag() ) && read.getReadLength() == key.getReadLength();
-                //System.out.printf("%s %s %b %b %d %d %d %d => %b%n",
-                //        read.getReadPairedFlag(), key.getReadPairedFlag(), read.getDuplicateReadFlag(), key.getDuplicateReadFlag(),
-                //        read.getAlignmentStart(), key.getAlignmentStart(), read.getReadLength(), key.getReadLength(), v);
-                if ( v ) {
-                    //System.out.printf("Returning reads...%n");
-                    return reads;
-                }
-            }
-        }
-
-        return null;
-    }
-
-    // --------------------------------------------------------------------------------------------------------------
-    //
-    // new style interface to the system
-    //
-    // --------------------------------------------------------------------------------------------------------------
-
-    /**
-     * Traverse by reads, given the data and the walker
-     *
-     * @param walker the walker to execute over
-     * @param sum    of type T, the return from the walker
-     *
-     * @return the result type T, the product of all the reduce calls
-     */
-    public T traverse(DuplicateWalker<M, T> walker,
-                      ReadShardDataProvider dataProvider,
-                      T sum) {
-        PushbackIterator<SAMRecord> iter = new PushbackIterator<SAMRecord>(new ReadView(dataProvider).iterator());
-
-        /**
-         * while we still have more reads:
-         * ok, here's the idea.  We get all the reads that start at the same position in the genome
-         * We then split the list of reads into sublists of reads:
-         *   -> those with the same mate pair position, for paired reads
-         *   -> those flagged as unpaired and duplicated but having the same start and end
-         */
-        boolean done = walker.isDone();
-        for (SAMRecord read : iter) {
-            if ( done ) break;
-            // get the genome loc from the read
-            GenomeLoc site = engine.getGenomeLocParser().createGenomeLoc(read);
-
-            Set<List<GATKSAMRecord>> readSets = uniqueReadSets(readsAtLoc((GATKSAMRecord) read, iter));
-            if ( DEBUG ) logger.debug(String.format("*** TraverseDuplicates.traverse at %s with %d read sets", site, readSets.size()));
-
-            // Jump forward in the reference to this locus location
-            AlignmentContext locus = new AlignmentContext(site, new ReadBackedPileupImpl(site));
-
-            // update the number of duplicate sets we've seen
-            dataProvider.getShard().getReadMetrics().incrementNumIterations();
-
-            // actually call filter and map, accumulating sum
-            final boolean keepMeP = walker.filter(site, locus, readSets);
-            if (keepMeP) {
-                M x = walker.map(site, locus, readSets);
-                sum = walker.reduce(x, sum);
-            }
-
-            printProgress(site.getStopLocation());
-            done = walker.isDone();
-        }
-
-        return sum;
-    }
-}
\ No newline at end of file
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/traversals/TraverseLociNano.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/traversals/TraverseLociNano.java
deleted file mode 100644
index 02c1a7e..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/traversals/TraverseLociNano.java
+++ /dev/null
@@ -1,304 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.traversals;
-
-import org.broadinstitute.gatk.engine.WalkerManager;
-import org.broadinstitute.gatk.engine.contexts.AlignmentContext;
-import org.broadinstitute.gatk.engine.contexts.ReferenceContext;
-import org.broadinstitute.gatk.engine.datasources.providers.*;
-import org.broadinstitute.gatk.engine.refdata.RefMetaDataTracker;
-import org.broadinstitute.gatk.engine.walkers.DataSource;
-import org.broadinstitute.gatk.engine.walkers.LocusWalker;
-import org.broadinstitute.gatk.engine.walkers.Walker;
-import org.broadinstitute.gatk.utils.GenomeLoc;
-import org.broadinstitute.gatk.utils.nanoScheduler.NSMapFunction;
-import org.broadinstitute.gatk.utils.nanoScheduler.NSProgressFunction;
-import org.broadinstitute.gatk.utils.nanoScheduler.NSReduceFunction;
-import org.broadinstitute.gatk.utils.nanoScheduler.NanoScheduler;
-import org.broadinstitute.gatk.utils.pileup.ReadBackedPileupImpl;
-
-import java.util.Iterator;
-
-/**
- * A simple solution to iterating over all reference positions over a series of genomic locations.
- */
-public class TraverseLociNano<M,T> extends TraversalEngine<M,T,LocusWalker<M,T>,LocusShardDataProvider> {
-    /** our log, which we want to capture anything from this class */
-    private static final boolean DEBUG = false;
-
-    final NanoScheduler<MapData, MapResult, T> nanoScheduler;
-
-    public TraverseLociNano(int nThreads) {
-        nanoScheduler = new NanoScheduler<MapData, MapResult, T>(nThreads);
-        nanoScheduler.setProgressFunction(new TraverseLociProgress());
-    }
-
-    @Override
-    public final String getTraversalUnits() {
-        return "sites";
-    }
-
-    protected static class TraverseResults<T> {
-        final int numIterations;
-        final T reduceResult;
-
-        public TraverseResults(int numIterations, T reduceResult) {
-            this.numIterations = numIterations;
-            this.reduceResult = reduceResult;
-        }
-    }
-
-    @Override
-    public T traverse( LocusWalker<M,T> walker,
-                       LocusShardDataProvider dataProvider,
-                       T sum) {
-        logger.debug(String.format("TraverseLoci.traverse: Shard is %s", dataProvider));
-
-        final LocusView locusView = getLocusView( walker, dataProvider );
-
-        if ( locusView.hasNext() ) { // trivial optimization to avoid unnecessary processing when there's nothing here at all
-            //ReferenceOrderedView referenceOrderedDataView = new ReferenceOrderedView( dataProvider );
-            ReferenceOrderedView referenceOrderedDataView = null;
-            if ( WalkerManager.getWalkerDataSource(walker) != DataSource.REFERENCE_ORDERED_DATA )
-                referenceOrderedDataView = new ManagingReferenceOrderedView( dataProvider );
-            else
-                referenceOrderedDataView = (RodLocusView)locusView;
-
-            final LocusReferenceView referenceView = new LocusReferenceView( walker, dataProvider );
-
-            final TraverseResults<T> result = traverse( walker, locusView, referenceView, referenceOrderedDataView, sum );
-            sum = result.reduceResult;
-            dataProvider.getShard().getReadMetrics().incrementNumIterations(result.numIterations);
-        }
-
-        // We have a final map call to execute here to clean up the skipped based from the
-        // last position in the ROD to that in the interval
-        if ( WalkerManager.getWalkerDataSource(walker) == DataSource.REFERENCE_ORDERED_DATA && ! walker.isDone() ) {
-            // only do this if the walker isn't done!
-            final RodLocusView rodLocusView = (RodLocusView)locusView;
-            final long nSkipped = rodLocusView.getLastSkippedBases();
-            if ( nSkipped > 0 ) {
-                final GenomeLoc site = rodLocusView.getLocOneBeyondShard();
-                final AlignmentContext ac = new AlignmentContext(site, new ReadBackedPileupImpl(site), nSkipped);
-                final M x = walker.map(null, null, ac);
-                sum = walker.reduce(x, sum);
-            }
-        }
-
-        return sum;
-    }
-
-    /**
-     * Gets the best view of loci for this walker given the available data.  The view will function as a 'trigger track'
-     * of sorts, providing a consistent interface so that TraverseLoci doesn't need to be reimplemented for any new datatype
-     * that comes along.
-     * @param walker walker to interrogate.
-     * @param dataProvider Data which which to drive the locus view.
-     * @return A view of the locus data, where one iteration of the locus view maps to one iteration of the traversal.
-     */
-    private LocusView getLocusView( Walker<M,T> walker, LocusShardDataProvider dataProvider ) {
-        final DataSource dataSource = WalkerManager.getWalkerDataSource(walker);
-        if( dataSource == DataSource.READS )
-            return new CoveredLocusView(dataProvider);
-        else if( dataSource == DataSource.REFERENCE ) //|| ! GenomeAnalysisEngine.instance.getArguments().enableRodWalkers )
-            return new AllLocusView(dataProvider);
-        else if( dataSource == DataSource.REFERENCE_ORDERED_DATA )
-            return new RodLocusView(dataProvider);
-        else
-            throw new UnsupportedOperationException("Unsupported traversal type: " + dataSource);
-    }
-
-    protected TraverseResults<T> traverse(final LocusWalker<M, T> walker,
-                                          final LocusView locusView,
-                                          final LocusReferenceView referenceView,
-                                          final ReferenceOrderedView referenceOrderedDataView,
-                                          final T sum) {
-        nanoScheduler.setDebug(DEBUG);
-        final TraverseLociMap myMap = new TraverseLociMap(walker);
-        final TraverseLociReduce myReduce = new TraverseLociReduce(walker);
-
-        final MapDataIterator inputIterator = new MapDataIterator(locusView, referenceView, referenceOrderedDataView);
-        final T result = nanoScheduler.execute(inputIterator, myMap, sum, myReduce);
-
-        return new TraverseResults<T>(inputIterator.numIterations, result);
-    }
-
-    /**
-     * Create iterator that provides inputs for all map calls into MapData, to be provided
-     * to NanoScheduler for Map/Reduce
-     */
-    private class MapDataIterator implements Iterator<MapData> {
-        final LocusView locusView;
-        final LocusReferenceView referenceView;
-        final ReferenceOrderedView referenceOrderedDataView;
-        int numIterations = 0;
-
-        private MapDataIterator(LocusView locusView, LocusReferenceView referenceView, ReferenceOrderedView referenceOrderedDataView) {
-            this.locusView = locusView;
-            this.referenceView = referenceView;
-            this.referenceOrderedDataView = referenceOrderedDataView;
-        }
-
-        @Override
-        public boolean hasNext() {
-            return locusView.hasNext() && ! engine.exceedsRuntimeLimit();
-        }
-
-        @Override
-        public MapData next() {
-            final AlignmentContext locus = locusView.next();
-            final GenomeLoc location = locus.getLocation();
-
-            //logger.info("Pulling data from MapDataIterator at " + location);
-
-            // create reference context. Note that if we have a pileup of "extended events", the context will
-            // hold the (longest) stretch of deleted reference bases (if deletions are present in the pileup).
-            final ReferenceContext refContext = referenceView.getReferenceContext(location);
-
-            // Iterate forward to get all reference ordered data covering this location
-            final RefMetaDataTracker tracker = referenceOrderedDataView.getReferenceOrderedDataAtLocus(location);
-
-            numIterations++;
-            return new MapData(locus, refContext,  tracker);
-        }
-
-        @Override
-        public void remove() {
-            throw new UnsupportedOperationException("Cannot remove elements from MapDataIterator");
-        }
-    }
-
-    @Override
-    public void shutdown() {
-        nanoScheduler.shutdown();
-    }
-
-    /**
-     * The input data needed for each map call.  The read, the reference, and the RODs
-     */
-    private class MapData {
-        final AlignmentContext alignmentContext;
-        final ReferenceContext refContext;
-        final RefMetaDataTracker tracker;
-
-        private MapData(final AlignmentContext alignmentContext, ReferenceContext refContext, RefMetaDataTracker tracker) {
-            this.alignmentContext = alignmentContext;
-            this.refContext = refContext;
-            this.tracker = tracker;
-        }
-
-        @Override
-        public String toString() {
-            return "MapData " + alignmentContext.getLocation();
-        }
-    }
-
-    /**
-     * Contains the results of a map call, indicating whether the call was good, filtered, or done
-     */
-    private class MapResult {
-        final M value;
-        final boolean reduceMe;
-
-        /**
-         * Create a MapResult with value that should be reduced
-         *
-         * @param value the value to reduce
-         */
-        private MapResult(final M value) {
-            this.value = value;
-            this.reduceMe = true;
-        }
-
-        /**
-         * Create a MapResult that shouldn't be reduced
-         */
-        private MapResult() {
-            this.value = null;
-            this.reduceMe = false;
-        }
-    }
-
-    /**
-     * A static object that tells reduce that the result of map should be skipped (filtered or done)
-     */
-    private final MapResult SKIP_REDUCE = new MapResult();
-
-    /**
-     * MapFunction for TraverseReads meeting NanoScheduler interface requirements
-     *
-     * Applies walker.map to MapData, returning a MapResult object containing the result
-     */
-    private class TraverseLociMap implements NSMapFunction<MapData, MapResult> {
-        final LocusWalker<M,T> walker;
-
-        private TraverseLociMap(LocusWalker<M, T> walker) {
-            this.walker = walker;
-        }
-
-        @Override
-        public MapResult apply(final MapData data) {
-            if ( ! walker.isDone() ) {
-                final boolean keepMeP = walker.filter(data.tracker, data.refContext, data.alignmentContext);
-                if (keepMeP) {
-                    final M x = walker.map(data.tracker, data.refContext, data.alignmentContext);
-                    return new MapResult(x);
-                }
-            }
-            return SKIP_REDUCE;
-        }
-    }
-
-    /**
-     * NSReduceFunction for TraverseReads meeting NanoScheduler interface requirements
-     *
-     * Takes a MapResult object and applies the walkers reduce function to each map result, when applicable
-     */
-    private class TraverseLociReduce implements NSReduceFunction<MapResult, T> {
-        final LocusWalker<M,T> walker;
-
-        private TraverseLociReduce(LocusWalker<M, T> walker) {
-            this.walker = walker;
-        }
-
-        @Override
-        public T apply(MapResult one, T sum) {
-            if ( one.reduceMe )
-                // only run reduce on values that aren't DONE or FAILED
-                return walker.reduce(one.value, sum);
-            else
-                return sum;
-        }
-    }
-
-    private class TraverseLociProgress implements NSProgressFunction<MapData> {
-        @Override
-        public void progress(MapData lastProcessedMap) {
-            if (lastProcessedMap.alignmentContext != null)
-                printProgress(lastProcessedMap.alignmentContext.getLocation());
-        }
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/traversals/TraverseReadPairs.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/traversals/TraverseReadPairs.java
deleted file mode 100644
index c68e109..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/traversals/TraverseReadPairs.java
+++ /dev/null
@@ -1,129 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.traversals;
-
-import htsjdk.samtools.SAMRecord;
-import htsjdk.samtools.SAMRecordCoordinateComparator;
-import org.apache.log4j.Logger;
-import org.broadinstitute.gatk.engine.datasources.providers.ReadShardDataProvider;
-import org.broadinstitute.gatk.engine.datasources.providers.ReadView;
-import org.broadinstitute.gatk.engine.datasources.reads.Shard;
-import org.broadinstitute.gatk.engine.walkers.DataSource;
-import org.broadinstitute.gatk.engine.walkers.ReadPairWalker;
-import org.broadinstitute.gatk.engine.walkers.Requires;
-
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.List;
-
-/**
- * Traverse over a collection of read pairs, assuming that a given shard will contain all pairs.
- *
- * @author mhanna
- * @version 0.1
- */
- at Requires({DataSource.REFERENCE})
-public class TraverseReadPairs<M,T> extends TraversalEngine<M,T, ReadPairWalker<M,T>,ReadShardDataProvider> {
-
-    /** our log, which we want to capture anything from this class */
-    protected static final Logger logger = Logger.getLogger(TraverseReadPairs.class);
-
-    @Override
-    public String getTraversalUnits() {
-        return "read pairs";
-    }
-
-    /**
-     * Traverse by reads, given the data and the walker
-     *
-     * @param walker the walker to execute over
-     * @param sum    of type T, the return from the walker
-     *
-     * @return the result type T, the product of all the reduce calls
-     */
-    public T traverse(ReadPairWalker<M, T> walker,
-                      ReadShardDataProvider dataProvider,
-                      T sum) {
-        logger.debug(String.format("TraverseReadsPairs.traverse Covered dataset is %s", dataProvider));
-
-        if( !dataProvider.hasReads() )
-            throw new IllegalArgumentException("Unable to traverse reads; no read data is available.");
-
-        ReadView reads = new ReadView(dataProvider);
-        List<SAMRecord> pairs = new ArrayList<SAMRecord>();
-
-        boolean done = walker.isDone();
-        for(SAMRecord read: reads) {
-            if ( done ) break;
-            dataProvider.getShard().getReadMetrics().incrementNumReadsSeen();
-
-            if(pairs.size() == 0 || pairs.get(0).getReadName().equals(read.getReadName())) {
-                // If this read name is the same as the last, accumulate it.
-                pairs.add(read);
-            }
-            else {
-                // Otherwise, walk over the accumulated list, then start fresh with the new read.
-                sum = walkOverPairs(walker,dataProvider.getShard(),pairs,sum);
-                pairs.clear();
-                pairs.add(read);
-
-                printProgress(null);
-            }
-
-            done = walker.isDone();
-        }
-
-        // If any data was left in the queue, process it.
-        if(pairs.size() > 0)
-            sum = walkOverPairs(walker,dataProvider.getShard(),pairs,sum);
-
-        return sum;
-    }
-
-    /**
-     * Filter / map / reduce over a single pair.
-     * @param walker The walker.
-     * @param shard The shard currently being processed.
-     * @param reads The reads in the pair.
-     * @param sum The accumulator.
-     * @return The accumulator after application of the given read pairing.
-     */
-    private T walkOverPairs(ReadPairWalker<M,T> walker, Shard shard, List<SAMRecord> reads, T sum) {
-        // update the number of reads we've seen
-        shard.getReadMetrics().incrementNumIterations();
-
-        // Sort the reads present in coordinate order.
-        Collections.sort(reads,new SAMRecordCoordinateComparator());
-
-        final boolean keepMeP = walker.filter(reads);
-        if (keepMeP) {
-            M x = walker.map(reads);
-            sum = walker.reduce(x, sum);
-        }
-
-        return sum;
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/traversals/TraverseReadsNano.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/traversals/TraverseReadsNano.java
deleted file mode 100644
index 2ce752b..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/traversals/TraverseReadsNano.java
+++ /dev/null
@@ -1,256 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.traversals;
-
-import htsjdk.samtools.SAMRecord;
-import org.apache.log4j.Logger;
-import org.broadinstitute.gatk.engine.contexts.ReferenceContext;
-import org.broadinstitute.gatk.engine.datasources.providers.ReadBasedReferenceOrderedView;
-import org.broadinstitute.gatk.engine.datasources.providers.ReadReferenceView;
-import org.broadinstitute.gatk.engine.datasources.providers.ReadShardDataProvider;
-import org.broadinstitute.gatk.engine.datasources.providers.ReadView;
-import org.broadinstitute.gatk.engine.refdata.RefMetaDataTracker;
-import org.broadinstitute.gatk.engine.walkers.ReadWalker;
-import org.broadinstitute.gatk.utils.nanoScheduler.NSMapFunction;
-import org.broadinstitute.gatk.utils.nanoScheduler.NSProgressFunction;
-import org.broadinstitute.gatk.utils.nanoScheduler.NSReduceFunction;
-import org.broadinstitute.gatk.utils.nanoScheduler.NanoScheduler;
-import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
-
-import java.util.Iterator;
-import java.util.LinkedList;
-
-/**
- * A nano-scheduling version of TraverseReads.
- *
- * Implements the traversal of a walker that accepts individual reads, the reference, and
- * RODs per map call.  Directly supports shared memory parallelism via NanoScheduler
- *
- * @author depristo
- * @version 1.0
- * @date 9/2/2012
- */
-public class TraverseReadsNano<M,T> extends TraversalEngine<M,T,ReadWalker<M,T>,ReadShardDataProvider> {
-    /** our log, which we want to capture anything from this class */
-    private final static boolean PRE_READ_ALL_MAP_DATA = true;
-    protected static final Logger logger = Logger.getLogger(TraverseReadsNano.class);
-    private static final boolean DEBUG = false;
-    final NanoScheduler<MapData, MapResult, T> nanoScheduler;
-
-    public TraverseReadsNano(int nThreads) {
-        nanoScheduler = new NanoScheduler<MapData, MapResult, T>(nThreads);
-        nanoScheduler.setProgressFunction(new NSProgressFunction<MapData>() {
-            @Override
-            public void progress(MapData lastProcessedMap) {
-                if ( lastProcessedMap.refContext != null )
-                    // note, need to use getStopLocation so we don't give an interval to ProgressMeterDaemon
-                    printProgress(lastProcessedMap.refContext.getLocus().getStopLocation());
-            }
-        });
-    }
-
-    @Override
-    public String getTraversalUnits() {
-        return "reads";
-    }
-
-    /**
-     * Traverse by reads, given the data and the walker
-     *
-     * @param walker the walker to traverse with
-     * @param dataProvider the provider of the reads data
-     * @param sum the value of type T, specified by the walker, to feed to the walkers reduce function
-     * @return the reduce variable of the read walker
-     */
-    public T traverse(ReadWalker<M,T> walker,
-                      ReadShardDataProvider dataProvider,
-                      T sum) {
-        if ( logger.isDebugEnabled() )
-            logger.debug(String.format("TraverseReadsNano.traverse Covered dataset is %s", dataProvider));
-
-        if( !dataProvider.hasReads() )
-            throw new IllegalArgumentException("Unable to traverse reads; no read data is available.");
-
-        nanoScheduler.setDebug(DEBUG);
-        final TraverseReadsMap myMap = new TraverseReadsMap(walker);
-        final TraverseReadsReduce myReduce = new TraverseReadsReduce(walker);
-
-        final Iterator<MapData> aggregatedInputs = aggregateMapData(dataProvider);
-        final T result = nanoScheduler.execute(aggregatedInputs, myMap, sum, myReduce);
-
-        return result;
-    }
-
-    /**
-     * Aggregate all of the inputs for all map calls into MapData, to be provided
-     * to NanoScheduler for Map/Reduce
-     *
-     * @param dataProvider the source of our data
-     * @return a linked list of MapData objects holding the read, ref, and ROD info for every map/reduce
-     *          should execute
-     */
-    private Iterator<MapData> aggregateMapData(final ReadShardDataProvider dataProvider) {
-        final Iterator<MapData> it = makeDataIterator(dataProvider);
-        if ( PRE_READ_ALL_MAP_DATA ) {
-            final LinkedList<MapData> l = new LinkedList<MapData>();
-            while ( it.hasNext() ) l.add(it.next());
-            return l.iterator();
-        } else {
-            return it;
-        }
-    }
-
-
-    private Iterator<MapData> makeDataIterator(final ReadShardDataProvider dataProvider) {
-        return new Iterator<MapData> ()  {
-            final ReadView reads = new ReadView(dataProvider);
-            final ReadReferenceView reference = new ReadReferenceView(dataProvider);
-            final ReadBasedReferenceOrderedView rodView = new ReadBasedReferenceOrderedView(dataProvider);
-            final Iterator<SAMRecord> readIterator = reads.iterator();
-
-            @Override public boolean hasNext() { return ! engine.exceedsRuntimeLimit() && readIterator.hasNext(); }
-
-            @Override
-            public MapData next() {
-                final SAMRecord read = readIterator.next();
-                final ReferenceContext refContext = ! read.getReadUnmappedFlag()
-                        ? reference.getReferenceContext(read)
-                        : null;
-
-                // if the read is mapped, create a metadata tracker
-                final RefMetaDataTracker tracker = read.getReferenceIndex() >= 0
-                        ? rodView.getReferenceOrderedDataForRead(read)
-                        : null;
-
-                // update the number of reads we've seen
-                dataProvider.getShard().getReadMetrics().incrementNumIterations();
-
-                return new MapData((GATKSAMRecord)read, refContext, tracker);
-            }
-
-            @Override public void remove() {
-                throw new UnsupportedOperationException("Remove not supported");
-            }
-        };
-    }
-
-    @Override
-    public void shutdown() {
-        nanoScheduler.shutdown();
-    }
-
-    /**
-     * The input data needed for each map call.  The read, the reference, and the RODs
-     */
-    private class MapData {
-        final GATKSAMRecord read;
-        final ReferenceContext refContext;
-        final RefMetaDataTracker tracker;
-
-        private MapData(GATKSAMRecord read, ReferenceContext refContext, RefMetaDataTracker tracker) {
-            this.read = read;
-            this.refContext = refContext;
-            this.tracker = tracker;
-        }
-    }
-
-    /**
-     * Contains the results of a map call, indicating whether the call was good, filtered, or done
-     */
-    private class MapResult {
-        final M value;
-        final boolean reduceMe;
-
-        /**
-         * Create a MapResult with value that should be reduced
-         *
-         * @param value the value to reduce
-         */
-        private MapResult(final M value) {
-            this.value = value;
-            this.reduceMe = true;
-        }
-
-        /**
-         * Create a MapResult that shouldn't be reduced
-         */
-        private MapResult() {
-            this.value = null;
-            this.reduceMe = false;
-        }
-    }
-
-    /**
-     * A static object that tells reduce that the result of map should be skipped (filtered or done)
-     */
-    private final MapResult SKIP_REDUCE = new MapResult();
-
-    /**
-     * MapFunction for TraverseReads meeting NanoScheduler interface requirements
-     *
-     * Applies walker.map to MapData, returning a MapResult object containing the result
-     */
-    private class TraverseReadsMap implements NSMapFunction<MapData, MapResult> {
-        final ReadWalker<M,T> walker;
-
-        private TraverseReadsMap(ReadWalker<M, T> walker) {
-            this.walker = walker;
-        }
-
-        @Override
-        public MapResult apply(final MapData data) {
-            if ( ! walker.isDone() ) {
-                final boolean keepMeP = walker.filter(data.refContext, data.read);
-                if (keepMeP)
-                    return new MapResult(walker.map(data.refContext, data.read, data.tracker));
-            }
-
-            return SKIP_REDUCE;
-        }
-    }
-
-    /**
-     * NSReduceFunction for TraverseReads meeting NanoScheduler interface requirements
-     *
-     * Takes a MapResult object and applies the walkers reduce function to each map result, when applicable
-     */
-    private class TraverseReadsReduce implements NSReduceFunction<MapResult, T> {
-        final ReadWalker<M,T> walker;
-
-        private TraverseReadsReduce(ReadWalker<M, T> walker) {
-            this.walker = walker;
-        }
-
-        @Override
-        public T apply(MapResult one, T sum) {
-            if ( one.reduceMe )
-                // only run reduce on values that aren't DONE or FAILED
-                return walker.reduce(one.value, sum);
-            else
-                return sum;
-        }
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/traversals/package-info.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/traversals/package-info.java
deleted file mode 100644
index 72d1099..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/traversals/package-info.java
+++ /dev/null
@@ -1,26 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.traversals;
\ No newline at end of file
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/walkers/ActiveRegionTraversalParameters.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/walkers/ActiveRegionTraversalParameters.java
deleted file mode 100644
index 7c428cd..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/walkers/ActiveRegionTraversalParameters.java
+++ /dev/null
@@ -1,97 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.walkers;
-
-import org.broadinstitute.gatk.utils.activeregion.BandPassActivityProfile;
-
-import java.lang.annotation.Documented;
-import java.lang.annotation.Inherited;
-import java.lang.annotation.Retention;
-import java.lang.annotation.RetentionPolicy;
-
-/**
- * Describes the parameters that this walker requires of the active region traversal
- *
- * User: rpoplin
- * Date: 1/18/12
- */
- at Documented
- at Inherited
- at Retention(RetentionPolicy.RUNTIME)
-
-public @interface ActiveRegionTraversalParameters {
-    /**
-     * How far to either side of the active region itself should we include reads?
-     *
-     * That is, if the active region is 10 bp wide, and extension is 5, ART will provide
-     * the walker with active regions 10 bp, with 5 bp of extension on either side, and
-     * all reads that cover the 20 bp of the region + extension.
-     *
-     * @return the size of the active region extension we'd like
-     */
-    public int extension() default 0;
-
-    /**
-     * The minimum number of bp for an active region, when we need to chop it up into pieces because
-     * it's become too big.  This only comes into effect when there's literally no good place to chop
-     * that does make the region smaller than this value.
-     *
-     * @return the min size in bp of regions
-     */
-    public int minRegion() default 50;
-
-    /**
-     * The maximum size in bp of active regions wanted by this walker
-     *
-     * Active regions larger than this value are automatically cut up by ART into smaller
-     * regions of size <= this value.
-     *
-     * @return the max size in bp of regions
-     */
-    public int maxRegion() default 1500;
-
-    /**
-     * The variance value for the Gaussian kernel of the band pass filter employed by ART
-     * @return the breadth of the band pass gaussian kernel we want for our traversal
-     */
-    public double bandPassSigma() default BandPassActivityProfile.DEFAULT_SIGMA;
-
-    /**
-     * What is the maximum number of reads we're willing to hold in memory per sample
-     * during the traversal?  This limits our exposure to unusually large amounts
-     * of coverage in the engine.
-     * @return the maximum number of reads we're willing to hold in memory
-     */
-    public int maxReadsToHoldInMemoryPerSample() default 3000;
-
-    /**
-     * No matter what the per sample value says, we will never hold more than this
-     * number of reads in memory at any time.  Provides an upper bound on the total number
-     * of reads in the case where we have a lot of samples.
-     * @return the maximum number of reads to hold in memory
-     */
-    public int maxReadsToHoldTotal() default 1000000;
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/walkers/ActiveRegionWalker.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/walkers/ActiveRegionWalker.java
deleted file mode 100644
index 9ff68bc..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/walkers/ActiveRegionWalker.java
+++ /dev/null
@@ -1,196 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.walkers;
-
-import com.google.java.contract.Ensures;
-import htsjdk.samtools.reference.IndexedFastaSequenceFile;
-import htsjdk.tribble.Feature;
-import org.broadinstitute.gatk.utils.commandline.*;
-import org.broadinstitute.gatk.engine.contexts.AlignmentContext;
-import org.broadinstitute.gatk.engine.contexts.ReferenceContext;
-import org.broadinstitute.gatk.engine.downsampling.DownsampleType;
-import org.broadinstitute.gatk.engine.filters.*;
-import org.broadinstitute.gatk.engine.refdata.RefMetaDataTracker;
-import org.broadinstitute.gatk.utils.GenomeLoc;
-import org.broadinstitute.gatk.utils.GenomeLocParser;
-import org.broadinstitute.gatk.utils.GenomeLocSortedSet;
-import org.broadinstitute.gatk.utils.activeregion.ActiveRegion;
-import org.broadinstitute.gatk.utils.activeregion.ActiveRegionReadState;
-import org.broadinstitute.gatk.utils.activeregion.ActivityProfileState;
-import org.broadinstitute.gatk.utils.interval.IntervalMergingRule;
-import org.broadinstitute.gatk.utils.interval.IntervalSetRule;
-import org.broadinstitute.gatk.utils.interval.IntervalUtils;
-
-import java.io.PrintStream;
-import java.util.*;
-
-/**
- * Base class for all the Active Region Walkers.
- * User: rpoplin
- * Date: 12/7/11
- */
-
- at By(DataSource.READS)
- at Requires({DataSource.READS, DataSource.REFERENCE})
- at PartitionBy(PartitionType.READ)
- at ActiveRegionTraversalParameters(extension=50,maxRegion=1500)
- at ReadFilters({UnmappedReadFilter.class, NotPrimaryAlignmentFilter.class, DuplicateReadFilter.class, FailsVendorQualityCheckFilter.class, MappingQualityUnavailableFilter.class})
- at Downsample(by = DownsampleType.BY_SAMPLE, toCoverage = 1000)
- at RemoveProgramRecords
-public abstract class ActiveRegionWalker<MapType, ReduceType> extends Walker<MapType, ReduceType> {
-    /**
-     * If provided, this walker will write out its activity profile (per bp probabilities of being active)
-     * to this file in the IGV formatted TAB deliminated output:
-     *
-     * http://www.broadinstitute.org/software/igv/IGV
-     *
-     * Intended to make debugging the activity profile calculations easier
-     */
-    @Output(fullName="activityProfileOut", shortName="APO", doc="Output the raw activity profile results in IGV format", required = false, defaultToStdout = false)
-    public PrintStream activityProfileOutStream = null;
-
-    /**
-     * If provided, this walker will write out its active and inactive regions
-     * to this file in the IGV formatted TAB deliminated output:
-     *
-     * http://www.broadinstitute.org/software/igv/IGV
-     *
-     * Intended to make debugging the active region calculations easier
-     */
-    @Output(fullName="activeRegionOut", shortName="ARO", doc="Output the active region to this IGV formatted file", required = false, defaultToStdout = false)
-    public PrintStream activeRegionOutStream = null;
-
-    @Advanced
-    @Input(fullName="activeRegionIn", shortName="AR", doc="Use this interval list file as the active regions to process", required = false)
-    protected List<IntervalBinding<Feature>> activeRegionBindings = null;
-
-    @Advanced
-    @Argument(fullName="activeRegionExtension", shortName="activeRegionExtension", doc="The active region extension; if not provided defaults to Walker annotated default", required = false)
-    public Integer activeRegionExtension = null;
-
-    /**
-     * For the active region walker to treat all bases as active.  Useful for debugging when you want to force something like
-     * the HaplotypeCaller to process a specific interval you provide the GATK
-     */
-    @Advanced
-    @Argument(fullName="forceActive", shortName="forceActive", doc="If provided, all bases will be tagged as active", required = false)
-    public boolean forceActive = false;
-
-    @Advanced
-    @Argument(fullName="activeRegionMaxSize", shortName="activeRegionMaxSize", doc="The active region maximum size; if not provided defaults to Walker annotated default", required = false)
-    public Integer activeRegionMaxSize = null;
-
-    @Advanced
-    @Argument(fullName="bandPassSigma", shortName="bandPassSigma", doc="The sigma of the band pass filter Gaussian kernel; if not provided defaults to Walker annotated default", required = false)
-    public Double bandPassSigma = null;
-
-    /*
-     * For active region limits in ActivityProfile
-*   */
-    @Hidden
-    @Argument(fullName = "maxProbPropagationDistance", shortName = "maxProbPropDist", minValue = 0, doc="Region probability propagation distance beyond it's maximum size.", required = false)
-    public Integer maxProbPropagationDistance = 50;
-
-    @Advanced
-    @Argument(fullName = "activeProbabilityThreshold", shortName = "ActProbThresh", minValue = 0.0, maxValue = 1.0, doc="Threshold for the probability of a profile state being active.", required = false)
-    public Double activeProbThreshold = 0.002;
-
-    private GenomeLocSortedSet presetActiveRegions = null;
-
-    @Override
-    public void initialize() {
-        if( activeRegionBindings == null ) { return; }
-        List<GenomeLoc> allIntervals = new ArrayList<GenomeLoc>(0);
-        for ( IntervalBinding intervalBinding : activeRegionBindings ) {
-            List<GenomeLoc> intervals = intervalBinding.getIntervals(this.getToolkit());
-
-            if ( intervals.isEmpty() ) {
-                logger.warn("The interval file " + intervalBinding.getSource() + " contains no intervals that could be parsed.");
-            }
-
-            allIntervals = IntervalUtils.mergeListsBySetOperator(intervals, allIntervals, IntervalSetRule.UNION);
-        }
-
-        presetActiveRegions = IntervalUtils.sortAndMergeIntervals(this.getToolkit().getGenomeLocParser(), allIntervals, IntervalMergingRule.ALL);
-    }
-
-    /**
-     * Does this walker want us to use a set of preset action regions instead of dynamically using the result of isActive?
-     * @return true if yes, false if no
-     */
-    public boolean hasPresetActiveRegions() {
-        return presetActiveRegions != null;
-    }
-
-    /**
-     * Get the set of preset active regions, or null if none were provided
-     * @return a set of genome locs specifying fixed active regions requested by the walker, or null if none exist
-     */
-    public GenomeLocSortedSet getPresetActiveRegions() {
-        return presetActiveRegions;
-    }
-
-    // Do we actually want to operate on the context?
-    public boolean filter(final RefMetaDataTracker tracker, final ReferenceContext ref, final AlignmentContext context) {
-        return true;    // We are keeping all the reads
-    }
-
-    public EnumSet<ActiveRegionReadState> desiredReadStates() {
-        return EnumSet.of(ActiveRegionReadState.PRIMARY);
-    }
-
-    public final boolean wantsNonPrimaryReads() {
-        return desiredReadStates().contains(ActiveRegionReadState.NONPRIMARY);
-    }
-
-    public boolean wantsExtendedReads() {
-        return desiredReadStates().contains(ActiveRegionReadState.EXTENDED);
-    }
-
-    public boolean wantsUnmappedReads() {
-        return desiredReadStates().contains(ActiveRegionReadState.UNMAPPED);
-    }
-
-    // Determine probability of active status over the AlignmentContext
-    @Ensures({"result.isActiveProb >= 0.0", "result.isActiveProb <= 1.0"})
-    public abstract ActivityProfileState isActive(final RefMetaDataTracker tracker, final ReferenceContext ref, final AlignmentContext context);
-
-    // Map over the ActiveRegion
-    public abstract MapType map(final ActiveRegion activeRegion, final RefMetaDataTracker metaDataTracker);
-
-    public final GenomeLocSortedSet extendIntervals( final GenomeLocSortedSet intervals, final GenomeLocParser genomeLocParser, IndexedFastaSequenceFile reference ) {
-        final int activeRegionExtension = this.getClass().getAnnotation(ActiveRegionTraversalParameters.class).extension();
-        final List<GenomeLoc> allIntervals = new ArrayList<GenomeLoc>();
-        for( final GenomeLoc interval : intervals.toList() ) {
-            final int start = Math.max( 1, interval.getStart() - activeRegionExtension );
-            final int stop = Math.min( reference.getSequenceDictionary().getSequence(interval.getContig()).getSequenceLength(), interval.getStop() + activeRegionExtension );
-            allIntervals.add( genomeLocParser.createGenomeLoc(interval.getContig(), start, stop) );
-        }
-        return IntervalUtils.sortAndMergeIntervals(genomeLocParser, allIntervals, IntervalMergingRule.ALL);
-    }
-
-
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/walkers/Allows.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/walkers/Allows.java
deleted file mode 100644
index 7188fd0..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/walkers/Allows.java
+++ /dev/null
@@ -1,51 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.walkers;
-
-import java.lang.annotation.*;
-/**
- * User: hanna
- * Date: May 19, 2009
- * Time: 10:05:01 AM
- * BROAD INSTITUTE SOFTWARE COPYRIGHT NOTICE AND AGREEMENT
- * Software and documentation are copyright 2005 by the Broad Institute.
- * All rights are reserved.
- *
- * Users acknowledge that this software is supplied without any warranty or support.
- * The Broad Institute is not responsible for its use, misuse, or
- * functionality.
- */
-
-/**
- * Determines what data sources are allowed by a given walker.
- */
- at Documented
- at Inherited
- at Retention(RetentionPolicy.RUNTIME)
- at Target(ElementType.TYPE)
-public @interface Allows {
-    DataSource[] value();
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/walkers/Attribution.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/walkers/Attribution.java
deleted file mode 100644
index ded2941..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/walkers/Attribution.java
+++ /dev/null
@@ -1,39 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.walkers;
-
-import java.lang.annotation.*;
-
-/**
- * Allow users to provide attribution text that will appear prominently in the log output.
- */
- at Documented
- at Inherited
- at Retention(RetentionPolicy.RUNTIME)
- at Target(ElementType.TYPE)
-public @interface Attribution {
-    public String[] value();
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/walkers/BAQMode.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/walkers/BAQMode.java
deleted file mode 100644
index 931381e..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/walkers/BAQMode.java
+++ /dev/null
@@ -1,56 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.walkers;
-
-import org.broadinstitute.gatk.engine.iterators.ReadTransformer;
-
-import java.lang.annotation.*;
-
-/**
- * User: hanna
- * Date: May 14, 2009
- * Time: 1:51:22 PM
- * BROAD INSTITUTE SOFTWARE COPYRIGHT NOTICE AND AGREEMENT
- * Software and documentation are copyright 2005 by the Broad Institute.
- * All rights are reserved.
- *
- * Users acknowledge that this software is supplied without any warranty or support.
- * The Broad Institute is not responsible for its use, misuse, or
- * functionality.
- */
-
-/**
- * Allows the walker to indicate what type of data it wants to consume.
- */
-
- at Documented
- at Inherited
- at Retention(RetentionPolicy.RUNTIME)
- at Target(ElementType.TYPE)
-public @interface BAQMode {
-    public abstract org.broadinstitute.gatk.utils.baq.BAQ.QualityMode QualityMode() default org.broadinstitute.gatk.utils.baq.BAQ.QualityMode.OVERWRITE_QUALS;
-    public abstract ReadTransformer.ApplicationTime ApplicationTime() default ReadTransformer.ApplicationTime.ON_INPUT;
-}
\ No newline at end of file
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/walkers/By.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/walkers/By.java
deleted file mode 100644
index 3962c98..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/walkers/By.java
+++ /dev/null
@@ -1,53 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.walkers;
-
-import java.lang.annotation.*;
-/**
- * User: hanna
- * Date: May 14, 2009
- * Time: 1:51:22 PM
- * BROAD INSTITUTE SOFTWARE COPYRIGHT NOTICE AND AGREEMENT
- * Software and documentation are copyright 2005 by the Broad Institute.
- * All rights are reserved.
- *
- * Users acknowledge that this software is supplied without any warranty or support.
- * The Broad Institute is not responsible for its use, misuse, or
- * functionality.
- */
-
-/**
- * Allows the walker to indicate what type of data it wants to consume.
- */
-
- at Documented
- at Inherited
- at Retention(RetentionPolicy.RUNTIME)
- at Target(ElementType.TYPE)
-public @interface By {
-    DataSource value();
-}
-
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/walkers/DataSource.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/walkers/DataSource.java
deleted file mode 100644
index fab9840..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/walkers/DataSource.java
+++ /dev/null
@@ -1,58 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.walkers;
-/**
- * User: hanna
- * Date: May 14, 2009
- * Time: 2:12:33 PM
- * BROAD INSTITUTE SOFTWARE COPYRIGHT NOTICE AND AGREEMENT
- * Software and documentation are copyright 2005 by the Broad Institute.
- * All rights are reserved.
- *
- * Users acknowledge that this software is supplied without any warranty or support.
- * The Broad Institute is not responsible for its use, misuse, or
- * functionality.
- */
-
-/**
- * Allow user to choose between a number of different data sources.
- */
-public enum DataSource {
-    /**
-     * Does this walker require read (BAM) data to work?
-     */
-    READS,
-
-    /**
-     * Does this walker require reference data to work?
-     */
-    REFERENCE,
-
-    /**
-     * Does this walker require reference order data (VCF) to work?
-     */
-    REFERENCE_ORDERED_DATA
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/walkers/Downsample.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/walkers/Downsample.java
deleted file mode 100644
index c112d7d..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/walkers/Downsample.java
+++ /dev/null
@@ -1,47 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.walkers;
-
-import org.broadinstitute.gatk.engine.downsampling.DownsampleType;
-
-import java.lang.annotation.*;
-
-/**
- * Specifies a method for downsampling the reads passed to a given
- * walker based on the input from that walker.
- *
- * @author hanna
- * @version 0.1
- */
- at Documented
- at Inherited
- at Retention(RetentionPolicy.RUNTIME)
- at Target(ElementType.TYPE)
-public @interface Downsample {
-    DownsampleType by();
-    int toCoverage() default -1;
-    double toFraction() default -1.0F;
-}
\ No newline at end of file
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/walkers/DuplicateWalker.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/walkers/DuplicateWalker.java
deleted file mode 100644
index 96d2d5d..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/walkers/DuplicateWalker.java
+++ /dev/null
@@ -1,57 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.walkers;
-
-import org.broadinstitute.gatk.engine.contexts.AlignmentContext;
-import org.broadinstitute.gatk.engine.filters.NotPrimaryAlignmentFilter;
-import org.broadinstitute.gatk.engine.filters.UnmappedReadFilter;
-import org.broadinstitute.gatk.utils.GenomeLoc;
-import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
-
-import java.util.List;
-import java.util.Set;
-
-/**
- * Created by IntelliJ IDEA.
- * User: mdepristo
- * Date: Feb 22, 2009
- * Time: 2:52:28 PM
- * To change this template use File | Settings | File Templates.
- */
- at Requires({DataSource.READS,DataSource.REFERENCE})
- at ReadFilters({UnmappedReadFilter.class,NotPrimaryAlignmentFilter.class})
-public abstract class DuplicateWalker<MapType, ReduceType> extends Walker<MapType, ReduceType> {
-    // Do we actually want to operate on the context?
-    public boolean filter(GenomeLoc loc, AlignmentContext context, Set<List<GATKSAMRecord>> readSets ) {
-        return true;    // We are keeping all the reads
-    }
-
-    public abstract MapType map(GenomeLoc loc, AlignmentContext context, Set<List<GATKSAMRecord>> readSets );
-
-    // Given result of map function
-    public abstract ReduceType reduceInit();
-    public abstract ReduceType reduce(MapType value, ReduceType sum);
-}
\ No newline at end of file
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/walkers/LocusWalker.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/walkers/LocusWalker.java
deleted file mode 100644
index 1e7b0e5..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/walkers/LocusWalker.java
+++ /dev/null
@@ -1,58 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.walkers;
-
-import org.broadinstitute.gatk.engine.contexts.AlignmentContext;
-import org.broadinstitute.gatk.engine.contexts.ReferenceContext;
-import org.broadinstitute.gatk.engine.downsampling.DownsampleType;
-import org.broadinstitute.gatk.engine.filters.DuplicateReadFilter;
-import org.broadinstitute.gatk.engine.filters.FailsVendorQualityCheckFilter;
-import org.broadinstitute.gatk.engine.filters.NotPrimaryAlignmentFilter;
-import org.broadinstitute.gatk.engine.filters.UnmappedReadFilter;
-import org.broadinstitute.gatk.engine.refdata.RefMetaDataTracker;
-
-/**
- * Created by IntelliJ IDEA.
- * User: mdepristo
- * Date: Feb 22, 2009
- * Time: 2:52:28 PM
- * To change this template use File | Settings | File Templates.
- */
- at By(DataSource.READS)
- at Requires({DataSource.READS,DataSource.REFERENCE})
- at PartitionBy(PartitionType.LOCUS)
- at ReadFilters({UnmappedReadFilter.class,NotPrimaryAlignmentFilter.class,DuplicateReadFilter.class,FailsVendorQualityCheckFilter.class})
- at Downsample(by = DownsampleType.BY_SAMPLE, toCoverage = 1000)
- at RemoveProgramRecords
-public abstract class LocusWalker<MapType, ReduceType> extends Walker<MapType, ReduceType> {
-    // Do we actually want to operate on the context?
-    public boolean filter(RefMetaDataTracker tracker, ReferenceContext ref, AlignmentContext context) {
-        return true;    // We are keeping all the reads
-    }
-
-    // Map over the org.broadinstitute.gatk.engine.contexts.AlignmentContext
-    public abstract MapType map(RefMetaDataTracker tracker, ReferenceContext ref, AlignmentContext context);
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/walkers/Multiplex.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/walkers/Multiplex.java
deleted file mode 100644
index e771d1e..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/walkers/Multiplex.java
+++ /dev/null
@@ -1,44 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.walkers;
-
-import java.lang.annotation.*;
-
-/**
- * Indicates that the class should be multiplexed according to the rules
- * specified in the multiplexer.
- *
- * @author mhanna
- * @version 0.1
- */
- at Documented
- at Inherited
- at Retention(RetentionPolicy.RUNTIME)
- at Target({ElementType.FIELD})
-public @interface Multiplex {
-    public Class<? extends Multiplexer> value();
-    public String[] arguments() default {};
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/walkers/Multiplexer.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/walkers/Multiplexer.java
deleted file mode 100644
index 969e288..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/walkers/Multiplexer.java
+++ /dev/null
@@ -1,52 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.walkers;
-
-import java.util.Collection;
-
-/**
- * An interface for multiplexing output streams.
- *
- * @author mhanna
- * @version 0.1
- */
-public interface Multiplexer<T> {
-    /**
-     * Generate a list of the potential outputs that can be created as a function of the other
-     * command-line arguments in this class.
-     * @return A collection of unique identifiers for the file multiplex.
-     */
-    public Collection<T> multiplex();
-
-    /**
-     * Transform the given command-line argument into a suitable form specific to this filename.
-     * @param multiplexedEntry Identifies the individual component of the multiplex.  Will be a value in the collection
-     *        passed back by multiplex().
-     * @param argument The actual command-line argument, supplied for transformation.
-     * @return A transformed representation of the command-line argument.
-     */
-    public String transformArgument(final T multiplexedEntry, final String argument);
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/walkers/NanoSchedulable.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/walkers/NanoSchedulable.java
deleted file mode 100644
index 5852b77..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/walkers/NanoSchedulable.java
+++ /dev/null
@@ -1,34 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.walkers;
-
-/**
- * Root parallelism interface.  Walkers that implement this
- * declare that their map function is thread-safe and so multiple
- * map calls can be run in parallel in the same JVM instance.
- */
-public interface NanoSchedulable {
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/walkers/PartitionBy.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/walkers/PartitionBy.java
deleted file mode 100644
index 346f7c4..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/walkers/PartitionBy.java
+++ /dev/null
@@ -1,39 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.walkers;
-
-import java.lang.annotation.*;
-
-/**
- * Allows the walker to indicate how to partition data it wants to consume.
- */
- at Documented
- at Inherited
- at Retention(RetentionPolicy.RUNTIME)
- at Target(ElementType.TYPE)
-public @interface PartitionBy {
-    PartitionType value();
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/walkers/PartitionType.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/walkers/PartitionType.java
deleted file mode 100644
index 2c738e1..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/walkers/PartitionType.java
+++ /dev/null
@@ -1,61 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.walkers;
-
-/**
- * Defines the ways walkers inputs can be partitioned before
- * being passed to multiple instances of the walker.
- */
-public enum PartitionType {
-    /**
-     * Do not partition the walker inputs.
-     */
-    NONE,
-
-    /**
-     * The walker inputs can be chunked down to individual
-     * reads.
-     */
-    READ,
-
-    /**
-     * The walker inputs can be chunked down to the
-     * per-locus level.
-     */
-    LOCUS,
-
-    /**
-     * The walker inputs should be processed as complete
-     * intervals defined -L or the reference contigs.
-     */
-    INTERVAL,
-
-    /**
-     * The walker inputs should always be processed as complete
-     * contigs, even if there are multiple intervals per contig.
-     */
-    CONTIG
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/walkers/RMD.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/walkers/RMD.java
deleted file mode 100644
index a2ee8d0..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/walkers/RMD.java
+++ /dev/null
@@ -1,56 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.walkers;
-
-import htsjdk.tribble.Feature;
-
-import java.lang.annotation.Documented;
-import java.lang.annotation.Inherited;
-import java.lang.annotation.Retention;
-import java.lang.annotation.RetentionPolicy;
-/**
- * User: hanna
- * Date: May 19, 2009
- * Time: 1:34:15 PM
- * BROAD INSTITUTE SOFTWARE COPYRIGHT NOTICE AND AGREEMENT
- * Software and documentation are copyright 2005 by the Broad Institute.
- * All rights are reserved.
- *
- * Users acknowledge that this software is supplied without any warranty or support.
- * The Broad Institute is not responsible for its use, misuse, or
- * functionality.
- */
-
-/**
- * A data type representing reference-ordered data.
- */
- at Documented
- at Inherited
- at Retention(RetentionPolicy.RUNTIME)
-public @interface RMD {
-    String name();    
-    Class type() default Feature.class;
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/walkers/ReadFilters.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/walkers/ReadFilters.java
deleted file mode 100644
index eac5715..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/walkers/ReadFilters.java
+++ /dev/null
@@ -1,45 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.walkers;
-
-import htsjdk.samtools.filter.SamRecordFilter;
-import org.broadinstitute.gatk.utils.help.DocumentedGATKFeature;
-
-import java.lang.annotation.*;
-
-/**
- * An annotation to describe what kind of data will be filtered out.
- *
- * @author hanna
- * @version 0.1
- */
- at Documented
- at Inherited
- at Retention(RetentionPolicy.RUNTIME)
- at Target(ElementType.TYPE)
-public @interface ReadFilters {
-    public Class<? extends SamRecordFilter>[] value() default {};
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/walkers/ReadPairWalker.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/walkers/ReadPairWalker.java
deleted file mode 100644
index 67eae69..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/walkers/ReadPairWalker.java
+++ /dev/null
@@ -1,63 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.walkers;
-
-import htsjdk.samtools.SAMRecord;
-
-import java.util.Collection;
-
-/**
- * Walks over all pairs/collections of reads in a BAM file sorted by
- * read name.
- *
- * @author mhanna
- * @version 0.1
- */
- at Requires({DataSource.READS})
-public abstract class ReadPairWalker<MapType,ReduceType> extends Walker<MapType,ReduceType> {
-
-    /**
-     * Optionally filters out read pairs.
-     * @param reads collections of all reads with the same read name.
-     * @return True to process the reads with map/reduce; false otherwise.
-     */
-    public boolean filter(Collection<SAMRecord> reads) {
-        // Keep all pairs by default.
-        return true;
-    }
-
-    /**
-     * Maps a read pair to a given reduce of type MapType.  Semantics determined by subclasser.
-     * @param reads Collection of reads having the same name.
-     * @return Semantics defined by implementer.
-     */
-    public abstract MapType map(Collection<SAMRecord> reads);
-
-    // Given result of map function
-    public abstract ReduceType reduceInit();
-    public abstract ReduceType reduce(MapType value, ReduceType sum);
-
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/walkers/ReadWalker.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/walkers/ReadWalker.java
deleted file mode 100644
index 9528cf1..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/walkers/ReadWalker.java
+++ /dev/null
@@ -1,55 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.walkers;
-
-import org.broadinstitute.gatk.engine.contexts.ReferenceContext;
-import org.broadinstitute.gatk.engine.refdata.RefMetaDataTracker;
-import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
-
-/**
- * Created by IntelliJ IDEA.
- * User: mdepristo
- * Date: Feb 22, 2009
- * Time: 2:52:28 PM
- * To change this template use File | Settings | File Templates.
- */
- at Requires({DataSource.READS, DataSource.REFERENCE})
- at PartitionBy(PartitionType.READ)
-public abstract class ReadWalker<MapType, ReduceType> extends Walker<MapType, ReduceType> {
-    public boolean requiresOrderedReads() { return false; }
-    
-    // Do we actually want to operate on the context?
-    /** Must return true for reads that need to be processed. Reads, for which this method return false will
-     * be skipped by the engine and never passed to the walker.
-     */
-    public boolean filter(ReferenceContext ref, GATKSAMRecord read) {
-        // We are keeping all the reads
-        return true;
-    }
-
-    // Map over the org.broadinstitute.gatk.engine.contexts.AlignmentContext
-    public abstract MapType map(ReferenceContext ref, GATKSAMRecord read, RefMetaDataTracker metaDataTracker);
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/walkers/RefWalker.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/walkers/RefWalker.java
deleted file mode 100644
index 90c10c5..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/walkers/RefWalker.java
+++ /dev/null
@@ -1,39 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.walkers;
-
-/**
- * Created by IntelliJ IDEA.
- * User: mdepristo
- * Date: Feb 22, 2009
- * Time: 2:52:28 PM
- * To change this template use File | Settings | File Templates.
- */
- at By(DataSource.REFERENCE)
- at Requires({DataSource.REFERENCE})
- at Allows(DataSource.REFERENCE)
-public abstract class RefWalker<MapType, ReduceType> extends LocusWalker<MapType, ReduceType> {
-}
\ No newline at end of file
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/walkers/Reference.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/walkers/Reference.java
deleted file mode 100644
index 3598cf5..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/walkers/Reference.java
+++ /dev/null
@@ -1,47 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.walkers;
-
-import java.lang.annotation.*;
-
-/**
- * Describes presentation, capabilities, and limitations of the reference
- * provided to the GATK.
- *
- * @author mhanna
- * @version 0.1
- */
- at Documented
- at Inherited
- at Retention(RetentionPolicy.RUNTIME)
- at Target(ElementType.TYPE)
-public @interface Reference {
-    /**
-     * Specifies the window expansion for the current walker.
-     * @return The window to which the reference should be expanded.  Defaults to [0,0] (no expansion).
-     */
-    public Window window() default @Window;
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/walkers/RemoveProgramRecords.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/walkers/RemoveProgramRecords.java
deleted file mode 100644
index ad945f2..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/walkers/RemoveProgramRecords.java
+++ /dev/null
@@ -1,46 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.walkers;
-
-/**
- * Created with IntelliJ IDEA.
- * User: thibault
- * Date: 8/2/12
- * Time: 1:58 PM
- * To change this template use File | Settings | File Templates.
- */
-
-import java.lang.annotation.*;
-
-/**
- * Indicates that program records should be removed from SAM headers by default for this walker
- */
- at Documented
- at Inherited
- at Retention(RetentionPolicy.RUNTIME)
- at Target(ElementType.TYPE)
-public @interface RemoveProgramRecords {
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/walkers/Requires.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/walkers/Requires.java
deleted file mode 100644
index 5a16a67..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/walkers/Requires.java
+++ /dev/null
@@ -1,52 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.walkers;
-
-import java.lang.annotation.*;
-/**
- * User: hanna
- * Date: May 19, 2009
- * Time: 10:06:47 AM
- * BROAD INSTITUTE SOFTWARE COPYRIGHT NOTICE AND AGREEMENT
- * Software and documentation are copyright 2005 by the Broad Institute.
- * All rights are reserved.
- *
- * Users acknowledge that this software is supplied without any warranty or support.
- * The Broad Institute is not responsible for its use, misuse, or
- * functionality.
- */
-
-/**
- * Determines what data sources are mandated by a given walker.
- */
- at Documented
- at Inherited
- at Retention(RetentionPolicy.RUNTIME)
- at Target(ElementType.TYPE)
-public @interface Requires {
-    DataSource[] value();
-    RMD[] referenceMetaData() default {};
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/walkers/RodWalker.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/walkers/RodWalker.java
deleted file mode 100644
index 88a1eaa..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/walkers/RodWalker.java
+++ /dev/null
@@ -1,39 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.walkers;
-
-/**
- * Created by IntelliJ IDEA.
- * User: mdepristo
- * Date: Feb 22, 2009
- * Time: 2:52:28 PM
- * To change this template use File | Settings | File Templates.
- */
- at By(DataSource.REFERENCE_ORDERED_DATA)
- at Requires({DataSource.REFERENCE, DataSource.REFERENCE_ORDERED_DATA})
- at Allows({DataSource.REFERENCE, DataSource.REFERENCE_ORDERED_DATA})
-public abstract class RodWalker<MapType, ReduceType> extends LocusWalker<MapType, ReduceType> {
-}
\ No newline at end of file
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/walkers/TreeReducible.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/walkers/TreeReducible.java
deleted file mode 100644
index c170f3d..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/walkers/TreeReducible.java
+++ /dev/null
@@ -1,49 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.walkers;
-
-/**
- * Created by IntelliJ IDEA.
- * User: mhanna
- * Date: Apr 26, 2009
- * Time: 5:34:11 PM
- * To change this template use File | Settings | File Templates.
- */
-
-/**
- * Indicates that a class is tree reducible, aka that any two adjacent
- * shards of the data can reduce with each other, and the composite result
- * can be reduced with other composite results.
- */
-public interface TreeReducible<ReduceType> {
-    /**
-     * A composite, 'reduce of reduces' function.
-     * @param lhs 'left-most' portion of data in the composite reduce.
-     * @param rhs 'right-most' portion of data in the composite reduce.
-     * @return The composite reduce type.
-     */
-    ReduceType treeReduce(ReduceType lhs, ReduceType rhs);
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/walkers/Walker.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/walkers/Walker.java
deleted file mode 100644
index 31472fd..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/walkers/Walker.java
+++ /dev/null
@@ -1,177 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.walkers;
-
-import htsjdk.samtools.SAMSequenceDictionary;
-import org.apache.log4j.Logger;
-import org.broadinstitute.gatk.engine.CommandLineGATK;
-import org.broadinstitute.gatk.engine.GenomeAnalysisEngine;
-import org.broadinstitute.gatk.engine.downsampling.DownsampleType;
-import org.broadinstitute.gatk.engine.filters.MalformedReadFilter;
-import org.broadinstitute.gatk.engine.iterators.ReadTransformer;
-import org.broadinstitute.gatk.engine.samples.Sample;
-import org.broadinstitute.gatk.engine.samples.SampleDB;
-import org.broadinstitute.gatk.utils.GenomeLoc;
-import org.broadinstitute.gatk.utils.baq.BAQ;
-import org.broadinstitute.gatk.utils.collections.Pair;
-import org.broadinstitute.gatk.utils.help.DocumentedGATKFeature;
-import org.broadinstitute.gatk.utils.recalibration.BQSRMode;
-
-import java.util.List;
-
-/**
- * Created by IntelliJ IDEA.
- * User: hanna
- * Date: Mar 17, 2009
- * Time: 1:53:31 PM
- * To change this template use File | Settings | File Templates.
- */
- at ReadFilters(MalformedReadFilter.class)
- at PartitionBy(PartitionType.NONE)
- at Downsample(by = DownsampleType.NONE)
- at BAQMode(QualityMode = BAQ.QualityMode.OVERWRITE_QUALS, ApplicationTime = ReadTransformer.ApplicationTime.ON_INPUT)
- at BQSRMode(ApplicationTime = ReadTransformer.ApplicationTime.ON_INPUT)
- at DocumentedGATKFeature(groupName = "Uncategorized", extraDocs = {CommandLineGATK.class})
-public abstract class Walker<MapType, ReduceType> {
-    final protected static Logger logger = Logger.getLogger(Walker.class);
-    private GenomeAnalysisEngine toolkit;
-
-    protected Walker() {
-    }
-
-    /**
-     * Set the toolkit, for peering into internal structures that can't
-     * otherwise be read.
-     * @param toolkit The genome analysis toolkit.
-     */
-    public void setToolkit(GenomeAnalysisEngine toolkit) {
-        this.toolkit = toolkit;
-    }
-
-    /**
-     * Retrieve the toolkit, for peering into internal structures that can't
-     * otherwise be read.  Use sparingly, and discuss uses with software engineering
-     * team.
-     * @return The genome analysis toolkit.
-     */
-    protected GenomeAnalysisEngine getToolkit() {
-        return toolkit;
-    }
-
-    /**
-     * Gets the master sequence dictionary for this walker
-     * @link GenomeAnalysisEngine.getMasterSequenceDictionary
-     * @return
-     */
-    protected SAMSequenceDictionary getMasterSequenceDictionary() {
-        return getToolkit().getMasterSequenceDictionary();
-    }
-
-    public SampleDB getSampleDB() {
-        return getToolkit().getSampleDB();
-    }
-
-    protected Sample getSample(final String id) {
-        return getToolkit().getSampleDB().getSample(id);
-    }
-
-    /**
-     * (conceptual static) method that states whether you want to see reads piling up at a locus
-     * that contain a deletion at the locus.
-     *
-     * ref:   ATCTGA
-     * read1: ATCTGA
-     * read2: AT--GA
-     *
-     * Normally, the locus iterator only returns a list of read1 at this locus at position 3, but
-     * if this function returns true, then the system will return (read1, read2) with offsets
-     * of (3, -1).  The -1 offset indicates a deletion in the read.
-     *
-     * @return false if you don't want to see deletions, or true if you do
-     */
-    public boolean includeReadsWithDeletionAtLoci() { 
-        return false;
-    }
-
-    public void initialize() { }
-
-    /**
-     * A function for overloading in subclasses providing a mechanism to abort early from a walker.
-     *
-     * If this ever returns true, then the Traversal engine will stop executing map calls
-     * and start the process of shutting down the walker in an orderly fashion.
-     * @return
-     */
-    public boolean isDone() {
-        return false;
-    }
-
-    /**
-     * Provide an initial value for reduce computations.
-     * @return Initial value of reduce.
-     */
-    public abstract ReduceType reduceInit();
-
-    /**
-     * Reduces a single map with the accumulator provided as the ReduceType.
-     * @param value result of the map.
-     * @param sum accumulator for the reduce.
-     * @return accumulator with result of the map taken into account.
-     */
-    public abstract ReduceType reduce(MapType value, ReduceType sum);    
-
-    public void onTraversalDone(ReduceType result) {
-        logger.info("[REDUCE RESULT] Traversal result is: " + result);
-    }
-
-    /**
-     * General interval reduce routine called after all of the traversals are done
-     * @param results interval reduce results
-     */
-    public void onTraversalDone(List<Pair<GenomeLoc, ReduceType>> results) {
-        for ( Pair<GenomeLoc, ReduceType> result : results ) {
-            logger.info(String.format("[INTERVAL REDUCE RESULT] at %s ", result.getFirst()));
-            this.onTraversalDone(result.getSecond());
-        }
-    }
-
-    /**
-     * Return true if your walker wants to reduce each interval separately.  Default is false.
-     *
-     * If you set this flag, several things will happen.
-     *
-     * The system will invoke reduceInit() once for each interval being processed, starting a fresh reduce
-     * Reduce will accumulate normally at each map unit in the interval
-     * However, onTraversalDone(reduce) will be called after each interval is processed.
-     * The system will call onTraversalDone( GenomeLoc -> reduce ), after all reductions are done,
-     *   which is overloaded here to call onTraversalDone(reduce) for each location
-     *
-     * @return true if your walker wants to reduce each interval separately.
-     */
-    public boolean isReduceByInterval() {
-        return false;
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/walkers/WalkerName.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/walkers/WalkerName.java
deleted file mode 100644
index 9e02bf1..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/walkers/WalkerName.java
+++ /dev/null
@@ -1,42 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.walkers;
-
-import java.lang.annotation.*;
-
-/**
- * Created by IntelliJ IDEA.
- * User: hanna
- * Date: Mar 26, 2009
- * Time: 3:00:16 PM
- * To change this template use File | Settings | File Templates.
- */
- at Documented
- at Retention(RetentionPolicy.RUNTIME)
- at Target(ElementType.TYPE)
-public @interface WalkerName {
-    public String value() default "";
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/walkers/Window.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/walkers/Window.java
deleted file mode 100644
index 66205b9..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/walkers/Window.java
+++ /dev/null
@@ -1,57 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.walkers;
-
-import java.lang.annotation.Documented;
-import java.lang.annotation.Inherited;
-import java.lang.annotation.Retention;
-import java.lang.annotation.RetentionPolicy;
-
-/**
- * Describes the size of the window into the genome.  Has differing semantics based on
- * the data this annotation is used to describe.
- *
- * @author mhanna
- * @version 0.1
- */
- at Documented
- at Inherited
- at Retention(RetentionPolicy.RUNTIME)
-public @interface Window {
-    /**
-     * Controls where the window should start and stop relative to
-     * the locus currently being processed.
-     * @return start point; default is 0, indicating 'supply only the reference base at the current locus'.
-     */
-    public int start() default 0;
-
-    /**
-     * Controls where the window should start and stop relative to
-     * the locus currently being processed.
-     * @return stop point; default is 0, indicating 'supply only the reference base at the current locus'.
-     */
-    public int stop() default 0;
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/walkers/diffengine/BAMDiffableReader.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/walkers/diffengine/BAMDiffableReader.java
deleted file mode 100644
index 2c8cc7a..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/walkers/diffengine/BAMDiffableReader.java
+++ /dev/null
@@ -1,119 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.walkers.diffengine;
-
-import htsjdk.samtools.SAMFileReader;
-import htsjdk.samtools.SAMRecord;
-import htsjdk.samtools.SAMRecordIterator;
-import htsjdk.samtools.ValidationStringency;
-import htsjdk.samtools.util.BlockCompressedInputStream;
-
-import java.io.*;
-import java.util.Arrays;
-
-
-/**
- * Created by IntelliJ IDEA.
- * User: depristo
- * Date: 7/4/11
- * Time: 1:09 PM
- *
- * Class implementing diffnode reader for VCF
- */
-public class BAMDiffableReader implements DiffableReader {
-    @Override
-    public String getName() { return "BAM"; }
-
-    @Override
-    public DiffElement readFromFile(File file, int maxElementsToRead) {
-        final SAMFileReader reader = new SAMFileReader(file, null); // null because we don't want it to look for the index
-        reader.setValidationStringency(ValidationStringency.SILENT);
-
-        DiffNode root = DiffNode.rooted(file.getName());
-        SAMRecordIterator iterator = reader.iterator();
-
-        int count = 0;
-        while ( iterator.hasNext() ) {
-            final SAMRecord record = iterator.next();
-
-            // name is the read name + first of pair
-            String name = record.getReadName().replace('.', '_');
-            if ( record.getReadPairedFlag() ) {
-                name += record.getFirstOfPairFlag() ? "_1" : "_2";
-            }
-
-            DiffNode readRoot = DiffNode.empty(name, root);
-
-            // add fields
-            readRoot.add("NAME", record.getReadName());
-            readRoot.add("FLAGS", record.getFlags());
-            readRoot.add("RNAME", record.getReferenceName());
-            readRoot.add("POS", record.getAlignmentStart());
-            readRoot.add("MAPQ", record.getMappingQuality());
-            readRoot.add("CIGAR", record.getCigarString());
-            readRoot.add("RNEXT", record.getMateReferenceName());
-            readRoot.add("PNEXT", record.getMateAlignmentStart());
-            readRoot.add("TLEN", record.getInferredInsertSize());
-            readRoot.add("SEQ", record.getReadString());
-            readRoot.add("QUAL", record.getBaseQualityString());
-
-            for ( SAMRecord.SAMTagAndValue xt : record.getAttributes() ) {
-                readRoot.add(xt.tag, xt.value);
-            }
-
-            // add record to root
-            if ( ! root.hasElement(name) )
-                // protect ourselves from malformed files
-                root.add(readRoot);
-            count += readRoot.size();
-            if ( count > maxElementsToRead && maxElementsToRead != -1)
-                break;
-        }
-
-        reader.close();
-
-        return root.getBinding();
-    }
-
-    @Override
-    public boolean canRead(File file) {
-        final byte[] BAM_MAGIC = "BAM\1".getBytes();
-        final byte[] buffer = new byte[BAM_MAGIC.length];
-        try {
-            InputStream fstream = new BufferedInputStream(new FileInputStream(file));
-            if ( !BlockCompressedInputStream.isValidFile(fstream) )
-                return false;
-            final BlockCompressedInputStream BCIS = new BlockCompressedInputStream(fstream);
-            BCIS.read(buffer, 0, BAM_MAGIC.length);
-            BCIS.close();
-            return Arrays.equals(buffer, BAM_MAGIC);
-        } catch ( IOException e ) {
-            return false;
-        } catch ( htsjdk.samtools.FileTruncatedException e ) {
-            return false;
-        }
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/walkers/diffengine/DiffElement.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/walkers/diffengine/DiffElement.java
deleted file mode 100644
index ebed914..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/walkers/diffengine/DiffElement.java
+++ /dev/null
@@ -1,125 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.walkers.diffengine;
-
-import com.google.java.contract.Ensures;
-import com.google.java.contract.Invariant;
-import com.google.java.contract.Requires;
-import org.broadinstitute.gatk.utils.Utils;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-
-/**
- * Created by IntelliJ IDEA.
- * User: depristo
- * Date: 7/4/11
- * Time: 12:55 PM
- *
- * An interface that must be implemented to allow us to calculate differences
- * between structured objects
- */
- at Invariant({
-        "name != null",
-        "value != null",
-        "parent != null || name.equals(\"ROOT\")",
-        "value == null || value.getBinding() == this"})
-public class DiffElement {
-    public final static DiffElement ROOT = new DiffElement();
-
-    final private String name;
-    final private DiffElement parent;
-    final private DiffValue value;
-
-    /**
-     * For ROOT only
-     */
-    private DiffElement() {
-        this.name = "ROOT";
-        this.parent = null;
-        this.value = new DiffValue(this, "ROOT");
-    }
-
-    @Requires({"name != null", "parent != null", "value != null"})
-    public DiffElement(String name, DiffElement parent, DiffValue value) {
-        if ( name.equals("ROOT") ) throw new IllegalArgumentException("Cannot use reserved name ROOT");
-        this.name = name;
-        this.parent = parent;
-        this.value = value;
-        this.value.setBinding(this);
-    }
-
-    @Ensures({"result != null"})
-    public String getName() {
-        return name;
-    }
-
-    public DiffElement getParent() {
-        return parent;
-    }
-
-    @Ensures({"result != null"})
-    public DiffValue getValue() {
-        return value;
-    }
-
-    public boolean isRoot() { return this == ROOT; }
-
-    @Ensures({"result != null"})
-    @Override
-    public String toString() {
-        return getName() + "=" + getValue().toString();
-    }
-
-    public String toString(int offset) {
-        return (offset > 0 ? Utils.dupString(' ', offset) : 0) + getName() + "=" + getValue().toString(offset);
-    }
-
-    @Ensures({"result != null"})
-    public final String fullyQualifiedName() {
-        if ( isRoot() )
-            return "";
-        else if ( parent.isRoot() )
-            return name;
-        else
-            return parent.fullyQualifiedName() + "." + name;
-    }
-
-    @Ensures({"result != null"})
-    public String toOneLineString() {
-        return getName() + "=" + getValue().toOneLineString();
-    }
-
-    @Ensures({"result != null"})
-    public DiffNode getValueAsNode() {
-        if ( getValue().isCompound() )
-            return (DiffNode)getValue();
-        else
-            throw new ReviewedGATKException("Illegal request conversion of a DiffValue into a DiffNode: " + this);
-    }
-
-    public int size() {
-        return 1 + getValue().size();
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/walkers/diffengine/DiffEngine.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/walkers/diffengine/DiffEngine.java
deleted file mode 100644
index d10cfea..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/walkers/diffengine/DiffEngine.java
+++ /dev/null
@@ -1,437 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.walkers.diffengine;
-
-import org.apache.log4j.Logger;
-import org.broadinstitute.gatk.engine.report.GATKReport;
-import org.broadinstitute.gatk.engine.report.GATKReportTable;
-import org.broadinstitute.gatk.utils.Utils;
-import org.broadinstitute.gatk.utils.classloader.PluginManager;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-import org.broadinstitute.gatk.utils.exceptions.UserException;
-
-import java.io.File;
-import java.io.PrintStream;
-import java.util.*;
-
-/**
- * Created by IntelliJ IDEA.
- * User: depristo
- * Date: 7/4/11
- * Time: 12:51 PM
- * A generic engine for comparing tree-structured objects
- *
- */
-public class DiffEngine {
-    final protected static Logger logger = Logger.getLogger(DiffEngine.class);
-
-    private final Map<String, DiffableReader> readers = new HashMap<String, DiffableReader>();
-
-    public DiffEngine() {
-        loadDiffableReaders();
-    }
-
-    // --------------------------------------------------------------------------------
-    //
-    // difference calculation
-    //
-    // --------------------------------------------------------------------------------
-
-    public List<Difference> diff(DiffElement master, DiffElement test) {
-        DiffValue masterValue = master.getValue();
-        DiffValue testValue = test.getValue();
-
-        if ( masterValue.isCompound() && masterValue.isCompound() ) {
-            return diff(master.getValueAsNode(), test.getValueAsNode());
-        } else if ( masterValue.isAtomic() && testValue.isAtomic() ) {
-            return diff(masterValue, testValue);
-        } else {
-            // structural difference in types.  one is node, other is leaf
-            return Arrays.asList(new Difference(master, test));
-        }
-    }
-
-    public List<Difference> diff(DiffNode master, DiffNode test) {
-        Set<String> allNames = new HashSet<String>(master.getElementNames());
-        allNames.addAll(test.getElementNames());
-        List<Difference> diffs = new ArrayList<Difference>();
-
-        for ( String name : allNames ) {
-            DiffElement masterElt = master.getElement(name);
-            DiffElement testElt = test.getElement(name);
-            if ( masterElt == null && testElt == null ) {
-                throw new ReviewedGATKException("BUG: unexpectedly got two null elements for field: " + name);
-            } else if ( masterElt == null || testElt == null ) { // if either is null, we are missing a value
-                // todo -- should one of these be a special MISSING item?
-                diffs.add(new Difference(masterElt, testElt));
-            } else {
-                diffs.addAll(diff(masterElt, testElt));
-            }
-        }
-
-        return diffs;
-    }
-
-    public List<Difference> diff(DiffValue master, DiffValue test) {
-        if ( master.getValue().equals(test.getValue()) ) {
-            return Collections.emptyList();
-        } else {
-            return Arrays.asList(new Difference(master.getBinding(), test.getBinding()));
-        }
-    }
-
-    // --------------------------------------------------------------------------------
-    //
-    // Summarizing differences
-    //
-    // --------------------------------------------------------------------------------
-
-    /**
-     * Emits a summary of the diffs to out.  Suppose you have the following three differences:
-     *
-     *   A.X.Z:1!=2
-     *   A.Y.Z:3!=4
-     *   B.X.Z:5!=6
-     *
-     * The above is the itemized list of the differences.  The summary looks for common differences
-     * in the name hierarchy, counts those shared elements, and emits the differences that occur
-     * in order of decreasing counts.
-     *
-     * So, in the above example, what are the shared elements?
-     *
-     * A.X.Z and B.X.Z share X.Z, so there's a *.X.Z with count 2
-     * A.X.Z, A.Y.Z, and B.X.Z all share *.*.Z, with count 3
-     * Each of A.X.Z, A.Y.Z, and B.X.Z are individually unique, with count 1
-     *
-     * So we would emit the following summary:
-     *
-     * *.*.Z: 3
-     * *.X.Z: 2
-     * A.X.Z: 1 [specific difference: 1!=2]
-     * A.Y.Z: 1 [specific difference: 3!=4]
-     * B.X.Z: 1 [specific difference: 5!=6]
-     *
-     * The algorithm to accomplish this calculation is relatively simple. Start with all of the
-     * concrete differences.  For each pair of differences A1.A2....AN and B1.B2....BN:
-     *
-     * find the longest common subsequence Si.Si+1...SN where Ai = Bi = Si
-     * If i == 0, then there's no shared substructure
-     * If i > 0, then generate the summarized value X = *.*...Si.Si+1...SN
-     * if X is a known summary, increment it's count, otherwise set its count to 1
-     *
-     * Not that only pairs of the same length are considered as potentially equivalent
-     *
-     * @param params determines how we display the items
-     * @param diffs the list of differences to summarize
-     */
-    public void reportSummarizedDifferences(List<Difference> diffs, SummaryReportParams params ) {
-        printSummaryReport(summarizedDifferencesOfPaths(diffs, params.doPairwise, params.maxRawDiffsToSummarize), params );
-    }
-
-    final protected static String[] diffNameToPath(String diffName) {
-        return diffName.split("\\.");
-    }
-
-    protected List<Difference> summarizedDifferencesOfPathsFromString(List<String> singletonDiffs) {
-        List<Difference> diffs = new ArrayList<Difference>();
-
-        for ( String diff : singletonDiffs ) {
-            diffs.add(new Difference(diff));
-        }
-
-        return summarizedDifferencesOfPaths(diffs, true, -1);
-    }
-
-    /**
-     * Computes a minimum set of potential differences between all singleton differences
-     * in singletonDiffs.  Employs an expensive pairwise O(n^2) algorithm.
-     *
-     * @param singletonDiffs
-     * @param maxRawDiffsToSummarize
-     * @return
-     */
-    private Map<String, Difference> initialPairwiseSummaries(final List<? extends Difference> singletonDiffs,
-                                                             final int maxRawDiffsToSummarize) {
-        Map<String, Difference> summaries = new HashMap<String, Difference>();
-
-        // create the initial set of differences
-        for ( int i = 0; i < singletonDiffs.size(); i++ ) {
-            for ( int j = 0; j <= i; j++ ) {
-                Difference diffPath1 = singletonDiffs.get(i);
-                Difference diffPath2 = singletonDiffs.get(j);
-                if ( diffPath1.length() == diffPath2.length() ) {
-                    int lcp = longestCommonPostfix(diffPath1.getParts(), diffPath2.getParts());
-                    String path = diffPath2.getPath();
-                    if ( lcp != 0 && lcp != diffPath1.length() )
-                        path = summarizedPath(diffPath2.getParts(), lcp);
-                    Difference sumDiff = new Difference(path, diffPath2.getMaster(), diffPath2.getTest());
-                    sumDiff.setCount(0);
-                    addSummaryIfMissing(summaries, sumDiff);
-
-                    if ( maxRawDiffsToSummarize != -1 && summaries.size() > maxRawDiffsToSummarize)
-                        return summaries;
-                }
-            }
-        }
-
-        return summaries;
-    }
-
-    /**
-     * Computes the possible leaf differences among the singleton diffs.
-     *
-     * The leaf differences are all of the form *.*...*.X where all internal
-     * differences are wildcards and the only summarized difference considered
-     * interesting to compute is
-     *
-     * @param singletonDiffs
-     * @param maxRawDiffsToSummarize
-     * @return
-     */
-    private Map<String, Difference> initialLeafSummaries(final List<? extends Difference> singletonDiffs,
-                                                         final int maxRawDiffsToSummarize) {
-        Map<String, Difference> summaries = new HashMap<String, Difference>();
-
-        // create the initial set of differences
-        for ( final Difference d : singletonDiffs ) {
-            final String path = summarizedPath(d.getParts(), 1);
-            Difference sumDiff = new Difference(path, d.getMaster(), d.getTest());
-            sumDiff.setCount(0);
-            addSummaryIfMissing(summaries, sumDiff);
-
-            if ( maxRawDiffsToSummarize != -1 && summaries.size() > maxRawDiffsToSummarize)
-                return summaries;
-        }
-
-        return summaries;
-    }
-
-    protected List<Difference> summarizedDifferencesOfPaths(final List<? extends Difference> singletonDiffs,
-                                                            final boolean doPairwise,
-                                                            final int maxRawDiffsToSummarize) {
-        final Map<String, Difference> summaries = doPairwise
-                ? initialPairwiseSummaries(singletonDiffs, maxRawDiffsToSummarize)
-                : initialLeafSummaries(singletonDiffs, maxRawDiffsToSummarize);
-
-        // count differences
-        for ( Difference diffPath : singletonDiffs ) {
-            for ( Difference sumDiff : summaries.values() ) {
-                if ( sumDiff.matches(diffPath.getParts()) )
-                    sumDiff.incCount();
-            }
-        }
-
-        List<Difference> sortedSummaries = new ArrayList<Difference>(summaries.values());
-        Collections.sort(sortedSummaries);
-        return sortedSummaries;
-    }
-
-    protected void addSummaryIfMissing(Map<String, Difference> summaries, Difference diff) {
-        if ( ! summaries.containsKey(diff.getPath()) ) {
-            summaries.put(diff.getPath(), diff);
-        }
-    }
-
-    protected void printSummaryReport(List<Difference> sortedSummaries, SummaryReportParams params ) {
-        List<Difference> toShow = new ArrayList<Difference>();
-        int count = 0, count1 = 0;
-        for ( Difference diff : sortedSummaries ) {
-            if ( diff.getCount() < params.minSumDiffToShow )
-                // in order, so break as soon as the count is too low
-                break;
-
-            if ( params.maxItemsToDisplay != 0 && count++ > params.maxItemsToDisplay )
-                break;
-
-            if ( diff.getCount() == 1 ) {
-                count1++;
-                if ( params.maxCountOneItems != 0 && count1 > params.maxCountOneItems )
-                    break;
-            }
-
-            toShow.add(diff);
-        }
-
-        // if we want it in descending order, reverse the list
-        if ( ! params.descending ) {
-            Collections.reverse(toShow);
-        }
-
-        // now that we have a specific list of values we want to show, display them
-        GATKReport report = new GATKReport();
-        final String tableName = "differences";
-        report.addTable(tableName, "Summarized differences between the master and test files. See http://www.broadinstitute.org/gatk/guide/article?id=1299 for more information", 3);
-        final GATKReportTable table = report.getTable(tableName);
-        table.addColumn("Difference");
-        table.addColumn("NumberOfOccurrences");
-        table.addColumn("ExampleDifference");
-        for ( final Difference diff : toShow ) {
-            final String key = diff.getPath();
-            table.addRowID(key, true);
-            table.set(key, "NumberOfOccurrences", diff.getCount());
-            table.set(key, "ExampleDifference", diff.valueDiffString());
-        }
-        GATKReport output = new GATKReport(table);
-        output.print(params.out);
-    }
-
-    protected static int longestCommonPostfix(String[] diffPath1, String[] diffPath2) {
-        int i = 0;
-        for ( ; i < diffPath1.length; i++ ) {
-            int j = diffPath1.length - i - 1;
-            if ( ! diffPath1[j].equals(diffPath2[j]) )
-                break;
-        }
-        return i;
-    }
-
-    /**
-     * parts is [A B C D]
-     * commonPostfixLength: how many parts are shared at the end, suppose its 2
-     * We want to create a string *.*.C.D
-     *
-     * @param parts the separated path values [above without .]
-     * @param commonPostfixLength
-     * @return
-     */
-    protected static String summarizedPath(String[] parts, int commonPostfixLength) {
-        int stop = parts.length - commonPostfixLength;
-        if ( stop > 0 ) parts = parts.clone();
-        for ( int i = 0; i < stop; i++ ) {
-            parts[i] = "*";
-        }
-        return Utils.join(".", parts);
-    }
-
-    // --------------------------------------------------------------------------------
-    //
-    // plugin manager
-    //
-    // --------------------------------------------------------------------------------
-
-    public void loadDiffableReaders() {
-        List<Class<? extends DiffableReader>> drClasses = new PluginManager<DiffableReader>( DiffableReader.class ).getPlugins();
-
-        logger.info("Loading diffable modules:");
-        for (Class<? extends DiffableReader> drClass : drClasses ) {
-            logger.info("\t" + drClass.getSimpleName());
-
-            try {
-                DiffableReader dr = drClass.newInstance();
-                readers.put(dr.getName(), dr);
-            } catch (InstantiationException e) {
-                throw new ReviewedGATKException("Unable to instantiate module '" + drClass.getSimpleName() + "'");
-            } catch (IllegalAccessException e) {
-                throw new ReviewedGATKException("Illegal access error when trying to instantiate '" + drClass.getSimpleName() + "'");
-            }
-        }
-    }
-
-    protected Map<String, DiffableReader> getReaders() {
-        return readers;
-    }
-
-    protected DiffableReader getReader(String name) {
-        return readers.get(name);
-    }
-
-    /**
-     * Returns a reader appropriate for this file, or null if no such reader exists
-     * @param file
-     * @return
-     */
-    public DiffableReader findReaderForFile(File file) {
-        for ( DiffableReader reader : readers.values() )
-            if (reader.canRead(file) )
-                return reader;
-
-        return null;
-    }
-
-    /**
-     * Returns true if reader appropriate for this file, or false if no such reader exists
-     * @param file
-     * @return
-     */
-    public boolean canRead(File file) {
-        return findReaderForFile(file) != null;
-    }
-
-
-    public DiffElement createDiffableFromFile(File file) {
-        return createDiffableFromFile(file, -1);
-    }
-
-    public DiffElement createDiffableFromFile(File file, int maxElementsToRead) {
-        DiffableReader reader = findReaderForFile(file);
-        if ( reader == null )
-            throw new UserException("Unsupported file type: " + file);
-        else
-            return reader.readFromFile(file, maxElementsToRead);
-    }
-
-    public static boolean simpleDiffFiles(File masterFile, File testFile, int maxElementsToRead, DiffEngine.SummaryReportParams params) {
-        DiffEngine diffEngine = new DiffEngine();
-
-        if ( diffEngine.canRead(masterFile) && diffEngine.canRead(testFile) ) {
-            DiffElement master = diffEngine.createDiffableFromFile(masterFile, maxElementsToRead);
-            DiffElement test = diffEngine.createDiffableFromFile(testFile, maxElementsToRead);
-            List<Difference> diffs = diffEngine.diff(master, test);
-            diffEngine.reportSummarizedDifferences(diffs, params);
-            return true;
-        } else {
-            return false;
-        }
-    }
-
-    public static class SummaryReportParams {
-        final PrintStream out;
-        final int maxItemsToDisplay;
-        final int maxCountOneItems;
-        final int minSumDiffToShow;
-        final int maxRawDiffsToSummarize;
-        final boolean doPairwise;
-        boolean descending = true;
-
-        public SummaryReportParams(PrintStream out,
-                                   int maxItemsToDisplay,
-                                   int maxCountOneItems,
-                                   int minSumDiffToShow,
-                                   int maxRawDiffsToSummarize,
-                                   final boolean doPairwise) {
-            this.out = out;
-            this.maxItemsToDisplay = maxItemsToDisplay;
-            this.maxCountOneItems = maxCountOneItems;
-            this.minSumDiffToShow = minSumDiffToShow;
-            this.maxRawDiffsToSummarize = maxRawDiffsToSummarize;
-            this.doPairwise = doPairwise;
-        }
-
-        public void setDescending(boolean descending) {
-            this.descending = descending;
-        }
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/walkers/diffengine/DiffNode.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/walkers/diffengine/DiffNode.java
deleted file mode 100644
index dde9ca5..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/walkers/diffengine/DiffNode.java
+++ /dev/null
@@ -1,249 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.walkers.diffengine;
-
-import com.google.java.contract.Requires;
-import org.broadinstitute.gatk.utils.Utils;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-
-import java.util.*;
-
-/**
- * Created by IntelliJ IDEA.
- * User: depristo
- * Date: 7/4/11
- * Time: 12:55 PM
- *
- * An interface that must be implemented to allow us to calculate differences
- * between structured objects
- */
-public class DiffNode extends DiffValue {
-    private Map<String, DiffElement> getElementMap() {
-        return (Map<String, DiffElement>)super.getValue();
-    }
-    private static Map<String, DiffElement> emptyElements() { return new HashMap<String, DiffElement>(); }
-
-    private DiffNode(Map<String, DiffElement> elements) {
-        super(elements);
-    }
-
-    private DiffNode(DiffElement binding, Map<String, DiffElement> elements) {
-        super(binding, elements);
-    }
-
-    // ---------------------------------------------------------------------------
-    //
-    // constructors
-    //
-    // ---------------------------------------------------------------------------
-
-    public static DiffNode rooted(String name) {
-        return empty(name, DiffElement.ROOT);
-    }
-
-    public static DiffNode empty(String name, DiffElement parent) {
-        DiffNode df = new DiffNode(emptyElements());
-        DiffElement elt = new DiffElement(name, parent, df);
-        df.setBinding(elt);
-        return df;
-    }
-
-    public static DiffNode empty(String name, DiffValue parent) {
-        return empty(name, parent.getBinding());
-    }
-
-    // ---------------------------------------------------------------------------
-    //
-    // accessors
-    //
-    // ---------------------------------------------------------------------------
-
-    @Override
-    public boolean isAtomic() { return false; }
-
-    public Collection<String> getElementNames() {
-        return getElementMap().keySet();
-    }
-
-    public Collection<DiffElement> getElements() {
-        return getElementMap().values();
-    }
-
-    private Collection<DiffElement> getElements(boolean atomicOnly) {
-        List<DiffElement> elts = new ArrayList<DiffElement>();
-        for ( DiffElement elt : getElements() )
-            if ( (atomicOnly && elt.getValue().isAtomic()) || (! atomicOnly && elt.getValue().isCompound()))
-                elts.add(elt);
-        return elts;
-    }
-
-    public Collection<DiffElement> getAtomicElements() {
-        return getElements(true);
-    }
-
-    public Collection<DiffElement> getCompoundElements() {
-        return getElements(false);
-    }
-
-    /**
-     * Returns the element bound to name, or null if no such binding exists
-     * @param name
-     * @return
-     */
-    public DiffElement getElement(String name) {
-        return getElementMap().get(name);
-    }
-
-    /**
-     * Returns true if name is bound in this node
-     * @param name
-     * @return
-     */
-    public boolean hasElement(String name) {
-        return getElement(name) != null;
-    }
-
-    // ---------------------------------------------------------------------------
-    //
-    // add
-    //
-    // ---------------------------------------------------------------------------
-
-    @Requires("elt != null")
-    public void add(DiffElement elt) {
-        if ( getElementMap().containsKey(elt.getName()) )
-            throw new IllegalArgumentException("Attempting to rebind already existing binding: " + elt + " node=" + this);
-        getElementMap().put(elt.getName(), elt);
-    }
-
-    @Requires("elt != null")
-    public void add(DiffValue elt) {
-        add(elt.getBinding());
-    }
-
-    @Requires("elts != null")
-    public void add(Collection<DiffElement> elts) {
-        for ( DiffElement e : elts )
-            add(e);
-    }
-
-    public void add(String name, Object value) {
-        add(new DiffElement(name, this.getBinding(), new DiffValue(value)));
-    }
-
-    public int size() {
-        int count = 0;
-        for ( DiffElement value : getElements() )
-            count += value.size();
-        return count;
-    }
-
-    // ---------------------------------------------------------------------------
-    //
-    // toString
-    //
-    // ---------------------------------------------------------------------------
-
-    @Override
-    public String toString() {
-        return toString(0);
-    }
-
-    @Override
-    public String toString(int offset) {
-        String off = offset > 0 ? Utils.dupString(' ', offset) : "";
-        StringBuilder b = new StringBuilder();
-
-        b.append("(").append("\n");
-        Collection<DiffElement> atomicElts = getAtomicElements();
-        for ( DiffElement elt : atomicElts ) {
-            b.append(elt.toString(offset + 2)).append('\n');
-        }
-
-        for ( DiffElement elt : getCompoundElements() ) {
-            b.append(elt.toString(offset + 4)).append('\n');
-        }
-        b.append(off).append(")").append("\n");
-
-        return b.toString();
-    }
-
-    @Override
-    public String toOneLineString() {
-        StringBuilder b = new StringBuilder();
-
-        b.append('(');
-        List<String> parts = new ArrayList<String>();
-        for ( DiffElement elt : getElements() )
-            parts.add(elt.toOneLineString());
-        b.append(Utils.join(" ", parts));
-        b.append(')');
-
-        return b.toString();
-    }
-
-    // --------------------------------------------------------------------------------
-    //
-    // fromString and toOneLineString
-    //
-    // --------------------------------------------------------------------------------
-
-    public static DiffElement fromString(String tree) {
-        return fromString(tree, DiffElement.ROOT);
-    }
-
-    /**
-     * Doesn't support full tree structure parsing
-     * @param tree
-     * @param parent
-     * @return
-     */
-    private static DiffElement fromString(String tree, DiffElement parent) {
-        // X=(A=A B=B C=(D=D))
-        String[] parts = tree.split("=", 2);
-        if ( parts.length != 2 )
-            throw new ReviewedGATKException("Unexpected tree structure: " + tree);
-        String name = parts[0];
-        String value = parts[1];
-
-        if ( value.length() == 0 )
-            throw new ReviewedGATKException("Illegal tree structure: " + value + " at " + tree);
-
-        if ( value.charAt(0) == '(' ) {
-            if ( ! value.endsWith(")") )
-                throw new ReviewedGATKException("Illegal tree structure.  Missing ): " + value + " at " + tree);
-            String subtree = value.substring(1, value.length()-1);
-            DiffNode rec = DiffNode.empty(name, parent);
-            String[] subParts = subtree.split(" ");
-            for ( String subPart : subParts ) {
-                rec.add(fromString(subPart, rec.getBinding()));
-            }
-            return rec.getBinding();
-        } else {
-            return new DiffValue(name, parent, value).getBinding();
-        }
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/walkers/diffengine/DiffObjects.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/walkers/diffengine/DiffObjects.java
deleted file mode 100644
index c622e24..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/walkers/diffengine/DiffObjects.java
+++ /dev/null
@@ -1,276 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.walkers.diffengine;
-
-import org.broadinstitute.gatk.utils.commandline.Argument;
-import org.broadinstitute.gatk.utils.commandline.Input;
-import org.broadinstitute.gatk.utils.commandline.Output;
-import org.broadinstitute.gatk.engine.CommandLineGATK;
-import org.broadinstitute.gatk.engine.contexts.AlignmentContext;
-import org.broadinstitute.gatk.engine.contexts.ReferenceContext;
-import org.broadinstitute.gatk.engine.refdata.RefMetaDataTracker;
-import org.broadinstitute.gatk.engine.walkers.RodWalker;
-import org.broadinstitute.gatk.utils.help.DocumentedGATKFeature;
-import org.broadinstitute.gatk.utils.help.HelpConstants;
-
-import java.io.File;
-import java.io.PrintStream;
-import java.util.List;
-
-/**
- * A generic engine for comparing tree-structured objects
- *
- * <p>
- *      Compares two record-oriented files, itemizing specific difference between equivalent
- *      records in the two files.  Reports both itemized and summarized differences.
- * </p>
- *
- * <h3>What are the summarized differences and the DiffObjectsWalker?</h3>
- *
- * <p>
- *     The GATK contains a summarizing difference engine that compares hierarchical data structures to emit:
- *      <ul>
- *          <li>A list of specific differences between the two data structures.  This is similar to saying the value in field A in record 1 in file F differences from the value in field A in record 1 in file G.
- *          <li>A summarized list of differences ordered by frequency of the difference.  This output is similar to saying field A in 50 records in files F and G differed.
- *      </ul>
- * </p>
- *
- * <p>
- *      The GATK contains a private walker DiffObjects that allows you access to the DiffEngine capabilities on the command line.  Simply provide the walker with the master and test files and it will emit summarized differences for you.
- * </p>
- *
- * <h3>Why?</h3>
- *
- * <p>
- *      The reason for this system is that it allows you to compare two structured files -- such as BAMs and VCFs -- for common differences among them.  This is primarily useful in regression testing or optimization, where you want to ensure that the differences are those that you expect and not any others.
- * </p>
- *
- * <h3>Input</h3>
- * <p>
- *      The DiffObjectsWalker works with BAM or VCF files.
- * </p>
- *
- * <h3>Output</h3>
- * <p>
- *      The DiffEngine system compares to two hierarchical data structures for specific differences in the values of named
- *      nodes.  Suppose I have two trees:
- * <pre>
- *     Tree1=(A=1 B=(C=2 D=3))
- *     Tree2=(A=1 B=(C=3 D=3 E=4))
- *     Tree3=(A=1 B=(C=4 D=3 E=4))
- * </pre>
- * <p>
- *     where every node in the tree is named, or is a raw value (here all leaf values are integers).  The DiffEngine
- *      traverses these data structures by name, identifies equivalent nodes by fully qualified names
- *      (Tree1.A is distinct from Tree2.A, and determines where their values are equal (Tree1.A=1, Tree2.A=1, so they are).
- *      These itemized differences are listed as:
- * <pre>
- *     Tree1.B.C=2 != Tree2.B.C=3
- *     Tree1.B.C=2 != Tree3.B.C=4
- *     Tree2.B.C=3 != Tree3.B.C=4
- *     Tree1.B.E=MISSING != Tree2.B.E=4
- * </pre>
- *
- * <p>
- *      This conceptually very similar to the output of the unix command line tool diff.  What's nice about DiffEngine though
- *      is that it computes similarity among the itemized differences and displays the count of differences names
- *      in the system.  In the above example, the field C is not equal three times, while the missing E in Tree1 occurs
- *      only once.  So the summary is:
- *
- * <pre>
- *     *.B.C : 3
- *     *.B.E : 1
- * </pre>
- *
- * <p>
- *      where the * operator indicates that any named field matches.  This output is sorted by counts, and provides an
- *      immediate picture of the commonly occurring differences among the files.
- * <p>
- *      Below is a detailed example of two VCF fields that differ because of a bug in the AC, AF, and AN counting routines,
- *      detected by the integrationtest integration (more below).  You can see that in the although there are many specific
- *      instances of these differences between the two files, the summarized differences provide an immediate picture that
- *      the AC, AF, and AN fields are the major causes of the differences.
- * <p>
- *
- * <pre>
- [testng] path                                                             count
- [testng] *.*.*.AC                                                         6
- [testng] *.*.*.AF                                                         6
- [testng] *.*.*.AN                                                         6
- [testng] 64b991fd3850f83614518f7d71f0532f.integrationtest.20:10000000.AC  1
- [testng] 64b991fd3850f83614518f7d71f0532f.integrationtest.20:10000000.AF  1
- [testng] 64b991fd3850f83614518f7d71f0532f.integrationtest.20:10000000.AN  1
- [testng] 64b991fd3850f83614518f7d71f0532f.integrationtest.20:10000117.AC  1
- [testng] 64b991fd3850f83614518f7d71f0532f.integrationtest.20:10000117.AF  1
- [testng] 64b991fd3850f83614518f7d71f0532f.integrationtest.20:10000117.AN  1
- [testng] 64b991fd3850f83614518f7d71f0532f.integrationtest.20:10000211.AC  1
- [testng] 64b991fd3850f83614518f7d71f0532f.integrationtest.20:10000211.AF  1
- [testng] 64b991fd3850f83614518f7d71f0532f.integrationtest.20:10000211.AN  1
- [testng] 64b991fd3850f83614518f7d71f0532f.integrationtest.20:10000598.AC  1
- </pre>
- *
- * <h3>Caveat</h3>
- * <p>Because this is a walker, it requires that you pass a reference file. However the reference is not actually used, so it does not matter what you pass as reference.</p>
- *
- *
- * @author Mark DePristo
- * @since 7/4/11
- */
- at DocumentedGATKFeature( groupName = HelpConstants.DOCS_CAT_QC, extraDocs = {CommandLineGATK.class} )
-public class DiffObjects extends RodWalker<Integer, Integer> {
-    /**
-     * Writes out a file of the DiffEngine format:
-     *
-     *     See http://www.broadinstitute.org/gatk/guide/article?id=1299 for details.
-     */
-    @Output(doc="File to which results should be written")
-    protected PrintStream out;
-
-    /**
-     * The master file against which we will compare test.  This is one of the two required
-     * files to do the comparison.  Conceptually master is the original file contained the expected
-     * results, but this doesn't currently have an impact on the calculations, but might in the future.
-     */
-    @Input(fullName="master", shortName="m", doc="Master file: expected results", required=true)
-    File masterFile;
-
-    /**
-     * The test file against which we will compare to the master.  This is one of the two required
-     * files to do the comparison.  Conceptually test is the derived file from master, but this
-     * doesn't currently have an impact on the calculations, but might in the future.
-     */
-    @Input(fullName="test", shortName="t", doc="Test file: new results to compare to the master file", required=true)
-    File testFile;
-
-    /**
-     * The engine will read at most this number of objects from each of master and test files.  This reduces
-     * the memory requirements for DiffObjects but does limit you to comparing at most this number of objects
-     */
-    @Argument(fullName="maxObjectsToRead", shortName="motr", doc="Max. number of objects to read from the files.  -1 [default] means unlimited", required=false)
-    int MAX_OBJECTS_TO_READ = -1;
-
-    @Argument(fullName="maxRawDiffsToSummarize", shortName="maxRawDiffsToSummarize", doc="Max. number of differences to include in the summary.  -1 [default] means unlimited", required=false)
-    int maxRawDiffsToSummary = -1;
-
-    @Argument(fullName="doPairwise", shortName="doPairwise", doc="If provided, we will compute the minimum pairwise differences to summary, which can be extremely expensive", required=false)
-    boolean doPairwise = false;
-
-    /**
-     * The max number of differences to display when summarizing.  For example, if there are 10M differences, but
-     * maxDiffs is 10, then the comparison aborts after first ten summarized differences are shown.  Note that
-     * the system shows differences sorted by frequency, so these 10 would be the most common between the two files.
-     * A value of 0 means show all possible differences.
-     */
-    @Argument(fullName="maxDiffs", shortName="M", doc="Max. number of diffs to process", required=false)
-    int MAX_DIFFS = 0;
-
-    /**
-     * The maximum number of singleton (occurs exactly once between the two files) to display when writing out
-     * the summary.  Only applies if maxDiffs hasn't been exceeded.  For example, if maxDiffs is 10 and maxCount1Diffs
-     * is 2 and there are 20 diffs with count > 1, then only 10 are shown, all of which have count above 1.
-     */
-    @Argument(fullName="maxCount1Diffs", shortName="M1", doc="Max. number of diffs occuring exactly once in the file to process", required=false)
-    int MAX_COUNT1_DIFFS = 0;
-
-    /**
-     * Only differences that occur more than minCountForDiff are displayed.  For example, if minCountForDiff is 10, then
-     * a difference must occur at least 10 times between the two files to be shown.
-     */
-    @Argument(fullName="minCountForDiff", shortName="MCFD", doc="Min number of observations for a records to display", required=false)
-    int minCountForDiff = 1;
-
-    /**
-     * If provided, the system will write out the summarized, individual differences.  May lead to enormous outputs,
-     * depending on how many differences are found.  Note these are not sorted in any way, so if you have 10M
-     * common differences in the files, you will see 10M records, whereas the final summarize will just list the
-     * difference and its count of 10M.
-     */
-    @Argument(fullName="showItemizedDifferences", shortName="SID", doc="Should we enumerate all differences between the files?", required=false)
-    boolean showItemizedDifferences = false;
-
-    @Argument(fullName="iterations", doc="Number of iterations to perform, should be 1 unless you are doing memory testing", required=false)
-    int iterations = 1;
-
-    DiffEngine diffEngine;
-
-    @Override
-    public void initialize() {
-        this.diffEngine = new DiffEngine();
-    }
-
-    @Override
-    public Integer map(RefMetaDataTracker tracker, ReferenceContext ref, AlignmentContext context) {
-        return 0;
-    }
-
-    @Override
-    public Integer reduceInit() {
-        return 0;
-    }
-
-    @Override
-    public Integer reduce(Integer counter, Integer sum) {
-        return counter + sum;
-    }
-
-    @Override
-    public void onTraversalDone(Integer sum) {
-        if ( iterations > 1 ) {
-            for ( int i = 0; i < iterations; i++ ) {
-                DiffEngine.SummaryReportParams params = new DiffEngine.SummaryReportParams(out, 20, 10, 0, -1, false);
-                boolean success = DiffEngine.simpleDiffFiles(masterFile, testFile, MAX_OBJECTS_TO_READ, params);
-                logger.info("Iteration " + i + " success " + success);
-            }
-        } else {
-            //out.printf("Reading master file %s%n", masterFile);
-            DiffElement master = diffEngine.createDiffableFromFile(masterFile, MAX_OBJECTS_TO_READ);
-            logger.info(String.format("Read %d objects", master.size()));
-            //out.printf("Reading test file %s%n", testFile);
-            DiffElement test = diffEngine.createDiffableFromFile(testFile, MAX_OBJECTS_TO_READ);
-            logger.info(String.format("Read %d objects", test.size()));
-
-//        out.printf("Master diff objects%n");
-//        out.println(master.toString());
-//        out.printf("Test diff objects%n");
-//        out.println(test.toString());
-
-            List<Difference> diffs = diffEngine.diff(master, test);
-            logger.info(String.format("Done computing diff with %d differences found", diffs.size()));
-            if ( showItemizedDifferences ) {
-                out.printf("Itemized results%n");
-                for ( Difference diff : diffs )
-                    out.printf("DIFF: %s%n", diff.toString());
-            }
-
-            DiffEngine.SummaryReportParams params = new DiffEngine.SummaryReportParams(out,
-                    MAX_DIFFS, MAX_COUNT1_DIFFS, minCountForDiff,
-                    maxRawDiffsToSummary, doPairwise);
-            params.setDescending(false);
-            diffEngine.reportSummarizedDifferences(diffs, params);
-            logger.info(String.format("Done summarizing differences"));
-        }
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/walkers/diffengine/DiffValue.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/walkers/diffengine/DiffValue.java
deleted file mode 100644
index acec383..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/walkers/diffengine/DiffValue.java
+++ /dev/null
@@ -1,90 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.walkers.diffengine;
-
-/**
- * Created by IntelliJ IDEA.
- * User: depristo
- * Date: 7/4/11
- * Time: 12:55 PM
- *
- * An interface that must be implemented to allow us to calculate differences
- * between structured objects
- */
-public class DiffValue {
-    private DiffElement binding = null;
-    final private Object value;
-
-    public DiffValue(Object value) {
-        this.value = value;
-    }
-
-    public DiffValue(DiffElement binding, Object value) {
-        this.binding = binding;
-        this.value = value;
-    }
-
-    public DiffValue(DiffValue parent, Object value) {
-        this(parent.getBinding(), value);
-    }
-
-    public DiffValue(String name, DiffElement parent, Object value) {
-        this.binding = new DiffElement(name, parent, this);
-        this.value = value;
-    }
-
-    public DiffValue(String name, DiffValue parent, Object value) {
-        this(name, parent.getBinding(), value);
-    }
-
-    public DiffElement getBinding() {
-        return binding;
-    }
-
-    protected void setBinding(DiffElement binding) {
-        this.binding = binding;
-    }
-
-    public Object getValue() {
-        return value;
-    }
-
-    public String toString() {
-        return getValue().toString();
-    }
-
-    public String toString(int offset) {
-        return toString();
-    }
-
-    public String toOneLineString() {
-        return getValue().toString();
-    }
-
-    public boolean isAtomic() { return true; }
-    public boolean isCompound() { return ! isAtomic(); }
-    public int size() { return 1; }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/walkers/diffengine/DiffableReader.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/walkers/diffengine/DiffableReader.java
deleted file mode 100644
index 903a073..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/walkers/diffengine/DiffableReader.java
+++ /dev/null
@@ -1,66 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.walkers.diffengine;
-
-import com.google.java.contract.Ensures;
-import com.google.java.contract.Requires;
-
-import java.io.File;
-
-/**
- * Created by IntelliJ IDEA.
- * User: depristo
- * Date: 7/4/11
- * Time: 1:09 PM
- *
- * Interface for readers creating diffable objects from a file
- */
-public interface DiffableReader {
-    @Ensures("result != null")
-    /**
-     * Return the name of this DiffableReader type.  For example, the VCF reader returns 'VCF' and the
-     * bam reader 'BAM'
-     */
-    public String getName();
-
-    @Ensures("result != null")
-    @Requires("file != null")
-    /**
-     * Read up to maxElementsToRead DiffElements from file, and return them.
-     */
-    public DiffElement readFromFile(File file, int maxElementsToRead);
-
-    /**
-     * Return true if the file can be read into DiffElement objects with this reader. This should
-     * be uniquely true/false for all readers, as the system will use the first reader that can read the
-     * file.  This routine should never throw an exception.  The VCF reader, for example, looks at the
-     * first line of the file for the ##format=VCF4.1 header, and the BAM reader for the BAM_MAGIC value
-     * @param file
-     * @return
-     */
-    @Requires("file != null")
-    public boolean canRead(File file);
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/walkers/diffengine/Difference.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/walkers/diffengine/Difference.java
deleted file mode 100644
index c8794a7..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/walkers/diffengine/Difference.java
+++ /dev/null
@@ -1,137 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.walkers.diffengine;
-
-public class Difference implements Comparable<Difference> {
-    final String path; // X.Y.Z
-    final String[] parts;
-    int count = 1;
-    DiffElement master = null , test = null;
-
-    public Difference(String path) {
-        this.path = path;
-        this.parts = DiffEngine.diffNameToPath(path);
-    }
-
-    public Difference(DiffElement master, DiffElement test) {
-        this(createPath(master, test), master, test);
-    }
-
-    public Difference(String path, DiffElement master, DiffElement test) {
-        this(path);
-        this.master = master;
-        this.test = test;
-    }
-
-    public String[] getParts() {
-        return parts;
-    }
-
-    public void incCount() { count++; }
-
-    public int getCount() {
-        return count;
-    }
-
-    public void setCount(int count) {
-        this.count = count;
-    }
-
-    /**
-     * The fully qualified path object A.B.C etc
-     * @return
-     */
-    public String getPath() {
-        return path;
-    }
-
-    /**
-     * @return the length of the parts of this summary
-     */
-    public int length() {
-        return this.parts.length;
-    }
-
-    /**
-     * Returns true if the string parts matches this summary.  Matches are
-     * must be equal() everywhere where this summary isn't *.
-     * @param otherParts
-     * @return
-     */
-    public boolean matches(String[] otherParts) {
-        if ( otherParts.length != length() )
-            return false;
-
-        // TODO optimization: can start at right most non-star element
-        for ( int i = 0; i < length(); i++ ) {
-            String part = parts[i];
-            if ( ! part.equals("*") && ! part.equals(otherParts[i]) )
-                return false;
-        }
-
-        return true;
-    }
-
-    @Override
-    public String toString() {
-        return String.format("%s:%d:%s", getPath(), getCount(), valueDiffString());
-    }
-
-    @Override
-    public int compareTo(Difference other) {
-        // sort first highest to lowest count, then by lowest to highest path
-        int countCmp = Integer.valueOf(count).compareTo(other.count);
-        return countCmp != 0 ? -1 * countCmp : path.compareTo(other.path);
-    }
-
-    public String valueDiffString() {
-        if ( hasSpecificDifference() ) {
-            return String.format("%s!=%s", getOneLineString(master), getOneLineString(test));
-        } else {
-            return "N/A";
-        }
-    }
-
-    private static String createPath(DiffElement master, DiffElement test) {
-        return (master == null ? test : master).fullyQualifiedName();
-    }
-
-    private static String getOneLineString(DiffElement elt) {
-        return elt == null ? "MISSING" : elt.getValue().toOneLineString();
-    }
-
-    public boolean hasSpecificDifference() {
-        return master != null || test != null;
-    }
-
-    public DiffElement getMaster() {
-        return master;
-    }
-
-    public DiffElement getTest() {
-        return test;
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/walkers/diffengine/GATKReportDiffableReader.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/walkers/diffengine/GATKReportDiffableReader.java
deleted file mode 100644
index 4a78448..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/walkers/diffengine/GATKReportDiffableReader.java
+++ /dev/null
@@ -1,104 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.walkers.diffengine;
-
-import org.broadinstitute.gatk.engine.report.GATKReport;
-import org.broadinstitute.gatk.engine.report.GATKReportColumn;
-import org.broadinstitute.gatk.engine.report.GATKReportTable;
-
-import java.io.File;
-import java.io.FileReader;
-import java.io.IOException;
-
-
-/**
- * Class implementing diffnode reader for GATKReports
- */
-
-// TODO Version check to be added at the report level
-
-public class GATKReportDiffableReader implements DiffableReader {
-    @Override
-    public String getName() {
-        return "GATKReport";
-    }
-
-    @Override
-    public DiffElement readFromFile(File file, int maxElementsToRead) {
-        DiffNode root = DiffNode.rooted(file.getName());
-        try {
-            // one line reads the whole thing into memory
-            GATKReport report = new GATKReport(file);
-
-            for (GATKReportTable table : report.getTables()) {
-                root.add(tableToNode(table, root));
-            }
-
-            return root.getBinding();
-        } catch (Exception e) {
-            return null;
-        }
-    }
-
-    private DiffNode tableToNode(GATKReportTable table, DiffNode root) {
-        DiffNode tableRoot = DiffNode.empty(table.getTableName(), root);
-
-        tableRoot.add("Description", table.getTableDescription());
-        tableRoot.add("NumberOfRows", table.getNumRows());
-
-        for ( GATKReportColumn column : table.getColumnInfo() ) {
-            DiffNode columnRoot = DiffNode.empty(column.getColumnName(), tableRoot);
-
-            columnRoot.add("Width", column.getColumnFormat().getWidth());
-            // NOTE: as the values are trimmed during parsing left/right alignment is not currently preserved
-            columnRoot.add("Displayable", true);
-
-            for ( int i = 0; i < table.getNumRows(); i++ ) {
-                String name = column.getColumnName() + (i+1);
-                columnRoot.add(name, table.get(i, column.getColumnName()).toString());
-            }
-
-            tableRoot.add(columnRoot);
-        }
-
-        return tableRoot;
-    }
-
-    @Override
-    public boolean canRead(File file) {
-        try {
-            final String HEADER = GATKReport.GATKREPORT_HEADER_PREFIX;
-            final char[] buff = new char[HEADER.length()];
-            final FileReader FR = new FileReader(file);
-            FR.read(buff, 0, HEADER.length());
-            FR.close();
-            String firstLine = new String(buff);
-            return firstLine.startsWith(HEADER);
-        } catch (IOException e) {
-            return false;
-        }
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/walkers/diffengine/VCFDiffableReader.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/walkers/diffengine/VCFDiffableReader.java
deleted file mode 100644
index 23b213e..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/walkers/diffengine/VCFDiffableReader.java
+++ /dev/null
@@ -1,145 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.walkers.diffengine;
-
-import org.apache.log4j.Logger;
-import htsjdk.tribble.AbstractFeatureReader;
-import htsjdk.tribble.FeatureReader;
-import org.broadinstitute.gatk.utils.Utils;
-import htsjdk.variant.vcf.*;
-import htsjdk.variant.variantcontext.Genotype;
-import htsjdk.variant.variantcontext.VariantContext;
-
-import java.io.*;
-import java.util.Iterator;
-import java.util.Map;
-
-
-/**
- * Created by IntelliJ IDEA.
- * User: depristo
- * Date: 7/4/11
- * Time: 1:09 PM
- *
- * Class implementing diffnode reader for VCF
- */
-public class VCFDiffableReader implements DiffableReader {
-    private static Logger logger = Logger.getLogger(VCFDiffableReader.class);
-
-    @Override
-    public String getName() { return "VCF"; }
-
-    @Override
-    public DiffElement readFromFile(File file, int maxElementsToRead) {
-        DiffNode root = DiffNode.rooted(file.getName());
-        try {
-            // read the version line from the file
-            BufferedReader br = new BufferedReader(new FileReader(file));
-            final String version = br.readLine();
-            root.add("VERSION", version);
-            br.close();
-
-            final VCFCodec vcfCodec = new VCFCodec();
-            vcfCodec.disableOnTheFlyModifications(); // must be read as state is stored in reader itself
-
-            FeatureReader<VariantContext> reader = AbstractFeatureReader.getFeatureReader(file.getAbsolutePath(), vcfCodec, false);
-            VCFHeader header = (VCFHeader)reader.getHeader();
-            for ( VCFHeaderLine headerLine : header.getMetaDataInInputOrder() ) {
-                String key = headerLine.getKey();
-                if ( headerLine instanceof VCFIDHeaderLine)
-                    key += "_" + ((VCFIDHeaderLine) headerLine).getID();
-                if ( root.hasElement(key) )
-                    logger.warn("Skipping duplicate header line: file=" + file + " line=" + headerLine.toString());
-                else
-                    root.add(key, headerLine.toString());
-            }
-
-            int count = 0, nRecordsAtPos = 1;
-            String prevName = "";
-            Iterator<VariantContext> it = reader.iterator();
-            while ( it.hasNext() ) {
-                VariantContext vc = it.next();
-                String name = vc.getChr() + ":" + vc.getStart();
-                if ( name.equals(prevName) ) {
-                    name += "_" + ++nRecordsAtPos;
-                } else {
-                    prevName = name;
-                }
-                DiffNode vcRoot = DiffNode.empty(name, root);
-
-                // add fields
-                vcRoot.add("CHROM", vc.getChr());
-                vcRoot.add("POS", vc.getStart());
-                vcRoot.add("ID", vc.getID());
-                vcRoot.add("REF", vc.getReference());
-                vcRoot.add("ALT", vc.getAlternateAlleles());
-                vcRoot.add("QUAL", vc.hasLog10PError() ? vc.getLog10PError() * -10 : VCFConstants.MISSING_VALUE_v4);
-                vcRoot.add("FILTER", ! vc.filtersWereApplied() // needs null to differentiate between PASS and .
-                        ? VCFConstants.MISSING_VALUE_v4
-                        : ( vc.getFilters().isEmpty() ? VCFConstants.PASSES_FILTERS_v4 : vc.getFilters()) );
-
-                // add info fields
-                for (Map.Entry<String, Object> attribute : vc.getAttributes().entrySet()) {
-                    if ( ! attribute.getKey().startsWith("_") )
-                        vcRoot.add(attribute.getKey(), attribute.getValue());
-                }
-
-                for (Genotype g : vc.getGenotypes() ) {
-                    DiffNode gRoot = DiffNode.empty(g.getSampleName(), vcRoot);
-                    gRoot.add("GT", g.getGenotypeString());
-                    if ( g.hasGQ() ) gRoot.add("GQ", g.getGQ() );
-                    if ( g.hasDP() ) gRoot.add("DP", g.getDP() );
-                    if ( g.hasAD() ) gRoot.add("AD", Utils.join(",", g.getAD()));
-                    if ( g.hasPL() ) gRoot.add("PL", Utils.join(",", g.getPL()));
-                    if ( g.getFilters() != null ) gRoot.add("FT", g.getFilters());
-
-                    for (Map.Entry<String, Object> attribute : g.getExtendedAttributes().entrySet()) {
-                        if ( ! attribute.getKey().startsWith("_") )
-                            gRoot.add(attribute.getKey(), attribute.getValue());
-                    }
-
-                    vcRoot.add(gRoot);
-                }
-
-                root.add(vcRoot);
-                count += vcRoot.size();
-                if ( count > maxElementsToRead && maxElementsToRead != -1)
-                    break;
-            }
-
-            reader.close();
-        } catch ( IOException e ) {
-            return null;
-        }
-
-        return root.getBinding();
-    }
-
-    @Override
-    public boolean canRead(File file) {
-        return AbstractVCFCodec.canDecodeFile(file.getPath(), VCFCodec.VCF4_MAGIC_HEADER);
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/CatVariants.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/CatVariants.java
index 43403ab..f11e01d 100644
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/CatVariants.java
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/CatVariants.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -25,6 +25,7 @@
 
 package org.broadinstitute.gatk.tools;
 
+import htsjdk.samtools.SAMSequenceDictionary;
 import htsjdk.samtools.reference.ReferenceSequenceFile;
 import htsjdk.samtools.reference.ReferenceSequenceFileFactory;
 import org.apache.log4j.BasicConfigurator;
@@ -40,13 +41,14 @@ import org.broadinstitute.gatk.utils.help.DocumentedGATKFeature;
 import org.broadinstitute.gatk.utils.help.HelpConstants;
 import org.broadinstitute.gatk.utils.text.XReadLines;
 import org.broadinstitute.gatk.utils.variant.GATKVCFIndexType;
-import org.broadinstitute.gatk.utils.variant.GATKVCFUtils;
+import org.broadinstitute.gatk.engine.GATKVCFUtils;
 import htsjdk.variant.bcf2.BCF2Codec;
 import org.broadinstitute.gatk.utils.collections.Pair;
 import htsjdk.variant.vcf.VCFCodec;
 import htsjdk.variant.vcf.VCFHeader;
 import org.broadinstitute.gatk.utils.exceptions.UserException;
 import htsjdk.variant.variantcontext.VariantContext;
+import htsjdk.variant.variantcontext.VariantContextComparator;
 import htsjdk.variant.variantcontext.writer.Options;
 import htsjdk.variant.variantcontext.writer.VariantContextWriter;
 import htsjdk.variant.variantcontext.writer.VariantContextWriterFactory;
@@ -56,24 +58,23 @@ import java.util.*;
 
 /**
  *
- * Concatenates VCF files of non-overlapped genome intervals, all with the same set of samples
+ * Concatenate VCF files of non-overlapping genome intervals, all with the same set of samples
  *
  * <p>
  * The main purpose of this tool is to speed up the gather function when using scatter-gather parallelization.
  * This tool concatenates the scattered output VCF files. It assumes that:
- * - All the input VCFs (or BCFs) contain the same samples in the same order.
- * - The variants in each input file are from non-overlapping (scattered) intervals.
- *
- * When the input files are already sorted based on the intervals start positions, use -assumeSorted.
- *
- * Note: Currently the tool is more efficient when working with VCFs; we will work to make it as efficient for BCFs.
- *
+ * <ul>
+ *     <li>All the input VCFs (or BCFs) contain the same samples in the same order.</li>
+ *     <li>The variants in each input file are from non-overlapping (scattered) intervals.</li>
+ * </ul>
  * </p>
+ * <p>When the input files are already sorted based on the intervals start positions, use -assumeSorted.</p>
  *
  * <h3>Input</h3>
  * <p>
- * One or more variant sets to combine. They should be of non-overlapping genome intervals and with the same samples (in the same order).
- * If the files are ordered according to the appearance of intervals in the ref genome, then one can use the -assumeSorted flag.
+ * Two or more variant sets to combine. They should be of non-overlapping genome intervals and with the same
+ * samples (sorted in the same order). If the files are ordered according to the appearance of intervals in the ref
+ * genome, then one can use the -assumeSorted flag.
  * </p>
  *
  * <h3>Output</h3>
@@ -86,16 +87,19 @@ import java.util.*;
  * invoke it is a little different from other GATK tools (see example below), and it does not accept any of the
  * classic "CommandLineGATK" arguments.</p>
  *
- * <h3>Example</h3>
+ * <h3>Usage example</h3>
  * <pre>
  * java -cp GenomeAnalysisTK.jar org.broadinstitute.gatk.tools.CatVariants \
- *    -R ref.fasta \
+ *    -R reference.fasta \
  *    -V input1.vcf \
  *    -V input2.vcf \
  *    -out output.vcf \
  *    -assumeSorted
  * </pre>
  *
+ * <h3>Caveat</h3>
+ * <p>Currently the tool is more efficient when working with VCFs than with BCFs.</p>
+ *
  * @author Ami Levy Moonshine
  * @since Jan 2012
  */
@@ -147,37 +151,30 @@ public class CatVariants extends CommandLineProgram {
         INVALID
     }
 
-    private FileType fileExtensionCheck(File inFile, File outFile) {
+    private FileType fileExtensionCheck(File inFile, FileType previousFileType) {
         final String inFileName = inFile.toString().toLowerCase();
-        final String outFileName = outFile.toString().toLowerCase();
-
-        FileType inFileType = FileType.INVALID;
 
         if (inFileName.endsWith(".vcf")) {
-            inFileType = FileType.VCF;
-            if (outFileName.endsWith(".vcf"))
-                return inFileType;
+            if (previousFileType == FileType.VCF || previousFileType == null) {
+                return FileType.VCF;
+            }
         }
 
         if (inFileName.endsWith(".bcf")) {
-            inFileType = FileType.BCF;
-            if (outFileName.endsWith(".bcf"))
-                return inFileType;
+            if (previousFileType == FileType.BCF || previousFileType == null) {
+                return FileType.BCF;
+            }
         }
 
         for (String extension : AbstractFeatureReader.BLOCK_COMPRESSED_EXTENSIONS) {
             if (inFileName.endsWith(".vcf" + extension)) {
-                inFileType = FileType.BLOCK_COMPRESSED_VCF;
-                if (outFileName.endsWith(".vcf" + extension))
-                    return inFileType;
+                if (previousFileType == FileType.BLOCK_COMPRESSED_VCF || previousFileType == null) {
+                    return FileType.BLOCK_COMPRESSED_VCF;
+                }
             }
         }
 
-        if (inFileType == FileType.INVALID)
-            System.err.println(String.format("File extension for input file %s is not valid for CatVariants", inFile));
-        else
-            System.err.println(String.format("File extension mismatch between input %s and output %s", inFile, outFile));
-
+        System.err.println(String.format("File extension for input file %s is not valid for CatVariants", inFile));
         printUsage();
         return FileType.INVALID;
     }
@@ -233,23 +230,23 @@ public class CatVariants extends CommandLineProgram {
 
         variant = parseVariantList(variant);
 
-        Comparator<Pair<Integer,File>> positionComparator = new PositionComparator();
+        Comparator<Pair<VariantContext,File>> positionComparator = new PositionComparator(ref.getSequenceDictionary());
 
-        Queue<Pair<Integer,File>> priorityQueue;
+        Queue<Pair<VariantContext,File>> priorityQueue;
         if (assumeSorted)
             priorityQueue = new LinkedList<>();
         else
             priorityQueue = new PriorityQueue<>(10000, positionComparator);
 
-        FileType fileType = FileType.INVALID;
+        FileType fileType = null;
         for (File file : variant) {
             // if it returns a valid type, it will be the same for all files
-            fileType = fileExtensionCheck(file, outputFile);
+            fileType = fileExtensionCheck(file, fileType);
             if (fileType == FileType.INVALID)
                 return 1;
 
             if (assumeSorted){
-                priorityQueue.add(new Pair<>(0,file));
+                priorityQueue.add(new Pair<VariantContext,File>(null,file));
             }
             else{
                 if (!file.exists()) {
@@ -262,16 +259,14 @@ public class CatVariants extends CommandLineProgram {
                     continue;
                 }
                 VariantContext vc = it.next();
-                int firstPosition = vc.getStart();
                 reader.close();
-                priorityQueue.add(new Pair<>(firstPosition,file));
+                priorityQueue.add(new Pair<>(vc,file));
             }
-
         }
 
         FileOutputStream outputStream = new FileOutputStream(outputFile);
         EnumSet<Options> options = EnumSet.of(Options.INDEX_ON_THE_FLY);
-        final IndexCreator idxCreator = GATKVCFUtils.getIndexCreator(variant_index_type, variant_index_parameter, outputFile, ref.getSequenceDictionary());
+        IndexCreator idxCreator = GATKVCFUtils.makeIndexCreator(variant_index_type, variant_index_parameter, outputFile, ref.getSequenceDictionary());
         final VariantContextWriter outputWriter = VariantContextWriterFactory.create(outputFile, outputStream, ref.getSequenceDictionary(), idxCreator, options);
 
         boolean firstFile = true;
@@ -324,15 +319,19 @@ public class CatVariants extends CommandLineProgram {
         }
     }
 
-    private static class PositionComparator implements Comparator<Pair<Integer,File>> {
+    private static class PositionComparator implements Comparator<Pair<VariantContext,File>> {
+    	
+    	VariantContextComparator comp;
+    	
+    	public PositionComparator(final SAMSequenceDictionary dict){
+    		comp = new VariantContextComparator(dict);
+    	}
 
         @Override
-        public int compare(Pair<Integer,File> p1, Pair<Integer,File> p2) {
-            int startPositionP1 = p1.getFirst();
-            int startPositionP2 = p2.getFirst();
-            if (startPositionP1  == startPositionP2)
-                return 0;
-            return startPositionP1 < startPositionP2 ? -1 : 1 ;
+        public int compare(final Pair<VariantContext,File> p1, final Pair<VariantContext,File> p2) {
+            final VariantContext startPositionP1 = p1.getFirst();
+            final VariantContext startPositionP2 = p2.getFirst();
+            return comp.compare(startPositionP1, startPositionP2);
         }
     }
 }
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/ListAnnotations.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/ListAnnotations.java
deleted file mode 100644
index 72d30de..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/ListAnnotations.java
+++ /dev/null
@@ -1,85 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.tools;
-
-import org.broadinstitute.gatk.utils.commandline.CommandLineProgram;
-import org.broadinstitute.gatk.utils.exceptions.UserException;
-import org.broadinstitute.gatk.utils.help.DocumentedGATKFeature;
-import org.broadinstitute.gatk.utils.help.HelpConstants;
-import org.broadinstitute.gatk.utils.help.HelpUtils;
-
-/**
- * Utility program to print a list of available annotations
- *
- * <p>This is a very simple utility tool that retrieves available annotations for use with tools such as
- * UnifiedGenotyper, HaplotypeCaller and VariantAnnotator.</p>
- *
- * <h3>Important note</h3>
- * <p>This is a command-line utility that bypasses the GATK engine. As a result, the command-line you must use to
- * invoke it is a little different from other GATK tools (see usage below), and it does not accept any of the
- * classic "CommandLineGATK" arguments.</p>
- *
- * <h3>Usage</h3>
- * <pre>java -cp GenomeAnalysisTK.jar org.broadinstitute.gatk.tools.ListAnnotations</pre>
- *
- * @author vdauwera
- * @since 3/14/13
- */
- at DocumentedGATKFeature( groupName = HelpConstants.DOCS_CAT_HELPUTILS )
-public class ListAnnotations extends CommandLineProgram {
-
-    /*
-     * Print usage information
-     *
-     * TODO: would be more convenient if we could just call the program by name instead of the full classpath
-     */
-    private static void printUsage() {
-        System.err.println("Usage: java -cp dist/GenomeAnalysisTK.jar org.broadinstitute.gatk.tools.ListAnnotations");
-        System.err.println("    Prints a list of available annotations and exits.");
-    }
-
-    // TODO: override CommandLineProgram bit that offers version, logging etc arguments. We don't need that stuff here and it makes the doc confusing.
-
-    @Override
-    protected int execute() throws Exception {
-
-        HelpUtils.listAnnotations();
-        return 0;
-    }
-
-    public static void main(String[] args){
-        try {
-            ListAnnotations instance = new ListAnnotations();
-            start(instance, args);
-            System.exit(CommandLineProgram.result);
-        } catch ( UserException e ) {
-            printUsage();
-            exitSystemWithUserError(e);
-        } catch ( Exception e ) {
-            exitSystemWithError(e);
-        }
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/annotator/AlleleBalance.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/annotator/AlleleBalance.java
index 9127b5e..ccb4be7 100644
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/annotator/AlleleBalance.java
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/annotator/AlleleBalance.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -30,16 +30,15 @@ import htsjdk.variant.variantcontext.Allele;
 import htsjdk.variant.variantcontext.Genotype;
 import htsjdk.variant.variantcontext.GenotypesContext;
 import htsjdk.variant.variantcontext.VariantContext;
-import htsjdk.variant.vcf.VCFHeaderLineType;
-import htsjdk.variant.vcf.VCFInfoHeaderLine;
-import org.broadinstitute.gatk.engine.contexts.AlignmentContext;
-import org.broadinstitute.gatk.engine.contexts.ReferenceContext;
-import org.broadinstitute.gatk.engine.refdata.RefMetaDataTracker;
+import org.broadinstitute.gatk.utils.contexts.AlignmentContext;
+import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
+import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
 import org.broadinstitute.gatk.tools.walkers.annotator.interfaces.AnnotatorCompatible;
 import org.broadinstitute.gatk.tools.walkers.annotator.interfaces.InfoFieldAnnotation;
 import org.broadinstitute.gatk.utils.MathUtils;
 import org.broadinstitute.gatk.utils.genotyper.PerReadAlleleLikelihoodMap;
 import org.broadinstitute.gatk.utils.pileup.ReadBackedPileup;
+import org.broadinstitute.gatk.utils.variant.GATKVCFConstants;
 
 import java.util.Arrays;
 import java.util.HashMap;
@@ -144,15 +143,15 @@ public class AlleleBalance extends InfoFieldAnnotation {
 
         Map<String, Object> map = new HashMap<>();
         if ( weightHet > 0.0 ) {
-            map.put("ABHet",ratioHet/weightHet);
+            map.put(GATKVCFConstants.ALLELE_BALANCE_HET_KEY,ratioHet/weightHet);
         }
 
         if ( weightHom > 0.0 ) {
-            map.put("ABHom",ratioHom/weightHom);
+            map.put(GATKVCFConstants.ALLELE_BALANCE_HOM_KEY,ratioHom/weightHom);
         }
 
         if ( overallNonDiploid > 0.0 ) {
-            map.put("OND",overallNonDiploid);
+            map.put(GATKVCFConstants.NON_DIPLOID_RATIO_KEY,overallNonDiploid);
         }
         return map;
     }
@@ -210,9 +209,10 @@ public class AlleleBalance extends InfoFieldAnnotation {
 
     }
 
-    public List<String> getKeyNames() { return Arrays.asList("ABHet","ABHom","OND"); }
-
-    public List<VCFInfoHeaderLine> getDescriptions() { return Arrays.asList(new VCFInfoHeaderLine("ABHet", 1, VCFHeaderLineType.Float, "Allele Balance for heterozygous calls (ref/(ref+alt))"),
-            new VCFInfoHeaderLine("ABHom", 1, VCFHeaderLineType.Float, "Allele Balance for homozygous calls (A/(A+O)) where A is the allele (ref or alt) and O is anything other"),
-            new VCFInfoHeaderLine("OND", 1, VCFHeaderLineType.Float, "Overall non-diploid ratio (alleles/(alleles+non-alleles))")); }
+    @Override
+    public List<String> getKeyNames() {
+        return Arrays.asList(GATKVCFConstants.ALLELE_BALANCE_HET_KEY,
+                             GATKVCFConstants.ALLELE_BALANCE_HOM_KEY,
+                             GATKVCFConstants.NON_DIPLOID_RATIO_KEY);
+    }
 }
\ No newline at end of file
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/annotator/AlleleBalanceBySample.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/annotator/AlleleBalanceBySample.java
index 9f5ee9c..fb73008 100644
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/annotator/AlleleBalanceBySample.java
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/annotator/AlleleBalanceBySample.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -32,9 +32,9 @@ import htsjdk.variant.variantcontext.GenotypeBuilder;
 import htsjdk.variant.variantcontext.VariantContext;
 import htsjdk.variant.vcf.VCFFormatHeaderLine;
 import htsjdk.variant.vcf.VCFHeaderLineType;
-import org.broadinstitute.gatk.engine.contexts.AlignmentContext;
-import org.broadinstitute.gatk.engine.contexts.ReferenceContext;
-import org.broadinstitute.gatk.engine.refdata.RefMetaDataTracker;
+import org.broadinstitute.gatk.utils.contexts.AlignmentContext;
+import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
+import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
 import org.broadinstitute.gatk.tools.walkers.annotator.interfaces.AnnotatorCompatible;
 import org.broadinstitute.gatk.tools.walkers.annotator.interfaces.ExperimentalAnnotation;
 import org.broadinstitute.gatk.tools.walkers.annotator.interfaces.GenotypeAnnotation;
@@ -43,6 +43,8 @@ import org.broadinstitute.gatk.utils.genotyper.PerReadAlleleLikelihoodMap;
 import org.broadinstitute.gatk.utils.pileup.PileupElement;
 import org.broadinstitute.gatk.utils.pileup.ReadBackedPileup;
 import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
+import org.broadinstitute.gatk.utils.variant.GATKVCFConstants;
+import org.broadinstitute.gatk.utils.variant.GATKVCFHeaderLines;
 
 import java.util.Arrays;
 import java.util.HashMap;
@@ -63,7 +65,7 @@ import java.util.Set;
  * <ul>
  *     <li>This annotation will only work properly for biallelic heterozygous calls.</li>
  *     <li>This annotation cannot currently be calculated for indels.</li>
- *     <li>tThe reasoning underlying this annotation only applies to germline variants in DNA sequencing data. In somatic/cancer analysis, divergent ratios are expected due to tumor heterogeneity. In RNAseq analysis, divergent ratios may indicate differential allele expression.</li>
+ *     <li>The reasoning underlying this annotation only applies to germline variants in DNA sequencing data. In somatic/cancer analysis, divergent ratios are expected due to tumor heterogeneity. In RNAseq analysis, divergent ratios may indicate differential allele expression.</li>
  *     <li>As stated above, this annotation is experimental and should be interpreted with caution as we cannot guarantee that it is appropriate. Basically, use it at your own risk.</li>
  * </ul>
  * <h3>Related annotations</h3>
@@ -92,7 +94,7 @@ public class AlleleBalanceBySample extends GenotypeAnnotation implements Experim
         // and isBiallelic() while ignoring the <NON_REF> allele
         boolean biallelicSNP = vc.isSNP() && vc.isBiallelic();
 
-        if(vc.hasAllele(GVCF_NONREF)){
+        if(vc.hasAllele(GATKVCFConstants.NON_REF_SYMBOLIC_ALLELE)){
             // If we have the GVCF <NON_REF> allele, then the SNP is biallelic
             // iff there are 3 alleles and both the reference and first alt
             // allele are length 1.
@@ -118,8 +120,6 @@ public class AlleleBalanceBySample extends GenotypeAnnotation implements Experim
         gb.attribute(getKeyNames().get(0), Double.valueOf(String.format("%.2f", ratio)));
     }
 
-    private static final Allele GVCF_NONREF = Allele.create("<NON_REF>", false);
-
     private Double annotateWithPileup(final AlignmentContext stratifiedContext, final VariantContext vc) {
 
         final HashMap<Byte, Integer> alleleCounts = new HashMap<>();
@@ -175,7 +175,7 @@ public class AlleleBalanceBySample extends GenotypeAnnotation implements Experim
 
     }
 
-    public List<String> getKeyNames() { return Arrays.asList("AB"); }
+    public List<String> getKeyNames() { return Arrays.asList(GATKVCFConstants.ALLELE_BALANCE_KEY); }
 
-    public List<VCFFormatHeaderLine> getDescriptions() { return Arrays.asList(new VCFFormatHeaderLine(getKeyNames().get(0), 1, VCFHeaderLineType.Float, "Allele balance for each het genotype")); }
+    public List<VCFFormatHeaderLine> getDescriptions() { return Arrays.asList(GATKVCFHeaderLines.getFormatLine(getKeyNames().get(0))); }
 }
\ No newline at end of file
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/annotator/AlleleSpecificAnnotationData.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/annotator/AlleleSpecificAnnotationData.java
new file mode 100644
index 0000000..df39442
--- /dev/null
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/annotator/AlleleSpecificAnnotationData.java
@@ -0,0 +1,96 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.tools.walkers.annotator;
+
+import htsjdk.variant.variantcontext.Allele;
+import org.broadinstitute.gatk.utils.exceptions.GATKException;
+
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * A class to encapsulate the raw data for allele-specific classes compatible with the ReducibleAnnotation interface
+ * @param <T> the type of raw data to be stored for later annotation calculation
+ */
+public class AlleleSpecificAnnotationData<T> extends ReducibleAnnotationData<T>{
+    final private List<Allele> alleleList;
+    private Allele refAllele;
+
+    public AlleleSpecificAnnotationData(final List<Allele> inputAlleles, final String inputData) {
+        super(inputData);
+        attributeMap = new HashMap<>();
+        for(final Allele a : inputAlleles) {
+            attributeMap.put(a, null);
+        }
+        alleleList = inputAlleles;
+        for(Allele a : alleleList) {
+            if(a.isReference()) {
+                refAllele = a;
+            }
+        }
+    }
+
+    @Override
+    public List<Allele> getAlleles() {return Collections.unmodifiableList(alleleList);}
+
+    /**
+     * Get the reference allele for this allele-specific data.
+     * (Used in cases where annotations compare some attribute of the alt alleles to that of the reference.)
+     * @return  the reference allele for this data
+     */
+    public Allele getRefAllele() {return refAllele;}
+
+    public void setAttributeMap(Map<Allele, T> inputMap) {
+        super.setAttributeMap(inputMap);
+        checkRefAlleles();
+    }
+
+    private void checkRefAlleles() {
+        boolean foundRef = false;
+        for (Allele a : alleleList) {
+            if (a.isReference()) {
+                if (foundRef)
+                    throw new GATKException("ERROR: multiple reference alleles found in annotation data\n");
+                foundRef = true;
+            }
+        }
+        if (!foundRef)
+            throw new GATKException("ERROR: no reference alleles found in annotation data\n");
+    }
+
+    public String makeRawAnnotationString(String printDelim) {
+        String annotationString = "";
+        for (final Allele current : alleleList) {
+            if (!annotationString.isEmpty())
+                annotationString += printDelim;
+            if(attributeMap.get(current) != null)
+                annotationString += attributeMap.get(current).toString();
+        }
+        return annotationString.replaceAll("[\\[\\]\\s]", "");
+    }
+}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/annotator/BaseCounts.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/annotator/BaseCounts.java
index 44579f9..66ea8df 100644
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/annotator/BaseCounts.java
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/annotator/BaseCounts.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -25,16 +25,17 @@
 
 package org.broadinstitute.gatk.tools.walkers.annotator;
 
-import org.broadinstitute.gatk.engine.contexts.AlignmentContext;
-import org.broadinstitute.gatk.engine.contexts.ReferenceContext;
-import org.broadinstitute.gatk.engine.refdata.RefMetaDataTracker;
+import org.broadinstitute.gatk.utils.contexts.AlignmentContext;
+import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
+import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
 import org.broadinstitute.gatk.tools.walkers.annotator.interfaces.AnnotatorCompatible;
 import org.broadinstitute.gatk.tools.walkers.annotator.interfaces.InfoFieldAnnotation;
 import org.broadinstitute.gatk.utils.genotyper.PerReadAlleleLikelihoodMap;
 import org.broadinstitute.gatk.utils.BaseUtils;
-import htsjdk.variant.vcf.VCFHeaderLineType;
 import htsjdk.variant.vcf.VCFInfoHeaderLine;
 import htsjdk.variant.variantcontext.VariantContext;
+import org.broadinstitute.gatk.utils.variant.GATKVCFConstants;
+import org.broadinstitute.gatk.utils.variant.GATKVCFHeaderLines;
 
 import java.util.Arrays;
 import java.util.HashMap;
@@ -80,12 +81,12 @@ import java.util.Map;
                     counts[index]++;
             }
         }
-        Map<String, Object> map = new HashMap<String, Object>();
+        Map<String, Object> map = new HashMap<>();
         map.put(getKeyNames().get(0), counts);
         return map;
     }
 
-    public List<String> getKeyNames() { return Arrays.asList("BaseCounts"); }
+    public List<String> getKeyNames() { return Arrays.asList(GATKVCFConstants.BASE_COUNTS_KEY); }
 
-    public List<VCFInfoHeaderLine> getDescriptions() { return Arrays.asList(new VCFInfoHeaderLine("BaseCounts", 4, VCFHeaderLineType.Integer, "Counts of each base")); }
+    public List<VCFInfoHeaderLine> getDescriptions() { return Arrays.asList(GATKVCFHeaderLines.getInfoLine(getKeyNames().get(0))); }
 }
\ No newline at end of file
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/annotator/ChromosomeCountConstants.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/annotator/ChromosomeCountConstants.java
deleted file mode 100644
index 67fc0a4..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/annotator/ChromosomeCountConstants.java
+++ /dev/null
@@ -1,44 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.tools.walkers.annotator;
-
-import htsjdk.variant.vcf.VCFConstants;
-import htsjdk.variant.vcf.VCFInfoHeaderLine;
-import htsjdk.variant.vcf.VCFStandardHeaderLines;
-
-
-/**
- * Keys and descriptions for the common chromosome count annotations
- */
-public class ChromosomeCountConstants {
-
-    public static final String[] keyNames = { VCFConstants.ALLELE_NUMBER_KEY, VCFConstants.ALLELE_COUNT_KEY, VCFConstants.ALLELE_FREQUENCY_KEY };
-
-    public static final VCFInfoHeaderLine[] descriptions = {
-            VCFStandardHeaderLines.getInfoLine(VCFConstants.ALLELE_FREQUENCY_KEY),
-            VCFStandardHeaderLines.getInfoLine(VCFConstants.ALLELE_COUNT_KEY),
-            VCFStandardHeaderLines.getInfoLine(VCFConstants.ALLELE_NUMBER_KEY) };
-}
\ No newline at end of file
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/annotator/CompressedDataList.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/annotator/CompressedDataList.java
new file mode 100644
index 0000000..a89859f
--- /dev/null
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/annotator/CompressedDataList.java
@@ -0,0 +1,117 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.tools.walkers.annotator;
+
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.Map;
+
+/**
+ * A class to represent data as a list of &lt;value,count&gt; pairs.  For example, the list 2,2,2,2,2,2,3,4,4,4,5,5
+ * would be compressed as 2,6,3,1,4,3,5,2. The compressed list should be sorted in ascending order by value.
+ *
+ * Created by gauthier on 9/25/15.
+ */
+public class CompressedDataList<T>  implements Iterable<T> {
+    protected Map<T,Integer> valueCounts = new HashMap<>();
+
+    public Map<T,Integer> getValueCounts(){
+        return valueCounts;
+    }
+
+    public boolean isEmpty(){
+        return valueCounts.isEmpty();
+    }
+
+    @Override
+    public Iterator<T> iterator(){
+        Iterator<T> it = new Iterator<T>() {
+            private Iterator<T> keySetIterator = valueCounts.keySet().iterator();
+            private T currentKey = valueCounts.isEmpty() ? null : keySetIterator.next();
+            private int currentValueIndex = 0;
+            private int currentValueSize = valueCounts.isEmpty() ? 0 : valueCounts.get(currentKey);
+
+            @Override
+            public boolean hasNext() {
+                return !valueCounts.isEmpty() && (keySetIterator.hasNext() || currentValueIndex < currentValueSize);
+            }
+
+            @Override
+            public T next() {
+                T retKey = currentKey;
+                currentValueIndex++;
+                if(currentValueIndex==currentValueSize){
+                    if(keySetIterator.hasNext()) {
+                        currentKey = keySetIterator.next();
+                        currentValueIndex = 0;
+                        currentValueSize = valueCounts.get(currentKey);
+                    }
+                }
+                return retKey;
+            }
+
+            @Override
+            public void remove() {
+                throw new UnsupportedOperationException();
+            }
+        };
+        return it;
+    }
+
+    @Override
+    public String toString(){
+        String str = "";
+        Object[] keys = valueCounts.keySet().toArray();
+        Arrays.sort(keys);
+        for (Object i: keys){
+            if(!str.isEmpty())
+                str+=",";
+            str+=(i+","+valueCounts.get(i));
+        }
+        return str;
+    }
+
+    public void add(final T val){
+        add(val, 1);
+    }
+
+    public void add(final T val, final int count){
+        if(valueCounts.containsKey(val)){
+            valueCounts.put(val, valueCounts.get(val)+count);
+        }
+        else
+            valueCounts.put(val, count);
+
+    }
+
+    public void add(final CompressedDataList<T> obj){
+        for(Map.Entry<T, Integer> pair : obj.getValueCounts().entrySet()){
+            this.add(pair.getKey(),pair.getValue());
+        }
+    }
+
+}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/annotator/FractionInformativeReads.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/annotator/FractionInformativeReads.java
new file mode 100644
index 0000000..747e0fc
--- /dev/null
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/annotator/FractionInformativeReads.java
@@ -0,0 +1,115 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.tools.walkers.annotator;
+
+import htsjdk.variant.variantcontext.Allele;
+import htsjdk.variant.variantcontext.Genotype;
+import htsjdk.variant.variantcontext.VariantContext;
+import htsjdk.variant.vcf.VCFConstants;
+import org.broadinstitute.gatk.tools.walkers.annotator.interfaces.AnnotatorCompatible;
+import org.broadinstitute.gatk.tools.walkers.annotator.interfaces.InfoFieldAnnotation;
+import org.broadinstitute.gatk.tools.walkers.annotator.interfaces.ReducibleAnnotation;
+import org.broadinstitute.gatk.utils.MathUtils;
+import org.broadinstitute.gatk.utils.contexts.AlignmentContext;
+import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
+import org.broadinstitute.gatk.utils.genotyper.PerReadAlleleLikelihoodMap;
+import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
+import org.broadinstitute.gatk.utils.variant.GATKVCFConstants;
+
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * The fraction of reads that were deemed "informative" by the HaplotypeCaller over the entire cohort
+ *
+ * <p>The FractionInformativeReads annotation produces a single fraction for each site: sum(AD)/sum(DP). The sum in the numerator
+ * is over all the samples in the cohort and all the alleles in each sample. The sum in the denominator is over all the samples.</p>
+ *
+ *
+ * <h3>Caveats</h3>
+ * <ul>
+ *     <li>This annotation is generated by HaplotypeCaller or GenotypeGVCFs (it will not work when called from VariantAnnotator).</li>
+ * </ul>
+ *
+ * <h3>Related annotations</h3>
+ * <ul>
+ *     <li><b><a href="https://www.broadinstitute.org/gatk/guide/tooldocs/org_broadinstitute_gatk_tools_walkers_annotator_DepthPerAlleleBySample.php">DepthPerAlleleBySample</a></b> displays the number of reads supporting each allele, without calculating the fraction.</li>
+ * </ul>
+ */
+
+public class FractionInformativeReads extends InfoFieldAnnotation implements ReducibleAnnotation {
+    @Override
+    public String getRawKeyName() {
+        return null;
+    }
+
+    @Override
+    public List<String> getKeyNames() {
+        return Collections.singletonList(GATKVCFConstants.FRACTION_INFORMATIVE_READS_KEY);
+    }
+
+    @Override
+    public Map<String, Object> annotateRawData(RefMetaDataTracker tracker, AnnotatorCompatible walker, ReferenceContext ref, Map<String, AlignmentContext> stratifiedContexts, VariantContext vc, Map<String, PerReadAlleleLikelihoodMap> stratifiedPerReadAlleleLikelihoodMap) {
+        return null;
+    }
+
+    @Override
+    public Map<String, Object> combineRawData(List<Allele> allelesList, List<? extends ReducibleAnnotationData> listOfRawData) {
+        return null;
+    }
+
+    @Override
+    public Map<String, Object> finalizeRawData(VariantContext vc, VariantContext originalVC) {
+
+        int totalAD = 0;
+        for (final Genotype gt : vc.getGenotypes()){
+            if(gt != null) {
+                if(gt.hasAD()) {
+                    totalAD += MathUtils.sum(gt.getAD());
+                    continue;
+                }
+                // this is needed since the finalizing of HOM_REF genotypes comes after the finalizing of annotations. so the AD field is null at this point.
+                // TODO: this will become unneeded if the above statement is false in which case it can be safely removed.
+                if(gt.hasExtendedAttribute(GATKVCFConstants.MIN_DP_FORMAT_KEY)) {
+                    totalAD += Integer.parseInt((String) gt.getExtendedAttribute(GATKVCFConstants.MIN_DP_FORMAT_KEY));
+                }
+            }
+        }
+        final int depth =  vc.getAttributeAsInt(VCFConstants.DEPTH_KEY, 0);
+        return Collections.singletonMap(GATKVCFConstants.FRACTION_INFORMATIVE_READS_KEY, (Object) (depth != 0 ? totalAD / (double) depth : 0));
+    }
+
+    @Override
+    public void calculateRawData(VariantContext vc, Map<String, PerReadAlleleLikelihoodMap> pralm, ReducibleAnnotationData rawAnnotations) {
+
+    }
+
+    @Override
+    public Map<String, Object> annotate(RefMetaDataTracker tracker, AnnotatorCompatible walker, ReferenceContext ref, Map<String, AlignmentContext> stratifiedContexts, VariantContext vc, Map<String, PerReadAlleleLikelihoodMap> stratifiedPerReadAlleleLikelihoodMap) {
+        return null;
+    }
+}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/annotator/LowMQ.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/annotator/LowMQ.java
index 05054a3..34c5868 100644
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/annotator/LowMQ.java
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/annotator/LowMQ.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -25,16 +25,17 @@
 
 package org.broadinstitute.gatk.tools.walkers.annotator;
 
-import org.broadinstitute.gatk.engine.contexts.AlignmentContext;
-import org.broadinstitute.gatk.engine.contexts.ReferenceContext;
-import org.broadinstitute.gatk.engine.refdata.RefMetaDataTracker;
+import org.broadinstitute.gatk.utils.contexts.AlignmentContext;
+import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
+import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
 import org.broadinstitute.gatk.tools.walkers.annotator.interfaces.AnnotatorCompatible;
 import org.broadinstitute.gatk.tools.walkers.annotator.interfaces.InfoFieldAnnotation;
 import org.broadinstitute.gatk.utils.genotyper.PerReadAlleleLikelihoodMap;
-import htsjdk.variant.vcf.VCFHeaderLineType;
 import htsjdk.variant.vcf.VCFInfoHeaderLine;
 import org.broadinstitute.gatk.utils.pileup.PileupElement;
 import htsjdk.variant.variantcontext.VariantContext;
+import org.broadinstitute.gatk.utils.variant.GATKVCFConstants;
+import org.broadinstitute.gatk.utils.variant.GATKVCFHeaderLines;
 
 import java.util.Arrays;
 import java.util.HashMap;
@@ -48,8 +49,7 @@ import java.util.Map;
  * <p>This annotation tells you what fraction of reads have a mapping quality of less than the given threshold of 10 (including 0). Note that certain tools may impose a different minimum mapping quality threshold. For example, HaplotypeCaller excludes reads with MAPQ<20.</p>
  *
  * <h3>Calculation</h3>
- * <p> $$ LowMQ = \frac{# reads with MAPQ=0 + # reads with MAPQ<10}{total # reads} $$
- * </p>
+ * $$ LowMQ = \frac{\# reads with MAPQ=0 + \# reads with MAPQ&lt;10}{total \# reads} $$
  *
  * <h3>Related annotations</h3>
  * <ul>
@@ -80,12 +80,12 @@ public class LowMQ extends InfoFieldAnnotation {
 				total += 1; 
             }
         }
-        Map<String, Object> map = new HashMap<String, Object>();
+        Map<String, Object> map = new HashMap<>();
         map.put(getKeyNames().get(0), String.format("%.04f,%.04f,%.00f", mq0/total, mq10/total, total));
         return map;
     }
 
-    public List<String> getKeyNames() { return Arrays.asList("LowMQ"); }
+    public List<String> getKeyNames() { return Arrays.asList(GATKVCFConstants.LOW_MQ_KEY); }
 
-    public List<VCFInfoHeaderLine> getDescriptions() { return Arrays.asList(new VCFInfoHeaderLine(getKeyNames().get(0), 3, VCFHeaderLineType.Float, "3-tuple: <fraction of reads with MQ=0>,<fraction of reads with MQ<=10>,<total number of reads>")); }
+    public List<VCFInfoHeaderLine> getDescriptions() { return Arrays.asList(GATKVCFHeaderLines.getInfoLine(getKeyNames().get(0))); }
 }
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/annotator/MappingQualityZeroBySample.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/annotator/MappingQualityZeroBySample.java
index cf1323f..be3b401 100644
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/annotator/MappingQualityZeroBySample.java
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/annotator/MappingQualityZeroBySample.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -25,20 +25,21 @@
 
 package org.broadinstitute.gatk.tools.walkers.annotator;
 
-import org.broadinstitute.gatk.engine.contexts.AlignmentContext;
-import org.broadinstitute.gatk.engine.contexts.ReferenceContext;
-import org.broadinstitute.gatk.engine.refdata.RefMetaDataTracker;
+import org.broadinstitute.gatk.utils.contexts.AlignmentContext;
+import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
+import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
 import org.broadinstitute.gatk.tools.walkers.annotator.interfaces.AnnotatorCompatible;
 import org.broadinstitute.gatk.tools.walkers.annotator.interfaces.GenotypeAnnotation;
 import org.broadinstitute.gatk.utils.genotyper.PerReadAlleleLikelihoodMap;
 import htsjdk.variant.vcf.VCFConstants;
 import htsjdk.variant.vcf.VCFFormatHeaderLine;
-import htsjdk.variant.vcf.VCFHeaderLineType;
 import org.broadinstitute.gatk.utils.pileup.PileupElement;
 import org.broadinstitute.gatk.utils.pileup.ReadBackedPileup;
 import htsjdk.variant.variantcontext.Genotype;
 import htsjdk.variant.variantcontext.GenotypeBuilder;
 import htsjdk.variant.variantcontext.VariantContext;
+import org.broadinstitute.gatk.utils.variant.GATKVCFConstants;
+import org.broadinstitute.gatk.utils.variant.GATKVCFHeaderLines;
 
 import java.util.Arrays;
 import java.util.List;
@@ -48,6 +49,9 @@ import java.util.List;
  *
  * <p>This annotation gives you the count of all reads that have MAPQ = 0 for each sample. The count of reads with MAPQ0 can be used for quality control; high counts typically indicate regions where it is difficult to make confident calls.</p>
  *
+ * <h3>Caveat</h3>
+ * <p>It is not useful to apply this annotation with HaplotypeCaller because HC filters out all reads with MQ0 upfront, so the annotation will always return a value of 0.</p>
+ *
  * <h3>Related annotations</h3>
  * <ul>
  *     <li><b><a href="https://www.broadinstitute.org/gatk/guide/tooldocs/org_broadinstitute_gatk_tools_walkers_annotator_MappingQualityZero.php">MappingQualityZero</a></b> gives the count of reads with MAPQ=0 across all samples.</li>
@@ -76,11 +80,9 @@ public class MappingQualityZeroBySample extends GenotypeAnnotation {
         gb.attribute(getKeyNames().get(0), mq0);
     }
 
-    public List<String> getKeyNames() { return Arrays.asList(VCFConstants.MAPPING_QUALITY_ZERO_KEY); }
+    public List<String> getKeyNames() { return Arrays.asList(GATKVCFConstants.MAPPING_QUALITY_ZERO_BY_SAMPLE_KEY); }
 
-    public List<VCFFormatHeaderLine> getDescriptions() { return Arrays.asList(
-            new VCFFormatHeaderLine(getKeyNames().get(0), 1,
-                    VCFHeaderLineType.Integer, "Number of Mapping Quality Zero Reads per sample")); }
+    public List<VCFFormatHeaderLine> getDescriptions() { return Arrays.asList(GATKVCFHeaderLines.getFormatLine(getKeyNames().get(0))); }
 
 
 }
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/annotator/NBaseCount.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/annotator/NBaseCount.java
index 544feb7..b4cafa8 100644
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/annotator/NBaseCount.java
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/annotator/NBaseCount.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -25,17 +25,18 @@
 
 package org.broadinstitute.gatk.tools.walkers.annotator;
 
-import org.broadinstitute.gatk.engine.contexts.AlignmentContext;
-import org.broadinstitute.gatk.engine.contexts.ReferenceContext;
-import org.broadinstitute.gatk.engine.refdata.RefMetaDataTracker;
+import org.broadinstitute.gatk.utils.contexts.AlignmentContext;
+import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
+import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
 import org.broadinstitute.gatk.tools.walkers.annotator.interfaces.AnnotatorCompatible;
 import org.broadinstitute.gatk.tools.walkers.annotator.interfaces.InfoFieldAnnotation;
 import org.broadinstitute.gatk.utils.genotyper.PerReadAlleleLikelihoodMap;
 import org.broadinstitute.gatk.utils.BaseUtils;
-import htsjdk.variant.vcf.VCFHeaderLineType;
 import htsjdk.variant.vcf.VCFInfoHeaderLine;
 import org.broadinstitute.gatk.utils.pileup.PileupElement;
 import htsjdk.variant.variantcontext.VariantContext;
+import org.broadinstitute.gatk.utils.variant.GATKVCFConstants;
+import org.broadinstitute.gatk.utils.variant.GATKVCFHeaderLines;
 
 import java.util.Arrays;
 import java.util.HashMap;
@@ -47,7 +48,8 @@ import java.util.Map;
  *
  * <p>N occurs in a sequence when the sequencer does not have enough information to determine which base it should call. The presence of many Ns at the same site lowers our confidence in any calls made there, because it suggests that there was some kind of technical difficulty that interfered with the sequencing process.</p>
  *
- * <p><b>Note that in GATK versions 3.2 and earlier, this annotation only counted N bases from reads generated with SOLiD technology. This functionality was generalized for all sequencing platforms in GATK version 3.3.</b></p>
+ * <h3>Note</h3>
+ * <p>In GATK versions 3.2 and earlier, this annotation only counted N bases from reads generated with SOLiD technology. This functionality was generalized for all sequencing platforms in GATK version 3.3.</p>
  *
  * <h3>Related annotations</h3>
  * <ul>
@@ -70,7 +72,6 @@ public class NBaseCount extends InfoFieldAnnotation {
 
         for( final AlignmentContext context : stratifiedContexts.values() ) {
             for( final PileupElement p : context.getBasePileup()) {
-                final String platform = p.getRead().getReadGroup().getPlatform();
                 if( BaseUtils.isNBase( p.getBase() ) ) {
                     countNBase++;
                 } else if( BaseUtils.isRegularBase( p.getBase() ) ) {
@@ -78,12 +79,12 @@ public class NBaseCount extends InfoFieldAnnotation {
                 }
             }
         }
-        final Map<String, Object> map = new HashMap<String, Object>();
+        final Map<String, Object> map = new HashMap<>();
         map.put(getKeyNames().get(0), String.format("%.4f", (double)countNBase / (double)(countNBase + countRegularBase + 1)));
         return map;
     }
 
-    public List<String> getKeyNames() { return Arrays.asList("PercentNBase"); }
+    public List<String> getKeyNames() { return Arrays.asList(GATKVCFConstants.N_BASE_COUNT_KEY); }
 
-    public List<VCFInfoHeaderLine> getDescriptions() { return Arrays.asList(new VCFInfoHeaderLine("PercentNBase", 1, VCFHeaderLineType.Float, "Percentage of N bases in the pileup")); }
+    public List<VCFInfoHeaderLine> getDescriptions() { return Arrays.asList(GATKVCFHeaderLines.getInfoLine(getKeyNames().get(0))); }
 }
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/annotator/ReducibleAnnotationData.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/annotator/ReducibleAnnotationData.java
new file mode 100644
index 0000000..0c18bb1
--- /dev/null
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/annotator/ReducibleAnnotationData.java
@@ -0,0 +1,105 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.tools.walkers.annotator;
+
+import htsjdk.variant.variantcontext.Allele;
+
+import java.util.*;
+
+/**
+ * A class to encapsulate the raw data for classes compatible with the ReducibleAnnotation interface
+ */
+public class ReducibleAnnotationData<T> {
+    protected String rawData;
+    protected Map<Allele, T> attributeMap;
+
+    /**
+     * Create a new ReducibleAnnotationData using the raw data string from a VCF
+     * @param inputData the raw data as read in from a VCF
+     */
+    public ReducibleAnnotationData(final String inputData) {
+        rawData = inputData; attributeMap = new HashMap<>();
+        attributeMap.put(Allele.NO_CALL, null);
+    }
+
+    /**
+     *
+     * @return the string of raw data as represented in the VCF
+     */
+    public String getRawData() {return rawData;}
+
+    /**
+     * Note: this parent class (ReducibleAnnotationData) is non-allele-specific and stores all values under the no-call allele
+     * @return the list of alleles for which we have raw annotation data
+     */
+    public List<Allele> getAlleles() {
+        List ret = new ArrayList<Allele>();
+        ret.addAll(attributeMap.keySet());
+        return ret;
+    }
+
+    /**
+     *
+     * @param key   the allele of interest
+     * @return  do we have data for the allele of interest?
+     */
+    public boolean hasAttribute(Allele key) {
+        return attributeMap.containsKey(key);
+    }
+
+    /**
+     *
+     * @param key the allele of interest
+     * @return  data for the allele of interest
+     */
+    public T getAttribute(Allele key) {
+        return attributeMap.get(key);
+    }
+
+    /**
+     *
+     * @param key   the allele of interest
+     * @param value raw data corresponding to the allele of interest
+     */
+    public void putAttribute(Allele key, T value) {
+        attributeMap.put(key, value);
+    }
+
+    /**
+     * Assign all of the per-allele raw data at once
+     * @param inputMap  the pre-calculated per-allele data
+     */
+    public void setAttributeMap(Map<Allele, T> inputMap) {
+        attributeMap = inputMap;
+    }
+
+    /**
+     * Get the stored raw per-allele data
+     * @return an unmodifiable view of the stored per-allele attribute map
+     */
+    public Map<Allele, T> getAttributeMap() {return Collections.unmodifiableMap(attributeMap);}
+
+}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/annotator/SnpEff.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/annotator/SnpEff.java
index f514f67..ceb353b 100644
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/annotator/SnpEff.java
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/annotator/SnpEff.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -28,17 +28,16 @@ package org.broadinstitute.gatk.tools.walkers.annotator;
 import org.apache.log4j.Logger;
 import org.broadinstitute.gatk.utils.commandline.RodBinding;
 import org.broadinstitute.gatk.engine.GenomeAnalysisEngine;
-import org.broadinstitute.gatk.engine.contexts.AlignmentContext;
-import org.broadinstitute.gatk.engine.contexts.ReferenceContext;
-import org.broadinstitute.gatk.engine.refdata.RefMetaDataTracker;
+import org.broadinstitute.gatk.utils.contexts.AlignmentContext;
+import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
+import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
 import org.broadinstitute.gatk.tools.walkers.annotator.interfaces.AnnotatorCompatible;
 import org.broadinstitute.gatk.tools.walkers.annotator.interfaces.InfoFieldAnnotation;
 import org.broadinstitute.gatk.tools.walkers.annotator.interfaces.RodRequiringAnnotation;
 import org.broadinstitute.gatk.utils.genotyper.PerReadAlleleLikelihoodMap;
 import org.broadinstitute.gatk.utils.Utils;
-import org.broadinstitute.gatk.utils.variant.GATKVCFUtils;
+import org.broadinstitute.gatk.engine.GATKVCFUtils;
 import htsjdk.variant.vcf.*;
-import org.broadinstitute.gatk.utils.exceptions.UserException;
 import htsjdk.variant.variantcontext.VariantContext;
 
 import java.util.*;
@@ -47,9 +46,9 @@ import java.util.regex.Pattern;
 /**
  * Top effect from SnpEff functional predictions
  *
- * <p>This annotation processes the output of the SnpEff functional prediction tool to select only the predicted effect with the highest biological impact. The SnpEff output must be provided on the command line by specifying "--snpEffFile filename.vcf". See <a href="http://snpeff.sourceforge.net/">http://snpeff.sourceforge.net/</a> for more information about the SnpEff tool</p>.
+ * <p>This annotation processes the output of the SnpEff functional prediction tool to select only the predicted effect with the highest biological impact. The SnpEff output must be provided on the command line by specifying "--snpEffFile filename.vcf". See <a href="http://snpeff.sourceforge.net/">http://snpeff.sourceforge.net/</a> for more information about the SnpEff tool.</p>
  *
- * <h3>Caveats</h3>
+ * <h3>Caveat</h3>
  *
  * <ul><li>This annotation currently only supports output from SnpEff version 2.0.5.</li></ul>
  *
@@ -58,6 +57,8 @@ public class SnpEff extends InfoFieldAnnotation implements RodRequiringAnnotatio
 
     private static Logger logger = Logger.getLogger(SnpEff.class);
 
+    private boolean canAnnotate = true;
+
     // We refuse to parse SnpEff output files generated by unsupported versions, or
     // lacking a SnpEff version number in the VCF header:
     public static final String[] SUPPORTED_SNPEFF_VERSIONS = { "2.0.5" };
@@ -209,10 +210,15 @@ public class SnpEff extends InfoFieldAnnotation implements RodRequiringAnnotatio
         }
     }
 
+    @Override
     public void initialize ( AnnotatorCompatible walker, GenomeAnalysisEngine toolkit, Set<VCFHeaderLine> headerLines ) {
         // Make sure that we actually have a valid SnpEff rod binding (just in case the user specified -A SnpEff
         // without providing a SnpEff rod via --snpEffFile):
-        validateRodBinding(walker.getSnpEffRodBinding());
+        if ( !isValidRodBinding(walker.getSnpEffRodBinding()) ) {
+            canAnnotate = false;
+            return;
+        }
+
         RodBinding<VariantContext> snpEffRodBinding = walker.getSnpEffRodBinding();
 
         // Make sure that the SnpEff version number and command-line header lines are present in the VCF header of
@@ -221,21 +227,40 @@ public class SnpEff extends InfoFieldAnnotation implements RodRequiringAnnotatio
         VCFHeaderLine snpEffVersionLine = snpEffVCFHeader.getOtherHeaderLine(SNPEFF_VCF_HEADER_VERSION_LINE_KEY);
         VCFHeaderLine snpEffCommandLine = snpEffVCFHeader.getOtherHeaderLine(SNPEFF_VCF_HEADER_COMMAND_LINE_KEY);
 
-        checkSnpEffVersionAndCommandLine(snpEffVersionLine, snpEffCommandLine);
+       if ( !isValidSnpEffVersionAndCommandLine(snpEffVersionLine, snpEffCommandLine) ) {
+           canAnnotate = false;
+           return;
+       }
 
         // If everything looks ok, add the SnpEff version number and command-line header lines to the
         // header of the VCF output file, changing the key names so that our output file won't be
         // mistaken in the future for a SnpEff output file:
         headerLines.add(new VCFHeaderLine(OUTPUT_VCF_HEADER_VERSION_LINE_KEY, snpEffVersionLine.getValue()));
         headerLines.add(new VCFHeaderLine(OUTPUT_VCF_HEADER_COMMAND_LINE_KEY, snpEffCommandLine.getValue()));
+
+        // Can only be called from VariantAnnotator
+        if ( !(walker instanceof VariantAnnotator) ) {
+            if ( walker != null )
+                logger.warn("Annotation will not be calculated, must be called from VariantAnnotator, not " + walker.getClass().getName());
+            else
+                logger.warn("Annotation will not be calculated, must be called from VariantAnnotator");
+            canAnnotate = false;
+            return;
+        }
     }
 
+    @Override
     public Map<String, Object> annotate(final RefMetaDataTracker tracker,
                                         final AnnotatorCompatible walker,
                                         final ReferenceContext ref,
                                         final Map<String, AlignmentContext> stratifiedContexts,
                                         final VariantContext vc,
                                         final Map<String, PerReadAlleleLikelihoodMap> stratifiedPerReadAlleleLikelihoodMap) {
+
+        // Can not annotate if failed initialization conditions
+        if ( !canAnnotate )
+            return null;
+
         RodBinding<VariantContext> snpEffRodBinding = walker.getSnpEffRodBinding();
 
         // Get only SnpEff records that start at this locus, not merely span it:
@@ -251,7 +276,7 @@ public class SnpEff extends InfoFieldAnnotation implements RodRequiringAnnotatio
 
         // Parse the SnpEff INFO field annotation from the matching record into individual effect objects:
         List<SnpEffEffect> effects = parseSnpEffRecord(matchingRecord);
-        if ( effects.size() == 0 ) {
+        if ( effects.isEmpty() ) {
             return null;
         }
 
@@ -260,35 +285,42 @@ public class SnpEff extends InfoFieldAnnotation implements RodRequiringAnnotatio
         return mostSignificantEffect.getAnnotations();
     }
 
-    private void validateRodBinding ( RodBinding<VariantContext> snpEffRodBinding ) {
+    private boolean isValidRodBinding ( RodBinding<VariantContext> snpEffRodBinding ) {
         if ( snpEffRodBinding == null || ! snpEffRodBinding.isBound() ) {
-            throw new UserException("The SnpEff annotator requires that a SnpEff VCF output file be provided " +
+            logger.warn("The SnpEff annotator requires that a SnpEff VCF output file be provided " +
                                     "as a rodbinding on the command line via the --snpEffFile option, but " +
                                     "no SnpEff rodbinding was found.");
+            return false;
         }
+        return true;
     }
 
-    private void checkSnpEffVersionAndCommandLine( final VCFHeaderLine snpEffVersionLine, final VCFHeaderLine snpEffCommandLine ) {
+    private boolean isValidSnpEffVersionAndCommandLine( final VCFHeaderLine snpEffVersionLine, final VCFHeaderLine snpEffCommandLine ){
         if ( snpEffVersionLine == null || snpEffVersionLine.getValue() == null || snpEffVersionLine.getValue().trim().length() == 0 ) {
-            throw new UserException(String.format("Could not find a %s entry in the VCF header for the SnpEff input file, " +
-                                                  "and so could not verify that the file was generated by a supported version of SnpEff (%s)",
-                                                  SNPEFF_VCF_HEADER_VERSION_LINE_KEY, supportedSnpEffVersionsString()));
+            logger.warn(String.format("Could not find a %s entry in the VCF header for the SnpEff input file, " +
+                            "and so could not verify that the file was generated by a supported version of SnpEff (%s)",
+                    SNPEFF_VCF_HEADER_VERSION_LINE_KEY, supportedSnpEffVersionsString()));
+            return false;
         }
 
         if ( snpEffCommandLine == null || snpEffCommandLine.getValue() == null || snpEffCommandLine.getValue().trim().length() == 0 ) {
-            throw new UserException(String.format("Could not find a %s entry in the VCF header for the SnpEff input file, " +
+            logger.warn(String.format("Could not find a %s entry in the VCF header for the SnpEff input file, " +
                                                   "which should be added by all supported versions of SnpEff (%s)",
                                                   SNPEFF_VCF_HEADER_COMMAND_LINE_KEY, supportedSnpEffVersionsString()));
+            return false;
         }
 
         String snpEffVersionString = snpEffVersionLine.getValue().replaceAll("\"", "").split(" ")[0];
 
         if ( ! isSupportedSnpEffVersion(snpEffVersionString, snpEffCommandLine.getValue()) ) {
-            throw new UserException(String.format("The version of SnpEff used to generate the SnpEff input file (%s) " +
-                                                  "is not currently supported by the GATK, and was not run in GATK " +
-                                                  "compatibility mode. Supported versions are: %s",
-                                                  snpEffVersionString, supportedSnpEffVersionsString()));
+            logger.warn(String.format("The version of SnpEff used to generate the SnpEff input file (%s) " +
+                            "is not currently supported by the GATK, and was not run in GATK " +
+                            "compatibility mode. Supported versions are: %s",
+                    snpEffVersionString, supportedSnpEffVersionsString()));
+            return false;
         }
+
+        return true;
     }
 
     private boolean isSupportedSnpEffVersion( final String versionString, final String commandLine ) {
@@ -377,6 +409,7 @@ public class SnpEff extends InfoFieldAnnotation implements RodRequiringAnnotatio
         return mostSignificantEffect;
     }
 
+    @Override
     public List<String> getKeyNames() {
         return Arrays.asList( InfoFieldKey.EFFECT_KEY.getKeyName(),
                               InfoFieldKey.IMPACT_KEY.getKeyName(),
@@ -390,6 +423,7 @@ public class SnpEff extends InfoFieldAnnotation implements RodRequiringAnnotatio
                             );
     }
 
+    @Override
     public List<VCFInfoHeaderLine> getDescriptions() {
         return Arrays.asList(
             new VCFInfoHeaderLine(InfoFieldKey.EFFECT_KEY.getKeyName(),            1, VCFHeaderLineType.String,  "The highest-impact effect resulting from the current variant (or one of the highest-impact effects, if there is a tie)"),
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/annotator/SnpEffUtil.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/annotator/SnpEffUtil.java
index c82a013..adb2c4a 100644
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/annotator/SnpEffUtil.java
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/annotator/SnpEffUtil.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/annotator/VariantAnnotator.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/annotator/VariantAnnotator.java
index 042ba48..ca386e8 100644
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/annotator/VariantAnnotator.java
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/annotator/VariantAnnotator.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -30,17 +30,17 @@ import org.broadinstitute.gatk.utils.commandline.*;
 import org.broadinstitute.gatk.engine.CommandLineGATK;
 import org.broadinstitute.gatk.engine.arguments.DbsnpArgumentCollection;
 import org.broadinstitute.gatk.engine.arguments.StandardVariantContextInputArgumentCollection;
-import org.broadinstitute.gatk.engine.contexts.AlignmentContext;
-import org.broadinstitute.gatk.engine.contexts.AlignmentContextUtils;
-import org.broadinstitute.gatk.engine.contexts.ReferenceContext;
-import org.broadinstitute.gatk.engine.downsampling.DownsampleType;
-import org.broadinstitute.gatk.engine.refdata.RefMetaDataTracker;
+import org.broadinstitute.gatk.utils.contexts.AlignmentContext;
+import org.broadinstitute.gatk.utils.contexts.AlignmentContextUtils;
+import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
+import org.broadinstitute.gatk.utils.downsampling.DownsampleType;
+import org.broadinstitute.gatk.tools.walkers.annotator.interfaces.AnnotationHelpUtils;
+import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
 import org.broadinstitute.gatk.tools.walkers.annotator.interfaces.*;
 import org.broadinstitute.gatk.utils.help.HelpConstants;
-import org.broadinstitute.gatk.utils.help.HelpUtils;
-import org.broadinstitute.gatk.utils.variant.GATKVCFUtils;
+import org.broadinstitute.gatk.engine.GATKVCFUtils;
 import org.broadinstitute.gatk.utils.BaseUtils;
-import org.broadinstitute.gatk.utils.SampleUtils;
+import org.broadinstitute.gatk.engine.SampleUtils;
 import htsjdk.variant.vcf.*;
 import org.broadinstitute.gatk.utils.help.DocumentedGATKFeature;
 import htsjdk.variant.variantcontext.VariantContext;
@@ -50,11 +50,13 @@ import it.unimi.dsi.fastutil.objects.ObjectOpenHashSet;
 import java.util.*;
 
 /**
- * Annotates variant calls with context information.
+ * Annotate variant calls with context information
  *
  * <p>
- * VariantAnnotator is a GATK tool for annotating variant calls based on their context.
- * The tool is modular; new annotations can be written easily without modifying VariantAnnotator itself.
+ * This tool is designed to annotate variant calls based on their context (as opposed to functional annotation).
+ * Various annotation modules are available; see the
+ * <a href="https://www.broadinstitute.org/gatk/guide/tooldocs/org_broadinstitute_gatk_tools_walkers_annotator_VariantAnnotator.php#VariantAnnotations">documentation</a>
+ * for a complete list.
  *
  * <h3>Input</h3>
  * <p>
@@ -66,19 +68,48 @@ import java.util.*;
  * An annotated VCF.
  * </p>
  *
- * <h3>Examples</h3>
+ * <h3>Usage examples</h3>
+ * <br />
+ *
+ * <h4>Annotate a VCF with dbSNP IDs and depth of coverage for each sample</h4>
  * <pre>
- * java -Xmx2g -jar GenomeAnalysisTK.jar \
- *   -R ref.fasta \
+ * java -jar GenomeAnalysisTK.jar \
+ *   -R reference.fasta \
  *   -T VariantAnnotator \
  *   -I input.bam \
  *   -o output.vcf \
  *   -A Coverage \
- *   --variant input.vcf \
+ *   -V input.vcf \
  *   -L input.vcf \
  *   --dbsnp dbsnp.vcf
  * </pre>
  *
+ * <h4>Annotate a VCF with allele frequency by an external resource. Annotation will only occur if there is allele concordance between the resource and the input VCF </h4>
+ * <pre>
+ * java -jar GenomeAnalysisTK.jar \
+ *   -R reference.fasta \
+ *   -T VariantAnnotator \
+ *   -I input.bam \
+ *   -o output.vcf \
+ *   -V input.vcf \
+ *   -L input.vcf \
+ *   --resource:foo resource.vcf
+ *   -E foo.AF
+ *   --resourceAlleleConcordance
+ * </pre>
+ *
+ * <h4>Annotate with AF and FILTER fields from an external resource </h4>
+ * <pre>
+ * java -jar GenomeAnalysisTK.jar \
+ *   -R reference.fasta \
+ *   -T VariantAnnotator \
+ *   -o output.vcf \
+ *   --resource:foo resource.vcf \
+ *   --expression foo.AF \
+ *   --expression foo.FILTER \
+ *   -V input.vcf \
+ * </pre>
+ *
  */
 @DocumentedGATKFeature( groupName = HelpConstants.DOCS_CAT_VARMANIP, extraDocs = {CommandLineGATK.class} )
 @Requires(value={})
@@ -93,9 +124,9 @@ public class VariantAnnotator extends RodWalker<Integer, Integer> implements Ann
 
     /**
      * The INFO field will be annotated with information on the most biologically significant effect
-     * listed in the SnpEff output file for each variant.
+     * listed for each variant in the SnpEff file.
      */
-    @Input(fullName="snpEffFile", shortName = "snpEffFile", doc="A SnpEff output file from which to add annotations", required=false)
+    @Input(fullName="snpEffFile", shortName = "snpEffFile", doc="SnpEff file from which to get annotations", required=false)
     public RodBinding<VariantContext> snpEffFile;
     public RodBinding<VariantContext> getSnpEffRodBinding() { return snpEffFile; }
 
@@ -112,7 +143,7 @@ public class VariantAnnotator extends RodWalker<Integer, Integer> implements Ann
       * Records that are filtered in the comp track will be ignored. Note that 'dbSNP' has been special-cased
       * (see the --dbsnp argument).
       */
-    @Input(fullName="comp", shortName = "comp", doc="comparison VCF file", required=false)
+    @Input(fullName="comp", shortName = "comp", doc="Comparison VCF file", required=false)
     public List<RodBinding<VariantContext>> comps = Collections.emptyList();
     public List<RodBinding<VariantContext>> getCompRodBindings() { return comps; }
 
@@ -125,7 +156,8 @@ public class VariantAnnotator extends RodWalker<Integer, Integer> implements Ann
       * '-E my_resource.AC' (-E is short for --expression, also documented on this page). In the resulting output
       * VCF, any records for which there is a record at the same position in the resource file will be annotated with
       * 'my_resource.AC=N'. Note that if there are multiple records in the resource file that overlap the given
-      * position, one is chosen randomly.
+      * position, one is chosen randomly. Check for allele concordance if using --resourceAlleleConcordance, otherwise
+      * the match is based on position only.
       */
     @Input(fullName="resource", shortName = "resource", doc="External resource VCF file", required=false)
     public List<RodBinding<VariantContext>> resources = Collections.emptyList();
@@ -162,6 +194,7 @@ public class VariantAnnotator extends RodWalker<Integer, Integer> implements Ann
      * 'resource_file.vcf', you tag it with '-resource:my_resource resource_file.vcf' (see the -resource argument, also
      * documented on this page) and you specify '-E my_resource.AC'. In the resulting output VCF, any records for
      * which there is a record at the same position in the resource file will be annotated with 'my_resource.AC=N'.
+     * INFO field data, ID, ALT, and FILTER fields may be used as expression values.
      * Note that if there are multiple records in the resource file that overlap the given position, one is chosen
      * randomly.
      */
@@ -169,6 +202,14 @@ public class VariantAnnotator extends RodWalker<Integer, Integer> implements Ann
     protected Set<String> expressionsToUse = new ObjectOpenHashSet();
 
     /**
+     * If this argument is specified, add annotations (specified by --expression) from an external resource
+     * (specified by --resource) to the input VCF (specified by --variant) only if the alleles are
+     * concordant between input and the resource VCFs. Otherwise, always add the annotations.
+     */
+    @Argument(fullName="resourceAlleleConcordance", shortName="rac", doc="Check for allele concordances when using an external resource VCF file", required=false)
+    protected Boolean expressionAlleleConcordance = false;
+
+    /**
      * You can use the -XL argument in combination with this one to exclude specific annotations.Note that some
      * annotations may not be actually applied if they are not applicable to the data provided or if they are
      * unavailable to the tool (e.g. there are several annotations that are currently not hooked up to
@@ -182,20 +223,25 @@ public class VariantAnnotator extends RodWalker<Integer, Integer> implements Ann
     protected Boolean USE_ALL_ANNOTATIONS = false;
 
     /**
-     * Note that the --list argument requires a fully resolved and correct command-line to work. As an alternative, you can use ListAnnotations (see Help Utilities).
+     * Note that the --list argument requires a fully resolved and correct command-line to work. As an alternative,
+     * you can use ListAnnotations (see Help Utilities).
      */
     @Argument(fullName="list", shortName="ls", doc="List the available annotations and exit", required=false)
     protected Boolean LIST = false;
 
     /**
-     * By default, the dbSNP ID is added only when the ID field in the variant VCF is empty (not already annotated).
-     * This argument allows you to override that behavior. This is used in conjuction with the -dbsnp argument.
+     * By default, a dbSNP ID is added only when the ID field in the variant record is empty (not already annotated).
+     * This argument allows you to override that behavior, and appends the new ID to the existing one. This is used
+     * in conjunction with the -dbsnp argument.
      */
-    @Argument(fullName="alwaysAppendDbsnpId", shortName="alwaysAppendDbsnpId", doc="Append the dbSNP ID even when the variant VCF already has the ID field populated", required=false)
+    @Argument(fullName="alwaysAppendDbsnpId", shortName="alwaysAppendDbsnpId", doc="Add dbSNP ID even if one is already present", required=false)
     protected Boolean ALWAYS_APPEND_DBSNP_ID = false;
     public boolean alwaysAppendDbsnpId() { return ALWAYS_APPEND_DBSNP_ID; }
 
-    @Argument(fullName="MendelViolationGenotypeQualityThreshold",shortName="mvq",required=false,doc="The genotype quality threshold in order to annotate mendelian violation ratio")
+    /**
+     * The genotype quality (GQ) threshold above which the mendelian violation ratio should be annotated.
+     */
+    @Argument(fullName="MendelViolationGenotypeQualityThreshold",shortName="mvq",required=false,doc="GQ threshold for annotating MV ratio")
     public double minGenotypeQualityP = 0.0;
 
     private VariantAnnotatorEngine engine;
@@ -206,7 +252,7 @@ public class VariantAnnotator extends RodWalker<Integer, Integer> implements Ann
     public void initialize() {
 
         if ( LIST ) {
-            HelpUtils.listAnnotations();
+            AnnotationHelpUtils.listAnnotations();
             System.exit(0);
         }
 
@@ -219,12 +265,13 @@ public class VariantAnnotator extends RodWalker<Integer, Integer> implements Ann
         else
             engine = new VariantAnnotatorEngine(annotationGroupsToUse, annotationsToUse, annotationsToExclude, this, getToolkit());
         engine.initializeExpressions(expressionsToUse);
+        engine.setExpressionAlleleConcordance(expressionAlleleConcordance);
 
         // setup the header fields
         // note that if any of the definitions conflict with our new ones, then we want to overwrite the old ones
         final Set<VCFHeaderLine> hInfo = new HashSet<>();
         hInfo.addAll(engine.getVCFAnnotationDescriptions());
-        for ( final VCFHeaderLine line : GATKVCFUtils.getHeaderFields(getToolkit(), Arrays.asList(variantCollection.variants.getName())) ) {
+        for ( final VCFHeaderLine line : GATKVCFUtils.getHeaderFields(getToolkit(), rodName) ) {
             if ( isUniqueHeaderLine(line, hInfo) )
                 hInfo.add(line);
         }
@@ -256,6 +303,7 @@ public class VariantAnnotator extends RodWalker<Integer, Integer> implements Ann
             }
         }
 
+        engine.makeHeaderInfoMap(hInfo);
         engine.invokeAnnotationInitializationMethods(hInfo);
 
         VCFHeader vcfHeader = new VCFHeader(hInfo, samples);
@@ -293,8 +341,9 @@ public class VariantAnnotator extends RodWalker<Integer, Integer> implements Ann
         if ( tracker == null )
             return 0;
 
+        // get the variant contexts for all the variants at the location
         Collection<VariantContext> VCs = tracker.getValues(variantCollection.variants, context.getLocation());
-        if ( VCs.size() == 0 )
+        if ( VCs.isEmpty() )
             return 0;
 
         Collection<VariantContext> annotatedVCs = VCs;
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/annotator/VariantAnnotatorEngine.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/annotator/VariantAnnotatorEngine.java
index 60c8824..bddc020 100644
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/annotator/VariantAnnotatorEngine.java
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/annotator/VariantAnnotatorEngine.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -29,36 +29,45 @@ import com.google.java.contract.Ensures;
 import com.google.java.contract.Requires;
 import htsjdk.variant.variantcontext.*;
 import htsjdk.variant.vcf.*;
+import org.apache.log4j.Logger;
 import org.broadinstitute.gatk.engine.GenomeAnalysisEngine;
-import org.broadinstitute.gatk.engine.contexts.AlignmentContext;
-import org.broadinstitute.gatk.engine.contexts.ReferenceContext;
-import org.broadinstitute.gatk.engine.refdata.RefMetaDataTracker;
 import org.broadinstitute.gatk.tools.walkers.annotator.interfaces.*;
 import org.broadinstitute.gatk.utils.GenomeLoc;
 import org.broadinstitute.gatk.utils.commandline.RodBinding;
+import org.broadinstitute.gatk.utils.contexts.AlignmentContext;
+import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
 import org.broadinstitute.gatk.utils.exceptions.UserException;
 import org.broadinstitute.gatk.utils.genotyper.PerReadAlleleLikelihoodMap;
 import org.broadinstitute.gatk.utils.genotyper.ReadLikelihoods;
+import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
+import org.broadinstitute.gatk.utils.variant.GATKVariantContextUtils;
 
 import java.util.*;
 
 
 public class VariantAnnotatorEngine {
+    private final static Logger logger = Logger.getLogger(VariantAnnotatorEngine.class);
     private List<InfoFieldAnnotation> requestedInfoAnnotations = Collections.emptyList();
+    private List<InfoFieldAnnotation> requestedReducibleInfoAnnotations = new ArrayList<>();
+    private List<InfoFieldAnnotation> requestedNonReducibleInfoAnnotations = new ArrayList<>();
     private List<GenotypeAnnotation> requestedGenotypeAnnotations = Collections.emptyList();
     private List<VAExpression> requestedExpressions = new ArrayList<>();
+    private boolean expressionAlleleConcordance = false;
 
     private final AnnotatorCompatible walker;
     private final GenomeAnalysisEngine toolkit;
 
     VariantOverlapAnnotator variantOverlapAnnotator = null;
 
+    // Map of info field name to info field
+    private final Map<String, VCFInfoHeaderLine> hInfoMap = new HashMap<>();
+
     protected static class VAExpression {
 
         public String fullName, fieldName;
         public RodBinding<VariantContext> binding;
 
-        public VAExpression(String fullExpression, List<RodBinding<VariantContext>> bindings) {
+        public VAExpression(String fullExpression, List<RodBinding<VariantContext>> bindings){
             final int indexOfDot = fullExpression.lastIndexOf(".");
             if ( indexOfDot == -1 )
                 throw new UserException.BadArgumentValue(fullExpression, "it should be in rodname.value format");
@@ -83,6 +92,7 @@ public class VariantAnnotatorEngine {
         requestedInfoAnnotations = AnnotationInterfaceManager.createAllInfoFieldAnnotations();
         requestedGenotypeAnnotations = AnnotationInterfaceManager.createAllGenotypeAnnotations();
         excludeAnnotations(annotationsToExclude);
+        setReducibleAnnotations();
         initializeDBs(toolkit);
     }
 
@@ -91,9 +101,17 @@ public class VariantAnnotatorEngine {
         this.walker = walker;
         this.toolkit = toolkit;
         initializeAnnotations(annotationGroupsToUse, annotationsToUse, annotationsToExclude);
+        setReducibleAnnotations();
         initializeDBs(toolkit);
     }
 
+    public void makeHeaderInfoMap(final Set<VCFHeaderLine> hInfo ){
+        for ( VCFHeaderLine hLine : hInfo ) {
+            if ( hLine instanceof VCFInfoHeaderLine )
+                hInfoMap.put( ((VCFInfoHeaderLine)hLine).getID(), (VCFInfoHeaderLine)hLine);
+        }
+    }
+
     // select specific expressions to use
     public void initializeExpressions(Set<String> expressionsToUse) {
         // set up the expressions
@@ -101,8 +119,15 @@ public class VariantAnnotatorEngine {
             requestedExpressions.add(new VAExpression(expression, walker.getResourceRodBindings()));
     }
 
+    // set whether enforing allele concordance for expression
+    public void setExpressionAlleleConcordance(Boolean expressionAlleleConcordance){
+        this.expressionAlleleConcordance = expressionAlleleConcordance;
+    }
+
     protected List<VAExpression> getRequestedExpressions() { return requestedExpressions; }
 
+    public List<InfoFieldAnnotation> getRequestedReducibleInfoAnnotations() { return Collections.unmodifiableList(requestedReducibleInfoAnnotations); }
+
     private void initializeAnnotations(List<String> annotationGroupsToUse, List<String> annotationsToUse, List<String> annotationsToExclude) {
         AnnotationInterfaceManager.validateAnnotations(annotationGroupsToUse, annotationsToUse);
         requestedInfoAnnotations = AnnotationInterfaceManager.createInfoFieldAnnotations(annotationGroupsToUse, annotationsToUse);
@@ -111,7 +136,7 @@ public class VariantAnnotatorEngine {
     }
 
     private void excludeAnnotations(List<String> annotationsToExclude) {
-        if ( annotationsToExclude.size() == 0 )
+        if ( annotationsToExclude.isEmpty() )
             return;
 
         final List<InfoFieldAnnotation> tempRequestedInfoAnnotations = new ArrayList<>(requestedInfoAnnotations.size());
@@ -183,50 +208,157 @@ public class VariantAnnotatorEngine {
                                           final Map<String, AlignmentContext> stratifiedContexts,
                                           final VariantContext vc,
                                           final Map<String,PerReadAlleleLikelihoodMap> perReadAlleleLikelihoodMap) {
-        final Map<String, Object> infoAnnotations = new LinkedHashMap<>(vc.getAttributes());
+        // annotate genotypes
+        final VariantContextBuilder builder = new VariantContextBuilder(vc).genotypes(annotateGenotypes(tracker, ref, stratifiedContexts, vc, perReadAlleleLikelihoodMap));
+        VariantContext newGenotypeAnnotatedVC = builder.make();
 
         // annotate expressions where available
-        annotateExpressions(tracker, ref.getLocus(), infoAnnotations);
+        final Map<String, Object> infoAnnotations = new LinkedHashMap<>(newGenotypeAnnotatedVC.getAttributes());
+        annotateExpressions(tracker, ref.getLocus(), newGenotypeAnnotatedVC, infoAnnotations);
 
         // go through all the requested info annotationTypes
         for ( final InfoFieldAnnotation annotationType : requestedInfoAnnotations ) {
-            final Map<String, Object> annotationsFromCurrentType = annotationType.annotate(tracker, walker, ref, stratifiedContexts, vc, perReadAlleleLikelihoodMap);
+            final Map<String, Object> annotationsFromCurrentType = annotationType.annotate(tracker, walker, ref, stratifiedContexts, newGenotypeAnnotatedVC, perReadAlleleLikelihoodMap);
             if ( annotationsFromCurrentType != null )
                 infoAnnotations.putAll(annotationsFromCurrentType);
         }
 
-        // generate a new annotated VC
-        final VariantContextBuilder builder = new VariantContextBuilder(vc).attributes(infoAnnotations);
-
-        // annotate genotypes, creating another new VC in the process
-        final VariantContext annotated = builder.genotypes(annotateGenotypes(tracker, ref, stratifiedContexts, vc, perReadAlleleLikelihoodMap)).make();
+        // create a new VC with the info and genotype annotations
+        final VariantContext annotated = builder.attributes(infoAnnotations).make();
 
         // annotate db occurrences
         return annotateDBs(tracker, annotated);
     }
 
-    public VariantContext annotateContextForActiveRegion(final RefMetaDataTracker tracker,
+    /**
+     *
+     * @param referenceContext
+     * @param tracker
+     * @param readLikelihoods
+     * @param vc
+     * @param useRaw    output annotation data as raw data? (Yes in the case of gVCF mode for HaplotypeCaller)
+     * @return
+     */
+    public VariantContext annotateContextForActiveRegion(final ReferenceContext referenceContext,
+                                                         final RefMetaDataTracker tracker,
                                                          final ReadLikelihoods<Allele> readLikelihoods,
-                                                         final VariantContext vc) {
+                                                         final VariantContext vc,
+                                                         final boolean useRaw) {
         //TODO we transform the read-likelihood into the Map^2 previous version for the sake of not changing of not changing annotation interface.
         //TODO should we change those interfaces?
+
         final Map<String, PerReadAlleleLikelihoodMap> annotationLikelihoods = readLikelihoods.toPerReadAlleleLikelihoodMap();
-        return annotateContextForActiveRegion(tracker, annotationLikelihoods, vc);
+        return annotateContextForActiveRegion(referenceContext, tracker, annotationLikelihoods, vc, useRaw);
     }
 
-    public VariantContext annotateContextForActiveRegion(final RefMetaDataTracker tracker,
+    /**
+     *
+     * @param referenceContext
+     * @param tracker
+     * @param perReadAlleleLikelihoodMap
+     * @param vc
+     * @param useRaw    output annotation data as raw data? (Yes in the case of gVCF mode for HaplotypeCaller)
+     * @return
+     */
+    public VariantContext annotateContextForActiveRegion(final ReferenceContext referenceContext,
+                                                         final RefMetaDataTracker tracker,
                                                          final Map<String, PerReadAlleleLikelihoodMap> perReadAlleleLikelihoodMap,
-                                                         final VariantContext vc) {
+                                                         final VariantContext vc,
+                                                         final boolean useRaw) {
+        // annotate genotypes
+        final VariantContextBuilder builder = new VariantContextBuilder(vc).genotypes(annotateGenotypes(null, null, null, vc, perReadAlleleLikelihoodMap));
+        VariantContext newGenotypeAnnotatedVC = builder.make();
+
+        final Map<String, Object> infoAnnotations = new LinkedHashMap<>(newGenotypeAnnotatedVC.getAttributes());
+
+        // go through all the requested info annotationTypes that are reducible
+        if (useRaw) {
+            for (final InfoFieldAnnotation annotationType : requestedReducibleInfoAnnotations) {
+                if (!(annotationType instanceof ActiveRegionBasedAnnotation))
+                    continue;
+
+
+                    ReducibleAnnotation currentASannotation = (ReducibleAnnotation) annotationType;
+                    final Map<String, Object> annotationsFromCurrentType = currentASannotation.annotateRawData(null, null, referenceContext, null, newGenotypeAnnotatedVC, perReadAlleleLikelihoodMap);
+                    if (annotationsFromCurrentType != null) {
+                        infoAnnotations.putAll(annotationsFromCurrentType);
+                    }
+            }
+        }
+        //if not in reference-confidence mode, do annotate with reducible annotations, but skip the raw data and go straight to the finalized values
+        else {
+            for (final InfoFieldAnnotation annotationType : requestedReducibleInfoAnnotations) {
+                if (!(annotationType instanceof ActiveRegionBasedAnnotation))
+                    continue;
+
+                final Map<String, Object> annotationsFromCurrentType = annotationType.annotate(null, null, referenceContext, null, newGenotypeAnnotatedVC, perReadAlleleLikelihoodMap);
+                if (annotationsFromCurrentType != null) {
+                    infoAnnotations.putAll(annotationsFromCurrentType);
+                }
+            }
+        }
+        //leave this in or else the median will overwrite until we do truly allele-specific
+        //// for now do both allele-specific and not
+        for ( final InfoFieldAnnotation annotationType : requestedNonReducibleInfoAnnotations ) {
+            if ( !(annotationType instanceof ActiveRegionBasedAnnotation) )
+                continue;
+
+                final Map<String, Object> annotationsFromCurrentType = annotationType.annotate(referenceContext, perReadAlleleLikelihoodMap, newGenotypeAnnotatedVC);
+                if (annotationsFromCurrentType != null) {
+                    infoAnnotations.putAll(annotationsFromCurrentType);
+                }
+        }
+
+        // create a new VC with info and genotype annotations
+        final VariantContext annotated = builder.attributes(infoAnnotations).make();
+
+        // annotate db occurrences
+        return annotateDBs(tracker, annotated);
+    }
+
+    /**
+     * Combine (raw) data for reducible annotations (those that use raw data in gVCFs)
+     * Mutates annotationMap by removing the annotations that were combined
+     * @param allelesList   the list of merged alleles across all variants being combined
+     * @param annotationMap attributes of merged variant contexts -- is modified by removing successfully combined annotations
+     * @return  a map containing the keys and raw values for the combined annotations
+     */
+    public Map<String, Object> combineAnnotations(final List<Allele> allelesList, Map<String, List<ReducibleAnnotationData>> annotationMap) {
+        Map<String, Object> combinedAnnotations = new HashMap<>();
+
+        // go through all the requested reducible info annotationTypes
+        for (final InfoFieldAnnotation annotationType : requestedReducibleInfoAnnotations) {
+                ReducibleAnnotation currentASannotation = (ReducibleAnnotation) annotationType;
+                if (annotationMap.containsKey(currentASannotation.getRawKeyName())) {
+                    final List<ReducibleAnnotationData> annotationValue = annotationMap.get(currentASannotation.getRawKeyName());
+                    final Map<String, Object> annotationsFromCurrentType = currentASannotation.combineRawData(allelesList, annotationValue);
+                    combinedAnnotations.putAll(annotationsFromCurrentType);
+                    //remove the combined annotations so that the next method only processes the non-reducible ones
+                    annotationMap.remove(currentASannotation.getRawKeyName());
+                }
+        }
+        return combinedAnnotations;
+    }
+
+    /**
+     * Finalize reducible annotations (those that use raw data in gVCFs)
+     * @param vc    the merged VC with the final set of alleles, possibly subset to the number of maxAltAlleles for genotyping
+     * @param originalVC    the merged but non-subset VC that contains the full list of merged alleles
+     * @return  a VariantContext with the final annotation values for reducible annotations
+     */
+    public VariantContext finalizeAnnotations(VariantContext vc, VariantContext originalVC) {
         final Map<String, Object> infoAnnotations = new LinkedHashMap<>(vc.getAttributes());
 
         // go through all the requested info annotationTypes
-        for ( final InfoFieldAnnotation annotationType : requestedInfoAnnotations ) {
-            if ( !(annotationType instanceof ActiveRegionBasedAnnotation) )
-                continue;
+        for ( final InfoFieldAnnotation annotationType : requestedReducibleInfoAnnotations ) {
 
-            final Map<String, Object> annotationsFromCurrentType = annotationType.annotate(perReadAlleleLikelihoodMap, vc);
+            ReducibleAnnotation currentASannotation = (ReducibleAnnotation)annotationType;
+
+            final Map<String, Object> annotationsFromCurrentType = currentASannotation.finalizeRawData(vc, originalVC);
             if ( annotationsFromCurrentType != null ) {
                 infoAnnotations.putAll(annotationsFromCurrentType);
+                //clean up raw annotation data after annotations are finalized
+                infoAnnotations.remove(currentASannotation.getRawKeyName());
             }
         }
 
@@ -234,10 +366,8 @@ public class VariantAnnotatorEngine {
         final VariantContextBuilder builder = new VariantContextBuilder(vc).attributes(infoAnnotations);
 
         // annotate genotypes, creating another new VC in the process
-        final VariantContext annotated = builder.genotypes(annotateGenotypes(null, null, null, vc, perReadAlleleLikelihoodMap)).make();
-
-        // annotate db occurrences
-        return annotateDBs(tracker, annotated);
+        final VariantContext annotated = builder.make();
+        return annotated;
     }
 
     /**
@@ -253,28 +383,109 @@ public class VariantAnnotatorEngine {
         return variantOverlapAnnotator.annotateOverlaps(tracker, variantOverlapAnnotator.annotateRsID(tracker, vc));
     }
 
-    private void annotateExpressions(final RefMetaDataTracker tracker, final GenomeLoc loc, final Map<String, Object> infoAnnotations) {
+    /**
+     * Annotate the requested expressions
+     *
+     * @param tracker   ref meta data tracker (cannot be null)
+     * @param loc       the location on the genome
+     * @param vc        variant context to annotate
+     * @param infoAnnotations the annotations for the requested expressions
+     */
+    @Requires({"tracker != null && loc != null && vc != null"})
+    private void annotateExpressions(final RefMetaDataTracker tracker, final GenomeLoc loc, final VariantContext vc, final Map<String, Object> infoAnnotations){
+
+        // each requested expression
         for ( final VAExpression expression : requestedExpressions ) {
-            final Collection<VariantContext> VCs = tracker.getValues(expression.binding, loc);
-            if ( VCs.size() == 0 )
+
+            // get the variant contexts for all the expressions at the location
+            final Collection<VariantContext> expressionVCs = tracker.getValues(expression.binding, loc);
+            if ( expressionVCs.isEmpty() )
                 continue;
 
-            final VariantContext vc = VCs.iterator().next();
+            // get the expression's variant context
+            final VariantContext expressionVC = expressionVCs.iterator().next();
+
             // special-case the ID field
             if ( expression.fieldName.equals("ID") ) {
-                if ( vc.hasID() )
-                    infoAnnotations.put(expression.fullName, vc.getID());
+                if ( expressionVC.hasID() )
+                    infoAnnotations.put(expression.fullName, expressionVC.getID());
             } else if (expression.fieldName.equals("ALT")) {
-                infoAnnotations.put(expression.fullName, vc.getAlternateAllele(0).getDisplayString());
-
-            } else if ( vc.hasAttribute(expression.fieldName) ) {
-                    infoAnnotations.put(expression.fullName, vc.getAttribute(expression.fieldName));
+                infoAnnotations.put(expression.fullName, expressionVC.getAlternateAllele(0).getDisplayString());
+            } else if (expression.fieldName.equals("FILTER")) {
+                if ( expressionVC.isFiltered() ) {
+                    infoAnnotations.put(expression.fullName, expressionVC.getFilters().toString().replace("[", "").replace("]", "").replace(" ", ""));
+                } else {
+                    infoAnnotations.put(expression.fullName, "PASS");
+                }
+            } else if ( expressionVC.hasAttribute(expression.fieldName) ) {
+                // find the info field
+                final VCFInfoHeaderLine hInfo = hInfoMap.get(expression.fullName);
+                if ( hInfo == null ){
+                    throw new UserException("Cannot annotate expression " + expression.fullName + " at " + loc + " for variant allele(s) " + vc.getAlleles() + ", missing header info");
+                }
 
+                //
+                // Add the info field annotations
+                //
+                final boolean useRefAndAltAlleles = VCFHeaderLineCount.R == hInfo.getCountType();
+                final boolean useAltAlleles = VCFHeaderLineCount.A == hInfo.getCountType();
+
+                // Annotation uses ref and/or alt alleles or enforce allele concordance
+                if ( (useAltAlleles || useRefAndAltAlleles) || expressionAlleleConcordance ){
+
+                    // remove brackets and spaces from expression value
+                    final String cleanedExpressionValue = expressionVC.getAttribute(expression.fieldName).toString().replaceAll("[\\[\\]\\s]", "");
+
+                    // get comma separated expression values
+                    final ArrayList<String> expressionValuesList = new ArrayList<String>(Arrays.asList(cleanedExpressionValue.split(",")));
+
+                    // get the minimum biallelics without genotypes
+                    final List<VariantContext> minBiallelicVCs = getMinRepresentationBiallelics(vc);
+                    final List<VariantContext> minBiallelicExprVCs = getMinRepresentationBiallelics(expressionVC);
+
+                    // check concordance
+                    final List<String> annotationValues = new ArrayList<>();
+                    boolean canAnnotate = false;
+                    for ( final VariantContext biallelicVC : minBiallelicVCs ) {
+                        // check that ref and alt alleles are the same
+                        List<Allele> exprAlleles = biallelicVC.getAlleles();
+                        boolean isAlleleConcordant = false;
+                        int i = 0;
+                        for ( final VariantContext biallelicExprVC : minBiallelicExprVCs ){
+                            List<Allele> alleles = biallelicExprVC.getAlleles();
+                            // concordant
+                            if ( alleles.equals(exprAlleles) ){
+                                // get the value for the reference if needed.
+                                if ( i == 0 && useRefAndAltAlleles )
+                                    annotationValues.add(expressionValuesList.get(i++));
+                                // use annotation expression and add to vc
+                                annotationValues.add(expressionValuesList.get(i));
+                                isAlleleConcordant = true;
+                                canAnnotate = true;
+                                break;
+                            }
+                            i++;
+                        }
+
+                        // cannot find an allele match, so set the annotation value to zero
+                        if ( !isAlleleConcordant )
+                            annotationValues.add("0");
+                    }
+
+                    // no allele matches, so we cannot annotate
+                    if ( !canAnnotate )
+                        continue;
+
+                    // add the annotation values
+                    infoAnnotations.put(expression.fullName, annotationValues);
+                } else {
+                    // use all of the expression values
+                    infoAnnotations.put(expression.fullName, expressionVC.getAttribute(expression.fieldName));
+                }
             }
         }
     }
 
-
     private GenotypesContext annotateGenotypes(final RefMetaDataTracker tracker,
                                                final ReferenceContext ref, final Map<String, AlignmentContext> stratifiedContexts,
                                                final VariantContext vc,
@@ -301,4 +512,37 @@ public class VariantAnnotatorEngine {
 
         return genotypes;
     }
+
+    /**
+     * Break the variant context into bialleles (reference and alternate alleles) and trim to a minimum representation
+     *
+     * @param vc variant context to annotate
+     * @return list of biallelics trimmed to a minimum representation
+     */
+    private List<VariantContext> getMinRepresentationBiallelics(final VariantContext vc) {
+        final List<VariantContext> minRepresentationBiallelicVCs = new ArrayList<VariantContext>();
+        final boolean isMultiAllelic = vc.getNAlleles() > 2;
+        if (isMultiAllelic) {
+            final List<VariantContext> vcList = GATKVariantContextUtils.splitVariantContextToBiallelics(vc);
+            for (final VariantContext biallelicVC : vcList) {
+                if (!biallelicVC.isSNP())
+                    minRepresentationBiallelicVCs.add(GATKVariantContextUtils.trimAlleles(biallelicVC, true, true));
+                else
+                    minRepresentationBiallelicVCs.add(biallelicVC);
+            }
+        } else {
+            minRepresentationBiallelicVCs.add(vc);
+        }
+
+        return minRepresentationBiallelicVCs;
+    }
+
+    private void setReducibleAnnotations() {
+        for(final InfoFieldAnnotation annotationType : requestedInfoAnnotations) {
+            if (annotationType instanceof ReducibleAnnotation)
+                requestedReducibleInfoAnnotations.add(annotationType);
+            else
+                requestedNonReducibleInfoAnnotations.add(annotationType);
+        }
+    }
 }
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/annotator/VariantOverlapAnnotator.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/annotator/VariantOverlapAnnotator.java
index 03f707f..8fd0abe 100644
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/annotator/VariantOverlapAnnotator.java
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/annotator/VariantOverlapAnnotator.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -26,7 +26,7 @@
 package org.broadinstitute.gatk.tools.walkers.annotator;
 
 import org.broadinstitute.gatk.utils.commandline.RodBinding;
-import org.broadinstitute.gatk.engine.refdata.RefMetaDataTracker;
+import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
 import org.broadinstitute.gatk.utils.GenomeLoc;
 import org.broadinstitute.gatk.utils.GenomeLocParser;
 import htsjdk.variant.variantcontext.Allele;
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/annotator/interfaces/AS_StandardAnnotation.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/annotator/interfaces/AS_StandardAnnotation.java
new file mode 100644
index 0000000..55c3e47
--- /dev/null
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/annotator/interfaces/AS_StandardAnnotation.java
@@ -0,0 +1,31 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.tools.walkers.annotator.interfaces;
+
+/**
+ * Created by gauthier on 9/28/15.
+ */
+public interface AS_StandardAnnotation extends AnnotationType {}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/annotator/interfaces/ActiveRegionBasedAnnotation.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/annotator/interfaces/ActiveRegionBasedAnnotation.java
index 8a32ae1..ee818ea 100644
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/annotator/interfaces/ActiveRegionBasedAnnotation.java
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/annotator/interfaces/ActiveRegionBasedAnnotation.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/annotator/interfaces/AnnotationHelpUtils.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/annotator/interfaces/AnnotationHelpUtils.java
new file mode 100644
index 0000000..bb48867
--- /dev/null
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/annotator/interfaces/AnnotationHelpUtils.java
@@ -0,0 +1,56 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.tools.walkers.annotator.interfaces;
+
+import org.broadinstitute.gatk.utils.classloader.PluginManager;
+
+import java.util.List;
+
+public class AnnotationHelpUtils {
+
+    /**
+     * Simple method to print a list of available annotations.
+     */
+    public static void listAnnotations() {
+        System.out.println("\nThis is a list of available Variant Annotations for use with tools such as UnifiedGenotyper, HaplotypeCaller and VariantAnnotator. Please see the Technical Documentation for more details about these annotations:");
+        System.out.println("http://www.broadinstitute.org/gatk/tooldocs/");
+        System.out.println("\nStandard annotations in the list below are marked with a '*'.");
+        List<Class<? extends InfoFieldAnnotation>> infoAnnotationClasses = new PluginManager<InfoFieldAnnotation>(InfoFieldAnnotation.class).getPlugins();
+        System.out.println("\nAvailable annotations for the VCF INFO field:");
+        for (int i = 0; i < infoAnnotationClasses.size(); i++)
+            System.out.println("\t" + (StandardAnnotation.class.isAssignableFrom(infoAnnotationClasses.get(i)) ? "*" : "") + infoAnnotationClasses.get(i).getSimpleName());
+        System.out.println();
+        List<Class<? extends GenotypeAnnotation>> genotypeAnnotationClasses = new PluginManager<GenotypeAnnotation>(GenotypeAnnotation.class).getPlugins();
+        System.out.println("\nAvailable annotations for the VCF FORMAT field:");
+        for (int i = 0; i < genotypeAnnotationClasses.size(); i++)
+            System.out.println("\t" + (StandardAnnotation.class.isAssignableFrom(genotypeAnnotationClasses.get(i)) ? "*" : "") + genotypeAnnotationClasses.get(i).getSimpleName());
+        System.out.println();
+        System.out.println("\nAvailable classes/groups of annotations:");
+        for ( Class c : new PluginManager<AnnotationType>(AnnotationType.class).getInterfaces() )
+            System.out.println("\t" + c.getSimpleName());
+        System.out.println();
+    }
+}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/annotator/interfaces/AnnotationInterfaceManager.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/annotator/interfaces/AnnotationInterfaceManager.java
index 37b570c..985774f 100644
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/annotator/interfaces/AnnotationInterfaceManager.java
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/annotator/interfaces/AnnotationInterfaceManager.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -133,7 +133,7 @@ public class AnnotationInterfaceManager {
 
         // note that technically an annotation can work on both the INFO and FORMAT fields
         for ( Class c : classes )
-            annotations.add(pluginManager.createByType(c));
+            annotations.add((T)pluginManager.createByType(c));
 
         return annotations;
     }
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/annotator/interfaces/AnnotationType.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/annotator/interfaces/AnnotationType.java
index 0051c97..40a94cf 100644
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/annotator/interfaces/AnnotationType.java
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/annotator/interfaces/AnnotationType.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/annotator/interfaces/AnnotatorCompatible.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/annotator/interfaces/AnnotatorCompatible.java
index f1aeede..363da05 100644
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/annotator/interfaces/AnnotatorCompatible.java
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/annotator/interfaces/AnnotatorCompatible.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/annotator/interfaces/ExperimentalAnnotation.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/annotator/interfaces/ExperimentalAnnotation.java
index 9ed24db..87954d2 100644
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/annotator/interfaces/ExperimentalAnnotation.java
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/annotator/interfaces/ExperimentalAnnotation.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/annotator/interfaces/GenotypeAnnotation.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/annotator/interfaces/GenotypeAnnotation.java
index a6a81d7..590c6b9 100644
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/annotator/interfaces/GenotypeAnnotation.java
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/annotator/interfaces/GenotypeAnnotation.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -25,9 +25,9 @@
 
 package org.broadinstitute.gatk.tools.walkers.annotator.interfaces;
 
-import org.broadinstitute.gatk.engine.contexts.AlignmentContext;
-import org.broadinstitute.gatk.engine.contexts.ReferenceContext;
-import org.broadinstitute.gatk.engine.refdata.RefMetaDataTracker;
+import org.broadinstitute.gatk.utils.contexts.AlignmentContext;
+import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
+import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
 import org.broadinstitute.gatk.utils.genotyper.PerReadAlleleLikelihoodMap;
 import htsjdk.variant.vcf.VCFFormatHeaderLine;
 import htsjdk.variant.variantcontext.Genotype;
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/annotator/interfaces/InfoFieldAnnotation.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/annotator/interfaces/InfoFieldAnnotation.java
index 55a30d8..cc91cda 100644
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/annotator/interfaces/InfoFieldAnnotation.java
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/annotator/interfaces/InfoFieldAnnotation.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -25,13 +25,16 @@
 
 package org.broadinstitute.gatk.tools.walkers.annotator.interfaces;
 
-import org.broadinstitute.gatk.engine.contexts.AlignmentContext;
-import org.broadinstitute.gatk.engine.contexts.ReferenceContext;
-import org.broadinstitute.gatk.engine.refdata.RefMetaDataTracker;
+import org.broadinstitute.gatk.utils.GenomeLoc;
+import org.broadinstitute.gatk.utils.contexts.AlignmentContext;
+import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
+import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
 import org.broadinstitute.gatk.utils.genotyper.PerReadAlleleLikelihoodMap;
 import htsjdk.variant.vcf.VCFInfoHeaderLine;
 import htsjdk.variant.variantcontext.VariantContext;
+import org.broadinstitute.gatk.utils.variant.GATKVCFHeaderLines;
 
+import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
 
@@ -49,6 +52,10 @@ public abstract class InfoFieldAnnotation extends VariantAnnotatorAnnotation {
         return annotate(null, null, null, null, vc, perReadAlleleLikelihoodMap);
     }
 
+    public Map<String, Object> annotate(ReferenceContext referenceContext, Map<String, PerReadAlleleLikelihoodMap> perReadAlleleLikelihoodMap, VariantContext vc) {
+
+        return annotate(null, null, referenceContext, null, vc, perReadAlleleLikelihoodMap);
+    }
 
     public abstract Map<String, Object> annotate(final RefMetaDataTracker tracker,
                                                  final AnnotatorCompatible walker,
@@ -58,5 +65,11 @@ public abstract class InfoFieldAnnotation extends VariantAnnotatorAnnotation {
                                                  final Map<String, PerReadAlleleLikelihoodMap> stratifiedPerReadAlleleLikelihoodMap);
 
     // return the descriptions used for the VCF INFO meta field
-    public abstract List<VCFInfoHeaderLine> getDescriptions();
+    public List<VCFInfoHeaderLine> getDescriptions() {
+        final List<VCFInfoHeaderLine> lines = new ArrayList<>(5);
+        for (final String key : getKeyNames()) {
+            lines.add(GATKVCFHeaderLines.getInfoLine(key));
+        }
+        return lines;
+    }
 }
\ No newline at end of file
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/annotator/interfaces/ReducibleAnnotation.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/annotator/interfaces/ReducibleAnnotation.java
new file mode 100644
index 0000000..25d2d8f
--- /dev/null
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/annotator/interfaces/ReducibleAnnotation.java
@@ -0,0 +1,88 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.tools.walkers.annotator.interfaces;
+
+import htsjdk.variant.variantcontext.Allele;
+import htsjdk.variant.variantcontext.VariantContext;
+import org.broadinstitute.gatk.tools.walkers.annotator.ReducibleAnnotationData;
+import org.broadinstitute.gatk.utils.contexts.AlignmentContext;
+import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
+import org.broadinstitute.gatk.utils.genotyper.PerReadAlleleLikelihoodMap;
+import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
+
+import java.util.List;
+import java.util.Map;
+
+/**
+ * An interface for annotations that are calculated using raw data across samples, rather than the median (or median of median) of samples values
+ */
+public interface ReducibleAnnotation extends AnnotationType {
+    public abstract String getRawKeyName();
+
+    /**
+     * Generate the raw data necessary to calculate the annotation. Raw data is the final endpoint for gVCFs.
+     *
+     * @param tracker
+     * @param walker
+     * @param ref
+     * @param stratifiedContexts
+     * @param vc
+     * @param stratifiedPerReadAlleleLikelihoodMap
+     * @return
+     */
+    public abstract Map<String, Object> annotateRawData(final RefMetaDataTracker tracker,
+                                                        final AnnotatorCompatible walker,
+                                                        final ReferenceContext ref,
+                                                        final Map<String, AlignmentContext> stratifiedContexts,
+                                                        final VariantContext vc,
+                                                        final Map<String, PerReadAlleleLikelihoodMap> stratifiedPerReadAlleleLikelihoodMap);
+
+    /**
+     * Combine raw data, typically during the merging of raw data contained in multiple gVCFs as in CombineGVCFs and the
+     * preliminary merge for GenotypeGVCFs
+     * @param allelesList   The merged allele list across all variants being combined/merged
+     * @param listOfRawData The raw data for all the variants being combined/merged
+     * @return
+     */
+    public abstract Map<String, Object> combineRawData(final List<Allele> allelesList, final List <? extends ReducibleAnnotationData> listOfRawData);
+
+
+    /**
+     * Calculate the final annotation value from the raw data
+     * @param vc -- contains the final set of alleles, possibly subset by GenotypeGVCFs
+     * @param originalVC -- used to get all the alleles for all gVCFs
+     * @return
+     */
+    public abstract Map<String, Object> finalizeRawData(final VariantContext vc, final VariantContext originalVC);
+
+    /**
+     *
+     * @param vc
+     * @param pralm
+     * @param rawAnnotations
+     */
+    public abstract void calculateRawData(VariantContext vc, Map<String, PerReadAlleleLikelihoodMap> pralm, ReducibleAnnotationData rawAnnotations);
+}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/annotator/interfaces/RodRequiringAnnotation.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/annotator/interfaces/RodRequiringAnnotation.java
index 04e545a..c489d66 100644
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/annotator/interfaces/RodRequiringAnnotation.java
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/annotator/interfaces/RodRequiringAnnotation.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/annotator/interfaces/StandardAnnotation.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/annotator/interfaces/StandardAnnotation.java
index 247af00..70d7c9f 100644
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/annotator/interfaces/StandardAnnotation.java
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/annotator/interfaces/StandardAnnotation.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/annotator/interfaces/StandardSomaticAnnotation.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/annotator/interfaces/StandardSomaticAnnotation.java
new file mode 100644
index 0000000..72eecaa
--- /dev/null
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/annotator/interfaces/StandardSomaticAnnotation.java
@@ -0,0 +1,28 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.tools.walkers.annotator.interfaces;
+
+public interface StandardSomaticAnnotation extends AnnotationType {}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/annotator/interfaces/StandardUGAnnotation.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/annotator/interfaces/StandardUGAnnotation.java
new file mode 100644
index 0000000..13fe48b
--- /dev/null
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/annotator/interfaces/StandardUGAnnotation.java
@@ -0,0 +1,28 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.tools.walkers.annotator.interfaces;
+
+public interface StandardUGAnnotation extends AnnotationType {}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/annotator/interfaces/VariantAnnotatorAnnotation.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/annotator/interfaces/VariantAnnotatorAnnotation.java
index 0c68955..4bb9b28 100644
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/annotator/interfaces/VariantAnnotatorAnnotation.java
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/annotator/interfaces/VariantAnnotatorAnnotation.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/annotator/interfaces/WorkInProgressAnnotation.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/annotator/interfaces/WorkInProgressAnnotation.java
index 9daab43..cc9a7d7 100644
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/annotator/interfaces/WorkInProgressAnnotation.java
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/annotator/interfaces/WorkInProgressAnnotation.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/beagle/BeagleOutputToVCF.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/beagle/BeagleOutputToVCF.java
deleted file mode 100644
index 726ea9b..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/beagle/BeagleOutputToVCF.java
+++ /dev/null
@@ -1,392 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.tools.walkers.beagle;
-
-import org.broadinstitute.gatk.utils.commandline.*;
-import org.broadinstitute.gatk.engine.CommandLineGATK;
-import org.broadinstitute.gatk.engine.arguments.StandardVariantContextInputArgumentCollection;
-import org.broadinstitute.gatk.engine.contexts.AlignmentContext;
-import org.broadinstitute.gatk.engine.contexts.ReferenceContext;
-import org.broadinstitute.gatk.engine.refdata.RefMetaDataTracker;
-import org.broadinstitute.gatk.engine.walkers.RodWalker;
-import org.broadinstitute.gatk.utils.GenomeLoc;
-import org.broadinstitute.gatk.utils.SampleUtils;
-import org.broadinstitute.gatk.utils.codecs.beagle.BeagleFeature;
-import org.broadinstitute.gatk.utils.help.HelpConstants;
-import org.broadinstitute.gatk.utils.variant.GATKVCFUtils;
-import htsjdk.variant.vcf.*;
-import org.broadinstitute.gatk.utils.help.DocumentedGATKFeature;
-import htsjdk.variant.variantcontext.*;
-import htsjdk.variant.variantcontext.writer.VariantContextWriter;
-
-import java.util.*;
-
-import static java.lang.Math.log10;
-
-
-/**
- * Takes files produced by Beagle imputation engine and creates a vcf with modified annotations.
- *
- * <p>This walker is intended to be run after Beagle has successfully executed. The full calling sequence for using Beagle along with the GATK is:      </p>
- *
- * <p>1. Run ProduceBeagleInputWalker.  </p>
- * <p>2. Run Beagle</p>
- * <p>3. Uncompress output files</p>
- * <p>4. Run BeagleOutputToVCFWalker.</p>
- *
- *
- * Note that this walker requires all input files produced by Beagle.
- *
- *
- * <h3>Example</h3>
- * <pre>
- *     java -Xmx4000m -jar dist/GenomeAnalysisTK.jar \
- *      -R reffile.fasta -T BeagleOutputToVCF \
- *      -V input_vcf.vcf \
- *      -beagleR2:BEAGLE /myrun.beagle_output.r2 \
- *      -beaglePhased:BEAGLE /myrun.beagle_output.phased \
- *      -beagleProbs:BEAGLE /myrun.beagle_output.gprobs \
- *      -o output_vcf.vcf
- *      </pre>
-
- <p> Note that Beagle produces some of these files compressed as .gz, so gunzip must be run on them before walker is run in order to decompress them </p>
-
- */
- at DocumentedGATKFeature( groupName = HelpConstants.DOCS_CAT_VARDISC, extraDocs = {CommandLineGATK.class} )
-public class BeagleOutputToVCF extends RodWalker<Integer, Integer> {
-
-    @ArgumentCollection
-    protected StandardVariantContextInputArgumentCollection variantCollection = new StandardVariantContextInputArgumentCollection();
-
-    /**
-     * If this argument is present, the original allele frequencies and counts from this vcf are added as annotations ACH,AFH and ANH. at each record present in this vcf
-     */
-    @Input(fullName="comp", shortName = "comp", doc="Comparison VCF file", required=false)
-    public RodBinding<VariantContext> comp;
-
-
-    /**
-     * This required argument is used to annotate each site in the vcf INFO field with R2 annotation. Will be NaN if Beagle determined there are no variant samples.
-     */
-    @Input(fullName="beagleR2", shortName = "beagleR2", doc="Beagle-produced .r2 file containing R^2 values for all markers", required=true)
-    public RodBinding<BeagleFeature> beagleR2;
-
-    /**
-     * These values will populate the GL field for each sample and contain the posterior probability of each genotype given the data after phasing and imputation.
-     */
-    @Input(fullName="beagleProbs", shortName = "beagleProbs", doc="Beagle-produced .probs file containing posterior genotype probabilities", required=true)
-    public RodBinding<BeagleFeature> beagleProbs;
-
-    /**
-     * By default, all genotypes will be marked in the VCF as "phased", using the "|" separator after Beagle.
-     */
-    @Input(fullName="beaglePhased", shortName = "beaglePhased", doc="Beagle-produced .phased file containing phased genotypes", required=true)
-    public RodBinding<BeagleFeature> beaglePhased;
-
-    @Output(doc="VCF File to which variants should be written")
-    protected VariantContextWriter vcfWriter = null;
-
-    /**
-     * If this argument is absent, and if Beagle determines that there is no sample in a site that has a variant genotype, the site will be marked as filtered (Default behavior).
-     * If the argument is present, the site won't be marked as filtered under this condition even if there are no variant genotypes.
-     */
-    @Argument(fullName="dont_mark_monomorphic_sites_as_filtered", shortName="keep_monomorphic", doc="If provided, we won't filter sites that beagle tags as monomorphic.  Useful for imputing a sample's genotypes from a reference panel" ,required=false)
-    public boolean DONT_FILTER_MONOMORPHIC_SITES = false;
-
-    /**
-     * Value between 0 and 1. If the probability of getting a genotype correctly (based on the posterior genotype probabilities and the actual genotype) is below this threshold,
-     * a genotype will be substitute by a no-call.
-     */
-    @Argument(fullName="no" +
-            "call_threshold", shortName="ncthr", doc="Threshold of confidence at which a genotype won't be called", required=false)
-    private double noCallThreshold = 0.0;
-
-    protected static String line = null;
-
-    private final double MIN_PROB_ERROR = 0.000001;
-    private final double MAX_GENOTYPE_QUALITY = -6.0;
-
-    private final static String BEAGLE_MONO_FILTER_STRING = "BGL_SET_TO_MONOMORPHIC";
-    private final static String ORIGINAL_ALT_ALLELE_INFO_KEY = "OriginalAltAllele";
-
-    public void initialize() {
-
-        // setup the header fields
-
-        final Set<VCFHeaderLine> hInfo = new HashSet<VCFHeaderLine>();
-        hInfo.addAll(GATKVCFUtils.getHeaderFields(getToolkit()));
-        hInfo.add(new VCFFormatHeaderLine("OG",1, VCFHeaderLineType.String, "Original Genotype input to Beagle"));
-        hInfo.add(new VCFInfoHeaderLine("R2", 1, VCFHeaderLineType.Float, "r2 Value reported by Beagle on each site"));
-        hInfo.add(new VCFInfoHeaderLine("NumGenotypesChanged", 1, VCFHeaderLineType.Integer, "The number of genotypes changed by Beagle"));
-        hInfo.add(new VCFInfoHeaderLine(ORIGINAL_ALT_ALLELE_INFO_KEY, 1, VCFHeaderLineType.String, "The original alt allele for a site set to monomorphic by Beagle"));
-        hInfo.add(new VCFFilterHeaderLine(BEAGLE_MONO_FILTER_STRING, "This site was set to monomorphic by Beagle"));
-
-        if ( comp.isBound() ) {
-            hInfo.add(new VCFInfoHeaderLine("ACH", 1, VCFHeaderLineType.Integer, "Allele Count from Comparison ROD at this site"));
-            hInfo.add(new VCFInfoHeaderLine("ANH", 1, VCFHeaderLineType.Integer, "Allele Frequency from Comparison ROD at this site"));
-            hInfo.add(new VCFInfoHeaderLine("AFH", 1, VCFHeaderLineType.Float, "Allele Number from Comparison ROD at this site"));
-        }
-
-        Set<String> samples = SampleUtils.getSampleListWithVCFHeader(getToolkit(), Arrays.asList(variantCollection.variants.getName()));
-
-        final VCFHeader vcfHeader = new VCFHeader(hInfo, samples);
-        vcfWriter.writeHeader(vcfHeader);
-    }
-
-    public Integer map( RefMetaDataTracker tracker, ReferenceContext ref, AlignmentContext context ) {
-
-        if ( tracker == null )
-            return 0;
-
-        GenomeLoc loc = context.getLocation();
-        VariantContext vc_input = tracker.getFirstValue(variantCollection.variants, loc);
-
-        VariantContext vc_comp = tracker.getFirstValue(comp, loc);
-
-        if ( vc_input == null  )
-            return 0;
-
-        if (vc_input.isFiltered()) {
-            vcfWriter.add(vc_input);
-            return 1;
-        }
-
-        BeagleFeature beagleR2Feature = tracker.getFirstValue(beagleR2);
-        BeagleFeature beagleProbsFeature = tracker.getFirstValue(beagleProbs);
-        BeagleFeature beaglePhasedFeature = tracker.getFirstValue(beaglePhased);
-
-        // ignore places where we don't have a variant
-        if ( beagleR2Feature == null || beagleProbsFeature == null ||  beaglePhasedFeature == null)
-        {
-            vcfWriter.add(vc_input);
-            return 1;
-        }
-
-
-        // get reference base for current position
-        byte refByte = ref.getBase();
-
-        // make new Genotypes based on Beagle results
-        GenotypesContext genotypes = GenotypesContext.create(vc_input.getGenotypes().size());
-
-        // for each genotype, create a new object with Beagle information on it
-
-        int numGenotypesChangedByBeagle = 0;
-        Integer alleleCountH = 0, chrCountH = 0;
-        Double alleleFrequencyH = 0.0;
-        int beagleVarCounts = 0;
-
-        GenotypesContext hapmapGenotypes = null;
-
-        if (vc_comp != null) {
-            hapmapGenotypes = vc_comp.getGenotypes();
-        }
-
-        for ( final Genotype g : vc_input.getGenotypes() ) {
-            boolean genotypeIsPhased = true;
-            String sample = g.getSampleName();
-
-            // If we have  a Hapmap (comp) ROD, compute Hapmap AC, AN and AF
-            // use sample as key into genotypes structure
-            if (vc_comp != null) {
-
-                if (vc_input.getGenotypes().containsSample(sample) && hapmapGenotypes.containsSample(sample))  {
-
-                    Genotype hapmapGenotype = hapmapGenotypes.get(sample);
-                    if (hapmapGenotype.isCalled()){
-                        chrCountH += 2;
-                        if (hapmapGenotype.isHet()) {
-                            alleleCountH += 1;
-                        }    else if (hapmapGenotype.isHomVar()) {
-                            alleleCountH += 2;
-                        }
-                    }
-                }
-            }
-
-            ArrayList<String> beagleProbabilities = beagleProbsFeature.getProbLikelihoods().get(sample);
-            ArrayList<String> beagleGenotypePairs = beaglePhasedFeature.getGenotypes().get(sample);
-
-            // original alleles at this genotype
-            Allele originalAlleleA = g.getAllele(0);
-
-            Allele originalAlleleB = (g.getAlleles().size() == 2) ? g.getAllele(1) : g.getAllele(0); // hack to deal with no-call genotypes
-
-
-            // We have phased genotype in hp. Need to set the isRef field in the allele.
-            List<Allele> alleles = new ArrayList<Allele>();
-
-            String alleleA = beagleGenotypePairs.get(0);
-            String alleleB = beagleGenotypePairs.get(1);
-
-            if ( alleleA.equals("null") || alleleB.equals("null") ) {
-                logger.warn("Beagle produced 'null' alleles at location "+ref.getLocus().toString()+". Ignoring.");
-                return 0;
-            }
-
-            // Beagle always produces genotype strings based on the strings we input in the likelihood file.
-            String refString = vc_input.getReference().getDisplayString();
-
-            Allele bglAlleleA, bglAlleleB;
-
-            if (alleleA.matches(refString))
-                bglAlleleA = Allele.create(alleleA,true);
-            else
-                bglAlleleA = Allele.create(alleleA,false);
-
-            if (alleleB.matches(refString))
-                bglAlleleB = Allele.create(alleleB,true);
-            else
-                bglAlleleB = Allele.create(alleleB,false);
-
-
-            alleles.add(bglAlleleA);
-            alleles.add(bglAlleleB);
-
-            // Compute new GQ field = -10*log10Pr(Genotype call is wrong)
-            // Beagle gives probability that genotype is AA, AB and BB.
-            // Which, by definition, are prob of hom ref, het and hom var.
-            double probWrongGenotype, genotypeQuality;
-            Double homRefProbability = Double.valueOf(beagleProbabilities.get(0));
-            Double hetProbability = Double.valueOf(beagleProbabilities.get(1));
-            Double homVarProbability = Double.valueOf(beagleProbabilities.get(2));
-
-            if (bglAlleleA.isReference() && bglAlleleB.isReference()) // HomRef call
-                probWrongGenotype = hetProbability + homVarProbability;
-            else if ((bglAlleleB.isReference() && bglAlleleA.isNonReference()) || (bglAlleleA.isReference() && bglAlleleB.isNonReference()))
-                probWrongGenotype = homRefProbability + homVarProbability;
-            else // HomVar call
-                probWrongGenotype = hetProbability + homRefProbability;
-
-            // deal with numerical errors coming from limited formatting value on Beagle output files
-            if (probWrongGenotype > 1 - MIN_PROB_ERROR)
-                probWrongGenotype = 1 - MIN_PROB_ERROR;
-
-            if (1-probWrongGenotype < noCallThreshold) {
-                // quality is bad: don't call genotype
-                alleles.clear();
-                alleles.add(originalAlleleA);
-                alleles.add(originalAlleleB);
-                genotypeIsPhased = false;
-            }
-
-            if (probWrongGenotype < MIN_PROB_ERROR)
-                genotypeQuality = MAX_GENOTYPE_QUALITY;
-            else
-                genotypeQuality = log10(probWrongGenotype);
-
-            HashMap<String,Object> originalAttributes = new HashMap<String,Object>(g.getExtendedAttributes());
-
-            // get original encoding and add to keynotype attributes
-            String a1, a2, og;
-            if (originalAlleleA.isNoCall())
-                a1 = ".";
-            else if (originalAlleleA.isReference())
-                a1 = "0";
-            else
-                a1 = "1";
-
-            if (originalAlleleB.isNoCall())
-                a2 = ".";
-            else if (originalAlleleB.isReference())
-                a2 = "0";
-            else
-                a2 = "1";
-
-            og = a1+"/"+a2;
-
-            // See if Beagle switched genotypes
-            if (! originalAlleleA.equals(Allele.NO_CALL) && beagleSwitchedGenotypes(bglAlleleA,originalAlleleA,bglAlleleB,originalAlleleB)){
-                originalAttributes.put("OG",og);
-                numGenotypesChangedByBeagle++;
-            }
-            else {
-                originalAttributes.put("OG",".");
-            }
-            Genotype imputedGenotype = new GenotypeBuilder(g).alleles(alleles).log10PError(genotypeQuality).attributes(originalAttributes).phased(genotypeIsPhased).make();
-            if ( imputedGenotype.isHet() || imputedGenotype.isHomVar() ) {
-                beagleVarCounts++;
-            }
-
-            genotypes.add(imputedGenotype);
-        }
-
-        final VariantContextBuilder builder = new VariantContextBuilder(vc_input).source("outputvcf").genotypes(genotypes);
-        if ( ! ( beagleVarCounts > 0 || DONT_FILTER_MONOMORPHIC_SITES ) ) {
-            builder.attribute(ORIGINAL_ALT_ALLELE_INFO_KEY, vc_input.getAlternateAllele(0));
-            builder.alleles(Collections.singleton(vc_input.getReference())).filter(BEAGLE_MONO_FILTER_STRING);
-        }
-
-        // re-compute chromosome counts
-        VariantContextUtils.calculateChromosomeCounts(builder, false);
-
-        // Get Hapmap AC and AF
-        if (vc_comp != null) {
-            builder.attribute("ACH", alleleCountH.toString() );
-            builder.attribute("ANH", chrCountH.toString() );
-            builder.attribute("AFH", String.format("%4.2f", (double)alleleCountH/chrCountH) );
-
-        }
-
-        builder.attribute("NumGenotypesChanged", numGenotypesChangedByBeagle );
-        if( !beagleR2Feature.getR2value().equals(Double.NaN) ) {
-            builder.attribute("R2", beagleR2Feature.getR2value().toString() );
-        }
-
-        vcfWriter.add(builder.make());
-
-        return 1;
-    }
-
-    private boolean beagleSwitchedGenotypes(Allele bglAlleleA, Allele originalAlleleA, Allele bglAlleleB, Allele originalAlleleB) {
-       return !((bglAlleleA.equals(originalAlleleA) && bglAlleleB.equals(originalAlleleB) ||
-                    (bglAlleleA.equals(originalAlleleB) && bglAlleleB.equals(originalAlleleA))));
-    }
-
-    public Integer reduceInit() {
-        return 0; // Nothing to do here
-    }
-
-    /**
-     * Increment the number of loci processed.
-     *
-     * @param value result of the map.
-     * @param sum   accumulator for the reduce.
-     * @return the new number of loci processed.
-     */
-    public Integer reduce(Integer value, Integer sum) {
-        return sum + value;
-    }
-
-    /**
-     * Tell the user the number of loci processed and close out the new variants file.
-     *
-     * @param result  the number of loci seen.
-     */
-    public void onTraversalDone(Integer result) {
-        System.out.printf("Processed %d loci.\n", result);
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/beagle/ProduceBeagleInput.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/beagle/ProduceBeagleInput.java
deleted file mode 100644
index dab5d16..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/beagle/ProduceBeagleInput.java
+++ /dev/null
@@ -1,463 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.tools.walkers.beagle;
-
-import org.broadinstitute.gatk.utils.commandline.*;
-import org.broadinstitute.gatk.engine.CommandLineGATK;
-import org.broadinstitute.gatk.engine.GenomeAnalysisEngine;
-import org.broadinstitute.gatk.engine.arguments.StandardVariantContextInputArgumentCollection;
-import org.broadinstitute.gatk.engine.contexts.AlignmentContext;
-import org.broadinstitute.gatk.engine.contexts.ReferenceContext;
-import org.broadinstitute.gatk.engine.refdata.RefMetaDataTracker;
-import org.broadinstitute.gatk.engine.samples.Gender;
-import org.broadinstitute.gatk.engine.walkers.RodWalker;
-import org.broadinstitute.gatk.tools.walkers.variantrecalibration.VQSRCalibrationCurve;
-import org.broadinstitute.gatk.utils.GenomeLoc;
-import org.broadinstitute.gatk.utils.MathUtils;
-import org.broadinstitute.gatk.utils.SampleUtils;
-import org.broadinstitute.gatk.utils.help.HelpConstants;
-import org.broadinstitute.gatk.utils.variant.GATKVCFUtils;
-import org.broadinstitute.gatk.utils.variant.GATKVariantContextUtils;
-import htsjdk.variant.vcf.VCFFilterHeaderLine;
-import htsjdk.variant.vcf.VCFHeader;
-import htsjdk.variant.vcf.VCFHeaderLine;
-import org.broadinstitute.gatk.utils.exceptions.GATKException;
-import org.broadinstitute.gatk.utils.help.DocumentedGATKFeature;
-import htsjdk.variant.variantcontext.*;
-import htsjdk.variant.variantcontext.writer.VariantContextWriter;
-
-import java.io.File;
-import java.io.PrintStream;
-import java.util.*;
-
-/**
- *  Converts the input VCF into a format accepted by the Beagle imputation/analysis program.
- * <p>
- *
- * <h3>Input</h3>
- * <p>
- * A VCF with variants to convert to Beagle format
- * </p>
- *
- * <h2>Outputs</h2>
- * <p>
- * A single text file which can be fed to Beagle
- * </p>
- * <p>
- * Optional: A file with a list of markers
- * </p>
-  *
- * <h3>Examples</h3>
- * <pre>
- *     java -Xmx2g -jar dist/GenomeAnalysisTK.jar -L 20 \
- *      -R reffile.fasta -T ProduceBeagleInput \
- *      -V path_to_input_vcf/inputvcf.vcf -o path_to_beagle_output/beagle_output
- * </pre>
- *
- */
-
- at DocumentedGATKFeature( groupName = HelpConstants.DOCS_CAT_VARDISC, extraDocs = {CommandLineGATK.class} )
-public class ProduceBeagleInput extends RodWalker<Integer, Integer> {
-
-    @ArgumentCollection protected StandardVariantContextInputArgumentCollection variantCollection = new StandardVariantContextInputArgumentCollection();
-
-    @Hidden
-    @Input(fullName="validation", shortName = "validation", doc="Validation VCF file", required=false)
-    public RodBinding<VariantContext> validation;
-
-
-    @Output(doc="File to which BEAGLE input should be written")
-    protected PrintStream  beagleWriter = null;
-
-    @Hidden
-    @Output(doc="File to which BEAGLE markers should be written", shortName="markers", fullName = "markers", required = false, defaultToStdout = false)
-    protected PrintStream  markers = null;
-    int markerCounter = 1;
-
-    @Hidden
-    @Input(doc="VQSqual calibration file", shortName = "cc", required=false)
-    protected File VQSRCalibrationFile = null;
-    protected VQSRCalibrationCurve VQSRCalibrator = null;
-
-    @Hidden
-    @Argument(doc="VQSqual key", shortName = "vqskey", required=false)
-    protected String VQSLOD_KEY = "VQSqual";
-
-    @Hidden
-     @Argument(fullName = "inserted_nocall_rate", shortName = "nc_rate", doc = "Rate (0-1) at which genotype no-calls will be randomly inserted, for testing", required = false)
-    public double insertedNoCallRate  = 0;
-    @Hidden
-     @Argument(fullName = "validation_genotype_ptrue", shortName = "valp", doc = "Flat probability to assign to validation genotypes. Will override GL field.", required = false)
-    public double validationPrior = -1.0;
-    @Hidden
-     @Argument(fullName = "validation_bootstrap", shortName = "bs", doc = "Proportion of records to be used in bootstrap set", required = false)
-    public double bootstrap = 0.0;
-    @Hidden
-     @Argument(fullName = "bootstrap_vcf",shortName = "bvcf", doc = "Output a VCF with the records used for bootstrapping filtered out", required = false)
-    VariantContextWriter bootstrapVCFOutput = null;
-
-    /**
-     * If sample gender is known, this flag should be set to true to ensure that Beagle treats male Chr X properly.
-     */
-    @Argument(fullName = "checkIsMaleOnChrX", shortName = "checkIsMaleOnChrX", doc = "Set to true when Beagle-ing chrX and want to ensure male samples don't have heterozygous calls.", required = false)
-    public boolean CHECK_IS_MALE_ON_CHR_X = false;
-
-    @Hidden
-    @Argument(fullName = "variant_genotype_ptrue", shortName = "varp", doc = "Flat probability prior to assign to variant (not validation) genotypes. Does not override GL field.", required = false)
-    public double variantPrior = 0.96;
-
-    private Set<String> samples = null;
-    private Set<String> BOOTSTRAP_FILTER = new HashSet<String>( Arrays.asList("bootstrap") );
-    private int bootstrapSetSize = 0;
-    private int testSetSize = 0;
-    private CachingFormatter formatter = new CachingFormatter("%5.4f ", 100000);
-    private int certainFPs = 0;
-
-    public void initialize() {
-
-        samples = SampleUtils.getSampleListWithVCFHeader(getToolkit(), Arrays.asList(variantCollection.variants.getName()));
-
-        beagleWriter.print("marker alleleA alleleB");
-        for ( String sample : samples )
-            beagleWriter.print(String.format(" %s %s %s", sample, sample, sample));
-
-        beagleWriter.println();
-
-        if ( bootstrapVCFOutput != null ) {
-            initializeVcfWriter();
-        }
-
-        if ( VQSRCalibrationFile != null ) {
-            VQSRCalibrator = VQSRCalibrationCurve.readFromFile(VQSRCalibrationFile);
-            logger.info("Read calibration curve");
-            VQSRCalibrator.printInfo(logger);
-        }
-    }
-
-    public Integer map( RefMetaDataTracker tracker, ReferenceContext ref, AlignmentContext context ) {
-        if( tracker != null ) {
-            GenomeLoc loc = context.getLocation();
-            VariantContext variant_eval = tracker.getFirstValue(variantCollection.variants, loc);
-            VariantContext validation_eval = tracker.getFirstValue(validation, loc);
-
-            if ( goodSite(variant_eval,validation_eval) ) {
-                if ( useValidation(validation_eval, ref) ) {
-                    writeBeagleOutput(validation_eval, variant_eval, true, validationPrior);
-                    return 1;
-                } else {
-                    if ( goodSite(variant_eval) ) {
-                        writeBeagleOutput(variant_eval,validation_eval,false,variantPrior);
-                        return 1;
-                    } else { // todo -- if the variant site is bad, validation is good, but not in bootstrap set -- what do?
-                        return 0;
-                    }
-                }
-            } else {
-                return 0;
-            }
-        } else {
-            return 0;
-        }
-    }
-
-    public boolean goodSite(VariantContext a, VariantContext b) {
-        return goodSite(a) || goodSite(b);
-    }
-
-    public boolean goodSite(VariantContext v) {
-        if ( canBeOutputToBeagle(v) ) {
-            if ( VQSRCalibrator != null && VQSRCalibrator.certainFalsePositive(VQSLOD_KEY, v) ) {
-                certainFPs++;
-                return false;
-            } else {
-                return true;
-            }
-        } else {
-            return false;
-        }
-    }
-
-    public static boolean canBeOutputToBeagle(VariantContext v) {
-        return v != null && ! v.isFiltered() && v.isBiallelic() && v.hasGenotypes();
-    }
-
-    public boolean useValidation(VariantContext validation, ReferenceContext ref) {
-        if( goodSite(validation) ) {
-            // if using record keeps us below expected proportion, use it
-            logger.debug(String.format("boot: %d, test: %d, total: %d", bootstrapSetSize, testSetSize, bootstrapSetSize+testSetSize+1));
-            if ( (bootstrapSetSize+1.0)/(1.0+bootstrapSetSize+testSetSize) <= bootstrap ) {
-                if ( bootstrapVCFOutput != null ) {
-                    bootstrapVCFOutput.add(new VariantContextBuilder(validation).filters(BOOTSTRAP_FILTER).make());
-                }
-                bootstrapSetSize++;
-                return true;
-            } else {
-                if ( bootstrapVCFOutput != null ) {
-                    bootstrapVCFOutput.add(validation);
-                }
-                testSetSize++;
-                return false;
-            }
-        } else {
-            if ( validation != null && bootstrapVCFOutput != null ) {
-                bootstrapVCFOutput.add(validation);
-            }
-            return false;
-        }
-    }
-
-    private final static double[] HAPLOID_FLAT_LOG10_LIKELIHOODS = MathUtils.toLog10(new double[]{ 0.5, 0.0, 0.5 });
-    private final static double[] DIPLOID_FLAT_LOG10_LIKELIHOODS = MathUtils.toLog10(new double[]{ 0.33, 0.33, 0.33 });
-
-    public void writeBeagleOutput(VariantContext preferredVC, VariantContext otherVC, boolean isValidationSite, double prior) {
-        GenomeLoc currentLoc = GATKVariantContextUtils.getLocation(getToolkit().getGenomeLocParser(), preferredVC);
-        StringBuffer beagleOut = new StringBuffer();
-
-        String marker = String.format("%s:%d ",currentLoc.getContig(),currentLoc.getStart());
-        beagleOut.append(marker);
-        if ( markers != null ) markers.append(marker).append("\t").append(Integer.toString(markerCounter++)).append("\t");
-        for ( Allele allele : preferredVC.getAlleles() ) {
-            String bglPrintString;
-            if (allele.isNoCall())
-                bglPrintString = "-";
-            else
-                bglPrintString = allele.getBaseString();  // get rid of * in case of reference allele
-
-            beagleOut.append(String.format("%s ", bglPrintString));
-            if ( markers != null ) markers.append(bglPrintString).append("\t");
-        }
-        if ( markers != null ) markers.append("\n");
-
-        GenotypesContext preferredGenotypes = preferredVC.getGenotypes();
-        GenotypesContext otherGenotypes = goodSite(otherVC) ? otherVC.getGenotypes() : null;
-        for ( String sample : samples ) {
-            boolean isMaleOnChrX = CHECK_IS_MALE_ON_CHR_X && getSample(sample).getGender() == Gender.MALE;
-
-            Genotype genotype;
-            boolean isValidation;
-            // use sample as key into genotypes structure
-            if ( preferredGenotypes.containsSample(sample) ) {
-                genotype = preferredGenotypes.get(sample);
-                isValidation = isValidationSite;
-            } else if ( otherGenotypes != null && otherGenotypes.containsSample(sample) ) {
-                genotype = otherGenotypes.get(sample);
-                isValidation = ! isValidationSite;
-            } else {
-                // there is magically no genotype for this sample.
-                throw new GATKException("Sample "+sample+" arose with no genotype in variant or validation VCF. This should never happen.");
-            }
-
-            /*
-             * Use likelihoods if: is validation, prior is negative; or: is not validation, has genotype key
-             */
-            double [] log10Likelihoods = null;
-            if ( (isValidation && prior < 0.0) || genotype.hasLikelihoods() ) {
-                log10Likelihoods = genotype.getLikelihoods().getAsVector();
-
-                // see if we need to randomly mask out genotype in this position.
-                if ( GenomeAnalysisEngine.getRandomGenerator().nextDouble() <= insertedNoCallRate ) {
-                    // we are masking out this genotype
-                    log10Likelihoods = isMaleOnChrX ? HAPLOID_FLAT_LOG10_LIKELIHOODS : DIPLOID_FLAT_LOG10_LIKELIHOODS;
-                }
-
-                if( isMaleOnChrX ) {
-                    log10Likelihoods[1] = -255;  // todo -- warning this is dangerous for multi-allele case
-                }
-            }
-            /**
-             * otherwise, use the prior uniformly
-             */
-            else if (! isValidation && genotype.isCalled() && ! genotype.hasLikelihoods() ) {
-                // hack to deal with input VCFs with no genotype likelihoods.  Just assume the called genotype
-                // is confident.  This is useful for Hapmap and 1KG release VCFs.
-                double AA = (1.0-prior)/2.0;
-                double AB = (1.0-prior)/2.0;
-                double BB = (1.0-prior)/2.0;
-
-                if (genotype.isHomRef()) { AA = prior; }
-                else if (genotype.isHet()) { AB = prior; }
-                else if (genotype.isHomVar()) { BB = prior; }
-
-                log10Likelihoods = MathUtils.toLog10(new double[]{ AA, isMaleOnChrX ? 0.0 : AB, BB });
-            }
-            else  {
-                log10Likelihoods = isMaleOnChrX ? HAPLOID_FLAT_LOG10_LIKELIHOODS : DIPLOID_FLAT_LOG10_LIKELIHOODS;
-            }
-
-            writeSampleLikelihoods(beagleOut, preferredVC, log10Likelihoods);
-        }
-
-        beagleWriter.println(beagleOut.toString());
-    }
-
-    private void writeSampleLikelihoods( StringBuffer out, VariantContext vc, double[] log10Likelihoods ) {
-        if ( VQSRCalibrator != null ) {
-            log10Likelihoods = VQSRCalibrator.includeErrorRateInLikelihoods(VQSLOD_KEY, vc, log10Likelihoods);
-        }
-
-        double[] normalizedLikelihoods = MathUtils.normalizeFromLog10(log10Likelihoods);
-        // see if we need to randomly mask out genotype in this position.
-        for (double likeVal: normalizedLikelihoods) {
-            out.append(formatter.format(likeVal));
-//            out.append(String.format("%5.4f ",likeVal));
-        }
-    }
-
-
-    public Integer reduceInit() {
-        return 0; // Nothing to do here
-    }
-
-    public Integer reduce( Integer value, Integer sum ) {
-        return value + sum; // count up the sites
-    }
-
-    public void onTraversalDone( Integer includedSites ) {
-        logger.info("Sites included in beagle likelihoods file             : " + includedSites);
-        logger.info(String.format("Certain false positive found from recalibration curve : %d (%.2f%%)",
-                certainFPs, (100.0 * certainFPs) / (Math.max(certainFPs + includedSites, 1))));
-    }
-
-    private void initializeVcfWriter() {
-        final List<String> inputNames = Arrays.asList(validation.getName());
-
-        // setup the header fields
-        Set<VCFHeaderLine> hInfo = new HashSet<VCFHeaderLine>();
-        hInfo.addAll(GATKVCFUtils.getHeaderFields(getToolkit(), inputNames));
-        hInfo.add(new VCFFilterHeaderLine("bootstrap","This site used for genotype bootstrapping with ProduceBeagleInputWalker"));
-
-        bootstrapVCFOutput.writeHeader(new VCFHeader(hInfo, SampleUtils.getUniqueSamplesFromRods(getToolkit(), inputNames)));
-    }
-
-    public static class CachingFormatter {
-        private String format;
-        private LRUCache<Double, String> cache;
-
-        public String getFormat() {
-            return format;
-        }
-
-        public String format(double value) {
-            String f = cache.get(value);
-            if ( f == null ) {
-                f = String.format(format, value);
-                cache.put(value, f);
-//                if ( cache.usedEntries() < maxCacheSize ) {
-//                    System.out.printf("CACHE size %d%n", cache.usedEntries());
-//                } else {
-//                    System.out.printf("CACHE is full %f%n", value);
-//                }
-//            }
-//            } else {
-//                System.out.printf("CACHE hit %f%n", value);
-//            }
-            }
-
-            return f;
-        }
-
-        public CachingFormatter(String format, int maxCacheSize) {
-            this.format = format;
-            this.cache = new LRUCache<Double, String>(maxCacheSize);
-        }
-    }
-
-    /**
-    * An LRU cache, based on <code>LinkedHashMap</code>.
-    *
-    * <p>
-    * This cache has a fixed maximum number of elements (<code>cacheSize</code>).
-    * If the cache is full and another entry is added, the LRU (least recently used) entry is dropped.
-    *
-    * <p>
-    * This class is thread-safe. All methods of this class are synchronized.
-    *
-    * <p>
-    * Author: Christian d'Heureuse, Inventec Informatik AG, Zurich, Switzerland<br>
-    * Multi-licensed: EPL / LGPL / GPL / AL / BSD.
-    */
-    public static class LRUCache<K,V> {
-
-    private static final float   hashTableLoadFactor = 0.75f;
-
-    private LinkedHashMap<K,V>   map;
-    private int                  cacheSize;
-
-    /**
-    * Creates a new LRU cache.
-    * @param cacheSize the maximum number of entries that will be kept in this cache.
-    */
-    public LRUCache (int cacheSize) {
-       this.cacheSize = cacheSize;
-       int hashTableCapacity = (int)Math.ceil(cacheSize / hashTableLoadFactor) + 1;
-       map = new LinkedHashMap<K,V>(hashTableCapacity, hashTableLoadFactor, true) {
-          // (an anonymous inner class)
-          private static final long serialVersionUID = 1;
-          @Override protected boolean removeEldestEntry (Map.Entry<K,V> eldest) {
-             return size() > LRUCache.this.cacheSize; }}; }
-
-    /**
-    * Retrieves an entry from the cache.<br>
-    * The retrieved entry becomes the MRU (most recently used) entry.
-    * @param key the key whose associated value is to be returned.
-    * @return    the value associated to this key, or null if no value with this key exists in the cache.
-    */
-    public synchronized V get (K key) {
-       return map.get(key); }
-
-    /**
-    * Adds an entry to this cache.
-    * The new entry becomes the MRU (most recently used) entry.
-    * If an entry with the specified key already exists in the cache, it is replaced by the new entry.
-    * If the cache is full, the LRU (least recently used) entry is removed from the cache.
-    * @param key    the key with which the specified value is to be associated.
-    * @param value  a value to be associated with the specified key.
-    */
-    public synchronized void put (K key, V value) {
-       map.put (key, value); }
-
-    /**
-    * Clears the cache.
-    */
-    public synchronized void clear() {
-       map.clear(); }
-
-    /**
-    * Returns the number of used entries in the cache.
-    * @return the number of entries currently in the cache.
-    */
-    public synchronized int usedEntries() {
-       return map.size(); }
-
-    /**
-    * Returns a <code>Collection</code> that contains a copy of all cache entries.
-    * @return a <code>Collection</code> with a copy of the cache content.
-    */
-    public synchronized Collection<Map.Entry<K,V>> getAll() {
-       return new ArrayList<Map.Entry<K,V>>(map.entrySet()); }
-
-    } // end class LRUCache
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/beagle/VariantsToBeagleUnphased.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/beagle/VariantsToBeagleUnphased.java
deleted file mode 100644
index c45ceb2..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/beagle/VariantsToBeagleUnphased.java
+++ /dev/null
@@ -1,184 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.tools.walkers.beagle;
-
-import org.broadinstitute.gatk.utils.commandline.Argument;
-import org.broadinstitute.gatk.utils.commandline.Input;
-import org.broadinstitute.gatk.utils.commandline.Output;
-import org.broadinstitute.gatk.utils.commandline.RodBinding;
-import org.broadinstitute.gatk.engine.CommandLineGATK;
-import org.broadinstitute.gatk.engine.contexts.AlignmentContext;
-import org.broadinstitute.gatk.engine.contexts.ReferenceContext;
-import org.broadinstitute.gatk.engine.refdata.RefMetaDataTracker;
-import org.broadinstitute.gatk.engine.walkers.RodWalker;
-import org.broadinstitute.gatk.utils.GenomeLoc;
-import org.broadinstitute.gatk.utils.SampleUtils;
-import org.broadinstitute.gatk.utils.help.HelpConstants;
-import org.broadinstitute.gatk.utils.variant.GATKVariantContextUtils;
-import htsjdk.variant.vcf.VCFHeader;
-import htsjdk.variant.vcf.VCFHeaderLine;
-import org.broadinstitute.gatk.utils.variant.GATKVCFUtils;
-import org.broadinstitute.gatk.utils.exceptions.UserException;
-import org.broadinstitute.gatk.utils.help.DocumentedGATKFeature;
-import htsjdk.variant.variantcontext.Allele;
-import htsjdk.variant.variantcontext.Genotype;
-import htsjdk.variant.variantcontext.VariantContext;
-import htsjdk.variant.variantcontext.writer.VariantContextWriter;
-
-import java.io.PrintStream;
-import java.util.Arrays;
-import java.util.Set;
-
-/**
- * Produces an input file to Beagle imputation engine, listing unphased, hard-called genotypes for a single sample
- * in input variant file.  Will additionally hold back a fraction of the sites for evaluation, marking the
- * genotypes at that sites as missing, and writing the truth of these sites to a second VCF file
- */
- at DocumentedGATKFeature( groupName = HelpConstants.DOCS_CAT_VARDISC, extraDocs = {CommandLineGATK.class} )
-public class VariantsToBeagleUnphased extends RodWalker<Integer, Integer> {
-    @Input(fullName="variants", shortName = "V", doc="Input VCF file", required=true)
-    public RodBinding<VariantContext> variants;
-
-    @Output(doc="File to which BEAGLE unphased genotypes should be written")
-    protected PrintStream  beagleWriter = null;
-
-    @Argument(fullName = "bootstrap_fraction", shortName = "bs", doc = "Proportion of records to be used in bootstrap set", required = false)
-    public double bootstrap = 0.0;
-
-    @Argument(fullName = "bootstrap_vcf",shortName = "bsvcf", doc = "Output a VCF with the records used for bootstrapping filtered out", required = false)
-    VariantContextWriter bootstrapVCFOutput = null;
-
-    @Argument(fullName = "missing", shortName = "missing", doc = "String to identify missing data in beagle output", required = false)
-    public String MISSING = "?";
-
-    private Set<String> samples = null;
-    private int bootstrapSetSize = 0;
-    private int testSetSize = 0;
-
-    public void initialize() {
-        samples = SampleUtils.getSampleListWithVCFHeader(getToolkit(), Arrays.asList(variants.getName()));
-
-        beagleWriter.print("I marker alleleA alleleB");
-        for ( String sample : samples )
-            beagleWriter.print(String.format(" %s %s", sample, sample));
-
-        beagleWriter.println();
-
-        if ( bootstrap < 0.0 | bootstrap > 1.0 )
-            throw new UserException.BadArgumentValue("bootstrap", "Bootstrap value must be fraction between 0 and 1");
-
-        if ( bootstrapVCFOutput != null ) {
-            Set<VCFHeaderLine> hInfo = GATKVCFUtils.getHeaderFields(getToolkit());
-            bootstrapVCFOutput.writeHeader(new VCFHeader(hInfo, SampleUtils.getUniqueSamplesFromRods(getToolkit())));
-        }
-    }
-
-    /**
-     * Iterate over each site, emitting the BEAGLE unphased genotypes file format
-     * @param tracker
-     * @param ref
-     * @param context
-     * @return
-     */
-    public Integer map( RefMetaDataTracker tracker, ReferenceContext ref, AlignmentContext context ) {
-        if( tracker != null ) {
-            GenomeLoc loc = context.getLocation();
-            VariantContext vc = tracker.getFirstValue(variants, loc);
-
-            if ( ProduceBeagleInput.canBeOutputToBeagle(vc) ) {
-                // do we want to hold back this site?
-                boolean makeMissing = dropSite(vc);
-
-                // if we are holding it back and we are writing a bootstrap VCF, write it out
-                if ( makeMissing && bootstrapVCFOutput != null ) {
-                    bootstrapVCFOutput.add(vc);
-                }
-
-                // regardless, all sites are written to the unphased genotypes file, marked as missing if appropriate
-                writeUnphasedBeagleOutput(vc, makeMissing);
-            }
-        }
-
-        return 0;
-    }
-
-    /**
-     * Do we want to hold back this site for bootstrap?  Considers the bootstrap fraction member variable
-     *
-     * @param vc
-     * @return
-     */
-    public boolean dropSite(VariantContext vc) {
-        if ( (bootstrapSetSize+1.0)/(1.0+bootstrapSetSize+testSetSize) <= bootstrap ) {
-            bootstrapSetSize++;
-            return true;
-        } else {
-            testSetSize++;
-            return false;
-        }
-    }
-
-    public void writeUnphasedBeagleOutput(VariantContext vc, boolean makeMissing) {
-        GenomeLoc currentLoc = GATKVariantContextUtils.getLocation(getToolkit().getGenomeLocParser(), vc);
-        StringBuffer beagleOut = new StringBuffer();
-
-        String marker = String.format("%s:%d ",currentLoc.getContig(), currentLoc.getStart());
-        beagleOut.append("M ").append(marker);
-
-        // write out the alleles at this site
-        for ( Allele allele : vc.getAlleles() ) {
-            beagleOut.append(allele.isNoCall() ? "-" : allele.getBaseString()).append(" ");
-        }
-
-        // write out sample level genotypes
-        for ( String sample : samples ) {
-            Genotype genotype = vc.getGenotype(sample);
-            if ( ! makeMissing && genotype.isCalled() ) {
-                addAlleles(beagleOut, genotype);
-            } else {
-                addAlleles(beagleOut, MISSING, MISSING);
-            }
-        }
-
-        beagleWriter.println(beagleOut.toString());
-    }
-
-    private void addAlleles(StringBuffer buf, Genotype g) {
-        addAlleles(buf, g.getAllele(0).getBaseString(), g.getAllele(1).getBaseString());
-
-    }
-
-    private void addAlleles(StringBuffer buf, String a, String b) {
-        buf.append(a).append(" ").append(b);
-    }
-
-    public Integer reduceInit() { return 0; }
-    public Integer reduce( Integer value, Integer sum ) { return value + sum; }
-
-    public void onTraversalDone( Integer includedSites ) {
-        logger.info("Sites included in beagle genotypes file : " + includedSites);
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/coverage/CallableLoci.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/coverage/CallableLoci.java
index 1757e7b..bf2eddc 100644
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/coverage/CallableLoci.java
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/coverage/CallableLoci.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -29,9 +29,9 @@ import org.broadinstitute.gatk.utils.commandline.Advanced;
 import org.broadinstitute.gatk.utils.commandline.Argument;
 import org.broadinstitute.gatk.utils.commandline.Output;
 import org.broadinstitute.gatk.engine.CommandLineGATK;
-import org.broadinstitute.gatk.engine.contexts.AlignmentContext;
-import org.broadinstitute.gatk.engine.contexts.ReferenceContext;
-import org.broadinstitute.gatk.engine.refdata.RefMetaDataTracker;
+import org.broadinstitute.gatk.utils.contexts.AlignmentContext;
+import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
+import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
 import org.broadinstitute.gatk.engine.walkers.By;
 import org.broadinstitute.gatk.engine.walkers.DataSource;
 import org.broadinstitute.gatk.engine.walkers.LocusWalker;
@@ -47,25 +47,25 @@ import java.io.PrintStream;
 
 
 /**
- * Emits a data file containing information about callable, uncallable, poorly mapped, and other parts of the genome
- * <p/>
+ * Collect statistics on callable, uncallable, poorly mapped, and other parts of the genome
+ *
  * <p>
- * A very common question about a NGS set of reads is what areas of the genome are considered callable. The system
+ * A very common question about a NGS set of reads is what areas of the genome are considered callable. This tool
  * considers the coverage at each locus and emits either a per base state or a summary interval BED file that
  * partitions the genomic intervals into the following callable states:
  * <dl>
  * <dt>REF_N</dt>
- * <dd>the reference base was an N, which is not considered callable the GATK</dd>
+ * <dd>The reference base was an N, which is not considered callable the GATK</dd>
  * <dt>PASS</dt>
- * <dd>the base satisfied the min. depth for calling but had less than maxDepth to avoid having EXCESSIVE_COVERAGE</dd>
+ * <dd>The base satisfied the min. depth for calling but had less than maxDepth to avoid having EXCESSIVE_COVERAGE</dd>
  * <dt>NO_COVERAGE</dt>
- * <dd>absolutely no reads were seen at this locus, regardless of the filtering parameters</dd>
+ * <dd>Absolutely no reads were seen at this locus, regardless of the filtering parameters</dd>
  * <dt>LOW_COVERAGE</dt>
- * <dd>there were less than min. depth bases at the locus, after applying filters</dd>
+ * <dd>There were fewer than min. depth bases at the locus, after applying filters</dd>
  * <dt>EXCESSIVE_COVERAGE</dt>
- * <dd>more than -maxDepth read at the locus, indicating some sort of mapping problem</dd>
+ * <dd>More than -maxDepth read at the locus, indicating some sort of mapping problem</dd>
  * <dt>POOR_MAPPING_QUALITY</dt>
- * <dd>more than --maxFractionOfReadsWithLowMAPQ at the locus, indicating a poor mapping quality of the reads</dd>
+ * <dd>More than --maxFractionOfReadsWithLowMAPQ at the locus, indicating a poor mapping quality of the reads</dd>
  * </dl>
  * </p>
  * <p/>
@@ -76,22 +76,19 @@ import java.io.PrintStream;
  * <p/>
  * <h3>Output</h3>
  * <p>
- * <ul>
- * <li>-o: a OutputFormatted (recommended BED) file with the callable status covering each base</li>
- * <li>-summary: a table of callable status x count of all examined bases</li>
- * </ul>
+ *     A file with the callable status covering each base and a table of callable status x count of all examined bases
  * </p>
- * <p/>
- * <h3>Examples</h3>
+ * <h3>Usage example</h3>
  * <pre>
  *  java -jar GenomeAnalysisTK.jar \
  *     -T CallableLoci \
- *     -I my.bam \
- *     -summary my.summary \
- *     -o my.bed
+ *     -R reference.fasta \
+ *     -I myreads.bam \
+ *     -summary table.txt \
+ *     -o callable_status.bed
  * </pre>
  * <p/>
- * would produce a BED file (my.bed) that looks like:
+ * would produce a BED file that looks like:
  * <p/>
  * <pre>
  *     20 10000000 10000864 PASS
@@ -107,14 +104,13 @@ import java.io.PrintStream;
  *     20 10012552 10012554 PASS
  *     20 10012555 10012557 LOW_COVERAGE
  *     20 10012558 10012558 PASS
- *     et cetera...
  * </pre>
  * as well as a summary table that looks like:
  * <p/>
  * <pre>
  *                        state nBases
  *                        REF_N 0
- *                     PASS 996046
+ *                         PASS 996046
  *                  NO_COVERAGE 121
  *                 LOW_COVERAGE 928
  *           EXCESSIVE_COVERAGE 0
@@ -131,7 +127,7 @@ public class CallableLoci extends LocusWalker<CallableLoci.CallableBaseState, Ca
     PrintStream out;
 
     /**
-     * Callable loci summary counts (see outputs) will be written to this file.
+     * Callable loci summary counts will be written to this file.
      */
     @Output(fullName = "summary", shortName = "summary", doc = "Name of file for output summary", required = true)
     File summaryFile;
@@ -190,7 +186,7 @@ public class CallableLoci extends LocusWalker<CallableLoci.CallableBaseState, Ca
     double maxLowMAPQFraction = 0.1;
 
     /**
-     * The output of this walker will be written in this format.  The recommended option is BED.
+     * The output of this tool will be written in this format.  The recommended option is BED.
      */
     @Advanced
     @Argument(fullName = "format", shortName = "format", doc = "Output format", required = false)
@@ -205,7 +201,7 @@ public class CallableLoci extends LocusWalker<CallableLoci.CallableBaseState, Ca
         BED,
 
         /**
-         * Emit chr start stop state quads for each base.  Produces a potentially disasterously
+         * Emit chr start stop state quads for each base.  Produces a potentially disastrously
          * large amount of output.
          */
         STATE_PER_BASE
@@ -213,7 +209,7 @@ public class CallableLoci extends LocusWalker<CallableLoci.CallableBaseState, Ca
 
     public enum CalledState {
         /**
-         * the reference base was an N, which is not considered callable the GATK
+         * the reference base was an N, which is not considered callable by the GATK
          */
         REF_N,
         /**
@@ -225,7 +221,7 @@ public class CallableLoci extends LocusWalker<CallableLoci.CallableBaseState, Ca
          */
         NO_COVERAGE,
         /**
-         * there were less than min. depth bases at the locus, after applying filters
+         * there were fewer than min. depth bases at the locus, after applying filters
          */
         LOW_COVERAGE,
         /**
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/coverage/CompareCallableLoci.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/coverage/CompareCallableLoci.java
index 9ab8555..1951a43 100644
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/coverage/CompareCallableLoci.java
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/coverage/CompareCallableLoci.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -31,9 +31,9 @@ import org.broadinstitute.gatk.utils.commandline.Input;
 import org.broadinstitute.gatk.utils.commandline.Output;
 import org.broadinstitute.gatk.utils.commandline.RodBinding;
 import org.broadinstitute.gatk.engine.CommandLineGATK;
-import org.broadinstitute.gatk.engine.contexts.AlignmentContext;
-import org.broadinstitute.gatk.engine.contexts.ReferenceContext;
-import org.broadinstitute.gatk.engine.refdata.RefMetaDataTracker;
+import org.broadinstitute.gatk.utils.contexts.AlignmentContext;
+import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
+import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
 import org.broadinstitute.gatk.engine.walkers.RodWalker;
 import org.broadinstitute.gatk.utils.GenomeLoc;
 import org.broadinstitute.gatk.utils.exceptions.UserException;
@@ -45,7 +45,33 @@ import java.util.Arrays;
 import java.util.List;
 
 /**
- * Test routine for new VariantContext object
+ * Compare callability statistics
+ *
+ * <p>This tool can be used to evaluate how different sequence datasets compare in terms of "callability"
+ * based on the output of the CallableLoci tool. </p>
+ *
+ *
+ * <h3>Input</h3>
+ * <p>
+ * Two files to compare, output by two runs of CallableLoci
+ * </p>
+ *
+ * <h3>Output</h3>
+ * <p>
+ * A table showing the callability status of each interval of interest in the two comparison sets and whether they match.
+ * </p>
+ *
+ * <h3>Usage example</h3>
+ * <pre>
+ * java -jar GenomeAnalysisTK.jar \
+ *   -R reference.fasta \
+ *   -T CompareCallableLoci \
+ *   -comp1 callable_loci_1.bed \
+ *   -comp2 callable_loci_2.bed \
+ *   [-L input.intervals] \
+ *   -o comparison.table
+ * </pre>
+ *
  */
 @DocumentedGATKFeature( groupName = HelpConstants.DOCS_CAT_QC, extraDocs = {CommandLineGATK.class} )
 public class CompareCallableLoci extends RodWalker<List<CallableLoci.CallableBaseState>, long[][]> {
@@ -103,7 +129,7 @@ public class CompareCallableLoci extends RodWalker<List<CallableLoci.CallableBas
         //System.out.printf("tracker %s%n", tracker);
         List<BEDFeature> bindings = tracker.getValues(rodBinding);
         if ( bindings.size() != 1 ) {
-            throw new UserException.MalformedFile(String.format("%s track isn't a properly formated CallableBases object!", rodBinding.getName()));
+            throw new UserException.MalformedFile(String.format("%s track isn't a properly formatted CallableBases object!", rodBinding.getName()));
         }
 
         BEDFeature bed = bindings.get(0);
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/coverage/CoverageUtils.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/coverage/CoverageUtils.java
index 7514fa5..6f0e550 100644
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/coverage/CoverageUtils.java
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/coverage/CoverageUtils.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -27,7 +27,7 @@ package org.broadinstitute.gatk.tools.walkers.coverage;
 
 import htsjdk.samtools.SAMReadGroupRecord;
 import htsjdk.samtools.SAMRecord;
-import org.broadinstitute.gatk.engine.contexts.AlignmentContext;
+import org.broadinstitute.gatk.utils.contexts.AlignmentContext;
 import org.broadinstitute.gatk.utils.BaseUtils;
 import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
 import org.broadinstitute.gatk.utils.exceptions.UserException;
@@ -137,6 +137,9 @@ public class CoverageUtils {
     public static Map<SAMReadGroupRecord,int[]> getBaseCountsByReadGroup(AlignmentContext context, int minMapQ, int maxMapQ, byte minBaseQ, byte maxBaseQ, CountPileupType countType) {
         Map<SAMReadGroupRecord, int[]> countsByRG = new HashMap<SAMReadGroupRecord,int[]>();
 
+        Map<String, int[]> countsByRGName = new HashMap<String, int[]>();
+        Map<String, SAMReadGroupRecord> RGByName = new HashMap<String, SAMReadGroupRecord>();
+
         List<PileupElement> countPileup = new LinkedList<PileupElement>();
         FragmentCollection<PileupElement> fpile;
 
@@ -202,10 +205,20 @@ public class CoverageUtils {
 
         for (PileupElement e : countPileup) {
             SAMReadGroupRecord readGroup = getReadGroup(e.getRead());
-            if (!countsByRG.keySet().contains(readGroup))
-                countsByRG.put(readGroup, new int[6]);
 
-            updateCounts(countsByRG.get(readGroup), e);
+            String readGroupId = readGroup.getSample() + "_" + readGroup.getReadGroupId();
+            int[] counts = countsByRGName.get(readGroupId);
+            if (counts == null) {
+                counts = new int[6];
+                countsByRGName.put(readGroupId, counts);
+                RGByName.put(readGroupId, readGroup);
+            }
+
+            updateCounts(counts, e);
+        }
+
+        for (String readGroupId : RGByName.keySet()) {
+            countsByRG.put(RGByName.get(readGroupId), countsByRGName.get(readGroupId));
         }
 
         return countsByRG;
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/coverage/DepthOfCoverage.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/coverage/DepthOfCoverage.java
index 3fc2e59..96144ce 100644
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/coverage/DepthOfCoverage.java
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/coverage/DepthOfCoverage.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -26,24 +26,22 @@
 package org.broadinstitute.gatk.tools.walkers.coverage;
 
 import htsjdk.samtools.SAMReadGroupRecord;
+import org.apache.log4j.Logger;
 import org.broadinstitute.gatk.engine.walkers.*;
-import org.broadinstitute.gatk.utils.commandline.Advanced;
-import org.broadinstitute.gatk.utils.commandline.Argument;
-import org.broadinstitute.gatk.utils.commandline.Output;
+import org.broadinstitute.gatk.utils.commandline.*;
 import org.broadinstitute.gatk.engine.CommandLineGATK;
-import org.broadinstitute.gatk.engine.downsampling.DownsampleType;
-import org.broadinstitute.gatk.engine.contexts.AlignmentContext;
-import org.broadinstitute.gatk.engine.contexts.ReferenceContext;
-import org.broadinstitute.gatk.engine.refdata.RefMetaDataTracker;
-import org.broadinstitute.gatk.engine.refdata.SeekableRODIterator;
-import org.broadinstitute.gatk.engine.refdata.tracks.RMDTrack;
-import org.broadinstitute.gatk.engine.refdata.tracks.RMDTrackBuilder;
-import org.broadinstitute.gatk.engine.refdata.utils.GATKFeature;
-import org.broadinstitute.gatk.engine.refdata.utils.LocationAwareSeekableRODIterator;
-import org.broadinstitute.gatk.engine.refdata.utils.RODRecordList;
+import org.broadinstitute.gatk.utils.downsampling.DownsampleType;
+import org.broadinstitute.gatk.utils.contexts.AlignmentContext;
+import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
+import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
+import org.broadinstitute.gatk.utils.refdata.SeekableRODIterator;
+import org.broadinstitute.gatk.utils.refdata.tracks.RMDTrack;
+import org.broadinstitute.gatk.utils.refdata.tracks.RMDTrackBuilder;
+import org.broadinstitute.gatk.utils.refdata.utils.GATKFeature;
+import org.broadinstitute.gatk.utils.refdata.utils.LocationAwareSeekableRODIterator;
+import org.broadinstitute.gatk.utils.refdata.utils.RODRecordList;
 import org.broadinstitute.gatk.utils.BaseUtils;
 import org.broadinstitute.gatk.utils.GenomeLoc;
-import org.broadinstitute.gatk.utils.SampleUtils;
 import org.broadinstitute.gatk.utils.codecs.refseq.RefSeqCodec;
 import org.broadinstitute.gatk.utils.codecs.refseq.RefSeqFeature;
 import org.broadinstitute.gatk.utils.collections.Pair;
@@ -51,6 +49,7 @@ import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
 import org.broadinstitute.gatk.utils.exceptions.UserException;
 import org.broadinstitute.gatk.utils.help.DocumentedGATKFeature;
 import org.broadinstitute.gatk.utils.help.HelpConstants;
+import org.broadinstitute.gatk.utils.sam.ReadUtils;
 
 import java.io.File;
 import java.io.PrintStream;
@@ -63,46 +62,37 @@ import java.util.*;
  * This tool processes a set of bam files to determine coverage at different levels of partitioning and
  * aggregation. Coverage can be analyzed per locus, per interval, per gene, or in total; can be partitioned by
  * sample, by read group, by technology, by center, or by library; and can be summarized by mean, median, quartiles,
- * and/or percentage of bases covered to or beyond a threshold.
- * Additionally, reads and bases can be filtered by mapping or base quality score.
+ * and/or percentage of bases covered to or beyond a threshold. Additionally, reads and bases can be filtered by
+ * mapping or base quality score.
+ * </p>
  *
  * <h3>Input</h3>
- * <p>
- * One or more bam files (with proper headers) to be analyzed for coverage statistics
- * </p>
- * <p>
- *(Optional) A REFSEQ Rod to aggregate coverage to the gene level
- * <p>
- * (for information about creating the REFSEQ Rod, please consult the online documentation)
- *</p></p>
+ * <ul>
+ *     <li>One or more bam files (with proper headers) to be analyzed for coverage statistics</li>
+ *     <li>(Optional) A REFSEQ file to aggregate coverage to the gene level (for information about creating the REFSEQ Rod, please consult the online documentation)</li>
+ * </ul>
+
  * <h3>Output</h3>
  * <p>
  * Tables pertaining to different coverage summaries. Suffix on the table files declares the contents:
- * </p><p>
- *  - no suffix: per locus coverage
- * </p><p>
- *  - _summary: total, mean, median, quartiles, and threshold proportions, aggregated over all bases
- * </p><p>
- *  - _statistics: coverage histograms (# locus with X coverage), aggregated over all bases
- * </p><p>
- *  - _interval_summary: total, mean, median, quartiles, and threshold proportions, aggregated per interval
- * </p><p>
- *  - _interval_statistics: 2x2 table of # of intervals covered to >= X depth in >=Y samples
- * </p><p>
- *  - _gene_summary: total, mean, median, quartiles, and threshold proportions, aggregated per gene
- * </p><p>
- *  - _gene_statistics: 2x2 table of # of genes covered to >= X depth in >= Y samples
- * </p><p>
- *  - _cumulative_coverage_counts: coverage histograms (# locus with >= X coverage), aggregated over all bases
- * </p><p>
- *  - _cumulative_coverage_proportions: proprotions of loci with >= X coverage, aggregated over all bases
  * </p>
+ * <ul>
+ *     <li>no suffix: per locus coverage</li>
+ *     <li>_summary: total, mean, median, quartiles, and threshold proportions, aggregated over all bases</li>
+ *     <li>_statistics: coverage histograms (# locus with X coverage), aggregated over all bases</li>
+ *     <li>_interval_summary: total, mean, median, quartiles, and threshold proportions, aggregated per interval</li>
+ *     <li>_interval_statistics: 2x2 table of # of intervals covered to >= X depth in >=Y samples</li>
+ *     <li>_gene_summary: total, mean, median, quartiles, and threshold proportions, aggregated per gene</li>
+ *     <li>_gene_statistics: 2x2 table of # of genes covered to >= X depth in >= Y samples</li>
+ *     <li>_cumulative_coverage_counts: coverage histograms (# locus with >= X coverage), aggregated over all bases</li>
+ *     <li>_cumulative_coverage_proportions: proportions of loci with >= X coverage, aggregated over all bases</li>
+ * </ul>
  *
- * <h3>Examples</h3>
+ * <h3>Usage example</h3>
  * <pre>
- * java -Xmx2g -jar GenomeAnalysisTK.jar \
- *   -R ref.fasta \
+ * java -jar GenomeAnalysisTK.jar \
  *   -T DepthOfCoverage \
+ *   -R reference.fasta \
  *   -o file_name_base \
  *   -I input_bams.list
  *   [-geneList refSeq.sorted.txt] \
@@ -110,7 +100,6 @@ import java.util.*;
  *   [-ct 4 -ct 6 -ct 10] \
  *   [-L my_capture_genes.interval_list]
  * </pre>
- *
  */
 // todo -- cache the map from sample names to means in the print functions, rather than regenerating each time
 // todo -- support for granular histograms for total depth; maybe n*[start,stop], bins*sqrt(n)
@@ -122,6 +111,13 @@ import java.util.*;
 @PartitionBy(PartitionType.NONE)
 @Downsample(by= DownsampleType.NONE, toCoverage=Integer.MAX_VALUE)
 public class DepthOfCoverage extends LocusWalker<Map<DoCOutputType.Partition,Map<String,int[]>>, CoveragePartitioner> implements TreeReducible<CoveragePartitioner> {
+    private final static Logger logger = Logger.getLogger(DepthOfCoverage.class);
+
+    /**
+     * Warning message for when the incompatible arguments --calculateCoverageOverGenes and --omitIntervalStatistics are used together.
+     */
+    private static final String incompatibleArgsMsg = "The arguments --calculateCoverageOverGenes and --omitIntervalStatistics are incompatible. Using them together will result in an empty gene summary output file.";
+
     @Output
     @Multiplex(value=DoCOutputMultiplexer.class,arguments={"partitionTypes","refSeqGeneList","omitDepthOutput","omitIntervals","omitSampleSummary","omitLocusTable"})
     Map<DoCOutputType,PrintStream> out;
@@ -174,6 +170,9 @@ public class DepthOfCoverage extends LocusWalker<Map<DoCOutputType.Partition,Map
 
     /**
      * Specify a RefSeq file for use in aggregating coverage statistics over genes.
+     *
+     * This argument is incompatible with --calculateCoverageOverGenes and --omitIntervalStatistics. A warning will be logged and no output file will be produced for the gene list if these arguments are enabled together.
+     *
      */
     @Argument(fullName = "calculateCoverageOverGenes", shortName = "geneList", doc = "Calculate coverage statistics over this list of genes", required = false)
     File refSeqGeneList = null;
@@ -261,8 +260,14 @@ public class DepthOfCoverage extends LocusWalker<Map<DoCOutputType.Partition,Map
 
     public boolean includeReadsWithDeletionAtLoci() { return includeDeletions && ! ignoreDeletionSites; }
 
+    public static String incompatibleArgsMsg() { return incompatibleArgsMsg; }
+
     public void initialize() {
 
+        if ( omitIntervals && refSeqGeneList != null ){
+            logger.warn(incompatibleArgsMsg);
+        }
+
         if ( printBinEndpointsAndExit ) {
             int[] endpoints = DepthOfCoverageStats.calculateBinEndpoints(start,stop,nBins);
             System.out.print("[ ");
@@ -336,7 +341,7 @@ public class DepthOfCoverage extends LocusWalker<Map<DoCOutputType.Partition,Map
     private HashSet<String> getSamplesFromToolKit(DoCOutputType.Partition type) {
         HashSet<String> partition = new HashSet<String>();
         if ( type == DoCOutputType.Partition.sample ) {
-            partition.addAll(SampleUtils.getSAMFileSamples(getToolkit()));
+            partition.addAll(ReadUtils.getSAMFileSamples(getToolkit().getSAMFileHeader()));
         } else if ( type == DoCOutputType.Partition.readgroup ) {
             for ( SAMReadGroupRecord rg : getToolkit().getSAMFileHeader().getReadGroups() ) {
                 partition.add(rg.getSample()+"_rg_"+rg.getReadGroupId());
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/coverage/DepthOfCoverageStats.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/coverage/DepthOfCoverageStats.java
index c8a4356..aa7b5ae 100644
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/coverage/DepthOfCoverageStats.java
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/coverage/DepthOfCoverageStats.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/coverage/DoCOutputType.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/coverage/DoCOutputType.java
index 6e2266d..f91c385 100644
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/coverage/DoCOutputType.java
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/coverage/DoCOutputType.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/coverage/GCContentByInterval.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/coverage/GCContentByInterval.java
index a23cfe3..c4d1aef 100644
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/coverage/GCContentByInterval.java
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/coverage/GCContentByInterval.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -28,9 +28,9 @@ package org.broadinstitute.gatk.tools.walkers.coverage;
 import org.broadinstitute.gatk.engine.walkers.*;
 import org.broadinstitute.gatk.utils.commandline.Output;
 import org.broadinstitute.gatk.engine.CommandLineGATK;
-import org.broadinstitute.gatk.engine.contexts.AlignmentContext;
-import org.broadinstitute.gatk.engine.contexts.ReferenceContext;
-import org.broadinstitute.gatk.engine.refdata.RefMetaDataTracker;
+import org.broadinstitute.gatk.utils.contexts.AlignmentContext;
+import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
+import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
 import org.broadinstitute.gatk.utils.BaseUtils;
 import org.broadinstitute.gatk.utils.GenomeLoc;
 import org.broadinstitute.gatk.utils.collections.Pair;
@@ -41,7 +41,7 @@ import java.io.PrintStream;
 import java.util.List;
 
 /**
- * Walks along reference and calculates the GC content for each interval.
+ * Calculates the GC content of the reference sequence for each interval
  *
  *
  * <h3>Input</h3>
@@ -54,11 +54,11 @@ import java.util.List;
  *  GC content calculations per interval.
  * </p>
  *
- * <h3>Example</h3>
+ * <h3>Usage example</h3>
  * <pre>
- * java -Xmx2g -jar GenomeAnalysisTK.jar \
+ * java -jar GenomeAnalysisTK.jar \
  *   -T GCContentByInterval \
- *   -R ref.fasta \
+ *   -R reference.fasta \
  *   -o output.txt \
  *   -L input.intervals
  * </pre>
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/diagnostics/CoveredByNSamplesSites.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/diagnostics/CoveredByNSamplesSites.java
deleted file mode 100644
index 370cea2..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/diagnostics/CoveredByNSamplesSites.java
+++ /dev/null
@@ -1,154 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.tools.walkers.diagnostics;
-
-
-import org.broadinstitute.gatk.engine.walkers.By;
-import org.broadinstitute.gatk.engine.walkers.DataSource;
-import org.broadinstitute.gatk.engine.walkers.RodWalker;
-import org.broadinstitute.gatk.engine.walkers.TreeReducible;
-import org.broadinstitute.gatk.utils.commandline.Argument;
-import org.broadinstitute.gatk.utils.commandline.ArgumentCollection;
-import org.broadinstitute.gatk.utils.commandline.Output;
-import org.broadinstitute.gatk.engine.CommandLineGATK;
-import org.broadinstitute.gatk.engine.arguments.StandardVariantContextInputArgumentCollection;
-import org.broadinstitute.gatk.engine.contexts.AlignmentContext;
-import org.broadinstitute.gatk.engine.contexts.ReferenceContext;
-import org.broadinstitute.gatk.engine.refdata.RefMetaDataTracker;
-import org.broadinstitute.gatk.utils.GenomeLoc;
-import org.broadinstitute.gatk.utils.help.DocumentedGATKFeature;
-import org.broadinstitute.gatk.utils.help.HelpConstants;
-import htsjdk.variant.variantcontext.Genotype;
-import htsjdk.variant.variantcontext.GenotypesContext;
-import htsjdk.variant.variantcontext.VariantContext;
-
-
-import java.io.*;
-import java.util.Collection;
-
-/**
- * Print intervals file with all the variant sites for which most of the samples have good coverage
- *
- * <p>
- * CoveredByNSamplesSites is a GATK tool for filtering out sites based on their coverage.
- * The sites that pass the filter are printed out to an intervals file.
- *
- * See argument defaults for what constitutes "most" samples and "good" coverage. These parameters can be modified from the command line.
- * </p>
- *
- * <h3>Input</h3>
- * <p>
- * A variant file and optionally min coverage and sample percentage values.
- * </p>
- *
- * <h3>Output</h3>
- * <p>
- * An intervals file.
- * </p>
- *
- * <h3>Example</h3>
- * <pre>
- * java -Xmx2g -jar GenomeAnalysisTK.jar \
- *   -R ref.fasta \
- *   -T CoveredByNSamplesSites \
- *   -V input.vcf \
- *   -out output.intervals \
- *   -minCov 15
- * </pre>
- *
- */
- at DocumentedGATKFeature( groupName = HelpConstants.DOCS_CAT_QC, extraDocs = {CommandLineGATK.class} )
- at By(DataSource.REFERENCE_ORDERED_DATA)
-public class CoveredByNSamplesSites extends RodWalker<GenomeLoc, Integer> implements TreeReducible<Integer> {
-
-    @Output(fullName = "OutputIntervals", shortName = "out", doc = "Name of file for output intervals")
-    PrintStream outputStream;
-
-    @ArgumentCollection
-    protected StandardVariantContextInputArgumentCollection variantCollection = new StandardVariantContextInputArgumentCollection();
-
-    @Argument(fullName = "minCoverage", shortName = "minCov",doc = "only samples that have coverage bigger than minCoverage will be counted",required = false)
-    int minCoverage = 10;
-
-    @Argument(fullName = "percentageOfSamples", shortName = "percentage", doc = "only sites where at least percentageOfSamples of the samples have good coverage, will be emitted", required = false)
-    double percentageOfSamples = 0.9;
-
-    @Override
-    public GenomeLoc map(RefMetaDataTracker tracker, ReferenceContext ref, AlignmentContext context) {
-        if ( tracker == null )
-            return null;
-
-        Collection<VariantContext> VCs = tracker.getValues(variantCollection.variants, context.getLocation());
-        if ( VCs.size() == 0 )
-            return null;
-
-        boolean emitSite = false;
-        for(VariantContext vc : VCs){
-            int coveredSamples = 0;
-            final GenotypesContext genotypes = vc.getGenotypes();
-            final int numOfGenotypes = genotypes.size();
-            for(Genotype g : genotypes){
-                if(g.getDP() >= minCoverage)
-                    coveredSamples++;
-            }
-            if((double)coveredSamples/numOfGenotypes > percentageOfSamples){
-                emitSite = true;
-            }
-        }
-        if (emitSite)
-            return ref.getLocus();
-        else
-            return null;
-    }
-
-    @Override
-    public Integer reduceInit() { return 0; }
-
-    @Override
-    public Integer reduce(GenomeLoc value, Integer sum) {
-        if ( value != null ) {
-            outputStream.println(value);
-            sum++;
-        }
-        return sum;
-    }
-
-    @Override
-    public Integer treeReduce(Integer lhs, Integer rhs) {
-        return lhs + rhs;
-    }
-
-    /**
-     *
-     * @param result the number of sites that passed the filter.
-     */
-    public void onTraversalDone(Integer result) {
-        logger.info(result+" sites that have "+(percentageOfSamples*100)+"% of the samples with at least "+minCoverage+" coverage.\n");
-    }
-
-
-
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/diagnostics/ErrorRatePerCycle.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/diagnostics/ErrorRatePerCycle.java
index 910afa4..6808d92 100644
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/diagnostics/ErrorRatePerCycle.java
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/diagnostics/ErrorRatePerCycle.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -28,11 +28,11 @@ package org.broadinstitute.gatk.tools.walkers.diagnostics;
 import org.broadinstitute.gatk.utils.commandline.Argument;
 import org.broadinstitute.gatk.utils.commandline.Output;
 import org.broadinstitute.gatk.engine.CommandLineGATK;
-import org.broadinstitute.gatk.engine.contexts.AlignmentContext;
-import org.broadinstitute.gatk.engine.contexts.ReferenceContext;
-import org.broadinstitute.gatk.engine.refdata.RefMetaDataTracker;
-import org.broadinstitute.gatk.engine.report.GATKReport;
-import org.broadinstitute.gatk.engine.report.GATKReportTable;
+import org.broadinstitute.gatk.utils.contexts.AlignmentContext;
+import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
+import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
+import org.broadinstitute.gatk.utils.report.GATKReport;
+import org.broadinstitute.gatk.utils.report.GATKReportTable;
 import org.broadinstitute.gatk.engine.walkers.LocusWalker;
 import org.broadinstitute.gatk.utils.BaseUtils;
 import org.broadinstitute.gatk.utils.QualityUtils;
@@ -83,19 +83,18 @@ import java.io.PrintStream;
  *      </pre>
  *  </p>
  *
- * <h3>Example</h3>
+ * <h3>Usage example</h3>
  *  <pre>
- *    java
- *      -jar GenomeAnalysisTK.jar
- *      -T ErrorRatePerCycle
- *      -R human_g1k_v37.fasta
- *      -I my_sequence_reads.bam
+ *    java -jar GenomeAnalysisTK.jar \
+ *      -T ErrorRatePerCycle \
+ *      -R reference.fasta \
+ *      -I my_sequence_reads.bam \
  *      -o error_rates.gatkreport.txt
  *  </pre>
  *
  * <h3>Caveat</h3>
  *
- * <p>Note that when it is run on paired-end sequence data, this tool only uses the first read in a pair.</p>
+ * <p>When it is run on paired-end sequence data, this tool only uses the first read in a pair.</p>
  *
  * @author Kiran Garimella, Mark DePristo
  */
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/diagnostics/ReadGroupProperties.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/diagnostics/ReadGroupProperties.java
index e9856de..86185b1 100644
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/diagnostics/ReadGroupProperties.java
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/diagnostics/ReadGroupProperties.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -29,10 +29,10 @@ import htsjdk.samtools.SAMReadGroupRecord;
 import org.broadinstitute.gatk.utils.commandline.Argument;
 import org.broadinstitute.gatk.utils.commandline.Output;
 import org.broadinstitute.gatk.engine.CommandLineGATK;
-import org.broadinstitute.gatk.engine.contexts.ReferenceContext;
-import org.broadinstitute.gatk.engine.refdata.RefMetaDataTracker;
-import org.broadinstitute.gatk.engine.report.GATKReport;
-import org.broadinstitute.gatk.engine.report.GATKReportTable;
+import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
+import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
+import org.broadinstitute.gatk.utils.report.GATKReport;
+import org.broadinstitute.gatk.utils.report.GATKReportTable;
 import org.broadinstitute.gatk.engine.walkers.ReadWalker;
 import org.broadinstitute.gatk.utils.Median;
 import org.broadinstitute.gatk.utils.help.DocumentedGATKFeature;
@@ -45,13 +45,15 @@ import java.util.HashMap;
 import java.util.Map;
 
 /**
- * Emits a GATKReport containing read group, sample, library, platform, center, sequencing data,
+ * Collect statistics about read groups and their properties
+ *
+ * <p>This tool emits a GATKReport containing read group, sample, library, platform, center, sequencing data,
  * paired end status, simple read type name (e.g. 2x76) median insert size and median read length
- * for each read group in every provided BAM file
+ * for each read group in every provided BAM file.</p>
  *
- * Note that this walker stops when all read groups have been observed at least a few thousand times so that
- * the median statistics are well determined.  It is safe to run it WG and it'll finish in an appropriate
- * timeframe.
+ * <p>Note that this walker stops when all read groups have been observed at least a few thousand times so that
+ * the median statistics are well determined.  It is safe to run it on whole genome sequence data and expect it to
+ * finish in an appropriate timeframe.</p>
  *
  * <h3>Input</h3>
  *  <p>
@@ -86,14 +88,14 @@ import java.util.Map;
  *      </pre>
  *  </p>
  *
- * <h3>Examples</h3>
+ * <h3>Usage example</h3>
  *  <pre>
- *    java
- *      -jar GenomeAnalysisTK.jar
- *      -T ReadGroupProperties
- *      -I example1.bam -I example2.bam etc
- *      -R reference.fasta
- *      -o example.gatkreport.txt
+ *    java -jar GenomeAnalysisTK.jar \
+ *      -T ReadGroupProperties \
+ *      -R reference.fasta \
+ *      -I example1.bam \
+ *      -I example2.bam \
+ *      -o readgroup_report.grp
  *  </pre>
  *
  * @author Mark DePristo
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/diagnostics/ReadLengthDistribution.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/diagnostics/ReadLengthDistribution.java
index a632f25..1f3d6c7 100644
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/diagnostics/ReadLengthDistribution.java
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/diagnostics/ReadLengthDistribution.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -28,10 +28,10 @@ package org.broadinstitute.gatk.tools.walkers.diagnostics;
 import htsjdk.samtools.SAMReadGroupRecord;
 import org.broadinstitute.gatk.utils.commandline.Output;
 import org.broadinstitute.gatk.engine.CommandLineGATK;
-import org.broadinstitute.gatk.engine.contexts.ReferenceContext;
-import org.broadinstitute.gatk.engine.refdata.RefMetaDataTracker;
-import org.broadinstitute.gatk.engine.report.GATKReport;
-import org.broadinstitute.gatk.engine.report.GATKReportTable;
+import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
+import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
+import org.broadinstitute.gatk.utils.report.GATKReport;
+import org.broadinstitute.gatk.utils.report.GATKReportTable;
 import org.broadinstitute.gatk.engine.walkers.ReadWalker;
 import org.broadinstitute.gatk.utils.help.DocumentedGATKFeature;
 import org.broadinstitute.gatk.utils.help.HelpConstants;
@@ -44,10 +44,10 @@ import java.util.Map;
 import java.util.TreeMap;
 
 /**
- * Outputs the read lengths of all the reads in a file.
+ * Collect read length statistics
  *
  *  <p>
- *     Generates a table with the read lengths categorized per sample. If the file has no sample information
+ *     This tool generates a table with the read lengths categorized per sample. If the file has no sample information
  *     (no read groups) it considers all reads to come from the same sample.
  *  </p>
  *
@@ -59,16 +59,15 @@ import java.util.TreeMap;
  *
  * <h3>Output</h3>
  *  <p>
- *      A human/R readable table of tab separated values with one column per sample and one row per read.
+ *      A human/R-readable table of tab-separated values with one column per sample and one row per read.
  *  </p>
  *
- * <h3>Examples</h3>
+ * <h3>Usage example</h3>
  *  <pre>
- *    java
- *      -jar GenomeAnalysisTK.jar
- *      -T ReadLengthDistribution
- *      -I example.bam
- *      -R reference.fasta
+ *    java -jar GenomeAnalysisTK.jar \
+ *      -T ReadLengthDistribution \
+ *      -R reference.fasta \
+ *      -I example.bam \
  *      -o example.tbl
  *  </pre>
  *
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/diffengine/DiffObjects.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/diffengine/DiffObjects.java
new file mode 100644
index 0000000..f12d08a
--- /dev/null
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/diffengine/DiffObjects.java
@@ -0,0 +1,279 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.tools.walkers.diffengine;
+
+import org.broadinstitute.gatk.utils.commandline.Argument;
+import org.broadinstitute.gatk.utils.commandline.Input;
+import org.broadinstitute.gatk.utils.commandline.Output;
+import org.broadinstitute.gatk.engine.CommandLineGATK;
+import org.broadinstitute.gatk.utils.contexts.AlignmentContext;
+import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
+import org.broadinstitute.gatk.utils.diffengine.DiffElement;
+import org.broadinstitute.gatk.utils.diffengine.DiffEngine;
+import org.broadinstitute.gatk.utils.diffengine.Difference;
+import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
+import org.broadinstitute.gatk.engine.walkers.RodWalker;
+import org.broadinstitute.gatk.utils.help.DocumentedGATKFeature;
+import org.broadinstitute.gatk.utils.help.HelpConstants;
+
+import java.io.File;
+import java.io.PrintStream;
+import java.util.List;
+
+/**
+ * A generic engine for comparing tree-structured objects
+ *
+ * <p>
+ *      This tool compares two record-oriented files, itemizing specific differences between equivalent
+ *      records in the two files.  It reports both itemized and summarized differences.
+ * </p>
+ *
+ * <h3>What are the summarized differences and the DiffObjectsWalker?</h3>
+ *
+ * <p>
+ *     The GATK contains a summarizing difference engine that compares hierarchical data structures to emit:
+ *      <ul>
+ *          <li>A list of specific differences between the two data structures.  This is similar to saying the value in field A in record 1 in file F differs from the value in field A in record 1 in file G.</li>
+ *          <li>A summarized list of differences ordered by frequency of the difference.  This output is similar to saying field A in 50 records in files F and G differed.</li>
+ *      </ul>
+ * </p>
+ *
+ * <p>
+ *      The GATK contains a private walker DiffObjects that allows you access to the DiffEngine capabilities on the command line.  Simply provide the walker with the master and test files and it will emit summarized differences for you.
+ * </p>
+ *
+ * <h3>Why?</h3>
+ *
+ * <p>
+ *      The reason for this system is that it allows you to compare two structured files -- such as BAMs and VCFs -- for common differences among them.  This is primarily useful in regression testing or optimization, where you want to ensure that the differences are those that you expect and not any others.
+ * </p>
+ *
+ * <h3>Input</h3>
+ * <p>
+ *      The DiffObjectsWalker works with BAM or VCF files.
+ * </p>
+ *
+ * <h3>Output</h3>
+ * <p>
+ *      The DiffEngine system compares two hierarchical data structures for specific differences in the values of named
+ *      nodes.  Suppose I have two trees:
+ * <pre>
+ *     Tree1=(A=1 B=(C=2 D=3))
+ *     Tree2=(A=1 B=(C=3 D=3 E=4))
+ *     Tree3=(A=1 B=(C=4 D=3 E=4))
+ * </pre>
+ * <p>
+ *     where every node in the tree is named, or is a raw value (here all leaf values are integers).  The DiffEngine
+ *      traverses these data structures by name, identifies equivalent nodes by fully qualified names
+ *      (Tree1.A is distinct from Tree2.A), and determines whether their values are equal (Tree1.A=1, Tree2.A=1, so they are).
+ *      These itemized differences are listed as:
+ * <pre>
+ *     Tree1.B.C=2 != Tree2.B.C=3
+ *     Tree1.B.C=2 != Tree3.B.C=4
+ *     Tree2.B.C=3 != Tree3.B.C=4
+ *     Tree1.B.E=MISSING != Tree2.B.E=4
+ * </pre>
+ *
+ * <p>
+ *      This is conceptually very similar to the output of the unix command line tool diff.  What's nice about DiffEngine though
+ *      is that it computes similarity among the itemized differences and displays the counts of the differing field names
+ *      in the system.  In the above example, the field C is not equal three times, while the missing E in Tree1 occurs
+ *      only once.  So the summary is:
+ *
+ * <pre>
+ *     *.B.C : 3
+ *     *.B.E : 1
+ * </pre>
+ *
+ * <p>
+ *      where the * operator indicates that any named field matches.  This output is sorted by counts, and provides an
+ *      immediate picture of the commonly occurring differences among the files.
+ * <p>
+ *      Below is a detailed example of two VCF fields that differ because of a bug in the AC, AF, and AN counting routines,
+ *      detected by the integrationtest integration (more below).  You can see that although there are many specific
+ *      instances of these differences between the two files, the summarized differences provide an immediate picture that
+ *      the AC, AF, and AN fields are the major causes of the differences.
+ * <p>
+ *
+ * <pre>
+ [testng] path                                                             count
+ [testng] *.*.*.AC                                                         6
+ [testng] *.*.*.AF                                                         6
+ [testng] *.*.*.AN                                                         6
+ [testng] 64b991fd3850f83614518f7d71f0532f.integrationtest.20:10000000.AC  1
+ [testng] 64b991fd3850f83614518f7d71f0532f.integrationtest.20:10000000.AF  1
+ [testng] 64b991fd3850f83614518f7d71f0532f.integrationtest.20:10000000.AN  1
+ [testng] 64b991fd3850f83614518f7d71f0532f.integrationtest.20:10000117.AC  1
+ [testng] 64b991fd3850f83614518f7d71f0532f.integrationtest.20:10000117.AF  1
+ [testng] 64b991fd3850f83614518f7d71f0532f.integrationtest.20:10000117.AN  1
+ [testng] 64b991fd3850f83614518f7d71f0532f.integrationtest.20:10000211.AC  1
+ [testng] 64b991fd3850f83614518f7d71f0532f.integrationtest.20:10000211.AF  1
+ [testng] 64b991fd3850f83614518f7d71f0532f.integrationtest.20:10000211.AN  1
+ [testng] 64b991fd3850f83614518f7d71f0532f.integrationtest.20:10000598.AC  1
+ </pre>
+ *
+ * <h3>Caveat</h3>
+ * <p>Because this is a walker, it requires that you pass a reference file. However the reference is not actually used, so it does not matter what you pass as reference.</p>
+ *
+ *
+ * @author Mark DePristo
+ * @since 7/4/11
+ */
+ at DocumentedGATKFeature( groupName = HelpConstants.DOCS_CAT_QC, extraDocs = {CommandLineGATK.class} )
+public class DiffObjects extends RodWalker<Integer, Integer> {
+    /**
+     * Writes out a file of the DiffEngine format:
+     *
+     *     See http://www.broadinstitute.org/gatk/guide/article?id=1299 for details.
+     */
+    @Output(doc="File to which results should be written")
+    protected PrintStream out;
+
+    /**
+     * The master file against which we will compare the test file.  This is one of the two required
+     * files to do the comparison.  Conceptually the master is the original file containing the expected
+     * results; this doesn't currently have an impact on the calculations, but it might in the future.
+     */
+    @Input(fullName="master", shortName="m", doc="Master file: expected results", required=true)
+    File masterFile;
+
+    /**
+     * The test file that we will compare to the master.  This is one of the two required
+     * files to do the comparison.  Conceptually the test file is derived from the master; this
+     * doesn't currently have an impact on the calculations, but it might in the future.
+     */
+    @Input(fullName="test", shortName="t", doc="Test file: new results to compare to the master file", required=true)
+    File testFile;
+
+    /**
+     * The engine will read at most this number of objects from each of master and test files.  This reduces
+     * the memory requirements for DiffObjects but does limit you to comparing at most this number of objects
+     */
+    @Argument(fullName="maxObjectsToRead", shortName="motr", doc="Max. number of objects to read from the files.  -1 [default] means unlimited", required=false)
+    int MAX_OBJECTS_TO_READ = -1;
+
+    @Argument(fullName="maxRawDiffsToSummarize", shortName="maxRawDiffsToSummarize", doc="Max. number of differences to include in the summary.  -1 [default] means unlimited", required=false)
+    int maxRawDiffsToSummary = -1;
+
+    @Argument(fullName="doPairwise", shortName="doPairwise", doc="If provided, we will compute the minimum pairwise differences to summary, which can be extremely expensive", required=false)
+    boolean doPairwise = false;
+
+    /**
+     * The max number of differences to display when summarizing.  For example, if there are 10M differences, but
+     * maxDiffs is 10, then the comparison aborts after first ten summarized differences are shown.  Note that
+     * the system shows differences sorted by frequency, so these 10 would be the most common between the two files.
+     * A value of 0 means show all possible differences.
+     */
+    @Argument(fullName="maxDiffs", shortName="M", doc="Max. number of diffs to process", required=false)
+    int MAX_DIFFS = 0;
+
+    /**
+     * The maximum number of singleton differences (those occurring exactly once between the two files) to display when writing out
+     * the summary.  Only applies if maxDiffs hasn't been exceeded.  For example, if maxDiffs is 10 and maxCount1Diffs
+     * is 2 and there are 20 diffs with count > 1, then only 10 are shown, all of which have count above 1.
+     */
+    @Argument(fullName="maxCount1Diffs", shortName="M1", doc="Max. number of diffs occuring exactly once in the file to process", required=false)
+    int MAX_COUNT1_DIFFS = 0;
+
+    /**
+     * Only differences that occur more than minCountForDiff are displayed.  For example, if minCountForDiff is 10, then
+     * a difference must occur at least 10 times between the two files to be shown.
+     */
+    @Argument(fullName="minCountForDiff", shortName="MCFD", doc="Min number of observations for a records to display", required=false)
+    int minCountForDiff = 1;
+
+    /**
+     * If provided, the system will write out the summarized, individual differences.  May lead to enormous outputs,
+     * depending on how many differences are found.  Note these are not sorted in any way, so if you have 10M
+     * common differences in the files, you will see 10M records, whereas the final summary will just list the
+     * difference and its count of 10M.
+     */
+    @Argument(fullName="showItemizedDifferences", shortName="SID", doc="Should we enumerate all differences between the files?", required=false)
+    boolean showItemizedDifferences = false;
+
+    @Argument(fullName="iterations", doc="Number of iterations to perform, should be 1 unless you are doing memory testing", required=false)
+    int iterations = 1;
+
+    DiffEngine diffEngine;
+
+    @Override
+    public void initialize() {
+        this.diffEngine = new DiffEngine();
+    }
+
+    @Override
+    public Integer map(RefMetaDataTracker tracker, ReferenceContext ref, AlignmentContext context) {
+        return 0;
+    }
+
+    @Override
+    public Integer reduceInit() {
+        return 0;
+    }
+
+    @Override
+    public Integer reduce(Integer counter, Integer sum) {
+        return counter + sum;
+    }
+
+    @Override
+    public void onTraversalDone(Integer sum) {
+        if ( iterations > 1 ) {
+            for ( int i = 0; i < iterations; i++ ) {
+                DiffEngine.SummaryReportParams params = new DiffEngine.SummaryReportParams(out, 20, 10, 0, -1, false);
+                boolean success = DiffEngine.simpleDiffFiles(masterFile, testFile, MAX_OBJECTS_TO_READ, params);
+                logger.info("Iteration " + i + " success " + success);
+            }
+        } else {
+            //out.printf("Reading master file %s%n", masterFile);
+            DiffElement master = diffEngine.createDiffableFromFile(masterFile, MAX_OBJECTS_TO_READ);
+            logger.info(String.format("Read %d objects", master.size()));
+            //out.printf("Reading test file %s%n", testFile);
+            DiffElement test = diffEngine.createDiffableFromFile(testFile, MAX_OBJECTS_TO_READ);
+            logger.info(String.format("Read %d objects", test.size()));
+
+//        out.printf("Master diff objects%n");
+//        out.println(master.toString());
+//        out.printf("Test diff objects%n");
+//        out.println(test.toString());
+
+            List<Difference> diffs = diffEngine.diff(master, test);
+            logger.info(String.format("Done computing diff with %d differences found", diffs.size()));
+            if ( showItemizedDifferences ) {
+                out.printf("Itemized results%n");
+                for ( Difference diff : diffs )
+                    out.printf("DIFF: %s%n", diff.toString());
+            }
+
+            DiffEngine.SummaryReportParams params = new DiffEngine.SummaryReportParams(out,
+                    MAX_DIFFS, MAX_COUNT1_DIFFS, minCountForDiff,
+                    maxRawDiffsToSummary, doPairwise);
+            params.setDescending(false);
+            diffEngine.reportSummarizedDifferences(diffs, params);
+            logger.info(String.format("Done summarizing differences"));
+        }
+    }
+}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/examples/GATKDocsExample.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/examples/GATKDocsExample.java
index 80fca67..d95fd2b 100644
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/examples/GATKDocsExample.java
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/examples/GATKDocsExample.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -27,9 +27,9 @@ package org.broadinstitute.gatk.tools.walkers.examples;
 
 import org.broadinstitute.gatk.utils.commandline.Argument;
 import org.broadinstitute.gatk.utils.commandline.Hidden;
-import org.broadinstitute.gatk.engine.contexts.AlignmentContext;
-import org.broadinstitute.gatk.engine.contexts.ReferenceContext;
-import org.broadinstitute.gatk.engine.refdata.RefMetaDataTracker;
+import org.broadinstitute.gatk.utils.contexts.AlignmentContext;
+import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
+import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
 import org.broadinstitute.gatk.engine.walkers.RodWalker;
 
 /**
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/examples/GATKPaperGenotyper.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/examples/GATKPaperGenotyper.java
index 8cfc14e..54db6f3 100644
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/examples/GATKPaperGenotyper.java
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/examples/GATKPaperGenotyper.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -28,9 +28,9 @@ package org.broadinstitute.gatk.tools.walkers.examples;
 import org.broadinstitute.gatk.utils.commandline.Argument;
 import org.broadinstitute.gatk.utils.commandline.Output;
 import org.broadinstitute.gatk.engine.CommandLineGATK;
-import org.broadinstitute.gatk.engine.contexts.AlignmentContext;
-import org.broadinstitute.gatk.engine.contexts.ReferenceContext;
-import org.broadinstitute.gatk.engine.refdata.RefMetaDataTracker;
+import org.broadinstitute.gatk.utils.contexts.AlignmentContext;
+import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
+import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
 import org.broadinstitute.gatk.engine.walkers.LocusWalker;
 import org.broadinstitute.gatk.engine.walkers.TreeReducible;
 import org.broadinstitute.gatk.utils.genotyper.DiploidGenotype;
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/fasta/FastaAlternateReferenceMaker.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/fasta/FastaAlternateReferenceMaker.java
index 8fd4152..5be2e89 100644
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/fasta/FastaAlternateReferenceMaker.java
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/fasta/FastaAlternateReferenceMaker.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -31,13 +31,13 @@ import org.broadinstitute.gatk.utils.commandline.Input;
 import org.broadinstitute.gatk.utils.commandline.RodBinding;
 import org.broadinstitute.gatk.engine.CommandLineGATK;
 import org.broadinstitute.gatk.engine.arguments.StandardVariantContextInputArgumentCollection;
-import org.broadinstitute.gatk.engine.contexts.AlignmentContext;
-import org.broadinstitute.gatk.engine.contexts.ReferenceContext;
-import org.broadinstitute.gatk.engine.refdata.RefMetaDataTracker;
+import org.broadinstitute.gatk.utils.contexts.AlignmentContext;
+import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
+import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
 import org.broadinstitute.gatk.engine.walkers.*;
 import org.broadinstitute.gatk.utils.BaseUtils;
 import org.broadinstitute.gatk.utils.GenomeLoc;
-import org.broadinstitute.gatk.utils.SampleUtils;
+import org.broadinstitute.gatk.engine.SampleUtils;
 import org.broadinstitute.gatk.utils.collections.Pair;
 import org.broadinstitute.gatk.utils.exceptions.UserException;
 import org.broadinstitute.gatk.utils.help.DocumentedGATKFeature;
@@ -51,41 +51,41 @@ import java.util.Set;
 
 
 /**
- * Generates an alternative reference sequence over the specified interval.
+ * Generate an alternative reference sequence over the specified interval
  *
- * <p>
- * Given variant tracks, it replaces the reference bases at variation sites with the bases supplied by the ROD(s).
- * Additionally, allows for one or more "snpmask" VCFs to set overlapping bases to 'N'.
+ * <p>Given a variant callset, this tool replaces the reference bases at variation sites with the bases supplied in the
+ * corresponding callset records. Additionally, it allows for one or more "snpmask" VCFs to set overlapping bases to 'N'.</p>
  *
- * The output format can be partially controlled using the provided command-line arguments.
+ * <p>The output format can be partially controlled using the provided command-line arguments.
  * Specify intervals with the usual -L argument to output only the reference bases within your intervals.
  * Overlapping intervals are automatically merged; reference bases for each disjoint interval will be output as a
- * separate fasta sequence (named numerically in order).
- *
- * Several important notes:
- * 1) if there are multiple variants that start at a site, it chooses one of them randomly.
- * 2) when there are overlapping indels (but with different start positions) only the first will be chosen.
- * 3) this tool works only for SNPs and for simple indels (but not for things like complex substitutions).
- * Reference bases for each interval will be output as a separate fasta sequence (named numerically in order).
+ * separate fasta sequence (named numerically in order).</p>
  *
+ * <h3>Caveats</h3>
+ * <ul>
+ *     <li>If there are multiple variants that start at a site, it chooses one of them randomly.</li>
+ *     <li>When there are overlapping indels (but with different start positions) only the first will be chosen.</li>
+ *     <li>This tool works only for SNPs and for simple indels (but not for things like complex substitutions).</li>
+ * </ul>
+
  * <h3>Input</h3>
  * <p>
- * The reference, requested intervals, and any number of variant rod files.
+ * The reference, requested intervals, and any number of variant ROD files.
  * </p>
  *
  * <h3>Output</h3>
  * <p>
- * A fasta file representing the requested intervals.
+ * A FASTA file representing the requested intervals.
  * </p>
  *
- * <h3>Examples</h3>
+ * <h3>Usage example</h3>
  * <pre>
- * java -Xmx2g -jar GenomeAnalysisTK.jar \
- *   -R ref.fasta \
+ * java -jar GenomeAnalysisTK.jar \
  *   -T FastaAlternateReferenceMaker \
+ *   -R reference.fasta \
  *   -o output.fasta \
  *   -L input.intervals \
- *   --variant input.vcf \
+ *   -V input.vcf \
  *   [--snpmask mask.vcf]
  * </pre>
  *
@@ -102,13 +102,18 @@ public class FastaAlternateReferenceMaker extends FastaReferenceMaker {
     protected StandardVariantContextInputArgumentCollection variantCollection = new StandardVariantContextInputArgumentCollection();
 
     /**
-     * Snps from this file are used as a mask (inserting N's in the sequence) when constructing the alternate reference
-     * (regardless of whether they overlap a variant site).
+     * SNPs from this file are used as a mask (inserting N's in the sequence) when constructing the alternate reference
      */
     @Input(fullName="snpmask", shortName = "snpmask", doc="SNP mask VCF file", required=false)
     protected RodBinding<VariantContext> snpmask;
 
     /**
+     * Gives priority to a SNP mask over an input VCF for a site. Only has an effect if the --snpmask argument is used.
+     */
+    @Argument(fullName="snpmaskPriority", shortName = "snpmaskPriority", doc="SNP mask priority", required=false)
+    protected Boolean snpmaskPriority = false;
+
+    /**
      * This option will generate an error if the specified sample does not exist in the VCF.
      * Non-diploid (or non-called) genotypes are ignored.
      */
@@ -138,6 +143,13 @@ public class FastaAlternateReferenceMaker extends FastaReferenceMaker {
 
         final String refBase = String.valueOf((char)ref.getBase());
 
+        // If we have a mask at this site, use it
+        if ( snpmaskPriority ){
+            final Pair<GenomeLoc, String> mask = maskSnp(tracker, context);
+            if ( mask != null )
+                return mask;
+        }
+
         // Check to see if we have a called snp
         for ( final VariantContext vc : tracker.getValues(variantCollection.variants, ref.getLocus()) ) {
             if ( vc.isFiltered() )
@@ -155,11 +167,10 @@ public class FastaAlternateReferenceMaker extends FastaReferenceMaker {
             }
         }
 
-        // if we don't have a called site, and we have a mask at this site, mask it
-        for ( final VariantContext vc : tracker.getValues(snpmask) ) {
-            if ( vc.isSNP()) {
-                return new Pair<>(context.getLocation(), "N");
-            }
+        if ( !snpmaskPriority ){
+            final Pair<GenomeLoc, String> mask = maskSnp(tracker, context);
+            if ( mask != null )
+                return mask;
         }
 
         // if we got here then we're just ref
@@ -167,6 +178,23 @@ public class FastaAlternateReferenceMaker extends FastaReferenceMaker {
     }
 
     /**
+     * Mask a SNP (inserting N's in the sequence)
+     *
+     * @param tracker the Reference Metadata available at a particular site in the genome
+     * @param context the locus context data
+     * @return mask at the locus or null if no SNP at that locus
+     */
+    private Pair<GenomeLoc, String> maskSnp(final RefMetaDataTracker tracker, final AlignmentContext context){
+        for (final VariantContext vc : tracker.getValues(snpmask)) {
+            if (vc.isSNP()) {
+                return new Pair<>(context.getLocation(), "N");
+            }
+        }
+
+        return null;
+    }
+
+    /**
      * Returns the IUPAC encoding for the given genotype or the reference base if not possible
      *
      * @param genotype  the genotype to encode
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/fasta/FastaReferenceMaker.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/fasta/FastaReferenceMaker.java
index 8459506..4eb8f3e 100644
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/fasta/FastaReferenceMaker.java
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/fasta/FastaReferenceMaker.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -28,9 +28,9 @@ package org.broadinstitute.gatk.tools.walkers.fasta;
 import org.broadinstitute.gatk.utils.commandline.Argument;
 import org.broadinstitute.gatk.utils.commandline.Output;
 import org.broadinstitute.gatk.engine.CommandLineGATK;
-import org.broadinstitute.gatk.engine.contexts.AlignmentContext;
-import org.broadinstitute.gatk.engine.contexts.ReferenceContext;
-import org.broadinstitute.gatk.engine.refdata.RefMetaDataTracker;
+import org.broadinstitute.gatk.utils.contexts.AlignmentContext;
+import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
+import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
 import org.broadinstitute.gatk.engine.walkers.RefWalker;
 import org.broadinstitute.gatk.utils.GenomeLoc;
 import org.broadinstitute.gatk.utils.collections.Pair;
@@ -40,13 +40,13 @@ import org.broadinstitute.gatk.utils.help.HelpConstants;
 import java.io.PrintStream;
 
 /**
- * Renders a new reference in FASTA format consisting of only those loci provided in the input data set.
+ * Create a subset of a FASTA reference sequence
  *
- * <p>
- * The output format can be partially controlled using the provided command-line arguments.
- * Specify intervals with the usual -L argument to output only the reference bases within your intervals.
+ * <p>This tool creates a new reference in FASTA format consisting of only those positions or intervals
+ * provided in the input data set. The output format can be partially controlled using the provided command-line
+ * arguments. Specify intervals with the usual -L argument to output only the reference bases within your intervals.
  * Overlapping intervals are automatically merged; reference bases for each disjoint interval will be output as a
- * separate fasta sequence (named numerically in order).
+ * separate fasta sequence (named numerically in order).</p>
  *
  * <h3>Input</h3>
  * <p>
@@ -55,14 +55,22 @@ import java.io.PrintStream;
  *
  * <h3>Output</h3>
  * <p>
- * A fasta file representing the requested intervals.
+ * A fasta file representing the requested intervals. Each interval has a description line starting with a greater-than (">") symbol followed by sequence data.
+ * The description begins with the contig name followed by the beginning position on the contig.
+ * <pre>
+ * For example, the fasta file for contig 1 and intervals 1:3-1:4 and 1:6-1:9
+ * >1 1:3
+ * AT
+ * >1 1:6
+ * GGGG
+ * </pre>
  * </p>
  *
- * <h3>Examples</h3>
+ * <h3>Usage example</h3>
  * <pre>
- * java -Xmx2g -jar GenomeAnalysisTK.jar \
- *   -R ref.fasta \
+ * java -jar GenomeAnalysisTK.jar \
  *   -T FastaReferenceMaker \
+ *   -R reference.fasta \
  *   -o output.fasta \
  *   -L input.intervals
  * </pre>
@@ -104,18 +112,20 @@ public class FastaReferenceMaker extends RefWalker<Pair<GenomeLoc, String>, Geno
         // if there is no interval to the left, then this is the first one
         if ( sum == null ) {
             sum = value.first;
+            fasta.setName(fasta.getName() + " " + sum.toString());
             fasta.append(value.second);
         }
-        // if the intervals don't overlap, print out the leftmost one and start a new one
+        // if the intervals are not contiguous, print out the leftmost one and start a new one
         // (end of contig or new interval)
-        else if ( value.first.getStart() != sum.getStop() + 1 ) {
+        else if ( value.first.getStart() != sum.getStop() + 1 || ! value.first.getContig().equals(sum.getContig()) ) {
             fasta.flush();
             sum = value.first;
+            fasta.setName(fasta.getName() + " " + sum.toString());
             fasta.append(value.second);
         }
         // otherwise, merge them
         else {
-            sum = getToolkit().getGenomeLocParser().setStop(sum, value.first.getStop());
+            sum = sum.setStop(sum, value.first.getStop());
             fasta.append(value.second);
         }
 		return sum;
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/fasta/FastaSequence.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/fasta/FastaSequence.java
index 013e356..062297b 100644
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/fasta/FastaSequence.java
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/fasta/FastaSequence.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/fasta/FastaStats.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/fasta/FastaStats.java
index 22a592b..01d26c0 100644
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/fasta/FastaStats.java
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/fasta/FastaStats.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -27,9 +27,9 @@ package org.broadinstitute.gatk.tools.walkers.fasta;
 
 import org.broadinstitute.gatk.utils.commandline.Output;
 import org.broadinstitute.gatk.engine.CommandLineGATK;
-import org.broadinstitute.gatk.engine.contexts.AlignmentContext;
-import org.broadinstitute.gatk.engine.contexts.ReferenceContext;
-import org.broadinstitute.gatk.engine.refdata.RefMetaDataTracker;
+import org.broadinstitute.gatk.utils.contexts.AlignmentContext;
+import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
+import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
 import org.broadinstitute.gatk.engine.walkers.RefWalker;
 import org.broadinstitute.gatk.utils.BaseUtils;
 import org.broadinstitute.gatk.utils.help.DocumentedGATKFeature;
@@ -52,11 +52,11 @@ import java.io.PrintStream;
  * Base counts are written to file if an output file name is given (with -o), otherwise output to stdout.
  * </p>
  *
- * <h3>Example</h3>
+ * <h3>Usage example</h3>
  * <pre>
- * java -Xmx2g -jar GenomeAnalysisTK.jar \
+ * java -jar GenomeAnalysisTK.jar \
  *   -T FastaStats \
- *   -R ref.fasta \
+ *   -R reference.fasta \
  *   [-o output.txt]
  * </pre>
  */
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/filters/ClusteredSnps.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/filters/ClusteredSnps.java
index 97bde83..c0de7b9 100644
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/filters/ClusteredSnps.java
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/filters/ClusteredSnps.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/filters/FiltrationContext.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/filters/FiltrationContext.java
index 513763b..e48c8f4 100644
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/filters/FiltrationContext.java
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/filters/FiltrationContext.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -25,7 +25,7 @@
 
 package org.broadinstitute.gatk.tools.walkers.filters;
 
-import org.broadinstitute.gatk.engine.contexts.ReferenceContext;
+import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
 import htsjdk.variant.variantcontext.VariantContext;
 
 
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/filters/FiltrationContextWindow.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/filters/FiltrationContextWindow.java
index 731b1a3..e47d54b 100644
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/filters/FiltrationContextWindow.java
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/filters/FiltrationContextWindow.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/filters/VariantFiltration.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/filters/VariantFiltration.java
index 3988498..3c61235 100644
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/filters/VariantFiltration.java
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/filters/VariantFiltration.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -26,19 +26,20 @@
 package org.broadinstitute.gatk.tools.walkers.filters;
 
 import htsjdk.tribble.Feature;
+import org.broadinstitute.gatk.utils.Utils;
 import org.broadinstitute.gatk.utils.commandline.*;
 import org.broadinstitute.gatk.engine.CommandLineGATK;
 import org.broadinstitute.gatk.engine.arguments.StandardVariantContextInputArgumentCollection;
-import org.broadinstitute.gatk.engine.contexts.AlignmentContext;
-import org.broadinstitute.gatk.engine.contexts.ReferenceContext;
-import org.broadinstitute.gatk.engine.refdata.RefMetaDataTracker;
+import org.broadinstitute.gatk.utils.contexts.AlignmentContext;
+import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
+import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
 import org.broadinstitute.gatk.engine.walkers.Reference;
 import org.broadinstitute.gatk.engine.walkers.RodWalker;
 import org.broadinstitute.gatk.engine.walkers.Window;
 import org.broadinstitute.gatk.utils.GenomeLoc;
-import org.broadinstitute.gatk.utils.SampleUtils;
+import org.broadinstitute.gatk.engine.SampleUtils;
 import org.broadinstitute.gatk.utils.help.HelpConstants;
-import org.broadinstitute.gatk.utils.variant.GATKVCFUtils;
+import org.broadinstitute.gatk.engine.GATKVCFUtils;
 import htsjdk.variant.vcf.*;
 import org.broadinstitute.gatk.utils.exceptions.UserException;
 import org.broadinstitute.gatk.utils.help.DocumentedGATKFeature;
@@ -49,11 +50,12 @@ import java.util.*;
 
 
 /**
- * Filters variant calls using a number of user-selectable, parameterizable criteria.
+ * Filter variant calls based on INFO and FORMAT annotations
  *
  * <p>
- * VariantFiltration is a GATK tool for hard-filtering variant calls based on certain criteria.
- * Records are hard-filtered by changing the value in the FILTER field to something other than PASS.
+ * This tool is designed for hard-filtering variant calls based on certain criteria.
+ * Records are hard-filtered by changing the value in the FILTER field to something other than PASS. Filtered records
+ * will be preserved in the output unless their removal is requested in the command line. </p>
  *
  * <h3>Input</h3>
  * <p>
@@ -65,11 +67,11 @@ import java.util.*;
  * A filtered VCF.
  * </p>
  *
- * <h3>Examples</h3>
+ * <h3>Usage example</h3>
  * <pre>
- * java -Xmx2g -jar GenomeAnalysisTK.jar \
- *   -R ref.fasta \
+ * java -jar GenomeAnalysisTK.jar \
  *   -T VariantFiltration \
+ *   -R reference.fasta \
  *   -o output.vcf \
  *   --variant input.vcf \
  *   --filterExpression "AB < 0.2 || MQ0 > 50" \
@@ -103,13 +105,13 @@ public class VariantFiltration extends RodWalker<Integer, Integer> {
      * --filterName One --filterExpression "X < 1" --filterName Two --filterExpression "X > 2").
      */
     @Argument(fullName="filterExpression", shortName="filter", doc="One or more expression used with INFO fields to filter", required=false)
-    protected ArrayList<String> FILTER_EXPS = new ArrayList<String>();
+    protected ArrayList<String> filterExpressions = new ArrayList<String>();
 
     /**
      * This name is put in the FILTER field for variants that get filtered.  Note that there must be a 1-to-1 mapping between filter expressions and filter names.
      */
     @Argument(fullName="filterName", shortName="filterName", doc="Names to use for the list of filters", required=false)
-    protected ArrayList<String> FILTER_NAMES = new ArrayList<String>();
+    protected ArrayList<String> filterNames = new ArrayList<String>();
 
     /**
      * Similar to the INFO field based expressions, but used on the FORMAT (genotype) fields instead.
@@ -119,13 +121,13 @@ public class VariantFiltration extends RodWalker<Integer, Integer> {
      * expressions isCalled, isNoCall, isMixed, and isAvailable, in accordance with the methods of the Genotype object.
      */
     @Argument(fullName="genotypeFilterExpression", shortName="G_filter", doc="One or more expression used with FORMAT (sample/genotype-level) fields to filter (see documentation guide for more info)", required=false)
-    protected ArrayList<String> GENOTYPE_FILTER_EXPS = new ArrayList<String>();
+    protected ArrayList<String> genotypeFilterExpressions = new ArrayList<String>();
 
     /**
      * Similar to the INFO field based expressions, but used on the FORMAT (genotype) fields instead.
      */
     @Argument(fullName="genotypeFilterName", shortName="G_filterName", doc="Names to use for the list of sample/genotype filters (must be a 1-to-1 mapping); this name is put in the FILTER field for variants that get filtered", required=false)
-    protected ArrayList<String> GENOTYPE_FILTER_NAMES = new ArrayList<String>();
+    protected ArrayList<String> genotypeFilterNames = new ArrayList<String>();
 
     /**
      * Works together with the --clusterWindowSize argument.
@@ -140,7 +142,7 @@ public class VariantFiltration extends RodWalker<Integer, Integer> {
     protected Integer clusterWindow = 0;
 
     @Argument(fullName="maskExtension", shortName="maskExtend", doc="How many bases beyond records from a provided 'mask' rod should variants be filtered", required=false)
-    protected Integer MASK_EXTEND = 0;
+    protected Integer maskExtension = 0;
 
     /**
      * When using the -mask argument, the maskName will be annotated in the variant record.
@@ -149,7 +151,7 @@ public class VariantFiltration extends RodWalker<Integer, Integer> {
      * (e.g. if masking against Hapmap, use -maskName=hapmap for the normal masking and -maskName=not_hapmap for the reverse masking).
      */
     @Argument(fullName="maskName", shortName="maskName", doc="The text to put in the FILTER field if a 'mask' rod is provided and overlaps with a variant call", required=false)
-    protected String MASK_NAME = "Mask";
+    protected String maskName = "Mask";
 
     /**
      * By default, if the -mask argument is used, any variant falling in a mask will be filtered.
@@ -166,7 +168,7 @@ public class VariantFiltration extends RodWalker<Integer, Integer> {
      * Use this argument to have it evaluate as failing filters instead for these cases.
      */
     @Argument(fullName="missingValuesInExpressionsShouldEvaluateAsFailing", doc="When evaluating the JEXL expressions, missing values should be considered failing the expression", required=false)
-    protected Boolean FAIL_MISSING_VALUES = false;
+    protected Boolean failMissingValues = false;
 
     /**
      * Invalidate previous filters applied to the VariantContext, applying only the filters here
@@ -174,6 +176,24 @@ public class VariantFiltration extends RodWalker<Integer, Integer> {
     @Argument(fullName="invalidatePreviousFilters",doc="Remove previous filters applied to the VCF",required=false)
     boolean invalidatePrevious = false;
 
+    /**
+     * Invert the selection criteria for --filterExpression
+     */
+    @Argument(fullName="invertFilterExpression", shortName="invfilter", doc="Invert the selection criteria for --filterExpression", required=false)
+    protected boolean invertFilterExpression = false;
+
+    /**
+     * Invert the selection criteria for --genotypeFilterExpression
+     */
+    @Argument(fullName="invertGenotypeFilterExpression", shortName="invG_filter", doc="Invert the selection criteria for --genotypeFilterExpression", required=false)
+    protected boolean invertGenotypeFilterExpression = false;
+
+    /**
+     * If this argument is provided, set filtered genotypes to no-call (./.).
+     */
+    @Argument(fullName="setFilteredGtToNocall", required=false, doc="Set filtered genotypes to no-call")
+    private boolean setFilteredGenotypesToNocall = false;
+
     // JEXL expressions for the filters
     List<VariantContextUtils.JexlVCMatchExp> filterExps;
     List<VariantContextUtils.JexlVCMatchExp> genotypeFilterExps;
@@ -184,9 +204,23 @@ public class VariantFiltration extends RodWalker<Integer, Integer> {
 
     // the structures necessary to initialize and maintain a windowed context
     private FiltrationContextWindow variantContextWindow;
-    private static final int windowSize = 10;  // 10 variants on either end of the current one
+    private static final int WINDOW_SIZE = 10;  // 10 variants on either end of the current one
     private ArrayList<FiltrationContext> windowInitializer = new ArrayList<FiltrationContext>();
 
+    private final List<Allele> diploidNoCallAlleles = Arrays.asList(Allele.NO_CALL, Allele.NO_CALL);
+
+    /**
+     * Prepend inverse phrase to description if --invertFilterExpression
+     *
+     * @param description the description
+     * @return the description with inverse prepended if --invert_filter_expression
+     */
+    private String possiblyInvertFilterExpression( String description ){
+        if ( invertFilterExpression )
+            description = "Inverse of: " + description;
+        return description;
+    }
+
     private void initializeVcfWriter() {
 
         final List<String> inputNames = Arrays.asList(variantCollection.variants.getName());
@@ -198,19 +232,19 @@ public class VariantFiltration extends RodWalker<Integer, Integer> {
         if ( clusterWindow > 0 )
             hInfo.add(new VCFFilterHeaderLine(CLUSTERED_SNP_FILTER_NAME, "SNPs found in clusters"));
 
-        if ( genotypeFilterExps.size() > 0 )
+        if ( !genotypeFilterExps.isEmpty() )
             hInfo.add(VCFStandardHeaderLines.getFormatLine(VCFConstants.GENOTYPE_FILTER_KEY));
 
         try {
             for ( VariantContextUtils.JexlVCMatchExp exp : filterExps )
-                hInfo.add(new VCFFilterHeaderLine(exp.name, exp.exp.toString()));
+                hInfo.add(new VCFFilterHeaderLine(exp.name, possiblyInvertFilterExpression(exp.exp.toString())));
             for ( VariantContextUtils.JexlVCMatchExp exp : genotypeFilterExps )
-                hInfo.add(new VCFFilterHeaderLine(exp.name, exp.exp.toString()));
+                hInfo.add(new VCFFilterHeaderLine(exp.name, possiblyInvertFilterExpression(exp.exp.toString())));
 
             if ( mask.isBound() ) {
                 if (filterRecordsNotInMask)
-                    hInfo.add(new VCFFilterHeaderLine(MASK_NAME, "Doesn't overlap a user-input mask"));
-                else hInfo.add(new VCFFilterHeaderLine(MASK_NAME, "Overlaps a user-input mask"));
+                    hInfo.add(new VCFFilterHeaderLine(maskName, "Doesn't overlap a user-input mask"));
+                else hInfo.add(new VCFFilterHeaderLine(maskName, "Overlaps a user-input mask"));
             }
         } catch (IllegalArgumentException e) {
             throw new UserException.BadInput(e.getMessage());
@@ -223,13 +257,13 @@ public class VariantFiltration extends RodWalker<Integer, Integer> {
         if ( clusterWindow > 0 )
             clusteredSNPs = new ClusteredSnps(getToolkit().getGenomeLocParser(),clusterSize, clusterWindow);
 
-        if ( MASK_EXTEND < 0 )
+        if ( maskExtension < 0 )
              throw new UserException.BadArgumentValue("maskExtension", "negative values are not allowed");
 
         if (filterRecordsNotInMask && !mask.isBound())
             throw new UserException.BadArgumentValue("filterNotInMask","argument not allowed if mask argument is not provided");
-        filterExps = VariantContextUtils.initializeMatchExps(FILTER_NAMES, FILTER_EXPS);
-        genotypeFilterExps = VariantContextUtils.initializeMatchExps(GENOTYPE_FILTER_NAMES, GENOTYPE_FILTER_EXPS);
+        filterExps = VariantContextUtils.initializeMatchExps(filterNames, filterExpressions);
+        genotypeFilterExps = VariantContextUtils.initializeMatchExps(genotypeFilterNames, genotypeFilterExpressions);
 
         VariantContextUtils.engine.get().setSilent(true);
 
@@ -261,15 +295,9 @@ public class VariantFiltration extends RodWalker<Integer, Integer> {
             if ( invalidatePrevious ) {
                 vc = (new VariantContextBuilder(vc)).filters(new HashSet<String>()).make();
             }
+
             // filter based on previous mask position
-            if ( previousMaskPosition != null &&                                       // we saw a previous mask site
-                 previousMaskPosition.getContig().equals(vc.getChr()) &&               // it's on the same contig
-                 vc.getStart() - previousMaskPosition.getStop() <= MASK_EXTEND &&      // it's within the mask area (multi-base masks that overlap this site will always give a negative distance)
-                 (vc.getFilters() == null || !vc.getFilters().contains(MASK_NAME)) ) { // the filter hasn't already been applied
-                Set<String> filters = new LinkedHashSet<String>(vc.getFilters());
-                filters.add(MASK_NAME);
-                vc = new VariantContextBuilder(vc).filters(filters).make();
-            }
+            vc = addMaskIfCoversVariant(vc, previousMaskPosition, maskName, maskExtension, false);
 
             FiltrationContext varContext = new FiltrationContext(ref, vc);
 
@@ -279,11 +307,11 @@ public class VariantFiltration extends RodWalker<Integer, Integer> {
                 // if this is a mask position, filter previous records
                 if ( hasMask ) {
                     for ( FiltrationContext prevVC : windowInitializer )
-                        prevVC.setVariantContext(checkMaskForPreviousLocation(prevVC.getVariantContext(), ref.getLocus()));
+                        prevVC.setVariantContext(addMaskIfCoversVariant(prevVC.getVariantContext(), ref.getLocus(), maskName, maskExtension, true));
                 }
 
                 windowInitializer.add(varContext);
-                if ( windowInitializer.size() == windowSize ) {
+                if ( windowInitializer.size() == WINDOW_SIZE ) {
                     variantContextWindow = new FiltrationContextWindow(windowInitializer);
                     windowInitializer = null;
                 }
@@ -293,7 +321,7 @@ public class VariantFiltration extends RodWalker<Integer, Integer> {
                 if ( hasMask ) {
                     for ( FiltrationContext prevVC : variantContextWindow.getWindow(10, 10) ) {
                         if ( prevVC != null )
-                            prevVC.setVariantContext(checkMaskForPreviousLocation(prevVC.getVariantContext(), ref.getLocus()));
+                            prevVC.setVariantContext(addMaskIfCoversVariant(prevVC.getVariantContext(), ref.getLocus(), maskName, maskExtension, true));
                     }
                 }
 
@@ -305,12 +333,44 @@ public class VariantFiltration extends RodWalker<Integer, Integer> {
         return 1;
     }
 
-    private VariantContext checkMaskForPreviousLocation(VariantContext vc, GenomeLoc maskLoc) {
-        if ( maskLoc.getContig().equals(vc.getChr()) &&               // it's on the same contig
-             maskLoc.getStart() - vc.getEnd() <= MASK_EXTEND &&       // it's within the mask area (multi-base VCs that overlap this site will always give a negative distance)
-             (vc.getFilters() == null || !vc.getFilters().contains(MASK_NAME)) ) { // the filter hasn't already been applied
+    /**
+     * Helper function to check if a mask covers the variant location.
+     *
+     * @param vc variant context
+     * @param genomeLoc genome location
+     * @param maskName name of the mask
+     * @param maskExtension bases beyond the mask
+     * @param vcBeforeLoc if true, variant context is before the genome location; if false, the converse is true.
+     * @return true if the genome location is within the extended mask area, false otherwise
+     */
+    protected static boolean doesMaskCoverVariant(VariantContext vc, GenomeLoc genomeLoc, String maskName, int maskExtension, boolean vcBeforeLoc) {
+        boolean logic = genomeLoc != null &&                                        // have a location
+                genomeLoc.getContig().equals(vc.getChr()) &&                        // it's on the same contig
+                (vc.getFilters() == null || !vc.getFilters().contains(maskName));   // the filter hasn't already been applied
+        if ( logic ) {
+            if (vcBeforeLoc)
+                return genomeLoc.getStart() - vc.getEnd() <= maskExtension;  // it's within the mask area (multi-base VCs that overlap this site will always give a negative distance)
+            else
+                return vc.getStart() - genomeLoc.getStop() <= maskExtension;
+        } else {
+            return false;
+        }
+    }
+
+    /**
+     * Add mask to variant context filters if it covers the it's location
+     *
+     * @param vc VariantContext
+     * @param genomeLoc genome location
+     * @param maskName name of the mask
+     * @param maskExtension bases beyond the mask
+     * @param locStart if true, start at genome location and end at VariantContext. If false, do the opposite.
+     * @return VariantContext with the mask added if the VariantContext is within the extended mask area
+     */
+    private VariantContext addMaskIfCoversVariant(VariantContext vc, GenomeLoc genomeLoc, String maskName, int maskExtension, boolean locStart) {
+        if (doesMaskCoverVariant(vc, genomeLoc, maskName, maskExtension, locStart) ) {
             Set<String> filters = new LinkedHashSet<String>(vc.getFilters());
-            filters.add(MASK_NAME);
+            filters.add(maskName);
             vc = new VariantContextBuilder(vc).filters(filters).make();
         }
 
@@ -327,7 +387,7 @@ public class VariantFiltration extends RodWalker<Integer, Integer> {
         final VariantContextBuilder builder = new VariantContextBuilder(vc);
 
         // make new Genotypes based on filters
-        if ( genotypeFilterExps.size() > 0 ) {
+        if ( !genotypeFilterExps.isEmpty() || setFilteredGenotypesToNocall ) {
             GenotypesContext genotypes = GenotypesContext.create(vc.getGenotypes().size());
 
             // for each genotype, check filters then create a new object
@@ -336,12 +396,17 @@ public class VariantFiltration extends RodWalker<Integer, Integer> {
                     final List<String> filters = new ArrayList<String>();
                     if ( g.isFiltered() ) filters.add(g.getFilters());
 
+                    // Add if expression filters the variant context
                     for ( VariantContextUtils.JexlVCMatchExp exp : genotypeFilterExps ) {
-                        if ( VariantContextUtils.match(vc, g, exp) )
+                        if ( Utils.invertLogic(VariantContextUtils.match(vc, g, exp), invertGenotypeFilterExpression) )
                             filters.add(exp.name);
                     }
 
-                    genotypes.add(new GenotypeBuilder(g).filters(filters).make());
+                    // if sample is filtered and --setFilteredGtToNocall, set genotype to non-call
+                    if ( !filters.isEmpty() && setFilteredGenotypesToNocall )
+                        genotypes.add(new GenotypeBuilder(g).filters(filters).alleles(diploidNoCallAlleles).make());
+                    else
+                        genotypes.add(new GenotypeBuilder(g).filters(filters).make());
                 } else {
                     genotypes.add(g);
                 }
@@ -359,11 +424,11 @@ public class VariantFiltration extends RodWalker<Integer, Integer> {
 
         for ( VariantContextUtils.JexlVCMatchExp exp : filterExps ) {
             try {
-                if ( VariantContextUtils.match(vc, exp) )
+                if ( Utils.invertLogic(VariantContextUtils.match(vc, exp), invertFilterExpression) )
                     filters.add(exp.name);
             } catch (Exception e) {
                 // do nothing unless specifically asked to; it just means that the expression isn't defined for this context
-                if ( FAIL_MISSING_VALUES )
+                if ( failMissingValues  )
                     filters.add(exp.name);                         
             }
         }
@@ -388,11 +453,11 @@ public class VariantFiltration extends RodWalker<Integer, Integer> {
     public void onTraversalDone(Integer result) {
         // move the window over so that we can filter the last few variants
         if ( windowInitializer != null ) {
-            while ( windowInitializer.size() < windowSize )
+            while ( windowInitializer.size() < WINDOW_SIZE )
                 windowInitializer.add(null);
             variantContextWindow = new FiltrationContextWindow(windowInitializer);
         }
-        for (int i=0; i < windowSize; i++) {
+        for (int i=0; i < WINDOW_SIZE; i++) {
             variantContextWindow.moveWindow(null);
             filter();
         }
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/genotyper/AlleleList.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/genotyper/AlleleList.java
deleted file mode 100644
index 3b0aded..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/genotyper/AlleleList.java
+++ /dev/null
@@ -1,41 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.tools.walkers.genotyper;
-
-import htsjdk.variant.variantcontext.Allele;
-
-/**
- * Created by valentin on 5/12/14.
- */
-public interface AlleleList<A extends Allele> {
-
-    public int alleleCount();
-
-    public int alleleIndex(final A allele);
-
-    public A alleleAt(final int index);
-
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/genotyper/AlleleListPermutation.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/genotyper/AlleleListPermutation.java
deleted file mode 100644
index 8d95fa4..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/genotyper/AlleleListPermutation.java
+++ /dev/null
@@ -1,35 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.tools.walkers.genotyper;
-
-import htsjdk.variant.variantcontext.Allele;
-import org.broadinstitute.gatk.utils.collections.Permutation;
-
-/**
- * Marks allele list permutation implementation classes.
- */
-public interface AlleleListPermutation<A extends Allele> extends Permutation<A>, AlleleList<A> {
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/genotyper/AlleleListUtils.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/genotyper/AlleleListUtils.java
deleted file mode 100644
index 4f40f51..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/genotyper/AlleleListUtils.java
+++ /dev/null
@@ -1,334 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.tools.walkers.genotyper;
-
-import htsjdk.variant.variantcontext.Allele;
-
-import java.util.AbstractList;
-import java.util.List;
-
-/**
- * Utils operations on {@link AlleleList} instances.
- *
- * @author Valentin Ruano-Rubio <valentin at broadinstitute.org>
- */
-public class AlleleListUtils {
-
-    @SuppressWarnings("unchecked")
-    private static final AlleleList EMPTY_LIST = new AlleleList() {
-        @Override
-        public int alleleCount() {
-            return 0;
-        }
-
-        @Override
-        public int alleleIndex(final Allele allele) {
-            return -1;
-        }
-
-        @Override
-        public Allele alleleAt(final int index) {
-            throw new IllegalArgumentException("allele index is out of range");
-        }
-    };
-
-    /**
-     * Checks whether two allele lists are in fact the same.
-     * @param first one list to compare.
-     * @param second another list to compare.
-     *
-     * @throws IllegalArgumentException if if either list is {@code null}.
-     *
-     * @return {@code true} iff both list are equal.
-     */
-    public static <A extends Allele> boolean equals(final AlleleList<A> first, final AlleleList<A> second) {
-        if (first == null || second == null)
-            throw new IllegalArgumentException("no null list allowed");
-        final int alleleCount = first.alleleCount();
-        if (alleleCount != second.alleleCount())
-            return false;
-
-        for (int i = 0; i < alleleCount; i++) {
-            final A firstSample = first.alleleAt(i);
-            if (firstSample == null)
-                throw new IllegalStateException("no null samples allowed in sample-lists: first list at " + i);
-            final A secondSample = second.alleleAt(i);
-            if (secondSample == null)
-                throw new IllegalArgumentException("no null samples allowed in sample-list: second list at " + i);
-            if (!firstSample.equals(secondSample))
-                return false;
-        }
-
-        return true;
-    }
-
-    /**
-     * Resolves the index of the reference allele in an allele-list.
-     *
-     * <p>
-     *     If there is no reference allele, it returns -1. If there is more than one reference allele,
-     *     it returns the first occurrence (lowest index).
-     * </p>
-     *
-     * @param list the search allele-list.
-     * @param <A> allele component type.
-     *
-     * @throws IllegalArgumentException if {@code list} is {@code null}.
-     *
-     * @return -1 if there is no reference allele, or a values in [0,{@code list.alleleCount()}).
-     */
-    public static <A extends Allele> int indexOfReference(final AlleleList<A> list) {
-        if (list == null)
-            throw new IllegalArgumentException("the input list cannot be null");
-        final int alleleCount = list.alleleCount();
-        for (int i = 0; i < alleleCount; i++)
-            if (list.alleleAt(i).isReference())
-                return i;
-        return -1;
-    }
-
-
-    /**
-     * Returns a {@link java.util.List} unmodifiable view of a allele-list
-     * @param list the sample-list to wrap.
-     *
-     * @throws IllegalArgumentException if {@code list} is {@code null}.
-     *
-     * @return never {@code null}.
-     */
-    public static <A extends Allele> List<A> asList(final AlleleList<A> list) {
-        if (list == null)
-            throw new IllegalArgumentException("the list cannot be null");
-        return new AsList(list);
-    }
-
-    /**
-     * Returns an unmodifiable empty allele-list.
-     * @param <A> the allele class.
-     * @return never {@code null}.
-     */
-    @SuppressWarnings("unchecked")
-    public static final <A extends Allele> AlleleList<A> emptyList() {
-        return EMPTY_LIST;
-    }
-
-    /**
-     * Simple list view of a sample-list.
-     */
-    private static class AsList<A extends Allele> extends AbstractList<A> {
-
-        private final AlleleList<A> list;
-
-        private AsList(final AlleleList<A> list) {
-            this.list = list;
-
-        }
-
-        @Override
-        public A get(int index) {
-            return list.alleleAt(index);
-        }
-
-        @Override
-        public int size() {
-            return list.alleleCount();
-        }
-    }
-
-
-    /**
-     * Returns a permutation between two allele lists.
-     * @param original the original allele list.
-     * @param target the target allele list.
-     * @param <A> the allele type.
-     *
-     * @throws IllegalArgumentException if {@code original} or {@code target} is {@code null}, or
-     * elements in {@code target} is not contained in {@code original}
-     *
-     * @return never {@code null}
-     */
-    public static <A extends Allele> AlleleListPermutation<A> permutation(final AlleleList<A> original, final AlleleList<A> target) {
-        if (equals(original,target))
-            return new NonPermutation<>(original);
-        else
-            return new ActualPermutation<>(original,target);
-    }
-
-    private static class NonPermutation<A extends Allele> implements AlleleListPermutation<A> {
-
-        private final AlleleList<A> list;
-
-        public NonPermutation(final AlleleList<A> original) {
-            list = original;
-        }
-
-        @Override
-        public boolean isPartial() {
-            return false;
-        }
-
-        @Override
-        public boolean isNonPermuted() {
-            return true;
-        }
-
-        @Override
-        public int toIndex(int fromIndex) {
-            return fromIndex;
-        }
-
-        @Override
-        public int fromIndex(int toIndex) {
-            return toIndex;
-        }
-
-        @Override
-        public int fromSize() {
-            return list.alleleCount();
-        }
-
-        @Override
-        public int toSize() {
-            return list.alleleCount();
-        }
-
-        @Override
-        public List<A> fromList() {
-            return asList(list);
-        }
-
-        @Override
-        public java.util.List<A> toList() {
-            return asList(list);
-        }
-
-
-        @Override
-        public int alleleCount() {
-            return list.alleleCount();
-        }
-
-        @Override
-        public int alleleIndex(final A allele) {
-            return list.alleleIndex(allele);
-        }
-
-        @Override
-        public A alleleAt(final int index) {
-            return list.alleleAt(index);
-        }
-    }
-
-    private static class ActualPermutation<A extends Allele> implements AlleleListPermutation<A> {
-
-        private final AlleleList<A> from;
-
-        private final AlleleList<A> to;
-
-        private final int[] fromIndex;
-
-        private final boolean nonPermuted;
-
-        private final boolean isPartial;
-
-        private ActualPermutation(final AlleleList<A> original, final AlleleList<A> target) {
-            this.from = original;
-            this.to = target;
-            final int toSize = target.alleleCount();
-            final int fromSize = original.alleleCount();
-            if (fromSize < toSize)
-                throw new IllegalArgumentException("target allele list is not a permutation of the original allele list");
-
-            fromIndex = new int[toSize];
-            boolean nonPermuted = fromSize == toSize;
-            this.isPartial = !nonPermuted;
-            for (int i = 0; i < toSize; i++) {
-                final int originalIndex = original.alleleIndex(target.alleleAt(i));
-                if (originalIndex < 0)
-                    throw new IllegalArgumentException("target allele list is not a permutation of the original allele list");
-                fromIndex[i] = originalIndex;
-                nonPermuted &= originalIndex == i;
-            }
-
-            this.nonPermuted = nonPermuted;
-        }
-
-        @Override
-        public boolean isPartial() {
-            return isPartial;
-        }
-
-        @Override
-        public boolean isNonPermuted() {
-            return nonPermuted;
-        }
-
-        @Override
-        public int toIndex(int fromIndex) {
-            return to.alleleIndex(from.alleleAt(fromIndex));
-        }
-
-        @Override
-        public int fromIndex(int toIndex) {
-            return fromIndex[toIndex];
-        }
-
-        @Override
-        public int fromSize() {
-            return from.alleleCount();
-        }
-
-        @Override
-        public int toSize() {
-            return to.alleleCount();
-        }
-
-        @Override
-        public List<A> fromList() {
-            return asList(from);
-        }
-
-        @Override
-        public List<A> toList() {
-            return asList(to);
-        }
-
-        @Override
-        public int alleleCount() {
-            return to.alleleCount();
-        }
-
-        @Override
-        public int alleleIndex(final A allele) {
-            return to.alleleIndex(allele);
-        }
-
-        @Override
-        public A alleleAt(final int index) {
-            return to.alleleAt(index);
-        }
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/genotyper/IndexedAlleleList.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/genotyper/IndexedAlleleList.java
deleted file mode 100644
index 9238af7..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/genotyper/IndexedAlleleList.java
+++ /dev/null
@@ -1,95 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.tools.walkers.genotyper;
-
-import htsjdk.variant.variantcontext.Allele;
-import org.broadinstitute.gatk.utils.collections.IndexedSet;
-
-import java.util.Collection;
-
-/**
- * Allele list implementation using and indexed-set.
- *
- * @author Valentin Ruano-Rubio <valentin at broadinstitute.org>
- */
-public class IndexedAlleleList<A extends Allele> implements AlleleList<A> {
-
-    private final IndexedSet<A> alleles;
-
-    /**
-     * Constructs a new empty allele-list
-     */
-    public IndexedAlleleList() {
-        alleles = new IndexedSet<>();
-    }
-
-    /**
-     * Constructs a new allele-list from an array of alleles.
-     *
-     * <p>
-     *     Repeats in the input array will be ignored (keeping the first one). The order of alleles in the
-     *     resulting list is the same as in the natural traversal of the input collection.
-     *
-     * </p>
-     * @param alleles the original allele array
-     *
-     * @throws java.lang.IllegalArgumentException if {@code alleles} is {@code null} or contains {@code null}s.
-     */
-    public IndexedAlleleList(final A ... alleles) {
-        this.alleles = new IndexedSet<>(alleles);
-    }
-
-    /**
-     * Constructs a new allele-list from a collection of alleles.
-     *
-     * <p>
-     *     Repeats in the input collection will be ignored (keeping the first one). The order of alleles in the
-     *     resulting list is the same as in the natural traversal of the input collection.
-     *
-     * </p>
-     * @param alleles the original allele collection
-     *
-     * @throws java.lang.IllegalArgumentException if {@code alleles} is {@code null} or contains {@code null}s.
-     */
-    public IndexedAlleleList(final Collection<A> alleles) {
-        this.alleles = new IndexedSet<>(alleles);
-    }
-
-    @Override
-    public int alleleCount() {
-        return alleles.size();
-    }
-
-    @Override
-    public int alleleIndex(final A allele) {
-        return alleles.indexOf(allele);
-    }
-
-    @Override
-    public A alleleAt(final int index) {
-        return alleles.get(index);
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/genotyper/IndexedSampleList.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/genotyper/IndexedSampleList.java
deleted file mode 100644
index 94022c8..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/genotyper/IndexedSampleList.java
+++ /dev/null
@@ -1,96 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.tools.walkers.genotyper;
-
-import org.broadinstitute.gatk.utils.collections.IndexedSet;
-
-import java.util.Collection;
-
-/**
- * Simple implementation of a sample-list using and indexed-set.
- *
- * @author Valentin Ruano-Rubio <valentin at broadinstitute.org>
- */
-public class IndexedSampleList implements SampleList {
-
-    private final IndexedSet<String> samples;
-
-    /**
-     * Constructs an empty sample-list.
-     */
-    public IndexedSampleList() {
-        samples = new IndexedSet<>(0);
-    }
-
-    /**
-     * Constructs a sample-list from a collection of samples.
-     *
-     * <p>
-     *     Repeats in the input collection are ignored (just the first occurrence is kept).
-     *     Sample names will be sorted based on the traversal order
-     *     of the original collection.
-     * </p>
-     *
-     * @param samples input sample collection.
-     *
-     * @throws IllegalArgumentException if {@code samples} is {@code null} or it contains {@code nulls}.
-     */
-    public IndexedSampleList(final Collection<String> samples) {
-        this.samples = new IndexedSet<>(samples);
-    }
-
-    /**
-     * Constructs a sample-list from an array of samples.
-     *
-     * <p>
-     *     Repeats in the input array are ignored (just the first occurrence is kept).
-     *     Sample names will be sorted based on the traversal order
-     *     of the original array.
-     * </p>
-     *
-     * @param samples input sample array.
-     *
-     * @throws IllegalArgumentException if {@code samples} is {@code null} or it contains {@code nulls}.
-     */
-    public IndexedSampleList(final String ... samples) {
-        this.samples = new IndexedSet<>(samples);
-    }
-
-    @Override
-    public int sampleCount() {
-        return samples.size();
-    }
-
-    @Override
-    public int sampleIndex(final String sample) {
-        return samples.indexOf(sample);
-    }
-
-    @Override
-    public String sampleAt(int sampleIndex) {
-        return samples.get(sampleIndex);
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/genotyper/SampleList.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/genotyper/SampleList.java
deleted file mode 100644
index 29cb428..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/genotyper/SampleList.java
+++ /dev/null
@@ -1,42 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.tools.walkers.genotyper;
-
-/**
- * A indexed set of samples.
- *
- * <p>
- *     Implementing classes must guarantee that the sample list will remain <b>constant</b> through the life of the object.
- * </p>
- */
-public interface SampleList  {
-
-    public int sampleCount();
-
-    public int sampleIndex(final String sample);
-
-    public String sampleAt(final int sampleIndex);
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/genotyper/SampleListUtils.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/genotyper/SampleListUtils.java
deleted file mode 100644
index 2071f5d..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/genotyper/SampleListUtils.java
+++ /dev/null
@@ -1,224 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.tools.walkers.genotyper;
-
-import java.util.*;
-
-/**
- * Some utility operations on sample lists.
- *
- * @author Valentin Ruano-Rubio <valentin at broadinstitute.org>
- */
-public class SampleListUtils {
-
-    private static final SampleList EMPTY_LIST = new SampleList() {
-
-        @Override
-        public int sampleCount() {
-            return 0;
-        }
-
-        @Override
-        public int sampleIndex(String sample) {
-            return -1;
-        }
-
-        @Override
-        public String sampleAt(final int sampleIndex) {
-            throw new IllegalArgumentException("index is out of valid range");
-        }
-    };
-
-    /**
-     * Empty list.
-     *
-     * @return never {@code null}
-     */
-    public static SampleList emptyList() {
-        return EMPTY_LIST;
-    }
-
-    /**
-     * Checks whether two sample lists are in fact the same.
-     * @param first one list to compare.
-     * @param second another list to compare.
-     *
-     * @throws IllegalArgumentException if if either list is {@code null}.
-     *
-     * @return {@code true} iff both list are equal.
-     */
-    public static boolean equals(final SampleList first, final SampleList second) {
-        if (first == null || second == null)
-            throw new IllegalArgumentException("no null list allowed");
-        final int sampleCount = first.sampleCount();
-        if (sampleCount != second.sampleCount())
-            return false;
-
-        for (int i = 0; i < sampleCount; i++) {
-            final String firstSample = first.sampleAt(i);
-            if (firstSample == null)
-                throw new IllegalStateException("no null samples allowed in sample-lists: first list at " + i);
-            final String secondSample = second.sampleAt(i);
-            if (secondSample == null)
-                throw new IllegalArgumentException("no null samples allowed in sample-list: second list at " + i);
-            if (!firstSample.equals(secondSample))
-                return false;
-        }
-        return true;
-    }
-
-    /**
-     * Returns a {@link List} unmodifiable view of a sample-list
-     * @param list the sample-list to wrap.
-     *
-     * @throws IllegalArgumentException if {@code list} is {@code null}.
-     *
-     * @return never {@code null}.
-     */
-    public static List<String> asList(final SampleList list) {
-        if (list == null)
-            throw new IllegalArgumentException("the list cannot be null");
-        return new AsList(list);
-    }
-
-    /**
-     * Returns a {@link Set} unmodifiable view of the sample-list
-     *
-     * @param list the sample-list to wrap.
-     *
-     * @throws IllegalArgumentException if {@code list} is {@code null}
-     */
-    public static Set<String> asSet(final SampleList list) {
-        if (list == null)
-            throw new IllegalArgumentException("the list cannot be null");
-        return new AsSet(list);
-    }
-
-    /**
-     * Creates a list with a single sample.
-     *
-     * @param sampleName the sample name.
-     * @return never {@code sampleName}
-     */
-    public static SampleList singletonList(final String sampleName) {
-        if (sampleName == null)
-            throw new IllegalArgumentException("the sample name cannot be null");
-        return new SampleList() {
-
-            @Override
-            public int sampleCount() {
-                return 1;
-            }
-
-            @Override
-            public int sampleIndex(final String sample) {
-                return sampleName.equals(sample) ? 0 : -1;
-            }
-
-            @Override
-            public String sampleAt(int sampleIndex) {
-                if (sampleIndex == 0)
-                    return sampleName;
-                throw new IllegalArgumentException("index is out of bounds");
-            }
-        };
-    }
-
-    /**
-     * Simple list view of a sample-list.
-     */
-    private static class AsList extends AbstractList<String> {
-
-        private final SampleList list;
-
-        private AsList(final SampleList list) {
-            this.list = list;
-
-        }
-
-        @Override
-        public String get(int index) {
-            return list.sampleAt(index);
-        }
-
-        @Override
-        public int size() {
-            return list.sampleCount();
-        }
-    }
-
-    /**
-     * Simple set view of a sample-list
-     */
-    private static class AsSet extends AbstractSet<String> {
-
-        private final SampleList list;
-
-        private AsSet(final SampleList list) {
-            this.list = list;
-
-        }
-
-        @Override
-        public Iterator<String> iterator() {
-            return new Iterator<String>() {
-                private int index = 0;
-
-                @Override
-                public boolean hasNext() {
-                    return index < list.sampleCount();
-                }
-
-                @Override
-                public String next() {
-                    if (index >= list.sampleCount())
-                        throw new NoSuchElementException("iterating beyond sample list end");
-                    return list.sampleAt(index++);
-                }
-
-                @Override
-                public void remove() {
-                    throw new UnsupportedOperationException("unsupported operation exception");
-                }
-            };
-        }
-
-        @Override
-        public int size() {
-            return list.sampleCount();
-        }
-
-        @Override
-        public boolean contains(final Object obj) {
-            if (obj == null)
-                return false;
-            else if (obj instanceof String)
-                return list.sampleIndex(((String)obj)) >= 0;
-            else
-                return false;
-        }
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/haplotypecaller/HCMappingQualityFilter.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/haplotypecaller/HCMappingQualityFilter.java
index ce6fe06..96586c0 100644
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/haplotypecaller/HCMappingQualityFilter.java
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/haplotypecaller/HCMappingQualityFilter.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -32,7 +32,27 @@ import org.broadinstitute.gatk.engine.GenomeAnalysisEngine;
 import org.broadinstitute.gatk.engine.filters.ReadFilter;
 
 /**
- * Filter out reads with low mapping qualities.
+ * Filter out reads with low mapping qualities for HaplotypeCaller
+ *
+ * <p>This filter is applied by default for HaplotypeCaller and is designed to ensure that only reads that are likely
+ * to be informative will be used in the reassembly process. It performs the same basic function as the regular
+ * MappingQualityFilter, but it is used at specific points in the operation of HC where it is helpful
+ * to be able to apply a different quality threshold from the general case.</p>
+ *
+ * <h3>Usage example</h3>
+ *
+ * <h4>Set the HC-specific mapping quality filter to filter out reads with MAPQ < 10</h4>
+ * <pre>
+ *     java -jar GenomeAnalysisTk.jar \
+ *         -T HaplotypeCaller \
+ *         -R reference.fasta \
+ *         -I input.bam \
+ *         -o output.vcf \
+ *         -mmq 10
+ * </pre>
+ *
+ * <p>Note that the HCMappingQuality filter itself does not need to be specified in the command line because it is set
+ * automatically for HaplotypeCaller.</p>
  *
  * @author mdepristo
  */
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/help/WalkerDoclet.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/help/WalkerDoclet.java
new file mode 100644
index 0000000..ac15e74
--- /dev/null
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/help/WalkerDoclet.java
@@ -0,0 +1,64 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.tools.walkers.help;
+
+import com.sun.javadoc.RootDoc;
+import org.broadinstitute.gatk.engine.CommandLineGATK;
+import org.broadinstitute.gatk.tools.walkers.qc.DocumentationTest;
+import org.broadinstitute.gatk.utils.exceptions.UserException;
+import org.broadinstitute.gatk.utils.help.DocumentedGATKFeatureHandler;
+import org.broadinstitute.gatk.utils.help.GATKDoclet;
+
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.List;
+
+/**
+ * GATKDocs for walkers.
+ * Specifically, allows testing of documentation.
+ */
+public class WalkerDoclet extends GATKDoclet {
+    /**
+     * Any class that's in this list will be included in the documentation
+     * when the -test argument is provided.  Useful for debugging.
+     */
+    private static final List<Class<?>> testOnlyKeepers = Arrays.asList(
+            DocumentationTest.class, CommandLineGATK.class, UserException.class);
+
+    @Override
+    protected List<Class<?>> getTestOnlyKeepers() {
+        return testOnlyKeepers;
+    }
+
+    @Override
+    protected DocumentedGATKFeatureHandler createDocumentedGATKFeatureHandler() {
+        return new WalkerDocumentationHandler();
+    }
+
+    public static boolean start(RootDoc rootDoc) throws IOException {
+        return new WalkerDoclet().startProcessDocs(rootDoc);
+    }
+}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/help/WalkerDocumentationHandler.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/help/WalkerDocumentationHandler.java
new file mode 100644
index 0000000..8e3881c
--- /dev/null
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/help/WalkerDocumentationHandler.java
@@ -0,0 +1,375 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.tools.walkers.help;
+
+import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang.StringEscapeUtils;
+import org.broadinstitute.gatk.engine.CommandLineGATK;
+import org.broadinstitute.gatk.engine.walkers.*;
+import org.broadinstitute.gatk.tools.walkers.annotator.interfaces.GenotypeAnnotation;
+import org.broadinstitute.gatk.tools.walkers.annotator.interfaces.InfoFieldAnnotation;
+import org.broadinstitute.gatk.utils.commandline.CommandLineProgram;
+import org.broadinstitute.gatk.utils.help.GATKDocUtils;
+import org.broadinstitute.gatk.utils.help.GenericDocumentationHandler;
+import org.broadinstitute.gatk.utils.help.HelpConstants;
+
+import java.lang.annotation.Annotation;
+import java.lang.reflect.InvocationTargetException;
+import java.lang.reflect.Method;
+import java.util.*;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+public class WalkerDocumentationHandler extends GenericDocumentationHandler {
+    private final static String CMDLINE_GATK_URL = HelpConstants.GATK_DOCS_URL + "org_broadinstitute_gatk_engine_CommandLineGATK.php";
+
+    @Override
+    protected CommandLineProgram createCommandLineProgram() {
+        return new CommandLineGATK();
+    }
+
+    /**
+     * Umbrella function that groups the collection of values for specific annotations applied to an
+     * instance of class c. Lists of collected values are added directly to the "toProcess" object.
+     * Requires being able to instantiate the class.
+     *
+     * @param classToProcess the object to instantiate and query for the annotation
+     * @param root the root of the document handler, to which we'll store collected annotations
+     */
+    @Override
+    protected void getClazzAnnotations(Class classToProcess, Map<String, Object> root) {
+        //
+        // attempt to instantiate the class
+        final Object instance = makeInstanceIfPossible(classToProcess);
+        if (instance != null) {
+            final Class myClass = instance.getClass();
+            // Get parallelism options
+            final HashSet<HashMap<String, Object>> parallelOptions = getParallelism(myClass, new HashSet<HashMap<String, Object>>());
+            root.put("parallel", parallelOptions);
+            // Get annotation info (what type of annotation, standard etc.)
+            final HashSet<String> annotInfo = getAnnotInfo(myClass, new HashSet<String>());
+            root.put("annotinfo", StringUtils.join(annotInfo, ", "));
+            // Get annotation field (whether it goes in INFO or FORMAT)
+            root.put("annotfield", getAnnotField(myClass));
+            // Get walker type if applicable
+            root.put("walkertype", getWalkerType(myClass));
+            // Get partition type if applicable
+            root.put("partitiontype", getPartitionType(myClass));
+            // Get read filter annotations (ReadFilters) if applicable
+            final HashSet<HashMap<String, Object>> bucket= getReadFilters(myClass, new HashSet<HashMap<String, Object>>());
+            root.put("readfilters", bucket);
+            // Get default downsampling settings
+            final HashMap<String, Object> dsSettings = getDownSamplingSettings(myClass, new HashMap<String, Object>());
+            root.put("downsampling", dsSettings);
+            // Get reference window size settings
+            final HashMap<String, Object> refwindow = getRefWindow(myClass, new HashMap<String, Object>());
+            root.put("refwindow", refwindow);
+            // Get ActiveRegion size settings
+            final HashMap<String, Object> activeRegion = getActiveRegion(myClass, new HashMap<String, Object>());
+            root.put("activeregion", activeRegion);
+            // Get annotation header line description if applicable
+            final Object annotDescriptLines = getAnnotDescript(instance, myClass);
+            root.put("annotdescript", annotDescriptLines);
+
+            // anything else?
+        } else {
+            // put empty items to avoid blowups
+            root.put("parallel", new HashSet<String>());
+            root.put("annotinfo", "");
+            root.put("annotfield", "");
+            root.put("walkertype", "");
+            root.put("partitiontype", "");
+            root.put("readfilters", new HashSet<HashMap<String, Object>>());
+            root.put("downsampling", new HashMap<String, Object>());
+            root.put("refwindow", new HashMap<String, Object>());
+            root.put("activeregion", new HashMap<String, Object>());
+            root.put("annotdescript", new ArrayList<HashMap<String, Object>>());
+        }
+    }
+
+    /**
+     * Utility function that looks up annotation descriptions if applicable.
+     *
+     * @param myClass the class to query
+     * @return a list of HTML-escaped annotation header lines if the class exposes them, otherwise null
+     */
+    private Object getAnnotDescript(Object instance, Class myClass) {
+        //
+        // Check if the class has the method we want
+        for (Method classMethod : myClass.getMethods()) {
+            if (classMethod.toString().contains("getDescriptions") && classMethod.toString().contains("annotator")) {
+                try {
+                    String headerLine = (classMethod.invoke(instance)).toString();
+                    Pattern p = Pattern.compile("(INFO=<.*?>|FORMAT=<.*?>)");
+                    Matcher m = p.matcher(headerLine);
+                    List<String> annotLines = new ArrayList<>();
+                    while (m.find()) {
+                        annotLines.add(StringEscapeUtils.escapeHtml(m.group()));
+                        System.out.println("found "+m.group());
+                    }
+                    return annotLines;
+                } catch (IllegalArgumentException e) {
+                } catch (IllegalAccessException e) {
+                } catch (InvocationTargetException e) {
+                }
+            }
+        }
+        return null;
+    }
+
+    /**
+     * Utility function that checks which parallelism options are available for an instance of class c.
+     *
+     * @param myClass the class to query for the interfaces
+     * @param parallelOptions an empty HashSet in which to collect the info
+     * @return a hash set of parallelism options, otherwise an empty set
+     */
+    private HashSet<HashMap<String, Object>> getParallelism(Class myClass, HashSet<HashMap<String, Object>> parallelOptions) {
+        //
+        // Retrieve interfaces
+        Class[] implementedInterfaces = myClass.getInterfaces();
+        for (Class intfClass : implementedInterfaces) {
+            final HashMap<String, Object> nugget = new HashMap<String, Object>();
+            if (intfClass.getSimpleName().equals("TreeReducible")) {
+                nugget.put("name", intfClass.getSimpleName());
+                nugget.put("arg", HelpConstants.ARG_TREEREDUCIBLE);
+                nugget.put("link", CMDLINE_GATK_URL + "#" + HelpConstants.ARG_TREEREDUCIBLE);
+            } else if (intfClass.getSimpleName().equals("NanoSchedulable")) {
+                nugget.put("name", intfClass.getSimpleName());
+                nugget.put("arg", HelpConstants.ARG_NANOSCHEDULABLE);
+                nugget.put("link", CMDLINE_GATK_URL + "#" + HelpConstants.ARG_NANOSCHEDULABLE);
+            } else {
+                continue;
+            }
+            parallelOptions.add(nugget);
+        }
+        // Look up superclasses recursively
+        final Class mySuperClass = myClass.getSuperclass();
+        if (mySuperClass.getSimpleName().equals("Object")) {
+            return parallelOptions;
+        }
+        return getParallelism(mySuperClass, parallelOptions);
+    }
+
+    /**
+     * Utility function that looks up whether the annotation goes in INFO or FORMAT field.
+     *
+     * @param myClass the class to query for the interfaces
+     * @return a String specifying the annotation field
+     */
+    private final String getAnnotField(Class myClass) {
+        //
+        // Look up superclasses recursively until we find either
+        // GenotypeAnnotation or InfoFieldAnnotation
+        final Class mySuperClass = myClass.getSuperclass();
+        if (mySuperClass == InfoFieldAnnotation.class) {
+            return "INFO (variant-level)";
+        } else if (mySuperClass == GenotypeAnnotation.class) {
+            return "FORMAT (sample genotype-level)";
+        } else if (mySuperClass.getSimpleName().equals("Object")) {
+            return "";
+        }
+        return getAnnotField(mySuperClass);
+    }
+
+    /**
+     * Utility function that determines the annotation type for an instance of class c.
+     *
+     * @param myClass the class to query for the interfaces
+     * @param annotInfo an empty HashSet in which to collect the info
+     * @return a hash set of the annotation types, otherwise an empty set
+     */
+    private HashSet<String> getAnnotInfo(Class myClass, HashSet<String> annotInfo) {
+        //
+        // Retrieve interfaces
+        Class[] implementedInterfaces = myClass.getInterfaces();
+        for (Class intfClass : implementedInterfaces) {
+            if (intfClass.getName().contains("Annotation")) {
+                annotInfo.add(intfClass.getSimpleName());
+            }
+        }
+        // Look up superclasses recursively
+        final Class mySuperClass = myClass.getSuperclass();
+        if (mySuperClass.getSimpleName().equals("Object")) {
+            return annotInfo;
+        }
+        return getAnnotInfo(mySuperClass, annotInfo);
+    }
+
+    /**
+     * Utility function that determines the default downsampling settings for an instance of class c.
+     *
+     * @param myClass the class to query for the settings
+     * @param dsSettings an empty HashMap in which to collect the info
+     * @return a hash map of the downsampling settings, otherwise an empty map
+     */
+    private HashMap<String, Object> getDownSamplingSettings(Class myClass, HashMap<String, Object> dsSettings) {
+        //
+        // Check for RODWalker first
+        if (!checkForRODWalker(myClass).equals("yes")) {
+            //
+            // Retrieve annotation
+            if (myClass.isAnnotationPresent(Downsample.class)) {
+                final Annotation thisAnnotation = myClass.getAnnotation(Downsample.class);
+                if(thisAnnotation instanceof Downsample) {
+                    final Downsample dsAnnotation = (Downsample) thisAnnotation;
+                    dsSettings.put("by", dsAnnotation.by().toString());
+                    dsSettings.put("to_cov", dsAnnotation.toCoverage());
+                }
+            }
+        }
+        return dsSettings;
+    }
+
+    /**
+     * Utility function that determines the reference window size for an instance of class c.
+     *
+     * @param myClass the class to query for the settings
+     * @param refWindow an empty HashMap in which to collect the info
+     * @return a HashMap of the window start and stop, otherwise an empty HashMap
+     */
+    private HashMap<String, Object> getRefWindow(Class myClass, HashMap<String, Object> refWindow) {
+        //
+        // Retrieve annotation
+        if (myClass.isAnnotationPresent(Reference.class)) {
+            final Annotation thisAnnotation = myClass.getAnnotation(Reference.class);
+            if(thisAnnotation instanceof Reference) {
+                final Reference refAnnotation = (Reference) thisAnnotation;
+                refWindow.put("start", refAnnotation.window().start());
+                refWindow.put("stop", refAnnotation.window().stop());
+            }
+        }
+        return refWindow;
+    }
+
+    /**
+     * Utility function that determines the ActiveRegion settings for an instance of class c.
+     *
+     * @param myClass the class to query for the settings
+     * @param activeRegion an empty HashMap in which to collect the info
+     * @return a HashMap of the ActiveRegion parameters, otherwise an empty HashMap
+     */
+    private HashMap<String, Object> getActiveRegion(Class myClass, HashMap<String, Object> activeRegion) {
+        //
+        // Retrieve annotation
+        if (myClass.isAnnotationPresent(ActiveRegionTraversalParameters.class)) {
+            final Annotation thisAnnotation = myClass.getAnnotation(ActiveRegionTraversalParameters.class);
+            if(thisAnnotation instanceof ActiveRegionTraversalParameters) {
+                final ActiveRegionTraversalParameters arAnnotation = (ActiveRegionTraversalParameters) thisAnnotation;
+                activeRegion.put("ext", arAnnotation.extension());
+                activeRegion.put("max", arAnnotation.maxRegion());
+                activeRegion.put("min", arAnnotation.minRegion());
+            }
+        }
+        return activeRegion;
+    }
+
+    /**
+     * Utility function that determines the partition type of an instance of class c.
+     *
+     * @param myClass the class to query for the annotation
+     * @return the partition type if applicable, otherwise an empty string
+     */
+    private String getPartitionType(Class myClass) {
+        //
+        // Retrieve annotation
+        if (myClass.isAnnotationPresent(PartitionBy.class)) {
+            final Annotation thisAnnotation = myClass.getAnnotation(PartitionBy.class);
+            if(thisAnnotation instanceof PartitionBy) {
+                final PartitionBy partAnnotation = (PartitionBy) thisAnnotation;
+                return partAnnotation.value().toString();
+            }
+        }
+        return "";
+    }
+
+    /**
+     * Utility function that determines the type of walker subclassed by an instance of class c.
+     *
+     * @param myClass the class to query for the annotation
+     * @return the type of walker if applicable, otherwise an empty string
+     */
+    private String getWalkerType(Class myClass) {
+        //
+        // Look up superclasses recursively until we find either Walker or Object
+        final Class mySuperClass = myClass.getSuperclass();
+        if (mySuperClass.getSimpleName().equals("Walker")) {
+            return myClass.getSimpleName();
+        } else if (mySuperClass.getSimpleName().equals("Object")) {
+            return "";
+        }
+        return getWalkerType(mySuperClass);
+    }
+
+    /**
+     * Utility function that checks whether an instance of class c is a subclass of RODWalker.
+     *
+     * @param myClass the class to query for the annotation
+     * @return "yes" if a RodWalker superclass is found, otherwise an empty string (can't use a Boolean because of the recursion)
+     */
+    private String checkForRODWalker(Class myClass) {
+        //
+        // Look up superclasses recursively until we find either RODWalker or (Walker or Object)
+        final Class mySuperClass = myClass.getSuperclass();
+        if (mySuperClass.getSimpleName().equals("RodWalker")) {
+            return "yes";
+        } else if (mySuperClass.getSimpleName().equals("Object") || mySuperClass.getSimpleName().equals("Walker")) {
+            return "";
+        }
+        return checkForRODWalker(mySuperClass);
+    }
+
+    /**
+     * Utility function that finds the values of ReadFilters annotation applied to an instance of class c.
+     *
+     * @param myClass the class to query for the annotation
+     * @param bucket a container in which we store the annotations collected
+     * @return a hash set of values, otherwise an empty set
+     */
+    private HashSet<HashMap<String, Object>> getReadFilters(Class myClass, HashSet<HashMap<String, Object>> bucket) {
+        //
+        // Retrieve annotation
+        if (myClass.isAnnotationPresent(ReadFilters.class)) {
+            final Annotation thisAnnotation = myClass.getAnnotation(ReadFilters.class);
+            if(thisAnnotation instanceof ReadFilters) {
+                final ReadFilters rfAnnotation = (ReadFilters) thisAnnotation;
+                for (Class<?> filter : rfAnnotation.value()) {
+                    // make hashmap of simplename and url
+                    final HashMap<String, Object> nugget = new HashMap<String, Object>();
+                    nugget.put("name", filter.getSimpleName());
+                    nugget.put("filename", GATKDocUtils.phpFilenameForClass(filter));
+                    bucket.add(nugget);
+                }
+            }
+        }
+        // Look up superclasses recursively
+        final Class mySuperClass = myClass.getSuperclass();
+        if (mySuperClass.getSimpleName().equals("Object")) {
+            return bucket;
+        }
+        return getReadFilters(mySuperClass, bucket);
+    }
+}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/package-info.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/package-info.java
index 4201ef0..53c155f 100644
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/package-info.java
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/package-info.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/qc/CheckPileup.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/qc/CheckPileup.java
index a3848dc..d38543c 100644
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/qc/CheckPileup.java
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/qc/CheckPileup.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -30,9 +30,9 @@ import org.broadinstitute.gatk.utils.commandline.Input;
 import org.broadinstitute.gatk.utils.commandline.Output;
 import org.broadinstitute.gatk.utils.commandline.RodBinding;
 import org.broadinstitute.gatk.engine.CommandLineGATK;
-import org.broadinstitute.gatk.engine.contexts.AlignmentContext;
-import org.broadinstitute.gatk.engine.contexts.ReferenceContext;
-import org.broadinstitute.gatk.engine.refdata.RefMetaDataTracker;
+import org.broadinstitute.gatk.utils.contexts.AlignmentContext;
+import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
+import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
 import org.broadinstitute.gatk.engine.walkers.DataSource;
 import org.broadinstitute.gatk.engine.walkers.LocusWalker;
 import org.broadinstitute.gatk.engine.walkers.Requires;
@@ -118,17 +118,17 @@ import java.util.Arrays;
  * </p>
  *
  * <h3>Input</h3>
- * <p>A BAM file conatining your aligned sequence data and a pileup file generated by Samtools covering the region you
+ * <p>A BAM file containing your aligned sequence data and a pileup file generated by Samtools covering the region you
  * want to examine.</p>
  *
  * <h3>Output</h3>
  * <p>A text file listing mismatches between the input pileup and the GATK's internal pileup. If there are no mismatches, the output file is empty.</p>
  *
- * <h3>Example</h3>
+ * <h3>Usage example</h3>
  * <pre>
  * java -jar GenomeAnalysisTK.jar \
  *   -T CheckPileup \
- *   -R ref.fasta \
+ *   -R reference.fasta \
  *   -I your_data.bam \
  *   --pileup:SAMPileup pileup_file.txt \
  *   -L chr1:257-275 \
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/qc/CountBases.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/qc/CountBases.java
index 1500ce3..e0d2a3c 100644
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/qc/CountBases.java
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/qc/CountBases.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -26,8 +26,8 @@
 package org.broadinstitute.gatk.tools.walkers.qc;
 
 import org.broadinstitute.gatk.engine.CommandLineGATK;
-import org.broadinstitute.gatk.engine.contexts.ReferenceContext;
-import org.broadinstitute.gatk.engine.refdata.RefMetaDataTracker;
+import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
+import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
 import org.broadinstitute.gatk.engine.walkers.DataSource;
 import org.broadinstitute.gatk.engine.walkers.ReadWalker;
 import org.broadinstitute.gatk.engine.walkers.Requires;
@@ -36,7 +36,7 @@ import org.broadinstitute.gatk.utils.help.HelpConstants;
 import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
 
 /**
- * Walks over the input data set, calculating the number of bases seen for diagnostic purposes.
+ * Count the number of bases in a set of reads
  *
  * <h3>Input</h3>
  * <p>
@@ -45,13 +45,14 @@ import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
  *
  * <h3>Output</h3>
  * <p>
- * Number of bases seen.
+ * Number of bases seen. If an output file name is provided, then the result will be written to that file.
+ * Otherwise it will be sent to standard console output.
  * </p>
  *
- * <h3>Examples</h3>
+ * <h3>Usage example</h3>
  * <pre>
- * java -Xmx2g -jar GenomeAnalysisTK.jar \
- *   -R ref.fasta \
+ * java -jar GenomeAnalysisTK.jar \
+ *   -R reference.fasta \
  *   -T CountBases \
  *   -I input.bam \
  *   [-L input.intervals]
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/qc/CountIntervals.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/qc/CountIntervals.java
index 221c4a9..21f0427 100644
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/qc/CountIntervals.java
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/qc/CountIntervals.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -31,9 +31,9 @@ import org.broadinstitute.gatk.utils.commandline.Input;
 import org.broadinstitute.gatk.utils.commandline.Output;
 import org.broadinstitute.gatk.utils.commandline.RodBinding;
 import org.broadinstitute.gatk.engine.CommandLineGATK;
-import org.broadinstitute.gatk.engine.contexts.AlignmentContext;
-import org.broadinstitute.gatk.engine.contexts.ReferenceContext;
-import org.broadinstitute.gatk.engine.refdata.RefMetaDataTracker;
+import org.broadinstitute.gatk.utils.contexts.AlignmentContext;
+import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
+import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
 import org.broadinstitute.gatk.engine.walkers.RefWalker;
 import org.broadinstitute.gatk.utils.GenomeLoc;
 import org.broadinstitute.gatk.utils.collections.Pair;
@@ -45,7 +45,7 @@ import java.util.Collections;
 import java.util.List;
 
 /**
- * Count contiguous regions in an interval list.
+ * Count contiguous regions in an interval list
  *
  * <p>When the GATK reads in intervals from an intervals list, any intervals that overlap each other get merged into
  * a single interval spanning the original ones. For example, if you have the following intervals:
@@ -63,7 +63,7 @@ import java.util.List;
  *
  * <h3>Input</h3>
  * <p>
- * One or more rod files containing intervals to check.
+ * One or more ROD files containing intervals to check.
  * </p>
  *
  * <h3>Output</h3>
@@ -73,12 +73,12 @@ import java.util.List;
  *
  * You can use the -numOverlaps argument to find out how many cases you have of a specific number of overlaps.
  *
- * <h3>Example</h3>
+ * <h3>Usage example</h3>
  * <pre>
- * java -Xmx2g -jar GenomeAnalysisTK.jar \
+ * java -jar GenomeAnalysisTK.jar \
  *   -T CountIntervals \
- *   -R ref.fasta \
- *   -0 output.txt \
+ *   -R reference.fasta \
+ *   -o output.txt \
  *   -check intervals.list
  * </pre>
  */
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/qc/CountLoci.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/qc/CountLoci.java
index 51c1617..004906c 100644
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/qc/CountLoci.java
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/qc/CountLoci.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -27,9 +27,9 @@ package org.broadinstitute.gatk.tools.walkers.qc;
 
 import org.broadinstitute.gatk.utils.commandline.Output;
 import org.broadinstitute.gatk.engine.CommandLineGATK;
-import org.broadinstitute.gatk.engine.contexts.AlignmentContext;
-import org.broadinstitute.gatk.engine.contexts.ReferenceContext;
-import org.broadinstitute.gatk.engine.refdata.RefMetaDataTracker;
+import org.broadinstitute.gatk.utils.contexts.AlignmentContext;
+import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
+import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
 import org.broadinstitute.gatk.engine.walkers.LocusWalker;
 import org.broadinstitute.gatk.engine.walkers.NanoSchedulable;
 import org.broadinstitute.gatk.engine.walkers.TreeReducible;
@@ -39,10 +39,10 @@ import org.broadinstitute.gatk.utils.help.HelpConstants;
 import java.io.PrintStream;
 
 /**
- * Walks over the input data set, calculating the total number of covered loci for diagnostic purposes.
+ * Count the total number of covered loci
  *
  * <p>
- * This is the simplest example of a locus walker.
+ * This tool counts the number of loci (positions in the reference) that are covered by sequence data.
  * </p>
  *
  * <h3>Input</h3>
@@ -56,11 +56,11 @@ import java.io.PrintStream;
  * Otherwise it will be sent to standard console output.
  * </p>
  *
- * <h3>Examples</h3>
+ * <h3>Usage example</h3>
  * <pre>
- * java -Xmx2g -jar GenomeAnalysisTK.jar \
+ * java -jar GenomeAnalysisTK.jar \
  *   -T CountLoci \
- *   -R ref.fasta \
+ *   -R reference.fasta \
  *   -I input.bam \
  *   -o output.txt \
  *   [-L input.intervals]
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/qc/CountMales.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/qc/CountMales.java
index d665e61..a829141 100644
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/qc/CountMales.java
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/qc/CountMales.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -27,8 +27,8 @@ package org.broadinstitute.gatk.tools.walkers.qc;
 
 import org.broadinstitute.gatk.utils.commandline.Output;
 import org.broadinstitute.gatk.engine.CommandLineGATK;
-import org.broadinstitute.gatk.engine.contexts.ReferenceContext;
-import org.broadinstitute.gatk.engine.refdata.RefMetaDataTracker;
+import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
+import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
 import org.broadinstitute.gatk.engine.samples.Gender;
 import org.broadinstitute.gatk.engine.samples.Sample;
 import org.broadinstitute.gatk.engine.walkers.DataSource;
@@ -41,7 +41,9 @@ import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
 import java.io.PrintStream;
 
 /**
- * Walks over the input data set, calculating the number of reads seen from male samples for diagnostic purposes.
+ * Count the number of reads seen from male samples
+ *
+ * <p>This tool counts the number of sequence reads seen from samples that are male according to the sample metadata.</p>
  *
  * <h3>Input</h3>
  * <p>
@@ -50,14 +52,15 @@ import java.io.PrintStream;
  *
  * <h3>Output</h3>
  * <p>
- * Number of reads seen from male samples.
+ * Number of reads seen from male samples. If an output file name is provided, then the result will be written to that file.
+ * Otherwise it will be sent to standard console output.
  * </p>
  *
- * <h3>Examples</h3>
+ * <h3>Usage example</h3>
  * <pre>
- * java -Xmx2g -jar GenomeAnalysisTK.jar \
+ * java -jar GenomeAnalysisTK.jar \
  *   -T CountMales \
- *   -R ref.fasta \
+ *   -R reference.fasta \
  *   -I samples.bam \
  *   -o output.txt
  * </pre>
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/qc/CountRODs.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/qc/CountRODs.java
index e068ff7..78f194f 100644
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/qc/CountRODs.java
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/qc/CountRODs.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -33,10 +33,10 @@ import org.broadinstitute.gatk.utils.commandline.Input;
 import org.broadinstitute.gatk.utils.commandline.Output;
 import org.broadinstitute.gatk.utils.commandline.RodBinding;
 import org.broadinstitute.gatk.engine.CommandLineGATK;
-import org.broadinstitute.gatk.engine.contexts.AlignmentContext;
-import org.broadinstitute.gatk.engine.contexts.ReferenceContext;
-import org.broadinstitute.gatk.engine.refdata.RefMetaDataTracker;
-import org.broadinstitute.gatk.engine.refdata.utils.RODRecordList;
+import org.broadinstitute.gatk.utils.contexts.AlignmentContext;
+import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
+import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
+import org.broadinstitute.gatk.utils.refdata.utils.RODRecordList;
 import org.broadinstitute.gatk.engine.walkers.NanoSchedulable;
 import org.broadinstitute.gatk.engine.walkers.RodWalker;
 import org.broadinstitute.gatk.engine.walkers.TreeReducible;
@@ -51,9 +51,9 @@ import java.io.PrintStream;
 import java.util.*;
 
 /**
- * Prints out counts of the number of reference ordered data objects encountered.
+ * Count the number of ROD objects encountered
  *
- * <p>CountRods is a RODWalker, and so traverses the data by ROD. For example if the ROD passed to it is a VCF file,
+ * <p>CountRODs is a RODWalker, and so traverses the data by ROD (reference ordered data). For example, if the ROD passed to it is a VCF file,
  * it will count the variants in the file.</p>
  *
  * <p>Note that this tool is different from CountRodsByRef which is a RefWalker, and so traverses the data by
@@ -66,19 +66,19 @@ import java.util.*;
  *
  * <h3>Input</h3>
  * <p>
- * One or more rod files.
+ * One or more ROD files.
  * </p>
  *
  * <h3>Output</h3>
  * <p>
- * Number of rods seen.
+ * Number of RODs seen.
  * </p>
  *
- * <h3>Example</h3>
+ * <h3>Usage example</h3>
  * <pre>
- * java -Xmx2g -jar GenomeAnalysisTK.jar \
+ * java -jar GenomeAnalysisTK.jar \
  *   -T CountRODs \
- *   -R ref.fasta \
+ *   -R reference.fasta \
  *   -o output.txt \
  *   --rod input.vcf
  * </pre>
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/qc/CountRODsByRef.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/qc/CountRODsByRef.java
index 40471b5..8934fce 100644
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/qc/CountRODsByRef.java
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/qc/CountRODsByRef.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -30,9 +30,9 @@ import org.broadinstitute.gatk.utils.commandline.Argument;
 import org.broadinstitute.gatk.utils.commandline.Input;
 import org.broadinstitute.gatk.utils.commandline.RodBinding;
 import org.broadinstitute.gatk.engine.CommandLineGATK;
-import org.broadinstitute.gatk.engine.contexts.AlignmentContext;
-import org.broadinstitute.gatk.engine.contexts.ReferenceContext;
-import org.broadinstitute.gatk.engine.refdata.RefMetaDataTracker;
+import org.broadinstitute.gatk.utils.contexts.AlignmentContext;
+import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
+import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
 import org.broadinstitute.gatk.engine.walkers.RefWalker;
 import org.broadinstitute.gatk.utils.collections.ExpandingArrayList;
 import org.broadinstitute.gatk.utils.collections.Pair;
@@ -43,7 +43,7 @@ import java.util.Collections;
 import java.util.List;
 
 /**
- * Prints out counts of the number of reference ordered data objects encountered along the reference.
+ * Count the number of ROD objects encountered along the reference
  *
  * <p>CountRodsByRef is a RefWalker, and so traverses the data by position along the reference. It counts ROD
  * elements (such as, but not limited to, variants) found at each position or within specific intervals if you use
@@ -58,19 +58,19 @@ import java.util.List;
  *
  * <h3>Input</h3>
  * <p>
- * One or more rod files.
+ * One or more ROD files.
  * </p>
  *
  * <h3>Output</h3>
  * <p>
- * Number of rods seen.
+ * Number of RODs seen.
  * </p>
  *
- * <h3>Examples</h3>
+ * <h3>Usage example</h3>
  * <pre>
- * java -Xmx2g -jar GenomeAnalysisTK.jar \
+ * java -jar GenomeAnalysisTK.jar \
  *   -T CountRODsByRef \
- *   -R ref.fasta \
+ *   -R reference.fasta \
  *   -o output.txt \
  *   --rod input.vcf
  * </pre>
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/qc/CountReadEvents.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/qc/CountReadEvents.java
index 998448e..20d60a1 100644
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/qc/CountReadEvents.java
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/qc/CountReadEvents.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -28,9 +28,9 @@ package org.broadinstitute.gatk.tools.walkers.qc;
 import htsjdk.samtools.CigarOperator;
 import org.broadinstitute.gatk.utils.commandline.Output;
 import org.broadinstitute.gatk.engine.CommandLineGATK;
-import org.broadinstitute.gatk.engine.contexts.ReferenceContext;
-import org.broadinstitute.gatk.engine.refdata.RefMetaDataTracker;
-import org.broadinstitute.gatk.engine.report.GATKReport;
+import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
+import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
+import org.broadinstitute.gatk.utils.report.GATKReport;
 import org.broadinstitute.gatk.engine.walkers.DataSource;
 import org.broadinstitute.gatk.engine.walkers.ReadWalker;
 import org.broadinstitute.gatk.engine.walkers.Requires;
@@ -45,7 +45,9 @@ import java.util.HashMap;
 import java.util.Map;
 
 /**
- * Walks over the input data set, counting the number of read events (from the CIGAR operator)
+ * Count the number of read events
+ *
+ * <p>This tool counts the number of "events" (I, D, M, etc.) encountered in the CIGAR strings of the sequence reads.</p>
  *
  * <h3>Input</h3>
  * <p>
@@ -55,12 +57,13 @@ import java.util.Map;
  * <h3>Output</h3>
  * <p>
  * Number of read events for each category, formatted as a GATKReport table.
+ * </p>
  *
- * <h3>Examples</h3>
+ * <h3>Usage example</h3>
  * <pre>
- * java -Xmx2g -jar GenomeAnalysisTK.jar \
+ * java -jar GenomeAnalysisTK.jar \
  *   -T CountReadEvents \
- *   -R ref.fasta \
+ *   -R reference.fasta \
  *   -I input.bam \
  *   -o output.grp \
  *   [-L input.intervals]
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/qc/CountReads.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/qc/CountReads.java
index 33d22ca..17aa5bc 100644
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/qc/CountReads.java
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/qc/CountReads.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -26,8 +26,8 @@
 package org.broadinstitute.gatk.tools.walkers.qc;
 
 import org.broadinstitute.gatk.engine.CommandLineGATK;
-import org.broadinstitute.gatk.engine.contexts.ReferenceContext;
-import org.broadinstitute.gatk.engine.refdata.RefMetaDataTracker;
+import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
+import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
 import org.broadinstitute.gatk.engine.walkers.DataSource;
 import org.broadinstitute.gatk.engine.walkers.NanoSchedulable;
 import org.broadinstitute.gatk.engine.walkers.ReadWalker;
@@ -37,11 +37,12 @@ import org.broadinstitute.gatk.utils.help.HelpConstants;
 import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
 
 /**
- * Walks over the input data set, calculating the number of reads seen for diagnostic purposes.
+ * Count the number of reads
  *
  * <p>
- * Can also count the number of reads matching a given criterion using read filters (see the
- * --read-filter command line argument).  Simplest example of a read-backed analysis.
+ * This is especially useful in combination with read filters (see the --read-filter command line argument), which
+ * allow you to count reads matching specific criteria (e.g. read group tags or quality parameters).
+ * </p>
  *
  *
  * <h3>Input</h3>
@@ -51,13 +52,13 @@ import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
  *
  * <h3>Output</h3>
  * <p>
- * Number of reads seen.
+ * Number of reads seen. This is output to the terminal/stdout.
  * </p>
  *
- * <h3>Examples</h3>
+ * <h3>Usage example</h3>
  * <pre>
- * java -Xmx2g -jar GenomeAnalysisTK.jar \
- *   -R ref.fasta \
+ * java -jar GenomeAnalysisTK.jar \
+ *   -R reference.fasta \
  *   -T CountReads \
  *   -I input.bam \
  *   [-L input.intervals]
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/qc/CountTerminusEvent.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/qc/CountTerminusEvent.java
index 90a131c..942e7c3 100644
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/qc/CountTerminusEvent.java
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/qc/CountTerminusEvent.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -29,8 +29,8 @@ import htsjdk.samtools.CigarElement;
 import htsjdk.samtools.CigarOperator;
 import org.broadinstitute.gatk.utils.commandline.Output;
 import org.broadinstitute.gatk.engine.CommandLineGATK;
-import org.broadinstitute.gatk.engine.contexts.ReferenceContext;
-import org.broadinstitute.gatk.engine.refdata.RefMetaDataTracker;
+import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
+import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
 import org.broadinstitute.gatk.engine.walkers.DataSource;
 import org.broadinstitute.gatk.engine.walkers.ReadWalker;
 import org.broadinstitute.gatk.engine.walkers.Requires;
@@ -44,7 +44,9 @@ import java.io.PrintStream;
 import java.util.List;
 
 /**
- * Walks over the input data set, counting the number of reads ending in insertions/deletions or soft-clips
+ * Count the number of reads ending in insertions, deletions or soft-clips
+ *
+ * <p>This tool reports the number of reads where the end bases do not map to the reference sequence.</p>
  *
  * <h3>Input</h3>
  * <p>
@@ -56,13 +58,13 @@ import java.util.List;
  * Number of reads ending in each category.
  * </p>
  *
- * <h3>Examples</h3>
+ * <h3>Usage example</h3>
  * <pre>
- * java -Xmx2g -jar GenomeAnalysisTK.jar \
- *   -R ref.fasta \
+ * java -jar GenomeAnalysisTK.jar \
  *   -T CountTerminusEvent \
- *   -o output.txt \
+ *   -R reference.fasta \
  *   -I input.bam \
+ *   -o output.txt \
  *   [-L input.intervals]
  * </pre>
  */
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/qc/DocumentationTest.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/qc/DocumentationTest.java
index b5a1e74..579f43f 100644
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/qc/DocumentationTest.java
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/qc/DocumentationTest.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -29,9 +29,9 @@ import htsjdk.tribble.Feature;
 import org.broadinstitute.gatk.utils.commandline.*;
 import org.broadinstitute.gatk.engine.arguments.DbsnpArgumentCollection;
 import org.broadinstitute.gatk.engine.arguments.StandardVariantContextInputArgumentCollection;
-import org.broadinstitute.gatk.engine.contexts.AlignmentContext;
-import org.broadinstitute.gatk.engine.contexts.ReferenceContext;
-import org.broadinstitute.gatk.engine.refdata.RefMetaDataTracker;
+import org.broadinstitute.gatk.utils.contexts.AlignmentContext;
+import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
+import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
 import org.broadinstitute.gatk.engine.walkers.RodWalker;
 import htsjdk.variant.variantcontext.writer.VariantContextWriter;
 import htsjdk.variant.variantcontext.VariantContext;
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/qc/ErrorThrowing.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/qc/ErrorThrowing.java
index 6e872e3..030c964 100644
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/qc/ErrorThrowing.java
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/qc/ErrorThrowing.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -25,17 +25,16 @@
 
 package org.broadinstitute.gatk.tools.walkers.qc;
 
+import org.broadinstitute.gatk.engine.walkers.FailMethod;
 import org.broadinstitute.gatk.utils.commandline.Argument;
 import org.broadinstitute.gatk.utils.commandline.Input;
 import org.broadinstitute.gatk.engine.CommandLineGATK;
-import org.broadinstitute.gatk.engine.contexts.AlignmentContext;
-import org.broadinstitute.gatk.engine.contexts.ReferenceContext;
-import org.broadinstitute.gatk.engine.refdata.RefMetaDataTracker;
+import org.broadinstitute.gatk.utils.contexts.AlignmentContext;
+import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
+import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
 import org.broadinstitute.gatk.engine.walkers.NanoSchedulable;
 import org.broadinstitute.gatk.engine.walkers.RefWalker;
 import org.broadinstitute.gatk.engine.walkers.TreeReducible;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-import org.broadinstitute.gatk.utils.exceptions.UserException;
 import org.broadinstitute.gatk.utils.help.DocumentedGATKFeature;
 import org.broadinstitute.gatk.utils.help.HelpConstants;
 
@@ -50,12 +49,6 @@ public class ErrorThrowing extends RefWalker<Integer,Integer> implements TreeRed
     @Argument(fullName = "failMethod", shortName = "fail", doc = "Determines which method to fail in", required = false)
     public FailMethod failMethod = FailMethod.MAP;
 
-    public enum FailMethod {
-          MAP,
-          REDUCE,
-          TREE_REDUCE
-    }
-
     //
     // Template code to allow us to build the walker, doesn't actually do anything
     //
@@ -65,7 +58,7 @@ public class ErrorThrowing extends RefWalker<Integer,Integer> implements TreeRed
             return null;
 
         if ( failMethod == FailMethod.MAP )
-            fail();
+            FailMethod.fail(exceptionToThrow);
 
         return 0;
     }
@@ -78,33 +71,13 @@ public class ErrorThrowing extends RefWalker<Integer,Integer> implements TreeRed
     @Override
     public Integer reduce(Integer value, Integer sum) {
         if ( value != null && failMethod == FailMethod.REDUCE )
-            fail();
+            FailMethod.fail(exceptionToThrow);
         return sum;
     }
 
     public Integer treeReduce(final Integer lhs, final Integer rhs) {
         if ( failMethod == FailMethod.TREE_REDUCE )
-            fail();
+            FailMethod.fail(exceptionToThrow);
         return rhs;
     }
-
-    private void fail() {
-        if ( exceptionToThrow.equals("UserException") ) {
-            throw new UserException("UserException");
-        } else if ( exceptionToThrow.equals("NullPointerException") ) {
-            throw new NullPointerException();
-        } else if ( exceptionToThrow.equals("ReviewedGATKException") ) {
-            throw new ReviewedGATKException("ReviewedGATKException");
-        } else if ( exceptionToThrow.equals("SamError1") ) {
-            throw new RuntimeException(CommandLineGATK.PICARD_TEXT_SAM_FILE_ERROR_1);
-        } else if ( exceptionToThrow.equals("SamError2") ) {
-            throw new RuntimeException(CommandLineGATK.PICARD_TEXT_SAM_FILE_ERROR_2);
-        } else if ( exceptionToThrow.equals("NoSpace1") ) {
-            throw new htsjdk.samtools.util.RuntimeIOException(new java.io.IOException("No space left on device java.io.FileOutputStream.writeBytes(Native Method)"));
-        } else if ( exceptionToThrow.equals("NoSpace2") ) {
-            throw new htsjdk.samtools.SAMException("Exception writing BAM index file", new java.io.IOException("No space left on device java.io.FileOutputStream.writeBytes(Native Method)"));
-        } else {
-            throw new UserException.BadArgumentValue("exception", "exception isn't a recognized value " + exceptionToThrow);
-        }
-    }
 }
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/qc/FlagStat.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/qc/FlagStat.java
index 83c2cc4..fee7887 100644
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/qc/FlagStat.java
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/qc/FlagStat.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -27,8 +27,8 @@ package org.broadinstitute.gatk.tools.walkers.qc;
 
 import org.broadinstitute.gatk.utils.commandline.Output;
 import org.broadinstitute.gatk.engine.CommandLineGATK;
-import org.broadinstitute.gatk.engine.contexts.ReferenceContext;
-import org.broadinstitute.gatk.engine.refdata.RefMetaDataTracker;
+import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
+import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
 import org.broadinstitute.gatk.engine.walkers.DataSource;
 import org.broadinstitute.gatk.engine.walkers.NanoSchedulable;
 import org.broadinstitute.gatk.engine.walkers.ReadWalker;
@@ -42,9 +42,9 @@ import java.text.DecimalFormat;
 import java.text.NumberFormat;
 
 /**
- * A reimplementation of the 'samtools flagstat' subcommand in the GATK
+ * Collect statistics about sequence reads based on their SAM flags
  *
- * <p>This tool walks over all input data, accumulating statistics such as total number of reads,
+ * <p>This tool emulates the behavior of 'samtools flagstat'. It collects statistics such as total number of reads,
  * reads with QC failure flag set, number of duplicates, percentage mapped, etc.</p>
  *
  * <h3>Input</h3>
@@ -57,11 +57,11 @@ import java.text.NumberFormat;
  * Resulting stats are written to file if an output file name is given (with -o), otherwise output to stdout.
  * </p>
  *
- * <h3>Example</h3>
+ * <h3>Usage example</h3>
  * <pre>
- * java -Xmx2g -jar GenomeAnalysisTK.jar \
+ * java -jar GenomeAnalysisTK.jar \
  *   -T FlagStat \
- *   -R ref.fasta \
+ *   -R reference.fasta \
  *   -I reads.bam \
  *   [-o output.txt]
  * </pre>
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/qc/Pileup.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/qc/Pileup.java
index 322cea6..cd2c336 100644
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/qc/Pileup.java
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/qc/Pileup.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -28,9 +28,9 @@ package org.broadinstitute.gatk.tools.walkers.qc;
 import htsjdk.tribble.Feature;
 import org.broadinstitute.gatk.utils.commandline.*;
 import org.broadinstitute.gatk.engine.CommandLineGATK;
-import org.broadinstitute.gatk.engine.contexts.AlignmentContext;
-import org.broadinstitute.gatk.engine.contexts.ReferenceContext;
-import org.broadinstitute.gatk.engine.refdata.RefMetaDataTracker;
+import org.broadinstitute.gatk.utils.contexts.AlignmentContext;
+import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
+import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
 import org.broadinstitute.gatk.engine.walkers.LocusWalker;
 import org.broadinstitute.gatk.engine.walkers.NanoSchedulable;
 import org.broadinstitute.gatk.engine.walkers.TreeReducible;
@@ -46,9 +46,10 @@ import java.util.Collections;
 import java.util.List;
 
 /**
- * Emulates the samtools pileup command to print aligned reads
+ * Print read alignments in Pileup-style format
  *
- * <p>Prints the alignment in something similar to the Samtools pileup format (see the
+ * <p>This tool emulates the 'samtools pileup' command. It prints the alignment in a format that is very similar to
+ * the Samtools pileup format (see the
  * <a href="http://samtools.sourceforge.net/pileup.shtml">Pileup format documentation</a> for more details about
  * the original format). There is one line per genomic position, listing the chromosome name, coordinate, reference
  * base, read bases, and read qualities. In addition to these default fields, additional information can be added to
@@ -58,7 +59,6 @@ import java.util.List;
  * <pre>
  *  samtools pileup -f in.ref.fasta -l in.site_list input.bam
  * </pre>
-
  *
  * <h3>Input</h3>
  * <p>
@@ -70,12 +70,12 @@ import java.util.List;
  *  Alignment of reads formatted in the Pileup style.
  * </p>
  *
- * <h3>Example</h3>
+ * <h3>Usage example</h3>
  * <pre>
- * java -Xmx2g -jar GenomeAnalysisTK.jar \
+ * java -jar GenomeAnalysisTK.jar \
  *   -T Pileup \
- *   -R exampleFASTA.fasta \
- *   -I exampleBAM.bam \
+ *   -R reference.fasta \
+ *   -I my_reads.bam \
  *   -L chr1:257-267
  *   -o output.txt
  * </pre>
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/qc/PrintRODs.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/qc/PrintRODs.java
index 22ab7d1..7873af9 100644
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/qc/PrintRODs.java
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/qc/PrintRODs.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -30,9 +30,9 @@ import org.broadinstitute.gatk.utils.commandline.Input;
 import org.broadinstitute.gatk.utils.commandline.Output;
 import org.broadinstitute.gatk.utils.commandline.RodBinding;
 import org.broadinstitute.gatk.engine.CommandLineGATK;
-import org.broadinstitute.gatk.engine.contexts.AlignmentContext;
-import org.broadinstitute.gatk.engine.contexts.ReferenceContext;
-import org.broadinstitute.gatk.engine.refdata.RefMetaDataTracker;
+import org.broadinstitute.gatk.utils.contexts.AlignmentContext;
+import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
+import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
 import org.broadinstitute.gatk.engine.walkers.RodWalker;
 import org.broadinstitute.gatk.utils.help.DocumentedGATKFeature;
 import org.broadinstitute.gatk.utils.help.HelpConstants;
@@ -40,8 +40,11 @@ import org.broadinstitute.gatk.utils.help.HelpConstants;
 import java.io.PrintStream;
 
 /**
- * Prints out all of the RODs in the input data set. Data is rendered using the toString() method
- * of the given ROD.
+ * Print out all of the RODs in the input data set
+ *
+ * <p>This tool reports what RODs (reference ordered data sets) are contained in a given input.</p>
+ *
+ *
  */
 @DocumentedGATKFeature( groupName = HelpConstants.DOCS_CAT_QC, extraDocs = {CommandLineGATK.class} )
 public class PrintRODs extends RodWalker<Integer, Integer> {
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/qc/QCRef.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/qc/QCRef.java
index ee8b68f..9349354 100644
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/qc/QCRef.java
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/qc/QCRef.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -29,9 +29,9 @@ import htsjdk.samtools.reference.IndexedFastaSequenceFile;
 import htsjdk.samtools.reference.ReferenceSequence;
 import org.broadinstitute.gatk.utils.commandline.Output;
 import org.broadinstitute.gatk.engine.CommandLineGATK;
-import org.broadinstitute.gatk.engine.contexts.AlignmentContext;
-import org.broadinstitute.gatk.engine.contexts.ReferenceContext;
-import org.broadinstitute.gatk.engine.refdata.RefMetaDataTracker;
+import org.broadinstitute.gatk.utils.contexts.AlignmentContext;
+import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
+import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
 import org.broadinstitute.gatk.engine.walkers.RefWalker;
 import org.broadinstitute.gatk.utils.BaseUtils;
 import org.broadinstitute.gatk.utils.exceptions.GATKException;
@@ -51,14 +51,15 @@ import java.io.PrintStream;
  *
  * <h3>Output</h3>
  * <p>
- *     If ok, nothing, else will throw an exception at the site where there's been a problem
+ *     If the reference is fully valid, the run will complete successfully. If not, an error message will be produced
+ *     at the site where the program encountered a problem.
  * </p>
  *
- * <h3>Examples</h3>
+ * <h3>Usage example</h3>
  * <pre>
- * java -Xmx2g -jar GenomeAnalysisTK.jar \
- *   -R ref.fasta \
- *   -T QCRef
+ * java -jar GenomeAnalysisTK.jar \
+ *   -T QCRef \
+ *   -R reference.fasta
  * </pre>
  *
  */
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/qc/ReadClippingStats.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/qc/ReadClippingStats.java
index 14a1de5..1517ae7 100644
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/qc/ReadClippingStats.java
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/qc/ReadClippingStats.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -31,8 +31,8 @@ import org.broadinstitute.gatk.utils.commandline.Advanced;
 import org.broadinstitute.gatk.utils.commandline.Argument;
 import org.broadinstitute.gatk.utils.commandline.Output;
 import org.broadinstitute.gatk.engine.CommandLineGATK;
-import org.broadinstitute.gatk.engine.contexts.ReferenceContext;
-import org.broadinstitute.gatk.engine.refdata.RefMetaDataTracker;
+import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
+import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
 import org.broadinstitute.gatk.engine.walkers.DataSource;
 import org.broadinstitute.gatk.engine.walkers.ReadWalker;
 import org.broadinstitute.gatk.engine.walkers.Requires;
@@ -48,23 +48,21 @@ import java.io.PrintStream;
 import java.util.Arrays;
 
 /**
- * Read clipping statistics for all reads.
+ * Collect read clipping statistics
  *
- * Walks over the input reads, printing out statistics about the read length, number of clipping events, and length
- * of the clipping to the output stream.
- *
- * Note: Ignores N's in the Cigar string.
+ * <p>This tool collects statistics about the read length, number of clipping events, and length
+ * of the clipping in all reads in the dataset.</p>
  *
  * <h3>Input</h3>
- * One or more BAM files
+ * One or more BAM files.
  *
  * <h3>Output</h3>
- * A simple tabulated text file with read length and clipping statistics for every read (or every N reads if the "skip"
- * option is used)
+ * A simple tabulated text file with read length and clipping statistics for every read (or every given number of reads
+ * if the "skip" option is used).
+ *
+ * <h3>Caveat</h3>
+ * <p>This tool ignores "N" events in the CIGAR string.</p>
  *
- * User: depristo
- * Date: May 5, 2010
- * Time: 12:16:41 PM
  */
 
 @DocumentedGATKFeature( groupName = HelpConstants.DOCS_CAT_QC, extraDocs = {CommandLineGATK.class} )
@@ -109,11 +107,13 @@ public class ReadClippingStats extends ReadWalker<ReadClippingStats.ReadClipping
                     info.nClippingEvents++;
                     info.nClippedBases += elt.getLength();
                     break;
-                case M :
+                case EQ : // sequence match
+                case M : // alignment match
                 case D : // deletion w.r.t. the reference
                 case P : // ignore pads
                 case I : // insertion w.r.t. the reference
                 case N : // reference skip (looks and gets processed just like a "deletion", just different logical meaning)
+                case X : // sequence mismatch
                     break;
                 default : throw new IllegalStateException("Case statement didn't deal with cigar op: " + elt.getOperator());
             }
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/qc/RodSystemValidation.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/qc/RodSystemValidation.java
index 5f1d396..1043675 100644
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/qc/RodSystemValidation.java
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/qc/RodSystemValidation.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -26,11 +26,11 @@
 package org.broadinstitute.gatk.tools.walkers.qc;
 
 import org.broadinstitute.gatk.utils.commandline.*;
-import org.broadinstitute.gatk.engine.contexts.AlignmentContext;
-import org.broadinstitute.gatk.engine.contexts.ReferenceContext;
+import org.broadinstitute.gatk.utils.contexts.AlignmentContext;
+import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
 import org.broadinstitute.gatk.engine.datasources.rmd.ReferenceOrderedDataSource;
-import org.broadinstitute.gatk.engine.refdata.RefMetaDataTracker;
-import org.broadinstitute.gatk.engine.refdata.utils.RODRecordList;
+import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
+import org.broadinstitute.gatk.utils.refdata.utils.RODRecordList;
 import org.broadinstitute.gatk.engine.walkers.Reference;
 import org.broadinstitute.gatk.engine.walkers.RodWalker;
 import org.broadinstitute.gatk.engine.walkers.Window;
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/readutils/ClipReads.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/readutils/ClipReads.java
index de3c0dc..f8b46b6 100644
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/readutils/ClipReads.java
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/readutils/ClipReads.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -34,9 +34,9 @@ import org.broadinstitute.gatk.utils.commandline.Argument;
 import org.broadinstitute.gatk.utils.commandline.Hidden;
 import org.broadinstitute.gatk.utils.commandline.Output;
 import org.broadinstitute.gatk.engine.CommandLineGATK;
-import org.broadinstitute.gatk.engine.contexts.ReferenceContext;
-import org.broadinstitute.gatk.engine.io.GATKSAMFileWriter;
-import org.broadinstitute.gatk.engine.refdata.RefMetaDataTracker;
+import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
+import org.broadinstitute.gatk.utils.sam.GATKSAMFileWriter;
+import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
 import org.broadinstitute.gatk.engine.walkers.DataSource;
 import org.broadinstitute.gatk.engine.walkers.ReadWalker;
 import org.broadinstitute.gatk.engine.walkers.Requires;
@@ -111,7 +111,7 @@ import java.util.regex.Pattern;
  *     </pre>
  * </p>
  *
- * <h3>Example</h3>
+ * <h3>Usage example</h3>
  * <pre>
  *   java -jar GenomeAnalysisTK.jar \
  *     -T ClipReads \
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/readutils/PrintReads.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/readutils/PrintReads.java
index f271fe9..6aede1b 100644
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/readutils/PrintReads.java
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/readutils/PrintReads.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -28,18 +28,19 @@ package org.broadinstitute.gatk.tools.walkers.readutils;
 import htsjdk.samtools.SAMFileHeader;
 import htsjdk.samtools.SAMFileWriter;
 import htsjdk.samtools.SAMReadGroupRecord;
+import org.broadinstitute.gatk.engine.io.NWaySAMFileWriter;
 import org.broadinstitute.gatk.engine.walkers.*;
 import org.broadinstitute.gatk.utils.commandline.Argument;
 import org.broadinstitute.gatk.utils.commandline.Hidden;
 import org.broadinstitute.gatk.utils.commandline.Output;
 import org.broadinstitute.gatk.engine.CommandLineGATK;
 import org.broadinstitute.gatk.engine.GenomeAnalysisEngine;
-import org.broadinstitute.gatk.engine.contexts.ReferenceContext;
-import org.broadinstitute.gatk.engine.io.GATKSAMFileWriter;
+import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
+import org.broadinstitute.gatk.utils.sam.GATKSAMFileWriter;
 import org.broadinstitute.gatk.engine.iterators.ReadTransformer;
 import org.broadinstitute.gatk.engine.iterators.ReadTransformersMode;
-import org.broadinstitute.gatk.engine.refdata.RefMetaDataTracker;
-import org.broadinstitute.gatk.utils.SampleUtils;
+import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
+import org.broadinstitute.gatk.engine.SampleUtils;
 import org.broadinstitute.gatk.utils.Utils;
 import org.broadinstitute.gatk.utils.baq.BAQ;
 import org.broadinstitute.gatk.utils.exceptions.UserException;
@@ -51,17 +52,18 @@ import java.io.File;
 import java.util.*;
 
 /**
- * Renders, in SAM/BAM format, all reads from the input data set in the order in which they appear in the input file.
+ * Write out sequence read data (for filtering, merging, subsetting etc)
  *
  * <p>
- * PrintReads can dynamically merge the contents of multiple input BAM files, resulting
- * in merged output sorted in coordinate order.  Can also optionally filter reads based on the
- * --read_filter command line argument.
+ * PrintReads is a generic utility tool for manipulating sequencing data in SAM/BAM format. It can dynamically
+ * merge the contents of multiple input BAM files, resulting in merged output sorted in coordinate order. It can
+ * also optionally filter reads based on various read properties such as read group tags using the `--read_filter/-rf`
+ * command line argument (see documentation on read filters for more information).
  * </p>
  *
  * <p>
  * Note that when PrintReads is used as part of the Base Quality Score Recalibration workflow,
- * it takes the --BQSR engine argument, which is listed under Inherited Arguments > CommandLineGATK below.
+ * it takes the `--BQSR` engine argument, which is listed under Inherited Arguments > CommandLineGATK below.
  * </p>
  *
  * <h3>Input</h3>
@@ -74,30 +76,31 @@ import java.util.*;
  * A single processed bam file.
  * </p>
  *
- * <h3>Examples</h3>
+ * <h3>Usage examples</h3>
  * <pre>
- * java -Xmx2g -jar GenomeAnalysisTK.jar \
- *   -R ref.fasta \
+ * // Prints all reads that have a mapping quality above zero
+ * java -jar GenomeAnalysisTK.jar \
  *   -T PrintReads \
- *   -o output.bam \
+ *   -R reference.fasta \
  *   -I input1.bam \
  *   -I input2.bam \
+ *   -o output.bam \
  *   --read_filter MappingQualityZero
  *
  * // Prints the first 2000 reads in the BAM file
- * java -Xmx2g -jar GenomeAnalysisTK.jar \
- *   -R ref.fasta \
+ * java -jar GenomeAnalysisTK.jar \
  *   -T PrintReads \
- *   -o output.bam \
+ *   -R reference.fasta \
  *   -I input.bam \
+ *   -o output.bam \
  *   -n 2000
  *
  * // Downsamples BAM file to 25%
- * java -Xmx2g -jar GenomeAnalysisTK.jar \
- *   -R ref.fasta \
+ * java -jar GenomeAnalysisTK.jar \
  *   -T PrintReads \
- *   -o output.bam \
+ *   -R reference.fasta \
  *   -I input.bam \
+ *   -o output.bam \
  *   -dfrac 0.25
  * </pre>
  *
@@ -141,11 +144,11 @@ public class PrintReads extends ReadWalker<GATKSAMRecord, SAMFileWriter> impleme
     /**
      * Erase all extra attributes in the read but keep the read group information 
      */
-    @Argument(fullName="simplify", shortName="s", doc="Simplify all reads.", required=false)
+    @Argument(fullName="simplify", shortName="s", doc="Simplify all reads", required=false)
     public boolean simplifyReads = false;
 
     @Hidden
-    @Argument(fullName = "no_pg_tag", shortName = "npt", doc ="", required = false)
+    @Argument(fullName = "no_pg_tag", shortName = "npt", doc ="Don't output a program tag", required = false)
     public boolean NO_PG_TAG = false;
 
     List<ReadTransformer> readTransformers = Collections.emptyList();
@@ -181,7 +184,7 @@ public class PrintReads extends ReadWalker<GATKSAMRecord, SAMFileWriter> impleme
         if (!sampleNames.isEmpty())
             samplesToChoose.addAll(sampleNames);
 
-        random = GenomeAnalysisEngine.getRandomGenerator();
+        random = Utils.getRandomGenerator();
 
         if (toolkit != null) {
             final SAMFileHeader outputHeader = toolkit.getSAMFileHeader().clone();
@@ -193,7 +196,7 @@ public class PrintReads extends ReadWalker<GATKSAMRecord, SAMFileWriter> impleme
             //Add the program record (if appropriate) and set up the writer
             final boolean preSorted = true;
             if (toolkit.getArguments().BQSR_RECAL_FILE != null && !NO_PG_TAG ) {
-                Utils.setupWriter(out, toolkit, outputHeader, preSorted, this, PROGRAM_RECORD_NAME);
+                NWaySAMFileWriter.setupWriter(out, toolkit, outputHeader, preSorted, this, PROGRAM_RECORD_NAME);
             } else {
                 out.writeHeader(outputHeader);
                 out.setPresorted(preSorted);
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/readutils/ReadAdaptorTrimmer.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/readutils/ReadAdaptorTrimmer.java
deleted file mode 100644
index 6f0ee85..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/readutils/ReadAdaptorTrimmer.java
+++ /dev/null
@@ -1,395 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.tools.walkers.readutils;
-
-import com.google.java.contract.Ensures;
-import com.google.java.contract.Requires;
-import htsjdk.samtools.SAMFileWriter;
-import org.apache.log4j.Logger;
-import org.broadinstitute.gatk.engine.walkers.NanoSchedulable;
-import org.broadinstitute.gatk.engine.walkers.PartitionBy;
-import org.broadinstitute.gatk.engine.walkers.PartitionType;
-import org.broadinstitute.gatk.engine.walkers.ReadWalker;
-import org.broadinstitute.gatk.utils.commandline.Advanced;
-import org.broadinstitute.gatk.utils.commandline.Argument;
-import org.broadinstitute.gatk.utils.commandline.Hidden;
-import org.broadinstitute.gatk.utils.commandline.Output;
-import org.broadinstitute.gatk.engine.CommandLineGATK;
-import org.broadinstitute.gatk.engine.contexts.ReferenceContext;
-import org.broadinstitute.gatk.engine.refdata.RefMetaDataTracker;
-import org.broadinstitute.gatk.utils.BaseUtils;
-import org.broadinstitute.gatk.utils.collections.Pair;
-import org.broadinstitute.gatk.utils.help.DocumentedGATKFeature;
-import org.broadinstitute.gatk.utils.help.HelpConstants;
-import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
-
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-/**
- * Utility tool to blindly strip base adaptors. Main application is for FASTQ/unaligned BAM pre-processing where libraries
- * have very short inserts, and hence a substantial part of the sequencing data will have adaptor sequence present.
- * <p>
- * By design, tool will only work for Illumina-like library constructs, where the typical library architecture is:
- * [Adaptor 1]-[Genomic Insert]-[Adaptor 2 (index/barcode)]
- * <p>
- * It is assumed that when data is paired, one read will span the forward strand and one read will span the reverse strand.
- * Hence, when specifying adaptors they should be specified as both forward and reverse-complement to make sure they're removed in all cases.
- * By design, as well, "circular" constructions where a read can have an insert, then adaptor, then more genomic insert, are not supported.
- * When an adaptor is detected, all bases downstream from it (i.e. in the 3' direction) will be removed.
- * Adaptor detection is carried out by looking for overlaps between forward and reverse reads in a pair.
- * If a sufficiently high overlap is found, the insert size is computed and if insert size < read lengths adaptor bases are removed from reads.
- *
- * Advantages over ReadClipper:
- * - No previous knowledge of adaptors or library structure is necessary
- *
- * Advantages over 3rd party tools like SeqPrep:
- * - Can do BAM streaming instead of having to convert to fastq
- * - No need to merge reads - merging reads can have some advantages, but complicates downstream processing and loses information that can be used,
- *   e.g. in variant calling
- * <p>
- *
- * <h2>Input</h2>
- * <p>
- * The input read data in BAM format. Read data MUST be in query name ordering as produced, for example with Picard's FastqToBam
- *
- * <h2>Output</h2>
- * <p>
- * A merged BAM file with unaligned reads
- * </p>
- *
- * <h2>Examples</h2>
- * <pre>
- * java -Xmx4g -jar GenomeAnalysisTK.jar \
- *   -T ReadAdaptorTrimmer \
- *   -I my_reads.bam \
- *   -R resources/Homo_sapiens_assembly18.fasta \
- *   -o trimmed_Reads.bam
- * </pre>
- */
-
-@DocumentedGATKFeature( groupName = HelpConstants.DOCS_CAT_DATA, extraDocs = {CommandLineGATK.class} )
-@PartitionBy(PartitionType.READ)
-public class ReadAdaptorTrimmer extends ReadWalker<List<GATKSAMRecord>, SAMFileWriter> implements NanoSchedulable {
-    @Output(doc="Write output to this BAM filename instead of STDOUT", required = false)
-    SAMFileWriter out;
-
-     /**
-     * Only prints the first n reads of the file - for short testing
-     */
-     @Hidden
-    @Argument(fullName = "number", shortName = "n", doc="Print the first n reads from the file, discarding the rest", required = false)
-    int nReadsToPrint = -1;
-
-    /**
-     * Argument to control strictness of match between forward and reverse reads - by default, we require 15 matches between them to declare
-     * an overlap.
-     */
-    @Advanced
-    @Argument(fullName = "minMatches", shortName = "minMatches", doc="Minimum number of substring matches to detect pair overlaps", required = false)
-    int minMatchesForOverlap = 15;
-
-
-    /**
-     * If true, this argument will make the walker discard unpaired reads instead of erroring out.
-     */
-    @Advanced
-    @Argument(fullName = "removeUnpairedReads", shortName = "removeUnpairedReads", doc="Remove unpaired reads instead of erroring out", required = false)
-    boolean cleanUnpairedReads = false;
-
-     /**
-     * private class members
-     */
-    private GATKSAMRecord firstReadInPair;
-    private TrimStats trimStats = new TrimStats();
-
-    static class TrimStats {
-        long numReadsProcessed;
-        long numReadsWithAdaptorTrimmed;
-        long numUnpairedReadsFound;
-    }
-
-   /**
-     * The reads filter function.
-     *
-     * @param ref  the reference bases that correspond to our read, if a reference was provided
-     * @param read the read itself, as a GATKSAMRecord
-     * @return true if the read passes the filter, false if it doesn't
-     */
-    public boolean filter(ReferenceContext ref, GATKSAMRecord read) {
-         // check if we've reached the output limit
-        if ( nReadsToPrint == 0 ) {
-            return false;          // n == 0 means we've printed all we needed.
-        }
-        else if (nReadsToPrint > 0) {
-            nReadsToPrint--;       // n > 0 means there are still reads to be printed.
-        }
-        return true;
-    }
-    /**
-     * reduceInit is called once before any calls to the map function.  We use it here to setup the output
-     * bam file, if it was specified on the command line
-     *
-     * @return SAMFileWriter, set to the BAM output file if the command line option was set, null otherwise
-     */
-    public SAMFileWriter reduceInit() {
-        return out;
-    }
-
-    public List<GATKSAMRecord> map( final ReferenceContext ref, final GATKSAMRecord readIn, final RefMetaDataTracker metaDataTracker ) {
-
-
-        final List<GATKSAMRecord> readsToEmit = new ArrayList<GATKSAMRecord>();
-
-
-        // cache first read in pair if flag set.
-        if (readIn.getFirstOfPairFlag()) {
-            firstReadInPair = GATKSAMRecord.emptyRead(readIn);
-            firstReadInPair.setReadString(readIn.getReadString());
-            firstReadInPair.setReadName(readIn.getReadName());
-            firstReadInPair.setBaseQualities(readIn.getBaseQualities());
-        }
-        else {
-            if (!readIn.getReadName().equals(firstReadInPair.getReadName()))  {
-                if (cleanUnpairedReads) {
-                    trimStats.numUnpairedReadsFound++;
-                    return readsToEmit;
-                }
-                else // by default require that reads be completely paired
-                    throw new IllegalStateException("Second read in pair must follow first read in pair: data not ordered?");
-            }
-
-            final int oldLength1 = firstReadInPair.getReadLength();
-            final int oldLength2 = readIn.getReadLength();
-            // try to strip any adaptor sequence in read pair
-            final Integer result = trimReads(firstReadInPair, readIn, minMatchesForOverlap, logger);
-
-            if (logger.isDebugEnabled()) {
-                if (result == null)
-                    logger.debug("No overlap found, insert size cannot be computed");
-                else
-                    logger.debug("Insert size estimate = " + result);
-
-            }
-
-
-            readsToEmit.add(firstReadInPair);
-            readsToEmit.add(readIn);
-
-            if (oldLength1 != firstReadInPair.getReadLength())
-                trimStats.numReadsWithAdaptorTrimmed++;
-            if (oldLength2 != readIn.getReadLength())
-                trimStats.numReadsWithAdaptorTrimmed++;
-
-         }
-
-
-        trimStats.numReadsProcessed++;
-        return readsToEmit;
-
-    }
-
-    /**
-     * given a read and a output location, reduce by emitting the read
-     *
-     * @param readsToEmit   the read itself
-     * @param output the output source
-     * @return the SAMFileWriter, so that the next reduce can emit to the same source
-     */
-    public SAMFileWriter reduce( final List<GATKSAMRecord> readsToEmit, final SAMFileWriter output ) {
-        for (final GATKSAMRecord read : readsToEmit)
-             output.addAlignment(read);
-
-        return output;
-    }
-
-    @Override
-    public void onTraversalDone(SAMFileWriter output) {
-
-        logger.info("Finished Trimming:");
-        logger.info("Number of processed reads:                     "+ trimStats.numReadsProcessed);
-        logger.info("Number of reads with adaptor sequence trimmed: "+ trimStats.numReadsWithAdaptorTrimmed);
-        if (cleanUnpairedReads)
-            logger.info("Number of unpaired reads thrown out: "+ trimStats.numUnpairedReadsFound);
-    }
-
-
-    /**
-     *
-     * Workhorse routines...
-     *
-     */
-        /**
-         * Core routine that does most underlying work for walker. Takes two reads and looks for overlaps in them.
-         * An overlap is defined as a contiguous chunk of N bases that matches reverse-complement between reads.
-         * Currently, the only insert structure that it will look for overlaps is as follows:
-         * CASE 1: Insert shorter than read length:
-         * 3' XXXXXXXXXXXXXXXX 5'            (second read)
-         * 5'      YYYYYYYYYYYYYYYY 3'       (first read)
-         *         ***********
-         *
-         * In this case, if X and Y are complements at the 11 positions marked by *, routine will do the following
-         * iff minMatchesForOverlap <= 11:
-         *  a) Cleave adaptor from end of second read (leftmost dangling part in diagram above)
-         *  b) Cleave adaptor from end of first read (rightmost part in diagram).
-         *
-         * CASE 2: Insert size >= read length:
-         * 3'             XXXXXXXXXXXXXXXX 5'           (second read)
-         * 5'      YYYYYYYYYYYYYYYY 3'                  (first read)
-         *                *********                        (overlap)
-         *
-         * In this case, no trimming is done and reads are left unchanged
-         * @param first                      (I/O) First read in pair - read contents (bases/quals) can be modified if adaptor is detected
-         * @param second                     (I/O) Second read in pair - read contents (bases/quals) can be modified if adaptor is detected
-         * @param minMatchesForOverlap       Reads need to match in these # of bases to be joined
-         * @return                           Offset between second and first read.
-         *                                   If there's no detectable offset, return Null
-         */
-    @Requires({"first != null","second != null","minMatchesForOverlap>0"})
-    protected static Integer trimReads(final GATKSAMRecord first,
-                                       final GATKSAMRecord second,
-                                       final int minMatchesForOverlap,
-                                       final Logger logger) {
-
-        final Integer insertSize = estimateInsertSize(first.getReadBases(), second.getReadBases(),
-                minMatchesForOverlap, logger);
-
-        if (insertSize == null)
-            return insertSize;
-        if (insertSize < first.getReadLength()) {
-            // trim adaptor sequence from read
-            first.setReadBases(Arrays.copyOfRange(first.getReadBases(),0,insertSize));
-            first.setBaseQualities(Arrays.copyOfRange(first.getBaseQualities(),0,insertSize));
-        }
-        if (insertSize < second.getReadLength()) {
-            // trim adaptor sequence from read
-            second.setReadBases(Arrays.copyOfRange(second.getReadBases(),0,insertSize));
-            second.setBaseQualities(Arrays.copyOfRange(second.getBaseQualities(),0,insertSize));
-        }
-        return insertSize;
-    }
-
-    /**
-    * Brain-dead implementation of an aligner of two sequences, where it's assumed that there might be an overlap
-    * from the first into the second. From this, an estimate of insert size is performed and returned
-    * Assumes that reads come in reverse direction, so one of the base sequences needs to be reverse-complemented.]
-    *
-    * @param firstRead                           Bytes from first read
-    * @param secondRead                          Bytes from second read (reverse direction)
-    * @return                                  Estimated insert size based on offset between first and second read.
-    *                                          If no overlap can be detected, return null
-    */
-
-    @Requires({"firstRead != null","secondRead != null","minMatches>0","firstRead.length == secondRead.length"})
-    protected static Integer estimateInsertSize(final byte[] firstRead,
-                                                                final byte[] secondRead,
-                                                                final int minMatches,
-                                                                final Logger logger) {
-        final byte[] firstBases = firstRead;
-        final byte[] secondBases = BaseUtils.simpleReverseComplement(secondRead);
-
-        final Pair<Integer,Integer> overlaps = findOverlappingSequence(firstBases, secondBases);
-        final int bestOffset = overlaps.first;
-        final int maxScore = overlaps.second;
-        if ( logger.isDebugEnabled()) {
-            String sb="", s1 = new String(firstBases), s2 = new String(secondBases);
-            for (int k=0; k < Math.abs(bestOffset); k++) sb+=" ";
-            if (maxScore >= minMatches) {
-                logger.debug(String.format("Match, Max Score = %d, best offset = %d\n",maxScore, bestOffset));
-                if (bestOffset>0)
-                    s2 = sb+s2;
-                else
-                    s1 = sb+s1;
-            }
-            else logger.debug("NoMatch:");
-            logger.debug("R1:"+s1);
-            logger.debug("R2:"+s2);
-
-
-        }
-
-        if (maxScore < minMatches)
-            return null; // no overlap detected
-
-        return bestOffset+secondRead.length;
-
-
-    }
-
-
-     /**
-     * Tries to find overlapping sequence between two reads, and computes offset between them
-      * For each possible offset, computes matching score, which is = MATCH_SCORE*Num_matches + MISMATCH_SCORE*num_mismatches
-      * (like SW with infinite gap penalties).
-     * @param first                              First read bytes
-     * @param second                             Second read bytes
-     * @return                                   Pair of integers (x,y). x = best offset between reads, y = corresponding score
-     */
-    @Requires({"first != null","second != null"})
-    @Ensures("result != null")
-    protected static Pair<Integer,Integer> findOverlappingSequence(final byte[] first,
-                                                 final byte[] second) {
-        final int MATCH_SCORE = 1;
-        final int MISMATCH_SCORE = -1;
-        // try every possible offset - O(N^2) algorithm
-
-        // In case of following structure,
-        //      111111111
-        // 222222222
-        // computed offset will be negative (=-5 in this case).
-        // If however,
-        //   111111111
-        //      222222222
-        // then offset will be positive (=3 in this case)
-        int maxScore = 0, bestOffset =0;
-        for (int offset = -second.length; offset < first.length; offset++) {
-            int score = 0;
-            // compute start index for each array
-            int ind1 = (offset<0)?0:offset;
-            int ind2 = (offset<0)?-offset:0;
-            for (int k=0; k < Math.min(first.length, second.length) ; k++) {
-                if (ind1 >= first.length)
-                    break;
-                if (ind2 >= second.length )
-                    break;
-                if (first[ind1] != 'N' && second[ind2] != 'N')  {
-                    if (first[ind1] == second[ind2])
-                        score += MATCH_SCORE;
-                    else
-                        score += MISMATCH_SCORE;
-                }
-                ind1++;
-                ind2++;
-            }
-            if (score > maxScore) {
-                maxScore = score;
-                bestOffset = offset;
-            }
-        }
-        return new Pair<Integer, Integer>(bestOffset,maxScore);
-    }
-
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/readutils/SplitSamFile.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/readutils/SplitSamFile.java
index f4ee4a4..029d8b8 100644
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/readutils/SplitSamFile.java
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/readutils/SplitSamFile.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -30,10 +30,13 @@ import htsjdk.samtools.SAMFileWriter;
 import htsjdk.samtools.SAMReadGroupRecord;
 import htsjdk.samtools.SAMRecord;
 import org.apache.log4j.Logger;
+import org.broadinstitute.gatk.engine.io.DirectOutputTracker;
+import org.broadinstitute.gatk.engine.io.OutputTracker;
+import org.broadinstitute.gatk.engine.io.stubs.SAMFileWriterStub;
 import org.broadinstitute.gatk.utils.commandline.Argument;
 import org.broadinstitute.gatk.engine.CommandLineGATK;
-import org.broadinstitute.gatk.engine.contexts.ReferenceContext;
-import org.broadinstitute.gatk.engine.refdata.RefMetaDataTracker;
+import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
+import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
 import org.broadinstitute.gatk.engine.walkers.DataSource;
 import org.broadinstitute.gatk.engine.walkers.ReadWalker;
 import org.broadinstitute.gatk.engine.walkers.Requires;
@@ -41,7 +44,6 @@ import org.broadinstitute.gatk.engine.walkers.WalkerName;
 import org.broadinstitute.gatk.utils.help.DocumentedGATKFeature;
 import org.broadinstitute.gatk.utils.help.HelpConstants;
 import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
-import org.broadinstitute.gatk.utils.sam.ReadUtils;
 
 import java.util.ArrayList;
 import java.util.HashMap;
@@ -49,8 +51,29 @@ import java.util.List;
 import java.util.Map;
 
 /**
- * Divides the input data set into separate BAM files, one for each sample in the input data set.  The split
- * files are named concatenating the sample name to the end of the provided outputRoot command-line argument.
+ * Split a BAM file by sample
+ *
+ * <p>This tool divides the input data set into separate BAM files, one for each sample in the input data set. The split
+ * files are named by concatenating the sample name to the end of the provided outputRoot command-line argument.</p>
+ *
+ * <h3>Input</h3>
+ * <p>
+ * A single bam file.
+ * </p>
+ *
+ * <h3>Output</h3>
+ * <p>
+ * A separate bam file for each sample.
+ * </p>
+ *
+ * <h3>Usage example</h3>
+ * <pre>
+ * java -jar GenomeAnalysisTK.jar \
+ *   -T SplitSamFile \
+ *   -R reference.fasta \
+ *   -I input.bam \
+ *   --outputRoot myproject_
+ * </pre>
  */
 @DocumentedGATKFeature( groupName = HelpConstants.DOCS_CAT_DATA, extraDocs = {CommandLineGATK.class} )
 @WalkerName("SplitSamFile")
@@ -104,13 +127,15 @@ public class SplitSamFile extends ReadWalker<SAMRecord, Map<String, SAMFileWrite
         }
 
         HashMap<String, SAMFileWriter> outputs = new HashMap<>();
+        final OutputTracker outputTracker = new DirectOutputTracker();
         for ( Map.Entry<String, SAMFileHeader> elt : headers.entrySet() ) {
             final String sample = elt.getKey();
             final String filename = outputRoot + sample + ".bam";
             logger.info(String.format("Creating BAM output file %s for sample %s", filename, sample));
 
-            final SAMFileWriter output = ReadUtils.createSAMFileWriter(filename, getToolkit(), elt.getValue());
+            final SAMFileWriter output = SAMFileWriterStub.createSAMFileWriter(filename, getToolkit(), elt.getValue());
             outputs.put(sample, output);
+            outputTracker.addOutput( (SAMFileWriterStub) output);
         }
 
         return outputs;
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/rnaseq/ASEReadCounter.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/rnaseq/ASEReadCounter.java
new file mode 100644
index 0000000..1e151ad
--- /dev/null
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/rnaseq/ASEReadCounter.java
@@ -0,0 +1,311 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.tools.walkers.rnaseq;
+
+import htsjdk.variant.variantcontext.VariantContext;
+import org.broadinstitute.gatk.engine.CommandLineGATK;
+import org.broadinstitute.gatk.engine.walkers.Downsample;
+import org.broadinstitute.gatk.engine.walkers.LocusWalker;
+import org.broadinstitute.gatk.tools.walkers.coverage.CoverageUtils;
+import org.broadinstitute.gatk.utils.commandline.*;
+import org.broadinstitute.gatk.utils.contexts.AlignmentContext;
+import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
+import org.broadinstitute.gatk.utils.downsampling.DownsampleType;
+import org.broadinstitute.gatk.utils.exceptions.UserException;
+import org.broadinstitute.gatk.utils.help.DocumentedGATKFeature;
+import org.broadinstitute.gatk.utils.help.HelpConstants;
+import org.broadinstitute.gatk.utils.pileup.PileupElement;
+import org.broadinstitute.gatk.utils.pileup.ReadBackedPileup;
+import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
+
+import java.io.PrintStream;
+import java.util.List;
+
+/**
+ * Calculate read counts per allele for allele-specific expression analysis
+ *
+ * <p>
+ * This tool calculates allele counts at a set of positions after applying filters that are tuned for enabling
+ * allele-specific expression (ASE) analysis. The filters operate on mapping quality, base quality, depth of coverage,
+ * overlapping paired reads and deletions overlapping the position. All thresholds and options are controlled by
+ * command-line arguments.
+ * </p>
+ *
+ * <h3>Input</h3>
+ * <ul>
+ *     <li>BAM files (with proper headers) to be analyzed for ASE</li>
+ *     <li>A VCF file with specific sites to process.</li>
+ * </ul>
+ *
+ * <h3>Output</h3>
+ * <p>
+ * A table of allele counts at the given sites. By default, it is formatted as a tab-delimited text file
+ * that is readable by R and compatible with <a href="http://www.well.ox.ac.uk/~rivas/mamba/">Mamba</a>,
+ * a downstream tool developed for allele-specific expression analysis.
+ * </p>
+ *
+ * <h3>Usage example</h3>
+ * <pre>
+ * java -jar GenomeAnalysisTK.jar \
+ *   -R reference.fasta \
+ *   -T ASEReadCounter \
+ *   -o file_name.csv \
+ *   -I input.bam \
+ *   -sites sites.vcf \
+ *   -U ALLOW_N_CIGAR_READS \
+ *   [-minDepth 10] \
+ *   [--minMappingQuality 10] \
+ *   [--minBaseQuality 2] \
+ *   [-drf DuplicateRead]
+ * </pre>
+ *
+ * <h3>Note</h3>
+ * <ul>
+ *     <li>Like most GATK tools, this tools filters out duplicate reads by default. However, some ASE methods
+ *     recommend including duplicate reads in the analysis, so the DuplicateRead filter can be disabled using the
+ *     "-drf DuplicateRead" flag in the command-line.</li>
+ * </ul>
+ * <h3>Caveat</h3>
+ * <ul>
+ *     <li>This tool will only process biallelic sites. If your callset contains multiallelic sites, they will be ignored.
+ *     Optionally, you can subset your callset to just biallelic variants using e.g.
+ *     <a href="org_broadinstitute_gatk_tools_walkers_variantutils_SelectVariants.php">SelectVariants</a>
+ *     with the option "-restrictAllelesTo BIALLELIC".</li>
+ * </ul>
+ *
+ */
+ at DocumentedGATKFeature( groupName = HelpConstants.DOCS_CAT_QC, extraDocs = {CommandLineGATK.class} )
+ at Downsample(by = DownsampleType.BY_SAMPLE, toCoverage = 10000)
+//@DisabledReadFilters({DuplicateReadFilter.class})  //currently can be disabled using the command line argument -drf DuplicateRead
+public class ASEReadCounter extends LocusWalker<String, Integer> {
+
+    @Output
+    public PrintStream out;
+
+    @Input (fullName = "sitesVCFFile",shortName = "sites")
+    public RodBinding<VariantContext> sites;
+
+    /**
+     * If this argument is enabled, loci with total depth lower than this threshold after all filters have been applied
+     * will be skipped. This is set to -1 by default to disable the evaluation and ignore this threshold.
+     */
+    @Argument(fullName = "minDepthOfNonFilteredBase", shortName = "minDepth", doc = "Minimum number of bases that pass filters", required = false, minValue = 0, maxValue = Integer.MAX_VALUE)
+    public int minDepthOfNonFilteredBases = -1;
+
+    /**
+     * If this argument is enabled, reads with mapping quality values lower than this threshold will not be counted.
+     * This is set to -1 by default to disable the evaluation and ignore this threshold.
+     */
+    @Argument(fullName = "minMappingQuality", shortName = "mmq", doc = "Minimum read mapping quality", required = false, minValue = 0, maxValue = Integer.MAX_VALUE)
+    public int minMappingQuality = 0;
+
+    /**
+     * If this argument is enabled, bases with quality scores lower than this threshold will not be counted.
+     * This is set to -1 by default to disable the evaluation and ignore this threshold.
+     */
+    @Argument(fullName = "minBaseQuality", shortName = "mbq", doc = "Minimum base quality", required = false, minValue = 0, maxValue = Byte.MAX_VALUE)
+    public byte minBaseQuality = 0;
+
+    /**
+     * These options modify how the tool deals with overlapping read pairs.
+     * COUNT_READS -  Count all reads independently, even if they are from the same fragment.
+     * COUNT_FRAGMENTS - Count all fragments, even if the reads that compose the fragment are not consistent at that base.
+     * COUNT_FRAGMENTS_REQUIRE_SAME_BASE - Count all fragments, but only if the reads that compose the fragment are consistent at that base (default).
+     */
+    @Argument(fullName = "countOverlapReadsType", shortName = "overlap", doc = "Handling of overlapping reads from the same fragment", required = false)
+    public CoverageUtils.CountPileupType countType = CoverageUtils.CountPileupType.COUNT_FRAGMENTS_REQUIRE_SAME_BASE;
+
+    /**
+     * Available options are csv, table, rtable. By default, the format is an r-readable table.
+     */
+    @Argument(fullName = "outputFormat", doc = "Format of the output file, can be CSV, TABLE, RTABLE", required = false)
+    public OUTPUT_FORMAT outputFormat = OUTPUT_FORMAT.RTABLE;
+
+    /**
+     * Consider a spanning deletion as contributing to coverage. Also enables deletion counts in per-base output.
+     */
+    @Advanced
+    @Argument(fullName = "includeDeletions", shortName = "dels", doc = "Include information on deletions", required = false)
+    public boolean includeDeletions = false;
+
+    @Advanced
+    @Argument(fullName = "ignoreDeletionSites", doc = "Ignore sites consisting only of deletions", required = false)
+    public boolean ignoreDeletionSites = false;
+
+    public String separator = "\t";
+
+    public enum OUTPUT_FORMAT{
+        TABLE,
+        RTABLE,
+        CSV
+    }
+
+    ////////////////////////////////////////////////////////////////////////////////////
+    // STANDARD WALKER METHODS
+    ////////////////////////////////////////////////////////////////////////////////////
+
+    public boolean includeReadsWithDeletionAtLoci() { return includeDeletions && ! ignoreDeletionSites; }
+
+    public void initialize() {
+
+        // Check the output format
+        boolean goodOutputFormat = false;
+        for ( final OUTPUT_FORMAT f : OUTPUT_FORMAT.values()) {
+            goodOutputFormat = goodOutputFormat || f.equals(outputFormat);
+        }
+
+        if ( ! goodOutputFormat ) {
+            throw new IllegalArgumentException("Improper output format. Can be one of TABLE, RTABLE, CSV. Was "+outputFormat);
+        }
+
+        if ( outputFormat.equals(OUTPUT_FORMAT.CSV) ) {
+            separator = ",";
+        }
+        final String header = "contig"+separator+"position"+separator+"variantID"+separator+"refAllele"+separator+"altAllele"+separator+"refCount"+separator+"altCount"+separator+"totalCount"+separator+"lowMAPQDepth"+separator+"lowBaseQDepth"+separator+"rawDepth"+separator+"otherBases"+separator+"improperPairs";
+        out.println(header);
+
+    }
+
+
+    @Override
+    public String map(final RefMetaDataTracker tracker, final ReferenceContext ref, final AlignmentContext context) {
+        if ( tracker == null )
+            return null;
+        final String contig = context.getLocation().getContig();
+        final long position = context.getPosition();
+
+        final char refAllele = (char)ref.getBase();
+
+        final List<VariantContext> VCs =  tracker.getValues(sites, context.getLocation());
+        if(VCs != null && VCs.size() > 1)
+            throw new UserException("More then one variant context at position: "+contig+":"+position);
+        if(VCs == null || VCs.size() == 0)
+            return null;
+
+        final VariantContext vc = VCs.get(0);
+        if(!vc.isBiallelic()) {
+            logger.warn("Ignoring site: cannot run ASE on non-biallelic sites: " + vc.toString());
+            return null;
+        }
+
+        final char altAllele = (char)vc.getAlternateAllele(0).getBases()[0];
+        final String siteID = vc.getID();
+        final ReadBackedPileup pileup = filterPileup(context.getBasePileup(), countType, includeReadsWithDeletionAtLoci());
+
+        // count up the depths of all and QC+ bases
+        return calculateLineForSite(pileup, contig, position, siteID, refAllele, altAllele);
+
+    }
+
+    protected ReadBackedPileup filterPileup(final ReadBackedPileup originalPileup, final CoverageUtils.CountPileupType countType, final boolean includeDeletions){
+
+        ReadBackedPileup pileupWithDeletions;
+        if(countType.equals(CoverageUtils.CountPileupType.COUNT_FRAGMENTS_REQUIRE_SAME_BASE))
+            pileupWithDeletions = originalPileup.getOverlappingFragmentFilteredPileup(true,true);
+        else if(countType.equals(CoverageUtils.CountPileupType.COUNT_READS))
+            pileupWithDeletions = originalPileup;
+        else if(countType.equals(CoverageUtils.CountPileupType.COUNT_FRAGMENTS))
+            pileupWithDeletions = originalPileup.getOverlappingFragmentFilteredPileup(false,true);
+        else
+            throw new UserException("Must use valid CountPileupType");
+
+        return includeDeletions ? pileupWithDeletions: pileupWithDeletions.getPileupWithoutDeletions();
+
+    }
+
+    protected String calculateLineForSite(final ReadBackedPileup pileup, final String contig, final long position, final String siteID, final char refAllele, final char altAllele){
+
+        int rawDepth = 0, lowBaseQDepth = 0, lowMAPQDepth = 0, refCount = 0, altCount = 0, totalNonFilteredCount = 0, otherBasesCount = 0, improperPairsCount = 0 ;
+
+        for (final PileupElement base : pileup) {
+            rawDepth++;
+
+            if (base.getRead().getReadPairedFlag() && (base.getRead().getMateUnmappedFlag() || !base.getRead().getProperPairFlag())){
+                improperPairsCount++;
+                continue;
+            }
+            if (base.getMappingQual() < minMappingQuality) {
+                lowMAPQDepth++;
+                continue;
+            }
+
+            if (base.getQual() < minBaseQuality) {
+                lowBaseQDepth++;
+                continue;
+            }
+
+            if(base.getBase() == refAllele)
+                refCount++;
+            else if(base.getBase() == altAllele)
+                altCount++;
+            else {
+                otherBasesCount++;
+                continue;
+            }
+            totalNonFilteredCount++;
+        }
+
+        if(totalNonFilteredCount < minDepthOfNonFilteredBases)
+            return null;
+
+        return contig +separator+
+                position +separator+
+                siteID +separator+
+                refAllele +separator+
+                altAllele +separator+
+                refCount +separator+
+                altCount +separator+
+                totalNonFilteredCount +separator+
+                lowMAPQDepth +separator+
+                lowBaseQDepth +separator+
+                rawDepth +separator+
+                otherBasesCount +separator+
+                improperPairsCount;
+    }
+
+    @Override
+    public Integer reduceInit() {
+        return 0;
+    }
+
+    @Override
+    public Integer reduce(String results, Integer sum) {
+        if(results!= null)
+            out.println(results);
+        return ++sum;
+    }
+
+    @Override
+    public void onTraversalDone(Integer sum) {
+        logger.info("Done processing "+sum+" loci");
+        out.close();
+    }
+
+
+
+
+
+}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/VariantEval.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/VariantEval.java
index 67c1fcb..1617f4f 100644
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/VariantEval.java
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/VariantEval.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -29,17 +29,19 @@ import com.google.java.contract.Requires;
 import htsjdk.samtools.reference.IndexedFastaSequenceFile;
 import htsjdk.samtools.util.IntervalTree;
 import htsjdk.samtools.SAMSequenceRecord;
+import oracle.jrockit.jfr.StringConstantPool;
 import org.apache.log4j.Logger;
 import htsjdk.tribble.Feature;
+import org.broadinstitute.gatk.engine.samples.Trio;
 import org.broadinstitute.gatk.engine.walkers.*;
+import org.broadinstitute.gatk.tools.walkers.varianteval.evaluators.*;
 import org.broadinstitute.gatk.utils.commandline.*;
 import org.broadinstitute.gatk.engine.CommandLineGATK;
 import org.broadinstitute.gatk.engine.GenomeAnalysisEngine;
 import org.broadinstitute.gatk.engine.arguments.DbsnpArgumentCollection;
-import org.broadinstitute.gatk.engine.contexts.AlignmentContext;
-import org.broadinstitute.gatk.engine.contexts.ReferenceContext;
-import org.broadinstitute.gatk.engine.refdata.RefMetaDataTracker;
-import org.broadinstitute.gatk.tools.walkers.varianteval.evaluators.VariantEvaluator;
+import org.broadinstitute.gatk.utils.contexts.AlignmentContext;
+import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
+import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
 import org.broadinstitute.gatk.tools.walkers.varianteval.stratifications.IntervalStratification;
 import org.broadinstitute.gatk.tools.walkers.varianteval.stratifications.VariantStratifier;
 import org.broadinstitute.gatk.tools.walkers.varianteval.stratifications.manager.StratificationManager;
@@ -47,9 +49,9 @@ import org.broadinstitute.gatk.tools.walkers.varianteval.util.EvaluationContext;
 import org.broadinstitute.gatk.tools.walkers.varianteval.util.SortableJexlVCMatchExp;
 import org.broadinstitute.gatk.tools.walkers.varianteval.util.VariantEvalUtils;
 import org.broadinstitute.gatk.utils.GenomeLoc;
-import org.broadinstitute.gatk.utils.SampleUtils;
+import org.broadinstitute.gatk.engine.SampleUtils;
 import org.broadinstitute.gatk.utils.help.HelpConstants;
-import org.broadinstitute.gatk.utils.variant.GATKVCFUtils;
+import org.broadinstitute.gatk.engine.GATKVCFUtils;
 import org.broadinstitute.gatk.utils.variant.GATKVariantContextUtils;
 import htsjdk.variant.vcf.VCFHeader;
 import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
@@ -76,6 +78,7 @@ import java.util.*;
  * degeneracy of the site, etc. VariantEval facilitates these calculations in two ways: by providing several built-in
  * evaluation and stratification modules, and by providing a framework that permits the easy development of new evaluation
  * and stratification modules.
+ * </p>
  *
  * <h3>Input</h3>
  * <p>
@@ -86,8 +89,9 @@ import java.util.*;
  * <p>
  * Evaluation tables detailing the results of the eval modules which were applied.
  * For example:
+ * </p>
  * <pre>
- * output.eval.gatkreport:
+ * output.eval.grp:
  * ##:GATKReport.v0.1 CountVariants : Counts different classes of variants in the sample
  * CountVariants  CompRod   CpG      EvalRod  JexlExpression  Novelty  nProcessedLoci  nCalledLoci  nRefLoci  nVariantLoci  variantRate ...
  * CountVariants  dbsnp     CpG      eval     none            all      65900028        135770       0         135770        0.00206024  ...
@@ -103,22 +107,36 @@ import java.util.*;
  * </pre>
  * </p>
  *
- * <h3>Examples</h3>
+ * <h3>Usage examples</h3>
  * <pre>
- * java -Xmx2g -jar GenomeAnalysisTK.jar \
- *   -R ref.fasta \
+ * java -jar GenomeAnalysisTK.jar \
  *   -T VariantEval \
- *   -o output.eval.gatkreport \
+ *   -R reference.fasta \
+ *   -o output.eval.grp \
  *   --eval:set1 set1.vcf \
  *   --eval:set2 set2.vcf \
  *   [--comp comp.vcf]
  * </pre>
  *
+ * Count Mendelian violations for each family in a callset with multiple families (and provided pedigree)
+ * <pre>
+ * java -jar GenomeAnalysisTK.jar \
+ *   -T VariantEval \
+ *   -R reference.fasta \
+ *   -o output.MVs.byFamily.table \
+ *   --eval multiFamilyCallset.vcf \
+ *   -noEV -noST \
+ *   -ST Family \
+ *   -EV MendelianViolationEvaluator
+ * </pre>
+ *
  * <h3>Caveat</h3>
  *
- * <p>Some stratifications and evaluators are incompatible with each other due to their respective memory requirements, such as AlleleCount and VariantSummary, or Sample and VariantSummary.
- * If you specify such a combination, the program will output an error message and ask you to disable one of these options.
- * We do not currently provide an exhaustive list of incompatible combinations, so we recommend trying out combinations that you are interested in on a dummy command line, to rapidly ascertain whether it will work or not.</p>
+ * <p>Some stratifications and evaluators are incompatible with each other due to their respective memory requirements,
+ * such as AlleleCount and VariantSummary, or Sample and VariantSummary. If you specify such a combination, the program
+ * will output an error message and ask you to disable one of these options. We do not currently provide an exhaustive
+ * list of incompatible combinations, so we recommend trying out combinations that you are interested in on a dummy
+ * command line, to rapidly ascertain whether it will work or not.</p>
  *
  */
 @DocumentedGATKFeature( groupName = HelpConstants.DOCS_CAT_VARMANIP, extraDocs = {CommandLineGATK.class} )
@@ -245,14 +263,18 @@ public class VariantEval extends RodWalker<Integer, Integer> implements TreeRedu
 
     private boolean isSubsettingSamples;
     private Set<String> sampleNamesForEvaluation = new LinkedHashSet<String>();
+    private Set<String> familyNamesForEvaluation = new LinkedHashSet<String>();
     private Set<String> sampleNamesForStratification = new LinkedHashSet<String>();
+    private Set<String> familyNamesForStratification = new LinkedHashSet<String>();
 
     // important stratifications
     private boolean byFilterIsEnabled = false;
     private boolean perSampleIsEnabled = false;
+    private boolean perFamilyIsEnabled = false;
 
     // Public constants
-    private static String ALL_SAMPLE_NAME = "all";
+    final private static String ALL_SAMPLE_NAME = "all";
+    final private static String ALL_FAMILY_NAME = "all";
 
     // the number of processed bp for this walker
     long nProcessedLoci = 0;
@@ -299,12 +321,22 @@ public class VariantEval extends RodWalker<Integer, Integer> implements TreeRedu
         final Set<String> allSampleNames = SampleUtils.getSamplesFromCommandLineInput(vcfSamples);
         sampleNamesForEvaluation.addAll(new TreeSet<String>(SampleUtils.getSamplesFromCommandLineInput(vcfSamples, SAMPLE_EXPRESSIONS)));
         isSubsettingSamples = ! sampleNamesForEvaluation.containsAll(allSampleNames);
+        familyNamesForEvaluation.addAll(getSampleDB().getFamilyIDs());
 
-        if (Arrays.asList(STRATIFICATIONS_TO_USE).contains("Sample")) {
+        //If stratifying by sample name, assign a stratification for each sample we're evaluating (based on commandline args)...
+        if (Arrays.asList(STRATIFICATIONS_TO_USE).contains("Sample") ) {
             sampleNamesForStratification.addAll(sampleNamesForEvaluation);
         }
+        //...and also a stratification for the sum over all samples
         sampleNamesForStratification.add(ALL_SAMPLE_NAME);
 
+        //If stratifying by family name, assign a stratification for each family...
+        if ( Arrays.asList(STRATIFICATIONS_TO_USE).contains("Family") ) {
+            familyNamesForStratification.addAll(familyNamesForEvaluation);
+        }
+        //...and also a stratification for the sum over all families
+        familyNamesForStratification.add(ALL_FAMILY_NAME);
+
         // Initialize select expressions
         for (VariantContextUtils.JexlVCMatchExp jexl : VariantContextUtils.initializeMatchExps(SELECT_NAMES, SELECT_EXPS)) {
             SortableJexlVCMatchExp sjexl = new SortableJexlVCMatchExp(jexl.name, jexl.exp);
@@ -323,8 +355,17 @@ public class VariantEval extends RodWalker<Integer, Integer> implements TreeRedu
                 byFilterIsEnabled = true;
             else if ( vs.getName().equals("Sample") )
                 perSampleIsEnabled = true;
+            else if ( vs.getName().equals("Family"))
+                perFamilyIsEnabled = true;
         }
 
+        if (perSampleIsEnabled && perFamilyIsEnabled)
+            throw new UserException.BadArgumentValue("ST", "Variants cannot be stratified by sample and family at the same time");
+
+        if (perFamilyIsEnabled && getSampleDB().getTrios().isEmpty())
+            throw new UserException.BadArgumentValue("ST", "Cannot stratify by family without *.ped file");
+
+
         if ( intervalsFile != null ) {
             boolean fail = true;
             for ( final VariantStratifier vs : stratificationObjects ) {
@@ -380,7 +421,7 @@ public class VariantEval extends RodWalker<Integer, Integer> implements TreeRedu
     public final Map<String, IntervalTree<GenomeLoc>> createIntervalTreeByContig(final IntervalBinding<Feature> intervals) {
         final Map<String, IntervalTree<GenomeLoc>> byContig = new HashMap<String, IntervalTree<GenomeLoc>>();
 
-        final List<GenomeLoc> locs = intervals.getIntervals(getToolkit());
+        final List<GenomeLoc> locs = intervals.getIntervals(getToolkit().getGenomeLocParser());
 
         // set up the map from contig -> interval tree
         for ( final String contig : getContigNames() )
@@ -416,17 +457,24 @@ public class VariantEval extends RodWalker<Integer, Integer> implements TreeRedu
 //            }
 
             //      --------- track ---------           sample  - VariantContexts -
-            HashMap<RodBinding<VariantContext>, HashMap<String, Collection<VariantContext>>> evalVCs = variantEvalUtils.bindVariantContexts(tracker, ref, evals, byFilterIsEnabled, true, perSampleIsEnabled, mergeEvals);
-            HashMap<RodBinding<VariantContext>, HashMap<String, Collection<VariantContext>>> compVCs = variantEvalUtils.bindVariantContexts(tracker, ref, comps, byFilterIsEnabled, false, false, false);
+            HashMap<RodBinding<VariantContext>, HashMap<String, Collection<VariantContext>>> evalVCs = variantEvalUtils.bindVariantContexts(tracker, ref, evals, byFilterIsEnabled, true, perSampleIsEnabled, perFamilyIsEnabled, mergeEvals);
+            HashMap<RodBinding<VariantContext>, HashMap<String, Collection<VariantContext>>> compVCs = variantEvalUtils.bindVariantContexts(tracker, ref, comps, byFilterIsEnabled, false, false, false, false);
 
             // for each eval track
             for ( final RodBinding<VariantContext> evalRod : evals ) {
                 final Map<String, Collection<VariantContext>> emptyEvalMap = Collections.emptyMap();
                 final Map<String, Collection<VariantContext>> evalSet = evalVCs.containsKey(evalRod) ? evalVCs.get(evalRod) : emptyEvalMap;
 
+                Set<String> statificationLevels;
+
                 // for each sample stratifier
-                for ( final String sampleName : sampleNamesForStratification ) {
-                    Collection<VariantContext> evalSetBySample = evalSet.get(sampleName);
+                if (perFamilyIsEnabled)
+                    statificationLevels = familyNamesForStratification;
+                else
+                    statificationLevels = sampleNamesForStratification;
+                for ( final String stratLevelName : statificationLevels ) {
+                    Collection<VariantContext> evalSetBySample = evalSet.get(stratLevelName);
+
                     if ( evalSetBySample == null ) {
                         evalSetBySample = new HashSet<VariantContext>(1);
                         evalSetBySample.add(null);
@@ -448,7 +496,18 @@ public class VariantEval extends RodWalker<Integer, Integer> implements TreeRedu
                             // find the comp
                             final VariantContext comp = findMatchingComp(eval, compSet);
 
-                            for ( EvaluationContext nec : getEvaluationContexts(tracker, ref, eval, evalRod.getName(), comp, compRod.getName(), sampleName) ) {
+                            Collection<EvaluationContext> contextsForStratification;
+                            if (perFamilyIsEnabled)
+                                contextsForStratification = getEvaluationContexts(tracker, ref, eval, evalRod.getName(), comp, compRod.getName(), null, stratLevelName);
+                            else {
+                                String familyID;
+                                if (stratLevelName.equals("all"))
+                                    familyID = "all";
+                                else
+                                    familyID = getSampleDB().getSample(stratLevelName).getFamilyID();
+                                contextsForStratification = getEvaluationContexts(tracker, ref, eval, evalRod.getName(), comp, compRod.getName(), stratLevelName, familyID);
+                            }
+                            for ( EvaluationContext nec : contextsForStratification ) {
 
                                 // eval against the comp
                                 synchronized (nec) {
@@ -517,10 +576,11 @@ public class VariantEval extends RodWalker<Integer, Integer> implements TreeRedu
                                                                   final String evalName,
                                                                   final VariantContext comp,
                                                                   final String compName,
-                                                                  final String sampleName ) {
+                                                                  final String sampleName,
+                                                                  final String familyName) {
         final List<List<Object>> states = new LinkedList<List<Object>>();
         for ( final VariantStratifier vs : stratManager.getStratifiers() ) {
-            states.add(vs.getRelevantStates(ref, tracker, comp, compName, eval, evalName, sampleName));
+            states.add(vs.getRelevantStates(ref, tracker, comp, compName, eval, evalName, sampleName, familyName));
         }
         return stratManager.values(states);
     }
@@ -605,7 +665,34 @@ public class VariantEval extends RodWalker<Integer, Integer> implements TreeRedu
         for ( final EvaluationContext nec : stratManager.values() )
             for ( final VariantEvaluator ve : nec.getVariantEvaluators() )
                 ve.finalizeEvaluation();
-        
+
+        //send data to MetricsCollection
+        CompOverlap compOverlap = null;
+        IndelSummary indelSummary = null;
+        CountVariants countVariants = null;
+        MultiallelicSummary multiallelicSummary = null;
+        TiTvVariantEvaluator tiTvVariantEvaluator = null;
+        MetricsCollection metricsCollection = null;
+        for(final EvaluationContext nec: stratManager.values()) {
+            for(final VariantEvaluator ve : nec.getVariantEvaluators()) {
+                if (ve instanceof CompOverlap)
+                    compOverlap = (CompOverlap) ve;
+                else if (ve instanceof IndelSummary)
+                    indelSummary = (IndelSummary) ve;
+                else if (ve instanceof CountVariants)
+                    countVariants = (CountVariants) ve;
+                else if (ve instanceof MultiallelicSummary)
+                    multiallelicSummary = (MultiallelicSummary) ve;
+                else if (ve instanceof TiTvVariantEvaluator)
+                    tiTvVariantEvaluator = (TiTvVariantEvaluator) ve;
+                else if (ve instanceof MetricsCollection)
+                    metricsCollection = (MetricsCollection) ve;
+            }
+
+        if(metricsCollection != null)
+            metricsCollection.setData(compOverlap.concordantRate, indelSummary.n_SNPs, countVariants.nSNPs, indelSummary.n_indels, multiallelicSummary.nIndels, indelSummary.insertion_to_deletion_ratio, countVariants.insertionDeletionRatio, tiTvVariantEvaluator.tiTvRatio);
+        }
+
         VariantEvalReportWriter.writeReport(out, stratManager, stratManager.getStratifiers(), stratManager.get(0).getVariantEvaluators());
     }
 
@@ -618,6 +705,7 @@ public class VariantEval extends RodWalker<Integer, Integer> implements TreeRedu
     public double getMendelianViolationQualThreshold() { return MENDELIAN_VIOLATION_QUAL_THRESHOLD; }
 
     public static String getAllSampleName() { return ALL_SAMPLE_NAME; }
+    public static String getAllFamilyName() { return ALL_FAMILY_NAME; }
 
     public List<RodBinding<VariantContext>> getKnowns() { return knowns; }
 
@@ -626,6 +714,8 @@ public class VariantEval extends RodWalker<Integer, Integer> implements TreeRedu
     public boolean isSubsettingToSpecificSamples() { return isSubsettingSamples; }
     public Set<String> getSampleNamesForEvaluation() { return sampleNamesForEvaluation; }
 
+    public Set<String> getFamilyNamesForEvaluation() { return familyNamesForEvaluation; }
+
     public int getNumberOfSamplesForEvaluation() {
         if (sampleNamesForEvaluation!= null &&  !sampleNamesForEvaluation.isEmpty())
             return sampleNamesForEvaluation.size();
@@ -636,6 +726,8 @@ public class VariantEval extends RodWalker<Integer, Integer> implements TreeRedu
     }
     public Set<String> getSampleNamesForStratification() { return sampleNamesForStratification; }
 
+    public Set<String> getFamilyNamesForStratification() { return familyNamesForStratification; }
+
     public List<RodBinding<VariantContext>> getComps() { return comps; }
 
     public Set<SortableJexlVCMatchExp> getJexlExpressions() { return jexlExpressions; }
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/VariantEvalReportWriter.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/VariantEvalReportWriter.java
index 7244a94..aef7d8b 100644
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/VariantEvalReportWriter.java
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/VariantEvalReportWriter.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -25,8 +25,8 @@
 
 package org.broadinstitute.gatk.tools.walkers.varianteval;
 
-import org.broadinstitute.gatk.engine.report.GATKReport;
-import org.broadinstitute.gatk.engine.report.GATKReportTable;
+import org.broadinstitute.gatk.utils.report.GATKReport;
+import org.broadinstitute.gatk.utils.report.GATKReportTable;
 import org.broadinstitute.gatk.tools.walkers.varianteval.evaluators.VariantEvaluator;
 import org.broadinstitute.gatk.tools.walkers.varianteval.stratifications.VariantStratifier;
 import org.broadinstitute.gatk.tools.walkers.varianteval.stratifications.manager.StratificationManager;
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/evaluators/CompOverlap.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/evaluators/CompOverlap.java
index 7ebf96e..1e634b7 100644
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/evaluators/CompOverlap.java
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/evaluators/CompOverlap.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -25,9 +25,9 @@
 
 package org.broadinstitute.gatk.tools.walkers.varianteval.evaluators;
 
-import org.broadinstitute.gatk.engine.contexts.AlignmentContext;
-import org.broadinstitute.gatk.engine.contexts.ReferenceContext;
-import org.broadinstitute.gatk.engine.refdata.RefMetaDataTracker;
+import org.broadinstitute.gatk.utils.contexts.AlignmentContext;
+import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
+import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
 import org.broadinstitute.gatk.tools.walkers.varianteval.util.Analysis;
 import org.broadinstitute.gatk.tools.walkers.varianteval.util.DataPoint;
 import htsjdk.variant.variantcontext.Allele;
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/evaluators/CountVariants.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/evaluators/CountVariants.java
index 89b37f0..2ab63c5 100644
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/evaluators/CountVariants.java
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/evaluators/CountVariants.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -25,9 +25,9 @@
 
 package org.broadinstitute.gatk.tools.walkers.varianteval.evaluators;
 
-import org.broadinstitute.gatk.engine.contexts.AlignmentContext;
-import org.broadinstitute.gatk.engine.contexts.ReferenceContext;
-import org.broadinstitute.gatk.engine.refdata.RefMetaDataTracker;
+import org.broadinstitute.gatk.utils.contexts.AlignmentContext;
+import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
+import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
 import org.broadinstitute.gatk.tools.walkers.varianteval.util.Analysis;
 import org.broadinstitute.gatk.tools.walkers.varianteval.util.DataPoint;
 import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/evaluators/IndelLengthHistogram.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/evaluators/IndelLengthHistogram.java
index 8e202a7..21164ce 100644
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/evaluators/IndelLengthHistogram.java
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/evaluators/IndelLengthHistogram.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -25,9 +25,9 @@
 
 package org.broadinstitute.gatk.tools.walkers.varianteval.evaluators;
 
-import org.broadinstitute.gatk.engine.contexts.AlignmentContext;
-import org.broadinstitute.gatk.engine.contexts.ReferenceContext;
-import org.broadinstitute.gatk.engine.refdata.RefMetaDataTracker;
+import org.broadinstitute.gatk.utils.contexts.AlignmentContext;
+import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
+import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
 import org.broadinstitute.gatk.tools.walkers.varianteval.util.Analysis;
 import org.broadinstitute.gatk.tools.walkers.varianteval.util.Molten;
 import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/evaluators/IndelSummary.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/evaluators/IndelSummary.java
index 484541e..878d6fc 100644
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/evaluators/IndelSummary.java
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/evaluators/IndelSummary.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -26,9 +26,9 @@
 package org.broadinstitute.gatk.tools.walkers.varianteval.evaluators;
 
 import org.apache.log4j.Logger;
-import org.broadinstitute.gatk.engine.contexts.AlignmentContext;
-import org.broadinstitute.gatk.engine.contexts.ReferenceContext;
-import org.broadinstitute.gatk.engine.refdata.RefMetaDataTracker;
+import org.broadinstitute.gatk.utils.contexts.AlignmentContext;
+import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
+import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
 import org.broadinstitute.gatk.tools.walkers.varianteval.util.Analysis;
 import org.broadinstitute.gatk.tools.walkers.varianteval.util.DataPoint;
 import org.broadinstitute.gatk.utils.Utils;
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/evaluators/MendelianViolationEvaluator.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/evaluators/MendelianViolationEvaluator.java
index c01aae1..29d81ce 100644
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/evaluators/MendelianViolationEvaluator.java
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/evaluators/MendelianViolationEvaluator.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -25,14 +25,14 @@
 
 package org.broadinstitute.gatk.tools.walkers.varianteval.evaluators;
 
-import org.broadinstitute.gatk.engine.contexts.AlignmentContext;
-import org.broadinstitute.gatk.engine.contexts.ReferenceContext;
-import org.broadinstitute.gatk.engine.refdata.RefMetaDataTracker;
+import org.broadinstitute.gatk.utils.contexts.AlignmentContext;
+import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
+import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
 import org.broadinstitute.gatk.engine.samples.Sample;
 import org.broadinstitute.gatk.tools.walkers.varianteval.VariantEval;
 import org.broadinstitute.gatk.tools.walkers.varianteval.util.Analysis;
 import org.broadinstitute.gatk.tools.walkers.varianteval.util.DataPoint;
-import org.broadinstitute.gatk.utils.MendelianViolation;
+import org.broadinstitute.gatk.engine.samples.MendelianViolation;
 import htsjdk.variant.variantcontext.VariantContext;
 
 import java.util.Map;
@@ -172,7 +172,7 @@ public class MendelianViolationEvaluator extends VariantEvaluator {
             HomVarHet_inheritedRef += mv.getParentsVarHetInheritedRef();
             HomVarHet_inheritedVar += mv.getParentsVarHetInheritedVar();
 
-            if(mv.getFamilyCalledCount()>0){
+            if(mv.getFamilyCalledCount()>0 || mv.getFamilyLowQualsCount()>0){
                 nVariants++;
                 nFamCalled += mv.getFamilyCalledCount();
                 nLowQual += mv.getFamilyLowQualsCount();
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/evaluators/MetricsCollection.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/evaluators/MetricsCollection.java
new file mode 100644
index 0000000..ea73944
--- /dev/null
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/evaluators/MetricsCollection.java
@@ -0,0 +1,67 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.tools.walkers.varianteval.evaluators;
+
+import org.broadinstitute.gatk.tools.walkers.varianteval.util.Analysis;
+import org.broadinstitute.gatk.tools.walkers.varianteval.util.DataPoint;
+
+/**
+ * Created by knoblett on 9/15/15.
+ */
+
+@Analysis(description = "Metrics Collection")
+public class MetricsCollection extends VariantEvaluator {
+
+    @DataPoint(description = "The concordance rate from CompOverlap", format = "%.2f")
+    public double concordantRate;
+    @DataPoint(description = "Number of SNPs from IndelSummary", format = "%d")
+    public int nSNPs;
+    @DataPoint(description = "Number of SNP loci from CountVariants", format = "%d")
+    public long nSNPloci;
+    @DataPoint(description = "Number of indels from IndelSummary", format = "%d")
+    public int nIndels;
+    @DataPoint(description = "Number of indel loci from MultiallelicSummary", format = "%d")
+    public int nIndelLoci;
+    @DataPoint(description = "Insertion to deletion ratio from IndelSummary")
+    public String indelRatio;
+    @DataPoint(description = "Insertion to deletion ratio from CountVariants", format = "%.2f")
+    public double indelRatioLociBased;
+    @DataPoint(description = "The transition to transversion ratio from TiTvVariantEvaluator", format = "%.2f")
+    public double tiTvRatio;
+
+    public int getComparisonOrder() {return 2;}
+
+    public void setData(double concordantRate, int nSNPs, long nSNPloci, int nIndels, int nIndelLoci, String indelRatio, double indelRatioLociBased, double tiTvRatio){
+        this.concordantRate = concordantRate;
+        this.nSNPs = nSNPs;
+        this.nSNPloci = nSNPloci;
+        this.nIndels = nIndels;
+        this.nIndelLoci = nIndelLoci;
+        this.indelRatio = indelRatio;
+        this.indelRatioLociBased = indelRatioLociBased;
+        this.tiTvRatio = tiTvRatio;
+    }
+}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/evaluators/MultiallelicSummary.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/evaluators/MultiallelicSummary.java
index 88543b5..a45dfbc 100644
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/evaluators/MultiallelicSummary.java
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/evaluators/MultiallelicSummary.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -26,9 +26,9 @@
 package org.broadinstitute.gatk.tools.walkers.varianteval.evaluators;
 
 import org.apache.log4j.Logger;
-import org.broadinstitute.gatk.engine.contexts.AlignmentContext;
-import org.broadinstitute.gatk.engine.contexts.ReferenceContext;
-import org.broadinstitute.gatk.engine.refdata.RefMetaDataTracker;
+import org.broadinstitute.gatk.utils.contexts.AlignmentContext;
+import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
+import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
 import org.broadinstitute.gatk.tools.walkers.varianteval.util.Analysis;
 import org.broadinstitute.gatk.tools.walkers.varianteval.util.DataPoint;
 import org.broadinstitute.gatk.utils.Utils;
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/evaluators/PrintMissingComp.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/evaluators/PrintMissingComp.java
index 0d3d4cf..f9c67e8 100644
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/evaluators/PrintMissingComp.java
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/evaluators/PrintMissingComp.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -25,16 +25,17 @@
 
 package org.broadinstitute.gatk.tools.walkers.varianteval.evaluators;
 
-import org.broadinstitute.gatk.engine.contexts.AlignmentContext;
-import org.broadinstitute.gatk.engine.contexts.ReferenceContext;
-import org.broadinstitute.gatk.engine.refdata.RefMetaDataTracker;
+import org.apache.commons.lang.ObjectUtils;
+import org.broadinstitute.gatk.utils.contexts.AlignmentContext;
+import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
+import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
 import org.broadinstitute.gatk.tools.walkers.varianteval.util.Analysis;
 import org.broadinstitute.gatk.tools.walkers.varianteval.util.DataPoint;
 import htsjdk.variant.variantcontext.VariantContext;
 
-@Analysis(name = "PrintMissingComp", description = "the overlap between eval and comp sites")
+@Analysis(name = "PrintMissingComp", description = "count the number of comp SNP sites that are not in eval")
 public class PrintMissingComp extends VariantEvaluator {
-    @DataPoint(description = "number of eval sites outside of comp sites", format = "%d")
+    @DataPoint(description = "number of comp SNP sites outside of eval sites", format = "%d")
     public long nMissing = 0;
 
     public String getName() {
@@ -49,9 +50,8 @@ public class PrintMissingComp extends VariantEvaluator {
         final boolean compIsGood = comp != null && comp.isNotFiltered() && comp.isSNP();
         final boolean evalIsGood = eval != null && eval.isSNP();
 
-        if ( compIsGood & ! evalIsGood ) {
+        if ( compIsGood && !evalIsGood ) {
             nMissing++;
-            super.getWalker().getLogger().info("MissingFrom" + eval.toString() + " is missing from " + comp.getSource());
         }
     }
 }
\ No newline at end of file
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/evaluators/StandardEval.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/evaluators/StandardEval.java
index c3b75c1..9ee366c 100644
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/evaluators/StandardEval.java
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/evaluators/StandardEval.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/evaluators/ThetaVariantEvaluator.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/evaluators/ThetaVariantEvaluator.java
index 60a4881..ef91df2 100644
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/evaluators/ThetaVariantEvaluator.java
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/evaluators/ThetaVariantEvaluator.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -25,9 +25,9 @@
 
 package org.broadinstitute.gatk.tools.walkers.varianteval.evaluators;
 
-import org.broadinstitute.gatk.engine.contexts.AlignmentContext;
-import org.broadinstitute.gatk.engine.contexts.ReferenceContext;
-import org.broadinstitute.gatk.engine.refdata.RefMetaDataTracker;
+import org.broadinstitute.gatk.utils.contexts.AlignmentContext;
+import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
+import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
 import org.broadinstitute.gatk.tools.walkers.varianteval.util.Analysis;
 import org.broadinstitute.gatk.tools.walkers.varianteval.util.DataPoint;
 import htsjdk.variant.variantcontext.Allele;
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/evaluators/TiTvVariantEvaluator.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/evaluators/TiTvVariantEvaluator.java
index 1919c5f..b60ed84 100644
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/evaluators/TiTvVariantEvaluator.java
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/evaluators/TiTvVariantEvaluator.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -25,9 +25,9 @@
 
 package org.broadinstitute.gatk.tools.walkers.varianteval.evaluators;
 
-import org.broadinstitute.gatk.engine.contexts.AlignmentContext;
-import org.broadinstitute.gatk.engine.contexts.ReferenceContext;
-import org.broadinstitute.gatk.engine.refdata.RefMetaDataTracker;
+import org.broadinstitute.gatk.utils.contexts.AlignmentContext;
+import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
+import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
 import org.broadinstitute.gatk.tools.walkers.varianteval.util.Analysis;
 import org.broadinstitute.gatk.tools.walkers.varianteval.util.DataPoint;
 import org.broadinstitute.gatk.utils.BaseUtils;
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/evaluators/ValidationReport.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/evaluators/ValidationReport.java
index 664e5f2..74b280f 100644
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/evaluators/ValidationReport.java
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/evaluators/ValidationReport.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -25,9 +25,9 @@
 
 package org.broadinstitute.gatk.tools.walkers.varianteval.evaluators;
 
-import org.broadinstitute.gatk.engine.contexts.AlignmentContext;
-import org.broadinstitute.gatk.engine.contexts.ReferenceContext;
-import org.broadinstitute.gatk.engine.refdata.RefMetaDataTracker;
+import org.broadinstitute.gatk.utils.contexts.AlignmentContext;
+import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
+import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
 import org.broadinstitute.gatk.tools.walkers.varianteval.util.Analysis;
 import org.broadinstitute.gatk.tools.walkers.varianteval.util.DataPoint;
 import htsjdk.variant.vcf.VCFConstants;
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/evaluators/VariantEvaluator.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/evaluators/VariantEvaluator.java
index 0984a2e..f1144a0 100644
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/evaluators/VariantEvaluator.java
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/evaluators/VariantEvaluator.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -25,9 +25,9 @@
 
 package org.broadinstitute.gatk.tools.walkers.varianteval.evaluators;
 
-import org.broadinstitute.gatk.engine.contexts.AlignmentContext;
-import org.broadinstitute.gatk.engine.contexts.ReferenceContext;
-import org.broadinstitute.gatk.engine.refdata.RefMetaDataTracker;
+import org.broadinstitute.gatk.utils.contexts.AlignmentContext;
+import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
+import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
 import org.broadinstitute.gatk.tools.walkers.varianteval.VariantEval;
 import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
 import htsjdk.variant.variantcontext.VariantContext;
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/evaluators/VariantSummary.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/evaluators/VariantSummary.java
index 9147330..a34ddeb 100644
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/evaluators/VariantSummary.java
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/evaluators/VariantSummary.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -27,9 +27,9 @@ package org.broadinstitute.gatk.tools.walkers.varianteval.evaluators;
 
 import htsjdk.samtools.util.IntervalTree;
 import org.apache.log4j.Logger;
-import org.broadinstitute.gatk.engine.contexts.AlignmentContext;
-import org.broadinstitute.gatk.engine.contexts.ReferenceContext;
-import org.broadinstitute.gatk.engine.refdata.RefMetaDataTracker;
+import org.broadinstitute.gatk.utils.contexts.AlignmentContext;
+import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
+import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
 import org.broadinstitute.gatk.tools.walkers.varianteval.VariantEval;
 import org.broadinstitute.gatk.tools.walkers.varianteval.util.Analysis;
 import org.broadinstitute.gatk.tools.walkers.varianteval.util.DataPoint;
@@ -164,7 +164,7 @@ public class VariantSummary extends VariantEvaluator implements StandardEval {
 
         if ( walker.knownCNVsFile != null ) {
             knownCNVs = walker.createIntervalTreeByContig(walker.knownCNVsFile);
-            final List<GenomeLoc> locs = walker.knownCNVsFile.getIntervals(walker.getToolkit());
+            final List<GenomeLoc> locs = walker.knownCNVsFile.getIntervals(walker.getToolkit().getGenomeLocParser());
             logger.info(String.format("Creating known CNV list %s containing %d intervals covering %d bp",
                     walker.knownCNVsFile.getSource(), locs.size(), IntervalUtils.intervalSize(locs)));
         }
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/stratifications/AlleleCount.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/stratifications/AlleleCount.java
index 1f7ed14..2ab005a 100644
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/stratifications/AlleleCount.java
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/stratifications/AlleleCount.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -25,14 +25,15 @@
 
 package org.broadinstitute.gatk.tools.walkers.varianteval.stratifications;
 
-import org.broadinstitute.gatk.engine.contexts.ReferenceContext;
-import org.broadinstitute.gatk.engine.refdata.RefMetaDataTracker;
+import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
+import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
 import org.broadinstitute.gatk.tools.walkers.varianteval.evaluators.VariantEvaluator;
 import org.broadinstitute.gatk.tools.walkers.varianteval.evaluators.VariantSummary;
 import htsjdk.variant.vcf.VCFConstants;
 import org.broadinstitute.gatk.utils.exceptions.UserException;
 import htsjdk.variant.variantcontext.Allele;
 import htsjdk.variant.variantcontext.VariantContext;
+import org.broadinstitute.gatk.utils.variant.GATKVCFConstants;
 
 import java.util.*;
 
@@ -66,15 +67,15 @@ public class AlleleCount extends VariantStratifier {
         getVariantEvalWalker().getLogger().info("AlleleCount using " + nchrom + " chromosomes");
     }
 
-    public List<Object> getRelevantStates(ReferenceContext ref, RefMetaDataTracker tracker, VariantContext comp, String compName, VariantContext eval, String evalName, String sampleName) {
+    public List<Object> getRelevantStates(ReferenceContext ref, RefMetaDataTracker tracker, VariantContext comp, String compName, VariantContext eval, String evalName, String sampleName, String familyName) {
         if (eval != null) {
             int AC = 0; // by default, the site is considered monomorphic
 
             try {
                 if ( eval.isBiallelic() ) {
-                    if ( eval.hasAttribute(VCFConstants.MLE_ALLELE_COUNT_KEY) ) {
+                    if ( eval.hasAttribute(GATKVCFConstants.MLE_ALLELE_COUNT_KEY) ) {
                         // the MLEAC is allowed to be larger than the AN (e.g. in the case of all PLs being 0, the GT is ./. but the exact model may arbitrarily choose an AC>1)
-                        AC = Math.min(eval.getAttributeAsInt(VCFConstants.MLE_ALLELE_COUNT_KEY, 0), nchrom);
+                        AC = Math.min(eval.getAttributeAsInt(GATKVCFConstants.MLE_ALLELE_COUNT_KEY, 0), nchrom);
                     } else if ( eval.hasAttribute(VCFConstants.ALLELE_COUNT_KEY) ) {
                         AC = eval.getAttributeAsInt(VCFConstants.ALLELE_COUNT_KEY, 0);
                     }
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/stratifications/AlleleFrequency.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/stratifications/AlleleFrequency.java
index 349979a..d52ea8f 100644
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/stratifications/AlleleFrequency.java
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/stratifications/AlleleFrequency.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -25,8 +25,8 @@
 
 package org.broadinstitute.gatk.tools.walkers.varianteval.stratifications;
 
-import org.broadinstitute.gatk.engine.contexts.ReferenceContext;
-import org.broadinstitute.gatk.engine.refdata.RefMetaDataTracker;
+import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
+import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
 import org.broadinstitute.gatk.utils.MathUtils;
 import htsjdk.variant.variantcontext.VariantContext;
 
@@ -47,7 +47,7 @@ public class AlleleFrequency extends VariantStratifier {
         }
     }
 
-    public List<Object> getRelevantStates(ReferenceContext ref, RefMetaDataTracker tracker, VariantContext comp, String compName, VariantContext eval, String evalName, String sampleName) {
+    public List<Object> getRelevantStates(ReferenceContext ref, RefMetaDataTracker tracker, VariantContext comp, String compName, VariantContext eval, String evalName, String sampleName, String familyName) {
         if (eval != null) {
             try {
                 return Collections.singletonList((Object)String.format("%.3f", (5.0 * MathUtils.round(eval.getAttributeAsDouble("AF", 0.0) / 5.0, 3))));
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/stratifications/CompRod.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/stratifications/CompRod.java
index f131ca7..7200619 100644
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/stratifications/CompRod.java
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/stratifications/CompRod.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -26,8 +26,8 @@
 package org.broadinstitute.gatk.tools.walkers.varianteval.stratifications;
 
 import org.broadinstitute.gatk.utils.commandline.RodBinding;
-import org.broadinstitute.gatk.engine.contexts.ReferenceContext;
-import org.broadinstitute.gatk.engine.refdata.RefMetaDataTracker;
+import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
+import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
 import htsjdk.variant.variantcontext.VariantContext;
 
 import java.util.Collections;
@@ -45,7 +45,7 @@ public class CompRod extends VariantStratifier implements RequiredStratification
         }
     }
 
-    public List<Object> getRelevantStates(ReferenceContext ref, RefMetaDataTracker tracker, VariantContext comp, String compName, VariantContext eval, String evalName, String sampleName) {
+    public List<Object> getRelevantStates(ReferenceContext ref, RefMetaDataTracker tracker, VariantContext comp, String compName, VariantContext eval, String evalName, String sampleName, String familyName) {
         return Collections.singletonList((Object)compName);
     }
 }
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/stratifications/Contig.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/stratifications/Contig.java
index f90e7c5..0645fd5 100644
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/stratifications/Contig.java
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/stratifications/Contig.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -25,8 +25,8 @@
 
 package org.broadinstitute.gatk.tools.walkers.varianteval.stratifications;
 
-import org.broadinstitute.gatk.engine.contexts.ReferenceContext;
-import org.broadinstitute.gatk.engine.refdata.RefMetaDataTracker;
+import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
+import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
 import htsjdk.variant.variantcontext.VariantContext;
 
 import java.util.Arrays;
@@ -44,7 +44,7 @@ public class Contig extends VariantStratifier {
     }
 
     @Override
-    public List<Object> getRelevantStates(ReferenceContext ref, RefMetaDataTracker tracker, VariantContext comp, String compName, VariantContext eval, String evalName, String sampleName) {
+    public List<Object> getRelevantStates(ReferenceContext ref, RefMetaDataTracker tracker, VariantContext comp, String compName, VariantContext eval, String evalName, String sampleName, String familyName) {
         if (eval != null) {
             return Arrays.asList((Object)"all", eval.getChr());
         } else {
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/stratifications/CpG.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/stratifications/CpG.java
index 97e5e72..f4c31a5 100644
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/stratifications/CpG.java
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/stratifications/CpG.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -25,8 +25,8 @@
 
 package org.broadinstitute.gatk.tools.walkers.varianteval.stratifications;
 
-import org.broadinstitute.gatk.engine.contexts.ReferenceContext;
-import org.broadinstitute.gatk.engine.refdata.RefMetaDataTracker;
+import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
+import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
 import htsjdk.variant.variantcontext.VariantContext;
 
 import java.util.ArrayList;
@@ -53,7 +53,7 @@ public class CpG extends VariantStratifier {
     }
 
     @Override
-    public List<Object> getRelevantStates(ReferenceContext ref, RefMetaDataTracker tracker, VariantContext comp, String compName, VariantContext eval, String evalName, String sampleName) {
+    public List<Object> getRelevantStates(ReferenceContext ref, RefMetaDataTracker tracker, VariantContext comp, String compName, VariantContext eval, String evalName, String sampleName, String familyName) {
         boolean isCpG = false;
         if (ref != null && ref.getBases() != null) {
             String fwRefBases = new String(ref.getBases());
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/stratifications/Degeneracy.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/stratifications/Degeneracy.java
index 03cba8c..1e8368f 100644
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/stratifications/Degeneracy.java
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/stratifications/Degeneracy.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -25,8 +25,8 @@
 
 package org.broadinstitute.gatk.tools.walkers.varianteval.stratifications;
 
-import org.broadinstitute.gatk.engine.contexts.ReferenceContext;
-import org.broadinstitute.gatk.engine.refdata.RefMetaDataTracker;
+import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
+import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
 import htsjdk.variant.variantcontext.VariantContext;
 
 import java.util.ArrayList;
@@ -103,7 +103,7 @@ public class Degeneracy extends VariantStratifier {
         }
     }
 
-    public List<Object> getRelevantStates(ReferenceContext ref, RefMetaDataTracker tracker, VariantContext comp, String compName, VariantContext eval, String evalName, String sampleName) {
+    public List<Object> getRelevantStates(ReferenceContext ref, RefMetaDataTracker tracker, VariantContext comp, String compName, VariantContext eval, String evalName, String sampleName, String familyName) {
         ArrayList<Object> relevantStates = new ArrayList<Object>();
 
         relevantStates.add("all");
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/stratifications/DynamicStratification.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/stratifications/DynamicStratification.java
index 1c42898..4293968 100644
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/stratifications/DynamicStratification.java
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/stratifications/DynamicStratification.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/stratifications/EvalRod.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/stratifications/EvalRod.java
index 8fdd007..dddb201 100644
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/stratifications/EvalRod.java
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/stratifications/EvalRod.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -26,8 +26,8 @@
 package org.broadinstitute.gatk.tools.walkers.varianteval.stratifications;
 
 import org.broadinstitute.gatk.utils.commandline.RodBinding;
-import org.broadinstitute.gatk.engine.contexts.ReferenceContext;
-import org.broadinstitute.gatk.engine.refdata.RefMetaDataTracker;
+import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
+import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
 import htsjdk.variant.variantcontext.VariantContext;
 
 import java.util.Arrays;
@@ -46,7 +46,7 @@ public class EvalRod extends VariantStratifier implements RequiredStratification
         }
     }
 
-    public List<Object> getRelevantStates(ReferenceContext ref, RefMetaDataTracker tracker, VariantContext comp, String compName, VariantContext eval, String evalName, String sampleName) {
+    public List<Object> getRelevantStates(ReferenceContext ref, RefMetaDataTracker tracker, VariantContext comp, String compName, VariantContext eval, String evalName, String sampleName, String familyName) {
         return Arrays.asList((Object)evalName);
     }
 }
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/stratifications/Family.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/stratifications/Family.java
new file mode 100644
index 0000000..e4acb9e
--- /dev/null
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/stratifications/Family.java
@@ -0,0 +1,55 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+*
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+*
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+*
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.tools.walkers.varianteval.stratifications;
+
+import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
+import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
+import org.broadinstitute.gatk.tools.walkers.varianteval.evaluators.VariantEvaluator;
+import org.broadinstitute.gatk.tools.walkers.varianteval.evaluators.VariantSummary;
+import htsjdk.variant.variantcontext.VariantContext;
+
+import java.util.*;
+
+/**
+ * Stratifies the eval RODs by each family in the eval ROD, as described by the pedigree.
+ *
+ * This allows the system to analyze each family separately.  This is particularly useful for the MendelianViolationEvaluator module.
+ */
+public class Family extends VariantStratifier {
+    @Override
+    public void initialize() {
+        states.addAll(getVariantEvalWalker().getFamilyNamesForStratification());
+    }
+
+    public List<Object> getRelevantStates(ReferenceContext ref, RefMetaDataTracker tracker, VariantContext comp, String compName, VariantContext eval, String evalName, String sampleName, String familyName) {
+        return Collections.singletonList((Object) familyName);
+    }
+
+    @Override
+    public Set<Class<? extends VariantEvaluator>> getIncompatibleEvaluators() {
+        return new HashSet<Class<? extends VariantEvaluator>>(Arrays.asList(VariantSummary.class));
+    }
+}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/stratifications/Filter.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/stratifications/Filter.java
index c37f003..ac9b9e4 100644
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/stratifications/Filter.java
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/stratifications/Filter.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -25,8 +25,8 @@
 
 package org.broadinstitute.gatk.tools.walkers.varianteval.stratifications;
 
-import org.broadinstitute.gatk.engine.contexts.ReferenceContext;
-import org.broadinstitute.gatk.engine.refdata.RefMetaDataTracker;
+import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
+import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
 import htsjdk.variant.variantcontext.VariantContext;
 
 import java.util.ArrayList;
@@ -43,7 +43,7 @@ public class Filter extends VariantStratifier {
         states.add("raw");
     }
 
-    public List<Object> getRelevantStates(ReferenceContext ref, RefMetaDataTracker tracker, VariantContext comp, String compName, VariantContext eval, String evalName, String sampleName) {
+    public List<Object> getRelevantStates(ReferenceContext ref, RefMetaDataTracker tracker, VariantContext comp, String compName, VariantContext eval, String evalName, String sampleName, String familyName) {
         ArrayList<Object> relevantStates = new ArrayList<Object>();
 
         relevantStates.add("raw");
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/stratifications/FunctionalClass.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/stratifications/FunctionalClass.java
index 08ff9d4..89ffe86 100644
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/stratifications/FunctionalClass.java
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/stratifications/FunctionalClass.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -25,8 +25,8 @@
 
 package org.broadinstitute.gatk.tools.walkers.varianteval.stratifications;
 
-import org.broadinstitute.gatk.engine.contexts.ReferenceContext;
-import org.broadinstitute.gatk.engine.refdata.RefMetaDataTracker;
+import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
+import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
 import org.broadinstitute.gatk.tools.walkers.annotator.SnpEff;
 import htsjdk.variant.variantcontext.VariantContext;
 
@@ -53,7 +53,7 @@ public class FunctionalClass extends VariantStratifier {
     }
 
 
-    public List<Object> getRelevantStates(ReferenceContext ref, RefMetaDataTracker tracker, VariantContext comp, String compName, VariantContext eval, String evalName, String sampleName) {
+    public List<Object> getRelevantStates(ReferenceContext ref, RefMetaDataTracker tracker, VariantContext comp, String compName, VariantContext eval, String evalName, String sampleName, String familyName) {
         ArrayList<Object> relevantStates = new ArrayList<Object>();
 
         relevantStates.add("all");
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/stratifications/IndelSize.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/stratifications/IndelSize.java
index e5cb240..48efe1e 100644
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/stratifications/IndelSize.java
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/stratifications/IndelSize.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -25,8 +25,8 @@
 
 package org.broadinstitute.gatk.tools.walkers.varianteval.stratifications;
 
-import org.broadinstitute.gatk.engine.contexts.ReferenceContext;
-import org.broadinstitute.gatk.engine.refdata.RefMetaDataTracker;
+import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
+import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
 import htsjdk.variant.variantcontext.VariantContext;
 
 import java.util.Collections;
@@ -48,7 +48,7 @@ public class IndelSize extends VariantStratifier {
         }
     }
 
-    public List<Object> getRelevantStates(ReferenceContext ref, RefMetaDataTracker tracker, VariantContext comp, String compName, VariantContext eval, String evalName, String sampleName) {
+    public List<Object> getRelevantStates(ReferenceContext ref, RefMetaDataTracker tracker, VariantContext comp, String compName, VariantContext eval, String evalName, String sampleName, String familyName) {
         if (eval != null && eval.isIndel() && eval.isBiallelic()) {
             try {
                 int eventLength = 0;
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/stratifications/IntervalStratification.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/stratifications/IntervalStratification.java
index 8ee4e79..57ba0e7 100644
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/stratifications/IntervalStratification.java
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/stratifications/IntervalStratification.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -27,8 +27,8 @@ package org.broadinstitute.gatk.tools.walkers.varianteval.stratifications;
 
 import htsjdk.samtools.util.IntervalTree;
 import org.apache.log4j.Logger;
-import org.broadinstitute.gatk.engine.contexts.ReferenceContext;
-import org.broadinstitute.gatk.engine.refdata.RefMetaDataTracker;
+import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
+import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
 import org.broadinstitute.gatk.utils.GenomeLoc;
 import org.broadinstitute.gatk.utils.exceptions.UserException;
 import org.broadinstitute.gatk.utils.interval.IntervalUtils;
@@ -62,7 +62,7 @@ public class IntervalStratification extends VariantStratifier {
         if ( getVariantEvalWalker().intervalsFile == null )
             throw new UserException.MissingArgument("stratIntervals", "Must be provided when IntervalStratification is enabled");
 
-        final List<GenomeLoc> locs = getVariantEvalWalker().intervalsFile.getIntervals(getVariantEvalWalker().getToolkit());
+        final List<GenomeLoc> locs = getVariantEvalWalker().intervalsFile.getIntervals(getVariantEvalWalker().getToolkit().getGenomeLocParser());
 
         if ( locs.isEmpty() )
             throw new UserException.BadArgumentValue("stratIntervals", "Contains no intervals.  Perhaps the file is malformed or empty?");
@@ -75,7 +75,7 @@ public class IntervalStratification extends VariantStratifier {
         states.addAll(Arrays.asList("all", "overlaps.intervals", "outside.intervals"));
     }
 
-    public List<Object> getRelevantStates(ReferenceContext ref, RefMetaDataTracker tracker, VariantContext comp, String compName, VariantContext eval, String evalName, String sampleName) {
+    public List<Object> getRelevantStates(ReferenceContext ref, RefMetaDataTracker tracker, VariantContext comp, String compName, VariantContext eval, String evalName, String sampleName, String familyName) {
         if (eval != null) {
             final GenomeLoc loc = getVariantEvalWalker().getToolkit().getGenomeLocParser().createGenomeLoc(eval);
             IntervalTree<GenomeLoc> intervalTree = intervalTreeByContig.get(loc.getContig());
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/stratifications/JexlExpression.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/stratifications/JexlExpression.java
index 00fec2a..7a11062 100644
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/stratifications/JexlExpression.java
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/stratifications/JexlExpression.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -25,8 +25,8 @@
 
 package org.broadinstitute.gatk.tools.walkers.varianteval.stratifications;
 
-import org.broadinstitute.gatk.engine.contexts.ReferenceContext;
-import org.broadinstitute.gatk.engine.refdata.RefMetaDataTracker;
+import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
+import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
 import org.broadinstitute.gatk.tools.walkers.varianteval.util.SortableJexlVCMatchExp;
 import htsjdk.variant.variantcontext.VariantContext;
 import htsjdk.variant.variantcontext.VariantContextUtils;
@@ -54,7 +54,7 @@ public class JexlExpression extends VariantStratifier implements StandardStratif
         }
     }
 
-    public List<Object> getRelevantStates(ReferenceContext ref, RefMetaDataTracker tracker, VariantContext comp, String compName, VariantContext eval, String evalName, String sampleName) {
+    public List<Object> getRelevantStates(ReferenceContext ref, RefMetaDataTracker tracker, VariantContext comp, String compName, VariantContext eval, String evalName, String sampleName, String familyName) {
         ArrayList<Object> relevantStates = new ArrayList<Object>();
         relevantStates.add("none");
 
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/stratifications/Novelty.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/stratifications/Novelty.java
index 0114bf2..1faf818 100644
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/stratifications/Novelty.java
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/stratifications/Novelty.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -26,8 +26,8 @@
 package org.broadinstitute.gatk.tools.walkers.varianteval.stratifications;
 
 import org.broadinstitute.gatk.utils.commandline.RodBinding;
-import org.broadinstitute.gatk.engine.contexts.ReferenceContext;
-import org.broadinstitute.gatk.engine.refdata.RefMetaDataTracker;
+import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
+import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
 import htsjdk.variant.variantcontext.VariantContext;
 
 import java.util.*;
@@ -48,7 +48,7 @@ public class Novelty extends VariantStratifier implements StandardStratification
         knowns = getVariantEvalWalker().getKnowns();
     }
 
-    public List<Object> getRelevantStates(ReferenceContext ref, RefMetaDataTracker tracker, VariantContext comp, String compName, VariantContext eval, String evalName, String sampleName) {
+    public List<Object> getRelevantStates(ReferenceContext ref, RefMetaDataTracker tracker, VariantContext comp, String compName, VariantContext eval, String evalName, String sampleName, String familyName) {
         if (tracker != null && eval != null) {
             final Collection<VariantContext> knownComps = tracker.getValues(knowns, ref.getLocus());
             for ( final VariantContext c : knownComps ) {
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/stratifications/OneBPIndel.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/stratifications/OneBPIndel.java
index 7ad45e0..9430d9f 100644
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/stratifications/OneBPIndel.java
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/stratifications/OneBPIndel.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -25,8 +25,8 @@
 
 package org.broadinstitute.gatk.tools.walkers.varianteval.stratifications;
 
-import org.broadinstitute.gatk.engine.contexts.ReferenceContext;
-import org.broadinstitute.gatk.engine.refdata.RefMetaDataTracker;
+import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
+import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
 import htsjdk.variant.variantcontext.VariantContext;
 
 import java.util.Arrays;
@@ -47,7 +47,7 @@ public class OneBPIndel extends VariantStratifier {
     }
 
     @Override
-    public List<Object> getRelevantStates(ReferenceContext ref, RefMetaDataTracker tracker, VariantContext comp, String compName, VariantContext eval, String evalName, String sampleName) {
+    public List<Object> getRelevantStates(ReferenceContext ref, RefMetaDataTracker tracker, VariantContext comp, String compName, VariantContext eval, String evalName, String sampleName, String familyName) {
         if (eval != null && eval.isIndel()) {
             for ( int l : eval.getIndelLengths() )
                 if ( Math.abs(l) > 1 )
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/stratifications/RequiredStratification.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/stratifications/RequiredStratification.java
index 946c723..9a9313c 100644
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/stratifications/RequiredStratification.java
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/stratifications/RequiredStratification.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/stratifications/Sample.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/stratifications/Sample.java
index bd0b6f1..c052dca 100644
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/stratifications/Sample.java
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/stratifications/Sample.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -25,8 +25,8 @@
 
 package org.broadinstitute.gatk.tools.walkers.varianteval.stratifications;
 
-import org.broadinstitute.gatk.engine.contexts.ReferenceContext;
-import org.broadinstitute.gatk.engine.refdata.RefMetaDataTracker;
+import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
+import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
 import org.broadinstitute.gatk.tools.walkers.varianteval.evaluators.VariantEvaluator;
 import org.broadinstitute.gatk.tools.walkers.varianteval.evaluators.VariantSummary;
 import htsjdk.variant.variantcontext.VariantContext;
@@ -46,7 +46,7 @@ public class Sample extends VariantStratifier {
         states.addAll(getVariantEvalWalker().getSampleNamesForStratification());
     }
 
-    public List<Object> getRelevantStates(ReferenceContext ref, RefMetaDataTracker tracker, VariantContext comp, String compName, VariantContext eval, String evalName, String sampleName) {
+    public List<Object> getRelevantStates(ReferenceContext ref, RefMetaDataTracker tracker, VariantContext comp, String compName, VariantContext eval, String evalName, String sampleName, String familyName) {
         return Collections.singletonList((Object) sampleName);
     }
 
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/stratifications/SnpEffPositionModifier.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/stratifications/SnpEffPositionModifier.java
index c2ddd80..a09feae 100644
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/stratifications/SnpEffPositionModifier.java
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/stratifications/SnpEffPositionModifier.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -25,8 +25,8 @@
 
 package org.broadinstitute.gatk.tools.walkers.varianteval.stratifications;
 
-import org.broadinstitute.gatk.engine.contexts.ReferenceContext;
-import org.broadinstitute.gatk.engine.refdata.RefMetaDataTracker;
+import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
+import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
 import org.broadinstitute.gatk.tools.walkers.annotator.SnpEff;
 import org.broadinstitute.gatk.tools.walkers.annotator.SnpEff.EffectType;
 import org.broadinstitute.gatk.tools.walkers.annotator.SnpEff.InfoFieldKey;
@@ -64,7 +64,8 @@ public class SnpEffPositionModifier extends VariantStratifier {
 			final String compName,
 			final VariantContext eval,
 			final String evalName,
-			final String sampleName)
+			final String sampleName,
+			final String familyName)
 	{
 		final List<Object> relevantStates = new ArrayList<Object>();
 		if (eval != null && eval.isVariant() && eval.hasAttribute(InfoFieldKey.EFFECT_KEY.getKeyName())) {
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/stratifications/StandardStratification.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/stratifications/StandardStratification.java
index 41c52c2..de9388a 100644
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/stratifications/StandardStratification.java
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/stratifications/StandardStratification.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/stratifications/TandemRepeat.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/stratifications/TandemRepeat.java
index 6eba4b4..7349ccb 100644
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/stratifications/TandemRepeat.java
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/stratifications/TandemRepeat.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -25,8 +25,8 @@
 
 package org.broadinstitute.gatk.tools.walkers.varianteval.stratifications;
 
-import org.broadinstitute.gatk.engine.contexts.ReferenceContext;
-import org.broadinstitute.gatk.engine.refdata.RefMetaDataTracker;
+import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
+import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
 import org.broadinstitute.gatk.utils.variant.GATKVariantContextUtils;
 import htsjdk.variant.variantcontext.VariantContext;
 
@@ -48,7 +48,7 @@ public class TandemRepeat extends VariantStratifier {
     }
 
     @Override
-    public List<Object> getRelevantStates(ReferenceContext ref, RefMetaDataTracker tracker, VariantContext comp, String compName, VariantContext eval, String evalName, String sampleName) {
+    public List<Object> getRelevantStates(ReferenceContext ref, RefMetaDataTracker tracker, VariantContext comp, String compName, VariantContext eval, String evalName, String sampleName, String familyName) {
         if ( eval == null || ! eval.isIndel() )
             return ALL;
         else if ( GATKVariantContextUtils.isTandemRepeat(eval, ref.getForwardBases()) ) {
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/stratifications/VariantStratifier.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/stratifications/VariantStratifier.java
index 0832ebd..5b4e6c0 100644
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/stratifications/VariantStratifier.java
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/stratifications/VariantStratifier.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -25,8 +25,8 @@
 
 package org.broadinstitute.gatk.tools.walkers.varianteval.stratifications;
 
-import org.broadinstitute.gatk.engine.contexts.ReferenceContext;
-import org.broadinstitute.gatk.engine.refdata.RefMetaDataTracker;
+import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
+import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
 import org.broadinstitute.gatk.tools.walkers.varianteval.VariantEval;
 import org.broadinstitute.gatk.tools.walkers.varianteval.evaluators.VariantEvaluator;
 import org.broadinstitute.gatk.tools.walkers.varianteval.stratifications.manager.Stratifier;
@@ -54,7 +54,7 @@ public abstract class VariantStratifier implements Comparable<VariantStratifier>
 
     public abstract void initialize();
 
-    public abstract List<Object> getRelevantStates(ReferenceContext ref, RefMetaDataTracker tracker, VariantContext comp, String compName, VariantContext eval, String evalName, String sampleName);
+    public abstract List<Object> getRelevantStates(ReferenceContext ref, RefMetaDataTracker tracker, VariantContext comp, String compName, VariantContext eval, String evalName, String sampleName, String familyName);
 
     // -------------------------------------------------------------------------------------
     //
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/stratifications/VariantType.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/stratifications/VariantType.java
index 0ba5b60..bc017db 100644
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/stratifications/VariantType.java
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/stratifications/VariantType.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -25,8 +25,8 @@
 
 package org.broadinstitute.gatk.tools.walkers.varianteval.stratifications;
 
-import org.broadinstitute.gatk.engine.contexts.ReferenceContext;
-import org.broadinstitute.gatk.engine.refdata.RefMetaDataTracker;
+import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
+import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
 import htsjdk.variant.variantcontext.VariantContext;
 
 import java.util.Collections;
@@ -42,7 +42,7 @@ public class VariantType extends VariantStratifier {
             states.add(t.toString());
     }
 
-    public List<Object> getRelevantStates(ReferenceContext ref, RefMetaDataTracker tracker, VariantContext comp, String compName, VariantContext eval, String evalName, String sampleName) {
+    public List<Object> getRelevantStates(ReferenceContext ref, RefMetaDataTracker tracker, VariantContext comp, String compName, VariantContext eval, String evalName, String sampleName, String familyName) {
         return eval == null ? Collections.emptyList() : Collections.singletonList((Object)eval.getType().toString());
     }
 
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/stratifications/manager/StratNode.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/stratifications/manager/StratNode.java
index 0db9370..4d3dcae 100644
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/stratifications/manager/StratNode.java
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/stratifications/manager/StratNode.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/stratifications/manager/StratNodeIterator.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/stratifications/manager/StratNodeIterator.java
index a789b70..305f65c 100644
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/stratifications/manager/StratNodeIterator.java
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/stratifications/manager/StratNodeIterator.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/stratifications/manager/StratificationManager.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/stratifications/manager/StratificationManager.java
index 7290016..c9398a1 100644
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/stratifications/manager/StratificationManager.java
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/stratifications/manager/StratificationManager.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/stratifications/manager/Stratifier.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/stratifications/manager/Stratifier.java
index b096db9..f206a30 100644
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/stratifications/manager/Stratifier.java
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/stratifications/manager/Stratifier.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/util/Analysis.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/util/Analysis.java
index 67dc187..035faba 100644
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/util/Analysis.java
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/util/Analysis.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/util/AnalysisModuleScanner.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/util/AnalysisModuleScanner.java
index 411394b..d5f70a9 100644
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/util/AnalysisModuleScanner.java
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/util/AnalysisModuleScanner.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/util/DataPoint.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/util/DataPoint.java
index 0805cb5..ac02b4d 100644
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/util/DataPoint.java
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/util/DataPoint.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/util/EvaluationContext.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/util/EvaluationContext.java
index 7e9f2da..f18c381 100644
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/util/EvaluationContext.java
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/util/EvaluationContext.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -25,9 +25,9 @@
 
 package org.broadinstitute.gatk.tools.walkers.varianteval.util;
 
-import org.broadinstitute.gatk.engine.contexts.AlignmentContext;
-import org.broadinstitute.gatk.engine.contexts.ReferenceContext;
-import org.broadinstitute.gatk.engine.refdata.RefMetaDataTracker;
+import org.broadinstitute.gatk.utils.contexts.AlignmentContext;
+import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
+import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
 import org.broadinstitute.gatk.tools.walkers.varianteval.VariantEval;
 import org.broadinstitute.gatk.tools.walkers.varianteval.evaluators.VariantEvaluator;
 import org.broadinstitute.gatk.tools.walkers.varianteval.stratifications.manager.StratificationManager;
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/util/Molten.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/util/Molten.java
index 57c4fbc..f7a1566 100644
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/util/Molten.java
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/util/Molten.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/util/SortableJexlVCMatchExp.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/util/SortableJexlVCMatchExp.java
index a759eb2..3b1c142 100644
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/util/SortableJexlVCMatchExp.java
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/util/SortableJexlVCMatchExp.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/util/VariantEvalUtils.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/util/VariantEvalUtils.java
index 6f623d4..15198dd 100644
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/util/VariantEvalUtils.java
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/varianteval/util/VariantEvalUtils.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -26,9 +26,10 @@
 package org.broadinstitute.gatk.tools.walkers.varianteval.util;
 
 import org.apache.log4j.Logger;
+import org.broadinstitute.gatk.engine.samples.Sample;
 import org.broadinstitute.gatk.utils.commandline.RodBinding;
-import org.broadinstitute.gatk.engine.contexts.ReferenceContext;
-import org.broadinstitute.gatk.engine.refdata.RefMetaDataTracker;
+import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
+import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
 import org.broadinstitute.gatk.tools.walkers.varianteval.VariantEval;
 import org.broadinstitute.gatk.tools.walkers.varianteval.evaluators.StandardEval;
 import org.broadinstitute.gatk.tools.walkers.varianteval.evaluators.VariantEvaluator;
@@ -176,6 +177,11 @@ public class VariantEvalUtils {
             }
         }
 
+        //add MetricsCollection if required modules are included
+
+        if(evals.contains(classMap.get("CompOverlap")) && evals.contains(classMap.get("IndelSummary")) && evals.contains(classMap.get("TiTvVariantEvaluator")) && evals.contains(classMap.get("CountVariants")) && evals.contains(classMap.get("MultiallelicSummary")) )
+            evals.add(classMap.get("MetricsCollection"));
+
         return evals;
     }
 
@@ -250,6 +256,7 @@ public class VariantEvalUtils {
                         boolean byFilter,
                         boolean subsetBySample,
                         boolean trackPerSample,
+                        boolean trackPerFamily,
                         boolean mergeTracks) {
         if (tracker == null)
             return null;
@@ -265,9 +272,10 @@ public class VariantEvalUtils {
                 // First, filter the VariantContext to represent only the samples for evaluation
                 VariantContext vcsub = vc;
 
-                if (subsetBySample && vc.hasGenotypes())
+                if ((subsetBySample) && vc.hasGenotypes())
                     vcsub = getSubsetOfVariantContext(vc, variantEvalWalker.getSampleNamesForEvaluation());
 
+                //always add a mapping for all samples together
                 if ((byFilter || !vcsub.isFiltered())) {
                     addMapping(mapping, VariantEval.getAllSampleName(), vcsub);
                 }
@@ -282,6 +290,26 @@ public class VariantEvalUtils {
                         }
                     }
                 }
+                else if (vc.hasGenotypes() && trackPerFamily) {
+                    for (final String familyName : variantEvalWalker.getFamilyNamesForEvaluation()) {
+                        Set<String> familyMemberNames = new HashSet<>();
+                        //if the current stratification family name is "all", then add all the families to the VC for evaluation here
+                        if (familyName.equals(VariantEval.getAllFamilyName())) {
+                            familyMemberNames = variantEvalWalker.getSampleNamesForEvaluation();
+                        }
+                        else {
+                            Set<Sample> familyMembers = variantEvalWalker.getToolkit().getSampleDB().getFamily(familyName);
+                            for (final Sample s : familyMembers) {
+                                familyMemberNames.add(s.getID());
+                            }
+                        }
+                        VariantContext samplevc = getSubsetOfVariantContext(vc, familyMemberNames);
+
+                        if (byFilter || !samplevc.isFiltered()) {
+                            addMapping(mapping, familyName, samplevc);
+                        }
+                    }
+                }
             }
 
             if (mergeTracks && bindings.containsKey(firstTrack)) {
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/variantrecalibration/VQSRCalibrationCurve.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/variantrecalibration/VQSRCalibrationCurve.java
index 355441d..330a723 100644
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/variantrecalibration/VQSRCalibrationCurve.java
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/variantrecalibration/VQSRCalibrationCurve.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/variantutils/CombineVariants.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/variantutils/CombineVariants.java
index aa69693..0901248 100644
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/variantutils/CombineVariants.java
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/variantutils/CombineVariants.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -27,18 +27,19 @@ package org.broadinstitute.gatk.tools.walkers.variantutils;
 
 import org.broadinstitute.gatk.utils.commandline.*;
 import org.broadinstitute.gatk.engine.CommandLineGATK;
-import org.broadinstitute.gatk.engine.contexts.AlignmentContext;
-import org.broadinstitute.gatk.engine.contexts.ReferenceContext;
+import org.broadinstitute.gatk.utils.contexts.AlignmentContext;
+import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
 import org.broadinstitute.gatk.engine.io.stubs.VariantContextWriterStub;
-import org.broadinstitute.gatk.engine.refdata.RefMetaDataTracker;
+import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
 import org.broadinstitute.gatk.engine.walkers.Reference;
 import org.broadinstitute.gatk.engine.walkers.RodWalker;
 import org.broadinstitute.gatk.engine.walkers.TreeReducible;
 import org.broadinstitute.gatk.engine.walkers.Window;
-import org.broadinstitute.gatk.tools.walkers.annotator.ChromosomeCountConstants;
-import org.broadinstitute.gatk.utils.SampleUtils;
+import org.broadinstitute.gatk.utils.variant.ChromosomeCountConstants;
+import org.broadinstitute.gatk.engine.SampleUtils;
 import org.broadinstitute.gatk.utils.help.HelpConstants;
-import org.broadinstitute.gatk.utils.variant.GATKVCFUtils;
+import org.broadinstitute.gatk.engine.GATKVCFUtils;
+import org.broadinstitute.gatk.utils.variant.GATKVCFConstants;
 import org.broadinstitute.gatk.utils.variant.GATKVariantContextUtils;
 import htsjdk.variant.vcf.*;
 import org.broadinstitute.gatk.utils.exceptions.UserException;
@@ -51,41 +52,43 @@ import htsjdk.variant.variantcontext.writer.VariantContextWriter;
 
 import java.util.*;
 
+
 /**
- * Combines VCF records from different sources.
+ * Combine variant records from different sources
  *
- * <p>
- * CombineVariants combines VCF records from different sources. Any (unique) name can be used to bind your rod data
- * and any number of sources can be input. This tool currently supports two different combination types for each of
- * variants (the first 8 fields of the VCF) and genotypes (the rest).
- * Merge: combines multiple records into a single one; if sample names overlap then they are uniquified.
- * Union: assumes each rod represents the same set of samples (although this is not enforced); using the
- * priority list (if provided), it emits a single record instance at every position represented in the rods.
+ * <p>CombineVariants reads in variants records from separate ROD (Reference-Ordered Data) sources and combines them into
+ * a single VCF. Any number of sources can be input. This tool aims to fulfill two main possible use cases, reflected
+ * by the two combination options (MERGE and UNION), for merging records at the variant level (the first 8 fields of
+ * the VCF) or at the genotype level (the rest).</p>
  *
- * CombineVariants will include a record at every site in all of your input VCF files, and annotate which input ROD
- * bindings the record is present, pass, or filtered in in the set attribute in the INFO field. In effect,
- * CombineVariants always produces a union of the input VCFs.  However, any part of the Venn of the N merged VCFs
- * can be exacted using JEXL expressions on the set attribute using SelectVariants.  If you want to extract just
- * the records in common between two VCFs, you would first run CombineVariants on the two files to generate a single
- * VCF and then run SelectVariants to extract the common records with -select 'set == "Intersection"', as worked out
- * in the detailed example in the documentation guide.
+ * <ul>
+ * <li><b>MERGE:</b> combines multiple variant records present at the same site in the different input sources into a
+ * single variant record in the output. If sample names overlap, then they are "uniquified" by default, which means a
+ * suffix is appended to make them unique. <em>Note that in version 3.3, the automatic uniquifying was disabled
+ * (unintentionally), and required setting `-genotypeMergeOptions UNIQUIFY` manually.</em></li>
  *
- * Note that CombineVariants supports multi-threaded parallelism (8/15/12).  This is particularly useful
- * when converting from VCF to BCF2, which can be expensive.  In this case each thread spends CPU time
- * doing the conversion, and the GATK engine is smart enough to merge the partial BCF2 blocks together
- * efficiency.  However, since this merge runs in only one thread, you can quickly reach diminishing
- * returns with the number of parallel threads.  -nt 4 works well but -nt 8 may be too much.
+ * <li><b>UNION:</b> assumes that each ROD source represents the same set of samples (although this is not enforced).
+ * It uses the priority list (if provided) to emit a single record instance at every position represented in the input RODs.</li>
+ * </ul>
+ *
+ * <p>By default, the input sets will be named variants, variants2, variants3, and so on. You can override this by
+ * providing an explicit name tag for each input, using the syntax " -V:format,name". Each input tagged in this
+ * way will be labeled as such in the output (i.e., set=name rather than set=variants2). For example, you could specify
+ * a set of control samples as " -V:vcf,control my_control_samples.vcf", and the resulting VCF records would contain
+ * the annotation "set=control" in the INFO field. It is strongly recommended to provide explicit names in this way
+ * when a rod priority list is provided.</p>
  *
- * Some fine details about the merging algorithm:
- *   <ul>
- *   <li> As of GATK 2.1, when merging multiple VCF records at a site, the combined VCF record has the QUAL of
- *      the first VCF record with a non-MISSING QUAL value.  The previous behavior was to take the
- *      max QUAL, which resulted in sometime strange downstream confusion</li>
- *   </ul>
+ * <p>CombineVariants will emit a record for every site that was present in any of your input VCF files, and will annotate
+ * (in the set attribute in the INFO field) whether the record had a PASS or FILTER status in each input ROD. In effect,
+ * CombineVariants always produces a union of the input VCFs.  However, any part of the Venn of the merged VCFs
+ * can be extracted using JEXL expressions on the set attribute using SelectVariants.  If you want to extract just
+ * the records in common between two VCFs, you would first run CombineVariants on the two files to generate a single
+ * VCF and then run SelectVariants to extract the common records with `-select 'set == "Intersection"'`, as worked out
+ * in the detailed example in the documentation guide.</p>
  *
  * <h3>Input</h3>
  * <p>
- * One or more variant sets to combine.
+ * Two or more variant sets to combine.
  * </p>
  *
  * <h3>Output</h3>
@@ -93,44 +96,55 @@ import java.util.*;
  * A combined VCF.
  * </p>
  *
- * <h3>Examples</h3>
+ * <h3>Usage examples</h3>
+ *  
+ * <h4>Merge two separate callsets</h4>
  * <pre>
- * java -Xmx2g -jar GenomeAnalysisTK.jar \
- *   -R ref.fasta \
+ * java -jar GenomeAnalysisTK.jar \
  *   -T CombineVariants \
+ *   -R reference.fasta \
  *   --variant input1.vcf \
  *   --variant input2.vcf \
  *   -o output.vcf \
  *   -genotypeMergeOptions UNIQUIFY
+ * </pre>
  *
- * java -Xmx2g -jar GenomeAnalysisTK.jar \
- *   -R ref.fasta \
+ * <h4>Get the union of calls made on the same samples </h4>
+ * <pre>
+ * java -jar GenomeAnalysisTK.jar \
  *   -T CombineVariants \
+ *   -R reference.fasta \
  *   --variant:foo input1.vcf \
  *   --variant:bar input2.vcf \
  *   -o output.vcf \
- *   -genotypeMergeOptions PRIORITIZE
+ *   -genotypeMergeOptions PRIORITIZE \
  *   -priority foo,bar
  * </pre>
  *
+ * <h3>Caveats</h3>
+ * <ul>
+ * <li>This tool is not intended to manipulate GVCFS! To combine GVCF files output by HaplotypeCaller, use CombineGVCFs.</li>
+ * <li>To join intermediate VCFs produced by running jobs in parallel by interval (e.g. by chromosome), use CatVariants.</li>
+ * </ul>
+ *
+ * <h3>Additional notes</h3>
+ * <ul>
+ * <li> Using this tool's multi-threaded parallelism capability is particularly useful
+ * when converting from VCF to BCF2, which can be time-consuming. In this case each thread spends CPU time
+ * doing the conversion, and the GATK engine is smart enough to merge the partial BCF2 blocks together
+ * efficiently.  However, since this merge runs in only one thread, you can quickly reach diminishing
+ * returns with the number of parallel threads.  In our hands, `-nt 4` works well but `-nt 8` tends to be too much.</li>
+ * <li>Since GATK 2.1, when merging multiple VCF records at a site, the combined VCF record has the QUAL of the first
+ * VCF record with a non-MISSING QUAL value.  The previous behavior was to take the max QUAL, which could result
+ * in strange downstream confusion</li>
+ * </ul>
+ *
  */
 @DocumentedGATKFeature( groupName = HelpConstants.DOCS_CAT_VARMANIP, extraDocs = {CommandLineGATK.class} )
 @Reference(window=@Window(start=-50,stop=50))
 public class CombineVariants extends RodWalker<Integer, Integer> implements TreeReducible<Integer> {
-    /**
-     * The VCF files to merge together
-     *
-     * variants can take any number of arguments on the command line.  Each -V argument
-     * will be included in the final merged output VCF.  If no explicit name is provided,
-     * the -V arguments will be named using the default algorithm: variants, variants2, variants3, etc.
-     * The user can override this by providing an explicit name -V:name,vcf for each -V argument,
-     * and each named argument will be labeled as such in the output (i.e., set=name rather than
-     * set=variants2).  The order of arguments does not matter unless except for the naming, so
-     * if you provide an rod priority list and no explicit names than variants, variants2, etc
-     * are technically order dependent.  It is strongly recommended to provide explicit names when
-     * a rod priority list is provided.
-     */
-    @Input(fullName="variant", shortName = "V", doc="Input VCF file", required=true)
+
+    @Input(fullName="variant", shortName = "V", doc="VCF files to merge together", required=true)
     public List<RodBindingCollection<VariantContext>> variantCollections;
     final private List<RodBinding<VariantContext>> variants = new ArrayList<>();
 
@@ -148,48 +162,75 @@ public class CombineVariants extends RodWalker<Integer, Integer> implements Tree
     public GATKVariantContextUtils.MultipleAllelesMergeType multipleAllelesMergeType = GATKVariantContextUtils.MultipleAllelesMergeType.BY_TYPE;
 
     /**
-     * Used when taking the union of variants that contain genotypes.  A complete priority list MUST be provided.
+     * Refers to the merging priority behavior described in the tool documentation regarding the choice of which record
+     * gets emitted when taking the union of variants that contain genotypes. The list must be passed as a
+     * comma-separated string listing the names of the variant input files. The list must be complete and include all
+     * variant inputs that are being provided to the tool. Use name tags for best results.
      */
-    @Argument(fullName="rod_priority_list", shortName="priority", doc="A comma-separated string describing the priority ordering for the genotypes as far as which record gets emitted", required=false)
+    @Argument(fullName="rod_priority_list", shortName="priority", doc="Ordered list specifying priority for merging", required=false)
     public String PRIORITY_STRING = null;
 
-    @Argument(fullName="printComplexMerges", shortName="printComplexMerges", doc="Print out interesting sites requiring complex compatibility merging", required=false)
+    @Argument(fullName="printComplexMerges", shortName="printComplexMerges", doc="Emit interesting sites requiring complex compatibility merging to file", required=false)
     public boolean printComplexMerges = false;
 
-    @Argument(fullName="filteredAreUncalled", shortName="filteredAreUncalled", doc="If true, then filtered VCFs are treated as uncalled, so that filtered set annotations don't appear in the combined VCF", required=false)
+    /**
+     * If enabled, this flag causes filtered variants (i.e. variant records where the FILTER field is populated by
+     * something other than PASS or a dot) to be omitted from the output.
+     */
+    @Argument(fullName="filteredAreUncalled", shortName="filteredAreUncalled", doc="Treat filtered variants as uncalled", required=false)
     public boolean filteredAreUncalled = false;
 
     /**
-     * Used to generate a sites-only file.
+     * If this flag is enabled, the INFO, FORMAT and sample-level (genotype) fields will not be emitted to the output file.
      */
-    @Argument(fullName="minimalVCF", shortName="minimalVCF", doc="If true, then the output VCF will contain no INFO or genotype FORMAT fields", required=false)
+    @Argument(fullName="minimalVCF", shortName="minimalVCF", doc="Emit a sites-only file", required=false)
     public boolean minimalVCF = false;
 
-    @Argument(fullName="excludeNonVariants", shortName="env", doc="Don't include loci found to be non-variant after the combining procedure", required=false)
+    /**
+     * Exclude sites that do not contain any called ALT alleles in the merged callset. The evaluation is made after the
+     * merging procedure is complete.
+     */
+    @Argument(fullName="excludeNonVariants", shortName="env", doc="Exclude sites where no variation is present after merging", required=false)
     public boolean EXCLUDE_NON_VARIANTS = false;
 
     /**
-     * Set to 'null' if you don't want the set field emitted.
+     * Key used in the INFO key=value tag emitted describing which set(s) the combined record came from
+     * (e.g. set=control). This provides the option to override the default naming, so instead of set=control you could
+     * have it be origin=control, or any other word you want that is not already an INFO field attribute. Set this to
+     * 'null' if you don't want the set attribute emitted at all.
      */
-    @Argument(fullName="setKey", shortName="setKey", doc="Key used in the INFO key=value tag emitted describing which set the combined VCF record came from", required=false)
+    @Argument(fullName="setKey", shortName="setKey", doc="Key name for the set attribute", required=false)
     public String SET_KEY = "set";
 
     /**
-     * This option allows the user to perform a simple merge (concatenation) to combine the VCFs, drastically reducing the runtime.
+     * This option allows you to perform a simple merge (concatenation) to combine the VCFs, drastically reducing
+     * runtime. Note that in many cases where you think you want to use this option, you may want to check out the
+     * CatVariants tool instead, because CatVariants provides the same functionality, but does so even more efficiently.
      */
-    @Argument(fullName="assumeIdenticalSamples", shortName="assumeIdenticalSamples", doc="If true, assume input VCFs have identical sample sets and disjoint calls", required=false)
+    @Argument(fullName="assumeIdenticalSamples", shortName="assumeIdenticalSamples", doc="Assume input VCFs have identical sample sets and disjoint calls", required=false)
     public boolean ASSUME_IDENTICAL_SAMPLES = false;
 
-    @Argument(fullName="minimumN", shortName="minN", doc="Combine variants and output site only if the variant is present in at least N input files.", required=false)
+    /**
+     * Sites that are present in fewer than this number of inputs will be ignored. This is a convenient way to build
+     * a collection of common variants and exclude rare variants.
+     */
+    @Argument(fullName="minimumN", shortName="minN", doc="Minimum number of input files the site must be observed in to be included", required=false)
     public int minimumN = 1;
 
     /**
-     * This option allows the suppression of the command line in the VCF header. This is most often usefully when combining variants for dozens or hundreds of smaller VCFs.
+     * By default, this tool writes the command line that was used in the header of the output VCF file. This flag
+     * enables you to override that behavior. This is most often useful when combining variants for dozens or
+     * hundreds of smaller VCFs iteratively, to avoid cluttering the header with a lot of command lines.
      */
-    @Argument(fullName="suppressCommandLineHeader", shortName="suppressCommandLineHeader", doc="If true, do not output the header containing the command line used", required=false)
+    @Argument(fullName="suppressCommandLineHeader", shortName="suppressCommandLineHeader", doc="Do not output the command line to the header", required=false)
     public boolean SUPPRESS_COMMAND_LINE_HEADER = false;
 
-    @Argument(fullName="mergeInfoWithMaxAC", shortName="mergeInfoWithMaxAC", doc="If true, when VCF records overlap the info field is taken from the one with the max AC instead of only taking the fields which are identical across the overlapping records.", required=false)
+    /**
+     * By default, the INFO field of the merged variant record only contains the INFO field attributes for which all
+     * original overlapping records had the same values. Discordant attributes are therefore discarded. This flag allows you to
+     * override that behavior and simply copy over the INFO field contents of whichever record had the highest AC value.
+     */
+    @Argument(fullName="mergeInfoWithMaxAC", shortName="mergeInfoWithMaxAC", doc="Use the INFO content of the record with the highest AC", required=false)
     public boolean MERGE_INFO_WITH_MAX_AC = false;
 
     private List<String> priority = null;
@@ -205,16 +246,16 @@ public class CombineVariants extends RodWalker<Integer, Integer> implements Tree
             sitesOnlyVCF = ((VariantContextWriterStub)vcfWriter).getWriterOptions().contains(Options.DO_NOT_WRITE_GENOTYPES);
             if ( sitesOnlyVCF ) logger.info("Pre-stripping genotypes for performance");
         } else
-            logger.warn("VCF output file not an instance of VCFWriterStub; cannot enable sites only output option");
+            logger.warn("VCF output file not an instance of VCFWriterStub; cannot enable sites-only output option");
 
         validateAnnotateUnionArguments();
 
         final boolean sampleNamesAreUnique = SampleUtils.verifyUniqueSamplesNames(vcfRods);
 
-        if (genotypeMergeOption == null) {
+        if (genotypeMergeOption == null && !ASSUME_IDENTICAL_SAMPLES) {
             if (!sampleNamesAreUnique)
                 throw new UserException("Duplicate sample names were discovered but no genotypemergeoption was supplied. " +
-                    "To combine samples without merging specify --genotypemergeoption UNIQUIFY. Merging duplicate samples " +
+                    "To combine samples without merging, specify --genotypemergeoption UNIQUIFY. Merging duplicate samples " +
                     "without specified priority is unsupported, but can be achieved by specifying --genotypemergeoption UNSORTED.");
             else
                 genotypeMergeOption = GATKVariantContextUtils.GenotypeMergeType.UNSORTED;
@@ -337,7 +378,7 @@ public class CombineVariants extends RodWalker<Integer, Integer> implements Tree
             if ( mergedVC == null )
                 continue;
 
-            if ( mergedVC.hasAllele(GATKVariantContextUtils.NON_REF_SYMBOLIC_ALLELE) )
+            if ( mergedVC.hasAllele(GATKVCFConstants.NON_REF_SYMBOLIC_ALLELE) )
                 throw new UserException("CombineVariants should not be used to merge gVCFs produced by the HaplotypeCaller; use CombineGVCFs instead");
 
             final VariantContextBuilder builder = new VariantContextBuilder(mergedVC);
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/variantutils/ConcordanceMetrics.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/variantutils/ConcordanceMetrics.java
index 2b1897c..5019662 100644
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/variantutils/ConcordanceMetrics.java
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/variantutils/ConcordanceMetrics.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -46,9 +46,9 @@ public class ConcordanceMetrics {
     final PrintStream sitesFile;
 
     public ConcordanceMetrics(VCFHeader evaluate, VCFHeader truth, PrintStream inputSitesFile) {
-        HashSet<String> overlappingSamples = new HashSet<String>(evaluate.getGenotypeSamples());
+        HashSet<String> overlappingSamples = new HashSet<>(evaluate.getGenotypeSamples());
         overlappingSamples.retainAll(truth.getGenotypeSamples());
-        perSampleGenotypeConcordance = new HashMap<String, GenotypeConcordanceTable>(overlappingSamples.size());
+        perSampleGenotypeConcordance = new HashMap<>(overlappingSamples.size());
         for ( String sample : overlappingSamples ) {
             perSampleGenotypeConcordance.put(sample,new GenotypeConcordanceTable());
         }
@@ -82,7 +82,7 @@ public class ConcordanceMetrics {
     }
 
     public Map<String,Double> getPerSampleNRD() {
-        Map<String,Double> nrd = new HashMap<String,Double>(perSampleGenotypeConcordance.size());
+        Map<String,Double> nrd = new HashMap<>(perSampleGenotypeConcordance.size());
         for ( Map.Entry<String,GenotypeConcordanceTable> sampleTable : perSampleGenotypeConcordance.entrySet() ) {
             nrd.put(sampleTable.getKey(),calculateNRD(sampleTable.getValue()));
         }
@@ -91,7 +91,7 @@ public class ConcordanceMetrics {
     }
 
     public Map<String,Double> getPerSampleOGC() {
-        Map<String,Double> ogc = new HashMap<String,Double>(perSampleGenotypeConcordance.size());
+        Map<String,Double> ogc = new HashMap<>(perSampleGenotypeConcordance.size());
         for ( Map.Entry<String,GenotypeConcordanceTable> sampleTable : perSampleGenotypeConcordance.entrySet() ) {
             ogc.put(sampleTable.getKey(),calculateOGC(sampleTable.getValue()));
         }
@@ -108,7 +108,7 @@ public class ConcordanceMetrics {
     }
 
     public Map<String,Double> getPerSampleNRS() {
-        Map<String,Double> nrs = new HashMap<String,Double>(perSampleGenotypeConcordance.size());
+        Map<String,Double> nrs = new HashMap<>(perSampleGenotypeConcordance.size());
         for ( Map.Entry<String,GenotypeConcordanceTable> sampleTable : perSampleGenotypeConcordance.entrySet() ) {
             nrs.put(sampleTable.getKey(),calculateNRS(sampleTable.getValue()));
         }
@@ -121,25 +121,20 @@ public class ConcordanceMetrics {
     }
 
     @Requires({"eval != null","truth != null"})
-    public void update(VariantContext eval, VariantContext truth) {
-        boolean doPrint = false;
+    public void update(final VariantContext eval, final VariantContext truth) {
         overallSiteConcordance.update(eval,truth);
-        Set<String> alleleTruth = new HashSet<String>(8);
-        String truthRef = truth.getReference().getBaseString();
-        alleleTruth.add(truthRef);
-        for ( Allele a : truth.getAlternateAlleles() ) {
-            alleleTruth.add(a.getBaseString());
-        }
-        for ( String sample : perSampleGenotypeConcordance.keySet() ) {
-            Genotype evalGenotype = eval.getGenotype(sample);
-            Genotype truthGenotype = truth.getGenotype(sample);
+        final Set<Allele> truthAlleles = new HashSet<>(truth.getAlleles());
+        for ( final String sample : perSampleGenotypeConcordance.keySet() ) {
+            final Genotype evalGenotype = eval.getGenotype(sample);
+            final Genotype truthGenotype = truth.getGenotype(sample);
             // ensure genotypes are either no-call ("."), missing (empty alleles), or diploid
             if ( ( ! evalGenotype.isNoCall() && evalGenotype.getPloidy() != 2 && evalGenotype.getPloidy() > 0) ||
                  ( ! truthGenotype.isNoCall() && truthGenotype.getPloidy() != 2 && truthGenotype.getPloidy() > 0) ) {
                 throw new UserException(String.format("Concordance Metrics is currently only implemented for DIPLOID genotypes, found eval ploidy: %d, comp ploidy: %d",evalGenotype.getPloidy(),truthGenotype.getPloidy()));
             }
-            perSampleGenotypeConcordance.get(sample).update(evalGenotype,truthGenotype,alleleTruth,truthRef);
-            doPrint = overallGenotypeConcordance.update(evalGenotype,truthGenotype,alleleTruth,truthRef);
+            final boolean allelesMatch = doAllelesMatch(evalGenotype, truthGenotype, truth.getReference(), truthAlleles);
+            perSampleGenotypeConcordance.get(sample).update(allelesMatch, evalGenotype, truthGenotype);
+            final boolean doPrint = overallGenotypeConcordance.update(allelesMatch, evalGenotype, truthGenotype);
             if(sitesFile != null && doPrint)
                 sitesFile.println(eval.getChr() + ":" + eval.getStart() + "\t" + sample + "\t" + truthGenotype.getType() + "\t" + evalGenotype.getType());
         }
@@ -211,6 +206,29 @@ public class ConcordanceMetrics {
         return total == 0l ? 0.0 : ( (double) confirmedVariant ) / ( (double) ( total ) );
     }
 
+    private boolean doAllelesMatch(final Genotype eval, final Genotype truth,
+                                   final Allele truthRef, final Set<Allele> truthSiteAlleles) {
+        // When determining if alleles match, there are a number of cases to consider.  In order:
+        //  1) If either genotype is uncalled or unavailable, the alleles MATCH
+        //  2) If the truth genotype is hom ref, then:
+        //     a) If the truth variant is monoallelic (no alternate alleles), the alleles MATCH
+        //     b) Otherwise, the alleles match IFF the alleles in the eval genotype are a subset
+        //        of the alleles in the truth VARIANT
+        //  3) Otherwise, the alleles match IFF the alleles in the eval genotype are a subset
+        //     of the alleles in (the truth GENOTYPE + the truth REF allele)
+        boolean matching = true;
+        if (eval.isCalled() && truth.isCalled()) { // Case 1
+            if (truth.isHomRef()) { // Case 2
+                matching = truthSiteAlleles.size() == 1 || truthSiteAlleles.containsAll(eval.getAlleles());
+            } else { // Case 3
+                final Set<Allele> truthAlleles = new HashSet<>(truth.getAlleles());
+                truthAlleles.add(truthRef);
+                matching = truthAlleles.containsAll(eval.getAlleles());
+            }
+        }
+        return matching;
+    }
+
 
     class GenotypeConcordanceTable {
 
@@ -223,38 +241,10 @@ public class ConcordanceMetrics {
         }
 
         @Requires({"eval!=null","truth != null","truthAlleles != null"})
-        public Boolean update(Genotype eval, Genotype truth, Set<String> truthAlleles, String truthRef) {
-            // this is slow but correct.
-
-            // NOTE: a reference call in "truth" is a special case, the eval can match *any* of the truth alleles
-            // that is, if the reference base is C, and a sample is C/C in truth, A/C, A/A, T/C, T/T will
-            // all match, so long as A and T are alleles in the truth callset.
-            boolean matchingAlt = true;
-            int evalGT, truthGT;
-            if ( eval.isCalled() && truth.isCalled() && truth.isHomRef() ) {
-                // by default, no-calls "match" between alleles, so if
-                // one or both sites are no-call or unavailable, the alt alleles match
-                // otherwise, check explicitly: if the eval has an allele that's not ref, no-call, or present in truth
-                // the alt allele is mismatching - regardless of whether the genotype is correct.
-                for ( Allele evalAllele : eval.getAlleles() ) {
-                    matchingAlt &= truthAlleles.contains(evalAllele.getBaseString());
-                }
-            } else if ( eval.isCalled() && truth.isCalled() ) {
-                // otherwise, the eval genotype has to match either the alleles in the truth genotype, or the truth reference allele
-                // todo -- this can be sped up by caching the truth allele sets
-                Set<String> genoAlleles = new HashSet<String>(3);
-                genoAlleles.add(truthRef);
-                for ( Allele truthGenoAl : truth.getAlleles() ) {
-                    genoAlleles.add(truthGenoAl.getBaseString());
-                }
-                for ( Allele evalAllele : eval.getAlleles() ) {
-                    matchingAlt &= genoAlleles.contains(evalAllele.getBaseString());
-                }
-            }
-
+        public Boolean update(final boolean matchingAlt, final Genotype eval, final Genotype truth) {
             if ( matchingAlt ) {
-                evalGT = eval.getType().ordinal();
-                truthGT = truth.getType().ordinal();
+                final int evalGT = eval.getType().ordinal();
+                final int truthGT = truth.getType().ordinal();
                 genotypeCounts[evalGT][truthGT]++;
                 if(evalGT != truthGT)  //report variants where genotypes don't match
                     return true;
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/variantutils/FilterLiftedVariants.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/variantutils/FilterLiftedVariants.java
deleted file mode 100644
index 73995bf..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/variantutils/FilterLiftedVariants.java
+++ /dev/null
@@ -1,136 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.tools.walkers.variantutils;
-
-import org.broadinstitute.gatk.engine.walkers.Reference;
-import org.broadinstitute.gatk.engine.walkers.RodWalker;
-import org.broadinstitute.gatk.engine.walkers.Window;
-import org.broadinstitute.gatk.utils.commandline.ArgumentCollection;
-import org.broadinstitute.gatk.utils.commandline.Output;
-import org.broadinstitute.gatk.engine.CommandLineGATK;
-import org.broadinstitute.gatk.engine.arguments.StandardVariantContextInputArgumentCollection;
-import org.broadinstitute.gatk.engine.contexts.AlignmentContext;
-import org.broadinstitute.gatk.engine.contexts.ReferenceContext;
-import org.broadinstitute.gatk.engine.refdata.RefMetaDataTracker;
-import org.broadinstitute.gatk.utils.SampleUtils;
-import org.broadinstitute.gatk.utils.help.HelpConstants;
-import htsjdk.variant.vcf.VCFHeader;
-import htsjdk.variant.vcf.VCFHeaderLine;
-import org.broadinstitute.gatk.utils.variant.GATKVCFUtils;
-import org.broadinstitute.gatk.utils.help.DocumentedGATKFeature;
-import htsjdk.variant.variantcontext.writer.VariantContextWriter;
-import htsjdk.variant.variantcontext.VariantContext;
-
-import java.util.*;
-
-/**
- * Filters a lifted-over VCF file for ref bases that have been changed.
- *
- * "Lifting over" variants means adjusting variant calls from one reference to another. Specifically, the process adjusts the position of the call to match the corresponding position on the target reference.
- * For example, if you have variants called from reads aligned to the hg19 reference, and you want to compare them to calls made based on the b37 reference, you need to liftover one of the callsets to the other reference.
- *
- * FilteredLiftedVariants is intended to be the second of two processing steps for the liftover process. The first step is to run LiftoverVariants on your VCF file.
- * The second step is to run FilterLiftedVariants on the output of LiftoverVariants. This will produce valid well-behaved VCF files, where you'll see that the contig names in the header have all been correctly replaced.
- *
- */
- at DocumentedGATKFeature( groupName = HelpConstants.DOCS_CAT_VARMANIP, extraDocs = {CommandLineGATK.class} )
- at Reference(window=@Window(start=0,stop=100))
-public class FilterLiftedVariants extends RodWalker<Integer, Integer> {
-
-    @ArgumentCollection
-    protected StandardVariantContextInputArgumentCollection variantCollection = new StandardVariantContextInputArgumentCollection();
-
-    private static final int MAX_VARIANT_SIZE = 100;
-
-    @Output(doc="File to which variants should be written")
-    protected VariantContextWriter writer = null;
-
-    private long failedLocs = 0, totalLocs = 0;
-
-    public void initialize() {
-        String trackName = variantCollection.variants.getName();
-        Set<String> samples = SampleUtils.getSampleListWithVCFHeader(getToolkit(), Arrays.asList(trackName));
-        Map<String, VCFHeader> vcfHeaders = GATKVCFUtils.getVCFHeadersFromRods(getToolkit(), Arrays.asList(trackName));
-
-        final VCFHeader vcfHeader = new VCFHeader(vcfHeaders.containsKey(trackName) ? vcfHeaders.get(trackName).getMetaDataInSortedOrder() : Collections.<VCFHeaderLine>emptySet(), samples);
-        writer.writeHeader(vcfHeader);
-    }
-
-    /**
-     * Determines whether records should be filtered; if not, writes them to the output
-     *
-     * @param ref   the reference context
-     * @param vc    the VariantContext to process
-     * @return true if the record is not filtered, false otherwise
-     */
-    protected boolean filterOrWrite(final byte[] ref, final VariantContext vc) {
-	if ( ref == null ) throw new IllegalArgumentException("Cannot filter based on a null reference array");
-	if ( vc == null ) throw new IllegalArgumentException("Cannot filter a null Variant Context");
-
-        totalLocs++;
-
-        boolean filter = false;
-        final byte[] recordRef = vc.getReference().getBases();
-
-        // this can happen for records that get placed at the ends of chromosomes
-        if ( recordRef.length > ref.length ) {
-            filter = true;
-        } else {
-            for (int i = 0; i < recordRef.length && i < MAX_VARIANT_SIZE; i++) {
-                if ( recordRef[i] != ref[i] ) {
-                    filter = true;
-                    break;
-                }
-            }
-        }
-
-        if ( filter )
-            failedLocs++;
-        else
-            writer.add(vc);
-
-        return !filter;
-    }
-
-    public Integer map(RefMetaDataTracker tracker, ReferenceContext ref, AlignmentContext context) {
-        if ( tracker == null )
-            return 0;
-
-        final Collection<VariantContext> VCs = tracker.getValues(variantCollection.variants, context.getLocation());
-        for ( final VariantContext vc : VCs )
-            filterOrWrite(ref.getBases(), vc);
-
-        return 0;
-    }
-
-    public Integer reduceInit() { return 0; }
-
-    public Integer reduce(Integer value, Integer sum) { return 0; }
-
-    public void onTraversalDone(Integer result) {
-        System.out.println("Filtered " + failedLocs + " records out of " + totalLocs + " total records.");
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/variantutils/GenotypeConcordance.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/variantutils/GenotypeConcordance.java
index d2f251a..4ac6b9b 100644
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/variantutils/GenotypeConcordance.java
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/variantutils/GenotypeConcordance.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -27,16 +27,16 @@ package org.broadinstitute.gatk.tools.walkers.variantutils;
 
 import org.broadinstitute.gatk.utils.commandline.*;
 import org.broadinstitute.gatk.engine.CommandLineGATK;
-import org.broadinstitute.gatk.engine.contexts.AlignmentContext;
-import org.broadinstitute.gatk.engine.contexts.ReferenceContext;
-import org.broadinstitute.gatk.engine.refdata.RefMetaDataTracker;
-import org.broadinstitute.gatk.engine.report.GATKReport;
-import org.broadinstitute.gatk.engine.report.GATKReportTable;
+import org.broadinstitute.gatk.utils.contexts.AlignmentContext;
+import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
+import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
+import org.broadinstitute.gatk.utils.report.GATKReport;
+import org.broadinstitute.gatk.utils.report.GATKReportTable;
 import org.broadinstitute.gatk.engine.walkers.RodWalker;
 import org.broadinstitute.gatk.utils.collections.Pair;
 import org.broadinstitute.gatk.utils.help.DocumentedGATKFeature;
 import org.broadinstitute.gatk.utils.help.HelpConstants;
-import org.broadinstitute.gatk.utils.variant.GATKVCFUtils;
+import org.broadinstitute.gatk.engine.GATKVCFUtils;
 import htsjdk.variant.variantcontext.*;
 import htsjdk.variant.vcf.VCFHeader;
 
@@ -44,10 +44,10 @@ import java.io.PrintStream;
 import java.util.*;
 
 /**
- * Genotype concordance (per-sample and aggregate counts and frequencies, NRD/NRS and site allele overlaps) between two callsets
+ * Genotype concordance between two callsets
  *
  * <p>
- *  GenotypeConcordance takes in two callsets (vcfs) and tabulates the number of sites which overlap and share alleles,
+ *  This tool takes in two callsets (vcfs) and tabulates the number of sites which overlap and share alleles,
  *  and for each sample, the genotype-by-genotype counts (e.g. the number of sites at which a sample was
  *  called homozygous-reference in the EVAL callset, but homozygous-variant in the COMP callset). It outputs these
  *  counts as well as convenient proportions (such as the proportion of het calls in the EVAL which were called REF in
@@ -74,15 +74,15 @@ import java.util.*;
  *  <h4>Tables</h4>
  *  <p>
  *  Headers for the (non-moltenized -- see below) GenotypeConcordance counts and proportions tables give the genotype of
- *  the COMP callset followed by the genotype of the EVAL callset. For example the value corresponding to HOM_REF_HET
- *  reflects variants called HOM_REF in the COMP callset and HET in the EVAL callset. Variants for which the alternate
+ *  the EVAL callset followed by the genotype of the COMP callset. For example the value corresponding to HOM_REF_HET
+ *  reflects variants called HOM_REF in the EVAL callset and HET in the COMP callset. Variants for which the alternate
  *  alleles between the EVAL and COMP sample did not match are excluded from genotype comparisons and given in the
  *  "Mismatching_Alleles" field.
  *  </p>
  *  <p>
  *  It may be informative to reshape rows of the GenotypeConcordance counts and proportions tables into separate row-major tables
  *  where the columns indicate the COMP genotype and the rows indicate the EVAL genotype for easy comparison between the
- *  two callsets. This can be done with a command similar to d <- matrix(sampleRow,nrow=6,byrow=T) in R where sampleRow is the 36-value row corresponding to the sample of interest, excluding "Mismatching_Alleles".
+ *  two callsets. This can be done with the gsa.reshape.concordance.table function in the gsalib R library.
  *  In Excel this can be accomplished using the OFFSET function.
  *  </p>
  *  <ul>
@@ -192,7 +192,17 @@ import java.util.*;
  *  NA12891  NO_CALL_HOM_VAR   0.000
  *  (...)
  *  </pre>
-
+ *
+ * <h3>Usage example</h3>
+ * <pre>
+ * java -jar GenomeAnalysisTK.jar \
+ *   -T GenotypeConcordance \
+ *   -R reference.fasta \
+ *   -eval test_set.vcf \
+ *   -comp truth_set.vcf \
+ *   -o output.grp
+ * </pre>
+ *
  */
 @DocumentedGATKFeature( groupName = HelpConstants.DOCS_CAT_VARMANIP, extraDocs = {CommandLineGATK.class} )
 public class GenotypeConcordance extends RodWalker<List<Pair<VariantContext,VariantContext>>,ConcordanceMetrics> {
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/variantutils/LeftAlignAndTrimVariants.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/variantutils/LeftAlignAndTrimVariants.java
index 6410478..0ba7e10 100644
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/variantutils/LeftAlignAndTrimVariants.java
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/variantutils/LeftAlignAndTrimVariants.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -35,19 +35,19 @@ import org.broadinstitute.gatk.utils.commandline.ArgumentCollection;
 import org.broadinstitute.gatk.utils.commandline.Output;
 import org.broadinstitute.gatk.engine.CommandLineGATK;
 import org.broadinstitute.gatk.engine.arguments.StandardVariantContextInputArgumentCollection;
-import org.broadinstitute.gatk.engine.contexts.AlignmentContext;
-import org.broadinstitute.gatk.engine.contexts.ReferenceContext;
-import org.broadinstitute.gatk.engine.refdata.RefMetaDataTracker;
+import org.broadinstitute.gatk.utils.contexts.AlignmentContext;
+import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
+import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
 import org.broadinstitute.gatk.engine.walkers.Reference;
 import org.broadinstitute.gatk.engine.walkers.RodWalker;
 import org.broadinstitute.gatk.engine.walkers.Window;
-import org.broadinstitute.gatk.utils.SampleUtils;
+import org.broadinstitute.gatk.engine.SampleUtils;
 import org.broadinstitute.gatk.utils.collections.Pair;
 import org.broadinstitute.gatk.utils.help.HelpConstants;
 import org.broadinstitute.gatk.utils.variant.GATKVariantContextUtils;
 import htsjdk.variant.vcf.VCFHeader;
 import htsjdk.variant.vcf.VCFHeaderLine;
-import org.broadinstitute.gatk.utils.variant.GATKVCFUtils;
+import org.broadinstitute.gatk.engine.GATKVCFUtils;
 import org.broadinstitute.gatk.utils.help.DocumentedGATKFeature;
 import org.broadinstitute.gatk.utils.sam.AlignmentUtils;
 import htsjdk.variant.variantcontext.*;
@@ -57,18 +57,19 @@ import htsjdk.variant.variantcontext.writer.VariantContextWriterFactory;
 import java.util.*;
 
 /**
- * Left-aligns indels from a variants file.
+ * Left-align indels in a variant callset
  *
  * <p>
- * LeftAlignAndTrimVariants is a tool that takes a VCF file and left-aligns the indels inside it.  The same indel can often be
- * placed at multiple positions and still represent the same haplotype.  While the standard convention with VCF is to
- * place an indel at the left-most position this doesn't always happen, so this tool can be used to left-align them.
- * Note that this tool cannot handle anything other than bi-allelic, simple indels.  Complex events are written out unchanged.
- * Optionally, the tool will also trim common bases from indels, leaving them with a minimum representation.
+ * LeftAlignAndTrimVariants is a tool that takes a VCF file, left-aligns the indels and trims common bases from indels,
+ * leaving them with a minimum representation. The same indel can often be placed at multiple positions and still
+ * represent the same haplotype. While the standard convention with VCF is to place an indel at the left-most position
+ * this isn't always done, so this tool can be used to left-align them. This tool optionally splits multiallelic
+ * sites into biallelics and left-aligns individual alleles. Optionally, the tool will not trim common bases from indels.
+ * </p>
  *
  * <h3>Input</h3>
  * <p>
- * A variant set to left-align and trim.
+ * A variant call set to left-align and trim.
  * </p>
  *
  * <h3>Output</h3>
@@ -76,28 +77,74 @@ import java.util.*;
  * A left-aligned VCF.
  * </p>
  *
- * <h3>Examples</h3>
+ * <h3>Usage examples</h3>
+ *
+ * <h4>Left align and trim alleles</h4>
  * <pre>
- * java -Xmx2g -jar GenomeAnalysisTK.jar \
- *   -R ref.fasta \
+ * java -jar GenomeAnalysisTK.jar \
  *   -T LeftAlignAndTrimVariants \
+ *   -R reference.fasta \
  *   --variant input.vcf \
  *   -o output.vcf
  * </pre>
  *
+ * <h4>Left align and don't trim alleles</h4>
+ * <pre>
+ * java -jar GenomeAnalysisTK.jar \
+ *   -T LeftAlignAndTrimVariants \
+ *   -R reference.fasta \
+ *   --variant input.vcf \
+ *   -o output.vcf \
+ *   --dontTrimAlleles
+ * </pre>
+ *
+ * <h4>Left align and trim alleles, process alleles &lt;= 208 bases</h4>
+ * <pre>
+ * java -jar GenomeAnalysisTK.jar \
+ *   -T LeftAlignAndTrimVariants \
+ *   -R reference.fasta \
+ *   --variant input.vcf \
+ *   -o output.vcf \
+ *   --reference_window_stop 208
+ * </pre>
+ *
+ * <h4>Split multiallelics into biallelics, left align and trim alleles</h4>
+ * <pre>
+ * java -jar GenomeAnalysisTK.jar \
+ *   -T LeftAlignAndTrimVariants \
+ *   -R reference.fasta \
+ *   --variant input.vcf \
+ *   -o output.vcf \
+ *   --splitMultiallelics
+ * </pre>
+ *
+ * <h4>Split multiallelics into biallelics, left align but don't trim alleles</h4>
+ * <pre>
+ * java -jar GenomeAnalysisTK.jar \
+ *   -T LeftAlignAndTrimVariants \
+ *   -R reference.fasta \
+ *   --variant input.vcf \
+ *   -o output.vcf \
+ *   --splitMultiallelics \
+ *   --dontTrimAlleles
+ * </pre>
+ *
  */
 @DocumentedGATKFeature( groupName = HelpConstants.DOCS_CAT_VARMANIP, extraDocs = {CommandLineGATK.class} )
 @Reference(window=@Window(start=-200,stop=200))    // WARNING: if this changes,MAX_INDEL_LENGTH needs to change as well!
 public class LeftAlignAndTrimVariants extends RodWalker<Integer, Integer> {
 
+    // Log message for a reference allele that is too long
+    protected static final String REFERENCE_ALLELE_TOO_LONG_MSG = "Reference allele is too long";
+
     @ArgumentCollection
     protected StandardVariantContextInputArgumentCollection variantCollection = new StandardVariantContextInputArgumentCollection();
 
     /**
-     * If this argument is set, bases common to all alleles will be removed, leaving only their minimal representation.
+     * If this argument is set, bases common to all alleles will not be removed, so alleles will not be reduced to their minimal representation.
      */
-    @Argument(fullName="trimAlleles", shortName="trim", doc="Trim alleles to remove bases common to all of them", required=false)
-    protected boolean trimAlleles = false;
+    @Argument(fullName="dontTrimAlleles", shortName="notrim", doc="Do not Trim alleles to remove bases common to all of them", required=false)
+    protected boolean dontTrimAlleles = false;
 
     /**
      * If this argument is set, split multiallelic records and left-align individual alleles.
@@ -113,6 +160,10 @@ public class LeftAlignAndTrimVariants extends RodWalker<Integer, Integer> {
     private VariantContextWriter writer;
 
     private static final int MAX_INDEL_LENGTH = 200; // needs to match reference window size!
+
+    // Stop of the expanded window for which the reference context should be provided, relative to the locus.
+    private int referenceWindowStop;
+
     public void initialize() {
         String trackName = variantCollection.variants.getName();
         Set<String> samples = SampleUtils.getSampleListWithVCFHeader(getToolkit(), Arrays.asList(trackName));
@@ -121,7 +172,9 @@ public class LeftAlignAndTrimVariants extends RodWalker<Integer, Integer> {
         Set<VCFHeaderLine> headerLines = vcfHeaders.get(trackName).getMetaDataInSortedOrder();
         baseWriter.writeHeader(new VCFHeader(headerLines, samples));
 
-        writer = VariantContextWriterFactory.sortOnTheFly(baseWriter, 200);
+        writer = VariantContextWriterFactory.sortOnTheFly(baseWriter, MAX_INDEL_LENGTH);
+
+        referenceWindowStop = getToolkit().getArguments().reference_window_stop;
     }
 
     public Integer map(RefMetaDataTracker tracker, ReferenceContext ref, AlignmentContext context) {
@@ -132,32 +185,16 @@ public class LeftAlignAndTrimVariants extends RodWalker<Integer, Integer> {
 
         int changedSites = 0;
         for ( final VariantContext vc : VCs ) {
-            // split first into biallelics, and optionally trim alleles to minimal representation
-            Pair<VariantContext,Integer> result = new Pair<VariantContext, Integer>(vc,0); // default value
+            // split first into biallelics, and optionally don't trim alleles to minimal representation
             if (splitMultiallelics) {
                 final List<VariantContext> vcList = GATKVariantContextUtils.splitVariantContextToBiallelics(vc);
                 for (final VariantContext biallelicVC: vcList) {
-                    final VariantContext v = (trimAlleles ? GATKVariantContextUtils.trimAlleles(biallelicVC,true,true) : biallelicVC);
-                    result = alignAndWrite(v, ref);
-
-                    // strip out PLs and AD if we've subsetted the alleles
-                    if ( vcList.size() > 1 )
-                        result.first = new VariantContextBuilder(result.first).genotypes(GATKVariantContextUtils.stripPLsAndAD(result.first.getGenotypes())).make();
-
-                    writer.add(result.first);
-                    changedSites += result.second;
+                    changedSites += trimAlignWrite(biallelicVC, ref, vcList.size());
                 }
             }
             else {
-                if (trimAlleles)
-                    result = alignAndWrite(GATKVariantContextUtils.trimAlleles(vc,true,true), ref);
-                else
-                    result = alignAndWrite(vc,ref);
-                writer.add(result.first);
-                changedSites += result.second;
-
+                changedSites += trimAlignWrite(vc, ref, 1);
             }
-
         }
 
         return changedSites;
@@ -175,11 +212,48 @@ public class LeftAlignAndTrimVariants extends RodWalker<Integer, Integer> {
     }
 
     /**
-     * Main routine workhorse. By definitio, it will only take biallelic vc's. Splitting into multiple alleles has to be
+     * Trim, align and write out the vc.
+     *
+     * @param vc                Input VC with variants to left align
+     * @param ref               Reference context
+     * @param numBiallelics     Number of biallelics from the original VC
+     * @return                  Number of records left-aligned (0 or 1)
+     */
+    @Requires("vc != null")
+    protected int trimAlignWrite(final VariantContext vc, final ReferenceContext ref, final int numBiallelics ){
+
+        final int refLength =  vc.getReference().length();
+
+        // ignore if the reference length is greater than the reference window stop before and after expansion
+        if ( refLength > MAX_INDEL_LENGTH && refLength > referenceWindowStop ) {
+            logger.info(String.format("%s (%d) at position %s:%d; skipping that record. Set --reference_window_stop >= %d",
+                        REFERENCE_ALLELE_TOO_LONG_MSG, refLength, vc.getChr(), vc.getStart(), refLength));
+            return 0;
+        }
+
+        // optionally don't trim VC
+        final VariantContext v = dontTrimAlleles ? vc : GATKVariantContextUtils.trimAlleles(vc, true, true);
+
+        // align the VC
+        final Pair<VariantContext,Integer> result = alignAndWrite(v, ref);
+
+        // strip out PLs and AD if we've subsetted the alleles
+        if ( numBiallelics > 1 )
+            result.first = new VariantContextBuilder(result.first).genotypes(GATKVariantContextUtils.stripPLsAndAD(result.first.getGenotypes())).make();
+
+        // write out new VC
+        writer.add(result.first);
+
+        // number of records left aligned
+        return result.second;
+    }
+
+    /**
+     * Main routine workhorse. By definition, it will only take biallelic vc's. Splitting into multiple alleles has to be
      * handled by calling routine.
      * @param vc                  Input VC with variants to left align
      * @param ref                 Reference context
-     * @return                    # of records left-aligned (0 or 1) and new VC.
+     * @return                    Number of records left-aligned (0 or 1) and new VC.
      */
     @Requires({"vc != null","ref != null", "vc.isBiallelic() == true","ref.getBases().length>=2*MAX_INDEL_LENGTH+1"})
     @Ensures({"result != null","result.first != null", "result.second >=0"})
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/variantutils/LiftoverVariants.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/variantutils/LiftoverVariants.java
deleted file mode 100644
index 710aad5..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/variantutils/LiftoverVariants.java
+++ /dev/null
@@ -1,179 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.tools.walkers.variantutils;
-
-import htsjdk.samtools.liftover.LiftOver;
-import htsjdk.samtools.util.Interval;
-import htsjdk.samtools.SAMFileHeader;
-import htsjdk.samtools.SAMFileReader;
-import org.broadinstitute.gatk.utils.commandline.Argument;
-import org.broadinstitute.gatk.utils.commandline.ArgumentCollection;
-import org.broadinstitute.gatk.utils.commandline.Output;
-import org.broadinstitute.gatk.engine.CommandLineGATK;
-import org.broadinstitute.gatk.engine.arguments.StandardVariantContextInputArgumentCollection;
-import org.broadinstitute.gatk.engine.contexts.AlignmentContext;
-import org.broadinstitute.gatk.engine.contexts.ReferenceContext;
-import org.broadinstitute.gatk.engine.refdata.RefMetaDataTracker;
-import org.broadinstitute.gatk.engine.walkers.RodWalker;
-import org.broadinstitute.gatk.utils.SampleUtils;
-import org.broadinstitute.gatk.utils.help.HelpConstants;
-import org.broadinstitute.gatk.utils.variant.GATKVCFUtils;
-import org.broadinstitute.gatk.utils.variant.GATKVariantContextUtils;
-import htsjdk.variant.variantcontext.writer.Options;
-import htsjdk.variant.vcf.*;
-import org.broadinstitute.gatk.utils.exceptions.UserException;
-import org.broadinstitute.gatk.utils.help.DocumentedGATKFeature;
-import htsjdk.variant.variantcontext.VariantContext;
-import htsjdk.variant.variantcontext.VariantContextBuilder;
-import htsjdk.variant.variantcontext.writer.VariantContextWriter;
-import htsjdk.variant.variantcontext.writer.VariantContextWriterFactory;
-
-import java.io.File;
-import java.util.*;
-
-/**
- * Lifts a VCF file over from one build to another.
- *
- * "Lifting over" variants means adjusting variant calls from one reference to another. Specifically, the process adjusts the position of the call to match the corresponding position on the target reference.
- * For example, if you have variants called from reads aligned to the hg19 reference, and you want to compare them to calls made based on the b37 reference, you need to liftover one of the callsets to the other reference.
- *
- * LiftoverVariants is intended to be the first of two processing steps for the liftover process.
- * The second step is to run FilterLiftedVariants on the output of LiftoverVariants. This will produce valid well-behaved VCF files, where you'll see that the contig names in the header have all been correctly replaced.
- *
- * To be clear, the VCF resulting from the LiftoverVariants run is not guaranteed to be valid according to the official specification.  The file could
- * possibly be mis-sorted and the header may not be complete. That is why you need to run FilterLiftedVariants on it.
- */
- at DocumentedGATKFeature( groupName = HelpConstants.DOCS_CAT_VARMANIP, extraDocs = {CommandLineGATK.class} )
-public class LiftoverVariants extends RodWalker<Integer, Integer> {
-
-    @ArgumentCollection
-    protected StandardVariantContextInputArgumentCollection variantCollection = new StandardVariantContextInputArgumentCollection();
-
-    @Output(doc="File to which variants should be written", required=true, defaultToStdout=false)
-    protected File file = null;
-    protected VariantContextWriter writer = null;
-
-    @Argument(fullName="chain", shortName="chain", doc="Chain file", required=true)
-    protected File CHAIN = null;
-
-    @Argument(fullName="newSequenceDictionary", shortName="dict", doc="Sequence .dict file for the new build", required=true)
-    protected File NEW_SEQ_DICT = null;
-
-    @Argument(fullName="recordOriginalLocation", shortName="recordOriginalLocation", doc="Should we record what the original location was in the INFO field?", required=false)
-    protected Boolean RECORD_ORIGINAL_LOCATION = false;
-
-    private LiftOver liftOver;
-
-    private long successfulIntervals = 0, failedIntervals = 0;
-
-    public void initialize() {
-        try {
-            liftOver = new LiftOver(CHAIN);
-        } catch (RuntimeException e) {
-            throw new UserException.BadInput("there is a problem with the chain file you are using: " + e.getMessage());
-        }
-
-        liftOver.setLiftOverMinMatch(LiftOver.DEFAULT_LIFTOVER_MINMATCH);
-
-        try {
-            final SAMFileHeader toHeader = new SAMFileReader(NEW_SEQ_DICT).getFileHeader();
-            liftOver.validateToSequences(toHeader.getSequenceDictionary());
-        } catch (RuntimeException e) {
-            throw new UserException.BadInput("the chain file you are using is not compatible with the reference you are trying to lift over to; please use the appropriate chain file for the given reference");    
-        }
-
-        String trackName = variantCollection.variants.getName();
-        Set<String> samples = SampleUtils.getSampleListWithVCFHeader(getToolkit(), Arrays.asList(trackName));
-        Map<String, VCFHeader> vcfHeaders = GATKVCFUtils.getVCFHeadersFromRods(getToolkit(), Arrays.asList(trackName));
-
-        Set<VCFHeaderLine> metaData = new HashSet<VCFHeaderLine>();
-        if ( vcfHeaders.containsKey(trackName) )
-            metaData.addAll(vcfHeaders.get(trackName).getMetaDataInSortedOrder());
-        if ( RECORD_ORIGINAL_LOCATION ) {
-            metaData.add(new VCFInfoHeaderLine("OriginalChr", 1, VCFHeaderLineType.String, "Original contig name for the record"));
-            metaData.add(new VCFInfoHeaderLine("OriginalStart", 1, VCFHeaderLineType.Integer, "Original start position for the record"));
-        }
-
-
-        final VCFHeader vcfHeader = new VCFHeader(metaData, samples);
-        writer = VariantContextWriterFactory.create(file, getMasterSequenceDictionary(), EnumSet.of(Options.ALLOW_MISSING_FIELDS_IN_HEADER));
-        writer.writeHeader(vcfHeader);
-    }
-
-    private void convertAndWrite(VariantContext vc, ReferenceContext ref) {
-
-        final Interval fromInterval = new Interval(vc.getChr(), vc.getStart(), vc.getStart(), false, String.format("%s:%d", vc.getChr(), vc.getStart()));
-        final int length = vc.getEnd() - vc.getStart();
-        final Interval toInterval = liftOver.liftOver(fromInterval);
-        VariantContext originalVC = vc;
-
-        if ( toInterval != null ) {
-            // check whether the strand flips, and if so reverse complement everything
-            if ( fromInterval.isPositiveStrand() != toInterval.isPositiveStrand() && vc.isPointEvent() ) {
-                vc = GATKVariantContextUtils.reverseComplement(vc);
-            }
-
-            vc = new VariantContextBuilder(vc).loc(toInterval.getSequence(), toInterval.getStart(), toInterval.getStart() + length).make();
-
-            if ( RECORD_ORIGINAL_LOCATION ) {
-                vc = new VariantContextBuilder(vc)
-                        .attribute("OriginalChr", fromInterval.getSequence())
-                        .attribute("OriginalStart", fromInterval.getStart()).make();
-            }
-
-            if ( originalVC.isSNP() && originalVC.isBiallelic() && GATKVariantContextUtils.getSNPSubstitutionType(originalVC) != GATKVariantContextUtils.getSNPSubstitutionType(vc) ) {
-                logger.warn(String.format("VCF at %s / %d => %s / %d is switching substitution type %s/%s to %s/%s",
-                        originalVC.getChr(), originalVC.getStart(), vc.getChr(), vc.getStart(),
-                        originalVC.getReference(), originalVC.getAlternateAllele(0), vc.getReference(), vc.getAlternateAllele(0)));
-            }
-
-            writer.add(vc);
-            successfulIntervals++;
-        } else {
-            failedIntervals++;
-        }
-    }
-
-    public Integer map(RefMetaDataTracker tracker, ReferenceContext ref, AlignmentContext context) {
-        if ( tracker == null )
-            return 0;
-
-        Collection<VariantContext> VCs = tracker.getValues(variantCollection.variants, context.getLocation());
-        for ( VariantContext vc : VCs )
-            convertAndWrite(vc, ref);
-
-        return 0;
-    }
-
-    public Integer reduceInit() { return 0; }
-
-    public Integer reduce(Integer value, Integer sum) { return 0; }
-
-    public void onTraversalDone(Integer result) {
-        System.out.println("Converted " + successfulIntervals + " records; failed to convert " + failedIntervals + " records.");
-        writer.close();
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/variantutils/RandomlySplitVariants.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/variantutils/RandomlySplitVariants.java
index d50b4f2..41962bb 100644
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/variantutils/RandomlySplitVariants.java
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/variantutils/RandomlySplitVariants.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -25,19 +25,19 @@
 
 package org.broadinstitute.gatk.tools.walkers.variantutils;
 
+import org.broadinstitute.gatk.utils.Utils;
 import org.broadinstitute.gatk.utils.commandline.Argument;
 import org.broadinstitute.gatk.utils.commandline.ArgumentCollection;
 import org.broadinstitute.gatk.utils.commandline.Output;
 import org.broadinstitute.gatk.engine.CommandLineGATK;
-import org.broadinstitute.gatk.engine.GenomeAnalysisEngine;
 import org.broadinstitute.gatk.engine.arguments.StandardVariantContextInputArgumentCollection;
-import org.broadinstitute.gatk.engine.contexts.AlignmentContext;
-import org.broadinstitute.gatk.engine.contexts.ReferenceContext;
-import org.broadinstitute.gatk.engine.refdata.RefMetaDataTracker;
+import org.broadinstitute.gatk.utils.contexts.AlignmentContext;
+import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
+import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
 import org.broadinstitute.gatk.engine.walkers.RodWalker;
-import org.broadinstitute.gatk.utils.SampleUtils;
+import org.broadinstitute.gatk.engine.SampleUtils;
 import org.broadinstitute.gatk.utils.help.HelpConstants;
-import org.broadinstitute.gatk.utils.variant.GATKVCFUtils;
+import org.broadinstitute.gatk.engine.GATKVCFUtils;
 import htsjdk.variant.vcf.VCFHeader;
 import htsjdk.variant.vcf.VCFHeaderLine;
 import org.broadinstitute.gatk.utils.exceptions.UserException;
@@ -50,7 +50,33 @@ import java.io.File;
 import java.util.*;
 
 /**
- * Takes a VCF file, randomly splits variants into two different sets, and outputs 2 new VCFs with the results.
+ * Randomly split variants into different sets
+ *
+ * <p>This tool takes a VCF file, randomly splits variants into different sets, and writes the
+ * results to separate files. By default the tool splits the input into two new sets, but it can be made to output
+ * more than two separate call sets.</p>
+ *
+ * <h3>Input</h3>
+ * <p>
+ * A variant call set to split.
+ * </p>
+ *
+ * <h3>Output</h3>
+ * <p>
+ * The new callsets.
+ * </p>
+ *
+ * <h3>Usage example</h3>
+ * <pre>
+ * java -jar GenomeAnalysisTK.jar \
+ *   -T RandomlySplitVariants \
+ *   -R reference.fasta \
+ *   -V input.vcf \
+ *   -o1 output_1.vcf \
+ *   -o2 output_2.vcf
+ * </pre>
+ *
+ *
  */
 @DocumentedGATKFeature( groupName = HelpConstants.DOCS_CAT_VARMANIP, extraDocs = {CommandLineGATK.class} )
 public class RandomlySplitVariants extends RodWalker<Integer, Integer> {
@@ -134,7 +160,7 @@ public class RandomlySplitVariants extends RodWalker<Integer, Integer> {
 
         final Collection<VariantContext> vcs = tracker.getValues(variantCollection.variants, context.getLocation());
         for ( final VariantContext vc : vcs ) {
-            final double random = GenomeAnalysisEngine.getRandomGenerator().nextDouble();
+            final double random = Utils.getRandomGenerator().nextDouble();
             if(splitToMany){
                 final int index = (int)(numOfFiles * random);
                 writers[index].add(vc);
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/variantutils/SelectHeaders.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/variantutils/SelectHeaders.java
index c9842c8..e95cbf3 100644
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/variantutils/SelectHeaders.java
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/variantutils/SelectHeaders.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -30,16 +30,16 @@ import htsjdk.tribble.Feature;
 import org.broadinstitute.gatk.utils.commandline.*;
 import org.broadinstitute.gatk.engine.CommandLineGATK;
 import org.broadinstitute.gatk.engine.arguments.StandardVariantContextInputArgumentCollection;
-import org.broadinstitute.gatk.engine.contexts.AlignmentContext;
-import org.broadinstitute.gatk.engine.contexts.ReferenceContext;
-import org.broadinstitute.gatk.engine.refdata.RefMetaDataTracker;
+import org.broadinstitute.gatk.utils.contexts.AlignmentContext;
+import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
+import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
 import org.broadinstitute.gatk.engine.walkers.RodWalker;
 import org.broadinstitute.gatk.engine.walkers.TreeReducible;
-import org.broadinstitute.gatk.utils.SampleUtils;
+import org.broadinstitute.gatk.engine.SampleUtils;
 import org.broadinstitute.gatk.utils.help.HelpConstants;
 import org.broadinstitute.gatk.utils.interval.IntervalMergingRule;
 import org.broadinstitute.gatk.utils.interval.IntervalSetRule;
-import org.broadinstitute.gatk.utils.variant.GATKVCFUtils;
+import org.broadinstitute.gatk.engine.GATKVCFUtils;
 import org.broadinstitute.gatk.utils.variant.GATKVariantContextUtils;
 import htsjdk.variant.vcf.*;
 import org.broadinstitute.gatk.utils.help.DocumentedGATKFeature;
@@ -51,8 +51,8 @@ import java.io.File;
 import java.util.*;
 
 /**
- * Selects headers from a VCF source.
- * <p/>
+ * Selects headers from a VCF source
+ *
  * <p>
  * Often, a VCF containing many headers will need to be subset in order to facilitate certain formatting guidelines.
  * SelectHeaders can be used for this purpose. Given a single VCF file, one or more headers can be extracted from the
@@ -65,44 +65,49 @@ import java.util.*;
  * <p/>
  * <h3>Output</h3>
  * <p>
- * A header selected VCF.
+ * A VCF with the selected headers.
  * </p>
- * <p/>
- * <h3>Examples</h3>
+ *
+ * <h3>Usage examples</h3>
+ * <h4>Select only the FILTER, FORMAT, and INFO headers</h4>
  * <pre>
- * Select only the FILTER, FORMAT, and INFO headers:
- * java -Xmx2g -jar GenomeAnalysisTK.jar \
- *   -R ref.fasta \
+ * java -jar GenomeAnalysisTK.jar \
  *   -T SelectHeaders \
- *   --variant input.vcf \
+ *   -R reference.fasta \
+ *   -V input.vcf \
  *   -o output.vcf \
  *   -hn FILTER \
  *   -hn FORMAT \
  *   -hn INFO
+ * </pre>
  *
- * Select only the FILTER, FORMAT, and INFO headers and add in the reference file names:
- * java -Xmx2g -jar GenomeAnalysisTK.jar \
- *   -R ref.fasta \
+ * <h4>Select only the FILTER, FORMAT, and INFO headers and add in the reference file names</h4>
+ * <pre>
+ * java -jar GenomeAnalysisTK.jar \
  *   -T SelectHeaders \
- *   --variant input.vcf \
+ *   -R reference.fasta \
+ *   -V input.vcf \
  *   -o output.vcf \
  *   -hn FILTER \
  *   -hn FORMAT \
  *   -hn INFO \
  *   -irn \
  *   -iln
+ * </pre>
  *
- * Select only the FILTER, FORMAT, and INFO headers, plus any headers with SnpEff:
- * java -Xmx2g -jar GenomeAnalysisTK.jar \
- *   -R ref.fasta \
+ * <h4>Select only the FILTER, FORMAT, and INFO headers, plus any headers with "SnpEff"</h4>
+ * <pre>
+ * java -jar GenomeAnalysisTK.jar \
  *   -T SelectHeaders \
- *   --variant input.vcf \
+ *   -R reference.fasta \
+ *   -V input.vcf \
  *   -o output.vcf \
  *   -hn FILTER \
  *   -hn FORMAT \
  *   -hn INFO \
  *   -he '.*SnpEff.*'
  * </pre>
+ *
  */
 @SuppressWarnings("unused")
 @DocumentedGATKFeature( groupName = HelpConstants.DOCS_CAT_VARMANIP, extraDocs = {CommandLineGATK.class} )
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/variantutils/SelectVariants.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/variantutils/SelectVariants.java
index db9d082..abd53f9 100644
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/variantutils/SelectVariants.java
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/variantutils/SelectVariants.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -27,19 +27,20 @@ package org.broadinstitute.gatk.tools.walkers.variantutils;
 
 import org.broadinstitute.gatk.utils.commandline.*;
 import org.broadinstitute.gatk.engine.CommandLineGATK;
-import org.broadinstitute.gatk.engine.GenomeAnalysisEngine;
 import org.broadinstitute.gatk.engine.arguments.StandardVariantContextInputArgumentCollection;
-import org.broadinstitute.gatk.engine.contexts.AlignmentContext;
-import org.broadinstitute.gatk.engine.contexts.ReferenceContext;
-import org.broadinstitute.gatk.engine.refdata.RefMetaDataTracker;
+import org.broadinstitute.gatk.utils.contexts.AlignmentContext;
+import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
+import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
 import org.broadinstitute.gatk.engine.walkers.RodWalker;
 import org.broadinstitute.gatk.engine.walkers.TreeReducible;
-import org.broadinstitute.gatk.tools.walkers.annotator.ChromosomeCountConstants;
-import org.broadinstitute.gatk.utils.MendelianViolation;
-import org.broadinstitute.gatk.utils.SampleUtils;
+import org.broadinstitute.gatk.utils.variant.ChromosomeCountConstants;
+import org.broadinstitute.gatk.engine.samples.MendelianViolation;
+import org.broadinstitute.gatk.engine.SampleUtils;
 import org.broadinstitute.gatk.utils.Utils;
 import org.broadinstitute.gatk.utils.help.HelpConstants;
-import org.broadinstitute.gatk.utils.variant.GATKVCFUtils;
+import org.broadinstitute.gatk.engine.GATKVCFUtils;
+import org.broadinstitute.gatk.utils.variant.GATKVCFConstants;
+import org.broadinstitute.gatk.utils.variant.GATKVCFHeaderLines;
 import org.broadinstitute.gatk.utils.variant.GATKVariantContextUtils;
 import htsjdk.variant.vcf.*;
 import org.broadinstitute.gatk.utils.exceptions.UserException;
@@ -53,134 +54,265 @@ import java.io.FileNotFoundException;
 import java.util.*;
 
 /**
- * Selects variants from a VCF source.
+ * Select a subset of variants from a larger callset
  *
  * <p>
  * Often, a VCF containing many samples and/or variants will need to be subset in order to facilitate certain analyses
  * (e.g. comparing and contrasting cases vs. controls; extracting variant or non-variant loci that meet certain
  * requirements, displaying just a few samples in a browser like IGV, etc.). SelectVariants can be used for this purpose.
- * Given a single VCF file, one or more samples can be extracted from the file (based on a complete sample name or a
- * pattern match).  Variants can be further selected by specifying criteria for inclusion, i.e. "DP > 1000" (depth of
- * coverage greater than 1000x), "AF < 0.25" (sites with allele frequency less than 0.25).  These JEXL expressions are
- * documented in the Using JEXL expressions section (http://www.broadinstitute.org/gatk/guide/article?id=1255).
- * One can optionally include concordance or discordance tracks for use in selecting overlapping variants.
- *
+ * </p>
+ * <p>
+ * There are many different options for selecting subsets of variants from a larger callset:
+ * <ul>
+ *     <li>Extract one or more samples from a callset based on either a complete sample name or a pattern match.</li>  
+ *     <li>Specify criteria for inclusion that place thresholds on annotation values, e.g. "DP > 1000" (depth of
+ * coverage greater than 1000x), "AF < 0.25" (sites with allele frequency less than 0.25). These criteria are written 
+ * as "JEXL expressions", which are documented in the 
+ * <a href="http://www.broadinstitute.org/gatk/guide/article?id=1255">article about using JEXL expressions</a>.</li>
+ *     <li>Provide concordance or discordance tracks in order to include or exclude variants that are 
+ * also present in other given callsets.</li>
+ *     <li>Select variants based on criteria like their type 
+ * (e.g. INDELs only), evidence of mendelian violation, filtering status, allelicity, and so on.</li>
+ * </ul>
+ * </p>
+ * 
+ * <p>There are also several options for recording the original values of certain annotations that are recalculated 
+ * when subsetting the new callset, trimming alleles, and so on.</p>
+ * 
  * <h3>Input</h3>
  * <p>
- * A variant set to select from.
+ * A variant call set from which to select a subset.
  * </p>
  *
  * <h3>Output</h3>
  * <p>
- * A selected VCF.
+ * A new VCF file containing the selected subset of variants.
  * </p>
  *
- * <h3>Examples</h3>
+ * <h3>Usage examples</h3>
+ * <h4>Select two samples out of a VCF with many samples</h4>
  * <pre>
- * Select two samples out of a VCF with many samples:
- * java -Xmx2g -jar GenomeAnalysisTK.jar \
- *   -R ref.fasta \
+ * java -jar GenomeAnalysisTK.jar \
  *   -T SelectVariants \
- *   --variant input.vcf \
+ *   -R reference.fasta \
+ *   -V input.vcf \
  *   -o output.vcf \
  *   -sn SAMPLE_A_PARC \
  *   -sn SAMPLE_B_ACTG
+ * </pre>
  *
- * Select two samples and any sample that matches a regular expression:
- * java -Xmx2g -jar GenomeAnalysisTK.jar \
- *   -R ref.fasta \
+ * <h4>Select two samples and any sample that matches a regular expression</h4>
+ * <pre>
+ * java -jar GenomeAnalysisTK.jar \
  *   -T SelectVariants \
- *   --variant input.vcf \
+ *   -R reference.fasta \
+ *   -V input.vcf \
  *   -o output.vcf \
  *   -sn SAMPLE_1_PARC \
  *   -sn SAMPLE_1_ACTG \
  *   -se 'SAMPLE.+PARC'
+ * </pre>
  *
- * Select any sample that matches a regular expression and sites where the QD annotation is more than 10:
- * java -Xmx2g -jar GenomeAnalysisTK.jar \
+ * <h4>Exclude two samples and any sample that matches a regular expression:</h4>
+ * <pre>
+ * java -jar GenomeAnalysisTK.jar \
  *   -R ref.fasta \
  *   -T SelectVariants \
  *   --variant input.vcf \
  *   -o output.vcf \
- *   -se 'SAMPLE.+PARC'
- *   -select "QD > 10.0"
+ *   -xl_sn SAMPLE_1_PARC \
+ *   -xl_sn SAMPLE_1_ACTG \
+ *   -xl_se 'SAMPLE.+PARC'
+ * </pre>
  *
- * Select a sample and exclude non-variant loci and filtered loci:
+ * <h4>Select any sample that matches a regular expression and sites where the QD annotation is more than 10:</h4>
+ * <pre>
  * java -Xmx2g -jar GenomeAnalysisTK.jar \
  *   -R ref.fasta \
  *   -T SelectVariants \
+ *   -R reference.fasta \
+ *   -V input.vcf \
+ *   -o output.vcf \
+ *   -se 'SAMPLE.+PARC' \
+ *   -select "QD > 10.0"
+ * </pre>
+ *
+ * <h4>Select any sample that does not match a regular expression and sites where the QD annotation is more than 10:</h4>
+ * <pre>
+ * java  -jar GenomeAnalysisTK.jar \
+ *   -R ref.fasta \
+ *   -T SelectVariants \
  *   --variant input.vcf \
  *   -o output.vcf \
+ *   -se 'SAMPLE.+PARC' \
+ *   -select "QD > 10.0"
+ *   -invertSelect
+ * </pre>
+ *
+ * <h4>Select a sample and exclude non-variant loci and filtered loci (trim remaining alleles by default):</h4>
+ * <pre>
+ * java -jar GenomeAnalysisTK.jar \
+ *   -R ref.fasta \
+ *   -T SelectVariants \
+ *   -R reference.fasta \
+ *   -V input.vcf \
+ *   -o output.vcf \
  *   -sn SAMPLE_1_ACTG \
  *   -env \
  *   -ef
+ * </pre>
  *
- * Select a sample and restrict the output vcf to a set of intervals:
- * java -Xmx2g -jar GenomeAnalysisTK.jar \
- *   -R ref.fasta \
+ * <h4>Select a sample, subset remaining alleles, but don't trim:</h4>
+ * <pre>
+ * java -jar GenomeAnalysisTK.jar \
  *   -T SelectVariants \
- *   --variant input.vcf \
+ *   -R reference.fasta \
+ *   -V input.vcf \
+ *   -o output.vcf \
+ *   -sn SAMPLE_1_ACTG \
+ *   -env \
+ *   -noTrim
+ *</pre>
+ *
+ * <h4>Select a sample and restrict the output vcf to a set of intervals:</h4>
+ * <pre>
+ * java -jar GenomeAnalysisTK.jar \
+ *   -T SelectVariants \
+ *   -R reference.fasta \
+ *   -V input.vcf \
  *   -o output.vcf \
  *   -L /path/to/my.interval_list \
  *   -sn SAMPLE_1_ACTG
+ * </pre>
  *
- * Select all calls missed in my vcf, but present in HapMap (useful to take a look at why these variants weren't called by this dataset):
- * java -Xmx2g -jar GenomeAnalysisTK.jar \
- *   -R ref.fasta \
+ * <h4>Select all calls missed in my vcf, but present in HapMap (useful to take a look at why these variants weren't called in my dataset):</h4>
+ * <pre>
+ * java -jar GenomeAnalysisTK.jar \
  *   -T SelectVariants \
- *   --variant hapmap.vcf \
- *   --discordance myCalls.vcf
+ *   -R reference.fasta \
+ *   -V hapmap.vcf \
+ *   --discordance myCalls.vcf \
  *   -o output.vcf \
  *   -sn mySample
+ * </pre>
  *
- * Select all calls made by both myCalls and hisCalls (useful to take a look at what is consistent between the two callers):
- * java -Xmx2g -jar GenomeAnalysisTK.jar \
- *   -R ref.fasta \
+ * <h4>Select all calls made by both myCalls and theirCalls (useful to take a look at what is consistent between two callers):</h4>
+ * <pre>
+ * java -jar GenomeAnalysisTK.jar \
  *   -T SelectVariants \
- *   --variant myCalls.vcf \
- *   --concordance hisCalls.vcf
+ *   -R reference.fasta \
+ *   -V myCalls.vcf \
+ *   --concordance theirCalls.vcf \
  *   -o output.vcf \
  *   -sn mySample
+ * </pre>
  *
- * Generating a VCF of all the variants that are mendelian violations:
- * java -Xmx2g -jar GenomeAnalysisTK.jar \
- *   -R ref.fasta \
+ * <h4>Generating a VCF of all the variants that are mendelian violations. The optional argument '-mvq' restricts the selection to sites that have a QUAL score of 50 or more</h4>
+ * <pre>
+ * java -jar GenomeAnalysisTK.jar \
  *   -T SelectVariants \
- *   --variant input.vcf \
+ *   -R reference.fasta \
+ *   -V input.vcf \
  *   -ped family.ped \
- *   -mvq 50 \
+ *   -mv -mvq 50 \
  *   -o violations.vcf
+ * </pre>
  *
- * Creating a set with 50% of the total number of variants in the variant VCF:
- * java -Xmx2g -jar GenomeAnalysisTK.jar \
- *   -R ref.fasta \
+ * <h4>Generating a VCF of all the variants that are not mendelian violations. The optional argument '-mvq' together with '-invMv' restricts the selection to sites that have a QUAL score of 50 or less</h4>
+ * <pre>
+ * java -jar GenomeAnalysisTK.jar \
  *   -T SelectVariants \
- *   --variant input.vcf \
+ *   -R reference.fasta \
+ *   -V input.vcf \
+ *   -ped family.ped \
+ *   -mv -mvq 50 -invMv \
+ *   -o output.vcf
+ * </pre>
+ *
+ * <h4>Create a set with 50% of the total number of variants in the variant VCF:</h4>
+ * <pre>
+ * java -jar GenomeAnalysisTK.jar \
+ *   -T SelectVariants \
+ *   -R reference.fasta \
+ *   -V input.vcf \
  *   -o output.vcf \
  *   -fraction 0.5
+ * </pre>
  *
- * Select only indels from a VCF:
- * java -Xmx2g -jar GenomeAnalysisTK.jar \
+ * <h4>Select only indels between 2 and 5 bases long from a VCF:</h4>
+ * <pre>
+ * java -jar GenomeAnalysisTK.jar \
  *   -R ref.fasta \
  *   -T SelectVariants \
- *   --variant input.vcf \
+ *   -R reference.fasta \
+ *   -V input.vcf \
  *   -o output.vcf \
  *   -selectType INDEL
+ *   --minIndelSize 2
+ *   --maxIndelSize 5
+ * </pre>
  *
- * Select only multi-allelic SNPs and MNPs from a VCF (i.e. SNPs with more than one allele listed in the ALT column):
+ * <h4>Exclude indels from a VCF:</h4>
+ * <pre>
  * java -Xmx2g -jar GenomeAnalysisTK.jar \
  *   -R ref.fasta \
  *   -T SelectVariants \
  *   --variant input.vcf \
  *   -o output.vcf \
+ *   --selectTypeToExclude INDEL
+ * </pre>
+ *
+ * <h4>Select only multi-allelic SNPs and MNPs from a VCF (i.e. SNPs with more than one allele listed in the ALT column):</h4>
+ * <pre>
+ * java -jar GenomeAnalysisTK.jar \
+ *   -T SelectVariants \
+ *   -R reference.fasta \
+ *   -V input.vcf \
+ *   -o output.vcf \
  *   -selectType SNP -selectType MNP \
  *   -restrictAllelesTo MULTIALLELIC
+ * </pre>
  *
+ * <h4>Select IDs in fileKeep and exclude IDs in fileExclude:</h4>
+ * <pre>
+ * java -jar GenomeAnalysisTK.jar \
+ *   -R ref.fasta \
+ *   -T SelectVariants \
+ *   --variant input.vcf \
+ *   -o output.vcf \
+ *   -IDs fileKeep \
+ *   -excludeIDs fileExclude
+ * </pre>
+ *
+ * <h4>Select sites where there are between 2 and 5 samples and between 10 and 60 percent of the sample genotypes are filtered:</h4>
+ * <pre>
+ * java -jar GenomeAnalysisTK.jar \
+ *   -R ref.fasta \
+ *   -T SelectVariants \
+ *   --variant input.vcf \
+ *   --maxFilteredGenotypes 5
+ *   --minFilteredGenotypes 2
+ *   --maxFractionFilteredGenotypes 0.60
+ *   --minFractionFilteredGenotypes 0.10
+ * </pre>
+ *
+ *  <h4>Set filtered genotypes to no-call (./.):</h4>
+ * <pre>
+ * java -jar GenomeAnalysisTK.jar \
+ *   -R ref.fasta \
+ *   -T SelectVariants \
+ *   --variant input.vcf \
+ *   --setFilteredGtToNocall
  * </pre>
  *
  */
 @DocumentedGATKFeature( groupName = HelpConstants.DOCS_CAT_VARMANIP, extraDocs = {CommandLineGATK.class} )
 public class SelectVariants extends RodWalker<Integer, Integer> implements TreeReducible<Integer> {
+    static final int MAX_FILTERED_GENOTYPES_DEFAULT_VALUE  = Integer.MAX_VALUE;
+    static final int MIN_FILTERED_GENOTYPES_DEFAULT_VALUE  = 0;
+    static final double MAX_FRACTION_FILTERED_GENOTYPES_DEFAULT_VALUE = 1.0;
+    static final double MIN_FRACTION_FILTERED_GENOTYPES_DEFAULT_VALUE = 0.0;
+    
     @ArgumentCollection protected StandardVariantContextInputArgumentCollection variantCollection = new StandardVariantContextInputArgumentCollection();
 
     /**
@@ -188,7 +320,7 @@ public class SelectVariants extends RodWalker<Integer, Integer> implements TreeR
      * and either the site isn't present in this track, the sample isn't present in this track,
      * or the sample is called reference in this track.
      */
-    @Input(fullName="discordance", shortName = "disc", doc="Output variants that were not called in this comparison track", required=false)
+    @Input(fullName="discordance", shortName = "disc", doc="Output variants not called in this comparison track", required=false)
     protected RodBinding<VariantContext> discordanceTrack;
 
     /**
@@ -196,112 +328,260 @@ public class SelectVariants extends RodWalker<Integer, Integer> implements TreeR
      * in both the variant and concordance tracks or (2) every sample present in the variant track is present in the
      * concordance track and they have the sample genotype call.
      */
-    @Input(fullName="concordance", shortName = "conc", doc="Output variants that were also called in this comparison track", required=false)
+    @Input(fullName="concordance", shortName = "conc", doc="Output variants also called in this comparison track", required=false)
     protected RodBinding<VariantContext> concordanceTrack;
 
     @Output(doc="File to which variants should be written")
     protected VariantContextWriter vcfWriter = null;
 
-    @Argument(fullName="sample_name", shortName="sn", doc="Include genotypes from this sample. Can be specified multiple times", required=false)
-    public Set<String> sampleNames = new HashSet<String>(0);
+    /**
+     * This argument can be specified multiple times in order to provide multiple sample names.
+     */
+    @Argument(fullName="sample_name", shortName="sn", doc="Include genotypes from this sample", required=false)
+    public Set<String> sampleNames = new HashSet<>(0);
 
-    @Argument(fullName="sample_expressions", shortName="se", doc="Regular expression to select many samples from the ROD tracks provided. Can be specified multiple times", required=false)
-    public Set<String> sampleExpressions ;
+    /**
+     * Using a regular expression allows you to match multiple sample names that have that pattern in common. This
+     * argument can be specified multiple times in order to use multiple different matching patterns.
+     */
+    @Argument(fullName="sample_expressions", shortName="se", doc="Regular expression to select multiple samples", required=false)
+    public Set<String> sampleExpressions;
 
-    @Input(fullName="sample_file", shortName="sf", doc="File containing a list of samples (one per line) to include. Can be specified multiple times", required=false)
+    /**
+     * Sample names should be in a plain text file listing one sample name per line. This argument can be specified multiple times in order to provide
+     * multiple sample list files.
+     */
+    @Input(fullName="sample_file", shortName="sf", doc="File containing a list of samples to include", required=false)
     public Set<File> sampleFiles;
 
     /**
-     * Note that sample exclusion takes precedence over inclusion, so that if a sample is in both lists it will be excluded.
+     * Note that sample exclusion takes precedence over inclusion, so that if a sample is in both lists it will be
+     * excluded. This argument can be specified multiple times in order to provide multiple sample names.
      */
-    @Argument(fullName="exclude_sample_name", shortName="xl_sn", doc="Exclude genotypes from this sample. Can be specified multiple times", required=false)
-    public Set<String> XLsampleNames = new HashSet<String>(0);
+    @Argument(fullName="exclude_sample_name", shortName="xl_sn", doc="Exclude genotypes from this sample", required=false)
+    public Set<String> XLsampleNames = new HashSet<>(0);
 
     /**
-     * Note that sample exclusion takes precedence over inclusion, so that if a sample is in both lists it will be excluded.
+     * Sample names should be in a plain text file listing one sample name per line. Note that sample exclusion takes precedence over inclusion, so that
+     * if a sample is in both lists it will be excluded. This argument can be specified multiple times in order to
+     * provide multiple sample list files.
      */
-    @Input(fullName="exclude_sample_file", shortName="xl_sf", doc="File containing a list of samples (one per line) to exclude. Can be specified multiple times", required=false)
-    public Set<File> XLsampleFiles = new HashSet<File>(0);
+    @Input(fullName="exclude_sample_file", shortName="xl_sf", doc="List of samples to exclude", required=false)
+    public Set<File> XLsampleFiles = new HashSet<>(0);
 
     /**
-     * Note that these expressions are evaluated *after* the specified samples are extracted and the INFO field annotations are updated.
+     * Using a regular expression allows you to match multiple sample names that have that pattern in common. Note that sample exclusion takes precedence
+     * over inclusion, so that if a sample is in both lists it will be excluded. This  argument can be specified multiple times in order to use multiple
+     * different matching patterns.
+     */
+    @Input(fullName="exclude_sample_expressions", shortName="xl_se", doc="List of sample expressions to exclude", required=false)
+    public Set<String> XLsampleExpressions = new HashSet<>(0);
+
+    /**
+     * See example commands above for detailed usage examples. Note that these expressions are evaluated *after* the
+     * specified samples are extracted and the INFO field annotations are updated.
      */
     @Argument(shortName="select", doc="One or more criteria to use when selecting the data", required=false)
-    public ArrayList<String> SELECT_EXPRESSIONS = new ArrayList<String>();
+    public ArrayList<String> selectExpressions = new ArrayList<>();
+
+    /**
+     * Invert the selection criteria for -select.
+     */
+    @Argument(shortName="invertSelect", doc="Invert the selection criteria for -select", required=false)
+    protected boolean invertSelect = false;
 
-    @Argument(fullName="excludeNonVariants", shortName="env", doc="Don't include loci found to be non-variant after the subsetting procedure", required=false)
-    protected boolean EXCLUDE_NON_VARIANTS = false;
+    /**
+     * If this flag is enabled, sites that are found to be non-variant after the subsetting procedure (i.e. where none
+     * of the selected samples display evidence of variation) will be excluded from the output.
+     */
+    @Argument(fullName="excludeNonVariants", shortName="env", doc="Don't include non-variant sites", required=false)
+    protected boolean XLnonVariants = false;
 
-    @Argument(fullName="excludeFiltered", shortName="ef", doc="Don't include filtered loci in the analysis", required=false)
-    protected boolean EXCLUDE_FILTERED = false;
+    /**
+     * If this flag is enabled, sites that have been marked as filtered (i.e. have anything other than `.` or `PASS`
+     * in the FILTER field) will be excluded from the output.
+     */
+    @Argument(fullName="excludeFiltered", shortName="ef", doc="Don't include filtered sites", required=false)
+    protected boolean XLfiltered = false;
 
     /**
-     * When this argument is used, we can choose to include only multiallelic or biallelic sites, depending on how many alleles are listed in the ALT column of a vcf.
+     * The default behavior of this tool is to remove bases common to all remaining alleles after subsetting
+     * operations have been completed, leaving only their minimal representation. If this flag is enabled, the original
+     * alleles will be preserved as recorded in the input VCF.
+     */
+    @Argument(fullName="preserveAlleles", shortName="noTrim", doc="Preserve original alleles, do not trim", required=false)
+    protected boolean preserveAlleles = false;
+
+    /**
+     * When this flag is enabled, all alternate alleles that are not present in the (output) samples will be removed.
+     * Note that this even extends to biallelic SNPs - if the alternate allele is not present in any sample, it will be
+     * removed and the record will contain a '.' in the ALT column. Note also that sites-only VCFs, by definition, do
+     * not include the alternate allele in any genotype calls.
+     */
+    @Argument(fullName="removeUnusedAlternates", shortName="trimAlternates", doc="Remove alternate alleles not present in any genotypes", required=false)
+    protected boolean removeUnusedAlternates = false;
+
+    /**
+     * When this argument is used, we can choose to include only multiallelic or biallelic sites, depending on how many alleles are listed in the ALT column of a VCF.
      * For example, a multiallelic record such as:
-     * 1    100 .   A   AAA,AAAAA
-     * will be excluded if "-restrictAllelesTo BIALLELIC" is included, because there are two alternate alleles, whereas a record such as:
-     * 1    100 .   A  T
-     * will be included in that case, but would be excluded if "-restrictAllelesTo MULTIALLELIC
+     *     1    100 .   A   AAA,AAAAA
+     * will be excluded if `-restrictAllelesTo BIALLELIC` is used, because there are two alternate alleles, whereas a record such as:
+     *     1    100 .   A  T
+     * will be included in that case, but would be excluded if `-restrictAllelesTo MULTIALLELIC` is used.
+     * Valid options are ALL (default), MULTIALLELIC or BIALLELIC.
      */
-    @Argument(fullName="restrictAllelesTo", shortName="restrictAllelesTo", doc="Select only variants of a particular allelicity. Valid options are ALL (default), MULTIALLELIC or BIALLELIC", required=false)
+    @Argument(fullName="restrictAllelesTo", shortName="restrictAllelesTo", doc="Select only variants of a particular allelicity", required=false)
     private  NumberAlleleRestriction alleleRestriction = NumberAlleleRestriction.ALL;
 
-    @Argument(fullName="keepOriginalAC", shortName="keepOriginalAC", doc="Store the original AC, AF, and AN values in the INFO field after selecting (using keys AC_Orig, AF_Orig, and AN_Orig)", required=false)
-    private boolean KEEP_ORIGINAL_CHR_COUNTS = false;
+    /**
+     * When subsetting a callset, this tool recalculates the AC, AF, and AN values corresponding to the contents of the
+     * subset. If this flag is enabled, the original values of those annotations will be stored in new annotations called
+     * AC_Orig, AF_Orig, and AN_Orig.
+     */
+    @Argument(fullName="keepOriginalAC", shortName="keepOriginalAC", doc="Store the original AC, AF, and AN values after subsetting", required=false)
+    private boolean keepOriginalChrCounts = false;
 
     /**
-     * This activates the mendelian violation module that will select all variants that correspond to a mendelian violation following the rules given by the family structure.
+     * When subsetting a callset, this tool recalculates the site-level (INFO field) DP value corresponding to the contents of the
+     * subset. If this flag is enabled, the original value of the DP annotation will be stored in a new annotation called
+     * DP_Orig.
      */
-    @Argument(fullName="mendelianViolation", shortName="mv", doc="output mendelian violation sites only", required=false)
-    private Boolean MENDELIAN_VIOLATIONS = false;
+    @Argument(fullName="keepOriginalDP", shortName="keepOriginalDP", doc="Store the original DP value after subsetting", required=false)
+    private boolean keepOriginalDepth = false;
 
-    @Argument(fullName="mendelianViolationQualThreshold", shortName="mvq", doc="Minimum genotype QUAL score for each trio member required to accept a site as a violation", required=false)
-    protected double MENDELIAN_VIOLATION_QUAL_THRESHOLD = 0;
+    /**
+     * If this flag is enabled, this tool will select only variants that correspond to a mendelian violation as
+     * determined on the basis of family structure. Requires passing a pedigree file using the engine-level
+     * `-ped` argument.
+     */
+    @Argument(fullName="mendelianViolation", shortName="mv", doc="Output mendelian violation sites only", required=false)
+    private Boolean mendelianViolations = false;
+
+    /**
+     * If this flag is enabled, this tool will select only variants that do not correspond to a mendelian violation as
+     * determined on the basis of family structure. Requires passing a pedigree file using the engine-level
+     * `-ped` argument.
+     */
+    @Argument(fullName="invertMendelianViolation", shortName="invMv", doc="Output non-mendelian violation sites only", required=false)
+    private Boolean invertMendelianViolations = false;
+
+    /**
+     * This argument specifies the genotype quality (GQ) threshold that all members of a trio must have in order
+     * for a site to be accepted as a mendelian violation. Note that the `-mv` flag must be set for this argument to have an effect.
+     */
+    @Argument(fullName="mendelianViolationQualThreshold", shortName="mvq", doc="Minimum GQ score for each trio member to accept a site as a violation", required=false)
+    protected double medelianViolationQualThreshold = 0;
 
     /**
-     * This routine is based on probability, so the final result is not guaranteed to carry the exact fraction.  Can be used for large fractions.
+     * The value of this argument should be a number between 0 and 1 specifying the fraction of total variants to be
+     * randomly selected from the input callset. Note that this is done using a probabilistic function, so the final
+     * result is not guaranteed to carry the exact fraction requested. Can be used for large fractions.
      */
-    @Argument(fullName="select_random_fraction", shortName="fraction", doc="Selects a fraction (a number between 0 and 1) of the total variants at random from the variant track", required=false)
+    @Argument(fullName="select_random_fraction", shortName="fraction", doc="Select a fraction of variants at random from the input", required=false)
     protected double fractionRandom = 0;
 
-    @Argument(fullName="remove_fraction_genotypes", shortName="fractionGenotypes", doc="Selects a fraction (a number between 0 and 1) of the total genotypes at random from the variant track and sets them to nocall", required=false)
+    /**
+     * The value of this argument should be a number between 0 and 1 specifying the fraction of total variants to be
+ * randomly selected from the input callset and set to no-call (./.). Note that this is done using a probabilistic
+     * function, so the final result is not guaranteed to carry the exact fraction requested. Can be used for large fractions.
+     */
+    @Argument(fullName="remove_fraction_genotypes", shortName="fractionGenotypes", doc="Select a fraction of genotypes at random from the input and sets them to no-call", required=false)
     protected double fractionGenotypes = 0;
 
     /**
-     * This argument select particular kinds of variants out of a list. If left empty, there is no type selection and all variant types are considered for other selection criteria.
-     * When specified one or more times, a particular type of variant is selected.
-     *
+     * This argument selects particular kinds of variants out of a list. If left empty, there is no type selection
+     * and all variant types are considered for other selection criteria. Valid types are INDEL, SNP, MIXED, MNP,
+     * SYMBOLIC, NO_VARIATION. Can be specified multiple times.
      */
-    @Argument(fullName="selectTypeToInclude", shortName="selectType", doc="Select only a certain type of variants from the input file. Valid types are INDEL, SNP, MIXED, MNP, SYMBOLIC, NO_VARIATION. Can be specified multiple times", required=false)
-    private List<VariantContext.Type> TYPES_TO_INCLUDE = new ArrayList<VariantContext.Type>();
+    @Argument(fullName="selectTypeToInclude", shortName="selectType", doc="Select only a certain type of variants from the input file", required=false)
+    private List<VariantContext.Type> typesToInclude = new ArrayList<>();
 
     /**
-     * If provided, we will only include variants whose ID field is present in this list of ids.  The matching
-     * is exact string matching.  The file format is just one ID per line
-     *
+     * This argument excludes particular kinds of variants out of a list. If left empty, there is no type selection
+     * and all variant types are considered for other selection criteria. Valid types are INDEL, SNP, MIXED, MNP,
+     * SYMBOLIC, NO_VARIATION. Can be specified multiple times.
+     */
+    @Argument(fullName="selectTypeToExclude", shortName="xlSelectType", doc="Do not select certain type of variants from the input file", required=false)
+    private List<VariantContext.Type> typesToExclude = new ArrayList<>();
+
+    /**
+     * If a file containing a list of IDs is provided to this argument, the tool will only select variants whose ID
+     * field is present in this list of IDs. The matching is done by exact string matching. The expected file format
+     * is simply plain text with one ID per line.
      */
-    @Argument(fullName="keepIDs", shortName="IDs", doc="Only emit sites whose ID is found in this file (one ID per line)", required=false)
+    @Argument(fullName="keepIDs", shortName="IDs", doc="List of variant IDs to select", required=false)
     private File rsIDFile = null;
 
+    /**
+     * If a file containing a list of IDs is provided to this argument, the tool will not select variants whose ID
+     * field is present in this list of IDs. The matching is done by exact string matching. The expected file format
+     * is simply plain text with one ID per line.
+     */
+    @Argument(fullName="excludeIDs", shortName="xlIDs", doc="List of variant IDs to select", required=false)
+    private File XLrsIDFile = null;
 
     @Hidden
     @Argument(fullName="fullyDecode", doc="If true, the incoming VariantContext will be fully decoded", required=false)
     private boolean fullyDecode = false;
 
     @Hidden
-    @Argument(fullName="forceGenotypesDecode", doc="If true, the incoming VariantContext will have its genotypes forcibly decoded by computing AC across all genotypes.  For efficiency testing only", required=false)
-    private boolean forceGenotypesDecode = false;
-
-    @Hidden
     @Argument(fullName="justRead", doc="If true, we won't actually write the output file.  For efficiency testing only", required=false)
     private boolean justRead = false;
 
-    @Argument(doc="indel size select",required=false,fullName="maxIndelSize")
+    /**
+     * If this argument is provided, indels that are larger than the specified size will be excluded.
+     */
+    @Argument(fullName="maxIndelSize", required=false, doc="Maximum size of indels to include")
     private int maxIndelSize = Integer.MAX_VALUE;
 
-    @Argument(doc="Allow samples other than those in the VCF to be specified on the command line. These samples will be ignored.",required=false,fullName="ALLOW_NONOVERLAPPING_COMMAND_LINE_SAMPLES")
-    private boolean ALLOW_NONOVERLAPPING_COMMAND_LINE_SAMPLES = false;
+    /**
+     * If this argument is provided, indels that are smaller than the specified size will be excluded.
+     */
+    @Argument(fullName="minIndelSize", required=false, doc="Minimum size of indels to include")
+    private int minIndelSize = 0;
+
+    /**
+     * If this argument is provided, select sites where at most a maximum number of samples are filtered at the genotype level.
+     */
+    @Argument(fullName="maxFilteredGenotypes", required=false, doc="Maximum number of samples filtered at the genotype level")
+    private int maxFilteredGenotypes = MAX_FILTERED_GENOTYPES_DEFAULT_VALUE;
+
+    /**
+     * If this argument is provided, select sites where at least a minimum number of samples are filtered at the genotype level.
+     */
+    @Argument(fullName="minFilteredGenotypes", required=false, doc="Minimum number of samples filtered at the genotype level")
+    private int minFilteredGenotypes = MIN_FILTERED_GENOTYPES_DEFAULT_VALUE;
+
+    /**
+     * If this argument is provided, select sites where a fraction or less of the samples are filtered at the genotype level.
+     */
+    @Argument(fullName="maxFractionFilteredGenotypes", required=false, doc="Maximum fraction of samples filtered at the genotype level")
+    private double maxFractionFilteredGenotypes = MAX_FRACTION_FILTERED_GENOTYPES_DEFAULT_VALUE;
+
+    /**
+     * If this argument is provided, select sites where a fraction or more of the samples are filtered at the genotype level.
+     */
+    @Argument(fullName="minFractionFilteredGenotypes", required=false, doc="Maximum fraction of samples filtered at the genotype level")
+    private double minFractionFilteredGenotypes = MIN_FRACTION_FILTERED_GENOTYPES_DEFAULT_VALUE;
+
+    /**
+     * If this argument is provided, set filtered genotypes to no-call (./.).
+     */
+    @Argument(fullName="setFilteredGtToNocall", required=false, doc="Set filtered genotypes to no-call")
+    private boolean setFilteredGenotypesToNocall = false;
 
+    @Hidden
+    @Argument(fullName="ALLOW_NONOVERLAPPING_COMMAND_LINE_SAMPLES", required=false, doc="Allow samples other than those in the VCF to be specified on the command line. These samples will be ignored.")
+    private boolean allowNonOverlappingCommandLineSamples = false;
+
+    /**
+     * If this argument is provided, the output will be compliant with the version in the header, however it will also
+     * cause the tool to run slower than without the argument. Without the argument the header will be compliant with
+     * the up-to-date version, but the output in the body may not be compliant. If an up-to-date input file is used,
+     * then the output will also be up-to-date regardless of this argument.
+     */
+    @Argument(fullName="forceValidOutput", required=false, doc="Forces output VCF to be compliant to up-to-date version")
+    private boolean forceValidOutput = false;
 
     public enum NumberAlleleRestriction {
         ALL,
@@ -309,28 +589,31 @@ public class SelectVariants extends RodWalker<Integer, Integer> implements TreeR
         MULTIALLELIC
     }
 
-    private ArrayList<VariantContext.Type> selectedTypes = new ArrayList<VariantContext.Type>();
-    private ArrayList<String> selectNames = new ArrayList<String>();
+    private ArrayList<VariantContext.Type> selectedTypes = new ArrayList<>();
+    private ArrayList<String> selectNames = new ArrayList<>();
     private List<VariantContextUtils.JexlVCMatchExp> jexls = null;
 
-    private TreeSet<String> samples = new TreeSet<String>();
-    private boolean NO_SAMPLES_SPECIFIED = false;
+    private TreeSet<String> samples = new TreeSet<>();
+    private boolean noSamplesSpecified = false;
 
-    private boolean DISCORDANCE_ONLY = false;
-    private boolean CONCORDANCE_ONLY = false;
+    private boolean discordanceOnly = false;
+    private boolean concordanceOnly = false;
 
     private MendelianViolation mv;
 
 
     /* variables used by the SELECT RANDOM modules */
-    private boolean SELECT_RANDOM_FRACTION = false;
+    private boolean selectRandomFraction = false;
 
     //Random number generator for the genotypes to remove
     private Random randomGenotypes = new Random();
 
     private Set<String> IDsToKeep = null;
+    private Set<String> IDsToRemove = null;
     private Map<String, VCFHeader> vcfRods;
 
+    private final List<Allele> diploidNoCallAlleles = Arrays.asList(Allele.NO_CALL, Allele.NO_CALL);
+
     /**
      * Set up the VCF writer, the sample expressions and regexs, and the JEXL matcher
      */
@@ -339,13 +622,13 @@ public class SelectVariants extends RodWalker<Integer, Integer> implements TreeR
         List<String> rodNames = Arrays.asList(variantCollection.variants.getName());
 
         vcfRods = GATKVCFUtils.getVCFHeadersFromRods(getToolkit(), rodNames);
-        TreeSet<String> vcfSamples = new TreeSet<String>(SampleUtils.getSampleList(vcfRods, GATKVariantContextUtils.GenotypeMergeType.REQUIRE_UNIQUE));
+        TreeSet<String> vcfSamples = new TreeSet<>(SampleUtils.getSampleList(vcfRods, GATKVariantContextUtils.GenotypeMergeType.REQUIRE_UNIQUE));
 
         Collection<String> samplesFromFile = SampleUtils.getSamplesFromFiles(sampleFiles);
         Collection<String> samplesFromExpressions = SampleUtils.matchSamplesExpressions(vcfSamples, sampleExpressions);
 
         // first, check overlap between requested and present samples
-        Set<String> commandLineUniqueSamples = new HashSet<String>(samplesFromFile.size()+samplesFromExpressions.size()+sampleNames.size());
+        Set<String> commandLineUniqueSamples = new HashSet<>(samplesFromFile.size()+samplesFromExpressions.size()+sampleNames.size());
         commandLineUniqueSamples.addAll(samplesFromFile);
         commandLineUniqueSamples.addAll(samplesFromExpressions);
         commandLineUniqueSamples.addAll(sampleNames);
@@ -356,99 +639,124 @@ public class SelectVariants extends RodWalker<Integer, Integer> implements TreeR
         samples.addAll(samplesFromExpressions);
         samples.addAll(samplesFromFile);
 
-        logger.debug(Utils.join(",",commandLineUniqueSamples));
-
-        if ( commandLineUniqueSamples.size() > 0 && ALLOW_NONOVERLAPPING_COMMAND_LINE_SAMPLES ) {
-            logger.warn("Samples present on command line input that are not present in the VCF. These samples will be ignored.");
-            samples.removeAll(commandLineUniqueSamples);
-        } else if (commandLineUniqueSamples.size() > 0 ) {
-            throw new UserException.BadInput(String.format("%s%n%n%s%n%n%s%n%n%s",
-                    "Samples entered on command line (through -sf or -sn) that are not present in the VCF.",
-                    "A list of these samples:",
-                    Utils.join(",",commandLineUniqueSamples),
-                    "To ignore these samples, run with --ALLOW_NONOVERLAPPING_COMMAND_LINE_SAMPLES"));
-        }
-
+        logger.debug(Utils.join(",", commandLineUniqueSamples));
+
+            if (!commandLineUniqueSamples.isEmpty()) {
+                if (allowNonOverlappingCommandLineSamples) {
+                    logger.warn("Samples present on command line input that are not present in the VCF. These samples will be ignored.");
+                    samples.removeAll(commandLineUniqueSamples);
+                } else {
+                    throw new UserException.BadInput(String.format("%s%n%n%s%n%n%s%n%n%s",
+                            "Samples entered on command line (through -sf or -sn) that are not present in the VCF.",
+                            "A list of these samples:",
+                            Utils.join(",", commandLineUniqueSamples),
+                            "To ignore these samples, run with --allowNonOverlappingCommandLineSamples"));
+                }
+            }
 
         // if none were requested, we want all of them
         if ( samples.isEmpty() ) {
             samples.addAll(vcfSamples);
-            NO_SAMPLES_SPECIFIED = true;
+            noSamplesSpecified = true;
         }
 
         // now, exclude any requested samples
         final Collection<String> XLsamplesFromFile = SampleUtils.getSamplesFromFiles(XLsampleFiles);
+        final Collection<String> XLsamplesFromExpressions = SampleUtils.matchSamplesExpressions(vcfSamples, XLsampleExpressions);
         samples.removeAll(XLsamplesFromFile);
         samples.removeAll(XLsampleNames);
-        NO_SAMPLES_SPECIFIED = NO_SAMPLES_SPECIFIED && XLsampleNames.isEmpty() && XLsamplesFromFile.isEmpty();
+        samples.removeAll(XLsamplesFromExpressions);
+        noSamplesSpecified = noSamplesSpecified && XLsampleNames.isEmpty() && XLsamplesFromFile.isEmpty() &&
+                XLsamplesFromExpressions.isEmpty();
 
-        if ( samples.size() == 0 && !NO_SAMPLES_SPECIFIED )
+        if ( samples.isEmpty() && !noSamplesSpecified )
             throw new UserException("All samples requested to be included were also requested to be excluded.");
 
-        if ( ! NO_SAMPLES_SPECIFIED )
+        if ( ! noSamplesSpecified )
             for ( String sample : samples )
             logger.info("Including sample '" + sample + "'");
 
         // if user specified types to include, add these, otherwise, add all possible variant context types to list of vc types to include
-        if (TYPES_TO_INCLUDE.isEmpty()) {
-
+        if (typesToInclude.isEmpty()) {
             for (VariantContext.Type t : VariantContext.Type.values())
                 selectedTypes.add(t);
-
         }
         else {
-            for (VariantContext.Type t : TYPES_TO_INCLUDE)
+            for (VariantContext.Type t : typesToInclude)
                 selectedTypes.add(t);
-
         }
+
+        // remove specified types
+        for (VariantContext.Type t : typesToExclude)
+            selectedTypes.remove(t);
+
         // Initialize VCF header
         Set<VCFHeaderLine> headerLines = VCFUtils.smartMergeHeaders(vcfRods.values(), true);
         headerLines.add(new VCFHeaderLine("source", "SelectVariants"));
 
-        if (KEEP_ORIGINAL_CHR_COUNTS) {
-            headerLines.add(new VCFInfoHeaderLine("AC_Orig", VCFHeaderLineCount.A, VCFHeaderLineType.Integer, "Original AC"));
-            headerLines.add(new VCFInfoHeaderLine("AF_Orig", VCFHeaderLineCount.A, VCFHeaderLineType.Float, "Original AF"));
-            headerLines.add(new VCFInfoHeaderLine("AN_Orig", 1, VCFHeaderLineType.Integer, "Original AN"));
+        if (keepOriginalChrCounts) {
+            headerLines.add(GATKVCFHeaderLines.getInfoLine(GATKVCFConstants.ORIGINAL_AC_KEY));
+            headerLines.add(GATKVCFHeaderLines.getInfoLine(GATKVCFConstants.ORIGINAL_AF_KEY));
+            headerLines.add(GATKVCFHeaderLines.getInfoLine(GATKVCFConstants.ORIGINAL_AN_KEY));
         }
+        if (keepOriginalDepth)
+            headerLines.add(GATKVCFHeaderLines.getInfoLine(GATKVCFConstants.ORIGINAL_DP_KEY));
         headerLines.addAll(Arrays.asList(ChromosomeCountConstants.descriptions));
         headerLines.add(VCFStandardHeaderLines.getInfoLine(VCFConstants.DEPTH_KEY));
 
-        for (int i = 0; i < SELECT_EXPRESSIONS.size(); i++) {
+        for (int i = 0; i < selectExpressions.size(); i++) {
             // It's not necessary that the user supply select names for the JEXL expressions, since those
             // expressions will only be needed for omitting records.  Make up the select names here.
             selectNames.add(String.format("select-%d", i));
         }
 
-        jexls = VariantContextUtils.initializeMatchExps(selectNames, SELECT_EXPRESSIONS);
+        jexls = VariantContextUtils.initializeMatchExps(selectNames, selectExpressions);
 
         // Look at the parameters to decide which analysis to perform
-        DISCORDANCE_ONLY = discordanceTrack.isBound();
-        if (DISCORDANCE_ONLY) logger.info("Selecting only variants discordant with the track: " + discordanceTrack.getName());
+        discordanceOnly = discordanceTrack.isBound();
+        if (discordanceOnly) logger.info("Selecting only variants discordant with the track: " + discordanceTrack.getName());
 
-        CONCORDANCE_ONLY = concordanceTrack.isBound();
-        if (CONCORDANCE_ONLY) logger.info("Selecting only variants concordant with the track: " + concordanceTrack.getName());
+        concordanceOnly = concordanceTrack.isBound();
+        if (concordanceOnly) logger.info("Selecting only variants concordant with the track: " + concordanceTrack.getName());
 
-        if (MENDELIAN_VIOLATIONS) {
-            mv = new MendelianViolation(MENDELIAN_VIOLATION_QUAL_THRESHOLD,false,true);
+        if (mendelianViolations) {
+            mv = new MendelianViolation(medelianViolationQualThreshold,false,true);
         }
 
-        SELECT_RANDOM_FRACTION = fractionRandom > 0;
-        if (SELECT_RANDOM_FRACTION) logger.info("Selecting approximately " + 100.0*fractionRandom + "% of the variants at random from the variant track");
+        selectRandomFraction = fractionRandom > 0;
+        if (selectRandomFraction) logger.info("Selecting approximately " + 100.0*fractionRandom + "% of the variants at random from the variant track");
+
+        // Get variant IDs to keep and remove
+        IDsToKeep = getIDsFromFile(rsIDFile);
+
+        IDsToRemove = getIDsFromFile(XLrsIDFile);
+
+        vcfWriter.writeHeader(new VCFHeader(headerLines, samples));
+    }
 
+    /**
+     * Get IDs from a file
+     *
+     * @param file file containing the IDs
+     * @return set of IDs or null if the file is null
+     * @throws UserException.CouldNotReadInputFile if could not read the file
+     */
+    private Set<String> getIDsFromFile(final File file){
         /** load in the IDs file to a hashset for matching */
-        if ( rsIDFile != null ) {
-            IDsToKeep = new HashSet<String>();
+        if ( file != null ) {
+            Set<String> ids = new HashSet<>();
             try {
-                for ( final String line : new XReadLines(rsIDFile).readLines() ) {
-                    IDsToKeep.add(line.trim());
+                for ( final java.lang.String line : new XReadLines(file).readLines() ) {
+                    ids.add(line.trim());
                 }
-                logger.info("Selecting only variants with one of " + IDsToKeep.size() + " IDs from " + rsIDFile);
+                logger.info("Selecting only variants with one of " + ids.size() + " IDs from " + file);
             } catch ( FileNotFoundException e ) {
-                throw new UserException.CouldNotReadInputFile(rsIDFile, e);
+                throw new UserException.CouldNotReadInputFile(file, e);
             }
+            return ids;
         }
 
-        vcfWriter.writeHeader(new VCFHeader(headerLines, samples));
+        return null;
     }
 
     /**
@@ -466,33 +774,30 @@ public class SelectVariants extends RodWalker<Integer, Integer> implements TreeR
 
         Collection<VariantContext> vcs = tracker.getValues(variantCollection.variants, context.getLocation());
 
-        if ( vcs == null || vcs.size() == 0) {
+        if ( vcs == null || vcs.isEmpty()) {
             return 0;
         }
 
         for (VariantContext vc : vcs) {
             // an option for performance testing only
-            if ( fullyDecode )
-                vc = vc.fullyDecode(vcfRods.get(vc.getSource()), getToolkit().lenientVCFProcessing() );
+            if (fullyDecode)
+                vc = vc.fullyDecode(vcfRods.get(vc.getSource()), getToolkit().lenientVCFProcessing());
 
-            // an option for performance testing only
-            if ( forceGenotypesDecode ) {
-                final int x = vc.getCalledChrCount();
-                //logger.info("forceGenotypesDecode with getCalledChrCount() = " + );
-            }
+            if (IDsToKeep != null && !IDsToKeep.contains(vc.getID()))
+                continue;
 
-            if ( IDsToKeep != null && ! IDsToKeep.contains(vc.getID()) )
+            if (IDsToRemove != null && IDsToRemove.contains(vc.getID()))
                 continue;
 
-            if (MENDELIAN_VIOLATIONS && mv.countViolations(this.getSampleDB().getFamilies(samples),vc) < 1)
+            if (mendelianViolations && Utils.invertLogic(mv.countViolations(this.getSampleDB().getFamilies(samples), vc) == 0, invertMendelianViolations))
                 break;
 
-            if (DISCORDANCE_ONLY) {
+            if (discordanceOnly) {
                 Collection<VariantContext> compVCs = tracker.getValues(discordanceTrack, context.getLocation());
                 if (!isDiscordant(vc, compVCs))
                     continue;
             }
-            if (CONCORDANCE_ONLY) {
+            if (concordanceOnly) {
                 Collection<VariantContext> compVCs = tracker.getValues(concordanceTrack, context.getLocation());
                 if (!isConcordant(vc, compVCs))
                     continue;
@@ -507,16 +812,30 @@ public class SelectVariants extends RodWalker<Integer, Integer> implements TreeR
             if (!selectedTypes.contains(vc.getType()))
                 continue;
 
-            if ( containsIndelLargerThan(vc, maxIndelSize) )
+            if (containsIndelLargerOrSmallerThan(vc, maxIndelSize, minIndelSize))
                 continue;
 
-            VariantContext sub = subsetRecord(vc, EXCLUDE_NON_VARIANTS);
+            if ( needNumFilteredGenotypes()) {
+                int numFilteredSamples = numFilteredGenotypes(vc);
+                double fractionFilteredGenotypes = samples.isEmpty() ? 0.0 : numFilteredSamples / (double) samples.size();
+                if (numFilteredSamples > maxFilteredGenotypes || numFilteredSamples < minFilteredGenotypes ||
+                        fractionFilteredGenotypes > maxFractionFilteredGenotypes || fractionFilteredGenotypes < minFractionFilteredGenotypes)
+                    continue;
+            }
+
+            VariantContext sub = subsetRecord(vc, preserveAlleles, removeUnusedAlternates);
+
+            VariantContext filteredGenotypeToNocall = setFilteredGenotypeToNocall(sub, setFilteredGenotypesToNocall);
 
-            if ( (!EXCLUDE_NON_VARIANTS || sub.isPolymorphicInSamples()) && (!EXCLUDE_FILTERED || !sub.isFiltered()) ) {
+            // Not excluding non-variants or subsetted polymorphic variants AND including filtered loci or subsetted variant is not filtered
+            if ( (!XLnonVariants || filteredGenotypeToNocall.isPolymorphicInSamples()) && (!XLfiltered || !filteredGenotypeToNocall.isFiltered()) ) {
+
+                // Write the subsetted variant if it matches all of the expressions
                 boolean failedJexlMatch = false;
+
                 try {
                     for (VariantContextUtils.JexlVCMatchExp jexl : jexls) {
-                        if (!VariantContextUtils.match(sub, jexl)) {
+                        if ( Utils.invertLogic(!VariantContextUtils.match(filteredGenotypeToNocall, jexl), invertSelect) ){
                             failedJexlMatch = true;
                             break;
                         }
@@ -528,8 +847,8 @@ public class SelectVariants extends RodWalker<Integer, Integer> implements TreeR
                 }
                 if ( !failedJexlMatch &&
                         !justRead &&
-                        ( !SELECT_RANDOM_FRACTION || GenomeAnalysisEngine.getRandomGenerator().nextDouble() < fractionRandom ) ) {
-                    vcfWriter.add(sub);
+                        ( !selectRandomFraction || Utils.getRandomGenerator().nextDouble() < fractionRandom ) ) {
+                    vcfWriter.add(filteredGenotypeToNocall);
                 }
             }
         }
@@ -538,19 +857,20 @@ public class SelectVariants extends RodWalker<Integer, Integer> implements TreeR
     }
 
     /*
-     * Determines if any of the alternate alleles are greater than the max indel size
+     * Determines if any of the alternate alleles are greater than the max indel size or less than the min indel size
      *
      * @param vc            the variant context to check
      * @param maxIndelSize  the maximum size of allowed indels
-     * @return true if the VC contains an indel larger than maxIndelSize and false otherwise
+     * @param minIndelSize  the minimum size of allowed indels
+     * @return true if the VC contains an indel larger than maxIndelSize or less than the minIndelSize, false otherwise
      */
-    protected static boolean containsIndelLargerThan(final VariantContext vc, final int maxIndelSize) {
+    protected static boolean containsIndelLargerOrSmallerThan(final VariantContext vc, final int maxIndelSize, final int minIndelSize) {
         final List<Integer> lengths = vc.getIndelLengths();
         if ( lengths == null )
             return false;
 
         for ( Integer indelLength : lengths ) {
-            if ( Math.abs(indelLength) > maxIndelSize )
+            if ( Math.abs(indelLength) > maxIndelSize || Math.abs(indelLength) < minIndelSize )
                 return true;
         }
 
@@ -558,17 +878,38 @@ public class SelectVariants extends RodWalker<Integer, Integer> implements TreeR
     }
 
     /**
+     * Find the number of filtered samples
+     *
+     * @param vc the variant rod VariantContext
+     * @return number of filtered samples
+     */
+    private int numFilteredGenotypes(final VariantContext vc){
+        if (vc == null)
+            return 0;
+
+        int numFiltered = 0;
+        // check if we find it in the variant rod
+        GenotypesContext genotypes = vc.getGenotypes(samples);
+        for (final Genotype g : genotypes)
+            if ( g.isFiltered() && !g.getFilters().isEmpty())
+                numFiltered++;
+
+        return numFiltered;
+    }
+
+    /**
      * Checks if vc has a variant call for (at least one of) the samples.
+     *
      * @param vc the variant rod VariantContext. Here, the variant is the dataset you're looking for discordances to (e.g. HapMap)
-     * @param compVCs the comparison VariantContext (discordance
-     * @return true if is discordant
+     * @param compVCs the comparison VariantContext (discordance)
+     * @return true if VariantContexts are discordant, false otherwise
      */
-    private boolean isDiscordant (VariantContext vc, Collection<VariantContext> compVCs) {
+    private boolean isDiscordant (final VariantContext vc, final Collection<VariantContext> compVCs) {
         if (vc == null)
             return false;
 
         // if we're not looking at specific samples then the absence of a compVC means discordance
-        if (NO_SAMPLES_SPECIFIED)
+        if (noSamplesSpecified)
             return (compVCs == null || compVCs.isEmpty());
 
         // check if we find it in the variant rod
@@ -594,12 +935,19 @@ public class SelectVariants extends RodWalker<Integer, Integer> implements TreeR
         return false; // we only get here if all samples have a variant in the comp rod.
     }
 
-    private boolean isConcordant (VariantContext vc, Collection<VariantContext> compVCs) {
+    /**
+     * Checks if the two variants have the same genotypes for the selected samples
+     *
+     * @param vc the variant rod VariantContext.
+     * @param compVCs the comparison VariantContext
+     * @return true if VariantContexts are concordant, false otherwise
+     */
+    private boolean isConcordant (final VariantContext vc, final Collection<VariantContext> compVCs) {
         if (vc == null || compVCs == null || compVCs.isEmpty())
             return false;
 
         // if we're not looking for specific samples then the fact that we have both VCs is enough to call it concordant.
-        if (NO_SAMPLES_SPECIFIED)
+        if (noSamplesSpecified)
             return true;
 
         // make a list of all samples contained in this variant VC that are being tracked by the user command line arguments.
@@ -625,8 +973,8 @@ public class SelectVariants extends RodWalker<Integer, Integer> implements TreeR
         return true;
     }
 
-    private boolean sampleHasVariant(Genotype g) {
-        return (g !=null && !g.isHomRef() && (g.isCalled() || (g.isFiltered() && !EXCLUDE_FILTERED)));
+    private boolean sampleHasVariant(final Genotype g) {
+        return (g !=null && !g.isHomRef() && (g.isCalled() || (g.isFiltered() && !XLfiltered)));
     }
 
     private boolean haveSameGenotypes(final Genotype g1, final Genotype g2) {
@@ -635,7 +983,7 @@ public class SelectVariants extends RodWalker<Integer, Integer> implements TreeR
 
         if ((g1.isCalled() && g2.isFiltered()) ||
                 (g2.isCalled() && g1.isFiltered()) ||
-                (g1.isFiltered() && g2.isFiltered() && EXCLUDE_FILTERED))
+                (g1.isFiltered() && g2.isFiltered() && XLfiltered))
             return false;
 
         List<Allele> a1s = g1.getAlleles();
@@ -663,25 +1011,30 @@ public class SelectVariants extends RodWalker<Integer, Integer> implements TreeR
      * Helper method to subset a VC record, modifying some metadata stored in the INFO field (i.e. AN, AC, AF).
      *
      * @param vc       the VariantContext record to subset
-     * @param excludeNonVariants should we exclude sites that have AC=0 for any alternate alleles?
+     * @param preserveAlleles should we trim constant sequence from the beginning and/or end of all alleles, or preserve it?
+     * @param removeUnusedAlternates removes alternate alleles with AC=0
      * @return the subsetted VariantContext
      */
-    private VariantContext subsetRecord(final VariantContext vc, final boolean excludeNonVariants) {
-        if ( NO_SAMPLES_SPECIFIED || samples.isEmpty() )
+    private VariantContext subsetRecord(final VariantContext vc, final boolean preserveAlleles, final boolean removeUnusedAlternates) {
+        //subContextFromSamples() always decodes the vc, which is a fairly expensive operation.  Avoid if possible
+        if ( noSamplesSpecified && !removeUnusedAlternates && !forceValidOutput )
             return vc;
 
-        final VariantContext sub = vc.subContextFromSamples(samples, excludeNonVariants); // strip out the alternate alleles that aren't being used
+        // strip out the alternate alleles that aren't being used
+        final VariantContext sub = vc.subContextFromSamples(samples, removeUnusedAlternates);
+
+        //If no subsetting happened, exit now
+        if ( sub.getNSamples() == vc.getNSamples() && sub.getNAlleles() == vc.getNAlleles() )
+            return vc;
 
         final VariantContextBuilder builder = new VariantContextBuilder(sub);
 
         // if there are fewer alternate alleles now in the selected VC, we need to fix the PL and AD values
-        GenotypesContext newGC = GATKVariantContextUtils.updatePLsAndAD(sub, vc);
+        GenotypesContext newGC = GATKVariantContextUtils.updatePLsSACsAD(sub, vc);
 
-        // if we have fewer samples in the selected VC than in the original VC, we need to strip out the MLE tags
-        if ( vc.getNSamples() != sub.getNSamples() ) {
-            builder.rmAttribute(VCFConstants.MLE_ALLELE_COUNT_KEY);
-            builder.rmAttribute(VCFConstants.MLE_ALLELE_FREQUENCY_KEY);
-        }
+        // since the VC has been subset (either by sample or allele), we need to strip out the MLE tags
+        builder.rmAttribute(GATKVCFConstants.MLE_ALLELE_COUNT_KEY);
+        builder.rmAttribute(GATKVCFConstants.MLE_ALLELE_FREQUENCY_KEY);
 
         // Remove a fraction of the genotypes if needed
         if ( fractionGenotypes > 0 ){
@@ -689,8 +1042,7 @@ public class SelectVariants extends RodWalker<Integer, Integer> implements TreeR
             for ( Genotype genotype : newGC ) {
                 //Set genotype to no call if it falls in the fraction.
                 if(fractionGenotypes>0 && randomGenotypes.nextDouble()<fractionGenotypes){
-                    final List<Allele> alleles = Arrays.asList(Allele.NO_CALL, Allele.NO_CALL);
-                    genotypes.add(new GenotypeBuilder(genotype).alleles(alleles).noGQ().make());
+                    genotypes.add(new GenotypeBuilder(genotype).alleles(diploidNoCallAlleles).noGQ().make());
                 }
                 else{
                     genotypes.add(genotype);
@@ -703,9 +1055,37 @@ public class SelectVariants extends RodWalker<Integer, Integer> implements TreeR
 
         addAnnotations(builder, vc, sub.getSampleNames());
 
-        return builder.make();
+        final VariantContext subset = builder.make();
+
+        final VariantContext trimmed = preserveAlleles? subset : GATKVariantContextUtils.trimAlleles(subset,true,true);
+
+        return trimmed;
     }
 
+    /**
+     * If --setFilteredGtToNocall, set filtered genotypes to no-call
+     *
+     * @param vc the VariantContext record to set filtered genotypes to no-call
+     * @param filteredGenotypesToNocall  set filtered genotypes to no-call?
+     * @return the VariantContext with no-call genotypes if the sample was filtered
+     */
+    private VariantContext setFilteredGenotypeToNocall(final VariantContext vc, final boolean filteredGenotypesToNocall) {
+
+        if ( !filteredGenotypesToNocall )
+            return vc;
+
+        final VariantContextBuilder builder = new VariantContextBuilder(vc);
+        final GenotypesContext genotypes = GenotypesContext.create(vc.getGenotypes().size());
+
+        for ( final Genotype g : vc.getGenotypes() ) {
+            if ( g.isCalled() && g.isFiltered() )
+                genotypes.add(new GenotypeBuilder(g).alleles(diploidNoCallAlleles).make());
+            else
+                genotypes.add(g);
+        }
+
+        return builder.genotypes(genotypes).make();
+    }
     /*
      * Add annotations to the new VC
      *
@@ -716,7 +1096,7 @@ public class SelectVariants extends RodWalker<Integer, Integer> implements TreeR
     private void addAnnotations(final VariantContextBuilder builder, final VariantContext originalVC, final Set<String> selectedSampleNames) {
         if ( fullyDecode ) return; // TODO -- annotations are broken with fully decoded data
 
-        if ( KEEP_ORIGINAL_CHR_COUNTS ) {
+        if ( keepOriginalChrCounts ) {
             final int[] indexOfOriginalAlleleForNewAllele;
             final List<Allele> newAlleles = builder.getAlleles();
             final int numOriginalAlleles = originalVC.getNAlleles();
@@ -743,15 +1123,18 @@ public class SelectVariants extends RodWalker<Integer, Integer> implements TreeR
             }
 
             if ( originalVC.hasAttribute(VCFConstants.ALLELE_COUNT_KEY) )
-                builder.attribute("AC_Orig", getReorderedAttributes(originalVC.getAttribute(VCFConstants.ALLELE_COUNT_KEY), indexOfOriginalAlleleForNewAllele));
+                builder.attribute(GATKVCFConstants.ORIGINAL_AC_KEY, getReorderedAttributes(originalVC.getAttribute(VCFConstants.ALLELE_COUNT_KEY), indexOfOriginalAlleleForNewAllele));
             if ( originalVC.hasAttribute(VCFConstants.ALLELE_FREQUENCY_KEY) )
-                builder.attribute("AF_Orig", getReorderedAttributes(originalVC.getAttribute(VCFConstants.ALLELE_FREQUENCY_KEY), indexOfOriginalAlleleForNewAllele));
+                builder.attribute(GATKVCFConstants.ORIGINAL_AF_KEY, getReorderedAttributes(originalVC.getAttribute(VCFConstants.ALLELE_FREQUENCY_KEY), indexOfOriginalAlleleForNewAllele));
             if ( originalVC.hasAttribute(VCFConstants.ALLELE_NUMBER_KEY) )
-                builder.attribute("AN_Orig", originalVC.getAttribute(VCFConstants.ALLELE_NUMBER_KEY));
+                builder.attribute(GATKVCFConstants.ORIGINAL_AN_KEY, originalVC.getAttribute(VCFConstants.ALLELE_NUMBER_KEY));
         }
 
         VariantContextUtils.calculateChromosomeCounts(builder, false);
 
+        if (keepOriginalDepth && originalVC.hasAttribute(VCFConstants.DEPTH_KEY))
+            builder.attribute(GATKVCFConstants.ORIGINAL_DP_KEY, originalVC.getAttribute(VCFConstants.DEPTH_KEY));
+
         boolean sawDP = false;
         int depth = 0;
         for ( final String sample : selectedSampleNames ) {
@@ -766,7 +1149,7 @@ public class SelectVariants extends RodWalker<Integer, Integer> implements TreeR
         }
 
         if ( sawDP )
-            builder.attribute("DP", depth);
+            builder.attribute(VCFConstants.DEPTH_KEY, depth);
     }
 
     /**
@@ -798,4 +1181,16 @@ public class SelectVariants extends RodWalker<Integer, Integer> implements TreeR
         }
         return result;
     }
-}
+
+    /**
+     * Need the number of filtered genotypes samples?
+     *
+     * @return true if any of the filtered genotype samples arguments is used (not the default value), false otherwise
+     */
+    private boolean needNumFilteredGenotypes(){
+        return maxFilteredGenotypes != MAX_FILTERED_GENOTYPES_DEFAULT_VALUE ||
+                minFilteredGenotypes != MIN_FILTERED_GENOTYPES_DEFAULT_VALUE ||
+                maxFractionFilteredGenotypes != MAX_FRACTION_FILTERED_GENOTYPES_DEFAULT_VALUE ||
+                minFractionFilteredGenotypes != MIN_FRACTION_FILTERED_GENOTYPES_DEFAULT_VALUE;
+    }
+}
\ No newline at end of file
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/variantutils/ValidateVariants.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/variantutils/ValidateVariants.java
index 6b6e6ca..a5b710a 100644
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/variantutils/ValidateVariants.java
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/variantutils/ValidateVariants.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -31,9 +31,9 @@ import org.broadinstitute.gatk.utils.commandline.ArgumentCollection;
 import org.broadinstitute.gatk.engine.CommandLineGATK;
 import org.broadinstitute.gatk.engine.arguments.DbsnpArgumentCollection;
 import org.broadinstitute.gatk.engine.arguments.StandardVariantContextInputArgumentCollection;
-import org.broadinstitute.gatk.engine.contexts.AlignmentContext;
-import org.broadinstitute.gatk.engine.contexts.ReferenceContext;
-import org.broadinstitute.gatk.engine.refdata.RefMetaDataTracker;
+import org.broadinstitute.gatk.utils.contexts.AlignmentContext;
+import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
+import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
 import org.broadinstitute.gatk.engine.walkers.Reference;
 import org.broadinstitute.gatk.engine.walkers.RodWalker;
 import org.broadinstitute.gatk.engine.walkers.Window;
@@ -49,10 +49,10 @@ import java.util.*;
 
 
 /**
- * Validates a VCF file with an extra strict set of criteria.
+ * Validate a VCF file with an extra strict set of criteria
  *
  * <p>
- * ValidateVariants is a GATK tool that takes a VCF file and validates much of the information inside it.
+ * This tool is designed to validate much of the information inside a VCF file.
  * In addition to standard adherence to the VCF specification, this tool performs extra strict validations to ensure
  * the information contained within the file is correct. These include:
  * </p><p>
@@ -80,37 +80,43 @@ import java.util.*;
  * A variant set to validate using <code>-V</code> or <code>--variant</code> as shown below.
  * </p>
  *
- * <h3>Examples</h3>
- *
- * <p>To perform VCF format and all strict validations: </p>
+ * <h3>Usage examples</h3>
  *
+ * <h4>To perform VCF format tests and all strict validations</h4>
  * <pre>
- * java -Xmx2g -jar GenomeAnalysisTK.jar \
- *   -R ref.fasta \
+ * java -jar GenomeAnalysisTK.jar \
  *   -T ValidateVariants \
- *   --variant input.vcf \
+ *   -R reference.fasta \
+ *   -V input.vcf \
  *   --dbsnp dbsnp.vcf
  * </pre>
  *
- * <p>To perform only VCF format tests:</p>
- *
+ * <h4>To perform VCF format tests and all strict validations on VCFs containing alleles &lt;= 208 bases</h4>
  * <pre>
- * java -Xmx2g -jar GenomeAnalysisTK.jar \
- *   -R ref.fasta \
+ * java -jar GenomeAnalysisTK.jar \
  *   -T ValidateVariants \
- *   <b>--validationTypeToExclude ALL</b> \
- *   --variant input.vcf
+ *   -R reference.fasta \
+ *   -V input.vcf \
+ *   --dbsnp dbsnp.vcf \
+ *   --reference_window_stop 208
  * </pre>
  *
- * <p>To perform all validations except the strict <i>ALLELE</i> validation:</p>
+ * <h4>To perform only VCF format tests</h4>
+ * <pre>
+ * java -jar GenomeAnalysisTK.jar \
+ *   -T ValidateVariants \
+ *   -R reference.fasta \
+ *   -V input.vcf \
+ *   <b>--validationTypeToExclude ALL</b>
+ * </pre>
  *
+ * <h4>To perform all validations except the strict <i>ALLELE</i> validation</h4>
  * <pre>
- * java -Xmx2g -jar GenomeAnalysisTK.jar \
- *   -R ref.fasta \
+ * java -jar GenomeAnalysisTK.jar \
  *   -T ValidateVariants \
+ *   -R reference.fasta \
+ *   -V input.vcf \
  *   <b>--validationTypeToExclude ALLELES</b>
- *   --variant input.vcf \
- *   --dbsnp dbsnp.vcf
  * </pre>
  *
  */
@@ -118,6 +124,9 @@ import java.util.*;
 @Reference(window=@Window(start=0,stop=100))
 public class ValidateVariants extends RodWalker<Integer, Integer> {
 
+    // Log message for a reference allele that is too long
+    protected static final String REFERENCE_ALLELE_TOO_LONG_MSG = "Reference allele is too long";
+
     @ArgumentCollection
     protected StandardVariantContextInputArgumentCollection variantCollection = new StandardVariantContextInputArgumentCollection();
 
@@ -185,6 +194,9 @@ public class ValidateVariants extends RodWalker<Integer, Integer> {
 
     private File file = null;
 
+    // Stop of the expanded window for which the reference context should be provided, relative to the locus.
+    private int referenceWindowStop;
+
     /**
      * Contains final set of validation to apply.
      */
@@ -193,6 +205,7 @@ public class ValidateVariants extends RodWalker<Integer, Integer> {
     public void initialize() {
         file = new File(variantCollection.variants.getSource());
         validationTypes = calculateValidationTypesToApply(excludeTypes);
+        referenceWindowStop = getToolkit().getArguments().reference_window_stop;
     }
 
     public Integer map(RefMetaDataTracker tracker, ReferenceContext ref, AlignmentContext context) {
@@ -224,8 +237,11 @@ public class ValidateVariants extends RodWalker<Integer, Integer> {
         // get the true reference allele
         final Allele reportedRefAllele = vc.getReference();
         final int refLength = reportedRefAllele.length();
-        if ( refLength > 100 ) {
-            logger.info(String.format("Reference allele is too long (%d) at position %s:%d; skipping that record.", refLength, vc.getChr(), vc.getStart()));
+
+        // reference length is greater than the reference window stop before and after expansion
+        if ( refLength > 100 && refLength > referenceWindowStop ) {
+            logger.info(String.format("%s (%d) at position %s:%d; skipping that record. Set --reference_window_stop >= %d",
+                    REFERENCE_ALLELE_TOO_LONG_MSG, refLength, vc.getChr(), vc.getStart(), refLength));
             return;
         }
 
@@ -263,7 +279,7 @@ public class ValidateVariants extends RodWalker<Integer, Integer> {
      * @return never {@code null} but perhaps an empty set.
      */
     private Collection<ValidationType> calculateValidationTypesToApply(final List<ValidationType> excludeTypes) {
-        if (excludeTypes.size() == 0)
+        if (excludeTypes.isEmpty())
             return Collections.singleton(ValidationType.ALL);
         final Set<ValidationType> excludeTypeSet = new LinkedHashSet<>(excludeTypes);
         if (excludeTypes.size() != excludeTypeSet.size())
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/variantutils/VariantValidationAssessor.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/variantutils/VariantValidationAssessor.java
deleted file mode 100644
index 9031bf7..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/variantutils/VariantValidationAssessor.java
+++ /dev/null
@@ -1,304 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.tools.walkers.variantutils;
-
-import org.broadinstitute.gatk.engine.walkers.Reference;
-import org.broadinstitute.gatk.engine.walkers.RodWalker;
-import org.broadinstitute.gatk.engine.walkers.Window;
-import org.broadinstitute.gatk.utils.commandline.*;
-import org.broadinstitute.gatk.engine.CommandLineGATK;
-import org.broadinstitute.gatk.engine.arguments.StandardVariantContextInputArgumentCollection;
-import org.broadinstitute.gatk.engine.contexts.AlignmentContext;
-import org.broadinstitute.gatk.engine.contexts.ReferenceContext;
-import org.broadinstitute.gatk.engine.refdata.RefMetaDataTracker;
-import org.broadinstitute.gatk.utils.QualityUtils;
-import org.broadinstitute.gatk.utils.SampleUtils;
-import org.broadinstitute.gatk.utils.help.HelpConstants;
-import org.broadinstitute.gatk.utils.variant.GATKVCFUtils;
-import org.broadinstitute.gatk.utils.variant.GATKVariantContextUtils;
-import htsjdk.variant.vcf.*;
-import org.broadinstitute.gatk.utils.help.DocumentedGATKFeature;
-import htsjdk.variant.variantcontext.writer.VariantContextWriter;
-import htsjdk.variant.variantcontext.Allele;
-import htsjdk.variant.variantcontext.VariantContext;
-import htsjdk.variant.variantcontext.VariantContextBuilder;
-
-import java.util.*;
-
-/**
- * Annotates a validation (from Sequenom for example) VCF with QC metrics (HW-equilibrium, % failed probes)
- *
- * <p>
- * The Variant Validation Assessor is a tool for vetting/assessing validation data (containing genotypes).
- * The tool produces a VCF that is annotated with information pertaining to plate quality control and by
- * default is soft-filtered by high no-call rate or low Hardy-Weinberg probability.
- * If you have .ped files, please first convert them to VCF format.
- *
- * <h3>Input</h3>
- * <p>
- * A validation VCF to annotate.
- * </p>
- *
- * <h3>Output</h3>
- * <p>
- * An annotated VCF.  Additionally, a table like the following will be output:
- * <pre>
- *     Total number of samples assayed:                  185
- *     Total number of records processed:                152
- *     Number of Hardy-Weinberg violations:              34 (22%)
- *     Number of no-call violations:                     12 (7%)
- *     Number of homozygous variant violations:          0 (0%)
- *     Number of records passing all filters:            106 (69%)
- *     Number of passing records that are polymorphic:   98 (92%)
- * </pre>
- * </p>
- *
- * <h3>Examples</h3>
- * <pre>
- * java -Xmx2g -jar GenomeAnalysisTK.jar \
- *   -R ref.fasta \
- *   -T VariantValidationAssessor \
- *   --variant input.vcf \
- *   -o output.vcf
- * </pre>
- *
- */
- at DocumentedGATKFeature( groupName = HelpConstants.DOCS_CAT_VALIDATION, extraDocs = {CommandLineGATK.class} )
- at Reference(window=@Window(start=0,stop=40))
-public class VariantValidationAssessor extends RodWalker<VariantContext,Integer> {
-
-    @ArgumentCollection
-    protected StandardVariantContextInputArgumentCollection variantCollection = new StandardVariantContextInputArgumentCollection();
-
-    @Output(doc="File to which variants should be written")
-    protected VariantContextWriter vcfwriter = null;
-
-    @Argument(fullName="maxHardy", doc="Maximum phred-scaled Hardy-Weinberg violation pvalue to consider an assay valid", required=false)
-    protected double maxHardy = 20.0;
-
-    /**
-     * To disable, set to a value greater than 1.
-     */
-    @Argument(fullName="maxNoCall", doc="Maximum no-call rate (as a fraction) to consider an assay valid", required=false)
-    protected double maxNoCall = 0.05;
-
-    /**
-     * To disable, set to a value greater than 1.
-     */
-    @Argument(fullName="maxHomVar", doc="Maximum homozygous variant rate (as a fraction) to consider an assay valid", required=false)
-    protected double maxHomNonref = 1.1;
-
-    //@Argument(fullName="populationFile", shortName="populations", doc="A tab-delimited file relating individuals to populations,"+
-    //          "used for smart Hardy-Weinberg annotation",required = false)
-    //private File popFile = null;
-
-    // sample names
-    private TreeSet<String> sampleNames = null;
-
-    // variant context records
-    private ArrayList<VariantContext> records = new ArrayList<VariantContext>();
-
-    // statistics
-    private int numRecords = 0;
-    private int numHWViolations = 0;
-    private int numNoCallViolations = 0;
-    private int numHomVarViolations = 0;
-    private int numTrueVariants = 0;
-
-    //private HashMap<String,String> samplesToPopulation;
-
-    public void initialize() {
-        //if ( popFile != null ) {
-        //    samplesToPopulation = parsePopulationFile(popFile);
-        //}
-    }
-
-    public Integer reduceInit() {
-        return 0;
-    }
-
-    public VariantContext map(RefMetaDataTracker tracker, ReferenceContext ref, AlignmentContext context) {
-        if ( tracker == null )
-            return null;
-
-        VariantContext vc = tracker.getFirstValue(variantCollection.variants, ref.getLocus());
-        // ignore places where we don't have a variant
-        if ( vc == null )
-            return null;
-
-        if ( sampleNames == null )
-            sampleNames = new TreeSet<String>(vc.getSampleNames());        
-
-        return addVariantInformationToCall(vc);
-    }
-
-    public Integer reduce(VariantContext call, Integer numVariants) {
-        if ( call != null ) {
-            numVariants++;
-            records.add(call);
-        }
-        return numVariants;                        
-    }
-
-    public void onTraversalDone(Integer finalReduce) {
-        final List<String> inputNames = Arrays.asList(variantCollection.variants.getName());
-
-        // setup the header fields
-        Set<VCFHeaderLine> hInfo = new HashSet<VCFHeaderLine>();
-        hInfo.addAll(GATKVCFUtils.getHeaderFields(getToolkit(), inputNames));
-
-        // set up the info and filter headers
-        hInfo.add(new VCFInfoHeaderLine("NoCallPct", 1, VCFHeaderLineType.Float, "Percent of no-calls"));
-        hInfo.add(new VCFInfoHeaderLine("HomRefPct", 1, VCFHeaderLineType.Float, "Percent of homozygous reference genotypes"));
-        hInfo.add(new VCFInfoHeaderLine("HetPct", 1, VCFHeaderLineType.Float, "Percent of heterozygous genotypes"));
-        hInfo.add(new VCFInfoHeaderLine("HomVarPct", 1, VCFHeaderLineType.Float, "Percent homozygous variant genotypes"));
-        hInfo.add(new VCFInfoHeaderLine("HW", 1, VCFHeaderLineType.Float, "Phred-scaled Hardy-Weinberg violation p-value"));
-        hInfo.add(VCFStandardHeaderLines.getInfoLine(VCFConstants.ALLELE_COUNT_KEY));
-        hInfo.add(VCFStandardHeaderLines.getInfoLine(VCFConstants.ALLELE_NUMBER_KEY));
-        hInfo.add(new VCFFilterHeaderLine("HardyWeinbergViolation", "The validation is in Hardy-Weinberg violation"));
-        hInfo.add(new VCFFilterHeaderLine("HighNoCallRate", "The validation no-call rate is too high"));
-        hInfo.add(new VCFFilterHeaderLine("TooManyHomVars", "The validation homozygous variant rate is too high"));
-
-        // print out (and add to headers) the validation metrics
-        System.out.println(String.format("Total number of samples assayed:\t\t\t%d", sampleNames.size()));
-        hInfo.add(new VCFHeaderLine("ValidationMetrics_SamplesAssayed", String.format("%d", sampleNames.size())));
-        System.out.println(String.format("Total number of records processed:\t\t\t%d", numRecords));
-        hInfo.add(new VCFHeaderLine("ValidationMetrics_RecordsProcessed", String.format("%d", numRecords)));
-        if ( numRecords > 0 ) {
-            System.out.println(String.format("Number of Hardy-Weinberg violations:\t\t\t%d (%d%%)", numHWViolations, 100*numHWViolations/numRecords));
-            hInfo.add(new VCFHeaderLine("ValidationMetrics_HardyWeinbergViolations", String.format("\"%d (%d%%)\"", numHWViolations, 100*numHWViolations/numRecords)));
-            System.out.println(String.format("Number of no-call violations:\t\t\t\t%d (%d%%)", numNoCallViolations, 100*numNoCallViolations/numRecords));
-            hInfo.add(new VCFHeaderLine("ValidationMetrics_NoCallViolations", String.format("\"%d (%d%%)\"", numNoCallViolations, 100*numNoCallViolations/numRecords)));
-            System.out.println(String.format("Number of homozygous variant violations:\t\t%d (%d%%)", numHomVarViolations, 100*numHomVarViolations/numRecords));
-            hInfo.add(new VCFHeaderLine("ValidationMetrics_HomVarViolations", String.format("\"%d (%d%%)\"", numHomVarViolations, 100*numHomVarViolations/numRecords)));
-            int goodRecords = numRecords - numHWViolations - numNoCallViolations - numHomVarViolations;
-            System.out.println(String.format("Number of records passing all filters:\t\t\t%d (%d%%)", goodRecords, 100*goodRecords/numRecords));
-            hInfo.add(new VCFHeaderLine("ValidationMetrics_RecordsPassingFilters", String.format("\"%d (%d%%)\"", goodRecords, 100*goodRecords/numRecords)));
-            if ( goodRecords > 0 ) {
-                System.out.println(String.format("Number of passing records that are polymorphic:\t\t%d (%d%%)", numTrueVariants, 100*numTrueVariants/goodRecords));
-                hInfo.add(new VCFHeaderLine("ValidationMetrics_PolymorphicPassingRecords", String.format("\"%d (%d%%)\"", numTrueVariants, 100*numTrueVariants/goodRecords)));
-            }
-        }
-        
-        vcfwriter.writeHeader(new VCFHeader(hInfo, SampleUtils.getUniqueSamplesFromRods(getToolkit(), inputNames)));
-
-        for ( VariantContext record : records )
-            vcfwriter.add(record);
-    }
-
-
-    private VariantContext addVariantInformationToCall(VariantContext vContext) {
-
-        // check possible filters
-        double hwPvalue = hardyWeinbergCalculation(vContext);
-        double hwScore = Math.abs(QualityUtils.phredScaleErrorRate(hwPvalue));
-        double noCallProp = (double)vContext.getNoCallCount() / (double)vContext.getNSamples();
-        double homRefProp = (double)vContext.getHomRefCount() / (double)vContext.getNSamples();
-        double hetProp = (double)vContext.getHetCount() / (double)vContext.getNSamples();
-        double homVarProp = (double)vContext.getHomVarCount() / (double)vContext.getNSamples();
-
-        boolean isViolation = false;
-        Set<String> filters = new HashSet<String>();
-        if ( noCallProp > maxNoCall ) {
-            filters.add("HighNoCallRate");
-            numNoCallViolations++;
-            isViolation = true;
-        } else if ( hwScore > maxHardy ) {
-            filters.add("HardyWeinbergViolation");
-            numHWViolations++;
-            isViolation = true;
-        } else if ( homVarProp > maxHomNonref) {
-            filters.add("TooManyHomVars");
-            numHomVarViolations++;
-            isViolation = true;
-        }
-
-        VariantContextBuilder builder = new VariantContextBuilder(vContext).filters(filters);
-        numRecords++;
-
-        // add the info fields
-        builder.attribute("NoCallPct", String.format("%.1f", 100.0 * noCallProp));
-        builder.attribute("HomRefPct", String.format("%.1f", 100.0 * homRefProp));
-        builder.attribute("HomVarPct", String.format("%.1f", 100.0 * homVarProp));
-        builder.attribute("HetPct", String.format("%.1f", 100.0 * hetProp));
-        builder.attribute("HW", String.format("%.2f", hwScore));
-        Collection<Allele> altAlleles = vContext.getAlternateAlleles();
-        int altAlleleCount = altAlleles.size() == 0 ? 0 : vContext.getCalledChrCount(altAlleles.iterator().next());
-        if ( !isViolation && altAlleleCount > 0 )
-            numTrueVariants++;
-        builder.attribute(VCFConstants.ALLELE_COUNT_KEY, String.format("%d", altAlleleCount));
-        builder.attribute(VCFConstants.ALLELE_NUMBER_KEY, String.format("%d", vContext.getCalledChrCount()));
-
-        return builder.make();
-    }
-
-    private double hardyWeinbergCalculation(VariantContext vc) {
-        //if ( popFile != null ) {
-        //    throw new GATKException("We still need to implement this!");
-        //} else {
-        return GATKVariantContextUtils.computeHardyWeinbergPvalue(vc);
-        //}
-    }
-
-    // TODO -- REWRITE THIS TO WORK WITH VARIANT CONTEXT
-    /******
-
-    private String smartHardy(ReferenceContext ref, VCFRecord rec) {
-        HashMap<String,ArrayList<Genotype>> genotypesByPopulation = new HashMap<String,ArrayList<Genotype>>(10);
-        HashMap<String,String> hardyWeinbergByPopulation = new HashMap<String,String>(10);
-
-        for ( String population : samplesToPopulation.values() ) {
-            genotypesByPopulation.put(population,new ArrayList<Genotype>());
-        }
-
-        //for ( String name : sampleNames ) {
-        //    String pop = samplesToPopulation.get(name);
-        //    if ( rec.getGenotype(name) != null ) {
-        //        genotypesByPopulation.get(pop).add(rec.getGenotype(name));
-        //    }
-        //}
-
-        for ( String population : samplesToPopulation.values() ) {
-            VCFVariationCall v = new VCFVariationCall(ref.getBase(),ref.getLocus(),VCFVariationCall.VARIANT_TYPE.SNP);
-            v.setGenotypeCalls(genotypesByPopulation.get(population));
-            hardyWeinbergByPopulation.put(population,HWCalc.annotate(null,ref,null,v));
-        }
-
-        return smartHardyString(hardyWeinbergByPopulation);
-    }
-
-    private String smartHardyString(HashMap<String,String> hwByPop) {
-        // for now just return the maximum:
-        int maxH = -100;
-        for ( String pop : samplesToPopulation.values() ) {
-            maxH = Integer.parseInt(hwByPop.get(pop)) > maxH ? Integer.parseInt(hwByPop.get(pop)) : maxH;
-        }
-
-        return String.format("%s",maxH);
-    }
-
-    *********/
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/variantutils/VariantsToAllelicPrimitives.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/variantutils/VariantsToAllelicPrimitives.java
index 1f7b20c..a75491d 100644
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/variantutils/VariantsToAllelicPrimitives.java
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/variantutils/VariantsToAllelicPrimitives.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -30,14 +30,14 @@ import org.broadinstitute.gatk.utils.commandline.ArgumentCollection;
 import org.broadinstitute.gatk.utils.commandline.Output;
 import org.broadinstitute.gatk.engine.CommandLineGATK;
 import org.broadinstitute.gatk.engine.arguments.StandardVariantContextInputArgumentCollection;
-import org.broadinstitute.gatk.engine.contexts.AlignmentContext;
-import org.broadinstitute.gatk.engine.contexts.ReferenceContext;
-import org.broadinstitute.gatk.engine.refdata.RefMetaDataTracker;
+import org.broadinstitute.gatk.utils.contexts.AlignmentContext;
+import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
+import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
 import org.broadinstitute.gatk.engine.walkers.RodWalker;
-import org.broadinstitute.gatk.utils.SampleUtils;
+import org.broadinstitute.gatk.engine.SampleUtils;
 import org.broadinstitute.gatk.utils.help.DocumentedGATKFeature;
 import org.broadinstitute.gatk.utils.help.HelpConstants;
-import org.broadinstitute.gatk.utils.variant.GATKVCFUtils;
+import org.broadinstitute.gatk.engine.GATKVCFUtils;
 import org.broadinstitute.gatk.utils.variant.GATKVariantContextUtils;
 import htsjdk.variant.variantcontext.*;
 import htsjdk.variant.variantcontext.writer.VariantContextWriter;
@@ -48,35 +48,37 @@ import htsjdk.variant.vcf.VCFHeaderLine;
 import java.util.*;
 
 /**
- * Takes alleles from a variants file and breaks them up (if possible) into more basic/primitive alleles.
+ * Simplify multi-nucleotide variants (MNPs) into more basic/primitive alleles.
  *
- * <p>
- * For now this tool modifies only multi-nucleotide polymorphisms (MNPs) and leaves SNPs, indels, and complex substitutions as is,
- * although one day it may be extended to handle the complex substitution case.
- *
- * This tool will take an MNP (e.g. ACCCA -> TCCCG) and break it up into separate records for each component part (A-T and A->G).
+ * <p>This tool will take an MNP (e.g. ACCCA -> TCCCG) and break it up into separate records for each component
+ * part (A->T and A->G).</p>
  *
- * Note that this tool modifies only bi-allelic variants.
- *
- * <h2>Input</h2>
+ * <h3>Input</h3>
  * <p>
  * A variant set with any type of alleles.
  * </p>
  *
- * <h2>Output</h2>
+ * <h3>Output</h3>
  * <p>
  * A VCF with alleles broken into primitive types.
  * </p>
  *
- * <h2>Examples</h2>
+ * <h3>Usage example</h3>
  * <pre>
- * java -Xmx2g -jar GenomeAnalysisTK.jar \
- *   -R ref.fasta \
+ * java -jar GenomeAnalysisTK.jar \
  *   -T VariantsToAllelicPrimitives \
- *   --variant input.vcf \
+ *   -R reference.fasta \
+ *   -V input.vcf \
  *   -o output.vcf
  * </pre>
  *
+ * <h3>Caveats</h3>
+ * <ul>
+ *     <li>For now this tool modifies only multi-nucleotide polymorphisms (MNPs) and leaves SNPs, indels, and
+ * complex substitutions as is, although one day it may be extended to handle the complex substitution case.</li>
+ *     <li>This tool modifies only bi-allelic variants.</li>
+ * </ul>
+ *
  */
 @DocumentedGATKFeature( groupName = HelpConstants.DOCS_CAT_VARMANIP, extraDocs = {CommandLineGATK.class} )
 public class VariantsToAllelicPrimitives extends RodWalker<Integer, Integer> {
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/variantutils/VariantsToBinaryPed.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/variantutils/VariantsToBinaryPed.java
index b51349a..10f4637 100644
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/variantutils/VariantsToBinaryPed.java
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/variantutils/VariantsToBinaryPed.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -30,15 +30,15 @@ import org.broadinstitute.gatk.utils.commandline.*;
 import org.broadinstitute.gatk.engine.CommandLineGATK;
 import org.broadinstitute.gatk.engine.arguments.DbsnpArgumentCollection;
 import org.broadinstitute.gatk.engine.arguments.StandardVariantContextInputArgumentCollection;
-import org.broadinstitute.gatk.engine.contexts.AlignmentContext;
-import org.broadinstitute.gatk.engine.contexts.ReferenceContext;
-import org.broadinstitute.gatk.engine.refdata.RefMetaDataTracker;
+import org.broadinstitute.gatk.utils.contexts.AlignmentContext;
+import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
+import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
 import org.broadinstitute.gatk.engine.walkers.Reference;
 import org.broadinstitute.gatk.engine.walkers.RodWalker;
 import org.broadinstitute.gatk.engine.walkers.Window;
 import org.broadinstitute.gatk.utils.help.HelpConstants;
 import org.broadinstitute.gatk.utils.QualityUtils;
-import org.broadinstitute.gatk.utils.variant.GATKVCFUtils;
+import org.broadinstitute.gatk.engine.GATKVCFUtils;
 import htsjdk.variant.vcf.VCFHeader;
 import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
 import org.broadinstitute.gatk.utils.exceptions.UserException;
@@ -50,7 +50,55 @@ import java.io.*;
 import java.util.*;
 
 /**
- * Converts a VCF file to a binary plink Ped file (.bed/.bim/.fam)
+ * Convert VCF to binary pedigree file
+ *
+ * <p>This tool takes a VCF and produces a binary pedigree as used by
+ * <a href="http://pngu.mgh.harvard.edu/~purcell/plink/">PLINK</a>, consisting of three associated files (.bed/.bim/.fam).</p>
+ *
+ * <h3>Inputs</h3>
+ * <p>
+ * A VCF file and a metadata file.
+ * </p>
+ *
+ * <p>The metaData file can take two formats, the first of which is the first 6 lines of the standard pedigree file. This
+ * is what Plink describes as a .fam file. Note that the sex encoding convention is 1=male; 2=female; other=unknown. An example .fam file is as follows (note that there is no header):</p>
+ * <pre>
+ * CEUTrio NA12878 NA12891 NA12892 2 -9
+ * CEUTrio NA12891 UNKN1 UNKN2 1 -9
+ * CEUTrio NA12892 UNKN3 UNKN4 2 -9
+ * </pre>
+ * <p>where the entries are: FamilyID IndividualID DadID MomID Sex Phenotype.</p>
+ * <p>An alternate format is a two-column key-value file:</p>
+ * <pre>
+ * NA12878        fid=CEUTrio;dad=NA12891;mom=NA12892;sex=2;phenotype=-9
+ * NA12891        fid=CEUTrio;sex=1;phenotype=-9
+ * NA12892        fid=CEUTrio;sex=2;phenotype=-9
+ * </pre>
+ * <p>where unknown parents do not need to be specified. The columns are the individual ID and a list of key-value pairs.</p>
+ *
+ * <p>
+ * Regardless of which file is specified, the tool will output a .fam file alongside the pedigree file. If the
+ * command line has "-m [name].fam", the fam file will be subset and reordered to match the sample content and ordering
+ * of the VCF. However, if a metadata file of the alternate format is passed by "-m [name].txt", the tool will
+ * construct a formatted .fam file from the data.
+ * </p>
+ *
+ * <h3>Outputs</h3>
+ * <p>
+ * A binary pedigree in PLINK format, composed of three files (.bed/.bim/.fam). See the <a href='http://pngu.mgh.harvard.edu/~purcell/plink/data.shtml#ped'>PLINK format specification</a> for more details.
+ * </p>
+ *
+ * <h3>Example</h3>
+ * <pre>
+ * java -jar GenomeAnalysisTK.jar \
+ *   -T VariantsToBinaryPed \
+ *   -R reference.fasta \
+ *   -V variants.vcf \
+ *   -m metadata.fam \
+ *   -bed output.bed \
+ *   -bim output.bim \
+ *   -fam output.fam
+ * </pre>
  */
 @DocumentedGATKFeature( groupName = HelpConstants.DOCS_CAT_VARMANIP, extraDocs = {CommandLineGATK.class} )
 @Reference(window=@Window(start=0,stop=100))
@@ -60,39 +108,14 @@ public class VariantsToBinaryPed extends RodWalker<Integer,Integer> {
 
     @ArgumentCollection
     protected DbsnpArgumentCollection dbsnp = new DbsnpArgumentCollection();
-
-    /**
-     * The metaData file can take two formats, the first of which is the first 6 lines of the standard ped file. This
-     * is what Plink describes as a fam file. An example fam file is (note that there is no header):
-     * <p><p>
-     * CEUTrio NA12878 NA12891 NA12892 2 -9</p><p>
-     * CEUTrio NA12891 UNKN1 UNKN2 2 -9</p><p>
-     * CEUTrio NA12892 UNKN3 UNKN4 1 -9</p><p>
-     * </p>
-     * where the entries are (FamilyID IndividualID DadID MomID Phenotype Sex)
-     * <p>
-     * An alternate format is a two-column key-value file
-     * </p><p><p>
-     * NA12878        fid=CEUTrio;dad=NA12891;mom=NA12892;sex=2;phenotype=-9</p><p>
-     * NA12891        fid=CEUTrio;sex=2;phenotype=-9</p><p>
-     * NA12892        fid=CEUTrio;sex=1;phenotype=-9</p><p>
-     * </p><p>
-     * wherein unknown parents needn't be specified. The columns are the individual ID, and a list of key-value pairs.
-     * </p><p>
-     * Regardless of which file is specified, the walker will output a .fam file alongside the bed file. If the
-     * command line has "-md [name].fam", the fam file will be subset and reordered to match the sample content and ordering
-     * of the VCF. However, if a metadata file of the alternate format is passed by "-md [name].txt", the walker will
-     * construct a formatted .fam file from the data.
-     * </p>
-     */
-    @Input(shortName="m",fullName = "metaData",required=true,doc="Sample metadata file. You may specify a .fam file " +
-            "(in which case it will be copied to the file you provide as fam output).")
+    
+    @Input(shortName="m",fullName = "metaData",required=true,doc="Sample metadata file")
     File metaDataFile;
 
     @Input(shortName="mode",fullName="outputMode",required=false,doc="The output file mode (SNP major or individual major)")
     OutputMode mode = OutputMode.INDIVIDUAL_MAJOR;
 
-    @Output(shortName="bed",fullName = "bed",required=true,doc="output ped file")
+    @Output(shortName="bed",fullName = "bed",required=true,doc="output bed file")
     PrintStream outBed;
 
     @Output(shortName="bim",fullName="bim",required=true,doc="output map file")
@@ -208,8 +231,8 @@ public class VariantsToBinaryPed extends RodWalker<Integer,Integer> {
         try {
             validateVariantSite(vc,ref,context);
         } catch (TribbleException e) {
-            throw new UserException("Input VCF file is invalid; we cannot guarantee the resulting ped file. "+
-            "Please run ValidateVariants for more detailed information. This error is: "+e.getMessage());
+            throw new UserException("Input VCF file is invalid. "+
+            "Please run ValidateVariants for more detailed information. The error is: "+e.getMessage());
         }
 
         String refOut;
@@ -461,12 +484,12 @@ public class VariantsToBinaryPed extends RodWalker<Integer,Integer> {
                 for ( String line : new XReadLines(metaDataFile) ) {
                     String[] famSplit = line.split("\\s+");
                     if ( famSplit.length != 6 ) {
-                        throw new UserException("Line of the fam file is malformatted. Expected 6 entries. Line is "+line);
+                        throw new UserException("Line of the fam file is malformed. Expected 6 entries. Line is "+line);
                     }
                     String sid = famSplit[1];
                     String fid = famSplit[0];
-                    String mom = famSplit[2];
-                    String dad = famSplit[3];
+                    String dad = famSplit[2];
+                    String mom = famSplit[3];
                     String sex = famSplit[4];
                     String pheno = famSplit[5];
                     HashMap<String,String> values = new HashMap<String, String>();
@@ -501,7 +524,7 @@ public class VariantsToBinaryPed extends RodWalker<Integer,Integer> {
     private void validateVariantSite(VariantContext vc, ReferenceContext ref, AlignmentContext context) {
         final Allele reportedRefAllele = vc.getReference();
         final int refLength = reportedRefAllele.length();
-        if ( refLength > 100 ) {
+        if ( refLength > 100 ) { //TODO: get rid of this hardcoded limit?
             logger.info(String.format("Reference allele is too long (%d) at position %s:%d; skipping that record.", refLength, vc.getChr(), vc.getStart()));
             return;
         }
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/variantutils/VariantsToTable.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/variantutils/VariantsToTable.java
index 9a65a70..a2651ef 100644
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/variantutils/VariantsToTable.java
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/variantutils/VariantsToTable.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -27,18 +27,18 @@ package org.broadinstitute.gatk.tools.walkers.variantutils;
 
 import org.broadinstitute.gatk.utils.commandline.*;
 import org.broadinstitute.gatk.engine.CommandLineGATK;
-import org.broadinstitute.gatk.utils.SampleUtils;
+import org.broadinstitute.gatk.engine.SampleUtils;
 import org.broadinstitute.gatk.utils.help.HelpConstants;
-import org.broadinstitute.gatk.utils.variant.GATKVCFUtils;
+import org.broadinstitute.gatk.engine.GATKVCFUtils;
 import org.broadinstitute.gatk.utils.variant.GATKVariantContextUtils;
 import htsjdk.variant.vcf.VCFConstants;
 import htsjdk.variant.vcf.VCFHeader;
 import org.broadinstitute.gatk.utils.help.DocumentedGATKFeature;
 import htsjdk.variant.variantcontext.Allele;
 import htsjdk.variant.variantcontext.VariantContext;
-import org.broadinstitute.gatk.engine.contexts.AlignmentContext;
-import org.broadinstitute.gatk.engine.contexts.ReferenceContext;
-import org.broadinstitute.gatk.engine.refdata.RefMetaDataTracker;
+import org.broadinstitute.gatk.utils.contexts.AlignmentContext;
+import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
+import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
 import org.broadinstitute.gatk.engine.walkers.RodWalker;
 import org.broadinstitute.gatk.utils.Utils;
 import org.broadinstitute.gatk.utils.exceptions.UserException;
@@ -48,11 +48,13 @@ import java.lang.reflect.Array;
 import java.util.*;
 
 /**
- * Emits specific fields from a VCF file to a tab-deliminated table
+ * Extract specific fields from a VCF file to a tab-delimited table
  *
  * <p>
- * This walker accepts a single VCF file and writes out user-selected fields from the
- * VCF as a header-containing, tab-deliminated file.  The user specifies one or more
+ * This tool is designed to extract fields from the VCF to a table format that is more convenient to work with in
+ * downstream analyses.</p>
+ *
+ * <p>The user specifies one or more
  * fields to print with the -F NAME, each of which appears as a single column in
  * the output file, with a header named NAME, and the value of this field in the VCF
  * one per line.  NAME can be any standard VCF column (CHROM, ID, QUAL) or any binding
@@ -62,9 +64,7 @@ import java.util.*;
  * genotypes), NO-CALL (count of no-call genotypes), TYPE (the type of event), VAR (count of
  * non-reference genotypes), NSAMPLES (number of samples), NCALLED (number of called samples),
  * GQ (from the genotype field; works only for a file with a single sample), and MULTI-ALLELIC
- * (is the record from a multi-allelic site).  Note that if a VCF record is missing a value, then the tool by
- * default throws an error, but the special value NA can be emitted instead with
- * appropriate tool arguments.
+ * (is the record from a multi-allelic site).  </p>
  *
  * </p>
  *
@@ -81,7 +81,7 @@ import java.util.*;
  * A tab-delimited file containing the values of the requested fields in the VCF file
  * </p>
  *
- * <h3>Examples</h3>
+ * <h3>Usage example</h3>
  * <pre>
  *     java -jar GenomeAnalysisTK.jar \
  *     -R reference.fasta
@@ -89,15 +89,19 @@ import java.util.*;
  *     -V file.vcf \
  *     -F CHROM -F POS -F ID -F QUAL -F AC \
  *     -o results.table
- *
- *     would produce a file that looks like:
- *
+ * </pre>
+ * <p>would produce a file that looks like:</p>
+ * <pre>
  *     CHROM    POS ID      QUAL    AC
  *     1        10  .       50      1
  *     1        20  rs10    99      10
  *     et cetera...
  * </pre>
  *
+ * <h3>Caveat</h3>
+ * <p>If a VCF record is missing a value, then the tool by default throws an error, but the special value NA can
+ * be emitted instead if requested at the command line using --allowMissingData.</p>
+ *
  * @author Mark DePristo
  * @since 2010
  */
@@ -351,7 +355,7 @@ public class VariantsToTable extends RodWalker<Integer, Integer> {
         }
         // otherwise, add the original value to all of the records
         else {
-            final String valStr = val.toString();
+            final String valStr = prettyPrintObject(val);
             for ( List<String> record : result )
                 record.add(valStr);
         }
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/variantutils/VariantsToVCF.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/variantutils/VariantsToVCF.java
index 3e0e2ab..0511370 100644
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/variantutils/VariantsToVCF.java
+++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/variantutils/VariantsToVCF.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -30,22 +30,22 @@ import htsjdk.tribble.Feature;
 import org.broadinstitute.gatk.utils.commandline.*;
 import org.broadinstitute.gatk.engine.CommandLineGATK;
 import org.broadinstitute.gatk.engine.arguments.DbsnpArgumentCollection;
-import org.broadinstitute.gatk.engine.contexts.AlignmentContext;
-import org.broadinstitute.gatk.engine.contexts.ReferenceContext;
-import org.broadinstitute.gatk.engine.refdata.RefMetaDataTracker;
-import org.broadinstitute.gatk.engine.refdata.VariantContextAdaptors;
-import org.broadinstitute.gatk.engine.refdata.tracks.RMDTrackBuilder;
-import org.broadinstitute.gatk.engine.refdata.utils.GATKFeature;
+import org.broadinstitute.gatk.utils.contexts.AlignmentContext;
+import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
+import org.broadinstitute.gatk.utils.refdata.RefMetaDataTracker;
+import org.broadinstitute.gatk.utils.refdata.VariantContextAdaptors;
+import org.broadinstitute.gatk.utils.refdata.tracks.RMDTrackBuilder;
+import org.broadinstitute.gatk.utils.refdata.utils.GATKFeature;
 import org.broadinstitute.gatk.engine.walkers.Reference;
 import org.broadinstitute.gatk.engine.walkers.RodWalker;
 import org.broadinstitute.gatk.engine.walkers.Window;
 import org.broadinstitute.gatk.tools.walkers.annotator.VariantOverlapAnnotator;
 import org.broadinstitute.gatk.utils.BaseUtils;
 import org.broadinstitute.gatk.utils.GenomeLoc;
-import org.broadinstitute.gatk.utils.SampleUtils;
+import org.broadinstitute.gatk.engine.SampleUtils;
 import org.broadinstitute.gatk.utils.codecs.hapmap.RawHapMapFeature;
 import org.broadinstitute.gatk.utils.help.HelpConstants;
-import org.broadinstitute.gatk.utils.variant.GATKVCFUtils;
+import org.broadinstitute.gatk.engine.GATKVCFUtils;
 import org.broadinstitute.gatk.utils.variant.GATKVariantContextUtils;
 import htsjdk.variant.vcf.*;
 import org.broadinstitute.gatk.utils.exceptions.UserException;
@@ -58,14 +58,15 @@ import java.io.File;
 import java.util.*;
 
 /**
- * Converts variants from other file formats to VCF format.
+ * Convert variants from other file formats to VCF format
  *
  * <p>
- * Note that there must be a Tribble feature/codec for the file format as well as an adaptor.
+ * Note that there must be a Tribble feature/codec available for the file format as well as an adaptor.
+ * </p>
  *
  * <h3>Input</h3>
  * <p>
- * A variant file to filter.
+ * A variant file to convert.
  * </p>
  *
  * <h3>Output</h3>
@@ -73,14 +74,13 @@ import java.util.*;
  * A VCF file.
  * </p>
  *
- * <h3>Examples</h3>
+ * <h3>Usage example</h3>
  * <pre>
- * java -Xmx2g -jar GenomeAnalysisTK.jar \
- *   -R ref.fasta \
+ * java -jar GenomeAnalysisTK.jar \
  *   -T VariantsToVCF \
+ *   -R reference.fasta \
  *   -o output.vcf \
- *   --variant:RawHapMap input.hapmap \
- *   --dbsnp dbsnp.vcf
+ *   --variant:RawHapMap input.hapmap
  * </pre>
  *
  */
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/AutoFormattingTime.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/AutoFormattingTime.java
deleted file mode 100644
index 31032e3..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/AutoFormattingTime.java
+++ /dev/null
@@ -1,185 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils;
-
-import java.util.concurrent.TimeUnit;
-
-/**
- * Conveniently print a time with an automatically determined time unit
- *
- * For example, if the amount of time is 10^6 seconds, instead of printing
- * out 10^6 seconds, prints out 11.57 days instead.
- *
- * Dynamically uses time units:
- *
- *   - seconds: s
- *   - minutes: m
- *   - hours  : h
- *   - days   : d
- *   - weeks  : w
- *
- * @author depristo
- * @since 2009
- */
-public class AutoFormattingTime {
-    private static final double NANOSECONDS_PER_SECOND = 1e9;
-
-    /**
-     * Width a la format's %WIDTH.PERCISIONf
-     */
-    private final int width; // for format
-
-    /**
-     * Precision a la format's %WIDTH.PERCISIONf
-     */
-    private final int precision;      // for format
-
-    /**
-     * The elapsed time in nanoseconds
-     */
-    private final long nanoTime;
-
-    /**
-     * Create a new autoformatting time with elapsed time nanoTime in nanoseconds
-     * @param nanoTime the elapsed time in nanoseconds
-     * @param width the width >= 0 (a la format's %WIDTH.PERCISIONf) to use to display the format, or -1 if none is required
-     * @param precision the precision to display the time at.  Must be >= 0;
-     */
-    public AutoFormattingTime(final long nanoTime, final int width, int precision) {
-        if ( width < -1 ) throw new IllegalArgumentException("Width " + width + " must be >= -1");
-        if ( precision < 0 ) throw new IllegalArgumentException("Precision " + precision + " must be >= 0");
-
-        this.width = width;
-        this.nanoTime = nanoTime;
-        this.precision = precision;
-    }
-
-    /**
-     * @see #AutoFormattingTime(long, int, int) but with default width and precision
-     * @param nanoTime
-     */
-    public AutoFormattingTime(final long nanoTime) {
-        this(nanoTime, 6, 1);
-    }
-
-    /**
-     * @see #AutoFormattingTime(long, int, int) but with time specificied as a double in seconds
-     */
-    public AutoFormattingTime(final double timeInSeconds, final int width, final int precision) {
-        this(secondsToNano(timeInSeconds), width, precision);
-    }
-
-    /**
-     * @see #AutoFormattingTime(long) but with time specificied as a double in seconds
-     */
-    public AutoFormattingTime(double timeInSeconds) {
-        this(timeInSeconds, 6, 1);
-    }
-
-    /**
-     * Precomputed format string suitable for string.format with the required width and precision
-     */
-    private String getFormatString() {
-        final StringBuilder b = new StringBuilder("%");
-        if ( width != -1 )
-            b.append(width);
-        b.append(".").append(precision).append("f %s");
-        return b.toString();
-    }
-
-    /**
-     * Get the time associated with this object in nanoseconds
-     * @return the time in nanoseconds
-     */
-    public long getTimeInNanoSeconds() {
-        return nanoTime;
-    }
-
-    /**
-     * Get the time associated with this object in seconds, as a double
-     * @return time in seconds as a double
-     */
-    public double getTimeInSeconds() {
-        return TimeUnit.NANOSECONDS.toSeconds(getTimeInNanoSeconds());
-    }
-
-    /**
-     * @return the precision (a la format's %WIDTH.PERCISIONf)
-     */
-    public int getWidth() {
-        return width;
-    }
-
-    /**
-     * @return the precision (a la format's %WIDTH.PERCISIONf)
-     */
-    public int getPrecision() {
-        return precision;
-    }
-
-    /**
-     * Get a string representation of this time, automatically converting the time
-     * to a human readable unit with width and precision provided during construction
-     * @return a non-null string
-     */
-    public String toString() {
-        double unitTime = getTimeInSeconds();
-        String unit = "s";
-
-        if ( unitTime > 120 ) {
-            unitTime /= 60; // minutes
-            unit = "m";
-
-            if ( unitTime > 120 ) {
-                unitTime /= 60; // hours
-                unit = "h";
-
-                if ( unitTime > 100 ) {
-                    unitTime /= 24; // days
-                    unit = "d";
-
-                    if ( unitTime > 20 ) {
-                        unitTime /= 7; // weeks
-                        unit = "w";
-                    }
-                }
-            }
-        }
-
-        return String.format(getFormatString(), unitTime, unit);
-    }
-
-
-    /**
-     * Convert a time in seconds as a double into nanoseconds as a long
-     * @param timeInSeconds an elapsed time in seconds, as a double
-     * @return an equivalent value in nanoseconds as a long
-     */
-    private static long secondsToNano(final double timeInSeconds) {
-        return (long)(NANOSECONDS_PER_SECOND * timeInSeconds);
-    }
-
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/BaseUtils.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/BaseUtils.java
deleted file mode 100644
index 194db68..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/BaseUtils.java
+++ /dev/null
@@ -1,672 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils;
-
-import htsjdk.samtools.util.StringUtil;
-import org.broadinstitute.gatk.engine.GenomeAnalysisEngine;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-import org.broadinstitute.gatk.utils.exceptions.UserException;
-
-import java.util.Arrays;
-import java.util.Comparator;
-import java.util.Random;
-
-/**
- * BaseUtils contains some basic utilities for manipulating nucleotides.
- */
-public class BaseUtils {
-
-    public enum Base {
-        A ('A'),
-        C ('C'),
-        G ('G'),
-        T ('T'),
-        N ('N'),
-        D ('D');
-
-        public byte base;
-
-        private Base(final char base) {
-            this.base = (byte)base;
-        }
-    }
-
-    // todo -- add this to the generalized base abstraction using the Base enum.
-    public final static byte[] BASES = {'A', 'C', 'G', 'T'};
-    public final static byte[] EXTENDED_BASES = {'A', 'C', 'G', 'T', 'N', 'D'};
-
-    static private final int[] baseIndexMap = new int[256];
-    static {
-        Arrays.fill(baseIndexMap, -1);
-        baseIndexMap['A'] = Base.A.ordinal();
-        baseIndexMap['a'] = Base.A.ordinal();
-        baseIndexMap['*'] = Base.A.ordinal();    // the wildcard character counts as an A
-        baseIndexMap['C'] = Base.C.ordinal();
-        baseIndexMap['c'] = Base.C.ordinal();
-        baseIndexMap['G'] = Base.G.ordinal();
-        baseIndexMap['g'] = Base.G.ordinal();
-        baseIndexMap['T'] = Base.T.ordinal();
-        baseIndexMap['t'] = Base.T.ordinal();
-    }
-
-    static private final int[] baseIndexWithIupacMap = baseIndexMap.clone();
-    static {
-        baseIndexWithIupacMap['*'] = -1;    // the wildcard character is bad
-        baseIndexWithIupacMap['N'] = Base.N.ordinal();
-        baseIndexWithIupacMap['n'] = Base.N.ordinal();
-        baseIndexWithIupacMap['R'] = Base.N.ordinal();
-        baseIndexWithIupacMap['r'] = Base.N.ordinal();
-        baseIndexWithIupacMap['Y'] = Base.N.ordinal();
-        baseIndexWithIupacMap['y'] = Base.N.ordinal();
-        baseIndexWithIupacMap['M'] = Base.N.ordinal();
-        baseIndexWithIupacMap['m'] = Base.N.ordinal();
-        baseIndexWithIupacMap['K'] = Base.N.ordinal();
-        baseIndexWithIupacMap['k'] = Base.N.ordinal();
-        baseIndexWithIupacMap['W'] = Base.N.ordinal();
-        baseIndexWithIupacMap['w'] = Base.N.ordinal();
-        baseIndexWithIupacMap['S'] = Base.N.ordinal();
-        baseIndexWithIupacMap['s'] = Base.N.ordinal();
-        baseIndexWithIupacMap['B'] = Base.N.ordinal();
-        baseIndexWithIupacMap['b'] = Base.N.ordinal();
-        baseIndexWithIupacMap['D'] = Base.N.ordinal();
-        baseIndexWithIupacMap['d'] = Base.N.ordinal();
-        baseIndexWithIupacMap['H'] = Base.N.ordinal();
-        baseIndexWithIupacMap['h'] = Base.N.ordinal();
-        baseIndexWithIupacMap['V'] = Base.N.ordinal();
-        baseIndexWithIupacMap['v'] = Base.N.ordinal();
-    }
-
-    /// In genetics, a transition is a mutation changing a purine to another purine nucleotide (A <-> G) or
-    // a pyrimidine to another pyrimidine nucleotide (C <-> T).
-    // Approximately two out of every three single nucleotide polymorphisms (SNPs) are transitions.
-    public enum BaseSubstitutionType {
-        TRANSITION,         // A <-> G or C <-> T
-        TRANSVERSION
-    }
-
-    /**
-     * Returns the base substitution type of the 2 state SNP
-     *
-     * @param base1
-     * @param base2
-     * @return
-     */
-    public static BaseSubstitutionType SNPSubstitutionType(byte base1, byte base2) {
-        BaseSubstitutionType t = isTransition(base1, base2) ? BaseSubstitutionType.TRANSITION : BaseSubstitutionType.TRANSVERSION;
-        //System.out.printf("SNPSubstitutionType( char %c, char %c ) => %s%n", base1, base2, t);
-        return t;
-    }
-
-    public static boolean isTransition(byte base1, byte base2) {
-        final int b1 = simpleBaseToBaseIndex(base1);
-        final int b2 = simpleBaseToBaseIndex(base2);
-        return b1 == Base.A.ordinal() && b2 == Base.G.ordinal() || b1 == Base.G.ordinal() && b2 == Base.A.ordinal() ||
-                b1 == Base.C.ordinal() && b2 == Base.T.ordinal() || b1 == Base.T.ordinal() && b2 == Base.C.ordinal();
-    }
-
-    public static boolean isTransversion(byte base1, byte base2) {
-        return !isTransition(base1, base2);
-    }
-
-    /**
-     * Private constructor.  No instantiating this class!
-     */
-    private BaseUtils() {}
-
-    static public boolean basesAreEqual(byte base1, byte base2) {
-        return simpleBaseToBaseIndex(base1) == simpleBaseToBaseIndex(base2);
-    }
-
-    /**
-     * Checks whether to bases are the same in fact ignore ambiguous 'N' bases.
-     *
-     * @param base1 first base to compare.
-     * @param base2 second base to compare.
-     * @return true if {@code base1 == base2} or either is an 'N', false otherwise.
-     */
-    static public boolean basesAreEqualIgnoreAmbiguous(final byte base1, final byte base2) {
-        if (base1 == base2) return true;
-        else if (base1 == 'n' || base1 == 'N' || base2 == 'N' || base2 == 'n') return true;
-        else return false;
-    }
-
-    /**
-     * Compare to base arrays ranges checking whether they contain the same bases.
-     *
-     * <p>
-     *     By default two array have equal bases, i.e. {@code length == 0} results results in {@code true}.
-     * </p>
-     *
-     * @param bases1 first base array to compare.
-     * @param offset1 position of the first base in bases1 to compare.
-     * @param bases2 second base array to compare.
-     * @param offset2 position of the first base in bases2 to compare.
-     * @param length number of bases to compare.
-     *
-     * @throws NullPointerException if {@code bases1} or {@code bases2} is {@code null}.
-     * @throws ArrayIndexOutOfBoundsException if:
-     * <ul>
-     *      <li>{@code offset1} is not within the range [0,{@code bases1.length}) or</li>
-     *     <li>{@code offset2} is not within the range [0,{@code bases2.length}) or</li>
-     *     <li>{@code offset1 + length} is not within the range [0,{@code bases1.length}) or </li>
-     *     <li>{@code offset2 + length} is not within the range [0,{@code bases2.length})</li>
-     * </ul>
-     * @return
-     */
-    static public boolean basesAreEqualIgnoreAmbiguous(final byte[] bases1, final int offset1, final byte[] bases2, final int offset2, final int length) {
-        for (int i = 0; i < length; i++)
-            if (!basesAreEqualIgnoreAmbiguous(bases1[offset1 + i],bases2[offset2 + i])) return false;
-        return true;
-    }
-
-    static public boolean extendedBasesAreEqual(byte base1, byte base2) {
-        return extendedBaseToBaseIndex(base1) == extendedBaseToBaseIndex(base2);
-    }
-
-    /**
-     * @return true iff the bases array contains at least one instance of base
-     */
-    static public boolean containsBase(final byte[] bases, final byte base) {
-        for ( final byte b : bases ) {
-            if ( b == base )
-                return true;
-        }
-        return false;
-    }
-
-    public static boolean isUpperCase(final byte[] bases) {
-        for ( byte base : bases )
-            if ( ! isUpperCase(base) )
-                return false;
-        return true;
-    }
-
-    public static boolean isUpperCase(final byte base) {
-        return base >= 'A' && base <= 'Z';
-    }
-
-    public static byte[] convertIUPACtoN(final byte[] bases, final boolean errorOnBadReferenceBase, final boolean ignoreConversionOfFirstByte) {
-        final int length = bases.length;
-        final int start = ignoreConversionOfFirstByte ? 1 : 0;
-
-        for ( int i = start; i < length; i++ ) {
-            final int baseIndex = baseIndexWithIupacMap[bases[i]];
-            if ( baseIndex == Base.N.ordinal() ) {
-                bases[i] = 'N';
-            } else if ( errorOnBadReferenceBase && baseIndex == -1 ) {
-                throw new UserException.BadInput("We encountered a non-standard non-IUPAC base in the provided reference: '" + bases[i] + "'");
-            }
-        }
-        return bases;
-    }
-
-    /**
-     * Converts a IUPAC nucleotide code to a pair of bases
-     *
-     * @param code
-     * @return 0, 1, 2, 3, or -1 if the base can't be understood
-     */
-    @Deprecated
-    static public char[] iupacToBases(char code) {
-        char[] bases = new char[2];
-        switch (code) {
-            case '*':               // the wildcard character counts as an A
-            case 'A':
-            case 'a':
-                bases[0] = bases[1] = 'A';
-                break;
-            case 'C':
-            case 'c':
-                bases[0] = bases[1] = 'C';
-                break;
-            case 'G':
-            case 'g':
-                bases[0] = bases[1] = 'G';
-                break;
-            case 'T':
-            case 't':
-                bases[0] = bases[1] = 'T';
-                break;
-            case 'R':
-            case 'r':
-                bases[0] = 'A';
-                bases[1] = 'G';
-                break;
-            case 'Y':
-            case 'y':
-                bases[0] = 'C';
-                bases[1] = 'T';
-                break;
-            case 'S':
-            case 's':
-                bases[0] = 'G';
-                bases[1] = 'C';
-                break;
-            case 'W':
-            case 'w':
-                bases[0] = 'A';
-                bases[1] = 'T';
-                break;
-            case 'K':
-            case 'k':
-                bases[0] = 'G';
-                bases[1] = 'T';
-                break;
-            case 'M':
-            case 'm':
-                bases[0] = 'A';
-                bases[1] = 'C';
-                break;
-            default:
-                bases[0] = bases[1] = 'N';
-        }
-        return bases;
-    }
-
-    /**
-     * Converts a pair of bases to their IUPAC ambiguity code
-     *
-     * @param base1  1st base
-     * @param base2  2nd base
-     * @return byte
-     */
-    static public byte basesToIUPAC(final byte base1, final byte base2) {
-        // ensure that the bases come in order
-        if ( base2 < base1 )
-            return basesToIUPAC(base2, base1);
-
-        // ensure that the bases are regular ones
-        if ( !isRegularBase(base1) || !isRegularBase(base2) )
-            return Base.N.base;
-
-        // IUPAC codes are not needed if the bases are identical
-        if ( basesAreEqual(base1, base2) )
-            return base1;
-
-        if ( base1 == Base.A.base )
-            return (byte)(base2 == Base.C.base ? 'M' : (base2 == Base.G.base ? 'R' : 'W'));
-
-        if ( base1 == Base.C.base )
-            return (byte)(base2 == Base.G.base ? 'S' : 'Y');
-
-        // the only possibility left is G/T
-        return 'K';
-    }
-
-    /**
-     * Converts a simple base to a base index
-     *
-     * @param base [AaCcGgTt]
-     * @return 0, 1, 2, 3, or -1 if the base can't be understood
-     */
-    static public int simpleBaseToBaseIndex(final byte base) {
-        if ( base < 0 || base >= 256 )
-            throw new UserException.BadInput("Non-standard bases were encountered in either the input reference or BAM file(s)");
-        return baseIndexMap[base];
-    }
-
-    /**
-     * Converts a simple base to a base index
-     *
-     * @param base [AaCcGgTt]
-     * @return 0, 1, 2, 3, or -1 if the base can't be understood
-     */
-    @Deprecated
-    static public int simpleBaseToBaseIndex(char base) {
-        return baseIndexMap[base];
-    }
-
-    static public int extendedBaseToBaseIndex(byte base) {
-        switch (base) {
-            case 'd':
-            case 'D':
-                return Base.D.ordinal();
-            case 'n':
-            case 'N':
-                return Base.N.ordinal();
-
-            default:
-                return simpleBaseToBaseIndex(base);
-        }
-    }
-
-    @Deprecated
-    static public boolean isRegularBase( final char base ) {
-        return simpleBaseToBaseIndex(base) != -1;
-    }
-
-    static public boolean isRegularBase( final byte base ) {
-        return simpleBaseToBaseIndex(base) != -1;
-    }
-
-    static public boolean isAllRegularBases( final byte[] bases ) {
-        for( final byte base : bases) {
-            if( !isRegularBase(base) ) { return false; }
-        }
-        return true;
-    }
-
-    static public boolean isNBase(byte base) {
-        return base == 'N' || base == 'n';
-    }
-
-    /**
-     * Converts a base index to a simple base
-     *
-     * @param baseIndex 0, 1, 2, 3
-     * @return A, C, G, T, or '.' if the index can't be understood
-     */
-    static public byte baseIndexToSimpleBase(int baseIndex) {
-        switch (baseIndex) {
-            case 0:
-                return 'A';
-            case 1:
-                return 'C';
-            case 2:
-                return 'G';
-            case 3:
-                return 'T';
-            default:
-                return '.';
-        }
-    }
-
-    /**
-     * Return the complement (A <-> T or C <-> G) of a base, or the specified base if it can't be complemented (i.e. an ambiguous base).
-     *
-     * @param base the base [AaCcGgTt]
-     * @return the complementary base, or the input base if it's not one of the understood ones
-     */
-    static public byte simpleComplement(byte base) {
-        switch (base) {
-            case 'A':
-            case 'a':
-                return 'T';
-            case 'C':
-            case 'c':
-                return 'G';
-            case 'G':
-            case 'g':
-                return 'C';
-            case 'T':
-            case 't':
-                return 'A';
-            default:
-                return base;
-        }
-    }
-
-    @Deprecated
-    static private char simpleComplement(char base) {
-        return (char) simpleComplement((byte) base);
-    }
-
-    /**
-     * Reverse complement a byte array of bases (that is, chars casted to bytes, *not* base indices in byte form)
-     *
-     * @param bases the byte array of bases
-     * @return the reverse complement of the base byte array
-     */
-    static public byte[] simpleReverseComplement(byte[] bases) {
-        byte[] rcbases = new byte[bases.length];
-
-        for (int i = 0; i < bases.length; i++) {
-            rcbases[i] = simpleComplement(bases[bases.length - 1 - i]);
-        }
-
-        return rcbases;
-    }
-
-    /**
-     * Reverse complement a char array of bases
-     *
-     * @param bases the char array of bases
-     * @return the reverse complement of the char byte array
-     */
-    @Deprecated
-    static public char[] simpleReverseComplement(char[] bases) {
-        char[] rcbases = new char[bases.length];
-
-        for (int i = 0; i < bases.length; i++) {
-            rcbases[i] = simpleComplement(bases[bases.length - 1 - i]);
-        }
-
-        return rcbases;
-    }
-
-    /**
-     * Reverse complement a String of bases.  Preserves ambiguous bases.
-     *
-     * @param bases the String of bases
-     * @return the reverse complement of the String
-     */
-    @Deprecated
-    static public String simpleReverseComplement(String bases) {
-        return new String(simpleReverseComplement(bases.getBytes()));
-    }
-
-    /**
-     * Returns the uppercased version of the bases
-     *
-     * @param bases   the bases
-     * @return the upper cased version
-     */
-    static public void convertToUpperCase(final byte[] bases) {
-        StringUtil.toUpperCase(bases);
-    }
-
-    /**
-     * Returns the index of the most common base in the basecounts array. To be used with
-     * pileup.getBaseCounts.
-     *
-     * @param baseCounts counts of a,c,g,t in order.
-     * @return the index of the most common base
-     */
-    static public int mostFrequentBaseIndex(int[] baseCounts) {
-        int mostFrequentBaseIndex = 0;
-        for (int baseIndex = 1; baseIndex < 4; baseIndex++) {
-            if (baseCounts[baseIndex] > baseCounts[mostFrequentBaseIndex]) {
-                mostFrequentBaseIndex = baseIndex;
-            }
-        }
-        return mostFrequentBaseIndex;
-    }
-
-    static public int mostFrequentBaseIndexNotRef(int[] baseCounts, int refBaseIndex) {
-        int tmp = baseCounts[refBaseIndex];
-        baseCounts[refBaseIndex] = -1;
-        int result = mostFrequentBaseIndex(baseCounts);
-        baseCounts[refBaseIndex] = tmp;
-        return result;
-    }
-
-    static public int mostFrequentBaseIndexNotRef(int[] baseCounts, byte refSimpleBase) {
-        return mostFrequentBaseIndexNotRef(baseCounts, simpleBaseToBaseIndex(refSimpleBase));
-    }
-
-    /**
-     * Returns the most common base in the basecounts array. To be used with pileup.getBaseCounts.
-     *
-     * @param baseCounts counts of a,c,g,t in order.
-     * @return the most common base
-     */
-    static public byte mostFrequentSimpleBase(int[] baseCounts) {
-        return baseIndexToSimpleBase(mostFrequentBaseIndex(baseCounts));
-    }
-
-    /**
-     * For the most frequent base in the sequence, return the percentage of the read it constitutes.
-     *
-     * @param sequence the read sequence
-     * @return the percentage of the read that's made up of the most frequent base
-     */
-    static public double mostFrequentBaseFraction(byte[] sequence) {
-        int[] baseCounts = new int[4];
-
-        for (byte base : sequence) {
-            int baseIndex = simpleBaseToBaseIndex(base);
-
-            if (baseIndex >= 0) {
-                baseCounts[baseIndex]++;
-            }
-        }
-
-        int mostFrequentBaseIndex = mostFrequentBaseIndex(baseCounts);
-
-        return ((double) baseCounts[mostFrequentBaseIndex]) / ((double) sequence.length);
-    }
-
-    // --------------------------------------------------------------------------------
-    //
-    // random bases
-    //
-    // --------------------------------------------------------------------------------
-
-    /**
-     * Return a random base index (A=0, C=1, G=2, T=3).
-     *
-     * @return a random base index (A=0, C=1, G=2, T=3)
-     */
-    static public int getRandomBaseIndex() {
-        return getRandomBaseIndex(-1);
-    }
-
-    /**
-     * Return random bases.
-     *
-     * @param length base count and length of returned array.
-     *
-     * @throws IllegalArgumentException if {@code length} is less than 0.
-     *
-     * @return never {@code null}
-     */
-    @SuppressWarnings("unused")
-    public static byte[] getRandomBases(final int length) {
-        if (length < 0)
-            throw new IllegalArgumentException("length must zero or greater");
-        final byte[] result = new byte[length];
-        fillWithRandomBases(result);
-        return result;
-    }
-
-    /**
-     * Fills an array with random bases.
-     *
-     * @param dest the array to fill.
-     *
-     * @throws IllegalArgumentException if {@code result} is {@code null}.
-     */
-    public static void fillWithRandomBases(final byte[] dest) {
-        fillWithRandomBases(dest,0,dest.length);
-    }
-
-    /**
-     * Fill an array section with random bases.
-     *
-     * @param dest array to fill.
-     * @param fromIndex first index to be filled (inclusive).
-     * @param toIndex index after last to be filled (exclusive).
-     *
-     * @throws IllegalArgumentException if {@code dest} is {@code null},
-     *              {@code fromIndex} or {@code toIndex} is negative,
-     *              {@code fromIndex} or {@code toIndex} are greater than {@code dest} length,
-     *              or {@code fromIndex} greater than {@code toIndex}.
-     */
-    public static void fillWithRandomBases(final byte[] dest, final int fromIndex, final int toIndex) {
-        final Random rnd = GenomeAnalysisEngine.getRandomGenerator();
-        if (dest == null)
-            throw new IllegalArgumentException("the dest array cannot be null");
-        if (fromIndex > toIndex)
-            throw new IllegalArgumentException("fromIndex cannot be larger than toIndex");
-        if (fromIndex < 0)
-            throw new IllegalArgumentException("both indexes must be positive");
-        if (toIndex > dest.length)
-            throw new IllegalArgumentException("both indexes must be less or equal to the destination array length");
-
-        for (int i = fromIndex; i < toIndex; i++)
-            dest[i] = baseIndexToSimpleBase(rnd.nextInt(4));
-    }
-
-    /**
-     * Return a random base index, excluding some base index.
-     *
-     * @param excludeBaseIndex the base index to exclude
-     * @return a random base index, excluding the one specified (A=0, C=1, G=2, T=3)
-     */
-    static public int getRandomBaseIndex(int excludeBaseIndex) {
-        int randomBaseIndex = excludeBaseIndex;
-
-        while (randomBaseIndex == excludeBaseIndex) {
-            randomBaseIndex = GenomeAnalysisEngine.getRandomGenerator().nextInt(4);
-        }
-
-        return randomBaseIndex;
-    }
-
-    public static byte getComplement(byte base) {
-        switch(base) {
-            case 'a':
-            case 'A':
-                return 'T';
-            case 'c':
-            case 'C':
-                return 'G';
-            case 'g':
-            case 'G':
-                return 'C';
-            case 't':
-            case 'T':
-                return 'A';
-            case 'n':
-            case 'N':
-                return 'N';
-            default:
-                throw new ReviewedGATKException("base must be A, C, G or T. " + (char) base + " is not a valid base.");
-        }
-    }
-
-
-    /**
-     * Lexicographical sorting of base arrays {@link Comparator}.
-     */
-    public static final Comparator<byte[]> BASES_COMPARATOR = new Comparator<byte[]> (){
-
-        @Override
-        public int compare(final byte[] o1,final byte[] o2) {
-            final int minLength = Math.min(o1.length,o2.length);
-            for (int i = 0; i < minLength; i++) {
-                final int cmp = Byte.compare(o1[i],o2[i]);
-                if (cmp != 0) return cmp;
-            }
-            if (o1.length == o2.length)
-                return 0;
-            else if (o1.length == minLength)
-                return -1;
-            else
-                return 1;
-        }
-    };
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/BitSetUtils.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/BitSetUtils.java
deleted file mode 100644
index a9ab00d..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/BitSetUtils.java
+++ /dev/null
@@ -1,134 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils;
-
-import java.util.BitSet;
-import java.util.HashMap;
-import java.util.Map;
-
-/**
- * Utilities for bitset conversion
- *
- * @author Mauricio Carneiro
- * @since 3/5/12
- */
-public class BitSetUtils {
-
-    static final private byte NBITS_LONG_REPRESENTATION = 64;                   // the number of bits used in the long version to represent the bit set (necessary for the two's complement representation of negative numbers)
-    static final private byte NBITS_SHORT_REPRESENTATION = 16;                  // the number of bits used in the short version to represent the bit set (necessary for the two's complement representation of negative numbers)
-
-    /**
-     * Creates an long out of a bitset
-     *
-     * @param bitSet the bitset
-     * @return a long from the bitset representation
-     */
-    public static long longFrom(final BitSet bitSet) {
-        return longFrom(bitSet, NBITS_LONG_REPRESENTATION);
-    }
-
-    /**
-     * Creates a short integer from a bitset
-     *
-     * @param bitSet the bitset
-     * @return a short from the bitset representation
-     */
-    public static short shortFrom(final BitSet bitSet) {
-        return (short) longFrom(bitSet, NBITS_SHORT_REPRESENTATION);
-    }
-
-    /**
-     * Cretes an integer with any number of bits (up to 64 -- long precision) from a bitset
-     *
-     * @param bitSet the bitset
-     * @param nBits  the number of bits to be used for this representation
-     * @return an integer with nBits from the bitset representation
-     */
-    public static long longFrom(final BitSet bitSet, final int nBits) {
-        long number = 0;
-        for (int bitIndex = bitSet.nextSetBit(0); bitIndex >= 0 && bitIndex <= nBits; bitIndex = bitSet.nextSetBit(bitIndex + 1))
-            number |= 1L << bitIndex;
-
-        return number;
-    }
-
-    /**
-     * Creates a BitSet representation of a given long
-     *
-     * @param number the number to turn into a bitset
-     * @return a bitset representation of the long
-     */
-    public static BitSet bitSetFrom(long number) {
-        return bitSetFrom(number, NBITS_LONG_REPRESENTATION);
-    }
-
-    /**
-     * Creates a BitSet representation of a given short
-     *
-     * @param number the number to turn into a bitset
-     * @return a bitset representation of the short
-     */
-    public static BitSet bitSetFrom(short number) {
-        BitSet result = shortCache.get(number);
-        if (result == null) {
-            result = bitSetFrom(number, NBITS_SHORT_REPRESENTATION);
-            shortCache.put(number, result);
-        }
-        return result;
-    }
-    // use a static cache for shorts (but not for longs, because there could be a lot of entries)
-    private static final Map<Short, BitSet> shortCache = new HashMap<Short, BitSet>(2 * Short.MAX_VALUE);
-
-    /**
-     * Creates a BitSet representation of an arbitrary integer (number of bits capped at 64 -- long precision)
-     *
-     * @param number the number to turn into a bitset
-     * @param nBits  the number of bits to use as precision for this conversion
-     * @return a bitset representation of the integer
-     */
-    public static BitSet bitSetFrom(long number, int nBits) {
-        BitSet bitSet = new BitSet(nBits);
-        boolean isNegative = number < 0;
-        int bitIndex = 0;
-        while (number != 0) {
-            if (number % 2 != 0)
-                bitSet.set(bitIndex);
-            bitIndex++;
-            number /= 2;
-        }
-        if (isNegative) {
-            boolean foundFirstSetBit = false;
-            for (int i = bitSet.nextSetBit(0); i < nBits && i >= 0; i++) {
-                boolean bit = bitSet.get(i);
-                if (!foundFirstSetBit && bit)
-                    foundFirstSetBit = true;    // maintain all bits until the first 1 is found (inclusive)
-                else if (foundFirstSetBit)
-                    bitSet.flip(i);             // flip every other bit up to NBITS_REPRESENTATION
-            }
-        }
-        return bitSet;
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/ContigComparator.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/ContigComparator.java
deleted file mode 100644
index f3f93b4..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/ContigComparator.java
+++ /dev/null
@@ -1,80 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils;
-
-import htsjdk.samtools.SAMSequenceDictionary;
-
-import java.util.Comparator;
-import java.util.Set;
-import java.util.TreeSet;
-
-/**
- * Created by IntelliJ IDEA.
- * User: carneiro
- * Date: 7/23/11
- * Time: 6:07 PM
- *
- * Contig comparator -- sorting contigs like Picard
- *
- *   This is very useful if you want to output your text files or manipulate data in the usual chromosome ordering :
- *    1
- *    2
- *    3
- *    ...
- *    21
- *    22
- *    X
- *    Y
- *    GL***
- *    ...
- * Just use this comparator in any SortedSet class constructor and your data will be sorted like in the BAM file.
- */
-public class ContigComparator implements Comparator<String> {
-    final SAMSequenceDictionary dict;
-
-    public ContigComparator(final SAMSequenceDictionary dict) {
-        if ( dict == null ) throw new IllegalArgumentException("dict cannot be null");
-        this.dict = dict;
-    }
-
-    @Override
-    public int compare(final String chr1, final String chr2) {
-        final int index1 = getIndex(chr1);
-        final int index2 = getIndex(chr2);
-        return Integer.valueOf(index1).compareTo(index2);
-    }
-
-    /**
-     * Convert contig to its index in the dict, or throw an exception if it's not found or is null
-     * @param chr the contig
-     */
-    private int getIndex(final String chr) {
-        if ( chr == null ) throw new IllegalArgumentException("chr is null");
-        final int index = dict.getSequenceIndex(chr);
-        if ( index == -1 ) throw new IllegalArgumentException("Unknown contig " + chr);
-        return index;
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/DeprecatedToolChecks.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/DeprecatedToolChecks.java
deleted file mode 100644
index 9fcd848..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/DeprecatedToolChecks.java
+++ /dev/null
@@ -1,96 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils;
-
-import it.unimi.dsi.fastutil.objects.Object2ObjectMap;
-import it.unimi.dsi.fastutil.objects.Object2ObjectOpenHashMap;
-
-import java.util.*;
-
-/**
- * Utility class for handling deprecated tools gracefully
- *
- * @author vdauwera
- * @since 3/11/13
- */
-public class DeprecatedToolChecks {
-
-    // Mapping from walker name to major version number where the walker first disappeared and optional replacement options
-    private static Object2ObjectMap deprecatedGATKWalkers = new Object2ObjectOpenHashMap();
-    static {
-        // Indicate recommended replacement in parentheses if applicable
-        deprecatedGATKWalkers.put("ReduceReads", "3.0 (use recommended best practices pipeline with the HaplotypeCaller)");
-        deprecatedGATKWalkers.put("CountCovariates", "2.0 (use BaseRecalibrator instead; see documentation for usage)");
-        deprecatedGATKWalkers.put("TableRecalibration", "2.0 (use PrintReads with -BQSR instead; see documentation for usage)");
-        deprecatedGATKWalkers.put("AlignmentWalker", "2.2 (no replacement)");
-        deprecatedGATKWalkers.put("CountBestAlignments", "2.2 (no replacement)");
-        deprecatedGATKWalkers.put("SomaticIndelDetector", "2.0 (replaced by the standalone tool Indelocator; see Cancer Tools documentation)");
-    }
-
-    // Mapping from walker name to major version number where the walker first disappeared and optional replacement options
-    private static Object2ObjectMap deprecatedGATKAnnotations = new Object2ObjectOpenHashMap();
-    static {
-        // Same comments as for walkers
-        deprecatedGATKAnnotations.put("DepthOfCoverage", "2.4 (renamed to Coverage)");
-    }
-
-    /**
-     * Utility method to check whether a given walker has been deprecated in a previous GATK release
-     *
-     * @param walkerName   the walker class name (not the full package) to check
-     */
-    public static boolean isDeprecatedWalker(final String walkerName) {
-        return deprecatedGATKWalkers.containsKey(walkerName);
-    }
-
-    /**
-     * Utility method to check whether a given annotation has been deprecated in a previous GATK release
-     *
-     * @param annotationName   the annotation class name (not the full package) to check
-     */
-    public static boolean isDeprecatedAnnotation(final String annotationName) {
-        return deprecatedGATKAnnotations.containsKey(annotationName);
-    }
-
-    /**
-     * Utility method to pull up the version number at which a walker was deprecated and the suggested replacement, if any
-     *
-     * @param walkerName   the walker class name (not the full package) to check
-     */
-    public static String getWalkerDeprecationInfo(final String walkerName) {
-        return deprecatedGATKWalkers.get(walkerName).toString();
-    }
-
-    /**
-     * Utility method to pull up the version number at which an annotation was deprecated and the suggested replacement, if any
-     *
-     * @param annotationName   the annotation class name (not the full package) to check
-     */
-    public static String getAnnotationDeprecationInfo(final String annotationName) {
-        return deprecatedGATKAnnotations.get(annotationName).toString();
-    }
-
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/GenomeLocParser.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/GenomeLocParser.java
deleted file mode 100644
index 55e6624..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/GenomeLocParser.java
+++ /dev/null
@@ -1,622 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils;
-
-import com.google.java.contract.Ensures;
-import com.google.java.contract.Requires;
-import com.google.java.contract.ThrowEnsures;
-import htsjdk.samtools.reference.ReferenceSequenceFile;
-import htsjdk.samtools.SAMRecord;
-import htsjdk.samtools.SAMSequenceDictionary;
-import htsjdk.samtools.SAMSequenceRecord;
-import org.apache.log4j.Logger;
-import htsjdk.tribble.Feature;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-import org.broadinstitute.gatk.utils.exceptions.UserException;
-
-/**
- * Factory class for creating GenomeLocs
- */
-public final class GenomeLocParser {
-    private static Logger logger = Logger.getLogger(GenomeLocParser.class);
-
-    /**
-     * How much validation should we do at runtime with this parser?
-     */
-    public enum ValidationLevel {
-        /** Do the standard amount of validation */
-        STANDARD,
-        /** Don't do any real checking at all */
-        NONE
-    }
-
-    // --------------------------------------------------------------------------------------------------------------
-    //
-    // Ugly global variable defining the optional ordering of contig elements
-    //
-    // --------------------------------------------------------------------------------------------------------------
-
-    /**
-     * This single variable holds the underlying SamSequenceDictionary used by the GATK.  We assume
-     * it is thread safe.
-     */
-    final private SAMSequenceDictionary SINGLE_MASTER_SEQUENCE_DICTIONARY;
-
-    /**
-     * A thread-local CachingSequenceDictionary
-     */
-    private final ThreadLocal<MRUCachingSAMSequenceDictionary> contigInfoPerThread =
-            new ThreadLocal<MRUCachingSAMSequenceDictionary>() {
-                @Override
-                protected MRUCachingSAMSequenceDictionary initialValue() {
-                    return new MRUCachingSAMSequenceDictionary(SINGLE_MASTER_SEQUENCE_DICTIONARY);
-                }
-            };
-
-    /**
-     * How much validation are we doing at runtime with this GenomeLocParser?
-     */
-    private final ValidationLevel validationLevel;
-
-    /**
-     * @return a caching sequence dictionary appropriate for this thread
-     */
-    private MRUCachingSAMSequenceDictionary getContigInfo() {
-        return contigInfoPerThread.get();
-    }
-
-    /**
-     * set our internal reference contig order
-     * @param refFile the reference file
-     */
-    @Requires("refFile != null")
-    public GenomeLocParser(final ReferenceSequenceFile refFile) {
-        this(refFile.getSequenceDictionary());
-    }
-
-    /**
-     * Create a new GenomeLocParser based on seqDictionary with the standard validation level
-     * @param seqDict a non-null sequence dictionary
-     */
-    public GenomeLocParser(SAMSequenceDictionary seqDict) {
-        this(seqDict, ValidationLevel.STANDARD);
-    }
-
-    /**
-     * Create a genome loc parser based on seqDict with the specified level of validation
-     * @param seqDict the sequence dictionary to use when creating genome locs
-     * @param validationLevel how much validation should we do of the genome locs at runtime? Purely for testing purposes
-     */
-    protected GenomeLocParser(SAMSequenceDictionary seqDict, final ValidationLevel validationLevel) {
-        if (validationLevel == null)
-            throw new IllegalArgumentException("validation level cannot be null");
-        if (seqDict == null) { // we couldn't load the reference dictionary
-            //logger.info("Failed to load reference dictionary, falling back to lexicographic order for contigs");
-            throw new UserException.CommandLineException("Failed to load reference dictionary");
-        }
-
-        this.validationLevel = validationLevel;
-        this.SINGLE_MASTER_SEQUENCE_DICTIONARY = seqDict;
-        if ( logger.isDebugEnabled() ) {
-            logger.debug(String.format("Prepared reference sequence contig dictionary"));
-            for (SAMSequenceRecord contig : seqDict.getSequences()) {
-                logger.debug(String.format(" %s (%d bp)", contig.getSequenceName(), contig.getSequenceLength()));
-            }
-        }
-    }
-
-    /**
-     * Determines whether the given contig is valid with respect to the sequence dictionary
-     * already installed in the GenomeLoc.
-     *
-     * @param contig a potentially null string name for the contig
-     * @return True if the contig is valid.  False otherwise.
-     */
-    public final boolean contigIsInDictionary(final String contig) {
-        return contig != null && getContigInfo().hasContig(contig);
-    }
-
-    /**
-     * get the contig's SAMSequenceRecord
-     *
-     * @param contig the string name of the contig
-     *
-     * @return the sam sequence record
-     */
-    @Ensures("result != null")
-    @ThrowEnsures({"UserException.MalformedGenomeLoc", "!contigIsInDictionary(contig) || contig == null"})
-    public final SAMSequenceRecord getContigInfo(final String contig) {
-        if ( contig == null || ! contigIsInDictionary(contig) )
-            throw new UserException.MalformedGenomeLoc(String.format("Contig %s given as location, but this contig isn't present in the Fasta sequence dictionary", contig));
-        return getContigInfo().getSequence(contig);
-    }
-
-    /**
-     * Returns the contig index of a specified string version of the contig
-     *
-     * @param contig the contig string
-     *
-     * @return the contig index, -1 if not found
-     */
-    @Ensures("result >= 0")
-    @ThrowEnsures({"UserException.MalformedGenomeLoc", "!contigIsInDictionary(contig) || contig == null"})
-    public final int getContigIndex(final String contig) {
-        return getContigInfo(contig).getSequenceIndex();
-    }
-
-    @Requires("contig != null")
-    protected int getContigIndexWithoutException(final String contig) {
-        if ( contig == null || ! getContigInfo().hasContig(contig) )
-            return -1;
-        return getContigInfo().getSequenceIndex(contig);
-    }
-
-    /**
-     * Return the master sequence dictionary used within this GenomeLocParser
-     * @return
-     */
-    public final SAMSequenceDictionary getContigs() {
-        return getContigInfo().getDictionary();
-    }
-
-    // --------------------------------------------------------------------------------------------------------------
-    //
-    // Low-level creation functions
-    //
-    // --------------------------------------------------------------------------------------------------------------
-
-    /**
-     * @see #createGenomeLoc(String, int, int, int, boolean) for exact details of the creation.
-     *
-     * Note that because this function doesn't take the contig index as an argument for contig, it
-     * has a slight performance penalty over the version that does take the contig index.  Does not
-     * require the created genome loc on the reference genome
-     */
-    @Ensures("result != null")
-    @ThrowEnsures({"UserException.MalformedGenomeLoc", "!isValidGenomeLoc(contig, start, stop)"})
-    public GenomeLoc createGenomeLoc(String contig, final int start, final int stop) {
-        return createGenomeLoc(contig, getContigIndex(contig), start, stop);
-    }
-
-    /**
-     * @see #createGenomeLoc(String, int, int, int, boolean) for exact details of the creation.
-     *
-     * Note that because this function doesn't take the contig index as an argument for contig, it
-     * has a slight performance penalty over the version that does take the contig index.
-     */
-    public GenomeLoc createGenomeLoc(final String contig, final int start, final int stop, boolean mustBeOnReference) {
-        return createGenomeLoc(contig, getContigIndex(contig), start, stop, mustBeOnReference);
-    }
-
-    /**
-     * @see #createGenomeLoc(String, int, int, int, boolean) for exact details of the creation.
-     *
-     * Doesn't require the start and stop to be on the genome
-     */
-    @ThrowEnsures({"UserException.MalformedGenomeLoc", "!isValidGenomeLoc(contig, start, stop, false)"})
-    public GenomeLoc createGenomeLoc(String contig, int index, final int start, final int stop) {
-        return createGenomeLoc(contig, index, start, stop, false);
-    }
-
-    /**
-     * Create a GenomeLoc on contig, starting at start and ending (inclusive) at stop.
-     *
-     * @param contig the contig name
-     * @param index the index into the GATK's SAMSequencingDictionary of contig (passed for efficiency to avoid the lookup)
-     * @param start the starting position
-     * @param stop  the stop position of this loc, inclusive
-     * @param mustBeOnReference if true, this factory will throw a UserException.MalformedGenomeLoc if start or stop isn't on the contig
-     *
-     * @return a non-null GenomeLoc
-     */
-    @ThrowEnsures({"UserException.MalformedGenomeLoc", "!isValidGenomeLoc(contig, start, stop,mustBeOnReference)"})
-    @Ensures("result != null")
-    public GenomeLoc createGenomeLoc(final String contig, int index, final int start, final int stop, boolean mustBeOnReference) {
-        // optimization: by interning the string we ensure that future comparisons use == not the full string comp
-        final String interned = validateGenomeLoc(contig, index, start, stop, mustBeOnReference);
-        return new GenomeLoc(interned, index, start, stop);
-    }
-
-    /**
-     * Create a new GenomeLoc, on contig, including the single position pos.
-     *
-     * Pos is not required to be on the reference
-     *
-     * @see #createGenomeLoc(String, int, int, int, boolean) for exact details of the creation.
-     *
-     * @param contig the contig name
-     * @param pos    the start and stop of the created genome loc
-     *
-     * @return a genome loc representing a single base at the specified postion on the contig
-     */
-    @Ensures("result != null")
-    @ThrowEnsures({"UserException.MalformedGenomeLoc", "!isValidGenomeLoc(contig, pos, pos, true)"})
-    public GenomeLoc createGenomeLoc(final String contig, final int pos) {
-        return createGenomeLoc(contig, getContigIndex(contig), pos, pos);
-    }
-
-    /**
-     * validate a position or interval on the genome as valid
-     *
-     * Requires that contig exist in the master sequence dictionary, and that contig index be valid as well.  Requires
-     * that start <= stop.
-     *
-     * if mustBeOnReference is true,
-     * performs boundary validation for genome loc INTERVALS:
-     * start and stop are on contig and start <= stop
-     *
-     * @param contig the contig name
-     * @param start  the start position
-     * @param stop   the stop position
-     *
-     * @return the interned contig name, an optimization that ensures that contig == the string in the sequence dictionary
-     */
-    protected String validateGenomeLoc(final String contig, final int contigIndex, final int start, final int stop, final boolean mustBeOnReference) {
-        if ( validationLevel == ValidationLevel.NONE )
-            return contig;
-        else {
-            if (stop < start)
-                vglHelper(String.format("The stop position %d is less than start %d in contig %s", stop, start, contig));
-
-            final SAMSequenceRecord contigInfo = getContigInfo().getSequence(contig);
-            if ( contigInfo.getSequenceIndex() != contigIndex )
-                vglHelper(String.format("The contig index %d is bad, doesn't equal the contig index %d of the contig from a string %s",
-                        contigIndex, contigInfo.getSequenceIndex(), contig));
-
-            if ( mustBeOnReference ) {
-                if (start < 1)
-                    vglHelper(String.format("The start position %d is less than 1", start));
-
-                if (stop < 1)
-                    vglHelper(String.format("The stop position %d is less than 1", stop));
-
-                final int contigSize = contigInfo.getSequenceLength();
-                if (start > contigSize || stop > contigSize)
-                    vglHelper(String.format("The genome loc coordinates %d-%d exceed the contig size (%d)", start, stop, contigSize));
-            }
-
-            return contigInfo.getSequenceName();
-        }
-    }
-
    /**
     * Would a genome loc created with the given parameters be valid w.r.t. the master sequence dictionary?
     *
     * Validity is determined by running the full validateGenomeLoc() check and interpreting a
     * thrown ReviewedGATKException as "invalid".
     *
     * @param contig the contig we'd use
     * @param start the start position
     * @param stop the stop
     * @param mustBeOnReference should we require the resulting genome loc to be completely on the reference genome?
     * @return true if this would produce a valid genome loc, false otherwise
     */
    public boolean isValidGenomeLoc(String contig, int start, int stop, boolean mustBeOnReference ) {
        try {
            validateGenomeLoc(contig, getContigIndexWithoutException(contig), start, stop, mustBeOnReference);
            return true;
        } catch ( ReviewedGATKException e) {
            // validation failure means the parameters do not describe a valid loc
            return false;
        }
    }
-
    /**
     * @see #isValidGenomeLoc(String, int, int, boolean) with mustBeOnReference == true
     */
    public boolean isValidGenomeLoc(String contig, int start, int stop ) {
        return isValidGenomeLoc(contig, start, stop, true);
    }
-
-    private void vglHelper(final String msg) {
-        throw new UserException.MalformedGenomeLoc("Parameters to GenomeLocParser are incorrect:" + msg);
-    }
-
-    // --------------------------------------------------------------------------------------------------------------
-    //
-    // Parsing genome locs
-    //
-    // --------------------------------------------------------------------------------------------------------------
-
-    /**
-     * parse a genome interval, from a location string
-     *
-     * Performs interval-style validation:
-     *
-     * contig is valid; start and stop less than the end; start <= stop, and start/stop are on the contig
-     * @param str the string to parse
-     *
-     * @return a GenomeLoc representing the String
-     *
-     */
-    @Requires("str != null")
-    @Ensures("result != null")
-    public GenomeLoc parseGenomeLoc(final String str) {
-        // 'chr2', 'chr2:1000000' or 'chr2:1,000,000-2,000,000'
-        //System.out.printf("Parsing location '%s'%n", str);
-
-        String contig = null;
-        int start = 1;
-        int stop = -1;
-
-        final int colonIndex = str.lastIndexOf(":");
-        if(colonIndex == -1) {
-            contig = str.substring(0, str.length());  // chr1
-            stop = Integer.MAX_VALUE;
-        } else {
-            contig = str.substring(0, colonIndex);
-            final int dashIndex = str.indexOf('-', colonIndex);
-            try {
-                if(dashIndex == -1) {
-                    if(str.charAt(str.length() - 1) == '+') {
-                        start = parsePosition(str.substring(colonIndex + 1, str.length() - 1));  // chr:1+
-                        stop = Integer.MAX_VALUE;
-                    } else {
-                        start = parsePosition(str.substring(colonIndex + 1));   // chr1:1
-                        stop = start;
-                    }
-                } else {
-                    start = parsePosition(str.substring(colonIndex + 1, dashIndex));  // chr1:1-1
-                    stop = parsePosition(str.substring(dashIndex + 1));
-                }
-            } catch(Exception e) {
-                throw new UserException("Failed to parse Genome Location string: " + str, e);
-            }
-        }
-
-        // is the contig valid?
-        if (!contigIsInDictionary(contig))
-            throw new UserException.MalformedGenomeLoc("Contig '" + contig + "' does not match any contig in the GATK sequence dictionary derived from the reference; are you sure you are using the correct reference fasta file?");
-
-        if (stop == Integer.MAX_VALUE)
-            // lookup the actually stop position!
-            stop = getContigInfo(contig).getSequenceLength();
-
-        return createGenomeLoc(contig, getContigIndex(contig), start, stop, true);
-    }
-
-    /**
-     * Parses a number like 1,000,000 into a long.
-     * @param pos
-     */
-    @Requires("pos != null")
-    @Ensures("result >= 0")
-    protected int parsePosition(final String pos) {
-        if(pos.indexOf('-') != -1) {
-            throw new NumberFormatException("Position: '" + pos + "' can't contain '-'." );
-        }
-
-        if(pos.indexOf(',') != -1) {
-            final StringBuilder buffer = new StringBuilder();
-            for(int i = 0; i < pos.length(); i++) {
-                final char c = pos.charAt(i);
-
-                if(c == ',') {
-                    continue;
-                } else if(c < '0' || c > '9') {
-                    throw new NumberFormatException("Position: '" + pos + "' contains invalid chars." );
-                } else {
-                    buffer.append(c);
-                }
-            }
-            return Integer.parseInt(buffer.toString());
-        } else {
-            return Integer.parseInt(pos);
-        }
-    }
-
-    // --------------------------------------------------------------------------------------------------------------
-    //
-    // Parsing string representations
-    //
-    // --------------------------------------------------------------------------------------------------------------
-
-    /**
-     * Create a genome loc, given a read. If the read is unmapped, *and* yet the read has a contig and start position,
-     * then a GenomeLoc is returned for contig:start-start, otherwise an UNMAPPED GenomeLoc is returned.
-     *
-     * @param read the read from which to create a genome loc
-     *
-     * @return the GenomeLoc that was created
-     */
-    @Requires("read != null")
-    @Ensures("result != null")
-    public GenomeLoc createGenomeLoc(final SAMRecord read) {
-        if ( read.getReadUnmappedFlag() && read.getReferenceIndex() == -1 )
-            // read is unmapped and not placed anywhere on the genome
-            return GenomeLoc.UNMAPPED;
-        else {
-            // Use Math.max to ensure that end >= start (Picard assigns the end to reads that are entirely within an insertion as start-1)
-            final int end = read.getReadUnmappedFlag() ? read.getAlignmentStart() : Math.max(read.getAlignmentEnd(), read.getAlignmentStart());
-            return createGenomeLoc(read.getReferenceName(), read.getReferenceIndex(), read.getAlignmentStart(), end, false);
-        }
-    }
-
-    /**
-     * Create a genome loc, given a read using its unclipped alignment. If the read is unmapped, *and* yet the read has a contig and start position,
-     * then a GenomeLoc is returned for contig:start-start, otherwise an UNMAPPED GenomeLoc is returned.
-     *
-     * @param read the read from which to create a genome loc
-     *
-     * @return the GenomeLoc that was created
-     */
-    @Requires("read != null")
-    @Ensures("result != null")
-    public GenomeLoc createGenomeLocUnclipped(final SAMRecord read) {
-        if ( read.getReadUnmappedFlag() && read.getReferenceIndex() == -1 )
-            // read is unmapped and not placed anywhere on the genome
-            return GenomeLoc.UNMAPPED;
-        else {
-            // Use Math.max to ensure that end >= start (Picard assigns the end to reads that are entirely within an insertion as start-1)
-            final int end = read.getReadUnmappedFlag() ? read.getUnclippedEnd() : Math.max(read.getUnclippedEnd(), read.getUnclippedStart());
-            return createGenomeLoc(read.getReferenceName(), read.getReferenceIndex(), read.getUnclippedStart(), end, false);
-        }
-    }
-
    /**
     * Creates a GenomeLoc covering the span of a Tribble feature (chr:start-end).
     *
     * @param feature the feature whose chromosome, start, and end define the loc
     * @return a GenomeLoc over the feature's coordinates
     */
    public GenomeLoc createGenomeLoc(final Feature feature) {
        return createGenomeLoc(feature.getChr(), feature.getStart(), feature.getEnd());
    }
-
    /**
     * Returns a copy of loc with its start replaced by start (GenomeLocs are immutable,
     * so a new instance is created).
     *
     * @deprecated use {@link #createGenomeLoc(String, int, int, int)} directly
     * @see GenomeLoc.setStart
     */
    @Deprecated
    public GenomeLoc setStart(final GenomeLoc loc, final int start) {
        return createGenomeLoc(loc.getContig(), loc.getContigIndex(), start, loc.getStop());
    }
-
    /**
     * Returns a copy of loc with its stop replaced by stop (GenomeLocs are immutable,
     * so a new instance is created).
     *
     * @deprecated use {@link #createGenomeLoc(String, int, int, int)} directly
     * @see GenomeLoc.setStop
     */
    @Deprecated
    public GenomeLoc setStop(final GenomeLoc loc, final int stop) {
        return createGenomeLoc(loc.getContig(), loc.getContigIndex(), loc.start, stop);
    }
-
    /**
     * Returns a copy of loc shifted one base to the right (both start and stop incremented).
     *
     * @deprecated
     * @see GenomeLoc.incPos
     */
    @Deprecated
    public GenomeLoc incPos(final GenomeLoc loc) {
        return incPos(loc, 1);
    }
-
    /**
     * Returns a copy of loc shifted by bases to the right (both start and stop incremented by by).
     *
     * @deprecated
     * @see GenomeLoc.incPos
     */
    @Deprecated
    public GenomeLoc incPos(final GenomeLoc loc, final int by) {
        return createGenomeLoc(loc.getContig(), loc.getContigIndex(), loc.start + by, loc.stop + by);
    }
-
-    /**
-     * Creates a GenomeLoc than spans the entire contig.
-     * @param contigName Name of the contig.
-     * @return A locus spanning the entire contig.
-     */
-    @Requires("contigName != null")
-    @Ensures("result != null")
-    public GenomeLoc createOverEntireContig(final String contigName) {
-        SAMSequenceRecord contig = getContigInfo().getSequence(contigName);
-        return createGenomeLoc(contigName,contig.getSequenceIndex(),1,contig.getSequenceLength(), true);
-    }
-
-    /**
-     * Creates a loc to the left (starting at the loc start + 1) of maxBasePairs size.
-     * @param loc The original loc
-     * @param maxBasePairs The maximum number of basePairs
-     * @return The contiguous loc of up to maxBasePairs length or null if the loc is already at the start of the contig.
-     */
-    @Requires({"loc != null", "maxBasePairs > 0"})
-    public GenomeLoc createGenomeLocAtStart(final GenomeLoc loc, final int maxBasePairs) {
-        if (GenomeLoc.isUnmapped(loc))
-            return null;
-        final String contigName = loc.getContig();
-        final SAMSequenceRecord contig = getContigInfo().getSequence(contigName);
-        final int contigIndex = contig.getSequenceIndex();
-
-        int start = loc.getStart() - maxBasePairs;
-        int stop = loc.getStart() - 1;
-
-        if (start < 1)
-            start = 1;
-        if (stop < 1)
-            return null;
-
-        return createGenomeLoc(contigName, contigIndex, start, stop, true);
-    }
-
-    /**
-     * Creates a loc padded in both directions by maxBasePairs size (if possible).
-     * @param loc      The original loc
-     * @param padding  The number of base pairs to pad on either end
-     * @return The contiguous loc of length up to the original length + 2*padding (depending on the start/end of the contig).
-     */
-    @Requires({"loc != null", "padding >= 0"})
-    public GenomeLoc createPaddedGenomeLoc(final GenomeLoc loc, final int padding) {
-        if (GenomeLoc.isUnmapped(loc) || padding == 0)
-            return loc;
-        else
-            return createGenomeLocOnContig(loc.getContig(), loc.getContigIndex(), loc.getStart() - padding, loc.getStop() + padding);
-    }
-
-    /**
-     * Creates a loc to the right (starting at the loc stop + 1) of maxBasePairs size.
-     * @param loc The original loc
-     * @param maxBasePairs The maximum number of basePairs
-     * @return The contiguous loc of up to maxBasePairs length or null if the loc is already at the end of the contig.
-     */
-    @Requires({"loc != null", "maxBasePairs > 0"})
-    public GenomeLoc createGenomeLocAtStop(final GenomeLoc loc, final int maxBasePairs) {
-        if (GenomeLoc.isUnmapped(loc))
-            return null;
-        String contigName = loc.getContig();
-        SAMSequenceRecord contig = getContigInfo().getSequence(contigName);
-        int contigIndex = contig.getSequenceIndex();
-        int contigLength = contig.getSequenceLength();
-
-        int start = loc.getStop() + 1;
-        int stop = loc.getStop() + maxBasePairs;
-
-        if (start > contigLength)
-            return null;
-        if (stop > contigLength)
-            stop = contigLength;
-
-        return createGenomeLoc(contigName, contigIndex, start, stop, true);
-    }
-
    /**
     * Create a new genome loc, bounding start and stop by the start and end of contig.
     *
     * @see #createGenomeLocOnContig(String, int, int, int) with the contig index looked up from contig
     */
    public GenomeLoc createGenomeLocOnContig(final String contig, final int start, final int stop) {
        return createGenomeLocOnContig(contig, getContigIndex(contig), start, stop);
    }
-
-    /**
-     * Create a new genome loc, bounding start and stop by the start and end of contig
-     *
-     * This function will return null if start and stop cannot be adjusted in any reasonable way
-     * to be on the contig.  For example, if start and stop are both past the end of the contig,
-     * there's no way to fix this, and null will be returned.
-     *
-     * @param contig our contig
-     * @param start our start as an arbitrary integer (may be negative, etc)
-     * @param stop our stop as an arbitrary integer (may be negative, etc)
-     * @return a valid genome loc over contig, or null if a meaningful genome loc cannot be created
-     */
-    public GenomeLoc createGenomeLocOnContig(final String contig, final int contigIndex, final int start, final int stop) {
-        final int contigLength = getContigInfo().getSequence(contigIndex).getSequenceLength();
-        final int boundedStart = Math.max(1, start);
-        final int boundedStop = Math.min(contigLength, stop);
-
-        if ( boundedStart > contigLength || boundedStop < 1 )
-            // there's no meaningful way to create this genome loc, as the start and stop are off the contig
-            return null;
-        else
-            return createGenomeLoc(contig, contigIndex, boundedStart, boundedStop);
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/GenomeLocSortedSet.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/GenomeLocSortedSet.java
deleted file mode 100644
index 694c271..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/GenomeLocSortedSet.java
+++ /dev/null
@@ -1,476 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils;
-
-import htsjdk.samtools.SAMSequenceDictionary;
-import htsjdk.samtools.SAMSequenceRecord;
-import org.apache.log4j.Logger;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-import org.broadinstitute.gatk.utils.interval.IntervalMergingRule;
-import org.broadinstitute.gatk.utils.interval.IntervalUtils;
-
-import java.util.*;
-
-/**
- *         <p/>
- *         Class GenomeLocCollection
- *         <p/>
- *         a set of genome locations. This collection is self sorting,
- *         and will merge genome locations that are overlapping. The remove function
- *         will also remove a region from the list, if the region to remove is a
- *         partial interval of a region in the collection it will remove the region from
- *         that element.
- *
- * @author aaron
- * Date: May 22, 2009
- * Time: 10:54:40 AM
- */
-public class GenomeLocSortedSet extends AbstractSet<GenomeLoc> {
-    private static Logger logger = Logger.getLogger(GenomeLocSortedSet.class);
-
-    private GenomeLocParser genomeLocParser;
-
-    // our private storage for the GenomeLoc's
-    private final List<GenomeLoc> mArray = new ArrayList<GenomeLoc>();
-
-    // cache this to make overlap checking much more efficient
-    private int previousOverlapSearchIndex = -1;
-
-    /**
-     * Create a new, empty GenomeLocSortedSet
-     *
-     * @param parser a non-null the parser we use to create genome locs
-     */
-    public GenomeLocSortedSet(final GenomeLocParser parser) {
-        if ( parser == null ) throw new IllegalArgumentException("parser cannot be null");
-        this.genomeLocParser = parser;
-    }
-
-    /**
-     * Create a new GenomeLocSortedSet containing location e
-     *
-     * @param parser a non-null the parser we use to create genome locs
-     * @param e a single genome locs to add to this set
-     */
-    public GenomeLocSortedSet(final GenomeLocParser parser, final GenomeLoc e) {
-        this(parser);
-        add(e);
-    }
-
-    /**
-     * Create a new GenomeLocSortedSet containing locations l
-     *
-     * The elements in l can be in any order, and can be overlapping.  They will be sorted first and
-     * overlapping (but not contiguous) elements will be merged
-     *
-     * @param parser a non-null the parser we use to create genome locs
-     * @param l a collection of genome locs to add to this set
-     */
-    public GenomeLocSortedSet(final GenomeLocParser parser, final Collection<GenomeLoc> l) {
-        this(parser);
-
-        final ArrayList<GenomeLoc> sorted = new ArrayList<GenomeLoc>(l);
-        Collections.sort(sorted);
-        mArray.addAll(IntervalUtils.mergeIntervalLocations(sorted, IntervalMergingRule.OVERLAPPING_ONLY));
-    }
-
-    /**
-     * Gets the GenomeLocParser used to create this sorted set.
-     * @return The parser.  Will never be null.
-     */
-    public GenomeLocParser getGenomeLocParser() {
-        return genomeLocParser;
-    }
-
-    /**
-     * get an iterator over this collection
-     *
-     * @return an iterator<GenomeLoc>
-     */
-    public Iterator<GenomeLoc> iterator() {
-        return mArray.iterator();
-    }
-
-    /**
-     * return the size of the collection
-     *
-     * @return the size of the collection
-     */
-    public int size() {
-        return mArray.size();
-    }
-
-    /**
-     * Return the size, in bp, of the genomic regions by all of the regions in this set
-     * @return size in bp of the covered regions
-     */
-    public long coveredSize() {
-        long s = 0;
-        for ( GenomeLoc e : this )
-            s += e.size();
-        return s;
-    }
-
-    /**
-     * Return the number of bps before loc in the sorted set
-     *
-     * @param loc the location before which we are counting bases
-     * @return the number of base pairs over all previous intervals
-     */
-    public long sizeBeforeLoc(GenomeLoc loc) {
-        long s = 0;
-
-        for ( GenomeLoc e : this ) {
-            if ( e.isBefore(loc) )
-                s += e.size();
-            else if ( e.isPast(loc) )
-                break; // we are done
-            else // loc is inside of s
-                s += loc.getStart() - e.getStart();
-        }
-
-        return s;
-    }
-
-    /**
-     * determine if the collection is empty
-     *
-     * @return true if we have no elements
-     */
-    public boolean isEmpty() {
-        return mArray.isEmpty();
-    }
-
-    /**
-     * Determine if the given loc overlaps any loc in the sorted set
-     *
-     * @param loc the location to test
-     * @return trip if the location overlaps any loc
-     */
-    public boolean overlaps(final GenomeLoc loc) {
-        // edge condition
-        if ( mArray.isEmpty() )
-            return false;
-
-        // use the cached version first
-        if ( previousOverlapSearchIndex != -1 && overlapsAtOrImmediatelyAfterCachedIndex(loc, true) )
-            return true;
-
-        // update the cached index
-        previousOverlapSearchIndex = Collections.binarySearch(mArray, loc);
-
-        // if it matches an interval exactly, we are done
-        if ( previousOverlapSearchIndex >= 0 )
-            return true;
-
-        // check whether it overlaps the interval before or after the insertion point
-        previousOverlapSearchIndex = Math.max(0, -1 * previousOverlapSearchIndex - 2);
-        return overlapsAtOrImmediatelyAfterCachedIndex(loc, false);
-    }
-
-    private boolean overlapsAtOrImmediatelyAfterCachedIndex(final GenomeLoc loc, final boolean updateCachedIndex) {
-        // check the cached entry
-        if ( mArray.get(previousOverlapSearchIndex).overlapsP(loc) )
-            return true;
-
-        // check the entry after the cached entry since we may have moved to it
-        boolean returnValue = false;
-        if ( previousOverlapSearchIndex < mArray.size() - 1 ) {
-            returnValue = mArray.get(previousOverlapSearchIndex + 1).overlapsP(loc);
-            if ( updateCachedIndex )
-                previousOverlapSearchIndex++;
-        }
-
-        return returnValue;
-    }
-
-    /**
-     * Return a list of intervals overlapping loc
-     *
-     * @param loc the location we want overlapping intervals
-     * @return a non-null list of locations that overlap loc
-     */
-    public List<GenomeLoc> getOverlapping(final GenomeLoc loc) {
-        // the max ensures that if loc would be the first element, that we start searching at the first element
-        final int index = Collections.binarySearch(mArray, loc);
-        if ( index >= 0 )
-            // we can safely return a singleton because overlapping regions are merged and loc is exactly in
-            // the set already
-            return Collections.singletonList(loc);
-
-        // if loc isn't in the list index is (-(insertion point) - 1). The insertion point is defined as the point at
-        // which the key would be inserted into the list: the index of the first element greater than the key, or list.size()
-        // -ins - 1 = index => -ins = index + 1 => ins = -(index + 1)
-        // Note that we look one before the index in this case, as loc might occur after the previous overlapping interval
-        final int start = Math.max(-(index + 1) - 1, 0);
-        final int size = mArray.size();
-
-        final List<GenomeLoc> overlapping = new LinkedList<GenomeLoc>();
-        for ( int i = start; i < size; i++ ) {
-            final GenomeLoc myLoc = mArray.get(i);
-            if ( loc.overlapsP(myLoc) )
-                overlapping.add(myLoc);
-            else if ( myLoc.isPast(loc) )
-                // since mArray is ordered, if myLoc is past loc that means all future
-                // intervals cannot overlap loc either.  So we can safely abort the search
-                // note that we need to be a bit conservative on our tests since index needs to start
-                // at -1 the position of index, so it's possible that myLoc and loc don't overlap but the next
-                // position might
-                break;
-        }
-
-        return overlapping;
-    }
-
-    /**
-     * Return a list of intervals overlapping loc by enumerating all locs and testing for overlap
-     *
-     * Purely for testing purposes -- this is way to slow for any production code
-     *
-     * @param loc the location we want overlapping intervals
-     * @return a non-null list of locations that overlap loc
-     */
-    protected List<GenomeLoc> getOverlappingFullSearch(final GenomeLoc loc) {
-        final List<GenomeLoc> overlapping = new LinkedList<GenomeLoc>();
-
-        // super slow, but definitely works
-        for ( final GenomeLoc myLoc : mArray ) {
-            if ( loc.overlapsP(myLoc) )
-                overlapping.add(myLoc);
-        }
-
-        return overlapping;
-    }
-
-    /**
-     * Adds a GenomeLoc to the collection, inserting at the correct sorted position into the set.
-     * Throws an exception if the loc overlaps another loc already in the set.
-     *
-     * @param loc the GenomeLoc to add
-     *
-     * @return true if the loc was added or false otherwise (if the loc was null)
-     */
-    public boolean add(final GenomeLoc loc) {
-        return add(loc, false);
-    }
-
-    /**
-     * Adds a GenomeLoc to the collection, merging it if it overlaps another region.
-     * If it's not overlapping then we insert it at the correct sorted position into the set.
-     *
-     * @param loc the GenomeLoc to add
-     *
-     * @return true if the loc was added or false otherwise (if the loc was null)
-     */
-    public boolean addRegion(final GenomeLoc loc) {
-        return add(loc, true);
-    }
-
-    /**
-     * Adds a GenomeLoc to the collection, inserting at the correct sorted position into the set.
-     *
-     * @param loc                      the GenomeLoc to add
-     * @param mergeIfIntervalOverlaps  if true we merge the interval if it overlaps another one already in the set, otherwise we throw an exception
-     *
-     * @return true if the loc was added or false otherwise (if the loc was null or an exact duplicate)
-     */
-    public boolean add(final GenomeLoc loc, final boolean mergeIfIntervalOverlaps) {
-        if ( loc == null )
-            return false;
-
-        // if we have no other intervals yet or if the new loc is past the last one in the list (which is usually the
-        // case because locs are generally added in order) then be extra efficient and just add the loc to the end
-        if ( mArray.size() == 0 || loc.isPast(mArray.get(mArray.size() - 1)) ) {
-            return mArray.add(loc);
-        }
-
-        // find where in the list the new loc belongs
-        final int binarySearchIndex = Collections.binarySearch(mArray,loc);
-
-        // if it already exists in the list, return or throw an exception as needed
-        if ( binarySearchIndex >= 0 ) {
-            if ( mergeIfIntervalOverlaps )
-                return false;
-            throw new IllegalArgumentException("GenomeLocSortedSet already contains the GenomeLoc " + loc);
-        }
-
-        // if it overlaps a loc already in the list merge or throw an exception as needed
-        final int insertionIndex = -1 * (binarySearchIndex + 1);
-        if ( ! mergeOverlappingIntervalsFromAdd(loc, insertionIndex, !mergeIfIntervalOverlaps) ) {
-            // it does not overlap any current intervals, so add it to the set
-            mArray.add(insertionIndex, loc);
-        }
-
-        return true;
-    }
-
-    /*
-     * If the provided GenomeLoc overlaps another already in the set, merge them (or throw an exception if requested)
-     *
-     * @param loc                          the GenomeLoc to add
-     * @param insertionIndex               the index in the sorted set to add the new loc
-     * @param throwExceptionIfOverlapping  if true we throw an exception if there's overlap, otherwise we merge them
-     *
-     * @return true if the loc was added or false otherwise
-     */
-    private boolean mergeOverlappingIntervalsFromAdd(final GenomeLoc loc, final int insertionIndex, final boolean throwExceptionIfOverlapping) {
-        // try merging with the previous index
-        if ( insertionIndex != 0 && loc.overlapsP(mArray.get(insertionIndex - 1)) ) {
-            if ( throwExceptionIfOverlapping )
-                throw new IllegalArgumentException(String.format("GenomeLocSortedSet contains a GenomeLoc (%s) that overlaps with the provided one (%s)", mArray.get(insertionIndex - 1).toString(), loc.toString()));
-            mArray.set(insertionIndex - 1, mArray.get(insertionIndex - 1).merge(loc));
-            return true;
-        }
-
-        // try merging with the following index
-        if ( insertionIndex < mArray.size() && loc.overlapsP(mArray.get(insertionIndex)) ) {
-            if ( throwExceptionIfOverlapping )
-                throw new IllegalArgumentException(String.format("GenomeLocSortedSet contains a GenomeLoc (%s) that overlaps with the provided one (%s)", mArray.get(insertionIndex).toString(), loc.toString()));
-            mArray.set(insertionIndex, mArray.get(insertionIndex).merge(loc));
-            return true;
-        }
-
-        return false;
-    }
-
-    public GenomeLocSortedSet subtractRegions(GenomeLocSortedSet toRemoveSet) {
-        LinkedList<GenomeLoc> good = new LinkedList<GenomeLoc>();
-        Stack<GenomeLoc> toProcess = new Stack<GenomeLoc>();
-        Stack<GenomeLoc> toExclude = new Stack<GenomeLoc>();
-
-        // initialize the stacks
-        toProcess.addAll(mArray);
-        Collections.reverse(toProcess);
-        toExclude.addAll(toRemoveSet.mArray);
-        Collections.reverse(toExclude);
-
-        int i = 0;
-        while ( ! toProcess.empty() ) {    // while there's still stuff to process
-            if ( toExclude.empty() ) {
-                good.addAll(toProcess);         // no more excludes, all the processing stuff is good
-                break;
-            }
-
-            GenomeLoc p = toProcess.peek();
-            GenomeLoc e = toExclude.peek();
-
-            if ( p.overlapsP(e) ) {
-                toProcess.pop();
-                for ( GenomeLoc newP : p.subtract(e) )
-                    toProcess.push(newP);
-            } else if ( p.compareContigs(e) < 0 ) {
-                good.add(toProcess.pop());         // p is now good
-            } else if ( p.compareContigs(e) > 0 ) {
-                toExclude.pop();                 // e can't effect anything
-            } else if ( p.getStop() < e.getStart() ) {
-                good.add(toProcess.pop());         // p stops before e starts, p is good
-            } else if ( e.getStop() < p.getStart() ) {
-                toExclude.pop();                 // p starts after e stops, e is done
-            } else {
-                throw new ReviewedGATKException("BUG: unexpected condition: p=" + p + ", e=" + e);
-            }
-
-            if ( i++ % 10000 == 0 )
-                logger.debug("removeRegions operation: i = " + i);
-        }
-
-        return createSetFromList(genomeLocParser,good);
-    }
-
-
-    /**
-     * a simple removal of an interval contained in this list.  The interval must be identical to one in the list (no partial locations or overlapping)
-     * @param location the GenomeLoc to remove
-     */
-    public void remove(GenomeLoc location) {
-        if (!mArray.contains(location)) throw new IllegalArgumentException("Unable to remove location: " + location + ", not in the list");
-        mArray.remove(location);
-    }
-
-    /**
-     * create a list of genomic locations, given a reference sequence
-     *
-     * @param dict the sequence dictionary to create a collection from
-     *
-     * @return the GenomeLocSet of all references sequences as GenomeLoc's
-     */
-    public static GenomeLocSortedSet createSetFromSequenceDictionary(final SAMSequenceDictionary dict) {
-        final GenomeLocParser parser = new GenomeLocParser(dict);
-        final GenomeLocSortedSet returnSortedSet = new GenomeLocSortedSet(parser);
-        for ( final SAMSequenceRecord sequence : dict.getSequences() ) {
-            returnSortedSet.add(parser.createOverEntireContig(sequence.getSequenceName()));
-        }
-        return returnSortedSet;
-    }
-
-    /**
-     * Create a sorted genome location set from a list of GenomeLocs.
-     *
-     * @param locs the list<GenomeLoc>
-     *
-     * @return the sorted genome loc list
-     */
-    public static GenomeLocSortedSet createSetFromList(GenomeLocParser parser,List<GenomeLoc> locs) {
-        GenomeLocSortedSet set = new GenomeLocSortedSet(parser);
-        set.addAll(locs);
-        return set;
-    }
-
-
-    /**
-     * return a deep copy of this collection.
-     *
-     * @return a new GenomeLocSortedSet, identical to the current GenomeLocSortedSet.
-     */
-    public GenomeLocSortedSet clone() {
-        GenomeLocSortedSet ret = new GenomeLocSortedSet(genomeLocParser);
-        for (GenomeLoc loc : this.mArray) {
-            // ensure a deep copy
-            ret.mArray.add(genomeLocParser.createGenomeLoc(loc.getContig(), loc.getStart(), loc.getStop()));
-        }
-        return ret;
-    }
-
-    /**
-     * convert this object to a list
-     * @return the lists
-     */
-    public List<GenomeLoc> toList() {
-        return this.mArray;
-    }
-
-    public String toString() {
-        StringBuilder s = new StringBuilder();
-        s.append("[");
-        for ( GenomeLoc e : this ) {
-            s.append(" ");
-            s.append(e.toString());
-        }
-        s.append("]");
-
-        return s.toString();
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/HeapSizeMonitor.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/HeapSizeMonitor.java
deleted file mode 100644
index 041bf76..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/HeapSizeMonitor.java
+++ /dev/null
@@ -1,107 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils;
-
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-
-import java.lang.management.ManagementFactory;
-import java.lang.management.MemoryMXBean;
-
-/**
- * Monitor the current heap size, allowing the application to programmatically
- * access the data.
- *
- * @author mhanna
- * @version 0.1
- */
-public class HeapSizeMonitor {
-    private final int monitorFrequencyMillis;
-    private final MonitorRunnable monitorRunnable;
-
-    private Thread monitorThread;
-
-    public HeapSizeMonitor() {
-        this(1000);
-    }
-
-    public HeapSizeMonitor(final int monitorFrequencyMillis) {
-        this.monitorFrequencyMillis = monitorFrequencyMillis;
-        this.monitorRunnable = new MonitorRunnable();
-    }
-
-    public long getMaxMemoryUsed() {
-        return monitorRunnable.getMaxMemoryUsed();
-    }
-
-    public void start() {
-        monitorThread = new Thread(monitorRunnable);
-        monitorThread.start();
-    }
-
-    public void stop() {
-        monitorRunnable.stop = true;
-        try {
-            monitorThread.join();
-        }
-        catch(InterruptedException ex) {
-            throw new ReviewedGATKException("Unable to connect to monitor thread");
-        }
-        monitorThread = null;        
-    }
-
-    private class MonitorRunnable implements Runnable {
-        private MemoryMXBean monitor;
-
-        private long maxMemoryUsed;
-        private boolean stop;
-
-        public MonitorRunnable() {
-            monitor = ManagementFactory.getMemoryMXBean();   
-        }
-
-        public void reset() {
-            maxMemoryUsed = 0L;
-            stop = false;
-        }
-
-        public long getMaxMemoryUsed() {
-            return maxMemoryUsed;
-        }
-
-        public void run() {
-            while(!stop) {
-                System.gc();
-                maxMemoryUsed = Math.max(monitor.getHeapMemoryUsage().getUsed(),maxMemoryUsed);
-                try {
-                    Thread.sleep(monitorFrequencyMillis);
-                }
-                catch(InterruptedException ex) {
-                    throw new ReviewedGATKException("Unable to continue monitoring heap consumption",ex);
-                }
-            }
-        }
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/IndelUtils.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/IndelUtils.java
deleted file mode 100644
index 81a2bdc..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/IndelUtils.java
+++ /dev/null
@@ -1,262 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils;
-
-import org.broadinstitute.gatk.engine.contexts.ReferenceContext;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-import htsjdk.variant.variantcontext.VariantContext;
-
-import java.util.ArrayList;
-import java.util.Arrays;
-
-/**
- * Created by IntelliJ IDEA.
- * User: delangel
- * Date: Feb 3, 2011
- * Time: 2:44:22 PM
- * To change this template use File | Settings | File Templates.
- */
-public class IndelUtils {
-    protected final static String[] COLUMN_KEYS;
-
-
-
-    static {
-        COLUMN_KEYS= new String[51];
-        COLUMN_KEYS[0] = "Novel_A";
-        COLUMN_KEYS[1] = "Novel_C";
-        COLUMN_KEYS[2] = "Novel_G";
-        COLUMN_KEYS[3] = "Novel_T";
-        COLUMN_KEYS[4]  = "NOVEL_1";
-        COLUMN_KEYS[5]  = "NOVEL_2";
-        COLUMN_KEYS[6]  = "NOVEL_3";
-        COLUMN_KEYS[7]  = "NOVEL_4";
-        COLUMN_KEYS[8]  = "NOVEL_5";
-        COLUMN_KEYS[9]  = "NOVEL_6";
-        COLUMN_KEYS[10] = "NOVEL_7";
-        COLUMN_KEYS[11] = "NOVEL_8";
-        COLUMN_KEYS[12] = "NOVEL_9";
-        COLUMN_KEYS[13] = "NOVEL_10orMore";
-        COLUMN_KEYS[14] = "RepeatExpansion_A";
-        COLUMN_KEYS[15] = "RepeatExpansion_C";
-        COLUMN_KEYS[16] = "RepeatExpansion_G";
-        COLUMN_KEYS[17] = "RepeatExpansion_T";
-        COLUMN_KEYS[18] = "RepeatExpansion_AC";
-        COLUMN_KEYS[19] = "RepeatExpansion_AG";
-        COLUMN_KEYS[20] = "RepeatExpansion_AT";
-        COLUMN_KEYS[21] = "RepeatExpansion_CA";
-        COLUMN_KEYS[22] = "RepeatExpansion_CG";
-        COLUMN_KEYS[23] = "RepeatExpansion_CT";
-        COLUMN_KEYS[24] = "RepeatExpansion_GA";
-        COLUMN_KEYS[25] = "RepeatExpansion_GC";
-        COLUMN_KEYS[26] = "RepeatExpansion_GT";
-        COLUMN_KEYS[27] = "RepeatExpansion_TA";
-        COLUMN_KEYS[28] = "RepeatExpansion_TC";
-        COLUMN_KEYS[29] = "RepeatExpansion_TG";
-        COLUMN_KEYS[30] = "EventLength_1";
-        COLUMN_KEYS[31] = "EventLength_2";
-        COLUMN_KEYS[32] = "EventLength_3";
-        COLUMN_KEYS[33] = "EventLength_4";
-        COLUMN_KEYS[34] = "EventLength_5";
-        COLUMN_KEYS[35] = "EventLength_6";
-        COLUMN_KEYS[36] = "EventLength_7";
-        COLUMN_KEYS[37] = "EventLength_8";
-        COLUMN_KEYS[38] = "EventLength_9";
-        COLUMN_KEYS[39] = "EventLength_10orMore";
-        COLUMN_KEYS[40] = "NumRepetitions_1";
-        COLUMN_KEYS[41] = "NumRepetitions_2";
-        COLUMN_KEYS[42] = "NumRepetitions_3";
-        COLUMN_KEYS[43] = "NumRepetitions_4";
-        COLUMN_KEYS[44] = "NumRepetitions_5";
-        COLUMN_KEYS[45] = "NumRepetitions_6";
-        COLUMN_KEYS[46] = "NumRepetitions_7";
-        COLUMN_KEYS[47] = "NumRepetitions_8";
-        COLUMN_KEYS[48] = "NumRepetitions_9";
-        COLUMN_KEYS[49] = "NumRepetitions_10orMore";
-        COLUMN_KEYS[50] = "Other";
-
-    }
-
-    private static final int START_IND_NOVEL = 4;
-    private static final int STOP_IND_NOVEL = 13;
-    private static final int START_IND_FOR_REPEAT_EXPANSION_1 = 14;
-    private static final int IND_FOR_REPEAT_EXPANSION_A = 14;
-    private static final int IND_FOR_REPEAT_EXPANSION_C = 15;
-    private static final int IND_FOR_REPEAT_EXPANSION_G = 16;
-    private static final int IND_FOR_REPEAT_EXPANSION_T = 17;
-    private static final int STOP_IND_FOR_REPEAT_EXPANSION_2 = 29;
-    private static final int START_IND_FOR_REPEAT_EXPANSION_COUNTS = 30;
-    private static final int STOP_IND_FOR_REPEAT_EXPANSION_COUNTS = 39;
-    private static final int START_IND_FOR_NUM_REPETITION_COUNTS = 40;
-    private static final int STOP_IND_FOR_NUM_REPETITION_COUNTS = 49;
-    private static final int IND_FOR_OTHER_EVENT = 50;
-    private static final int START_IND_NOVEL_PER_BASE = 0;
-    private static final int STOP_IND_NOVEL_PER_BASE = 3;
-
-    private static String findMinimalEvent(String eventString) {
-
-        // for each length up to given string length, see if event string is a repetition of units of size N
-        String minEvent = eventString;
-        for (int k=1; k < eventString.length(); k++) {
-            if (eventString.length() % k > 0)
-                continue;
-            String str = eventString.substring(0,k);
-            // now see if event string is a repetition of str
-            int numReps = eventString.length() / k;
-            String r = "";
-            for (int j=0; j < numReps; j++)
-                r = r.concat(str);
-
-            if (r.matches(eventString)) {
-                minEvent = str;
-                break;
-            }
-
-        }
-        return minEvent;
-    }
-
-    public static ArrayList<Integer> findEventClassificationIndex(VariantContext vc, ReferenceContext ref) {
-        int eventLength;
-
-        String indelAlleleString;
-        boolean done = false;
-
-        ArrayList<Integer> inds = new ArrayList<Integer>();
-        if ( vc.isSimpleInsertion() ) {
-            indelAlleleString = vc.getAlternateAllele(0).getDisplayString().substring(1);
-        } else if ( vc.isSimpleDeletion() ) {
-            indelAlleleString = vc.getReference().getDisplayString().substring(1);
-        }
-        else {
-            inds.add(IND_FOR_OTHER_EVENT);
-            return inds;
-        }
-
-        byte[] refBases = ref.getBases();
-
-        indelAlleleString = findMinimalEvent(indelAlleleString);
-        eventLength = indelAlleleString.length();
-
-        // See first if indel is a repetition of bases before current
-        int indStart = refBases.length/2-eventLength+1;
-
-        int numRepetitions = 0;
-        while (!done) {
-            if (indStart < 0)
-                done = true;
-            else {
-                String refPiece = new String(Arrays.copyOfRange(refBases,indStart,indStart+eventLength));
-                if (refPiece.matches(indelAlleleString))
-                {
-                    numRepetitions++;
-                    indStart = indStart - eventLength;
-                }
-                else
-                    done = true;
-
-            }
-        }
-
-        // now do it forward
-        done = false;
-        indStart = refBases.length/2+1;
-        while (!done) {
-            if (indStart + eventLength >= refBases.length)
-                break;
-            else {
-                String refPiece = new String(Arrays.copyOfRange(refBases,indStart,indStart+eventLength));
-                if (refPiece.matches(indelAlleleString))
-                {
-                    numRepetitions++;
-                    indStart = indStart + eventLength;
-                }
-                else
-                    done = true;
-
-            }
-        }
-
-        if (numRepetitions == 0) {
-            //unrepeated sequence from surroundings
-            int ind = START_IND_NOVEL + (eventLength-1);
-            if (ind > STOP_IND_NOVEL)
-                ind = STOP_IND_NOVEL;
-            inds.add(ind);
-
-            if (eventLength == 1) {
-                // log single base indels additionally by base
-                String keyStr = "Novel_" + indelAlleleString;
-                int k;
-                for (k=START_IND_NOVEL_PER_BASE; k <= STOP_IND_NOVEL_PER_BASE; k++) {
-                    if (keyStr.matches(COLUMN_KEYS[k]))
-                        break;
-                }
-                inds.add(k);
-            }
-        }
-        else {
-            // log number of repetition counts
-            int ind = START_IND_FOR_NUM_REPETITION_COUNTS + (numRepetitions-1);
-            if (ind > STOP_IND_FOR_NUM_REPETITION_COUNTS)
-                ind = STOP_IND_FOR_NUM_REPETITION_COUNTS;
-            inds.add(ind);
-
-            ind = START_IND_FOR_REPEAT_EXPANSION_COUNTS + (eventLength - 1);
-            if (ind > STOP_IND_FOR_REPEAT_EXPANSION_COUNTS)
-                    ind = STOP_IND_FOR_REPEAT_EXPANSION_COUNTS;
-            inds.add(ind);
-            
-            // log event length
-            if (eventLength<=2) {
-                // for single or dinucleotide indels, we further log the base in which they occurred
-                String keyStr = "RepeatExpansion_" + indelAlleleString;
-                int k;
-                for (k=START_IND_FOR_REPEAT_EXPANSION_1; k <= STOP_IND_FOR_REPEAT_EXPANSION_2; k++) {
-                    if (keyStr.matches(COLUMN_KEYS[k]))
-                        break;
-                }
-                // log now event
-                inds.add(k);
-            }
-
-
-        }
-
-        return inds;
-    }
-
-    public static String getIndelClassificationName(int k) {
-        if (k >=0 && k < COLUMN_KEYS.length)
-            return COLUMN_KEYS[k];
-        else
-            throw new ReviewedGATKException("Invalid index when trying to get indel classification name");
-    }
-
-    public static boolean isInsideExtendedIndel(VariantContext vc, ReferenceContext ref) {
-        return (vc.getStart() != ref.getLocus().getStart());
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/LRUCache.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/LRUCache.java
deleted file mode 100644
index df2e829..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/LRUCache.java
+++ /dev/null
@@ -1,45 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils;
-
-import java.util.LinkedHashMap;
-import java.util.Map;
-
-/**
- * An LRU cache implemented as an extension to LinkedHashMap
- */
-public class LRUCache<K,V> extends LinkedHashMap<K,V> {
-    private int capacity; // Maximum number of items in the cache.
-
-    public LRUCache(int capacity) {
-        super(capacity+1, 1.0f, true); // Pass 'true' for accessOrder.
-        this.capacity = capacity;
-    }
-
-    protected boolean removeEldestEntry(final Map.Entry entry) {
-        return (size() > this.capacity);
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/MRUCachingSAMSequenceDictionary.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/MRUCachingSAMSequenceDictionary.java
deleted file mode 100644
index 2f9a3b0..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/MRUCachingSAMSequenceDictionary.java
+++ /dev/null
@@ -1,186 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils;
-
-import com.google.java.contract.Ensures;
-import com.google.java.contract.Requires;
-import htsjdk.samtools.SAMSequenceDictionary;
-import htsjdk.samtools.SAMSequenceRecord;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-
-/**
- * A wrapper class that provides efficient most recently used caching for the global
- * SAMSequenceDictionary underlying all of the GATK engine capabilities.  It is essential
- * that these class be as efficient as possible.  It doesn't need to be thread-safe, as
- * GenomeLocParser uses a thread-local variable to ensure that each thread gets its own MRU
- * cache.
- *
- * The MRU elements are the SAMSequenceRecord, the lastContig, and the lastIndex.  The
- * cached value is the actual SAMSequenceRecord of the most recently accessed value from
- * getSequence, along with local variables for the contig index and contig string.
- */
-final class MRUCachingSAMSequenceDictionary {
-    /**
-     * Our sequence dictionary
-     */
-    private final SAMSequenceDictionary dict;
-
-    SAMSequenceRecord lastSSR = null;
-    String lastContig = "";
-    int lastIndex = -1;
-
-    /**
-     * Create a new MRUCachingSAMSequenceDictionary that provides information about sequences in dict
-     * @param dict a non-null, non-empty sequencing dictionary
-     */
-    @Ensures("lastSSR == null")
-    public MRUCachingSAMSequenceDictionary(final SAMSequenceDictionary dict) {
-        if ( dict == null ) throw new IllegalArgumentException("Dictionary cannot be null");
-        if ( dict.size() == 0 ) throw new IllegalArgumentException("Dictionary cannot have size zero");
-
-        this.dict = dict;
-    }
-
-    /**
-     * Get our sequence dictionary
-     * @return a non-null SAMSequenceDictionary
-     */
-    @Ensures("result != null")
-    public SAMSequenceDictionary getDictionary() {
-        return dict;
-    }
-
-    /**
-     * Is contig present in the dictionary?  Efficiently caching.
-     * @param contig a non-null contig we want to test
-     * @return true if contig is in dictionary, false otherwise
-     */
-    @Requires("contig != null")
-    public final boolean hasContig(final String contig) {
-        return contig.equals(lastContig) || dict.getSequence(contig) != null;
-    }
-
-    /**
-     * Is contig index present in the dictionary?  Efficiently caching.
-     * @param contigIndex an integer offset that might map to a contig in this dictionary
-     * @return true if contigIndex is in dictionary, false otherwise
-     */
-    @Requires("contigIndex >= 0")
-    public final boolean hasContigIndex(final int contigIndex) {
-        return lastIndex == contigIndex || dict.getSequence(contigIndex) != null;
-    }
-
-    /**
-     * Same as SAMSequenceDictionary.getSequence but uses a MRU cache for efficiency
-     *
-     * @param contig the contig name we want to get the sequence record of
-     * @throws ReviewedGATKException if contig isn't present in the dictionary
-     * @return the sequence record for contig
-     */
-    @Requires("contig != null")
-    @Ensures("result != null")
-    public final SAMSequenceRecord getSequence(final String contig) {
-        if ( isCached(contig) )
-            return lastSSR;
-        else
-            return updateCache(contig, -1);
-    }
-
-    /**
-     * Same as SAMSequenceDictionary.getSequence but uses a MRU cache for efficiency
-     *
-     * @param index the contig index we want to get the sequence record of
-     * @throws ReviewedGATKException if contig isn't present in the dictionary
-     * @return the sequence record for contig
-     */
-    @Requires("index >= 0")
-    @Ensures("result != null")
-    public final SAMSequenceRecord getSequence(final int index) {
-        if ( isCached(index) )
-            return lastSSR;
-        else
-            return updateCache(null, index);
-    }
-
-    /**
-     * Same as SAMSequenceDictionary.getSequenceIndex but uses a MRU cache for efficiency
-     *
-     * @param contig the contig we want to get the sequence record of
-     * @throws ReviewedGATKException if index isn't present in the dictionary
-     * @return the sequence record index for contig
-     */
-    @Requires("contig != null")
-    @Ensures("result >= 0")
-    public final int getSequenceIndex(final String contig) {
-        if ( ! isCached(contig) ) {
-            updateCache(contig, -1);
-        }
-
-        return lastIndex;
-    }
-
-    /**
-     * Is contig the MRU cached contig?
-     * @param contig the contig to test
-     * @return true if contig is the currently cached contig, false otherwise
-     */
-    @Requires({"contig != null"})
-    protected boolean isCached(final String contig) {
-        return contig.equals(lastContig);
-    }
-
-    /**
-     * Is the contig index index the MRU cached index?
-     * @param index the contig index to test
-     * @return true if contig index is the currently cached contig index, false otherwise
-     */
-    protected boolean isCached(final int index) {
-        return lastIndex == index;
-    }
-
-    /**
-     * The key algorithm.  Given a new record, update the last used record, contig
-     * name, and index.
-     *
-     * @param contig the contig we want to look up.  If null, index is used instead
-     * @param index the contig index we want to look up.  Only used if contig is null
-     * @throws ReviewedGATKException if index isn't present in the dictionary
-     * @return the SAMSequenceRecord for contig / index
-     */
-    @Requires("contig != null || index >= 0")
-    @Ensures("result != null")
-    private SAMSequenceRecord updateCache(final String contig, int index ) {
-        SAMSequenceRecord rec = contig == null ? dict.getSequence(index) : dict.getSequence(contig);
-        if ( rec == null ) {
-            throw new ReviewedGATKException("BUG: requested unknown contig=" + contig + " index=" + index);
-        } else {
-            lastSSR = rec;
-            lastContig = rec.getSequenceName();
-            lastIndex = rec.getSequenceIndex();
-            return rec;
-        }
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/MannWhitneyU.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/MannWhitneyU.java
deleted file mode 100644
index a918c0a..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/MannWhitneyU.java
+++ /dev/null
@@ -1,508 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils;
-
-import cern.jet.math.Arithmetic;
-import cern.jet.random.Normal;
-import com.google.java.contract.Ensures;
-import com.google.java.contract.Requires;
-import org.apache.commons.math.MathException;
-import org.apache.commons.math.distribution.NormalDistribution;
-import org.apache.commons.math.distribution.NormalDistributionImpl;
-import org.broadinstitute.gatk.engine.GenomeAnalysisEngine;
-import org.broadinstitute.gatk.utils.collections.Pair;
-import org.broadinstitute.gatk.utils.exceptions.GATKException;
-
-import java.io.Serializable;
-import java.util.Comparator;
-import java.util.TreeSet;
-
-/**
- * Created by IntelliJ IDEA.
- * User: chartl
- */
-public class MannWhitneyU {
-
-    private static Normal STANDARD_NORMAL = new Normal(0.0,1.0,null);
-    private static NormalDistribution APACHE_NORMAL = new NormalDistributionImpl(0.0,1.0,1e-2);
-    private static double LNSQRT2PI = Math.log(Math.sqrt(2.0*Math.PI));
-
-    private TreeSet<Pair<Number,USet>> observations;
-    private int sizeSet1;
-    private int sizeSet2;
-    private ExactMode exactMode;
-
-    public MannWhitneyU(ExactMode mode, boolean dither) {
-        if ( dither )
-            observations = new TreeSet<Pair<Number,USet>>(new DitheringComparator());
-        else
-            observations = new TreeSet<Pair<Number,USet>>(new NumberedPairComparator());
-        sizeSet1 = 0;
-        sizeSet2 = 0;
-        exactMode = mode;
-    }
-
-    public MannWhitneyU() {
-        this(ExactMode.POINT,true);
-    }
-
-    public MannWhitneyU(boolean dither) {
-        this(ExactMode.POINT,dither);
-    }
-
-    public MannWhitneyU(ExactMode mode) {
-        this(mode,true);
-    }
-
-    /**
-     * Add an observation into the observation tree
-     * @param n: the observation (a number)
-     * @param set: whether the observation comes from set 1 or set 2
-     */
-    public void add(Number n, USet set) {
-        observations.add(new Pair<Number,USet>(n,set));
-        if ( set == USet.SET1 ) {
-            ++sizeSet1;
-        } else {
-            ++sizeSet2;
-        }
-    }
-
-    public Pair<Long,Long> getR1R2() {
-        long u1 = calculateOneSidedU(observations,MannWhitneyU.USet.SET1);
-        long n1 = sizeSet1*(sizeSet1+1)/2;
-        long r1 = u1 + n1;
-        long n2 = sizeSet2*(sizeSet2+1)/2;
-        long u2 = n1*n2-u1;
-        long r2 = u2 + n2;
-
-        return new Pair<Long,Long>(r1,r2);
-    }
-
-    /**
-     * Runs the one-sided test under the hypothesis that the data in set "lessThanOther" stochastically
-     * dominates the other set
-     * @param lessThanOther - either Set1 or Set2
-     * @return - u-based z-approximation, and p-value associated with the test (p-value is exact for small n,m)
-     */
-    @Requires({"lessThanOther != null"})
-    @Ensures({"validateObservations(observations) || Double.isNaN(result.getFirst())","result != null", "! Double.isInfinite(result.getFirst())", "! Double.isInfinite(result.getSecond())"})
-    public Pair<Double,Double> runOneSidedTest(USet lessThanOther) {
-        long u = calculateOneSidedU(observations, lessThanOther);
-        int n = lessThanOther == USet.SET1 ? sizeSet1 : sizeSet2;
-        int m = lessThanOther == USet.SET1 ? sizeSet2 : sizeSet1;
-        if ( n == 0 || m == 0 ) {
-            // test is uninformative as one or both sets have no observations
-            return new Pair<Double,Double>(Double.NaN,Double.NaN);
-        }
-
-        // the null hypothesis is that {N} is stochastically less than {M}, so U has counted
-        // occurrences of {M}s before {N}s. We would expect that this should be less than (n*m+1)/2 under
-        // the null hypothesis, so we want to integrate from K=0 to K=U for cumulative cases. Always.
-        return calculateP(n, m, u, false, exactMode);
-    }
-
-    /**
-     * Runs the standard two-sided test,
-     * returns the u-based z-approximate and p values.
-     * @return a pair holding the u and p-value.
-     */
-    @Ensures({"result != null", "! Double.isInfinite(result.getFirst())", "! Double.isInfinite(result.getSecond())"})
-    //@Requires({"validateObservations(observations)"})
-    public Pair<Double,Double> runTwoSidedTest() {
-        Pair<Long,USet> uPair = calculateTwoSidedU(observations);
-        long u = uPair.first;
-        int n = uPair.second == USet.SET1 ? sizeSet1 : sizeSet2;
-        int m = uPair.second == USet.SET1 ? sizeSet2 : sizeSet1;
-        if ( n == 0 || m == 0 ) {
-            // test is uninformative as one or both sets have no observations
-            return new Pair<Double,Double>(Double.NaN,Double.NaN);
-        }
-        return calculateP(n, m, u, true, exactMode);
-    }
-
-    /**
-     * Given a u statistic, calculate the p-value associated with it, dispatching to approximations where appropriate
-     * @param n - The number of entries in the stochastically smaller (dominant) set
-     * @param m - The number of entries in the stochastically larger (dominated) set
-     * @param u - the Mann-Whitney U value
-     * @param twoSided - is the test twosided
-     * @return the (possibly approximate) p-value associated with the MWU test, and the (possibly approximate) z-value associated with it
-     * todo -- there must be an approximation for small m and large n
-     */
-    @Requires({"m > 0","n > 0"})
-    @Ensures({"result != null", "! Double.isInfinite(result.getFirst())", "! Double.isInfinite(result.getSecond())"})
-    protected static Pair<Double,Double> calculateP(int n, int m, long u, boolean twoSided, ExactMode exactMode) {
-        Pair<Double,Double> zandP;
-        if ( n > 8 && m > 8 ) {
-            // large m and n - normal approx
-            zandP = calculatePNormalApproximation(n,m,u, twoSided);
-        } else if ( n > 5 && m > 7 ) {
-            // large m, small n - sum uniform approx
-            // todo -- find the appropriate regimes where this approximation is actually better enough to merit slowness
-            // pval = calculatePUniformApproximation(n,m,u);
-            zandP = calculatePNormalApproximation(n, m, u, twoSided);
-        } else if ( n > 8 || m > 8 ) {
-            zandP = calculatePFromTable(n, m, u, twoSided);
-        } else {
-            // small m and n - full approx
-            zandP = calculatePRecursively(n,m,u,twoSided,exactMode);
-        }
-
-        return zandP;
-    }
-
-    public static Pair<Double,Double> calculatePFromTable(int n, int m, long u, boolean twoSided) {
-        // todo -- actually use a table for:
-        // todo      - n large, m small
-        return calculatePNormalApproximation(n,m,u, twoSided);
-    }
-
-    /**
-     * Uses a normal approximation to the U statistic in order to return a cdf p-value. See Mann, Whitney [1947]
-     * @param n - The number of entries in the stochastically smaller (dominant) set
-     * @param m - The number of entries in the stochastically larger (dominated) set
-     * @param u - the Mann-Whitney U value
-     * @param twoSided - whether the test should be two sided
-     * @return p-value associated with the normal approximation
-     */
-    @Requires({"m > 0","n > 0"})
-    @Ensures({"result != null", "! Double.isInfinite(result.getFirst())", "! Double.isInfinite(result.getSecond())"})
-    public static Pair<Double,Double> calculatePNormalApproximation(int n,int m,long u, boolean twoSided) {
-        double z = getZApprox(n,m,u);
-        if ( twoSided ) {
-            return new Pair<Double,Double>(z,2.0*(z < 0 ? STANDARD_NORMAL.cdf(z) : 1.0-STANDARD_NORMAL.cdf(z)));
-        } else {
-            return new Pair<Double,Double>(z,STANDARD_NORMAL.cdf(z));
-        }
-    }
-
-    /**
-     * Calculates the Z-score approximation of the u-statistic
-     * @param n - The number of entries in the stochastically smaller (dominant) set
-     * @param m - The number of entries in the stochastically larger (dominated) set
-     * @param u - the Mann-Whitney U value
-     * @return the asymptotic z-approximation corresponding to the MWU p-value for n < m
-     */
-    @Requires({"m > 0","n > 0"})
-    @Ensures({"! Double.isNaN(result)", "! Double.isInfinite(result)"})
-    private static double getZApprox(int n, int m, long u) {
-        double mean = ( ((long)m)*n+1.0)/2;
-        double var = (((long) n)*m*(n+m+1.0))/12;
-        double z = ( u - mean )/Math.sqrt(var);
-        return z;
-    }
-
-    /**
-     * Uses a sum-of-uniform-0-1 random variable approximation to the U statistic in order to return an approximate
-     * p-value. See Buckle, Kraft, van Eeden [1969] (approx) and Billingsly [1995] or Stephens, MA [1966, biometrika] (sum of uniform CDF)
-     * @param n - The number of entries in the stochastically smaller (dominant) set
-     * @param m - The number of entries in the stochastically larger (dominated) set
-     * @param u - mann-whitney u value
-     * @return p-value according to sum of uniform approx
-     * todo -- this is currently not called due to not having a good characterization of where it is significantly more accurate than the
-     * todo -- normal approxmation (e.g. enough to merit the runtime hit)
-     */
-    public static double calculatePUniformApproximation(int n, int m, long u) {
-        long R = u + (n*(n+1))/2;
-        double a = Math.sqrt(m*(n+m+1));
-        double b = (n/2.0)*(1-Math.sqrt((n+m+1)/m));
-        double z = b + ((double)R)/a;
-        if ( z < 0 ) { return 1.0; }
-        else if ( z > n ) { return 0.0; }
-        else {
-            if ( z > ((double) n) /2 ) {
-                return 1.0-1/(Arithmetic.factorial(n))*uniformSumHelper(z, (int) Math.floor(z), n, 0);
-            } else {
-                return 1/(Arithmetic.factorial(n))*uniformSumHelper(z, (int) Math.floor(z), n, 0);
-            }
-        }
-    }
-
-    /**
-     * Helper function for the sum of n uniform random variables
-     * @param z - value at which to compute the (un-normalized) cdf
-     * @param m - a cutoff integer (defined by m <= z < m + 1)
-     * @param n - the number of uniform random variables
-     * @param k - holder variable for the recursion (alternatively, the index of the term in the sequence)
-     * @return the (un-normalized) cdf for the sum of n random variables
-     */
-    private static double uniformSumHelper(double z, int m, int n, int k) {
-        if ( k > m ) { return 0; }
-        int coef = (k % 2 == 0) ? 1 : -1;
-        return coef*Arithmetic.binomial(n,k)*Math.pow(z-k,n) + uniformSumHelper(z,m,n,k+1);
-    }
-
-    /**
-     * Calculates the U-statistic associated with a two-sided test (e.g. the RV from which one set is drawn
-     * stochastically dominates the RV from which the other set is drawn); two-sidedness is accounted for
-     * later on simply by multiplying the p-value by 2.
-     *
-     * Recall: If X stochastically dominates Y, the test is for occurrences of Y before X, so the lower value of u is chosen
-     * @param observed - the observed data
-     * @return the minimum of the U counts (set1 dominates 2, set 2 dominates 1)
-     */
-    @Requires({"observed != null", "observed.size() > 0"})
-    @Ensures({"result != null","result.first > 0"})
-    public static Pair<Long,USet> calculateTwoSidedU(TreeSet<Pair<Number,USet>> observed) {
-        int set1SeenSoFar = 0;
-        int set2SeenSoFar = 0;
-        long uSet1DomSet2 = 0;
-        long uSet2DomSet1 = 0;
-        USet previous = null;
-        for ( Pair<Number,USet> dataPoint : observed ) {
-
-            if ( dataPoint.second == USet.SET1 ) {
-                ++set1SeenSoFar;
-            } else {
-                ++set2SeenSoFar;
-            }
-
-            if ( previous != null ) {
-                if ( dataPoint.second == USet.SET1 ) {
-                    uSet2DomSet1 += set2SeenSoFar;
-                } else {
-                    uSet1DomSet2 += set1SeenSoFar;
-                }
-            }
-
-            previous = dataPoint.second;
-        }
-
-        return uSet1DomSet2 < uSet2DomSet1 ? new Pair<Long,USet>(uSet1DomSet2,USet.SET1) : new Pair<Long,USet>(uSet2DomSet1,USet.SET2);
-    }
-
-    /**
-     * Calculates the U-statistic associated with the one-sided hypothesis that "dominator" stochastically dominates
-     * the other U-set. Note that if S1 dominates S2, we want to count the occurrences of points in S2 coming before points in S1.
-     * @param observed - the observed data points, tagged by each set
-     * @param dominator - the set that is hypothesized to be stochastically dominating
-     * @return the u-statistic associated with the hypothesis that dominator stochastically dominates the other set
-     */
-    @Requires({"observed != null","dominator != null","observed.size() > 0"})
-    @Ensures({"result >= 0"})
-    public static long calculateOneSidedU(TreeSet<Pair<Number,USet>> observed,USet dominator) {
-        long otherBeforeDominator = 0l;
-        int otherSeenSoFar = 0;
-        for ( Pair<Number,USet> dataPoint : observed ) {
-            if ( dataPoint.second != dominator ) {
-                ++otherSeenSoFar;
-            } else {
-                otherBeforeDominator += otherSeenSoFar;
-            }
-        }
-
-        return otherBeforeDominator;
-    }
-
-    /**
-     * The Mann-Whitney U statistic follows a recursive equation (that enumerates the proportion of possible
-     * binary strings of "n" zeros, and "m" ones, where a one precedes a zero "u" times). This accessor
-     * calls into that recursive calculation.
-     * @param n: number of set-one entries (hypothesis: set one is stochastically less than set two)
-     * @param m: number of set-two entries
-     * @param u: number of set-two entries that precede set-one entries (e.g. 0,1,0,1,0 -> 3 )
-     * @param twoSided: whether the test is two sided or not. The recursive formula is symmetric, multiply by two for two-sidedness.
-     * @param  mode: whether the mode is a point probability, or a cumulative distribution
-     * @return the probability under the hypothesis that all sequences are equally likely of finding a set-two entry preceding a set-one entry "u" times.
-     */
-    @Requires({"m > 0","n > 0","u >= 0"})
-    @Ensures({"result != null","! Double.isInfinite(result.getFirst())", "! Double.isInfinite(result.getSecond())"})
-    public static Pair<Double,Double> calculatePRecursively(int n, int m, long u, boolean twoSided, ExactMode mode) {
-        if ( m > 8 && n > 5 ) { throw new GATKException(String.format("Please use the appropriate (normal or sum of uniform) approximation. Values n: %d, m: %d",n,m)); }
-        double p = mode == ExactMode.POINT ? cpr(n,m,u) : cumulativeCPR(n,m,u);
-        //p *= twoSided ? 2.0 : 1.0;
-        double z;
-        try {
-
-            if ( mode == ExactMode.CUMULATIVE ) {
-                z = APACHE_NORMAL.inverseCumulativeProbability(p);
-            } else {
-                double sd = Math.sqrt((1.0+1.0/(1+n+m))*(n*m)*(1.0+n+m)/12); // biased variance empirically better fit to distribution then asymptotic variance
-                //System.out.printf("SD is %f and Max is %f and prob is %f%n",sd,1.0/Math.sqrt(sd*sd*2.0*Math.PI),p);
-                if ( p > 1.0/Math.sqrt(sd*sd*2.0*Math.PI) ) { // possible for p-value to be outside the range of the normal. Happens at the mean, so z is 0.
-                    z = 0.0;
-                } else {
-                    if ( u >= n*m/2 ) {
-                        z = Math.sqrt(-2.0*(Math.log(sd)+Math.log(p)+LNSQRT2PI));
-                    } else {
-                        z = -Math.sqrt(-2.0*(Math.log(sd)+Math.log(p)+LNSQRT2PI));
-                    }
-                }
-            }
-
-        } catch (MathException me) {
-            throw new GATKException("A math exception occurred in inverting the probability",me);
-        }
-
-        return new Pair<Double,Double>(z,(twoSided ? 2.0*p : p));
-    }
-
-    /**
-     * Hook into CPR with sufficient warning (for testing purposes)
-     * calls into that recursive calculation.
-     * @param n: number of set-one entries (hypothesis: set one is stochastically less than set two)
-     * @param m: number of set-two entries
-     * @param u: number of set-two entries that precede set-one entries (e.g. 0,1,0,1,0 -> 3 )
-     * @return same as cpr
-     */
-    protected static double calculatePRecursivelyDoNotCheckValuesEvenThoughItIsSlow(int n, int m, long u) {
-        return cpr(n,m,u);
-    }
-
-    /**
-     * For testing
-     *
-     * @param n: number of set-one entries (hypothesis: set one is stochastically less than set two)
-     * @param m: number of set-two entries
-     * @param u: number of set-two entries that precede set-one entries (e.g. 0,1,0,1,0 -> 3 )
-     */
-    protected static long countSequences(int n, int m, long u) {
-        if ( u < 0 ) { return 0; }
-        if ( m == 0 || n == 0 ) { return u == 0 ? 1 : 0; }
-
-        return countSequences(n-1,m,u-m) + countSequences(n,m-1,u);
-    }
-
-    /**
-     * : just a shorter name for calculatePRecursively. See Mann, Whitney, [1947]
-     * @param n: number of set-1 entries
-     * @param m: number of set-2 entries
-     * @param u: number of times a set-2 entry as preceded a set-1 entry
-     * @return recursive p-value
-     */
-    private static double cpr(int n, int m, long u) {
-        if ( u < 0 ) {
-            return 0.0;
-        }
-        if ( m == 0 || n == 0 ) {
-            // there are entries in set 1 or set 2, so no set-2 entry can precede a set-1 entry; thus u must be zero.
-            // note that this exists only for edification, as when we reach this point, the coefficient on this term is zero anyway
-            return ( u == 0 ) ? 1.0 : 0.0;
-        }
-
-
-        return (((double)n)/(n+m))*cpr(n-1,m,u-m) + (((double)m)/(n+m))*cpr(n,m-1,u);
-    }
-
-    private static double cumulativeCPR(int n, int m, long u ) {
-        // from above:
-        // the null hypothesis is that {N} is stochastically less than {M}, so U has counted
-        // occurrences of {M}s before {N}s. We would expect that this should be less than (n*m+1)/2 under
-        // the null hypothesis, so we want to integrate from K=0 to K=U for cumulative cases. Always.
-        double p = 0.0;
-        // optimization using symmetry, use the least amount of sums possible
-        long uSym = ( u <= n*m/2 ) ? u : ((long)n)*m-u;
-        for ( long uu = 0; uu < uSym; uu++ ) {
-            p += cpr(n,m,uu);
-        }
-        // correct by 1.0-p if the optimization above was used (e.g. 1-right tail = left tail)
-        return (u <= n*m/2) ? p : 1.0-p;
-    }
-
-    /**
-     * hook into the data tree, for testing purposes only
-     * @return  observations
-     */
-    protected TreeSet<Pair<Number,USet>> getObservations() {
-        return observations;
-    }
-
-    /**
-     * hook into the set sizes, for testing purposes only
-     * @return size set 1, size set 2
-     */
-    protected Pair<Integer,Integer> getSetSizes() {
-        return new Pair<Integer,Integer>(sizeSet1,sizeSet2);
-    }
-
-    /**
-     * Validates that observations are in the correct format for a MWU test -- this is only called by the contracts API during testing
-     * @param tree - the collection of labeled observations
-     * @return true iff the tree set is valid (no INFs or NaNs, at least one data point in each set)
-     */
-    protected static boolean validateObservations(TreeSet<Pair<Number,USet>> tree) {
-        boolean seen1 = false;
-        boolean seen2 = false;
-        boolean seenInvalid = false;
-        for ( Pair<Number,USet> p : tree) {
-            if ( ! seen1 && p.getSecond() == USet.SET1 ) {
-                seen1 = true;
-            }
-
-            if ( ! seen2 && p.getSecond() == USet.SET2 ) {
-                seen2 = true;
-            }
-
-            if ( Double.isNaN(p.getFirst().doubleValue()) || Double.isInfinite(p.getFirst().doubleValue())) {
-                seenInvalid = true;
-            }
-
-        }
-
-            return ! seenInvalid && seen1 && seen2;
-    }
-
-    /**
-     * A comparator class which uses dithering on tie-breaking to ensure that the internal treeset drops no values
-     * and to ensure that rank ties are broken at random.
-     */
-    private static class DitheringComparator implements Comparator<Pair<Number,USet>>, Serializable {
-
-        public DitheringComparator() {}
-
-        @Override
-        public boolean equals(Object other) { return false; }
-
-        @Override
-        public int compare(Pair<Number,USet> left, Pair<Number,USet> right) {
-            double comp = Double.compare(left.first.doubleValue(),right.first.doubleValue());
-            if ( comp > 0 ) { return 1; }
-            if ( comp < 0 ) { return -1; }
-            return GenomeAnalysisEngine.getRandomGenerator().nextBoolean() ? -1 : 1;
-        }
-    }
-
-    /**
-     * A comparator that reaches into the pair and compares numbers without tie-braking.
-     */
-    private static class NumberedPairComparator implements Comparator<Pair<Number,USet>>, Serializable {
-
-        public NumberedPairComparator() {}
-
-        @Override
-        public boolean equals(Object other) { return false; }
-
-        @Override
-        public int compare(Pair<Number,USet> left, Pair<Number,USet> right ) {
-            return Double.compare(left.first.doubleValue(),right.first.doubleValue());
-        }
-    }
-
-    public enum USet { SET1, SET2 }
-    public enum ExactMode { POINT, CUMULATIVE }
-
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/MathUtils.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/MathUtils.java
deleted file mode 100644
index 01aa133..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/MathUtils.java
+++ /dev/null
@@ -1,1690 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils;
-
-import com.google.java.contract.Ensures;
-import com.google.java.contract.Requires;
-import org.apache.commons.math.distribution.ExponentialDistribution;
-import org.apache.commons.math.distribution.ExponentialDistributionImpl;
-import org.broadinstitute.gatk.engine.GenomeAnalysisEngine;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-
-import java.math.BigDecimal;
-import java.util.*;
-
-/**
- * MathUtils is a static class (no instantiation allowed!) with some useful math methods.
- *
- * @author Kiran Garimella
- */
-public class MathUtils {
-
-    /**
-     * Private constructor.  No instantiating this class!
-     */
-    private MathUtils() {
-    }
-
-    /**
-     * The smallest log10 value we'll emit from normalizeFromLog10 and other functions
-     * where the real-space value is 0.0.
-     */
-    public static final double LOG10_P_OF_ZERO = -1000000.0;
-    public static final double FAIR_BINOMIAL_PROB_LOG10_0_5 = Math.log10(0.5);
-    public static final double LOG_ONE_HALF = -Math.log10(2.0);
-    public static final double LOG_ONE_THIRD = -Math.log10(3.0);
-    private static final double NATURAL_LOG_OF_TEN = Math.log(10.0);
-    private static final double SQUARE_ROOT_OF_TWO_TIMES_PI = Math.sqrt(2.0 * Math.PI);
-
-    /**
-     * A helper class to maintain a cache of log10 values
-     */
-    public static class Log10Cache {
-        /**
-         * Get the value of log10(n), expanding the cache as necessary
-         * @param n operand
-         * @return log10(n)
-         */
-        public static double get(final int n) {
-            if (n < 0)
-                throw new ReviewedGATKException(String.format("Can't take the log of a negative number: %d", n));
-            if (n >= cache.length)
-                ensureCacheContains(Math.max(n+10, 2*cache.length));
-            /*
-               Array lookups are not atomic.  It's possible that the reference to cache could be
-               changed between the time the reference is loaded and the data is fetched from the correct
-               offset.  However, the value retrieved can't change, and it's guaranteed to be present in the
-               old reference by the conditional above.
-             */
-            return cache[n];
-        }
-
-        /**
-         * Ensures that the cache contains a value for n.  After completion of ensureCacheContains(n),
-         * #get(n) is guaranteed to return without causing a cache expansion
-         * @param n desired value to be precomputed
-         */
-        public static synchronized void ensureCacheContains(final int n) {
-            if (n < cache.length)
-                return;
-            final double[] newCache = new double[n + 1];
-            System.arraycopy(cache, 0, newCache, 0, cache.length);
-            for (int i=cache.length; i < newCache.length; i++)
-                newCache[i] = Math.log10(i);
-            cache = newCache;
-        }
-
-        //initialize with the special case: log10(0) = NEGATIVE_INFINITY
-        private static double[] cache = new double[] { Double.NEGATIVE_INFINITY };
-    }
-
-    /**
-     * Get a random int between min and max (inclusive) using the global GATK random number generator
-     *
-     * @param min lower bound of the range
-     * @param max upper bound of the range
-     * @return a random int >= min and <= max
-     */
-    public static int randomIntegerInRange( final int min, final int max ) {
-        return GenomeAnalysisEngine.getRandomGenerator().nextInt(max - min + 1) + min;
-    }
-
-    /**
-     * Encapsulates the second term of Jacobian log identity for differences up to MAX_TOLERANCE
-     */
-    private static class JacobianLogTable {
-
-        public static final double MAX_TOLERANCE = 8.0;
-
-        public static double get(final double difference) {
-            if (cache == null)
-                initialize();
-            final int index = fastRound(difference * INV_STEP);
-            return cache[index];
-        }
-
-        private static synchronized void initialize() {
-            if (cache == null) {
-                final int tableSize = (int) (MAX_TOLERANCE / TABLE_STEP) + 1;
-                cache = new double[tableSize];
-                for (int k = 0; k < cache.length; k++)
-                    cache[k] = Math.log10(1.0 + Math.pow(10.0, -((double) k) * TABLE_STEP));
-            }
-        }
-
-        private static final double TABLE_STEP = 0.0001;
-        private static final double INV_STEP = 1.0 / TABLE_STEP;
-        private static double[] cache = null;
-    }
-
-    // A fast implementation of the Math.round() method.  This method does not perform
-    // under/overflow checking, so this shouldn't be used in the general case (but is fine
-    // if one is already make those checks before calling in to the rounding).
-    public static int fastRound(final double d) {
-        return (d > 0.0) ? (int) (d + 0.5d) : (int) (d - 0.5d);
-    }
-
-    public static double approximateLog10SumLog10(final double[] vals) {
-        return approximateLog10SumLog10(vals, vals.length);
-    }
-
-    /**
-     * Calculate the approximate log10 sum of an array range.
-     * @param vals the input values.
-     * @param fromIndex the first inclusive index in the input array.
-     * @param toIndex index following the last element to sum in the input array (exclusive).
-     * @return the approximate sum.
-     * @throws IllegalArgumentException if {@code vals} is {@code null} or  {@code fromIndex} is out of bounds
-     * or if {@code toIndex} is larger than
-     * the length of the input array or {@code fromIndex} is larger than {@code toIndex}.
-     */
-    public static double approximateLog10SumLog10(final double[] vals, final int fromIndex, final int toIndex) {
-        if (fromIndex == toIndex) return Double.NEGATIVE_INFINITY;
-        final int maxElementIndex = MathUtils.maxElementIndex(vals,fromIndex,toIndex);
-        double approxSum = vals[maxElementIndex];
-
-        for (int i = fromIndex; i < toIndex; i++) {
-            final double val;
-            if (i == maxElementIndex || (val = vals[i]) == Double.NEGATIVE_INFINITY)
-                continue;
-            final double diff = approxSum - val;
-            if (diff < JacobianLogTable.MAX_TOLERANCE)
-                approxSum += JacobianLogTable.get(diff);
-        }
-        return approxSum;
-    }
-
-    public static double approximateLog10SumLog10(final double[] vals, final int endIndex) {
-
-        final int maxElementIndex = MathUtils.maxElementIndex(vals, endIndex);
-        double approxSum = vals[maxElementIndex];
-
-        for (int i = 0; i < endIndex; i++) {
-            if (i == maxElementIndex || vals[i] == Double.NEGATIVE_INFINITY)
-                continue;
-
-            final double diff = approxSum - vals[i];
-            if (diff < JacobianLogTable.MAX_TOLERANCE) {
-                // See notes from the 2-inout implementation below
-                approxSum += JacobianLogTable.get(diff);
-            }
-        }
-
-        return approxSum;
-    }
-
-    public static double approximateLog10SumLog10(final double a, final double b, final double c) {
-        return approximateLog10SumLog10(a, approximateLog10SumLog10(b, c));
-    }
-
-    public static double approximateLog10SumLog10(double small, double big) {
-        // make sure small is really the smaller value
-        if (small > big) {
-            final double t = big;
-            big = small;
-            small = t;
-        }
-
-        if (small == Double.NEGATIVE_INFINITY || big == Double.NEGATIVE_INFINITY)
-            return big;
-
-        final double diff = big - small;
-        if (diff >= JacobianLogTable.MAX_TOLERANCE)
-            return big;
-
-        // OK, so |y-x| < tol: we use the following identity then:
-        // we need to compute log10(10^x + 10^y)
-        // By Jacobian logarithm identity, this is equal to
-        // max(x,y) + log10(1+10^-abs(x-y))
-        // we compute the second term as a table lookup with integer quantization
-        // we have pre-stored correction for 0,0.1,0.2,... 10.0
-        return big + JacobianLogTable.get(diff);
-    }
-
-    public static double sum(final double[] values) {
-        double s = 0.0;
-        for (double v : values)
-            s += v;
-        return s;
-    }
-
-    public static long sum(final int[] x) {
-        long total = 0;
-        for (int v : x)
-            total += v;
-        return total;
-    }
-
-    public static int sum(final byte[] x) {
-        int total = 0;
-        for (byte v : x)
-            total += (int)v;
-        return total;
-    }
-
-    public static double percentage(int x, int base) {
-        return (base > 0 ? ((double) x / (double) base) * 100.0 : 0);
-    }
-
-    public static double ratio(final int num, final int denom) {
-        if ( denom > 0 ) {
-            return ((double) num)/denom;
-        } else {
-            if ( num == 0 && denom == 0) {
-                return 0.0;
-            } else {
-                throw new ReviewedGATKException(String.format("The denominator of a ratio cannot be zero or less than zero: %d/%d",num,denom));
-            }
-        }
-    }
-
-    public static double ratio(final long num, final long denom) {
-        if ( denom > 0L ) {
-            return ((double) num)/denom;
-        } else {
-            if ( num == 0L && denom == 0L ) {
-                return 0.0;
-            } else {
-                throw new ReviewedGATKException(String.format("The denominator of a ratio cannot be zero or less than zero: %d/%d",num,denom));
-            }
-        }
-    }
-
-    /**
-     * Converts a real space array of numbers (typically probabilities) into a log10 array
-     *
-     * @param prRealSpace
-     * @return
-     */
-    public static double[] toLog10(final double[] prRealSpace) {
-        double[] log10s = new double[prRealSpace.length];
-        for (int i = 0; i < prRealSpace.length; i++) {
-            log10s[i] = Math.log10(prRealSpace[i]);
-        }
-        return log10s;
-    }
-
-    public static double log10sumLog10(final double[] log10p, final int start) {
-        return log10sumLog10(log10p, start, log10p.length);
-    }
-
-    public static double log10sumLog10(final double[] log10p, final int start, final int finish) {
-
-        if (start >= finish)
-            return Double.NEGATIVE_INFINITY;
-        final int maxElementIndex = MathUtils.maxElementIndex(log10p, start, finish);
-        final double maxValue = log10p[maxElementIndex];
-        if(maxValue == Double.NEGATIVE_INFINITY)
-            return maxValue;
-        double sum = 1.0;
-        for (int i = start; i < finish; i++) {
-            double curVal = log10p[i];
-            double scaled_val = curVal - maxValue;
-            if (i == maxElementIndex || curVal == Double.NEGATIVE_INFINITY) {
-                continue;
-            }
-            else {
-                sum += Math.pow(10.0, scaled_val);
-            }
-        }
-        if ( Double.isNaN(sum) || sum == Double.POSITIVE_INFINITY ) {
-            throw new IllegalArgumentException("log10p: Values must be non-infinite and non-NAN");
-        }
-        return maxValue + (sum != 1.0 ? Math.log10(sum) : 0.0);
-    }
-
-    public static double sumLog10(final double[] log10values) {
-        return Math.pow(10.0, log10sumLog10(log10values));
-    }
-
-    public static double log10sumLog10(final double[] log10values) {
-        return log10sumLog10(log10values, 0);
-    }
-
-    public static boolean wellFormedDouble(final double val) {
-        return !Double.isInfinite(val) && !Double.isNaN(val);
-    }
-
-    public static double bound(final double value, final double minBoundary, final double maxBoundary) {
-        return Math.max(Math.min(value, maxBoundary), minBoundary);
-    }
-
-    public static boolean isBounded(final double val, final double lower, final double upper) {
-        return val >= lower && val <= upper;
-    }
-
-    public static boolean isPositive(final double val) {
-        return !isNegativeOrZero(val);
-    }
-
-    public static boolean isPositiveOrZero(final double val) {
-        return isBounded(val, 0.0, Double.POSITIVE_INFINITY);
-    }
-
-    public static boolean isNegativeOrZero(final double val) {
-        return isBounded(val, Double.NEGATIVE_INFINITY, 0.0);
-    }
-
-    public static boolean isNegative(final double val) {
-        return !isPositiveOrZero(val);
-    }
-
-    /**
-     * Compares double values for equality (within 1e-6), or inequality.
-     *
-     * @param a the first double value
-     * @param b the second double value
-     * @return -1 if a is greater than b, 0 if a is equal to be within 1e-6, 1 if b is greater than a.
-     */
-    public static byte compareDoubles(final double a, final double b) {
-        return compareDoubles(a, b, 1e-6);
-    }
-
-    /**
-     * Compares double values for equality (within epsilon), or inequality.
-     *
-     * @param a       the first double value
-     * @param b       the second double value
-     * @param epsilon the precision within which two double values will be considered equal
-     * @return -1 if a is greater than b, 0 if a is equal to be within epsilon, 1 if b is greater than a.
-     */
-    public static byte compareDoubles(final double a, final double b, final double epsilon) {
-        if (Math.abs(a - b) < epsilon) {
-            return 0;
-        }
-        if (a > b) {
-            return -1;
-        }
-        return 1;
-    }
-
-    /**
-     * Calculate f(x) = Normal(x | mu = mean, sigma = sd)
-     * @param mean the desired mean of the Normal distribution
-     * @param sd the desired standard deviation of the Normal distribution
-     * @param x the value to evaluate
-     * @return a well-formed double
-     */
-    public static double normalDistribution(final double mean, final double sd, final double x) {
-        if( sd < 0 )
-            throw new IllegalArgumentException("sd: Standard deviation of normal must be >0");
-        if ( ! wellFormedDouble(mean) || ! wellFormedDouble(sd) || ! wellFormedDouble(x) )
-            throw new IllegalArgumentException("mean, sd, or, x : Normal parameters must be well formatted (non-INF, non-NAN)");
-        double a = 1.0 / (sd * Math.sqrt(2.0 * Math.PI));
-        double b = Math.exp(-1.0 * (Math.pow(x - mean, 2.0) / (2.0 * sd * sd)));
-        return a * b;
-    }
-
-    /**
-     * Calculate f(x) = log10 ( Normal(x | mu = mean, sigma = sd) )
-     * @param mean the desired mean of the Normal distribution
-     * @param sd the desired standard deviation of the Normal distribution
-     * @param x the value to evaluate
-     * @return a well-formed double
-     */
-
-    public static double normalDistributionLog10(final double mean, final double sd, final double x) {
-        if( sd < 0 )
-            throw new IllegalArgumentException("sd: Standard deviation of normal must be >0");
-        if ( ! wellFormedDouble(mean) || ! wellFormedDouble(sd) || ! wellFormedDouble(x) )
-            throw new IllegalArgumentException("mean, sd, or, x : Normal parameters must be well formatted (non-INF, non-NAN)");
-        final double a = -1.0 * Math.log10(sd * SQUARE_ROOT_OF_TWO_TIMES_PI);
-        final double b = -1.0 * (square(x - mean) / (2.0 * square(sd))) / NATURAL_LOG_OF_TEN;
-        return a + b;
-    }
-
-    /**
-     * Calculate f(x) = x^2
-     * @param x the value to square
-     * @return x * x
-     */
-    public static double square(final double x) {
-        return x * x;
-    }
-
-    /**
-     * Calculates the log10 of the binomial coefficient. Designed to prevent
-     * overflows even with very large numbers.
-     *
-     * @param n total number of trials
-     * @param k number of successes
-     * @return the log10 of the binomial coefficient
-     */
-    public static double binomialCoefficient(final int n, final int k) {
-        return Math.pow(10, log10BinomialCoefficient(n, k));
-    }
-
-    /**
-     * @see #binomialCoefficient(int, int) with log10 applied to result
-     */
-    public static double log10BinomialCoefficient(final int n, final int k) {
-        if ( n < 0 ) {
-            throw new IllegalArgumentException("n: Must have non-negative number of trials");
-        }
-        if ( k > n || k < 0 ) {
-            throw new IllegalArgumentException("k: Must have non-negative number of successes, and no more successes than number of trials");
-        }
-
-        return log10Factorial(n) - log10Factorial(k) - log10Factorial(n - k);
-    }
-
-    /**
-     * Computes a binomial probability.  This is computed using the formula
-     * <p/>
-     * B(k; n; p) = [ n! / ( k! (n - k)! ) ] (p^k)( (1-p)^k )
-     * <p/>
-     * where n is the number of trials, k is the number of successes, and p is the probability of success
-     *
-     * @param n number of Bernoulli trials
-     * @param k number of successes
-     * @param p probability of success
-     * @return the binomial probability of the specified configuration.  Computes values down to about 1e-237.
-     */
-    public static double binomialProbability(final int n, final int k, final double p) {
-        return Math.pow(10, log10BinomialProbability(n, k, Math.log10(p)));
-    }
-
-    /**
-     * @see #binomialProbability(int, int, double) with log10 applied to result
-     */
-    public static double log10BinomialProbability(final int n, final int k, final double log10p) {
-        if ( log10p > 1e-18 )
-            throw new IllegalArgumentException("log10p: Log-probability must be 0 or less");
-        double log10OneMinusP = Math.log10(1 - Math.pow(10, log10p));
-        return log10BinomialCoefficient(n, k) + log10p * k + log10OneMinusP * (n - k);
-    }
-
-    /**
-     * @see #binomialProbability(int, int, double) with p=0.5
-     */
-    public static double binomialProbability(final int n, final int k) {
-        return Math.pow(10, log10BinomialProbability(n, k));
-    }
-
-    /**
-     * @see #binomialProbability(int, int, double) with p=0.5 and log10 applied to result
-     */
-    public static double log10BinomialProbability(final int n, final int k) {
-        return log10BinomialCoefficient(n, k) + (n * FAIR_BINOMIAL_PROB_LOG10_0_5);
-    }
-
    /**
     * A memoization cache for {@link #binomialCumulativeProbability(int, int, int)}, keyed by the
     * packed (n, k_start, k_end) triplet.  Bounded to 10,000 entries with LRU eviction and wrapped
     * in a synchronized view to accommodate multithreading.
     */
    private static final Map<Long, Double> BINOMIAL_CUMULATIVE_PROBABILITY_MEMOIZATION_CACHE = 
            Collections.synchronizedMap(new LRUCache<Long, Double>(10_000)); 
-    
-    /**
-     * Primitive integer-triplet bijection into long.  Returns null when the bijection function fails (in lieu of an exception), which will
-     * happen when: any value is negative or larger than a short.  This method is optimized for speed; it is not intended to serve as a 
-     * utility function.
-     */
-    static Long fastGenerateUniqueHashFromThreeIntegers(final int one, final int two, final int three) {
-        if (one < 0 || two < 0 || three < 0 || Short.MAX_VALUE < one || Short.MAX_VALUE < two || Short.MAX_VALUE < three) {
-            return null;
-        } else {
-            long result = 0;
-            result += (short) one;
-            result <<= 16;
-            result += (short) two;
-            result <<= 16;
-            result += (short) three;
-            return result;
-        }
-    }
-    
    /**
     * Performs the cumulative sum of binomial probabilities, where the probability calculation is done in log space.
     * Assumes that the probability of a successful hit is fair (i.e. 0.5).
     * 
     * This pure function is memoized because of its expensive BigDecimal calculations.
     *
     * @param n         number of attempts for the number of hits
     * @param k_start   start (inclusive) of the cumulant sum (over hits)
     * @param k_end     end (inclusive) of the cumulant sum (over hits)
     * @return - returns the cumulative probability
     */
    public static double binomialCumulativeProbability(final int n, final int k_start, final int k_end) {
        if ( k_end > n )
            throw new IllegalArgumentException(String.format("Value for k_end (%d) is greater than n (%d)", k_end, n));

        // Fetch cached value, if applicable.  The key packs (n, k_start, k_end) into one long and is
        // null (memoization skipped) when any of the three falls outside the short range.
        final Long memoizationKey = fastGenerateUniqueHashFromThreeIntegers(n, k_start, k_end);
        final Double memoizationCacheResult;
        if (memoizationKey != null) {
            memoizationCacheResult = BINOMIAL_CUMULATIVE_PROBABILITY_MEMOIZATION_CACHE.get(memoizationKey);
        } else {
            memoizationCacheResult = null;
        }

        final double result;
        if (memoizationCacheResult != null) {
            result = memoizationCacheResult;
        } else {
            double cumProb = 0.0;
            double prevProb;
            BigDecimal probCache = BigDecimal.ZERO;

            for (int hits = k_start; hits <= k_end; hits++) {
                prevProb = cumProb;
                final double probability = binomialProbability(n, hits);
                cumProb += probability;
                // When the double accumulator is so large that adding this term loses more than
                // half of the term's value to rounding, bank the accumulated sum into the exact
                // BigDecimal total, reset the double accumulator, and retry the same term.
                if (probability > 0 && cumProb - prevProb < probability / 2) { // loss of precision
                    probCache = probCache.add(new BigDecimal(prevProb));
                    cumProb = 0.0;
                    hits--; // repeat loop
                    // prevProb changes at start of loop
                }
            }

            result = probCache.add(new BigDecimal(cumProb)).doubleValue();
            if (memoizationKey != null) {
                BINOMIAL_CUMULATIVE_PROBABILITY_MEMOIZATION_CACHE.put(memoizationKey, result);
            }
        }
        return result;
    }
-
    // Branch point for log1mexp (Maechler 2012): below log(0.5) use log1p(-exp(a)),
    // at or above it use log(-expm1(a)) — whichever is numerically stable.
    private static final double LOG1MEXP_THRESHOLD = Math.log(0.5);

    // Natural log of 10, used to convert between natural-log and log10 space.
    private static final double LN_10 = Math.log(10);
-
-    /**
-     * Calculates {@code log(1-exp(a))} without loosing precision.
-     *
-     * <p>
-     *     This is based on the approach described in:
-     *
-     * </p>
-     * <p>
-     *     Maechler M, Accurately Computing log(1-exp(-|a|)) Assessed by the Rmpfr package, 2012 <br/>
-     *     <a ref="http://cran.r-project.org/web/packages/Rmpfr/vignettes/log1mexp-note.pdf">Online document</a>.
-     *
-     * </p>
-     *
-     * @param a the input exponent.
-     * @return {@link Double#NaN NaN} if {@code a > 0}, otherwise the corresponding value.
-     */
-    public static double log1mexp(final double a) {
-        if (a > 0) return Double.NaN;
-        if (a == 0) return Double.NEGATIVE_INFINITY;
-
-        return (a < LOG1MEXP_THRESHOLD) ? Math.log1p(-Math.exp(a)) : Math.log(-Math.expm1(a));
-    }
-
-    /**
-     * Calculates {@code log10(1-10^a)} without loosing precision.
-     *
-     * <p>
-     *     This is based on the approach described in:
-     *
-     * </p>
-     * <p>
-     *     Maechler M, Accurately Computing log(1-exp(-|a|)) Assessed by the Rmpfr package, 2012 <br/>
-     *     <a ref="http://cran.r-project.org/web/packages/Rmpfr/vignettes/log1mexp-note.pdf">Online document</a>.
-     * </p>
-     *
-     * @param a the input exponent.
-     * @return {@link Double#NaN NaN} if {@code a > 0}, otherwise the corresponding value.
-     */
-    public static double log10OneMinusPow10(final double a) {
-        if (a > 0) return Double.NaN;
-        if (a == 0) return Double.NEGATIVE_INFINITY;
-        final double b = a * LN_10;
-        return log1mexp(b) / LN_10;
-    }
-
-    /**
-     * Calculates the log10 of the multinomial coefficient. Designed to prevent
-     * overflows even with very large numbers.
-     *
-     * @param n total number of trials
-     * @param k array of any size with the number of successes for each grouping (k1, k2, k3, ..., km)
-     * @return {@link Double#NaN NaN} if {@code a > 0}, otherwise the corresponding value.
-     */
-    public static double log10MultinomialCoefficient(final int n, final int[] k) {
-        if ( n < 0 )
-            throw new IllegalArgumentException("n: Must have non-negative number of trials");
-        double denominator = 0.0;
-        int sum = 0;
-        for (int x : k) {
-            if ( x < 0 )
-                throw new IllegalArgumentException("x element of k: Must have non-negative observations of group");
-            if ( x > n )
-                throw new IllegalArgumentException("x element of k, n: Group observations must be bounded by k");
-            denominator += log10Factorial(x);
-            sum += x;
-        }
-        if ( sum != n )
-            throw new IllegalArgumentException("k and n: Sum of observations in multinomial must sum to total number of trials");
-        return log10Factorial(n) - denominator;
-    }
-
-    /**
-     * Computes the log10 of the multinomial distribution probability given a vector
-     * of log10 probabilities. Designed to prevent overflows even with very large numbers.
-     *
-     * @param n      number of trials
-     * @param k      array of number of successes for each possibility
-     * @param log10p array of log10 probabilities
-     * @return
-     */
-    public static double log10MultinomialProbability(final int n, final int[] k, final double[] log10p) {
-        if (log10p.length != k.length)
-            throw new IllegalArgumentException("p and k: Array of log10 probabilities must have the same size as the array of number of sucesses: " + log10p.length + ", " + k.length);
-        double log10Prod = 0.0;
-        for (int i = 0; i < log10p.length; i++) {
-            if ( log10p[i] > 1e-18 )
-                throw new IllegalArgumentException("log10p: Log-probability must be <= 0");
-            log10Prod += log10p[i] * k[i];
-        }
-        return log10MultinomialCoefficient(n, k) + log10Prod;
-    }
-
-    /**
-     * Computes a multinomial coefficient efficiently avoiding overflow even for large numbers.
-     * This is computed using the formula:
-     * <p/>
-     * M(x1,x2,...,xk; n) = [ n! / (x1! x2! ... xk!) ]
-     * <p/>
-     * where xi represents the number of times outcome i was observed, n is the number of total observations.
-     * In this implementation, the value of n is inferred as the sum over i of xi.
-     *
-     * @param k an int[] of counts, where each element represents the number of times a certain outcome was observed
-     * @return the multinomial of the specified configuration.
-     */
-    public static double multinomialCoefficient(final int[] k) {
-        int n = 0;
-        for (int xi : k) {
-            n += xi;
-        }
-
-        return Math.pow(10, log10MultinomialCoefficient(n, k));
-    }
-
-    /**
-     * Computes a multinomial probability efficiently avoiding overflow even for large numbers.
-     * This is computed using the formula:
-     * <p/>
-     * M(x1,x2,...,xk; n; p1,p2,...,pk) = [ n! / (x1! x2! ... xk!) ] (p1^x1)(p2^x2)(...)(pk^xk)
-     * <p/>
-     * where xi represents the number of times outcome i was observed, n is the number of total observations, and
-     * pi represents the probability of the i-th outcome to occur.  In this implementation, the value of n is
-     * inferred as the sum over i of xi.
-     *
-     * @param k an int[] of counts, where each element represents the number of times a certain outcome was observed
-     * @param p a double[] of probabilities, where each element represents the probability a given outcome can occur
-     * @return the multinomial probability of the specified configuration.
-     */
-    public static double multinomialProbability(final int[] k, final double[] p) {
-        if (p.length != k.length)
-            throw new IllegalArgumentException("p and k: Array of log10 probabilities must have the same size as the array of number of sucesses: " + p.length + ", " + k.length);
-
-        int n = 0;
-        double[] log10P = new double[p.length];
-        for (int i = 0; i < p.length; i++) {
-            log10P[i] = Math.log10(p[i]);
-            n += k[i];
-        }
-        return Math.pow(10, log10MultinomialProbability(n, k, log10P));
-    }
-
-    /**
-     * calculate the Root Mean Square of an array of integers
-     *
-     * @param x an byte[] of numbers
-     * @return the RMS of the specified numbers.
-     */
-    public static double rms(final byte[] x) {
-        if (x.length == 0)
-            return 0.0;
-
-        double rms = 0.0;
-        for (int i : x)
-            rms += i * i;
-        rms /= x.length;
-        return Math.sqrt(rms);
-    }
-
-    /**
-     * calculate the Root Mean Square of an array of integers
-     *
-     * @param x an int[] of numbers
-     * @return the RMS of the specified numbers.
-     */
-    public static double rms(final int[] x) {
-        if (x.length == 0)
-            return 0.0;
-
-        double rms = 0.0;
-        for (int i : x)
-            rms += i * i;
-        rms /= x.length;
-        return Math.sqrt(rms);
-    }
-
-    /**
-     * calculate the Root Mean Square of an array of doubles
-     *
-     * @param x a double[] of numbers
-     * @return the RMS of the specified numbers.
-     */
-    public static double rms(final Double[] x) {
-        if (x.length == 0)
-            return 0.0;
-
-        double rms = 0.0;
-        for (Double i : x)
-            rms += i * i;
-        rms /= x.length;
-        return Math.sqrt(rms);
-    }
-
-    public static double rms(final Collection<Integer> l) {
-        if (l.size() == 0)
-            return 0.0;
-
-        double rms = 0.0;
-        for (int i : l)
-            rms += i * i;
-        rms /= l.size();
-        return Math.sqrt(rms);
-    }
-
-    public static double distanceSquared(final double[] x, final double[] y) {
-        double dist = 0.0;
-        for (int iii = 0; iii < x.length; iii++) {
-            dist += (x[iii] - y[iii]) * (x[iii] - y[iii]);
-        }
-        return dist;
-    }
-
-    public static double round(final double num, final int digits) {
-        double result = num * Math.pow(10.0, (double) digits);
-        result = Math.round(result);
-        result = result / Math.pow(10.0, (double) digits);
-        return result;
-    }
-
    /**
     * normalizes the log10-based array.  ASSUMES THAT ALL ARRAY ENTRIES ARE <= 0 (<= 1 IN REAL-SPACE).
     *
     * @param array             the array to be normalized
     * @param takeLog10OfOutput if true, the output will be transformed back into log10 units
     * @return a newly allocated array corresponding the normalized values in array, maybe log10 transformed
     */
    public static double[] normalizeFromLog10(final double[] array, final boolean takeLog10OfOutput) {
        // delegate to the three-argument overload with the in-log-space shortcut disabled
        return normalizeFromLog10(array, takeLog10OfOutput, false);
    }
-
    /**
     * See #normalizeFromLog10 but with the additional option to use an approximation that keeps the calculation always in log-space
     *
     * NOTE: when keepInLogSpace is true the INPUT array is modified in place (each entry shifted
     * by the array maximum) and returned directly; otherwise a newly allocated array is returned
     * and the input is left untouched.
     *
     * @param array             the array to be normalized, in log10 units
     * @param takeLog10OfOutput if true, the output will be transformed back into log10 units
     * @param keepInLogSpace    if true, only rescale in log space (no exponentiation) and return the input array
     *
     * @return the normalized values; a new array unless keepInLogSpace is set
     */
    public static double[] normalizeFromLog10(final double[] array, final boolean takeLog10OfOutput, final boolean keepInLogSpace) {
        // for precision purposes, we need to add (or really subtract, since they're
        // all negative) the largest value; also, we need to convert to normal-space.
        double maxValue = arrayMax(array);

        // we may decide to just normalize in log space without converting to linear space
        if (keepInLogSpace) {
            for (int i = 0; i < array.length; i++) {
                array[i] -= maxValue;
            }
            return array;
        }

        // default case: go to linear space
        double[] normalized = new double[array.length];

        for (int i = 0; i < array.length; i++)
            normalized[i] = Math.pow(10, array[i] - maxValue);

        // normalize
        double sum = 0.0;
        for (int i = 0; i < array.length; i++)
            sum += normalized[i];
        for (int i = 0; i < array.length; i++) {
            double x = normalized[i] / sum;
            if (takeLog10OfOutput) {
                x = Math.log10(x);
                // guard against underflow: if the log10 result collapsed to -Infinity or fell
                // below the representable floor, fall back to the max-shifted input value
                if ( x < LOG10_P_OF_ZERO || Double.isInfinite(x) )
                    x = array[i] - maxValue;
            }

            normalized[i] = x;
        }

        return normalized;
    }
-
    /**
     * normalizes the log10-based array.  ASSUMES THAT ALL ARRAY ENTRIES ARE <= 0 (<= 1 IN REAL-SPACE).
     *
     * @param array the array to be normalized
     * @return a newly allocated array corresponding the normalized values in array
     */
    public static double[] normalizeFromLog10(final double[] array) {
        // delegate to the two-argument overload, returning real-space (non-log) values
        return normalizeFromLog10(array, false);
    }
-
    /**
     * normalizes the real-space probability array.
     *
     * Does not assume anything about the values in the array, beyond that no elements are below 0.  It's ok
     * to have values in the array of > 1, or have the sum go above 0.
     *
     * NOTE(review): an empty input returns the input array itself (not a copy), and a sum of
     * exactly 0 is not rejected — it yields NaN entries (0/0). Confirm callers never pass an
     * all-zero array.
     *
     * @param array the array to be normalized
     * @return a newly allocated array corresponding the normalized values in array
     */
    @Requires("array != null")
    @Ensures({"result != null"})
    public static double[] normalizeFromRealSpace(final double[] array) {
        if ( array.length == 0 )
            return array;

        final double sum = sum(array);
        final double[] normalized = new double[array.length];
        if ( sum < 0.0 ) throw new IllegalArgumentException("Values in probability array sum to a negative number " + sum);
        for ( int i = 0; i < array.length; i++ ) {
            normalized[i] = array[i] / sum;
        }
        return normalized;
    }
-
    /**
     * Finds the index of the largest element of a double array; ties go to the lowest index.
     *
     * @param array a non-null, non-empty array
     * @return the index of the maximum element
     */
    public static int maxElementIndex(final double[] array) {
        return maxElementIndex(array, array.length);
    }
-
-    public static int maxElementIndex(final double[] array, final int start, final int endIndex) {
-        if (array == null || array.length == 0)
-            throw new IllegalArgumentException("Array cannot be null!");
-
-        if (start > endIndex) {
-           throw new IllegalArgumentException("Start cannot be after end.");
-        }
-
-        int maxI = start;
-        for (int i = (start+1); i < endIndex; i++) {
-            if (array[i] > array[maxI])
-                maxI = i;
-        }
-        return maxI;
-    }
-
    /**
     * Finds the index of the largest element among array[0..endIndex); ties go to the lowest index.
     *
     * @param array    a non-null, non-empty array
     * @param endIndex the exclusive upper bound of the search range
     * @return the index of the maximum element within the range
     */
    public static int maxElementIndex(final double[] array, final int endIndex) {
        return maxElementIndex(array, 0, endIndex);
    }

    /**
     * Finds the index of the largest element of an int array; ties go to the lowest index.
     *
     * @param array a non-null, non-empty array
     * @return the index of the maximum element
     */
    public static int maxElementIndex(final int[] array) {
        return maxElementIndex(array, array.length);
    }

    /**
     * Finds the index of the largest element of a byte array; ties go to the lowest index.
     *
     * @param array a non-null, non-empty array
     * @return the index of the maximum element
     */
    public static int maxElementIndex(final byte[] array) {
        return maxElementIndex(array, array.length);
    }
-
-    public static int maxElementIndex(final int[] array, final int endIndex) {
-        if (array == null || array.length == 0)
-            throw new IllegalArgumentException("Array cannot be null!");
-
-        int maxI = 0;
-        for (int i = 1; i < endIndex; i++) {
-            if (array[i] > array[maxI])
-                maxI = i;
-        }
-        return maxI;
-    }
-
-    public static int maxElementIndex(final byte[] array, final int endIndex) {
-        if (array == null || array.length == 0)
-            throw new IllegalArgumentException("Array cannot be null!");
-
-        int maxI = 0;
-        for (int i = 1; i < endIndex; i++) {
-            if (array[i] > array[maxI])
-                maxI = i;
-        }
-
-        return maxI;
-    }
-
    /** @return the largest value in array; array must be non-null and non-empty. */
    public static int arrayMax(final int[] array) {
        return array[maxElementIndex(array)];
    }


    /** @return the largest value in array; array must be non-null and non-empty. */
    public static double arrayMax(final double[] array) {
        return array[maxElementIndex(array)];
    }

    /** @return the largest value among array[0..endIndex); array must be non-null and non-empty. */
    public static double arrayMax(final double[] array, final int endIndex) {
        return array[maxElementIndex(array, endIndex)];
    }

    /** @return the smallest value in array; array must be non-null and non-empty. */
    public static double arrayMin(final double[] array) {
        return array[minElementIndex(array)];
    }

    /** @return the smallest value in array; array must be non-null and non-empty. */
    public static int arrayMin(final int[] array) {
        return array[minElementIndex(array)];
    }

    /** @return the smallest value in array; array must be non-null and non-empty. */
    public static byte arrayMin(final byte[] array) {
        return array[minElementIndex(array)];
    }
-
-    /**
-     * Compute the min element of a List<Integer>
-     * @param array a non-empty list of integer
-     * @return the min
-     */
-    public static int arrayMin(final List<Integer> array) {
-        if ( array == null || array.isEmpty() ) throw new IllegalArgumentException("Array must be non-null and non-empty");
-        int min = array.get(0);
-        for ( final int i : array )
-            if ( i < min ) min = i;
-        return min;
-    }
-
-    /**
-     * Compute the median element of the list of integers
-     * @param array a list of integers
-     * @return the median element
-     */
-    public static <T extends Comparable<? super T>> T median(final List<T> array) {
-         /* TODO -- from Valentin
-        the current implementation is not the usual median when the input is of even length. More concretely it returns the ith element of the list where i = floor(input.size() / 2).
-
-        But actually that is not the "usual" definition of a median, as it is supposed to return the average of the two middle values when the sample length is an even number (i.e. median(1,2,3,4,5,6) == 3.5). [Sources: R and wikipedia]
-
-        My suggestion for a solution is then:
-
-        unify median and medianDoubles to public static <T extends Number> T median(Collection<T>)
-        check on null elements and throw an exception if there are any or perhaps return a null; documented in the javadoc.
-        relocate, rename and refactor MathUtils.median(X) to Utils.ithElement(X,X.size()/2)
-        In addition, the current median implementation sorts the whole input list witch is O(n log n). However find out the ith element (thus calculate the median) can be done in O(n)
-        */
-        if ( array == null ) throw new IllegalArgumentException("Array must be non-null");
-        final int size = array.size();
-        if ( size == 0 ) throw new IllegalArgumentException("Array cannot have size 0");
-        else if ( size == 1 ) return array.get(0);
-        else {
-            final ArrayList<T> sorted = new ArrayList<>(array);
-            Collections.sort(sorted);
-            return sorted.get(size / 2);
-        }
-    }
-
-    public static int minElementIndex(final double[] array) {
-        if (array == null || array.length == 0)
-            throw new IllegalArgumentException("Array cannot be null!");
-
-        int minI = 0;
-        for (int i = 1; i < array.length; i++) {
-            if (array[i] < array[minI])
-                minI = i;
-        }
-
-        return minI;
-    }
-
-    public static int minElementIndex(final byte[] array) {
-        if (array == null || array.length == 0)
-            throw new IllegalArgumentException("Array cannot be null!");
-
-        int minI = 0;
-        for (int i = 1; i < array.length; i++) {
-            if (array[i] < array[minI])
-                minI = i;
-        }
-
-        return minI;
-    }
-
-    public static int minElementIndex(final int[] array) {
-        if (array == null || array.length == 0)
-            throw new IllegalArgumentException("Array cannot be null!");
-
-        int minI = 0;
-        for (int i = 1; i < array.length; i++) {
-            if (array[i] < array[minI])
-                minI = i;
-        }
-
-        return minI;
-    }
-
-    public static int arrayMaxInt(final List<Integer> array) {
-        if (array == null)
-            throw new IllegalArgumentException("Array cannot be null!");
-        if (array.size() == 0)
-            throw new IllegalArgumentException("Array size cannot be 0!");
-
-        int m = array.get(0);
-        for (int e : array)
-            m = Math.max(m, e);
-        return m;
-    }
-
-    public static int sum(final List<Integer> list ) {
-        int sum = 0;
-        for ( Integer i : list ) {
-          sum += i;
-        }
-        return sum;
-    }
-
-    public static double average(final List<Long> vals, final int maxI) {
-        long sum = 0L;
-
-        int i = 0;
-        for (long x : vals) {
-            if (i > maxI)
-                break;
-            sum += x;
-            i++;
-        }
-
-        return (1.0 * sum) / i;
-    }
-
    /**
     * Averages all values of a list of longs.  NOTE: an empty input yields NaN (0/0).
     *
     * @param vals the values to average
     * @return the mean of the values as a double
     */
    public static double average(final List<Long> vals) {
        return average(vals, vals.size());
    }
-
-    public static int countOccurrences(final char c, final String s) {
-        int count = 0;
-        for (int i = 0; i < s.length(); i++) {
-            count += s.charAt(i) == c ? 1 : 0;
-        }
-        return count;
-    }
-
-    public static <T> int countOccurrences(T x, List<T> l) {
-        int count = 0;
-        for (T y : l) {
-            if (x.equals(y))
-                count++;
-        }
-
-        return count;
-    }
-
-    public static int countOccurrences(byte element, byte[] array) {
-        int count = 0;
-        for (byte y : array) {
-            if (element == y)
-                count++;
-        }
-
-        return count;
-    }
-
-    public static int countOccurrences(final boolean element, final boolean[] array) {
-        int count = 0;
-        for (final boolean b : array) {
-            if (element == b)
-                count++;
-        }
-
-        return count;
-    }
-
-
    /**
     * Returns k random indices drawn with replacement from the range 0..(n-1).
     * (The previous summary incorrectly said "n random indices ... 0..(k-1)".)
     *
     * @param n the total number of indices sampled from
     * @param k the number of random indices to draw (with replacement)
     * @return a list of k random indices ranging from 0 to (n-1) with possible duplicates
     */
    static public ArrayList<Integer> sampleIndicesWithReplacement(final int n, final int k) {

        ArrayList<Integer> chosen_balls = new ArrayList<Integer>(k);
        for (int i = 0; i < k; i++) {
            // each draw is uniform over 0..(n-1), independent of earlier draws
            chosen_balls.add(GenomeAnalysisEngine.getRandomGenerator().nextInt(n));
        }

        return chosen_balls;
    }
-
    /**
     * Returns k random indices drawn without replacement from the range 0..(n-1).
     * (The previous summary incorrectly said "n random indices ... 0..(k-1)".)
     *
     * @param n the total number of indices sampled from
     * @param k the number of random indices to draw (without replacement)
     * @return a list of k random indices ranging from 0 to (n-1) without duplicates
     */
    static public ArrayList<Integer> sampleIndicesWithoutReplacement(final int n, final int k) {
        ArrayList<Integer> chosen_balls = new ArrayList<Integer>(k);

        // enumerate all candidate indices ...
        for (int i = 0; i < n; i++) {
            chosen_balls.add(i);
        }

        // ... shuffle them, then keep the first k
        Collections.shuffle(chosen_balls, GenomeAnalysisEngine.getRandomGenerator());

        // copy the sublist view so the result is an independent ArrayList
        return new ArrayList<Integer>(chosen_balls.subList(0, k));
    }
-
-    /**
-     * Given a list of indices into a list, return those elements of the list with the possibility of drawing list elements multiple times
-     *
-     * @param indices the list of indices for elements to extract
-     * @param list    the list from which the elements should be extracted
-     * @param <T>     the template type of the ArrayList
-     * @return a new ArrayList consisting of the elements at the specified indices
-     */
-    static public <T> ArrayList<T> sliceListByIndices(final List<Integer> indices, final List<T> list) {
-        ArrayList<T> subset = new ArrayList<T>();
-
-        for (int i : indices) {
-            subset.add(list.get(i));
-        }
-
-        return subset;
-    }
-
    /**
     * Given two log10-probability vectors, compute the log10 of their real-space dot product:
     * in Matlab notation, log10(10.^x' * 10.^y).
     *
     * @param x vector 1, in log10 space
     * @param y vector 2, in log10 space; must have the same length as x
     * @return a double representing log10(dotProd(10.^x, 10.^y))
     */
    public static double logDotProduct(final double [] x, final double[] y) {
        if (x.length != y.length)
            throw new ReviewedGATKException("BUG: Vectors of different lengths");

        // element-wise sum in log space == element-wise product in real space
        double tmpVec[] = new double[x.length];

        for (int k=0; k < tmpVec.length; k++ ) {
            tmpVec[k] = x[k]+y[k];
        }

        // log10 of the sum of the per-element real-space products
        return log10sumLog10(tmpVec);



    }
-
    /**
     * Check that the log10 prob vector vector is well formed
     *
     * @param vector          the vector of log10 probabilities to validate
     * @param expectedSize    the exact length the vector must have
     * @param shouldSumToOne  if true, additionally require the probabilities to sum to 1 (within 1e-4)
     *
     * @return true if vector is well-formed, false otherwise
     */
    public static boolean goodLog10ProbVector(final double[] vector, final int expectedSize, final boolean shouldSumToOne) {
        if ( vector.length != expectedSize ) return false;

        // each entry must itself be a valid log10 probability (<= 0, non-NaN)
        for ( final double pr : vector ) {
            if ( ! goodLog10Probability(pr) )
                return false;
        }

        // NOTE(review): sumLog10 presumably returns the real-space sum of the 10^x terms,
        // hence the comparison against 1.0 — confirm against its definition.
        if ( shouldSumToOne && compareDoubles(sumLog10(vector), 1.0, 1e-4) != 0 )
            return false;

        return true; // everything is good
    }
-
    /**
     * Checks that the result is a well-formed log10 probability
     *
     * @param result a supposedly well-formed log10 probability value.  By default allows
     *               -Infinity values, as log10(0.0) == -Infinity.
     * @return true if result is really well formed
     */
    public static boolean goodLog10Probability(final double result) {
        // delegate to the two-argument overload, permitting -Infinity
        return goodLog10Probability(result, true);
    }
-
-    /**
-     * Checks that the result is a well-formed log10 probability
-     *
-     * @param result a supposedly well-formed log10 probability value
-     * @param allowNegativeInfinity should we consider a -Infinity value ok?
-     * @return true if result is really well formed
-     */
-    public static boolean goodLog10Probability(final double result, final boolean allowNegativeInfinity) {
-        return result <= 0.0 && result != Double.POSITIVE_INFINITY && (allowNegativeInfinity || result != Double.NEGATIVE_INFINITY) && ! Double.isNaN(result);
-    }
-
-    /**
-     * Checks that the result is a well-formed probability
-     *
-     * @param result a supposedly well-formed probability value
-     * @return true if result is really well formed
-     */
-    public static boolean goodProbability(final double result) {
-        return result >= 0.0 && result <= 1.0 && ! Double.isInfinite(result) && ! Double.isNaN(result);
-    }
-
-    /**
-     * A utility class that computes on the fly average and standard deviation for a stream of numbers.
-     * The number of observations does not have to be known in advance, and can be also very big (so that
-     * it could overflow any naive summation-based scheme or cause loss of precision).
-     * Instead, adding a new number <code>observed</code>
-     * to a sample with <code>add(observed)</code> immediately updates the instance of this object so that
-     * it contains correct mean and standard deviation for all the numbers seen so far. Source: Knuth, vol.2
-     * (see also e.g. http://www.johndcook.com/standard_deviation.html for online reference).
-     */
-    public static class RunningAverage {
-        private double mean = 0.0;
-        private double s = 0.0;
-        private long obs_count = 0;
-
-        public void add(double obs) {
-            obs_count++;
-            double oldMean = mean;
-            mean += (obs - mean) / obs_count; // update mean
-            s += (obs - oldMean) * (obs - mean);
-        }
-
-        public void addAll(Collection<Number> col) {
-            for (Number o : col) {
-                add(o.doubleValue());
-            }
-        }
-
-        public double mean() {
-            return mean;
-        }
-
-        public double stddev() {
-            return Math.sqrt(s / (obs_count - 1));
-        }
-
-        public double var() {
-            return s / (obs_count - 1);
-        }
-
-        public long observationCount() {
-            return obs_count;
-        }
-
-        public RunningAverage clone() {
-            RunningAverage ra = new RunningAverage();
-            ra.mean = this.mean;
-            ra.s = this.s;
-            ra.obs_count = this.obs_count;
-            return ra;
-        }
-
-        public void merge(RunningAverage other) {
-            if (this.obs_count > 0 || other.obs_count > 0) { // if we have any observations at all
-                this.mean = (this.mean * this.obs_count + other.mean * other.obs_count) / (this.obs_count + other.obs_count);
-                this.s += other.s;
-            }
-            this.obs_count += other.obs_count;
-        }
-    }
-
-    //
-    // useful common utility routines
-    //
-
-    static public double max(double x0, double x1, double x2) {
-        double a = Math.max(x0, x1);
-        return Math.max(a, x2);
-    }
-
-    /**
-     * Converts LN to LOG10
-     *
-     * @param ln log(x)
-     * @return log10(x)
-     */
-    public static double lnToLog10(final double ln) {
-        return ln * Math.log10(Math.E);
-    }
-
-    /**
-     * Constants to simplify the log gamma function calculation.
-     */
-    private static final double zero = 0.0, one = 1.0, half = .5, a0 = 7.72156649015328655494e-02, a1 = 3.22467033424113591611e-01, a2 = 6.73523010531292681824e-02, a3 = 2.05808084325167332806e-02, a4 = 7.38555086081402883957e-03, a5 = 2.89051383673415629091e-03, a6 = 1.19270763183362067845e-03, a7 = 5.10069792153511336608e-04, a8 = 2.20862790713908385557e-04, a9 = 1.08011567247583939954e-04, a10 = 2.52144565451257326939e-05, a11 = 4.48640949618915160150e-05, tc = 1.46163214496836224576e [...]
-
-    /**
-     * Efficient rounding functions to simplify the log gamma function calculation
-     * double to long with 32 bit shift
-     */
-    private static final int HI(final double x) {
-        return (int) (Double.doubleToLongBits(x) >> 32);
-    }
-
-    /**
-     * Efficient rounding functions to simplify the log gamma function calculation
-     * double to long without shift
-     */
-    private static final int LO(final double x) {
-        return (int) Double.doubleToLongBits(x);
-    }
-
    /**
     * Natural log of the gamma function, ported from FDLIBM's lgamma (Sun's freely
     * distributable libm). Branches on the raw IEEE-754 bit pattern (via HI/LO) rather than
     * floating-point compares, and uses the polynomial/rational coefficients declared above
     * (a0..a11, t0..t14, u0..u5, v1..v5, s0..s6, r1..r6, w0..w6, tc, tf, tt).
     * Use via the log10Gamma wrapper method.
     *
     * Returns +Infinity for +-Inf/NaN input and NaN for zero or negative input (no
     * reflection formula — negative arguments are not supported).
     */
    private static double lnGamma(final double x) {
        double t, y, z, p, p1, p2, p3, q, r, w;
        int i;

        int hx = HI(x);
        int lx = LO(x);

        /* purge off +-inf, NaN, +-0, and negative arguments */
        int ix = hx & 0x7fffffff;   /* high word with the sign bit cleared */
        if (ix >= 0x7ff00000)       /* +-Inf or NaN */
            return Double.POSITIVE_INFINITY;
        if ((ix | lx) == 0 || hx < 0)   /* +-0 (pole) or negative (unsupported) */
            return Double.NaN;
        if (ix < 0x3b900000) {    /* |x|<2**-70, return -log(|x|) */
            return -Math.log(x);
        }

        /* purge off 1 and 2: lgamma(1) = lgamma(2) = 0 exactly */
        if ((((ix - 0x3ff00000) | lx) == 0) || (((ix - 0x40000000) | lx) == 0))
            r = 0;
            /* for x < 2.0: pick one of three polynomial approximations (selector i) */
        else if (ix < 0x40000000) {
            if (ix <= 0x3feccccc) {     /* lgamma(x) = lgamma(x+1)-log(x) */
                r = -Math.log(x);
                if (ix >= 0x3FE76944) {
                    y = one - x;
                    i = 0;
                }
                else if (ix >= 0x3FCDA661) {
                    y = x - (tc - one);   /* tc is the positive minimum of lgamma */
                    i = 1;
                }
                else {
                    y = x;
                    i = 2;
                }
            }
            else {
                r = zero;
                if (ix >= 0x3FFBB4C3) {
                    y = 2.0 - x;
                    i = 0;
                } /* [1.7316,2] */
                else if (ix >= 0x3FF3B4C4) {
                    y = x - tc;
                    i = 1;
                } /* [1.23,1.73] */
                else {
                    y = x - one;
                    i = 2;
                }
            }

            /* evaluate the approximation selected above */
            switch (i) {
                case 0:
                    z = y * y;
                    p1 = a0 + z * (a2 + z * (a4 + z * (a6 + z * (a8 + z * a10))));
                    p2 = z * (a1 + z * (a3 + z * (a5 + z * (a7 + z * (a9 + z * a11)))));
                    p = y * p1 + p2;
                    r += (p - 0.5 * y);
                    break;
                case 1:
                    z = y * y;
                    w = z * y;
                    p1 = t0 + w * (t3 + w * (t6 + w * (t9 + w * t12)));    /* parallel comp */
                    p2 = t1 + w * (t4 + w * (t7 + w * (t10 + w * t13)));
                    p3 = t2 + w * (t5 + w * (t8 + w * (t11 + w * t14)));
                    p = z * p1 - (tt - w * (p2 + y * p3));
                    r += (tf + p);
                    break;
                case 2:
                    p1 = y * (u0 + y * (u1 + y * (u2 + y * (u3 + y * (u4 + y * u5)))));
                    p2 = one + y * (v1 + y * (v2 + y * (v3 + y * (v4 + y * v5))));
                    r += (-0.5 * y + p1 / p2);
                    /* no default: for i == 2 nothing else to add */
            }
        }
        else if (ix < 0x40200000) {             /* x < 8.0 */
            i = (int) x;
            t = zero;   /* NOTE(review): dead store inherited from FDLIBM — t is unused in this branch */
            y = x - (double) i;   /* fractional part of x */
            p = y * (s0 + y * (s1 + y * (s2 + y * (s3 + y * (s4 + y * (s5 + y * s6))))));
            q = one + y * (r1 + y * (r2 + y * (r3 + y * (r4 + y * (r5 + y * r6)))));
            r = half * y + p / q;
            z = one;    /* lgamma(1+s) = log(s) + lgamma(s) */
            /* fall-through cases accumulate the product (y+2)(y+3)...(y+i-1) into z */
            switch (i) {
                case 7:
                    z *= (y + 6.0);    /* FALLTHRU */
                case 6:
                    z *= (y + 5.0);    /* FALLTHRU */
                case 5:
                    z *= (y + 4.0);    /* FALLTHRU */
                case 4:
                    z *= (y + 3.0);    /* FALLTHRU */
                case 3:
                    z *= (y + 2.0);    /* FALLTHRU */
                    r += Math.log(z);
                    break;
                /* i == 2 intentionally adds nothing */
            }
            /* 8.0 <= x < 2**58 */
        }
        else if (ix < 0x43900000) {
            /* Stirling's asymptotic expansion */
            t = Math.log(x);
            z = one / x;
            y = z * z;
            w = w0 + z * (w1 + y * (w2 + y * (w3 + y * (w4 + y * (w5 + y * w6)))));
            r = (x - half) * (t - one) + w;
        }
        else
            /* 2**58 <= x <= inf: log term of Stirling dominates completely */
            r = x * (Math.log(x) - one);
        return r;
    }
-
-    /**
-     * Calculates the log10 of the gamma function for x using the efficient FDLIBM
-     * implementation to avoid overflows and guarantees high accuracy even for large
-     * numbers.
-     *
-     * @param x the x parameter
-     * @return the log10 of the gamma function at x.
-     */
-    public static double log10Gamma(final double x) {
-        return lnToLog10(lnGamma(x));
-    }
-
-    public static double factorial(final int x) {
-        // avoid rounding errors caused by fact that 10^log(x) might be slightly lower than x and flooring may produce 1 less than real value
-        return (double)Math.round(Math.pow(10, log10Factorial(x)));
-    }
-
-    public static double log10Factorial(final int x) {
-        if (x >= Log10FactorialCache.size() || x < 0)
-            return log10Gamma(x + 1);
-        else
-            return Log10FactorialCache.get(x);
-    }
-
-    /**
-     * Wrapper class so that the log10Factorial array is only calculated if it's used
-     */
-    private static class Log10FactorialCache {
-
-        /**
-         * The size of the precomputed cache.  Must be a positive number!
-         */
-        private static final int CACHE_SIZE = 10_000;
-
-        public static int size() { return CACHE_SIZE; }
-
-        public static double get(final int n) {
-            if (cache == null)
-                initialize();
-            return cache[n];
-        }
-
-        private static synchronized void initialize() {
-            if (cache == null) {
-                Log10Cache.ensureCacheContains(CACHE_SIZE);
-                cache = new double[CACHE_SIZE];
-                cache[0] = 0.0;
-                for (int k = 1; k < cache.length; k++)
-                    cache[k] = cache[k-1] + Log10Cache.get(k);
-            }
-        }
-
-        private static double[] cache = null;
-    }
-
-    /**
-     * Adds two arrays together and returns a new array with the sum.
-     *
-     * @param a one array
-     * @param b another array
-     * @return a new array with the sum of a and b
-     */
-    @Requires("a.length == b.length")
-    @Ensures("result.length == a.length")
-    public static int[] addArrays(final int[] a, final int[] b) {
-        int[] c = new int[a.length];
-        for (int i = 0; i < a.length; i++)
-            c[i] = a[i] + b[i];
-        return c;
-    }
-
-    /** Same routine, unboxed types for efficiency
-     *
-     * @param x                 First vector
-     * @param y                 Second vector
-     * @return Vector of same length as x and y so that z[k] = x[k]+y[k]
-     */
-    public static double[] vectorSum(final double[]x, final double[] y) {
-        if (x.length != y.length)
-            throw new ReviewedGATKException("BUG: Lengths of x and y must be the same");
-
-        double[] result = new double[x.length];
-        for (int k=0; k <x.length; k++)
-            result[k] = x[k]+y[k];
-
-        return result;
-    }
-
-    /** Compute Z=X-Y for two numeric vectors X and Y
-     *
-     * @param x                 First vector
-     * @param y                 Second vector
-     * @return Vector of same length as x and y so that z[k] = x[k]-y[k]
-     */
-    public static int[] vectorDiff(final int[]x, final int[] y) {
-        if (x.length != y.length)
-            throw new ReviewedGATKException("BUG: Lengths of x and y must be the same");
-
-        int[] result = new int[x.length];
-        for (int k=0; k <x.length; k++)
-            result[k] = x[k]-y[k];
-
-        return result;
-    }
-
-    /**
-     * Returns a series of integer values between start and stop, inclusive,
-     * expontentially distributed between the two.  That is, if there are
-     * ten values between 0-10 there will be 10 between 10-100.
-     *
-     * WARNING -- BADLY TESTED
-     * @param start
-     * @param stop
-     * @param eps
-     * @return
-     */
-    public static List<Integer> log10LinearRange(final int start, final int stop, final double eps) {
-        final LinkedList<Integer> values = new LinkedList<>();
-        final double log10range = Math.log10(stop - start);
-
-        if ( start == 0 )
-            values.add(0);
-
-        double i = 0.0;
-        while ( i <= log10range ) {
-            final int index = (int)Math.round(Math.pow(10, i)) + start;
-            if ( index < stop && (values.peekLast() == null || values.peekLast() != index ) )
-                values.add(index);
-            i += eps;
-        }
-
-        if ( values.peekLast() == null || values.peekLast() != stop )
-            values.add(stop);
-
-        return values;
-    }
-
-    /**
-     * Compute in a numerical correct way the quantity log10(1-x)
-     *
-     * Uses the approximation log10(1-x) = log10(1/x - 1) + log10(x) to avoid very quick underflow
-     * in 1-x when x is very small
-     *
-     * @param x a positive double value between 0.0 and 1.0
-     * @return an estimate of log10(1-x)
-     */
-    @Requires("x >= 0.0 && x <= 1.0")
-    @Ensures("result <= 0.0")
-    public static double log10OneMinusX(final double x) {
-        if ( x == 1.0 )
-            return Double.NEGATIVE_INFINITY;
-        else if ( x == 0.0 )
-            return 0.0;
-        else {
-            final double d = Math.log10(1 / x - 1) + Math.log10(x);
-            return Double.isInfinite(d) || d > 0.0 ? 0.0 : d;
-        }
-    }
-
-    /**
-     * Draw N random elements from list
-     * @param list - the list from which to draw randomly
-     * @param N - the number of elements to draw
-     */
-    public static <T> List<T> randomSubset(final List<T> list, final int N) {
-        if (list.size() <= N) {
-            return list;
-        }
-
-        return sliceListByIndices(sampleIndicesWithoutReplacement(list.size(),N),list);
-    }
-
-    /**
-    * Draw N random elements from list with replacement
-    * @param list - the list from which to draw randomly
-    * @param N - the number of elements to draw
-    */
-    public static <T> List<T> randomSample(final List<T> list, final int N) {
-        if (list.isEmpty() ) {
-            return list;
-        }
-        return sliceListByIndices(sampleIndicesWithReplacement(list.size(),N),list);
-    }
-
-    /**
-     * Return the likelihood of observing the counts of categories having sampled a population
-     * whose categorial frequencies are distributed according to a Dirichlet distribution
-     * @param dirichletParams - params of the prior dirichlet distribution
-     * @param dirichletSum - the sum of those parameters
-     * @param counts - the counts of observation in each category
-     * @param countSum - the sum of counts (number of trials)
-     * @return - associated likelihood
-     */
-    public static double dirichletMultinomial(final double[] dirichletParams, final double dirichletSum,
-                                              final int[] counts, final int countSum) {
-        if ( dirichletParams.length != counts.length ) {
-            throw new IllegalStateException("The number of dirichlet parameters must match the number of categories");
-        }
-        // todo -- lots of lnGammas here. At some point we can safely switch to x * ( ln(x) - 1)
-        double likelihood = log10MultinomialCoefficient(countSum,counts);
-        likelihood += log10Gamma(dirichletSum);
-        likelihood -= log10Gamma(dirichletSum+countSum);
-        for ( int idx = 0; idx < counts.length; idx++ ) {
-            likelihood += log10Gamma(counts[idx] + dirichletParams[idx]);
-            likelihood -= log10Gamma(dirichletParams[idx]);
-        }
-
-        return likelihood;
-    }
-
-    public static double dirichletMultinomial(double[] params, int[] counts) {
-        return dirichletMultinomial(params,sum(params),counts,(int) sum(counts));
-    }
-
    /**
     * Creates an Apache Commons Math exponential distribution with the given mean.
     *
     * @param mean the mean of the distribution (must be positive per the Commons Math contract)
     * @return a freshly constructed ExponentialDistribution
     */
    public static ExponentialDistribution exponentialDistribution( final double mean ) {
        return new ExponentialDistributionImpl(mean);
    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/Median.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/Median.java
deleted file mode 100644
index 40e41f2..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/Median.java
+++ /dev/null
@@ -1,94 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils;
-
-import java.util.*;
-
/**
 * Utility class for calculating the median from a data set, potentially limiting the size of
 * data retained to a fixed amount.  Values are kept unsorted and sorted lazily on query.
 * Not thread-safe.
 */
public class Median<T extends Comparable<? super T>> {
    final List<T> values;         // retained observations, sorted lazily
    final int maxValuesToKeep;    // cap on retained values; adds beyond this are dropped
    boolean sorted = false;       // true while `values` is known to be sorted

    /** Creates a median tracker that keeps every value added. */
    public Median() {
        this(Integer.MAX_VALUE);
    }

    /** Creates a median tracker that retains at most maxValuesToKeep values. */
    public Median(final int maxValuesToKeep) {
        this.maxValuesToKeep = maxValuesToKeep;
        this.values = new ArrayList<>();   // diamond; raw Comparable bound also fixed above
    }

    /** @return true once maxValuesToKeep values have been retained */
    public boolean isFull() {
        return values.size() >= maxValuesToKeep;
    }

    public int size() {
        return values.size();
    }

    public boolean isEmpty() {
        return values.isEmpty();
    }

    /**
     * @return the median of the retained values
     * @throws IllegalStateException if no values have been added
     */
    public T getMedian() {
        if ( isEmpty() )
            throw new IllegalStateException("Cannot get median value from empty array");
        return getMedian(null);  // note that value null will never be used
    }

    /**
     * Returns the floor((n + 1) / 2)-th smallest item (the lower median for even n) from the
     * list of values if the list has values, or defaultValue if the list is empty.
     */
    public T getMedian(final T defaultValue) {
        if ( isEmpty() )
            return defaultValue;

        if ( ! sorted ) {
            sorted = true;
            Collections.sort(values);   // type-safe: T extends Comparable<? super T>
        }

        final int offset = (int)Math.floor((values.size() + 1) * 0.5) - 1;
        return values.get(offset);
    }

    /**
     * Adds a value unless the tracker is already full.
     * @return true if the value was retained
     */
    public boolean add(final T value) {
        if ( ! isFull() ) {
            sorted = false;   // invalidate lazy sort
            return values.add(value);
        }
        else
            return false;
    }
}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/MendelianViolation.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/MendelianViolation.java
deleted file mode 100644
index 75666a7..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/MendelianViolation.java
+++ /dev/null
@@ -1,460 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils;
-
-import org.broadinstitute.gatk.engine.samples.Sample;
-import htsjdk.variant.variantcontext.Genotype;
-import htsjdk.variant.variantcontext.GenotypeType;
-import htsjdk.variant.variantcontext.VariantContext;
-
-import java.util.*;
-
/**
 * User: carneiro / lfran
 * Date: 3/9/11
 * Time: 12:38 PM
 *
 * Class for the identification and tracking of mendelian violations. It can be used in 2 distinct ways:
 * - Either using an instance of the MendelianViolation class to track mendelian violations for each of the families while
 * walking over the variants
 * - Or using the static methods to directly get information about mendelian violations in a family at a given locus
 *
 * Not thread-safe: the per-site counters below are reset and mutated by each call to
 * countViolations()/isViolation().
 */
public class MendelianViolation {
    //List of families in which a violation was observed at the last processed site
    private List<String> violationFamilies;

    //Call information — all reset at the start of countViolations()/isViolation()
    private int nocall = 0;          // families with one or more missing genotypes
    private int familyCalled = 0;    // families fully genotyped (above quality threshold)
    private int varFamilyCalled = 0; // fully genotyped families with at least one non-HomRef member
    private int lowQual = 0;         // families rejected by the genotype-quality threshold

    //When true, only fully-called trios are considered (see completeTriosOnly constructor flag)
    private boolean allCalledOnly = true;

    //Stores occurrences of inheritance: nested map keyed by the three trio genotypes
    //(presumably parent1 -> parent2 -> child; confirm against createInheritanceMap/updateViolations)
    private EnumMap<GenotypeType, EnumMap<GenotypeType,EnumMap<GenotypeType,Integer>>> inheritance;

    private int violations_total=0;   // running count of violations at the current site

    private double minGenotypeQuality;   // minimum phred-scaled GQ; 0 disables the filter

    private boolean abortOnSampleNotFound;   // throw (true) vs. skip (false) when a trio member is missing
-
    /** Number of families with genotype information for all members. */
    public int getFamilyCalledCount(){
        return familyCalled;
    }

    /** Number of fully genotyped families carrying at least one variant allele.
     *  (The original comment here was a copy-paste duplicate of the one above.) */
    public int getVarFamilyCalledCount(){
        return varFamilyCalled;
    }

    /** Number of families missing genotypes for one or more of their members. */
    public int getFamilyNoCallCount(){
        return nocall;
    }

    /** Number of families with genotypes below the set quality threshold. */
    public int getFamilyLowQualsCount(){
        return lowQual;
    }

    /** Total number of Mendelian violations observed at the last processed site. */
    public int getViolationsCount(){
        return violations_total;
    }

    /** Count of alt alleles inherited from het parents (no violation). */
    public int getParentHetInheritedVar(){
        return getParentsHetHetInheritedVar() + getParentsRefHetInheritedVar() + getParentsVarHetInheritedVar();
    }

    /** Count of ref alleles inherited from het parents (no violation). */
    public int getParentHetInheritedRef(){
        return getParentsHetHetInheritedRef() + getParentsRefHetInheritedRef() + getParentsVarHetInheritedRef();
    }

    /** Count of HomRef/HomRef/HomRef trios. */
    public int getRefRefRef(){
        return inheritance.get(GenotypeType.HOM_REF).get(GenotypeType.HOM_REF).get(GenotypeType.HOM_REF);
    }

    /** Count of HomVar/HomVar/HomVar trios. */
    public int getVarVarVar(){
        return inheritance.get(GenotypeType.HOM_VAR).get(GenotypeType.HOM_VAR).get(GenotypeType.HOM_VAR);
    }

    /** Count of HomRef/HomVar/Het trios (both parental orderings). */
    public int getRefVarHet(){
        return inheritance.get(GenotypeType.HOM_REF).get(GenotypeType.HOM_VAR).get(GenotypeType.HET) +
                inheritance.get(GenotypeType.HOM_VAR).get(GenotypeType.HOM_REF).get(GenotypeType.HET);
    }

    /** Count of Het/Het/Het trios. */
    public int getHetHetHet(){
        return inheritance.get(GenotypeType.HET).get(GenotypeType.HET).get(GenotypeType.HET);
    }

    /** Count of Het/Het/HomRef trios. */
    public int getHetHetHomRef(){
        return inheritance.get(GenotypeType.HET).get(GenotypeType.HET).get(GenotypeType.HOM_REF);
    }

    /** Count of Het/Het/HomVar trios. */
    public int getHetHetHomVar(){
        return inheritance.get(GenotypeType.HET).get(GenotypeType.HET).get(GenotypeType.HOM_VAR);
    }
-
    // The accessors below read the nested `inheritance` map; each symmetric pair of lookups
    // covers both orderings of the two parents. Dead commented-out field references removed.

    //Count of ref alleles inherited from Het/Het parents (no violation)
    public int getParentsHetHetInheritedRef(){
        return inheritance.get(GenotypeType.HET).get(GenotypeType.HET).get(GenotypeType.HET)
               + 2*inheritance.get(GenotypeType.HET).get(GenotypeType.HET).get(GenotypeType.HOM_REF); // HomRef child inherited two ref alleles
    }

    //Count of var alleles inherited from Het/Het parents (no violation)
    public int getParentsHetHetInheritedVar(){
        return inheritance.get(GenotypeType.HET).get(GenotypeType.HET).get(GenotypeType.HET)
               + 2*inheritance.get(GenotypeType.HET).get(GenotypeType.HET).get(GenotypeType.HOM_VAR); // HomVar child inherited two var alleles
    }

    //Count of ref alleles inherited from HomRef/Het parents (no violation)
    public int getParentsRefHetInheritedRef(){
        return inheritance.get(GenotypeType.HOM_REF).get(GenotypeType.HET).get(GenotypeType.HOM_REF)
               + inheritance.get(GenotypeType.HET).get(GenotypeType.HOM_REF).get(GenotypeType.HOM_REF);
    }

    //Count of var alleles inherited from HomRef/Het parents (no violation)
    public int getParentsRefHetInheritedVar(){
        return inheritance.get(GenotypeType.HOM_REF).get(GenotypeType.HET).get(GenotypeType.HET)
               + inheritance.get(GenotypeType.HET).get(GenotypeType.HOM_REF).get(GenotypeType.HET);
    }

    //Count of ref alleles inherited from HomVar/Het parents (no violation)
    public int getParentsVarHetInheritedRef(){
        return inheritance.get(GenotypeType.HOM_VAR).get(GenotypeType.HET).get(GenotypeType.HET)
               + inheritance.get(GenotypeType.HET).get(GenotypeType.HOM_VAR).get(GenotypeType.HET);
    }

    //Count of var alleles inherited from HomVar/Het parents (no violation)
    public int getParentsVarHetInheritedVar(){
        return inheritance.get(GenotypeType.HOM_VAR).get(GenotypeType.HET).get(GenotypeType.HOM_VAR)
               + inheritance.get(GenotypeType.HET).get(GenotypeType.HOM_VAR).get(GenotypeType.HOM_VAR);
    }

    //Count of violations of the type HOM_REF/HOM_REF -> HOM_VAR
    public int getParentsRefRefChildVar(){
        return inheritance.get(GenotypeType.HOM_REF).get(GenotypeType.HOM_REF).get(GenotypeType.HOM_VAR);
    }

    //Count of violations of the type HOM_REF/HOM_REF -> HET
    public int getParentsRefRefChildHet(){
        return inheritance.get(GenotypeType.HOM_REF).get(GenotypeType.HOM_REF).get(GenotypeType.HET);
    }

    //Count of violations of the type HOM_REF/HET -> HOM_VAR
    public int getParentsRefHetChildVar(){
        return inheritance.get(GenotypeType.HOM_REF).get(GenotypeType.HET).get(GenotypeType.HOM_VAR)
                + inheritance.get(GenotypeType.HET).get(GenotypeType.HOM_REF).get(GenotypeType.HOM_VAR);
    }

    //Count of violations of the type HOM_REF/HOM_VAR -> HOM_VAR
    public int getParentsRefVarChildVar(){
        return inheritance.get(GenotypeType.HOM_REF).get(GenotypeType.HOM_VAR).get(GenotypeType.HOM_VAR)
                + inheritance.get(GenotypeType.HOM_VAR).get(GenotypeType.HOM_REF).get(GenotypeType.HOM_VAR);
    }

    //Count of violations of the type HOM_REF/HOM_VAR -> HOM_REF
    public int getParentsRefVarChildRef(){
        return inheritance.get(GenotypeType.HOM_REF).get(GenotypeType.HOM_VAR).get(GenotypeType.HOM_REF)
                + inheritance.get(GenotypeType.HOM_VAR).get(GenotypeType.HOM_REF).get(GenotypeType.HOM_REF);
    }

    //Count of violations of the type HOM_VAR/HET -> HOM_REF
    public int getParentsVarHetChildRef(){
        return inheritance.get(GenotypeType.HET).get(GenotypeType.HOM_VAR).get(GenotypeType.HOM_REF)
                + inheritance.get(GenotypeType.HOM_VAR).get(GenotypeType.HET).get(GenotypeType.HOM_REF);
    }

    //Count of violations of the type HOM_VAR/HOM_VAR -> HOM_REF
    public int getParentsVarVarChildRef(){
        return inheritance.get(GenotypeType.HOM_VAR).get(GenotypeType.HOM_VAR).get(GenotypeType.HOM_REF);
    }

    //Count of violations of the type HOM_VAR/HOM_VAR -> HET
    public int getParentsVarVarChildHet(){
        return inheritance.get(GenotypeType.HOM_VAR).get(GenotypeType.HOM_VAR).get(GenotypeType.HET);
    }


    //Count of violations of the type HOM_VAR/? -> HOM_REF (aggregate of the three cases above)
    public int getParentVarChildRef(){
        return getParentsRefVarChildRef() + getParentsVarHetChildRef() +getParentsVarVarChildRef();
    }

    //Count of violations of the type HOM_REF/? -> HOM_VAR (aggregate)
    public int getParentRefChildVar(){
        return getParentsRefVarChildVar() + getParentsRefHetChildVar() +getParentsRefRefChildVar();
    }
-
-    //Returns a String containing all trios where a Mendelian violation was observed.
-    //The String is formatted "mom1+dad1=child1,mom2+dad2=child2,..."
-    public String getViolationFamiliesString(){
-        if(violationFamilies.isEmpty())
-            return "";
-
-        Iterator<String> it = violationFamilies.iterator();
-        String violationFams = it.next();
-        while(it.hasNext()){
-            violationFams += ","+it.next();
-        }
-        return violationFams;
-    }
-
    /** @return the families with a violation at the last processed site.
     *  NOTE(review): exposes the internal mutable list directly. */
    public List<String> getViolationFamilies(){
        return violationFamilies;
    }

    // Index sets over a flattened trio-genotype table, splitting violating from
    // non-violating combinations — presumably 3x3x3 = 27 cells; not referenced in the
    // visible portion of this class, so confirm against the consuming code.
    static final int[] mvOffsets = new int[] { 1,2,5,6,8,11,15,18,20,21,24,25 };
    static final int[] nonMVOffsets = new int[]{ 0,3,4,7,9,10,12,13,14,16,17,19,22,23,26 };

    /** @return the minimum phred-scaled genotype quality threshold in use. */
    public double getMinGenotypeQuality() {
        return minGenotypeQuality;
    }
-
    /**
     * Constructor. Aborts if a listed family member has no genotype (see two-arg constructor).
     * @param minGenotypeQualityP - the minimum phred scaled genotype quality score necessary to assess mendelian violation
     */
    public MendelianViolation(double minGenotypeQualityP) {
        this(minGenotypeQualityP,true);
    }
-
    /**
     * Constructor.
     * @param minGenotypeQualityP - the minimum phred scaled genotype quality score necessary to assess mendelian violation
     * @param abortOnSampleNotFound - Whether to stop execution if a family is passed but no relevant genotypes are found. If false, then the family is ignored.
     */
    public MendelianViolation(double minGenotypeQualityP, boolean abortOnSampleNotFound) {
        minGenotypeQuality = minGenotypeQualityP;
        this.abortOnSampleNotFound = abortOnSampleNotFound;
        violationFamilies = new ArrayList<String>();
        createInheritanceMap();
    }
-
-    /**
-     * Constructor
-     * @param minGenotypeQualityP - the minimum phred scaled genotype quality score necessary to asses mendelian violation
-     * @param abortOnSampleNotFound - Whether to stop execution if a family is passed but no relevant genotypes are found. If false, then the family is ignored.
-     * @param completeTriosOnly - whether only complete trios are considered or parent/child pairs are too.
-     */
-    public MendelianViolation(double minGenotypeQualityP, boolean abortOnSampleNotFound, boolean completeTriosOnly) {
-        minGenotypeQuality = minGenotypeQualityP;
-        this.abortOnSampleNotFound = abortOnSampleNotFound;
-        violationFamilies = new ArrayList<String>();
-        createInheritanceMap();
-        allCalledOnly = completeTriosOnly;
-    }
-
-    /**
-     * @param families the families to be checked for Mendelian violations
-     * @param vc the variant context to extract the genotypes and alleles for mom, dad and child.
-     * @return whether or not there is a mendelian violation at the site.
-     */
-    public int countViolations(Map<String, Set<Sample>> families, VariantContext vc){
-
-        //Reset counts
-        nocall = 0;
-        lowQual = 0;
-        familyCalled = 0;
-        varFamilyCalled = 0;
-        violations_total=0;
-        violationFamilies.clear();
-        clearInheritanceMap();
-
-        for(Set<Sample> family : families.values()){
-            Iterator<Sample> sampleIterator = family.iterator();
-            Sample sample;
-            while(sampleIterator.hasNext()){
-                sample = sampleIterator.next();
-                if(sample.getParents().size() > 0)
-                    updateViolations(sample.getFamilyID(),sample.getMaternalID(), sample.getPaternalID(), sample.getID() ,vc);
-            }
-        }
-        return violations_total;
-    }
-
-    public boolean isViolation(Sample mother, Sample father, Sample child, VariantContext vc){
-
-        //Reset counts
-        nocall = 0;
-        lowQual = 0;
-        familyCalled = 0;
-        varFamilyCalled = 0;
-        violations_total=0;
-        violationFamilies.clear();
-        clearInheritanceMap();
-        updateViolations(mother.getFamilyID(),mother.getID(),father.getID(),child.getID(),vc);
-        return violations_total>0;
-    }
-
-
-    private void updateViolations(String familyId, String motherId, String fatherId, String childId, VariantContext vc){
-
-            int count;
-            Genotype gMom = vc.getGenotype(motherId);
-            Genotype gDad = vc.getGenotype(fatherId);
-            Genotype gChild = vc.getGenotype(childId);
-
-            if (gMom == null || gDad == null || gChild == null){
-                if(abortOnSampleNotFound)
-                    throw new IllegalArgumentException(String.format("Variant %s:%d: Missing genotypes for family %s: mom=%s dad=%s family=%s", vc.getChr(), vc.getStart(), familyId, motherId, fatherId, childId));
-                else
-                    return;
-            }
-            //Count No calls
-            if(allCalledOnly && (!gMom.isCalled() || !gDad.isCalled() || !gChild.isCalled())){
-                nocall++;
-            }
-            else if (!gMom.isCalled() && !gDad.isCalled() || !gChild.isCalled()){
-                nocall++;
-            }
-            //Count lowQual. Note that if min quality is set to 0, even values with no quality associated are returned
-            else if (minGenotypeQuality>0 && (gMom.getPhredScaledQual()   < minGenotypeQuality ||
-                gDad.getPhredScaledQual()   < minGenotypeQuality ||
-                gChild.getPhredScaledQual() < minGenotypeQuality )) {
-                lowQual++;
-            }
-            else{
-                //Count all families per loci called
-                familyCalled++;
-                //If the family is all homref, not too interesting
-                if(!(gMom.isHomRef() && gDad.isHomRef() && gChild.isHomRef()))
-                {
-                    varFamilyCalled++;
-                    if(isViolation(gMom, gDad, gChild)){
-                        violationFamilies.add(familyId);
-                        violations_total++;
-                    }
-                }
-                count = inheritance.get(gMom.getType()).get(gDad.getType()).get(gChild.getType());
-                inheritance.get(gMom.getType()).get(gDad.getType()).put(gChild.getType(),count+1);
-
-            }
-    }
-
-    /**
-     * Evaluate the genotypes of mom, dad, and child to detect Mendelian violations
-     *
-     * @param gMom
-     * @param gDad
-     * @param gChild
-     * @return true if the three genotypes represent a Mendelian violation; false otherwise
-     */
-    public static boolean isViolation(final Genotype gMom, final Genotype gDad, final Genotype gChild) {
-        //1 parent is no "call
-        if(!gMom.isCalled()){
-            return (gDad.isHomRef() && gChild.isHomVar()) || (gDad.isHomVar() && gChild.isHomRef());
-        }
-        else if(!gDad.isCalled()){
-            return (gMom.isHomRef() && gChild.isHomVar()) || (gMom.isHomVar() && gChild.isHomRef());
-        }
-        //Both parents have genotype information
-        return !(gMom.getAlleles().contains(gChild.getAlleles().get(0)) && gDad.getAlleles().contains(gChild.getAlleles().get(1)) ||
-            gMom.getAlleles().contains(gChild.getAlleles().get(1)) && gDad.getAlleles().contains(gChild.getAlleles().get(0)));
-    }
-
-    private void createInheritanceMap(){
-
-        inheritance = new EnumMap<GenotypeType,EnumMap<GenotypeType,EnumMap<GenotypeType,Integer>>>(GenotypeType.class);
-        for(GenotypeType mType : GenotypeType.values()){
-            inheritance.put(mType, new EnumMap<GenotypeType,EnumMap<GenotypeType,Integer>>(GenotypeType.class));
-            for(GenotypeType dType : GenotypeType.values()){
-                inheritance.get(mType).put(dType, new EnumMap<GenotypeType,Integer>(GenotypeType.class));
-                for(GenotypeType cType : GenotypeType.values()){
-                    inheritance.get(mType).get(dType).put(cType, 0);
-                }
-            }
-        }
-
-    }
-
-    private void clearInheritanceMap(){
-        for(GenotypeType mType : GenotypeType.values()){
-            for(GenotypeType dType : GenotypeType.values()){
-                for(GenotypeType cType : GenotypeType.values()){
-                    inheritance.get(mType).get(dType).put(cType, 0);
-                }
-            }
-        }
-    }
-
-    /**
-     * @return the likelihood ratio for a mendelian violation
-     */
-    public double violationLikelihoodRatio(VariantContext vc, String motherId, String fatherId, String childId) {
-        double[] logLikAssignments = new double[27];
-        // the matrix to set up is
-        // MOM   DAD    CHILD
-        //                    |-  AA
-        //   AA     AA    |    AB
-        //                    |-   BB
-        //                    |- AA
-        //  AA     AB     |   AB
-        //                    |- BB
-        // etc. The leaves are counted as 0-11 for MVs and 0-14 for non-MVs
-        double[] momGL = vc.getGenotype(motherId).getLikelihoods().getAsVector();
-        double[] dadGL = vc.getGenotype(fatherId).getLikelihoods().getAsVector();
-        double[] childGL = vc.getGenotype(childId).getLikelihoods().getAsVector();
-        int offset = 0;
-        for ( int oMom = 0; oMom < 3; oMom++ ) {
-            for ( int oDad = 0; oDad < 3; oDad++ ) {
-                for ( int oChild = 0; oChild < 3; oChild ++ ) {
-                    logLikAssignments[offset++] = momGL[oMom] + dadGL[oDad] + childGL[oChild];
-                }
-            }
-        }
-        double[] mvLiks = new double[12];
-        double[] nonMVLiks = new double[15];
-        for ( int i = 0; i < 12; i ++ ) {
-            mvLiks[i] = logLikAssignments[mvOffsets[i]];
-        }
-
-        for ( int i = 0; i < 15; i++) {
-            nonMVLiks[i] = logLikAssignments[nonMVOffsets[i]];
-        }
-
-        return MathUtils.log10sumLog10(mvLiks) - MathUtils.log10sumLog10(nonMVLiks);
-    }
-
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/MultiThreadedErrorTracker.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/MultiThreadedErrorTracker.java
deleted file mode 100644
index edbf25d..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/MultiThreadedErrorTracker.java
+++ /dev/null
@@ -1,105 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils;
-
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-
-/**
- * A utility to track exceptions that occur across threads.
- *
- * Uses a notify mechanism so that multiple threads can tell the tracker that an
- * error has occurred, and a master thread can monitor this object for an error
- * occurring and take appropriate action.  Only maintains the first
- * error to reach the tracker.
- *
- * Refactored from HierarchicalMicroScheduler
- *
- * User: depristo
- * Date: 9/19/12
- * Time: 11:20 AM
- */
-public class MultiThreadedErrorTracker {
-    /**
-     * An exception that's occurred.  If null, no exception has occurred.
-     */
-    private RuntimeException error = null;
-
-    /**
-     * Convenience function to check, and throw, an error is one is pending
-     */
-    public synchronized void throwErrorIfPending() {
-        if (hasAnErrorOccurred())
-            throw getError();
-    }
-
-    /**
-     * Detects whether an execution error has occurred.
-     * @return True if an error has occurred.  False otherwise.
-     */
-    public synchronized boolean hasAnErrorOccurred() {
-        return error != null;
-    }
-
-    /**
-     * Retrieve the error that has occurred.
-     *
-     * @throws ReviewedGATKException if no error has occurred.
-     * @return
-     */
-    public synchronized RuntimeException getError() {
-        if(!hasAnErrorOccurred())
-            throw new ReviewedGATKException("User has attempted to retrieve a traversal error when none exists");
-        return error;
-    }
-
-    /**
-     * Notify this error tracker that an error has occurs.  Only updates the tracked
-     * error if it is currently null (i.e., no error has been already reported).  So
-     * calling this successively with multiple errors only keeps the first, which is the
-     * right thing to do as the initial failure is usually the meaningful one, but
-     * generates a cascade of failures as other subsystems fail.
-     */
-    public synchronized RuntimeException notifyOfError(Throwable error) {
-        if ( this.error == null )
-            this.error = toRuntimeException(error);
-
-        return this.error;
-    }
-
-    /**
-     * Convert error to a Runtime exception, or keep as is if it already is one
-     *
-     * @param error the error that has occurred
-     * @return the potentially converted error
-     */
-    private RuntimeException toRuntimeException(final Throwable error) {
-        // If the error is already a Runtime, pass it along as is.  Otherwise, wrap it.
-        if (error instanceof RuntimeException)
-            return (RuntimeException)error;
-        else
-            return new ReviewedGATKException("An error occurred during the traversal.  Message=" + error.getMessage(), error);
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/NGSPlatform.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/NGSPlatform.java
deleted file mode 100644
index f0c40a0..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/NGSPlatform.java
+++ /dev/null
@@ -1,136 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils;
-
-import org.broadinstitute.gatk.utils.sam.GATKSAMReadGroupRecord;
-import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
-
-import java.util.LinkedList;
-import java.util.List;
-
-/**
- * A canonical, master list of the standard NGS platforms.  These values
- * can be obtained (efficiently) from a GATKSAMRecord object with the
- * getNGSPlatform method.
- *
- * @author Mark DePristo
- * @since 2011
- */
-public enum NGSPlatform {
-    // note the order of elements here determines the order of matching operations, and therefore the
-    // efficiency of getting a NGSPlatform from a string.
-    ILLUMINA("ILLUMINA", "SLX", "SOLEXA"),
-    SOLID("SOLID"),
-    LS454("454"),
-    COMPLETE_GENOMICS("COMPLETE"),
-    PACBIO("PACBIO"),
-    ION_TORRENT("IONTORRENT"),
-    CAPILLARY("CAPILLARY"),
-    HELICOS("HELICOS"),
-    UNKNOWN("UNKNOWN");
-
-    /**
-     * Array of the prefix names in a BAM file for each of the platforms.
-     */
-    protected final String[] BAM_PL_NAMES;
-
-    NGSPlatform(final String... BAM_PL_NAMES) {
-        if ( BAM_PL_NAMES.length == 0 ) throw new IllegalStateException("Platforms must have at least one name");
-
-        for ( int i = 0; i < BAM_PL_NAMES.length; i++ )
-            BAM_PL_NAMES[i] = BAM_PL_NAMES[i].toUpperCase();
-
-        this.BAM_PL_NAMES = BAM_PL_NAMES;
-    }
-
-    /**
-     * Returns a representative PL string for this platform
-     * @return
-     */
-    public final String getDefaultPlatform() {
-        return BAM_PL_NAMES[0];
-    }
-
-    /**
-     * Convenience get -- get the NGSPlatform from a GATKSAMRecord.
-     *
-     * Just gets the platform from the GATKReadGroupRecord associated with this read.
-     *
-     * @param read a non-null GATKSAMRecord
-     * @return an NGSPlatform object matching the PL field of the header, of UNKNOWN if there was no match,
-     *         if there is no read group for read, or there's no PL field for the read group
-     */
-    public static NGSPlatform fromRead(final GATKSAMRecord read) {
-        if ( read == null ) throw new IllegalArgumentException("read cannot be null");
-        final GATKSAMReadGroupRecord rg = read.getReadGroup();
-        return rg == null ? UNKNOWN : rg.getNGSPlatform();
-    }
-
-    /**
-     * Returns the NGSPlatform corresponding to the PL tag in the read group
-     * @param plFromRG -- the PL field (or equivalent) in a ReadGroup object.  Can be null => UNKNOWN
-     * @return an NGSPlatform object matching the PL field of the header, or UNKNOWN if there was no match or plFromRG is null
-     */
-    public static NGSPlatform fromReadGroupPL(final String plFromRG) {
-        if ( plFromRG == null ) return UNKNOWN;
-
-        // todo -- algorithm could be implemented more efficiently, as the list of all
-        // todo -- names is known upfront, so a decision tree could be used to identify
-        // todo -- a prefix common to PL
-        final String pl = plFromRG.toUpperCase();
-        for ( final NGSPlatform ngsPlatform : NGSPlatform.values() ) {
-            for ( final String bamPLName : ngsPlatform.BAM_PL_NAMES ) {
-                if ( pl.contains(bamPLName) )
-                    return ngsPlatform;
-            }
-        }
-
-        return UNKNOWN;
-    }
-
-    /**
-     * checks whether or not the requested platform is listed in the set (and is not unknown)
-     *
-     * @param platform the read group string that describes the platform used.  can be null
-     * @return true if the platform is known (i.e. it's in the list and is not UNKNOWN)
-     */
-    public static boolean isKnown(final String platform) {
-        return fromReadGroupPL(platform) != UNKNOWN;
-    }
-
-    /**
-     * Get a human-readable list of platform names
-     * @return the list of platform names
-     */
-    public static String knownPlatformsString() {
-        final List<String> names = new LinkedList<String>();
-        for ( final NGSPlatform pl : values() ) {
-            for ( final String name : pl.BAM_PL_NAMES )
-                names.add(name);
-        }
-        return Utils.join(",", names);
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/PathUtils.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/PathUtils.java
deleted file mode 100644
index ef6d5a0..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/PathUtils.java
+++ /dev/null
@@ -1,195 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils;
-
-import org.apache.commons.io.comparator.LastModifiedFileComparator;
-import org.apache.log4j.Logger;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-
-import java.io.File;
-import java.io.FilenameFilter;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.Comparator;
-import java.util.List;
-
-/**
- * Created by IntelliJ IDEA.
- * User: hanna
- * Date: Mar 30, 2009
- * Time: 5:43:39 PM
- * To change this template use File | Settings | File Templates.
- *
- * A set of static utility methods for common operations on paths.
- */
-public class PathUtils {
-    private static Logger logger = Logger.getLogger(PathUtils.class);
-
-    /**
-     * Constructor access disallowed...static utility methods only!
-     */
-    private PathUtils() { }
-
-    /**
-     * Find the files in the given directory matching the given extension.
-     *
-     * @param basePath       Path to search.
-     * @param relativePrefix What directory should the given files be presented relative to?
-     * @param extension      Extension for which to search.
-     * @param recursive      Search recursively.  Beware of symlinks!
-     * @return A list of files matching the specified criteria.
-     *         TODO: Test recursive traversal in the presence of a symlink.
-     */
-    public static List<String> findFilesInPath(final File basePath, final String relativePrefix, final String extension, boolean recursive) {
-        List<String> filesInPath = new ArrayList<String>();
-
-        FilenameFilter filter = new OrFilenameFilter(new DirectoryFilter(),
-                new ExtensionFilter(extension));
-        File[] contents = basePath.listFiles( filter );
-        for (File content : contents) {
-            String relativeFileName = relativePrefix.trim().length() != 0 ?
-                    relativePrefix + File.separator + content.getName() :
-                    content.getName();
-            if (relativeFileName.endsWith(extension))
-                filesInPath.add(relativeFileName);
-            else if (content.isDirectory() && recursive)
-                filesInPath.addAll(findFilesInPath(content, relativeFileName, extension, recursive));
-        }
-
-        return filesInPath;
-    }
-
-    /**
-     * Filter files by extension.
-     */
-    public static class ExtensionFilter implements FilenameFilter {
-        private String extensionName = null;
-
-        public ExtensionFilter(String extensionName) {
-            this.extensionName = extensionName;
-        }
-
-        public boolean accept(File f, String s) {
-            return s.endsWith("." + extensionName);
-        }
-    }
-
-    /**
-     * Filter directories from list of files.
-     */
-    public static class DirectoryFilter implements FilenameFilter {
-        public boolean accept(File f, String s) {
-            return new File(f, s).isDirectory();
-        }
-    }
-
-    /**
-     * Join two FilenameFilters together in a logical 'or' operation.
-     */
-    public static class OrFilenameFilter implements FilenameFilter {
-        private FilenameFilter lhs = null, rhs = null;
-
-        public OrFilenameFilter(FilenameFilter lhs, FilenameFilter rhs) {
-            this.lhs = lhs;
-            this.rhs = rhs;
-        }
-
-        public boolean accept(File f, String s) {
-            return lhs.accept(f, s) || rhs.accept(f, s);
-        }
-    }
-
-    /**
-     * Refreshes the volume associated with a given file or directory by attempting to access it
-     * a few times before giving up.  The file need not exist, though the parent directory must.
-     * This method is particularly useful when your job has been dispatched to LSF and you need to
-     * ensure an NSF-mounted volume is actually accessible (many times it isn't for a few seconds,
-     * just enough to cause your program to come crashing down).
-     *
-     * @param file  the file or directory that resides in the volume to be refreshed.
-     */
-    public static void refreshVolume(File file) {
-        File dir = file.isDirectory() ? file : file.getParentFile();
-
-        int sleepCount = 0;
-        while (sleepCount < 3 && dir.listFiles() == null) {
-            try {
-                Thread.sleep((sleepCount + 1)*3000);
-            } catch (InterruptedException e) {
-            }
-
-            sleepCount++;
-        }
-
-        if (dir.listFiles() == null) {
-            throw new ReviewedGATKException("The volume '" + dir.getAbsolutePath() + "' could not be accessed.");
-        }
-    }
-
-
-    /**
-     * Walk over the GATK released directories to find the most recent JAR files corresponding
-     * to the version prefix.  For example, providing input "1.2" will
-     * return the full path to the most recent GenomeAnalysisTK.jar in the GATK_RELEASE_DIR
-     * in directories that match gatkReleaseDir/GenomeAnalysisTK-1.2*
-     *
-     * @param gatkReleaseDir Path to directory containing GATK release binaries (e.g., /humgen/gsa-hpprojects/GATK/bin/)
-     * @param releaseVersionNumber Desired GATK version number (e.g., 1.2)
-     * @return A file pointing to the most recent GATK file in the release directory with GATK release number
-     */
-    public static File findMostRecentGATKVersion(final File gatkReleaseDir, final String releaseVersionNumber) {
-        final String versionString = "GenomeAnalysisTK-" + releaseVersionNumber;
-
-        final List<File> gatkJars = new ArrayList<File>();
-        for ( final String path : gatkReleaseDir.list(new isGATKVersion(versionString)) ) {
-            gatkJars.add(new File(gatkReleaseDir.getAbsolutePath() + "/" + path + "/GenomeAnalysisTK.jar"));
-        }
-
-        if ( gatkJars.isEmpty() )
-            return null;
-        else {
-            Collections.sort(gatkJars, LastModifiedFileComparator.LASTMODIFIED_REVERSE);
-            //for ( File jar : gatkJars ) logger.info(String.format("%s => %d", jar, jar.lastModified()));
-            final File last = gatkJars.get(0);
-            logger.debug(String.format("findMostRecentGATKVersion: Found %d jars for %s, keeping last one %s",
-                    gatkJars.size(), releaseVersionNumber, last));
-            return last;
-        }
-    }
-
-    private final static class isGATKVersion implements FilenameFilter {
-        private final String versionString;
-
-        private isGATKVersion(final String versionString) {
-            this.versionString = versionString;
-        }
-
-        @Override
-        public boolean accept(final File file, final String s) {
-            return s.contains(versionString);
-        }
-    }
-}
\ No newline at end of file
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/QualityUtils.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/QualityUtils.java
deleted file mode 100644
index cd6cfc6..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/QualityUtils.java
+++ /dev/null
@@ -1,397 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils;
-
-import com.google.java.contract.Ensures;
-import htsjdk.samtools.SAMUtils;
-
-/**
- * QualityUtils is a static class (no instantiation allowed!) with some utility methods for manipulating
- * quality scores.
- *
- * @author Kiran Garimella, Mark DePristo
- * @since Way back
- */
-public class QualityUtils {
-    /**
-     * Maximum quality score that can be encoded in a SAM/BAM file
-     */
-    public final static byte MAX_SAM_QUAL_SCORE = SAMUtils.MAX_PHRED_SCORE;
-
-
-    private final static double RAW_MIN_PHRED_SCALED_QUAL = Math.log10(Double.MIN_VALUE);
-    protected final static double MIN_PHRED_SCALED_QUAL = -10.0 * RAW_MIN_PHRED_SCALED_QUAL;
-
-    /**
-     * bams containing quals above this value are extremely suspicious and we should warn the user
-     */
-    public final static byte MAX_REASONABLE_Q_SCORE = 60;
-
-    /**
-     * The lowest quality score for a base that is considered reasonable for statistical analysis.  This is
-     * because Q 6 => you stand a 25% of being right, which means all bases are equally likely
-     */
-    public final static byte MIN_USABLE_Q_SCORE = 6;
-    public final static int MAPPING_QUALITY_UNAVAILABLE = 255;
-
-    /**
-     * Maximum sense quality value.
-     */
-    public static final int MAX_QUAL = 254;
-
-    /**
-     * Cached values for qual as byte calculations so they are very fast
-     */
-    private static double qualToErrorProbCache[] = new double[MAX_QUAL + 1];
-    private static double qualToProbLog10Cache[] = new double[MAX_QUAL + 1];
-
-
-    static {
-        for (int i = 0; i <= MAX_QUAL; i++) {
-            qualToErrorProbCache[i] = qualToErrorProb((double) i);
-            qualToProbLog10Cache[i] = Math.log10(1.0 - qualToErrorProbCache[i]);
-        }
-    }
-
-    /**
-     * Private constructor.  No instantiating this class!
-     */
-    private QualityUtils() {}
-
-    // ----------------------------------------------------------------------
-    //
-    // These are all functions to convert a phred-scaled quality score to a probability
-    //
-    // ----------------------------------------------------------------------
-
-    /**
-     * Convert a phred-scaled quality score to its probability of being true (Q30 => 0.999)
-     *
-     * This is the Phred-style conversion, *not* the Illumina-style conversion.
-     *
-     * Because the input is a discretized byte value, this function uses a cache so is very efficient
-     *
-     * WARNING -- because this function takes a byte for maxQual, you must be careful in converting
-     * integers to byte.  The appropriate way to do this is ((byte)(myInt & 0xFF))
-     *
-     * @param qual a quality score (0-255)
-     * @return a probability (0.0-1.0)
-     */
-    @Ensures("result >= 0.0 && result <= 1.0")
-    public static double qualToProb(final byte qual) {
-        return 1.0 - qualToErrorProb(qual);
-    }
-
-    /**
-     * Convert a phred-scaled quality score to its probability of being true (Q30 => 0.999)
-     *
-     * This is the Phred-style conversion, *not* the Illumina-style conversion.
-     *
-     * Because the input is a double value, this function must call Math.pow so can be quite expensive
-     *
-     * @param qual a phred-scaled quality score encoded as a double.  Can be non-integer values (30.5)
-     * @return a probability (0.0-1.0)
-     */
-    @Ensures("result >= 0.0 && result <= 1.0")
-    public static double qualToProb(final double qual) {
-        if ( qual < 0.0 ) throw new IllegalArgumentException("qual must be >= 0.0 but got " + qual);
-        return 1.0 - qualToErrorProb(qual);
-    }
-
-    /**
-     * Convert a phred-scaled quality score to its log10 probability of being true (Q30 => log10(0.999))
-     *
-     * This is the Phred-style conversion, *not* the Illumina-style conversion.
-     *
-     * Because the input is a double value, this function must call Math.pow so can be quite expensive
-     *
-     * WARNING -- because this function takes a byte for maxQual, you must be careful in converting
-     * integers to byte.  The appropriate way to do this is ((byte)(myInt & 0xFF))
-     *
-     * @param qual a phred-scaled quality score encoded as a double.  Can be non-integer values (30.5)
-     * @return a probability (0.0-1.0)
-     */
-    @Ensures("result <= 0.0")
-    public static double qualToProbLog10(final byte qual) {
-        return qualToProbLog10Cache[(int)qual & 0xff]; // Map: 127 -> 127; -128 -> 128; -1 -> 255; etc.
-    }
-
-    /**
-     * Convert a phred-scaled quality score to its probability of being wrong (Q30 => 0.001)
-     *
-     * This is the Phred-style conversion, *not* the Illumina-style conversion.
-     *
-     * Because the input is a double value, this function must call Math.pow so can be quite expensive
-     *
-     * @param qual a phred-scaled quality score encoded as a double.  Can be non-integer values (30.5)
-     * @return a probability (0.0-1.0)
-     */
-    @Ensures("result >= 0.0 && result <= 1.0")
-    public static double qualToErrorProb(final double qual) {
-        if ( qual < 0.0 ) throw new IllegalArgumentException("qual must be >= 0.0 but got " + qual);
-        return Math.pow(10.0, qual / -10.0);
-    }
-
-    /**
-     * Convert a phred-scaled quality score to its probability of being wrong (Q30 => 0.001)
-     *
-     * This is the Phred-style conversion, *not* the Illumina-style conversion.
-     *
-     * Because the input is a byte value, this function uses a cache so is very efficient
-     *
-     * WARNING -- because this function takes a byte for maxQual, you must be careful in converting
-     * integers to byte.  The appropriate way to do this is ((byte)(myInt & 0xFF))
-     *
-     * @param qual a phred-scaled quality score encoded as a byte
-     * @return a probability (0.0-1.0)
-     */
-    @Ensures("result >= 0.0 && result <= 1.0")
-    public static double qualToErrorProb(final byte qual) {
-        return qualToErrorProbCache[(int)qual & 0xff]; // Map: 127 -> 127; -128 -> 128; -1 -> 255; etc.
-    }
-
-
-    /**
-     * Convert a phred-scaled quality score to its log10 probability of being wrong (Q30 => log10(0.001))
-     *
-     * This is the Phred-style conversion, *not* the Illumina-style conversion.
-     *
-     * The calculation is extremely efficient
-     *
-     * WARNING -- because this function takes a byte for maxQual, you must be careful in converting
-     * integers to byte.  The appropriate way to do this is ((byte)(myInt & 0xFF))
-     *
-     * @param qual a phred-scaled quality score encoded as a byte
-     * @return a probability (0.0-1.0)
-     */
-    @Ensures("result <= 0.0")
-    public static double qualToErrorProbLog10(final byte qual) {
-        return qualToErrorProbLog10((double)(qual & 0xFF));
-    }
-
-    /**
-     * Convert a phred-scaled quality score to its log10 probability of being wrong (Q30 => log10(0.001))
-     *
-     * This is the Phred-style conversion, *not* the Illumina-style conversion.
-     *
-     * The calculation is extremely efficient
-     *
-     * @param qual a phred-scaled quality score encoded as a double
-     * @return a probability (0.0-1.0)
-     */
-    @Ensures("result <= 0.0")
-    public static double qualToErrorProbLog10(final double qual) {
-        if ( qual < 0.0 ) throw new IllegalArgumentException("qual must be >= 0.0 but got " + qual);
-        return qual / -10.0;
-    }
-
-    // ----------------------------------------------------------------------
-    //
-    // Functions to convert a probability to a phred-scaled quality score
-    //
-    // ----------------------------------------------------------------------
-
-    /**
-     * Convert a probability of being wrong to a phred-scaled quality score (0.01 => 20).
-     *
-     * Note, this function caps the resulting quality score by the public static value MAX_SAM_QUAL_SCORE
-     * and by 1 at the low-end.
-     *
-     * @param errorRate a probability (0.0-1.0) of being wrong (i.e., 0.01 is 1% change of being wrong)
-     * @return a quality score (0-MAX_SAM_QUAL_SCORE)
-     */
-    public static byte errorProbToQual(final double errorRate) {
-        return errorProbToQual(errorRate, MAX_SAM_QUAL_SCORE);
-    }
-
-    /**
-     * Convert a probability of being wrong to a phred-scaled quality score (0.01 => 20).
-     *
-     * Note, this function caps the resulting quality score by the public static value MIN_REASONABLE_ERROR
-     * and by 1 at the low-end.
-     *
-     * WARNING -- because this function takes a byte for maxQual, you must be careful in converting
-     * integers to byte.  The appropriate way to do this is ((byte)(myInt & 0xFF))
-     *
-     * @param errorRate a probability (0.0-1.0) of being wrong (i.e., 0.01 is 1% change of being wrong)
-     * @return a quality score (0-maxQual)
-     */
-    public static byte errorProbToQual(final double errorRate, final byte maxQual) {
-        if ( ! MathUtils.goodProbability(errorRate) ) throw new IllegalArgumentException("errorRate must be good probability but got " + errorRate);
-        final double d = Math.round(-10.0*Math.log10(errorRate));
-        return boundQual((int)d, maxQual);
-    }
-
-    /**
-     * @see #errorProbToQual(double, byte) with proper conversion of maxQual integer to a byte
-     */
-    public static byte errorProbToQual(final double prob, final int maxQual) {
-        if ( maxQual < 0 || maxQual > 255 ) throw new IllegalArgumentException("maxQual must be between 0-255 but got " + maxQual);
-        return errorProbToQual(prob, (byte)(maxQual & 0xFF));
-    }
-
-    /**
-     * Convert a probability of being right to a phred-scaled quality score (0.99 => 20).
-     *
-     * Note, this function caps the resulting quality score by the public static value MAX_SAM_QUAL_SCORE
-     * and by 1 at the low-end.
-     *
-     * @param prob a probability (0.0-1.0) of being right
-     * @return a quality score (0-MAX_SAM_QUAL_SCORE)
-     */
-    public static byte trueProbToQual(final double prob) {
-        return trueProbToQual(prob, MAX_SAM_QUAL_SCORE);
-    }
-
-    /**
-     * Convert a probability of being right to a phred-scaled quality score (0.99 => 20).
-     *
-     * Note, this function caps the resulting quality score by the min probability allowed (EPS).
-     * So for example, if prob is 1e-6, which would imply a Q-score of 60, and EPS is 1e-4,
-     * the result of this function is actually Q40.
-     *
-     * Note that the resulting quality score, regardless of EPS, is capped by MAX_SAM_QUAL_SCORE and
-     * bounded on the low-side by 1.
-     *
-     * WARNING -- because this function takes a byte for maxQual, you must be careful in converting
-     * integers to byte.  The appropriate way to do this is ((byte)(myInt & 0xFF))
-     *
-     * @param trueProb a probability (0.0-1.0) of being right
-     * @param maxQual the maximum quality score we are allowed to emit here, regardless of the error rate
-     * @return a phred-scaled quality score (0-maxQualScore) as a byte
-     */
-    @Ensures("(result & 0xFF) >= 1 && (result & 0xFF) <= (maxQual & 0xFF)")
-    public static byte trueProbToQual(final double trueProb, final byte maxQual) {
-        if ( ! MathUtils.goodProbability(trueProb) ) throw new IllegalArgumentException("trueProb must be good probability but got " + trueProb);
-        final double lp = Math.round(-10.0*MathUtils.log10OneMinusX(trueProb));
-        return boundQual((int)lp, maxQual);
-    }
-
-    /**
-     * @see #trueProbToQual(double, byte) with proper conversion of maxQual to a byte
-     */
-    public static byte trueProbToQual(final double prob, final int maxQual) {
-        if ( maxQual < 0 || maxQual > 255 ) throw new IllegalArgumentException("maxQual must be between 0-255 but got " + maxQual);
-        return trueProbToQual(prob, (byte)(maxQual & 0xFF));
-    }
-
-    /**
-     * Convert a probability of being right to a phred-scaled quality score of being wrong as a double
-     *
-     * This is a very generic method, that simply computes a phred-scaled double quality
-     * score given an error rate.  It has the same precision as a normal double operation
-     *
-     * @param trueRate the probability of being right (0.0-1.0)
-     * @return a phred-scaled version of the error rate implied by trueRate
-     */
-    @Ensures("result >= 0.0")
-    public static double phredScaleCorrectRate(final double trueRate) {
-        return phredScaleLog10ErrorRate(MathUtils.log10OneMinusX(trueRate));
-    }
-
-    /**
-     * Convert a log10 probability of being right to a phred-scaled quality score of being wrong as a double
-     *
-     * This is a very generic method, that simply computes a phred-scaled double quality
-     * score given an error rate.  It has the same precision as a normal double operation
-     *
-     * @param trueRateLog10 the log10 probability of being right (0.0-1.0).  Can be -Infinity to indicate
-     *                      that the result is impossible in which MIN_PHRED_SCALED_QUAL is returned
-     * @return a phred-scaled version of the error rate implied by trueRate
-     */
-    @Ensures("result >= 0.0")
-    public static double phredScaleLog10CorrectRate(final double trueRateLog10) {
-        return phredScaleCorrectRate(Math.pow(10.0, trueRateLog10));
-    }
-
-    /**
-     * Convert a probability of being wrong to a phred-scaled quality score of being wrong as a double
-     *
-     * This is a very generic method, that simply computes a phred-scaled double quality
-     * score given an error rate.  It has the same precision as a normal double operation
-     *
-     * @param errorRate the probability of being wrong (0.0-1.0)
-     * @return a phred-scaled version of the error rate
-     */
-    @Ensures("result >= 0.0")
-    public static double phredScaleErrorRate(final double errorRate) {
-        return phredScaleLog10ErrorRate(Math.log10(errorRate));
-    }
-
-    /**
-     * Convert a log10 probability of being wrong to a phred-scaled quality score of being wrong as a double
-     *
-     * This is a very generic method, that simply computes a phred-scaled double quality
-     * score given an error rate.  It has the same precision as a normal double operation
-     *
-     * @param errorRateLog10 the log10 probability of being wrong (0.0-1.0).  Can be -Infinity, in which case
-     *                       the result is MIN_PHRED_SCALED_QUAL
-     * @return a phred-scaled version of the error rate
-     */
-    @Ensures("result >= 0.0")
-    public static double phredScaleLog10ErrorRate(final double errorRateLog10) {
-        if ( ! MathUtils.goodLog10Probability(errorRateLog10) ) throw new IllegalArgumentException("errorRateLog10 must be good probability but got " + errorRateLog10);
-        // abs is necessary for edge base with errorRateLog10 = 0 producing -0.0 doubles
-        return Math.abs(-10.0 * Math.max(errorRateLog10, RAW_MIN_PHRED_SCALED_QUAL));
-    }
-
-    // ----------------------------------------------------------------------
-    //
-    // Routines to bound a quality score to a reasonable range
-    //
-    // ----------------------------------------------------------------------
-
-    /**
-     * Return a quality score that bounds qual by MAX_SAM_QUAL_SCORE and 1
-     *
-     * @param qual the uncapped quality score as an integer
-     * @return the bounded quality score
-     */
-    @Ensures("(result & 0xFF) >= 1 && (result & 0xFF) <= (MAX_SAM_QUAL_SCORE & 0xFF)")
-    public static byte boundQual(int qual) {
-        return boundQual(qual, MAX_SAM_QUAL_SCORE);
-    }
-
-    /**
-     * Return a quality score that bounds qual by maxQual and 1
-     *
-     * WARNING -- because this function takes a byte for maxQual, you must be careful in converting
-     * integers to byte.  The appropriate way to do this is ((byte)(myInt & 0xFF))
-     *
-     * @param qual the uncapped quality score as an integer.  Can be < 0 (which may indicate an error in the
-     *             client code), which will be brought back to 1, but this isn't an error, as some
-     *             routines may use this functionality (BaseRecalibrator, for example)
-     * @param maxQual the maximum quality score, must be less < 255
-     * @return the bounded quality score
-     */
-    @Ensures("(result & 0xFF) >= 1 && (result & 0xFF) <= (maxQual & 0xFF)")
-    public static byte boundQual(final int qual, final byte maxQual) {
-        return (byte) (Math.max(Math.min(qual, maxQual & 0xFF), 1) & 0xFF);
-    }
-
-    }
-
-
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/R/RScriptExecutor.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/R/RScriptExecutor.java
deleted file mode 100644
index c6c4fba..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/R/RScriptExecutor.java
+++ /dev/null
@@ -1,191 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.R;
-
-import org.apache.commons.io.FileUtils;
-import org.apache.commons.lang.StringUtils;
-import org.apache.log4j.Logger;
-import org.broadinstitute.gatk.utils.Utils;
-import org.broadinstitute.gatk.utils.exceptions.GATKException;
-import org.broadinstitute.gatk.utils.exceptions.UserException;
-import org.broadinstitute.gatk.utils.io.IOUtils;
-import org.broadinstitute.gatk.utils.io.Resource;
-import org.broadinstitute.gatk.utils.runtime.ProcessController;
-import org.broadinstitute.gatk.utils.runtime.ProcessSettings;
-import org.broadinstitute.gatk.utils.runtime.RuntimeUtils;
-
-import java.io.File;
-import java.util.ArrayList;
-import java.util.List;
-
-/**
- * Generic service for executing RScripts
- */
-public class RScriptExecutor {
-    private static final String RSCRIPT_BINARY = "Rscript";
-    private static final File RSCRIPT_PATH = RuntimeUtils.which(RSCRIPT_BINARY);
-    public static final boolean RSCRIPT_EXISTS = (RSCRIPT_PATH != null);
-    private static final String RSCRIPT_MISSING_MESSAGE = "Please add the Rscript directory to your environment ${PATH}";
-
-    /**
-     * our log
-     */
-    private static Logger logger = Logger.getLogger(RScriptExecutor.class);
-
-    private boolean exceptOnError = false;
-    private final List<RScriptLibrary> libraries = new ArrayList<RScriptLibrary>();
-    private final List<Resource> scriptResources = new ArrayList<Resource>();
-    private final List<File> scriptFiles = new ArrayList<File>();
-    private final List<String> args = new ArrayList<String>();
-
-    public void setExceptOnError(boolean exceptOnError) {
-        this.exceptOnError = exceptOnError;
-    }
-
-    public void addLibrary(RScriptLibrary library) {
-        this.libraries.add(library);
-    }
-
-    public void addScript(Resource script) {
-        this.scriptResources.add(script);
-    }
-
-    public void addScript(File script) {
-        this.scriptFiles.add(script);
-    }
-
-    /**
-     * Adds args to the end of the Rscript command line.
-     * @param args the args.
-     * @throws NullPointerException if any of the args are null.
-     */
-    public void addArgs(Object... args) {
-        for (Object arg: args)
-            this.args.add(arg.toString());
-    }
-
-    public String getApproximateCommandLine() {
-        StringBuilder command = new StringBuilder("Rscript");
-        for (Resource script: this.scriptResources)
-            command.append(" (resource)").append(script.getFullPath());
-        for (File script: this.scriptFiles)
-            command.append(" ").append(script.getAbsolutePath());
-        for (String arg: this.args)
-            command.append(" ").append(arg);
-        return command.toString();
-    }
-
-    public boolean exec() {
-        if (!RSCRIPT_EXISTS) {
-            if (exceptOnError) {
-                throw new UserException.CannotExecuteRScript(RSCRIPT_MISSING_MESSAGE);
-            } else {
-                logger.warn("Skipping: " + getApproximateCommandLine());
-                return false;
-            }
-        }
-
-        List<File> tempFiles = new ArrayList<File>();
-        try {
-            File tempLibSourceDir  = IOUtils.tempDir("RlibSources.", "");
-            File tempLibInstallationDir = IOUtils.tempDir("Rlib.", "");
-            tempFiles.add(tempLibSourceDir);
-            tempFiles.add(tempLibInstallationDir);
-
-            StringBuilder expression = new StringBuilder("tempLibDir = '").append(tempLibInstallationDir).append("';");
-
-            if (this.libraries.size() > 0) {
-                List<String> tempLibraryPaths = new ArrayList<String>();
-                for (RScriptLibrary library: this.libraries) {
-                    File tempLibrary = library.writeLibrary(tempLibSourceDir);
-                    tempFiles.add(tempLibrary);
-                    tempLibraryPaths.add(tempLibrary.getAbsolutePath());
-                }
-
-                expression.append("install.packages(");
-                expression.append("pkgs=c('").append(StringUtils.join(tempLibraryPaths, "', '")).append("'), lib=tempLibDir, repos=NULL, type='source', ");
-                // Install faster by eliminating cruft.
-                expression.append("INSTALL_opts=c('--no-libs', '--no-data', '--no-help', '--no-demo', '--no-exec')");
-                expression.append(");");
-
-                for (RScriptLibrary library: this.libraries) {
-                    expression.append("library('").append(library.getLibraryName()).append("', lib.loc=tempLibDir);");
-                }
-            }
-
-            for (Resource script: this.scriptResources) {
-                File tempScript = IOUtils.writeTempResource(script);
-                tempFiles.add(tempScript);
-                expression.append("source('").append(tempScript.getAbsolutePath()).append("');");
-            }
-
-            for (File script: this.scriptFiles) {
-                expression.append("source('").append(script.getAbsolutePath()).append("');");
-            }
-
-            String[] cmd = new String[this.args.size() + 3];
-            int i = 0;
-            cmd[i++] = RSCRIPT_BINARY;
-            cmd[i++] = "-e";
-            cmd[i++] = expression.toString();
-            for (String arg: this.args)
-                cmd[i++] = arg;
-
-            ProcessSettings processSettings = new ProcessSettings(cmd);
-            if (logger.isDebugEnabled()) {
-                processSettings.getStdoutSettings().printStandard(true);
-                processSettings.getStderrSettings().printStandard(true);
-            }
-
-            ProcessController controller = ProcessController.getThreadLocal();
-
-            if (logger.isDebugEnabled()) {
-                logger.debug("Executing:");
-                for (String arg: cmd)
-                    logger.debug("  " + arg);
-            }
-            int exitValue = controller.exec(processSettings).getExitValue();
-            logger.debug("Result: " + exitValue);
-
-            if (exitValue != 0)
-                throw new RScriptExecutorException(
-                        "RScript exited with " + exitValue +
-                                (logger.isDebugEnabled() ? "" : ". Run with -l DEBUG for more info."));
-
-            return true;
-        } catch (GATKException e) {
-            if (exceptOnError) {
-                throw e;
-            } else {
-                logger.warn(e.getMessage());
-                return false;
-            }
-        } finally {
-            for (File temp: tempFiles)
-                FileUtils.deleteQuietly(temp);
-        }
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/R/RScriptExecutorException.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/R/RScriptExecutorException.java
deleted file mode 100644
index 233ff73..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/R/RScriptExecutorException.java
+++ /dev/null
@@ -1,34 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.R;
-
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-
-public class RScriptExecutorException extends ReviewedGATKException {
-    public RScriptExecutorException(String msg) {
-        super(msg);
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/R/RScriptLibrary.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/R/RScriptLibrary.java
deleted file mode 100644
index 390edc7..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/R/RScriptLibrary.java
+++ /dev/null
@@ -1,66 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.R;
-
-import org.broadinstitute.gatk.utils.io.IOUtils;
-import org.broadinstitute.gatk.utils.io.Resource;
-
-import java.io.File;
-
-/**
- * Libraries embedded in the StingUtils package.
- */
-public enum RScriptLibrary {
-    GSALIB("gsalib");
-
-    private final String name;
-
-    private RScriptLibrary(String name) {
-        this.name = name;
-    }
-
-    public String getLibraryName() {
-        return this.name;
-    }
-
-    public String getResourcePath() {
-        return name + ".tar.gz";
-    }
-
-    /**
-     * Writes the library source code to a temporary tar.gz file and returns the path.
-     * @return The path to the library source code. The caller must delete the code when done.
-     */
-    public File writeTemp() {
-        return IOUtils.writeTempResource(new Resource(getResourcePath(), RScriptLibrary.class));
-    }
-
-    public File writeLibrary(File tempDir) {
-        File libraryFile = new File(tempDir, getLibraryName());
-        IOUtils.writeResource(new Resource(getResourcePath(), RScriptLibrary.class), libraryFile);
-        return libraryFile;
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/R/RUtils.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/R/RUtils.java
deleted file mode 100644
index 80f7313..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/R/RUtils.java
+++ /dev/null
@@ -1,91 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.R;
-
-import org.apache.commons.lang.StringUtils;
-
-import java.text.SimpleDateFormat;
-import java.util.Collection;
-import java.util.Date;
-
-public class RUtils {
-    /**
-     * Converts a collection of values to an R compatible list. A null list will return NA,
-     * otherwise the values will be escaped with single quotes and combined with c().
-     * @param list Collection of values
-     * @return The R representation of the list
-     */
-    public static String toStringList(Collection<? extends CharSequence> list) {
-        if (list == null)
-            return "NA";
-        if (list.size() == 0)
-            return "c()";
-        return "c('" + StringUtils.join(list, "','") + "')";
-    }
-
-    /**
-     * Converts a collection of values to an R compatible list. A null list will return NA,
-     * otherwise the values will be combined with c().
-     * @param list Collection of values
-     * @return The R representation of the list
-     */
-    public static String toNumberList(Collection<? extends Number> list) {
-        return list == null ? "NA": "c(" + StringUtils.join(list, ",") + ")";
-    }
-
-    /**
-     * Converts a collection of values to an R compatible list. A null list will return NA,
-     * otherwise the date will be escaped with single quotes and combined with c().
-     * @param list Collection of values
-     * @return The R representation of the list
-     */
-    public static String toDateList(Collection<? extends Date> list) {
-        return toDateList(list, "''yyyy-MM-dd''");
-    }
-
-    /**
-     * Converts a collection of values to an R compatible list formatted by pattern.
-     * @param list Collection of values
-     * @param pattern format pattern string for each date
-     * @return The R representation of the list
-     */
-    public static String toDateList(Collection<? extends Date> list, String pattern) {
-
-        if (list == null)
-            return "NA";
-        SimpleDateFormat format = new SimpleDateFormat(pattern);
-        StringBuilder sb = new StringBuilder();
-        sb.append("c(");
-        boolean first = true;
-        for (Date date : list) {
-            if (!first) sb.append(",");
-            sb.append(format.format(date));
-            first = false;
-        }
-        sb.append(")");
-        return sb.toString();
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/SampleUtils.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/SampleUtils.java
deleted file mode 100644
index 77fc170..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/SampleUtils.java
+++ /dev/null
@@ -1,290 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils;
-
-import htsjdk.samtools.SAMFileHeader;
-import htsjdk.samtools.SAMReadGroupRecord;
-import org.broadinstitute.gatk.engine.GenomeAnalysisEngine;
-import org.broadinstitute.gatk.utils.exceptions.UserException;
-import org.broadinstitute.gatk.utils.variant.GATKVCFUtils;
-import org.broadinstitute.gatk.utils.variant.GATKVariantContextUtils;
-import org.broadinstitute.gatk.utils.collections.Pair;
-import htsjdk.variant.vcf.VCFHeader;
-import org.broadinstitute.gatk.utils.text.ListFileUtils;
-import org.broadinstitute.gatk.utils.text.XReadLines;
-
-import java.io.File;
-import java.io.FileNotFoundException;
-import java.util.*;
-
-
-/**
- * SampleUtils is a static class (no instantiation allowed!) with some utility methods for getting samples
- * quality scores.
- *
- * @author ebanks
- */
-public class SampleUtils {
-    /**
-     * Private constructor.  No instantiating this class!
-     */
-    private SampleUtils() {}
-
-    /**
-     * Pull out the samples from a SAMFileHeader;
-     * note that we use a TreeSet so that they are sorted
-     *
-     * @param header  the sam file header
-     * @return list of strings representing the sample names
-     */
-    public static Set<String> getSAMFileSamples(final SAMFileHeader header) {
-        // get all of the unique sample names
-        final Set<String> samples = new TreeSet<String>();
-        List<SAMReadGroupRecord> readGroups = header.getReadGroups();
-        for ( SAMReadGroupRecord readGroup : readGroups )
-            samples.add(readGroup.getSample());
-        return samples;
-    }
-
-
-    /**
-     * Same as @link getSAMFileSamples but gets all of the samples
-     * in the SAM files loaded by the engine
-     *
-     * @param engine engine
-     * @return samples
-     */
-    public static Set<String> getSAMFileSamples(GenomeAnalysisEngine engine) {
-        return SampleUtils.getSAMFileSamples(engine.getSAMFileHeader());
-    }
-
-    /**
-     * Gets all of the unique sample names from all VCF rods input by the user
-     *
-     * @param toolkit    GATK engine
-     *
-     * @return the set of unique samples
-     */
-    public static Set<String> getUniqueSamplesFromRods(GenomeAnalysisEngine toolkit) {
-        return getUniqueSamplesFromRods(toolkit, null);
-    }
-
-    /**
-     * Gets all of the unique sample names from the set of provided VCF rod names input by the user
-     *
-     * @param toolkit    GATK engine
-     * @param rodNames   list of rods to use; if null, uses all VCF rods
-     *
-     * @return the set of unique samples
-     */
-    public static Set<String> getUniqueSamplesFromRods(GenomeAnalysisEngine toolkit, Collection<String> rodNames) {
-        Set<String> samples = new LinkedHashSet<>();
-
-        for ( VCFHeader header : GATKVCFUtils.getVCFHeadersFromRods(toolkit, rodNames).values() )
-            samples.addAll(header.getGenotypeSamples());
-
-        return samples;
-    }
-
-    public static Set<String> getRodNamesWithVCFHeader(GenomeAnalysisEngine toolkit, Collection<String> rodNames) {
-        return GATKVCFUtils.getVCFHeadersFromRods(toolkit, rodNames).keySet();
-    }
-
-    public static Set<String> getSampleListWithVCFHeader(GenomeAnalysisEngine toolkit, Collection<String> rodNames) {
-        return getSampleList(GATKVCFUtils.getVCFHeadersFromRods(toolkit, rodNames));
-    }
-
-    public static Set<String> getSampleList(Map<String, VCFHeader> headers) {
-        return getSampleList(headers, GATKVariantContextUtils.GenotypeMergeType.PRIORITIZE);
-    }
-
-    public static Set<String> getSampleList(Map<String, VCFHeader> headers, GATKVariantContextUtils.GenotypeMergeType mergeOption) {
-        Set<String> samples = new TreeSet<String>();
-        for ( Map.Entry<String, VCFHeader> val : headers.entrySet() ) {
-            VCFHeader header = val.getValue();
-            for ( String sample : header.getGenotypeSamples() ) {
-                samples.add(GATKVariantContextUtils.mergedSampleName(val.getKey(), sample, mergeOption == GATKVariantContextUtils.GenotypeMergeType.UNIQUIFY));
-            }
-        }
-
-        return samples;
-    }
-
-
-    /**
-     *
-     * @param VCF_Headers
-     * @return false if there are names duplication between the samples names in the VCF headers
-     */
-    public static boolean verifyUniqueSamplesNames(Map<String, VCFHeader> VCF_Headers) {
-        Set<String> samples = new HashSet<String>();
-        for ( Map.Entry<String, VCFHeader> val : VCF_Headers.entrySet() ) {
-            VCFHeader header = val.getValue();
-            for ( String sample : header.getGenotypeSamples() ) {
-                if (samples.contains(sample)){
-
-                    return false;
-                }
-                samples.add(sample);
-            }
-        }
-
-        return true;
-    }
-
-    /**
-     * Gets the sample names from all VCF rods input by the user and uniquifies them if there is overlap
-     * (e.g. sampleX.1, sampleX.2, ...)
-     * When finished, samples contains the uniquified sample names and rodNamesToSampleNames contains a mapping
-     * from rod/sample pairs to the new uniquified names
-     *
-     * @param toolkit    GATK engine
-     * @param samples    set to store the sample names
-     * @param rodNamesToSampleNames mapping of rod/sample pairs to new uniquified sample names
-     */
-    public static void getUniquifiedSamplesFromRods(GenomeAnalysisEngine toolkit, Set<String> samples, Map<Pair<String, String>, String> rodNamesToSampleNames) {
-
-        // keep a map of sample name to occurrences encountered
-        HashMap<String, Integer> sampleOverlapMap = new HashMap<String, Integer>();
-
-        // iterate to get all of the sample names
-
-        for ( Map.Entry<String, VCFHeader> pair : GATKVCFUtils.getVCFHeadersFromRods(toolkit).entrySet() ) {
-            for ( String sample : pair.getValue().getGenotypeSamples() )
-                addUniqueSample(samples, sampleOverlapMap, rodNamesToSampleNames, sample, pair.getKey());
-        }
-    }
-
-    private static void addUniqueSample(Set<String> samples, Map<String, Integer> sampleOverlapMap, Map<Pair<String, String>, String> rodNamesToSampleNames, String newSample, String rodName) {
-
-        // how many occurrences have we seen so far?
-        Integer occurrences = sampleOverlapMap.get(newSample);
-
-        // if this is the first one, just add it to the list of samples
-        if ( occurrences == null ) {
-            samples.add(newSample);
-            rodNamesToSampleNames.put(new Pair<String, String>(rodName, newSample), newSample);
-            sampleOverlapMap.put(newSample, 1);
-        }
-
-        // if it's already been seen multiple times, give it a unique suffix and increment the value
-        else if ( occurrences >= 2 ) {
-            String uniqueName = newSample + "." + rodName;
-            samples.add(uniqueName);
-            rodNamesToSampleNames.put(new Pair<String, String>(rodName, newSample), uniqueName);
-            sampleOverlapMap.put(newSample, occurrences + 1);
-        }
-
-        // if this is the second occurrence of the sample name, uniquify both of them
-        else { // occurrences == 2
-
-            // remove the 1st occurrence, uniquify it, and add it back
-            samples.remove(newSample);
-            String uniqueName1 = null;
-            for ( Map.Entry<Pair<String, String>, String> entry : rodNamesToSampleNames.entrySet() ) {
-                if ( entry.getValue().equals(newSample) ) {
-                    uniqueName1 = newSample + "." + entry.getKey().first;
-                    entry.setValue(uniqueName1);
-                    break;
-                }
-            }
-            samples.add(uniqueName1);
-
-            // add the second one
-            String uniqueName2 = newSample + "." + rodName;
-            samples.add(uniqueName2);
-            rodNamesToSampleNames.put(new Pair<String, String>(rodName, newSample), uniqueName2);
-
-            sampleOverlapMap.put(newSample, 2);
-        }
-
-    }
-
-    /**
-     * Returns a new set of samples, containing a final list of samples expanded from sampleArgs
-     *
-     * Each element E of sampleArgs can either be a literal sample name or a file.  For each E,
-     * we try to read a file named E from disk, and if possible all lines from that file are expanded
-     * into unique sample names.
-     *
-     * @param sampleArgs args
-     * @return samples
-     */
-    public static Set<String> getSamplesFromCommandLineInput(Collection<String> sampleArgs) {
-        if (sampleArgs != null) {
-            return ListFileUtils.unpackSet(sampleArgs);
-        }
-
-        return new HashSet<String>();
-    }
-
-    public static Set<String> getSamplesFromCommandLineInput(Collection<String> vcfSamples, Collection<String> sampleExpressions) {
-        Set<String> samples = ListFileUtils.unpackSet(vcfSamples);
-        if (sampleExpressions == null) {
-            return samples;
-        } else {
-            return ListFileUtils.includeMatching(samples, sampleExpressions, false);
-        }
-    }
-
-    /**
-     * Given a collection of samples and a collection of regular expressions, generates the set of samples that match each expression
-     * @param originalSamples list of samples to select samples from
-     * @param sampleExpressions list of expressions to use for matching samples
-     * @return the set of samples from originalSamples that satisfy at least one of the expressions in sampleExpressions
-     */
-    public static Collection<String> matchSamplesExpressions (Collection<String> originalSamples, Collection<String> sampleExpressions) {
-        // Now, check the expressions that weren't used in the previous step, and use them as if they're regular expressions
-        Set<String> samples = new HashSet<String>();
-        if (sampleExpressions != null) {
-            samples.addAll(ListFileUtils.includeMatching(originalSamples, sampleExpressions, false));
-        }
-        return samples;
-    }
-
-    /**
-     * Given a list of files with sample names it reads all files and creates a list of unique samples from all these files.
-     * @param files list of files with sample names in
-     * @return a collection of unique samples from all files
-     */
-    public static Collection<String> getSamplesFromFiles (Collection<File> files) {
-        Set<String> samplesFromFiles = new HashSet<String>();
-        if (files != null) {
-            for (File file : files) {
-                try {
-                    XReadLines reader = new XReadLines(file);
-                    List<String> lines = reader.readLines();
-                    for (String line : lines) {
-                        samplesFromFiles.add(line);
-                    }
-                } catch (FileNotFoundException e) {
-                    throw new UserException.CouldNotReadInputFile(file, e);
-                }
-            }
-        }
-        return samplesFromFiles;
-    }
-}
\ No newline at end of file
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/SequenceDictionaryUtils.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/SequenceDictionaryUtils.java
deleted file mode 100644
index d869037..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/SequenceDictionaryUtils.java
+++ /dev/null
@@ -1,527 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils;
-
-import htsjdk.samtools.SAMSequenceDictionary;
-import htsjdk.samtools.SAMSequenceRecord;
-import org.apache.log4j.Logger;
-import org.broadinstitute.gatk.engine.arguments.ValidationExclusion;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-import org.broadinstitute.gatk.utils.exceptions.UserException;
-
-import java.util.*;
-
-/**
- * Created by IntelliJ IDEA.
- * User: depristo
- * Date: Sep 10, 2010
- * Time: 1:56:24 PM
- *
- * A series of utility functions that enable the GATK to compare two sequence dictionaries -- from the reference,
- * from BAMs, or from RODs -- for consistency.  The system supports two basic modes: get an enum state that
- * describes at a high level the consistency between two dictionaries, or a validateDictionaries that will
- * blow up with a UserException if the dicts are too incompatible.
- *
- * Dictionaries are tested for contig name overlaps, consistency in ordering in these overlap set, and length,
- * if available.  Examines the Engine arguments to decided if the -U option to allow danger seq dict inconsistency
- * is enabled before it blows up.
- */
-public class SequenceDictionaryUtils {
-    //
-    // for detecting lexicographically sorted human references
-    //
-    private static final boolean ENABLE_LEXICOGRAPHIC_REQUIREMENT_FOR_HUMAN = true;
-
-    // hg18
-    protected static final SAMSequenceRecord CHR1_HG18 = new SAMSequenceRecord("chr1", 247249719);
-    protected static final SAMSequenceRecord CHR2_HG18 = new SAMSequenceRecord("chr2", 242951149);
-    protected static final SAMSequenceRecord CHR10_HG18 = new SAMSequenceRecord("chr10", 135374737);
-
-    // hg19
-    protected static final SAMSequenceRecord CHR1_HG19 = new SAMSequenceRecord("chr1", 249250621);
-    protected static final SAMSequenceRecord CHR2_HG19 = new SAMSequenceRecord("chr2", 243199373);
-    protected static final SAMSequenceRecord CHR10_HG19 = new SAMSequenceRecord("chr10", 135534747);
-
-    // b36
-    protected static final SAMSequenceRecord CHR1_B36 = new SAMSequenceRecord("1", 247249719);
-    protected static final SAMSequenceRecord CHR2_B36 = new SAMSequenceRecord("2", 242951149);
-    protected static final SAMSequenceRecord CHR10_B36 = new SAMSequenceRecord("10", 135374737);
-
-    // b37
-    protected static final SAMSequenceRecord CHR1_B37 = new SAMSequenceRecord("1", 249250621);
-    protected static final SAMSequenceRecord CHR2_B37 = new SAMSequenceRecord("2", 243199373);
-    protected static final SAMSequenceRecord CHR10_B37 = new SAMSequenceRecord("10", 135534747);
-
-
-    public enum SequenceDictionaryCompatibility {
-        IDENTICAL,                      // the dictionaries are identical
-        COMMON_SUBSET,                  // there exists a common subset of equivalent contigs
-        NO_COMMON_CONTIGS,              // no overlap between dictionaries
-        UNEQUAL_COMMON_CONTIGS,         // common subset has contigs that have the same name but different lengths
-        NON_CANONICAL_HUMAN_ORDER,      // human reference detected but the order of the contigs is non-standard (lexicographic, for examine)
-        OUT_OF_ORDER,                   // the two dictionaries overlap but the overlapping contigs occur in different
-                                        // orders with respect to each other
-        DIFFERENT_INDICES               // the two dictionaries overlap and the overlapping contigs occur in the same
-                                        // order with respect to each other, but one or more of them have different
-                                        // indices in the two dictionaries. Eg., { chrM, chr1, chr2 } vs. { chr1, chr2 }
-    }
-
-    /**
-     * @param validationExclusion exclusions to validation
-     * @return Returns true if the engine is in tolerant mode and we'll let through dangerous but not fatal dictionary inconsistency
-     */
-    private static boolean allowNonFatalIncompabilities(ValidationExclusion.TYPE validationExclusion) {
-        return ( validationExclusion == ValidationExclusion.TYPE.ALLOW_SEQ_DICT_INCOMPATIBILITY ||
-                        validationExclusion == ValidationExclusion.TYPE.ALL );
-    }
-
-    /**
-     * Tests for compatibility between two sequence dictionaries.  If the dictionaries are incompatible, then
-     * UserExceptions are thrown with detailed error messages.  If the engine is in permissive mode, then
-     * logger warnings are generated instead.
-     *
-     * @param logger for warnings
-     * @param validationExclusion exclusions to validation
-     * @param name1 name associated with dict1
-     * @param dict1 the sequence dictionary dict1
-     * @param name2 name associated with dict2
-     * @param dict2 the sequence dictionary dict2
-     * @param isReadsToReferenceComparison true if one of the dictionaries comes from a reads data source (eg., a BAM),
-     *                                     and the other from a reference data source
-     * @param intervals the user-specified genomic intervals: only required when isReadsToReferenceComparison is true,
-     *                  otherwise can be null
-     */
-    public static void validateDictionaries( final Logger logger,
-                                             final ValidationExclusion.TYPE validationExclusion,
-                                             final String name1,
-                                             final SAMSequenceDictionary dict1,
-                                             final String name2,
-                                             final SAMSequenceDictionary dict2,
-                                             final boolean isReadsToReferenceComparison,
-                                             final GenomeLocSortedSet intervals ) {
-
-        final SequenceDictionaryCompatibility type = compareDictionaries(dict1, dict2);
-
-        switch ( type ) {
-            case IDENTICAL:
-                return;
-            case COMMON_SUBSET:
-                 return;
-            case NO_COMMON_CONTIGS:
-                throw new UserException.IncompatibleSequenceDictionaries("No overlapping contigs found", name1, dict1, name2, dict2);
-
-            case UNEQUAL_COMMON_CONTIGS: {
-                List<SAMSequenceRecord> x = findDisequalCommonContigs(getCommonContigsByName(dict1, dict2), dict1, dict2);
-                SAMSequenceRecord elt1 = x.get(0);
-                SAMSequenceRecord elt2 = x.get(1);
-
-                // todo -- replace with toString when SAMSequenceRecord has a nice toString routine
-                UserException ex = new UserException.IncompatibleSequenceDictionaries(String.format("Found contigs with the same name but different lengths:\n  contig %s = %s / %d\n  contig %s = %s / %d",
-                        name1, elt1.getSequenceName(), elt1.getSequenceLength(),
-                        name2, elt2.getSequenceName(), elt2.getSequenceLength()),
-                        name1, dict1, name2, dict2);
-
-                if ( allowNonFatalIncompabilities(validationExclusion) )
-                    logger.warn(ex.getMessage());
-                else
-                    throw ex;
-                break;
-            }
-
-            case NON_CANONICAL_HUMAN_ORDER: {
-                UserException ex;
-                if ( nonCanonicalHumanContigOrder(dict1) )
-                    ex = new UserException.LexicographicallySortedSequenceDictionary(name1, dict1);
-                else
-                    ex = new UserException.LexicographicallySortedSequenceDictionary(name2, dict2);
-                
-                if ( allowNonFatalIncompabilities(validationExclusion) )
-                    logger.warn(ex.getMessage());
-                else
-                    throw ex;
-                break;
-            }
-
-            case OUT_OF_ORDER: {
-                UserException ex = new UserException.IncompatibleSequenceDictionaries("Relative ordering of overlapping contigs differs, which is unsafe", name1, dict1, name2, dict2);
-                if ( allowNonFatalIncompabilities(validationExclusion) )
-                    logger.warn(ex.getMessage());
-                else
-                    throw ex;
-                break;
-            }
-
-            case DIFFERENT_INDICES: {
-                // This is currently only known to be problematic when the index mismatch is between a bam and the
-                // reference AND when the user's intervals actually include one or more of the contigs that are
-                // indexed differently from the reference. In this case, the engine will fail to correctly serve
-                // up the reads from those contigs, so throw an exception unless unsafe operations are enabled.
-                if ( isReadsToReferenceComparison && intervals != null ) {
-
-                     final Set<String> misindexedContigs = findMisindexedContigsInIntervals(intervals, dict1, dict2);
-
-                     if ( ! misindexedContigs.isEmpty() ) {
-                         final String msg = String.format("The following contigs included in the intervals to process have " +
-                                                          "different indices in the sequence dictionaries for the reads vs. " +
-                                                          "the reference: %s.  As a result, the GATK engine will not correctly " +
-                                                          "process reads from these contigs. You should either fix the sequence " +
-                                                          "dictionaries for your reads so that these contigs have the same indices " +
-                                                          "as in the sequence dictionary for your reference, or exclude these contigs " +
-                                                          "from your intervals. This error can be disabled via -U %s, " +
-                                                          "however this is not recommended as the GATK engine will not behave correctly.",
-                                                          misindexedContigs, ValidationExclusion.TYPE.ALLOW_SEQ_DICT_INCOMPATIBILITY);
-                         final UserException ex = new UserException.IncompatibleSequenceDictionaries(msg, name1, dict1, name2, dict2);
-
-                         if ( allowNonFatalIncompabilities(validationExclusion) )
-                             logger.warn(ex.getMessage());
-                         else
-                             throw ex;
-                     }
-                }
-                break;
-            }
-
-            default:
-                throw new ReviewedGATKException("Unexpected SequenceDictionaryComparison type: " + type);
-        }
-    }
-
-    /**
-     * Workhorse routine that takes two dictionaries and returns their compatibility.
-     *
-     * @param dict1 first sequence dictionary
-     * @param dict2 second sequence dictionary
-     * @return A SequenceDictionaryCompatibility enum value describing the compatibility of the two dictionaries
-     */
-    public static SequenceDictionaryCompatibility compareDictionaries( final SAMSequenceDictionary dict1, final SAMSequenceDictionary dict2) {
-        if ( nonCanonicalHumanContigOrder(dict1) || nonCanonicalHumanContigOrder(dict2) )
-            return SequenceDictionaryCompatibility.NON_CANONICAL_HUMAN_ORDER;
-
-        final Set<String> commonContigs = getCommonContigsByName(dict1, dict2);
-
-        if (commonContigs.size() == 0)
-            return SequenceDictionaryCompatibility.NO_COMMON_CONTIGS;
-        else if ( ! commonContigsHaveSameLengths(commonContigs, dict1, dict2) )
-            return SequenceDictionaryCompatibility.UNEQUAL_COMMON_CONTIGS;
-        else if ( ! commonContigsAreInSameRelativeOrder(commonContigs, dict1, dict2) )
-            return SequenceDictionaryCompatibility.OUT_OF_ORDER;
-        else if ( commonContigs.size() == dict1.size() && commonContigs.size() == dict2.size() )
-            return SequenceDictionaryCompatibility.IDENTICAL;
-        else if ( ! commonContigsAreAtSameIndices(commonContigs, dict1, dict2) )
-            return SequenceDictionaryCompatibility.DIFFERENT_INDICES;
-        else {
-            return SequenceDictionaryCompatibility.COMMON_SUBSET;
-        }
-    }
-
-    /**
-     * Utility function that tests whether the commonContigs in both dicts are equivalent.  Equivalence means
-     * that the seq records have the same length, if both are non-zero.
-     *
-     * @param commonContigs
-     * @param dict1
-     * @param dict2
-     * @return true if all of the common contigs are equivalent
-     */
-    private static boolean commonContigsHaveSameLengths(Set<String> commonContigs, SAMSequenceDictionary dict1, SAMSequenceDictionary dict2) {
-        return findDisequalCommonContigs(commonContigs, dict1, dict2) == null;
-    }
-
-    /**
-     * Returns a List(x,y) that contains two disequal sequence records among the common contigs in both dicts.  Returns
-     * null if all common contigs are equivalent
-     *
-     * @param commonContigs
-     * @param dict1
-     * @param dict2
-     * @return
-     */
-    private static List<SAMSequenceRecord> findDisequalCommonContigs(Set<String> commonContigs, SAMSequenceDictionary dict1, SAMSequenceDictionary dict2) {
-        for ( String name : commonContigs ) {
-            SAMSequenceRecord elt1 = dict1.getSequence(name);
-            SAMSequenceRecord elt2 = dict2.getSequence(name);
-            if ( ! sequenceRecordsAreEquivalent(elt1, elt2) )
-                return Arrays.asList(elt1,elt2);
-        }
-
-        return null;
-    }
-
-    /**
-     * Helper routine that returns two sequence records are equivalent, defined as having the same name and
-     * lengths, if both are non-zero
-     *
-     * @param me
-     * @param that
-     * @return
-     */
-    private static boolean sequenceRecordsAreEquivalent(final SAMSequenceRecord me, final SAMSequenceRecord that) {
-        if (me == that) return true;
-        if (that == null) return false;
-
-        if (me.getSequenceLength() != 0 && that.getSequenceLength() != 0 && me.getSequenceLength() != that.getSequenceLength())
-            return false;
-
-            // todo -- reenable if we want to be really strict here
-//        if (me.getExtendedAttribute(SAMSequenceRecord.MD5_TAG) != null && that.getExtendedAttribute(SAMSequenceRecord.MD5_TAG) != null) {
-//            final BigInteger thisMd5 = new BigInteger((String)me.getExtendedAttribute(SAMSequenceRecord.MD5_TAG), 16);
-//            final BigInteger thatMd5 = new BigInteger((String)that.getExtendedAttribute(SAMSequenceRecord.MD5_TAG), 16);
-//            if (!thisMd5.equals(thatMd5)) {
-//                return false;
-//            }
-//        }
-//        else {
-        if (me.getSequenceName() != that.getSequenceName())
-            return false; // Compare using == since we intern() the Strings
-//        }
-
-        return true;
-    }
-
-    /**
-     * A very simple (and naive) algorithm to determine (1) if the dict is a human reference (hg18/hg19) and if it's
-     * lexicographically sorted.  Works by matching lengths of the static chr1, chr10, and chr2, and then if these
-     * are all matched, requiring that the order be chr1, chr2, chr10.
-     *
-     * @param dict
-     * @return
-     */
-    private static boolean nonCanonicalHumanContigOrder(SAMSequenceDictionary dict) {
-        if ( ! ENABLE_LEXICOGRAPHIC_REQUIREMENT_FOR_HUMAN ) // if we don't want to enable this test, just return false
-            return false;
-
-        SAMSequenceRecord chr1 = null, chr2 = null, chr10 = null;
-
-        for ( SAMSequenceRecord elt : dict.getSequences() ) {
-            if ( isHumanSeqRecord(elt, CHR1_HG18, CHR1_HG19 ) ) chr1 = elt;
-            if ( isHumanSeqRecord(elt, CHR2_HG18, CHR2_HG19 ) ) chr2 = elt;
-            if ( isHumanSeqRecord(elt, CHR10_HG18, CHR10_HG19 ) ) chr10 = elt;
-        }
-
-        if ( chr1 != null && chr2 != null && chr10 != null) {
-            // we found them all
-            return ! ( chr1.getSequenceIndex() < chr2.getSequenceIndex() && chr2.getSequenceIndex() < chr10.getSequenceIndex() );
-        } else {
-            return false;
-        }
-    }
-
-    /**
-     * Trivial helper that returns true if elt has the same length as rec1 or rec2
-     * @param elt record to test
-     * @param rec1 first record to test for length equivalence
-     * @param rec2 first record to test for length equivalence
-     * @return true if elt has the same length as either rec1 or rec2
-     */
-    private static boolean isHumanSeqRecord(SAMSequenceRecord elt, SAMSequenceRecord rec1, SAMSequenceRecord rec2 ) {
-        return elt.getSequenceLength() == rec1.getSequenceLength() || elt.getSequenceLength() == rec2.getSequenceLength();
-    }
-
-    /**
-     * Returns true if the common contigs in dict1 and dict2 are in the same relative order, without regard to
-     * absolute index position. This is accomplished by getting the common contigs in both dictionaries, sorting
-     * these according to their indices, and then walking through the sorted list to ensure that each ordered contig
-     * is equivalent
-     *
-     * @param commonContigs names of the contigs common to both dictionaries
-     * @param dict1 first SAMSequenceDictionary
-     * @param dict2 second SAMSequenceDictionary
-     * @return true if the common contigs occur in the same relative order in both dict1 and dict2, otherwise false
-     */
-    private static boolean commonContigsAreInSameRelativeOrder(Set<String> commonContigs, SAMSequenceDictionary dict1, SAMSequenceDictionary dict2) {
-        List<SAMSequenceRecord> list1 = sortSequenceListByIndex(getSequencesOfName(commonContigs, dict1));
-        List<SAMSequenceRecord> list2 = sortSequenceListByIndex(getSequencesOfName(commonContigs, dict2));
-
-        for ( int i = 0; i < list1.size(); i++ ) {
-            SAMSequenceRecord elt1 = list1.get(i);
-            SAMSequenceRecord elt2 = list2.get(i);
-            if ( ! elt1.getSequenceName().equals(elt2.getSequenceName()) )
-                return false;
-        }
-
-        return true;
-    }
-
-    /**
-     * Gets the subset of SAMSequenceRecords in commonContigs in dict
-     *
-     * @param commonContigs
-     * @param dict
-     * @return
-     */
-    private static List<SAMSequenceRecord> getSequencesOfName(Set<String> commonContigs, SAMSequenceDictionary dict) {
-        List<SAMSequenceRecord> l = new ArrayList<SAMSequenceRecord>(commonContigs.size());
-        for ( String name : commonContigs ) {
-            l.add(dict.getSequence(name) );
-        }
-
-        return l;
-    }
-
-    /**
-     * Compares sequence records by their order
-     */
-    private static class CompareSequenceRecordsByIndex implements Comparator<SAMSequenceRecord> {
-        public int compare(SAMSequenceRecord x, SAMSequenceRecord y) {
-            return Integer.valueOf(x.getSequenceIndex()).compareTo(y.getSequenceIndex());
-        }
-    }
-
-    /**
-     * Returns a sorted list of SAMSequenceRecords sorted by their indices.  Note that the
-     * list is modified in place, so the returned list is == to the unsorted list.
-     *
-     * @param unsorted
-     * @return
-     */
-    private static List<SAMSequenceRecord> sortSequenceListByIndex(List<SAMSequenceRecord> unsorted) {
-        Collections.sort(unsorted, new CompareSequenceRecordsByIndex());
-        return unsorted;
-    }
-
-    /**
-     * Checks whether the common contigs in the given sequence dictionaries occur at the same indices
-     * in both dictionaries
-     *
-     * @param commonContigs Set of names of the contigs that occur in both dictionaries
-     * @param dict1 first sequence dictionary
-     * @param dict2 second sequence dictionary
-     * @return true if the contigs common to dict1 and dict2 occur at the same indices in both dictionaries,
-     *         otherwise false
-     */
-    private static boolean commonContigsAreAtSameIndices( final Set<String> commonContigs, final SAMSequenceDictionary dict1, final SAMSequenceDictionary dict2 ) {
-        for ( String commonContig : commonContigs ) {
-            SAMSequenceRecord dict1Record = dict1.getSequence(commonContig);
-            SAMSequenceRecord dict2Record = dict2.getSequence(commonContig);
-
-            // Each common contig must have the same index in both dictionaries
-            if ( dict1Record.getSequenceIndex() != dict2Record.getSequenceIndex() ) {
-                return false;
-            }
-        }
-
-        return true;
-    }
-
-    /**
-     * Gets the set of names of the contigs found in both sequence dictionaries that have different indices
-     * in the two dictionaries.
-     *
-     * @param commonContigs Set of names of the contigs common to both dictionaries
-     * @param dict1 first sequence dictionary
-     * @param dict2 second sequence dictionary
-     * @return a Set containing the names of the common contigs indexed differently in dict1 vs. dict2,
-     *         or an empty Set if there are no such contigs
-     */
-    private static Set<String> getDifferentlyIndexedCommonContigs( final Set<String> commonContigs,
-                                                                   final SAMSequenceDictionary dict1,
-                                                                   final SAMSequenceDictionary dict2 ) {
-
-        final Set<String> differentlyIndexedCommonContigs = new LinkedHashSet<String>(Utils.optimumHashSize(commonContigs.size()));
-
-        for ( String commonContig : commonContigs ) {
-            if ( dict1.getSequence(commonContig).getSequenceIndex() != dict2.getSequence(commonContig).getSequenceIndex() ) {
-                differentlyIndexedCommonContigs.add(commonContig);
-            }
-        }
-
-        return differentlyIndexedCommonContigs;
-    }
-
-    /**
-     * Finds the names of any contigs indexed differently in the two sequence dictionaries that also
-     * occur in the provided set of intervals.
-     *
-     * @param intervals GenomeLocSortedSet containing the intervals to check
-     * @param dict1 first sequence dictionary
-     * @param dict2 second sequence dictionary
-     * @return a Set of the names of the contigs indexed differently in dict1 vs dict2 that also
-     *         occur in the provided intervals, or an empty Set if there are no such contigs
-     */
-    private static Set<String> findMisindexedContigsInIntervals( final GenomeLocSortedSet intervals,
-                                                                 final SAMSequenceDictionary dict1,
-                                                                 final SAMSequenceDictionary dict2 ) {
-
-        final Set<String> differentlyIndexedCommonContigs = getDifferentlyIndexedCommonContigs(getCommonContigsByName(dict1, dict2), dict1, dict2);
-        final Set<String> misindexedContigsInIntervals = new LinkedHashSet<String>(Utils.optimumHashSize(differentlyIndexedCommonContigs.size()));
-
-        // We know differentlyIndexedCommonContigs is a HashSet, so this loop is O(intervals)
-        for ( GenomeLoc interval : intervals ) {
-            if ( differentlyIndexedCommonContigs.contains(interval.getContig()) ) {
-                misindexedContigsInIntervals.add(interval.getContig());
-            }
-        }
-
-        return misindexedContigsInIntervals;
-    }
-
-    /**
-     * Returns the set of contig names found in both dicts.
-     * @param dict1
-     * @param dict2
-     * @return
-     */
-    public static Set<String> getCommonContigsByName(SAMSequenceDictionary dict1, SAMSequenceDictionary dict2) {
-        Set<String> intersectingSequenceNames = getContigNames(dict1);
-        intersectingSequenceNames.retainAll(getContigNames(dict2));
-        return intersectingSequenceNames;
-    }
-
-    public static Set<String> getContigNames(SAMSequenceDictionary dict) {
-        Set<String> contigNames = new HashSet<String>(Utils.optimumHashSize(dict.size()));
-        for (SAMSequenceRecord dictionaryEntry : dict.getSequences())
-            contigNames.add(dictionaryEntry.getSequenceName());
-        return contigNames;
-    }
-
-    /**
-     * Returns a compact String representation of the sequence dictionary it's passed
-     *
-     * The format of the returned String is:
-     * [ contig1Name(length: contig1Length) contig2Name(length: contig2Length) ... ]
-     *
-     * @param dict a non-null SAMSequenceDictionary
-     * @return A String containing all of the contig names and lengths from the sequence dictionary it's passed
-     */
-    public static String getDictionaryAsString( final SAMSequenceDictionary dict ) {
-        if ( dict == null ) {
-            throw new IllegalArgumentException("Sequence dictionary must be non-null");
-        }
-
-        StringBuilder s = new StringBuilder("[ ");
-
-        for ( SAMSequenceRecord dictionaryEntry : dict.getSequences() ) {
-            s.append(dictionaryEntry.getSequenceName());
-            s.append("(length:");
-            s.append(dictionaryEntry.getSequenceLength());
-            s.append(") ");
-        }
-
-        s.append("]");
-
-        return s.toString();
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/UnvalidatingGenomeLoc.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/UnvalidatingGenomeLoc.java
deleted file mode 100644
index 2c3d24b..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/UnvalidatingGenomeLoc.java
+++ /dev/null
@@ -1,50 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils;
-
-import com.google.java.contract.Requires;
-
-/**
- * GenomeLocs are very useful objects to keep track of genomic locations and perform set operations
- * with them.
- *
- * However, GenomeLocs are bound to strict validation through the GenomeLocParser and cannot
- * be created easily for small tasks that do not require the rigors of the GenomeLocParser validation
- *
- * UnvalidatingGenomeLoc is a simple utility to create GenomeLocs without going through the parser.
- *
- * WARNING: SHOULD BE USED ONLY BY EXPERT USERS WHO KNOW WHAT THEY ARE DOING!
- *
- * User: carneiro
- * Date: 10/16/12
- * Time: 2:07 PM
- */
-public class UnvalidatingGenomeLoc extends GenomeLoc {
-
-    public UnvalidatingGenomeLoc(String contigName, int contigIndex, int start, int stop) {
-        super(contigName, contigIndex, start, stop);
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/Utils.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/Utils.java
deleted file mode 100644
index d664ef6..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/Utils.java
+++ /dev/null
@@ -1,1186 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils;
-
-import com.google.java.contract.Ensures;
-import com.google.java.contract.Requires;
-import htsjdk.samtools.SAMFileHeader;
-import htsjdk.samtools.SAMProgramRecord;
-import org.apache.log4j.Logger;
-import org.broadinstitute.gatk.engine.GenomeAnalysisEngine;
-import org.broadinstitute.gatk.engine.io.GATKSAMFileWriter;
-import org.broadinstitute.gatk.utils.text.TextFormattingUtils;
-
-import java.lang.reflect.Array;
-import java.math.BigInteger;
-import java.net.InetAddress;
-import java.security.MessageDigest;
-import java.security.NoSuchAlgorithmException;
-import java.util.*;
-
-/**
- * Created by IntelliJ IDEA.
- * User: depristo
- * Date: Feb 24, 2009
- * Time: 10:12:31 AM
- * To change this template use File | Settings | File Templates.
- */
-public class Utils {
-    /** our log, which we want to capture anything from this class */
-    private static Logger logger = Logger.getLogger(Utils.class);
-
-    public static final float JAVA_DEFAULT_HASH_LOAD_FACTOR = 0.75f;
-
-    /**
-     * Boolean xor operation.  Only true if x != y.
-     *
-     * @param x a boolean
-     * @param y a boolean
-     * @return true if x != y
-     */
-    public static boolean xor(final boolean x, final boolean y) {
-        return x != y;
-    }
-
-    /**
-     * Calculates the optimum initial size for a hash table given the maximum number
-     * of elements it will need to hold. The optimum size is the smallest size that
-     * is guaranteed not to result in any rehash/table-resize operations.
-     *
-     * @param maxElements  The maximum number of elements you expect the hash table
-     *                     will need to hold
-     * @return             The optimum initial size for the table, given maxElements
-     */
-    public static int optimumHashSize ( int maxElements ) {
-        return (int)(maxElements / JAVA_DEFAULT_HASH_LOAD_FACTOR) + 2;
-    }
-
-    /**
-     * Compares two objects, either of which might be null.
-     *
-     * @param lhs One object to compare.
-     * @param rhs The other object to compare.
-     *
-     * @return True if the two objects are equal, false otherwise.
-     */
-    public static boolean equals(Object lhs, Object rhs) {
-        return lhs == null && rhs == null || lhs != null && lhs.equals(rhs);
-    }
-
-    public static <T> List<T> cons(final T elt, final List<T> l) {
-        List<T> l2 = new ArrayList<T>();
-        l2.add(elt);
-        if (l != null) l2.addAll(l);
-        return l2;
-    }
-
-    public static void warnUser(final String msg) {
-        warnUser(logger, msg);
-    }
-    
-    public static void warnUser(final Logger logger, final String msg) {
-        logger.warn(String.format("********************************************************************************"));
-        logger.warn(String.format("* WARNING:"));
-        logger.warn(String.format("*"));
-        prettyPrintWarningMessage(logger, msg);
-        logger.warn(String.format("********************************************************************************"));
-    }
-
-    /**
-     * pretty print the warning message supplied
-     *
-     * @param logger logger for the message
-     * @param message the message
-     */
-    private static void prettyPrintWarningMessage(Logger logger, String message) {
-        StringBuilder builder = new StringBuilder(message);
-        while (builder.length() > 70) {
-            int space = builder.lastIndexOf(" ", 70);
-            if (space <= 0) space = 70;
-            logger.warn(String.format("* %s", builder.substring(0, space)));
-            builder.delete(0, space + 1);
-        }
-        logger.warn(String.format("* %s", builder));
-    }
-
-    /**
-     * join the key value pairs of a map into one string, i.e. myMap = [A->1,B->2,C->3] with a call of:
-     * joinMap("-","*",myMap) -> returns A-1*B-2*C-3
-     *
-     * Be forewarned, if you're not using a map that is aware of the ordering (i.e. HashMap instead of LinkedHashMap)
-     * the ordering of the string you get back might not be what you expect! (i.e. C-3*A-1*B-2 vrs A-1*B-2*C-3)
-     *
-     * @param keyValueSeperator the string to seperate the key-value pairs
-     * @param recordSeperator the string to use to seperate each key-value pair from other key-value pairs
-     * @param map the map to draw from
-     * @param <L> the map's key type
-     * @param <R> the map's value type
-     * @return a string representing the joined map
-     */
-    public static <L,R> String joinMap(String keyValueSeperator, String recordSeperator, Map<L,R> map) {
-        if (map.size() < 1) { return null; }
-        String joinedKeyValues[] = new String[map.size()];
-        int index = 0;
-        for (L key : map.keySet()) {
-           joinedKeyValues[index++] = String.format("%s%s%s",key.toString(),keyValueSeperator,map.get(key).toString());
-        }
-        return join(recordSeperator,joinedKeyValues);
-    }
-
-    /**
-     * Splits a String using indexOf instead of regex to speed things up.
-     *
-     * @param str the string to split.
-     * @param delimiter the delimiter used to split the string.
-     * @return an array of tokens.
-     */
-    public static ArrayList<String> split(String str, String delimiter) {
-        return split(str, delimiter, 10);
-    }
-
-    /**
-     * Splits a String using indexOf instead of regex to speed things up.
-     *
-     * @param str the string to split.
-     * @param delimiter the delimiter used to split the string.
-     * @param expectedNumTokens The number of tokens expected. This is used to initialize the ArrayList.
-     * @return an array of tokens.
-     */
-    public static ArrayList<String> split(String str, String delimiter, int expectedNumTokens) {
-        final ArrayList<String> result =  new ArrayList<String>(expectedNumTokens);
-
-        int delimiterIdx = -1;
-        do {
-            final int tokenStartIdx = delimiterIdx + 1;
-            delimiterIdx = str.indexOf(delimiter, tokenStartIdx);
-            final String token = (delimiterIdx != -1 ? str.substring(tokenStartIdx, delimiterIdx) : str.substring(tokenStartIdx) );
-            result.add(token);
-        } while( delimiterIdx != -1 );
-
-        return result;
-    }
-
-
-    /**
-     * join an array of strings given a seperator
-     * @param separator the string to insert between each array element
-     * @param strings the array of strings
-     * @return a string, which is the joining of all array values with the separator
-     */
-    public static String join(String separator, String[] strings) {
-        return join(separator, strings, 0, strings.length);
-    }
-
-    public static String join(String separator, String[] strings, int start, int end) {
-        if ((end - start) == 0) {
-            return "";
-        }
-        StringBuilder ret = new StringBuilder(strings[start]);
-        for (int i = start + 1; i < end; ++i) {
-            ret.append(separator);
-            ret.append(strings[i]);
-        }
-        return ret.toString();
-    }
-
-    public static String join(String separator, int[] ints) {
-        if ( ints == null || ints.length == 0)
-            return "";
-        else {
-            StringBuilder ret = new StringBuilder();
-            ret.append(ints[0]);
-            for (int i = 1; i < ints.length; ++i) {
-                ret.append(separator);
-                ret.append(ints[i]);
-            }
-            return ret.toString();
-        }
-    }
-
-    /**
-     * Create a new list that contains the elements of left along with elements elts
-     * @param left a non-null list of elements
-     * @param elts a varargs vector for elts to append in order to left
-     * @return A newly allocated linked list containing left followed by elts
-     */
-    public static <T> List<T> append(final List<T> left, T ... elts) {
-        final List<T> l = new LinkedList<T>(left);
-        l.addAll(Arrays.asList(elts));
-        return l;
-    }
-
-    /**
-     * Returns a string of the values in joined by separator, such as A,B,C
-     *
-     * @param separator separator character
-     * @param doubles   the array with values
-     * @return a string with the values separated by the separator
-     */
-    public static String join(String separator, double[] doubles) {
-        if ( doubles == null || doubles.length == 0)
-            return "";
-        else {
-            StringBuilder ret = new StringBuilder();
-            ret.append(doubles[0]);
-            for (int i = 1; i < doubles.length; ++i) {
-                ret.append(separator);
-                ret.append(doubles[i]);
-            }
-            return ret.toString();
-        }
-    }
-
-    /**
-     * Returns a string of the form elt1.toString() [sep elt2.toString() ... sep elt.toString()] for a collection of
-     * elti objects (note there's no actual space between sep and the elti elements).  Returns
-     * "" if collection is empty.  If collection contains just elt, then returns elt.toString()
-     *
-     * @param separator the string to use to separate objects
-     * @param objects a collection of objects.  the element order is defined by the iterator over objects
-     * @param <T> the type of the objects
-     * @return a non-null string
-     */
-    public static <T> String join(final String separator, final Collection<T> objects) {
-        if (objects.isEmpty()) { // fast path for empty collection
-            return "";
-        } else {
-            final Iterator<T> iter = objects.iterator();
-            final T first = iter.next();
-
-            if ( ! iter.hasNext() ) // fast path for singleton collections
-                return first.toString();
-            else { // full path for 2+ collection that actually need a join
-                final StringBuilder ret = new StringBuilder(first.toString());
-                while(iter.hasNext()) {
-                    ret.append(separator);
-                    ret.append(iter.next().toString());
-                }
-                return ret.toString();
-            }
-        }
-    }
-
-    /**
-     * Returns a {@link List List<Integer>} representation of an primitive int array.
-     * @param values the primitive int array to represent.
-     * @return never code {@code null}. The returned list will be unmodifiable yet it will reflect changes in values in the original array yet
-     *   you cannot change the values
-     */
-    public static List<Integer> asList(final int ... values) {
-        if (values == null)
-            throw new IllegalArgumentException("the input array cannot be null");
-        return new AbstractList<Integer>() {
-
-            @Override
-            public Integer get(final int index) {
-                return values[index];
-            }
-
-            @Override
-            public int size() {
-                return values.length;
-            }
-        };
-    }
-
-    /**
-     * Returns a {@link List List<Double>} representation of an primitive double array.
-     * @param values the primitive int array to represent.
-     * @return never code {@code null}. The returned list will be unmodifiable yet it will reflect changes in values in the original array yet
-     *   you cannot change the values.
-     */
-    public static List<Double> asList(final double ... values) {
-        if (values == null)
-            throw new IllegalArgumentException("the input array cannot be null");
-        return new AbstractList<Double>() {
-
-            @Override
-            public Double get(final int index) {
-                return values[index];
-            }
-
-            @Override
-            public int size() {
-                return values.length;
-            }
-        };
-    }
-
-    public static <T> String join(final String separator, final T ... objects) {
-        return join(separator, Arrays.asList(objects));
-    }
-
-    /**
-     * Create a new string thats a n duplicate copies of s
-     * @param s the string to duplicate
-     * @param nCopies how many copies?
-     * @return a string
-     */
-    public static String dupString(final String s, int nCopies) {
-        if ( s == null || s.equals("") ) throw new IllegalArgumentException("Bad s " + s);
-        if ( nCopies < 0 ) throw new IllegalArgumentException("nCopies must be >= 0 but got " + nCopies);
-
-        final StringBuilder b = new StringBuilder();
-        for ( int i = 0; i < nCopies; i++ )
-            b.append(s);
-        return b.toString();
-    }
-
-    public static String dupString(char c, int nCopies) {
-        char[] chars = new char[nCopies];
-        Arrays.fill(chars, c);
-        return new String(chars);
-    }
-
-    public static byte[] dupBytes(byte b, int nCopies) {
-        byte[] bytes = new byte[nCopies];
-        Arrays.fill(bytes, b);
-        return bytes;
-    }
-
-    // trim a string for the given character (i.e. not just whitespace)
-    public static String trim(String str, char ch) {
-        char[] array = str.toCharArray();
-
-
-        int start = 0;
-        while ( start < array.length && array[start] == ch )
-            start++;
-
-        int end = array.length - 1;
-        while ( end > start && array[end] == ch )
-            end--;
-
-        return str.substring(start, end+1);
-    }
-
-    /**
-     * Splits expressions in command args by spaces and returns the array of expressions.
-     * Expressions may use single or double quotes to group any individual expression, but not both.
-     * @param args Arguments to parse.
-     * @return Parsed expressions.
-     */
-    public static String[] escapeExpressions(String args) {
-        // special case for ' and " so we can allow expressions
-        if (args.indexOf('\'') != -1)
-            return escapeExpressions(args, "'");
-        else if (args.indexOf('\"') != -1)
-            return escapeExpressions(args, "\"");
-        else
-            return args.trim().split(" +");
-    }
-
-    /**
-     * Splits expressions in command args by spaces and the supplied delimiter and returns the array of expressions.
-     * @param args Arguments to parse.
-     * @param delimiter Delimiter for grouping expressions.
-     * @return Parsed expressions.
-     */
-    private static String[] escapeExpressions(String args, String delimiter) {
-        String[] command = {};
-        String[] split = args.split(delimiter);
-        String arg;
-        for (int i = 0; i < split.length - 1; i += 2) {
-            arg = split[i].trim();
-            if (arg.length() > 0) // if the unescaped arg has a size
-                command = Utils.concatArrays(command, arg.split(" +"));
-            command = Utils.concatArrays(command, new String[]{split[i + 1]});
-        }
-        arg = split[split.length - 1].trim();
-        if (split.length % 2 == 1) // if the command ends with a delimiter
-            if (arg.length() > 0) // if the last unescaped arg has a size
-                command = Utils.concatArrays(command, arg.split(" +"));
-        return command;
-    }
-
-    /**
-     * Concatenates two String arrays.
-     * @param A First array.
-     * @param B Second array.
-     * @return Concatenation of A then B.
-     */
-    public static String[] concatArrays(String[] A, String[] B) {
-       String[] C = new String[A.length + B.length];
-       System.arraycopy(A, 0, C, 0, A.length);
-       System.arraycopy(B, 0, C, A.length, B.length);
-       return C;
-    }
-
-    /**
-     * Concatenates byte arrays
-     * @return a concat of all bytes in allBytes in order
-     */
-    public static byte[] concat(final byte[] ... allBytes) {
-        int size = 0;
-        for ( final byte[] bytes : allBytes ) size += bytes.length;
-
-        final byte[] c = new byte[size];
-        int offset = 0;
-        for ( final byte[] bytes : allBytes ) {
-            System.arraycopy(bytes, 0, c, offset, bytes.length);
-            offset += bytes.length;
-        }
-
-        return c;
-    }
-
-    /**
-     * Appends String(s) B to array A.
-     * @param A First array.
-     * @param B Strings to append.
-     * @return A with B(s) appended.
-     */
-    public static String[] appendArray(String[] A, String... B) {
-        return concatArrays(A, B);
-    }
-
-    public static <T extends Comparable<T>> List<T> sorted(Collection<T> c) {
-        return sorted(c, false);
-    }
-
-    public static <T extends Comparable<T>> List<T> sorted(Collection<T> c, boolean reverse) {
-        List<T> l = new ArrayList<T>(c);
-        Collections.sort(l);
-        if ( reverse ) Collections.reverse(l);
-        return l;
-    }
-
-    public static <T extends Comparable<T>, V> List<V> sorted(Map<T,V> c) {
-        return sorted(c, false);
-    }
-
-    public static <T extends Comparable<T>, V> List<V> sorted(Map<T,V> c, boolean reverse) {
-        List<T> t = new ArrayList<T>(c.keySet());
-        Collections.sort(t);
-        if ( reverse ) Collections.reverse(t);
-
-        List<V> l = new ArrayList<V>();
-        for ( T k : t ) {
-            l.add(c.get(k));
-        }
-        return l;
-    }
-
-    /**
-     * Reverse a byte array of bases
-     *
-     * @param bases  the byte array of bases
-     * @return the reverse of the base byte array
-     */
-    static public byte[] reverse(byte[] bases) {
-        byte[] rcbases = new byte[bases.length];
-
-        for (int i = 0; i < bases.length; i++) {
-            rcbases[i] = bases[bases.length - i - 1];
-        }
-
-        return rcbases;
-    }
-
-    static public <T> List<T> reverse(final List<T> l) {
-        final List<T> newL = new ArrayList<T>(l);
-        Collections.reverse(newL);
-        return newL;
-    }
-
-    /**
-     * Reverse an int array of bases
-     *
-     * @param bases  the int array of bases
-     * @return the reverse of the base int array
-     */
-    static public int[] reverse(int[] bases) {
-        int[] rcbases = new int[bases.length];
-
-        for (int i = 0; i < bases.length; i++) {
-            rcbases[i] = bases[bases.length - i - 1];
-        }
-
-        return rcbases;
-    }
-
-    /**
-     * Reverse (NOT reverse-complement!!) a string
-     *
-     * @param bases  input string
-     * @return the reversed string
-     */
-    static public String reverse(String bases) {
-        return new String( reverse( bases.getBytes() )) ;
-    }
-
-    public static boolean isFlagSet(int value, int flag) {
-        return ((value & flag) == flag);
-    }
-
-    /**
-     * Helper utility that calls into the InetAddress system to resolve the hostname.  If this fails,
-     * unresolvable gets returned instead.
-     */
-    public static String resolveHostname() {
-        try {
-            return InetAddress.getLocalHost().getCanonicalHostName();
-        }
-        catch (java.net.UnknownHostException uhe) { // [beware typo in code sample -dmw]
-            return "unresolvable";
-            // handle exception
-        }
-    }
-
-
-    public static byte [] arrayFromArrayWithLength(byte[] array, int length) {
-        byte [] output = new byte[length];
-        for (int j = 0; j < length; j++)
-            output[j] = array[(j % array.length)];
-        return output;
-    }
-
-    public static void fillArrayWithByte(byte[] array, byte value) {
-        for (int i=0; i<array.length; i++)
-            array[i] = value;
-    }
-
-    /**
-     * Creates a program record for the program, adds it to the list of program records (@PG tags) in the bam file and sets
-     * up the writer with the header and presorted status.
-     *
-     * @param originalHeader      original header
-     * @param programRecord       the program record for this program
-     */
-    public static SAMFileHeader setupWriter(final SAMFileHeader originalHeader, final SAMProgramRecord programRecord) {
-        final SAMFileHeader header = originalHeader.clone();
-        final List<SAMProgramRecord> oldRecords = header.getProgramRecords();
-        final List<SAMProgramRecord> newRecords = new ArrayList<SAMProgramRecord>(oldRecords.size()+1);
-        for ( SAMProgramRecord record : oldRecords )
-            if ( (programRecord != null && !record.getId().startsWith(programRecord.getId())))
-                newRecords.add(record);
-
-        if (programRecord != null) {
-            newRecords.add(programRecord);
-            header.setProgramRecords(newRecords);
-        }
-        return header;
-    }
-
-    /**
-    * Creates a program record for the program, adds it to the list of program records (@PG tags) in the bam file and returns
-    * the new header to be added to the BAM writer.
-    *
-    * @param toolkit             the engine
-    * @param walker              the walker object (so we can extract the command line)
-    * @param PROGRAM_RECORD_NAME the name for the PG tag
-    * @return a pre-filled header for the bam writer
-    */
-    public static SAMFileHeader setupWriter(final GenomeAnalysisEngine toolkit, final SAMFileHeader originalHeader, final Object walker, final String PROGRAM_RECORD_NAME) {
-        final SAMProgramRecord programRecord = createProgramRecord(toolkit, walker, PROGRAM_RECORD_NAME);
-        return setupWriter(originalHeader, programRecord);
-    }
-
-    /**
-     * Creates a program record for the program, adds it to the list of program records (@PG tags) in the bam file and sets
-     * up the writer with the header and presorted status.
-     *
-     * @param writer              BAM file writer
-     * @param toolkit             the engine
-     * @param preSorted           whether or not the writer can assume reads are going to be added are already sorted
-     * @param walker              the walker object (so we can extract the command line)
-     * @param PROGRAM_RECORD_NAME the name for the PG tag
-     */
-    public static void setupWriter(GATKSAMFileWriter writer, GenomeAnalysisEngine toolkit, SAMFileHeader originalHeader, boolean preSorted, Object walker, String PROGRAM_RECORD_NAME) {
-        SAMFileHeader header = setupWriter(toolkit, originalHeader, walker, PROGRAM_RECORD_NAME);
-        writer.writeHeader(header);
-        writer.setPresorted(preSorted);
-    }
-
-
-    /**
-     * Creates a program record (@PG) tag
-     *
-     * @param toolkit             the engine
-     * @param walker              the walker object (so we can extract the command line)
-     * @param PROGRAM_RECORD_NAME the name for the PG tag
-     * @return a program record for the tool
-     */
-    public static SAMProgramRecord createProgramRecord(GenomeAnalysisEngine toolkit, Object walker, String PROGRAM_RECORD_NAME) {
-        final SAMProgramRecord programRecord = new SAMProgramRecord(PROGRAM_RECORD_NAME);
-        final ResourceBundle headerInfo = TextFormattingUtils.loadResourceBundle("GATKText");
-        try {
-            final String version = headerInfo.getString("org.broadinstitute.gatk.tools.version");
-            programRecord.setProgramVersion(version);
-        } catch (MissingResourceException e) {
-            // couldn't care less if the resource is missing...
-        }
-        programRecord.setCommandLine(toolkit.createApproximateCommandLineArgumentString(toolkit, walker));
-        return programRecord;
-    }
-
-    /**
-     * Returns the number of combinations represented by this collection
-     * of collection of options.
-     *
-     * For example, if this is [[A, B], [C, D], [E, F, G]] returns 2 * 2 * 3 = 12
-     */
-    @Requires("options != null")
-    public static <T> int nCombinations(final Collection<T>[] options) {
-        int nStates = 1;
-        for ( Collection<T> states : options ) {
-            nStates *= states.size();
-        }
-        return nStates;
-    }
-
-    @Requires("options != null")
-    public static <T> int nCombinations(final List<List<T>> options) {
-        if ( options.isEmpty() )
-            return 0;
-        else {
-            int nStates = 1;
-            for ( Collection<T> states : options ) {
-                nStates *= states.size();
-            }
-            return nStates;
-        }
-    }
-
-    /**
-     * Make all combinations of N size of objects
-     *
-     * if objects = [A, B, C]
-     * if N = 1 => [[A], [B], [C]]
-     * if N = 2 => [[A, A], [B, A], [C, A], [A, B], [B, B], [C, B], [A, C], [B, C], [C, C]]
-     *
-     * @param objects         list of objects
-     * @param n               size of each combination
-     * @param withReplacement if false, the resulting permutations will only contain unique objects from objects
-     * @return a list with all combinations with size n of objects.
-     */
-    public static <T> List<List<T>> makePermutations(final List<T> objects, final int n, final boolean withReplacement) {
-        final List<List<T>> combinations = new ArrayList<List<T>>();
-
-        if ( n == 1 ) {
-            for ( final T o : objects )
-                combinations.add(Collections.singletonList(o));
-        } else if (n > 1) {
-            final List<List<T>> sub = makePermutations(objects, n - 1, withReplacement);
-            for ( List<T> subI : sub ) {
-                for ( final T a : objects ) {
-                    if ( withReplacement || ! subI.contains(a) )
-                        combinations.add(Utils.cons(a, subI));
-                }
-            }
-        }
-
-        return combinations;
-    }
-
    /**
     * Convenience function that formats the novelty rate as a %.2f string.
     *
     * The novelty rate is the percentage of variants that are NOT known,
     * i.e. (all - known) / all expressed as a percentage.
     *
     * @param known number of variants from all that are known
     * @param all number of all variants
     * @return a String novelty rate, or NA if all == 0
     */
    public static String formattedNoveltyRate(final int known, final int all) {
        return formattedPercent(all - known, all);
    }
-
-    /**
-     * Convenience function that formats the novelty rate as a %.2f string
-     *
-     * @param x number of objects part of total that meet some criteria
-     * @param total count of all objects, including x
-     * @return a String percent rate, or NA if total == 0
-     */
-    public static String formattedPercent(final long x, final long total) {
-        return total == 0 ? "NA" : String.format("%.2f", (100.0*x) / total);
-    }
-
-    /**
-     * Convenience function that formats a ratio as a %.2f string
-     *
-     * @param num  number of observations in the numerator
-     * @param denom number of observations in the denumerator
-     * @return a String formatted ratio, or NA if all == 0
-     */
-    public static String formattedRatio(final long num, final long denom) {
-        return denom == 0 ? "NA" : String.format("%.2f", num / (1.0 * denom));
-    }
-
-    /**
-     * Adds element from an array into a collection.
-     *
-     * In the event of exception being throw due to some element, <code>dest</code> might have been modified by
-     * the successful addition of element before that one.
-     *
-     * @param dest the destination collection which cannot be <code>null</code> and should be able to accept
-     *             the input elements.
-     * @param elements the element to add to <code>dest</code>
-     * @param <T>  collection type element.
-     * @throws UnsupportedOperationException if the <tt>add</tt> operation
-     *         is not supported by <code>dest</code>.
-     * @throws ClassCastException if the class of any of the elements
-     *         prevents it from being added to <code>dest</code>.
-     * @throws NullPointerException if any of the elements is <code>null</code> and <code>dest</code>
-     *         does not permit <code>null</code> elements
-     * @throws IllegalArgumentException if some property of any of the elements
-     *         prevents it from being added to this collection
-     * @throws IllegalStateException if any of the elements cannot be added at this
-     *         time due to insertion restrictions.
-     * @return <code>true</code> if the collection was modified as a result.
-     */
-    public static <T> boolean addAll(Collection<T> dest, T ... elements) {
-        boolean result = false;
-        for (final T e : elements) {
-            result = dest.add(e) | result;
-        }
-        return result;
-    }
-
-    /**
-     * Create a constant map that maps each value in values to itself
-     */
-    public static <T> Map<T, T> makeIdentityFunctionMap(Collection<T> values) {
-        Map<T,T> map = new HashMap<T, T>(values.size());
-        for ( final T value : values )
-            map.put(value, value);
-        return Collections.unmodifiableMap(map);
-    }
-
-    /**
-     * Divides the input list into a list of sublists, which contains group size elements (except potentially the last one)
-     *
-     * list = [A, B, C, D, E]
-     * groupSize = 2
-     * result = [[A, B], [C, D], [E]]
-     *
-     */
-    public static <T> List<List<T>> groupList(final List<T> list, final int groupSize) {
-        if ( groupSize < 1 ) throw new IllegalArgumentException("groupSize >= 1");
-
-        final List<List<T>> subLists = new LinkedList<List<T>>();
-        int n = list.size();
-        for ( int i = 0; i < n; i += groupSize ) {
-            subLists.add(list.subList(i, Math.min(i + groupSize, n)));
-        }
-        return subLists;
-    }
-
-    /**
-     * @see #calcMD5(byte[])
-     */
-    public static String calcMD5(final String s) {
-        return calcMD5(s.getBytes());
-    }
-
-    /**
-     * Calculate the md5 for bytes, and return the result as a 32 character string
-     *
-     * @param bytes the bytes to calculate the md5 of
-     * @return the md5 of bytes, as a 32-character long string
-     */
-    @Ensures({"result != null", "result.length() == 32"})
-    public static String calcMD5(final byte[] bytes) {
-        if ( bytes == null ) throw new IllegalArgumentException("bytes cannot be null");
-        try {
-            final byte[] thedigest = MessageDigest.getInstance("MD5").digest(bytes);
-            final BigInteger bigInt = new BigInteger(1, thedigest);
-
-            String md5String = bigInt.toString(16);
-            while (md5String.length() < 32) md5String = "0" + md5String; // pad to length 32
-            return md5String;
-        }
-        catch ( NoSuchAlgorithmException e ) {
-            throw new IllegalStateException("MD5 digest algorithm not present");
-        }
-    }
-
-    /**
-     * Does big end with the exact sequence of bytes in suffix?
-     *
-     * @param big a non-null byte[] to test if it a prefix + suffix
-     * @param suffix a non-null byte[] to test if it's a suffix of big
-     * @return true if big is proper byte[] composed of some prefix + suffix
-     */
-    public static boolean endsWith(final byte[] big, final byte[] suffix) {
-        if ( big == null ) throw new IllegalArgumentException("big cannot be null");
-        if ( suffix == null ) throw new IllegalArgumentException("suffix cannot be null");
-        return new String(big).endsWith(new String(suffix));
-    }
-
-    /**
-     * Get the length of the longest common prefix of seq1 and seq2
-     * @param seq1 non-null byte array
-     * @param seq2 non-null byte array
-     * @param maxLength the maximum allowed length to return
-     * @return the length of the longest common prefix of seq1 and seq2, >= 0
-     */
-    public static int longestCommonPrefix(final byte[] seq1, final byte[] seq2, final int maxLength) {
-        if ( seq1 == null ) throw new IllegalArgumentException("seq1 is null");
-        if ( seq2 == null ) throw new IllegalArgumentException("seq2 is null");
-        if ( maxLength < 0 ) throw new IllegalArgumentException("maxLength < 0 " + maxLength);
-
-        final int end = Math.min(seq1.length, Math.min(seq2.length, maxLength));
-        for ( int i = 0; i < end; i++ ) {
-            if ( seq1[i] != seq2[i] )
-                return i;
-        }
-        return end;
-    }
-
-    /**
-     * Get the length of the longest common suffix of seq1 and seq2
-     * @param seq1 non-null byte array
-     * @param seq2 non-null byte array
-     * @param maxLength the maximum allowed length to return
-     * @return the length of the longest common suffix of seq1 and seq2, >= 0
-     */
-    public static int longestCommonSuffix(final byte[] seq1, final byte[] seq2, final int maxLength) {
-        if ( seq1 == null ) throw new IllegalArgumentException("seq1 is null");
-        if ( seq2 == null ) throw new IllegalArgumentException("seq2 is null");
-        if ( maxLength < 0 ) throw new IllegalArgumentException("maxLength < 0 " + maxLength);
-
-        final int end = Math.min(seq1.length, Math.min(seq2.length, maxLength));
-        for ( int i = 0; i < end; i++ ) {
-            if ( seq1[seq1.length - i - 1] != seq2[seq2.length - i - 1] )
-                return i;
-        }
-        return end;
-    }
-
-    /**
-     * Trim any number of bases from the front and/or back of an array
-     *
-     * @param seq                the sequence to trim
-     * @param trimFromFront      how much to trim from the front
-     * @param trimFromBack       how much to trim from the back
-     * @return a non-null array; can be the original array (i.e. not a copy)
-     */
-    public static byte[] trimArray(final byte[] seq, final int trimFromFront, final int trimFromBack) {
-        if ( trimFromFront + trimFromBack > seq.length )
-            throw new IllegalArgumentException("trimming total is larger than the original array");
-
-        // don't perform array copies if we need to copy everything anyways
-        return  ( trimFromFront == 0 && trimFromBack == 0 ) ? seq : Arrays.copyOfRange(seq, trimFromFront, seq.length - trimFromBack);
-    }
-
-    /**
-     * Simple wrapper for sticking elements of a int[] array into a List<Integer>
-     * @param ar - the array whose elements should be listified
-     * @return - a List<Integer> where each element has the same value as the corresponding index in @ar
-     */
-    public static List<Integer> listFromPrimitives(final int[] ar) {
-        final ArrayList<Integer> lst = new ArrayList<>(ar.length);
-        for ( final int d : ar ) {
-            lst.add(d);
-        }
-
-        return lst;
-    }
-
-    /**
-     * Compares sections from to byte arrays to verify whether they contain the same values.
-     *
-     * @param left first array to compare.
-     * @param leftOffset first position of the first array to compare.
-     * @param right second array to compare.
-     * @param rightOffset first position of the second array to compare.
-     * @param length number of positions to compare.
-     *
-     * @throws IllegalArgumentException if <ul>
-     *     <li>either {@code left} or {@code right} is {@code null} or</li>
-     *     <li>any off the offset or length combine point outside any of the two arrays</li>
-     * </ul>
-     * @return {@code true} iff {@code length} is 0 or all the bytes in both ranges are the same two-by-two.
-     */
-    public static boolean equalRange(final byte[] left, final int leftOffset, byte[] right, final int rightOffset, final int length) {
-        if (left == null) throw new IllegalArgumentException("left cannot be null");
-        if (right == null) throw new IllegalArgumentException("right cannot be null");
-        if (length < 0) throw new IllegalArgumentException("the length cannot be negative");
-        if (leftOffset < 0) throw new IllegalArgumentException("left offset cannot be negative");
-        if (leftOffset + length > left.length) throw new IllegalArgumentException("length goes beyond end of left array");
-        if (rightOffset < 0) throw new IllegalArgumentException("right offset cannot be negative");
-        if (rightOffset + length > right.length) throw new IllegalArgumentException("length goes beyond end of right array");
-
-        for (int i = 0; i < length; i++)
-            if (left[leftOffset + i] != right[rightOffset + i])
-                return false;
-        return true;
-    }
-
    /**
     * Skims out positions of an array returning a shorter one with the remaining positions in the same order.
     *
     * Convenience overload that skims the whole of {@code original} (offset 0) into a
     * brand-new array of the same component type; delegates to the 6-argument overload.
     *
     * @param original the original array to splice.
     * @param remove for each position in {@code original} indicates whether it should be spliced away ({@code true}),
     *               or retained ({@code false})
     *
     * @param <T> the array type.
     *
     * @throws IllegalArgumentException if either {@code original} or {@code remove} is {@code null},
     *    or {@code remove} length is different to {@code original}'s, or {@code original} is not in
     *    fact an array.
     *
     * @return never {@code null}; a freshly allocated array holding the retained positions.
     */
    public static <T> T skimArray(final T original, final boolean[] remove) {
        return skimArray(original,0,null,0,remove,0);
    }
-
-    /**
-     * Skims out positions of an array returning a shorter one with the remaning positions in the same order.
-     *
-     * <p>
-     *     If the {@code dest} array provide is not long enough a new one will be created and returned with the
-     *     same component type. All elements before {@code destOffset} will be copied from the input to the
-     *     result array. If {@code dest} is {@code null}, a brand-new array large enough will be created where
-     *     the position preceding {@code destOffset} will be left with the default value. The component type
-     *     Will match the one of the {@code source} array.
-     * </p>
-     *
-     * @param source the original array to splice.
-     * @param sourceOffset the first position to skim.
-     * @param dest the destination array.
-     * @param destOffset the first position where to copy the skimed array values.
-     * @param remove for each position in {@code original} indicates whether it should be spliced away ({@code true}),
-     *               or retained ({@code false})
-     * @param removeOffset the first position in the remove index array to consider.
-     *
-     * @param <T> the array type.
-     *
-     * @throws IllegalArgumentException if either {@code original} or {@code remove} is {@code null},
-     *    or {@code remove length is different to {@code original}'s}, or {@code original} is not in
-     *    fact an array.
-     *
-     * @return never {@code null}.
-     */
-    public static <T> T skimArray(final T source, final int sourceOffset, final T dest, final int destOffset, final boolean[] remove, final int removeOffset) {
-        if (source == null)
-            throw new IllegalArgumentException("the source array cannot be null");
-        @SuppressWarnings("unchecked")
-        final Class<T> sourceClazz = (Class<T>) source.getClass();
-
-        if (!sourceClazz.isArray())
-            throw new IllegalArgumentException("the source array is not in fact an array instance");
-        final int length = Array.getLength(source) - sourceOffset;
-        if (length < 0)
-            throw new IllegalArgumentException("the source offset goes beyond the source array length");
-        return skimArray(source,sourceOffset,dest,destOffset,remove,removeOffset,length);
-    }
-
    /**
     * Skims out positions of an array returning a shorter one with the remaining positions in the same order.
     *
     * <p>
     *     If the {@code dest} array provided is not long enough a new one will be created and returned with the
     *     same component type. All elements before {@code destOffset} will be copied from the input to the
     *     result array. If {@code dest} is {@code null}, a brand-new array large enough will be created where
     *     the positions preceding {@code destOffset} will be left with the default value. The component type
     *     will match the one of the {@code source} array.
     * </p>
     *
     * <p>
     *     Positions whose remove flag lies beyond the end of {@code remove} (when
     *     {@code remove.length - removeOffset < length}) are treated as retained.
     * </p>
     *
     * @param source the original array to splice.
     * @param sourceOffset the first position to skim.
     * @param dest the destination array (may be null).
     * @param destOffset the first position where to copy the skimmed array values.
     * @param remove for each position in {@code source} indicates whether it should be spliced away ({@code true}),
     *               or retained ({@code false})
     * @param removeOffset the first position in the remove index array to consider.
     * @param length the total number of positions in {@code source} to consider. Thus only the {@code sourceOffset} to
     *               {@code sourceOffset + length - 1} region will be skimmed.
     *
     * @param <T> the array type.
     *
     * @throws IllegalArgumentException if either {@code source} or {@code remove} is {@code null},
     *    any offset or length is negative, any offset/length combination points outside its array,
     *    or {@code source} is not in fact an array.
     *
     * @return never {@code null}.
     */
    public static <T> T skimArray(final T source, final int sourceOffset, final T dest, final int destOffset,
                                  final boolean[] remove, final int removeOffset, final int length) {
        if (source == null)
            throw new IllegalArgumentException("the source array cannot be null");
        if (remove == null)
            throw new IllegalArgumentException("the remove array cannot be null");
        if (sourceOffset < 0)
            throw new IllegalArgumentException("the source array offset cannot be negative");
        if (destOffset < 0)
            throw new IllegalArgumentException("the destination array offset cannot be negative");
        if (removeOffset < 0)
            throw new IllegalArgumentException("the remove array offset cannot be negative");
        if (length < 0)
            throw new IllegalArgumentException("the length provided cannot be negative");

        // Number of remove flags actually available; positions beyond this are treated as retained.
        final int removeLength = Math.min(remove.length - removeOffset,length);

        if (removeLength < 0)
            throw new IllegalArgumentException("the remove offset provided falls beyond the remove array end");


        @SuppressWarnings("unchecked")
        final Class<T> sourceClazz = (Class<T>) source.getClass();

        if (!sourceClazz.isArray())
            throw new IllegalArgumentException("the source array is not in fact an array instance");

        // Validates dest/source component compatibility and picks the result's array class.
        final Class<T> destClazz = skimArrayDetermineDestArrayClass(dest, sourceClazz);

        final int sourceLength = Array.getLength(source);

        if (sourceLength < length + sourceOffset)
            throw new IllegalArgumentException("the source array is too small considering length and offset");

        // count how many positions are to be removed.

        int removeCount = 0;

        final int removeEnd = removeLength + removeOffset;
        for (int i = removeOffset; i < removeEnd; i++)
            if  (remove[i]) removeCount++;


        final int newLength = length - removeCount;


        // Reuses dest when large enough, otherwise allocates (copying the first destOffset positions).
        @SuppressWarnings("unchecked")
        final T result = skimArrayBuildResultArray(dest, destOffset, destClazz, newLength);
        // No removals, just copy the whole thing.

        if (removeCount == 0)
            System.arraycopy(source,sourceOffset,result,destOffset,length);
        else if (length > 0) {  // if length == 0 nothing to do.
            // Copy the retained positions run by run: each outer iteration skips one stretch of
            // removed positions, then arraycopies the following stretch of retained positions.
            // NOTE(review): the nextRemoveIndex bookkeeping in the two inner loops is intricate
            // (it is advanced both while skipping removed positions and while extending a retained
            // run); it assumes remove flags align one-to-one with source positions starting at
            // removeOffset — verify against callers before restructuring.
            int nextOriginalIndex = 0;
            int nextNewIndex = 0;
            int nextRemoveIndex = removeOffset;
            while (nextOriginalIndex < length && nextNewIndex < newLength) {
                while (nextRemoveIndex < removeEnd && remove[nextRemoveIndex++]) { nextOriginalIndex++; } // skip positions to be spliced.
                // Since we make the nextNewIndex < newLength check in the while condition
                // there is no need to include the following break, as is guaranteed not to be true:
                // if (nextOriginalIndex >= length) break; // we reach the final (last positions are to be spliced.
                final int copyStart = nextOriginalIndex;
                // extend the retained run; flags past removeEnd count as retained
                while (++nextOriginalIndex < length && (nextRemoveIndex >= removeEnd || !remove[nextRemoveIndex])) { nextRemoveIndex++; }
                final int copyEnd = nextOriginalIndex;
                final int copyLength = copyEnd - copyStart;
                System.arraycopy(source, sourceOffset + copyStart, result, destOffset + nextNewIndex, copyLength);
                nextNewIndex += copyLength;
            }
        }
        return result;
    }
-
-    private static <T> T skimArrayBuildResultArray(final T dest, final int destOffset, final Class<T> destClazz, final int newLength) {
-        @SuppressWarnings("unchecked")
-        final T result;
-
-        if (dest == null)
-            result = (T) Array.newInstance(destClazz.getComponentType(), newLength + destOffset);
-        else if (Array.getLength(dest) < newLength + destOffset) {
-            result = (T) Array.newInstance(destClazz.getComponentType(),newLength + destOffset);
-            if (destOffset > 0) System.arraycopy(dest,0,result,0,destOffset);
-        } else
-            result = dest;
-        return result;
-    }
-
-    private static <T> Class<T> skimArrayDetermineDestArrayClass(final T dest, Class<T> sourceClazz) {
-        final Class<T> destClazz;
-        if (dest == null)
-            destClazz = sourceClazz;
-        else {
-            destClazz = (Class<T>) dest.getClass();
-            if (destClazz != sourceClazz) {
-                if (!destClazz.isArray())
-                    throw new IllegalArgumentException("the destination array class must be an array");
-                if (sourceClazz.getComponentType().isAssignableFrom(destClazz.getComponentType()))
-                    throw new IllegalArgumentException("the provided destination array class cannot contain values from the source due to type incompatibility");
-            }
-        }
-        return destClazz;
-    }
-
-    /**
-     * Makes a deep clone of the array provided.
-     *
-     * <p>
-     *     When you can use {@link Arrays#copyOf} or an array {@link Object#clone()}  to create a copy of itself,
-     *     if it is multi-dimentional each sub array or matrix would be cloned.
-     * </p>
-     *
-     * <p>
-     *     Notice however that if the base type is an Object type, the base elements themselves wont be cloned.
-     * </p>
-     *
-     * @param array the array to deep-clone.
-     * @param <T> type of the array.
-     *
-     * @throws IllegalArgumentException if {@code array} is {@code null} or is not an array.
-     */
-    public static <T>  T deepCloneArray(final T array) {
-
-        if (array == null)
-            throw new IllegalArgumentException("");
-
-        @SuppressWarnings("unchecked")
-        final Class<T> clazz = (Class<T>) array.getClass();
-
-
-        if (!clazz.isArray())
-            throw new IllegalArgumentException("the input is not an array");
-
-        final int dimension = calculateArrayDimensions(clazz);
-
-        return deepCloneArrayUnchecked(array,clazz, dimension);
-    }
-
-    private static int calculateArrayDimensions(final Class<?> clazz) {
-        if (clazz.isArray())
-            return calculateArrayDimensions(clazz.getComponentType()) + 1;
-        else
-            return 0;
-    }
-
-    private static <T> T deepCloneArrayUnchecked(final T array, final Class<T> clazz, final int dimension) {
-
-
-        final int length = Array.getLength(array);
-
-        final Class componentClass = clazz.getComponentType();
-
-        final T result = (T) Array.newInstance(componentClass,length);
-
-        if (dimension <= 1) {
-            System.arraycopy(array, 0, result, 0, length);
-            return result;
-        }
-
-
-        final int dimensionMinus1 = dimension - 1;
-
-        for (int i = 0; i < length; i++)
-            Array.set(result,i,deepCloneArrayUnchecked(Array.get(array,i),componentClass,dimensionMinus1));
-
-        return result;
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/activeregion/ActiveRegion.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/activeregion/ActiveRegion.java
deleted file mode 100644
index 86a89c1..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/activeregion/ActiveRegion.java
+++ /dev/null
@@ -1,500 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.activeregion;
-
-import com.google.java.contract.Ensures;
-import com.google.java.contract.Invariant;
-import htsjdk.samtools.reference.IndexedFastaSequenceFile;
-import org.broadinstitute.gatk.utils.GenomeLoc;
-import org.broadinstitute.gatk.utils.GenomeLocParser;
-import org.broadinstitute.gatk.utils.GenomeLocSortedSet;
-import org.broadinstitute.gatk.utils.HasGenomeLocation;
-import org.broadinstitute.gatk.utils.clipping.ReadClipper;
-import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
-import org.broadinstitute.gatk.utils.sam.ReadUtils;
-
-import java.util.*;
-
-/**
- * Represents a single active region created by the Active Region Traversal for processing
- *
- * An active region is a single contiguous span of bases on the genome that should be operated
- * on as a single unit for the active region traversal.  The region may contain a list of
- * reads that overlap the region (may because there may be no reads in the region).  The region
- * is tagged as being either active or inactive, depending on the probabilities provided by
- * the isActiveProb results from the ART walker.  Each region carries with it the
- * exact span of the region (bases which are the core of the isActiveProbs from the walker) as
- * well as an extended size, that includes the ART walker's extension size.  Reads in the region
- * provided by ART include all reads overlapping the extended span, not the raw span.
- *
- * User: rpoplin
- * Date: 1/4/12
- */
- at Invariant({
-        "extension >= 0",
-        "activeRegionLoc != null",
-        "genomeLocParser != null",
-        "spanIncludingReads != null",
-        "extendedLoc != null"
-})
-public class ActiveRegion implements HasGenomeLocation {
-    /**
-     * The reads included in this active region.  May be empty upon creation, and expand / contract
-     * as reads are added or removed from this region.
-     */
-    private final List<GATKSAMRecord> reads = new ArrayList<GATKSAMRecord>();
-
-    /**
-     * An ordered list (by genomic coordinate) of the ActivityProfileStates that went
-     * into this active region.  May be empty, which says that no supporting states were
-     * provided when this region was created.
-     */
-    private final List<ActivityProfileState> supportingStates;
-
-    /**
-     * The raw span of this active region, not including the active region extension
-     */
-    private final GenomeLoc activeRegionLoc;
-
-    /**
-     * The span of this active region on the genome, including the active region extension
-     */
-    private final GenomeLoc extendedLoc;
-
-    /**
-     * The extension, in bp, of this active region.
-     */
-    private final int extension;
-
-    /**
-     * A genomeLocParser so we can create genomeLocs
-     */
-    private final GenomeLocParser genomeLocParser;
-
-    /**
-     * Does this region represent an active region (all isActiveProbs above threshold) or
-     * an inactive region (all isActiveProbs below threshold)?
-     */
-    private final boolean isActive;
-
-    /**
-     * The span of this active region, including the bp covered by all reads in this
-     * region.  This is the union of extendedLoc and the loc of all reads in this region.
-     *
-     * Must be at least as large as extendedLoc, but may be larger when reads
-     * partially overlap this region.
-     */
-    private GenomeLoc spanIncludingReads;
-
-
-    /**
-     * Indicates whether the active region has been finalized
-     */
-    private boolean hasBeenFinalized;
-
-    /**
-     * Create a new ActiveRegion containing no reads
-     *
-     * @param activeRegionLoc the span of this active region
-     * @param supportingStates the states that went into creating this region, or null / empty if none are available.
-     *                         If not empty, must have exactly one state for each bp in activeRegionLoc
-     * @param isActive indicates whether this is an active region, or an inactive one
-     * @param genomeLocParser a non-null parser to let us create new genome locs
-     * @param extension the active region extension to use for this active region
-     */
-    public ActiveRegion( final GenomeLoc activeRegionLoc, final List<ActivityProfileState> supportingStates, final boolean isActive, final GenomeLocParser genomeLocParser, final int extension ) {
-        if ( activeRegionLoc == null ) throw new IllegalArgumentException("activeRegionLoc cannot be null");
-        if ( activeRegionLoc.size() == 0 ) throw new IllegalArgumentException("Active region cannot be of zero size, but got " + activeRegionLoc);
-        if ( genomeLocParser == null ) throw new IllegalArgumentException("genomeLocParser cannot be null");
-        if ( extension < 0 ) throw new IllegalArgumentException("extension cannot be < 0 but got " + extension);
-
-        this.activeRegionLoc = activeRegionLoc;
-        this.supportingStates = supportingStates == null ? Collections.<ActivityProfileState>emptyList() : new ArrayList<ActivityProfileState>(supportingStates);
-        this.isActive = isActive;
-        this.genomeLocParser = genomeLocParser;
-        this.extension = extension;
-        this.extendedLoc = genomeLocParser.createGenomeLocOnContig(activeRegionLoc.getContig(), activeRegionLoc.getStart() - extension, activeRegionLoc.getStop() + extension);
-        this.spanIncludingReads = extendedLoc;
-
-        if ( ! this.supportingStates.isEmpty() ) {
-            if ( this.supportingStates.size() != activeRegionLoc.size() )
-                throw new IllegalArgumentException("Supporting states wasn't empty but it doesn't have exactly one state per bp in the active region: states " + this.supportingStates.size() + " vs. bp in region = " + activeRegionLoc.size());
-            GenomeLoc lastStateLoc = null;
-            for ( final ActivityProfileState state : this.supportingStates ) {
-                if ( lastStateLoc != null ) {
-                    if ( state.getLoc().getStart() != lastStateLoc.getStart() + 1 || state.getLoc().getContigIndex() != lastStateLoc.getContigIndex())
-                        throw new IllegalArgumentException("Supporting state has an invalid sequence: last state was " + lastStateLoc + " but next state was " + state);
-                }
-                lastStateLoc = state.getLoc();
-            }
-        }
-    }
-
-    /**
-     * Simple interface to create an active region that isActive without any profile state
-     */
-    public ActiveRegion( final GenomeLoc activeRegionLoc, final GenomeLocParser genomeLocParser, final int extension ) {
-        this(activeRegionLoc, Collections.<ActivityProfileState>emptyList(), true, genomeLocParser, extension);
-    }
-
-    @Override
-    public String toString() {
-        return "ActiveRegion "  + activeRegionLoc.toString() + " active?=" + isActive() + " nReads=" + reads.size();
-    }
-
-    /**
-     * See #getActiveRegionReference but with padding == 0
-     */
-    public byte[] getActiveRegionReference( final IndexedFastaSequenceFile referenceReader ) {
-        return getActiveRegionReference(referenceReader, 0);
-    }
-
-    /**
-     * Get the reference bases from referenceReader spanned by the extended location of this active region,
-     * including additional padding bp on either side.  If this expanded region would exceed the boundaries
-     * of the active region's contig, the returned result will be truncated to only include on-genome reference
-     * bases
-     * @param referenceReader the source of the reference genome bases
-     * @param padding the padding, in BP, we want to add to either side of this active region extended region
-     * @return a non-null array of bytes holding the reference bases in referenceReader
-     */
-    @Ensures("result != null")
-    public byte[] getActiveRegionReference( final IndexedFastaSequenceFile referenceReader, final int padding ) {
-        return getReference(referenceReader, padding, extendedLoc);
-    }
-
-    /**
-     * See #getActiveRegionReference but using the span including regions not the extended span
-     */
-    public byte[] getFullReference( final IndexedFastaSequenceFile referenceReader ) {
-        return getFullReference(referenceReader, 0);
-    }
-
-    /**
-     * See #getActiveRegionReference but using the span including regions not the extended span
-     */
-    public byte[] getFullReference( final IndexedFastaSequenceFile referenceReader, final int padding ) {
-        return getReference(referenceReader, padding, spanIncludingReads);
-    }
-
-    /**
-     * Get the reference bases from referenceReader spanned by the extended location of this active region,
-     * including additional padding bp on either side.  If this expanded region would exceed the boundaries
-     * of the active region's contig, the returned result will be truncated to only include on-genome reference
-     * bases
-     * @param referenceReader the source of the reference genome bases
-     * @param padding the padding, in BP, we want to add to either side of this active region extended region
-     * @param genomeLoc a non-null genome loc indicating the base span of the bp we'd like to get the reference for
-     * @return a non-null array of bytes holding the reference bases in referenceReader
-     */
-    @Ensures("result != null")
-    public byte[] getReference( final IndexedFastaSequenceFile referenceReader, final int padding, final GenomeLoc genomeLoc ) {
-        if ( referenceReader == null ) throw new IllegalArgumentException("referenceReader cannot be null");
-        if ( padding < 0 ) throw new IllegalArgumentException("padding must be a positive integer but got " + padding);
-        if ( genomeLoc == null ) throw new IllegalArgumentException("genomeLoc cannot be null");
-        if ( genomeLoc.size() == 0 ) throw new IllegalArgumentException("GenomeLoc must have size > 0 but got " + genomeLoc);
-
-        final byte[] reference =  referenceReader.getSubsequenceAt( genomeLoc.getContig(),
-                Math.max(1, genomeLoc.getStart() - padding),
-                Math.min(referenceReader.getSequenceDictionary().getSequence(genomeLoc.getContig()).getSequenceLength(), genomeLoc.getStop() + padding) ).getBases();
-
-        return reference;
-    }
-
-    /**
-     * Get the raw span of this active region (excluding the extension)
-     * @return a non-null genome loc
-     */
-    @Override
-    @Ensures("result != null")
-    public GenomeLoc getLocation() { return activeRegionLoc; }
-
-    /**
-     * Get the span of this active region including the extension value
-     * @return a non-null GenomeLoc
-     */
-    @Ensures("result != null")
-    public GenomeLoc getExtendedLoc() { return extendedLoc; }
-
-    /**
-     * Get the span of this active region including the extension and the projections on the
-     * genome of all reads in this active region.  That is, returns the bp covered by this
-     * region and all reads in the region.
-     * @return a non-null genome loc
-     */
-    @Ensures("result != null")
-    public GenomeLoc getReadSpanLoc() { return spanIncludingReads; }
-
-    /**
-     * Get the active profile states that went into creating this region, if possible
-     * @return an unmodifiable list of states that led to the creation of this region, or an empty
-     *         list if none were provided
-     */
-    @Ensures("result != null")
-    public List<ActivityProfileState> getSupportingStates() {
-        return Collections.unmodifiableList(supportingStates);
-    }
-
-    /**
-     * Get the active region extension applied to this region
-     *
-     * The extension is >= 0 bp in size, and indicates how much padding this art walker wanted for its regions
-     *
-     * @return the size in bp of the region extension
-     */
-    @Ensures("result >= 0")
-    public int getExtension() { return extension; }
-
-    /**
-     * Get an unmodifiable list of reads currently in this active region.
-     *
-     * The reads are sorted by their coordinate position
-     *
-     * @return an unmodifiable list of reads in this active region
-     */
-    @Ensures("result != null")
-    public List<GATKSAMRecord> getReads() {
-        return Collections.unmodifiableList(reads);
-    }
-
-    /**
-     * Get the number of reads currently in this active region
-     * @return an integer >= 0
-     */
-    @Ensures("result >= 0")
-    public int size() { return reads.size(); }
-
-    /**
-     * Add read to this active region
-     *
-     * Read must have an alignment start >= the alignment start of the last read currently in this active region.
-     *
-     * @throws IllegalArgumentException if read doesn't overlap the extended region of this active region
-     *
-     * @param read a non-null GATKSAMRecord
-     */
-    @Ensures("reads.size() == old(reads.size()) + 1")
-    public void add( final GATKSAMRecord read ) {
-        if ( read == null ) throw new IllegalArgumentException("Read cannot be null");
-
-        final GenomeLoc readLoc = genomeLocParser.createGenomeLoc( read );
-        if ( ! readOverlapsRegion(read) )
-            throw new IllegalArgumentException("Read location " + readLoc + " doesn't overlap with active region extended span " + extendedLoc);
-
-        spanIncludingReads = spanIncludingReads.union( readLoc );
-
-        if ( ! reads.isEmpty() ) {
-            final GATKSAMRecord lastRead = reads.get(size() - 1);
-            if ( ! lastRead.getReferenceIndex().equals(read.getReferenceIndex()) )
-                throw new IllegalArgumentException("Attempting to add a read to ActiveRegion not on the same contig as other reads: lastRead " + lastRead + " attempting to add " + read);
-
-            if ( read.getAlignmentStart() < lastRead.getAlignmentStart() )
-                throw new IllegalArgumentException("Attempting to add a read to ActiveRegion out of order w.r.t. other reads: lastRead " + lastRead + " at " + lastRead.getAlignmentStart() + " attempting to add " + read + " at " + read.getAlignmentStart());
-        }
-
-        reads.add( read );
-    }
-
-    /**
-     * Returns true if read would overlap the extended extent of this region
-     * @param read the read we want to test
-     * @return true if read can be added to this region, false otherwise
-     */
-    public boolean readOverlapsRegion(final GATKSAMRecord read) {
-        final GenomeLoc readLoc = genomeLocParser.createGenomeLoc( read );
-        return readLoc.overlapsP(extendedLoc);
-    }
-
-    /**
-     * Add all reads to this active region
-     * @param reads a collection of reads to add to this active region
-     */
-    public void addAll(final Collection<GATKSAMRecord> reads) {
-        if ( reads == null ) throw new IllegalArgumentException("reads cannot be null");
-        for ( final GATKSAMRecord read : reads )
-            add(read);
-    }
-
-    /**
-     * Clear all of the reads currently in this active region
-     */
-    @Ensures("size() == 0")
-    public void clearReads() {
-        spanIncludingReads = extendedLoc;
-        reads.clear();
-    }
-
-    /**
-     * Remove all of the reads in readsToRemove from this active region
-     * @param readsToRemove the set of reads we want to remove
-     */
-    public void removeAll( final Set<GATKSAMRecord> readsToRemove ) {
-        final Iterator<GATKSAMRecord> it = reads.iterator();
-        spanIncludingReads = extendedLoc;
-        while ( it.hasNext() ) {
-            final GATKSAMRecord read = it.next();
-            if ( readsToRemove.contains(read) )
-                it.remove();
-            else
-                spanIncludingReads = spanIncludingReads.union( genomeLocParser.createGenomeLoc(read) );
-        }
-    }
-
-    /**
-     * Is this region equal to other, excluding any reads in either region in the comparison
-     * @param other the other active region we want to test
-     * @return true if this region is equal, excluding any reads and derived values, to other
-     */
-    protected boolean equalExceptReads(final ActiveRegion other) {
-        if ( activeRegionLoc.compareTo(other.activeRegionLoc) != 0 ) return false;
-        if ( isActive() != other.isActive()) return false;
-        if ( genomeLocParser != other.genomeLocParser ) return false;
-        if ( extension != other.extension ) return false;
-        if ( extendedLoc.compareTo(other.extendedLoc) != 0 ) return false;
-        return true;
-    }
-
-    /**
-     * Does this region represent an active region (all isActiveProbs above threshold) or
-     * an inactive region (all isActiveProbs below threshold)?
-     */
-    public boolean isActive() {
-        return isActive;
-    }
-
-    /**
-     * Intersect this active region with the allowed intervals, returning a list of active regions
-     * that only contain locations present in intervals
-     *
-     * Note that the returned list may be empty, if this active region doesn't overlap the set at all
-     *
-     * Note that the resulting regions are all empty, regardless of whether the current active region has reads
-     *
-     * @param intervals a non-null set of intervals that are allowed
-     * @return an ordered list of active region where each interval is contained within intervals
-     */
-    @Ensures("result != null")
-    protected List<ActiveRegion> splitAndTrimToIntervals(final GenomeLocSortedSet intervals) {
-        final List<GenomeLoc> allOverlapping = intervals.getOverlapping(getLocation());
-        final List<ActiveRegion> clippedRegions = new LinkedList<ActiveRegion>();
-
-        for ( final GenomeLoc overlapping : allOverlapping ) {
-            clippedRegions.add(trim(overlapping, extension));
-        }
-
-        return clippedRegions;
-    }
-
-    /**
-     * Trim this active region to just the span, producing a new active region without any reads that has only
-     * the extent of newExtend intersected with the current extent
-     * @param span the new extent of the active region we want
-     * @param extension the extension size we want for the newly trimmed active region
-     * @return a non-null, empty active region
-     */
-    public ActiveRegion trim(final GenomeLoc span, final int extension) {
-        if ( span == null ) throw new IllegalArgumentException("Active region extent cannot be null");
-        if ( extension < 0) throw new IllegalArgumentException("the extension size must be 0 or greater");
-        final int extendStart = Math.max(1,span.getStart() - extension);
-        final int maxStop = genomeLocParser.getContigs().getSequence(span.getContigIndex()).getSequenceLength();
-        final int extendStop = Math.min(span.getStop() + extension, maxStop);
-        final GenomeLoc extendedSpan = genomeLocParser.createGenomeLoc(span.getContig(), extendStart, extendStop);
-        return trim(span, extendedSpan);
-
-//TODO - Inconsistent support of substates trimming. Check lack of consistency!!!!
-//        final GenomeLoc subLoc = getLocation().intersect(span);
-//        final int subStart = subLoc.getStart() - getLocation().getStart();
-//        final int subEnd = subStart + subLoc.size();
-//        final List<ActivityProfileState> subStates = supportingStates.isEmpty() ? supportingStates : supportingStates.subList(subStart, subEnd);
-//        return new ActiveRegion( subLoc, subStates, isActive, genomeLocParser, extension );
-
-    }
-
-    public ActiveRegion trim(final GenomeLoc span) {
-        return trim(span,span);
-    }
-
-    /**
-     * Trim this active region to no more than the span, producing a new active region with properly trimmed reads that
-     * attempts to provide the best possible representation of this active region covering the span.
-     *
-     * The challenge here is that span may (1) be larger than can be represented by this active region
-     * + its original extension and (2) the extension must be symmetric on both sides.  This algorithm
-     * therefore determines how best to represent span as a subset of the span of this
-     * region with a padding value that captures as much of the span as possible.
-     *
-     * For example, suppose this active region is
-     *
-     * Active:    100-200 with extension of 50, so that the true span is 50-250
-     * NewExtent: 150-225 saying that we'd ideally like to just have bases 150-225
-     *
-     * Here we represent the active region as a active region from 150-200 with 25 bp of padding.
-     *
-     * The overall constraint is that the active region can never exceed the original active region, and
-     * the extension is chosen to maximize overlap with the desired region
-     *
-     * @param span the new extent of the active region we want
-     * @return a non-null, empty active region
-     */
-    public ActiveRegion trim(final GenomeLoc span, final GenomeLoc extendedSpan) {
-        if ( span == null ) throw new IllegalArgumentException("Active region extent cannot be null");
-        if ( extendedSpan == null ) throw new IllegalArgumentException("Active region extended span cannot be null");
-        if ( ! extendedSpan.containsP(span))
-            throw new IllegalArgumentException("The requested extended must fully contain the requested span");
-
-        final GenomeLoc subActive = getLocation().intersect(span);
-        final int requiredOnRight = Math.max(extendedSpan.getStop() - subActive.getStop(), 0);
-        final int requiredOnLeft = Math.max(subActive.getStart() - extendedSpan.getStart(), 0);
-        final int requiredExtension = Math.min(Math.max(requiredOnLeft, requiredOnRight), getExtension());
-
-        final ActiveRegion result = new ActiveRegion( subActive, Collections.<ActivityProfileState>emptyList(), isActive, genomeLocParser, requiredExtension );
-
-        final List<GATKSAMRecord> myReads = getReads();
-        final GenomeLoc resultExtendedLoc = result.getExtendedLoc();
-        final int resultExtendedLocStart = resultExtendedLoc.getStart();
-        final int resultExtendedLocStop = resultExtendedLoc.getStop();
-
-        final List<GATKSAMRecord> trimmedReads = new ArrayList<>(myReads.size());
-        for( final GATKSAMRecord read : myReads ) {
-            final GATKSAMRecord clippedRead = ReadClipper.hardClipToRegion(read,
-                    resultExtendedLocStart, resultExtendedLocStop);
-            if( result.readOverlapsRegion(clippedRead) && clippedRead.getReadLength() > 0 )
-                trimmedReads.add(clippedRead);
-        }
-        result.clearReads();
-        result.addAll(ReadUtils.sortReadsByCoordinate(trimmedReads));
-        return result;
-    }
-
-    public void setFinalized(final boolean value) {
-        hasBeenFinalized = value;
-    }
-
-    public boolean isFinalized() {
-        return hasBeenFinalized;
-    }
-
-}
\ No newline at end of file
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/activeregion/ActiveRegionReadState.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/activeregion/ActiveRegionReadState.java
deleted file mode 100644
index 76b4eb6..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/activeregion/ActiveRegionReadState.java
+++ /dev/null
@@ -1,40 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.activeregion;
-
-/**
- * Describes how a read relates to an assigned ActiveRegion
- *
- * User: thibault
- * Date: 11/26/12
- * Time: 2:35 PM
- */
-public enum ActiveRegionReadState {
-    PRIMARY,        // This is the read's primary region
-    NONPRIMARY,     // This region overlaps the read, but it is not primary
-    EXTENDED,       // This region would overlap the read if it were extended
-    UNMAPPED        // This read is not mapped
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/activeregion/ActivityProfile.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/activeregion/ActivityProfile.java
deleted file mode 100644
index 2d97f69..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/activeregion/ActivityProfile.java
+++ /dev/null
@@ -1,520 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.activeregion;
-
-import com.google.java.contract.Ensures;
-import com.google.java.contract.Requires;
-import org.broadinstitute.gatk.utils.GenomeLoc;
-import org.broadinstitute.gatk.utils.GenomeLocParser;
-import org.broadinstitute.gatk.utils.GenomeLocSortedSet;
-
-import java.util.*;
-
-/**
- * Class holding information about per-base activity scores for the
- * active region traversal
- *
- * @author Mark DePristo
- * @since Date created
- */
-public class ActivityProfile {
-    protected final List<ActivityProfileState> stateList;
-    protected final GenomeLocParser parser;
-    protected final GenomeLocSortedSet restrictToIntervals;
-
-    protected final int maxProbPropagationDistance;
-    protected final double activeProbThreshold;
-
-    protected GenomeLoc regionStartLoc = null;
-    protected GenomeLoc regionStopLoc = null;
-
-    /**
-     * A cached value of the regionStartLoc contig length, to make calls to
-     * getCurrentContigLength efficient
-     */
-    protected int contigLength = -1;
-
-    /**
-     * Create a new empty ActivityProfile
-     * @param parser the parser we can use to create genome locs, cannot be null
-     * @param maxProbPropagationDistance region probability propagation distance beyond it's maximum size
-     * @param activeProbThreshold threshold for the probability of am active profile state being active
-     */
-    public ActivityProfile(final GenomeLocParser parser, final int maxProbPropagationDistance, final double activeProbThreshold) {
-        this(parser, maxProbPropagationDistance, activeProbThreshold, null);
-    }
-
-    /**
-     * Create a empty ActivityProfile, restricting output to profiles overlapping intervals, if not null
-     * @param parser the parser we can use to create genome locs, cannot be null
-     * @param maxProbPropagationDistance region probability propagation distance beyond it's maximum size
-     * @param activeProbThreshold threshold for the probability of a profile state being active
-     * @param intervals only include states that are within these intervals, if not null
-     */
-    public ActivityProfile(final GenomeLocParser parser, final int maxProbPropagationDistance, final double activeProbThreshold, final GenomeLocSortedSet intervals) {
-        if ( parser == null ) throw new IllegalArgumentException("parser cannot be null");
-
-        this.parser = parser;
-        this.stateList = new ArrayList<ActivityProfileState>();
-        this.restrictToIntervals = intervals;
-        this.maxProbPropagationDistance = maxProbPropagationDistance;
-        this.activeProbThreshold = activeProbThreshold;
-    }
-
-    @Override
-    public String toString() {
-        return "ActivityProfile{" +
-                "start=" + regionStartLoc +
-                ", stop=" + regionStopLoc +
-                '}';
-    }
-
-    /**
-     * How far away can probability mass be moved around in this profile?
-     *
-     * This distance puts an upper limit on how far, in bp, we will ever propagate probability max around
-     * when adding a new ActivityProfileState.  For example, if the value of this function is
-     * 10, and you are looking at a state at bp 5, and we know that no states beyond 5 + 10 will have
-     * their probability propagated back to that state.
-     *
-     * @return a positive integer distance in bp
-     */
-    @Ensures("result >= 0")
-    public int getMaxProbPropagationDistance() {
-        return maxProbPropagationDistance;
-    }
-
-    /**
-     * How many profile results are in this profile?
-     * @return the number of profile results
-     */
-    @Ensures("result >= 0")
-    public int size() {
-        return stateList.size();
-    }
-
-    /**
-     * Is this profile empty?
-     * @return true if the profile is empty
-     */
-    @Ensures("isEmpty() == (size() == 0)")
-    public boolean isEmpty() {
-        return stateList.isEmpty();
-    }
-
-    /**
-     * Get the span of this activity profile, which is from the start of the first state to the stop of the last
-     * @return a potentially null GenomeLoc.  Will be null if this profile is empty
-     */
-    public GenomeLoc getSpan() {
-        return isEmpty() ? null : regionStartLoc.endpointSpan(regionStopLoc);
-    }
-
-    @Requires("! isEmpty()")
-    public int getContigIndex() {
-        return regionStartLoc.getContigIndex();
-    }
-
-    @Requires("! isEmpty()")
-    public int getStop() {
-        return regionStopLoc.getStop();
-    }
-
-    /**
-     * Get the list of active profile results in this object
-     * @return a non-null, ordered list of active profile results
-     */
-    @Ensures("result != null")
-    protected List<ActivityProfileState> getStateList() {
-        return stateList;
-    }
-
-    /**
-     * Get the probabilities of the states as a single linear array of doubles
-     * @return a non-null array
-     */
-    @Ensures("result != null")
-    protected double[] getProbabilitiesAsArray() {
-        final double[] probs = new double[getStateList().size()];
-        int i = 0;
-        for ( final ActivityProfileState state : getStateList() )
-            probs[i++] = state.isActiveProb;
-        return probs;
-    }
-
-    /**
-     * Helper function that gets the genome loc for a site offset from relativeLoc, protecting ourselves from
-     * falling off the edge of the contig.
-     *
-     * @param relativeLoc the location offset is relative to
-     * @param offset the offset from relativeLoc where we'd like to create a GenomeLoc
-     * @return a genome loc with relativeLoc.start + offset, if this is on the contig, null otherwise
-     */
-    @Requires("relativeLoc != null")
-    protected GenomeLoc getLocForOffset(final GenomeLoc relativeLoc, final int offset) {
-        final int start = relativeLoc.getStart() + offset;
-        if ( start < 0 || start > getCurrentContigLength() ) {
-            return null;
-        } else {
-            return parser.createGenomeLoc(regionStartLoc.getContig(), regionStartLoc.getContigIndex(), start, start);
-        }
-    }
-
-    /**
-     * Get the length of the current contig
-     * @return the length in bp
-     */
-    @Requires("regionStartLoc != null")
-    @Ensures("result > 0")
-    private int getCurrentContigLength() {
-        return contigLength;
-    }
-
-    // --------------------------------------------------------------------------------
-    //
-    // routines to add states to a profile
-    //
-    // --------------------------------------------------------------------------------
-
-    /**
-     * Add the next ActivityProfileState to this profile.
-     *
-     * Must be contiguous with the previously added result, or an IllegalArgumentException will be thrown
-     *
-     * @param state a well-formed ActivityProfileState result to incorporate into this profile
-     */
-    @Requires("state != null")
-    public void add(final ActivityProfileState state) {
-        final GenomeLoc loc = state.getLoc();
-
-        if ( regionStartLoc == null ) {
-            regionStartLoc = loc;
-            regionStopLoc = loc;
-            contigLength = parser.getContigInfo(regionStartLoc.getContig()).getSequenceLength();
-        } else {
-            if ( regionStopLoc.getStart() != loc.getStart() - 1 )
-                throw new IllegalArgumentException("Bad add call to ActivityProfile: loc " + loc + " not immediately after last loc " + regionStopLoc );
-            regionStopLoc = loc;
-        }
-
-        final Collection<ActivityProfileState> processedStates = processState(state);
-        for ( final ActivityProfileState processedState : processedStates ) {
-            incorporateSingleState(processedState);
-        }
-    }
-
-    /**
-     * Incorporate a single activity profile state into the current list of states
-     *
-     * If state's position occurs immediately after the last position in this profile, then
-     * the state is appended to the state list.  If it's within the existing states list,
-     * the prob of stateToAdd is added to its corresponding state in the list.  If the
-     * position would be before the start of this profile, stateToAdd is simply ignored.
-     *
-     * @param stateToAdd the state we want to add to the states list
-     */
-    @Requires("stateToAdd != null")
-    private void incorporateSingleState(final ActivityProfileState stateToAdd) {
-        final int position = stateToAdd.getOffset(regionStartLoc);
-
-        if ( position > size() )
-            // should we allow this?  probably not
-            throw new IllegalArgumentException("Must add state contiguous to existing states: adding " + stateToAdd);
-
-        if ( position >= 0 ) {
-            // ignore states starting before this region's start
-            if ( position < size() ) {
-                stateList.get(position).isActiveProb += stateToAdd.isActiveProb;
-            } else {
-                if ( position != size() ) throw new IllegalStateException("position == size but it wasn't");
-                stateList.add(stateToAdd);
-            }
-        }
-    }
-
-    /**
-     * Process justAddedState, returning a collection of derived states that actually be added to the stateList
-     *
-     * The purpose of this function is to transform justAddedStates, if needed, into a series of atomic states
-     * that we actually want to track.  For example, if state is for soft clips, we transform that single
-     * state into a list of states that surround the state up to the distance of the soft clip.
-     *
-     * Can be overridden by subclasses to transform states in any way
-     *
-     * There's no particular contract for the output states, except that they can never refer to states
-     * beyond the current end of the stateList unless the explicitly include preceding states before
-     * the reference.  So for example if the current state list is [1, 2, 3] this function could return
-     * [1,2,3,4,5] but not [1,2,3,5].
-     *
-     * @param justAddedState the state our client provided to use to add to the list
-     * @return a list of derived states that should actually be added to this profile's state list
-     */
-    protected Collection<ActivityProfileState> processState(final ActivityProfileState justAddedState) {
-        if ( justAddedState.resultState.equals(ActivityProfileState.Type.HIGH_QUALITY_SOFT_CLIPS) ) {
-            // special code to deal with the problem that high quality soft clipped bases aren't added to pileups
-            final List<ActivityProfileState> states = new LinkedList<ActivityProfileState>();
-            // add no more than the max prob propagation distance num HQ clips
-            final int numHQClips = Math.min(justAddedState.resultValue.intValue(), getMaxProbPropagationDistance());
-            for( int jjj = - numHQClips; jjj <= numHQClips; jjj++ ) {
-                final GenomeLoc loc = getLocForOffset(justAddedState.getLoc(), jjj);
-                if ( loc != null )
-                    states.add(new ActivityProfileState(loc, justAddedState.isActiveProb));
-            }
-
-            return states;
-        } else {
-            return Collections.singletonList(justAddedState);
-        }
-    }
-
-    // --------------------------------------------------------------------------------
-    //
-    // routines to get active regions from the profile
-    //
-    // --------------------------------------------------------------------------------
-
-    /**
-     * Get the next completed active regions from this profile, and remove all states supporting them from this profile
-     *
-     * Takes the current profile and finds all of the active / inactive from the start of the profile that are
-     * ready.  By ready we mean unable to have their probability modified any longer by future additions to the
-     * profile.  The regions that are popped off the profile take their states with them, so the start of this
-     * profile will always be after the end of the last region returned here.
-     *
-     * The regions are returned sorted by genomic position.
-     *
-     * This function may not return anything in the list, if no regions are ready
-     *
-     * No returned region will be larger than maxRegionSize.
-     *
-     * @param activeRegionExtension the extension value to provide to the constructed regions
-     * @param minRegionSize the minimum region size, in the case where we have to cut up regions that are too large
-     * @param maxRegionSize the maximize size of the returned region
-     * @param forceConversion if true, we'll return a region whose end isn't sufficiently far from the end of the
-     *                        stateList.  Used to close out the active region when we've hit some kind of end (such
-     *                        as the end of the contig)
-     * @return a non-null list of active regions
-     */
-    @Ensures("result != null")
-    public List<ActiveRegion> popReadyActiveRegions(final int activeRegionExtension, final int minRegionSize, final int maxRegionSize, final boolean forceConversion) {
-        if ( activeRegionExtension < 0 ) throw new IllegalArgumentException("activeRegionExtension must be >= 0 but got " + activeRegionExtension);
-        if ( minRegionSize < 1 ) throw new IllegalArgumentException("minRegionSize must be >= 1 but got " + minRegionSize);
-        if ( maxRegionSize < 1 ) throw new IllegalArgumentException("maxRegionSize must be >= 1 but got " + maxRegionSize);
-
-        final LinkedList<ActiveRegion> regions = new LinkedList<ActiveRegion>();
-
-        while ( true ) {
-            final ActiveRegion nextRegion = popNextReadyActiveRegion(activeRegionExtension, minRegionSize, maxRegionSize, forceConversion);
-            if ( nextRegion == null )
-                return regions;
-            else {
-                if ( restrictToIntervals == null )
-                    regions.add(nextRegion);
-                else
-                    regions.addAll(nextRegion.splitAndTrimToIntervals(restrictToIntervals));
-            }
-        }
-    }
-
-    /**
-     * Helper function for popReadyActiveRegions that pops the first ready region off the front of this profile
-     *
-     * If a region is returned, modifies the state of this profile so that states used to make the region are
-     * no longer part of the profile.  Associated information (like the region start position) of this profile
-     * are also updated.
-     *
-     * @param activeRegionExtension the extension value to provide to the constructed regions
-     * @param minRegionSize the minimum region size, in the case where we have to cut up regions that are too large
-     * @param maxRegionSize the maximize size of the returned region
-     * @param forceConversion if true, we'll return a region whose end isn't sufficiently far from the end of the
-     *                        stateList.  Used to close out the active region when we've hit some kind of end (such
-     *                        as the end of the contig)
-     * @return a fully formed active region, or null if none can be made
-     */
-    private ActiveRegion popNextReadyActiveRegion(final int activeRegionExtension, final int minRegionSize, final int maxRegionSize, final boolean forceConversion) {
-        if ( stateList.isEmpty() )
-            return null;
-
-        // If we are flushing the activity profile we need to trim off the excess states so that we don't create regions outside of our current processing interval
-        if( forceConversion ) {
-            final List<ActivityProfileState> statesToTrimAway = new ArrayList<ActivityProfileState>(stateList.subList(getSpan().size(), stateList.size()));
-            stateList.removeAll(statesToTrimAway);
-        }
-
-        final ActivityProfileState first = stateList.get(0);
-        final boolean isActiveRegion = first.isActiveProb > activeProbThreshold;
-        final int offsetOfNextRegionEnd = findEndOfRegion(isActiveRegion, minRegionSize, maxRegionSize, forceConversion);
-        if ( offsetOfNextRegionEnd == -1 )
-            // couldn't find a valid ending offset, so we return null
-            return null;
-
-        // we need to create the active region, and clip out the states we're extracting from this profile
-        final List<ActivityProfileState> sub = stateList.subList(0, offsetOfNextRegionEnd + 1);
-        final List<ActivityProfileState> supportingStates = new ArrayList<ActivityProfileState>(sub);
-        sub.clear();
-
-        // update the start and stop locations as necessary
-        if ( stateList.isEmpty() ) {
-            regionStartLoc = regionStopLoc = null;
-        } else {
-            regionStartLoc = stateList.get(0).getLoc();
-        }
-        final GenomeLoc regionLoc = parser.createGenomeLoc(first.getLoc().getContig(), first.getLoc().getStart(), first.getLoc().getStart() + offsetOfNextRegionEnd);
-        return new ActiveRegion(regionLoc, supportingStates, isActiveRegion, parser, activeRegionExtension);
-    }
-
-    /**
-     * Find the end of the current region, returning the index into the element isActive element, or -1 if the region isn't done
-     *
-     * The current region is defined from the start of the stateList, looking for elements that have the same isActiveRegion
-     * flag (i.e., if isActiveRegion is true we are looking for states with isActiveProb > threshold, or alternatively
-     * for states < threshold).  The maximize size of the returned region is maxRegionSize.  If forceConversion is
-     * true, then we'll return the region end even if this isn't safely beyond the max prob propagation distance.
-     *
-     * Note that if isActiveRegion is true, and we can construct a active region > maxRegionSize in bp, we
-     * find the further local minimum within that max region, and cut the region there, under the constraint
-     * that the resulting region must be at least minRegionSize in bp.
-     *
-     * @param isActiveRegion is the region we're looking for an active region or inactive region?
-     * @param minRegionSize the minimum region size, in the case where we have to cut up regions that are too large
-     * @param maxRegionSize the maximize size of the returned region
-     * @param forceConversion if true, we'll return a region whose end isn't sufficiently far from the end of the
-     *                        stateList.  Used to close out the active region when we've hit some kind of end (such
-     *                        as the end of the contig)
-     * @return the index into stateList of the last element of this region, or -1 if it cannot be found
-     */
-    @Ensures({
-            "result >= -1",
-            "result == -1 || result < maxRegionSize",
-            "! (result == -1 && forceConversion)"})
-    private int findEndOfRegion(final boolean isActiveRegion, final int minRegionSize, final int maxRegionSize, final boolean forceConversion) {
-        if ( ! forceConversion && stateList.size() < maxRegionSize + getMaxProbPropagationDistance() ) {
-            // we really haven't finalized at the probability mass that might affect our decision, so keep
-            // waiting until we do before we try to make any decisions
-            return -1;
-        }
-
-        int endOfActiveRegion = findFirstActivityBoundary(isActiveRegion, maxRegionSize);
-
-        if ( isActiveRegion && endOfActiveRegion == maxRegionSize )
-            // we've run to the end of the region, let's find a good place to cut
-            endOfActiveRegion = findBestCutSite(endOfActiveRegion, minRegionSize);
-
-        // we're one past the end, so i must be decremented
-        return endOfActiveRegion - 1;
-    }
-
-    /**
-     * Find the the local minimum within 0 - endOfActiveRegion where we should divide region
-     *
-     * This algorithm finds the global minimum probability state within the region [minRegionSize, endOfActiveRegion)
-     * (exclusive of endOfActiveRegion), and returns the state index of that state.
-     * that it
-     *
-     * @param endOfActiveRegion the last state of the current active region (exclusive)
-     * @param minRegionSize the minimum of the left-most region, after cutting
-     * @return the index of state after the cut site (just like endOfActiveRegion)
-     */
-    @Requires({"endOfActiveRegion >= minRegionSize", "minRegionSize >= 0"})
-    @Ensures({"result >= minRegionSize", "result <= endOfActiveRegion"})
-    private int findBestCutSite(final int endOfActiveRegion, final int minRegionSize) {
-        int minI = endOfActiveRegion - 1;
-        double minP = Double.MAX_VALUE;
-
-        for ( int i = minI; i >= minRegionSize - 1; i-- ) {
-            double cur = getProb(i);
-            if ( cur < minP && isMinimum(i) ) {
-                minP = cur;
-                minI = i;
-            }
-        }
-
-        return minI + 1;
-    }
-
-    /**
-     * Find the first index into the state list where the state is considered ! isActiveRegion
-     *
-     * Note that each state has a probability of being active, and this function thresholds that
-     * value on activeProbThreshold, coloring each state as active or inactive.  Finds the
-     * largest contiguous stretch of states starting at the first state (index 0) with the same isActive
-     * state as isActiveRegion.  If the entire state list has the same isActive value, then returns
-     * maxRegionSize
-     *
-     * @param isActiveRegion are we looking for a stretch of active states, or inactive ones?
-     * @param maxRegionSize don't look for a boundary that would yield a region of size > maxRegionSize
-     * @return the index of the first state in the state list with isActive value != isActiveRegion, or maxRegionSize
-     *         if no such element exists
-     */
-    @Requires({"maxRegionSize > 0"})
-    @Ensures({"result >= 0", "result <= stateList.size()"})
-    private int findFirstActivityBoundary(final boolean isActiveRegion, final int maxRegionSize) {
-        final int nStates = stateList.size();
-        int endOfActiveRegion = 0;
-
-        while ( endOfActiveRegion < nStates && endOfActiveRegion < maxRegionSize ) {
-            if ( getProb(endOfActiveRegion) > activeProbThreshold != isActiveRegion ) {
-                break;
-            }
-            endOfActiveRegion++;
-        }
-
-        return endOfActiveRegion;
-    }
-
-    /**
-     * Helper function to get the probability of the state at offset index
-     * @param index a valid offset into the state list
-     * @return the isActiveProb of the state at index
-     */
-    @Requires({"index >= 0", "index < stateList.size()"})
-    private double getProb(final int index) {
-        return stateList.get(index).isActiveProb;
-    }
-
-    /**
-     * Is the probability at index in a local minimum?
-     *
-     * Checks that the probability at index is <= both the probabilities to either side.
-     * Returns false if index is at the end or the start of the state list.
-     *
-     * @param index the index of the state we want to test
-     * @return true if prob at state is a minimum, false otherwise
-     */
-    @Requires({"index >= 0", "index < stateList.size()"})
-    private boolean isMinimum(final int index) {
-        if ( index == stateList.size() - 1 )
-            // we cannot be at a minimum if the current position is the last in the state list
-            return false;
-        else if ( index < 1 )
-            // we cannot be at a minimum if the current position is the first or second
-            return false;
-        else {
-            final double indexP = getProb(index);
-            return indexP <= getProb(index+1) && indexP < getProb(index-1);
-        }
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/activeregion/ActivityProfileState.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/activeregion/ActivityProfileState.java
deleted file mode 100644
index 915db61..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/activeregion/ActivityProfileState.java
+++ /dev/null
@@ -1,112 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.activeregion;
-
-import com.google.java.contract.Ensures;
-import com.google.java.contract.Requires;
-import org.broadinstitute.gatk.utils.GenomeLoc;
-
-/**
- * The state of an active region walker's isActive call at a specific locus in the genome
- *
- * User: rpoplin
- * Date: 7/27/12
- */
-public class ActivityProfileState {
-    final private GenomeLoc loc;
-    public double isActiveProb;
-    public Type resultState;
-    public Number resultValue;
-
-    public enum Type {
-        NONE,
-        HIGH_QUALITY_SOFT_CLIPS
-    }
-
-    /**
-     * Create a new ActivityProfileState at loc with probability of being active of isActiveProb
-     *
-     * @param loc the position of the result profile (for debugging purposes)
-     * @param isActiveProb the probability of being active (between 0 and 1)
-     */
-    @Requires({"loc != null", "isActiveProb >= 0.0 && isActiveProb <= 1.0"})
-    public ActivityProfileState(final GenomeLoc loc, final double isActiveProb) {
-        this(loc, isActiveProb, Type.NONE, null);
-    }
-
-    /**
-     * Create a new ActivityProfileState at loc with probability of being active of isActiveProb that maintains some
-     * information about the result state and value
-     *
-     * The only state value in use is HIGH_QUALITY_SOFT_CLIPS, and here the value is interpreted as the number
-     * of bp affected by the soft clips.
-     *
-     * @param loc the position of the result profile (for debugging purposes)
-     * @param isActiveProb the probability of being active (between 0 and 1)
-     */
-    @Requires({"loc != null", "isActiveProb >= 0.0 && isActiveProb <= 1.0"})
-    public ActivityProfileState(final GenomeLoc loc, final double isActiveProb, final Type resultState, final Number resultValue) {
-        // make sure the location of that activity profile is 1
-        if ( loc.size() != 1 )
-            throw new IllegalArgumentException("Location for an ActivityProfileState must have to size 1 bp but saw " + loc);
-        if ( resultValue != null && resultValue.doubleValue() < 0 )
-            throw new IllegalArgumentException("Result value isn't null and its < 0, which is illegal: " + resultValue);
-
-        this.loc = loc;
-        this.isActiveProb = isActiveProb;
-        this.resultState = resultState;
-        this.resultValue = resultValue;
-    }
-
-    /**
-     * The offset of state w.r.t. our current region's start location
-     * @param regionStartLoc the start of the region, as a genome loc
-     * @return the position of this profile relative to the start of this region
-     */
-    public int getOffset(final GenomeLoc regionStartLoc) {
-        return getLoc().getStart() - regionStartLoc.getStart();
-    }
-
-
-    /**
-     * Get the genome loc associated with the ActivityProfileState
-     * @return the location of this result
-     */
-    @Ensures("result != null")
-    public GenomeLoc getLoc() {
-        return loc;
-    }
-
-    @Override
-    public String toString() {
-        return "ActivityProfileState{" +
-                "loc=" + loc +
-                ", isActiveProb=" + isActiveProb +
-                ", resultState=" + resultState +
-                ", resultValue=" + resultValue +
-                '}';
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/activeregion/BandPassActivityProfile.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/activeregion/BandPassActivityProfile.java
deleted file mode 100644
index 52437a8..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/activeregion/BandPassActivityProfile.java
+++ /dev/null
@@ -1,194 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.activeregion;
-
-import com.google.java.contract.Ensures;
-import org.broadinstitute.gatk.utils.GenomeLoc;
-import org.broadinstitute.gatk.utils.GenomeLocParser;
-import org.broadinstitute.gatk.utils.GenomeLocSortedSet;
-import org.broadinstitute.gatk.utils.MathUtils;
-
-import java.util.Collection;
-import java.util.LinkedList;
-
-/**
- * A band pass filtering version of the activity profile
- *
- * Applies a band pass filter with a Gaussian kernel to the input state probabilities to smooth
- * them out of an interval
- *
- * @author Mark DePristo
- * @since 2011
- */
-public class BandPassActivityProfile extends ActivityProfile {
-    public static final int MAX_FILTER_SIZE = 50;
-    private final static double MIN_PROB_TO_KEEP_IN_FILTER = 1e-5;
-    public static final double DEFAULT_SIGMA = 17.0;
-
-    private final int filterSize;
-    private final double sigma;
-    private final double[] GaussianKernel;
-
-    /**
-     * Create a new BandPassActivityProfile with default sigma and filter sizes
-     *
-     * @see #BandPassActivityProfile(org.broadinstitute.gatk.utils.GenomeLocParser, org.broadinstitute.gatk.utils.GenomeLocSortedSet, int, double, int, double, boolean)
-     */
-    public BandPassActivityProfile(final GenomeLocParser parser, final GenomeLocSortedSet restrictToIntervals,
-                                   final int maxProbPropagationDistance, final double activeProbThreshold) {
-        this(parser, restrictToIntervals, maxProbPropagationDistance, activeProbThreshold, MAX_FILTER_SIZE, DEFAULT_SIGMA);
-    }
-
-    /**
-     * @see #BandPassActivityProfile(org.broadinstitute.gatk.utils.GenomeLocParser, org.broadinstitute.gatk.utils.GenomeLocSortedSet, int, double, int, double, boolean)
-     *
-     * sets adaptiveFilterSize to true
-     */
-    public BandPassActivityProfile(final GenomeLocParser parser, final GenomeLocSortedSet restrictToIntervals,
-                                   final int maxProbPropagationDistance, final double activeProbThreshold,
-                                   final int maxFilterSize, final double sigma) {
-        this(parser, restrictToIntervals, maxProbPropagationDistance, activeProbThreshold, maxFilterSize, sigma, true);
-    }
-
-    /**
-     * Create an activity profile that implements a band pass filter on the states
-     *
-     * @param parser our genome loc parser
-     * @param restrictToIntervals only include states that are within these intervals, if not null
-     * @param maxProbPropagationDistance region probability propagation distance beyond it's maximum size
-     * @param activeProbThreshold  threshold for the probability of a profile state being active
-     * @param maxFilterSize the maximum size of the band pass filter we are allowed to create, regardless of sigma
-     * @param sigma the variance of the Gaussian kernel for this band pass filter
-     * @param adaptiveFilterSize if true, use the kernel itself to determine the best filter size
-     */
-    public BandPassActivityProfile(final GenomeLocParser parser, final GenomeLocSortedSet restrictToIntervals, final int maxProbPropagationDistance,
-                                   final double activeProbThreshold, final int maxFilterSize, final double sigma, final boolean adaptiveFilterSize) {
-        super(parser, maxProbPropagationDistance, activeProbThreshold, restrictToIntervals);
-
-        if ( sigma < 0 ) throw new IllegalArgumentException("Sigma must be greater than or equal to 0 but got " + sigma);
-
-        // setup the Gaussian kernel for the band pass filter
-        this.sigma = sigma;
-        final double[] fullKernel = makeKernel(maxFilterSize, sigma);
-        this.filterSize = adaptiveFilterSize ? determineFilterSize(fullKernel, MIN_PROB_TO_KEEP_IN_FILTER) : maxFilterSize;
-        this.GaussianKernel = makeKernel(this.filterSize, sigma);
-    }
-
-    protected static int determineFilterSize(final double[] kernel, final double minProbToKeepInFilter) {
-        final int middle = (kernel.length - 1) / 2;
-        int filterEnd = middle;
-        while ( filterEnd > 0 ) {
-            if ( kernel[filterEnd - 1] < minProbToKeepInFilter ) {
-                break;
-            }
-            filterEnd--;
-        }
-        return middle - filterEnd;
-    }
-
-    protected static double[] makeKernel(final int filterSize, final double sigma) {
-        final int bandSize = 2 * filterSize + 1;
-        final double[] kernel = new double[bandSize];
-        for( int iii = 0; iii < bandSize; iii++ ) {
-            kernel[iii] = MathUtils.normalDistribution(filterSize, sigma, iii);
-        }
-        return MathUtils.normalizeFromRealSpace(kernel);
-    }
-
-    /**
-     * Our maximize propagation distance is whatever our parent's is, plus our filter size
-     *
-     * Stops the profile from interpreting sites that aren't yet fully determined due to
-     * propagation of the probabilities.
-     *
-     * @return the distance in bp we might move our probabilities around for some site i
-     */
-    @Override
-    public int getMaxProbPropagationDistance() {
-        return super.getMaxProbPropagationDistance() + filterSize;
-    }
-
-    /**
-     * Get the size (in bp) of the band pass filter
-     * @return a positive integer
-     */
-    @Ensures("result >= 1")
-    public int getBandSize() {
-        return 2 * filterSize + 1;
-    }
-
-    /**
-     * Get the filter size (which is the size of each wing of the band, minus the center point)
-     * @return a positive integer
-     */
-    @Ensures("result >= 0")
-    public int getFilteredSize() {
-        return filterSize;
-    }
-
-    /**
-     * Get the Gaussian kernel sigma value
-     * @return a positive double
-     */
-    @Ensures("result >= 0")
-    public double getSigma() {
-        return sigma;
-    }
-
-    /**
-     * Get the kernel of this band pass filter.  Do not modify returned result
-     * @return the kernel used in this band pass filter
-     */
-    @Ensures({"result != null", "result.length == getBandSize()"})
-    protected double[] getKernel() {
-        return GaussianKernel;
-    }
-
-    /**
-     * Band pass the probabilities in the ActivityProfile, producing a new profile that's band pass filtered
-     * @return a new double[] that's the band-pass filtered version of this profile
-     */
-    @Override
-    protected Collection<ActivityProfileState> processState(final ActivityProfileState justAddedState) {
-        final Collection<ActivityProfileState> states = new LinkedList<ActivityProfileState>();
-
-        for ( final ActivityProfileState superState : super.processState(justAddedState) ) {
-            if ( superState.isActiveProb > 0.0 ) {
-                for( int jjj = -filterSize; jjj <= filterSize; jjj++ ) {
-                    final GenomeLoc loc = getLocForOffset(justAddedState.getLoc(), jjj);
-                    if ( loc != null ) {
-                        final double newProb = superState.isActiveProb * GaussianKernel[jjj + filterSize];
-                        states.add(new ActivityProfileState(loc, newProb));
-                    }
-                }
-            } else {
-                states.add(justAddedState);
-            }
-        }
-
-        return states;
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/analysis/AminoAcid.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/analysis/AminoAcid.java
deleted file mode 100644
index 0416609..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/analysis/AminoAcid.java
+++ /dev/null
@@ -1,114 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.analysis;
-
-/*
- * Copyright (c) 2010 The Broad Institute
- *
- * Permission is hereby granted, free of charge, to any person
- * obtaining a copy of this software and associated documentation
- * files (the "Software"), to deal in the Software without
- * restriction, including without limitation the rights to use,
- * copy, modify, merge, publish, distribute, sublicense, and/or sell
- * copies of the Software, and to permit persons to whom the
- * Software is furnished to do so, subject to the following
- * conditions:
- *
- * The above copyright notice and this permission notice shall be
- * included in all copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
- * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
- * OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
- * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
- * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
- * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
- * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
- * OTHER DEALINGS IN THE SOFTWARE.
- */
-
-/**
- * @author chartl
- * @since June 28, 2010
- */
-
-public enum AminoAcid {
-    
-    Alanine("Alanine","Ala","A",new String[]{"GCA","GCC","GCG","GCT"}),
-    Arganine("Arganine","Arg","R",new String[]{"AGA","AGG","CGA","CGC","CGG","CGT"}),
-    Asparagine("Asparagine","Asn","N",new String[]{"AAC","AAT"}),
-    Aspartic_acid("Aspartic acid","Asp","D",new String[]{"GAT","GAC"}),
-    Cysteine("Cysteine","Cys","C",new String[]{"TGC","TGC"}),
-    Glutamic_acid("Glutamic acid","Glu","E",new String[]{"GAA","GAG"}),
-    Glutamine("Glutamine","Gln","Q",new String[]{"CAA","CAG"}),
-    Glycine("Glycine","Gly","G",new String[]{"GGA","GGC","GGG","GGT"}),
-    Histidine("Histidine","His","H",new String[]{"CAC","CAT"}),
-    Isoleucine("Isoleucine","Ile","I",new String[]{"ATA","ATC","ATT"}),
-    Leucine("Leucine","Leu","L",new String[]{"CTA","CTC","CTG","CTT","TTA","TTG"}),
-    Lysine("Lysine","Lys","K", new String[]{"AAA","AAG"}),
-    Methionine("Methionine","Met","M",new String[]{"ATG"}),
-    Phenylalanine("Phenylalanine","Phe","F",new String[]{"TTC","TTT"}),
-    Proline("Proline","Pro","P",new String[]{"CCA","CCC","CCG","CCT"}),
-    Serine("Serine","Ser","S",new String[]{"AGC","AGT","TCA","TCC","TCG","TCT"}),
-    Stop_codon("Stop codon","Stop","*",new String[]{"TAA","TAG","TGA"}),
-    Threonine("Threonine","Thr","T",new String[]{"ACA","ACC","ACG","ACT"}),
-    Tryptophan("Tryptophan","Trp","W",new String[]{"TGG"}),
-    Tyrosine("Tyrosine","Tyr","Y",new String[]{"TAC","TAT"}),
-    Valine("Valine","Val","V",new String[]{"GTA","GTC","GTG","GTT"});
-
-    String[] codons;
-    String fullName;
-    String code;
-    String letter;
-
-    AminoAcid(String name, String shortName, String abbrev, String[] myCodons) {
-        codons = myCodons;
-        fullName = name;
-        code = shortName;
-        letter = abbrev;
-    }
-
-    public String getName() {
-        return fullName;
-    }
-
-    public String getLetter() {
-        return letter;
-    }
-
-    public String getCode() {
-        return code;
-    }
-
-    public boolean isStop() {
-        return this == Stop_codon;
-    }
-
-    public String toString() {
-        return getName();
-    }
-    
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/analysis/AminoAcidTable.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/analysis/AminoAcidTable.java
deleted file mode 100644
index 7cd8933..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/analysis/AminoAcidTable.java
+++ /dev/null
@@ -1,94 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.analysis;
-
-import java.util.HashMap;
-
-/*
- * Copyright (c) 2010 The Broad Institute
- *
- * Permission is hereby granted, free of charge, to any person
- * obtaining a copy of this software and associated documentation
- * files (the "Software"), to deal in the Software without
- * restriction, including without limitation the rights to use,
- * copy, modify, merge, publish, distribute, sublicense, and/or sell
- * copies of the Software, and to permit persons to whom the
- * Software is furnished to do so, subject to the following
- * conditions:
- *
- * The above copyright notice and this permission notice shall be
- * included in all copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
- * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
- * OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
- * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
- * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
- * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
- * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
- * OTHER DEALINGS IN THE SOFTWARE.
- */
-
-/**
- * @author chartl
- * @since June 28, 2010
- */
-
-public class AminoAcidTable {
-    public HashMap<String,AminoAcid> tableByCodon = new HashMap<String,AminoAcid>(21);
-    public HashMap<String,AminoAcid> tableByCode = new HashMap<String,AminoAcid>(21);
-    public AminoAcidTable() {
-        for ( AminoAcid acid : AminoAcid.values() ) {
-            tableByCode.put(acid.getCode(),acid);
-            for ( String codon : acid.codons ) {
-                tableByCodon.put(codon,acid);
-            }
-        }
-    }
-
-    // todo -- these functions are for the genomic annotator and are named too generally -- they are
-    // todo -- actually accessors by codon; thus should be more specific.
-    public AminoAcid getEukaryoticAA(String codon) {
-        return tableByCodon.get(codon.toUpperCase());
-    }
-
-    public AminoAcid getMitochondrialAA(String codon, boolean isFirst) {
-        String upperCodon = codon.toUpperCase();
-        if ( isFirst && upperCodon.equals("ATT") || upperCodon.equals("ATA") ) {
-            return AminoAcid.Methionine;
-        } else if ( upperCodon.equals("AGA") || upperCodon.equals("AGG") ) {
-            return AminoAcid.Stop_codon;
-        } else if ( upperCodon.equals("TGA") ) {
-            return AminoAcid.Tryptophan;
-        } else {
-            return tableByCodon.get(upperCodon);
-        }
-    }
-
-    public AminoAcid getAminoAcidByCode(String code) {
-        return tableByCode.get(code);
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/analysis/AminoAcidUtils.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/analysis/AminoAcidUtils.java
deleted file mode 100644
index 9213e82..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/analysis/AminoAcidUtils.java
+++ /dev/null
@@ -1,77 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.analysis;
-
-/*
- * Copyright (c) 2010 The Broad Institute
- *
- * Permission is hereby granted, free of charge, to any person
- * obtaining a copy of this software and associated documentation
- * files (the "Software"), to deal in the Software without
- * restriction, including without limitation the rights to use,
- * copy, modify, merge, publish, distribute, sublicense, and/or sell
- * copies of the Software, and to permit persons to whom the
- * Software is furnished to do so, subject to the following
- * conditions:
- *
- * The above copyright notice and this permission notice shall be
- * included in all copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
- * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
- * OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
- * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
- * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
- * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
- * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
- * OTHER DEALINGS IN THE SOFTWARE.
- */
-
-/**
- * @author chartl
- * @since June 28, 2010
- */
-
-public class AminoAcidUtils {
-
-    public static String[] getAminoAcidNames() {
-        String[] names = new String[AminoAcid.values().length];
-        for ( AminoAcid acid : AminoAcid.values() ) {
-            names[acid.ordinal()] = acid.getName();
-        }
-
-        return names;
-    }
-
-    public static String[] getAminoAcidCodes() {
-        String[] codes = new String[AminoAcid.values().length];
-        for ( AminoAcid acid : AminoAcid.values() ) {
-            codes[acid.ordinal()] = acid.getCode();
-        }
-
-        return codes;
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/baq/BAQ.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/baq/BAQ.java
deleted file mode 100644
index da8d00e..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/baq/BAQ.java
+++ /dev/null
@@ -1,713 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.baq;
-
-import htsjdk.samtools.reference.IndexedFastaSequenceFile;
-import htsjdk.samtools.reference.ReferenceSequence;
-import htsjdk.samtools.CigarElement;
-import htsjdk.samtools.CigarOperator;
-import htsjdk.samtools.SAMRecord;
-import htsjdk.samtools.SAMUtils;
-import org.apache.log4j.Logger;
-import org.broadinstitute.gatk.utils.collections.Pair;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-import org.broadinstitute.gatk.utils.exceptions.UserException;
-import org.broadinstitute.gatk.utils.sam.ReadUtils;
-
-/*
-  The topology of the profile HMM:
-
-           /\             /\        /\             /\
-           I[1]           I[k-1]    I[k]           I[L]
-            ^   \      \    ^    \   ^   \      \   ^
-            |    \      \   |     \  |    \      \  |
-    M[0]   M[1] -> ... -> M[k-1] -> M[k] -> ... -> M[L]   M[L+1]
-                \      \/        \/      \/      /
-                 \     /\        /\      /\     /
-                       -> D[k-1] -> D[k] ->
-
-   M[0] points to every {M,I}[k] and every {M,I}[k] points M[L+1].
-
-   On input, _ref is the reference sequence and _query is the query
-   sequence. Both are sequences of 0/1/2/3/4 where 4 stands for an
-   ambiguous residue. iqual is the base quality. c sets the gap open
-   probability, gap extension probability and band width.
-
-   On output, state and q are arrays of length l_query. The higher 30
-   bits give the reference position the query base is matched to and the
-   lower two bits can be 0 (an alignment match) or 1 (an
-   insertion). q[i] gives the phred scaled posterior probability of
-   state[i] being wrong.
- */
-public class BAQ {
-    private final static Logger logger = Logger.getLogger(BAQ.class);
-    private final static boolean DEBUG = false;
-
-    public enum CalculationMode {
-        OFF,                        // don't apply BAQ at all, the default
-        CALCULATE_AS_NECESSARY,     // do HMM BAQ calculation on the fly, as necessary, if there's no tag
-        RECALCULATE                 // do HMM BAQ calculation on the fly, regardless of whether there's a tag present
-    }
-
-    /** these are features that only the walker can override */
-    public enum QualityMode {
-        ADD_TAG,                    // calculate the BAQ, but write it into the reads as the BAQ tag, leaving QUAL field alone
-        OVERWRITE_QUALS,            // overwrite the quality field directly
-        DONT_MODIFY                 // do the BAQ, but don't modify the quality scores themselves, just return them in the function.
-    }
-
-    public static final String BAQ_TAG = "BQ";
-
-    private static double[] qual2prob = new double[256];
-    static {
-        for (int i = 0; i < 256; ++i)
-            qual2prob[i] = Math.pow(10, -i/10.);
-    }
-
-    // Phred scaled now (changed 1/10/2011)
-    public static final double DEFAULT_GOP = 40;
-
-    /*  Takes a Phred Scale quality score and returns the error probability.
-     *
-     *  Quick conversion function to maintain internal structure of BAQ calculation on
-     *  probability scale, but take the user entered parameter in phred-scale.
-     *
-     *  @param x phred scaled score
-     *  @return probability of incorrect base call
-     */
-    private double convertFromPhredScale(double x) { return (Math.pow(10,(-x)/10.));}
-
-    public double cd = -1;      // gap open probability [1e-3]
-    private double ce = 0.1;    // gap extension probability [0.1]
-	private int cb = 7;         // band width [7]
-    private boolean includeClippedBases = false;
-
-    public byte getMinBaseQual() {
-        return minBaseQual;
-    }
-
-    /**
-     * Any bases with Q < MIN_BASE_QUAL are raised up to this base quality
-     */
-    private byte minBaseQual = 4;
-
-    public double getGapOpenProb() {
-        return cd;
-    }
-
-    public double getGapExtensionProb() {
-        return ce;
-    }
-
-    public int getBandWidth() {
-        return cb;
-    }
-
-    /**
-     * Use defaults for everything
-     */
-    public BAQ() {
-        this(DEFAULT_GOP);
-    }
-
-    /**
-     * Use defaults for everything
-     */
-    public BAQ(final double gapOpenPenalty) {
-        cd = convertFromPhredScale(gapOpenPenalty);
-        initializeCachedData();
-    }
-
-
-
-    /**
-     * Create a new HmmGlocal object with specified parameters
-     *
-     * @param d gap open prob (not phred scaled!).
-     * @param e gap extension prob.
-     * @param b band width
-     * @param minBaseQual All bases with Q < minBaseQual are up'd to this value
-     */
-	public BAQ(final double d, final double e, final int b, final byte minBaseQual, boolean includeClippedBases) {
-		cd = d; ce = e; cb = b;
-        this.minBaseQual = minBaseQual;
-        this.includeClippedBases = includeClippedBases;
-        initializeCachedData();
-	}
-
-    private final static double EM = 0.33333333333;
-    private final static double EI = 0.25;
-
-    private double[][][] EPSILONS = new double[256][256][SAMUtils.MAX_PHRED_SCORE+1];
-
-    private void initializeCachedData() {
-        for ( int i = 0; i < 256; i++ )
-            for ( int j = 0; j < 256; j++ )
-                for ( int q = 0; q <= SAMUtils.MAX_PHRED_SCORE; q++ ) {
-                    EPSILONS[i][j][q] = 1.0;
-                }
-
-        for ( char b1 : "ACGTacgt".toCharArray() ) {
-            for ( char b2 : "ACGTacgt".toCharArray() ) {
-                for ( int q = 0; q <= SAMUtils.MAX_PHRED_SCORE; q++ ) {
-                    double qual = qual2prob[q < minBaseQual ? minBaseQual : q];
-                    double e = Character.toLowerCase(b1) == Character.toLowerCase(b2) ? 1 - qual : qual * EM;
-                    EPSILONS[(byte)b1][(byte)b2][q] = e;
-                }
-            }
-        }
-    }
-
-    protected double calcEpsilon( byte ref, byte read, byte qualB ) {
-        return EPSILONS[ref][read][qualB];
-    }
-
-    // ####################################################################################################
-    //
-    // NOTE -- THIS CODE IS SYNCHRONIZED WITH CODE IN THE SAMTOOLS REPOSITORY.  CHANGES TO THIS CODE SHOULD BE
-    // NOTE -- PUSHED BACK TO HENG LI
-    //
-    // ####################################################################################################
-    public int hmm_glocal(final byte[] ref, final byte[] query, int qstart, int l_query, final byte[] _iqual, int[] state, byte[] q) {
-        if ( ref == null ) throw new ReviewedGATKException("BUG: ref sequence is null");
-        if ( query == null ) throw new ReviewedGATKException("BUG: query sequence is null");
-        if ( _iqual == null ) throw new ReviewedGATKException("BUG: query quality vector is null");
-        if ( query.length != _iqual.length ) throw new ReviewedGATKException("BUG: read sequence length != qual length");
-        if ( l_query < 1 ) throw new ReviewedGATKException("BUG: length of query sequence < 0: " + l_query);
-        if ( qstart < 0 ) throw new ReviewedGATKException("BUG: query sequence start < 0: " + qstart);
-
-        //if ( q != null && q.length != state.length ) throw new ReviewedGATKException("BUG: BAQ quality length != read sequence length");
-        //if ( state != null && state.length != l_query ) throw new ReviewedGATKException("BUG: state length != read sequence length");
-
-		int i, k;
-
-        /*** initialization ***/
-		// change coordinates
-		final int l_ref = ref.length;
-
-		// set band width
-		int bw2, bw = l_ref > l_query? l_ref : l_query;
-        if (cb < Math.abs(l_ref - l_query)) {
-            bw = Math.abs(l_ref - l_query) + 3;
-            //System.out.printf("SC  cb=%d, bw=%d%n", cb, bw);
-        }
-        if (bw > cb) bw = cb;
-		if (bw < Math.abs(l_ref - l_query)) {
-            //int bwOld = bw;
-            bw = Math.abs(l_ref - l_query);
-            //System.out.printf("old bw is %d, new is %d%n", bwOld, bw);
-        }
-        //System.out.printf("c->bw = %d, bw = %d, l_ref = %d, l_query = %d\n", cb, bw, l_ref, l_query);
-		bw2 = bw * 2 + 1;
-
-        // allocate the forward and backward matrices f[][] and b[][] and the scaling array s[]
-		double[][] f = new double[l_query+1][bw2*3 + 6];
-		double[][] b = new double[l_query+1][bw2*3 + 6];
-		double[] s = new double[l_query+2];
-
-		// initialize transition probabilities
-		double sM, sI, bM, bI;
-		sM = sI = 1. / (2 * l_query + 2);
-        bM = (1 - cd) / l_ref; bI = cd / l_ref; // (bM+bI)*l_ref==1
-
-		double[] m = new double[9];
-		m[0*3+0] = (1 - cd - cd) * (1 - sM); m[0*3+1] = m[0*3+2] = cd * (1 - sM);
-		m[1*3+0] = (1 - ce) * (1 - sI); m[1*3+1] = ce * (1 - sI); m[1*3+2] = 0.;
-		m[2*3+0] = 1 - ce; m[2*3+1] = 0.; m[2*3+2] = ce;
-
-
-		/*** forward ***/
-		// f[0]
-		f[0][set_u(bw, 0, 0)] = s[0] = 1.;
-		{ // f[1]
-			double[] fi = f[1];
-			double sum;
-			int beg = 1, end = l_ref < bw + 1? l_ref : bw + 1, _beg, _end;
-			for (k = beg, sum = 0.; k <= end; ++k) {
-				int u;
-                double e = calcEpsilon(ref[k-1], query[qstart], _iqual[qstart]);
-				u = set_u(bw, 1, k);
-				fi[u+0] = e * bM; fi[u+1] = EI * bI;
-				sum += fi[u] + fi[u+1];
-			}
-			// rescale
-			s[1] = sum;
-			_beg = set_u(bw, 1, beg); _end = set_u(bw, 1, end); _end += 2;
-			for (k = _beg; k <= _end; ++k) fi[k] /= sum;
-		}
-
-		// f[2..l_query]
-		for (i = 2; i <= l_query; ++i) {
-			double[] fi = f[i], fi1 = f[i-1];
-			double sum;
-			int beg = 1, end = l_ref, x, _beg, _end;
-			byte qyi = query[qstart+i-1];
-			x = i - bw; beg = beg > x? beg : x; // band start
-			x = i + bw; end = end < x? end : x; // band end
-			for (k = beg, sum = 0.; k <= end; ++k) {
-				int u, v11, v01, v10;
-                double e = calcEpsilon(ref[k-1], qyi, _iqual[qstart+i-1]);
-				u = set_u(bw, i, k); v11 = set_u(bw, i-1, k-1); v10 = set_u(bw, i-1, k); v01 = set_u(bw, i, k-1);
-				fi[u+0] = e * (m[0] * fi1[v11+0] + m[3] * fi1[v11+1] + m[6] * fi1[v11+2]);
-				fi[u+1] = EI * (m[1] * fi1[v10+0] + m[4] * fi1[v10+1]);
-				fi[u+2] = m[2] * fi[v01+0] + m[8] * fi[v01+2];
-				sum += fi[u] + fi[u+1] + fi[u+2];
-				//System.out.println("("+i+","+k+";"+u+"): "+fi[u]+","+fi[u+1]+","+fi[u+2]);
-			}
-			// rescale
-			s[i] = sum;
-			_beg = set_u(bw, i, beg); _end = set_u(bw, i, end); _end += 2;
-			for (k = _beg, sum = 1./sum; k <= _end; ++k) fi[k] *= sum;
-		}
-		{ // f[l_query+1]
-			double sum;
-			for (k = 1, sum = 0.; k <= l_ref; ++k) {
-				int u = set_u(bw, l_query, k);
-				if (u < 3 || u >= bw2*3+3) continue;
-				sum += f[l_query][u+0] * sM + f[l_query][u+1] * sI;
-			}
-			s[l_query+1] = sum; // the last scaling factor
-		}
-
-		/*** backward ***/
-		// b[l_query] (b[l_query+1][0]=1 and thus \tilde{b}[][]=1/s[l_query+1]; this is where s[l_query+1] comes from)
-		for (k = 1; k <= l_ref; ++k) {
-			int u = set_u(bw, l_query, k);
-			double[] bi = b[l_query];
-			if (u < 3 || u >= bw2*3+3) continue;
-			bi[u+0] = sM / s[l_query] / s[l_query+1]; bi[u+1] = sI / s[l_query] / s[l_query+1];
-		}
-		// b[l_query-1..1]
-		for (i = l_query - 1; i >= 1; --i) {
-			int beg = 1, end = l_ref, x, _beg, _end;
-			double[] bi = b[i], bi1 = b[i+1];
-			double y = (i > 1)? 1. : 0.;
-			byte qyi1 = query[qstart+i];
-			x = i - bw; beg = beg > x? beg : x;
-			x = i + bw; end = end < x? end : x;
-			for (k = end; k >= beg; --k) {
-				int u, v11, v01, v10;
-				u = set_u(bw, i, k); v11 = set_u(bw, i+1, k+1); v10 = set_u(bw, i+1, k); v01 = set_u(bw, i, k+1);
-                final double e = (k >= l_ref? 0 : calcEpsilon(ref[k], qyi1, _iqual[qstart+i])) * bi1[v11];
-                bi[u+0] = e * m[0] + EI * m[1] * bi1[v10+1] + m[2] * bi[v01+2]; // bi1[v11] has been folded into e.
-				bi[u+1] = e * m[3] + EI * m[4] * bi1[v10+1];
-				bi[u+2] = (e * m[6] + m[8] * bi[v01+2]) * y;
-			}
-			// rescale
-			_beg = set_u(bw, i, beg); _end = set_u(bw, i, end); _end += 2;
-			for (k = _beg, y = 1./s[i]; k <= _end; ++k) bi[k] *= y;
-		}
-
- 		double pb;
-		{ // b[0]
-			int beg = 1, end = l_ref < bw + 1? l_ref : bw + 1;
-			double sum = 0.;
-			for (k = end; k >= beg; --k) {
-				int u = set_u(bw, 1, k);
-                double e = calcEpsilon(ref[k-1], query[qstart], _iqual[qstart]);
-                if (u < 3 || u >= bw2*3+3) continue;
-				sum += e * b[1][u+0] * bM + EI * b[1][u+1] * bI;
-			}
-			pb = b[0][set_u(bw, 0, 0)] = sum / s[0]; // if everything works as is expected, pb == 1.0
-		}
-
-        
-		/*** MAP ***/
-		for (i = 1; i <= l_query; ++i) {
-			double sum = 0., max = 0.;
-			final double[] fi = f[i], bi = b[i];
-			int beg = 1, end = l_ref, x, max_k = -1;
-			x = i - bw; beg = beg > x? beg : x;
-			x = i + bw; end = end < x? end : x;
-			for (k = beg; k <= end; ++k) {
-				final int u = set_u(bw, i, k);
-				double z;
-				sum += (z = fi[u+0] * bi[u+0]); if (z > max) { max = z; max_k = (k-1)<<2 | 0; }
-				sum += (z = fi[u+1] * bi[u+1]); if (z > max) { max = z; max_k = (k-1)<<2 | 1; }
-			}
-			max /= sum; sum *= s[i]; // if everything works as is expected, sum == 1.0
-			if (state != null) state[qstart+i-1] = max_k;
-			if (q != null) {
-				k = (int)(-4.343 * Math.log(1. - max) + .499); // = 10*log10(1-max)
-				q[qstart+i-1] = (byte)(k > 100? 99 : (k < minBaseQual ? minBaseQual : k));
-			}
-			//System.out.println("("+pb+","+sum+")"+" ("+(i-1)+","+(max_k>>2)+","+(max_k&3)+","+max+")");
-		}
-
-		return 0;
-	}
-
-    // ---------------------------------------------------------------------------------------------------------------
-    //
-    // Helper routines
-    //
-    // ---------------------------------------------------------------------------------------------------------------
-
-    /** decode the bit encoded state array values */
-    public static boolean stateIsIndel(int state) {
-        return (state & 3) != 0;
-    }
-
-    /** decode the bit encoded state array values */
-    public static int stateAlignedPosition(int state) {
-        return state >> 2;
-    }
-
-    /**
-     * helper routine for hmm_glocal
-     *
-     * @param b
-     * @param i
-     * @param k
-     * @return
-     */
-    private static int set_u(final int b, final int i, final int k) {
-		int x = i - b;
-		x = x > 0 ? x : 0;
-		return (k + 1 - x) * 3;
-	}
-
-    // ---------------------------------------------------------------------------------------------------------------
-    //
-    // Actually working with the BAQ tag now
-    //
-    // ---------------------------------------------------------------------------------------------------------------
-    
-    /**
-     * Get the BAQ attribute from the tag in read.  Returns null if no BAQ tag is present.
-     * @param read
-     * @return
-     */
-    public static byte[] getBAQTag(SAMRecord read) {
-        String s = read.getStringAttribute(BAQ_TAG);
-        return s != null ? s.getBytes() : null;
-    }
-
-    public static String encodeBQTag(SAMRecord read, byte[] baq) {
-        // Offset to base alignment quality (BAQ), of the same length as the read sequence.
-        // At the i-th read base, BAQi = Qi - (BQi - 64) where Qi is the i-th base quality.
-        // so BQi = Qi - BAQi + 64
-        byte[] bqTag = new byte[baq.length];
-        for ( int i = 0; i < bqTag.length; i++) {
-            final int bq = (int)read.getBaseQualities()[i] + 64;
-            final int baq_i = (int)baq[i];
-            final int tag = bq - baq_i;
-            // problem with the calculation of the correction factor; this is our problem
-            if ( tag < 0 )
-                throw new ReviewedGATKException("BAQ tag calculation error.  BAQ value above base quality at " + read);
-            // the original quality is too high, almost certainly due to using the wrong encoding in the BAM file
-            if ( tag > Byte.MAX_VALUE )
-                throw new UserException.MisencodedBAM(read, "we encountered an extremely high quality score (" + (int)read.getBaseQualities()[i] + ") with BAQ correction factor of " + baq_i);
-            bqTag[i] = (byte)tag;
-        }
-        return new String(bqTag);
-    }
-
-    public static void addBAQTag(SAMRecord read, byte[] baq) {
-        read.setAttribute(BAQ_TAG, encodeBQTag(read, baq));
-    }
-
-
-    /**
-      * Returns true if the read has a BAQ tag, or false otherwise
-      * @param read
-      * @return
-      */
-    public static boolean hasBAQTag(SAMRecord read) {
-        return read.getStringAttribute(BAQ_TAG) != null;
-    }
-
-    /**
-     * Returns a new qual array for read that includes the BAQ adjustment.  Does not support on-the-fly BAQ calculation
-     *
-     * @param read the SAMRecord to operate on
-     * @param overwriteOriginalQuals If true, we replace the original qualities scores in the read with their BAQ'd version
-     * @param useRawQualsIfNoBAQTag If useRawQualsIfNoBAQTag is true, then if there's no BAQ annotation we just use the raw quality scores.  Throws IllegalStateException is false and no BAQ tag is present
-     * @return
-     */
-    public static byte[] calcBAQFromTag(SAMRecord read, boolean overwriteOriginalQuals, boolean useRawQualsIfNoBAQTag) {
-        byte[] rawQuals = read.getBaseQualities();
-        byte[] newQuals = rawQuals;
-        byte[] baq = getBAQTag(read);
-
-        if ( baq != null ) {
-            // Offset to base alignment quality (BAQ), of the same length as the read sequence.
-            // At the i-th read base, BAQi = Qi - (BQi - 64) where Qi is the i-th base quality.
-            newQuals = overwriteOriginalQuals ? rawQuals : new byte[rawQuals.length];
-            for ( int i = 0; i < rawQuals.length; i++) {
-                int rawQual = (int)rawQuals[i];
-                int baq_delta = (int)baq[i] - 64;
-                int newval =  rawQual - baq_delta;
-                if ( newval < 0 )
-                    throw new UserException.MalformedBAM(read, "BAQ tag error: the BAQ value is larger than the base quality");
-                newQuals[i] = (byte)newval;
-            }
-        } else if ( ! useRawQualsIfNoBAQTag ) {
-            throw new IllegalStateException("Required BAQ tag to be present, but none was on read " + read.getReadName());
-        }
-
-        return newQuals;
-    }
-
-    /**
-     * Returns the BAQ adjusted quality score for this read at this offset.  Does not support on-the-fly BAQ calculation
-     *
-     * @param read the SAMRecord to operate on
-     * @param offset the offset of operate on
-     * @param useRawQualsIfNoBAQTag If useRawQualsIfNoBAQTag is true, then if there's no BAQ annotation we just use the raw quality scores.  Throws IllegalStateException is false and no BAQ tag is present
-     * @return
-     */
-    public static byte calcBAQFromTag(SAMRecord read, int offset, boolean useRawQualsIfNoBAQTag) {
-        byte rawQual = read.getBaseQualities()[offset];
-        byte newQual = rawQual;
-        byte[] baq = getBAQTag(read);
-
-        if ( baq != null ) {
-            // Offset to base alignment quality (BAQ), of the same length as the read sequence.
-            // At the i-th read base, BAQi = Qi - (BQi - 64) where Qi is the i-th base quality.
-            int baq_delta = (int)baq[offset] - 64;
-            int newval =  rawQual - baq_delta;
-            if ( newval < 0 )
-                throw new UserException.MalformedBAM(read, "BAQ tag error: the BAQ value is larger than the base quality");
-            newQual = (byte)newval;
-        
-        } else if ( ! useRawQualsIfNoBAQTag ) {
-            throw new IllegalStateException("Required BAQ tag to be present, but none was on read " + read.getReadName());
-        }
-
-        return newQual;
-    }
-
-    public static class BAQCalculationResult {
-        public byte[] refBases, rawQuals, readBases, bq;
-        public int[] state;
-
-        public BAQCalculationResult(SAMRecord read, byte[] ref) {
-            this(read.getBaseQualities(), read.getReadBases(), ref);
-        }
-
-        public BAQCalculationResult(byte[] bases, byte[] quals, byte[] ref) {
-            // prepares data for calculation
-            rawQuals = quals;
-            readBases = bases;
-
-            // now actually prepare the data structures, and fire up the hmm
-            bq = new byte[rawQuals.length];
-            state = new int[rawQuals.length];
-            this.refBases = ref;
-        }
-    }
-
-     public BAQCalculationResult calcBAQFromHMM(SAMRecord read, IndexedFastaSequenceFile refReader) {
-        // start is alignment start - band width / 2 - size of first I element, if there is one.  Stop is similar
-        int offset = getBandWidth() / 2;
-        long readStart = includeClippedBases ? read.getUnclippedStart() : read.getAlignmentStart();
-        long start = Math.max(readStart - offset - ReadUtils.getFirstInsertionOffset(read), 0);
-        long stop = (includeClippedBases ? read.getUnclippedEnd() : read.getAlignmentEnd()) + offset + ReadUtils.getLastInsertionOffset(read);
-
-        if ( stop > refReader.getSequenceDictionary().getSequence(read.getReferenceName()).getSequenceLength() ) {
-            return null;
-        } else {
-            // now that we have the start and stop, get the reference sequence covering it
-            ReferenceSequence refSeq = refReader.getSubsequenceAt(read.getReferenceName(), start, stop);
-            return calcBAQFromHMM(read, refSeq.getBases(), (int)(start - readStart));
-        }
-    }
-
-//    final SimpleTimer total = new SimpleTimer();
-//    final SimpleTimer local = new SimpleTimer();
-//    int n = 0;
-    public BAQCalculationResult calcBAQFromHMM(byte[] ref, byte[] query, byte[] quals, int queryStart, int queryEnd ) {
-//        total.restart();
-        if ( queryStart < 0 ) throw new ReviewedGATKException("BUG: queryStart < 0: " + queryStart);
-        if ( queryEnd < 0 ) throw new ReviewedGATKException("BUG: queryEnd < 0: " + queryEnd);
-        if ( queryEnd < queryStart ) throw new ReviewedGATKException("BUG: queryStart < queryEnd : " + queryStart + " end =" + queryEnd);
-
-        // note -- assumes ref is offset from the *CLIPPED* start
-        BAQCalculationResult baqResult = new BAQCalculationResult(query, quals, ref);
-        int queryLen = queryEnd - queryStart;
-//        local.restart();
-        hmm_glocal(baqResult.refBases, baqResult.readBases, queryStart, queryLen, baqResult.rawQuals, baqResult.state, baqResult.bq);
-//        local.stop();
-//        total.stop();
-//        if ( n++ % 100000 == 0 )
-//            logger.info("n = " + n + ": Total " + total.getElapsedTimeNano() + " local " + local.getElapsedTimeNano());
-        return baqResult;
-    }
-
-
-    /**
-     * Determine the appropriate start and stop offsets in the reads for the bases given the cigar string
-     * @param read
-     * @return
-     */
-    private final Pair<Integer,Integer> calculateQueryRange(SAMRecord read) {
-        int queryStart = -1, queryStop = -1;
-        int readI = 0;
-
-        // iterate over the cigar elements to determine the start and stop of the read bases for the BAQ calculation
-        for ( CigarElement elt : read.getCigar().getCigarElements() ) {
-            switch (elt.getOperator()) {
-                case N:  return null; // cannot handle these
-                case H : case P : case D: break; // ignore pads, hard clips, and deletions
-                case I : case S: case M: case EQ: case X:
-                    int prev = readI;
-                    readI += elt.getLength();
-                    if ( includeClippedBases || elt.getOperator() != CigarOperator.S) {
-                        if ( queryStart == -1 )
-                            queryStart = prev;
-                        queryStop = readI;
-                    }
-                    // in the else case we aren't including soft clipped bases, so we don't update
-                    // queryStart or queryStop
-                    break;
-                default: throw new ReviewedGATKException("BUG: Unexpected CIGAR element " + elt + " in read " + read.getReadName());
-            }
-        }
-
-        if ( queryStop == queryStart ) {
-            // this read is completely clipped away, and yet is present in the file for some reason
-            // usually they are flagged as non-PF, but it's possible to push them through the BAM
-            //System.err.printf("WARNING -- read is completely clipped away: " + read.format());
-            return null;
-        }
-
-        return new Pair<Integer, Integer>(queryStart, queryStop);
-    }
-
-    // we need to pad ref by at least the bandwidth / 2 on either side
-    public BAQCalculationResult calcBAQFromHMM(SAMRecord read, byte[] ref, int refOffset) {
-        // todo -- need to handle the case where the cigar sum of lengths doesn't cover the whole read
-        Pair<Integer, Integer> queryRange = calculateQueryRange(read);
-        if ( queryRange == null ) return null; // read has Ns, or is completely clipped away
-
-        int queryStart = queryRange.getFirst();
-        int queryEnd = queryRange.getSecond();
-
-        BAQCalculationResult baqResult = calcBAQFromHMM(ref, read.getReadBases(), read.getBaseQualities(), queryStart, queryEnd);
-
-        // cap quals
-        int readI = 0, refI = 0;
-        for ( CigarElement elt : read.getCigar().getCigarElements() ) {
-            int l = elt.getLength();
-            switch (elt.getOperator()) {
-                case N: // cannot handle these
-                    return null;
-                case H : case P : // ignore pads and hard clips
-                    break;
-                case S : refI += l; // move the reference too, in addition to I
-                case I :
-                    // todo -- is it really the case that we want to treat I and S the same?
-                    for ( int i = readI; i < readI + l; i++ ) baqResult.bq[i] = baqResult.rawQuals[i];
-                    readI += l;
-                    break;
-                case D : refI += l; break;
-                case M :
-                    for (int i = readI; i < readI + l; i++) {
-                        int expectedPos = refI - refOffset + (i - readI);
-                        baqResult.bq[i] = capBaseByBAQ( baqResult.rawQuals[i], baqResult.bq[i], baqResult.state[i], expectedPos );
-                    }
-                    readI += l; refI += l;
-                    break;
-                default:
-                    throw new ReviewedGATKException("BUG: Unexpected CIGAR element " + elt + " in read " + read.getReadName());
-            }
-        }
-        if ( readI != read.getReadLength() ) // odd cigar string
-            System.arraycopy(baqResult.rawQuals, 0, baqResult.bq, 0, baqResult.bq.length);
-
-        return baqResult;
-    }
-
-    public byte capBaseByBAQ( byte oq, byte bq, int state, int expectedPos ) {
-        byte b;
-        boolean isIndel = stateIsIndel(state);
-        int pos = stateAlignedPosition(state);
-        if ( isIndel || pos != expectedPos ) // we are an indel or we don't align to our best current position
-            b = minBaseQual; // just take b = minBaseQuality
-        else
-            b = bq < oq ? bq : oq;
-
-        return b;
-    }
-
-    /**
-     * Modifies read in place so that the base quality scores are capped by the BAQ calculation.  Uses the BAQ
-     * tag if present already and alwaysRecalculate is false, otherwise fires up the HMM and does the BAQ on the fly
-     * using the refReader to obtain the reference bases as needed.
-     * 
-     * @param read
-     * @param refReader
-     * @param calculationType
-     * @return BQ qualities for use, in case qmode is DONT_MODIFY
-     */
-    public byte[] baqRead(SAMRecord read, IndexedFastaSequenceFile refReader, CalculationMode calculationType, QualityMode qmode ) {
-        if ( DEBUG ) System.out.printf("BAQ %s read %s%n", calculationType, read.getReadName());
-
-        byte[] BAQQuals = read.getBaseQualities();      // in general we are overwriting quals, so just get a pointer to them
-        if ( calculationType == CalculationMode.OFF) { // we don't want to do anything
-            ; // just fall though
-        } else if ( excludeReadFromBAQ(read) ) {
-            ; // just fall through
-        } else {
-            final boolean readHasBAQTag = hasBAQTag(read);
-
-            if ( calculationType == CalculationMode.RECALCULATE || ! readHasBAQTag ) {
-                if ( DEBUG ) System.out.printf("  Calculating BAQ on the fly%n");
-                BAQCalculationResult hmmResult = calcBAQFromHMM(read, refReader);
-                if ( hmmResult != null ) {
-                    switch ( qmode ) {
-                        case ADD_TAG:         addBAQTag(read, hmmResult.bq); break;
-                        case OVERWRITE_QUALS: System.arraycopy(hmmResult.bq, 0, read.getBaseQualities(), 0, hmmResult.bq.length); break;
-                        case DONT_MODIFY:     BAQQuals = hmmResult.bq; break;
-                        default:              throw new ReviewedGATKException("BUG: unexpected qmode " + qmode);
-                    }
-                } else if ( readHasBAQTag ) {
-                    // remove the BAQ tag if it's there because we cannot trust it
-                    read.setAttribute(BAQ_TAG, null);
-                }
-            } else if ( qmode == QualityMode.OVERWRITE_QUALS ) { // only makes sense if we are overwriting quals
-                if ( DEBUG ) System.out.printf("  Taking BAQ from tag%n");
-                // this overwrites the original qualities
-                calcBAQFromTag(read, true, false);
-            }
-        }
-
-        return BAQQuals;
-    }
-
-    /**
-     * Returns true if we don't think this read is eligible for the BAQ calculation.  Examples include non-PF reads,
-     * duplicates, or unmapped reads.  Used by baqRead to determine if a read should fall through the calculation.
-     *
-     * @param read
-     * @return
-     */
-    public boolean excludeReadFromBAQ(SAMRecord read) {
-        // keeping mapped reads, regardless of pairing status, or primary alignment status.
-        return read.getReadUnmappedFlag() || read.getReadFailsVendorQualityCheckFlag() || read.getDuplicateReadFlag();
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/baq/BAQReadTransformer.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/baq/BAQReadTransformer.java
deleted file mode 100644
index c9192e1..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/baq/BAQReadTransformer.java
+++ /dev/null
@@ -1,74 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.baq;
-
-import htsjdk.samtools.reference.IndexedFastaSequenceFile;
-import org.broadinstitute.gatk.engine.GenomeAnalysisEngine;
-import org.broadinstitute.gatk.engine.WalkerManager;
-import org.broadinstitute.gatk.engine.iterators.ReadTransformer;
-import org.broadinstitute.gatk.engine.walkers.BAQMode;
-import org.broadinstitute.gatk.engine.walkers.Walker;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-import org.broadinstitute.gatk.utils.exceptions.UserException;
-import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
-
-/**
- * Applies Heng's BAQ calculation to a stream of incoming reads
- */
-public class BAQReadTransformer extends ReadTransformer {
-    private BAQ baqHMM;
-    private IndexedFastaSequenceFile refReader;
-    private BAQ.CalculationMode cmode;
-    private BAQ.QualityMode qmode;
-
-    @Override
-    public ApplicationTime initializeSub(final GenomeAnalysisEngine engine, final Walker walker) {
-        final BAQMode mode = WalkerManager.getWalkerAnnotation(walker, BAQMode.class);
-        this.refReader = engine.getReferenceDataSource().getReference();
-        this.cmode = engine.getArguments().BAQMode;
-        this.qmode = mode.QualityMode();
-        baqHMM = new BAQ(engine.getArguments().BAQGOP);
-
-        if ( qmode == BAQ.QualityMode.DONT_MODIFY )
-            throw new ReviewedGATKException("BUG: shouldn't create BAQ transformer with quality mode DONT_MODIFY");
-
-        if ( mode.ApplicationTime() == ReadTransformer.ApplicationTime.FORBIDDEN && enabled() )
-            throw new UserException.BadArgumentValue("baq", "Walker cannot accept BAQ'd base qualities, and yet BAQ mode " + cmode + " was requested.");
-
-        return mode.ApplicationTime();
-    }
-
-    @Override
-    public boolean enabled() {
-        return cmode != BAQ.CalculationMode.OFF;
-    }
-
-    @Override
-    public GATKSAMRecord apply(final GATKSAMRecord read) {
-        baqHMM.baqRead(read, refReader, cmode, qmode);
-        return read;
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/baq/ReadTransformingIterator.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/baq/ReadTransformingIterator.java
deleted file mode 100644
index 18ca02f..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/baq/ReadTransformingIterator.java
+++ /dev/null
@@ -1,69 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.baq;
-
-import com.google.java.contract.Ensures;
-import com.google.java.contract.Requires;
-import htsjdk.samtools.SAMRecord;
-import org.broadinstitute.gatk.engine.iterators.ReadTransformer;
-import org.broadinstitute.gatk.engine.iterators.GATKSAMIterator;
-import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
-
-import java.util.Iterator;
-
-/**
- * Iterator that applies a ReadTransformer to a stream of reads
- */
-public class ReadTransformingIterator implements GATKSAMIterator {
-    private final GATKSAMIterator it;
-    private final ReadTransformer transformer;
-
-    /**
-     * Creates a new ReadTransforming iterator
-     */
-    @Requires({"it != null", "transformer != null", "transformer.isInitialized()"})
-    public ReadTransformingIterator(final GATKSAMIterator it, final ReadTransformer transformer) {
-        if ( ! transformer.isInitialized() )
-            throw new IllegalStateException("Creating a read transformer stream for an uninitialized read transformer: " + transformer);
-        if ( transformer.getApplicationTime() == ReadTransformer.ApplicationTime.FORBIDDEN )
-            throw new IllegalStateException("Creating a read transformer stream for a forbidden transformer " + transformer);
-
-        this.it = it;
-        this.transformer = transformer;
-    }
-
-    @Requires("hasNext()")
-    @Ensures("result != null")
-    public SAMRecord next()     {
-        final GATKSAMRecord read = (GATKSAMRecord)it.next();
-        return transformer.apply(read);
-    }
-
-    public boolean hasNext()    { return this.it.hasNext(); }
-    public void remove()        { throw new UnsupportedOperationException("Can not remove records from a SAM file via an iterator!"); }
-    public void close()         { it.close(); }
-    public Iterator<SAMRecord> iterator() { return this; }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/classloader/JVMUtils.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/classloader/JVMUtils.java
deleted file mode 100644
index d695543..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/classloader/JVMUtils.java
+++ /dev/null
@@ -1,309 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.classloader;
-
-import org.broadinstitute.gatk.utils.Utils;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-import org.broadinstitute.gatk.utils.exceptions.GATKException;
-import org.reflections.util.ClasspathHelper;
-
-import java.io.File;
-import java.io.IOException;
-import java.lang.reflect.*;
-import java.net.URL;
-import java.util.*;
-
-/**
- * Created by IntelliJ IDEA.
- * User: hanna
- * Date: Mar 30, 2009
- * Time: 5:38:05 PM
- *
- * A set of static utility methods for determining information about this runtime environment.
- * Introspects classes, loads jars, etc.
- */
-public class JVMUtils {
-    /**
-     * Constructor access disallowed...static utility methods only!
-     */
-    private JVMUtils() { }
-
-    /**
-     * Determines which location contains the specified class.
-     * @param clazz The specified class.
-     * @return Location (either jar file or directory) of path containing class.
-     * @throws IOException when the URI cannot be found.
-     */
-    public static File getLocationFor( Class clazz ) throws IOException {
-        try {
-            java.net.URI locationURI = clazz.getProtectionDomain().getCodeSource().getLocation().toURI();
-            return new File(locationURI);
-        }
-        catch (java.net.URISyntaxException ex) {
-            // a URISyntaxException here must be an IO error; wrap as such.
-            throw new IOException(ex);
-        }
-        catch ( NullPointerException ne ) {
-        	throw new IOException("Can not extract code source location for "+clazz.getName());
-        }
-    }    
-
-    /**
-     * Is the specified class a concrete implementation of baseClass?
-     * @param clazz Class to check.
-     * @return True if clazz is concrete.  False otherwise.
-     */
-    public static boolean isConcrete( Class clazz ) {
-        return !Modifier.isAbstract(clazz.getModifiers()) &&
-               !Modifier.isInterface(clazz.getModifiers());
-    }
-
-    /**
-     * Is the specified class anonymous?  The plugin manager (for one) generally requires that
-     * plugin classes be named so that they can easily be specified from the command line.
-     * @param clazz The class on which to perform the anonymous check.
-     * @return True if the class is anonymous; false otherwise.
-     */
-    public static boolean isAnonymous(Class clazz) {
-        return clazz.isAnonymousClass();
-    }
-
-    /**
-     * Retrieve all fields available in this object, regardless of where they are declared or
-     * whether they're accessible.
-     * @param type Type to inspect for fields.
-     * @return A list of all available fields.
-     */
-    public static List<Field> getAllFields(Class type) {
-        List<Field> allFields = new ArrayList<Field>();
-        while( type != null ) {
-            allFields.addAll(Arrays.asList(type.getDeclaredFields()));
-            type = type.getSuperclass();
-        }
-        return allFields;
-    }
-
-    /**
-     * Find the field with the given name in the class.  Will inspect all fields, independent
-     * of access level.
-     * @param type Class in which to search for the given field.
-     * @param fieldName Name of the field for which to search.
-     * @return The field, or null if no such field exists.
-     */
-    public static Field findField( Class type, String fieldName ) {
-        while( type != null ) {
-            Field[] fields = type.getDeclaredFields();
-            for( Field field: fields ) {
-                if( field.getName().equals(fieldName) )
-                    return field;
-            }
-            type = type.getSuperclass();
-        }
-        return null;
-    }
-
-    /**
-     * Sets the provided field in the given instance to the given value.  Circumvents access restrictions:
-     * a field can be private and still set successfully by this function.
-     * @param field Field to set in the given object.
-     * @param instance Instance in which to set the field.
-     * @param value The value to which to set the given field in the given instance.
-     */
-    public static void setFieldValue( Field field, Object instance, Object value ) {
-        try {
-            field.setAccessible(true);
-            field.set(instance, value);
-        }
-        catch( IllegalAccessException ex ) {
-            throw new ReviewedGATKException(String.format("Could not set %s in instance %s to %s",field.getName(),instance.getClass().getName(),value.toString()));
-        }
-    }
-
-    /**
-     * Gets the value stored in the provided field in the given instance.
-     * @param field Field to set in the given object.
-     * @param instance Instance in which to set the field.
-     * @return Value currently stored in the given field.
-     */
-    public static Object getFieldValue( Field field, Object instance ) {
-        try {
-            field.setAccessible(true);
-            return field.get(instance);
-        }
-        catch( IllegalAccessException ex ) {
-            throw new ReviewedGATKException(String.format("Could not retrieve %s in instance %s",field.getName(),instance.getClass().getName()));
-        }
-    }
-
-    /**
-     * Gets a single object in the list matching or type-compatible with the given type.  Exceptions out if multiple objects match. 
-     * @param objectsToFilter objects to filter.
-     * @param type The desired type.
-     * @param <T> The selected type.
-     * @return A collection of the given arguments with the specified type.
-     */
-    public static <T> T getObjectOfType(Collection<Object> objectsToFilter, Class<T> type) {
-        // TODO: Make JVM utils.
-        Collection<T> selectedObjects = getObjectsOfType(objectsToFilter,type);
-        if(selectedObjects.size() > 1)
-            throw new ReviewedGATKException("User asked for a single instance of the type, multiple were present");
-        if(selectedObjects.size() == 0)
-            throw new ReviewedGATKException("User asked for a single instance of the type, but none were present");
-        return selectedObjects.iterator().next();
-    }
-
-    /**
-     * Gets a collection of all objects in the list matching or type-compatible with the given type.
-     * @param objectsToFilter objects to filter.
-     * @param type The desired type.
-     * @param <T> Again, the desired type.  Used so that clients can ignore type safety.
-     * @return A collection of the given arguments with the specified type.
-     */
-    @SuppressWarnings("unchecked")
-    public static <T> Collection<T> getObjectsOfType(Collection<Object> objectsToFilter, Class<T> type) {
-        Collection<T> selectedObjects = new ArrayList<T>();
-        for(Object object: objectsToFilter) {
-            if(type.isAssignableFrom(object.getClass()))
-                selectedObjects.add((T)object);
-        }
-        return selectedObjects;
-    }
-
-    /**
-     * Returns the list of class path urls.
-     * @return the list of class path urls.
-     */
-    public static Set<URL> getClasspathURLs() {
-        return ClasspathHelper.forManifest();
-    }
-
-    /**
-     * Adds all the generic types from a class definition to the collection.
-     * Does not inspect the methods or fields, only the class.
-     * @param classes Set to collect the classes.
-     * @param type Type to inspect.
-     */
-    public static void addGenericTypes(Set<Class<?>> classes, Type type) {
-        if (type instanceof ParameterizedType) {
-            ParameterizedType parameterizedType = (ParameterizedType)type;
-            for (Type actualType: parameterizedType.getActualTypeArguments())
-                addGenericTypes(classes, actualType);
-        } else if (type instanceof GenericArrayType) {
-            addGenericTypes(classes, ((GenericArrayType)type).getGenericComponentType());
-        } else if (type instanceof WildcardType) {
-            WildcardType wildcardType = (WildcardType)type;
-            for (Type upperType: wildcardType.getUpperBounds())
-                addGenericTypes(classes, upperType);
-            for (Type lowerType: wildcardType.getLowerBounds())
-                addGenericTypes(classes, lowerType);
-        } else if (type instanceof Class<?>) {
-            classes.add((Class<?>) type);
-        } else {
-            throw new GATKException("Unknown type: " + type + " (" + type.getClass().getName() + ")");
-        }
-    }
-
-    public static Class getParameterizedTypeClass(Type t) {
-        if ( t instanceof ParameterizedType ) {
-            ParameterizedType parameterizedType = (ParameterizedType)t;
-            if ( parameterizedType.getActualTypeArguments().length != 1 )
-                throw new ReviewedGATKException("BUG: more than 1 generic type found on class" + t);
-            return (Class)parameterizedType.getActualTypeArguments()[0];
-        } else
-            throw new ReviewedGATKException("BUG: could not find generic type on class " + t);
-    }
-
-    /**
-     * Returns a comma-separated list of the names of the interfaces implemented by this class
-     *
-     * @param covClass class
-     * @return names of interfaces
-     */
-    public static String classInterfaces(final Class covClass) {
-        final List<String> interfaces = new ArrayList<String>();
-        for ( final Class interfaceClass : covClass.getInterfaces() )
-            interfaces.add(interfaceClass.getSimpleName());
-        return Utils.join(", ", interfaces);
-    }
-
-    /**
-     * Returns the Class that invoked the specified "callee" class by examining the runtime stack.
-     * The calling class is defined as the first class below the callee class on the stack.
-     *
-     * For example, given callee == MyClass and the following runtime stack:
-     *
-     * JVMUtils.getCallingClass(MyClass) <-- top
-     * MyClass.foo()
-     * MyClass.bar()
-     * OtherClass.foo()
-     * OtherClass.bar()
-     * etc.
-     *
-     * this method would return OtherClass, since its methods invoked the methods in MyClass.
-     *
-     * Considers only the occurrence of the callee class on the stack that is closest to the top
-     * (even if there are multiple, non-contiguous occurrences).
-     *
-     * @param callee Class object for the class whose calling class we want to locate
-     * @return Class object for the class that invoked the callee class, or null if
-     *         no calling class was found
-     * @throws IllegalArgumentException if the callee class is not found on the runtime stack
-     * @throws IllegalStateException if we get an error while trying to load the Class object for the calling
-     *                               class reported on the runtime stack
-     */
-    public static Class getCallingClass( final Class callee ) {
-        final StackTraceElement[] stackTrace = new Throwable().getStackTrace();
-        final String calleeClassName = callee.getName();
-
-        // Start examining the stack at the second-from-the-top position, to remove
-        // this method call (ie., the call to getCallingClass() itself) from consideration.
-        int stackTraceIndex = 1;
-
-        // Find the first occurrence of the callee on the runtime stack. Need to use String comparison
-        // unfortunately, due to limitations of the StackTraceElement class.
-        while ( stackTraceIndex < stackTrace.length && ! stackTrace[stackTraceIndex].getClassName().equals(calleeClassName) ) {
-            stackTraceIndex++;
-        }
-
-        // Make sure we actually found the callee class on the stack
-        if ( stackTraceIndex == stackTrace.length ) {
-            throw new IllegalArgumentException(String.format("Specified callee %s is not present on the call stack", callee.getSimpleName()));
-        }
-
-        // Now find the caller class, which will be the class below the callee on the stack
-        while ( stackTraceIndex < stackTrace.length && stackTrace[stackTraceIndex].getClassName().equals(calleeClassName) ) {
-            stackTraceIndex++;
-        }
-
-        try {
-            return stackTraceIndex < stackTrace.length ? Class.forName(stackTrace[stackTraceIndex].getClassName()) : null;
-        }
-        catch ( ClassNotFoundException e ) {
-            throw new IllegalStateException(String.format("Could not find caller class %s from the runtime stack in the classpath",
-                                                          stackTrace[stackTraceIndex].getClassName()));
-        }
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/classloader/PluginManager.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/classloader/PluginManager.java
deleted file mode 100644
index 7313e19..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/classloader/PluginManager.java
+++ /dev/null
@@ -1,355 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.classloader;
-
-import org.apache.log4j.Level;
-import org.apache.log4j.Logger;
-import org.broadinstitute.gatk.engine.WalkerManager;
-import org.broadinstitute.gatk.engine.filters.FilterManager;
-import org.broadinstitute.gatk.utils.exceptions.DynamicClassResolutionException;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-import org.broadinstitute.gatk.utils.exceptions.UserException;
-import org.reflections.Reflections;
-import org.reflections.scanners.SubTypesScanner;
-import org.reflections.util.ConfigurationBuilder;
-
-import java.io.File;
-import java.lang.reflect.Constructor;
-import java.lang.reflect.Method;
-import java.net.MalformedURLException;
-import java.net.URL;
-import java.net.URLClassLoader;
-import java.util.*;
-
-/**
- * Manage plugins and plugin configuration.
- * @author mhanna
- * @version 0.1
- */
-public class PluginManager<PluginType> {
-    /**
-     * A reference into our introspection utility.
-     */
-    private static final Reflections defaultReflections;
-
-    static {
-        // turn off logging in the reflections library - they talk too much
-        Reflections.log = null;
-
-        Set<URL> classPathUrls = new LinkedHashSet<URL>();
-
-        URL cwd;
-        try {
-            cwd = new File(".").getAbsoluteFile().toURI().toURL();
-        } catch (MalformedURLException e) {
-            throw new RuntimeException(e);
-        }
-
-        // NOTE: Reflections also scans directories for classes.
-        // Meanwhile some of the jar MANIFEST.MF Bundle-ClassPath properties contain "."
-        // Do NOT let reflections scan the CWD where it often picks up test classes when
-        // they weren't explicitly in the classpath, for example the UninstantiableWalker
-        for (URL url: JVMUtils.getClasspathURLs())
-            if (!url.equals(cwd))
-                classPathUrls.add(url);
-
-        defaultReflections = new Reflections( new ConfigurationBuilder()
-            .setUrls(classPathUrls)
-            .setScanners(new SubTypesScanner()));
-    }
-
-    /**
-     * Defines the category of plugin defined by the subclass.
-     */
-    protected final String pluginCategory;
-
-    /**
-     * Define common strings to trim off the end of the name.
-     */
-    protected final String pluginSuffix;
-    
-    /**
-     * Plugins stored based on their name.
-     */
-    private final SortedMap<String, Class<? extends PluginType>> pluginsByName;
-
-    private final List<Class<? extends PluginType>> plugins;
-    private final List<Class<? extends PluginType>> interfaces;
-
-    /**
-     * Create a new plugin manager.
-     * @param pluginType Core type for a plugin.
-     */
-    public PluginManager(Class pluginType) {
-        this(pluginType, pluginType.getSimpleName().toLowerCase(), pluginType.getSimpleName(), null);
-    }
-
-    /**
-     * Create a new plugin manager.
-     * @param pluginType Core type for a plugin.
-     * @param classpath Custom class path to search for classes.
-     */
-    public PluginManager(Class pluginType, List<URL> classpath) {
-        this(pluginType, pluginType.getSimpleName().toLowerCase(), pluginType.getSimpleName(), classpath);
-    }
-
-    /**
-     * Create a new plugin manager.
-     * @param pluginType Core type for a plugin.
-     * @param pluginCategory Provides a category name to the plugin.  Must not be null.
-     * @param pluginSuffix Provides a suffix that will be trimmed off when converting to a plugin name.  Can be null.
-     */
-    public PluginManager(Class pluginType, String pluginCategory, String pluginSuffix) {
-        this(pluginType, pluginCategory, pluginSuffix, null);
-    }
-
-    /**
-     * Create a new plugin manager.
-     * @param pluginType Core type for a plugin.
-     * @param pluginCategory Provides a category name to the plugin.  Must not be null.
-     * @param pluginSuffix Provides a suffix that will be trimmed off when converting to a plugin name.  Can be null.
-     * @param classpath Custom class path to search for classes.
-     */
-    public PluginManager(Class pluginType, String pluginCategory, String pluginSuffix, List<URL> classpath) {
-        this.pluginCategory = pluginCategory;
-        this.pluginSuffix = pluginSuffix;
-
-        this.plugins = new ArrayList<Class<? extends PluginType>>();
-        this.interfaces = new ArrayList<Class<? extends PluginType>>();
-
-        Reflections reflections;
-        if (classpath == null) {
-            reflections = defaultReflections;
-        } else {
-            addClasspath(classpath);
-            reflections = new Reflections( new ConfigurationBuilder()
-                .setUrls(classpath)
-                .setScanners(new SubTypesScanner()));
-        }
-
-        // Load all classes types filtering them by concrete.
-        @SuppressWarnings("unchecked")
-        Set<Class<? extends PluginType>> allTypes = reflections.getSubTypesOf(pluginType);
-        for( Class<? extends PluginType> type: allTypes ) {
-            // The plugin manager does not support anonymous classes; to be a plugin, a class must have a name.
-            if(JVMUtils.isAnonymous(type))
-                continue;
-
-            if( JVMUtils.isConcrete(type) )
-                plugins.add(type);
-            else
-                interfaces.add(type);
-        }
-
-        pluginsByName = new TreeMap<String, Class<? extends PluginType>>();
-        for (Class<? extends PluginType> pluginClass : plugins) {
-            String pluginName = getName(pluginClass);
-            pluginsByName.put(pluginName, pluginClass);
-        }
-
-        // sort the plugins so the order of elements is deterministic
-        sortPlugins(plugins);
-        sortPlugins(interfaces);
-    }
-
-    /**
-     * Sorts, in place, the list of plugins according to getName() on each element
-     *
-     * @param unsortedPlugins unsorted plugins
-     */
-    private void sortPlugins(final List<Class<? extends PluginType>> unsortedPlugins) {
-        Collections.sort(unsortedPlugins, new ComparePluginsByName());
-    }
-
-    private final class ComparePluginsByName implements Comparator<Class<? extends PluginType>> {
-        @Override
-        public int compare(final Class<? extends PluginType> aClass, final Class<? extends PluginType> aClass1) {
-            String pluginName1 = getName(aClass);
-            String pluginName2 = getName(aClass1);
-            return pluginName1.compareTo(pluginName2);
-        }
-    }
-
-    /**
-     * Adds the URL to the system class loader classpath using reflection.
-     * HACK: Uses reflection to modify the class path, and assumes loader is a URLClassLoader.
-     * @param urls URLs to add to the system class loader classpath.
-     */
-    private static void addClasspath(List<URL> urls) {
-      Set<URL> existing = JVMUtils.getClasspathURLs();
-      for (URL url : urls) {
-          if (existing.contains(url))
-            continue;
-          try {
-              Method method = URLClassLoader.class.getDeclaredMethod("addURL", URL.class);
-              if (!method.isAccessible())
-                  method.setAccessible(true);
-              method.invoke(ClassLoader.getSystemClassLoader(), url);
-          } catch (Exception e) {
-              throw new ReviewedGATKException("Error adding url to the current classloader.", e);
-          }
-      }
-    }
-    
-    public Map<String, Class<? extends PluginType>> getPluginsByName() {
-        return Collections.unmodifiableMap(pluginsByName);
-    }
-
-    /**
-     * Does a plugin with the given name exist?
-     *
-     * @param pluginName Name of the plugin for which to search.
-     * @return True if the plugin exists, false otherwise.
-     */
-    public boolean exists(String pluginName) {
-        return pluginsByName.containsKey(pluginName);
-    }
-
-    /**
-     * Does a plugin with the given name exist?
-     *
-     * @param plugin Name of the plugin for which to search.
-     * @return True if the plugin exists, false otherwise.
-     */
-    public boolean exists(Class<? extends PluginType> plugin) {
-        return pluginsByName.containsValue(plugin);
-    }
-
-    /**
-     * Returns the plugin classes
-     * @return the plugin classes
-     */
-    public List<Class<? extends PluginType>> getPlugins() {
-        return plugins;
-    }
-
-    /**
-     * Returns the interface classes
-     * @return the interface classes
-     */
-    public List<Class<? extends PluginType>> getInterfaces() {
-        return interfaces;
-    }
-
-    /**
-     * Returns the plugin classes implementing interface or base clase
-     * @param type type of interface or base class
-     * @return the plugin classes implementing interface or base class
-     */
-    public List<Class<? extends PluginType>> getPluginsImplementing(Class<?> type) {
-        List<Class<? extends PluginType>> implementing = new ArrayList<Class<? extends PluginType>>();
-        for (Class<? extends PluginType> plugin: getPlugins())
-            if (type.isAssignableFrom(plugin))
-                implementing.add(plugin);
-        return implementing;
-    }
-
-
-
-    /**
-     * Gets a plugin with the given name
-     *
-     * @param pluginName Name of the plugin to retrieve.
-     * @return The plugin object if found; null otherwise.
-     */
-    public PluginType createByName(String pluginName) {
-        Class<? extends PluginType> plugin = pluginsByName.get(pluginName);
-        if( plugin == null ) {
-            String errorMessage = formatErrorMessage(pluginCategory,pluginName);
-            if ( this.getClass().isAssignableFrom(FilterManager.class) ) {
-                throw new UserException.MalformedReadFilterException(errorMessage);
-            } else if ( this.getClass().isAssignableFrom(WalkerManager.class) ) {
-                throw new UserException.MalformedWalkerArgumentsException(errorMessage);
-            } else {
-                throw new UserException.CommandLineException(errorMessage);
-            }
-        }
-        try {
-            return plugin.newInstance();
-        } catch (Exception e) {
-            throw new DynamicClassResolutionException(plugin, e);
-        }
-    }
-
-    /**
-     * create a plugin with the given type
-     *
-     * @param pluginType type of the plugin to create.
-     * @return The plugin object if created; null otherwise.
-     */
-    public PluginType createByType(Class<? extends PluginType> pluginType) {
-        Logger logger = Logger.getLogger(PluginManager.class);
-        logger.setLevel(Level.ERROR);
-        try {
-            Constructor<? extends PluginType> noArgsConstructor = pluginType.getDeclaredConstructor((Class[])null);
-            noArgsConstructor.setAccessible(true);
-            return noArgsConstructor.newInstance();
-        } catch (Exception e) {
-            logger.error("Couldn't initialize the plugin. Typically this is because of wrong global class variable initializations.");
-            throw new DynamicClassResolutionException(pluginType, e);
-        }
-    }
-
-    /**
-     * Returns concrete instances of the plugins
-     * @return concrete instances of the plugins
-     */
-    public List<PluginType> createAllTypes() {
-        List<PluginType> instances = new ArrayList<PluginType>();
-        for ( Class<? extends PluginType> c : getPlugins() ) {
-            instances.add(createByType(c));
-        }
-        return instances;
-    }
-
-    /**
-     * Create a name for this type of plugin.
-     *
-     * @param pluginType The type of plugin.
-     * @return A name for this type of plugin.
-     */
-    public String getName(Class pluginType) {
-        String pluginName = "";
-
-        if (pluginName.length() == 0) {
-            pluginName = pluginType.getSimpleName();
-            if (pluginSuffix != null && pluginName.endsWith(pluginSuffix))
-                pluginName = pluginName.substring(0, pluginName.lastIndexOf(pluginSuffix));
-        }
-
-        return pluginName;
-    }
-
-    /**
-     * Generate the error message for the plugin manager. The message is allowed to depend on the class.
-     * @param pluginCategory - string, the category of the plugin (e.g. read filter)
-     * @param pluginName - string, what we were trying to match (but failed to)
-     * @return error message text describing the error
-     */
-    protected String formatErrorMessage(String pluginCategory, String pluginName ) {
-        return String.format("Could not find %s with name: %s", pluginCategory,pluginName);
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/classloader/ProtectedPackageSource.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/classloader/ProtectedPackageSource.java
deleted file mode 100644
index 7c7a776..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/classloader/ProtectedPackageSource.java
+++ /dev/null
@@ -1,28 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.classloader;
-
-public interface ProtectedPackageSource {}
\ No newline at end of file
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/classloader/PublicPackageSource.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/classloader/PublicPackageSource.java
deleted file mode 100644
index 5321466..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/classloader/PublicPackageSource.java
+++ /dev/null
@@ -1,28 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.classloader;
-
-public interface PublicPackageSource {}
\ No newline at end of file
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/clipping/ClippingOp.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/clipping/ClippingOp.java
deleted file mode 100644
index f4ca70e..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/clipping/ClippingOp.java
+++ /dev/null
@@ -1,617 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.clipping;
-
-import com.google.java.contract.Requires;
-import htsjdk.samtools.Cigar;
-import htsjdk.samtools.CigarElement;
-import htsjdk.samtools.CigarOperator;
-import org.broadinstitute.gatk.utils.recalibration.EventType;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-import org.broadinstitute.gatk.utils.exceptions.UserException;
-import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
-
-import java.util.Iterator;
-import java.util.List;
-import java.util.Stack;
-import java.util.Vector;
-
-/**
- * Represents a clip on a read.  It has a type (see the enum) along with a start and stop in the bases
- * of the read, plus an option extraInfo (useful for carrying info where needed).
- * <p/>
- * Also holds the critical apply function that actually execute the clipping operation on a provided read,
- * according to the wishes of the supplied ClippingAlgorithm enum.
- */
-public class ClippingOp {
-    public final int start, stop; // inclusive
-
-    public ClippingOp(int start, int stop) {
-        this.start = start;
-        this.stop = stop;
-    }
-
-
-    public int getLength() {
-        return stop - start + 1;
-    }
-
-    /**
-     * Clips the bases in read according to this operation's start and stop.  Uses the clipping
-     * representation used is the one provided by algorithm argument.
-     *
-     * @param algorithm    clipping algorithm to use
-     * @param originalRead the read to be clipped
-     */
-    public GATKSAMRecord apply(ClippingRepresentation algorithm, GATKSAMRecord originalRead) {
-        GATKSAMRecord read = (GATKSAMRecord) originalRead.clone();
-        byte[] quals = read.getBaseQualities();
-        byte[] bases = read.getReadBases();
-        byte[] newBases = new byte[bases.length];
-        byte[] newQuals = new byte[quals.length];
-
-        switch (algorithm) {
-            // important note:
-            //   it's not safe to call read.getReadBases()[i] = 'N' or read.getBaseQualities()[i] = 0
-            //   because you're not guaranteed to get a pointer to the actual array of bytes in the GATKSAMRecord
-            case WRITE_NS:
-                for (int i = 0; i < bases.length; i++) {
-                    if (i >= start && i <= stop) {
-                        newBases[i] = 'N';
-                    }
-                    else {
-                        newBases[i] = bases[i];
-                    }
-                }
-                read.setReadBases(newBases);
-                break;
-            case WRITE_Q0S:
-                for (int i = 0; i < quals.length; i++) {
-                    if (i >= start && i <= stop) {
-                        newQuals[i] = 0;
-                    }
-                    else {
-                        newQuals[i] = quals[i];
-                    }
-                }
-                read.setBaseQualities(newQuals);
-                break;
-            case WRITE_NS_Q0S:
-                for (int i = 0; i < bases.length; i++) {
-                    if (i >= start && i <= stop) {
-                        newQuals[i] = 0;
-                        newBases[i] = 'N';
-                    }
-                    else {
-                        newQuals[i] = quals[i];
-                        newBases[i] = bases[i];
-                    }
-                }
-                read.setBaseQualities(newBases);
-                read.setReadBases(newBases);
-                break;
-            case HARDCLIP_BASES:
-                read = hardClip(read, start, stop);
-                break;
-
-            case SOFTCLIP_BASES:
-                if (read.getReadUnmappedFlag()) {
-                    // we can't process unmapped reads
-                    throw new UserException("Read Clipper cannot soft clip unmapped reads");
-                }
-
-                //System.out.printf("%d %d %d%n", stop, start, read.getReadLength());
-                int myStop = stop;
-                if ((stop + 1 - start) == read.getReadLength()) {
-                    // BAM representation issue -- we can't SOFTCLIP away all bases in a read, just leave it alone
-                    //Walker.logger.info(String.format("Warning, read %s has all bases clip but this can't be represented with SOFTCLIP_BASES, just leaving it alone", read.getReadName()));
-                    //break;
-                    myStop--; // just decrement stop
-                }
-
-                if (start > 0 && myStop != read.getReadLength() - 1)
-                    throw new RuntimeException(String.format("Cannot apply soft clipping operator to the middle of a read: %s to be clipped at %d-%d", read.getReadName(), start, myStop));
-
-                Cigar oldCigar = read.getCigar();
-
-                int scLeft = 0, scRight = read.getReadLength();
-                if (start == 0)
-                    scLeft = myStop + 1;
-                else
-                    scRight = start;
-
-                Cigar newCigar = softClip(oldCigar, scLeft, scRight);
-                read.setCigar(newCigar);
-
-                int newClippedStart = getNewAlignmentStartOffset(newCigar, oldCigar);
-                int newStart = read.getAlignmentStart() + newClippedStart;
-                read.setAlignmentStart(newStart);
-
-                break;
-
-            case REVERT_SOFTCLIPPED_BASES:
-                read = revertSoftClippedBases(read);
-                break;
-
-            default:
-                throw new IllegalStateException("Unexpected Clipping operator type " + algorithm);
-        }
-
-        return read;
-    }
-
-    private GATKSAMRecord revertSoftClippedBases(GATKSAMRecord read) {
-        GATKSAMRecord unclipped = (GATKSAMRecord) read.clone();
-
-        Cigar unclippedCigar = new Cigar();
-        int matchesCount = 0;
-        for (CigarElement element : read.getCigar().getCigarElements()) {
-            if (element.getOperator() == CigarOperator.SOFT_CLIP || element.getOperator() == CigarOperator.MATCH_OR_MISMATCH)
-                matchesCount += element.getLength();
-            else if (matchesCount > 0) {
-                unclippedCigar.add(new CigarElement(matchesCount, CigarOperator.MATCH_OR_MISMATCH));
-                matchesCount = 0;
-                unclippedCigar.add(element);
-            } else
-                unclippedCigar.add(element);
-        }
-        if (matchesCount > 0)
-            unclippedCigar.add(new CigarElement(matchesCount, CigarOperator.MATCH_OR_MISMATCH));
-
-        unclipped.setCigar(unclippedCigar);
-        final int newStart = read.getAlignmentStart() + calculateAlignmentStartShift(read.getCigar(), unclippedCigar);
-        unclipped.setAlignmentStart(newStart);
-
-        if ( newStart <= 0 ) {
-            // if the start of the unclipped read occurs before the contig,
-            // we must hard clip away the bases since we cannot represent reads with
-            // negative or 0 alignment start values in the SAMRecord (e.g., 0 means unaligned)
-            return hardClip(unclipped, 0, - newStart);
-        } else {
-            return unclipped;
-        }
-    }
-
-    /**
-     * Given a cigar string, get the number of bases hard or soft clipped at the start
-     */
-    private int getNewAlignmentStartOffset(final Cigar __cigar, final Cigar __oldCigar) {
-        int num = 0;
-        for (CigarElement e : __cigar.getCigarElements()) {
-            if (!e.getOperator().consumesReferenceBases()) {
-                if (e.getOperator().consumesReadBases()) {
-                    num += e.getLength();
-                }
-            } else {
-                break;
-            }
-        }
-
-        int oldNum = 0;
-        int curReadCounter = 0;
-
-        for (CigarElement e : __oldCigar.getCigarElements()) {
-            int curRefLength = e.getLength();
-            int curReadLength = e.getLength();
-            if (!e.getOperator().consumesReadBases()) {
-                curReadLength = 0;
-            }
-
-            boolean truncated = false;
-            if (curReadCounter + curReadLength > num) {
-                curReadLength = num - curReadCounter;
-                curRefLength = num - curReadCounter;
-                truncated = true;
-            }
-
-            if (!e.getOperator().consumesReferenceBases()) {
-                curRefLength = 0;
-            }
-
-            curReadCounter += curReadLength;
-            oldNum += curRefLength;
-
-            if (curReadCounter > num || truncated) {
-                break;
-            }
-        }
-
-        return oldNum;
-    }
-
-    /**
-     * Given a cigar string, soft clip up to startClipEnd and soft clip starting at endClipBegin
-     */
-    private Cigar softClip(final Cigar __cigar, final int __startClipEnd, final int __endClipBegin) {
-        if (__endClipBegin <= __startClipEnd) {
-            //whole thing should be soft clipped
-            int cigarLength = 0;
-            for (CigarElement e : __cigar.getCigarElements()) {
-                cigarLength += e.getLength();
-            }
-
-            Cigar newCigar = new Cigar();
-            newCigar.add(new CigarElement(cigarLength, CigarOperator.SOFT_CLIP));
-            assert newCigar.isValid(null, -1) == null;
-            return newCigar;
-        }
-
-        int curLength = 0;
-        Vector<CigarElement> newElements = new Vector<CigarElement>();
-        for (CigarElement curElem : __cigar.getCigarElements()) {
-            if (!curElem.getOperator().consumesReadBases()) {
-                if (curElem.getOperator() == CigarOperator.HARD_CLIP || curLength > __startClipEnd && curLength < __endClipBegin) {
-                    newElements.add(new CigarElement(curElem.getLength(), curElem.getOperator()));
-                }
-                continue;
-            }
-
-            int s = curLength;
-            int e = curLength + curElem.getLength();
-            if (e <= __startClipEnd || s >= __endClipBegin) {
-                //must turn this entire thing into a clip
-                newElements.add(new CigarElement(curElem.getLength(), CigarOperator.SOFT_CLIP));
-            } else if (s >= __startClipEnd && e <= __endClipBegin) {
-                //same thing
-                newElements.add(new CigarElement(curElem.getLength(), curElem.getOperator()));
-            } else {
-                //we are clipping in the middle of this guy
-                CigarElement newStart = null;
-                CigarElement newMid = null;
-                CigarElement newEnd = null;
-
-                int midLength = curElem.getLength();
-                if (s < __startClipEnd) {
-                    newStart = new CigarElement(__startClipEnd - s, CigarOperator.SOFT_CLIP);
-                    midLength -= newStart.getLength();
-                }
-
-                if (e > __endClipBegin) {
-                    newEnd = new CigarElement(e - __endClipBegin, CigarOperator.SOFT_CLIP);
-                    midLength -= newEnd.getLength();
-                }
-                assert midLength >= 0;
-                if (midLength > 0) {
-                    newMid = new CigarElement(midLength, curElem.getOperator());
-                }
-                if (newStart != null) {
-                    newElements.add(newStart);
-                }
-                if (newMid != null) {
-                    newElements.add(newMid);
-                }
-                if (newEnd != null) {
-                    newElements.add(newEnd);
-                }
-            }
-            curLength += curElem.getLength();
-        }
-
-        Vector<CigarElement> finalNewElements = new Vector<CigarElement>();
-        CigarElement lastElement = null;
-        for (CigarElement elem : newElements) {
-            if (lastElement == null || lastElement.getOperator() != elem.getOperator()) {
-                if (lastElement != null) {
-                    finalNewElements.add(lastElement);
-                }
-                lastElement = elem;
-            } else {
-                lastElement = new CigarElement(lastElement.getLength() + elem.getLength(), lastElement.getOperator());
-            }
-        }
-        if (lastElement != null) {
-            finalNewElements.add(lastElement);
-        }
-
-        Cigar newCigar = new Cigar(finalNewElements);
-        assert newCigar.isValid(null, -1) == null;
-        return newCigar;
-    }
-
-    /**
-     * Hard clip bases from read, from start to stop in base coordinates
-     *
-     * If start == 0, then we will clip from the front of the read, otherwise we clip
-     * from the right.  If start == 0 and stop == 10, this would clip out the first
-     * 10 bases of the read.
-     *
-     * Note that this function works with reads with negative alignment starts, in order to
-     * allow us to hardClip reads that have had their soft clips reverted and so might have
-     * negative alignment starts
-     *
-     * Works properly with reduced reads and insertion/deletion base qualities
-     *
-     * @param read a non-null read
-     * @param start a start >= 0 and < read.length
-     * @param stop a stop >= 0 and < read.length.
-     * @return a cloned version of read that has been properly trimmed down
-     */
-    private GATKSAMRecord hardClip(GATKSAMRecord read, int start, int stop) {
-
-        // If the read is unmapped there is no Cigar string and neither should we create a new cigar string
-        final CigarShift cigarShift = (read.getReadUnmappedFlag()) ? new CigarShift(new Cigar(), 0, 0) : hardClipCigar(read.getCigar(), start, stop);
-
-        // the cigar may force a shift left or right (or both) in case we are left with insertions
-        // starting or ending the read after applying the hard clip on start/stop.
-        final int newLength = read.getReadLength() - (stop - start + 1) - cigarShift.shiftFromStart - cigarShift.shiftFromEnd;
-        final byte[] newBases = new byte[newLength];
-        final byte[] newQuals = new byte[newLength];
-        final int copyStart = (start == 0) ? stop + 1 + cigarShift.shiftFromStart : cigarShift.shiftFromStart;
-
-        System.arraycopy(read.getReadBases(), copyStart, newBases, 0, newLength);
-        System.arraycopy(read.getBaseQualities(), copyStart, newQuals, 0, newLength);
-
-        final GATKSAMRecord hardClippedRead = (GATKSAMRecord) read.clone();
-
-        hardClippedRead.resetSoftStartAndEnd();                                                                         // reset the cached soft start and end because they may have changed now that the read was hard clipped. No need to calculate them now. They'll be lazily calculated on the next call to getSoftStart()/End()
-        hardClippedRead.setBaseQualities(newQuals);
-        hardClippedRead.setReadBases(newBases);
-        hardClippedRead.setCigar(cigarShift.cigar);
-        if (start == 0)
-            hardClippedRead.setAlignmentStart(read.getAlignmentStart() + calculateAlignmentStartShift(read.getCigar(), cigarShift.cigar));
-
-        if (read.hasBaseIndelQualities()) {
-            final byte[] newBaseInsertionQuals = new byte[newLength];
-            final byte[] newBaseDeletionQuals = new byte[newLength];
-            System.arraycopy(read.getBaseInsertionQualities(), copyStart, newBaseInsertionQuals, 0, newLength);
-            System.arraycopy(read.getBaseDeletionQualities(), copyStart, newBaseDeletionQuals, 0, newLength);
-            hardClippedRead.setBaseQualities(newBaseInsertionQuals, EventType.BASE_INSERTION);
-            hardClippedRead.setBaseQualities(newBaseDeletionQuals, EventType.BASE_DELETION);
-        }
-
-        return hardClippedRead;
-
-    }
-
-    @Requires({"!cigar.isEmpty()"})
-    private CigarShift hardClipCigar(Cigar cigar, int start, int stop) {
-        Cigar newCigar = new Cigar();
-        int index = 0;
-        int totalHardClipCount = stop - start + 1;
-        int alignmentShift = 0; // caused by hard clipping deletions
-
-        // hard clip the beginning of the cigar string
-        if (start == 0) {
-            Iterator<CigarElement> cigarElementIterator = cigar.getCigarElements().iterator();
-            CigarElement cigarElement = cigarElementIterator.next();
-            // Skip all leading hard clips
-            while (cigarElement.getOperator() == CigarOperator.HARD_CLIP) {
-                totalHardClipCount += cigarElement.getLength();
-                if (cigarElementIterator.hasNext())
-                    cigarElement = cigarElementIterator.next();
-                else
-                    throw new ReviewedGATKException("Read is entirely hardclipped, shouldn't be trying to clip it's cigar string");
-            }
-            // keep clipping until we hit stop
-            while (index <= stop) {
-                int shift = 0;
-                if (cigarElement.getOperator().consumesReadBases())
-                    shift = cigarElement.getLength();
-
-                // we're still clipping or just finished perfectly
-                if (index + shift == stop + 1) {
-                    alignmentShift += calculateHardClippingAlignmentShift(cigarElement, cigarElement.getLength());
-                    newCigar.add(new CigarElement(totalHardClipCount + alignmentShift, CigarOperator.HARD_CLIP));
-                }
-                // element goes beyond what we need to clip
-                else if (index + shift > stop + 1) {
-                    int elementLengthAfterChopping = cigarElement.getLength() - (stop - index + 1);
-                    alignmentShift += calculateHardClippingAlignmentShift(cigarElement, stop - index + 1);
-                    newCigar.add(new CigarElement(totalHardClipCount + alignmentShift, CigarOperator.HARD_CLIP));
-                    newCigar.add(new CigarElement(elementLengthAfterChopping, cigarElement.getOperator()));
-                }
-                index += shift;
-                alignmentShift += calculateHardClippingAlignmentShift(cigarElement, shift);
-
-                if (index <= stop && cigarElementIterator.hasNext())
-                    cigarElement = cigarElementIterator.next();
-                else
-                    break;
-            }
-
-            // add the remaining cigar elements
-            while (cigarElementIterator.hasNext()) {
-                cigarElement = cigarElementIterator.next();
-                newCigar.add(new CigarElement(cigarElement.getLength(), cigarElement.getOperator()));
-            }
-        }
-
-        // hard clip the end of the cigar string
-        else {
-            Iterator<CigarElement> cigarElementIterator = cigar.getCigarElements().iterator();
-            CigarElement cigarElement = cigarElementIterator.next();
-
-            // Keep marching on until we find the start
-            while (index < start) {
-                int shift = 0;
-                if (cigarElement.getOperator().consumesReadBases())
-                    shift = cigarElement.getLength();
-
-                // we haven't gotten to the start yet, keep everything as is.
-                if (index + shift < start)
-                    newCigar.add(new CigarElement(cigarElement.getLength(), cigarElement.getOperator()));
-
-                    // element goes beyond our clip starting position
-                else {
-                    int elementLengthAfterChopping = start - index;
-                    alignmentShift += calculateHardClippingAlignmentShift(cigarElement, cigarElement.getLength() - (start - index));
-
-                    // if this last element is a HARD CLIP operator, just merge it with our hard clip operator to be added later
-                    if (cigarElement.getOperator() == CigarOperator.HARD_CLIP)
-                        totalHardClipCount += elementLengthAfterChopping;
-                        // otherwise, maintain what's left of this last operator
-                    else
-                        newCigar.add(new CigarElement(elementLengthAfterChopping, cigarElement.getOperator()));
-                }
-                index += shift;
-                if (index < start && cigarElementIterator.hasNext())
-                    cigarElement = cigarElementIterator.next();
-                else
-                    break;
-            }
-
-            // check if we are hard clipping indels
-            while (cigarElementIterator.hasNext()) {
-                cigarElement = cigarElementIterator.next();
-                alignmentShift += calculateHardClippingAlignmentShift(cigarElement, cigarElement.getLength());
-
-                // if the read had a HardClip operator in the end, combine it with the Hard Clip we are adding
-                if (cigarElement.getOperator() == CigarOperator.HARD_CLIP)
-                    totalHardClipCount += cigarElement.getLength();
-            }
-            newCigar.add(new CigarElement(totalHardClipCount + alignmentShift, CigarOperator.HARD_CLIP));
-        }
-        return cleanHardClippedCigar(newCigar);
-    }
-
-    /**
-     * Checks if a hard clipped cigar left a read starting or ending with deletions or gap (N)
-     * and cleans it up accordingly.
-     *
-     * @param cigar the original cigar
-     * @return an object with the shifts (see CigarShift class)
-     */
-    private CigarShift cleanHardClippedCigar(final Cigar cigar) {
-        final Cigar cleanCigar = new Cigar();
-        int shiftFromStart = 0;
-        int shiftFromEnd = 0;
-        Stack<CigarElement> cigarStack = new Stack<CigarElement>();
-        final Stack<CigarElement> inverseCigarStack = new Stack<CigarElement>();
-
-        for (final CigarElement cigarElement : cigar.getCigarElements())
-            cigarStack.push(cigarElement);
-
-        for (int i = 1; i <= 2; i++) {
-            int shift = 0;
-            int totalHardClip = 0;
-            boolean readHasStarted = false;
-            boolean addedHardClips = false;
-
-            while (!cigarStack.empty()) {
-                CigarElement cigarElement = cigarStack.pop();
-
-                if (!readHasStarted &&
-                        cigarElement.getOperator() != CigarOperator.DELETION &&
-                        cigarElement.getOperator() != CigarOperator.SKIPPED_REGION &&
-                        cigarElement.getOperator() != CigarOperator.HARD_CLIP)
-                    readHasStarted = true;
-
-                else if (!readHasStarted && cigarElement.getOperator() == CigarOperator.HARD_CLIP)
-                    totalHardClip += cigarElement.getLength();
-
-                else if (!readHasStarted && cigarElement.getOperator() == CigarOperator.DELETION)
-                    totalHardClip += cigarElement.getLength();
-
-                else if (!readHasStarted && cigarElement.getOperator() == CigarOperator.SKIPPED_REGION)
-                    totalHardClip += cigarElement.getLength();
-
-                if (readHasStarted) {
-                    if (i == 1) {
-                        if (!addedHardClips) {
-                            if (totalHardClip > 0)
-                                inverseCigarStack.push(new CigarElement(totalHardClip, CigarOperator.HARD_CLIP));
-                            addedHardClips = true;
-                        }
-                        inverseCigarStack.push(cigarElement);
-                    } else {
-                        if (!addedHardClips) {
-                            if (totalHardClip > 0)
-                                cleanCigar.add(new CigarElement(totalHardClip, CigarOperator.HARD_CLIP));
-                            addedHardClips = true;
-                        }
-                        cleanCigar.add(cigarElement);
-                    }
-                }
-            }
-            // first pass  (i=1) is from end to start of the cigar elements
-            if (i == 1) {
-                shiftFromEnd = shift;
-                cigarStack = inverseCigarStack;
-            }
-            // second pass (i=2) is from start to end with the end already cleaned
-            else {
-                shiftFromStart = shift;
-            }
-        }
-        return new CigarShift(cleanCigar, shiftFromStart, shiftFromEnd);
-    }
-
-    /**
-     * Compute the offset of the first "real" position in the cigar on the genome
-     *
-     * This is defined as a first position after a run of Hs followed by a run of Ss
-     *
-     * @param cigar A non-null cigar
-     * @return the offset (from 0) of the first on-genome base
-     */
-    private int calcHardSoftOffset(final Cigar cigar) {
-        final List<CigarElement> elements = cigar.getCigarElements();
-
-        int size = 0;
-        int i = 0;
-        while ( i < elements.size() && elements.get(i).getOperator() == CigarOperator.HARD_CLIP ) {
-            size += elements.get(i).getLength();
-            i++;
-        }
-        while ( i < elements.size() && elements.get(i).getOperator() == CigarOperator.SOFT_CLIP ) {
-            size += elements.get(i).getLength();
-            i++;
-        }
-
-        return size;
-    }
-
-    private int calculateAlignmentStartShift(Cigar oldCigar, Cigar newCigar) {
-        final int newShift = calcHardSoftOffset(newCigar);
-        final int oldShift = calcHardSoftOffset(oldCigar);
-        return newShift - oldShift;
-    }
-
-    private int calculateHardClippingAlignmentShift(CigarElement cigarElement, int clippedLength) {
-        // Insertions should be discounted from the total hard clip count
-        if (cigarElement.getOperator() == CigarOperator.INSERTION)
-            return -clippedLength;
-
-            // Deletions and Ns should be added to the total hard clip count (because we want to maintain the original alignment start)
-        else if (cigarElement.getOperator() == CigarOperator.DELETION || cigarElement.getOperator() == CigarOperator.SKIPPED_REGION)
-            return cigarElement.getLength();
-
-        // There is no shift if we are not clipping an indel
-        return 0;
-    }
-
-    private static class CigarShift {
-        private Cigar cigar;
-        private int shiftFromStart;
-        private int shiftFromEnd;
-
-        private CigarShift(Cigar cigar, int shiftFromStart, int shiftFromEnd) {
-            this.cigar = cigar;
-            this.shiftFromStart = shiftFromStart;
-            this.shiftFromEnd = shiftFromEnd;
-        }
-    }
-}
\ No newline at end of file
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/clipping/ClippingRepresentation.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/clipping/ClippingRepresentation.java
deleted file mode 100644
index 5d86e0b..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/clipping/ClippingRepresentation.java
+++ /dev/null
@@ -1,63 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.clipping;
-
-/**
- * How should we represent a clipped bases in a read?
- */
-public enum ClippingRepresentation {
-    /** Clipped bases are changed to Ns */
-    WRITE_NS,
-
-    /** Clipped bases are changed to have Q0 quality score */
-    WRITE_Q0S,
-
-    /** Clipped bases are change to have both an N base and a Q0 quality score */
-    WRITE_NS_Q0S,
-
-    /**
-     * Change the read's cigar string to soft clip (S, see sam-spec) away the bases.
-     * Note that this can only be applied to cases where the clipped bases occur
-     * at the start or end of a read.
-     */
-    SOFTCLIP_BASES,
-
-    /**
-     * WARNING: THIS OPTION IS STILL UNDER DEVELOPMENT AND IS NOT SUPPORTED.
-     *
-     * Change the read's cigar string to hard clip (H, see sam-spec) away the bases.
-     * Hard clipping, unlike soft clipping, actually removes bases from the read,
-     * reducing the resulting file's size but introducing an irrevesible (i.e.,
-     * lossy) operation.  Note that this can only be applied to cases where the clipped
-     * bases occur at the start or end of a read.
-     */
-    HARDCLIP_BASES,
-
-    /**
-     * Turn all soft-clipped bases into matches
-     */
-    REVERT_SOFTCLIPPED_BASES,
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/clipping/ReadClipper.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/clipping/ReadClipper.java
deleted file mode 100644
index c31784f..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/clipping/ReadClipper.java
+++ /dev/null
@@ -1,568 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.clipping;
-
-import com.google.java.contract.Requires;
-import htsjdk.samtools.CigarElement;
-import htsjdk.samtools.CigarOperator;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-import org.broadinstitute.gatk.utils.recalibration.EventType;
-import org.broadinstitute.gatk.utils.sam.CigarUtils;
-import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
-import org.broadinstitute.gatk.utils.sam.ReadUtils;
-
-import java.util.ArrayList;
-import java.util.List;
-
-/**
- * A comprehensive clipping tool.
- *
- * General Contract:
- *  - All clipping operations return a new read with the clipped bases requested, it never modifies the original read.
- *  - If a read is fully clipped, return an empty GATKSAMRecord, never null.
- *  - When hard clipping, add cigar operator H for every *reference base* removed (i.e. Matches, SoftClips and Deletions, but *not* insertions). See Hard Clipping notes for details.
- *
- *
- * There are several types of clipping to use:
- *
- * Write N's:
- *   Change the bases to N's in the desired region. This can be applied anywhere in the read.
- *
- * Write Q0's:
- *   Change the quality of the bases in the desired region to Q0. This can be applied anywhere in the read.
- *
- * Write both N's and Q0's:
- *   Same as the two independent operations, put together.
- *
- * Soft Clipping:
- *   Do not change the read, just mark the reads as soft clipped in the Cigar String
- *   and adjust the alignment start and end of the read.
- *
- * Hard Clipping:
- *   Creates a new read without the hard clipped bases (and base qualities). The cigar string
- *   will be updated with the cigar operator H for every reference base removed (i.e. Matches,
- *   Soft clipped bases and deletions, but *not* insertions). This contract with the cigar
- *   is necessary to allow read.getUnclippedStart() / End() to recover the original alignment
- *   of the read (before clipping).
- *
- */
-public class ReadClipper {
-    final GATKSAMRecord read;
-    boolean wasClipped;
-    List<ClippingOp> ops = null;
-
-    /**
-     * Initializes a ReadClipper object.
-     *
-     * You can set up your clipping operations using the addOp method. When you're ready to
-     * generate a new read with all the clipping operations, use clipRead().
-     *
-     * Note: Use this if you want to set up multiple operations on the read using the ClippingOp
-     * class. If you just want to apply one of the typical modes of clipping, use the static
-     * clipping functions available in this class instead.
-     *
-     * @param read the read to clip
-     */
-    public ReadClipper(final GATKSAMRecord read) {
-        this.read = read;
-        this.wasClipped = false;
-    }
-
-    /**
-     * Add clipping operation to the read.
-     *
-     * You can add as many operations as necessary to this read before clipping. Beware that the
-     * order in which you add these operations matter. For example, if you hard clip the beginning
-     * of a read first then try to hard clip the end, the indices will have changed. Make sure you
-     * know what you're doing, otherwise just use the static functions below that take care of the
-     * ordering for you.
-     *
-     * Note: You only choose the clipping mode when you use clipRead()
-     *
-     * @param op a ClippingOp object describing the area you want to clip.
-     */
-    public void addOp(ClippingOp op) {
-        if (ops == null) ops = new ArrayList<ClippingOp>();
-        ops.add(op);
-    }
-
-    /**
-     * Check the list of operations set up for this read.
-     *
-     * @return a list of the operations set up for this read.
-     */
-    public List<ClippingOp> getOps() {
-        return ops;
-    }
-
-    /**
-     * Check whether or not this read has been clipped.
-     * @return true if this read has produced a clipped read, false otherwise.
-     */
-    public boolean wasClipped() {
-        return wasClipped;
-    }
-
-    /**
-     * The original read.
-     *
-     * @return  returns the read to be clipped (original)
-     */
-    public GATKSAMRecord getRead() {
-        return read;
-    }
-
-    /**
-     * Clips a read according to ops and the chosen algorithm.
-     *
-     * @param algorithm What mode of clipping do you want to apply for the stacked operations.
-     * @return the read with the clipping applied.
-     */
-    public GATKSAMRecord clipRead(ClippingRepresentation algorithm) {
-        if (ops == null)
-            return getRead();
-
-        GATKSAMRecord clippedRead = read;
-        for (ClippingOp op : getOps()) {
-            final int readLength = clippedRead.getReadLength();
-            //check if the clipped read can still be clipped in the range requested
-            if (op.start < readLength) {
-                ClippingOp fixedOperation = op;
-                if (op.stop >= readLength)
-                    fixedOperation = new ClippingOp(op.start, readLength - 1);
-
-                clippedRead = fixedOperation.apply(algorithm, clippedRead);
-            }
-        }
-        wasClipped = true;
-        ops.clear();
-        if ( clippedRead.isEmpty() )
-            return GATKSAMRecord.emptyRead(clippedRead);
-        return clippedRead;
-    }
-
-
-    /**
-     * Hard clips the left tail of a read up to (and including) refStop using reference
-     * coordinates.
-     *
-     * @param refStop the last base to be hard clipped in the left tail of the read.
-     * @return a new read, without the left tail.
-     */
-    @Requires("!read.getReadUnmappedFlag()")  // can't handle unmapped reads, as we're using reference coordinates to clip
-    private GATKSAMRecord hardClipByReferenceCoordinatesLeftTail(int refStop) {
-        return hardClipByReferenceCoordinates(-1, refStop);
-    }
-    public static GATKSAMRecord hardClipByReferenceCoordinatesLeftTail(GATKSAMRecord read, int refStop) {
-        return (new ReadClipper(read)).hardClipByReferenceCoordinates(-1, refStop);
-    }
-
-
-
-    /**
-     * Hard clips the right tail of a read starting at (and including) refStart using reference
-     * coordinates.
-     *
-     * @param refStart refStop the first base to be hard clipped in the right tail of the read.
-     * @return a new read, without the right tail.
-     */
-    @Requires("!read.getReadUnmappedFlag()")  // can't handle unmapped reads, as we're using reference coordinates to clip
-    private GATKSAMRecord hardClipByReferenceCoordinatesRightTail(int refStart) {
-        return hardClipByReferenceCoordinates(refStart, -1);
-    }
-    public static GATKSAMRecord hardClipByReferenceCoordinatesRightTail(GATKSAMRecord read, int refStart) {
-        return (new ReadClipper(read)).hardClipByReferenceCoordinates(refStart, -1);
-    }
-
-    /**
-     * Hard clips a read using read coordinates.
-     *
-     * @param start the first base to clip (inclusive)
-     * @param stop the last base to clip (inclusive)
-     * @return a new read, without the clipped bases
-     */
-    @Requires({"start >= 0 && stop <= read.getReadLength() - 1",   // start and stop have to be within the read
-               "start == 0 || stop == read.getReadLength() - 1"})  // cannot clip the middle of the read
-    private GATKSAMRecord hardClipByReadCoordinates(int start, int stop) {
-        if (read.isEmpty() || (start == 0 && stop == read.getReadLength() - 1))
-            return GATKSAMRecord.emptyRead(read);
-
-        this.addOp(new ClippingOp(start, stop));
-        return clipRead(ClippingRepresentation.HARDCLIP_BASES);
-    }
-    public static GATKSAMRecord hardClipByReadCoordinates(GATKSAMRecord read, int start, int stop) {
-        return (new ReadClipper(read)).hardClipByReadCoordinates(start, stop);
-    }
-
-
-    /**
-     * Hard clips both tails of a read.
-     *   Left tail goes from the beginning to the 'left' coordinate (inclusive)
-     *   Right tail goes from the 'right' coordinate (inclusive) until the end of the read
-     *
-     * @param left the coordinate of the last base to be clipped in the left tail (inclusive)
-     * @param right the coordinate of the first base to be clipped in the right tail (inclusive)
-     * @return a new read, without the clipped bases
-     */
-    @Requires({"left <= right",                    // tails cannot overlap
-               "left >= read.getAlignmentStart()", // coordinate has to be within the mapped read
-               "right <= read.getAlignmentEnd()"}) // coordinate has to be within the mapped read
-    private GATKSAMRecord hardClipBothEndsByReferenceCoordinates(int left, int right) {
-        if (read.isEmpty() || left == right)
-            return GATKSAMRecord.emptyRead(read);
-        GATKSAMRecord leftTailRead = hardClipByReferenceCoordinates(right, -1);
-
-        // after clipping one tail, it is possible that the consequent hard clipping of adjacent deletions
-        // make the left cut index no longer part of the read. In that case, clip the read entirely.
-        if (left > leftTailRead.getAlignmentEnd())
-            return GATKSAMRecord.emptyRead(read);
-
-        ReadClipper clipper = new ReadClipper(leftTailRead);
-        return clipper.hardClipByReferenceCoordinatesLeftTail(left);
-    }
-    public static GATKSAMRecord hardClipBothEndsByReferenceCoordinates(GATKSAMRecord read, int left, int right) {
-        return (new ReadClipper(read)).hardClipBothEndsByReferenceCoordinates(left, right);
-    }
-
-
-    /**
-     * Clips any contiguous tail (left, right or both) with base quality lower than lowQual using the desired algorithm.
-     *
-     * This function will look for low quality tails and hard clip them away. A low quality tail
-     * ends when a base has base quality greater than lowQual.
-     *
-     * @param algorithm the algorithm to use (HardClip, SoftClip, Write N's,...)
-     * @param lowQual every base quality lower than or equal to this in the tail of the read will be hard clipped
-     * @return a new read without low quality tails
-     */
-    private GATKSAMRecord clipLowQualEnds(ClippingRepresentation algorithm, byte lowQual) {
-        if (read.isEmpty())
-            return read;
-
-        final byte [] quals = read.getBaseQualities();
-        final int readLength = read.getReadLength();
-        int leftClipIndex = 0;
-        int rightClipIndex = readLength - 1;
-
-        // check how far we can clip both sides
-        while (rightClipIndex >= 0 && quals[rightClipIndex] <= lowQual) rightClipIndex--;
-        while (leftClipIndex < readLength && quals[leftClipIndex] <= lowQual) leftClipIndex++;
-
-        // if the entire read should be clipped, then return an empty read.
-        if (leftClipIndex > rightClipIndex)
-            return GATKSAMRecord.emptyRead(read);
-
-        if (rightClipIndex < readLength - 1) {
-            this.addOp(new ClippingOp(rightClipIndex + 1, readLength - 1));
-        }
-        if (leftClipIndex > 0 ) {
-            this.addOp(new ClippingOp(0, leftClipIndex - 1));
-        }
-        return this.clipRead(algorithm);
-    }
-
-    private GATKSAMRecord hardClipLowQualEnds(byte lowQual) {
-        return this.clipLowQualEnds(ClippingRepresentation.HARDCLIP_BASES, lowQual);
-    }
-    public static GATKSAMRecord hardClipLowQualEnds(GATKSAMRecord read, byte lowQual) {
-        return (new ReadClipper(read)).hardClipLowQualEnds(lowQual);
-    }
-    public static GATKSAMRecord clipLowQualEnds(GATKSAMRecord read, byte lowQual, ClippingRepresentation algorithm) {
-        return (new ReadClipper(read)).clipLowQualEnds(algorithm, lowQual);
-    }
-
-
-    /**
-     * Will hard clip every soft clipped bases in the read.
-     *
-     * @return a new read without the soft clipped bases
-     */
-    private GATKSAMRecord hardClipSoftClippedBases () {
-        if (read.isEmpty())
-            return read;
-
-        int readIndex = 0;
-        int cutLeft = -1;            // first position to hard clip (inclusive)
-        int cutRight = -1;           // first position to hard clip (inclusive)
-        boolean rightTail = false;   // trigger to stop clipping the left tail and start cutting the right tail
-
-        for (CigarElement cigarElement : read.getCigar().getCigarElements()) {
-            if (cigarElement.getOperator() == CigarOperator.SOFT_CLIP) {
-                if (rightTail) {
-                    cutRight = readIndex;
-                }
-                else {
-                    cutLeft = readIndex + cigarElement.getLength() - 1;
-                }
-            }
-            else if (cigarElement.getOperator() != CigarOperator.HARD_CLIP)
-                rightTail = true;
-
-            if (cigarElement.getOperator().consumesReadBases())
-                readIndex += cigarElement.getLength();
-        }
-
-        // It is extremely important that we cut the end first otherwise the read coordinates change.
-        if (cutRight >= 0)
-            this.addOp(new ClippingOp(cutRight, read.getReadLength() - 1));
-        if (cutLeft >= 0)
-            this.addOp(new ClippingOp(0, cutLeft));
-
-        return clipRead(ClippingRepresentation.HARDCLIP_BASES);
-    }
-    public static GATKSAMRecord hardClipSoftClippedBases (GATKSAMRecord read) {
-        return (new ReadClipper(read)).hardClipSoftClippedBases();
-    }
-
-
-    /**
-     * Hard clip the read to the variable region (from refStart to refStop)
-     *
-     * @param read     the read to be clipped
-     * @param refStart the beginning of the variant region (inclusive)
-     * @param refStop  the end of the variant region (inclusive)
-     * @return the read hard clipped to the variant region
-     */
-    public static GATKSAMRecord hardClipToRegion( final GATKSAMRecord read, final int refStart, final int refStop ) {
-        final int start = read.getAlignmentStart();
-        final int stop = read.getAlignmentEnd();
-        return hardClipToRegion(read, refStart, refStop,start,stop);
-    }
-
-    /**
-     * Hard clip the read to the variable region (from refStart to refStop) processing also the clipped bases
-     *
-     * @param read     the read to be clipped
-     * @param refStart the beginning of the variant region (inclusive)
-     * @param refStop  the end of the variant region (inclusive)
-     * @return the read hard clipped to the variant region
-     */
-    public static GATKSAMRecord hardClipToRegionIncludingClippedBases( final GATKSAMRecord read, final int refStart, final int refStop ) {
-        final int start = read.getOriginalAlignmentStart();
-        final int stop = start + CigarUtils.countRefBasesBasedOnCigar(read,0,read.getCigarLength()) - 1;
-        return hardClipToRegion(read, refStart, refStop,start,stop);
-    }
-
-    private static GATKSAMRecord hardClipToRegion( final GATKSAMRecord read, final int refStart, final int refStop, final int alignmentStart, final int alignmentStop){
-        // check if the read is contained in region
-        if (alignmentStart <= refStop && alignmentStop >= refStart) {
-            if (alignmentStart < refStart && alignmentStop > refStop)
-                return hardClipBothEndsByReferenceCoordinates(read, refStart - 1, refStop + 1);
-            else if (alignmentStart < refStart)
-                return hardClipByReferenceCoordinatesLeftTail(read, refStart - 1);
-            else if (alignmentStop > refStop)
-                return hardClipByReferenceCoordinatesRightTail(read, refStop + 1);
-            return read;
-        } else
-            return GATKSAMRecord.emptyRead(read);
-
-    }
-
-    public static List<GATKSAMRecord> hardClipToRegion( final List<GATKSAMRecord> reads, final int refStart, final int refStop ) {
-        final List<GATKSAMRecord> returnList = new ArrayList<GATKSAMRecord>( reads.size() );
-        for( final GATKSAMRecord read : reads ) {
-            final GATKSAMRecord clippedRead = hardClipToRegion( read, refStart, refStop );
-            if( !clippedRead.isEmpty() ) {
-                returnList.add( clippedRead );
-            }
-        }
-        return returnList;
-    }
-
-    /**
-     * Checks if a read contains adaptor sequences. If it does, hard clips them out.
-     *
-     * Note: To see how a read is checked for adaptor sequence see ReadUtils.getAdaptorBoundary()
-     *
-     * @return a new read without adaptor sequence
-     */
-    private GATKSAMRecord hardClipAdaptorSequence () {
-        final int adaptorBoundary = ReadUtils.getAdaptorBoundary(read);
-
-        if (adaptorBoundary == ReadUtils.CANNOT_COMPUTE_ADAPTOR_BOUNDARY || !ReadUtils.isInsideRead(read, adaptorBoundary))
-            return read;
-
-        return read.getReadNegativeStrandFlag() ? hardClipByReferenceCoordinatesLeftTail(adaptorBoundary) : hardClipByReferenceCoordinatesRightTail(adaptorBoundary);
-    }
-    public static GATKSAMRecord hardClipAdaptorSequence (GATKSAMRecord read) {
-        return (new ReadClipper(read)).hardClipAdaptorSequence();
-    }
-
-
-    /**
-     * Hard clips any leading insertions in the read. Only looks at the beginning of the read, not the end.
-     *
-     * @return a new read without leading insertions
-     */
-    private GATKSAMRecord hardClipLeadingInsertions() {
-        if (read.isEmpty())
-            return read;
-
-        for(CigarElement cigarElement : read.getCigar().getCigarElements()) {
-            if (cigarElement.getOperator() != CigarOperator.HARD_CLIP && cigarElement.getOperator() != CigarOperator.SOFT_CLIP &&
-                cigarElement.getOperator() != CigarOperator.INSERTION)
-                break;
-
-            else if (cigarElement.getOperator() == CigarOperator.INSERTION)
-                this.addOp(new ClippingOp(0, cigarElement.getLength() - 1));
-
-        }
-        return clipRead(ClippingRepresentation.HARDCLIP_BASES);
-    }
-    public static GATKSAMRecord hardClipLeadingInsertions(GATKSAMRecord read) {
-        return (new ReadClipper(read)).hardClipLeadingInsertions();
-    }
-
-
-    /**
-     * Turns soft clipped bases into matches
-     * @return a new read with every soft clip turned into a match
-     */
-    private GATKSAMRecord revertSoftClippedBases() {
-        if (read.isEmpty())
-            return read;
-
-        this.addOp(new ClippingOp(0, 0));
-        return this.clipRead(ClippingRepresentation.REVERT_SOFTCLIPPED_BASES);
-    }
-
-    /**
-     * Reverts ALL soft-clipped bases
-     *
-     * @param read the read
-     * @return the read with all soft-clipped bases turned into matches
-     */
-    public static GATKSAMRecord revertSoftClippedBases(GATKSAMRecord read) {
-        return (new ReadClipper(read)).revertSoftClippedBases();
-    }
-
-    /**
-     * Reverts only soft clipped bases with quality score greater than or equal to minQual
-     *
-     * todo -- Note: Will write a temporary field with the number of soft clips that were undone on each side (left: 'SL', right: 'SR') -- THIS HAS BEEN REMOVED TEMPORARILY SHOULD HAPPEN INSIDE THE CLIPPING ROUTINE!
-     *
-     * @param read    the read
-     * @param minQual the mininum base quality score to revert the base (inclusive)
-     * @return a new read with high quality soft clips reverted
-     */
-    public static GATKSAMRecord revertSoftClippedBases(GATKSAMRecord read, byte minQual) {
-        return revertSoftClippedBases(hardClipLowQualitySoftClips(read, minQual));
-    }
-
-    /**
-     * Hard clips away soft clipped bases that are below the given quality threshold
-     *
-     * @param read    the read
-     * @param minQual the mininum base quality score to revert the base (inclusive)
-     * @return a new read without low quality soft clipped bases
-     */
-    public static GATKSAMRecord hardClipLowQualitySoftClips(GATKSAMRecord read, byte minQual) {
-        int nLeadingSoftClips = read.getAlignmentStart() - read.getSoftStart();
-        if (read.isEmpty() || nLeadingSoftClips > read.getReadLength())
-            return GATKSAMRecord.emptyRead(read);
-
-        byte [] quals = read.getBaseQualities(EventType.BASE_SUBSTITUTION);
-        int left = -1;
-
-        if (nLeadingSoftClips > 0) {
-            for (int i = nLeadingSoftClips - 1; i >= 0; i--) {
-                if (quals[i] >= minQual)
-                    left = i;
-                else
-                    break;
-            }
-        }
-
-        int right = -1;
-        int nTailingSoftClips = read.getSoftEnd() - read.getAlignmentEnd();
-        if (nTailingSoftClips > 0) {
-            for (int i = read.getReadLength() - nTailingSoftClips; i < read.getReadLength() ; i++) {
-                if (quals[i] >= minQual)
-                    right = i;
-                else
-                    break;
-            }
-        }
-
-        GATKSAMRecord clippedRead = read;
-        if (right >= 0 && right + 1 < clippedRead.getReadLength())                                                      // only clip if there are softclipped bases (right >= 0) and the first high quality soft clip is not the last base (right+1 < readlength)
-                clippedRead = hardClipByReadCoordinates(clippedRead, right+1, clippedRead.getReadLength()-1);           // first we hard clip the low quality soft clips on the right tail
-        if (left >= 0 && left - 1 > 0)                                                                                  // only clip if there are softclipped bases (left >= 0) and the first high quality soft clip is not the last base (left-1 > 0)
-                clippedRead = hardClipByReadCoordinates(clippedRead, 0, left-1);                                        // then we hard clip the low quality soft clips on the left tail
-
-        return clippedRead;
-    }
-
-    /**
-     * Generic functionality to hard clip a read, used internally by hardClipByReferenceCoordinatesLeftTail
-     * and hardClipByReferenceCoordinatesRightTail. Should not be used directly.
-     *
-     * Note, it REQUIRES you to give the directionality of your hard clip (i.e. whether you're clipping the
-     * left of right tail) by specifying either refStart < 0 or refStop < 0.
-     *
-     * @param refStart  first base to clip (inclusive)
-     * @param refStop last base to clip (inclusive)
-     * @return a new read, without the clipped bases
-     */
-    @Requires({"!read.getReadUnmappedFlag()", "refStart < 0 || refStop < 0"})  // can't handle unmapped reads, as we're using reference coordinates to clip
-    protected GATKSAMRecord hardClipByReferenceCoordinates(int refStart, int refStop) {
-        if (read.isEmpty())
-            return read;
-
-        int start;
-        int stop;
-
-        // Determine the read coordinate to start and stop hard clipping
-        if (refStart < 0) {
-            if (refStop < 0)
-                throw new ReviewedGATKException("Only one of refStart or refStop must be < 0, not both (" + refStart + ", " + refStop + ")");
-            start = 0;
-            stop = ReadUtils.getReadCoordinateForReferenceCoordinate(read, refStop, ReadUtils.ClippingTail.LEFT_TAIL);
-        }
-        else {
-            if (refStop >= 0)
-                throw new ReviewedGATKException("Either refStart or refStop must be < 0 (" + refStart + ", " + refStop + ")");
-            start = ReadUtils.getReadCoordinateForReferenceCoordinate(read, refStart, ReadUtils.ClippingTail.RIGHT_TAIL);
-            stop = read.getReadLength() - 1;
-        }
-
-        if (start < 0 || stop > read.getReadLength() - 1)
-            throw new ReviewedGATKException("Trying to clip before the start or after the end of a read");
-
-        if ( start > stop )
-            throw new ReviewedGATKException(String.format("START (%d) > (%d) STOP -- this should never happen, please check read: %s (CIGAR: %s)", start, stop, read, read.getCigarString()));
-
-        if ( start > 0 && stop < read.getReadLength() - 1)
-            throw new ReviewedGATKException(String.format("Trying to clip the middle of the read: start %d, stop %d, cigar: %s", start, stop, read.getCigarString()));
-
-        this.addOp(new ClippingOp(start, stop));
-        GATKSAMRecord clippedRead = clipRead(ClippingRepresentation.HARDCLIP_BASES);
-        this.ops = null;
-        return clippedRead;
-    }
-
-
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/codecs/beagle/BeagleCodec.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/codecs/beagle/BeagleCodec.java
deleted file mode 100644
index 9159003..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/codecs/beagle/BeagleCodec.java
+++ /dev/null
@@ -1,276 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.codecs.beagle;
-/*
- * Copyright (c) 2010 The Broad Institute
- *
- * Permission is hereby granted, free of charge, to any person
- * obtaining a copy of this software and associated documentation
- * files (the "Software"), to deal in the Software without
- * restriction, including without limitation the rights to use,
- * copy, modify, merge, publish, distribute, sublicense, and/or sell
- * copies of the Software, and to permit persons to whom the
- * Software is furnished to do so, subject to the following
- * conditions:
- *
- * The above copyright notice and this permission notice shall be
- * included in all copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
- * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
- * OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
- * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
- * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
- * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
- * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
- * THE USE OR OTHER DEALINGS IN THE SOFTWARE.
- */
-
-
-import htsjdk.tribble.AsciiFeatureCodec;
-import htsjdk.tribble.exception.CodecLineParsingException;
-import htsjdk.tribble.readers.LineIterator;
-import org.broadinstitute.gatk.engine.refdata.ReferenceDependentFeatureCodec;
-import org.broadinstitute.gatk.utils.GenomeLoc;
-import org.broadinstitute.gatk.utils.GenomeLocParser;
-
-import java.io.IOException;
-import java.util.*;
-import java.util.regex.Pattern;
-
-/**
- * Codec for Beagle imputation engine
- *
- * <p>
- * Reads in tabular files with site markers and genotype posteriors, genotypes and phasing that Beagle produced
- * </p>
- *
- * <p>
- * See also: @see <a href="http://faculty.washington.edu/browning/beagle/beagle.html">BEAGLE home page</a><br>
- * </p>
-
- * </p>
- *
- * <h2>File format example for phased genotypes file</h2>
- * <pre>
- *     dummy header
- *      20:60251 T T T T T T
- *      20:60321 G G G G G G
- *      20:60467 G G G G G G
- * </pre>
- *
- * <h2>File format example for genotype posteriors</h2>
- * <pre>
- *     marker alleleA alleleB NA07056 NA07056 NA07056
- *     20:60251 T C 0.9962 0.0038 0 0.99245 0.00755 0 0.99245 0.00755 0
- *     20:60321 G T 0.98747 0.01253 0 0.99922 0.00078 0 0.99368 0.00632 0
- *     20:60467 G C 0.97475 0.02525 0 0.98718 0.01282 0 0.98718 0.01282 0
- * </pre>
- *
- * <h2>File format example for r2 file
- * <pre>
- *      20:60251        0.747
- *      20:60321        0.763
- *      20:60467        0.524
- * </pre>
- * </h2>
- * @author Mark DePristo
- * @since 2010
- */
-public class BeagleCodec extends AsciiFeatureCodec<BeagleFeature> implements ReferenceDependentFeatureCodec {
-    private String[] header;
-    public enum BeagleReaderType {PROBLIKELIHOOD, GENOTYPES, R2};
-    private BeagleReaderType readerType;
-    private int valuesPerSample;
-    private int initialSampleIndex;
-    private int markerPosition;
-    private ArrayList<String> sampleNames;
-    private int expectedTokensPerLine;
-    private final static Set<String> HEADER_IDs = new HashSet<String>(Arrays.asList("marker", "I"));
-
-    private static final String delimiterRegex = "\\s+";
-
-    /**
-     * The parser to use when resolving genome-wide locations.
-     */
-    private GenomeLocParser genomeLocParser;
-
-    public BeagleCodec() {
-        super(BeagleFeature.class);
-    }
-
-    /**
-     * Set the parser to use when resolving genetic data.
-     * @param genomeLocParser The supplied parser.
-     */
-    public void setGenomeLocParser(GenomeLocParser genomeLocParser) {
-        this.genomeLocParser =  genomeLocParser;
-    }
-
-    @Override
-    public Object readActualHeader(LineIterator reader) {
-        int[] lineCounter = new int[1];
-        try {
-            header = readHeader(reader, lineCounter);
-
-            Boolean getSamples = true;
-            markerPosition = 0; //default value for all readers
-
-            if (header[0].matches("I")) {
-                // Phased genotype Beagle files start with "I"
-                readerType = BeagleReaderType.GENOTYPES;
-                valuesPerSample = 2;
-                initialSampleIndex = 2;
-                markerPosition = 1;
-            }
-            else if (header[0].matches("marker")) {
-                readerType = BeagleReaderType.PROBLIKELIHOOD;
-                valuesPerSample = 3;
-                initialSampleIndex = 3;
-            }
-            else {
-                readerType = BeagleReaderType.R2;
-                getSamples = false;
-                // signal we don't have a header
-                lineCounter[0] = 0;
-                // not needed, but for consistency:
-                valuesPerSample = 0;
-                initialSampleIndex = 0;
-            }
-
-            sampleNames = new ArrayList<String>();
-
-            if (getSamples) {
-                for (int k = initialSampleIndex; k < header.length; k += valuesPerSample)
-                    sampleNames.add(header[k]);
-
-                expectedTokensPerLine = sampleNames.size()*valuesPerSample+initialSampleIndex;
-
-            } else {
-                expectedTokensPerLine = 2;
-            }
-
-
-        } catch(IOException e) {
-            throw new IllegalArgumentException("Unable to read from file.", e);
-        }
-        return header;
-    }
-
-    private static String[] readHeader(final LineIterator source, int[] lineCounter) throws IOException {
-
-        String[] header = null;
-        int numLines = 0;
-
-        //find the 1st line that's non-empty and not a comment
-        while(source.hasNext()) {
-            final String line = source.next();
-            numLines++;
-            if ( line.trim().isEmpty() ) {
-                continue;
-            }
-
-            //parse the header
-            header = line.split(delimiterRegex);
-            break;
-        }
-
-        // check that we found the header
-        if ( header == null ) {
-            throw new IllegalArgumentException("No header in " + source);
-        }
-
-        if(lineCounter != null) {
-            lineCounter[0] = numLines;
-        }
-
-        return header;
-    }
-
-    private static Pattern MARKER_PATTERN = Pattern.compile("(.+):([0-9]+)");
-
-    public BeagleFeature decode(String line) {
-        String[] tokens;
-
-        // split the line
-        tokens = line.split(delimiterRegex);
-        if (tokens.length != expectedTokensPerLine)
-            throw new CodecLineParsingException("Incorrect number of fields in Beagle input on line "+line);
-
-        if ( HEADER_IDs.contains(tokens[0]) )
-            return null;
-
-        BeagleFeature bglFeature = new BeagleFeature();
-
-        final GenomeLoc loc = genomeLocParser.parseGenomeLoc(tokens[markerPosition]); //GenomeLocParser.parseGenomeLoc(values.get(0)); - TODO switch to this
-
-        //parse the location: common to all readers
-        bglFeature.setChr(loc.getContig());
-        bglFeature.setStart((int) loc.getStart());
-        bglFeature.setEnd((int) loc.getStop());
-
-        // Parse R2 if needed
-        if (readerType == BeagleReaderType.R2) {
-            bglFeature.setR2value(Double.valueOf(tokens[1]));
-        }
-        else if (readerType == BeagleReaderType.GENOTYPES) {
-            // read phased Genotype pairs
-            HashMap<String, ArrayList<String>> sampleGenotypes = new HashMap<String, ArrayList<String>>();
-
-            for ( int i = 2; i < tokens.length; i+=2 ) {
-                String sampleName = sampleNames.get(i/2-1);
-                if ( ! sampleGenotypes.containsKey(sampleName) ) {
-                    sampleGenotypes.put(sampleName, new ArrayList<String>());
-                }
-
-                sampleGenotypes.get(sampleName).add(tokens[i]);
-                sampleGenotypes.get(sampleName).add(tokens[i+1]);
-            }
-
-            bglFeature.setGenotypes(sampleGenotypes);
-        }
-        else {
-            // read probabilities/likelihood trios and alleles
-            bglFeature.setAlleleA(tokens[1], true);
-            bglFeature.setAlleleB(tokens[2], false);
-            HashMap<String, ArrayList<String>> sampleProbLikelihoods = new HashMap<String, ArrayList<String>>();
-
-            for ( int i = 3; i < tokens.length; i+=3 ) {
-                String sampleName = sampleNames.get(i/3-1);
-                if ( ! sampleProbLikelihoods.containsKey(sampleName) ) {
-                    sampleProbLikelihoods.put(sampleName, new ArrayList<String>());
-                }
-
-                sampleProbLikelihoods.get(sampleName).add(tokens[i]);
-                sampleProbLikelihoods.get(sampleName).add(tokens[i+1]);
-                sampleProbLikelihoods.get(sampleName).add(tokens[i+2]);
-            }
-            bglFeature.setProbLikelihoods(sampleProbLikelihoods);
-        }
-
-        return bglFeature;
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/codecs/beagle/BeagleFeature.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/codecs/beagle/BeagleFeature.java
deleted file mode 100644
index bd9e4ef..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/codecs/beagle/BeagleFeature.java
+++ /dev/null
@@ -1,111 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.codecs.beagle;
-
-import htsjdk.tribble.Feature;
-import htsjdk.variant.variantcontext.Allele;
-
-import java.util.ArrayList;
-import java.util.Map;
-
-public class BeagleFeature implements Feature {
-
-    private String chr;
-    private int start;
-    private int end;
-
-    Map<String, ArrayList<String>> sampleGenotypes;
-    private Double r2Value;
-    Map<String, ArrayList<String>> probLikelihoods;
-
-    Allele AlleleA;
-    Allele AlleleB;
-
-
-    public String getChr() {
-        return chr;
-    }
-
-    public int getStart() {
-        return start;
-    }
-
-    public int getEnd() {
-        return end;
-    }
-
-    public Double getR2value() {
-        return r2Value;
-    }
-
-    public Allele getAlleleA() {
-        return AlleleA;
-    }
-
-    public Allele getAlleleB() {
-        return AlleleB;
-    }
-
-    public Map<String, ArrayList<String>> getProbLikelihoods() {
-        return probLikelihoods;
-    }
-
-    public Map<String, ArrayList<String>> getGenotypes() {
-        return sampleGenotypes;        
-    }
-
-    protected void setChr(String chr) {
-       this.chr = chr;
-    }
-
-    protected void setStart(int start) {
-        this.start = start;
-    }
-
-    protected void setEnd(int end) {
-        this.end = end;
-    }
-
-    protected void setR2value(double r2) {
-        this.r2Value = r2;
-    }
-
-    protected void setAlleleA(String a, boolean isRef) {
-        this.AlleleA = Allele.create(a, isRef);
-    }
-
-    protected void setAlleleB(String a, boolean isRef) {
-        this.AlleleB = Allele.create(a, isRef);
-    }
-
-    protected void setProbLikelihoods(Map<String, ArrayList<String>> p) {
-        this.probLikelihoods = p;
-    }
-
-    protected void setGenotypes(Map<String, ArrayList<String>> p) {
-        this.sampleGenotypes = p;
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/codecs/hapmap/RawHapMapCodec.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/codecs/hapmap/RawHapMapCodec.java
deleted file mode 100644
index ac50853..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/codecs/hapmap/RawHapMapCodec.java
+++ /dev/null
@@ -1,125 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.codecs.hapmap;
-
-import htsjdk.tribble.AsciiFeatureCodec;
-import htsjdk.tribble.FeatureCodecHeader;
-import htsjdk.tribble.annotation.Strand;
-import htsjdk.tribble.readers.LineIterator;
-
-import java.io.IOException;
-import java.util.Arrays;
-
-/**
- * A codec for the file types produced by the HapMap consortium
- *
- * <p>
- *     The format includes eleven standard fields, plus genotypes for each of the samples included
- *     in the file:
- *
- * <pre>
- *     Col1: refSNP rs# identifier at the time of release (NB might merge with another rs# in the future)
- *     Col2: SNP alleles according to dbSNP
- *     Col3: chromosome that SNP maps to
- *     Col4: chromosome position of SNP, in basepairs on reference sequence
- *     Col5: strand of reference sequence that SNP maps to
- *     Col6: version of reference sequence assembly
- *     Col7: HapMap genotype center that produced the genotypes
- *     Col8: LSID for HapMap protocol used for genotyping
- *     Col9: LSID for HapMap assay used for genotyping
- *     Col10: LSID for panel of individuals genotyped
- *     Col11: QC-code, currently 'QC+' for all entries (for future use)
- *     Col12 and on: observed genotypes of samples, one per column, sample identifiers in column headers (Coriell catalog numbers, example: NA10847). Duplicate samples have .dup suffix.
- * </pre>
- * </p>
- *
- * <p>
- *  See also: @See <a href="http://hapmap.ncbi.nlm.nih.gov/downloads/genotypes/">HapMap genotypes download</a>
- * </p>
- *
- * <h2>File format example</h2>
- * From <a href="http://hapmap.ncbi.nlm.nih.gov/downloads/genotypes/latest/forward/non-redundant/genotypes_chr1_ASW_r27_nr.b36_fwd.txt.gz">genotypes_chr1_ASW_r27_nr.b36_fwd.txt.gz</a>:
- * <pre>
- *     rs# alleles chrom pos strand assembly# center protLSID assayLSID panelLSID QCcode NA19625 NA19700 NA19701 NA19702 NA19703 NA19704 NA19705 NA19708 NA19712 NA19711 NA19818 NA19819 NA19828 NA19835 NA19834 NA19836 NA19902 NA19901 NA19900 NA19904 NA19919 NA19908 NA19909 NA19914 NA19915 NA19916 NA19917 NA19918 NA19921 NA20129 NA19713 NA19982 NA19983 NA19714 NA19985 NA20128 NA20126 NA20127 NA20277 NA20276 NA20279 NA20282 NA20281 NA20284 NA20287 NA20288 NA20290 NA20289 NA20291 NA20292 NA2 [...]
- *     rs9629043 C/T chr1 554636 + ncbi_b36 broad urn:LSID:affymetrix.hapmap.org:Protocol:GenomeWideSNP_6.0:3 urn:LSID:broad.hapmap.org:Assay:SNP_A-8575115:3 urn:lsid:dcc.hapmap.org:Panel:US_African-30-trios:3 QC+ CC CC CC CC CC CC CC CC CC CC CC CC NN CC CC CC CT CT CC CC CC CC CC CC CC CC CC CC CC CC CC CC CC CC CC CC CC CC CC CC CC CC CC CC CC CC CC CC CC CC CC CC CC CC CT CC CC CC CC CC CC CC CC CC CC CC CC CC CC CC CC CC CC CC CC CC CC CC CC CC CC CC CC
- *     rs28446478 G/T chr1 576058 + ncbi_b36 sanger urn:LSID:illumina.hapmap.org:Protocol:Human_1M_BeadChip:3 urn:LSID:sanger.hapmap.org:Assay:H1Mrs28446478:3 urn:lsid:dcc.hapmap.org:Panel:US_African-30-trios:3 QC+ GT TT GT TT TT TT TT GT GT TT TT TT TT GT GT GT GT TT GT TT GT GT TT GT GT TT TT TT GT GT TT TT TT GT TT GT TT GT GT GT GT GT TT GT TT TT GT GT TT TT TT TT TT TT GT GT GT GT TT TT TT TT GT TT GT TT TT GT TT TT TT GT TT TT TT GT GT TT GT TT GT TT TT
- *     rs12565286 C/G chr1 711153 + ncbi_b36 broad urn:LSID:affymetrix.hapmap.org:Protocol:GenomeWideSNP_6.0:3 urn:LSID:broad.hapmap.org:Assay:SNP_A-8709646:3 urn:lsid:dcc.hapmap.org:Panel:US_African-30-trios:3 QC+ GG GG GG GG GG GG GG GG CG GG GG GG GG GG GG GG GG GG GG CG GG GG GG GG GG GG GG GG GG GG GG GG GG GG GG GG GG GG GG GG CG GG GG GG GG GG GG GG CG CG GG GG GG GG GG GG GG GG GG CG CG GG GG GG GG GG GG GG GG GG GG CG NN GG GG GG GG GG GG NN GG NN NN
- * </pre>
- *
- * @author Mark DePristo
- * @since 2010
- */
-public class RawHapMapCodec extends AsciiFeatureCodec<RawHapMapFeature> {
-    // the minimum number of features in the HapMap file line
-    private static final int minimumFeatureCount = 11;
-
-    private String headerLine;
-
-    public RawHapMapCodec() {
-        super(RawHapMapFeature.class);
-    }
-
-    /**
-     * decode the hapmap record
-     * @param line the input line to decode
-     * @return a HapMapFeature, with the given fields 
-     */
-    public RawHapMapFeature decode(String line) {
-        String[] array = line.split("\\s+");
-
-        // make sure the split was successful - that we got an appropriate number of fields
-        if (array.length < minimumFeatureCount)
-            throw new IllegalArgumentException("Unable to parse line " + line + ", the length of split features is less than the minimum of " + minimumFeatureCount);
-
-        // create a new feature given the array
-        return new RawHapMapFeature(array[0],
-                array[1].split("/"),
-                array[2],
-                Long.valueOf(array[3]),
-                Strand.toStrand(array[4]),
-                array[5],
-                array[6],
-                array[7],
-                array[8],
-                array[9],
-                array[10],
-                Arrays.copyOfRange(array,11,array.length),
-                headerLine);
-    }
-
-    @Override
-    public Object readActualHeader(final LineIterator lineIterator) {
-        this.headerLine = lineIterator.next();
-        return headerLine;
-    }
-
-    @Override
-    public FeatureCodecHeader readHeader(final LineIterator lineIterator) throws IOException {
-        final String header = (String) readActualHeader(lineIterator);
-        // TODO: This approach may cause issues with files formatted with \r\n-style line-endings.
-        return new FeatureCodecHeader(header, header.length() + 1);
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/codecs/hapmap/RawHapMapFeature.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/codecs/hapmap/RawHapMapFeature.java
deleted file mode 100644
index a9b8788..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/codecs/hapmap/RawHapMapFeature.java
+++ /dev/null
@@ -1,196 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.codecs.hapmap;
-
-import htsjdk.tribble.Feature;
-import htsjdk.tribble.annotation.Strand;
-import htsjdk.variant.variantcontext.Allele;
-
-import java.util.HashMap;
-import java.util.Map;
-
-/**
- * a feature returned by the HapMap Codec - it represents contig, position, name,
- * alleles, other hapmap information, and genotypes for specified samples
- */
-public class RawHapMapFeature implements Feature {
-
-    public static final String NULL_ALLELE_STRING = "-";
-    public static final String INSERTION = "I";
-    public static final String DELETION = "D";
-
-    // the variables we store internally in the class
-    private final String name;
-    private final String[] alleles;
-    private Map<String, Allele> actualAlleles = null;
-    private final String contig ;
-    private long position;
-    private final Strand strand;
-    private final String assembly;
-    private final String center;
-    private final String protLSID;
-    private final String assayLSID;
-    private final String panelLSID;
-    private final String qccode;
-    private final String[] genotypes;
-
-    // we store the header line, if they'd like to get the samples
-    private final String headerLine;
-
-    /**
-     * create a HapMap Feature, based on all the records available in the hapmap file
-     * @param contig the contig name
-     * @param position the position
-     * @param strand the strand enum
-     * @param assembly what assembly this feature is from
-     * @param center the center that provided this SNP
-     * @param protLSID ??
-     * @param assayLSID ??
-     * @param panelLSID ??
-     * @param qccode ??
-     * @param genotypes a list of strings, representing the genotypes for the list of samples
-     */
-    public RawHapMapFeature(String name,
-                            String[] alleles,
-                            String contig,
-                            Long position,
-                            Strand strand,
-                            String assembly,
-                            String center,
-                            String protLSID,
-                            String assayLSID,
-                            String panelLSID,
-                            String qccode,
-                            String[] genotypes,
-                            String headerLine) {
-        this.name = name;
-        this.alleles = alleles;
-        this.contig = contig;
-        this.position = position;
-        this.strand = strand;
-        this.assembly =  assembly;
-        this.center =  center;
-        this.protLSID = protLSID ;
-        this.assayLSID = assayLSID ;
-        this.panelLSID = panelLSID ;
-        this.qccode = qccode;
-        this.genotypes = genotypes;
-        this.headerLine = headerLine;
-    }
-
-    /**
-     * get the contig value
-     * @return a string representing the contig
-     */
-    public String getChr() {
-        return contig;
-    }
-
-    /**
-     * get the start position, as an integer
-     * @return an int, representing the start position
-     */
-    public int getStart() {
-        return (int)position;
-    }
-
-    /**
-     * get the end position
-     * @return get the end position as an int
-     */
-    public int getEnd() {
-        return (int)position;
-    }
-
-    /**
-     * Getter methods
-     */
-
-    public Strand getStrand() {
-        return strand;
-    }
-
-    public String getAssembly() {
-        return assembly;
-    }
-
-    public String getCenter() {
-        return center;
-    }
-
-    public String getProtLSID() {
-        return protLSID;
-    }
-
-    public String getAssayLSID() {
-        return assayLSID;
-    }
-
-    public String getPanelLSID() {
-        return panelLSID;
-    }
-
-    public String getQCCode() {
-        return qccode;
-    }
-
-    public String getName() {
-        return name;
-    }
-
-    public String[] getAlleles() {
-        return alleles;
-    }
-
-    public String[] getGenotypes() {
-        return genotypes;
-    }
-
-    // This is necessary because HapMap places insertions in the incorrect position
-    public void updatePosition(long position) {
-        this.position = position;
-    }
-
-    public void setActualAlleles(Map<String, Allele> alleleMap) {
-        actualAlleles = new HashMap<String, Allele>(alleleMap);
-    }
-
-    public Map<String, Allele> getActualAlleles() {
-        return actualAlleles;
-    }
-    
-    /**
-     * get a list of the samples from the header (in order)
-     * @return a string array of sample names
-     */
-    public String[] getSampleIDs() {
-		String[] header = headerLine.split("\\s+");
-		String[] sample_ids = new String[header.length-11];
-		for (int i = 11; i < header.length; i++)
-			sample_ids[i-11] = header[i];
-		return sample_ids;
-	}
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/codecs/refseq/RefSeqCodec.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/codecs/refseq/RefSeqCodec.java
deleted file mode 100644
index 9d60076..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/codecs/refseq/RefSeqCodec.java
+++ /dev/null
@@ -1,171 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.codecs.refseq;
-
-import htsjdk.tribble.AsciiFeatureCodec;
-import htsjdk.tribble.Feature;
-import htsjdk.tribble.TribbleException;
-import htsjdk.tribble.readers.LineIterator;
-import org.broadinstitute.gatk.engine.refdata.ReferenceDependentFeatureCodec;
-import org.broadinstitute.gatk.utils.GenomeLoc;
-import org.broadinstitute.gatk.utils.GenomeLocParser;
-import org.broadinstitute.gatk.utils.Utils;
-import org.broadinstitute.gatk.utils.exceptions.UserException;
-
-import java.util.ArrayList;
-
-/**
- * Allows for reading in RefSeq information
- *
- * <p>
- * Parses a sorted UCSC RefSeq file (see below) into relevant features: the gene name, the unique gene name (if multiple transcrips get separate entries), exons, gene start/stop, coding start/stop,
- * strandedness of transcription. 
- * </p>
- *
- * <p>
- * Instructions for generating a RefSeq file for use with the RefSeq codec can be found on the documentation guide here
- * <a href="http://www.broadinstitute.org/gatk/guide/article?id=1329">http://www.broadinstitute.org/gatk/guide/article?id=1329</a>
- * </p>
- * <h2> Usage </h2>
- * The RefSeq Rod can be bound as any other rod, and is specified by REFSEQ, for example
- * <pre>
- * -refSeqBinding:REFSEQ /path/to/refSeq.txt
- * </pre>
- *
- * You will need to consult individual walkers for the binding name ("refSeqBinding", above)
- *
- * <h2>File format example</h2>
- * If you want to define your own file for use, the format is (tab delimited):
- * bin, name, chrom, strand, transcription start, transcription end, coding start, coding end, num exons, exon starts, exon ends, id, alt. name, coding start status (complete/incomplete), coding end status (complete,incomplete)
- * and exon frames, for example:
- * <pre>
- * 76 NM_001011874 1 - 3204562 3661579 3206102 3661429 3 3204562,3411782,3660632, 3207049,3411982,3661579, 0 Xkr4 cmpl cmpl 1,2,0,
- * </pre>
- * for more information see <a href="http://skip.ucsc.edu/cgi-bin/hgTables?hgsid=5651&hgta_doSchemaDb=mm8&hgta_doSchemaTable=refGene">here</a>
- * <p>
- *     
- * </p>
- *
- * @author Mark DePristo
- * @since 2010
- */
-public class RefSeqCodec extends AsciiFeatureCodec<RefSeqFeature> implements ReferenceDependentFeatureCodec {
-
-    /**
-     * The parser to use when resolving genome-wide locations.
-     */
-    private GenomeLocParser genomeLocParser;
-    private boolean zero_coding_length_user_warned = false;
-
-    public RefSeqCodec() {
-        super(RefSeqFeature.class);
-    }
-
-    /**
-     * Set the parser to use when resolving genetic data.
-     * @param genomeLocParser The supplied parser.
-     */
-    @Override
-    public void setGenomeLocParser(GenomeLocParser genomeLocParser) {
-        this.genomeLocParser =  genomeLocParser;
-    }
-
-    @Override
-    public Feature decodeLoc(final LineIterator lineIterator) {
-        final String line = lineIterator.next();
-        if (line.startsWith("#")) return null;
-        String fields[] = line.split("\t");
-        if (fields.length < 3) throw new TribbleException("RefSeq (decodeLoc) : Unable to parse line -> " + line + ", we expected at least 3 columns, we saw " + fields.length);
-        String contig_name = fields[2];
-        try {
-            return new RefSeqFeature(genomeLocParser.createGenomeLoc(contig_name, Integer.parseInt(fields[4])+1, Integer.parseInt(fields[5])));
-        } catch ( UserException.MalformedGenomeLoc e ) {
-            Utils.warnUser("RefSeq file is potentially incorrect, as some transcripts or exons have a negative length ("+fields[2]+")");
-            return null;
-        } catch ( NumberFormatException e ) {
-            throw new UserException.MalformedFile("Could not parse location from line: " + line);
-        }
-    }
-
-    /** Fills this object from a text line in RefSeq (UCSC) text dump file */
-    @Override
-    public RefSeqFeature decode(String line) {
-        if (line.startsWith("#")) return null;
-        String fields[] = line.split("\t");
-
-        // we reference postion 15 in the split array below, make sure we have at least that many columns
-        if (fields.length < 16) throw new TribbleException("RefSeq (decode) : Unable to parse line -> " + line + ", we expected at least 16 columns, we saw " + fields.length);
-        String contig_name = fields[2];
-        RefSeqFeature feature = new RefSeqFeature(genomeLocParser.createGenomeLoc(contig_name, Integer.parseInt(fields[4])+1, Integer.parseInt(fields[5])));
-
-        feature.setTranscript_id(fields[1]);
-        if ( fields[3].length()==1 && fields[3].charAt(0)=='+') feature.setStrand(1);
-        else if ( fields[3].length()==1 && fields[3].charAt(0)=='-') feature.setStrand(-1);
-        else throw new UserException.MalformedFile("Expected strand symbol (+/-), found: "+fields[3] + " for line=" + line);
-
-        int coding_start = Integer.parseInt(fields[6])+1;
-        int coding_stop = Integer.parseInt(fields[7]);
-
-        if ( coding_start > coding_stop ) {
-            if ( ! zero_coding_length_user_warned ) {
-                Utils.warnUser("RefSeq file contains transcripts with zero coding length. "+
-                        "Such transcripts will be ignored (this warning is printed only once)");
-                zero_coding_length_user_warned = true;
-            }
-            return null;
-        }
-
-        feature.setTranscript_interval(genomeLocParser.createGenomeLoc(contig_name, Integer.parseInt(fields[4])+1, Integer.parseInt(fields[5])));
-        feature.setTranscript_coding_interval(genomeLocParser.createGenomeLoc(contig_name, coding_start, coding_stop));
-        feature.setGene_name(fields[12]);
-        String[] exon_starts = fields[9].split(",");
-        String[] exon_stops = fields[10].split(",");
-        String[] eframes = fields[15].split(",");
-
-        if ( exon_starts.length != exon_stops.length )
-            throw new UserException.MalformedFile("Data format error: numbers of exon start and stop positions differ for line=" + line);
-        if ( exon_starts.length != eframes.length )
-            throw new UserException.MalformedFile("Data format error: numbers of exons and exon frameshifts differ for line=" + line);
-
-        ArrayList<GenomeLoc> exons = new ArrayList<GenomeLoc>(exon_starts.length);
-        ArrayList<Integer> exon_frames = new ArrayList<Integer>(eframes.length);
-
-        for ( int i = 0 ; i < exon_starts.length  ; i++ ) {
-            exons.add(genomeLocParser.createGenomeLoc(contig_name, Integer.parseInt(exon_starts[i])+1, Integer.parseInt(exon_stops[i]) ) );
-            exon_frames.add(Integer.decode(eframes[i]));
-        }
-
-        feature.setExons(exons);
-        feature.setExon_frames(exon_frames);
-        return feature;
-    }
-
-    @Override
-    public Object readActualHeader(LineIterator lineIterator) {
-        // No header for this format
-        return null;
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/codecs/refseq/RefSeqFeature.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/codecs/refseq/RefSeqFeature.java
deleted file mode 100644
index 226a353..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/codecs/refseq/RefSeqFeature.java
+++ /dev/null
@@ -1,323 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.codecs.refseq;
-
-import htsjdk.tribble.Feature;
-import org.broadinstitute.gatk.engine.refdata.utils.GATKFeature;
-import org.broadinstitute.gatk.engine.refdata.utils.RODRecordList;
-import org.broadinstitute.gatk.utils.GenomeLoc;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-
-import java.util.*;
-
-/**
- * the ref seq feature
- */
-public class RefSeqFeature implements Transcript, Feature {
-
-    private String transcript_id;
-    private int strand;
-    private GenomeLoc transcript_interval;
-    private GenomeLoc transcript_coding_interval;
-    private List<GenomeLoc> exons;
-    private String gene_name;
-    private List<Integer> exon_frames;
-    private String name;
-
-    public RefSeqFeature(GenomeLoc genomeLoc) {
-        this.transcript_interval = genomeLoc;
-    }
-
-    /** Returns id of the transcript (RefSeq NM_* id) */
-    public String getTranscriptId() { return transcript_id; }
-
-    /** Returns coding strand of the transcript, 1 or -1 for positive or negative strand, respectively */
-    public int getStrand() { return strand; }
-
-    /** Returns transcript's full genomic interval (includes all exons with UTRs) */
-    public GenomeLoc getLocation() {
-        return transcript_interval;
-    }
-
-    /** Returns genomic interval of the coding sequence (does not include UTRs, but still includes introns, since it's a single interval on the DNA) */
-    public GenomeLoc getCodingLocation() { return transcript_coding_interval; }
-
-    /** Name of the gene this transcript corresponds to (NOT gene id such as Entrez etc) */
-    public String getGeneName() { return gene_name; }
-
-    /** Number of exons in this transcript */
-    public int getNumExons() { return exons.size(); }
-
-    /** Genomic location of the n-th exon; throws an exception if n is out of bounds */
-    public GenomeLoc getExonLocation(int n) {
-        if ( n >= exons.size() || n < 0 ) throw new ReviewedGATKException("Index out-of-bounds. Transcript has " + exons.size() +" exons; requested: "+n);
-        return exons.get(n);
-    }
-
-    /** Returns the list of all exons in this transcript, as genomic intervals */
-    public List<GenomeLoc> getExons() { return exons; }
-
-    /** Returns all exons falling ::entirely:: inside an interval **/
-    public List<GenomeLoc> getExonsInInterval( GenomeLoc interval ) {
-        List<GenomeLoc> relevantExons = new ArrayList<GenomeLoc>(exons.size());
-        for ( GenomeLoc exon : getExons() ) {
-            if ( interval.containsP(exon) ) {
-                relevantExons.add(exon);
-            }
-        }
-
-        return relevantExons;
-    }
-
-    /** convenience method; returns the numbers of the exons in the interval **/
-    public List<Integer> getExonNumbersInInterval( GenomeLoc interval ) {
-        List<Integer> numbers = new ArrayList<Integer>();
-        int iNo = 0;
-        for ( GenomeLoc exon : getExons() ) {
-            if ( interval.containsP(exon) ) {
-                numbers.add(iNo);
-            }
-            iNo++;
-        }
-
-        return numbers;
-    }
-
-    public String getTranscriptUniqueGeneName() {
-        return String.format("%s(%s)",getGeneName(),getTranscriptId());
-    }
-
-    public String getOverlapString(GenomeLoc position) {
-        boolean is_exon = false;
-        StringBuilder overlapString = new StringBuilder();
-        int exonNo = 1;
-
-        for ( GenomeLoc exon : exons ) {
-            if ( exon.containsP(position) ) {
-                overlapString.append(String.format("exon_%d",exonNo));
-                is_exon = true;
-                break;
-            }
-            exonNo ++;
-        }
-
-        if ( ! is_exon ) {
-            if ( overlapsCodingP(position) ) {
-                overlapString.append("Intron");
-            } else {
-                overlapString.append("UTR");
-            }
-        }
-
-        return overlapString.toString();
-    }
-
-    ArrayList<GenomeLoc> exonInRefOrderCache = null;
-
-    public Integer getSortedOverlapInteger(GenomeLoc position) {
-        int exonNo = -1;
-        ArrayList<GenomeLoc> exonsInReferenceOrder = exonInRefOrderCache != null ? exonInRefOrderCache : new ArrayList<GenomeLoc>(exons);
-        if ( exonInRefOrderCache == null ) {
-            Collections.sort(exonsInReferenceOrder);
-        }
-        exonInRefOrderCache = exonsInReferenceOrder;
-        for ( GenomeLoc exon : exonsInReferenceOrder ) {
-            if ( exon.overlapsP(position) ) {
-                return ++exonNo;
-            }
-            ++exonNo;
-        }
-
-        return -1;
-    }
-
-    public GenomeLoc getSortedExonLoc(int offset) {
-        ArrayList<GenomeLoc> exonsInReferenceOrder = exonInRefOrderCache != null ? exonInRefOrderCache : new ArrayList<GenomeLoc>(exons);
-        if ( exonInRefOrderCache == null ) {
-            Collections.sort(exonsInReferenceOrder);
-        }
-        exonInRefOrderCache = exonsInReferenceOrder;
-        return exonsInReferenceOrder.get(offset);
-    }
-
-    /** Returns true if the specified interval 'that' overlaps with the full genomic interval of this transcript */
-    public boolean overlapsP (GenomeLoc that) {
-        return getLocation().overlapsP(that);
-    }
-
-    /** Returns true if the specified interval 'that' overlaps with the coding genomic interval of this transcript.
-     * NOTE: since "coding interval" is still a single genomic interval, it will not contain UTRs of the outermost exons,
-     * but it will still contain introns and/or exons internal to this genomic locus that are not spliced into this transcript.
-     * @see #overlapsExonP
-     */
-    public boolean overlapsCodingP (GenomeLoc that) {
-        return transcript_coding_interval.overlapsP(that);
-    }
-
-    /** Returns true if the specified interval 'that' overlaps with any of the exons actually spliced into this transcript */
-    public boolean overlapsExonP (GenomeLoc that) {
-        for ( GenomeLoc e : exons ) {
-            if ( e.overlapsP(that) ) return true;
-        }
-        return false;
-    }
-    public String toString() {
-            StringBuilder b = new StringBuilder("000\t"); // first field is unused but required in th ecurrent format; just set to something
-            b.append(transcript_id);   // #1
-            b.append('\t');
-            b.append(getLocation().getContig()); // #2
-            b.append('\t');
-            b.append( (strand==1?'+':'-') ); // #3
-            b.append('\t');
-            b.append( (getLocation().getStart() - 1) ); // #4
-            b.append('\t');
-            b.append( getLocation().getStop());  // #5
-            b.append('\t');
-            b.append( (transcript_coding_interval.getStart() - 1) ); // #6
-            b.append('\t');
-            b.append( transcript_coding_interval.getStop());  // #7
-            b.append('\t');
-            b.append(exons.size()); // #8
-            b.append('\t');
-            for ( GenomeLoc loc : exons ) { b.append( (loc.getStart()-1) ); b.append(','); } // #9
-            b.append('\t');
-            for ( GenomeLoc loc : exons ) { b.append( loc.getStop() ); b.append(','); } // #10
-            b.append("\t0\t"); // # 11 - unused?
-            b.append(gene_name); // # 12
-            b.append("\tcmpl\tcmpl\t"); // #13, #14 - unused?
-            for ( Integer f : exon_frames ) { b.append( f ); b.append(','); } // #15
-
-
-            return b.toString();
-        }
-
-        /** Convenience method, which is packaged here for a lack of better place; it is indeed closely related to
-         * rodRefSeq though: takes list of rods (transcripts) overlapping with a given position and determines whether
-         * this position is fully whithin an exon of <i>any</i> of those transcripts. Passing null is safe (will return false).
-         * NOTE: position can be still within a UTR, see #isCoding
-         * @return true if it's an exon
-         */
-        public static boolean isExon(RODRecordList l) {
-
-            if ( l == null ) return false;
-
-            GenomeLoc loc = l.getLocation();
-
-            for ( GATKFeature t : l ) {
-                if ( ((RefSeqFeature)t.getUnderlyingObject()).overlapsExonP(loc) ) return true;
-            }
-            return false;
-
-        }
-
-        /** Convenience method, which is packaged here for a lack of better place; it is indeed closely related to
-         * rodRefSeq though: takes list of rods (transcripts) overlapping with a given position and determines whether
-         * this position is fully whithin a coding region of <i>any</i> of those transcripts.
-         * Passing null is safe (will return false).
-         * NOTE: "coding" interval is defined as a single genomic interval, so it
-         * does not include the UTRs of the outermost exons, but it includes introns between exons spliced into a
-         * transcript, or internal exons that are not spliced into a given transcript. To check that a position is
-         * indeed within an exon but not in UTR, use #isCodingExon().
-         * @return
-         */
-        public static boolean isCoding(RODRecordList l) {
-
-            if ( l == null ) return false;
-
-            GenomeLoc loc = l.getLocation();
-
-            for ( GATKFeature t : l ) {
-                if ( ((RefSeqFeature)t.getUnderlyingObject()).overlapsCodingP(loc) ) return true;
-            }
-            return false;
-
-        }
-
-        /** Convenience method, which is packaged here for a lack of better place; it is indeed closely related to
-         * rodRefSeq though: takes list of rods (transcripts) overlapping with a given position and determines whether
-         * this position is fully whithin a coding exon portion (i.e. true coding sequence) of <i>any</i> of those transcripts.
-         * Passing null is safe (will return false). In other words, this method returns true if the list contains a transcript,
-         * for which the current position is within an exon <i>and</i> within a coding interval simultaneously.
-         * @return
-         */
-        public static boolean isCodingExon(RODRecordList l) {
-
-            if ( l == null ) return false;
-
-            GenomeLoc loc = l.getLocation();
-
-            for ( GATKFeature t : l ) {
-                if ( ((RefSeqFeature)t.getUnderlyingObject()).overlapsCodingP(loc) && ((RefSeqFeature)t.getUnderlyingObject()).overlapsExonP(loc) ) return true;
-            }
-            return false;
-
-        }
-
-
-    public void setTranscript_id(String transcript_id) {
-        this.transcript_id = transcript_id;
-    }
-
-    public void setStrand(int strand) {
-        this.strand = strand;
-    }
-
-    public void setTranscript_interval(GenomeLoc transcript_interval) {
-        this.transcript_interval = transcript_interval;
-    }
-
-    public void setTranscript_coding_interval(GenomeLoc transcript_coding_interval) {
-        this.transcript_coding_interval = transcript_coding_interval;
-    }
-
-    public void setExons(List<GenomeLoc> exons) {
-        this.exons = exons;
-    }
-
-    public void setGene_name(String gene_name) {
-        this.gene_name = gene_name;
-    }
-
-    public void setExon_frames(List<Integer> exon_frames) {
-        this.exon_frames = exon_frames;
-    }
-
-    public void setName(String name) {
-        this.name = name;
-    }
-
-    public String getChr() {
-        return transcript_interval.getContig();
-    }
-
-    public int getStart() {
-        return transcript_interval.getStart();
-    }
-
-    public int getEnd() {
-        return transcript_interval.getStop();
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/codecs/refseq/Transcript.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/codecs/refseq/Transcript.java
deleted file mode 100644
index 1671c79..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/codecs/refseq/Transcript.java
+++ /dev/null
@@ -1,78 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.codecs.refseq;
-
-import org.broadinstitute.gatk.utils.GenomeLoc;
-import org.broadinstitute.gatk.utils.HasGenomeLocation;
-
-import java.util.List;
-
-/**
- * Created by IntelliJ IDEA.
- * User: asivache
- * Date: Sep 22, 2009
- * Time: 5:22:30 PM
- * To change this template use File | Settings | File Templates.
- */
-public interface Transcript extends HasGenomeLocation {
-
-    /** Returns id of the transcript (RefSeq NM_* id) */
-    public String getTranscriptId();
-    /** Returns coding strand of the transcript, 1 or -1 for positive or negative strand, respectively */
-    public int getStrand();
-    /** Returns transcript's full genomic interval (includes all exons with UTRs) */
-    public GenomeLoc getLocation();
-    /** Returns genomic interval of the coding sequence (does not include
-     * UTRs, but still includes introns, since it's a single interval on the DNA)
-     */
-    public GenomeLoc getCodingLocation();
-    /** Name of the gene this transcript corresponds to (typically NOT gene id such as Entrez etc,
-     * but the implementation can decide otherwise)
-     */
-    public String getGeneName();
-    /** Number of exons in this transcript */
-    public int getNumExons();
-    /** Genomic location of the n-th exon; expected to throw an exception (runtime) if n is out of bounds */
-    public GenomeLoc getExonLocation(int n);
-
-    /** Returns the list of all exons in this transcript, as genomic intervals */
-    public List<GenomeLoc> getExons();
-
-    /** Returns true if the specified interval 'that' overlaps with the full genomic interval of this transcript */
-    public boolean overlapsP (GenomeLoc that);
-
-    /** Returns true if the specified interval 'that' overlaps with the coding genomic interval of this transcript.
-      * NOTE: since "coding interval" is still a single genomic interval, it will not contain UTRs of the outermost exons,
-      * but it will still contain introns and/or exons internal to this genomic locus that are not spliced into this transcript.
-      * @see #overlapsExonP
-      */
-    public boolean overlapsCodingP (GenomeLoc that);
-
-    /** Returns true if the specified interval 'that' overlaps with any of the exons actually spliced into this transcript */
-    public boolean overlapsExonP (GenomeLoc that);
-
-
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/codecs/sampileup/SAMPileupCodec.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/codecs/sampileup/SAMPileupCodec.java
deleted file mode 100644
index 8c43854..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/codecs/sampileup/SAMPileupCodec.java
+++ /dev/null
@@ -1,354 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.codecs.sampileup;
-
-import htsjdk.tribble.AsciiFeatureCodec;
-import htsjdk.tribble.exception.CodecLineParsingException;
-import htsjdk.tribble.readers.LineIterator;
-import htsjdk.tribble.util.ParsingUtils;
-
-import java.util.ArrayList;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
-
-import static org.broadinstitute.gatk.utils.codecs.sampileup.SAMPileupFeature.VariantType;
-
-/**
- * Decoder for SAM pileup data.
- *
- * <p>
- *     From the <a href="http://samtools.sourceforge.net/">SAMTools project documentation</a>:
- * </p>
- * <p>The Pileup format was first used by Tony Cox and Zemin Ning at
- *     the Sanger Institute. It describes the base-pair information at each chromosomal position. This format
- *     facilitates SNP/indel calling and brief alignment viewing by eye. Note that the pileup program has been replaced
- *     in Samtools by mpileup, which produces a slightly different output format by default.
- * </p>
-
- * <h3>Format</h3>
- * <p>There are two versions of the original pileup format: the current 6-column format produced by Samtools, and the old
- * 10/13-column "consensus" format which could be obtained by using the -c argument, now deprecated. </p>
- * <h4>Simple pileup: 6-column format</h4>
- * <p>
- *     Each line consists of chromosome, 1-based coordinate, reference base, the
- *     number of reads covering the site, read bases and base qualities. At the
- *     read base column, a dot stands for a match to the reference base on the
- *     forward strand, a comma for a match on the reverse strand, `ACGTN' for a mismatch
- *     on the forward strand and `acgtn' for a mismatch on the reverse strand.
- *     A pattern `\+[0-9]+[ACGTNacgtn]+' indicates there is an insertion between
- *     this reference position and the next reference position. The length of the
- *     insertion is given by the integer in the pattern, followed by the inserted sequence.
- * </p>
- * <pre>
- *     seq1 272 T 24  ,.$.....,,.,.,...,,,.,..^+. <<<+;<<<<<<<<<<<=<;<;7<&
- *     seq1 273 T 23  ,.....,,.,.,...,,,.,..A <<<;<<<<<<<<<3<=<<<;<<+
- *     seq1 274 T 23  ,.$....,,.,.,...,,,.,...    7<7;<;<<<<<<<<<=<;<;<<6
- *     seq1 275 A 23  ,$....,,.,.,...,,,.,...^l.  <+;9*<<<<<<<<<=<<:;<<<<
- *     seq1 276 G 22  ...T,,.,.,...,,,.,....  33;+<<7=7<<7<&<<1;<<6<
- *     seq1 277 T 22  ....,,.,.,.C.,,,.,..G.  +7<;<<<<<<<&<=<<:;<<&<
- *     seq1 278 G 23  ....,,.,.,...,,,.,....^k.   %38*<<;<7<<7<=<<<;<<<<<
- *     seq1 279 C 23  A..T,,.,.,...,,,.,..... ;75&<<<<<<<<<=<<<9<<:<<
- * </pre>
- * <p>
- *     See the <a href="http://samtools.sourceforge.net/pileup.shtml">Pileup format documentation</a> for more details.
- * </p>
- *
- * <h4>Consensus pileup: 10/13-column format</h4>
- * <p>The "consensus" or extended pileup consists of the following:
- *  <ul>
- *      <li>original 6 columns as described above</li>
- *      <li>4 extra columns representing consensus values (consensus base, consensus quality, variant quality and maximum mapping quality of the
- * reads covering the sites) for all sites, inserted before the bases and quality strings</li>
- *      <li>3 extra columns indicating counts of reads supporting indels (just for indel sites)</li>
- *  </ul>
- * </p>
- * <h4>Example of consensus pileup for SNP or non-variant sites</h4>
- * <pre>
- *     seq1  60  T  T  66  0  99  13  ...........^~.^~.   9<<55<;<<<<<<
- *     seq1  61  G  G  72  0  99  15  .............^~.^y. (;975&;<<<<<<<<
- *     seq1  62  T  T  72  0  99  15  .$..............    <;;,55;<<<<<<<<
- *     seq1  63  G  G  72  0  99  15  .$.............^~.  4;2;<7:+<<<<<<<
- *     seq1  64  G  G  69  0  99  14  ..............  9+5<;;;<<<<<<<
- *     seq1  65  A  A  69  0  99  14  .$............. <5-2<;;<<<<<<;
- *     seq1  66  C  C  66  0  99  13  .............   &*<;;<<<<<<8<
- *     seq1  67  C  C  69  0  99  14  .............^~.    ,75<.4<<<<<-<<
- *     seq1  68  C  C  69  0  99  14  ..............  576<;7<<<<<8<< *
- * </pre>
- *
- * <h4>Example of consensus pileup for indels</h4>
- * <pre>
- *     Escherichia_coli_K12	3995037	*	*\/*	430	0	37	144	*	+A	143	1	0
- *     Escherichia_coli_K12	3995279	*	*\/*	202	0	36	68	*	+A	67	1	0
- *     Escherichia_coli_K12	3995281	*	*\/*	239	0	36	67	*	-CG	66	1	0
- * </pre>
- * <p>
- *     See <a href="http://samtools.sourceforge.net/cns0.shtml/">Consensus pileup format (deprecated)</a> for more details.
- * </p>
- *
- * <h3>Caveat</h3>
- * <p>Handling of indels is questionable at the moment. Proceed with care.</p>
- *
- *
- * @author Matt Hanna, Geraldine VdAuwera
- * @since 2014
- */
-public class SAMPileupCodec extends AsciiFeatureCodec<SAMPileupFeature> {
-    // number of tokens expected (6 or 10 are valid, anything else is wrong)
-    private static final int basicTokenCount = 6;
-    private static final int consensusSNPTokenCount = 10;
-    private static final int consensusIndelTokenCount = 13;
-    private static final char fldDelim = '\t';
-    // allocate once and don't ever bother creating them again:
-    private static final String baseA = "A";
-    private static final String baseC = "C";
-    private static final String baseG = "G";
-    private static final String baseT = "T";
-    private static final String emptyStr = ""; // we will use this for "reference" allele in insertions
-
-    public SAMPileupCodec() {
-        super(SAMPileupFeature.class);
-    }
-
-    public SAMPileupFeature decode(String line) {
-        //+1 because we want to know if we have more than the max
-        String[] tokens = new String[consensusIndelTokenCount+1];
-
-        // split the line
-        final int count = ParsingUtils.split(line,tokens,fldDelim);
-
-        SAMPileupFeature feature = new SAMPileupFeature();
-
-        /**
-         * Tokens 0, 1, 2 are the same for both formats so they will be interpreted without differentiation.
-         * The 10/13-format has 4 tokens inserted after token 2 compared to the 6-format, plus 3 more tokens added at
-         * the end for indels. We are currently not making any use of the extra indel tokens.
-         *
-         * Any token count other than basicTokenCount, consensusSNPTokenCount or consensusIndelTokenCount is wrong.
-         */
-        final String observedString, bases, quals;
-
-        feature.setChr(tokens[0]);
-        feature.setStart(Integer.parseInt(tokens[1]));
-
-        if(tokens[2].length() != 1)  {
-            throw new CodecLineParsingException("The SAM pileup line had unexpected base " + tokens[2] + " on line = " + line);
-        }
-        feature.setRef(tokens[2].charAt(0));
-
-        switch (count) {
-            case basicTokenCount:
-                bases = tokens[4];
-                quals = tokens[5];
-                // parsing is pretty straightforward for 6-col format
-                if ( feature.getRef() == '*' ) {   // this indicates an indel -- but it shouldn't occur with vanilla 6-col format
-                    throw new CodecLineParsingException("Found an indel on line = " + line + " but it shouldn't happen in simple pileup format");
-                } else {
-                    parseBasesAndQuals(feature, bases, quals);
-                    feature.setRefBases(tokens[2].toUpperCase());
-                    feature.setEnd(feature.getStart());
-                }
-                break;
-            case consensusSNPTokenCount: // pileup called a SNP or a reference base
-                observedString = tokens[3].toUpperCase();
-                feature.setFWDAlleles(new ArrayList<String>(2));
-                feature.setConsensusConfidence(Double.parseDouble(tokens[4]));
-                feature.setVariantConfidence(Double.parseDouble(tokens[5]));
-                bases = tokens[8];
-                quals = tokens[9];
-                // confirm that we have a non-variant, not a mis-parsed indel
-                if ( feature.getRef() == '*' ) {
-                    throw new CodecLineParsingException("Line parsing of " + line + " says we have a SNP or non-variant but the ref base is '*', which indicates an indel");
-                }
-                // Parse the SNP or non-variant
-                parseBasesAndQuals(feature, bases, quals);
-                if ( observedString.length() != 1 ) {
-                    throw new CodecLineParsingException( "Line parsing of " + line + " says we have a SNP or non-variant but the genotype token is not a single letter: " + observedString);
-                }
-                feature.setRefBases(tokens[2].toUpperCase());
-                feature.setEnd(feature.getStart());
-
-                char ch = observedString.charAt(0);
-
-                switch ( ch ) {  // record alleles (decompose ambiguous base codes)
-                    case 'A': feature.getFWDAlleles().add(baseA); feature.getFWDAlleles().add(baseA); break;
-                    case 'C': feature.getFWDAlleles().add(baseC); feature.getFWDAlleles().add(baseC); break;
-                    case 'G': feature.getFWDAlleles().add(baseG); feature.getFWDAlleles().add(baseG); break;
-                    case 'T': feature.getFWDAlleles().add(baseT); feature.getFWDAlleles().add(baseT); break;
-                    case 'M': feature.getFWDAlleles().add(baseA); feature.getFWDAlleles().add(baseC); break;
-                    case 'R': feature.getFWDAlleles().add(baseA); feature.getFWDAlleles().add(baseG); break;
-                    case 'W': feature.getFWDAlleles().add(baseA); feature.getFWDAlleles().add(baseT); break;
-                    case 'S': feature.getFWDAlleles().add(baseC); feature.getFWDAlleles().add(baseG); break;
-                    case 'Y': feature.getFWDAlleles().add(baseC); feature.getFWDAlleles().add(baseT); break;
-                    case 'K': feature.getFWDAlleles().add(baseG); feature.getFWDAlleles().add(baseT); break;
-                }
-                if ( feature.getFWDAlleles().get(0).charAt(0) == feature.getRef() && feature.getFWDAlleles().get(1).charAt(0) == feature.getRef() ) feature.setVariantType(VariantType.NONE);
-                else {
-                    // 	we know that at least one allele is non-ref;
-                    // if one is ref and the other is non-ref, or if both are non ref but they are the same (i.e.
-                    // homozygous non-ref), we still have 2 allelic variants at the site (e.g. one ref and one nonref)
-                    feature.setVariantType(VariantType.SNP);
-                    if ( feature.getFWDAlleles().get(0).charAt(0) == feature.getRef() ||
-                            feature.getFWDAlleles().get(1).charAt(0) == feature.getRef() ||
-                            feature.getFWDAlleles().get(0).equals(feature.getFWDAlleles().get(1))
-                            ) feature.setNumNonRef(1);
-                    else feature.setNumNonRef(2); // if both observations differ from ref and they are not equal to one another, then we get multiallelic site...
-                }
-                break;
-            case consensusIndelTokenCount:
-                observedString = tokens[3].toUpperCase();
-                feature.setFWDAlleles(new ArrayList<String>(2));
-                feature.setConsensusConfidence(Double.parseDouble(tokens[4]));
-                feature.setVariantConfidence(Double.parseDouble(tokens[5]));
-                // confirm that we have an indel, not a mis-parsed SNP or non-variant
-                if ( feature.getRef() != '*' ) {
-                    throw new CodecLineParsingException("Line parsing of " + line + " says we have an indel but the ref base is not '*'");
-                }
-                // Parse the indel
-                parseIndels(observedString,feature) ;
-                if ( feature.isDeletion() ) feature.setEnd(feature.getStart()+feature.length()-1);
-                else feature.setEnd(feature.getStart()); // if it's not a deletion and we are biallelic, this has got to be an insertion; otherwise the state is inconsistent!!!!
-                break;
-            default:
-                throw new CodecLineParsingException("The SAM pileup line didn't have the expected number of tokens " +
-                    "(expected = " + basicTokenCount + " (basic pileup), " + consensusSNPTokenCount +
-                    " (consensus pileup for a SNP or non-variant site) or " + consensusIndelTokenCount +
-                    " (consensus pileup for an indel); saw = " + count + " on line = " + line + ")");
-        }
-        return feature;
-    }
-
-    @Override
-    public Object readActualHeader(LineIterator lineIterator) {
-        // No header for this format
-        return null;
-    }
-
-    private void parseIndels(String genotype,SAMPileupFeature feature) {
-        String [] obs = genotype.split("/"); // get observations, now need to tinker with them a bit
-
-        // if reference allele is among the observed alleles, we will need to take special care of it since we do not have direct access to the reference;
-        // if we have an insertion, the "reference" allele is going to be empty; if it it is a deletion, we will deduce the "reference allele" bases
-        // from what we have recorded for the deletion allele (e.g. "-CAC")
-        boolean hasRefAllele = false;
-
-        for ( int i = 0 ; i < obs.length ; i++ ) {
-            if ( obs[i].length() == 1 && obs[i].charAt(0) == '*'  ) {
-                hasRefAllele = true;
-                feature.getFWDAlleles().add(emptyStr);
-                continue;
-            }
-
-            String varBases = obs[i].toUpperCase();
-
-            switch ( obs[i].charAt(0) )  {
-                case '+':
-                    if (!feature.isReference() && !feature.isInsertion()) feature.setVariantType(VariantType.INDEL);
-                    else feature.setVariantType(VariantType.INSERTION);
-                    feature.setRefBases(emptyStr);
-                    break;
-                case '-' :
-                    if (!feature.isReference() && !feature.isDeletion()) feature.setVariantType(VariantType.INDEL);
-                    else feature.setVariantType(VariantType.DELETION);
-                    feature.setRefBases(varBases); // remember what was deleted, this will be saved as "reference allele"
-                    break;
-                default: throw new CodecLineParsingException("Can not interpret observed indel allele record: "+genotype);
-            }
-            feature.getFWDAlleles().add(varBases);
-            feature.setLength(obs[i].length()-1); // inconsistent for non-biallelic indels!!
-        }
-        if ( hasRefAllele ) {
-            // we got at least one ref. allele (out of two recorded)
-            if (feature.isReference()) { // both top theories are actually ref allele;
-                feature.setNumNonRef(0); // no observations of non-reference allele at all
-                feature.setRefBases(emptyStr);
-            } else {
-                feature.setNumNonRef(1); // hasRefAllele = true, so one allele was definitely ref, hence there is only one left
-            }
-        } else {
-            // we observe two non-ref alleles; they better be the same variant, otherwise the site is not bi-allelic and at the moment we
-            // fail to set data in a consistent way.
-            if ( feature.getFWDAlleles().get(0).equals(feature.getFWDAlleles().get(1))) feature.setNumNonRef(1);
-            else feature.setNumNonRef(2);
-        }
-        // DONE with indels
-
-    }
-
-    private void parseBasesAndQuals(SAMPileupFeature feature, final String bases, final String quals)
-    {
-        //System.out.printf("%s%n%s%n", bases, quals);
-
-        // needs to convert the base string with its . and , to the ref base
-        StringBuilder baseBuilder = new StringBuilder();
-        StringBuilder qualBuilder = new StringBuilder();
-        boolean done = false;
-        for ( int i = 0, j = 0; i < bases.length() && ! done; i++ ) {
-            //System.out.printf("%d %d%n", i, j);
-            char c = (char)bases.charAt(i);
-
-            switch ( c ) {
-                case '.':   // matches reference
-                case ',':   // matches reference
-                    baseBuilder.append(feature.getRef());
-                    qualBuilder.append(quals.charAt(j++));
-                    break;
-                case '$':   // end of read
-                    break;
-                case '*':   // end of indel?
-                    j++;
-                    break;
-                case '^':   // mapping quality
-                    i++;
-                    break;
-                case '+':   // start of indel
-                case '-':   // start of indel
-                    final Pattern regex = Pattern.compile("([0-9]+).*");             // matches case 1
-                    final String rest = bases.substring(i+1);
-                    //System.out.printf("sub is %s%n", rest);
-                    Matcher match = regex.matcher(rest);
-                    if ( ! match.matches() ) {
-                        if ( feature.getRef() != '*' )
-                            throw new CodecLineParsingException("Bad pileup format: " + bases + " at position " + i);
-                        done = true;
-                    }
-                    else {
-                        String g = match.group(1);
-                        //System.out.printf("group is %d, match is %s%n", match.groupCount(), g);
-                        int l = Integer.parseInt(g);
-                        i += l + g.length();    // length of number + that many bases + +/- at the start (included in the next i++)
-                        //System.out.printf("remaining is %d => %s%n", l, bases.substring(i+1));
-                    }
-                    break;
-                default:   // non reference base
-                    baseBuilder.append(c);
-                    qualBuilder.append(quals.charAt(j++));
-            }
-        }
-
-        feature.setPileupBases(baseBuilder.toString());
-        feature.setPileupQuals(qualBuilder.toString());
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/codecs/sampileup/SAMPileupFeature.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/codecs/sampileup/SAMPileupFeature.java
deleted file mode 100644
index 89f168b..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/codecs/sampileup/SAMPileupFeature.java
+++ /dev/null
@@ -1,276 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.codecs.sampileup;
-
-import htsjdk.samtools.util.StringUtil;
-import htsjdk.tribble.Feature;
-
-import java.util.List;
-
-/**
- * A tribble feature representing a SAM pileup.
- *
- * Allows intake of both simple (6-column) or extended/consensus (10/13-column) pileups. Simple pileup features will
- * contain only basic information, no observed alleles or variant/genotype inferences, and so shouldn't be used as
- * input for analysis that requires that information.
- *
- * @author mhanna
- * @version 0.1
- */
-public class SAMPileupFeature implements Feature {
-    public enum VariantType { NONE, SNP, INSERTION, DELETION, INDEL }; 
-
-    private String contig;            // genomic location of this genotyped site
-    private int start;
-    private int stop;
-
-    private char refBaseChar; // what we have set for the reference base (is set to a '*' for indel!)
-    private String refBases;        // the reference base sequence according to NCBI; single base for point mutations, deleted bases for  deletions, empty string for insertions
-
-    private String pileupQuals;     // the read base qualities
-    private String pileupBases;     // the read bases themselves
-
-    private List<String> observedAlleles = null;    // The sequences of the observed alleles (e.g. {"A","C"} for point mutation or {"","+CC"} for het. insertion
-    private VariantType varType = VariantType.NONE;
-    private int nNonref = 0; // number of non-reference alleles observed
-    private int eventLength = 0; // number of inserted or deleted bases    
-
-    private double consensusScore = 0;
-    private double variantScore = 0;
-
-    /**
-     * create the pileup feature.  Default protection so that only other classes in this package can create it.
-     */
-    SAMPileupFeature() {}
-
-    public String getChr() {
-        return contig;
-    }
-
-    protected void setChr(String chr) {
-        this.contig = chr;
-    }
-
-    public int getStart() {
-        return start;
-    }
-
-    protected void setStart(int start) {
-        this.start = start;
-    }
-
-    public int getEnd() {
-        return stop;
-    }
-
-    protected void setEnd(int end) {
-        this.stop = end;
-    }
-
-    public String getQualsAsString()        { return pileupQuals; }
-
-    protected void setPileupQuals(String pileupQuals) {
-        this.pileupQuals = pileupQuals;
-    }
-
-    /** Returns reference base for point genotypes or '*' for indel genotypes, as a char.
-     *
-     */
-    public char getRef()            { return refBaseChar; }
-
-    protected void setRef(char ref) {
-        this.refBaseChar = ref;
-    }
-
-    public int size()               { return pileupQuals.length(); }
-
-    /** Returns pile of observed bases over the current genomic location.
-     *
-     */
-    public String getBasesAsString()        { return pileupBases; }
-
-    protected void setPileupBases(String pileupBases) {
-        this.pileupBases = pileupBases;
-    }
-
-    /** Returns formatted pileup string for the current genomic location as
-     * "location: reference_base observed_base_pile observed_qual_pile"
-     */
-    public String getPileupString()
-    {
-        if(start == stop)
-            return String.format("%s:%d: %s %s %s", getChr(), getStart(), getRef(), getBasesAsString(), getQualsAsString());
-        else
-            return String.format("%s:%d-%d: %s %s %s", getChr(), getStart(), getEnd(), getRef(), getBasesAsString(), getQualsAsString());
-    }
-
-    /**
-     * Gets the bases in byte array form.
-     * @return byte array of the available bases.
-     */
-    public byte[] getBases() {
-        return StringUtil.stringToBytes(getBasesAsString());
-    }
-
-    /**
-     * Gets the Phred base qualities without ASCII offset.
-     * @return Phred base qualities.
-     */
-    public byte[] getQuals() {
-        byte[] quals = StringUtil.stringToBytes(getQualsAsString());
-        for(int i = 0; i < quals.length; i++) quals[i] -= 33;
-        return quals;
-    }
-
-    /** Returns bases in the reference allele as a String. For point genotypes, the string consists of a single
-     * character (reference base). For indel genotypes, the string is empty for insertions into
-     * the reference, or consists of deleted bases for deletions.
-     *
-     * @return reference allele, forward strand
-     */
-    public String getFWDRefBases() {
-        return refBases;
-    }
-
-    protected void setRefBases(String refBases) {
-        this.refBases = refBases;
-    }
-
-    public List<String> getFWDAlleles()  {
-        return observedAlleles;
-    }
-
-    protected void setFWDAlleles(List<String> alleles) {
-        this.observedAlleles = alleles;
-    }
-
-    // ----------------------------------------------------------------------
-    //
-    // What kind of variant are we?
-    //
-    // ----------------------------------------------------------------------
-    public boolean isSNP() { return varType == VariantType.SNP; }
-    public boolean isInsertion() { return varType == VariantType.INSERTION; }
-    public boolean isDeletion() { return varType == VariantType.DELETION ; }
-    public boolean isIndel() { return isInsertion() || isDeletion() || varType == VariantType.INDEL; }
-    public boolean isReference()  { return varType == VariantType.NONE; }
-
-    protected void setVariantType(VariantType variantType) {
-        this.varType = variantType;
-    }
-
-    public boolean isHom() {
-    	// implementation-dependent: here we use the fact that for ref and snps we actually use fixed static strings to remember the genotype
-    	if ( ! isIndel() ) return ( observedAlleles.get(0).equals(observedAlleles.get(1)) );
-    	return ( isInsertion() || isDeletion() ) && observedAlleles.get(0).equals(observedAlleles.get(1) );
-    }
-
-    public boolean isHet() {
-    	// implementation-dependent: here we use the fact that for ref and snps we actually use fixed static strings to remember the genotype
-    	if ( ! isIndel() ) return ( !(observedAlleles.get(0).equals(observedAlleles.get(1))) );
-    	return isIndel() || ( ! observedAlleles.get(0).equals(observedAlleles.get(1) ) );
-    }
-
-    public double getVariantConfidence() {
-        return variantScore;
-    }
-
-    protected void setVariantConfidence(double variantScore) {
-        this.variantScore = variantScore;
-    }
-
-    public boolean isBiallelic() {
-        return nNonref  < 2;
-    }
-
-    protected void setNumNonRef(int nNonref) {
-        this.nNonref = nNonref;
-    }
-
-    public double getConsensusConfidence() {
-        return consensusScore;
-    }
-
-    protected void setConsensusConfidence(double consensusScore) {
-        this.consensusScore = consensusScore;
-    }
-
-    public int length() {
-        return eventLength;
-    }
-
-    protected void setLength(int eventLength) {
-        this.eventLength = eventLength;
-    }
-
-	public boolean isIndelGenotype() {
-		return refBaseChar == '*';
-	}
-
-
-	public boolean isPointGenotype() {
-		return ! isIndelGenotype();
-	}
-
-	/** Implements method required by GenotypeList interface. If this object represents
-	 * an indel genotype, then it returns itself through this method. If this object is a
-	 * point genotype, this method returns null.
-	 * @return
-	 */
-	public SAMPileupFeature getIndelGenotype() {
-		if ( isIndelGenotype() ) return this;
-		else return null;
-	}
-
-	/** Implements method required by GenotypeList interface. If this object represents
-	 * a point genotype, then it returns itself through this method. If this object is an
-	 * indel genotype, this method returns null.
-	 * @return
-	 */
-	public SAMPileupFeature getPointGenotype() {
-		if ( isPointGenotype() ) return this;
-		else return null;
-	}
-
-	/** Returns true if this object \em is an indel genotype (and thus
-	 * indel genotype is what it only has).
-	 * @return
-	 */
-	public boolean hasIndelGenotype() {
-		return isIndelGenotype();
-	}
-
-	/** Returns true if this object \em is a point genotype (and thus
-	 * point genotype is what it only has.
-	 * @return
-	 */
-	public boolean hasPointGenotype() {
-		return isPointGenotype();
-	}
-
-
-
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/codecs/samread/SAMReadCodec.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/codecs/samread/SAMReadCodec.java
deleted file mode 100644
index d83ce6d..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/codecs/samread/SAMReadCodec.java
+++ /dev/null
@@ -1,123 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.codecs.samread;
-
-import htsjdk.samtools.Cigar;
-import htsjdk.samtools.TextCigarCodec;
-import htsjdk.samtools.util.StringUtil;
-import htsjdk.tribble.AsciiFeatureCodec;
-import htsjdk.tribble.exception.CodecLineParsingException;
-import htsjdk.tribble.readers.LineIterator;
-import htsjdk.tribble.util.ParsingUtils;
-
-/**
- * Decodes a simple SAM text string.
- *
- * <p>
- * Reads in the SAM text version of a BAM file as a ROD.  For testing only
- * </p>
- *
- * <p>
- * See also: @see <a href="http://samtools.sourceforge.net">SAMTools</a> for format specification
- * </p>
- *
- * <h2>File format example</h2>
- * <pre>
- *     SL-XBC:1:10:628:923#0	16	Escherichia_coli_K12	1	37	76M	=	1	0	AGCTTTTCATTCTGACTGCAACGGGCAATATGTCTCTGTGTGGATTAAAAAAAGAGTGTCTGATAGCAGCTTCTGA	B@>87<;A@?@957:>>@AA at B>@A9AB at B>@A@@@@@A;=AAB at BBBBBCBBBB@>A>:ABB at BAABCB=CA at CB
- * </pre>
- *
- * @author Matt Hanna
- * @since 2009
- */
-public class SAMReadCodec extends AsciiFeatureCodec<SAMReadFeature> {
-    /* SL-XBC:1:10:628:923#0	16	Escherichia_coli_K12	1	37	76M	=	1	0	AGCTTTTCATTCTGACTGCAACGGGCAATATGTCTCTGTGTGGATTAAAAAAAGAGTGTCTGATAGCAGCTTCTGA	B@>87<;A@?@957:>>@AA at B>@A9AB at B>@A@@@@@A;=AAB at BBBBBCBBBB@>A>:ABB at BAABCB=CA at CB */
-
-    // the number of tokens we expect to parse from a read line
-    private static final int expectedTokenCount = 11;
-
-    public SAMReadCodec() {
-        super(SAMReadFeature.class);
-    }
-
-    /**
-     * Decode a single line in a SAM text file.
-     * @param line line to decode.
-     * @return A SAMReadFeature modeling that line.
-     */
-    public SAMReadFeature decode(String line) {
-        // we may be asked to process a header line; ignore it
-        if (line.startsWith("@")) return null;        
-
-        String[] tokens = new String[expectedTokenCount];
-
-        // split the line
-        int count = ParsingUtils.splitWhitespace(line,tokens);
-
-        // check to see if we've parsed the string into the right number of tokens (expectedTokenCount)
-        if (count != expectedTokenCount)
-            throw new CodecLineParsingException("the SAM read line didn't have the expected number of tokens " +
-                                                "(expected = " + expectedTokenCount + ", saw = " + count + " on " +
-                                                "line = " + line + ")");
-
-        final String readName = tokens[0];
-        final int flags = Integer.parseInt(tokens[1]);
-        final String contigName = tokens[2];
-        final int alignmentStart = Integer.parseInt(tokens[3]);
-        final int mapQ = Integer.parseInt(tokens[4]);
-        final String cigarString = tokens[5];
-        final String mateContigName = tokens[6];
-        final int mateAlignmentStart = Integer.parseInt(tokens[7]);
-        final int inferredInsertSize = Integer.parseInt(tokens[8]);
-        final byte[] bases = StringUtil.stringToBytes(tokens[9]);
-        final byte[] qualities = StringUtil.stringToBytes(tokens[10]);
-
-        // Infer the alignment end.
-        Cigar cigar = TextCigarCodec.getSingleton().decode(cigarString);
-        int alignmentEnd = alignmentStart + cigar.getReferenceLength() - 1;
-
-        // Remove printable character conversion from the qualities.
-        for(byte quality: qualities) quality -= 33;
-
-        return new SAMReadFeature(readName,
-                                  flags,
-                                  contigName,
-                                  alignmentStart,
-                                  alignmentEnd,
-                                  mapQ,
-                                  cigarString,
-                                  mateContigName,
-                                  mateAlignmentStart,
-                                  inferredInsertSize,
-                                  bases,
-                                  qualities);
-    }
-
-    @Override
-    public Object readActualHeader(LineIterator lineIterator) {
-        // No header for this format
-        return null;
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/codecs/samread/SAMReadFeature.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/codecs/samread/SAMReadFeature.java
deleted file mode 100644
index 129ae6e..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/codecs/samread/SAMReadFeature.java
+++ /dev/null
@@ -1,199 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.codecs.samread;
-
-import htsjdk.tribble.Feature;
-
-/**
- * Represents a SAM record read from a SAM text format file. 
- *
- * @author mhanna
- * @version 0.1
- */
-public class SAMReadFeature implements Feature {
-    /**
-     * Name of this read.
-     */
-    private final String readName;
-
-    /**
-     * Flags associated with this read.
-     */
-    private final int flags;
-
-    /**
-     * Contig to which this read is aligned.
-     */
-    private final String contig;
-
-    /**
-     * Position on contig to which this read is aligned.
-     */
-    private final int alignmentStart;
-
-    /**
-     * Position on contig at which this alignment ends.
-     */
-    private final int alignmentEnd;
-
-    /**
-     * Mapping quality for the read.
-     */
-    private final int mapQ;
-
-    /**
-     * Cigar string matching read to reference.
-     */
-    private final String cigarString;
-
-    /**
-     * Contig to which this read's pair is aligned.
-     */
-    private final String mateContig;
-
-    /**
-     * Position in contig to which this read's pair is aligned.
-     */
-    private final int mateAlignmentStart;
-
-    /**
-     * Size between pairs.
-     */
-    private final int insertSize;
-
-    /**
-     * Bases in this read.
-     */
-    private final byte[] bases;
-
-    /**
-     * Qualities constituting this read.
-     */
-    private final byte[] qualities;
-
-    // Tags are not currently supported.
-
-    /**
-     * create the read feature.  Default protection so that only other classes in this package can create it.
-     */
-    SAMReadFeature(final String readName,
-                   final int flags,
-                   final String contig,
-                   final int alignmentStart,
-                   final int alignmentEnd,
-                   final int mapQ,
-                   final String cigarString,
-                   final String mateContig,
-                   final int mateAlignmentStart,
-                   final int insertSize,
-                   final byte[] bases,
-                   final byte[] qualities) {
-        this.readName = readName;
-        this.flags = flags;
-        this.contig = contig;
-        this.alignmentStart = alignmentStart;
-        this.alignmentEnd = alignmentEnd;
-        this.mapQ = mapQ;
-        this.cigarString = cigarString;
-        this.mateContig = mateContig;
-        this.mateAlignmentStart = mateAlignmentStart;
-        this.insertSize = insertSize;
-        this.bases = bases;
-        this.qualities = qualities;
-    }
-
-    public String getReadName() {
-        return readName;
-    }
-
-    public int getFlags() {
-        return flags;
-    }
-
-    public String getReferenceName() {
-        return contig;
-    }
-
-    public int getAlignmentStart() {
-        return alignmentStart;
-    }
-
-    public int getAlignmentEnd() {
-        return alignmentEnd;
-    }
-
-    /**
-     * An alias for getReferenceName, required by Feature interface.
-     * @return Aligned contig name.
-     */
-    public String getChr() {
-        return getReferenceName();
-    }
-
-    /**
-     * An alias for getAlignmentEnd(), required by Feature interface.
-     * @return End of alignment, inclusive.
-     */
-    public int getStart() {
-        return getAlignmentStart();
-    }
-
-    /**
-     * An alias for getAlignmentStart(), required by Feature interface.
-     * @return Aligned position.  1-based.
-     */
-    public int getEnd() {
-        return getAlignmentEnd();
-    }    
-
-    public int getMappingQuality() {
-        return mapQ;
-    }
-
-    public String getCigarString() {
-        return cigarString;
-    }
-
-    public String getMateReferenceName() {
-        return mateContig;
-    }
-
-    public int getMateAlignmentStart() {
-        return mateAlignmentStart;
-    }
-
-    public int getInferredInsertSize() {
-        return insertSize;
-    }
-
-    public byte[] getReadBases() {
-        return bases;    
-    }
-
-    public byte[] getReadQualities() {
-        return qualities;
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/codecs/table/BedTableCodec.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/codecs/table/BedTableCodec.java
deleted file mode 100644
index 9a0115f..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/codecs/table/BedTableCodec.java
+++ /dev/null
@@ -1,59 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.codecs.table;
-
-import htsjdk.tribble.Feature;
-import org.broadinstitute.gatk.engine.refdata.ReferenceDependentFeatureCodec;
-
-import java.util.Arrays;
-
-/**
- * The standard table codec that expects loci as contig start stop, not contig:start-stop
- *
- * <p>
- * The standard table codec with a slightly different parsing convention
- * (expects loci as contig start stop, not contig:start-stop)
- * </p>
- *
- * <p>
- * See also: TableCodec
- * </p>
- *
- * @author Chris Hartl
- * @since 2010
- */
-public class BedTableCodec extends TableCodec implements ReferenceDependentFeatureCodec {
-
-    @Override
-    public TableFeature decode(String line) {
-        if (line.startsWith(headerDelimiter) || line.startsWith(commentDelimiter) || line.startsWith(igvHeaderDelimiter))
-            return null;
-        String[] split = line.split(delimiterRegex);
-        if (split.length < 1)
-            throw new IllegalArgumentException("TableCodec line = " + line + " doesn't appear to be a valid table format");
-        return new TableFeature(genomeLocParser.createGenomeLoc(split[0],Integer.parseInt(split[1])-1,Integer.parseInt(split[2])), Arrays.asList(split),header);
-    }
-}
\ No newline at end of file
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/codecs/table/TableCodec.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/codecs/table/TableCodec.java
deleted file mode 100644
index 1058d3e..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/codecs/table/TableCodec.java
+++ /dev/null
@@ -1,126 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.codecs.table;
-
-import htsjdk.tribble.AsciiFeatureCodec;
-import htsjdk.tribble.readers.LineIterator;
-import org.broadinstitute.gatk.engine.refdata.ReferenceDependentFeatureCodec;
-import org.broadinstitute.gatk.utils.GenomeLocParser;
-import org.broadinstitute.gatk.utils.exceptions.UserException;
-
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collections;
-
-/**
- * Reads tab deliminated tabular text files
- *
- * <p>
- *     <ul>
- *     <li>Header: must begin with line HEADER or track (for IGV), followed by any number of column names,
- *     separated by whitespace.</li>
- *     <li>Comment lines starting with # are ignored</li>
- *     <li>Each non-header and non-comment line is split into parts by whitespace,
- *     and these parts are assigned as a map to their corresponding column name in the header.
- *     Note that the first element (corresponding to the HEADER column) must be a valid genome loc
- *     such as 1, 1:1 or 1:1-10, which is the position of the Table element on the genome.  TableCodec
- *     requires that there be one value for each column in the header, and no more, on all lines.</li>
- *     </ul>
- * </p>
- *
- * </p>
- *
- * <h2>File format example</h2>
- * <pre>
- *     HEADER a b c
- *     1:1  1   2   3
- *     1:2  4   5   6
- *     1:3  7   8   9
- * </pre>
- *
- * @author Mark DePristo
- * @since 2009
- */
-public class TableCodec extends AsciiFeatureCodec<TableFeature> implements ReferenceDependentFeatureCodec {
-    final static protected String delimiterRegex = "\\s+";
-    final static protected String headerDelimiter = "HEADER";
-    final static protected String igvHeaderDelimiter = "track";
-    final static protected String commentDelimiter = "#";
-
-    protected ArrayList<String> header = new ArrayList<String>();
-
-    /**
-     * The parser to use when resolving genome-wide locations.
-     */
-    protected GenomeLocParser genomeLocParser;
-
-    public TableCodec() {
-        super(TableFeature.class);
-    }
-
-    /**
-     * Set the parser to use when resolving genetic data.
-     * @param genomeLocParser The supplied parser.
-     */
-    @Override
-    public void setGenomeLocParser(GenomeLocParser genomeLocParser) {
-        this.genomeLocParser =  genomeLocParser;
-    }
-
-    @Override
-    public TableFeature decode(String line) {
-        if (line.startsWith(headerDelimiter) || line.startsWith(commentDelimiter) || line.startsWith(igvHeaderDelimiter))
-            return null;
-        String[] split = line.split(delimiterRegex);
-        if (split.length < 1)
-            throw new IllegalArgumentException("TableCodec line = " + line + " doesn't appear to be a valid table format");
-        return new TableFeature(genomeLocParser.parseGenomeLoc(split[0]),Arrays.asList(split), header);
-    }
-
-    @Override
-    public Object readActualHeader(final LineIterator reader) {
-        boolean isFirst = true;
-        while (reader.hasNext()) {
-            final String line = reader.peek(); // Peek to avoid reading non-header data
-            if ( isFirst && ! line.startsWith(headerDelimiter) && ! line.startsWith(commentDelimiter)) {
-                throw new UserException.MalformedFile("TableCodec file does not have a header");
-            }
-            isFirst &= line.startsWith(commentDelimiter);
-            if (line.startsWith(headerDelimiter)) {
-                reader.next(); // "Commit" the peek
-                if (header.size() > 0) throw new IllegalStateException("Input table file seems to have two header lines.  The second is = " + line);
-                final String spl[] = line.split(delimiterRegex);
-                Collections.addAll(header, spl);
-                return header;
-            } else if (line.startsWith(commentDelimiter)) {
-                reader.next(); // "Commit" the peek
-            } else {
-                break;
-            }
-        }
-        return header;
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/codecs/table/TableFeature.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/codecs/table/TableFeature.java
deleted file mode 100644
index 58b06c9..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/codecs/table/TableFeature.java
+++ /dev/null
@@ -1,99 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.codecs.table;
-
-
-import htsjdk.tribble.Feature;
-import org.broadinstitute.gatk.utils.GenomeLoc;
-import org.broadinstitute.gatk.utils.Utils;
-
-import java.util.List;
-
-/**
- * A feature representing a single row out of a text table
- */
-public class TableFeature implements Feature {
-    // stores the values for the columns seperated out
-    private final List<String> values;
-
-    // if we have column names, we store them here
-    private final List<String> keys;
-
-    // our location
-    private final GenomeLoc position;
-
-    public TableFeature(GenomeLoc position, List<String> values, List<String> keys) {
-        this.values = values;
-        this.keys = keys;
-        this.position = position;
-    }
-
-    @Override
-    public String getChr() {
-        return position.getContig();
-    }
-
-    @Override
-    public int getStart() {
-        return (int)position.getStart();
-    }
-
-    @Override
-    public int getEnd() {
-        return (int)position.getStop();
-    }
-
-    public String getValue(int columnPosition) {
-        if (columnPosition >= values.size()) throw new IllegalArgumentException("We only have " + values.size() + "columns, the requested column = " + columnPosition);
-        return values.get(columnPosition);
-    }
-
-    public String toString() {
-        return String.format("%s\t%s",position.toString(), Utils.join("\t",values));
-    }
-
-    public String get(String columnName) {
-        int position = keys.indexOf(columnName);
-        if (position < 0) throw new IllegalArgumentException("We don't have a column named " + columnName);
-        return values.get(position);
-    }
-
-    public GenomeLoc getLocation() {
-        return this.position;
-    }
-
-    public List<String> getAllValues() {
-        return getValuesTo(values.size());
-    }
-
-    public List<String> getValuesTo(int columnPosition) {
-        return values.subList(0,columnPosition);
-    }
-
-    public List<String> getHeader() {
-        return keys;
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/collections/DefaultHashMap.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/collections/DefaultHashMap.java
deleted file mode 100644
index 2c543dd..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/collections/DefaultHashMap.java
+++ /dev/null
@@ -1,56 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.collections;
-
-import java.util.HashMap;
-
-/**
- * Created with IntelliJ IDEA.
- * User: farjoun
- * Date: 10/30/12
- * Time: 3:20 PM
- * To change this template use File | Settings | File Templates.
- */
-
-//lifted from http://stackoverflow.com/questions/7519339
-//could also use org.apache.commons.collections.map.DefaultedMap http://commons.apache.org/collections/apidocs/org/apache/commons/collections/map/DefaultedMap.html
-public class DefaultHashMap<K,V> extends HashMap<K,V> {
-
-    public void setDefaultValue(V defaultValue) {
-        this.defaultValue = defaultValue;
-    }
-    protected V defaultValue;
-    public DefaultHashMap(V defaultValue) {
-        this.defaultValue = defaultValue;
-    }
-    @Override
-    public V get(Object k) {
-        V v = super.get(k);
-        return ((v == null) && !this.containsKey(k)) ? this.defaultValue : v;
-    }
-
-}
-
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/collections/ExpandingArrayList.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/collections/ExpandingArrayList.java
deleted file mode 100644
index b2b23d3..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/collections/ExpandingArrayList.java
+++ /dev/null
@@ -1,69 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.collections;
-
-import java.util.ArrayList;
-import java.util.Collection;
-
-public class ExpandingArrayList<E> extends ArrayList<E> {
-    public ExpandingArrayList() { super(); }
-    public ExpandingArrayList(Collection<? extends E> c) { super(c); }
-    public ExpandingArrayList(int initialCapacity) { super(initialCapacity); }
-
-    /**
-     * Returns the element at the specified position in this list.  If index > size,
-     * returns null.  Otherwise tries to access the array
-     * @param index
-     * @return
-     * @throws IndexOutOfBoundsException in index < 0
-     */
-    public E get(int index) throws IndexOutOfBoundsException {
-        if ( index < size() )
-            return super.get(index);
-        else
-            return null;
-    }
-
-    public E expandingGet(int index, E default_value) throws IndexOutOfBoundsException {
-        maybeExpand(index, default_value);
-        return super.get(index);
-    }
-
-    private void maybeExpand(int index, E value) {
-        if ( index >= size() ) {
-            ensureCapacity(index+1); // make sure we have space to hold at least index + 1 elements
-            // We need to add null items until we can safely set index to element
-            for ( int i = size(); i <= index; i++ )
-                add(value);
-        }
-    }
-
-
-    public E set(int index, E element) {
-        maybeExpand(index, null);
-        return super.set(index, element);
-    }
-}
\ No newline at end of file
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/collections/IndexedSet.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/collections/IndexedSet.java
deleted file mode 100644
index 2bedb92..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/collections/IndexedSet.java
+++ /dev/null
@@ -1,342 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.collections;
-
-import it.unimi.dsi.fastutil.objects.Object2IntMap;
-import it.unimi.dsi.fastutil.objects.Object2IntOpenHashMap;
-
-import java.util.*;
-
-/**
-* Set set where each element can be reference by a unique integer index that runs from
-*     0 to the size of the set - 1.
-*
-* @author Valentin Ruano-Rubio <valentin at broadinstitute.org>
-*/
-public class IndexedSet<E> extends AbstractSet<E> implements Set<E> {
-
-    /**
-     * Elements stored in an array-list by their index.
-     */
-    private final ArrayList<E> elements;
-
-    /**
-     * A unmodifiable view to the element list. Initially {@code null} it is thread-unsafe lazy instantiated
-     * when requested first time through {@link #asList}. Therefore typically it is shared by invoking code but
-     * there could be some extra copies (rare though) in multi-thread runs.
-     */
-    private transient List<E> unmodifiableElementsListView;
-
-    /**
-     * Quick element to index lookup map.
-     * <p>
-     *  Uses a primitive int value map for efficiency sake.
-     * </p>
-     */
-    private final Object2IntMap<E> indexByElement;
-
-    /**
-     * Creates an empty indexed set indicating the expected number of elements.
-     *
-     * @param initialCapacity the initial number of elements.
-     */
-    public IndexedSet(final int initialCapacity) {
-        elements = new ArrayList<>(initialCapacity);
-        indexByElement = new Object2IntOpenHashMap<>(initialCapacity);
-    }
-
-    /**
-     * Creates a new sample list from a existing collection of elements.
-     *
-     * <p>
-     *     Elements will be indexed as they appear in the input array. Repeats will be ignored.
-     * </p>
-     *
-     * @param values the original sample list.
-     *
-     * @throws IllegalArgumentException
-     * if {@code values} array is {@code null} itself, or it contains {@code null}.
-     */
-    @SuppressWarnings("unchecked")
-    public IndexedSet(final Collection<E> values) {
-        if (values == null)
-            throw new IllegalArgumentException("input values cannot be null");
-
-        final int initialCapacity = values.size();
-        elements = new ArrayList<>(initialCapacity);
-        indexByElement = new Object2IntOpenHashMap<>(initialCapacity);
-        int nextIndex = 0;
-        for (final E value : values) {
-            if (value == null)
-                throw new IllegalArgumentException("null element not allowed: index == " + nextIndex);
-            if (indexByElement.containsKey(value))
-                continue;
-            indexByElement.put(value, nextIndex++);
-            elements.add(value);
-        }
-    }
-
-    /**
-     * Creates a new sample list from a existing array of elements.
-     *
-     * <p>
-     *     Elements will be indexed as they appear in the collection. Repeats will be ignored.
-     * </p>
-     *
-     * @param values the original sample list.
-     *
-     * @throws IllegalArgumentException
-     * if {@code values} collection is {@code null} itself, or it contains {@code null}.
-     */
-    @SuppressWarnings("unchecked")
-    public IndexedSet(final E ... values) {
-        if (values == null)
-            throw new IllegalArgumentException("input values cannot be null");
-
-        final int initialCapacity = values.length;
-        elements = new ArrayList<>(initialCapacity);
-        indexByElement = new Object2IntOpenHashMap<>(initialCapacity);
-        int nextIndex = 0;
-        for (final E value : values) {
-            if (value == null)
-                throw new IllegalArgumentException("null element not allowed: index == " + nextIndex);
-            if (indexByElement.containsKey(value))
-                continue;
-            indexByElement.put(value, nextIndex++);
-            elements.add(value);
-        }
-    }
-
-    /**
-     * Returns a list view of the elements in the set.
-     *
-     * <p>
-     *     Elements are sorted by their index within the set.
-     * </p>
-     *
-     * <p>
-     *     This view changes as the indexed set changes but it cannot be used to update its contents.
-     *     In such case a {@link UnsupportedOperationException} exception will be thrown if the calling
-     *     code tries to tho just that.
-     * </p>
-     *
-     * @return never {@code null}.
-     */
-    public List<E> asList() {
-        if (unmodifiableElementsListView == null)
-            unmodifiableElementsListView = Collections.unmodifiableList(elements);
-        return unmodifiableElementsListView;
-    }
-
-    /**
-     * Throws an exception if an index is out of bounds.
-     *
-     * <p>
-     *     An element index is valid iff is within [0,{@link #size()}).
-     * </p>
-     *
-     * @param index the query index.
-     *
-     * @throws IllegalArgumentException {@code index} is out of bounds.
-     */
-    protected void checkIndex(final int index) {
-        if (index < 0)
-            throw new IllegalArgumentException("the index cannot be negative: " + index);
-        if (index >= size())
-            throw new IllegalArgumentException("the index is equal or larger than the list length: " + index + " >= " + size());
-    }
-
-    @Override
-    public Iterator<E> iterator() {
-        return asList().iterator();
-    }
-
-    /**
-     * Returns number of elements in the set.
-     * @return never {@code null}.
-     */
-    @Override
-    public int size() {
-        return elements.size();
-    }
-
-    /**
-     *
-     * @param o
-     * @return {@code true} iff {@code o} is in
-     */
-    @Override
-    @SuppressWarnings("all")
-    public boolean contains(final Object o) {
-        return o != null && indexByElement.containsKey(o);
-    }
-
-    /**
-     * Adds a new element to the set.
-     *
-     * <p>
-     *     If the element was already in th set nothing will happen and the method will return {@code false}. However,
-     *     if the element is new to this set, it will assigned the next index available (equal to the size before addition).
-     *     The method will return {@code true} in this case.
-     * </p>
-     *
-     * @param o the object to add.
-     *
-     * @throw IllegalArgumentException if {@code o} is {@code null}.
-     *
-     * @return {@code true} iff the set was modified by this operation.
-     */
-    @Override
-    public boolean add(final E o) {
-        if (o == null)
-            throw new IllegalArgumentException("the input argument cannot be null");
-        if (contains(o))
-            return false;
-        final int nextIndex = size();
-        elements.add(o);
-        indexByElement.put(o, nextIndex);
-        return true;
-    }
-
-    /**
-     * Removes an element from the set.
-     *
-     * <p>
-     *     If the element was not present in the set, nothing happens and the method return false. However,
-     *     if the element is new to this set, it will be assigned the next index available (equal to the size
-     *     before addition).
-     *     The method will return {@code true} in this case.
-     * </p>
-     *
-     * @param o the object to add.
-     *
-     * @throw IllegalArgumentException if {@code o} is {@code null}.
-     *
-     * @return {@code true} iff the set was modified by this operation.
-     */   @Override
-    public boolean remove(final Object o) {
-        final int index = indexByElement.removeInt(o);
-        if (index == -1)
-            return false;
-        elements.remove(index);
-        indexByElement.remove(o);
-        final ListIterator<E> it = elements.listIterator(index);
-        int nextIndex = index;
-        while (it.hasNext())
-            indexByElement.put(it.next(),nextIndex++);
-        return true;
-    }
-
-    /**
-     * Removes all elements in the set.
-     */
-    @Override
-    public void clear() {
-        elements.clear();
-        indexByElement.clear();
-    }
-
-    /**
-     * Compares this with another indexed set.
-     * @param o the other object to compare to.
-     * @return {@code false} unless {@code o} is a indexed-set that contains the same elements in the same order.
-     */
-    @Override
-    public boolean equals(final Object o) {
-        if (o == this)
-            return true;
-        if (o == null)
-            return false;
-        if (!(o instanceof IndexedSet<?>))
-            return false;
-
-        final IndexedSet<?> other = (IndexedSet<?>)o;
-
-        return equals(other);
-    }
-
-    /**
-     * Compare to another indexed set.
-     *
-     * @param other the target indexed set.
-     *
-     * @throws java.lang.IllegalArgumentException if {@code other} is {@code null}.
-     *
-     * @return {@code true} iff {@other} is not {@code null}, and contains exactly the same elements
-     * (as compared using {@link Object#equals} a this set with matching indices.
-     */
-    public boolean equals(final IndexedSet<?> other) {
-        if (other == null)
-            throw new IllegalArgumentException("other cannot be null");
-        final ArrayList<?> otherElements = other.elements;
-
-        final int elementCount = elements.size();
-        if (otherElements.size() != elementCount)
-            return false;
-        for (int i = 0; i < elementCount; i++)
-            if (!elements.get(i).equals(otherElements.get(i)))
-                return false;
-        return true;
-    }
-
-    @Override
-    public int hashCode() {
-        int result = 1;
-
-        for (final E element : elements)
-            result = 31 * result + (element == null ? 0 : element.hashCode());
-        return result;
-    }
-
-    /**
-     * Returns the element given its index within the set.
-     * @param index the target element's index.
-     *
-     * @throws IllegalArgumentException if {@code index} is not valid; in [0,{@link #size()}).
-     *
-     * @return never {@code null}; as null is not a valid element.
-     */
-    public E get(final int index) {
-        checkIndex(index);
-        return elements.get(index);
-    }
-
-    /**
-     * Returns the index of an object.
-     * @param o the object of interest.
-     *
-     * @throws IllegalArgumentException if {@code o} is {@code null}.
-     *
-     * @return {@code -1} if such an object is not an element of this set, otherwise is index in the set thus a
-     * values within [0,{@link #size()}).
-     */
-    public int indexOf(final E o) {
-        if (o == null)
-            throw new IllegalArgumentException("the query object cannot be null");
-        return indexByElement.containsKey(o) ? indexByElement.getInt(o) : -1;
-    }
-
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/collections/LoggingNestedIntegerArray.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/collections/LoggingNestedIntegerArray.java
deleted file mode 100644
index 3117852..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/collections/LoggingNestedIntegerArray.java
+++ /dev/null
@@ -1,120 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.collections;
-
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-
-import java.io.PrintStream;
-
-/**
- * Wrapper around the basic NestedIntegerArray class that logs all updates (ie., all calls to put())
- * to the provided output stream. For testing/debugging purposes.
- *
- * Log entries are of the following form (fields are tab-separated):
- * LABEL    OPERATION    VALUE   KEY1    KEY2    ...     KEY_N
- *
- * A header line is written before the log entries giving the dimensions of this NestedIntegerArray.
- * It has the form:
- *
- * # LABEL    SIZE_OF_FIRST_DIMENSION    SIZE_OF_SECOND_DIMENSION    ...    SIZE_OF_NTH_DIMENSION
- *
- * @author David Roazen
- */
-public class LoggingNestedIntegerArray<T> extends NestedIntegerArray<T> {
-
-    private PrintStream log;
-    private String logEntryLabel;
-
-    public static final String HEADER_LINE_PREFIX = "# ";
-    public enum NestedIntegerArrayOperation { GET, PUT };
-
-    /**
-     *
-     * @param log output stream to which to log update operations
-     * @param logEntryLabel String that should be prefixed to each log entry
-     * @param dimensions
-     */
-    public LoggingNestedIntegerArray( PrintStream log, String logEntryLabel, final int... dimensions ) {
-        super(dimensions);
-
-        if ( log == null ) {
-            throw new ReviewedGATKException("Log output stream must not be null");
-        }
-        this.log = log;
-        this.logEntryLabel = logEntryLabel != null ? logEntryLabel : "";
-
-        // Write the header line recording the dimensions of this NestedIntegerArray:
-        StringBuilder logHeaderLine = new StringBuilder();
-
-        logHeaderLine.append(HEADER_LINE_PREFIX);
-        logHeaderLine.append(this.logEntryLabel);
-        for ( int dimension : dimensions ) {
-            logHeaderLine.append("\t");
-            logHeaderLine.append(dimension);
-        }
-
-        this.log.println(logHeaderLine.toString());
-    }
-
-    @Override
-    public T get( final int... keys ) {
-        StringBuilder logEntry = new StringBuilder();
-
-        logEntry.append(logEntryLabel);
-        logEntry.append("\t");
-        logEntry.append(NestedIntegerArrayOperation.GET);
-        logEntry.append("\t");  // empty field for the datum value
-
-        for ( int key : keys ) {
-            logEntry.append("\t");
-            logEntry.append(key);
-        }
-
-        log.println(logEntry.toString());
-
-        return super.get(keys);
-    }
-
-    @Override
-    public boolean put( final T value, final int... keys ) {
-        StringBuilder logEntry = new StringBuilder();
-
-        logEntry.append(logEntryLabel);
-        logEntry.append("\t");
-        logEntry.append(NestedIntegerArrayOperation.PUT);
-        logEntry.append("\t");
-        logEntry.append(value);
-        for ( int key : keys ) {
-            logEntry.append("\t");
-            logEntry.append(key);
-        }
-
-        // PrintStream methods all use synchronized blocks internally, so our logging is thread-safe
-        log.println(logEntry.toString());
-
-        return super.put(value, keys);
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/collections/NestedIntegerArray.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/collections/NestedIntegerArray.java
deleted file mode 100644
index 02dd15a..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/collections/NestedIntegerArray.java
+++ /dev/null
@@ -1,221 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.collections;
-
-import org.apache.log4j.Logger;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-
-/**
- * Created by IntelliJ IDEA.
- * User: ebanks
- * Date: July 1, 2012
- */
-
-public class NestedIntegerArray<T> {
-
-    private static Logger logger = Logger.getLogger(NestedIntegerArray.class);
-
-    protected final Object[] data;
-
-    protected final int numDimensions;
-    protected final int[] dimensions;
-
-    // Preallocate the first two dimensions to limit contention during tree traversals in put()
-    private static final int NUM_DIMENSIONS_TO_PREALLOCATE = 2;
-
-    public NestedIntegerArray(final int... dimensions) {
-        numDimensions = dimensions.length;
-        if ( numDimensions == 0 )
-            throw new ReviewedGATKException("There must be at least one dimension to an NestedIntegerArray");
-        this.dimensions = dimensions.clone();
-
-        int dimensionsToPreallocate = Math.min(dimensions.length, NUM_DIMENSIONS_TO_PREALLOCATE);
-
-        if ( logger.isDebugEnabled() ) logger.debug(String.format("Creating NestedIntegerArray with dimensions %s", Arrays.toString(dimensions)));
-        if ( logger.isDebugEnabled() ) logger.debug(String.format("Pre-allocating first %d dimensions", dimensionsToPreallocate));
-
-        data = new Object[dimensions[0]];
-        preallocateArray(data, 0, dimensionsToPreallocate);
-
-        if ( logger.isDebugEnabled() ) logger.debug(String.format("Done pre-allocating first %d dimensions", dimensionsToPreallocate));
-    }
-
-    /**
-     * @return the dimensions of this nested integer array.  DO NOT MODIFY
-     */
-    public int[] getDimensions() {
-        return dimensions;
-    }
-
-    /**
-     * Recursively allocate the first dimensionsToPreallocate dimensions of the tree
-     *
-     * Pre-allocating the first few dimensions helps limit contention during tree traversals in put()
-     *
-     * @param subarray current node in the tree
-     * @param dimension current level in the tree
-     * @param dimensionsToPreallocate preallocate only this many dimensions (starting from the first)
-     */
-    private void preallocateArray( Object[] subarray, int dimension, int dimensionsToPreallocate ) {
-        if ( dimension >= dimensionsToPreallocate - 1 ) {
-            return;
-        }
-
-        for ( int i = 0; i < subarray.length; i++ ) {
-            subarray[i] = new Object[dimensions[dimension + 1]];
-            preallocateArray((Object[])subarray[i], dimension + 1, dimensionsToPreallocate);
-        }
-    }
-
-    public T get(final int... keys) {
-        final int numNestedDimensions = numDimensions - 1;
-        Object[] myData = data;
-
-        for( int i = 0; i < numNestedDimensions; i++ ) {
-            if ( keys[i] >= dimensions[i] )
-                return null;
-
-            myData = (Object[])myData[keys[i]];
-            if ( myData == null )
-                return null;
-        }
-
-        return (T)myData[keys[numNestedDimensions]];
-    }
-
-    /**
-     * Insert a value at the position specified by the given keys.
-     *
-     * This method is thread-safe, however the caller MUST check the
-     * return value to see if the put succeeded. This method RETURNS FALSE if
-     * the value could not be inserted because there already was a value present
-     * at the specified location. In this case the caller should do a get() to get
-     * the already-existing value and (potentially) update it.
-     *
-     * @param value value to insert
-     * @param keys keys specifying the location of the value in the tree
-     * @return true if the value was inserted, false if it could not be inserted because there was already
-     *         a value at the specified position
-     */
-    public boolean put(final T value, final int... keys) { // WARNING! value comes before the keys!
-        if ( keys.length != numDimensions )
-            throw new ReviewedGATKException("Exactly " + numDimensions + " keys should be passed to this NestedIntegerArray but " + keys.length + " were provided");
-
-        final int numNestedDimensions = numDimensions - 1;
-        Object[] myData = data;
-        for ( int i = 0; i < numNestedDimensions; i++ ) {
-            if ( keys[i] >= dimensions[i] )
-                throw new ReviewedGATKException("Key " + keys[i] + " is too large for dimension " + i + " (max is " + (dimensions[i]-1) + ")");
-
-            // If we're at or beyond the last dimension that was pre-allocated, we need to do a synchronized
-            // check to see if the next branch exists, and if it doesn't, create it
-            if ( i >= NUM_DIMENSIONS_TO_PREALLOCATE - 1 ) {
-                synchronized ( myData ) {
-                    if ( myData[keys[i]] == null ) {
-                        myData[keys[i]] = new Object[dimensions[i + 1]];
-                    }
-                }
-            }
-
-            myData = (Object[])myData[keys[i]];
-        }
-
-        synchronized ( myData ) {   // lock the bottom row while we examine and (potentially) update it
-
-            // Insert the new value only if there still isn't any existing value in this position
-            if ( myData[keys[numNestedDimensions]] == null ) {
-                myData[keys[numNestedDimensions]] = value;
-            }
-            else {
-                // Already have a value for this leaf (perhaps another thread came along and inserted one
-                // while we traversed the tree), so return false to notify the caller that we didn't put
-                // the item
-                return false;
-            }
-        }
-
-        return true;
-    }
-
-    public List<T> getAllValues() {
-        final List<T> result = new ArrayList<T>();
-        fillAllValues(data, result);
-        return result;
-    }
-
-    private void fillAllValues(final Object[] array, final List<T> result) {
-        for ( Object value : array ) {
-            if ( value == null )
-                continue;
-            if ( value instanceof Object[] )
-                fillAllValues((Object[])value, result);
-            else
-                result.add((T)value);
-        }
-    }
-
-    public static class Leaf<T> {
-        public final int[] keys;
-        public final T value;
-
-        public Leaf(final int[] keys, final T value) {
-            this.keys = keys;
-            this.value = value;
-        }
-    }
-
-    public List<Leaf<T>> getAllLeaves() {
-        final List<Leaf<T>> result = new ArrayList<Leaf<T>>();
-        fillAllLeaves(data, new int[0], result);
-        return result;
-    }
-
-    private void fillAllLeaves(final Object[] array, final int[] path, final List<Leaf<T>> result) {
-        for ( int key = 0; key < array.length; key++ ) {
-            final Object value = array[key];
-            if ( value == null )
-                continue;
-            final int[] newPath = appendToPath(path, key);
-            if ( value instanceof Object[] ) {
-                fillAllLeaves((Object[]) value, newPath, result);
-            } else {
-                result.add(new Leaf<T>(newPath, (T)value));
-            }
-        }
-    }
-
-    private int[] appendToPath(final int[] path, final int newKey) {
-        final int[] newPath = new int[path.length + 1];
-        for ( int i = 0; i < path.length; i++ )
-            newPath[i] = path[i];
-        newPath[path.length] = newKey;
-        return newPath;
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/collections/Pair.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/collections/Pair.java
deleted file mode 100644
index b09c9df..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/collections/Pair.java
+++ /dev/null
@@ -1,93 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.collections;
-
-
-public class Pair<X,Y> {
-    // declare public, STL-style for easier and more efficient access:
-    public X first; 
-    public Y second;
-
-    public Pair(X x, Y y) { first = x; second = y; }
-
-    public void set(X x, Y y) { first = x; second = y; }
-
-    /** Java-style getter; note that we currently allow direct access to 
-        the member field.
-    */
-    public X getFirst() { return first; }
-
-    /** Java-style getter; note that we currently allow direct access to 
-        the member field.
-    */
-    public Y getSecond() { return second; }
-
-    /**
-     * Calculate whether this pair object is equal to another object.
-     * @param o The other object (hopefully a pair).
-     * @return True if the two are equal; false otherwise.
-     */
-    @Override
-    public boolean equals( Object o ) {
-        if( o == null )
-            return false;
-        if( !(o instanceof Pair) )
-            return false;
-
-        Pair other = (Pair)o;
-
-        // Check to see whether one is null but not the other.
-        if( this.first == null && other.first != null ) return false;
-        if( this.second == null && other.second != null ) return false;
-
-        // Check to see whether the values are equal.
-        //  If the param of equals is null, it should by contract return false.
-        if( this.first != null && !this.first.equals(other.first) ) return false;
-        if( this.second != null && !this.second.equals(other.second) ) return false;        
-
-        return true;
-    }
-
-    /**
-     * Basic hashcode function.  Assume hashcodes of first and second are
-     * randomly distributed and return the XOR of the two.
-     * @return Randomly distributed hashcode of the pair.
-     */
-    @Override
-    public int hashCode() {
-        if( second == null && first == null )
-            return 0;
-        if( second == null )
-            return first.hashCode();
-        if( first == null )
-            return second.hashCode();
-        return first.hashCode() ^ second.hashCode();
-    }
-
-    public String toString() {
-        return first+","+second;
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/collections/Permutation.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/collections/Permutation.java
deleted file mode 100644
index 53eafe7..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/collections/Permutation.java
+++ /dev/null
@@ -1,103 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.collections;
-
-import java.util.List;
-
-/**
- * Represent a permutation of a ordered set or list of elements.
- *
- * @author Valentin Ruano-Rubio <valentin at broadinstitute.org>
- */
-public interface Permutation<E> {
-
-    /**
-     * Checks whether this permutation is a partial one of the original list.
-     *
-     * <p>
-     *     A partial permutation is one in that no all original elements take part of.
-     * </p>
-     *
-     * @return {@code true} iff this is a partial permutation.
-     */
-    public boolean isPartial();
-
-    /**
-     * Checks whether this is a trivial permutation where the resulting element list is the same as original.
-     *
-     * @return {@code true} iff the resulting element list is the same as the original.
-     */
-    public boolean isNonPermuted();
-
-    /**
-     * Given an index on the original list, returns the position of tha element in the resulting list.
-     *
-     * @param fromIndex the query original element index.
-     *
-     * @throws IllegalArgumentException if {@code fromIndex} is not a valid index within the original list.
-     *
-     * @return -1 if that element is not part of the result (partial) permutation, otherwise some number between
-     *   0 and {@link #toSize()} - 1.
-     */
-    public int toIndex(final int fromIndex);
-
-    /**
-     * Given an index on the resulting list, it gives you the index of that element on the original list.
-     * @param toIndex the query resulting list index.
-     *
-     * @throws IllegalArgumentException if {@code toIndex} is not a valid index, i.e. in [0,{@link #toSize()}-1).
-     *
-     * @return a value between 0 and {@link #fromSize()} - 1.
-     */
-    public int fromIndex(final int toIndex);
-
-    /**
-     * Length of the original element list.
-     *
-     * @return 0 or greater.
-     */
-    public int fromSize();
-
-    /**
-     * Length of the resulting element list.
-     *
-     * @return 0 or greater.
-     */
-    public int toSize();
-
-    /**
-     * Returns an unmodifiable view to the original element list.
-     * @return never {@code null}.
-     */
-    public List<E> fromList();
-
-    /**
-     * Returns an unmodifiable view to the original element list.
-     *
-     * @return never {@code null}.
-     */
-    public List<E> toList();
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/collections/PrimitivePair.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/collections/PrimitivePair.java
deleted file mode 100644
index 2b759ce..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/collections/PrimitivePair.java
+++ /dev/null
@@ -1,200 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.collections;
-
-
-/** This class is used to group together multiple Pair classes for
- *  primitive types (thanks to generics shortcomings, these implementations
- *  are more efficient then generic ones). This class contains no methods and
- *  no fields, but only declarations of inner classes.
- */
- 
-public class PrimitivePair {
-
-   /** Pair of two integers */
-  public static class Int {
-    // declare public, STL-style for easier and more efficient access:
-    public int first; 
-    public int second;
-
-    public Int(int x, int y) { first = x; second = y; }
-    public Int() { first = second = 0; }
-
-    public void set(int x, int y) { first = x; second = y; }
-
-    /** Java-style getter; note that we currently allow direct access to 
-        the member field.
-    */
-    public int getFirst() { return first; }
-
-    /** Java-style getter; note that we currently allow direct access to 
-        the member field.
-    */
-    public int getSecond() { return second; }
-
-       /** Increments the elements of this pair by the
-        * corresponding elements of the pair <code>p</code> and returns this
-        * pair (modified). This method does not allocate a new pair, but changes
-        * in place the values stored in the object the method is invoked from. The
-        * method is unsafe: if p is null, a runtime exception will be thrown.
-        * @param p
-        * @return
-        */
-    public PrimitivePair.Int add(PrimitivePair.Int p) {
-        first += p.first;
-        second += p.second;
-        return this;
-    }
-
-       /** Decrements the elements of this pair by the
-        * corresponding elements of the pair <code>p</code> and returns this
-        * pair (modified). This method does not allocate a new pair, but changes
-        * in place the values stored in the object the method is invoked from. The
-        * method is unsafe: if p is null, a runtime exception will be thrown.
-        * @param p
-        * @return
-        */
-    public PrimitivePair.Int subtract(PrimitivePair.Int p) {
-        first -= p.first;
-        second -= p.second;
-        return this;
-    }
-
-       /** Copies values from the argument <code>p</code> into the corresponding
-        * elements of this pair and returns this pair (modified).
-        * @param p
-        * @return
-        */
-    public PrimitivePair.Int assignFrom(PrimitivePair.Int p ) {
-        first = p.first;
-        second = p.second;
-        return this;
-    }
-
-
-  }
-
-    public static class Long {
-      // declare public, STL-style for easier and more efficient access:
-      public long first;
-      public long second;
-
-      public Long(long x, long y) { first = x; second = y; }
-      public Long() { first = second = 0; }
-
-      public void set(long x, long y) { first = x; second = y; }
-
-      /** Java-style getter; note that we currently allow direct access to
-          the member field.
-      */
-      public long getFirst() { return first; }
-
-      /** Java-style getter; note that we currently allow direct access to
-          the member field.
-      */
-      public long getSecond() { return second; }
-
-        /** Increments the elements of this pair by the
-         * corresponding elements of the pair <code>p</code> and returns this
-         * pair (modified). This method does not allocate a new pair, but changes
-         * in place the values stored in the object the method is invoked from. The
-         * method is unsafe: if p is null, a runtime exception will be thrown.
-         * @param p
-         * @return
-         */
-     public PrimitivePair.Long add(PrimitivePair.Int p) {
-         first += p.first;
-         second += p.second;
-         return this;
-     }
-
-        /** Increments the elements of this pair by the
-         * corresponding elements of the pair <code>p</code> and returns this
-         * pair (modified). This method does not allocate a new pair, but changes
-         * in place the values stored in the object the method is invoked from. The
-         * method is unsafe: if p is null, a runtime exception will be thrown.
-         * @param p
-         * @return
-         */
-     public PrimitivePair.Long add(PrimitivePair.Long p) {
-         first += p.first;
-         second += p.second;
-         return this;
-     }
-
-        /** Decrements the elements of this pair by the
-         * corresponding elements of the pair <code>p</code> and returns this
-         * pair (modified). This method does not allocate a new pair, but changes
-         * in place the values stored in the object the method is invoked from. The
-         * method is unsafe: if p is null, a runtime exception will be thrown.
-         * @param p
-         * @return
-         */
-     public PrimitivePair.Long subtract(PrimitivePair.Int p) {
-         first -= p.first;
-         second -= p.second;
-         return this;
-     }
-
-        /** Decrements the elements of this pair by the
-         * corresponding elements of the pair <code>p</code> and returns this
-         * pair (modified). This method does not allocate a new pair, but changes
-         * in place the values stored in the object the method is invoked from. The
-         * method is unsafe: if p is null, a runtime exception will be thrown.
-         * @param p
-         * @return
-         */
-     public PrimitivePair.Long subtract(PrimitivePair.Long p) {
-         first -= p.first;
-         second -= p.second;
-         return this;
-     }
-
-     /** Copies values from the argument <code>p</code> into the corresponding
-       * elements of this pair and returns this pair (modified).
-       * @param p
-       * @return
-     */
-     public PrimitivePair.Long assignFrom(PrimitivePair.Long p ) {
-         first = p.first;
-         second = p.second;
-         return this;
-     }
-
-     /** Copies values from the argument <code>p</code> into the corresponding
-       * elements of this pair and returns this pair (modified).
-       * @param p
-       * @return
-     */
-     public PrimitivePair.Long assignFrom(PrimitivePair.Int p ) {
-            first = p.first;
-            second = p.second;
-            return this;
-     }
-
-    }
-
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/collections/RODMergingIterator.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/collections/RODMergingIterator.java
deleted file mode 100644
index 7af62bd..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/collections/RODMergingIterator.java
+++ /dev/null
@@ -1,160 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.collections;
-
-import org.broadinstitute.gatk.engine.refdata.utils.LocationAwareSeekableRODIterator;
-import org.broadinstitute.gatk.engine.refdata.utils.RODRecordList;
-import org.broadinstitute.gatk.utils.GenomeLoc;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-
-import java.util.Collection;
-import java.util.Iterator;
-import java.util.LinkedList;
-import java.util.PriorityQueue;
-
-public class RODMergingIterator implements Iterator<RODRecordList>, Iterable<RODRecordList> {
-    PriorityQueue<Element> queue = new PriorityQueue<Element>();
-
-    private class Element implements Comparable<Element> {
-        public LocationAwareSeekableRODIterator it = null;
-        public GenomeLoc nextLoc = null;
-
-        public Element(Iterator<RODRecordList> it) {
-            if ( it instanceof LocationAwareSeekableRODIterator) {
-                this.it = (LocationAwareSeekableRODIterator)it;
-                if ( ! it.hasNext() ) throw new ReviewedGATKException("Iterator is empty");
-                update();
-            } else {
-                throw new ReviewedGATKException("Iterator passed to RODMergingIterator is not LocationAwareSeekableRODIterator");
-            }
-        }
-
-        public Element update() {
- //           E prev = value;
-            nextLoc = it.peekNextLocation(); // will return null if there is no next location
-            return this;
-        }
-
-        public int compareTo(Element other) {
-            if ( nextLoc == null ) {
-                if ( other.nextLoc != null ) return 1; // null means no more data available, so its after any non-null position
-                return 0;
-            }
-            if ( other.nextLoc == null ) return -1; // we can get to this point only if this.nextLoc != null
-
-            return nextLoc.compareTo(other.nextLoc);
-        }
-
-        public RODRecordList next() {
-            RODRecordList value = it.next();
-            update();
-            return value;
-        }
-    }
-
-    public Iterator<RODRecordList> iterator() {
-        return this;
-    }
-
-    public RODMergingIterator() {
-        ;
-    }
-
-    public RODMergingIterator(Iterator<RODRecordList> it) {
-         add(it);
-    }
-
-    public RODMergingIterator(Collection<Iterator<RODRecordList>> its) {
-        for ( Iterator<RODRecordList> it : its ) {
-            add(it);
-        }
-    }
-
-    /** If the iterator is non-empty (hasNext() is true), put it into the queue. The next location the iterator
-     * will be after a call to next() is peeked into and cached as queue's priority value.
-     * @param it
-     */
-    public void add(Iterator<RODRecordList> it) {
-        if ( it.hasNext() )
-            queue.add(new Element(it));
-    }
-
-    public boolean hasNext() {
-        return ! queue.isEmpty();
-    }
-
-    public RODRecordList next() {
-        Element e = queue.poll();
-        RODRecordList value = e.next(); // next() will also update next location cached by the Element
-
-        if ( e.nextLoc != null ) // we have more data in the track
-            queue.add(e); // add the element back to queue (note: its next location, on which priority is based, was updated
-
-        //System.out.printf("Element is %s%n", e.value);
-        return value;
-    }
-
-    /** Peeks into the genomic location of the record this iterator will return next.
-     *
-     * @return
-     */
-    public GenomeLoc peekLocation() {
-        return queue.peek().nextLoc;
-    }
-
-    public Collection<RODRecordList> allElementsLTE(RODRecordList elt) {
-        return allElementsLTE(elt, true);
-    }
-
-    public Collection<RODRecordList> allElementsLTE(RODRecordList elt, boolean includeElt) {
-        LinkedList<RODRecordList> all = new LinkedList<RODRecordList>();
-
-        if ( includeElt ) all.add(elt);
-        
-        while ( hasNext() ) {
-            Element x = queue.peek();
-            //System.out.printf("elt.compareTo(x) == %d%n", elt.compareTo(x));
-            //System.out.printf("In allElementLTE%n");
-            int cmp = elt.getLocation().compareTo(x.nextLoc);
-            //System.out.printf("x=%s%n  elt=%s%n  => elt.compareTo(x) == %d%n", x, elt, cmp);
-            if ( cmp >= 0 ) {
-                //System.out.printf("  Adding element x=%s, size = %d%n", x, all.size());
-                all.add(next());
-                //System.out.printf("  Added size = %d%n", all.size());
-            }
-            else {
-                //System.out.printf("breaking...%n");
-                break;
-            }
-        }
-
-        return all;
-    }
-
-    public void remove() {
-        throw new UnsupportedOperationException();
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/commandline/Advanced.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/commandline/Advanced.java
deleted file mode 100644
index 3995ff7..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/commandline/Advanced.java
+++ /dev/null
@@ -1,41 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.commandline;
-
-import java.lang.annotation.*;
-
-/**
- * Indicates that a walker argument should is considered an advanced option.
- *
- * @author Mark DePristo
- * @version 0.1
- */
- at Documented
- at Inherited
- at Retention(RetentionPolicy.RUNTIME)
- at Target({ElementType.TYPE,ElementType.FIELD})
-public @interface Advanced {
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/commandline/Argument.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/commandline/Argument.java
deleted file mode 100644
index 66c5629..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/commandline/Argument.java
+++ /dev/null
@@ -1,125 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.commandline;
-
-import java.lang.annotation.*;
-
-/**
- * Created by IntelliJ IDEA.
- * User: hanna
- * Date: Mar 24, 2009
- * Time: 11:11:36 AM
- */
-/**
- * Annotates fields in objects that should be used as command-line arguments.
- * Any field annotated with @Argument can appear as a command-line parameter. 
- */
- at Documented
- at Inherited
- at Retention(RetentionPolicy.RUNTIME)
- at Target(ElementType.FIELD)
-public @interface Argument {
-    /**
-     * The full name of the command-line argument.  Full names should be
-     * prefixed on the command-line with a double dash (--).
-     * @return Selected full name, or "" to use the default.
-     */
-    String fullName() default "";
-
-    /**
-     * Specified short name of the command.  Short names should be prefixed
-     * with a single dash.  Argument values can directly abut single-char
-     * short names or be separated from them by a space.
-     * @return Selected short name, or "" for none.
-     */
-    String shortName() default "";
-
-    /**
-     * Documentation for the command-line argument.  Should appear when the
-     * --help argument is specified. 
-     * @return Doc string associated with this command-line argument.
-     */
-    String doc() default "Undocumented option";
-
-    /**
-     * Is this argument required.  If true, the command-line argument system will
-     * make a best guess for populating this argument based on the type descriptor,
-     * and will fail if the type can't be populated.
-     * @return True if the argument is required.  False otherwise.
-     */
-    boolean required() default true;
-
-    /**
-     * Should this command-line argument be exclusive of others.  Should be
-     * a comma-separated list of names of arguments of which this should be
-     * independent.
-     * @return A comma-separated string listing other arguments of which this
-     *         argument should be independent.
-     */
-    String exclusiveOf() default "";
-
-    /**
-     * Provide a regexp-based validation string.
-     * @return Non-empty regexp for validation, blank otherwise. 
-     */
-    String validation() default "";
-
-    /**
-     * Hard lower bound on the allowed value for the annotated argument -- generates an exception if violated.
-     * Enforced only for numeric types whose values are explicitly specified on the command line.
-     *
-     * @return Hard lower bound on the allowed value for the annotated argument, or Double.NEGATIVE_INFINITY
-     *         if there is none.
-     */
-    double minValue() default Double.NEGATIVE_INFINITY;
-
-    /**
-     * Hard upper bound on the allowed value for the annotated argument -- generates an exception if violated.
-     * Enforced only for numeric types whose values are explicitly specified on the command line.
-     *
-     * @return Hard upper bound on the allowed value for the annotated argument, or Double.POSITIVE_INFINITY
-     *         if there is none.
-     */
-    double maxValue() default Double.POSITIVE_INFINITY;
-
-    /**
-     * Soft lower bound on the allowed value for the annotated argument -- generates a warning if violated.
-     * Enforced only for numeric types whose values are explicitly specified on the command line.
-     *
-     * @return Soft lower bound on the allowed value for the annotated argument, or Double.NEGATIVE_INFINITY
-     *         if there is none.
-     */
-    double minRecommendedValue() default Double.NEGATIVE_INFINITY;
-
-    /**
-     * Soft upper bound on the allowed value for the annotated argument -- generates a warning if violated.
-     * Enforced only for numeric types whose values are explicitly specified on the command line.
-     *
-     * @return Soft upper bound on the allowed value for the annotated argument, or Double.POSITIVE_INFINITY
-     *         if there is none.
-     */
-    double maxRecommendedValue() default Double.POSITIVE_INFINITY;
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/commandline/ArgumentCollection.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/commandline/ArgumentCollection.java
deleted file mode 100644
index c142f06..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/commandline/ArgumentCollection.java
+++ /dev/null
@@ -1,45 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.commandline;
-
-import java.lang.annotation.*;
-
-/**
- * @author aaron
- * @version 1.0
- * @date May 8, 2009
- * <p/>
- * @interface ArgumentCollection
- * <p/>
- * This object represents an class, that is a collection of arguments.
- */
- at Documented
- at Inherited
- at Retention(RetentionPolicy.RUNTIME)
- at Target({ElementType.FIELD})
-public @interface ArgumentCollection {
-
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/commandline/ArgumentDefinition.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/commandline/ArgumentDefinition.java
deleted file mode 100644
index f2e7e6e..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/commandline/ArgumentDefinition.java
+++ /dev/null
@@ -1,297 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.commandline;
-
-import org.broadinstitute.gatk.utils.Utils;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-
-import java.lang.annotation.Annotation;
-import java.util.List;
-
-/**
- * A specific argument definition.  Maps one-to-one with a field in some class.
- */
-public class ArgumentDefinition {
-    /**
-     * Whether an argument is an input or an output.
-     */
-    public final ArgumentIOType ioType;
-
-    /**
-     * The class of the argument.
-     */
-    public final Class argumentType;
-
-    /**
-     * Full name of the argument.  Must have a value.
-     */
-    public final String fullName;
-
-    /**
-     * Short name of the argument.  Can be null.
-     */
-    public final String shortName;
-
-    /**
-     * Doc string for the argument.  Displayed in help.
-     */
-    public final String doc;
-
-    /**
-     * Must this argument be specified on the command-line?  Note that there's a
-     * critical difference between the meaning of a required argument from the
-     * perspective of the argument source and the perspective of the argument
-     * definition: the argument source's required field indicates that the field
-     * should somehow be populated by the GATK (and fail if there's an error).
-     * The ArgumentDefinition required element means that the required element
-     * must be specified on the command-line.
-     */
-    public final boolean required;
-
-    /**
-     * Is this argument a flag?  Users can't specify a value for a flag.
-     */
-    public final boolean isFlag;
-
-    /**
-     * Does this argument support multiple values (repeated "-arg value1 -arg value2"-style structures).
-     */
-    public final boolean isMultiValued;
-
-    /**
-     * The class of the componentType.  Not used for scalars.
-     */
-    public final Class componentType;
-
-    /**
-     * Is this argument hidden from the help system?
-     */
-    public final boolean isHidden;
-
-    /**
-     * Is this argument exclusive of other arguments?
-     */
-    public final String exclusiveOf;
-
-    /**
-     * Can we validate this regular expression?
-     */
-    public final String validation;
-
-    /**
-     * A list of valid options for this argument, if there is a compelling subset.
-     */
-    public final List<String> validOptions;
-
-    /**
-     * Creates a new argument definition.
-     * @param ioType Whether the argument is an input or an output.
-     * @param argumentType The class of the field.
-     * @param fullName Full name for this argument definition.
-     * @param shortName Short name for this argument definition.
-     * @param doc Doc string for this argument.
-     * @param required Whether or not this argument is required.
-     * @param isFlag Whether or not this argument should be treated as a flag.
-     * @param isMultiValued Whether or not this argument supports multiple values.
-     * @param isHidden Whether or not this argument should be hidden from the command-line argument system.
-     * @param componentType For multivalued arguments the type of the components.
-     * @param exclusiveOf Whether this command line argument is mutually exclusive of other arguments.
-     * @param validation A regular expression for command-line argument validation.
-     * @param validOptions is there a particular list of options that's valid for this argument definition?  List them if so, otherwise set this to null. 
-     */
-    public ArgumentDefinition( ArgumentIOType ioType,
-                               Class argumentType,
-                               String fullName,
-                               String shortName,
-                               String doc,
-                               boolean required,
-                               boolean isFlag,
-                               boolean isMultiValued,
-                               boolean isHidden,
-                               Class componentType,
-                               String exclusiveOf,
-                               String validation,
-                               List<String> validOptions) {
-        this.ioType = ioType;
-        this.argumentType = argumentType;
-        this.fullName = fullName;
-        this.shortName = shortName;
-        this.doc = doc;
-        this.required = required;
-        this.isFlag = isFlag;
-        this.isMultiValued = isMultiValued;
-        this.isHidden = isHidden;
-        this.componentType = componentType;
-        this.exclusiveOf = exclusiveOf;
-        this.validation = validation;
-        this.validOptions = validOptions;
-
-        validateName(shortName);
-        validateName(fullName);
-    }
-
-    /**
-     * Creates a new argument definition.
-     * @param annotation The annotation on the field.
-     * @param argumentType The class of the field.
-     * @param defaultFullName Default full name for this argument definition.
-     * @param defaultShortName Default short name for this argument definition.
-     * @param isFlag Whether or not this argument should be treated as a flag.
-     * @param isMultiValued Whether or not this argument supports multiple values.
-     * @param componentType For multivalued arguments the type of the components.
-     * @param isHidden Whether or not this argument should be hidden from the command-line argument system.
-     * @param validOptions is there a particular list of options that's valid for this argument definition?  List them if so, otherwise set this to null.
-     */
-    public ArgumentDefinition( Annotation annotation,
-                               ArgumentIOType ioType,
-                               Class argumentType,
-                               String defaultFullName,
-                               String defaultShortName,
-                               String doc,
-                               boolean isRequired,
-                               boolean isFlag,
-                               boolean isMultiValued,
-                               boolean isHidden,
-                               Class componentType,
-                               String exclusiveOf,
-                               String validation,
-                               List<String> validOptions) {
-
-        String fullName = (String)CommandLineUtils.getValue(annotation, "fullName");
-        String shortName = (String)CommandLineUtils.getValue(annotation, "shortName");
-        boolean isFullNameProvided = fullName.trim().length() > 0;
-        boolean isShortNameProvided = shortName.trim().length() > 0;
-
-        fullName = isFullNameProvided ? fullName.trim() : defaultFullName;
-
-        // If the short name is provided, use that.  If the user hasn't provided any names at all, use
-        // the default.  If somewhere in the middle, leave the short name blank.
-        if( isShortNameProvided )
-            shortName = shortName.trim();
-        else if( !isFullNameProvided )
-            shortName = defaultShortName;
-        else
-            shortName = null;
-
-        validateName(shortName);
-        validateName(fullName);
-
-        this.ioType = ioType;
-        this.argumentType = argumentType;
-        this.fullName = fullName;
-        this.shortName = shortName;
-        this.doc = doc;
-        this.required = isRequired;
-        this.isFlag = isFlag;
-        this.isMultiValued = isMultiValued;
-        this.isHidden = isHidden;
-        this.componentType = componentType;
-        this.exclusiveOf = exclusiveOf;
-        this.validation = validation;
-        this.validOptions = validOptions;
-    }
-    
-    @Override
-    public int hashCode() {
-        int hashCode = fullName.hashCode();
-        if(shortName != null) hashCode ^= shortName.hashCode();
-        return hashCode;
-    }
-
-    public boolean equals( Object o ) {
-        if( o == null )
-            return false;
-        if( !(o instanceof ArgumentDefinition) )
-            return false;
-
-        ArgumentDefinition other = (ArgumentDefinition)o;
-
-        return Utils.equals(fullName,other.fullName) &&
-               Utils.equals(shortName,other.shortName);
-    }
-
-    /**
-     * Retrieves the full name of the argument, specifiable with the '--' prefix.  The full name can be
-     * either specified explicitly with the fullName annotation parameter or implied by the field name.
-     * @param annotation Original field annotation.
-     * @param fieldName Original field name.
-     * @return full name of the argument.  Never null.
-     */
-    public static String getFullName( Annotation annotation, String fieldName ) {
-        String fullName = (String)CommandLineUtils.getValue(annotation, "fullName");
-        return fullName.trim().length() > 0 ? fullName.trim() : fieldName.toLowerCase();
-    }
-
-    /**
-     * Retrieves the short name of the argument, specifiable with the '-' prefix.  The short name can
-     * be specified or not; if left unspecified, no short name will be present.
-     * @param annotation Original field annotation.
-     * @return short name of the argument.  Null if no short name exists.
-     */
-    public static String getShortName( Annotation annotation ) {
-        String shortName = (String)CommandLineUtils.getValue(annotation, "shortName");
-        return shortName.trim().length() > 0 ? shortName.trim() : null;
-    }
-
-    /**
-     * Documentation for this argument.  Mandatory field.
-     * @param annotation Original field annotation.
-     * @return Documentation for this argument.
-     */
-    public static String getDoc( Annotation annotation ) {
-        return (String)CommandLineUtils.getValue(annotation, "doc");
-    }
-
-    /**
-     * Specifies other arguments which cannot be used in conjunction with this argument.  Comma-separated list.
-     * @param annotation Original field annotation.
-     * @return A comma-separated list of exclusive arguments, or null if none are present.
-     */
-    public static String getExclusiveOf( Annotation annotation ) {
-        String exclusiveOf = (String)CommandLineUtils.getValue(annotation, "exclusiveOf");
-        return exclusiveOf.trim().length() > 0 ? exclusiveOf.trim() : null;
-    }
-
-    /**
-     * A regular expression which can be used for validation.
-     * @param annotation Original field annotation.
-     * @return a JVM regex-compatible regular expression, or null to permit any possible value.
-     */
-    public static String getValidationRegex( Annotation annotation ) {
-        String validation = (String)CommandLineUtils.getValue(annotation, "validation");
-        return validation.trim().length() > 0 ? validation.trim() : null;
-    }
-
-    /**
-     * Make sure the argument's name is valid
-     *
-     * @param name
-     */
-    private void validateName(final String name) {
-        if ( name != null && name.startsWith("-") )
-            throw new ReviewedGATKException("Invalid argument definition: " + name + " begins with a -");
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/commandline/ArgumentDefinitionGroup.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/commandline/ArgumentDefinitionGroup.java
deleted file mode 100644
index b6bb16c..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/commandline/ArgumentDefinitionGroup.java
+++ /dev/null
@@ -1,99 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.commandline;
-
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.Iterator;
-import java.util.List;
-
-/**
- * A group of argument definitions.
- */
-public class ArgumentDefinitionGroup implements Iterable<ArgumentDefinition> {
-    /**
-     * Name of this group.
-     */
-    public final String groupName;
-
-    /**
-     * The argument definitions associated with this group.
-     */
-    public final List<ArgumentDefinition> argumentDefinitions;
-
-    public ArgumentDefinitionGroup( String groupName, List<ArgumentDefinition> argumentDefinitions ) {
-        this.groupName = groupName;
-        this.argumentDefinitions = Collections.unmodifiableList( argumentDefinitions );
-    }
-
-    /**
-     * Does the name of this argument group match the name of another?
-     */
-    public boolean groupNameMatches( ArgumentDefinitionGroup other ) {
-        if( this.groupName == null )
-            return other.groupName == null;
-        return this.groupName.equals(other.groupName);
-    }
-
-    /**
-     * Merges another argument group into this argument group.  Return a new
-     * group since argument groups are supposed to be immutable. Asserts that
-     * both argument groups have the same name.
-     */
-    public ArgumentDefinitionGroup merge( ArgumentDefinitionGroup other ) {
-        if( !groupNameMatches(other) )
-            throw new ReviewedGATKException("Unable to merge two argument groups with differing names.");
-
-        // Create a merged definition group.
-        List<ArgumentDefinition> mergedDefinitions = new ArrayList<ArgumentDefinition>();
-        mergedDefinitions.addAll(this.argumentDefinitions);
-        mergedDefinitions.addAll(other.argumentDefinitions);
-
-        return new ArgumentDefinitionGroup(groupName,mergedDefinitions);
-    }
-
-    /**
-     * Iterate over the arguments in an argument definition group.
-     * @return
-     */
-    public Iterator<ArgumentDefinition> iterator() {
-        return argumentDefinitions.iterator();
-    }
-
-    /**
-     * Reports whether all the arguments in this group are hidden.
-     * @return True if all are hidden, false if some or none are hidden.
-     */
-    public boolean allHidden() {
-        for(ArgumentDefinition argumentDefinition: argumentDefinitions) {
-            if(!argumentDefinition.isHidden)
-                return false;
-        }
-        return true;
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/commandline/ArgumentDefinitions.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/commandline/ArgumentDefinitions.java
deleted file mode 100644
index 8bc17d7..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/commandline/ArgumentDefinitions.java
+++ /dev/null
@@ -1,195 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.commandline;
-
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-
-import java.util.Collection;
-import java.util.HashSet;
-import java.util.Iterator;
-import java.util.Set;
-
-/**
- * A collection of argument definitions.
- */
-public class ArgumentDefinitions implements Iterable<ArgumentDefinition> {
-    /**
-     * Backing data set of argument stored by short name and long name.
-     */
-    private Set<ArgumentDefinition> argumentDefinitions = new HashSet<ArgumentDefinition>();
-
-    /**
-     * The groupings of argument definitions.  Used mainly for help output.
-     */
-    private Set<ArgumentDefinitionGroup> argumentDefinitionGroups = new HashSet<ArgumentDefinitionGroup>();
-
-    /**
-     * Adds an argument to the this argument definition list.
-     * @param argumentDefinitionGroup The group of arguments to add.
-     */
-    public void add( ArgumentDefinitionGroup argumentDefinitionGroup ) {
-        for( ArgumentDefinition definition: argumentDefinitionGroup ) {
-            // Do some basic validation before adding the definition. 
-            if( definition.fullName.length() == 0 )
-                throw new IllegalArgumentException( "Argument cannot have 0-length fullname." );
-            if( hasArgumentDefinition( definition.fullName, FullNameDefinitionMatcher ) )
-                throw new ReviewedGATKException("Duplicate definition of argument with full name: " + definition.fullName);
-            if( definition.shortName != null && hasArgumentDefinition( definition.shortName, ShortNameDefinitionMatcher ) )
-                throw new ReviewedGATKException("Duplicate definition of argument with short name: " + definition.shortName);
-
-            argumentDefinitions.add( definition );
-        }
-
-        // Find an existing argument definition group with this name.
-        // If one exists, merge this group into the other.
-        Iterator<ArgumentDefinitionGroup> definitionGroupIterator = argumentDefinitionGroups.iterator();
-        while( definitionGroupIterator.hasNext() ) {
-            ArgumentDefinitionGroup candidate = definitionGroupIterator.next();            
-            if( candidate.groupNameMatches(argumentDefinitionGroup) ) {
-                argumentDefinitionGroup = candidate.merge(argumentDefinitionGroup);
-                definitionGroupIterator.remove();
-            }
-        }
-
-        // Otherwise, add the new group.
-        argumentDefinitionGroups.add( argumentDefinitionGroup );
-    }
-
-    /**
-     * Are there any argument definitions matching the given property?
-     * @param property Property to find.
-     * @param matcher Method of matching a given property.
-     * @return True if one or multiple argument definitions match; false otherwise.
-     */
-    public boolean hasArgumentDefinition( Object property, DefinitionMatcher matcher ) {
-        return findArgumentDefinitions( property, matcher ).size() > 0;
-    }
-
-    /**
-     * Find the given definition matching this property.
-     * @param property Property to find.
-     * @param matcher Method of matching a given property.
-     * @return The ArgumentDefinition matching the given property.  Null if none matches.
-     * @throws IllegalArgumentException if multiple arguments match this definition.
-     */
-    public ArgumentDefinition findArgumentDefinition( Object property, DefinitionMatcher matcher ) {
-        Collection<ArgumentDefinition> selectedDefinitions = findArgumentDefinitions( property, matcher );
-        if( selectedDefinitions.size() > 1 )
-            throw new IllegalArgumentException("Multiple argument definitions match the selected property: " + property);
-
-        if( selectedDefinitions.size() == 0 )
-            return null;
-
-        return selectedDefinitions.iterator().next();
-    }
-
-    /**
-     * Find all argument definitions matching a certain category.
-     * @param property Property to inspect.
-     * @param matcher Test to see whether property matches.
-     * @return All argument definitions matching a certain object.
-     */
-    public Collection<ArgumentDefinition> findArgumentDefinitions( Object property, DefinitionMatcher matcher ) {
-        Set<ArgumentDefinition> selectedArgumentDefinitions = new HashSet<ArgumentDefinition>();
-        for( ArgumentDefinition argumentDefinition: argumentDefinitions ) {
-            if( matcher.matches( argumentDefinition, property ) )
-                selectedArgumentDefinitions.add( argumentDefinition );
-        }
-        return selectedArgumentDefinitions;
-    }
-
-    /**
-     * Return a list of the available argument groups.
-     * @return All the argument groups that have been added.
-     */
-    public Collection<ArgumentDefinitionGroup> getArgumentDefinitionGroups() {
-        return argumentDefinitionGroups;
-    }
-
-    /**
-     * Iterates through all command-line arguments.
-     * @return an iterator over command-line arguments.
-     */
-    public Iterator<ArgumentDefinition> iterator() {
-        return argumentDefinitions.iterator();
-    }
-
-    /**
-     * Match the full name of a definition.
-     */
-    static DefinitionMatcher FullNameDefinitionMatcher = new DefinitionMatcher() {
-        public boolean matches( ArgumentDefinition definition, Object key ) {
-            if( definition.fullName == null )
-                return key == null;
-            else
-                return definition.fullName.equals( key );
-        }        
-    };
-
-    /**
-     * Match the short name of a definition.
-     */
-    static DefinitionMatcher ShortNameDefinitionMatcher = new DefinitionMatcher() {
-        public boolean matches( ArgumentDefinition definition, Object key ) {
-            if( definition.shortName == null )
-                return key == null;
-            else
-                return definition.shortName.equals( key );
-        }
-    };
-
-    /**
-     * Find all required definitions.
-     */
-    static DefinitionMatcher RequiredDefinitionMatcher = new DefinitionMatcher() {
-        public boolean matches( ArgumentDefinition definition, Object key ) {
-            if( !(key instanceof Boolean) )
-                throw new IllegalArgumentException("RequiredDefinitionMatcher requires boolean key");
-            return definition.required == (Boolean)key;
-        }
-    };
-
-    static DefinitionMatcher VerifiableDefinitionMatcher = new DefinitionMatcher() {
-        public boolean matches( ArgumentDefinition definition, Object key ) {
-            // We can perform some sort of validation for anything that isn't a flag or enum.
-            // Because enums can have a default value, it might be valid to specify an enum argument with no value
-            return !definition.isFlag  && !definition.argumentType.isEnum();
-        }        
-    };
-}
-
-/**
- * A Comparator-esque interface for finding argument definitions within a collection.
- */
-interface DefinitionMatcher {
-    /**
-     * Does the given definition match the provided key?
-     * @param definition The definition to inspect.
-     * @param key The value to match.
-     * @return True if the key matches the definition, false otherwise.
-     */
-    boolean matches( ArgumentDefinition definition, Object key );
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/commandline/ArgumentException.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/commandline/ArgumentException.java
deleted file mode 100644
index a55da89..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/commandline/ArgumentException.java
+++ /dev/null
@@ -1,38 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.commandline;
-
-import org.broadinstitute.gatk.utils.exceptions.UserException;
-
-/**
- * Generic class for handling misc parsing exceptions.
- */
-public class ArgumentException extends UserException {
-    public ArgumentException( String message ) {
-        super( message );
-    }
-}
-
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/commandline/ArgumentIOType.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/commandline/ArgumentIOType.java
deleted file mode 100644
index 27b8163..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/commandline/ArgumentIOType.java
+++ /dev/null
@@ -1,52 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.commandline;
-
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-
-import java.lang.annotation.Annotation;
-
-public enum ArgumentIOType {
-    INPUT(Input.class), OUTPUT(Output.class), ARGUMENT(Argument.class);
-
-    public final Class<? extends Annotation> annotationClass;
-
-    ArgumentIOType(Class<? extends Annotation> annotationClass) {
-        this.annotationClass = annotationClass;
-    }
-
-    /**
-     * Returns the ArgumentIOType for the annotation.
-     * @param annotation @Input or @Output
-     * @return ArgumentIOType.Input, Output, or Unknown
-     */
-    public static ArgumentIOType getIOType(Annotation annotation) {
-        for (ArgumentIOType ioType: ArgumentIOType.values())
-            if (ioType.annotationClass.isAssignableFrom(annotation.getClass()))
-                return ioType;
-        throw new ReviewedGATKException("Unknown annotation type: " + annotation);
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/commandline/ArgumentMatch.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/commandline/ArgumentMatch.java
deleted file mode 100644
index 885e02d..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/commandline/ArgumentMatch.java
+++ /dev/null
@@ -1,294 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.commandline;
-
-import org.broadinstitute.gatk.engine.walkers.Multiplexer;
-
-import java.util.*;
-
-/**
- * A mapping of all the sites where an argument definition maps to a site on the command line.
- */
-public class ArgumentMatch implements Iterable<ArgumentMatch> {
-    /**
-     * The argument definition that's been matched.
-     */
-    public final ArgumentDefinition definition;
-
-    /**
-     * The text that's been matched, as it appears in the command line arguments.
-     */
-    public final String label;
-
-    /**
-     * Maps indices of command line arguments to values paired with that argument.
-     */
-    public final SortedMap<ArgumentMatchSite,List<ArgumentMatchValue>> sites = new TreeMap<ArgumentMatchSite,List<ArgumentMatchValue>>();
-
-    /**
-     * An ordered, freeform collection of tags.
-     */
-    public final Tags tags;
-
-    /**
-     * Create a new argument match, defining its properties later.  Used to create invalid arguments.
-     */
-    public ArgumentMatch() {
-        this(null,null);
-    }
-
-    /**
-     * Minimal constructor for transform function.
-     * @param label Label of the argument match.  Must not be null.
-     * @param definition The associated definition, if one exists.  May be null.
-     */
-    private ArgumentMatch(final String label, final ArgumentDefinition definition) {
-        this.label = label;
-        this.definition = definition;
-        this.tags = new Tags();
-    }
-
-    /**
-     * A simple way of indicating that an argument with the given label and definition exists at this site.
-     * @param label Label of the argument match.  Must not be null.
-     * @param definition The associated definition, if one exists.  May be null.
-     * @param site Position of the argument.  Must not be null.
-     * @param tags ordered freeform text tags associated with this argument.
-     */
-    public ArgumentMatch(final String label, final ArgumentDefinition definition, final ArgumentMatchSite site, final Tags tags) {
-        this( label, definition, site, null, tags );
-    }
-
-    /**
-     * A simple way of indicating that an argument with the given label and definition exists at this site.
-     * @param label Label of the argument match.  Must not be null.
-     * @param definition The associated definition, if one exists.  May be null.
-     * @param site Position of the argument.  Must not be null.
-     * @param value Value for the argument at this position.
-     * @param tags ordered freeform text tags associated with this argument.
-     */
-    private ArgumentMatch(final String label, final ArgumentDefinition definition, final ArgumentMatchSite site, final ArgumentMatchValue value, final Tags tags) {
-        this.label = label;
-        this.definition = definition;
-
-        ArrayList<ArgumentMatchValue> values = new ArrayList<ArgumentMatchValue>();
-        if( value != null )
-            values.add(value);
-        sites.put(site,values );
-
-        this.tags = tags;
-    }
-
-    /**
-     * Check to see whether two ArgumentMatch objects are equal.
-     * @param other Object to which this should be compared.
-     * @return True if objects are equal, false if objects are not equal or incomparable.
-     */
-    @Override
-    public boolean equals(Object other) {
-        // this clearly isn't null, since this.equals() when this == null would result in an NPE.
-        if(other == null)
-            return false;
-        if(!(other instanceof ArgumentMatch))
-            return false;
-        ArgumentMatch otherArgumentMatch = (ArgumentMatch)other;
-        return this.definition.equals(otherArgumentMatch.definition) &&
-                this.label.equals(otherArgumentMatch.label) &&
-                this.sites.equals(otherArgumentMatch.sites) &&
-                this.tags.equals(otherArgumentMatch.tags);
-    }
-
-
-    /**
-     * Reformat the given entries with the given multiplexer and key.
-     * TODO: Generify this.
-     * @param multiplexer Multiplexer that controls the transformation process.
-     * @param key Key which specifies the transform.
-     * @return A variant of this ArgumentMatch with all keys transformed.
-     */
-    @SuppressWarnings("unchecked")
-    ArgumentMatch transform(Multiplexer multiplexer, Object key) {
-        SortedMap<ArgumentMatchSite,List<ArgumentMatchValue>> newIndices = new TreeMap<ArgumentMatchSite,List<ArgumentMatchValue>>();
-        for(Map.Entry<ArgumentMatchSite,List<ArgumentMatchValue>> site: sites.entrySet()) {
-            List<ArgumentMatchValue> newEntries = new ArrayList<ArgumentMatchValue>();
-            for(ArgumentMatchValue entry: site.getValue())
-                newEntries.add(new ArgumentMatchStringValue(multiplexer.transformArgument(key,entry.asString())));
-            newIndices.put(site.getKey(),newEntries);
-        }
-        ArgumentMatch newArgumentMatch = new ArgumentMatch(label,definition);
-        newArgumentMatch.sites.putAll(newIndices);
-        return newArgumentMatch;
-    }
-
-    /**
-     * Return a string representation of the given argument match, for debugging purposes.
-     * @return String representation of the match.
-     */
-    public String toString() {
-        return label;
-    }
-
-    /**
-     * Creates an iterator that walks over each individual match at each position of a given argument.
-     * @return An iterator over the individual matches in this argument.  Will not be null.
-     */
-    public Iterator<ArgumentMatch> iterator() {
-        return new Iterator<ArgumentMatch>() {
-            /**
-             * Iterate over each the available site.
-             */
-            private Iterator<ArgumentMatchSite> siteIterator = null;
-
-            /**
-             * Iterate over each available token.
-             */
-            private Iterator<ArgumentMatchValue> tokenIterator = null;
-
-            /**
-             * The next site to return.  Null if none remain.
-             */
-            ArgumentMatchSite nextSite = null;
-
-            /**
-             * The next token to return.  Null if none remain.
-             */
-            ArgumentMatchValue nextToken = null;
-
-            {
-                siteIterator = sites.keySet().iterator();
-                prepareNext();
-            }
-
-            /**
-             * Is there a nextToken available to return?
-             * @return True if there's another token waiting in the wings.  False otherwise.
-             */
-            public boolean hasNext() {
-                return nextSite != null;
-            }
-
-            /**
-             * Get the next token, if one exists.  If not, throw an IllegalStateException.
-             * @return The next ArgumentMatch in the series.  Should never be null.
-             */
-            public ArgumentMatch next() {
-                if( nextSite == null )
-                    throw new IllegalStateException( "No more ArgumentMatches are available" );
-
-                ArgumentMatch match = new ArgumentMatch( label, definition, nextSite, nextToken, tags );
-                prepareNext();
-                return match;
-            }
-
-            /**
-             * Initialize the next ArgumentMatch to return.  If no ArgumentMatches are available,
-             * initialize nextSite / nextToken to null.
-             */
-            private void prepareNext() {
-                if( tokenIterator != null && tokenIterator.hasNext() ) {
-                    nextToken = tokenIterator.next();
-                }
-                else {
-                    nextSite = null;
-                    nextToken = null;
-
-                    // Do a nested loop.  While more data is present in the inner loop, grab that data.
-                    // Otherwise, troll the outer iterator looking for more data.
-                    while( siteIterator.hasNext() ) {
-                        nextSite = siteIterator.next();
-                        if( sites.get(nextSite) != null ) {
-                            tokenIterator = sites.get(nextSite).iterator();
-                            nextToken = tokenIterator.hasNext() ? tokenIterator.next() : null;
-                            break;
-                        }
-                    }
-                }
-
-            }
-
-            /**
-             * Remove is unsupported in this context.
-             */
-            public void remove() {
-                throw new UnsupportedOperationException("Cannot remove an argument match from the collection while iterating.");
-            }
-        };
-    }
-
-    /**
-     * Merge two ArgumentMatches, so that the values for all arguments go into the
-     * same data structure.
-     * @param other The other match to merge into.
-     */
-    public void mergeInto( ArgumentMatch other ) {
-        sites.putAll(other.sites);
-    }
-
-    /**
-     * Associate a value with this merge maapping.
-     * @param site site of the command-line argument to which this value is mated.
-     * @param value Text representation of value to add.
-     */
-    public void addValue( ArgumentMatchSite site, ArgumentMatchValue value ) {
-        if( !sites.containsKey(site) || sites.get(site) == null )
-            sites.put(site, new ArrayList<ArgumentMatchValue>() );
-        sites.get(site).add(value);
-    }
-
-    /**
-     * Does this argument already have a value at the given site?
-     * Arguments are only allowed to be single-valued per site, and
-     * flags aren't allowed a value at all.
-     * @param site Site at which to check for values.
-     * @return True if the argument has a value at the given site.  False otherwise.
-     */
-    public boolean hasValueAtSite( ArgumentMatchSite site ) {
-        return (sites.get(site) != null && sites.get(site).size() >= 1) || isArgumentFlag();
-    }
-
-    /**
-     * Return the values associated with this argument match.
-     * @return A collection of the string representation of these value.
-     */
-    public List<ArgumentMatchValue> values() {
-        final List<ArgumentMatchValue> values = new ArrayList<ArgumentMatchValue>();
-        for ( final List<ArgumentMatchValue> siteValue : sites.values() ) {
-            if ( siteValue != null )
-                values.addAll(siteValue);
-            else
-                values.add(null);
-        }
-        return values;
-    }
-
-    /**
-     * Convenience method returning true if the definition is a flag.
-     * @return True if definition is known to be a flag; false if not known to be a flag.
-     */
-    private boolean isArgumentFlag() {
-        return definition != null && definition.isFlag;
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/commandline/ArgumentMatchFileValue.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/commandline/ArgumentMatchFileValue.java
deleted file mode 100644
index 3b9c8d3..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/commandline/ArgumentMatchFileValue.java
+++ /dev/null
@@ -1,52 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.commandline;
-
-import java.io.File;
-
-/**
- * Holds a reference to a file as an argument match value.
- *
- * This is useful when the type of the stored file may be a subclass of java.io.File,
- * for example a Queue RemoteFile.
- */
-public class ArgumentMatchFileValue extends ArgumentMatchValue {
-    private final File file;
-
-    public ArgumentMatchFileValue(File file) {
-        this.file = file;
-    }
-
-    @Override
-    public String asString() {
-        return file == null ? null : file.getAbsolutePath();
-    }
-
-    @Override
-    public File asFile() {
-        return file;
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/commandline/ArgumentMatchSite.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/commandline/ArgumentMatchSite.java
deleted file mode 100644
index 967d4c6..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/commandline/ArgumentMatchSite.java
+++ /dev/null
@@ -1,77 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.commandline;
-
-/**
- * Which source and the index within the source where an argument match was found.
- */
-public class ArgumentMatchSite implements Comparable<ArgumentMatchSite> {
-    private final ArgumentMatchSource source;
-    private final int index;
-
-    public ArgumentMatchSite(ArgumentMatchSource source, int index) {
-        this.source = source;
-        this.index = index;
-    }
-
-    public ArgumentMatchSource getSource() {
-        return source;
-    }
-
-    public int getIndex() {
-        return index;
-    }
-
-    @Override
-    public boolean equals(Object o) {
-        if (this == o) return true;
-        if (o == null || getClass() != o.getClass()) return false;
-
-        ArgumentMatchSite that = (ArgumentMatchSite) o;
-
-        return (index == that.index) && (source == null ? that.source == null : source.equals(that.source));
-    }
-
-    @Override
-    public int hashCode() {
-        int result = source != null ? source.hashCode() : 0;
-        // Generated by intellij. No other special reason to this implementation. -ks
-        result = 31 * result + index;
-        return result;
-    }
-
-    @Override
-    public int compareTo(ArgumentMatchSite that) {
-        int comp = this.source.compareTo(that.source);
-        if (comp != 0)
-            return comp;
-
-        // Both files are the same.
-        if (this.index == that.index)
-            return 0;
-        return this.index < that.index ? -1 : 1;
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/commandline/ArgumentMatchSource.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/commandline/ArgumentMatchSource.java
deleted file mode 100644
index a7ce7ba..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/commandline/ArgumentMatchSource.java
+++ /dev/null
@@ -1,97 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.commandline;
-
-/**
- * Where an argument match originated, via the commandline or a custom provider.
- */
-public class ArgumentMatchSource implements Comparable<ArgumentMatchSource> {
-    public static final ArgumentMatchSource COMMAND_LINE = new ArgumentMatchSource(ArgumentMatchSourceType.CommandLine, null);
-
-    private final ArgumentMatchSourceType type;
-    private final String description;
-
-    /**
-     * Creates an argument match source from the specified file.
-     * @param description Where the arguments originated.
-     */
-    public ArgumentMatchSource(String description) {
-        this(ArgumentMatchSourceType.Provider, description);
-    }
-
-    private ArgumentMatchSource(ArgumentMatchSourceType type, String description) {
-        if (type == ArgumentMatchSourceType.Provider && description == null)
-            throw new IllegalArgumentException("An argument match source provider cannot have a null description.");
-        this.type = type;
-        this.description = description;
-    }
-
-    public ArgumentMatchSourceType getType() {
-        return type;
-    }
-
-    public String getDescription() {
-        return description;
-    }
-
-    @Override
-    public boolean equals(Object o) {
-        if (this == o) return true;
-        if (o == null || getClass() != o.getClass()) return false;
-
-        ArgumentMatchSource that = (ArgumentMatchSource) o;
-
-        return (type == that.type) && (description == null ? that.description == null : description.equals(that.description));
-    }
-
-    @Override
-    public int hashCode() {
-        int result = type != null ? type.hashCode() : 0;
-        result = 31 * result + (description != null ? description.hashCode() : 0);
-        return result;
-    }
-
-    /**
-     * Compares two sources, putting the command line first, then files.
-     */
-    @Override
-    public int compareTo(ArgumentMatchSource that) {
-        int comp = this.type.compareTo(that.type);
-        if (comp != 0)
-            return comp;
-
-        String d1 = this.description;
-        String d2 = that.description;
-
-        if ((d1 == null) ^ (d2 == null)) {
-            // If one of the descriptions is null and the other is not
-            // put the null description first
-            return d1 == null ? -1 : 1;
-        }
-
-        return d1 == null ? 0 : d1.compareTo(d2);
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/commandline/ArgumentMatchSourceType.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/commandline/ArgumentMatchSourceType.java
deleted file mode 100644
index 9dee5be..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/commandline/ArgumentMatchSourceType.java
+++ /dev/null
@@ -1,33 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.commandline;
-
-/**
- * Type of where an argument match originated, via the commandline or a some other provider.
- */
-public enum ArgumentMatchSourceType {
-    CommandLine, Provider
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/commandline/ArgumentMatchStringValue.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/commandline/ArgumentMatchStringValue.java
deleted file mode 100644
index 9f772bc..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/commandline/ArgumentMatchStringValue.java
+++ /dev/null
@@ -1,49 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.commandline;
-
-import java.io.File;
-
-/**
- * Argument values that originated from a string.
- */
-public class ArgumentMatchStringValue extends ArgumentMatchValue {
-    private final String value;
-
-    public ArgumentMatchStringValue(String value) {
-        this.value = value;
-    }
-
-    @Override
-    public String asString() {
-        return value;
-    }
-
-    @Override
-    public File asFile() {
-        return value == null ? null : new File(value);
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/commandline/ArgumentMatchValue.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/commandline/ArgumentMatchValue.java
deleted file mode 100644
index f37d538..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/commandline/ArgumentMatchValue.java
+++ /dev/null
@@ -1,43 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.commandline;
-
-import java.io.File;
-
-/**
- * Returns argument values as either strings or values.
- */
-public abstract class ArgumentMatchValue {
-    /**
-     * @return the value of this argument as a String object.
-     */
-    public abstract String asString();
-
-    /**
-     * @return the value of this argument as a File object.
-     */
-    public abstract File asFile();
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/commandline/ArgumentMatches.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/commandline/ArgumentMatches.java
deleted file mode 100644
index 2d81cfc..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/commandline/ArgumentMatches.java
+++ /dev/null
@@ -1,211 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.commandline;
-
-import org.broadinstitute.gatk.engine.walkers.Multiplexer;
-
-import java.util.*;
-/**
- * Represents a list of potential matches between the arguments defined
- * by the argument sources and the arguments passed in via the command line.
- */
-public class ArgumentMatches implements Iterable<ArgumentMatch> {
-    /**
-     * Collection matches from argument definition to argument value.
-     * Package protected access is deliberate.
-     */
-    Map<ArgumentMatchSite,ArgumentMatch> argumentMatches = new TreeMap<ArgumentMatchSite,ArgumentMatch>();
-
-    /**
-     * Provide a place to put command-line argument values that don't seem to belong to
-     * any particular command-line option.
-     */
-    ArgumentMatch MissingArgument = new ArgumentMatch();
-
-    /**
-     * Get an iterator cycling through *unique* command-line argument <-> definition matches.
-     * @return Iterator over all argument matches.
-     */
-    public Iterator<ArgumentMatch> iterator() {
-        return getUniqueMatches().iterator();
-    }
-
-    /**
-     * Create an empty ArgumentMatches object.
-     */
-    public ArgumentMatches() {
-    }
-
-    /**
-     * Create a singleton ArgumentMatches object.
-     * @param match Match to incorporate.
-     */
-    public ArgumentMatches( ArgumentMatch match ) {
-        mergeInto( match );
-    }
-
-    /**
-     * Returns the number of matches in this structure.
-     * @return Count of the matches in this structure.
-     */
-    public int size() {
-        return argumentMatches.size();
-    }
-
-    /**
-     * Indicates whether the site contains a matched argument.
-     * @param site Site at which to check.
-     * @return True if the site has a match.  False otherwise.
-     */
-    boolean hasMatch( ArgumentMatchSite site ) {
-        return argumentMatches.containsKey( site );
-    }
-
-    /**
-     * Gets the match at a given site.
-     * @param site Site at which to look for a match.
-     * @return The match present at the given site.
-     * @throws IllegalArgumentException if site does not contain a match.
-     */
-    ArgumentMatch getMatch( ArgumentMatchSite site ) {
-        if( !argumentMatches.containsKey(site) )
-            throw new IllegalArgumentException( "Site does not contain an argument: " + site );
-        return argumentMatches.get(site);
-    }
-
-    /**
-     * Does the match collection have a match for this argument definition.
-     * @param definition Definition to match.
-     * @return True if a match exists; false otherwise.
-     */
-    boolean hasMatch( ArgumentDefinition definition ) {
-        return findMatches( definition ).size() > 0;
-    }
-
-    /**
-     * Return all argument matches of this source.
-     * @param parsingEngine Parsing engine.
-     * @param argumentSource Argument source to match.
-     * @return List of all matches.
-     */
-
-    ArgumentMatches findMatches(ParsingEngine parsingEngine, ArgumentSource argumentSource) {
-        List<ArgumentDefinition> sourceDefinitions = parsingEngine.selectBestTypeDescriptor(argumentSource.field.getType()).createArgumentDefinitions(argumentSource);
-
-        ArgumentMatches matches = new ArgumentMatches();
-        for( ArgumentMatch argumentMatch: getUniqueMatches() ) {
-            if( sourceDefinitions.contains(argumentMatch.definition) )
-                matches.mergeInto( argumentMatch );
-        }
-        return matches;
-    }
-
-    /**
-     * Return all argument matches of this definition.
-     * @param definition Argument definition to match.
-     * @return List of all matches.
-     */
-    ArgumentMatches findMatches( ArgumentDefinition definition ) {
-        ArgumentMatches matches = new ArgumentMatches();
-        for( ArgumentMatch argumentMatch: argumentMatches.values() ) {
-            if( argumentMatch.definition == definition )
-                matches.mergeInto( argumentMatch );
-        }
-        return matches;
-    }
-
-    /**
-     * Find all successful matches (a 'successful' match is one paired with a definition).
-     * @return All successful matches.
-     */
-    ArgumentMatches findSuccessfulMatches() {
-        ArgumentMatches matches = new ArgumentMatches();
-        for( ArgumentMatch argumentMatch: argumentMatches.values() ) {
-            if( argumentMatch.definition != null )
-                matches.mergeInto( argumentMatch );
-        }
-        return matches;
-    }
-
-    /**
-     * Find arguments that are unmatched to any definition.
-     * @return Set of matches that have no associated definition.
-     */
-    ArgumentMatches findUnmatched() {
-        ArgumentMatches matches = new ArgumentMatches();
-        for( ArgumentMatch argumentMatch: argumentMatches.values() ) {
-            if( argumentMatch.definition == null )
-                matches.mergeInto( argumentMatch );
-        }
-        return matches;
-    }
-
-    /**
-     * Reformat the given entries with the given multiplexer and key.
-     * TODO: Generify this.
-     * @param multiplexer Multiplexer that controls the transformation process.
-     * @param key Key which specifies the transform.
-     * @return new argument matches.
-     */
-    ArgumentMatches transform(Multiplexer multiplexer, Object key) {
-        ArgumentMatches newArgumentMatches = new ArgumentMatches();
-        for(ArgumentMatch match: argumentMatches.values())
-            newArgumentMatches.mergeInto(match.transform(multiplexer,key));
-        return newArgumentMatches;
-    }
-
-    /**
-     * Merges the given argument match into the set of existing argument matches.
-     * If multiple arguments are present, those arguments will end up grouped.
-     * @param match The match to merge into.
-     */
-    void mergeInto( ArgumentMatch match ) {
-        boolean definitionExists = false;
-
-        // Clone the list of argument matches to avoid ConcurrentModificationExceptions.
-        for( ArgumentMatch argumentMatch: getUniqueMatches() ) {
-            if( argumentMatch.definition == match.definition && argumentMatch.tags.equals(match.tags) ) {
-                argumentMatch.mergeInto( match );
-                for( ArgumentMatchSite site: match.sites.keySet() )
-                    argumentMatches.put( site, argumentMatch );
-                definitionExists = true;
-            }
-        }
-
-        if( !definitionExists ) {
-            for( ArgumentMatchSite site: match.sites.keySet() )
-                argumentMatches.put( site, match );
-        }
-    }    
-
-    /**
-     * Determines, of the argument matches by position, which are unique and returns that list.
-     * @return A unique set of matches.
-     */
-    private Set<ArgumentMatch> getUniqueMatches() {
-        return new LinkedHashSet<ArgumentMatch>( argumentMatches.values() );
-    }    
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/commandline/ArgumentSource.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/commandline/ArgumentSource.java
deleted file mode 100644
index 79e07a6..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/commandline/ArgumentSource.java
+++ /dev/null
@@ -1,243 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.commandline;
-
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-
-import java.lang.reflect.Field;
-import java.util.Arrays;
-import java.util.List;
-
-/**
- * Describes the source field which defines a command-line argument.
- * A parsed-object version of the command-line argument will be
- * injected into an object containing this field.
- *
- * @author mhanna
- * @version 0.1
- */
-public class ArgumentSource {
-    /**
-     * Field into which to inject command-line arguments.
-     */
-    public final Field[] parentFields;
-
-    /**
-     * Field into which to inject command-line arguments.
-     */
-    public final Field field;
-
-    /**
-     * Type descriptor to use when parsing new argument types.
-     */
-    private final ArgumentTypeDescriptor typeDescriptor;
-
-    /**
-     * Create a new command-line argument target.
-     * @param parentFields Parent fields containing the the field.  Field must be annotated with 'ArgumentCollection'.
-     * @param field Field containing the argument.  Field must be annotated with 'Input' or 'Output'.
-     * @param typeDescriptor custom type descriptor to use when parsing.
-     */
-    protected ArgumentSource( Field[] parentFields, Field field, ArgumentTypeDescriptor typeDescriptor) {
-        this.parentFields = parentFields;
-        this.field = field;
-        this.typeDescriptor = typeDescriptor;
-    }
-
-    /**
-     * Somewhat hackish copy constructor to track fields with a custom type descriptor.
-     * TODO: Separate type descriptor from ArgumentSource in general usage.
-     * @param typeDescriptor New type descriptor for the object.
-     */
-    public ArgumentSource copyWithCustomTypeDescriptor(final ArgumentTypeDescriptor typeDescriptor) {
-        return new ArgumentSource(parentFields,field,typeDescriptor);
-    }
-
-    /**
-     * True if this argument source equals other.
-     * @param other Another object, possibly an argument source, to test for equality.  Any object can
-     *              be tested, but only instances of ArgumentSource will result in equals returning true.
-     * @return True if this argument source matches other.  False otherwise.
-     */
-    @Override
-    public boolean equals( Object other ) {
-        if( other == null )
-            return false;
-        if( !(other instanceof ArgumentSource) )
-            return false;
-
-        ArgumentSource otherArgumentSource = (ArgumentSource)other;
-        return this.field == otherArgumentSource.field && Arrays.equals(this.parentFields, otherArgumentSource.parentFields);
-    }
-
-    /**
-     * Returns an appropriate hash code for this argument source.
-     * @return A uniformly distributed hashcode representing this argument source.
-     */
-    @Override
-    public int hashCode() {
-        return field.hashCode();
-    }
-
-    /**
-     * Generate a list of all argument definitions to which this argument source maps.
-     * @return A non-null, non-empty list of argument definitions.
-     */
-    public List<ArgumentDefinition> createArgumentDefinitions() {
-        return typeDescriptor.createArgumentDefinitions( this );
-    }
-
-    /**
-     * Parses the specified value based on the specified type.
-     * @param values String representation of all values passed.
-     * @return the parsed value of the object.
-     */
-    public Object parse( ParsingEngine parsingEngine, ArgumentMatches values ) {
-        return typeDescriptor.parse( parsingEngine, this, values );
-    }
-
-    /**
-     * Returns whether this field is required.  Note that flag fields are always forced to 'not required'.
-     * @return True if the field is mandatory and not a boolean flag.  False otherwise.
-     */
-    public boolean isRequired() {
-        return (Boolean)CommandLineUtils.getValue(ArgumentTypeDescriptor.getArgumentAnnotation(this),"required");
-    }
-
-    /**
-     * Returns true if the argument is a flag (a 0-valued argument).
-     * @return True if argument is a flag; false otherwise.
-     */
-    public boolean isFlag() {
-        return (field.getType() == Boolean.class) || (field.getType() == Boolean.TYPE);
-    }
-
-    /**
-     * Can this argument support multiple values, or just one?
-     * @return True if the argument supports multiple values.
-     */
-    public boolean isMultiValued() {
-        return typeDescriptor.isMultiValued( this );
-    }
-
-    /**
-     * Should the given class be hidden from the command-line argument system.
-     * @return True if so.  False otherwise.
-     */
-    public boolean isHidden() {
-        return field.isAnnotationPresent(Hidden.class) || field.isAnnotationPresent(Deprecated.class);
-    }
-
-    /**
-     * Is the given argument considered an advanced option when displaying on the command-line argument system.
-     * @return True if so.  False otherwise.
-     */
-    public boolean isAdvanced() {
-        return field.isAnnotationPresent(Advanced.class);
-    }
-
-    /**
-     * Is the given argument an output.
-     * @return True if so. False otherwise.
-     */
-    public boolean isOutput() {
-        return field.isAnnotationPresent(Output.class);
-    }
-
-    /**
-     * Is the given argument an input.
-     * @return True if so. False otherwise.
-     */
-    public boolean isInput() {
-        return field.isAnnotationPresent(Input.class);
-    }
-
-    /**
-     * Is this command-line argument dependent on some primitive argument types?
-     * @return True if this command-line argument depends on other arguments; false otherwise.
-     */
-    public boolean isDependent() {
-        return typeDescriptor instanceof MultiplexArgumentTypeDescriptor;
-    }
-
-    /**
-     * Returns whether the field has been deprecated and should no longer be used.
-     * @return True if field has been deprecated.
-     */
-    public boolean isDeprecated() {
-        return field.isAnnotationPresent(Deprecated.class);
-    }
-
-    /**
-     * Returns whether the field should default to stdout if not provided explicitly on the command-line.
-     * @return True if field should default to stdout.
-     */
-    public boolean defaultsToStdout() {
-        return field.isAnnotationPresent(Output.class) && (Boolean)CommandLineUtils.getValue(ArgumentTypeDescriptor.getArgumentAnnotation(this),"defaultToStdout");
-    }
-
-    /**
-     * Returns false if a type-specific default can be employed.
-     * @return True to throw in a type specific default.  False otherwise.
-     */
-    public boolean createsTypeDefault() {
-        return typeDescriptor.createsTypeDefault(this);
-    }
-
-    public String typeDefaultDocString() {
-        return typeDescriptor.typeDefaultDocString(this);
-    }
-
-    /**
-     * Generates a default for the given type.
-     * @param parsingEngine the parsing engine used to validate this argument type descriptor.
-     * @return A default value for the given type.
-     */
-    public Object createTypeDefault(ParsingEngine parsingEngine) {
-        return typeDescriptor.createTypeDefault(parsingEngine,this,field.getGenericType());
-    }
-
-    /**
-     * Builds out a new type descriptor for the given dependent argument as a function
-     * of the containing object.
-     * @param parsingEngine the parsing engine to use when building out this custom type descriptor.
-     * @param containingObject The containing object.
-     * @return An argument type descriptor for the custom derivative field.
-     */
-    public MultiplexArgumentTypeDescriptor createDependentTypeDescriptor(ParsingEngine parsingEngine,Object containingObject) {
-        if(!isDependent())
-            throw new ReviewedGATKException("Field " + field.getName() + " is independent; no dependent type descriptor can be derived.");
-        return ((MultiplexArgumentTypeDescriptor)typeDescriptor).createCustomTypeDescriptor(parsingEngine,this,containingObject);
-    }
-
-    /**
-     * Gets a string representation of the argument source for debugging.
-     * @return String representation of the argument source.
-     */
-    public String toString() {
-        return field.getDeclaringClass().getSimpleName() + ": " + field.getName();
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/commandline/ArgumentTypeDescriptor.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/commandline/ArgumentTypeDescriptor.java
deleted file mode 100644
index 5bfc516..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/commandline/ArgumentTypeDescriptor.java
+++ /dev/null
@@ -1,1030 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.commandline;
-
-import org.apache.log4j.Logger;
-import htsjdk.tribble.Feature;
-import org.broadinstitute.gatk.engine.refdata.tracks.FeatureManager;
-import org.broadinstitute.gatk.engine.walkers.Multiplex;
-import org.broadinstitute.gatk.engine.walkers.Multiplexer;
-import org.broadinstitute.gatk.utils.classloader.JVMUtils;
-import org.broadinstitute.gatk.utils.exceptions.DynamicClassResolutionException;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-import org.broadinstitute.gatk.utils.exceptions.UserException;
-import org.broadinstitute.gatk.utils.text.XReadLines;
-
-import java.io.File;
-import java.io.IOException;
-import java.lang.annotation.Annotation;
-import java.lang.reflect.*;
-import java.util.*;
-
-/**
- * An descriptor capable of providing parsers that can parse any type
- * of supported command-line argument.
- *
- * @author mhanna
- * @version 0.1
- */
-public abstract class ArgumentTypeDescriptor {
-    private static Class[] ARGUMENT_ANNOTATIONS = {Input.class, Output.class, Argument.class};
-
-    /**
-     * our log, which we want to capture anything from org.broadinstitute.gatk
-     */
-    protected static final Logger logger = Logger.getLogger(ArgumentTypeDescriptor.class);
-
-    /**
-     * Fetch the given descriptor from the descriptor repository.
-     * @param descriptors the descriptors from which to select a good match.
-     * @param type Class for which to specify a descriptor.
-     * @return descriptor for the given type.
-     */
-    public static ArgumentTypeDescriptor selectBest( Collection<ArgumentTypeDescriptor> descriptors, Class type ) {
-        for( ArgumentTypeDescriptor descriptor: descriptors ) {
-            if( descriptor.supports(type) )
-                return descriptor;
-        }
-        throw new ReviewedGATKException("Can't process command-line arguments of type: " + type.getName());
-    }
-
-    /**
-     * Does this descriptor support classes of the given type?
-     * @param type The type to check.
-     * @return true if this descriptor supports the given type, false otherwise.
-     */
-    public abstract boolean supports( Class type );
-
-    /**
-     * Returns false if a type-specific default can be employed.
-     * @param source Source of the command-line argument.
-     * @return True to throw in a type specific default.  False otherwise.
-     */
-    public boolean createsTypeDefault(ArgumentSource source) { return false; }
-
-    /**
-     * Returns a documentation-friendly value for the default of a type descriptor.
-     * Must be overridden if createsTypeDefault return true.  cannot be called otherwise
-     * @param source Source of the command-line argument.
-     * @return Friendly string of the default value, for documentation.  If doesn't create a default, throws
-     * and UnsupportedOperationException
-     */
-    public String typeDefaultDocString(ArgumentSource source) {
-        throw new UnsupportedOperationException();
-    }
-
-    /**
-     * Generates a default for the given type.
-     *
-     * @param parsingEngine the parsing engine used to validate this argument type descriptor.
-     * @param source Source of the command-line argument.
-     * @param type Type of value to create, in case the command-line argument system wants influence.
-     * @return A default value for the given type.
-     */
-    public Object createTypeDefault(ParsingEngine parsingEngine,ArgumentSource source, Type type) { throw new UnsupportedOperationException("Unable to create default for type " + getClass()); }
-
-    /**
-     * Given the given argument source and attributes, synthesize argument definitions for command-line arguments.
-     * @param source Source class and field for the given argument.
-     * @return A list of command-line argument definitions supporting this field.
-     */
-    public List<ArgumentDefinition> createArgumentDefinitions( ArgumentSource source ) {
-        return Collections.singletonList(createDefaultArgumentDefinition(source));
-    }
-
-    /**
-     * Parses an argument source to an object.
-     * WARNING!  Mandatory side effect of parsing!  Each parse routine should register the tags it finds with the proper CommandLineProgram.
-     * TODO: Fix this, perhaps with an event model indicating that a new argument has been created.
-     *
-     * @param parsingEngine The engine responsible for parsing.
-     * @param source The source used to find the matches.
-     * @param matches The matches for the source.
-     * @return The parsed object.
-     */
-    public Object parse(ParsingEngine parsingEngine, ArgumentSource source, ArgumentMatches matches) {
-        return parse(parsingEngine, source, source.field.getGenericType(), matches);
-    }
-
-    /**
-     * Returns true if the field is a collection or an array.
-     * @param source The argument source to check.
-     * @return true if the field is a collection or an array.
-     */
-    public boolean isMultiValued( ArgumentSource source ) {
-        Class argumentType = source.field.getType();
-        return Collection.class.isAssignableFrom(argumentType) || argumentType.isArray();
-    }
-
-    /**
-     * By default, argument sources create argument definitions with a set of default values.
-     * Use this method to create the one simple argument definition.
-     * @param source argument source for which to create a default definition.
-     * @return The default definition for this argument source.
-     */
-    protected ArgumentDefinition createDefaultArgumentDefinition( ArgumentSource source ) {
-        Annotation argumentAnnotation = getArgumentAnnotation(source);
-        return new ArgumentDefinition( ArgumentIOType.getIOType(argumentAnnotation),
-                source.field.getType(),
-                ArgumentDefinition.getFullName(argumentAnnotation, source.field.getName()),
-                ArgumentDefinition.getShortName(argumentAnnotation),
-                ArgumentDefinition.getDoc(argumentAnnotation),
-                source.isRequired() && !createsTypeDefault(source) && !source.isFlag() && !source.isDeprecated(),
-                source.isFlag(),
-                source.isMultiValued(),
-                source.isHidden(),
-                makeRawTypeIfNecessary(getCollectionComponentType(source.field)),
-                ArgumentDefinition.getExclusiveOf(argumentAnnotation),
-                ArgumentDefinition.getValidationRegex(argumentAnnotation),
-                getValidOptions(source) );
-    }
-
-    /**
-     * Return the component type of a field, or String.class if the type cannot be found.
-     * @param field The reflected field to inspect.
-     * @return The parameterized component type, or String.class if the parameterized type could not be found.
-     * @throws IllegalArgumentException If more than one parameterized type is found on the field.
-     */
-    protected Type getCollectionComponentType( Field field ) {
-        return null;
-    }
-
-    /**
-     * Parses the argument matches for a class type into an object.
-     * @param source The original argument source used to find the matches.
-     * @param type The current class type being inspected.  May not match the argument source.field.getType() if this as a collection for example.
-     * @param matches The argument matches for the argument source, or the individual argument match for a scalar if this is being called to help parse a collection.
-     * @return The individual parsed object matching the argument match with Class type.
-     */
-    public abstract Object parse( ParsingEngine parsingEngine, ArgumentSource source, Type type, ArgumentMatches matches );
-
-    /**
-     * If the argument source only accepts a small set of options, populate the returned list with
-     * those options.  Otherwise, leave the list empty.
-     * @param source Original field specifying command-line arguments.
-     * @return A list of valid options.
-     */
-    protected List<String> getValidOptions( ArgumentSource source ) {
-        if(!source.field.getType().isEnum())
-            return null;
-        List<String> validOptions = new ArrayList<String>();
-        for(Object constant: source.field.getType().getEnumConstants())
-            validOptions.add(constant.toString());
-        return validOptions;
-    }
-
-    /**
-     * Returns true if the argument with the given full name exists in the collection of ArgumentMatches.
-     * @param definition Definition of the argument for which to find matches.
-     * @param matches The matches for the given argument.
-     * @return true if the argument is present, or false if not present.
-     */
-    protected boolean argumentIsPresent( ArgumentDefinition definition, ArgumentMatches matches ) {
-        for( ArgumentMatch match: matches ) {
-            if( match.definition.equals(definition) )
-                return true;
-        }
-        return false;
-    }
-
-    /**
-     * Gets the value of an argument with the given full name, from the collection of ArgumentMatches.
-     * If the argument matches multiple values, an exception will be thrown.
-     * @param definition Definition of the argument for which to find matches.
-     * @param matches The matches for the given argument.
-     * @return The value of the argument if available, or null if not present.
-     */
-    protected ArgumentMatchValue getArgumentValue( ArgumentDefinition definition, ArgumentMatches matches ) {
-        Collection<ArgumentMatchValue> argumentValues = getArgumentValues( definition, matches );
-        if( argumentValues.size() > 1 )
-            throw new UserException.CommandLineException("Multiple values associated with given definition, but this argument expects only one: " + definition.fullName);
-        return argumentValues.size() > 0 ? argumentValues.iterator().next() : null;
-    }
-
-    /**
-     * Gets the tags associated with a given command-line argument.
-     * If the argument matches multiple values, an exception will be thrown.
-     * @param matches The matches for the given argument.
-     * @return The value of the argument if available, or null if not present.
-     */
-    protected Tags getArgumentTags(ArgumentMatches matches) {
-        Tags tags = new Tags();
-        for(ArgumentMatch match: matches) {
-            if(!tags.isEmpty() && !match.tags.isEmpty())
-                throw new ReviewedGATKException("BUG: multiple conflicting sets of tags are available, and the type descriptor specifies no way of resolving the conflict.");
-            tags = match.tags;
-        }
-        return tags;
-    }
-
-    /**
-     * Gets the values of an argument with the given full name, from the collection of ArgumentMatches.
-     * @param definition Definition of the argument for which to find matches.
-     * @param matches The matches for the given argument.
-     * @return The value of the argument if available, or an empty collection if not present.
-     */
-    protected Collection<ArgumentMatchValue> getArgumentValues( ArgumentDefinition definition, ArgumentMatches matches ) {
-        Collection<ArgumentMatchValue> values = new ArrayList<ArgumentMatchValue>();
-        for( ArgumentMatch match: matches ) {
-            if( match.definition.equals(definition) )
-                values.addAll(match.values());
-        }
-        return values;
-    }
-
-    /**
-     * Retrieves the argument description from the given argument source.  Will throw an exception if
-     * the given ArgumentSource
-     * @param source source of the argument.
-     * @return Argument description annotation associated with the given field.
-     */
-    @SuppressWarnings("unchecked")
-    protected static Annotation getArgumentAnnotation( ArgumentSource source ) {
-        for (Class annotation: ARGUMENT_ANNOTATIONS)
-            if (source.field.isAnnotationPresent(annotation))
-                return source.field.getAnnotation(annotation);
-        throw new ReviewedGATKException("ArgumentAnnotation is not present for the argument field: " + source.field.getName());
-    }
-
-    /**
-     * Returns true if an argument annotation is present
-     * @param field The field to check for an annotation.
-     * @return True if an argument annotation is present on the field.
-     */
-    @SuppressWarnings("unchecked")
-    public static boolean isArgumentAnnotationPresent(Field field) {
-        for (Class annotation: ARGUMENT_ANNOTATIONS)
-            if (field.isAnnotationPresent(annotation))
-                return true;
-        return false;
-    }
-
-    /**
-     * Returns true if the given annotation is hidden from the help system.
-     * @param field Field to test.
-     * @return True if argument should be hidden.  False otherwise.
-     */
-    public static boolean isArgumentHidden(Field field) {
-        return field.isAnnotationPresent(Hidden.class);
-    }
-
-    public static Class makeRawTypeIfNecessary(Type t) {
-        if ( t == null )
-            return null;
-        else if ( t instanceof ParameterizedType )
-            return (Class)((ParameterizedType) t).getRawType();
-        else if ( t instanceof Class ) {
-            return (Class)t;
-        } else {
-            throw new IllegalArgumentException("Unable to determine Class-derived component type of field: " + t);
-        }
-    }
-
-    /**
-     * The actual argument parsing method.
-     * @param source             source
-     * @param type               type to check
-     * @param matches            matches
-     * @param tags               argument tags
-     * @return the RodBinding/IntervalBinding object depending on the value of createIntervalBinding.
-     */
-    protected Object parseBinding(ArgumentSource source, Type type, ArgumentMatches matches, Tags tags) {
-        ArgumentDefinition defaultDefinition = createDefaultArgumentDefinition(source);
-        ArgumentMatchValue value = getArgumentValue(defaultDefinition, matches);
-        @SuppressWarnings("unchecked")
-        Class<? extends Feature> parameterType = JVMUtils.getParameterizedTypeClass(type);
-        String name = defaultDefinition.fullName;
-
-        return parseBinding(value, parameterType, type, name, tags, source.field.getName());
-    }
-
-    /**
-     *
-     * @param value The source of the binding
-     * @param parameterType The Tribble Feature parameter type
-     * @param bindingClass The class type for the binding (ex: RodBinding, IntervalBinding, etc.) Must have the correct constructor for creating the binding.
-     * @param bindingName The name of the binding passed to the constructor.
-     * @param tags Tags for the binding used for parsing and passed to the constructor.
-     * @param fieldName The name of the field that was parsed. Used for error reporting.
-     * @return The newly created binding object of type bindingClass.
-     */
-    public static Object parseBinding(ArgumentMatchValue value, Class<? extends Feature> parameterType, Type bindingClass,
-                                      String bindingName, Tags tags, String fieldName) {
-        try {
-            String tribbleType = null;
-            // must have one or two tag values here
-            if ( tags.getPositionalTags().size() > 2 ) {
-                throw new UserException.CommandLineException(
-                        String.format("Unexpected number of positional tags for argument %s : %s. " +
-                                "Rod bindings only support -X:type and -X:name,type argument styles",
-                                value.asString(), fieldName));
-            } else if ( tags.getPositionalTags().size() == 2 ) {
-                // -X:name,type style
-                bindingName = tags.getPositionalTags().get(0);
-                tribbleType = tags.getPositionalTags().get(1);
-
-                FeatureManager manager = new FeatureManager();
-                if ( manager.getByName(tribbleType) == null )
-                    throw new UserException.UnknownTribbleType(
-                            tribbleType,
-                            String.format("Unable to find tribble type '%s' provided on the command line. " +
-                                    "Please select a correct type from among the supported types:%n%s",
-                                    tribbleType, manager.userFriendlyListOfAvailableFeatures(parameterType)));
-
-            } else {
-                // case with 0 or 1 positional tags
-                FeatureManager manager = new FeatureManager();
-
-                // -X:type style is a type when we cannot determine the type dynamically
-                String tag1 = tags.getPositionalTags().size() == 1 ? tags.getPositionalTags().get(0) : null;
-                if ( tag1 != null ) {
-                    if ( manager.getByName(tag1) != null ) // this a type
-                        tribbleType = tag1;
-                    else
-                        bindingName = tag1;
-                }
-
-                if ( tribbleType == null ) {
-                    // try to determine the file type dynamically
-                    File file = value.asFile();
-                    if ( file.canRead() && file.isFile() ) {
-                        FeatureManager.FeatureDescriptor featureDescriptor = manager.getByFiletype(file);
-                        if ( featureDescriptor != null ) {
-                            tribbleType = featureDescriptor.getName();
-                            logger.debug("Dynamically determined type of " + file + " to be " + tribbleType);
-                        }
-                    }
-
-                    if ( tribbleType == null ) {
-                        // IntervalBinding can be created from a normal String
-                        Class rawType = (makeRawTypeIfNecessary(bindingClass));
-                        try {
-                            return rawType.getConstructor(String.class).newInstance(value.asString());
-                        } catch (NoSuchMethodException e) {
-                            /* ignore */
-                        }
-
-                        if ( ! file.exists() ) {
-                            throw new UserException.CouldNotReadInputFile(file, "file does not exist");
-                        } else if ( ! file.canRead() || ! file.isFile() ) {
-                            throw new UserException.CouldNotReadInputFile(file, "file could not be read");
-                        } else {
-                            throw new UserException.CommandLineException(
-                                    String.format("No tribble type was provided on the command line and the type of the file could not be determined dynamically. " +
-                                            "Please add an explicit type tag :NAME listing the correct type from among the supported types:%n%s",
-                                            manager.userFriendlyListOfAvailableFeatures(parameterType)));
-                        }
-                    }
-                }
-            }
-
-            Constructor ctor = (makeRawTypeIfNecessary(bindingClass)).getConstructor(Class.class, String.class, String.class, String.class, Tags.class);
-            return ctor.newInstance(parameterType, bindingName, value.asString(), tribbleType, tags);
-        } catch (Exception e) {
-            if ( e instanceof UserException )
-                throw ((UserException)e);
-            else
-                throw new UserException.CommandLineException(
-                        String.format("Failed to parse value %s for argument %s. Message: %s",
-                                value, fieldName, e.getMessage()));
-        }
-    }
-
-    /**
-     * Parse the source of a RodBindingCollection, which can be either a file of RodBindings or an actual RodBinding.
-     *
-     * @param parsingEngine the parsing engine used to validate this argument type descriptor
-     * @param source             source
-     * @param type               type
-     * @param matches            matches
-     * @param tags               argument tags
-     * @return the newly created binding object
-     */
-    public Object parseRodBindingCollectionSource(final ParsingEngine parsingEngine,
-                                                  final ArgumentSource source,
-                                                  final Type type,
-                                                  final ArgumentMatches matches,
-                                                  final Tags tags) {
-
-        final ArgumentDefinition defaultDefinition = createDefaultArgumentDefinition(source);
-        final ArgumentMatchValue value = getArgumentValue(defaultDefinition, matches);
-        @SuppressWarnings("unchecked")
-        Class<? extends Feature> parameterType = JVMUtils.getParameterizedTypeClass(type);
-        String name = defaultDefinition.fullName;
-
-        // if this a list of files, get those bindings
-        final File file = value.asFile();
-        try {
-            if (file.getAbsolutePath().endsWith(".list")) {
-                return getRodBindingsCollection(file, parsingEngine, parameterType, name, tags, source.field.getName());
-            }
-        } catch (IOException e) {
-            throw new UserException.CouldNotReadInputFile(file, e);
-        }
-
-        // otherwise, treat this as an individual binding
-        final RodBinding binding = (RodBinding)parseBinding(value, parameterType, RodBinding.class, name, tags, source.field.getName());
-        parsingEngine.addTags(binding, tags);
-        parsingEngine.addRodBinding(binding);
-        return RodBindingCollection.createRodBindingCollectionOfType(parameterType, Arrays.asList(binding));
-    }
-
-    /**
-     * Retrieve and parse a collection of RodBindings from the given file.
-     *
-     * If the file contains duplicate entries or is empty, an exception will be thrown.
-     *
-     * @param file             the source file
-     * @param parsingEngine    the engine responsible for parsing
-     * @param parameterType    the Tribble Feature parameter type
-     * @param bindingName      the name of the binding passed to the constructor.
-     * @param defaultTags      general tags for the binding used for parsing and passed to the constructor.
-     * @param fieldName        the name of the field that was parsed. Used for error reporting.
-     * @return the newly created collection of binding objects.
-     */
-    public static Object getRodBindingsCollection(final File file,
-                                                  final ParsingEngine parsingEngine,
-                                                  final Class<? extends Feature> parameterType,
-                                                  final String bindingName,
-                                                  final Tags defaultTags,
-                                                  final String fieldName) throws IOException {
-        final List<RodBinding> bindings = new ArrayList<>();
-
-        // Keep track of the files in this list so that we can check for duplicates and empty files
-        final Set<String> fileValues = new HashSet<>();
-
-        // parse each line separately using the given Tags if none are provided on each line
-        for ( final String line: new XReadLines(file) ) {
-            final String[] tokens = line.split("\\s+");
-            final RodBinding binding;
-
-            if ( tokens.length == 0 ) {
-                continue; // empty line, so do nothing
-            }
-            // use the default tags if none are provided for this binding
-            else if ( tokens.length == 1 ) {
-                final ArgumentMatchValue value = parseAndValidateArgumentMatchValue(tokens[0], fileValues, fieldName, file.getName());
-                binding = (RodBinding)parseBinding(value, parameterType, RodBinding.class, bindingName, defaultTags, fieldName);
-                parsingEngine.addTags(binding, defaultTags);
-
-            }
-            // use the new tags if provided
-            else if ( tokens.length == 2 ) {
-                final Tags tags = ParsingMethod.parseTags(fieldName, tokens[0]);
-                final ArgumentMatchValue value = parseAndValidateArgumentMatchValue(tokens[1], fileValues, fieldName, file.getName());
-                binding = (RodBinding)parseBinding(value, parameterType, RodBinding.class, bindingName, tags, fieldName);
-                parsingEngine.addTags(binding, tags);
-            } else {
-                throw new UserException.BadArgumentValue(fieldName, "data lines should consist of an optional set of tags along with a path to a file; too many tokens are present for line: " + line);
-            }
-
-            bindings.add(binding);
-            parsingEngine.addRodBinding(binding);
-        }
-
-        if (fileValues.isEmpty()) {
-            throw new UserException.BadArgumentValue(fieldName, "The input list " + file.getName() + " is empty.");
-        }
-
-        return RodBindingCollection.createRodBindingCollectionOfType(parameterType, bindings);
-    }
-
-    /**
-     * Validates the resource file name and constructs an ArgumentMatchValue from it.
-     *
-     * If the list name has already been processed in the current list, throws a UserException, otherwise
-     * creates an ArgumentMatchValue to represent the list.
-     *
-     * @param token Name of the ROD resource file.
-     * @param fileValues Set of names of ROD files that have already been processed.
-     * @param fieldName Name of the argument field being populated.
-     * @param listFileName Name of the list file being processed.
-     * @return
-     */
-    private static ArgumentMatchValue parseAndValidateArgumentMatchValue(final String token, final Set<String> fileValues, final String fieldName,
-                                                                         final String listFileName) {
-        checkForDuplicateFileName(token, fileValues, fieldName, listFileName);
-        return new ArgumentMatchStringValue(token);
-    }
-
-    /**
-     * Checks to make sure that the current file name to be processed has not already been processed.
-     *
-     * Checks the name of the current file against the names that have already been processed, throwing
-     * an informative BadArgumentValue exception if it has already been seen. As a side effect adds the
-     * current file name to the set of filenames that have already been processed.
-     *
-     * @param currentFile Name of the current file to process
-     * @param processedFiles Set of file names that have already been processed
-     * @param fieldName Name of the argument that is being populated
-     * @param listName Filename of the list that is being processed
-     */
-    protected static void checkForDuplicateFileName(final String currentFile, final Set<String> processedFiles,
-                                                    final String fieldName, final String listName) {
-        if (processedFiles.contains(currentFile)) {
-            throw new UserException.BadArgumentValue(fieldName, "The input list " + listName + " contains file " + currentFile +
-                                                     " multiple times, which isn't allowed. If you are intentionally trying to " +
-                                                     "include the same file more than once, you will need to specify it in separate file lists.");
-        }
-        processedFiles.add(currentFile);
-    }
-}
-
-/**
- * Parser for RodBinding objects
- */
-class RodBindingArgumentTypeDescriptor extends ArgumentTypeDescriptor {
-    /**
-     * We only want RodBinding class objects
-     * @param type The type to check.
-     * @return true if the provided class is a RodBinding.class
-     */
-    @Override
-    public boolean supports( Class type ) {
-        return isRodBinding(type);
-    }
-
-    public static boolean isRodBinding( Class type ) {
-        return RodBinding.class.isAssignableFrom(type);
-    }
-
-    @Override
-    public boolean createsTypeDefault(ArgumentSource source) { return ! source.isRequired(); }
-
-    @Override
-    @SuppressWarnings("unchecked")
-    public Object createTypeDefault(ParsingEngine parsingEngine, ArgumentSource source, Type type) {
-        Class parameterType = JVMUtils.getParameterizedTypeClass(type);
-        return RodBinding.makeUnbound((Class<? extends Feature>)parameterType);
-    }
-
-    @Override
-    public String typeDefaultDocString(ArgumentSource source) {
-        return "none";
-    }
-
-    @Override
-    public Object parse(ParsingEngine parsingEngine, ArgumentSource source, Type type, ArgumentMatches matches) {
-        Tags tags = getArgumentTags(matches);
-        RodBinding rbind = (RodBinding)parseBinding(source, type, matches, tags);
-        parsingEngine.addTags(rbind, tags);
-        parsingEngine.addRodBinding(rbind);
-        return rbind;
-    }
-}
-
-/**
- * Parser for IntervalBinding objects
- */
-class IntervalBindingArgumentTypeDescriptor extends ArgumentTypeDescriptor {
-    /**
-     * We only want IntervalBinding class objects
-     * @param type The type to check.
-     * @return true if the provided class is an IntervalBinding.class
-     */
-    @Override
-    public boolean supports( Class type ) {
-        return isIntervalBinding(type);
-    }
-
-    public static boolean isIntervalBinding( Class type ) {
-        return IntervalBinding.class.isAssignableFrom(type);
-    }
-
-    /**
-     * See note from RodBindingArgumentTypeDescriptor.parse().
-     *
-     * @param parsingEngine      parsing engine
-     * @param source             source
-     * @param type               type to check
-     * @param matches            matches
-     * @return the IntervalBinding object.
-     */
-    @Override
-    public Object parse(ParsingEngine parsingEngine, ArgumentSource source, Type type, ArgumentMatches matches) {
-        return parseBinding(source, type, matches, getArgumentTags(matches));
-    }
-}
-
-/**
- * Parser for RodBindingCollection objects
- */
-class RodBindingCollectionArgumentTypeDescriptor extends ArgumentTypeDescriptor {
-    /**
-     * We only want RodBindingCollection class objects
-     * @param type The type to check.
-     * @return true if the provided class is an RodBindingCollection.class
-     */
-    @Override
-    public boolean supports( final Class type ) {
-        return isRodBindingCollection(type);
-    }
-
-    public static boolean isRodBindingCollection( final Class type ) {
-        return RodBindingCollection.class.isAssignableFrom(type);
-    }
-
-    /**
-     * See note from RodBindingArgumentTypeDescriptor.parse().
-     *
-     * @param parsingEngine      parsing engine
-     * @param source             source
-     * @param type               type to check
-     * @param matches            matches
-     * @return the IntervalBinding object.
-     */
-    @Override
-    public Object parse(final ParsingEngine parsingEngine, final ArgumentSource source, final Type type, final ArgumentMatches matches) {
-        final Tags tags = getArgumentTags(matches);
-        return parseRodBindingCollectionSource(parsingEngine, source, type, matches, tags);
-    }
-}
-
-/**
- * Parse simple argument types: java primitives, wrapper classes, and anything that has
- * a simple String constructor.
- */
-class SimpleArgumentTypeDescriptor extends ArgumentTypeDescriptor {
-
-    /**
-     * @param type  the class type
-     * @return true if this class is a binding type, false otherwise
-     */
-    private boolean isBinding(final Class type) {
-        return RodBindingArgumentTypeDescriptor.isRodBinding(type) ||
-                IntervalBindingArgumentTypeDescriptor.isIntervalBinding(type) ||
-                RodBindingCollectionArgumentTypeDescriptor.isRodBindingCollection(type);
-    }
-
-
-    @Override
-    public boolean supports( Class type ) {
-        if ( isBinding(type) ) return false;
-        if ( type.isPrimitive() ) return true;
-        if ( type.isEnum() ) return true;
-        if ( primitiveToWrapperMap.containsValue(type) ) return true;
-
-        try {
-            type.getConstructor(String.class);
-            return true;
-        }
-        catch( Exception ex ) {
-            // An exception thrown above means that the String constructor either doesn't
-            // exist or can't be accessed.  In either case, this descriptor doesn't support this type.
-            return false;
-        }
-    }
-
-    @Override
-    public Object parse(ParsingEngine parsingEngine, ArgumentSource source, Type fulltype, ArgumentMatches matches) {
-        Class type = makeRawTypeIfNecessary(fulltype);
-        if (source.isFlag())
-            return true;
-
-        ArgumentDefinition defaultDefinition = createDefaultArgumentDefinition(source);
-        ArgumentMatchValue value = getArgumentValue(defaultDefinition, matches);
-        Object result;
-        Tags tags = getArgumentTags(matches);
-
-        // lets go through the types we support
-        try {
-            if (type.isPrimitive()) {
-                Method valueOf = primitiveToWrapperMap.get(type).getMethod("valueOf",String.class);
-                if(value == null)
-                    throw new MissingArgumentValueException(createDefaultArgumentDefinition(source));
-                result = valueOf.invoke(null,value.asString().trim());
-            } else if (type.isEnum()) {
-                Object[] vals = type.getEnumConstants();
-                Object defaultEnumeration = null;  // as we look at options, record the default option if it exists
-                for (Object val : vals) {
-                    if (String.valueOf(val).equalsIgnoreCase(value == null ? null : value.asString())) return val;
-                    try { if (type.getField(val.toString()).isAnnotationPresent(EnumerationArgumentDefault.class)) defaultEnumeration = val; }
-                    catch (NoSuchFieldException e) { throw new ReviewedGATKException("parsing " + type.toString() + "doesn't contain the field " + val.toString()); }
-                }
-                // if their argument has no value (null), and there's a default, return that default for the enum value
-                if (defaultEnumeration != null && value == null)
-                    result = defaultEnumeration;
-                    // if their argument has no value and there's no default, throw a missing argument value exception.
-                    // TODO: Clean this up so that null values never make it to this point.  To fix this, we'll have to clean up the implementation of -U.
-                else if (value == null)
-                    throw new MissingArgumentValueException(createDefaultArgumentDefinition(source));
-                else
-                    throw new UnknownEnumeratedValueException(createDefaultArgumentDefinition(source),value.asString());
-            } else if (type.equals(File.class)) {
-                result = value == null ? null : value.asFile();
-            } else {
-                if (value == null)
-                    throw new MissingArgumentValueException(createDefaultArgumentDefinition(source));
-                Constructor ctor = type.getConstructor(String.class);
-                result = ctor.newInstance(value.asString());
-            }
-        } catch (UserException e) {
-            throw e;
-        } catch (InvocationTargetException e) {
-            throw new UserException.CommandLineException(String.format("Failed to parse value %s for argument %s.  This is most commonly caused by providing an incorrect data type (e.g. a double when an int is required)",
-                    value, source.field.getName()));
-        } catch (Exception e) {
-            throw new DynamicClassResolutionException(String.class, e);
-        }
-
-        // TODO FIXME!
-
-        // WARNING: Side effect!
-        parsingEngine.addTags(result,tags);
-
-        return result;
-    }
-
-
-    /**
-     * A mapping of the primitive types to their associated wrapper classes.  Is there really no way to infer
-     * this association available in the JRE?
-     */
-    private static Map<Class,Class> primitiveToWrapperMap = new HashMap<Class,Class>() {
-        {
-            put( Boolean.TYPE, Boolean.class );
-            put( Character.TYPE, Character.class );
-            put( Byte.TYPE, Byte.class );
-            put( Short.TYPE, Short.class );
-            put( Integer.TYPE, Integer.class );
-            put( Long.TYPE, Long.class );
-            put( Float.TYPE, Float.class );
-            put( Double.TYPE, Double.class );
-        }
-    };
-}
-
-/**
- * Process compound argument types: arrays, and typed and untyped collections.
- */
-class CompoundArgumentTypeDescriptor extends ArgumentTypeDescriptor {
-    @Override
-    public boolean supports( Class type ) {
-        return ( Collection.class.isAssignableFrom(type) || type.isArray() );
-    }
-
-    @Override
-    @SuppressWarnings("unchecked")
-    public Object parse(ParsingEngine parsingEngine,ArgumentSource source, Type fulltype, ArgumentMatches matches) {
-        Class type = makeRawTypeIfNecessary(fulltype);
-        Type componentType;
-        Object result;
-
-        if( Collection.class.isAssignableFrom(type) ) {
-
-            // If this is a generic interface, pick a concrete implementation to create and pass back.
-            // Because of type erasure, don't worry about creating one of exactly the correct type.
-            if( Modifier.isInterface(type.getModifiers()) || Modifier.isAbstract(type.getModifiers()) )
-            {
-                if( java.util.List.class.isAssignableFrom(type) ) type = ArrayList.class;
-                else if( java.util.Queue.class.isAssignableFrom(type) ) type = java.util.ArrayDeque.class;
-                else if( java.util.Set.class.isAssignableFrom(type) ) type = java.util.TreeSet.class;
-            }
-
-            componentType = getCollectionComponentType( source.field );
-            ArgumentTypeDescriptor componentArgumentParser = parsingEngine.selectBestTypeDescriptor(makeRawTypeIfNecessary(componentType));
-
-            Collection collection;
-            try {
-                collection = (Collection)type.newInstance();
-            }
-            catch (InstantiationException e) {
-                logger.fatal("ArgumentParser: InstantiationException: cannot convert field " + source.field.getName());
-                throw new ReviewedGATKException("constructFromString:InstantiationException: Failed conversion " + e.getMessage());
-            }
-            catch (IllegalAccessException e) {
-                logger.fatal("ArgumentParser: IllegalAccessException: cannot convert field " + source.field.getName());
-                throw new ReviewedGATKException("constructFromString:IllegalAccessException: Failed conversion " + e.getMessage());
-            }
-
-            for( ArgumentMatch match: matches ) {
-                for( ArgumentMatch value: match ) {
-                    Object object = componentArgumentParser.parse(parsingEngine,source,componentType,new ArgumentMatches(value));
-                    collection.add( object );
-                    // WARNING: Side effect!
-                    parsingEngine.addTags(object,value.tags);
-                }
-            }
-
-            result = collection;
-
-        }
-        else if( type.isArray() ) {
-            componentType = type.getComponentType();
-            ArgumentTypeDescriptor componentArgumentParser = parsingEngine.selectBestTypeDescriptor(makeRawTypeIfNecessary(componentType));
-
-            // Assemble a collection of individual values used in this computation.
-            Collection<ArgumentMatch> values = new ArrayList<ArgumentMatch>();
-            for( ArgumentMatch match: matches )
-                for( ArgumentMatch value: match )
-                    values.add(value);
-
-            result = Array.newInstance(makeRawTypeIfNecessary(componentType),values.size());
-
-            int i = 0;
-            for( ArgumentMatch value: values ) {
-                Object object = componentArgumentParser.parse(parsingEngine,source,componentType,new ArgumentMatches(value));
-                Array.set(result,i++,object);
-                // WARNING: Side effect!
-                parsingEngine.addTags(object,value.tags);
-            }
-        }
-        else
-            throw new ReviewedGATKException("Unsupported compound argument type: " + type);
-
-        return result;
-    }
-
-    /**
-     * Return the component type of a field, or String.class if the type cannot be found.
-     * @param field The reflected field to inspect.
-     * @return The parameterized component type, or String.class if the parameterized type could not be found.
-     * @throws IllegalArgumentException If more than one parameterized type is found on the field.
-     */
-    @Override
-    protected Type getCollectionComponentType( Field field ) {
-        // If this is a parameterized collection, find the contained type.  If blow up if more than one type exists.
-        if( field.getGenericType() instanceof ParameterizedType) {
-            ParameterizedType parameterizedType = (ParameterizedType)field.getGenericType();
-            if( parameterizedType.getActualTypeArguments().length > 1 )
-                throw new IllegalArgumentException("Unable to determine collection type of field: " + field.toString());
-            return parameterizedType.getActualTypeArguments()[0];
-        }
-        else
-            return String.class;
-    }
-}
-
-class MultiplexArgumentTypeDescriptor extends ArgumentTypeDescriptor {
-    /**
-     * The multiplexer controlling how data is split.
-     */
-    private final Multiplexer multiplexer;
-
-    /**
-     * The set of identifiers for the multiplexed entries.
-     */
-    private final Collection<?> multiplexedIds;
-
-    public MultiplexArgumentTypeDescriptor() {
-        this.multiplexer = null;
-        this.multiplexedIds = null;
-    }
-
-    /**
-     * Private constructor to use in creating a closure of the MultiplexArgumentTypeDescriptor specific to the
-     * given set of multiplexed ids.
-     * @param multiplexedIds The collection of multiplexed entries
-     */
-    private MultiplexArgumentTypeDescriptor(final Multiplexer multiplexer, final Collection<?> multiplexedIds) {
-        this.multiplexer = multiplexer;
-        this.multiplexedIds = multiplexedIds;
-    }
-
-    @Override
-    public boolean supports( Class type ) {
-        return ( Map.class.isAssignableFrom(type) );
-    }
-
-    @Override
-    public boolean createsTypeDefault(ArgumentSource source) {
-        // Multiplexing always creates a type default.
-        return true;
-    }
-
-    @Override
-    public Object createTypeDefault(ParsingEngine parsingEngine,ArgumentSource source, Type type) {
-        if(multiplexer == null || multiplexedIds == null)
-            throw new ReviewedGATKException("No multiplexed ids available");
-
-        Map<Object,Object> multiplexedMapping = new HashMap<Object,Object>();
-        Class componentType = makeRawTypeIfNecessary(getCollectionComponentType(source.field));
-        ArgumentTypeDescriptor componentTypeDescriptor = parsingEngine.selectBestTypeDescriptor(componentType);
-
-        for(Object id: multiplexedIds) {
-            Object value = null;
-            if(componentTypeDescriptor.createsTypeDefault(source))
-                value = componentTypeDescriptor.createTypeDefault(parsingEngine,source,componentType);
-            multiplexedMapping.put(id,value);
-        }
-        return multiplexedMapping;
-    }
-
-    @Override
-    public String typeDefaultDocString(ArgumentSource source) {
-        return "None";
-    }
-
-    @Override
-    public Object parse(ParsingEngine parsingEngine, ArgumentSource source, Type type, ArgumentMatches matches) {
-        if(multiplexedIds == null)
-            throw new ReviewedGATKException("Cannot directly parse a MultiplexArgumentTypeDescriptor; must create a derivative type descriptor first.");
-
-        Map<Object,Object> multiplexedMapping = new HashMap<Object,Object>();
-
-        Class componentType = makeRawTypeIfNecessary(getCollectionComponentType(source.field));
-
-
-        for(Object id: multiplexedIds) {
-            Object value = parsingEngine.selectBestTypeDescriptor(componentType).parse(parsingEngine,source,componentType,matches.transform(multiplexer,id));
-            multiplexedMapping.put(id,value);
-        }
-
-        parsingEngine.addTags(multiplexedMapping,getArgumentTags(matches));
-
-        return multiplexedMapping;
-    }
-
-    public MultiplexArgumentTypeDescriptor createCustomTypeDescriptor(ParsingEngine parsingEngine,ArgumentSource dependentArgument,Object containingObject) {
-        String[] sourceFields = dependentArgument.field.getAnnotation(Multiplex.class).arguments();
-
-        List<ArgumentSource> allSources = parsingEngine.extractArgumentSources(containingObject.getClass());
-        Class[] sourceTypes = new Class[sourceFields.length];
-        Object[] sourceValues = new Object[sourceFields.length];
-        int currentField = 0;
-
-        for(String sourceField: sourceFields) {
-            boolean fieldFound = false;
-            for(ArgumentSource source: allSources) {
-                if(!source.field.getName().equals(sourceField))
-                    continue;
-                if(source.field.isAnnotationPresent(Multiplex.class))
-                    throw new ReviewedGATKException("Command-line arguments can only depend on independent fields");
-                sourceTypes[currentField] = source.field.getType();
-                sourceValues[currentField] = JVMUtils.getFieldValue(source.field,containingObject);
-                currentField++;
-                fieldFound = true;
-            }
-            if(!fieldFound)
-                throw new ReviewedGATKException(String.format("Unable to find source field %s, referred to by dependent field %s",sourceField,dependentArgument.field.getName()));
-        }
-
-        Class<? extends Multiplexer> multiplexerType = dependentArgument.field.getAnnotation(Multiplex.class).value();
-        Constructor<? extends Multiplexer> multiplexerConstructor;
-        try {
-            multiplexerConstructor = multiplexerType.getConstructor(sourceTypes);
-            multiplexerConstructor.setAccessible(true);
-        }
-        catch(NoSuchMethodException ex) {
-            throw new ReviewedGATKException(String.format("Unable to find constructor for class %s with parameters %s",multiplexerType.getName(),Arrays.deepToString(sourceFields)),ex);
-        }
-
-        Multiplexer multiplexer;
-        try {
-            multiplexer = multiplexerConstructor.newInstance(sourceValues);
-        }
-        catch(IllegalAccessException ex) {
-            throw new ReviewedGATKException(String.format("Constructor for class %s with parameters %s is inaccessible",multiplexerType.getName(),Arrays.deepToString(sourceFields)),ex);
-        }
-        catch(InstantiationException ex) {
-            throw new ReviewedGATKException(String.format("Can't create class %s with parameters %s",multiplexerType.getName(),Arrays.deepToString(sourceFields)),ex);
-        }
-        catch(InvocationTargetException ex) {
-            throw new ReviewedGATKException(String.format("Can't invoke constructor of class %s with parameters %s",multiplexerType.getName(),Arrays.deepToString(sourceFields)),ex);
-        }
-
-        return new MultiplexArgumentTypeDescriptor(multiplexer,multiplexer.multiplex());
-    }
-
-    /**
-     * Return the component type of a field, or String.class if the type cannot be found.
-     * @param field The reflected field to inspect.
-     * @return The parameterized component type, or String.class if the parameterized type could not be found.
-     * @throws IllegalArgumentException If more than one parameterized type is found on the field.
-     */
-    @Override
-    protected Type getCollectionComponentType( Field field ) {
-        // Multiplex arguments must resolve to maps from which the clp should extract the second type.
-        if( field.getGenericType() instanceof ParameterizedType) {
-            ParameterizedType parameterizedType = (ParameterizedType)field.getGenericType();
-            if( parameterizedType.getActualTypeArguments().length != 2 )
-                throw new IllegalArgumentException("Unable to determine collection type of field: " + field.toString());
-            return (Class)parameterizedType.getActualTypeArguments()[1];
-        }
-        else
-            return String.class;
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/commandline/ClassType.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/commandline/ClassType.java
deleted file mode 100644
index d57d326..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/commandline/ClassType.java
+++ /dev/null
@@ -1,40 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.commandline;
-
-import java.lang.annotation.*;
-
-/**
- * Annotates generic fields where the parameterized type is not specified or erased.
- * Primarily used for Queue traits.  Defined in java since scala does not support RetentionPolicy.RUNTIME.
- */
-@Documented
-@Inherited
-@Retention(RetentionPolicy.RUNTIME)
-@Target({ElementType.FIELD})
-public @interface ClassType {
-    Class value();
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/commandline/CommandLineProgram.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/commandline/CommandLineProgram.java
deleted file mode 100644
index 80ebe2c..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/commandline/CommandLineProgram.java
+++ /dev/null
@@ -1,447 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.commandline;
-
-import org.apache.log4j.FileAppender;
-import org.apache.log4j.Level;
-import org.apache.log4j.Logger;
-import org.apache.log4j.PatternLayout;
-import org.broadinstitute.gatk.engine.CommandLineGATK;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-import org.broadinstitute.gatk.utils.help.ApplicationDetails;
-import org.broadinstitute.gatk.utils.help.HelpConstants;
-import org.broadinstitute.gatk.utils.help.HelpFormatter;
-
-import java.io.IOException;
-import java.util.*;
-
-public abstract class CommandLineProgram {
-
-    /** The command-line program and the arguments it returned. */
-    public ParsingEngine parser = null;
-
-    /**
-     * Setting INFO gets you INFO up to FATAL, setting ERROR gets you ERROR and FATAL level logging, and so on.
-     */
-    @Argument(fullName = "logging_level", shortName = "l", doc = "Set the minimum level of logging", required = false)
-    protected String logging_level = "INFO";
-
-    /**
-     * File to save the logging output.
-     */
-    @Output(fullName = "log_to_file", shortName = "log", doc = "Set the logging location", required = false)
-    protected String toFile = null;
-
-    /**
-     * This will produce a help message in the terminal with general usage information, listing available arguments
-     * as well as tool-specific information if applicable.
-     */
-    @Argument(fullName = "help", shortName = "h", doc = "Generate the help message", required = false)
-    public Boolean help = false;
-
-    /**
-     * Use this to check the version number of the GATK executable you are invoking. Note that the version number is
-     * always included in the output at the start of every run as well as any error message.
-     */
-    @Argument(fullName = "version", shortName = "version", doc ="Output version information", required = false)
-    public Boolean version = false;
-
-
-    /** our logging output patterns */
-    private static final String patternString = "%-5p %d{HH:mm:ss,SSS} %C{1} - %m %n";
-
-    static {
-        /**
-         * The very first thing that any GATK application does is forces the JVM locale into US English, so that we don't have
-         * to think about number formatting issues.
-         */
-        forceJVMLocaleToUSEnglish();
-        // setup a basic log configuration
-        CommandLineUtils.configureConsoleLogging();
-    }
-
-
-    /**
-     * Allows a given application to return a brief description of itself.
-     *
-     * @return An ApplicationDetails object describing the current application.  Should not be null.
-     */
-    protected ApplicationDetails getApplicationDetails() {
-        return new ApplicationDetails(ApplicationDetails.createDefaultHeader(getClass()),
-                                      Collections.<String>emptyList(),
-                                      ApplicationDetails.createDefaultRunningInstructions(getClass()),
-                                      null);
-    }
-
-    /**
-     * Subclasses of CommandLinePrograms can provide their own types of command-line arguments.
-     * @return A collection of type descriptors generating implementation-dependent placeholders.
-     */
-    protected Collection<ArgumentTypeDescriptor> getArgumentTypeDescriptors() {
-        return Collections.emptyList();
-    }
-
-    /**
-     * Will this application want to vary its argument list dynamically?
-     * If so, parse the command-line options and then prompt the subclass to return
-     * a list of argument providers.
-     *
-     * @return Whether the application should vary command-line arguments dynamically.
-     */
-    protected boolean canAddArgumentsDynamically() { return false; }
-
-    /**
-     * Provide a list of object to inspect, looking for additional command-line arguments.
-     *
-     * @return A list of objects to inspect.
-     */
-    protected Class[] getArgumentSources() {
-        return new Class[]{};
-    }
-
-    /**
-     * Name this argument source.  Provides the (full) class name as a default.
-     *
-     * @param source The argument source.
-     *
-     * @return a name for the argument source.
-     */
-    protected String getArgumentSourceName( Class source ) { return source.toString(); }
-
-    /**
-     * Sets the command-line parsing engine. Necessary for unit testing purposes.
-     * @param parser the new command-line parsing engine
-     */
-    public void setParser( ParsingEngine parser ) {
-        this.parser = parser;
-    }
-
-    /**
-     * this is the function that the inheriting class can expect to have called
-     * when all the argument processing is done
-     *
-     * @return the return code to exit the program with
-     * @throws Exception when an exception occurs
-     */
-    protected abstract int execute() throws Exception;
-
-    public static int result = -1;
-
-    @SuppressWarnings("unchecked")
-    public static void start(CommandLineProgram clp, String[] args) throws Exception {
-        start(clp, args, false);
-    }
-
-    /**
-     * This function is called to start processing the command line, and kick
-     * off the execute message of the program.
-     *
-     * @param clp  the command line program to execute
-     * @param args the command line arguments passed in
-     * @param dryRun dry run
-     * @throws Exception when an exception occurs
-     */
-    @SuppressWarnings("unchecked")
-    public static void start(CommandLineProgram clp, String[] args, boolean dryRun) throws Exception {
-
-        try {
-            // setup our log layout
-            PatternLayout layout = new PatternLayout();
-
-            Logger logger = CommandLineUtils.getStingLogger();
-
-            // now set the layout of all the loggers to our layout
-            CommandLineUtils.setLayout(logger, layout);
-
-            // Initialize the logger using the defaults.
-            clp.setupLoggerLevel(layout);
-
-            // setup the parser
-            ParsingEngine parser = clp.parser = new ParsingEngine(clp);
-            parser.addArgumentSource(clp.getClass());
-
-            Map<ArgumentMatchSource, ParsedArgs> parsedArgs;
-
-            // process the args
-            if (clp.canAddArgumentsDynamically()) {
-                // if the command-line program can toss in extra args, fetch them and reparse the arguments.
-                parser.parse(args);
-
-                // Allow invalid and missing required arguments to pass this validation step.
-                //   - InvalidArgument in case these arguments are specified by plugins.
-                //   - MissingRequiredArgument in case the user requested help.  Handle that later, once we've
-                //                             determined the full complement of arguments.
-                if ( ! dryRun )
-                    parser.validate(EnumSet.of(ParsingEngine.ValidationType.MissingRequiredArgument,
-                            ParsingEngine.ValidationType.InvalidArgument));
-                parser.loadArgumentsIntoObject(clp);
-
-                // Initialize the logger using the loaded command line.
-                clp.setupLoggerLevel(layout);
-
-                Class[] argumentSources = clp.getArgumentSources();
-                    for (Class argumentSource : argumentSources)
-                    parser.addArgumentSource(clp.getArgumentSourceName(argumentSource), argumentSource);
-                parsedArgs = parser.parse(args);
-
-                if (isVersionPresent(parser))
-                    printVersionAndExit();
-
-                if (isHelpPresent(parser))
-                    printHelpAndExit(clp, parser);
-
-                if ( ! dryRun ) parser.validate();
-            } else {
-                parsedArgs = parser.parse(args);
-
-                if ( ! dryRun ) {
-                    if (isHelpPresent(parser))
-                        printHelpAndExit(clp, parser);
-
-                    parser.validate();
-                }
-                parser.loadArgumentsIntoObject(clp);
-
-                // Initialize the logger using the loaded command line.
-                clp.setupLoggerLevel(layout);
-            }
-
-            if ( ! dryRun ) {
-                // if they specify a log location, output our data there
-                if (clp.toFile != null) {
-                    FileAppender appender;
-                    try {
-                        appender = new FileAppender(layout, clp.toFile, false);
-                        logger.addAppender(appender);
-                    } catch (IOException e) {
-                        throw new RuntimeException("Unable to re-route log output to " + clp.toFile + " make sure the destination exists");
-                    }
-                }
-
-                // regardless of what happens next, generate the header information
-                HelpFormatter.generateHeaderInformation(clp.getApplicationDetails(), parsedArgs);
-
-                // call the execute
-                CommandLineProgram.result = clp.execute();
-            }
-        }
-        catch (ArgumentException e) {
-            //clp.parser.printHelp(clp.getApplicationDetails());
-            // Rethrow the exception to exit with an error.
-            throw e;
-        }
-    }
-
-    /**
-     * Find fields in the object obj that look like command-line arguments, and put command-line
-     * arguments into them.
-     *
-     * @param obj Object to inspect for command line arguments.
-     */
-    public void loadArgumentsIntoObject(Object obj) {
-        parser.loadArgumentsIntoObject(obj);
-    }
-
-    /**
-     * this function checks the logger level passed in on the command line, taking the lowest
-     * level that was provided.
-     * @param layout Pattern layout to format based on the logger level.
-     */
-    private void setupLoggerLevel(PatternLayout layout) {
-        layout.setConversionPattern(patternString);
-
-        // set the default logger level
-        Level par;
-        if (logging_level.toUpperCase().equals("DEBUG")) {
-            par = Level.DEBUG;
-        } else if (logging_level.toUpperCase().equals("INFO")) {
-            par = Level.INFO;
-        } else if (logging_level.toUpperCase().equals("WARN")) {
-            par = Level.WARN;
-        } else if (logging_level.toUpperCase().equals("ERROR")) {
-            par = Level.ERROR;
-        } else if (logging_level.toUpperCase().equals("FATAL")) {
-            par = Level.FATAL;
-        } else if (logging_level.toUpperCase().equals("OFF")) {
-            par = Level.OFF;
-        } else {
-            // we don't understand the logging level, let's get out of here
-            throw new ArgumentException("Unable to match: " + logging_level + " to a logging level, make sure it's a valid level (DEBUG, INFO, WARN, ERROR, FATAL, OFF)");
-        }
-
-        Logger.getRootLogger().setLevel(par);
-    }
-
-    /**
-     * a function used to indicate an error occurred in the command line tool
-     */
-    private static void printDocumentationReference() {
-        errorPrintf("Visit our website and forum for extensive documentation and answers to %n");
-        errorPrintf("commonly asked questions " + HelpConstants.BASE_GATK_URL + "%n");
-    }
-
-
-    /**
-     * Do a cursory search for the given argument.
-     *
-     * @param parser Parser
-     *
-     * @return True if help is present; false otherwise.
-     */
-    private static boolean isHelpPresent(ParsingEngine parser) {
-        return parser.isArgumentPresent("help");
-    }
-
-    /**
-     * Print help and exit.
-     *
-     * @param clp    Instance of the command-line program.
-     * @param parser True if help is present; false otherwise.
-     */
-    private static void printHelpAndExit(CommandLineProgram clp, ParsingEngine parser) {
-        parser.printHelp(clp.getApplicationDetails());
-        System.exit(0);
-    }
-
-    /**
-     * Do a cursory search for the argument "version".
-     *
-     * @param parser Parser
-     *
-     * @return True if version is present; false otherwise.
-     */
-    private static boolean isVersionPresent(ParsingEngine parser) {
-        return parser.isArgumentPresent("version");
-    }
-
-    /**
-     * Print help and exit.
-     */
-    private static void printVersionAndExit() {
-        System.out.println(CommandLineGATK.getVersionNumber().toString());
-        System.exit(0);
-    }
-
-
-    private static void errorPrintf(String format, Object... s) {
-        String formatted = String.format(format, s);
-
-        if ( formatted.trim().equals("") )
-            System.err.println("##### ERROR");
-        else {
-            for ( String part : formatted.split("\n") ) {
-                System.err.println("##### ERROR " + part);
-            }
-        }
-    }
-
-
-    /**
-     * used to indicate an error occured
-     *
-     * @param msg the message
-     * @param t   the error
-     */
-    public static void exitSystemWithError(String msg, final Throwable t) {
-        errorPrintf("------------------------------------------------------------------------------------------%n");
-        errorPrintf("stack trace %n");
-        t.printStackTrace();
-
-        errorPrintf("------------------------------------------------------------------------------------------%n");
-        errorPrintf("A GATK RUNTIME ERROR has occurred (version %s):%n", CommandLineGATK.getVersionNumber());
-        errorPrintf("%n");
-        errorPrintf("This might be a bug. Please check the documentation guide to see if this is a known problem.%n");
-        errorPrintf("If not, please post the error message, with stack trace, to the GATK forum.%n");
-        printDocumentationReference();
-        if ( msg == null ) // some exceptions don't have detailed messages
-            msg = "Code exception (see stack trace for error itself)";
-        errorPrintf("%n");
-        errorPrintf("MESSAGE: %s%n", msg.trim());
-        errorPrintf("------------------------------------------------------------------------------------------%n");
-        System.exit(1);
-    }
-
-    public static void exitSystemWithUserError(final Exception e) {
-        if ( e.getMessage() == null )
-            throw new ReviewedGATKException("UserException found with no message!", e);
-
-        errorPrintf("------------------------------------------------------------------------------------------%n");
-        errorPrintf("A USER ERROR has occurred (version %s): %n", CommandLineGATK.getVersionNumber());
-        errorPrintf("%n");
-        errorPrintf("This means that one or more arguments or inputs in your command are incorrect.%n");
-        errorPrintf("The error message below tells you what is the problem.%n");
-        errorPrintf("%n");
-        errorPrintf("If the problem is an invalid argument, please check the online documentation guide%n");
-        errorPrintf("(or rerun your command with --help) to view allowable command-line arguments for this tool.%n");
-        errorPrintf("%n");
-        printDocumentationReference();
-        errorPrintf("%n");
-        errorPrintf("Please do NOT post this error to the GATK forum unless you have really tried to fix it yourself.%n");
-        errorPrintf("%n");
-        errorPrintf("MESSAGE: %s%n", e.getMessage().trim());
-        errorPrintf("------------------------------------------------------------------------------------------%n");
-        System.exit(1);
-    }
-
-    public static void exitSystemWithSamError(final Throwable t) {
-        if ( t.getMessage() == null )
-            throw new ReviewedGATKException("SamException found with no message!", t);
-
-        errorPrintf("------------------------------------------------------------------------------------------%n");
-        errorPrintf("A BAM ERROR has occurred (version %s): %n", CommandLineGATK.getVersionNumber());
-        errorPrintf("%n");
-        errorPrintf("This means that there is something wrong with the BAM file(s) you provided.%n");
-        errorPrintf("The error message below tells you what is the problem.%n");
-        errorPrintf("%n");
-        printDocumentationReference();
-        errorPrintf("%n");
-        errorPrintf("Please do NOT post this error to the GATK forum until you have followed these instructions:%n");
-        errorPrintf("- Make sure that your BAM file is well-formed by running Picard's validator on it%n");
-        errorPrintf("(see http://picard.sourceforge.net/command-line-overview.shtml#ValidateSamFile for details)%n");
-        errorPrintf("- Ensure that your BAM index is not corrupted: delete the current one and regenerate it with 'samtools index'%n");
-        errorPrintf("%n");
-        errorPrintf("MESSAGE: %s%n", t.getMessage().trim());
-        errorPrintf("------------------------------------------------------------------------------------------%n");
-        System.exit(1);
-    }
-
-
-    /**
-     * used to indicate an error occured
-     *
-     * @param t the exception that occurred
-     */
-    public static void exitSystemWithError(Throwable t) {
-        exitSystemWithError(t.getMessage(), t);
-    }
-
-    /**
-     * A hack to ensure that numbers are always formatted in the US style.
-     */
-    protected static void forceJVMLocaleToUSEnglish() {
-        Locale.setDefault(Locale.US);
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/commandline/CommandLineUtils.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/commandline/CommandLineUtils.java
deleted file mode 100644
index 70f5532..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/commandline/CommandLineUtils.java
+++ /dev/null
@@ -1,192 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.commandline;
-
-import org.apache.log4j.Appender;
-import org.apache.log4j.ConsoleAppender;
-import org.apache.log4j.Logger;
-import org.apache.log4j.PatternLayout;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-
-import java.lang.annotation.Annotation;
-import java.util.Collections;
-import java.util.Enumeration;
-import java.util.LinkedHashMap;
-import java.util.Map;
-
-/**
- * Static utility methods for working with command-line arguments.
- *
- * @author mhanna
- * @version 0.1
- */
-public class CommandLineUtils {
-
-    /**
-     * Returns a key-value mapping of the command-line arguments passed into the GATK.
-     * Will be approximate; this class doesn't have all the required data to completely
-     * reconstruct the list of command-line arguments from the given objects.
-     *
-     * @param parsingEngine      The parsing engine
-     * @param argumentProviders  The providers of command-line arguments.
-     * @return A key-value mapping of argument full names to argument values.  Produces best string representation
-     *         possible given the information available.
-     */
-    public static Map<String,String> getApproximateCommandLineArguments(ParsingEngine parsingEngine, Object... argumentProviders) {
-        return getApproximateCommandLineArguments(parsingEngine, false, argumentProviders);
-    }
-
-    /**
-     * Returns a key-value mapping of the command-line arguments passed into the GATK.
-     * Will be approximate; this class doesn't have all the required data to completely
-     * reconstruct the list of command-line arguments from the given objects.
-     * 
-     * @param parsingEngine      The parsing engine
-     * @param skipObjectPointers Should we skip arguments whose values are pointers (and don't print nicely)?
-     * @param argumentProviders  The providers of command-line arguments.
-     * @return A key-value mapping of argument full names to argument values.  Produces best string representation
-     *         possible given the information available.
-     */
-    public static Map<String,String> getApproximateCommandLineArguments(ParsingEngine parsingEngine, boolean skipObjectPointers, Object... argumentProviders) {
-        Map<String,String> commandLineArguments = new LinkedHashMap<String,String>();
-
-        for(Object argumentProvider: argumentProviders) {
-            Map<ArgumentSource, Object> argBindings = parsingEngine.extractArgumentBindings(argumentProvider);
-            for(Map.Entry<ArgumentSource, Object> elt: argBindings.entrySet()) {
-                Object argumentValue = elt.getValue();
-
-                String argumentValueString = argumentValue != null ? argumentValue.toString() : null;
-                if ( skipObjectPointers && isObjectPointer(argumentValueString) )
-                    continue;
-
-                for(ArgumentDefinition definition: elt.getKey().createArgumentDefinitions()) {
-                    String argumentName = definition.fullName;
-                    commandLineArguments.put(argumentName,argumentValueString);
-                }
-            }
-        }
-
-        return commandLineArguments;
-    }
-
-    /**
-     * Create an approximate list of command-line arguments based on the given argument providers.
-     * @param parsingEngine      The parsing engine
-     * @param argumentProviders  Argument providers to inspect.
-     * @return A string representing the given command-line arguments.
-     */
-    public static String createApproximateCommandLineArgumentString(ParsingEngine parsingEngine, Object... argumentProviders) {
-        return createApproximateCommandLineArgumentString(parsingEngine, true, argumentProviders);
-    }
-
-    /**
-     * Create an approximate list of command-line arguments based on the given argument providers.
-     * @param parsingEngine      The parsing engine
-     * @param skipObjectPointers Should we skip arguments whose values are pointers (and don't print nicely)?
-     * @param argumentProviders  Argument providers to inspect.
-     * @return A string representing the given command-line arguments.
-     */
-    public static String createApproximateCommandLineArgumentString(ParsingEngine parsingEngine, boolean skipObjectPointers, Object... argumentProviders) {
-        Map<String,String> commandLineArgs = getApproximateCommandLineArguments(parsingEngine, skipObjectPointers, argumentProviders);
-        StringBuffer sb = new StringBuffer();
-
-        boolean first = true;
-        for ( Map.Entry<String, String> commandLineArg : commandLineArgs.entrySet() ) {
-            if ( !first )
-                sb.append(" ");
-            sb.append(commandLineArg.getKey());
-            sb.append("=");
-            sb.append(commandLineArg.getValue());
-            first = false;
-        }
-
-        return sb.toString();
-    }
-
-    /**
-     * A hack to get around the fact that Java doesn't like inheritance in Annotations.
-     * @param annotation to run the method on
-     * @param method the method to invoke
-     * @return the return value of the method
-     */
-    public static Object getValue(Annotation annotation, String method) {
-        try {
-            return annotation.getClass().getMethod(method).invoke(annotation);
-        } catch (Exception e) {
-            throw new ReviewedGATKException("Unable to access method " + method + " on annotation " + annotation.getClass(), e);
-        }
-    }
-
-    // The problem here is that some of the fields being output are Objects - and those
-    //  Objects don't overload toString() so that the output is just the memory pointer
-    //  to the Object.  Because those values are non-deterministic, they don't merge well
-    //  into BAM/VCF headers (plus, it's just damn ugly).  Perhaps there's a better way to
-    //  do this, but at least this one works for the moment.
-    private static final String pointerRegexp = ".+@[0-9a-fA-F]+$";
-    private static boolean isObjectPointer(String s) {
-        return s != null && s.matches(pointerRegexp);
-    }
-
-    /**
-     * Returns the root logger for all GATK code.
-     * @return the root logger for all GATK  code.
-     */
-    public static Logger getStingLogger() {
-        return Logger.getLogger("org.broadinstitute.gatk");
-    }
-
-    /**
-     * Enables console logging.
-     */
-    @SuppressWarnings("unchecked")
-    public static void configureConsoleLogging() {
-        // Check to see if a console logger has already been enabled.
-        for (Logger logger = getStingLogger(); logger != null; logger = (Logger)logger.getParent()) {
-            Enumeration<Appender> e = (Enumeration<Appender>) logger.getAllAppenders();
-            for (Appender appender: Collections.list(e)) {
-                if (appender instanceof ConsoleAppender)
-                    return;
-            }
-        }
-        // Extracted from BasicConfigurator.configure(), but only applied to the GATK logger.
-        Logger.getRootLogger().addAppender(new ConsoleAppender(
-                    new PatternLayout(PatternLayout.TTCC_CONVERSION_PATTERN), ConsoleAppender.SYSTEM_ERR));
-    }
-
-    /**
-     * Sets the layout of the logger.
-     * @param logger The logger.
-     * @param layout The layout.
-     */
-    @SuppressWarnings("unchecked")
-    public static void setLayout(Logger logger, PatternLayout layout) {
-        for (; logger != null; logger = (Logger)logger.getParent()) {
-            Enumeration<Appender> e = (Enumeration<Appender>) logger.getAllAppenders();
-            for (Appender appender: Collections.list(e))
-                appender.setLayout(layout);
-        }
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/commandline/EnumerationArgumentDefault.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/commandline/EnumerationArgumentDefault.java
deleted file mode 100644
index 3bbdedb..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/commandline/EnumerationArgumentDefault.java
+++ /dev/null
@@ -1,65 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.commandline;
-
-import java.lang.annotation.ElementType;
-import java.lang.annotation.Retention;
-import java.lang.annotation.RetentionPolicy;
-import java.lang.annotation.Target;
-
-/**
- * @author aaron
- * <p/>
- * Annotation EnumerationArgumentDefault
- * <p/>
- * Allows the default argument value to be set for an enum; this allows us to treat enums as
- * booleans on the command line. I.e.
- *
- * if we're using an enum Shape,
- *
- * enum shape {
- *  SQUARE,
- *  CIRCLE,
- *  @EnumerationArgumentDefault
- *  TRIANGLE
- * }
- *
- * and a command line option -shape, the EnumerationArgumentDefault would allow you to say:
- * -shape
- * or
- * -shape TRIANGLE
- *
- * would get -shape set to TRIANGLE, where:
- *
- * -shape SQUARE
- *
- * would set shape to SQUARE
- *
- */
-@Retention(RetentionPolicy.RUNTIME)
-@Target(ElementType.FIELD)
-public @interface EnumerationArgumentDefault {
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/commandline/Gather.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/commandline/Gather.java
deleted file mode 100644
index b291663..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/commandline/Gather.java
+++ /dev/null
@@ -1,41 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.commandline;
-
-import java.lang.annotation.*;
-
-/**
- * Specifies the class type to gather an @Output
- */
-@Documented
-@Inherited
-@Retention(RetentionPolicy.RUNTIME)
-@Target({ElementType.FIELD})
-public @interface Gather {
-    Class value() default Gather.class;
-    String className() default "";
-    boolean enabled() default true;
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/commandline/Gatherer.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/commandline/Gatherer.java
deleted file mode 100644
index 761611f..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/commandline/Gatherer.java
+++ /dev/null
@@ -1,47 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.commandline;
-
-import java.io.File;
-import java.util.List;
-
-/**
- * Combines a list of files into a single output.
- */
-public abstract class Gatherer {
-    /**
-     * Gathers a list of files into a single output.
-     * @param inputs Files to combine.
-     * @param output Path to output file.
-     */
-    public abstract void gather(List<File> inputs, File output);
-
-    /**
-     * Returns true if the caller should wait for the input files to propagate over NFS before running gather().
-     * @return true if the caller should wait for the input files to propagate over NFS before running gather().
-     */
-    public boolean waitForInputs() { return true; }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/commandline/Hidden.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/commandline/Hidden.java
deleted file mode 100644
index 02325a7..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/commandline/Hidden.java
+++ /dev/null
@@ -1,41 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.commandline;
-
-import java.lang.annotation.*;
-
-/**
- * Indicates that a walker or walker argument should not be presented in the help system.
- *
- * @author mhanna
- * @version 0.1
- */
- at Documented
- at Inherited
- at Retention(RetentionPolicy.RUNTIME)
- at Target({ElementType.TYPE,ElementType.FIELD})
-public @interface Hidden {
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/commandline/Input.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/commandline/Input.java
deleted file mode 100644
index 8ec0483..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/commandline/Input.java
+++ /dev/null
@@ -1,83 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.commandline;
-
-import java.lang.annotation.*;
-
-/**
- * Annotates fields in objects that should be used as command-line arguments.
- * Any field annotated with @Input can appear as a command-line parameter.
- */
- at Documented
- at Inherited
- at Retention(RetentionPolicy.RUNTIME)
- at Target(ElementType.FIELD)
-public @interface Input {
-    /**
-     * The full name of the command-line argument.  Full names should be
-     * prefixed on the command-line with a double dash (--).
-     * @return Selected full name, or "" to use the default.
-     */
-    String fullName() default "";
-
-    /**
-     * Specified short name of the command.  Short names should be prefixed
-     * with a single dash.  Argument values can directly abut single-char
-     * short names or be separated from them by a space.
-     * @return Selected short name, or "" for none.
-     */
-    String shortName() default "";
-
-    /**
-     * Documentation for the command-line argument.  Should appear when the
-     * --help argument is specified.
-     * @return Doc string associated with this command-line argument.
-     */
-    String doc() default "Undocumented option";
-
-    /**
-     * Is this argument required.  If true, the command-line argument system will
-     * make a best guess for populating this argument based on the type descriptor,
-     * and will fail if the type can't be populated.
-     * @return True if the argument is required.  False otherwise.
-     */
-    boolean required() default true;
-
-    /**
-     * Should this command-line argument be exclusive of others.  Should be
-     * a comma-separated list of names of arguments of which this should be
-     * independent.
-     * @return A comma-separated string listing other arguments of which this
-     *         argument should be independent.
-     */
-    String exclusiveOf() default "";
-
-    /**
-     * Provide a regexp-based validation string.
-     * @return Non-empty regexp for validation, blank otherwise.
-     */
-    String validation() default "";
-}
\ No newline at end of file
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/commandline/IntervalArgumentCollection.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/commandline/IntervalArgumentCollection.java
deleted file mode 100644
index 717a077..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/commandline/IntervalArgumentCollection.java
+++ /dev/null
@@ -1,88 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.commandline;
-
-import htsjdk.tribble.Feature;
-import org.broadinstitute.gatk.utils.interval.IntervalMergingRule;
-import org.broadinstitute.gatk.utils.interval.IntervalSetRule;
-
-import java.util.List;
-
-public class IntervalArgumentCollection {
-    /**
-     * Use this option to perform the analysis over only part of the genome. This argument can be specified multiple times.
-     * You can use samtools-style intervals either explicitly on the command line (e.g. -L chr1 or -L chr1:100-200) or
-     * by loading in a file containing a list of intervals (e.g. -L myFile.intervals).
-     *
-     * Additionally, you can also specify a ROD file (such as a VCF file) in order to perform the analysis at specific
-     * positions based on the records present in the file (e.g. -L file.vcf).
-     *
-     * Finally, you can also use this to perform the analysis on the reads that are completely unmapped in the BAM file
-     * (i.e. those without a reference contig) by specifying -L unmapped.
-     */
-    @Input(fullName = "intervals", shortName = "L", doc = "One or more genomic intervals over which to operate", required = false)
-    public List<IntervalBinding<Feature>> intervals = null;
-
-    /**
-     * Use this option to exclude certain parts of the genome from the analysis (like -L, but the opposite).
-     * This argument can be specified multiple times. You can use samtools-style intervals either explicitly on the
-     * command line (e.g. -XL chr1 or -XL chr1:100-200) or by loading in a file containing a list of intervals
-     * (e.g. -XL myFile.intervals).
-     *
-     * Additionally, you can also specify a ROD file (such as a VCF file) in order to exclude specific
-     * positions from the analysis based on the records present in the file (e.g. -XL file.vcf).
-     * */
-    @Input(fullName = "excludeIntervals", shortName = "XL", doc = "One or more genomic intervals to exclude from processing", required = false)
-    public List<IntervalBinding<Feature>> excludeIntervals = null;
-
-    /**
-     * By default, the program will take the UNION of all intervals specified using -L and/or -XL. However, you can
-     * change this setting for -L, for example if you want to take the INTERSECTION of the sets instead. E.g. to perform the
-     * analysis on positions for which there is a record in a VCF, but restrict this to just those on chromosome 20,
-     * you would do -L chr20 -L file.vcf -isr INTERSECTION. However, it is not possible to modify the merging approach
-     * for intervals passed using -XL (they will always be merged using UNION).
-     *
-     * Note that if you specify both -L and -XL, the -XL interval set will be subtracted from the -L interval set.
-     */
-    @Argument(fullName = "interval_set_rule", shortName = "isr", doc = "Set merging approach to use for combining interval inputs", required = false)
-    public IntervalSetRule intervalSetRule = IntervalSetRule.UNION;
-
-    /**
-     * By default, the program merges abutting intervals (i.e. intervals that are directly side-by-side but do not
-     * actually overlap) into a single continuous interval. However you can change this behavior if you want them to be
-     * treated as separate intervals instead.
-     */
-    @Argument(fullName = "interval_merging", shortName = "im", doc = "Interval merging rule for abutting intervals", required = false)
-    public IntervalMergingRule intervalMerging = IntervalMergingRule.ALL;
-
-    /**
-     * Use this to add padding to the intervals specified using -L and/or -XL. For example, '-L chr1:100' with a
-     * padding value of 20 would turn into '-L chr1:80-120'. This is typically used to add padding around exons when
-     * analyzing exomes. The general Broad exome calling pipeline uses 100 bp padding by default.
-     */
-    @Argument(fullName = "interval_padding", shortName = "ip", doc = "Amount of padding (in bp) to add to each interval", required = false, minValue = 0)
-    public int intervalPadding = 0;
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/commandline/IntervalBinding.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/commandline/IntervalBinding.java
deleted file mode 100644
index 59048a9..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/commandline/IntervalBinding.java
+++ /dev/null
@@ -1,106 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.commandline;
-
-import com.google.java.contract.Requires;
-import htsjdk.tribble.AbstractFeatureReader;
-import htsjdk.tribble.Feature;
-import htsjdk.tribble.FeatureCodec;
-import htsjdk.tribble.FeatureReader;
-import org.broadinstitute.gatk.engine.GenomeAnalysisEngine;
-import org.broadinstitute.gatk.engine.refdata.ReferenceDependentFeatureCodec;
-import org.broadinstitute.gatk.engine.refdata.tracks.FeatureManager;
-import org.broadinstitute.gatk.utils.GenomeLoc;
-import org.broadinstitute.gatk.utils.GenomeLocParser;
-import org.broadinstitute.gatk.utils.exceptions.UserException;
-import org.broadinstitute.gatk.utils.interval.IntervalUtils;
-
-import java.util.*;
-
-/**
- * An IntervalBinding representing a walker argument that gets bound to either a ROD track or interval string.
- *
- * The IntervalBinding<T> is a formal GATK argument that bridges between a walker and
- * the engine to construct intervals for traversal at runtime.  The IntervalBinding can
- * either be a RodBinding<T>, a string of one interval, or a file with interval strings.
- * The GATK Engine takes care of initializing the binding when appropriate and determining intervals from it.
- *
- * Note that this class is immutable.
- */
-public final class IntervalBinding<T extends Feature> {
-
-    private RodBinding<T> featureIntervals;
-    private String stringIntervals;
-
-    @Requires({"type != null", "rawName != null", "source != null", "tribbleType != null", "tags != null"})
-    public IntervalBinding(Class<T> type, final String rawName, final String source, final String tribbleType, final Tags tags) {
-        featureIntervals = new RodBinding<>(type, rawName, source, tribbleType, tags);
-    }
-
-    @Requires({"intervalArgument != null"})
-    public IntervalBinding(String intervalArgument) {
-        stringIntervals = intervalArgument;
-    }
-
-    public String getSource() {
-        return ( featureIntervals != null ? featureIntervals.getSource() : stringIntervals );
-    }
-
-    public List<GenomeLoc> getIntervals(final GenomeAnalysisEngine toolkit) {
-        return getIntervals(toolkit.getGenomeLocParser());
-    }
-
-    public List<GenomeLoc> getIntervals(final GenomeLocParser genomeLocParser) {
-        List<GenomeLoc> intervals;
-
-        if ( featureIntervals != null ) {
-            intervals = new ArrayList<>();
-
-            // TODO -- after ROD system cleanup, go through the ROD system so that we can handle things like gzipped files
-
-            final FeatureCodec codec = new FeatureManager().getByName(featureIntervals.getTribbleType()).getCodec();
-            if ( codec instanceof ReferenceDependentFeatureCodec )
-                ((ReferenceDependentFeatureCodec)codec).setGenomeLocParser(genomeLocParser);
-            try {
-                FeatureReader<Feature> reader = AbstractFeatureReader.getFeatureReader(featureIntervals.getSource(), codec, false);
-                for ( Feature feature : reader.iterator() )
-                    intervals.add(genomeLocParser.createGenomeLoc(feature));
-            } catch (Exception e) {
-                throw new UserException.MalformedFile(featureIntervals.getSource(), "Problem reading the interval file", e);
-            }
-
-        } else {
-            intervals = IntervalUtils.parseIntervalArguments(genomeLocParser, stringIntervals);
-        }
-
-        Collections.sort(intervals);
-        return intervals;
-    }
-
-    public String toString() {
-        return getSource();
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/commandline/MissingArgumentValueException.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/commandline/MissingArgumentValueException.java
deleted file mode 100644
index f8f3895..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/commandline/MissingArgumentValueException.java
+++ /dev/null
@@ -1,50 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.commandline;
-
-import org.broadinstitute.gatk.utils.Utils;
-
-/**
- * Specifies that a value was missing when attempting to populate an argument.
- */
-public class MissingArgumentValueException extends ArgumentException {
-    public MissingArgumentValueException( ArgumentDefinition... missingArguments ) {
-        super( formatArguments(missingArguments) );
-    }
-
-    private static String formatArguments( ArgumentDefinition... missingArguments ) {
-        StringBuilder sb = new StringBuilder();
-        for( ArgumentDefinition missingArgument: missingArguments ) {
-            if( missingArgument.shortName != null )
-                sb.append( String.format("%nValue for argument with name '--%s' (-%s) is missing.", missingArgument.fullName, missingArgument.shortName) );
-            else
-                sb.append( String.format("%nValue for argument with name '--%s' is missing.", missingArgument.fullName) );
-            if(missingArgument.validOptions != null)
-                sb.append( String.format("  Valid options are (%s).", Utils.join(",",missingArgument.validOptions)));
-        }
-        return sb.toString();
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/commandline/Output.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/commandline/Output.java
deleted file mode 100644
index 88057a2..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/commandline/Output.java
+++ /dev/null
@@ -1,90 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.commandline;
-
-import java.lang.annotation.*;
-
-/**
- * Annotates fields in objects that should be used as command-line arguments.
- * Any field annotated with @Argument can appear as a command-line parameter.
- */
- at Documented
- at Inherited
- at Retention(RetentionPolicy.RUNTIME)
- at Target(ElementType.FIELD)
-public @interface Output {
-    /**
-     * The full name of the command-line argument.  Full names should be
-     * prefixed on the command-line with a double dash (--).
-     * @return Selected full name, or "" to use the default.
-     */
-    String fullName() default "out";
-
-    /**
-     * Specified short name of the command.  Short names should be prefixed
-     * with a single dash.  Argument values can directly abut single-char
-     * short names or be separated from them by a space.
-     * @return Selected short name, or "" for none.
-     */
-    String shortName() default "o";
-
-    /**
-     * Documentation for the command-line argument.  Should appear when the
-     * --help argument is specified.
-     * @return Doc string associated with this command-line argument.
-     */
-    String doc() default "An output file created by the walker.  Will overwrite contents if file exists";
-
-    /**
-     * Is this argument required.  If true, the command-line argument system will
-     * make a best guess for populating this argument based on the type, and will
-     * fail if the type can't be populated.
-     * @return True if the argument is required.  False otherwise.
-     */
-    boolean required() default false;
-
-    /**
-     * If this argument is not required, should it default to use stdout if no
-     * output file is explicitly provided on the command-line?
-     * @return True if the argument should default to stdout.  False otherwise.
-     */
-    boolean defaultToStdout() default true;
-
-    /**
-     * Should this command-line argument be exclusive of others.  Should be
-     * a comma-separated list of names of arguments of which this should be
-     * independent.
-     * @return A comma-separated string listing other arguments of which this
-     *         argument should be independent.
-     */
-    String exclusiveOf() default "";
-
-    /**
-     * Provide a regexp-based validation string.
-     * @return Non-empty regexp for validation, blank otherwise.
-     */
-    String validation() default "";
-}
\ No newline at end of file
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/commandline/ParsedArgs.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/commandline/ParsedArgs.java
deleted file mode 100644
index 45cc055..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/commandline/ParsedArgs.java
+++ /dev/null
@@ -1,38 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.commandline;
-
-/**
- * Represents a collection of parsed arguments for an argument source.
- *
- * Useful for printing out help documents.
- */
-public abstract class ParsedArgs {
-    /**
-     * @return A compact description of the arguments from an provider/source.
-     */
-    public abstract String getDescription();
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/commandline/ParsedListArgs.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/commandline/ParsedListArgs.java
deleted file mode 100644
index aa9e186..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/commandline/ParsedListArgs.java
+++ /dev/null
@@ -1,55 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.commandline;
-
-import org.apache.commons.lang.StringUtils;
-
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-
-/**
- * A list of string arguments, usually from the command line or an args list file.
- */
-public class ParsedListArgs extends ParsedArgs {
-    private final List<String> args = new ArrayList<String>();
-
-    public ParsedListArgs() {
-    }
-
-    public ParsedListArgs(List<String> args) {
-        this.args.addAll(args);
-    }
-
-    public void add(String... args) {
-        this.args.addAll(Arrays.asList(args));
-    }
-
-    @Override
-    public String getDescription() {
-        return StringUtils.join(this.args, " ");
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/commandline/ParsingEngine.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/commandline/ParsingEngine.java
deleted file mode 100644
index 6244b86..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/commandline/ParsingEngine.java
+++ /dev/null
@@ -1,829 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.commandline;
-
-import com.google.java.contract.Requires;
-import org.apache.commons.io.FileUtils;
-import org.apache.log4j.Logger;
-import org.broadinstitute.gatk.utils.Utils;
-import org.broadinstitute.gatk.utils.classloader.JVMUtils;
-import org.broadinstitute.gatk.utils.classloader.PluginManager;
-import org.broadinstitute.gatk.utils.collections.Pair;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-import org.broadinstitute.gatk.utils.exceptions.UserException;
-import org.broadinstitute.gatk.utils.help.ApplicationDetails;
-import org.broadinstitute.gatk.utils.help.HelpFormatter;
-
-import java.io.File;
-import java.io.IOException;
-import java.lang.annotation.Annotation;
-import java.lang.reflect.Field;
-import java.util.*;
-
-/**
- * A parser for command-line arguments.
- */
-public class ParsingEngine {
-
-    /**
-     * The loaded argument sources along with their back definitions.
-     */
-    private Map<ArgumentDefinition,ArgumentSource> argumentSourcesByDefinition = new HashMap<ArgumentDefinition,ArgumentSource>();
-
-    /**
-     * A list of defined arguments against which command lines are matched.
-     * Package protected for testing access.
-     */
-    public ArgumentDefinitions argumentDefinitions = new ArgumentDefinitions();
-
-    /**
-     * A list of matches from defined arguments to command-line text.
-     * Indicates as best as possible where command-line text remains unmatched
-     * to existing arguments.
-     */
-    private ArgumentMatches argumentMatches = null;
-
-    /**
-     * Techniques for parsing and for argument lookup.
-     */
-    private List<ParsingMethod> parsingMethods = new ArrayList<ParsingMethod>();
-
-    /**
-     * All of the RodBinding objects we've seen while parsing
-     */
-    private List<RodBinding> rodBindings = new ArrayList<RodBinding>();
-
-    /**
-     * Class reference to the different types of descriptors that the create method can create.
-     * The type of set used must be ordered (but not necessarily sorted).
-     */
-    private static final Set<ArgumentTypeDescriptor> STANDARD_ARGUMENT_TYPE_DESCRIPTORS = new LinkedHashSet<ArgumentTypeDescriptor>( Arrays.asList(new SimpleArgumentTypeDescriptor(),
-            new IntervalBindingArgumentTypeDescriptor(),
-            new RodBindingArgumentTypeDescriptor(),
-            new RodBindingCollectionArgumentTypeDescriptor(),
-            new CompoundArgumentTypeDescriptor(),
-            new MultiplexArgumentTypeDescriptor()) );
-
-    private Set<ArgumentTypeDescriptor> argumentTypeDescriptors = new LinkedHashSet<ArgumentTypeDescriptor>();
-
-    /**
-     * List of tags associated with the given instantiation of the command-line argument.
-     */
-    private final Map<Object,Tags> tags = new IdentityHashMap<Object,Tags>();
-
-    private PluginManager<ParsingEngineArgumentProvider> argumentProviderPluginManager =
-            new PluginManager<ParsingEngineArgumentProvider>(ParsingEngineArgumentProvider.class);
-
-    /**
-     * our log, which we want to capture anything from org.broadinstitute.gatk
-     */
-    protected static Logger logger = Logger.getLogger(ParsingEngine.class);
-
-    public ParsingEngine( CommandLineProgram clp ) {
-        RodBinding.resetNameCounter();
-        parsingMethods.add( ParsingMethod.FullNameParsingMethod );
-        parsingMethods.add( ParsingMethod.ShortNameParsingMethod );
-
-        // Order matters here!  Make sure the clp's new type descriptors go in before the original type descriptors.
-        if(clp != null)
-            argumentTypeDescriptors.addAll(clp.getArgumentTypeDescriptors());
-        argumentTypeDescriptors.addAll(STANDARD_ARGUMENT_TYPE_DESCRIPTORS);
-
-        List<Class<? extends ParsingEngineArgumentProvider>> providers = argumentProviderPluginManager.getPlugins();
-        for (Class<? extends ParsingEngineArgumentProvider> provider: providers) {
-            addArgumentSource(provider);
-        }
-    }
-
-    /**
-     * Add a main argument source.  Argument sources are expected to have
-     * any number of fields with an @Argument annotation attached.
-     * @param source     An argument source from which to extract command-line arguments.
-     */
-    public void addArgumentSource( Class source ) {
-        addArgumentSource(null, source);
-    }
-
-    public ArgumentMatches getArgumentMatches() {
-        return argumentMatches;
-    }
-
-    /**
-     * Add an argument source.  Argument sources are expected to have
-     * any number of fields with an @Argument annotation attached.
-     * @param sourceName name for this argument source.  'Null' indicates that this source should be treated
-     *                   as the main module.
-     * @param sourceClass A class containing argument sources from which to extract command-line arguments.
-     */
-    public void addArgumentSource( String sourceName, Class sourceClass ) {
-        List<ArgumentDefinition> argumentsFromSource = new ArrayList<ArgumentDefinition>();
-        for( ArgumentSource argumentSource: extractArgumentSources(sourceClass) ) {
-            List<ArgumentDefinition> argumentDefinitions = argumentSource.createArgumentDefinitions();
-            for(ArgumentDefinition argumentDefinition: argumentDefinitions) {
-                argumentSourcesByDefinition.put(argumentDefinition,argumentSource);
-                argumentsFromSource.add( argumentDefinition );
-            }
-        }
-        argumentDefinitions.add( new ArgumentDefinitionGroup(sourceName, argumentsFromSource) );
-    }
-
-    /**
-     * Do a cursory search to see if an argument with the given name is present.
-     * @param argumentFullName full name of the argument.
-     * @return True if the argument is present.  False otherwise.
-     */
-    public boolean isArgumentPresent( String argumentFullName ) {
-        ArgumentDefinition definition =
-                argumentDefinitions.findArgumentDefinition(argumentFullName,ArgumentDefinitions.FullNameDefinitionMatcher);
-        return argumentMatches.hasMatch(definition);
-
-    }
-
-    /**
-     * Parse the given set of command-line arguments, returning
-     * an ArgumentMatches object describing the best fit of these
-     * command-line arguments to the arguments that are actually
-     * required.
-     * @param tokens Tokens passed on the command line.
-     * @return The parsed arguments by file.
-     */
-    public SortedMap<ArgumentMatchSource, ParsedArgs> parse( String[] tokens ) {
-        argumentMatches = new ArgumentMatches();
-        SortedMap<ArgumentMatchSource, ParsedArgs> parsedArgs = new TreeMap<ArgumentMatchSource, ParsedArgs>();
-
-        List<String> cmdLineTokens = Arrays.asList(tokens);
-        parse(ArgumentMatchSource.COMMAND_LINE, cmdLineTokens, argumentMatches, parsedArgs);
-
-        List<ParsingEngineArgumentProvider> providers = argumentProviderPluginManager.createAllTypes();
-
-        for (ParsingEngineArgumentProvider provider: providers) {
-            // Load the arguments ONLY into the provider.
-            // Validation may optionally run on the rest of the arguments.
-            loadArgumentsIntoObject(provider);
-        }
-
-        for (ParsingEngineArgumentProvider provider: providers) {
-            provider.parse(this, parsedArgs);
-        }
-
-        return parsedArgs;
-    }
-
-    public void parse(ArgumentMatchSource matchSource, List<String> tokens,
-                         ArgumentMatches argumentMatches, SortedMap<ArgumentMatchSource, ParsedArgs> parsedArgs) {
-        ArgumentMatchSite lastArgumentMatchSite = new ArgumentMatchSite(matchSource, -1);
-
-        int i = 0;
-        for (String token: tokens) {
-            // If the token is of argument form, parse it into its own argument match.
-            // Otherwise, pair it with the most recently used argument discovered.
-            ArgumentMatchSite site = new ArgumentMatchSite(matchSource, i);
-            if( isArgumentForm(token) ) {
-                ArgumentMatch argumentMatch = parseArgument( token, site );
-                if( argumentMatch != null ) {
-                    argumentMatches.mergeInto( argumentMatch );
-                    lastArgumentMatchSite = site;
-                }
-            }
-            else {
-                if( argumentMatches.hasMatch(lastArgumentMatchSite) &&
-                        !argumentMatches.getMatch(lastArgumentMatchSite).hasValueAtSite(lastArgumentMatchSite))
-                    argumentMatches.getMatch(lastArgumentMatchSite).addValue( lastArgumentMatchSite, new ArgumentMatchStringValue(token) );
-                else
-                    argumentMatches.MissingArgument.addValue( site, new ArgumentMatchStringValue(token) );
-
-            }
-            i++;
-        }
-
-        parsedArgs.put(matchSource, new ParsedListArgs(tokens));
-    }
-
-    public void parsePairs(ArgumentMatchSource matchSource, List<Pair<String, ArgumentMatchValue>> tokens,
-                         ArgumentMatches argumentMatches, ParsedArgs matchSourceArgs,
-                         SortedMap<ArgumentMatchSource, ParsedArgs> parsedArgs) {
-        int i = 0;
-        for (Pair<String, ArgumentMatchValue> pair: tokens) {
-
-            ArgumentMatchSite site = new ArgumentMatchSite(matchSource, i);
-            List<DefinitionMatcher> matchers = Arrays.asList(ArgumentDefinitions.FullNameDefinitionMatcher, ArgumentDefinitions.ShortNameDefinitionMatcher);
-            ArgumentDefinition definition = null;
-            for (DefinitionMatcher matcher: matchers) {
-                definition = argumentDefinitions.findArgumentDefinition( pair.getFirst(), matcher );
-                if (definition != null)
-                    break;
-            }
-            if (definition == null)
-                continue;
-            ArgumentMatch argumentMatch = new ArgumentMatch(pair.getFirst(), definition, site, new Tags());
-            argumentMatches.mergeInto(argumentMatch);
-            argumentMatch.addValue(site, pair.getSecond());
-            i++;
-        }
-
-        parsedArgs.put(matchSource, matchSourceArgs);
-    }
-
-    protected List<String> getArguments(File file) {
-        try {
-            if (file.getAbsolutePath().endsWith(".list")) {
-                return getListArguments(file);
-            }
-        } catch (IOException e) {
-            throw new UserException.CouldNotReadInputFile(file, e);
-        }
-        throw new UserException.CouldNotReadInputFile(file, "file extension is not .list");
-    }
-
-    private List<String> getListArguments(File file) throws IOException {
-        ArrayList<String> argsList = new ArrayList<String>();
-        for (String line: FileUtils.readLines(file))
-            argsList.addAll(Arrays.asList(Utils.escapeExpressions(line)));
-        return argsList;
-    }
-
-    public enum ValidationType { MissingRequiredArgument,
-                                 InvalidArgument,
-                                 InvalidArgumentValue,
-                                 ValueMissingArgument,
-                                 TooManyValuesForArgument,
-                                 MutuallyExclusive }
-
-    /**
-     * Validates the list of command-line argument matches.
-     */
-    public void validate() {
-        validate( EnumSet.noneOf(ValidationType.class) );
-    }
-
-    /**
-     * Validates the list of command-line argument matches.  On failure throws an exception with detailed info about the
-     * particular failures.  Takes an EnumSet indicating which validation checks to skip.
-     * @param skipValidationOf List of validation checks to skip.
-     */
-    public void validate( EnumSet<ValidationType> skipValidationOf ) {
-        // Find missing required arguments.
-        if( !skipValidationOf.contains(ValidationType.MissingRequiredArgument) ) {
-            Collection<ArgumentDefinition> requiredArguments =
-                    argumentDefinitions.findArgumentDefinitions( true, ArgumentDefinitions.RequiredDefinitionMatcher );
-            Collection<ArgumentDefinition> missingArguments = new ArrayList<ArgumentDefinition>();
-            for( ArgumentDefinition requiredArgument: requiredArguments ) {
-                if( !argumentMatches.hasMatch(requiredArgument) )
-                    missingArguments.add( requiredArgument );
-            }
-
-            if( missingArguments.size() > 0 )
-                throw new MissingArgumentException( missingArguments );
-        }
-
-        // Find invalid arguments.  Invalid arguments will have a null argument definition.
-        if( !skipValidationOf.contains(ValidationType.InvalidArgument) ) {
-            ArgumentMatches invalidArguments = argumentMatches.findUnmatched();
-            if( invalidArguments.size() > 0 )
-                throw new InvalidArgumentException( invalidArguments );
-        }
-
-        // Find invalid argument values -- invalid arguments are either completely missing or fail the specified 'validation' regular expression.
-        if( !skipValidationOf.contains(ValidationType.InvalidArgumentValue) ) {
-            Collection<ArgumentDefinition> verifiableArguments = 
-                    argumentDefinitions.findArgumentDefinitions( null, ArgumentDefinitions.VerifiableDefinitionMatcher );
-            Collection<Pair<ArgumentDefinition,String>> invalidValues = new ArrayList<Pair<ArgumentDefinition,String>>();
-            for( ArgumentDefinition verifiableArgument: verifiableArguments ) {
-                ArgumentMatches verifiableMatches = argumentMatches.findMatches( verifiableArgument );
-                // Check to see whether an argument value was specified.  Argument values must be provided
-                // when the argument name is specified and the argument is not a flag type.
-                for(ArgumentMatch verifiableMatch: verifiableMatches) {
-                    ArgumentSource argumentSource = argumentSourcesByDefinition.get(verifiableArgument);
-                    if(verifiableMatch.values().size() == 0 && !verifiableArgument.isFlag && !argumentSource.createsTypeDefault())
-                        invalidValues.add(new Pair<ArgumentDefinition,String>(verifiableArgument,null));
-                }
-
-                // Ensure that the field contents meet the validation criteria specified by the regular expression.
-                for( ArgumentMatch verifiableMatch: verifiableMatches ) {
-                    for( ArgumentMatchValue value: verifiableMatch.values() ) {
-                        if( verifiableArgument.validation != null && !value.asString().matches(verifiableArgument.validation) )
-                            invalidValues.add( new Pair<ArgumentDefinition,String>(verifiableArgument, value.asString()) );
-                    }
-                }
-            }
-
-            if( invalidValues.size() > 0 )
-                throw new InvalidArgumentValueException( invalidValues );
-        }
-
-        // Find values without an associated mate.
-        if( !skipValidationOf.contains(ValidationType.ValueMissingArgument) ) {
-            if( argumentMatches.MissingArgument.values().size() > 0 )
-                throw new UnmatchedArgumentException( argumentMatches.MissingArgument );
-        }
-
-        // Find arguments with too many values.
-        if( !skipValidationOf.contains(ValidationType.TooManyValuesForArgument)) {
-            Collection<ArgumentMatch> overvaluedArguments = new ArrayList<ArgumentMatch>();
-            for( ArgumentMatch argumentMatch: argumentMatches.findSuccessfulMatches() ) {
-                // Warning: assumes that definition is not null (asserted by checks above).
-                if( !argumentMatch.definition.isMultiValued && argumentMatch.values().size() > 1 )
-                    overvaluedArguments.add(argumentMatch);
-            }
-
-            if( !overvaluedArguments.isEmpty() )
-                throw new TooManyValuesForArgumentException(overvaluedArguments);
-        }
-
-        // Find sets of options that are supposed to be mutually exclusive.
-        if( !skipValidationOf.contains(ValidationType.MutuallyExclusive)) {
-            Collection<Pair<ArgumentMatch,ArgumentMatch>> invalidPairs = new ArrayList<Pair<ArgumentMatch,ArgumentMatch>>();
-            for( ArgumentMatch argumentMatch: argumentMatches.findSuccessfulMatches() ) {
-                if( argumentMatch.definition.exclusiveOf != null ) {
-                    for( ArgumentMatch conflictingMatch: argumentMatches.findSuccessfulMatches() ) {
-                        // Skip over the current element.
-                        if( argumentMatch == conflictingMatch )
-                            continue;
-                        if( argumentMatch.definition.exclusiveOf.equals(conflictingMatch.definition.fullName) ||
-                            argumentMatch.definition.exclusiveOf.equals(conflictingMatch.definition.shortName))
-                            invalidPairs.add( new Pair<ArgumentMatch,ArgumentMatch>(argumentMatch, conflictingMatch) );
-                    }
-                }
-            }
-
-            if( !invalidPairs.isEmpty() )
-                throw new ArgumentsAreMutuallyExclusiveException( invalidPairs );
-        }
-    }
-
-    /**
-     * Loads a set of matched command-line arguments into the given object.
-     * @param object Object into which to add arguments.
-     */
-    public void loadArgumentsIntoObject( Object object ) {
-        loadArgumentsIntoObject(object, true);
-    }
-
-    /**
-     * Loads a set of matched command-line arguments into the given object.
-     * @param object Object into which to add arguments.
-     * @param enforceArgumentRanges If true, check that the argument value is within the range specified
-     *                              in the corresponding Argument annotation by min/max value attributes. This
-     *                              check is only performed for numeric types, and only when a min and/or
-     *                              max value is actually defined in the annotation. It is also only performed
-     *                              for values actually specified on the command line, and not for default values.
-     */
-    public void loadArgumentsIntoObject( Object object, boolean enforceArgumentRanges ) {
-        List<ArgumentSource> argumentSources = extractArgumentSources(object.getClass());
-
-        List<ArgumentSource> dependentArguments = new ArrayList<ArgumentSource>();
-
-        for( ArgumentSource argumentSource: argumentSources ) {
-            if(argumentSource.isDeprecated() && argumentMatches.findMatches(this,argumentSource).size() > 0)
-                notifyDeprecatedCommandLineArgument(argumentSource);
-
-            // If this argument source depends on other command-line arguments, skip it and make a note to process it later.
-            if(argumentSource.isDependent()) {
-                dependentArguments.add(argumentSource);
-                continue;
-            }
-            loadValueIntoObject(argumentSource, object, argumentMatches.findMatches(this,argumentSource), enforceArgumentRanges);
-        }
-
-        for(ArgumentSource dependentArgument: dependentArguments) {
-            MultiplexArgumentTypeDescriptor dependentDescriptor = dependentArgument.createDependentTypeDescriptor(this,object);
-            ArgumentSource dependentSource = dependentArgument.copyWithCustomTypeDescriptor(dependentDescriptor);
-            loadValueIntoObject(dependentSource,object,argumentMatches.findMatches(this,dependentSource), enforceArgumentRanges);
-        }
-    }
-
-    /**
-     * Notify the user that tags have been created.
-     * @param key The key created.
-     * @param tags List of tags, or empty list if no tags are present.
-     */
-    public void addTags(Object key, final Tags tags) {
-        this.tags.put(key,tags);        
-    }
-
-    /**
-     * Gets the tags associated with a given object.
-     * @param key Key for which to find a tag.
-     * @return List of tags associated with this key.
-     */
-    public Tags getTags(Object key)  {
-        if(!tags.containsKey(key))
-            return new Tags();
-        return tags.get(key);
-    }
-
-    /**
-     * Add a RodBinding type argument to this parser.  Called during parsing to allow
-     * us to track all of the RodBindings discovered in the command line.
-     * @param rodBinding the rodbinding to add.  Must not be added twice
-     */
-    @Requires("rodBinding != null")
-    public void addRodBinding(final RodBinding rodBinding) {
-        rodBindings.add(rodBinding);
-    }
-
-    /**
-     * Notify the user that a deprecated command-line argument has been used.
-     * @param argumentSource Deprecated argument source specified by user.
-     */
-    private void notifyDeprecatedCommandLineArgument(ArgumentSource argumentSource) {
-        // Grab the first argument definition and report that one as the failure.  Theoretically, we should notify of all failures.
-        List<ArgumentDefinition> definitions = argumentSource.createArgumentDefinitions();
-        if(definitions.size() < 1)
-            throw new ReviewedGATKException("Internal error.  Argument source creates no definitions.");
-        ArgumentDefinition definition = definitions.get(0);
-        throw new UserException.DeprecatedArgument(definition.fullName,definition.doc);
-    }
-
-    /**
-     * Loads a single argument into the object and that objects children.
-     * @param argumentMatches Argument matches to load into the object.
-     * @param source Argument source to load into the object.
-     * @param instance Object into which to inject the value.  The target might be in a container within the instance.
-     * @param enforceArgumentRanges If true, check that the argument value is within the range specified
-     *                              in the corresponding Argument annotation by min/max value attributes. This
-     *                              check is only performed for numeric types, and only when a min and/or
-     *                              max value is actually defined in the annotation. It is also only performed
-     *                              for values actually specified on the command line, and not for default values.
-     */
-    private void loadValueIntoObject( ArgumentSource source, Object instance, ArgumentMatches argumentMatches, boolean enforceArgumentRanges ) {
-        // Nothing to load
-        if( argumentMatches.size() == 0 && ! source.createsTypeDefault() )
-            return;
-
-        // Target instance into which to inject the value.
-        Collection<Object> targets = findTargets( source, instance );
-
-        // Abort if no home is found for the object.
-        if( targets.size() == 0 )
-            throw new ReviewedGATKException("Internal command-line parser error: unable to find a home for argument matches " + argumentMatches);
-
-        for( Object target: targets ) {
-            Object value;
-            boolean usedTypeDefault = false;
-            if ( argumentMatches.size() != 0 ) {
-                value = source.parse(this,argumentMatches);
-            }
-            else {
-                value = source.createTypeDefault(this);
-                usedTypeDefault = true;
-            }
-
-            // Only check argument ranges if a check was requested AND we used a value from the command line rather
-            // than the type default
-            if ( enforceArgumentRanges && ! usedTypeDefault ) {
-                checkArgumentRange(source, value);
-            }
-
-            JVMUtils.setFieldValue(source.field,target,value);
-        }
-    }
-
-    /**
-     * Check the provided value against any range constraints specified in the Argument annotation
-     * for the corresponding field. Throw an exception if hard limits are violated, or emit a warning
-     * if soft limits are violated.
-     *
-     * Only checks numeric types (int, double, etc.)
-     * Only checks fields with an actual @Argument annotation
-     * Only checks manually-specified constraints (there are no default constraints).
-     *
-     * @param argumentSource The source field for the command-line argument
-     * @param argumentValue The value we're considering putting in that source field
-     */
-    private void checkArgumentRange( final ArgumentSource argumentSource, final Object argumentValue ) {
-        // Only validate numeric types
-        if ( ! (argumentValue instanceof Number) ) {
-            return;
-        }
-        final double argumentDoubleValue = ((Number)argumentValue).doubleValue();
-
-        // Only validate fields with an @Argument annotation
-        final Annotation argumentAnnotation = argumentSource.field.getAnnotation(Argument.class);
-        if ( argumentAnnotation == null ) {
-            return;
-        }
-
-        final double minValue = (Double)CommandLineUtils.getValue(argumentAnnotation, "minValue");
-        final double maxValue = (Double)CommandLineUtils.getValue(argumentAnnotation, "maxValue");
-        final double minRecommendedValue = (Double)CommandLineUtils.getValue(argumentAnnotation, "minRecommendedValue");
-        final double maxRecommendedValue = (Double)CommandLineUtils.getValue(argumentAnnotation, "maxRecommendedValue");
-        final String argumentName = (String)CommandLineUtils.getValue(argumentAnnotation, "fullName");
-
-        // Check hard limits first, if specified
-        if ( minValue != Double.NEGATIVE_INFINITY && argumentDoubleValue < minValue ) {
-            throw new ArgumentValueOutOfRangeException(argumentName, argumentDoubleValue, minValue, "minimum");
-        }
-
-        if ( maxValue != Double.POSITIVE_INFINITY && argumentDoubleValue > maxValue ) {
-            throw new ArgumentValueOutOfRangeException(argumentName, argumentDoubleValue, maxValue, "maximum");
-        }
-
-        // Then check soft limits, if specified
-        if ( minRecommendedValue != Double.NEGATIVE_INFINITY && argumentDoubleValue < minRecommendedValue ) {
-            logger.warn(String.format("WARNING: argument --%s has value %.2f, but minimum recommended value is %.2f",
-                        argumentName, argumentDoubleValue, minRecommendedValue));
-        }
-
-        if ( maxRecommendedValue != Double.POSITIVE_INFINITY && argumentDoubleValue > maxRecommendedValue ) {
-            logger.warn(String.format("WARNING: argument --%s has value %.2f, but maximum recommended value is %.2f",
-                        argumentName, argumentDoubleValue, maxRecommendedValue));
-        }
-    }
-
-    public Collection<RodBinding> getRodBindings() {
-        return Collections.unmodifiableCollection(rodBindings);
-    }
-
-    /**
-     * Gets a collection of the container instances of the given type stored within the given target.
-     * @param source Argument source.
-     * @param instance Container.
-     * @return A collection of containers matching the given argument source.
-     */
-    private Collection<Object> findTargets(ArgumentSource source, Object instance) {
-        LinkedHashSet<Object> targets = new LinkedHashSet<Object>();
-        for( Class clazz = instance.getClass(); clazz != null; clazz = clazz.getSuperclass() ) {
-            for( Field field: clazz.getDeclaredFields() ) {
-                if( field.equals(source.field) ) {
-                    targets.add(instance);
-                } else if( field.isAnnotationPresent(ArgumentCollection.class) ) {
-                    targets.addAll(findTargets(source, JVMUtils.getFieldValue(field, instance)));
-                }
-            }
-        }
-        return targets;
-    }
-
-    /**
-     * Prints out the help associated with these command-line argument definitions.
-     * @param applicationDetails Details about the specific GATK-based application being run.
-     */
-    public void printHelp( ApplicationDetails applicationDetails ) {
-        new HelpFormatter().printHelp(applicationDetails,argumentDefinitions);
-    }
-
-    /**
-     * Extract all the argument sources from a given object.
-     * @param sourceClass class to act as sources for other arguments.
-     * @return A list of sources associated with this object and its aggregated objects.
-     */
-    public List<ArgumentSource> extractArgumentSources(Class sourceClass) {
-        return extractArgumentSources(sourceClass, new Field[0]);
-    }
-
-    /**
-     * Fetch the best command-line argument descriptor for the given class.
-     * @param type Class for which to specify a descriptor.
-     * @return descriptor for the given type.
-     */
-    public ArgumentTypeDescriptor selectBestTypeDescriptor(Class type) {
-        return ArgumentTypeDescriptor.selectBest(argumentTypeDescriptors,type);
-    }
-
-    private List<ArgumentSource> extractArgumentSources(Class sourceClass, Field[] parentFields) {
-        // now simply call into the truly general routine extract argument bindings but with a null
-        // object so bindings aren't computed
-        Map<ArgumentSource, Object> bindings = extractArgumentBindings(null, sourceClass, parentFields);
-        return new ArrayList<ArgumentSource>(bindings.keySet());
-    }
-
-    public Map<ArgumentSource, Object> extractArgumentBindings(Object obj) {
-        if ( obj == null ) throw new IllegalArgumentException("Incoming object cannot be null");
-        return extractArgumentBindings(obj, obj.getClass(), new Field[0]);
-    }
-
-    /**
-     * Extract all the argument sources from a given object, along with their bindings if obj != null .
-     * @param obj the object corresponding to the sourceClass
-     * @param sourceClass class to act as sources for other arguments.
-     * @param parentFields Parent Fields
-     * @return A map of sources associated with this object and its aggregated objects and bindings to their bindings values
-     */
-    private Map<ArgumentSource, Object> extractArgumentBindings(Object obj, Class sourceClass, Field[] parentFields) {
-        Map<ArgumentSource, Object> bindings = new LinkedHashMap<ArgumentSource, Object>();
-
-        while( sourceClass != null ) {
-            Field[] fields = sourceClass.getDeclaredFields();
-            for( Field field: fields ) {
-                if( ArgumentTypeDescriptor.isArgumentAnnotationPresent(field) ) {
-                    Object val = obj != null ? JVMUtils.getFieldValue(field, obj) : null;
-                    bindings.put( new ArgumentSource(parentFields, field, selectBestTypeDescriptor(field.getType())), val );
-                }
-                if( field.isAnnotationPresent(ArgumentCollection.class) ) {
-                    Object val = obj != null ? JVMUtils.getFieldValue(field, obj) : null;
-                    Field[] newParentFields = Arrays.copyOf(parentFields, parentFields.length + 1);
-                    newParentFields[parentFields.length] = field;
-                    bindings.putAll( extractArgumentBindings(val, field.getType(), newParentFields) );
-                }
-            }
-
-            sourceClass = sourceClass.getSuperclass();
-        }
-
-        return bindings;
-    }
-
-    /**
-     * Determines whether a token looks like the name of an argument.
-     * @param token Token to inspect.  Can be surrounded by whitespace.
-     * @return True if token is of short name form.
-     */
-    private boolean isArgumentForm( String token ) {
-        for( ParsingMethod parsingMethod: parsingMethods ) {
-            if( parsingMethod.matches(token) )
-                return true;
-        }
-
-        return false;
-    }
-
-    /**
-     * Parse a short name into an ArgumentMatch.
-     * @param token The token to parse.  The token should pass the isLongArgumentForm test.
-     * @param position The position of the token in question.
-     * @return ArgumentMatch associated with this token, or null if no match exists.
-     */    
-    private ArgumentMatch parseArgument( String token, ArgumentMatchSite position ) {
-        if( !isArgumentForm(token) )
-            throw new IllegalArgumentException( "Token is not recognizable as an argument: " + token );
-
-        for( ParsingMethod parsingMethod: parsingMethods ) {
-            if( parsingMethod.matches( token ) )
-                return parsingMethod.match( argumentDefinitions, token, position );
-        }
-
-        // No parse results found.
-        return null;
-    }
-}
-
-/**
- * An exception indicating that some required arguments are missing.
- */
-class MissingArgumentException extends ArgumentException {
-    public MissingArgumentException( Collection<ArgumentDefinition> missingArguments ) {
-        super( formatArguments(missingArguments) );
-    }
-
-    private static String formatArguments( Collection<ArgumentDefinition> missingArguments ) {
-        StringBuilder sb = new StringBuilder();
-        for( ArgumentDefinition missingArgument: missingArguments ) {
-            if( missingArgument.shortName != null )
-                sb.append( String.format("%nArgument with name '--%s' (-%s) is missing.", missingArgument.fullName, missingArgument.shortName) );
-            else
-                sb.append( String.format("%nArgument with name '--%s' is missing.", missingArgument.fullName) );
-        }
-        return sb.toString();
-    }
-}
-
-/**
- * An exception for undefined arguments.
- */
-class InvalidArgumentException extends ArgumentException {
-    public InvalidArgumentException( ArgumentMatches invalidArguments ) {
-        super( formatArguments(invalidArguments) );
-    }
-
-    private static String formatArguments( ArgumentMatches invalidArguments ) {
-        StringBuilder sb = new StringBuilder();
-        for( ArgumentMatch invalidArgument: invalidArguments )
-            sb.append( String.format("%nArgument with name '%s' isn't defined.", invalidArgument.label) );
-        return sb.toString();
-    }
-}
-
-/**
- * An exception for values whose format is invalid.
- */
-class InvalidArgumentValueException extends ArgumentException {
-    public InvalidArgumentValueException( Collection<Pair<ArgumentDefinition,String>> invalidArgumentValues ) {
-        super( formatArguments(invalidArgumentValues) );
-    }
-
-    private static String formatArguments( Collection<Pair<ArgumentDefinition,String>> invalidArgumentValues ) {
-        StringBuilder sb = new StringBuilder();
-        for( Pair<ArgumentDefinition,String> invalidValue: invalidArgumentValues ) {
-            if(invalidValue.getSecond() == null)
-                sb.append( String.format("%nArgument '--%s' requires a value but none was provided",
-                                         invalidValue.first.fullName) );
-            else
-                sb.append( String.format("%nArgument '--%s' has value of incorrect format: %s (should match %s)",
-                        invalidValue.first.fullName,
-                        invalidValue.second,
-                        invalidValue.first.validation) );
-        }
-        return sb.toString();
-    }
-}
-
-class ArgumentValueOutOfRangeException extends ArgumentException {
-    public ArgumentValueOutOfRangeException( final String argumentName, final double argumentActualValue,
-                                             final double argumentBoundaryValue, final String argumentBoundaryType ) {
-        super(String.format("Argument --%s has value %.2f, but %s allowed value is %.2f",
-                            argumentName, argumentActualValue, argumentBoundaryType, argumentBoundaryValue));
-    }
-}
-
-/**
- * An exception for values that can't be mated with any argument.
- */
-class UnmatchedArgumentException extends ArgumentException {
-    public UnmatchedArgumentException( ArgumentMatch invalidValues ) {
-        super( formatArguments(invalidValues) );
-    }
-
-    private static String formatArguments( ArgumentMatch invalidValues ) {
-        StringBuilder sb = new StringBuilder();
-        for( ArgumentMatchSite site: invalidValues.sites.keySet() )
-            for( ArgumentMatchValue value: invalidValues.sites.get(site) ) {
-                switch (site.getSource().getType()) {
-                    case CommandLine:
-                        sb.append( String.format("%nInvalid argument value '%s' at position %d.",
-                                value.asString(), site.getIndex()) );
-                        break;
-                    case Provider:
-                        sb.append( String.format("%nInvalid argument value '%s' in %s at position %d.",
-                                value.asString(), site.getSource().getDescription(), site.getIndex()) );
-                        break;
-                    default:
-                        throw new RuntimeException( String.format("Unexpected argument match source type: %s",
-                                site.getSource().getType()));
-                }
-                if(value.asString() != null && Utils.dupString(' ',value.asString().length()).equals(value.asString()))
-                    sb.append("  Please make sure any line continuation backslashes on your command line are not followed by whitespace.");
-            }
-        return sb.toString();
-    }
-}
-
-/**
- * An exception indicating that too many values have been provided for the given argument.
- */
-class TooManyValuesForArgumentException extends ArgumentException {
-    public TooManyValuesForArgumentException( Collection<ArgumentMatch> arguments ) {
-        super( formatArguments(arguments) );
-    }
-
-    private static String formatArguments( Collection<ArgumentMatch> arguments ) {
-        StringBuilder sb = new StringBuilder();
-        for( ArgumentMatch argument: arguments )
-            sb.append( String.format("%nArgument '%s' has too many values: %s.", argument.label, Arrays.deepToString(argument.values().toArray())) );
-        return sb.toString();
-    }
-}
-
-/**
- * An exception indicating that mutually exclusive options have been passed in the same command line.
- */
-class ArgumentsAreMutuallyExclusiveException extends ArgumentException {
-    public ArgumentsAreMutuallyExclusiveException( Collection<Pair<ArgumentMatch,ArgumentMatch>> arguments ) {
-        super( formatArguments(arguments) );
-    }
-
-    private static String formatArguments( Collection<Pair<ArgumentMatch,ArgumentMatch>> arguments ) {
-        StringBuilder sb = new StringBuilder();
-        for( Pair<ArgumentMatch,ArgumentMatch> argument: arguments )
-            sb.append( String.format("%nArguments '%s' and '%s' are mutually exclusive.", argument.first.definition.fullName, argument.second.definition.fullName ) );
-        return sb.toString();
-    }
-
-}
-
-
-/**
- * An exception for when an argument doesn't match an of the enumerated options for that var type
- */
-class UnknownEnumeratedValueException extends ArgumentException {
-    public UnknownEnumeratedValueException(ArgumentDefinition definition, String argumentPassed) {
-        super( formatArguments(definition,argumentPassed) );
-    }
-
-    private static String formatArguments(ArgumentDefinition definition, String argumentPassed) {
-        return String.format("Invalid value %s specified for argument %s; valid options are (%s).", argumentPassed, definition.fullName, Utils.join(",",definition.validOptions));
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/commandline/ParsingEngineArgumentFiles.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/commandline/ParsingEngineArgumentFiles.java
deleted file mode 100644
index 0361c4c..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/commandline/ParsingEngineArgumentFiles.java
+++ /dev/null
@@ -1,55 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.commandline;
-
-import java.io.File;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.SortedMap;
-
-/**
- * Container class to store the list of argument files.
- * The files will be parsed after the command line arguments.
- */
-public class ParsingEngineArgumentFiles extends ParsingEngineArgumentProvider {
-    @Argument(fullName = "arg_file", shortName = "args", doc = "Reads arguments from the specified file", required = false)
-    public List<File> files = new ArrayList<File>();
-
-    @Override
-    public void parse(ParsingEngine parsingEngine, SortedMap<ArgumentMatchSource, ParsedArgs> parsedArgs) {
-        ArgumentMatches argumentMatches = parsingEngine.getArgumentMatches();
-        for (File file: this.files) {
-            List<String> fileTokens = parsingEngine.getArguments(file);
-            parsingEngine.parse(new ArgumentMatchFileSource(file), fileTokens, argumentMatches, parsedArgs);
-        }
-    }
-}
-
-class ArgumentMatchFileSource extends ArgumentMatchSource {
-    ArgumentMatchFileSource(File file) {
-        super("file " + file.getAbsolutePath());
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/commandline/ParsingEngineArgumentProvider.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/commandline/ParsingEngineArgumentProvider.java
deleted file mode 100644
index d53a36c..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/commandline/ParsingEngineArgumentProvider.java
+++ /dev/null
@@ -1,37 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.commandline;
-
-import java.util.List;
-import java.util.SortedMap;
-
-/**
- * A class that can parse arguments for the engine
- */
-public abstract class ParsingEngineArgumentProvider {
-    public abstract void parse(ParsingEngine parsingEngine, SortedMap<ArgumentMatchSource, ParsedArgs> parsedArgs);
-}
-
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/commandline/ParsingMethod.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/commandline/ParsingMethod.java
deleted file mode 100644
index a939742..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/commandline/ParsingMethod.java
+++ /dev/null
@@ -1,127 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.commandline;
-
-import org.broadinstitute.gatk.utils.Utils;
-
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
-
-/**
- * Holds a pattern, along with how to get to the argument definitions that could match that pattern.
- */
-public abstract class ParsingMethod {
-    private final Pattern pattern;
-    private final DefinitionMatcher definitionMatcher;
-
-    /**
-     * Create a new parsing method with the given identifying / validating pattern and definition matcher.
-     * @param pattern The pattern
-     * @param definitionMatcher The definition matcher.
-     */
-    private ParsingMethod( Pattern pattern, DefinitionMatcher definitionMatcher ) {
-        this.pattern = pattern;
-        this.definitionMatcher = definitionMatcher;
-    }
-
-    /**
-     * Can the given token be parsed by this parsing method?
-     * @param token Token to validate.
-     * @return True if the given token matches.
-     */
-    public boolean matches( String token ) {
-        Matcher matcher = pattern.matcher(token);
-        return matcher.matches();        
-    }
-
-    /**
-     * Find the best match for a given token at a given position from among the provided
-     * argument definitions.
-     * @param definitions List of argument definitions.
-     * @param token The token from the command line to match.  Should be validated using
-     *              ParsingMethod's matches() tester.
-     * @param position Position at which this command-line argument occurs.  Will be used
-     *                 for validation later.
-     * @return An argument match.  Definition field will be populated if a match was found or
-     *         empty if no appropriate definition could be found. 
-     */
-    public ArgumentMatch match( ArgumentDefinitions definitions, String token, ArgumentMatchSite position ) {
-        // If the argument is valid, parse out the argument.
-        Matcher matcher = pattern.matcher(token);
-
-        // Didn't match?  Must be bad input.
-        if( !matcher.matches() )
-            throw new IllegalArgumentException( String.format("Unable to parse token %s with pattern %s", token, pattern.pattern()) );
-
-        String argument = matcher.group(1).trim();
-
-        Tags tags = parseTags(argument, matcher.group(2));
-
-        // Find the most appropriate argument definition for the given argument.
-        ArgumentDefinition argumentDefinition = definitions.findArgumentDefinition( argument, definitionMatcher );
-
-        // Try to find a matching argument.  If found, label that as the match.  If not found, add the argument
-        // with a null definition.
-        return new ArgumentMatch(argument,argumentDefinition,position,tags);
-    }
-
-    public static Tags parseTags(String argument, String tagString) {
-        Tags tags = new Tags();
-        if (tagString != null) {
-            for(String tag: Utils.split(tagString, ",")) {
-                // Check for presence of an '=' sign, indicating a key-value pair in the tag line.
-                int equalDelimiterPos = tag.indexOf('=');
-                if(equalDelimiterPos >= 0) {
-                    // Sanity check; ensure that there aren't multiple '=' in this key-value pair.
-                    if(tag.indexOf('=',equalDelimiterPos+1) >= 0)
-                        throw new ArgumentException(String.format("Tag %s passed to argument %s is malformed.  Please ensure that " +
-                                "key-value tags are of the form <key>=<value>, and neither key " +
-                                "nor value contain the '=' character", tag, argument));
-                    tags.addKeyValueTag(tag.substring(0,equalDelimiterPos),tag.substring(equalDelimiterPos+1));
-                }
-                else
-                    tags.addPositionalTag(tag);
-
-            }
-        }
-        return tags;
-    }
-
-    /**
-     * A command-line argument always starts with an alphabetical character or underscore followed by any word character.
-     */
-    private static final String ARGUMENT_TEXT = "[A-Za-z_][\\w\\-\\.]*";
-
-    /**
-     * Tags, on the other hand, can start with any word character.
-     */
-    private static final String TAG_TEXT = "[\\w\\-\\.\\=]*";
-
-    public static final ParsingMethod FullNameParsingMethod = new ParsingMethod(Pattern.compile(String.format("\\s*--(%1$s)(?:\\:(%2$s(?:,%2$s)*))?\\s*",ARGUMENT_TEXT,TAG_TEXT)),
-                                                                          ArgumentDefinitions.FullNameDefinitionMatcher) {};
-    public static final ParsingMethod ShortNameParsingMethod = new ParsingMethod(Pattern.compile(String.format("\\s*-(%1$s)(?:\\:(%2$s(?:,%2$s)*))?\\s*",ARGUMENT_TEXT,TAG_TEXT)),
-                                                                           ArgumentDefinitions.ShortNameDefinitionMatcher) {};
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/commandline/RodBinding.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/commandline/RodBinding.java
deleted file mode 100644
index 2c55787..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/commandline/RodBinding.java
+++ /dev/null
@@ -1,197 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.commandline;
-
-import com.google.java.contract.Ensures;
-import com.google.java.contract.Requires;
-import htsjdk.tribble.Feature;
-
-import java.util.*;
-
-/**
- * A RodBinding represents a walker argument that gets bound to a ROD track.
- *
- * The RodBinding<T> is a formal GATK argument that bridges between a walker and
- * the RefMetaDataTracker to obtain data about this rod track at runtime.  The RodBinding
- * is explicitly typed with type of the Tribble.Feature expected to be produced by this
- * argument.  The GATK Engine takes care of initializing the binding and connecting it
- * to the RMD system.
- *
- * It is recommended that optional RodBindings be initialized to the value returned
- * by the static method makeUnbound().
- *
- * Note that this class is immutable.
- */
-public final class RodBinding<T extends Feature> {
-    protected final static String UNBOUND_VARIABLE_NAME = "";
-    protected final static String UNBOUND_SOURCE = "UNBOUND";
-    protected final static String UNBOUND_TRIBBLE_TYPE = "";
-
-    /**
-     * Create an unbound Rodbinding of type.  This is the correct programming
-     * style for an optional RodBinding<T>
-     *
-     *     At Input()
-     *     RodBinding<T> x = RodBinding.makeUnbound(T.class)
-     *
-     * The unbound binding is guaranteed to never match any binding.  It uniquely
-     * returns false to isBound().
-     *
-     * @param type the Class type produced by this unbound object
-     * @param <T> any class extending Tribble Feature
-     * @return the UNBOUND RodBinding producing objects of type T
-     */
-    @Requires("type != null")
-    protected final static <T extends Feature> RodBinding<T> makeUnbound(Class<T> type) {
-        return new RodBinding<T>(type);
-    }
-
-    /** The name of this binding.  Often the name of the field itself, but can be overridden on cmdline */
-    final private String name;
-    /** where the data for this ROD is coming from.  A file or special value if coming from stdin */
-    final private String source;
-    /** the string name of the tribble type, such as vcf, bed, etc. */
-    final private String tribbleType;
-    /** The command line tags associated with this RodBinding */
-    final private Tags tags;
-    /** The Java class expected for this RodBinding.  Must correspond to the type emitted by Tribble */
-    final private Class<T> type;
-    /** True for all RodBindings except the special UNBOUND binding, which is the default for optional arguments */
-    final private boolean bound;
-
-    /**
-     * The name counter.  This is how we create unique names for collections of RodBindings
-     * on the command line.  If you have provide the GATK with -X file1 and -X file2 to a
-     * RodBinding argument as List<RodBinding<T>> then each binding will receive automatically
-     * the name of X and X2.
-     */
-    final private static Map<String, Integer> nameCounter = new HashMap<String, Integer>();
-
-    /** for UnitTests */
-    final public static void resetNameCounter() {
-        nameCounter.clear();
-    }
-
-    @Requires("rawName != null")
-    @Ensures("result != null")
-    final private static synchronized String countedVariableName(final String rawName) {
-        Integer count = nameCounter.get(rawName);
-        if ( count == null ) {
-            nameCounter.put(rawName, 1);
-            return rawName;
-        } else {
-            nameCounter.put(rawName, count + 1);
-            return rawName + (count + 1);
-        }
-    }
-
-    @Requires({"type != null", "rawName != null", "source != null", "tribbleType != null", "tags != null"})
-    public RodBinding(Class<T> type, final String rawName, final String source, final String tribbleType, final Tags tags) {
-        this.type = type;
-        this.name = countedVariableName(rawName);
-        this.source = source;
-        this.tribbleType = tribbleType;
-        this.tags = tags;
-        this.bound = true;
-    }
-
-    /**
-     * For testing purposes only.  Creates a RodBinding sufficient for looking up associations to rawName
-     * @param type
-     * @param rawName
-     */
-    public RodBinding(Class<T> type, final String rawName) {
-        this(type, rawName, "missing", type.getSimpleName(), new Tags());
-    }
-
-    /**
-     * Make an unbound RodBinding<T>.  Only available for creating the globally unique UNBOUND object
-     * @param type class this unbound RodBinding creates
-     */
-    @Requires({"type != null"})
-    private RodBinding(Class<T> type) {
-        this.type = type;
-        this.name = UNBOUND_VARIABLE_NAME;  // special value can never be found in RefMetaDataTracker
-        this.source = UNBOUND_SOURCE;
-        this.tribbleType = UNBOUND_TRIBBLE_TYPE;
-        this.tags = new Tags();
-        this.bound = false;
-    }
-
-
-   /**
-     * @return True for all RodBindings except the special UNBOUND binding, which is the default for optional arguments
-     */
-    final public boolean isBound() {
-        return bound;
-    }
-
-    /**
-     * @return The name of this binding.  Often the name of the field itself, but can be overridden on cmdline
-     */
-    @Ensures({"result != null"})
-    final public String getName() {
-        return name;
-    }
-
-    /**
-     * @return the string name of the tribble type, such as vcf, bed, etc.
-     */
-    @Ensures({"result != null"})
-    final public Class<T> getType() {
-        return type;
-    }
-
-    /**
-     * @return where the data for this ROD is coming from.  A file or special value if coming from stdin
-     */
-    @Ensures({"result != null"})
-    final public String getSource() {
-        return source;
-    }
-
-    /**
-     * @return The command line tags associated with this RodBinding.  Will include the tags used to
-     * determine the name and type of this RodBinding
-     */
-    @Ensures({"result != null"})
-    final public Tags getTags() {
-        return tags;
-    }
-
-    /**
-     * @return The Java class expected for this RodBinding.  Must correspond to the type emited by Tribble
-     */
-    @Ensures({"result != null"})
-    final public String getTribbleType() {
-        return tribbleType;
-    }
-
-    @Override
-    public String toString() {
-        return String.format("(RodBinding name=%s source=%s)", getName(), getSource());
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/commandline/RodBindingCollection.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/commandline/RodBindingCollection.java
deleted file mode 100644
index faf4565..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/commandline/RodBindingCollection.java
+++ /dev/null
@@ -1,89 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.commandline;
-
-import com.google.java.contract.Ensures;
-import htsjdk.tribble.Feature;
-import org.broadinstitute.gatk.utils.exceptions.UserException;
-
-import java.lang.reflect.Constructor;
-import java.lang.reflect.InvocationTargetException;
-import java.util.*;
-
-/**
- * A RodBindingCollection represents a collection of RodBindings.
- *
- * The RodBindingCollection<T> is a formal GATK argument that is used to specify a file of RodBindings.
- *
- */
-public final class RodBindingCollection<T extends Feature> {
-
-    /** The Java class expected for this RodBinding.  Must correspond to the type emitted by Tribble */
-    final private Class<T> type;
-
-    private Collection<RodBinding<T>> rodBindings;
-
-    public RodBindingCollection(final Class<T> type, final Collection<RodBinding<T>> rodBindings) {
-        this.type = type;
-        this.rodBindings = Collections.unmodifiableCollection(rodBindings);
-    }
-
-    /**
-     * @return the collection of RodBindings
-     */
-    final public Collection<RodBinding<T>> getRodBindings() {
-        return rodBindings;
-    }
-
-    /**
-     * @return the string name of the tribble type, such as vcf, bed, etc.
-     */
-    @Ensures({"result != null"})
-    final public Class<T> getType() {
-        return type;
-    }
-
-    @Override
-    public String toString() {
-        return String.format("(RodBindingCollection %s)", getRodBindings());
-    }
-
-    /**
-     * Utility method to help construct a RodBindingCollection of the given Feature type
-     *
-     * @param type         the Feature type
-     * @param rodBindings  the rod bindings to put into the collection
-     * @return a new RodBindingCollection object
-     */
-    public static Object createRodBindingCollectionOfType(final Class<? extends Feature> type, final Collection<RodBinding> rodBindings) {
-        try {
-            final Constructor ctor = RodBindingCollection.class.getConstructor(Class.class, Collection.class);
-            return ctor.newInstance(type, rodBindings);
-        } catch (final Exception e) {
-            throw new IllegalStateException("Failed to create a RodBindingCollection for type " + type);
-        }
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/commandline/Tags.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/commandline/Tags.java
deleted file mode 100644
index 2b1c7f7..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/commandline/Tags.java
+++ /dev/null
@@ -1,112 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.commandline;
-
-import java.util.*;
-
-/**
- * Models the tags that can appear after command-line arguments
- * in the GATK.
- */
-public class Tags {
-    /**
-     * Storage for the ordered, unkeyed, positional tags.
-     */
-    private final List<String> positionalTags = new ArrayList<String>();
-
-    /**
-     * Storage for key-value tags of the form <key>=<value>
-     */
-    private Map<String,String> keyValueTags = new HashMap<String,String>();
-
-    /**
-     * Tests to see whether two tag sets are equal.
-     * @param other Other object to test for equality.
-     * @return True if objects are the same.  False if objects differ.
-     */
-    @Override
-    public boolean equals(Object other) {
-        if(other == null)
-            return false;
-
-        if(!(other instanceof Tags))
-            return false;
-
-        Tags otherTags = (Tags)other;
-        return this.positionalTags.equals(otherTags.positionalTags) && this.keyValueTags.equals(otherTags.keyValueTags);
-    }
-
-    /**
-     * Returns whether any tags are specified on the command-line for this operation.
-     * @return True if the tags are empty; false otherwise.
-     */
-    public boolean isEmpty() {
-        return positionalTags.isEmpty() && keyValueTags.isEmpty();
-    }
-
-    /**
-     * Retrieves the list of all positional tags associated with this argument.
-     * @return A list of positional tags.
-     */
-    public List<String> getPositionalTags() {
-        return Collections.unmodifiableList(positionalTags);
-    }
-
-    /**
-     * Gets the value associated with a given <key>=<value> argument tag.
-     * @param key The key for which to retrieve the value.
-     * @return The value paired with the given key, or null if no such element exists.
-     */
-    public String getValue(final String key) {
-        return keyValueTags.get(key);
-    }
-
-    /**
-     * Returns true if tags contains given key
-     * @param key The key for which to check existence.
-     * @return true if tags contains given key
-     */
-    public boolean containsKey(final String key) {
-        return keyValueTags.containsKey(key);
-    }
-
-    /**
-     * Adds positional tag(s) to the tag object.
-     * @param tags The tag strings to add.
-     */
-    protected void addPositionalTag(final String... tags) {
-        positionalTags.addAll(Arrays.asList(tags));
-    }
-
-    /**
-     * Adds a <key>-<value> tag to this tag library.
-     * @param key key tag to add.
-     * @param value value to associate with this key.
-     */
-    protected void addKeyValueTag(final String key, final String value) {
-        keyValueTags.put(key,value);
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/commandline/package-info.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/commandline/package-info.java
deleted file mode 100644
index f572d34..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/commandline/package-info.java
+++ /dev/null
@@ -1,26 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.commandline;
\ No newline at end of file
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/crypt/CryptUtils.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/crypt/CryptUtils.java
deleted file mode 100644
index d6ccd32..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/crypt/CryptUtils.java
+++ /dev/null
@@ -1,391 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.crypt;
-
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-import org.broadinstitute.gatk.utils.io.IOUtils;
-
-import javax.crypto.Cipher;
-import java.io.File;
-import java.io.InputStream;
-import java.security.*;
-import java.security.spec.InvalidKeySpecException;
-import java.security.spec.KeySpec;
-import java.security.spec.PKCS8EncodedKeySpec;
-import java.security.spec.X509EncodedKeySpec;
-import java.util.Arrays;
-
-/**
- * A set of cryptographic utility methods and constants.
- *
- * Contains methods to:
- *
- * -Create a public/private key pair
- * -Read and write public/private keys to/from files/streams
- * -Load the GATK master private/public keys
- * -Encrypt/decrypt data
- *
- * Also contains constants that control the cryptographic defaults
- * throughout the GATK.
- *
- * @author David Roazen
- */
-public class CryptUtils {
-
-    // ---------------------------------------------------------------------------------
-    // Constants (these control the default cryptographic settings throughout the GATK):
-    // ---------------------------------------------------------------------------------
-
-    /**
-     * Default key length in bits of newly-created keys. 2048 bits provides a good balance between
-     * security and speed.
-     */
-    public static final int DEFAULT_KEY_LENGTH = 2048;
-
-    /**
-     * Default encryption algorithm to use, when none is specified.
-     */
-    public static final String DEFAULT_ENCRYPTION_ALGORITHM = "RSA";
-
-    /**
-     * Default random-number generation algorithm to use, when none is specified.
-     */
-    public static final String DEFAULT_RANDOM_NUMBER_GENERATION_ALGORITHM = "SHA1PRNG";
-
-    /**
-     * Name of the public key file distributed with the GATK. This file is packaged
-     * into the GATK jar, and we use the system ClassLoader to find it.
-     */
-    public static final String GATK_DISTRIBUTED_PUBLIC_KEY_FILE_NAME = "GATK_public.key";
-
-    /**
-     * Location of the master copy of the GATK private key.
-     */
-    public static final String GATK_MASTER_PRIVATE_KEY_FILE = "/humgen/gsa-hpprojects/GATK/data/gatk_master_keys/GATK_private.key";
-
-    /**
-     * Location of the master copy of the GATK public key. This file should always be the same as
-     * the public key file distributed with the GATK (and there are automated tests to ensure that it is).
-     */
-    public static final String GATK_MASTER_PUBLIC_KEY_FILE =  "/humgen/gsa-hpprojects/GATK/data/gatk_master_keys/GATK_public.key";
-
-    /**
-     * Directory where generated GATK user keys are stored. See the GATKKey class for more information.
-     */
-    public static final String GATK_USER_KEY_DIRECTORY =      "/humgen/gsa-hpprojects/GATK/data/gatk_user_keys/";
-
-
-    // -----------------------
-    // Utility Methods:
-    // -----------------------
-
-    /**
-     * Generate a new public/private key pair using the default encryption settings defined above.
-     *
-     * @return A new public/private key pair created using the default settings
-     */
-    public static KeyPair generateKeyPair() {
-        return generateKeyPair(DEFAULT_KEY_LENGTH, DEFAULT_ENCRYPTION_ALGORITHM, DEFAULT_RANDOM_NUMBER_GENERATION_ALGORITHM);
-    }
-
-    /**
-     * Generate a new public/private key pair using custom encryption settings.
-     *
-     * @param keyLength Length of the key in bits
-     * @param encryptionAlgorithm Encryption algorithm to use
-     * @param randNumberAlgorithm Random-number generation algorithm to use
-     * @return A new public/private key pair, created according to the specified parameters
-     */
-    public static KeyPair generateKeyPair( int keyLength, String encryptionAlgorithm, String randNumberAlgorithm ) {
-        try {
-            KeyPairGenerator keyGen = KeyPairGenerator.getInstance(encryptionAlgorithm);
-            SecureRandom randomnessSource = createRandomnessSource(randNumberAlgorithm);
-
-            keyGen.initialize(keyLength, randomnessSource);
-            return keyGen.generateKeyPair();
-        }
-        catch ( NoSuchAlgorithmException e ) {
-            throw new ReviewedGATKException(String.format("Could not find an implementation of the requested encryption algorithm %s", encryptionAlgorithm), e);
-        }
-        catch ( Exception e ) {
-            throw new ReviewedGATKException("Error while generating key pair", e);
-        }
-    }
-
-    /**
-     * Create a source of randomness using the default random-number generation algorithm.
-     *
-     * @return A randomness source that uses the default algorithm
-     */
-    public static SecureRandom createRandomnessSource() {
-        return createRandomnessSource(DEFAULT_RANDOM_NUMBER_GENERATION_ALGORITHM);
-    }
-
-    /**
-     * Create a source of randomness using a custom random-number generation algorithm.
-     *
-     * @param randAlgorithm The random-number generation algorithm to use
-     * @return A randomness sources that uses the specified algorithm
-     */
-    public static SecureRandom createRandomnessSource ( String randAlgorithm ) {
-        try {
-            return SecureRandom.getInstance(randAlgorithm);
-        }
-        catch ( NoSuchAlgorithmException e ) {
-            throw new ReviewedGATKException(String.format("Could not find an implementation of the requested random-number generation algorithm %s", randAlgorithm), e);
-        }
-    }
-
-    /**
-     * Writes a public/private key pair to disk
-     *
-     * @param keyPair The key pair we're writing to disk
-     * @param privateKeyFile Location to write the private key
-     * @param publicKeyFile Location to write the public key
-     */
-    public static void writeKeyPair ( KeyPair keyPair, File privateKeyFile, File publicKeyFile ) {
-        writeKey(keyPair.getPrivate(), privateKeyFile);
-        writeKey(keyPair.getPublic(), publicKeyFile);
-    }
-
-    /**
-     * Writes an arbitrary key to disk
-     *
-     * @param key The key to write
-     * @param destination Location to write the key to
-     */
-    public static void writeKey ( Key key, File destination ) {
-        IOUtils.writeByteArrayToFile(key.getEncoded(), destination);
-    }
-
-    /**
-     * Reads in a public key created using the default encryption algorithm from a file.
-     *
-     * @param source File containing the public key
-     * @return The public key read
-     */
-    public static PublicKey readPublicKey ( File source ) {
-        return decodePublicKey(IOUtils.readFileIntoByteArray(source), DEFAULT_ENCRYPTION_ALGORITHM);
-    }
-
-    /**
-     * Reads in a public key created using the default encryption algorithm from a stream.
-     *
-     * @param source Stream attached to the public key
-     * @return The public key read
-     */
-    public static PublicKey readPublicKey ( InputStream source ) {
-        return decodePublicKey(IOUtils.readStreamIntoByteArray(source), DEFAULT_ENCRYPTION_ALGORITHM);
-    }
-
-    /**
-     * Decodes the raw bytes of a public key into a usable object.
-     *
-     * @param rawKey The encoded bytes of a public key as read from, eg., a file. The
-     *               key must be in the standard X.509 format for a public key.
-     * @param encryptionAlgorithm The encryption algorithm used to create the public key
-     * @return The public key as a usable object
-     */
-    public static PublicKey decodePublicKey ( byte[] rawKey, String encryptionAlgorithm ) {
-        try {
-            KeySpec keySpec = new X509EncodedKeySpec(rawKey);
-            KeyFactory keyFactory = KeyFactory.getInstance(encryptionAlgorithm);
-            return keyFactory.generatePublic(keySpec);
-        }
-        catch ( NoSuchAlgorithmException e ) {
-            throw new ReviewedGATKException(String.format("Could not find an implementation of the requested encryption algorithm %s", encryptionAlgorithm), e);
-        }
-        catch ( InvalidKeySpecException e ) {
-            throw new ReviewedGATKException("Unable to use X.509 key specification to decode the given key", e);
-        }
-    }
-
-    /**
-     * Reads in a private key created using the default encryption algorithm from a file.
-     *
-     * @param source File containing the private key
-     * @return The private key read
-     */
-    public static PrivateKey readPrivateKey ( File source ) {
-        return decodePrivateKey(IOUtils.readFileIntoByteArray(source), DEFAULT_ENCRYPTION_ALGORITHM);
-    }
-
-    /**
-     * Reads in a private key created using the default encryption algorithm from a stream.
-     *
-     * @param source Stream attached to the private key
-     * @return The private key read
-     */
-    public static PrivateKey readPrivateKey ( InputStream source ) {
-        return decodePrivateKey(IOUtils.readStreamIntoByteArray(source), DEFAULT_ENCRYPTION_ALGORITHM);
-    }
-
-    /**
-     * Decodes the raw bytes of a private key into a usable object.
-     *
-     * @param rawKey The encoded bytes of a private key as read from, eg., a file. The
-     *               key must be in the standard PKCS #8 format for a private key.
-     * @param encryptionAlgorithm The encryption algorithm used to create the private key
-     * @return The private key as a usable object
-     */
-    public static PrivateKey decodePrivateKey ( byte[] rawKey, String encryptionAlgorithm ) {
-        try {
-            KeySpec keySpec = new PKCS8EncodedKeySpec(rawKey);
-            KeyFactory keyFactory = KeyFactory.getInstance(encryptionAlgorithm);
-            return keyFactory.generatePrivate(keySpec);
-        }
-        catch ( NoSuchAlgorithmException e ) {
-            throw new ReviewedGATKException(String.format("Could not find an implementation of the requested encryption algorithm %s", encryptionAlgorithm), e);
-        }
-        catch ( InvalidKeySpecException e ) {
-            throw new ReviewedGATKException("Unable to use the PKCS #8 key specification to decode the given key", e);
-        }
-    }
-
-    /**
-     * Loads the copy of the GATK public key that is distributed with the GATK. Uses the system
-     * ClassLoader to locate the public key file, which should be stored at the root of the GATK
-     * jar file.
-     *
-     * @return The GATK public key as a usable object
-     */
-    public static PublicKey loadGATKDistributedPublicKey() {
-        InputStream publicKeyInputStream = ClassLoader.getSystemResourceAsStream(GATK_DISTRIBUTED_PUBLIC_KEY_FILE_NAME);
-
-        if ( publicKeyInputStream == null ) {
-            throw new ReviewedGATKException(String.format("Could not locate the GATK public key %s in the classpath",
-                                                           GATK_DISTRIBUTED_PUBLIC_KEY_FILE_NAME));
-        }
-
-        return readPublicKey(publicKeyInputStream);
-    }
-
-    /**
-     * Loads the master copy of the GATK private key. You must have the appropriate UNIX permissions
-     * to do this!
-     *
-     * @return The GATK master private key as a usable object
-     */
-    public static PrivateKey loadGATKMasterPrivateKey() {
-        return readPrivateKey(new File(GATK_MASTER_PRIVATE_KEY_FILE));
-    }
-
-    /**
-     * Loads the master copy of the GATK public key. This should always be the same as the
-     * public key distributed with the GATK returned by loadGATKDistributedPublicKey().
-     *
-     * @return The GATK master public key as a usable object
-     */
-    public static PublicKey loadGATKMasterPublicKey() {
-        return readPublicKey(new File(GATK_MASTER_PUBLIC_KEY_FILE));
-    }
-
-    /**
-     * Encrypts the given data using the key provided.
-     *
-     * @param data The data to encrypt, as a byte array
-     * @param encryptKey The key with which to encrypt the data
-     * @return The encrypted version of the provided data
-     */
-    public static byte[] encryptData ( byte[] data, Key encryptKey ) {
-        return transformDataUsingCipher(data, encryptKey, Cipher.ENCRYPT_MODE);
-    }
-
-    /**
-     * Decrypts the given data using the key provided.
-     *
-     * @param encryptedData Data to decrypt, as a byte array
-     * @param decryptKey The key with which to decrypt the data
-     * @return The decrypted version of the provided data
-     */
-    public static byte[] decryptData ( byte[] encryptedData, Key decryptKey ) {
-        return transformDataUsingCipher(encryptedData, decryptKey, Cipher.DECRYPT_MODE);
-    }
-
-    /**
-     * Helper method for encryption/decryption that takes data and processes it using
-     * the given key
-     *
-     * @param data Data to encrypt/decrypt
-     * @param key Key to use to encrypt/decrypt the data
-     * @param cipherMode Specifies whether we are encrypting or decrypting
-     * @return The encrypted/decrypted data
-     */
-    private static byte[] transformDataUsingCipher ( byte[] data, Key key, int cipherMode ) {
-        try {
-            Cipher cipher = Cipher.getInstance(key.getAlgorithm());
-            cipher.init(cipherMode, key);
-            return cipher.doFinal(data);
-        }
-        catch ( NoSuchAlgorithmException e ) {
-            throw new ReviewedGATKException(String.format("Could not find an implementation of the requested algorithm %s",
-                                             key.getAlgorithm()), e);
-        }
-        catch ( InvalidKeyException e ) {
-            throw new ReviewedGATKException("Key is invalid", e);
-        }
-        catch ( GeneralSecurityException e ) {
-            throw new ReviewedGATKException("Error during encryption", e);
-        }
-    }
-
-    /**
-     * Tests whether the public/private keys provided can each decrypt data encrypted by
-     * the other key -- ie., tests whether these two keys are part of the same public/private
-     * key pair.
-     *
-     * @param privateKey The private key to test
-     * @param publicKey The public key to test
-     * @return True if the keys are part of the same key pair and can decrypt each other's
-     *         encrypted data, otherwise false.
-     */
-    public static boolean keysDecryptEachOther ( PrivateKey privateKey, PublicKey publicKey ) {
-        byte[] plainText = "Test PlainText".getBytes();
-
-        byte[] dataEncryptedUsingPrivateKey = CryptUtils.encryptData(plainText, privateKey);
-        byte[] dataEncryptedUsingPublicKey = CryptUtils.encryptData(plainText, publicKey);
-
-        byte[] privateKeyDataDecryptedWithPublicKey = CryptUtils.decryptData(dataEncryptedUsingPrivateKey, publicKey);
-        byte[] publicKeyDataDecryptedWithPrivateKey = CryptUtils.decryptData(dataEncryptedUsingPublicKey, privateKey);
-
-        // Make sure we actually transformed the data during encryption:
-        if ( Arrays.equals(plainText, dataEncryptedUsingPrivateKey) ||
-             Arrays.equals(plainText, dataEncryptedUsingPublicKey) ||
-             Arrays.equals(dataEncryptedUsingPrivateKey, dataEncryptedUsingPublicKey) ) {
-            return false;
-        }
-
-        // Make sure that we were able to recreate the original plaintext using
-        // both the public key on the private-key-encrypted data and the private
-        // key on the public-key-encrypted data:
-        if ( ! Arrays.equals(plainText, privateKeyDataDecryptedWithPublicKey) ||
-             ! Arrays.equals(plainText, publicKeyDataDecryptedWithPrivateKey) ) {
-            return false;
-        }
-
-        return true;
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/crypt/GATKKey.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/crypt/GATKKey.java
deleted file mode 100644
index ab21a2a..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/crypt/GATKKey.java
+++ /dev/null
@@ -1,350 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.crypt;
-
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-import org.broadinstitute.gatk.utils.exceptions.UserException;
-import org.broadinstitute.gatk.utils.io.IOUtils;
-
-import java.io.*;
-import java.security.*;
-import java.util.zip.GZIPInputStream;
-import java.util.zip.GZIPOutputStream;
-
-/**
- * Class to represent a GATK user key.
- *
- * A GATK user key contains an email address and a cryptographic signature.
- * The signature is the SHA-1 hash of the email address encrypted using
- * the GATK master private key. The GATK master public key (distributed
- * with the GATK) is used to decrypt the signature and validate the key
- * at the start of each GATK run that requires a key.
- *
- * Keys are cryptographically secure in that valid keys definitely come
- * from us and cannot be fabricated, however nothing prevents keys from
- * being shared between users.
- *
- * GATK user keys have the following on-disk format:
- *
- *     GZIP Container:
- *         Email address
- *         NUL byte (delimiter)
- *         Cryptographic Signature (encrypted SHA-1 hash of email address)
- *
- * The key data is wrapped within a GZIP container to placate over-zealous
- * email filters (since keys must often be emailed) and also to provide an
- * additional integrity check via the built-in GZIP CRC.
- *
- * @author David Roazen
- */
-public class GATKKey {
-
-    /**
-     * Private key used to sign the GATK key. Required only when creating a new
-     * key from scratch, not when loading an existing key from disk.
-     */
-    private PrivateKey privateKey;
-
-    /**
-     * Public key used to validate the GATK key.
-     */
-    private PublicKey publicKey;
-
-    /**
-     * The user's email address, stored within the key and signed.
-     */
-    private String emailAddress;
-
-    /**
-     * The cryptographic signature of the email address. By default, this is
-     * the SHA-1 hash of the email address encrypted using the RSA algorithm.
-     */
-    private byte[] signature;
-
-    /**
-     * The combination of hash/encryption algorithms to use to generate the signature.
-     * By default this is "SHA1withRSA"
-     */
-    private String signingAlgorithm;
-
-    /**
-     * Default hash/encryption algorithms to use to sign the key.
-     */
-    public static final String DEFAULT_SIGNING_ALGORITHM = "SHA1withRSA";
-
-    /**
-     * Byte value used to separate the email address from its signature in the key file.
-     */
-    public static final byte GATK_KEY_SECTIONAL_DELIMITER = 0;
-
-
-    // -----------------------
-    // Constructors:
-    // -----------------------
-
-    /**
-     * Constructor to create a new GATK key from scratch using an email address
-     * and public/private key pair. The private key is used for signing, and the
-     * public key is used to validate the newly-created key.
-     *
-     * @param privateKey Private key used to sign the new GATK key
-     * @param publicKey Public key used to validate the new GATK key
-     * @param emailAddress The user's email address, which we will store in the key and sign
-     */
-    public GATKKey ( PrivateKey privateKey, PublicKey publicKey, String emailAddress ) {
-        this(privateKey, publicKey, emailAddress, DEFAULT_SIGNING_ALGORITHM);
-    }
-
-    /**
-     * Constructor to create a new GATK key from scratch using an email address
-     * and public/private key pair, and additionally specify the signing algorithm
-     * to use. The private key is used for signing, and the public key is used to
-     * validate the newly-created key.
-     *
-     * @param privateKey Private key used to sign the new GATK key
-     * @param publicKey Public key used to validate the new GATK key
-     * @param emailAddress The user's email address, which we will store in the key and sign
-     * @param signingAlgorithm The combination of hash and encryption algorithms to use to sign the key
-     */
-    public GATKKey ( PrivateKey privateKey, PublicKey publicKey, String emailAddress, String signingAlgorithm ) {
-        if ( privateKey == null || publicKey == null || emailAddress == null || emailAddress.length() == 0 || signingAlgorithm == null ) {
-            throw new ReviewedGATKException("Cannot construct GATKKey using null/empty arguments");
-        }
-
-        this.privateKey = privateKey;
-        this.publicKey = publicKey;
-        this.emailAddress = emailAddress;
-        this.signingAlgorithm = signingAlgorithm;
-
-        validateEmailAddress();
-        generateSignature();
-
-        if ( ! isValid() ) {
-            throw new ReviewedGATKException("Newly-generated GATK key fails validation -- this should never happen!");
-        }
-    }
-
-    /**
-     * Constructor to load an existing GATK key from a file.
-     *
-     * During loading, the key file is checked for integrity, but not cryptographic
-     * validity (which must be done through a subsequent call to isValid()).
-     *
-     * @param publicKey Public key that will be used to validate the loaded GATK key
-     *                  in subsequent calls to isValid()
-     * @param keyFile File containing the GATK key to load
-     */
-    public GATKKey ( PublicKey publicKey, File keyFile ) {
-        this(publicKey, keyFile, DEFAULT_SIGNING_ALGORITHM);
-    }
-
-    /**
-     * Constructor to load an existing GATK key from a file, and additionally specify
-     * the signing algorithm used to sign the key being loaded.
-     *
-     * During loading, the key file is checked for integrity, but not cryptographic
-     * validity (which must be done through a subsequent call to isValid()).
-     *
-     * @param publicKey Public key that will be used to validate the loaded GATK key
-     *                  in subsequent calls to isValid()
-     * @param keyFile File containing the GATK key to load
-     * @param signingAlgorithm The combination of hash and encryption algorithms used to sign the key
-     */
-    public GATKKey ( PublicKey publicKey, File keyFile, String signingAlgorithm ) {
-        if ( publicKey == null || keyFile == null || signingAlgorithm == null ) {
-            throw new ReviewedGATKException("Cannot construct GATKKey using null arguments");
-        }
-
-        this.publicKey = publicKey;
-        this.signingAlgorithm = signingAlgorithm;
-
-        readKey(keyFile);
-    }
-
-    // -----------------------
-    // Public API Methods:
-    // -----------------------
-
-    /**
-     * Writes out this key to a file in the format described at the top of this class,
-     * encapsulating the key within a GZIP container.
-     *
-     * @param destination File to write the key to
-     */
-    public void writeKey ( File destination ) {
-        try {
-            byte[] keyBytes = marshalKeyData();
-            IOUtils.writeByteArrayToStream(keyBytes, new GZIPOutputStream(new FileOutputStream(destination)));
-        }
-        catch ( IOException e ) {
-            throw new UserException.CouldNotCreateOutputFile(destination, e);
-        }
-    }
-
-    /**
-     * Checks whether the signature of this key is cryptographically valid (ie., can be
-     * decrypted by the public key to produce a valid SHA-1 hash of the email address
-     * in the key).
-     *
-     * @return True if the key's signature passes validation, otherwise false
-     */
-    public boolean isValid() {
-        try {
-            Signature sig = Signature.getInstance(signingAlgorithm);
-            sig.initVerify(publicKey);
-            sig.update(emailAddress.getBytes());
-            return sig.verify(signature);
-        }
-        catch ( NoSuchAlgorithmException e ) {
-            throw new ReviewedGATKException(String.format("Signing algorithm %s not found", signingAlgorithm), e);
-        }
-        catch ( InvalidKeyException e ) {
-            // If the GATK public key is invalid, it's likely our problem, not the user's:
-            throw new ReviewedGATKException(String.format("Public key %s is invalid", publicKey), e);
-        }
-        catch ( SignatureException e ) {
-            throw new UserException.UnreadableKeyException("Signature is invalid or signing algorithm was unable to process the input data", e);
-        }
-    }
-
-    // -----------------------
-    // Private Helper Methods:
-    // -----------------------
-
-    /**
-     * Helper method that creates a signature for this key using the combination of
-     * hash/encryption algorithms specified at construction time.
-     */
-    private void generateSignature() {
-        try {
-            Signature sig = Signature.getInstance(signingAlgorithm);
-            sig.initSign(privateKey, CryptUtils.createRandomnessSource());
-            sig.update(emailAddress.getBytes());
-            signature = sig.sign();
-        }
-        catch ( NoSuchAlgorithmException e ) {
-            throw new ReviewedGATKException(String.format("Signing algorithm %s not found", signingAlgorithm), e);
-        }
-        catch ( InvalidKeyException e ) {
-            throw new ReviewedGATKException(String.format("Private key %s is invalid", privateKey), e);
-        }
-        catch ( SignatureException e ) {
-            throw new ReviewedGATKException(String.format("Error creating signature for email address %s", emailAddress), e);
-        }
-    }
-
-    /**
-     * Helper method that reads in a GATK key from a file. Should not be called directly --
-     * use the appropriate constructor above.
-     *
-     * @param source File to read the key from
-     */
-    private void readKey ( File source ) {
-        try {
-            byte[] keyBytes = IOUtils.readStreamIntoByteArray(new GZIPInputStream(new FileInputStream(source)));
-
-            // As a sanity check, compare the number of bytes read to the uncompressed file size
-            // stored in the GZIP ISIZE field. If they don't match, the key must be corrupt:
-            if ( keyBytes.length != IOUtils.getGZIPFileUncompressedSize(source) ) {
-                throw new UserException.UnreadableKeyException("Number of bytes read does not match the uncompressed size specified in the GZIP ISIZE field");
-            }
-
-            unmarshalKeyData(keyBytes);
-        }
-        catch ( FileNotFoundException e ) {
-            throw new UserException.CouldNotReadInputFile(source, e);
-        }
-        catch ( IOException e ) {
-            throw new UserException.UnreadableKeyException(source, e);
-        }
-        catch ( UserException.CouldNotReadInputFile e ) {
-            throw new UserException.UnreadableKeyException(source, e);
-        }
-    }
-
-    /**
-     * Helper method that assembles the email address and signature into a format
-     * suitable for writing to disk.
-     *
-     * @return The aggregated key data, ready to be written to disk
-     */
-    private byte[] marshalKeyData() {
-        byte[] emailAddressBytes = emailAddress.getBytes();
-        byte[] assembledKey = new byte[emailAddressBytes.length + 1 + signature.length];
-
-        System.arraycopy(emailAddressBytes, 0, assembledKey, 0, emailAddressBytes.length);
-        assembledKey[emailAddressBytes.length] = GATK_KEY_SECTIONAL_DELIMITER;
-        System.arraycopy(signature, 0, assembledKey, emailAddressBytes.length + 1, signature.length);
-
-        return assembledKey;
-    }
-
-    /**
-     * Helper method that parses the raw key data from disk into its component
-     * email address and signature. Performs some basic validation in the process.
-     *
-     * @param keyBytes The raw, uncompressed key data read from disk
-     */
-    private void unmarshalKeyData ( byte[] keyBytes ) {
-        int delimiterPosition = -1;
-
-        for ( int i = 0; i < keyBytes.length; i++ ) {
-            if ( keyBytes[i] == GATK_KEY_SECTIONAL_DELIMITER ) {
-                delimiterPosition = i;
-                break;
-            }
-        }
-
-        if ( delimiterPosition == -1 ) {
-            throw new UserException.UnreadableKeyException("Malformed GATK key contains no sectional delimiter");
-        }
-        else if ( delimiterPosition == 0 ) {
-            throw new UserException.UnreadableKeyException("Malformed GATK key contains no email address");
-        }
-        else if ( delimiterPosition == keyBytes.length - 1 ) {
-            throw new UserException.UnreadableKeyException("Malformed GATK key contains no signature");
-        }
-
-        byte[] emailAddressBytes = new byte[delimiterPosition];
-        System.arraycopy(keyBytes, 0, emailAddressBytes, 0, delimiterPosition);
-        emailAddress = new String(emailAddressBytes);
-
-        signature = new byte[keyBytes.length - delimiterPosition - 1];
-        System.arraycopy(keyBytes, delimiterPosition + 1, signature, 0, keyBytes.length - delimiterPosition - 1);
-    }
-
-    /**
-     * Helper method that ensures that the user's email address does not contain the NUL byte, which we
-     * reserve as a delimiter within each key file.
-     */
-    private void validateEmailAddress() {
-        for ( byte b : emailAddress.getBytes() ) {
-            if ( b == GATK_KEY_SECTIONAL_DELIMITER ) {
-                throw new UserException(String.format("Email address must not contain a byte with value %d", GATK_KEY_SECTIONAL_DELIMITER));
-            }
-        }
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/duplicates/DupUtils.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/duplicates/DupUtils.java
deleted file mode 100644
index 3d27407..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/duplicates/DupUtils.java
+++ /dev/null
@@ -1,142 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.duplicates;
-
-import org.broadinstitute.gatk.utils.BaseUtils;
-import org.broadinstitute.gatk.utils.GenomeLoc;
-import org.broadinstitute.gatk.utils.GenomeLocParser;
-import org.broadinstitute.gatk.utils.QualityUtils;
-import org.broadinstitute.gatk.utils.collections.Pair;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-import org.broadinstitute.gatk.utils.pileup.PileupElement;
-import org.broadinstitute.gatk.utils.pileup.ReadBackedPileup;
-import org.broadinstitute.gatk.utils.pileup.ReadBackedPileupImpl;
-import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
-
-import java.util.Arrays;
-import java.util.List;
-
-public class DupUtils {
-    private static GATKSAMRecord tmpCopyRead(GATKSAMRecord read) {
-        return (GATKSAMRecord)read.clone();
-    }
-
-    public static GATKSAMRecord combineDuplicates(GenomeLocParser genomeLocParser,List<GATKSAMRecord> duplicates, int maxQScore) {
-        if ( duplicates.size() == 0 )
-            return null;
-
-        // make the combined read by copying the first read and setting the
-        // bases and quals to new arrays
-        GATKSAMRecord comb = tmpCopyRead(duplicates.get(0));
-        //GATKSAMRecord comb = tmpCopyRead(duplicates.get(0));
-        comb.setDuplicateReadFlag(false);
-        int readLen = comb.getReadBases().length;
-        byte[] bases = new byte[readLen];
-        byte[] quals = new byte[readLen];
-
-        for ( int i = 0; i < readLen; i++ ) {
-            //System.out.printf("I is %d%n", i);
-            //for ( GATKSAMRecord read : duplicates ) {
-            //    System.out.printf("dup base %c %d%n", (char)read.getReadBases()[i], read.getBaseQualities()[i]);
-            //}
-            Pair<Byte, Byte> baseAndQual = combineBaseProbs(genomeLocParser,duplicates, i, maxQScore);
-            bases[i] = baseAndQual.getFirst();
-            quals[i] = baseAndQual.getSecond();            
-        }
-
-
-        comb.setBaseQualities(quals);
-        comb.setReadBases(bases);
-
-        return comb;
-    }
-
-    private static Pair<Byte, Byte> baseProbs2BaseAndQual(double[] probs, int maxQScore) {
-        byte bestBase = 0;
-        double bestProb = Double.NEGATIVE_INFINITY;
-        double sumProbs = 0;
-
-        for ( int i = 0; i < 4; i++ ) {
-            sumProbs += Math.pow(10, probs[i]);
-            //System.out.printf("Bestprob is %f > %f%n", bestProb, probs[i]);
-            if ( probs[i] > bestProb ) {
-                bestBase = BaseUtils.baseIndexToSimpleBase(i);
-                bestProb = probs[i];
-            }
-        }
-
-        Arrays.sort(probs);
-        double normalizedP = Math.pow(10, bestProb) / sumProbs;
-        byte qual = QualityUtils.trueProbToQual(normalizedP, maxQScore);
-//        if ( false ) {
-//            System.out.printf("Best base is %s %.8f%n", bestBase, bestProb);
-//            System.out.printf("2nd  base is %.8f%n", probs[1]);
-//            System.out.printf("normalized P %.8f%n", normalizedP);
-//            System.out.printf("normalized Q %.8f%n", 1 - normalizedP);
-//            System.out.printf("max Q        %2d%n", maxQScore);
-//            System.out.printf("eps          %.8f%n", eps);
-//            System.out.printf("encoded    Q %2d%n", qual);
-//        }
-
-        return new Pair<Byte, Byte>(bestBase, qual);
-    }
-
-    private static void print4BaseQuals(String header, double[] probs) {
-        System.out.printf("%s log10(P(b)) is ", header);
-        for ( int i = 0; i < 4; i++ ) {
-            System.out.printf("%c=%+.8f ", (char)BaseUtils.baseIndexToSimpleBase(i), probs[i]);
-        }
-        System.out.printf("%n");
-    }
-
-    private static Pair<Byte, Byte> combineBaseProbs(GenomeLocParser genomeLocParser,List<GATKSAMRecord> duplicates, int readOffset, int maxQScore) {
-        GenomeLoc loc = genomeLocParser.createGenomeLoc(duplicates.get(0));
-        ReadBackedPileup pileup = new ReadBackedPileupImpl(loc, duplicates, readOffset);
-
-        final boolean debug = false;
-
-        // calculate base probs
-        double[] qualSums = {0.0, 0.0, 0.0, 0.0};
-        if ( debug ) print4BaseQuals("start", qualSums);
-
-        for (PileupElement e : pileup ) {
-            int baseIndex = e.getBaseIndex();
-            byte qual = e.getQual();
-            double pqual = QualityUtils.qualToProb(qual);
-            for ( int j = 0; j < 4; j++) {
-                qualSums[j] += Math.log10(j == baseIndex ?  pqual : (1 - pqual)/3);
-            }
-
-            if ( debug ) print4BaseQuals(String.format("%c Q%2d", e.getBase(), qual), qualSums);
-        }
-        if ( debug ) print4BaseQuals("final", qualSums);
-
-        Pair<Byte, Byte> combined = baseProbs2BaseAndQual(qualSums, maxQScore);
-        if ( debug ) System.out.printf("%s => %c Q%s%n", pileup.getPileupString('N'), (char)(byte)combined.getFirst(), combined.getSecond());
-
-        return combined;
-    }
-}
\ No newline at end of file
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/duplicates/DuplicateComp.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/duplicates/DuplicateComp.java
deleted file mode 100644
index 9213a3e..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/duplicates/DuplicateComp.java
+++ /dev/null
@@ -1,66 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.duplicates;
-
-public class DuplicateComp {
-    public int getQLarger() {
-        return qLarger;
-    }
-
-    public void setQLarger(int qLarger) {
-        this.qLarger = qLarger;
-    }
-
-    public int getQSmaller() {
-        return qSmaller;
-    }
-
-    public void setQSmaller(int qSmaller) {
-        this.qSmaller = qSmaller;
-    }
-
-    public boolean isMismatchP() {
-        return mismatchP;
-    }
-
-    public void setMismatchP(boolean mismatchP) {
-        this.mismatchP = mismatchP;
-    }
-
-    private int qLarger;
-    private int qSmaller;
-    private boolean mismatchP;
-
-    public DuplicateComp(int qLarger, int qSmaller, boolean misMatchP) {
-        this.qLarger = qLarger;
-        this.qSmaller = qSmaller;
-        this.mismatchP = misMatchP;
-    }
-
-    public String toString() {
-        return String.format("%d %d %b", qLarger, qSmaller, mismatchP);
-    }
-}
\ No newline at end of file
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/exceptions/DynamicClassResolutionException.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/exceptions/DynamicClassResolutionException.java
deleted file mode 100644
index 1c53420..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/exceptions/DynamicClassResolutionException.java
+++ /dev/null
@@ -1,54 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.exceptions;
-
-import java.lang.reflect.InvocationTargetException;
-
-/**
- * Class for handling common failures of dynamic class resolution
- */
-public class DynamicClassResolutionException extends UserException {
-    public DynamicClassResolutionException(Class c, Exception ex) {
-        super(String.format("Could not create module %s because %s caused by exception %s",
-                c.getSimpleName(), moreInfo(ex), ex.getMessage()));
-    }
-
-    private static String moreInfo(Exception ex) {
-        try {
-            throw ex;
-        } catch (InstantiationException e) {
-            return "BUG: cannot instantiate class: must be concrete class";
-        } catch (NoSuchMethodException e) {
-            return "BUG: Cannot find expected constructor for class";
-        } catch (IllegalAccessException e) {
-            return "Cannot instantiate class (Illegal Access)";
-        } catch (InvocationTargetException e) {
-            return "Cannot instantiate class (Invocation failure)";
-        } catch ( Exception e ) {
-            return String.format("an exception of type %s occurred",e.getClass().getSimpleName());
-        }
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/exceptions/UserException.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/exceptions/UserException.java
deleted file mode 100644
index 07db4fc..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/exceptions/UserException.java
+++ /dev/null
@@ -1,485 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.exceptions;
-
-import htsjdk.samtools.CigarOperator;
-import htsjdk.samtools.SAMFileHeader;
-import htsjdk.samtools.SAMRecord;
-import htsjdk.samtools.SAMSequenceDictionary;
-import org.broadinstitute.gatk.utils.GenomeLoc;
-import org.broadinstitute.gatk.utils.help.DocumentedGATKFeature;
-import org.broadinstitute.gatk.utils.help.HelpConstants;
-import org.broadinstitute.gatk.utils.sam.ReadUtils;
-import org.broadinstitute.gatk.utils.variant.GATKVCFIndexType;
-import htsjdk.variant.variantcontext.VariantContext;
-
-import java.io.File;
-
-/**
- * Represents the common user errors detected by GATK
- *
- * Root class for all GATK user errors, as well as the container for errors themselves
- */
- at DocumentedGATKFeature(
-        groupName = HelpConstants.DOCS_CAT_USRERR,
-        summary = "Errors caused by incorrect user behavior, such as bad files, bad arguments, etc." )
-public class UserException extends ReviewedGATKException {
-    /**
-     * The URL where people can get help messages.  Printed when an error occurs
-     */
-    public static final String PHONE_HOME_DOCS_URL = "http://gatkforums.broadinstitute.org/discussion/1250/what-is-phone-home-and-how-does-it-affect-me#latest";
-
-    public UserException(String msg) { super(msg); }
-    public UserException(String msg, Throwable e) { super(msg, e); }
-    private UserException(Throwable e) { super("", e); } // cannot be called, private access
-
-    protected static String getMessage(Throwable t) {
-        String message = t.getMessage();
-        return message != null ? message : t.getClass().getName();
-    }
-
-    public static class CommandLineException extends UserException {
-        public CommandLineException(String message) {
-            super(String.format("Invalid command line: %s", message));
-        }
-    }
-
-    public static class MalformedReadFilterException extends CommandLineException {
-        public MalformedReadFilterException(String message) {
-            super(String.format("Malformed read filter: %s",message));
-        }
-    }
-
-    public static class IncompatibleReadFiltersException extends CommandLineException {
-        public IncompatibleReadFiltersException(final String filter1, final String filter2) {
-            super(String.format("Two read filters are enabled that are incompatible and cannot be used simultaneously: %s and %s", filter1, filter2));
-        }
-    }
-
-    public static class MalformedWalkerArgumentsException extends CommandLineException {
-        public MalformedWalkerArgumentsException(String message) {
-            super(String.format("Malformed walker argument: %s",message));
-        }
-    }
-
-    public static class UnsupportedCigarOperatorException extends UserException {
-        public UnsupportedCigarOperatorException(final CigarOperator co, final SAMRecord read, final String message) {
-            super(String.format(
-                "Unsupported CIGAR operator %s in read %s at %s:%d. %s",
-                co,
-                read.getReadName(),
-                read.getReferenceName(),
-                read.getAlignmentStart(),
-                message));
-        }
-    }
-
-
-    public static class MalformedGenomeLoc extends UserException {
-        public MalformedGenomeLoc(String message, GenomeLoc loc) {
-            super(String.format("Badly formed genome loc: %s: %s", message, loc));
-        }
-
-        public MalformedGenomeLoc(String message) {
-            super(String.format("Badly formed genome loc: %s", message));
-        }
-    }
-
-    public static class BadInput extends UserException {
-        public BadInput(String message) {
-            super(String.format("Bad input: %s", message));
-        }
-    }
-
-    // todo -- fix up exception cause passing
-    public static class MissingArgument extends CommandLineException {
-        public MissingArgument(String arg, String message) {
-            super(String.format("Argument %s was missing: %s", arg, message));
-        }
-    }
-
-    public static class BadArgumentValue extends CommandLineException {
-        public BadArgumentValue(String arg, String message) {
-            super(String.format("Argument %s has a bad value: %s", arg, message));
-        }
-    }
-
-    public static class UnknownTribbleType extends CommandLineException {
-        public UnknownTribbleType(String type, String message) {
-            super(String.format("Unknown tribble type %s: %s", type, message));
-        }
-    }
-
-
-    public static class BadTmpDir extends UserException {
-        public BadTmpDir(String message) {
-            super(String.format("Failure working with the tmp directory %s. Override with -Djava.io.tmpdir=X on the command line to a bigger/better file system.  Exact error was %s", System.getProperties().get("java.io.tmpdir"), message));
-        }
-    }
-
-    public static class TooManyOpenFiles extends UserException {
-        public TooManyOpenFiles() {
-            super(String.format("There was a failure because there are too many files open concurrently; your system's open file handle limit is too small.  See the unix ulimit command to adjust this limit"));
-        }
-    }
-
-    public static class LocalParallelizationProblem extends UserException {
-        public LocalParallelizationProblem(final File file) {
-            super(String.format("There was a failure because temporary file %s could not be found while running the GATK with more than one thread.  Possible causes for this problem include: your system's open file handle limit is too small, your output or temp directories do not have sufficient space, or just an isolated file system blip", file.getAbsolutePath()));
-        }
-    }
-
-    public static class NotEnoughMemory extends UserException {
-        public NotEnoughMemory() {
-            super(String.format("There was a failure because you did not provide enough memory to run this program.  See the -Xmx JVM argument to adjust the maximum heap size provided to Java"));
-        }
-    }
-
-    public static class ErrorWritingBamFile extends UserException {
-        public ErrorWritingBamFile(String message) {
-            super(String.format("An error occurred when trying to write the BAM file.  Usually this happens when there is not enough space in the directory to which the data is being written (generally the temp directory) or when your system's open file handle limit is too small.  To tell Java to use a bigger/better file system use -Djava.io.tmpdir=X on the command line.  The exact error was %s", message));
-        }
-    }
-
-    public static class NoSpaceOnDevice extends UserException {
-        public NoSpaceOnDevice() {
-            super("There is no space left on the device, so writing failed");
-        }
-    }
-
-    public static class CouldNotReadInputFile extends UserException {
-        public CouldNotReadInputFile(String message, Exception e) {
-            super(String.format("Couldn't read file because %s caused by %s", message, getMessage(e)));
-        }
-
-        public CouldNotReadInputFile(File file) {
-            super(String.format("Couldn't read file %s", file.getAbsolutePath()));
-        }
-
-        public CouldNotReadInputFile(File file, String message) {
-            super(String.format("Couldn't read file %s because %s", file.getAbsolutePath(), message));
-        }
-
-        public CouldNotReadInputFile(String file, String message) {
-            super(String.format("Couldn't read file %s because %s", file, message));
-        }
-
-        public CouldNotReadInputFile(File file, String message, Exception e) {
-            super(String.format("Couldn't read file %s because %s with exception %s", file.getAbsolutePath(), message, getMessage(e)));
-        }
-
-        public CouldNotReadInputFile(File file, Exception e) {
-            this(file, getMessage(e));
-        }
-
-        public CouldNotReadInputFile(String message) {
-            super(message);
-        }
-    }
-
-
-    public static class CouldNotCreateOutputFile extends UserException {
-        public CouldNotCreateOutputFile(File file, String message, Exception e) {
-            super(String.format("Couldn't write file %s because %s with exception %s", file.getAbsolutePath(), message, getMessage(e)));
-        }
-
-        public CouldNotCreateOutputFile(File file, String message) {
-            super(String.format("Couldn't write file %s because %s", file.getAbsolutePath(), message));
-        }
-
-        public CouldNotCreateOutputFile(String filename, String message, Exception e) {
-            super(String.format("Couldn't write file %s because %s with exception %s", filename, message, getMessage(e)));
-        }
-
-        public CouldNotCreateOutputFile(File file, Exception e) {
-            super(String.format("Couldn't write file %s because exception %s", file.getAbsolutePath(), getMessage(e)));
-        }
-
-        public CouldNotCreateOutputFile(String message, Exception e) {
-            super(message, e);
-        }
-    }
-
-    public static class MissortedBAM extends UserException {
-        public MissortedBAM(SAMFileHeader.SortOrder order, File file, SAMFileHeader header) {
-            super(String.format("Missorted Input SAM/BAM files: %s is must be sorted in %s order but order was: %s", file, order, header.getSortOrder()));
-        }
-
-        public MissortedBAM(SAMFileHeader.SortOrder order, String message) {
-            super(String.format("Missorted Input SAM/BAM files: files are not sorted in %s order; %s", order, message));
-        }
-
-        public MissortedBAM(SAMFileHeader.SortOrder order, SAMRecord read, String message) {
-            super(String.format("Missorted Input SAM/BAM file %s: file sorted in %s order but %s is required; %s",
-                    read.getFileSource().getReader(), read.getHeader().getSortOrder(), order, message));
-        }
-
-        public MissortedBAM(String message) {
-            super(String.format("Missorted Input SAM/BAM files: %s", message));
-        }
-    }
-
-    public static class MalformedBAM extends UserException {
-        public MalformedBAM(SAMRecord read, String message) {
-            this(read.getFileSource() != null ? read.getFileSource().getReader().toString() : "(none)", message);
-        }
-
-        public MalformedBAM(File file, String message) {
-            this(file.toString(), message);
-        }
-
-        public MalformedBAM(String source, String message) {
-            super(String.format("SAM/BAM file %s is malformed: %s", source, message));
-        }
-    }
-
-    public static class MisencodedBAM extends UserException {
-        public MisencodedBAM(SAMRecord read, String message) {
-            this(read.getFileSource() != null ? read.getFileSource().getReader().toString() : "(none)", message);
-        }
-
-        public MisencodedBAM(String source, String message) {
-            super(String.format("SAM/BAM file %s appears to be using the wrong encoding for quality scores: %s; please see the GATK --help documentation for options related to this error", source, message));
-        }
-    }
-
-    public static class MalformedVCF extends UserException {
-        public MalformedVCF(String message, String line) {
-            super(String.format("The provided VCF file is malformed at line %s: %s", line, message));
-        }
-
-        public MalformedVCF(String message) {
-            super(String.format("The provided VCF file is malformed: %s", message));
-        }
-
-        public MalformedVCF(String message, int lineNo) {
-            super(String.format("The provided VCF file is malformed at approximately line number %d: %s", lineNo, message));
-        }
-    }
-
-    public static class MalformedBCF2 extends UserException {
-        public MalformedBCF2( String message ) {
-            super(String.format("Malformed BCF2 file: %s", message));
-        }
-    }
-
-    public static class MalformedVCFHeader extends UserException {
-        public MalformedVCFHeader(String message) {
-            super(String.format("The provided VCF file has a malformed header: %s", message));
-        }
-    }
-
-    public static class ReadMissingReadGroup extends MalformedBAM {
-        public ReadMissingReadGroup(final SAMRecord read) {
-            super(read, String.format("Read %s is missing the read group (RG) tag, which is required by the GATK.  Please use " + HelpConstants.forumPost("discussion/59/companion-utilities-replacereadgroups to fix this problem"), read.getReadName()));
-        }
-    }
-
-    public static class ReadHasUndefinedReadGroup extends MalformedBAM {
-        public ReadHasUndefinedReadGroup(final SAMRecord read, final String rgID) {
-            super(read, String.format("Read %s uses a read group (%s) that is not defined in the BAM header, which is not valid.  Please use " + HelpConstants.forumPost("discussion/59/companion-utilities-replacereadgroups to fix this problem"), read.getReadName(), rgID));
-        }
-    }
-
-    public static class VariantContextMissingRequiredField extends UserException {
-        public VariantContextMissingRequiredField(String field, VariantContext vc) {
-            super(String.format("Variant at %s:%d is is missing the required field %s", vc.getChr(), vc.getStart(), field));
-        }
-    }
-
-    public static class MissortedFile extends UserException {
-        public MissortedFile(File file, String message, Exception e) {
-            super(String.format("Missorted Input file: %s is must be sorted in coordinate order. %s and got error %s", file, message, getMessage(e)));
-        }
-    }
-
-    public static class FailsStrictValidation extends UserException {
-        public FailsStrictValidation(File f, String message) {
-            super(String.format("File %s fails strict validation: %s", f.getAbsolutePath(), message));
-        }
-    }
-
-    public static class MalformedFile extends UserException {
-        public MalformedFile(String message) {
-            super(String.format("Unknown file is malformed: %s", message));
-        }
-
-        public MalformedFile(String message, Exception e) {
-            super(String.format("Unknown file is malformed: %s caused by %s", message, getMessage(e)));
-        }
-
-        public MalformedFile(File f, String message) {
-            super(String.format("File %s is malformed: %s", f.getAbsolutePath(), message));
-        }
-
-        public MalformedFile(File f, String message, Exception e) {
-            super(String.format("File %s is malformed: %s caused by %s", f.getAbsolutePath(), message, getMessage(e)));
-        }
-
-        public MalformedFile(String name, String message) {
-            super(String.format("File associated with name %s is malformed: %s", name, message));
-        }
-
-        public MalformedFile(String name, String message, Exception e) {
-            super(String.format("File associated with name %s is malformed: %s caused by %s", name, message, getMessage(e)));
-        }
-     }
-
-    public static class CannotExecuteRScript extends UserException {
-        public CannotExecuteRScript(String message) {
-            super(String.format("Unable to execute RScript command: " + message));
-        }
-        public CannotExecuteRScript(String message, Exception e) {
-            super(String.format("Unable to execute RScript command: " + message), e);
-        }
-    }
-
-    public static class DeprecatedArgument extends CommandLineException {
-        public DeprecatedArgument(String param, String doc) {
-            super(String.format("The parameter %s is deprecated.  %s",param,doc));
-        }
-    }
-
-
-    public static class IncompatibleSequenceDictionaries extends UserException {
-        public IncompatibleSequenceDictionaries(String message, String name1, SAMSequenceDictionary dict1, String name2, SAMSequenceDictionary dict2) {
-            super(String.format("Input files %s and %s have incompatible contigs: %s.\n  %s contigs = %s\n  %s contigs = %s",
-                    name1, name2, message, name1, ReadUtils.prettyPrintSequenceRecords(dict1), name2, ReadUtils.prettyPrintSequenceRecords(dict2)));
-        }
-    }
-
-    public static class LexicographicallySortedSequenceDictionary extends UserException {
-        public LexicographicallySortedSequenceDictionary(String name, SAMSequenceDictionary dict) {
-            super(String.format("Lexicographically sorted human genome sequence detected in %s."
-                    + "\nFor safety's sake the GATK requires human contigs in karyotypic order: 1, 2, ..., 10, 11, ..., 20, 21, 22, X, Y with M either leading or trailing these contigs."
-                    + "\nThis is because all distributed GATK resources are sorted in karyotypic order, and your processing will fail when you need to use these files."
-                    + "\nYou can use the ReorderSam utility to fix this problem: " + HelpConstants.forumPost("discussion/58/companion-utilities-reordersam")
-                    + "\n  %s contigs = %s",
-                    name, name, ReadUtils.prettyPrintSequenceRecords(dict)));
-        }
-    }
-
-    public static class DeprecatedWalker extends UserException {
-        public DeprecatedWalker(String walkerName, String version) {
-            super(String.format("Walker %s is no longer available in the GATK; it has been deprecated since version %s", walkerName, version));
-        }
-    }
-
-    public static class DeprecatedAnnotation extends UserException {
-        public DeprecatedAnnotation(String annotationName, String version) {
-            super(String.format("Annotation %s is no longer available in the GATK; it has been deprecated since version %s", annotationName, version));
-        }
-    }
-
-    public static class CannotExecuteQScript extends UserException {
-        public CannotExecuteQScript(String message) {
-            super(String.format("Unable to execute QScript: " + message));
-        }
-        public CannotExecuteQScript(String message, Exception e) {
-            super(String.format("Unable to execute QScript: " + message), e);
-        }
-    }
-
-    public static class CannotHandleGzippedRef extends UserException {
-        public CannotHandleGzippedRef() {
-            super("The GATK cannot process compressed (.gz) reference sequences. Please unzip the file and try again.  Sorry for the inconvenience.");
-        }
-    }
-
-    public static class MissingReferenceFaiFile extends UserException {
-        public MissingReferenceFaiFile( final File indexFile, final File fastaFile ) {
-            super(String.format("Fasta index file %s for reference %s does not exist. Please see %s for help creating it.",
-                                indexFile.getAbsolutePath(), fastaFile.getAbsolutePath(),
-                                HelpConstants.forumPost("discussion/1601/how-can-i-prepare-a-fasta-file-to-use-as-reference")));
-        }
-    }
-
-    public static class MissingReferenceDictFile extends UserException {
-        public MissingReferenceDictFile( final File dictFile, final File fastaFile ) {
-            super(String.format("Fasta dict file %s for reference %s does not exist. Please see %s for help creating it.",
-                                dictFile.getAbsolutePath(), fastaFile.getAbsolutePath(),
-                                HelpConstants.forumPost("discussion/1601/how-can-i-prepare-a-fasta-file-to-use-as-reference")));
-        }
-    }
-
-    public static class UnreadableKeyException extends UserException {
-        public UnreadableKeyException ( File f, Exception e ) {
-            super(String.format("Key file %s cannot be read (possibly the key file is corrupt?). Error was: %s. " +
-                                "Please see %s for help.",
-                                f.getAbsolutePath(), getMessage(e), PHONE_HOME_DOCS_URL));
-        }
-
-        public UnreadableKeyException ( String message, Exception e ) {
-            this(String.format("%s. Error was: %s", message, getMessage(e)));
-        }
-
-        public UnreadableKeyException ( String message ) {
-            super(String.format("Key file cannot be read (possibly the key file is corrupt?): %s. " +
-                                "Please see %s for help.",
-                                message, PHONE_HOME_DOCS_URL));
-        }
-    }
-
-    public static class KeySignatureVerificationException extends UserException {
-        public KeySignatureVerificationException ( File f ) {
-            super(String.format("The signature in key file %s failed cryptographic verification. " +
-                                "If this key was valid in the past, it's likely been revoked. " +
-                                "Please see %s for help.",
-                                f.getAbsolutePath(), PHONE_HOME_DOCS_URL));
-        }
-    }
-
-    public static class GVCFIndexException extends UserException {
-        public GVCFIndexException (GATKVCFIndexType indexType, int indexParameter) {
-            super(String.format("GVCF output requires a specific indexing strategy.  Please re-run including the arguments " +
-                    "-variant_index_type %s -variant_index_parameter %d.",
-                    indexType, indexParameter));
-        }
-    }
-
-    /**
-     * A special exception that happens only in the case where
-     * the filesystem, by design or configuration, is completely unable
-     * to handle locking.  This exception will specifically NOT be thrown
-     * in the case where the filesystem handles locking but is unable to
-     * acquire a lock due to concurrency.
-     */
-    public static class FileSystemInabilityToLockException extends UserException {
-        public FileSystemInabilityToLockException( String message ) {
-            super(message);
-        }
-
-        public FileSystemInabilityToLockException( String message, Exception innerException ) {
-            super(message,innerException);
-        }
-    }
-
-    public static class IncompatibleRecalibrationTableParameters extends UserException {
-        public IncompatibleRecalibrationTableParameters(String s) {
-            super(s);
-        }
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/fasta/ArtificialFastaUtils.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/fasta/ArtificialFastaUtils.java
deleted file mode 100644
index bf03ec6..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/fasta/ArtificialFastaUtils.java
+++ /dev/null
@@ -1,154 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.fasta;
-
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-
-import java.io.FileNotFoundException;
-import java.io.FileOutputStream;
-import java.io.PrintStream;
-import java.util.List;
-
-
-/**
- * @author aaron
- *         <p/>
- *         Class ArtificialFastaUtils
- *         <p/>
- *         artificial fasta utility class, for generating fake fastas.
- */
-public class ArtificialFastaUtils {
-    public enum BASE_PATTERN {
-        RANDOM, ALL_A, ALL_T, ALL_C, ALL_G;
-    }
-
-    // what bases we support
-    public enum BASES {
-        A, T, C, G;
-    }
-
-    // create an artificial fasta file
-    public static void createArtificialFasta(String fileName,
-                                             List<String> contigNames,
-                                             List<Integer> contigSizes,
-                                             BASE_PATTERN pattern) {
-        PrintStream s;
-        try {
-            s = new PrintStream(new FileOutputStream(fileName));
-        } catch (FileNotFoundException e) {
-            throw new ReviewedGATKException("Filename " + fileName + " passed to the ArtificialFastaUtils generated a FileNotFound exception", e);
-        }
-        generateFakeFasta(contigNames, contigSizes, pattern, s);
-    }
-
-    // create an artificial fasta file
-    public static void createArtificialFasta(PrintStream stream,
-                                             List<String> contigNames,
-                                             List<Integer> contigSizes,
-                                             BASE_PATTERN pattern) {
-
-        generateFakeFasta(contigNames, contigSizes, pattern, stream);
-    }
-
-    /**
-     * create a fake fasta file
-     *
-     * @param contigNames the pile of contig names
-     * @param contigSizes the pile of contig sizes
-     * @param pattern     the pattern to use for the base distrobution
-     * @param s           the print stream to write to
-     */
-    private static void generateFakeFasta(List<String> contigNames, List<Integer> contigSizes, BASE_PATTERN pattern, PrintStream s) {
-        if (contigNames.size() != contigSizes.size()) {
-            throw new ReviewedGATKException("ArtificialContig name and size arrays are not equal sizes");
-        }
-        for (int x = 0; x < contigNames.size(); x++) {
-            ArtificialContig tig = new ArtificialContig(contigNames.get(x), contigSizes.get(x), pattern);
-            tig.write(s);
-        }
-        s.close();
-    }
-
-}
-
-
-/** the fake contig class, a fasta is made up of these */
-class ArtificialContig {
-    public static final int COLUMN_WIDTH = 80;
-
-    final protected String mName;
-    final protected int mSize;
-    final protected ArtificialFastaUtils.BASE_PATTERN mPattern;
-
-    public ArtificialContig(String name, int size, ArtificialFastaUtils.BASE_PATTERN pat) {
-        this.mName = name;
-        this.mSize = size;
-        this.mPattern = pat;
-    }
-
-    /**
-     * write out the contig to a stream
-     *
-     * @param stream
-     */
-    public void write(PrintStream stream) {
-        stream.println(">" + mName);
-        int count = 0;
-        while (count < mSize) {
-            for (int x = 0; x < COLUMN_WIDTH; x++) {
-                stream.print(generateAppropriateBase());
-                count++;
-                if (count >= mSize) {
-                    break;
-                }
-            }
-            stream.println();
-        }
-    }
-
-    /**
-     * generate the appropriate base, given the BASE_PATTERN
-     *
-     * @return a base, as a string
-     */
-    public String generateAppropriateBase() {
-        switch (mPattern) {
-            case RANDOM:
-                return (ArtificialFastaUtils.BASES.values()[(int) Math.round(Math.random() * 4)]).toString();
-            case ALL_A:
-                return "A";
-            case ALL_T:
-                return "T";
-            case ALL_C:
-                return "C";
-            case ALL_G:
-                return "G";
-            default:
-                throw new ReviewedGATKException("Unknown base pattern");
-        }
-    }
-
-}
\ No newline at end of file
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/fasta/CachingIndexedFastaSequenceFile.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/fasta/CachingIndexedFastaSequenceFile.java
deleted file mode 100644
index 05f2ccf..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/fasta/CachingIndexedFastaSequenceFile.java
+++ /dev/null
@@ -1,311 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.fasta;
-
-import picard.PicardException;
-import htsjdk.samtools.reference.FastaSequenceIndex;
-import htsjdk.samtools.reference.IndexedFastaSequenceFile;
-import htsjdk.samtools.reference.ReferenceSequence;
-import htsjdk.samtools.SAMSequenceRecord;
-import htsjdk.samtools.util.StringUtil;
-import org.apache.log4j.Priority;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-import org.broadinstitute.gatk.utils.BaseUtils;
-
-import java.io.File;
-import java.io.FileNotFoundException;
-import java.util.Arrays;
-
-/**
- * A caching version of the IndexedFastaSequenceFile that avoids going to disk as often as the raw indexer.
- *
- * Thread-safe!  Uses a thread-local cache.
- *
- * Automatically upper-cases the bases coming in, unless the flag preserveCase is explicitly set.
- * Automatically converts IUPAC bases to Ns, unless the flag preserveIUPAC is explicitly set.
- */
-public class CachingIndexedFastaSequenceFile extends IndexedFastaSequenceFile {
-    protected static final org.apache.log4j.Logger logger = org.apache.log4j.Logger.getLogger(CachingIndexedFastaSequenceFile.class);
-
-    /** do we want to print debugging information about cache efficiency? */
-    private static final boolean PRINT_EFFICIENCY = false;
-
-    /** If we are printing efficiency info, what frequency should we do it at? */
-    private static final int PRINT_FREQUENCY = 10000;
-
-    /** The default cache size in bp */
-    public static final long DEFAULT_CACHE_SIZE = 1000000;
-
-    /** The cache size of this CachingIndexedFastaSequenceFile */
-    private final long cacheSize;
-
-    /** When we have a cache miss at position X, we load sequence from X - cacheMissBackup */
-    private final long cacheMissBackup;
-
-    /**
-     * If true, we will preserve the case of the original base in the genome
-     */
-    private final boolean preserveCase;
-
-    /**
-     * If true, we will preserve the IUPAC bases in the genome
-     */
-    private final boolean preserveIUPAC;
-
-    // information about checking efficiency
-    long cacheHits = 0;
-    long cacheMisses = 0;
-
-    /** Represents a specific cached sequence, with a specific start and stop, as well as the bases */
-    private static class Cache {
-        long start = -1, stop = -1;
-        ReferenceSequence seq = null;
-    }
-
-    /**
-     * Thread local cache to allow multi-threaded use of this class
-     */
-    private ThreadLocal<Cache> cache;
-    {
-        cache = new ThreadLocal<Cache> () {
-            @Override protected Cache initialValue() {
-                return new Cache();
-            }
-        };
-    }
-
-    /**
-     * Same as general constructor but allows one to override the default cacheSize
-     *
-     * @param fasta the file we will read our FASTA sequence from.
-     * @param index the index of the fasta file, used for efficient random access
-     * @param cacheSize the size in bp of the cache we will use for this reader
-     * @param preserveCase If true, we will keep the case of the underlying bases in the FASTA, otherwise everything is converted to upper case
-     * @param preserveIUPAC If true, we will keep the IUPAC bases in the FASTA, otherwise they are converted to Ns
-     */
-    public CachingIndexedFastaSequenceFile(final File fasta, final FastaSequenceIndex index, final long cacheSize, final boolean preserveCase, final boolean preserveIUPAC) {
-        super(fasta, index);
-        if ( cacheSize < 0 ) throw new IllegalArgumentException("cacheSize must be > 0");
-        this.cacheSize = cacheSize;
-        this.cacheMissBackup = Math.max(cacheSize / 1000, 1);
-        this.preserveCase = preserveCase;
-        this.preserveIUPAC = preserveIUPAC;
-    }
-
-    /**
-     * Open the given indexed fasta sequence file.  Throw an exception if the file cannot be opened.
-     *
-     * Looks for a index file for fasta on disk
-     * Uses provided cacheSize instead of the default
-     *
-     * @param fasta The file to open.
-     * @param cacheSize the size of the cache to use in this CachingIndexedFastaReader, must be >= 0
-     * @param preserveCase If true, we will keep the case of the underlying bases in the FASTA, otherwise everything is converted to upper case
-     */
-    public CachingIndexedFastaSequenceFile(final File fasta, final long cacheSize, final boolean preserveCase, final boolean  preserveIUPAC) throws FileNotFoundException {
-        super(fasta);
-        if ( cacheSize < 0 ) throw new IllegalArgumentException("cacheSize must be > 0");
-        this.cacheSize = cacheSize;
-        this.cacheMissBackup = Math.max(cacheSize / 1000, 1);
-        this.preserveCase = preserveCase;
-        this.preserveIUPAC = preserveIUPAC;
-    }
-
-    /**
-     * Same as general constructor but allows one to override the default cacheSize
-     *
-     * By default, this CachingIndexedFastaReader converts all incoming bases to upper case
-     *
-     * @param fasta the file we will read our FASTA sequence from.
-     * @param index the index of the fasta file, used for efficient random access
-     * @param cacheSize the size in bp of the cache we will use for this reader
-     */
-    public CachingIndexedFastaSequenceFile(final File fasta, final FastaSequenceIndex index, final long cacheSize) {
-        this(fasta, index, cacheSize, false, false);
-    }
-
-    /**
-     * Open the given indexed fasta sequence file.  Throw an exception if the file cannot be opened.
-     *
-     * Looks for a index file for fasta on disk.
-     * This CachingIndexedFastaReader will convert all FASTA bases to upper cases under the hood
-     *
-     * @param fasta The file to open.
-     */
-    public CachingIndexedFastaSequenceFile(final File fasta) throws FileNotFoundException {
-        this(fasta, false);
-    }
-
-    /**
-     * Open the given indexed fasta sequence file.  Throw an exception if the file cannot be opened.
-     *
-     * Looks for a index file for fasta on disk
-     *
-     * @param fasta The file to open.
-     * @param preserveCase If true, we will keep the case of the underlying bases in the FASTA, otherwise everything is converted to upper case
-     */
-    public CachingIndexedFastaSequenceFile(final File fasta, final boolean preserveCase) throws FileNotFoundException {
-        this(fasta, DEFAULT_CACHE_SIZE, preserveCase, false);
-    }
-
-    /**
-     * Open the given indexed fasta sequence file.  Throw an exception if the file cannot be opened.
-     *
-     * Looks for a index file for fasta on disk
-     * Uses provided cacheSize instead of the default
-     *
-     * @param fasta The file to open.
-     * @param cacheSize the size of the cache to use in this CachingIndexedFastaReader, must be >= 0
-     */
-    public CachingIndexedFastaSequenceFile(final File fasta, final long cacheSize ) throws FileNotFoundException {
-        this(fasta, cacheSize, false, false);
-    }
-
-    /**
-     * Print the efficiency (hits / queries) to logger with priority
-     */
-    public void printEfficiency(final Priority priority) {
-        logger.log(priority, String.format("### CachingIndexedFastaReader: hits=%d misses=%d efficiency %.6f%%", cacheHits, cacheMisses, calcEfficiency()));
-    }
-
-    /**
-     * Returns the efficiency (% of hits of all queries) of this object
-     * @return
-     */
-    public double calcEfficiency() {
-        return 100.0 * cacheHits / (cacheMisses + cacheHits * 1.0);
-    }
-
-    /**
-     * @return the number of cache hits that have occurred
-     */
-    public long getCacheHits() {
-        return cacheHits;
-    }
-
-    /**
-     * @return the number of cache misses that have occurred
-     */
-    public long getCacheMisses() {
-        return cacheMisses;
-    }
-
-    /**
-     * @return the size of the cache we are using
-     */
-    public long getCacheSize() {
-        return cacheSize;
-    }
-
-    /**
-     * Is this CachingIndexedFastaReader keeping the original case of bases in the fasta, or is
-     * everything being made upper case?
-     *
-     * @return true if the bases coming from this reader are in the original case in the fasta, false if they are all upper cased
-     */
-    public boolean isPreservingCase() {
-        return preserveCase;
-    }
-
-    /**
-     * Is uppercasing bases?
-     *
-     * @return true if bases coming from this CachingIndexedFastaSequenceFile are all upper cased, false if this reader are in the original case in the fasta
-     */
-    public boolean isUppercasingBases() {
-        return ! isPreservingCase();
-    }
-
-    /**
-     * Is this CachingIndexedFastaReader keeping the IUPAC bases in the fasta, or is it turning them into Ns?
-     *
-     * @return true if the IUPAC bases coming from this reader are not modified
-     */
-    public boolean isPreservingIUPAC() {
-        return preserveIUPAC;
-    }
-
-    /**
-     * Gets the subsequence of the contig in the range [start,stop]
-     *
-     * Uses the sequence cache if possible, or updates the cache to handle the request.  If the range
-     * is larger than the cache itself, just loads the sequence directly, not changing the cache at all
-     *
-     * @param contig Contig whose subsequence to retrieve.
-     * @param start inclusive, 1-based start of region.
-     * @param stop inclusive, 1-based stop of region.
-     * @return The partial reference sequence associated with this range.  If preserveCase is false, then
-     *         all of the bases in the ReferenceSequence returned by this method will be upper cased.
-     */
-    @Override
-    public ReferenceSequence getSubsequenceAt( final String contig, long start, final long stop ) {
-        final ReferenceSequence result;
-        final Cache myCache = cache.get();
-
-        if ( (stop - start) >= cacheSize ) {
-            cacheMisses++;
-            result = super.getSubsequenceAt(contig, start, stop);
-            if ( ! preserveCase ) StringUtil.toUpperCase(result.getBases());
-            if ( ! preserveIUPAC ) BaseUtils.convertIUPACtoN(result.getBases(), true, start < 1);
-        } else {
-            // todo -- potential optimization is to check if contig.name == contig, as this in general will be true
-            SAMSequenceRecord contigInfo = super.getSequenceDictionary().getSequence(contig);
-
-            if (stop > contigInfo.getSequenceLength())
-                throw new PicardException("Query asks for data past end of contig");
-
-            if ( start < myCache.start || stop > myCache.stop || myCache.seq == null || myCache.seq.getContigIndex() != contigInfo.getSequenceIndex() ) {
-                cacheMisses++;
-                myCache.start = Math.max(start - cacheMissBackup, 0);
-                myCache.stop  = Math.min(start + cacheSize + cacheMissBackup, contigInfo.getSequenceLength());
-                myCache.seq   = super.getSubsequenceAt(contig, myCache.start, myCache.stop);
-
-                // convert all of the bases in the sequence to upper case if we aren't preserving cases
-                if ( ! preserveCase ) StringUtil.toUpperCase(myCache.seq.getBases());
-                if ( ! preserveIUPAC ) BaseUtils.convertIUPACtoN(myCache.seq.getBases(), true, myCache.start == 0);
-            } else {
-                cacheHits++;
-            }
-
-            // at this point we determine where in the cache we want to extract the requested subsequence
-            final int cacheOffsetStart = (int)(start - myCache.start);
-            final int cacheOffsetStop = (int)(stop - start + cacheOffsetStart + 1);
-
-            try {
-                result = new ReferenceSequence(myCache.seq.getName(), myCache.seq.getContigIndex(), Arrays.copyOfRange(myCache.seq.getBases(), cacheOffsetStart, cacheOffsetStop));
-            } catch ( ArrayIndexOutOfBoundsException e ) {
-                throw new ReviewedGATKException(String.format("BUG: bad array indexing.  Cache start %d and end %d, request start %d end %d, offset start %d and end %d, base size %d",
-                        myCache.start, myCache.stop, start, stop, cacheOffsetStart, cacheOffsetStop, myCache.seq.getBases().length), e);
-            }
-        }
-
-        // for debugging -- print out our efficiency if requested
-        if ( PRINT_EFFICIENCY && (getCacheHits() + getCacheMisses()) % PRINT_FREQUENCY == 0 )
-            printEfficiency(Priority.INFO);
-
-        return result;
-    }
-}
\ No newline at end of file
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/fasta/package-info.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/fasta/package-info.java
deleted file mode 100644
index ec94dac..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/fasta/package-info.java
+++ /dev/null
@@ -1,26 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.fasta;
\ No newline at end of file
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/file/FSLockWithShared.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/file/FSLockWithShared.java
deleted file mode 100644
index 934a022..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/file/FSLockWithShared.java
+++ /dev/null
@@ -1,293 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.file;
-
-import org.apache.log4j.Logger;
-import org.broadinstitute.gatk.utils.exceptions.UserException;
-
-import java.io.File;
-import java.io.IOException;
-import java.io.RandomAccessFile;
-import java.nio.channels.*;
-import java.util.concurrent.*;
-
-/**
- * a quick implementation of a file based lock, using the Java NIO classes
- */
-public class FSLockWithShared {
-    // connect to the logger
-    private final static Logger logger = Logger.getLogger(FSLockWithShared.class);
-
-    // the file we're attempting to lock
-    private final File file;
-
-    // the file lock
-    private FileLock lock = null;
-
-    // the file channel we open
-    private FileChannel channel = null;
-
-    // Timeout (in milliseconds) before we give up during non-blocking lock-acquisition calls.
-    // Necessary because these "non-blocking" calls can hang if there's a problem with the
-    // OS file locking support.
-    private int lockAcquisitionTimeout;
-
-    // Default value for lockAcquisitionTimeout when none is explicitly provided
-    public static final int DEFAULT_LOCK_ACQUISITION_TIMEOUT_IN_MILLISECONDS = 30 * 1000;
-
-    // Amount of time to wait when trying to shut down the lock-acquisition thread before giving up
-    public static final int THREAD_TERMINATION_TIMEOUT_IN_MILLISECONDS = 30 * 1000;
-
-    /**
-     * Create a lock associated with the specified File. Use the default lock
-     * acquisition timeout of 30 seconds.
-     *
-     * @param file file to lock
-     */
-    public FSLockWithShared( final File file ) {
-        this.file = file;
-        lockAcquisitionTimeout = DEFAULT_LOCK_ACQUISITION_TIMEOUT_IN_MILLISECONDS;
-    }
-
-    /**
-     * Create a lock associated with the specified File, and set a custom lock
-     * acquisition timeout.
-     *
-     * @param file file to lock
-     * @param lockAcquisitionTimeout maximum number of milliseconds to wait during non-blocking
-     *                               lock acquisition calls before concluding that there's a
-     *                               problem with the OS file locking support and throwing an error.
-     */
-    public FSLockWithShared( final File file, final int lockAcquisitionTimeout ) {
-        this.file = file;
-        this.lockAcquisitionTimeout = lockAcquisitionTimeout;
-    }
-
-    /**
-     * Get a shared (read) lock on a file. Does not block, and returns immediately
-     * under normal conditions with the result of the lock acquisition attempt. Will
-     * throw an exception if there's a problem with the OS file locking support.
-     *
-     * @return boolean true if we obtained a lock, false if we failed to obtain one
-     */
-    public boolean sharedLock() {
-        return acquireLockWithTimeout(true);
-    }
-
-    /**
-     * Get an exclusive (read-write) lock on a file. Does not block, and returns immediately
-     * under normal conditions with the result of the lock acquisition attempt. Will
-     * throw an exception if there's a problem with the OS file locking support.
-     *
-     * @return boolean true if we obtained a lock, false if we failed to obtain one
-     */
-    public boolean exclusiveLock() {
-        return acquireLockWithTimeout(false);
-    }
-
-    /**
-     * Attempt to acquire a lock of the specified type on the file in a background thread.
-     * Uses non-blocking lock-acquisition calls that should return immediately, but may
-     * get stuck if there's a problem with the OS file locking support. If the call gets
-     * stuck and the timeout elapses, throws a UserException, since it's not safe to
-     * proceed with a stuck lock acquisition thread (and there's no way to reliably
-     * interrupt it once the underlying system call hangs).
-     *
-     * @param acquireSharedLock if true, request a shared lock rather than an exclusive lock
-     * @return true if a lock was acquired, false if we failed
-     */
-    private boolean acquireLockWithTimeout( final boolean acquireSharedLock ) {
-        // Use daemon threads so that hopelessly stuck lock acquisition threads won't prevent the JVM from exiting
-        final ExecutorService executor = Executors.newSingleThreadExecutor(new ThreadFactory() {
-                                                                               public Thread newThread( Runnable r ) {
-                                                                                   Thread lockAcquisitionThread = new Thread(r);
-                                                                                   lockAcquisitionThread.setDaemon(true);
-                                                                                   return lockAcquisitionThread;
-                                                                               }
-                                                                           });
-        final FutureTask<Boolean> lockAcquisitionTask = new FutureTask<Boolean>(new LockAcquisitionTask(acquireSharedLock));
-        boolean lockAcquired = false;
-
-        try {
-            executor.execute(lockAcquisitionTask);
-
-            // Wait at most lockAcquisitionTimeout milliseconds for the lock acquisition task to finish.
-            lockAcquired = lockAcquisitionTask.get(lockAcquisitionTimeout, TimeUnit.MILLISECONDS);
-        }
-        // Lock acquisition timeout elapsed. Since we're using NON-BLOCKING lock-acquisition calls,
-        // this implies that there's a problem with the OS locking daemon, or locks are not supported.
-        // Since it's not safe to proceed with a potentially stuck lock acquisition thread, we need to
-        // shut down the JVM in order to kill it.
-        catch ( TimeoutException e ) {
-            throw new UserException.FileSystemInabilityToLockException(
-                    String.format("Timeout of %d milliseconds was reached while trying to acquire a lock on file %s. " +
-                                  "Since the GATK uses non-blocking lock acquisition calls that are not supposed to wait, " +
-                                  "this implies a problem with the file locking support in your operating system.",
-                                  lockAcquisitionTimeout, file.getAbsolutePath()));
-        }
-        // Lock acquisition thread threw an exception. Need to unpack it via e.getCause()
-        catch ( ExecutionException e ) {
-            logger.warn(String.format("WARNING: Unable to lock file %s because exception %s occurred with error message %s",
-                                      file.getAbsolutePath(),
-                                      e.getCause() != null ? e.getCause().getClass().getSimpleName() : "unknown",
-                                      e.getCause() != null ? e.getCause().getMessage() : "none"));
-            lockAcquired = false;
-        }
-        // Interrupted while waiting for the lock acquisition thread -- not likely to happen
-        catch ( InterruptedException e ) {
-            logger.warn(String.format("WARNING: interrupted while attempting to acquire a lock for file %s", file.getAbsolutePath()));
-            lockAcquired = false;
-        }
-        catch ( Exception e ) {
-            logger.warn(String.format("WARNING: error while attempting to acquire a lock for file %s. Error message: %s",
-                                      file.getAbsolutePath(), e.getMessage()));
-            lockAcquired = false;
-        }
-
-        shutdownLockAcquisitionTask(executor);
-
-        // Upon failure to acquire a lock, we always call unlock() to close the FileChannel if it was opened
-        // and to deal with very hypothetical edge cases where a lock might actually have been acquired despite the
-        // lock acquisition thread returning false.
-        if ( ! lockAcquired ) {
-            unlock();
-        }
-
-        return lockAcquired;
-    }
-
-    /**
-     * Ensures that the lock acquisition task running in the provided executor has cleanly terminated.
-     * Throws a UserException if unable to shut it down within the period defined by the THREAD_TERMINATION_TIMEOUT.
-     *
-     * @param executor ExecutorService executing the lock-acquisition thread
-     */
-    private void shutdownLockAcquisitionTask( final ExecutorService executor ) {
-        boolean shutdownAttemptSucceeded;
-
-        try {
-            executor.shutdownNow();
-            shutdownAttemptSucceeded = executor.awaitTermination(THREAD_TERMINATION_TIMEOUT_IN_MILLISECONDS, TimeUnit.MILLISECONDS);
-        }
-        catch ( InterruptedException e ) {
-            shutdownAttemptSucceeded = false;
-        }
-
-        if ( ! shutdownAttemptSucceeded ) {
-            throw new UserException(String.format("Failed to terminate lock acquisition thread while trying to lock file %s. " +
-                                                  "Exiting because it's not safe to proceed with this run of the GATK.",
-                                                  file.getAbsolutePath()));
-        }
-    }
-
-    /**
-     * Background task that attempts to acquire a lock of the specified type, and returns a boolean
-     * indicating success/failure. Uses a non-blocking tryLock() call that should return immediately
-     * (but may get stuck if there's a problem with the OS locking daemon).
-     */
-    private class LockAcquisitionTask implements Callable<Boolean> {
-        private final boolean acquireSharedLock;
-
-        public LockAcquisitionTask( final boolean acquireSharedLock ) {
-            this.acquireSharedLock = acquireSharedLock;
-        }
-
-        public Boolean call() {
-            // Get a read-only or read-write file channel, depending on the type of lock
-            try {
-                channel = new RandomAccessFile(file, acquireSharedLock ? "r" : "rw").getChannel();
-            }
-            catch ( IOException e ) {
-                logger.warn(String.format("WARNING: Unable to lock file %s because we could not open a file channel", file.getAbsolutePath()));
-                return false;
-            }
-
-            boolean lockAcquired = false;
-
-            try {
-                // Non-blocking lock-acquisition call, should return right away. If it doesn't return immediately
-                // due to problems with the OS locking daemon, it will potentially be timed-out and interrupted.
-                lock = channel.tryLock(0, Long.MAX_VALUE, acquireSharedLock);
-                lockAcquired = lock != null;
-            }
-            catch ( AsynchronousCloseException e ) {
-                logger.warn(String.format("WARNING: Unable to lock file %s because the file channel was closed by another thread", file.getAbsolutePath()));
-                lockAcquired = false;
-            }
-            catch ( ClosedChannelException e ) {
-                logger.warn(String.format("WARNING: Unable to lock file %s because the file channel is closed.", file.getAbsolutePath()));
-                lockAcquired = false;
-            }
-            catch ( OverlappingFileLockException e ) {
-                logger.warn(String.format("WARNING: Unable to lock file %s because you already have a lock on this file.", file.getAbsolutePath()));
-                lockAcquired = false;
-            }
-            catch ( FileLockInterruptionException e ) {
-                logger.warn(String.format("WARNING: Interrupted while attempting to lock file %s", file.getAbsolutePath()));
-                lockAcquired = false;
-            }
-            catch ( IOException e ) {
-                logger.warn(String.format("WARNING: Unable to lock file %s because an IOException occurred with message: %s.", file.getAbsolutePath(), e.getMessage()));
-                lockAcquired = false;
-            }
-
-            return lockAcquired;
-        }
-    }
-
-    /**
-     * Unlock the file
-     *
-     * note: this allows unlocking a file that failed to lock (no required user checks on null locks).
-     */
-    public void unlock() {
-        releaseLock();
-        closeChannel();
-    }
-
-    private void releaseLock() {
-        try {
-            if ( lock != null )
-                lock.release();
-        }
-        catch ( ClosedChannelException e ) {
-            // if the channel was already closed we don't have to worry
-        }
-        catch ( IOException e ) {
-            throw new UserException(String.format("An error occurred while releasing the lock for file %s", file.getAbsolutePath()), e);
-        }
-    }
-
-    private void closeChannel() {
-        try {
-            if ( channel != null )
-                channel.close();
-        }
-        catch ( IOException e ) {
-            throw new UserException(String.format("An error occurred while closing channel for file %s", file.getAbsolutePath()), e);
-        }
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/fragments/FragmentCollection.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/fragments/FragmentCollection.java
deleted file mode 100644
index 67d55ff..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/fragments/FragmentCollection.java
+++ /dev/null
@@ -1,67 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.fragments;
-
-import java.util.Collection;
-import java.util.Collections;
-import java.util.List;
-
-/**
- * Useful helper class to represent the results of the reads -> fragment calculation.
- *
- * Contains singleton -- objects whose underlying reads do not overlap their mate pair
- * Contains overlappingPairs -- objects whose underlying reads do overlap their mate pair
- *
- * User: ebanks, depristo
- * Date: Jan 10, 2011
- */
-public class FragmentCollection<T> {
-    Collection<T> singletons;
-    Collection<List<T>> overlappingPairs;
-
-    public FragmentCollection(final Collection<T> singletons, final Collection<List<T>> overlappingPairs) {
-        this.singletons = singletons == null ? Collections.<T>emptyList() : singletons;
-        this.overlappingPairs = overlappingPairs == null ? Collections.<List<T>>emptyList() : overlappingPairs;
-    }
-
-    /**
-     * Gets the T elements not containing overlapping elements, in no particular order
-     *
-     * @return
-     */
-    public Collection<T> getSingletonReads() {
-        return singletons;
-    }
-
-    /**
-     * Gets the T elements containing overlapping elements, in no particular order
-     *
-     * @return
-     */
-    public Collection<List<T>> getOverlappingPairs() {
-        return overlappingPairs;
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/fragments/FragmentUtils.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/fragments/FragmentUtils.java
deleted file mode 100644
index 689fdf6..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/fragments/FragmentUtils.java
+++ /dev/null
@@ -1,377 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.fragments;
-
-import com.google.java.contract.Ensures;
-import com.google.java.contract.Requires;
-import htsjdk.samtools.util.QualityUtil;
-import htsjdk.samtools.Cigar;
-import htsjdk.samtools.CigarElement;
-import htsjdk.samtools.CigarOperator;
-import htsjdk.samtools.SAMRecord;
-import org.broadinstitute.gatk.utils.clipping.ReadClipper;
-import org.broadinstitute.gatk.utils.recalibration.EventType;
-import org.broadinstitute.gatk.utils.collections.Pair;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-import org.broadinstitute.gatk.utils.pileup.PileupElement;
-import org.broadinstitute.gatk.utils.pileup.ReadBackedPileup;
-import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
-import org.broadinstitute.gatk.utils.sam.ReadUtils;
-
-import java.util.*;
-
-/**
- * An easy to access fragment-based pileup, which contains two separate pileups.  The first
- * is a regular collection of PileupElements containing all of the reads in the original RBP
- * that uniquely info about a fragment.  The second are TwoReadPileupElements that, as the
- * name suggests, contain two reads that are sequenced from the same underlying fragment.
- *
- * Based on the original code by E. Banks
- *
- * Oct 21: note that the order of the oneReadPileup and twoReadPileups are not
- * defined.  The algorithms that produce these lists are in fact producing
- * lists of Pileup elements *NOT* sorted by alignment start position of the underlying
- * reads.
- *
- * User: depristo
- * Date: 3/26/11
- * Time: 10:09 PM
- */
-public final class FragmentUtils {
-
-    public final static double DEFAULT_PCR_ERROR_RATE = 1e-4;
-    public final static int DEFAULT_PCR_ERROR_QUAL = QualityUtil.getPhredScoreFromErrorProbability(DEFAULT_PCR_ERROR_RATE);
-    public final static int HALF_OF_DEFAULT_PCR_ERROR_QUAL = DEFAULT_PCR_ERROR_QUAL / 2;
-
-    protected final static byte MIN_QUAL_BAD_OVERLAP = 16;
-    private FragmentUtils() {} // private constructor
-
-    /**
-     * A getter function that takes an Object of type T and returns its associated SAMRecord.
-     *
-     * Allows us to write a generic T -> Fragment algorithm that works with any object containing
-     * a read.
-     *
-     * @param <T> The type of the object that contains a GATKSAMRecord
-     */
-    public interface ReadGetter<T> {
-        /**
-         * Get the GATKSAMRecord associated with object
-         *
-         * @param object the thing that contains the read
-         * @return a non-null GATKSAMRecord read
-         */
-        public GATKSAMRecord get(T object);
-    }
-
-    /**
-     * Identify getter for SAMRecords themselves
-     */
-    private final static ReadGetter<GATKSAMRecord> SamRecordGetter = new ReadGetter<GATKSAMRecord>() {
-        @Override public GATKSAMRecord get(final GATKSAMRecord object) { return object; }
-    };
-
-    /**
-     * Gets the SAMRecord in a PileupElement
-     */
-    private final static ReadGetter<PileupElement> PileupElementGetter = new ReadGetter<PileupElement>() {
-        @Override public GATKSAMRecord get(final PileupElement object) { return object.getRead(); }
-    };
-
-
-    /**
-     * Generic algorithm that takes an iterable over T objects, a getter routine to extract the reads in T,
-     * and returns a FragmentCollection that contains the T objects whose underlying reads either overlap (or
-     * not) with their mate pairs.
-     *
-     * @param readContainingObjects An iterator of objects that contain GATKSAMRecords
-     * @param nElements the number of elements to be provided by the iterator, which is usually known upfront and
-     *                  greatly improves the efficiency of the fragment calculation
-     * @param getter a helper function that takes an object of type T and returns is associated GATKSAMRecord
-     * @param <T>
-     * @return a fragment collection
-     */
-    @Requires({
-            "readContainingObjects != null",
-            "nElements >= 0",
-            "getter != null"
-    })
-    @Ensures("result != null")
-    private static <T> FragmentCollection<T> create(final Iterable<T> readContainingObjects, final int nElements, final ReadGetter<T> getter) {
-        Collection<T> singletons = null;
-        Collection<List<T>> overlapping = null;
-        Map<String, T> nameMap = null;
-
-        int lastStart = -1;
-
-        // build an initial map, grabbing all of the multi-read fragments
-        for ( final T p : readContainingObjects ) {
-            final SAMRecord read = getter.get(p);
-
-            if ( read.getAlignmentStart() < lastStart ) {
-                throw new IllegalArgumentException(String.format(
-                        "FragmentUtils.create assumes that the incoming objects are ordered by " +
-                                "SAMRecord alignment start, but saw a read %s with alignment start " +
-                                "%d before the previous start %d", read.getSAMString(), read.getAlignmentStart(), lastStart));
-            }
-            lastStart = read.getAlignmentStart();
-
-            final int mateStart = read.getMateAlignmentStart();
-            if ( mateStart == 0 || mateStart > read.getAlignmentEnd() ) {
-                // if we know that this read won't overlap its mate, or doesn't have one, jump out early
-                if ( singletons == null ) singletons = new ArrayList<T>(nElements); // lazy init
-                singletons.add(p);
-            } else {
-                // the read might overlap it's mate, or is the rightmost read of a pair
-                final String readName = read.getReadName();
-                final T pe1 = nameMap == null ? null : nameMap.get(readName);
-                if ( pe1 != null ) {
-                    // assumes we have at most 2 reads per fragment
-                    if ( overlapping == null ) overlapping = new ArrayList<List<T>>(); // lazy init
-                    overlapping.add(Arrays.asList(pe1, p));
-                    nameMap.remove(readName);
-                } else {
-                    if ( nameMap == null ) nameMap = new HashMap<String, T>(nElements); // lazy init
-                    nameMap.put(readName, p);
-                }
-            }
-        }
-
-        // add all of the reads that are potentially overlapping but whose mate never showed
-        // up to the oneReadPile
-        if ( nameMap != null && ! nameMap.isEmpty() ) {
-            if ( singletons == null )
-                singletons = nameMap.values();
-            else
-                singletons.addAll(nameMap.values());
-        }
-
-        return new FragmentCollection<T>(singletons, overlapping);
-    }
-
-    /**
-     * Create a FragmentCollection containing PileupElements from the ReadBackedPileup rbp
-     * @param rbp a non-null read-backed pileup.  The elements in this ReadBackedPileup must be ordered
-     * @return a non-null FragmentCollection
-     */
-    @Ensures("result != null")
-    public static FragmentCollection<PileupElement> create(final ReadBackedPileup rbp) {
-        if ( rbp == null ) throw new IllegalArgumentException("Pileup cannot be null");
-        return create(rbp, rbp.getNumberOfElements(), PileupElementGetter);
-    }
-
-    /**
-     * Create a FragmentCollection containing GATKSAMRecords from a list of reads
-     *
-     * @param reads a non-null list of reads, ordered by their start location
-     * @return a non-null FragmentCollection
-     */
-    @Ensures("result != null")
-    public static FragmentCollection<GATKSAMRecord> create(final List<GATKSAMRecord> reads) {
-        if ( reads == null ) throw new IllegalArgumentException("Pileup cannot be null");
-        return create(reads, reads.size(), SamRecordGetter);
-    }
-
-    public static void adjustQualsOfOverlappingPairedFragments( final List<GATKSAMRecord> overlappingPair ) {
-        if( overlappingPair.size() != 2 ) { throw new ReviewedGATKException("Found overlapping pair with " + overlappingPair.size() + " reads, but expecting exactly 2."); }
-
-        final GATKSAMRecord firstRead = overlappingPair.get(0);
-        final GATKSAMRecord secondRead = overlappingPair.get(1);
-
-        if ( secondRead.getSoftStart() < firstRead.getSoftStart() ) {
-            adjustQualsOfOverlappingPairedFragments(secondRead, firstRead);
-        } else {
-            adjustQualsOfOverlappingPairedFragments(firstRead, secondRead);
-        }
-    }
-
-    /**
-     * Fix two overlapping reads from the same fragment by adjusting base qualities, if possible
-     *
-     * firstRead and secondRead must be part of the same fragment (though this isn't checked).  Looks
-     * at the bases and alignment, and tries its best to create adjusted base qualities so that the observations
-     * are not treated independently.
-     *
-     * Assumes that firstRead starts before secondRead (according to their soft clipped starts)
-     *
-     * @param clippedFirstRead the left most read
-     * @param clippedSecondRead the right most read
-     *
-     * @return a strandless merged read of first and second, or null if the algorithm cannot create a meaningful one
-     */
-    public static void adjustQualsOfOverlappingPairedFragments(final GATKSAMRecord clippedFirstRead, final GATKSAMRecord clippedSecondRead) {
-        if ( clippedFirstRead == null ) throw new IllegalArgumentException("clippedFirstRead cannot be null");
-        if ( clippedSecondRead == null ) throw new IllegalArgumentException("clippedSecondRead cannot be null");
-        if ( ! clippedFirstRead.getReadName().equals(clippedSecondRead.getReadName()) ) throw new IllegalArgumentException("attempting to merge two reads with different names " + clippedFirstRead + " and " + clippedSecondRead);
-
-        // don't adjust fragments that do not overlap
-        if ( clippedFirstRead.getAlignmentEnd() < clippedSecondRead.getAlignmentStart() || !clippedFirstRead.getReferenceIndex().equals(clippedSecondRead.getReferenceIndex()) )
-            return;
-
-        final Pair<Integer, Boolean> pair = ReadUtils.getReadCoordinateForReferenceCoordinate(clippedFirstRead, clippedSecondRead.getAlignmentStart());
-        final int firstReadStop = ( pair.getSecond() ? pair.getFirst() + 1 : pair.getFirst() );
-        final int numOverlappingBases = Math.min(clippedFirstRead.getReadLength() - firstReadStop, clippedSecondRead.getReadLength());
-
-        final byte[] firstReadBases = clippedFirstRead.getReadBases();
-        final byte[] firstReadQuals = clippedFirstRead.getBaseQualities();
-        final byte[] secondReadBases = clippedSecondRead.getReadBases();
-        final byte[] secondReadQuals = clippedSecondRead.getBaseQualities();
-
-        for ( int i = 0; i < numOverlappingBases; i++ ) {
-            final int firstReadIndex = firstReadStop + i;
-            final byte firstReadBase = firstReadBases[firstReadIndex];
-            final byte secondReadBase = secondReadBases[i];
-
-            if ( firstReadBase == secondReadBase ) {
-                firstReadQuals[firstReadIndex] = (byte) Math.min(firstReadQuals[firstReadIndex], HALF_OF_DEFAULT_PCR_ERROR_QUAL);
-                secondReadQuals[i] = (byte) Math.min(secondReadQuals[i], HALF_OF_DEFAULT_PCR_ERROR_QUAL);
-            } else {
-                // TODO -- use the proper statistical treatment of the quals from DiploidSNPGenotypeLikelihoods.java
-                firstReadQuals[firstReadIndex] = 0;
-                secondReadQuals[i] = 0;
-            }
-        }
-
-        clippedFirstRead.setBaseQualities(firstReadQuals);
-        clippedSecondRead.setBaseQualities(secondReadQuals);
-    }
-
-    public static List<GATKSAMRecord> mergeOverlappingPairedFragments( final List<GATKSAMRecord> overlappingPair ) {
-        if( overlappingPair.size() != 2 ) { throw new ReviewedGATKException("Found overlapping pair with " + overlappingPair.size() + " reads, but expecting exactly 2."); }
-
-        final GATKSAMRecord firstRead = overlappingPair.get(0);
-        final GATKSAMRecord secondRead = overlappingPair.get(1);
-
-        final GATKSAMRecord merged;
-        if( !(secondRead.getSoftStart() <= firstRead.getSoftEnd() && secondRead.getSoftStart() >= firstRead.getSoftStart() && secondRead.getSoftEnd() >= firstRead.getSoftEnd()) ) {
-            merged = mergeOverlappingPairedFragments(secondRead, firstRead);
-        } else {
-            merged = mergeOverlappingPairedFragments(firstRead, secondRead);
-        }
-
-        return merged == null ? overlappingPair : Collections.singletonList(merged);
-    }
-
-    /**
-     * Merge two overlapping reads from the same fragment into a single super read, if possible
-     *
-     * firstRead and secondRead must be part of the same fragment (though this isn't checked).  Looks
-     * at the bases and alignment, and tries its best to create a meaningful synthetic single super read
-     * that represents the entire sequenced fragment.
-     *
-     * Assumes that firstRead starts before secondRead (according to their soft clipped starts)
-     *
-     * @param unclippedFirstRead the left most read
-     * @param unclippedSecondRead the right most read
-     *
-     * @return a strandless merged read of first and second, or null if the algorithm cannot create a meaningful one
-     */
-    public static GATKSAMRecord mergeOverlappingPairedFragments(final GATKSAMRecord unclippedFirstRead, final GATKSAMRecord unclippedSecondRead) {
-        if ( unclippedFirstRead == null ) throw new IllegalArgumentException("unclippedFirstRead cannot be null");
-        if ( unclippedSecondRead == null ) throw new IllegalArgumentException("unclippedSecondRead cannot be null");
-        if ( ! unclippedFirstRead.getReadName().equals(unclippedSecondRead.getReadName()) ) throw new IllegalArgumentException("attempting to merge two reads with different names " + unclippedFirstRead + " and " + unclippedSecondRead);
-
-        if( unclippedFirstRead.getCigarString().contains("I") || unclippedFirstRead.getCigarString().contains("D") || unclippedSecondRead.getCigarString().contains("I") || unclippedSecondRead.getCigarString().contains("D") ) {
-            return null; // fragments contain indels so don't merge them
-        }
-
-        final GATKSAMRecord firstRead = ReadClipper.hardClipAdaptorSequence(ReadClipper.revertSoftClippedBases(unclippedFirstRead));
-        final GATKSAMRecord secondRead = ReadClipper.hardClipAdaptorSequence(ReadClipper.revertSoftClippedBases(unclippedSecondRead));
-
-        if( !(secondRead.getSoftStart() <= firstRead.getSoftEnd() && secondRead.getSoftStart() >= firstRead.getSoftStart() && secondRead.getSoftEnd() >= firstRead.getSoftEnd()) ) {
-            return null; // can't merge them, yet:  AAAAAAAAAAA-BBBBBBBBBBB-AAAAAAAAAAAAAA, B is contained entirely inside A
-        }
-
-        final Pair<Integer, Boolean> pair = ReadUtils.getReadCoordinateForReferenceCoordinate(firstRead, secondRead.getAlignmentStart());
-
-        final int firstReadStop = ( pair.getSecond() ? pair.getFirst() + 1 : pair.getFirst() );
-        final int numBases = firstReadStop + secondRead.getReadLength();
-        final byte[] bases = new byte[numBases];
-        final byte[] quals = new byte[numBases];
-        final byte[] insertionQuals = new byte[numBases];
-        final byte[] deletionQuals = new byte[numBases];
-        final byte[] firstReadBases = firstRead.getReadBases();
-        final byte[] firstReadQuals = firstRead.getBaseQualities();
-        final byte[] secondReadBases = secondRead.getReadBases();
-        final byte[] secondReadQuals = secondRead.getBaseQualities();
-
-        for(int iii = 0; iii < firstReadStop; iii++) {
-            bases[iii] = firstReadBases[iii];
-            quals[iii] = firstReadQuals[iii];
-        }
-        for(int iii = firstReadStop; iii < firstRead.getReadLength(); iii++) {
-            if( firstReadQuals[iii] > MIN_QUAL_BAD_OVERLAP && secondReadQuals[iii-firstReadStop] > MIN_QUAL_BAD_OVERLAP && firstReadBases[iii] != secondReadBases[iii-firstReadStop] ) {
-                return null; // high qual bases don't match exactly, probably indel in only one of the fragments, so don't merge them
-            }
-            if( firstReadQuals[iii] < MIN_QUAL_BAD_OVERLAP && secondReadQuals[iii-firstReadStop] < MIN_QUAL_BAD_OVERLAP ) {
-                return null; // both reads have low qual bases in the overlap region so don't merge them because don't know what is going on
-            }
-            bases[iii] = ( firstReadQuals[iii] > secondReadQuals[iii-firstReadStop] ? firstReadBases[iii] : secondReadBases[iii-firstReadStop] );
-            quals[iii] = ( firstReadQuals[iii] > secondReadQuals[iii-firstReadStop] ? firstReadQuals[iii] : secondReadQuals[iii-firstReadStop] );
-        }
-        for(int iii = firstRead.getReadLength(); iii < numBases; iii++) {
-            bases[iii] = secondReadBases[iii-firstReadStop];
-            quals[iii] = secondReadQuals[iii-firstReadStop];
-        }
-
-        final GATKSAMRecord returnRead = new GATKSAMRecord( firstRead.getHeader() );
-        returnRead.setIsStrandless(true);
-        returnRead.setAlignmentStart( firstRead.getAlignmentStart() );
-        returnRead.setReadBases( bases );
-        returnRead.setBaseQualities( quals );
-        returnRead.setReadGroup( firstRead.getReadGroup() );
-        returnRead.setReferenceName( firstRead.getReferenceName() );
-        returnRead.setReadName( firstRead.getReadName() );
-        final CigarElement c = new CigarElement(bases.length, CigarOperator.M);
-        final ArrayList<CigarElement> cList = new ArrayList<CigarElement>();
-        cList.add(c);
-        returnRead.setCigar( new Cigar( cList ));
-        returnRead.setMappingQuality( firstRead.getMappingQuality() );
-
-        if( firstRead.hasBaseIndelQualities() || secondRead.hasBaseIndelQualities() ) {
-            final byte[] firstReadInsertionQuals = firstRead.getBaseInsertionQualities();
-            final byte[] firstReadDeletionQuals = firstRead.getBaseDeletionQualities();
-            final byte[] secondReadInsertionQuals = secondRead.getBaseInsertionQualities();
-            final byte[] secondReadDeletionQuals = secondRead.getBaseDeletionQualities();
-            for(int iii = 0; iii < firstReadStop; iii++) {
-                insertionQuals[iii] = firstReadInsertionQuals[iii];
-                deletionQuals[iii] = firstReadDeletionQuals[iii];
-            }
-            for(int iii = firstReadStop; iii < firstRead.getReadLength(); iii++) {
-                insertionQuals[iii] = ( firstReadQuals[iii] > secondReadQuals[iii-firstReadStop] ? firstReadInsertionQuals[iii] : secondReadInsertionQuals[iii-firstReadStop] ); // Purposefully checking the highest *base* quality score
-                deletionQuals[iii] = ( firstReadQuals[iii] > secondReadQuals[iii-firstReadStop] ? firstReadDeletionQuals[iii] : secondReadDeletionQuals[iii-firstReadStop] ); // Purposefully checking the highest *base* quality score
-            }
-            for(int iii = firstRead.getReadLength(); iii < numBases; iii++) {
-                insertionQuals[iii] = secondReadInsertionQuals[iii-firstReadStop];
-                deletionQuals[iii] = secondReadDeletionQuals[iii-firstReadStop];
-            }
-            returnRead.setBaseQualities( insertionQuals, EventType.BASE_INSERTION );
-            returnRead.setBaseQualities( deletionQuals, EventType.BASE_DELETION );
-        }
-
-        return returnRead;
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/genotyper/DiploidGenotype.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/genotyper/DiploidGenotype.java
deleted file mode 100644
index f836f44..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/genotyper/DiploidGenotype.java
+++ /dev/null
@@ -1,125 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.genotyper;
-
-import org.broadinstitute.gatk.utils.BaseUtils;
-
-public enum DiploidGenotype {
-    AA ('A', 'A'),
-    AC ('A', 'C'),
-    CC ('C', 'C'),
-    AG ('A', 'G'),
-    CG ('C', 'G'),
-    GG ('G', 'G'),
-    AT ('A', 'T'),
-    CT ('C', 'T'),
-    GT ('G', 'T'),
-    TT ('T', 'T');
-
-    public byte base1, base2;
-
-    @Deprecated
-    private DiploidGenotype(char base1, char base2) {
-        this((byte)base1, (byte)base2);
-    }
-
-    private DiploidGenotype(byte base1, byte base2) {
-        this.base1 = base1;
-        this.base2 = base2;
-    }
-
-    public boolean isHomRef(byte r) {
-        return isHom() && r == base1;
-    }
-
-    public boolean isHomVar(byte r) {
-        return isHom() && r != base1;
-    }
-
-    public boolean isHetRef(byte r) {
-        if ( base1 == r )
-            return r != base2;
-        else
-            return base2 == r;
-    }
-
-    public boolean isHom() {
-        return ! isHet();
-    }
-
-    public boolean isHet() {
-        return base1 != base2;
-    }
-
-    /**
-     * create a diploid genotype, given a character to make into a hom genotype
-     * @param hom the character to turn into a hom genotype, i.e. if it is A, then returned will be AA
-     * @return the diploid genotype
-     */
-    public static DiploidGenotype createHomGenotype(byte hom) {
-        int index = BaseUtils.simpleBaseToBaseIndex(hom);
-        if ( index == -1 )
-            throw new IllegalArgumentException(hom + " is not a valid base character");
-        return conversionMatrix[index][index];
-    }
-
-    /**
-     * create a diploid genotype, given 2 chars which may not necessarily be ordered correctly
-     * @param base1 base1
-     * @param base2 base2
-     * @return the diploid genotype
-     */
-    public static DiploidGenotype createDiploidGenotype(byte base1, byte base2) {
-        int index1 = BaseUtils.simpleBaseToBaseIndex(base1);
-        if ( index1 == -1 )
-            throw new IllegalArgumentException(base1 + " is not a valid base character");
-        int index2 = BaseUtils.simpleBaseToBaseIndex(base2);
-        if ( index2 == -1 )
-            throw new IllegalArgumentException(base2 + " is not a valid base character");
-        return conversionMatrix[index1][index2];
-    }
-
-    /**
-     * create a diploid genotype, given 2 base indexes which may not necessarily be ordered correctly
-     * @param baseIndex1 base1
-     * @param baseIndex2 base2
-     * @return the diploid genotype
-     */
-    public static DiploidGenotype createDiploidGenotype(int baseIndex1, int baseIndex2) {
-        if ( baseIndex1 == -1 )
-            throw new IllegalArgumentException(baseIndex1 + " does not represent a valid base character");
-        if ( baseIndex2 == -1 )
-            throw new IllegalArgumentException(baseIndex2 + " does not represent a valid base character");
-        return conversionMatrix[baseIndex1][baseIndex2];
-    }
-
-    private static final DiploidGenotype[][] conversionMatrix = {
-            { DiploidGenotype.AA, DiploidGenotype.AC, DiploidGenotype.AG, DiploidGenotype.AT },
-            { DiploidGenotype.AC, DiploidGenotype.CC, DiploidGenotype.CG, DiploidGenotype.CT },
-            { DiploidGenotype.AG, DiploidGenotype.CG, DiploidGenotype.GG, DiploidGenotype.GT },
-            { DiploidGenotype.AT, DiploidGenotype.CT, DiploidGenotype.GT, DiploidGenotype.TT }
-    };
-}
\ No newline at end of file
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/genotyper/MostLikelyAllele.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/genotyper/MostLikelyAllele.java
deleted file mode 100644
index 65c1fd0..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/genotyper/MostLikelyAllele.java
+++ /dev/null
@@ -1,134 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.genotyper;
-
-import org.broadinstitute.gatk.utils.MathUtils;
-import htsjdk.variant.variantcontext.Allele;
-
-/**
- * Stores the most likely and second most likely alleles, along with a threshold
- * for assuming computing that a read is informative.
- *
- * If the difference between the most-likely allele and the next-most-likely allele is < INFORMATIVE_LIKELIHOOD_THRESHOLD
- * then the most likely allele is set to "no call", and isInformative will return false.  This constant can be
- * overridden simply by using one of the version of these calls that accepts informative threshold as an argument.
- *
- * For convenience, there are functions called getAlleleIfInformative that return either the most likely allele, or
- * NO_CALL if two or more alleles have likelihoods within INFORMATIVE_LIKELIHOOD_THRESHOLD of one another.
- *
- * By default empty allele maps will return NO_CALL, and allele maps with a single entry will return the
- * corresponding key
- *
- * User: depristo
- * Date: 3/24/13
- * Time: 1:39 PM
- */
-public final class MostLikelyAllele {
-    public static final double INFORMATIVE_LIKELIHOOD_THRESHOLD = 0.2;
-
-    final Allele mostLikely;
-    final Allele secondLikely;
-    final double log10LikelihoodOfMostLikely;
-    final double log10LikelihoodOfSecondBest;
-
-    /**
-     * Create a new MostLikelyAllele
-     *
-     * If there's a meaningful most likely allele, allele should be a real allele.  If none can be determined,
-     * mostLikely should be a NO_CALL allele.
-     *
-     * @param mostLikely the most likely allele
-     * @param secondMostLikely the most likely allele after mostLikely
-     * @param log10LikelihoodOfMostLikely the log10 likelihood of the most likely allele
-     * @param log10LikelihoodOfSecondBest the log10 likelihood of the next most likely allele (should be NEGATIVE_INFINITY if none is available)
-     */
-    public MostLikelyAllele(final Allele mostLikely, final Allele secondMostLikely, double log10LikelihoodOfMostLikely, double log10LikelihoodOfSecondBest) {
-        if ( mostLikely == null ) throw new IllegalArgumentException("mostLikely allele cannot be null");
-        if ( log10LikelihoodOfMostLikely != Double.NEGATIVE_INFINITY && ! MathUtils.goodLog10Probability(log10LikelihoodOfMostLikely) )
-            throw new IllegalArgumentException("log10LikelihoodOfMostLikely must be either -Infinity or a good log10 prob but got " + log10LikelihoodOfMostLikely);
-        if ( log10LikelihoodOfSecondBest != Double.NEGATIVE_INFINITY && ! MathUtils.goodLog10Probability(log10LikelihoodOfSecondBest) )
-            throw new IllegalArgumentException("log10LikelihoodOfSecondBest must be either -Infinity or a good log10 prob but got " + log10LikelihoodOfSecondBest);
-        if ( log10LikelihoodOfMostLikely < log10LikelihoodOfSecondBest )
-            throw new IllegalArgumentException("log10LikelihoodOfMostLikely must be <= log10LikelihoodOfSecondBest but got " + log10LikelihoodOfMostLikely + " vs 2nd " + log10LikelihoodOfSecondBest);
-
-        this.mostLikely = mostLikely;
-        this.secondLikely = secondMostLikely;
-        this.log10LikelihoodOfMostLikely = log10LikelihoodOfMostLikely;
-        this.log10LikelihoodOfSecondBest = log10LikelihoodOfSecondBest;
-    }
-
-    public Allele getMostLikelyAllele() {
-        return mostLikely;
-    }
-
-    public Allele getSecondMostLikelyAllele() {
-        return secondLikely;
-    }
-
-    public double getLog10LikelihoodOfMostLikely() {
-        return log10LikelihoodOfMostLikely;
-    }
-
-    public double getLog10LikelihoodOfSecondBest() {
-        return log10LikelihoodOfSecondBest;
-    }
-
-    /**
-     * @see #isInformative(double) with threshold of INFORMATIVE_LIKELIHOOD_THRESHOLD
-     */
-    public boolean isInformative() {
-        return isInformative(INFORMATIVE_LIKELIHOOD_THRESHOLD);
-    }
-
-    /**
-     * Was this allele selected from an object that was specifically informative about the allele?
-     *
-     * The calculation that implements this is whether the likelihood of the most likely allele is larger
-     * than the second most likely by at least the log10ThresholdForInformative
-     *
-     * @return true if so, false if not
-     */
-    public boolean isInformative(final double log10ThresholdForInformative) {
-        return getLog10LikelihoodOfMostLikely() - getLog10LikelihoodOfSecondBest() > log10ThresholdForInformative;
-    }
-
-    /**
-     * @see #getAlleleIfInformative(double) with threshold of INFORMATIVE_LIKELIHOOD_THRESHOLD
-     */
-    public Allele getAlleleIfInformative() {
-        return getAlleleIfInformative(INFORMATIVE_LIKELIHOOD_THRESHOLD);
-    }
-
-    /**
-     * Get the most likely allele if isInformative(log10ThresholdForInformative) is true, or NO_CALL otherwise
-     *
-     * @param log10ThresholdForInformative a log10 threshold to determine if the most likely allele was informative
-     * @return a non-null allele
-     */
-    public Allele getAlleleIfInformative(final double log10ThresholdForInformative) {
-        return isInformative(log10ThresholdForInformative) ? getMostLikelyAllele() : Allele.NO_CALL;
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/genotyper/PerReadAlleleLikelihoodMap.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/genotyper/PerReadAlleleLikelihoodMap.java
deleted file mode 100644
index 1dd8a8a..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/genotyper/PerReadAlleleLikelihoodMap.java
+++ /dev/null
@@ -1,413 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.genotyper;
-
-
-import com.google.java.contract.Ensures;
-import org.broadinstitute.gatk.engine.downsampling.AlleleBiasedDownsamplingUtils;
-import org.broadinstitute.gatk.utils.GenomeLoc;
-import org.broadinstitute.gatk.utils.MathUtils;
-import org.broadinstitute.gatk.utils.haplotype.Haplotype;
-import org.broadinstitute.gatk.utils.pileup.PileupElement;
-import org.broadinstitute.gatk.utils.pileup.ReadBackedPileup;
-import org.broadinstitute.gatk.utils.sam.AlignmentUtils;
-import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
-import htsjdk.variant.variantcontext.Allele;
-
-import java.util.*;
-
-/**
- *   Wrapper class that holds a set of maps of the form (Read -> Map(Allele->Double))
- *   For each read, this holds underlying alleles represented by an aligned read, and corresponding relative likelihood.
- */
-public class PerReadAlleleLikelihoodMap {
-    /** A set of all of the allele, so we can efficiently determine if an allele is already present */
-    private final Map<Allele,Integer> allelesSet = new HashMap<>();
-    /** A list of the unique allele, as an ArrayList so we can call get(i) efficiently */
-    protected final List<Allele> alleles = new ArrayList<>();
-
-
-
-    protected final Map<GATKSAMRecord, Map<Allele, Double>> likelihoodReadMap = new LinkedHashMap<>();
-
-    public PerReadAlleleLikelihoodMap() { }
-
-    /**
-     * Add a new entry into the Read -> ( Allele -> Likelihood ) map of maps.
-     * @param read - the GATKSAMRecord that was evaluated
-     * @param a - the Allele against which the GATKSAMRecord was evaluated
-     * @param likelihood - the likelihood score resulting from the evaluation of "read" against "a"
-     */
-    public void add(final GATKSAMRecord read, final Allele a, final Double likelihood) {
-        if ( read == null ) throw new IllegalArgumentException("Cannot add a null read to the allele likelihood map");
-        if ( a == null ) throw new IllegalArgumentException("Cannot add a null allele to the allele likelihood map");
-        if ( likelihood == null ) throw new IllegalArgumentException("Likelihood cannot be null");
-        if ( likelihood > 0.0 ) throw new IllegalArgumentException("Likelihood must be negative (L = log(p))");
-
-        if (!allelesSet.containsKey(a)) {
-            allelesSet.put(a,alleles.size());
-            alleles.add(a);
-        }
-        Map<Allele,Double> likelihoodMap = likelihoodReadMap.get(read);
-        if (likelihoodMap == null){
-            // LinkedHashMap will ensure iterating through alleles will be in consistent order
-            likelihoodMap = new LinkedHashMap<>();
-            likelihoodReadMap.put(read,likelihoodMap);
-        }
-
-        likelihoodMap.put(a,likelihood);
-
-
-    }
-
-    public ReadBackedPileup createPerAlleleDownsampledBasePileup(final ReadBackedPileup pileup, final double downsamplingFraction) {
-        return AlleleBiasedDownsamplingUtils.createAlleleBiasedBasePileup(pileup, downsamplingFraction);
-    }
-
-    /**
-     * For each allele "a" , identify those reads whose most likely allele is "a", and remove a "downsamplingFraction" proportion
-     * of those reads from the "likelihoodReadMap". This is used for e.g. sample contamination
-     * @param downsamplingFraction - the fraction of supporting reads to remove from each allele. If <=0 all reads kept, if >=1 all reads tossed.
-     */
-    public void performPerAlleleDownsampling(final double downsamplingFraction) {
-        // special case removal of all or no reads
-        if ( downsamplingFraction <= 0.0 )
-            return;
-        if ( downsamplingFraction >= 1.0 ) {
-            likelihoodReadMap.clear();
-            return;
-        }
-
-        // start by stratifying the reads by the alleles they represent at this position
-        final Map<Allele, List<GATKSAMRecord>> alleleReadMap = getAlleleStratifiedReadMap();
-
-        // compute the reads to remove and actually remove them
-        final List<GATKSAMRecord> readsToRemove = AlleleBiasedDownsamplingUtils.selectAlleleBiasedReads(alleleReadMap, downsamplingFraction);
-        for ( final GATKSAMRecord read : readsToRemove )
-            likelihoodReadMap.remove(read);
-    }
-
-    /**
-     * Convert the @likelihoodReadMap to a map of alleles to reads, where each read is mapped uniquely to the allele
-     * for which it has the greatest associated likelihood
-     * @return a map from each allele to a list of reads that 'support' the allele
-     */
-    protected Map<Allele,List<GATKSAMRecord>> getAlleleStratifiedReadMap() {
-        final Map<Allele, List<GATKSAMRecord>> alleleReadMap = new HashMap<>(alleles.size());
-        for ( final Allele allele : alleles )
-            alleleReadMap.put(allele, new ArrayList<GATKSAMRecord>());
-
-        for ( final Map.Entry<GATKSAMRecord, Map<Allele, Double>> entry : likelihoodReadMap.entrySet() ) {
-            final MostLikelyAllele bestAllele = getMostLikelyAllele(entry.getValue());
-            if ( bestAllele.isInformative() )
-                alleleReadMap.get(bestAllele.getMostLikelyAllele()).add(entry.getKey());
-        }
-
-        return alleleReadMap;
-    }
-
-    @Ensures("result >=0")
-    public int size() {
-        return likelihoodReadMap.size();
-    }
-
-    /**
-     * Helper function to add the read underneath a pileup element to the map
-     * @param p                              Pileup element
-     * @param a                              Corresponding allele
-     * @param likelihood                     Allele likelihood
-     */
-    public void add(PileupElement p, Allele a, Double likelihood) {
-        if (p==null)
-            throw new IllegalArgumentException("Pileup element cannot be null");
-        if ( p.getRead()==null )
-           throw new IllegalArgumentException("Read underlying pileup element cannot be null");
-        if ( a == null )
-           throw new IllegalArgumentException("Allele for add() cannot be null");
-
-        add(p.getRead(), a, likelihood);
-    }
-
-     /**
-     * Does the current map contain the key associated with a particular SAM record in pileup?
-     * @param p                 Pileup element
-     * @return true if the map contains pileup element, else false
-     */
-    public boolean containsPileupElement(final PileupElement p) {
-        return likelihoodReadMap.containsKey(p.getRead());
-    }
-
-    public boolean isEmpty() {
-        return likelihoodReadMap.isEmpty();
-    }
-
-    public Map<GATKSAMRecord,Map<Allele,Double>> getLikelihoodReadMap() {
-        return likelihoodReadMap;
-    }
-
-    public void clear() {
-        allelesSet.clear();
-        alleles.clear();
-        likelihoodReadMap.clear();
-    }
-
-    public Set<GATKSAMRecord> getStoredElements() {
-        return likelihoodReadMap.keySet();
-    }
-
-//    public Collection<Map<Allele,Double>> getLikelihoodMapValues() {
-//        return likelihoodReadMap.values();
-//    }
-
-    public int getNumberOfStoredElements() {
-        return likelihoodReadMap.size();
-    }
-
-    public Map<Allele,Double> getLikelihoodsAssociatedWithPileupElement(final PileupElement p) {
-        if (!likelihoodReadMap.containsKey(p.getRead()))
-            return null;
-
-        return likelihoodReadMap.get(p.getRead());
-    }
-
-
-    /**
-     * Get the log10 likelihood associated with an individual read/allele
-     *
-     * @param read the read whose likelihood we want
-     * @param allele the allele whose likelihood we want
-     * @return the log10 likelihood that this read matches this allele
-     */
-    public double getLikelihoodAssociatedWithReadAndAllele(final GATKSAMRecord read, final Allele allele){
-        if (!allelesSet.containsKey(allele) || !likelihoodReadMap.containsKey(read))
-            return 0.0;
-
-        return likelihoodReadMap.get(read).get(allele);
-    }
-
-    /**
-     * Get the most likely alleles estimated across all reads in this object
-     *
-     * Takes the most likely two alleles according to their diploid genotype likelihoods.  That is, for
-     * each allele i and j we compute p(D | i,j) where D is the read likelihoods.  We track the maximum
-     * i,j likelihood and return an object that contains the alleles i and j as well as the max likelihood.
-     *
-     * Note that the second most likely diploid genotype is not tracked so the resulting MostLikelyAllele
-     * doesn't have a meaningful get best likelihood.
-     *
-     * @return a MostLikelyAllele object, or null if this map is empty
-     */
-    public MostLikelyAllele getMostLikelyDiploidAlleles() {
-        if ( isEmpty() ) return null;
-
-        int hap1 = 0;
-        int hap2 = 0;
-        double maxElement = Double.NEGATIVE_INFINITY;
-        for( int iii = 0; iii < alleles.size(); iii++ ) {
-            final Allele iii_allele = alleles.get(iii);
-            for( int jjj = 0; jjj <= iii; jjj++ ) {
-                final Allele jjj_allele = alleles.get(jjj);
-
-                double haplotypeLikelihood = 0.0;
-                for( final Map.Entry<GATKSAMRecord, Map<Allele,Double>> entry : likelihoodReadMap.entrySet() ) {
-                    // Compute log10(10^x1/2 + 10^x2/2) = log10(10^x1+10^x2)-log10(2)
-                    final double likelihood_iii = entry.getValue().get(iii_allele);
-                    final double likelihood_jjj = entry.getValue().get(jjj_allele);
-                    haplotypeLikelihood += MathUtils.approximateLog10SumLog10(likelihood_iii, likelihood_jjj) + MathUtils.LOG_ONE_HALF;
-
-                    // fast exit.  If this diploid pair is already worse than the max, just stop and look at the next pair
-                    if ( haplotypeLikelihood < maxElement ) break;
-                }
-
-                // keep track of the max element and associated indices
-                if ( haplotypeLikelihood > maxElement ) {
-                    hap1 = iii;
-                    hap2 = jjj;
-                    maxElement = haplotypeLikelihood;
-                }
-            }
-        }
-
-        if ( maxElement == Double.NEGATIVE_INFINITY )
-            throw new IllegalStateException("max likelihood is " + maxElement + " indicating something has gone wrong");
-
-        return new MostLikelyAllele(alleles.get(hap1), alleles.get(hap2), maxElement, maxElement);
-    }
-
-    /**
-     * Given a map from alleles to likelihoods, find the allele with the largest likelihood.
-     *
-     * @param alleleMap - a map from alleles to likelihoods
-     * @return - a MostLikelyAllele object
-     */
-    @Ensures("result != null")
-    public static MostLikelyAllele getMostLikelyAllele( final Map<Allele,Double> alleleMap ) {
-        return getMostLikelyAllele(alleleMap, null);
-    }
-
-    /**
-     * Given a map from alleles to likelihoods, find the allele with the largest likelihood.
-     *
-     * @param alleleMap - a map from alleles to likelihoods
-     * @param onlyConsiderTheseAlleles if not null, we will only consider alleles in this set for being one of the best.
-     *                                 this is useful for the case where you've selected a subset of the alleles that
-     *                                 the reads have been computed for further analysis.  If null totally ignored
-     * @return - a MostLikelyAllele object
-     */
-    public static MostLikelyAllele getMostLikelyAllele( final Map<Allele,Double> alleleMap, final Set<Allele> onlyConsiderTheseAlleles ) {
-        if ( alleleMap == null ) throw new IllegalArgumentException("The allele to likelihood map cannot be null");
-        double maxLike = Double.NEGATIVE_INFINITY;
-        double prevMaxLike = Double.NEGATIVE_INFINITY;
-        Allele mostLikelyAllele = Allele.NO_CALL;
-        Allele secondMostLikely = null;
-
-        for (final Map.Entry<Allele,Double> el : alleleMap.entrySet()) {
-            if ( onlyConsiderTheseAlleles != null && ! onlyConsiderTheseAlleles.contains(el.getKey()) )
-                continue;
-
-            if (el.getValue() > maxLike) {
-                prevMaxLike = maxLike;
-                maxLike = el.getValue();
-                secondMostLikely = mostLikelyAllele;
-                mostLikelyAllele = el.getKey();
-            } else if( el.getValue() > prevMaxLike ) {
-                secondMostLikely = el.getKey();
-                prevMaxLike = el.getValue();
-            }
-        }
-
-        return new MostLikelyAllele(mostLikelyAllele, secondMostLikely, maxLike, prevMaxLike);
-    }
-
-    /**
-     * Debug method to dump contents of object into string for display
-     */
-    public String toString() {
-        final StringBuilder sb = new StringBuilder();
-
-        sb.append("Alelles in map:");
-        for (final Allele a:alleles) {
-            sb.append(a.getDisplayString()+",");
-        }
-        sb.append("\n");
-        for (final Map.Entry <GATKSAMRecord, Map<Allele, Double>> el : getLikelihoodReadMap().entrySet() ) {
-            for (final Map.Entry<Allele,Double> eli : el.getValue().entrySet()) {
-                sb.append("Read "+el.getKey().getReadName()+". Allele:"+eli.getKey().getDisplayString()+" has likelihood="+Double.toString(eli.getValue())+"\n");
-            }
-
-        }
-        return sb.toString();
-    }
-
-    /**
-     * Remove reads from this map that are poorly modelled w.r.t. their per allele likelihoods
-     *
-     * Goes through each read in this map, and if it is poorly modelled removes it from the map.
-     *
-     * @see #readIsPoorlyModelled(org.broadinstitute.gatk.utils.sam.GATKSAMRecord, java.util.Collection, double)
-     * for more information about the poorly modelled test.
-     *
-     * @param maxErrorRatePerBase see equivalent parameter in #readIsPoorlyModelled
-     * @return the list of reads removed from this map because they are poorly modelled
-     */
-    public List<GATKSAMRecord> filterPoorlyModelledReads(final double maxErrorRatePerBase) {
-        final List<GATKSAMRecord> removedReads = new LinkedList<>();
-        final Iterator<Map.Entry<GATKSAMRecord, Map<Allele, Double>>> it = likelihoodReadMap.entrySet().iterator();
-        while ( it.hasNext() ) {
-            final Map.Entry<GATKSAMRecord, Map<Allele, Double>> record = it.next();
-            if ( readIsPoorlyModelled(record.getKey(), record.getValue().values(), maxErrorRatePerBase) ) {
-                it.remove();
-                removedReads.add(record.getKey());
-            }
-        }
-
-        return removedReads;
-    }
-
-    /**
-     * Is this read poorly modelled by all of the alleles in this map?
-     *
-     * A read is poorly modeled when it's likelihood is below what would be expected for a read
-     * originating from one of the alleles given the maxErrorRatePerBase of the reads in general.
-     *
-     * This function makes a number of key assumptions.  First, that the likelihoods reflect the total likelihood
-     * of the read.  In other words, that the read would be fully explained by one of the alleles.  This means
-     * that the allele should be something like the full haplotype from which the read might originate.
-     *
-     * It further assumes that each error in the read occurs with likelihood of -3 (Q30 confidence per base).  So
-     * a read with a 10% error rate with Q30 bases that's 100 bp long we'd expect to see 10 real Q30 errors
-     * even against the true haplotype.  So for this read to be well modelled by at least one allele we'd expect
-     * a likelihood to be >= 10 * -3.
-     *
-     * @param read the read we want to evaluate
-     * @param log10Likelihoods a list of the log10 likelihoods of the read against a set of haplotypes.
-     * @param maxErrorRatePerBase the maximum error rate we'd expect for this read per base, in real space.  So
-     *                            0.01 means a 1% error rate
-     * @return true if none of the log10 likelihoods imply that the read truly originated from one of the haplotypes
-     */
-    protected boolean readIsPoorlyModelled(final GATKSAMRecord read, final Collection<Double> log10Likelihoods, final double maxErrorRatePerBase) {
-        final double maxErrorsForRead = Math.min(2.0, Math.ceil(read.getReadLength() * maxErrorRatePerBase));
-        final double log10QualPerBase = -4.0;
-        final double log10MaxLikelihoodForTrueAllele = maxErrorsForRead * log10QualPerBase;
-
-        for ( final double log10Likelihood : log10Likelihoods )
-            if ( log10Likelihood >= log10MaxLikelihoodForTrueAllele )
-                return false;
-
-        return true;
-    }
-
-    /**
-     * Get an unmodifiable set of the unique alleles in this PerReadAlleleLikelihoodMap
-     * @return a non-null unmodifiable map
-     */
-    public Set<Allele> getAllelesSet() {
-        return Collections.unmodifiableSet(allelesSet.keySet());
-    }
-
-    /**
-     * Loop over all of the reads in this likelihood map and realign them to its most likely haplotype
-     * @param haplotypes            the collection of haplotypes
-     * @param paddedReferenceLoc    the active region
-     */
-    public void realignReadsToMostLikelyHaplotype(final Collection<Haplotype> haplotypes, final GenomeLoc paddedReferenceLoc) {
-
-        // we need to remap the Alleles back to the Haplotypes; inefficient but unfortunately this is a requirement currently
-        final Map<Allele, Haplotype> alleleToHaplotypeMap = new HashMap<>(haplotypes.size());
-        for ( final Haplotype haplotype : haplotypes )
-            alleleToHaplotypeMap.put(Allele.create(haplotype.getBases()), haplotype);
-
-        final Map<GATKSAMRecord, Map<Allele, Double>> newLikelihoodReadMap = new LinkedHashMap<>(likelihoodReadMap.size());
-        for( final Map.Entry<GATKSAMRecord, Map<Allele, Double>> entry : likelihoodReadMap.entrySet() ) {
-            final MostLikelyAllele bestAllele = PerReadAlleleLikelihoodMap.getMostLikelyAllele(entry.getValue());
-            final GATKSAMRecord alignedToRef = AlignmentUtils.createReadAlignedToRef(entry.getKey(), alleleToHaplotypeMap.get(bestAllele.getMostLikelyAllele()), paddedReferenceLoc.getStart(), bestAllele.isInformative());
-            newLikelihoodReadMap.put(alignedToRef, entry.getValue());
-        }
-
-        likelihoodReadMap.clear();
-        likelihoodReadMap.putAll(newLikelihoodReadMap);
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/genotyper/ReadLikelihoods.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/genotyper/ReadLikelihoods.java
deleted file mode 100644
index fa9fc30..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/genotyper/ReadLikelihoods.java
+++ /dev/null
@@ -1,1587 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.genotyper;
-
-import htsjdk.variant.variantcontext.Allele;
-import it.unimi.dsi.fastutil.ints.IntArrayList;
-import it.unimi.dsi.fastutil.objects.Object2IntMap;
-import it.unimi.dsi.fastutil.objects.Object2IntOpenHashMap;
-import org.broadinstitute.gatk.engine.downsampling.AlleleBiasedDownsamplingUtils;
-import org.broadinstitute.gatk.tools.walkers.genotyper.*;
-import org.broadinstitute.gatk.utils.GenomeLoc;
-import org.broadinstitute.gatk.utils.Utils;
-import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
-import org.broadinstitute.gatk.utils.variant.GATKVariantContextUtils;
-
-import java.util.*;
-
-/**
- * Read-likelihoods container implementation based on integer indexed arrays.
- *
- * @param <A> the type of the allele the likelihood makes reference to.
- *
- * @author Valentin Ruano-Rubio <valentin at broadinstitute.org>
- */
-public class ReadLikelihoods<A extends Allele> implements SampleList, AlleleList<A>, Cloneable {
-
-    /**
-     * Reads by sample index. Each sub array contains reference to the reads of the ith sample.
-     */
-    private GATKSAMRecord[][] readsBySampleIndex;
-
-    /**
-     * Indexed per sample, allele and finally read (within sample).
-     * <p>
-     *     valuesBySampleIndex[s][a][r] == lnLk(R_r | A_a) where R_r comes from Sample s.
-     * </p>
-     */
-    private double[][][] valuesBySampleIndex;
-
-    /**
-     * Sample list
-     */
-    private final SampleList samples;
-
-    /**
-     * Allele list
-     */
-    private AlleleList<A> alleles;
-
-    /**
-     * Cached allele list.
-     */
-    private List<A> alleleList;
-
-    /**
-     * Cached sample list.
-     */
-    private List<String> sampleList;
-
-    /**
-     * Maps from each read to its index within the sample.
-     *
-     * <p>In order to save CPU time the indices contained in this array (not the array itself) is
-     * lazily initialized by invoking {@link #readIndexBySampleIndex(int)}.</p>
-     */
-    private final Object2IntMap<GATKSAMRecord>[] readIndexBySampleIndex;
-
-    /**
-     * Index of the reference allele if any, otherwise -1
-     */
-    private int referenceAlleleIndex = -1;
-
-    /**
-     * Caches the read-list per sample list returned by {@link #sampleReads}
-     */
-    private final List<GATKSAMRecord>[] readListBySampleIndex;
-
-    /**
-     * Sample matrices lazily initialized (the elements not the array) by invoking {@link #sampleMatrix(int)}.
-     */
-    private final Matrix<A>[] sampleMatrices;
-
-    /**
-     * Constructs a new read-likelihood collection.
-     *
-     * <p>
-     *     The initial likelihoods for all allele-read combinations are
-     *     0.
-     * </p>
-     *
-     * @param samples all supported samples in the collection.
-     * @param alleles all supported alleles in the collection.
-     * @param reads reads stratified per sample.
-     *
-     * @throws IllegalArgumentException if any of {@code allele}, {@code samples}
-     * or {@code reads} is {@code null},
-     *  or if they contain null values.
-     */
-    @SuppressWarnings("unchecked")
-    public ReadLikelihoods(final SampleList samples, final AlleleList<A> alleles,
-                           final Map<String, List<GATKSAMRecord>> reads) {
-        if (alleles == null)
-            throw new IllegalArgumentException("allele list cannot be null");
-        if (samples == null)
-            throw new IllegalArgumentException("sample list cannot be null");
-        if (reads == null)
-            throw new IllegalArgumentException("read map cannot be null");
-
-        this.samples = samples;
-        this.alleles = alleles;
-
-        final int sampleCount = samples.sampleCount();
-        final int alleleCount = alleles.alleleCount();
-
-        readsBySampleIndex = new GATKSAMRecord[sampleCount][];
-        readListBySampleIndex = new List[sampleCount];
-        valuesBySampleIndex = new double[sampleCount][][];
-        referenceAlleleIndex = findReferenceAllele(alleles);
-
-        readIndexBySampleIndex = new Object2IntMap[sampleCount];
-
-        setupIndexes(reads, sampleCount, alleleCount);
-
-        sampleMatrices = (Matrix<A>[]) new Matrix[sampleCount];
-    }
-
-    // Add all the indices to alleles, sample and reads in the look-up maps.
-    private void setupIndexes(final Map<String, List<GATKSAMRecord>> reads, final int sampleCount, final int alleleCount) {
-        for (int i = 0; i < sampleCount; i++)
-            setupSampleData(i, reads, alleleCount);
-    }
-
-    // Assumes that {@link #samples} has been initialized with the sample names.
-    private void setupSampleData(final int sampleIndex, final Map<String, List<GATKSAMRecord>> readsBySample,
-                                 final int alleleCount) {
-        final String sample = samples.sampleAt(sampleIndex);
-
-        final List<GATKSAMRecord> reads = readsBySample.get(sample);
-        readsBySampleIndex[sampleIndex] = reads == null
-                ? new GATKSAMRecord[0]
-                : reads.toArray(new GATKSAMRecord[reads.size()]);
-        final int sampleReadCount = readsBySampleIndex[sampleIndex].length;
-
-        final double[][] sampleValues = new double[alleleCount][sampleReadCount];
-        valuesBySampleIndex[sampleIndex] = sampleValues;
-    }
-
-    /**
-     * Create an independent copy of this read-likelihoods collection
-     */
-    public ReadLikelihoods<A> clone() {
-
-        final int sampleCount = samples.sampleCount();
-        final int alleleCount = alleles.alleleCount();
-
-        final double[][][] newLikelihoodValues = new double[sampleCount][alleleCount][];
-
-        @SuppressWarnings("unchecked")
-        final Object2IntMap<GATKSAMRecord>[] newReadIndexBySampleIndex = new Object2IntMap[sampleCount];
-        final GATKSAMRecord[][] newReadsBySampleIndex = new GATKSAMRecord[sampleCount][];
-
-        for (int s = 0; s < sampleCount; s++) {
-            newReadsBySampleIndex[s] = readsBySampleIndex[s].clone();
-            for (int a = 0; a < alleleCount; a++)
-                newLikelihoodValues[s][a] = valuesBySampleIndex[s][a].clone();
-        }
-
-        // Finally we create the new read-likelihood
-        return new ReadLikelihoods<>(alleles, samples,
-                newReadsBySampleIndex,
-                newReadIndexBySampleIndex, newLikelihoodValues);
-    }
-
-    // Internally used constructor.
-    @SuppressWarnings("unchecked")
-    private ReadLikelihoods(final AlleleList alleles, final SampleList samples,
-                            final GATKSAMRecord[][] readsBySampleIndex, final Object2IntMap<GATKSAMRecord>[] readIndex,
-                            final double[][][] values) {
-        this.samples = samples;
-        this.alleles = alleles;
-        this.readsBySampleIndex = readsBySampleIndex;
-        this.valuesBySampleIndex = values;
-        this.readIndexBySampleIndex = readIndex;
-        final int sampleCount = samples.sampleCount();
-        this.readListBySampleIndex = new List[sampleCount];
-
-        referenceAlleleIndex = findReferenceAllele(alleles);
-        sampleMatrices = (Matrix<A>[]) new Matrix[sampleCount];
-    }
-
-    // Search for the reference allele, if not found the index is -1.
-    private int findReferenceAllele(final AlleleList<A> alleles) {
-        final int alleleCount = alleles.alleleCount();
-        for (int i = 0; i < alleleCount; i++)
-            if (alleles.alleleAt(i).isReference())
-                return i;
-        return -1;
-    }
-
-    /**
-     * Returns the index of a sample within the likelihood collection.
-     *
-     * @param sample the query sample.
-     *
-     * @throws IllegalArgumentException if {@code sample} is {@code null}.
-     * @return -1 if the allele is not included, 0 or greater otherwise.
-     */
-    public int sampleIndex(final String sample) {
-        return samples.sampleIndex(sample);
-    }
-
-    /**
-     * Number of samples included in the likelihood collection.
-     * @return 0 or greater.
-     */
-    public int sampleCount() {
-        return samples.sampleCount();
-    }
-
-    /**
-     * Returns sample name given its index.
-     *
-     * @param sampleIndex query index.
-     *
-     * @throws IllegalArgumentException if {@code sampleIndex} is negative.
-     *
-     * @return never {@code null}.
-     */
-    public String sampleAt(final int sampleIndex) {
-        return samples.sampleAt(sampleIndex);
-    }
-
-    /**
-     * Returns the index of an allele within the likelihood collection.
-     *
-     * @param allele the query allele.
-     *
-     * @throws IllegalArgumentException if {@code allele} is {@code null}.
-     *
-     * @return -1 if the allele is not included, 0 or greater otherwise.
-     */
-    public int alleleIndex(final A allele) {
-        return alleles.alleleIndex(allele);
-    }
-
-    /**
-     * Returns number of alleles in the collection.
-     * @return 0 or greater.
-     */
-    @SuppressWarnings("unused")
-    public int alleleCount() {
-        return alleles.alleleCount();
-    }
-
-    /**
-     * Returns the allele given its index.
-     *
-     * @param alleleIndex the allele index.
-     *
-     * @throws IllegalArgumentException the allele index is {@code null}.
-     *
-     * @return never {@code null}.
-     */
-    public A alleleAt(final int alleleIndex) {
-        return alleles.alleleAt(alleleIndex);
-    }
-
-    /**
-     * Returns the reads that belong to a sample sorted by their index (within that sample).
-     *
-     * @param sampleIndex the requested sample.
-     * @return never {@code null} but perhaps a zero-length array if there is no reads in sample. No element in
-     *   the array will be null.
-     */
-    public List<GATKSAMRecord> sampleReads(final int sampleIndex) {
-        checkSampleIndex(sampleIndex);
-        final List<GATKSAMRecord> extantList = readListBySampleIndex[sampleIndex];
-        if (extantList == null)
-            return readListBySampleIndex[sampleIndex] = Collections.unmodifiableList(Arrays.asList(readsBySampleIndex[sampleIndex]));
-        else
-            return extantList;
-    }
-
-    /**
-     * Returns a read vs allele likelihood matrix corresponding to a sample.
-     *
-     * @param sampleIndex target sample.
-     *
-     * @throws IllegalArgumentException if {@code sampleIndex} is not null.
-     *
-     * @return never {@code null}
-     */
-    public Matrix<A> sampleMatrix(final int sampleIndex) {
-        checkSampleIndex(sampleIndex);
-        final Matrix<A> extantResult = sampleMatrices[sampleIndex];
-        if (extantResult != null)
-            return extantResult;
-        else
-            return sampleMatrices[sampleIndex] = new SampleMatrix(sampleIndex);
-    }
-
-    /**
-     * Adjusts likelihoods so that for each read, the best allele likelihood is 0 and caps the minimum likelihood
-     * of any allele for each read based on the maximum alternative allele likelihood.
-     *
-     * @param bestToZero set the best likelihood to 0, others will be subtracted the same amount.
-     * @param maximumLikelihoodDifferenceCap maximum difference between the best alternative allele likelihood
-     *                                           and any other likelihood.
-     *
-     * @throws IllegalArgumentException if {@code maximumDifferenceWithBestAlternative} is not 0 or less.
-     */
-    public void normalizeLikelihoods(final boolean bestToZero, final double maximumLikelihoodDifferenceCap) {
-        if (maximumLikelihoodDifferenceCap >= 0.0 || Double.isNaN(maximumLikelihoodDifferenceCap))
-            throw new IllegalArgumentException("the minimum reference likelihood fall cannot be positive");
-
-        if (maximumLikelihoodDifferenceCap == Double.NEGATIVE_INFINITY && !bestToZero)
-            return;
-
-        final int alleleCount = alleles.alleleCount();
-        if (alleleCount == 0) // trivial case there is no alleles.
-            return;
-        else if (alleleCount == 1 && !bestToZero)
-            return;
-
-        for (int s = 0; s < valuesBySampleIndex.length; s++) {
-            final double[][] sampleValues = valuesBySampleIndex[s];
-            final int readCount = readsBySampleIndex[s].length;
-            for (int r = 0; r < readCount; r++)
-                normalizeLikelihoodsPerRead(bestToZero, maximumLikelihoodDifferenceCap, sampleValues, s, r);
-        }
-    }
-
-    // Does the normalizeLikelihoods job for each read.
-    private void normalizeLikelihoodsPerRead(final boolean bestToZero, final double maximumBestAltLikelihoodDifference,
-                                             final double[][] sampleValues, final int sampleIndex, final int readIndex) {
-
-        final BestAllele bestAlternativeAllele = searchBestAllele(sampleIndex,readIndex,false);
-
-        final double worstLikelihoodCap = bestAlternativeAllele.likelihood + maximumBestAltLikelihoodDifference;
-
-        final double referenceLikelihood = referenceAlleleIndex == -1 ? Double.NEGATIVE_INFINITY :
-                sampleValues[referenceAlleleIndex][readIndex];
-
-
-        final double bestAbsoluteLikelihood = Math.max(bestAlternativeAllele.likelihood,referenceLikelihood);
-
-        final int alleleCount = alleles.alleleCount();
-        if (bestToZero) {
-            if (bestAbsoluteLikelihood == Double.NEGATIVE_INFINITY)
-                for (int a = 0; a < alleleCount; a++)
-                    sampleValues[a][readIndex] = 0;
-            else if (worstLikelihoodCap != Double.NEGATIVE_INFINITY)
-                for (int a = 0; a < alleleCount; a++)
-                    sampleValues[a][readIndex] = (sampleValues[a][readIndex] < worstLikelihoodCap ? worstLikelihoodCap : sampleValues[a][readIndex]) - bestAbsoluteLikelihood;
-            else
-                for (int a = 0; a < alleleCount; a++)
-                    sampleValues[a][readIndex] -= bestAbsoluteLikelihood;
-        } else  // else if (maximumReferenceLikelihoodFall != Double.NEGATIVE_INFINITY ) { //
-            // Guarantee to be the case by enclosing code.
-            for (int a = 0; a < alleleCount; a++)
-                if (sampleValues[a][readIndex] < worstLikelihoodCap)
-                    sampleValues[a][readIndex] = worstLikelihoodCap;
-    }
-
-    /**
-     * Returns the samples in this read-likelihood collection.
-     * <p>
-     *     Samples are sorted by their index in the collection.
-     * </p>
-     *
-     * <p>
-     *     The returned list is an unmodifiable view on the read-likelihoods sample list.
-     * </p>
-     *
-     * @return never {@code null}.
-     */
-    public List<String> samples() {
-        return sampleList == null ? sampleList = SampleListUtils.asList(samples) : sampleList;
-
-    }
-
-    /**
-     * Returns the samples in this read-likelihood collection.
-     * <p>
-     *     Samples are sorted by their index in the collection.
-     * </p>
-     *
-     * <p>
-     *     The returned list is an unmodifiable. It will not be updated if the collection
-     *     allele list changes.
-     * </p>
-     *
-     * @return never {@code null}.
-     */
-    public List<A> alleles() {
-        return alleleList == null ? alleleList = AlleleListUtils.asList(alleles) : alleleList;
-    }
-
-
-    /**
-     * Search the best allele for a read.
-     *
-     * @param sampleIndex including sample index.
-     * @param readIndex  target read index.
-     *
-     * @return never {@code null}, but with {@link BestAllele#allele allele} == {@code null}
-     * if non-could be found.
-     */
-    private BestAllele searchBestAllele(final int sampleIndex, final int readIndex, final boolean canBeReference) {
-        final int alleleCount = alleles.alleleCount();
-        if (alleleCount == 0 || (alleleCount == 1 && referenceAlleleIndex == 0 && !canBeReference))
-            return new BestAllele(sampleIndex,readIndex,-1,Double.NEGATIVE_INFINITY,Double.NEGATIVE_INFINITY);
-
-        final double[][] sampleValues = valuesBySampleIndex[sampleIndex];
-        int bestAlleleIndex = canBeReference || referenceAlleleIndex != 0 ? 0 : 1;
-
-        double bestLikelihood = sampleValues[bestAlleleIndex][readIndex];
-        double secondBestLikelihood = Double.NEGATIVE_INFINITY;
-        for (int a = bestAlleleIndex + 1; a < alleleCount; a++) {
-            if (!canBeReference && referenceAlleleIndex == a)
-                continue;
-            final double candidateLikelihood = sampleValues[a][readIndex];
-            if (candidateLikelihood > bestLikelihood) {
-                bestAlleleIndex = a;
-                secondBestLikelihood = bestLikelihood;
-                bestLikelihood = candidateLikelihood;
-            } else if (candidateLikelihood > secondBestLikelihood) {
-                secondBestLikelihood = candidateLikelihood;
-            }
-        }
-        return new BestAllele(sampleIndex,readIndex,bestAlleleIndex,bestLikelihood,secondBestLikelihood);
-    }
-
-    public void changeReads(final Map<GATKSAMRecord, GATKSAMRecord> readRealignments) {
-        final int sampleCount = samples.sampleCount();
-        for (int s = 0; s < sampleCount; s++) {
-            final GATKSAMRecord[] sampleReads = readsBySampleIndex[s];
-            final Object2IntMap<GATKSAMRecord> readIndex = readIndexBySampleIndex[s];
-            final int sampleReadCount = sampleReads.length;
-            for (int r = 0; r < sampleReadCount; r++) {
-                final GATKSAMRecord read = sampleReads[r];
-                final GATKSAMRecord replacement = readRealignments.get(read);
-                if (replacement == null)
-                    continue;
-                sampleReads[r] = replacement;
-                if (readIndex != null) {
-                    readIndex.remove(read);
-                    readIndex.put(replacement, r);
-                }
-            }
-        }
-    }
-
-    /**
-     * Add alleles that are missing in the read-likelihoods collection giving all reads a default
-     * likelihood value.
-     * @param candidateAlleles the potentially missing alleles.
-     * @param defaultLikelihood the default read likelihood value for that allele.
-     *
-     * @throws IllegalArgumentException if {@code candidateAlleles} is {@code null} or there is more than
-     * one missing allele that is a reference or there is one but the collection already has
-     * a reference allele.
-     */
-    public void addMissingAlleles(final Collection<A> candidateAlleles, final double defaultLikelihood) {
-        if (candidateAlleles == null)
-            throw new IllegalArgumentException("the candidateAlleles list cannot be null");
-        if (candidateAlleles.isEmpty())
-            return;
-        final List<A> allelesToAdd = new ArrayList<>(candidateAlleles.size());
-        for (final A allele : candidateAlleles)
-            if (alleles.alleleIndex(allele) == -1)
-                allelesToAdd.add(allele);
-
-        if (allelesToAdd.isEmpty())
-            return;
-
-        final int oldAlleleCount = alleles.alleleCount();
-        final int newAlleleCount = alleles.alleleCount() + allelesToAdd.size();
-
-        alleleList = null;
-        int referenceIndex = this.referenceAlleleIndex;
-        @SuppressWarnings("unchecked")
-        final A[] newAlleles = (A[]) new Allele[newAlleleCount];
-        for (int a = 0; a < oldAlleleCount; a++)
-            newAlleles[a] = this.alleleAt(a);
-        int newIndex = oldAlleleCount;
-        for (final A allele : allelesToAdd) {
-            if (allele.isReference()) {
-                if (referenceIndex != -1)
-                    throw new IllegalArgumentException("there cannot be more than one reference allele");
-                referenceIndex = newIndex;
-            }
-            newAlleles[newIndex++] = allele;
-        }
-
-        alleles = new IndexedAlleleList<>(newAlleles);
-
-        if (referenceIndex != -1)
-            referenceAlleleIndex = referenceIndex;
-
-        final int sampleCount = samples.sampleCount();
-        for (int s = 0; s < sampleCount; s++) {
-            final int sampleReadCount = readsBySampleIndex[s].length;
-            final double[][] newValuesBySampleIndex = Arrays.copyOf(valuesBySampleIndex[s],newAlleleCount);
-            for (int a = oldAlleleCount; a < newAlleleCount; a++) {
-                newValuesBySampleIndex[a] = new double[sampleReadCount];
-                if (defaultLikelihood != 0.0)
-                    Arrays.fill(newValuesBySampleIndex[a],defaultLikelihood);
-            }
-            valuesBySampleIndex[s] = newValuesBySampleIndex;
-        }
-    }
-
-    /**
-     * Likelihood matrix between a set of alleles and reads.
-     * @param <A> the allele-type.
-     */
-    public interface Matrix<A extends Allele> extends AlleleList<A> {
-
-        /**
-         * List of reads in the matrix sorted by their index therein.
-         * @return never {@code null}.
-         */
-        public List<GATKSAMRecord> reads();
-
-        /**
-         * List of alleles in the matrix sorted by their index in the collection.
-         * @return never {@code null}.
-         */
-        public List<A> alleles();
-
-        /**
-         * Set the likelihood of a read given an allele through their indices.
-         *
-         * @param alleleIndex the target allele index.
-         * @param readIndex the target read index.
-         * @param value new likelihood value for the target read give the target allele.
-         *
-         * @throws IllegalArgumentException if {@code alleleIndex} or {@code readIndex}
-         *  are not valid allele and read indices respectively.
-         */
-        public void set(final int alleleIndex, final int readIndex, final double value);
-
-        /**
-         * Returns the likelihood of a read given a haplotype.
-         *
-         * @param alleleIndex the index of the given haplotype.
-         * @param readIndex the index of the target read.
-         *
-         * @throws IllegalArgumentException if {@code alleleIndex} or {@code readIndex} is not a
-         * valid allele or read index respectively.
-         *
-         * @return the requested likelihood, whatever value was provided using {@link #set(int,int,double) set}
-         *    or 0.0 if none was set.
-         */
-        public double get(final int alleleIndex, final int readIndex);
-
-        /**
-         * Queries the index of an allele in the matrix.
-         *
-         * @param allele the target allele.
-         *
-         * @throws IllegalArgumentException if {@code allele} is {@code null}.
-         * @return -1 if such allele does not exist, otherwise its index which 0 or greater.
-         */
-        @SuppressWarnings("unused")
-        public int alleleIndex(final A allele);
-
-        /**
-         * Queries the index of a read in the matrix.
-         *
-         * @param read the target read.
-         *
-         * @throws IllegalArgumentException if {@code read} is {@code null}.
-         *
-         * @return -1 if there is not such a read in the matrix, otherwise its index
-         *    which is 0 or greater.
-         */
-        @SuppressWarnings("unused")
-        public int readIndex(final GATKSAMRecord read);
-
-        /**
-         * Number of allele in the matrix.
-         * @return never negative.
-         */
-        public int alleleCount();
-
-        /**
-         * Number of reads in the matrix.
-         * @return never negative.
-         */
-        public int readCount();
-
-        /**
-         * Returns the allele given its index.
-         *
-         * @param alleleIndex the target allele index.
-         *
-         * @throws IllegalArgumentException if {@code alleleIndex} is not a valid allele index.
-         * @return never {@code null}.
-         */
-        public A alleleAt(final int alleleIndex);
-
-        /**
-         * Returns the allele given its index.
-         *
-         * @param readIndex the target allele index.
-         *
-         * @throws IllegalArgumentException if {@code readIndex} is not a valid read index.
-         * @return never {@code null}.
-         */
-        public GATKSAMRecord readAt(final int readIndex);
-
-
-        /**
-         * Copies the likelihood of all the reads for a given allele into an array from a particular offset.
-         * @param alleleIndex the targeted allele
-         * @param dest the destination array.
-         * @param offset the copy offset within the destination allele
-         */
-        public void copyAlleleLikelihoods(final int alleleIndex, final double[] dest, final int offset);
-    }
-
-    /**
-     * Perform marginalization from an allele set to another (smaller one) taking the maximum value
-     * for each read in the original allele subset.
-     *
-     * @param newToOldAlleleMap map where the keys are the new alleles and the value list the original
-     *                          alleles that correspond to the new one.
-     * @return never {@code null}. The result will have the requested set of new alleles (keys in {@code newToOldAlleleMap}, and
-     * the same set of samples and reads as the original.
-     *
-     * @throws IllegalArgumentException is {@code newToOldAlleleMap} is {@code null} or contains {@code null} values,
-     *  or its values contain reference to non-existing alleles in this read-likelihood collection. Also no new allele
-     *  can have zero old alleles mapping nor two new alleles can make reference to the same old allele.
-     */
-    public <B extends Allele> ReadLikelihoods<B> marginalize(final Map<B, List<A>> newToOldAlleleMap) {
-
-        if (newToOldAlleleMap == null)
-            throw new IllegalArgumentException("the input allele mapping cannot be null");
-
-        @SuppressWarnings("unchecked")
-        final B[] newAlleles = newToOldAlleleMap.keySet().toArray((B[]) new Allele[newToOldAlleleMap.size()]);
-        final int oldAlleleCount = alleles.alleleCount();
-        final int newAlleleCount = newAlleles.length;
-
-        // we get the index correspondence between new old -> new allele, -1 entries mean that the old
-        // allele does not map to any new; supported but typically not the case.
-        final int[] oldToNewAlleleIndexMap = oldToNewAlleleIndexMap(newToOldAlleleMap, newAlleles, oldAlleleCount, newAlleleCount);
-
-        // We calculate the marginal likelihoods.
-
-        final double[][][] newLikelihoodValues = marginalLikelihoods(oldAlleleCount, newAlleleCount, oldToNewAlleleIndexMap, null);
-
-        final int sampleCount = samples.sampleCount();
-
-        @SuppressWarnings("unchecked")
-        final Object2IntMap<GATKSAMRecord>[] newReadIndexBySampleIndex = new Object2IntMap[sampleCount];
-        final GATKSAMRecord[][] newReadsBySampleIndex = new GATKSAMRecord[sampleCount][];
-
-        for (int s = 0; s < sampleCount; s++) {
-            newReadsBySampleIndex[s] = readsBySampleIndex[s].clone();
-        }
-
-        // Finally we create the new read-likelihood
-        return new ReadLikelihoods<>(new IndexedAlleleList(newAlleles), samples,
-                newReadsBySampleIndex,
-                newReadIndexBySampleIndex, newLikelihoodValues);
-    }
-
-
-    /**
-     * Perform marginalization from an allele set to another (smaller one) taking the maximum value
-     * for each read in the original allele subset.
-     *
-     * @param newToOldAlleleMap map where the keys are the new alleles and the value list the original
-     *                          alleles that correspond to the new one.
-     * @return never {@code null}. The result will have the requested set of new alleles (keys in {@code newToOldAlleleMap}, and
-     * the same set of samples and reads as the original.
-     *
-     * @param overlap if not {@code null}, only reads that overlap the location (with unclipping) will be present in
-     *                        the output read-collection.
-     *
-     * @throws IllegalArgumentException is {@code newToOldAlleleMap} is {@code null} or contains {@code null} values,
-     *  or its values contain reference to non-existing alleles in this read-likelihood collection. Also no new allele
-     *  can have zero old alleles mapping nor two new alleles can make reference to the same old allele.
-     */
-    public <B extends Allele> ReadLikelihoods<B> marginalize(final Map<B, List<A>> newToOldAlleleMap, final GenomeLoc overlap) {
-
-        if (overlap == null)
-            return marginalize(newToOldAlleleMap);
-
-        if (newToOldAlleleMap == null)
-            throw new IllegalArgumentException("the input allele mapping cannot be null");
-
-        @SuppressWarnings("unchecked")
-        final B[] newAlleles = newToOldAlleleMap.keySet().toArray((B[]) new Allele[newToOldAlleleMap.size()]);
-        final int oldAlleleCount = alleles.alleleCount();
-        final int newAlleleCount = newAlleles.length;
-
-        // we get the index correspondence between new old -> new allele, -1 entries mean that the old
-        // allele does not map to any new; supported but typically not the case.
-        final int[] oldToNewAlleleIndexMap = oldToNewAlleleIndexMap(newToOldAlleleMap, newAlleles, oldAlleleCount, newAlleleCount);
-
-        final int[][] readsToKeep = overlappingReadIndicesBySampleIndex(overlap);
-        // We calculate the marginal likelihoods.
-
-        final double[][][] newLikelihoodValues = marginalLikelihoods(oldAlleleCount, newAlleleCount, oldToNewAlleleIndexMap, readsToKeep);
-
-        final int sampleCount = samples.sampleCount();
-
-        @SuppressWarnings("unchecked")
-        final Object2IntMap<GATKSAMRecord>[] newReadIndexBySampleIndex = new Object2IntMap[sampleCount];
-        final GATKSAMRecord[][] newReadsBySampleIndex = new GATKSAMRecord[sampleCount][];
-
-        for (int s = 0; s < sampleCount; s++) {
-            final int[] sampleReadsToKeep = readsToKeep[s];
-            final GATKSAMRecord[] oldSampleReads = readsBySampleIndex[s];
-            final int oldSampleReadCount = oldSampleReads.length;
-            final int newSampleReadCount = sampleReadsToKeep.length;
-            if (newSampleReadCount == oldSampleReadCount) {
-                newReadsBySampleIndex[s] = oldSampleReads.clone();
-            } else {
-                newReadsBySampleIndex[s] = new GATKSAMRecord[newSampleReadCount];
-                for (int i = 0; i < newSampleReadCount; i++)
-                    newReadsBySampleIndex[s][i] = oldSampleReads[sampleReadsToKeep[i]];
-            }
-        }
-
-        // Finally we create the new read-likelihood
-        return new ReadLikelihoods<>(new IndexedAlleleList(newAlleles), samples,
-                newReadsBySampleIndex,
-                newReadIndexBySampleIndex, newLikelihoodValues);
-    }
-
-    private int[][] overlappingReadIndicesBySampleIndex(final GenomeLoc overlap) {
-        if (overlap == null)
-            return null;
-        final int sampleCount = samples.sampleCount();
-        final int[][] result = new int[sampleCount][];
-        final IntArrayList buffer = new IntArrayList(200);
-        final int referenceIndex = overlap.getContigIndex();
-        final int overlapStart = overlap.getStart();
-        final int overlapEnd = overlap.getStop();
-        for (int s = 0; s < sampleCount; s++) {
-            buffer.clear();
-            final GATKSAMRecord[] sampleReads = readsBySampleIndex[s];
-            final int sampleReadCount = sampleReads.length;
-            buffer.ensureCapacity(sampleReadCount);
-            for (int r = 0; r < sampleReadCount; r++)
-                if (unclippedReadOverlapsRegion(sampleReads[r], referenceIndex, overlapStart, overlapEnd))
-                    buffer.add(r);
-            result[s] = buffer.toIntArray();
-        }
-        return result;
-    }
-
-    public static boolean unclippedReadOverlapsRegion(final GATKSAMRecord read, final GenomeLoc region) {
-        return unclippedReadOverlapsRegion(read, region.getContigIndex(), region.getStart(), region.getStop());
-    }
-
-    private static boolean unclippedReadOverlapsRegion(final GATKSAMRecord sampleRead, final int referenceIndex, final int start, final int end) {
-        final int readReference = sampleRead.getReferenceIndex();
-       if (readReference != referenceIndex)
-            return false;
-
-        final int readStart = sampleRead.getUnclippedStart();
-        if (readStart > end)
-            return false;
-
-        final int readEnd = sampleRead.getReadUnmappedFlag() ? sampleRead.getUnclippedEnd()
-                : Math.max(sampleRead.getUnclippedEnd(), sampleRead.getUnclippedStart());
-        return readEnd >= start;
-    }
-
-    // Calculate the marginal likelihoods considering the old -> new allele index mapping.
-    private double[][][] marginalLikelihoods(final int oldAlleleCount, final int newAlleleCount, final int[] oldToNewAlleleIndexMap, final int[][] readsToKeep) {
-
-        final int sampleCount = samples.sampleCount();
-        final double[][][] result = new double[sampleCount][][];
-
-        for (int s = 0; s < sampleCount; s++) {
-            final int sampleReadCount = readsBySampleIndex[s].length;
-            final double[][] oldSampleValues = valuesBySampleIndex[s];
-            final int[] sampleReadToKeep = readsToKeep == null || readsToKeep[s].length == sampleReadCount ? null : readsToKeep[s];
-            final int newSampleReadCount = sampleReadToKeep == null ? sampleReadCount : sampleReadToKeep.length;
-            final double[][] newSampleValues = result[s] = new double[newAlleleCount][newSampleReadCount];
-            // We initiate all likelihoods to -Inf.
-            for (int a = 0; a < newAlleleCount; a++)
-                Arrays.fill(newSampleValues[a], Double.NEGATIVE_INFINITY);
-            // For each old allele and read we update the new table keeping the maximum likelihood.
-            for (int r = 0; r < newSampleReadCount; r++) {
-                for (int a = 0; a < oldAlleleCount; a++) {
-                    final int oldReadIndex = newSampleReadCount == sampleReadCount ? r : sampleReadToKeep[r];
-                    final int newAlleleIndex = oldToNewAlleleIndexMap[a];
-                    if (newAlleleIndex == -1)
-                        continue;
-                    final double likelihood = oldSampleValues[a][oldReadIndex];
-                    if (likelihood > newSampleValues[newAlleleIndex][r])
-                        newSampleValues[newAlleleIndex][r] = likelihood;
-                }
-            }
-        }
-        return result;
-    }
-
-    /**
-     * Given a collection of likelihood in the old map format, it creates the corresponding read-likelihoods collection.
-     *
-     * @param map the likelihoods to transform.
-     *
-     * @throws IllegalArgumentException if {@code map} is {@code null}.
-     *
-     * @return never {@code null}.
-     */
-    public static ReadLikelihoods<Allele> fromPerAlleleReadLikelihoodsMap(final Map<String,PerReadAlleleLikelihoodMap> map) {
-
-        // First we need to create the read-likelihood collection with all required alleles, samples and reads.
-        final SampleList sampleList = new IndexedSampleList(map.keySet());
-        final Set<Allele> alleles = new LinkedHashSet<>(10);
-        final Map<String,List<GATKSAMRecord>> sampleToReads = new HashMap<>(sampleList.sampleCount());
-        for (final Map.Entry<String,PerReadAlleleLikelihoodMap> entry : map.entrySet()) {
-            final String sample = entry.getKey();
-            final PerReadAlleleLikelihoodMap sampleLikelihoods = entry.getValue();
-            alleles.addAll(sampleLikelihoods.getAllelesSet());
-            sampleToReads.put(sample,new ArrayList<>(sampleLikelihoods.getLikelihoodReadMap().keySet()));
-        }
-
-        final AlleleList<Allele> alleleList = new IndexedAlleleList<>(alleles);
-        final ReadLikelihoods<Allele> result = new ReadLikelihoods<>(sampleList,alleleList,sampleToReads);
-
-        // Now set the likelihoods.
-        for (final Map.Entry<String,PerReadAlleleLikelihoodMap> sampleEntry : map.entrySet()) {
-            final ReadLikelihoods.Matrix<Allele> sampleMatrix = result.sampleMatrix(result.sampleIndex(sampleEntry.getKey()));
-            for (final Map.Entry<GATKSAMRecord,Map<Allele,Double>> readEntry : sampleEntry.getValue().getLikelihoodReadMap().entrySet()) {
-                final GATKSAMRecord read = readEntry.getKey();
-                final int readIndex = sampleMatrix.readIndex(read);
-                for (final Map.Entry<Allele,Double> alleleEntry : readEntry.getValue().entrySet()) {
-                    final int alleleIndex = result.alleleIndex(alleleEntry.getKey());
-                    sampleMatrix.set(alleleIndex,readIndex,alleleEntry.getValue());
-                }
-            }
-        }
-        return result;
-    }
-
-    // calculates an old to new allele index map array.
-    private <B extends Allele> int[] oldToNewAlleleIndexMap(final Map<B, List<A>> newToOldAlleleMap, final B[] newAlleles,
-                                                            final int oldAlleleCount, final int newAlleleCount) {
-
-        final int[] oldToNewAlleleIndexMap = new int[oldAlleleCount];
-        Arrays.fill(oldToNewAlleleIndexMap, -1);  // -1 indicate that there is no new allele that make reference to that old one.
-
-        for (int i = 0; i < newAlleleCount; i++) {
-            final B newAllele = newAlleles[i];
-            if (newAllele == null)
-                throw new IllegalArgumentException("input alleles cannot be null");
-            final List<A> oldAlleles = newToOldAlleleMap.get(newAllele);
-            if (oldAlleles == null)
-                throw new IllegalArgumentException("no new allele list can be null");
-            for (final A oldAllele : oldAlleles) {
-                if (oldAllele == null)
-                    throw new IllegalArgumentException("old alleles cannot be null");
-                final int oldAlleleIndex = alleleIndex(oldAllele);
-                if (oldAlleleIndex == -1)
-                    throw new IllegalArgumentException("missing old allele " + oldAllele + " in likelihood collection ");
-                if (oldToNewAlleleIndexMap[oldAlleleIndex] != -1)
-                    throw new IllegalArgumentException("collision: two new alleles make reference to the same old allele");
-                oldToNewAlleleIndexMap[oldAlleleIndex] = i;
-            }
-        }
-        return oldToNewAlleleIndexMap;
-    }
-
-    /**
-     * Remove those reads that do not overlap certain genomic location.
-     *
-     * <p>
-     *     This method modifies the current read-likelihoods collection.
-     * </p>
-     *
-     * @param location the target location.
-     *
-     * @throws IllegalArgumentException the location cannot be {@code null} nor unmapped.
-     */
-    @SuppressWarnings("unused")
-    public void filterToOnlyOverlappingUnclippedReads(final GenomeLoc location) {
-        if (location == null)
-            throw new IllegalArgumentException("the location cannot be null");
-        if (location.isUnmapped())
-            throw new IllegalArgumentException("the location cannot be unmapped");
-
-        final int sampleCount = samples.sampleCount();
-
-        final int locContig = location.getContigIndex();
-        final int locStart = location.getStart();
-        final int locEnd = location.getStop();
-
-        final int alleleCount = alleles.alleleCount();
-        final IntArrayList removeIndices = new IntArrayList(10);
-        for (int s = 0; s < sampleCount; s++) {
-            int readRemoveCount = 0;
-            final GATKSAMRecord[] sampleReads = readsBySampleIndex[s];
-            final int sampleReadCount = sampleReads.length;
-            for (int r = 0; r < sampleReadCount; r++)
-                if (!unclippedReadOverlapsRegion(sampleReads[r], locContig, locStart, locEnd))
-                    removeIndices.add(r);
-            removeSampleReads(s,removeIndices,alleleCount);
-            removeIndices.clear();
-        }
-    }
-
-    // Compare the read coordinates to the location of interest.
-    private boolean readOverlapsLocation(final String contig, final int locStart,
-                                         final int locEnd, final GATKSAMRecord read) {
-        final boolean overlaps;
-
-        if (read.getReadUnmappedFlag())
-            overlaps = false;
-        else if (!read.getReferenceName().equals(contig))
-            overlaps = false;
-        else {
-            int alnStart = read.getAlignmentStart();
-            int alnStop = read.getAlignmentEnd();
-            if (alnStart > alnStop) { // Paranoia? based on GLP.createGenomeLoc(Read) this can happen?.
-                final int end = alnStart;
-                alnStart = alnStop;
-                alnStop = end;
-            }
-            overlaps = !(alnStop < locStart || alnStart > locEnd);
-        }
-        return overlaps;
-    }
-
-    /**
-     * Removes those read that the best possible likelihood given any allele is just too low.
-     *
-     * <p>
-     *     This is determined by a maximum error per read-base against the best likelihood possible.
-     * </p>
-     *
-     * @param maximumErrorPerBase the minimum acceptable error rate per read base, must be
-     *                            a positive number.
-     *
-     * @throws IllegalStateException is not supported for read-likelihood that do not contain alleles.
-     *
-     * @throws IllegalArgumentException if {@code maximumErrorPerBase} is negative.
-     */
-    public void filterPoorlyModeledReads(final double maximumErrorPerBase) {
-        if (alleles.alleleCount() == 0)
-            throw new IllegalStateException("unsupported for read-likelihood collections with no alleles");
-        if (Double.isNaN(maximumErrorPerBase) || maximumErrorPerBase <= 0.0)
-            throw new IllegalArgumentException("the maximum error per base must be a positive number");
-        final int sampleCount = samples.sampleCount();
-
-        final int alleleCount = alleles.alleleCount();
-        final IntArrayList removeIndices = new IntArrayList(10);
-        for (int s = 0; s < sampleCount; s++) {
-            final GATKSAMRecord[] sampleReads = readsBySampleIndex[s];
-            final int sampleReadCount = sampleReads.length;
-            for (int r = 0; r < sampleReadCount; r++) {
-                final GATKSAMRecord read = sampleReads[r];
-                if (readIsPoorlyModelled(s,r,read, maximumErrorPerBase))
-                    removeIndices.add(r);
-            }
-            removeSampleReads(s, removeIndices, alleleCount);
-            removeIndices.clear();
-        }
-    }
-
-    // Check whether the read is poorly modelled.
-    protected boolean readIsPoorlyModelled(final int sampleIndex, final int readIndex, final GATKSAMRecord read, final double maxErrorRatePerBase) {
-        final double maxErrorsForRead = Math.min(2.0, Math.ceil(read.getReadLength() * maxErrorRatePerBase));
-        final double log10QualPerBase = -4.0;
-        final double log10MaxLikelihoodForTrueAllele = maxErrorsForRead * log10QualPerBase;
-
-        final int alleleCount = alleles.alleleCount();
-        final double[][] sampleValues = valuesBySampleIndex[sampleIndex];
-        for (int a = 0; a < alleleCount; a++)
-            if (sampleValues[a][readIndex] >= log10MaxLikelihoodForTrueAllele)
-                return false;
-        return true;
-    }
-
-
-    /**
-     * Add more reads to the collection.
-     *
-     * @param readsBySample reads to add.
-     * @param initialLikelihood the likelihood for the new entries.
-     *
-     * @throws IllegalArgumentException if {@code readsBySample} is {@code null} or {@code readsBySample} contains
-     *  {@code null} reads, or {@code readsBySample} contains read that are already present in the read-likelihood
-     *  collection.
-     */
-    public void addReads(final Map<String,List<GATKSAMRecord>> readsBySample, final double initialLikelihood) {
-
-        for (final Map.Entry<String,List<GATKSAMRecord>> entry : readsBySample.entrySet()) {
-
-            final String sample = entry.getKey();
-            final List<GATKSAMRecord> newSampleReads = entry.getValue();
-            final int sampleIndex = samples.sampleIndex(sample);
-
-            if (sampleIndex == -1)
-                throw new IllegalArgumentException("input sample " + sample +
-                        " is not part of the read-likelihoods collection");
-
-            if (newSampleReads == null || newSampleReads.size() == 0)
-                continue;
-
-            final int sampleReadCount = readsBySampleIndex[sampleIndex].length;
-            final int newSampleReadCount = sampleReadCount + newSampleReads.size();
-
-            appendReads(newSampleReads, sampleIndex, sampleReadCount, newSampleReadCount);
-            extendsLikelihoodArrays(initialLikelihood, sampleIndex, sampleReadCount, newSampleReadCount);
-        }
-    }
-
-    // Extends the likelihood arrays-matrices.
-    private void extendsLikelihoodArrays(double initialLikelihood, int sampleIndex, int sampleReadCount, int newSampleReadCount) {
-        final double[][] sampleValues = valuesBySampleIndex[sampleIndex];
-        final int alleleCount = alleles.alleleCount();
-        for (int a = 0; a < alleleCount; a++)
-            sampleValues[a] = Arrays.copyOf(sampleValues[a], newSampleReadCount);
-        if (initialLikelihood != 0.0) // the default array new value.
-            for (int a = 0; a < alleleCount; a++)
-                Arrays.fill(sampleValues[a],sampleReadCount,newSampleReadCount,initialLikelihood);
-    }
-
-    // Append the new read reference into the structure per-sample.
-    private void appendReads(final List<GATKSAMRecord> newSampleReads, final int sampleIndex,
-                             final int sampleReadCount, final int newSampleReadCount) {
-        final GATKSAMRecord[] sampleReads = readsBySampleIndex[sampleIndex] =
-                Arrays.copyOf(readsBySampleIndex[sampleIndex], newSampleReadCount);
-
-        int nextReadIndex = sampleReadCount;
-        final Object2IntMap<GATKSAMRecord> sampleReadIndex = readIndexBySampleIndex[sampleIndex];
-        for (final GATKSAMRecord newRead : newSampleReads) {
-        //    if (sampleReadIndex.containsKey(newRead)) // might be worth handle this without exception (ignore the read?) but in practice should never be the case.
-        //        throw new IllegalArgumentException("you cannot add reads that are already in read-likelihood collection");
-            if (sampleReadIndex != null ) sampleReadIndex.put(newRead,nextReadIndex);
-            sampleReads[nextReadIndex++] = newRead;
-        }
-    }
-
-    /**
-     * Adds the non-reference allele to the read-likelihood collection setting each read likelihood to the second
-     * best found (or best one if only one allele has likelihood).
-     *
-     * <p>Nothing will happen if the read-likelihoods collection already includes the non-ref allele</p>
-     *
-     * <p>
-     *     <i>Implementation note: even when strictly speaking we do not need to demand the calling code to pass
-     *     the reference the non-ref allele, we still demand it in order to lead the
-     *     the calling code to use the right generic type for this likelihoods
-     *     collection {@link Allele}.</i>
-     * </p>
-     *
-     * @param nonRefAllele the non-ref allele.
-     *
-     * @throws IllegalArgumentException if {@code nonRefAllele} is anything but the designated <NON_REF>
-     * symbolic allele {@link GATKVariantContextUtils#NON_REF_SYMBOLIC_ALLELE}.
-     */
-    public void addNonReferenceAllele(final A nonRefAllele) {
-
-        if (nonRefAllele == null)
-            throw new IllegalArgumentException("non-ref allele cannot be null");
-        if (!nonRefAllele.equals(GATKVariantContextUtils.NON_REF_SYMBOLIC_ALLELE))
-            throw new IllegalArgumentException("the non-ref allele is not valid");
-        // Already present?
-        if (alleles.alleleIndex(nonRefAllele) != -1)
-            return;
-
-        final int oldAlleleCount = alleles.alleleCount();
-        final int newAlleleCount = oldAlleleCount + 1;
-        @SuppressWarnings("unchecked")
-        final A[] newAlleles = (A[]) new Allele[newAlleleCount];
-        for (int a = 0; a < oldAlleleCount; a++)
-            newAlleles[a] = alleles.alleleAt(a);
-        newAlleles[oldAlleleCount] = nonRefAllele;
-        alleles = new IndexedAlleleList<>(newAlleles);
-        alleleList = null; // remove the cached alleleList.
-
-        final int sampleCount = samples.sampleCount();
-        for (int s = 0; s < sampleCount; s++)
-            addNonReferenceAlleleLikelihoodsPerSample(oldAlleleCount, newAlleleCount, s);
-    }
-
-    // Updates per-sample structures according to the addition of the NON_REF allele.
-    private void addNonReferenceAlleleLikelihoodsPerSample(final int alleleCount, final int newAlleleCount, final int sampleIndex) {
-        final double[][] sampleValues = valuesBySampleIndex[sampleIndex] = Arrays.copyOf(valuesBySampleIndex[sampleIndex], newAlleleCount);
-        final int sampleReadCount = readsBySampleIndex[sampleIndex].length;
-
-        final double[] nonRefAlleleLikelihoods = sampleValues[alleleCount] = new double [sampleReadCount];
-        Arrays.fill(nonRefAlleleLikelihoods,Double.NEGATIVE_INFINITY);
-        for (int r = 0; r < sampleReadCount; r++) {
-            final BestAllele bestAllele = searchBestAllele(sampleIndex,r,true);
-            final double secondBestLikelihood = Double.isInfinite(bestAllele.confidence) ? bestAllele.likelihood
-                    : bestAllele.likelihood - bestAllele.confidence;
-            nonRefAlleleLikelihoods[r] = secondBestLikelihood;
-        }
-    }
-
-    /**
-     * Downsamples reads based on contamination fractions making sure that all alleles are affected proportionally.
-     *
-     * @param perSampleDownsamplingFraction contamination sample map where the sample name are the keys and the
-     *                                       fractions are the values.
-     *
-     * @throws IllegalArgumentException if {@code perSampleDownsamplingFraction} is {@code null}.
-     */
-    public void contaminationDownsampling(final Map<String, Double> perSampleDownsamplingFraction) {
-
-        final int sampleCount = samples.sampleCount();
-        final IntArrayList readsToRemove = new IntArrayList(10); // blind estimate, can be improved?
-        final int alleleCount = alleles.alleleCount();
-        for (int s = 0; s < sampleCount; s++) {
-            final String sample = samples.sampleAt(s);
-            final Double fractionDouble = perSampleDownsamplingFraction.get(sample);
-            if (fractionDouble == null)
-                continue;
-            final double fraction = fractionDouble;
-            if (Double.isNaN(fraction) || fraction <= 0.0)
-                continue;
-            if (fraction >= 1.0) {
-                final int sampleReadCount = readsBySampleIndex[s].length;
-                readsToRemove.ensureCapacity(sampleReadCount);
-                for (int r = 0; r < sampleReadCount; r++)
-                    readsToRemove.add(r);
-                removeSampleReads(s,readsToRemove,alleleCount);
-                readsToRemove.clear();
-            }
-            else {
-                final Map<A,List<GATKSAMRecord>> readsByBestAllelesMap = readsByBestAlleleMap(s);
-                removeSampleReads(s,AlleleBiasedDownsamplingUtils.selectAlleleBiasedReads(readsByBestAllelesMap, fraction),alleleCount);
-            }
-        }
-    }
-
-    /**
-     * Given a collection of likelihood in the old map format, it creates the corresponding read-likelihoods collection.
-     *
-     * @param alleleList the target list of alleles.
-     * @param map the likelihoods to transform.
-     *
-     *
-     * @throws IllegalArgumentException if {@code map} is {@code null}, or {@code map} does not contain likelihoods for all read vs allele combinations.
-     *
-     * @return never {@code null}.
-     */
-    public static ReadLikelihoods<Allele> fromPerAlleleReadLikelihoodsMap(final AlleleList<Allele> alleleList, final Map<String,PerReadAlleleLikelihoodMap> map) {
-
-        //TODO add test code for this method.
-        // First we need to create the read-likelihood collection with all required alleles, samples and reads.
-        final SampleList sampleList = new IndexedSampleList(map.keySet());
-        final int alleleCount = alleleList.alleleCount();
-        final Map<String,List<GATKSAMRecord>> sampleToReads = new HashMap<>(sampleList.sampleCount());
-        for (final Map.Entry<String,PerReadAlleleLikelihoodMap> entry : map.entrySet()) {
-            final String sample = entry.getKey();
-            final PerReadAlleleLikelihoodMap sampleLikelihoods = entry.getValue();
-            sampleToReads.put(sample,new ArrayList<>(sampleLikelihoods.getLikelihoodReadMap().keySet()));
-        }
-
-        final ReadLikelihoods<Allele> result = new ReadLikelihoods<>(sampleList,alleleList,sampleToReads);
-
-        // Now set the likelihoods.
-        for (final Map.Entry<String,PerReadAlleleLikelihoodMap> sampleEntry : map.entrySet()) {
-            final ReadLikelihoods.Matrix<Allele> sampleMatrix = result.sampleMatrix(result.sampleIndex(sampleEntry.getKey()));
-            for (final Map.Entry<GATKSAMRecord,Map<Allele,Double>> readEntry : sampleEntry.getValue().getLikelihoodReadMap().entrySet()) {
-                final GATKSAMRecord read = readEntry.getKey();
-                final int readIndex = sampleMatrix.readIndex(read);
-                final Map<Allele,Double> alleleToLikelihoodMap = readEntry.getValue();
-                for (int a = 0; a < alleleCount; a++) {
-                    final Allele allele = alleleList.alleleAt(a);
-                    final Double likelihood = alleleToLikelihoodMap.get(allele);
-                    if (likelihood == null)
-                        throw new IllegalArgumentException("there is no likelihood for allele " + allele + " and read " + read);
-                    sampleMatrix.set(a,readIndex,likelihood);
-                }
-            }
-        }
-        return result;
-    }
-
-    /**
-     * Returns the collection of best allele estimates for the reads based on the read-likelihoods.
-     *
-     * @throws IllegalStateException if there is no alleles.
-     *
-     * @return never {@code null}, one element per read in the read-likelihoods collection.
-     */
-    public Collection<BestAllele> bestAlleles() {
-        final List<BestAllele> result = new ArrayList<>(100); // blind estimate.
-        final int sampleCount = samples.sampleCount();
-        for (int s = 0; s < sampleCount; s++) {
-            final GATKSAMRecord[] sampleReads = readsBySampleIndex[s];
-            final int readCount = sampleReads.length;
-            for (int r = 0; r < readCount; r++)
-                result.add(searchBestAllele(s,r,true));
-        }
-        return result;
-    }
-
-    /**
-     * Returns reads stratified by their best allele.
-     * @param sampleIndex the target sample.
-     * @return never {@code null}, perhaps empty.
-     */
-    public Map<A,List<GATKSAMRecord>> readsByBestAlleleMap(final int sampleIndex) {
-        checkSampleIndex(sampleIndex);
-        final int alleleCount = alleles.alleleCount();
-        final int sampleReadCount = readsBySampleIndex[sampleIndex].length;
-        final Map<A,List<GATKSAMRecord>> result = new HashMap<>(alleleCount);
-        for (int a = 0; a < alleleCount; a++)
-            result.put(alleles.alleleAt(a),new ArrayList<GATKSAMRecord>(sampleReadCount));
-        readsByBestAlleleMap(sampleIndex,result);
-        return result;
-    }
-
-    /**
-     * Returns reads stratified by their best allele.
-     * @return never {@code null}, perhaps empty.
-     */
-    @SuppressWarnings("unused")
-    public Map<A,List<GATKSAMRecord>> readsByBestAlleleMap() {
-        final int alleleCount = alleles.alleleCount();
-        final Map<A,List<GATKSAMRecord>> result = new HashMap<>(alleleCount);
-        final int totalReadCount = readCount();
-        for (int a = 0; a < alleleCount; a++)
-            result.put(alleles.alleleAt(a),new ArrayList<GATKSAMRecord>(totalReadCount));
-        final int sampleCount = samples.sampleCount();
-        for (int s = 0; s < sampleCount; s++)
-            readsByBestAlleleMap(s,result);
-        return result;
-    }
-
-    private void readsByBestAlleleMap(final int sampleIndex, final Map<A,List<GATKSAMRecord>> result) {
-        final GATKSAMRecord[] reads = readsBySampleIndex[sampleIndex];
-        final int readCount = reads.length;
-
-        for (int r = 0; r < readCount; r++) {
-            final BestAllele bestAllele = searchBestAllele(sampleIndex,r,true);
-            if (!bestAllele.isInformative())
-                continue;
-            result.get(bestAllele.allele).add(bestAllele.read);
-        }
-    }
-
-    /**
-     * Returns the index of a read within a sample read-likelihood sub collection.
-     * @param sampleIndex the sample index.
-     * @param read the query read.
-     * @return -1 if there is no such read in that sample, 0 or greater otherwise.
-     */
-    @SuppressWarnings("unused")
-    public int readIndex(final int sampleIndex, final GATKSAMRecord read) {
-        final Object2IntMap<GATKSAMRecord> readIndex = readIndexBySampleIndex(sampleIndex);
-        if (readIndex.containsKey(read))
-            return readIndexBySampleIndex(sampleIndex).getInt(read);
-        else
-            return -1;
-    }
-
-    /**
-     * Returns the total number of reads in the read-likelihood collection.
-     *
-     * @return 0 or greater: the sum of read counts across all samples.
-     */
-    public int readCount() {
-        int sum = 0;
-        final int sampleCount = samples.sampleCount();
-        for (int i = 0; i < sampleCount; i++)
-            sum += readsBySampleIndex[i].length;
-        return sum;
-    }
-
-    /**
-     * Returns the number of reads that belong to a sample in the read-likelihood collection.
-     * @param sampleIndex the query sample index.
-     *
-     * @throws IllegalArgumentException if {@code sampleIndex} is not a valid sample index.
-     * @return 0 or greater.
-     */
-    public int sampleReadCount(int sampleIndex) {
-        checkSampleIndex(sampleIndex);
-        return readsBySampleIndex[sampleIndex].length;
-    }
-
-    /**
-     * Contains information about the best allele for a read search result.
-     */
-    public class BestAllele {
-        public static final double INFORMATIVE_THRESHOLD = 0.2;
-
-        /**
-         * Null if there is no possible match (no allele?).
-         */
-        public final A allele;
-
-        /**
-         * The containing sample.
-         */
-        public final String sample;
-
-        /**
-         * The query read.
-         */
-        public final GATKSAMRecord read;
-
-        /**
-         * If allele != null, this indicates the likelihood of the read.
-         */
-        public final double likelihood;
-
-        /**
-         * Confidence that the read actually was generated under that likelihood.
-         * This is equal to the difference between this and the second best allele match.
-         */
-        public final double confidence;
-
-        private BestAllele(final int sampleIndex, final int readIndex, final int bestAlleleIndex,
-                           final double likelihood, final double secondBestLikelihood) {
-            allele = bestAlleleIndex == -1 ? null : alleles.alleleAt(bestAlleleIndex);
-            this.likelihood = likelihood;
-            sample = samples.sampleAt(sampleIndex);
-            read = readsBySampleIndex[sampleIndex][readIndex];
-            confidence = likelihood == secondBestLikelihood ? 0 : likelihood - secondBestLikelihood;
-        }
-
-        public boolean isInformative() {
-            return confidence > INFORMATIVE_THRESHOLD;
-        }
-    }
-
-    private void removeSampleReads(final int sampleIndex, final IntArrayList indexToRemove, final int alleleCount) {
-        final int removeCount = indexToRemove.size();
-        if (removeCount == 0)
-            return;
-
-        final GATKSAMRecord[] sampleReads = readsBySampleIndex[sampleIndex];
-        final int sampleReadCount = sampleReads.length;
-
-        final Object2IntMap<GATKSAMRecord> indexByRead = readIndexBySampleIndex[sampleIndex];
-        if (indexByRead != null)
-            for (int i = 0; i < removeCount; i++)
-                indexByRead.remove(sampleReads[indexToRemove.getInt(i)]);
-        final boolean[] removeIndex = new boolean[sampleReadCount];
-        int firstDeleted = indexToRemove.get(0);
-        for (int i = 0; i < removeCount; i++)
-            removeIndex[indexToRemove.get(i)] = true;
-
-        final int newSampleReadCount = sampleReadCount - removeCount;
-
-        // Now we skim out the removed reads from the read array.
-        final GATKSAMRecord[] oldSampleReads = readsBySampleIndex[sampleIndex];
-        final GATKSAMRecord[] newSampleReads = new GATKSAMRecord[newSampleReadCount];
-
-        System.arraycopy(oldSampleReads,0,newSampleReads,0,firstDeleted);
-        Utils.skimArray(oldSampleReads,firstDeleted, newSampleReads, firstDeleted, removeIndex, firstDeleted);
-
-        // Then we skim out the likelihoods of the removed reads.
-        final double[][] oldSampleValues = valuesBySampleIndex[sampleIndex];
-        final double[][] newSampleValues = new double[alleleCount][newSampleReadCount];
-        for (int a = 0; a < alleleCount; a++) {
-            System.arraycopy(oldSampleValues[a],0,newSampleValues[a],0,firstDeleted);
-            Utils.skimArray(oldSampleValues[a], firstDeleted, newSampleValues[a], firstDeleted, removeIndex, firstDeleted);
-        }
-        valuesBySampleIndex[sampleIndex] = newSampleValues;
-        readsBySampleIndex[sampleIndex] = newSampleReads;
-        readListBySampleIndex[sampleIndex] = null; // reset the unmodifiable list.
-    }
-
-
-    // Requires that the collection passed iterator can remove elements, and it can be modified.
-    private void removeSampleReads(final int sampleIndex, final Collection<GATKSAMRecord> readsToRemove, final int alleleCount) {
-        final GATKSAMRecord[] sampleReads = readsBySampleIndex[sampleIndex];
-        final int sampleReadCount = sampleReads.length;
-
-        final Object2IntMap<GATKSAMRecord> indexByRead = readIndexBySampleIndex(sampleIndex);
-        // Count how many we are going to remove, which ones (indexes) and remove entry from the read-index map.
-        final boolean[] removeIndex = new boolean[sampleReadCount];
-        int removeCount = 0; // captures the number of deletions.
-        int firstDeleted = sampleReadCount;    // captures the first position that was deleted.
-
-        final Iterator<GATKSAMRecord> readsToRemoveIterator = readsToRemove.iterator();
-        while (readsToRemoveIterator.hasNext()) {
-            final GATKSAMRecord read = readsToRemoveIterator.next();
-            if (indexByRead.containsKey(read)) {
-                final int index = indexByRead.getInt(read);
-                if (firstDeleted > index)
-                    firstDeleted = index;
-                removeCount++;
-                removeIndex[index] = true;
-                readsToRemoveIterator.remove();
-                indexByRead.remove(read);
-            }
-        }
-
-        // Nothing to remove we just finish here.
-        if (removeCount == 0)
-            return;
-
-        final int newSampleReadCount = sampleReadCount - removeCount;
-
-        // Now we skim out the removed reads from the read array.
-        final GATKSAMRecord[] oldSampleReads = readsBySampleIndex[sampleIndex];
-        final GATKSAMRecord[] newSampleReads = new GATKSAMRecord[newSampleReadCount];
-
-        System.arraycopy(oldSampleReads,0,newSampleReads,0,firstDeleted);
-        Utils.skimArray(oldSampleReads,firstDeleted, newSampleReads, firstDeleted, removeIndex, firstDeleted);
-
-        // Update the indices for the extant reads from the first deletion onwards.
-        for (int r = firstDeleted; r < newSampleReadCount; r++) {
-            indexByRead.put(newSampleReads[r], r);
-        }
-
-        // Then we skim out the likelihoods of the removed reads.
-        final double[][] oldSampleValues = valuesBySampleIndex[sampleIndex];
-        final double[][] newSampleValues = new double[alleleCount][newSampleReadCount];
-        for (int a = 0; a < alleleCount; a++) {
-            System.arraycopy(oldSampleValues[a],0,newSampleValues[a],0,firstDeleted);
-            Utils.skimArray(oldSampleValues[a], firstDeleted, newSampleValues[a], firstDeleted, removeIndex, firstDeleted);
-        }
-        valuesBySampleIndex[sampleIndex] = newSampleValues;
-        readsBySampleIndex[sampleIndex] = newSampleReads;
-        readListBySampleIndex[sampleIndex] = null; // reset the unmodifiable list.
-    }
-
-    private Object2IntMap<GATKSAMRecord> readIndexBySampleIndex(final int sampleIndex) {
-        if (readIndexBySampleIndex[sampleIndex] == null) {
-            final GATKSAMRecord[] sampleReads = readsBySampleIndex[sampleIndex];
-            final int sampleReadCount = sampleReads.length;
-            readIndexBySampleIndex[sampleIndex] = new Object2IntOpenHashMap<>(sampleReadCount);
-            for (int r = 0; r < sampleReadCount; r++)
-                readIndexBySampleIndex[sampleIndex].put(sampleReads[r],r);
-        }
-        return readIndexBySampleIndex[sampleIndex];
-    }
-
-    /**
-     * Transform into a multi-sample HashMap backed {@link PerReadAlleleLikelihoodMap} type.
-     * @return never {@code null}.
-     *
-     * @deprecated
-     *
-     * This method should eventually disappear once we have removed PerReadAlleleLikelihoodMap class completely.
-     */
-    @Deprecated
-    @SuppressWarnings("all")
-    public Map<String, PerReadAlleleLikelihoodMap> toPerReadAlleleLikelihoodMap() {
-        final int sampleCount = samples.sampleCount();
-        final Map<String, PerReadAlleleLikelihoodMap> result = new HashMap<>(sampleCount);
-        for (int s = 0; s < sampleCount; s++)
-            result.put(samples.sampleAt(s),toPerReadAlleleLikelihoodMap(s));
-        return result;
-    }
-
-    /**
-     * Transform into a single-sample HashMap backed {@link PerReadAlleleLikelihoodMap} type.
-     *
-     * @return never {@code null}.
-     */
-    @Deprecated
-    public PerReadAlleleLikelihoodMap toPerReadAlleleLikelihoodMap(final int sampleIndex) {
-        checkSampleIndex(sampleIndex);
-        final PerReadAlleleLikelihoodMap result = new PerReadAlleleLikelihoodMap();
-        final int alleleCount = alleles.alleleCount();
-        final GATKSAMRecord[] sampleReads = readsBySampleIndex[sampleIndex];
-        final int sampleReadCount = sampleReads.length;
-        for (int a = 0; a < alleleCount; a++) {
-            final A allele = alleles.alleleAt(a);
-            final double[] readLikelihoods = valuesBySampleIndex[sampleIndex][a];
-            for (int r = 0; r < sampleReadCount; r++)
-                result.add(sampleReads[r], allele, readLikelihoods[r]);
-        }
-        return result;
-    }
-
-    /**
-     * Implements a likelihood matrix per sample given its index.
-     */
-    private class SampleMatrix implements Matrix<A> {
-
-        private final int sampleIndex;
-
-        private SampleMatrix(final int sampleIndex) {
-            this.sampleIndex = sampleIndex;
-        }
-
-        @Override
-        public List<GATKSAMRecord> reads() {
-            return sampleReads(sampleIndex);
-        }
-
-        @Override
-        public List<A> alleles() {
-            return ReadLikelihoods.this.alleles();
-        }
-
-        @Override
-        public void set(final int alleleIndex, final int readIndex, final double value) {
-            valuesBySampleIndex[sampleIndex][alleleIndex][readIndex] = value;
-        }
-
-        @Override
-        public double get(final int alleleIndex, final int readIndex) {
-            return valuesBySampleIndex[sampleIndex][alleleIndex][readIndex];
-        }
-
-        @Override
-        public int alleleIndex(final A allele) {
-            return ReadLikelihoods.this.alleleIndex(allele);
-        }
-
-        @Override
-        public int readIndex(final GATKSAMRecord read) {
-            return ReadLikelihoods.this.readIndex(sampleIndex, read);
-        }
-
-        @Override
-        public int alleleCount() {
-            return alleles.alleleCount();
-        }
-
-        @Override
-        public int readCount() {
-            return readsBySampleIndex[sampleIndex].length;
-        }
-
-        @Override
-        public A alleleAt(int alleleIndex) {
-            return ReadLikelihoods.this.alleleAt(alleleIndex);
-        }
-
-        @Override
-        public GATKSAMRecord readAt(final int readIndex) {
-            if (readIndex < 0)
-                throw new IllegalArgumentException("the read-index cannot be negative");
-            final GATKSAMRecord[] sampleReads = readsBySampleIndex[sampleIndex];
-            if (readIndex >= sampleReads.length)
-                throw new IllegalArgumentException("the read-index is beyond the read count of the sample");
-            return sampleReads[readIndex];
-        }
-
-        @Override
-        public void copyAlleleLikelihoods(final int alleleIndex, final double[] dest, final int offset) {
-            System.arraycopy(valuesBySampleIndex[sampleIndex][alleleIndex],0,dest,offset,readCount());
-        }
-    }
-
-    /**
-     * Checks whether the provide sample index is valid.
-     * <p>
-     *     If not, it throws an exception.
-     * </p>
-     * @param sampleIndex the target sample index.
-     *
-     * @throws IllegalArgumentException if {@code sampleIndex} is invalid, i.e. outside the range [0,{@link #sampleCount}).
-     */
-    private void checkSampleIndex(final int sampleIndex) {
-        if (sampleIndex < 0 || sampleIndex >= samples.sampleCount())
-            throw new IllegalArgumentException("invalid sample index: " + sampleIndex);
-    }
-}
\ No newline at end of file
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/haplotype/EventMap.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/haplotype/EventMap.java
deleted file mode 100644
index e5eee12..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/haplotype/EventMap.java
+++ /dev/null
@@ -1,423 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.haplotype;
-
-import com.google.java.contract.Ensures;
-import com.google.java.contract.Requires;
-import htsjdk.samtools.Cigar;
-import htsjdk.samtools.CigarElement;
-import org.apache.commons.lang.ArrayUtils;
-import org.apache.log4j.Logger;
-import org.broadinstitute.gatk.utils.BaseUtils;
-import org.broadinstitute.gatk.utils.GenomeLoc;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-import org.broadinstitute.gatk.utils.sam.AlignmentUtils;
-import htsjdk.variant.variantcontext.Allele;
-import htsjdk.variant.variantcontext.VariantContext;
-import htsjdk.variant.variantcontext.VariantContextBuilder;
-
-import java.util.*;
-
-/**
- * Extract simple VariantContext events from a single haplotype
- *
- * User: depristo
- * Date: 3/27/13
- * Time: 8:35 AM
- */
-public class EventMap extends TreeMap<Integer, VariantContext> {
-    private final static Logger logger = Logger.getLogger(EventMap.class);
-    protected final static int MIN_NUMBER_OF_EVENTS_TO_COMBINE_INTO_BLOCK_SUBSTITUTION = 3;
-    private static final int MAX_EVENTS_PER_HAPLOTYPE = 3;
-    private static final int MAX_INDELS_PER_HAPLOTYPE = 2;
-    public final static Allele SYMBOLIC_UNASSEMBLED_EVENT_ALLELE = Allele.create("<UNASSEMBLED_EVENT>", false);
-
-    private final Haplotype haplotype;
-    private final byte[] ref;
-    private final GenomeLoc refLoc;
-    private final String sourceNameToAdd;
-
-    public EventMap(final Haplotype haplotype, final byte[] ref, final GenomeLoc refLoc, final String sourceNameToAdd) {
-        super();
-        this.haplotype = haplotype;
-        this.ref = ref;
-        this.refLoc = refLoc;
-        this.sourceNameToAdd = sourceNameToAdd;
-
-        processCigarForInitialEvents();
-    }
-
-    /**
-     * For testing.  Lets you set up an explicit configuration without having to process a haplotype and reference
-     * @param stateForTesting
-     */
-    public EventMap(final Collection<VariantContext> stateForTesting) {
-        haplotype = null;
-        ref = null;
-        refLoc = null;
-        sourceNameToAdd = null;
-        for ( final VariantContext vc : stateForTesting )
-            addVC(vc);
-    }
-
-    protected void processCigarForInitialEvents() {
-        final Cigar cigar = haplotype.getCigar();
-        final byte[] alignment = haplotype.getBases();
-
-        int refPos = haplotype.getAlignmentStartHapwrtRef();
-        if( refPos < 0 ) {
-            return;
-        } // Protection against SW failures
-
-        final List<VariantContext> proposedEvents = new ArrayList<>();
-
-        int alignmentPos = 0;
-
-        for( int cigarIndex = 0; cigarIndex < cigar.numCigarElements(); cigarIndex++ ) {
-            final CigarElement ce = cigar.getCigarElement(cigarIndex);
-            final int elementLength = ce.getLength();
-            switch( ce.getOperator() ) {
-                case I:
-                {
-                    if( refPos > 0 ) { // protect against trying to create insertions/deletions at the beginning of a contig
-                        final List<Allele> insertionAlleles = new ArrayList<Allele>();
-                        final int insertionStart = refLoc.getStart() + refPos - 1;
-                        final byte refByte = ref[refPos-1];
-                        if( BaseUtils.isRegularBase(refByte) ) {
-                            insertionAlleles.add( Allele.create(refByte, true) );
-                        }
-                        if( cigarIndex == 0 || cigarIndex == cigar.getCigarElements().size() - 1 ) {
-                            // if the insertion isn't completely resolved in the haplotype, skip it
-                            // note this used to emit SYMBOLIC_UNASSEMBLED_EVENT_ALLELE but that seems dangerous
-                        } else {
-                            byte[] insertionBases = new byte[]{};
-                            insertionBases = ArrayUtils.add(insertionBases, ref[refPos - 1]); // add the padding base
-                            insertionBases = ArrayUtils.addAll(insertionBases, Arrays.copyOfRange(alignment, alignmentPos, alignmentPos + elementLength));
-                            if( BaseUtils.isAllRegularBases(insertionBases) ) {
-                                insertionAlleles.add( Allele.create(insertionBases, false) );
-                            }
-                        }
-                        if( insertionAlleles.size() == 2 ) { // found a proper ref and alt allele
-                            proposedEvents.add(new VariantContextBuilder(sourceNameToAdd, refLoc.getContig(), insertionStart, insertionStart, insertionAlleles).make());
-                        }
-                    }
-                    alignmentPos += elementLength;
-                    break;
-                }
-                case S:
-                {
-                    alignmentPos += elementLength;
-                    break;
-                }
-                case D:
-                {
-                    if( refPos > 0 ) { // protect against trying to create insertions/deletions at the beginning of a contig
-                        final byte[] deletionBases = Arrays.copyOfRange( ref, refPos - 1, refPos + elementLength );  // add padding base
-                        final List<Allele> deletionAlleles = new ArrayList<Allele>();
-                        final int deletionStart = refLoc.getStart() + refPos - 1;
-                        final byte refByte = ref[refPos-1];
-                        if( BaseUtils.isRegularBase(refByte) && BaseUtils.isAllRegularBases(deletionBases) ) {
-                            deletionAlleles.add( Allele.create(deletionBases, true) );
-                            deletionAlleles.add( Allele.create(refByte, false) );
-                            proposedEvents.add(new VariantContextBuilder(sourceNameToAdd, refLoc.getContig(), deletionStart, deletionStart + elementLength, deletionAlleles).make());
-                        }
-                    }
-                    refPos += elementLength;
-                    break;
-                }
-                case M:
-                case EQ:
-                case X:
-                {
-                    for( int iii = 0; iii < elementLength; iii++ ) {
-                        final byte refByte = ref[refPos];
-                        final byte altByte = alignment[alignmentPos];
-                        if( refByte != altByte ) { // SNP!
-                            if( BaseUtils.isRegularBase(refByte) && BaseUtils.isRegularBase(altByte) ) {
-                                final List<Allele> snpAlleles = new ArrayList<Allele>();
-                                snpAlleles.add( Allele.create( refByte, true ) );
-                                snpAlleles.add( Allele.create( altByte, false ) );
-                                proposedEvents.add(new VariantContextBuilder(sourceNameToAdd, refLoc.getContig(), refLoc.getStart() + refPos, refLoc.getStart() + refPos, snpAlleles).make());
-                            }
-                        }
-                        refPos++;
-                        alignmentPos++;
-                    }
-                    break;
-                }
-                case N:
-                case H:
-                case P:
-                default:
-                    throw new ReviewedGATKException( "Unsupported cigar operator created during SW alignment: " + ce.getOperator() );
-            }
-        }
-
-        for ( final VariantContext proposedEvent : proposedEvents )
-            addVC(proposedEvent, true);
-    }
-
-    /**
-     * Add VariantContext vc to this map, merging events with the same start sites if necessary
-     * @param vc the variant context to add
-     */
-    protected void addVC(final VariantContext vc) {
-        addVC(vc, true);
-    }
-
-    /**
-     * Add VariantContext vc to this map
-     * @param vc the variant context to add
-     * @param merge should we attempt to merge it with an already existing element, or should we throw an error in that case?
-     */
-    protected void addVC(final VariantContext vc, final boolean merge) {
-        if ( vc == null ) throw new IllegalArgumentException("vc cannot be null");
-
-        if ( containsKey(vc.getStart()) ) {
-            if ( merge ) {
-                final VariantContext prev = get(vc.getStart());
-                put(vc.getStart(), makeBlock(prev, vc));
-            } else {
-                throw new IllegalStateException("Will not merge previously bound variant contexts as merge is false at " + vc);
-            }
-        } else
-            put(vc.getStart(), vc);
-    }
-
-    /**
-     * Create a block substitution out of two variant contexts that start at the same position
-     *
-     * vc1 can be a SNP, and vc2 can then be either an insertion or a deletion.
-     * If vc1 is an indel, then vc2 must be the opposite type (vc1 deletion => vc2 must be an insertion)
-     *
-     * @param vc1 the first variant context we want to merge
-     * @param vc2 the second
-     * @return a block substitution that represents the composite substitution implied by vc1 and vc2
-     */
-    protected VariantContext makeBlock(final VariantContext vc1, final VariantContext vc2) {
-        if ( vc1.getStart() != vc2.getStart() )  throw new IllegalArgumentException("vc1 and 2 must have the same start but got " + vc1 + " and " + vc2);
-        if ( ! vc1.isBiallelic() ) throw new IllegalArgumentException("vc1 must be biallelic");
-        if ( ! vc1.isSNP() ) {
-            if ( ! ((vc1.isSimpleDeletion() && vc2.isSimpleInsertion()) || (vc1.isSimpleInsertion() && vc2.isSimpleDeletion())))
-                throw new IllegalArgumentException("Can only merge single insertion with deletion (or vice versa) but got " + vc1 + " merging with " + vc2);
-        } else if ( vc2.isSNP() ) {
-            throw new IllegalArgumentException("vc1 is " + vc1 + " but vc2 is a SNP, which implies there's been some terrible bug in the cigar " + vc2);
-        }
-
-        final Allele ref, alt;
-        final VariantContextBuilder b = new VariantContextBuilder(vc1);
-        if ( vc1.isSNP() ) {
-            // we have to repair the first base, so SNP case is special cased
-            if ( vc1.getReference().equals(vc2.getReference()) ) {
-                // we've got an insertion, so we just update the alt to have the prev alt
-                ref = vc1.getReference();
-                alt = Allele.create(vc1.getAlternateAllele(0).getDisplayString() + vc2.getAlternateAllele(0).getDisplayString().substring(1), false);
-            } else {
-                // we're dealing with a deletion, so we patch the ref
-                ref = vc2.getReference();
-                alt = vc1.getAlternateAllele(0);
-                b.stop(vc2.getEnd());
-            }
-        } else {
-            final VariantContext insertion = vc1.isSimpleInsertion() ? vc1 : vc2;
-            final VariantContext deletion  = vc1.isSimpleInsertion() ? vc2 : vc1;
-            ref = deletion.getReference();
-            alt = insertion.getAlternateAllele(0);
-            b.stop(deletion.getEnd());
-        }
-
-        return b.alleles(Arrays.asList(ref, alt)).make();
-    }
-
-    // TODO -- warning this is an O(N^3) algorithm because I'm just lazy.  If it's valuable we need to reengineer it
-    @Requires("getNumberOfEvents() > 0")
-    protected void replaceClumpedEventsWithBlockSubstitutions() {
-        if ( getNumberOfEvents() >= MIN_NUMBER_OF_EVENTS_TO_COMBINE_INTO_BLOCK_SUBSTITUTION) {
-            int lastStart = -1;
-            for ( boolean foundOne = true; foundOne; ) {
-                foundOne = false;
-                for ( final VariantContext vc : getVariantContexts() ) {
-                    if ( vc.getStart() > lastStart ) {
-                        lastStart = vc.getStart();
-                        final List<VariantContext> neighborhood = getNeighborhood(vc, 10);
-                        if ( updateToBlockSubstitutionIfBetter(neighborhood) ) {
-                            foundOne = true;
-                            break;
-                        }
-                    }
-                }
-            }
-        }
-    }
-
-    protected boolean updateToBlockSubstitutionIfBetter(final List<VariantContext> neighbors) {
-        if (neighbors.size() < MIN_NUMBER_OF_EVENTS_TO_COMBINE_INTO_BLOCK_SUBSTITUTION)
-            return false;
-        // TODO -- need more tests to decide if this is really so good
-
-        final VariantContext first = neighbors.get(0);
-        final int refStartOffset = first.getStart() - refLoc.getStart();
-        final int refEndOffset = neighbors.get(neighbors.size() - 1).getEnd() - refLoc.getStart();
-
-        final byte[] refBases = Arrays.copyOfRange(ref, refStartOffset, refEndOffset + 1);
-        final byte[] hapBases = AlignmentUtils.getBasesCoveringRefInterval(refStartOffset, refEndOffset, haplotype.getBases(), haplotype.getAlignmentStartHapwrtRef(), haplotype.getCigar());
-
-        final VariantContextBuilder builder = new VariantContextBuilder(first);
-        builder.stop(first.getStart() + refBases.length - 1);
-        builder.alleles(Arrays.asList(Allele.create(refBases, true), Allele.create(hapBases)));
-        final VariantContext block = builder.make();
-
-        // remove all merged events
-        for ( final VariantContext merged : neighbors ) {
-            if ( remove(merged.getStart()) == null )
-                throw new IllegalArgumentException("Expected to remove variant context from the event map but remove said there wasn't any element there: " + merged);
-        }
-
-        // note must be after we remove the previous events as the treeset only allows one key per start
-        logger.info("Transforming into block substitution at " + block);
-        addVC(block, false);
-
-        return true;
-    }
-
-    /**
-     * Get all of the variant contexts starting at leftMost that are within maxBP of each other
-     *
-     * @param leftMost the left most (smallest position) variant context that will start the neighborhood
-     * @param maxBPBetweenEvents the maximum distance in BP between the end of one event the start of the next
-     *                           to be included in the resulting list
-     * @return a list that contains at least one element (leftMost)
-     */
-    @Requires({"leftMost != null", "maxBPBetweenEvents >= 0"})
-    @Ensures({"result != null", "! result.isEmpty()"})
-    protected List<VariantContext> getNeighborhood(final VariantContext leftMost, final int maxBPBetweenEvents) {
-        final List<VariantContext> neighbors = new LinkedList<VariantContext>();
-
-        VariantContext left = leftMost;
-        for ( final VariantContext vc : getVariantContexts() ) {
-            if ( vc.getStart() < leftMost.getStart() )
-                continue;
-
-            if ( vc.getStart() - left.getEnd() < maxBPBetweenEvents ) {
-                // this vc is within max distance to the end of the left event, so accumulate it
-                neighbors.add(vc);
-                left = vc;
-            }
-        }
-
-        return neighbors;
-    }
-
-    /**
-     * Get the starting positions of events in this event map
-     * @return
-     */
-    public Set<Integer> getStartPositions() {
-        return keySet();
-    }
-
-    /**
-     * Get the variant contexts in order of start position in this event map
-     * @return
-     */
-    public Collection<VariantContext> getVariantContexts() {
-        return values();
-    }
-
-    /**
-     * How many events do we have?
-     * @return
-     */
-    public int getNumberOfEvents() {
-        return size();
-    }
-
-    @Override
-    public String toString() {
-        final StringBuilder b = new StringBuilder("EventMap{");
-        for ( final VariantContext vc : getVariantContexts() )
-            b.append(String.format("%s:%d-%d %s,", vc.getChr(), vc.getStart(), vc.getEnd(), vc.getAlleles()));
-        b.append("}");
-        return b.toString();
-    }
-
-    /**
-     * Build event maps for each haplotype, returning the sorted set of all of the starting positions of all
-     * events across all haplotypes
-     *
-     * @param haplotypes a list of haplotypes
-     * @param ref the reference bases
-     * @param refLoc the span of the reference bases
-     * @param debug if true, we'll emit debugging information during this operation
-     * @return a sorted set of start positions of all events among all haplotypes
-     */
-    public static TreeSet<Integer> buildEventMapsForHaplotypes( final List<Haplotype> haplotypes,
-                                                                final byte[] ref,
-                                                                final GenomeLoc refLoc,
-                                                                final boolean debug) {
-        // Using the cigar from each called haplotype figure out what events need to be written out in a VCF file
-        final TreeSet<Integer> startPosKeySet = new TreeSet<Integer>();
-        int hapNumber = 0;
-
-        if( debug ) logger.info("=== Best Haplotypes ===");
-        for( final Haplotype h : haplotypes ) {
-            // Walk along the alignment and turn any difference from the reference into an event
-            h.setEventMap( new EventMap( h, ref, refLoc, "HC" + hapNumber++ ) );
-            startPosKeySet.addAll(h.getEventMap().getStartPositions());
-
-            if( debug ) {
-                logger.info(h.toString());
-                logger.info("> Cigar = " + h.getCigar());
-                logger.info(">> Events = " + h.getEventMap());
-            }
-        }
-
-        return startPosKeySet;
-    }
-
-    private static class VariantContextComparator implements Comparator<VariantContext> {
-        @Override
-        public int compare(VariantContext vc1, VariantContext vc2) {
-            return vc1.getStart() - vc2.getStart();
-        }
-    }
-
-    /**
-     * Get all of the VariantContexts in the event maps for all haplotypes, sorted by their start position
-     * @param haplotypes the set of haplotypes to grab the VCs from
-     * @return a sorted set of variant contexts
-     */
-    public static TreeSet<VariantContext> getAllVariantContexts( final List<Haplotype> haplotypes ) {
-        // Using the cigar from each called haplotype figure out what events need to be written out in a VCF file
-        final TreeSet<VariantContext> vcs = new TreeSet<VariantContext>(new VariantContextComparator());
-
-        for( final Haplotype h : haplotypes ) {
-            vcs.addAll(h.getEventMap().getVariantContexts());
-        }
-
-        return vcs;
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/haplotype/Haplotype.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/haplotype/Haplotype.java
deleted file mode 100644
index 7b31b2a..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/haplotype/Haplotype.java
+++ /dev/null
@@ -1,343 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.haplotype;
-
-import com.google.java.contract.Requires;
-import htsjdk.samtools.Cigar;
-import htsjdk.samtools.CigarElement;
-import htsjdk.samtools.CigarOperator;
-import org.apache.commons.lang.ArrayUtils;
-import org.broadinstitute.gatk.engine.contexts.ReferenceContext;
-import org.broadinstitute.gatk.utils.GenomeLoc;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-import org.broadinstitute.gatk.utils.sam.AlignmentUtils;
-import org.broadinstitute.gatk.utils.sam.ReadUtils;
-import htsjdk.variant.variantcontext.Allele;
-
-import java.util.Arrays;
-import java.util.Comparator;
-import java.util.LinkedHashMap;
-import java.util.List;
-
-public class Haplotype extends Allele {
-
-
-    private GenomeLoc genomeLocation = null;
-    private EventMap eventMap = null;
-    private Cigar cigar;
-    private int alignmentStartHapwrtRef;
-    private double score = Double.NaN;
-
-    /**
-     * Main constructor
-     *
-     * @param bases a non-null array of bases
-     * @param isRef is this the reference haplotype?
-     */
-    public Haplotype( final byte[] bases, final boolean isRef ) {
-        super(bases.clone(), isRef);
-    }
-
-    /**
-     * Create a new non-ref haplotype
-     *
-     * @param bases a non-null array of bases
-     */
-    public Haplotype( final byte[] bases ) {
-        this(bases, false);
-    }
-
-    /**
-     * Create a new haplotype with bases
-     *
-     * Requires bases.length == cigar.getReadLength()
-     *
-     * @param bases a non-null array of bases
-     * @param isRef is this the reference haplotype?
-     * @param alignmentStartHapwrtRef offset of this haplotype w.r.t. the reference
-     * @param cigar the cigar that maps this haplotype to the reference sequence
-     */
-    public Haplotype( final byte[] bases, final boolean isRef, final int alignmentStartHapwrtRef, final Cigar cigar) {
-        this(bases, isRef);
-        this.alignmentStartHapwrtRef = alignmentStartHapwrtRef;
-        setCigar(cigar);
-    }
-
-    /**
-     * Copy constructor.  Note the ref state of the provided allele is ignored!
-     *
-     * @param allele allele to copy
-     */
-    public Haplotype( final Allele allele ) {
-        super(allele, true);
-    }
-
-    public Haplotype( final byte[] bases, final GenomeLoc loc ) {
-        this(bases, false);
-        this.genomeLocation = loc;
-    }
-
-    /**
-     * Create a new Haplotype derived from this one that exactly spans the provided location
-     *
-     * Note that this haplotype must have a contain a genome loc for this operation to be successful.  If no
-     * GenomeLoc is contained than @throws an IllegalStateException
-     *
-     * Also loc must be fully contained within this Haplotype's genomeLoc.  If not an IllegalArgumentException is
-     * thrown.
-     *
-     * @param loc a location completely contained within this Haplotype's location
-     * @return a new Haplotype within only the bases spanning the provided location, or null for some reason the haplotype would be malformed if
-     */
-    public Haplotype trim(final GenomeLoc loc) {
-        if ( loc == null ) throw new IllegalArgumentException("Loc cannot be null");
-        if ( genomeLocation == null ) throw new IllegalStateException("Cannot trim a Haplotype without containing GenomeLoc");
-        if ( ! genomeLocation.containsP(loc) ) throw new IllegalArgumentException("Can only trim a Haplotype to a containing span.  My loc is " + genomeLocation + " but wanted trim to " + loc);
-        if ( getCigar() == null ) throw new IllegalArgumentException("Cannot trim haplotype without a cigar " + this);
-
-        final int newStart = loc.getStart() - this.genomeLocation.getStart();
-        final int newStop = newStart + loc.size() - 1;
-        final byte[] newBases = AlignmentUtils.getBasesCoveringRefInterval(newStart, newStop, getBases(), 0, getCigar());
-        final Cigar newCigar = AlignmentUtils.trimCigarByReference(getCigar(), newStart, newStop);
-
-        if ( newBases == null || AlignmentUtils.startsOrEndsWithInsertionOrDeletion(newCigar) )
-            // we cannot meaningfully chop down the haplotype, so return null
-            return null;
-
-        final Haplotype ret = new Haplotype(newBases, isReference());
-        ret.setCigar(newCigar);
-        ret.setGenomeLocation(loc);
-        ret.setAlignmentStartHapwrtRef(newStart + getAlignmentStartHapwrtRef());
-        return ret;
-    }
-
-    @Override
-    public boolean equals( Object h ) {
-        return h instanceof Haplotype && Arrays.equals(getBases(), ((Haplotype) h).getBases());
-    }
-
-    @Override
-    public int hashCode() {
-        return Arrays.hashCode(getBases());
-    }
-
-    public EventMap getEventMap() {
-        return eventMap;
-    }
-
-    public void setEventMap( final EventMap eventMap ) {
-        this.eventMap = eventMap;
-    }
-
-    @Override
-    public String toString() {
-        return getDisplayString();
-    }
-
-    /**
-     * Get the span of this haplotype (may be null)
-     * @return a potentially null genome loc
-     */
-    public GenomeLoc getGenomeLocation() {
-        return genomeLocation;
-    }
-
-    public void setGenomeLocation(GenomeLoc genomeLocation) {
-        this.genomeLocation = genomeLocation;
-    }
-
-    public long getStartPosition() {
-        return genomeLocation.getStart();
-    }
-
-    public long getStopPosition() {
-        return genomeLocation.getStop();
-    }
-
-    public int getAlignmentStartHapwrtRef() {
-        return alignmentStartHapwrtRef;
-    }
-
-    public void setAlignmentStartHapwrtRef( final int alignmentStartHapwrtRef ) {
-        this.alignmentStartHapwrtRef = alignmentStartHapwrtRef;
-    }
-
-    /**
-     * Get the cigar for this haplotype.  Note that the cigar is guaranteed to be consolidated
-     * in that multiple adjacent equal operates will have been merged
-     * @return the cigar of this haplotype
-     */
-    public Cigar getCigar() {
-        return cigar;
-    }
-
-    /**
-     * Get the haplotype cigar extended by padSize M at the tail, consolidated into a clean cigar
-     *
-     * @param padSize how many additional Ms should be appended to the end of this cigar.  Must be >= 0
-     * @return a newly allocated Cigar that consolidate(getCigar + padSize + M)
-     */
-    public Cigar getConsolidatedPaddedCigar(final int padSize) {
-        if ( padSize < 0 ) throw new IllegalArgumentException("padSize must be >= 0 but got " + padSize);
-        final Cigar extendedHaplotypeCigar = new Cigar(getCigar().getCigarElements());
-        if ( padSize > 0 ) extendedHaplotypeCigar.add(new CigarElement(padSize, CigarOperator.M));
-        return AlignmentUtils.consolidateCigar(extendedHaplotypeCigar);
-    }
-
-    /**
-     * Set the cigar of this haplotype to cigar.
-     *
-     * Note that this function consolidates the cigar, so that 1M1M1I1M1M => 2M1I2M
-     *
-     * @param cigar a cigar whose readLength == length()
-     */
-    public void setCigar( final Cigar cigar ) {
-        this.cigar = AlignmentUtils.consolidateCigar(cigar);
-        if ( this.cigar.getReadLength() != length() )
-            throw new IllegalArgumentException("Read length " + length() + " not equal to the read length of the cigar " + cigar.getReadLength() + " " + this.cigar);
-    }
-
-    @Requires({"refInsertLocation >= 0"})
-    public Haplotype insertAllele( final Allele refAllele, final Allele altAllele, final int refInsertLocation, final int genomicInsertLocation ) {
-        // refInsertLocation is in ref haplotype offset coordinates NOT genomic coordinates
-        final int haplotypeInsertLocation = ReadUtils.getReadCoordinateForReferenceCoordinate(alignmentStartHapwrtRef, cigar, refInsertLocation, ReadUtils.ClippingTail.RIGHT_TAIL, true);
-        final byte[] myBases = this.getBases();
-        if( haplotypeInsertLocation == -1 || haplotypeInsertLocation + refAllele.length() >= myBases.length ) { // desired change falls inside deletion so don't bother creating a new haplotype
-            return null;
-        }
-
-        byte[] newHaplotypeBases = new byte[]{};
-        newHaplotypeBases = ArrayUtils.addAll(newHaplotypeBases, ArrayUtils.subarray(myBases, 0, haplotypeInsertLocation)); // bases before the variant
-        newHaplotypeBases = ArrayUtils.addAll(newHaplotypeBases, altAllele.getBases()); // the alt allele of the variant
-        newHaplotypeBases = ArrayUtils.addAll(newHaplotypeBases, ArrayUtils.subarray(myBases, haplotypeInsertLocation + refAllele.length(), myBases.length)); // bases after the variant
-        return new Haplotype(newHaplotypeBases);
-    }
-
-    public static LinkedHashMap<Allele,Haplotype> makeHaplotypeListFromAlleles(final List<Allele> alleleList,
-                                                                               final int startPos,
-                                                                               final ReferenceContext ref,
-                                                                               final int haplotypeSize,
-                                                                               final int numPrefBases) {
-
-        LinkedHashMap<Allele,Haplotype> haplotypeMap = new LinkedHashMap<Allele,Haplotype>();
-
-        Allele refAllele = null;
-
-        for (Allele a:alleleList) {
-            if (a.isReference()) {
-                refAllele = a;
-                break;
-            }
-        }
-
-        if (refAllele == null)
-            throw new ReviewedGATKException("BUG: no ref alleles in input to makeHaplotypeListfrom Alleles at loc: "+ startPos);
-
-        final byte[] refBases = ref.getBases();
-
-        final int startIdxInReference = 1 + startPos - numPrefBases - ref.getWindow().getStart();
-        final String basesBeforeVariant = new String(Arrays.copyOfRange(refBases, startIdxInReference, startIdxInReference + numPrefBases));
-
-        // protect against long events that overrun available reference context
-        final int startAfter = Math.min(startIdxInReference + numPrefBases + refAllele.getBases().length - 1, refBases.length);
-        final String basesAfterVariant = new String(Arrays.copyOfRange(refBases, startAfter, refBases.length));
-
-        // Create location for all haplotypes
-        final int startLoc = ref.getWindow().getStart() + startIdxInReference;
-        final int stopLoc = startLoc + haplotypeSize-1;
-
-        final GenomeLoc locus = ref.getGenomeLocParser().createGenomeLoc(ref.getLocus().getContig(),startLoc,stopLoc);
-
-        for (final Allele a : alleleList) {
-
-            final byte[] alleleBases = a.getBases();
-            // use string concatenation
-            String haplotypeString = basesBeforeVariant + new String(Arrays.copyOfRange(alleleBases, 1, alleleBases.length)) + basesAfterVariant;
-            haplotypeString = haplotypeString.substring(0,haplotypeSize);
-
-            haplotypeMap.put(a,new Haplotype(haplotypeString.getBytes(), locus));
-        }
-
-        return haplotypeMap;
-    }
-
-    private static class Event {
-        public Allele ref;
-        public Allele alt;
-        public int pos;
-
-        public Event( final Allele ref, final Allele alt, final int pos ) {
-            this.ref = ref;
-            this.alt = alt;
-            this.pos = pos;
-        }
-    }
-
-    /**
-     * Get the score (an estimate of the support) of this haplotype
-     * @return a double, where higher values are better
-     */
-    public double getScore() {
-        return score;
-    }
-
-    /**
-     * Set the score (an estimate of the support) of this haplotype.
-     *
-     * Note that if this is the reference haplotype it is always given Double.MAX_VALUE score
-     *
-     * @param score a double, where higher values are better
-     */
-    public void setScore(double score) {
-        this.score = score;
-    }
-
-    /**
-     * Comparator used to sort haplotypes, alphanumerically.
-     *
-     * <p>
-     *     If one haplotype is the prefix of the other, the shorter one comes first.
-     * </p>
-     */
-    public static final Comparator<Haplotype> ALPHANUMERICAL_COMPARATOR = new Comparator<Haplotype>() {
-
-        @Override
-        public int compare(final Haplotype o1, final Haplotype o2) {
-            if (o1 == o2)
-                return 0;
-            final byte[] bases1 = o1.getBases();
-            final byte[] bases2 = o2.getBases();
-            final int iLimit = Math.min(bases1.length, bases2.length);
-            for (int i = 0; i < iLimit; i++) {
-                final int cmp = Byte.compare(bases1[i], bases2[i]);
-                if (cmp != 0) return cmp;
-            }
-            if (bases1.length == bases2.length) return 0;
-            return (bases1.length > bases2.length) ? -1 : 1; // is a bit better to get the longest haplotypes first.
-        }
-    };
-
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/haplotype/HaplotypeBaseComparator.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/haplotype/HaplotypeBaseComparator.java
deleted file mode 100644
index 8d1dfff..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/haplotype/HaplotypeBaseComparator.java
+++ /dev/null
@@ -1,42 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.haplotype;
-
-import java.util.Comparator;
-
-/**
- * Compares two haplotypes in the lexicographic order of their bases
- *
- * User: depristo
- * Date: 3/29/13
- * Time: 11:09 AM
- */
-public class HaplotypeBaseComparator implements Comparator<Haplotype> {
-    @Override
-    public int compare( final Haplotype hap1, final Haplotype hap2 ) {
-        return hap1.getBaseString().compareTo(hap2.getBaseString());
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/haplotype/HaplotypeScoreComparator.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/haplotype/HaplotypeScoreComparator.java
deleted file mode 100644
index 7818d3e..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/haplotype/HaplotypeScoreComparator.java
+++ /dev/null
@@ -1,39 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.haplotype;
-
-import java.util.Comparator;
-
-/**
- * A comparator that sorts haplotypes in decreasing order of score, so that the best supported
- * haplotypes are at the top
- */
-public class HaplotypeScoreComparator implements Comparator<Haplotype> {
-    @Override
-    public int compare(Haplotype o1, Haplotype o2) {
-        return -1 * Double.valueOf(o1.getScore()).compareTo(o2.getScore());
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/haplotype/HaplotypeSizeAndBaseComparator.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/haplotype/HaplotypeSizeAndBaseComparator.java
deleted file mode 100644
index 4818068..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/haplotype/HaplotypeSizeAndBaseComparator.java
+++ /dev/null
@@ -1,47 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.haplotype;
-
-import java.util.Comparator;
-
-/**
- * Compares two haplotypes first by their lengths and then by lexicographic order of their bases.
- *
- * User: btaylor
- * Date: 8/1/13
- * Time: 11:09 AM
- */
-public class HaplotypeSizeAndBaseComparator implements Comparator<Haplotype> {
-    @Override
-    public int compare( final Haplotype hap1, final Haplotype hap2 ) {
-        if (hap1.getBases().length < hap2.getBases().length)
-            return -1;
-        else if (hap1.getBases().length > hap2.getBases().length)
-            return 1;
-        else
-            return hap1.getBaseString().compareTo(hap2.getBaseString());
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/help/ApplicationDetails.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/help/ApplicationDetails.java
deleted file mode 100644
index a0c7afb..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/help/ApplicationDetails.java
+++ /dev/null
@@ -1,95 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.help;
-
-import org.broadinstitute.gatk.utils.commandline.CommandLineProgram;
-import org.broadinstitute.gatk.utils.classloader.JVMUtils;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-
-import java.io.IOException;
-import java.util.Collections;
-import java.util.List;
-
-/**
- * Contains details additional details that the program can
- * supply about itself.
- *
- * @author hanna
- * @version 0.1
- */
-
-public class ApplicationDetails {
-    /**
-     * Retrieve key information about the application (name, who to contact for support, etc.).
-     */
-    final List<String> applicationHeader;
-
-    /**
-     * Stores additional attribution for a given walker.
-     */
-    final List<String> attribution;
-
-    /**
-     * Extract details covering exactly how to run this executable.
-     */
-    final String runningInstructions;
-
-    /**
-     * Additional help particular to this command-line application.
-     */
-    final String additionalHelp;
-
-    public ApplicationDetails( List<String> applicationHeader, List<String> attribution, String runningInstructions, String additionalHelp ) {
-        this.applicationHeader = applicationHeader;
-        this.attribution = attribution;
-        this.runningInstructions = runningInstructions;
-        this.additionalHelp = additionalHelp;
-    }
-
-    public static List<String> createDefaultHeader(Class<? extends CommandLineProgram> application) {
-        return Collections.singletonList("Program Name: " + application.getName());
-    }
-
-    public static String createDefaultRunningInstructions(Class<? extends CommandLineProgram> application) {
-        // Default implementation to find a command line that makes sense.
-        // If the user is running from a jar, return '-jar <jarname>'; otherwise
-        // return the full class name.
-        String runningInstructions = null;
-        try {
-            runningInstructions = JVMUtils.getLocationFor( application ).getName();
-        }
-        catch( IOException ex ) {
-            throw new ReviewedGATKException("Unable to determine running instructions", ex);
-        }
-
-        if( runningInstructions.endsWith(".jar") )
-            runningInstructions = String.format("-jar %s", runningInstructions);
-        else
-            runningInstructions = application.getName();
-
-        return runningInstructions;
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/help/DocletUtils.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/help/DocletUtils.java
deleted file mode 100644
index 324fcfc..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/help/DocletUtils.java
+++ /dev/null
@@ -1,76 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.help;
-
-import com.sun.javadoc.FieldDoc;
-import com.sun.javadoc.PackageDoc;
-import com.sun.javadoc.ProgramElementDoc;
-import org.broadinstitute.gatk.utils.classloader.JVMUtils;
-
-import java.lang.reflect.Field;
-
-/**
- * Methods in the class must ONLY be used by doclets, since the com.sun.javadoc.* classes are not
- * available on all systems, and we don't want the GATK proper to depend on them.
- */
-public class DocletUtils {
-
-    protected static boolean assignableToClass(ProgramElementDoc classDoc, Class lhsClass, boolean requireConcrete) {
-        try {
-            Class type = getClassForDoc(classDoc);
-            return lhsClass.isAssignableFrom(type) && (!requireConcrete || JVMUtils.isConcrete(type));
-        } catch (Throwable t) {
-            // Ignore errors.
-            return false;
-        }
-    }
-
-    protected static Class getClassForDoc(ProgramElementDoc doc) throws ClassNotFoundException {
-        return Class.forName(getClassName(doc));
-    }
-
-    protected static Field getFieldForFieldDoc(FieldDoc fieldDoc) {
-        try {
-            Class clazz = getClassForDoc(fieldDoc.containingClass());
-            return JVMUtils.findField(clazz, fieldDoc.name());
-        } catch (ClassNotFoundException e) {
-            throw new RuntimeException(e);
-        }
-    }
-
-    /**
-     * Reconstitute the class name from the given class JavaDoc object.
-     *
-     * @param doc the Javadoc model for the given class.
-     * @return The (string) class name of the given class.
-     */
-    protected static String getClassName(ProgramElementDoc doc) {
-        PackageDoc containingPackage = doc.containingPackage();
-        return containingPackage.name().length() > 0 ?
-                String.format("%s.%s", containingPackage.name(), doc.name()) :
-                String.format("%s", doc.name());
-    }
-}
\ No newline at end of file
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/help/DocumentedGATKFeature.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/help/DocumentedGATKFeature.java
deleted file mode 100644
index eed95b4..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/help/DocumentedGATKFeature.java
+++ /dev/null
@@ -1,50 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.help;
-
-import java.lang.annotation.*;
-
-/**
- * An annotation to identify a class as a GATK capability for documentation
- *
- * @author depristo
- */
-@Documented
-@Inherited
-@Retention(RetentionPolicy.RUNTIME)
-@Target(ElementType.TYPE)
-public @interface DocumentedGATKFeature {
-    /** Should we actually document this feature, even though it's annotated? */
-    public boolean enable() default true;
-    /** The overall group name (walkers, readfilters) this feature is associated with */
-    public String groupName();
-    /** A human readable summary of the purpose of this group of features */
-    public String summary() default "";
-    /** Are there links to other docs that we should include?  CommandLineGATK.class for walkers, for example? */
-    public Class[] extraDocs() default {};
-    /** Who is the go-to developer for operation/documentation issues? */
-    public String gotoDev() default "NA";
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/help/DocumentedGATKFeatureHandler.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/help/DocumentedGATKFeatureHandler.java
deleted file mode 100644
index e81ab21..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/help/DocumentedGATKFeatureHandler.java
+++ /dev/null
@@ -1,99 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.help;
-
-import com.sun.javadoc.ClassDoc;
-import com.sun.javadoc.RootDoc;
-
-import java.io.*;
-import java.util.Set;
-
-/**
- * Extend this class to provide a documentation handler for GATKdocs
- */
-public abstract class DocumentedGATKFeatureHandler {
-    private GATKDoclet doclet;
-
-    /**
-     * @return the javadoc RootDoc of this javadoc run
-     */
-    protected RootDoc getRootDoc() {
-        return this.doclet.rootDoc;
-    }
-
-    /** Set the master doclet driving this handler */
-    public void setDoclet(GATKDoclet doclet) {
-        this.doclet = doclet;
-    }
-
-    /**
-     * @return the GATKDoclet driving this documentation run
-     */
-    public GATKDoclet getDoclet() {
-        return doclet;
-    }
-
-    /**
-     * Should return false iff this handler wants GATKDoclet to skip documenting
-     * this ClassDoc.
-     * @param doc that is being considered for inclusion in the docs
-     * @return true if the doclet should document ClassDoc doc
-     */
-    public boolean includeInDocs(ClassDoc doc) { return true; }
-
-    /**
-     * Return the flat filename (no paths) that the handler would like the Doclet to
-     * write out the documentation for ClassDoc doc and its associated Class clazz
-     * @param doc
-     * @param clazz
-     * @return
-     */
-    public String getDestinationFilename(ClassDoc doc, Class clazz) {
-        return GATKDocUtils.phpFilenameForClass(clazz);
-    }
-
-    /**
-     * Return the name of the FreeMarker template we will use to process ClassDoc doc.
-     *
-     * Note this is a flat filename relative to settings/helpTemplates in the GATK source tree
-     * @param doc
-     * @return
-     * @throws IOException
-     */
-    public abstract String getTemplateName(ClassDoc doc) throws IOException;
-
-    /**
-     * Actually generate the documentation map associated with toProcess
-     *
-     * Can use all to provide references and rootDoc for additional information, if necessary.
-     * Implementing methods should end with a call to setHandlerContext on toProcess, as in:
-     *
-     * toProcess.setHandlerContent(summary, rootMap);
-     *
-     * @param toProcess
-     */
-    public abstract void processOne(GATKDocWorkUnit toProcess);
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/help/DocumentedGATKFeatureObject.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/help/DocumentedGATKFeatureObject.java
deleted file mode 100644
index 45f0c14..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/help/DocumentedGATKFeatureObject.java
+++ /dev/null
@@ -1,61 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.help;
-
-/**
- * Documentation unit.  Effectively a class version of the DocumentedGATKFeature.
- * Immutable data structure.
- *
- * @author depristo
- */
-class DocumentedGATKFeatureObject {
-    /** Which class are we documenting.  Specific to each class being documented */
-    private final Class classToDoc;
-    /** Are we enabled? */
-    private final boolean enable;
-    private final String groupName, summary, gotoDev;
-    private final Class[] extraDocs;
-
-    public DocumentedGATKFeatureObject(Class classToDoc, final boolean enable, final String groupName, final String summary, final Class[] extraDocs, final String gotoDev) {
-        this.classToDoc = classToDoc;
-        this.enable = enable;
-        this.groupName = groupName;
-        this.summary = summary;
-        this.extraDocs = extraDocs;
-        this.gotoDev = gotoDev;
-    }
-
-    public DocumentedGATKFeatureObject(Class classToDoc, final String groupName, final String summary, final String gotoDev) {
-        this(classToDoc, true, groupName, summary, new Class[]{}, gotoDev);
-    }
-
-    public Class getClassToDoc() { return classToDoc; }
-    public boolean enable() { return enable; }
-    public String groupName() { return groupName; }
-    public String summary() { return summary; }
-    public Class[] extraDocs() { return extraDocs; }
-    public String gotoDev() { return gotoDev; }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/help/ForumAPIUtils.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/help/ForumAPIUtils.java
deleted file mode 100644
index fbf6528..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/help/ForumAPIUtils.java
+++ /dev/null
@@ -1,173 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.help;
-
-import com.google.gson.Gson;
-import org.apache.commons.io.IOUtils;
-import org.apache.http.HttpResponse;
-import org.apache.http.client.ClientProtocolException;
-import org.apache.http.client.methods.HttpGet;
-import org.apache.http.client.methods.HttpPost;
-import org.apache.http.entity.StringEntity;
-import org.apache.http.impl.client.DefaultHttpClient;
-
-import java.io.BufferedReader;
-import java.io.IOException;
-import java.io.InputStreamReader;
-import java.net.MalformedURLException;
-import java.util.ArrayList;
-import java.util.List;
-
-public class ForumAPIUtils {
-    /**
-     * How we post to the forum
-     */
-    final private static String ACCESS_TOKEN = "access_token=";
-
-    public static List<String> getPostedTools(String forumKey) {
-        Gson gson = new Gson();
-        List<String> output = new ArrayList<String>();
-
-        String text = httpGet(HelpConstants.GATK_FORUM_API_URL + "categories.json?CategoryIdentifier=tool-bulletin&page=1-100000&" + ACCESS_TOKEN + forumKey);
-
-        APIQuery details = gson.fromJson(text, APIQuery.class);
-        ForumDiscussion[] discussions = details.Discussions;
-
-        for (ForumDiscussion post : discussions) {
-            output.add(post.Name);
-        }
-
-        /*
-        System.out.println(details.isJsonArray());
-        System.out.println(details.isJsonNull());
-        System.out.println(details.isJsonObject());
-        System.out.println(details.isJsonPrimitive());
-
-        JsonArray posted = details.getAsJsonPrimitive().get("Discussions").getAsJsonArray();
-
-        for ( JsonElement post : posted ) {
-            output.add( post.getAsJsonObject().get("Name").getAsString());
-        }
-        */
-        return output;
-    }
-
-
-    private static String httpGet(String urlStr) {
-        String output = "";
-
-        try {
-
-            DefaultHttpClient httpClient = new DefaultHttpClient();
-            HttpGet getRequest = new HttpGet(urlStr);
-            getRequest.addHeader("accept", "application/json");
-
-            HttpResponse response = httpClient.execute(getRequest);
-
-            if (response.getStatusLine().getStatusCode() != 200) {
-                throw new RuntimeException("Failed : HTTP error code : "
-                        + response.getStatusLine().getStatusCode());
-            }
-
-            output = IOUtils.toString(response.getEntity().getContent());
-
-            httpClient.getConnectionManager().shutdown();
-
-        } catch (ClientProtocolException e) {
-
-            e.printStackTrace();
-
-        } catch (IOException e) {
-
-            e.printStackTrace();
-        }
-        return output;
-    }
-
-    private static String httpPost(String data, String URL) {
-        try {
-
-            DefaultHttpClient httpClient = new DefaultHttpClient();
-            HttpPost postRequest = new HttpPost(URL);
-
-            StringEntity input = new StringEntity(data);
-            input.setContentType("application/json");
-            postRequest.setEntity(input);
-
-            HttpResponse response = httpClient.execute(postRequest);
-
-            if (response.getStatusLine().getStatusCode() != 200) {
-                throw new RuntimeException("Failed : HTTP error code : "
-                        + response.getStatusLine().getStatusCode());
-            }
-
-            BufferedReader br = new BufferedReader(
-                    new InputStreamReader((response.getEntity().getContent())));
-
-            String output = "";
-            String line;
-            System.out.println("Output from Server .... \n");
-            while ((line = br.readLine()) != null) {
-                output += (line + '\n');
-                System.out.println(line);
-            }
-
-            br.close();
-            httpClient.getConnectionManager().shutdown();
-            return output;
-
-        } catch (MalformedURLException e) {
-
-            e.printStackTrace();
-
-        } catch (IOException e) {
-
-            e.printStackTrace();
-
-        }
-        return null;
-    }
-
-    public static void postToForum(GATKDocWorkUnit tool, final String forumKey) {
-
-
-        ForumDiscussion post = new ForumDiscussion(tool);
-
-        Gson gson = new Gson();
-
-        String data = gson.toJson(post.getPostData());
-        httpPost(data, HelpConstants.GATK_FORUM_API_URL + "post/discussion.json?" + ACCESS_TOKEN + forumKey);
-
-
-    }
-
-    class APIQuery {
-        ForumDiscussion[] Discussions;
-
-        public APIQuery() {}
-    }
-
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/help/ForumDiscussion.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/help/ForumDiscussion.java
deleted file mode 100644
index 7b95b50..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/help/ForumDiscussion.java
+++ /dev/null
@@ -1,84 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.help;
-
-import java.util.HashMap;
-import java.util.Map;
-
-class ForumDiscussion {
-
-    final private static String POST_TEMPLATE = "<p>A new tool has been released!</p><p>Check out the documentation at <a href='%s'>%s</a>.</p>";
-
-    final int Announce;
-    final String Body;
-    final String Category;
-    final int Closed;
-    final String Format;
-    final String Name;
-    final int Sink;
-    final String Tags;
-    final String Type;
-
-    public ForumDiscussion(String name, String body, String format, String category,
-                           String tagsCSV, String type, int closed, int announce, int sink) {
-        this.Name = name;
-        this.Body = body;
-        this.Format = format;
-        this.Category = category;
-        this.Tags = tagsCSV;
-        this.Type = type;
-        this.Closed = closed;
-        this.Announce = announce;
-        this.Sink = sink;
-    }
-
-    public ForumDiscussion(GATKDocWorkUnit tool) {
-        this(tool.name,
-                String.format(POST_TEMPLATE, GATKDocUtils.URL_ROOT_FOR_RELEASE_GATKDOCS + tool.filename, tool.name),
-                "Html", "tool-bulletin", tool.name + "," + tool.group + ",gatkdocs", "Discussion", 0, -1, -1);
-    }
-
-    public Map<String, String> getPostData() {
-        Map<String, String> output = new HashMap<String, String>();
-
-        output.put("Name", Name);
-        output.put("Body", Body);
-        output.put("Format", Format);
-        output.put("Category", Category);
-        if (Tags != null)
-            output.put("Tags", Tags);
-        if (Type != null)
-            output.put("Type", Type);
-        if (Closed != -1)
-            output.put("Closed", Closed == 1 ? "1" : "0");
-        if (Announce != -1)
-            output.put("Announce", Announce == 1 ? "1" : "0");
-        if (Sink != -1)
-            output.put("Sink", Sink == 1 ? "1" : "0");
-
-        return output;
-    }
-}
\ No newline at end of file
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/help/GATKDocUtils.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/help/GATKDocUtils.java
deleted file mode 100644
index 72aba4d..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/help/GATKDocUtils.java
+++ /dev/null
@@ -1,71 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.help;
-
-public class GATKDocUtils {
-    /**
-     * The URL root for RELEASED GATKDOC units
-     */
-    public final static String URL_ROOT_FOR_RELEASE_GATKDOCS = HelpConstants.GATK_DOCS_URL;
-    /**
-     * The URL root for STABLE GATKDOC units             //TODO: do sthing with this or remove -- URL goes nowhere
-     */
-    public final static String URL_ROOT_FOR_STABLE_GATKDOCS = "http://iwww.broadinstitute.org/gsa/gatkdocs/stable/";
-    /**
-     * The URL root for UNSTABLE GATKDOC units           //TODO: do sthing with this or remove -- URL goes nowhere
-     */
-    public final static String URL_ROOT_FOR_UNSTABLE_GATKDOCS = "http://iwww.broadinstitute.org/gsa/gatkdocs/unstable/";
-
-    /**
-     * Return the filename of the GATKDoc PHP that would be generated for Class.  This
-     * does not guarantee that the docs exist, or that docs would actually be generated
-     * for class (might not be annotated for documentation, for example).  But if
-     * this class is documented, GATKDocs will write the docs to a file named as returned
-     * by this function.
-     *
-     * @param c
-     * @return
-     */
-    public static String phpFilenameForClass(Class c) {
-        return c.getName().replace(".", "_") + ".php";
-    }
-
-    /**
-     * Returns a full URL http://etc/ linking to the documentation for class (assuming it
-     * exists).  Currently points to the RELEASE doc path only.     //TODO: do sthing with other paths or remove ?
-     *
-     * @param c
-     * @return
-     */
-    public static String helpLinksToGATKDocs(Class c) {
-        String classPath = phpFilenameForClass(c);
-        StringBuilder b = new StringBuilder();
-        b.append(URL_ROOT_FOR_RELEASE_GATKDOCS).append(classPath);
-        //b.append("stable   version: ").append(URL_ROOT_FOR_STABLE_GATKDOCS).append(classPath).append("\n");
-        //b.append("unstable version: ").append(URL_ROOT_FOR_UNSTABLE_GATKDOCS).append(classPath).append("\n");
-        return b.toString();
-    }
-}
\ No newline at end of file
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/help/GATKDocWorkUnit.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/help/GATKDocWorkUnit.java
deleted file mode 100644
index 005d900..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/help/GATKDocWorkUnit.java
+++ /dev/null
@@ -1,127 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.help;
-
-import com.sun.javadoc.ClassDoc;
-
-import java.util.HashMap;
-import java.util.Map;
-
-/**
- * Simple collection of all relevant information about something the GATKDoclet can document
- * <p/>
- * Created by IntelliJ IDEA.
- * User: depristo
- * Date: 7/24/11
- * Time: 7:59 PM
- */
-class GATKDocWorkUnit implements Comparable<GATKDocWorkUnit> {
-    /**
-     * The class that's being documented
-     */
-    final Class clazz;
-    /**
-     * The name of the thing we are documenting
-     */
-    final String name;
-    /**
-     * the filename where we will be writing the docs for this class
-     */
-    final String filename;
-    /**
-     * The name of the documentation group (e.g., walkers, read filters) class belongs to
-     */
-    final String group;
-    /**
-     * The documentation handler for this class
-     */
-    final DocumentedGATKFeatureHandler handler;
-    /**
-     * The javadoc documentation for clazz
-     */
-    final ClassDoc classDoc;
-    /**
-     * The annotation that lead to this Class being in GATKDoc
-     */
-    final DocumentedGATKFeatureObject annotation;
-    /**
-     * When was this walker built, and what's the absolute version number
-     */
-    final String buildTimestamp, absoluteVersion;
-
-    // set by the handler
-    String summary;
-    Map<String, Object> forTemplate; // this is where the actual doc content gets stored
-
-    public GATKDocWorkUnit(String name, String filename, String group, DocumentedGATKFeatureObject annotation,
-                           DocumentedGATKFeatureHandler handler, ClassDoc classDoc, Class clazz,
-                           String buildTimestamp, String absoluteVersion) {
-        this.annotation = annotation;
-        this.name = name;
-        this.filename = filename;
-        this.group = group;
-        this.handler = handler;
-        this.classDoc = classDoc;
-        this.clazz = clazz;
-        this.buildTimestamp = buildTimestamp;
-        this.absoluteVersion = absoluteVersion;
-    }
-
-    /**
-     * Called by the GATKDoclet to set handler provided context for this work unit
-     *
-     * @param summary
-     * @param forTemplate
-     */
-    public void setHandlerContent(String summary, Map<String, Object> forTemplate) {
-        this.summary = summary;
-        this.forTemplate = forTemplate;
-    }
-
-    /**
-     * Return a String -> String map suitable for FreeMarker to create an index to this WorkUnit
-     *
-     * @return
-     */
-    public Map<String, String> indexDataMap() {
-        Map<String, String> data = new HashMap<String, String>();
-        data.put("name", name);
-        data.put("summary", summary);
-        data.put("filename", filename);
-        data.put("group", group);
-        return data;
-    }
-
-    /**
-     * Sort in order of the name of this WorkUnit
-     *
-     * @param other
-     * @return
-     */
-    public int compareTo(GATKDocWorkUnit other) {
-        return this.name.compareTo(other.name);
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/help/GATKDoclet.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/help/GATKDoclet.java
deleted file mode 100644
index bd03add..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/help/GATKDoclet.java
+++ /dev/null
@@ -1,576 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.help;
-
-import com.google.gson.ExclusionStrategy;
-import com.google.gson.FieldAttributes;
-import com.google.gson.Gson;
-import com.google.gson.GsonBuilder;
-import com.google.gson.annotations.Expose;
-import com.google.gson.stream.JsonWriter;
-import com.sun.javadoc.ClassDoc;
-import com.sun.javadoc.RootDoc;
-import freemarker.template.Configuration;
-import freemarker.template.DefaultObjectWrapper;
-import freemarker.template.Template;
-import freemarker.template.TemplateException;
-import org.apache.commons.io.FileUtils;
-import org.apache.log4j.Level;
-import org.apache.log4j.Logger;
-import htsjdk.tribble.FeatureCodec;
-import org.broadinstitute.gatk.engine.CommandLineGATK;
-import org.broadinstitute.gatk.tools.walkers.qc.DocumentationTest;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-import org.broadinstitute.gatk.utils.exceptions.UserException;
-import org.broadinstitute.gatk.utils.text.XReadLines;
-
-import java.io.*;
-import java.util.*;
-
-/**
- * Javadoc Doclet that combines javadoc, GATK ParsingEngine annotations, and FreeMarker
- * templates to produce PHP formatted GATKDocs for walkers
- * and other classes.
- * <p/>
- * This document has the following workflow:
- * <p/>
- * 1 -- walk the javadoc hierarchy, looking for class that have the
- * DocumentedGATKFeature annotation or are in the type hierarchy in the
- * static list of things to document, and are to be documented
- * 2 -- construct for each a GATKDocWorkUnit, resulting in the complete
- * set of things to document
- * 3 -- for each unit, actually generate a PHP page documenting it
- * as well as links to related features via their units.  Writing
- * of a specific class PHP is accomplished by a generate DocumentationHandler
- * 4 -- write out an index of all units, organized by group
- * 5 -- emit JSON version of GATKDocs using Google GSON (currently incomplete but workable)
- * <p/>
- * The documented classes are restricted to only those with @DocumentedGATKFeature
- * annotation or are in the STATIC_DOCS class.
- */
-public class GATKDoclet {
-    final protected static Logger logger = Logger.getLogger(GATKDoclet.class);
-
-    /**
-     * Where we find the help FreeMarker templates
-     */
-    final protected static File SETTINGS_DIR = new File("settings/helpTemplates");
-
-    /**
-     * Where we write the GATKDoc PHP directory
-     */
-    final protected static File DESTINATION_DIR = new File("gatkdocs");
-
-    final private static String FORUM_KEY_PATH = "/local/gsa-engineering/gatkdocs_publisher/forum.key";
-    // ----------------------------------------------------------------------
-    //
-    // Global variables that are set on the command line by javadoc
-    //
-    // ----------------------------------------------------------------------
-    protected static File settingsDir = SETTINGS_DIR;
-    protected static File destinationDir = DESTINATION_DIR;
-    protected static String forumKeyPath = FORUM_KEY_PATH;
-    protected static String buildTimestamp = null, absoluteVersion = null;
-    protected static boolean showHiddenFeatures = false;
-
-    protected static boolean testOnly = false;
-
-    /**
-     * Any class that's in this list will be included in the documentation
-     * when the -test argument is provided.  Useful for debugging.
-     */
-    private static final List<Class<?>> testOnlyKeepers = Arrays.asList(
-            DocumentationTest.class, CommandLineGATK.class, UserException.class);
-
-    /**
-     * The javadoc root doc
-     */
-    RootDoc rootDoc;
-
-    /**
-     * The set of all things we are going to document
-     */
-    Set<GATKDocWorkUnit> myWorkUnits;
-
-    /**
-     * A static list of DocumentedGATKFeatureObjects.  Any class that is as or extends
-     * one of the DocumentedGATKFeatureObjects.clazz of this collection will also
-     * be documented, even if it doesn't have the @DocumentedGATKFeature annotation.  Useful
-     * when you want to document things that implement an interface (annotations on java
-     * interfaces aren't inherited) or whose base class isn't under your control (tribble
-     * codecs).
-     */
-    final static Collection<DocumentedGATKFeatureObject> STATIC_DOCS = new ArrayList<DocumentedGATKFeatureObject>();
-
-    static {
-        STATIC_DOCS.add(new DocumentedGATKFeatureObject(FeatureCodec.class,
-                HelpConstants.DOCS_CAT_RODCODECS,
-                "Tribble codecs for reading reference ordered data (ROD) files such as VCF or BED",
-                "NA"));
-    }
-
-    /**
-     * Extracts the contents of certain types of javadoc and adds them to an XML file.
-     *
-     * @param rootDoc The documentation root.
-     * @return Whether the JavaDoc run succeeded.
-     * @throws java.io.IOException if output can't be written.
-     */
-    public static boolean start(RootDoc rootDoc) throws IOException {
-        logger.setLevel(Level.INFO);
-
-        // load arguments
-        for (String[] options : rootDoc.options()) {
-            if (options[0].equals("-settings-dir"))
-                settingsDir = new File(options[1]);
-            if (options[0].equals("-destination-dir"))
-                destinationDir = new File(options[1]);
-            if (options[0].equals("-forum-key-path"))
-                forumKeyPath = options[1];
-            if (options[0].equals("-build-timestamp"))
-                buildTimestamp = options[1];
-            if (options[0].equals("-absolute-version"))
-                absoluteVersion = options[1];
-            if (options[0].equals("-include-hidden"))
-                showHiddenFeatures = true;
-            if (options[0].equals("-test"))
-                testOnly = true;
-        }
-
-        if (!settingsDir.exists())
-            throw new RuntimeException("-settings-dir " + settingsDir.getPath() + " does not exist");
-        else if (!settingsDir.isDirectory())
-            throw new RuntimeException("-settings-dir " + settingsDir.getPath() + " is not a directory");
-
-        // process the docs
-        new GATKDoclet().processDocs(rootDoc);
-
-        return true;
-    }
-
-    /**
-     * Validate the given options against options supported by this doclet.
-     *
-     * @param option Option to validate.
-     * @return Number of potential parameters; 0 if not supported.
-     */
-    public static int optionLength(String option) {
-        if (option.equals("-settings-dir") ||
-                option.equals("-destination-dir") ||
-                option.equals("-forum-key-path") ||
-                option.equals("-build-timestamp") ||
-                option.equals("-absolute-version") ||
-                option.equals("-include-hidden")) {
-            return 2;
-        } else if (option.equals("-test"))
-            return 1;
-        else
-            return 0;
-    }
-
-    /**
-     * Are we supposed to include @Hidden annotations in our documented output?
-     *
-     * @return
-     */
-    public boolean showHiddenFeatures() {
-        return showHiddenFeatures;
-    }
-
-    /**
-     * @param rootDoc
-     */
-    private void processDocs(RootDoc rootDoc) {
-        // setup the global access to the root
-        this.rootDoc = rootDoc;
-
-        try {
-            // print the Version number
-            FileUtils.writeByteArrayToFile(new File(destinationDir + "/current.version.txt"), getSimpleVersion(absoluteVersion).getBytes());
-
-            /* ------------------------------------------------------------------- */
-            /* You should do this ONLY ONCE in the whole application life-cycle:   */
-
-            Configuration cfg = new Configuration();
-            // Specify the data source where the template files come from.
-            cfg.setDirectoryForTemplateLoading(settingsDir);
-            // Specify how templates will see the data-model. This is an advanced topic...
-            cfg.setObjectWrapper(new DefaultObjectWrapper());
-
-            myWorkUnits = computeWorkUnits();
-
-            List<Map<String, String>> groups = new ArrayList<Map<String, String>>();
-            Set<String> seenDocumentationFeatures = new HashSet<String>();
-            List<Map<String, String>> data = new ArrayList<Map<String, String>>();
-            for (GATKDocWorkUnit workUnit : myWorkUnits) {
-                data.add(workUnit.indexDataMap());
-                if (!seenDocumentationFeatures.contains(workUnit.annotation.groupName())) {
-                    groups.add(toMap(workUnit.annotation));
-                    seenDocumentationFeatures.add(workUnit.annotation.groupName());
-                }
-            }
-
-            for (GATKDocWorkUnit workUnit : myWorkUnits) {
-                processDocWorkUnit(cfg, workUnit, groups, data);
-            }
-
-            processIndex(cfg, new ArrayList<GATKDocWorkUnit>(myWorkUnits));
-
-            File forumKeyFile = new File(forumKeyPath);
-            if (forumKeyFile.exists()) {
-                String forumKey = null;
-                // Read in a one-line file so we can do a for loop
-                for (String line : new XReadLines(forumKeyFile))
-                    forumKey = line;
-                updateForum(myWorkUnits, forumKey);
-            }
-        } catch (FileNotFoundException e) {
-            throw new RuntimeException(e);
-        } catch (IOException e) {
-            throw new RuntimeException(e);
-        }
-    }
-
-    private void updateForum(Set<GATKDocWorkUnit> docWorkUnits, String forumKey) {
-        //first get list of posts that need to be added
-        List<String> old = ForumAPIUtils.getPostedTools(forumKey);
-
-        for (String s : old)
-            System.out.println(s);
-
-        System.out.printf("Forum has %d items%n", old.size());
-        System.out.printf("Docs have %d items%n", docWorkUnits.size());
-
-        List<GATKDocWorkUnit> toAdd = new ArrayList<GATKDocWorkUnit>();
-        for (GATKDocWorkUnit tool : docWorkUnits) {
-            if (!old.contains(tool.name)) {
-                System.out.println("WILL POST: " + tool.name + " TO FORUM");
-                toAdd.add(tool);
-            }
-        }
-
-        //update using list
-        for (GATKDocWorkUnit tool : toAdd) {
-            //if ( tool.name.equals("ApplyRecalibration") )
-            ForumAPIUtils.postToForum(tool, forumKey);
-        }
-    }
-
-    /**
-     * Returns the set of all GATKDocWorkUnits that we are going to generate docs for.
-     *
-     * @return
-     */
-    private Set<GATKDocWorkUnit> computeWorkUnits() {
-        TreeSet<GATKDocWorkUnit> m = new TreeSet<GATKDocWorkUnit>();
-
-        for (ClassDoc doc : rootDoc.classes()) {
-            //logger.debug("Considering " + doc);
-            Class clazz = getClassForClassDoc(doc);
-
-            // don't add anything that's not DocumentationTest if we are in test mode
-            if (clazz != null && testOnly && !testOnlyKeepers.contains(clazz))
-                continue;
-
-            //if ( clazz != null && clazz.getName().equals("org.broadinstitute.gatk.tools.walkers.annotator.AlleleBalance"))
-            //    logger.debug("foo");
-
-            DocumentedGATKFeatureObject feature = getFeatureForClassDoc(doc);
-            DocumentedGATKFeatureHandler handler = createHandler(doc, feature);
-            if (handler != null && handler.includeInDocs(doc)) {
-                //logger.info("Generating documentation for class " + doc);
-                String filename = handler.getDestinationFilename(doc, clazz);
-                GATKDocWorkUnit unit = new GATKDocWorkUnit(doc.name(),
-                        filename, feature.groupName(), feature, handler, doc, clazz,
-                        buildTimestamp, absoluteVersion);
-                m.add(unit);
-            }
-        }
-
-        return m;
-    }
-
-    /**
-     * Create a handler capable of documenting the class doc according to feature.  Returns
-     * null if no appropriate handler is found or doc shouldn't be documented at all.
-     *
-     * @param doc
-     * @param feature
-     * @return
-     */
-    private DocumentedGATKFeatureHandler createHandler(ClassDoc doc, DocumentedGATKFeatureObject feature) {
-        if (feature != null) {
-            if (feature.enable()) {
-                DocumentedGATKFeatureHandler handler = new GenericDocumentationHandler();
-                handler.setDoclet(this);
-                return handler;
-            } else {
-                logger.info("Skipping disabled Documentation for " + doc);
-            }
-        }
-
-        return null;
-    }
-
-    /**
-     * Returns the instantiated DocumentedGATKFeatureObject that describes the GATKDoc
-     * structure we will apply to Doc.
-     *
-     * @param doc
-     * @return null if this proves inappropriate or doc shouldn't be documented
-     */
-    private DocumentedGATKFeatureObject getFeatureForClassDoc(ClassDoc doc) {
-        Class<? extends Object> docClass = getClassForClassDoc(doc);
-
-        if (docClass == null)
-            return null; // not annotated so it shouldn't be documented
-
-        if (docClass.isAnnotationPresent(DocumentedGATKFeature.class)) {
-            DocumentedGATKFeature f = docClass.getAnnotation(DocumentedGATKFeature.class);
-            return new DocumentedGATKFeatureObject(docClass, f.enable(), f.groupName(), f.summary(), f.extraDocs(), f.gotoDev());
-        } else {
-            for (DocumentedGATKFeatureObject staticDocs : STATIC_DOCS) {
-                if (staticDocs.getClassToDoc().isAssignableFrom(docClass)) {
-                    return new DocumentedGATKFeatureObject(docClass, staticDocs.enable(), staticDocs.groupName(), staticDocs.summary(), staticDocs.extraDocs(), staticDocs.gotoDev());
-                }
-            }
-            return null;
-        }
-    }
-
-    /**
-     * Return the Java class described by the ClassDoc doc
-     *
-     * @param doc
-     * @return
-     */
-    private Class<? extends Object> getClassForClassDoc(ClassDoc doc) {
-        try {
-            // todo -- what do I need the ? extends Object to pass the compiler?
-            return (Class<? extends Object>) DocletUtils.getClassForDoc(doc);
-        } catch (ClassNotFoundException e) {
-            //logger.warn("Couldn't find class for ClassDoc " + doc);
-            // we got a classdoc for a class we can't find.  Maybe in a library or something
-            return null;
-        } catch (NoClassDefFoundError e) {
-            return null;
-        } catch (UnsatisfiedLinkError e) {
-            return null; // naughty BWA bindings
-        }
-    }
-
-    /**
-     * Create the php index listing all of the GATKDocs features
-     *
-     * @param cfg
-     * @param indexData
-     * @throws IOException
-     */
-    private void processIndex(Configuration cfg, List<GATKDocWorkUnit> indexData) throws IOException {
-        /* Get or create a template */
-        Template temp = cfg.getTemplate("generic.index.template.html");
-
-        /* Merge data-model with template */
-        Writer out = new OutputStreamWriter(new FileOutputStream(new File(destinationDir + "/index.php")));
-        try {
-            temp.process(groupIndexData(indexData), out);
-            out.flush();
-        } catch (TemplateException e) {
-            throw new ReviewedGATKException("Failed to create GATK documentation", e);
-        }
-    }
-
-    /**
-     * Helpful function to create the php index.  Given all of the already run GATKDocWorkUnits,
-     * create the high-level grouping data listing individual features by group.
-     *
-     * @param indexData
-     * @return
-     */
-    private Map<String, Object> groupIndexData(List<GATKDocWorkUnit> indexData) {
-        //
-        // root -> data -> { summary -> y, filename -> z }, etc
-        //      -> groups -> group1, group2, etc.
-        Map<String, Object> root = new HashMap<String, Object>();
-
-
-        Collections.sort(indexData);
-
-        List<Map<String, String>> groups = new ArrayList<Map<String, String>>();
-        Set<String> seenDocumentationFeatures = new HashSet<String>();
-        List<Map<String, String>> data = new ArrayList<Map<String, String>>();
-        for (GATKDocWorkUnit workUnit : indexData) {
-            data.add(workUnit.indexDataMap());
-            if (!seenDocumentationFeatures.contains(workUnit.annotation.groupName())) {
-                groups.add(toMap(workUnit.annotation));
-                seenDocumentationFeatures.add(workUnit.annotation.groupName());
-            }
-        }
-
-        //System.out.printf(groups.toString());
-
-        root.put("data", data);
-        root.put("groups", groups);
-        root.put("timestamp", buildTimestamp);
-        root.put("version", absoluteVersion);
-
-        return root;
-    }
-
-    /**
-     * Trivial helper routine that returns the map of name and summary given the annotation
-     * AND adds a super-category so that we can custom-order the categories in the index
-     *
-     * @param annotation
-     * @return
-     */
-    private static final Map<String, String> toMap(DocumentedGATKFeatureObject annotation) {
-        Map<String, String> root = new HashMap<String, String>();
-        root.put("id", annotation.groupName().replaceAll("\\W", ""));
-        root.put("name", annotation.groupName());
-        root.put("summary", annotation.summary());
-
-        /**
-         * Add-on super-category definitions. The assignments depend on parsing the names
-         * defined in HelpConstants.java so be careful of changing anything.
-         * Also, the super-category value strings need to be the same as used in the
-         * Freemarker template. This is all fairly clunky but the best I could do without
-         * making major changes to the DocumentedGATKFeatureObject. Doesn't help that
-         * Freemarker makes any scripting horribly awkward.
-         */
-        final String supercatValue;
-        if (annotation.groupName().endsWith(" Tools")) supercatValue = "tools";
-        else if (annotation.groupName().endsWith(" Utilities")) supercatValue = "utilities";
-        else if (annotation.groupName().startsWith("Engine ")) supercatValue = "engine";
-        else if (annotation.groupName().endsWith(" (DevZone)")) supercatValue = "dev";
-        else supercatValue = "other";
-
-        root.put("supercat", supercatValue);
-
-        return root;
-    }
-
-    /**
-     * Helper function that finding the GATKDocWorkUnit associated with class from among all of the work units
-     *
-     * @param c the class we are looking for
-     * @return the GATKDocWorkUnit whose .clazz.equals(c), or null if none could be found
-     */
-    public final GATKDocWorkUnit findWorkUnitForClass(Class c) {
-        for (final GATKDocWorkUnit unit : this.myWorkUnits)
-            if (unit.clazz.equals(c))
-                return unit;
-        return null;
-    }
-
-    /**
-     * Return the ClassDoc associated with clazz
-     *
-     * @param clazz
-     * @return
-     */
-    public ClassDoc getClassDocForClass(Class clazz) {
-        return rootDoc.classNamed(clazz.getName());
-    }
-
-    /**
-     * High-level function that processes a single DocWorkUnit unit using its handler
-     *
-     * @param cfg
-     * @param unit
-     * @param data
-     * @throws IOException
-     */
-    private void processDocWorkUnit(Configuration cfg, GATKDocWorkUnit unit, List<Map<String, String>> groups, List<Map<String, String>> data)
-            throws IOException {
-        //System.out.printf("Processing documentation for class %s%n", unit.classDoc);
-        unit.handler.processOne(unit);
-        unit.forTemplate.put("groups", groups);
-        unit.forTemplate.put("data", data);
-        // Get or create a template
-        Template temp = cfg.getTemplate(unit.handler.getTemplateName(unit.classDoc));
-
-        // Merge data-model with template
-        File outputPath = new File(destinationDir + "/" + unit.filename);
-        try {
-            Writer out = new OutputStreamWriter(new FileOutputStream(outputPath));
-            temp.process(unit.forTemplate, out);
-            out.flush();
-        } catch (TemplateException e) {
-            throw new ReviewedGATKException("Failed to create GATK documentation", e);
-        }
-
-        // Create GSON-friendly object from unit.forTemplate
-        GSONWorkUnit gsonworkunit = new GSONWorkUnit();
-        gsonworkunit.populate(  unit.forTemplate.get("summary").toString(),
-                                unit.forTemplate.get("parallel"),
-                                unit.forTemplate.get("activeregion"),
-                                unit.forTemplate.get("partitiontype").toString(),
-                                unit.forTemplate.get("walkertype").toString(),
-                                unit.forTemplate.get("gson-arguments"),
-                                unit.forTemplate.get("refwindow"),
-                                unit.forTemplate.get("description").toString(),
-                                unit.forTemplate.get("name").toString(),
-                                unit.forTemplate.get("annotinfo").toString(),
-                                unit.forTemplate.get("readfilters"),
-                                unit.forTemplate.get("downsampling"),
-                                unit.forTemplate.get("group").toString(),
-                                unit.forTemplate.get("annotfield").toString(),
-                                unit.forTemplate.get("annotdescript")
-        );
-
-        // Prepare to write JSON entry to file
-        File outputPathForJSON = new File(destinationDir + "/" + unit.filename + ".json");
-
-        try {
-            BufferedWriter outJSON = new BufferedWriter(new FileWriter(outputPathForJSON));
-            // Convert object to JSON
-            Gson gson = new GsonBuilder()
-                .serializeSpecialFloatingPointValues()
-                .setPrettyPrinting()
-                .create();
-            String json = gson.toJson(gsonworkunit); // was run on unit.forTemplate
-            outJSON.write(json);
-            outJSON.close();
-
-        } catch (Exception e) {
-            throw new ReviewedGATKException("Failed to create JSON entry", e);
-        }
-    }
-
-    private static String getSimpleVersion(String absoluteVersion) {
-        String[] parts = absoluteVersion.split("-");
-
-        // by skipping i=0, there is no trailing separator
-        for (int i = 1; i < 2; i++) {
-            parts[0] = parts[0].concat("-");
-            parts[0] = parts[0].concat(parts[i]);
-        }
-
-        return parts[0];
-    }
-
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/help/GSONArgument.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/help/GSONArgument.java
deleted file mode 100644
index db214b9..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/help/GSONArgument.java
+++ /dev/null
@@ -1,83 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.help;
-
-import java.util.List;
-import java.util.Map;
-
-/**
- * GSON-friendly version of the argument bindings
- */
-public class GSONArgument {
-
-    String summary;
-    String name;
-    String synonyms;
-    String type;
-    String required;
-    String fulltext;
-    String defaultValue;
-    String minValue;
-    String maxValue;
-    String minRecValue;
-    String maxRecValue;
-    String rodTypes;
-    String kind;
-    List<Map<String, Object>> options;
-
-    public void populate(   String summary,
-                            String name,
-                            String synonyms,
-                            String type,
-                            String required,
-                            String fulltext,
-                            String defaultValue,
-                            String minValue,
-                            String maxValue,
-                            String minRecValue,
-                            String maxRecValue,
-                            String rodTypes,
-                            String kind,
-                            List<Map<String, Object>> options
-    ) {
-        this.summary = summary;
-        this.name = name;
-        this.synonyms = synonyms;
-        this.type = type;
-        this.required = required;
-        this.fulltext = fulltext;
-        this.defaultValue = defaultValue;
-        this.minValue = minValue;
-        this.maxValue = maxValue;
-        this.minRecValue = minRecValue;
-        this.maxRecValue = maxRecValue;
-        this.rodTypes = rodTypes;
-        this.kind = kind;
-        this.options = options;
-    }
-
-
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/help/GSONWorkUnit.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/help/GSONWorkUnit.java
deleted file mode 100644
index c4481c0..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/help/GSONWorkUnit.java
+++ /dev/null
@@ -1,86 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.help;
-
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-
-/**
- * GSON-friendly version of the GATKDocWorkUnit
- */
-public class GSONWorkUnit {
-
-    String summary;
-    Object parallel;
-    Object activeregion;
-    String partitiontype;
-    String walkertype;
-    Object arguments;
-    Object refwindow;
-    String description;
-    String name;
-    String annotinfo;
-    Object readfilters;
-    Object downsampling;
-    String group;
-    String annotfield;
-    Object annotdescript;
-
-    public void populate(String summary,
-                         Object parallel,
-                         Object activeregion,
-                         String partitiontype,
-                         String walkertype,
-                         Object arguments,
-                         Object refwindow,
-                         String description,
-                         String name,
-                         String annotinfo,
-                         Object readfilters,
-                         Object downsampling,
-                         String group,
-                         String annotfield,
-                         Object annotdescript
-    ) {
-        this.summary = summary;
-        this.parallel = parallel;
-        this.activeregion = activeregion;
-        this.partitiontype = partitiontype;
-        this.walkertype = walkertype;
-        this.arguments = arguments;
-        this.refwindow = refwindow;
-        this.description = description;
-        this.name = name;
-        this.annotinfo = annotinfo;
-        this.readfilters = readfilters;
-        this.downsampling = downsampling;
-        this.group = group;
-        this.annotfield = annotfield;
-        this.annotdescript = annotdescript;
-    }
-
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/help/GenericDocumentationHandler.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/help/GenericDocumentationHandler.java
deleted file mode 100644
index fea1496..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/help/GenericDocumentationHandler.java
+++ /dev/null
@@ -1,1008 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.help;
-
-import com.google.java.contract.Ensures;
-import com.google.java.contract.Requires;
-import com.sun.javadoc.ClassDoc;
-import com.sun.javadoc.FieldDoc;
-import com.sun.javadoc.Tag;
-import org.apache.commons.lang.StringUtils;
-import org.apache.log4j.Logger;
-import htsjdk.tribble.Feature;
-import org.broadinstitute.gatk.engine.walkers.*;
-import org.broadinstitute.gatk.utils.commandline.*;
-import org.broadinstitute.gatk.engine.CommandLineGATK;
-import org.broadinstitute.gatk.engine.refdata.tracks.FeatureManager;
-import org.broadinstitute.gatk.tools.walkers.annotator.interfaces.GenotypeAnnotation;
-import org.broadinstitute.gatk.tools.walkers.annotator.interfaces.InfoFieldAnnotation;
-import org.broadinstitute.gatk.utils.Utils;
-import org.broadinstitute.gatk.utils.classloader.JVMUtils;
-import org.broadinstitute.gatk.utils.collections.Pair;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-import org.broadinstitute.gatk.utils.exceptions.GATKException;
-
-import java.io.IOException;
-import java.lang.annotation.Annotation;
-import java.lang.reflect.*;
-import java.util.*;
-
-/**
- *
- */
-public class GenericDocumentationHandler extends DocumentedGATKFeatureHandler {
-    private static Logger logger = Logger.getLogger(GenericDocumentationHandler.class);
-
-    /**
-     * The max. length of the longest of --fullName -shortName argument name
-     * before we prefer the shorter option.
-     */
-    private static final int MAX_DISPLAY_NAME = 30;
-
-    /**
-     * The Class we are documenting
-     */
-    private GATKDocWorkUnit toProcess;
-
-    @Override
-    public boolean includeInDocs(ClassDoc doc) {
-        try {
-            Class type = DocletUtils.getClassForDoc(doc);
-            boolean hidden = !getDoclet().showHiddenFeatures() && type.isAnnotationPresent(Hidden.class);
-            return !hidden && JVMUtils.isConcrete(type);
-        } catch (ClassNotFoundException e) {
-            return false;
-        }
-    }
-
-
-    @Override
-    public String getTemplateName(ClassDoc doc) throws IOException {
-        return "generic.template.html";
-    }
-
-    @Override
-    public void processOne(GATKDocWorkUnit toProcessArg) {
-        this.toProcess = toProcessArg;
-
-        //System.out.printf("%s class %s%n", toProcess.group, toProcess.classDoc);
-        Map<String, Object> root = new HashMap<String, Object>();
-
-        addHighLevelBindings(root);
-        addArgumentBindings(root);
-        addRelatedBindings(root);
-        root.put("group", toProcess.group);
-
-        // Adding in retrieval of peripheral info (rf annotations etc)
-        getClazzAnnotations(toProcess.clazz, root);
-
-        toProcess.setHandlerContent((String) root.get("summary"), root);
-    }
-
-    /**
-     * Add high-level summary information about toProcess to root, such as its
-     * name, summary, description, version, etc.
-     *
-     * @param root
-     */
-    protected void addHighLevelBindings(Map<String, Object> root) {
-        root.put("name", toProcess.classDoc.name());
-
-        // Extract overrides from the doc tags.
-        StringBuilder summaryBuilder = new StringBuilder();
-        for (Tag tag : toProcess.classDoc.firstSentenceTags())
-            summaryBuilder.append(tag.text());
-        root.put("summary", summaryBuilder.toString());
-        root.put("description", toProcess.classDoc.commentText().substring(summaryBuilder.toString().length()));
-        root.put("timestamp", toProcess.buildTimestamp);
-        root.put("version", toProcess.absoluteVersion);
-
-        for (Tag tag : toProcess.classDoc.tags()) {
-            root.put(tag.name(), tag.text());
-        }
-
-        root.put("gotoDev", toProcess.annotation.gotoDev());
-    }
-
-    /**
-     * Add bindings describing related GATK capabilites to toProcess
-     *
-     * @param root
-     */
-    protected void addRelatedBindings(Map<String, Object> root) {
-        List<Map<String, Object>> extraDocsData = new ArrayList<Map<String, Object>>();
-
-        // add in all of the explicitly related items
-        for (final Class extraDocClass : toProcess.annotation.extraDocs()) {
-            final GATKDocWorkUnit otherUnit = getDoclet().findWorkUnitForClass(extraDocClass);
-            if (otherUnit == null)
-                throw new ReviewedGATKException("Requested extraDocs for class without any documentation: " + extraDocClass);
-            extraDocsData.add(
-                    new HashMap<String, Object>() {{
-                        put("filename", otherUnit.filename);
-                        put("name", otherUnit.name);
-                    }});
-        }
-        root.put("extradocs", extraDocsData);
-    }
-
-    /**
-     * Add information about all of the arguments available to toProcess to root
-     *
-     * @param root
-     */
-    protected void addArgumentBindings(Map<String, Object> root) {
-        ParsingEngine parsingEngine = createStandardGATKParsingEngine();
-
-        Map<String, List<Map<String, Object>>> args = createArgumentMap();
-        root.put("arguments", args);
-        try {
-            // loop over all of the arguments according to the parsing engine
-            for (final ArgumentSource argumentSource : parsingEngine.extractArgumentSources(DocletUtils.getClassForDoc(toProcess.classDoc))) {
-                ArgumentDefinition argDef = argumentSource.createArgumentDefinitions().get(0);
-                FieldDoc fieldDoc = getFieldDoc(toProcess.classDoc, argumentSource.field.getName());
-                Map<String, Object> argBindings = docForArgument(fieldDoc, argumentSource, argDef);
-                if (!argumentSource.isHidden() || getDoclet().showHiddenFeatures()) {
-                    final String kind = docKindOfArg(argumentSource);
-                    argBindings.put("kind", kind);
-                    // Retrieve default value
-                    final Object value = argumentValue(toProcess.clazz, argumentSource);
-                    if (value != null) {
-                        argBindings.put("defaultValue", prettyPrintValueString(value));
-                    } else {
-                        argBindings.put("defaultValue", "NA");
-                    }
-                    // Retrieve min and max / hard and soft value thresholds for numeric args
-                    if (value instanceof Number) {
-                        if (argumentSource.field.isAnnotationPresent(Argument.class))   {
-                            argBindings.put("minValue", argumentSource.field.getAnnotation(Argument.class).minValue());
-                            argBindings.put("maxValue", argumentSource.field.getAnnotation(Argument.class).maxValue());
-                            if (argumentSource.field.getAnnotation(Argument.class).minRecommendedValue() != Double.NEGATIVE_INFINITY) {
-                                argBindings.put("minRecValue", argumentSource.field.getAnnotation(Argument.class).minRecommendedValue());
-                            } else {
-                                argBindings.put("minRecValue", "NA");
-                            }
-                            if (argumentSource.field.getAnnotation(Argument.class).maxRecommendedValue() != Double.POSITIVE_INFINITY) {
-                                argBindings.put("maxRecValue", argumentSource.field.getAnnotation(Argument.class).maxRecommendedValue());
-                            } else {
-                                argBindings.put("maxRecValue", "NA");
-                            }
-                        }
-                    } else {
-                        argBindings.put("minValue", "NA");
-                        argBindings.put("maxValue", "NA");
-                        argBindings.put("minRecValue", "NA");
-                        argBindings.put("maxRecValue", "NA");
-                        argBindings.put("defaultValue", "NA");
-                    }
-                    // Finalize argument bindings
-                    args.get(kind).add(argBindings);
-                    args.get("all").add(argBindings);
-                }
-            }
-
-            // sort the arguments
-            for (Map.Entry<String, List<Map<String, Object>>> entry : args.entrySet()) {
-                entry.setValue(sortArguments(entry.getValue()));
-            }
-            // make a GSON-friendly map of arguments -- uses some hacky casting
-            List<GSONArgument> allGSONArgs = new ArrayList<GSONArgument>();
-            for ( Map<String, Object> item : args.get("all")) {
-                GSONArgument itemGSONArg = new GSONArgument();
-
-                itemGSONArg.populate(item.get("summary").toString(),
-                        item.get("name").toString(),
-                        item.get("synonyms").toString(),
-                        item.get("type").toString(),
-                        item.get("required").toString(),
-                        item.get("fulltext").toString(),
-                        item.get("defaultValue").toString(),
-                        item.get("minValue").toString(),
-                        item.get("maxValue").toString(),
-                        item.get("minRecValue").toString(),
-                        item.get("maxRecValue").toString(),
-                        item.get("rodTypes").toString(),
-                        item.get("kind").toString(),
-                        (List<Map<String, Object>>)item.get("options")
-                );
-                allGSONArgs.add(itemGSONArg);
-            }
-            root.put("gson-arguments", allGSONArgs);
-
-        } catch (ClassNotFoundException e) {
-            throw new RuntimeException(e);
-        }
-    }
-
-    /**
-     * Return the argument kind (required, advanced, hidden, etc) of this argumentSource
-     *
-     * @param argumentSource
-     * @return
-     */
-    @Requires("argumentSource != null")
-    @Ensures("result != null")
-    private String docKindOfArg(ArgumentSource argumentSource) {
-        if (argumentSource.isRequired()) {
-            if (argumentSource.isInput()) return "required_in";
-            else if (argumentSource.isOutput()) return "required_out";
-            else if (argumentSource.isFlag()) return "required_flag";
-            else return "required_param";
-            }
-        else if (argumentSource.isAdvanced()) {
-            if (argumentSource.isInput()) return "advanced_in";
-            else if (argumentSource.isOutput()) return "advanced_out";
-            else if (argumentSource.isFlag()) return "advanced_flag";
-            else return "advanced_param";
-        }
-        else if (argumentSource.isHidden()) return "hidden";
-        else if (argumentSource.isDeprecated()) return "deprecated";
-        else {
-            if (argumentSource.isInput()) return "optional_in";
-            else if (argumentSource.isOutput()) return "optional_out";
-            else if (argumentSource.isFlag()) return "optional_flag";
-            else return "optional_param";
-        }
-    }
-
-    /**
-     * Attempts to determine the value of argumentSource in an instantiated version of c
-     *
-     * @param c
-     * @param argumentSource
-     * @return value of argumentSource, or null if this isn't possible
-     */
-    @Requires({"c != null", "argumentSource != null"})
-    private Object argumentValue(Class c, ArgumentSource argumentSource) {
-        // get the value of the field
-        // attempt to instantiate the class
-        final Object instance = makeInstanceIfPossible(toProcess.clazz);
-        if (instance != null) {
-            final Object value = getFieldValue(instance, argumentSource.field.getName());
-            if (value != null)
-                return value;
-
-            if (argumentSource.createsTypeDefault()) {
-                try { // handle the case where there's an implicit default
-                    return argumentSource.typeDefaultDocString();
-                } catch (ReviewedGATKException e) {
-                    ; // failed to create type default, don't worry about it
-                }
-            }
-        }
-
-        return null;
-    }
-
-    /**
-     * Create the argument map for holding class arguments
-     *
-     * @return
-     */
-    private Map<String, List<Map<String, Object>>> createArgumentMap() {
-        Map<String, List<Map<String, Object>>> args = new HashMap<String, List<Map<String, Object>>>();
-        args.put("all", new ArrayList<Map<String, Object>>());
-        args.put("required_in", new ArrayList<Map<String, Object>>());
-        args.put("required_out", new ArrayList<Map<String, Object>>());
-        args.put("required_param", new ArrayList<Map<String, Object>>());
-        args.put("required_flag", new ArrayList<Map<String, Object>>());
-        args.put("optional_in", new ArrayList<Map<String, Object>>());
-        args.put("optional_out", new ArrayList<Map<String, Object>>());
-        args.put("optional_param", new ArrayList<Map<String, Object>>());
-        args.put("optional_flag", new ArrayList<Map<String, Object>>());
-        args.put("advanced_in", new ArrayList<Map<String, Object>>());
-        args.put("advanced_out", new ArrayList<Map<String, Object>>());
-        args.put("advanced_param", new ArrayList<Map<String, Object>>());
-        args.put("advanced_flag", new ArrayList<Map<String, Object>>());
-        args.put("hidden", new ArrayList<Map<String, Object>>());
-        args.put("deprecated", new ArrayList<Map<String, Object>>());
-        return args;
-    }
-
-
-    /**
-     * Sorts the individual argument list in unsorted according to CompareArgumentsByName
-     *
-     * @param unsorted
-     * @return
-     */
-    private List<Map<String, Object>> sortArguments(List<Map<String, Object>> unsorted) {
-        Collections.sort(unsorted, new CompareArgumentsByName());
-        return unsorted;
-    }
-
-    /**
-     * Sort arguments by case-insensitive comparison ignoring the -- and - prefixes
-     */
-    private class CompareArgumentsByName implements Comparator<Map<String, Object>> {
-        public int compare(Map<String, Object> x, Map<String, Object> y) {
-            return elt(x).compareTo(elt(y));
-        }
-
-        private String elt(Map<String, Object> m) {
-            String v = m.get("name").toString().toLowerCase();
-            if (v.startsWith("--"))
-                return v.substring(2);
-            else if (v.startsWith("-"))
-                return v.substring(1);
-            else
-                throw new RuntimeException("Expect to see arguments beginning with at least one -, but found " + v);
-        }
-    }
-
-    /**
-     * Umbrella function that groups the collection of values for specific annotations applied to an
-     * instance of class c. Lists of collected values are added directly to the "toProcess" object.
-     * Requires being able to instantiate the class.
-     *
-     * @param classToProcess the object to instantiate and query for the annotation
-     * @param root the root of the document handler, to which we'll store collected annotations
-     */
-    private void getClazzAnnotations(Class classToProcess, Map<String, Object> root) {
-        //
-        // attempt to instantiate the class
-        final Object instance = makeInstanceIfPossible(classToProcess);
-        if (instance != null) {
-            final Class myClass = instance.getClass();
-            // Get parallelism options
-            final HashSet<HashMap<String, Object>> parallelOptions = getParallelism(myClass, new HashSet<HashMap<String, Object>>());
-            root.put("parallel", parallelOptions);
-            // Get annotation info (what type of annotation, standard etc.)
-            final HashSet<String> annotInfo = getAnnotInfo(myClass, new HashSet<String>());
-            root.put("annotinfo", StringUtils.join(annotInfo, ", "));
-            // Get annotation field (whether it goes in INFO or FORMAT)
-            root.put("annotfield", getAnnotField(myClass));
-            // Get walker type if applicable
-            root.put("walkertype", getWalkerType(myClass));
-            // Get partition type if applicable
-            root.put("partitiontype", getPartitionType(myClass));
-            // Get read filter annotations (ReadFilters) if applicable
-            final HashSet<HashMap<String, Object>> bucket= getReadFilters(myClass, new HashSet<HashMap<String, Object>>());
-            root.put("readfilters", bucket);
-            // Get default downsampling settings
-            final HashMap<String, Object> dsSettings = getDownSamplingSettings(myClass, new HashMap<String, Object>());
-            root.put("downsampling", dsSettings);
-            // Get reference window size settings
-            final HashMap<String, Object> refwindow = getRefWindow(myClass, new HashMap<String, Object>());
-            root.put("refwindow", refwindow);
-            // Get ActiveRegion size settings
-            final HashMap<String, Object> activeRegion = getActiveRegion(myClass, new HashMap<String, Object>());
-            root.put("activeregion", activeRegion);
-            // Get annotation header line description if applicable
-            final Object annotDescriptLines = getAnnotDescript(instance, myClass);
-            root.put("annotdescript", annotDescriptLines);
-
-            // anything else?
-        } else {
-            // put empty items to avoid blowups
-            root.put("parallel", new HashSet<String>());
-            root.put("annotinfo", "");
-            root.put("annotfield", "");
-            root.put("walkertype", "");
-            root.put("partitiontype", "");
-            root.put("readfilters", new HashSet<HashMap<String, Object>>());
-            root.put("downsampling", new HashMap<String, Object>());
-            root.put("refwindow", new HashMap<String, Object>());
-            root.put("activeregion", new HashMap<String, Object>());
-            root.put("annotdescript", new ArrayList<HashMap<String, Object>>());
-        }
-    }
-
-    /**
-     * Utility function that looks up annotation descriptions if applicable.
-     *
-     * @param myClass the class to query
-     * @return a hash map of descriptions, otherwise an empty map
-     */
-    private Object getAnnotDescript(Object instance, Class myClass) {
-        //
-        // Check if the class has the method we want
-        for (Method classMethod : myClass.getMethods()) {
-            if (classMethod.toString().contains("getDescriptions") && classMethod.toString().contains("annotator")) {
-                try {
-                    return classMethod.invoke(instance);
-                } catch (IllegalArgumentException e) {
-                } catch (IllegalAccessException e) {
-                } catch (InvocationTargetException e) {
-                }
-            }
-        }
-        return null;
-    }
-
-    /**
-     * Utility function that checks which parallelism options are available for an instance of class c.
-     *
-     * @param myClass the class to query for the interfaces
-     * @param parallelOptions an empty HashSet in which to collect the info
-     * @return a hash set of parallelism options, otherwise an empty set
-     */
-    private HashSet<HashMap<String, Object>> getParallelism(Class myClass, HashSet<HashMap<String, Object>> parallelOptions) {
-        //
-        // Retrieve interfaces
-        Class[] implementedInterfaces = myClass.getInterfaces();
-        for (Class intfClass : implementedInterfaces) {
-            final HashMap<String, Object> nugget = new HashMap<String, Object>();
-            if (intfClass.getSimpleName().equals("TreeReducible")) {
-                nugget.put("name", intfClass.getSimpleName());
-                nugget.put("arg", HelpConstants.ARG_TREEREDUCIBLE);
-                nugget.put("link", HelpConstants.CMDLINE_GATK_URL + "#" + HelpConstants.ARG_TREEREDUCIBLE);
-            } else if (intfClass.getSimpleName().equals("NanoSchedulable")) {
-                nugget.put("name", intfClass.getSimpleName());
-                nugget.put("arg", HelpConstants.ARG_NANOSCHEDULABLE);
-                nugget.put("link", HelpConstants.CMDLINE_GATK_URL + "#" + HelpConstants.ARG_NANOSCHEDULABLE);
-            } else {
-                continue;
-            }
-            parallelOptions.add(nugget);
-        }
-        // Look up superclasses recursively
-        final Class mySuperClass = myClass.getSuperclass();
-        if (mySuperClass.getSimpleName().equals("Object")) {
-            return parallelOptions;
-        }
-        return getParallelism(mySuperClass, parallelOptions);
-    }
-
-    /**
-     * Utility function that looks up whether the annotation goes in INFO or FORMAT field.
-     *
-     * @param myClass the class to query for the interfaces
-     * @return a String specifying the annotation field
-     */
-    private final String getAnnotField(Class myClass) {
-        //
-        // Look up superclasses recursively until we find either
-        // GenotypeAnnotation or InfoFieldAnnotation
-        final Class mySuperClass = myClass.getSuperclass();
-        if (mySuperClass == InfoFieldAnnotation.class) {
-            return "INFO (variant-level)";
-        } else if (mySuperClass == GenotypeAnnotation.class) {
-            return "FORMAT (sample genotype-level)";
-        } else if (mySuperClass.getSimpleName().equals("Object")) {
-            return "";
-        }
-        return getAnnotField(mySuperClass);
-    }
-
-    /**
-     * Utility function that determines the annotation type for an instance of class c.
-     *
-     * @param myClass the class to query for the interfaces
-     * @param annotInfo an empty HashSet in which to collect the info
-     * @return a hash set of the annotation types, otherwise an empty set
-     */
-    private HashSet<String> getAnnotInfo(Class myClass, HashSet<String> annotInfo) {
-        //
-        // Retrieve interfaces
-        Class[] implementedInterfaces = myClass.getInterfaces();
-        for (Class intfClass : implementedInterfaces) {
-            if (intfClass.getName().contains("Annotation")) {
-                annotInfo.add(intfClass.getSimpleName());
-            }
-        }
-        // Look up superclasses recursively
-        final Class mySuperClass = myClass.getSuperclass();
-        if (mySuperClass.getSimpleName().equals("Object")) {
-            return annotInfo;
-        }
-        return getAnnotInfo(mySuperClass, annotInfo);
-    }
-
-    /**
-     * Utility function that determines the default downsampling settings for an instance of class c.
-     *
-     * @param myClass the class to query for the settings
-     * @param dsSettings an empty HashMap in which to collect the info
-     * @return a hash set of the downsampling settings, otherwise an empty set
-     */
-    private HashMap<String, Object> getDownSamplingSettings(Class myClass, HashMap<String, Object> dsSettings) {
-        //
-        // Retrieve annotation
-        if (myClass.isAnnotationPresent(Downsample.class)) {
-            final Annotation thisAnnotation = myClass.getAnnotation(Downsample.class);
-            if(thisAnnotation instanceof Downsample) {
-                final Downsample dsAnnotation = (Downsample) thisAnnotation;
-                dsSettings.put("by", dsAnnotation.by().toString());
-                dsSettings.put("to_cov", dsAnnotation.toCoverage());
-            }
-        }
-        return dsSettings;
-    }
-
-    /**
-     * Utility function that determines the reference window size for an instance of class c.
-     *
-     * @param myClass the class to query for the settings
-     * @param refWindow an empty HashMap in which to collect the info
-     * @return a HashMap of the window start and stop, otherwise an empty HashMap
-     */
-    private HashMap<String, Object> getRefWindow(Class myClass, HashMap<String, Object> refWindow) {
-        //
-        // Retrieve annotation
-        if (myClass.isAnnotationPresent(Reference.class)) {
-            final Annotation thisAnnotation = myClass.getAnnotation(Reference.class);
-            if(thisAnnotation instanceof Reference) {
-                final Reference refAnnotation = (Reference) thisAnnotation;
-                refWindow.put("start", refAnnotation.window().start());
-                refWindow.put("stop", refAnnotation.window().stop());
-            }
-        }
-        return refWindow;
-    }
-
-    /**
-     * Utility function that determines the ActiveRegion settings for an instance of class c.
-     *
-     * @param myClass the class to query for the settings
-     * @param activeRegion an empty HashMap in which to collect the info
-     * @return a HashMap of the ActiveRegion parameters, otherwise an empty HashMap
-     */
-    private HashMap<String, Object> getActiveRegion(Class myClass, HashMap<String, Object> activeRegion) {
-        //
-        // Retrieve annotation
-        if (myClass.isAnnotationPresent(ActiveRegionTraversalParameters.class)) {
-            final Annotation thisAnnotation = myClass.getAnnotation(ActiveRegionTraversalParameters.class);
-            if(thisAnnotation instanceof ActiveRegionTraversalParameters) {
-                final ActiveRegionTraversalParameters arAnnotation = (ActiveRegionTraversalParameters) thisAnnotation;
-                activeRegion.put("ext", arAnnotation.extension());
-                activeRegion.put("max", arAnnotation.maxRegion());
-                activeRegion.put("min", arAnnotation.minRegion());
-            }
-        }
-        return activeRegion;
-    }
-
-    /**
-     * Utility function that determines the partition type of an instance of class c.
-     *
-     * @param myClass the class to query for the annotation
-     * @return the partition type if applicable, otherwise an empty string
-     */
-    private String getPartitionType(Class myClass) {
-        //
-        // Retrieve annotation
-        if (myClass.isAnnotationPresent(PartitionBy.class)) {
-            final Annotation thisAnnotation = myClass.getAnnotation(PartitionBy.class);
-            if(thisAnnotation instanceof PartitionBy) {
-                final PartitionBy partAnnotation = (PartitionBy) thisAnnotation;
-                return partAnnotation.value().toString();
-            }
-        }
-        return "";
-    }
-
-    /**
-     * Utility function that determines the type of walker subclassed by an instance of class c.
-     *
-     * @param myClass the class to query for the annotation
-     * @return the type of walker if applicable, otherwise an empty string
-     */
-    private String getWalkerType(Class myClass) {
-        //
-        // Look up superclasses recursively until we find either Walker or Object
-        final Class mySuperClass = myClass.getSuperclass();
-        if (mySuperClass.getSimpleName().equals("Walker")) {
-            return myClass.getSimpleName();
-        } else if (mySuperClass.getSimpleName().equals("Object")) {
-            return "";
-        }
-        return getWalkerType(mySuperClass);
-    }
-
-    /**
-     * Utility function that finds the values of ReadFilters annotation applied to an instance of class c.
-     *
-     * @param myClass the class to query for the annotation
-     * @param bucket a container in which we store the annotations collected
-     * @return a hash set of values, otherwise an empty set
-     */
-    private HashSet<HashMap<String, Object>> getReadFilters(Class myClass, HashSet<HashMap<String, Object>> bucket) {
-        //
-        // Retrieve annotation
-        if (myClass.isAnnotationPresent(ReadFilters.class)) {
-            final Annotation thisAnnotation = myClass.getAnnotation(ReadFilters.class);
-            if(thisAnnotation instanceof ReadFilters) {
-                final ReadFilters rfAnnotation = (ReadFilters) thisAnnotation;
-                for (Class<?> filter : rfAnnotation.value()) {
-                    // make hashmap of simplename and url
-                    final HashMap<String, Object> nugget = new HashMap<String, Object>();
-                    nugget.put("name", filter.getSimpleName());
-                    nugget.put("filename", GATKDocUtils.phpFilenameForClass(filter));
-                    bucket.add(nugget);
-                }
-            }
-        }
-        // Look up superclasses recursively
-        final Class mySuperClass = myClass.getSuperclass();
-        if (mySuperClass.getSimpleName().equals("Object")) {
-            return bucket;
-        }
-        return getReadFilters(mySuperClass, bucket);
-    }
-
-
-    /**
-     * Utility function that finds the value of fieldName in any fields of ArgumentCollection fields in
-     * instance of class c.
-     *
-     * @param instance  the object to query for the field value
-     * @param fieldName the name of the field we are looking for in instance
-     * @return The value assigned to field in the ArgumentCollection, otherwise null
-     */
-    private Object getFieldValue(Object instance, String fieldName) {
-        //
-        // subtle note.  If you have a field named X that is an ArgumentCollection that
-        // contains a field X as well, you need only consider fields in the argumentCollection, not
-        // matching the argument itself.
-        //
-        // @ArgumentCollection
-        // protected DbsnpArgumentCollection dbsnp = new DbsnpArgumentCollection();
-        //
-
-        for (Field field : JVMUtils.getAllFields(instance.getClass())) {
-            if (field.isAnnotationPresent(ArgumentCollection.class)) {
-                //System.out.printf("Searching for %s in argument collection field %s%n", fieldName, field);
-                Object fieldValue = JVMUtils.getFieldValue(field, instance);
-                Object value = getFieldValue(fieldValue, fieldName);
-                if (value != null)
-                    return value;
-            } else if (field.getName().equals(fieldName)) {
-                return JVMUtils.getFieldValue(field, instance);
-            }
-        }
-
-        return null;
-    }
-
-    /**
-     * Pretty prints value
-     * <p/>
-     * Assumes value != null
-     *
-     * @param value
-     * @return
-     */
-    private Object prettyPrintValueString(Object value) {
-        if (value.getClass().isArray()) {
-            Class type = value.getClass().getComponentType();
-            if (boolean.class.isAssignableFrom(type))
-                return Arrays.toString((boolean[]) value);
-            if (byte.class.isAssignableFrom(type))
-                return Arrays.toString((byte[]) value);
-            if (char.class.isAssignableFrom(type))
-                return Arrays.toString((char[]) value);
-            if (double.class.isAssignableFrom(type))
-                return Arrays.toString((double[]) value);
-            if (float.class.isAssignableFrom(type))
-                return Arrays.toString((float[]) value);
-            if (int.class.isAssignableFrom(type))
-                return Arrays.toString((int[]) value);
-            if (long.class.isAssignableFrom(type))
-                return Arrays.toString((long[]) value);
-            if (short.class.isAssignableFrom(type))
-                return Arrays.toString((short[]) value);
-            if (Object.class.isAssignableFrom(type))
-                return Arrays.toString((Object[]) value);
-            else
-                throw new RuntimeException("Unexpected array type in prettyPrintValue.  Value was " + value + " type is " + type);
-        } else if (RodBinding.class.isAssignableFrom(value.getClass())) {
-            // annoying special case to handle the UnBound() constructor
-            return "none";
-        } else if (value instanceof String) {
-            return value.equals("") ? "\"\"" : value;
-        } else {
-            return value.toString();
-        }
-    }
-
-    /**
-     * Attempt to instantiate class c, if possible.  Returns null if this proves impossible.
-     *
-     * @param c
-     * @return
-     */
-    private Object makeInstanceIfPossible(Class c) {
-        Object instance = null;
-        try {
-            // don't try to make something where we will obviously fail
-            if (!c.isEnum() && !c.isAnnotation() && !c.isAnonymousClass() &&
-                    !c.isArray() && !c.isPrimitive() & JVMUtils.isConcrete(c)) {
-                instance = c.newInstance();
-                //System.out.printf("Created object of class %s => %s%n", c, instance);
-                return instance;
-            } else
-                return null;
-        } catch (IllegalAccessException e) {
-        } catch (InstantiationException e) {
-        } catch (ExceptionInInitializerError e) {
-        } catch (SecurityException e) {
-        }
-        // this last one is super dangerous, but some of these methods catch ClassNotFoundExceptions
-        // and rethrow then as RuntimeExceptions
-        catch (RuntimeException e) {
-        }
-
-        return instance;
-    }
-
-
-    /**
-     * Create an instance of the GATK parsing engine, for argument processing with GATKDoclet
-     *
-     * @return
-     */
-    private ParsingEngine createStandardGATKParsingEngine() {
-        CommandLineProgram clp = new CommandLineGATK();
-        try {
-            CommandLineProgram.start(clp, new String[]{}, true);
-            return clp.parser;
-        } catch (Exception e) {
-            throw new RuntimeException(e);
-        }
-    }
-
-    /**
-     * Gets the javadocs associated with field name in classDoc.  Throws a
-     * runtime exception if this proves impossible.
-     *
-     * @param classDoc
-     * @param name
-     * @return
-     */
-    private FieldDoc getFieldDoc(ClassDoc classDoc, String name) {
-        return getFieldDoc(classDoc, name, true);
-    }
-
-    /**
-     * Recursive helper routine to getFieldDoc()
-     *
-     * @param classDoc
-     * @param name
-     * @param primary
-     * @return
-     */
-    private FieldDoc getFieldDoc(ClassDoc classDoc, String name, boolean primary) {
-        //System.out.printf("Looking for %s in %s%n", name, classDoc.name());
-        for (FieldDoc fieldDoc : classDoc.fields(false)) {
-            //System.out.printf("fieldDoc " + fieldDoc + " name " + fieldDoc.name());
-            if (fieldDoc.name().equals(name))
-                return fieldDoc;
-
-            Field field = DocletUtils.getFieldForFieldDoc(fieldDoc);
-            if (field == null)
-                throw new RuntimeException("Could not find the field corresponding to " + fieldDoc + ", presumably because the field is inaccessible");
-            if (field.isAnnotationPresent(ArgumentCollection.class)) {
-                ClassDoc typeDoc = getRootDoc().classNamed(fieldDoc.type().qualifiedTypeName());
-                if (typeDoc == null)
-                    throw new ReviewedGATKException("Tried to get javadocs for ArgumentCollection field " + fieldDoc + " but could't find the class in the RootDoc");
-                else {
-                    FieldDoc result = getFieldDoc(typeDoc, name, false);
-                    if (result != null)
-                        return result;
-                    // else keep searching
-                }
-            }
-        }
-
-        // if we didn't find it here, wander up to the superclass to find the field
-        if (classDoc.superclass() != null) {
-            return getFieldDoc(classDoc.superclass(), name, false);
-        }
-
-        if (primary)
-            throw new RuntimeException("No field found for expected field " + name);
-        else
-            return null;
-    }
-
-    /**
-     * Returns a Pair of (main, synonym) names for argument with fullName s1 and
-     * shortName s2.
-     *
-     * Previously we had it so the main name was selected to be the longest of the two, provided
-     * it didn't exceed MAX_DISPLAY_NAME, in which case the shorter was taken. But we now disable
-     * the length-based name rearrangement in order to maintain consistency in the GATKDocs table.
-     *
-     * This may cause messed up spacing in the CLI-help display but we don't care as much about that
-     * since more users use the online GATKDocs for looking up arguments.
-     *
-     * @param s1 the short argument name without -, or null if not provided
-     * @param s2 the long argument name without --, or null if not provided
-     * @return A pair of fully qualified names (with - or --) for the argument.  The first
-     *         element is the primary display name while the second (potentially null) is a
-     *         synonymous name.
-     */
-    Pair<String, String> displayNames(String s1, String s2) {
-        s1 = s1 == null ? null : "-" + s1;
-        s2 = s2 == null ? null : "--" + s2;
-
-        if (s1 == null) return new Pair<String, String>(s2, null);
-        if (s2 == null) return new Pair<String, String>(s1, null);
-
-        return new Pair<String, String>(s2, s1);
-    }
-
-    /**
-     * Returns a human readable string that describes the Type type of a GATK argument.
-     * <p/>
-     * This will include parameterized types, so that Set{T} shows up as Set(T) and not
-     * just Set in the docs.
-     *
-     * @param type
-     * @return
-     */
-    protected String argumentTypeString(Type type) {
-        if (type instanceof ParameterizedType) {
-            ParameterizedType parameterizedType = (ParameterizedType) type;
-            List<String> subs = new ArrayList<String>();
-            for (Type actualType : parameterizedType.getActualTypeArguments())
-                subs.add(argumentTypeString(actualType));
-            return argumentTypeString(((ParameterizedType) type).getRawType()) + "[" + Utils.join(",", subs) + "]";
-        } else if (type instanceof GenericArrayType) {
-            return argumentTypeString(((GenericArrayType) type).getGenericComponentType()) + "[]";
-        } else if (type instanceof WildcardType) {
-            throw new RuntimeException("We don't support wildcards in arguments: " + type);
-        } else if (type instanceof Class<?>) {
-            return ((Class) type).getSimpleName();
-        } else {
-            throw new GATKException("Unknown type: " + type);
-        }
-    }
-
-    /**
-     * Helper routine that returns the Feature.class required by a RodBinding,
-     * either T for RodBinding{T} or List{RodBinding{T}}.  Returns null if
-     * the Type doesn't fit either model.
-     *
-     * @param type
-     * @return
-     */
-    protected Class<? extends Feature> getFeatureTypeIfPossible(Type type) {
-        if (type instanceof ParameterizedType) {
-            ParameterizedType paramType = (ParameterizedType) type;
-            if (RodBinding.class.isAssignableFrom((Class<?>) paramType.getRawType())) {
-                return (Class<? extends Feature>) JVMUtils.getParameterizedTypeClass(type);
-            } else {
-                for (Type paramtype : paramType.getActualTypeArguments()) {
-                    Class<? extends Feature> x = getFeatureTypeIfPossible(paramtype);
-                    if (x != null)
-                        return x;
-                }
-            }
-        }
-
-        return null;
-    }
-
-    /**
-     * High-level entry point for creating a FreeMarker map describing the GATK argument
-     * source with definition def, with associated javadoc fieldDoc.
-     *
-     * @param fieldDoc
-     * @param source
-     * @param def
-     * @return a non-null Map binding argument keys with their values
-     */
-    protected Map<String, Object> docForArgument(FieldDoc fieldDoc, ArgumentSource source, ArgumentDefinition def) {
-        Map<String, Object> root = new HashMap<String, Object>();
-        Pair<String, String> names = displayNames(def.shortName, def.fullName);
-
-        root.put("name", names.getFirst());
-
-        if (names.getSecond() != null) {
-            root.put("synonyms", names.getSecond());
-        } else {
-            root.put("synonyms", "NA");
-        }
-
-        root.put("required", def.required ? "yes" : "no");
-
-        // type of the field
-        root.put("type", argumentTypeString(source.field.getGenericType()));
-
-        Class<? extends Feature> featureClass = getFeatureTypeIfPossible(source.field.getGenericType());
-        if (featureClass != null) {
-            // deal with the allowable types
-            FeatureManager manager = new FeatureManager();
-            List<String> rodTypes = new ArrayList<String>();
-            for (FeatureManager.FeatureDescriptor descriptor : manager.getByFeature(featureClass)) {
-                rodTypes.add(String.format("<a href=%s>%s</a>",
-                        GATKDocUtils.phpFilenameForClass(descriptor.getCodecClass()),
-                        descriptor.getName()));
-            }
-
-            root.put("rodTypes", Utils.join(", ", rodTypes));
-        } else {
-            root.put("rodTypes", "NA");
-        }
-
-        // summary and fulltext
-        root.put("summary", def.doc != null ? def.doc : "");
-        root.put("fulltext", fieldDoc.commentText());
-
-        // What are our enum options?
-        if (def.validOptions != null) {
-            root.put("options", docForEnumArgument(source.field.getType()));
-        } else {
-            root.put("options", new ArrayList());
-        }
-        // general attributes
-        List<String> attributes = new ArrayList<String>();
-        if (def.required) attributes.add("required");
-        if (source.isDeprecated()) attributes.add("deprecated");
-        if (attributes.size() > 0) {
-            root.put("attributes", Utils.join(", ", attributes));
-        } else {
-            root.put("attributes", "NA");
-        }
-        return root;
-    }
-
-    /**
-     * Helper routine that provides a FreeMarker map for an enumClass, grabbing the
-     * values of the enum and their associated javadoc documentation.
-     *
-     * @param enumClass
-     * @return
-     */
-    @Requires("enumClass.isEnum()")
-    private List<Map<String, Object>> docForEnumArgument(final Class enumClass) {
-        final ClassDoc doc = this.getDoclet().getClassDocForClass(enumClass);
-        if ( doc == null )
-            throw new RuntimeException("Tried to get docs for enum " + enumClass + " but got null instead");
-
-        final Set<String> enumConstantFieldNames = enumConstantsNames(enumClass);
-
-        final List<Map<String, Object>> bindings = new ArrayList<Map<String, Object>>();
-        for (final FieldDoc fieldDoc : doc.fields(false)) {
-            if (enumConstantFieldNames.contains(fieldDoc.name()) )
-                bindings.add(
-                        new HashMap<String, Object>() {{
-                            put("name", fieldDoc.name());
-                            put("summary", fieldDoc.commentText());
-                        }});
-        }
-
-        return bindings;
-    }
-
-    /**
-     * Returns the name of the fields that are enum constants according to reflection
-     *
-     * @return a non-null set of fields that are enum constants
-     */
-    private Set<String> enumConstantsNames(final Class enumClass) {
-        final Set<String> enumConstantFieldNames = new HashSet<String>();
-
-        for ( final Field field : enumClass.getFields() ) {
-            if ( field.isEnumConstant() )
-                enumConstantFieldNames.add(field.getName());
-        }
-
-        return enumConstantFieldNames;
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/help/HelpConstants.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/help/HelpConstants.java
deleted file mode 100644
index 16257c6..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/help/HelpConstants.java
+++ /dev/null
@@ -1,83 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.help;
-
-public class HelpConstants {
-
-    public final static String BASE_GATK_URL = "http://www.broadinstitute.org/gatk";
-    public final static String GATK_DOCS_URL = BASE_GATK_URL + "/tooldocs/";
-    public final static String GATK_FORUM_URL = "http://gatkforums.broadinstitute.org/";
-    public final static String GATK_FORUM_API_URL = "https://gatkforums.broadinstitute.org/api/v1/";
-
-    /**
-     * Arguments for parallelism options
-     */
-    public final static String ARG_TREEREDUCIBLE = "-nt";
-    public final static String ARG_NANOSCHEDULABLE = "-nct";
-    public final static String CMDLINE_GATK_URL = GATK_DOCS_URL + "org_broadinstitute_gatk_engine_CommandLineGATK.php";
-
-    /**
-     * Definition of the group names / categories of tools.
-     * The names get parsed to make supercategories in the doc index,
-     * so be careful when making big changes -- see GATKDoclet.java toMap()
-     */
-    public final static String DOCS_CAT_DATA = "Sequence Data Processing Tools";
-    public final static String DOCS_CAT_QC = "Diagnostics and Quality Control Tools";
-    public final static String DOCS_CAT_ENGINE = "Engine Parameters (available to all tools)";
-    public final static String DOCS_CAT_RF = "Read Filters";
-    public final static String DOCS_CAT_REFUTILS = "Reference Utilities";
-    public final static String DOCS_CAT_RODCODECS = "ROD Codecs";
-    public final static String DOCS_CAT_USRERR = "User Exceptions (DevZone)";
-    public final static String DOCS_CAT_VALIDATION = "Validation Utilities";
-    public final static String DOCS_CAT_ANNOT = "Variant Annotations";
-    public final static String DOCS_CAT_VARDISC = "Variant Discovery Tools";
-    public final static String DOCS_CAT_VARMANIP = "Variant Evaluation and Manipulation Tools";
-    public final static String DOCS_CAT_TOY = "Toy Walkers (DevZone)";
-    public final static String DOCS_CAT_HELPUTILS = "Help Utilities";
-
-    public static String forumPost(String post) {
-    	return GATK_FORUM_URL + post;
-    }
-
-    /**
-     * Go-to developer name codes for tracking and display purposes. Only current team members should be in this list.
-     * When someone leaves, their charges should be redistributed. The actual string should be closest to the dev's
-     * abbreviated name or two/three-letter nickname as possible. The code can be something else if necessary to
-     * disambiguate from other variable.
-     */
-    public final static String MC = "MC"; // Mauricio Carneiro
-    public final static String EB = "EB"; // Eric Banks
-    public final static String RP = "RP"; // Ryan Poplin
-    public final static String GVDA = "GG"; // Geraldine Van der Auwera
-    public final static String VRR = "VRR"; // Valentin Ruano-Rubio
-    public final static String ALM = "ALM"; // Ami Levy-Moonshine
-    public final static String BH = "BH"; // Bertrand Haas
-    public final static String JoT = "JT"; // Joel Thibault
-    public final static String DR = "DR"; // David Roazen
-    public final static String KS = "KS"; // Khalid Shakir
-
-
-}
\ No newline at end of file
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/help/HelpFormatter.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/help/HelpFormatter.java
deleted file mode 100644
index a8d4693..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/help/HelpFormatter.java
+++ /dev/null
@@ -1,336 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.help;
-
-import org.apache.log4j.Logger;
-import org.broadinstitute.gatk.utils.commandline.*;
-import org.broadinstitute.gatk.utils.Utils;
-import org.broadinstitute.gatk.utils.text.TextFormattingUtils;
-
-import java.net.InetAddress;
-import java.text.DateFormat;
-import java.text.SimpleDateFormat;
-import java.util.*;
-/**
- * Print out help for GATK command-line applications.
- */
-
-public class HelpFormatter {
-    /** our log, which we want to capture anything from org.broadinstitute.gatk */
-    private static Logger logger = Logger.getLogger(HelpFormatter.class);
-
-    public static final int FIELD_SEPARATION_WIDTH = 3;
-
-    /**
-     * Prints the help, given a collection of argument definitions.
-     * @param applicationDetails Application details
-     * @param argumentDefinitions Argument definitions for which help should be printed.
-     */
-    public void printHelp( ApplicationDetails applicationDetails, ArgumentDefinitions argumentDefinitions ) {
-        List<ArgumentDefinitionGroup> argumentGroups = prepareArgumentGroups( argumentDefinitions );
-
-        List<String> header = applicationDetails.applicationHeader;
-        String barrier = createBarrier(header);
-
-        System.out.printf("%s%n",barrier);
-        for(String headerLine: header)
-            System.out.printf("%s%n",headerLine);
-        System.out.printf("%s%n",barrier);
-        for(String attributionLine: applicationDetails.attribution)
-            System.out.printf("%s%n",attributionLine);
-        System.out.printf("%s%n",barrier);
-
-        String synopsis = getSynopsis(applicationDetails.runningInstructions,argumentGroups);
-        String additionalDetails = applicationDetails.additionalHelp != null ? applicationDetails.additionalHelp : "";
-        String detailedDescription = getDetailed(argumentGroups);
-
-        System.out.printf("%s%n%s%n%s%n",synopsis,detailedDescription,additionalDetails );
-    }
-
-    /**
-     * Gets the synopsis: the actual command to run.
-     * @param runningInstructions Instructions on how to run hte application.
-     * @param argumentGroups Program arguments sorted in order of definition group displays.
-     * @return A synopsis line.
-     */
-    private String getSynopsis( String runningInstructions,
-                                List<ArgumentDefinitionGroup> argumentGroups ) {
-        // Build out the synopsis all as one long line.        
-        StringBuilder lineBuilder = new StringBuilder();
-        Formatter lineFormatter = new Formatter( lineBuilder );
-
-        lineFormatter.format("java %s", runningInstructions);
-
-        for( ArgumentDefinitionGroup argumentGroup: argumentGroups ) {
-            for( ArgumentDefinition argumentDefinition: argumentGroup.argumentDefinitions ) {
-                if(argumentDefinition.isHidden)
-                    continue;
-                lineFormatter.format(" ");
-                if( !argumentDefinition.required ) lineFormatter.format("[");
-                if( argumentDefinition.shortName != null )
-                    lineFormatter.format("-%s", argumentDefinition.shortName);
-                else
-                    lineFormatter.format("--%s", argumentDefinition.fullName);
-                if( !argumentDefinition.isFlag )
-                    lineFormatter.format(" <%s>", argumentDefinition.fullName);                
-                if( !argumentDefinition.required ) lineFormatter.format("]");
-            }
-        }
-
-        // Word wrap the synopsis.
-        List<String> wrappedSynopsis = TextFormattingUtils.wordWrap( lineBuilder.toString(), TextFormattingUtils.DEFAULT_LINE_WIDTH );
-
-        String header = "usage: ";
-        int headerLength = header.length();
-
-        StringBuilder synopsisBuilder = new StringBuilder();
-        Formatter synopsisFormatter = new Formatter(synopsisBuilder);
-        for( String synopsisLine: wrappedSynopsis ) {
-            synopsisFormatter.format("%" + headerLength + "s%s%n", header, synopsisLine);
-            header = "";
-        }
-
-        return synopsisBuilder.toString();
-    }
-
-    /**
-     * Gets detailed output about each argument type.
-     * @param argumentGroups Collection of program arguments sorted according to how they should be shown. 
-     * @return Detailed text about all arguments.
-     */
-    private String getDetailed( List<ArgumentDefinitionGroup> argumentGroups ) {
-        StringBuilder builder = new StringBuilder();
-
-        for( ArgumentDefinitionGroup argumentGroup: argumentGroups )
-            builder.append( getDetailForGroup( argumentGroup ) );
-
-        return builder.toString();
-    }
-
-    /**
-     * Gets a detailed description for a given argument group.
-     * @param argumentDefinitionGroup The group of argument definitions to render.
-     * @return A string giving detailed info about the contents of this group.
-     */
-    private String getDetailForGroup( ArgumentDefinitionGroup argumentDefinitionGroup ) {
-        if(argumentDefinitionGroup.allHidden())
-            return "";
-
-        StringBuilder builder = new StringBuilder();
-        Formatter formatter = new Formatter( builder );
-
-        if( argumentDefinitionGroup.groupName != null && argumentDefinitionGroup.argumentDefinitions.size() != 0 )
-            builder.append( String.format("%nArguments for %s:%n", argumentDefinitionGroup.groupName ) );
-
-        List<ArgumentDefinition> argumentDefinitions = new ArrayList<ArgumentDefinition>();
-        for(ArgumentDefinition argumentDefinition: argumentDefinitionGroup.argumentDefinitions) {
-            if(!argumentDefinition.isHidden)
-                argumentDefinitions.add(argumentDefinition);
-        }
-
-        // Try to fit the entire argument definition across the screen, but impose an arbitrary cap of 3/4 *
-        // LINE_WIDTH in case the length of the arguments gets out of control.
-        int argWidth = Math.min( findLongestArgumentCallingInfo(argumentDefinitions), (TextFormattingUtils.DEFAULT_LINE_WIDTH*3)/4 - FIELD_SEPARATION_WIDTH );
-        int docWidth = TextFormattingUtils.DEFAULT_LINE_WIDTH - argWidth - FIELD_SEPARATION_WIDTH;
-
-        for( ArgumentDefinition argumentDefinition: argumentDefinitions ) {
-            Iterator<String> wordWrappedArgs = TextFormattingUtils.wordWrap( getArgumentCallingInfo(argumentDefinition), argWidth ).iterator();
-            Iterator<String> wordWrappedDoc  = TextFormattingUtils.wordWrap( getArgumentDoc(argumentDefinition), docWidth ).iterator();
-
-            while( wordWrappedArgs.hasNext() || wordWrappedDoc.hasNext() ) {
-                String arg = wordWrappedArgs.hasNext() ? wordWrappedArgs.next() : "";
-                String doc = wordWrappedDoc.hasNext() ? wordWrappedDoc.next() : "";
-
-                String formatString = "%-" + argWidth + "s%" + FIELD_SEPARATION_WIDTH + "s%s%n";
-                formatter.format( formatString, arg, "", doc );
-            }
-        }
-
-        return builder.toString();
-    }
-
-    /**
-     * Gets a string indicating how this argument should be passed to the application.
-     * @param argumentDefinition Argument definition for which help should be printed.
-     * @return Calling information for this argument.
-     */
-    private String getArgumentCallingInfo( ArgumentDefinition argumentDefinition ) {
-        StringBuilder builder = new StringBuilder();
-        Formatter formatter = new Formatter( builder );
-
-        formatter.format(" ");
-        if( argumentDefinition.shortName != null )
-            formatter.format("-%s,", argumentDefinition.shortName);
-        formatter.format("--%s", argumentDefinition.fullName);
-        if( !argumentDefinition.isFlag )
-            formatter.format(" <%s>", argumentDefinition.fullName);
-
-        return builder.toString();
-    }
-
-    /**
-     * Gets a string of argument documentation.
-     * @param argumentDefinition Argument definition for which help should be printed.
-     * @return Brief description for this argument.
-     */
-    private String getArgumentDoc( ArgumentDefinition argumentDefinition ) {
-        StringBuilder builder = new StringBuilder();
-        builder.append(argumentDefinition.doc);
-        if( argumentDefinition.validOptions != null ) {
-            builder.append(" (");
-            builder.append(Utils.join("|",argumentDefinition.validOptions));
-            builder.append(")");
-        }
-        return builder.toString();
-    }
-
-    /**
-     * Crude implementation which finds the longest argument portion
-     * given a set of arguments.
-     * @param argumentDefinitions argument definitions to inspect.
-     * @return longest argument length.
-     */
-    private int findLongestArgumentCallingInfo( Collection<ArgumentDefinition> argumentDefinitions ) {
-        int longest = 0;
-        for( ArgumentDefinition argumentDefinition: argumentDefinitions ) {
-            String argumentText = getArgumentCallingInfo( argumentDefinition );
-            if( longest < argumentText.length() )
-                longest = argumentText.length();
-        }
-        return longest;
-    }
-
-    /**
-     * Extract the argument definition groups from the argument definitions and arrange them appropriately.
-     * For help, we want the arguments sorted as they are declared in the class.  However, required arguments
-     * should appear before optional arguments.
-     * @param argumentDefinitions Argument definitions from which to extract argument groups.
-     * @return A list of argument groups sorted in display order.
-     */
-    private List<ArgumentDefinitionGroup> prepareArgumentGroups( ArgumentDefinitions argumentDefinitions ) {
-        // Sort the list of argument definitions according to how they should be shown.
-        // Put the sorted results into a new cloned data structure.
-        Comparator<ArgumentDefinition> definitionComparator = new Comparator<ArgumentDefinition>() {
-            public int compare( ArgumentDefinition lhs, ArgumentDefinition rhs ) {
-                if( lhs.required && rhs.required ) return 0;
-                if( lhs.required ) return -1;
-                if( rhs.required ) return 1;
-                return 0;
-            }
-        };
-
-        List<ArgumentDefinitionGroup> argumentGroups = new ArrayList<ArgumentDefinitionGroup>();
-        for( ArgumentDefinitionGroup argumentGroup: argumentDefinitions.getArgumentDefinitionGroups() ) {
-            List<ArgumentDefinition> sortedDefinitions = new ArrayList<ArgumentDefinition>( argumentGroup.argumentDefinitions );
-            Collections.sort( sortedDefinitions, definitionComparator );
-            argumentGroups.add( new ArgumentDefinitionGroup(argumentGroup.groupName,sortedDefinitions) );
-        }
-
-        // Sort the argument groups themselves with main arguments first, followed by plugins sorted in name order.
-        Comparator<ArgumentDefinitionGroup> groupComparator = new Comparator<ArgumentDefinitionGroup>() {
-            public int compare( ArgumentDefinitionGroup lhs, ArgumentDefinitionGroup rhs ) {
-                if( lhs.groupName == null && rhs.groupName == null ) return 0;
-                if( lhs.groupName == null ) return -1;
-                if( rhs.groupName == null ) return 1;
-                return lhs.groupName.compareTo(rhs.groupName);
-            }
-        };
-        Collections.sort( argumentGroups, groupComparator );
-
-
-        return argumentGroups;
-    }
-
-    /**
-     * generateHeaderInformation
-     * <p/>
-     * <p/>
-     * Generate a standard header for the logger
-     *
-     * @param applicationDetails details of the application to run.
-     * @param parsedArgs the arguments passed in
-     */
-    public static void generateHeaderInformation(ApplicationDetails applicationDetails, Map<ArgumentMatchSource, ParsedArgs> parsedArgs) {
-
-        DateFormat dateFormat = new SimpleDateFormat("yyyy/MM/dd HH:mm:ss");
-        java.util.Date date = new java.util.Date();
-
-        String barrier = createBarrier(applicationDetails.applicationHeader);
-
-        logger.info(barrier);
-        for (String headerLine : applicationDetails.applicationHeader)
-            logger.info(headerLine);
-        logger.debug("Current directory: " + System.getProperty("user.dir"));
-        for (Map.Entry<ArgumentMatchSource, ParsedArgs> entry: parsedArgs.entrySet()) {
-            ArgumentMatchSource matchSource = entry.getKey();
-            final String sourceName;
-            switch (matchSource.getType()) {
-                case CommandLine: sourceName = "Program"; break;
-                case Provider: sourceName = matchSource.getDescription(); break;
-                default: throw new RuntimeException("Unexpected argument match source type: " + matchSource.getType());
-            }
-
-            String output = sourceName + " Args: " + entry.getValue().getDescription();
-            logger.info(output);
-        }
-        logger.info(generateUserHelpData());
-        logger.info("Date/Time: " + dateFormat.format(date));
-        logger.info(barrier);
-
-        for(String attribution: applicationDetails.attribution)
-            logger.info(attribution);
-        logger.info(barrier);
-    }
-
-    /**
-     * Create the user-related help information.
-     * @return a non-null, non-empty String with the relevant information.
-     */
-    private static String generateUserHelpData() {
-	try {
-	    return "Executing as " +
-		System.getProperty("user.name") + "@" + InetAddress.getLocalHost().getHostName() +
-		" on " + System.getProperty("os.name") + " " + System.getProperty("os.version") +
-		" " + System.getProperty("os.arch") + "; " + System.getProperty("java.vm.name") +
-		" " + System.getProperty("java.runtime.version") + ".";
-	} catch (Exception e) {
-	    // don't fail
-	    return "";
-	}
-    }
-
-    /**
-     * Create a barrier to use to distinguish the header from the rest of the output.
-     * @param text A collection of lines to output as part of a header.
-     * @return A barrier consisting of the '-' character.
-     */
-    private static String createBarrier(List<String> text) {
-        int barrierWidth = 0;
-        for(String headerLine: text)
-            barrierWidth = Math.max(headerLine.length(),barrierWidth);
-        return String.format("%0" + barrierWidth + "d",0).replace('0','-');
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/help/HelpUtils.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/help/HelpUtils.java
deleted file mode 100644
index 1011a49..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/help/HelpUtils.java
+++ /dev/null
@@ -1,64 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.help;
-
-import org.broadinstitute.gatk.tools.walkers.annotator.interfaces.AnnotationType;
-import org.broadinstitute.gatk.tools.walkers.annotator.interfaces.GenotypeAnnotation;
-import org.broadinstitute.gatk.tools.walkers.annotator.interfaces.InfoFieldAnnotation;
-import org.broadinstitute.gatk.tools.walkers.annotator.interfaces.StandardAnnotation;
-import org.broadinstitute.gatk.utils.classloader.PluginManager;
-
-import java.util.List;
-
-/**
- * NON-javadoc/doclet help-related utility methods should go here. Anything with a com.sun.javadoc.* dependency
- * should go into DocletUtils for use only by doclets.
- */
-public class HelpUtils {
-
-    /**
-     * Simple method to print a list of available annotations.
-     */
-    public static void listAnnotations() {
-        System.out.println("\nThis is a list of available Variant Annotations for use with tools such as UnifiedGenotyper, HaplotypeCaller and VariantAnnotator. Please see the Technical Documentation for more details about these annotations:");
-        System.out.println("http://www.broadinstitute.org/gatk/tooldocs/");
-        System.out.println("\nStandard annotations in the list below are marked with a '*'.");
-        List<Class<? extends InfoFieldAnnotation>> infoAnnotationClasses = new PluginManager<InfoFieldAnnotation>(InfoFieldAnnotation.class).getPlugins();
-        System.out.println("\nAvailable annotations for the VCF INFO field:");
-        for (int i = 0; i < infoAnnotationClasses.size(); i++)
-            System.out.println("\t" + (StandardAnnotation.class.isAssignableFrom(infoAnnotationClasses.get(i)) ? "*" : "") + infoAnnotationClasses.get(i).getSimpleName());
-        System.out.println();
-        List<Class<? extends GenotypeAnnotation>> genotypeAnnotationClasses = new PluginManager<GenotypeAnnotation>(GenotypeAnnotation.class).getPlugins();
-        System.out.println("\nAvailable annotations for the VCF FORMAT field:");
-        for (int i = 0; i < genotypeAnnotationClasses.size(); i++)
-            System.out.println("\t" + (StandardAnnotation.class.isAssignableFrom(genotypeAnnotationClasses.get(i)) ? "*" : "") + genotypeAnnotationClasses.get(i).getSimpleName());
-        System.out.println();
-        System.out.println("\nAvailable classes/groups of annotations:");
-        for ( Class c : new PluginManager<AnnotationType>(AnnotationType.class).getInterfaces() )
-            System.out.println("\t" + c.getSimpleName());
-        System.out.println();
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/help/ResourceBundleExtractorDoclet.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/help/ResourceBundleExtractorDoclet.java
deleted file mode 100644
index f28130b..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/help/ResourceBundleExtractorDoclet.java
+++ /dev/null
@@ -1,228 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.help;
-
-import com.sun.javadoc.*;
-import org.broadinstitute.gatk.engine.walkers.Walker;
-import org.broadinstitute.gatk.utils.Utils;
-
-import java.io.*;
-import java.util.*;
-
-/**
- * Extracts certain types of javadoc (specifically package and class descriptions) and makes them available
- * to applications at runtime.
- *
- * @author mhanna
- * @version 0.1
- */
-public class ResourceBundleExtractorDoclet {
-    /**
-     * Taglet for the particular version number.
-     */
-    public static final String VERSION_TAGLET_NAME = "version";
-    public static final String SUMMARY_TAGLET_NAME = "help.summary";
-    public static final String DESCRIPTION_TAGLET_NAME = "help.description";
-
-    /**
-     * Maintains a collection of resources in memory as they're accumulated.
-     */
-    protected final Properties resourceText = new Properties();
-
-    /**
-     * Maintains a collection of classes that should really be documented.
-     */
-    protected final Set<String> undocumentedWalkers = new HashSet<String>();
-
-    protected String buildTimestamp = null, absoluteVersion = null;
-
-    /**
-     * Extracts the contents of certain types of javadoc and adds them to an XML file.
-     * @param rootDoc The documentation root.
-     * @return Whether the JavaDoc run succeeded.
-     * @throws IOException if output can't be written.
-     */
-    public static boolean start(RootDoc rootDoc) throws IOException {
-        ResourceBundleExtractorDoclet doclet = new ResourceBundleExtractorDoclet();
-        PrintStream out = doclet.loadData(rootDoc, true);
-        doclet.processDocs(rootDoc, out);
-        return true;
-    }
-
-    protected PrintStream loadData(RootDoc rootDoc, boolean overwriteResourcesFile) {
-        PrintStream out = System.out;
-
-        for(String[] options: rootDoc.options()) {
-            if(options[0].equals("-out")) {
-                try {
-                    loadExistingResourceFile(options[1], rootDoc);
-                    if ( overwriteResourcesFile )
-                        out = new PrintStream(options[1]);
-                } catch ( FileNotFoundException e ) {
-                    throw new RuntimeException(e);
-                } catch ( IOException e ) {
-                    throw new RuntimeException(e);
-                }
-            }
-            if(options[0].equals("-build-timestamp"))
-                buildTimestamp = options[1];
-            if (options[0].equals("-absolute-version"))
-                absoluteVersion = options[1];
-        }
-
-        resourceText.setProperty("build.timestamp",buildTimestamp);
-        return out;
-    }
-
-    protected void processDocs(RootDoc rootDoc, PrintStream out) {
-        // Cache packages as we see them, since there's no direct way to iterate over packages.
-        Set<PackageDoc> packages = new HashSet<PackageDoc>();
-
-        for(ClassDoc currentClass: rootDoc.classes()) {
-            PackageDoc containingPackage = currentClass.containingPackage();
-            packages.add(containingPackage);
-
-            if(isRequiredJavadocMissing(currentClass) && isWalker(currentClass))
-                undocumentedWalkers.add(currentClass.name());
-
-            renderHelpText(DocletUtils.getClassName(currentClass),currentClass);
-        }
-
-        for(PackageDoc currentPackage: packages)
-            renderHelpText(currentPackage.name(),currentPackage);
-
-        try {
-            resourceText.store(out,"Strings displayed by the GATK help system");
-        } catch ( FileNotFoundException e ) {
-            throw new RuntimeException(e);
-        } catch ( IOException e ) {
-            throw new RuntimeException(e);
-        }
-
-        // ASCII codes for making text blink
-        final String blink = "\u001B\u005B\u0035\u006D";
-        final String reset = "\u001B\u005B\u006D";
-
-        if(undocumentedWalkers.size() > 0)
-            Utils.warnUser(String.format("The following walkers are currently undocumented: %s%s%s", blink, Utils.join(" ",undocumentedWalkers), reset));
-    }
-
-    /**
-     * Validate the given options against options supported by this doclet.
-     * @param option Option to validate.
-     * @return Number of potential parameters; 0 if not supported.
-     */
-    public static int optionLength(String option) {
-        if(option.equals("-build-timestamp") || option.equals("-out") || option.equals("-absolute-version") ) {
-            return 2;
-        }
-        return 0;
-    }
-
-    /**
-     * Attempts to load the contents of the resource file named by resourceFileName into
-     * our in-memory resource collection resourceText. If the resource file doesn't exist,
-     * prints a notice to the user but does not throw an exception back to the calling method,
-     * since we'll just create a new resource file from scratch in that case.
-     * @param  resourceFileName  name of the resource file to attempt to load.
-     * @param  rootDoc           the documentation root.
-     * @throws IOException       if there is an I/O-related error other than FileNotFoundException
-     *                           while attempting to read the resource file.
-     */
-    private void loadExistingResourceFile( String resourceFileName, RootDoc rootDoc ) throws IOException {
-        try {
-            BufferedReader resourceFile = new BufferedReader(new FileReader(resourceFileName));
-            try {
-                resourceText.load(resourceFile);
-            }
-            finally {
-                resourceFile.close();
-            }
-        }
-        catch ( FileNotFoundException e ) {
-            rootDoc.printNotice("Resource file not found -- generating a new one from scratch.");
-        }
-    }
-
-    /**
-     * Determine whether a given class is a walker.
-     * @param classDoc the type of the given class.
-     * @return True if the class of the given name is a walker.  False otherwise.
-     */
-    protected static boolean isWalker(ClassDoc classDoc) {
-        return DocletUtils.assignableToClass(classDoc, Walker.class, true);
-    }
-
-    /**
-     * Is the javadoc for the given class missing?
-     * @param classDoc Class for which to inspect the JavaDoc.
-     * @return True if the JavaDoc is missing.  False otherwise.
-     */
-    private static boolean isRequiredJavadocMissing(ClassDoc classDoc) {
-        return classDoc.commentText().length() == 0 || classDoc.commentText().contains("Created by IntelliJ");
-    }
-
-    /**
-     * Renders all the help text required for a given name.
-     * @param elementName element name to use as the key
-     * @param element Doc element to process.
-     */
-    private void renderHelpText(String elementName, Doc element) {
-        StringBuilder summaryBuilder = new StringBuilder();
-        for(Tag tag: element.firstSentenceTags())
-             summaryBuilder.append(tag.text());
-        String summary = summaryBuilder.toString();
-        String description = element.commentText();
-
-        // this might seem unnecessary, but the GATK command line program uses this tag to determine the version when running
-        if(absoluteVersion != null)
-            resourceText.setProperty(String.format("%s.%s",elementName,VERSION_TAGLET_NAME),absoluteVersion);
-
-        // Write out an alternate element summary, if exists.
-        resourceText.setProperty(String.format("%s.%s",elementName,SUMMARY_TAGLET_NAME),formatText(summary));
-
-        // Write out an alternate description, if present.
-        resourceText.setProperty(String.format("%s.%s",elementName,DESCRIPTION_TAGLET_NAME),formatText(description));
-    }
-
-    /**
-     * Format text for consumption by the properties file.
-     * @param text Text to format.
-     * @return Formatted text; string trimmed, newlines removed.
-     */
-    private static String formatText(String text) {
-        Scanner scanner = new Scanner(text);
-        StringBuilder output = new StringBuilder();
-
-        while(scanner.hasNextLine()) {
-            if(output.length() > 0)
-                output.append(' ');
-            output.append(scanner.nextLine().trim());
-        }
-
-        return output.toString();    
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/instrumentation/Sizeof.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/instrumentation/Sizeof.java
deleted file mode 100644
index a31c498..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/instrumentation/Sizeof.java
+++ /dev/null
@@ -1,146 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.instrumentation;
-
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-
-import java.lang.instrument.Instrumentation;
-import java.lang.reflect.Array;
-import java.lang.reflect.Field;
-import java.lang.reflect.Modifier;
-import java.util.IdentityHashMap;
-
-/**
- * A sizeof implementation for Java.  Relies on the Java instrumentation API, so
- * it must be added as an agent to function properly.
- *
- * To run, add -javaagent:$STING_HOME/dist/StingUtils.jar as a command-line
- * JVM argument.
- *
- * @author mhanna
- * @version 0.1
- */
-public class Sizeof {
-    /**
-     * Instrumentation object.  Registered by the JVM via the premain() method.
-     */
-    private static Instrumentation instrumentation;
-
-    /**
-     * Called by the JVM before the agent is started.
-     * @param args Arguments?
-     * @param inst Instrumentation object, used to perform instrumentation in the JVM.
-     */
-    public static void premain(String args, Instrumentation inst) {
-        instrumentation = inst;
-    }
-
-    /**
-     * Is this Sizeof operator enabled?  To enable, add the -javaagent directive listed in the class-level javadoc.
-     * @return True if sizeof() is enabled.  If false, any calls to utility methods of this class will throw an exception.
-     */
-    public static boolean isEnabled() {
-        return instrumentation != null;
-    }
-
-    /**
-     * Gets the size of the given object.  Retrieves the size for only this object; any reference fields in the object will only be
-     * counted as single pointers.
-     * @param o The object to sizeof().
-     * @return Gets the best possible approximation we can get of the size of the object in memory.  On Sun JVM, includes some object padding.
-     */
-    public static long getObjectSize(Object o) {
-        if(!isEnabled())
-            throw new ReviewedGATKException("Sizeof operator is currently disabled!  To enable, review the documentation in Sizeof.java");
-        return instrumentation.getObjectSize(o);
-    }
-
-    /**
-     * Gets the size of the given object, including the size of the objects to which this object refers.
-     * @param o The object to sizeof().
-     * @return Gets the best possible approximation we can get of the size of the object in memory, including all references within each object.
-     */
-    public static long getObjectGraphSize(Object o) {
-        if(!isEnabled())
-            throw new ReviewedGATKException("Sizeof operator is currently disabled!  To enable, review the documentation in Sizeof.java");
-        IdentityHashMap<Object,Object> objectsSeen = new IdentityHashMap<Object,Object>();
-        return getObjectGraphSize(o,objectsSeen);
-    }
-
-    /**
-     * The engine for walking the graph of all objects and their children.
-     * @param o The object to traverse.
-     * @param objectsSeen A list of all objects already seen.
-     * @return Gets the best possible approximation we can get of the size of the object in memory, including all references within each object.
-     */
-    private static long getObjectGraphSize(Object o,IdentityHashMap<Object,Object> objectsSeen) {
-        // Size of a null object itself (as opposed to the reference to the null object) is 0.
-        if(o == null)
-            return 0;
-        
-        // Don't allow repeated traversals of the same object.
-        if(objectsSeen.containsKey(o))
-            return 0;
-        objectsSeen.put(o,o);
-
-        // Get the size of the object itself, plus all contained primitives.
-        long totalSize = instrumentation.getObjectSize(o);
-
-        // Get the size of (non-primitive) array elements.
-        Class<?> classToInspect = o.getClass();
-        if(classToInspect.isArray()) {
-            if(!classToInspect.getComponentType().isPrimitive()) {
-                for(int i = 0; i < Array.getLength(o); i++)
-                    totalSize += getObjectGraphSize(Array.get(o,i),objectsSeen);
-            }
-        }
-
-        // Walk the descendents of each field of this class.  Be sure to avoid synthetic fields like this$0 -- these
-        // are back references to the parent of the object contained in the inner class.
-        // Potential BUG: Are there other types of synthetic fields we should be tracking?
-        while(classToInspect != null) {
-            for(Field field: classToInspect.getDeclaredFields()) {
-                if(field.getType().isPrimitive())
-                    continue;
-                if(Modifier.isStatic(field.getModifiers()))
-                    continue;
-                if(field.isSynthetic())
-                    continue;
-                field.setAccessible(true);
-                Object fieldValue;
-                try {
-                    fieldValue = field.get(o);
-                }
-                catch(IllegalAccessException ex) {
-                    throw new ReviewedGATKException("Unable to access field " + field.getName(),ex);
-                }
-                totalSize += getObjectGraphSize(fieldValue,objectsSeen);
-            }
-            classToInspect = classToInspect.getSuperclass();
-        }
-        return totalSize;
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/interval/IntervalMergingRule.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/interval/IntervalMergingRule.java
deleted file mode 100644
index 3e5eb45..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/interval/IntervalMergingRule.java
+++ /dev/null
@@ -1,35 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.interval;
-
-
-/**
- * a class we use to determine the merging rules for intervals passed to the GATK
- */
-public enum IntervalMergingRule {
-    ALL, // we merge both overlapping intervals and abutting intervals
-    OVERLAPPING_ONLY // We merge intervals that are overlapping, but NOT ones that only abut each other
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/interval/IntervalSetRule.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/interval/IntervalSetRule.java
deleted file mode 100644
index e9d20ee..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/interval/IntervalSetRule.java
+++ /dev/null
@@ -1,36 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.interval;
-
-/**
- * set operators for combining lists of intervals
- */
-public enum IntervalSetRule {
-    /** Take the union of all intervals */
-    UNION,
-    /** Take the intersection of intervals (the subset that overlaps all intervals specified) */
-    INTERSECTION;
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/interval/IntervalUtils.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/interval/IntervalUtils.java
deleted file mode 100644
index 7fffb12..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/interval/IntervalUtils.java
+++ /dev/null
@@ -1,890 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.interval;
-
-import com.google.java.contract.Ensures;
-import com.google.java.contract.Requires;
-import htsjdk.samtools.util.Interval;
-import htsjdk.samtools.util.IntervalList;
-import htsjdk.samtools.SAMFileHeader;
-import org.apache.log4j.Logger;
-import htsjdk.tribble.Feature;
-import org.broadinstitute.gatk.utils.commandline.IntervalArgumentCollection;
-import org.broadinstitute.gatk.utils.commandline.IntervalBinding;
-import org.broadinstitute.gatk.engine.datasources.reference.ReferenceDataSource;
-import org.broadinstitute.gatk.utils.GenomeLoc;
-import org.broadinstitute.gatk.utils.GenomeLocParser;
-import org.broadinstitute.gatk.utils.GenomeLocSortedSet;
-import org.broadinstitute.gatk.utils.Utils;
-import org.broadinstitute.gatk.utils.collections.Pair;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-import org.broadinstitute.gatk.utils.exceptions.UserException;
-import org.broadinstitute.gatk.utils.text.XReadLines;
-
-import java.io.File;
-import java.io.IOException;
-import java.util.*;
-
-/**
- * Parse text representations of interval strings that
- * can appear in GATK-based applications.
- *
- * @author mhanna
- * @version 0.1
- */
-public class IntervalUtils {
-    private static Logger logger = Logger.getLogger(IntervalUtils.class);
-
-    /**
-     * Turns a set of strings describing intervals into a parsed set of intervals.  Valid string elements can be files,
-     * intervals in samtools notation (chrA:B-C), or some combination of the above separated by semicolons.  Additionally,
-     * 'all' can be supplied to indicate all possible intervals, but 'all' must be exclusive of all other interval
-     * specifications.
-     *
-     * @param parser Genome loc parser.
-     * @param argList A list of strings containing interval data.
-     * @return an unsorted, unmerged representation of the given intervals.  Null is used to indicate that all intervals should be used.
-     */
-    public static List<GenomeLoc> parseIntervalArguments(GenomeLocParser parser, List<String> argList) {
-        List<GenomeLoc> rawIntervals = new ArrayList<GenomeLoc>();    // running list of raw GenomeLocs
-
-        if (argList != null) { // now that we can be in this function if only the ROD-to-Intervals was provided, we need to
-                               // ensure that the arg list isn't null before looping.
-            for (String argument : argList) {
-                rawIntervals.addAll(parseIntervalArguments(parser, argument));
-            }
-        }
-
-        return rawIntervals;
-    }
-
-    public static List<GenomeLoc> parseIntervalArguments(GenomeLocParser parser, String arg) {
-        List<GenomeLoc> rawIntervals = new ArrayList<GenomeLoc>();    // running list of raw GenomeLocs
-
-        if ( arg.indexOf(';') != -1 ) {
-            throw new UserException.BadArgumentValue("-L " + arg, "The legacy -L \"interval1;interval2\" syntax " +
-                                                     "is no longer supported. Please use one -L argument for each " +
-                                                     "interval or an interval file instead.");
-        }
-
-        // if any argument is 'unmapped', "parse" it to a null entry.  A null in this case means 'all the intervals with no alignment data'.
-        if (isUnmapped(arg))
-            rawIntervals.add(GenomeLoc.UNMAPPED);
-        // if it's a file, add items to raw interval list
-        else if (isIntervalFile(arg)) {
-            try {
-                rawIntervals.addAll(intervalFileToList(parser, arg));
-            }
-            catch ( UserException.MalformedGenomeLoc e ) {
-                throw e;
-            }
-            catch ( Exception e ) {
-                throw new UserException.MalformedFile(arg, "Interval file could not be parsed in any supported format.", e);
-            }
-        }
-        // otherwise treat as an interval -> parse and add to raw interval list
-        else {
-            rawIntervals.add(parser.parseGenomeLoc(arg));
-        }
-
-        return rawIntervals;
-    }
-
-    /**
-     * Read a file of genome locations to process. The file may be in BED, Picard,
-     * or GATK interval format.
-     *
-     * @param glParser   GenomeLocParser
-     * @param file_name  interval file
-     * @return List<GenomeLoc> List of Genome Locs that have been parsed from file
-     */
-    public static List<GenomeLoc> intervalFileToList(final GenomeLocParser glParser, final String file_name) {
-        // try to open file
-        File inputFile = new File(file_name);
-        List<GenomeLoc> ret = new ArrayList<GenomeLoc>();
-
-        // case: BED file
-        if ( file_name.toUpperCase().endsWith(".BED") ) {
-            // this is now supported in Tribble
-            throw new ReviewedGATKException("BED files must be parsed through Tribble; parsing them as intervals through the GATK engine is no longer supported");
-        }
-        else {
-            /**
-             * IF not a BED file:
-             * first try to read it as a Picard interval file since that's well structured
-             * we'll fail quickly if it's not a valid file.
-             */
-            boolean isPicardInterval = false;
-            try {
-                // Note: Picard will skip over intervals with contigs not in the sequence dictionary
-                IntervalList il = IntervalList.fromFile(inputFile);
-                isPicardInterval = true;
-
-                int nInvalidIntervals = 0;
-                for (Interval interval : il.getIntervals()) {
-                    if ( glParser.isValidGenomeLoc(interval.getSequence(), interval.getStart(), interval.getEnd(), true))
-                        ret.add(glParser.createGenomeLoc(interval.getSequence(), interval.getStart(), interval.getEnd(), true));
-                    else {
-                        nInvalidIntervals++;
-                    }
-                }
-                if ( nInvalidIntervals > 0 )
-                    logger.warn("Ignoring " + nInvalidIntervals + " invalid intervals from " + inputFile);
-            }
-
-            // if that didn't work, try parsing file as a GATK interval file
-            catch (Exception e) {
-                if ( isPicardInterval ) // definitely a picard file, but we failed to parse
-                    throw new UserException.CouldNotReadInputFile(inputFile, e);
-                else {
-                    try {
-                        XReadLines reader = new XReadLines(new File(file_name));
-                        for(String line: reader) {
-                            if ( line.trim().length() > 0 ) {
-                                ret.add(glParser.parseGenomeLoc(line));
-                            }
-                        }
-                        reader.close();
-                    }
-                    catch (IOException e2) {
-                        throw new UserException.CouldNotReadInputFile(inputFile, e2);
-                    }
-                }
-            }
-        }
-
-        return ret;
-    }
-
-    /**
-     * Returns true if the interval string is the "unmapped" interval
-     * @param interval Interval to check
-     * @return true if the interval string is the "unmapped" interval
-     */
-    public static boolean isUnmapped(String interval) {
-        return (interval != null && interval.trim().toLowerCase().equals("unmapped"));
-    }
-
-    /**
-     * merge two interval lists, using an interval set rule
-     * @param setOne a list of genomeLocs, in order (cannot be NULL)
-     * @param setTwo a list of genomeLocs, also in order (cannot be NULL)
-     * @param rule the rule to use for merging, i.e. union, intersection, etc
-     * @return a list, correctly merged using the specified rule
-     */
-    public static List<GenomeLoc> mergeListsBySetOperator(List<GenomeLoc> setOne, List<GenomeLoc> setTwo, IntervalSetRule rule) {
-        // shortcut, if either set is zero, return the other set
-        if (setOne == null || setOne.size() == 0 || setTwo == null || setTwo.size() == 0)
-            return Collections.unmodifiableList((setOne == null || setOne.size() == 0) ? setTwo : setOne);
-
-        // our master list, since we can't guarantee removal time in a generic list
-        LinkedList<GenomeLoc> retList = new LinkedList<GenomeLoc>();
-
-        // if we're set to UNION, just add them all
-        if (rule == null || rule == IntervalSetRule.UNION) {
-            retList.addAll(setOne);
-            retList.addAll(setTwo);
-            return Collections.unmodifiableList(retList);
-        }
-
-        // else we're INTERSECTION, create two indexes into the lists
-        int iOne = 0;
-        int iTwo = 0;
-
-        // merge the second into the first using the rule
-        while (iTwo < setTwo.size() && iOne < setOne.size())
-            // if the first list is ahead, drop items off the second until we overlap
-            if (setTwo.get(iTwo).isBefore(setOne.get(iOne)))
-                iTwo++;
-            // if the second is ahead, drop intervals off the first until we overlap
-            else if (setOne.get(iOne).isBefore(setTwo.get(iTwo)))
-                iOne++;
-            // we overlap, intersect the two intervals and add the result.  Then remove the interval that ends first.
-            else {
-                retList.add(setOne.get(iOne).intersect(setTwo.get(iTwo)));
-                if (setOne.get(iOne).getStop() < setTwo.get(iTwo).getStop()) iOne++;
-                else iTwo++;
-            }
-
-        //if we have an empty list, throw an exception.  If they specified intersection and there are no items, this is bad.
-        if (retList.size() == 0)
-                throw new UserException.BadInput("The INTERSECTION of your -L options produced no intervals.");
-
-        // we don't need to add the rest of remaining locations, since we know they don't overlap. return what we have
-        return Collections.unmodifiableList(retList);
-    }
-
-    /**
-     * Sorts and merges an interval list.  Multiple techniques are available for merging: ALL, which combines
-     * all overlapping and abutting intervals into an interval that spans the union of all covered bases, and
-     * OVERLAPPING_ONLY, which unions overlapping intervals but keeps abutting intervals separate.
-     *
-     * @param parser Genome loc parser for the intervals.
-     * @param intervals A collection of intervals to merge.
-     * @param mergingRule A descriptor for the type of merging to perform.
-     * @return A sorted, merged version of the intervals passed in.
-     */
-    public static GenomeLocSortedSet sortAndMergeIntervals(GenomeLocParser parser, List<GenomeLoc> intervals, IntervalMergingRule mergingRule) {
-        // Make a copy of the (potentially unmodifiable) list to be sorted
-        intervals = new ArrayList<GenomeLoc>(intervals);
-        // sort raw interval list
-        Collections.sort(intervals);
-        // now merge raw interval list
-        intervals = mergeIntervalLocations(intervals, mergingRule);
-
-        return GenomeLocSortedSet.createSetFromList(parser,intervals);
-    }
-
-    /**
-     * computes whether the test interval list is equivalent to master.  To be equivalent, test must
-     * contain GenomeLocs covering every base in master, exactly once.  Note that this algorithm
-     * assumes that master genomelocs are all discontiguous (i.e., we don't have locs like 1-3 and 4-6 but
-     * rather just 1-6).  In order to use this algorithm with contiguous genomelocs first merge them.  The algorithm
-     * doesn't assume that test has discontiguous genomelocs.
-     *
-     * Returns a null string if there are no differences, otherwise returns a string describing the difference
-     * (useful for UnitTests).  Assumes both lists are sorted
-     *
-     * @param masterArg sorted master genome locs
-     * @param testArg sorted test genome locs
-     * @return null string if there are no difference, otherwise a string describing the difference
-     */
-    public static String equateIntervals(List<GenomeLoc> masterArg, List<GenomeLoc> testArg) {
-        LinkedList<GenomeLoc> master = new LinkedList<GenomeLoc>(masterArg);
-        LinkedList<GenomeLoc> test = new LinkedList<GenomeLoc>(testArg);
-
-        while ( ! master.isEmpty() ) { // there's still unchecked bases in master
-            final GenomeLoc masterHead = master.pop();
-            final GenomeLoc testHead = test.pop();
-
-            if ( testHead.overlapsP(masterHead) ) {
-                // remove the parts of test that overlap master, and push the remaining
-                // parts onto master for further comparison.
-                for ( final GenomeLoc masterPart : Utils.reverse(masterHead.subtract(testHead)) ) {
-                    master.push(masterPart);
-                }
-            } else {
-                // testHead is incompatible with masterHead, so we must have extra bases in testHead
-                // that aren't in master
-                return "Incompatible locs detected masterHead=" + masterHead + ", testHead=" + testHead;
-            }
-        }
-
-        if ( test.isEmpty() ) // everything is equal
-            return null; // no differences
-        else
-            return "Remaining elements found in test: first=" + test.peek();
-    }
-
-
-    /**
-     * Check if string argument was intended as a file
-     * Accepted file extensions: .bed .list, .picard, .interval_list, .intervals.
-     * @param str token to identify as a filename.
-     * @return true if the token looks like a filename, or false otherwise.
-     */
-    public static boolean isIntervalFile(String str) {
-        return isIntervalFile(str, true);
-    }
-
-    /**
-     * Check if string argument was intended as a file
-     * Accepted file extensions: .bed .list, .picard, .interval_list, .intervals.
-     * @param str token to identify as a filename.
-     * @param checkExists if true throws an exception if the file doesn't exist.
-     * @return true if the token looks like a filename, or false otherwise.
-     */
-    public static boolean isIntervalFile(String str, boolean checkExists) {
-        // should we define list of file extensions as a public array somewhere?
-        // is regex or endsWith better?
-        File file = new File(str);
-        if (str.toUpperCase().endsWith(".BED") || str.toUpperCase().endsWith(".LIST") ||
-                str.toUpperCase().endsWith(".PICARD") || str.toUpperCase().endsWith(".INTERVAL_LIST")
-                || str.toUpperCase().endsWith(".INTERVALS")) {
-            if (!checkExists)
-                return true;
-            else if (file.exists())
-                return true;
-            else
-                throw new UserException.CouldNotReadInputFile(file, "The interval file does not exist.");
-        }
-
-        if(file.exists())
-            throw new UserException.CouldNotReadInputFile(file, String.format("The interval file %s does not have one of " +
-                    "the supported extensions (.bed, .list, .picard, .interval_list, or .intervals). " +
-                    "Please rename your file with the appropriate extension. If %s is NOT supposed to be a file, " +
-                    "please move or rename the file at location %s", str, str, file.getAbsolutePath()));
-
-        else return false;
-    }
-
-    /**
-     * Returns a map of contig names with their sizes.
-     * @param reference The reference for the intervals.
-     * @return A map of contig names with their sizes.
-     */
-    public static Map<String, Integer> getContigSizes(File reference) {
-        ReferenceDataSource referenceSource = new ReferenceDataSource(reference);
-        List<GenomeLoc> locs = GenomeLocSortedSet.createSetFromSequenceDictionary(referenceSource.getReference().getSequenceDictionary()).toList();
-        Map<String, Integer> lengths = new LinkedHashMap<String, Integer>();
-        for (GenomeLoc loc: locs)
-            lengths.put(loc.getContig(), loc.size());
-        return lengths;
-    }
-
-    /**
-     * Splits an interval list into multiple files.
-     * @param fileHeader The sam file header.
-     * @param locs The genome locs to split.
-     * @param scatterParts The output interval lists to write to.
-     */
-    public static void scatterContigIntervals(SAMFileHeader fileHeader, List<GenomeLoc> locs, List<File> scatterParts) {
-
-	// Contract: must divide locs up so that each of scatterParts gets a sublist such that:
-	// (a) all locs concerning a particular contig go to the same part
-	// (b) locs are not split or combined, and remain in the same order (so scatterParts[0] + ... + scatterParts[n] == locs)
-
-	// Locs are already sorted.
-
-	long totalBases = 0;
-	for(GenomeLoc loc : locs)
-	    totalBases += loc.size();
-
-	long idealBasesPerPart = totalBases / scatterParts.size();
-	if(idealBasesPerPart == 0)
-	    throw new UserException.BadInput(String.format("Genome region is too short (%d bases) to split into %d parts", totalBases, scatterParts.size()));
-
-	// Find the indices in locs where we switch from one contig to the next.
-	ArrayList<Integer> contigStartLocs = new ArrayList<Integer>();
-	String prevContig = null;
-
-	for(int i = 0; i < locs.size(); ++i) {
-
-	    GenomeLoc loc = locs.get(i);
-	    if(prevContig == null || !loc.getContig().equals(prevContig))
-		contigStartLocs.add(i);
-	    prevContig = loc.getContig();
-
-	}
-
-	if(contigStartLocs.size() < scatterParts.size())
-	    throw new UserException.BadInput(String.format("Input genome region has too few contigs (%d) to split into %d parts", contigStartLocs.size(), scatterParts.size()));
-
-	long thisPartBases = 0;
-	int partIdx = 0;
-	IntervalList outList = new IntervalList(fileHeader);
-
-	for(int i = 0; i < locs.size(); ++i) {
-
-	    GenomeLoc loc = locs.get(i);
-	    thisPartBases += loc.getStop() - loc.getStart();
-
-	    outList.add(toInterval(loc, i));
-
-	    boolean partMustStop = false;
-
-	    if(partIdx < (scatterParts.size() - 1)) {
-
-		// If there are n contigs and n parts remaining then we must split here,
-		// otherwise we will run out of contigs.
-
-		int nextPart = partIdx + 1;
-		int nextPartMustStartBy = contigStartLocs.get(nextPart + (contigStartLocs.size() - scatterParts.size()));
-		if(i + 1 == nextPartMustStartBy)
-		    partMustStop = true;
-		
-	    }
-	    else if(i == locs.size() - 1) {
-
-		// We're done! Write the last scatter file.
-		partMustStop = true;
-
-	    }
-	    
-	    if(partMustStop || thisPartBases > idealBasesPerPart) {
-
-		// Ideally we would split here. However, we must make sure to do so
-		// on a contig boundary. Test always passes with partMustStop == true
-		// since that indicates we're at a contig boundary.
-
-		GenomeLoc nextLoc = null;
-		if((i + 1) < locs.size())
-		    nextLoc = locs.get(i+1);
-
-		if(nextLoc == null || !nextLoc.getContig().equals(loc.getContig())) {
-
-		    // Write out this part:
-		    outList.write(scatterParts.get(partIdx));
-
-		    // Reset. If this part ran long, leave the excess in thisPartBases
-		    // and the next will be a little shorter to compensate.
-		    outList = new IntervalList(fileHeader);
-		    thisPartBases -= idealBasesPerPart;
-		    ++partIdx;
-		    
-		}
-
-	    }
-
-	}
-
-    }
-
-    /**
-     * Splits an interval list into multiple sublists.
-     * @param locs The genome locs to split.
-     * @param splits The stop points for the genome locs returned by splitFixedIntervals.
-     * @return A list of lists of genome locs, split according to splits
-     */
-    public static List<List<GenomeLoc>> splitIntervalsToSubLists(List<GenomeLoc> locs, List<Integer> splits) {
-        int start = 0;
-        List<List<GenomeLoc>> sublists = new ArrayList<List<GenomeLoc>>(splits.size());
-        for (Integer stop: splits) {
-            List<GenomeLoc> curList = new ArrayList<GenomeLoc>();
-            for (int i = start; i < stop; i++)
-                curList.add(locs.get(i));
-            start = stop;
-            sublists.add(curList);
-        }
-
-        return sublists;
-    }
-
-
-    /**
-     * Splits an interval list into multiple files.
-     * @param fileHeader The sam file header.
-     * @param splits Pre-divided genome locs returned by splitFixedIntervals.
-     * @param scatterParts The output interval lists to write to.
-     */
-    public static void scatterFixedIntervals(SAMFileHeader fileHeader, List<List<GenomeLoc>> splits, List<File> scatterParts) {
-        if (splits.size() != scatterParts.size())
-            throw new UserException.BadArgumentValue("splits", String.format("Split points %d does not equal the number of scatter parts %d.", splits.size(), scatterParts.size()));
-
-        int fileIndex = 0;
-        int locIndex = 1;
-        for (final List<GenomeLoc> split : splits) {
-            IntervalList intervalList = new IntervalList(fileHeader);
-            for (final GenomeLoc loc : split)
-                intervalList.add(toInterval(loc, locIndex++));
-            intervalList.write(scatterParts.get(fileIndex++));
-        }
-    }
-
-    /**
-     * Splits the genome locs up by size.
-     * @param locs Genome locs to split.
-     * @param numParts Number of parts to split the locs into.
-     * @return The stop points to split the genome locs.
-     */
-    public static List<List<GenomeLoc>> splitFixedIntervals(List<GenomeLoc> locs, int numParts) {
-        if (locs.size() < numParts)
-            throw new UserException.BadArgumentValue("scatterParts", String.format("Cannot scatter %d locs into %d parts.", locs.size(), numParts));
-        final long locsSize = intervalSize(locs);
-        final List<Integer> splitPoints = new ArrayList<Integer>();
-        addFixedSplit(splitPoints, locs, locsSize, 0, locs.size(), numParts);
-        Collections.sort(splitPoints);
-        splitPoints.add(locs.size());
-        return splitIntervalsToSubLists(locs, splitPoints);
-    }
-
-    @Requires({"locs != null", "numParts > 0"})
-    @Ensures("result != null")
-    public static List<List<GenomeLoc>> splitLocusIntervals(List<GenomeLoc> locs, int numParts) {
-        // the ideal size of each split
-        final long bp = IntervalUtils.intervalSize(locs);
-        final long idealSplitSize = Math.max((long)Math.floor(bp / (1.0*numParts)), 1);
-
-        // algorithm:
-        // split = ()
-        // set size = 0
-        // pop the head H off locs.
-        // If size + size(H) < splitSize:
-        //      add H to split, continue
-        // If size + size(H) == splitSize:
-        //      done with split, put in splits, restart
-        // if size + size(H) > splitSize:
-        //      cut H into two pieces, first of which has splitSize - size bp
-        //      push both pieces onto locs, continue
-        // The last split is special -- when you have only one split left, it gets all of the remaining locs
-        // to deal with rounding issues
-        final List<List<GenomeLoc>> splits = new ArrayList<List<GenomeLoc>>(numParts);
-
-        LinkedList<GenomeLoc> locsLinkedList = new LinkedList<GenomeLoc>(locs);
-        while ( ! locsLinkedList.isEmpty() ) {
-            if ( splits.size() + 1 == numParts ) {
-                // the last one gets all of the remaining parts
-                splits.add(new ArrayList<GenomeLoc>(locsLinkedList));
-                locsLinkedList.clear();
-            } else {
-                final SplitLocusRecursive one = splitLocusIntervals1(locsLinkedList, idealSplitSize);
-                splits.add(one.split);
-                locsLinkedList = one.remaining;
-            }
-        }
-
-        return splits;
-    }
-
-    @Requires({"remaining != null", "!remaining.isEmpty()", "idealSplitSize > 0"})
-    @Ensures({"result != null"})
-    static SplitLocusRecursive splitLocusIntervals1(LinkedList<GenomeLoc> remaining, long idealSplitSize) {
-        final List<GenomeLoc> split = new ArrayList<GenomeLoc>();
-        long size = 0;
-
-        while ( ! remaining.isEmpty() ) {
-            GenomeLoc head = remaining.pop();
-            final long newSize = size + head.size();
-
-            if ( newSize == idealSplitSize ) {
-                split.add(head);
-                break; // we are done
-            } else if ( newSize > idealSplitSize ) {
-                final long remainingBp = idealSplitSize - size;
-                final long cutPoint = head.getStart() + remainingBp;
-                GenomeLoc[] parts = head.split((int)cutPoint);
-                remaining.push(parts[1]);
-                remaining.push(parts[0]);
-                // when we go around, head.size' = idealSplitSize - size
-                // so newSize' = splitSize + head.size' = size + (idealSplitSize - size) = idealSplitSize
-            } else {
-                split.add(head);
-                size = newSize;
-            }
-        }
-
-        return new SplitLocusRecursive(split, remaining);
-    }
-
-    /**
-     * Setup the intervals to be processed
-     */
-    public static GenomeLocSortedSet parseIntervalBindings(
-            final ReferenceDataSource referenceDataSource,
-            final List<IntervalBinding<Feature>> intervals,
-            final IntervalSetRule intervalSetRule, final IntervalMergingRule intervalMergingRule, final int intervalPadding,
-            final List<IntervalBinding<Feature>> excludeIntervals) {
-
-        Pair<GenomeLocSortedSet, GenomeLocSortedSet> includeExcludePair = parseIntervalBindingsPair(
-                referenceDataSource, intervals, intervalSetRule, intervalMergingRule, intervalPadding, excludeIntervals);
-
-        GenomeLocSortedSet includeSortedSet = includeExcludePair.getFirst();
-        GenomeLocSortedSet excludeSortedSet = includeExcludePair.getSecond();
-
-        if (excludeSortedSet != null) {
-            return includeSortedSet.subtractRegions(excludeSortedSet);
-        } else {
-            return includeSortedSet;
-        }
-    }
-
-    public static GenomeLocSortedSet parseIntervalArguments(final ReferenceDataSource referenceDataSource, IntervalArgumentCollection argCollection) {
-        GenomeLocSortedSet intervals = null;
-
-        // return if no interval arguments at all
-        if ( argCollection.intervals == null && argCollection.excludeIntervals == null )
-            return intervals;
-
-        // Note that the use of '-L all' is no longer supported.
-
-        // if include argument isn't given, create new set of all possible intervals
-
-        final Pair<GenomeLocSortedSet, GenomeLocSortedSet> includeExcludePair = IntervalUtils.parseIntervalBindingsPair(
-                referenceDataSource,
-                argCollection.intervals,
-                argCollection.intervalSetRule, argCollection.intervalMerging, argCollection.intervalPadding,
-                argCollection.excludeIntervals);
-
-        final GenomeLocSortedSet includeSortedSet = includeExcludePair.getFirst();
-        final GenomeLocSortedSet excludeSortedSet = includeExcludePair.getSecond();
-
-        // if no exclude arguments, can return parseIntervalArguments directly
-        if ( excludeSortedSet == null )
-            intervals = includeSortedSet;
-
-            // otherwise there are exclude arguments => must merge include and exclude GenomeLocSortedSets
-        else {
-            intervals = includeSortedSet.subtractRegions(excludeSortedSet);
-
-            // logging messages only printed when exclude (-XL) arguments are given
-            final long toPruneSize = includeSortedSet.coveredSize();
-            final long toExcludeSize = excludeSortedSet.coveredSize();
-            final long intervalSize = intervals.coveredSize();
-            logger.info(String.format("Initial include intervals span %d loci; exclude intervals span %d loci", toPruneSize, toExcludeSize));
-            logger.info(String.format("Excluding %d loci from original intervals (%.2f%% reduction)",
-                    toPruneSize - intervalSize, (toPruneSize - intervalSize) / (0.01 * toPruneSize)));
-        }
-
-        logger.info(String.format("Processing %d bp from intervals", intervals.coveredSize()));
-        return intervals;
-    }
-
-    public static Pair<GenomeLocSortedSet, GenomeLocSortedSet> parseIntervalBindingsPair(
-            final ReferenceDataSource referenceDataSource,
-            final List<IntervalBinding<Feature>> intervals,
-            final IntervalSetRule intervalSetRule, final IntervalMergingRule intervalMergingRule, final int intervalPadding,
-            final List<IntervalBinding<Feature>> excludeIntervals) {
-        GenomeLocParser genomeLocParser = new GenomeLocParser(referenceDataSource.getReference());
-
-        // if include argument isn't given, create new set of all possible intervals
-        GenomeLocSortedSet includeSortedSet = ((intervals == null || intervals.size() == 0) ?
-                GenomeLocSortedSet.createSetFromSequenceDictionary(referenceDataSource.getReference().getSequenceDictionary()) :
-                loadIntervals(intervals, intervalSetRule, intervalMergingRule, intervalPadding, genomeLocParser));
-
-        GenomeLocSortedSet excludeSortedSet = null;
-        if (excludeIntervals != null && excludeIntervals.size() > 0) {
-            excludeSortedSet = loadIntervals(excludeIntervals, IntervalSetRule.UNION, intervalMergingRule, 0, genomeLocParser);
-        }
-        return new Pair<GenomeLocSortedSet, GenomeLocSortedSet>(includeSortedSet, excludeSortedSet);
-    }
-
-    public static GenomeLocSortedSet loadIntervals(
-            final List<IntervalBinding<Feature>> intervalBindings,
-            final IntervalSetRule rule, final IntervalMergingRule intervalMergingRule, final int padding,
-            final GenomeLocParser genomeLocParser) {
-        List<GenomeLoc> allIntervals = new ArrayList<GenomeLoc>();
-        for ( IntervalBinding intervalBinding : intervalBindings) {
-            @SuppressWarnings("unchecked")
-            List<GenomeLoc> intervals = intervalBinding.getIntervals(genomeLocParser);
-
-            if ( intervals.isEmpty() ) {
-                logger.warn("The interval file " + intervalBinding.getSource() + " contains no intervals that could be parsed.");
-            }
-
-            if ( padding > 0 ) {
-                intervals = getIntervalsWithFlanks(genomeLocParser, intervals, padding);
-            }
-
-            allIntervals = mergeListsBySetOperator(intervals, allIntervals, rule);
-        }
-
-        return sortAndMergeIntervals(genomeLocParser, allIntervals, intervalMergingRule);
-    }
-
-    private final static class SplitLocusRecursive {
-        final List<GenomeLoc> split;
-        final LinkedList<GenomeLoc> remaining;
-
-        @Requires({"split != null", "remaining != null"})
-        private SplitLocusRecursive(final List<GenomeLoc> split, final LinkedList<GenomeLoc> remaining) {
-            this.split = split;
-            this.remaining = remaining;
-        }
-    }
-
-    public static List<GenomeLoc> flattenSplitIntervals(List<List<GenomeLoc>> splits) {
-        final List<GenomeLoc> locs = new ArrayList<GenomeLoc>();
-        for ( final List<GenomeLoc> split : splits )
-            locs.addAll(split);
-        return locs;
-    }
-
-    private static void addFixedSplit(List<Integer> splitPoints, List<GenomeLoc> locs, long locsSize, int startIndex, int stopIndex, int numParts) {
-        if (numParts < 2)
-            return;
-        int halfParts = (numParts + 1) / 2;
-        Pair<Integer, Long> splitPoint = getFixedSplit(locs, locsSize, startIndex, stopIndex, halfParts, numParts - halfParts);
-        int splitIndex = splitPoint.first;
-        long splitSize = splitPoint.second;
-        splitPoints.add(splitIndex);
-        addFixedSplit(splitPoints, locs, splitSize, startIndex, splitIndex, halfParts);
-        addFixedSplit(splitPoints, locs, locsSize - splitSize, splitIndex, stopIndex, numParts - halfParts);
-    }
-
-    private static Pair<Integer, Long> getFixedSplit(List<GenomeLoc> locs, long locsSize, int startIndex, int stopIndex, int minLocs, int maxLocs) {
-        int splitIndex = startIndex;
-        long splitSize = 0;
-        for (int i = 0; i < minLocs; i++) {
-            splitSize += locs.get(splitIndex).size();
-            splitIndex++;
-        }
-        long halfSize = locsSize / 2;
-        while (splitIndex < (stopIndex - maxLocs) && splitSize < halfSize) {
-            splitSize += locs.get(splitIndex).size();
-            splitIndex++;
-        }
-        return new Pair<Integer, Long>(splitIndex, splitSize);
-    }
-
-    /**
-     * Converts a GenomeLoc to a picard interval.
-     * @param loc The GenomeLoc.
-     * @param locIndex The loc index for use in the file.
-     * @return The picard interval.
-     */
-    private static htsjdk.samtools.util.Interval toInterval(GenomeLoc loc, int locIndex) {
-        return new htsjdk.samtools.util.Interval(loc.getContig(), loc.getStart(), loc.getStop(), false, "interval_" + locIndex);
-    }
-
-    /**
-     * merge a list of genome locs that may be overlapping, returning the list of unique genomic locations
-     *
-     * @param raw the unchecked genome loc list
-     * @param rule the merging rule we're using
-     *
-     * @return the list of merged locations
-     */
-    public static List<GenomeLoc> mergeIntervalLocations(final List<GenomeLoc> raw, IntervalMergingRule rule) {
-        if (raw.size() <= 1)
-            return Collections.unmodifiableList(raw);
-        else {
-            ArrayList<GenomeLoc> merged = new ArrayList<GenomeLoc>();
-            Iterator<GenomeLoc> it = raw.iterator();
-            GenomeLoc prev = it.next();
-            while (it.hasNext()) {
-                GenomeLoc curr = it.next();
-                if (prev.overlapsP(curr)) {
-                    prev = prev.merge(curr);
-                } else if (prev.contiguousP(curr) && (rule == null || rule == IntervalMergingRule.ALL)) {
-                    prev = prev.merge(curr);
-                } else {
-                    merged.add(prev);
-                    prev = curr;
-                }
-            }
-            merged.add(prev);
-            return Collections.unmodifiableList(merged);
-        }
-    }
-
-    public static long intervalSize(final List<GenomeLoc> locs) {
-        long size = 0;
-        for ( final GenomeLoc loc : locs )
-            size += loc.size();
-        return size;
-    }
-
-    public static void writeFlankingIntervals(File reference, File inputIntervals, File flankingIntervals, int basePairs) {
-        ReferenceDataSource referenceDataSource = new ReferenceDataSource(reference);
-        GenomeLocParser parser = new GenomeLocParser(referenceDataSource.getReference());
-        List<GenomeLoc> originalList = intervalFileToList(parser, inputIntervals.getAbsolutePath());
-
-        if (originalList.isEmpty())
-            throw new UserException.MalformedFile(inputIntervals, "File contains no intervals");
-
-        List<GenomeLoc> flankingList = getFlankingIntervals(parser, originalList, basePairs);
-
-        if (flankingList.isEmpty())
-            throw new UserException.MalformedFile(inputIntervals, "Unable to produce any flanks for the intervals");
-
-        SAMFileHeader samFileHeader = new SAMFileHeader();
-        samFileHeader.setSequenceDictionary(referenceDataSource.getReference().getSequenceDictionary());
-        IntervalList intervalList = new IntervalList(samFileHeader);
-        int i = 0;
-        for (GenomeLoc loc: flankingList)
-            intervalList.add(toInterval(loc, ++i));
-        intervalList.write(flankingIntervals);
-    }
-
-    /**
-     * Returns a list of intervals between the passed int locs. Does not extend UNMAPPED locs.
-     * @param parser A genome loc parser for creating the new intervals
-     * @param locs Original genome locs
-     * @param basePairs Number of base pairs on each side of loc
-     * @return The list of intervals between the locs
-     */
-    public static List<GenomeLoc> getFlankingIntervals(final GenomeLocParser parser, final List<GenomeLoc> locs, final int basePairs) {
-        List<GenomeLoc> sorted = sortAndMergeIntervals(parser, locs, IntervalMergingRule.ALL).toList();
-
-        if (sorted.size() == 0)
-            return Collections.emptyList();
-
-        LinkedHashMap<String, List<GenomeLoc>> locsByContig = splitByContig(sorted);
-        List<GenomeLoc> expanded = new ArrayList<GenomeLoc>();
-        for (Map.Entry<String, List<GenomeLoc>> contig: locsByContig.entrySet()) {
-            List<GenomeLoc> contigLocs = contig.getValue();
-            int contigLocsSize = contigLocs.size();
-
-            GenomeLoc startLoc, stopLoc;
-
-            // Create loc at start of the list
-            startLoc = parser.createGenomeLocAtStart(contigLocs.get(0), basePairs);
-            if (startLoc != null)
-                expanded.add(startLoc);
-
-            // Create locs between each loc[i] and loc[i+1]
-            for (int i = 0; i < contigLocsSize - 1; i++) {
-                stopLoc = parser.createGenomeLocAtStop(contigLocs.get(i), basePairs);
-                startLoc = parser.createGenomeLocAtStart(contigLocs.get(i + 1), basePairs);
-                if (stopLoc.getStop() + 1 >= startLoc.getStart()) {
-                    // NOTE: This is different than GenomeLoc.merge()
-                    // merge() returns a loc which covers the entire range of stop and start,
-                    // possibly returning positions inside loc(i) or loc(i+1)
-                    // We want to make sure that the start of the stopLoc is used, and the stop of the startLoc
-                    GenomeLoc merged = parser.createGenomeLoc(
-                            stopLoc.getContig(), stopLoc.getStart(), startLoc.getStop());
-                    expanded.add(merged);
-                } else {
-                    expanded.add(stopLoc);
-                    expanded.add(startLoc);
-                }
-            }
-
-            // Create loc at the end of the list
-            stopLoc = parser.createGenomeLocAtStop(contigLocs.get(contigLocsSize - 1), basePairs);
-            if (stopLoc != null)
-                expanded.add(stopLoc);
-        }
-        return expanded;
-    }
-
-    /**
-     * Returns a list of intervals between the passed int locs. Does not extend UNMAPPED locs.
-     * @param parser A genome loc parser for creating the new intervals
-     * @param locs Original genome locs
-     * @param basePairs Number of base pairs on each side of loc
-     * @return The list of intervals between the locs
-     */
-    public static List<GenomeLoc> getIntervalsWithFlanks(final GenomeLocParser parser, final List<GenomeLoc> locs, final int basePairs) {
-
-        if (locs.size() == 0)
-            return Collections.emptyList();
-
-        final List<GenomeLoc> expanded = new ArrayList<GenomeLoc>();
-        for ( final GenomeLoc loc : locs ) {
-            expanded.add(parser.createPaddedGenomeLoc(loc, basePairs));
-        }
-
-        return sortAndMergeIntervals(parser, expanded, IntervalMergingRule.ALL).toList();
-    }
-
-    private static LinkedHashMap<String, List<GenomeLoc>> splitByContig(List<GenomeLoc> sorted) {
-        LinkedHashMap<String, List<GenomeLoc>> splits = new LinkedHashMap<String, List<GenomeLoc>>();
-        GenomeLoc last = null;
-        List<GenomeLoc> contigLocs = null;
-        for (GenomeLoc loc: sorted) {
-            if (GenomeLoc.isUnmapped(loc))
-                continue;
-            if (last == null || !last.onSameContig(loc)) {
-                contigLocs = new ArrayList<GenomeLoc>();
-                splits.put(loc.getContig(), contigLocs);
-            }
-            contigLocs.add(loc);
-            last = loc;
-        }
-        return splits;
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/io/FileExtension.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/io/FileExtension.java
deleted file mode 100644
index e099a45..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/io/FileExtension.java
+++ /dev/null
@@ -1,37 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.io;
-
-import java.io.File;
-
-public interface FileExtension {
-    /**
-     * Returns a clone of the FileExtension with a new path.
-     * @param path New path.
-     * @return New FileExtension
-     */
-    public File withPath(String path);
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/io/HardThresholdingOutputStream.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/io/HardThresholdingOutputStream.java
deleted file mode 100644
index 1d041b6..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/io/HardThresholdingOutputStream.java
+++ /dev/null
@@ -1,56 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.io;
-
-import org.apache.commons.io.output.ThresholdingOutputStream;
-
-import java.io.IOException;
-
-/**
- * An output stream which stops at the threshold
- * instead of potentially triggering early.
- */
-public abstract class HardThresholdingOutputStream extends ThresholdingOutputStream {
-    protected HardThresholdingOutputStream(int threshold) {
-        super(threshold);
-    }
-
-    @Override
-    public void write(byte[] b) throws IOException {
-        write(b, 0, b.length);
-    }
-
-    @Override
-    public void write(byte[] b, int off, int len) throws IOException {
-        int remaining = this.getThreshold() - (int)this.getByteCount();
-        if (!isThresholdExceeded() && len > remaining) {
-            super.write(b, off, remaining);
-            super.write(b, off + remaining, len - remaining);
-        } else {
-            super.write(b, off, len);
-        }
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/io/IOUtils.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/io/IOUtils.java
deleted file mode 100644
index 7defcea..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/io/IOUtils.java
+++ /dev/null
@@ -1,575 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.io;
-
-import org.apache.commons.io.FileUtils;
-import org.apache.commons.io.FilenameUtils;
-import org.apache.commons.io.LineIterator;
-import org.apache.commons.lang.StringUtils;
-import org.apache.log4j.Logger;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-import org.broadinstitute.gatk.utils.exceptions.GATKException;
-import org.broadinstitute.gatk.utils.exceptions.UserException;
-
-import java.io.*;
-import java.nio.ByteBuffer;
-import java.nio.ByteOrder;
-import java.util.*;
-
-public class IOUtils {
-    private static Logger logger = Logger.getLogger(IOUtils.class);
-    private static final File DEV_DIR = new File("/dev");
-
-    /**
-     * Checks if the temp directory has been setup and throws an exception if they user hasn't set it correctly.
-     *
-     * @param tempDir Temporary directory.
-     */
-    public static void checkTempDir(File tempDir) {
-        if (isDefaultTempDir(tempDir))
-            throw new UserException.BadTmpDir("java.io.tmpdir must be explicitly set");
-        if (!tempDir.exists() && !tempDir.mkdirs())
-            throw new UserException.BadTmpDir("Could not create directory: " + tempDir.getAbsolutePath());
-    }
-
-    /**
-     * Returns true if the directory is a default temporary directory.
-     * @param tempDir the directory to check.
-     * @return true if the directory is a default temporary directory.
-     */
-    public static boolean isDefaultTempDir(File tempDir) {
-        String tempDirPath = tempDir.getAbsolutePath();
-        // Keeps the user from leaving the temp directory as the default, and on Macs from having pluses
-        // in the path which can cause problems with the Google Reflections library.
-        // see also: http://benjchristensen.com/2009/09/22/mac-osx-10-6-java-java-io-tmpdir/
-        return (tempDirPath.startsWith("/var/folders/") || (tempDirPath.equals("/tmp")) || (tempDirPath.equals("/tmp/")));
-    }
-
-    /**
-     * Creates a temp directory with the prefix and optional suffix.
-     *
-     * @param prefix       Prefix for the directory name.
-     * @param suffix       Optional suffix for the directory name.
-     * @return The created temporary directory.
-     */
-    public static File tempDir(String prefix, String suffix) {
-        return tempDir(prefix, suffix, null);
-    }
-
-    /**
-     * Creates a temp directory with the prefix and optional suffix.
-     *
-     * @param prefix        Prefix for the directory name.
-     * @param suffix        Optional suffix for the directory name.
-     * @param tempDirParent Parent directory for the temp directory.
-     * @return The created temporary directory.
-     */
-    public static File tempDir(String prefix, String suffix, File tempDirParent) {
-        try {
-            if (tempDirParent == null)
-                tempDirParent = FileUtils.getTempDirectory();
-            if (!tempDirParent.exists() && !tempDirParent.mkdirs())
-                throw new UserException.BadTmpDir("Could not create temp directory: " + tempDirParent);
-            File temp = File.createTempFile(prefix, suffix, tempDirParent);
-            if (!temp.delete())
-                throw new UserException.BadTmpDir("Could not delete sub file: " + temp.getAbsolutePath());
-            if (!temp.mkdir())
-                throw new UserException.BadTmpDir("Could not create sub directory: " + temp.getAbsolutePath());
-            return absolute(temp);
-        } catch (IOException e) {
-            throw new UserException.BadTmpDir(e.getMessage());
-        }
-    }
-
-    /**
-     * Writes content to a temp file and returns the path to the temporary file.
-     *
-     * @param content   to write.
-     * @param prefix    Prefix for the temp file.
-     * @param suffix    Suffix for the temp file.
-     * @return the path to the temp file.
-     */
-    public static File writeTempFile(String content, String prefix, String suffix) {
-        return writeTempFile(content, prefix, suffix, null);
-    }
-
-    /**
-     * Writes content to a temp file and returns the path to the temporary file.
-     *
-     * @param content   to write.
-     * @param prefix    Prefix for the temp file.
-     * @param suffix    Suffix for the temp file.
-     * @param directory Directory for the temp file.
-     * @return the path to the temp file.
-     */
-    public static File writeTempFile(String content, String prefix, String suffix, File directory) {
-        try {
-            File tempFile = absolute(File.createTempFile(prefix, suffix, directory));
-            FileUtils.writeStringToFile(tempFile, content);
-            return tempFile;
-        } catch (IOException e) {
-            throw new UserException.BadTmpDir(e.getMessage());
-        }
-    }
-
-    /**
-     * Waits for NFS to propagate a file creation, imposing a timeout.
-     *
-     * Based on Apache Commons IO FileUtils.waitFor()
-     *
-     * @param file    The file to wait for.
-     * @param seconds The maximum time in seconds to wait.
-     * @return true if the file exists
-     */
-    public static boolean waitFor(File file, int seconds) {
-        return waitFor(Collections.singletonList(file), seconds).isEmpty();
-    }
-
-    /**
-     * Waits for NFS to propagate a file creation, imposing a timeout.
-     *
-     * Based on Apache Commons IO FileUtils.waitFor()
-     *
-     * @param files   The list of files to wait for.
-     * @param seconds The maximum time in seconds to wait.
-     * @return Files that still do not exists at the end of the timeout, or a empty list if all files exists.
-     */
-    public static List<File> waitFor(Collection<File> files, int seconds) {
-        long timeout = 0;
-        long tick = 0;
-        List<File> missingFiles = new ArrayList<File>();
-        for (File file : files)
-            if (!file.exists())
-                missingFiles.add(file);
-
-        while (!missingFiles.isEmpty() && timeout <= seconds) {
-            if (tick >= 10) {
-                tick = 0;
-                timeout++;
-            }
-            tick++;
-            try {
-                Thread.sleep(100);
-            } catch (InterruptedException ignore) {
-            }
-            List<File> newMissingFiles = new ArrayList<File>();
-            for (File file : missingFiles)
-                if (!file.exists())
-                    newMissingFiles.add(file);
-            missingFiles = newMissingFiles;
-        }
-        return missingFiles;
-    }
-
-    /**
-     * Returns the directory at the number of levels deep.
-     * For example 2 levels of /path/to/dir will return /path/to
-     *
-     * @param dir   Directory path.
-     * @param level how many levels deep from the root.
-     * @return The path to the parent directory that is level-levels deep.
-     */
-    public static File dirLevel(File dir, int level) {
-        List<File> directories = new ArrayList<File>();
-        File parentDir = absolute(dir);
-        while (parentDir != null) {
-            directories.add(0, parentDir);
-            parentDir = parentDir.getParentFile();
-        }
-        if (directories.size() <= level)
-            return directories.get(directories.size() - 1);
-        else
-            return directories.get(level);
-    }
-
-    /**
-     * Returns the sub path rooted at the parent.
-     *
-     * @param parent The parent directory.
-     * @param path   The sub path to append to the parent, if the path is not absolute.
-     * @return The absolute path to the file in the parent dir if the path was not absolute, otherwise the original path.
-     */
-    public static File absolute(File parent, String path) {
-        return absolute(parent, new File(path));
-    }
-
-    /**
-     * Returns the sub path rooted at the parent.
-     *
-     * @param parent The parent directory.
-     * @param file   The sub path to append to the parent, if the path is not absolute.
-     * @return The absolute path to the file in the parent dir if the path was not absolute, otherwise the original path.
-     */
-    public static File absolute(File parent, File file) {
-        String newPath;
-        if (file.isAbsolute())
-            newPath = absolutePath(file);
-        else
-            newPath = absolutePath(new File(parent, file.getPath()));
-        return replacePath(file, newPath);
-    }
-
-    /**
-     * A mix of getCanonicalFile and getAbsoluteFile that returns the
-     * absolute path to the file without deferencing symbolic links.
-     *
-     * @param file the file.
-     * @return the absolute path to the file.
-     */
-    public static File absolute(File file) {
-        return replacePath(file, absolutePath(file));
-    }
-
-    private static String absolutePath(File file) {
-        File fileAbs = file.getAbsoluteFile();
-        LinkedList<String> names = new LinkedList<String>();
-        while (fileAbs != null) {
-            String name = fileAbs.getName();
-            fileAbs = fileAbs.getParentFile();
-
-            if (".".equals(name)) {
-                /* skip */
-
-                /* TODO: What do we do for ".."?
-              } else if (name == "..") {
-
-                CentOS tcsh says use getCanonicalFile:
-                ~ $ mkdir -p test1/test2
-                ~ $ ln -s test1/test2 test3
-                ~ $ cd test3/..
-                ~/test1 $
-
-                Mac bash says keep going with getAbsoluteFile:
-                ~ $ mkdir -p test1/test2
-                ~ $ ln -s test1/test2 test3
-                ~ $ cd test3/..
-                ~ $
-
-                For now, leave it and let the shell figure it out.
-                */
-            } else {
-                names.add(0, name);
-            }
-        }
-
-        return ("/" + StringUtils.join(names, "/"));
-    }
-
-    private static File replacePath(File file, String path) {
-        if (file instanceof FileExtension)
-            return ((FileExtension)file).withPath(path);
-        if (!File.class.equals(file.getClass()))
-            throw new GATKException("Sub classes of java.io.File must also implement FileExtension");
-        return new File(path);
-    }
-
-    /**
-     * Returns the last lines of the file.
-     * NOTE: This is only safe to run on smaller files!
-     *
-     * @param file  File to read.
-     * @param count Maximum number of lines to return.
-     * @return The last count lines from file.
-     * @throws IOException When unable to read the file.
-     */
-    public static List<String> tail(File file, int count) throws IOException {
-        LinkedList<String> tailLines = new LinkedList<String>();
-        FileReader reader = new FileReader(file);
-        try {
-            LineIterator iterator = org.apache.commons.io.IOUtils.lineIterator(reader);
-            int lineCount = 0;
-            while (iterator.hasNext()) {
-                String line = iterator.nextLine();
-                lineCount++;
-                if (lineCount > count)
-                    tailLines.removeFirst();
-                tailLines.offer(line);
-            }
-        } finally {
-            org.apache.commons.io.IOUtils.closeQuietly(reader);
-        }
-        return tailLines;
-    }
-
-    /**
-     * Tries to delete a file. Emits a warning if the file
-     * is not a special file and was unable to be deleted.
-     *
-     * @param file File to delete.
-     * @return true if the file was deleted.
-     */
-    public static boolean tryDelete(File file) {
-        if (isSpecialFile(file)) {
-            logger.debug("Not trying to delete " + file);
-            return false;
-        }
-        boolean deleted = FileUtils.deleteQuietly(file);
-        if (deleted)
-            logger.debug("Deleted " + file);
-        else if (file.exists())
-            logger.warn("Unable to delete " + file);
-        return deleted;
-    }
-
-    /**
-     * Writes the an embedded resource to a temp file.
-     * File is not scheduled for deletion and must be cleaned up by the caller.
-     * @param resource Embedded resource.
-     * @return Path to the temp file with the contents of the resource.
-     */
-    public static File writeTempResource(Resource resource) {
-        File temp;
-        try {
-            temp = File.createTempFile(FilenameUtils.getBaseName(resource.getPath()) + ".", "." + FilenameUtils.getExtension(resource.getPath()));
-        } catch (IOException e) {
-            throw new UserException.BadTmpDir(e.getMessage());
-        }
-        writeResource(resource, temp);
-        return temp;
-    }
-
-    /**
-     * Writes the an embedded resource to a file.
-     * File is not scheduled for deletion and must be cleaned up by the caller.
-     * @param resource Embedded resource.
-     * @param file File path to write.
-     */
-    public static void writeResource(Resource resource, File file) {
-        String path = resource.getPath();
-        InputStream inputStream = resource.getResourceContentsAsStream();
-        OutputStream outputStream = null;
-        try {
-            outputStream = FileUtils.openOutputStream(file);
-            org.apache.commons.io.IOUtils.copy(inputStream, outputStream);
-        } catch (IOException e) {
-            throw new GATKException(String.format("Unable to copy resource '%s' to '%s'", path, file), e);
-        } finally {
-            org.apache.commons.io.IOUtils.closeQuietly(inputStream);
-            org.apache.commons.io.IOUtils.closeQuietly(outputStream);
-        }
-    }
-
-    /**
-     * Returns a file throwing a UserException if the file cannot be read.
-     * @param path File path
-     * @return LineIterator
-     */
-    public static LineIterator lineIterator(String path) {
-        return lineIterator(new File(path));
-    }
-
-    /**
-     * Returns a file throwing a UserException if the file cannot be read.
-     * @param file File
-     * @return LineIterator
-     */
-    public static LineIterator lineIterator(File file) {
-        try {
-            return FileUtils.lineIterator(file);
-        } catch (IOException e) {
-            throw new UserException.CouldNotReadInputFile(file, e);
-        }
-
-    }
-
-    /**
-     * Returns true if the file is a special file.
-     * @param file File path to check.
-     * @return true if the file is a special file.
-     */
-    public static boolean isSpecialFile(File file) {
-        return file != null && (file.getAbsolutePath().startsWith("/dev/") || file.equals(DEV_DIR));
-    }
-
-    /**
-     * Reads the entirety of the given file into a byte array. Uses a read buffer size of 4096 bytes.
-     *
-     * @param source File to read
-     * @return The contents of the file as a byte array
-     */
-    public static byte[] readFileIntoByteArray ( File source ) {
-        return readFileIntoByteArray(source, 4096);
-    }
-
-    /**
-     * Reads the entirety of the given file into a byte array using the requested read buffer size.
-     *
-     * @param source File to read
-     * @param readBufferSize Number of bytes to read in at one time
-     * @return The contents of the file as a byte array
-     */
-    public static byte[] readFileIntoByteArray ( File source, int readBufferSize ) {
-        if ( source == null ) {
-            throw new ReviewedGATKException("Source file was null");
-        }
-
-        byte[] fileContents;
-
-        try {
-            fileContents = readStreamIntoByteArray(new FileInputStream(source), readBufferSize);
-        }
-        catch ( FileNotFoundException e ) {
-            throw new UserException.CouldNotReadInputFile(source, e);
-        }
-
-        if ( fileContents.length != source.length() ) {
-            throw new UserException.CouldNotReadInputFile(String.format("Unable to completely read file %s: read only %d/%d bytes",
-                                                          source.getAbsolutePath(), fileContents.length, source.length()));
-        }
-
-        return fileContents;
-    }
-
-    /**
-     * Reads all data from the given stream into a byte array. Uses a read buffer size of 4096 bytes.
-     *
-     * @param in Stream to read data from
-     * @return The contents of the stream as a byte array
-     */
-    public static byte[] readStreamIntoByteArray ( InputStream in ) {
-        return readStreamIntoByteArray(in, 4096);
-    }
-
-    /**
-     * Reads all data from the given stream into a byte array using the requested read buffer size.
-     *
-     * @param in Stream to read data from
-     * @param readBufferSize Number of bytes to read in at one time
-     * @return The contents of the stream as a byte array
-     */
-    public static byte[] readStreamIntoByteArray ( InputStream in, int readBufferSize ) {
-        if ( in == null ) {
-            throw new ReviewedGATKException("Input stream was null");
-        }
-        else if ( readBufferSize <= 0 ) {
-            throw new ReviewedGATKException("Read buffer size must be > 0");
-        }
-
-        // Use a fixed-size buffer for each read, but a dynamically-growing buffer
-        // to hold the accumulated contents of the file/stream:
-        byte[] readBuffer = new byte[readBufferSize];
-        ByteArrayOutputStream fileBuffer = new ByteArrayOutputStream(readBufferSize * 4);
-
-        try {
-            try {
-                int currentBytesRead;
-
-                while ( (currentBytesRead = in.read(readBuffer, 0, readBuffer.length)) >= 0 ) {
-                    fileBuffer.write(readBuffer, 0, currentBytesRead);
-                }
-            }
-            finally {
-                in.close();
-            }
-        }
-        catch ( IOException e ) {
-            throw new UserException.CouldNotReadInputFile("I/O error reading from input stream", e);
-        }
-
-        return fileBuffer.toByteArray();
-    }
-
-    /**
-     * Writes the given array of bytes to a file
-     *
-     * @param bytes Data to write
-     * @param destination File to write the data to
-     */
-    public static void writeByteArrayToFile ( byte[] bytes, File destination ) {
-        if ( destination == null ) {
-            throw new ReviewedGATKException("Destination file was null");
-        }
-
-        try {
-            writeByteArrayToStream(bytes, new FileOutputStream(destination));
-        }
-        catch ( FileNotFoundException e ) {
-            throw new UserException.CouldNotCreateOutputFile(destination, e);
-        }
-    }
-
-    /**
-     * Writes the given array of bytes to a stream
-     *
-     * @param bytes Data to write
-     * @param out Stream to write the data to
-     */
-    public static void writeByteArrayToStream ( byte[] bytes, OutputStream out ) {
-        if ( bytes == null || out == null ) {
-            throw new ReviewedGATKException("Data to write or output stream was null");
-        }
-
-        try {
-            try {
-                out.write(bytes);
-            }
-            finally {
-                out.close();
-            }
-        }
-        catch ( IOException e ) {
-            throw new UserException.CouldNotCreateOutputFile("I/O error writing to output stream", e);
-        }
-    }
-
-    /**
-     * Determines the uncompressed size of a GZIP file. Uses the GZIP ISIZE field in the last
-     * 4 bytes of the file to get this information.
-     *
-     * @param gzipFile GZIP-format file whose uncompressed size to determine
-     * @return The uncompressed size (in bytes) of the GZIP file
-     */
-    public static int getGZIPFileUncompressedSize ( File gzipFile ) {
-        if ( gzipFile == null ) {
-            throw new ReviewedGATKException("GZIP file to examine was null");
-        }
-
-        try {
-            // The GZIP ISIZE field holds the uncompressed size of the compressed data.
-            // It occupies the last 4 bytes of any GZIP file:
-            RandomAccessFile in = new RandomAccessFile(gzipFile, "r");
-            in.seek(gzipFile.length() - 4);
-            byte[] sizeBytes = new byte[4];
-            in.read(sizeBytes, 0, 4);
-
-            ByteBuffer byteBuf = ByteBuffer.wrap(sizeBytes);
-            byteBuf.order(ByteOrder.LITTLE_ENDIAN);   // The GZIP spec mandates little-endian byte order
-            int uncompressedSize = byteBuf.getInt();
-
-            // If the size read in is negative, we've overflowed our signed integer:
-            if ( uncompressedSize < 0 ) {
-                throw new UserException.CouldNotReadInputFile(String.format("Cannot accurately determine the uncompressed size of file %s " +
-                                                               "because it's either larger than %d bytes or the GZIP ISIZE field is corrupt",
-                                                               gzipFile.getAbsolutePath(), Integer.MAX_VALUE));
-            }
-
-            return uncompressedSize;
-        }
-        catch ( IOException e ) {
-            throw new UserException.CouldNotReadInputFile(gzipFile, e);
-        }
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/io/Resource.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/io/Resource.java
deleted file mode 100644
index abebe52..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/io/Resource.java
+++ /dev/null
@@ -1,91 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.io;
-
-import java.io.File;
-import java.io.InputStream;
-
-/**
- * Stores a resource by path and a relative class.
- */
-public class Resource {
-    private final String path;
-    private final Class<?> relativeClass;
-
-    /**
-     * Create a resource with a path and a relative class.
-     * @param path Relative or absolute path to the class.
-     * @param relativeClass Relative class to use as a class loader and for a relative package.
-     *
-     * If the relative class is null then the system classloader will be used and the path must be absolute.
-     */
-    public Resource(String path, Class<?> relativeClass) {
-        this.path = path;
-        this.relativeClass = relativeClass;
-    }
-
-    public Class<?> getRelativeClass() {
-        return relativeClass;
-    }
-
-    public String getPath() {
-        return path;
-    }
-
-    public String getFullPath() {
-        if (relativeClass == null)
-            return path;
-        if (new File(path).isAbsolute())
-            return path;
-        return String.format("%s%s%s",
-                relativeClass.getPackage().getName().replace('.', File.separatorChar),
-                File.separator,
-                path);
-    }
-
-    /**
-     * Get the contents of this resource as an InputStream
-     * @throws IllegalArgumentException if resource cannot be read
-     * @return an input stream that will read the contents of this resource
-     */
-    public InputStream getResourceContentsAsStream() {
-        final Class<?> clazz = getRelativeClass();
-
-        final InputStream inputStream;
-        if (clazz == null) {
-            inputStream = ClassLoader.getSystemResourceAsStream(path);
-            if (inputStream == null)
-                throw new IllegalArgumentException("Resource not found: " + path);
-        } else {
-            inputStream = clazz.getResourceAsStream(path);
-            if (inputStream == null)
-                throw new IllegalArgumentException("Resource not found relative to " + clazz + ": " + path);
-
-        }
-
-        return inputStream;
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/jna/clibrary/JNAUtils.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/jna/clibrary/JNAUtils.java
deleted file mode 100644
index 0c14ffa..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/jna/clibrary/JNAUtils.java
+++ /dev/null
@@ -1,59 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.jna.clibrary;
-
-import com.sun.jna.Platform;
-
-/**
- * Collection of functions that are in the standard CLibrary but are associated with different headers on different platforms.
- */
-public class JNAUtils {
-    /**
-     * Defined in different places on different systems, this is currently 256 on mac and 64 everywhere else.
-     */
-    public static final int MAXHOSTNAMELEN;
-
-    /**
-     * Maximum path length.
-     */
-    public static final int MAXPATHLEN = 1024;
-
-    static {
-      int maxhostnamelen = 64;
-      if (Platform.isMac())
-         maxhostnamelen = 256;
-      MAXHOSTNAMELEN = maxhostnamelen;
-    }
-
-    /**
-     * Converts a non-zero int to true, otherwise false.
-     * @param val int to check.
-     * @return true if val is non-zero.
-     */
-    public static boolean toBoolean(int val) {
-        return val != 0;
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/jna/clibrary/LibC.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/jna/clibrary/LibC.java
deleted file mode 100644
index dd2d7e7..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/jna/clibrary/LibC.java
+++ /dev/null
@@ -1,200 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.jna.clibrary;
-
-import com.sun.jna.LastErrorException;
-import com.sun.jna.Native;
-import com.sun.jna.NativeLong;
-import com.sun.jna.Structure;
-import com.sun.jna.ptr.NativeLongByReference;
-
-/**
- * Sparse port of the Standard C Library libc -lc.
- */
- at SuppressWarnings("unused")
-public class LibC {
-
-    static {
-        Native.register("c");
-    }
-
-    /** Operation not permitted */
-    public static final int EPERM = 1;
-
-    /** No such file or directory */
-    public static final int ENOENT = 2;
-
-    /** No such process */
-    public static final int ESRCH = 3;
-
-    /** Interrupted system call */
-    public static final int EINTR = 4;
-
-    /** I/O error */
-    public static final int EIO = 5;
-
-    /** No such device or address */
-    public static final int ENXIO = 6;
-
-    /** Argument list too long */
-    public static final int E2BIG = 7;
-
-    /** Exec format error */
-    public static final int ENOEXEC = 8;
-
-    /** Bad file number */
-    public static final int EBADF = 9;
-
-    /** No child processes */
-    public static final int ECHILD = 10;
-
-    /** Try again */
-    public static final int EAGAIN = 11;
-
-    /** Out of memory */
-    public static final int ENOMEM = 12;
-
-    /** Permission denied */
-    public static final int EACCES = 13;
-
-    /** Bad address */
-    public static final int EFAULT = 14;
-
-    /** Block device required */
-    public static final int ENOTBLK = 15;
-
-    /** Device or resource busy */
-    public static final int EBUSY = 16;
-
-    /** File exists */
-    public static final int EEXIST = 17;
-
-    /** Cross-device link */
-    public static final int EXDEV = 18;
-
-    /** No such device */
-    public static final int ENODEV = 19;
-
-    /** Not a directory */
-    public static final int ENOTDIR = 20;
-
-    /** Is a directory */
-    public static final int EISDIR = 21;
-
-    /** Invalid argument */
-    public static final int EINVAL = 22;
-
-    /** File table overflow */
-    public static final int ENFILE = 23;
-
-    /** Too many open files */
-    public static final int EMFILE = 24;
-
-    /** Not a typewriter */
-    public static final int ENOTTY = 25;
-
-    /** Text file busy */
-    public static final int ETXTBSY = 26;
-
-    /** File too large */
-    public static final int EFBIG = 27;
-
-    /** No space left on device */
-    public static final int ENOSPC = 28;
-
-    /** Illegal seek */
-    public static final int ESPIPE = 29;
-
-    /** Read-only file system */
-    public static final int EROFS = 30;
-
-    /** Too many links */
-    public static final int EMLINK = 31;
-
-    /** Broken pipe */
-    public static final int EPIPE = 32;
-
-    /** Math argument out of domain of func */
-    public static final int EDOM = 33;
-
-    /** Math result not representable */
-    public static final int ERANGE = 34;
-
-    /**
-     * Inserts or resets the environment variable name in the current environment list.  If the variable name does not exist
-     * in the list, it is inserted with the given value.  If the variable does exist, the argument overwrite is tested; if overwrite is zero, the
-     * variable is not reset, otherwise it is reset to the given value.
-     * @param name the environment variable name
-     * @param value the given value
-     * @param overwrite if overwrite is zero, the variable is not reset, otherwise it is reset to the given value
-     * @return the value 0 if successful; otherwise the value -1 is returned and the global variable errno is set to indicate the error.
-     * @throws LastErrorException [ENOMEM] The function failed because it was unable to allocate memory for the environment.
-     */
-    public static native int setenv(String name, String value, int overwrite) throws LastErrorException;
-
-    /**
-     * Obtains the current value of the environment variable, name.
-     * @param name the environment variable name
-     * @return the value of the environment variable as a NUL-terminated string.  If the variable name is not in the current environment, NULL is returned.
-     */
-    public static native String getenv(String name);
-
-    /**
-     * The unsetenv() function deletes all instances of the variable name pointed to by name from the list.  Note that only the variable name
-     * (e.g., "NAME") should be given; "NAME=value" will not work.
-     * @param name the environment variable name
-     * @return the value 0 if successful; otherwise the value -1 is returned and the global variable errno is set to indicate the error.
-     * @throws LastErrorException The function failed.
-     */
-    public static native int unsetenv(String name) throws LastErrorException;
-
-    public static class timeval extends Structure {
-        public static class ByReference extends timeval implements Structure.ByReference {
-        }
-
-        public static class ByValue extends timeval implements Structure.ByValue {
-        }
-
-        public NativeLong tv_sec;
-        public NativeLong tv_usec;
-    }
-
-    /**
-     * The time() function returns the value of time in seconds since 0 hours, 0 minutes, 0 seconds, January 1, 1970, Coordinated Universal Time, without including leap seconds.  If an error occurs, time() returns the value (time_t)-1.
-     * The return value is also stored in *tloc, provided that t is non-null.
-     * @param t the value of time in seconds,  provided that t is non-null.
-     * @return the value of time in seconds
-     */
-    public static native NativeLong time(NativeLongByReference t);
-
-    /**
-     * Returns the difference between two calendar times, (time1 - time0), expressed in seconds.
-     * @param time1 Time 1
-     * @param time0 Time 0
-     * @return the difference between two calendar times, (time1 - time0), expressed in seconds.
-     */
-    public static native double difftime(NativeLong time1, NativeLong time0);
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/jna/drmaa/v1_0/JnaJobInfo.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/jna/drmaa/v1_0/JnaJobInfo.java
deleted file mode 100644
index 1a99bfa..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/jna/drmaa/v1_0/JnaJobInfo.java
+++ /dev/null
@@ -1,101 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.jna.drmaa.v1_0;
-
-import org.ggf.drmaa.DrmaaException;
-import org.ggf.drmaa.JobInfo;
-
-import java.util.Map;
-
-/**
- * JNA mapping from Java to C DRMAA binding.
- */
-public class JnaJobInfo implements JobInfo {
-
-    private final String jobId;
-    private final Map<String, String> rusage;
-    private final boolean hasExited;
-    private final int exitStatus;
-    private final boolean hasSignaled;
-    private final String terminatingSignal;
-    private final boolean hasCoreDump;
-    private final boolean wasAborted;
-            
-    public JnaJobInfo(String jobId, Map<String, String> rusage, boolean hasExited, int exitStatus, boolean hasSignaled, String terminatingSignal, boolean hasCoreDump, boolean wasAborted) {
-        this.jobId = jobId;
-        this.rusage = rusage;
-        this.hasExited = hasExited;
-        this.exitStatus = exitStatus;
-        this.hasSignaled = hasSignaled;
-        this.terminatingSignal = terminatingSignal;
-        this.hasCoreDump = hasCoreDump;
-        this.wasAborted = wasAborted;
-    }
-
-    @Override
-    public String getJobId() throws DrmaaException {
-        return this.jobId;
-    }
-
-    @Override
-    public Map getResourceUsage() throws DrmaaException {
-        return rusage;
-    }
-
-    @Override
-    public boolean hasExited() throws DrmaaException {
-        return hasExited;
-    }
-
-    @Override
-    public int getExitStatus() throws DrmaaException {
-        if (!hasExited)
-            throw new IllegalStateException("job has not exited");
-        return exitStatus;
-    }
-
-    @Override
-    public boolean hasSignaled() throws DrmaaException {
-        return hasSignaled;
-    }
-
-    @Override
-    public String getTerminatingSignal() throws DrmaaException {
-        if (!hasSignaled)
-            throw new IllegalStateException("job has not signaled");
-        return terminatingSignal;
-    }
-
-    @Override
-    public boolean hasCoreDump() throws DrmaaException {
-        return hasCoreDump;
-    }
-
-    @Override
-    public boolean wasAborted() throws DrmaaException {
-        return wasAborted;
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/jna/drmaa/v1_0/JnaJobTemplate.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/jna/drmaa/v1_0/JnaJobTemplate.java
deleted file mode 100644
index b8add99..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/jna/drmaa/v1_0/JnaJobTemplate.java
+++ /dev/null
@@ -1,316 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.jna.drmaa.v1_0;
-
-import com.sun.jna.Pointer;
-import org.ggf.drmaa.*;
-
-import java.util.*;
-
-/**
- * JNA mapping from Java to C DRMAA binding.
- */
-public class JnaJobTemplate implements JobTemplate {
-    private final JnaSession session;
-    private final Pointer jt;
-
-    public JnaJobTemplate(JnaSession session, Pointer jt) {
-        this.session = session;
-        this.jt = jt;
-    }
-
-    public Pointer getPointer() {
-        return jt;
-    }
-
-    @Override
-    public void setRemoteCommand(String s) throws DrmaaException {
-        JnaSession.setAttribute(jt, LibDrmaa.DRMAA_REMOTE_COMMAND, s);
-    }
-
-    @Override
-    public String getRemoteCommand() throws DrmaaException {
-        return JnaSession.getAttribute(jt, LibDrmaa.DRMAA_REMOTE_COMMAND);
-    }
-
-    @SuppressWarnings("unchecked")
-    @Override
-    public void setArgs(List list) throws DrmaaException {
-        JnaSession.setVectorAttribute(jt, LibDrmaa.DRMAA_V_ARGV, list);
-    }
-
-    @Override
-    public List getArgs() throws DrmaaException {
-        return JnaSession.getVectorAttribute(jt, LibDrmaa.DRMAA_V_ARGV);
-    }
-
-    @Override
-    public void setJobSubmissionState(int state) throws DrmaaException {
-        String stateString;
-        if (state == JobTemplate.HOLD_STATE)
-            stateString = LibDrmaa.DRMAA_SUBMISSION_STATE_HOLD;
-        else if (state == JobTemplate.ACTIVE_STATE)
-            stateString = LibDrmaa.DRMAA_SUBMISSION_STATE_ACTIVE;
-        else
-            throw new InvalidAttributeValueException("jobSubmissionState attribute is invalid");
-        JnaSession.setAttribute(jt, LibDrmaa.DRMAA_JS_STATE, stateString);
-    }
-
-    @Override
-    public int getJobSubmissionState() throws DrmaaException {
-        int state;
-        String stateString = JnaSession.getAttribute(jt, LibDrmaa.DRMAA_JS_STATE);
-        if (LibDrmaa.DRMAA_SUBMISSION_STATE_HOLD.equals(stateString))
-            state = JobTemplate.HOLD_STATE;
-        else if (LibDrmaa.DRMAA_SUBMISSION_STATE_ACTIVE.equals(stateString))
-            state = JobTemplate.ACTIVE_STATE;
-        else
-            throw new InvalidAttributeValueException("jobSubmissionState attribute is invalid");
-        return state;
-    }
-
-    @SuppressWarnings("unchecked")
-    @Override
-    public void setJobEnvironment(Map env) throws DrmaaException {
-        JnaSession.setVectorAttribute(jt, LibDrmaa.DRMAA_V_ENV, JnaSession.mapToCollection(env));
-    }
-
-    @SuppressWarnings("unchecked")
-    @Override
-    public Map getJobEnvironment() throws DrmaaException {
-        return JnaSession.collectionToMap(JnaSession.getVectorAttribute(jt, LibDrmaa.DRMAA_V_ENV));
-    }
-
-    @Override
-    public void setWorkingDirectory(String s) throws DrmaaException {
-        JnaSession.setAttribute(jt, LibDrmaa.DRMAA_WD, s);
-    }
-
-    @Override
-    public String getWorkingDirectory() throws DrmaaException {
-        return JnaSession.getAttribute(jt, LibDrmaa.DRMAA_WD);
-    }
-
-    @Override
-    public void setJobCategory(String s) throws DrmaaException {
-        JnaSession.setAttribute(jt, LibDrmaa.DRMAA_JOB_CATEGORY, s);
-    }
-
-    @Override
-    public String getJobCategory() throws DrmaaException {
-        return JnaSession.getAttribute(jt, LibDrmaa.DRMAA_JOB_CATEGORY);
-    }
-
-    @Override
-    public void setNativeSpecification(String s) throws DrmaaException {
-        JnaSession.setAttribute(jt, LibDrmaa.DRMAA_NATIVE_SPECIFICATION, s);
-    }
-
-    @Override
-    public String getNativeSpecification() throws DrmaaException {
-        return JnaSession.getAttribute(jt, LibDrmaa.DRMAA_NATIVE_SPECIFICATION);
-    }
-
-    @SuppressWarnings("unchecked")
-    @Override
-    public void setEmail(Set set) throws DrmaaException {
-        JnaSession.setVectorAttribute(jt, LibDrmaa.DRMAA_V_EMAIL, set);
-    }
-
-    @SuppressWarnings("unchecked")
-    @Override
-    public Set getEmail() throws DrmaaException {
-        return new LinkedHashSet<String>(JnaSession.getVectorAttribute(jt, LibDrmaa.DRMAA_V_EMAIL));
-    }
-
-    @Override
-    public void setBlockEmail(boolean b) throws DrmaaException {
-        JnaSession.setAttribute(jt, LibDrmaa.DRMAA_BLOCK_EMAIL, b ? "1" : "0");
-    }
-
-    @Override
-    public boolean getBlockEmail() throws DrmaaException {
-        return "1".equals(JnaSession.getAttribute(jt, LibDrmaa.DRMAA_BLOCK_EMAIL));
-    }
-
-    @Override
-    public void setStartTime(PartialTimestamp partialTimestamp) throws DrmaaException {
-        JnaSession.setPartialTime(jt, LibDrmaa.DRMAA_START_TIME, partialTimestamp);
-    }
-
-    @Override
-    public PartialTimestamp getStartTime() throws DrmaaException {
-        return JnaSession.getPartialTime(jt, LibDrmaa.DRMAA_START_TIME);
-    }
-
-    @Override
-    public void setJobName(String s) throws DrmaaException {
-        JnaSession.setAttribute(jt, LibDrmaa.DRMAA_JOB_NAME, s);
-    }
-
-    @Override
-    public String getJobName() throws DrmaaException {
-        return JnaSession.getAttribute(jt, LibDrmaa.DRMAA_JOB_NAME);
-    }
-
-    @Override
-    public void setInputPath(String s) throws DrmaaException {
-        JnaSession.setAttribute(jt, LibDrmaa.DRMAA_INPUT_PATH, s);
-    }
-
-    @Override
-    public String getInputPath() throws DrmaaException {
-        return JnaSession.getAttribute(jt, LibDrmaa.DRMAA_INPUT_PATH);
-    }
-
-    @Override
-    public void setOutputPath(String s) throws DrmaaException {
-        JnaSession.setAttribute(jt, LibDrmaa.DRMAA_OUTPUT_PATH, s);
-    }
-
-    @Override
-    public String getOutputPath() throws DrmaaException {
-        return JnaSession.getAttribute(jt, LibDrmaa.DRMAA_OUTPUT_PATH);
-    }
-
-    @Override
-    public void setErrorPath(String s) throws DrmaaException {
-        JnaSession.setAttribute(jt, LibDrmaa.DRMAA_ERROR_PATH, s);
-    }
-
-    @Override
-    public String getErrorPath() throws DrmaaException {
-        return JnaSession.getAttribute(jt, LibDrmaa.DRMAA_ERROR_PATH);
-    }
-
-    @Override
-    public void setJoinFiles(boolean b) throws DrmaaException {
-        JnaSession.setAttribute(jt, LibDrmaa.DRMAA_JOIN_FILES, b ? "y" : "n");
-    }
-
-    @Override
-    public boolean getJoinFiles() throws DrmaaException {
-        return "y".equals(JnaSession.getAttribute(jt, LibDrmaa.DRMAA_JOIN_FILES));
-    }
-
-    @Override
-    public void setTransferFiles(FileTransferMode fileTransferMode) throws DrmaaException {
-        StringBuilder buf = new StringBuilder();
-
-        if (fileTransferMode.getInputStream())
-            buf.append('i');
-
-        if (fileTransferMode.getOutputStream())
-            buf.append('o');
-
-        if (fileTransferMode.getErrorStream())
-            buf.append('e');
-
-        JnaSession.setAttribute(jt, LibDrmaa.DRMAA_TRANSFER_FILES, buf.toString());
-    }
-
-    @Override
-    public FileTransferMode getTransferFiles() throws DrmaaException {
-        String mode = JnaSession.getAttribute(jt, LibDrmaa.DRMAA_TRANSFER_FILES);
-
-        if (mode == null)
-            return null;
-
-        FileTransferMode fileTransferMode = new FileTransferMode();
-        fileTransferMode.setInputStream(mode.indexOf('i') >= 0);
-        fileTransferMode.setOutputStream(mode.indexOf('o') >= 0);
-        fileTransferMode.setErrorStream(mode.indexOf('e') >= 0);
-        return fileTransferMode;
-    }
-
-    @Override
-    public void setDeadlineTime(PartialTimestamp partialTimestamp) throws DrmaaException {
-        JnaSession.setPartialTime(jt, LibDrmaa.DRMAA_DEADLINE_TIME, partialTimestamp);
-    }
-
-    @Override
-    public PartialTimestamp getDeadlineTime() throws DrmaaException {
-        return JnaSession.getPartialTime(jt, LibDrmaa.DRMAA_DEADLINE_TIME);
-    }
-
-    @Override
-    public void setHardWallclockTimeLimit(long l) throws DrmaaException {
-        JnaSession.setAttribute(jt, LibDrmaa.DRMAA_WCT_HLIMIT, JnaSession.formatLimit(l));
-    }
-
-    @Override
-    public long getHardWallclockTimeLimit() throws DrmaaException {
-        return JnaSession.parseLimit(JnaSession.getAttribute(jt, LibDrmaa.DRMAA_WCT_HLIMIT));
-    }
-
-    @Override
-    public void setSoftWallclockTimeLimit(long l) throws DrmaaException {
-        JnaSession.setAttribute(jt, LibDrmaa.DRMAA_WCT_SLIMIT, JnaSession.formatLimit(l));
-    }
-
-    @Override
-    public long getSoftWallclockTimeLimit() throws DrmaaException {
-        return JnaSession.parseLimit(JnaSession.getAttribute(jt, LibDrmaa.DRMAA_WCT_SLIMIT));
-    }
-
-    @Override
-    public void setHardRunDurationLimit(long l) throws DrmaaException {
-        JnaSession.setAttribute(jt, LibDrmaa.DRMAA_DURATION_HLIMIT, JnaSession.formatLimit(l));
-    }
-
-    @Override
-    public long getHardRunDurationLimit() throws DrmaaException {
-        return JnaSession.parseLimit(JnaSession.getAttribute(jt, LibDrmaa.DRMAA_DURATION_HLIMIT));
-    }
-
-    @Override
-    public void setSoftRunDurationLimit(long l) throws DrmaaException {
-        JnaSession.setAttribute(jt, LibDrmaa.DRMAA_DURATION_SLIMIT, JnaSession.formatLimit(l));
-    }
-
-    @Override
-    public long getSoftRunDurationLimit() throws DrmaaException {
-        return JnaSession.parseLimit(JnaSession.getAttribute(jt, LibDrmaa.DRMAA_DURATION_SLIMIT));
-    }
-
-    @Override
-    public Set getAttributeNames() throws DrmaaException {
-        return JnaSession.getAttrNames();
-    }
-
-    @Override
-    public boolean equals(Object obj) {
-        if (!(obj instanceof JnaJobTemplate))
-            return false;
-        JnaJobTemplate other = (JnaJobTemplate) obj;
-        return this.jt.equals(other.jt) && this.session.equals(other.session);
-    }
-
-    @Override
-    public int hashCode() {
-        return jt.hashCode();
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/jna/drmaa/v1_0/JnaSession.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/jna/drmaa/v1_0/JnaSession.java
deleted file mode 100644
index 67eaad7..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/jna/drmaa/v1_0/JnaSession.java
+++ /dev/null
@@ -1,461 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.jna.drmaa.v1_0;
-
-import com.sun.jna.Memory;
-import com.sun.jna.NativeLong;
-import com.sun.jna.Pointer;
-import com.sun.jna.StringArray;
-import com.sun.jna.ptr.IntByReference;
-import com.sun.jna.ptr.PointerByReference;
-import org.ggf.drmaa.*;
-
-import java.text.ParseException;
-import java.util.*;
-
-/**
- * JNA mapping from Java to C DRMAA binding.
- * See: Java and C Binding Documents on http://drmaa.org
- */
-public class JnaSession implements Session {
-    private static final PartialTimestampFormat PARTIAL_TIMESTAMP_FORMAT = new PartialTimestampFormat();
-    private static final ThreadLocal<Memory> threadError = new ThreadLocal<Memory>() {
-        @Override
-        protected Memory initialValue() {
-            return new Memory(LibDrmaa.DRMAA_ERROR_STRING_BUFFER);
-        }
-    };
-
-    @Override
-    public void init(String contact) throws DrmaaException {
-        checkError(LibDrmaa.drmaa_init(contact, getError(), LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN));
-    }
-
-    @Override
-    public void exit() throws DrmaaException {
-        checkError(LibDrmaa.drmaa_exit(getError(), LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN));
-    }
-
-    @Override
-    public JobTemplate createJobTemplate() throws DrmaaException {
-        PointerByReference jtRef = new PointerByReference();
-        checkError(LibDrmaa.drmaa_allocate_job_template(jtRef, getError(), LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN));
-        return new JnaJobTemplate(this, jtRef.getValue());
-    }
-
-    @Override
-    public void deleteJobTemplate(JobTemplate jobTemplate) throws DrmaaException {
-        JnaJobTemplate jnaJobTemplate = (JnaJobTemplate) jobTemplate;
-        checkError(LibDrmaa.drmaa_delete_job_template(jnaJobTemplate.getPointer(), getError(), LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN));
-    }
-
-    @Override
-    public String runJob(JobTemplate jobTemplate) throws DrmaaException {
-        Memory jobId = new Memory(LibDrmaa.DRMAA_JOBNAME_BUFFER);
-        JnaJobTemplate jnaJobTemplate = (JnaJobTemplate) jobTemplate;
-        checkError(LibDrmaa.drmaa_run_job(jobId, LibDrmaa.DRMAA_JOBNAME_BUFFER_LEN, jnaJobTemplate.getPointer(), getError(), LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN));
-        return jobId.getString(0);
-    }
-
-    @Override
-    public List runBulkJobs(JobTemplate jobTemplate, int start, int end, int incr) throws DrmaaException {
-        PointerByReference jobIds = new PointerByReference();
-        JnaJobTemplate jnaJobTemplate = (JnaJobTemplate) jobTemplate;
-        checkError(LibDrmaa.drmaa_run_bulk_jobs(jobIds, jnaJobTemplate.getPointer(), start, end, incr, getError(), LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN));
-        try {
-            return getJobIds(jobIds);
-        } finally {
-            releaseJobIds(jobIds);
-        }
-    }
-
-    @Override
-    public void control(String jobId, int action) throws DrmaaException {
-        checkError(LibDrmaa.drmaa_control(jobId, action, getError(), LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN));
-    }
-
-    @SuppressWarnings("unchecked")
-    @Override
-    public void synchronize(List list, long timeout, boolean dispose) throws DrmaaException {
-        StringArray jobIds = new StringArray((String[]) list.toArray(new String[list.size()]));
-        checkError(LibDrmaa.drmaa_synchronize(jobIds, new NativeLong(timeout), dispose ? 1 : 0, getError(), LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN));
-    }
-
-    @Override
-    public JobInfo wait(String jobId, long timeout) throws DrmaaException {
-        Memory jobIdOut = new Memory(LibDrmaa.DRMAA_JOBNAME_BUFFER);
-        IntByReference stat = new IntByReference();
-        PointerByReference rusage = new PointerByReference();
-        IntByReference exited = new IntByReference();
-        IntByReference exitStatus = new IntByReference();
-        IntByReference signaled = new IntByReference();
-        Memory signal = new Memory(LibDrmaa.DRMAA_SIGNAL_BUFFER);
-        IntByReference coreDumped = new IntByReference();
-        IntByReference aborted = new IntByReference();
-
-        int errnum;
-
-        errnum = LibDrmaa.drmaa_wait(jobId, jobIdOut, LibDrmaa.DRMAA_JOBNAME_BUFFER_LEN, stat, new NativeLong(timeout), rusage, getError(), LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN);
-
-        Map<String, String> rusageMap;
-        if (errnum == LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_NO_RUSAGE) {
-            rusageMap = null;
-        } else {
-            try {
-                rusageMap = collectionToMap(getAttrValues(rusage));
-            } finally {
-                releaseAttrValues(rusage);
-            }
-        }
-
-        checkError(LibDrmaa.drmaa_wifexited(exited, stat.getValue(), getError(), LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN));
-
-        if (exited.getValue() != 0) {
-            checkError(LibDrmaa.drmaa_wexitstatus(exitStatus, stat.getValue(), getError(), LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN));
-        }
-
-        checkError(LibDrmaa.drmaa_wifsignaled(signaled, stat.getValue(), getError(), LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN));
-
-        if (signaled.getValue() != 0) {
-            checkError(LibDrmaa.drmaa_wtermsig(signal, LibDrmaa.DRMAA_SIGNAL_BUFFER_LEN, stat.getValue(), getError(), LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN));
-            checkError(LibDrmaa.drmaa_wcoredump(coreDumped, stat.getValue(), getError(), LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN));
-        }
-
-        checkError(LibDrmaa.drmaa_wifaborted(aborted, stat.getValue(), getError(), LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN));
-
-        return new JnaJobInfo(jobIdOut.getString(0), rusageMap, exited.getValue() != 0, exitStatus.getValue(),
-                signaled.getValue() != 0, signal.getString(0), coreDumped.getValue() != 0, aborted.getValue() != 0);
-    }
-
-    @Override
-    public int getJobProgramStatus(String jobId) throws DrmaaException {
-        IntByReference remotePs = new IntByReference();
-        checkError(LibDrmaa.drmaa_job_ps(jobId, remotePs, getError(), LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN));
-        return remotePs.getValue();
-    }
-
-    @Override
-    public String getContact() {
-        Memory contact = new Memory(LibDrmaa.DRMAA_CONTACT_BUFFER);
-        try {
-            checkError(LibDrmaa.drmaa_get_contact(contact, LibDrmaa.DRMAA_CONTACT_BUFFER_LEN, getError(), LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN));
-        } catch (DrmaaException e) {
-            // DRMAA spec says this method should throw DrmaaException.
-            // Why doesn't interface implement this?
-            throw new RuntimeException(e);
-        }
-        return contact.getString(0);
-    }
-
-    @Override
-    public Version getVersion() {
-        IntByReference major = new IntByReference();
-        IntByReference minor = new IntByReference();
-        try {
-            checkError(LibDrmaa.drmaa_version(major, minor, getError(), LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN));
-        } catch (DrmaaException e) {
-            // DRMAA spec says this method should throw DrmaaException.
-            // Why doesn't interface implement this?
-            throw new RuntimeException(e);
-        }
-        return new Version(major.getValue(), minor.getValue());
-    }
-
-    @Override
-    public String getDrmSystem() {
-        Memory drmSystem = new Memory(LibDrmaa.DRMAA_DRM_SYSTEM_BUFFER);
-        try {
-            checkError(LibDrmaa.drmaa_get_DRM_system(drmSystem, LibDrmaa.DRMAA_DRM_SYSTEM_BUFFER_LEN, getError(), LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN));
-        } catch (DrmaaException e) {
-            // DRMAA spec says this method should throw DrmaaException.
-            // Why doesn't interface implement this?
-            throw new RuntimeException(e);
-        }
-        return drmSystem.getString(0);
-    }
-
-    @Override
-    public String getDrmaaImplementation() {
-        Memory drmaaImplementation = new Memory(LibDrmaa.DRMAA_DRMAA_IMPLEMENTATION_BUFFER);
-        try {
-            checkError(LibDrmaa.drmaa_get_DRMAA_implementation(drmaaImplementation, LibDrmaa.DRMAA_DRMAA_IMPLEMENTATION_BUFFER_LEN, getError(), LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN));
-        } catch (DrmaaException e) {
-            // DRMAA spec says this method should throw DrmaaException.
-            // Why doesn't interface implement this?
-            throw new RuntimeException(e);
-        }
-        return drmaaImplementation.getString(0);
-    }
-
-    public static void setAttribute(Pointer jt, String name, String value) throws DrmaaException {
-        if (getAttrNames().contains(name)) {
-            checkError(LibDrmaa.drmaa_set_attribute(jt, name, value, getError(), LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN));
-        }
-        else {
-            throw new InvalidAttributeValueException("Attribute " + name + " is not supported by this implementation of DRMAA");
-        }
-    }
-
-    public static String getAttribute(Pointer jt, String name) throws DrmaaException {
-        if (getAttrNames().contains(name)) {
-            Memory attrBuffer = new Memory(LibDrmaa.DRMAA_ATTR_BUFFER);
-            checkError(LibDrmaa.drmaa_get_attribute(jt, name, attrBuffer, LibDrmaa.DRMAA_ATTR_BUFFER_LEN, getError(), LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN));
-            return attrBuffer.getString(0);
-        }
-        else {
-            throw new InvalidAttributeValueException("Attribute " + name + " is not supported by this implementation of DRMAA");
-        }
-    }
-
-    public static void setVectorAttribute(Pointer jt, String name, Collection<String> values) throws DrmaaException {
-        StringArray valuesArray = new StringArray(values.toArray(new String[values.size()]));
-        checkError(LibDrmaa.drmaa_set_vector_attribute(jt, name, valuesArray, getError(), LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN));
-    }
-
-    public static List<String> getVectorAttribute(Pointer jt, String name) throws DrmaaException {
-        PointerByReference values = new PointerByReference();
-        checkError(LibDrmaa.drmaa_get_vector_attribute(jt, name, values, getError(), LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN));
-        try {
-            return getAttrValues(values);
-        } finally {
-            releaseAttrValues(values);
-        }
-    }
-
-    public static void setPartialTime(Pointer jt, String name, PartialTimestamp partialTimestamp) throws DrmaaException {
-        setAttribute(jt, name, PARTIAL_TIMESTAMP_FORMAT.format(partialTimestamp));
-    }
-
-    public static PartialTimestamp getPartialTime(Pointer jt, String name) throws DrmaaException {
-        String time = getAttribute(jt, name);
-        if (time == null)
-            return null;
-        try {
-            return PARTIAL_TIMESTAMP_FORMAT.parse(time);
-        } catch (ParseException e) {
-            throw new InternalException(name + " property is unparsable");
-        }
-    }
-
-    public static Set<String> getAttrNames() throws DrmaaException {
-        PointerByReference values = new PointerByReference();
-        checkError(LibDrmaa.drmaa_get_attribute_names(values, getError(), LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN));
-        try {
-            return new LinkedHashSet<String>(getAttrNames(values));
-        } finally {
-            releaseAttrNames(values);
-        }
-    }
-
-    public static Collection<String> mapToCollection(Map<String, String> map) {
-        Collection<String> collection = new LinkedHashSet<String>();
-        for (Map.Entry<String, String> entry: map.entrySet())
-            collection.add(entry.getKey() + "=" + entry.getValue());
-        return collection;
-    }
-
-    public static Map<String, String> collectionToMap(Collection<String> list) {
-        Map<String, String> map = new LinkedHashMap<String, String>();
-        for (String entry: list) {
-            if (entry == null)
-                continue;
-            int equals = entry.indexOf('=');
-            if (equals < 0)
-                continue;
-            map.put(entry.substring(0, equals), entry.substring(equals + 1));
-        }
-        return map;
-    }
-
-    public static String formatLimit(long secs) {
-        long seconds = (secs % 60);
-        long minutes = (secs / 60) % 60;
-        long hours = (secs / 3600);
-        return String.format("%d:%02d:%02d", hours, minutes, seconds);
-    }
-
-    public static long parseLimit(String limit) {
-        long seconds = 0;
-        if (limit != null) {
-            for (String token: limit.split(":")) {
-                seconds *= 60;
-                seconds += Long.parseLong(token);
-            }
-        }
-        return seconds;
-    }
-
-    private static List<String> getAttrNames(PointerByReference names) throws DrmaaException {
-        List<String> namesList = new ArrayList<String>();
-        IntByReference size = new IntByReference();
-        int errnum;
-
-        errnum = LibDrmaa.drmaa_get_num_attr_names(names.getValue(), size);
-        checkError(errnum, "unable to get attribute names");
-        int num = size.getValue();
-
-        Memory value = new Memory(LibDrmaa.DRMAA_ATTR_BUFFER);
-        for (int i = 1; i <= num; i++) {
-            errnum = LibDrmaa.drmaa_get_next_attr_name(names.getValue(), value, LibDrmaa.DRMAA_ATTR_BUFFER_LEN);
-            checkError(errnum, "unable to get attribute name " + i);
-            if (errnum == LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_NO_MORE_ELEMENTS)
-                break;
-            namesList.add(value.getString(0));
-        }
-
-        return namesList;
-    }
-
-    private static List<String> getAttrValues(PointerByReference values) throws DrmaaException {
-        List<String> valuesList = new ArrayList<String>();
-        IntByReference size = new IntByReference();
-        int errnum;
-
-        errnum = LibDrmaa.drmaa_get_num_attr_values(values.getValue(), size);
-        checkError(errnum, "unable to get attribute values");
-        int num = size.getValue();
-
-        Memory value = new Memory(LibDrmaa.DRMAA_ATTR_BUFFER);
-        for (int i = 1; i <= num; i++) {
-            errnum = LibDrmaa.drmaa_get_next_attr_value(values.getValue(), value, LibDrmaa.DRMAA_ATTR_BUFFER_LEN);
-            checkError(errnum, "unable to get attribute value " + i);
-            if (errnum == LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_NO_MORE_ELEMENTS)
-                break;
-            valuesList.add(value.getString(0));
-        }
-
-        return valuesList;
-    }
-
-    private static List<String> getJobIds(PointerByReference jobIds) throws DrmaaException {
-        List<String> jobIdsList = new ArrayList<String>();
-        IntByReference size = new IntByReference();
-        int errnum;
-
-        errnum = LibDrmaa.drmaa_get_num_job_ids(jobIds.getValue(), size);
-        checkError(errnum, "unable to get jobIds");
-        int num = size.getValue();
-
-        Memory value = new Memory(LibDrmaa.DRMAA_JOBNAME_BUFFER);
-        for (int i = 1; i <= num; i++) {
-            errnum = LibDrmaa.drmaa_get_next_job_id(jobIds.getValue(), value, LibDrmaa.DRMAA_JOBNAME_BUFFER_LEN);
-            checkError(errnum, "unable to get jobId " + i);
-            if (errnum == LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_NO_MORE_ELEMENTS)
-                break;
-            jobIdsList.add(value.getString(0));
-        }
-
-        return jobIdsList;
-    }
-
-    private static void releaseAttrNames(PointerByReference names) throws DrmaaException {
-        LibDrmaa.drmaa_release_attr_names(names.getValue());
-    }
-
-    private static void releaseAttrValues(PointerByReference values) throws DrmaaException {
-        LibDrmaa.drmaa_release_attr_values(values.getValue());
-    }
-
-    private static void releaseJobIds(PointerByReference jobIds) throws DrmaaException {
-        LibDrmaa.drmaa_release_job_ids(jobIds.getValue());
-    }
-
-    private static Memory getError() {
-        return threadError.get();
-    }
-
-    private static void checkError(int errnum) throws DrmaaException {
-        if (errnum != LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_SUCCESS)
-            checkError(errnum, getError().getString(0));
-    }
-
-    private static void checkError(int errnum, String error) throws DrmaaException {
-        switch (errnum) {
-            case LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_SUCCESS:
-                break;
-            case LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_INTERNAL_ERROR:
-                throw new InternalException(error);
-            case LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_DRM_COMMUNICATION_FAILURE:
-                throw new DrmCommunicationException(error);
-            case LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_AUTH_FAILURE:
-                throw new AuthorizationException(error);
-            case LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_INVALID_ARGUMENT:
-                throw new IllegalArgumentException(error);
-            case LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_NO_ACTIVE_SESSION:
-                throw new NoActiveSessionException(error);
-            case LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_NO_MEMORY:
-                throw new OutOfMemoryError(error);
-
-                /* -------------- init and exit specific --------------- */
-            case LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_INVALID_CONTACT_STRING:
-                throw new InvalidContactStringException(error);
-            case LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_DEFAULT_CONTACT_STRING_ERROR:
-                throw new DefaultContactStringException(error);
-            case LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_NO_DEFAULT_CONTACT_STRING_SELECTED:
-                throw new NoDefaultContactStringException(error);
-            case LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_DRMS_INIT_FAILED:
-                throw new DrmsInitException(error);
-            case LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_ALREADY_ACTIVE_SESSION:
-                throw new AlreadyActiveSessionException(error);
-            case LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_DRMS_EXIT_ERROR:
-                throw new DrmsExitException(error);
-
-                /* ---------------- job attributes specific -------------- */
-            case LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_INVALID_ATTRIBUTE_FORMAT:
-                throw new InvalidAttributeFormatException(error);
-            case LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_INVALID_ATTRIBUTE_VALUE:
-                throw new InvalidAttributeValueException(error);
-            case LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_CONFLICTING_ATTRIBUTE_VALUES:
-                throw new ConflictingAttributeValuesException(error);
-
-                /* --------------------- job submission specific -------------- */
-            case LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_TRY_LATER:
-                throw new TryLaterException(error);
-            case LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_DENIED_BY_DRM:
-                throw new DeniedByDrmException(error);
-
-                /* ------------------------------- job control specific ---------------- */
-            case LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_INVALID_JOB:
-                throw new InvalidJobException(error);
-            case LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_RESUME_INCONSISTENT_STATE:
-                throw new ResumeInconsistentStateException(error);
-            case LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_SUSPEND_INCONSISTENT_STATE:
-                throw new SuspendInconsistentStateException(error);
-            case LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_HOLD_INCONSISTENT_STATE:
-                throw new HoldInconsistentStateException(error);
-            case LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_RELEASE_INCONSISTENT_STATE:
-                throw new ReleaseInconsistentStateException(error);
-            case LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_EXIT_TIMEOUT:
-                throw new ExitTimeoutException(error);
-            case LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_NO_RUSAGE:
-                break;
-            case LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_NO_MORE_ELEMENTS:
-                break;
-            default:
-                throw new IllegalArgumentException(String.format("Unknown error code %d: %s", errnum, error));
-        }
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/jna/drmaa/v1_0/JnaSessionFactory.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/jna/drmaa/v1_0/JnaSessionFactory.java
deleted file mode 100644
index f4dbc98..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/jna/drmaa/v1_0/JnaSessionFactory.java
+++ /dev/null
@@ -1,40 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.jna.drmaa.v1_0;
-
-import org.ggf.drmaa.Session;
-import org.ggf.drmaa.SessionFactory;
-
-/**
- * JNA mapping from Java to C DRMAA binding.
- */
- at SuppressWarnings("unused")
-public class JnaSessionFactory extends SessionFactory {
-    @Override
-    public Session getSession() {
-        return new JnaSession();
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/jna/drmaa/v1_0/LibDrmaa.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/jna/drmaa/v1_0/LibDrmaa.java
deleted file mode 100644
index 3e5c4e4..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/jna/drmaa/v1_0/LibDrmaa.java
+++ /dev/null
@@ -1,723 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.jna.drmaa.v1_0;
-
-import com.sun.jna.*;
-import com.sun.jna.ptr.IntByReference;
-import com.sun.jna.ptr.PointerByReference;
-
- at SuppressWarnings("unused")
-public class LibDrmaa {
-    static {
-        Native.register("drmaa");
-    }
-
-/* see www.drmaa.org for more details on the DRMAA specification */
-/****** DRMAA/-DRMAA_Interface *************************************************
-*  NAME
-*     DRMAA_Interface -- DRMAA interface
-*
-*  FUNCTION
-*     The enlisted functions specify the C/C++ binding of the DRMAA interface
-*     specification.
-*
-*  SEE ALSO
-*     DRMAA/drmaa_get_next_attr_name()
-*     DRMAA/drmaa_get_next_attr_value()
-*     DRMAA/drmaa_get_next_job_id()
-*     DRMAA/drmaa_release_attr_names()
-*     DRMAA/drmaa_release_attr_values()
-*     DRMAA/drmaa_release_job_ids()
-*     DRMAA/drmaa_init()
-*     DRMAA/drmaa_exit()
-*     DRMAA/drmaa_allocate_job_template()
-*     DRMAA/drmaa_delete_job_template()
-*     DRMAA/drmaa_set_attribute()
-*     DRMAA/drmaa_get_attribute()
-*     DRMAA/drmaa_set_vector_attribute()
-*     DRMAA/drmaa_get_vector_attribute()
-*     DRMAA/drmaa_get_attribute_names()
-*     DRMAA/drmaa_get_vector_attribute_names()
-*     DRMAA/drmaa_run_job()
-*     DRMAA/drmaa_run_bulk_jobs()
-*     DRMAA/drmaa_control()
-*     DRMAA/drmaa_synchronize()
-*     DRMAA/drmaa_wait()
-*     DRMAA/drmaa_wifexited()
-*     DRMAA/drmaa_wexitstatus()
-*     DRMAA/drmaa_wifsignaled()
-*     DRMAA/drmaa_wtermsig()
-*     DRMAA/drmaa_wcoredump()
-*     DRMAA/drmaa_wifaborted()
-*     DRMAA/drmaa_job_ps()
-*     DRMAA/drmaa_strerror()
-*     DRMAA/drmaa_get_contact()
-*     DRMAA/drmaa_version()
-*     DRMAA/drmaa_get_DRM_system()
-*******************************************************************************/
-
-/* ------------------- Constants ------------------- */
-/*
- * some not yet agreed buffer length constants
- * these are recommended minimum values
- */
-
-/* drmaa_get_attribute() */
-public static final long DRMAA_ATTR_BUFFER = 1024;
-public static final NativeLong DRMAA_ATTR_BUFFER_LEN = new NativeLong(DRMAA_ATTR_BUFFER - 1);
-
-/* drmaa_get_contact() */
-public static final long DRMAA_CONTACT_BUFFER = 1024;
-public static final NativeLong DRMAA_CONTACT_BUFFER_LEN = new NativeLong(DRMAA_CONTACT_BUFFER - 1);
-
-/* drmaa_get_DRM_system() */
-public static final long DRMAA_DRM_SYSTEM_BUFFER = 1024;
-public static final NativeLong DRMAA_DRM_SYSTEM_BUFFER_LEN = new NativeLong(DRMAA_DRM_SYSTEM_BUFFER - 1);
-
-/* drmaa_get_DRM_system() */
-public static final long DRMAA_DRMAA_IMPLEMENTATION_BUFFER = 1024;
-public static final NativeLong DRMAA_DRMAA_IMPLEMENTATION_BUFFER_LEN = new NativeLong(DRMAA_DRMAA_IMPLEMENTATION_BUFFER - 1);
-
-/*
- * Agreed buffer length constants
- * these are recommended minimum values
- */
-public static final long DRMAA_ERROR_STRING_BUFFER = 1024;
-public static final long DRMAA_JOBNAME_BUFFER = 1024;
-public static final long DRMAA_SIGNAL_BUFFER = 32;
-
-public static final NativeLong DRMAA_ERROR_STRING_BUFFER_LEN = new NativeLong(DRMAA_ERROR_STRING_BUFFER - 1);
-public static final NativeLong DRMAA_JOBNAME_BUFFER_LEN = new NativeLong(DRMAA_JOBNAME_BUFFER - 1);
-public static final NativeLong DRMAA_SIGNAL_BUFFER_LEN = new NativeLong(DRMAA_SIGNAL_BUFFER - 1);
-
-/*
- * Agreed constants
- */
-public static final NativeLong DRMAA_TIMEOUT_WAIT_FOREVER = new NativeLong(-1);
-public static final NativeLong DRMAA_TIMEOUT_NO_WAIT = new NativeLong(0);
-
-public static final String DRMAA_JOB_IDS_SESSION_ANY = "DRMAA_JOB_IDS_SESSION_ANY";
-public static final String DRMAA_JOB_IDS_SESSION_ALL = "DRMAA_JOB_IDS_SESSION_ALL";
-
-public static final String DRMAA_SUBMISSION_STATE_ACTIVE = "drmaa_active";
-public static final String DRMAA_SUBMISSION_STATE_HOLD = "drmaa_hold";
-
-/*
- * Agreed placeholder names
- */
-public static final String DRMAA_PLACEHOLDER_INCR = "$drmaa_incr_ph$";
-public static final String DRMAA_PLACEHOLDER_HD = "$drmaa_hd_ph$";
-public static final String DRMAA_PLACEHOLDER_WD = "$drmaa_wd_ph$";
-
-/*
- * Agreed names of job template attributes
- */
-public static final String DRMAA_REMOTE_COMMAND = "drmaa_remote_command";
-public static final String DRMAA_JS_STATE = "drmaa_js_state";
-public static final String DRMAA_WD = "drmaa_wd";
-public static final String DRMAA_JOB_CATEGORY = "drmaa_job_category";
-public static final String DRMAA_NATIVE_SPECIFICATION = "drmaa_native_specification";
-public static final String DRMAA_BLOCK_EMAIL = "drmaa_block_email";
-public static final String DRMAA_START_TIME = "drmaa_start_time";
-public static final String DRMAA_JOB_NAME = "drmaa_job_name";
-public static final String DRMAA_INPUT_PATH = "drmaa_input_path";
-public static final String DRMAA_OUTPUT_PATH = "drmaa_output_path";
-public static final String DRMAA_ERROR_PATH = "drmaa_error_path";
-public static final String DRMAA_JOIN_FILES = "drmaa_join_files";
-public static final String DRMAA_TRANSFER_FILES = "drmaa_transfer_files";
-public static final String DRMAA_DEADLINE_TIME = "drmaa_deadline_time";
-public static final String DRMAA_WCT_HLIMIT = "drmaa_wct_hlimit";
-public static final String DRMAA_WCT_SLIMIT = "drmaa_wct_slimit";
-public static final String DRMAA_DURATION_HLIMIT = "drmaa_duration_hlimit";
-public static final String DRMAA_DURATION_SLIMIT = "drmaa_duration_slimit";
-
-/* names of job template vector attributes */
-public static final String DRMAA_V_ARGV = "drmaa_v_argv";
-public static final String DRMAA_V_ENV = "drmaa_v_env";
-public static final String DRMAA_V_EMAIL = "drmaa_v_email";
-
-/*
- * DRMAA errno values
- *
- * do not touch these values are agreed !!!
- */
-public static interface DRMAA_ERRNO {
-   /* -------------- these are relevant to all sections ---------------- */
-   public static final int DRMAA_ERRNO_SUCCESS = 0; /* Routine returned normally with success. */
-   public static final int DRMAA_ERRNO_INTERNAL_ERROR = 1; /* Unexpected or internal DRMAA error like memory allocation, system call failure, etc. */
-   public static final int DRMAA_ERRNO_DRM_COMMUNICATION_FAILURE = 2; /* Could not contact DRM system for this request. */
-   public static final int DRMAA_ERRNO_AUTH_FAILURE = 3; /* The specified request is not processed successfully due to authorization failure. */
-   public static final int DRMAA_ERRNO_INVALID_ARGUMENT = 4; /* The input value for an argument is invalid. */
-   public static final int DRMAA_ERRNO_NO_ACTIVE_SESSION = 5; /* Exit routine failed because there is no active session */
-   public static final int DRMAA_ERRNO_NO_MEMORY = 6; /* failed allocating memory */
-
-   /* -------------- init and exit specific --------------- */
-   public static final int DRMAA_ERRNO_INVALID_CONTACT_STRING = 7; /* Initialization failed due to invalid contact string. */
-   public static final int DRMAA_ERRNO_DEFAULT_CONTACT_STRING_ERROR = 8; /* DRMAA could not use the default contact string to connect to DRM system. */
-   public static final int DRMAA_ERRNO_NO_DEFAULT_CONTACT_STRING_SELECTED = 9; /* No default contact string was provided or selected. DRMAA requires that the default contact string is selected when there is more than one default contact string due to multiple DRMAA implementation contained in the binary module. */
-   public static final int DRMAA_ERRNO_DRMS_INIT_FAILED = 10; /* Initialization failed due to failure to init DRM system. */
-   public static final int DRMAA_ERRNO_ALREADY_ACTIVE_SESSION = 11; /* Initialization failed due to existing DRMAA session. */
-   public static final int DRMAA_ERRNO_DRMS_EXIT_ERROR = 12; /* DRM system disengagement failed. */
-
-   /* ---------------- job attributes specific -------------- */
-   public static final int DRMAA_ERRNO_INVALID_ATTRIBUTE_FORMAT = 13; /* The format for the job attribute value is invalid. */
-   public static final int DRMAA_ERRNO_INVALID_ATTRIBUTE_VALUE = 14; /* The value for the job attribute is invalid. */
-   public static final int DRMAA_ERRNO_CONFLICTING_ATTRIBUTE_VALUES = 15; /* The value of this attribute is conflicting with a previously set attributes. */
-
-   /* --------------------- job submission specific -------------- */
-   public static final int DRMAA_ERRNO_TRY_LATER = 16; /* Could not pass job now to DRM system. A retry may succeed however (saturation). */
-   public static final int DRMAA_ERRNO_DENIED_BY_DRM = 17; /* The DRM system rejected the job. The job will never be accepted due to DRM configuration or job template settings. */
-
-   /* ------------------------------- job control specific ---------------- */
-   public static final int DRMAA_ERRNO_INVALID_JOB = 18; /* The job specified by the 'jobid' does not exist. */
-   public static final int DRMAA_ERRNO_RESUME_INCONSISTENT_STATE = 19; /* The job has not been suspended. The RESUME request will not be processed. */
-   public static final int DRMAA_ERRNO_SUSPEND_INCONSISTENT_STATE = 20; /* The job has not been running, and it cannot be suspended. */
-   public static final int DRMAA_ERRNO_HOLD_INCONSISTENT_STATE = 21; /* The job cannot be moved to a HOLD state. */
-   public static final int DRMAA_ERRNO_RELEASE_INCONSISTENT_STATE = 22; /* The job is not in a HOLD state. */
-   public static final int DRMAA_ERRNO_EXIT_TIMEOUT = 23; /* We have encountered a time-out condition for drmaa_synchronize or drmaa_wait. */
-   public static final int DRMAA_ERRNO_NO_RUSAGE = 24; /* This error code is returned by drmaa_wait() when a job has finished but no rusage and stat data could be provided. */
-   public static final int DRMAA_ERRNO_NO_MORE_ELEMENTS = 25; /* There are no more elements in the opaque string vector. */
-
-   public static final int DRMAA_NO_ERRNO = 26;
-}
-
-/*
- * Agreed DRMAA job states as returned by drmaa_job_ps()
- */
-public static interface DRMAA_PS {
- public static final int DRMAA_PS_UNDETERMINED = 0x00; /* process status cannot be determined */
- public static final int DRMAA_PS_QUEUED_ACTIVE = 0x10; /* job is queued and active */
- public static final int DRMAA_PS_SYSTEM_ON_HOLD = 0x11; /* job is queued and in system hold */
- public static final int DRMAA_PS_USER_ON_HOLD = 0x12; /* job is queued and in user hold */
- public static final int DRMAA_PS_USER_SYSTEM_ON_HOLD = 0x13; /* job is queued and in user and system hold */
- public static final int DRMAA_PS_RUNNING = 0x20; /* job is running */
- public static final int DRMAA_PS_SYSTEM_SUSPENDED = 0x21; /* job is system suspended */
- public static final int DRMAA_PS_USER_SUSPENDED = 0x22; /* job is user suspended */
- public static final int DRMAA_PS_USER_SYSTEM_SUSPENDED = 0x23; /* job is user and system suspended */
- public static final int DRMAA_PS_DONE = 0x30; /* job finished normally */
- public static final int DRMAA_PS_FAILED = 0x40;  /* job finished, but failed */
-}
-
-/*
- * Agreed DRMAA actions for drmaa_control()
- */
-public static interface DRMAA_CONTROL {
- public static final int DRMAA_CONTROL_SUSPEND = 0;
- public static final int DRMAA_CONTROL_RESUME = 1;
- public static final int DRMAA_CONTROL_HOLD = 2;
- public static final int DRMAA_CONTROL_RELEASE = 3;
- public static final int DRMAA_CONTROL_TERMINATE = 4;
-}
-
-/* ------------------- Data types ------------------- */
-/*
- * Agreed opaque DRMAA job template
- * struct drmaa_job_template_s is in japiP.h
- */
-//typedef struct drmaa_job_template_s drmaa_job_template_t;
-
-/* ---------- C/C++ language binding specific interfaces -------- */
-
-//typedef struct drmaa_attr_names_s drmaa_attr_names_t;
-//typedef struct drmaa_attr_values_s drmaa_attr_values_t;
-//typedef struct drmaa_job_ids_s  drmaa_job_ids_t;
-
-/*
- * get next string attribute from iterator
- *
- * returns DRMAA_ERRNO_SUCCESS or DRMAA_ERRNO_INVALID_ATTRIBUTE_VALUE
- * if no such exists
- */
-
-public static native int drmaa_get_next_attr_name(/* drmaa_attr_names_t* */ Pointer values, Pointer value,
-                             NativeLong value_len);
-public static native int drmaa_get_next_attr_value(/* drmaa_attr_names_t* */ Pointer values, Pointer value,
-                              NativeLong value_len);
-public static native int drmaa_get_next_job_id(/* drmaa_job_ids_t* */ Pointer values, Pointer value,
-                          NativeLong value_len);
-
-/*
- * get element count of opaque string vector
- *
- * Gives the number of elements in the opaque string vector.  Useful for
- * copying the contents into an array.
- */
-public static native int drmaa_get_num_attr_names(/* drmaa_attr_names_t* */ Pointer values, IntByReference size);
-public static native int drmaa_get_num_attr_values(/* drmaa_attr_values_t* */ Pointer values, IntByReference size);
-public static native int drmaa_get_num_job_ids(/* drmaa_job_ids_t* */ Pointer values, IntByReference size);
-
-/*
- * release opaque string vector
- *
- * Opaque string vectors can be used without any constraint
- * until the release function has been called.
- */
-public static native void drmaa_release_attr_names(/* drmaa_attr_names_t* */ Pointer values);
-public static native void drmaa_release_attr_values(/* drmaa_attr_values_t* */ Pointer values);
-public static native void drmaa_release_job_ids(/* drmaa_job_ids_t* */ Pointer values);
-
-/* ------------------- init/exit routines ------------------- */
-/*
- * Initialize DRMAA API library and create a new DRMAA Session. 'Contact'
- * is an implementation dependent string which MAY be used to specify
- * which DRM system to use. This routine MUST be called before any
- * other DRMAA calls, except for drmaa_version().
- * If 'contact' is NULL, the default DRM system SHALL be used provided there is
- * only one DRMAA implementation in the provided binary module.  When these is
- * more than one DRMAA implementation in the binary module, drmaa_init() SHALL
- * return the DRMAA_ERRNO_NO_DEFAULT_CONTACT_STRING_SELECTED error. drmaa_init()
- * SHOULD be called by only one of the threads. The main thread is RECOMMENDED.
- * A call by another thread SHALL return DRMAA_ERRNO_ALREADY_ACTIVE_SESSION.
- * When 'contact' is a a semi-colon separated list of name=value strings, the
- * strings will be parsed and interpreted.  The current list of accepted names
- * is:
- *    session -- the id of the session to which to reconnect
-#if 0
- *    sge_root -- the SGE_ROOT to use
- *    sge_cell -- the SGE_CELL to use
-#endif
- *
- * drmaa_init() SHALL return DRMAA_ERRNO_SUCCESS on success, otherwise:
- *    DRMAA_ERRNO_INVALID_CONTACT_STRING,
- *    DRMAA_ERRNO_NO_MEMORY,
- *    DRMAA_ERRNO_ALREADY_ACTIVE_SESSION,
- *    DRMAA_ERRNO_NO_DEFAULT_CONTACT_STRING_SELECTED, or
- *    DRMAA_ERRNO_DEFAULT_CONTACT_STRING_ERROR.
- */
-public static native int drmaa_init(String contact, Pointer error_diagnosis, NativeLong error_diag_len);
-
-
-/*
- * Disengage from DRMAA library and allow the DRMAA library to perform
- * any necessary internal clean up.
- * This routine SHALL end the current DRMAA Session, but SHALL NOT effect any
- * jobs (e.g., queued and running jobs SHALL remain queued and running).
- * drmaa_exit() SHOULD be called by only one of the threads. Other thread calls
- * to drmaa_exit() MAY fail since there is no active session.
- *
- * drmaa_exit() SHALL return DRMAA_ERRNO_SUCCESS on success, otherwise:
- *    DRMAA_ERRNO_DRMS_EXIT_ERROR or
- *    DRMAA_ERRNO_NO_ACTIVE_SESSION.
- */
-public static native int drmaa_exit(Pointer error_diagnosis, NativeLong error_diag_len);
-
-/* ------------------- job template routines ------------------- */
-
-/*
- * Allocate a new job template.
- *
- * drmaa_allocate_job_template() SHALL return DRMAA_ERRNO_SUCCESS on success,
- * otherwise:
- *    DRMAA_ERRNO_DRM_COMMUNICATION_FAILURE,
- *    DRMAA_ERRNO_INTERNAL_ERROR or
- *    DRMAA_ERRNO_NO_MEMORY.
- */
-public static native int drmaa_allocate_job_template(/* drmaa_job_template_t** */ PointerByReference jt, Pointer error_diagnosis, NativeLong error_diag_len);
-
-/*
- * Deallocate a job template. This routine has no effect on jobs.
- *
- * drmaa_delete_job_template() SHALL return DRMAA_ERRNO_SUCCESS on success,
- * otherwise:
- *    DRMAA_ERRNO_DRM_COMMUNICATION_FAILURE or
- *    DRMAA_ERRNO_INTERNAL_ERROR.
- */
-public static native int drmaa_delete_job_template(/* drmaa_job_template_t* */ Pointer jt, Pointer error_diagnosis,
-                              NativeLong error_diag_len);
-
-
-/*
- * Adds ('name', 'value') pair to list of attributes in job template 'jt'.
- * Only non-vector attributes SHALL be passed.
- *
- * drmaa_set_attribute() SHALL return DRMAA_ERRNO_SUCCESS on success, otherwise:
- *    DRMAA_ERRNO_INVALID_ATTRIBUTE_FORMAT,
- *    DRMAA_ERRNO_INVALID_ARGUMENT,
- *    DRMAA_ERRNO_NO_MEMORY,
- *    DRMAA_ERRNO_INVALID_ATTRIBUTE_VALUE or
- *    DRMAA_ERRNO_CONFLICTING_ATTRIBUTE_VALUES.
- */
-public static native int drmaa_set_attribute(/* drmaa_job_template_t* */ Pointer jt, String name,
-                        String value, Pointer error_diagnosis,
-                        NativeLong error_diag_len);
-
-
-/*
- * If 'name' is an existing non-vector attribute name in the job
- * template 'jt', then the value of 'name' SHALL be returned; otherwise,
- * NULL is returned.
- *
- * drmaa_get_attribute() SHALL return DRMAA_ERRNO_SUCCESS on success, otherwise:
- *    DRMAA_ERRNO_INVALID_ATTRIBUTE_VALUE.
- */
-public static native int drmaa_get_attribute(/* drmaa_job_template_t* */ Pointer jt, String name, Pointer value,
-                        NativeLong value_len, Pointer error_diagnosis,
-                        NativeLong error_diag_len);
-
-/* Adds ('name', 'values') pair to list of vector attributes in job template
- * 'jt'. Only vector attributes SHALL be passed.
- * A 'value' string vector containing n elements must be n+1 elements long, with
- * the nth value, i.e. value[n], being set to NULL as a delimitor.
- *
- * drmaa_set_vector_attribute() SHALL return DRMAA_ERRNO_SUCCESS on success,
- * otherwise:
- *    DRMAA_ERRNO_INVALID_ATTRIBUTE_FORMAT,
- *    DRMAA_ERRNO_INVALID_ARGUMENT,
- *    DRMAA_ERRNO_NO_MEMORY,
- *    DRMAA_ERRNO_CONFLICTING_ATTRIBUTE_VALUES.
- */
-public static native int drmaa_set_vector_attribute(/* drmaa_job_template_t* */ Pointer jt, String name,
-                               Pointer value, Pointer error_diagnosis,
-                               NativeLong error_diag_len);
-
-
-/*
- * If 'name' is an existing vector attribute name in the job template 'jt',
- * then the values of 'name' are returned; otherwise, NULL is returned.
- *
- * drmaa_get_vector_attribute() SHALL return DRMAA_ERRNO_SUCCESS on success,
- * otherwise:
- *    DRMAA_ERRNO_INVALID_ATTRIBUTE_VALUE.
- */
-public static native int drmaa_get_vector_attribute(/* drmaa_job_template_t* */ Pointer jt, String name,
-                               /* drmaa_attr_values_t ** */ PointerByReference values,
-                               Pointer error_diagnosis, NativeLong error_diag_len);
-
-
-/*
- * SHALL return the set of supported attribute names whose associated
- * value type is String. This set SHALL include supported DRMAA reserved
- * attribute names and native attribute names.
- *
- * drmaa_get_attribute_names() SHALL return DRMAA_ERRNO_SUCCESS on success,
- * otherwise:
- *    DRMAA_ERRNO_NO_MEMORY.
- */
-public static native int drmaa_get_attribute_names(/* drmaa_attr_names_t ** */ PointerByReference values,
-                              Pointer error_diagnosis, NativeLong error_diag_len);
-
-/*
- * SHALL return the set of supported attribute names whose associated
- * value type is String Vector.  This set SHALL include supported DRMAA reserved
- * attribute names and native attribute names.
- *
- * drmaa_get_vector_attribute_names() SHALL return DRMAA_ERRNO_SUCCESS on
- * success, otherwise:
- *    DRMAA_ERRNO_NO_MEMORY.
- */
-public static native int drmaa_get_vector_attribute_names(/* drmaa_attr_names_t ** */ PointerByReference values,
-                                     Pointer error_diagnosis,
-                                     NativeLong error_diag_len);
-
-/* ------------------- job submission routines ------------------- */
-
-/*
- * Submit a job with attributes defined in the job template 'jt'.
- * The job identifier 'job_id' is a printable, NULL terminated string,
- * identical to that returned by the underlying DRM system.
- *
- * drmaa_run_job() SHALL return DRMAA_ERRNO_SUCCESS on success, otherwise:
- *    DRMAA_ERRNO_TRY_LATER,
- *    DRMAA_ERRNO_DENIED_BY_DRM,
- *    DRMAA_ERRNO_NO_MEMORY,
- *    DRMAA_ERRNO_DRM_COMMUNICATION_FAILURE or
- *    DRMAA_ERRNO_AUTH_FAILURE.
- */
-public static native int drmaa_run_job(Pointer job_id, NativeLong job_id_len,
-                  /* drmaa_job_template_t * */ Pointer jt, Pointer error_diagnosis,
-                  NativeLong error_diag_len);
-
-/*
- * Submit a set of parametric jobs, dependent on the implied loop index, each
- * with attributes defined in the job template 'jt'.
- * The job identifiers 'job_ids' SHALL all be printable,
- * NULL terminated strings, identical to those returned by the underlying
- * DRM system. Nonnegative loop bounds SHALL NOT use file names
- * that start with minus sign like command line options.
- * DRMAA defines a special index placeholder, drmaa_incr_ph, (which has the
- * value "$incr_pl$") that is used to construct parametric job templates.
- * For example:
- * //C++ string syntax used
- * drmaa_set_attribute(pjt, "stderr", drmaa_incr_ph + ".err" );
- *
- * drmaa_run_bulk_jobs() SHALL return DRMAA_ERRNO_SUCCESS on success, otherwise:
- *    DRMAA_ERRNO_TRY_LATER,
- *    DRMAA_ERRNO_DENIED_BY_DRM,
- *    DRMAA_ERRNO_NO_MEMORY,
- *    DRMAA_ERRNO_DRM_COMMUNICATION_FAILURE or
- *    DRMAA_ERRNO_AUTH_FAILURE.
- */
-public static native int drmaa_run_bulk_jobs(/* drmaa_job_ids_t ** */ PointerByReference jobids,
-                        /* drmaa_job_template_t * */ Pointer jt, int start, int end,
-                        int incr, Pointer error_diagnosis, NativeLong error_diag_len);
-
-/* ------------------- job control routines ------------------- */
-
-/*
- * Start, stop, restart, or kill the job identified by 'job_id'.
- * If 'job_id' is DRMAA_JOB_IDS_SESSION_ALL then this routine
- * acts on all jobs *submitted* during this DRMAA session.
- * The legal values for 'action' and their meanings SHALL be:
- * DRMAA_CONTROL_SUSPEND:     stop the job,
- * DRMAA_CONTROL_RESUME:      (re)start the job,
- * DRMAA_CONTROL_HOLD:        put the job on-hold,
- * DRMAA_CONTROL_RELEASE:     release the hold on the job, and
- * DRMAA_CONTROL_TERMINATE:   kill the job.
- *
- * This routine SHALL return once the action has been acknowledged by
- * the DRM system, but does not necessarily wait until the action
- * has been completed.
- *
- * drmaa_control() SHALL return DRMAA_ERRNO_SUCCESS on success, otherwise:
- *    DRMAA_ERRNO_DRM_COMMUNICATION_FAILURE,
- *    DRMAA_ERRNO_AUTH_FAILURE,
- *    DRMAA_ERRNO_NO_MEMORY,
- *    DRMAA_ERRNO_RESUME_INCONSISTENT_STATE,
- *    DRMAA_ERRNO_SUSPEND_INCONSISTENT_STATE,
- *    DRMAA_ERRNO_HOLD_INCONSISTENT_STATE,
- *    DRMAA_ERRNO_RELEASE_INCONSISTENT_STATE or
- *    DRMAA_ERRNO_INVALID_JOB.
- */
-public static native int drmaa_control(String jobid, int action, Pointer error_diagnosis,
-                  NativeLong error_diag_len);
-
-
-/*
- * Wait until all jobs specified by 'job_ids' have finished
- * execution. If 'job_ids' is DRMAA_JOB_IDS_SESSION_ALL then this routine
- * waits for all jobs *submitted* during this DRMAA session. The timeout value
- * is used to specify the number of seconds to wait for the job to fail finish
- * before returning if a result is not immediately available.  The value
- * DRMAA_TIMEOUT_WAIT_FOREVER can be used to specify that routine should wait
- * indefinitely for a result. The value DRMAA_TIMEOUT_NO_WAIT can be used to
- * specify that the routine should return immediately if no result is available.
- * If the call exits before timeout, all the jobs have
- * been waited on or there was an interrupt.
- * If the invocation exits on timeout, the return code is
- * DRMAA_ERRNO_EXIT_TIMEOUT. The caller SHOULD check system time before and
- * after this call in order to check how much time has passed.
- *
- * The dispose parameter specifies how to treat reaping information:
- * True=1      "fake reap", i.e. dispose of the rusage data
- * False=0     do not reap
- *
- * A 'job_ids' string vector containing n elements must be n+1 elements long,
- * with the nth value, i.e. job_ids[n], being set to NULL as a delimitor.
- *
- * drmaa_synchronize() SHALL return DRMAA_ERRNO_SUCCESS on success, otherwise:
- *    DRMAA_ERRNO_DRM_COMMUNICATION_FAILURE,
- *    DRMAA_ERRNO_AUTH_FAILURE,
- *    DRMAA_ERRNO_NO_MEMORY,
- *    DRMAA_ERRNO_EXIT_TIMEOUT or
- *    DRMAA_ERRNO_INVALID_JOB.
- */
-public static native int drmaa_synchronize(Pointer job_ids, NativeLong timeout, int dispose,
-                      Pointer error_diagnosis, NativeLong error_diag_len);
-
-
-/*
- * This routine SHALL wait for a job with job_id to fail or finish execution. If
- * the special string, DRMAA_JOB_IDS_SESSION_ANY is provided as the job_id,
- * this routine SHALL wait for any job from the session. This routine is modeled
- * on the wait3 POSIX routine. The timeout value is used to specify the number
- * of seconds to wait for the job to fail finish before returning if a result is
- * not immediately available.  The value DRMAA_TIMEOUT_WAIT_FOREVER can be
- * used to specify that routine should wait indefinitely for a result. The value
- * DRMAA_TIMEOUT_NO_WAIT may be specified that the routine should return
- * immediately if no result is available.
- * If the call exits before timeout ,the job has been waited on
- * successfully or there was an interrupt.
- * If the invocation exits on timeout, the return code is
- * DRMAA_ERRNO_EXIT_TIMEOUT. The caller SHOULD check system time before and
- * after this call in order to check how much time has passed.
- * The routine reaps jobs on a successful call, so any subsequent calls
- * to drmaa_wait SHOULD fail returning an error DRMAA_ERRNO_INVALID_JOB meaning
- * that the job has been already reaped. This error is the same as if the job
- * was unknown. Failing due to an elapsed timeout has an effect that it is
- * possible to issue drmaa_wait multiple times for the same job_id.  When
- * successful, the rusage information SHALL be provided as an array of strings,
- * where each string complies with the format <name>=<value>. The string portion
- * <value> contains the amount of resources consumed by the job and is opaque.
- * The 'stat' drmaa_wait parameter is used in the drmaa_w* functions for
- * providing more detailed information about job termination if available. An
- * analogous set of macros is defined in POSIX for analyzing the wait3(2) OUT
- * parameter 'stat'.
- *
- * drmaa_wait() SHALL return DRMAA_ERRNO_SUCCESS on success, otherwise:
- *    DRMAA_ERRNO_DRM_COMMUNICATION_FAILURE,
- *    DRMAA_ERRNO_AUTH_FAILURE,
- *    DRMAA_ERRNO_NO_RUSAGE,
- *    DRMAA_ERRNO_NO_MEMORY,
- *    DRMAA_ERRNO_EXIT_TIMEOUT or
- *    DRMAA_ERRNO_INVALID_JOB.
- */
-public static native int drmaa_wait(String job_id, Pointer job_id_out, NativeLong job_id_out_len,
-               IntByReference stat, NativeLong timeout, /* drmaa_attr_values_t ** */ PointerByReference rusage,
-               Pointer error_diagnosis, NativeLong error_diag_len);
-
-/*
- * Evaluates into 'exited' a non-zero value if stat was returned for a
- * job that terminated normally. A zero value can also indicate that
- * altough the job has terminated normally an exit status is not available
- * or that it is not known whether the job terminated normally. In both
- * cases drmaa_wexitstatus() SHALL NOT provide exit status information.
- * A non-zero 'exited' value indicates more detailed diagnosis can be provided
- * by means of drmaa_wifsignaled(), drmaa_wtermsig() and drmaa_wcoredump().
- */
-public static native int drmaa_wifexited(IntByReference exited, int stat, Pointer error_diagnosis,
-                    NativeLong error_diag_len);
-
-/*
- * If the OUT parameter 'exited' of drmaa_wifexited() is non-zero,
- * this function evaluates into 'exit_code' the exit code that the
- * job passed to _exit() (see exit(2)) or exit(3C), or the value that
- * the child process returned from main.
- */
-public static native int drmaa_wexitstatus(IntByReference exit_status, int stat, Pointer error_diagnosis,
-                      NativeLong error_diag_len);
-
-/*
- * Evaluates into 'signaled' a non-zero value if status was returned
- * for a job that terminated due to the receipt of a signal. A zero value
- * can also indicate that altough the job has terminated due to the receipt
- * of a signal the signal is not available or that it is not known whether
- * the job terminated due to the receipt of a signal. In both cases
- * drmaa_wtermsig() SHALL NOT provide signal information.
- */
-public static native int drmaa_wifsignaled(IntByReference signaled, int stat, Pointer error_diagnosis,
-                      NativeLong error_diag_len);
-
-/*
- * If the OUT parameter 'signaled' of drmaa_wifsignaled(stat) is
- * non-zero, this function evaluates into signal a string representation of the
- * signal that caused the termination of the job. For signals declared by POSIX,
- * the symbolic names SHALL be returned (e.g., SIGABRT, SIGALRM).
- * For signals not declared by POSIX, any other string MAY be returned.
- */
-public static native int drmaa_wtermsig(Pointer signal, NativeLong signal_len, int stat,
-                   Pointer error_diagnosis, NativeLong error_diag_len);
-
-/*
- * If the OUT parameter 'signaled' of drmaa_wifsignaled(stat) is
- * non-zero, this function evaluates into 'core_dumped' a non-zero value
- * if a core image of the terminated job was created.
- */
-public static native int drmaa_wcoredump(IntByReference core_dumped, int stat, Pointer error_diagnosis,
-                    NativeLong error_diag_len);
-
-/*
- * Evaluates into 'aborted' a non-zero value if 'stat'
- * was returned for a job that ended before entering the running state.
- */
-public static native int drmaa_wifaborted(IntByReference aborted, int stat, Pointer error_diagnosis,
-                     NativeLong error_diag_len);
-
-
-
-/*
- * Get the program status of the job identified by 'job_id'.
- * The possible values returned in 'remote_ps' and their meanings SHALL be:
- *
- * DRMAA_PS_UNDETERMINED          = 0x00: process status cannot be determined
- * DRMAA_PS_QUEUED_ACTIVE         = 0x10: job is queued and active
- * DRMAA_PS_SYSTEM_ON_HOLD        = 0x11: job is queued and in system hold
- * DRMAA_PS_USER_ON_HOLD          = 0x12: job is queued and in user hold
- * DRMAA_PS_USER_SYSTEM_ON_HOLD   = 0x13: job is queued and in user and system
- *                                        hold
- * DRMAA_PS_RUNNING               = 0x20: job is running
- * DRMAA_PS_SYSTEM_SUSPENDED      = 0x21: job is system suspended
- * DRMAA_PS_USER_SUSPENDED        = 0x22: job is user suspended
- * DRMAA_PS_USER_SYSTEM_SUSPENDED = 0x23: job is user and system suspended
- * DRMAA_PS_DONE                  = 0x30: job finished normally
- * DRMAA_PS_FAILED                = 0x40: job finished, but failed
- *
- * DRMAA SHOULD always get the status of job_id from DRM system, unless the
- * previous status has been DRMAA_PS_FAILED or DRMAA_PS_DONE and the status has
- * been successfully cached. Terminated jobs get DRMAA_PS_FAILED status.
- *
- * drmaa_synchronize() SHALL return DRMAA_ERRNO_SUCCESS on success, otherwise:
- *    DRMAA_ERRNO_DRM_COMMUNICATION_FAILURE,
- *    DRMAA_ERRNO_AUTH_FAILURE,
- *    DRMAA_ERRNO_NO_MEMORY or
- *    DRMAA_ERRNO_INVALID_JOB.
- */
-public static native int drmaa_job_ps(String job_id, IntByReference remote_ps, Pointer error_diagnosis,
-                 NativeLong error_diag_len);
-
-/* ------------------- auxiliary routines ------------------- */
-
-/*
- * SHALL return the error message text associated with the errno number. The
- * routine SHALL return null string if called with invalid ERRNO number.
- */
-public static native String drmaa_strerror(int drmaa_errno);
-
-/*
- * If called before drmaa_init(), it SHALL return a comma delimited default
- * DRMAA implementation contacts string, one per each DRM system provided
- * implementation. If called after drmaa_init(), it SHALL return the selected
- * contact string. The output string is Implementation dependent.
- * drmaa_get_contact() SHALL return DRMAA_ERRNO_SUCCESS on success, otherwise:
- *    DRMAA_ERRNO_INTERNAL_ERROR.
- */
-public static native int drmaa_get_contact(Pointer contact, NativeLong contact_len,
-         Pointer error_diagnosis, NativeLong error_diag_len);
-
-/*
- * OUT major - major version number (non-negative integer)
- * OUT minor - minor version number (non-negative integer)
- * SHALL return the major and minor version numbers of the DRMAA library;
- * for DRMAA 1.0, 'major' is 1 and 'minor' is 0.
- */
-public static native int drmaa_version(IntByReference major, IntByReference minor,
-         Pointer error_diagnosis, NativeLong error_diag_len);
-
-
-/*
- * If called before drmaa_init(), it SHALL return a comma delimited DRM systems
- * string, one per each DRM system provided implementation. If called after
- * drmaa_init(), it SHALL return the selected DRM system. The output string is
- * implementation dependent.
- *
- * drmaa_get_DRM_system() SHALL return DRMAA_ERRNO_SUCCESS on success,
- * otherwise:
- *    DRMAA_ERRNO_INTERNAL_ERROR.
- */
-public static native int drmaa_get_DRM_system(Pointer drm_system, NativeLong drm_system_len,
-         Pointer error_diagnosis, NativeLong error_diag_len);
-
-
-/*
- * If called before drmaa_init(), it SHALL return a comma delimited DRMAA
- * implementations string, one per each DRM system provided implementation. If
- * called after drmaa_init(), it SHALL return the selected DRMAA implementation.
- * The output (string) is implementation dependent. drmaa_get_DRM_implementation
- * routine SHALL return DRMAA_ERRNO_SUCCESS on success, otherwise:
- *    DRMAA_ERRNO_INTERNAL_ERROR.
- */
-public static native int drmaa_get_DRMAA_implementation(Pointer drmaa_impl, NativeLong drmaa_impl_len,
-         Pointer error_diagnosis, NativeLong error_diag_len);
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/jna/lsf/v7_0_6/LibBat.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/jna/lsf/v7_0_6/LibBat.java
deleted file mode 100644
index e66a40d..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/jna/lsf/v7_0_6/LibBat.java
+++ /dev/null
@@ -1,20014 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.jna.lsf.v7_0_6;
-
-import com.sun.jna.*;
-import com.sun.jna.ptr.*;
-import org.broadinstitute.gatk.utils.jna.clibrary.JNAUtils;
-import org.broadinstitute.gatk.utils.jna.clibrary.LibC;
-
-/*
-  NOTE: This library uses Pointer for some Struct.ByReference members going
-  against the JNA recommendations at http://jna.java.net/#structure_use
-  Instead stuct arrays are Pointers and each structure contains a
-  constructor that can accept the Pointer iff the size of the array is
-  known to be greater than zero.
-
-  This was especially problematic in jobInfoEnt->items->resName. When
-  jobInfo->reserveCnt was zero jobInfoItems->items was not necessarily null.
-
-  LSF will often reuse memory for structure arrays but will set the
-  array size / count (reserveCnt above) to zero when the array should
-  not be accessed. When LSF has reused memory and points to a non-null
-  structure pointer (items) the inner structure may contain further
-  garbage pointers (especially items->resName).
-
-  When JNA sees a non-null Structure.ByReference it will autoRead() the
-  member. When autoRead() eventually gets to the items->resName trying
-  to run strlen on the bad memory address causes a SIGSEGV.
-
-  By using a Pointer instead of the Structure.ByReference JNA will not
-  automatically autoRead(), and the API user will have to pass the
-  pointer to the Structure on their own.
-*/
-
-/**
- * JNA wrappers for LSF's lsbatch.h and -lbat
- *
- * $Id: lsbatch.h,v 2.1043 2009/08/06 16:50:49 bxia Exp $
- * -----------------------------------------------------------------
- *
- *  Lsbatch Distributed Batch Utility --
- *
- *  Header file for all lsbatch components: applications, lsblib,
- *                                          mbatchd and sbatchd
- *
- * ------------------------------------------------------------------
- */
- at SuppressWarnings("unused")
-public class LibBat {
-
-    static {
-        // via Platform LSF Configuration Reference, by default quiet the BSUB output.
-        if ("Y".equals(System.getProperty("BSUB_QUIET", "Y")))
-            LibC.setenv("BSUB_QUIET", "Y", 1);
-        String lsfLibDir = System.getenv("LSF_LIBDIR");
-        if (lsfLibDir != null) {
-            NativeLibrary.addSearchPath("lsf", lsfLibDir);
-            NativeLibrary.addSearchPath("bat", lsfLibDir);
-        }
-        /*
-        LSF 7.0.6 on the mac is missing the unsatisfied exported symbol for environ which was removed on MacOS X 10.5+.
-        nm $LSF_LIBDIR/liblsf.dylib | grep environ
-        See "man environ" for more info, along with http://lists.apple.com/archives/java-dev/2007/Dec/msg00096.html
-        For now, we export environ ourselves using libenvironhack.dylib available in c/libenvironhack.
-        */
-        if (Platform.isMac())
-            NativeLibrary.getInstance("environhack");
-        NativeLibrary liblsf = NativeLibrary.getInstance("lsf");
-        Native.register("bat");
-        // HACK: Running into a weird error:
-        //   java.lang.UnsatisfiedLinkError: Unable to load library 'bat': <$LSF_LIBDIR>/libbat.so: undefined symbol: xdr_resourceInfoReq
-        // This function is very clearly unsatisfied by running 'nm $LSF_LIBDIR/libbat.so | grep xdr_resourceInfoReq' but is
-        // found in liblsf.so when running 'nm $LSF_LIBDIR/liblsf.so | grep xdr_resourceInfoReq'. For now holding on to a reference
-        // to the LSF lib just in case this is a problem with the NativeLibrary's internal WeakReferences and the library being unloaded?
-        liblsf.getFunction("xdr_resourceInfoReq").getName();
-    }
-
-    // Via support at platform.com:
-    //    For equivalent api of bsub -a "xxx aaa qqq", option -a is not in struct submit, we
-    //    have to use setOption_ to set it. setOption_ can be used in user program by including
-    //    cmd.h or opensource.h of LSF opensource. You can refer to cmd.sub.c in opensource.
-    //
-    //    Here is a demonstration on the api for bsub -a
-    //    =========================================================================
-    //    /*define external setOption_ function*/
-    //    extern int setOption_(int argc, char **argv, char *template,
-    //    struct submit *req, int mask, int mask2, char **errMsg);
-    //
-    //    int setEsub(char *esub, struct submit *req) {
-    //    int x;
-    //    char *template, *arg[3];
-    //    /*set esub with the following strings and set array length*/
-    //    arg[0] = "blah";
-    //    arg[1] = "-a";
-    //    arg[2] = test;
-    //    /* -a "test", You can add additional esubs in here.  Just make sure they're space delimited.  ie. "test mpich lammpi" */
-    //    x=3;
-    //    /*set template*/
-    //    template = "a:"
-    //    /*run setOption_()*/
-    //    if (setOption_(x, arg, template, req, ~0, ~0, ~0, NULL) == -1) {
-    //    return(-1);
-    //    }
-    //    else {
-    //    return(0);
-    //    }
-    //    }
-    //    =========================================================================
-
-    /**
-     * Used for setting esub and other options not in struct submit.
-     * Via support at platform.com
-     *
-     * @param argc number of args
-     * @param argv arguments including a first argument that will not be used
-     * @param template a colon delimited list of arguments in getopt format
-     * @param jobSubReq the lsf submit
-     * @param mask unknown
-     * @param mask2 unknown
-     * @param mask3 unknown
-     * @param errMsg unknown
-     * @return -1 if the option setting failed
-     */
-    public static native int setOption_(int argc, Pointer argv, String template, submit jobSubReq, int mask, int mask2, int mask3, Pointer errMsg);
-
-    /** Max job name length as defined by 'man bsub'. */
-    public static final int MAX_JOB_NAME_LEN = 4094;
-
-/* if only everyone had <paths.h> */
-    public static final String _PATH_NULL = "/dev/null";
-
-    //public static int SKIP_SPACES (int word)  { while (word[0] == ' ' )  word++; }
-
-    //public static void FREEUP_ARRAY(int num, Pointer vector) { FREE_STRING_VECTOR_ENTRIES(num, vector);  FREEUP(vector); }
-
-
-/* event log version:
-*  each new major release requires to add a new line
- */
-    public static final float LSB_EVENT_VERSION3_0 = 3.0f;
-    public static final float LSB_EVENT_VERSION3_1 = 3.1f;
-    public static final float LSB_EVENT_VERSION3_2 = 3.2f;
-    public static final float LSB_EVENT_VERSION4_0 = 4.0f;
-    public static final float LSB_EVENT_VERSION4_1 = 4.1f;
-    public static final float LSB_EVENT_VERSION4_2 = 4.2f;
-    public static final float LSB_EVENT_VERSION5_0 = 5.0f;
-    public static final float LSB_EVENT_VERSION5_1 = 5.1f;
-    public static final float LSB_EVENT_VERSION6_0 = 6.0f;
-    public static final float LSB_EVENT_VERSION6_1 = 6.1f;
-    public static final float LSB_EVENT_VERSION6_2 = 6.2f;
-    public static final float LSB_EVENT_VERSION7_0 = 7.0f;
-    public static final float LSB_EVENT_VERSION7_0_1 = 7.01f;
-    public static final float LSB_EVENT_VERSION7_0_2 = 7.02f;
-    public static final float LSB_EVENT_VERSION7_0_3 = 7.03f;
-    public static final float LSB_EVENT_VERSION7_0_4 = 7.04f;
-    public static final float LSB_EVENT_VERSION7_0_5 = 7.05f;
-    public static final float LSB_EVENT_VERSION7_0_6 = 7.06f;
-
-/* current event version number of the mbatchd */
-    public static final String THIS_VERSION = "7.06";
-
-    public static final int MAX_VERSION_LEN = 12;
-
-/* num of users per host partition */
-    public static final int MAX_HPART_USERS = 100;
-
-/* max byte limit, OS independent */
-    public static final int MAX_CHARLEN = 20;
-
-/* the max length of name */
-    public static final int MAX_LSB_NAME_LEN = 60;
-
-/*the max length of user group*/
-    public static final int MAX_LSB_UG_NAME_LEN = 512;
-
-/*Maximum levels that a user group hierachy can have*/
-    public static final int MAX_LSB_UG_HIERDEPTH = 64;
-
-/* the max length of command */
-    public static final int MAX_CMD_DESC_LEN = 512;
-
-/* for the local cluster */
-    public static final int MAX_CALENDARS = 256;
-
-/* max num of user equivalent entries */
-    public static final int MAX_USER_EQUIVALENT = 128;
-
-/* max num of user mapping entries */
-    public static final int MAX_USER_MAPPING = 128;
-
-/* max external msg's description length */
-    public static final int MAXDESCLEN = 20 * 512;
-
-/* num of user or host group */
-    public static final int MAX_GROUPS = 1024;
-
-/*
-*  RFC #725
- */
-
-/* max len. of a filename */
-    public static final int MAXFULLFILENAMELEN = 4096;
-    public static final int MAXFULLPATHNAMELEN = 2 * MAXFULLFILENAMELEN;
-    public static final int FILENAMEPADDING = 128;
-
-    public static final String DEFAULT_MSG_DESC = "no description";
-
-    public static final int MSGSIZE = 4096;
-
-/* RFC #725
-*  extend the MSG size to 4*max filename len
- */
-    public static final int MAXFULLMSGSIZE = 4 * MAXFULLFILENAMELEN;
-
-/* host status (hStatus) bits */
-    /**
-     *  \addtogroup host_status host_status
-     *  The status of the host. It is the bitwise inclusive OR of some of the following:
-     */
-
-    /**
-     * < Ready to accept and run jobs
-     */
-    public static final int HOST_STAT_OK = 0x0;
-
-/* Load is not good enough */
-    public static final int HOST_STAT_BUSY = 0x01;
-    /**
-     * < The host load is greater than a scheduling threshold. In this status, no new job will be scheduled to run on this host.
-     */
-
-/* Run windows are closed */
-    public static final int HOST_STAT_WIND = 0x02;
-    /**
-     * < The host dispatch window is closed. In this status, no new job will be accepted.
-     */
-
-/* Disabled by admin */
-    public static final int HOST_STAT_DISABLED = 0x04;
-    /**
-     * < The host has been disabled by the LSF administrator and will not accept jobs. In this status, no new job will be scheduled to  run on this host.
-     */
-
-/* Lim locked by admin */
-    public static final int HOST_STAT_LOCKED = 0x08;
-    /**< The host is locked by a exclusive task. In this status, no new job will be scheduled to run on this host.*/
-
-    /**
-     * < Great than job limit
-     */
-    public static final int HOST_STAT_FULL = 0x10;
-    /**< The host has reached its job limit. In this status, no new job will be scheduled to run on this host.*/
-
-    /**
-     * < The sbatchd on this host is unreachable.
-     */
-    public static final int HOST_STAT_UNREACH = 0x20;
-
-    /**
-     * < The LIM and sbatchd on this host are unavailable.
-     */
-    public static final int HOST_STAT_UNAVAIL = 0x40;
-
-    /**
-     * < The host does not have an LSF license.
-     */
-    public static final int HOST_STAT_UNLICENSED = 0x80;
-
-    /**
-     * < The host is running an sbatchd but not a LIM.
-     */
-    public static final int HOST_STAT_NO_LIM = 0x100;
-
-    /**
-     * < Running exclusive job
-     */
-    public static final int HOST_STAT_EXCLUSIVE = 0x200;
-
-    /**
-     * < Lim locked by master LIM
-     */
-    public static final int HOST_STAT_LOCKED_MASTER = 0x400;
-
-    /**
-     * < Close a remote lease host. This flag is  used together with HOST_STAT_DISABLED.
-     */
-    public static final int HOST_STAT_REMOTE_DISABLED = 0x800;
-
-    /**
-     * < Close a remote lease host due to the  lease is renewing or terminating.
-     */
-    public static final int HOST_STAT_LEASE_INACTIVE = 0x1000;
-
-/* if LSF_HPC_EXTENTIONS="LSB_HCLOSE_BY_RES" is set in lsf.conf
-*  host will be closed if RES is unavailable.
- */
-
-    /**
-     * < Host is disabled by RES
-     */
-    public static final int HOST_STAT_DISABLED_RES = 0x4000;
-
-/* Kite#29531 a bit set in hData->hStatus
-*  to show whether the host is closed by
-*  admin or closed because RMS is not available.
- */
-
-    /**
-     * < Host is disabled by RMS
-     */
-    public static final int HOST_STAT_DISABLED_RMS = 0x8000;
-
-/* lsf70 project scheduling, a removed host from mbatchd move into
-*  a new status HOST_STAT_LOCKED_EGO
- */
-
-    /**
-     * < The host is disabled by EGO
-     */
-    public static final int HOST_STAT_LOCKED_EGO = 0x10000;
-
-    /**
-     * < If none of the above hold, hStatus is set to HOST_STAT_OK to indicate that the host is ready to accept and run jobs.
-     */
-    public static final int HOST_CLOSED_BY_ADMIN = 0x20000;
-
-    /**
-     * < Running cu exclusive job
-     */
-    public static final int HOST_STAT_CU_EXCLUSIVE = 0x40000;
-
-/* host is ok */
-
-    public static boolean LSB_HOST_OK(int status) {
-        return (status == HOST_STAT_OK);
-    }
-
-/* host is busy */
-
-    public static boolean LSB_HOST_BUSY(int status) {
-        return ((status & HOST_STAT_BUSY) != 0);
-    }
-
-/* host is closed */
-
-    public static boolean LSB_HOST_CLOSED(int status) {
-        return ((status & (HOST_STAT_WIND | HOST_STAT_DISABLED | HOST_STAT_LOCKED | HOST_STAT_LOCKED_MASTER | HOST_STAT_FULL | HOST_STAT_CU_EXCLUSIVE | HOST_STAT_EXCLUSIVE | HOST_STAT_LEASE_INACTIVE | HOST_STAT_NO_LIM)) != 0);
-    }
-
-/* host is full */
-
-    public static boolean LSB_HOST_FULL(int status) {
-        return ((status & HOST_STAT_FULL) != 0);
-    }
-
-/* host is unlicensed */
-
-    public static boolean LSB_HOST_UNLICENSED(int status) {
-        return ((status & HOST_STAT_UNLICENSED) != 0);
-    }
-
-/* host is unreach */
-
-    public static boolean LSB_HOST_UNREACH(int status) {
-        return ((status & HOST_STAT_UNREACH) != 0);
-    }
-
-/* host is unavail */
-
-    public static boolean LSB_HOST_UNAVAIL(int status) {
-        return ((status & HOST_STAT_UNAVAIL) != 0);
-    }
-
-
-    /* host busy reason bits */
-    /**
-     *  \addtogroup host_load_BusyReason host_load_BusyReason
-     *  If hStatus is HOST_STAT_BUSY, these indicate the host loadSched or loadStop
-     *  busy reason. If none of the thresholds have been exceeded, the value is
-     *  HOST_BUSY_NOT. Otherwise the value is the bitwise inclusive OR of some of the
-     *  following:
-     */
-
-    /**
-     * < Host not busy
-     */
-    public static final int HOST_BUSY_NOT = 0x000;
-
-    /**
-     * < The 15 second average CPU run queue length is too high.
-     */
-    public static final int HOST_BUSY_R15S = 0x001;
-
-    /**
-     * < The 1 minute average CPU run queue length is too high.
-     */
-    public static final int HOST_BUSY_R1M = 0x002;
-
-    /**
-     * < The 15 minute average CPU run queue length is too high.
-     */
-    public static final int HOST_BUSY_R15M = 0x004;
-
-    /**
-     * < The CPU utilization is too high.
-     */
-    public static final int HOST_BUSY_UT = 0x008;
-
-    /**
-     * < The paging rate is too high.
-     */
-    public static final int HOST_BUSY_PG = 0x010;
-
-    /**
-     * < The I/O rate is too high.
-     */
-    public static final int HOST_BUSY_IO = 0x020;
-
-    /**
-     * < There are too many login sessions.
-     */
-    public static final int HOST_BUSY_LS = 0x040;
-
-    /**
-     * < Host has not been idle long enough.
-     */
-    public static final int HOST_BUSY_IT = 0x080;
-
-    /**
-     * < There is not enough free space in the file  system containing /tmp.
-     */
-    public static final int HOST_BUSY_TMP = 0x100;
-
-    /**
-     * < There is not enough free swap space.
-     */
-    public static final int HOST_BUSY_SWP = 0x200;
-
-    /**
-     * < There is not enough free memory.
-     */
-    public static final int HOST_BUSY_MEM = 0x400;
-
-/* host is busy */
-
-    public static boolean LSB_ISBUSYON(int[] status, int index) {
-        return (((status[(index) / LibLsf.INTEGER_BITS]) & (1 << (index) % LibLsf.INTEGER_BITS)) != 0);
-    }
-
-
-/* queue status (qStatus) bits */
-    /**
-     *  \addtogroup queue_status queue_status
-     *  queue status (qStatus) bits
-     */
-
-    /**
-     * < The queue is open to accept newly submitted jobs.
-     */
-    public static final int QUEUE_STAT_OPEN = 0x01;
-
-    /**
-     * < The queue is actively dispatching jobs. The queue can be inactivated and  reactivated by the LSF administrator using  \ref lsb_queuecontrol. The queue will also be inactivated when its run or dispatch window  is closed. In this case it cannot be reactivated manually; it will be reactivated by the LSF system when its run and dispatch windows reopen.
-     */
-    public static final int QUEUE_STAT_ACTIVE = 0x02;
-
-    /**
-     * < The queue run and dispatch windows are open. The initial state of a queue at LSF boot time is open and either active or inactive, depending on its run and dispatch windows.
-     */
-    public static final int QUEUE_STAT_RUN = 0x04;
-
-    /**
-     * < Remote queue rejecting jobs.
-     */
-    public static final int QUEUE_STAT_NOPERM = 0x08;
-
-    /**
-     * < Remote queue status is disconnected.
-     */
-    public static final int QUEUE_STAT_DISC = 0x10;
-
-    /**
-     * < Queue run windows are closed.
-     */
-    public static final int QUEUE_STAT_RUNWIN_CLOSE = 0x20;
-
-/* queue attribute (QAttrib) bits */
-    /**
-     *  \addtogroup queue_attribute queue_attribute
-     *  queue attribute (QAttrib) bits.
-     */
-
-    /**
-     * < This queue accepts jobs which request exclusive execution.
-     */
-    public static final int Q_ATTRIB_EXCLUSIVE = 0x01;
-
-    /**
-     * < This queue is a default LSF queue.
-     */
-    public static final int Q_ATTRIB_DEFAULT = 0x02;
-
-    /**
-     * < This queue uses the FAIRSHARE scheduling policy. The user shares  are given in userShares.
-     */
-    public static final int Q_ATTRIB_FAIRSHARE = 0x04;
-
-    /**
-     * < This queue uses the PREEMPTIVE scheduling policy.
-     */
-    public static final int Q_ATTRIB_PREEMPTIVE = 0x08;
-
-    /**
-     * < This is an NQS forward queue. The target NQS queues are given in nqsQueues. For NQS forward queues, the hostList, procJobLimit, windows, mig and windowsD fields are meaningless.
-     */
-    public static final int Q_ATTRIB_NQS = 0x10;
-
-    /**
-     * < This queue can receive jobs from other clusters
-     */
-    public static final int Q_ATTRIB_RECEIVE = 0x20;
-
-    /**
-     * < This queue uses a preemptable scheduling policy.
-     */
-    public static final int Q_ATTRIB_PREEMPTABLE = 0x40;
-
-    /**
-     * < This queue uses a backfilling policy.
-     */
-    public static final int Q_ATTRIB_BACKFILL = 0x80;
-
-    /**
-     * < This queue uses a host preference policy.
-     */
-    public static final int Q_ATTRIB_HOST_PREFER = 0x100;
-
-    /**
-     * < This queue can't preempt any other another queue.
-     */
-    public static final int Q_ATTRIB_NONPREEMPTIVE = 0x200;
-
-    /**
-     * < This queue can't be preempted from any queue.
-     */
-    public static final int Q_ATTRIB_NONPREEMPTABLE = 0x400;
-
-    /**
-     * < This queue does not accept batch interactive jobs.
-     */
-    public static final int Q_ATTRIB_NO_INTERACTIVE = 0x800;
-
-    /**
-     * < This queue only accepts batch interactive jobs.
-     */
-    public static final int Q_ATTRIB_ONLY_INTERACTIVE = 0x1000;
-
-    /**
-     * < No host type related resource name specified in resource requirement.
-     */
-    public static final int Q_ATTRIB_NO_HOST_TYPE = 0x2000;
-
-    /**
-     * < This queue disables deadline constrained resource scheduling.
-     */
-    public static final int Q_ATTRIB_IGNORE_DEADLINE = 0x4000;
-
-    /**
-     * < Jobs may run as chkpntable.
-     */
-    public static final int Q_ATTRIB_CHKPNT = 0x8000;
-
-    /**
-     * < Jobs may run as rerunnable.
-     */
-    public static final int Q_ATTRIB_RERUNNABLE = 0x10000;
-
-    /**
-     * < Excluding remote jobs when local jobs are present in the queue.
-     */
-    public static final int Q_ATTRIB_EXCL_RMTJOB = 0x20000;
-
-    /**
-     * < Turn on a multicluster fast scheduling policy.
-     */
-    public static final int Q_ATTRIB_MC_FAST_SCHEDULE = 0x40000;
-
-    /**
-     * < Push interactive jobs in front of other jobs in queue.
-     */
-    public static final int Q_ATTRIB_ENQUE_INTERACTIVE_AHEAD = 0x80000;
-
-/* Only one of the following four flags could be TRUE. By default, the queue
-*  is a local queue only(none of them is set.)
-*      0x100000 - 0xf00000 is used for MC attribute
- */
-
-
-    /**
-     * < Flags used by MultiCluster.
-     */
-    public static final int Q_MC_FLAG = 0xf00000;
-
-    /**
-     * < Lease and local.
-     */
-    public static final int Q_ATTRIB_LEASE_LOCAL = 0x100000;
-
-    /**
-     * < Lease only; no local.
-     */
-    public static final int Q_ATTRIB_LEASE_ONLY = 0x200000;
-
-    /**
-     * < Remote batch and local.
-     */
-    public static final int Q_ATTRIB_RMT_BATCH_LOCAL = 0x300000;
-
-    /**
-     * < Remote batch only.
-     */
-    public static final int Q_ATTRIB_RMT_BATCH_ONLY = 0x400000;
-
-
-    /**
-     * < Memory reservation.
-     */
-    public static final int Q_ATTRIB_RESOURCE_RESERVE = 0x1000000;
-
-    /**
-     * < Cross-queue fairshare.
-     */
-    public static final int Q_ATTRIB_FS_DISPATCH_ORDER_QUEUE = 0x2000000;
-
-    /**
-     * < Batch queue/partition
-     */
-    public static final int Q_ATTRIB_BATCH = 0x4000000;
-
-    /**
-     * < Online partition
-     */
-    public static final int Q_ATTRIB_ONLINE = 0x8000000;
-
-    /**
-     * < Interruptible backfill
-     */
-    public static final int Q_ATTRIB_INTERRUPTIBLE_BACKFILL = 0x10000000;
-
-    /**
-     * < Absolute Priority scheduling (APS) value.
-     */
-    public static final int Q_ATTRIB_APS = 0x20000000;
-
-    /**
-     * < No queue with RESOURCE_RESERVE or SLOT_RESERVE has higher priority than this queue.
-     */
-    public static final int Q_ATTRIB_NO_HIGHER_RESERVE = 0x40000000;
-
-    /**
-     * < No host valid
-     */
-    public static final int Q_ATTRIB_NO_HOST_VALID = 0x80000000;
-
-
-/* macros to check queue near real time attributes */
-
-    public static int IS_ONLINE_QUEUE(queueInfoEnt Q) {
-        return (Q.qAttrib & Q_ATTRIB_ONLINE);
-    }
-
-    public static int IS_BATCH_QUEUE(queueInfoEnt Q) {
-        return (Q.qAttrib & Q_ATTRIB_BATCH);
-    }
-
-/* macros to check queue remote attributes */
-
-    public static boolean IS_LEASE_LOCAL_QUEUE(queueInfoEnt Q) {
-        return ((Q.qAttrib & Q_MC_FLAG) == Q_ATTRIB_LEASE_LOCAL);
-    }
-
-    public static boolean IS_LEASE_ONLY_QUEUE(queueInfoEnt Q) {
-        return ((Q.qAttrib & Q_MC_FLAG) == Q_ATTRIB_LEASE_ONLY);
-    }
-
-    public static boolean IS_RMT_BATCH_LOCAL_QUEUE(queueInfoEnt Q) {
-        return ((Q.qAttrib & Q_MC_FLAG) == Q_ATTRIB_RMT_BATCH_LOCAL);
-    }
-
-    public static boolean IS_RMT_BATCH_ONLY_QUEUE(queueInfoEnt Q) {
-        return ((Q.qAttrib & Q_MC_FLAG) == Q_ATTRIB_RMT_BATCH_ONLY);
-    }
-
-    public static boolean IS_LEASE_QUEUE(queueInfoEnt Q) {
-        return (IS_LEASE_LOCAL_QUEUE(Q) || IS_LEASE_ONLY_QUEUE(Q));
-    }
-
-    public static boolean IS_RMT_BATCH_QUEUE(queueInfoEnt Q) {
-        return (IS_RMT_BATCH_LOCAL_QUEUE(Q) || IS_RMT_BATCH_ONLY_QUEUE(Q));
-    }
-
-    public static boolean IS_MC_QUEUE(queueInfoEnt Q) {
-        return (IS_LEASE_QUEUE(Q) || IS_RMT_BATCH_QUEUE(Q));
-    }
-
-    public static int SET_LEASE_LOCAL_QUEUE(queueInfoEnt Q) {
-        return (Q.qAttrib |= Q_ATTRIB_LEASE_LOCAL);
-    }
-
-    public static int SET_LEASE_ONLY_QUEUE(queueInfoEnt Q) {
-        return (Q.qAttrib |= Q_ATTRIB_LEASE_ONLY);
-    }
-
-    public static int SET_RMT_BATCH_LOCAL_QUEUE(queueInfoEnt Q) {
-        return (Q.qAttrib |= Q_ATTRIB_RMT_BATCH_LOCAL);
-    }
-
-    public static int SET_RMT_BATCH_ONLY_QUEUE(queueInfoEnt Q) {
-        return (Q.qAttrib |= Q_ATTRIB_RMT_BATCH_ONLY);
-    }
-
-    public static int CLR_MC_QUEUE_FLAG(queueInfoEnt Q) {
-        return (Q.qAttrib &= ~Q_MC_FLAG);
-    }
-
-
-/* the bits 0x10000000 to 0x80000000 is reserved for internal use (daemons.h) */
-
-/* exit code for mbatchd */
-    public static final int MASTER_NULL = 200;
-    public static final int MASTER_RESIGN = 201;
-    public static final int MASTER_RECONFIG = 202;
-    public static final int MASTER_FATAL = 203;
-    public static final int MASTER_MEM = 204;
-    public static final int MASTER_CONF = 205;
-    public static final int MASTER_EVENT = 206;
-    public static final int MASTER_DISABLE = 207;
-
-/* sub type of mbatchd die */
-    public static final int MBD_USER_CMD = 1;
-    public static final int MBD_NON_USER_CMD = 2;
-
-    /**
-     *  \addtogroup job_states job_states
-     *  define job states
-     */
-
-    /**
-     * < State null
-     */
-    public static final int JOB_STAT_NULL = 0x00;
-
-    /**
-     * < The job is pending, i.e., it  has not been dispatched yet.
-     */
-    public static final int JOB_STAT_PEND = 0x01;
-
-    /**
-     * < The pending job was suspended by its owner or the LSF system administrator.
-     */
-    public static final int JOB_STAT_PSUSP = 0x02;
-
-    /**
-     * < The job is running.
-     */
-    public static final int JOB_STAT_RUN = 0x04;
-
-    /**
-     * < The running job was suspended  by the system because an execution  host was overloaded or the queue run  window closed. (see \ref lsb_queueinfo,  \ref lsb_hostinfo, and lsb.queues.)
-     */
-    public static final int JOB_STAT_SSUSP = 0x08;
-
-    /**
-     * < The running job was suspended by its owner or the LSF systemadministrator.
-     */
-    public static final int JOB_STAT_USUSP = 0x10;
-
-    /**
-     * < The job has terminated with a non-zero status - it may have been aborted due  to an error in its execution, or  killed by its owner or by the  LSF system administrator.
-     */
-    public static final int JOB_STAT_EXIT = 0x20;
-
-    /**
-     * < The job has terminated with status 0.
-     */
-    public static final int JOB_STAT_DONE = 0x40;
-
-    /**
-     * < Post job process done successfully
-     */
-    public static final int JOB_STAT_PDONE = (0x80);
-
-    /**
-     * < Post job process has error
-     */
-    public static final int JOB_STAT_PERR = (0x100);
-
-    /**
-     * < Chunk job waiting its turn to exec
-     */
-    public static final int JOB_STAT_WAIT = (0x200);
-
-    /**
-     * < The slave batch daemon (sbatchd) on  the host on which the job is processed  has lost contact with the master batch  daemon (mbatchd).
-     */
-    public static final int JOB_STAT_UNKWN = 0x10000;
-
-    /**
-     *  \addtogroup event_types event_types
-     *  define statements used by \ref lsb_geteventrec. Events logged in lsb.events file
-     */
-
-    /**
-     * < Submit a new job
-     */
-    public static final int EVENT_JOB_NEW = 1;
-
-    /**
-     * < mbatchd is trying to start a job
-     */
-    public static final int EVENT_JOB_START = 2;
-
-    /**
-     * < Job's status change event
-     */
-    public static final int EVENT_JOB_STATUS = 3;
-
-    /**
-     * < Job switched to another queue
-     */
-    public static final int EVENT_JOB_SWITCH = 4;
-
-    /**
-     * < Move a pending job's position within a queue
-     */
-    public static final int EVENT_JOB_MOVE = 5;
-
-    /**
-     * < Queue's status changed by Platform LSF  administrator (bhc operation)
-     */
-    public static final int EVENT_QUEUE_CTRL = 6;
-
-    /**
-     * < Host status changed by Platform LSF  administrator (bhc operation)
-     */
-    public static final int EVENT_HOST_CTRL = 7;
-
-    /**
-     * < Log parameters before mbatchd died
-     */
-    public static final int EVENT_MBD_DIE = 8;
-
-    /**
-     * < Action that was not taken because the  mbatchd was unable to contact the sbatchd on the job's execution host
-     */
-    public static final int EVENT_MBD_UNFULFILL = 9;
-
-    /**
-     * < Job finished (Logged in lsb.acct)
-     */
-    public static final int EVENT_JOB_FINISH = 10;
-
-    /**
-     * < The complete list of load indices, including external load indices
-     */
-    public static final int EVENT_LOAD_INDEX = 11;
-
-    /**
-     * < Job checkpointed.
-     */
-    public static final int EVENT_CHKPNT = 12;
-
-    /**
-     * < Job migrated
-     */
-    public static final int EVENT_MIG = 13;
-
-    /**
-     * < The pre-execution command started
-     */
-    public static final int EVENT_PRE_EXEC_START = 14;
-
-    /**
-     * < New mbatchd start event
-     */
-    public static final int EVENT_MBD_START = 15;
-
-    /**
-     * < The job has been routed to NQS
-     */
-    public static final int EVENT_JOB_ROUTE = 16;
-
-    /**
-     * < Job modification request
-     */
-    public static final int EVENT_JOB_MODIFY = 17;
-
-    /**
-     * < Signal/delete a job
-     */
-    public static final int EVENT_JOB_SIGNAL = 18;
-
-    /**
-     * < Add new calendar to the system
-     */
-    public static final int EVENT_CAL_NEW = 19;
-
-    /**
-     * < Calendar modified
-     */
-    public static final int EVENT_CAL_MODIFY = 20;
-
-    /**
-     * < Delete a calendar in the system
-     */
-    public static final int EVENT_CAL_DELETE = 21;
-
-    /**
-     * < Job forwarded to another cluster
-     */
-    public static final int EVENT_JOB_FORWARD = 22;
-
-    /**
-     * < Job from a remote cluster dispatched
-     */
-    public static final int EVENT_JOB_ACCEPT = 23;
-
-    /**
-     * < Job status successfully sent to  submission cluster
-     */
-    public static final int EVENT_STATUS_ACK = 24;
-
-    /**
-     * < Job started successfully on the  execution host
-     */
-    public static final int EVENT_JOB_EXECUTE = 25;
-
-    /**
-     * < Send a message to a job
-     */
-    public static final int EVENT_JOB_MSG = 26;
-
-    /**
-     * < The message has been delivered
-     */
-    public static final int EVENT_JOB_MSG_ACK = 27;
-
-    /**
-     * < Job is requeued
-     */
-    public static final int EVENT_JOB_REQUEUE = 28;
-
-    /**
-     * < Submission mbatchd logs this after sending  an occupy request to execution mbatchd
-     */
-    public static final int EVENT_JOB_OCCUPY_REQ = 29;
-
-    /**
-     * < Submission mbatchd logs this event after  all execution mbatchds have vacated the occupied hosts for the job
-     */
-    public static final int EVENT_JOB_VACATED = 30;
-
-    /**
-     * < A signal action on a job has been  initiated or finished
-     */
-    public static final int EVENT_JOB_SIGACT = 32;
-
-    /**
-     * < sbatchd's new job status
-     */
-    public static final int EVENT_SBD_JOB_STATUS = 34;
-
-    /**
-     * < sbatchd accepts job start
-     */
-    public static final int EVENT_JOB_START_ACCEPT = 35;
-
-    /**
-     * < Undelete a calendar in the system
-     */
-    public static final int EVENT_CAL_UNDELETE = 36;
-
-    /**
-     * < Job is cleaned out of the core
-     */
-    public static final int EVENT_JOB_CLEAN = 37;
-
-    /**
-     * < Job exception was detected
-     */
-    public static final int EVENT_JOB_EXCEPTION = 38;
-
-    /**
-     * < Adding a new job group
-     */
-    public static final int EVENT_JGRP_ADD = 39;
-
-    /**
-     * < Modifying a job group
-     */
-    public static final int EVENT_JGRP_MOD = 40;
-
-    /**
-     * < Controlling a job group
-     */
-    public static final int EVENT_JGRP_CTRL = 41;
-
-    /**
-     * < Forcing a job to start on specified  hosts (brun operation)
-     */
-    public static final int EVENT_JOB_FORCE = 42;
-
-    /**
-     * < Switching the event file lsb.events
-     */
-    public static final int EVENT_LOG_SWITCH = 43;
-
-    /**
-     * < Job modification request
-     */
-    public static final int EVENT_JOB_MODIFY2 = 44;
-
-    /**
-     * < Log job group status
-     */
-    public static final int EVENT_JGRP_STATUS = 45;
-
-    /**
-     * < Job attributes have been set
-     */
-    public static final int EVENT_JOB_ATTR_SET = 46;
-
-    /**
-     * < Send an external message to a job
-     */
-    public static final int EVENT_JOB_EXT_MSG = 47;
-
-    /**
-     * < Update data status of a message for a job
-     */
-    public static final int EVENT_JOB_ATTA_DATA = 48;
-
-    /**
-     * < Insert one job to a chunk
-     */
-    public static final int EVENT_JOB_CHUNK = 49;
-
-    /**
-     * < Save unreported sbatchd status
-     */
-    public static final int EVENT_SBD_UNREPORTED_STATUS = 50;
-
-    /**
-     * < Reservation finished
-     */
-    public static final int EVENT_ADRSV_FINISH = 51;
-
-    /**
-     * < Dynamic host group control
-     */
-    public static final int EVENT_HGHOST_CTRL = 52;
-
-    /**
-     * < Saved current CPU allocation on service partition
-     */
-    public static final int EVENT_CPUPROFILE_STATUS = 53;
-
-    /**
-     * < Write out data logging file
-     */
-    public static final int EVENT_DATA_LOGGING = 54;
-
-    /**
-     * < Write job rusage in lsb.stream
-     */
-    public static final int EVENT_JOB_RUN_RUSAGE = 55;
-
-    /**
-     * < Stream closed and new stream opened.
-     */
-    public static final int EVENT_END_OF_STREAM = 56;
-
-    /**
-     * < SLA goal is reavaluated
-     */
-    public static final int EVENT_SLA_RECOMPUTE = 57;
-
-    /**
-     * < Write performance metrics to lsb.stream
-     */
-    public static final int EVENT_METRIC_LOG = 58;
-
-    /**
-     * < Write task finish log to ssched.acct
-     */
-    public static final int EVENT_TASK_FINISH = 59;
-
-    /**
-     * < Resize allocation is made
-     */
-    public static final int EVENT_JOB_RESIZE_NOTIFY_START = 60;
-
-    /**
-     * < Resize notification action initialized
-     */
-    public static final int EVENT_JOB_RESIZE_NOTIFY_ACCEPT = 61;
-
-    /**
-     * < Resize notification action completed
-     */
-    public static final int EVENT_JOB_RESIZE_NOTIFY_DONE = 62;
-
-    /**
-     * < Job resize release request is received
-     */
-    public static final int EVENT_JOB_RESIZE_RELEASE = 63;
-
-    /**
-     * < Job resize cancel request is received
-     */
-    public static final int EVENT_JOB_RESIZE_CANCEL = 64;
-
-    /**
-     * < Job resize event for lsb.acct
-     */
-    public static final int EVENT_JOB_RESIZE = 65;
-
-    /**
-     * < Saved array element's resource consumption  for LSF simulator
-     */
-    public static final int EVENT_JOB_ARRAY_ELEMENT = 66;
-
-    /**
-     * < Saved LSF simulator status
-     */
-    public static final int EVENT_MBD_SIM_STATUS = 67;
-
-/* event kind
- */
-
-    /**
-     * < it is a job related event
-     */
-    public static final int EVENT_JOB_RELATED = 1;
-
-    /**
-     * < it is a non job related event
-     */
-    public static final int EVENT_NON_JOB_RELATED = 0;
-
-    /*
-   *  EXCLUSIVE PENDING REASONS
-   *  a job must stay pending as long as ONE of the exclusive reasons exists
-    */
-
-/* Job Related Reasons (001 - 300)
- */
-    /**
-     * \addtogroup pending_reasons pending_reasons
-     * \brief          Each entry in the table contains one of the following pending reasons
-     */
-
-    /**
-     * < Virtual code; not a reason
-     */
-    public static final int PEND_JOB_REASON = 0;
-
-    /**
-     * < A new job is waiting to be scheduled
-     */
-    public static final int PEND_JOB_NEW = 1;
-
-    /**
-     * < The job is held until its specified start time
-     */
-    public static final int PEND_JOB_START_TIME = 2;
-
-    /**
-     * < The job is waiting for its dependency condition(s) to be satisfied
-     */
-    public static final int PEND_JOB_DEPEND = 3;
-
-    /**
-     * < The dependency condition is invalid or never satisfied
-     */
-    public static final int PEND_JOB_DEP_INVALID = 4;
-
-    /**
-     * < The migrating job is waiting to be rescheduled
-     */
-    public static final int PEND_JOB_MIG = 5;
-
-    /**
-     * < The job's pre-exec command exited with non-zero status
-     */
-    public static final int PEND_JOB_PRE_EXEC = 6;
-
-    /**
-     * < Unable to access jobfile
-     */
-    public static final int PEND_JOB_NO_FILE = 7;
-
-    /**
-     * < Unable to set job's environment variables
-     */
-    public static final int PEND_JOB_ENV = 8;
-
-    /**
-     * < Unable to determine the job's home or working directories
-     */
-    public static final int PEND_JOB_PATHS = 9;
-
-    /**
-     * < Unable to open the job's input and output files
-     */
-    public static final int PEND_JOB_OPEN_FILES = 10;
-
-    /**
-     * < Job execution initialization failed
-     */
-    public static final int PEND_JOB_EXEC_INIT = 11;
-
-    /**
-     * < Unable to copy restarting job's checkpoint files
-     */
-    public static final int PEND_JOB_RESTART_FILE = 12;
-
-    /**
-     * < Scheduling of the job is delayed
-     */
-    public static final int PEND_JOB_DELAY_SCHED = 13;
-
-    /**
-     * < Waiting for the re-scheduling of the job after switching queues
-     */
-    public static final int PEND_JOB_SWITCH = 14;
-
-    /**
-     * < An event is rejected by eeventd due to a syntax error
-     */
-    public static final int PEND_JOB_DEP_REJECT = 15;
-
-    /**
-     * < A JobScheduler feature is not enabled
-     */
-    public static final int PEND_JOB_JS_DISABLED = 16;
-
-    /**
-     * < Failed to get user password
-     */
-    public static final int PEND_JOB_NO_PASSWD = 17;
-
-    /**
-     * < The job is pending due to logon failure
-     */
-    public static final int PEND_JOB_LOGON_FAIL = 18;
-
-    /**
-     * < The job is waiting to be re-scheduled after its parameters have been changed
-     */
-    public static final int PEND_JOB_MODIFY = 19;
-
-    /**
-     * < The job time event is invalid
-     */
-    public static final int PEND_JOB_TIME_INVALID = 20;
-
-    /**
-     * < The job time event has expired
-     */
-    public static final int PEND_TIME_EXPIRED = 21;
-
-    /**
-     * < The job has been requeued
-     */
-    public static final int PEND_JOB_REQUEUED = 23;
-
-    /**
-     * < Waiting for the next time event
-     */
-    public static final int PEND_WAIT_NEXT = 24;
-
-    /**
-     * < The parent group is held
-     */
-    public static final int PEND_JGRP_HOLD = 25;
-
-    /**
-     * < The parent group is inactive
-     */
-    public static final int PEND_JGRP_INACT = 26;
-
-    /**
-     * < The group is waiting for scheduling
-     */
-    public static final int PEND_JGRP_WAIT = 27;
-
-    /**
-     * < The remote cluster(s) are unreachable
-     */
-    public static final int PEND_JOB_RCLUS_UNREACH = 28;
-
-    /**
-     * < SNDJOBS_TO queue rejected by remote  clusters
-     */
-    public static final int PEND_JOB_QUE_REJECT = 29;
-
-    /**
-     * < Waiting for new remote scheduling  session
-     */
-    public static final int PEND_JOB_RSCHED_START = 30;
-
-    /**
-     * < Waiting for allocation reply from remote clusters
-     */
-    public static final int PEND_JOB_RSCHED_ALLOC = 31;
-
-    /**
-     * < The job is forwarded to a remote cluster
-     */
-    public static final int PEND_JOB_FORWARDED = 32;
-
-    /**
-     * < The job running remotely is in a zombie state
-     */
-    public static final int PEND_JOB_RMT_ZOMBIE = 33;
-
-    /**
-     * < Job's enforced user group share account not selected
-     */
-    public static final int PEND_JOB_ENFUGRP = 34;
-
-    /**
-     * < The system is unable to schedule the job
-     */
-    public static final int PEND_SYS_UNABLE = 35;
-
-    /**
-     * < The parent group has just been released
-     */
-    public static final int PEND_JGRP_RELEASE = 36;
-
-    /**
-     * < The job has run since group active
-     */
-    public static final int PEND_HAS_RUN = 37;
-
-    /**
-     * < The job has reached its running element limit
-     */
-    public static final int PEND_JOB_ARRAY_JLIMIT = 38;
-
-    /**
-     * < Checkpoint directory is invalid
-     */
-    public static final int PEND_CHKPNT_DIR = 39;
-
-    /**
-     * < The first job in the chunk failed  (all other jobs in the chunk are set to PEND)
-     */
-    public static final int PEND_CHUNK_FAIL = 40;
-
-    /**
-     * < Optimum number of running jobs for SLA has been reached
-     */
-    public static final int PEND_JOB_SLA_MET = 41;
-
-    /**
-     * < Specified application profile does not exist
-     */
-    public static final int PEND_JOB_APP_NOEXIST = 42;
-
-    /**
-     * < Job no longer satisfies application  PROCLIMIT configuration
-     */
-    public static final int PEND_APP_PROCLIMIT = 43;
-
-    /**
-     * < No hosts for the job from EGO
-     */
-    public static final int PEND_EGO_NO_HOSTS = 44;
-
-    /**
-     * < The specified job group has reached its job limit
-     */
-    public static final int PEND_JGRP_JLIMIT = 45;
-
-    /**
-     * < Job pre-exec retry limit
-     */
-    public static final int PEND_PREEXEC_LIMIT = 46;
-
-    /**
-     * < Job re-queue limit
-     */
-    public static final int PEND_REQUEUE_LIMIT = 47;
-
-    /**
-     * < Job has bad res req
-     */
-    public static final int PEND_BAD_RESREQ = 48;
-
-    /**
-     * < Job's reservation is inactive
-     */
-    public static final int PEND_RSV_INACTIVE = 49;
-
-    /**
-     * < Job was in PSUSP with bad res req, after successful bmod  waiting for the user to bresume
-     */
-    public static final int PEND_WAITING_RESUME = 50;
-
-    /**
-     * < Job slot request cannot satisfy compound  resource requirement
-     */
-    public static final int PEND_SLOT_COMPOUND = 51;
-
-/*
-*  Queue and System Related Reasons (301 - 599)
- */
-
-    /**
-     * < The queue is inactivated by the administrator
-     */
-    public static final int PEND_QUE_INACT = 301;
-
-    /**
-     * < The queue is inactivated by its time windows
-     */
-    public static final int PEND_QUE_WINDOW = 302;
-
-    /**
-     * < The queue has reached its job slot limit
-     */
-    public static final int PEND_QUE_JOB_LIMIT = 303;
-
-    /**
-     * < The user has reached the per-user job slot limit of the queue
-     */
-    public static final int PEND_QUE_USR_JLIMIT = 304;
-
-    /**
-     * < Not enough per-user job slots of the queue for the parallel job
-     */
-    public static final int PEND_QUE_USR_PJLIMIT = 305;
-
-    /**
-     * < The queue's pre-exec command exited with non-zero status
-     */
-    public static final int PEND_QUE_PRE_FAIL = 306;
-
-    /**
-     * < The job was not accepted by the NQS host,  Attempt again later
-     */
-    public static final int PEND_NQS_RETRY = 307;
-
-    /**
-     * < Unable to send the job to an NQS host
-     */
-    public static final int PEND_NQS_REASONS = 308;
-
-    /**
-     * < Unable to contact NQS host
-     */
-    public static final int PEND_NQS_FUN_OFF = 309;
-
-    /**
-     * < The system is not ready for scheduling after reconfiguration
-     */
-    public static final int PEND_SYS_NOT_READY = 310;
-
-    /**
-     * < The requeued job is waiting for rescheduling
-     */
-    public static final int PEND_SBD_JOB_REQUEUE = 311;
-
-    /**
-     * < Not enough hosts to meet the job's spanning requirement
-     */
-    public static final int PEND_JOB_SPREAD_TASK = 312;
-
-    /**
-     * < Not enough hosts to meet the queue's spanning requirement
-     */
-    public static final int PEND_QUE_SPREAD_TASK = 313;
-
-    /**
-     * < The queue has not enough job slots for the parallel job
-     */
-    public static final int PEND_QUE_PJOB_LIMIT = 314;
-
-    /**
-     * < The job will not finish before queue's run window is closed
-     */
-    public static final int PEND_QUE_WINDOW_WILL_CLOSE = 315;
-
-    /**
-     * < Job no longer satisfies queue  PROCLIMIT configuration
-     */
-    public static final int PEND_QUE_PROCLIMIT = 316;
-
-    /**
-     * < Job requeued due to plug-in failure
-     */
-    public static final int PEND_SBD_PLUGIN = 317;
-
-    /**
-     * < Waiting for lease signing
-     */
-    public static final int PEND_WAIT_SIGN_LEASE = 318;
-
-/* waitint for scheduling for SLOT_SHARE*/
-    public static final int PEND_WAIT_SLOT_SHARE = 319;
-
-/*
-*  User Related Reasons (601 - 800)
- */
-
-    /**
-     * < The job slot limit is reached
-     */
-    public static final int PEND_USER_JOB_LIMIT = 601;
-
-    /**
-     * < A user group has reached its job slot limit
-     */
-    public static final int PEND_UGRP_JOB_LIMIT = 602;
-
-    /**
-     * < The job slot limit for the parallel job is reached
-     */
-    public static final int PEND_USER_PJOB_LIMIT = 603;
-
-    /**
-     * < A user group has reached its job slot limit for the parallel job
-     */
-    public static final int PEND_UGRP_PJOB_LIMIT = 604;
-
-    /**
-     * < Waiting for scheduling after resumed by user
-     */
-    public static final int PEND_USER_RESUME = 605;
-
-    /**
-     * < The job was suspended by the user while pending
-     */
-    public static final int PEND_USER_STOP = 607;
-
-    /**
-     * < Unable to determine user account for execution
-     */
-    public static final int PEND_NO_MAPPING = 608;
-
-    /**
-     * < The user has no permission to run the job on remote host/cluster
-     */
-    public static final int PEND_RMT_PERMISSION = 609;
-
-    /**
-     * < The job was suspended by LSF admin or root while pending
-     */
-    public static final int PEND_ADMIN_STOP = 610;
-
-    /**
-     * < The requested label is not valid
-     */
-    public static final int PEND_MLS_INVALID = 611;
-
-    /**
-     * < The requested label is above user allowed range
-     */
-    public static final int PEND_MLS_CLEARANCE = 612;
-
-    /**
-     * < The requested label rejected by /etc/rhost.conf
-     */
-    public static final int PEND_MLS_RHOST = 613;
-
-    /**
-     * < The requested label does not dominate current label
-     */
-    public static final int PEND_MLS_DOMINATE = 614;
-
-    /**
-     * < The requested label problem
-     */
-    public static final int PEND_MLS_FATAL = 615;
-
-    /**
-     * < LSF internally bstopped a pending job
-     */
-    public static final int PEND_INTERNAL_STOP = 616;
-
-/*
-*  NON-EXCLUSIVE PENDING REASONS
-*  A job may still start even though non-exclusive reasons exist.
- */
-
-/*
-*  Host(sbatchd)-Job Related Reasons (1001 - 1300)
- */
-
-    /**
-     * < The job's resource requirements not satisfied
-     */
-    public static final int PEND_HOST_RES_REQ = 1001;
-
-    /**
-     * < The job's requirement for exclusive execution not satisfied
-     */
-    public static final int PEND_HOST_NONEXCLUSIVE = 1002;
-
-    /**
-     * < Higher or equal priority jobs already suspended by system
-     */
-    public static final int PEND_HOST_JOB_SSUSP = 1003;
-
-    /**
-     * < The job failed to compete with other jobs on host partition
-     */
-    public static final int PEND_HOST_PART_PRIO = 1004;
-
-    /**
-     * < Unable to get the PID of the restarting job
-     */
-    public static final int PEND_SBD_GETPID = 1005;
-
-    /**
-     * < Unable to lock the host for exclusively executing the job
-     */
-    public static final int PEND_SBD_LOCK = 1006;
-
-    /**
-     * < Cleaning up zombie job
-     */
-    public static final int PEND_SBD_ZOMBIE = 1007;
-
-    /**
-     * < Can't run jobs submitted by root.  The job is rejected by the sbatchd
-     */
-    public static final int PEND_SBD_ROOT = 1008;
-
-    /**
-     * < Job can't finish on the host before queue's run window is closed
-     */
-    public static final int PEND_HOST_WIN_WILL_CLOSE = 1009;
-
-    /**
-     * < Job can't finish on the host before job's termination deadline
-     */
-    public static final int PEND_HOST_MISS_DEADLINE = 1010;
-
-    /**
-     * < The specified first execution host is  not eligible for this job at this time
-     */
-    public static final int PEND_FIRST_HOST_INELIGIBLE = 1011;
-
-    /**
-     * < Exclusive job reserves slots on host
-     */
-    public static final int PEND_HOST_EXCLUSIVE_RESERVE = 1012;
-
-    /**
-     * < Resized shadow job  or non-first resReq of a compound resReq job try to reuse the first execution host
-     */
-    public static final int PEND_FIRST_HOST_REUSE = 1013;
-/*
-*  Host Related Reasons (1301 - 1600)
- */
-
-    /**
-     * < The host is closed by the LSF administrator
-     */
-    public static final int PEND_HOST_DISABLED = 1301;
-
-    /**
-     * < The host is locked by the LSF administrator
-     */
-    public static final int PEND_HOST_LOCKED = 1302;
-
-    /**
-     * < Not enough job slots for the parallel job
-     */
-    public static final int PEND_HOST_LESS_SLOTS = 1303;
-
-    /**
-     * < Dispatch windows are closed
-     */
-    public static final int PEND_HOST_WINDOW = 1304;
-
-    /**
-     * < The job slot limit reached
-     */
-    public static final int PEND_HOST_JOB_LIMIT = 1305;
-
-    /**
-     * < The queue's per-CPU job slot limit is reached
-     */
-    public static final int PEND_QUE_PROC_JLIMIT = 1306;
-
-    /**
-     * < The queue's per-host job slot limit is reached
-     */
-    public static final int PEND_QUE_HOST_JLIMIT = 1307;
-
-    /**
-     * < The user's per-CPU job slot limit is reached
-     */
-    public static final int PEND_USER_PROC_JLIMIT = 1308;
-
-    /**
-     * < The host's per-user job slot limit is reached
-     */
-    public static final int PEND_HOST_USR_JLIMIT = 1309;
-
-    /**
-     * < Not a member of the queue
-     */
-    public static final int PEND_HOST_QUE_MEMB = 1310;
-
-    /**
-     * < Not a user-specified host
-     */
-    public static final int PEND_HOST_USR_SPEC = 1311;
-
-    /**
-     * < The user has no access to the host partition
-     */
-    public static final int PEND_HOST_PART_USER = 1312;
-
-    /**
-     * < There is no such user account
-     */
-    public static final int PEND_HOST_NO_USER = 1313;
-
-    /**
-     * < Just started a job recently
-     */
-    public static final int PEND_HOST_ACCPT_ONE = 1314;
-
-    /**
-     * < Load info unavailable
-     */
-    public static final int PEND_LOAD_UNAVAIL = 1315;
-
-    /**
-     * < The LIM is unreachable by the sbatchd
-     */
-    public static final int PEND_HOST_NO_LIM = 1316;
-
-    /**
-     * < The host does not have a valid LSF software license
-     */
-    public static final int PEND_HOST_UNLICENSED = 1317;
-
-    /**
-     * < The queue's resource requirements are not satisfied
-     */
-    public static final int PEND_HOST_QUE_RESREQ = 1318;
-
-    /**
-     * < The submission host type is not the same
-     */
-    public static final int PEND_HOST_SCHED_TYPE = 1319;
-
-    /**
-     * < There are not enough processors to meet the job's spanning requirement.  The job level locality is unsatisfied.
-     */
-    public static final int PEND_JOB_NO_SPAN = 1320;
-
-    /**
-     * < There are not enough processors to meet the queue's spanning requirement.  The queue level locality is unsatisfied.
-     */
-    public static final int PEND_QUE_NO_SPAN = 1321;
-
-    /**
-     * < An exclusive job is running
-     */
-    public static final int PEND_HOST_EXCLUSIVE = 1322;
-
-    /**
-     * < Job Scheduler is disabled on the host.  It is not licensed to accept repetitive jobs.
-     */
-    public static final int PEND_HOST_JS_DISABLED = 1323;
-
-    /**
-     * < The user group's per-CPU job slot limit is reached
-     */
-    public static final int PEND_UGRP_PROC_JLIMIT = 1324;
-
-    /**
-     * < Incorrect host, group or cluster name
-     */
-    public static final int PEND_BAD_HOST = 1325;
-
-    /**
-     * < Host is not used by the queue
-     */
-    public static final int PEND_QUEUE_HOST = 1326;
-
-    /**
-     * < Host is locked by master LIM
-     */
-    public static final int PEND_HOST_LOCKED_MASTER = 1327;
-
-    /**
-     * < Not enough reserved job slots at this time for specified reservation ID
-     */
-    public static final int PEND_HOST_LESS_RSVSLOTS = 1328;
-
-    /**
-     * < Not enough slots or resources for whole duration of the job
-     */
-    public static final int PEND_HOST_LESS_DURATION = 1329;
-
-    /**
-     * < Specified reservation has expired or has been deleted
-     */
-    public static final int PEND_HOST_NO_RSVID = 1330;
-
-    /**
-     * < The host is closed due to lease is inactive
-     */
-    public static final int PEND_HOST_LEASE_INACTIVE = 1331;
-
-    /**
-     * < Not enough job slot(s) while advance reservation is active
-     */
-    public static final int PEND_HOST_ADRSV_ACTIVE = 1332;
-
-    /**
-     * < This queue is not configured to send jobs to the cluster specified in the advance
-     */
-    public static final int PEND_QUE_RSVID_NOMATCH = 1333;
-
-    /**
-     * < Individual host based reasons
-     */
-    public static final int PEND_HOST_GENERAL = 1334;
-
-    /**
-     * < Host does not belong to the specified  advance reservation
-     */
-    public static final int PEND_HOST_RSV = 1335;
-
-    /**
-     * < Host does not belong to a compute unit  of the required type
-     */
-    public static final int PEND_HOST_NOT_CU = 1336;
-
-    /**
-     * < A compute unit containing the host is  used exclusively
-     */
-    public static final int PEND_HOST_CU_EXCL = 1337;
-
-    /**
-     * < CU-level excl. job cannot start since CU  is occupied
-     */
-    public static final int PEND_HOST_CU_OCCUPIED = 1338;
-
-    /**
-     * < Insufficiently many usable slots on the  host's compute unit
-     */
-    public static final int PEND_HOST_USABLE_CU = 1339;
-
-    /**
-     * < No first execution compute unit satisfies CU 'usablepercu' requirement.
-     */
-    public static final int PEND_JOB_FIRST_CU = 1340;
-
-    /**
-     * < A CU containing the host is reserved  exclusively
-     */
-    public static final int PEND_HOST_CU_EXCL_RSV = 1341;
-
-    /**
-     * < Maxcus cannot be satisfied
-     */
-    public static final int PEND_JOB_CU_MAXCUS = 1342;
-
-    /**
-     * < Balance cannot be satisfied
-     */
-    public static final int PEND_JOB_CU_BALANCE = 1343;
-
-    /**
-     * < Cu not supported on toplib integration hosts
-     */
-    public static final int PEND_CU_TOPLIB_HOST = 1344;
-
-/*
-*  sbatchd Related Reasons (1601 - 1900)
- */
-
-    /**
-     * < Cannot reach sbatchd
-     */
-    public static final int PEND_SBD_UNREACH = 1601;
-
-    /**
-     * < Number of jobs exceed quota
-     */
-    public static final int PEND_SBD_JOB_QUOTA = 1602;
-
-    /**
-     * < The job failed in talking to the server to start the job
-     */
-    public static final int PEND_JOB_START_FAIL = 1603;
-
-    /**
-     * < Failed in receiving the reply from the server when starting the job
-     */
-    public static final int PEND_JOB_START_UNKNWN = 1604;
-
-    /**
-     * < Unable to allocate memory to run job.  There is no memory on the sbatchd.
-     */
-    public static final int PEND_SBD_NO_MEM = 1605;
-
-    /**
-     * < Unable to fork process to run the job.  There are no more processes on the sbatchd.
-     */
-    public static final int PEND_SBD_NO_PROCESS = 1606;
-
-    /**
-     * < Unable to communicate with the job process
-     */
-    public static final int PEND_SBD_SOCKETPAIR = 1607;
-
-    /**
-     * < The slave batch server failed to accept the job
-     */
-    public static final int PEND_SBD_JOB_ACCEPT = 1608;
-
-    /**
-     * < Lease job remote dispatch failed
-     */
-    public static final int PEND_LEASE_JOB_REMOTE_DISPATCH = 1609;
-
-    /**
-     * < Failed to restart job from last checkpoint
-     */
-    public static final int PEND_JOB_RESTART_FAIL = 1610;
-/*
-*  Load Related Reasons (2001 - 2300)
- */
-
-    /**
-     * < The load threshold is reached
-     */
-    public static final int PEND_HOST_LOAD = 2001;
-
-/*
-*  Queue Resource Reservation Related Reasons (2301 - 2600)
- */
-
-    /**
-     * < The queue's requirements for resource  reservation are not satisfied.
-     */
-    public static final int PEND_HOST_QUE_RUSAGE = 2301;
-
-/*
-*  Jobs Resource Reservation Related Reasons (2601 - 2900)
- */
-
-    /**
-     * < The job's requirements for resource  reservation are not satisfied.
-     */
-    public static final int PEND_HOST_JOB_RUSAGE = 2601;
-
-/*
-*  Remote Forwarding Related Reasons (2901 - 3200)
- */
-
-    /**
-     * < Remote job not recognized by remote cluster, waiting for rescheduling
-     */
-    public static final int PEND_RMT_JOB_FORGOTTEN = 2901;
-
-    /**
-     * < Remote import limit reached, waiting  for rescheduling
-     */
-    public static final int PEND_RMT_IMPT_JOBBKLG = 2902;
-
-    /**
-     * < Remote schedule time reached,  waiting for rescheduling
-     */
-    public static final int PEND_RMT_MAX_RSCHED_TIME = 2903;
-
-    /**
-     * < Remote pre-exec retry limit reached, waiting for rescheduling
-     */
-    public static final int PEND_RMT_MAX_PREEXEC_RETRY = 2904;
-
-    /**
-     * < Remote queue is closed
-     */
-    public static final int PEND_RMT_QUEUE_CLOSED = 2905;
-
-    /**
-     * < Remote queue is inactive
-     */
-    public static final int PEND_RMT_QUEUE_INACTIVE = 2906;
-
-    /**
-     * < Remote queue is congested
-     */
-    public static final int PEND_RMT_QUEUE_CONGESTED = 2907;
-
-    /**
-     * < Remote queue is disconnected
-     */
-    public static final int PEND_RMT_QUEUE_DISCONNECT = 2908;
-
-    /**
-     * < Remote queue is not configured to accept jobs from this cluster
-     */
-    public static final int PEND_RMT_QUEUE_NOPERMISSION = 2909;
-
-    /**
-     * < Job's termination time exceeds the job creation time on remote cluster
-     */
-    public static final int PEND_RMT_BAD_TIME = 2910;
-
-    /**
-     * < Permission denied on the execution cluster
-     */
-    public static final int PEND_RMT_PERMISSIONS = 2911;
-
-    /**
-     * < Job's required number of processors cannot be satisfied on the remote cluster
-     */
-    public static final int PEND_RMT_PROC_NUM = 2912;
-
-    /**
-     * < User is not defined in the fairshare policy of the remote queue
-     */
-    public static final int PEND_RMT_QUEUE_USE = 2913;
-
-    /**
-     * < Remote queue is a non-interactive queue
-     */
-    public static final int PEND_RMT_NO_INTERACTIVE = 2914;
-
-    /**
-     * < Remote queue is an interactive-only queue
-     */
-    public static final int PEND_RMT_ONLY_INTERACTIVE = 2915;
-
-    /**
-     * < Job's required maximum number of processors is less than the minimum number
-     */
-    public static final int PEND_RMT_PROC_LESS = 2916;
-
-    /**
-     * < Job's required resource limit exceeds that of the remote queue
-     */
-    public static final int PEND_RMT_OVER_LIMIT = 2917;
-
-    /**
-     * < Job's resource requirements do not match with those of the remote queue
-     */
-    public static final int PEND_RMT_BAD_RESREQ = 2918;
-
-    /**
-     * < Job failed to be created on the remote cluster
-     */
-    public static final int PEND_RMT_CREATE_JOB = 2919;
-
-    /**
-     * < Job is requeued for rerun on the execution cluster
-     */
-    public static final int PEND_RMT_RERUN = 2920;
-
-    /**
-     * < Job is requeued on the execution cluster due to exit value
-     */
-    public static final int PEND_RMT_EXIT_REQUEUE = 2921;
-
-    /**
-     * < Job was killed and requeued on the execution cluster
-     */
-    public static final int PEND_RMT_REQUEUE = 2922;
-
-    /**
-     * < Job was forwarded to remote cluster
-     */
-    public static final int PEND_RMT_JOB_FORWARDING = 2923;
-
-    /**
-     * < Remote import queue defined for the job in lsb.queues is either not ready or not valid
-     */
-    public static final int PEND_RMT_QUEUE_INVALID = 2924;
-
-    /**
-     * < Remote queue is a non-exclusive queue
-     */
-    public static final int PEND_RMT_QUEUE_NO_EXCLUSIVE = 2925;
-
-    /**
-     * < Job was rejected; submitter does not belong to the specified User Group in the remote cluster or the user group does not exist in the remote cluster
-     */
-    public static final int PEND_RMT_UGROUP_MEMBER = 2926;
-
-    /**
-     * < Remote queue is rerunnable: can not accept interactive jobs
-     */
-    public static final int PEND_RMT_INTERACTIVE_RERUN = 2927;
-
-    /**
-     * < Remote cluster failed in talking to server to start the job
-     */
-    public static final int PEND_RMT_JOB_START_FAIL = 2928;
-
-    /**
-     * < Job was rejected; submitter does not belong to the specified User Group in the remote cluster or the user group does not exist in the remote cluster
-     */
-    public static final int PEND_RMT_FORWARD_FAIL_UGROUP_MEMBER = 2930;
-
-    /**
-     * < Specified remote reservation has expired or has been deleted
-     */
-    public static final int PEND_RMT_HOST_NO_RSVID = 2931;
-
-    /**
-     * < Application profile could not be found in the remote cluster.
-     */
-    public static final int PEND_RMT_APP_NULL = 2932;
-
-    /**
-     * < Job's required RUNLIMIT exceeds  RUNTIME*  JOB_RUNLIMIT_RATIO of the remote cluster.
-     */
-    public static final int PEND_RMT_BAD_RUNLIMIT = 2933;
-
-    /**
-     * < Job's required RUNTIME exceeds the hard runtime limit in the remote queue.
-     */
-    public static final int PEND_RMT_OVER_QUEUE_LIMIT = 2934;
-
-    /**
-     * < Job will be pend when no slots available among remote queues.
-     */
-    public static final int PEND_RMT_WHEN_NO_SLOTS = 2935;
-/* SUSPENDING REASONS */
-
-/*
-*  General Resource Limits Related Reasons ( 3201 - 4800)
- */
-
-    /**
-     * < Resource limit defined on user  or user group has been reached.
-     */
-    public static final int PEND_GENERAL_LIMIT_USER = 3201;
-
-    /**
-     * < Resource (%s) limit defined on queue has been reached.
-     */
-    public static final int PEND_GENERAL_LIMIT_QUEUE = 3501;
-
-    /**
-     * < Resource limit defined on project has been reached.
-     */
-    public static final int PEND_GENERAL_LIMIT_PROJECT = 3801;
-
-    /**
-     * < Resource (%s) limit defined cluster wide has been reached.
-     */
-    public static final int PEND_GENERAL_LIMIT_CLUSTER = 4101;
-
-    /**
-     * < Resource (%s) limit defined on host and/or host group has  been reached.
-     */
-    public static final int PEND_GENERAL_LIMIT_HOST = 4401;
-
-    /**
-     * < JOBS limit defined for the user or user group has been reached.
-     */
-    public static final int PEND_GENERAL_LIMIT_JOBS_USER = 4701;
-
-    /**
-     * < JOBS limit defined for the queue has been reached.
-     */
-    public static final int PEND_GENERAL_LIMIT_JOBS_QUEUE = 4702;
-
-    /**
-     * < JOBS limit defined for the project has been reached.
-     */
-    public static final int PEND_GENERAL_LIMIT_JOBS_PROJECT = 4703;
-
-    /**
-     * < JOBS limit defined cluster-wide has been reached.
-     */
-    public static final int PEND_GENERAL_LIMIT_JOBS_CLUSTER = 4704;
-
-    /**
-     * < JOBS limit defined on host or host group has been reached.
-     */
-    public static final int PEND_GENERAL_LIMIT_JOBS_HOST = 4705;
-
-/* LSF2 Presto RLA-related reasons    (4900 - 4989) */
-
-    /**
-     * < RMS scheduler plugin  internal error.
-     */
-    public static final int PEND_RMS_PLUGIN_INTERNAL = 4900;
-
-    /**
-     * < RLA communication failure.
-     */
-    public static final int PEND_RMS_PLUGIN_RLA_COMM = 4901;
-
-    /**
-     * < RMS is not available.
-     */
-    public static final int PEND_RMS_NOT_AVAILABLE = 4902;
-
-    /**
-     * < Cannot satisfy the topology  requirement.
-     */
-    public static final int PEND_RMS_FAIL_TOPOLOGY = 4903;
-
-    /**
-     * < Cannot allocate an RMS resource.
-     */
-    public static final int PEND_RMS_FAIL_ALLOC = 4904;
-
-    /**
-     * < RMS job with special topology requirements cannot be preemptive or backfill job.
-     */
-    public static final int PEND_RMS_SPECIAL_NO_PREEMPT_BACKFILL = 4905;
-
-    /**
-     * < RMS job with special topology requirements cannot reserve slots.
-     */
-    public static final int PEND_RMS_SPECIAL_NO_RESERVE = 4906;
-
-    /**
-     * < RLA internal error.
-     */
-    public static final int PEND_RMS_RLA_INTERNAL = 4907;
-
-    /**
-     * < Not enough slots for job.  Job with RMS topology requirements cannot reserve slots, be preemptive, or be a backfill job.
-     */
-    public static final int PEND_RMS_NO_SLOTS_SPECIAL = 4908;
-
-    /**
-     * < User account does not exist on the execution host.
-     */
-    public static final int PEND_RMS_RLA_NO_SUCH_USER = 4909;
-
-    /**
-     * < Unknown host and/or partition unavailable.
-     */
-    public static final int PEND_RMS_RLA_NO_SUCH_HOST = 4910;
-
-    /**
-     * < Cannot schedule chunk jobs to RMS hosts.
-     */
-    public static final int PEND_RMS_CHUNKJOB = 4911;
-
-    /**
-     * < RLA protocol mismatch.
-     */
-    public static final int PEND_RLA_PROTOMISMATCH = 4912;
-
-    /**
-     * < Contradictory topology requirements specified.
-     */
-    public static final int PEND_RMS_BAD_TOPOLOGY = 4913;
-
-    /**
-     * < Not enough slots to satisfy mandatory contiguous requirement.
-     */
-    public static final int PEND_RMS_RESREQ_MCONT = 4914;
-
-    /**
-     * < Not enough slots to satisfy RMS ptile requirement.
-     */
-    public static final int PEND_RMS_RESREQ_PTILE = 4915;
-
-    /**
-     * < Not enough slots to satisfy RMS nodes requirement.
-     */
-    public static final int PEND_RMS_RESREQ_NODES = 4916;
-
-    /**
-     * < Cannot satisfy RMS base node requirement.
-     */
-    public static final int PEND_RMS_RESREQ_BASE = 4917;
-
-    /**
-     * < Cannot satisfy RMS rails requirement.
-     */
-    public static final int PEND_RMS_RESREQ_RAILS = 4918;
-
-    /**
-     * < Cannot satisfy RMS railmask requirement.
-     */
-    public static final int PEND_RMS_RESREQ_RAILMASK = 4919;
-
-
-/*
-*  Maui Integration Related Reasons ( 5000 - 5100)
- */
-
-    /**
-     * < Unable to communicate with external Maui scheduler.
-     */
-    public static final int PEND_MAUI_UNREACH = 5000;
-
-    /**
-     * < Job is pending at external Maui scheduler.
-     */
-    public static final int PEND_MAUI_FORWARD = 5001;
-
-    /**
-     * < External Maui scheduler sets detail reason.
-     */
-    public static final int PEND_MAUI_REASON = 5030;
-
-/*
-*  SGI CPUSET Integration Related Reasons ( 5200 - 5299)
- */
-
-    /**
-     * < CPUSET attach failed.  Job requeued
-     */
-    public static final int PEND_CPUSET_ATTACH = 5200;
-
-    /**
-     * < Not a cpuset host
-     */
-    public static final int PEND_CPUSET_NOT_CPUSETHOST = 5201;
-
-    /**
-     * < Topd initialization failed
-     */
-    public static final int PEND_CPUSET_TOPD_INIT = 5202;
-
-    /**
-     * < Topd communication timeout
-     */
-    public static final int PEND_CPUSET_TOPD_TIME_OUT = 5203;
-
-    /**
-     * < Cannot satisfy the cpuset  allocation requirement
-     */
-    public static final int PEND_CPUSET_TOPD_FAIL_ALLOC = 5204;
-
-    /**
-     * < Bad cpuset allocation request
-     */
-    public static final int PEND_CPUSET_TOPD_BAD_REQUEST = 5205;
-
-    /**
-     * < Topd internal error
-     */
-    public static final int PEND_CPUSET_TOPD_INTERNAL = 5206;
-
-    /**
-     * < Cpuset system API failure
-     */
-    public static final int PEND_CPUSET_TOPD_SYSAPI_ERR = 5207;
-
-    /**
-     * < Specified static cpuset does  not exist on the host
-     */
-    public static final int PEND_CPUSET_TOPD_NOSUCH_NAME = 5208;
-
-    /**
-     * < Cpuset is already allocated   for this job
-     */
-    public static final int PEND_CPUSET_TOPD_JOB_EXIST = 5209;
-
-    /**
-     * < Topd malloc failure
-     */
-    public static final int PEND_CPUSET_TOPD_NO_MEMORY = 5210;
-
-    /**
-     * < User account does not exist   on the cpuset host
-     */
-    public static final int PEND_CPUSET_TOPD_INVALID_USER = 5211;
-
-    /**
-     * < User does not have permission   to run job within cpuset
-     */
-    public static final int PEND_CPUSET_TOPD_PERM_DENY = 5212;
-
-    /**
-     * < Topd is not available
-     */
-    public static final int PEND_CPUSET_TOPD_UNREACH = 5213;
-
-    /**
-     * < Topd communication failure
-     */
-    public static final int PEND_CPUSET_TOPD_COMM_ERR = 5214;
-
-
-    /**
-     * < CPUSET scheduler plugin internal error
-     */
-    public static final int PEND_CPUSET_PLUGIN_INTERNAL = 5215;
-
-    /**
-     * < Cannot schedule chunk jobs to cpuset hosts
-     */
-    public static final int PEND_CPUSET_CHUNKJOB = 5216;
-
-    /**
-     * < Can't satisfy CPU_LIST   requirement
-     */
-    public static final int PEND_CPUSET_CPULIST = 5217;
-
-    /**
-     * < Cannot satisfy CPUSET MAX_RADIUS requirement
-     */
-    public static final int PEND_CPUSET_MAXRADIUS = 5218;
-
-/* Bproc integration related reasons (5300 - 5320)
- */
-
-    /**
-     * < Node allocation failed
-     */
-    public static final int PEND_NODE_ALLOC_FAIL = 5300;
-
-/* Eagle pending reasons  (5400 - 5449) */
-
-    /**
-     * < RMS resource is not available
-     */
-    public static final int PEND_RMSRID_UNAVAIL = 5400;
-
-
-    /**
-     * < Not enough free cpus to satisfy job requirements
-     */
-    public static final int PEND_NO_FREE_CPUS = 5450;
-
-    /**
-     * < Topology unknown or recently changed
-     */
-    public static final int PEND_TOPOLOGY_UNKNOWN = 5451;
-
-    /**
-     * < Contradictory topology requirement specified
-     */
-    public static final int PEND_BAD_TOPOLOGY = 5452;
-
-    /**
-     * < RLA communications failure
-     */
-    public static final int PEND_RLA_COMM = 5453;
-
-    /**
-     * < User account does not exist on execution host
-     */
-    public static final int PEND_RLA_NO_SUCH_USER = 5454;
-
-    /**
-     * < RLA internal error
-     */
-    public static final int PEND_RLA_INTERNAL = 5455;
-
-    /**
-     * < Unknown host and/or partition unavailable
-     */
-    public static final int PEND_RLA_NO_SUCH_HOST = 5456;
-
-    /**
-     * < Too few slots for specified topology requirement
-     */
-    public static final int PEND_RESREQ_TOOFEWSLOTS = 5457;
-
-/* PSET pending reasons (5500 - 5549) */
-
-    /**
-     * < PSET scheduler plugin internal error
-     */
-    public static final int PEND_PSET_PLUGIN_INTERNAL = 5500;
-
-    /**
-     * < Cannot satisfy PSET ptile requirement
-     */
-    public static final int PEND_PSET_RESREQ_PTILE = 5501;
-
-    /**
-     * < Cannot satisfy PSET cells requirement
-     */
-    public static final int PEND_PSET_RESREQ_CELLS = 5502;
-
-    /**
-     * < Cannot schedule chunk jobs to PSET hosts
-     */
-    public static final int PEND_PSET_CHUNKJOB = 5503;
-
-    /**
-     * < Host does not support processor set functionality
-     */
-    public static final int PEND_PSET_NOTSUPPORT = 5504;
-
-    /**
-     * < PSET bind failed. Job requeued
-     */
-    public static final int PEND_PSET_BIND_FAIL = 5505;
-
-    /**
-     * < Cannot satisfy PSET CELL_LIST  requirement
-     */
-    public static final int PEND_PSET_RESREQ_CELLLIST = 5506;
-
-
-/* SLURM pending reasons (5550 - 5599) */
-
-    /**
-     * < SLURM scheduler plugin internal error
-     */
-    public static final int PEND_SLURM_PLUGIN_INTERNAL = 5550;
-
-    /**
-     * < Not enough resource to satisfy SLURM nodes requirement
-     */
-    public static final int PEND_SLURM_RESREQ_NODES = 5551;
-
-    /**
-     * < Not enough resource to satisfy SLURM node attributes requirement.
-     */
-    public static final int PEND_SLURM_RESREQ_NODE_ATTR = 5552;
-
-    /**
-     * < Not enough resource to satisfy SLURM exclude requirement.
-     */
-    public static final int PEND_SLURM_RESREQ_EXCLUDE = 5553;
-
-    /**
-     * < Not enough resource to satisfy SLURM nodelist requirement.
-     */
-    public static final int PEND_SLURM_RESREQ_NODELIST = 5554;
-
-    /**
-     * < Not enough resource to satisfy SLURM contiguous requirement.
-     */
-    public static final int PEND_SLURM_RESREQ_CONTIGUOUS = 5555;
-
-    /**
-     * < SLURM allocation is not available. Job requeued.
-     */
-    public static final int PEND_SLURM_ALLOC_UNAVAIL = 5556;
-
-    /**
-     * < Invalid grammar in SLURM constraints option, job will never run.
-     */
-    public static final int PEND_SLURM_RESREQ_BAD_CONSTRAINT = 5557;
-
-/* Cray X1 pending reasons (5600 - 5649) */
-
-    /**
-     * < Not enough SSPs for job
-     */
-    public static final int PEND_CRAYX1_SSP = 5600;
-
-    /**
-     * < Not enough MSPs for job
-     */
-    public static final int PEND_CRAYX1_MSP = 5601;
-
-    /**
-     * < Unable to pass limit information to psched.
-     */
-    public static final int PEND_CRAYX1_PASS_LIMIT = 5602;
-
-/* Cray XT3 pending reasons (5650 - 5699) */
-
-    /**
-     * < Unable to create or assign a  partition by CPA
-     */
-    public static final int PEND_CRAYXT3_ASSIGN_FAIL = 5650;
-
-/* BlueGene pending reasons (5700 - 5749) */
-
-    /**
-     * < BG/L: Scheduler plug-in internal error.
-     */
-    public static final int PEND_BLUEGENE_PLUGIN_INTERNAL = 5700;
-
-    /**
-     * < BG/L: Allocation is not available. Job requeued.
-     */
-    public static final int PEND_BLUEGENE_ALLOC_UNAVAIL = 5701;
-
-    /**
-     * < BG/L: No free base partitions available for a full block allocation.
-     */
-    public static final int PEND_BLUEGENE_NOFREEMIDPLANES = 5702;
-
-    /**
-     * < BG/L: No free quarters available for a small block allocation.
-     */
-    public static final int PEND_BLUEGENE_NOFREEQUARTERS = 5703;
-
-    /**
-     * < BG/L: No free node cards available for a small block allocation.
-     */
-    public static final int PEND_BLUEGENE_NOFREENODECARDS = 5704;
-
-/* resize enhancement related pending reasons */
-
-    /**
-     * < First execution host unavailable
-     */
-    public static final int PEND_RESIZE_FIRSTHOSTUNAVAIL = 5705;
-
-    /**
-     * < Master is not in the RUN state
-     */
-    public static final int PEND_RESIZE_MASTERSUSP = 5706;
-
-    /**
-     * < Host is not same as for master
-     */
-    public static final int PEND_RESIZE_MASTER_SAME = 5707;
-
-    /**
-     * < Host already used by master
-     */
-    public static final int PEND_RESIZE_SPAN_PTILE = 5708;
-
-    /**
-     * < The job can only use first host
-     */
-    public static final int PEND_RESIZE_SPAN_HOSTS = 5709;
-
-    /**
-     * < The job cannot get slots on remote hosts
-     */
-    public static final int PEND_RESIZE_LEASE_HOST = 5710;
-
-/* compound resreq related pending reasons (5800 - ??) */
-
-    /**
-     * < The job cannot get slots on  pre-7Update5 remote hosts
-     */
-    public static final int PEND_COMPOUND_RESREQ_OLD_LEASE_HOST = 5800;
-
-    /**
-     * < Hosts using LSF HPC system  integrations do not support compound resource requirements.
-     */
-    public static final int PEND_COMPOUND_RESREQ_TOPLIB_HOST = 5801;
-/* multi-phase resreq related pending reasons (5900 - ??) */
-
-    /**
-     * < The job cannot get slots on  pre-7Update6 remote hosts
-     */
-    public static final int PEND_MULTIPHASE_RESREQ_OLD_LEASE_HOST = 5900;
-
-/* EGO-Enabled SLA pending reasons (5750 - 5799) */
-
-    /**
-     * < Host does not have enough slots for this SLA job.
-     */
-    public static final int PEND_PS_PLUGIN_INTERNAL = 5750;
-
-    /**
-     * < EGO SLA: Failed to synchronize resource with MBD.
-     */
-    public static final int PEND_PS_MBD_SYNC = 5751;
-
-
-/* PLATFORM reserves pending reason number from 1 - 20000.
-*  External plugin is suggested to use platform's reserved pending reason
-*  number. However, they can use pending reason number between 20001 - 25000
-*  as customer specific pending reasons. In this case, bjobs -p will only show
-*  the reason number without detailed message
- */
-
-    /**
-     * < Customized pending reason number between min and max.
-     */
-    public static final int PEND_CUSTOMER_MIN = 20001;
-
-    /**
-     * < Customized pending reason number between min and max.
-     */
-    public static final int PEND_CUSTOMER_MAX = 25000;
-
-
-    /**
-     * < The maximum number of reasons
-     */
-    public static final int PEND_MAX_REASONS = 25001;
-
-    /**
-     * \addtogroup suspending_reasons  suspending_reasons
-     * suspending_reasons is part of pending_reasons
-     */
-/* SUSPENDING REASONS */
-
-/* User related reasons */
-
-    /**
-     * < Virtual code. Not a reason
-     */
-    public static final int SUSP_USER_REASON = 0x00000000;
-
-    /**
-     * < The job is waiting to be re-scheduled after being resumed by the user.
-     */
-    public static final int SUSP_USER_RESUME = 0x00000001;
-
-    /**
-     * < The user suspended the job.
-     */
-    public static final int SUSP_USER_STOP = 0x00000002;
-
-/* Queue and system related reasons */
-
-    /**
-     * < Virtual code. Not a reason
-     */
-    public static final int SUSP_QUEUE_REASON = 0x00000004;
-
-    /**
-     * < The run window of the queue is closed.
-     */
-    public static final int SUSP_QUEUE_WINDOW = 0x00000008;
-
-    /**
-     * < Suspended after preemption. The system needs to re-allocate CPU utilization by job priority.
-     */
-    public static final int SUSP_RESCHED_PREEMPT = 0x00000010;
-
-    /**
-     * < The LSF administrator has locked the execution host.
-     */
-    public static final int SUSP_HOST_LOCK = 0x00000020;
-
-    /**
-     * < A load index exceeds its threshold. The subreasons field indicates which indices.
-     */
-    public static final int SUSP_LOAD_REASON = 0x00000040;
-
-    /**
-     * < The job was preempted by mbatchd because of a higher priority job.
-     */
-    public static final int SUSP_MBD_PREEMPT = 0x00000080;
-
-    /**
-     * < Preempted by sbatchd. The job limit of the host/user has been reached.
-     */
-    public static final int SUSP_SBD_PREEMPT = 0x00000100;
-
-    /**
-     * < The suspend conditions of the queue,  as specified by the STOP_COND parameter in lsb.queues, are true.
-     */
-    public static final int SUSP_QUE_STOP_COND = 0x00000200;
-
-    /**
-     * < The resume conditions of the queue, as specified by the RESUME_COND parameter in lsb.queues, are false.
-     */
-    public static final int SUSP_QUE_RESUME_COND = 0x00000400;
-
-    /**
-     * < The job was suspended due to the paging rate and the host is not idle yet.
-     */
-    public static final int SUSP_PG_IT = 0x00000800;
-
-    /**
-     * < Resets the previous reason.
-     */
-    public static final int SUSP_REASON_RESET = 0x00001000;
-
-    /**
-     * < Load information on the execution hosts is unavailable.
-     */
-    public static final int SUSP_LOAD_UNAVAIL = 0x00002000;
-
-    /**
-     * < The job was suspended by root or the LSF administrator.
-     */
-    public static final int SUSP_ADMIN_STOP = 0x00004000;
-
-    /**
-     * < The job's requirements for resource reservation are not satisfied.
-     */
-    public static final int SUSP_RES_RESERVE = 0x00008000;
-
-    /**
-     * < The job is locked by the mbatchd.
-     */
-    public static final int SUSP_MBD_LOCK = 0x00010000;
-
-    /**
-     * < The job is terminated due to resource limit.
-     */
-    public static final int SUSP_RES_LIMIT = 0x00020000;
-
-    /**
-     * < The job is suspended while the sbatchd is restarting.
-     */
-    public static final int SUSP_SBD_STARTUP = 0x00040000;
-
-    /**
-     * < The execution host is locked by the master LIM.
-     */
-    public static final int SUSP_HOST_LOCK_MASTER = 0x00080000;
-
-    /**
-     * < An advance reservation using the  host is active
-     */
-    public static final int SUSP_HOST_RSVACTIVE = 0x00100000;
-
-    /**
-     * < There is a detailed reason in the subreason field
-     */
-    public static final int SUSP_DETAILED_SUBREASON = 0x00200000;
-    /* GLB suspending reason */
-
-    /**
-     * < The job is preempted by glb
-     */
-    public static final int SUSP_GLB_LICENSE_PREEMPT = 0x00400000;
-
-    /* Cray X1 suspend reasons */
-
-    /**
-     * < Job not placed by Cray X1  psched
-     */
-    public static final int SUSP_CRAYX1_POSTED = 0x00800000;
-
-    /**
-     * < Job suspended when its advance  reservation expired
-     */
-    public static final int SUSP_ADVRSV_EXPIRED = 0x01000000;
-
-    /**
-     * \addtogroup suspending_subreasons  suspending_subreasons
-     * suspending_subreasons has the following options:
-     */
-
-    /**
-     * < Sub reason of SUSP_RES_LIMIT: RUNLIMIT is reached.
-     */
-    public static final int SUB_REASON_RUNLIMIT = 0x00000001;
-
-    /**
-     * < Sub reason of SUSP_RES_LIMIT: DEADLINE is reached.
-     */
-    public static final int SUB_REASON_DEADLINE = 0x00000002;
-
-    /**
-     * < Sub reason of SUSP_RES_LIMIT: PROCESSLIMIT is reached.
-     */
-    public static final int SUB_REASON_PROCESSLIMIT = 0x00000004;
-
-    /**
-     * < Sub reason of SUSP_RES_LIMIT: CPULIMIT is reached.
-     */
-    public static final int SUB_REASON_CPULIMIT = 0x00000008;
-
-    /**
-     * < Sub reason of SUSP_RES_LIMIT: MEMLIMIT is reached.
-     */
-    public static final int SUB_REASON_MEMLIMIT = 0x00000010;
-
-    /**
-     * < Sub reason of SUSP_RES_LIMIT: THREADLIMIT is reached.
-     */
-    public static final int SUB_REASON_THREADLIMIT = 0x00000020;
-
-    /**
-     * < Sub reason of SUSP_RES_LIMIT: SWAPLIMIT is reached.
-     */
-    public static final int SUB_REASON_SWAPLIMIT = 0x00000040;
-
-    /**
-     * < Account ID does not match those allowed by the gate
-     */
-    public static final int SUB_REASON_CRAYX1_ACCOUNTID = 0x00000001;
-
-    /**
-     * < Attribute does not match  those allowed by the gate
-     */
-    public static final int SUB_REASON_CRAYX1_ATTRIBUTE = 0x00000002;
-
-    /**
-     * < Blocked by one or more gates
-     */
-    public static final int SUB_REASON_CRAYX1_BLOCKED = 0x00000004;
-
-    /**
-     * < Application is in the process of being restarted  and it is under the control  of CPR
-     */
-    public static final int SUB_REASON_CRAYX1_RESTART = 0x00000008;
-
-    /**
-     * < Depth does not match those  allowed by the gate
-     */
-    public static final int SUB_REASON_CRAYX1_DEPTH = 0x00000010;
-
-    /**
-     * < GID does not match those  allowed by the gate
-     */
-    public static final int SUB_REASON_CRAYX1_GID = 0x00000020;
-
-    /**
-     * < No GASID is available
-     */
-    public static final int SUB_REASON_CRAYX1_GASID = 0x00000040;
-
-    /**
-     * < Hard label does not match  those allowed by the gate
-     */
-    public static final int SUB_REASON_CRAYX1_HARDLABEL = 0x00000080;
-
-    /**
-     * < Limit exceeded in regions   or domains
-     */
-    public static final int SUB_REASON_CRAYX1_LIMIT = 0x00000100;
-
-    /**
-     * < Memory size does not match  those allowed by the gate
-     */
-    public static final int SUB_REASON_CRAYX1_MEMORY = 0x00000200;
-
-    /**
-     * < Soft label does not match   those allowed by the gate
-     */
-    public static final int SUB_REASON_CRAYX1_SOFTLABEL = 0x00000400;
-
-    /**
-     * < Size gate (width times  depth larger than gate  allows)
-     */
-    public static final int SUB_REASON_CRAYX1_SIZE = 0x00000800;
-
-    /**
-     * < Time limit does not match those allowed by the gate
-     */
-    public static final int SUB_REASON_CRAYX1_TIME = 0x00001000;
-
-    /**
-     * < UID does not match those  allowed by the gate
-     */
-    public static final int SUB_REASON_CRAYX1_UID = 0x00002000;
-
-    /**
-     * < Width does not match those allowed by the gate
-     */
-    public static final int SUB_REASON_CRAYX1_WIDTH = 0x00004000;
-/*
-*  EXITING REASONS: currently only to indicate exited due to
-*  1) rerunnable job being restart from last chkpnt;
-*  2) being killed while execution host is unavailable
- */
-
-    /** Job finished normally */
-    public static final int EXIT_NORMAL = 0x00000000;
-
-    /** Rerunnable job to be restarted */
-    public static final int EXIT_RESTART = 0x00000001;
-
-    /** Job killed while host unavailable */
-    public static final int EXIT_ZOMBIE = 0x00000002;
-
-    /** Job is finished and put into pend list */
-    public static final int FINISH_PEND = 0x00000004;
-
-    /** The job is killed while the execution host is unreach */
-    public static final int EXIT_KILL_ZOMBIE = 0x00000008;
-
-    /** The job in ZOMBIE is removed */
-    public static final int EXIT_ZOMBIE_JOB = 0x00000010;
-
-    /** Rerun a job without creating a ZOMBIE job */
-    public static final int EXIT_RERUN = 0x00000020;
-
-    /** Remote job has no mapping user name here */
-    public static final int EXIT_NO_MAPPING = 0x00000040;
-
-    /** Remote job has no permission running here */
-    public static final int EXIT_REMOTE_PERMISSION = 0x00000080;
-
-    /** Remote job cannot run locally because of environment problem */
-    public static final int EXIT_INIT_ENVIRON = 0x00000100;
-
-    /** Remote job failed in pre_exec command */
-    public static final int EXIT_PRE_EXEC = 0x00000200;
-
-    /** The job is killed and will be later requeued */
-    public static final int EXIT_REQUEUE = 0x00000400;
-
-    /** Job could not be killed but was removed from system */
-    public static final int EXIT_REMOVE = 0x00000800;
-
-    /** Requeue by exit value */
-    public static final int EXIT_VALUE_REQUEUE = 0x00001000;
-
-    /** Cancel request received from remote cluster. */
-    public static final int EXIT_CANCEL = 0x00002000;
-
-    /** MED killed job on web server */
-    public static final int EXIT_MED_KILLED = 0x00004000;
-
-    /** Remote lease job exit on execution side, return to pend on submission */
-    public static final int EXIT_REMOTE_LEASE_JOB = 0x00008000;
-
-    /** Exit when cwd does not exist*/
-    public static final int EXIT_CWD_NOTEXIST = 0x00010000;
-
-
-    /** Mode indicating running in batch, js, or batch-js mode */
-    public static final int LSB_MODE_BATCH = 0x1;
-    public static final int LSB_MODE_JS = 0x2;
-    public static final int LSB_MODE_BATCH_RD = 0x4;
-
-    public static final int RLIMIT_CPU = 0;
-    public static final int RLIMIT_FSIZE = 1;
-    public static final int RLIMIT_DATA = 2;
-    public static final int RLIMIT_STACK = 3;
-    public static final int RLIMIT_CORE = 4;
-    public static final int RLIMIT_RSS = 5;
-    public static final int RLIM_INFINITY = 0x7fffffff;
-
-/*
-*  Error codes for lsblib calls
-*  Each error code has its corresponding error message defined in lsb.err.c
-*  The code number is just the position number of its message.
-*  Adding a new code here must add its message there in the corresponding
-*  position.  Changing any code number here must change the position there.
- */
-/* Error codes related to job */
-
-    /** No error at all */
-    public static final int LSBE_NO_ERROR = 0;
-
-    /** No matching job found */
-    public static final int LSBE_NO_JOB = 1;
-
-    /** Job not started yet */
-    public static final int LSBE_NOT_STARTED = 2;
-
-    /** Job already started */
-    public static final int LSBE_JOB_STARTED = 3;
-
-    /** Job already finished */
-    public static final int LSBE_JOB_FINISH = 4;
-
-    /** Ask sbatchd to stop the wrong job */
-    public static final int LSBE_STOP_JOB = 5;
-
-    /** Depend_cond syntax error */
-    public static final int LSBE_DEPEND_SYNTAX = 6;
-
-    /** Queue doesn't accept EXCLUSIVE job */
-    public static final int LSBE_EXCLUSIVE = 7;
-
-    /** Root is not allowed to submit jobs */
-    public static final int LSBE_ROOT = 8;
-
-    /** Job is already being migrated */
-    public static final int LSBE_MIGRATION = 9;
-
-    /** Job is not chkpntable */
-    public static final int LSBE_J_UNCHKPNTABLE = 10;
-
-    /** Job has no output so far */
-    public static final int LSBE_NO_OUTPUT = 11;
-
-    /** No jobId can be used now */
-    public static final int LSBE_NO_JOBID = 12;
-
-    /** Queue only accepts bsub -I job */
-    public static final int LSBE_ONLY_INTERACTIVE = 13;
-
-    /** Queue doesn't accept bsub -I job */
-    public static final int LSBE_NO_INTERACTIVE = 14;
-
-/** Error codes related to user, queue and host */
-
-    /** No user defined in lsb.users file */
-    public static final int LSBE_NO_USER = 15;
-
-    /** Bad user name */
-    public static final int LSBE_BAD_USER = 16;
-
-    /** User permission denied */
-    public static final int LSBE_PERMISSION = 17;
-
-    /** No such queue in the system */
-    public static final int LSBE_BAD_QUEUE = 18;
-
-    /** Queue name should be given */
-    public static final int LSBE_QUEUE_NAME = 19;
-
-    /** Queue has been closed */
-    public static final int LSBE_QUEUE_CLOSED = 20;
-
-    /** Queue windows are closed */
-    public static final int LSBE_QUEUE_WINDOW = 21;
-
-    /** User cannot use the queue */
-    public static final int LSBE_QUEUE_USE = 22;
-
-    /** Bad host name or host group name" */
-    public static final int LSBE_BAD_HOST = 23;
-
-    /** Too many processors requested */
-    public static final int LSBE_PROC_NUM = 24;
-
-    /** No host partition in the system */
-    public static final int LSBE_NO_HPART = 25;
-
-    /** Bad host partition name */
-    public static final int LSBE_BAD_HPART = 26;
-
-    /** No group defined in the system */
-    public static final int LSBE_NO_GROUP = 27;
-
-    /** Bad host/user group name */
-    public static final int LSBE_BAD_GROUP = 28;
-
-    /** Host is not used by the queue */
-    public static final int LSBE_QUEUE_HOST = 29;
-
-    /** User reach UJOB_LIMIT of the queue */
-    public static final int LSBE_UJOB_LIMIT = 30;
-
-    /** No host available for migration */
-    public static final int LSBE_NO_HOST = 31;
-
-
-    /** chklog is corrupted */
-    public static final int LSBE_BAD_CHKLOG = 32;
-
-    /** User reach PJOB_LIMIT of the queue */
-    public static final int LSBE_PJOB_LIMIT = 33;
-
-    /** request from non LSF host rejected*/
-    public static final int LSBE_NOLSF_HOST = 34;
-
-/** Error codes related to input arguments of lsblib call */
-
-    /** Bad argument for lsblib call */
-    public static final int LSBE_BAD_ARG = 35;
-
-    /** Bad time spec for lsblib call */
-    public static final int LSBE_BAD_TIME = 36;
-
-    /** Start time is later than end time */
-    public static final int LSBE_START_TIME = 37;
-
-    /** Bad CPU limit specification */
-    public static final int LSBE_BAD_LIMIT = 38;
-
-    /** Over hard limit of queue */
-    public static final int LSBE_OVER_LIMIT = 39;
-
-    /** Empty job (command) */
-    public static final int LSBE_BAD_CMD = 40;
-
-    /** Bad signal value; not supported */
-    public static final int LSBE_BAD_SIGNAL = 41;
-
-    /** Bad job name */
-    public static final int LSBE_BAD_JOB = 42;
-
-    /** Queue reach QJOB_LIMIT of the queue */
-    public static final int LSBE_QJOB_LIMIT = 43;
-
-    /** Expired job terminate time*/
-    public static final int LSBE_BAD_TERM = 44;
-/** 44 is reserved for future use */
-
-/** Error codes related to lsb.events file */
-
-    /** Unknown event in event log file */
-    public static final int LSBE_UNKNOWN_EVENT = 45;
-
-    /** bad event format in event log file */
-    public static final int LSBE_EVENT_FORMAT = 46;
-
-    /** End of file */
-    public static final int LSBE_EOF = 47;
-/** 48-49 are reserved for future use */
-
-/** Error codes related to system failure */
-
-    /** mbatchd internal error */
-    public static final int LSBE_MBATCHD = 50;
-
-    /** sbatchd internal error */
-    public static final int LSBE_SBATCHD = 51;
-
-    /** lsbatch lib internal error */
-    public static final int LSBE_LSBLIB = 52;
-
-    /** LSLIB call fails */
-    public static final int LSBE_LSLIB = 53;
-
-    /** System call fails */
-    public static final int LSBE_SYS_CALL = 54;
-
-    /** Cannot alloc memory */
-    public static final int LSBE_NO_MEM = 55;
-
-    /** Lsbatch service not registered */
-    public static final int LSBE_SERVICE = 56;
-
-    /** LSB_SHAREDIR not defined */
-    public static final int LSBE_NO_ENV = 57;
-
-    /** chkpnt system call fail */
-    public static final int LSBE_CHKPNT_CALL = 58;
-
-    /** mbatchd cannot fork */
-    public static final int LSBE_NO_FORK = 59;
-
-/** Error codes related to communication between mbatchd/lsblib/sbatchd */
-
-    /** LSBATCH protocol error */
-    public static final int LSBE_PROTOCOL = 60;
-
-    /** XDR en/decode error */
-    public static final int LSBE_XDR = 61;
-
-    /** No appropriate port can be bound */
-    public static final int LSBE_PORT = 62;
-
-    /** Timeout in contacting mbatchd */
-    public static final int LSBE_TIME_OUT = 63;
-
-    /** Timeout on connect() call */
-    public static final int LSBE_CONN_TIMEOUT = 64;
-
-    /** Connection refused by server */
-    public static final int LSBE_CONN_REFUSED = 65;
-
-    /** server connection already exists */
-    public static final int LSBE_CONN_EXIST = 66;
-
-    /** server is not connected */
-    public static final int LSBE_CONN_NONEXIST = 67;
-
-    /** sbatchd cannot be reached */
-    public static final int LSBE_SBD_UNREACH = 68;
-
-    // Search for any ; \s+ /** and fix the comments
-    /** Operation cannot be performed right now, op. will be retried. */
-    public static final int LSBE_OP_RETRY = 69;
-
-    /** user does not have enough job slots */
-    public static final int LSBE_USER_JLIMIT = 70;
-/** 71 is reserved for future use */
-
-/** Error codes related to NQS */
-
-    /** Bad specification for a NQS job */
-    public static final int LSBE_NQS_BAD_PAR = 72;
-
-
-    /** Client host has no license */
-    public static final int LSBE_NO_LICENSE = 73;
-
-/** Error codes related to calendar */
-
-    /** Bad calendar name */
-    public static final int LSBE_BAD_CALENDAR = 74;
-
-    /** No calendar found */
-    public static final int LSBE_NOMATCH_CALENDAR = 75;
-
-    /** No calendar in system */
-    public static final int LSBE_NO_CALENDAR = 76;
-
-    /** Bad calendar time events */
-    public static final int LSBE_BAD_TIMEEVENT = 77;
-
-    /** Calendar exist already */
-    public static final int LSBE_CAL_EXIST = 78;
-
-    /** Calendar function is not enabled*/
-    public static final int LSBE_CAL_DISABLED = 79;
-
-/** Error codes related to modify job's parameters */
-
-    /** the job's params cannot be changed */
-    public static final int LSBE_JOB_MODIFY = 80;
-    /** the changed once parameters are not used */
-    public static final int LSBE_JOB_MODIFY_ONCE = 81;
-
-
-    /** the job is not a repetitive job */
-    public static final int LSBE_J_UNREPETITIVE = 82;
-
-    /** bad cluster name */
-    public static final int LSBE_BAD_CLUSTER = 83;
-
-/** Error codes related jobs driven by calendar */
-
-    /** Job can not be killed in pending */
-    public static final int LSBE_PEND_CAL_JOB = 84;
-    /** This Running turn is being terminated */
-    public static final int LSBE_RUN_CAL_JOB = 85;
-
-
-    /** Modified parameters are being used */
-    public static final int LSBE_JOB_MODIFY_USED = 86;
-
-    /** Can not get user's token */
-    public static final int LSBE_AFS_TOKENS = 87;
-
-/** Error codes related to event */
-
-    /** Bad event name */
-    public static final int LSBE_BAD_EVENT = 88;
-
-    /** No event found */
-    public static final int LSBE_NOMATCH_EVENT = 89;
-
-    /** No event in system */
-    public static final int LSBE_NO_EVENT = 90;
-
-/** Error codes related to user, queue and host */
-
-    /** User reach HJOB_LIMIT of the queue */
-    public static final int LSBE_HJOB_LIMIT = 91;
-
-/** Error codes related to bmsg */
-
-    /** Message delivered */
-    public static final int LSBE_MSG_DELIVERED = 92;
-    /** MBD could not find the message that SBD mentions about */
-    public static final int LSBE_NO_JOBMSG = 93;
-
-    /** Message delivery to be retried */
-    public static final int LSBE_MSG_RETRY = 94;
-
-/** Error codes related to resource requirement */
-
-    /** Bad resource requirement */
-    public static final int LSBE_BAD_RESREQ = 95;
-
-
-    /** Not enough hosts */
-    public static final int LSBE_NO_ENOUGH_HOST = 96;
-
-/** Error codes related to configuration lsblib call */
-
-    /** Fatal error in reading conf files */
-    public static final int LSBE_CONF_FATAL = 97;
-
-    /** Warning error in reading conf files */
-    public static final int LSBE_CONF_WARNING = 98;
-
-
-    /** CONF used calendar cannot be modified */
-    public static final int LSBE_CAL_MODIFY = 99;
-
-    /** Job created calendar cannot be modified */
-    public static final int LSBE_JOB_CAL_MODIFY = 100;
-    /** FAIRSHARE queue or HPART defined */
-    public static final int LSBE_HP_FAIRSHARE_DEF = 101;
-
-    /** No resource specified */
-    public static final int LSBE_NO_RESOURCE = 102;
-
-    /** Bad resource name */
-    public static final int LSBE_BAD_RESOURCE = 103;
-    /** Calendar not allowed for interactive job */
-    public static final int LSBE_INTERACTIVE_CAL = 104;
-    /** Interactive job cannot be rerunnable */
-    public static final int LSBE_INTERACTIVE_RERUN = 105;
-
-    /** PTY and infile specified */
-    public static final int LSBE_PTY_INFILE = 106;
-
-    /** JobScheduler is disabled */
-    public static final int LSBE_JS_DISABLED = 107;
-
-    /** Submission host and its host type can not be found any more */
-    public static final int LSBE_BAD_SUBMISSION_HOST = 108;
-    /** Lock the job so that it can't be resumed by sbatchd */
-    public static final int LSBE_LOCK_JOB = 109;
-
-    /** user not in the user group */
-    public static final int LSBE_UGROUP_MEMBER = 110;
-    /** Operation not supported for a Multicluster job */
-    public static final int LSBE_UNSUPPORTED_MC = 111;
-    /** Operation permission denied for a Multicluster job */
-    public static final int LSBE_PERMISSION_MC = 112;
-
-    /** System Calendar exist already */
-    public static final int LSBE_SYSCAL_EXIST = 113;
-
-    /** exceed q's resource reservation */
-    public static final int LSBE_OVER_RUSAGE = 114;
-
-    /** bad host spec of run/cpu limits */
-    public static final int LSBE_BAD_HOST_SPEC = 115;
-
-    /** calendar syntax error */
-    public static final int LSBE_SYNTAX_CALENDAR = 116;
-
-    /** delete a used calendar */
-    public static final int LSBE_CAL_USED = 117;
-
-    /** cyclic calendar dependence */
-    public static final int LSBE_CAL_CYC = 118;
-
-    /** bad user group name */
-    public static final int LSBE_BAD_UGROUP = 119;
-
-    /** esub aborted request */
-    public static final int LSBE_ESUB_ABORT = 120;
-
-    /** Bad exception handler syntax */
-    public static final int LSBE_EXCEPT_SYNTAX = 121;
-    /** Bad exception condition specification */
-    public static final int LSBE_EXCEPT_COND = 122;
-    /** Bad or invalid action specification */
-    public static final int LSBE_EXCEPT_ACTION = 123;
-
-    /** job dependence, not deleted immed */
-    public static final int LSBE_JOB_DEP = 124;
-/** error codes for job group */
-
-    /** the job group exists */
-    public static final int LSBE_JGRP_EXIST = 125;
-
-    /** the job group doesn't exist */
-    public static final int LSBE_JGRP_NULL = 126;
-
-    /** the group contains jobs */
-    public static final int LSBE_JGRP_HASJOB = 127;
-
-    /** the unknown group control signal */
-    public static final int LSBE_JGRP_CTRL_UNKWN = 128;
-
-    /** Bad Job Group name */
-    public static final int LSBE_JGRP_BAD = 129;
-
-    /** Job Array */
-    public static final int LSBE_JOB_ARRAY = 130;
-
-    /** Suspended job not supported */
-    public static final int LSBE_JOB_SUSP = 131;
-
-    /** Forwarded job not supported */
-    public static final int LSBE_JOB_FORW = 132;
-
-    /** parent group is held */
-    public static final int LSBE_JGRP_HOLD = 133;
-
-    /** bad index */
-    public static final int LSBE_BAD_IDX = 134;
-
-    /** index too big */
-    public static final int LSBE_BIG_IDX = 135;
-
-    /** job array not exist*/
-    public static final int LSBE_ARRAY_NULL = 136;
-
-    /** Void calendar */
-    public static final int LSBE_CAL_VOID = 137;
-
-    /** the job exists */
-    public static final int LSBE_JOB_EXIST = 138;
-
-    /** Job Element fail */
-    public static final int LSBE_JOB_ELEMENT = 139;
-
-    /** Bad jobId */
-    public static final int LSBE_BAD_JOBID = 140;
-
-    /** cannot change job name */
-    public static final int LSBE_MOD_JOB_NAME = 141;
-
-/** error codes for frame job */
-
-    /** Bad frame expression */
-    public static final int LSBE_BAD_FRAME = 142;
-
-    /** Frame index too long */
-    public static final int LSBE_FRAME_BIG_IDX = 143;
-
-    /** Frame index syntax error */
-    public static final int LSBE_FRAME_BAD_IDX = 144;
-
-
-    /** child process died */
-    public static final int LSBE_PREMATURE = 145;
-
-/** error code for user not in project group */
-
-    /** Invoker is not in project group */
-    public static final int LSBE_BAD_PROJECT_GROUP = 146;
-
-/** error code for user group / host group */
-
-    /** No host group defined in the system */
-    public static final int LSBE_NO_HOST_GROUP = 147;
-
-    /** No user group defined in the system */
-    public static final int LSBE_NO_USER_GROUP = 148;
-
-    /** Bad jobid index file format */
-    public static final int LSBE_INDEX_FORMAT = 149;
-
-/** error codes for IO_SPOOL facility */
-
-    /** source file does not exist */
-    public static final int LSBE_SP_SRC_NOT_SEEN = 150;
-
-    /** Number of failed spool hosts reached max */
-    public static final int LSBE_SP_FAILED_HOSTS_LIM = 151;
-
-    /** spool copy failed for this host*/
-    public static final int LSBE_SP_COPY_FAILED = 152;
-
-    /** fork failed */
-    public static final int LSBE_SP_FORK_FAILED = 153;
-
-    /** status of child is not available */
-    public static final int LSBE_SP_CHILD_DIES = 154;
-
-    /** child terminated with failure */
-    public static final int LSBE_SP_CHILD_FAILED = 155;
-
-    /** Unable to find a host for spooling */
-    public static final int LSBE_SP_FIND_HOST_FAILED = 156;
-
-    /** Cannot get $JOB_SPOOLDIR for this host */
-    public static final int LSBE_SP_SPOOLDIR_FAILED = 157;
-
-    /** Cannot delete spool file for this host */
-    public static final int LSBE_SP_DELETE_FAILED = 158;
-
-
-    /** Bad user priority */
-    public static final int LSBE_BAD_USER_PRIORITY = 159;
-
-    /** Job priority control undefined */
-    public static final int LSBE_NO_JOB_PRIORITY = 160;
-
-    /** Job has been killed & requeued */
-    public static final int LSBE_JOB_REQUEUED = 161;
-
-    /** Remote job cannot kill-requeued */
-    public static final int LSBE_JOB_REQUEUE_REMOTE = 162;
-
-    /** Cannot submit job array to a NQS queue */
-    public static final int LSBE_NQS_NO_ARRJOB = 163;
-
-/** error codes for EXT_JOB_STATUS */
-
-    /** No message available */
-    public static final int LSBE_BAD_EXT_MSGID = 164;
-
-    /** Not a regular file */
-    public static final int LSBE_NO_IFREG = 165;
-
-    /** MBD fail to create files in the directory*/
-    public static final int LSBE_BAD_ATTA_DIR = 166;
-
-    /** Fail to transfer data */
-    public static final int LSBE_COPY_DATA = 167;
-
-    /** exceed the limit on data transferring of a msg*/
-    public static final int LSBE_JOB_ATTA_LIMIT = 168;
-
-    /** cannot resize a chunk job, cannot bswitch a run/wait job */
-    public static final int LSBE_CHUNK_JOB = 169;
-
-/** Error code used in communications with dlogd */
-
-
-    /** dlogd is already connected */
-    public static final int LSBE_DLOGD_ISCONN = 170;
-
-/** Error code for LANL3_1ST_HOST */
-
-    /** Multiple first execution host */
-    public static final int LSBE_MULTI_FIRST_HOST = 171;
-
-    /** Host group as first execution host */
-    public static final int LSBE_HG_FIRST_HOST = 172;
-
-    /** Host partition as first execution host */
-    public static final int LSBE_HP_FIRST_HOST = 173;
-
-    /** "others" as first execution host */
-    public static final int LSBE_OTHERS_FIRST_HOST = 174;
-
-/** error code for multi-cluster: remote only queue */
-
-    /** cannot specify exec host */
-    public static final int LSBE_MC_HOST = 175;
-
-    /** cannot specify repetitive job */
-    public static final int LSBE_MC_REPETITIVE = 176;
-
-    /** cannot be a chkpnt job */
-    public static final int LSBE_MC_CHKPNT = 177;
-
-    /** cannot specify exception */
-    public static final int LSBE_MC_EXCEPTION = 178;
-
-    /** cannot specify time event */
-    public static final int LSBE_MC_TIMEEVENT = 179;
-
-    /** Too few processors requested */
-    public static final int LSBE_PROC_LESS = 180;
-    /** bmod pending options and running options together towards running job */
-    public static final int LSBE_MOD_MIX_OPTS = 181;
-
-    /** cannot bmod remote running job */
-    public static final int LSBE_MOD_REMOTE = 182;
-    /** cannot bmod cpulimit without LSB_JOB_CPULIMIT defined */
-    public static final int LSBE_MOD_CPULIMIT = 183;
-    /** cannot bmod memlimit without LSB_JOB_MEMLIMIT defined */
-    public static final int LSBE_MOD_MEMLIMIT = 184;
-
-    /** cannot bmod err file name */
-    public static final int LSBE_MOD_ERRFILE = 185;
-
-    /** host is locked by master LIM*/
-    public static final int LSBE_LOCKED_MASTER = 186;
-    /** warning time period is invalid */
-    public static final int LSBE_WARNING_INVALID_TIME_PERIOD = 187;
-    /** either warning time period or warning action is not specified */
-    public static final int LSBE_WARNING_MISSING = 188;
-    /** The job arrays involved in  one to one dependency do not  have the same size. */
-    public static final int LSBE_DEP_ARRAY_SIZE = 189;
-
-    /** Not enough processors to be reserved (lsb_addreservation()) */
-    public static final int LSBE_FEWER_PROCS = 190;
-
-    /** Bad reservation ID */
-    public static final int LSBE_BAD_RSVID = 191;
-
-    /** No more reservation IDs can be used now */
-    public static final int LSBE_NO_RSVID = 192;
-
-    /** No hosts are exported */
-    public static final int LSBE_NO_EXPORT_HOST = 193;
-
-    /** Trying to control remote hosts*/
-    public static final int LSBE_REMOTE_HOST_CONTROL = 194;
-
-/*Can't open a remote host closed by the remote cluster admin */
-    public static final int LSBE_REMOTE_CLOSED = 195;
-
-    /** User suspended job */
-    public static final int LSBE_USER_SUSPENDED = 196;
-
-    /** Admin suspended job */
-    public static final int LSBE_ADMIN_SUSPENDED = 197;
-
-    /** Not a local host name in  bhost -e command */
-    public static final int LSBE_NOT_LOCAL_HOST = 198;
-
-    /** The host's lease is not active. */
-    public static final int LSBE_LEASE_INACTIVE = 199;
-
-    /** The advance reserved host is not on queue. */
-    public static final int LSBE_QUEUE_ADRSV = 200;
-
-    /** The specified host(s) is not exported. */
-    public static final int LSBE_HOST_NOT_EXPORTED = 201;
-
-    /** The user specified host is not in the advance reservation */
-    public static final int LSBE_HOST_ADRSV = 202;
-
-    /** The remote cluster is not connected */
-    public static final int LSBE_MC_CONN_NONEXIST = 203;
-
-    /** The general resource limit broken */
-    public static final int LSBE_RL_BREAK = 204;
-
-/** ---- The following RMS errors are obsoleted in Eagle */
-
-    /** cannot submit a job with special topology requirement to a preemptive queue*/
-    public static final int LSBE_LSF2TP_PREEMPT = 205;
-
-    /** cannot submit a job with special topology requirement to a queue with slot reservation*/
-    public static final int LSBE_LSF2TP_RESERVE = 206;
-    /** cannot submit a job with special topology requirement to a queue with backfill */
-    public static final int LSBE_LSF2TP_BACKFILL = 207;
-    /** ---- The above RMS errors are obsoleted in Eagle */
-
-    /** nonexistent policy name */
-    public static final int LSBE_RSV_POLICY_NAME_BAD = 208;
-
-    /** All normal user has no privilege */
-    public static final int LSBE_RSV_POLICY_PERMISSION_DENIED = 209;
-
-    /** user has no privilege */
-    public static final int LSBE_RSV_POLICY_USER = 210;
-
-    /** user has no privilege to create reservation on host */
-    public static final int LSBE_RSV_POLICY_HOST = 211;
-
-    /** time window is not allowed by policy */
-    public static final int LSBE_RSV_POLICY_TIMEWINDOW = 212;
-
-    /** the feature is disabled */
-    public static final int LSBE_RSV_POLICY_DISABLED = 213;
-    /** the general limit related errors */
-
-    /** There are no general limit defined */
-    public static final int LSBE_LIM_NO_GENERAL_LIMIT = 214;
-
-    /** There are no resource usage */
-    public static final int LSBE_LIM_NO_RSRC_USAGE = 215;
-
-    /** Convert data error */
-    public static final int LSBE_LIM_CONVERT_ERROR = 216;
-
-    /** There are no qualified host found in cluster*/
-    public static final int LSBE_RSV_NO_HOST = 217;
-
-    /** Cannot modify job group on element of job array */
-    public static final int LSBE_MOD_JGRP_ARRAY = 218;
-
-    /** Cannot combine modify job group or service class option with others */
-    public static final int LSBE_MOD_MIX = 219;
-
-    /** the service class doesn't exist */
-    public static final int LSBE_SLA_NULL = 220;
-
-    /** Modify job group for job in service class is not supported*/
-    public static final int LSBE_MOD_JGRP_SLA = 221;
-
-    /** User or user group is not a member of the specified service class */
-    public static final int LSBE_SLA_MEMBER = 222;
-
-    /** There is no exceptional host found */
-    public static final int LSBE_NO_EXCEPTIONAL_HOST = 223;
-
-    /** warning action (signal) is invalid */
-    public static final int LSBE_WARNING_INVALID_ACTION = 224;
-
-
-    /** Extsched option syntax error */
-    public static final int LSBE_EXTSCHED_SYNTAX = 225;
-
-    /** SLA doesn't work with remote only queues */
-    public static final int LSBE_SLA_RMT_ONLY_QUEUE = 226;
-
-    /** Cannot modify service class on element of job array */
-    public static final int LSBE_MOD_SLA_ARRAY = 227;
-
-    /** Modify service class for job in job group is not supported*/
-    public static final int LSBE_MOD_SLA_JGRP = 228;
-
-    /** Max. Pending job error */
-    public static final int LSBE_MAX_PEND = 229;
-
-    /** System concurrent query exceeded */
-    public static final int LSBE_CONCURRENT = 230;
-
-    /** Requested feature not enabled */
-    public static final int LSBE_FEATURE_NULL = 231;
-
-
-    /** Host is already member of group */
-    public static final int LSBE_DYNGRP_MEMBER = 232;
-
-    /** Host is not a dynamic host */
-    public static final int LSBE_BAD_DYN_HOST = 233;
-
-    /** Host was not added with badmin hghostadd */
-    public static final int LSBE_NO_GRP_MEMBER = 234;
-
-    /** Cannot create job info file */
-    public static final int LSBE_JOB_INFO_FILE = 235;
-
-    /** Cannot modify rusage to a new || (or) expression after the job is dispatched */
-    public static final int LSBE_MOD_OR_RUSAGE = 236;
-
-    /** Bad host group name */
-    public static final int LSBE_BAD_GROUP_NAME = 237;
-
-    /** Bad host name */
-    public static final int LSBE_BAD_HOST_NAME = 238;
-
-    /** Bsub is not permitted on DT cluster */
-    public static final int LSBE_DT_BSUB = 239;
-
-
-    /** The parent symphony job/group was  gone when submitting jobs*/
-    public static final int LSBE_PARENT_SYM_JOB = 240;
-
-    /** The partition has no cpu alllocated */
-    public static final int LSBE_PARTITION_NO_CPU = 241;
-
-    /** batch partition does not accept online jobs: obsolete */
-    public static final int LSBE_PARTITION_BATCH = 242;
-
-    /** online partition does not accept batch jobs */
-    public static final int LSBE_PARTITION_ONLINE = 243;
-
-    /** no batch licenses */
-    public static final int LSBE_NOLICENSE_BATCH = 244;
-
-    /** no online licenses */
-    public static final int LSBE_NOLICENSE_ONLINE = 245;
-
-    /** signal is not supported for service job */
-    public static final int LSBE_SIGNAL_SRVJOB = 246;
-
-    /** the begin time is not later than current time. */
-    public static final int LSBE_BEGIN_TIME_INVALID = 247;
-
-    /** the end time is not later than current time. */
-    public static final int LSBE_END_TIME_INVALID = 248;
-
-    /** Bad regular expression */
-    public static final int LSBE_BAD_REG_EXPR = 249;
-
-
-    /** Host group has regular expression */
-    public static final int LSBE_GRP_REG_EXPR = 250;
-
-    /** Host group have no member */
-    public static final int LSBE_GRP_HAVE_NO_MEMB = 251;
-
-    /** the application doesn't exist */
-    public static final int LSBE_APP_NULL = 252;
-
-    /** job's proclimit rejected by App */
-    public static final int LSBE_PROC_JOB_APP = 253;
-
-    /** app's proclimit rejected by Queue */
-    public static final int LSBE_PROC_APP_QUE = 254;
-
-    /** application name is too long */
-    public static final int LSBE_BAD_APPNAME = 255;
-
-    /** Over hard limit of queue */
-    public static final int LSBE_APP_OVER_LIMIT = 256;
-
-    /** Cannot remove default application */
-    public static final int LSBE_REMOVE_DEF_APP = 257;
-
-    /** Host is disabled by EGO */
-    public static final int LSBE_EGO_DISABLED = 258;
-
-    /** Host is a remote host. Remote hosts cannot be added to a local host group. */
-    public static final int LSBE_REMOTE_HOST = 259;
-
-    /** SLA is exclusive, only accept exclusive job. */
-    public static final int LSBE_SLA_EXCLUSIVE = 260;
-
-    /** SLA is non-exclusive, only accept non-exclusive job */
-    public static final int LSBE_SLA_NONEXCLUSIVE = 261;
-
-    /** The feature has already been started */
-    public static final int LSBE_PERFMON_STARTED = 262;
-
-    /** The Featurn has already been turn down */
-    public static final int LSBE_PERFMON_STOPED = 263;
-
-    /** Current sampling period is already set to %%s,seconds. Ignored*/
-    public static final int LSBE_PERFMON_PERIOD_SET = 264;
-
-    /** Default spool dir is disabled */
-    public static final int LSBE_DEFAULT_SPOOL_DIR_DISABLED = 265;
-
-    /** job belongs to an APS queue and cannot be moved */
-    public static final int LSBE_APS_QUEUE_JOB = 266;
-
-    /** job is not in an absolute priority enabled queue */
-    public static final int LSBE_BAD_APS_JOB = 267;
-
-    /** Wrong aps admin value */
-    public static final int LSBE_BAD_APS_VAL = 268;
-
-    /** Trying to delete a non-existent APS string */
-    public static final int LSBE_APS_STRING_UNDEF = 269;
-
-    /** A job cannot be assigned an SLA and an APS queue with factor FS */
-    public static final int LSBE_SLA_JOB_APS_QUEUE = 270;
-
-    /** bmod -aps | -apsn option cannot be mixed with other option */
-    public static final int LSBE_MOD_MIX_APS = 271;
-
-    /** specified ADMIN factor/system APS value out of range */
-    public static final int LSBE_APS_RANGE = 272;
-
-    /** specified ADMIN factor/system APS value is zero */
-    public static final int LSBE_APS_ZERO = 273;
-
-
-    /** res port is unknown */
-    public static final int LSBE_DJOB_RES_PORT_UNKNOWN = 274;
-
-    /** timeout on res communication */
-    public static final int LSBE_DJOB_RES_TIMEOUT = 275;
-
-    /** I/O error on remote stream */
-    public static final int LSBE_DJOB_RES_IOERR = 276;
-
-    /** res internal failure */
-    public static final int LSBE_DJOB_RES_INTERNAL_FAILURE = 277;
-
-
-    /** can not run outside LSF */
-    public static final int LSBE_DJOB_CAN_NOT_RUN = 278;
-
-    /** distributed job's validation failed due to incorrect job ID or index */
-    public static final int LSBE_DJOB_VALIDATION_BAD_JOBID = 279;
-
-    /** distributed job's validation failed due to incorrect host selection */
-    public static final int LSBE_DJOB_VALIDATION_BAD_HOST = 280;
-
-    /** distributed job's validation failed due to incorrect user */
-    public static final int LSBE_DJOB_VALIDATION_BAD_USER = 281;
-
-    /** failed while executing tasks */
-    public static final int LSBE_DJOB_EXECUTE_TASK = 282;
-
-    /** failed while waiting for tasks to finish*/
-    public static final int LSBE_DJOB_WAIT_TASK = 283;
-
-
-    /** HPC License not exist */
-    public static final int LSBE_APS_HPC = 284;
-
-    /** Integrity check of bsub command failed */
-    public static final int LSBE_DIGEST_CHECK_BSUB = 285;
-
-    /** Distributed Application Framework disabled */
-    public static final int LSBE_DJOB_DISABLED = 286;
-
-/** Error codes related to runtime estimation and cwd */
-
-    /** Bad runtime specification */
-    public static final int LSBE_BAD_RUNTIME = 287;
-
-    /** RUNLIMIT: Cannot exceed RUNTIME*JOB_RUNLIMIT_RATIO */
-    public static final int LSBE_BAD_RUNLIMIT = 288;
-
-    /** RUNTIME: Cannot exceed the hard runtime limit in the queue */
-    public static final int LSBE_OVER_QUEUE_LIMIT = 289;
-
-    /** RUNLIMIT: Is not set by command line */
-    public static final int LSBE_SET_BY_RATIO = 290;
-
-    /** current working directory name too long */
-    public static final int LSBE_BAD_CWD = 291;
-
-
-    /** Job group limit is greater than its parent group */
-    public static final int LSBE_JGRP_LIMIT_GRTR_THAN_PARENT = 292;
-
-    /** Job group limit is less than its children groups */
-    public static final int LSBE_JGRP_LIMIT_LESS_THAN_CHILDREN = 293;
-
-    /** Job Array end index should be specified explicitly */
-    public static final int LSBE_NO_ARRAY_END_INDEX = 294;
-
-    /** cannot bmod runtime without LSB_MOD_ALL_JOBS=y defined */
-    public static final int LSBE_MOD_RUNTIME = 295;
-
-    /** EP3 */
-    public static final int LSBE_BAD_SUCCESS_EXIT_VALUES = 296;
-    public static final int LSBE_DUP_SUCCESS_EXIT_VALUES = 297;
-    public static final int LSBE_NO_SUCCESS_EXIT_VALUES = 298;
-
-    public static final int LSBE_JOB_REQUEUE_BADARG = 299;
-    public static final int LSBE_JOB_REQUEUE_DUPLICATED = 300;
-
-    /** "all" with number */
-    public static final int LSBE_JOB_REQUEUE_INVALID_DIGIT = 301;
-
-    /** ~digit without "all" */
-    public static final int LSBE_JOB_REQUEUE_INVALID_TILDE = 302;
-    public static final int LSBE_JOB_REQUEUE_NOVALID = 303;
-
-
-    /** No matching job group found */
-    public static final int LSBE_NO_JGRP = 304;
-    public static final int LSBE_NOT_CONSUMABLE = 305;
-
-/** AR pre/post */
-
-    /** Cannot parse an Advance Reservation -exec string */
-    public static final int LSBE_RSV_BAD_EXEC = 306;
-
-    /** Unknown AR event type */
-    public static final int LSBE_RSV_EVENTTYPE = 307;
-
-    /** pre/post cannot have postive offset */
-    public static final int LSBE_RSV_SHIFT = 308;
-
-    /** pre-AR command cannot have offset < 0 in user-created AR */
-    public static final int LSBE_RSV_USHIFT = 309;
-
-    /** only one pre- and one post- cmd permitted per AR */
-    public static final int LSBE_RSV_NUMEVENTS = 310;
-
-/*Error codes related to AR Modification*/
-
-    /** ID does not correspond to a known AR. */
-    public static final int LSBE_ADRSV_ID_VALID = 311;
-
-    /** disable non-recurrent AR. */
-    public static final int LSBE_ADRSV_DISABLE_NONRECUR = 312;
-
-    /** modification is rejected because AR is activated. */
-    public static final int LSBE_ADRSV_MOD_ACTINSTANCE = 313;
-
-    /** modification is rejected because host slots is not available. */
-    public static final int LSBE_ADRSV_HOST_NOTAVAIL = 314;
-
-    /** the  time of the AR cannot be modified since resource is not available. */
-    public static final int LSBE_ADRSV_TIME_MOD_FAIL = 315;
-
-    /** resource requirement (-R) must be followed a slot requirment (-n) */
-    public static final int LSBE_ADRSV_R_AND_N = 316;
-
-/*modification is rejected because trying to empty the AR. */
-    public static final int LSBE_ADRSV_EMPTY = 317;
-
-/*modification is rejected because switching AR type. */
-    public static final int LSBE_ADRSV_SWITCHTYPE = 318;
-
-/*modification is rejected because specifying -n for system AR. */
-    public static final int LSBE_ADRSV_SYS_N = 319;
-
-    /** disable string is not valid. */
-    public static final int LSBE_ADRSV_DISABLE = 320;
-
-    /** Unique AR ID required */
-    public static final int LSBE_ADRSV_ID_UNIQUE = 321;
-
-    /** Bad reservation name */
-    public static final int LSBE_BAD_RSVNAME = 322;
-
-    /** Cannot change the start time of an active reservation. */
-    public static final int LSBE_ADVRSV_ACTIVESTART = 323;
-
-    /** AR ID is refernced by a job */
-    public static final int LSBE_ADRSV_ID_USED = 324;
-
-    /** the disable period has already been disabled */
-    public static final int LSBE_ADRSV_PREVDISABLED = 325;
-
-    /** an active period of a recurring reservation cannot be disabled */
-    public static final int LSBE_ADRSV_DISABLECURR = 326;
-
-    /** modification is rejected because specified hosts or host groups do not belong to the reservation */
-    public static final int LSBE_ADRSV_NOT_RSV_HOST = 327;
-
-/*new parser */
-
-/*checking resreq return ok */
-    public static final int LSBE_RESREQ_OK = 328;
-
-/*checking resreq return error */
-    public static final int LSBE_RESREQ_ERR = 329;
-
-
-    /** modification is rejected because reservation has running jobs on the specified hosts or host groups */
-    public static final int LSBE_ADRSV_HOST_USED = 330;
-
-
-    /** The checkpoint directory is too long */
-    public static final int LSBE_BAD_CHKPNTDIR = 331;
-
-    /** trying to modify in a remote cluster */
-    public static final int LSBE_ADRSV_MOD_REMOTE = 332;
-    public static final int LSBE_JOB_REQUEUE_BADEXCLUDE = 333;
-
-    /** trying to disable for a date in the past */
-    public static final int LSBE_ADRSV_DISABLE_DATE = 334;
-
-    /** cannot mix the -Un option with others for started jobs */
-    public static final int LSBE_ADRSV_DETACH_MIX = 335;
-
-    /** cannot detach a started job when the reservation is active */
-    public static final int LSBE_ADRSV_DETACH_ACTIVE = 336;
-
-    /** invalid time expression: must specify day for both start and end time */
-    public static final int LSBE_MISSING_START_END_TIME = 337;
-
-    /** Queue level limitation */
-    public static final int LSBE_JOB_RUSAGE_EXCEED_LIMIT = 338;
-
-    /** Queue level limitation */
-    public static final int LSBE_APP_RUSAGE_EXCEED_LIMIT = 339;
-
-    /** Hosts and host groups specified by -m are not used by the queue */
-    public static final int LSBE_CANDIDATE_HOST_EMPTY = 340;
-
-    /** An int must follow an open bracket */
-    public static final int LSBE_HS_BAD_AFTER_BRACKT = 341;
-
-    /** An end index must follow a dash */
-    public static final int LSBE_HS_NO_END_INDEX = 342;
-
-    /** Integers must come before and after the comma */
-    public static final int LSBE_HS_BAD_COMMA = 343;
-
-    /** Incorrect condensed host specification */
-    public static final int LSBE_HS_BAD_FORMAT = 344;
-
-    /** The start index must be less than end index */
-    public static final int LSBE_HS_BAD_ORDER = 345;
-
-    /** The end index must be less than 10 digits */
-    public static final int LSBE_HS_BAD_MANY_DIGITS = 346;
-
-    /** Number of digits in the start index must be less than that of end index */
-    public static final int LSBE_HS_BAD_NUM_DIGITS = 347;
-
-    /** The end index cannot start with zero (0) */
-    public static final int LSBE_HS_BAD_END_INDEX = 348;
-
-    /** Index must be an integer or a range */
-    public static final int LSBE_HS_BAD_INDEX = 349;
-
-/** host group admin*/
-
-    /** When a Host Group Admin (badmin hclose or hopen) closes or opens a host,  the usage of the -C "message" option must be compulsory, as is the logging  of the name of the person performing the action. */
-    public static final int LSBE_COMMENTS = 350;
-
-
-    /** First hosts specified by -m are not used by the queue */
-    public static final int LSBE_FIRST_HOSTS_NOT_IN_QUEUE = 351;
-
-
-    /** The job is not started */
-    public static final int LSBE_JOB_NOTSTART = 352;
-
-    /** Accumulated runtime of the job is not available */
-    public static final int LSBE_RUNTIME_INVAL = 353;
-
-    /** SSH feature can only be used for interactive job */
-    public static final int LSBE_SSH_NOT_INTERACTIVE = 354;
-
-    /** Run time specification is less than the accumulated run time */
-    public static final int LSBE_LESS_RUNTIME = 355;
-
-    /** Resize job notification command */
-    public static final int LSBE_RESIZE_NOTIFY_CMD_LEN = 356;
-
-    /** Job is not resizable */
-    public static final int LSBE_JOB_RESIZABLE = 357;
-
-    /** Bad bresize release host spec */
-    public static final int LSBE_RESIZE_RELEASE_HOSTSPEC = 358;
-
-    /** no resize notify matches in mbatchd*/
-    public static final int LSBE_NO_RESIZE_NOTIFY = 359;
-
-    /** Can't release first exec host */
-    public static final int LSBE_RESIZE_RELEASE_FRISTHOST = 360;
-
-    /** resize event in progress */
-    public static final int LSBE_RESIZE_EVENT_INPROGRESS = 361;
-
-    /** too few or too many slots */
-    public static final int LSBE_RESIZE_BAD_SLOTS = 362;
-
-    /** No active resize request */
-    public static final int LSBE_RESIZE_NO_ACTIVE_REQUEST = 363;
-
-    /** specified host not part of the job's allocation*/
-    public static final int LSBE_HOST_NOT_IN_ALLOC = 364;
-
-    /** nothing released */
-    public static final int LSBE_RESIZE_RELEASE_NOOP = 365;
-
-    /** Can't resize a brun job */
-    public static final int LSBE_RESIZE_URGENT_JOB = 366;
-    public static final int LSBE_RESIZE_EGO_SLA_COEXIST = 367;
-
-    /** hpc jobs can't be resized */
-    public static final int LSBE_HOST_NOT_SUPPORT_RESIZE = 368;
-
-    /** Application doesn't allow resizable */
-    public static final int LSBE_APP_RESIZABLE = 369;
-
-    /** can't operate on lost & found hosts*/
-    public static final int LSBE_RESIZE_LOST_AND_FOUND = 370;
-
-    /** can't resize while the first host is lost & found*/
-    public static final int LSBE_RESIZE_FIRSTHOST_LOST_AND_FOUND = 371;
-
-    /** bad host name (for resize) */
-    public static final int LSBE_RESIZE_BAD_HOST = 372;
-
-    /** proper app is required by an auto-resizable job */
-    public static final int LSBE_AUTORESIZE_APP = 373;
-
-    /** cannot resize job because there is a pedning resize request */
-    public static final int LSBE_RESIZE_PENDING_REQUEST = 374;
-
-    /** number of hosts specified by -m exceeding configuration */
-    public static final int LSBE_ASKED_HOSTS_NUMBER = 375;
-
-    /** All hosts reserved by advanced reservation are invalid in intersected hosts */
-    public static final int LSBE_AR_HOST_EMPTY = 376;
-
-    /** First hosts specified by -m are not used by advanced reservation */
-    public static final int LSBE_AR_FIRST_HOST_EMPTY = 377;
-
-    /** Internal jobbroker error */
-    public static final int LSBE_JB = 378;
-
-    /** Internal jobbroker database library error */
-    public static final int LSBE_JB_DBLIB = 379;
-
-    /** Jobbroker cannot reach database */
-    public static final int LSBE_JB_DB_UNREACH = 380;
-
-    /** Jobbroker cannot reach mbatchd */
-    public static final int LSBE_JB_MBD_UNREACH = 381;
-
-    /** BES server returned an error */
-    public static final int LSBE_JB_BES = 382;
-
-    /** Unsupported BES operation */
-    public static final int LSBE_JB_BES_UNSUPPORTED_OP = 383;
-
-    /** invalid LS project name*/
-    public static final int LSBE_LS_PROJECT_NAME = 384;
-
-    /** the end time is not later than start  time. */
-    public static final int LSBE_END_TIME_INVALID_COMPARE_START = 385;
-
-    /** one host cannot be defined in more than one host partition.*/
-    public static final int LSBE_HP_REDUNDANT_HOST = 386;
-
-    /** The application level compound resreq causes slots requirements conflict */
-    public static final int LSBE_COMPOUND_APP_SLOTS = 387;
-
-    /** The queue level compound resreq causes slots requirements conflict */
-    public static final int LSBE_COMPOUND_QUEUE_SLOTS = 388;
-
-    /** Resizable job cannot work with compound resreq */
-    public static final int LSBE_COMPOUND_RESIZE = 389;
-/** compute unit support */
-
-    /** Compute units cannot have overlapping hosts */
-    public static final int LSBE_CU_OVERLAPPING_HOST = 390;
-
-    /** The compute unit cannot contain other compute units */
-    public static final int LSBE_CU_BAD_HOST = 391;
-
-    /** The compute unit cannot contain host or host group as a member */
-    public static final int LSBE_CU_HOST_NOT_ALLOWED = 392;
-
-    /** Only lowest level compute units are allowed to add hosts as a member */
-    public static final int LSBE_CU_NOT_LOWEST_LEVEL = 393;
-
-    /** You cannot modify a compute unit resource requirement when a job is already running */
-    public static final int LSBE_CU_MOD_RESREQ = 394;
-
-    /** A compute unit resource requirement cannot be specified for auto resizable jobs */
-    public static final int LSBE_CU_AUTORESIZE = 395;
-
-    /** No COMPUTE_UNIT_TYPES are specified in lsb.params */
-    public static final int LSBE_NO_COMPUTE_UNIT_TYPES = 396;
-
-    /** No compute unit defined in the system */
-    public static final int LSBE_NO_COMPUTE_UNIT = 397;
-
-    /** No such compute unit defined in the system */
-    public static final int LSBE_BAD_COMPUTE_UNIT = 398;
-
-    /** The queue is not configured to accept exclusive compute unit jobs */
-    public static final int LSBE_CU_EXCLUSIVE = 399;
-
-    /** The queue is not configured to accept higher level of exclusive compute unit jobs */
-    public static final int LSBE_CU_EXCLUSIVE_LEVEL = 400;
-
-    /** Job cannot be switched due to the exclusive compute unit reqirement */
-    public static final int LSBE_CU_SWITCH = 401;
-
-    /** Job level compound resreq causes slots requirements conflict */
-    public static final int LSBE_COMPOUND_JOB_SLOTS = 402;
-
-    /** "||" used in rusage[] of queue resource requirement. It's conflict with job level compound resource requirement */
-    public static final int LSBE_COMPOUND_QUEUE_RUSAGE_OR = 403;
-
-    /** balance and usablecuslots cannot both be used in a compute unit resource requirement */
-    public static final int LSBE_CU_BALANCE_USABLECUSLOTS = 404;
-
-    /** TS jobs cannot use compound resource requirement (application level) */
-    public static final int LSBE_COMPOUND_TSJOB_APP = 405;
-
-    /** TS jobs cannot use compound resource requirement (queue level) */
-    public static final int LSBE_COMPOUND_TSJOB_QUEUE = 406;
-    /** Job dependency conditions using a job name or job name wild-card exceed limitation set by MAX_JOB_NAME_DEP in lsb.params */
-    public static final int LSBE_EXCEED_MAX_JOB_NAME_DEP = 407;
-
-    /** "is waiting for the remote cluster to synchronize." */
-    public static final int LSBE_WAIT_FOR_MC_SYNC = 408;
-
-    /** Job cannot exceed queue level RESRSV_LIMIT limitation */
-    public static final int LSBE_RUSAGE_EXCEED_RESRSV_LIMIT = 409;
-
-    /** job description too long */
-    public static final int LSBE_JOB_DESCRIPTION_LEN = 410;
-
-    /** Cannot use simulation options */
-    public static final int LSBE_NOT_IN_SIMMODE = 411;
-
-    /** Value of runtime simulation is incorrect */
-    public static final int LSBE_SIM_OPT_RUNTIME = 412;
-
-    /** Value of cputime simulation is incorrect */
-    public static final int LSBE_SIM_OPT_CPUTIME = 413;
-
-    /** Incorrect maxmem simulation opt */
-    public static final int LSBE_SIM_OPT_MAXMEM = 414;
-
-    /** Incorrect job exitstatus simulation opt */
-    public static final int LSBE_SIM_OPT_EXITSTATUS = 415;
-
-    /** Incorrect job simulation option syntax */
-    public static final int LSBE_SIM_OPT_SYNTAX = 416;
-
-    /** Number of the above error codes */
-    public static final int LSBE_NUM_ERR = 417;
-
-    /**
-     * *****************************************************
-     */
-
-/* op codes for hand shake protocol between client/server */
-    public static final int PREPARE_FOR_OP = 1024;
-    public static final int READY_FOR_OP = 1023;
-
-/*
-*  Data structures for lsblib interface
- */
-
-
-    /**
-     * \addtogroup lsb_submit_options lsb_submit_options
-     * define statements used by lsb_submit.
-     */
-
-/* lsb_submit() options */
-    /**
-     * < Flag to indicate jobName parameter has data. Equivalent to bsub -J command line option existence.
-     */
-    public static final int SUB_JOB_NAME = 0x01;
-    /**
-     * < Flag to indicate queue parameter has data. Equivalent to bsub -q command line option existence.
-     */
-    public static final int SUB_QUEUE = 0x02;
-    /**
-     * < Flat to indicate numAskedHosts parameter has data. Equivalent to bsub -m command line option existence.
-     */
-    public static final int SUB_HOST = 0x04;
-    /**
-     * < Flag to indicate inFile parameter has data. Equivalent to bsub -i command line option existence.
-     */
-    public static final int SUB_IN_FILE = 0x08;
-    /**
-     * < Flag to indicate outFile parameter has data. Equivalent to bsub -o command line option existence.
-     */
-    public static final int SUB_OUT_FILE = 0x10;
-    /**
-     * < Flag to indicate errFile parameter has data. Equivalent to bsub -e command line option existence.
-     */
-    public static final int SUB_ERR_FILE = 0x20;
-    /**
-     * < Flag to indicate execution of a job on a host by itself requested. Equivalent to bsub -x command line option existence.
-     */
-    public static final int SUB_EXCLUSIVE = 0x40;
-    /**
-     * < Flag to indicate whether to send mail to the user when the job finishes. Equivalent to bsub -N command line option existence.
-     */
-    public static final int SUB_NOTIFY_END = 0x80;
-    /**
-     * < Flag to indicate whether to send mail to the user when the job is dispatched. Equivalent to bsub -B command line option existence.
-     */
-    public static final int SUB_NOTIFY_BEGIN = 0x100;
-    /**
-     * < Flag to indicate userGroup name parameter has data. Equivalent to bsub -G command line option existence.
-     */
-    public static final int SUB_USER_GROUP = 0x200;
-    /**
-     * < Flag to indicatechkpntPeriod parameter has data . Equivalent to bsub -k command line option existence.
-     */
-    public static final int SUB_CHKPNT_PERIOD = 0x400;
-    /**
-     * < Flag to indicate chkpntDir parameter has data. Equivalent to bsub -k command line option existence.
-     */
-    public static final int SUB_CHKPNT_DIR = 0x800;
-    /**
-     * < Indicates the job is checkpointable. Equivalent to bsub -k command line option.
-     */
-    public static final int SUB_CHKPNTABLE = SUB_CHKPNT_DIR;
-    /**
-     * < Flag to indicate whether to force the job to restart even if non-restartable conditions exist. These conditions are operating system specific. Equivalent to brestart() -f command line option existence.
-     */
-    public static final int SUB_RESTART_FORCE = 0x1000;
-    /**
-     * < Flag to indicate restart of a
-     * checkpointed job. Only jobs that have been successfully checkpointed
-     * can be restarted. Jobs are re-submitted and assigned a new job ID.
-     * By default, jobs are restarted with the same output file, file
-     * transfer specifications, job name, window signal value, checkpoint
-     * directory and period, and rerun options as the original job. To
-     * restart a job on another host, both hosts must be binary compatible,
-     * run the same OS version, have access to the executable, have access
-     * to all open files (LSF must locate them with an absolute path name),
-     * and have access to the checkpoint directory. Equivalent to bsub -k
-     * command line option existence.
-     */
-    public static final int SUB_RESTART = 0x2000;
-    /**
-     * < Indicates the job is re-runnable.
-     * If the execution host of the job is considered down, the batch
-     * system will re-queue this job in the same job queue, and re-run
-     * it from the beginning when a suitable host is found. Everything
-     * will be as if it were submitted as a new job, and a new job ID will
-     * be assigned. The user who submitted the failed job will receive a
-     * mail notice of the job failure, requeueing of the job, and the
-     * new job ID.
-     * <p/>
-     * For a job that was checkpointed before the execution host went down,
-     * the job will be restarted from the last checkpoint. Equivalent to
-     * bsub -r command line option existence.
-     */
-    public static final int SUB_RERUNNABLE = 0x4000;
-    /**
-     * < Flag to indicate sigValue parameter
-     * has data. Sends a signal as the queue window closes.
-     */
-    public static final int SUB_WINDOW_SIG = 0x8000;
-    /**
-     * < Flag to indicate hostSpec parameter
-     * has data.
-     */
-    public static final int SUB_HOST_SPEC = 0x10000;
-    /**
-     * < Flag to indicate dependCond parameter
-     * has data. Equivalent to bsub -w command line option existence.
-     */
-    public static final int SUB_DEPEND_COND = 0x20000;
-    /**
-     * < Flag to indicate resReq parameter
-     * has data. Equivalent to bsub -R command line option existence.
-     */
-    public static final int SUB_RES_REQ = 0x40000;
-    /**
-     * < Flag to indicate nxf parameter and structure xf have data.
-     */
-    public static final int SUB_OTHER_FILES = 0x80000;
-    /**
-     * < Flag to indicate preExecCmd
-     * parameter has data. Equivalent to bsub -E command line option
-     * existence.
-     */
-    public static final int SUB_PRE_EXEC = 0x100000;
-    /**
-     * < Equivalent to bsub -L command line option existence.
-     */
-    public static final int SUB_LOGIN_SHELL = 0x200000;
-    /**
-     * < Flag to indicate mailUser parameter has data.
-     */
-    public static final int SUB_MAIL_USER = 0x400000;
-    /**
-     * < Flag to indicate newCommand parameter has data. Equivalent to bmod bsub_options existence.
-     */
-    public static final int SUB_MODIFY = 0x800000;
-    /**
-     * < Flag to indicate modify option once.
-     */
-    public static final int SUB_MODIFY_ONCE = 0x1000000;
-    /**
-     * < Flag to indicate ProjectName
-     * parameter has data . Equivalent to bsub -P command line option
-     * existence.
-     */
-    public static final int SUB_PROJECT_NAME = 0x2000000;
-    /**
-     * < Indicates that the job is submitted
-     * as a batch interactive job. When this flag is given, \ref lsb_submit
-     * does not return unless an error occurs during the submission process.
-     * When the job is started, the user can interact with the job's
-     * standard input and output via the terminal. See the -I option
-     * in bsub for the description of a batch interactive job. Unless
-     * the SUB_PTY flag is specified, the job will run without a
-     * pseudo-terminal. Equivalent to bsub -I command line option.
-     */
-    public static final int SUB_INTERACTIVE = 0x4000000;
-    /**
-     * < Requests pseudo-terminal support
-     * for a job submitted with the SUB_INTERACTIVE flag. This flag is
-     * ignored if SUB_INTERACTIVE is not specified. A pseudo-terminal
-     * is required to run some applications (such as: vi). Equivalent to
-     * bsub -Ip command line option.
-     */
-    public static final int SUB_PTY = 0x8000000;
-    /**< Requests pseudo-terminal shell
-     *  mode support for a job submitted with the SUB_INTERACTIVE and
-     *  SUB_PTY flags. This flag is ignored if SUB_INTERACTIVE and SUB_PTY
-     *  are not specified. This flag should be specified for submitting
-     *  interactive shells, or applications which redefine the ctrl-C and
-     *  ctrl-Z keys (such as: jove). Equivalent to bsub -Is
-     *  command line option. */
-    public static final int SUB_PTY_SHELL = 0x10000000;
-
-    /**
-     * < Exception handler for job.
-     */
-    public static final int SUB_EXCEPT = 0x20000000;
-
-    /**
-     * < Specifies time_event.
-     */
-    public static final int SUB_TIME_EVENT = 0x40000000;
-/* the last bit 0x80000000 is reserved for internal use */
-
-    /**
-     * \addtogroup lsb_submit_options2 lsb_submit_options2
-     * define statements used by \ref lsb_submit.
-     */
-
-    /**< Hold the job after it is submitted. The job will be in PSUSP status. Equivalent to bsub -H command line option. */
-    public static final int SUB2_HOLD = 0x01;
-
-    /**
-     * < New cmd for bmod.
-     */
-    public static final int SUB2_MODIFY_CMD = 0x02;
-
-    /**//* Removed access to SUB2_BSUB_BLOCK since it exits the process (including the JVM) with the exit code of the submitted job. -kshakir December 14, 2010
-     * < Submit a job in a synchronous
-     * mode so that submission does not return until the job terminates.
-     * Note once this flag is set, the \ref lsb_submit will never return if
-     * the job is accepted by LSF. Programs that wishes to know the status
-     * of the submission needs to fork, with the child process invoking the
-     * API call in the blocking mode and the parent process wait on the
-     * child process (see wait() for details.
-     */
-    //public static final int SUB2_BSUB_BLOCK = 0x04;
-
-    /**
-     * < Submit from NT.
-     */
-    public static final int SUB2_HOST_NT = 0x08;
-
-    /**
-     * < Submit fom UNIX.
-     */
-    public static final int SUB2_HOST_UX = 0x10;
-
-    /**
-     * < Submit to a chkpntable queue.
-     */
-    public static final int SUB2_QUEUE_CHKPNT = 0x20;
-
-    /**
-     * < Submit to a rerunnable queue.
-     */
-    public static final int SUB2_QUEUE_RERUNNABLE = 0x40;
-
-    /**
-     * < Spool job command.
-     */
-    public static final int SUB2_IN_FILE_SPOOL = 0x80;
-
-    /**
-     * < Inputs the specified file with spooling
-     */
-    public static final int SUB2_JOB_CMD_SPOOL = 0x100;
-
-    /**
-     * < Submits job with priority.
-     */
-    public static final int SUB2_JOB_PRIORITY = 0x200;
-
-    /**
-     * < Job submitted without -n, use queue's default proclimit
-     */
-    public static final int SUB2_USE_DEF_PROCLIMIT = 0x400;
-
-    /**
-     * < bmod -c/-M/-W/-o/-e
-     */
-    public static final int SUB2_MODIFY_RUN_JOB = 0x800;
-
-    /**
-     * < bmod options only to pending jobs
-     */
-    public static final int SUB2_MODIFY_PEND_JOB = 0x1000;
-
-    /**
-     * < Job action warning time. Equivalent to bsub or bmod -wt.
-     */
-    public static final int SUB2_WARNING_TIME_PERIOD = 0x2000;
-
-    /**
-     * < Job action to be taken before a job control action occurs. Equivalent to bsub or bmod -wa.
-     */
-    public static final int SUB2_WARNING_ACTION = 0x4000;
-
-    /**
-     * < Use an advance reservation created with the brsvadd command. Equivalent to bsub -U.
-     */
-    public static final int SUB2_USE_RSV = 0x8000;
-
-    /**
-     * < Windows Terminal Services job
-     */
-    public static final int SUB2_TSJOB = 0x10000;
-
-/* SUB2_LSF2TP is obsolete in Eagle. We keep it here for backward
-*  compatibility */
-
-    /**
-     * < Parameter is deprecated
-     */
-    public static final int SUB2_LSF2TP = 0x20000;
-
-    /**
-     * < Submit into a job group
-     */
-    public static final int SUB2_JOB_GROUP = 0x40000;
-
-    /**
-     * < Submit into a service class
-     */
-    public static final int SUB2_SLA = 0x80000;
-
-    /**
-     * < Submit with -extsched options
-     */
-    public static final int SUB2_EXTSCHED = 0x100000;
-
-    /**
-     * < License Scheduler project
-     */
-    public static final int SUB2_LICENSE_PROJECT = 0x200000;
-
-    /**
-     * < Overwrite the standard output of the job. Equivalent to bsub -oo.
-     */
-    public static final int SUB2_OVERWRITE_OUT_FILE = 0x400000;
-
-    /**
-     * < Overwrites the standard error output of the job. Equivalent to bsub -eo.
-     */
-    public static final int SUB2_OVERWRITE_ERR_FILE = 0x800000;
-
-/* Following are for symphony submission definition.
-*  Note that SYM_GRP is an LSF job, which represents a symphony group.
- */
-
-    /**
-     * < (symphony) session job
-     */
-    public static final int SUB2_SSM_JOB = 0x1000000;
-
-    /**
-     * < (symphony) symphony job
-     */
-    public static final int SUB2_SYM_JOB = 0x2000000;
-
-    /**
-     * < (symphony) service(LSF) job
-     */
-    public static final int SUB2_SRV_JOB = 0x4000000;
-
-    /**
-     * < (symphony) "group" job
-     */
-    public static final int SUB2_SYM_GRP = 0x8000000;
-
-    /**
-     * < (symphony) symphony job has child symphony job
-     */
-    public static final int SUB2_SYM_JOB_PARENT = 0x10000000;
-
-    /**
-     * < (symphony) symphony job has real time feature
-     */
-    public static final int SUB2_SYM_JOB_REALTIME = 0x20000000;
-
-    /**
-     * < (symphony) symphony job has dummy feature to hold all persistent service jobs.
-     */
-    public static final int SUB2_SYM_JOB_PERSIST_SRV = 0x40000000;
-
-    /**
-     * < Persistent session job
-     */
-    public static final int SUB2_SSM_JOB_PERSIST = 0x80000000;
-
-    /**
-     *  \addtogroup lsb_submit_options3 lsb_submit_options3
-     *  define statements used by \ref lsb_submit.
-     */
-
-    /**
-     * < Application profile name. Equivalent to bsub -app.
-     */
-    public static final int SUB3_APP = 0x01;
-
-    /**
-     * < Job rerunable because of application profile
-     */
-    public static final int SUB3_APP_RERUNNABLE = 0x02;
-
-    /**
-     * < Job modified with absolute priority. Equivalent to bmod -aps.
-     */
-    public static final int SUB3_ABSOLUTE_PRIORITY = 0x04;
-
-    /**
-     * < Submit into a default job group. Equivalent to bsub -g.
-     */
-    public static final int SUB3_DEFAULT_JOBGROUP = 0x08;
-
-    /**
-     * < Run the specified post-execution command on the execution host after the job finishes. Equivalent to bsub -Ep.
-     */
-    public static final int SUB3_POST_EXEC = 0x10;
-    /**
-     * < Pass user shell limits to execution host. Equivalent to bsub -ul.
-     */
-    public static final int SUB3_USER_SHELL_LIMITS = 0x20;
-    /**
-     * < Current working directory specified on the command line with bsub -cwd
-     */
-    public static final int SUB3_CWD = 0x40;
-    /**< Runtime estimate. Equivalent to bsub -We. Use in conjunction with SUB3_RUNTIME_ESTIMATION_ACC and SUB3_RUNTIME_ESTIMATION_PERC. */
-    public static final int SUB3_RUNTIME_ESTIMATION = 0x80;
-
-    /**
-     * < Job is not rerunnable. Equivalent to bsub -rn.
-     */
-    public static final int SUB3_NOT_RERUNNABLE = 0x100;
-
-    /**
-     * < Job level requeue exit values.
-     */
-    public static final int SUB3_JOB_REQUEUE = 0x200;
-    /**
-     * < Initial checkpoint period. Equivalent to bsub -k initial_checkpoint_period.
-     */
-    public static final int SUB3_INIT_CHKPNT_PERIOD = 0x400;
-    /**< Job migration threshold. Equivalent to bsub -mig migration_threshold. */
-    public static final int SUB3_MIG_THRESHOLD = 0x800;
-
-    /**
-     * < Checkpoint dir was set by application profile
-     */
-    public static final int SUB3_APP_CHKPNT_DIR = 0x1000;
-    /**
-     * < Value of BSUB_CHK_RESREQ environment variable, used for select section resource requirement string syntax checking with bsub -R. bsub only checks the resreq syntax.
-     */
-    public static final int SUB3_BSUB_CHK_RESREQ = 0x2000;
-    /**
-     * < Runtime estimate that is the accumulated run time plus the runtime estimate. Equivalent to bmod -We+. Use in conjunction with SUB3_RUNTIME_ESTIMATION.
-     */
-    public static final int SUB3_RUNTIME_ESTIMATION_ACC = 0x4000;
-    /**
-     * < Runtime estimate in percentage of completion. Equivalent to bmod -Wep. Two digits after the decimal point are suported. The highest eight bits of runtimeEstimation in the submit structure are used for the integer; the remaining bits are used for the fraction. Use in conjunction with SUB3_RUNTIME_ESTIMATION.
-     */
-    public static final int SUB3_RUNTIME_ESTIMATION_PERC = 0x8000;
-
-    /**
-     * < Protects the sessions of interactive jobs with SSH encryption. Equivalent to bsub -IS|-ISp|-ISs.
-     */
-    public static final int SUB3_INTERACTIVE_SSH = 0x10000;
-    /**< Protect the sessions of interactive x-window job with SSH encryption. Equivalent to bsub -IX.*/
-    public static final int SUB3_XJOB_SSH = 0x20000;
-
-    /**
-     * < If set the submitted job is auto-resizable
-     */
-    public static final int SUB3_AUTO_RESIZE = 0x40000;
-
-    /**
-     * < If set, the resize notify cmd specified
-     */
-    public static final int SUB3_RESIZE_NOTIFY_CMD = 0x80000;
-
-
-    /**
-     * < Job broker bulk submit
-     */
-    public static final int SUB3_BULK_SUBMIT = 0x100000;
-
-    /**
-     * < tty mode for interactive job
-     */
-    public static final int SUB3_INTERACTIVE_TTY = 0x200000;
-
-    /**
-     * < Job submitted from floating client
-     */
-    public static final int SUB3_FLOATING_CLIENT = 0x400000;
-
-    /**
-     * < ssh X11 forwarding (bsub -XF)
-     */
-    public static final int SUB3_XFJOB = 0x800000;
-
-    /**
-     * < ssh X11 forwarding (bsub -XF) without bsub -I...
-     */
-    public static final int SUB3_XFJOB_EXCLUSIVE = 0x1000000;
-
-    /**
-     * < Job description.
-     */
-    public static final int SUB3_JOB_DESCRIPTION = 0x2000000;
-
-    /**
-     * < Job submitted from floating client
-     */
-    public static final int SUB3_SIMULATION = 0x4000000;
-
-/* Check whether a job is symphony job. These macros should be used by all
-*  components, including ("submit" actually):
-*    - mbatchd: jData->submitReq
-*    - sbatchd: jobCard->jobSpecs
-*    - API: lsb_submit() and lsb_readjobinfo()
- */
-
-    public static boolean IS_SSM_JOB(int option) {
-        return JNAUtils.toBoolean((option) & SUB2_SSM_JOB);
-    }
-
-    public static boolean IS_SSM_JOB_PERSIST(int option) {
-        return JNAUtils.toBoolean((option) & SUB2_SSM_JOB_PERSIST);
-    }
-
-    public static boolean IS_SYM_JOB(int option) {
-        return JNAUtils.toBoolean((option) & SUB2_SYM_JOB);
-    }
-
-    public static boolean IS_SYM_JOB_PARENT(int option) {
-        return JNAUtils.toBoolean((option) & SUB2_SYM_JOB_PARENT);
-    }
-
-    public static boolean IS_SYM_JOB_REALTIME(int option) {
-        return JNAUtils.toBoolean((option) & SUB2_SYM_JOB_REALTIME);
-    }
-
-    public static boolean IS_SYM_JOB_PERSIST_SRV(int option) {
-        return JNAUtils.toBoolean((option) & SUB2_SYM_JOB_PERSIST_SRV);
-    }
-
-    public static boolean IS_SRV_JOB(int option) {
-        return JNAUtils.toBoolean((option) & SUB2_SRV_JOB);
-    }
-
-    public static boolean IS_SYM_GRP(int option) {
-        return JNAUtils.toBoolean((option) & SUB2_SYM_GRP);
-    }
-
-    public static boolean IS_SYM_JOB_OR_SYM_GRP (int option)  { return (IS_SYM_JOB(option) || IS_SYM_GRP(option)); }
-/* symphony job for which resource usage should be collected */
-    public static boolean IS_REAL_SYM_JOB (int option)  { return (IS_SYM_JOB(option) && !IS_SYM_JOB_PERSIST_SRV(option)); }
-
-    public static boolean IS_WLM_JOB (int option)  { return (IS_SSM_JOB(option) || IS_SYM_JOB(option) || IS_SRV_JOB(option) || IS_SYM_GRP(option)); }
-    public static boolean IS_BATCH_JOB (int option)  { return (!IS_WLM_JOB(option)); }
-/* job for which resource usage should be collected */
-    public static boolean IS_JOB_FOR_ACCT (int option)  { return (IS_REAL_SYM_JOB(option) || IS_BATCH_JOB(option)); }
-
-    public static boolean IS_JOB_FOR_SYM (int option)  { return (IS_SYM_JOB(option) || IS_SRV_JOB(option) || IS_SYM_GRP(option)); }
-
-/* Don't send IS_SYM_JOB/IS_SYM_GRP jobs to scheduler;
-*  neither publish events nor brun the job allowed.
- */
-    // NOTE: Don't know what this jp struct is.
-    //public static boolean IS_SYM_JOB_OR_GRP (int jp)   { return (   (jp) != null && (jp)->shared != null && (  IS_SYM_JOB((jp)->shared->jobBill.options2) ||IS_SYM_GRP((jp)->shared->jobBill.options2))); }
-
-/* name of the lost and find queue and host */
-    public static final String LOST_AND_FOUND = "lost_and_found";
-
-    public static final int DELETE_NUMBER = -2;
-    public static final int DEL_NUMPRO = LibLsf.INFINIT_INT;
-    public static final int DEFAULT_NUMPRO = LibLsf.INFINIT_INT - 1;
-    /**
-     *  \addtogroup calendar_command  calendar_command
-     *  options  for user calendar commands
-     */
-
-    /**
-     * < Add calenda
-     */
-    public static final int CALADD = 1;
-
-    /**
-     * < Modify calenda
-     */
-    public static final int CALMOD = 2;
-
-    /**
-     * < Delete calenda
-     */
-    public static final int CALDEL = 3;
-
-    /**
-     * < Undelete calenda
-     */
-    public static final int CALUNDEL = 4;
-
-    /**
-     * < Calenda occs
-     */
-    public static final int CALOCCS = 5;
-
-/* for user event commands */
-    public static final int EVEADD = 1;
-    public static final int EVEMOD = 2;
-    public static final int EVEDEL = 3;
-
-    public static final int PLUGIN_REQUEUE = 126;
-    public static final int PLUGIN_EXIT = 125;
-
-    /**
-     * \brief  xFile
-     */
-    public static class xFile extends Structure {
-        public static class ByReference extends xFile implements Structure.ByReference {}
-        public static class ByValue extends xFile implements Structure.ByValue {}
-        public xFile() {}
-        public xFile(Pointer p) { super(p); read(); }
-
-
-        /**
-         * < Pathname at submission host
-         */
-        public String subFn;
-
-        /**
-         * < Pathname at execution host
-         */
-        public String execFn;
-        /**
-         *  \addtogroup defs_lsb_XF_OP defs_lsb_XF_OP
-         *  options  xFile operation
-         */
-
-        /**
-         * < Transfer files from submit peer to  execution peer
-         */
-        public static final int XF_OP_SUB2EXEC = 0x1;
-
-        /**
-         * < Transfer files from execution peer to  submit peer
-         */
-        public static final int XF_OP_EXEC2SUB = 0x2;
-
-        /**
-         * < Transfer files from submit peer to  execution peer with appending mode
-         */
-        public static final int XF_OP_SUB2EXEC_APPEND = 0x4;
-
-        /**
-         * < Transfer files from execution peer to  submit peer with appending mode
-         */
-        public static final int XF_OP_EXEC2SUB_APPEND = 0x8;
-        public static final int XF_OP_URL_SOURCE = 0x10;
-
-        /**
-         * < Defined in \ref defs_lsb_XF_OP
-         */
-        public int options;
-    }
-
-
-
-    /* For NQS */
-    public static final int NQS_ROUTE = 0x1;
-    public static final int NQS_SIG = 0x2;
-    public static final int NQS_SERVER = 0x4;
-
-
-    public static final int MAXNFA = 1024;
-    public static final int MAXTAG = 10;
-
-    public static final int OKP = 1;
-    public static final int NOP = 0;
-
-    public static final int CHR = 1;
-    public static final int ANY = 2;
-    public static final int CCL = 3;
-    public static final int BOL = 4;
-    public static final int EOL = 5;
-    public static final int BOT = 6;
-    public static final int EOT = 7;
-    public static final int BOW = 8;
-    public static final int EOW = 9;
-    public static final int REF = 10;
-    public static final int CLO = 11;
-
-    public static final int END = 0;
-
-    /**
-     *  The following defines are not meant to be changeable.
-     *  They are for readability only.
-     */
-
-    public static final int MAXCHR = 128;
-    public static final int CHRBIT = 8;
-    public static final int BITBLK = MAXCHR / CHRBIT;
-    public static final int BLKIND = 0xAA;
-    public static final int BITIND = 0x7;
-
-    public static final int ASCIIB = 0x7F;
-
-    /**
-     *  byte classification table for word boundary operators BOW
-     *  and EOW. the reason for not using ctype macros is that we can
-     *  let the user add into our own table. see re_modw. This table
-     *  is not in the bitset form, since we may wish to extend it in the
-     *  future for other byte classifications.
-     *
-     *   TRUE for 0-9 A-Z a-z _
-     */
-
-    public static final byte[] chrtyp = {
-            0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
-            0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
-            0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
-            0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
-            0, 0, 0, 0, 0, 0, 0, 0, 1, 1,
-            1, 1, 1, 1, 1, 1, 1, 1, 0, 0,
-            0, 0, 0, 0, 0, 1, 1, 1, 1, 1,
-            1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
-            1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
-            1, 0, 0, 0, 0, 1, 0, 1, 1, 1,
-            1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
-            1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
-            1, 1, 1, 0, 0, 0, 0, 0
-    };
-
-    public static int inascii(int x) {
-        return (0x7F & (x));
-    }
-
-    public static int iswordc(int x) {
-        return chrtyp[inascii(x)];
-    }
-
-/*
-*  skip values for CLO XXX to skip past the closure
- */
-
-
-/* [CLO] ANY END ... */
-    public static final int ANYSKIP = 2;
-
-/* [CLO] CHR chr END ... */
-    public static final int CHRSKIP = 3;
-
-/* [CLO] CCL 16bytes END ... */
-    public static final int CCLSKIP = 18;
-
-/*  In LSF7.0.6, we introduce submit_ext structure to support
-*   extended fields for furture added submit options.
-*   Each new options should have a unique key defined here.
-*   The new defined key should be bigger than 1000.
-*   Keys below 1000 are used for internal use.
- */
-
-/* submit_ext test */
-    public static final int JDATA_EXT_TEST = 1001;
-
-/* LSF simulator: simReq */
-    public static final int JDATA_EXT_SIMREQ = 1002;
-
-/* structure for lsb_submit() call */
-
-    /**
-     * \extend submit data structure
-     */
-    public static class submit_ext extends Structure {
-        public static class ByReference extends submit_ext implements Structure.ByReference {}
-        public static class ByValue extends submit_ext implements Structure.ByValue {}
-        public submit_ext() {}
-        public submit_ext(Pointer p) { super(p); read(); }
-
-
-        /**
-         * < number of key value pairs.
-         */
-        public int num;
-
-        /**
-         * < Array of keys of the extended fields.
-         */
-        public Pointer keys;
-
-        /**
-         * < Array of values of the extended fields
-         */
-        public Pointer values;
-    }
-
-
-
-
-    /**
-     * \brief  submit request structure.
-     */
-    public static class submit extends Structure {
-        public static class ByReference extends submit implements Structure.ByReference {}
-        public static class ByValue extends submit implements Structure.ByValue {}
-        public submit() {}
-        public submit(Pointer p) { super(p); read(); }
-
-
-        /**
-         * <  <lsf/lsbatch.h> defines the flags in \ref lsb_submit_options constructed from bits. These flags correspond to some of the options of the bsub command line. Use the bitwise OR to set more than one flag.
-         */
-        public int options;
-
-
-        /**
-         * < Extended bitwise inclusive OR of some of the flags in \ref lsb_submit_options2.
-         */
-        public int options2;
-
-
-        /**
-         * < The job name. If jobName is null, command is used as the job name.
-         */
-        public String jobName;
-
-        /**
-         * < Submit the job to this queue. If queue is null, submit the job to a system default queue.
-         */
-        public String queue;
-
-        /**
-         * < The number of invoker specified candidate hosts for running the job. If numAskedHosts is 0, all qualified hosts will be considered.
-         */
-        public int numAskedHosts;
-
-        /**
-         * < The array of names of invoker specified candidate hosts.  The number of hosts is given by numAskedHosts.
-         */
-        public Pointer askedHosts;
-
-        /**
-         * < The resource requirements of the job. If resReq is null, the batch system will try to obtain resource requirements for command from the remote task lists (see \ref ls_task ). If the task does not appear in the remote task lists, then the default resource requirement is to run on host() of the same type.
-         */
-        public String resReq;
-
-        /**
-         * < Limits on the consumption of system resources by all processes belonging to this job. See getrlimit() for details. If an element of the array is -1, there is no limit for that resource. For the constants used to index the array, see \ref lsb_queueinfo .
-         */
-        public int[] rLimits = new int[LibLsf.LSF_RLIM_NLIMITS];
-
-        /**
-         * < Specify the host model to use for scaling rLimits[LSF_RLIMIT_CPU] and rLimits[LSF_RLIMIT_RUN]. (See \ref lsb_queueinfo). If hostSpec is null, the local host is assumed.
-         */
-        public String hostSpec;
-
-        /**
-         * <  The initial number of processors needed by a (parallel) job. The default is 1.
-         */
-        public int numProcessors;
-
-        /**
-         * < The job dependency condition.
-         */
-        public String dependCond;
-
-        /**
-         * <  Time event string
-         */
-        public String timeEvent;
-
-        /**
-         * <  Dispatch the job on or after beginTime, where beginTime is the number of seconds since 00:00:00 GMT, Jan. 1, 1970 (See time(), ctime()). If beginTime is 0, start the job as soon as possible.
-         */
-        public NativeLong beginTime;
-
-        /**
-         * <  The job termination deadline. If the job is still running at termTime, it will be sent a USR2 signal. If the job does not terminate within 10 minutes after being sent this signal, it will be ended. termTime has the same representation as beginTime. If termTime is 0, allow the job to run until it reaches a resource limit.
-         */
-        public NativeLong termTime;
-
-        /**
-         * < Applies to jobs submitted to a queue that has a run window (See \ref lsb_queueinfo). Send signal sigValue to the job 10 minutes before the run window is going to close. This allows the job to clean up or checkpoint itself, if desired. If the job does not terminate 10 minutes after being sent this signal, it will be suspended.
-         */
-        public int sigValue;
-
-        /**
-         * < The path name of the job's standard input file. If inFile is null, use /dev/null as the default.
-         */
-        public String inFile;
-
-        /**
-         * < The path name of the job's standard output file. If outFile is null, the job's output will be mailed to the submitter
-         */
-        public String outFile;
-
-        /**
-         * < The path name of the job's standard error output file. If errFile is null, the standard error output will be merged with the standard output of the job.
-         */
-        public String errFile;
-
-        /**
-         * < When submitting a job, the command line of the job.   When modifying a job, a mandatory parameter that  should be set to jobId in string format.
-         */
-        public String command;
-
-        /**
-         * < New command line for bmod.
-         */
-        public String newCommand;
-
-        /**
-         * < The job is checkpointable with a period of chkpntPeriod seconds. The value 0 disables periodic checkpointing.
-         */
-        public NativeLong chkpntPeriod;
-
-        /**
-         * < The directory where the chk directory for this job checkpoint files will be created. When a job is checkpointed, its checkpoint files are placed in chkpntDir/chk. chkpntDir can be a relative or absolute path name.
-         */
-        public String chkpntDir;
-
-        /**
-         * < The number of files to transfer.
-         */
-        public int nxf;
-
-        /**
-         * < The array of file transfer specifications. (The xFile structure is defined in <lsf/lsbatch.h>.)
-         */
-        public Pointer /* xFile.ByReference */ xf;
-
-        /**
-         * < The job pre-execution command.
-         */
-        public String preExecCmd;
-
-        /**
-         * < The user that results are mailed to.
-         */
-        public String mailUser;
-
-        /**
-         * < Delete options in options field.
-         */
-        public int delOptions;
-
-        /**
-         * < Extended delete options in options2 field.
-         */
-        public int delOptions2;
-
-        /**
-         * < The name of the project the job will be charged to.
-         */
-        public String projectName;
-
-        /**
-         * < Maximum number of processors required to run the job.
-         */
-        public int maxNumProcessors;
-
-        /**
-         * < Specified login shell used to initialize the execution environment for the job (see the -L option of bsub).
-         */
-        public String loginShell;
-
-        /**
-         * < The name of the LSF user group (see lsb.users) to which the job will belong. (see the -G option of bsub)
-         */
-        public String userGroup;
-
-        /**
-         * < Passes the exception handlers to mbatchd during a job. (see the -X option of bsub). Specifies execption handlers that tell the system how to respond to an exceptional condition for a job. An action is performed when any one of the following exceptions is detected: - \b missched - A job has not been scheduled within the time event specified in the -T option. - \b overrun - A job did not finish in its maximum time (maxtime). - \b underrun - A job finished before it reaches its [...]
-         */
-        public String exceptList;
-
-
-        /**
-         * < User priority for fairshare scheduling.
-         */
-        public int userPriority;
-
-        /**
-         * < Reservation ID for advance reservation.
-         */
-        public String rsvId;
-
-        /**
-         * < Job group under which the job runs.
-         */
-        public String jobGroup;
-
-        /**
-         * < SLA under which the job runs.
-         */
-        public String sla;
-
-        /**
-         * < External scheduler options.
-         */
-        public String extsched;
-
-        /**
-         * < Warning time period in seconds, -1 if unspecified.
-         */
-        public int warningTimePeriod;
-
-        /**
-         * < Warning action, SIGNAL | CHKPNT | command, null if unspecified.
-         */
-        public String warningAction;
-
-        /**
-         * < License Scheduler project name.
-         */
-        public String licenseProject;
-
-        /**
-         * < Extended bitwise inclusive OR of options flags in \ref lsb_submit_options3.
-         */
-        public int options3;
-
-        /**
-         * < Extended delete options in options3 field.
-         */
-        public int delOptions3;
-
-        /**
-         * < Application profile under which the job runs.
-         */
-        public String app;
-
-        /**
-         * < -1 if no -jsdl and -jsdl_strict options. - 0 -jsdl_strict option - 1 -jsdl option
-         */
-        public int jsdlFlag;
-
-        /**
-         * < JSDL filename
-         */
-        public String jsdlDoc;
-
-        /**
-         * < ARM correlator
-         */
-        public Pointer correlator;
-
-        /**
-         * <  Absolute priority scheduling string set by administrators to denote static system APS value or ADMIN factor APS value. This field is ignored by \ref lsb_submit.
-         */
-        public String apsString;
-
-        /**
-         * < Post-execution commands specified by -Ep option of bsub and bmod.
-         */
-        public String postExecCmd;
-
-        /**
-         * < Current working directory specified by -cwd option of bsub and bmod.
-         */
-        public String cwd;
-
-        /**
-         * < Runtime estimate specified by -We option of bsub and bmod.
-         */
-        public int runtimeEstimation;
-
-        /**
-         * < Job-level requeue exit values specified by -Q option of bsub and bmod.
-         */
-        public String requeueEValues;
-
-        /**
-         * < Initial checkpoint period specified by -k option of bsub and bmod.
-         */
-        public int initChkpntPeriod;
-
-        /**
-         * < Job migration threshold specified by -mig option of bsub and bmod.
-         */
-        public int migThreshold;
-
-        /**
-         * < Job resize notification command to be invoked on the first execution host when a resize request has been satisfied.
-         */
-        public String notifyCmd;
-
-        /**
-         * < Job description.
-         */
-        public String jobDescription;
-/* #if defined(LSF_SIMULATOR)
-
-/**< simulation related options */
-        /*public String simReq;*/
-        /* #endif */
-
-        /**
-         * < For new options in future
-         */
-        public submit_ext.ByReference submitExt;
-    }
-
-
-
-
-    /**
-     * \brief submit reply.
-     */
-    public static class submitReply extends Structure {
-        public static class ByReference extends submitReply implements Structure.ByReference {}
-        public static class ByValue extends submitReply implements Structure.ByValue {}
-        public submitReply() {}
-        public submitReply(Pointer p) { super(p); read(); }
-
-
-        /**
-         * < The queue the job was submitted to.
-         */
-        public String queue;
-
-        /**
-         * < DependCond contained badJobId but badJobId does not exist in the system.
-         */
-        public long badJobId;
-
-        /**
-         * < DependCond contained badJobName but badJobName does not exist in the system. If the environment variable BSUB_CHK_RESREQ is set, the value of lsberrno is either LSBE_RESREQ_OK or LSBE_RESREQ_ERR, depending on the result of resource requirement string checking. The badJobName field contains the detailed error message.
-         */
-        public String badJobName;
-
-        /**< If lsberrno is LSBE_BAD_HOST,
-         *  (**askedHosts)[badReqIndx] is not a host known to the system.
-         *  If lsberrno is LSBE_QUEUE_HOST, (**askedHosts)[badReqIndx]
-         *  is not a host used by the specified queue. If lsberrno is
-         *  LSBE_OVER_LIMIT, (*rLimits)[badReqIndx] exceeds the queue's
-         *  limit for the resource. */
-        public int badReqIndx;
-    }
-
-
-
-    /**
-     * \brief  submit migration request.
-     */
-    public static class submig extends Structure {
-        public static class ByReference extends submig implements Structure.ByReference {}
-        public static class ByValue extends submig implements Structure.ByValue {}
-        public submig() {}
-        public submig(Pointer p) { super(p); read(); }
-
-
-        /**
-         * < The job ID of the job to be migrated.
-         */
-        public long jobId;
-
-        /**
-         * < Please refer to \ref lsb_submit_options.
-         */
-        public int options;
-
-        /**
-         * < The number of hosts supplied as candidates  for migration.
-         */
-        public int numAskedHosts;
-
-        /**
-         * < An array of pointers to the names of candidate hosts for migration.
-         */
-        public Pointer askedHosts;
-    }
-
-
-
-/* structure for lsb_addjgrp() call */
-
-    public static class jgrpAdd extends Structure {
-        public static class ByReference extends jgrpAdd implements Structure.ByReference {}
-        public static class ByValue extends jgrpAdd implements Structure.ByValue {}
-        public jgrpAdd() {}
-        public jgrpAdd(Pointer p) { super(p); read(); }
-
-        public String groupSpec;
-        public String timeEvent;
-        public String depCond;
-        public String sla;
-        public int maxJLimit;
-    }
-
-
-
-/* structure for lsb_modjgrp() call */
-
-    public static class jgrpMod extends Structure {
-        public static class ByReference extends jgrpMod implements Structure.ByReference {}
-        public static class ByValue extends jgrpMod implements Structure.ByValue {}
-        public jgrpMod() {}
-        public jgrpMod(Pointer p) { super(p); read(); }
-
-        public String destSpec;
-        public jgrpAdd jgrp;
-    }
-
-
-
-/* structure for lsb_addjgrp() and lsb_modjgrp() call reply */
-
-    public static class jgrpReply extends Structure {
-        public static class ByReference extends jgrpReply implements Structure.ByReference {}
-        public static class ByValue extends jgrpReply implements Structure.ByValue {}
-        public jgrpReply() {}
-        public jgrpReply(Pointer p) { super(p); read(); }
-
-        public String badJgrpName;
-        public int num;
-        public Pointer delJgrpList;
-    }
-
-
-
-    /**
-     * \brief Signal a group of jobs.
-     */
-    public static class signalBulkJobs extends Structure {
-        public static class ByReference extends signalBulkJobs implements Structure.ByReference {}
-        public static class ByValue extends signalBulkJobs implements Structure.ByValue {}
-        public signalBulkJobs() {}
-        public signalBulkJobs(Pointer p) { super(p); read(); }
-
-
-        /**
-         * < Signal type
-         */
-        public int signal;
-
-        /**
-         * < Number of jobs
-         */
-        public int njobs;
-
-        /**
-         * < Jobids list
-         */
-        public Pointer jobs;
-
-        /**
-         * < Flags
-         */
-        public int flags;
-    }
-
-
-
-/* structure for lsb_ctrljgrp() call */
-
-    public static class jgrpCtrl extends Structure {
-        public static class ByReference extends jgrpCtrl implements Structure.ByReference {}
-        public static class ByValue extends jgrpCtrl implements Structure.ByValue {}
-        public jgrpCtrl() {}
-        public jgrpCtrl(Pointer p) { super(p); read(); }
-
-        public String groupSpec;
-        public String userSpec;
-        public int options;
-
-/* JGRP_RELEASE, JGRP_HOLD, JGRP_DEL */
-        public int ctrlOp;
-    }
-
-
-
-
-/* Indicate no change in chkpnt period for lsb_chkpntjob() */
-    public static final int LSB_CHKPERIOD_NOCHNG = -1;
-
-    /**
-     *  \addtogroup chkpnt_job_option  chkpnt_job_option
-     *  checkpoint job options()
-     */
-
-    /**
-     * < Kill process if successfully chkpnted
-     */
-    public static final int LSB_CHKPNT_KILL = 0x1;
-
-    /**
-     * < Force chkpnt even if non-chkpntable conditions exist.
-     */
-    public static final int LSB_CHKPNT_FORCE = 0x2;
-
-    /**
-     * < Copy all regular files in use by the  checkpointed process to the checkpoint directory.
-     */
-    public static final int LSB_CHKPNT_COPY = 0x3;
-
-    /**
-     * < Chkpnt for the purpose of migration
-     */
-    public static final int LSB_CHKPNT_MIG = 0x4;
-
-    /**
-     * < Stop  process if successfully chkpnted
-     */
-    public static final int LSB_CHKPNT_STOP = 0x8;
-
-    /**
-     *  \addtogroup kill_requeue  kill_requeue
-     *  kill and requeue a job options()
-     */
-
-    /**
-     * < Kill then re-queue a job
-     */
-    public static final int LSB_KILL_REQUEUE = 0x10;
-
-/* options for lsb_openjobinfo() */
-    /**
-     *  \addtogroup defs_lsb_openjobinfo  defs_lsb_openjobinfo
-     *  Information options about job.
-     */
-
-    /**
-     * < Reserved user name
-     */
-    public static final String ALL_USERS = "all";
-    /**
-     * \defgroup defs_lsb_openjobinfo_a defs_lsb_openjobinfo_a
-     * defs_lsb_openjobinfo_a is part of defs_lsb_openjobinfo
-     */
-    public static final int ALL_JOB = 0x0001;
-    /**
-     * < Information about all jobs, including unfinished jobs (pending, running or suspended) and recently finished jobs. LSF remembers jobs finished within the preceding period. This period is set by the parameter CLEAN_PERIOD in the lsb.params file. The default is 3600 seconds (1 hour). (See lsb.params). The command line equivalent is bjobs -a./
-     * <p/>
-     * /**< Information about recently finished jobs.
-     */
-    public static final int DONE_JOB = 0x0002;
-
-    /**
-     * < Information about pending jobs.
-     */
-    public static final int PEND_JOB = 0x0004;
-
-    /**
-     * < Information about suspended jobs.
-     */
-    public static final int SUSP_JOB = 0x0008;
-
-    /**
-     * < Information about all unfinished jobs.
-     */
-    public static final int CUR_JOB = 0x0010;
-
-    /**
-     * < Information about the last submitted job.
-     */
-    public static final int LAST_JOB = 0x0020;
-
-    /**
-     * < Information about all running jobs
-     */
-    public static final int RUN_JOB = 0x0040;
-
-    /**
-     * < Information about JobId only.
-     */
-    public static final int JOBID_ONLY = 0x0080;
-
-    /**
-     * < Internal use only.
-     */
-    public static final int HOST_NAME = 0x0100;
-
-    /**
-     * < Exclude pending jobs.
-     */
-    public static final int NO_PEND_REASONS = 0x0200;
-
-    /**
-     * < Return group info structures
-     */
-    public static final int JGRP_INFO = 0x0400;
-
-    /**
-     * < Recursively search job group tree
-     */
-    public static final int JGRP_RECURSIVE = 0x0800;
-
-    /**
-     * < Return job array info structures
-     */
-    public static final int JGRP_ARRAY_INFO = 0x1000;
-
-    /**
-     * < All jobs in the core
-     */
-    public static final int JOBID_ONLY_ALL = 0x02000;
-
-    /**
-     * < All zombie jobs
-     */
-    public static final int ZOMBIE_JOB = 0x04000;
-
-    /**
-     * < Display remote jobs by their submission jobid.
-     */
-    public static final int TRANSPARENT_MC = 0x08000;
-
-    /**
-     * < Exceptional jobs
-     */
-    public static final int EXCEPT_JOB = 0x10000;
-
-    /**
-     * < Display for murex jobs
-     */
-    public static final int MUREX_JOB = 0x20000;
-
-
-    /**
-     * < To symphony UA
-     */
-    public static final int TO_SYM_UA = 0x40000;
-
-    /**
-     * < Only show top-level symphony job
-     */
-    public static final int SYM_TOP_LEVEL_ONLY = 0x80000;
-
-    /**
-     * < For internal use only
-     */
-    public static final int JGRP_NAME = 0x100000;
-
-    /**
-     * < Condensed host group
-     */
-    public static final int COND_HOSTNAME = 0x200000;
-
-    /**
-     * < Called from command, for internal use only
-     */
-    public static final int FROM_BJOBSCMD = 0x400000;
-
-    /**
-     * < -l in command parameter, for internal use only
-     */
-    public static final int WITH_LOPTION = 0x800000;
-
-    /**
-     * < Jobs submitted to aps queue
-     */
-    public static final int APS_JOB = 0x1000000;
-
-    /**
-     * < Information about user group.
-     */
-    public static final int UGRP_INFO = 0x2000000;
-    /** RFC#1531: -G option support*/
-
-    /**
-     * < -WL
-     */
-    public static final int TIME_LEFT = 0x4000000;
-    /**
-     * < Estimated time remaining based on the runtime estimate or runlimit.
-     */
-
-/* -WF*/
-    public static final int FINISH_TIME = 0x8000000;
-    /**
-     * < Estimated finish time based on the runtime estimate or runlimit.
-     */
-
-/* -WP*/
-    public static final int COM_PERCENTAGE = 0x10000000;
-    /**
-     * < Estimated completion percentage based on the runtime estimate or runlimit. If options is 0, default to CUR_JOB.
-     */
-
-/* -ss option */
-    public static final int SSCHED_JOB = 0x20000000;
-
-/* -G option */
-    public static final int KILL_JGRP_RECURSIVE = 0x40000000;
-
-    /**
-     *  \addtogroup group_nodetypes group_nodetypes
-     *  define statements group node types.
-     */
-
-    /**
-     * <  Job
-     */
-    public static final int JGRP_NODE_JOB = 1;
-
-    /**
-     * <  Group
-     */
-    public static final int JGRP_NODE_GROUP = 2;
-
-    /**
-     * <  Array
-     */
-    public static final int JGRP_NODE_ARRAY = 3;
-
-    /**
-     * <  SLA
-     */
-    public static final int JGRP_NODE_SLA = 4;
-
-/* jobId macros */
-    public static final long LSB_MAX_ARRAY_JOBID = 0x0FFFFFFFFL;
-    public static final long LSB_MAX_ARRAY_IDX = 0x07FFFFFFFL;
-    public static final int LSB_MAX_SEDJOB_RUNID = (0x0F);
-    public static long LSB_JOBID (int array_jobId, int array_idx)    { return (((long)array_idx << 32) | array_jobId); }
-    public static int LSB_ARRAY_IDX (long jobId)   { return (((jobId) == -1) ? (0) : (int)(((long)jobId >> 32)  & LSB_MAX_ARRAY_IDX)); }
-    public static int LSB_ARRAY_JOBID (long jobId)  { return (((jobId) == -1) ? (-1) : (int)(jobId)); }
-    //public static int LSB_ARRAY_JOBID (long jobId)  { return (((jobId) == -1) ? (-1) : (int)(jobId & LSB_MAX_ARRAY_JOBID)); }
-
-/* Status of a job group */
-
-    public static final int JGRP_INACTIVE = 0;
-    public static final int JGRP_ACTIVE = 1;
-    public static final int JGRP_UNDEFINED = -1;
-
-    /**
-     *  \addtogroup jobgroup_controltypes jobgroup_controltypes
-     *  define statements job group control types.
-     */
-
-
-    /**
-     * < bgrelease
-     */
-    public static final int JGRP_RELEASE = 1;
-
-    /**
-     * < bghold
-     */
-    public static final int JGRP_HOLD = 2;
-
-    /**
-     * < bgdel
-     */
-    public static final int JGRP_DEL = 3;
-
-    /**
-     *  \addtogroup jobgroup_counterIndex jobgroup_counterIndex
-     *   Following can be used to index  into 'counters' array.
-     */
-
-    /**
-     * < Total jobs in the array
-     */
-    public static final int JGRP_COUNT_NJOBS = 0;
-
-    /**
-     * < Number of pending jobs in the array
-     */
-    public static final int JGRP_COUNT_PEND = 1;
-
-    /**
-     * < Number of held jobs in the array
-     */
-    public static final int JGRP_COUNT_NPSUSP = 2;
-
-    /**
-     * < Number of running jobs in the array
-     */
-    public static final int JGRP_COUNT_NRUN = 3;
-
-    /**
-     * < Number of jobs suspended by the system in the array
-     */
-    public static final int JGRP_COUNT_NSSUSP = 4;
-
-    /**
-     * < Number of jobs suspended by the user in the array
-     */
-    public static final int JGRP_COUNT_NUSUSP = 5;
-
-    /**
-     * < Number of exited jobs in the array
-     */
-    public static final int JGRP_COUNT_NEXIT = 6;
-
-    /**
-     * < Number of successfully completed jobs
-     */
-    public static final int JGRP_COUNT_NDONE = 7;
-
-    /**
-     * < Total slots in the array
-     */
-    public static final int JGRP_COUNT_NJOBS_SLOTS = 8;
-
-    /**
-     * < Number of pending slots in the array
-     */
-    public static final int JGRP_COUNT_PEND_SLOTS = 9;
-
-    /**
-     * < Number of running slots in the array
-     */
-    public static final int JGRP_COUNT_RUN_SLOTS = 10;
-
-    /**
-     * < Number of slots suspended by the system in the array
-     */
-    public static final int JGRP_COUNT_SSUSP_SLOTS = 11;
-
-    /**
-     * < Number of slots suspended by the user in the array
-     */
-    public static final int JGRP_COUNT_USUSP_SLOTS = 12;
-
-    /**
-     * < Number of reserverd slots in the array
-     */
-    public static final int JGRP_COUNT_RESV_SLOTS = 13;
-
-/* job group modification types */
-    public static final int JGRP_MOD_LIMIT = 0x1;
-
-/*the number of counters of job group
-* based on job level
-*/
-    public static final int NUM_JGRP_JOB_COUNTERS = 8;
-/* the number of all counters of job group,
-* including job level and slot level
-*/
-/* {njobs, npend, npsusp, nrun, nssusp nususp, nexit, ndone} */
-    public static final int NUM_JGRP_COUNTERS = 14;
-
-/* job group is created explicitly */
-    public static final int JGRP_CREATE_EXP = 0x01;
-
-/* job group is created implicitly */
-    public static final int JGRP_CREATE_IMP = 0x02;
-/* The LSF job group.
- */
-
-    public static class jgrp extends Structure {
-        public static class ByReference extends jgrp implements Structure.ByReference {}
-        public static class ByValue extends jgrp implements Structure.ByValue {}
-        public jgrp() {}
-        public jgrp(Pointer p) { super(p); read(); }
-
-        public String name;
-        public String path;
-        public String user;
-        public String sla;
-        public int[] counters = new int[NUM_JGRP_COUNTERS];
-        public int maxJLimit;
-    }
-
-
-
-/* Structure for lsb_setjobattr() call */
-
-    public static class jobAttrInfoEnt extends Structure {
-        public static class ByReference extends jobAttrInfoEnt implements Structure.ByReference {}
-        public static class ByValue extends jobAttrInfoEnt implements Structure.ByValue {}
-        public jobAttrInfoEnt() {}
-        public jobAttrInfoEnt(Pointer p) { super(p); read(); }
-
-
-/* id of the job */
-        public long jobId;
-
-/* port number of the job */
-        public short port;
-
-/* first executing host of the job */
-        public byte[] hostname = new byte[LibLsf.MAXHOSTNAMELEN];
-    }
-
-
-
-    /**
-     * \brief  job attribute setting log.
-     */
-    public static class jobAttrSetLog extends Structure {
-        public static class ByReference extends jobAttrSetLog implements Structure.ByReference {}
-        public static class ByValue extends jobAttrSetLog implements Structure.ByValue {}
-        public jobAttrSetLog() {}
-        public jobAttrSetLog(Pointer p) { super(p); read(); }
-
-
-        /**
-         * < The unique ID for the job
-         */
-        public int jobId;
-
-        /**
-         * < Job array index; must be 0 in JOB_NEW
-         */
-        public int idx;
-
-        /**
-         * < The user who requested the action
-         */
-        public int uid;
-
-        /**
-         * < Job attributes
-         */
-        public int port;
-
-        /**
-         * < Name of the host
-         */
-        public String hostname;
-    }
-
-
-
-    /**
-     * \brief  job information head.
-     */
-    public static class jobInfoHead extends Structure {
-        public static class ByReference extends jobInfoHead implements Structure.ByReference {}
-        public static class ByValue extends jobInfoHead implements Structure.ByValue {}
-        public jobInfoHead() {}
-        public jobInfoHead(Pointer p) { super(p); read(); }
-
-
-        /**
-         * < The number of jobs in the connection
-         */
-        public int numJobs;
-
-        /**
-         * < An array of job identification numbers in the conection
-         */
-        public NativeLongByReference jobIds;
-
-        /**
-         * < The number of hosts in the connection
-         */
-        public int numHosts;
-
-        /**
-         * < An array of host names in the connection
-         */
-        public Pointer hostNames;
-
-        /**
-         * < The number of clusters in the connection
-         */
-        public int numClusters;
-
-        /**
-         * < An array of cluster names in the connection
-         */
-        public Pointer clusterNames;
-
-        /**
-         * < The number of remoteHosts in the connection
-         */
-        public IntByReference numRemoteHosts;
-
-        /**
-         * < An array of remoteHost names in the connection
-         */
-        public PointerByReference remoteHosts;
-    }
-
-
-
-    /**
-     * \brief job Information head extent
-     */
-    public static class jobInfoHeadExt extends Structure {
-        public static class ByReference extends jobInfoHeadExt implements Structure.ByReference {}
-        public static class ByValue extends jobInfoHeadExt implements Structure.ByValue {}
-        public jobInfoHeadExt() {}
-        public jobInfoHeadExt(Pointer p) { super(p); read(); }
-
-
-        /**
-         * <  Job Information header
-         */
-        public jobInfoHead.ByReference jobInfoHead;
-
-        /**
-         * <  Group Information returned
-         */
-        public Pointer groupInfo;
-    }
-
-
-
-    /**
-     * \brief structure reserveItem
-     */
-    public static class reserveItem extends Structure {
-        public static class ByReference extends reserveItem implements Structure.ByReference {}
-        public static class ByValue extends reserveItem implements Structure.ByValue {}
-        public reserveItem() {}
-        public reserveItem(Pointer p) { super(p); read(); }
-
-
-        /**
-         * < Name of the resource to reserve.
-         */
-        public String resName;
-
-        /**
-         * < The number of hosts to reserve this resource.
-         */
-        public int nHost;
-
-        /**
-         * < Amount of reservation is made on each host. Some hosts may reserve 0.
-         */
-        public FloatByReference value;
-
-        /**
-         * < Flag of shared or host-base resource
-         */
-        public int shared;
-    }
-
-
-
-    /**
-     * \brief  job information entry.
-     */
-    public static class jobInfoEnt extends Structure {
-        public static class ByReference extends jobInfoEnt implements Structure.ByReference {}
-        public static class ByValue extends jobInfoEnt implements Structure.ByValue {}
-        public jobInfoEnt() {}
-        public jobInfoEnt(Pointer p) { super(p); read(); }
-
-
-        /**
-         * < The job ID that the LSF system assigned to the job.
-         */
-        public long jobId;
-
-        /**
-         * < The name of the user who submitted the job.
-         */
-        public String user;
-
-        /**
-         * < The current status of the job.Possible values areshown in job_states
-         */
-        public int status;
-
-        /**
-         * < Pending or suspending reasons of the job
-         */
-        public IntByReference reasonTb;
-
-        /**
-         * < Length of reasonTb[]
-         */
-        public int numReasons;
-
-        /**
-         * < The reason a job is pending or suspended.
-         */
-        public int reasons;
-
-        /**
-         * < The reason a job is pending or suspended. If status is JOB_STAT_PEND, the values of reasons and subreasons are explained by \ref lsb_pendreason. If status is JOB_STAT_PSUSP, the values of reasons and subreasons are explained by \ref lsb_suspreason.   When reasons is PEND_HOST_LOAD or SUSP_LOAD_REASON,  subreasons indicates the load indices that are out of bounds. If reasons is PEND_HOST_LOAD, subreasons is the same as busySched in the hostInfoEnt structure; if reasons is SUS [...]
-         */
-        public int subreasons;
-
-        /**
-         * < The job process ID.
-         */
-        public int jobPid;
-
-        /**
-         * < The time the job was submitted, in seconds since 00:00:00 GMT, Jan. 1, 1970.
-         */
-        public NativeLong submitTime;
-
-        /**
-         * < Time when job slots are reserved
-         */
-        public NativeLong reserveTime;
-
-        /**
-         * < The time that the job started running, if it has been dispatched.
-         */
-        public NativeLong startTime;
-
-        /**
-         * < Job's predicted start time
-         */
-        public NativeLong predictedStartTime;
-
-        /**
-         * < The termination time of the job, if it has completed.
-         */
-        public NativeLong endTime;
-
-        /**
-         * < Last time event
-         */
-        public NativeLong lastEvent;
-
-        /**
-         * < Next time event
-         */
-        public NativeLong nextEvent;
-
-        /**
-         * < Duration time (minutes)
-         */
-        public int duration;
-
-        /**
-         * < CPU time consumed by the job
-         */
-        public float cpuTime;
-
-        /**
-         * < The file creation mask when the job was submitted.
-         */
-        public int umask;
-
-        /**
-         * < The current working directory when the job was submitted.
-         */
-        public String cwd;
-
-        /**
-         * < Home directory on submission host.
-         */
-        public String subHomeDir;
-
-        /**
-         * < The name of the host from which the job was  submitted.
-         */
-        public String fromHost;
-
-        /**
-         * < The array of names of hosts on which the job executes.
-         */
-        public Pointer exHosts;
-
-        /**
-         * < The number of hosts on which the job executes.
-         */
-        public int numExHosts;
-
-        /**
-         * < The CPU factor for normalizing CPU and wall clock time limits.
-         */
-        public float cpuFactor;
-
-        /**
-         * < The number of load indices in the loadSched and loadStop arrays.
-         */
-        public int nIdx;
-
-        /**
-         * < The values in the loadSched array specify the thresholds for the corresponding load indices. Only if the current values of all specified load indices of a host are within (below or above,  depending on the meaning of the load index) their corresponding thresholds may the suspended job be resumed on this host.  For an explanation of the entries in the loadSched, see \ref lsb_hostinfo.
-         */
-        public FloatByReference loadSched;
-
-        /**
-         * < The values in the loadStop array specify the thresholds for job suspension; if any of the current load index values of the host crosses its threshold, the job will be suspended.  For an explanation of the entries in the loadStop, see \ref lsb_hostinfo.
-         */
-        public FloatByReference loadStop;
-
-        /**
-         * < Structure for \ref lsb_submit call.
-         */
-        public submit submit;
-
-        /**
-         * < Job exit status.
-         */
-        public int exitStatus;
-
-        /**
-         * < Mapped UNIX user ID on the execution host.
-         */
-        public int execUid;
-
-        /**
-         * < Home directory for the job on the execution host.
-         */
-        public String execHome;
-
-        /**
-         * < Current working directory for the job on the execution host.
-         */
-        public String execCwd;
-
-        /**
-         * < Mapped user name on the execution host.
-         */
-        public String execUsername;
-
-        /**
-         * < Time of the last job resource usage update.
-         */
-        public NativeLong jRusageUpdateTime;
-
-        /**
-         * < Contains resource usage information for the job.
-         */
-        public LibLsf.jRusage runRusage;
-
-        /**
-         * < Job type.N_JOB, N_GROUP, N_HEAD
-         */
-        public int jType;
-
-        /**
-         * < The parent job group of a job or job group.
-         */
-        public String parentGroup;
-
-        /**
-         * < If jType is JGRP_NODE_GROUP, then it is the job group name. Otherwise, it is thejob name.
-         */
-        public String jName;
-
-        /**
-         * < Index into the counter array, only used for job arrays. Possible index values are shown in \ref jobgroup_counterIndex
-         */
-        public int[] counter = new int[NUM_JGRP_COUNTERS];
-
-        /**
-         * < Service port of the job.
-         */
-        public short port;
-
-        /**
-         * < Job dynamic priority
-         */
-        public int jobPriority;
-
-        /**
-         * < The number of external messages in the job.
-         */
-        public int numExternalMsg;
-
-        /**
-         * < This structure contains the information required to define an external message reply.
-         */
-        public Pointer externalMsg;
-
-        /**
-         * < MultiCluster cluster ID. If clusterId is greater than or equal to 0, the job is a pending remote job, and \ref lsb_readjobinfo checks for host_name\@cluster_name. If host name is needed, it should be found in  jInfoH->remoteHosts. If the remote host name is not available, the constant string remoteHost is used.
-         */
-        public int clusterId;
-
-        /**
-         * <  Detail reason field
-         */
-        public String detailReason;
-
-        /**
-         * < Idle factor for job exception handling. If the job idle factor is less than the specified threshold, LSF invokes LSF_SERVERDIR/eadmin to trigger the action for a job idle exception.
-         */
-        public float idleFactor;
-
-        /**
-         * < Job exception handling mask
-         */
-        public int exceptMask;
-
-
-        /**
-         * < Placement information of LSF HPC jobs.Placement information of LSF HPC jobs.Arbitrary information of a job stored as a string currently used by rms_rid  and rms_alloc
-         */
-        public String additionalInfo;
-
-        /**
-         * < Job termination reason. See lsbatch.h.
-         */
-        public int exitInfo;
-
-        /**
-         * < Job warning time period in seconds; -1 if unspecified.
-         */
-        public int warningTimePeriod;
-
-        /**
-         * < Warning action, SIGNAL | CHKPNT |command, null if unspecified
-         */
-        public String warningAction;
-
-        /**
-         * < SAAP charged for job
-         */
-        public String chargedSAAP;
-
-        /**
-         * < The rusage satisfied at job runtime
-         */
-        public String execRusage;
-
-        /**
-         * < The time when advance reservation expired or was deleted.
-         */
-        public NativeLong rsvInActive;
-
-        /**
-         * < The number of licenses reported from License Scheduler.
-         */
-        public int numLicense;
-
-        /**
-         * < License Scheduler license names.
-         */
-        public Pointer licenseNames;
-
-        /**
-         * < Absolute priority scheduling (APS) priority value.
-         */
-        public float aps;
-
-        /**
-         * < Absolute priority scheduling (APS) string set by administrators to denote static system APS value
-         */
-        public float adminAps;
-
-        /**
-         * < The real runtime on the execution host.
-         */
-        public int runTime;
-
-        /**
-         * < How many kinds of resource are reserved by this job
-         */
-        public int reserveCnt;
-
-        /**
-         * < Detail reservation information for each kind of resource
-         */
-        public Pointer /* reserveItem.ByReference */ items;
-
-        /**
-         * < Absolute priority scheduling (APS) string set by administrators to denote ADMIN factor APS value.
-         */
-        public float adminFactorVal;
-
-        /**
-         * < Pending resize min. 0, if no resize pending.
-         */
-        public int resizeMin;
-
-        /**
-         * < Pending resize max. 0, if no resize pending
-         */
-        public int resizeMax;
-
-        /**
-         * < Time when pending request was issued
-         */
-        public NativeLong resizeReqTime;
-
-        /**
-         * < Number of hosts when job starts
-         */
-        public int jStartNumExHosts;
-
-        /**
-         * < Host list when job starts
-         */
-        public Pointer jStartExHosts;
-
-        /**
-         * < Last time when job allocation changed
-         */
-        public NativeLong lastResizeTime;
-    }
-
-
-/* the bit set for jobInfoEnt->exceptMask */
-    public static final int J_EXCEPT_OVERRUN = 0x02;
-    public static final int J_EXCEPT_UNDERUN = 0x04;
-    public static final int J_EXCEPT_IDLE = 0x80;
-    public static final int J_EXCEPT_RUNTIME_EST_EXCEEDED = 0x100;
-
-/* exception showed by bjobs -l and bacct -l*/
-    public static final String OVERRUN = "overrun";
-    public static final String UNDERRUN = "underrun";
-    public static final String IDLE = "idle";
-    public static final String SPACE = "  ";
-    public static final String RUNTIME_EST_EXCEEDED = "runtime_est_exceeded";
-
-/* LSF7.0 moved jobInfoReq structure definition from
-*  daemonout.h to lsbatch.h. This structure will work
-*  with new API \ref lsb_openjobinfo_req
- */
-
-    /**
-     * \brief  job Information Request
-     */
-    public static class jobInfoReq extends Structure {
-        public static class ByReference extends jobInfoReq implements Structure.ByReference {}
-        public static class ByValue extends jobInfoReq implements Structure.ByValue {}
-        public jobInfoReq() {}
-        public jobInfoReq(Pointer p) { super(p); read(); }
-
-
-        /**
-         * < Options defined in \ref defs_lsb_openjobinfo
-         */
-        public int options;
-
-        /**
-         * < Name of user whose jobs to be checked
-         */
-        public String userName;
-
-        /**
-         * < Job id, 0 means all jobs
-         */
-        public long jobId;
-
-        /**
-         * < Job name
-         */
-        public String jobName;
-
-        /**
-         * < Queue name
-         */
-        public String queue;
-
-        /**
-         * < Check jobs running on this host
-         */
-        public String host;
-
-        /**
-         * < Job application
-         */
-        public String app;
-
-        /**
-         * < Job description
-         */
-        public String jobDescription;
-
-        /**
-         * < For new options in future
-         */
-        public submit_ext.ByReference submitExt;
-    }
-
-
-
-    /**
-     * \brief  user information entry.
-     */
-    public static class userInfoEnt extends Structure {
-        public static class ByReference extends userInfoEnt implements Structure.ByReference {}
-        public static class ByValue extends userInfoEnt implements Structure.ByValue {}
-        public userInfoEnt() {}
-        public userInfoEnt(Pointer p) { super(p); read(); }
-
-
-        /**
-         * < Name of the user or user group
-         */
-        public String user;
-
-        /**
-         * < The maximum number of job slots the user or user group can use on each processor. The job slots can be used by started jobs or reserved for PEND jobs.
-         */
-        public float procJobLimit;
-
-        /**
-         * < The maximum number of job slots that the user or user group can use simultaneously in the local LSF cluster. The job slots can be used by started jobs or reserved for PEND jobs.
-         */
-        public int maxJobs;
-
-        /**
-         * < The current number of job slots used by running and suspended jobs belonging to the user or user group.
-         */
-        public int numStartJobs;
-
-        /**
-         * < The total number of job slots in the LSF cluster for the jobs submitted by the user or user group.
-         */
-        public int numJobs;
-
-        /**
-         * < The number of job slots the user or user group has for pending jobs.
-         */
-        public int numPEND;
-
-        /**
-         * < The number of job slots the user or user group has for running jobs.
-         */
-        public int numRUN;
-
-        /**
-         * < The number of job slots for the jobs belonging to the user or user group that have been suspended by the system.
-         */
-        public int numSSUSP;
-
-        /**
-         * < The number of job slots for the jobs belonging to the user or user group that have been suspended by the user or the LSF system administrator.
-         */
-        public int numUSUSP;
-
-        /**
-         * < The number of job slots reserved for the pending jobs belonging to the user or user group.
-         */
-        public int numRESERVE;
-
-        /**
-         * < The maximum number of pending jobs allowed.
-         */
-        public int maxPendJobs;
-    }
-
-
-
-/* UserEquivalent info */
-
-    public static class userEquivalentInfoEnt extends Structure {
-        public static class ByReference extends userEquivalentInfoEnt implements Structure.ByReference {}
-        public static class ByValue extends userEquivalentInfoEnt implements Structure.ByValue {}
-        public userEquivalentInfoEnt() {}
-        public userEquivalentInfoEnt(Pointer p) { super(p); read(); }
-
-        public String equivalentUsers;
-    }
-
-
-
-/* UserMapping info */
-
-    public static class userMappingInfoEnt extends Structure {
-        public static class ByReference extends userMappingInfoEnt implements Structure.ByReference {}
-        public static class ByValue extends userMappingInfoEnt implements Structure.ByValue {}
-        public userMappingInfoEnt() {}
-        public userMappingInfoEnt(Pointer p) { super(p); read(); }
-
-
-/* Users in the local cluster */
-        public String localUsers;
-
-/* Users in remote clusters */
-        public String remoteUsers;
-
-/* "export" or "import" */
-        public String direction;
-    }
-
-
-
-
-/* APS structures used for mapping between factors */
-
-    /**
-     * \brief  APS structures used for mapping between factors
-     */
-    public static class apsFactorMap extends Structure {
-        public static class ByReference extends apsFactorMap implements Structure.ByReference {}
-        public static class ByValue extends apsFactorMap implements Structure.ByValue {}
-        public apsFactorMap() {}
-        public apsFactorMap(Pointer p) { super(p); read(); }
-
-
-        /**
-         * < Name of factor.
-         */
-        public String factorName;
-
-        /**
-         * < SubFactor names.
-         */
-        public String subFactorNames;
-    }
-
-
-
-    /**
-     * \brief  APS structures used for mapping between factors
-     */
-    public static class apsLongNameMap extends Structure {
-        public static class ByReference extends apsLongNameMap implements Structure.ByReference {}
-        public static class ByValue extends apsLongNameMap implements Structure.ByValue {}
-        public apsLongNameMap() {}
-        public apsLongNameMap(Pointer p) { super(p); read(); }
-
-
-        /**
-         * < Short name
-         */
-        public String shortName;
-
-        /**
-         * < Long name
-         */
-        public String longName;
-    }
-
-
-
-
-/* options for lsb_queueinfo() , some values should not
-*  conflict with the option values for lsb_usergrpinfo() and lsb_hostinfo_ex()
-*  since they share the same xdr_infoReq()
-*/
-
-/* for compatibility for 2.0 */
-    public static final int ALL_QUEUE = 0x01;
-
-/* for compatibility for 2.0 */
-    public static final int DFT_QUEUE = 0x02;
-    public static final int CHECK_HOST = 0x80;
-    public static final int CHECK_USER = 0x100;
-    public static final int SORT_HOST = 0x200;
-
-/* not bqueues -l or -r */
-    public static final int QUEUE_SHORT_FORMAT = 0x400;
-/* expand hostname into official hostname in lsb_queueinfo */
-    public static final int EXPAND_HOSTNAME = 0x800;
-
-/* only retrieve batch partitions */
-    public static final int RETRIEVE_BATCH = 0x1000;
-
-/* Signal number in each version LSB_SIG_NUM must be equal to
-*  signal number in the latest version.
- */
-    public static final int LSB_SIG_NUM_40 = 25;
-    public static final int LSB_SIG_NUM_41 = 26;
-
-/* Solutions #38347 */
-    public static final int LSB_SIG_NUM_51 = 30;
-    public static final int LSB_SIG_NUM_60 = 30;
-    public static final int LSB_SIG_NUM = 30;
-
-/* Dynamic CPU provision
-*  to indicate whether a SP can lend or borrow hosts
- */
-    public static final int DCP_LEND_HOSTS = 0x0001;
-    public static final int DCP_BORROW_HOSTS = 0x0002;
-
-/* status to indicate the current situation of Dynamic CPU provision
-*  DCP_UNDER_ALLOC_AND_STARVING means a partition is under allocation
-*  of dynamic cpu and its pending jobs are starving for more cpus.
- */
-    public static final int DCP_ALLOC_CPU_OK = 0x0;
-    public static final int DCP_UNDER_ALLOC_CPU = 0x0001;
-    public static final int DCP_JOB_WAIT_FOR_CPU = 0x0002;
-    public static final int DCP_ALLOC_CPU_BUSY = 0x0004;
-
-/* Structure for lsb_queueinfo() call */
-/* !!! IMPORTANT !!!
-*  If you change queueInfoEnt, you have to change Intlib/ade.lsbatch.h too!
- */
-
-    /**
-     * queueInfoEnt  queue information entry.
-     */
-    public static class queueInfoEnt extends Structure {
-        public static class ByReference extends queueInfoEnt implements Structure.ByReference {}
-        public static class ByValue extends queueInfoEnt implements Structure.ByValue {}
-        public queueInfoEnt() {}
-        public queueInfoEnt(Pointer p) { super(p); read(); }
-
-
-        /**
-         * < The name of the queue.
-         */
-        public String queue;
-
-        /**
-         * < Describes the typical use of the queue.
-         */
-        public String description;
-
-        /**
-         * < Defines the priority of the queue. This determines the order in which the job queues are searched at job dispatch time: queues with higher priority values are searched first. (This is contrary to UNIX process priority ordering.)
-         */
-        public int priority;
-
-        /**
-         * < Defines the nice value at which jobs in this queue will be run.
-         */
-        public short nice;
-
-        /**
-         * < A blank-separated list of names of users allowed to submit jobs to this queue.
-         */
-        public String userList;
-
-        /**
-         * < A blank-separated list of names of hosts to which jobs in this queue may be dispatched.
-         */
-        public String hostList;
-
-        /**
-         * < Original HOSTS string in case "-" is used.
-         */
-        public String hostStr;
-
-        /**
-         * < The number of load indices in the loadSched and loadStop arrays.
-         */
-        public int nIdx;
-
-        /**
-         * < The queue and host loadSched and loadStop arrays control batch job dispatch, suspension, and resumption. The values in the loadSched array specify thresholds for the corresponding load indices. Only if the current values of all specified load indices of a host are within (below or above, depending on the meaning of the load index) the corresponding thresholds of this queue, will jobs in this queue be dispatched to the host. The same conditions are used to resume jobs dispatc [...]
-         */
-        public FloatByReference loadSched;
-
-        /**
-         * < The values in the loadStop array specify the thresholds for job suspension. If any of the current load index values of a host goes beyond a queue's threshold, jobs from the queue will be suspended. For an explanation of the fields in the loadSched and loadStop arrays, see \ref lsb_hostinfo.
-         */
-        public FloatByReference loadStop;
-
-        /**
-         * < Per-user limit on the number of jobs that can be dispatched from this queue and executed concurrently.
-         */
-        public int userJobLimit;
-
-        /**
-         * < Per-processor limit on the number of jobs that can be dispatched from this queue and executed concurrently.
-         */
-        public float procJobLimit;
-
-        /**
-         * < A blank-separated list of time windows describing the run window of the queue. When a queue's run window is closed, no job from this queue will be dispatched. When the run window closes, any running jobs from this queue will be suspended until the run window reopens, when they will be resumed. The default is no restriction, or always open (i.e., 24 hours a day, seven days a week). A time window has the format begin_time-end_time. Time is specified in the format [day:]hour[:m [...]
-         */
-        public String windows;
-
-        /**
-         * < The per-process UNIX hard resource limits for all jobs submitted to this queue (see getrlimit() and lsb.queues). The default values for the resource limits are unlimited, indicated by -1. The constants used to index the rLimits array and the corresponding resource limits are listed below. <br> LSF_RLIMIT_CPU (CPULIMIT) <br> LSF_RLIMIT_FSIZE (FILELIMIT) <br> LSF_RLIMIT_DATA (DATALIMIT) <br> LSF_RLIMIT_STACK    (STACKLIMIT) <br> LSF_RLIMIT_CORE     (CORELIMIT) <br> LSF_RLIMIT_ [...]
-         */
-        public int[] rLimits = new int[LibLsf.LSF_RLIM_NLIMITS];
-
-        /**
-         * < A host name or host model name. If the queue CPULIMIT or RUNLIMIT gives a host specification, hostSpec will be that specification. Otherwise, if defaultHostSpec (see below) is not null, hostSpec will be defaultHostSpec. Otherwise, if DEFAULT_HOST_SPEC is defined in the lsb.params file, (see lsb.params), hostSpec will be this value. Otherwise, hostSpec will be the name of the host with the largest CPU factor in the cluster.
-         */
-        public String hostSpec;
-
-        /**
-         * < The attributes of the queue.
-         */
-        public int qAttrib;
-
-        /**
-         * < The status of the queue.
-         */
-        public int qStatus;
-
-        /**
-         * < The maximum number of jobs dispatched by the queue and not yet finished.
-         */
-        public int maxJobs;
-
-        /**
-         * < Number of jobs in the queue, including pending, running, and suspended jobs.
-         */
-        public int numJobs;
-
-        /**
-         * < Number of pending jobs in the queue.
-         */
-        public int numPEND;
-
-        /**
-         * < Number of running jobs in the queue.
-         */
-        public int numRUN;
-
-        /**
-         * < Number of system suspended jobs in the queue.
-         */
-        public int numSSUSP;
-
-        /**
-         * < Number of user suspended jobs in the queue.
-         */
-        public int numUSUSP;
-
-        /**
-         * < The queue migration threshold in minutes.
-         */
-        public int mig;
-
-        /**
-         * < The number of seconds that a new job waits, before being scheduled. A value of zero (0) means the job is scheduled without any delay.
-         */
-        public int schedDelay;
-
-        /**
-         * < The number of seconds for a host to wait after dispatching a job to a host, before accepting a second job to dispatch to the same host.
-         */
-        public int acceptIntvl;
-
-        /**
-         * < A blank-separated list of time windows describing the dispatch window of the queue. When a queue's dispatch window is closed, no job from this queue will be dispatched.The default is no restriction, or always open (i.e., 24 hours a day, seven days a week). For the time window format, see windows (above).
-         */
-        public String windowsD;
-
-        /**
-         * < A blank-separated list of queue specifiers. Each queue specifier is of the form queue\@host where host is an NQS host name and queue is the name of a queue on that host.
-         */
-        public String nqsQueues;
-
-        /**
-         * < A blank-separated list of user shares. Each share is of the form [user, share] where user is a user name, a user group name, the reserved word default or the reserved word others, and share is the number of shares the user gets.
-         */
-        public String userShares;
-
-        /**
-         * < The value of DEFAULT_HOST_SPEC in the Queue section for this queue in the lsb.queues file.
-         */
-        public String defaultHostSpec;
-
-        /**
-         * < An LSF resource limit used to limit the number of job slots (processors) a (parallel) job in the queue will use. A job submitted to this queue must specify a number of processors not greater than this limit.
-         */
-        public int procLimit;
-
-        /**
-         * < A list of administrators of the queue. The users whose names are here are allowed to operate on the jobs in the queue and on the queue itself.
-         */
-        public String admins;
-
-        /**
-         * < Queue's pre-exec command. The command is executed before the real batch job is run on the execution host (or on the first host selected for a parallel batch job).
-         */
-        public String preCmd;
-
-        /**
-         * < Queue's post-exec command. The command is run when a job terminates.
-         */
-        public String postCmd;
-
-        /**
-         * < Jobs that exit with these values are automatically requeued.
-         */
-        public String requeueEValues;
-
-        /**
-         * < The maximum number of job slots a host can process from this queue, including job slots of dispatched jobs which have not finished yet and reserved slots for some PEND jobs. This limit controls the number of jobs sent to each host, regardless of a uniprocessor host or multiprocessor host. Default value for this limit is infinity.
-         */
-        public int hostJobLimit;
-
-        /**
-         * < Resource requirement string used to determine eligible hosts for a job.
-         */
-        public String resReq;
-
-        /**
-         * < Number of reserved job slots for pending jobs.
-         */
-        public int numRESERVE;
-
-        /**
-         * < The time used to hold the reserved job slots for a PEND job in this queue.
-         */
-        public int slotHoldTime;
-
-        /**
-         * < Remote MultiCluster send-jobs queues to forward jobs to.
-         */
-        public String sndJobsTo;
-
-        /**
-         * < Remote MultiCluster receive-jobs queues that can forward to this queue.
-         */
-        public String rcvJobsFrom;
-
-        /**
-         * < Resume threshold conditions for a suspended job in this queue.
-         */
-        public String resumeCond;
-
-        /**
-         * < Stop threshold conditions for a running job in this queue.
-         */
-        public String stopCond;
-
-        /**
-         * < Job starter command for a running job in this queue
-         */
-        public String jobStarter;
-
-        /**
-         * < Command configured for the SUSPEND action.
-         */
-        public String suspendActCmd;
-
-        /**
-         * < Command configured for the RESUME action.
-         */
-        public String resumeActCmd;
-
-        /**
-         * < Command configured for the TERMINATE action.
-         */
-        public String terminateActCmd;
-
-        /**
-         * < Configurable signal mapping
-         */
-        public int[] sigMap = new int[LSB_SIG_NUM];
-
-        /**
-         * < Preemptive scheduling and preemption policy specified for the queue.
-         */
-        public String preemption;
-
-        /**
-         * < Time period for a remote cluster to schedule a job. MultiCluster job forwarding model only. Determines how long a MultiCluster job stays pending in the execution cluster before returning to the submission cluster. The remote timeout limit in seconds is: \li MAX_RSCHED_TIME.ByReference  MBD_SLEEP_TIME=timeout
-         */
-        public int maxRschedTime;
-
-
-        /**
-         * < Number of share accounts in the queue.
-         */
-        public int numOfSAccts;
-
-        /**
-         * < (Only used for queues with fairshare policy) a share account vector capturing the fairshare information of the users using the queue. The storage for the array of queueInfoEnt structures will be reused by the next call.
-         */
-        public Pointer /* shareAcctInfoEnt.ByReference */ shareAccts;
-
-        /**
-         * < The directory where the checkpoint files are created.
-         */
-        public String chkpntDir;
-
-        /**
-         * < The checkpoint period in minutes.
-         */
-        public int chkpntPeriod;
-
-        /**
-         * < MultiCluster job forwarding model only. Specifies the MultiCluster pending job limit for a receive-jobs queue. This represents the maximum number of MultiCluster import jobs that can be pending in the queue; once the limit has been reached, the queue stops accepting jobs from remote clusters.
-         */
-        public int imptJobBklg;
-
-        /**
-         * < The default (soft) resource limits for all jobs submitted to this queue (see getrlimit() and lsb.queues).
-         */
-        public int[] defLimits = new int[LibLsf.LSF_RLIM_NLIMITS];
-
-        /**
-         * < The maximum number of jobs allowed to be dispatched together in one job chunk. Must be a positive integer greater than 1.
-         */
-        public int chunkJobSize;
-
-        /**
-         * < The minimum number of job slots (processors) that a job in the queue will use.
-         */
-        public int minProcLimit;
-
-        /**
-         * < The default (soft) limit on the number of job slots (processors) that a job in the queue will use.
-         */
-        public int defProcLimit;
-
-        /**
-         * < The list of queues for cross-queue fairshare.
-         */
-        public String fairshareQueues;
-
-        /**
-         * < Default external scheduling for the queue.
-         */
-        public String defExtSched;
-
-        /**
-         * < Mandatory external scheduling options for the queue.
-         */
-        public String mandExtSched;
-
-        /**
-         * < Share of job slots for queue-based fairshare. Represents the percentage of running jobs (job slots) in use from the queue. SLOT_SHARE must be greater than zero (0) and less than or equal to 100. The sum of SLOT_SHARE for all queues in the pool does not need to be 100%. It can be more or less, depending on your needs.
-         */
-        public int slotShare;
-
-        /**
-         * < Name of the pool of job slots the queue belongs to for queue-based fairshare. A queue can only belong to one pool. All queues in the pool must share the same set of hosts. Specify any ASCII string up to 60 chars long. You can use letters, digits, underscores (_) or dashes (-). You cannot use blank spaces.
-         */
-        public String slotPool;
-
-        /**
-         * < Specifies a threshold for job underrun exception handling. If a job exits before the specified number of minutes, LSF invokes LSF_SERVERDIR/eadmin to trigger the action for a job underrun exception.
-         */
-        public int underRCond;
-
-        /**
-         * < Specifies a threshold for job overrun exception handling. If a job runs longer than the specified run time, LSF invokes LSF_SERVERDIR/eadmin to trigger the action for a job overrun exception.
-         */
-        public int overRCond;
-
-        /**
-         * < Specifies a threshold for idle job exception handling. The value should be a number between 0.0 and 1.0 representing CPU time/runtime. If the job idle factor is less than the specified threshold, LSF invokes LSF_SERVERDIR/eadmin to trigger the action for a job idle exception.
-         */
-        public float idleCond;
-
-        /**
-         * < The number of underrun jobs in the queue.
-         */
-        public int underRJobs;
-
-        /**
-         * < The number of overrun jobs in the queue.
-         */
-        public int overRJobs;
-
-        /**
-         * < The number of idle jobs in the queue.
-         */
-        public int idleJobs;
-
-        /**
-         * < Specifies the amount of time before a job control action occurs that a job warning action is to be taken. For example, 2 minutes before the job reaches run time limit or termination deadline, or the queue's run window is closed, an URG signal is sent to the job. Job action warning time is not normalized. A job action warning time must be specified with a job warning action in order for job warning to take effect.
-         */
-        public int warningTimePeriod;
-
-        /**
-         * < Specifies the job action to be taken before a job control action occurs. For example, 2 minutes before the job reaches run time limit or termination deadline, or the queue's run window is closed, an URG signal is sent to the job. A job warning action must be specified with a job action warning time in order for job warning to take effect. If specified, LSF sends the warning action to the job before the actual control action is taken. This allows the job time to save its resu [...]
-         */
-        public String warningAction;
-
-        /**
-         * < AdminAction - queue control message
-         */
-        public String qCtrlMsg;
-
-        /**
-         * < Acept resource request.
-         */
-        public String acResReq;
-
-        /**
-         * < Limit of running session scheduler jobs.
-         */
-        public int symJobLimit;
-
-        /**
-         * < cpu_req for service partition of session scheduler
-         */
-        public String cpuReq;
-
-        /**
-         * < Indicate whether it would be willing to donate/borrow.
-         */
-        public int proAttr;
-
-        /**
-         * < The maximum number of hosts to lend.
-         */
-        public int lendLimit;
-
-        /**
-         * < The grace period to lend/return idle hosts.
-         */
-        public int hostReallocInterval;
-
-        /**
-         * < Number of CPUs required by CPU provision.
-         */
-        public int numCPURequired;
-
-        /**
-         * < Number of CPUs actually allocated.
-         */
-        public int numCPUAllocated;
-
-        /**
-         * < Number of CPUs borrowed.
-         */
-        public int numCPUBorrowed;
-
-        /**
-         * < Number of CPUs lent.
-         */
-        public int numCPULent;
-        /* the number of reserved cpu(numCPUReserved) = numCPUAllocated - numCPUBorrowed + numCPULent */
-
-
-        /* the following fields are for real-time app(ex. murex) of symphony */
-
-        /**
-         * < Scheduling granularity. in milliseconds.
-         */
-        public int schGranularity;
-
-        /**
-         * < The grace period for stopping session scheduler tasks.
-         */
-        public int symTaskGracePeriod;
-
-        /**
-         * < Minimum number of SSMs.
-         */
-        public int minOfSsm;
-
-        /**
-         * < Maximum number of SSMs.
-         */
-        public int maxOfSsm;
-
-        /**
-         * < Number of allocated slots.
-         */
-        public int numOfAllocSlots;
-
-        /**
-         * < Service preemptin policy.
-         */
-        public String servicePreemption;
-
-
-        /**
-         * < Dynamic cpu provision status.
-         */
-        public int provisionStatus;
-
-        /**
-         * < The minimum time for preemption and backfill, in seconds.
-         */
-        public int minTimeSlice;
-
-        /**
-         * < List of queues defined in a queue group for absolute priority scheduling (APS) across multiple queues.
-         */
-        public String queueGroup;
-
-        /**
-         * < The number of calculation factors for absolute priority scheduling (APS).
-         */
-        public int numApsFactors;
-
-        /**
-         * < List of calculation factors for absolute priority scheduling (APS)
-         */
-        public Pointer /* apsFactorInfo.ByReference */ apsFactorInfoList;
-
-        /**
-         * < The mapping of factors to subfactors for absolute priority scheduling (APS).
-         */
-        public Pointer /* apsFactorMap.ByReference */ apsFactorMaps;
-
-        /**
-         * < The mapping of factors to their long names for absolute priority scheduling (APS).
-         */
-        public Pointer /* apsLongNameMap.ByReference */ apsLongNames;
-
-        /**
-         * < Maximum number of job preempted times.
-         */
-        public int maxJobPreempt;
-
-        /**
-         * < Maximum number of pre-exec retry times.
-         */
-        public int maxPreExecRetry;
-
-        /**
-         * < Maximum number of pre-exec retry times for local cluster
-         */
-        public int localMaxPreExecRetry;
-
-        /**
-         * < Maximum number of job re-queue times.
-         */
-        public int maxJobRequeue;
-
-        /**
-         * < Use Linux-PAM
-         */
-        public int usePam;
-        /* compute unit exclusive */
-
-        /**
-         * < Compute unit type
-         */
-        public int cu_type_exclusive;
-
-        /**
-         * < A string specified in EXCLUSIVE=CU[\<string>]
-         */
-        public String cu_str_exclusive;
-
-        /**
-         * < Resource reservation limit
-         */
-        public String resRsvLimit;
-
-    }
-
-
-
-    /**
-     *  \addtogroup signal_action signal_action
-     *  define status for signal action
-     */
-
-    /**
-     * <  No action
-     */
-    public static final int ACT_NO = 0;
-
-    /**
-     * <  Start
-     */
-    public static final int ACT_START = 1;
-
-    /**
-     * <  Preempt
-     */
-    public static final int ACT_PREEMPT = 2;
-
-    /**
-     * <  Done
-     */
-    public static final int ACT_DONE = 3;
-
-    /**
-     * <  Fail
-     */
-    public static final int ACT_FAIL = 4;
-
-    /**
-     * \brief  host information entry.
-     */
-    public static class hostInfoEnt extends Structure {
-        public static class ByReference extends hostInfoEnt implements Structure.ByReference {}
-        public static class ByValue extends hostInfoEnt implements Structure.ByValue {}
-        public hostInfoEnt() {}
-        public hostInfoEnt(Pointer p) { super(p); read(); }
-
-
-        /**
-         * < The name of the host.
-         */
-        public String host;
-
-        /**
-         * < The status of the host. It is the bitwise  inclusive OR.  see \ref host_status
-         */
-        public int hStatus;
-
-        /**
-         * < Indicate host loadSched busy reason
-         */
-        public IntByReference busySched;
-
-        /**
-         * < Indicate host loadStop  busy reason.
-         */
-        public IntByReference busyStop;
-
-        /**
-         * < The host CPU factor used to scale CPU load values to account for differences in CPU speeds. The faster the CPU, the larger the CPU factor.
-         */
-        public float cpuFactor;
-
-        /**
-         * < The number of load indices in the load, loadSched and loadStop arrays.
-         */
-        public int nIdx;
-
-        /**
-         * < Load information array on a host. This array gives the load information that is used for  scheduling batch jobs. This load information  is the effective load information from \ref ls_loadofhosts (see \ref ls_loadofhosts) plus the load reserved for running jobs (see lsb.queues for details on resource reservation). The load array is indexed the same as loadSched and loadStop  (see loadSched and loadStop below).
-         */
-        public FloatByReference load;
-
-        /**
-         * < Stop scheduling new jobs if over
-         */
-        public FloatByReference loadSched;
-
-        /**
-         * < Stop jobs if over this load. The loadSched and loadStop arrays control batch job scheduling, suspension, and resumption. The values in the loadSched array specify the scheduling thresholds for the corresponding load indices. Only if the current values of all specified load indices of this host are within (below or above, depending on the meaning of the load index) the corresponding thresholds of this host, will jobs be scheduled to run on this host. Similarly, the values in  [...]
-         */
-        public FloatByReference loadStop;
-
-        /**
-         * < ASCII desp of run windows.One or more time windows in a week during which batch jobs may be dispatched to run on this host . The default is no restriction, or always open (i.e., 24 hours a day seven days a week). These windows are similar to the dispatch windows of batch job queues. See \ref lsb_queueinfo.
-         */
-        public String windows;
-
-        /**
-         * < The maximum number of job slots any user is allowed to use on this host.
-         */
-        public int userJobLimit;
-
-        /**
-         * < The maximum number of job slots that the host can process concurrently.
-         */
-        public int maxJobs;
-
-        /**
-         * < The number of job slots running or suspended on the host.
-         */
-        public int numJobs;
-
-        /**
-         * < The number of job slots running on the host.
-         */
-        public int numRUN;
-
-        /**
-         * < The number of job slots suspended by the batch daemon on the host.
-         */
-        public int numSSUSP;
-
-        /**
-         * < The number of job slots suspended by the job submitter or the LSF system administrator.
-         */
-        public int numUSUSP;
-
-        /**
-         * < The migration threshold in minutes after which a suspended job will be considered for migration.
-         */
-        public int mig;
-
-
-        /**
-         * < The host attributes; the bitwise inclusive OR of some of \ref host_attributes
-         */
-        public int attr;
-        /**
-         *  \addtogroup host_attributes host_attributes
-         *  The host attributes
-         */
-
-        /**
-         * < This host can checkpoint jobs
-         */
-        public static final int H_ATTR_CHKPNTABLE = 0x1;
-
-        /**
-         * < This host provides kernel support for checkpoint copy.
-         */
-        public static final int H_ATTR_CHKPNT_COPY = 0x2;
-
-        /**
-         * < The effective load of the host.
-         */
-        public FloatByReference realLoad;
-
-        /**
-         * < The number of job slots reserved by LSF for the PEND jobs.
-         */
-        public int numRESERVE;
-
-        /**
-         * < If attr has an H_ATTR_CHKPNT_COPY attribute, chkSig is set to the signal which triggers  checkpoint and copy operation. Otherwise,  chkSig is set to the signal which triggers  checkpoint operation on the host
-         */
-        public int chkSig;
-
-
-        /**
-         * < Num of resource used by the consumer
-         */
-        public float cnsmrUsage;
-
-        /**
-         * < Num of resource used by the provider
-         */
-        public float prvdrUsage;
-
-        /**
-         * < Num of resource available for the consumer to use
-         */
-        public float cnsmrAvail;
-
-        /**
-         * < Num of resource available for the provider to use
-         */
-        public float prvdrAvail;
-
-        /**
-         * < Num maximum of resource available in total
-         */
-        public float maxAvail;
-
-        /**
-         * < The job exit rate threshold on the host
-         */
-        public float maxExitRate;
-
-        /**
-         * < Number of job exit rate on the host
-         */
-        public float numExitRate;
-
-        /**
-         * < AdminAction - host control message
-         */
-        public String hCtrlMsg;
-
-    }
-
-
-
-    /**
-     * \brief  Host information condition entry.
-     */
-    public static class condHostInfoEnt extends Structure {
-        public static class ByReference extends condHostInfoEnt implements Structure.ByReference {}
-        public static class ByValue extends condHostInfoEnt implements Structure.ByValue {}
-        public condHostInfoEnt() {}
-        public condHostInfoEnt(Pointer p) { super(p); read(); }
-
-
-        /**
-         * < Host name
-         */
-        public String name;
-
-
-        /**
-         * < How many hosts are in the ok status
-         */
-        public int howManyOk;
-
-        /**
-         * < How many hosts are in the busy status
-         */
-        public int howManyBusy;
-
-        /**
-         * < How many hosts are in the closed status
-         */
-        public int howManyClosed;
-
-        /**
-         * < How many hosts are in the full status
-         */
-        public int howManyFull;
-
-        /**
-         * < How many hosts are in the unreach status
-         */
-        public int howManyUnreach;
-
-        /**
-         * < How many hosts are in the unavail status
-         */
-        public int howManyUnavail;
-
-
-        /**
-         * < The status of each host in the host group
-         */
-        public Pointer /* hostInfoEnt.ByReference */ hostInfo;
-
-    }
-
-
-
-    public static class adjustParam extends Structure {
-        public static class ByReference extends adjustParam implements Structure.ByReference {}
-        public static class ByValue extends adjustParam implements Structure.ByValue {}
-        public adjustParam() {}
-        public adjustParam(Pointer p) { super(p); read(); }
-
-
-/* key name of share adjustment */
-        public String key;
-
-/* value of the key */
-        public float value;
-    }
-
-
-
-
-/* cpu time factor */
-    public static final int FAIR_ADJUST_CPU_TIME_FACTOR = 0;
-
-/* run time factor */
-    public static final int FAIR_ADJUST_RUN_TIME_FACTOR = 1;
-
-/* run job factor */
-    public static final int FAIR_ADJUST_RUN_JOB_FACTOR = 2;
-
-/* committed run time factor */
-    public static final int FAIR_ADJUST_COMMITTED_RUN_TIME_FACTOR = 3;
-
-/* enable hist run time */
-    public static final int FAIR_ADJUST_ENABLE_HIST_RUN_TIME = 4;
-
-/* cpu time of finished jobs with decay */
-    public static final int FAIR_ADJUST_HIST_CPU_TIME = 5;
-
-/* cpu time of finished jobs within decay */
-    public static final int FAIR_ADJUST_NEW_USED_CPU_TIME = 6;
-
-/* total time that job spend in RUN state */
-    public static final int FAIR_ADJUST_RUN_TIME = 7;
-
-/* historical run time of finished jobs */
-    public static final int FAIR_ADJUST_HIST_RUN_TIME = 8;
-
-/* committed run time of started jobs */
-    public static final int FAIR_ADJUST_COMMITTED_RUN_TIME = 9;
-
-/* number of job slots used by started jobs */
-    public static final int FAIR_ADJUST_NUM_START_JOBS = 10;
-
-/* number of reserved slots used by pending jobs */
-    public static final int FAIR_ADJUST_NUM_RESERVE_JOBS = 11;
-
-/* total amount of memory used by started jobs */
-    public static final int FAIR_ADJUST_MEM_USED = 12;
-
-/* average memory allocated per slot */
-    public static final int FAIR_ADJUST_MEM_ALLOCATED = 13;
-
-/* total number of fairshare adjustment key value pairs */
-    public static final int FAIR_ADJUST_KVPS_SUM = 14;
-
-    //public String[] FairAdjustPairArrayName = new String[FAIR_ADJUST_KVPS_SUM];
-
-    public static class shareAdjustPair extends Structure {
-        public static class ByReference extends shareAdjustPair implements Structure.ByReference {}
-        public static class ByValue extends shareAdjustPair implements Structure.ByValue {}
-        public shareAdjustPair() {}
-        public shareAdjustPair(Pointer p) { super(p); read(); }
-
-
-/* queue share account */
-        public static int SHAREACCTTYPEQUEUE = 0x01;
-
-/* host partition share account */
-        public static final int SHAREACCTTYPEHP = 0x02;
-
-/* SLA share account */
-        public static final int SHAREACCTTYPESLA = 0x04;
-
-/* type of share account*/
-        public int shareAcctType;
-
-/* name of the share holder that use the share */
-        public String holderName;
-
-/* name of the provider policy name(name of queue, host partition or SLA) */
-        public String providerName;
-
-/* number of share adjustment key value pair */
-        public int numPair;
-
-/* share adjustment key value pair */
-        public Pointer /* adjustParam.ByReference */ adjustParam;
-    }
-
-
-
-    // NOTE: Not in libbat
-    //public static native float fairshare_adjustment(shareAdjustPair shareAdjustPair1);
-
-/* For lsb_hostpartinfo() call */
-
-    /**
-     * \brief   gets user information about host partitions.
-     */
-    public static class hostPartUserInfo extends Structure {
-        public static class ByReference extends hostPartUserInfo implements Structure.ByReference {}
-        public static class ByValue extends hostPartUserInfo implements Structure.ByValue {}
-        public hostPartUserInfo() {}
-        public hostPartUserInfo(Pointer p) { super(p); read(); }
-
-
-        /**
-         * < The user name or user group name.  See \ref lsb_userinfo  and \ref lsb_usergrpinfo
-         */
-        public String user;
-
-        /**
-         * < The number of shares assigned to the user or user group, as configured in the file lsb.hosts. (See lsb.hosts.)
-         */
-        public int shares;
-
-        /**
-         * < The priority of the user or user group to use the host partition. Bigger values represent higher priorities. Jobs belonging to the user or user group with the highest priority are considered first for dispatch when resources in the host partition are being contended for. In general, a user or user group with more shares, fewer numStartJobs and less histCpuTime has higher priority. The storage for the array of hostPartInfoEnt structures will be reused by the next call.
-         */
-        public float priority;
-
-        /**
-         * < The number of job slots belonging to the user or user group that are running or suspended in the host partition.
-         */
-        public int numStartJobs;
-
-        /**
-         * < The normalized CPU time accumulated in the host partition during the recent period by finished jobs belonging to the user or user group. The period may be configured in the file lsb.params (see lsb.params), with a default value of five (5) hours.
-         */
-        public float histCpuTime;
-
-        /**
-         * < The number of job slots that are reserved for the PEND jobs belonging to the user or user group in the host partition.
-         */
-        public int numReserveJobs;
-
-        /**
-         * < The time unfinished jobs spend  in RUN state
-         */
-        public int runTime;
-
-        /**
-         * < The fairshare adjustment value from the fairshare plugin  (libfairshareadjust.ByReference ). The adjustment is enabled and weighted by setting the value of FAIRSHARE_ADJUSTMENT_FACTOR in lsb.params.
-         */
-        public float shareAdjustment;
-    }
-
-
-
-/* For lsb_hostpartinfo() call */
-
-    /**
-     * \brief  gets information entry about host partitions.
-     */
-    public static class hostPartInfoEnt extends Structure {
-        public static class ByReference extends hostPartInfoEnt implements Structure.ByReference {}
-        public static class ByValue extends hostPartInfoEnt implements Structure.ByValue {}
-        public hostPartInfoEnt() {}
-        public hostPartInfoEnt(Pointer p) { super(p); read(); }
-
-
-        /**
-         * < The name of the host partition
-         */
-        public byte[] hostPart = new byte[MAX_LSB_NAME_LEN];
-
-        /**
-         * < A blank-separated list of names of hosts and host groups which are members of the host partition. The name of a host group has a '/' appended. see \ref lsb_hostgrpinfo.
-         */
-        public String hostList;
-
-        /**
-         * < The number of users in this host partition. i.e., the number of hostPartUserInfo structures.
-         */
-        public int numUsers;
-
-        /**
-         * < An array of hostPartUserInfo structures which hold information on users in this host partition.
-         */
-        public Pointer /* hostPartUserInfo.ByReference */ users;
-    }
-
-
-
-/* Library rappresentation of the share account */
-
-    /**
-     * \brief Library rappresentation of the share account
-     */
-    public static class shareAcctInfoEnt extends Structure {
-        public static class ByReference extends shareAcctInfoEnt implements Structure.ByReference {}
-        public static class ByValue extends shareAcctInfoEnt implements Structure.ByValue {}
-        public shareAcctInfoEnt() {}
-        public shareAcctInfoEnt(Pointer p) { super(p); read(); }
-
-
-        /**
-         * < The user name or user group name. (See \ref lsb_userinfo and \ref lsb_usergrpinfo.)
-         */
-        public String shareAcctPath;
-
-        /**
-         * < The number of shares assigned to  the user or user group, as configured in the file lsb.queues.
-         */
-        public int shares;
-
-        /**
-         * < The priority of the user or user group in the fairshare queue. Larger values represent higher priorities. Job belonging to the user or user group with the highest priority are considered first for dispatch in the fairshare queue. In general, a user or user group with more shares, fewer numStartJobs and less histCpuTime has higher priority.
-         */
-        public float priority;
-
-        /**
-         * < The number of job slots (belonging to the user or user group) that are running or suspended in the fairshare queue.
-         */
-        public int numStartJobs;
-
-        /**
-         * < The normalized CPU time accumulated in the fairshare queue by jobs belonging to the user or user group, over the time period configured in the file lsb.params. The default time period is 5 hours.
-         */
-        public float histCpuTime;
-
-        /**
-         * < The number of job slots that are reserved for the PEND jobs belonging to the user or user group in the host partition.
-         */
-        public int numReserveJobs;
-
-        /**
-         * < The time unfinished jobs spend in the RUN state.
-         */
-        public int runTime;
-
-        /**
-         * < The fairshare adjustment value from the fairshare plugin  (libfairshareadjust.SOEXT). The adjustment is enabled and weighted  by setting the value of FAIRSHARE_ADJUSTMENT_FACTOR in lsb.params.
-         */
-        public float shareAdjustment;
-    }
-
-
-
-/* boundaries and default value used by mbatchd for the maxJobId */
-    public static final int DEF_MAX_JOBID = 999999;
-    public static final int MAX_JOBID_LOW = 999999;
-    public static final int MAX_JOBID_HIGH = (LibLsf.INFINIT_INT - 1);
-
-
-/* default preemption wait time */
-    public static final int DEF_PREEMPTION_WAIT_TIME = 300;
-
-/* default number of hosts specified by -m */
-    public static final int DEF_MAX_ASKED_HOSTS = 512;
-
-/* For lsb_parameterinfo() call */
-
-    /**
-     * \brief The parameterInfo structure contains the following fields:
-     */
-    public static class parameterInfo extends Structure {
-        public static class ByReference extends parameterInfo implements Structure.ByReference {}
-        public static class ByValue extends parameterInfo implements Structure.ByValue {}
-        public parameterInfo() {}
-        public parameterInfo(Pointer p) { super(p); read(); }
-
-
-        /**
-         * < DEFAULT_QUEUE: A blank_separated list of queue names for automatic queue selection.
-         */
-        public String defaultQueues;
-
-        /**
-         * < DEFAULT_HOST_SPEC: The host name or host model name used as the system default for scaling CPULIMIT and RUNLIMIT.
-         */
-        public String defaultHostSpec;
-
-        /**
-         * < MBD_SLEEP_TIME: The interval in seconds at which the mbatchd dispatches jobs.
-         */
-        public int mbatchdInterval;
-
-        /**
-         * < SBD_SLEEP_TIME: The interval in seconds at which the sbatchd suspends or resumes jobs.
-         */
-        public int sbatchdInterval;
-
-        /**
-         * < JOB_ACCEPT_INTERVAL: The interval at which  a host accepts two successive jobs. (In units of SBD_SLEEP_TIME.)
-         */
-        public int jobAcceptInterval;
-
-        /**
-         * < MAX_RETRY: The maximum number of retries for dispatching a job.
-         */
-        public int maxDispRetries;
-
-        /**
-         * < MAX_SBD_FAIL: The maximum number of retries for reaching an sbatchd.
-         */
-        public int maxSbdRetries;
-
-        /**
-         * < PREEM_PERIOD: The interval in seconds for preempting jobs running on the same host.
-         */
-        public int preemptPeriod;
-
-        /**
-         * < CLEAN_PERIOD: The interval in seconds during which finished jobs are kept in core.
-         */
-        public int cleanPeriod;
-
-        /**
-         * < MAX_JOB_NUM: The maximum number of finished jobs that are logged in the current event file.
-         */
-        public int maxNumJobs;
-
-        /**
-         * < HIST_HOURS: The number of hours of resource consumption history used for fair share scheduling and scheduling within a host partition.
-         */
-        public float historyHours;
-
-        /**
-         * < PG_SUSP_IT: The interval a host must be idle before resuming a job suspended for excessive paging.
-         */
-        public int pgSuspendIt;
-
-        /**
-         * < The default project assigned to jobs.
-         */
-        public String defaultProject;
-
-        /**
-         * < Job submission retry interval
-         */
-        public int retryIntvl;
-
-        /**
-         * < For Cray NQS compatiblilty only. Used by LSF to get the NQS queue information
-         */
-        public int nqsQueuesFlags;
-
-        /**
-         * < nqsRequestsFlags
-         */
-        public int nqsRequestsFlags;
-
-        /**
-         * < The maximum number of times to attempt the preexecution command of a job from a remote cluster ( MultiCluster only)
-         */
-        public int maxPreExecRetry;
-
-        /**
-         * < Maximum number of pre-exec retry times for local cluster
-         */
-        public int localMaxPreExecRetry;
-
-        /**
-         * < Event watching Interval in seconds
-         */
-        public int eventWatchTime;
-
-        /**
-         * < Run time weighting factor for fairshare scheduling
-         */
-        public float runTimeFactor;
-
-        /**
-         * < Used for calcultion of the fairshare scheduling formula
-         */
-        public float waitTimeFactor;
-
-        /**
-         * < Job slots weighting factor for fairshare scheduling
-         */
-        public float runJobFactor;
-
-        /**
-         * < Default check interval
-         */
-        public int eEventCheckIntvl;
-
-        /**
-         * < sbatchd report every sbd_sleep_time
-         */
-        public int rusageUpdateRate;
-
-        /**
-         * < sbatchd updates jobs jRusage in mbatchd if more than 10% changes
-         */
-        public int rusageUpdatePercent;
-
-        /**
-         * < Time period to check for reconfig
-         */
-        public int condCheckTime;
-
-        /**
-         * < The maximum number of connections between master and slave batch daemons
-         */
-        public int maxSbdConnections;
-
-        /**
-         * < The interval for rescheduling jobs
-         */
-        public int rschedInterval;
-
-        /**
-         * < Max time mbatchd stays in scheduling routine, after which take a breather
-         */
-        public int maxSchedStay;
-
-        /**
-         * < During which load remains fresh
-         */
-        public int freshPeriod;
-
-        /**
-         * < The preemption behavior, GROUP_MAX, GROUP_JLP, USER_JLP, HOST_JLU,MINI_JOB, LEAST_RUN_TIME
-         */
-        public int preemptFor;
-
-        /**
-         * < Flags whether users can resume their jobs when suspended by the LSF administrator
-         */
-        public int adminSuspend;
-
-        /**
-         * < Flags to enable/disable normal user to create advance reservation
-         */
-        public int userReservation;
-
-        /**
-         * < CPU time weighting factor for fairshare scheduling
-         */
-        public float cpuTimeFactor;
-
-        /**
-         * < The starting month for a fiscal year
-         */
-        public int fyStart;
-
-        /**
-         * < The maximum number of jobs in a job array
-         */
-        public int maxJobArraySize;
-
-        /**
-         * < Replay period for exceptions, in seconds
-         */
-        public NativeLong exceptReplayPeriod;
-
-        /**
-         * < The interval to terminate a job
-         */
-        public int jobTerminateInterval;
-
-        /**
-         * <  User level account mapping for remote jobs is disabled
-         */
-        public int disableUAcctMap;
-
-        /**
-         * < If set to TRUE, Project name for a job will be considerred when doing fairshare scheduling, i.e., as if user has submitted jobs using -G
-         */
-        public int enforceFSProj;
-
-        /**
-         * < Enforces the check to see if the invoker of bsub is in the specifed group when the -P option is used
-         */
-        public int enforceProjCheck;
-
-        /**
-         * < Run time for a job
-         */
-        public int jobRunTimes;
-
-        /**
-         * < Event table Job default interval
-         */
-        public int dbDefaultIntval;
-
-        /**
-         * < Event table Job Host Count
-         */
-        public int dbHjobCountIntval;
-
-        /**
-         * < Event table Job Queue Count
-         */
-        public int dbQjobCountIntval;
-
-        /**
-         * < Event table Job User Count
-         */
-        public int dbUjobCountIntval;
-
-        /**
-         * < Event table Job Resource Interval
-         */
-        public int dbJobResUsageIntval;
-
-        /**
-         * < Event table Resource Load Interval
-         */
-        public int dbLoadIntval;
-
-        /**
-         * < Event table Job Info
-         */
-        public int dbJobInfoIntval;
-
-        /**
-         * < Used with job dependency scheduling
-         */
-        public int jobDepLastSub;
-
-        /**
-         * < Used with job dependency scheduling,  deprecated
-         */
-        public int maxJobNameDep;
-
-        /**
-         * < Select resources to be logged
-         */
-        public String dbSelectLoad;
-
-        /**
-         * < Job synchronizes its group status
-         */
-        public int jobSynJgrp;
-
-        /**
-         * < The batch jobs' temporary output directory
-         */
-        public String pjobSpoolDir;
-
-
-        /**
-         * < Maximal job priority defined for all users
-         */
-        public int maxUserPriority;
-
-        /**
-         * < Job priority is increased by the system dynamically based on waiting time
-         */
-        public int jobPriorityValue;
-
-        /**
-         * < Waiting time to increase Job priority by the system dynamically
-         */
-        public int jobPriorityTime;
-
-        /**
-         * < Enable internal statistical adjustment
-         */
-        public int enableAutoAdjust;
-
-        /**
-         * < Start to autoadjust when the user has  this number of pending jobs
-         */
-        public int autoAdjustAtNumPend;
-
-        /**
-         * < If this number of jobs has been visited skip the user
-         */
-        public float autoAdjustAtPercent;
-
-        /**
-         * <  Static shared resource update interval for the cluster actor
-         */
-        public int sharedResourceUpdFactor;
-
-        /**
-         * < Schedule job based on raw load info
-         */
-        public int scheRawLoad;
-
-        /**
-         * <  The batch jobs' external storage for attached data
-         */
-        public String jobAttaDir;
-
-        /**
-         * < Maximum message number for each job
-         */
-        public int maxJobMsgNum;
-
-        /**
-         * < Maximum attached data size to be transferred for each message
-         */
-        public int maxJobAttaSize;
-
-        /**
-         * < The life time of a child MBD to serve queries in the MT way
-         */
-        public int mbdRefreshTime;
-
-        /**
-         * < The interval of the execution cluster updating the job's resource usage
-         */
-        public int updJobRusageInterval;
-
-        /**
-         * < The account to which all windows workgroup users are to be mapped
-         */
-        public String sysMapAcct;
-
-        /**
-         * < Dispatch delay internal
-         */
-        public int preExecDelay;
-
-        /**
-         * < Update duplicate event interval
-         */
-        public int updEventUpdateInterval;
-
-        /**
-         * < Resources are reserved for parallel jobs on a per-slot basis
-         */
-        public int resourceReservePerSlot;
-
-        /**
-         * < Maximum job id --- read from the lsb.params
-         */
-        public int maxJobId;
-
-        /**
-         * < Define a list of preemptable resource  names
-         */
-        public String preemptResourceList;
-
-        /**
-         * < The preemption wait time
-         */
-        public int preemptionWaitTime;
-
-        /**
-         * < Maximum number of rollover lsb.acct files kept by mbatchd.
-         */
-        public int maxAcctArchiveNum;
-
-        /**
-         * < mbatchd Archive Interval
-         */
-        public int acctArchiveInDays;
-
-        /**
-         * < mbatchd Archive threshold
-         */
-        public int acctArchiveInSize;
-
-        /**
-         * < Committed run time weighting factor
-         */
-        public float committedRunTimeFactor;
-
-        /**
-         * < Enable the use of historical run time in the calculation of fairshare scheduling priority, Disable the use of historical run time in the calculation of fairshare scheduling priority
-         */
-        public int enableHistRunTime;
-
-/*#ifdef PS_SXNQS */
-/**< NQS resource usage update interval */
-/*    public int   nqsUpdateInterval;*/
-/*#endif */
-
-        /**
-         * < Open lease reclaim time
-         */
-        public int mcbOlmReclaimTimeDelay;
-
-        /**
-         * < Enable chunk job dispatch for jobs with CPU limit or run limits
-         */
-        public int chunkJobDuration;
-
-        /**
-         * < The interval for scheduling jobs by scheduler daemon
-         */
-        public int sessionInterval;
-
-        /**
-         * < The number of jobs per user per queue whose pending reason is published at the PEND_REASON_UPDATE_INTERVAL interval
-         */
-        public int publishReasonJobNum;
-
-        /**
-         * < The interval for publishing job pending reason by scheduler daemon
-         */
-        public int publishReasonInterval;
-
-        /**
-         * < Interval(in seconds) of pending reason  publish for all jobs
-         */
-        public int publishReason4AllJobInterval;
-
-        /**
-         * < MC pending reason update interval (0 means no updates)
-         */
-        public int mcUpdPendingReasonInterval;
-
-        /**
-         * < MC pending reason update package size (0 means no limit)
-         */
-        public int mcUpdPendingReasonPkgSize;
-
-        /**
-         * < No preemption if the run time is greater  than the value defined in here
-         */
-        public int noPreemptRunTime;
-
-        /**
-         * < No preemption if the finish time is less than the value defined in here
-         */
-        public int noPreemptFinishTime;
-
-        /**
-         * < mbatchd Archive Time
-         */
-        public String acctArchiveAt;
-
-        /**
-         * < Absolute run limit for job
-         */
-        public int absoluteRunLimit;
-
-        /**
-         * < The job exit rate duration
-         */
-        public int lsbExitRateDuration;
-
-        /**
-         * <  The duration to trigger eadmin
-         */
-        public int lsbTriggerDuration;
-
-        /**
-         * < Maximum time for job information query commands (for example,with bjobs) to wait
-         */
-        public int maxJobinfoQueryPeriod;
-
-        /**
-         * < Job submission retrial interval for client
-         */
-        public int jobSubRetryInterval;
-
-        /**
-         * < System wide max pending jobs
-         */
-        public int pendingJobThreshold;
-
-
-        /**
-         * < Max number of concurrent query
-         */
-        public int maxConcurrentJobQuery;
-
-        /**
-         * < Min event switch time period
-         */
-        public int minSwitchPeriod;
-
-
-        /**
-         * < Condense pending reasons enabled
-         */
-        public int condensePendingReasons;
-
-        /**
-         * < Schedule Parallel jobs based on slots instead of CPUs
-         */
-        public int slotBasedParallelSched;
-
-        /**
-         * < Disable user job movement operations, like btop/bbot.
-         */
-        public int disableUserJobMovement;
-
-        /**
-         * < Detect and report idle jobs only after specified minutes.
-         */
-        public int detectIdleJobAfter;
-        public int useSymbolPriority;
-        /**
-         * < Use symbolic when specifing priority of symphony jobs/
-         * <p/>
-         * /**< Priority rounding for symphony jobs
-         */
-        public int JobPriorityRound;
-
-        /**
-         * < The mapping of the symbolic priority  for symphony jobs
-         */
-        public String priorityMapping;
-
-        /**
-         * < Maximum number of subdirectories under LSB_SHAREDIR/cluster/logdir/info
-         */
-        public int maxInfoDirs;
-
-        /**
-         * < The minimum period of a child MBD to serve queries in the MT way
-         */
-        public int minMbdRefreshTime;
-
-        /**
-         * < Stop asking license to LS not due to lack license
-         */
-        public int enableStopAskingLicenses2LS;
-
-        /**
-         * < Expire time for finished job which will not taken into account when calculating queue fairshare priority
-         */
-        public int expiredTime;
-
-        /**
-         * < MBD child query processes will only run on the following CPUs
-         */
-        public String mbdQueryCPUs;
-
-        /**
-         * < The default application profile assigned to jobs
-         */
-        public String defaultApp;
-
-        /**
-         * < Enable or disable data streaming
-         */
-        public int enableStream;
-
-        /**
-         * < File to which lsbatch data is streamed
-         */
-        public String streamFile;
-
-        /**
-         * < File size in MB to which lsbatch data is streamed
-         */
-        public int streamSize;
-
-        /**
-         * < Sync up host status with master LIM is enabled
-         */
-        public int syncUpHostStatusWithLIM;
-
-        /**
-         * < Project schedulign default SLA
-         */
-        public String defaultSLA;
-
-        /**
-         * < EGO Enabled SLA scheduling timer period
-         */
-        public int slaTimer;
-
-        /**
-         * < EGO Enabled SLA scheduling time to live
-         */
-        public int mbdEgoTtl;
-
-        /**
-         * < EGO Enabled SLA scheduling connection timeout
-         */
-        public int mbdEgoConnTimeout;
-
-        /**
-         * < EGO Enabled SLA scheduling read timeout
-         */
-        public int mbdEgoReadTimeout;
-
-        /**
-         * < EGO Enabled SLA scheduling use MXJ flag
-         */
-        public int mbdUseEgoMXJ;
-
-        /**
-         * < EGO Enabled SLA scheduling reclaim by queue
-         */
-        public int mbdEgoReclaimByQueue;
-
-        /**
-         * < EGO Enabled SLA scheduling default velocity
-         */
-        public int defaultSLAvelocity;
-
-        /**
-         * < Type of host exit rate exception handling types: EXIT_RATE_TYPE
-         */
-        public String exitRateTypes;
-
-        /**
-         * < Type of host exit rate exception handling types: GLOBAL_EXIT_RATE
-         */
-        public float globalJobExitRate;
-
-        /**
-         * < Type of host exit rate exception handling types ENABLE_EXIT_RATE_PER_SLOT
-         */
-        public int enableJobExitRatePerSlot;
-
-        /**
-         * < Performance metrics monitor is enabled  flag
-         */
-        public int enableMetric;
-
-        /**
-         * < Performance metrics monitor sample period flag
-         */
-        public int schMetricsSample;
-
-        /**
-         * < Used to bound: (1) factors, (2) weights, and (3) APS values
-         */
-        public float maxApsValue;
-
-        /**
-         * < Child mbatchd gets updated information about new jobs from the parent mbatchd
-         */
-        public int newjobRefresh;
-
-        /**
-         * < Job type to preempt, PREEMPT_JOBTYPE_BACKFILL, PREEMPT_JOBTYPE_EXCLUSIVE
-         */
-        public int preemptJobType;
-
-        /**
-         * < The default job group assigned to jobs
-         */
-        public String defaultJgrp;
-
-        /**
-         * < Max ratio between run limit and runtime estimation
-         */
-        public int jobRunlimitRatio;
-
-        /**
-         * < Enable the post-execution processing of the job to be included as part of the job flag
-         */
-        public int jobIncludePostproc;
-
-        /**
-         * < Timeout of post-execution processing
-         */
-        public int jobPostprocTimeout;
-
-        /**
-         * < The interval, in seconds, for updating the session scheduler status summary
-         */
-        public int sschedUpdateSummaryInterval;
-
-        /**
-         * < The number of completed tasks for updating the session scheduler status summary
-         */
-        public int sschedUpdateSummaryByTask;
-
-        /**
-         * < The maximum number of times a task can be requeued via requeue exit values
-         */
-        public int sschedRequeueLimit;
-
-        /**
-         * < The maximum number of times a task can be retried after a dispatch error
-         */
-        public int sschedRetryLimit;
-
-        /**
-         * < The maximum number of tasks that can be submitted in one session
-         */
-        public int sschedMaxTasks;
-
-        /**
-         * < The maximum run time of a single task
-         */
-        public int sschedMaxRuntime;
-
-        /**
-         * < The output directory for task accounting files
-         */
-        public String sschedAcctDir;
-
-        /**
-         * < If TRUE enable the job group automatic deletion functionality (default is FALSE).
-         */
-        public int jgrpAutoDel;
-
-        /**
-         * < Maximum number of job preempted times
-         */
-        public int maxJobPreempt;
-
-        /**
-         * < Maximum number of job re-queue times
-         */
-        public int maxJobRequeue;
-
-        /**
-         * < No preempt run time percent
-         */
-        public int noPreemptRunTimePercent;
-
-        /**
-         * < No preempt finish time percent
-         */
-        public int noPreemptFinishTimePercent;
-
-
-        /**
-         * < The reservation request being within JL/U.
-         */
-        public int slotReserveQueueLimit;
-
-        /**
-         * < Job accept limit percentage.
-         */
-        public int maxJobPercentagePerSession;
-
-        /**
-         * < The low priority job will use the slots freed by preempted jobs.
-         */
-        public int useSuspSlots;
-
-
-        /**
-         * < Maximum number of the backup stream.utc files
-         */
-        public int maxStreamFileNum;
-
-        /**
-         * < If enforced only admin can use bkill -r option
-         */
-        public int privilegedUserForceBkill;
-
-        /**
-         * < It controls the remote queue selection flow.
-         */
-        public int mcSchedulingEnhance;
-
-        /**
-         * < It controls update interval of the counters  and other original data in MC implementation
-         */
-        public int mcUpdateInterval;
-
-        /**
-         * < Jobs run on only on hosts belonging to the intersection of the queue the job was submitted to, advance reservation hosts, and any hosts specified by bsub -m at the time of submission.
-         */
-        public int intersectCandidateHosts;
-
-        /**
-         * < Enforces the limitations of a single specified user group.
-         */
-        public int enforceOneUGLimit;
-
-        /**
-         * < Enable or disable logging runtime estimation exceeded event
-         */
-        public int logRuntimeESTExceeded;
-
-        /**
-         * < Compute unit types.
-         */
-        public String computeUnitTypes;
-
-        /**
-         * < Fairshare adjustment weighting factor
-         */
-        public float fairAdjustFactor;
-
-        /**
-         * < abs runtime and cputime for LSF simulator
-         */
-        public int simAbsoluteTime;
-
-        /**
-         * < switch for job exception enhancement
-         */
-        public int extendJobException;
-    }
-
-    /* parameterInfo */
-
-
-/* Bits for preemptFor parameter */
-    public static final int GROUP_MAX = 0x0001;
-    public static final int GROUP_JLP = 0x0002;
-    public static final int USER_JLP = 0x0004;
-    public static final int HOST_JLU = 0x0008;
-
-/* minimum of job */
-    public static final int MINI_JOB = 0x0010;
-
-/* least run time */
-    public static final int LEAST_RUN_TIME = 0x0020;
-
-/* optimal mini job */
-    public static final int OPTIMAL_MINI_JOB = 0x0040;
-
-/* Bits for mcSchedulingEnhance parameter */
-    public static final int RESOURCE_ONLY = 0x0001;
-    public static final int COUNT_PREEMPTABLE = 0x0002;
-    public static final int HIGH_QUEUE_PRIORITY = 0x0004;
-    public static final int PREEMPTABLE_QUEUE_PRIORITY = 0x0008;
-    public static final int PENDING_WHEN_NOSLOTS = 0x0010;
-
-/* options for bcaladd, bcalmod, bcaldel */
-    public static final int CAL_FORCE = 0x0001;
-
-/* Bits for preemptJobType parameter,
-*  used to enable backfill and exclusive
-*  preemption */
-    public static final int PREEMPT_JOBTYPE_EXCLUSIVE = 0x0001;
-    public static final int PREEMPT_JOBTYPE_BACKFILL = 0x0002;
-
-/* For lsb_calendarinfo() call */
-
-    /**
-     * \brief  calendar Information Entry.
-     */
-    public static class calendarInfoEnt extends Structure {
-        public static class ByReference extends calendarInfoEnt implements Structure.ByReference {}
-        public static class ByValue extends calendarInfoEnt implements Structure.ByValue {}
-        public calendarInfoEnt() {}
-        public calendarInfoEnt(Pointer p) { super(p); read(); }
-
-
-        /**
-         * < A pointer to the name of the calendar.
-         */
-        public String name;
-
-        /**
-         * < A description string associated with the calendar.
-         */
-        public String desc;
-
-        /**
-         * < Calendar Expression
-         */
-        public String calExpr;
-
-        /**
-         * < User name
-         */
-        public String userName;
-
-        /**
-         * < Calendar status
-         */
-        public int status;
-
-        /**
-         * < For future use
-         */
-        public int options;
-
-        /**
-         * < Last time event of the calendar
-         */
-        public int lastDay;
-
-        /**
-         * < Next time event of the calendar
-         */
-        public int nextDay;
-
-        /**
-         * < Create Time
-         */
-        public NativeLong creatTime;
-
-        /**
-         * < Last Modify Time
-         */
-        public NativeLong lastModifyTime;
-
-        /**
-         * < Type of calendar, etc.
-         */
-        public int flags;
-    }
-
-
-
-    public static final int ALL_CALENDARS = 0x1;
-
-    public static final int EVE_HIST = 0x1;
-    public static final int EVENT_ACTIVE = 1;
-    public static final int EVENT_INACTIVE = 2;
-    public static final int EVENT_REJECT = 3;
-
-    public static final int EVENT_TYPE_UNKNOWN = 0;
-    public static final int EVENT_TYPE_LATCHED = 1;
-    public static final int EVENT_TYPE_PULSEALL = 2;
-    public static final int EVENT_TYPE_PULSE = 3;
-    public static final int EVENT_TYPE_EXCLUSIVE = 4;
-
-/* define event types */
-    public static final int EV_UNDEF = 0;
-    public static final int EV_FILE = 1;
-    public static final int EV_EXCEPT = 2;
-    public static final int EV_USER = 3;
-
-    public static class loadInfoEnt extends Structure {
-        public static class ByReference extends loadInfoEnt implements Structure.ByReference {}
-        public static class ByValue extends loadInfoEnt implements Structure.ByValue {}
-        public loadInfoEnt() {}
-        public loadInfoEnt(Pointer p) { super(p); read(); }
-
-        public String hostName;
-        public int status;
-        public FloatByReference load;
-    }
-
-
-
-    public static class queuePairEnt extends Structure {
-        public static class ByReference extends queuePairEnt implements Structure.ByReference {}
-        public static class ByValue extends queuePairEnt implements Structure.ByValue {}
-        public queuePairEnt() {}
-        public queuePairEnt(Pointer p) { super(p); read(); }
-
-        public String local;
-        public String remote;
-        public int send;
-        public int status;
-    }
-
-
-
-    public static class rmbCluAppEnt extends Structure {
-        public static class ByReference extends rmbCluAppEnt implements Structure.ByReference {}
-        public static class ByValue extends rmbCluAppEnt implements Structure.ByValue {}
-        public rmbCluAppEnt() {}
-        public rmbCluAppEnt(Pointer p) { super(p); read(); }
-
-        public String name;
-        public String description;
-    }
-
-
-
-/* define 'cluster status' in lease model
-*  for bclusters command
- */
-
-
-/* disconnection */
-    public static final int LEASE_CLU_STAT_DISC = 1;
-
-/* policy is exchanged but no lease is signed */
-    public static final int LEASE_CLU_STAT_CONN = 2;
-
-/* there are leases signed between two clusters */
-    public static final int LEASE_CLU_STAT_OK = 3;
-    public static final int LEASE_CLU_STAT_NUMBER = 3;
-/* consumer cluster status in lease model */
-
-    public static class consumerCluEnt extends Structure {
-        public static class ByReference extends consumerCluEnt implements Structure.ByReference {}
-        public static class ByValue extends consumerCluEnt implements Structure.ByValue {}
-        public consumerCluEnt() {}
-        public consumerCluEnt(Pointer p) { super(p); read(); }
-
-
-/* consumer cluster name */
-        public String cluName;
-
-/* cluster status, Ref- 'cluster status' definitions */
-        public int status;
-    }
-
-
-/* provider cluster status in lease model */
-
-    public static class providerCluEnt extends Structure {
-        public static class ByReference extends providerCluEnt implements Structure.ByReference {}
-        public static class ByValue extends providerCluEnt implements Structure.ByValue {}
-        public providerCluEnt() {}
-        public providerCluEnt(Pointer p) { super(p); read(); }
-
-
-/* provider cluster name */
-        public String cluName;
-
-/* cluster status, Ref- 'cluster status' definitions */
-        public int status;
-    }
-
-
-/* for remote batch model, its definition is same as  clusterInfoEnt*/
-
-    public static class rmbCluInfoEnt extends Structure {
-        public static class ByReference extends rmbCluInfoEnt implements Structure.ByReference {}
-        public static class ByValue extends rmbCluInfoEnt implements Structure.ByValue {}
-        public rmbCluInfoEnt() {}
-        public rmbCluInfoEnt(Pointer p) { super(p); read(); }
-
-        public String cluster;
-        public int numPairs;
-        public Pointer /* queuePairEnt.ByReference */ queues;
-        public int numApps;
-        public Pointer /* rmbCluAppEnt.ByReference */ apps;
-    }
-
-
-
-/* for leasing model */
-
-    public static class leaseCluInfoEnt extends Structure {
-        public static class ByReference extends leaseCluInfoEnt implements Structure.ByReference {}
-        public static class ByValue extends leaseCluInfoEnt implements Structure.ByValue {}
-        public leaseCluInfoEnt() {}
-        public leaseCluInfoEnt(Pointer p) { super(p); read(); }
-
-
-/* 1, import from all if "allremote" defined in lease queue*/
-        public int flags;
-
-/* the array size of consumer cluster array */
-        public int numConsumer;
-
-/* the consumer cluster array */
-        public Pointer /* consumerCluEnt.ByReference */ consumerClus;
-
-/* the array size of provider cluster array */
-        public int numProvider;
-
-/* the provider cluster array */
-        public Pointer /* providerCluEnt.ByReference */ providerClus;
-    }
-
-
-
-/* This is the old data structure, we
-*  leave it here to keep backward compatibility.
-*  It's definition is same as structure rmbCluInfoEnt.
-*  It is to transfer cluster status between mbatchd with
-*  old(4.x) bclusters command and old API-lsb_clusterinfo()
- */
-
-    public static class clusterInfoEnt extends Structure {
-        public static class ByReference extends clusterInfoEnt implements Structure.ByReference {}
-        public static class ByValue extends clusterInfoEnt implements Structure.ByValue {}
-        public clusterInfoEnt() {}
-        public clusterInfoEnt(Pointer p) { super(p); read(); }
-
-        public String cluster;
-        public int numPairs;
-        public Pointer /* queuePairEnt.ByReference */ queues;
-        public int numApps;
-        public Pointer /* rmbCluAppEnt.ByReference */ apps;
-    }
-
-
-/* the new data structure to transfer cluster status between mbatchd with
-*  new(5.0) bclusters command and new API-lsb_clusterinfoEx()
- */
-
-    public static class clusterInfoEntEx extends Structure {
-        public static class ByReference extends clusterInfoEntEx implements Structure.ByReference {}
-        public static class ByValue extends clusterInfoEntEx implements Structure.ByValue {}
-        public clusterInfoEntEx() {}
-        public clusterInfoEntEx(Pointer p) { super(p); read(); }
-
-
-/* cluster status related to remote batch*/
-        public rmbCluInfoEnt.ByReference rmbCluInfo;
-
-/* cluster status related to resource lease*/
-        public leaseCluInfoEnt leaseCluInfo;
-    }
-
-
-
-    public static class eventInfoEnt extends Structure {
-        public static class ByReference extends eventInfoEnt implements Structure.ByReference {}
-        public static class ByValue extends eventInfoEnt implements Structure.ByValue {}
-        public eventInfoEnt() {}
-        public eventInfoEnt(Pointer p) { super(p); read(); }
-
-
-/* name of event */
-        public String name;
-
-/* one of ACTIVE or INACTIVE */
-        public int status;
-
-/* one of LATCHED, PULSE and EXCLUSIVE */
-        public int type;
-
-/* one of FILE, ALARM, USER */
-        public int eType;
-
-/* user who created the event */
-        public String userName;
-
-/* event's attributes sent back from eeventd */
-        public String attributes;
-
-/* number of expression dependent on the event */
-        public int numDependents;
-
-/* last time when eeventd sent back message */
-        public NativeLong updateTime;
-
-/* last dispatched job dependent on the event */
-        public long lastDisJob;
-
-/* the time when the last job was dispatched */
-        public NativeLong lastDisTime;
-    }
-
-
-    public static final int ALL_EVENTS = 0x01;
-
-    /**
-     *  \addtogroup groupinfo_define groupinfo_define
-     *  define options for \ref lsb_usergrpinfo and \ref lsb_hostgrpinfo calls
-     */
-
-    /**
-     * < User group
-     */
-    public static final int USER_GRP = 0x1;
-
-    /**
-     * < Host group
-     */
-    public static final int HOST_GRP = 0x2;
-
-    /**
-     * < Host part group
-     */
-    public static final int HPART_HGRP = 0x4;
-    /**
-     *  \defgroup group_membership_option group_membership_option
-     *  \ingroup groupinfo_define
-     *  group membership options
-     */
-
-    /**
-     * < Expand the group membership recursively. That is, if a member of a group is itself a group, give the names of its members recursively, rather than its name, which is the default.
-     */
-    public static final int GRP_RECURSIVE = 0x8;
-
-    /**
-     * < Get membership of all groups.
-     */
-    public static final int GRP_ALL = 0x10;
-
-    /**
-     * < NQSQ_GRP
-     */
-    public static final int NQSQ_GRP = 0x20;
-
-    /**
-     * < Group shares
-     */
-    public static final int GRP_SHARES = 0x40;
-
-    /**
-     * < Dynamic group
-     */
-    public static final int DYNAMIC_GRP = 0x800;
-
-    /**
-     * < Group cu
-     */
-    public static final int GRP_CU = 0x1000;
-
-    /**
-     * \brief Structure for representing the shares assigned to a user group.
-     */
-    public static class userShares extends Structure {
-        public static class ByReference extends userShares implements Structure.ByReference {}
-        public static class ByValue extends userShares implements Structure.ByValue {}
-        public userShares() {}
-        public userShares(Pointer p) { super(p); read(); }
-
-
-        /**
-         * < This can be a user or a keyword "default" or others
-         */
-        public String user;
-
-        /**
-         * < The number of shares assigned to the user
-         */
-        public int shares;
-    }
-
-
-
-
-    /**
-     * \brief  group information entry.
-     */
-    public static class groupInfoEnt extends Structure {
-        public static class ByReference extends groupInfoEnt implements Structure.ByReference {}
-        public static class ByValue extends groupInfoEnt implements Structure.ByValue {}
-        public groupInfoEnt() {}
-        public groupInfoEnt(Pointer p) { super(p); read(); }
-
-
-        /**
-         * < Group name
-         */
-        public String group;
-
-        /**
-         * < ASCII list of member names
-         */
-        public String memberList;
-
-        /**
-         * < ASCII list of admin member names
-         */
-        public String adminMemberList;
-
-        /**
-         * < The number of users with shares
-         */
-        public int numUserShares;
-
-        /**
-         * < The user shares rappresentation
-         */
-        public Pointer /* userShares.ByReference */ userShares;
-
-        /**
-         *  \addtogroup group_define group_define
-         *   group define statements
-         */
-
-        /**
-         * < Group output is in regular (uncondensed) format.
-         */
-        public static final int GRP_NO_CONDENSE_OUTPUT = 0x01;
-
-        /**
-         * < Group output is in condensed format.
-         */
-        public static final int GRP_CONDENSE_OUTPUT = 0x02;
-
-        /**
-         * < Group have regular expresion
-         */
-        public static final int GRP_HAVE_REG_EXP = 0x04;
-
-        /**
-         * < Group is a service class.
-         */
-        public static final int GRP_SERVICE_CLASS = 0x08;
-
-        /**
-         * < Group is a compute unit.
-         */
-        public static final int GRP_IS_CU = 0x10;
-
-        /**
-         * < Options.see \ref group_define
-         */
-        public int options;
-
-        /**
-         * < Host membership pattern
-         */
-        public String pattern;
-
-        /**
-         * < Negation membership pattern
-         */
-        public String neg_pattern;
-
-        /**
-         * < Compute unit type
-         */
-        public int cu_type;
-    }
-
-
-
-    /**
-     * \brief  run job request.
-     */
-    public static class runJobRequest extends Structure {
-        public static class ByReference extends runJobRequest implements Structure.ByReference {}
-        public static class ByValue extends runJobRequest implements Structure.ByValue {}
-        public runJobRequest() {}
-        public runJobRequest(Pointer p) { super(p); read(); }
-
-
-        /**
-         * < Jobid of the requested job
-         */
-        public long jobId;
-
-        /**
-         * < The number of hosts
-         */
-        public int numHosts;
-
-        /**
-         * < Vector of hostnames
-         */
-        public Pointer hostname;
-        /**
-         *  \addtogroup runjob_option runjob_option
-         *  Options used for lsb_runjob:
-         */
-
-
-        /**
-         * < Normal jobs
-         */
-        public static final int RUNJOB_OPT_NORMAL = 0x01;
-
-        /**
-         * < Nostop jobs
-         */
-        public static final int RUNJOB_OPT_NOSTOP = 0x02;
-
-        /**
-         * < Pending jobs only, no finished jobs
-         */
-        public static final int RUNJOB_OPT_PENDONLY = 0x04;
-
-        /**
-         * < Check point job only, from beginning
-         */
-        public static final int RUNJOB_OPT_FROM_BEGIN = 0x08;
-
-        /**
-         * < brun to use free CPUs only
-         */
-        public static final int RUNJOB_OPT_FREE = 0x10;
-
-        /**
-         * < brun ignoring rusage
-         */
-        public static final int RUNJOB_OPT_IGNORE_RUSAGE = 0x20;
-
-        /**
-         * < Run job request options, see \ref runjob_option
-         */
-        public int options;
-
-        /**
-         * < Vector of number of slots per host
-         */
-        public IntByReference slots;
-    }
-
-
-
-    /**
-     *  \addtogroup external_msg_processing external_msg_processing
-     *  options for \ref lsb_readjobmsg call
-     */
-
-    /**
-     *  \defgroup external_msg_post external_msg_post
-     *  options specifying if the message has an attachment to be posted
-     */
-
-    /**
-     * < Post the external job message. There  is no attached data file.
-     */
-    public static final int EXT_MSG_POST = 0x01;
-
-    /**
-     * < Post the external job message and data file posted to the job.
-     */
-    public static final int EXT_ATTA_POST = 0x02;
-
-    /**
-     * <Read the external job message. There is no attached data file.
-     */
-    public static final int EXT_MSG_READ = 0x04;
-
-    /**
-     * < Read the external job message and data file posted to the job.If there is no data file attached, the error message "The attached data of the message is not available" is displayed, and the external job message is displayed.
-     */
-    public static final int EXT_ATTA_READ = 0x08;
-
-    /**
-     * < Replay the external message
-     */
-    public static final int EXT_MSG_REPLAY = 0x10;
-
-    /**
-     * < Post the external job noevent message
-     */
-    public static final int EXT_MSG_POST_NOEVENT = 0x20;
-
-
-    /**
-     * \brief structure jobExternalMsgReq contains the information required to
-     * define an external message of a job.
-     */
-    public static class jobExternalMsgReq extends Structure {
-        public static class ByReference extends jobExternalMsgReq implements Structure.ByReference {}
-        public static class ByValue extends jobExternalMsgReq implements Structure.ByValue {}
-        public jobExternalMsgReq() {}
-        public jobExternalMsgReq(Pointer p) { super(p); read(); }
-
-
-        /**
-         * < Specifies if the message has an attachment to be read.<lsf/lsbatch.h> defines the following flags constructed from bits. These flags correspond to options.\n EXT_MSG_READ\n Read the external job message. There is no attached data file.\n EXT_ATTA_READ\n Read the external job message and data file posted to the job.\n If there is no data file attached, the error message "The attached data of the message is not available" is displayed, and the external job  message is displayed.
-         */
-        public int options;
-
-        /**
-         * < The system generated job Id of the job.
-         */
-        public long jobId;
-
-        /**
-         * < The name of the job if jobId is undefined (<=0)
-         */
-        public String jobName;
-
-        /**
-         * < The message index. A job can have more than one message. Use msgIdx in an array to index messages.
-         */
-        public int msgIdx;
-
-        /**
-         * < Text description of the msg
-         */
-        public String desc;
-
-        /**
-         * < The userId of the author of the message.
-         */
-        public int userId;
-
-        /**
-         * < The size of the data file. If no data file is attached, the size is 0.
-         */
-        public NativeLong dataSize;
-
-        /**
-         * < The time the author posted the message.
-         */
-        public NativeLong postTime;
-
-        /**
-         * < The author of the message.
-         */
-        public String userName;
-    }
-
-
-
-    /**
-     *  \addtogroup ext_data_status ext_data_status
-     */
-
-    /**
-     * < Transferring the message's data file.
-     */
-    public static final int EXT_DATA_UNKNOWN = 0;
-
-    /**
-     * < The message does not have an attached  data file.
-     */
-    public static final int EXT_DATA_NOEXIST = 1;
-
-    /**
-     * < The message's data file is available.
-     */
-    public static final int EXT_DATA_AVAIL = 2;
-
-    /**
-     * < The message's data file is corrupt.
-     */
-    public static final int EXT_DATA_UNAVAIL = 3;
-
-    /**
-     * \brief structure jobExternalMsgReply contains the information required to
-     * define an external message reply.
-     */
-    public static class jobExternalMsgReply extends Structure {
-        public static class ByReference extends jobExternalMsgReply implements Structure.ByReference {}
-        public static class ByValue extends jobExternalMsgReply implements Structure.ByValue {}
-        public jobExternalMsgReply() {}
-        public jobExternalMsgReply(Pointer p) { super(p); read(); }
-
-
-        /**
-         * < The system generated job Id of the job associated with the message.
-         */
-        public long jobId;
-
-        /**
-         * < The message index. A job can have more than one message. Use msgIdx in an array to index messages.
-         */
-        public int msgIdx;
-
-        /**
-         * < The message you want to read.
-         */
-        public String desc;
-
-        /**
-         * < The user Id of the author of the message.
-         */
-        public int userId;
-
-        /**
-         * < The size of the data file attached. If no data file is attached, the size is 0.
-         */
-        public NativeLong dataSize;
-
-        /**
-         * < The time the message was posted.
-         */
-        public NativeLong postTime;
-
-        /**
-         * < The status of the attached data file.  The status of the data file can be one of the following:\n EXT_DATA_UNKNOWN\n Transferring the message's data file.\n EXT_DATA_NOEXIST\n The message does not have an attached data file.\n EXT_DATA_AVAIL\n The message's data file is available. \n EXT_DATA_UNAVAIL\n The message's data file is corrupt.
-         */
-        public int dataStatus;
-
-        /**
-         * < The author of the msg
-         */
-        public String userName;
-    }
-
-
-
-
-    /**
-     * Data structures representing the symphony job status update request.
-     */
-    public static class symJobInfo extends Structure {
-        public static class ByReference extends symJobInfo implements Structure.ByReference {}
-        public static class ByValue extends symJobInfo implements Structure.ByValue {}
-        public symJobInfo() {}
-        public symJobInfo(Pointer p) { super(p); read(); }
-
-
-/* the service parititon that SSM works for */
-        public String partition;
-
-/* the priority of the symphony job */
-        public int priority;
-
-/* the full name that indicates the job relationship */
-        public String jobFullName;
-
-/* the auxiliary description to help updating command info */
-        public String auxCmdDesc;
-
-/* the auxiliary description to help updating job description info */
-        public String auxJobDesc;
-    }
-
-
-
-    public static class symJobStatus extends Structure {
-        public static class ByReference extends symJobStatus implements Structure.ByReference {}
-        public static class ByValue extends symJobStatus implements Structure.ByValue {}
-        public symJobStatus() {}
-        public symJobStatus(Pointer p) { super(p); read(); }
-
-
-/* text description of the symphony job status */
-        public String desc;
-    }
-
-
-
-    public static class symJobProgress extends Structure {
-        public static class ByReference extends symJobProgress implements Structure.ByReference {}
-        public static class ByValue extends symJobProgress implements Structure.ByValue {}
-        public symJobProgress() {}
-        public symJobProgress(Pointer p) { super(p); read(); }
-
-
-/* text description of the symphony job progress */
-        public String desc;
-    }
-
-
-
-
-    public static class symJobStatusUpdateReq extends Structure {
-        public static class ByReference extends symJobStatusUpdateReq implements Structure.ByReference {}
-        public static class ByValue extends symJobStatusUpdateReq implements Structure.ByValue {}
-        public symJobStatusUpdateReq() {}
-        public symJobStatusUpdateReq(Pointer p) { super(p); read(); }
-
-
-/* the job to be update info into MBD */
-        public long jobId;
-
-        public static final int SYM_JOB_UPDATE_NONE = 0x0;
-        public static final int SYM_JOB_UPDATE_INFO = 0x1;
-        public static final int SYM_JOB_UPDATE_STATUS = 0x2;
-        public static final int SYM_JOB_UPDATE_PROGRESS = 0x4;
-
-/* the option to update the info */
-        public int bitOption;
-        public symJobInfo info;
-        public int numOfJobStatus;
-        public Pointer /* symJobStatus.ByReference */ status;
-        public symJobProgress progress;
-    }
-
-
-
-    public static class symJobStatusUpdateReqArray extends Structure {
-        public static class ByReference extends symJobStatusUpdateReqArray implements Structure.ByReference {}
-        public static class ByValue extends symJobStatusUpdateReqArray implements Structure.ByValue {}
-        public symJobStatusUpdateReqArray() {}
-        public symJobStatusUpdateReqArray(Pointer p) { super(p); read(); }
-
-        public int numOfJobReq;
-        public Pointer /* symJobStatusUpdateReq.ByReference */ symJobReqs;
-    }
-
-
-
-
-    /**
-     * Data structures representing the symphony job status update reply.
-     */
-
-    public static class symJobUpdateAck extends Structure {
-        public static class ByReference extends symJobUpdateAck implements Structure.ByReference {}
-        public static class ByValue extends symJobUpdateAck implements Structure.ByValue {}
-        public symJobUpdateAck() {}
-        public symJobUpdateAck(Pointer p) { super(p); read(); }
-
-        public static int SYM_UPDATE_ACK_OK = 0;
-        public static final int SYM_UPDATE_ACK_ERR = 1;
-        public int ackCode;
-
-/* text description of job info update acknowledgement */
-        public String desc;
-    }
-
-
-
-    public static class symJobStatusUpdateReply extends Structure {
-        public static class ByReference extends symJobStatusUpdateReply implements Structure.ByReference {}
-        public static class ByValue extends symJobStatusUpdateReply implements Structure.ByValue {}
-        public symJobStatusUpdateReply() {}
-        public symJobStatusUpdateReply(Pointer p) { super(p); read(); }
-
-
-/* the job to be update info into MBD */
-        public long jobId;
-        public static final int SYM_UPDATE_INFO_IDX = 0;
-        public static final int SYM_UPDATE_STATUS_IDX = 1;
-        public static final int SYM_UPDATE_PROGRESS_IDX = 2;
-        public static final int NUM_SYM_UPDATE_ACK = 3;
-        public symJobUpdateAck[] acks = new symJobUpdateAck[NUM_SYM_UPDATE_ACK];
-    }
-
-
-
-    public static class symJobStatusUpdateReplyArray extends Structure {
-        public static class ByReference extends symJobStatusUpdateReplyArray implements Structure.ByReference {}
-        public static class ByValue extends symJobStatusUpdateReplyArray implements Structure.ByValue {}
-        public symJobStatusUpdateReplyArray() {}
-        public symJobStatusUpdateReplyArray(Pointer p) { super(p); read(); }
-
-        public int numOfJobReply;
-        public Pointer /* symJobStatusUpdateReply.ByReference */ symJobReplys;
-    }
-
-
-
-
-/* Data structure representing the job array requeue operation.
-*  o jobId is the Lsbatch id of the job array to be requeued
-*  o status is the desired requeue status of the job, by default
-*    it is JOB_STAT_PEND, or user specified JOB_STAT_PSUSP
-*  o options specifies the status of the array elements that have
-*    to be requeued.
-*
-*  The function that operates on the data is lsb_requeuejob()
- */
-
-    /**
-     *  \addtogroup requeuejob_options requeuejob_options
-     *  define statements used by \ref lsb_requeuejob.
-     */
-
-    /**
-     * < Requeues jobs that have finished running. Jobs that have exited are not re-run. Equivalent to brequeue -d command line option.
-     */
-    public static final int REQUEUE_DONE = 0x1;
-
-    /**
-     * < Requeues jobs that have exited. Finished jobs are not re-run. Equivalent to brequeue -e command line option.
-     */
-    public static final int REQUEUE_EXIT = 0x2;
-
-    /**
-     * < Requeues running jobs and puts them in PEND state. Equivalent to brequeue -r command line option.
-     */
-    public static final int REQUEUE_RUN = 0x4;
-
-    /**
-     * \brief  requeued job
-     */
-    public static class jobrequeue extends Structure {
-        public static class ByReference extends jobrequeue implements Structure.ByReference {}
-        public static class ByValue extends jobrequeue implements Structure.ByValue {}
-        public jobrequeue() {}
-        public jobrequeue(Pointer p) { super(p); read(); }
-
-
-        /**
-         * < Specifies the jobid of a single job or an array of jobs.
-         */
-        public long jobId;
-
-        /**
-         * < Specifies the lsbatch status of the requeued job after it has been requeued. The job status can be JOB_STAT_PEND or JOB_STATE_PSUSP. The default status is JOB_STAT_PEND.
-         */
-        public int status;
-
-        /**
-         * < Specifies the array elements to be requeued.  see \ref requeuejob_options
-         */
-        public int options;
-    }
-
-
-
-    public static class requeueEStruct extends Structure {
-        public static class ByReference extends requeueEStruct implements Structure.ByReference {}
-        public static class ByValue extends requeueEStruct implements Structure.ByValue {}
-        public requeueEStruct() {}
-        public requeueEStruct(Pointer p) { super(p); read(); }
-
-
-/* requeue type: normal, exclude, other, prefer_other, etc. */
-        public int type;
-
-/* requeue type: normal - as in 2.2 */
-        public static final int RQE_NORMAL = 0;
-
-/* requeue type: exclude */
-        public static final int RQE_EXCLUDE = 1;
-
-/* indicate the end of the list */
-        public static final int RQE_END = 255;
-
-/* requeue exit value */
-        public int value;
-
-/* requeue interval */
-        public int interval;
-    }
-
-
-
-    public static class requeue extends Structure {
-        public static class ByReference extends requeue implements Structure.ByReference {}
-        public static class ByValue extends requeue implements Structure.ByValue {}
-        public requeue() {}
-        public requeue(Pointer p) { super(p); read(); }
-
-        public int numReqValues;
-        public Pointer /* requeueEStruct.ByReference */ reqValues;
-    }
-
-
-
-/* The Service Level Agreement in LSF
- */
-
-
-/* This is the library representation of the
-*  service class.
- */
-
-    public static class serviceClass extends Structure {
-        public static class ByReference extends serviceClass implements Structure.ByReference {}
-        public static class ByValue extends serviceClass implements Structure.ByValue {}
-        public serviceClass() {}
-        public serviceClass(Pointer p) { super(p); read(); }
-
-
-/* SLA name */
-        public String name;
-
-/* SLA priority */
-        public float priority;
-
-/* The number of goals */
-        public int ngoals;
-
-/* The array of goals */
-        public Pointer /* objective.ByReference */ goals;
-
-/* Users allowed to use the SLA */
-        public String userGroups;
-
-/* SLA description */
-        public String description;
-
-/* SLA control action */
-        public String controlAction;
-
-/* Finished jobs per CLEAN_PERIOD */
-        public float throughput;
-
-/* Job counters */
-        public int[] counters = new int[NUM_JGRP_COUNTERS + 1];
-
-/* project scheduling enabled sla */
-        public String consumer;
-
-/* SLA EGO control parameters */
-        public slaControl.ByReference ctrl;
-
-/* SLA EGO control parameters */
-        public slaControlExt.ByReference ctrlExt;
-    }
-
-
-
-/* This is the library representation of the
-*  Service Level Objective.
- */
-
-    public static final int GOAL_WINDOW_OPEN = 0x1;
-    public static final int GOAL_WINDOW_CLOSED = 0x2;
-    public static final int GOAL_ONTIME = 0x4;
-    public static final int GOAL_DELAYED = 0x8;
-    public static final int GOAL_DISABLED = 0x10;
-
-/* Enumerate all the possible performance goals
-*  for a service class.
- */
-
-    public static interface objectives {
-        public static int GOAL_DEADLINE = 0;
-        public static int GOAL_VELOCITY = 1;
-        public static int GOAL_THROUGHPUT = 2;
-    }
-
-
-
-/* The objective of a goal, also called SLO, is represented
-*  by this data structure.
- */
-
-    public static class objective extends Structure {
-        public static class ByReference extends objective implements Structure.ByReference {}
-        public static class ByValue extends objective implements Structure.ByValue {}
-        public objective() {}
-        public objective(Pointer p) { super(p); read(); }
-
-
-/* goal specs from lsb.serviceclasses */
-        public String spec;
-
-/* goal type */
-        public int type;
-
-/* the state of the goal OnTime || Delayed */
-        public int state;
-
-/* the configured value */
-        public int goal;
-
-/* the actual value */
-        public int actual;
-
-/* the optimum value */
-        public int optimum;
-
-/* the minimum value */
-        public int minimum;
-    }
-
-
-
-/* Control parameters for SLA management of hosts belonging
-*  to the EGO cluster. The control parameters are for each
-*  SLA that gets its hosts from EGO.
- */
-
-    public static class slaControl extends Structure {
-        public static class ByReference extends slaControl implements Structure.ByReference {}
-        public static class ByValue extends slaControl implements Structure.ByValue {}
-        public slaControl() {}
-        public slaControl(Pointer p) { super(p); read(); }
-
-
-/* sla name */
-        public String sla;
-
-/* EGO consumer the sla is mapped to */
-        public String consumer;
-
-/* timeout for returning hosts to EGO */
-        public int maxHostIdleTime;
-
-/* timeout left before EGO forcefully reclaims */
-        public int recallTimeout;
-
-/* number of hosts beign recalled */
-        public int numHostRecalled;
-
-/* EGO resource requirement */
-        public String egoResReq;
-    }
-
-
-
-    public static class slaControlExt extends Structure {
-        public static class ByReference extends slaControlExt implements Structure.ByReference {}
-        public static class ByValue extends slaControlExt implements Structure.ByValue {}
-        public slaControlExt() {}
-        public slaControlExt(Pointer p) { super(p); read(); }
-
-
-/* whether exclusive allocation */
-        public int allocflags;
-
-/* tile parameter */
-        public int tile;
-    }
-
-
-
-/* Application Encapsulation in LSF
-*
-*  This is the library representation of the
-*  application.
- */
-
-    public static class appInfoEnt extends Structure {
-        public static class ByReference extends appInfoEnt implements Structure.ByReference {}
-        public static class ByValue extends appInfoEnt implements Structure.ByValue {}
-        public appInfoEnt() {}
-        public appInfoEnt(Pointer p) { super(p); read(); }
-
-
-/* app name */
-        public String name;
-
-/* app description */
-        public String description;
-
-/* num of total jobs */
-        public int numJobs;
-
-/* num of pending slots */
-        public int numPEND;
-
-/* num of running slots */
-        public int numRUN;
-
-/* num of suspend slots */
-        public int numSSUSP;
-
-/* num of ususp slots */
-        public int numUSUSP;
-
-/* reserved job slots */
-        public int numRESERVE;
-
-/* app attributes */
-        public int aAttrib;
-
-/* number of jobs in one chunk */
-        public int chunkJobSize;
-
-/* requeue exit values */
-        public String requeueEValues;
-
-/* success exit values */
-        public String successEValues;
-
-/* app pre execution */
-        public String preCmd;
-
-/* app post execution */
-        public String postCmd;
-
-/* Job starter command(s) */
-        public String jobStarter;
-
-/* suspend action command */
-        public String suspendActCmd;
-
-/* resume action command */
-        public String resumeActCmd;
-
-/* terimate action command */
-        public String terminateActCmd;
-
-/*memory limit level type */
-        public int memLimitType;
-
-/* LSF resource limits (soft)*/
-        public int[] defLimits = new int[LibLsf.LSF_RLIM_NLIMITS];
-
-/* host spec from CPULIMIT or  RUNLIMIT */
-        public String hostSpec;
-
-/* resource requirement string */
-        public String resReq;
-
-/* maximal processor limit */
-        public int maxProcLimit;
-
-/* default processor limit */
-        public int defProcLimit;
-
-/* minimal processor limit */
-        public int minProcLimit;
-
-/* estimated run time */
-        public int runTime;
-
-/* include postproc as part of job */
-        public int jobIncludePostProc;
-
-/* time window for postproc */
-        public int jobPostProcTimeOut;
-
-/* remote task gone action */
-        public String rTaskGoneAction;
-
-/* pathname of pjob env script */
-        public String djobEnvScript;
-
-/* DJOB rusage interval */
-        public int djobRuInterval;
-
-/* DJOB heartbeat interval */
-        public int djobHbInterval;
-
-/* DJOB communication fail action */
-        public String djobCommfailAction;
-
-/* disable Distributed Application Framework */
-        public int djobDisabled;
-
-/* grace period (in seconds) before terminating tasks when a job shrinks*/
-        public int djobResizeGracePeriod;
-
-/* chkpnt directory */
-        public String chkpntDir;
-
-/* chlpnt method */
-        public String chkpntMethod;
-
-/* chkpnt period */
-        public int chkpntPeriod;
-
-/* initial chkpnt period */
-        public int initChkpntPeriod;
-
-/* migration  threshold */
-        public int migThreshold;
-
-/* maximum number of job preempted times */
-        public int maxJobPreempt;
-
-/* maximum number of pre-exec retry times */
-        public int maxPreExecRetry;
-
-/* maximum number of pre-exec retry times for local cluster */
-        public int localMaxPreExecRetry;
-
-/* maximum number of job re-queue times */
-        public int maxJobRequeue;
-
-/* no preempt run time */
-        public int noPreemptRunTime;
-
-/* no preempt finish time */
-        public int noPreemptFinishTime;
-
-/* no preempt run time percent */
-        public int noPreemptRunTimePercent;
-
-/* no preempt finish time percent */
-        public int noPreemptFinishTimePercent;
-
-/* use Linux-PAM */
-        public int usePam;
-
-/* processor binding options */
-        public int bindingOption;
-
-/* persistent same hosts and same order */
-        public int persistHostOrder;
-
-/* job resize notification cmd */
-        public String resizeNotifyCmd;
-    }
-
-
-
-/* application attributes
- */
-
-/* rerunnable application */
-    public static final int A_ATTRIB_RERUNNABLE = 0x01;
-
-/* non rerunnable application */
-    public static final int A_ATTRIB_NONRERUNNABLE = 0x02;
-
-/* default application */
-    public static final int A_ATTRIB_DEFAULT = 0x04;
-
-/* runtime is absolute */
-    public static final int A_ATTRIB_ABS_RUNLIMIT = 0x08;
-
-/* process binding application */
-    public static final int A_ATTRIB_JOBBINDING = 0x10;
-
-/* process binding application */
-    public static final int A_ATTRIB_NONJOBBINDING = 0x20;
-
-/* checkpointable application */
-    public static final int A_ATTRIB_CHKPNT = 0x40;
-
-/* Job can be resizable manually */
-    public static final int A_ATTRIB_RESIZABLE = 0x80;
-
-/* Job can be resized automatically */
-    public static final int A_ATTRIB_AUTO_RESIZABLE = 0x100;
-
-
-/* processor binding options */
-    public static final int BINDING_OPTION_BALANCE = 0x1;
-    public static final int BINDING_OPTION_PACK = 0x2;
-    public static final int BINDING_OPTION_ANY = 0x4;
-    public static final int BINDING_OPTION_USER = 0x8;
-    public static final int BINDING_OPTION_USER_CPU_LIST = 0x10;
-    public static final int BINDING_OPTION_NONE = 0x20;
-
-    /**
-     *  \addtogroup movejob_options movejob_options
-     *  options for \ref lsb_movejob call
-     */
-
-    /**
-     * <  To top
-     */
-    public static final int TO_TOP = 1;
-
-    /**
-     * <  To bottom
-     */
-    public static final int TO_BOTTOM = 2;
-
-    /**
-     *  \addtogroup queue_ctrl_option queue_ctrl_option
-     *  options for \ref lsb_queuecontrol call
-     */
-
-    /**
-     * < Open the queue to accept jobs.
-     */
-    public static final int QUEUE_OPEN = 1;
-
-    /**
-     * < Close the queue so it will not accept jobs.
-     */
-    public static final int QUEUE_CLOSED = 2;
-
-    /**
-     * < Activate the queue to dispatch jobs.
-     */
-    public static final int QUEUE_ACTIVATE = 3;
-
-    /**
-     * < Inactivate the queue so it will not dispatch jobs.
-     */
-    public static final int QUEUE_INACTIVATE = 4;
-
-    /**
-     * < Clean the queue
-     */
-    public static final int QUEUE_CLEAN = 5;
-
-    /**
-     * \brief The structure of queueCtrlReq
-     */
-    public static class queueCtrlReq extends Structure {
-        public static class ByReference extends queueCtrlReq implements Structure.ByReference {}
-        public static class ByValue extends queueCtrlReq implements Structure.ByValue {}
-        public queueCtrlReq() {}
-        public queueCtrlReq(Pointer p) { super(p); read(); }
-
-
-        /**
-         * < The name of the queue to be controlled.
-         */
-        public String queue;
-
-        /**
-         * < Operations to be applied, for example, QUEUE_OPEN. You can refer to \ref queue_ctrl_option for more options.
-         */
-        public int opCode;
-
-        /**
-         * < The message attached by the admin
-         */
-        public String message;
-    }
-
-
-
-/* options for lsb_hostcontrol() call */
-    /**
-     *  \addtogroup host_ctrl_option host_ctrl_option
-     *  options operations to be applied
-     */
-
-    /**
-     * < Opens the host to accept jobs.
-     */
-    public static final int HOST_OPEN = 1;
-
-    /**
-     * < Closes the host so that no jobs can be dispatched to it.
-     */
-    public static final int HOST_CLOSE = 2;
-
-    /**
-     * < Restarts sbatchd on the host. sbatchd will receive a request from mbatchd and re-execute. This permits the sbatchd binary to be updated. This operation fails if no sbatchd is running on the specified host.
-     */
-    public static final int HOST_REBOOT = 3;
-
-    /**
-     * < The sbatchd on the host will exit.
-     */
-    public static final int HOST_SHUTDOWN = 4;
-
-    /**
-     * < Used for closing leased host on the submission cluster
-     */
-    public static final int HOST_CLOSE_REMOTE = 5;
-
-    /**
-     * \brief  Host control request.
-     */
-    public static class hostCtrlReq extends Structure {
-        public static class ByReference extends hostCtrlReq implements Structure.ByReference {}
-        public static class ByValue extends hostCtrlReq implements Structure.ByValue {}
-        public hostCtrlReq() {}
-        public hostCtrlReq(Pointer p) { super(p); read(); }
-
-
-        /**
-         * < The host to be controlled. If host is null, the local host is assumed.
-         */
-        public String host;
-
-        /**
-         * < Operations to be applied in \ref host_ctrl_option.
-         */
-        public int opCode;
-
-        /**
-         * < Message attached by the administrator.
-         */
-        public String message;
-    }
-
-
-
-/* options for lsb_hgcontrol() call */
-    public static final int HGHOST_ADD = 1;
-    public static final int HGHOST_DEL = 2;
-
-    public static class hgCtrlReq extends Structure {
-        public static class ByReference extends hgCtrlReq implements Structure.ByReference {}
-        public static class ByValue extends hgCtrlReq implements Structure.ByValue {}
-        public hgCtrlReq() {}
-        public hgCtrlReq(Pointer p) { super(p); read(); }
-
-        public int opCode;
-        public String grpname;
-        public int numhosts;
-        public Pointer hosts;
-        public String message;
-    }
-
-
-
-    public static class hgCtrlReply extends Structure {
-        public static class ByReference extends hgCtrlReply implements Structure.ByReference {}
-        public static class ByValue extends hgCtrlReply implements Structure.ByValue {}
-        public hgCtrlReply() {}
-        public hgCtrlReply(Pointer p) { super(p); read(); }
-
-        public int numsucc;
-        public int numfail;
-        public Pointer succHosts;
-        public Pointer failHosts;
-        public IntByReference failReasons;
-    }
-
-
-
-/* options for lsb_reconfig() call */
-    /**
-     *  \addtogroup mbd_operation mbd_operation
-     *   options for \ref lsb_reconfig call
-     */
-
-    /**
-     * < mbatchd restart
-     */
-    public static final int MBD_RESTART = 0;
-
-    /**
-     * < mbatchd reread configuration files
-     */
-    public static final int MBD_RECONFIG = 1;
-
-    /**
-     * < mbatchd check validity of configuration files
-     */
-    public static final int MBD_CKCONFIG = 2;
-
-    /**
-     * \brief  mbatchd control request.
-     */
-    public static class mbdCtrlReq extends Structure {
-        public static class ByReference extends mbdCtrlReq implements Structure.ByReference {}
-        public static class ByValue extends mbdCtrlReq implements Structure.ByValue {}
-        public mbdCtrlReq() {}
-        public mbdCtrlReq(Pointer p) { super(p); read(); }
-
-
-        /**
-         * < Operation applied, defined in \ref mbd_operation
-         */
-        public int opCode;
-
-        /**
-         * < Not used so far
-         */
-        public String name;
-
-        /**
-         * < The message attached by the admin
-         */
-        public String message;
-    }
-
-
-
-/* opcode for turn on or off the perfmon monitor */
-    public static final int PERFMON_START = 1;
-    public static final int PERFMON_STOP = 2;
-    public static final int PERFMON_SET_PERIOD = 3;
-
-
-/* defualt sample period 60 */
-    public static final int DEF_PERFMON_PERIOD = 60;
-
-
-    public static class perfmonMetricsEnt extends Structure {
-        public static class ByReference extends perfmonMetricsEnt implements Structure.ByReference {}
-        public static class ByValue extends perfmonMetricsEnt implements Structure.ByValue {}
-        public perfmonMetricsEnt() {}
-        public perfmonMetricsEnt(Pointer p) { super(p); read(); }
-
-/* metrice name */
-        public String name;
-
-/* last period counters */
-        public NativeLong current;
-
-/* max of (counter/interval)*sample period for one period */
-        public NativeLong max;
-
-/* min of (counter/interval)*sample period for one period */
-        public NativeLong min;
-
-/* avg of (total/interval)*sample period for one period */
-        public NativeLong avg;
-
-/* total counters from performance monitor turn on */
-        public String total;
-    }
-
-
-
-/*performance monitor info*/
-
-    public static class perfmonInfo extends Structure {
-        public static class ByReference extends perfmonInfo implements Structure.ByReference {}
-        public static class ByValue extends perfmonInfo implements Structure.ByValue {}
-        public perfmonInfo() {}
-        public perfmonInfo(Pointer p) { super(p); read(); }
-
-/* number of metrics*/
-        public int num;
-
-/* array of metrics counter */
-        public Pointer /* perfmonMetricsEnt.ByReference */ record;
-
-/* sample period */
-        public int period;
-
-/* time when the performance moniter turn on */
-        public NativeLong start;
-
-/* time when the performance moniter turn off */
-        public NativeLong end;
-    }
-
-
-
-/* options for lsb_reljgrp() call */
-    public static final int JGRP_RELEASE_PARENTONLY = 0x01;
-
-
-    /**
-     * \brief Records of logged events
-     */
-    public static class logSwitchLog extends Structure {
-        public static class ByReference extends logSwitchLog implements Structure.ByReference {}
-        public static class ByValue extends logSwitchLog implements Structure.ByValue {}
-        public logSwitchLog() {}
-        public logSwitchLog(Pointer p) { super(p); read(); }
-
-
-        /**
-         * < The last jobId so far
-         */
-        public int lastJobId;
-/*#if defined(LSF_SIMULATOR)*/
-
-/**< last trace record time */
-/*    public NativeLong lastTraceTime;*/
-
-        /**< last trace record type */
-/*public int    lastTraceType;*
-
-    /**< last trace record info */
-/*public String lastTraceInfo;*/
-        /*#endif*/
-    }
-
-
-
-    /**
-     * \brief Records of job CPU data logged event
-     */
-    public static class dataLoggingLog extends Structure {
-        public static class ByReference extends dataLoggingLog implements Structure.ByReference {}
-        public static class ByValue extends dataLoggingLog implements Structure.ByValue {}
-        public dataLoggingLog() {}
-        public dataLoggingLog(Pointer p) { super(p); read(); }
-
-
-        /**
-         * < The time of last job cpu data logging
-         */
-        public NativeLong loggingTime;
-    }
-
-
-
-    /**
-     * \brief  new job group log.
-     */
-    public static class jgrpNewLog extends Structure {
-        public static class ByReference extends jgrpNewLog implements Structure.ByReference {}
-        public static class ByValue extends jgrpNewLog implements Structure.ByValue {}
-        public jgrpNewLog() {}
-        public jgrpNewLog(Pointer p) { super(p); read(); }
-
-
-        /**
-         * < The user ID of the submitter
-         */
-        public int userId;
-
-        /**
-         * < The job submission time
-         */
-        public NativeLong submitTime;
-
-        /**
-         * < The name of the submitter
-         */
-        public byte[] userName = new byte[MAX_LSB_NAME_LEN];
-
-        /**
-         * < The job dependency condition
-         */
-        public String depCond;
-
-        /**
-         * < Time event string
-         */
-        public String timeEvent;
-
-        /**
-         * < Job group name
-         */
-        public String groupSpec;
-
-        /**
-         * < New job group name
-         */
-        public String destSpec;
-
-        /**
-         * < Delete options in options field
-         */
-        public int delOptions;
-
-        /**
-         * < Extended Delete options in options2 field
-         */
-        public int delOptions2;
-
-        /**
-         * < Platform type: such as Unix, Windows
-         */
-        public int fromPlatform;
-
-        /**
-         * < SLA service class name under which the job runs
-         */
-        public String sla;
-
-        /**
-         * < Max job group slots limit
-         */
-        public int maxJLimit;
-
-        /**
-         * < Job group creation method: implicit or explicit
-         */
-        public int options;
-    }
-
-
-
-    /**
-     * \brief  job group control log.
-     */
-    public static class jgrpCtrlLog extends Structure {
-        public static class ByReference extends jgrpCtrlLog implements Structure.ByReference {}
-        public static class ByValue extends jgrpCtrlLog implements Structure.ByValue {}
-        public jgrpCtrlLog() {}
-        public jgrpCtrlLog(Pointer p) { super(p); read(); }
-
-
-        /**
-         * < The user ID of the submitter
-         */
-        public int userId;
-
-        /**
-         * < The name of the submitter
-         */
-        public byte[] userName = new byte[MAX_LSB_NAME_LEN];
-
-        /**
-         * < Job group name
-         */
-        public String groupSpec;
-
-        /**
-         * < Options
-         */
-        public int options;
-
-        /**
-         * < Job control JGRP_RELEASE, JGRP_HOLD, JGRP_DEL
-         */
-        public int ctrlOp;
-    }
-
-
-
-    /**
-     * \brief  job group status log.
-     */
-    public static class jgrpStatusLog extends Structure {
-        public static class ByReference extends jgrpStatusLog implements Structure.ByReference {}
-        public static class ByValue extends jgrpStatusLog implements Structure.ByValue {}
-        public jgrpStatusLog() {}
-        public jgrpStatusLog(Pointer p) { super(p); read(); }
-
-
-        /**
-         * < The full group path name for the job group
-         */
-        public String groupSpec;
-
-        /**
-         * < Job group status
-         */
-        public int status;
-
-        /**
-         * < Prior status
-         */
-        public int oldStatus;
-    }
-
-
-
-    /**
-     * \brief jobNewLog logged in lsb.events when a job is submitted.
-     */
-    public static class jobNewLog extends Structure {
-        public static class ByReference extends jobNewLog implements Structure.ByReference {}
-        public static class ByValue extends jobNewLog implements Structure.ByValue {}
-        public jobNewLog() {}
-        public jobNewLog(Pointer p) { super(p); read(); }
-
-
-        /**
-         * < The job ID that the LSF assigned to the job
-         */
-        public int jobId;
-
-        /**
-         * < The user ID of the submitter
-         */
-        public int userId;
-
-        /**
-         * < The name of the submitter
-         */
-        public byte[] userName = new byte[MAX_LSB_NAME_LEN];
-
-        /**
-         * < Job submission options. see \ref lsb_submit.
-         */
-        public int options;
-
-        /**
-         * < Job submission options. see \ref lsb_submit.
-         */
-        public int options2;
-
-        /**
-         * < The number of processors requested for execution
-         */
-        public int numProcessors;
-
-        /**
-         * < The job submission time
-         */
-        public NativeLong submitTime;
-
-        /**
-         * < The job should be started on or after this time
-         */
-        public NativeLong beginTime;
-
-        /**
-         * < If the job has not finished by this time, it will be killed
-         */
-        public NativeLong termTime;
-
-        /**
-         * < The signal value sent to the job 10 minutes before its run window closes
-         */
-        public int sigValue;
-
-        /**
-         * < The checkpointing period
-         */
-        public int chkpntPeriod;
-
-        /**
-         * < The process ID assigned to the job when it was restarted
-         */
-        public int restartPid;
-
-        /**
-         * < The user's resource limits
-         */
-        public int[] rLimits = new int[LibLsf.LSF_RLIM_NLIMITS];
-
-        /**
-         * < The model, host name or host type for scaling CPULIMIT and RUNLIMIT
-         */
-        public byte[] hostSpec = new byte[LibLsf.MAXHOSTNAMELEN];
-
-        /**
-         * < The CPU factor for the above model, host name or host type
-         */
-        public float hostFactor;
-
-        /**
-         * < The file creation mask for this job
-         */
-        public int umask;
-
-        /**
-         * < The name of the queue to which this job was submitted
-         */
-        public byte[] queue = new byte[MAX_LSB_NAME_LEN];
-
-        /**
-         * < The resource requirements of the job
-         */
-        public String resReq;
-
-        /**
-         * < The submission host name
-         */
-        public byte[] fromHost = new byte[LibLsf.MAXHOSTNAMELEN];
-
-        /**
-         * < The current working directory
-         */
-        public String cwd;
-
-        /**
-         * < The checkpoint directory
-         */
-        public String chkpntDir;
-
-        /**
-         * < The input file name
-         */
-        public String inFile;
-
-        /**
-         * < The output file name
-         */
-        public String outFile;
-
-        /**
-         * < The error output file name
-         */
-        public String errFile;
-
-        /**
-         * < Job spool input file
-         */
-        public String inFileSpool;
-
-        /**
-         * < Job spool command file
-         */
-        public String commandSpool;
-
-        /**
-         * < Job spool directory
-         */
-        public String jobSpoolDir;
-
-        /**
-         * < The home directory of the submitter
-         */
-        public String subHomeDir;
-
-        /**
-         * < The job file name
-         */
-        public String jobFile;
-
-        /**
-         * < The number of hosts considered for dispatching this job
-         */
-        public int numAskedHosts;
-
-        /**
-         * < The array of names of hosts considered for dispatching this job
-         */
-        public Pointer askedHosts;
-
-        /**
-         * < The job dependency condition
-         */
-        public String dependCond;
-
-        /**
-         * < Time event string
-         */
-        public String timeEvent;
-
-        /**
-         * < The job name
-         */
-        public String jobName;
-
-        /**
-         * < The job command
-         */
-        public String command;
-
-        /**
-         * < The number of files to transfer
-         */
-        public int nxf;
-
-        /**
-         * < The array of file transfer specifications. (The xFile structure is defined in <lsf/lsbatch.h>)
-         */
-        public Pointer /* xFile.ByReference */ xf;
-
-        /**
-         * < The command string to be pre_executed
-         */
-        public String preExecCmd;
-
-        /**
-         * < User option mail string
-         */
-        public String mailUser;
-
-        /**
-         * < The project name for this job, used for accounting purposes
-         */
-        public String projectName;
-
-        /**
-         * < Port to be used for interactive jobs
-         */
-        public int niosPort;
-
-        /**
-         * < Maximum number of processors
-         */
-        public int maxNumProcessors;
-
-        /**
-         * < Execution host type
-         */
-        public String schedHostType;
-
-        /**
-         * < Login shell specified by user
-         */
-        public String loginShell;
-
-        /**
-         * < The user group name for this job
-         */
-        public String userGroup;
-
-        /**
-         * < List of alarm conditions for job
-         */
-        public String exceptList;
-
-        /**
-         * < Array idx, must be 0 in JOB_NEW
-         */
-        public int idx;
-
-        /**
-         * < User priority
-         */
-        public int userPriority;
-
-        /**
-         * < Advance reservation ID
-         */
-        public String rsvId;
-
-        /**
-         * < The job group under which the job runs.
-         */
-        public String jobGroup;
-
-        /**
-         * < External scheduling options
-         */
-        public String extsched;
-
-        /**
-         * < Warning time period in seconds, -1 if unspecified
-         */
-        public int warningTimePeriod;
-
-        /**
-         * < Warning action, SIGNAL | CHKPNT | command, null if unspecified
-         */
-        public String warningAction;
-
-        /**
-         * < The service class under which the job runs.
-         */
-        public String sla;
-
-        /**
-         * < The absolute run limit of the job
-         */
-        public int SLArunLimit;
-
-        /**
-         * < License Project
-         */
-        public String licenseProject;
-
-        /**
-         * < Extended bitwise inclusive OR of options flags. See \ref lsb_submit.
-         */
-        public int options3;
-
-        /**
-         * < Application profile under which the job runs.
-         */
-        public String app;
-
-        /**
-         * < Post-execution commands.
-         */
-        public String postExecCmd;
-
-        /**
-         * < Runtime estimate specified.
-         */
-        public int runtimeEstimation;
-
-        /**
-         * < Job-level requeue exit values.
-         */
-        public String requeueEValues;
-
-        /**
-         * < Initial checkpoint period
-         */
-        public int initChkpntPeriod;
-
-        /**
-         * < Job migration threshold.
-         */
-        public int migThreshold;
-
-        /**
-         * < Resize notify command
-         */
-        public String notifyCmd;
-
-        /**
-         * < Job description.
-         */
-        public String jobDescription;
-
-        /**
-         * < For new options in future
-         */
-        public submit_ext.ByReference submitExt;
-
-/*#if defined(LSF_SIMULATOR)*/
-
-/**< maximum memory */
-        /*public int    maxmem;*/
-
-        /**< exit status */
-        /*public int    exitstatus;*/
-
-        /**< job run time */
-        /*public int    runtime;*/
-
-        /**< system cpu time */
-        /*public int    cputime;*/
-
-        /**< allocated slots */
-        /*public int    slots;*/
-
-        /**< cpu factor */
-        /*public float  cpufactor;*/
-
-        /*#endif*/
-    }
-
-
-
-/*
-#if defined(LSF_SIMULATOR)
-public static class jobArrayElementLog extends Structure {
-public static class ByReference extends jobArrayElementLog implements Structure.ByReference {}
-public static class ByValue extends jobArrayElementLog implements Structure.ByValue {}
-
-    public int jobId;
-*/
-/* Copy LSF simulator related fields from jobNewLog */
-/*
-    public int idx;
-        public int maxmem;
-        public int exitstatus;
-        public int runtime;
-        public int cputime;
-        public int slots;
-        public float cpufactor;
-    };
-    #endif
-    */
-
-    /**
-     * \brief  job modified log.
-     */
-    public static class jobModLog extends Structure {
-        public static class ByReference extends jobModLog implements Structure.ByReference {}
-        public static class ByValue extends jobModLog implements Structure.ByValue {}
-        public jobModLog() {}
-        public jobModLog(Pointer p) { super(p); read(); }
-
-
-        /**
-         * < JobId or jobName in String/
-         * public String jobIdStr;
-         * <p/>
-         * /**< Job submission options(See \ref lsb_submit)
-         */
-        public int options;
-
-        /**
-         * < Job submission options(See \ref lsb_submit)
-         */
-        public int options2;
-
-        /**
-         * < Delete options in options field
-         */
-        public int delOptions;
-
-        /**
-         * < Extended delete options in options2 field .
-         */
-        public int delOptions2;
-
-        /**
-         * < The user ID of the submitter
-         */
-        public int userId;
-
-        /**
-         * < The name of the submitter
-         */
-        public String userName;
-
-        /**
-         * < The job submission time
-         */
-        public int submitTime;
-
-        /**
-         * < The file creation mask for this job
-         */
-        public int umask;
-
-        /**
-         * < The number of processors requested for execution
-         */
-        public int numProcessors;
-
-        /**
-         * < The job should be started on or after this time
-         */
-        public NativeLong beginTime;
-
-        /**
-         * < If the job has not finished by this time,  it will be killed
-         */
-        public NativeLong termTime;
-
-        /**
-         * < The signal value sent to the job 10 minutes before its run window closes
-         */
-        public int sigValue;
-
-        /**
-         * < The process ID assigned to the job when it was restarted
-         */
-        public int restartPid;
-
-
-        /**
-         * < The job name
-         */
-        public String jobName;
-
-        /**
-         * < The name of the queue to which this job was submitted
-         */
-        public String queue;
-
-
-        /**
-         * < The number of hosts considered for dispatching this job
-         */
-        public int numAskedHosts;
-
-        /**
-         * < List of asked hosts
-         */
-        public Pointer askedHosts;
-
-
-        /**
-         * < The resource requirements of the job
-         */
-        public String resReq;
-
-        /**
-         * < User's resource limits (soft)
-         */
-        public int[] rLimits = new int[LibLsf.LSF_RLIM_NLIMITS];
-
-        /**
-         * < The model, host name or host type for scaling CPULIMIT and RUNLIMIT
-         */
-        public String hostSpec;
-
-
-        /**
-         * < The job dependency condition
-         */
-        public String dependCond;
-
-        /**
-         * < Time event string.
-         */
-        public String timeEvent;
-
-
-        /**
-         * < The home directory of the submitter
-         */
-        public String subHomeDir;
-
-        /**
-         * < The input file name
-         */
-        public String inFile;
-
-        /**
-         * < The output file name
-         */
-        public String outFile;
-
-        /**
-         * < The error output file name
-         */
-        public String errFile;
-
-        /**
-         * < Command description - this is really a job description field
-         */
-        public String command;
-
-        /**
-         * < Job spool input file
-         */
-        public String inFileSpool;
-
-        /**
-         * < Job spool command file
-         */
-        public String commandSpool;
-
-        /**
-         * < The checkpointing period
-         */
-        public int chkpntPeriod;
-
-        /**
-         * < The checkpoint directory
-         */
-        public String chkpntDir;
-
-        /**
-         * < The number of files to transfer
-         */
-        public int nxf;
-
-        /**
-         * < The array of file transfer specifications.  (The xFile structure is defined in <lsf/lsbatch.h>)
-         */
-        public Pointer /* xFile.ByReference */ xf;
-
-
-        /**
-         * < The job file name: If == '\\0', indicate let mbatchd make up name, otherwise, mbatchd will use given name.  It is '\\0' if it is a regular job,non-nil means it is a restart job.
-         */
-        public String jobFile;
-
-        /**
-         * < The submission host name
-         */
-        public String fromHost;
-
-        /**
-         * < The current working directory
-         */
-        public String cwd;
-
-
-        /**
-         * < The pre-execution command
-         */
-        public String preExecCmd;
-
-        /**
-         * < User option mail string
-         */
-        public String mailUser;
-
-        /**
-         * < Project name for the job; used for accounting purposes
-         */
-        public String projectName;
-
-
-        /**
-         * < NIOS callback port to be used for interactive jobs
-         */
-        public int niosPort;
-
-        /**
-         * < Maximum number of processors
-         */
-        public int maxNumProcessors;
-
-
-        /**
-         * < The login shell specified by user
-         */
-        public String loginShell;
-
-        /**
-         * < Restart job's submission host type
-         */
-        public String schedHostType;
-
-        /**
-         * < The user group name for this job
-         */
-        public String userGroup;
-
-        /**
-         * < List of job exception conditions
-         */
-        public String exceptList;
-
-        /**
-         * < User priority
-         */
-        public int userPriority;
-
-        /**
-         * < Advance reservation ID
-         */
-        public String rsvId;
-
-        /**
-         * < External scheduling options
-         */
-        public String extsched;
-
-        /**
-         * < Job warning time period in seconds; -1 if unspecified
-         */
-        public int warningTimePeriod;
-
-        /**
-         * < Job warning action: SIGNAL | CHKPNT | command; null if unspecified
-         */
-        public String warningAction;
-
-        /**
-         * < The job group under which the job runs
-         */
-        public String jobGroup;
-
-        /**
-         * < SLA service class name under which the job runs
-         */
-        public String sla;
-
-        /**
-         * < LSF License Scheduler project name
-         */
-        public String licenseProject;
-
-        /**
-         * < Extended bitwise inclusive OR of options flags. see \ref lsb_submit.
-         */
-        public int options3;
-
-        /**
-         * < Extended delete options in options3 field.
-         */
-        public int delOptions3;
-
-        /**
-         * < Application profile under which the job runs.
-         */
-        public String app;
-
-        /**
-         * < Absolute priority scheduling string set by administrators to denote static  system APS value or ADMIN factor APS value.
-         */
-        public String apsString;
-
-        /**
-         * < Post-execution commands.
-         */
-        public String postExecCmd;
-
-        /**
-         * < Runtime estimate.
-         */
-        public int runtimeEstimation;
-
-        /**
-         * < Job-level requeue exit values.
-         */
-        public String requeueEValues;
-
-        /**
-         * < Initial checkpoint period
-         */
-        public int initChkpntPeriod;
-
-        /**
-         * < Job migration threshold.
-         */
-        public int migThreshold;
-
-        /**
-         * < Resize notify command
-         */
-        public String notifyCmd;
-
-        /**
-         * < Job description.
-         */
-        public String jobDescription;
-
-        /**
-         * < For new options in future
-         */
-        public submit_ext.ByReference submitExt;
-    }
-
-
-
-    /**
-     * \brief  logged in lsb.events when a job is started.
-     */
-    public static class jobStartLog extends Structure {
-        public static class ByReference extends jobStartLog implements Structure.ByReference {}
-        public static class ByValue extends jobStartLog implements Structure.ByValue {}
-        public jobStartLog() {}
-        public jobStartLog(Pointer p) { super(p); read(); }
-
-
-        /**
-         * < The unique ID for the job
-         */
-        public int jobId;
-
-        /**
-         * < The status of the job (see  \ref lsb_readjobinfo )
-         */
-        public int jStatus;
-
-        /**
-         * < The job process ID
-         */
-        public int jobPid;
-
-        /**
-         * < The job process group ID
-         */
-        public int jobPGid;
-
-        /**
-         * < The CPU factor of the first execution host
-         */
-        public float hostFactor;
-
-        /**
-         * < The number of processors used for execution
-         */
-        public int numExHosts;
-
-        /**
-         * < The array of execution host names
-         */
-        public Pointer execHosts;
-
-        /**
-         * < Pre-execution command defined in the queue
-         */
-        public String queuePreCmd;
-
-        /**
-         * < Post-execution command defined in the queue
-         */
-        public String queuePostCmd;
-
-        /**
-         * < Job processing flags
-         */
-        public int jFlags;
-
-        /**
-         * < The user group name for this job
-         */
-        public String userGroup;
-
-        /**
-         * < Job array index; must be 0 in JOB_NEW
-         */
-        public int idx;
-
-        /**
-         * < Placement information of LSF HPC jobs
-         */
-        public String additionalInfo;
-
-        /**
-         * < How long a backfilled job can run; used for preemption backfill jobs
-         */
-        public int duration4PreemptBackfill;
-
-        /**
-         * <  Job Flags2
-         */
-        public int jFlags2;
-    }
-
-
-
-    /**
-     * \brief logged in lsb.events when a job start request is accepted.
-     */
-    public static class jobStartAcceptLog extends Structure {
-        public static class ByReference extends jobStartAcceptLog implements Structure.ByReference {}
-        public static class ByValue extends jobStartAcceptLog implements Structure.ByValue {}
-        public jobStartAcceptLog() {}
-        public jobStartAcceptLog(Pointer p) { super(p); read(); }
-
-
-        /**
-         * < The unique ID for the job
-         */
-        public int jobId;
-
-        /**
-         * < The job process ID
-         */
-        public int jobPid;
-
-        /**
-         * < The job process group ID
-         */
-        public int jobPGid;
-
-        /**
-         * < Job array index; must be 0 in JOB_NEW
-         */
-        public int idx;
-    }
-
-
-
-    /**
-     * \brief logged in lsb.events when a job is executed.
-     */
-    public static class jobExecuteLog extends Structure {
-        public static class ByReference extends jobExecuteLog implements Structure.ByReference {}
-        public static class ByValue extends jobExecuteLog implements Structure.ByValue {}
-        public jobExecuteLog() {}
-        public jobExecuteLog(Pointer p) { super(p); read(); }
-
-        /* logged in lsb.events when a job is executed */
-
-        /**
-         * < The unique ID for the job
-         */
-        public int jobId;
-
-        /**
-         * < User ID under which the job is running
-         */
-        public int execUid;
-
-        /**
-         * < Home directory of the user denoted by execUid
-         */
-        public String execHome;
-
-        /**
-         * < Current working directory where job is running
-         */
-        public String execCwd;
-
-        /**
-         * < The job process group ID
-         */
-        public int jobPGid;
-
-        /**
-         * < User name under which the job is running
-         */
-        public String execUsername;
-
-        /**
-         * < The job process ID
-         */
-        public int jobPid;
-
-        /**
-         * < Job array index; must be 0 in JOB_NEW
-         */
-        public int idx;
-
-        /**
-         * < Placement information of LSF HPC jobs
-         */
-        public String additionalInfo;
-
-        /**
-         * < The run limit scaled by the exec host
-         */
-        public int SLAscaledRunLimit;
-
-        /**
-         * < The position of the job
-         */
-        public int position;
-
-        /**
-         * < The rusage satisfied at job runtime
-         */
-        public String execRusage;
-
-        /**
-         * < The duration for preemptive backfill class in seconds
-         */
-        public int duration4PreemptBackfill;
-    }
-
-
-
-
-    /**
-     * \brief logged when a job's status is changed.
-     */
-    public static class jobStatusLog extends Structure {
-        public static class ByReference extends jobStatusLog implements Structure.ByReference {}
-        public static class ByValue extends jobStatusLog implements Structure.ByValue {}
-        public jobStatusLog() {}
-        public jobStatusLog(Pointer p) { super(p); read(); }
-
-
-        /**
-         * < The unique ID for the job
-         */
-        public int jobId;
-
-        /**
-         * < The job status (see \ref lsb_readjobinfo )
-         */
-        public int jStatus;
-
-        /**
-         * < The reason the job is pending or suspended  (see \ref lsb_pendreason and \ref lsb_suspreason )
-         */
-        public int reason;
-
-        /**
-         * < The load indices that have overloaded the host (see \ref lsb_pendreason  and \ref lsb_suspreason )
-         */
-        public int subreasons;
-
-        /**
-         * < The CPU time consumed before this event occurred
-         */
-        public float cpuTime;
-
-        /**
-         * < The job completion time
-         */
-        public NativeLong endTime;
-
-        /**
-         * < Boolean indicating lsfRusage is logged
-         */
-        public int ru;
-
-        /**
-         * < Resource usage statisticsThe lsfRusage structure is defined in <lsf/lsf.h>. Note that the availability of certain fields depends on the platform on which the sbatchd runs. The fields that do not make sense on the platform will be logged as -1.0.
-         */
-        public LibLsf.lsfRusage lsfRusage;
-
-        /**
-         * < Job exit status
-         */
-        public int jFlags;
-
-        /**
-         * < Job's exit status
-         */
-        public int exitStatus;
-
-        /**
-         * < Job array index; must be 0 in JOB_NEW
-         */
-        public int idx;
-
-        /**
-         * < Job termination reason, see <lsf/lsbatch.h>
-         */
-        public int exitInfo;
-    }
-
-
-
-
-    /**
-     * \brief logged when a job's status is changed
-     */
-    public static class sbdJobStatusLog extends Structure {
-        public static class ByReference extends sbdJobStatusLog implements Structure.ByReference {}
-        public static class ByValue extends sbdJobStatusLog implements Structure.ByValue {}
-        public sbdJobStatusLog() {}
-        public sbdJobStatusLog(Pointer p) { super(p); read(); }
-
-
-        /**
-         * < The unique ID for the job
-         */
-        public int jobId;
-
-        /**
-         * < The status of the job (see \ref lsb_readjobinfo)
-         */
-        public int jStatus;
-
-        /**
-         * < The reason the job is pending or suspended (See \ref lsb_pendreason and \ref lsb_suspreason)
-         */
-        public int reasons;
-
-        /**
-         * < The load indices that have overloaded the host (See \ref lsb_pendreason and \ref lsb_suspreason)
-         */
-        public int subreasons;
-
-        /**
-         * < Action process ID
-         */
-        public int actPid;
-
-        /**
-         * < Action Value SIG_CHKPNT | SIG_CHKPNT_COPY |  SIG_WARNING
-         */
-        public int actValue;
-
-        /**
-         * < Action period
-         */
-        public NativeLong actPeriod;
-
-        /**
-         * < Action flag
-         */
-        public int actFlags;
-
-        /**
-         * < Action logging status
-         */
-        public int actStatus;
-
-        /**
-         * < Action Reason SUSP_MBD_LOCK | SUSP_USER_STOP | SUSP_USER_RESUME | SUSP_SBD_STARTUP
-         */
-        public int actReasons;
-
-        /**
-         * < Sub Reason SUB_REASON_RUNLIMIT | SUB_REASON_DEADLINE |SUB_REASON_PROCESSLIMIT | SUB_REASON_MEMLIMIT |SUB_REASON_CPULIMIT
-         */
-        public int actSubReasons;
-
-        /**
-         * < Job array index; must be 0 in JOB_NEW
-         */
-        public int idx;
-
-        /**
-         * < The signal value
-         */
-        public int sigValue;
-
-        /**
-         * < The termination reason of a job
-         */
-        public int exitInfo;
-    }
-
-
-
-    /**
-     * \brief job status that we could send to MBD
-     */
-    public static class sbdUnreportedStatusLog extends Structure {
-        public static class ByReference extends sbdUnreportedStatusLog implements Structure.ByReference {}
-        public static class ByValue extends sbdUnreportedStatusLog implements Structure.ByValue {}
-        public sbdUnreportedStatusLog() {}
-        public sbdUnreportedStatusLog(Pointer p) { super(p); read(); }
-
-
-        /**
-         * < The unique ID for the job
-         */
-        public int jobId;
-
-        /**
-         * < Action process ID
-         */
-        public int actPid;
-
-        /**
-         * < The job process ID
-         */
-        public int jobPid;
-
-        /**
-         * < The job process group ID
-         */
-        public int jobPGid;
-
-        /**
-         * < New status of the job
-         */
-        public int newStatus;
-
-        /**
-         * < Pending or suspending reason code
-         */
-        public int reason;
-
-        /**
-         * < Pending or suspending subreason code
-         */
-        public int subreasons;
-
-        /**
-         * < Resource usage information for the job  (see jobFinishLog)
-         */
-        public LibLsf.lsfRusage lsfRusage;
-
-        /**
-         * < User ID under which the job is running
-         */
-        public int execUid;
-
-        /**
-         * < Job exit status
-         */
-        public int exitStatus;
-
-        /**
-         * < Current working directory where job is running
-         */
-        public String execCwd;
-
-        /**
-         * < Home directory of the user denoted by execUid
-         */
-        public String execHome;
-
-        /**
-         * < User name under which the job is running
-         */
-        public String execUsername;
-
-        /**
-         * < Message index
-         */
-        public int msgId;
-
-        /**
-         * < Job's resource usage
-         */
-        public LibLsf.jRusage runRusage;
-
-        /**
-         * < Signal value
-         */
-        public int sigValue;
-
-        /**
-         * < Action logging status
-         */
-        public int actStatus;
-
-        /**
-         * < Sequence status of the job
-         */
-        public int seq;
-
-        /**
-         * < Job array index
-         */
-        public int idx;
-
-        /**
-         * < The termination reason of a job
-         */
-        public int exitInfo;
-    }
-
-
-
-    /**
-     * \brief logged when a job is switched to another queue
-     */
-    public static class jobSwitchLog extends Structure {
-        public static class ByReference extends jobSwitchLog implements Structure.ByReference {}
-        public static class ByValue extends jobSwitchLog implements Structure.ByValue {}
-        public jobSwitchLog() {}
-        public jobSwitchLog(Pointer p) { super(p); read(); }
-
-
-        /**
-         * < The user ID of the submitter
-         */
-        public int userId;
-
-        /**
-         * < The unique ID of the job
-         */
-        public int jobId;
-
-        /**
-         * < The name of the queue the job has been switched to
-         */
-        public byte[] queue = new byte[MAX_LSB_NAME_LEN];
-
-        /**
-         * < Job array index; must be 0 in JOB_NEW
-         */
-        public int idx;
-
-        /**
-         * < The name of the submitter
-         */
-        public byte[] userName = new byte[MAX_LSB_NAME_LEN];
-    }
-
-
-
-    /**
-     * \brief logged when a job is moved to another position
-     */
-    public static class jobMoveLog extends Structure {
-        public static class ByReference extends jobMoveLog implements Structure.ByReference {}
-        public static class ByValue extends jobMoveLog implements Structure.ByValue {}
-        public jobMoveLog() {}
-        public jobMoveLog(Pointer p) { super(p); read(); }
-
-        /* logged when a job is moved to another position */
-
-        /**
-         * < The user ID of the submitter
-         */
-        public int userId;
-
-        /**
-         * < The unique ID of the job
-         */
-        public int jobId;
-
-        /**
-         * < The new position of the job
-         */
-        public int position;
-
-        /**
-         * < The operation code for the move (see  \ref lsb_movejob)
-         */
-        public int base;
-
-        /**
-         * < Job array index; must be 0 in JOB_NEW
-         */
-        public int idx;
-
-        /**
-         * < The name of the submitter
-         */
-        public byte[] userName = new byte[MAX_LSB_NAME_LEN];
-    }
-
-
-
-    /**
-     * \brief  check point log.
-     */
-    public static class chkpntLog extends Structure {
-        public static class ByReference extends chkpntLog implements Structure.ByReference {}
-        public static class ByValue extends chkpntLog implements Structure.ByValue {}
-        public chkpntLog() {}
-        public chkpntLog(Pointer p) { super(p); read(); }
-
-
-        /**
-         * < The unique ID of the job
-         */
-        public int jobId;
-
-        /**
-         * < The new checkpointing period
-         */
-        public NativeLong period;
-
-        /**
-         * < The process ID of the checkpointing process (a child sbatchd)
-         */
-        public int pid;
-
-        /**
-         * < 0: checkpoint started; 1: checkpoint succeeded
-         */
-        public int ok;
-
-        /**
-         * < One of the following: \n LSB_CHKPNT_KILL : Kill process if checkpoint successful \n LSB_CHKPNT_FORCE : Force checkpoint even if non-checkpointable conditions exist \n LSB_CHKPNT_MIG : Checkpoint for the purpose of migration
-         */
-        public int flags;
-
-        /**
-         * < Job array index; must be 0 in JOB_NEW
-         */
-        public int idx;
-    }
-
-
-
-    /**
-     * \brief  job requeue log.
-     */
-    public static class jobRequeueLog extends Structure {
-        public static class ByReference extends jobRequeueLog implements Structure.ByReference {}
-        public static class ByValue extends jobRequeueLog implements Structure.ByValue {}
-        public jobRequeueLog() {}
-        public jobRequeueLog(Pointer p) { super(p); read(); }
-
-
-        /**
-         * < The unique ID of the job
-         */
-        public int jobId;
-
-        /**
-         * < Job array index; must be 0 in JOB_NEW
-         */
-        public int idx;
-    }
-
-
-
-    /**
-     * \brief  job clean log.
-     */
-    public static class jobCleanLog extends Structure {
-        public static class ByReference extends jobCleanLog implements Structure.ByReference {}
-        public static class ByValue extends jobCleanLog implements Structure.ByValue {}
-        public jobCleanLog() {}
-        public jobCleanLog(Pointer p) { super(p); read(); }
-
-
-        /**
-         * < The unique ID for the job
-         */
-        public int jobId;
-
-        /**
-         * < Job array index; must be 0 in JOB_NEW
-         */
-        public int idx;
-    }
-
-
-
-    /**
-     * \brief  job exception log.
-     */
-    public static class jobExceptionLog extends Structure {
-        public static class ByReference extends jobExceptionLog implements Structure.ByReference {}
-        public static class ByValue extends jobExceptionLog implements Structure.ByValue {}
-        public jobExceptionLog() {}
-        public jobExceptionLog(Pointer p) { super(p); read(); }
-
-
-        /**
-         * < The unique ID for the job
-         */
-        public int jobId;
-
-        /**
-         * < Job exception handling mask
-         */
-        public int exceptMask;
-
-        /**
-         * < Action Id (kill | alarm | rerun | setexcept)
-         */
-        public int actMask;
-
-        /**
-         * < Time event string
-         */
-        public NativeLong timeEvent;
-
-        /**
-         * < Except Info, pending reason for missched or cantrun exception, the exit code of thejob for the abend exception, otherwise 0.
-         */
-        public int exceptInfo;
-
-        /**
-         * < Job array index; must be 0 in JOB_NEW
-         */
-        public int idx;
-    }
-
-
-
-    /**
-     * \brief  signal action log.
-     */
-    public static class sigactLog extends Structure {
-        public static class ByReference extends sigactLog implements Structure.ByReference {}
-        public static class ByValue extends sigactLog implements Structure.ByValue {}
-        public sigactLog() {}
-        public sigactLog(Pointer p) { super(p); read(); }
-
-
-        /**
-         * < The unique ID of the job
-         */
-        public int jobId;
-
-        /**
-         * < Action period
-         */
-        public NativeLong period;
-
-        /**
-         * < Action process ID
-         */
-        public int pid;
-
-        /**
-         * < Job status
-         */
-        public int jStatus;
-
-        /**
-         * < Pending reasons
-         */
-        public int reasons;
-
-        /**
-         * < Action flag
-         */
-        public int flags;
-
-        /**
-         * < Signal symbol from the set: DELETEJOB |  KILL | KILLREQUEUE |REQUEUE_DONE | REQUEUE_EXIT | REQUEUE_PEND |REQUEUE_PSUSP_ADMIN | REQUEUE_PSUSP_USER | SIG_CHKPNT |  SIG_CHKPNT_COPY
-         */
-        public String signalSymbol;
-
-        /**
-         * < Action logging status (ACT_NO | ACT_START | ACT_PREEMPT | ACT_DONE |  ACT_FAIL) .Shown in signal_action
-         */
-        public int actStatus;
-
-        /**
-         * < Job array index; must be 0 in JOB_NEW
-         */
-        public int idx;
-    }
-
-
-
-    /**
-     * \brief  migration log.
-     */
-    public static class migLog extends Structure {
-        public static class ByReference extends migLog implements Structure.ByReference {}
-        public static class ByValue extends migLog implements Structure.ByValue {}
-        public migLog() {}
-        public migLog(Pointer p) { super(p); read(); }
-
-
-        /**
-         * < The job to be migrated
-         */
-        public int jobId;
-
-        /**
-         * < The number of candidate hosts for migration
-         */
-        public int numAskedHosts;
-
-        /**
-         * < The array of candidate host names
-         */
-        public Pointer askedHosts;
-
-        /**
-         * < The user ID of the submitter
-         */
-        public int userId;
-
-        /**
-         * < Job array index; must be 0 in JOB_NEW
-         */
-        public int idx;
-
-        /**
-         * < The user name of the submitter
-         */
-        public byte[] userName = new byte[MAX_LSB_NAME_LEN];
-    }
-
-
-
-    /**
-     * \brief  signal log.
-     */
-    public static class signalLog extends Structure {
-        public static class ByReference extends signalLog implements Structure.ByReference {}
-        public static class ByValue extends signalLog implements Structure.ByValue {}
-        public signalLog() {}
-        public signalLog(Pointer p) { super(p); read(); }
-
-
-        /**
-         * < The user ID of the submitter
-         */
-        public int userId;
-
-        /**
-         * < The unique ID of the job
-         */
-        public int jobId;
-
-        /**
-         * < Signal symbol from the set: DELETEJOB | KILL | KILLREQUEUE |REQUEUE_DONE | REQUEUE_EXIT | REQUEUE_PEND |REQUEUE_PSUSP_ADMIN | REQUEUE_PSUSP_USER | SIG_CHKPNT | SIG_CHKPNT_COPY
-         */
-        public String signalSymbol;
-
-        /**
-         * < The number of running times
-         */
-        public int runCount;
-
-        /**
-         * < Job array index; must be 0 in JOB_NEW
-         */
-        public int idx;
-
-        /**
-         * < The name of the submitter
-         */
-        public byte[] userName = new byte[MAX_LSB_NAME_LEN];
-    }
-
-
-
-    /**
-     * \brief logged when bqc command is invoked.
-     */
-    public static class queueCtrlLog extends Structure {
-        public static class ByReference extends queueCtrlLog implements Structure.ByReference {}
-        public static class ByValue extends queueCtrlLog implements Structure.ByValue {}
-        public queueCtrlLog() {}
-        public queueCtrlLog(Pointer p) { super(p); read(); }
-
-
-        /**
-         * < The queue control operation (see \ref lsb_queuecontrol)
-         */
-        public int opCode;
-
-        /**
-         * < The name of the queue
-         */
-        public byte[] queue = new byte[MAX_LSB_NAME_LEN];
-
-        /**
-         * < The user ID of the submitter
-         */
-        public int userId;
-
-        /**
-         * < The name of the submitter
-         */
-        public byte[] userName = new byte[MAX_LSB_NAME_LEN];
-
-        /**
-         * < Queue control message
-         */
-        public byte[] message = new byte[LibLsf.MAXLINELEN];
-    }
-
-
-
-/*
-*  \brief  new debug log.
- */
-
-    public static class newDebugLog extends Structure {
-        public static class ByReference extends newDebugLog implements Structure.ByReference {}
-        public static class ByValue extends newDebugLog implements Structure.ByValue {}
-        public newDebugLog() {}
-        public newDebugLog(Pointer p) { super(p); read(); }
-
-
-        /**
-         * < The queue control operation
-         */
-        public int opCode;
-
-        /**
-         * < Debug level
-         */
-        public int level;
-
-        /**
-         * < Class of log
-         */
-        public int _logclass;
-
-        /**
-         * < Log enabled, disabled
-         */
-        public int turnOff;
-
-        /**
-         * < Name of log file
-         */
-        public byte[] logFileName = new byte[LibLsf.MAXLSFNAMELEN];
-
-        /**
-         * < The user ID of the submitter
-         */
-        public int userId;
-    }
-
-
-
-    /**
-     * \brief log the host control information.
-     */
-    public static class hostCtrlLog extends Structure {
-        public static class ByReference extends hostCtrlLog implements Structure.ByReference {}
-        public static class ByValue extends hostCtrlLog implements Structure.ByValue {}
-        public hostCtrlLog() {}
-        public hostCtrlLog(Pointer p) { super(p); read(); }
-
-
-        /**
-         * < The host control operation (See  \ref lsb_hostcontrol)
-         */
-        public int opCode;
-
-        /**
-         * < The name of the host
-         */
-        public byte[] host = new byte[LibLsf.MAXHOSTNAMELEN];
-
-        /**
-         * < The user ID of the submitter
-         */
-        public int userId;
-
-        /**
-         * < The name of the submitter
-         */
-        public byte[] userName = new byte[MAX_LSB_NAME_LEN];
-
-        /**
-         * < Host control message
-         */
-        public byte[] message = new byte[LibLsf.MAXLINELEN];
-    }
-
-
-
-    /**
-     * \brief logged when dynamic hosts are added to group.
-     */
-    public static class hgCtrlLog extends Structure {
-        public static class ByReference extends hgCtrlLog implements Structure.ByReference {}
-        public static class ByValue extends hgCtrlLog implements Structure.ByValue {}
-        public hgCtrlLog() {}
-        public hgCtrlLog(Pointer p) { super(p); read(); }
-
-
-        /**
-         * < The host control operation  (see \ref lsb_hostcontrol)
-         */
-        public int opCode;
-
-        /**
-         * < The name of the host
-         */
-        public byte[] host = new byte[LibLsf.MAXHOSTNAMELEN];
-
-        /**
-         * < The name of the host group
-         */
-        public byte[] grpname = new byte[LibLsf.MAXHOSTNAMELEN];
-
-        /**
-         * < The user ID of the submitter
-         */
-        public int userId;
-
-        /**
-         * < The name of the submitter
-         */
-        public byte[] userName = new byte[MAX_LSB_NAME_LEN];
-
-        /**
-         * < Host group control message
-         */
-        public byte[] message = new byte[LibLsf.MAXLINELEN];
-    }
-
-
-
-
-/* simulator is ready to schedule jobs */
-    public static final int SIMU_STATUS_READYSCHEDULE = 0x01;
-
-    /**
-     * \brief  mbatchd start log.
-     */
-    public static class mbdStartLog extends Structure {
-        public static class ByReference extends mbdStartLog implements Structure.ByReference {}
-        public static class ByValue extends mbdStartLog implements Structure.ByValue {}
-        public mbdStartLog() {}
-        public mbdStartLog(Pointer p) { super(p); read(); }
-
-
-        /**
-         * < The master host name
-         */
-        public byte[] master = new byte[LibLsf.MAXHOSTNAMELEN];
-
-        /**
-         * < The cluster name
-         */
-        public byte[] cluster = new byte[LibLsf.MAXLSFNAMELEN];
-
-        /**
-         * < The number of hosts in the cluster
-         */
-        public int numHosts;
-
-        /**
-         * < The number of queues in the cluster
-         */
-        public int numQueues;
-/*
-    public int    simDiffTime;
-    public int    pendJobsThreshold;
-    public int    simStatus;
-*/
-    }
-
-
-
-    public static class mbdSimStatusLog extends Structure {
-        public static class ByReference extends mbdSimStatusLog implements Structure.ByReference {}
-        public static class ByValue extends mbdSimStatusLog implements Structure.ByValue {}
-        public mbdSimStatusLog() {}
-        public mbdSimStatusLog(Pointer p) { super(p); read(); }
-
-
-/* simulator status */
-        public int simStatus;
-    }
-
-
-
-    /**
-     * \brief  mbatchd die log.
-     */
-    public static class mbdDieLog extends Structure {
-        public static class ByReference extends mbdDieLog implements Structure.ByReference {}
-        public static class ByValue extends mbdDieLog implements Structure.ByValue {}
-        public mbdDieLog() {}
-        public mbdDieLog(Pointer p) { super(p); read(); }
-
-
-        /**
-         * < The master host name
-         */
-        public byte[] master = new byte[LibLsf.MAXHOSTNAMELEN];
-
-        /**
-         * < The number of finished jobs that have been removed from the system and logged in the current event file
-         */
-        public int numRemoveJobs;
-
-        /**
-         * < The exit code from the master batch daemon
-         */
-        public int exitCode;
-
-        /**
-         * < mbatchd administrator control message
-         */
-        public byte[] message = new byte[LibLsf.MAXLINELEN];
-    }
-
-
-
-    /**
-     * \brief logged before mbatchd dies.
-     */
-    public static class unfulfillLog extends Structure {
-        public static class ByReference extends unfulfillLog implements Structure.ByReference {}
-        public static class ByValue extends unfulfillLog implements Structure.ByValue {}
-        public unfulfillLog() {}
-        public unfulfillLog(Pointer p) { super(p); read(); }
-
-
-        /**
-         * < The job ID.
-         */
-        public int jobId;
-
-        /**
-         * < The mbatchd has switched the job to a new queue but the sbatchd has not been informed of the switch
-         */
-        public int notSwitched;
-
-        /**
-         * < This signal was not sent to the job
-         */
-        public int sig;
-
-        /**
-         * < The job was not signaled to checkpoint itself
-         */
-        public int sig1;
-
-        /**
-         * < Checkpoint flags. see the chkpntLog structure below.
-         */
-        public int sig1Flags;
-
-        /**
-         * < The new checkpoint period for the job
-         */
-        public NativeLong chkPeriod;
-
-        /**
-         * < Flag for bmod running job's parameters
-         */
-        public int notModified;
-
-        /**
-         * < Job array index
-         */
-        public int idx;
-
-        /**
-         * < Option flags for pending job signals
-         */
-        public int miscOpts4PendSig;
-    }
-
-
-
-    public static final int TERM_UNKNOWN = 0;
-    public static final int TERM_PREEMPT = 1;
-    public static final int TERM_WINDOW = 2;
-    public static final int TERM_LOAD = 3;
-    public static final int TERM_OTHER = 4;
-    public static final int TERM_RUNLIMIT = 5;
-    public static final int TERM_DEADLINE = 6;
-    public static final int TERM_PROCESSLIMIT = 7;
-    public static final int TERM_FORCE_OWNER = 8;
-    public static final int TERM_FORCE_ADMIN = 9;
-    public static final int TERM_REQUEUE_OWNER = 10;
-    public static final int TERM_REQUEUE_ADMIN = 11;
-    public static final int TERM_CPULIMIT = 12;
-    public static final int TERM_CHKPNT = 13;
-    public static final int TERM_OWNER = 14;
-    public static final int TERM_ADMIN = 15;
-    public static final int TERM_MEMLIMIT = 16;
-    public static final int TERM_EXTERNAL_SIGNAL = 17;
-    public static final int TERM_RMS = 18;
-    public static final int TERM_ZOMBIE = 19;
-    public static final int TERM_SWAP = 20;
-    public static final int TERM_THREADLIMIT = 21;
-    public static final int TERM_SLURM = 22;
-    public static final int TERM_BUCKET_KILL = 23;
-    public static final int TERM_CTRL_PID = 24;
-    public static final int TERM_CWD_NOTEXIST = 25;
-
-    /**
-     * \brief logged in lsb.acct when a job finished.
-     */
-    public static class jobFinishLog extends Structure {
-        public static class ByReference extends jobFinishLog implements Structure.ByReference {}
-        public static class ByValue extends jobFinishLog implements Structure.ByValue {}
-        public jobFinishLog() {}
-        public jobFinishLog(Pointer p) { super(p); read(); }
-
-
-        /**
-         * < The unique ID for the job
-         */
-        public int jobId;
-
-        /**
-         * < The user ID of the submitter
-         */
-        public int userId;
-
-        /**
-         * < The user name of the submitter
-         */
-        public byte[] userName = new byte[MAX_LSB_NAME_LEN];
-
-        /**
-         * < Job submission options (see  \ref lsb_submit)
-         */
-        public int options;
-
-        /**
-         * < The number of processors requested for execution
-         */
-        public int numProcessors;
-
-        /**
-         * < The status of the job (See \ref lsb_readjobinfo)
-         */
-        public int jStatus;
-
-        /**
-         * < Job submission time
-         */
-        public NativeLong submitTime;
-
-        /**
-         * < The job started at or after this time
-         */
-        public NativeLong beginTime;
-
-        /**
-         * < If the job was not finished by this time, it was killed
-         */
-        public NativeLong termTime;
-
-        /**
-         * < Job dispatch time
-         */
-        public NativeLong startTime;
-
-        /**
-         * < The time the job finished
-         */
-        public NativeLong endTime;
-
-        /**
-         * < The name of the queue to which this job was submitted
-         */
-        public byte[] queue = new byte[MAX_LSB_NAME_LEN];
-
-        /**
-         * < Resource requirements
-         */
-        public String resReq;
-
-        /**
-         * < Submission host name
-         */
-        public byte[] fromHost = new byte[LibLsf.MAXHOSTNAMELEN];
-
-        /**
-         * < Current working directory
-         */
-        public String cwd;
-
-        /**
-         * < Input file name
-         */
-        public String inFile;
-
-        /**
-         * < Output file name
-         */
-        public String outFile;
-
-        /**
-         * < Error output file name
-         */
-        public String errFile;
-
-        /**
-         * < Job spool input file
-         */
-        public String inFileSpool;
-
-        /**
-         * < Job spool command file
-         */
-        public String commandSpool;
-
-        /**
-         * < Job file name
-         */
-        public String jobFile;
-
-        /**
-         * < The number of hosts considered for dispatching this job
-         */
-        public int numAskedHosts;
-
-        /**
-         * < The array of names of hosts considered for dispatching this job
-         */
-        public Pointer askedHosts;
-
-        /**
-         * < The CPU factor of the first execution host
-         */
-        public float hostFactor;
-
-        /**
-         * < The number of processors used for execution
-         */
-        public int numExHosts;
-
-        /**
-         * < The array of names of execution hosts
-         */
-        public Pointer execHosts;
-
-        /**
-         * < The total CPU time consumed by the job
-         */
-        public float cpuTime;
-
-        /**
-         * < Job name
-         */
-        public String jobName;
-
-        /**
-         * < Job command
-         */
-        public String command;
-
-        /**
-         * < Resource usage statistics.The lsfRusage structure is defined in <lsf/lsf.h>. Note that the availability of certain fields depends on the platform on which the sbatchd runs. The fields that do not make sense on this platform will be logged as -1.0.
-         */
-        public LibLsf.lsfRusage lsfRusage;
-
-        /**
-         * < The job dependency condition
-         */
-        public String dependCond;
-
-        /**
-         * < Time event string
-         */
-        public String timeEvent;
-
-        /**
-         * < The pre-execution command
-         */
-        public String preExecCmd;
-
-        /**
-         * < Name of the user to whom job related mail was sent
-         */
-        public String mailUser;
-
-        /**
-         * < The project name, used for accounting purposes.
-         */
-        public String projectName;
-
-        /**
-         * < Job's exit status
-         */
-        public int exitStatus;
-
-        /**
-         * < Maximum number of processors specified for the job
-         */
-        public int maxNumProcessors;
-
-        /**
-         * < Login shell specified by user
-         */
-        public String loginShell;
-
-        /**
-         * < Job array index
-         */
-        public int idx;
-
-        /**
-         * < Maximum memory used by job
-         */
-        public int maxRMem;
-
-        /**
-         * < Maximum swap used by job
-         */
-        public int maxRSwap;
-
-        /**
-         * < Advanced reservation ID
-         */
-        public String rsvId;
-
-        /**
-         * < Service class of the job
-         */
-        public String sla;
-
-        /**
-         * < Job exception handling mask
-         */
-        public int exceptMask;
-
-        /**
-         * < Placement information of LSF HPC jobs
-         */
-        public String additionalInfo;
-
-        /**
-         * < Job termination reason, see <lsf/lsbatch.h>
-         */
-        public int exitInfo;
-
-        /**
-         * < Job warning time period in seconds; -1 if unspecified
-         */
-        public int warningTimePeriod;
-
-        /**
-         * < Warning action, SIGNAL | CHKPNT | command, null if unspecified
-         */
-        public String warningAction;
-
-        /**
-         * < SAAP charged for job
-         */
-        public String chargedSAAP;
-
-        /**
-         * < LSF License Scheduler project name
-         */
-        public String licenseProject;
-
-        /**
-         * < Application profile under which the job runs.
-         */
-        public String app;
-
-        /**
-         * < Post-execution commands.
-         */
-        public String postExecCmd;
-
-        /**
-         * < Runtime estimate specified.
-         */
-        public int runtimeEstimation;
-
-        /**
-         * < Job group name
-         */
-        public String jgroup;
-
-        /**
-         * < Option2
-         */
-        public int options2;
-
-        /**
-         * < Job requeue exit values
-         */
-        public String requeueEValues;
-
-        /**
-         * < Resize notify command
-         */
-        public String notifyCmd;
-
-        /**
-         * < Last resize start time
-         */
-        public NativeLong lastResizeTime;
-
-        /**
-         * < Job description.
-         */
-        public String jobDescription;
-
-        /**
-         * < For new options in future
-         */
-        public submit_ext.ByReference submitExt;
-    }
-
-
-
-
-    /**
-     * \brief  load index log.
-     */
-
-    public static class loadIndexLog extends Structure {
-        public static class ByReference extends loadIndexLog implements Structure.ByReference {}
-        public static class ByValue extends loadIndexLog implements Structure.ByValue {}
-        public loadIndexLog() {}
-        public loadIndexLog(Pointer p) { super(p); read(); }
-
-
-        /**
-         * < The number of load indices
-         */
-        public int nIdx;
-
-        /**
-         * < The array of load index names
-         */
-        public Pointer name;
-    }
-
-
-
-    /**
-     * \brief  calendar log.
-     */
-    public static class calendarLog extends Structure {
-        public static class ByReference extends calendarLog implements Structure.ByReference {}
-        public static class ByValue extends calendarLog implements Structure.ByValue {}
-        public calendarLog() {}
-        public calendarLog(Pointer p) { super(p); read(); }
-
-
-        /**
-         * < Reserved for future use
-         */
-        public int options;
-
-        /**
-         * < The user ID of the submitter
-         */
-        public int userId;
-
-        /**
-         * < The name of the calendar
-         */
-        public String name;
-
-        /**
-         * < Description
-         */
-        public String desc;
-
-        /**
-         * < Calendar expression
-         */
-        public String calExpr;
-    }
-
-
-
-    /**
-     * \brief  job forward log.
-     */
-    public static class jobForwardLog extends Structure {
-        public static class ByReference extends jobForwardLog implements Structure.ByReference {}
-        public static class ByValue extends jobForwardLog implements Structure.ByValue {}
-        public jobForwardLog() {}
-        public jobForwardLog(Pointer p) { super(p); read(); }
-
-
-        /**
-         * < The unique ID of the job
-         */
-        public int jobId;
-
-        /**
-         * < The cluster name
-         */
-        public String cluster;
-
-        /**
-         * < Number of Reserved Hosts
-         */
-        public int numReserHosts;
-
-        /**
-         * < Reserved Host Names
-         */
-        public Pointer reserHosts;
-
-        /**
-         * < Job array index; must be 0 in JOB_NEW
-         */
-        public int idx;
-
-        /**
-         * < Remote job attributes from: \n JOB_FORWARD Remote batch job on submission side \n JOB_LEASE Lease job on submission side \n JOB_REMOTE_BATCH Remote batch job on execution side \n JOB_REMOTE_LEASE Lease job on execution side \n JOB_LEASE_RESYNC Lease job resync during restart \n JOB_REMOTE_RERUNNABLE Remote batch job rerunnable on execution cluster
-         */
-        public int jobRmtAttr;
-    }
-
-
-
-    /**
-     * \brief  job accept log.
-     */
-    public static class jobAcceptLog extends Structure {
-        public static class ByReference extends jobAcceptLog implements Structure.ByReference {}
-        public static class ByValue extends jobAcceptLog implements Structure.ByValue {}
-        public jobAcceptLog() {}
-        public jobAcceptLog(Pointer p) { super(p); read(); }
-
-
-        /**
-         * < The unique ID of the job
-         */
-        public int jobId;
-
-        /**
-         * < The unique ID of the remote job
-         */
-        public long remoteJid;
-
-        /**
-         * < The cluster name
-         */
-        public String cluster;
-
-        /**
-         * < Job array index; must be 0 in JOB_NEW
-         */
-        public int idx;
-
-        /**
-         * < Remote job attributes from: \n JOB_FORWARD Remote batch job on submission side \n JOB_LEASE Lease job on submission side \n JOB_REMOTE_BATCH Remote batch job on execution side \n JOB_REMOTE_LEASE Lease job on execution side \n JOB_LEASE_RESYNC Lease job resync during restart \n JOB_REMOTE_RERUNNABLE Remote batch job rerunnable on execution cluster
-         */
-        public int jobRmtAttr;
-    }
-
-
-
-    /**
-     * \brief  status Ack log.
-     */
-    public static class statusAckLog extends Structure {
-        public static class ByReference extends statusAckLog implements Structure.ByReference {}
-        public static class ByValue extends statusAckLog implements Structure.ByValue {}
-        public statusAckLog() {}
-        public statusAckLog(Pointer p) { super(p); read(); }
-
-
-        /**
-         * < The unique ID of the job
-         */
-        public int jobId;
-
-        /**
-         * < Line number of Status
-         */
-        public int statusNum;
-
-        /**
-         * < Job array index; must be 0 in JOB_NEW
-         */
-        public int idx;
-    }
-
-
-
-    /**
-     * \brief  job message log.
-     */
-    public static class jobMsgLog extends Structure {
-        public static class ByReference extends jobMsgLog implements Structure.ByReference {}
-        public static class ByValue extends jobMsgLog implements Structure.ByValue {}
-        public jobMsgLog() {}
-        public jobMsgLog(Pointer p) { super(p); read(); }
-
-
-        /**
-         * < The user ID of the submitter
-         */
-        public int usrId;
-
-        /**
-         * < The unique ID for the job
-         */
-        public int jobId;
-
-        /**
-         * < Message index
-         */
-        public int msgId;
-
-        /**
-         * < Message type
-         */
-        public int type;
-
-        /**
-         * < Message source
-         */
-        public String src;
-
-        /**
-         * < Message destination
-         */
-        public String dest;
-
-        /**
-         * < Message
-         */
-        public String msg;
-
-        /**
-         * < Job array index; must be 0 in JOB_NEW
-         */
-        public int idx;
-    }
-
-
-
-    /**
-     * \brief  job message ack log.
-     */
-    public static class jobMsgAckLog extends Structure {
-        public static class ByReference extends jobMsgAckLog implements Structure.ByReference {}
-        public static class ByValue extends jobMsgAckLog implements Structure.ByValue {}
-        public jobMsgAckLog() {}
-        public jobMsgAckLog(Pointer p) { super(p); read(); }
-
-
-        /**
-         * < The user ID of the submitter
-         */
-        public int usrId;
-
-        /**
-         * < The unique ID for the job
-         */
-        public int jobId;
-
-        /**
-         * < Message index
-         */
-        public int msgId;
-
-        /**
-         * < Message type
-         */
-        public int type;
-
-        /**
-         * < Message source
-         */
-        public String src;
-
-        /**
-         * < Message destination
-         */
-        public String dest;
-
-        /**
-         * < Message
-         */
-        public String msg;
-
-        /**
-         * < Job array index; must be 0 in JOB_NEW
-         */
-        public int idx;
-    }
-
-
-
-    /**
-     * \brief  job occupy request log. jobOccupyReqLog is for future use.
-     */
-    public static class jobOccupyReqLog extends Structure {
-        public static class ByReference extends jobOccupyReqLog implements Structure.ByReference {}
-        public static class ByValue extends jobOccupyReqLog implements Structure.ByValue {}
-        public jobOccupyReqLog() {}
-        public jobOccupyReqLog(Pointer p) { super(p); read(); }
-
-
-        /**
-         * < The user ID of the submitter
-         */
-        public int userId;
-
-        /**
-         * < The unique ID for the job
-         */
-        public int jobId;
-
-        /**
-         * < Number of Jobs Slots desired
-         */
-        public int numOccupyRequests;
-
-        /**
-         * < List of slots occupied
-         */
-        public Pointer occupyReqList;
-
-        /**
-         * < Job array index; must be 0 in JOB_NEW
-         */
-        public int idx;
-
-        /**
-         * < The name of the submitter
-         */
-        public byte[] userName = new byte[MAX_LSB_NAME_LEN];
-    }
-
-
-
-    /**
-     * \brief  job vacate log.jobVacatedLog is for future use.
-     */
-    public static class jobVacatedLog extends Structure {
-        public static class ByReference extends jobVacatedLog implements Structure.ByReference {}
-        public static class ByValue extends jobVacatedLog implements Structure.ByValue {}
-        public jobVacatedLog() {}
-        public jobVacatedLog(Pointer p) { super(p); read(); }
-
-
-        /**
-         * < The user ID of the submitter
-         */
-        public int userId;
-
-        /**
-         * < The unique ID for the job
-         */
-        public int jobId;
-
-        /**
-         * < Job array index; must be 0 in JOB_NEW
-         */
-        public int idx;
-
-        /**
-         * < The name of the submitter
-         */
-        public byte[] userName = new byte[MAX_LSB_NAME_LEN];
-    }
-
-
-
-    /**
-     * \brief  job force request log.
-     */
-    public static class jobForceRequestLog extends Structure {
-        public static class ByReference extends jobForceRequestLog implements Structure.ByReference {}
-        public static class ByValue extends jobForceRequestLog implements Structure.ByValue {}
-        public jobForceRequestLog() {}
-        public jobForceRequestLog(Pointer p) { super(p); read(); }
-
-
-        /**
-         * < The user ID of the submitter
-         */
-        public int userId;
-
-        /**
-         * < >1 for local/lease jobs; 0 for remote batch model
-         */
-        public int numExecHosts;
-
-        /**
-         * < The array of execution host names
-         */
-        public Pointer execHosts;
-
-        /**
-         * < The unique ID for the job
-         */
-        public int jobId;
-
-        /**
-         * < Job array index; must be 0 in JOB_NEW
-         */
-        public int idx;
-
-        /**
-         * < Job run options (RUNJOB_OPT_NOSTOP | JFLAG_URGENT_NOSTOP |JFLAG_URGENT)
-         */
-        public int options;
-
-        /**
-         * < The name of the submitter
-         */
-        public byte[] userName = new byte[MAX_LSB_NAME_LEN];
-
-        /**
-         * < The name of the queue to which this job was submitted
-         */
-        public String queue;
-    }
-
-
-
-    /**
-     * \brief  job chunck log.
-     */
-    public static class jobChunkLog extends Structure {
-        public static class ByReference extends jobChunkLog implements Structure.ByReference {}
-        public static class ByValue extends jobChunkLog implements Structure.ByValue {}
-        public jobChunkLog() {}
-        public jobChunkLog(Pointer p) { super(p); read(); }
-
-
-        /**
-         * < Size of array membJobId
-         */
-        public NativeLong membSize;
-
-        /**
-         * < Job ids of jobs in the chunk
-         */
-        public LongByReference membJobId;
-
-        /**
-         * < The number of processors used for execution
-         */
-        public NativeLong numExHosts;
-
-        /**
-         * < The array of names of execution hosts
-         */
-        public Pointer execHosts;
-    }
-
-
-
-    /**
-     * \brief  job external message log.
-     */
-    public static class jobExternalMsgLog extends Structure {
-        public static class ByReference extends jobExternalMsgLog implements Structure.ByReference {}
-        public static class ByValue extends jobExternalMsgLog implements Structure.ByValue {}
-        public jobExternalMsgLog() {}
-        public jobExternalMsgLog(Pointer p) { super(p); read(); }
-
-
-        /**
-         * < The unique ID for the job
-         */
-        public int jobId;
-
-        /**
-         * < Job array index; must be 0 in JOB_NEW
-         */
-        public int idx;
-
-        /**
-         * < The message index
-         */
-        public int msgIdx;
-
-        /**
-         * < Message description
-         */
-        public String desc;
-
-        /**
-         * < The user ID of the submitter
-         */
-        public int userId;
-
-        /**
-         * < Size of the message
-         */
-        public NativeLong dataSize;
-
-        /**
-         * < The time the author posted the message.
-         */
-        public NativeLong postTime;
-
-        /**
-         * < The status of the message
-         */
-        public int dataStatus;
-
-        /**
-         * < Name of attached data file. If no file is attached, use null.
-         */
-        public String fileName;
-
-        /**
-         * < The author of the message
-         */
-        public byte[] userName = new byte[MAX_LSB_NAME_LEN];
-    }
-
-
-
-    /**
-     * \brief  reservation request.
-     */
-    public static class rsvRes extends Structure {
-        public static class ByReference extends rsvRes implements Structure.ByReference {}
-        public static class ByValue extends rsvRes implements Structure.ByValue {}
-        public rsvRes() {}
-        public rsvRes(Pointer p) { super(p); read(); }
-
-
-        /**
-         * < Name of the resource (currently: host)
-         */
-        public String resName;
-
-        /**
-         * < Reserved counter (currently: cpu number)
-         */
-        public int count;
-
-        /**
-         * < Used of the reserved counter (not used)
-         */
-        public int usedAmt;
-    }
-
-
-
-    /**
-     * \brief for advanced reservation.
-     */
-    public static class rsvFinishLog extends Structure {
-        public static class ByReference extends rsvFinishLog implements Structure.ByReference {}
-        public static class ByValue extends rsvFinishLog implements Structure.ByValue {}
-        public rsvFinishLog() {}
-        public rsvFinishLog(Pointer p) { super(p); read(); }
-
-
-        /**
-         * < Time when the reservation is required
-         */
-        public NativeLong rsvReqTime;
-
-        /**
-         * < Same as the options field in the addRsvRequest(lsbatch.h)
-         */
-        public int options;
-
-        /**
-         * < The user who creat the reservation
-         */
-        public int uid;
-
-        /**
-         * < Reservation ID
-         */
-        public String rsvId;
-
-        /**
-         * < Client of the reservation
-         */
-        public String name;
-
-        /**
-         * < Number of resources reserved
-         */
-        public int numReses;
-
-        /**
-         * < Allocation vector
-         */
-        public Pointer /* rsvRes.ByReference */ alloc;
-
-        /**
-         * < Time window within which the reservation is active \n Two forms: int1-int2 or [day1]:hour1:0-[day2]:hour2:0
-         */
-        public String timeWindow;
-
-        /**
-         * < Duration in seconds. duration = to - from : when the reservation expired
-         */
-        public NativeLong duration;
-
-        /**
-         * < Creator of the reservation
-         */
-        public String creator;
-    }
-
-
-
-    /**
-     * \brief  CPU Profile Log
-     */
-    public static class cpuProfileLog extends Structure {
-        public static class ByReference extends cpuProfileLog implements Structure.ByReference {}
-        public static class ByValue extends cpuProfileLog implements Structure.ByValue {}
-        public cpuProfileLog() {}
-        public cpuProfileLog(Pointer p) { super(p); read(); }
-
-
-        /**
-         * < Queue name
-         */
-        public byte[] servicePartition = new byte[MAX_LSB_NAME_LEN];
-
-        /**
-         * < The number of CPU required
-         */
-        public int slotsRequired;
-
-        /**
-         * < The number of CPU actually allocated
-         */
-        public int slotsAllocated;
-
-        /**
-         * < The number of CPU borrowed
-         */
-        public int slotsBorrowed;
-
-        /**
-         * < The number of CPU lent
-         */
-        public int slotsLent;
-        /** note:  the number of CPU reserved = slotsAllocated - slotsBorrowed + slotsLent */
-    }
-
-
-
-    /**
-     * \brief  job resize start notify log.
-     */
-    public static class jobResizeNotifyStartLog extends Structure {
-        public static class ByReference extends jobResizeNotifyStartLog implements Structure.ByReference {}
-        public static class ByValue extends jobResizeNotifyStartLog implements Structure.ByValue {}
-        public jobResizeNotifyStartLog() {}
-        public jobResizeNotifyStartLog(Pointer p) { super(p); read(); }
-
-
-        /**
-         * <  JobId
-         */
-        public int jobId;
-
-        /**
-         * <  Index
-         */
-        public int idx;
-
-        /**
-         * <  Notify Id
-         */
-        public int notifyId;
-
-        /**
-         * <  Number of resized hosts.
-         */
-        public int numResizeHosts;
-
-        /**
-         * <  Resize Hosts
-         */
-        public Pointer resizeHosts;
-    }
-
-
-
-    /**
-     * \brief  job resize accept notify log.
-     */
-    public static class jobResizeNotifyAcceptLog extends Structure {
-        public static class ByReference extends jobResizeNotifyAcceptLog implements Structure.ByReference {}
-        public static class ByValue extends jobResizeNotifyAcceptLog implements Structure.ByValue {}
-        public jobResizeNotifyAcceptLog() {}
-        public jobResizeNotifyAcceptLog(Pointer p) { super(p); read(); }
-
-
-        /**
-         * <  JobId
-         */
-        public int jobId;
-
-        /**
-         * <  Index
-         */
-        public int idx;
-
-        /**
-         * <  Notify Id
-         */
-        public int notifyId;
-
-        /**
-         * <  Resize Notify command pid
-         */
-        public int resizeNotifyCmdPid;
-
-        /**
-         * <  Resize Notify command pgid
-         */
-        public int resizeNotifyCmdPGid;
-
-        /**
-         * <  Status
-         */
-        public int status;
-    }
-
-
-
-    /**
-     * \brief  job resize done notify log.
-     */
-    public static class jobResizeNotifyDoneLog extends Structure {
-        public static class ByReference extends jobResizeNotifyDoneLog implements Structure.ByReference {}
-        public static class ByValue extends jobResizeNotifyDoneLog implements Structure.ByValue {}
-        public jobResizeNotifyDoneLog() {}
-        public jobResizeNotifyDoneLog(Pointer p) { super(p); read(); }
-
-
-        /**
-         * <  JobId
-         */
-        public int jobId;
-
-        /**
-         * <  Index
-         */
-        public int idx;
-
-        /**
-         * <  Notify Id
-         */
-        public int notifyId;
-
-        /**
-         * <  Status
-         */
-        public int status;
-    }
-
-
-
-    /**
-     * \brief  job resize release log.
-     */
-    public static class jobResizeReleaseLog extends Structure {
-        public static class ByReference extends jobResizeReleaseLog implements Structure.ByReference {}
-        public static class ByValue extends jobResizeReleaseLog implements Structure.ByValue {}
-        public jobResizeReleaseLog() {}
-        public jobResizeReleaseLog(Pointer p) { super(p); read(); }
-
-
-        /**
-         * <  JobId
-         */
-        public int jobId;
-
-        /**
-         * <  Index
-         */
-        public int idx;
-
-        /**
-         * <  Request Id
-         */
-        public int reqId;
-
-        /**
-         * <  Options
-         */
-        public int options;
-
-        /**
-         * <  User Id
-         */
-        public int userId;
-
-        /**
-         * <  User Name
-         */
-        public String userName;
-
-        /**
-         * <  Resize Notify command
-         */
-        public String resizeNotifyCmd;
-
-        /**
-         * <  Number of resized hosts
-         */
-        public int numResizeHosts;
-
-        /**
-         * <  Resized hosts
-         */
-        public Pointer resizeHosts;
-    }
-
-
-
-    /**
-     * \brief  job resize cancel log.
-     */
-    public static class jobResizeCancelLog extends Structure {
-        public static class ByReference extends jobResizeCancelLog implements Structure.ByReference {}
-        public static class ByValue extends jobResizeCancelLog implements Structure.ByValue {}
-        public jobResizeCancelLog() {}
-        public jobResizeCancelLog(Pointer p) { super(p); read(); }
-
-
-        /**
-         * <  JobId
-         */
-        public int jobId;
-
-        /**
-         * <  Index
-         */
-        public int idx;
-
-        /**
-         * <  User Id
-         */
-        public int userId;
-
-        /**
-         * <  User name
-         */
-        public String userName;
-    }
-
-
-
-    /**
-     * \brief log the running rusage of a job in the lsb.stream file
-     */
-    public static class jobRunRusageLog extends Structure {
-        public static class ByReference extends jobRunRusageLog implements Structure.ByReference {}
-        public static class ByValue extends jobRunRusageLog implements Structure.ByValue {}
-        public jobRunRusageLog() {}
-        public jobRunRusageLog(Pointer p) { super(p); read(); }
-
-
-        /**
-         * < The unique ID of the job
-         */
-        public int jobid;
-
-        /**
-         * < Job array index; must be 0 in JOB_NEW
-         */
-        public int idx;
-
-        /**
-         * < jrusage
-         */
-        public LibLsf.jRusage jrusage;
-    }
-
-
-
-    /**
-     * \brief  SLA event log.
-     */
-    public static class slaLog extends Structure {
-        public static class ByReference extends slaLog implements Structure.ByReference {}
-        public static class ByValue extends slaLog implements Structure.ByValue {}
-        public slaLog() {}
-        public slaLog(Pointer p) { super(p); read(); }
-
-
-        /**
-         * < Service class name
-         */
-        public String name;
-
-        /**
-         * < Consumer name associated with the service class
-         */
-        public String consumer;
-
-        /**
-         * < Objectives
-         */
-        public int goaltype;
-
-        /**
-         * < The service class state (ontime, delayed)
-         */
-        public int state;
-
-        /**
-         * < Optimum number of job slots (or concurrently running jobs) needed for the  service class to meet its service-level goals
-         */
-        public int optimum;
-
-        /**
-         * < Job counters for the service class
-         */
-        public int[] counters = new int[NUM_JGRP_COUNTERS];
-    }
-
-
-
-    /**
-     * \brief  a wrap of structure perfmonLog for performance metrics project
-     */
-    public static class perfmonLogInfo extends Structure {
-        public static class ByReference extends perfmonLogInfo implements Structure.ByReference {}
-        public static class ByValue extends perfmonLogInfo implements Structure.ByValue {}
-        public perfmonLogInfo() {}
-        public perfmonLogInfo(Pointer p) { super(p); read(); }
-
-
-        /**
-         * <  Sample period
-         */
-        public int samplePeriod;
-
-        /**
-         * <  Metrics
-         */
-        public IntByReference metrics;
-
-        /**
-         * <  Start time
-         */
-        public NativeLong startTime;
-
-        /**
-         * <  Log time
-         */
-        public NativeLong logTime;
-    }
-
-
-
-    /**
-     * \brief performance metrics log in lsb.stream
-     */
-    public static class perfmonLog extends Structure {
-        public static class ByReference extends perfmonLog implements Structure.ByReference {}
-        public static class ByValue extends perfmonLog implements Structure.ByValue {}
-        public perfmonLog() {}
-        public perfmonLog(Pointer p) { super(p); read(); }
-
-
-        /**
-         * < Sample rate
-         */
-        public int samplePeriod;
-
-        /**
-         * < Number of Queries
-         */
-        public int totalQueries;
-
-        /**
-         * < Number of Job Query
-         */
-        public int jobQuries;
-
-        /**
-         * < Number of Queue Query
-         */
-        public int queueQuries;
-
-        /**
-         * < Number of Host Query
-         */
-        public int hostQuries;
-
-        /**
-         * < Number of Submission Requests
-         */
-        public int submissionRequest;
-
-        /**
-         * < Number of Jobs Submitted
-         */
-        public int jobSubmitted;
-
-        /**
-         * < Number of Dispatched Jobs
-         */
-        public int dispatchedjobs;
-
-        /**
-         * < Number of Job Completed
-         */
-        public int jobcompleted;
-
-        /**
-         * < Number of MultiCluster Jobs Sent
-         */
-        public int jobMCSend;
-
-        /**
-         * < Number of MultiCluster Jobs Received
-         */
-        public int jobMCReceive;
-
-        /**
-         * < Start Time
-         */
-        public NativeLong startTime;
-    }
-
-
-
-    /**
-     * \brief task finish log.Task accounting record in ssched.acct
-     */
-    public static class taskFinishLog extends Structure {
-        public static class ByReference extends taskFinishLog implements Structure.ByReference {}
-        public static class ByValue extends taskFinishLog implements Structure.ByValue {}
-        public taskFinishLog() {}
-        public taskFinishLog(Pointer p) { super(p); read(); }
-
-
-        /**
-         * <  Job finish event
-         */
-        public jobFinishLog jobFinishLog;
-
-        /**
-         * < Task ID
-         */
-        public int taskId;
-
-        /**
-         * < Task index
-         */
-        public int taskIdx;
-
-        /**
-         * < Name of task
-         */
-        public String taskName;
-
-        /**
-         * < Bit mask of task options: \n TASK_IN_FILE (0x01)-specify input file \n TASK_OUT_FILE (0x02)-specify output file \n TASK_ERR_FILE (0x04)-specify error file \n TASK_PRE_EXEC (0x08)-specify pre-exec command \n TASK_POST_EXEC (0x10)-specify post-exec command \n TASK_NAME (0x20)-specify task name
-         */
-        public int taskOptions;
-
-        /**
-         * < Task Exit Reason \n TASK_EXIT_NORMAL = 0- normal exit \n TASK_EXIT_INIT = 1-generic task initialization failure \n TASK_EXIT_PATH = 2-failed to initialize path \n TASK_EXIT_NO_FILE = 3-failed to create task file \n TASK_EXIT_PRE_EXEC = 4- task pre-exec failed \n TASK_EXIT_NO_PROCESS = 5-fork failed \n TASK_EXIT_XDR = 6-xdr communication error \n TASK_EXIT_NOMEM = 7- no memory \n TASK_EXIT_SYS = 8-system call failed \n TASK_EXIT_TSCHILD_EXEC = 9-failed to run sschild \n TASK_ [...]
-         */
-        public int taskExitReason;
-    }
-
-
-
-    /**
-     * \brief End of stream event. The stream is moved to lsb.stream.0 and
-     * a new lsb.stream is opened. Readers of lsb.stream when encounter
-     * the event EVENT_END_OF_STREAM should close and reopen the
-     * lsb.stream file.
-     */
-    public static class eventEOSLog extends Structure {
-        public static class ByReference extends eventEOSLog implements Structure.ByReference {}
-        public static class ByValue extends eventEOSLog implements Structure.ByValue {}
-        public eventEOSLog() {}
-        public eventEOSLog(Pointer p) { super(p); read(); }
-
-
-        /**
-         * < Event end of stream
-         */
-        public int eos;
-    }
-
-
-
-    /**
-     * \brief job resize event: indicating a realized job allocation change
-     */
-    public static class jobResizeLog extends Structure {
-        public static class ByReference extends jobResizeLog implements Structure.ByReference {}
-        public static class ByValue extends jobResizeLog implements Structure.ByValue {}
-        public jobResizeLog() {}
-        public jobResizeLog(Pointer p) { super(p); read(); }
-
-
-        /**
-         * <  JobId
-         */
-        public int jobId;
-
-        /**
-         * <  Index
-         */
-        public int idx;
-
-        /**
-         * <  Start time
-         */
-        public NativeLong startTime;
-
-        /**
-         * <  User Id
-         */
-        public int userId;
-
-        /**
-         * <  User name
-         */
-        public byte[] userName = new byte[MAX_LSB_NAME_LEN];
-
-        /**
-         * < 0 grow, 1 shrink
-         */
-        public int resizeType;
-
-        /**
-         * < The start time of last allocation
-         */
-        public NativeLong lastResizeStartTime;
-
-        /**
-         * < The finish time of last allocation
-         */
-        public NativeLong lastResizeFinishTime;
-
-        /**
-         * < Allocation before the resize
-         */
-        public int numExecHosts;
-
-        /**
-         * <  Execute hosts
-         */
-        public Pointer execHosts;
-
-        /**
-         * < The delta of the allocation change
-         */
-        public int numResizeHosts;
-
-        /**
-         * <  Resize hosts
-         */
-        public Pointer resizeHosts;
-    }
-
-
-
-    /**
-     * \brief  Log event types.
-     */
-    public static class eventLog extends Union {
-        /**
-         * <  Job new event
-         */
-        public jobNewLog jobNewLog;
-
-        /**
-         * <  Job start event
-         */
-        public jobStartLog jobStartLog;
-
-        /**
-         * <  Job status event
-         */
-        public jobStatusLog jobStatusLog;
-
-        /**
-         * <  sbatchd job status event
-         */
-        public sbdJobStatusLog sbdJobStatusLog;
-
-        /**
-         * <  Job switch event
-         */
-        public jobSwitchLog jobSwitchLog;
-
-        /**
-         * <  Job move event
-         */
-        public jobMoveLog jobMoveLog;
-
-        /**
-         * <  Queue control event
-         */
-        public queueCtrlLog queueCtrlLog;
-
-/* New debug event*/
-        public newDebugLog newDebugLog;
-
-        /**
-         * <  Host control event
-         */
-        public hostCtrlLog hostCtrlLog;
-
-        /**
-         * <  mbatchd start event
-         */
-        public mbdStartLog mbdStartLog;
-
-        /**
-         * <  mbatchd die event
-         */
-        public mbdDieLog mbdDieLog;
-
-        /**
-         * <  Unfulfill event
-         */
-        public unfulfillLog unfulfillLog;
-
-        /**
-         * <  Job finish event
-         */
-        public jobFinishLog jobFinishLog;
-
-        /**
-         * <  Load index event
-         */
-        public loadIndexLog loadIndexLog;
-
-        /**
-         * <  Migration initiated event
-         */
-        public migLog migLog;
-
-        /**
-         * <  Calendar event
-         */
-        public calendarLog calendarLog;
-
-        /**
-         * <  Job forward event
-         */
-        public jobForwardLog jobForwardLog;
-
-        /**
-         * <  Job accept event
-         */
-        public jobAcceptLog jobAcceptLog;
-
-        /**
-         * <  Job accepted from another  cluster event
-         */
-        public statusAckLog statusAckLog;
-
-        /**
-         * <  Job signal event
-         */
-        public signalLog signalLog;
-
-        /**
-         * <  Job execution event
-         */
-        public jobExecuteLog jobExecuteLog;
-
-        /**
-         * <  Job message event
-         */
-        public jobMsgLog jobMsgLog;
-
-        /**
-         * <  Job message ackknowledge event
-         */
-        public jobMsgAckLog jobMsgAckLog;
-
-        /**
-         * <  Job requeue event
-         */
-        public jobRequeueLog jobRequeueLog;
-
-        /**
-         * <  Checkpoint event
-         */
-        public chkpntLog chkpntLog;
-
-        /**
-         * <  Signal with action event
-         */
-        public sigactLog sigactLog;
-
-        /**
-         * <  Job occupy request event
-         */
-        public jobOccupyReqLog jobOccupyReqLog;
-
-        /**
-         * <  Job vacate event
-         */
-        public jobVacatedLog jobVacatedLog;
-
-        /**
-         * <  Job start accept event
-         */
-        public jobStartAcceptLog jobStartAcceptLog;
-
-        /**
-         * <  Job clean event
-         */
-        public jobCleanLog jobCleanLog;
-
-        /**
-         * <  Job exception event
-         */
-        public jobExceptionLog jobExceptionLog;
-
-        /**
-         * <  Job group new event
-         */
-        public jgrpNewLog jgrpNewLog;
-
-        /**
-         * <  Job group Ctrl event
-         */
-        public jgrpCtrlLog jgrpCtrlLog;
-
-        /**
-         * <  Job Force Request event
-         */
-        public jobForceRequestLog jobForceRequestLog;
-
-        /**
-         * <  Event switch event
-         */
-        public logSwitchLog logSwitchLog;
-
-        /**
-         * <  Job modify event
-         */
-        public jobModLog jobModLog;
-
-        /**
-         * <  Job group stratus event
-         */
-        public jgrpStatusLog jgrpStatusLog;
-
-        /**
-         * <  Job attribute setting event
-         */
-        public jobAttrSetLog jobAttrSetLog;
-
-        /**
-         * <  Job external message event
-         */
-        public jobExternalMsgLog jobExternalMsgLog;
-
-        /**
-         * <  Job chunk event
-         */
-        public jobChunkLog jobChunkLog;
-
-        /**
-         * < sbatchd  unreported status event
-         */
-        public sbdUnreportedStatusLog sbdUnreportedStatusLog;
-
-        /**
-         * <  Reservation finish event
-         */
-        public rsvFinishLog rsvFinishLog;
-
-        /**
-         * <  Host group control Log
-         */
-        public hgCtrlLog hgCtrlLog;
-
-        /**
-         * <  cpu profile event
-         */
-        public cpuProfileLog cpuProfileLog;
-
-        /**
-         * <  Data logging event
-         */
-        public dataLoggingLog dataLoggingLog;
-
-        /**
-         * <  Job run rusage event
-         */
-        public jobRunRusageLog jobRunRusageLog;
-
-        /**
-         * <  Event EOS event
-         */
-        public eventEOSLog eventEOSLog;
-
-        /**
-         * <  SLA event
-         */
-        public slaLog slaLog;
-
-        /**
-         * <  Performance event
-         */
-        public perfmonLog perfmonLog;
-
-        /**
-         * <  Task finish event
-         */
-        public taskFinishLog taskFinishLog;
-
-        /**
-         * <  Job resize notify start event
-         */
-        public jobResizeNotifyStartLog jobResizeNotifyStartLog;
-
-        /**
-         * <  Job resize notify accept event
-         */
-        public jobResizeNotifyAcceptLog jobResizeNotifyAcceptLog;
-
-        /**
-         * <  Job resize notify done event
-         */
-        public jobResizeNotifyDoneLog jobResizeNotifyDoneLog;
-
-        /**
-         * <  Job resize release event
-         */
-        public jobResizeReleaseLog jobResizeReleaseLog;
-
-        /**
-         * <  Job resize cancel event
-         */
-        public jobResizeCancelLog jobResizeCancelLog;
-
-        /**
-         * <  Job resize event
-         */
-        public jobResizeLog jobResizeLog;
-
-/*#if defined(LSF_SIMULATOR)*/
-/**< Job array element event */
-        /*public jobArrayElementLog jobArrayElementLog;*/
-
-        /**< LSF simulator status event */
-        /*public mbdSimStatusLog   mbdSimStatusLog;*/
-        /*#endif*/
-    }
-
-
-
-
-    /**
-     * \brief  event records.
-     */
-    public static class eventRec extends Structure {
-        public static class ByReference extends eventRec implements Structure.ByReference {}
-        public static class ByValue extends eventRec implements Structure.ByValue {}
-        public eventRec() {}
-        public eventRec(Pointer p) { super(p); read(); }
-
-
-        /**
-         * < The mbatchd version number
-         */
-        public byte[] version = new byte[MAX_VERSION_LEN];
-
-        /**
-         * < Event type in \ref event_types
-         */
-        public int type;
-
-        /**
-         * < The time the event occurred
-         */
-        public NativeLong eventTime;
-
-        /**
-         * < The information for this type of event, contained in a structure  corresponding to type
-         */
-        public eventLog eventLog;
-    }
-
-
-
-    public static class eventLogFile extends Structure {
-        public static class ByReference extends eventLogFile implements Structure.ByReference {}
-        public static class ByValue extends eventLogFile implements Structure.ByValue {}
-        public eventLogFile() {}
-        public eventLogFile(Pointer p) { super(p); read(); }
-
-
-/* event file directory */
-        public byte[] eventDir = new byte[LibLsf.MAXFILENAMELEN];
-
-/* start and end event time */
-        public NativeLong beginTime, endTime;
-    }
-
-
-
-    public static class eventLogHandle extends Structure {
-        public static class ByReference extends eventLogHandle implements Structure.ByReference {}
-        public static class ByValue extends eventLogHandle implements Structure.ByValue {}
-        public eventLogHandle() {}
-        public eventLogHandle(Pointer p) { super(p); read(); }
-
-
-/* open event file pointer */
-        public Pointer fp;
-
-/* current open events file name */
-        public byte[] openEventFile = new byte[LibLsf.MAXFILENAMELEN];
-
-/* current open event file number */
-        public int curOpenFile;
-        public int lastOpenFile;                   /* last open event file number, 0
-                  means lsb.events */
-    }
-
-
-
-
-    public static final String LSF_JOBIDINDEX_FILENAME = "lsb.events.index";
-    public static final String LSF_JOBIDINDEX_FILETAG = "#LSF_JOBID_INDEX_FILE";
-
-/* structures used to handle jobId index file */
-
-    public static class jobIdIndexS extends Structure {
-        public static class ByReference extends jobIdIndexS implements Structure.ByReference {}
-        public static class ByValue extends jobIdIndexS implements Structure.ByValue {}
-        public jobIdIndexS() {}
-        public jobIdIndexS(Pointer p) { super(p); read(); }
-
-
-/* the index file name */
-        public byte[] fileName = new byte[LibLsf.MAXFILENAMELEN];
-
-/* open index file pointer */
-        public Pointer fp;
-
-/* version number for future use */
-        public float version;
-
-/* total number of rows(files) indices */
-        public int totalRows;
-
-/* last update time */
-        public NativeLong lastUpdate;
-
-/* current rows */
-        public int curRow;
-        /* the event file currently handled is */
-        /* (totalRows - curRow + 1) */
-
-/* time stamp of current row */
-        public NativeLong timeStamp;
-
-/* min jobId in that row */
-        public long minJobId;
-
-/* max jobId in that row */
-        public long maxJobId;
-
-/* total number of jobIds */
-        public int totalJobIds;
-
-/* jobId array of current row */
-        public IntByReference jobIds;
-    }
-
-
-
-/* structures used to hold one element of sorted int list */
-
-    public static class sortIntList extends Structure {
-        public static class ByReference extends sortIntList implements Structure.ByReference {}
-        public static class ByValue extends sortIntList implements Structure.ByValue {}
-        public sortIntList() {}
-        public sortIntList(Pointer p) { super(p); read(); }
-
-        public int value;
-
-/* points to next element */
-        public sortIntList.ByReference forw;
-
-/* points to prior element */
-        public sortIntList.ByReference back;
-    }
-
-
-
-    public static class nqsStatusReq extends Structure {
-        public static class ByReference extends nqsStatusReq implements Structure.ByReference {}
-        public static class ByValue extends nqsStatusReq implements Structure.ByValue {}
-        public nqsStatusReq() {}
-        public nqsStatusReq(Pointer p) { super(p); read(); }
-
-        public long jobId;
-        public int opCode;
-        public int reportCode;
-        public String nqsQueue;
-        public int fromUid;
-        public String fromUserName;
-        public String fromHostName;
-        public int idx;
-    }
-
-
-
-    public static class nqsStatusReply extends Structure {
-        public static class ByReference extends nqsStatusReply implements Structure.ByReference {}
-        public static class ByValue extends nqsStatusReply implements Structure.ByValue {}
-        public nqsStatusReply() {}
-        public nqsStatusReply(Pointer p) { super(p); read(); }
-
-        public String orgHost;
-        public String orgUser;
-        public NativeLong startTime;
-        public String jobName;
-        public String nqsQueue;
-        public String lsbManager;
-        public int options;
-        public String outFile;
-        public String errFile;
-    }
-
-
-
-/*
-*  SBD uses the following data structure to communicate with
-*  the resource manager.
-*
- */
-    public static final int LSB_MAX_SD_LENGTH = 128;
-
-    public static class lsbMsgHdr extends Structure {
-        public static class ByReference extends lsbMsgHdr implements Structure.ByReference {}
-        public static class ByValue extends lsbMsgHdr implements Structure.ByValue {}
-        public lsbMsgHdr() {}
-        public lsbMsgHdr(Pointer p) { super(p); read(); }
-
-        public int usrId;
-        public long jobId;
-        public int msgId;
-        public int type;
-        public String src;
-        public String dest;
-    }
-
-
-
-    public static class lsbMsg extends Structure {
-        public static class ByReference extends lsbMsg implements Structure.ByReference {}
-        public static class ByValue extends lsbMsg implements Structure.ByValue {}
-        public lsbMsg() {}
-        public lsbMsg(Pointer p) { super(p); read(); }
-
-        public lsbMsgHdr.ByReference header;
-        public String msg;
-    }
-
-
-
-/* data structures related to API_CONF */
-
-    public static final int CONF_NO_CHECK = 0x00;
-    public static final int CONF_CHECK = 0x01;
-    public static final int CONF_EXPAND = 0X02;
-    public static final int CONF_RETURN_HOSTSPEC = 0X04;
-    public static final int CONF_NO_EXPAND = 0X08;
-    public static final int CONF_HAS_CU = 0X10;
-
-    public static class paramConf extends Structure {
-        public static class ByReference extends paramConf implements Structure.ByReference {}
-        public static class ByValue extends paramConf implements Structure.ByValue {}
-        public paramConf() {}
-        public paramConf(Pointer p) { super(p); read(); }
-
-        public parameterInfo.ByReference param;
-    }
-
-
-
-    public static class userConf extends Structure {
-        public static class ByReference extends userConf implements Structure.ByReference {}
-        public static class ByValue extends userConf implements Structure.ByValue {}
-        public userConf() {}
-        public userConf(Pointer p) { super(p); read(); }
-
-        public int numUgroups;
-        public Pointer /* groupInfoEnt.ByReference */ ugroups;
-        public int numUsers;
-        public Pointer /* userInfoEnt.ByReference */ users;
-        public int numUserEquivalent;
-        public Pointer /* userEquivalentInfoEnt.ByReference */ userEquivalent;
-        public int numUserMapping;
-        public Pointer /* userMappingInfoEnt.ByReference */ userMapping;
-    }
-
-
-
-    public static class hostConf extends Structure {
-        public static class ByReference extends hostConf implements Structure.ByReference {}
-        public static class ByValue extends hostConf implements Structure.ByValue {}
-        public hostConf() {}
-        public hostConf(Pointer p) { super(p); read(); }
-
-        public int numHosts;
-        public Pointer /* hostInfoEnt.ByReference */ hosts;
-        public int numHparts;
-        public Pointer /* hostPartInfoEnt.ByReference */ hparts;
-        public int numHgroups;
-        public Pointer /* groupInfoEnt.ByReference */ hgroups;
-    }
-
-
-
-    /**
-     * \brief  lsb shared resource Instance.
-     */
-    public static class lsbSharedResourceInstance extends Structure {
-        public static class ByReference extends lsbSharedResourceInstance implements Structure.ByReference {}
-        public static class ByValue extends lsbSharedResourceInstance implements Structure.ByValue {}
-        public lsbSharedResourceInstance() {}
-        public lsbSharedResourceInstance(Pointer p) { super(p); read(); }
-
-
-        /**
-         * < Value used by mbatchd
-         */
-        public String totalValue;
-
-        /**
-         * < Reserved value
-         */
-        public String rsvValue;
-
-        /**
-         * < Number of Hosts associated with the resource.
-         */
-        public int nHosts;
-
-        /**
-         * < Hosts list
-         */
-        public Pointer hostList;
-    }
-
-
-
-    /**
-     * \brief lsb shared resource information.
-     */
-    public static class lsbSharedResourceInfo extends Structure {
-        public static class ByReference extends lsbSharedResourceInfo implements Structure.ByReference {}
-        public static class ByValue extends lsbSharedResourceInfo implements Structure.ByValue {}
-        public lsbSharedResourceInfo() {}
-        public lsbSharedResourceInfo(Pointer p) { super(p); read(); }
-
-
-        /**
-         * < Resource name
-         */
-        public String resourceName;
-
-        /**
-         * < Number of instances
-         */
-        public int nInstances;
-
-        /**
-         * < List of instances
-         */
-        public Pointer /* lsbSharedResourceInstance.ByReference */ instances;
-    }
-
-
-
-    public static class queueConf extends Structure {
-        public static class ByReference extends queueConf implements Structure.ByReference {}
-        public static class ByValue extends queueConf implements Structure.ByValue {}
-        public queueConf() {}
-        public queueConf(Pointer p) { super(p); read(); }
-
-        public int numQueues;
-        public Pointer /* queueInfoEnt.ByReference */ queues;
-    }
-
-
-
-    /**
-     * \brief  frame element information.
-     */
-    public static class frameElementInfo extends Structure {
-        public static class ByReference extends frameElementInfo implements Structure.ByReference {}
-        public static class ByValue extends frameElementInfo implements Structure.ByValue {}
-        public frameElementInfo() {}
-        public frameElementInfo(Pointer p) { super(p); read(); }
-
-
-        /**
-         * <  The job index in the frame job array.
-         */
-        public int jobindex;
-
-        /**
-         * <  The job status.
-         */
-        public int jobState;
-
-        /**
-         * <  The start frame of this frame job.
-         */
-        public int start;
-
-        /**
-         * <  The end frame of this frame job.
-         */
-        public int end;
-
-        /**
-         * <  The step of this frame job.
-         */
-        public int step;
-
-        /**
-         * <  The chunk size of this frame job.
-         */
-        public int chunk;
-    }
-
-
-
-    /**
-     * \brief  frame job Infomation.
-     */
-    public static class frameJobInfo extends Structure {
-        public static class ByReference extends frameJobInfo implements Structure.ByReference {}
-        public static class ByValue extends frameJobInfo implements Structure.ByValue {}
-        public frameJobInfo() {}
-        public frameJobInfo(Pointer p) { super(p); read(); }
-
-
-        /**
-         * < The job ID that the LSF system assigned to the frame job array.
-         */
-        public long jobGid;
-
-        /**
-         * < The max job number in one frame job array.
-         */
-        public int maxJob;
-
-        /**
-         * < The user submitted the frame job array.
-         */
-        public byte[] userName = new byte[MAX_LSB_NAME_LEN];
-
-        /**
-         * < Full job name
-         */
-        public byte[] jobName = new byte[LibLsf.MAXLINELEN];
-
-        /**
-         * < The full job name of the frame job array.  frameElementPtr The pointer to frame ob array table.
-         */
-        public frameElementInfo.ByReference frameElementPtr;
-    }
-
-
-
-    public static class nqsRusageReq extends Structure {
-        public static class ByReference extends nqsRusageReq implements Structure.ByReference {}
-        public static class ByValue extends nqsRusageReq implements Structure.ByValue {}
-        public nqsRusageReq() {}
-        public nqsRusageReq(Pointer p) { super(p); read(); }
-
-        public long jobId;
-        public int mem;
-        public float cpuTime;
-    }
-
-
-
-    public static class nqsRusageReply extends Structure {
-        public static class ByReference extends nqsRusageReply implements Structure.ByReference {}
-        public static class ByValue extends nqsRusageReply implements Structure.ByValue {}
-        public nqsRusageReply() {}
-        public nqsRusageReply(Pointer p) { super(p); read(); }
-
-        public int status;
-    }
-
-
-
-/* end of data structures related to API_CONF */
-
-/*
-*  Structure used for the Advance Reservation API
-*
-*  MBD allows the LSF administration to make advance reservation on
-*  behalf of a user, group or or for system maintenance purposes.
-*  Clients can add a reservation, remove a reservation and show
-*  reservation statuses.  The following data structures are used to
-*  encapsulate these requests
-*
-*     addRsvRequest: to add a reservation
-*     rmRsvRequest:  to remove a reservation
-*     rsvInfoEnt:    to display reservation information
-*
- */
-
-    public static class _rsvEventInfo_prePost_t extends Structure {
-        public static class ByReference extends _rsvEventInfo_prePost_t implements Structure.ByReference {}
-        public static class ByValue extends _rsvEventInfo_prePost_t implements Structure.ByValue {}
-        public _rsvEventInfo_prePost_t() {}
-        public _rsvEventInfo_prePost_t(Pointer p) { super(p); read(); }
-
-        public int shift;
-    }
-
-
-
-    public static final int RSV_EXECEVENTTYPE_PRE = 1;
-    public static final int RSV_EXECEVENTTYPE_POST = 2;
-
-    public static final String RSV_EXECEVENTNAME_PRE = "pre";
-    public static final String RSV_EXECEVENTNAME_POST = "post";
-
-    /**
-     * \brief  reservation excution event
-     */
-    public static class _rsvExecEvent_t extends Structure {
-        public static class ByReference extends _rsvExecEvent_t implements Structure.ByReference {}
-        public static class ByValue extends _rsvExecEvent_t implements Structure.ByValue {}
-        public _rsvExecEvent_t() {}
-        public _rsvExecEvent_t(Pointer p) { super(p); read(); }
-
-
-        /**
-         * < Event type
-         */
-        public int type;
-
-        /**
-         * < Boolean: is there additional info?
-         */
-        public int infoAttached;
-
-        /**
-         * < Info pertaining to event, such as offset
-         */
-        public Pointer info;
-    }
-
-
-
-    /**
-     * \brief  reservation excution command
-     */
-    public static class _rsvExecCmd_t extends Structure {
-        public static class ByReference extends _rsvExecCmd_t implements Structure.ByReference {}
-        public static class ByValue extends _rsvExecCmd_t implements Structure.ByValue {}
-        public _rsvExecCmd_t() {}
-        public _rsvExecCmd_t(Pointer p) { super(p); read(); }
-
-
-        /**
-         * < Full path to the command name
-         */
-        public String path;
-
-        /**
-         * < Size of events array
-         */
-        public int numEvents;
-
-        /**
-         * < Array of events that trigger -exec command
-         */
-        public Pointer /* _rsvExecEvent_t.ByReference */ events;
-    }
-
-
-
-    /**
-     *  \addtogroup reservation_option reservation_option
-     *    definitions of reservation options.
-     */
-
-    /**
-     * <  User
-     */
-    public static final int RSV_OPTION_USER = 0x0001;
-
-    /**
-     * <  Group
-     */
-    public static final int RSV_OPTION_GROUP = 0x0002;
-
-    /**
-     * <  System
-     */
-    public static final int RSV_OPTION_SYSTEM = 0x0004;
-
-    /**
-     * <  Recur
-     */
-    public static final int RSV_OPTION_RECUR = 0x0008;
-
-    /**
-     * <  Resource requirement
-     */
-    public static final int RSV_OPTION_RESREQ = 0x0010;
-
-    /**
-     * <  Host
-     */
-    public static final int RSV_OPTION_HOST = 0x0020;
-
-    /**
-     * <  Open
-     */
-    public static final int RSV_OPTION_OPEN = 0x0040;
-
-    /**
-     * <  Delete
-     */
-    public static final int RSV_OPTION_DELETE = 0x0080;
-
-    /**
-     * <  Close
-     */
-    public static final int RSV_OPTION_CLOSED = 0x0100;
-
-    /**
-     * <  Execute
-     */
-    public static final int RSV_OPTION_EXEC = 0x0200;
-
-    /**
-     * <  Remote execute
-     */
-    public static final int RSV_OPTION_RMEXEC = 0x0400;
-
-    /**
-     * <  Next instance
-     */
-    public static final int RSV_OPTION_NEXTINSTANCE = 0x0800;
-
-    /**
-     * <  Disable
-     */
-    public static final int RSV_OPTION_DISABLE = 0x1000;
-
-    /**
-     * <  Add host
-     */
-    public static final int RSV_OPTION_ADDHOST = 0x2000;
-
-    /**
-     * <  Remote host
-     */
-    public static final int RSV_OPTION_RMHOST = 0x4000;
-
-    /**
-     * <  Description
-     */
-    public static final int RSV_OPTION_DESCRIPTION = 0x8000;
-
-    /**
-     * <  Timewindow mode
-     */
-    public static final int RSV_OPTION_TWMOD = 0x10000;
-
-    /**
-     * <  Switch open/close
-     */
-    public static final int RSV_OPTION_SWITCHOPENCLOSE = 0x20000;
-
-    /**
-     * <  User mode
-     */
-    public static final int RSV_OPTION_USERMOD = 0x40000;
-
-    /**
-     * <  Reservation name
-     */
-    public static final int RSV_OPTION_RSVNAME = 0x80000;
-
-    /**
-     * <  Expired
-     */
-    public static final int RSV_OPTION_EXPIRED = 0x100000;
-
-    /**
-     * \brief add reservation request.
-     */
-    public static class addRsvRequest extends Structure {
-        public static class ByReference extends addRsvRequest implements Structure.ByReference {}
-        public static class ByValue extends addRsvRequest implements Structure.ByValue {}
-        public addRsvRequest() {}
-        public addRsvRequest(Pointer p) { super(p); read(); }
-
-
-        /**
-         * <Reservation options \ref reservation_option
-         */
-        public int options;
-
-        /**
-         * < User or group for which the reservation is made
-         */
-        public String name;
-
-        /**
-         * < Minimum number of processors required to run the job. See the -g option of brsvadd.
-         */
-        public int minNumProcs;
-
-        /**
-         * < Maximum number of processors required to run the job.
-         */
-        public int maxNumProcs;
-
-        /**< Range of number of processors */
-        //struct procRange;
-
-        /**
-         * < The number of invoker specified hosts for the reservation. If numAskedHosts is 0, all qualified hosts will be considered.
-         */
-        public int numAskedHosts;
-
-        /**
-         * < The array of names of invoker specified hosts for the reservation. The number of hosts is given by numAskedHosts. See the -m option of brsvadd.
-         */
-        public Pointer askedHosts;
-
-        /**
-         * < The resource requirements of the reservation. See the -R option of brsvadd.
-         */
-        public String resReq;
-
-        /**
-         * < Active time window for a recurring reservation. See the -t option of brsvadd.
-         */
-        public String timeWindow;
-
-        /**
-         * < Info for the -exec option.
-         */
-        public _rsvExecCmd_t.ByReference execCmd;
-
-        /**
-         * < Description for the reservation to be created. The description must be provided as a double quoted text string. The maximum length  is 512 chars.  Equivalent to the value of brsvadd -d.
-         */
-        public String desc;
-
-        /**
-         * < User-defined advance reservation name unique in an LSF cluster. The name is a string of letters, numeric chars, underscores, and dashes beginning with a letter. The maximum length of the name is 39 chars. Equivalent to the value of brsvadd -N.
-         */
-        public String rsvName;
-    }
-
-
-
-    /**
-     * \brief  remove reservation request.
-     */
-    public static class rmRsvRequest extends Structure {
-        public static class ByReference extends rmRsvRequest implements Structure.ByReference {}
-        public static class ByValue extends rmRsvRequest implements Structure.ByValue {}
-        public rmRsvRequest() {}
-        public rmRsvRequest(Pointer p) { super(p); read(); }
-
-
-        /**
-         * < Reservation ID of the reservation that you wish to remove.
-         */
-        public String rsvId;
-    }
-
-
-
-    /**
-     * \brief  modify reservation request
-     */
-    public static class modRsvRequest extends Structure {
-        public static class ByReference extends modRsvRequest implements Structure.ByReference {}
-        public static class ByValue extends modRsvRequest implements Structure.ByValue {}
-        public modRsvRequest() {}
-        public modRsvRequest(Pointer p) { super(p); read(); }
-
-
-        /**
-         * < Reservation ID of the reservation that you wish to modify.
-         */
-        public String rsvId;
-
-        /**
-         * < LSF user name for the reservation. See the -g option of brsvadd.
-         */
-        public addRsvRequest fieldsFromAddReq;
-
-        /**
-         * < Disabled time duration
-         */
-        public String disabledDuration;
-    }
-
-
-
-    /**
-     * \brief  host reservation information entry.
-     */
-    public static class hostRsvInfoEnt extends Structure {
-        public static class ByReference extends hostRsvInfoEnt implements Structure.ByReference {}
-        public static class ByValue extends hostRsvInfoEnt implements Structure.ByValue {}
-        public hostRsvInfoEnt() {}
-        public hostRsvInfoEnt(Pointer p) { super(p); read(); }
-
-
-        /**
-         * < Host name.
-         */
-        public String host;
-
-        /**
-         * < Number of CPUs reserved on the host.
-         */
-        public int numCPUs;
-
-        /**
-         * < Number of job slots reserved on the host.
-         */
-        public int numSlots;
-
-        /**
-         * < Number of processors reserved on the host.
-         */
-        public int numRsvProcs;
-
-        /**
-         * < Count for used + suspended from reserved slots
-         */
-        public int numusedRsvProcs;
-
-        /**
-         * < Number of processors in use on the host.
-         */
-        public int numUsedProcs;
-    }
-
-
-
-    /**
-     * \brief  reservation information entry.
-     */
-    public static class rsvInfoEnt extends Structure {
-        public static class ByReference extends rsvInfoEnt implements Structure.ByReference {}
-        public static class ByValue extends rsvInfoEnt implements Structure.ByValue {}
-        public rsvInfoEnt() {}
-        public rsvInfoEnt(Pointer p) { super(p); read(); }
-
-
-        /**
-         * < Reservation options, see \ref reservation_option
-         */
-        public int options;
-
-        /**
-         * < Reservation ID returned from mbatchd. If the reservation fails, this is null. The memory for rsvid is allocated by the caller.
-         */
-        public String rsvId;
-
-        /**
-         * <  LSF user group name for the reservation. See the -g option of brsvadd.
-         */
-        public String name;
-
-        /**
-         * <  Number of hosts reserved
-         */
-        public int numRsvHosts;
-
-        /**
-         * <  Info about the reserved hosts
-         */
-        public Pointer /* hostRsvInfoEnt.ByReference */ rsvHosts;
-
-        /**
-         * < Active time window for a recurring reservation. See the -t option of  brsvadd.
-         */
-        public String timeWindow;
-
-        /**
-         * < Number of jobs running in the reservation.
-         */
-        public int numRsvJobs;
-
-        /**
-         * < Job IDs of jobs running in the reservation.
-         */
-        public LongByReference jobIds;
-
-        /**
-         * < Status of jobs running in the reservation.
-         */
-        public IntByReference jobStatus;
-
-        /**
-         * <  Description for the reservation to be created. The description must be provided as a double quoted text string. The maximum length is 512 chars. Equivalent to the value of brsvadd -d.
-         */
-        public String desc;
-
-        /**
-         * <  Null-terminated list of disabled durations
-         */
-        public Pointer disabledDurations;
-
-        /**
-         * <  The current state of the reservation - active or inactive.
-         */
-        public int state;
-
-        /**
-         * <  The time of the next instance of a recurring reservation.
-         */
-        public String nextInstance;
-
-        /**
-         * <  Creator of the reservation.
-         */
-        public String creator;
-    }
-
-
-
-/* backfill window related data structures and functions */
-
-    public static class slotInfoRequest extends Structure {
-        public static class ByReference extends slotInfoRequest implements Structure.ByReference {}
-        public static class ByValue extends slotInfoRequest implements Structure.ByValue {}
-        public slotInfoRequest() {}
-        public slotInfoRequest(Pointer p) { super(p); read(); }
-
-        /* options mask */
-
-/* Option -R */
-        public static int SLOT_OPTION_RESREQ = 0X001;
-
-        public int options;
-
-/* Resource requirement string */
-        public String resReq;
-    }
-
-
-
-/*copy from SRInfo*/
-
-    public static class SRInfoEnt extends Structure {
-        public static class ByReference extends SRInfoEnt implements Structure.ByReference {}
-        public static class ByValue extends SRInfoEnt implements Structure.ByValue {}
-        public SRInfoEnt() {}
-        public SRInfoEnt(Pointer p) { super(p); read(); }
-
-
-/*number of reserved slots*/
-        public int numReserved;
-
-/* job's predicted start time */
-        public NativeLong predictedStartTime;
-    }
-
-
-
-    public static class hostSRInfoEnt extends Structure {
-        public static class ByReference extends hostSRInfoEnt implements Structure.ByReference {}
-        public static class ByValue extends hostSRInfoEnt implements Structure.ByValue {}
-        public hostSRInfoEnt() {}
-        public hostSRInfoEnt(Pointer p) { super(p); read(); }
-
-        public String host;
-        public int hStatus;
-        public int userJobLimit;
-        public int maxJobs;
-        public int numJobs;
-        public int numRUN;
-        public int numSSUSP;
-        public int numUSUSP;
-        public int numRESERVE;
-        public int numSR;
-        public Pointer /* SRInfoEnt.ByReference */ SRInfo;
-    }
-
-
-
-    public static class slotInfoReply extends Structure {
-        public static class ByReference extends slotInfoReply implements Structure.ByReference {}
-        public static class ByValue extends slotInfoReply implements Structure.ByValue {}
-        public slotInfoReply() {}
-        public slotInfoReply(Pointer p) { super(p); read(); }
-
-
-/* to store the time of Master host */
-        public NativeLong masterTime;
-        public int numHosts;
-        public Pointer /* hostSRInfoEnt.ByReference */ hostInfo;
-        public int numAR;
-        public Pointer /* rsvInfoEnt.ByReference */ ARInfo;
-    }
-
-
-
-
-/* the general limit related data structures and functions */
-
-
-    public static final int LSB_RSRC_LIMIT_TYPE_SLOTS = 0;
-    public static final int LSB_RSRC_LIMIT_TYPE_SLOT_PERPSR = 1;
-    public static final int LSB_RSRC_LIMIT_TYPE_MEM = 2;
-    public static final int LSB_RSRC_LIMIT_TYPE_MEM_PERCENT = 3;
-    public static final int LSB_RSRC_LIMIT_TYPE_SWP = 4;
-    public static final int LSB_RSRC_LIMIT_TYPE_SWP_PERCENT = 5;
-    public static final int LSB_RSRC_LIMIT_TYPE_TMP = 6;
-    public static final int LSB_RSRC_LIMIT_TYPE_TMP_PERCENT = 7;
-    public static final int LSB_RSRC_LIMIT_TYPE_JOBS = 8;
-
-/* all external resources */
-    public static final int LSB_RSRC_LIMIT_TYPE_EXT_RSRC = 9;
-
-    /**
-     * \addtogroup _consumertype _consumertype
-     * consumer types
-     */
-    public static interface consumerType {
-        /**
-         * < Queues
-         */
-        public static final int LIMIT_QUEUES = 1;
-
-        /**
-         * < Per-queue
-         */
-        public static final int LIMIT_PER_QUEUE = 2;
-
-        /**
-         * < Users
-         */
-        public static final int LIMIT_USERS = 3;
-
-        /**
-         * < Per-users
-         */
-        public static final int LIMIT_PER_USER = 4;
-
-        /**
-         * < Hosts
-         */
-        public static final int LIMIT_HOSTS = 5;
-
-        /**
-         * < Per-host
-         */
-        public static final int LIMIT_PER_HOST = 6;
-
-        /**
-         * < Projects
-         */
-        public static final int LIMIT_PROJECTS = 7;
-
-        /**
-         * < Per-project
-         */
-        public static final int LIMIT_PER_PROJECT = 8;
-    }
-
-
-    /**< Type definitions */
-
-    /**
-     * \brief  limit consumer
-     */
-    public static class _limitConsumer extends Structure {
-        public static class ByReference extends _limitConsumer implements Structure.ByReference {}
-        public static class ByValue extends _limitConsumer implements Structure.ByValue {}
-        public _limitConsumer() {}
-        public _limitConsumer(Pointer p) { super(p); read(); }
-
-
-        /**
-         * < Consumer type ( _consumertype ):  -  Queues per-queue -  Users and per-user -  Hosts and per-host -  Projects and per-project
-         */
-        public int type;
-
-        /**
-         * < Consumer name
-         */
-        public String name;
-    }
-
-
-
-    /**
-     * \brief  limit resource.
-     */
-    public static class _limitResource extends Structure {
-        public static class ByReference extends _limitResource implements Structure.ByReference {}
-        public static class ByValue extends _limitResource implements Structure.ByValue {}
-        public _limitResource() {}
-        public _limitResource(Pointer p) { super(p); read(); }
-
-
-        /**
-         * < Resource name
-         */
-        public String name;
-
-        /**
-         * < Resource type
-         */
-        public int type;
-
-        /**
-         * < Resource val
-         */
-        public float val;
-    }
-
-
-
-    /**
-     * \brief   limit information request
-     */
-    public static class _limitInfoReq extends Structure {
-        public static class ByReference extends _limitInfoReq implements Structure.ByReference {}
-        public static class ByValue extends _limitInfoReq implements Structure.ByValue {}
-        public _limitInfoReq() {}
-        public _limitInfoReq(Pointer p) { super(p); read(); }
-
-
-        /**
-         * < Limit policy name given by the user.
-         */
-        public String name;
-
-        /**
-         * < Number of consumers
-         */
-        public int consumerC;
-
-        /**
-         * < Consumer name, queue/host/user/project
-         */
-        public Pointer /* _limitConsumer.ByReference */ consumerV;
-    }
-
-
-
-    /**
-     * \brief  limit item.
-     */
-    public static class _limitItem extends Structure {
-        public static class ByReference extends _limitItem implements Structure.ByReference {}
-        public static class ByValue extends _limitItem implements Structure.ByValue {}
-        public _limitItem() {}
-        public _limitItem(Pointer p) { super(p); read(); }
-
-
-        /**
-         * < Number of consumers
-         */
-        public int consumerC;
-
-        /**
-         * < Consumers, such as queue, host, user or project
-         */
-        public Pointer /* _limitConsumer.ByReference */ consumerV;
-
-        /**
-         * < Number of resources
-         */
-        public int resourceC;
-
-        /**
-         * < Resources list
-         */
-        public Pointer /* _limitResource.ByReference */ resourceV;
-    }
-
-
-
-    /**
-     * \brief  limit information entry .
-     */
-    public static class _limitInfoEnt extends Structure {
-        public static class ByReference extends _limitInfoEnt implements Structure.ByReference {}
-        public static class ByValue extends _limitInfoEnt implements Structure.ByValue {}
-        public _limitInfoEnt() {}
-        public _limitInfoEnt(Pointer p) { super(p); read(); }
-
-
-        /**
-         * < Limit policy name given by the user
-         */
-        public String name;
-
-        /**
-         * < Limit configuration
-         */
-        public _limitItem confInfo;
-
-        /**
-         * < Size of limit dynamic usage info array
-         */
-        public int usageC;
-
-        /**
-         * < Limit dynamic usage info array
-         */
-        public Pointer /* _limitItem.ByReference */ usageInfo;
-
-    }
-
-
-
-/* action code for threshold based on type/model, is used for
-*  predefinedThresholdTypeModel().
- */
-
-    public static final int ADD_THRESHOLD = 1;
-    public static final int GET_THRESHOLD = 2;
-    public static final int DEL_THRESHOLD = 3;
-
-/* Structure to hold thresholds defined based on host's type/model */
-
-    public static class thresholdEntry extends Structure {
-        public static class ByReference extends thresholdEntry implements Structure.ByReference {}
-        public static class ByValue extends thresholdEntry implements Structure.ByValue {}
-        public thresholdEntry() {}
-        public thresholdEntry(Pointer p) { super(p); read(); }
-
-
-/* Name of type or model */
-        public String attr;
-
-/* Pointer to hostInfo */
-        public hostInfoEnt.ByReference hostEntryPtr;
-    }
-
-
-
-    /**
-     * \page lsb_limitInfo lsb_limitInfo
-     * \brief gets resource allocation limit configuration and dynamic usage
-     * information.
-     * <p/>
-     * Displays current usage of resource allocation limits configured in Limit
-     * sections in lsb.resources:
-     * \li    Configured limit policy name
-     * \li    Users
-     * \li    Queues
-     * \li    Hosts
-     * \li    Project names
-     * <p/>
-     * <b>\#include <lsf/lsbatch.h>
-     * <p/>
-     * int lsb_limitInfo( limitInfoReq.ByReference req,  limitInfoEnt.ByReference[] limitItemRef,
-     * IntByReference size, lsInfo.ByReference lsInfo)</b>
-     *
-     * @return int:-1
-     *         \n Function failed.
-     *         <p/>
-     *         \b Errors:
-     *         \par
-     *         On failure, lsberrno is set to indicate the error.
-     *         <p/>
-     *         <b>Equivalent line command:</b>
-     *         \par
-     *         blimits
-     *         <p/>
-     *         \b Files
-     *         \par
-     *         $LSB_CONFDIR/cluster_name/configdir/lsb.queues \n
-     *         $LSB_CONFDIR/cluster_name/configdir/lsb.users \n
-     *         $LSB_CONFDIR/cluster_name/configdir/lsb.hosts \n
-     *         $LSB_CONFDIR/cluster_name/configdir/lsb.resources
-     * @param req input, the user request for limit information
-     * @param limitItemRef output, the limit information array
-     * @param size output, the size of the limit information array
-     * @param lsInfo Please refer to the \ref lsInfo structure.
-     * <p/>
-     * <b>Data Structures:</b>
-     * \par
-     * _limitInfoReq
-     * \n _limitConsumer
-     * \n _limitInfoEnt
-     * \n _limitItem
-     * \n _limitResource
-     * <p/>
-     * <b>Define Statements:</b>
-     * \par
-     * \ref _consumertype
-     * #see \ref lsb_freeLimitInfoEnt
-     */
-    public static native int lsb_limitInfo(_limitInfoReq req, Pointer limitItemRef, IntByReference size, LibLsf.lsInfo lsInfo);
-
-    /**
-     * \page lsb_freeLimitInfoEnt lsb_freeLimitInfoEnt
-     * \brief Frees the memory allocated by \ref lsb_limitInfo.
-     * <p/>
-     * <b>\#include <lsf/lsbatch.h>
-     * <p/>
-     * void lsb_freeLimitInfoEnt(limitInfoEnt.ByReference  ent, int size)</b>
-     *
-     * @param size input, the size of the limit information array
-     *             <p/>
-     *             <b>Data Structures:</b>
-     *             \par
-     *             _limitInfoEnt
-     *             \n _limitItem
-     *             \n _limitConsumer
-     *             \n _limitResource
-     *             <p/>
-     *             <b>Define Statements:</b>
-     *             \par
-     *             \ref _consumertype
-     * return void
-     *         \n There's no return value.
-     *         <p/>
-     *         \b Errors:
-     *         \par
-     *         On failure, lsberrno is set to indicate the error.
-     *         <p/>
-     *         <b>Equivalent line commands:</b>
-     *         \par
-     *         blimits
-     *         <p/>
-     *         <b>Files:</b>
-     *         \par
-     *         $LSB_CONFDIR/cluster_name/configdir/lsb.queues \n
-     *         $LSB_CONFDIR/cluster_name/configdir/lsb.users \n
-     *         $LSB_CONFDIR/cluster_name/configdir/lsb.hosts \n
-     *         $LSB_CONFDIR/cluster_name/configdir/lsb.resources
-     * @param ent input, the array of limit information
-     * #see \ref lsb_limitInfo
-     */
-
-    public static native void lsb_freeLimitInfoEnt(_limitInfoEnt ent, int size);
-
-    /**
-     *  \addtogroup resizablejob_related resizablejob_related
-     *  Resizable job related definitions.
-     */
-
-    /**
-     * < Means release no slots
-     */
-    public static final int LSB_RESIZE_REL_NONE = 0x0;
-
-    /**
-     * < Means release all slots-In this case, nHosts, hosts and slots  indicate the slots that are not released
-     */
-    public static final int LSB_RESIZE_REL_ALL = 0x01;
-
-    /**
-     * < Means cancel any pending resize request
-     */
-    public static final int LSB_RESIZE_REL_CANCEL = 0x02;
-
-    /**
-     * < Means execute no resize notification command
-     */
-    public static final int LSB_RESIZE_REL_NO_NOTIFY = 0x04;
-
-    /**
-     * \brief  job resize release.
-     */
-    public static class job_resize_release extends Structure {
-        public static class ByReference extends job_resize_release implements Structure.ByReference {}
-        public static class ByValue extends job_resize_release implements Structure.ByValue {}
-        public job_resize_release() {}
-        public job_resize_release(Pointer p) { super(p); read(); }
-
-
-        /**
-         * < LSF job ID
-         */
-        public long jobId;
-
-        /**
-         * < Options is constructed from the bitwise inclusive OR of zero or more of the flags, as defined in \ref resizablejob_related .
-         */
-        public int options;
-
-        /**
-         * < Number of hosts in the hosts list, if no hosts are to be specified this should be zero
-         */
-        public int nHosts;
-
-        /**
-         * < Specified hosts list, nHosts number of elements
-         */
-        public Pointer hosts;
-
-        /**
-         * < Slots list, each element specifies the number of slots per corresponding host (0 implies all), nHosts number of elements
-         */
-        public IntByReference slots;
-
-        /**
-         * < Name and location of notification command
-         */
-        public String notifyCmd;
-    }
-
-
-
-    public static class job_resize_request extends Structure {
-        public static class ByReference extends job_resize_request implements Structure.ByReference {}
-        public static class ByValue extends job_resize_request implements Structure.ByValue {}
-        public job_resize_request() {}
-        public job_resize_request(Pointer p) { super(p); read(); }
-
-        public long jobId;
-        public int options;
-
-/* array size */
-        public int nHosts;
-
-/* array of hosts */
-        public Pointer hosts;
-
-/* notification command */
-        public String notifyCmd;
-    }
-
-
-
-/*
-*  End of resizable job related definitions
- */
-
-/* Job Dependency Display */
-
-
-/* Job Dependency Display */
-/* for options */
-    /**
-     *  \addtogroup query_depend query_depend
-     *  Job Dependency Display for options
-     */
-
-    /**
-     * <  Recursively
-     */
-    public static final int QUERY_DEPEND_RECURSIVELY = 0x1;
-
-    /**
-     * <  Detail
-     */
-    public static final int QUERY_DEPEND_DETAIL = 0x2;
-
-    /**
-     * <  Unsatisfied
-     */
-    public static final int QUERY_DEPEND_UNSATISFIED = 0x4;
-
-    /**
-     * <  Child
-     */
-    public static final int QUERY_DEPEND_CHILD = 0x8;
-
-    /**
-     * \brief  job dependent request.
-     */
-
-    public static class jobDepRequest extends Structure {
-        public static class ByReference extends jobDepRequest implements Structure.ByReference {}
-        public static class ByValue extends jobDepRequest implements Structure.ByValue {}
-        public jobDepRequest() {}
-        public jobDepRequest(Pointer p) { super(p); read(); }
-
-        /**
-         * < Job ID of the queried job or job array.
-         */
-        public long jobId;
-
-        /**
-         * < You can set the following bits into this field:\n QUERY_DEPEND_RECURSIVELY\n Query the dependency information recursively.\n QUERY_DEPEND_DETAIL\n Query the detailed dependency information.\n QUERY_DEPEND_UNSATISFIED\n Query the jobs that cause this job pend.\n QUERY_DEPEND_CHILD\n Query child jobs.
-         */
-        public int options;
-
-        /**
-         * < The level when you set QUERY_DEPEND_RECURSIVELY.
-         */
-        public int level;
-    }
-
-
-
-
-    /**
-     * \brief  queried jobs.
-     */
-    public static class queriedJobs extends Structure {
-        public static class ByReference extends queriedJobs implements Structure.ByReference {}
-        public static class ByValue extends queriedJobs implements Structure.ByValue {}
-        public queriedJobs() {}
-        public queriedJobs(Pointer p) { super(p); read(); }
-
-
-        /**
-         * < Job ID of the queried job or job array.
-         */
-        public long jobId;
-
-        /**
-         * < The whole dependency condition of the job.
-         */
-        public String dependcondition;
-
-        /**
-         * < Whether the condition is satisfied.
-         */
-        public int satisfied;
-    }
-
-
-
-/* for hasDependency */
-    /**
-     *  \addtogroup job_has_depend job_has_depend
-     *  options for hasDependency
-     */
-
-    /**
-     * <  Job has dependency
-     */
-    public static final int JOB_HAS_DEPENDENCY = 0x1;
-
-    /**
-     * <  Job has individual  condition.
-     */
-    public static final int JOB_HAS_INDIVIDUAL_CONDITION = 0x2;
-
-    /**
-     * \brief  dependency jobs.
-     */
-
-    public static class dependJobs extends Structure {
-        public static class ByReference extends dependJobs implements Structure.ByReference {}
-        public static class ByValue extends dependJobs implements Structure.ByValue {}
-        public dependJobs() {}
-        public dependJobs(Pointer p) { super(p); read(); }
-
-        /**
-         * < Job ID. By default, it is the parent job of the queried job. Modify to child job by setting QUERY_DEPEND_CHILD in options of JobDepRequest.
-         */
-        public long jobId;
-
-        /**
-         * < The job name associated with the job ID.
-         */
-        public String jobname;
-
-        /**
-         * < The number of degrees of separation from the original job.
-         */
-        public int level;
-
-        /**
-         * < Job status of the job.
-         */
-        public int jobstatus;
-
-        /**
-         * < Whether the job ID has a dependency or not. When you set QUERY_DEPEND_RECURSIVELY in options of JobDepRequest, 0 indicates job ID does not have a dependency. Otherwise, one or more of the following bits displays:-  JOB_HAS_DEPENDENCY: Job has a dependency.-  JOB_HAS_INDIVIDUAL_CONDITION: Job has an individual dependency condition when it is an element of job array.
-         */
-        public int hasDependency;
-
-        /**
-         * < When you set "QUERY_DEPEND_DETAIL" into options, it is dependency condition of jobId. It is "" when you do not set "QUERY_DEPEND_DETAIL".
-         */
-        public String condition;
-
-        /**
-         * < Whether the condition is satisfied.
-         */
-        public int satisfied;
-
-        /**
-         * < Job ID. By default, it is the child job. Modify to parent job by setting QUERY_DEPEND_CHILD in options of JobDepRequest
-         */
-        public long depJobId;
-    }
-
-
-
-    /**
-     * \brief  job dependent information.
-     */
-
-    public static class jobDependInfo extends Structure {
-        public static class ByReference extends jobDependInfo implements Structure.ByReference {}
-        public static class ByValue extends jobDependInfo implements Structure.ByValue {}
-        public jobDependInfo() {}
-        public jobDependInfo(Pointer p) { super(p); read(); }
-
-
-        /**
-         * < You can set the following bits into this field:\n QUERY_DEPEND_RECURSIVELY\n Query the dependency information recursively.\n QUERY_DEPEND_DETAIL\n Query the detailed dependency information.\n QUERY_DEPEND_UNSATISFIED\n Query the jobs that cause this job pend.\n QUERY_DEPEND_CHILD\n Query child jobs.
-         */
-        public int options;
-
-        /**
-         * < The number of jobs you queried. By default, the value is 1. However, when you set QUERY_DEPEND_DETAIL in the options and you query a job array where some elements have a dependency condition that has changed, the value is the number of the changed element + 1.
-         */
-        public int numQueriedJobs;
-
-        /**
-         * < The jobs you queried.
-         */
-        public Pointer /* queriedJobs.ByReference */ queriedJobs;
-
-        /**
-         * < The number of levels returned.
-         */
-        public int level;
-
-        /**
-         * < The number of jobs returned.
-         */
-        public int numJobs;
-
-        /**
-         * < The returned dependency jobs.
-         */
-        public Pointer /* dependJobs.ByReference */ depJobs;
-    }
-
-
-
-
-/*
-*  Functional prototypes of the Advance Reservation API
- */
-
-
-/* Macros */
-
-    public static boolean IS_PEND(int s) {
-        return (JNAUtils.toBoolean((s) & JOB_STAT_PEND) || JNAUtils.toBoolean((s) & JOB_STAT_PSUSP));
-    }
-
-/* Do not test JOB_STAT_UNKWN in IS_START() */
-
-    public static boolean IS_START(int s) {
-        return (JNAUtils.toBoolean((s) & JOB_STAT_RUN) || JNAUtils.toBoolean((s) & JOB_STAT_SSUSP) || JNAUtils.toBoolean((s) & JOB_STAT_USUSP));
-    }
-
-    public static boolean IS_FINISH(int s) {
-        return (JNAUtils.toBoolean((s) & JOB_STAT_DONE) || JNAUtils.toBoolean((s) & JOB_STAT_EXIT));
-    }
-
-    public static boolean IS_SUSP(int s) {
-        return (JNAUtils.toBoolean((s) & JOB_STAT_PSUSP) || JNAUtils.toBoolean((s) & JOB_STAT_SSUSP) || JNAUtils.toBoolean((s) & JOB_STAT_USUSP));
-    }
-
-/* Macro for checking post job process. (IO_SPOOL) */
-
-    public static boolean IS_POST_DONE(int s) {
-        return (((s) & JOB_STAT_PDONE) == JOB_STAT_PDONE);
-    }
-
-    public static boolean IS_POST_ERR(int s) {
-        return (((s) & JOB_STAT_PERR) == JOB_STAT_PERR);
-    }
-
-    public static boolean IS_POST_FINISH(int s) {
-        return (IS_POST_DONE(s) || IS_POST_ERR(s));
-    }
-
-/* On Windows, a DLL library needs _declspec(dllexport) to export
-*  a symbol, but doing so breaks the static library build, so
-*  lsberrno is exposed as a function instead of a variable.
-*/
-
-    public static int lsberrno() {
-        return lsb_errno().getValue();
-    }
-
-
-
-
-/*
-*  Version of the mbatchd that was last contacted.
-*  -1 indicates the mbatchd has not been contacted.
- */
-    //public int lsb_mbd_version;
-
-/*
-*  The data definition for host name list operations
- */
-    public static final int PRINT_SHORT_NAMELIST = 0x01;
-    public static final int PRINT_LONG_NAMELIST = 0x02;
-    public static final int PRINT_MCPU_HOSTS = 0x04;
-
-    public static class nameList extends Structure {
-        public static class ByReference extends nameList implements Structure.ByReference {}
-        public static class ByValue extends nameList implements Structure.ByValue {}
-        public nameList() {}
-        public nameList(Pointer p) { super(p); read(); }
-
-
-/* number of names */
-        public int listSize;
-
-/* a group of names */
-        public Pointer names;
-
-/* the occurrence count of the corresponding name */
-        public IntByReference counter;
-    }
-
-
-
-    public static native nameList.ByReference lsb_parseShortStr(String string1, int int1);
-
-    public static native nameList.ByReference lsb_parseLongStr(String string1);
-
-    public static native String lsb_printNameList(nameList namelist1, int int1);
-
-    public static native nameList.ByReference lsb_compressStrList(Pointer stringArray1, int int1);
-
-    public static native String lsb_splitName(String string1, IntByReference int1);
-
-    public static native IntByReference lsb_errno();
-
-
-/* external routines related to API_CONF */
-
-    public static native paramConf.ByReference lsb_readparam(LibLsf.lsConf lsConf1);
-
-    public static native userConf.ByReference lsb_readuser(LibLsf.lsConf lsConf1, int int1, LibLsf.clusterConf clusterConf1);
-
-    public static native userConf.ByReference lsb_readuser_ex(LibLsf.lsConf lsConf1, int int1, LibLsf.clusterConf clusterConf1, LibLsf.sharedConf sharedConf1);
-
-    public static native hostConf.ByReference lsb_readhost(LibLsf.lsConf lsConf1, LibLsf.lsInfo lsInfo1, int int1, LibLsf.clusterConf clusterConf1);
-
-    public static native queueConf.ByReference lsb_readqueue(LibLsf.lsConf lsConf1, LibLsf.lsInfo lsInfo1, int int1, LibLsf.sharedConf sharedConf1, LibLsf.clusterConf clusterConf1);
-
-    public static native void updateClusterConf(LibLsf.clusterConf clusterConf1);
-
-/* end of external routines related to API_CONF */
-
-    /**
-     * \page lsb_hostpartinfo lsb_hostpartinfo
-     * Returns information about host partitions.
-     * <p/>
-     * <b>\#include <lsf/lsbatch.h>
-     * <p/>
-     * hostPartInfoEnt.ByReference lsb_hostpartinfo (String[] hostParts,
-     * IntByReference numHostParts)</b> @param hostParts An array of host partition names.
-     *
-     * @return null
-     *         \n Function failed.
-     *         <p/>
-     *         <b>Errors:</b>
-     *         \par
-     *         If the function fails, lsberrno is set to indicate the error. If lsberrno is
-     *         LSBE_BAD_HPART, (*hostParts)[*numHostParts] is not a host partition known
-     *         to the LSF system. Otherwise, if.ByReference numHostParts is less than its original value,
-     *         * numHostParts is the actual number of host partitions found.
-     *         <p/>
-     *         <b>Equivalent line commands:</b>
-     *         \par
-     *         none
-     *         <p/>
-     *         <b>Files:</b>
-     *         \par
-     *         $LSB_CONFDIR/cluster_name/configdir/lsb.hosts
-     * @param numHostHosts The number of host partition names.
-     * To get information on all host partitions, set hostParts to null;* numHostParts
-     * will be the actual number of host partitions when this call returns.
-     * <p/>
-     * <b>Data Structures:</b>
-     * \par
-     * hostPartInfoEnt
-     * \n hostPartUserInfo
-     * <p/>
-     * <b>Define Statements:</b>
-     * \par
-     * none
-     * #see \ref lsb_usergrpinfo
-     * #see \ref lsb_hostgrpinfo
-     * @param stringArray1 stringArray1
-     */
-    public static native hostPartInfoEnt.ByReference lsb_hostpartinfo(Pointer stringArray1, IntByReference numHostHosts);
-
-    /**
-     * \page lsb_init lsb_init
-     * \brief Initializes the LSF batch library (LSBLIB), and gets the
-     * configuration environment.
-     * <p/>
-     * You must use \ref lsb_init before any other LSBLIB library routine in your
-     * application.
-     * <p/>
-     * <b>\#include <lsf/lsbatch.h>
-     * <p/>
-     * int lsb_init(String appname)</b>
-     *
-     * @return int:-1 \n
-     *         The function failed.
-     *         <p/>
-     *         <b>Errors:</b>
-     *         \par
-     *         If the function fails, lsberrno is set to indicate the error.
-     *         <p/>
-     *         <b>Equivalent line commands:</b>
-     *         \par
-     *         none
-     *         <p/>
-     *         <b>Files:</b>
-     *         \par
-     *         none
-     * @param appName The name of your application.
-     * If appName holds the name of your application, a logfile with the same
-     * name as
-     * your application receives LSBLIB transaction information.
-     * If appName is null, the logfile $LSF_LOGDIR/bcmd receives LSBLIB
-     * transaction information.
-     * <p/>
-     * <b>Data Structures:</b>
-     * \par
-     * none
-     * <p/>
-     * <b>Define Statements:</b>
-     * \par
-     * none
-     * see none
-     */
-    public static native int lsb_init(String appName);
-
-    public static native int sch_lsb_init();
-
-    /**
-     * \page lsb_openjobinfo lsb_openjobinfo
-     * \brief Returns the number of jobs in the master batch daemon.
-     * <p/>
-     * \ref lsb_openjobinfo accesses information about pending, running and
-     * suspended jobs in the master batch daemon. Use \ref lsb_openjobinfo to
-     * create a connection to the master batch daemon. Next, use \ref lsb_readjobinfo
-     * to read job records.Close the connection using \ref lsb_closejobinfo.
-     * <p/>
-     * \ref lsb_openjobinfo opens a connection with mbatchd and returns the total
-     * number of records in the connection on success.
-     * <p/>
-     * <b>\#include <lsf/lsbatch.h>
-     * <p/>
-     * int lsb_openjobinfo(long jobId, String jobName,
-     * String userName, String queueName, String hostName,
-     * int options)</b>
-     *
-     * @param jobId   Passes information about jobs with the given job ID.
-     *                If jobId is 0, \ref lsb_openjobinfo looks to another parameter to return
-     *                information about jobs.If a member of a job array is to be passed, use
-     *                the array form jobID[ i ] where jobID is the job array name, and i is
-     *                the index value.
-     * @param options <lsf/lsbatch.h> defines the flags shown in
-     *                \ref defs_lsb_openjobinfo constructed from bits. Use the bitwise OR to set more
-     *                than one flag.
-     *                <p/>
-     *                <b>Data Structures:</b>
-     *                \par
-     *                none
-     *                <p/>
-     *                <b>Define Statements:</b>
-     *                \par
-     *                \ref defs_lsb_openjobinfo_a
-     *                \n \ref defs_lsb_openjobinfo
-     * @return int:-1 \n
-     *         The function failed.
-     *         <p/>
-     *         \b Errors:
-     *         \par
-     *         If the function fails, lsberrno is set to indicate the error.
-     *         <p/>
-     *         <b>Equivalent line command:</b>
-     *         \par
-     *         bjobs
-     *         <p/>
-     *         \b Files:
-     *         \par
-     *         ${LSF_ENVDIR:-/etc}/lsf.conf
-     * @param jobName Passes information about jobs with the given job name.
-     * If jobName is null, \ref lsb_openjobinfo looks to another parameter to return
-     * information about jobs.
-     * @param userName Passes information about jobs submitted by the named user
-     * or user group, or by all users if user is all. If user is null,
-     * \ref lsb_openjobinfo assumes the user is invoking this call.
-     * @param queueName Passes information about jobs belonging to the named
-     * queue. If queue is null,jobs in all the queues of the batch system are counted.
-     * @param hostName Passes information about jobs on the named host, host
-     * group or cluster name. If host is null, jobs on all hosts of the batch
-     * system will be considered.
-     * #see \ref               lsb_openjobinfo_a
-     * #see \ref               lsb_openjobinfo_a_ext
-     * #see \ref               lsb_openjobinfo_req
-     * #see \ref               lsb_closejobinfo
-     * #see \ref               lsb_readjobinfo
-     * #see \ref               lsb_readframejob
-     */
-    public static native int lsb_openjobinfo(long jobId, String jobName, String userName, String queueName, String hostName, int options);
-
-    /**
-     * \page lsb_openjobinfo_a lsb_openjobinfo_a
-     * \brief Provides the name and number of jobs and hosts in the master batch
-     * daemon.
-     * <p/>
-     * \ref lsb_openjobinfo_a provides more information on pending, running and
-     * suspended jobs than \ref lsb_openjobinfo. Use \ref lsb_openjobinfo_a to create a
-     * connection to the master batch daemon. Next, use \ref lsb_readjobinfo to read
-     * job records. Close the connection using \ref lsb_closejobinfo.
-     * <p/>
-     * \ref lsb_openjobinfo_a passes information about jobs based on the value of
-     * jobId,jobName, userName, queueName, or hostName. Only one parameter can be
-     * chosen. The other parameters must be null or 0.
-     * <p/>
-     * <b>\#include <lsf/lsbatch.h>
-     * <p/>
-     * jobInfoHead.ByReference lsb_openjobinfo_a(long jobId,
-     * String jobName,
-     * String userName,
-     * String queueName,
-     * String hostName,
-     * int options)</b>
-     *
-     * @param jobId   Passes information about jobs with the given job ID. If jobId
-     *                is 0, \ref lsb_openjobinfo looks to another parameter to return information
-     *                about jobs.
-     *                If information about a member of a job array is to be passed, use the array
-     *                form jobID[ i ] where jobID is the job array name, and i is the index value.
-     * @param options <lsf/lsbatch.h> defines the flags shown in def_lsb_openjobinfo_a
-     *                constructed from bits. Use the bitwise OR to set more than one flag.
-     *                <p/>
-     *                <b>Data Structures:</b>
-     *                \par
-     *                jobInfoHead
-     *                <p/>
-     *                <b>Define Statements:</b>
-     *                \par
-     *                \ref defs_lsb_openjobinfo_a
-     * @return null \n
-     *         The function failed.
-     *         <p/>
-     *         \b Errors:
-     *         \par
-     *         If the function fails, lsberrno is set to indicate the error.
-     *         <p/>
-     *         <b>Equivalent line command:</b>
-     *         \par
-     *         bjobs
-     *         <p/>
-     *         <b>Files:</b>
-     *         \par
-     *         ${LSF_ENVDIR:-/etc}/lsf.conf \n
-     *         $LSB_CONFDIR/cluster_name/configdir/lsb.params
-     * @param jobName Passes information about jobs with the given job name. If
-     * jobName is null, \ref lsb_openjobinfo looks to another parameter to return
-     * information about jobs.
-     * @param userName Passes information about jobs submitted by the named user
-     * or user group, or by all users if userName is all. If userName is null,
-     * \ref lsb_openjobinfo_a assumes the user is invoking this call.
-     * @param queueName Passes information about jobs belonging to the named queue.
-     * If queueName is null, jobs in all queues of the batch system will be
-     * considered.
-     * @param hostName Passes information about jobs on the named host, host group
-     * or cluster name. If hostName is null, jobs on all hosts of the batch system
-     * will be considered.
-     * #see \ref lsb_openjobinfo
-     * #see \ref lsb_closejobinfo
-     * #see \ref lsb_readjobinfo
-     * #see \ref lsb_readframejob
-     */
-    public static native jobInfoHead.ByReference lsb_openjobinfo_a(long jobId, String jobName, String userName, String queueName, String hostName, int options);
-
-    /**
-     * \page lsb_openjobinfo_a_ext lsb_openjobinfo_a_ext
-     * \brief  Returns the name and number of jobs and hosts in the master batch
-     * daemon with additional host group information.
-     * <p/>
-     * \ref lsb_openjobinfo_a_ext is run from \ref lsb_openjobinfo_a using the same
-     * parameters and provides the same information as \ref lsb_openjobinfo_a, but with
-     * additional host group information.
-     * <p/>
-     * \ref lsb_openjobinfo_a_ext passes information about jobs based on the value of
-     * jobId, jobName, userName, queueName, or hostName. Only one parameter can be
-     * chosen. The other parameters must be null or 0.
-     * <p/>
-     * <b>\#include <lsf/lsbatch.h>
-     * <p/>
-     * jobInfoHeadExt.ByReference
-     * lsb_openjobinfo_a_ext (long jobId, String jobName,
-     * String userName, String queueName,
-     * String hostName, int options)</b>
-     *
-     * @param jobId   Passes information about jobs with the given job ID. If jobId
-     *                is 0, \ref lsb_openjobinfo_a_ext looks to another parameter to return information
-     *                about jobs. If information about a member of a job array is to be passed, use
-     *                the array form jobID[ i ] where jobID is the job array name, and i is the
-     *                index value.
-     * @param options <lsf/lsbatch.h> defines the flags shown in
-     *                def_lsb_openjobinfo_a constructed from bits. Use the bitwise OR to set more
-     *                than one flag.
-     *                <p/>
-     *                <b>Data Structures:</b>
-     *                \par
-     *                jobInfoHeadExt
-     *                <p/>
-     *                <b>Define Statements:</b>
-     *                \par
-     *                \ref defs_lsb_openjobinfo_a
-     * @return null \n
-     *         The function failed.
-     *         <p/>
-     *         \b Errors:
-     *         \par
-     *         If the function fails, lsberrno is set to indicate the error.
-     *         <p/>
-     *         <b>Equivalent line command:</b>
-     *         \par
-     *         bjobs
-     *         <p/>
-     *         <b>Files:</b>
-     *         \par
-     *         ${LSF_ENVDIR:-/etc}/lsf.conf \n
-     *         $LSB_CONFDIR/cluster_name/configdir/lsb.params
-     * @param jobName Passes information about jobs with the given job name. If
-     * jobName is null, \ref lsb_openjobinfo_a_ext looks to another parameter to return
-     * information about jobs.
-     * @param userName Passes information about jobs submitted by the named user
-     * or user group, or by all users if userName is all. If userName is null,
-     * \ref lsb_openjobinfo_a_ext assumes the user is invoking this call.
-     * @param queueName Passes information about jobs belonging to the named queue.
-     * If queueName is null, jobs in all queues of the batch system will be considered.
-     * @param hostName Passes information about jobs on the named host, host group
-     * or cluster name. If hostName is null, jobs on all hosts of the batch system
-     * will be considered.
-     * #see \ref lsb_openjobinfo
-     * #see \ref lsb_closejobinfo
-     * #see \ref lsb_readjobinfo
-     * #see \ref lsb_readframejob
-     */
-    public static native jobInfoHeadExt.ByReference lsb_openjobinfo_a_ext(long jobId, String jobName, String userName, String queueName, String hostName, int options);
-
-    /**
-     * \page lsb_openjobinfo_req lsb_openjobinfo_req
-     * \brief  Extensible API.
-     * <p/>
-     * Instead of submitting individual requests this API defines
-     * all job info requests as objects, and can easily be enhanced to include
-     * additional requests.
-     * <p/>
-     * <b>\#include <lsf/lsbatch.h>
-     * <p/>
-     * jobInfoHeadExt.ByReference lsb_openjobinfo_req (jobInfoReq.ByReference req)</b>
-     *
-     * @return null
-     *         \n Function failed.
-     *         <p/>
-     *         \b Errors:
-     *         \par
-     *         If the function fails, lsberrno is set to indicate the error.
-     *         <p/>
-     *         <b>Equivalent line command:</b>
-     *         \par
-     *         none
-     *         <p/>
-     *         \b Files:
-     *         \par
-     *         ${LSF_ENVDIR:-/etc}/lsf.conf
-     * @param req  job information request.
-     * <p/>
-     * <b>Data Structures:</b>
-     * \par
-     * jobInfoReq
-     * \n \ref jobInfoHeadExt
-     * <p/>
-     * <b>Define Statements:</b>
-     * \par
-     * \ref defs_lsb_openjobinfo_a
-     * \n \ref defs_lsb_openjobinfo
-     * #see \ref               lsb_openjobinfo_a
-     * #see \ref               lsb_openjobinfo_a_ext
-     * #see \ref               lsb_closejobinfo
-     * #see \ref               lsb_readjobinfo
-     * #see \ref               lsb_readframejob
-     */
-    public static native jobInfoHeadExt.ByReference lsb_openjobinfo_req(jobInfoReq req);
-
-    public static native int lsb_queryjobinfo(int int1, NativeLongByReference long1, String string1);
-
-    public static native jobInfoEnt.ByReference lsb_fetchjobinfo(IntByReference int1, int int2, NativeLongByReference long1, String string1);
-
-    public static native jobInfoEnt.ByReference lsb_fetchjobinfo_ext(IntByReference int1, int int2, NativeLongByReference long1, String string1, jobInfoHeadExt jobInfoHeadExt);
-
-    /**
-     * \page lsb_readjobinfo lsb_readjobinfo
-     * \brief Returns the next job information record in mbatchd.
-     * <p/>
-     * \ref lsb_readjobinfo reads the number of records defined by the more parameter.
-     * The more parameter receives its value from either \ref lsb_openjobinfo or
-     * \ref lsb_openjobinfo_a. Each time \ref lsb_readjobinfo is called, it returns one
-     * record from mbatchd. Use \ref lsb_readjobinfo in a loop and use more to
-     * determine how many times to repeat the loop to retrieve job information records.
-     * <p/>
-     * <b>\#include <lsf/lsbatch.h>
-     * \n \#include <time.h>
-     * \n \#include <lsf/lsf.h>
-     * <p/>
-     * jobInfoEnt.ByReference lsb_readjobinfo(IntByReference more)</b>
-     *
-     * @return null
-     *         \n Function failed.
-     *         <p/>
-     *         \b Errors:
-     *         \par
-     *         If there are no more records, then lsberrno is set to LSBE_EOF.
-     *         <p/>
-     *         <b>Equivalent line commands:</b>
-     *         \par
-     *         none
-     *         <p/>
-     *         <b>Files:</b>
-     *         \par
-     *         $LSB_CONFDIR/cluster_name/configdir/lsb.queues
-     * @param more Number of job records in the master batch daemon.
-     * <p/>
-     * <b>Data Structures:</b>
-     * \par
-     * jobInfoEnt
-     * \n jobExternalMsgReply
-     * \n jRusage
-     * \n pidInfo
-     * \n reserveItem
-     * \n submit
-     * <p/>
-     * <b>Define Statements:</b>
-     * \par
-     * \ref job_states
-     * \n \ref jobgroup_counterIndex
-     * \n \ref group_nodetypes
-     * #see \ref lsb_openjobinfo
-     * #see \ref lsb_openjobinfo_a
-     * #see \ref lsb_closejobinfo
-     * #see \ref lsb_hostinfo
-     * #see \ref lsb_pendreason
-     * #see \ref lsb_queueinfo
-     * #see \ref lsb_suspreason
-     */
-    public static native jobInfoEnt.ByReference lsb_readjobinfo(IntByReference more);
-
-    /**
-     * \page  lsb_submit lsb_submit
-     * Submits or restarts a job in the batch system.
-     * <p/>
-     * \ref lsb_submit submits or restarts a job in the batch system according to the
-     * jobSubReq specification.
-     * <p/>
-     * <b>\#include <lsf/lsbatch.h>
-     * <p/>
-     * long lsb_submit (submit.ByReference jobSubReq,
-     * submitReply.ByReference jobSubReply)</b>
-     *
-     * @return long:-1 \n
-     *         Function failed.
-     *         <p/>
-     *         \b Errors:
-     *         \par
-     *         If the function fails, lsberrno is set to indicate the error.
-     *         If the environment variable BSUB_CHK_RESREQ is set, the value of lsberrno is
-     *         either LSBE_RESREQ_OK or LSBE_RESREQ_ERR, depending on the result of
-     *         resource requirement string checking. The badJobName field in the submitReply
-     *         structure contains the detailed error message.
-     *         <p/>
-     *         <b>Equivalent line commands:</b>
-     *         \par
-     *         bsub
-     *         \n brestart
-     *         <p/>
-     *         \b Files:
-     *         \par
-     *         ${LSF_ENVDIR:-/etc}/lsf.conf
-     * @param jobSubReq
-     * Describes the requirements for job submission to the batch system.
-     * A job that does not meet these requirements is not submitted to the
-     * batch system and an error is returned.
-     * @param jobSubReply
-     * Describes the results of the job submission to the batch system.
-     * <p/>
-     * <b>Data Structures:</b>
-     * \par
-     * submit
-     * \n submitReply
-     * <p/>
-     * <b>Define Statements:</b>
-     * \par
-     * \ref lsb_submit_options
-     * \n \ref lsb_submit_options2
-     * \n \ref lsb_submit_options3
-     * #see \ref lsb_modify
-     * #see \ref ls_info
-     * #see \ref lsb_queueinfo
-     */
-    public static native long lsb_submit(submit jobSubReq, submitReply jobSubReply);
-
-    /**
-     * \page lsb_readjobinfo_cond lsb_readjobinfo_cond
-     * \brief Returns the next job information record for condensed host groups
-     * in mbatchd.
-     * <p/>
-     * \ref lsb_readjobinfo_cond reads the number of records defined by the more
-     * parameter. The more parameter receives its value from either \ref lsb_openjobinfo
-     * or \ref lsb_openjobinfo_a. Each time \ref lsb_readjobinfo_cond is called, it
-     * returns one record from mbatchd. Use \ref lsb_readjobinfo_cond in a loop and use
-     * more to determine how many times to repeat the loop to retrieve job information
-     * records.
-     * <p/>
-     * \ref lsb_readjobinfo_cond differs from \ref lsb_readjobinfo in that if jInfoHExt
-     * is not null, \ref lsb_readjobinfo_cond substitutes hostGroup (if it is a condensed
-     * host group) for job execution hosts.
-     * <p/>
-     * <b>\#include <lsf/lsbatch.h>
-     * \n \#include <time.h>
-     * \n \#include <lsf/lsf.h>
-     * <p/>
-     * jobInfoEnt.ByReference lsb_readjobinfo_cond(IntByReference more,
-     * jobInfoHeadExt.ByReference jInfoHExt);</b>
-     *
-     * @return null
-     *         \n Function failed.
-     *         <p/>
-     *         \b Errors:
-     *         \par
-     *         If there are no more records, then lsberrno is set to LSBE_EOF.
-     *         <p/>
-     *         <b>Equivalent line commands:</b>
-     *         \par
-     *         none
-     *         <p/>
-     *         <b>Files:</b>
-     *         \par
-     *         $LSB_CONFDIR/cluster_name/configdir/lsb.queues
-     * @param more Number of job records in the master batch daemon.
-     * @param jInfoHExt Job information header info for the condensed host group.
-     * <p/>
-     * <b>Data Structures:</b>
-     * \par
-     * jobInfoEnt
-     * \n jobExternalMsgReply
-     * \n jRusage
-     * \n pidInfo
-     * \n reserveItem
-     * \n submit
-     * <p/>
-     * <b>Define Statements:</b>
-     * \par
-     * \ref external_msg_processing
-     * \n \ref group_nodetypes
-     * #see \ref lsb_openjobinfo
-     * #see \ref lsb_openjobinfo_a
-     * #see \ref lsb_closejobinfo
-     * #see \ref lsb_hostinfo
-     * #see \ref lsb_pendreason
-     * #see \ref lsb_queueinfo
-     * #see \ref lsb_readjobinfo
-     * #see \ref lsb_suspreason
-     */
-    public static native jobInfoEnt.ByReference lsb_readjobinfo_cond(IntByReference more, jobInfoHeadExt jInfoHExt);
-
-    /**
-     * \page lsb_readframejob lsb_readframejob
-     * \brief Returns all frame job information that matches the specified
-     * parameters and fills related information into the frame job information table.
-     * <p/>
-     * \ref lsb_readframejob gets all frame jobs information that matches the specified
-     * parameters and fills related information into the frame job information table.
-     * \ref lsb_readframejob is a wrapper of \ref lsb_openjobinfo, \ref lsb_readjobinfo, and
-     * \ref lsb_closejobinfo. Memory allocated in frameJobInfoTbl will be freed by
-     * user.
-     * <p/>
-     * <b>\#include <lsf/lsbatch.h>
-     * <p/>
-     * int lsb_readframejob(long jobId, String frameName,
-     * String user, String queue, String host, int options,
-     * frameJobInfo.ByReference[] frameJobInfoTbl)</b>
-     *
-     * @param jobId   Get information about the frame jobs with the given job ID.
-     *                If jobID is 0, get information about frame jobs which satisfy the other
-     *                specifications. If a job in a job array is to be modified, use the array
-     *                form jobID[i] where jobID is the job array name, and i is the index value.
-     * @param options <lsf/lsbatch.h> defines the following flags \ref defs_lsb_openjobinfo_a
-     *                constructed from bits. Use the bitwise OR to set more than one flag.
-     * @return int:-1
-     *         \n Function failed.
-     *         <p/>
-     *         \b Errors:
-     *         \par
-     *         If the function fails, lsberrno is set to indicate the error.
-     *         <p/>
-     *         <b>Equivalent line commands:</b>
-     *         \par
-     *         none
-     *         <p/>
-     *         <b>Files:</b>
-     *         \par
-     *         none
-     * @param frameName Get information about frame jobs with the given frame name.
-     * @param user Get information about frame jobs submitted by the named user
-     * or user group, or by all users if user is all. If user is null, the user
-     * invoking this routine is assumed.
-     * @param queue Get information about frame jobs belonging to the named queue.
-     * If queue is null,jobs in all queues of the batch system will be considered.
-     * @param host Get information about frame jobs on the named host, host
-     * group or cluster name.If host is null, jobs on all hosts of the batch
-     * system will be considered.
-     * @param frameJobInfoTbl The result of all frame jobs information.
-     * <p/>
-     * <b>Data Structures:</b>
-     * \par
-     * \n frameJobInfo
-     * \n frameElementInfo
-     * <p/>
-     * <b>Define Statements:</b>
-     * \par
-     * none
-     * #see \ref lsb_openjobinfo
-     * #see \ref lsb_readjobinfo
-     * #see \ref lsb_closejobinfo
-     */
-
-    public static native int lsb_readframejob(long jobId, String frameName, String user, String queue, String host, int options, Pointer frameJobInfoTbl);
-
-    /**
-     * \page lsb_closejobinfo lsb_closejobinfo
-     * \brief Closes job information connection with the master batch daemon.
-     * <p/>
-     * Use \ref lsb_closejobinfo to close the connection to the master batch daemon
-     * after opening a job information connection with \ref lsb_openjobinfo and reading
-     * job records with \ref lsb_readjobinfo.
-     * <p/>
-     * <b>\#include <lsf/lsbatch.h>
-     * <p/>
-     * void lsb_closejobinfo()</b>
-     *
-     * param void \n
-     *             <p/>
-     *             <b>Data Structures:</b>
-     *             \par
-     *             none
-     *             <p/>
-     *             <b>Define Statements:</b>
-     *             \par
-     *             none
-     * return void
-     *         \n There is no return value.
-     *         <p/>
-     *         <p/>
-     *         \b Errors:
-     *         \par
-     *         On failure, lsberrno is set to indicate the error.
-     *         <p/>
-     *         <b>Equivalent line commands:</b>
-     *         \par
-     *         none
-     *         <p/>
-     *         <b>Files:</b>
-     *         \par
-     *         ${LSF_ENVDIR:-/etc}/lsf.conf
-     * #see \ref lsb_openjobinfo
-     * #see \ref lsb_openjobinfo_a
-     * #see \ref lsb_readjobinfo
-     */
-
-    public static native void lsb_closejobinfo();
-
-    /**
-     * \page  lsb_hostcontrol lsb_hostcontrol
-     * Opens or closes a host, or restarts or shuts down its slave batch daemon.
-     * <p/>
-     * \ref lsb_hostcontrol opens or closes a host, or restarts or shuts down its
-     * slave batch daemon. Any program using this API must be setuid to root if
-     * LSF_AUTH is not defined in the lsf.conf file.
-     * <p/>
-     * To restart the master batch daemon, mbatchd, in order to use updated
-     * batch LSF configuration files, use \ref lsb_reconfig.
-     * <p/>
-     * <b>\#include <lsf/lsbatch.h>
-     * <p/>
-     * int lsb_hostcontrol (hostCtrlReq.ByReference req)</b>
-     *
-     * @return int:-1 \n
-     *         Function failed.
-     *         <p/>
-     *         <b>Errors:</b>
-     *         \par
-     *         If the function fails, lsberrno is set to indicate the error.
-     *         <p/>
-     *         <b>Equivalent line commands:</b>
-     *         \par
-     *         none
-     *         <p/>
-     *         <b>Files:</b>
-     *         \par
-     *         ${LSF_ENVDIR:-/etc}/lsf.conf
-     * @param req The host control request.
-     * <p/>
-     * <b>Data Structures:</b>
-     * \par
-     * hostCtrlReq
-     * <p/>
-     * <b>Define Statements:</b>
-     * \par
-     * \ref host_ctrl_option
-     * #see \ref lsb_reconfig
-     */
-    public static native int lsb_hostcontrol(hostCtrlReq req);
-
-    public static native int lsb_hghostcontrol(hgCtrlReq hostCtrlReq1, hgCtrlReply reply);
-
-    /**
-     * \page lsb_queueinfo lsb_queueinfo
-     * \brief Returns information about batch queues.
-     * <p/>
-     * \ref lsb_queueinfo gets information about batch queues. See lsb.queues for more
-     * information about queue parameters.
-     * <p/>
-     * <b>\#include <lsf/lsbatch.h>
-     * <p/>
-     * queueInfoEnt.ByReference lsb_queueinfo(String[] queues,
-     * IntByReference numQueues, String hosts, String users,
-     * int options)</b>
-     *
-     * @param options Reserved for future use; supply 0.
-     *                <p/>
-     *                <b>Data Structures:</b>
-     *                \par
-     *                queueInfoEnt
-     *                \n shareAcctInfoEnt
-     *                \n apsFactorInfo
-     *                \n apsFactorMap
-     *                \n apsLongNameMap
-     *                <p/>
-     *                <b>Define Statements:</b>
-     *                \par
-     *                \ref queue_status
-     *                \n \ref queue_attribute
-     * @return null
-     *         \n Function Failed.
-     *         <p/>
-     *         \b Errors:
-     *         \par
-     *         If the function fails, lsberrno is set to indicate the error.
-     *         If lsberrno is LSBE_BAD_QUEUE, (*queues)[*numQueues] is not a queue known
-     *         to the LSF system. Otherwise, if.ByReference numQueues is less than its original value,
-     *         * numQueues is the actual number of queues found.
-     *         <p/>
-     *         <b>Equivalent line commands:</b>
-     *         \par
-     *         bqueues
-     *         <p/>
-     *         \b Files:
-     *         \par
-     *         $LSB_CONFDIR/cluster_name/configdir/lsb.queues
-     * @param queues An array of names of queues of interest.
-     * @param numQueues The number of queue names. To get information on all queues,
-     * set.ByReference numQueues to 0;* numQueues will be updated to the actual number of
-     * queues when this call returns.If.ByReference numQueues is 1 and queues is null,
-     * information on the system default queue is returned.
-     * @param hosts The host or cluster names. If hosts is not null, then only
-     * the queues that are enabled for the hosts are of interest.
-     * @param user The name of user. If user is not null, then only the queues
-     * that are enabled for the user are of interest.
-     * #see \ref lsb_hostinfo
-     * #see \ref lsb_userinfo
-     * #see \ref lsb_usergrpinfo
-     */
-    public static native queueInfoEnt.ByReference lsb_queueinfo(Pointer queues, IntByReference numQueues, String hosts, String user, int options);
-
-    /**
-     * \page lsb_reconfig lsb_reconfig
-     * \brief Dynamically reconfigures an LSF batch system.
-     * <p/>
-     * \ref lsb_reconfig dynamically reconfigures an LSF batch system to pick up new
-     * configuration parameters and changes to the job queue setup since system
-     * startup or the last reconfiguration (see lsb.queues).
-     * <p/>
-     * To restart a slave batch daemon, use \ref lsb_hostcontrol. This call is
-     * successfully invoked only by root or by the LSF administrator.
-     * <p/>
-     * Any program using this API must be setuid to root if LSF_AUTH is not
-     * defined in the lsf.conf file.
-     * <p/>
-     * <b>\#include <lsf/lsbatch.h>
-     * <p/>
-     * int lsb_reconfig (mbdCtrlReq.ByReference req)</b>
-     *
-     * @return int:-1 \n
-     *         The function failed.
-     *         <p/>
-     *         \b Errors:
-     *         \par
-     *         On failure, lsberrno is set to indicate the error.
-     *         <p/>
-     *         <b>Equivalent line commands:</b>
-     *         \par
-     *         badmin reconfig
-     *         <p/>
-     *         <b>Files:</b>
-     *         \par
-     *         ${LSF_ENVDIR:-/etc}/lsf.conf
-     * @param req mbatchd control request.
-     * <p/>
-     * <b>Data Structures:</b>
-     * \par
-     * mbdCtrlReq
-     * <p/>
-     * <b>Define Statements:</b>
-     * \par
-     * \ref mbd_operation
-     * #see \ref lsb_openjobinfo
-     */
-    public static native int lsb_reconfig(mbdCtrlReq req);
-
-    /**
-     * \page lsb_signaljob lsb_signaljob
-     * \brief Sends a signal to a job.
-     * <p/>
-     * Use \ref lsb_signaljob when migrating a job from one host to another. Use
-     * \ref lsb_signaljob to stop or kill a job on a host before using \ref lsb_mig to
-     * migrate the job. Next, use \ref lsb_signaljob to continue the stopped job at
-     * the specified host.
-     * <p/>
-     * Generally, use \ref lsb_signaljob to apply any UNIX signal to a job or process.
-     * <p/>
-     * Any program using this API must be setuid to root if LSF_AUTH is not defined
-     * in the lsf.conf file.
-     * <p/>
-     * <b>\#include <lsf/lsbatch.h>
-     * <p/>
-     * int lsb_signaljob (long jobId, int sigValue)</b>
-     *
-     * @param jobId    The job to be signaled. If a job in a job array is to be
-     *                 signaled, use the array form jobID[ i ] where jobID is the job array name,
-     *                 and i is the index value.
-     * @param sigValue SIGSTOP, SIGCONT, SIGKILL or some other UNIX signal.
-     *                 <p/>
-     *                 <b>Data Structures:</b>
-     *                 \par
-     *                 none
-     *                 <p/>
-     *                 <b>Define Statements:</b>
-     *                 \par
-     *                 none
-     * @return int:-1 \n
-     *         The function failed.
-     *         <p/>
-     *         \b Errors:
-     *         \par
-     *         If the function fails, lsberrno is set to indicate the error.
-     *         <p/>
-     *         <b>Equivalent line commands:</b>
-     *         \par
-     *         bkill \n
-     *         bstop \n
-     *         bresume
-     *         <p/>
-     *         <b>Files:</b>
-     *         \par
-     *         ${LSF_ENVDIR:-/etc}/lsf.conf
-     * #see \ref lsb_chkpntjob
-     * #see \ref lsb_forcekilljob
-     * #see \ref lsb_mig
-     */
-
-    public static native int lsb_signaljob(long jobId, int sigValue);
-
-    /**
-     * \page lsb_killbulkjobs lsb_killbulkjobs
-     * \brief Kills bulk jobs as soon as possible.
-     * <p/>
-     * Use \ref lsb_killbulkjobs to kill bulk jobs on a local host immediately, or
-     * to kill other jobs as soon as possible. If mbatchd rejects the request, it
-     * issues null as the reservation ID.
-     * <p/>
-     * <b>\#include <lsf/lsbatch.h>
-     * <p/>
-     * int lsb_killbulkjobs(signalBulkJobs.ByReference s)</b>
-     *
-     * @return int:-1 \n
-     *         The bulk jobs were not killed.
-     *         <p/>
-     *         \b Error:
-     *         \par
-     *         On failure, lsberrno is set to indicate the error.
-     *         <p/>
-     *         <b>Equivalent line command:</b>
-     *         \par
-     *         bkill -b
-     *         <p/>
-     *         <b>Files:</b>
-     *         \par
-     *         none
-     * @param s The signal to a group of jobs.
-     * <p/>
-     * <b>Data Structures:</b>
-     * \par
-     * signalBulkJobs
-     * <p/>
-     * <b>Define Statements:</b>
-     * \par
-     * none
-     * see none
-     */
-
-    public static native int lsb_killbulkjobs(signalBulkJobs s);
-
-    public static native int lsb_msgjob(long long1, String s);
-
-    /**
-     * \page lsb_chkpntjob lsb_chkpntjob
-     * \brief Checkpoints a job.
-     * <p/>
-     * Checkpoints a job.
-     * <p/>
-     * <b>\#include <lsf/lsbatch.h>
-     * <p/>
-     * int lsb_chkpntjob(long jobId, int period, int options)</b>
-     *
-     * @param jobId   The job to be checkpointed.
-     * @param period  The checkpoint period in seconds. The value 0
-     *                disables periodic checkpointing.
-     * @param options The bitwise inclusive OR of some of the following:
-     *                \n LSB_CHKPNT_KILL
-     *                Checkpoint and kill the job as an atomic action.
-     *                \n LSB_CHKPNT_FORCE
-     *                Checkpoint the job even if non-checkpointable conditions exist.
-     *                <p/>
-     *                <b>Data Structures:</b>
-     *                \par
-     *                none
-     *                <p/>
-     *                <b>Define Statements:</b>
-     *                \par
-     *                \ref chkpnt_job_option
-     * @return int:-1 \n
-     *         The function failed.
-     *         <p/>
-     *         \note Any program using this API must be setuid to root if LSF_AUTH
-     *         is not defined in the lsf.conf file.
-     *         <p/>
-     *         \b Errors:
-     *         \par
-     *         On failure, lsberrno is set to indicate the error.
-     *         <p/>
-     *         <b>Equivalent line commands:</b>
-     *         \par
-     *         bchkpnt
-     *         <p/>
-     *         <b>Files:</b>
-     *         \par
-     *         ${LSF_ENVDIR:-/etc}/lsf.conf
-     * see none
-     */
-    public static native int lsb_chkpntjob(long jobId, NativeLong period, int options);
-
-    /**
-     * \page lsb_deletejob lsb_deletejob
-     * \brief Kills a job in a queue
-     * <p/>
-     * Use \ref lsb_deletejob to send a signal to kill a running, user-suspended,
-     * or system-suspended job. The job can be requeued or deleted from the batch
-     * system.If the job is requeued, it retains its submit time but it is dispatched
-     * according to its requeue time. When the job is requeued, it is assigned the
-     * PEND status and re-run.If the job is deleted from the batch system, it is
-     * no longer available to be requeued.
-     * <p/>
-     * <b>\#include <lsf/lsbatch.h>
-     * <p/>
-     * int lsb_deletejob (long jobId, int times, int options)</b>
-     *
-     * @param jobId   The job to be killed. If an element of a job array is to be
-     *                killed, use the array form jobID[i] where jobID is the job array name,
-     *                and i is the index value.
-     * @param times   Original job submit time.
-     * @param options If the preprocessor macro LSB_KILL_REQUEUE in lsbatch.h is
-     *                compared with options and found true, then requeue the job using the same job ID.
-     *                If the preprocessor macro LSB_KILL_REQUEUE in lsbatch.h is compared with
-     *                options and found false, then the job is deleted from the batch system.
-     *                <p/>
-     *                <b>Data Structures:</b>
-     *                \par
-     *                none
-     *                <p/>
-     *                <b>Define Statements:</b>
-     *                \par
-     *                \ref kill_requeue
-     * @return int:-1 \n
-     *         The function failed.
-     *         <p/>
-     *         \note Any program using this API must be setuid to root if LSF_AUTH is not defined in the
-     *         \n lsf.conf file.
-     *         <p/>
-     *         \b Errors:
-     *         \par
-     *         On failure, lsberrno is set to indicate the error.
-     *         <p/>
-     *         <b>Equivalent line commands:</b>
-     *         \par
-     *         bkill
-     *         \n brequeue -J
-     *         <p/>
-     *         <b>Files:</b>
-     *         \par
-     *         ${LSF_ENVDIR:-/etc}/lsf.conf
-     * #see \ref lsb_signaljob
-     * #see \ref lsb_chkpntjob
-     */
-    public static native int lsb_deletejob(long jobId, int times, int options);
-
-    /**
-     * \page lsb_forcekilljob lsb_forcekilljob
-     * \brief This function is used to send special force kill signal.
-     * <p/>
-     * <b>\#include <lsf/lsbatch.h>
-     * <p/>
-     * int lsb_forcekilljob(long jobId)</b>
-     *
-     * @param jobId which job is to be killed.
-     *              <p/>
-     *              <b>Data Structures:</b>
-     *              \par
-     *              none
-     *              <p/>
-     *              <b>Define Statements:</b>
-     *              \par
-     *              none
-     * @return int:-1
-     *         \n Function failed.
-     *         <p/>
-     *         \b Errors:
-     *         \par
-     *         On failure, lsberrno is set to indicate the error.
-     *         <p/>
-     *         <b>Equivalent line commands:</b>
-     *         \par
-     *         none
-     *         <p/>
-     *         <b>Files:</b>
-     *         \par
-     *         none
-     * #see \ref lsb_signaljob
-     */
-    public static native int lsb_forcekilljob(long jobId);
-
-    /**
-     * \page lsb_submitframe lsb_submitframe
-     * \brief Submits a frame job to the batch system.
-     * <p/>
-     * \ref lsb_submitframe submits a frame job to the batch system according to the
-     * jobSubReq specification and frameExp.
-     * <p/>
-     * Any program using this API must be setuid to root if LSF_AUTH is not defined
-     * in the lsf.conf file.
-     * <p/>
-     * <b>\#include <lsf/lsbatch.h>
-     * <p/>
-     * int lsb_submitframe (submit.ByReference jobSubReq, String frameExp,
-     * submitReply.ByReference jobSubReply)</b>
-     *
-     * @return int:-1 \n
-     *         Function failed.
-     *         <p/>
-     *         \b Errors:
-     *         \par
-     *         If the function fails, lsberrno is set to indicate the error and jobSubReply gives
-     *         additional information about the error.
-     *         <p/>
-     *         <b>Equivalent line commands:</b>
-     *         \par
-     *         none
-     *         <p/>
-     *         <b>Files:</b>
-     *         \par
-     *         ${LSF_ENVDIR:-/etc}/lsf.conf
-     * @param jobSubReq Describes the requirements for job submission to the
-     * batch system. A job that does not meet these requirements is not submitted
-     * to the batch system and an error is returned. \n
-     * See \ref lsb_submit for descriptions of the submit structure fields.
-     * @param frameExp The syntax of frameExp is: \n
-     * <b>frame_name[indexlist]</b> \n
-     * frame_name is any name consisting of alphanumerics, periods, forward slashes,
-     * dashes or underscores. indexlist is a list of one or more frame indexes,
-     * separated by commas. These indexes can each be either a single integer or
-     * a range, specified in the following format: \n
-     * <b>start-end[xstep[:chunk]]</b> \n
-     * start, end, step, and chunk are integers, but chunk must be positive.
-     * If step and
-     * chunk are ommitted, the default value is 1.\n
-     * An example of a valid expression for frameExp is:\n
-     * <b>Frame_job_1[5,10-15,20-30x2:3]</b>
-     * @param jobSubReply Describes the results of the job submission to the
-     * batch system. \n
-     * See \ref lsb_submit for descriptions of the submitReply structure
-     * fields.
-     * <p/>
-     * <b>Data Structures:</b>
-     * \par
-     * submit
-     * \n submitReply
-     * <p/>
-     * <b>Define Statements:</b>
-     * \par
-     * \ref lsb_submit_options
-     * \n \ref lsb_submit_options2
-     * \n \ref lsb_submit_options3
-     * see none
-     */
-    public static native int lsb_submitframe(submit jobSubReq, String frameExp, submitReply jobSubReply);
-
-    /**
-     * \page lsb_requeuejob lsb_requeuejob
-     * \brief Requeues job arrays, jobs in job arrays, and individual jobs.
-     * <p/>
-     * Use \ref lsb_requeuejob to requeue job arrays, jobs in job arrays, and individual
-     * jobs that are running, pending, done, or exited. In a job array, you can
-     * requeue all the jobs or requeue individual jobs of the array.
-     * <p/>
-     * \ref lsb_requeuejob requeues jobs as if the jobs were in an array. A job not in an
-     * array is considered to be a job array composed of one job.
-     * <p/>
-     * Jobs in a job array can be requeued independently of each other regardless of
-     * any job's status (running, pending, exited, done). A requeued job is requeued
-     * to the same queue it was originally submitted from or switched to. The job
-     * submission time does not change so a requeued job is placed at the top of the
-     * queue. Use \ref lsb_movejob to place a job at the bottom or any other position
-     * in a queue.
-     * <p/>
-     * If a clean period is reached before \ref lsb_requeuejob is called, the cleaned
-     * jobs cannot be requeued. Set the variable CLEAN_PERIOD in your lsb.params file
-     * to determine the amount of time that job records are kept in MBD core memory
-     * after jobs have finished or terminated.
-     * <p/>
-     * To requeue a job assign values to the data members of the jobrequeue data
-     * structure, process command line options in case the user has specified a
-     * different job, and call \ref lsb_requeuejob to requeue the job array.
-     * <p/>
-     * Assign values to the jobID, status, and options data members of the jobrequeue
-     * data structure. Assign the job identification number to jobID. Assign
-     * JOB_STAT_PEND or JOB_STAT_PSUSP to status. Assign REQUEUE_DONE, REQUEUE_EXIT,
-     * and or REQUEUE_RUN to requeue running jobs.
-     * <p/>
-     * <b>\#include <lsf/lsbatch.h>
-     * <p/>
-     * int lsb_requeuejob(jobrequeue.ByReference  reqPtr)</b>
-     *
-     * @return int:-1 \n
-     *         The function failed.
-     *         <p/>
-     *         \b Errors:
-     *         \par
-     *         On failure, lsberrno is set to indicate the error.
-     *         <p/>
-     *         <b>Equivalent line commands:</b>
-     *         \par
-     *         brequeue -d
-     *         \n brequeue -e
-     *         \n brequeue -a
-     *         \n brequeue -r
-     *         \n brequeue -H
-     *         <p/>
-     *         <b>Files:</b>
-     *         \par
-     *         $LSB_CONFDIR/cluster_name/configdir/lsb.params
-     *         \n $LSB_SHAREDIR
-     * @param reqPtr This structure contains the information required to requeue a job.
-     * <p/>
-     * <b>Data Structures:</b>
-     * \par
-     * jobrequeue
-     * <p/>
-     * <b>Define Statements:</b>
-     * \par
-     * \ref requeuejob_options
-     * #see \ref lsb_movejob
-     * #see \ref lsb_pendreason
-     */
-    public static native int lsb_requeuejob(jobrequeue reqPtr);
-
-    /**
-     * \page lsb_sysmsg lsb_sysmsg
-     * \brief Returns a pointer to static data.
-     * <p/>
-     * \ref lsb_sysmsg returns a pointer to static data which stores the batch error
-     * message corresponding to lsberrno. The global variable lsberrno maintained
-     * by LSBLIB holds the error number from the most recent LSBLIB call that caused
-     * an error. If lsberrno == LSBE_SYS_CALL, then the system error message defined
-     * by errno is also returned. If lsberrno == LSBE_LSLIB, then the error message
-     * returned by \ref ls_sysmsg is returned.
-     * <p/>
-     * <b>\#include <lsf/lsbatch.h>
-     * <p/>
-     * String lsb_sysmsg ()</b>
-     *
-     * param void \n
-     *             <p/>
-     *             <b>Data Structures:</b>
-     *             \par
-     *             none
-     *             <p/>
-     *             <b>Define Statements:</b>
-     *             \par
-     *             none
-     * @return null
-     *         \n Function failed.
-     *         <p/>
-     *         \b Errors:
-     *         \par
-     *         If the function fails, lsberrno is set to indicate the error.
-     *         <p/>
-     *         <b>Equivalent line commands:</b>
-     *         \par
-     *         none
-     *         <p/>
-     *         <b>Files:</b>
-     *         \par
-     *         none
-     * #see \ref ls_perror
-     * #see \ref ls_sysmsg
-     */
-    public static native String lsb_sysmsg();
-
-    /**
-     * \page lsb_perror lsb_perror
-     * \brief Prints a batch LSF error message on stderr.
-     * <p/>
-     * \ref lsb_perror prints a batch LSF error message on stderr. The usrMsg is
-     * printed out first, followed by a ":" and the batch error message corresponding
-     * to lsberrno.
-     * <p/>
-     * \ref lsb_perror - Print LSBATCH error message on stderr. In addition
-     * to the error message defined by lsberrno, user supplied message usrMsg1
-     * is printed out first and a ':' is added to separate.ByReference  usrMsg1 and LSBATCH
-     * error message.
-     * <p/>
-     * <b>\#include <lsf/lsbatch.h>
-     * <p/>
-     * void lsb_perror (String usrMsg)</b>
-     *
-     * return void \n
-     *         Prints out the user supplied error message.
-     *         <p/>
-     *         \b Errors:
-     *         \par
-     *         If the function fails, lsberrno is set to indicate the error.
-     *         <p/>
-     *         <b>Equivalent line command:</b>
-     *         \par
-     *         none
-     *         <p/>
-     *         \b Files:
-     *         \par
-     *         none
-     * @param usrMsg A user supplied error message.
-     * <p/>
-     * <b>Data Structures:</b>
-     * \par
-     * none
-     * <p/>
-     * <b>Define Statements:</b>
-     * \par
-     * none
-     * see none
-     */
-    public static native void lsb_perror(String usrMsg);
-
-    public static native void lsb_errorByCmd(String string1, String string2, int int1);
-
-    public static native String lsb_sperror(String string1);
-
-    /**
-     * \page lsb_peekjob lsb_peekjob
-     * \brief Returns the base name of the file related to the job ID
-     * <p/>
-     * \ref lsb_peekjob retrieves the name of a job file.
-     * <p/>
-     * Only the submitter can peek at job output.
-     * <p/>
-     * The storage for the file name will be reused by the next call.
-     * <p/>
-     * Any program using this API must be setuid to root if LSF_AUTH
-     * is not defined in the lsf.conf file.
-     * <p/>
-     * <b>\#include <lsf/lsbatch.h>
-     * <p/>
-     * String  lsb_peekjob (long jobId)</b>
-     *
-     * @param jobId The job ID that the LSF system assigned to the job. If a job
-     *              in a job array is to be returned, use the array form jobID[i] where jobID
-     *              is the job array name, and i is the index value.
-     *              <p/>
-     *              <b>Data Structures:</b>
-     *              \par
-     *              none
-     *              <p/>
-     *              <b>Define Statements:</b>
-     *              \par
-     *              none
-     * @return null
-     *         \n Function failed.
-     *         <p/>
-     *         \b Errors:
-     *         \par
-     *         If the function fails, lsberrno is set to indicate the error.
-     *         <p/>
-     *         <b>Equivalent line command:</b>
-     *         \par
-     *         bpeek
-     *         <p/>
-     *         \b Files:
-     *         \par
-     *         ${LSF_ENVDIR:-/etc}/lsf.conf
-     * see none
-     */
-    public static native String lsb_peekjob(long jobId);
-
-    /**
-     * \page lsb_mig lsb_mig
-     * \brief Migrates a job from one host to another.
-     * <p/>
-     * \ref lsb_mig migrates a job from one host to another. Any program using
-     * this API must be setuid to root if LSF_AUTH is not defined
-     * in the lsf.conf file.
-     * <p/>
-     * <b>\#include <lsf/lsbatch.h>
-     * <p/>
-     * int lsb_mig(submig.ByReference mig, IntByReference badHostIdx)</b>
-     *
-     * @return int:-1 \n
-     *         Function failed.
-     *         <p/>
-     *         \b Errors:
-     *         \par
-     *         If the function fails, lsberrno is set to indicate the error and badHostIdx indicates
-     *         which askedHost is not acceptable.
-     *         <p/>
-     *         <b>Equivalent line command:</b>
-     *         \par
-     *         none
-     *         <p/>
-     *         \b Files:
-     *         \par
-     *         ${LSF_ENVDIR:-/etc}/lsf.conf
-     * @param mig The job to be migrated.
-     * @param badHostIdx If the call fails, (**askedHosts)[*badHostIdx] is not a
-     * host known to the LSF system.
-     * <p/>
-     * <b>Data Structures:</b>
-     * \par
-     * submig
-     * <p/>
-     * <b>Define Statements:</b>
-     * \par
-     * none
-     * #see \ref lsb_submit
-     */
-    public static native int lsb_mig(submig mig, IntByReference badHostIdx);
-
-    public static native clusterInfoEnt.ByReference lsb_clusterinfo(IntByReference int1, Pointer stringArray1, int int2);
-
-    public static native clusterInfoEntEx.ByReference lsb_clusterinfoEx(IntByReference int1, Pointer stringArray1, int int2);
-
-    /**
-     * \page lsb_hostinfo lsb_hostinfo
-     * Returns information about job server hosts.
-     * <p/>
-     * <b>\#include <lsf/lsbatch.h>
-     * <p/>
-     * hostInfoEnt.ByReference lsb_hostinfo(String[] hosts, IntByReference numHosts)</b>
-     *
-     * @return hostInfoEnt.ByReference :null
-     *         \n Function failed.
-     *         <p/>
-     *         <b>Errors:</b>
-     *         \par
-     *         If the function fails, lsberrno is set to indicate the error. If lsberrno is
-     *         LSBE_BAD_HOST, (*hosts)[*numHosts] is not a host known to the batch system.
-     *         Otherwise, if.ByReference numHosts is less than its original value,* numHosts is the actual
-     *         number of hosts found.
-     *         <p/>
-     *         <b>Equivalent line commands:</b>
-     *         \par
-     *         bhosts
-     *         <p/>
-     *         <b>Files:</b>
-     *         \par
-     *         $LSB_CONFDIR/cluster_name/configdir/lsb.hosts
-     * @param hosts
-     * An array of host or cluster names.
-     * @param numHosts
-     * The number of host names.
-     * To get information on all hosts, set.ByReference numHosts to 0;* numHosts will be set to the
-     * actual number of hostInfoEnt structures when this call returns.
-     * If.ByReference numHosts is 1 and hosts is null, information on the local host is returned.
-     * <p/>
-     * <b>Data Structures:</b>
-     * \par
-     * hostInfoEnt
-     * <p/>
-     * <b>Define Statements:</b>
-     * \par
-     * \ref host_status
-     * \n \ref host_load_BusyReason
-     * \n \ref host_attributes
-     * #see \ref lsb_hostinfo_ex
-     * #see \ref ls_info
-     * #see \ref ls_loadofhosts
-     * #see \ref lsb_queueinfo
-     * #see \ref lsb_userinfo
-     */
-    public static native hostInfoEnt.ByReference lsb_hostinfo(Pointer hosts, IntByReference numHosts);
-
-    /**
-     * \page lsb_hostinfo_ex lsb_hostinfo_ex
-     * Returns informaton about job server hosts that satisfy specified resource
-     * requirements. \ref lsb_hostinfo_ex returns information about job server hosts
-     * that satisfy the specified resource requirements.
-     * <p/>
-     * <b>\#include <lsf/lsbatch.h>
-     * <p/>
-     * hostInfoEnt.ByReference lsb_hostinfo_ex(String[] hosts,
-     * IntByReference numHosts, String resReq, int options)</b> @param hosts An array of host or cluster names.
-     *
-     * @param options Options is reserved for the future use.
-     *                <p/>
-     *                <b>Data Structures:</b>
-     *                \par
-     *                hostInfoEnt
-     *                <p/>
-     *                <b>Define Statements:</b>
-     *                \par
-     *                \ref host_status
-     *                \n \ref host_load_BusyReason
-     *                \n \ref host_attributes
-     * @return null
-     *         \n Function failed.
-     *         <p/>
-     *         <b>Errors:</b>
-     *         \par
-     *         If the function fails, lsberrno is set to indicate the error. If lsberrno is
-     *         LSBE_BAD_HOST, (*hosts)[*numHosts] is not a host known to the batch system.
-     *         Otherwise, if.ByReference numHosts is less than its original value,* numHosts is the actual
-     *         number of hosts found.
-     *         <p/>
-     *         <b>Equivalent line commands:</b>
-     *         \par
-     *         none
-     *         <p/>
-     *         <b>Files:</b>
-     *         \par
-     *         $LSB_CONFDIR/cluster_name/configdir/lsb.hosts
-     * @param numHosts The number of host names.
-     * To get information on all hosts, set.ByReference numHosts to 0;* numHosts will be set
-     * to the actual number of hostInfoEnt structures when this call returns.
-     * If.ByReference numHosts is 1 and hosts is null, information on the local host is returned.
-     * @param resReq Resource requirements.
-     * If this option is specified, then only host information for those hosts
-     * that satisfy the resource requirements is returned. Returned hosts are
-     * sorted according to the load on the resource() given in resReq, or by
-     * default according to CPU and paging load.
-     * #see \ref ls_info
-     * #see \ref ls_loadofhosts
-     * #see \ref lsb_hostinfo
-     * #see \ref lsb_queueinfo
-     * #see \ref lsb_userinfo
-     * @param string1 string1
-     */
-
-    public static native hostInfoEnt.ByReference lsb_hostinfo_ex(Pointer resReq, IntByReference numHosts, String string1, int options);
-
-    /**
-     * \page lsb_hostinfo_cond lsb_hostinfo_cond
-     * Returns condensed information about job server hosts.
-     * <p/>
-     * \ref lsb_hostinfo_cond returns condensed information about job server hosts.
-     * While \ref lsb_hostinfo returns specific information about individual hosts,
-     * \ref lsb_hostinfo_cond returns the number of jobs in each state within the
-     * entire host group. The condHostInfoEnt structure contains counters that
-     * indicate how many hosts are in the ok, busy, closed, full, unreach, and
-     * unavail states and an array of hostInfoEnt structures that indicate the
-     * status of each host in the host
-     * group.
-     * <p/>
-     * <b>\#include <lsf/lsbatch.h>
-     * <p/>
-     * condHostInfoEnt.ByReference  lsb_hostinfo_cond
-     * (String[] hosts, IntByReference numHosts,
-     * String resReq, int options)</b>
-     *
-     * @param options Any options called with the function.
-     *                <p/>
-     *                <b>Data Structures</b>
-     *                \par
-     *                condHostInfoEnt
-     *                \n hostInfoEnt
-     *                <p/>
-     *                <b>Define Statements:</b>
-     *                \par
-     *                none
-     * @return null
-     *         \n Function failed.
-     *         <p/>
-     *         <b Errors:</b>
-     *         \par
-     *         If the function fails, lsberrno is set to indicate the error.
-     *         <p/>
-     *         <b>Equivalent line commands:</b>
-     *         \par
-     *         none
-     *         <p/>
-     *         <b>Files:</b>
-     *         \par
-     *         none
-     * @param hosts An array of host names belonging to the host group.
-     * @param numHosts The number of host names in the host group.
-     * To get information on all hosts in the host group, set.ByReference numHosts to 0;
-     * * numHosts will be set to the actual number of hostInfoEnt structures in
-     * the host group when this call returns.
-     * @param resReq Any resource requirements called with the function.
-     * #see \ref lsb_hostinfo
-     */
-    public static native condHostInfoEnt.ByReference lsb_hostinfo_cond(Pointer hosts, IntByReference numHosts, String resReq, int options);
-
-    /**
-     * \page lsb_movejob lsb_movejob
-     * \brief Changes the position of a pending job in a queue.
-     * <p/>
-     * Use \ref lsb_movejob to move a pending job to a new position that you specify
-     * in a queue. Position the job in a queue by first specifying the job ID.
-     * Next, count, beginning at 1, from either the top or the bottom of the queue,
-     * to the position you want to place the job.
-     * <p/>
-     * To position a job at the top of a queue, choose the top of a queue parameter
-     * and a postion of 1.To position a job at the bottom of a queue, choose the
-     * bottom of the queue parameter and a position of 1.
-     * <p/>
-     * By default, LSF dispatches
-     * jobs in a queue in order of their arrival (such as first-come-first-served),
-     * subject to the availability of suitable server hosts.
-     * <p/>
-     * <b>\#include <lsf/lsbatch.h>
-     * <p/>
-     * int lsb_movejob (long jobId, IntByReference position, int opCode)</b>
-     *
-     * @param jobId  The job ID that the LSF system assigns to the job. If a job
-     *               in a job array is to be moved, use the array form jobID[ i ] where jobID is
-     *               the job array name, and i is the index value.
-     * @param opCode The top or bottom position of a queue.
-     *               \n \b TO_TOP
-     *               \n The top position of a queue.
-     *               \n \b TO_BOTTOM
-     *               \n The bottom position of a queue.
-     *               \n If an opCode is not specified for the top or bottom position, the
-     *               function fails.
-     *               <p/>
-     *               <b>Data Structures:</b>
-     *               \par
-     *               none
-     *               <p/>
-     *               <b>Define Statements:</b>
-     *               \par
-     *               \ref movejob_options
-     * @return int:-1 \n
-     *         The function failed.
-     *         <p/>
-     *         \b Errors:
-     *         \par
-     *         If the function fails, lsberrno is set to indicate the error.
-     *         <p/>
-     *         <b>Equivalent line command:</b>
-     *         \par
-     *         btop
-     *         \n bbot
-     *         \n bjobs -q
-     *         <p/>
-     *         \b Files:
-     *         \par
-     *         ${LSF_ENVDIR:-/etc}/lsf.conf
-     * @param position The new position of the job in a queue. position must be
-     * a value of 1 or more.
-     * #see \ref lsb_pendreason
-     */
-
-    public static native int lsb_movejob(long jobId, IntByReference opCode, int position);
-
-    /**
-     * \page lsb_switchjob lsb_switchjob
-     * \brief Switches an unfinished job to another queue.
-     * <p/>
-     * \ref lsb_switchjob switches an unfinished job to another queue. Effectively,
-     * the job is removed from its current queue and re-queued in the new queue.
-     * <p/>
-     * The switch operation can be performed only when the job is acceptable to
-     * the new queue. If the switch operation is unsuccessful, the job will stay
-     * where it is.A user can only switch his/her own unfinished jobs, but root
-     * and the LSF administrator can switch any unfinished job.
-     * <p/>
-     * Any program using this API must be setuid to root if LSF_AUTH is not defined
-     * in the lsf.conf file.
-     * <p/>
-     * <b>\#include <lsf/lsbatch.h>
-     * <p/>
-     * int lsb_switchjob (long jobId, String queue)</b>
-     *
-     * @param jobId The job to be switched. If an element of a job array is to
-     *              be switched, use the array form jobID[i] where jobID is the job array name,
-     *              and i is the index value.
-     * @return int:-1 \n
-     *         Function failed.
-     *         <p/>
-     *         \b Errors:
-     *         \par
-     *         If the function fails, lsberrno is set to indicate the error.
-     *         <p/>
-     *         <b>Equivalent line commands:</b>
-     *         \par
-     *         bswitch
-     *         <p/>
-     *         <b>Files:</b>
-     *         \par
-     *         ${LSF_ENVDIR:-/etc}/lsf.conf
-     * @param queue The new queue for the job.
-     * <p/>
-     * <b>Data Structures:</b>
-     * \par
-     * none
-     * <p/>
-     * <b>Define Statements:</b>
-     * \par
-     * none
-     * see none
-     */
-    public static native int lsb_switchjob(long jobId, String queue);
-
-    /**
-     * \page lsb_queuecontrol lsb_queuecontrol
-     * \brief Changes the status of a queue.
-     * <p/>
-     * \ref lsb_queuecontrol changes the status of a queue.
-     * <p/>
-     * Any program using this API must be setuid to root if LSF_AUTH is not defined
-     * in the lsf.conf file.
-     * <p/>
-     * If a queue is inactivated by its dispatch window (see lsb.queues), then it
-     * cannot be re-activated by this call.
-     * <p/>
-     * <b>\#include <lsf/lsbatch.h>
-     * <p/>
-     * int lsb_queuecontrol (queueCtrlReq.ByReference req)</b>
-     *
-     * @return int:-1 \n
-     *         Function failed.
-     *         <p/>
-     *         \b Errors:
-     *         \par
-     *         If the function fails, lsberrno is set to indicate the error.
-     *         <p/>
-     *         <b>Equivalent line commands:</b>
-     *         \par
-     *         none
-     *         <p/>
-     *         \b Files:
-     *         \par
-     *         ${LSF_ENVDIR:-/etc}/lsf.conf
-     * @param req queue control request.
-     * <p/>
-     * <b>Data Structures:</b>
-     * \par
-     * queueCtrlReq
-     * <p/>
-     * <b>Define Statements:</b>
-     * \par
-     * \ref queue_ctrl_option
-     * #see \ref lsb_queueinfo
-     */
-    public static native int lsb_queuecontrol(queueCtrlReq req);
-
-    /**
-     * \page lsb_userinfo lsb_userinfo
-     * \brief Returns the maximum number of job slots that a user can use
-     * simultaneously on any host and in the whole local LSF cluster.
-     * <p/>
-     * \ref lsb_userinfo gets the maximum number of job slots that a user can use
-     * simultaneously on any host and in the whole local LSF cluster, as well as
-     * the current number of job slots used by running and suspended jobs or
-     * reserved for pending jobs. The maximum numbers of job slots are defined
-     * in the LSF configuration file lsb.users (see lsb.users). The reserved
-     * user name default, defined in the lsb.users configuration file, matches
-     * users not listed in the lsb.users file who have no jobs started in the
-     * system.
-     * <p/>
-     * The returned array will be overwritten by the next call.
-     * <p/>
-     * <b>\#include <lsf/lsbatch.h>
-     * <p/>
-     * userInfoEnt.ByReference lsb_userinfo(String[] users, IntByReference numUsers)</b>
-     *
-     * @return null \n
-     *         Function failed.
-     *         <p/>
-     *         \b Errors:
-     *         \par
-     *         If the function fails, lsberrno is set to indicate the error. If lsberrno is
-     *         LSBE_BAD_USER, (*users)[*numUsers] is not a user known to the LSF system.
-     *         Otherwise, if.ByReference numUsers is less than its original value,* numUsers is the actual
-     *         number of users found.
-     *         <p/>
-     *         <b>Equivalent line commands:</b>
-     *         \par
-     *         busers
-     *         <p/>
-     *         <b>Files:</b>
-     *         \par
-     *         $LSB_CONFDIR/cluster_name/configdir/lsb.users
-     * @param users An array of user names.
-     * @param numUsers The number of user names.
-     * To get information about all users, set.ByReference numUsers = 0;* numUsers will
-     * be updated to the actual number of users when this call returns. To get
-     * information on the invoker, set users = null,* numUsers = 1.
-     * <p/>
-     * <b>Data Structures:</b>
-     * \par
-     * userInfoEnt
-     * <p/>
-     * <b>Define Statements:</b>
-     * \par
-     * none
-     * #see \ref lsb_hostinfo
-     * #see \ref lsb_queueinfo
-     */
-    public static native userInfoEnt.ByReference lsb_userinfo(Pointer users, IntByReference numUsers);
-
-    /**
-     * \page lsb_hostgrpinfo lsb_hostgrpinfo
-     * Returns LSF host group membership.
-     * <p/>
-     * \ref lsb_hostgrpinfo gets LSF host group membership.
-     * <p/>
-     * LSF host group is defined in the configuration file lsb.hosts.
-     * <p/>
-     * The storage for the array of groupInfoEnt structures will be reused by
-     * the next call.
-     * <p/>
-     * <b>\#include <lsf/lsbatch.h>
-     * <p/>
-     * groupInfoEnt.ByReference lsb_hostgrpinfo (String[] groups,IntByReference numGroups,
-     * int options)</b>
-     *
-     * @param options The bitwise inclusive OR of some of the following flags:
-     *                \n GRP_RECURSIVE
-     *                \n Expand the group membership recursively. That is, if a member of a
-     *                group is itself a group, give the names of its members recursively, rather
-     *                than its name, which is the default.
-     *                \n GRP_ALL
-     *                \n Get membership of all groups.
-     *                <p/>
-     *                <b>Data Structures:</b>
-     *                \par
-     *                groupInfoEnt
-     *                \n userShares
-     *                <p/>
-     *                <b>Define Statements:</b>
-     *                \par
-     *                \ref group_membership_option
-     *                \n \ref group_define
-     * @return null \n
-     *         Function failed.
-     *         <p/>
-     *         <b>Errors:</b>
-     *         \par
-     *         On failure, returns null and sets lsberrno to indicate the error. If there
-     *         are invalid groups specified, the function returns the groups up to the
-     *         invalid ones and then sets lsberrno to LSBE_BAD_GROUP, which means that
-     *         the specified (*groups)[*numGroups] is not a group known to the LSF system.
-     *         If the first group specified is invalid, the function returns null.
-     *         <p/>
-     *         <b>Equivalent line commands:</b>
-     *         \par
-     *         none
-     *         <p/>
-     *         <b>Files:</b>
-     *         \par
-     *         $LSB_CONFDIR/cluster_name/configdir/lsb.hosts \n
-     *         $LSB_CONFDIR/cluster_name/configdir/lsb.users
-     * @param groups An array of group names.
-     * @param numGroups The number of group names.* numGroups will be updated
-     * to the actual number of groups when this call returns.
-     * #see \ref lsb_usergrpinfo
-     */
-    public static native groupInfoEnt.ByReference lsb_hostgrpinfo(Pointer groups, IntByReference numGroups, int options);
-
-    /**
-     * \page lsb_usergrpinfo lsb_usergrpinfo
-     * \brief Returns LSF user group membership.
-     * <p/>
-     * \ref lsb_usergrpinfo gets LSF user group membership.
-     * LSF user group is defined in the configuration file lsb.users.
-     * The storage for the array of groupInfoEnt structures will be reused by
-     * the next call.
-     * <p/>
-     * <b>\#include <lsf/lsbatch.h>
-     * <p/>
-     * groupInfoEnt.ByReference lsb_usergrpinfo (String[] groups,
-     * IntByReference numGroups, int options)</b>
-     *
-     * @param options The bitwise inclusive OR of some of flags in \ref group_membership_option
-     *                <p/>
-     *                <b>Data Structures:</b>
-     *                \par
-     *                groupInfoEnt
-     *                <p/>
-     *                <b>Define Statements:</b>
-     *                \par
-     *                \ref group_membership_option
-     *                \n \ref group_define
-     * @return null \n
-     *         Function failed.
-     *         <p/>
-     *         \b Errors:
-     *         \par
-     *         On failure, returns null and sets lsberrno to indicate the error. If there
-     *         are invalid groups specified, the function returns the groups up to the
-     *         invalid ones. It then set lsberrno to LSBE_BAD_GROUP, that is the specified
-     *         (*groups)[*numGroups] is not a group known to the LSF system. If the first
-     *         group is invalid, the function returns null.
-     *         <p/>
-     *         <b>Equivalent line commands:</b>
-     *         \par
-     *         none
-     *         <p/>
-     *         <b>Files:</b>
-     *         \par
-     *         $LSB_CONFDIR/cluster_name/configdir/lsb.hosts
-     *         \n $LSB_CONFDIR/cluster_name/configdir/lsb.users
-     * @param groups An array of group names.
-     * @param numGroups The number of group names.* numGroups will be updated
-     * to the actual number of groups when this call returns.
-     * #see \ref lsb_hostgrpinfo
-     */
-    public static native groupInfoEnt.ByReference lsb_usergrpinfo(Pointer groups, IntByReference numGroups, int options);
-
-    /**
-     * \page lsb_parameterinfo lsb_parameterinfo
-     * \brief Returns information about the LSF cluster.
-     * <p/>
-     * \ref lsb_parameterinfo gets information about the LSF cluster.
-     * <p/>
-     * The static storage for the parameterInfo structure is reused on the next call.
-     * <p/>
-     * <b>\#include <lsf/lsbatch.h>
-     * <p/>
-     * parameterInfo.ByReference lsb_parameterinfo(String[] names,
-     * IntByReference numUsers, int options)</b>
-     *
-     * @param options Reserved but not used; supply 0.
-     *                <p/>
-     *                <b>Data Structures:</b>
-     *                \par
-     *                \ref parameterInfo
-     *                <p/>
-     *                <b>Define Statements:</b>
-     *                \par
-     *                none
-     * @return null \n
-     *         Function failed.
-     *         <p/>
-     *         \b Errors:
-     *         \par
-     *         If the function fails, lsberrno is set to indicate the error.
-     *         <p/>
-     *         <b>Equivalent line command:</b>
-     *         \par
-     *         none
-     *         <p/>
-     *         \b Files:
-     *         \par
-     *         $LSB_CONFDIR/cluster_name/configdir/lsb.params
-     * @param names Reserved but not used; supply null.
-     * @param numUsers Reserved but not used; supply null.
-     * see none
-     */
-    public static native parameterInfo.ByReference lsb_parameterinfo(Pointer names, IntByReference numUsers, int options);
-
-    /**
-     * \page lsb_modify lsb_modify
-     * \brief  Modifies a submitted job's parameters.
-     * <p/>
-     * lsb_modify() allows for the modification of a submitted job's parameters.
-     * <p/>
-     * <b>\#include <lsf/lsbatch.h>
-     * <p/>
-     * long lsb_modify (submit.ByReference jobsubReq,
-     * submitReply.ByReference jobSubReply,
-     * long jobId)</b>
-     *
-     * @param jobId The job to be modified. If an element of a job array is to
-     *              be modified, use the array form jobID[i] where jobID is the job array name,
-     *              and i is the index value.
-     *              <p/>
-     *              <b>Data Structures:</b>
-     *              \par
-     *              \ref submit
-     *              \n \ref submitReply
-     *              <p/>
-     *              <b>Define Statements:</b>
-     *              \par
-     *              none
-     * @return long:-1 \n
-     *         Function failed.
-     *         <p/>
-     *         \b Errors:
-     *         \par
-     *         If the function fails, lsberrno is set to indicate the error.
-     *         <p/>
-     *         <b>Equivalent line command :</b>
-     *         \par
-     *         bmod
-     *         <p/>
-     *         \b Files:
-     *         \par
-     *         ${LSF_ENVDIR:-/etc}/lsf.conf
-     * @param jobSubReq Describes the requirements for job modification to the
-     * batch system. A job that does not meet these requirements is not submitted
-     * to the batch system and an error is returned.
-     * @param jobSubReply Describes the results of the job modification to the
-     * batch system.
-     * #see \ref lsb_submit
-     * #see \ref ls_info
-     * #see \ref ls_rtask
-     * #see \ref lsb_queueinfo
-     */
-    public static native long lsb_modify(submit jobSubReq, submitReply jobSubReply, long jobId);
-
-    public static native FloatByReference getCpuFactor(String string1, int int1);
-
-    /**
-     * \page lsb_suspreason lsb_suspreason
-     * \brief Explains why a job was suspended.
-     * <p/>
-     * Using the SBD, \ref lsb_suspreason explains why system-suspended and
-     * user-suspended jobs were suspended.
-     * <p/>
-     * <b>\#include <lsf/lsbatch.h>
-     * <p/>
-     * String lsb_suspreason(int reasons, int subreasons,
-     * loadIndexLog.ByReference ld)</b>
-     *
-     * @param reasons    Reasons a job suspends.
-     * @param subreasons If reasons is SUSP_LOAD_REASON, subreasons indicates
-     *                   the load indices that are out of bounds. The integer values for the load
-     *                   indices are found in lsf.h.If reasons is SUSP_RES_LIMIT, subreasons
-     *                   indicates the job's requirements for resource reservation are not satisfied.
-     *                   The integer values for the job's requirements for resource reservation are
-     *                   found in lsbatch.h.
-     *                   \n Subreasons a job suspends if reasons is SUSP_LOAD_REASON:
-     *                   - \b  R15S
-     *                   \n 15 second CPU run queue length
-     *                   - \b  R1M
-     *                   \n 1 minute CPU run queue length
-     *                   - \b  R15M
-     *                   \n 15 minute CPU run queue length
-     *                   - \b  UT
-     *                   \n 1 minute CPU utilization
-     *                   - \b  PG
-     *                   \n Paging rate
-     *                   - \b  IO
-     *                   \n Disk IO rate
-     *                   - \b LS
-     *                   \n Number of log in sessions
-     *                   - \b IT
-     *                   \n Idle time
-     *                   - \b TMP
-     *                   \n Available temporary space
-     *                   - \b SWP
-     *                   \n Available swap space
-     *                   - \b MEM
-     *                   \n Available memory
-     *                   - \b USR1
-     *                   \n USR1 is used to describe unavailable or out of bounds user defined load
-     *                   information of an external dynamic load indice on execution hosts.
-     *                   - \b USR2
-     *                   \n USR2 is used to describe unavailable or out of bounds user defined load
-     *                   information of an external dynamic load indice on execution hosts.
-     * @return null \n
-     *         The function failed. The reason code is bad.
-     *         <p/>
-     *         \b Errors:
-     *         \par
-     *         No error handling
-     *         <p/>
-     *         <b>Equivalent line commands:</b>
-     *         \par
-     *         bjobs -s
-     *         <p/>
-     *         <b>Environment Variable:</b>
-     *         \par
-     *         LSB_SUSP_REASONS
-     *         <p/>
-     *         <b>Files:</b>
-     *         \par
-     *         $LSB_CONFDIR/cluster_name/configdir/lsb.queues \n
-     *         $LSB_SHAREDIR/cluster_name/logdir/lsb.events
-     * @param ld When reasons is SUSP_LOAD_REASON, ld is used to determine the
-     * name of any external load indices. ld uses the most recent load index log
-     * in the lsb.events file.
-     * <p/>
-     * <b>Data Structures:</b>
-     * \par
-     * loadIndexLog
-     * <p/>
-     * <b>Define Statements:</b>
-     * \par
-     * \ref suspending_reasons \n
-     * \ref suspending_subreasons
-     * #see \ref lsb_pendreason
-     */
-    public static native String lsb_suspreason(int reasons, int subreasons, loadIndexLog ld);
-
-    /**
-     * \page lsb_pendreason  lsb_pendreason
-     * \brief Explains why a job is pending.
-     * <p/>
-     * Use \ref lsb_pendreason to determine why a job is pending. Each pending reason is
-     * associated with one or more hosts.
-     * <p/>
-     * <b>\#include <lsf/lsbatch.h>
-     * <p/>
-     * String lsb_pendreason (int numReasons, IntByReference rsTb,
-     * jobInfoHead.ByReference jInfoH,
-     * loadIndexLog.ByReference ld, int clusterId)</b>
-     *
-     * @param numReasons The number of reasons in the rsTb reason table.
-     * @param clusterId  MultiCluster cluster ID. If clusterId is greater than or
-     *                   equal to 0, the job is a pending remote job, and \ref lsb_pendreason checks for
-     *                   host_name\@cluster_name. If host name is needed, it should be found in
-     *                   jInfoH->remoteHosts. If the remote host name is not available, the constant
-     *                   string remoteHost is used.
-     *                   <p/>
-     *                   <b>Data Structures:</b>
-     *                   \par
-     *                   \ref jobInfoHead
-     *                   \n \ref loadIndexLog
-     *                   <p/>
-     *                   <b>Define Statements:</b>
-     *                   \par
-     *                   \ref pending_reasons
-     *                   \n \ref suspending_reasons
-     *                   \n \ref suspending_subreasons
-     * @return null \n
-     *         The function fails. The reason code is bad.
-     *         <p/>
-     *         \b Errors:
-     *         \par
-     *         If no PEND reason is found, the function fails and lsberrno is set to indicate the error.
-     *         <p/>
-     *         <b>Equivalent line command:</b>
-     *         \par
-     *         bjobs -p
-     *         <p/>
-     *         \b Files:
-     *         \par
-     *         ${LSF_ENVDIR:-/etc}/lsf.conf
-     * @param rsTb The reason table. Each entry in the table contains one of \ref pending_reasons
-     * @param jInfoH jInfoH contains job information.
-     * @param ld From \ref lsb_suspreason, when reasons is SUSP_LOAD_REASON, ld is used to
-     * determine the name of any external load indices. ld uses the most recent load
-     * index log in the lsb.events file.
-     * #see \ref lsb_geteventrec
-     */
-    public static native String lsb_pendreason(int numReasons, IntByReference rsTb, jobInfoHead jInfoH, loadIndexLog ld, int clusterId);
-
-    /**
-     * \page lsb_calendarinfo lsb_calendarinfo
-     * \brief Gets information about calendars defined in the batch system.
-     * <p/>
-     * \ref lsb_calendarinfo gets information about calendars defined in the batch system.
-     * <p/>
-     * On success, this routine returns a pointer to an array of calendarInfoEnt
-     * structures which stores the information about the returned calendars and
-     * numCalendars gives number of calendars returned. On failure null is returned
-     * and lsberrno is set to indicate the error.
-     * <p/>
-     * <b>\#include <lsf/lsbatch.h>
-     * <p/>
-     * calendarInfoEnt.ByReference lsb_calendarinfo(String[] calendars,
-     * IntByReference numCalendars, String user)</b>
-     *
-     * @return null
-     *         \n Function failed.
-     *         <p/>
-     *         \b Errors:
-     *         \par
-     *         If the function fails, lsberrno is set to indicate the error.
-     *         <p/>
-     *         <b>Equivalent line commands:</b>
-     *         \par
-     *         none
-     *         <p/>
-     *         <b>Files:</b>
-     *         \par
-     *         none
-     * @param calendars calendars is a pointer to an array of calendar names.
-     * @param numCalendars numCalendars gives the number of calendar names. If
-     * * numCalendars is 0, then information about all calendars is returned.
-     * By default, only the invokers calendars are considered.
-     * @param user Setting the user parameter will cause the given users calendars
-     * to be considered.Use the reserved user name all to get calendars of all users.
-     * <p/>
-     * <b>Data Structures:</b>
-     * \par
-     * calendarInfoEnt
-     * <p/>
-     * <b>Define Statements:</b>
-     * \par
-     * none
-     * #see \ref lsb_calendarop
-     */
-    public static native calendarInfoEnt.ByReference lsb_calendarinfo(Pointer calendars, IntByReference numCalendars, String user);
-
-    public static native int lsb_calExprOccs(String string1, int int1, int int2, String string2, PointerByReference int3);
-
-    /**
-     * \page lsb_calendarop lsb_calendarop
-     * \brief Adds, modifies or deletes a calendar.
-     * <p/>
-     * \ref lsb_calendarop is used to add, modify or delete a calendar. The oper
-     * parameter is one of CALADD, CALMOD, or CALDEL. When the operation CALADD
-     * is specified, the first element of the names array is used as the name of
-     * the calendar to add. The desc and calExpr parameters should point to the
-     * description string and the time expression list, respectively. See bcaladd()
-     * for a description of time expressions.
-     * <p/>
-     * CALMOD permits the modification of the
-     * description or time expression list associated with an existing calendar. The
-     * first name in the names array indicates the calendar to be modified. The desc
-     * and calExpr parameters can be set to the updated value or to null to
-     * indicate that the existing value should be maintained.
-     * <p/>
-     * If the operation is
-     * CALDEL then the names parameter points to an array of calendar names to be
-     * deleted. numNames gives the number of names in the array. options is
-     * reserved for the future use.
-     * <p/>
-     * <b>\#include <lsf/lsbatch.h>
-     * <p/>
-     * IntByReference lsb_calendarop(int oper, int numNames, String[] names, byte
-     * * desc, String calExpr, int options, String[] badStr)</b>
-     *
-     * @param oper     One of CALADD, CALMOD, or CALDEL. Depending on which one is
-     *                 chosen, adds, modifies, or deletes a calendar.Defined in \ref calendar_command.
-     * @param numNames The number of names in the array.
-     * @param options  Currently unused.
-     * @return int:-1
-     *         \n Function failed.
-     *         <p/>
-     *         \b Errors:
-     *         \par
-     *         If the function fails, lsberrno is set to indicate the error. If error
-     *         is related to bad calendar name or time expression, the routine returns
-     *         the name or expression in badStr.
-     *         <p/>
-     *         <b>Equivalent line commands:</b>
-     *         \par
-     *         none
-     *         <p/>
-     *         <b>Files:</b>
-     *         \par
-     *         none
-     * @param names Depending on oper, it defines the name of the calendar is going
-     * to be added, modified or deleted.
-     * @param desc The calendar's description list.
-     * @param calExpr A calendar expression.
-     * @param badStr Return from mbatchd indicating bad name or event time of calendar.
-     * <p/>
-     * <b>Data Structures:</b>
-     * \par
-     * none
-     * <p/>
-     * <b>Define Statements:</b>
-     * \par
-     * \ref calendar_command
-     * #see \ref lsb_calendarinfo
-     */
-    public static native int lsb_calendarop(int oper, int numNames, Pointer names, String desc, String calExpr, int options, String badStr);
-
-    /**
-     * \page lsb_puteventrec lsb_puteventrec
-     * \brief Puts information of an eventRec structure pointed to by logPtr
-     * into a log file.
-     * <p/>
-     * \ref lsb_puteventrec puts information of an eventRec structure pointed to by
-     * logPtr into a log file. log_fp is a pointer pointing to the log file name
-     * that could be either event a log file or job log file.
-     * <p/>
-     * See \ref lsb_geteventrec for detailed information about parameters.
-     * <p/>
-     * <b>\#include <lsf/lsbatch.h>
-     * <p/>
-     * int lsb_puteventrec(Pointer log_fp, eventRec.ByReference logPtr)</b>
-     *
-     * @return int:-1 \n
-     *         Function failed.
-     *         <p/>
-     *         \b Errors:
-     *         \par
-     *         If the function fails, lsberrno is set to indicate the error.
-     *         <p/>
-     *         <b>Equivalent line commands:</b>
-     *         \par
-     *         none
-     *         <p/>
-     *         <b>Files:</b>
-     *         \par
-     *         $LSB_SHAREDIR/cluster_name/logdir/lsb.events
-     * @param logPtr The eventRec structure pointed to by logPtr into a log file.
-     * @param log_fp A pointer pointing to the log file name that could be either
-     * event a log file or job log file.
-     * <p/>
-     * <b>Data Structures:</b>
-     * \par
-     * eventRec
-     * \n eventLog
-     * \n xFile
-     * \n jobAttrSetLog
-     * \n logSwitchLog
-     * \n dataLoggingLog
-     * \n jgrpNewLog
-     * \n jgrpCtrlLog
-     * \n jgrpStatusLog
-     * \n jobNewLog
-     * \n jobModLog
-     * \n jobStartLog
-     * \n jobStartAcceptLog
-     * \n jobExecuteLog
-     * \n jobStatusLog
-     * \n sbdJobStatusLog
-     * \n sbdUnreportedStatusLog
-     * \n jobSwitchLog
-     * \n jobMoveLog
-     * \n chkpntLog
-     * \n jobRequeueLog
-     * \n jobCleanLog
-     * \n jobExceptionLog
-     * \n sigactLog
-     * \n migLog
-     * \n signalLog
-     * \n queueCtrlLog
-     * \n hostCtrlLog
-     * \n hgCtrlLog
-     * \n mbdStartLog
-     * \n mbdDieLog
-     * \n unfulfillLog
-     * \n jobFinishLog
-     * \n loadIndexLog
-     * \n calendarLog
-     * \n jobForwardLog
-     * \n jobAcceptLog
-     * \n statusAckLog
-     * \n jobMsgLog
-     * \n jobMsgAckLog
-     * \n jobOccupyReqLog
-     * \n jobVacatedLog
-     * \n jobForceRequestLog
-     * \n jobChunkLog
-     * \n jobExternalMsgLog
-     * \n rsvRes
-     * \n rsvFinishLog
-     * \n cpuProfileLog
-     * \n jobRunRusageLog
-     * \n slaLog
-     * \n perfmonLogInfo
-     * \n perfmonLog
-     * \n taskFinishLog
-     * \n eventEOSLog
-     * \n jobResizeNotifyStartLog
-     * \n jobResizeNotifyAcceptLog
-     * \n jobResizeNotifyDoneLog
-     * \n jobResizeReleaseLog
-     * \n jobResizeCancelLog
-     * \n jobResizeLog
-     * \n jRusage
-     * <p/>
-     * <b>Define Statements:</b>
-     * \par
-     * \ref event_types
-     * \n \ref defs_lsb_XF_OP
-     * \n \ref jobgroup_controltypes
-     * \n \ref signal_action
-     * #see \ref lsb_geteventrec
-     */
-    public static native int lsb_puteventrec(Pointer logPtr, eventRec log_fp);
-
-    public static native int lsb_puteventrecRaw(Pointer pointer1, eventRec eventRec1, String string1);
-
-    /**
-     * \page lsb_geteventrec lsb_geteventrec
-     * \brief Get an event record from a log file
-     * <p/>
-     * \ref lsb_geteventrec returns an eventRec from a log file.
-     * <p/>
-     * The storage for the eventRec structure returned by \ref lsb_geteventrec will be
-     * reused by the next call.
-     * <p/>
-     * <b>\#include <lsf/lsbatch.h>
-     * <p/>
-     * eventRec.ByReference lsb_geteventrec(Pointer  log_fp,IntByReference  lineNum)</b>
-     *
-     * @return null \n
-     *         Function failed.If there are no more records, returns null and sets
-     *         lsberrno to LSBE_EOF.
-     *         <p/>
-     *         \b Errors:
-     *         \par
-     *         On failure, lsberrno is set to indicate the error.
-     *         <p/>
-     *         <b>Equivalent line commands:</b>
-     *         \par
-     *         none
-     *         <p/>
-     *         <b>Files:</b>
-     *         \par
-     *         $LSB_SHAREDIR/cluster_name/logdir/lsb.acct
-     *         \n $LSB_SHAREDIR/cluster_name/logdir/lsb.events
-     *         \n $LSB_SHAREDIR/cluster_name/logdir/lsb.rsv.ids
-     *         \n $LSB_SHAREDIR/cluster_name/logdir/lsb.rsv.state
-     * @param log_fp Either an event log file or a job log file.
-     * @param lineNum The number of the event record.
-     * <p/>
-     * <b>Data Structures:</b>
-     * \par
-     * eventRec
-     * \n eventLog
-     * \n xFile
-     * \n jobAttrSetLog
-     * \n logSwitchLog
-     * \n dataLoggingLog
-     * \n jgrpNewLog
-     * \n jgrpCtrlLog
-     * \n jgrpStatusLog
-     * \n jobNewLog
-     * \n jobModLog
-     * \n jobStartLog
-     * \n jobStartAcceptLog
-     * \n jobExecuteLog
-     * \n jobStatusLog
-     * \n sbdJobStatusLog
-     * \n sbdUnreportedStatusLog
-     * \n jobSwitchLog
-     * \n jobMoveLog
-     * \n chkpntLog
-     * \n jobRequeueLog
-     * \n jobCleanLog
-     * \n jobExceptionLog
-     * \n sigactLog
-     * \n migLog
-     * \n signalLog
-     * \n queueCtrlLog
-     * \n hostCtrlLog
-     * \n hgCtrlLog
-     * \n mbdStartLog
-     * \n mbdDieLog
-     * \n unfulfillLog
-     * \n jobFinishLog
-     * \n loadIndexLog
-     * \n calendarLog
-     * \n jobForwardLog
-     * \n jobAcceptLog
-     * \n statusAckLog
-     * \n jobMsgLog
-     * \n jobMsgAckLog
-     * \n jobOccupyReqLog
-     * \n jobVacatedLog
-     * \n jobForceRequestLog
-     * \n jobChunkLog
-     * \n jobExternalMsgLog
-     * \n rsvRes
-     * \n rsvFinishLog
-     * \n cpuProfileLog
-     * \n jobRunRusageLog
-     * \n slaLog
-     * \n perfmonLogInfo
-     * \n perfmonLog
-     * \n taskFinishLog
-     * \n eventEOSLog
-     * \n jobResizeNotifyStartLog
-     * \n jobResizeNotifyAcceptLog
-     * \n jobResizeNotifyDoneLog
-     * \n jobResizeReleaseLog
-     * \n jobResizeCancelLog
-     * \n jobResizeLog
-     * \n jRusage
-     * <p/>
-     * <b>Define Statements:</b>
-     * \par
-     * \ref event_types
-     * \n \ref defs_lsb_XF_OP
-     * \n \ref jobgroup_controltypes
-     * \n \ref signal_action
-     * #see \ref lsb_hostcontrol
-     * #see \ref lsb_movejob
-     * #see \ref lsb_pendreason
-     * #see \ref lsb_puteventrec
-     * #see \ref lsb_queuecontrol
-     * #see \ref lsb_readjobinfo
-     * #see \ref lsb_submit
-     * #see \ref lsb_suspreason
-     */
-    public static native eventRec.ByReference lsb_geteventrec(Pointer log_fp, IntByReference lineNum);
-
-    public static native eventRec.ByReference lsb_geteventrec_decrypt(Pointer pointer1, IntByReference int1);
-
-    public static native eventRec.ByReference lsb_geteventrecord(Pointer pointer1, IntByReference int1);
-
-    public static native eventRec.ByReference lsb_geteventrecordEx(Pointer pointer1, IntByReference int1, Pointer stringArray1);
-
-    public static native eventRec.ByReference lsb_getnewjob_from_string(String string1);
-
-    public static native eventInfoEnt.ByReference lsb_eventinfo(Pointer stringArray1, IntByReference int1, String string1);
-
-    /**
-     * \page lsb_sharedresourceinfo lsb_sharedresourceinfo
-     * \brief Returns the requested shared resource information in dynamic values.
-     * <p/>
-     * \ref lsb_sharedresourceinfo returns the requested shared resource information in
-     * dynamic values. The result of this call is a chained data structure as
-     * defined in <lsf/lsbatch.h>, which contains requested information.
-     * <p/>
-     * <b>\#include <lsf/lsbatch.h>
-     * <p/>
-     * LSB_SHARED_RESOURCE_INFO_T.ByReference lsb_sharedresourceinfo(
-     * String[] resources,
-     * IntByReference numResources,
-     * String hostName, int options)</b>
-     *
-     * @param options options is reserved for future use. Currently, it should be set to 0.
-     *                <p/>
-     *                <b>Data Structures:</b>
-     *                \par
-     *                lsbSharedResourceInfo
-     *                \n lsbSharedResourceInstance
-     *                <p/>
-     *                <b>Define Statements:</b>
-     *                \par
-     *                none
-     * @return null \n
-     *         Function failed.
-     *         <p/>
-     *         \b Errors:
-     *         \par
-     *         If the function fails, lsberrno is set to indicate the error.
-     *         <p/>
-     *         <b>Equivalent line commands:</b>
-     *         \par
-     *         none
-     *         <p/>
-     *         <b>Files:</b>
-     *         \par
-     *         $LSF_CONFDIR/lsf.shared
-     *         \n $LSF_CONFDIR/lsf.cluster.cluster_name
-     * @param resources resources is an null terminated string array storing
-     * requesting resource names.Setting resources to point to null returns all
-     * shared resources.
-     * @param numResources numResources is an input/output parameter. On input
-     * it indicates how many resources are requested. Value 0 means requesting
-     * all shared resources. On return it contains qualified resource number.
-     * @param hostName hostName is a string containing a host name. Only shared resource
-     * available on the specified host will be returned. If hostName is a null,
-     * shared resource available on all hosts will be returned.
-     * #see \ref ls_sharedresourceinfo
-     */
-    public static native Pointer lsb_sharedresourceinfo(Pointer resources, IntByReference numResources, String hostName, int options);
-
-    /**
-     * \page lsb_geteventrecbyline lsb_geteventrecbyline
-     * Parse an event line and put the result in an event record structure.
-     * The \ref lsb_geteventrecbyline function parses an event line and puts the result
-     * in an event record structure.
-     * <p/>
-     * If the line to be parsed is a comment line, \ref lsb_geteventrecbyline sets errno to
-     * bad event format and logs an error.
-     * <p/>
-     * <b>\#include <lsf/lsbatch.h>
-     * <p/>
-     * int lsb_geteventrecbyline(String line, eventRec.ByReference logRec)</b>
-     *
-     * @return int:-1
-     *         \n Function failed and lserrno was set.
-     *         <p/>
-     *         \b Errors:
-     *         \par
-     *         none
-     *         <p/>
-     *         <b>Equivalent line commands:</b>
-     *         \par
-     *         none
-     *         <p/>
-     *         <b>Files:</b>
-     *         \par
-     *         none
-     * @param line
-     * Buffer containing a line of event text string
-     * @param logRec
-     * Pointer to an eventRec structure
-     * <p/>
-     * <b>Data Structures:</b>
-     * \par
-     * eventRec
-     * \n eventLog
-     * \n xFile
-     * \n jobAttrSetLog
-     * \n logSwitchLog
-     * \n dataLoggingLog
-     * \n jgrpNewLog
-     * \n jgrpCtrlLog
-     * \n jgrpStatusLog
-     * \n jobNewLog
-     * \n jobModLog
-     * \n jobStartLog
-     * \n jobStartAcceptLog
-     * \n jobExecuteLog
-     * \n jobStatusLog
-     * \n sbdJobStatusLog
-     * \n sbdUnreportedStatusLog
-     * \n jobSwitchLog
-     * \n jobMoveLog
-     * \n chkpntLog
-     * \n jobRequeueLog
-     * \n jobCleanLog
-     * \n jobExceptionLog
-     * \n sigactLog
-     * \n migLog
-     * \n signalLog
-     * \n queueCtrlLog
-     * \n hostCtrlLog
-     * \n hgCtrlLog
-     * \n mbdStartLog
-     * \n mbdDieLog
-     * \n unfulfillLog
-     * \n jobFinishLog
-     * \n loadIndexLog
-     * \n calendarLog
-     * \n jobForwardLog
-     * \n jobAcceptLog
-     * \n statusAckLog
-     * \n jobMsgLog
-     * \n jobMsgAckLog
-     * \n jobOccupyReqLog
-     * \n jobVacatedLog
-     * \n jobForceRequestLog
-     * \n jobChunkLog
-     * \n jobExternalMsgLog
-     * \n rsvRes
-     * \n rsvFinishLog
-     * \n cpuProfileLog
-     * \n jobRunRusageLog
-     * \n slaLog
-     * \n perfmonLogInfo
-     * \n perfmonLog
-     * \n taskFinishLog
-     * \n eventEOSLog
-     * \n jobResizeNotifyStartLog
-     * \n jobResizeNotifyAcceptLog
-     * \n jobResizeNotifyDoneLog
-     * \n jobResizeReleaseLog
-     * \n jobResizeCancelLog
-     * \n jobResizeLog
-     * \n jRusage
-     * <p/>
-     * <b>Define Statements:</b>
-     * \par
-     * none
-     * <p/>
-     * <b>Pre-Conditions:</b>
-     * \par
-     * The event record structure must have been initialized outside the
-     * \ref lsb_geteventrecbyline function.
-     * see none
-     */
-
-    public static native int lsb_geteventrecbyline(String line, eventRec logRec);
-/* Retain lsb_connect for now */
-
-    public static int lsb_connect(int a) {
-        return lsb_rcvconnect();
-    }
-
-    public static native int lsb_rcvconnect();
-
-    public static native int lsb_sndmsg(lsbMsgHdr lsbMsgHdr1, String string1, int int1);
-
-    public static native int lsb_rcvmsg(lsbMsgHdr lsbMsgHdr1, Pointer stringArray1, int int1);
-
-    /**
-     * \page  lsb_runjob lsb_runjob
-     * Starts a batch job immediately on a set of specified host().
-     * \ref lsb_runjob starts a batch job immediately on a set of specified host().
-     * The job must have been submitted and is in PEND or FINISHED status. Only
-     * the LSF administrator or the owner of the job can start the job. If the
-     * options is set to RUNJOB_OPT_NOSTOP, then the job will not be suspended by
-     * the queue's RUNWINDOW,loadStop and STOP_COND and the hosts' RUNWINDOW and
-     * loadStop conditions. By default, these conditions apply to the job as do
-     * to other normal jobs.
-     * <p/>
-     * Any program using this API must be setuid to root
-     * if LSF_AUTH is not defined in the lsf.conf file.
-     * <p/>
-     * <b>\#include <lsf/lsbatch.h>
-     * <p/>
-     * int lsb_runjob(runJobRequest.ByReference runJobRequest)</b>
-     *
-     * @param runJobRequest The job-starting request.
-     *                      <p/>
-     *                      <b>Data Structures:</b>
-     *                      \par
-     *                      runJobRequest
-     *                      <p/>
-     *                      <b>Define Statements:</b>
-     *                      \par
-     *                      \ref runjob_option
-     * @return int:-1 \n
-     *         Function failed.
-     *         <p/>
-     *         \b Errors:
-     *         \par
-     *         If the function fails, lsberrno is set to indicate the error.
-     *         <p/>
-     *         <b>Equivalent line commands:</b>
-     *         \par
-     *         brun
-     *         <p/>
-     *         \b Files:
-     *         \par
-     *         ${LSF_ENVDIR:-/etc}/lsf.conf
-     * see none
-     */
-    public static native int lsb_runjob(runJobRequest runJobRequest);
-
-/* API for job group */
-
-    public static native int lsb_addjgrp(jgrpAdd jgrpAdd1, Pointer jgrpReply1);
-
-    public static native int lsb_modjgrp(jgrpMod jgrpMod1, Pointer jgrpReply1);
-
-    public static native int lsb_holdjgrp(String string1, int int1, Pointer jgrpReply1);
-
-    public static native int lsb_reljgrp(String string1, int int1, Pointer jgrpReply1);
-
-    public static native int lsb_deljgrp(String string1, int int1, Pointer jgrpReply1);
-
-    public static native int lsb_deljgrp_ext(jgrpCtrl jgrpCtrl1, Pointer jgrpReply1);
-
-    public static native jgrp.ByReference lsb_listjgrp(IntByReference int1);
-
-    public static native serviceClass.ByReference lsb_serviceClassInfo(IntByReference int1);
-
-/* API for Application Encapsulation */
-
-    public static native appInfoEnt.ByReference lsb_appInfo(IntByReference int1);
-
-    public static native void lsb_freeAppInfoEnts(int int1, appInfoEnt appInfoEnt1);
-
-/* routine to convert the job id to string */
-
-    public static native String lsb_jobid2str(long long1);
-
-    public static native String lsb_jobid2str_r(long long1, byte[] byte1);
-
-    public static native String lsb_jobidinstr(long long1);
-/* routine to compose and decompose 64bit jobId */
-
-    public static native void jobId32To64(LongByReference long1, int int1, int int2);
-
-    public static native void jobId64To32(long long1, IntByReference int1, IntByReference int2);
-/* API for job attribute operations */
-
-    public static native int lsb_setjobattr(int int1, jobAttrInfoEnt jobAttrInfoEnt1);
-
-/* API for remote task execution */
-
-    public static native long lsb_rexecv(int int1, Pointer stringArray1, Pointer stringArray2, IntByReference int2, int int3);
-
-
-    public static interface lsb_catchCallback extends Callback {
-        int invoke(Pointer pointer);
-    }
-
-    public static native int lsb_catch(String string1, lsb_catchCallback callback);
-
-    public static native void lsb_throw(String string1, Pointer pointer1);
-
-/* API for job external message */
-
-    /**
-     *  \page lsb_postjobmsg lsb_postjobmsg
-     *  \brief Sends messages and data posted to a job.
-     *
-     *  Use \ref lsb_postjobmsg to post a message and data to a job, open a TCP
-     *  connection, and transfer attached message and data from the mbatchd. Use
-     *  \ref lsb_readjobmsg to display messages and copy data files posted by
-     *  \ref lsb_postjobmsg.
-     *
-     *  While you can post multiple messages and attached data files to a job,
-     *  you must call \ref lsb_postjobmsg for each message and attached data file
-     *  you want to post. By default, \ref lsb_postjobmsg posts a message to position
-     *  0 of the message index (msgId) (see PARAMETERS) of the specified job.
-     *  To post additional messages to a job, call \ref lsb_postjobmsg and increment
-     *  the message index.
-     *
-     *  \ref lsb_readjobmsg reads posted job messages by their
-     *  position in the message index.
-     *
-     *  If a data file is attached to a message and the flag EXT_ATTA_POST is set,
-     *  use the JOB_ATTA_DIR parameter in lsb.params(5) to specify the directory
-     *  where attachment data fies are saved. The directory must have at least 1MB
-     *  of free space.The mbatchd checks for available space in the job attachment
-     *  directory before transferring the file.
-     *
-     *  Use the MAX_JOB_ATTA_SIZE parameter in lsb.params(5) to set a maximum size
-     *  for job message attachments.
-     *
-     *  Users can only send messages and data from their own jobs. Root and LSF
-     *  administrators can also send messages of jobs submtted by other users, but
-     *  they cannot attach data files to jobs owned by other users.
-     *
-     *  You can post messages and data to a job until it is cleaned from the system.
-     *  You cannot send messages and data to finished or exited jobs.
-     *
-     *  <b>\#include <lsf/lsbatch.h> \n
-     *     \#include <time.h>
-     *
-     *  int lsb_postjobmsg(jobExternalMsgReq.ByReference jobExternalMsg,
-     *                    String filename)</b>
-     *
-     *  @param jobExternalMsg This structure contains the information required to
-     *  define an external message of a job.
-     *  @param filename Name of attached data file. If no file is attached, use null.
-     *
-     *  <b>Data Structures:</b>
-     *  \par
-     *  \ref jobExternalMsgReq
-     *
-     *  <b>Define Statements:</b>
-     *  \par
-     *  \ref external_msg_post
-     *
-     *  @return int:value \n
-     *  The successful function returns a socket number.
-     * return int:0 \n
-     *  The EXT_ATTA_POST bit of options is not set or there is no attached data.
-     *  return int:-1 \n
-     *  The function failed.
-     *
-     *  \b Errors:
-     *  \par
-     *  If the function fails, lserrno is set to indicate the error.
-     *
-     *  <b>Equivalent line command:</b>
-     *  \par
-     *  bpost
-     *
-     *  \b Files:
-     *  \par
-     *  $LSB_CONFDIR/cluster_name/configdir/lsb.params
-     *  \n $JOB_ATTA_DIR
-     *  \n $LSB_SHAREDIR/info
-     *
-     * #see \ref lsb_readjobmsg
-     *
-     */
-
-    public static native int lsb_postjobmsg(jobExternalMsgReq jobExternalMsg, String filename);
-    /**
-     *  \page lsb_readjobmsg lsb_readjobmsg
-     *  \brief Reads messages and data posted to a job.
-     *
-     *  Use \ref lsb_readjobmsg to open a TCP connection, receive attached messages and
-     *  data from the mbatchd, and display the messages posted by \ref lsb_postjobmsg.
-     *
-     *  By default, \ref lsb_readjobmsg displays the message "no description" or the
-     *  message at index position 0 of the specified job. To read other messages,
-     *  choose another index position. The index is populated by \ref lsb_postjobmsg.
-     *
-     *  If a data file is attached to a message and the flag EXT_ATTA_READ is set,
-     *  \ref lsb_readjobmsg gets the message and copies its data file to the default
-     *  directory JOB_ATTA_DIR, overwriting the specified file if it already exists.
-     *  If there is no file attached, the system reports an error.
-     *
-     *  Users can only read messages and data from their own jobs. Root and LSF
-     *  administrators can also read messages of jobs submtted by other users,
-     *  but they cannot read data files attached to jobs owned by other users.
-     *
-     *  You can read messages and data from a job until it is cleaned from the
-     *  system. You cannot read messages and data from done or exited jobs.
-     *
-     *  <b>\#include <lsf/lsbatch.h> \n
-     *  \#include <time.h> \n
-     *  int lsb_readjobmsg(jobExternalMsgReq.ByReference jobExternalMsg,
-     *          jobExternalMsgReply.ByReference jobExternalMsgReply)</b>
-     *
-     *  @param jobExternalMsg the information required to define an external
-     *  message of a job.
-     *  @param jobExternalMsgReply the information required to define an
-     *  external message reply.
-     *
-     *  <b>Data Structures:</b>
-     *  \par
-     *  jobExternalMsgReq
-     *  \n jobExternalMsgReply
-     *
-     *  <b>Define Statements:</b>
-     *  \par
-     *  \ref external_msg_processing
-     *  \n \ref ext_data_status
-     *
-     *  @return int:value \n
-     *  The successful function returns a socket number.
-     *  return int:0 \n
-     *  The EXT_ATTA_READ bit of options is not set or there is no
-     *  attached data.
-     *  return int:-1 \n
-     *  The function failed.
-     *
-     *  \b Errors:
-     *  \par
-     *  If the function fails, lserrno is set to indicate the error.
-     *
-     *  <b>Equivalent line commands:</b>
-     *  \par
-     *  bread
-     *
-     *  <b>Files:</b>
-     *  \par
-     *  $LSB_CONFDIR/cluster_name/configdir/lsb.params
-     *  \n $JOB_ATTA_DIR
-     *  \n $LSB_SHAREDIR/info
-     * #see \ref lsb_postjobmsg
-     */
-
-    public static native int lsb_readjobmsg(jobExternalMsgReq jobExternalMsg, jobExternalMsgReply jobExternalMsgReply);
-
-/* API for symphony job information update in bulk mode */
-
-    public static native int lsb_bulkJobInfoUpdate(symJobStatusUpdateReqArray symJobStatusUpdateReqArray1, symJobStatusUpdateReplyArray symJobStatusUpdateReplyArray1);
-
-/* API for advance reservation */
-
-    /**
-     * \page lsb_addreservation lsb_addreservation
-     * \brief Makes an advance reservation.
-     * <p/>
-     * Use \ref lsb_addreservation to send a reservation request to mbatchd. If
-     * mbatchd grants the reservation, it issues the reservation ID. If mbatchd
-     * rejects the request, it issues null as the reservation ID.
-     * <p/>
-     * <b>\#include <lsf/lsbatch.h>
-     * <p/>
-     * int lsb_addreservation (addRsvRequest.ByReference request, String rsvId)</b>
-     *
-     * @return int:-1 \n
-     *         The reservation failed.
-     *         <p/>
-     *         \b Errors:
-     *         \par
-     *         On failure, lsberrno is set to indicate the error.
-     *         <p/>
-     *         <b>Equivalent line commands:</b>
-     *         \par
-     *         brsvadd
-     *         <p/>
-     *         <b>Files:</b>
-     *         \par
-     *         none
-     * @param request The reservation request
-     * @param rsvId Reservation ID returned from mbatchd. If the reservation
-     * fails, this is null. The
-     * memory for rsvid is allocated by the caller.
-     * <p/>
-     * <b>Data Structures:</b>
-     * \par
-     * addRsvRequest
-     * \n _rsvExecCmd_t
-     * \n _rsvExecEvent_t
-     * <p/>
-     * <b>Define Statements:</b>
-     * \par
-     * \ref reservation_option
-     * #see \ref lsb_removereservation
-     * #see \ref lsb_modreservation
-     * #see \ref lsb_reservationinfo
-     */
-    public static native int lsb_addreservation(addRsvRequest request, String rsvId);
-
-    /**
-     * \page lsb_removereservation lsb_removereservation
-     * \brief Removes a reservation.
-     * <p/>
-     * Use \ref lsb_removereservation to remove a reservation. mbatchd removes the
-     * reservation with the specified reservation ID.
-     * <p/>
-     * <b>\#include <lsf/lsbatch.h>
-     * <p/>
-     * int lsb_removereservation(String rsvId)</b>
-     *
-     * @return int:-1 \n
-     *         The reservation removal failed.
-     *         <p/>
-     *         \b Errors:
-     *         \par
-     *         On failure, lsberrno is set to indicate the error.
-     *         <p/>
-     *         <b>Equivalent line commands:</b>
-     *         \par
-     *         brsvdel
-     *         <p/>
-     *         <b>Files:</b>
-     *         \par
-     *         none
-     * @param rsvId Reservation ID of the reservation that you wish to remove.
-     * <p/>
-     * <b>Data Structures:</b>
-     * \par
-     * none
-     * <p/>
-     * <b>Define Statements:</b>
-     * \par
-     * none
-     * #see \ref lsb_addreservation
-     * #see \ref lsb_modreservation
-     * #see \ref lsb_reservationinfo
-     */
-    public static native int lsb_removereservation(String rsvId);
-
-    /**
-     * \page lsb_reservationinfo lsb_reservationinfo
-     * \brief Retrieve reservation information to display active advance reservations.
-     * <p/>
-     * Use \ref lsb_reservationinfo to retrieve reservation information from mbatchd.
-     * This function allocates memory that the caller should free.
-     * <p/>
-     * If the \ref lsb_reservationinfo function succeeds, it returns the reservation
-     * records pertaining to a particular reservation ID (rsvId) as an array of
-     * rsvInfoEnt structs.
-     * <p/>
-     * If rsvId is null, all reservation information will be returned. If a
-     * particular rsvId  is specified:
-     * \li If found, the reservation record pertaining to a particular rsvId is
-     * returned
-     * \li If not found, the number of reservation records is set to zero and
-     * the lsberrno  is set appropiately
-     * <p/>
-     * <b>\#include <lsf/lsbatch.h>
-     * <p/>
-     * rsvInfoEnt.ByReference lsb_reservationinfo(String rsvId, IntByReference numEnts,
-     * int options)</b>
-     *
-     * @param options The parameter options is currently ignored.
-     *                <p/>
-     *                <b>Data Structures:</b>
-     *                \par
-     *                rsvInfoEnt
-     *                \n hostRsvInfoEnt
-     *                <p/>
-     *                <b>Define Statements:</b>
-     *                \par
-     *                none
-     * @return null
-     *         \n Function failed.
-     *         <p/>
-     *         \b Errors:
-     *         \par
-     *         On failure, lsberrno is set to indicate the error.
-     *         <p/>
-     *         <b>Equivalent line commands:</b>
-     *         \par
-     *         brsvs
-     *         <p/>
-     *         <b>Files:</b>
-     *         \par
-     *         none
-     * @param rsvId Reservation ID of the requested reservation.
-     * @param numEnts Number of reservation entries that mbatchd returns.
-     * #see \ref lsb_addreservation
-     * #see \ref lsb_modreservation
-     * #see \ref lsb_removereservation
-     */
-
-    public static native rsvInfoEnt.ByReference lsb_reservationinfo(String rsvId, IntByReference numEnts, int options);
-
-    public static native int lsb_freeRsvExecCmd(Pointer _rsvExecCmd_tArray1);
-
-    public static native _rsvExecCmd_t.ByReference lsb_dupRsvExecCmd(_rsvExecCmd_t _rsvExecCmd_t1);
-
-    public static native int lsb_parseRsvExecOption(String string1, Pointer _rsvExecCmd_tArray1);
-
-    /**
-     * \page lsb_modreservation lsb_modreservation
-     * \brief Modifies an advance reservation.
-     * <p/>
-     * Use \ref lsb_modreservation to modify an advance reservation. mbatchd receives
-     * the modification request and modifies the reservation with the specified
-     * reservation ID.
-     * <p/>
-     * <b>\#include <lsf/lsbatch.h>
-     * <p/>
-     * int lsb_modreservation(modRsvRequest.ByReference request)</b>
-     *
-     * @return int:-1 \n
-     *         The reservation modification failed.
-     *         <p/>
-     *         \b Errors:
-     *         \par
-     *         On failure, lsberrno is set to indicate the error.
-     *         <p/>
-     *         <b>Equivalent line command:</b>
-     *         \par
-     *         brsvmod
-     *         <p/>
-     *         \b Files:
-     *         \par
-     *         none
-     * @param request modify reservation request.
-     * <p/>
-     * <b>Data Structures:</b>
-     * \par
-     * modRsvRequest
-     * \n addRsvRequest
-     * \n _rsvExecCmd_t
-     * <p/>
-     * <b>Define Statements:</b>
-     * \par
-     * none
-     * #see \ref lsb_addreservation
-     * #see \ref lsb_removereservation
-     * #see \ref lsb_reservationinfo
-     */
-
-    public static native int lsb_modreservation(modRsvRequest request);
-
-/* routines for sorted integer list */
-    /*
-    sortIntList.ByReference  initSortIntList(int);
-    int insertSortIntList(sortIntList.ByReference , int);
-    sortIntList.ByReference  getNextSortIntList(sortIntList.ByReference , sortIntList.ByReference , IntByReference );
-    void freeSortIntList(sortIntList.ByReference );
-    int getMinSortIntList(sortIntList.ByReference , IntByReference );
-    int getMaxSortIntList(sortIntList.ByReference , IntByReference );
-    int getTotalSortIntList(sortIntList.ByReference );
-
-    int updateJobIdIndexFile (String string1, String string1, int);
-    */
-
-/* Structures and routine for obtaining subset of info about jobs
-*  This is being used by Maui integration.
- */
-
-    public static class jobExtschInfoReq extends Structure {
-        public static class ByReference extends jobExtschInfoReq implements Structure.ByReference {}
-        public static class ByValue extends jobExtschInfoReq implements Structure.ByValue {}
-        public jobExtschInfoReq() {}
-        public jobExtschInfoReq(Pointer p) { super(p); read(); }
-
-        public int qCnt;
-        public Pointer queues;
-    }
-
-
-
-    public static class jobExtschInfo extends Structure {
-        public static class ByReference extends jobExtschInfo implements Structure.ByReference {}
-        public static class ByValue extends jobExtschInfo implements Structure.ByValue {}
-        public jobExtschInfo() {}
-        public jobExtschInfo(Pointer p) { super(p); read(); }
-
-        public long jobId;
-        public int status;
-        public NativeLong jRusageUpdateTime;
-        public LibLsf.jRusage runRusage;
-    }
-
-
-
-    public static class jobExtschInfoReply extends Structure {
-        public static class ByReference extends jobExtschInfoReply implements Structure.ByReference {}
-        public static class ByValue extends jobExtschInfoReply implements Structure.ByValue {}
-        public jobExtschInfoReply() {}
-        public jobExtschInfoReply(Pointer p) { super(p); read(); }
-
-        public int jobCnt;
-        public PointerByReference jobs;
-    }
-
-
-
-    public static native int getjobinfo4queues(jobExtschInfoReq jobExtschInfoReq1, jobExtschInfoReply jobExtschInfoReply1);
-
-    public static native void free_jobExtschInfoReply(jobExtschInfoReply jobExtschInfoReply1);
-
-    public static native void free_jobExtschInfoReq(jobExtschInfoReq jobExtschInfoReq1);
-
-/* For RFC 725 */
-
-    public static native String longer_strcpy(String dest, String src);
-
-/* Structures and API for job diagnostics.  These are applicable only if
-*  CONDENSE_PENDING_REASONS is enabled in lsb.params.
- */
-
-    public static class diagnoseJobReq extends Structure {
-        public static class ByReference extends diagnoseJobReq implements Structure.ByReference {}
-        public static class ByValue extends diagnoseJobReq implements Structure.ByValue {}
-        public diagnoseJobReq() {}
-        public diagnoseJobReq(Pointer p) { super(p); read(); }
-
-        public int jobCnt;
-        public LongByReference jobId;
-    }
-
-
-
-    public static native int lsb_diagnosejob(diagnoseJobReq diagnoseJobReq1);
-
-    public static final int SIM_STATUS_RUN = 0x01;
-    public static final int SIM_STATUS_SUSPEND = 0x02;
-
-/* simulator status reply
- */
-
-    public static class simStatusReply extends Structure {
-        public static class ByReference extends simStatusReply implements Structure.ByReference {}
-        public static class ByValue extends simStatusReply implements Structure.ByValue {}
-        public simStatusReply() {}
-        public simStatusReply(Pointer p) { super(p); read(); }
-
-        public int simStatus;
-        public NativeLong curTime;
-    }
-
-
-
-    public static native simStatusReply.ByReference lsb_simstatus();
-
-    public static native void free_simStatusReply(simStatusReply simStatusReply1);
-
-/* batch command options flag for lease */
-    public static final int LSB_HOST_OPTION_EXPORT = 0x1;
-/* bhosts -x option */
-    public static final int LSB_HOST_OPTION_EXCEPT = 0x2;
-/* retrieve hosts that belong to batch partition */
-    public static final int LSB_HOST_OPTION_BATCH = 0x4;
-
-
-/* Display condensed host output */
-    public static final int LSB_HOST_OPTION_CONDENSED = 0x08;
-
-/* error codes, structures and routines for syntax check of RMS external scheduler options */
-
-/*  non-rms option shown up in RMS[] */
-    public static final int RMS_NON_RMS_OPTIONS_ERR = (-1);
-
-/*  nodes and ptile co-exist */
-    public static final int RMS_NODE_PTILE_ERR = (-2);
-
-/*  rails and railmask co-exist */
-    public static final int RMS_RAIL_RAILMASK_ERR = (-3);
-
-/*  nodes is out of range 1..LSB_RMS_MAXNUMNODES */
-    public static final int RMS_NODES_OUT_BOUND_ERR = (-4);
-
-/*  ptile is out of range 1..LSB_RMS_MAXPTILE */
-    public static final int RMS_PTILE_OUT_BOUND_ERR = (-5);
-
-/*  rails is out of range 1..LSB_RMS_MAXNUMRAILS */
-    public static final int RMS_RAIL_OUT_BOUND_ERR = (-6);
-
-/*  railmask syntax error */
-    public static final int RMS_RAILMASK_OUT_BOUND_ERR = (-7);
-
-/*  nodes syntax error */
-    public static final int RMS_NODES_SYNTAX_ERR = (-8);
-
-/*  ptile syntax error */
-    public static final int RMS_PTILE_SYNTAX_ERR = (-9);
-
-/*  rails syntax error */
-    public static final int RMS_RAIL_SYNTAX_ERR = (-10);
-
-/*  railmask syntax error */
-    public static final int RMS_RAILMASK_SYNTAX_ERR = (-11);
-
-/*  base syntax error */
-    public static final int RMS_BASE_SYNTAX_ERR = (-12);
-
-/*  base string too NativeLong*/
-    public static final int RMS_BASE_TOO_LONG = (-13);
-
-/*  >=1 allocation types are specified */
-    public static final int RMS_TOO_MANY_ALLOCTYPE_ERR = (-14);
-
-/*  =1 allocation types are specified */
-    public static final int RMS_NO_LSF_EXTSCHED_Y_ERR = (-15);
-
-/*  error reading env from lsf.conf inside syntax check */
-    public static final int RMS_READ_ENV_ERR = (-20);
-
-/*  memory allocation problems inside syntax check function */
-    public static final int RMS_MEM_ALLOC_ERR = (-21);
-
-/*  [] mis-matched in RMS[] */
-    public static final int RMS_BRACKETS_MISMATCH_ERR = (-22);
-
-    public static interface rmsAllocType_t {
-          public static final int RMS_ALLOC_TYPE_UNKNOWN = 0;
-          public static final int RMS_ALLOC_TYPE_SLOAD = 1;
-          public static final int RMS_ALLOC_TYPE_SNODE = 2;
-          public static final int RMS_ALLOC_TYPE_MCONT = 3;
-    }
-
-
-
-    public static interface rmsTopology_t {
-          public static final int RMS_TOPOLOGY_UNKNOWN = 0;
-          public static final int RMS_TOPOLOGY_PTILE = 1;
-          public static final int RMS_TOPOLOGY_NODES = 2;
-    }
-
-
-
-    public static interface rmsFlags_t {
-          public static final int RMS_FLAGS_UNKNOWN = 0;
-          public static final int RMS_FLAGS_RAILS = 1;
-          public static final int RMS_FLAGS_RAILMASK = 2;
-    }
-
-
-
-    public static class rmsextschedoption extends Structure {
-        public static class ByReference extends rmsextschedoption implements Structure.ByReference {}
-        public static class ByValue extends rmsextschedoption implements Structure.ByValue {}
-        public rmsextschedoption() {}
-        public rmsextschedoption(Pointer p) { super(p); read(); }
-
-        public /*rmsAllocType_t*/ int alloc_type;
-        public /*rmsTopology_t*/ int topology;
-        public int topology_value;
-        public int set_base;
-        public byte[] base = new byte[LibLsf.MAXHOSTNAMELEN];
-        public /*rmsFlags_t*/ int flags;
-        public int flags_value;
-    }
-
-
-
-    public static native int parseRmsOptions(String string1, rmsextschedoption rmsextschedoption1, LibLsf.config_param config_param1);
-
-/* Stream interface.
-*  By default the stream lsb.stream is located in a subdirectory
-*  stream of the cluster working directory i.e.:
-*  work/<clustername>/logdir/stream and the size of
-*  lsb.stream is 1024MB
- */
-    public static final int MBD_DEF_STREAM_SIZE = (1024 * 1024 * 1024);
-
-/* default maximum number of backup stream.utc file */
-    public static final int DEF_MAX_STREAM_FILE_NUMBER = 10;
-
-    /**
-     * \brief  Stream interface.
-     */
-    public static class lsbStream extends Structure {
-        public static class ByReference extends lsbStream implements Structure.ByReference {}
-        public static class ByValue extends lsbStream implements Structure.ByValue {}
-        public lsbStream() {}
-        public lsbStream(Pointer p) { super(p); read(); }
-
-        public static interface trsFunc extends Callback {
-            int invoke(String string1);
-        }
-
-        /**
-         * < Pointer to full path to the stream file
-         */
-        public String streamFile;
-
-        /**
-         * < Max size of the stream file
-         */
-        public int maxStreamSize;
-
-        /**
-         * < Max number of backup stream files
-         */
-        public int maxStreamFileNum;
-
-        /**
-         * < Set to 1 to enable trace of the stream
-         */
-        public int trace;
-
-        /**
-         * < Pointer to a function that the library invokes, passing a trace buffer.
-         */
-        public trsFunc trs;
-    }
-
-
-
-     /**//*
-     * \page lsb_openstream  lsb_openstream
-     * \brief Open and create an lsb_stream file.
-     * <p/>
-     * \ref lsb_openstream opens the streamFile .
-     * <p/>
-     * This API function is inside liblsbstream.so.
-     * <p/>
-     * <b>\#include <lsf/lsbatch.h>
-     * <p/>
-     * int lsb_openstream(lsbStream.ByReference params)</b>
-     *
-     * @return int:-1 or null \n
-     *         The function failed.
-     *         <p/>
-     *         \b Errors:
-     *         \par
-     *         If the function fails, lsberrno is set to indicate the error.
-     *         <p/>
-     *         <b>Equivalent line command:</b>
-     *         \par
-     *         none
-     *         <p/>
-     *         <b>Files:</b>
-     *         \par
-     *         $LSB_CONFDIR/cluster_name/configdir/lsb.params
-     * @param params Parameters.
-     * <p/>
-     * <b>Data Structures:</b>
-     * \par
-     * \ref lsbStream
-     * <p/>
-     * <b>Define Statements:</b>
-     * \par
-     * none
-     * #see \ref lsb_closestream
-     * #see \ref lsb_readstreamline
-     * #see \ref lsb_writestream
-     * #see \ref lsb_readstream
-     * #see \ref lsb_streamversion
-     */
-    // NOTE: Not in libbat
-    //public static native int lsb_openstream(lsbStream params);
-
-     /**//*
-     * \page lsb_closestream lsb_closestream
-     * \brief Close an lsb_stream file.
-     * <p/>
-     * \ref lsb_closestream closes the streamFile.
-     * <p/>
-     * This API function is inside liblsbstream.so.
-     * <p/>
-     * <b>\#include <lsf/lsbatch.h>
-     * <p/>
-     * int lsb_closestream(String config)</b>
-     *
-     * @return int:-1 \n
-     *         The function failed.
-     *         <p/>
-     *         \b Errors:
-     *         \par
-     *         If the function fails, lsberrno is set to indicate the error.
-     *         <p/>
-     *         <b>Equivalent line commands:</b>
-     *         \par
-     *         none
-     *         <p/>
-     *         <b>Files:</b>
-     *         \par
-     *         $LSB_CONFDIR/cluster_name/configdir/lsb.params
-     * @param config Pointer to the handle of the stream file.
-     * <p/>
-     * <b>Data Structures:</b>
-     * \par
-     * none
-     * <p/>
-     * <b>Define Statements:</b>
-     * \par
-     * none
-     * #see \ref lsb_openstream
-     * #see \ref lsb_readstreamline
-     * #see \ref lsb_writestream
-     * #see \ref lsb_readstream
-     * #see \ref lsb_streamversion
-     */
-    // NOTE: Not in libbat
-    //public static native int lsb_closestream(String config);
-
-     /**//*
-     * \page lsb_streamversion lsb_streamversion
-     * \brief Version of the current event type supported by mbatchd.
-     * <p/>
-     * \ref lsb_streamversion returns the event version number of mbatchd, which is the
-     * version of the events to be written to the stream file. This API function
-     * is inside liblsbstream.so.
-     * <p/>
-     * <b>\#include <lsf/lsbatch.h>
-     * <p/>
-     * String  lsb_streamversion()</b>
-     *
-     * param void \n
-     *             <p/>
-     *             <b>Data Structures:</b>
-     *             \par
-     *             none
-     *             <p/>
-     *             <b>Define Statements:</b>
-     *             \par
-     *             none
-     * @return null
-     *         \n Function failed.
-     *         <p/>
-     *         \b Errors:
-     *         \par
-     *         If the function fails, lsberrno is set to indicate the error.
-     *         <p/>
-     *         <b>Equivalent line commands:</b>
-     *         \par
-     *         none
-     *         <p/>
-     *         <b>Files:</b>
-     *         \par
-     *         $LSB_CONFDIR/cluster_name/configdir/lsb.params
-     * #see \ref lsb_closestream
-     * #see \ref lsb_geteventrec
-     * #see \ref lsb_openstream
-     * #see \ref lsb_puteventrec
-     * #see \ref lsb_readstreamline
-     * #see \ref lsb_writestream
-     * #see \ref lsb_readstream
-     */
-    // NOTE: Not in libbat
-    //public static native String lsb_streamversion();
-
-     /**//*
-     * \page lsb_writestream lsb_writestream
-     * \brief Writes a current version eventRec structure into the lsb_stream file.
-     * <p/>
-     * \ref lsb_writestream writes an eventrRec to the open streamFile.
-     * This API function is inside liblsbstream.so.
-     * <p/>
-     * <b>\#include <lsf/lsbatch.h>
-     * <p/>
-     * int lsb_writestream(eventRec.ByReference logPtr)</b>
-     *
-     * @return int:-1 \n
-     *         The function failed.
-     *         <p/>
-     *         \b Errors:
-     *         \par
-     *         If the function fails, lsberrno is set to indicate the error.
-     *         <p/>
-     *         <b>Equivalent line commands:</b>
-     *         \par
-     *         none
-     *         <p/>
-     *         <b>Files:</b>
-     *         \par
-     *         $LSB_CONFDIR/cluster_name/configdir/lsb.params
-     * @param logPtr Pointer to the eventRec structure.
-     * \n see \ref lsb_geteventrec for details on the eventRec structure.
-     * <p/>
-     * <b>Data Structures:</b>
-     * \par
-     * eventRec
-     * \n eventLog
-     * \n xFile
-     * \n jobAttrSetLog
-     * \n logSwitchLog
-     * \n dataLoggingLog
-     * \n jgrpNewLog
-     * \n jgrpCtrlLog
-     * \n jgrpStatusLog
-     * \n jobNewLog
-     * \n jobModLog
-     * \n jobStartLog
-     * \n jobStartAcceptLog
-     * \n jobExecuteLog
-     * \n jobStatusLog
-     * \n sbdJobStatusLog
-     * \n sbdUnreportedStatusLog
-     * \n jobSwitchLog
-     * \n jobMoveLog
-     * \n chkpntLog
-     * \n jobRequeueLog
-     * \n jobCleanLog
-     * \n jobExceptionLog
-     * \n sigactLog
-     * \n migLog
-     * \n signalLog
-     * \n queueCtrlLog
-     * \n hostCtrlLog
-     * \n hgCtrlLog
-     * \n mbdStartLog
-     * \n mbdDieLog
-     * \n unfulfillLog
-     * \n jobFinishLog
-     * \n loadIndexLog
-     * \n calendarLog
-     * \n jobForwardLog
-     * \n jobAcceptLog
-     * \n statusAckLog
-     * \n jobMsgLog
-     * \n jobMsgAckLog
-     * \n jobOccupyReqLog
-     * \n jobVacatedLog
-     * \n jobForceRequestLog
-     * \n jobChunkLog
-     * \n jobExternalMsgLog
-     * \n rsvRes
-     * \n rsvFinishLog
-     * \n cpuProfileLog
-     * \n jobRunRusageLog
-     * \n slaLog
-     * \n perfmonLogInfo
-     * \n perfmonLog
-     * \n taskFinishLog
-     * \n eventEOSLog
-     * \n jobResizeNotifyStartLog
-     * \n jobResizeNotifyAcceptLog
-     * \n jobResizeNotifyDoneLog
-     * \n jobResizeReleaseLog
-     * \n jobResizeCancelLog
-     * \n jobResizeLog
-     * \n jRusage
-     * <p/>
-     * <b>Define Statements:</b>
-     * \par
-     * \ref event_types
-     * \n \ref defs_lsb_XF_OP
-     * \n \ref jobgroup_controltypes
-     * \n \ref signal_action
-     * #see \ref lsb_closestream
-     * #see \ref lsb_geteventrec
-     * #see \ref lsb_openstream
-     * #see \ref lsb_puteventrec
-     * #see \ref lsb_readstreamline
-     * #see \ref lsb_streamversion
-     * #see \ref lsb_readstream
-     */
-    // NOTE: Not in libbat
-    //public static native int lsb_writestream(eventRec logPtr);
-
-     /**//*
-     * \page lsb_readstream lsb_readstream
-     * \brief Reads a current version eventRec structure from the lsb_stream file.
-     * <p/>
-     * \ref lsb_readstream reads an eventrRec from the open streamFile.
-     * <p/>
-     * <b>\#include <lsf/lsbatch.h>
-     * <p/>
-     * eventRec lsb_readstream(IntByReference nline)</b>
-     *
-     * @return int:-1 \n
-     *         The function failed.
-     *         <p/>
-     *         \b Errors:
-     *         \par
-     *         On failure, lsberrno is set to indicate the error.
-     *         <p/>
-     *         <b>Equivalent line commands:</b>
-     *         \par
-     *         none
-     *         <p/>
-     *         <b>Files:</b>
-     *         \par
-     *         $LSB_CONFDIR/cluster_name/configdir/lsb.params
-     * @param nline Line number in the stream file to be read.
-     * <p/>
-     * <b>Data Structures:</b>
-     * \par
-     * eventRec
-     * <p/>
-     * <b>Define Statements:</b>
-     * \par
-     * none
-     * #see \ref lsb_closestream
-     * #see \ref lsb_geteventrec
-     * #see \ref lsb_openstream
-     * #see \ref lsb_puteventrec
-     * #see \ref lsb_readstreamline
-     * #see \ref lsb_streamversion
-     * #see \ref lsb_writestream
-     */
-    // NOTE: Not in libbat
-    //public static native eventRec.ByReference lsb_readstream(IntByReference nline);
-
-     /**//*
-     * \page lsb_readstreamline lsb_readstreamline
-     * \brief Reads a current version eventRec structure from the lsb_stream file.
-     * <p/>
-     * \ref lsb_readstreamline reads an eventrRec from the open streamFile
-     * <p/>
-     * <b>\#include <lsf/lsbatch.h>
-     * <p/>
-     * eventRec.ByReference lsb_readstreamline(String line)</b>
-     *
-     * @return null \n
-     *         The function failed.
-     *         <p/>
-     *         \b Errors:
-     *         \par
-     *         On failure, lsberrno is set to indicate the error.
-     *         <p/>
-     *         <b>Equivalent line commands:</b>
-     *         \par
-     *         none
-     *         <p/>
-     *         <b>Files:</b>
-     *         \par
-     *         $LSB_CONFDIR/cluster_name/configdir/lsb.params
-     * @param line Line number in the stream file to be read.
-     * See \ref lsb_puteventrec and \ref lsb_geteventrec for details on the eventRec structure.
-     * Additionally, there are three additional event types supported.
-     * <p/>
-     * <b>Data Structures:</b>
-     * \par
-     * eventRec
-     * <p/>
-     * <b>Define Statements:</b>
-     * \par
-     * none
-     * #see \ref lsb_closestream
-     * #see \ref lsb_geteventrec
-     * #see \ref lsb_openstream
-     * #see \ref lsb_puteventrec
-     * #see \ref lsb_readstream
-     * #see \ref lsb_streamversion
-     * #see \ref lsb_writestream
-     */
-    // NOTE: Not in libbat
-    //public static native eventRec.ByReference lsb_readstreamline(String line);
-
-    public static final int NUM_EXITRATE_TYPES = 4;
-
-/* options for exit rate type */
-
-
-/* all exited jobs */
-    public static final int JOB_EXIT = 0x01;
-
-/* jobs failed to start due to initialization problem on execution host*/
-    public static final int JOB_INIT = 0x02;
-
-/* jobs failed to start due to HPC specific initialization problem on execution host*/
-    public static final int HPC_INIT = 0x04;
-
-/* jobs exited not related to reasons set by LSF */
-    public static final int JOB_EXIT_NONLSF = 0x08;
-
-    /**
-     * \brief  APS factor information
-     */
-    public static class apsFactorInfo extends Structure {
-        public static class ByReference extends apsFactorInfo implements Structure.ByReference {}
-        public static class ByValue extends apsFactorInfo implements Structure.ByValue {}
-        public apsFactorInfo() {}
-        public apsFactorInfo(Pointer p) { super(p); read(); }
-
-
-        /**
-         * <  Name
-         */
-        public String name;
-
-        /**
-         * <  Weight
-         */
-        public float weight;
-
-        /**
-         * <  Limit
-         */
-        public float limit;
-
-        /**
-         * <  Grace period
-         */
-        public int gracePeriod;
-    }
-
-
-
-/* options for job group delete */
-
-/* delete the specified user's all empty job groups*/
-    public static final int JGRP_DEL_USER_GROUPS = 0x01;
-
-/* delete one job group's all empty children groups including itself*/
-    public static final int JGRP_DEL_CHILD_GROUPS = 0x02;
-
-/* delete all empty job groups */
-    public static final int JGRP_DEL_ALL = 0x04;
-
-    /**
-     * ------------------------------------------------------------------------
-     * lsb_getallocFromHhostfile
-     * <p/>
-     * Read the specified hostfile and return the host list. If path is null
-     * then read the hostfile specified by LSB_DJOB_HOSTFILE. The hostfile
-     * is assumed to be in simple format of one host per line. A host
-     * can be repeated.
-     * <p/>
-     * This function will allocate the memory for hostlist.
-     * It is the responsibility of the caller to free the lists when no longer
-     * needed. On success hostlist will be a list of strings.
-     * Before freeing hostlist the individual
-     * elements must be freed.
-     * <p/>
-     * Parameters:
-     * @param hostlist  [OUT]
-     * @param path      [IN]    path to hostfile, if null check LSB_DJOB_HOSTFILE
-     * <p/>
-     * @return Value:
-     * >0    success, length of hostlist not including the null last element
-     * -1    failure, lsberrno is set
-     * -------------------------------------------------------------------------
-     */
-    public static native int lsb_getallocFromHostfile(Pointer hostlist, String path);
-
-
-    /**
-     *  \addtogroup defs_lsb_launch defs_lsb_launch
-     *  lsb_launch() Valid options are:
-     */
-
-    /**
-     * < Disable standard input and redirect input from the special  device /dev/null. This is equivalent to blaunch -n.
-     */
-    public static final int LSF_DJOB_DISABLE_STDIN = 0x01;
-
-    /**
-     * < Replace existing enviornment variable values with envp.
-     */
-    public static final int LSF_DJOB_REPLACE_ENV = 0x02;
-
-    /**
-     * < Non-blocking mode; the parallel job does not wait once all tasks start.  This forces \ref lsb_launch not to wait for its tasks to finish.
-     */
-    public static final int LSF_DJOB_NOWAIT = 0x04;
-
-    /**
-     * < Display standard error messages with a corresponding host name where the message was generated.Cannot be specified with LSF_DJOB_NOWAIT.
-     */
-    public static final int LSF_DJOB_STDERR_WITH_HOSTNAME = 0x08;
-
-    /**
-     * < Display standard output messages with a corresponding host name  where the message was generated. Cannot be specified with LSF_DJOB_NOWAIT.
-     */
-    public static final int LSF_DJOB_STDOUT_WITH_HOSTNAME = 0x10;
-
-    /**
-     * < Use user's login shell to  launch tasks
-     */
-    public static final int LSF_DJOB_USE_LOGIN_SHELL = 0x20;
-
-    /**
-     * < Use /bin/sh to launch tasks
-     */
-    public static final int LSF_DJOB_USE_BOURNE_SHELL = 0x40;
-
-    /**
-     * < Separate stderr from stdout
-     */
-    public static final int LSF_DJOB_STDERR = 0x80;
-
-/*
-* -------------------------------------------------------------------------
-*  lsb_launch (where, argv, options, envp)
-*
-*  DESCRIPTION:
-*
-*    The specified command (i.e., argv) will be launched on the remote
-*    nodes in parallel
-*
-*  ARGUMENTS:
-*    where [IN]:
-*        A null terminated list of hosts.
-*        If this parameter is null then the environment variable
-*        LSB_MCPU_HOSTS will be used.
-*        A task will be launched for each slot.
-*    options [IN]:
-*        options value obtained by ORing
-*    Envp [IN]:
-*        A Null terminated list of environment variables (in 'variable=value'
-*        format).
-*        The environment to set for each task.
-*        If this parameter is null then the same environment used to start
-*        the first task will be used.
-*        If non-null, it is appended to the environment used for the
-*        first task.
-*        If LSF_DJOB_REPLACE_ENV is specified, Envp entries will overwrite
-*        existing values except those LSF needs.
-*
-*  RETURN:
-*    < 0 on failure
-*    > 0 upon success (i.e., number of tasks issued)
-*
- */
-
-    /**
-     * \page lsb_launch lsb_launch
-     * \brief  Launch commands on remote hosts in parallel.
-     * <p/>
-     * \ref lsb_launch is a synchronous API call to allow source level integration with
-     * vendor MPI implementations. This API will launch the specified command (argv)
-     * on the remote nodes in parallel.
-     * \n LSF must be installed before integrating your MPI implementation with
-     * \ref lsb_launch. The \ref lsb_launch API requires the full set of liblsf.so,
-     * libbat.so (or liblsf.a, libbat.a).
-     * <p/>
-     * <b>\#include <lsf/lsbatch.h>
-     * <p/>
-     * int lsb_launch (String[] where, String[] argv, int userOptions, String[] envp)</b>
-     *
-     * @param userOptions [IN] Options to modify the behavior of \ref lsb_launch
-     *                    Multiple option values can be specified. For example option values can be
-     *                    separated by OR (|):
-     *                    \n \ref lsb_launch (where, argv, LSF_DJOB_REPLACE_ENV | LSF_DJOB_DISABLE_STDIN, envp);
-     * @return < 0 \n
-     *         Function failed.
-     *         <p/>
-     *         \b Errors:
-     *         \par
-     *         If the function fails, lsberrno is set to indicate the error.
-     *         <p/>
-     *         <b>Equivalent line command:</b>
-     *         \par
-     *         blaunch
-     *         <p/>
-     *         <b>Files:</b>
-     *         \par
-     *         none
-     * @param where [IN] A null-terminated list of hosts. A task will be launched
-     * for each slot.If this parameter is null then the environment variable
-     * LSB_MCPU_HOSTS will be used.
-     * @param argv [IN] The command to be executed
-     * @param envp [IN] A null-terminated list of environment variables specifying
-     * the environment to set for each task.If envp is null, \ref lsb_launch uses the
-     * same environment used to start the first task on the first execution host.
-     * If non-null, envp values are appended to the environment used for the first
-     * task.If the LSF_DJOB_REPLACE_ENV option is specified, envp entries will
-     * overwrite all existing environment values except those needed by LSF.
-     * <p/>
-     * <b>Data Structures:</b>
-     * \par
-     * none
-     * <p/>
-     * <b>Define Statements:</b>
-     * \par
-     * \ref defs_lsb_launch
-     * see none
-     */
-    public static native int lsb_launch(Pointer where, Pointer argv, int userOptions, Pointer envp);
-
-/*
-* -------------------------------------------------------------------------
-*  lsb_getalloc
-*
-*  This function will allocate the memory for hostlist.
-*
-*  It is the responsibility of the caller to free the lists when no longer
-*  needed. On success hostlist will be a list of strings.
-*  Before freeing hostlist the individual
-*  elements must be freed.
-*
-*  Parameters:
-*     hostlist     [OUT]     null terminated list of hosts
-*
-*  Returns:
-*    >0    success, length of hostlist not including the null last element
-*    -1    failure, lsberrno is set
-*
-* -------------------------------------------------------------------------
- */
-
-    /**
-     * \page lsb_getalloc lsb_getalloc
-     * \brief Allocates memory for a host list to be used for launching parallel
-     * tasks through blaunch and the \ref lsb_launch API.
-     * <p/>
-     * It is the responsibility of the caller to free the host list when it is
-     * no longer needed.On success, the host list will be a list of strings.
-     * Before freeing host list, the individual elements must be freed.
-     * <p/>
-     * An application using the \ref lsb_getalloc API is assumed to be part of an
-     * LSF job, and that LSB_MCPU_HOSTS is set in the environment.
-     * <p/>
-     * <b>\#include <lsf/lsbatch.h>
-     * <p/>
-     * int lsb_getalloc(String[][] hostlist)</b>
-     *
-     * @return < 0 \n
-     *         Function failed.
-     *         <p/>
-     *         \b Errors:
-     *         \par
-     *         If the function fails, lsberrno is set to indicate the error.
-     *         <p/>
-     *         <b>Equivalent line commands:</b>
-     *         \par
-     *         none
-     *         <p/>
-     *         <b>Files:</b>
-     *         \par
-     *         none
-     * @param hostlist [OUT] A null-terminated list of host names
-     * <p/>
-     * <b>Data Structures:</b>
-     * \par
-     * none
-     * <p/>
-     * <b>Define Statements:</b>
-     * \par
-     * none
-     * see none
-     */
-    public static native int lsb_getalloc(Pointer hostlist);
-
-    /**
-     * \page lsb_resize_cancel lsb_resize_cancel
-     * \brief Cancels a pending job resize allocation request.
-     * <p/>
-     * Use \ref lsb_resize_cancel to cancel a pending allocation request for a
-     * resizable job. A running job can only have one pending request at any
-     * particular time. If one request is still pending, additional requests
-     * are rejected with a proper error code.
-     * <p/>
-     * <b>\#include <lsf/lsbatch.h>
-     * <p/>
-     * int lsb_resize_cancel(long jobId);</b>
-     *
-     * @param jobId LSF job ID
-     *              <p/>
-     *              <b>Data Structures:</b>
-     *              \par
-     *              none
-     *              <p/>
-     *              <b>Define Statements:</b>
-     *              \par
-     *              none
-     * @return int:-1 \n
-     *         On failure, returns -1.
-     *         <p/>
-     *         \b Errors:
-     *         \par
-     *         lsberrno is set to indicate the error.
-     *         <p/>
-     *         <b>Equivalent line commands:</b>
-     *         \par
-     *         bresize cancel job_ID
-     *         <p/>
-     *         <b>Files:</b>
-     *         \par
-     *         none
-     * #see \ref lsb_resize_release
-     */
-
-    public static native int lsb_resize_cancel(long jobId);
-
-    /**
-     * \page lsb_resize_release lsb_resize_release
-     * \brief Releases part of the allocation of a running resizable job.
-     * <p/>
-     * Use \ref lsb_resize_release to release part of a running job allocation.
-     * A running job can only have one pending request at any particular time.
-     * If one request is still pending, additional requests are rejected with
-     * a proper error code.
-     * <p/>
-     * If a notification command is defined through job submission, application
-     * profile,or the \ref lsb_resize_release API, the notification command is invoked
-     * on the first execution host of the job allocation once allocation resize
-     * requests have been satisfied.
-     * <p/>
-     * <b>\#include <lsf/lsbatch.h>
-     * <p/>
-     * int lsb_resize_release(job_resize_release.ByReference req);</b>
-     *
-     * @return int:-1 \n
-     *         On failure, returns -1.
-     *         <p/>
-     *         \b Errors:
-     *         \par
-     *         lsberrno is set to indicate the error.
-     *         <p/>
-     *         <b>Equivalent line commands:</b>
-     *         \par
-     *         release [-c] [-rnc resize_notification_cmd | -rncn] released_host_specification job_ID
-     *         <p/>
-     *         <b>Files:</b>
-     *         \par
-     *         none
-     * @param req job resize release request.
-     * <p/>
-     * <b>Data Structures:</b>
-     * \par
-     * job_resize_release
-     * <p/>
-     * <b>Define Statements:</b>
-     * \par
-     * \ref resizablejob_related
-     * #see \ref lsb_resize_cancel
-     */
-    public static native int lsb_resize_release(job_resize_release req);
-
-    public static native int lsb_resize_request(job_resize_request job_resize_request1);
-
-    /**
-     * \page  lsb_getjobdepinfo lsb_getjobdepinfo
-     * Returns the job dependency information.
-     * <p/>
-     * \ref lsb_getjobdepinfo returns information about jobs (including job arrays) when
-     * a job has one or more dependencies on it.
-     * <p/>
-     * <b>\#include <lsf/lsbatch.h>
-     * <p/>
-     * jobDependInfo.ByReference
-     * lsb_getjobdepinfo(jobDepRequest.ByReference jobdepReq)</b>
-     *
-     * @return null
-     *         \n Function failed.
-     *         <p/>
-     *         \b Errors:
-     *         \par
-     *         none
-     *         <p/>
-     *         <b>Equivalent line commands:</b>
-     *         \par
-     *         none
-     *         <p/>
-     *         <b>Files:</b>
-     *         \par
-     *         none
-     * @param jobdepReq Job dependent Request.
-     * <p/>
-     * <b>Data Structures:</b>
-     * \par
-     * dependJobs
-     * \n queriedJobs
-     * \n jobDependInfo
-     * \n jobDepRequest
-     * <p/>
-     * <b>Define Statements:</b>
-     * \par
-     * \ref job_has_depend
-     * \n \ref query_depend
-     */
-    public static native jobDependInfo.ByReference lsb_getjobdepinfo(jobDepRequest jobdepReq);
-
-
-    /**
-     *  \page lsb_jsdl2submit lsb_jsdl2submit
-     *  \brief  Accepts a JSDL job submission file as input and converts the file
-     *   for use with LSF.
-     *
-     *  \ref lsb_jsdl2submit converts parameters specified in the JSDL file and merges
-     *  them with the other command line and job script options. The merged submit
-     *  request is then sent to mbatchd for processing.
-     *
-     *  Code must link to LSF_LIBDIR/libbat.jsdl.lib
-     *
-     *  <b>\#include <lsf/lsbatch.h>
-     *
-     *  int lsb_jsdl2submit(submit.ByReference req, String template);</b>
-     *
-     *  @param req Reads the specified JSDL options and maps them to the
-     *  submitReq structure. Code must specify either jsdl or jsdl_strict.
-     *  @param template The default template, which contains all of the bsub
-     *  submission options.
-     *
-     *  <b>Data Structures:</b>
-     *  \par
-     *  submit
-     *
-     *  <b>Define Statements:</b>
-     *  \par
-     *  none
-     *
-     *  @return int:0 \n
-     *  Function completed successfully.
-     *  @return int:-1 \n
-     *  Function failed.
-     *
-     *  <b>Errors:</b>
-     *  \par
-     *  On failure, sets lsberrno to indicate the error.
-     *
-     *  <b>Equivalent line command:</b>
-     *  \par
-     *   bsub with options
-     *
-     *  <b>Files:</b>
-     *  \par
-     *  $LSF_LIBDIR/jsdl.xsd
-     *  \n $LSF_LIBDIR/jsdl-posix.xsd
-     *  \n $LSF_LIBDIR/jsdl-lsf.xsd
-     *
-     *  @see \ref lsb_submit
-     *  @see \ref lsb_modify
-     */
-
-    /**
-     *  \page lsblib lsblib
-     *  \brief Application Programming Interface (API) library functions for batch jobs
-     *
-     *  LSBLIB functions allow application programs to get information about the hosts,
-     *  queues, users, jobs and configuration of the batch system. Application programs
-     *  can also submit jobs and control hosts, queues and jobs. Finally, application
-     *  programs can read batch log files and write batch error messages.
-     *
-     *  \note
-     *  \par
-     *  All LSBLIB APIs require that the batch header file <lsf/lsbatch.h> be included.
-     *  \par
-     *  Many LSBLIB APIs return a pointer to an array or structure. These data structures
-     *  are in static storage or on the heap. The next time the API is called, the storage
-     *  is overwritten or freed.
-     *  \par
-     *  Any program using LSBLIB APIs that change the state of the batch system (that
-     *  is, except for APIs that just get information about the system) must be setuid
-     *  to root if LSF_AUTH is not defined in the lsf.conf file.
-     *  \par
-     *  On systems which have both System V and BSD programming interfaces, LSBLIB
-     *  typically requires the BSD programming interface. On System V-based versions of
-     *  UNIX, for example SGI IRIX, it is normally necessary to link applications using
-     *  LSBLIB with the BSD compatibility library.
-     *  \par
-     *  On AFS systems, the following needs to be added to the end of your linkage
-     *  specifications when linking with LSBLIB (assuming your AFS library path is
-     *  /usr/afsws):
-     *  \par
-     *  For HP-UX and Solaris,
-     *  \par
-     *  -lc -L/usr/afsws/lib -L/usr/afsws/lib/afs -lsys -lrx -llwp /usr/afsws/lib/afs/util.a
-     *  \par
-     *  For other platforms,
-     *  \par
-     *  -lc -L/usr/afsws/lib -L/usr/afsws/lib/afs -lsys -lrx -llwp
-     *
-     *  \b Files:
-     *  \par
-     *  ${LSF_ENVDIR:-/etc}/lsf.conf
-     *  \n $LSF_CONFDIR/lsf.shared
-     *  \n $LSF_CONFDIR/lsf.cluster.cluster_name
-     *  \n $LSF_CONFDIR/lsf.task
-     *  \n $LSF_CONFDIR/lsf.task.cluster_name
-     *  \n $LSB_CONFDIR/cluster_name/configdir/lsb.hosts
-     *  \n $$LSB_CONFDIR/cluster_name/configdir/lsb.params
-     *  \n $LSB_CONFDIR/cluster_name/configdir/lsb.queues
-     *  \n $LSB_CONFDIR/cluster_name/configdir/lsb.users
-     *
-     *  @see lsblibapis
-     */
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/jna/lsf/v7_0_6/LibLsf.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/jna/lsf/v7_0_6/LibLsf.java
deleted file mode 100644
index cc4721d..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/jna/lsf/v7_0_6/LibLsf.java
+++ /dev/null
@@ -1,1780 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.jna.lsf.v7_0_6;
-
-import com.sun.jna.*;
-import com.sun.jna.ptr.FloatByReference;
-import com.sun.jna.ptr.IntByReference;
-import com.sun.jna.ptr.PointerByReference;
-import org.broadinstitute.gatk.utils.jna.clibrary.JNAUtils;
-import org.broadinstitute.gatk.utils.jna.clibrary.LibC.timeval;
-
-/*
-  NOTE: This library uses Pointer for some Struct.ByReference members going
-  against the JNA recommendations at http://jna.java.net/#structure_use
-  Instead stuct arrays are Pointers and each structure contains a
-  constructor that can accept the Pointer iff the size of the array is
-  known to be greater than zero.
-
-  This was especially problematic in jobInfoEnt->items->resName. When
-  jobInfo->reserveCnt was zero jobInfoItems->items was not necessarily null.
-
-  LSF will often reuse memory for structure arrays but will set the
-  array size / count (reserveCnt above) to zero when the array should
-  not be accessed. When LSF has reused memory and points to a non-null
-  structure pointer (items) the inner structure may contain further
-  garbage pointers (especially items->resName).
-
-  When JNA sees a non-null Structure.ByReference it will autoRead() the
-  member. When autoRead() eventually gets to the items->resName trying
-  to run strlen on the bad memory address causes a SIGSEGV.
-
-  By using a Pointer instead of the Structure.ByReference JNA will not
-  automatically autoRead(), and the API user will have to pass the
-  pointer to the Structure on their own.
-*/
-
-/**
- * JNA wrappers for LSF's lsf.h and -llsf
- *
- * $Id: base.h,v 1.25.6.12.2.5.2.11.2.15 2009/08/17 07:25:05 qlnie Exp $
- ****************************************************************************
- *
- * Load Sharing Facility
- *
- * Header file for all components of load sharing facility.
- *
- ****************************************************************************/
- at SuppressWarnings("unused")
-public class LibLsf {
-
-    static {
-        /*
-        LSF 7.0.6 on the mac is missing the unsatisfied exported symbol for environ which was removed on MacOS X 10.5+.
-        nm $LSF_LIBDIR/liblsf.dylib | grep environ
-        See "man environ" for more info, along with http://lists.apple.com/archives/java-dev/2007/Dec/msg00096.html
-        For now, we export environ ourselves using libenvironhack.dylib available in c/libenvironhack.
-        */
-        if (Platform.isMac())
-            NativeLibrary.getInstance("environhack");
-        String lsfLibDir = System.getenv("LSF_LIBDIR");
-        if (lsfLibDir != null) {
-            NativeLibrary.addSearchPath("lsf", lsfLibDir);
-        }
-        Native.register("lsf");
-    }
-
-    public static final String PASSWD_FILE_LS = "passwd.lsfuser";
-    public static final int PASSWORD_LEN = 64;
-    public static final int MAXHOSTNAMELEN = JNAUtils.MAXHOSTNAMELEN;
-    public static final int MAXPATHLEN = JNAUtils.MAXPATHLEN;
-
-
-    public static final int LOG_EMERG = 0;
-    public static final int LOG_ALERT = 1;
-    public static final int LOG_CRIT = 2;
-    public static final int LOG_ERR = 3;
-    public static final int LOG_WARNING = 4;
-    public static final int LOG_NOTICE = 5;
-    public static final int LOG_INFO = 6;
-    public static final int LOG_DEBUG = 7;
-
-    public static final int INVALID_SOCKET = -1;
-
-    public static boolean SOCK_INVALID(int c) {
-        return ((c) == INVALID_SOCKET);
-    }
-
-    public static class rlimit extends Structure {
-        public static class ByReference extends rlimit implements Structure.ByReference {}
-        public static class ByValue extends rlimit implements Structure.ByValue {}
-        public rlimit() {}
-        public rlimit(Pointer p) { super(p); read(); }
-
-        public NativeLong rlim_cur;
-        public NativeLong rlim_max;
-    }
-
-
-
-    public static class rusage extends Structure {
-        public static class ByReference extends rusage implements Structure.ByReference {}
-        public static class ByValue extends rusage implements Structure.ByValue {}
-        public rusage() {}
-        public rusage(Pointer p) { super(p); read(); }
-
-        public timeval ru_utime;
-        public timeval ru_stime;
-
-
-        public NativeLong ru_maxrss;
-        //public static final int ru_first = ru_ixrss;
-        public NativeLong ru_ixrss;
-        public NativeLong ru_idrss;
-        public NativeLong ru_isrss;
-        public NativeLong ru_minflt;
-        public NativeLong ru_majflt;
-        public NativeLong ru_nswap;
-        public NativeLong ru_inblock;
-        public NativeLong ru_oublock;
-        public NativeLong ru_msgsnd;
-        public NativeLong ru_msgrcv;
-        public NativeLong ru_nsignals;
-        public NativeLong ru_nvcsw;
-        public NativeLong ru_nivcsw;
-        //public static final int ru_last = ru_nivcsw;
-        // Listed in lsf.h but not present in structure.
-        //public NativeLong ru_ioch;
-    }
-
-
-
-
-    public static final String _VERSION_STR_ = "Platform LSF 7.0";
-    public static final String _WORKGROUP_STR_ = "";
-    public static final String _MINOR_STR_ = "";
-    public static final String _BUILD_STR_ = "";
-    public static final String _NOTE_STR_ = "";
-    public static final String _HOTFIX_STR_ = "";
-    public static final String _OS_STR_ = "";
-
-    public static final String _DATE_STR_ = "";
-    public static final String _BUILD_INFO_ = _MINOR_STR_ + "" + _BUILD_STR_ + "" + _WORKGROUP_STR_ + ", " + _DATE_STR_ + "\nCopyright 1992-2009 Platform Computing Corporation\n\n" + _OS_STR_ + _NOTE_STR_ + _HOTFIX_STR_;
-    public static final String _LS_VERSION_ = (_VERSION_STR_ + "" + _BUILD_INFO_);
-
-    //public static int XDR_SETPOS (int xdrs, int pos)  { (*(xdrs)->x_ops->x_setpostn)(xdrs, 0); return (*(xdrs)->x_ops->x_setpostn)(xdrs, pos); }
-    //public static int xdr_setpos (int xdrs, int pos)  { (*(xdrs)->x_ops->x_setpostn)(xdrs, 0); return (*(xdrs)->x_ops->x_setpostn)(xdrs, pos); }
-
-
-    public static final int LSF_XDR_VERSION2_0 = 1;
-    public static final int LSF_XDR_VERSION2_1 = 2;
-    public static final int LSF_XDR_VERSION2_2 = 3;
-    public static final int LSF_XDR_VERSION3_0 = 4;
-    public static final int LSF_XDR_VERSION3_1 = 5;
-    public static final int LSF_XDR_VERSION3_2 = 6;
-    public static final int LSF_XDR_VERSION3_2_2 = 7;
-    public static final int LSF_XDR_VERSION4_0 = 8;
-    public static final int LSF_XDR_VERSION4_1 = 9;
-    public static final int LSF_XDR_VERSION4_2 = 10;
-    public static final int LSF_XDR_VERSION5_0 = 11;
-    public static final int LSF_XDR_VERSION5_1 = 12;
-    public static final int LSF_XDR_VERSION6_0 = 13;
-    public static final int LSF_XDR_VERSION6_1 = 14;
-    public static final int LSF_XDR_VERSION6_2 = 15;
-    public static final int EGO_XDR_VERSION_1_1 = 16;
-    public static final int LSF_XDR_VERSION7_0 = 17;
-    public static final int EGO_XDR_VERSION_1_2 = LSF_XDR_VERSION7_0;
-    public static final int LSF_XDR_VERSION7_0_EP1 = 18;
-    public static final int LSF_XDR_VERSION7_0_EP2 = 19;
-    public static final int LSF_XDR_VERSION7_0_EP3 = 20;
-    public static final int LSF_XDR_VERSION7_0_EP4 = 21;
-    public static final int LSF_XDR_VERSION7_0_EP5 = 22;
-    public static final int LSF_XDR_VERSION7_0_EP6 = 23;
-    public static final int EGO_XDR_VERSION_1_2_2 = LSF_XDR_VERSION7_0_EP1;
-    public static final int EGO_XDR_VERSION_1_2_3 = LSF_XDR_VERSION7_0_EP2;
-
-    public static final int EGO_XDR_VERSION = LSF_XDR_VERSION7_0_EP2;
-
-    //public String LOG_VERSION;
-
-    public static final int LSF_DEFAULT_SOCKS = 15;
-    public static final int MAXLINELEN = 512;
-    public static final int MAXLSFNAMELEN = 40;
-    public static final int MAXLSFNAMELEN_70_EP1 = 128;
-
-    public static final int MAXSRES = 32;
-    public static final int MAXRESDESLEN = 256;
-    public static final int NBUILTINDEX = 11;
-    public static final int MAXTYPES = 128;
-    public static final int MAXMODELS = 1024 + 2;
-    public static final int MAXMODELS_70 = 128;
-    public static final int MAXTYPES_31 = 25;
-    public static final int MAXMODELS_31 = 30;
-    public static final int MAXFILENAMELEN = 256;
-    public static final int MAXEVARS = 30;
-
-    public static final int GENMALLOCPACE = 1024;
-
-
-    public static final int FIRST_RES_SOCK = 20;
-
-
-    public static final int R15S = 0;
-    public static final int R1M = 1;
-    public static final int R15M = 2;
-    public static final int UT = 3;
-    public static final int PG = 4;
-    public static final int IO = 5;
-    public static final int LS = 6;
-    public static final int IT = 7;
-    public static final int TMP = 8;
-    public static final int SWP = 9;
-    public static final int MEM = 10;
-    public static final int USR1 = 11;
-    public static final int USR2 = 12;
-
-
-    public static final float INFINIT_LOAD = (float) (0x7fffffff);
-    public static final float INFINIT_FLOAT = (float) (0x7fffffff);
-
-    public static final int INFINIT_INT = 0x7fffffff;
-    public static final long INFINIT_LONG_INT = 0x7fffffffffffffffL;
-    public static final short INFINIT_SHORT = 0x7fff;
-
-    public static final int DEFAULT_RLIMIT = -1;
-
-    public static final int LSF_RLIMIT_CPU = 0;
-    public static final int LSF_RLIMIT_FSIZE = 1;
-    public static final int LSF_RLIMIT_DATA = 2;
-    public static final int LSF_RLIMIT_STACK = 3;
-    public static final int LSF_RLIMIT_CORE = 4;
-    public static final int LSF_RLIMIT_RSS = 5;
-    public static final int LSF_RLIMIT_NOFILE = 6;
-    public static final int LSF_RLIMIT_OPEN_MAX = 7;
-    public static final int LSF_RLIMIT_VMEM = 8;
-    public static final int LSF_RLIMIT_SWAP = LSF_RLIMIT_VMEM;
-    public static final int LSF_RLIMIT_RUN = 9;
-    public static final int LSF_RLIMIT_PROCESS = 10;
-    public static final int LSF_RLIMIT_THREAD = 11;
-    public static final int LSF_RLIM_NLIMITS = 12;
-
-    public static final int LSF_RLIM_NLIMITS5_1 = 11;
-
-    //public static int seteuid (int x) { return setresuid(-1,x,-1); }
-    //public static int setegid (int x) { return setresgid(-1,x,-1); }
-
-    public static final int LSF_NULL_MODE = 0;
-    public static final int LSF_LOCAL_MODE = 1;
-    public static final int LSF_REMOTE_MODE = 2;
-
-
-    public static final int RF_MAXHOSTS = 5;
-
-
-    public static final int RF_CMD_MAXHOSTS = 0;
-
-
-    public static final int RF_CMD_RXFLAGS = 2;
-
-
-    public static final int STATUS_TIMEOUT = 125;
-    public static final int STATUS_IOERR = 124;
-    public static final int STATUS_EXCESS = 123;
-    public static final int STATUS_REX_NOMEM = 122;
-    public static final int STATUS_REX_FATAL = 121;
-    public static final int STATUS_REX_CWD = 120;
-    public static final int STATUS_REX_PTY = 119;
-    public static final int STATUS_REX_SP = 118;
-    public static final int STATUS_REX_FORK = 117;
-    public static final int STATUS_REX_AFS = 116;
-    public static final int STATUS_REX_UNKNOWN = 115;
-    public static final int STATUS_REX_NOVCL = 114;
-    public static final int STATUS_REX_NOSYM = 113;
-    public static final int STATUS_REX_VCL_INIT = 112;
-    public static final int STATUS_REX_VCL_SPAWN = 111;
-    public static final int STATUS_REX_EXEC = 110;
-    public static final int STATUS_REX_MLS_INVAL = 109;
-    public static final int STATUS_REX_MLS_CLEAR = 108;
-    public static final int STATUS_REX_MLS_RHOST = 107;
-    public static final int STATUS_REX_MLS_DOMIN = 106;
-    public static final int STATUS_DENIED = 105;
-
-
-    public static boolean REX_FATAL_ERROR(int s) {
-        return (((s) == STATUS_REX_NOVCL) || ((s) == STATUS_REX_NOSYM) || ((s) == STATUS_REX_NOMEM) || ((s) == STATUS_REX_FATAL) || ((s) == STATUS_REX_CWD) || ((s) == STATUS_REX_PTY) || ((s) == STATUS_REX_VCL_INIT) || ((s) == STATUS_REX_VCL_SPAWN) || ((s) == STATUS_REX_MLS_INVAL) || ((s) == STATUS_REX_MLS_CLEAR) || ((s) == STATUS_REX_MLS_RHOST) || ((s) == STATUS_REX_MLS_DOMIN));
-    }
-
-
-    public static final int REXF_USEPTY = 0x00000001;
-    public static final int REXF_CLNTDIR = 0x00000002;
-    public static final int REXF_TASKPORT = 0x00000004;
-    public static final int REXF_SHMODE = 0x00000008;
-    public static final int REXF_TASKINFO = 0x00000010;
-    public static final int REXF_REQVCL = 0x00000020;
-    public static final int REXF_SYNCNIOS = 0x00000040;
-    public static final int REXF_TTYASYNC = 0x00000080;
-    public static final int REXF_STDERR = 0x00000100;
-
-
-    public static final int EXACT = 0x01;
-    public static final int OK_ONLY = 0x02;
-    public static final int NORMALIZE = 0x04;
-    public static final int LOCALITY = 0x08;
-    public static final int IGNORE_RES = 0x10;
-    public static final int LOCAL_ONLY = 0x20;
-    public static final int DFT_FROMTYPE = 0x40;
-    public static final int ALL_CLUSTERS = 0x80;
-    public static final int EFFECTIVE = 0x100;
-
-
-    public static final int RECV_FROM_CLUSTERS = 0x200;
-    public static final int NEED_MY_CLUSTER_NAME = 0x400;
-
-
-    public static final int SEND_TO_CLUSTERS = 0x400;
-
-
-    public static final int NO_SORT = 0x800;
-
-
-    public static final int EXCLUSIVE_RESOURCE = 0x1000;
-
-    public static final int DT_CLUSTER_LOAD = 0x2000;
-
-
-    public static final int FROM_MASTER = 0x01;
-
-
-    public static final int KEEPUID = 0x01;
-
-
-    public static final int RES_CMD_REBOOT = 1;
-
-    public static final int RES_CMD_SHUTDOWN = 2;
-
-    public static final int RES_CMD_LOGON = 3;
-
-    public static final int RES_CMD_LOGOFF = 4;
-
-
-    public static final int LIM_CMD_REBOOT = 1;
-    public static final int LIM_CMD_SHUTDOWN = 2;
-    public static final int LIM_CMD_REMOVEHOST = 3;
-    public static final int LIM_CMD_ACTIVATE = 4;
-    public static final int LIM_CMD_DEACTIVATE = 5;
-    public static final int LIM_CMD_ELIM_ENV = 6;
-
-
-    public static class connectEnt extends Structure {
-        public static class ByReference extends connectEnt implements Structure.ByReference {}
-        public static class ByValue extends connectEnt implements Structure.ByValue {}
-        public connectEnt() {}
-        public connectEnt(Pointer p) { super(p); read(); }
-
-        public String hostname;
-        public int[] csock = new int[2];
-    }
-
-
-
-    public static final int INTEGER_BITS = 32;
-
-    public static int GET_INTNUM(int i) {
-        return ((i) / INTEGER_BITS + 1);
-    }
-
-
-    public static final int LIM_UNAVAIL = 0x00010000;
-    public static final int LIM_LOCKEDU = 0x00020000;
-    public static final int LIM_LOCKEDW = 0x00040000;
-    public static final int LIM_BUSY = 0x00080000;
-    public static final int LIM_RESDOWN = 0x00100000;
-    public static final int LIM_UNLICENSED = 0x00200000;
-    public static final int LIM_SBDDOWN = 0x00400000;
-    public static final int LIM_LOCKEDM = 0x00800000;
-
-    public static final int LIM_OK_MASK = 0x00bf0000;
-    public static final int LIM_PEMDOWN = 0x01000000;
-    public static final int LIM_LOCKEDU_RMS = 0x80000000;
-
-
-    public static boolean LS_ISUNAVAIL(int[] status) {
-        return (((status) != null) && (((status[0]) & LIM_UNAVAIL) != 0));
-    }
-
-
-    public static boolean LS_ISBUSYON(int[] status, int index) {
-        return (((status) != null) && (((status[1 + (index) / INTEGER_BITS]) & (1 << (index) % INTEGER_BITS)) != 0));
-    }
-
-    public static boolean LS_ISBUSY(int[] status) {
-        return (((status) != null) && (((status[0]) & LIM_BUSY) != 0));
-    }
-
-
-    public static boolean LS_ISRMSLOCK(int[] status) {
-        return (((status) != null) && (((status[0]) & LIM_LOCKEDU_RMS) != 0));
-    }
-
-
-    public static boolean LS_ISLOCKEDU(int[] status) {
-        return (((status) != null) && (((status[0]) & LIM_LOCKEDU) != 0));
-    }
-
-
-    public static boolean LS_ISLOCKEDW(int[] status) {
-        return (((status) != null) && (((status[0]) & LIM_LOCKEDW) != 0));
-    }
-
-
-    public static boolean LS_ISLOCKEDM(int[] status) {
-        return (((status) != null) && (((status[0]) & LIM_LOCKEDM) != 0));
-    }
-
-
-    public static boolean LS_ISLOCKED(int[] status) {
-        return (((status) != null) && (((status[0]) & (LIM_LOCKEDU | LIM_LOCKEDW | LIM_LOCKEDM)) != 0));
-    }
-
-
-    public static boolean LS_ISRESDOWN(int[] status) {
-        return (((status) != null) && (((status[0]) & LIM_RESDOWN) != 0));
-    }
-
-
-    public static boolean LS_ISSBDDOWN(int[] status) {
-        return (((status) != null) && (((status[0]) & LIM_SBDDOWN) != 0));
-    }
-
-    public static boolean LS_ISPEMDOWN(int[] status) {
-        return (((status[0]) & LIM_PEMDOWN) != 0);
-    }
-
-
-    public static boolean LS_ISUNLICENSED(int[] status) {
-        return (((status) != null) && (((status[0]) & LIM_UNLICENSED) != 0));
-    }
-
-
-    public static boolean LS_ISOK(int[] status) {
-        return (((status) != null) && ((status[0] & LIM_OK_MASK) == 0));
-    }
-
-
-    public static boolean LS_ISOKNRES(int[] status) {
-        return (((status) != null) && (((status[0] & ~(LIM_LOCKEDU_RMS)) & ~(LIM_RESDOWN | LIM_SBDDOWN | LIM_PEMDOWN)) == 0));
-    }
-
-
-    public static class placeInfo extends Structure {
-        public static class ByReference extends placeInfo implements Structure.ByReference {}
-        public static class ByValue extends placeInfo implements Structure.ByValue {}
-        public placeInfo() {}
-        public placeInfo(Pointer p) { super(p); read(); }
-
-        public byte[] hostName = new byte[MAXHOSTNAMELEN];
-        public int numtask;
-    }
-
-
-
-
-    public static class hostLoad extends Structure {
-        public static class ByReference extends hostLoad implements Structure.ByReference {}
-        public static class ByValue extends hostLoad implements Structure.ByValue {}
-        public hostLoad() {}
-        public hostLoad(Pointer p) { super(p); read(); }
-
-        public byte[] hostName = new byte[MAXHOSTNAMELEN];
-        public IntByReference status;
-        public FloatByReference li;
-    }
-
-
-
-
-    public static interface valueType {
-          public static final int LS_BOOLEAN = 0;
-          public static final int LS_NUMERIC = 1;
-          public static final int LS_STRING = 2;
-          public static final int LS_EXTERNAL = 3;
-    }
-
-
-
-    public static interface orderType {
-          public static final int INCR = 0;
-          public static final int DECR = 1;
-          public static final int NA = 2;
-    }
-
-
-
-
-    public static final int RESF_BUILTIN = 0x01;
-    public static final int RESF_DYNAMIC = 0x02;
-    public static final int RESF_GLOBAL = 0x04;
-    public static final int RESF_SHARED = 0x08;
-    public static final int RESF_LIC = 0x10;
-    public static final int RESF_EXTERNAL = 0x20;
-    public static final int RESF_RELEASE = 0x40;
-    public static final int RESF_DEFINED_IN_RESOURCEMAP = 0x80;
-
-    public static final int RESF_NON_CONSUMABLE = 0x100;
-    public static final int RESF_REDEFINABLE = 0x200;
-    public static final int RESF_ESRES = 0x400;
-
-
-    public static class resItem extends Structure {
-        public static class ByReference extends resItem implements Structure.ByReference {}
-        public static class ByValue extends resItem implements Structure.ByValue {}
-        public resItem() {}
-        public resItem(Pointer p) { super(p); read(); }
-
-        public byte[] name = new byte[MAXLSFNAMELEN];
-        public byte[] des = new byte[MAXRESDESLEN];
-        public /*valueType*/ int valueType;
-        public /*orderType*/ int orderType;
-        public int flags;
-        public int interval;
-    }
-
-
-
-
-    public static class lsInfo extends Structure {
-        public static class ByReference extends lsInfo implements Structure.ByReference {}
-        public static class ByValue extends lsInfo implements Structure.ByValue {}
-        public lsInfo() {}
-        public lsInfo(Pointer p) { super(p); read(); }
-
-        // The current version of JNA's Structure.getNativeAlignment passes a "null" to
-        // Native.getNativeSize() when accessing the contents of a 2D array.
-        // Although the method is marked as protected, there are also multiple "TO DO"
-        // comments so when we upgrade don't want to have specialized code floating around.
-
-        public int nRes;
-        public Pointer /* resItem.ByReference */ resTable;
-        public int nTypes;
-        public byte[] hostTypes = new byte[MAXTYPES * MAXLSFNAMELEN];
-        public int nModels;
-        public byte[] hostModels = new byte[MAXMODELS * MAXLSFNAMELEN];
-        public byte[] hostArchs = new byte[MAXMODELS * MAXLSFNAMELEN_70_EP1];
-        public int[] modelRefs = new int[MAXMODELS];
-        public float[] cpuFactor = new float[MAXMODELS];
-        public int numIndx;
-        public int numUsrIndx;
-    }
-
-
-
-
-    public static final int CLUST_STAT_OK = 0x01;
-    public static final int CLUST_STAT_UNAVAIL = 0x02;
-    public static final int CLUST_STAT_RECV_FROM = 0x04;
-    public static final int CLUST_STAT_SEND_TO = 0x08;
-
-
-    public static boolean IS_DEFAULT_AUTH(byte[] auth) {
-        return (auth == null || auth[0] == '\0');
-    }
-
-
-    public static class clusterInfo extends Structure {
-        public static class ByReference extends clusterInfo implements Structure.ByReference {}
-        public static class ByValue extends clusterInfo implements Structure.ByValue {}
-        public clusterInfo() {}
-        public clusterInfo(Pointer p) { super(p); read(); }
-
-        public byte[] clusterName = new byte[MAXLSFNAMELEN];
-        public int status;
-        public byte[] masterName = new byte[MAXHOSTNAMELEN];
-        public byte[] managerName = new byte[MAXLSFNAMELEN];
-        public int managerId;
-        public int numServers;
-        public int numClients;
-        public int nRes;
-        public Pointer resources;
-        public int nTypes;
-        public Pointer hostTypes;
-        public int nModels;
-        public Pointer hostModels;
-        public int nAdmins;
-        public IntByReference adminIds;
-        public Pointer admins;
-        public int analyzerLicFlag;
-        public int jsLicFlag;
-        public byte[] afterHoursWindow = new byte[MAXLINELEN];
-        public byte[] preferAuthName = new byte[MAXLSFNAMELEN];
-        public byte[] inUseAuthName = new byte[MAXLSFNAMELEN];
-    }
-
-
-    public static class hostInfo extends Structure {
-        public static class ByReference extends hostInfo implements Structure.ByReference {}
-        public static class ByValue extends hostInfo implements Structure.ByValue {}
-        public hostInfo() {}
-        public hostInfo(Pointer p) { super(p); read(); }
-
-        public byte[] hostName = new byte[MAXHOSTNAMELEN];
-        public String hostType;
-        public String hostModel;
-        public float cpuFactor;
-        public int maxCpus;
-        public int maxMem;
-        public int maxSwap;
-        public int maxTmp;
-        public int nDisks;
-        public int nRes;
-        public Pointer resources;
-        public int nDRes;
-        public Pointer DResources;
-        public String windows;
-        public int numIndx;
-        public FloatByReference busyThreshold;
-        public byte isServer;
-        public byte licensed;
-        public int rexPriority;
-        public int licFeaturesNeeded;
-
-
-        public static final int LSF_BASE_LIC = 0;
-        public static final int LSF_BATCH_LIC_OBSOLETE = 1;
-        public static final int LSF_JS_SCHEDULER_LIC = 2;
-        public static final int LSF_JS_LIC = 3;
-        public static final int LSF_CLIENT_LIC = 4;
-        public static final int LSF_MC_LIC = 5;
-        public static final int LSF_ANALYZER_SERVER_LIC = 6;
-        public static final int LSF_MAKE_LIC = 7;
-
-        public static final int LSF_PARALLEL_LIC = 8;
-        public static final int LSF_FLOAT_CLIENT_LIC = 9;
-        public static final int LSF_FTA_LIC = 10;
-        public static final int LSF_AFTER_HOURS_LIC = 11;
-        public static final int LSF_RESOURCE_PREEMPT_LIC = 12;
-        public static final int LSF_BACCT_LIC = 13;
-        public static final int LSF_SCHED_FAIRSHARE_LIC = 14;
-        public static final int LSF_SCHED_RESERVE_LIC = 15;
-        public static final int LSF_SCHED_PREEMPTION_LIC = 16;
-        public static final int LSF_SCHED_PARALLEL_LIC = 17;
-        public static final int LSF_SCHED_ADVRSV_LIC = 18;
-        public static final int LSF_API_CLIENT_LIC = 19;
-
-        public static final int CLUSTERWARE_MANAGER_LIC = 20;
-        public static final int LSF_MANAGER_LIC = 21;
-        public static final int LSF_PCC_HPC_LIC = 22;
-        public static final int sCLUSTERWARE_LIC = 23;
-        public static final int OTTAWA_MANAGER_LIC = 24;
-
-        public static final int SYMPHONY_MANAGER_ONLINE_LIC = 25;
-        public static final int SYMPHONY_MANAGER_BATCH_LIC = 26;
-        public static final int SYMPHONY_SCHED_JOB_PRIORITY_LIC = 27;
-        public static final int LSF_DUALCORE_X86_LIC = 28;
-        public static final int LSF_TSCHED_LIC = 29;
-        public static final int LSF_WORKGROUP_LIC = 30;
-        public static final int LSF_NUM_LIC_TYPE = 31;
-        public static final int LSF_WG_NUM_LIC_TYPE = 2;
-        public static final int LSF_NO_NEED_LIC = 32;
-
-        public int licClass;
-        public int cores;
-        public static final int INET6_ADDRSTRLEN = 46;
-        public byte[] hostAddr = new byte[INET6_ADDRSTRLEN];
-        public int pprocs;
-
-        public int cores_per_proc;
-        public int threads_per_core;
-    }
-
-    public static boolean HAS_BATCH_LICENSES(int featureEnabled) {
-        return (JNAUtils.toBoolean(featureEnabled & (1 << hostInfo.CLUSTERWARE_MANAGER_LIC)) || JNAUtils.toBoolean(featureEnabled & (1 << hostInfo.LSF_MANAGER_LIC)) || JNAUtils.toBoolean(featureEnabled & (1 << hostInfo.LSF_WORKGROUP_LIC)) || JNAUtils.toBoolean(featureEnabled & (1 << hostInfo.SYMPHONY_MANAGER_ONLINE_LIC)) || JNAUtils.toBoolean(featureEnabled & (1 << hostInfo.SYMPHONY_MANAGER_BATCH_LIC)));
-    }
-
-    public static boolean HAS_SYMPHONY_LICENSES(int featureEnabled) {
-        return (JNAUtils.toBoolean(featureEnabled & (1 << hostInfo.SYMPHONY_MANAGER_ONLINE_LIC)) || JNAUtils.toBoolean(featureEnabled & (1 << hostInfo.SYMPHONY_MANAGER_BATCH_LIC)));
-    }
-
-
-    public static class config_param extends Structure {
-        public static class ByReference extends config_param implements Structure.ByReference {}
-        public static class ByValue extends config_param implements Structure.ByValue {}
-        public config_param() {}
-        public config_param(Pointer p) { super(p); read(); }
-
-        public String paramName;
-        public String paramValue;
-    }
-
-
-
-    public static class lsfRusage extends Structure {
-        public static class ByReference extends lsfRusage implements Structure.ByReference {}
-        public static class ByValue extends lsfRusage implements Structure.ByValue {}
-        public lsfRusage() {}
-        public lsfRusage(Pointer p) { super(p); read(); }
-
-        public double ru_utime;
-        public double ru_stime;
-        public double ru_maxrss;
-        public double ru_ixrss;
-        public double ru_ismrss;
-        public double ru_idrss;
-        public double ru_isrss;
-        public double ru_minflt;
-        public double ru_majflt;
-        public double ru_nswap;
-        public double ru_inblock;
-        public double ru_oublock;
-        public double ru_ioch;
-        public double ru_msgsnd;
-        public double ru_msgrcv;
-        public double ru_nsignals;
-        public double ru_nvcsw;
-        public double ru_nivcsw;
-        public double ru_exutime;
-    }
-
-
-
-
-    public static class lsfAcctRec extends Structure {
-        public static class ByReference extends lsfAcctRec implements Structure.ByReference {}
-        public static class ByValue extends lsfAcctRec implements Structure.ByValue {}
-        public lsfAcctRec() {}
-        public lsfAcctRec(Pointer p) { super(p); read(); }
-
-        public int pid;
-        public String username;
-        public int exitStatus;
-        public NativeLong dispTime;
-        public NativeLong termTime;
-        public String fromHost;
-        public String execHost;
-        public String cwd;
-        public String cmdln;
-        public lsfRusage lsfRu;
-    }
-
-
-
-
-    public static class confNode extends Structure {
-        public static class ByReference extends confNode implements Structure.ByReference {}
-        public static class ByValue extends confNode implements Structure.ByValue {}
-        public confNode() {}
-        public confNode(Pointer p) { super(p); read(); }
-
-        public confNode.ByReference leftPtr;
-        public confNode.ByReference rightPtr;
-        public confNode.ByReference fwPtr;
-        public String cond;
-        public int beginLineNum;
-        public int numLines;
-        public Pointer lines;
-        public byte tag;
-    }
-
-
-
-    public static class pStack extends Structure {
-        public static class ByReference extends pStack implements Structure.ByReference {}
-        public static class ByValue extends pStack implements Structure.ByValue {}
-        public pStack() {}
-        public pStack(Pointer p) { super(p); read(); }
-
-        public int top;
-        public int size;
-        public PointerByReference nodes;
-    }
-
-
-
-    public static class confHandle extends Structure {
-        public static class ByReference extends confHandle implements Structure.ByReference {}
-        public static class ByValue extends confHandle implements Structure.ByValue {}
-        public confHandle() {}
-        public confHandle(Pointer p) { super(p); read(); }
-
-        public confNode.ByReference rootNode;
-        public String fname;
-        public confNode.ByReference curNode;
-        public int lineCount;
-        public pStack.ByReference ptrStack;
-    }
-
-
-
-    public static class lsConf extends Structure {
-        public static class ByReference extends lsConf implements Structure.ByReference {}
-        public static class ByValue extends lsConf implements Structure.ByValue {}
-        public lsConf() {}
-        public lsConf(Pointer p) { super(p); read(); }
-
-        public confHandle.ByReference confhandle;
-        public int numConds;
-        public Pointer conds;
-        public IntByReference values;
-    }
-
-
-
-    public static class sharedConf extends Structure {
-        public static class ByReference extends sharedConf implements Structure.ByReference {}
-        public static class ByValue extends sharedConf implements Structure.ByValue {}
-        public sharedConf() {}
-        public sharedConf(Pointer p) { super(p); read(); }
-
-        public lsInfo.ByReference lsinfo;
-        public int numCls;
-        public Pointer clusterNames;
-        public Pointer servers;
-    }
-
-
-
-
-    public static class lsSharedResourceInstance extends Structure {
-        public static class ByReference extends lsSharedResourceInstance implements Structure.ByReference {}
-        public static class ByValue extends lsSharedResourceInstance implements Structure.ByValue {}
-        public lsSharedResourceInstance() {}
-        public lsSharedResourceInstance(Pointer p) { super(p); read(); }
-
-        public String value;
-        public int nHosts;
-        public Pointer hostList;
-
-    }
-
-
-
-
-    public static class lsSharedResourceInfo extends Structure {
-        public static class ByReference extends lsSharedResourceInfo implements Structure.ByReference {}
-        public static class ByValue extends lsSharedResourceInfo implements Structure.ByValue {}
-        public lsSharedResourceInfo() {}
-        public lsSharedResourceInfo(Pointer p) { super(p); read(); }
-
-        public String resourceName;
-        public int nInstances;
-        public Pointer /* lsSharedResourceInstance.ByReference */ instances;
-    }
-
-
-
-    public static class clusterConf extends Structure {
-        public static class ByReference extends clusterConf implements Structure.ByReference {}
-        public static class ByValue extends clusterConf implements Structure.ByValue {}
-        public clusterConf() {}
-        public clusterConf(Pointer p) { super(p); read(); }
-
-        public clusterInfo.ByReference clinfo;
-        public int numHosts;
-        public Pointer /* hostInfo.ByReference */ hosts;
-        public int defaultFeatures;
-        public int numShareRes;
-        public Pointer /* lsSharedResourceInfo.ByReference */ shareRes;
-    }
-
-
-
-
-    public static class pidInfo extends Structure {
-        public static class ByReference extends pidInfo implements Structure.ByReference {}
-        public static class ByValue extends pidInfo implements Structure.ByValue {}
-        public pidInfo() {}
-        public pidInfo(Pointer p) { super(p); read(); }
-
-        public int pid;
-        public int ppid;
-        public int pgid;
-        public int jobid;
-    }
-
-
-
-
-    public static class jRusage extends Structure {
-        public static class ByReference extends jRusage implements Structure.ByReference {}
-        public static class ByValue extends jRusage implements Structure.ByValue {}
-        public jRusage() {}
-        public jRusage(Pointer p) { super(p); read(); }
-
-        public int mem;
-        public int swap;
-        public int utime;
-        public int stime;
-        public int npids;
-        public Pointer /* pidInfo.ByReference */ pidInfo;
-
-        public int npgids;
-        public IntByReference pgid;
-        public int nthreads;
-    }
-
-
-
-
-    public static final int NUM_SUBS = 2;
-    public static final int LEN_SUBS = 64;
-    public static final int NUM_CLASS_TYPE = 3;
-
-    public static class licUsage extends Structure {
-        public static class ByReference extends licUsage implements Structure.ByReference {}
-        public static class ByValue extends licUsage implements Structure.ByValue {}
-        public licUsage() {}
-        public licUsage(Pointer p) { super(p); read(); }
-
-        public int licDisplayMask;
-        public int usingDemoLicense;
-        public float[] total = new float[hostInfo.LSF_NUM_LIC_TYPE];
-        public float[] inUse = new float[hostInfo.LSF_NUM_LIC_TYPE];
-    }
-
-
-
-    public static class hostClassInfo extends Structure {
-        public static class ByReference extends hostClassInfo implements Structure.ByReference {}
-        public static class ByValue extends hostClassInfo implements Structure.ByValue {}
-        public hostClassInfo() {}
-        public hostClassInfo(Pointer p) { super(p); read(); }
-
-        public int numHosts;
-        public int numCpus;
-        public int numCores;
-    }
-
-
-
-    public static class lsfLicUsage extends Structure {
-        public static class ByReference extends lsfLicUsage implements Structure.ByReference {}
-        public static class ByValue extends lsfLicUsage implements Structure.ByValue {}
-        public lsfLicUsage() {}
-        public lsfLicUsage(Pointer p) { super(p); read(); }
-
-        public licUsage licUsage;
-        public hostClassInfo[] hostInfo = new hostClassInfo[NUM_CLASS_TYPE];
-        // The current version of JNA's Structure.getNativeAlignment passes a "null" to
-        // Native.getNativeSize() when accessing the contents of a 2D array.
-        // Although the method is marked as protected, there are also multiple "TO DO"
-        // comments so when we upgrade don't want to have specialized code floating around.
-        public byte[] substitution = new byte[NUM_SUBS * LEN_SUBS];
-        public byte[] cluster = new byte[MAXFILENAMELEN];
-    }
-
-
-    public static class param_entry extends Structure {
-        public static class ByReference extends param_entry implements Structure.ByReference {}
-        public static class ByValue extends param_entry implements Structure.ByValue {}
-        public param_entry() {}
-        public param_entry(Pointer p) { super(p); read(); }
-
-        public static int HAS_PARAM_VALUE = 0x001;
-        public static final int HAS_PARAM_DEFAULT = 0x002;
-
-        public int flags;
-        public String key;
-        public String value;
-        public String default_value;
-    }
-
-
-
-    public static class params_key_value_pair extends Structure {
-        public static class ByReference extends params_key_value_pair implements Structure.ByReference {}
-        public static class ByValue extends params_key_value_pair implements Structure.ByValue {}
-        public params_key_value_pair() {}
-        public params_key_value_pair(Pointer p) { super(p); read(); }
-
-        public int num_params;
-        public String daemon_time;
-        public Pointer /* param_entry.ByReference */ param;
-    }
-
-
-
-
-    public static final int LSE_NO_ERR = 0;
-    public static final int LSE_BAD_XDR = 1;
-    public static final int LSE_MSG_SYS = 2;
-    public static final int LSE_BAD_ARGS = 3;
-    public static final int LSE_MASTR_UNKNW = 4;
-    public static final int LSE_LIM_DOWN = 5;
-    public static final int LSE_PROTOC_LIM = 6;
-    public static final int LSE_SOCK_SYS = 7;
-    public static final int LSE_ACCEPT_SYS = 8;
-    public static final int LSE_BAD_TASKF = 9;
-    public static final int LSE_NO_HOST = 10;
-    public static final int LSE_NO_ELHOST = 11;
-    public static final int LSE_TIME_OUT = 12;
-    public static final int LSE_NIOS_DOWN = 13;
-    public static final int LSE_LIM_DENIED = 14;
-    public static final int LSE_LIM_IGNORE = 15;
-    public static final int LSE_LIM_BADHOST = 16;
-    public static final int LSE_LIM_ALOCKED = 17;
-    public static final int LSE_LIM_NLOCKED = 18;
-    public static final int LSE_LIM_BADMOD = 19;
-    public static final int LSE_SIG_SYS = 20;
-    public static final int LSE_BAD_EXP = 21;
-    public static final int LSE_NORCHILD = 22;
-    public static final int LSE_MALLOC = 23;
-    public static final int LSE_LSFCONF = 24;
-    public static final int LSE_BAD_ENV = 25;
-    public static final int LSE_LIM_NREG = 26;
-    public static final int LSE_RES_NREG = 27;
-    public static final int LSE_RES_NOMORECONN = 28;
-    public static final int LSE_BADUSER = 29;
-    public static final int LSE_RES_ROOTSECURE = 30;
-    public static final int LSE_RES_DENIED = 31;
-    public static final int LSE_BAD_OPCODE = 32;
-    public static final int LSE_PROTOC_RES = 33;
-    public static final int LSE_RES_CALLBACK = 34;
-    public static final int LSE_RES_NOMEM = 35;
-    public static final int LSE_RES_FATAL = 36;
-    public static final int LSE_RES_PTY = 37;
-    public static final int LSE_RES_SOCK = 38;
-    public static final int LSE_RES_FORK = 39;
-    public static final int LSE_NOMORE_SOCK = 40;
-    public static final int LSE_WDIR = 41;
-    public static final int LSE_LOSTCON = 42;
-    public static final int LSE_RES_INVCHILD = 43;
-    public static final int LSE_RES_KILL = 44;
-    public static final int LSE_PTYMODE = 45;
-    public static final int LSE_BAD_HOST = 46;
-    public static final int LSE_PROTOC_NIOS = 47;
-    public static final int LSE_WAIT_SYS = 48;
-    public static final int LSE_SETPARAM = 49;
-    public static final int LSE_RPIDLISTLEN = 50;
-    public static final int LSE_BAD_CLUSTER = 51;
-    public static final int LSE_RES_VERSION = 52;
-    public static final int LSE_EXECV_SYS = 53;
-    public static final int LSE_RES_DIR = 54;
-    public static final int LSE_RES_DIRW = 55;
-    public static final int LSE_BAD_SERVID = 56;
-    public static final int LSE_NLSF_HOST = 57;
-    public static final int LSE_UNKWN_RESNAME = 58;
-    public static final int LSE_UNKWN_RESVALUE = 59;
-    public static final int LSE_TASKEXIST = 60;
-    public static final int LSE_BAD_TID = 61;
-    public static final int LSE_TOOMANYTASK = 62;
-    public static final int LSE_LIMIT_SYS = 63;
-    public static final int LSE_BAD_NAMELIST = 64;
-    public static final int LSE_NO_LICENSE = 65;
-    public static final int LSE_LIM_NOMEM = 66;
-    public static final int LSE_NIO_INIT = 67;
-    public static final int LSE_CONF_SYNTAX = 68;
-    public static final int LSE_FILE_SYS = 69;
-    public static final int LSE_CONN_SYS = 70;
-    public static final int LSE_SELECT_SYS = 71;
-    public static final int LSE_EOF = 72;
-    public static final int LSE_ACCT_FORMAT = 73;
-    public static final int LSE_BAD_TIME = 74;
-    public static final int LSE_FORK = 75;
-    public static final int LSE_PIPE = 76;
-    public static final int LSE_ESUB = 77;
-    public static final int LSE_DCE_EXEC = 78;
-    public static final int LSE_EAUTH = 79;
-    public static final int LSE_NO_FILE = 80;
-    public static final int LSE_NO_CHAN = 81;
-    public static final int LSE_BAD_CHAN = 82;
-    public static final int LSE_INTERNAL = 83;
-    public static final int LSE_PROTOCOL = 84;
-    public static final int LSE_THRD_SYS = 85;
-    public static final int LSE_MISC_SYS = 86;
-    public static final int LSE_LOGON_FAIL = 87;
-    public static final int LSE_RES_RUSAGE = 88;
-    public static final int LSE_NO_RESOURCE = 89;
-    public static final int LSE_BAD_RESOURCE = 90;
-    public static final int LSE_RES_PARENT = 91;
-    public static final int LSE_NO_PASSWD = 92;
-    public static final int LSE_SUDOERS_CONF = 93;
-    public static final int LSE_SUDOERS_ROOT = 94;
-    public static final int LSE_I18N_SETLC = 95;
-    public static final int LSE_I18N_CATOPEN = 96;
-    public static final int LSE_I18N_NOMEM = 97;
-    public static final int LSE_NO_MEM = 98;
-    public static final int LSE_REGISTRY_SYS = 99;
-    public static final int LSE_FILE_CLOSE = 100;
-    public static final int LSE_LIMCONF_NOTREADY = 101;
-    public static final int LSE_MASTER_LIM_DOWN = 102;
-    public static final int LSE_MLS_INVALID = 103;
-    public static final int LSE_MLS_CLEARANCE = 104;
-    public static final int LSE_MLS_RHOST = 105;
-    public static final int LSE_MLS_DOMINATE = 106;
-    public static final int LSE_NO_CAL = 107;
-    public static final int LSE_NO_NETWORK = 108;
-    public static final int LSE_GETCONF_FAILED = 109;
-    public static final int LSE_TSSINIT = 110;
-    public static final int LSE_DYNM_DENIED = 111;
-    public static final int LSE_LIC_OVERUSE = 112;
-    public static final int LSE_EGOCONF = 113;
-    public static final int LSE_BAD_EGO_ENV = 114;
-    public static final int LSE_EGO_CONF_SYNTAX = 115;
-    public static final int LSE_EGO_GETCONF_FAILED = 116;
-    public static final int LSE_NS_LOOKUP = 117;
-    public static final int LSE_BAD_PASSWD = 118;
-
-    public static final int LSE_UNKWN_USER = 119;
-    public static final int LSE_NOT_WINHOST = 120;
-    public static final int LSE_NOT_MASTERCAND = 121;
-    public static final int LSE_HOST_UNAUTH = 122;
-    public static final int LSE_UNRESOLVALBE_HOST = 123;
-    public static final int LSE_RESOURCE_NOT_CONSUMABLE = 124;
-    public static final int LSE_SHUTDOWN = 125;
-    public static final int LSE_BAD_SYNTAX = 126;
-    public static final int LSE_NERR = 127;
-
-
-    public static boolean LSE_ISBAD_RESREQ(int s) {
-        return (((s) == LSE_BAD_EXP) || ((s) == LSE_UNKWN_RESNAME) || ((s) == LSE_UNKWN_RESVALUE));
-    }
-
-    public static boolean LSE_SYSCALL(int s) {
-        return (((s) == LSE_SELECT_SYS) || ((s) == LSE_CONN_SYS) || ((s) == LSE_FILE_SYS) || ((s) == LSE_MSG_SYS) || ((s) == LSE_SOCK_SYS) || ((s) == LSE_ACCEPT_SYS) || ((s) == LSE_SIG_SYS) || ((s) == LSE_WAIT_SYS) || ((s) == LSE_EXECV_SYS) || ((s) == LSE_LIMIT_SYS) || ((s) == LSE_PIPE) || ((s) == LSE_ESUB) || ((s) == LSE_REGISTRY_SYS) || ((s) == LSE_MISC_SYS));
-    }
-
-
-    /*
-    public static void TIMEVAL (int level, int func, int val)  {
-        if (timinglevel > level) {
-            timeval before, after;
-            timezone tz;
-            gettimeofday(&before, &tz);
-            func;
-            gettimeofday(&after, &tz);
-            val = (int)((after.tv_sec - before.tv_sec)*1000 +  (after.tv_usec-before.tv_usec)/1000);
-        } else {
-            func;
-            val = 0;
-        }
-    }
-    */
-
-    public static class ls_timeval extends Structure {
-        public static class ByReference extends ls_timeval implements Structure.ByReference {}
-        public static class ByValue extends ls_timeval implements Structure.ByValue {}
-        public ls_timeval() {}
-        public ls_timeval(Pointer p) { super(p); read(); }
-
-        public float rtime;
-        public float utime;
-        public float stime;
-    }
-
-
-
-    /*
-    public static void LS_TIMEVAL_ZERO(ls_timeval tv) {                            tv.rtime = 0.0;          tv.utime = 0.0;          tv.stime = 0.0;      }
-
-    public static int LS_TIMEVAL_INC (ls_timeval tv, int newtv) {                                  tv.rtime += newtv.rtime;       tv.utime += newtv.utime;       tv.stime += newtv.stime;      }
-
-    public static void LOG_TIME_MSG(int level, String name, ls_timeval tv, int count, String msg) { if (timinglevel > level) {  ls_syslog(LOG_INFO, "L%d %s rtime %.2f ms, utime %.2f ms, stime %.2f ms, count %d %s",  level, name, tv.rtime, tv.utime, tv.stime, count, msg);  } }; }
-
-    public static void TIMEIT (int level, String func, String name) {
-        if  (timinglevel > level && clockticks > 0) {
-            timeval _before, _after;
-            timezone _tz;
-            tms _buf, _buf2;
-            gettimeofday(&_before, &_tz);
-            times(&_buf);
-            func;
-            gettimeofday(&_after, &_tz);
-            times(&_buf2);
-            ls_syslog(LOG_INFO,"L%d %s rtime %.2f ms, utime %.2f ms, stime %.2f ms",  level,  name,  (_after.tv_sec - _before.tv_sec)*1000.0 +  (_after.tv_usec - _before.tv_usec)/1000.0,  1000.0*((_buf2.tms_utime - _buf.tms_utime)/clockticks),  1000.0*((_buf2.tms_stime - _buf.tms_stime)/clockticks));
-        } else {
-            func;
-        }
-    }
-
-    public static int TIMEVAL2 (int level, String func, ls_timeval tv) {
-        if (timinglevel > level && clockticks > 0) {
-            timeval _before, _after;
-            timezone _tz;
-            tms _buf, _buf2;
-            gettimeofday(&_before, &_tz);
-            times(&_buf);
-            func;
-            gettimeofday(&_after, &_tz);
-            times(&_buf2);
-            tv.rtime = (_after.tv_sec - _before.tv_sec)*1000.0 +  (_after.tv_usec - _before.tv_usec)/1000.0;
-            tv.utime = 1000.0*((_buf2.tms_utime - _buf.tms_utime)/clockticks);
-            tv.stime = 1000.0*((_buf2.tms_stime - _buf.tms_stime)/clockticks);
-        } else {
-            func;
-            tv.rtime = 0.0;
-            tv.utime = 0.0;
-            tv.stime = 0.0;
-        }
-    }
-
-    public static int TIMEIT_START_BLOCK (int level) {
-        tms _buf, _buf2;
-        timeval _before, _after;
-        timezone _tz;
-        if  (timinglevel > level) {
-            gettimeofday(&_before, &_tz);
-            times(&_buf);
-        }
-    }
-
-    public static int TIMEIT_END_BLOCK (int level, String name)  {
-        if  (timinglevel > level) {
-            float rt, ut, st;
-            gettimeofday(&_after, &_tz);
-            times(&_buf2);
-            rt = (_after.tv_sec - _before.tv_sec)*1000.0 +  (_after.tv_usec - _before.tv_usec)/1000.0;
-            ut = 1000.0*((_buf2.tms_utime - _buf.tms_utime)/clockticks);
-            st = 1000.0*((_buf2.tms_stime - _buf.tms_stime)/clockticks);
-            ls_syslog(LOG_INFO,"L%d %s rtime %.2f ms, utime %.2f ms, stime %.2f ms",  level, name, rt, ut, st);
-        }
-    }
-    */
-
-    public static final int LC_SCHED = 0x00000001;
-    public static final int LC_EXEC = 0x00000002;
-    public static final int LC_TRACE = 0x00000004;
-    public static final int LC_COMM = 0x00000008;
-    public static final int LC_XDR = 0x00000010;
-    public static final int LC_CHKPNT = 0x00000020;
-    public static final int LC_LICENCE = 0x00000040;
-    public static final int LC_LICENSE = 0x00000040;
-    public static final int LC_FILE = 0x00000080;
-    public static final int LC_AFS = 0x00000100;
-    public static final int LC_AUTH = 0x00000200;
-    public static final int LC_HANG = 0x00000400;
-    public static final int LC_MULTI = 0x00000800;
-    public static final int LC_SIGNAL = 0x00001000;
-    public static final int LC_DCE = 0x00002000;
-    public static final int LC_PIM = 0x00004000;
-    public static final int LC_MEMORY = 0x00004000;
-    public static final int LC_SYS = 0x00008000;
-    public static final int LC_JLIMIT = 0x00010000;
-    public static final int LC_FAIR = 0x00020000;
-    public static final int LC_PREEMPT = 0x00040000;
-    public static final int LC_PEND = 0x00080000;
-    public static final int LC_EEVENTD = 0x00100000;
-    public static final int LC_LOADINDX = 0x00200000;
-    public static final int LC_RESOURCE = 0x00200000;
-
-    public static final int LC_JGRP = 0x00400000;
-    public static final int LC_JARRAY = 0x00800000;
-    public static final int LC_MPI = 0x01000000;
-    public static final int LC_ELIM = 0x02000000;
-    public static final int LC_M_LOG = 0x04000000;
-    public static final int LC_PERFM = 0x08000000;
-    public static final int LC_DLOG = 0x10000000;
-    public static final int LC_HPC = 0x20000000;
-    public static final int LC_LICSCHED = 0x40000000;
-
-    public static final int LC_XDRVERSION = 0x80000000;
-    public static final int LC_FLEX = 0x80000000;
-
-    public static final int LC_ADVRSV = LC_DLOG;
-    public static final int LC_RESREQ = LC_M_LOG;
-
-
-    public static final int LOG_DEBUG1 = LOG_DEBUG + 1;
-    public static final int LOG_DEBUG2 = LOG_DEBUG + 2;
-    public static final int LOG_DEBUG3 = LOG_DEBUG + 3;
-
-
-    public static final int LSF_EVENT_LIM_DOWN = 1;
-    public static final int LSF_EVENT_RES_DOWN = 2;
-    public static final int LSF_EVENT_SBD_DOWN = 3;
-    public static final int LSF_EVENT_HOST_UNLIC = 4;
-    public static final int LSF_EVENT_MASTER_ELECT = 5;
-    public static final int LSF_EVENT_MASTER_RESIGN = 6;
-    public static final int LSF_EVENT_MBD_UP = 7;
-    public static final int LSF_EVENT_MBD_DOWN = 8;
-    public static final int LSF_EVENT_MBD_RECONFIG = 9;
-    public static final int LSF_EVENT_WORKDIR_FULL = 10;
-    public static final int LSF_EVENT_HOST_OPENED = 11;
-    public static final int LSF_EVENT_HOST_CLOSED = 12;
-    public static final int LSF_EVENT_QUEUE_OPENED = 13;
-    public static final int LSF_EVENT_QUEUE_CLOSED = 14;
-    public static final int LSF_EVENT_SCH_DOWN = 15;
-    public static final int LSF_EVENT_LIC_OVERUSE = 16;
-
-    public static final int LSF_NIOS_REQUEUE = 127;
-
-
-    /*
-    public int lserrno;
-    public int masterLimDown;
-    public int ls_nerr;
-    public String[] ls_errmsg;
-    public int logclass;
-    public int timinglevel;
-    public int clockticks;
-
-
-    public int lsf_lim_version;
-    */
-
-
-    public static native int ls_readconfenv(config_param config_param1, String string);
-
-
-    public static native Pointer ls_placereq(String resreq, IntByReference numhosts, int options, String fromhost);
-
-
-    public static native Pointer ls_placeofhosts(String resreq, IntByReference numhosts, int options, String fromhost, Pointer hostlist, int listsize);
-
-    // NOTE: Not in liblsf
-    //public static native Pointer ls_placeoftype(String resreq, IntByReference numhosts, int options, String fromhost, String hosttype);
-
-
-    public static native hostLoad.ByReference ls_load(String resreq, IntByReference numhosts, int options, String fromhost);
-
-
-    public static native hostLoad.ByReference ls_loadofhosts(String resreq, IntByReference numhosts, int options, String fromhost, Pointer hostlist, int listsize);
-
-    // NOTE: Not in liblsf
-    //public static native hostLoad.ByReference ls_loadoftype(String resreq, IntByReference numhosts, int options, String fromhost, String hosttype);
-
-
-    public static native hostLoad.ByReference ls_loadinfo(String resreq, IntByReference numhosts, int options, String fromhost, Pointer hostlist, int listsize, Pointer indxnamelist);
-
-
-    public static native int ls_loadadj(String resreq, placeInfo hostlist, int listsize);
-
-
-    public static native int ls_eligible(String task, String resreqstr, byte mode);
-
-
-    public static native String ls_resreq(String task);
-
-
-    public static native int ls_insertrtask(String task);
-
-
-    public static native int ls_insertltask(String task);
-
-
-    public static native int ls_deletertask(String task);
-
-
-    public static native int ls_deleteltask(String task);
-
-
-    public static native int ls_listrtask(Pointer taskList, int sortflag);
-
-
-    public static native int ls_listltask(Pointer taskList, int sortflag);
-
-
-    public static native Pointer ls_findmyconnections();
-
-
-    public static native int ls_isconnected(String hostName);
-
-    // NOTE: Not in liblsf
-    //public static native int ls_lostconnection();
-
-
-    public static native String ls_getclustername();
-
-
-    public static native clusterInfo.ByReference ls_clusterinfo(String string1, IntByReference int1, Pointer stringArray1, int int2, int int3);
-
-
-    public static native lsSharedResourceInfo.ByReference ls_sharedresourceinfo(Pointer stringArray1, IntByReference int1, String string1, int int2);
-
-
-    public static native String ls_getmastername();
-
-
-    public static native String ls_getmyhostname();
-
-
-    public static native String ls_getmyhostname2();
-
-
-    public static native hostInfo.ByReference ls_gethostinfo(String string1, IntByReference int1, Pointer stringArray1, int int2, int int3);
-
-    public static native String ls_getISVmode();
-
-    public static native int ls_isshutdown();
-
-    public static native int ls_isPartialLicensingEnabled();
-
-    /* NOTE: ls_getLicenseUsage() is not supported by LSF v8.x
-    *  Wei Xing, ICR
-    */
-//    public static native lsfLicUsage.ByReference ls_getLicenseUsage();
-
-    public static native lsInfo.ByReference ls_info();
-
-    public static native Pointer ls_indexnames(lsInfo lsInfo1);
-
-    public static native int ls_isclustername(String string);
-
-
-    public static native String ls_gethosttype(String hostname);
-
-
-    public static native FloatByReference ls_getmodelfactor(String modelname);
-
-
-    public static native FloatByReference ls_gethostfactor(String hostname);
-
-
-    public static native String ls_gethostmodel(String hostname);
-
-    // NOTE: Not in liblsf
-    //public static native IntByReference ls_gethostrespriority(String hostname);
-
-
-    public static native int ls_lockhost(NativeLong duration);
-
-
-    public static native int ls_unlockhost();
-
-
-    public static native int ls_limcontrol(String hostname, int opCode);
-
-    public static native void ls_remtty(int ind, int enableIntSus);
-
-    public static native void ls_loctty(int ind);
-
-
-    public static native String ls_sysmsg();
-
-
-    public static native void ls_perror(String usrMsg);
-
-
-    public static native lsConf.ByReference ls_getconf(String string);
-
-    public static native void ls_freeconf(lsConf lsConf1);
-
-    public static native sharedConf.ByReference ls_readshared(String string1);
-
-    public static native clusterConf.ByReference ls_readcluster(String string1, lsInfo lsInfo1);
-
-    public static native clusterConf.ByReference ls_readcluster_ex(String string1, lsInfo lsInfo1, int int1);
-
-
-    public static native int _ls_initdebug(String appName);
-
-    public static native void ls_syslog(int level, String fmt, Pointer args);
-
-    public static native void ls_errlog(Pointer fp, String fmt, Pointer args);
-
-    // NOTE: va_list is too compiler specific.  Skipping this function.
-    //public static native void  ls_verrlog (Pointer fp, String fmt, va_list ap);
-
-    public static native int ls_fdbusy(int fd);
-
-
-    public static native String ls_getmnthost(String fn);
-
-    public static native int ls_servavail(int int1, int int2);
-
-    public static native int ls_getpriority(IntByReference priority);
-
-    public static native int ls_setpriority(int newPriority);
-
-    public static native void ls_ruunix2lsf(rusage rusage, lsfRusage lsfRusage);
-
-    public static native void ls_rulsf2unix(lsfRusage lsfRusage, rusage rusage);
-
-    public static native void cleanLsfRusage(lsfRusage lsfRusage1);
-
-    public static native void cleanRusage(rusage rusage1);
-
-
-    // NOTE: Not in liblsf
-    //public static native int getBEtime(String string1, byte byte1, NativeLongByReference long1);
-
-
-    public static native int ls_postevent(int int1, String string1, Pointer stringArray1, int int2);
-
-    public static native int ls_postmultievent(int int1, String string1, Pointer stringArray1, int int2, int int3);
-
-    public static class extResInfo extends Structure {
-        public static class ByReference extends extResInfo implements Structure.ByReference {}
-        public static class ByValue extends extResInfo implements Structure.ByValue {}
-        public extResInfo() {}
-        public extResInfo(Pointer p) { super(p); read(); }
-
-        public String name;
-        public String type;
-        public String interval;
-        public String increasing;
-        public String des;
-    }
-
-
-
-
-    // NOTE: Not in liblsf
-    //public static native int lim_vcl_get_eres_version();
-
-    // NOTE: Not in liblsf
-    //public static native extResInfo.ByReference lim_vcl_get_eres_def(String string1);
-
-    // NOTE: Not in liblsf
-    //public static native String lim_vcl_get_eres_loc(String string1);
-
-    // NOTE: Not in liblsf
-    //public static native String lim_vcl_get_eres_val(String string1);
-
-
-    public static int isspace(byte c) {
-        return ((c == 0x20 || c == 0x09 || c == 0x0a || c == 0x0b || c == 0x0c || c == 0x0d) ? 8 : 0);
-    }
-
-    public static final int LSF_VERSION = LSF_XDR_VERSION7_0_EP6;
-    public static final String LSF_CURRENT_VERSION = "7.06";
-
-
-    public static final String LSF_PRODUCT_COPYRIGHT_STR = "Copyright 1992-2009 Platform Computing Corp.";
-
-
-    public static final String LSF_NAME_STR = "Platform LSF";
-    public static final String LSF_IDENTIFIER_STR = "";
-    public static final String LSF_PRODUCT_NAME_STR = LSF_NAME_STR + LSF_IDENTIFIER_STR;
-
-
-    public static final String LSF_PRODUCT_COMMENT_STR = "";
-
-
-    public static final String LSF_PRODUCT_BUILD_STR = "";
-
-
-    public static final String LSF_PRODUCT_BUILD_DATE_STR = "";
-
-
-    public static final int LSF_PRODUCT_MAJOR_VERSION = 7;
-    public static final int LSF_PRODUCT_MINOR_VERSION = 0;
-    public static final int LSF_PRODUCT_MAINTAIN_VERSION = 6;
-
-    public static final String LSF_PRODUCT_MAJOR_VERSION_STR = "7";
-    public static final String LSF_PRODUCT_MINOR_VERSION_STR = "0";
-    public static final String LSF_PRODUCT_MAINTAIN_VERSION_STR = "6";
-
-    public static final String LSF_PRODUCT_VERSION_STR = LSF_PRODUCT_MAJOR_VERSION_STR + "." + LSF_PRODUCT_MINOR_VERSION_STR + "." + LSF_PRODUCT_MAINTAIN_VERSION_STR;
-    public static final String LSF_FILE_VERSION_STR = LSF_PRODUCT_MAJOR_VERSION_STR + "." + LSF_PRODUCT_MINOR_VERSION_STR + "." + LSF_PRODUCT_MAINTAIN_VERSION_STR;
-
-
-    public static final String _VERSION_STR_LSID_ = "Platform LSF HPC 7";
-    public static final String _LSID_VERSION_ = (_VERSION_STR_LSID_ + " Update " + _MINOR_STR_ + ", " + _DATE_STR_ + "\nCopyright 1992-2009 Platform Computing Corporation\n");
-
-
-    /* Removing since the ls_nio functions which use fd_set, etc. are not in liblsf.
-
-    public static final int NIO_STDIN_ON = 0x01;
-    public static final int NIO_STDIN_OFF = 0x02;
-    public static final int NIO_TAGSTDOUT_ON = 0x03;
-    public static final int NIO_TAGSTDOUT_OFF = 0x04;
-
-    public static final int NIO_TASK_STDINON = 0x01;
-    public static final int NIO_TASK_STDINOFF = 0x02;
-    public static final int NIO_TASK_ALL = 0x03;
-    public static final int NIO_TASK_CONNECTED = 0x04;
-
-    public static interface nioType {
-          public static final int NIO_STATUS = 0;
-          public static final int NIO_STDOUT = 1;
-          public static final int NIO_EOF = 2;
-          public static final int NIO_IOERR = 3;
-          public static final int NIO_REQUEUE = 4;
-          public static final int NIO_STDERR = 5;
-    }
-
-
-
-    public static class nioEvent extends Structure {
-        public static class ByReference extends nioEvent implements Structure.ByReference {}
-        public static class ByValue extends nioEvent implements Structure.ByValue {}
-        public nioEvent() {}
-        public nioEvent(Pointer p) { super(p); read(); }
-
-        public int tid;
-        public *//*nioType*//* int type;
-        public int status;
-    }
-
-
-
-    public static class nioInfo extends Structure {
-        public static class ByReference extends nioInfo implements Structure.ByReference {}
-        public static class ByValue extends nioInfo implements Structure.ByValue {}
-        public nioInfo() {}
-        public nioInfo(Pointer p) { super(p); read(); }
-
-        public int num;
-        public Pointer / * nioEvent.ByReference * / ioTask;
-    }
-
-
-    public static final int FD_SETSIZE = 64;
-
-    public static class fd_set extends Structure {
-        public static class ByReference extends fd_set implements Structure.ByReference {}
-        public static class ByValue extends fd_set implements Structure.ByValue {}
-        public fd_set() {}
-        public fd_set(Pointer p) { super(p); read(); }
-
-        public int count;
-        public int[] fd = new int[FD_SETSIZE];
-    }
-    */
-
-    public static native int ls_initdebug(String appName);
-
-    // NOTE: Not in liblsf
-    //public static native int ls_nioinit(int sock);
-
-    // NOTE: Not in liblsf
-    //public static native int ls_nioselect(int int1, fd_set fd_set1, fd_set fd_set2, fd_set fd_set3, Pointer nioInfoArray1, timeval timeval1);
-
-    // NOTE: Not in liblsf
-    //public static native int ls_nioctl(int int1, int int2);
-
-    // NOTE: Not in liblsf
-    //public static native int ls_nionewtask(int int1, int int2);
-
-    // NOTE: Not in liblsf
-    //public static native int ls_nioremovetask(int int1);
-
-    // NOTE: Not in liblsf
-    //public static native int ls_niowrite(String string1, int int1);
-
-    // NOTE: Not in liblsf
-    //public static native int ls_nioclose();
-
-    // NOTE: Not in liblsf
-    //public static native int ls_nioread(int int1, String string1, int int2);
-
-    // NOTE: Not in liblsf
-    //public static native int ls_niotasks(int int1, IntByReference int2, int int3);
-
-    // NOTE: Not in liblsf
-    //public static native int ls_niostatus(int int1, IntByReference int2, rusage rusage1);
-
-    // NOTE: Not in liblsf
-    //public static native int ls_niokill(int int1);
-
-    // NOTE: Not in liblsf
-    //public static native int ls_niosetdebug(int int2);
-
-    // NOTE: Not in liblsf
-    //public static native int ls_niodump(int int1, int int2, int int3, String string1);
-
-
-    public int lsf_res_version;
-
-
-    public static native int ls_initrex(int a, int b);
-
-    public static int ls_init(int a, int b) {
-        return ls_initrex(a, b);
-    }
-
-
-    public static native int ls_donerex();
-
-    public static native int ls_niossync(int int1);
-
-
-    public static native int ls_setstdin(int on, IntByReference rpidlist, int len);
-
-
-    public static native int ls_getstdin(int on, IntByReference rpidlist, int maxlen);
-
-    public static native int ls_setstdout(int on, String format);
-
-
-    public static native int ls_stdinmode(int onoff);
-
-
-    public static native int ls_stoprex();
-
-
-    public static native int ls_chdir(String string1, String string2);
-
-
-    public static native int ls_connect(String string1);
-
-
-    public static native int ls_rkill(int int1, int int2);
-
-
-    public static native int ls_rsetenv(String host, Pointer env);
-
-    public static native int ls_rsetenv_async(String host, Pointer env);
-
-
-    public static native int ls_rescontrol(String host, int opcode, int options);
-
-
-    public static native lsfAcctRec.ByReference ls_getacctrec(Pointer pointer1, IntByReference int1);
-
-    public static native int ls_putacctrec(Pointer pointer1, lsfAcctRec lsfAcctRec1);
-
-
-    // NOTE: No idea what resLogRecord is.
-    //public static native resLogRecord.ByReference ls_readrexlog (Pointer );
-
-
-    public static native int ls_rexecv(String string1, Pointer string2, int int1);
-
-
-    public static native int ls_rexecve(String string1, Pointer stringArray1, int int1, Pointer stringArray2);
-
-    public static native int ls_rexecv2(String string1, Pointer stringArray1, int int1);
-
-    public static native int ls_startserver(String string1, Pointer stringArray1, int int1);
-
-
-    public static native int ls_rtask(String string1, Pointer stringArray1, int int1);
-
-
-    public static native int ls_rtaske(String string1, Pointer stringArray1, int int1, Pointer stringArray2);
-
-    public static native int ls_rtask2(String string1, Pointer stringArray1, int int1, Pointer stringArray2);
-
-
-    public static native int ls_rwait(IntByReference int1, int int2, rusage rusage1);
-
-
-    public static native int ls_rwaittid(int int1, IntByReference int2, int int3, rusage rusage1);
-
-
-    public static native int ls_conntaskport(int tid);
-
-
-    public static native int ls_ropen(String host, String fn, int flags, int mode);
-
-
-    public static native int ls_rclose(int rfd);
-
-
-    public static native int ls_rwrite(int rfd, String buf, int len);
-
-
-    public static native int ls_rread(int rfd, String buf, int len);
-
-
-    public static native NativeLong ls_rlseek(int rfd, NativeLong offset, int whence);
-
-
-    public static native int ls_runlink(String host, String fn);
-
-    public static native int ls_rfstat(int rfd, Pointer buf);
-
-    public static native int ls_rstat(String host, String fn, Pointer buf);
-
-
-    public static native String ls_rgetmnthost(String host, String fn);
-
-
-    public static native int ls_rfcontrol(int command, int arg);
-
-
-    public static native int ls_rfterminate(String host);
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/locusiterator/AlignmentStateMachine.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/locusiterator/AlignmentStateMachine.java
deleted file mode 100644
index f940386..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/locusiterator/AlignmentStateMachine.java
+++ /dev/null
@@ -1,370 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.locusiterator;
-
-import com.google.java.contract.Ensures;
-import com.google.java.contract.Invariant;
-import com.google.java.contract.Requires;
-import htsjdk.samtools.Cigar;
-import htsjdk.samtools.CigarElement;
-import htsjdk.samtools.CigarOperator;
-import org.broadinstitute.gatk.utils.GenomeLoc;
-import org.broadinstitute.gatk.utils.GenomeLocParser;
-import org.broadinstitute.gatk.utils.exceptions.UserException;
-import org.broadinstitute.gatk.utils.pileup.PileupElement;
-import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
-
-/**
- * Steps a single read along its alignment to the genome
- *
- * The logical model for generating extended events is as follows: the "record state"
- * implements the traversal along the reference; thus stepForwardOnGenome() returns
- * on every and only on actual reference bases. This can be a (mis)match or a deletion
- * (in the latter case, we still return on every individual reference base the deletion spans).
- *
- * User: depristo
- * Date: 1/5/13
- * Time: 1:08 PM
- */
-@Invariant({
-        "nCigarElements >= 0",
-        "cigar != null",
-        "read != null",
-        "currentCigarElementOffset >= -1",
-        "currentCigarElementOffset <= nCigarElements"
-})
-public class AlignmentStateMachine {
-    /**
-     * Our read
-     */
-    private final GATKSAMRecord read;
-    private final Cigar cigar;
-    private final int nCigarElements;
-    private int currentCigarElementOffset = -1;
-
-    /**
-     * how far are we offset from the start of the read bases?
-     */
-    private int readOffset;
-
-    /**
-     * how far are we offset from the alignment start on the genome?
-     */
-    private int genomeOffset;
-
-    /**
-     * Our cigar element
-     */
-    private CigarElement currentElement;
-
-    /**
-     * how far are we into our cigarElement?
-     */
-    private int offsetIntoCurrentCigarElement;
-
-    @Requires({"read != null", "read.getAlignmentStart() != -1", "read.getCigar() != null"})
-    public AlignmentStateMachine(final GATKSAMRecord read) {
-        this.read = read;
-        this.cigar = read.getCigar();
-        this.nCigarElements = cigar.numCigarElements();
-        initializeAsLeftEdge();
-    }
-
-    /**
-     * Initialize the state variables to put this machine one bp before the
-     * start of the alignment, so that a call to stepForwardOnGenome() will advance
-     * us to the first proper location
-     */
-    @Ensures("isLeftEdge()")
-    private void initializeAsLeftEdge() {
-        readOffset = offsetIntoCurrentCigarElement = genomeOffset = -1;
-        currentElement = null;
-    }
-
-    /**
-     * Get the read we are aligning to the genome
-     * @return a non-null GATKSAMRecord
-     */
-    @Ensures("result != null")
-    public GATKSAMRecord getRead() {
-        return read;
-    }
-
-    /**
-     * Get the reference index of the underlying read
-     *
-     * @return the reference index of the read
-     */
-    @Ensures("result == getRead().getReferenceIndex()")
-    public int getReferenceIndex() {
-        return getRead().getReferenceIndex();
-    }
-
-    /**
-     * Is this the left edge state?  I.e., one that is before or after the current read?
-     * @return true if this state is an edge state, false otherwise
-     */
-    public boolean isLeftEdge() {
-        return readOffset == -1;
-    }
-
-    /**
-     * Are we on the right edge?  I.e., is the current state off the right of the alignment?
-     * @return true if off the right edge, false if otherwise
-     */
-    public boolean isRightEdge() {
-        return readOffset == read.getReadLength();
-    }
-
-    /**
-     * What is our current offset in the read's bases that aligns us with the reference genome?
-     *
-     * @return the current read offset position.  If an edge will be == -1
-     */
-    @Ensures("result >= -1")
-    public int getReadOffset() {
-        return readOffset;
-    }
-
-    /**
-     * What is the current offset w.r.t. the alignment state that aligns us to the readOffset?
-     *
-     * @return the current offset from the alignment start on the genome.  If this state is
-     * at the left edge the result will be -1;
-     */
-    @Ensures("result >= -1")
-    public int getGenomeOffset() {
-        return genomeOffset;
-    }
-
-    /**
-     * Get the position (1-based as standard) of the current alignment on the genome w.r.t. the read's alignment start
-     * @return the position on the genome of the current state in absolute coordinates
-     */
-    @Ensures("result > 0")
-    public int getGenomePosition() {
-        return read.getAlignmentStart() + getGenomeOffset();
-    }
-
-    /**
-     * Gets #getGenomePosition but as a 1 bp GenomeLoc
-     * @param genomeLocParser the parser to use to create the genome loc
-     * @return a non-null genome location with start position of getGenomePosition
-     */
-    @Requires("genomeLocParser != null")
-    @Ensures("result != null")
-    public GenomeLoc getLocation(final GenomeLocParser genomeLocParser) {
-        // TODO -- may return wonky results if on an edge (could be 0 or could be beyond genome location)
-        return genomeLocParser.createGenomeLoc(read.getReferenceName(), getGenomePosition());
-    }
-
-    /**
-     * Get the cigar element we're currently aligning with.
-     *
-     * For example, if the cigar string is 2M2D2M and we're in the second step of the
-     * first 2M, then this function returns the element 2M.  After calling stepForwardOnGenome
-     * this function would return 2D.
-     *
-     * @return the cigar element, or null if we're the left edge
-     */
-    @Ensures("result != null || isLeftEdge() || isRightEdge()")
-    public CigarElement getCurrentCigarElement() {
-        return currentElement;
-    }
-
-    /**
-     * Get the offset of the current cigar element among all cigar elements in the read
-     *
-     * Suppose our read's cigar is 1M2D3M, and we're at the first 1M.  This would
-     * return 0.  Stepping forward puts us in the 2D, so our offset is 1.  Another
-     * step forward would result in a 1 again (we're in the second position of the 2D).
-     * Finally, one more step forward brings us to 2 (for the 3M element)
-     *
-     * @return the offset of the current cigar element in the reads's cigar.  Will return -1 for
-     * when the state is on the left edge, and be == the number of cigar elements in the
-     * read when we're past the last position on the genome
-     */
-    @Ensures({"result >= -1", "result <= nCigarElements"})
-    public int getCurrentCigarElementOffset() {
-        return currentCigarElementOffset;
-    }
-
-    /**
-     * Get the offset of the current state into the current cigar element
-     *
-     * That is, suppose we have a read with cigar 2M3D4M, and we're right at
-     * the second M position.  offsetIntoCurrentCigarElement would be 1, as
-     * it's two elements into the 2M cigar.  Now stepping forward we'd be
-     * in cigar element 3D, and our offsetIntoCurrentCigarElement would be 0.
-     *
-     * @return the offset (from 0) of the current state in the current cigar element.
-     *  Will be 0 on the right edge, and -1 on the left.
-     */
-    @Ensures({"result >= 0 || (result == -1 && isLeftEdge())", "!isRightEdge() || result == 0"})
-    public int getOffsetIntoCurrentCigarElement() {
-        return offsetIntoCurrentCigarElement;
-    }
-
-    /**
-     * Convenience accessor of the CigarOperator of the current cigar element
-     *
-     * Robust to the case where we're on the edge, and currentElement is null, in which
-     * case this function returns null as well
-     *
-     * @return null if this is an edge state
-     */
-    @Ensures("result != null || isLeftEdge() || isRightEdge()")
-    public CigarOperator getCigarOperator() {
-        return currentElement == null ? null : currentElement.getOperator();
-    }
-
-    @Override
-    public String toString() {
-        return String.format("%s ro=%d go=%d cec=%d %s", read.getReadName(), readOffset, genomeOffset, offsetIntoCurrentCigarElement, currentElement);
-    }
-
-    // -----------------------------------------------------------------------------------------------
-    //
-    // Code for setting up prev / next states
-    //
-    // -----------------------------------------------------------------------------------------------
-
-    /**
-     * Step the state machine forward one unit
-     *
-     * Takes the current state of this machine, and advances the state until the next on-genome
-     * cigar element (M, X, =, D) is encountered, at which point this function returns with the
-     * cigar operator of the current element.
-     *
-     * Assumes that the AlignmentStateMachine is in the left edge state at the start, so that
-     * stepForwardOnGenome() can be called to move the machine to the first alignment position.  That
-     * is, the normal use of this code is:
-     *
-     * AlignmentStateMachine machine = new AlignmentStateMachine(read)
-     * machine.stepForwardOnGenome()
-     * // now the machine is at the first position on the genome
-     *
-     * When stepForwardOnGenome() advances off the right edge of the read, the state machine is
-     * left in a state such that isRightEdge() returns true and returns null, indicating the
-     * the machine cannot advance further.  The machine may explode, though this is not contracted,
-     * if stepForwardOnGenome() is called after a previous call returned null.
-     *
-     * @return the operator of the cigar element that machine stopped at, null if we advanced off the end of the read
-     */
-    @Ensures("result != null || isRightEdge()")
-    public CigarOperator stepForwardOnGenome() {
-        // loop until we either find a cigar element step that moves us one base on the genome, or we run
-        // out of cigar elements
-        while ( true ) {
-            // we enter this method with readOffset = index of the last processed base on the read
-            // (-1 if we did not process a single base yet); this can be last matching base,
-            // or last base of an insertion
-            if (currentElement == null || (offsetIntoCurrentCigarElement + 1) >= currentElement.getLength()) {
-                currentCigarElementOffset++;
-                if (currentCigarElementOffset < nCigarElements) {
-                    currentElement = cigar.getCigarElement(currentCigarElementOffset);
-                    offsetIntoCurrentCigarElement = -1;
-                    // next line: guards against cigar elements of length 0; when new cigar element is retrieved,
-                    // we reenter in order to re-check offsetIntoCurrentCigarElement against currentElement's length
-                    continue;
-                } else {
-                    if (currentElement != null && currentElement.getOperator() == CigarOperator.D)
-                        throw new UserException.MalformedBAM(read, "read ends with deletion. Cigar: " + read.getCigarString() + ". Although the SAM spec technically permits such reads, this is often indicative of malformed files. If you are sure you want to use this file, re-run your analysis with the extra option: -rf BadCigar");
-
-                    // we're done, so set the offset of the cigar to 0 for cleanliness, as well as the current element
-                    offsetIntoCurrentCigarElement = 0;
-                    readOffset = read.getReadLength();
-                    currentElement = null;
-
-                    // Reads that contain indels model the genomeOffset as the following base in the reference.  Because
-                    // we fall into this else block only when indels end the read, increment genomeOffset  such that the
-                    // current offset of this read is the next ref base after the end of the indel.  This position will
-                    // model a point on the reference somewhere after the end of the read.
-                    genomeOffset++; // extended events need that. Logically, it's legal to advance the genomic offset here:
-
-                    // we do step forward on the ref, and by returning null we also indicate that we are past the read end.
-                    return null;
-                }
-            }
-
-            offsetIntoCurrentCigarElement++;
-            boolean done = false;
-            switch (currentElement.getOperator()) {
-                case H: // ignore hard clips
-                case P: // ignore pads
-                    offsetIntoCurrentCigarElement = currentElement.getLength();
-                    break;
-                case I: // insertion w.r.t. the reference
-                case S: // soft clip
-                    offsetIntoCurrentCigarElement = currentElement.getLength();
-                    readOffset += currentElement.getLength();
-                    break;
-                case D: // deletion w.r.t. the reference
-                    if (readOffset < 0)             // we don't want reads starting with deletion, this is a malformed cigar string
-                        throw new UserException.MalformedBAM(read, "read starts with deletion. Cigar: " + read.getCigarString() + ". Although the SAM spec technically permits such reads, this is often indicative of malformed files. If you are sure you want to use this file, re-run your analysis with the extra option: -rf BadCigar");
-                    // should be the same as N case
-                    genomeOffset++;
-                    done = true;
-                    break;
-                case N: // reference skip (looks and gets processed just like a "deletion", just different logical meaning)
-                    genomeOffset++;
-                    done = true;
-                    break;
-                case M:
-                case EQ:
-                case X:
-                    readOffset++;
-                    genomeOffset++;
-                    done = true;
-                    break;
-                default:
-                    throw new IllegalStateException("Case statement didn't deal with cigar op: " + currentElement.getOperator());
-            }
-
-            if ( done )
-                return currentElement.getOperator();
-        }
-    }
-
-    /**
-     * Create a new PileupElement based on the current state of this element
-     *
-     * Must not be a left or right edge
-     *
-     * @return a pileup element
-     */
-    @Ensures("result != null")
-    public final PileupElement makePileupElement() {
-        if ( isLeftEdge() || isRightEdge() )
-            throw new IllegalStateException("Cannot make a pileup element from an edge alignment state");
-        return new PileupElement(read,
-                getReadOffset(),
-                getCurrentCigarElement(),
-                getCurrentCigarElementOffset(),
-                getOffsetIntoCurrentCigarElement());
-    }
-}
-
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/locusiterator/LIBSDownsamplingInfo.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/locusiterator/LIBSDownsamplingInfo.java
deleted file mode 100644
index 01bf17d..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/locusiterator/LIBSDownsamplingInfo.java
+++ /dev/null
@@ -1,51 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.locusiterator;
-
-/**
- * Simple wrapper about the information LIBS needs about downsampling
- *
- * User: depristo
- * Date: 1/5/13
- * Time: 1:26 PM
- */
-class LIBSDownsamplingInfo {
-    final private boolean performDownsampling;
-    final private int toCoverage;
-
-    public LIBSDownsamplingInfo(boolean performDownsampling, int toCoverage) {
-        this.performDownsampling = performDownsampling;
-        this.toCoverage = toCoverage;
-    }
-
-    public boolean isPerformDownsampling() {
-        return performDownsampling;
-    }
-
-    public int getToCoverage() {
-        return toCoverage;
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/locusiterator/LIBSPerformance.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/locusiterator/LIBSPerformance.java
deleted file mode 100644
index d9b158f..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/locusiterator/LIBSPerformance.java
+++ /dev/null
@@ -1,198 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.locusiterator;
-
-import htsjdk.samtools.reference.IndexedFastaSequenceFile;
-import htsjdk.samtools.SAMFileHeader;
-import htsjdk.samtools.SAMFileReader;
-import htsjdk.samtools.SAMReadGroupRecord;
-import htsjdk.samtools.SAMRecordIterator;
-import org.apache.log4j.Logger;
-import org.broadinstitute.gatk.utils.commandline.Argument;
-import org.broadinstitute.gatk.utils.commandline.CommandLineProgram;
-import org.broadinstitute.gatk.utils.commandline.Input;
-import org.broadinstitute.gatk.engine.ReadProperties;
-import org.broadinstitute.gatk.engine.contexts.AlignmentContext;
-import org.broadinstitute.gatk.engine.iterators.GATKSAMRecordIterator;
-import org.broadinstitute.gatk.utils.*;
-import org.broadinstitute.gatk.utils.fasta.CachingIndexedFastaSequenceFile;
-import org.broadinstitute.gatk.utils.sam.ArtificialSAMUtils;
-import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
-import org.broadinstitute.gatk.utils.sam.GATKSamRecordFactory;
-
-import java.io.File;
-import java.io.FileNotFoundException;
-import java.io.IOException;
-import java.util.*;
-
-/**
- * Caliper microbenchmark of fragment pileup
- */
-public class LIBSPerformance extends CommandLineProgram {
-    private static Logger logger = Logger.getLogger(LIBSPerformance.class);
-
-    @Input(fullName = "input_file", shortName = "I", doc = "SAM or BAM file(s)", required = true)
-    public File samFile = null;
-
-    @Input(fullName = "reference_sequence", shortName = "R", doc = "Reference sequence file", required = true)
-    public File referenceFile = null;
-
-    @Argument(fullName = "L", shortName = "L", doc = "Query location", required = false)
-    public String location = null;
-
-    @Argument(fullName = "dt", shortName = "dt", doc = "Enable downsampling", required = false)
-    public boolean downsample = false;
-
-    @Override
-    public int execute() throws IOException {
-        final IndexedFastaSequenceFile reference = new CachingIndexedFastaSequenceFile(referenceFile);
-        final GenomeLocParser genomeLocParser = new GenomeLocParser(reference);
-
-        final SAMFileReader reader = new SAMFileReader(samFile);
-        reader.setSAMRecordFactory(new GATKSamRecordFactory());
-
-        SAMRecordIterator rawIterator;
-        if ( location == null )
-            rawIterator = reader.iterator();
-        else {
-            final GenomeLoc loc = genomeLocParser.parseGenomeLoc(location);
-            rawIterator = reader.query(loc.getContig(), loc.getStart(), loc.getStop(), false);
-        }
-
-        final GATKSAMRecordIterator iterator = new GATKSAMRecordIterator(rawIterator);
-
-        final Set<String> samples = new HashSet<String>();
-        for ( final SAMReadGroupRecord rg : reader.getFileHeader().getReadGroups() )
-            samples.add(rg.getSample());
-
-        final LIBSDownsamplingInfo ds = new LIBSDownsamplingInfo(downsample, 250);
-
-        final LocusIteratorByState libs =
-                new LocusIteratorByState(
-                        iterator,
-                        ds,
-                        true,
-                        genomeLocParser,
-                        samples,
-                        false);
-
-        final SimpleTimer timer = new SimpleTimer().start();
-        int bp = 0;
-        double lastElapsed = 0;
-        while ( libs.hasNext() ) {
-            AlignmentContext context = libs.next();
-            bp++;
-            if ( timer.getElapsedTime() - lastElapsed > 10 ) {
-                logger.info(bp + " iterations at " + context.getLocation());
-                lastElapsed = timer.getElapsedTime();
-            }
-        }
-        logger.info(String.format("runtime in seconds: %.2f", timer.getElapsedTime()));
-
-        return 0;
-    }
-
-//    private void syntheticTests() {
-//        final int readLength = 101;
-//        final int nReads = 10000;
-//        final int locus = 1;
-//
-//        SAMFileHeader header = ArtificialSAMUtils.createArtificialSamHeader(1, 1, 1000);
-//        final GenomeLocParser genomeLocParser = new GenomeLocParser(header.getSequenceDictionary());
-//
-//        int nIterations = 0;
-//        for ( final String cigar : Arrays.asList("101M", "50M10I40M", "50M10D40M") ) {
-//            GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(header, "read", 0, locus, readLength);
-//            read.setReadBases(Utils.dupBytes((byte) 'A', readLength));
-//            final byte[] quals = new byte[readLength];
-//            for ( int i = 0; i < readLength; i++ )
-//                quals[i] = (byte)(i % QualityUtils.MAX_SAM_QUAL_SCORE);
-//            read.setBaseQualities(quals);
-//            read.setCigarString(cigar);
-//
-//            for ( int j = 0; j < nReads; j++ ) {
-//                for ( int i = 0; i < rep; i++ ) {
-//                    switch ( op ) {
-//                        case NEW_STATE:
-//                        {
-//                            final AlignmentStateMachine alignmentStateMachine = new AlignmentStateMachine(read);
-//                            while ( alignmentStateMachine.stepForwardOnGenome() != null ) {
-//                                nIterations++;
-//                            }
-//                        }
-//                        break;
-////                        case OLD_STATE:
-////                        {
-////                            final SAMRecordAlignmentState alignmentStateMachine = new SAMRecordAlignmentState(read);
-////                            while ( alignmentStateMachine.stepForwardOnGenome() != null ) {
-////                                alignmentStateMachine.getRead();
-////                                nIterations++;
-////                            }
-////                        }
-////                        break;
-//                        case NEW_LIBS:
-//                        {
-//                            final List<GATKSAMRecord> reads = Collections.nCopies(30, read);
-//                            final org.broadinstitute.gatk.utils.locusiterator.LocusIteratorByState libs =
-//                                    new org.broadinstitute.gatk.utils.locusiterator.LocusIteratorByState(
-//                                            new LocusIteratorByStateBaseTest.FakeCloseableIterator<GATKSAMRecord>(reads.iterator()),
-//                                            LocusIteratorByStateBaseTest.createTestReadProperties(),
-//                                            genomeLocParser,
-//                                            LocusIteratorByState.sampleListForSAMWithoutReadGroups());
-//
-//                            while ( libs.hasNext() ) {
-//                                AlignmentContext context = libs.next();
-//                            }
-//                        }
-//                    }
-//                }
-//            }
-//        }
-//
-//        System.out.printf("iterations %d%n", nIterations);
-//    }
-
-    /**
-     * Required main method implementation.
-     * @param argv Command-line argument text.
-     * @throws Exception on error.
-     */
-    public static void main(String[] argv) throws Exception {
-        int returnCode = 0;
-        try {
-            LIBSPerformance instance = new LIBSPerformance();
-            start(instance, argv);
-            returnCode = 0;
-        } catch(Exception ex) {
-            returnCode = 1;
-            ex.printStackTrace();
-            throw ex;
-        } finally {
-            System.exit(returnCode);
-        }
-    }
-
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/locusiterator/LocusIterator.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/locusiterator/LocusIterator.java
deleted file mode 100644
index 72764e4..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/locusiterator/LocusIterator.java
+++ /dev/null
@@ -1,62 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.locusiterator;
-
-import htsjdk.samtools.util.CloseableIterator;
-import org.broadinstitute.gatk.engine.contexts.AlignmentContext;
-
-import java.util.Iterator;
-
-/**
- * Iterator that traverses a SAM File, accumulating information on a per-locus basis
- */
-public abstract class LocusIterator implements Iterable<AlignmentContext>, CloseableIterator<AlignmentContext> {
-    public Iterator<AlignmentContext> iterator() {
-        return this;
-    }
-
-    public void close() {
-        //this.it.close();
-    }
-
-    public abstract boolean hasNext();
-    public abstract AlignmentContext next();
-
-    /**
-     * Get, if possible, the underlying LocusIteratorByState from this LocusIterator.
-     *
-     * @throws UnsupportedOperationException if we don't support this operation
-     *
-     * @return a non-null locus iterator by state
-     */
-    public LocusIteratorByState getLIBS() {
-        throw new UnsupportedOperationException("This locus iterator does not support getting the underlying LocusIteratorByState");
-    }
-
-    public void remove() {
-        throw new UnsupportedOperationException("Can not remove records from a SAM file via an iterator!");
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/locusiterator/LocusIteratorByState.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/locusiterator/LocusIteratorByState.java
deleted file mode 100644
index aaf6190..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/locusiterator/LocusIteratorByState.java
+++ /dev/null
@@ -1,454 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.locusiterator;
-
-import com.google.java.contract.Ensures;
-import com.google.java.contract.Requires;
-import htsjdk.samtools.CigarOperator;
-import htsjdk.samtools.SAMFileReader;
-import htsjdk.samtools.SAMRecord;
-import htsjdk.samtools.util.CloseableIterator;
-import org.apache.log4j.Logger;
-import org.broadinstitute.gatk.engine.ReadProperties;
-import org.broadinstitute.gatk.engine.contexts.AlignmentContext;
-import org.broadinstitute.gatk.engine.downsampling.DownsampleType;
-import org.broadinstitute.gatk.engine.iterators.GATKSAMRecordIterator;
-import org.broadinstitute.gatk.utils.GenomeLoc;
-import org.broadinstitute.gatk.utils.GenomeLocParser;
-import org.broadinstitute.gatk.utils.SampleUtils;
-import org.broadinstitute.gatk.utils.pileup.PileupElement;
-import org.broadinstitute.gatk.utils.pileup.ReadBackedPileupImpl;
-import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
-import org.broadinstitute.gatk.utils.sam.ReadUtils;
-
-import java.util.*;
-
-/**
- * Iterator that traverses a SAM File, accumulating information on a per-locus basis
- *
- * Produces AlignmentContext objects, that contain ReadBackedPileups of PileupElements.  This
- * class has its core job of converting an iterator of ordered SAMRecords into those
- * RBPs.
- *
- * There are a few constraints on required and ensured by LIBS:
- *
- * -- Requires the Iterator<GATKSAMRecord> to returns reads in coordinate sorted order, consistent with the ordering
- * defined by the SAM file format.  That that for performance reasons this constraint isn't actually enforced.
- * The behavior of LIBS is undefined in the case where the reads are badly ordered.
- * -- The reads in the ReadBackedPileup are themselves in the order of appearance of the reads from the iterator.
- * That is, the pileup is ordered in a way consistent with the SAM coordinate ordering
- * -- Only aligned reads with at least one on-genomic cigar operator are passed on in the pileups.  That is,
- * unmapped reads or reads that are all insertions (10I) or soft clipped (10S) are not passed on.
- * -- LIBS can perform per-sample downsampling of a variety of kinds.
- * -- Because of downsampling there's no guarantee that:
- *   -- A read that could be aligned to a position will actually occur in the pileup (downsampled away)
- *   -- A read that appears in a previous pileup that could align to a future position will actually occur
- *      in that pileup.  That is, a read might show up at position i but be downsampled away in the pileup at j
- * -- LIBS can optionally capture all of the reads that come off the iterator, before any leveling downsampling
- * occurs, if requested.  This allows users of LIBS to see both a ReadBackedPileup view of the data as well as
- * a stream of unique, sorted reads
- */
-public final class LocusIteratorByState extends LocusIterator {
-    /** Indicates that we shouldn't do any downsampling */
-    public final static LIBSDownsamplingInfo NO_DOWNSAMPLING = new LIBSDownsamplingInfo(false, -1);
-
-    /**
-     * our log, which we want to capture anything from this class
-     */
-    private final static Logger logger = Logger.getLogger(LocusIteratorByState.class);
-
-    // -----------------------------------------------------------------------------------------------------------------
-    //
-    // member fields
-    //
-    // -----------------------------------------------------------------------------------------------------------------
-
-    /**
-     * Used to create new GenomeLocs as needed
-     */
-    private final GenomeLocParser genomeLocParser;
-
-    /**
-     * A complete list of all samples that may come out of the reads.  Must be
-     * comprehensive.
-     */
-    private final ArrayList<String> samples;
-
-    /**
-     * The system that maps incoming reads from the iterator to their pileup states
-     */
-    private final ReadStateManager readStates;
-
-    /**
-     * Should we include reads in the pileup which are aligned with a deletion operator to the reference?
-     */
-    private final boolean includeReadsWithDeletionAtLoci;
-
-    /**
-     * The next alignment context.  A non-null value means that a
-     * context is waiting from hasNext() for sending off to the next next() call.  A null
-     * value means that either hasNext() has not been called at all or that
-     * the underlying iterator is exhausted
-     */
-    private AlignmentContext nextAlignmentContext;
-
-    // -----------------------------------------------------------------------------------------------------------------
-    //
-    // constructors and other basic operations
-    //
-    // -----------------------------------------------------------------------------------------------------------------
-
-    /**
-     * Create a new LocusIteratorByState
-     *
-     * @param samIterator the iterator of reads to process into pileups.  Reads must be ordered
-     *                    according to standard coordinate-sorted BAM conventions
-     * @param readInformation meta-information about how to process the reads (i.e., should we do downsampling?)
-     * @param genomeLocParser used to create genome locs
-     * @param samples a complete list of samples present in the read groups for the reads coming from samIterator.
-     *                This is generally just the set of read group sample fields in the SAMFileHeader.  This
-     *                list of samples may contain a null element, and all reads without read groups will
-     *                be mapped to this null sample
-     */
-    public LocusIteratorByState(final Iterator<GATKSAMRecord> samIterator,
-                                final ReadProperties readInformation,
-                                final GenomeLocParser genomeLocParser,
-                                final Collection<String> samples) {
-        this(samIterator,
-                toDownsamplingInfo(readInformation),
-                readInformation.includeReadsWithDeletionAtLoci(),
-                genomeLocParser,
-                samples,
-                readInformation.keepUniqueReadListInLIBS());
-    }
-
-    /**
-     * Create a new LocusIteratorByState based on a SAMFileReader using reads in an iterator it
-     *
-     * Simple constructor that uses the samples in the reader, doesn't do any downsampling,
-     * and makes a new GenomeLocParser using the reader.  This constructor will be slow(ish)
-     * if you continually invoke this constructor, but it's easy to make.
-     *
-     * @param reader a non-null reader
-     * @param it an iterator from reader that has the reads we want to use to create ReadBackPileups
-     */
-    public LocusIteratorByState(final SAMFileReader reader, final CloseableIterator<SAMRecord> it) {
-        this(new GATKSAMRecordIterator(it),
-                new LIBSDownsamplingInfo(false, 0),
-                true,
-                new GenomeLocParser(reader.getFileHeader().getSequenceDictionary()),
-                SampleUtils.getSAMFileSamples(reader.getFileHeader()),
-                false);
-    }
-
-    /**
-     * Create a new LocusIteratorByState
-     *
-     * @param samIterator the iterator of reads to process into pileups.  Reads must be ordered
-     *                    according to standard coordinate-sorted BAM conventions
-     * @param downsamplingInfo meta-information about how to downsampling the reads
-     * @param genomeLocParser used to create genome locs
-     * @param samples a complete list of samples present in the read groups for the reads coming from samIterator.
-     *                This is generally just the set of read group sample fields in the SAMFileHeader.  This
-     *                list of samples may contain a null element, and all reads without read groups will
-     *                be mapped to this null sample
-     * @param maintainUniqueReadsList if true, we will keep the unique reads from off the samIterator and make them
-     *                                available via the transferReadsFromAllPreviousPileups interface
-     */
-    public LocusIteratorByState(final Iterator<GATKSAMRecord> samIterator,
-                                final LIBSDownsamplingInfo downsamplingInfo,
-                                final boolean includeReadsWithDeletionAtLoci,
-                                final GenomeLocParser genomeLocParser,
-                                final Collection<String> samples,
-                                final boolean maintainUniqueReadsList) {
-        if ( samIterator == null ) throw new IllegalArgumentException("samIterator cannot be null");
-        if ( downsamplingInfo == null ) throw new IllegalArgumentException("downsamplingInfo cannot be null");
-        if ( genomeLocParser == null ) throw new IllegalArgumentException("genomeLocParser cannot be null");
-        if ( samples == null ) throw new IllegalArgumentException("Samples cannot be null");
-
-        // currently the GATK expects this LocusIteratorByState to accept empty sample lists, when
-        // there's no read data.  So we need to throw this error only when samIterator.hasNext() is true
-        if (samples.isEmpty() && samIterator.hasNext()) {
-            throw new IllegalArgumentException("samples list must not be empty");
-        }
-
-        this.genomeLocParser = genomeLocParser;
-        this.includeReadsWithDeletionAtLoci = includeReadsWithDeletionAtLoci;
-        this.samples = new ArrayList<String>(samples);
-        this.readStates = new ReadStateManager(samIterator, this.samples, downsamplingInfo, maintainUniqueReadsList);
-    }
-
-    @Override
-    public Iterator<AlignmentContext> iterator() {
-        return this;
-    }
-
-    /**
-     * Get the current location (i.e., the bp of the center of the pileup) of the pileup, or null if not anywhere yet
-     *
-     * Assumes that read states is updated to reflect the current pileup position, but not advanced to the
-     * next location.
-     *
-     * @return the location of the current pileup, or null if we're after all reads
-     */
-    private GenomeLoc getLocation() {
-        return readStates.isEmpty() ? null : readStates.getFirst().getLocation(genomeLocParser);
-    }
-
-    // -----------------------------------------------------------------------------------------------------------------
-    //
-    // next() routine and associated collection operations
-    //
-    // -----------------------------------------------------------------------------------------------------------------
-
-    /**
-     * Is there another pileup available?
-     * @return
-     */
-    @Override
-    public boolean hasNext() {
-        lazyLoadNextAlignmentContext();
-        return nextAlignmentContext != null;
-    }
-
-    /**
-     * Get the next AlignmentContext available from the reads.
-     *
-     * @return a non-null AlignmentContext of the pileup after to the next genomic position covered by
-     * at least one read.
-     */
-    @Override
-    public AlignmentContext next() {
-        lazyLoadNextAlignmentContext();
-        if (!hasNext())
-            throw new NoSuchElementException("LocusIteratorByState: out of elements.");
-        AlignmentContext currentAlignmentContext = nextAlignmentContext;
-        nextAlignmentContext = null;
-        return currentAlignmentContext;
-    }
-
-    /**
-     * Move this LIBS until we are over position
-     *
-     * Will return null if cannot reach position (because we run out of data in the locus)
-     *
-     * @param position the start position of the AlignmentContext we want back
-     * @param stopAtFirstNonEmptySiteAfterPosition if true, we will stop as soon as we find a context with data with
-     *                                             position >= position, otherwise we will return a null value
-     *                                             and consume the data for the next position.  This means that without
-     *                                             specifying this value the LIBS will be in an indeterminate state
-     *                                             after calling this function, and should be reconstructed from scratch
-     *                                             for subsequent use
-     * @return a AlignmentContext at position, or null if this isn't possible
-     */
-    public AlignmentContext advanceToLocus(final int position, final boolean stopAtFirstNonEmptySiteAfterPosition) {
-        while ( hasNext() ) {
-            final AlignmentContext context = next();
-
-            if ( context == null )
-                // we ran out of data
-                return null;
-
-            if ( context.getPosition() == position )
-                return context;
-
-            if ( context.getPosition() > position)
-                return stopAtFirstNonEmptySiteAfterPosition ? context : null;
-        }
-
-        return null;
-    }
-
-    /**
-     * Creates the next alignment context from the given state.  Note that this is implemented as a
-     * lazy load method. nextAlignmentContext MUST BE null in order for this method to advance to the
-     * next entry.
-     */
-    private void lazyLoadNextAlignmentContext() {
-        while (nextAlignmentContext == null && readStates.hasNext()) {
-            readStates.collectPendingReads();
-
-            final GenomeLoc location = getLocation();
-            final Map<String, ReadBackedPileupImpl> fullPileup = new HashMap<String, ReadBackedPileupImpl>();
-
-            for (final Map.Entry<String, PerSampleReadStateManager> sampleStatePair : readStates ) {
-                final String sample = sampleStatePair.getKey();
-                final PerSampleReadStateManager readState = sampleStatePair.getValue();
-                final Iterator<AlignmentStateMachine> iterator = readState.iterator();
-                final List<PileupElement> pile = new ArrayList<PileupElement>(readState.size());
-
-                while (iterator.hasNext()) {
-                    // state object with the read/offset information
-                    final AlignmentStateMachine state = iterator.next();
-                    final GATKSAMRecord read = state.getRead();
-                    final CigarOperator op = state.getCigarOperator();
-
-                    if (op == CigarOperator.N) // N's are never added to any pileup
-                        continue;
-
-                    if (!dontIncludeReadInPileup(read, location.getStart())) {
-                        if ( ! includeReadsWithDeletionAtLoci && op == CigarOperator.D ) {
-                            continue;
-                        }
-
-                        pile.add(state.makePileupElement());
-                    }
-                }
-
-                if (! pile.isEmpty() ) // if this pileup added at least one base, add it to the full pileup
-                    fullPileup.put(sample, new ReadBackedPileupImpl(location, pile));
-            }
-
-            readStates.updateReadStates(); // critical - must be called after we get the current state offsets and location
-            if (!fullPileup.isEmpty()) // if we got reads with non-D/N over the current position, we are done
-                nextAlignmentContext = new AlignmentContext(location, new ReadBackedPileupImpl(location, fullPileup), false);
-        }
-    }
-
-    // -----------------------------------------------------------------------------------------------------------------
-    //
-    // getting the list of reads
-    //
-    // -----------------------------------------------------------------------------------------------------------------
-
-    /**
-     * Transfer current list of all unique reads that have ever been used in any pileup, clearing old list
-     *
-     * This list is guaranteed to only contain unique reads, even across calls to the this function.  It is
-     * literally the unique set of reads ever seen.
-     *
-     * The list occurs in the same order as they are encountered in the underlying iterator.
-     *
-     * Takes the maintained list of submitted reads, and transfers it to the caller of this
-     * function.  The old list of set to a new, cleanly allocated list so the caller officially
-     * owns the list returned by this call.  This is the only way to clear the tracking
-     * of submitted reads, if enabled.
-     *
-     * The purpose of this function is allow users of LIBS to keep track of all of the reads pulled off the
-     * underlying GATKSAMRecord iterator and that appeared at any point in the list of SAMRecordAlignmentState for
-     * any reads.  This function is intended to allow users to efficiently reconstruct the unique set of reads
-     * used across all pileups.  This is necessary for LIBS to handle because attempting to do
-     * so from the pileups coming out of LIBS is extremely expensive.
-     *
-     * This functionality is only available if LIBS was created with the argument to track the reads
-     *
-     * @throws UnsupportedOperationException if called when keepingSubmittedReads is false
-     *
-     * @return the current list
-     */
-    @Ensures("result != null")
-    public List<GATKSAMRecord> transferReadsFromAllPreviousPileups() {
-        return readStates.transferSubmittedReads();
-    }
-
-    /**
-     * Get the underlying list of tracked reads.  For testing only
-     * @return a non-null list
-     */
-    @Ensures("result != null")
-    protected List<GATKSAMRecord> getReadsFromAllPreviousPileups() {
-        return readStates.getSubmittedReads();
-    }
-
-    // -----------------------------------------------------------------------------------------------------------------
-    //
-    // utility functions
-    //
-    // -----------------------------------------------------------------------------------------------------------------
-
-    /**
-     * Should this read be excluded from the pileup?
-     *
-     * Generic place to put per-base filters appropriate to LocusIteratorByState
-     *
-     * @param rec the read to potentially exclude
-     * @param pos the genomic position of the current alignment
-     * @return true if the read should be excluded from the pileup, false otherwise
-     */
-    @Requires({"rec != null", "pos > 0"})
-    private boolean dontIncludeReadInPileup(final GATKSAMRecord rec, final long pos) {
-        return ReadUtils.isBaseInsideAdaptor(rec, pos);
-    }
-
-    /**
-     * Create a LIBSDownsamplingInfo object from the requested info in ReadProperties
-     *
-     * LIBS will invoke the Reservoir and Leveling downsamplers on the read stream if we're
-     * downsampling to coverage by sample. SAMDataSource will have refrained from applying
-     * any downsamplers to the read stream in this case, in the expectation that LIBS will
-     * manage the downsampling. The reason for this is twofold: performance (don't have to
-     * split/re-assemble the read stream in SAMDataSource), and to enable partial downsampling
-     * of reads (eg., using half of a read, and throwing the rest away).
-     *
-     * @param readInfo GATK engine information about what should be done to the reads
-     * @return a LIBS specific info holder about downsampling only
-     */
-    @Requires("readInfo != null")
-    @Ensures("result != null")
-    private static LIBSDownsamplingInfo toDownsamplingInfo(final ReadProperties readInfo) {
-        final boolean performDownsampling = readInfo.getDownsamplingMethod() != null &&
-                readInfo.getDownsamplingMethod().type == DownsampleType.BY_SAMPLE &&
-                readInfo.getDownsamplingMethod().toCoverage != null;
-        final int coverage = performDownsampling ? readInfo.getDownsamplingMethod().toCoverage : 0;
-
-        return new LIBSDownsamplingInfo(performDownsampling, coverage);
-    }
-
-    /**
-     * Create a pileup element for read at offset
-     *
-     * offset must correspond to a valid read offset given the read's cigar, or an IllegalStateException will be throw
-     *
-     * @param read a read
-     * @param offset the offset into the bases we'd like to use in the pileup
-     * @return a valid PileupElement with read and at offset
-     */
-    @Ensures("result != null")
-    public static PileupElement createPileupForReadAndOffset(final GATKSAMRecord read, final int offset) {
-        if ( read == null ) throw new IllegalArgumentException("read cannot be null");
-        if ( offset < 0 || offset >= read.getReadLength() ) throw new IllegalArgumentException("Invalid offset " + offset + " outside of bounds 0 and " + read.getReadLength());
-
-        final AlignmentStateMachine stateMachine = new AlignmentStateMachine(read);
-
-        while ( stateMachine.stepForwardOnGenome() != null ) {
-            if ( stateMachine.getReadOffset() == offset )
-                return stateMachine.makePileupElement();
-        }
-
-        throw new IllegalStateException("Tried to create a pileup for read " + read + " with offset " + offset +
-                " but we never saw such an offset in the alignment state machine");
-    }
-
-    /**
-     * For testing only.  Assumes that the incoming SAMRecords have no read groups, so creates a dummy sample list
-     * for the system.
-     */
-    public static List<String> sampleListForSAMWithoutReadGroups() {
-        List<String> samples = new ArrayList<String>();
-        samples.add(null);
-        return samples;
-    }
-}
\ No newline at end of file
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/locusiterator/PerSampleReadStateManager.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/locusiterator/PerSampleReadStateManager.java
deleted file mode 100644
index e6d49c3..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/locusiterator/PerSampleReadStateManager.java
+++ /dev/null
@@ -1,261 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.locusiterator;
-
-import com.google.java.contract.Ensures;
-import com.google.java.contract.Invariant;
-import com.google.java.contract.Requires;
-import htsjdk.samtools.CigarOperator;
-import org.apache.log4j.Logger;
-import org.broadinstitute.gatk.engine.downsampling.Downsampler;
-import org.broadinstitute.gatk.engine.downsampling.LevelingDownsampler;
-
-import java.util.Iterator;
-import java.util.LinkedList;
-import java.util.List;
-
-/**
- * ReadStateManager for a single sample
- *
- * User: depristo
- * Date: 1/13/13
- * Time: 12:28 PM
- */
- at Invariant({
-        "readStartsAreWellOrdered()",
-        "! isDownsampling() || downsamplingTarget > 0",
-        "nSites >= 0",
-        "nSitesNeedingDownsampling >= 0",
-        "nSitesNeedingDownsampling <= nSites"
-})
-final class PerSampleReadStateManager implements Iterable<AlignmentStateMachine> {
-    private final static Logger logger = Logger.getLogger(ReadStateManager.class);
-    private final static boolean CAPTURE_DOWNSAMPLING_STATS = false;
-
-    /**
-     * A list (potentially empty) of alignment state machines.
-     *
-     * The state machines must be ordered by the alignment start of their underlying reads, with the
-     * lowest alignment starts on the left, and the largest on the right
-     */
-    private LinkedList<AlignmentStateMachine> readStatesByAlignmentStart = new LinkedList<AlignmentStateMachine>();
-
-    private final Downsampler<LinkedList<AlignmentStateMachine>> levelingDownsampler;
-    private final int downsamplingTarget;
-
-    /**
-     * The number of sites where downsampling has been invoked
-     */
-    private int nSitesNeedingDownsampling = 0;
-
-    /**
-     * The number of sites we've visited
-     */
-    private int nSites = 0;
-
-    /**
-     * Create a new PerSampleReadStateManager with downsampling parameters as requested by LIBSDownsamplingInfo
-     * @param LIBSDownsamplingInfo the downsampling params we want to use
-     */
-    public PerSampleReadStateManager(final LIBSDownsamplingInfo LIBSDownsamplingInfo) {
-        this.downsamplingTarget = LIBSDownsamplingInfo.isPerformDownsampling() ? LIBSDownsamplingInfo.getToCoverage() : -1;
-        this.levelingDownsampler = LIBSDownsamplingInfo.isPerformDownsampling()
-                ? new LevelingDownsampler<LinkedList<AlignmentStateMachine>, AlignmentStateMachine>(LIBSDownsamplingInfo.getToCoverage())
-                : null;
-    }
-
-    /**
-     * Group the underlying readStatesByAlignmentStart into a list of list of alignment state machines,
-     * where each list contains machines with a unique genome site.  The outer list is ordered
-     * by alignment start.
-     *
-     * For example, if the flat list has alignment starts [10, 10, 11, 12, 12, 13] then
-     * the resulting grouping will be [[10, 10], [11], [12, 12], [13]].
-     *
-     * @return a non-null list of lists
-     */
-    @Ensures("result != null")
-    private List<LinkedList<AlignmentStateMachine>> groupByAlignmentStart() {
-        final LinkedList<LinkedList<AlignmentStateMachine>> grouped = new LinkedList<LinkedList<AlignmentStateMachine>>();
-
-        AlignmentStateMachine last = null;
-        for ( final AlignmentStateMachine stateMachine : readStatesByAlignmentStart ) {
-            if ( last == null || stateMachine.getGenomeOffset() != last.getGenomeOffset() ) {
-                // we've advanced to a place where the state machine has a different state,
-                // so start a new list
-                grouped.add(new LinkedList<AlignmentStateMachine>());
-                last = stateMachine;
-            }
-            grouped.getLast().add(stateMachine);
-        }
-
-        return grouped;
-    }
-
-    /**
-     * Flattens the grouped list of list of alignment state machines into a single list in order
-     * @return a non-null list contains the state machines
-     */
-    @Ensures("result != null")
-    private LinkedList<AlignmentStateMachine> flattenByAlignmentStart(final List<LinkedList<AlignmentStateMachine>> grouped) {
-        final LinkedList<AlignmentStateMachine> flat = new LinkedList<AlignmentStateMachine>();
-        for ( final List<AlignmentStateMachine> l : grouped )
-            flat.addAll(l);
-        return flat;
-    }
-
-    /**
-     * Test that the reads are ordered by their alignment starts
-     * @return true if well ordered, false otherwise
-     */
-    private boolean readStartsAreWellOrdered() {
-        int lastStart = -1;
-        for ( final AlignmentStateMachine machine : readStatesByAlignmentStart ) {
-            if ( lastStart > machine.getRead().getAlignmentStart() )
-                return false;
-            lastStart = machine.getRead().getAlignmentStart();
-        }
-        return true;
-    }
-
-    /**
-     * Assumes it can just keep the states linked lists without making a copy
-     * @param states the new states to add to this manager
-     * @return The change in the number of states, after including states and potentially downsampling.  Note
-     * that this return result might be negative, if downsampling is enabled, as we might drop
-     * more sites than have been added by the downsampler
-     */
-    @Requires("states != null")
-    public int addStatesAtNextAlignmentStart(final LinkedList<AlignmentStateMachine> states) {
-        if ( states.isEmpty() ) {
-            return 0;
-        }
-
-        readStatesByAlignmentStart.addAll(states);
-        int nStatesAdded = states.size();
-
-        if ( isDownsampling() && readStatesByAlignmentStart.size() > downsamplingTarget ) {
-            // only go into the downsampling branch if we are downsampling and the coverage > the target
-            captureDownsamplingStats();
-            levelingDownsampler.submit(groupByAlignmentStart());
-            levelingDownsampler.signalEndOfInput();
-
-            nStatesAdded -= levelingDownsampler.getNumberOfDiscardedItems();
-
-            // use returned List directly rather than make a copy, for efficiency's sake
-            readStatesByAlignmentStart = flattenByAlignmentStart(levelingDownsampler.consumeFinalizedItems());
-            levelingDownsampler.resetStats();
-        }
-
-        return nStatesAdded;
-    }
-
-    /**
-     * Is downsampling enabled for this manager?
-     * @return true if we are downsampling, false otherwise
-     */
-    private boolean isDownsampling() {
-        return levelingDownsampler != null;
-    }
-
-    /**
-     * Get the leftmost alignment state machine, or null if the read states is empty
-     * @return a potentially null AlignmentStateMachine
-     */
-    public AlignmentStateMachine getFirst() {
-        return isEmpty() ? null : readStatesByAlignmentStart.getFirst();
-    }
-
-    /**
-     * Capture some statistics about the behavior of the downsampling, but only if CAPTURE_DOWNSAMPLING_STATS is true
-     */
-    @Requires("isDownsampling()")
-    private void captureDownsamplingStats() {
-        if ( CAPTURE_DOWNSAMPLING_STATS ) {
-            nSites++;
-            final int loc = getFirst().getGenomePosition();
-            String message = "Pass through";
-            final boolean downsampling = size() > downsamplingTarget;
-            if ( downsampling ) {
-                nSitesNeedingDownsampling++;
-                message = "Downsampling";
-            }
-
-            if ( downsampling || nSites % 10000 == 0 )
-                logger.info(String.format("%20s at %s: coverage=%d, max=%d, fraction of downsampled sites=%.2e",
-                        message, loc, size(), downsamplingTarget, (1.0 * nSitesNeedingDownsampling / nSites)));
-        }
-    }
-
-    /**
-     * Is there at least one alignment for this sample in this manager?
-     * @return true if there's at least one alignment, false otherwise
-     */
-    public boolean isEmpty() {
-        return readStatesByAlignmentStart.isEmpty();
-    }
-
-    /**
-     * Get the number of read states currently in this manager
-     * @return the number of read states
-     */
-    @Ensures("result >= 0")
-    public int size() {
-        return readStatesByAlignmentStart.size();
-    }
-
-    /**
-     * Advances all read states forward by one element, removing states that are
-     * no long aligned to the current position.
-     * @return the number of states we're removed after advancing
-     */
-    public int updateReadStates() {
-        int nRemoved = 0;
-        final Iterator<AlignmentStateMachine> it = iterator();
-        while (it.hasNext()) {
-            final AlignmentStateMachine state = it.next();
-            final CigarOperator op = state.stepForwardOnGenome();
-            if (op == null) {
-                // we discard the read only when we are past its end AND indel at the end of the read (if any) was
-                // already processed. Keeping the read state that returned null upon stepForwardOnGenome() is safe
-                // as the next call to stepForwardOnGenome() will return null again AND will clear hadIndel() flag.
-                it.remove();                                                // we've stepped off the end of the object
-                nRemoved++;
-            }
-        }
-
-        return nRemoved;
-    }
-
-    /**
-     * Iterate over the AlignmentStateMachine in this manager in alignment start order.
-     * @return a valid iterator
-     */
-    @Ensures("result != null")
-    public Iterator<AlignmentStateMachine> iterator() {
-        return readStatesByAlignmentStart.iterator();
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/locusiterator/ReadStateManager.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/locusiterator/ReadStateManager.java
deleted file mode 100644
index 0014753..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/locusiterator/ReadStateManager.java
+++ /dev/null
@@ -1,289 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.locusiterator;
-
-import com.google.java.contract.Ensures;
-import com.google.java.contract.Requires;
-import htsjdk.samtools.util.PeekableIterator;
-import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
-
-import java.util.*;
-
-/**
- * Manages and updates mapping from sample -> List of SAMRecordAlignmentState
- *
- * Optionally can keep track of all of the reads pulled off the iterator and
- * that appeared at any point in the list of SAMRecordAlignmentState for any reads.
- * This functionaly is only possible at this stage, as this object does the popping of
- * reads off the underlying source iterator, and presents only a pileup-like interface
- * of samples -> SAMRecordAlignmentStates.  Reconstructing the unique set of reads
- * used across all pileups is extremely expensive from that data structure.
- *
- * User: depristo
- * Date: 1/5/13
- * Time: 2:02 PM
- */
-final class ReadStateManager implements Iterable<Map.Entry<String, PerSampleReadStateManager>> {
-    private final List<String> samples;
-    private final PeekableIterator<GATKSAMRecord> iterator;
-    private final SamplePartitioner<GATKSAMRecord> samplePartitioner;
-
-    /**
-     * A mapping from sample name -> the per sample read state manager that manages
-     *
-     * IT IS CRITICAL THAT THIS BE A LINKED HASH MAP, SO THAT THE ITERATION OF THE MAP OCCURS IN THE SAME
-     * ORDER AS THE ORIGINL SAMPLES
-     */
-    private final Map<String, PerSampleReadStateManager> readStatesBySample = new LinkedHashMap<String, PerSampleReadStateManager>();
-
-    private LinkedList<GATKSAMRecord> submittedReads;
-    private final boolean keepSubmittedReads;
-
-    private int totalReadStates = 0;
-
-    public ReadStateManager(final Iterator<GATKSAMRecord> source,
-                            final List<String> samples,
-                            final LIBSDownsamplingInfo LIBSDownsamplingInfo,
-                            final boolean keepSubmittedReads) {
-        this.samples = samples;
-        this.iterator = new PeekableIterator<GATKSAMRecord>(source);
-
-        this.keepSubmittedReads = keepSubmittedReads;
-        this.submittedReads = new LinkedList<GATKSAMRecord>();
-
-        for (final String sample : samples) {
-            // because this is a linked hash map the order of iteration will be in sample order
-            readStatesBySample.put(sample, new PerSampleReadStateManager(LIBSDownsamplingInfo));
-        }
-
-        samplePartitioner = new SamplePartitioner<GATKSAMRecord>(LIBSDownsamplingInfo, samples);
-    }
-
-    /**
-     * Returns a iterator over all the sample -> per-sample read state managers with each sample in this read state manager.
-     *
-     * The order of iteration is the same as the order of the samples provided upon construction to this
-     * ReadStateManager.
-     *
-     * @return Iterator over sample + per sample read state manager pairs for this read state manager.
-     */
-    @Override
-    public Iterator<Map.Entry<String, PerSampleReadStateManager>> iterator() {
-        return readStatesBySample.entrySet().iterator();
-    }
-
-    public boolean isEmpty() {
-        return totalReadStates == 0;
-    }
-
-    /**
-     * Retrieves the total number of reads in the manager across all samples.
-     *
-     * @return Total number of reads over all samples.
-     */
-    public int size() {
-        return totalReadStates;
-    }
-
-    /**
-     * Retrieves the total number of reads in the manager in the given sample.
-     *
-     * @param sample The sample.
-     * @return Total number of reads in the given sample.
-     */
-    public int size(final String sample) {
-        return readStatesBySample.get(sample).size();
-    }
-
-    public AlignmentStateMachine getFirst() {
-        for ( final PerSampleReadStateManager manager : readStatesBySample.values() ) {
-            if ( ! manager.isEmpty() )
-                return manager.getFirst();
-        }
-        return null;
-    }
-
-    public boolean hasNext() {
-        return totalReadStates > 0 || iterator.hasNext();
-    }
-
-    /**
-     * Advances all fo the read states by one bp.  After this call the read states are reflective
-     * of the next pileup.
-     */
-    public void updateReadStates() {
-        for (final PerSampleReadStateManager perSampleReadStateManager : readStatesBySample.values() ) {
-            totalReadStates -= perSampleReadStateManager.updateReadStates();
-        }
-    }
-
-    /**
-     * Does read start at the same position as described by currentContextIndex and currentAlignmentStart?
-     *
-     * @param read the read we want to test
-     * @param currentContigIndex the contig index (from the read's getReferenceIndex) of the reads in this state manager
-     * @param currentAlignmentStart the alignment start of the of the left-most position on the
-     *                           genome of the reads in this read state manager
-     * @return true if read has contig index and start equal to the current ones
-     */
-    private boolean readStartsAtCurrentPosition(final GATKSAMRecord read, final int currentContigIndex, final int currentAlignmentStart) {
-        return read.getAlignmentStart() == currentAlignmentStart && read.getReferenceIndex() == currentContigIndex;
-    }
-
-    /**
-     * Pull all of the reads off the iterator that overlap the left-most position among all
-     * reads this ReadStateManager
-     */
-    public void collectPendingReads() {
-        if (!iterator.hasNext())
-            return;
-
-        // determine the left-most boundary that determines which reads to keep in this new pileup
-        final int firstContigIndex;
-        final int firstAlignmentStart;
-        if ( isEmpty() ) {
-            // there are no reads here, so our next state is the next read in the stream
-            firstContigIndex = iterator.peek().getReferenceIndex();
-            firstAlignmentStart = iterator.peek().getAlignmentStart();
-        } else {
-            // there's a read in the system, so it's our targeted first read
-            final AlignmentStateMachine firstState = getFirst();
-            firstContigIndex = firstState.getReferenceIndex();
-            // note this isn't the alignment start of the read, but rather the alignment start position
-            firstAlignmentStart = firstState.getGenomePosition();
-        }
-
-        while ( iterator.hasNext() && readStartsAtCurrentPosition(iterator.peek(), firstContigIndex, firstAlignmentStart) ) {
-            submitRead(iterator.next());
-        }
-
-        samplePartitioner.doneSubmittingReads();
-
-        for (final String sample : samples) {
-            final Collection<GATKSAMRecord> newReads = samplePartitioner.getReadsForSample(sample);
-
-            // if we're keeping reads, take the (potentially downsampled) list of new reads for this sample
-            // and add to the list of reads.  Note this may reorder the list of reads someone (it groups them
-            // by sample, but it cannot change their absolute position on the genome as they all must
-            // start at the current location
-            if ( keepSubmittedReads )
-                submittedReads.addAll(newReads);
-
-            final PerSampleReadStateManager statesBySample = readStatesBySample.get(sample);
-            addReadsToSample(statesBySample, newReads);
-        }
-
-        samplePartitioner.reset();
-    }
-
-    /**
-     * Add a read to the sample partitioner, potentially adding it to all submitted reads, if appropriate
-     * @param read a non-null read
-     */
-    @Requires("read != null")
-    protected void submitRead(final GATKSAMRecord read) {
-        samplePartitioner.submitRead(read);
-    }
-
-    /**
-     * Transfer current list of submitted reads, clearing old list
-     *
-     * Takes the maintained list of submitted reads, and transfers it to the caller of this
-     * function.  The old list of set to a new, cleanly allocated list so the caller officially
-     * owns the list returned by this call.  This is the only way to clear the tracking
-     * of submitted reads, if enabled.
-     *
-     * How to use this function:
-     *
-     * while ( doing some work unit, such as creating pileup at some locus ):
-     *   interact with ReadStateManager in some way to make work unit
-     *   readsUsedInPileup = transferSubmittedReads)
-     *
-     * @throws UnsupportedOperationException if called when keepSubmittedReads is false
-     *
-     * @return the current list of submitted reads
-     */
-    @Ensures({
-            "result != null",
-            "result != submittedReads" // result and previous submitted reads are not == objects
-    })
-    public List<GATKSAMRecord> transferSubmittedReads() {
-        if ( ! keepSubmittedReads ) throw new UnsupportedOperationException("cannot transferSubmittedReads if you aren't keeping them");
-
-        final List<GATKSAMRecord> prevSubmittedReads = submittedReads;
-        this.submittedReads = new LinkedList<GATKSAMRecord>();
-
-        return prevSubmittedReads;
-    }
-
-    /**
-     * Are we keeping submitted reads, or not?
-     * @return true if we are keeping them, false otherwise
-     */
-    public boolean isKeepingSubmittedReads() {
-        return keepSubmittedReads;
-    }
-
-    /**
-     * Obtain a pointer to the list of submitted reads.
-     *
-     * This is not a copy of the list; it is shared with this ReadStateManager.  It should
-     * not be modified.  Updates to this ReadStateManager may change the contains of the
-     * list entirely.
-     *
-     * For testing purposes only.
-     *
-     * Will always be empty if we are are not keepSubmittedReads
-     *
-     * @return a non-null list of reads that have been submitted to this ReadStateManager
-     */
-    @Ensures({"result != null","keepSubmittedReads || result.isEmpty()"})
-    protected List<GATKSAMRecord> getSubmittedReads() {
-        return submittedReads;
-    }
-
-    /**
-     * Add reads with the given sample name to the given hanger entry.
-     *
-     * @param readStates The list of read states to add this collection of reads.
-     * @param reads      Reads to add.  Selected reads will be pulled from this source.
-     */
-    private void addReadsToSample(final PerSampleReadStateManager readStates, final Collection<GATKSAMRecord> reads) {
-        if (reads.isEmpty())
-            return;
-
-        final LinkedList<AlignmentStateMachine> newReadStates = new LinkedList<AlignmentStateMachine>();
-
-        for (final GATKSAMRecord read : reads) {
-            final AlignmentStateMachine state = new AlignmentStateMachine(read);
-            if ( state.stepForwardOnGenome() != null ) // todo -- should be an assertion not a skip
-                // explicitly filter out reads that are all insertions / soft clips
-                newReadStates.add(state);
-        }
-
-        totalReadStates += readStates.addStatesAtNextAlignmentStart(newReadStates);
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/locusiterator/SamplePartitioner.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/locusiterator/SamplePartitioner.java
deleted file mode 100644
index 825cb35..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/locusiterator/SamplePartitioner.java
+++ /dev/null
@@ -1,172 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.locusiterator;
-
-import com.google.java.contract.Ensures;
-import com.google.java.contract.Requires;
-import htsjdk.samtools.SAMRecord;
-import org.broadinstitute.gatk.engine.downsampling.Downsampler;
-import org.broadinstitute.gatk.engine.downsampling.PassThroughDownsampler;
-import org.broadinstitute.gatk.engine.downsampling.ReservoirDownsampler;
-
-import java.util.*;
-
-/**
- * Divides reads by sample and (if requested) does a preliminary downsampling pass
- * with a ReservoirDownsampler.
- *
- * Note: stores reads by sample ID string, not by sample object
- */
-class SamplePartitioner<T extends SAMRecord> {
-    /**
-     * Map from sample name (as a string) to a downsampler of reads for that sample
-     */
-    final private Map<String, Downsampler<T>> readsBySample;
-
-    /**
-     * Are we in a state where we're done submitting reads and have semi-finalized the
-     * underlying per sample downsampler?
-     */
-    boolean doneSubmittingReads = false;
-
-    /**
-     * Create a new SamplePartitioner capable of splitting reads up into buckets of reads for
-     * each sample in samples, and perform a preliminary downsampling of these reads
-     * (separately for each sample) if downsampling is requested in LIBSDownsamplingInfo
-     *
-     * Note that samples must be comprehensive, in that all reads every submitted to this
-     * partitioner must come from one of the samples provided here.  If not, submitRead
-     * will throw an exception.  Duplicates in the list of samples will be ignored
-     *
-     * @param LIBSDownsamplingInfo do we want to downsample, and if so to what coverage?
-     * @param samples the complete list of samples we're going to partition reads into. Can be
-     *                empty, but in that case this code cannot function properly if you
-     *                attempt to add data to it.
-     */
-    @Ensures({
-            "readsBySample != null",
-            "readsBySample.size() == new HashSet(samples).size()"
-    })
-    public SamplePartitioner(final LIBSDownsamplingInfo LIBSDownsamplingInfo, final List<String> samples) {
-        if ( LIBSDownsamplingInfo == null ) throw new IllegalArgumentException("LIBSDownsamplingInfo cannot be null");
-        if ( samples == null ) throw new IllegalArgumentException("samples must be a non-null list");
-
-        readsBySample = new LinkedHashMap<String, Downsampler<T>>(samples.size());
-        for ( final String sample : samples ) {
-            readsBySample.put(sample, createDownsampler(LIBSDownsamplingInfo));
-        }
-    }
-
-    /**
-     * Create a new, ready to use downsampler based on the parameters in LIBSDownsamplingInfo
-     * @param LIBSDownsamplingInfo the parameters to use in creating the downsampler
-     * @return a downsampler appropriate for LIBSDownsamplingInfo.  If no downsampling is requested,
-     *   uses the PassThroughDownsampler, which does nothing at all.
-     */
-    @Requires("LIBSDownsamplingInfo != null")
-    @Ensures("result != null")
-    private Downsampler<T> createDownsampler(final LIBSDownsamplingInfo LIBSDownsamplingInfo) {
-        return LIBSDownsamplingInfo.isPerformDownsampling()
-                ? new ReservoirDownsampler<T>(LIBSDownsamplingInfo.getToCoverage(), true)
-                : new PassThroughDownsampler<T>();
-    }
-
-    /**
-     * Offer this read to the partitioner, putting it into the bucket of reads for the sample
-     * of read (obtained via the read's read group).
-     *
-     * If the read group is missing, uses the special "null" read group
-     *
-     * @throws IllegalStateException if the sample of read wasn't present in the original
-     *   set of samples provided to this SamplePartitioner at construction
-     *
-     * @param read the read to add to the sample's list of reads
-     */
-    @Requires("read != null")
-    @Ensures("doneSubmittingReads == false")
-    public void submitRead(final T read) {
-        final String sampleName = read.getReadGroup() != null ? read.getReadGroup().getSample() : null;
-        final Downsampler<T> downsampler = readsBySample.get(sampleName);
-        if ( downsampler == null )
-            throw new IllegalStateException("Offered read with sample name " + sampleName + " to SamplePartitioner " +
-                    "but this sample wasn't provided as one of possible samples at construction");
-
-        downsampler.submit(read);
-        doneSubmittingReads = false;
-    }
-
-    /**
-     * Tell this partitioner that all reads in this cycle have been submitted, so that we
-     * can finalize whatever downsampling is required by each sample.
-     *
-     * Note that we *must* call this function before getReadsForSample, or else that
-     * function will exception out.
-     */
-    @Ensures("doneSubmittingReads == true")
-    public void doneSubmittingReads() {
-        for ( final Downsampler<T> downsampler : readsBySample.values() ) {
-            downsampler.signalEndOfInput();
-        }
-        doneSubmittingReads = true;
-    }
-
-    /**
-     * Get the final collection of reads for this sample for this cycle
-     *
-     * The cycle is defined as all of the reads that occur between
-     * the first call to submitRead until doneSubmittingReads is called.  At that
-     * point additional downsampling may occur (depending on construction arguments)
-     * and that set of reads is returned here.
-     *
-     * Note that this function can only be called once per cycle, as underlying
-     * collection of reads is cleared.
-     *
-     * @param sampleName the sample we want reads for, must be present in the original samples
-     * @return a non-null collection of reads for sample in this cycle
-     */
-    @Ensures("result != null")
-    public Collection<T> getReadsForSample(final String sampleName) {
-        if ( ! doneSubmittingReads ) throw new IllegalStateException("getReadsForSample called before doneSubmittingReads was called");
-
-        final Downsampler<T> downsampler = readsBySample.get(sampleName);
-        if ( downsampler == null ) throw new NoSuchElementException("Sample name not found");
-
-        return downsampler.consumeFinalizedItems();
-    }
-
-    /**
-     * Resets this SamplePartitioner, indicating that we're starting a new
-     * cycle of adding reads to each underlying downsampler.
-     */
-    @Ensures("doneSubmittingReads == false")
-    public void reset() {
-        for ( final Downsampler<T> downsampler : readsBySample.values() ) {
-            downsampler.clearItems();
-            downsampler.resetStats();
-        }
-        doneSubmittingReads = false;
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/nanoScheduler/EOFMarkedValue.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/nanoScheduler/EOFMarkedValue.java
deleted file mode 100644
index c5255e4..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/nanoScheduler/EOFMarkedValue.java
+++ /dev/null
@@ -1,105 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.nanoScheduler;
-
-/**
- * Wrapper to hold data that distinguishing an special EOF marker from a real object
- *
- * The only way to tell in a consumer thread that a blocking queue has no more data ever
- * coming down the pipe is to pass in a "poison" or EOF object.  This class provides
- * a generic capacity for that...
- *
- * The use case looks like this:
- *
- * BlockingQueue q
- * producer:
- *   while ( x has items )
- *      q.put(new EOFMarkedValue(x))
- *   q.put(new EOFMarkedValue())
- *
- * Consumer:
- *   while ( true )
- *       value = q.take()
- *       if ( value.isEOFMarker() )
- *          break
- *       else
- *          do something useful with value
- *
- *
- * User: depristo
- * Date: 9/6/12
- * Time: 3:08 PM
- */
-//@Invariant("! isEOFMarker() || value == null")
-class EOFMarkedValue<T> {
-    /**
-     * True if this is the EOF marker object
-     */
-    final private boolean isLast;
-
-    /**
-     * Our value, if we aren't the EOF marker
-     */
-    final private T value;
-
-    /**
-     * Create a new EOFMarkedValue containing a real value, where last is false
-     * @param value
-     */
-    EOFMarkedValue(final T value) {
-        isLast = false;
-        this.value = value;
-    }
-
-    /**
-     * Create a new EOFMarkedValue that is the last item
-     */
-    EOFMarkedValue() {
-        isLast = true;
-        this.value = null;
-    }
-
-    /**
-     * Is this the EOF marker?
-     *
-     * @return true if so, else false
-     */
-    public boolean isEOFMarker() {
-        return isLast;
-    }
-
-    /**
-     * Get the value held by this EOFMarkedValue
-     *
-     * @return the value
-     * @throws IllegalStateException if this is the last item
-     */
-    public T getValue() {
-        if ( isEOFMarker() )
-            throw new IllegalStateException("Cannot get value for last object");
-        return value;
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/nanoScheduler/InputProducer.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/nanoScheduler/InputProducer.java
deleted file mode 100644
index 3a67b43..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/nanoScheduler/InputProducer.java
+++ /dev/null
@@ -1,217 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.nanoScheduler;
-
-import org.apache.log4j.Logger;
-
-import java.util.Iterator;
-import java.util.concurrent.CountDownLatch;
-
-/**
- * Helper class that allows multiple threads to reads input values from
- * an iterator, and track the number of items read from that iterator.
- */
-class InputProducer<InputType> {
-    private final static Logger logger = Logger.getLogger(InputProducer.class);
-
-    /**
-     * The iterator we are using to get data from
-     */
-    final Iterator<InputType> inputReader;
-
-    /**
-     * Have we read the last value from inputReader?
-     *
-     * Must be a local variable, as inputReader.hasNext() can actually end up doing a lot
-     * of work, and the method getNumInputValues() is supposed to be called not in the
-     * thread executing the reading of values but in the thread enqueuing results
-     */
-    boolean readLastValue = false;
-
-    /**
-     * Once we've readLastValue, lastValue contains a continually
-     * updating InputValue where EOF is true.  It's not necessarily
-     * a single value, as each read updates lastValue with the
-     * next EOF marker
-     */
-    private InputValue lastValue = null;
-
-    int nRead = 0;
-    int inputID = -1;
-
-    public InputProducer(final Iterator<InputType> inputReader) {
-        if ( inputReader == null ) throw new IllegalArgumentException("inputReader cannot be null");
-        this.inputReader = inputReader;
-    }
-
-    /**
-     * Returns the number of elements in the input stream, AFTER we've read all of the values.
-     * If we haven't read them all yet, returns -1
-     *
-     * @return the total number of elements in input stream, or -1 if some are still to be read
-     */
-    public synchronized int getNumInputValues() {
-        return allInputsHaveBeenRead() ? nRead : -1;
-    }
-
-    /**
-     * Returns true if all of the elements have been read from the input stream
-     *
-     * @return true if all of the elements have been read from the input stream
-     */
-    public synchronized boolean allInputsHaveBeenRead() {
-        return readLastValue;
-    }
-
-    /**
-     * Read the next item from the input stream, if possible
-     *
-     * If the inputReader has values, returns them, otherwise return null.
-     *
-     * This method is synchronized, as it manipulates local state accessed across multiple threads.
-     *
-     * @return the next input stream value, or null if the stream contains no more elements
-     */
-    private synchronized InputType readNextItem() {
-        if ( ! inputReader.hasNext() ) {
-            // we are done, mark ourselves as such and return null
-            readLastValue = true;
-            return null;
-        } else {
-            // get the next value, and return it
-            final InputType input = inputReader.next();
-            if ( input == null )
-                throw new IllegalStateException("inputReader.next() returned a null value, breaking our contract");
-            nRead++;
-            return input;
-        }
-    }
-
-    /**
-     * Are there currently more values in the iterator?
-     *
-     * Note the word currently.  It's possible that some already submitted
-     * job will read a value from this InputProvider, so in some sense
-     * there are no more values and in the future there'll be no next
-     * value.  That said, once this returns false it means that all
-     * of the possible values have been read
-     *
-     * @return true if a future call to next might return a non-EOF value, false if
-     *         the underlying iterator is definitely empty
-     */
-    public synchronized boolean hasNext() {
-        return ! allInputsHaveBeenRead();
-    }
-
-    /**
-     * Get the next InputValue from this producer.  The next value is
-     * either (1) the next value from the iterator, in which case the
-     * the return value is an InputValue containing that value, or (2)
-     * an InputValue with the EOF marker, indicating that the underlying
-     * iterator has been exhausted.
-     *
-     * This function never fails -- it can be called endlessly and
-     * while the underlying iterator has values it returns them, and then
-     * it returns a succession of EOF marking input values.
-     *
-     * @return an InputValue containing the next value in the underlying
-     *         iterator, or one with EOF marker, if the iterator is exhausted
-     */
-    public synchronized InputValue next() {
-        if ( readLastValue ) {
-            // we read the last value, so our value is the next
-            // EOF marker based on the last value.  Make sure to
-            // update the last value so the markers keep incrementing
-            // their job ids
-            lastValue = lastValue.nextEOF();
-            return lastValue;
-        } else {
-            final InputType value = readNextItem();
-
-            if ( value == null ) {
-                if ( ! readLastValue )
-                    throw new IllegalStateException("value == null but readLastValue is false!");
-
-                // add the EOF object so our consumer knows we are done in all inputs
-                // note that we do not increase inputID here, so that variable indicates the ID
-                // of the last real value read from the queue
-                lastValue = new InputValue(inputID + 1);
-                return lastValue;
-            } else {
-                // add the actual value to the outputQueue
-                return new InputValue(++inputID, value);
-            }
-        }
-    }
-
-    /**
-     * Helper class that contains a read value suitable for EOF marking in a BlockingQueue
-     *
-     * This class also contains an ID, an integer incrementing from 0 to N, for N total
-     * values in the input stream.  This ID indicates which element in the element stream this
-     * InputValue corresponds to.  Necessary for tracking and ordering results by input position.
-     *
-     * Note that EOF markers have IDs > N, and ID values >> N can occur if many EOF markers
-     * are enqueued in the outputQueue.
-     */
-    class InputValue extends EOFMarkedValue<InputType> {
-        final int id;
-
-        private InputValue(final int id, InputType datum) {
-            super(datum);
-            if ( id < 0 ) throw new IllegalArgumentException("id must be >= 0");
-            this.id = id;
-        }
-        private InputValue(final int id) {
-            super();
-            if ( id < 0 ) throw new IllegalArgumentException("id must be >= 0");
-            this.id = id;
-        }
-
-        /**
-         * Returns the ID of this input marker
-         * @return id >= 0
-         */
-        public int getId() {
-            return id;
-        }
-
-        /**
-         * Create another EOF marker with ID + 1 to this one.
-         *
-         * Useful in the case where we need to enqueue another EOF marker for future jobs and we
-         * want them to have a meaningful ID, one greater than the last one.
-         *
-         * @return ID
-         */
-        //@Ensures({"result.isEOFMarker()", "result.getId() == getId() + 1"})
-        public InputValue nextEOF() {
-            if ( ! isEOFMarker() )
-                throw new IllegalArgumentException("Cannot request next EOF marker for non-EOF marker InputValue");
-            return new InputValue(getId() + 1);
-        }
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/nanoScheduler/MapResult.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/nanoScheduler/MapResult.java
deleted file mode 100644
index d6628a5..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/nanoScheduler/MapResult.java
+++ /dev/null
@@ -1,75 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.nanoScheduler;
-
-/**
- * Holds the results of a map job suitable for producer/consumer threading
- * via a BlockingQueue
- */
-class MapResult<MapType> extends EOFMarkedValue<MapType> implements Comparable<MapResult<MapType>> {
-    final int jobID;
-
-    /**
-     * Create a new MapResult with value datum and jod jobID ID
-     *
-     * @param datum the value produced by the map job
-     * @param jobID the id of the map job (for correctness testing)
-     */
-    MapResult(final MapType datum, final int jobID) {
-        super(datum);
-        this.jobID = jobID;
-        if ( jobID < 0 ) throw new IllegalArgumentException("JobID must be >= 0");
-    }
-
-    MapResult(final int jobID) {
-        super();
-        this.jobID = jobID;
-        if ( jobID < 0 ) throw new IllegalArgumentException("JobID must be >= 0");
-    }
-
-    /**
-     * @return the job ID of the map job that produced this MapResult
-     */
-    public int getJobID() {
-        return jobID;
-    }
-
-    /**
-     * Compare these MapResults in order of JobID.
-     *
-     * @param o
-     * @return
-     */
-    @Override
-    public int compareTo(MapResult<MapType> o) {
-        return Integer.valueOf(jobID).compareTo(o.getJobID());
-    }
-
-    @Override
-    public String toString() {
-        return "[MapResult id=" + jobID + "]";
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/nanoScheduler/MapResultsQueue.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/nanoScheduler/MapResultsQueue.java
deleted file mode 100644
index afeafb5..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/nanoScheduler/MapResultsQueue.java
+++ /dev/null
@@ -1,116 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.nanoScheduler;
-
-import org.broadinstitute.gatk.utils.collections.ExpandingArrayList;
-
-/**
- * Created with IntelliJ IDEA.
- * User: depristo
- * Date: 12/19/12
- * Time: 3:53 PM
- *
- * This class makes some critical assumptions.  First is that the jobID of the first
- * job is 0.  If this isn't true the MapResultsQueue will certainly fail.
- */
-public class MapResultsQueue<MapType> {
-    //private final static boolean DEBUG = false;
-    //private final static Logger logger = Logger.getLogger(MapResultsQueue.class);
-
-    /**
-     * Although naturally stored as priority blocking queue, this is actually quite expensive
-     * due to the O(n log n) sorting calculation.  Since we know that the job ids start
-     * at 0 and increment by 1 in each successive job, we store an array instead.  The
-     * array is indexed by jobID, and contains the MapResult for that job id.  Because elements
-     * can be added to the queue in any order, we need to use an expanding array list to
-     * store the elements.
-     */
-    final ExpandingArrayList<MapResult<MapType>> queue = new ExpandingArrayList<MapResult<MapType>>(10000);
-
-    /**
-     * The jobID of the last job we've seen
-     */
-    int prevJobID = -1; // no jobs observed
-
-    /**
-     * Put mapResult into this MapResultsQueue, associated with its jobID
-     * @param mapResult a non-null map result
-     */
-    public synchronized void put(final MapResult<MapType> mapResult) {
-        if ( mapResult == null ) throw new IllegalArgumentException("mapResult cannot be null");
-
-        // make sure that nothing is at the job id for map
-        assert queue.size() < mapResult.getJobID() || queue.get(mapResult.getJobID()) == null;
-
-        queue.set(mapResult.getJobID(), mapResult);
-    }
-
-    /**
-     * Should we reduce the next value in the mapResultQueue?
-     *
-     * @return true if we should reduce
-     */
-    public synchronized boolean nextValueIsAvailable() {
-        final MapResult<MapType> nextMapResult = queue.get(nextJobID());
-
-        if ( nextMapResult == null ) {
-            // natural case -- the next job hasn't had a value added yet
-            return false;
-        } else if ( nextMapResult.getJobID() != nextJobID() ) {
-            // sanity check -- the job id at next isn't the one we expect
-            throw new IllegalStateException("Next job ID " + nextMapResult.getJobID() + " is not == previous job id " + prevJobID + " + 1");
-        } else {
-            // there's a value at the next job id, so return true
-            return true;
-        }
-    }
-
-    /**
-     * Get the next job ID'd be expect to see given our previous job id
-     * @return the next job id we'd fetch to reduce
-     */
-    private int nextJobID() {
-        return prevJobID + 1;
-    }
-
-    /**
-     * Can only be called when nextValueIsAvailable is true
-     * @return
-     * @throws InterruptedException
-     */
-    // TODO -- does this have to be synchronized? -- I think the answer is no
-    public synchronized MapResult<MapType> take() throws InterruptedException {
-        final MapResult<MapType> result = queue.get(nextJobID());
-
-        // make sure the value we've fetched has the right id
-        assert result.getJobID() == nextJobID();
-
-        prevJobID = result.getJobID();
-        queue.set(prevJobID, null);
-
-        return result;
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/nanoScheduler/NSMapFunction.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/nanoScheduler/NSMapFunction.java
deleted file mode 100644
index dbd58b4..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/nanoScheduler/NSMapFunction.java
+++ /dev/null
@@ -1,44 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.nanoScheduler;
-
-/**
- * A function that maps from InputType -> ResultType
- *
- * For use with the NanoScheduler
- *
- * User: depristo
- * Date: 8/24/12
- * Time: 9:49 AM
- */
-public interface NSMapFunction<InputType, ResultType> {
-    /**
-     * Return function on input, returning a value of ResultType
-     * @param input
-     * @return
-     */
-    public ResultType apply(final InputType input);
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/nanoScheduler/NSProgressFunction.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/nanoScheduler/NSProgressFunction.java
deleted file mode 100644
index 27d713e..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/nanoScheduler/NSProgressFunction.java
+++ /dev/null
@@ -1,37 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.nanoScheduler;
-
-/**
- * Created with IntelliJ IDEA.
- * User: depristo
- * Date: 9/4/12
- * Time: 2:10 PM
- * To change this template use File | Settings | File Templates.
- */
-public interface NSProgressFunction<InputType> {
-    public void progress(final InputType lastMapInput);
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/nanoScheduler/NSReduceFunction.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/nanoScheduler/NSReduceFunction.java
deleted file mode 100644
index acb0a78..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/nanoScheduler/NSReduceFunction.java
+++ /dev/null
@@ -1,43 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.nanoScheduler;
-
-/**
- * A function that combines a value of MapType with an existing ReduceValue into a new ResultType
- *
- * User: depristo
- * Date: 8/24/12
- * Time: 9:49 AM
- */
-public interface NSReduceFunction<MapType, ReduceType> {
-    /**
-     * Combine one with sum into a new ReduceType
-     * @param one the result of a map call on an input element
-     * @param sum the cumulative reduce result over all previous map calls
-     * @return
-     */
-    public ReduceType apply(MapType one, ReduceType sum);
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/nanoScheduler/NanoScheduler.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/nanoScheduler/NanoScheduler.java
deleted file mode 100644
index 8b02721..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/nanoScheduler/NanoScheduler.java
+++ /dev/null
@@ -1,494 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.nanoScheduler;
-
-import com.google.java.contract.Ensures;
-import com.google.java.contract.Requires;
-import org.apache.log4j.Logger;
-import org.broadinstitute.gatk.utils.MultiThreadedErrorTracker;
-import org.broadinstitute.gatk.utils.threading.NamedThreadFactory;
-
-import java.util.Iterator;
-import java.util.List;
-import java.util.concurrent.*;
-
-/**
- * Framework for very fine grained MapReduce parallelism
- *
- * The overall framework works like this
- *
- * nano <- new Nanoschedule(bufferSize, numberOfMapElementsToProcessTogether, nThreads)
- * List[Input] outerData : outerDataLoop )
- *   result = nano.execute(outerData.iterator(), map, reduce)
- *
- * bufferSize determines how many elements from the input stream are read in one go by the
- * nanoscheduler.  The scheduler may hold up to bufferSize in memory at one time, as well
- * as up to bufferSize map results as well.
- *
- * numberOfMapElementsToProcessTogether determines how many input elements are processed
- * together each thread cycle.  For example, if this value is 10, then the input data
- * is grouped together in units of 10 elements each, and map called on each in term.  The more
- * heavy-weight the map function is, in terms of CPU costs, the more it makes sense to
- * have this number be small.  The lighter the CPU cost per element, though, the more this
- * parameter introduces overhead due to need to context switch among threads to process
- * each input element.  A value of -1 lets the nanoscheduler guess at a reasonable trade-off value.
- *
- * nThreads is a bit obvious yes?  Note though that the nanoscheduler assumes that it gets 1 thread
- * from its client during the execute call, as this call blocks until all work is done.  The caller
- * thread is put to work by execute to help with the processing of the data.  So in reality the
- * nanoScheduler only spawn nThreads - 1 additional workers (if this is > 1).
- *
- * User: depristo
- * Date: 8/24/12
- * Time: 9:47 AM
- */
-public class NanoScheduler<InputType, MapType, ReduceType> {
-    private final static Logger logger = Logger.getLogger(NanoScheduler.class);
-    private final static boolean ALLOW_SINGLE_THREAD_FASTPATH = true;
-    protected final static int UPDATE_PROGRESS_FREQ = 100;
-
-    /**
-     * Currently not used, but kept because it's conceptual reasonable to have a buffer
-     */
-    final int bufferSize;
-
-    /**
-     * The number of threads we're using to execute the map jobs in this nano scheduler
-     */
-    final int nThreads;
-
-    final ExecutorService masterExecutor;
-    final ExecutorService mapExecutor;
-    final MultiThreadedErrorTracker errorTracker = new MultiThreadedErrorTracker();
-
-    boolean shutdown = false;
-    boolean debug = false;
-    private NSProgressFunction<InputType> progressFunction = null;
-
-    /**
-     * Create a new nanoscheduler with the desire characteristics requested by the argument
-     *
-     * @param nThreads the number of threads to use to get work done, in addition to the
-     *                 thread calling execute
-     */
-    public NanoScheduler(final int nThreads) {
-        this(nThreads*100, nThreads);
-    }
-
-    protected NanoScheduler(final int bufferSize, final int nThreads) {
-        if ( bufferSize < 1 ) throw new IllegalArgumentException("bufferSize must be >= 1, got " + bufferSize);
-        if ( nThreads < 1 ) throw new IllegalArgumentException("nThreads must be >= 1, got " + nThreads);
-
-        this.bufferSize = bufferSize;
-        this.nThreads = nThreads;
-
-        if ( nThreads == 1 ) {
-            this.mapExecutor = this.masterExecutor = null;
-        } else {
-            this.masterExecutor = Executors.newSingleThreadExecutor(new NamedThreadFactory("NS-master-thread-%d"));
-            this.mapExecutor = Executors.newFixedThreadPool(nThreads, new NamedThreadFactory("NS-map-thread-%d"));
-        }
-    }
-
-    /**
-     * The number of parallel map threads in use with this NanoScheduler
-     * @return
-     */
-    @Ensures("result > 0")
-    public int getnThreads() {
-        return nThreads;
-    }
-
-    /**
-     * The input buffer size used by this NanoScheduler
-     * @return
-     */
-    @Ensures("result > 0")
-    public int getBufferSize() {
-        return this.bufferSize;
-    }
-
-    /**
-     * Tells this nanoScheduler to shutdown immediately, releasing all its resources.
-     *
-     * After this call, execute cannot be invoked without throwing an error
-     */
-    public void shutdown() {
-        if ( nThreads > 1 ) {
-            shutdownExecutor("mapExecutor", mapExecutor);
-            shutdownExecutor("masterExecutor", masterExecutor);
-        }
-
-        shutdown = true;
-    }
-
-    /**
-     * Helper function to cleanly shutdown an execution service, checking that the execution
-     * state is clean when it's done.
-     *
-     * @param name a string name for error messages for the executorService we are shutting down
-     * @param executorService the executorService to shut down
-     */
-    @Requires({"name != null", "executorService != null"})
-    @Ensures("executorService.isShutdown()")
-    private void shutdownExecutor(final String name, final ExecutorService executorService) {
-        if ( executorService.isShutdown() || executorService.isTerminated() )
-            throw new IllegalStateException("Executor service " + name + " is already shut down!");
-
-        final List<Runnable> remaining = executorService.shutdownNow();
-        if ( ! remaining.isEmpty() )
-            throw new IllegalStateException(remaining.size() + " remaining tasks found in an executor " + name + ", unexpected behavior!");
-    }
-
-    /**
-     * @return true if this nanoScheduler is shutdown, or false if its still open for business
-     */
-    public boolean isShutdown() {
-        return shutdown;
-    }
-
-    /**
-     * @return are we displaying verbose debugging information about the scheduling?
-     */
-    public boolean isDebug() {
-        return debug;
-    }
-
-    /**
-     * Helper function to display a String.formatted message if we are doing verbose debugging
-     *
-     * @param format the format argument suitable for String.format
-     * @param args the arguments for String.format
-     */
-    @Requires("format != null")
-    protected void debugPrint(final String format, Object ... args) {
-        if ( isDebug() )
-            logger.warn("Thread " + Thread.currentThread().getId() + ":" + String.format(format, args));
-    }
-
-    /**
-     * Turn on/off verbose debugging
-     *
-     * @param debug true if we want verbose debugging
-     */
-    public void setDebug(boolean debug) {
-        this.debug = debug;
-    }
-
-    /**
-     * Set the progress callback function to progressFunction
-     *
-     * The progress callback is invoked after each buffer size elements have been processed by map/reduce
-     *
-     * @param progressFunction a progress function to call, or null if you don't want any progress callback
-     */
-    public void setProgressFunction(final NSProgressFunction<InputType> progressFunction) {
-        this.progressFunction = progressFunction;
-    }
-
-    /**
-     * Execute a map/reduce job with this nanoScheduler
-     *
-     * Data comes from inputReader.  Will be read until hasNext() == false.
-     * map is called on each element provided by inputReader.  No order of operations is guarenteed
-     * reduce is called in order of the input data provided by inputReader on the result of map() applied
-     * to each element.
-     *
-     * Note that the caller thread is put to work with this function call.  The call doesn't return
-     * until all elements have been processes.
-     *
-     * It is safe to call this function repeatedly on a single nanoScheduler, at least until the
-     * shutdown method is called.
-     *
-     * Note that this function goes through a single threaded fast path if the number of threads
-     * is 1.
-     *
-     * @param inputReader an iterator providing us with the input data to nanoSchedule map/reduce over
-     * @param map the map function from input type -> map type, will be applied in parallel to each input
-     * @param reduce the reduce function from map type + reduce type -> reduce type to be applied in order to map results
-     * @return the last reduce value
-     */
-    public ReduceType execute(final Iterator<InputType> inputReader,
-                              final NSMapFunction<InputType, MapType> map,
-                              final ReduceType initialValue,
-                              final NSReduceFunction<MapType, ReduceType> reduce) {
-        if ( isShutdown() ) throw new IllegalStateException("execute called on already shutdown NanoScheduler");
-        if ( inputReader == null ) throw new IllegalArgumentException("inputReader cannot be null");
-        if ( map == null ) throw new IllegalArgumentException("map function cannot be null");
-        if ( reduce == null ) throw new IllegalArgumentException("reduce function cannot be null");
-
-        ReduceType result;
-        if ( ALLOW_SINGLE_THREAD_FASTPATH && getnThreads() == 1 ) {
-            result = executeSingleThreaded(inputReader, map, initialValue, reduce);
-        } else {
-            result = executeMultiThreaded(inputReader, map, initialValue, reduce);
-        }
-
-        return result;
-    }
-
-    /**
-     * Simple efficient reference implementation for single threaded execution.
-     *
-     * @return the reduce result of this map/reduce job
-     */
-    @Requires({"inputReader != null", "map != null", "reduce != null"})
-    private ReduceType executeSingleThreaded(final Iterator<InputType> inputReader,
-                                             final NSMapFunction<InputType, MapType> map,
-                                             final ReduceType initialValue,
-                                             final NSReduceFunction<MapType, ReduceType> reduce) {
-        ReduceType sum = initialValue;
-        int i = 0;
-
-        while ( true ) {
-            // start timer to ensure that both hasNext and next are caught by the timer
-            if ( ! inputReader.hasNext() ) {
-                break;
-            } else {
-                final InputType input = inputReader.next();
-
-                // map
-                final MapType mapValue = map.apply(input);
-
-                updateProgress(i++, input);
-
-                // reduce
-                sum = reduce.apply(mapValue, sum);
-            }
-        }
-
-        return sum;
-    }
-
-    /**
-     * Maybe update the progress meter (maybe because we don't want to do so so often that it costs cpu time)
-     * @param counter increasing counter to use to cut down on updates
-     * @param input the input we're currently at
-     */
-    private void updateProgress(final int counter, final InputType input) {
-        if ( progressFunction != null && counter % UPDATE_PROGRESS_FREQ == 0 )
-            progressFunction.progress(input);
-    }
-
-    /**
-     * Efficient parallel version of Map/Reduce
-     *
-     * @return the reduce result of this map/reduce job
-     */
-    @Requires({"inputReader != null", "map != null", "reduce != null"})
-    private ReduceType executeMultiThreaded(final Iterator<InputType> inputReader,
-                                            final NSMapFunction<InputType, MapType> map,
-                                            final ReduceType initialValue,
-                                            final NSReduceFunction<MapType, ReduceType> reduce) {
-        debugPrint("Executing nanoScheduler");
-
-        // start up the master job
-        final MasterJob masterJob = new MasterJob(inputReader, map, initialValue, reduce);
-        final Future<ReduceType> reduceResult = masterExecutor.submit(masterJob);
-
-        while ( true ) {
-            // check that no errors occurred while we were waiting
-            handleErrors();
-//            checkForDeadlocks();
-
-            try {
-                final ReduceType result = reduceResult.get(100, TimeUnit.MILLISECONDS);
-
-                // in case an error occurred in the reduce
-                handleErrors();
-
-                // return our final reduce result
-                return result;
-            } catch (final TimeoutException ex ) {
-                // a normal case -- we just aren't done
-            } catch (final InterruptedException ex) {
-                errorTracker.notifyOfError(ex);
-                // will handle error in the next round of the for loop
-            } catch (final ExecutionException ex) {
-                errorTracker.notifyOfError(ex);
-                // will handle error in the next round of the for loop
-            }
-        }
-    }
-
-//    private void checkForDeadlocks() {
-//        if ( deadLockCheckCounter++ % 100 == 0 ) {
-//            logger.info("Checking for deadlocks...");
-//            final ThreadMXBean bean = ManagementFactory.getThreadMXBean();
-//            final long[] threadIds = bean.findDeadlockedThreads(); // Returns null if no threads are deadlocked.
-//
-//            if (threadIds != null) {
-//                final ThreadInfo[] infos = bean.getThreadInfo(threadIds);
-//
-//                logger.error("!!! Deadlock detected !!!!");
-//                for (final ThreadInfo info : infos) {
-//                    logger.error("Thread " + info);
-//                    for ( final StackTraceElement elt : info.getStackTrace() ) {
-//                        logger.error("\t" + elt.toString());
-//                    }
-//                }
-//            }
-//        }
-//    }
-
-    private void handleErrors() {
-        if ( errorTracker.hasAnErrorOccurred() ) {
-            masterExecutor.shutdownNow();
-            mapExecutor.shutdownNow();
-            errorTracker.throwErrorIfPending();
-        }
-    }
-
-    /**
-     * MasterJob has the task to enqueue Map jobs and wait for the final reduce
-     *
-     * It must be run in a separate thread in order to properly handle errors that may occur
-     * in the input, map, or reduce jobs without deadlocking.
-     *
-     * The result of this callable is the final reduce value for the input / map / reduce jobs
-     */
-    private class MasterJob implements Callable<ReduceType> {
-        final Iterator<InputType> inputReader;
-        final NSMapFunction<InputType, MapType> map;
-        final ReduceType initialValue;
-        final NSReduceFunction<MapType, ReduceType> reduce;
-
-        private MasterJob(Iterator<InputType> inputReader, NSMapFunction<InputType, MapType> map, ReduceType initialValue, NSReduceFunction<MapType, ReduceType> reduce) {
-            this.inputReader = inputReader;
-            this.map = map;
-            this.initialValue = initialValue;
-            this.reduce = reduce;
-        }
-
-        @Override
-        public ReduceType call() {
-            // Create the input producer and start it running
-            final InputProducer<InputType> inputProducer = new InputProducer<InputType>(inputReader);
-
-            // create the MapResultsQueue to store results of map jobs.
-            final MapResultsQueue<MapType> mapResultQueue = new MapResultsQueue<MapType>();
-
-            // create the reducer we'll use for this nano scheduling run
-            final Reducer<MapType, ReduceType> reducer = new Reducer<MapType, ReduceType>(reduce, errorTracker, initialValue);
-
-            final CountDownLatch runningMapJobs = new CountDownLatch(nThreads);
-
-            try {
-                // create and submit the info needed by the read/map/reduce threads to do their work
-                for ( int i = 0; i < nThreads; i++ ) {
-                    mapExecutor.submit(new ReadMapReduceJob(inputProducer, mapResultQueue, runningMapJobs, map, reducer));
-                }
-
-                // wait for all of the input and map threads to finish
-                return waitForCompletion(mapResultQueue, runningMapJobs, reducer);
-            } catch (Throwable ex) {
-                errorTracker.notifyOfError(ex);
-                return initialValue;
-            }
-        }
-
-        /**
-         * Wait until the input thread and all map threads have completed running, and return the final reduce result
-         */
-        private ReduceType waitForCompletion(final MapResultsQueue<MapType> mapResultsQueue,
-                                             final CountDownLatch runningMapJobs,
-                                             final Reducer<MapType, ReduceType> reducer) throws InterruptedException {
-            // wait for all the map threads to finish by waiting on the runningMapJobs latch
-            runningMapJobs.await();
-
-            // do a final reduce here.  This is critically important because the InputMapReduce jobs
-            // no longer block on reducing, so it's possible for all the threads to end with a few
-            // reduce jobs on the queue still to do.  This call ensures that we reduce everything
-            reducer.reduceAsMuchAsPossible(mapResultsQueue, true);
-
-            // wait until we have a final reduce result
-            final ReduceType finalSum = reducer.getReduceResult();
-
-            // everything is finally shutdown, return the final reduce value
-            return finalSum;
-        }
-    }
-
-    private class ReadMapReduceJob implements Runnable {
-        final InputProducer<InputType> inputProducer;
-        final MapResultsQueue<MapType> mapResultQueue;
-        final NSMapFunction<InputType, MapType> map;
-        final Reducer<MapType, ReduceType> reducer;
-        final CountDownLatch runningMapJobs;
-
-        private ReadMapReduceJob(final InputProducer<InputType> inputProducer,
-                                 final MapResultsQueue<MapType> mapResultQueue,
-                                 final CountDownLatch runningMapJobs,
-                                 final NSMapFunction<InputType, MapType> map,
-                                 final Reducer<MapType, ReduceType> reducer) {
-            this.inputProducer = inputProducer;
-            this.mapResultQueue = mapResultQueue;
-            this.runningMapJobs = runningMapJobs;
-            this.map = map;
-            this.reducer = reducer;
-        }
-
-        @Override
-        public void run() {
-            try {
-                boolean done = false;
-                while ( ! done ) {
-                    // get the next item from the input producer
-                    final InputProducer<InputType>.InputValue inputWrapper = inputProducer.next();
-
-                    // depending on inputWrapper, actually do some work or not, putting result input result object
-                    final MapResult<MapType> result;
-                    if ( ! inputWrapper.isEOFMarker() ) {
-                        // just skip doing anything if we don't have work to do, which is possible
-                        // because we don't necessarily know how much input there is when we queue
-                        // up our jobs
-                        final InputType input = inputWrapper.getValue();
-
-                        // actually execute the map
-                        final MapType mapValue = map.apply(input);
-
-                        // enqueue the result into the mapResultQueue
-                        result = new MapResult<MapType>(mapValue, inputWrapper.getId());
-
-                        mapResultQueue.put(result);
-
-                        // reduce as much as possible, without blocking, if another thread is already doing reduces
-                        final int nReduced = reducer.reduceAsMuchAsPossible(mapResultQueue, false);
-
-                        updateProgress(inputWrapper.getId(), input);
-                    } else {
-                        done = true;
-                    }
-                }
-            } catch (Throwable ex) {
-                errorTracker.notifyOfError(ex);
-            } finally {
-                // we finished a map job, release the job queue semaphore
-                runningMapJobs.countDown();
-            }
-        }
-    }
-}
\ No newline at end of file
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/nanoScheduler/Reducer.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/nanoScheduler/Reducer.java
deleted file mode 100644
index 41b612f..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/nanoScheduler/Reducer.java
+++ /dev/null
@@ -1,169 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.nanoScheduler;
-
-import com.google.java.contract.Ensures;
-import org.apache.log4j.Logger;
-import org.broadinstitute.gatk.utils.MultiThreadedErrorTracker;
-
-import java.util.concurrent.locks.Lock;
-import java.util.concurrent.locks.ReentrantLock;
-
-/**
- * Reducer supporting multi-threaded reduce of the map/reduce.
- *
- * reduceAsMuchAsPossible is the key function.  Multiple threads can call into this, providing
- * the map results queue, and this class accumulates the result of calling reduce
- * on the maps objects.  reduceAsMuchAsPossible isn't directly synchronized, but manages multi-threading
- * directly with a lock.  Threads can request either to block on the reduce call until it can be
- * executed, or immediately exit if the lock isn't available.  That allows multi-threaded users
- * to avoid piling up waiting to reduce while one thread is reducing.  They can instead immediately
- * leave to go do something else productive
- *
- * @author depristo
- * @since 2012
- */
-class Reducer<MapType, ReduceType> {
-    private final static Logger logger = Logger.getLogger(Reducer.class);
-
-    /**
-     * The reduce function to execute
-     */
-    private final NSReduceFunction<MapType, ReduceType> reduce;
-
-    /**
-     * Used to communicate errors to the outer master thread
-     */
-    private final MultiThreadedErrorTracker errorTracker;
-
-    /**
-     * Lock used to protect the call reduceAsMuchAsPossible from race conditions
-     */
-    private final Lock reduceLock = new ReentrantLock();
-
-    /**
-     * The sum of the reduce function applied to all MapResults.  After this Reducer
-     * is done sum contains the final reduce result.
-     */
-    ReduceType sum;
-
-    /**
-     * Create a new Reducer that will apply the reduce function with initialSum value
-     * to values via reduceAsMuchAsPossible, timing the reduce function call costs with
-     * reduceTimer
-     *
-     * @param reduce the reduce function to apply
-     * @param initialSum the initial reduce sum
-     */
-    public Reducer(final NSReduceFunction<MapType, ReduceType> reduce,
-                   final MultiThreadedErrorTracker errorTracker,
-                   final ReduceType initialSum) {
-        if ( errorTracker == null ) throw new IllegalArgumentException("Error tracker cannot be null");
-        if ( reduce == null ) throw new IllegalArgumentException("Reduce function cannot be null");
-
-        this.errorTracker = errorTracker;
-        this.reduce = reduce;
-        this.sum = initialSum;
-    }
-
-    /**
-     * Reduce as much data as possible in mapResultQueue, returning the number of reduce calls completed
-     *
-     * As much as possible is defined as all of the MapResults in the queue are in order starting from the
-     * numSubmittedJobs we reduced previously, up to the either the queue being empty or where the next MapResult
-     * doesn't have JobID == prevJobID + 1.
-     *
-     * @param mapResultQueue a queue of MapResults in jobID order
-     * @return the number of reduces run, from 0 >
-     * @throws InterruptedException
-     */
-    @Ensures("result >= 0")
-    public int reduceAsMuchAsPossible(final MapResultsQueue<MapType> mapResultQueue, final boolean waitForLock) {
-        if ( mapResultQueue == null ) throw new IllegalArgumentException("mapResultQueue cannot be null");
-        int nReducesNow = 0;
-
-        final boolean haveLock = acquireReduceLock(waitForLock);
-        try {
-            if ( haveLock ) {
-                while ( mapResultQueue.nextValueIsAvailable() ) {
-                    final MapResult<MapType> result = mapResultQueue.take();
-
-                    if ( ! result.isEOFMarker() ) {
-                        nReducesNow++;
-
-                        // apply reduce, keeping track of sum
-                        sum = reduce.apply(result.getValue(), sum);
-                    }
-                }
-            }
-        } catch (Exception ex) {
-            errorTracker.notifyOfError(ex);
-        } finally {
-            if ( haveLock ) // if we acquired the lock, unlock it
-                releaseReduceLock();
-        }
-
-        return nReducesNow;
-    }
-
-    /**
-     * Acquire the reduce lock, either returning immediately if not possible or blocking until the lock is available
-     *
-     * @param blockUntilAvailable if true, we will block until the lock is available, otherwise we return immediately
-     *                            without acquiring the lock
-     * @return true if the lock has been acquired, false otherwise
-     */
-    protected boolean acquireReduceLock(final boolean blockUntilAvailable) {
-        if ( blockUntilAvailable ) {
-            reduceLock.lock();
-            return true;
-        } else {
-            return reduceLock.tryLock();
-        }
-    }
-
-    /**
-     * Free the reduce lock.
-     *
-     * Assumes that the invoking thread actually previously acquired the lock (it's a problem if not).
-     */
-    protected void releaseReduceLock() {
-        reduceLock.unlock();
-    }
-
-    /**
-     * Get the current reduce result resulting from applying reduce(...) to all MapResult elements.
-     *
-     * Note that this method cannot know if future reduce calls are coming in.  So it simply gets
-     * the current reduce result.  It is up to the caller to know whether the returned value is
-     * a partial result, or the full final value
-     *
-     * @return the total reduce result across all jobs
-     */
-    public ReduceType getReduceResult() {
-        return sum;
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/pairhmm/BatchPairHMM.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/pairhmm/BatchPairHMM.java
deleted file mode 100644
index 2311564..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/pairhmm/BatchPairHMM.java
+++ /dev/null
@@ -1,41 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.pairhmm;
-
-import org.broadinstitute.gatk.utils.haplotype.Haplotype;
-
-import java.util.List;
-
-public interface BatchPairHMM {
-    public void batchAdd(final List<Haplotype> haplotypes,
-                         final byte[] readBases,
-                         final byte[] readQuals,
-                         final byte[] insertionGOP,
-                         final byte[] deletionGOP,
-                         final byte[] overallGCP);
-
-    public double[] batchGetResult();
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/pairhmm/Log10PairHMM.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/pairhmm/Log10PairHMM.java
deleted file mode 100644
index 4d84fc5..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/pairhmm/Log10PairHMM.java
+++ /dev/null
@@ -1,220 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.pairhmm;
-
-import com.google.java.contract.Ensures;
-import com.google.java.contract.Requires;
-import org.broadinstitute.gatk.utils.MathUtils;
-import org.broadinstitute.gatk.utils.QualityUtils;
-
-import java.util.Arrays;
-
-import static java.lang.Math.log10;
-import static org.broadinstitute.gatk.utils.pairhmm.PairHMMModel.*;
-
-/**
- * Util class for performing the pair HMM for local alignment. Figure 4.3 in Durbin 1998 book.
- *
- * User: rpoplin, carneiro
- * Date: 3/1/12
- */
-public class Log10PairHMM extends N2MemoryPairHMM {
-    /**
-     * Should we use exact log10 calculation (true), or an approximation (false)?
-     */
-    private final boolean doExactLog10;
-
-
-    // we divide e by 3 because the observed base could have come from any of the non-observed alleles
-    protected final static double log10_3 = log10(3.0);
-
-    /**
-     * Create an uninitialized PairHMM
-     *
-     * @param doExactLog10 should the log10 calculations be exact (slow) or approximate (faster)
-     */
-    public Log10PairHMM(final boolean doExactLog10) {
-        this.doExactLog10 = doExactLog10;
-    }
-
-    /**
-     * Is this HMM using exact log10 calculations?
-     * @return true if exact, false if approximate
-     */
-    public boolean isDoingExactLog10Calculations() {
-        return doExactLog10;
-    }
-
-    /**
-     * {@inheritDoc}
-     */
-    @Override
-    public void initialize(final int readMaxLength, final int haplotypeMaxLength ) {
-        super.initialize(readMaxLength, haplotypeMaxLength);
-
-        for( int iii=0; iii < paddedMaxReadLength; iii++ ) {
-            Arrays.fill(matchMatrix[iii], Double.NEGATIVE_INFINITY);
-            Arrays.fill(insertionMatrix[iii], Double.NEGATIVE_INFINITY);
-            Arrays.fill(deletionMatrix[iii], Double.NEGATIVE_INFINITY);
-        }
-    }
-
-    /**
-     * {@inheritDoc}
-     */
-    @Override
-    public double subComputeReadLikelihoodGivenHaplotypeLog10( final byte[] haplotypeBases,
-                                                               final byte[] readBases,
-                                                               final byte[] readQuals,
-                                                               final byte[] insertionGOP,
-                                                               final byte[] deletionGOP,
-                                                               final byte[] overallGCP,
-                                                               final int hapStartIndex,
-                                                               final boolean recacheReadValues,
-                                                               final int nextHapStartIndex) {
-
-
-        if ( ! constantsAreInitialized || recacheReadValues )
-            initializeProbabilities(insertionGOP, deletionGOP, overallGCP);
-        initializePriors(haplotypeBases, readBases, readQuals, hapStartIndex);
-        if (previousHaplotypeBases == null || previousHaplotypeBases.length != haplotypeBases.length) {
-            // set the initial value (free deletions in the beginning) for the first row in the deletion matrix
-            initializeMatrixValues(haplotypeBases);
-        }
-
-        for (int i = 1; i < paddedReadLength; i++) {
-            // +1 here is because hapStartIndex is 0-based, but our matrices are 1 based
-            for (int j = hapStartIndex+1; j < paddedHaplotypeLength; j++) {
-                updateCell(i, j, prior[i][j], transition[i]);
-            }
-        }
-
-        // final probability is the log10 sum of the last element in the Match and Insertion state arrays
-        // this way we ignore all paths that ended in deletions! (huge)
-        // but we have to sum all the paths ending in the M and I matrices, because they're no longer extended.
-        return finalLikelihoodCalculation();
-    }
-
-    protected void initializeMatrixValues(final byte[] haplotypeBases) {
-        final double initialValue = Math.log10(1.0 / haplotypeBases.length);
-        for( int j = 0; j < paddedHaplotypeLength; j++ ) {
-            deletionMatrix[0][j] = initialValue;
-        }
-    }
-
-    protected double finalLikelihoodCalculation() {
-        final int endI = paddedReadLength - 1;
-        double finalSumProbabilities = myLog10SumLog10(new double[]{matchMatrix[endI][1], insertionMatrix[endI][1]});
-        for (int j = 2; j < paddedHaplotypeLength; j++)
-            finalSumProbabilities = myLog10SumLog10(new double[]{finalSumProbabilities, matchMatrix[endI][j], insertionMatrix[endI][j]});
-        return finalSumProbabilities;
-    }
-
-
-    /**
-     * Initializes the matrix that holds all the constants related to the editing
-     * distance between the read and the haplotype.
-     *
-     * @param haplotypeBases the bases of the haplotype
-     * @param readBases      the bases of the read
-     * @param readQuals      the base quality scores of the read
-     * @param startIndex     where to start updating the distanceMatrix (in case this read is similar to the previous read)
-     */
-    public void initializePriors(final byte[] haplotypeBases, final byte[] readBases, final byte[] readQuals, final int startIndex) {
-
-        // initialize the pBaseReadLog10 matrix for all combinations of read x haplotype bases
-        // Abusing the fact that java initializes arrays with 0.0, so no need to fill in rows and columns below 2.
-
-        for (int i = 0; i < readBases.length; i++) {
-            final byte x = readBases[i];
-            final byte qual = readQuals[i];
-            for (int j = startIndex; j < haplotypeBases.length; j++) {
-                final byte y = haplotypeBases[j];
-                prior[i+1][j+1] = ( x == y || x == (byte) 'N' || y == (byte) 'N' ?
-                        QualityUtils.qualToProbLog10(qual) : (QualityUtils.qualToErrorProbLog10(qual) - (doNotUseTristateCorrection ? 0.0 : log10_3)) );
-            }
-        }
-    }
-
-    /**
-     * Initializes the matrix that holds all the constants related to quality scores.
-     *
-     * @param insertionGOP   insertion quality scores of the read
-     * @param deletionGOP    deletion quality scores of the read
-     * @param overallGCP     overall gap continuation penalty
-     */
-    @Requires({
-            "insertionGOP != null",
-            "deletionGOP != null",
-            "overallGCP != null"
-    })
-    @Ensures("constantsAreInitialized")
-    protected void initializeProbabilities(final byte[] insertionGOP, final byte[] deletionGOP, final byte[] overallGCP) {
-        PairHMMModel.qualToTransProbsLog10(transition,insertionGOP,deletionGOP,overallGCP);
-        // note that we initialized the constants
-        constantsAreInitialized = true;
-    }
-
-
-    /**
-     * Compute the log10SumLog10 of the values
-     *
-     * NOTE NOTE NOTE
-     *
-     * Log10PairHMM depends critically on this function tolerating values that are all -Infinity
-     * and the sum returning -Infinity.  Note good.  Needs to be fixed.
-     *
-     * NOTE NOTE NOTE
-     *
-     * @param values an array of log10 probabilities that need to be summed
-     * @return the log10 of the sum of the probabilities
-     */
-    @Requires("values != null")
-    protected double myLog10SumLog10(final double[] values) {
-        return doExactLog10 ? MathUtils.log10sumLog10(values) : MathUtils.approximateLog10SumLog10(values);
-    }
-
-    /**
-     * Updates a cell in the HMM matrix
-     *
-     * The read and haplotype indices are offset by one because the state arrays have an extra column to hold the
-     * initial conditions
-
-     * @param indI             row index in the matrices to update
-     * @param indJ             column index in the matrices to update
-     * @param prior            the likelihood editing distance matrix for the read x haplotype
-     * @param transition        an array with the six transition relevant to this location
-     */
-    protected void updateCell( final int indI, final int indJ, final double prior, final double[] transition) {
-
-        matchMatrix[indI][indJ] = prior +
-                myLog10SumLog10(new double[]{matchMatrix[indI - 1][indJ - 1] + transition[matchToMatch],
-                                         insertionMatrix[indI - 1][indJ - 1] + transition[indelToMatch],
-                                          deletionMatrix[indI - 1][indJ - 1] + transition[indelToMatch]});
-        insertionMatrix[indI][indJ] = myLog10SumLog10(new double[] {matchMatrix[indI - 1][indJ] + transition[matchToInsertion], insertionMatrix[indI - 1][indJ] + transition[insertionToInsertion]});
-        deletionMatrix[indI][indJ]  = myLog10SumLog10(new double[] {matchMatrix[indI][indJ - 1] + transition[matchToDeletion],  deletionMatrix[indI][indJ - 1] + transition[deletionToDeletion]});
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/pairhmm/N2MemoryPairHMM.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/pairhmm/N2MemoryPairHMM.java
deleted file mode 100644
index 0e0ffb5..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/pairhmm/N2MemoryPairHMM.java
+++ /dev/null
@@ -1,98 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.pairhmm;
-
-import com.google.java.contract.Requires;
-
-/**
- * Superclass for PairHMM that want to use a full read x haplotype matrix for their match, insertion, and deletion matrix
- *
- * User: rpoplin
- * Date: 10/16/12
- */
-abstract class N2MemoryPairHMM extends PairHMM {
-    protected double[][] transition = null; // The transition probabilities cache
-    protected double[][] prior = null;      // The prior probabilities cache
-    protected double[][] matchMatrix = null;
-    protected double[][] insertionMatrix = null;
-    protected double[][] deletionMatrix = null;
-
-    // only used for debugging purposes
-    protected boolean doNotUseTristateCorrection = false;
-
-    public void doNotUseTristateCorrection() {
-        doNotUseTristateCorrection = true;
-    }
-
-    /**
-     * Initialize this PairHMM, making it suitable to run against a read and haplotype with given lengths
-     *
-     * Note: Do not worry about padding, just provide the true max length of the read and haplotype. The HMM will take care of the padding.
-     *
-     * @param haplotypeMaxLength the max length of haplotypes we want to use with this PairHMM
-     * @param readMaxLength the max length of reads we want to use with this PairHMM
-     */
-    @Override
-    public void initialize( final int readMaxLength, final int haplotypeMaxLength ) {
-        super.initialize(readMaxLength, haplotypeMaxLength);
-
-        matchMatrix = new double[paddedMaxReadLength][paddedMaxHaplotypeLength];
-        insertionMatrix = new double[paddedMaxReadLength][paddedMaxHaplotypeLength];
-        deletionMatrix = new double[paddedMaxReadLength][paddedMaxHaplotypeLength];
-
-        transition = PairHMMModel.createTransitionMatrix(maxReadLength);
-        prior = new double[paddedMaxReadLength][paddedMaxHaplotypeLength];
-    }
-
-    /**
-     * Print out the core hmm matrices for debugging
-     */
-    protected void dumpMatrices() {
-        dumpMatrix("matchMetricArray", matchMatrix);
-        dumpMatrix("insertionMatrix", insertionMatrix);
-        dumpMatrix("deletionMatrix", deletionMatrix);
-    }
-
-    /**
-     * Print out in a human readable form the matrix for debugging
-     * @param name the name of this matrix
-     * @param matrix the matrix of values
-     */
-    @Requires({"name != null", "matrix != null"})
-    private void dumpMatrix(final String name, final double[][] matrix) {
-        System.out.printf("%s%n", name);
-        for ( int i = 0; i < matrix.length; i++) {
-            System.out.printf("\t%s[%d]", name, i);
-            for ( int j = 0; j < matrix[i].length; j++ ) {
-                if ( Double.isInfinite(matrix[i][j]) )
-                    System.out.printf(" %15s", String.format("%f", matrix[i][j]));
-                else
-                    System.out.printf(" % 15.5e", matrix[i][j]);
-            }
-            System.out.println();
-        }
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/pairhmm/PairHMM.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/pairhmm/PairHMM.java
deleted file mode 100644
index 6c4460c..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/pairhmm/PairHMM.java
+++ /dev/null
@@ -1,357 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.pairhmm;
-
-import com.google.java.contract.Requires;
-import htsjdk.variant.variantcontext.Allele;
-import org.apache.log4j.Logger;
-import org.broadinstitute.gatk.utils.MathUtils;
-import org.broadinstitute.gatk.utils.genotyper.ReadLikelihoods;
-import org.broadinstitute.gatk.utils.haplotype.Haplotype;
-import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
-
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.List;
-import java.util.Map;
-/**
- * Util class for performing the pair HMM for local alignment. Figure 4.3 in Durbin 1998 book.
- *
- * User: rpoplin
- * Date: 10/16/12
- */
-public abstract class PairHMM {
-    protected final static Logger logger = Logger.getLogger(PairHMM.class);
-
-    protected boolean constantsAreInitialized = false;
-
-    protected byte[] previousHaplotypeBases;
-    protected int hapStartIndex;
-
-    public enum HMM_IMPLEMENTATION {
-        /* Very slow implementation which uses very accurate log10 sum functions. Only meant to be used as a reference test implementation */
-        EXACT,
-        /* PairHMM as implemented for the UnifiedGenotyper. Uses log10 sum functions accurate to only 1E-4 */
-        ORIGINAL,
-        /* Optimized version of the PairHMM which caches per-read computations and operations in real space to avoid costly sums of log10'ed likelihoods */
-        LOGLESS_CACHING,
-        /* Optimized AVX implementation of LOGLESS_CACHING called through JNI */
-        VECTOR_LOGLESS_CACHING,
-        /* Debugging for vector implementation of LOGLESS_CACHING */
-        DEBUG_VECTOR_LOGLESS_CACHING,
-        /* Logless caching PairHMM that stores computations in 1D arrays instead of matrices, and which proceeds diagonally over the (read x haplotype) intersection matrix */
-        ARRAY_LOGLESS
-    }
-
-    protected int maxHaplotypeLength, maxReadLength;
-    protected int paddedMaxReadLength, paddedMaxHaplotypeLength;
-    protected int paddedReadLength, paddedHaplotypeLength;
-    protected boolean initialized = false;
-
-    // only used for debugging purposes
-    protected boolean doNotUseTristateCorrection = false;
-    protected void doNotUseTristateCorrection() { doNotUseTristateCorrection = true; }
-
-    //debug array
-    protected double[] mLikelihoodArray;
-
-    //profiling information
-    protected static Boolean doProfiling = true;
-    protected static long pairHMMComputeTime = 0;
-    protected long threadLocalPairHMMComputeTimeDiff = 0;
-    protected long startTime = 0;
-
-    /**
-     * Initialize this PairHMM, making it suitable to run against a read and haplotype with given lengths
-     *
-     * Note: Do not worry about padding, just provide the true max length of the read and haplotype. The HMM will take care of the padding.
-     *
-     * @param haplotypeMaxLength the max length of haplotypes we want to use with this PairHMM
-     * @param readMaxLength the max length of reads we want to use with this PairHMM
-     */
-    public void initialize( final int readMaxLength, final int haplotypeMaxLength ) {
-        if ( readMaxLength <= 0 ) throw new IllegalArgumentException("READ_MAX_LENGTH must be > 0 but got " + readMaxLength);
-        if ( haplotypeMaxLength <= 0 ) throw new IllegalArgumentException("HAPLOTYPE_MAX_LENGTH must be > 0 but got " + haplotypeMaxLength);
-
-        maxHaplotypeLength = haplotypeMaxLength;
-        maxReadLength = readMaxLength;
-
-        // M, X, and Y arrays are of size read and haplotype + 1 because of an extra column for initial conditions and + 1 to consider the final base in a non-global alignment
-        paddedMaxReadLength = readMaxLength + 1;
-        paddedMaxHaplotypeLength = haplotypeMaxLength + 1;
-
-        previousHaplotypeBases = null;
-
-        constantsAreInitialized = false;
-        initialized = true;
-    }
-
-    /**
-     * Called at the end of PairHMM for a region - mostly used by the JNI implementations
-     */
-    public void finalizeRegion()
-    {
-        ;
-    }
-
-    /**
-     * Initialize this PairHMM, making it suitable to run against a read and haplotype with given lengths
-     * This function is used by the JNI implementations to transfer all data once to the native code
-     * @param haplotypes the list of haplotypes
-     * @param perSampleReadList map from sample name to list of reads
-     * @param haplotypeMaxLength the max length of haplotypes we want to use with this PairHMM
-     * @param readMaxLength the max length of reads we want to use with this PairHMM
-     */
-    public void initialize( final List<Haplotype> haplotypes, final Map<String, List<GATKSAMRecord>> perSampleReadList, final int readMaxLength, final int haplotypeMaxLength ) {
-        initialize(readMaxLength, haplotypeMaxLength);
-    }
-
-    private int findMaxReadLength(final GATKSAMRecord ... reads) {
-        int max = 0;
-        for (final GATKSAMRecord read : reads) {
-            final int readLength = read.getReadLength();
-            if (max < readLength)
-                max = readLength;
-        }
-        return max;
-    }
-
-    private int findMaxAlleleLength(final List<? extends Allele> alleles) {
-        int max = 0;
-        for (final Allele allele : alleles) {
-            final int alleleLength = allele.length();
-            if (max < alleleLength)
-                max = alleleLength;
-        }
-        return max;
-    }
-
-    protected int findMaxReadLength(final List<GATKSAMRecord> reads) {
-        int listMaxReadLength = 0;
-        for(GATKSAMRecord read : reads){
-            final int readLength = read.getReadLength();
-            if( readLength > listMaxReadLength ) { listMaxReadLength = readLength; }
-        }
-        return listMaxReadLength;
-    }
-
-    protected int findMaxHaplotypeLength(final Collection<Haplotype> haplotypes) {
-        int listMaxHaplotypeLength = 0;
-        for( final Haplotype h : haplotypes) {
-            final int haplotypeLength = h.getBases().length;
-            if( haplotypeLength > listMaxHaplotypeLength ) { listMaxHaplotypeLength = haplotypeLength; }
-        }
-        return listMaxHaplotypeLength;
-    }
-
-    /**
-     *  Given a list of reads and haplotypes, for every read compute the total probability of said read arising from
-     *  each haplotype given base substitution, insertion, and deletion probabilities.
-     *
-     * @param processedReads reads to analyze instead of the ones present in the destination read-likelihoods.
-     * @param likelihoods where to store the likelihoods where position [a][r] is reserved for the likelihood of {@code reads[r]}
-     *             conditional to {@code alleles[a]}.
-     * @param gcp penalty for gap continuations base array map for processed reads.
-     *
-     * @throws IllegalArgumentException
-     *
-     * @return never {@code null}.
-     */
-    public void computeLikelihoods(final ReadLikelihoods.Matrix<Haplotype> likelihoods,
-                                   final List<GATKSAMRecord> processedReads,
-                                   final Map<GATKSAMRecord,byte[]> gcp) {
-        if (processedReads.isEmpty())
-            return;
-        if(doProfiling)
-            startTime = System.nanoTime();
-        // (re)initialize the pairHMM only if necessary
-        final int readMaxLength = findMaxReadLength(processedReads);
-        final int haplotypeMaxLength = findMaxAlleleLength(likelihoods.alleles());
-        if (!initialized || readMaxLength > maxReadLength || haplotypeMaxLength > maxHaplotypeLength)
-            initialize(readMaxLength, haplotypeMaxLength);
-
-        final int readCount = processedReads.size();
-        final List<Haplotype> alleles = likelihoods.alleles();
-        final int alleleCount = alleles.size();
-        mLikelihoodArray = new double[readCount * alleleCount];
-        int idx = 0;
-        int readIndex = 0;
-        for(final GATKSAMRecord read : processedReads){
-            final byte[] readBases = read.getReadBases();
-            final byte[] readQuals = read.getBaseQualities();
-            final byte[] readInsQuals = read.getBaseInsertionQualities();
-            final byte[] readDelQuals = read.getBaseDeletionQualities();
-            final byte[] overallGCP = gcp.get(read);
-
-            // peak at the next haplotype in the list (necessary to get nextHaplotypeBases, which is required for caching in the array implementation)
-            final boolean isFirstHaplotype = true;
-            for (int a = 0; a < alleleCount; a++) {
-                final Allele allele = alleles.get(a);
-                final byte[] alleleBases = allele.getBases();
-                final byte[] nextAlleleBases = a == alleles.size() - 1 ? null : alleles.get(a + 1).getBases();
-                final double lk = computeReadLikelihoodGivenHaplotypeLog10(alleleBases,
-                        readBases, readQuals, readInsQuals, readDelQuals, overallGCP, isFirstHaplotype, nextAlleleBases);
-                likelihoods.set(a, readIndex, lk);
-                mLikelihoodArray[idx++] = lk;
-            }
-            readIndex++;
-        }
-        if(doProfiling) {
-            threadLocalPairHMMComputeTimeDiff = (System.nanoTime() - startTime);
-            //synchronized(doProfiling)
-            {
-                pairHMMComputeTime += threadLocalPairHMMComputeTimeDiff;
-            }
-        }
-    }
-
-    /**
-     * Compute the total probability of read arising from haplotypeBases given base substitution, insertion, and deletion
-     * probabilities.
-     *
-     * Note on using hapStartIndex.  This allows you to compute the exact true likelihood of a full haplotypes
-     * given a read, assuming that the previous calculation read over a full haplotype, recaching the read values,
-     * starting only at the place where the new haplotype bases and the previous haplotype bases different.  This
-     * index is 0-based, and can be computed with findFirstPositionWhereHaplotypesDiffer given the two haplotypes.
-     * Note that this assumes that the read and all associated quals values are the same.
-     *
-     * @param haplotypeBases the full sequence (in standard SAM encoding) of the haplotype, must be >= than read bases in length
-     * @param readBases the bases (in standard encoding) of the read, must be <= haplotype bases in length
-     * @param readQuals the phred-scaled per base substitution quality scores of read.  Must be the same length as readBases
-     * @param insertionGOP the phred-scaled per base insertion quality scores of read.  Must be the same length as readBases
-     * @param deletionGOP the phred-scaled per base deletion quality scores of read.  Must be the same length as readBases
-     * @param overallGCP the phred-scaled gap continuation penalties scores of read.  Must be the same length as readBases
-     * @param recacheReadValues if false, we don't recalculate any cached results, assuming that readBases and its associated
-     *                          parameters are the same, and only the haplotype bases are changing underneath us
-     * @return the log10 probability of read coming from the haplotype under the provided error model
-     */
-    protected final double computeReadLikelihoodGivenHaplotypeLog10( final byte[] haplotypeBases,
-                                                                  final byte[] readBases,
-                                                                  final byte[] readQuals,
-                                                                  final byte[] insertionGOP,
-                                                                  final byte[] deletionGOP,
-                                                                  final byte[] overallGCP,
-                                                                  final boolean recacheReadValues,
-                                                                  final byte[] nextHaploytpeBases) {
-
-        if ( ! initialized ) throw new IllegalStateException("Must call initialize before calling computeReadLikelihoodGivenHaplotypeLog10");
-        if ( haplotypeBases == null ) throw new IllegalArgumentException("haplotypeBases cannot be null");
-        if ( haplotypeBases.length > maxHaplotypeLength ) throw new IllegalArgumentException("Haplotype bases is too long, got " + haplotypeBases.length + " but max is " + maxHaplotypeLength);
-        if ( readBases == null ) throw new IllegalArgumentException("readBases cannot be null");
-        if ( readBases.length > maxReadLength ) throw new IllegalArgumentException("readBases is too long, got " + readBases.length + " but max is " + maxReadLength);
-        if ( readQuals.length != readBases.length ) throw new IllegalArgumentException("Read bases and read quals aren't the same size: " + readBases.length + " vs " + readQuals.length);
-        if ( insertionGOP.length != readBases.length ) throw new IllegalArgumentException("Read bases and read insertion quals aren't the same size: " + readBases.length + " vs " + insertionGOP.length);
-        if ( deletionGOP.length != readBases.length ) throw new IllegalArgumentException("Read bases and read deletion quals aren't the same size: " + readBases.length + " vs " + deletionGOP.length);
-        if ( overallGCP.length != readBases.length ) throw new IllegalArgumentException("Read bases and overall GCP aren't the same size: " + readBases.length + " vs " + overallGCP.length);
-
-        paddedReadLength = readBases.length + 1;
-        paddedHaplotypeLength = haplotypeBases.length + 1;
-
-        hapStartIndex =  (recacheReadValues) ? 0 : hapStartIndex;
-
-        // Pre-compute the difference between the current haplotype and the next one to be run
-        // Looking ahead is necessary for the ArrayLoglessPairHMM implementation
-        final int nextHapStartIndex =  (nextHaploytpeBases == null || haplotypeBases.length != nextHaploytpeBases.length) ? 0 : findFirstPositionWhereHaplotypesDiffer(haplotypeBases, nextHaploytpeBases);
-
-        double result = subComputeReadLikelihoodGivenHaplotypeLog10(haplotypeBases, readBases, readQuals, insertionGOP, deletionGOP, overallGCP, hapStartIndex, recacheReadValues, nextHapStartIndex);
-
-        if ( result > 0.0)
-            throw new IllegalStateException("PairHMM Log Probability cannot be greater than 0: " + String.format("haplotype: %s, read: %s, result: %f, PairHMM: %s", new String(haplotypeBases), new String(readBases), result, this.getClass().getSimpleName()));
-        else if (!MathUtils.goodLog10Probability(result))
-            throw new IllegalStateException("Invalid Log Probability: " + result);
-
-        // Warning: Careful if using the PairHMM in parallel! (this update has to be taken care of).
-        // Warning: This assumes no downstream modification of the haplotype bases (saves us from copying the array). It is okay for the haplotype caller and the Unified Genotyper.
-        previousHaplotypeBases = haplotypeBases;
-
-        // For the next iteration, the hapStartIndex for the next haploytpe becomes the index for the current haplotype
-        // The array implementation has to look ahead to the next haplotype to store caching info. It cannot do this if nextHapStart is before hapStart
-        hapStartIndex = (nextHapStartIndex < hapStartIndex) ? 0: nextHapStartIndex;
-
-        return result;
-    }
-
-    /**
-     * To be overloaded by subclasses to actually do calculation for #computeReadLikelihoodGivenHaplotypeLog10
-     */
-    @Requires({"readBases.length == readQuals.length", "readBases.length == insertionGOP.length", "readBases.length == deletionGOP.length",
-            "readBases.length == overallGCP.length", "matchMatrix!=null", "insertionMatrix!=null", "deletionMatrix!=null"})
-    protected abstract double subComputeReadLikelihoodGivenHaplotypeLog10( final byte[] haplotypeBases,
-                                                                           final byte[] readBases,
-                                                                           final byte[] readQuals,
-                                                                           final byte[] insertionGOP,
-                                                                           final byte[] deletionGOP,
-                                                                           final byte[] overallGCP,
-                                                                           final int hapStartIndex,
-                                                                           final boolean recacheReadValues,
-                                                                           final int nextHapStartIndex);
-
-    /**
-     * Compute the first position at which two haplotypes differ
-     *
-     * If the haplotypes are exact copies of each other, returns the min length of the two haplotypes.
-     *
-     * @param haplotype1 the first haplotype1
-     * @param haplotype2 the second haplotype1
-     * @return the index of the first position in haplotype1 and haplotype2 where the byte isn't the same
-     */
-    public static int findFirstPositionWhereHaplotypesDiffer(final byte[] haplotype1, final byte[] haplotype2) {
-        if ( haplotype1 == null || haplotype1.length == 0 ) throw new IllegalArgumentException("Haplotype1 is bad " + Arrays.toString(haplotype1));
-        if ( haplotype2 == null || haplotype2.length == 0 ) throw new IllegalArgumentException("Haplotype2 is bad " + Arrays.toString(haplotype2));
-
-        for( int iii = 0; iii < haplotype1.length && iii < haplotype2.length; iii++ ) {
-            if( haplotype1[iii] != haplotype2[iii] ) {
-                return iii;
-            }
-        }
-
-        return Math.min(haplotype1.length, haplotype2.length);
-    }
-
-    /**
-     * Use number of threads to set doProfiling flag - doProfiling iff numThreads == 1
-     * This function should be called only during initialization phase - single thread phase of HC
-     */
-    public static void setNumberOfThreads(final int numThreads)
-    {
-        doProfiling = (numThreads == 1);
-        if(numThreads > 1)
-            logger.info("Performance profiling for PairHMM is disabled because HaplotypeCaller is being run with multiple threads (-nct>1) option\nProfiling is enabled only when running in single thread mode\n");
-    }
-
-    /**
-     * Return the results of the computeLikelihoods function
-     */
-    public double[] getLikelihoodArray() { return mLikelihoodArray; }
-    /**
-     * Called at the end of the program to close files, print profiling information etc 
-     */
-    public void close()
-    {
-        if(doProfiling)
-            System.out.println("Total compute time in PairHMM computeLikelihoods() : "+(pairHMMComputeTime*1e-9));
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/pairhmm/PairHMMModel.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/pairhmm/PairHMMModel.java
deleted file mode 100644
index 1cd8865..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/pairhmm/PairHMMModel.java
+++ /dev/null
@@ -1,435 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.pairhmm;
-
-import org.broadinstitute.gatk.utils.MathUtils;
-import org.broadinstitute.gatk.utils.QualityUtils;
-
-/**
- * Helper class that implement calculations required to implement the PairHMM Finite State Automation (FSA) model.
- *
- * @author Valentin Ruano-Rubio <valentin at broadinstitute.org>
- */
-public class PairHMMModel {
-
-
-    /**
-     * Prevents instantiation of this class
-     */
-    private PairHMMModel() {
-
-    }
-
-    /**
-     * Length of the standard transition probability array.
-     */
-    public static final int TRANS_PROB_ARRAY_LENGTH = 6;
-
-    /**
-     * Position in the transition probability array for the Match-to-Match transition.
-     */
-    public static final int matchToMatch = 0;
-
-    /**
-     * Position in the transition probability array for the Indel-to-Match transition.
-     */
-    public static final int indelToMatch = 1;
-
-    /**
-     * Position in the transition probability array for the Match-to-Insertion transition.
-     */
-    public static final int matchToInsertion = 2;
-
-    /**
-     * Position in the transition probability array for the Insertion-to-Insertion transition.
-     */
-    public static final int insertionToInsertion = 3;
-
-    /**
-     * Position in the transition probability array for the Match-to-Deletion transition.
-     */
-    public static final int matchToDeletion = 4;
-
-    /**
-     * Position in the transition probability array for the Deletion-to-Deletion transition.
-     */
-    public static final int deletionToDeletion = 5;
-
-    /**
-     * Convenient ln10 constant.
-     */
-    private static double LN10 = Math.log(10);
-
-    /**
-     * Convenient (ln10)^-1 constant.
-     */
-    private static double INV_LN10 = 1.0 / LN10;
-
-    /**
-     * Holds pre-calculated the matchToMath probability values in linear scale.
-     *
-     * <p/>
-     * This is a triangular matrix stored in a unidimentional array like so:
-     * <p/>
-     * (0,0), (0,1), (1,1), (0,2), (1,2), (2,2), (0,3) ... ({@link QualityUtils#MAX_QUAL},{@link QualityUtils#MAX_QUAL})
-     */
-    private static double[] matchToMatchProb = new double[((QualityUtils.MAX_QUAL + 1) * (QualityUtils.MAX_QUAL + 2)) >> 1];
-
-    /**
-     * Holds pre-calculated the matchToMath probability values in log10 scale.
-     *
-     * <p/>
-     * This is a triangular matrix stored in a unidimentional array like so:
-     * <p/>
-     * (0,0), (0,1), (1,1), (0,2), (1,2), (2,2), (0,3) ... ({@link QualityUtils#MAX_QUAL},{@link QualityUtils#MAX_QUAL})
-     */
-    private static double[] matchToMatchLog10 = new double[((QualityUtils.MAX_QUAL + 1) * (QualityUtils.MAX_QUAL + 2)) >> 1];
-
-    /**
-     * Initialize matchToMatch cache tables {@link #matchToMatch} and {@link #matchToMatchLog10}
-     */
-    static {
-        for (int i = 0, offset = 0; i <= QualityUtils.MAX_QUAL; offset += ++i)
-            for (int j = 0; j <= i; j++) {
-                final double log10Sum = MathUtils.approximateLog10SumLog10(-0.1 * i,-0.1 * j);
-                matchToMatchLog10[offset + j] =
-                        Math.log1p( - Math.min(1,Math.pow(10,log10Sum))) * INV_LN10;
-                matchToMatchProb[offset + j] = Math.pow(10,matchToMatchLog10[offset + j]);
-            }
-    }
-
-    /**
-     * Fills a transition probability array given the different quality scores affecting a read site
-     *
-     * @param insQual the insertion quality score as a byte.
-     * @param delQual the deletion quality score as a byte.
-     * @param gcp the gap-continuation-penalty score as a byte.
-     *
-     * @throws NullPointerException if {@code dest} is {@code null}.
-     * @throws ArrayIndexOutOfBoundsException if {@code dest} is not large enough.
-     * @throws IllegalArgumentException if {@code insQual}, {@code delQual} or {@code gcp} is less than negative.
-     */
-    public static void qualToTransProbs(final double[] dest, final byte insQual, final byte delQual, final byte gcp) {
-        if (insQual < 0) throw new IllegalArgumentException("insert quality cannot less than 0: " + insQual);
-        if (delQual < 0) throw new IllegalArgumentException("deletion quality cannot be less than 0: " + delQual);
-        if (gcp < 0) throw new IllegalArgumentException("gcp cannot be less than 0: " + gcp);
-        dest[matchToMatch] = matchToMatchProb(insQual, delQual);
-        dest[matchToInsertion] = QualityUtils.qualToErrorProb(insQual);
-        dest[matchToDeletion] = QualityUtils.qualToErrorProb(delQual);
-        dest[indelToMatch] = QualityUtils.qualToProb(gcp);
-        dest[insertionToInsertion] = dest[deletionToDeletion] = QualityUtils.qualToErrorProb(gcp);
-    }
-
-    /**
-     * Returns a transition probability array given the different quality scores affecting a read site.
-     *
-     * @param insQual the insertion quality score as a byte.
-     * @param delQual the deletion quality score as a byte.
-     * @param gcp the gap-continuation-penalty score as a byte.
-     *
-     * @throws NullPointerException if {@code dest} is {@code null}.
-     * @throws ArrayIndexOutOfBoundsException if {@code dest} is not large enough.
-     * @throws IllegalArgumentException if {@code insQual}, {@code delQual} or {@code gcp} is less than negative.
-     *
-     * @return never {@code null}. An array of length {@link #TRANS_PROB_ARRAY_LENGTH}.
-     */
-    @SuppressWarnings("unused")
-    public static double[] qualToTransProbs(final byte insQual, final byte delQual, final byte gcp) {
-        final double[] dest = new double[TRANS_PROB_ARRAY_LENGTH];
-        qualToTransProbs(dest,insQual,delQual,gcp);
-        return dest;
-    }
-
-    /**
-     * Fills ax matrix with the transition probabilities for a number of bases.
-     *
-     * <p/>
-     * The first dimension of the matrix correspond to the different bases where the first one is stored in position 1.
-     * Thus the position 0 is left empty and the length of the resulting matrix is actually {@code insQual.length + 1}.
-     * <p/>
-     * Each entry is the transition probability array for that base with a length of {@link #TRANS_PROB_ARRAY_LENGTH}.
-     *
-     * @param dest the matrix to update
-     * @param insQuals insertion qualities.
-     * @param delQuals deletion qualities.
-     * @param gcps gap-continuation penalty qualities.
-     *
-     * @throws NullPointerException if any of the input arrays, matrices is {@code null} or any entry in {@code dest} is {@code null}.
-     * @throws IllegalArgumentException if {@code IllegalArgumentException}
-     *  if the input array don't have the same length.
-     * @throws ArrayIndexOutOfBoundsException if {@code dest} or any of its elements is not large enough to contain the
-     *  transition  matrix.
-     */
-    @SuppressWarnings("unused")
-    public static void qualToTransProbs(final double[][] dest, final byte[] insQuals, final byte[] delQuals, final byte[] gcps) {
-        final int readLength = insQuals.length;
-        if (delQuals.length != readLength) throw new IllegalArgumentException("deletion quality array length does not match insert quality array length: " + readLength + " != " + delQuals.length);
-        if (gcps.length != readLength) throw new IllegalArgumentException("deletion quality array length does not match insert quality array length: " + readLength + " != " + gcps.length);
-
-        if (dest.length < readLength + 1) throw new IllegalArgumentException("destination length is not enough for the read length: " + dest.length + " < " + readLength + " + 1");
-
-        for (int i = 0; i < readLength; i++)
-            qualToTransProbs(dest[i + 1], insQuals[i], delQuals[i], gcps[i]);
-    }
-
-    /**
-     * Returns a matrix with the transition probabilities for a number of bases.
-     *
-     * <p/>
-     * The first dimension of the matrix correspond to the different bases where the first one is stored in position 1.
-     * Thus the position 0 is left empty and the length of the resulting matrix is actually {@code insQual.length + 1}.
-     * <p/>
-     * Each entry is the transition probability array for that base with a length of {@link #TRANS_PROB_ARRAY_LENGTH}.
-     *
-     * @param insQuals insertion qualities.
-     * @param delQuals deletion qualities.
-     * @param gcps gap-continuation penalty qualities.
-     *
-     * @throws NullPointerException if any of the input arrays is {@code null}.
-     * @throws IllegalArgumentException if {@code IllegalArgumentException}
-     *  if the input array don't have the same length.
-     *
-     * @return never {@code null}, an matrix of the dimensions explained above.
-     */
-    @SuppressWarnings("unused")
-    public static double[][] qualToTransProbs(final byte[] insQuals, final byte[] delQuals, final byte[] gcps) {
-        final double[][] dest = createTransitionMatrix(insQuals.length);
-        qualToTransProbs(dest,insQuals,delQuals,gcps);
-        return dest;
-    }
-
-    /**
-     * Fills a transition log10 probability array given the different quality scores affecting a read site.
-     *
-     * @param insQual the insertion quality score as a byte.
-     * @param delQual the deletion quality score as a byte.
-     * @param gcp the gap-continuation-penalty score as a byte.
-     *
-     * @throws NullPointerException if {@code dest} is {@code null}.
-     * @throws ArrayIndexOutOfBoundsException if {@code dest} is not large enough.
-     * @throws IllegalArgumentException if {@code insQual}, {@code delQual} or {@code gcp} is less than negative.
-     */
-    public static void qualToTransProbsLog10(final double[] dest, final byte insQual, final byte delQual, final byte gcp) {
-        if (insQual < 0) throw new IllegalArgumentException("insert quality cannot less than 0: " + insQual);
-        if (delQual < 0) throw new IllegalArgumentException("deletion quality cannot be less than 0: " + delQual);
-        if (gcp < 0) throw new IllegalArgumentException("gcp cannot be less than 0: " + gcp);
-        dest[matchToMatch] = matchToMatchProbLog10(insQual, delQual);
-        dest[matchToInsertion] = QualityUtils.qualToErrorProbLog10(insQual);
-        dest[matchToDeletion] = QualityUtils.qualToErrorProbLog10(delQual);
-        dest[indelToMatch] = QualityUtils.qualToProbLog10(gcp);
-        dest[insertionToInsertion] = dest[deletionToDeletion] = QualityUtils.qualToErrorProbLog10(gcp);
-    }
-
-    /**
-     * Returns a transition log10 probability array given the different quality scores affecting a read site.
-     *
-     * @param insQual the insertion quality score as a byte.
-     * @param delQual the deletion quality score as a byte.
-     * @param gcp the gap-continuation-penalty score as a byte.
-     *
-     * @throws NullPointerException if {@code dest} is {@code null}.
-     * @throws ArrayIndexOutOfBoundsException if {@code dest} is not large enough.
-     * @throws IllegalArgumentException if {@code insQual}, {@code delQual} or {@code gcp} is less than negative.
-     *
-     * @return never {@code null}. An array of length {@link #TRANS_PROB_ARRAY_LENGTH}.
-     */
-    @SuppressWarnings("unused")
-    public static double[] qualToTransProbsLog10(final byte insQual, final byte delQual, final byte gcp) {
-        final double[] dest = new double[TRANS_PROB_ARRAY_LENGTH];
-        qualToTransProbsLog10(dest,insQual,delQual,gcp);
-        return dest;
-    }
-
-    /**
-     * Fills a matrix with the log10 transition probabilities for a number of bases.
-     *
-     * <p/>
-     * The first dimension of the matrix correspond to the different bases where the first one is stored in position 1.
-     * Thus the position 0 is left empty and the length of the resulting matrix is actually {@code insQual.length + 1}.
-     * <p/>
-     * Each entry is the transition probability array for that base with a length of {@link #TRANS_PROB_ARRAY_LENGTH}.
-     *
-     * @param insQuals insertion qualities.
-     * @param delQuals deletion qualities.
-     * @param gcps gap-continuation penalty qualities.
-     *
-     * @throws NullPointerException if any of the input arrays, matrices is {@code null} or any entry in {@code dest} is {@code null}.
-     * @throws IllegalArgumentException if {@code IllegalArgumentException}
-     *  if the input array don't have the same length.
-     * @throws ArrayIndexOutOfBoundsException if {@code dest} or any of its elements is not large enough to contain the
-     *  transition  matrix.
-     */
-    @SuppressWarnings("unused")
-    public static void qualToTransProbsLog10(final double[][] dest, final byte[] insQuals, final byte[] delQuals, final byte[] gcps) {
-        final int readLength = insQuals.length;
-        if (delQuals.length != readLength) throw new IllegalArgumentException("deletion quality array length does not match insert quality array length: " + readLength + " != " + delQuals.length);
-        if (gcps.length != readLength) throw new IllegalArgumentException("deletion quality array length does not match insert quality array length: " + readLength + " != " + gcps.length);
-        if (dest.length < readLength + 1) throw new IllegalArgumentException("destination length is not enough for the read length: " + dest.length + " < " + readLength + " + 1");
-
-        for (int i = 0; i < readLength; i++)
-            qualToTransProbsLog10(dest[i+1],insQuals[i],delQuals[i],gcps[i]);
-    }
-
-    /**
-     * Returns a matrix with the log10 transition probabilities for a number of bases.
-     *
-     * <p/>
-     * The first dimension of the matrix correspond to the different bases where the first one is stored in position 1.
-     * Thus the position 0 is left empty and the length of the resulting matrix is actually {@code insQual.length + 1}.
-     * <p/>
-     * Each entry is the transition probability array for that base with a length of {@link #TRANS_PROB_ARRAY_LENGTH}.
-     *
-     * @param insQuals insertion qualities.
-     * @param delQuals deletion qualities.
-     * @param gcps gap-continuation penalty qualities.
-     *
-     * @throws NullPointerException if any of the input arrays is {@code null}.
-     * @throws IllegalArgumentException if {@code IllegalArgumentException}
-     *  if the input array don't have the same length.
-     *
-     * @return never {@code null}, an matrix of the dimensions explained above.
-     */
-    @SuppressWarnings("unused")
-    public static double[][] qualToTransProbsLog10(final byte[] insQuals, final byte[] delQuals, final byte[] gcps) {
-        final double[][] dest = createTransitionMatrix(insQuals.length);
-        qualToTransProbsLog10(dest,insQuals,delQuals,gcps);
-        return dest;
-    }
-
-    /**
-     * Creates a transition probability matrix large enough to work with sequences of a particular length.
-     *
-     * @param maxReadLength the maximum read length for the transition matrix.
-     *
-     * @return never {@code null}. A matrix of {@code maxReadLength + 1} by {@link #TRANS_PROB_ARRAY_LENGTH} positions.
-     */
-    public static double[][] createTransitionMatrix(final int maxReadLength) {
-        return new double[maxReadLength + 1][TRANS_PROB_ARRAY_LENGTH];
-    }
-
-    /**
-     * Returns the probability that neither of two event takes place.
-     * <p/>
-     *
-     * We assume that both event never occur together and that delQual is the conditional probability
-     * (qual. encoded) of the second event, given the first event didn't took place. So that the
-     * probability of no event is: <br/>
-     *
-     * We assume that both event never occur together so that the probability of no event is: <br/>
-     *
-     * <code>1 - ProbErr(insQual) - ProbErr(delQual)</code> <br/>
-     *
-     * @param insQual PhRED scaled quality/probability of the first event.
-     * @param delQual PhRED scaled quality/probability of the second event.
-     *
-     * @return a value between 0 and 1.
-     */
-    public static double matchToMatchProb(final byte insQual, final byte delQual) {
-        return matchToMatchProb((insQual & 0xFF), (delQual & 0xFF));
-    }
-
-    /**
-     * Returns the probability (log 10 scaled) that neither of two event, insertion and deletion, takes place.
-     * <p/>
-     *
-     * We assume that both event never occur together so that the probability of no event is: <br/>
-     *
-     * <code>1 - ProbErr(insQual) - ProbErr(delQual)</code> <br/>
-     *
-     * @param insQual PhRED scaled quality/probability of an insertion.
-     * @param delQual PhRED scaled quality/probability of a deletion.
-     *
-     * @return a value between 0 and -Inf.
-     */
-    public static double matchToMatchProbLog10(final byte insQual, final byte delQual) {
-        return matchToMatchProbLog10((insQual & 0xFF), (delQual & 0xFF));
-    }
-
-    /**
-     * Returns the probability that neither of two events, insertion and deletion, takes place.
-     * <p/>
-     *
-     * We assume that both event never occur together and that delQual is the conditional probability
-     * (qual. encoded) of the second event, given the first event didn't took place. So that the
-     * probability of no event is: <br/>
-     *
-     * We assume that both event never occur together so that the probability of no event is: <br/>
-     *
-     * <code>1 - ProbErr(insQual) - ProbErr(delQual)</code> <br/>
-     *
-     * @param insQual PhRED scaled quality/probability of an insertion.
-     * @param delQual PhRED scaled quality/probability of a deletion.
-     * @return a value between 0 and 1.
-     */
-    public static double matchToMatchProb(final int insQual, final int delQual) {
-        final int minQual;
-        final int maxQual;
-        if (insQual <= delQual) {
-            minQual = insQual;
-            maxQual = delQual;
-        } else {
-            minQual = delQual;
-            maxQual = insQual;
-        }
-
-        if (minQual < 0) throw new IllegalArgumentException("quality cannot be negative: " + minQual + " and " + maxQual);
-
-        return (QualityUtils.MAX_QUAL < maxQual) ?  1.0 - Math.pow(10, MathUtils.approximateLog10SumLog10(-0.1 * minQual, -0.1 * maxQual)) :
-                matchToMatchProb[((maxQual * (maxQual + 1)) >> 1) + minQual];
-    }
-
-    /**
-     * Returns the probability (log 10 scaled) that neither of two event takes place.
-     * <p/>
-     *
-     * We assume that both event never occur together and that delQual is the conditional probability (qual. encoded)
-     * of the second event, given the first event didn't took place. So that the probability of no event is: <br/>
-     *
-     * We assume that both event never occur together so that the probability of no event is: <br/>
-     *
-     * <code>1 - ProbErr(insQual) - ProbErr(delQual)</code> <br/>
-     *
-     * @param insQual PhRED scaled quality/probability of an insertion.
-     * @param delQual PhRED scaled quality/probability of a deletion.
-     *
-     * @return a value between 0 and -Inf.
-     */
-    public static double matchToMatchProbLog10(final int insQual, final int delQual) {
-        final int minQual;
-        final int maxQual;
-        if (insQual <= delQual) {
-            minQual = insQual;
-            maxQual = delQual;
-        } else {
-            minQual = delQual;
-            maxQual = insQual;
-        }
-        return (QualityUtils.MAX_QUAL < maxQual) ? Math.log1p (
-                - Math.min(1,Math.pow(10,
-                        MathUtils.approximateLog10SumLog10(-.1 * minQual, -.1 * maxQual)))) * INV_LN10 :
-                matchToMatchLog10[((maxQual * (maxQual + 1)) >> 1) + minQual];
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/pairhmm/PairHMMReadyHaplotypes.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/pairhmm/PairHMMReadyHaplotypes.java
deleted file mode 100644
index 2948404..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/pairhmm/PairHMMReadyHaplotypes.java
+++ /dev/null
@@ -1,182 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.pairhmm;
-
-import java.util.*;
-
-/**
- * Collection of haplotypes sorted in a conveniently way to be run efficiently by the PairHMM.
- *
- * TODO not yet in use but likely to be as part of making graph-base likelihood run faster.
- * TODO this could be extended to the classical PairHMM implementation simplifyling the PairHMM API.
- */
-public class PairHMMReadyHaplotypes implements Iterable<PairHMMReadyHaplotypes.Entry> {
-
-
-    public class Entry {
-
-        private final byte[] bases;
-
-        private double likelihood = Double.NaN;
-
-        protected Entry(final byte[] bases) {
-            this.bases = bases;
-        }
-
-        protected byte[] getBases() {
-            return bases;
-        }
-
-        public void setLikelihood(final double lk) {
-            likelihood = lk;
-        }
-
-        public double getLikelihood() {
-            return likelihood;
-        }
-
-    }
-
-    private Map<Entry,Map<Entry,Integer>> commonPrefixLength;
-
-    private SortedSet<Entry> entries;
-
-    private int capacity;
-
-    private final Comparator<Entry> comparator = new Comparator<Entry>() {
-        @Override
-        public int compare(final Entry o1, final Entry o2) {
-            final byte[] b1 = o1.bases;
-            final byte[] b2 = o2.bases;
-            Map<Entry,Integer> b1map = commonPrefixLength.get(o1);
-            if (b1map == null)
-                commonPrefixLength.put(o1, b1map = new HashMap<>(capacity));
-            Map<Entry,Integer> b2map = commonPrefixLength.get(o2);
-            if (b2map == null)
-                commonPrefixLength.put(o2, b2map = new HashMap<>(capacity));
-            final Integer previousI = b1map.get(o2) == null ? null : b1map.get(o2);
-            int i;
-            int result;
-            final int iLimit = Math.min(b1.length,b2.length);
-            if (previousI == null) {
-                for (i = 0; i < iLimit; i++)
-                    if (b1[i] != b2[i])
-                        break;
-                b1map.put(o2,i);
-                b2map.put(o1,i);
-            } else
-                i = previousI;
-
-            if (i < iLimit)
-                result = Byte.compare(b1[i],b2[i]);
-            else if (b1.length == b2.length)
-                result = 0;
-            else
-                result = b1.length < b2.length ? -1 : 1;
-            return result;
-        }
-    };
-
-    public PairHMMReadyHaplotypes(final int capacity) {
-        commonPrefixLength = new HashMap<>(capacity);
-        entries = new TreeSet<>(comparator);
-    }
-
-    public void add(final byte[] bases) {
-        final Entry entry = new Entry(bases);
-        entries.add(entry);
-    }
-
-    public int size() {
-        return entries.size();
-    }
-
-    @Override
-    public Iterator iterator() {
-        return new Iterator();
-    }
-
-    public class Iterator implements java.util.Iterator<Entry> {
-
-        private java.util.Iterator<Entry> actualIterator;
-        private Entry previousEntry;
-        private Entry currentEntry;
-        private int startIndex;
-        private int cmp;
-
-        private Iterator() {
-            actualIterator = entries.iterator();
-        }
-
-        public boolean hasNext() {
-            return actualIterator.hasNext();
-        }
-
-        public Entry next() {
-            previousEntry = currentEntry;
-            final Entry result = currentEntry = actualIterator.next();
-            startIndex = -1;
-            return result;
-        }
-
-        @Override
-        public void remove() {
-            throw new UnsupportedOperationException();
-        }
-
-        public byte[] bases() {
-            if (currentEntry == null)
-                throw new NoSuchElementException();
-            return currentEntry.bases;
-        }
-
-        public int startIndex() {
-            if (startIndex >= 0)
-                return startIndex;
-            else if (previousEntry == null)
-                return startIndex = 0;
-            else {
-                // The comparator will make sure the common-prefix-length is updated.
-                // The result in a field so that we avoid dead code elimination.
-                // perhaps I a bit paranohic but it does not harm to prevent.
-                cmp = comparator.compare(previousEntry,currentEntry);
-                return startIndex = commonPrefixLength.get(previousEntry).get(currentEntry);
-            }
-        }
-
-        @Override
-        public String toString() {
-            return super.toString() + " cmp = " + cmp;
-        }
-
-        public void setLikelihood(final double likelihood) {
-            if (currentEntry == null)
-                throw new NoSuchElementException();
-            currentEntry.setLikelihood(likelihood);
-        }
-    }
-
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/pileup/MergingPileupElementIterator.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/pileup/MergingPileupElementIterator.java
deleted file mode 100644
index d36d355..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/pileup/MergingPileupElementIterator.java
+++ /dev/null
@@ -1,76 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.pileup;
-
-import htsjdk.samtools.util.PeekableIterator;
-
-import java.util.Comparator;
-import java.util.Iterator;
-import java.util.PriorityQueue;
-
-/**
- * Merges multiple pileups broken down by sample.
- *
- * @author mhanna
- * @version 0.1
- */
-class MergingPileupElementIterator<PE extends PileupElement> implements Iterator<PE> {
-    private final PriorityQueue<PeekableIterator<PE>> perSampleIterators;
-
-    public MergingPileupElementIterator(PerSamplePileupElementTracker<PE> tracker) {
-        perSampleIterators = new PriorityQueue<PeekableIterator<PE>>(Math.max(1,tracker.getSamples().size()),new PileupElementIteratorComparator());
-        for(final String sample: tracker.getSamples()) {
-            PileupElementTracker<PE> trackerPerSample = tracker.getElements(sample);
-            if(trackerPerSample.size() != 0)
-                perSampleIterators.add(new PeekableIterator<PE>(trackerPerSample.iterator()));
-        }
-    }
-
-    public boolean hasNext() {
-        return !perSampleIterators.isEmpty();
-    }
-
-    public PE next() {
-        PeekableIterator<PE> currentIterator = perSampleIterators.remove();
-        PE current = currentIterator.next();
-        if(currentIterator.hasNext())
-            perSampleIterators.add(currentIterator);
-        return current;
-    }
-
-    public void remove() {
-        throw new UnsupportedOperationException("Cannot remove from a merging iterator.");
-    }
-
-    /**
-     * Compares two peekable iterators consisting of pileup elements.
-     */
-    private class PileupElementIteratorComparator implements Comparator<PeekableIterator<PE>> {
-        public int compare(PeekableIterator<PE> lhs, PeekableIterator<PE> rhs) {
-            return rhs.peek().getOffset() - lhs.peek().getOffset();
-        }
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/pileup/PileupElement.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/pileup/PileupElement.java
deleted file mode 100644
index 4db0927..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/pileup/PileupElement.java
+++ /dev/null
@@ -1,539 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.pileup;
-
-import com.google.java.contract.Ensures;
-import com.google.java.contract.Requires;
-import htsjdk.samtools.CigarElement;
-import htsjdk.samtools.CigarOperator;
-import org.broadinstitute.gatk.utils.BaseUtils;
-import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
-
-import java.util.Arrays;
-import java.util.EnumSet;
-import java.util.LinkedList;
-import java.util.List;
-
-/**
- * Created by IntelliJ IDEA.
- * User: depristo
- * Date: Apr 14, 2009
- * Time: 8:54:05 AM
- */
-public class PileupElement implements Comparable<PileupElement> {
-    private final static LinkedList<CigarElement> EMPTY_LINKED_LIST = new LinkedList<>();
-
-    private final static EnumSet<CigarOperator> ON_GENOME_OPERATORS =
-            EnumSet.of(CigarOperator.M, CigarOperator.EQ, CigarOperator.X, CigarOperator.D);
-
-    public static final byte DELETION_BASE = BaseUtils.Base.D.base;
-    public static final byte DELETION_QUAL = (byte) 16;
-    public static final byte A_FOLLOWED_BY_INSERTION_BASE = (byte) 87;
-    public static final byte C_FOLLOWED_BY_INSERTION_BASE = (byte) 88;
-    public static final byte T_FOLLOWED_BY_INSERTION_BASE = (byte) 89;
-    public static final byte G_FOLLOWED_BY_INSERTION_BASE = (byte) 90;
-
-    protected final GATKSAMRecord read;         // the read this base belongs to
-    protected final int offset;                 // the offset in the bases array for this base
-
-    private final CigarElement currentCigarElement;
-    private final int currentCigarOffset;
-    private final int offsetInCurrentCigar;
-
-    /**
-     * Create a new pileup element
-     *
-     * @param read a non-null read to pileup
-     * @param baseOffset the offset into the read's base / qual vector aligned to this position on the genome. If the
-     *                   current cigar element is a deletion, offset should be the offset of the last M/=/X position.
-     * @param currentElement a non-null CigarElement that indicates the cigar element aligning the read to the genome
-     * @param currentCigarOffset the offset of currentElement in read.getCigar().getElement(currentCigarOffset) == currentElement)
-     * @param offsetInCurrentCigar how far into the currentElement are we in our alignment to the genome?
-     */
-    public PileupElement(final GATKSAMRecord read, final int baseOffset,
-                         final CigarElement currentElement, final int currentCigarOffset,
-                         final int offsetInCurrentCigar) {
-        assert currentElement != null;
-
-        this.read = read;
-        this.offset = baseOffset;
-        this.currentCigarElement = currentElement;
-        this.currentCigarOffset = currentCigarOffset;
-        this.offsetInCurrentCigar = offsetInCurrentCigar;
-
-        // for performance regions these are assertions
-        assert this.read != null;
-        assert this.offset >= 0 && this.offset < this.read.getReadLength();
-        assert this.currentCigarOffset >= 0;
-        assert this.currentCigarOffset < read.getCigarLength();
-        assert this.offsetInCurrentCigar >= 0;
-        assert this.offsetInCurrentCigar < currentElement.getLength();
-    }
-
-    /**
-     * Create a new PileupElement that's a copy of toCopy
-     * @param toCopy the element we want to copy
-     */
-    public PileupElement(final PileupElement toCopy) {
-        this(toCopy.read, toCopy.offset, toCopy.currentCigarElement, toCopy.currentCigarOffset, toCopy.offsetInCurrentCigar);
-    }
-
-    /**
-     * Is this element a deletion w.r.t. the reference genome?
-     *
-     * @return true if this is a deletion, false otherwise
-     */
-    public boolean isDeletion() {
-        return currentCigarElement.getOperator() == CigarOperator.D;
-    }
-
-    /**
-     * Is the current element immediately before a deletion, but itself not a deletion?
-     *
-     * Suppose we are aligning a read with cigar 3M2D1M.  This function is true
-     * if we are in the last cigar position of the 3M, but not if we are in the 2D itself.
-     *
-     * @return true if the next alignment position is a deletion w.r.t. the reference genome
-     */
-    public boolean isBeforeDeletionStart() {
-        return ! isDeletion() && atEndOfCurrentCigar() && hasOperator(getNextOnGenomeCigarElement(), CigarOperator.D);
-    }
-
-    /**
-     * Is the current element immediately after a deletion, but itself not a deletion?
-     *
-     * Suppose we are aligning a read with cigar 1M2D3M.  This function is true
-     * if we are in the first cigar position of the 3M, but not if we are in the 2D itself or
-     * in any but the first position of the 3M.
-     *
-     * @return true if the previous alignment position is a deletion w.r.t. the reference genome
-     */
-    public boolean isAfterDeletionEnd() {
-        return ! isDeletion() && atStartOfCurrentCigar() && hasOperator(getPreviousOnGenomeCigarElement(), CigarOperator.D);
-    }
-
-    /**
-     * Get the read for this pileup element
-     * @return a non-null GATKSAMRecord
-     */
-    @Ensures("result != null")
-    public GATKSAMRecord getRead() {
-        return read;
-    }
-
-    /**
-     * Get the offset of the this element into the read that aligns that read's base to this genomic position.
-     *
-     * If the current element is a deletion then offset is the offset of the last base containing offset.
-     *
-     * @return a valid offset into the read's bases
-     */
-    @Ensures({"result >= 0", "result <= read.getReadLength()"})
-    public int getOffset() {
-        return offset;
-    }
-
-    /**
-     * Get the base aligned to the genome at this location
-     *
-     * If the current element is a deletion returns DELETION_BASE
-     *
-     * @return a base encoded as a byte
-     */
-    @Ensures("result != DELETION_BASE || (isDeletion() && result == DELETION_BASE)")
-    public byte getBase() {
-        return isDeletion() ? DELETION_BASE : read.getReadBases()[offset];
-    }
-
-    @Deprecated
-    public int getBaseIndex() {
-        return BaseUtils.simpleBaseToBaseIndex(getBase());
-    }
-
-    /**
-     * Get the base quality score of the base at this aligned position on the genome
-     * @return a phred-scaled quality score as a byte
-     */
-    public byte getQual() {
-        return isDeletion() ? DELETION_QUAL : read.getBaseQualities()[offset];
-    }
-
-    /**
-     * Get the Base Insertion quality at this pileup position
-     * @return a phred-scaled quality score as a byte
-     */
-    public byte getBaseInsertionQual() {
-        return isDeletion() ? DELETION_QUAL : read.getBaseInsertionQualities()[offset];
-    }
-
-    /**
-     * Get the Base Deletion quality at this pileup position
-     * @return a phred-scaled quality score as a byte
-     */
-    public byte getBaseDeletionQual() {
-        return isDeletion() ? DELETION_QUAL : read.getBaseDeletionQualities()[offset];
-    }
-
-    /**
-     * Get the length of an immediately following insertion or deletion event, or 0 if no such event exists
-     *
-     * Only returns a positive value when this pileup element is immediately before an indel.  Being
-     * immediately before a deletion means that this pileup element isn't an deletion, and that the
-     * next genomic alignment for this read is a deletion.  For the insertion case, this means
-     * that an insertion cigar occurs immediately after this element, between this one and the
-     * next genomic position.
-     *
-     * Note this function may be expensive, so multiple uses should be cached by the caller
-     *
-     * @return length of the event (number of inserted or deleted bases), or 0
-     */
-    @Ensures("result >= 0")
-    public int getLengthOfImmediatelyFollowingIndel() {
-        final CigarElement element = getNextIndelCigarElement();
-        return element == null ? 0 : element.getLength();
-    }
-
-    /**
-     * Helpful function to get the immediately following cigar element, for an insertion or deletion
-     *
-     * if this state precedes a deletion (i.e., next position on genome) or insertion (immediately between
-     * this and the next position) returns the CigarElement corresponding to this event.  Otherwise returns
-     * null.
-     *
-     * @return a CigarElement, or null if the next alignment state ins't an insertion or deletion.
-     */
-    private CigarElement getNextIndelCigarElement() {
-        if ( isBeforeDeletionStart() ) {
-            final CigarElement element = getNextOnGenomeCigarElement();
-            if ( element == null || element.getOperator() != CigarOperator.D )
-                throw new IllegalStateException("Immediately before deletion but the next cigar element isn't a deletion " + element);
-            return element;
-        } else if ( isBeforeInsertion() ) {
-            final CigarElement element = getBetweenNextPosition().get(0);
-            if ( element.getOperator() != CigarOperator.I )
-                throw new IllegalStateException("Immediately before insertion but the next cigar element isn't an insertion " + element);
-            return element;
-        } else {
-            return null;
-        }
-    }
-
-    /**
-     * Get the bases for an insertion that immediately follows this alignment state, or null if none exists
-     *
-     * @see #getLengthOfImmediatelyFollowingIndel() for details on the meaning of immediately.
-     *
-     * If the immediately following state isn't an insertion, returns null
-     *
-     * @return actual sequence of inserted bases, or a null if the event is a deletion or if there is no event in the associated read.
-     */
-    @Ensures("result == null || result.length() == getLengthOfImmediatelyFollowingIndel()")
-    public String getBasesOfImmediatelyFollowingInsertion() {
-        final CigarElement element = getNextIndelCigarElement();
-        if ( element != null && element.getOperator() == CigarOperator.I ) {
-            final int getFrom = offset + 1;
-            final byte[] bases = Arrays.copyOfRange(read.getReadBases(), getFrom, getFrom + element.getLength());
-            return new String(bases);
-        } else
-            return null;
-    }
-
-    /**
-     * Get the mapping quality of the read of this element
-     * @return the mapping quality of the underlying SAM record
-     */
-    public int getMappingQual() {
-        return read.getMappingQuality();
-    }
-
-    @Ensures("result != null")
-    public String toString() {
-        return String.format("%s @ %d = %c Q%d", getRead().getReadName(), getOffset(), (char) getBase(), getQual());
-    }
-
-    @Override
-    public int compareTo(final PileupElement pileupElement) {
-        if (offset < pileupElement.offset)
-            return -1;
-        else if (offset > pileupElement.offset)
-            return 1;
-        else if (read.getAlignmentStart() < pileupElement.read.getAlignmentStart())
-            return -1;
-        else if (read.getAlignmentStart() > pileupElement.read.getAlignmentStart())
-            return 1;
-        else
-            return 0;
-    }
-
-    // --------------------------------------------------------------------------
-    //
-    // Reduced read accessors
-    //
-    // --------------------------------------------------------------------------
-
-    /**
-     * Get the cigar element aligning this element to the genome
-     * @return a non-null CigarElement
-     */
-    @Ensures("result != null")
-    public CigarElement getCurrentCigarElement() {
-        return currentCigarElement;
-    }
-
-    /**
-     * Get the offset of this cigar element in the Cigar of the current read (0-based)
-     *
-     * Suppose the cigar is 1M2D3I4D.  If we are in the 1M state this function returns
-     * 0.  If we are in 2D, the result is 1.  If we are in the 4D, the result is 3.
-     *
-     * @return an offset into the read.getCigar() that brings us to the current cigar element
-     */
-    public int getCurrentCigarOffset() {
-        return currentCigarOffset;
-    }
-
-    /**
-     * Get the offset into the *current* cigar element for this alignment position
-     *
-     * We can be anywhere from offset 0 (first position) to length - 1 of the current
-     * cigar element aligning us to this genomic position.
-     *
-     * @return a valid offset into the current cigar element
-     */
-    @Ensures({"result >= 0", "result < getCurrentCigarElement().getLength()"})
-    public int getOffsetInCurrentCigar() {
-        return offsetInCurrentCigar;
-    }
-
-    /**
-     * Get the cigar elements that occur before the current position but after the previous position on the genome
-     *
-     * For example, if we are in the 3M state of 1M2I3M state then 2I occurs before this position.
-     *
-     * Note that this function does not care where we are in the current cigar element.  In the previous
-     * example this list of elements contains the 2I state regardless of where you are in the 3M.
-     *
-     * Note this returns the list of all elements that occur between this and the prev site, so for
-     * example we might have 5S10I2M and this function would return [5S, 10I].
-     *
-     * @return a non-null list of CigarElements
-     */
-    @Ensures("result != null")
-    public LinkedList<CigarElement> getBetweenPrevPosition() {
-        return atStartOfCurrentCigar() ? getBetween(Direction.PREV) : EMPTY_LINKED_LIST;
-    }
-
-    /**
-     * Get the cigar elements that occur after the current position but before the next position on the genome
-     *
-     * @see #getBetweenPrevPosition() for more details
-     *
-     * @return a non-null list of CigarElements
-     */
-    @Ensures("result != null")
-    public LinkedList<CigarElement> getBetweenNextPosition() {
-        return atEndOfCurrentCigar() ? getBetween(Direction.NEXT) : EMPTY_LINKED_LIST;
-    }
-
-    /** for some helper functions */
-    private enum Direction { PREV, NEXT }
-
-    /**
-     * Helper function to get cigar elements between this and either the prev or next genomic position
-     *
-     * @param direction PREVIOUS if we want before, NEXT if we want after
-     * @return a non-null list of cigar elements between this and the neighboring position in direction
-     */
-    @Ensures("result != null")
-    private LinkedList<CigarElement> getBetween(final Direction direction) {
-        final int increment = direction == Direction.NEXT ? 1 : -1;
-        LinkedList<CigarElement> elements = null;
-        final int nCigarElements = read.getCigarLength();
-        for ( int i = currentCigarOffset + increment; i >= 0 && i < nCigarElements; i += increment) {
-            final CigarElement elt = read.getCigar().getCigarElement(i);
-            if ( ON_GENOME_OPERATORS.contains(elt.getOperator()) )
-                break;
-            else {
-                // optimization: don't allocate list if not necessary
-                if ( elements == null )
-                    elements = new LinkedList<CigarElement>();
-
-                if ( increment > 0 )
-                    // to keep the list in the right order, if we are incrementing positively add to the end
-                    elements.add(elt);
-                else
-                    // counting down => add to front
-                    elements.addFirst(elt);
-            }
-        }
-
-        // optimization: elements is null because nothing got added, just return the empty list
-        return elements == null ? EMPTY_LINKED_LIST : elements;
-    }
-
-    /**
-     * Get the cigar element of the previous genomic aligned position
-     *
-     * For example, we might have 1M2I3M, and be sitting at the someone in the 3M.  This
-     * function would return 1M, as the 2I isn't on the genome.  Note this function skips
-     * all of the positions that would occur in the current element.  So the result
-     * is always 1M regardless of whether we're in the first, second, or third position of the 3M
-     * cigar.
-     *
-     * @return a CigarElement, or null (indicating that no previous element exists)
-     */
-    @Ensures("result == null || ON_GENOME_OPERATORS.contains(result.getOperator())")
-    public CigarElement getPreviousOnGenomeCigarElement() {
-        return getNeighboringOnGenomeCigarElement(Direction.PREV);
-    }
-
-    /**
-     * Get the cigar element of the next genomic aligned position
-     *
-     * @see #getPreviousOnGenomeCigarElement() for more details
-     *
-     * @return a CigarElement, or null (indicating that no next element exists)
-     */
-    @Ensures("result == null || ON_GENOME_OPERATORS.contains(result.getOperator())")
-    public CigarElement getNextOnGenomeCigarElement() {
-        return getNeighboringOnGenomeCigarElement(Direction.NEXT);
-    }
-
-    /**
-     * Helper function to get the cigar element of the next or previous genomic position
-     * @param direction the direction to look in
-     * @return a CigarElement, or null if no such element exists
-     */
-    @Ensures("result == null || ON_GENOME_OPERATORS.contains(result.getOperator())")
-    private CigarElement getNeighboringOnGenomeCigarElement(final Direction direction) {
-        final int increment = direction == Direction.NEXT ? 1 : -1;
-        final int nCigarElements = read.getCigarLength();
-
-        for ( int i = currentCigarOffset + increment; i >= 0 && i < nCigarElements; i += increment) {
-            final CigarElement elt = read.getCigar().getCigarElement(i);
-            if ( ON_GENOME_OPERATORS.contains(elt.getOperator()) )
-                return elt;
-        }
-
-        // getting here means that you didn't find anything
-        return null;
-    }
-
-    /**
-     * Does the cigar element (which may be null) have operation toMatch?
-     *
-     * @param maybeCigarElement a CigarElement that might be null
-     * @param toMatch a CigarOperator we want to match against the one in maybeCigarElement
-     * @return true if maybeCigarElement isn't null and has operator toMatch
-     */
-    @Requires("toMatch != null")
-    private boolean hasOperator(final CigarElement maybeCigarElement, final CigarOperator toMatch) {
-        return maybeCigarElement != null && maybeCigarElement.getOperator() == toMatch;
-    }
-
-    /**
-     * Does an insertion occur immediately before the current position on the genome?
-     *
-     * @return true if yes, false if no
-     */
-    public boolean isAfterInsertion() { return isAfter(getBetweenPrevPosition(), CigarOperator.I); }
-
-    /**
-     * Does an insertion occur immediately after the current position on the genome?
-     *
-     * @return true if yes, false if no
-     */
-    public boolean isBeforeInsertion() { return isBefore(getBetweenNextPosition(), CigarOperator.I); }
-
-    /**
-     * Does a soft-clipping event occur immediately before the current position on the genome?
-     *
-     * @return true if yes, false if no
-     */
-    public boolean isAfterSoftClip() { return isAfter(getBetweenPrevPosition(), CigarOperator.S); }
-
-    /**
-     * Does a soft-clipping event occur immediately after the current position on the genome?
-     *
-     * @return true if yes, false if no
-     */
-    public boolean isBeforeSoftClip() { return isBefore(getBetweenNextPosition(), CigarOperator.S); }
-
-    /**
-     * Does a soft-clipping event occur immediately before or after the current position on the genome?
-     *
-     * @return true if yes, false if no
-     */
-    public boolean isNextToSoftClip() { return isAfterSoftClip() || isBeforeSoftClip(); }
-
-    /**
-     * Is the current position at the end of the current cigar?
-     *
-     * For example, if we are in element 3M, this function returns true if we are at offsetInCurrentCigar
-     * of 2, but not 0 or 1.
-     *
-     * @return true if we're at the end of the current cigar
-     */
-    public boolean atEndOfCurrentCigar() {
-        return offsetInCurrentCigar == currentCigarElement.getLength() - 1;
-    }
-
-    /**
-     * Is the current position at the start of the current cigar?
-     *
-     * For example, if we are in element 3M, this function returns true if we are at offsetInCurrentCigar
-     * of 0, but not 1 or 2.
-     *
-     * @return true if we're at the start of the current cigar
-     */
-    public boolean atStartOfCurrentCigar() {
-        return offsetInCurrentCigar == 0;
-    }
-
-    /**
-     * Is op the last element in the list of elements?
-     *
-     * @param elements the elements to examine
-     * @param op the op we want the last element's op to equal
-     * @return true if op == last(elements).op
-     */
-    @Requires({"elements != null", "op != null"})
-    private boolean isAfter(final LinkedList<CigarElement> elements, final CigarOperator op) {
-        return ! elements.isEmpty() && elements.peekLast().getOperator() == op;
-    }
-
-    /**
-     * Is op the first element in the list of elements?
-     *
-     * @param elements the elements to examine
-     * @param op the op we want the last element's op to equal
-     * @return true if op == first(elements).op
-     */
-    @Requires({"elements != null", "op != null"})
-    private boolean isBefore(final List<CigarElement> elements, final CigarOperator op) {
-        return ! elements.isEmpty() && elements.get(0).getOperator() == op;
-    }
-}
\ No newline at end of file
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/pileup/PileupElementFilter.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/pileup/PileupElementFilter.java
deleted file mode 100644
index 7f82709..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/pileup/PileupElementFilter.java
+++ /dev/null
@@ -1,36 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.pileup;
-
-/**
- * A filtering interface for pileup elements.
- *
- * @author mhanna
- * @version 0.1
- */
-public interface PileupElementFilter {
-    public boolean allow(final PileupElement pileupElement);
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/pileup/PileupElementTracker.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/pileup/PileupElementTracker.java
deleted file mode 100644
index 7d49fcc..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/pileup/PileupElementTracker.java
+++ /dev/null
@@ -1,154 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.pileup;
-
-import org.apache.commons.collections.iterators.IteratorChain;
-
-import java.util.*;
-
-/**
- * Javadoc goes here.
- *
- * @author mhanna
- * @version 0.1
- */
-abstract class PileupElementTracker<PE extends PileupElement> implements Iterable<PE> {
-    public abstract int size();
-
-    /**
-     * Iterate through the PEs here, but in any order, which may improve performance
-     * if you don't care about the underlying order the reads are coming to you in.
-     * @return an iteratable over all pileup elements in this tracker
-     */
-    public abstract Iterable<PE> unorderedIterable();
-
-    /**
-     * Same as @see #unorderedIterable but the actual iterator itself
-     * @return
-     */
-    public Iterator<PE> unorderedIterator() { return unorderedIterable().iterator(); }
-
-    public abstract PileupElementTracker<PE> copy();
-}
-
-class UnifiedPileupElementTracker<PE extends PileupElement> extends PileupElementTracker<PE> {
-    private final List<PE> pileup;
-
-    @Override
-    public UnifiedPileupElementTracker<PE> copy() {
-        UnifiedPileupElementTracker<PE> result = new UnifiedPileupElementTracker<PE>();
-        for(PE element : pileup)
-            result.add(element);
-        return result;
-    }
-
-    public UnifiedPileupElementTracker() { pileup = new LinkedList<PE>(); }
-    public UnifiedPileupElementTracker(List<PE> pileup) { this.pileup = pileup; }
-
-    public void add(PE element) {
-        pileup.add(element);
-    }
-
-    public PE get(int index) {
-        return pileup.get(index);
-    }
-
-    public int size() {
-        return pileup.size();
-    }
-
-    public Iterator<PE> iterator() { return pileup.iterator(); }
-    public Iterable<PE> unorderedIterable() { return this; }
-}
-
-class PerSamplePileupElementTracker<PE extends PileupElement> extends PileupElementTracker<PE> {
-    private final Map<String,PileupElementTracker<PE>> pileup;
-    private int size = 0;
-
-    public PerSamplePileupElementTracker() {
-        pileup = new HashMap<String,PileupElementTracker<PE>>();
-    }
-
-    public PerSamplePileupElementTracker<PE> copy() {
-        PerSamplePileupElementTracker<PE> result = new PerSamplePileupElementTracker<PE>();
-        for (Map.Entry<String, PileupElementTracker<PE>> entry : pileup.entrySet())
-            result.addElements(entry.getKey(), entry.getValue());
-
-        return result;
-    }
-
-    /**
-     * Gets a list of all the samples stored in this pileup.
-     * @return List of samples in this pileup.
-     */
-    public Collection<String> getSamples() {
-        return pileup.keySet();
-    }
-
-    public PileupElementTracker<PE> getElements(final String sample) {
-        return pileup.get(sample);
-    }
-
-    public PileupElementTracker<PE> getElements(final Collection<String> selectSampleNames) {
-        PerSamplePileupElementTracker<PE> result = new PerSamplePileupElementTracker<PE>();
-        for (final String sample :  selectSampleNames) {
-            result.addElements(sample, pileup.get(sample));
-        }
-        return result;
-    }
-
-    public void addElements(final String sample, PileupElementTracker<PE> elements) {
-        pileup.put(sample,elements);
-        size += elements.size();
-    }
-
-    public Iterator<PE> iterator() { return new MergingPileupElementIterator<PE>(this); }
-
-    public int size() {
-        return size;
-    }
-
-
-    public Iterable<PE> unorderedIterable() {
-        return new Iterable<PE>() {
-            @Override
-            public Iterator<PE> iterator() {
-                return new Iterator<PE>() {
-                    final private IteratorChain chain = new IteratorChain();
-
-                    { // initialize the chain with the unordered iterators of the per sample pileups
-                        for ( PileupElementTracker<PE> pet : pileup.values() ) {
-                            chain.addIterator(pet.unorderedIterator());
-                        }
-                    }
-                    @Override public boolean hasNext() { return chain.hasNext(); }
-                    @Override public PE next() { return (PE)chain.next(); }
-                    @Override public void remove() { throw new UnsupportedOperationException("Cannot remove"); }
-                };
-            }
-        };
-    }
-}
\ No newline at end of file
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/pileup/ReadBackedPileup.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/pileup/ReadBackedPileup.java
deleted file mode 100644
index e4394f1..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/pileup/ReadBackedPileup.java
+++ /dev/null
@@ -1,295 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.pileup;
-
-import org.broadinstitute.gatk.utils.GenomeLoc;
-import org.broadinstitute.gatk.utils.HasGenomeLocation;
-import org.broadinstitute.gatk.utils.fragments.FragmentCollection;
-import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
-
-import java.util.Collection;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-
-/**
- * A data retrieval interface for accessing parts of the pileup.
- *
- * @author mhanna
- * @version 0.1
- */
-public interface ReadBackedPileup extends Iterable<PileupElement>, HasGenomeLocation {
-    /**
-     * Returns a new ReadBackedPileup that is free of deletion spanning reads in this pileup.  Note that this
-     * does not copy the data, so both ReadBackedPileups should not be changed.  Doesn't make an unnecessary copy
-     * of the pileup (just returns this) if there are no deletions in the pileup.
-     *
-     * @return
-     */
-    public ReadBackedPileup getPileupWithoutDeletions();
-
-    /**
-     * Returns a new ReadBackedPileup where only one read from an overlapping read
-     * pair is retained.  If the two reads in question disagree to their basecall,
-     * neither read is retained.  If they agree on the base, the read with the higher
-     * quality observation is retained
-     *
-     * @return the newly filtered pileup
-     */
-    public ReadBackedPileup getOverlappingFragmentFilteredPileup();
-
-    /**
-     * Returns a new ReadBackedPileup where only one read from an overlapping read
-     * pair is retained.  If discardDiscordant and the two reads in question disagree to their basecall,
-     * neither read is retained.  Otherwise, the read with the higher
-     * quality (base or mapping, depending on baseQualNotMapQual) observation is retained
-     *
-     * @return the newly filtered pileup
-     */
-    public ReadBackedPileup getOverlappingFragmentFilteredPileup(boolean discardDiscordant, boolean baseQualNotMapQual);
-
-    /**
-     * Returns a new ReadBackedPileup that is free of mapping quality zero reads in this pileup.  Note that this
-     * does not copy the data, so both ReadBackedPileups should not be changed.  Doesn't make an unnecessary copy
-     * of the pileup (just returns this) if there are no MQ0 reads in the pileup.
-     *
-     * @return
-     */
-    public ReadBackedPileup getPileupWithoutMappingQualityZeroReads();
-
-    /**
-     * Gets the pileup consisting of only reads on the positive strand.
-     * @return A read-backed pileup consisting only of reads on the positive strand.
-     */
-    public ReadBackedPileup getPositiveStrandPileup();
-
-    /**
-     * Gets the pileup consisting of only reads on the negative strand.
-     * @return A read-backed pileup consisting only of reads on the negative strand.
-     */
-    public ReadBackedPileup getNegativeStrandPileup();
-
-    /**
-     * Gets a pileup consisting of all those elements passed by a given filter.
-     * @param filter Filter to use when testing for elements.
-     * @return a pileup without the given filtered elements.
-     */
-    public ReadBackedPileup getFilteredPileup(PileupElementFilter filter);
-
-    /** Returns subset of this pileup that contains only bases with quality >= minBaseQ, coming from
-     * reads with mapping qualities >= minMapQ. This method allocates and returns a new instance of ReadBackedPileup.
-     * @param minBaseQ
-     * @param minMapQ
-     * @return
-     */
-    public ReadBackedPileup getBaseAndMappingFilteredPileup( int minBaseQ, int minMapQ );
-
-    /** Returns subset of this pileup that contains only bases with quality >= minBaseQ.
-     * This method allocates and returns a new instance of ReadBackedPileup.
-     * @param minBaseQ
-     * @return
-     */
-    public ReadBackedPileup getBaseFilteredPileup( int minBaseQ );
-
-    /** Returns subset of this pileup that contains only bases coming from reads with mapping quality >= minMapQ.
-     * This method allocates and returns a new instance of ReadBackedPileup.
-     * @param minMapQ
-     * @return
-     */
-    public ReadBackedPileup getMappingFilteredPileup( int minMapQ );
-
-    /**
-     * Returns a pileup randomly downsampled to the desiredCoverage.
-     *
-     * @param desiredCoverage
-     * @return
-     */
-    public ReadBackedPileup getDownsampledPileup(int desiredCoverage);
-
-    /**
-     * Gets a collection of all the read groups represented in this pileup.
-     * @return A collection of all the read group ids represented in this pileup.
-     */
-    public Collection<String> getReadGroups();
-
-    /**
-     * Gets all the reads associated with a given read group.
-     * @param readGroupId Identifier for the read group.
-     * @return A pileup containing only the reads in the given read group.
-     */
-    public ReadBackedPileup getPileupForReadGroup(String readGroupId);
-
-    /**
-     * Gets all the reads associated with a given read groups.
-     * @param rgSet Set of identifiers for the read group.
-     * @return A pileup containing only the reads in the given read groups.
-     */
-    public ReadBackedPileup getPileupForReadGroups(final HashSet<String> rgSet);
-    
-    /**
-     * Gets all reads in a given lane id. (Lane ID is the read group
-     * id stripped of the last .XX sample identifier added by the GATK).
-     * @param laneID The read group ID without the sample identifier added by the GATK.
-     * @return A pileup containing the reads from all samples in the given lane.
-     */
-    public ReadBackedPileup getPileupForLane(String laneID);
-
-    /**
-     * Gets a collection of *names* of all the samples stored in this pileup.
-     * @return Collection of names
-     */
-    public Collection<String> getSamples();
-
-
-    /**
-     * Gets the particular subset of this pileup for all the given sample names.
-     * @param sampleNames Name of the sample to use.
-     * @return A subset of this pileup containing only reads with the given sample.
-     */
-    public ReadBackedPileup getPileupForSamples(Collection<String> sampleNames);
-
-    /**
-     * Gets the particular subset of this pileup for each given sample name.
-     *
-     * Same as calling getPileupForSample for all samples, but in O(n) instead of O(n^2).
-     *
-     * @param sampleNames Name of the sample to use.
-     * @return A subset of this pileup containing only reads with the given sample.
-     */
-    public Map<String, ReadBackedPileup> getPileupsForSamples(Collection<String> sampleNames);
-
-
-    /**
-     * Gets the particular subset of this pileup with the given sample name.
-     * @param sampleName Name of the sample to use.
-     * @return A subset of this pileup containing only reads with the given sample.
-     */
-    public ReadBackedPileup getPileupForSample(String sampleName);
-    
-    /**
-     * Simple useful routine to count the number of deletion bases in this pileup
-     *
-     * @return
-     */
-    public int getNumberOfDeletions();
-
-    /**
-     * Simple useful routine to count the number of deletion bases in at the next position this pileup
-     *
-     * @return
-     */
-    public int getNumberOfDeletionsAfterThisElement();
-
-    /**
-     * Simple useful routine to count the number of insertions right after this pileup
-     *
-     * @return
-     */
-    public int getNumberOfInsertionsAfterThisElement();
-
-    public int getNumberOfMappingQualityZeroReads();
-
-    /**
-     * @return the number of physical elements in this pileup (a reduced read is counted just once)
-     */
-    public int getNumberOfElements();
-
-    /**
-     * @return the number of abstract elements in this pileup (reduced reads are expanded to count all reads that they represent)
-     */
-    public int depthOfCoverage();
-
-    /**
-     * @return true if there are 0 elements in the pileup, false otherwise
-     */
-    public boolean isEmpty();
-
-    /**
-     * @return the location of this pileup
-     */
-    public GenomeLoc getLocation();
-
-    /**
-     * Get counts of A, C, G, T in order, which returns a int[4] vector with counts according
-     * to BaseUtils.simpleBaseToBaseIndex for each base.
-     *
-     * @return
-     */
-    public int[] getBaseCounts();
-
-    public String getPileupString(Character ref);
-
-    /**
-     * Returns a list of the reads in this pileup. Note this call costs O(n) and allocates fresh lists each time
-     * @return
-     */
-    public List<GATKSAMRecord> getReads();
-
-    /**
-     * Returns a list of the offsets in this pileup. Note this call costs O(n) and allocates fresh lists each time
-     * @return
-     */
-    public List<Integer> getOffsets();
-
-    /**
-     * Returns an array of the bases in this pileup. Note this call costs O(n) and allocates fresh array each time
-     * @return
-     */
-    public byte[] getBases();
-
-    /**
-    * Returns an array of the quals in this pileup. Note this call costs O(n) and allocates fresh array each time
-    * @return
-    */
-    public byte[] getQuals();
-
-    /**
-     * Get an array of the mapping qualities
-     * @return
-     */
-    public int[] getMappingQuals();
-
-    /**
-     * Returns a new ReadBackedPileup that is sorted by start coordinate of the reads.
-     *
-     * @return
-     */
-    public ReadBackedPileup getStartSortedPileup();
-
-    /**
-     * Converts this pileup into a FragmentCollection (see FragmentUtils for documentation)
-     * @return
-     */
-    public FragmentCollection<PileupElement> toFragments();
-
-    /**
-     * Creates a full copy (not shallow) of the ReadBacked Pileup
-     *
-     * @return
-     */
-    public ReadBackedPileup copy();
-
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/pileup/ReadBackedPileupImpl.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/pileup/ReadBackedPileupImpl.java
deleted file mode 100644
index 840fbeb..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/pileup/ReadBackedPileupImpl.java
+++ /dev/null
@@ -1,1040 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.pileup;
-
-import org.broadinstitute.gatk.engine.GenomeAnalysisEngine;
-import org.broadinstitute.gatk.utils.GenomeLoc;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-import org.broadinstitute.gatk.utils.fragments.FragmentCollection;
-import org.broadinstitute.gatk.utils.fragments.FragmentUtils;
-import org.broadinstitute.gatk.utils.locusiterator.LocusIteratorByState;
-import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
-import org.broadinstitute.gatk.utils.BaseUtils;
-
-import java.util.*;
-
-public class ReadBackedPileupImpl implements ReadBackedPileup {
-    protected final GenomeLoc loc;
-    protected final PileupElementTracker<PileupElement> pileupElementTracker;
-
-    private final static int UNINITIALIZED_CACHED_INT_VALUE = -1;
-
-    /**
-     * Different then number of elements due to reduced reads
-     */
-    private int depthOfCoverage = UNINITIALIZED_CACHED_INT_VALUE;
-    private int nDeletions = UNINITIALIZED_CACHED_INT_VALUE;            // cached value of the number of deletions
-    private int nMQ0Reads = UNINITIALIZED_CACHED_INT_VALUE;             // cached value of the number of MQ0 reads
-
-    /**
-     * Create a new version of a read backed pileup at loc, using the reads and their corresponding
-     * offsets.  This pileup will contain a list, in order of the reads, of the piled bases at
-     * reads[i] for all i in offsets.  Does not make a copy of the data, so it's not safe to
-     * go changing the reads.
-     *
-     * @param loc     The genome loc to associate reads wotj
-     * @param reads
-     * @param offsets
-     */
-    public ReadBackedPileupImpl(GenomeLoc loc, List<GATKSAMRecord> reads, List<Integer> offsets) {
-        this.loc = loc;
-        this.pileupElementTracker = readsOffsets2Pileup(reads, offsets);
-    }
-
-
-    /**
-     * Create a new version of a read backed pileup at loc without any aligned reads
-     */
-    public ReadBackedPileupImpl(GenomeLoc loc) {
-        this(loc, new UnifiedPileupElementTracker<PileupElement>());
-    }
-
-    /**
-     * Create a new version of a read backed pileup at loc, using the reads and their corresponding
-     * offsets.  This lower level constructure assumes pileup is well-formed and merely keeps a
-     * pointer to pileup.  Don't go changing the data in pileup.
-     */
-    public ReadBackedPileupImpl(GenomeLoc loc, List<PileupElement> pileup) {
-        if (loc == null) throw new ReviewedGATKException("Illegal null genomeloc in ReadBackedPileup");
-        if (pileup == null) throw new ReviewedGATKException("Illegal null pileup in ReadBackedPileup");
-
-        this.loc = loc;
-        this.pileupElementTracker = new UnifiedPileupElementTracker<PileupElement>(pileup);
-    }
-
-    /**
-     * Optimization of above constructor where all of the cached data is provided
-     *
-     * @param loc
-     * @param pileup
-     */
-    @Deprecated
-    public ReadBackedPileupImpl(GenomeLoc loc, List<PileupElement> pileup, int size, int nDeletions, int nMQ0Reads) {
-        this(loc, pileup);
-    }
-
-    protected ReadBackedPileupImpl(GenomeLoc loc, PileupElementTracker<PileupElement> tracker) {
-        this.loc = loc;
-        this.pileupElementTracker = tracker;
-    }
-
-    public ReadBackedPileupImpl(GenomeLoc loc, Map<String, ReadBackedPileupImpl> pileupsBySample) {
-        this.loc = loc;
-        PerSamplePileupElementTracker<PileupElement> tracker = new PerSamplePileupElementTracker<PileupElement>();
-        for (Map.Entry<String, ReadBackedPileupImpl> pileupEntry : pileupsBySample.entrySet()) {
-            tracker.addElements(pileupEntry.getKey(), pileupEntry.getValue().pileupElementTracker);
-        }
-        this.pileupElementTracker = tracker;
-    }
-
-    public ReadBackedPileupImpl(GenomeLoc loc, List<GATKSAMRecord> reads, int offset) {
-        this.loc = loc;
-        this.pileupElementTracker = readsOffsets2Pileup(reads, offset);
-    }
-
-    /**
-     * Helper routine for converting reads and offset lists to a PileupElement list.
-     *
-     * @param reads
-     * @param offsets
-     * @return
-     */
-    private PileupElementTracker<PileupElement> readsOffsets2Pileup(List<GATKSAMRecord> reads, List<Integer> offsets) {
-        if (reads == null) throw new ReviewedGATKException("Illegal null read list in UnifiedReadBackedPileup");
-        if (offsets == null) throw new ReviewedGATKException("Illegal null offsets list in UnifiedReadBackedPileup");
-        if (reads.size() != offsets.size())
-            throw new ReviewedGATKException("Reads and offset lists have different sizes!");
-
-        UnifiedPileupElementTracker<PileupElement> pileup = new UnifiedPileupElementTracker<PileupElement>();
-        for (int i = 0; i < reads.size(); i++) {
-            GATKSAMRecord read = reads.get(i);
-            int offset = offsets.get(i);
-            pileup.add(createNewPileupElement(read, offset)); // only used to create fake pileups for testing so ancillary information is not important
-        }
-
-        return pileup;
-    }
-
-    /**
-     * Helper routine for converting reads and a single offset to a PileupElement list.
-     *
-     * @param reads
-     * @param offset
-     * @return
-     */
-    private PileupElementTracker<PileupElement> readsOffsets2Pileup(List<GATKSAMRecord> reads, int offset) {
-        if (reads == null) throw new ReviewedGATKException("Illegal null read list in UnifiedReadBackedPileup");
-        if (offset < 0) throw new ReviewedGATKException("Illegal offset < 0 UnifiedReadBackedPileup");
-
-        UnifiedPileupElementTracker<PileupElement> pileup = new UnifiedPileupElementTracker<PileupElement>();
-        for (GATKSAMRecord read : reads) {
-            pileup.add(createNewPileupElement(read, offset)); // only used to create fake pileups for testing so ancillary information is not important
-        }
-
-        return pileup;
-    }
-
-    protected ReadBackedPileupImpl createNewPileup(GenomeLoc loc, PileupElementTracker<PileupElement> tracker) {
-        return new ReadBackedPileupImpl(loc, tracker);
-    }
-
-    protected PileupElement createNewPileupElement(GATKSAMRecord read, int offset) {
-        return LocusIteratorByState.createPileupForReadAndOffset(read, offset);
-    }    
-    
-    // --------------------------------------------------------
-    //
-    // Special 'constructors'
-    //
-    // --------------------------------------------------------
-
-    /**
-     * Returns a new ReadBackedPileup that is free of deletion spanning reads in this pileup.  Note that this
-     * does not copy the data, so both ReadBackedPileups should not be changed.  Doesn't make an unnecessary copy
-     * of the pileup (just returns this) if there are no deletions in the pileup.
-     *
-     * @return
-     */
-    @Override
-    public ReadBackedPileupImpl getPileupWithoutDeletions() {
-        if (getNumberOfDeletions() > 0) {
-            if (pileupElementTracker instanceof PerSamplePileupElementTracker) {
-                PerSamplePileupElementTracker<PileupElement> tracker = (PerSamplePileupElementTracker<PileupElement>) pileupElementTracker;
-                PerSamplePileupElementTracker<PileupElement> filteredTracker = new PerSamplePileupElementTracker<PileupElement>();
-
-                for (final String sample : tracker.getSamples()) {
-                    PileupElementTracker<PileupElement> perSampleElements = tracker.getElements(sample);
-                    ReadBackedPileupImpl pileup = createNewPileup(loc, perSampleElements).getPileupWithoutDeletions();
-                    filteredTracker.addElements(sample, pileup.pileupElementTracker);
-                }
-                return createNewPileup(loc, filteredTracker);
-
-            } else {
-                UnifiedPileupElementTracker<PileupElement> tracker = (UnifiedPileupElementTracker<PileupElement>) pileupElementTracker;
-                UnifiedPileupElementTracker<PileupElement> filteredTracker = new UnifiedPileupElementTracker<PileupElement>();
-
-                for (PileupElement p : tracker) {
-                    if (!p.isDeletion()) {
-                        filteredTracker.add(p);
-                    }
-                }
-                return createNewPileup(loc, filteredTracker);
-            }
-        } else {
-            return this;
-        }
-    }
-
-    /**
-     * Returns a new ReadBackedPileup where only one read from an overlapping read
-     * pair is retained.  If the two reads in question disagree to their basecall,
-     * neither read is retained.  If they agree on the base, the read with the higher
-     * base quality observation is retained
-     *
-     * @return the newly filtered pileup
-     */
-    @Override
-    public ReadBackedPileup getOverlappingFragmentFilteredPileup() {
-        return getOverlappingFragmentFilteredPileup(true, true);
-    }
-
-    /**
-     * Returns a new ReadBackedPileup where only one read from an overlapping read
-     * pair is retained.  If discardDiscordant and the two reads in question disagree to their basecall,
-     * neither read is retained.  Otherwise, the read with the higher
-     * quality (base or mapping, depending on baseQualNotMapQual) observation is retained
-     *
-     * @return the newly filtered pileup
-     */
-    @Override
-    public ReadBackedPileupImpl getOverlappingFragmentFilteredPileup(boolean discardDiscordant, boolean baseQualNotMapQual) {
-        if (pileupElementTracker instanceof PerSamplePileupElementTracker) {
-            PerSamplePileupElementTracker<PileupElement> tracker = (PerSamplePileupElementTracker<PileupElement>) pileupElementTracker;
-            PerSamplePileupElementTracker<PileupElement> filteredTracker = new PerSamplePileupElementTracker<PileupElement>();
-
-            for (final String sample : tracker.getSamples()) {
-                PileupElementTracker<PileupElement> perSampleElements = tracker.getElements(sample);
-                ReadBackedPileupImpl pileup = createNewPileup(loc, perSampleElements).getOverlappingFragmentFilteredPileup(discardDiscordant, baseQualNotMapQual);
-                filteredTracker.addElements(sample, pileup.pileupElementTracker);
-            }
-            return createNewPileup(loc, filteredTracker);
-        } else {
-            Map<String, PileupElement> filteredPileup = new HashMap<String, PileupElement>();
-
-            for (PileupElement p : pileupElementTracker) {
-                String readName = p.getRead().getReadName();
-
-                // if we've never seen this read before, life is good
-                if (!filteredPileup.containsKey(readName)) {
-                    filteredPileup.put(readName, p);
-                } else {
-                    PileupElement existing = filteredPileup.get(readName);
-
-                    // if the reads disagree at this position, throw them both out.  Otherwise
-                    // keep the element with the higher quality score
-                    if (discardDiscordant && existing.getBase() != p.getBase()) {
-                        filteredPileup.remove(readName);
-                    } else {
-                        if (baseQualNotMapQual) {
-                            if (existing.getQual() < p.getQual())
-                                filteredPileup.put(readName, p);
-                        }
-                        else {
-                            if (existing.getMappingQual() < p.getMappingQual())
-                                filteredPileup.put(readName, p);
-                        }
-                    }
-                }
-            }
-
-            UnifiedPileupElementTracker<PileupElement> filteredTracker = new UnifiedPileupElementTracker<PileupElement>();
-            for (PileupElement filteredElement : filteredPileup.values())
-                filteredTracker.add(filteredElement);
-
-            return createNewPileup(loc, filteredTracker);
-        }
-    }
-
-
-    /**
-     * Returns a new ReadBackedPileup that is free of mapping quality zero reads in this pileup.  Note that this
-     * does not copy the data, so both ReadBackedPileups should not be changed.  Doesn't make an unnecessary copy
-     * of the pileup (just returns this) if there are no MQ0 reads in the pileup.
-     *
-     * @return
-     */
-    @Override
-    public ReadBackedPileupImpl getPileupWithoutMappingQualityZeroReads() {
-        if (getNumberOfMappingQualityZeroReads() > 0) {
-            if (pileupElementTracker instanceof PerSamplePileupElementTracker) {
-                PerSamplePileupElementTracker<PileupElement> tracker = (PerSamplePileupElementTracker<PileupElement>) pileupElementTracker;
-                PerSamplePileupElementTracker<PileupElement> filteredTracker = new PerSamplePileupElementTracker<PileupElement>();
-
-                for (final String sample : tracker.getSamples()) {
-                    PileupElementTracker<PileupElement> perSampleElements = tracker.getElements(sample);
-                    ReadBackedPileupImpl pileup = createNewPileup(loc, perSampleElements).getPileupWithoutMappingQualityZeroReads();
-                    filteredTracker.addElements(sample, pileup.pileupElementTracker);
-                }
-                return createNewPileup(loc, filteredTracker);
-
-            } else {
-                UnifiedPileupElementTracker<PileupElement> tracker = (UnifiedPileupElementTracker<PileupElement>) pileupElementTracker;
-                UnifiedPileupElementTracker<PileupElement> filteredTracker = new UnifiedPileupElementTracker<PileupElement>();
-
-                for (PileupElement p : tracker) {
-                    if (p.getRead().getMappingQuality() > 0) {
-                        filteredTracker.add(p);
-                    }
-                }
-                return createNewPileup(loc, filteredTracker);
-            }
-        } else {
-            return this;
-        }
-    }
-
-    public ReadBackedPileupImpl getPositiveStrandPileup() {
-        if (pileupElementTracker instanceof PerSamplePileupElementTracker) {
-            PerSamplePileupElementTracker<PileupElement> tracker = (PerSamplePileupElementTracker<PileupElement>) pileupElementTracker;
-            PerSamplePileupElementTracker<PileupElement> filteredTracker = new PerSamplePileupElementTracker<PileupElement>();
-
-            for (final String sample : tracker.getSamples()) {
-                PileupElementTracker<PileupElement> perSampleElements = tracker.getElements(sample);
-                ReadBackedPileupImpl pileup = createNewPileup(loc, perSampleElements).getPositiveStrandPileup();
-                filteredTracker.addElements(sample, pileup.pileupElementTracker);
-            }
-            return createNewPileup(loc, filteredTracker);
-        } else {
-            UnifiedPileupElementTracker<PileupElement> tracker = (UnifiedPileupElementTracker<PileupElement>) pileupElementTracker;
-            UnifiedPileupElementTracker<PileupElement> filteredTracker = new UnifiedPileupElementTracker<PileupElement>();
-
-            for (PileupElement p : tracker) {
-                if (!p.getRead().getReadNegativeStrandFlag()) {
-                    filteredTracker.add(p);
-                }
-            }
-            return createNewPileup(loc, filteredTracker);
-        }
-    }
-
-    /**
-     * Gets the pileup consisting of only reads on the negative strand.
-     *
-     * @return A read-backed pileup consisting only of reads on the negative strand.
-     */
-    public ReadBackedPileupImpl getNegativeStrandPileup() {
-        if (pileupElementTracker instanceof PerSamplePileupElementTracker) {
-            PerSamplePileupElementTracker<PileupElement> tracker = (PerSamplePileupElementTracker<PileupElement>) pileupElementTracker;
-            PerSamplePileupElementTracker<PileupElement> filteredTracker = new PerSamplePileupElementTracker<PileupElement>();
-
-            for (final String sample : tracker.getSamples()) {
-                PileupElementTracker<PileupElement> perSampleElements = tracker.getElements(sample);
-                ReadBackedPileupImpl pileup = createNewPileup(loc, perSampleElements).getNegativeStrandPileup();
-                filteredTracker.addElements(sample, pileup.pileupElementTracker);
-            }
-            return createNewPileup(loc, filteredTracker);
-        } else {
-            UnifiedPileupElementTracker<PileupElement> tracker = (UnifiedPileupElementTracker<PileupElement>) pileupElementTracker;
-            UnifiedPileupElementTracker<PileupElement> filteredTracker = new UnifiedPileupElementTracker<PileupElement>();
-
-            for (PileupElement p : tracker) {
-                if (p.getRead().getReadNegativeStrandFlag()) {
-                    filteredTracker.add(p);
-                }
-            }
-            return createNewPileup(loc, filteredTracker);
-        }
-    }
-
-    /**
-     * Gets a pileup consisting of all those elements passed by a given filter.
-     *
-     * @param filter Filter to use when testing for elements.
-     * @return a pileup without the given filtered elements.
-     */
-    public ReadBackedPileupImpl getFilteredPileup(PileupElementFilter filter) {
-        if (pileupElementTracker instanceof PerSamplePileupElementTracker) {
-            PerSamplePileupElementTracker<PileupElement> tracker = (PerSamplePileupElementTracker<PileupElement>) pileupElementTracker;
-            PerSamplePileupElementTracker<PileupElement> filteredTracker = new PerSamplePileupElementTracker<PileupElement>();
-
-            for (final String sample : tracker.getSamples()) {
-                PileupElementTracker<PileupElement> perSampleElements = tracker.getElements(sample);
-                ReadBackedPileupImpl pileup = createNewPileup(loc, perSampleElements).getFilteredPileup(filter);
-                filteredTracker.addElements(sample, pileup.pileupElementTracker);
-            }
-
-            return createNewPileup(loc, filteredTracker);
-        } else {
-            UnifiedPileupElementTracker<PileupElement> filteredTracker = new UnifiedPileupElementTracker<PileupElement>();
-
-            for (PileupElement p : pileupElementTracker) {
-                if (filter.allow(p))
-                    filteredTracker.add(p);
-            }
-
-            return createNewPileup(loc, filteredTracker);
-        }
-    }
-
-    /**
-     * Returns subset of this pileup that contains only bases with quality >= minBaseQ, coming from
-     * reads with mapping qualities >= minMapQ. This method allocates and returns a new instance of ReadBackedPileup.
-     *
-     * @param minBaseQ
-     * @param minMapQ
-     * @return
-     */
-    @Override
-    public ReadBackedPileupImpl getBaseAndMappingFilteredPileup(int minBaseQ, int minMapQ) {
-        if (pileupElementTracker instanceof PerSamplePileupElementTracker) {
-            PerSamplePileupElementTracker<PileupElement> tracker = (PerSamplePileupElementTracker<PileupElement>) pileupElementTracker;
-            PerSamplePileupElementTracker<PileupElement> filteredTracker = new PerSamplePileupElementTracker<PileupElement>();
-
-            for (final String sample : tracker.getSamples()) {
-                PileupElementTracker<PileupElement> perSampleElements = tracker.getElements(sample);
-                ReadBackedPileupImpl pileup = createNewPileup(loc, perSampleElements).getBaseAndMappingFilteredPileup(minBaseQ, minMapQ);
-                filteredTracker.addElements(sample, pileup.pileupElementTracker);
-            }
-
-            return createNewPileup(loc, filteredTracker);
-        } else {
-            UnifiedPileupElementTracker<PileupElement> filteredTracker = new UnifiedPileupElementTracker<PileupElement>();
-
-            for (PileupElement p : pileupElementTracker) {
-                if (p.getRead().getMappingQuality() >= minMapQ && (p.isDeletion() || p.getQual() >= minBaseQ)) {
-                    filteredTracker.add(p);
-                }
-            }
-
-            return createNewPileup(loc, filteredTracker);
-        }
-    }
-
-    /**
-     * Returns subset of this pileup that contains only bases with quality >= minBaseQ.
-     * This method allocates and returns a new instance of ReadBackedPileup.
-     *
-     * @param minBaseQ
-     * @return
-     */
-    @Override
-    public ReadBackedPileup getBaseFilteredPileup(int minBaseQ) {
-        return getBaseAndMappingFilteredPileup(minBaseQ, -1);
-    }
-
-    /**
-     * Returns subset of this pileup that contains only bases coming from reads with mapping quality >= minMapQ.
-     * This method allocates and returns a new instance of ReadBackedPileup.
-     *
-     * @param minMapQ
-     * @return
-     */
-    @Override
-    public ReadBackedPileup getMappingFilteredPileup(int minMapQ) {
-        return getBaseAndMappingFilteredPileup(-1, minMapQ);
-    }
-
-    /**
-     * Gets a list of the read groups represented in this pileup.
-     *
-     * @return
-     */
-    @Override
-    public Collection<String> getReadGroups() {
-        Set<String> readGroups = new HashSet<String>();
-        for (PileupElement pileupElement : this)
-            readGroups.add(pileupElement.getRead().getReadGroup().getReadGroupId());
-        return readGroups;
-    }
-
-    /**
-     * Gets the pileup for a given read group.  Horrendously inefficient at this point.
-     *
-     * @param targetReadGroupId Identifier for the read group.
-     * @return A read-backed pileup containing only the reads in the given read group.
-     */
-    @Override
-    public ReadBackedPileupImpl getPileupForReadGroup(String targetReadGroupId) {
-        if (pileupElementTracker instanceof PerSamplePileupElementTracker) {
-            PerSamplePileupElementTracker<PileupElement> tracker = (PerSamplePileupElementTracker<PileupElement>) pileupElementTracker;
-            PerSamplePileupElementTracker<PileupElement> filteredTracker = new PerSamplePileupElementTracker<PileupElement>();
-
-            for (final String sample : tracker.getSamples()) {
-                PileupElementTracker<PileupElement> perSampleElements = tracker.getElements(sample);
-                ReadBackedPileupImpl pileup = createNewPileup(loc, perSampleElements).getPileupForReadGroup(targetReadGroupId);
-                if (pileup != null)
-                    filteredTracker.addElements(sample, pileup.pileupElementTracker);
-            }
-            return filteredTracker.size() > 0 ? createNewPileup(loc, filteredTracker) : null;
-        } else {
-            UnifiedPileupElementTracker<PileupElement> filteredTracker = new UnifiedPileupElementTracker<PileupElement>();
-            for (PileupElement p : pileupElementTracker) {
-                GATKSAMRecord read = p.getRead();
-                if (targetReadGroupId != null) {
-                    if (read.getReadGroup() != null && targetReadGroupId.equals(read.getReadGroup().getReadGroupId()))
-                        filteredTracker.add(p);
-                } else {
-                    if (read.getReadGroup() == null || read.getReadGroup().getReadGroupId() == null)
-                        filteredTracker.add(p);
-                }
-            }
-            return filteredTracker.size() > 0 ? createNewPileup(loc, filteredTracker) : null;
-        }
-    }
-
-    /**
-     * Gets the pileup for a set of read groups.  Horrendously inefficient at this point.
-     *
-     * @param rgSet List of identifiers for the read groups.
-     * @return A read-backed pileup containing only the reads in the given read groups.
-     */
-    @Override
-    public ReadBackedPileupImpl getPileupForReadGroups(final HashSet<String> rgSet) {
-        if (pileupElementTracker instanceof PerSamplePileupElementTracker) {
-            PerSamplePileupElementTracker<PileupElement> tracker = (PerSamplePileupElementTracker<PileupElement>) pileupElementTracker;
-            PerSamplePileupElementTracker<PileupElement> filteredTracker = new PerSamplePileupElementTracker<PileupElement>();
-
-            for (final String sample : tracker.getSamples()) {
-                PileupElementTracker<PileupElement> perSampleElements = tracker.getElements(sample);
-                ReadBackedPileupImpl pileup = createNewPileup(loc, perSampleElements).getPileupForReadGroups(rgSet);
-                if (pileup != null)
-                    filteredTracker.addElements(sample, pileup.pileupElementTracker);
-            }
-            return filteredTracker.size() > 0 ? createNewPileup(loc, filteredTracker) : null;
-        } else {
-            UnifiedPileupElementTracker<PileupElement> filteredTracker = new UnifiedPileupElementTracker<PileupElement>();
-            for (PileupElement p : pileupElementTracker) {
-                GATKSAMRecord read = p.getRead();
-                if (rgSet != null && !rgSet.isEmpty()) {
-                    if (read.getReadGroup() != null && rgSet.contains(read.getReadGroup().getReadGroupId()))
-                        filteredTracker.add(p);
-                } else {
-                    if (read.getReadGroup() == null || read.getReadGroup().getReadGroupId() == null)
-                        filteredTracker.add(p);
-                }
-            }
-            return filteredTracker.size() > 0 ? createNewPileup(loc, filteredTracker) : null;
-        }
-    }
-
-    @Override
-    public ReadBackedPileupImpl getPileupForLane(String laneID) {
-        if (pileupElementTracker instanceof PerSamplePileupElementTracker) {
-            PerSamplePileupElementTracker<PileupElement> tracker = (PerSamplePileupElementTracker<PileupElement>) pileupElementTracker;
-            PerSamplePileupElementTracker<PileupElement> filteredTracker = new PerSamplePileupElementTracker<PileupElement>();
-
-            for (final String sample : tracker.getSamples()) {
-                PileupElementTracker<PileupElement> perSampleElements = tracker.getElements(sample);
-                ReadBackedPileupImpl pileup = createNewPileup(loc, perSampleElements).getPileupForLane(laneID);
-                if (pileup != null)
-                    filteredTracker.addElements(sample, pileup.pileupElementTracker);
-            }
-            return filteredTracker.size() > 0 ? createNewPileup(loc, filteredTracker) : null;
-        } else {
-            UnifiedPileupElementTracker<PileupElement> filteredTracker = new UnifiedPileupElementTracker<PileupElement>();
-            for (PileupElement p : pileupElementTracker) {
-                GATKSAMRecord read = p.getRead();
-                if (laneID != null) {
-                    if (read.getReadGroup() != null &&
-                            (read.getReadGroup().getReadGroupId().startsWith(laneID + ".")) ||   // lane is the same, but sample identifier is different
-                            (read.getReadGroup().getReadGroupId().equals(laneID)))               // in case there is no sample identifier, they have to be exactly the same
-                        filteredTracker.add(p);
-                } else {
-                    if (read.getReadGroup() == null || read.getReadGroup().getReadGroupId() == null)
-                        filteredTracker.add(p);
-                }
-            }
-            return filteredTracker.size() > 0 ? createNewPileup(loc, filteredTracker) : null;
-        }
-    }
-
-    public Collection<String> getSamples() {
-        if (pileupElementTracker instanceof PerSamplePileupElementTracker) {
-            PerSamplePileupElementTracker<PileupElement> tracker = (PerSamplePileupElementTracker<PileupElement>) pileupElementTracker;
-            return new HashSet<String>(tracker.getSamples());
-        } else {
-            Collection<String> sampleNames = new HashSet<String>();
-            for (PileupElement p : this) {
-                GATKSAMRecord read = p.getRead();
-                String sampleName = read.getReadGroup() != null ? read.getReadGroup().getSample() : null;
-                sampleNames.add(sampleName);
-            }
-            return sampleNames;
-        }
-    }
-
-    /**
-     * Returns a pileup randomly downsampled to the desiredCoverage.
-     *
-     * TODO: delete this once the experimental downsampler stabilizes
-     *
-     * @param desiredCoverage
-     * @return
-     */
-    @Override
-    public ReadBackedPileup getDownsampledPileup(int desiredCoverage) {
-        if (getNumberOfElements() <= desiredCoverage)
-            return this;
-
-        // randomly choose numbers corresponding to positions in the reads list
-        TreeSet<Integer> positions = new TreeSet<Integer>();
-        for (int i = 0; i < desiredCoverage; /* no update */) {
-            if (positions.add(GenomeAnalysisEngine.getRandomGenerator().nextInt(getNumberOfElements())))
-                i++;
-        }
-
-        if (pileupElementTracker instanceof PerSamplePileupElementTracker) {
-            PerSamplePileupElementTracker<PileupElement> tracker = (PerSamplePileupElementTracker<PileupElement>) pileupElementTracker;
-            PerSamplePileupElementTracker<PileupElement> filteredTracker = new PerSamplePileupElementTracker<PileupElement>();
-
-
-            for (final String sample : tracker.getSamples()) {
-                PileupElementTracker<PileupElement> perSampleElements = tracker.getElements(sample);
-
-                int current = 0;
-                UnifiedPileupElementTracker<PileupElement> filteredPileup = new UnifiedPileupElementTracker<PileupElement>();
-                for (PileupElement p : perSampleElements) {
-                    if (positions.contains(current))
-                        filteredPileup.add(p);
-                    current++;
-
-                }
-                filteredTracker.addElements(sample, filteredPileup);
-            }
-
-            return createNewPileup(loc, filteredTracker);
-        } else {
-            UnifiedPileupElementTracker<PileupElement> tracker = (UnifiedPileupElementTracker<PileupElement>) pileupElementTracker;
-            UnifiedPileupElementTracker<PileupElement> filteredTracker = new UnifiedPileupElementTracker<PileupElement>();
-
-            Iterator positionIter = positions.iterator();
-
-            while (positionIter.hasNext()) {
-                int nextReadToKeep = (Integer) positionIter.next();
-                filteredTracker.add(tracker.get(nextReadToKeep));
-            }
-
-            return createNewPileup(getLocation(), filteredTracker);
-        }
-    }
-
-    @Override
-    public ReadBackedPileup getPileupForSamples(Collection<String> sampleNames) {
-        if (pileupElementTracker instanceof PerSamplePileupElementTracker) {
-            PerSamplePileupElementTracker<PileupElement> tracker = (PerSamplePileupElementTracker<PileupElement>) pileupElementTracker;
-            PileupElementTracker<PileupElement> filteredElements = tracker.getElements(sampleNames);
-            return filteredElements != null ? createNewPileup(loc, filteredElements) : null;
-        } else {
-            HashSet<String> hashSampleNames = new HashSet<String>(sampleNames);                                         // to speed up the "contains" access in the for loop
-            UnifiedPileupElementTracker<PileupElement> filteredTracker = new UnifiedPileupElementTracker<PileupElement>();
-            for (PileupElement p : pileupElementTracker) {
-                GATKSAMRecord read = p.getRead();
-                if (sampleNames != null) {                                                                              // still checking on sampleNames because hashSampleNames will never be null. And empty means something else.
-                    if (read.getReadGroup() != null && hashSampleNames.contains(read.getReadGroup().getSample()))
-                        filteredTracker.add(p);
-                } else {
-                    if (read.getReadGroup() == null || read.getReadGroup().getSample() == null)
-                        filteredTracker.add(p);
-                }
-            }
-            return filteredTracker.size() > 0 ? createNewPileup(loc, filteredTracker) : null;
-        }
-    }
-
-    @Override
-    public Map<String, ReadBackedPileup> getPileupsForSamples(Collection<String> sampleNames) {
-        Map<String, ReadBackedPileup> result = new HashMap<String, ReadBackedPileup>();
-        if (pileupElementTracker instanceof PerSamplePileupElementTracker) {
-            PerSamplePileupElementTracker<PileupElement> tracker = (PerSamplePileupElementTracker<PileupElement>) pileupElementTracker;
-            for (String sample : sampleNames) {
-                PileupElementTracker<PileupElement> filteredElements = tracker.getElements(sample);
-                if (filteredElements != null)
-                    result.put(sample, createNewPileup(loc, filteredElements));
-            }
-        } else {
-            Map<String, UnifiedPileupElementTracker<PileupElement>> trackerMap = new HashMap<String, UnifiedPileupElementTracker<PileupElement>>();
-
-            for (String sample : sampleNames) {                                                                         // initialize pileups for each sample
-                UnifiedPileupElementTracker<PileupElement> filteredTracker = new UnifiedPileupElementTracker<PileupElement>();
-                trackerMap.put(sample, filteredTracker);
-            }
-            for (PileupElement p : pileupElementTracker) {                                                                         // go through all pileup elements only once and add them to the respective sample's pileup
-                GATKSAMRecord read = p.getRead();
-                if (read.getReadGroup() != null) {
-                    String sample = read.getReadGroup().getSample();
-                    UnifiedPileupElementTracker<PileupElement> tracker = trackerMap.get(sample);
-                    if (tracker != null)                                                                                // we only add the pileup the requested samples. Completely ignore the rest
-                        tracker.add(p);
-                }
-            }
-            for (Map.Entry<String, UnifiedPileupElementTracker<PileupElement>> entry : trackerMap.entrySet())                      // create the ReadBackedPileup for each sample
-                result.put(entry.getKey(), createNewPileup(loc, entry.getValue()));
-        }
-        return result;
-    }
-
-
-    @Override
-    public ReadBackedPileup getPileupForSample(String sampleName) {
-        if (pileupElementTracker instanceof PerSamplePileupElementTracker) {
-            PerSamplePileupElementTracker<PileupElement> tracker = (PerSamplePileupElementTracker<PileupElement>) pileupElementTracker;
-            PileupElementTracker<PileupElement> filteredElements = tracker.getElements(sampleName);
-            return filteredElements != null ? createNewPileup(loc, filteredElements) : null;
-        } else {
-            UnifiedPileupElementTracker<PileupElement> filteredTracker = new UnifiedPileupElementTracker<PileupElement>();
-            for (PileupElement p : pileupElementTracker) {
-                GATKSAMRecord read = p.getRead();
-                if (sampleName != null) {
-                    if (read.getReadGroup() != null && sampleName.equals(read.getReadGroup().getSample()))
-                        filteredTracker.add(p);
-                } else {
-                    if (read.getReadGroup() == null || read.getReadGroup().getSample() == null)
-                        filteredTracker.add(p);
-                }
-            }
-            return filteredTracker.size() > 0 ? createNewPileup(loc, filteredTracker) : null;
-        }
-    }
-
-    // --------------------------------------------------------
-    //
-    // iterators
-    //
-    // --------------------------------------------------------
-
-    /**
-     * The best way to access PileupElements where you only care about the bases and quals in the pileup.
-     * <p/>
-     * for (PileupElement p : this) { doSomething(p); }
-     * <p/>
-     * Provides efficient iteration of the data.
-     *
-     * @return
-     */
-    @Override
-    public Iterator<PileupElement> iterator() {
-        return new Iterator<PileupElement>() {
-            private final Iterator<PileupElement> wrappedIterator = pileupElementTracker.iterator();
-
-            public boolean hasNext() {
-                return wrappedIterator.hasNext();
-            }
-
-            public PileupElement next() {
-                return wrappedIterator.next();
-            }
-
-            public void remove() {
-                throw new UnsupportedOperationException("Cannot remove from a pileup element iterator");
-            }
-        };
-    }
-
-    /**
-     * The best way to access PileupElements where you only care not only about bases and quals in the pileup
-     * but also need access to the index of the pileup element in the pile.
-     *
-     * for (ExtendedPileupElement p : this) { doSomething(p); }
-     *
-     * Provides efficient iteration of the data.
-     *
-     * @return
-     */
-
-    /**
-     * Simple useful routine to count the number of deletion bases in this pileup
-     *
-     * @return
-     */
-    @Override
-    public int getNumberOfDeletions() {
-        if ( nDeletions == UNINITIALIZED_CACHED_INT_VALUE ) {
-            nDeletions = 0;
-            for (PileupElement p : pileupElementTracker.unorderedIterable() ) {
-                if (p.isDeletion()) {
-                    nDeletions++;
-                }
-            }
-        }
-        return nDeletions;
-    }
-
-    @Override
-    public int getNumberOfMappingQualityZeroReads() {
-        if ( nMQ0Reads == UNINITIALIZED_CACHED_INT_VALUE ) {
-            nMQ0Reads = 0;
-
-            for (PileupElement p : pileupElementTracker.unorderedIterable()) {
-                if (p.getRead().getMappingQuality() == 0) {
-                    nMQ0Reads++;
-                }
-            }
-        }
-
-        return nMQ0Reads;
-    }
-
-    /**
-     * @return the number of physical elements in this pileup
-     */
-    @Override
-    public int getNumberOfElements() {
-        return pileupElementTracker.size();
-    }
-
-    /**
-     * @return the number of abstract elements in this pileup
-     */
-    @Override
-    public int depthOfCoverage() {
-        if (depthOfCoverage == UNINITIALIZED_CACHED_INT_VALUE) {
-            depthOfCoverage = pileupElementTracker.size();
-        }
-        return depthOfCoverage;
-    }
-
-    /**
-     * @return true if there are 0 elements in the pileup, false otherwise
-     */
-    @Override
-    public boolean isEmpty() {
-        return getNumberOfElements() == 0;
-    }
-
-
-    /**
-     * @return the location of this pileup
-     */
-    @Override
-    public GenomeLoc getLocation() {
-        return loc;
-    }
-
-    /**
-     * Get counts of A, C, G, T in order, which returns a int[4] vector with counts according
-     * to BaseUtils.simpleBaseToBaseIndex for each base.
-     *
-     * @return
-     */
-    @Override
-    public int[] getBaseCounts() {
-        int[] counts = new int[4];
-
-        // TODO -- can be optimized with .unorderedIterable()
-        if (pileupElementTracker instanceof PerSamplePileupElementTracker) {
-            PerSamplePileupElementTracker<PileupElement> tracker = (PerSamplePileupElementTracker<PileupElement>) pileupElementTracker;
-            for (final String sample : tracker.getSamples()) {
-                int[] countsBySample = createNewPileup(loc, tracker.getElements(sample)).getBaseCounts();
-                for (int i = 0; i < counts.length; i++)
-                    counts[i] += countsBySample[i];
-            }
-        } else {
-            for (PileupElement pile : this) {
-                // skip deletion sites
-                if (!pile.isDeletion()) {
-                    int index = BaseUtils.simpleBaseToBaseIndex((char) pile.getBase());
-                    if (index != -1)
-                        counts[index]++;
-                }
-            }
-        }
-
-        return counts;
-    }
-
-    @Override
-    public String getPileupString(Character ref) {
-        // In the pileup format, each line represents a genomic position, consisting of chromosome name,
-        // coordinate, reference base, read bases, read qualities and alignment mapping qualities.
-        return String.format("%s %s %c %s %s",
-                getLocation().getContig(), getLocation().getStart(),    // chromosome name and coordinate
-                ref,                                                     // reference base
-                new String(getBases()),
-                getQualsString());
-    }
-
-    // --------------------------------------------------------
-    //
-    // Convenience functions that may be slow
-    //
-    // --------------------------------------------------------
-
-    /**
-     * Returns a list of the reads in this pileup. Note this call costs O(n) and allocates fresh lists each time
-     *
-     * @return
-     */
-    @Override
-    public List<GATKSAMRecord> getReads() {
-        List<GATKSAMRecord> reads = new ArrayList<GATKSAMRecord>(getNumberOfElements());
-        for (PileupElement pile : this) {
-            reads.add(pile.getRead());
-        }
-        return reads;
-    }
-
-    @Override
-    public int getNumberOfDeletionsAfterThisElement() {
-        int count = 0;
-        for (PileupElement p : pileupElementTracker.unorderedIterable()) {
-            if (p.isBeforeDeletionStart())
-                count++;
-        }
-        return count;
-    }
-
-    @Override
-    public int getNumberOfInsertionsAfterThisElement() {
-        int count = 0;
-        for (PileupElement p : pileupElementTracker.unorderedIterable()) {
-            if (p.isBeforeInsertion())
-                count++;
-        }
-        return count;
-
-    }
-    /**
-     * Returns a list of the offsets in this pileup. Note this call costs O(n) and allocates fresh lists each time
-     *
-     * @return
-     */
-    @Override
-    public List<Integer> getOffsets() {
-        List<Integer> offsets = new ArrayList<Integer>(getNumberOfElements());
-        for (PileupElement pile : pileupElementTracker.unorderedIterable()) {
-            offsets.add(pile.getOffset());
-        }
-        return offsets;
-    }
-
-    /**
-     * Returns an array of the bases in this pileup. Note this call costs O(n) and allocates fresh array each time
-     *
-     * @return
-     */
-    @Override
-    public byte[] getBases() {
-        byte[] v = new byte[getNumberOfElements()];
-        int pos = 0;
-        for (PileupElement pile : pileupElementTracker) {
-            v[pos++] = pile.getBase();
-        }
-        return v;
-    }
-
-    /**
-     * Returns an array of the quals in this pileup. Note this call costs O(n) and allocates fresh array each time
-     *
-     * @return
-     */
-    @Override
-    public byte[] getQuals() {
-        byte[] v = new byte[getNumberOfElements()];
-        int pos = 0;
-        for (PileupElement pile : pileupElementTracker) {
-            v[pos++] = pile.getQual();
-        }
-        return v;
-    }
-
-    /**
-     * Get an array of the mapping qualities
-     *
-     * @return
-     */
-    @Override
-    public int[] getMappingQuals() {
-        final int[] v = new int[getNumberOfElements()];
-        int pos = 0;
-        for ( final PileupElement pile : pileupElementTracker ) {
-            v[pos++] = pile.getRead().getMappingQuality();
-        }
-        return v;
-    }
-
-    static String quals2String(byte[] quals) {
-        StringBuilder qualStr = new StringBuilder();
-        for (int qual : quals) {
-            qual = Math.min(qual, 63);              // todo: fixme, this isn't a good idea
-            char qualChar = (char) (33 + qual);     // todo: warning, this is illegal for qual > 63
-            qualStr.append(qualChar);
-        }
-
-        return qualStr.toString();
-    }
-
-    private String getQualsString() {
-        return quals2String(getQuals());
-    }
-
-    /**
-     * Returns a new ReadBackedPileup that is sorted by start coordinate of the reads.
-     *
-     * @return
-     */
-    @Override
-    public ReadBackedPileup getStartSortedPileup() {
-
-        final TreeSet<PileupElement> sortedElements = new TreeSet<PileupElement>(new Comparator<PileupElement>() {
-            @Override
-            public int compare(PileupElement element1, PileupElement element2) {
-                final int difference = element1.getRead().getAlignmentStart() - element2.getRead().getAlignmentStart();
-                return difference != 0 ? difference : element1.getRead().getReadName().compareTo(element2.getRead().getReadName());
-            }
-        });
-
-        if (pileupElementTracker instanceof PerSamplePileupElementTracker) {
-            PerSamplePileupElementTracker<PileupElement> tracker = (PerSamplePileupElementTracker<PileupElement>) pileupElementTracker;
-
-            for (final String sample : tracker.getSamples()) {
-                PileupElementTracker<PileupElement> perSampleElements = tracker.getElements(sample);
-                for (PileupElement pile : perSampleElements)
-                    sortedElements.add(pile);
-            }
-        }
-        else {
-            UnifiedPileupElementTracker<PileupElement> tracker = (UnifiedPileupElementTracker<PileupElement>) pileupElementTracker;
-            for (PileupElement pile : tracker)
-                sortedElements.add(pile);
-        }
-
-        UnifiedPileupElementTracker<PileupElement> sortedTracker = new UnifiedPileupElementTracker<PileupElement>();
-        for (PileupElement pile : sortedElements)
-            sortedTracker.add(pile);
-
-        return createNewPileup(loc, sortedTracker);
-    }
-
-    @Override
-    public FragmentCollection<PileupElement> toFragments() {
-        return FragmentUtils.create(this);
-    }
-
-    @Override
-    public ReadBackedPileup copy() {
-        return new ReadBackedPileupImpl(loc, pileupElementTracker.copy());
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/progressmeter/ProgressMeter.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/progressmeter/ProgressMeter.java
deleted file mode 100644
index f77ac04..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/progressmeter/ProgressMeter.java
+++ /dev/null
@@ -1,465 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.progressmeter;
-
-import com.google.java.contract.Ensures;
-import com.google.java.contract.Invariant;
-import com.google.java.contract.Requires;
-import org.apache.log4j.Logger;
-import org.broadinstitute.gatk.utils.*;
-import org.broadinstitute.gatk.utils.exceptions.UserException;
-
-import java.io.File;
-import java.io.FileNotFoundException;
-import java.io.FileOutputStream;
-import java.io.PrintStream;
-import java.util.Arrays;
-import java.util.List;
-
-/**
- * A meter measuring progress on a calculation through a set of genomic regions that can
- * print a few key metrics to a logger and optionally to a file
- *
- * The key information for assessing progress is a set of genome locs describing the total
- * set of regions we will process.  Whenever (at reasonable intervals) the processing unit
- * can called notifyOfProgress and this logger may, depending on the metering delay, print
- * a log message with the following metrics:
- *
- *      -- Number of processed X (X = processing units)
- *      -- Runtime per.1M X
- *      -- Percent of regions to be processed completed
- *      -- The estimated total runtime based on previous performance
- *      -- The estimated time remaining for the entire process
- *
- * The optional file log an expanded set of metrics in tabular format
- * suitable for subsequent analysis in R.
- *
- * This class is -- and MUST BE -- thread-safe for use in the GATK.  Multiple independent
- * threads executing processors will be calling notifyOfProgress() simultaneously and this
- * class does (and MUST) properly sort out the timings of logs without interlacing outputs
- * because of these threads.
- *
- * Consequently, the fundamental model for when to print the logs is time based.  We basically
- * print a meter message every X seconds, minutes, hours, whatever is appropriate based on the
- * estimated remaining runtime.
- *
- * @author depristo
- * @since 2010 maybe, but written in 09/12 for clarity
- */
- at Invariant({
-        "targetSizeInBP >= 0",
-        "progressPrintFrequency > 0"
-})
-public class ProgressMeter {
-    protected static final Logger logger = Logger.getLogger(ProgressMeter.class);
-
-    // --------------------------------------------------------------------------------
-    // static constants controlling overall system behavior
-    // --------------------------------------------------------------------------------
-
-    /**
-     * Min. milliseconds after we start up the meter before we will print our first meter message
-     */
-    private final static long MIN_ELAPSED_TIME_BEFORE_FIRST_PROGRESS = 30 * 1000;
-
-    /**
-     * How often should we print performance logging information, when we are sending this
-     * information to a file?  Not dynamically updated as the logger meter is.
-     */
-    private final static long PERFORMANCE_LOG_PRINT_FREQUENCY = 10 * 1000;
-
-    private final static double TWO_HOURS_IN_SECONDS    =  2.0 * 60.0 * 60.0;
-    private final static double TWELVE_HOURS_IN_SECONDS = 12.0 * 60.0 * 60.0;
-
-    // --------------------------------------------------------------------------------
-    // Variables we updating during running
-    // --------------------------------------------------------------------------------
-
-    /**
-     * When was the last time we printed progress log?  In milleseconds
-     */
-    private long lastProgressPrintTime = -1;
-
-    /**
-     * How frequently should we be printing our meter messages?  Dynamically updated
-     * depending on how long we think the run has left.
-     */
-    private long progressPrintFrequency = 10 * 1000; // default value
-
-    /**
-     * When was the last time we printed to the performance log?  In millseconds
-     */
-    private long lastPerformanceLogPrintTime = -1;
-
-    // --------------------------------------------------------------------------------
-    // final variables fixed at object creation time
-    // --------------------------------------------------------------------------------
-
-    /**
-     * The set of genome locs describing the total region we are processing with
-     * this GATK run.  Used to determine how close we are to completing the run
-     */
-    private final GenomeLocSortedSet regionsBeingProcessed;
-
-    /**
-     * Size, in bp, of the area we are processing, derived from regionsBeingProcessed.
-     * Updated once in the system in initial for performance reasons
-     */
-    private final long targetSizeInBP;
-
-    /**
-     * A string describing the type of units being processes, so we can say things like
-     * "we are running at X processingUnitName per second"
-     */
-    private final String processingUnitName;
-
-    /**
-     * The space allocated to #processingUnitName in the output
-     */
-    private final int processingUnitWidth;
-
-    /**
-     * The format string used for progress lines
-     */
-    private final String progressFormatString;
-
-    /**
-     * A potentially null file where we print a supplementary, R readable performance log
-     * file.
-     */
-    private final PrintStream performanceLog;
-
-    /** We use the SimpleTimer to time our run */
-    private final SimpleTimer timer = new SimpleTimer();
-
-    private GenomeLoc maxGenomeLoc = null;
-    private Position position = new Position(PositionStatus.STARTING);
-    private long nTotalRecordsProcessed = 0;
-
-    /**
-     * The elapsed time in nanosecond, updated by the daemon thread, so that
-     * we don't pay any system call overhead to determine the the elapsed time.
-     */
-    private long elapsedTimeInNanosecondUpdatedByDaemon = 0;
-
-    final ProgressMeterDaemon progressMeterDaemon;
-
-    /**
-     * Create a new ProgressMeter
-     *
-     * Note that progress meter isn't started until the client calls start()
-     *
-     * @param performanceLogFile an optional performance log file where a table of performance logs will be written
-     * @param processingUnitName the name of the unit type being processed, suitable for saying X seconds per processingUnitName
-     * @param processingIntervals the intervals being processed
-     */
-    public ProgressMeter(final File performanceLogFile,
-                         final String processingUnitName,
-                         final GenomeLocSortedSet processingIntervals) {
-        this(performanceLogFile, processingUnitName, processingIntervals, ProgressMeterDaemon.DEFAULT_POLL_FREQUENCY_MILLISECONDS);
-    }
-
-    protected ProgressMeter(final File performanceLogFile,
-                            final String processingUnitName,
-                            final GenomeLocSortedSet processingIntervals,
-                            final long pollingFrequency) {
-        if ( processingUnitName == null ) throw new IllegalArgumentException("processingUnitName cannot be null");
-        if ( processingIntervals == null ) throw new IllegalArgumentException("Target intervals cannot be null");
-
-        this.processingUnitName = processingUnitName;
-        this.regionsBeingProcessed = processingIntervals;
-        this.processingUnitWidth = Math.max(processingUnitName.length(), "processed".length());
-        this.progressFormatString = String.format("%%15s   %%%1$ds   %%7s   %%%1$ds      %%5.1f%%%%   %%7s   %%9s", processingUnitWidth);
-
-        // setup the performance logger output, if requested
-        if ( performanceLogFile != null ) {
-            try {
-                this.performanceLog = new PrintStream(new FileOutputStream(performanceLogFile));
-                final List<String> pLogHeader = Arrays.asList("elapsed.time", "units.processed", "processing.speed",
-                        "bp.processed", "bp.speed", "genome.fraction.complete", "est.total.runtime", "est.time.remaining");
-                performanceLog.println(Utils.join("\t", pLogHeader));
-            } catch (FileNotFoundException e) {
-                throw new UserException.CouldNotCreateOutputFile(performanceLogFile, e);
-            }
-        } else {
-            performanceLog = null;
-        }
-
-        // cached for performance reasons
-        targetSizeInBP = processingIntervals.coveredSize();
-
-        // start up the timer
-        progressMeterDaemon = new ProgressMeterDaemon(this, pollingFrequency);
-    }
-
-    public ProgressMeterDaemon getProgressMeterDaemon() {
-        return progressMeterDaemon;
-    }
-
-    /**
-     * Start up the progress meter, printing initialization message and starting up the
-     * daemon thread for periodic printing.
-     */
-    @Requires("progressMeterDaemon != null")
-    public synchronized void start() {
-        timer.start();
-        lastProgressPrintTime = timer.currentTime();
-        final String formatString = String.format("%%15s | %%%1$ds | %%7s | %%%1$ds | %%9s | %%7s | %%9s", processingUnitWidth);
-
-        logger.info("[INITIALIZATION COMPLETE; STARTING PROCESSING]");
-        logger.info(String.format(formatString, "", "processed", "time", "per 1M", "", "total", "remaining"));
-        logger.info(String.format(formatString, "Location", processingUnitName, "elapsed", processingUnitName,
-                "completed", "runtime", "runtime"));
-
-        progressMeterDaemon.start();
-    }
-
-    /**
-     * @return the current runtime in nanoseconds
-     */
-    @Ensures("result >= 0")
-    public long getRuntimeInNanoseconds() {
-        return timer.getElapsedTimeNano();
-    }
-
-    /**
-     * This function is just like getRuntimeInNanoseconds but it doesn't actually query the
-     * system timer to determine the value, but rather uses a local variable in this meter
-     * that is updated by the daemon thread.  This means that the result is ridiculously imprecise
-     * for a nanosecond value (as it's only updated each pollingFrequency of the daemon) but
-     * it is free for clients to access, which can be critical when one wants to do tests like:
-     *
-     * for some work unit:
-     *   do unit if getRuntimeInNanosecondsUpdatedPeriodically < X
-     *
-     * and have this operation eventually timeout but don't want to pay the system call time to
-     * ensure that the loop exits as soon as the elapsed time exceeds X
-     *
-     * @return the current runtime in nanoseconds
-     */
-    @Ensures("result >= 0")
-    public long getRuntimeInNanosecondsUpdatedPeriodically() {
-        return elapsedTimeInNanosecondUpdatedByDaemon;
-    }
-
-    /**
-     * Update the period runtime variable to the current runtime in nanoseconds.  Should only
-     * be called by the daemon thread
-     */
-    protected void updateElapsedTimeInNanoseconds() {
-        elapsedTimeInNanosecondUpdatedByDaemon = getRuntimeInNanoseconds();
-    }
-
-
-
-    /**
-     * Utility routine that prints out process information (including timing) every N records or
-     * every M seconds, for N and M set in global variables.
-     *
-     * Synchronized to ensure that even with multiple threads calling notifyOfProgress we still
-     * get one clean stream of meter logs.
-     *
-     * Note this thread doesn't actually print progress, unless must print is true, but just registers
-     * the progress itself.  A separate printing daemon periodically polls the meter to print out
-     * progress
-     *
-     * @param loc Current location, can be null if you are at the end of the processing unit.  Must
-     *            have size == 1 (cannot be multiple bases in size).
-     * @param nTotalRecordsProcessed the total number of records we've processed
-     */
-    public synchronized void notifyOfProgress(final GenomeLoc loc, final long nTotalRecordsProcessed) {
-        if ( nTotalRecordsProcessed < 0 ) throw new IllegalArgumentException("nTotalRecordsProcessed must be >= 0");
-        if ( loc.size() != 1 ) throw new IllegalArgumentException("GenomeLoc must have size == 1 but got " + loc);
-
-        // weird comparison to ensure that loc == null (in unmapped reads) is keep before maxGenomeLoc == null (on startup)
-        this.maxGenomeLoc = loc == null ? loc : (maxGenomeLoc == null ? loc : loc.max(maxGenomeLoc));
-        this.nTotalRecordsProcessed = Math.max(this.nTotalRecordsProcessed, nTotalRecordsProcessed);
-
-        // a pretty name for our position
-        this.position = maxGenomeLoc == null ? new Position(PositionStatus.IN_UNMAPPED_READS) : new Position(maxGenomeLoc);
-    }
-
-    /**
-     * Describes the status of this position marker, such as starting up, done, in the unmapped reads,
-     * or somewhere on the genome
-     */
-    private enum PositionStatus {
-        STARTING("Starting"),
-        DONE("done"),
-        IN_UNMAPPED_READS("unmapped reads"),
-        ON_GENOME(null);
-
-        public final String message;
-
-        private PositionStatus(String message) {
-            this.message = message;
-        }
-    }
-
-    /**
-     * A pair of position status and the genome loc, if necessary.  Used to get a
-     * status update message as needed, without the computational cost of formatting
-     * the genome loc string every time a progress notification happens (which is almost
-     * always not printed)
-     */
-    private class Position {
-        final PositionStatus type;
-        final GenomeLoc maybeLoc;
-
-        /**
-         * Create a position object of any type != ON_GENOME
-         * @param type
-         */
-        @Requires({"type != null", "type != PositionStatus.ON_GENOME"})
-        private Position(PositionStatus type) {
-            this.type = type;
-            this.maybeLoc = null;
-        }
-
-        /**
-         * Create a position object of type ON_GENOME at genomeloc loc
-         * @param loc
-         */
-        @Requires("loc != null")
-        private Position(GenomeLoc loc) {
-            this.type = PositionStatus.ON_GENOME;
-            this.maybeLoc = loc;
-        }
-
-        /**
-         * @return a human-readable representation of this position
-         */
-        private String getMessage() {
-            if ( type == PositionStatus.ON_GENOME )
-                return maxGenomeLoc.getContig() + ":" + maxGenomeLoc.getStart();
-            else
-                return type.message;
-        }
-    }
-
-    /**
-     * Actually try to print out progress
-     *
-     * This function may print out if the progress print is due, but if not enough time has elapsed
-     * since the last print we will not print out information.
-     *
-     * @param mustPrint if true, progress will be printed regardless of the last time we printed progress
-     */
-    protected synchronized void printProgress(final boolean mustPrint) {
-        final long curTime = timer.currentTime();
-        final boolean printProgress = mustPrint || maxElapsedIntervalForPrinting(curTime, lastProgressPrintTime, progressPrintFrequency);
-        final boolean printLog = performanceLog != null && maxElapsedIntervalForPrinting(curTime, lastPerformanceLogPrintTime, PERFORMANCE_LOG_PRINT_FREQUENCY);
-
-        if ( printProgress || printLog ) {
-            final ProgressMeterData progressData = takeProgressSnapshot(maxGenomeLoc, nTotalRecordsProcessed);
-
-            final AutoFormattingTime elapsed = new AutoFormattingTime(progressData.getElapsedSeconds(), 5, 1);
-            final AutoFormattingTime bpRate = new AutoFormattingTime(progressData.secondsPerMillionBP());
-            final AutoFormattingTime unitRate = new AutoFormattingTime(progressData.secondsPerMillionElements());
-            final double fractionGenomeTargetCompleted = progressData.calculateFractionGenomeTargetCompleted(targetSizeInBP);
-            final AutoFormattingTime estTotalRuntime = new AutoFormattingTime(elapsed.getTimeInSeconds() / fractionGenomeTargetCompleted, 5, 1);
-            final AutoFormattingTime timeToCompletion = new AutoFormattingTime(estTotalRuntime.getTimeInSeconds() - elapsed.getTimeInSeconds());
-
-            if ( printProgress ) {
-                lastProgressPrintTime = curTime;
-                updateLoggerPrintFrequency(estTotalRuntime.getTimeInSeconds());
-
-                logger.info(String.format(progressFormatString,
-                        position.getMessage(), progressData.getUnitsProcessed()*1.0, elapsed, unitRate,
-                        100*fractionGenomeTargetCompleted, estTotalRuntime, timeToCompletion));
-
-            }
-
-            if ( printLog ) {
-                lastPerformanceLogPrintTime = curTime;
-                performanceLog.printf("%.2f\t%d\t%.2e\t%d\t%.2e\t%.2e\t%.2f\t%.2f%n",
-                        elapsed.getTimeInSeconds(), progressData.getUnitsProcessed(), unitRate.getTimeInSeconds(),
-                        progressData.getBpProcessed(), bpRate.getTimeInSeconds(),
-                        fractionGenomeTargetCompleted, estTotalRuntime.getTimeInSeconds(),
-                        timeToCompletion.getTimeInSeconds());
-            }
-        }
-    }
-
-    /**
-     * Determine, based on remaining runtime, how often to print the meter
-     *
-     * @param totalRuntimeSeconds kinda obvious, no?
-     */
-    private void updateLoggerPrintFrequency(final double totalRuntimeSeconds) {
-        // dynamically change the update rate so that short running jobs receive frequent updates while longer jobs receive fewer updates
-        if ( totalRuntimeSeconds > TWELVE_HOURS_IN_SECONDS )
-            progressPrintFrequency = 60 * 1000; // in milliseconds
-        else if ( totalRuntimeSeconds > TWO_HOURS_IN_SECONDS )
-            progressPrintFrequency = 30 * 1000; // in milliseconds
-        else
-            progressPrintFrequency = 10 * 1000; // in milliseconds
-    }
-
-    /**
-     * Creates a new ProgressData object recording a snapshot of our progress at this instant
-     *
-     * @param loc our current position.  If null, assumes we are done traversing
-     * @param nTotalRecordsProcessed the total number of records we've processed
-     * @return
-     */
-    private ProgressMeterData takeProgressSnapshot(final GenomeLoc loc, final long nTotalRecordsProcessed) {
-        // null -> end of processing
-        final long bpProcessed = loc == null ? targetSizeInBP : regionsBeingProcessed.sizeBeforeLoc(loc);
-        return new ProgressMeterData(timer.getElapsedTime(), nTotalRecordsProcessed, bpProcessed);
-    }
-
-    /**
-     * Should be called when processing is done
-     */
-    public void notifyDone(final long nTotalRecordsProcessed) {
-        // print out the progress meter
-        this.nTotalRecordsProcessed = nTotalRecordsProcessed;
-        this.position = new Position(PositionStatus.DONE);
-        printProgress(true);
-
-        logger.info(String.format("Total runtime %.2f secs, %.2f min, %.2f hours",
-                timer.getElapsedTime(), timer.getElapsedTime() / 60, timer.getElapsedTime() / 3600));
-
-        if ( performanceLog != null )
-            performanceLog.close();
-
-        // shutdown our daemon thread
-        progressMeterDaemon.done();
-    }
-
-    /**
-     * @param curTime (current runtime, in millisecs)
-     * @param lastPrintTime the last time we printed, in machine milliseconds
-     * @param printFreq maximum permitted difference between last print and current times
-     *
-     * @return true if the maximum interval (in millisecs) has passed since the last printing
-     */
-    private boolean maxElapsedIntervalForPrinting(final long curTime, long lastPrintTime, long printFreq) {
-        final long elapsed = curTime - lastPrintTime;
-        return elapsed > printFreq && elapsed > MIN_ELAPSED_TIME_BEFORE_FIRST_PROGRESS;
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/progressmeter/ProgressMeterDaemon.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/progressmeter/ProgressMeterDaemon.java
deleted file mode 100644
index f1f48e6..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/progressmeter/ProgressMeterDaemon.java
+++ /dev/null
@@ -1,111 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.progressmeter;
-
-/**
- * Daemon thread that periodically prints the progress of the progress meter
- *
- * User: depristo
- * Date: 12/4/12
- * Time: 9:16 PM
- */
-public final class ProgressMeterDaemon extends Thread {
-    public final static long DEFAULT_POLL_FREQUENCY_MILLISECONDS = 10 * 1000;
-
-    /**
-     * How frequently should we poll and print progress?
-     */
-    private final long pollFrequencyMilliseconds;
-
-    /**
-     * How long are we waiting between print progress calls are issued?
-     * @return the time in milliseconds between progress meter calls
-     */
-    private long getPollFrequencyMilliseconds() {
-        return pollFrequencyMilliseconds;
-    }
-
-    /**
-     * Are we to continue periodically printing status, or should we shut down?
-     */
-    boolean done = false;
-
-    /**
-     * The meter we will call print on
-     */
-    final ProgressMeter meter;
-
-    /**
-     * Create a new ProgressMeterDaemon printing progress for meter
-     * @param meter the progress meter to print progress of
-     */
-    public ProgressMeterDaemon(final ProgressMeter meter, final long pollFrequencyMilliseconds) {
-        if ( meter == null ) throw new IllegalArgumentException("meter cannot be null");
-        if ( pollFrequencyMilliseconds <= 0 ) throw new IllegalArgumentException("pollFrequencyMilliseconds must be greater than 0 but got " + pollFrequencyMilliseconds);
-
-        this.meter = meter;
-        this.pollFrequencyMilliseconds = pollFrequencyMilliseconds;
-        setDaemon(true);
-        setName("ProgressMeterDaemon");
-    }
-
-    public ProgressMeterDaemon(final ProgressMeter meter) {
-        this(meter, DEFAULT_POLL_FREQUENCY_MILLISECONDS);
-    }
-
-    /**
-     * Tells this daemon thread to shutdown at the next opportunity, as the progress
-     * metering is complete.
-     */
-    public final void done() {
-        this.done = true;
-    }
-
-    /**
-     * Is this daemon thread done?
-     * @return true if done, false otherwise
-     */
-    public boolean isDone() {
-        return done;
-    }
-
-    /**
-     * Start up the ProgressMeterDaemon, polling every tens of seconds to print, if
-     * necessary, the provided progress meter.  Never exits until the JVM is complete,
-     * or done() is called, as the thread is a daemon thread
-     */
-    public void run() {
-        while (! done) {
-            meter.printProgress(false);
-            meter.updateElapsedTimeInNanoseconds();
-            try {
-                Thread.sleep(getPollFrequencyMilliseconds());
-            } catch (InterruptedException e) {
-                throw new RuntimeException(e);
-            }
-        }
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/progressmeter/ProgressMeterData.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/progressmeter/ProgressMeterData.java
deleted file mode 100644
index 6804032..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/progressmeter/ProgressMeterData.java
+++ /dev/null
@@ -1,79 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.progressmeter;
-
-import com.google.java.contract.Ensures;
-import com.google.java.contract.Requires;
-
-/**
- * a snapshot of our performance, suitable for storage and later analysis
- */
-class ProgressMeterData {
-    private final double elapsedSeconds;
-    private final long unitsProcessed;
-    private final long bpProcessed;
-
-    @Requires({"unitsProcessed >= 0", "bpProcessed >= 0", "elapsedSeconds >= 0"})
-    public ProgressMeterData(double elapsedSeconds, long unitsProcessed, long bpProcessed) {
-        this.elapsedSeconds = elapsedSeconds;
-        this.unitsProcessed = unitsProcessed;
-        this.bpProcessed = bpProcessed;
-    }
-
-    @Ensures("result >= 0.0")
-    public double getElapsedSeconds() {
-        return elapsedSeconds;
-    }
-
-    @Ensures("result >= 0")
-    public long getUnitsProcessed() {
-        return unitsProcessed;
-    }
-
-    @Ensures("result >= 0")
-    public long getBpProcessed() {
-        return bpProcessed;
-    }
-
-    /** How long in seconds to process 1M traversal units? */
-    @Ensures("result >= 0.0")
-    public double secondsPerMillionElements() {
-        return (elapsedSeconds * 1000000.0) / Math.max(unitsProcessed, 1);
-    }
-
-    /** How long in seconds to process 1M bp on the genome? */
-    @Ensures("result >= 0.0")
-    public double secondsPerMillionBP() {
-        return (elapsedSeconds * 1000000.0) / Math.max(bpProcessed, 1);
-    }
-
-    /** What fraction of the target intervals have we covered? */
-    @Requires("targetSize >= 0")
-    @Ensures({"result >= 0.0", "result <= 1.0"})
-    public double calculateFractionGenomeTargetCompleted(final long targetSize) {
-        return (1.0*bpProcessed) / Math.max(targetSize, 1);
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/recalibration/BQSRArgumentSet.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/recalibration/BQSRArgumentSet.java
deleted file mode 100644
index cc41bc5..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/recalibration/BQSRArgumentSet.java
+++ /dev/null
@@ -1,85 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.recalibration;
-
-import org.broadinstitute.gatk.engine.arguments.GATKArgumentCollection;
-
-import java.io.File;
-
-public class BQSRArgumentSet {
-    // declare public, STL-style for easier and more efficient access:
-    private File BQSR_RECAL_FILE;
-    private int quantizationLevels;
-    private boolean disableIndelQuals;
-    private boolean emitOriginalQuals;
-    private int PRESERVE_QSCORES_LESS_THAN;
-    private double globalQScorePrior;
-
-    public BQSRArgumentSet(final GATKArgumentCollection args) {
-        this.BQSR_RECAL_FILE = args.BQSR_RECAL_FILE;
-        this.quantizationLevels = args.quantizationLevels;
-        this.disableIndelQuals = args.disableIndelQuals;
-        this.emitOriginalQuals = args.emitOriginalQuals;
-        this.PRESERVE_QSCORES_LESS_THAN = args.PRESERVE_QSCORES_LESS_THAN;
-        this.globalQScorePrior = args.globalQScorePrior;
-    }
-
-    public File getRecalFile() { return BQSR_RECAL_FILE; }
-
-    public int getQuantizationLevels() { return quantizationLevels; }
-
-    public boolean shouldDisableIndelQuals() { return disableIndelQuals; }
-
-    public boolean shouldEmitOriginalQuals() { return emitOriginalQuals; }
-
-    public int getPreserveQscoresLessThan() { return PRESERVE_QSCORES_LESS_THAN; }
-
-    public double getGlobalQScorePrior() { return globalQScorePrior; }
-
-    public void setRecalFile(final File BQSR_RECAL_FILE) {
-        this.BQSR_RECAL_FILE = BQSR_RECAL_FILE;
-    }
-
-    public void setQuantizationLevels(final int quantizationLevels) {
-        this.quantizationLevels = quantizationLevels;
-    }
-
-    public void setDisableIndelQuals(final boolean disableIndelQuals) {
-        this.disableIndelQuals = disableIndelQuals;
-    }
-
-    public void setEmitOriginalQuals(final boolean emitOriginalQuals) {
-        this.emitOriginalQuals = emitOriginalQuals;
-    }
-
-    public void setPreserveQscoresLessThan(final int PRESERVE_QSCORES_LESS_THAN) {
-        this.PRESERVE_QSCORES_LESS_THAN = PRESERVE_QSCORES_LESS_THAN;
-    }
-
-    public void setGlobalQScorePrior(final double globalQScorePrior) {
-        this.globalQScorePrior = globalQScorePrior;
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/recalibration/BQSRMode.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/recalibration/BQSRMode.java
deleted file mode 100644
index a742ed4..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/recalibration/BQSRMode.java
+++ /dev/null
@@ -1,55 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.recalibration;
-
-import org.broadinstitute.gatk.engine.iterators.ReadTransformer;
-
-import java.lang.annotation.*;
-
-/**
- * User: hanna
- * Date: May 14, 2009
- * Time: 1:51:22 PM
- * BROAD INSTITUTE SOFTWARE COPYRIGHT NOTICE AND AGREEMENT
- * Software and documentation are copyright 2005 by the Broad Institute.
- * All rights are reserved.
- *
- * Users acknowledge that this software is supplied without any warranty or support.
- * The Broad Institute is not responsible for its use, misuse, or
- * functionality.
- */
-
-/**
- * Allows the walker to indicate what type of data it wants to consume.
- */
-
- at Documented
- at Inherited
- at Retention(RetentionPolicy.RUNTIME)
- at Target(ElementType.TYPE)
-public @interface BQSRMode {
-    public abstract ReadTransformer.ApplicationTime ApplicationTime() default ReadTransformer.ApplicationTime.ON_INPUT;
-}
\ No newline at end of file
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/recalibration/EventType.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/recalibration/EventType.java
deleted file mode 100644
index 84ab785..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/recalibration/EventType.java
+++ /dev/null
@@ -1,72 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.recalibration;
-
-public enum EventType {
-    BASE_SUBSTITUTION("M", "Base Substitution"),
-    BASE_INSERTION("I", "Base Insertion"),
-    BASE_DELETION("D", "Base Deletion");
-
-    private final String representation;
-    private final String longRepresentation;
-
-    private EventType(String representation, String longRepresentation) {
-        this.representation = representation;
-        this.longRepresentation = longRepresentation;
-    }
-
-    /**
-     * Get the EventType corresponding to its ordinal index
-     * @param index an ordinal index
-     * @return the event type corresponding to ordinal index
-     */
-    public static EventType eventFrom(int index) {
-        return EventType.values()[index];
-    }
-
-    /**
-     * Get the EventType with short string representation
-     * @throws IllegalArgumentException if representation doesn't correspond to one of EventType
-     * @param representation short string representation of the event
-     * @return an EventType
-     */
-    public static EventType eventFrom(String representation) {
-        for (EventType eventType : EventType.values())
-            if (eventType.representation.equals(representation))
-                return eventType;
-
-        throw new IllegalArgumentException(String.format("Event %s does not exist.", representation));
-    }
-
-    @Override
-    public String toString() {
-        return representation;
-    }
-
-    public String prettyPrint() {
-        return longRepresentation;
-    }
-}
\ No newline at end of file
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/runtime/CapturedStreamOutput.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/runtime/CapturedStreamOutput.java
deleted file mode 100644
index 0166e98..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/runtime/CapturedStreamOutput.java
+++ /dev/null
@@ -1,134 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.runtime;
-
-import org.apache.commons.io.IOUtils;
-import org.apache.commons.io.output.NullOutputStream;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-import org.broadinstitute.gatk.utils.exceptions.UserException;
-import org.broadinstitute.gatk.utils.io.HardThresholdingOutputStream;
-
-import java.io.*;
-import java.util.EnumMap;
-
-/**
- * Stream output captured from a stream.
- */
-public class CapturedStreamOutput extends StreamOutput {
-    private final InputStream processStream;
-    private final EnumMap<StreamLocation, OutputStream> outputStreams = new EnumMap<StreamLocation, OutputStream>(StreamLocation.class);
-
-    /**
-     * The byte stream to capture content or null if no output string content was requested.
-     */
-    private final ByteArrayOutputStream bufferStream;
-
-    /**
-     * True if the buffer is truncated.
-     */
-    private boolean bufferTruncated = false;
-
-    /**
-     * @param settings       Settings that define what to capture.
-     * @param processStream  Stream to capture output.
-     * @param standardStream Stream to write debug output.
-     */
-    public CapturedStreamOutput(OutputStreamSettings settings, InputStream processStream, PrintStream standardStream) {
-        this.processStream = processStream;
-        int bufferSize = settings.getBufferSize();
-        this.bufferStream = (bufferSize < 0) ? new ByteArrayOutputStream() : new ByteArrayOutputStream(bufferSize);
-
-        for (StreamLocation location : settings.getStreamLocations()) {
-            OutputStream outputStream;
-            switch (location) {
-                case Buffer:
-                    if (bufferSize < 0) {
-                        outputStream = this.bufferStream;
-                    } else {
-                        outputStream = new HardThresholdingOutputStream(bufferSize) {
-                            @Override
-                            protected OutputStream getStream() throws IOException {
-                                return bufferTruncated ? NullOutputStream.NULL_OUTPUT_STREAM : bufferStream;
-                            }
-
-                            @Override
-                            protected void thresholdReached() throws IOException {
-                                bufferTruncated = true;
-                            }
-                        };
-                    }
-                    break;
-                case File:
-                    try {
-                        outputStream = new FileOutputStream(settings.getOutputFile(), settings.isAppendFile());
-                    } catch (IOException e) {
-                        throw new UserException.BadInput(e.getMessage());
-                    }
-                    break;
-                case Standard:
-                    outputStream = standardStream;
-                    break;
-                default:
-                    throw new ReviewedGATKException("Unexpected stream location: " + location);
-            }
-            this.outputStreams.put(location, outputStream);
-        }
-    }
-
-    @Override
-    public byte[] getBufferBytes() {
-        return bufferStream.toByteArray();
-    }
-
-    @Override
-    public boolean isBufferTruncated() {
-        return bufferTruncated;
-    }
-
-    /**
-     * Drain the input stream to keep the process from backing up until it's empty.
-     * File streams will be closed automatically when this method returns.
-     *
-     * @throws java.io.IOException When unable to read or write.
-     */
-    public void readAndClose() throws IOException {
-        try {
-            byte[] buf = new byte[4096];
-            int readCount;
-            while ((readCount = processStream.read(buf)) >= 0)
-                for (OutputStream outputStream : this.outputStreams.values()) {
-                    outputStream.write(buf, 0, readCount);
-                }
-        } finally {
-            for (StreamLocation location : this.outputStreams.keySet()) {
-                OutputStream outputStream = this.outputStreams.get(location);
-                outputStream.flush();
-                if (location != StreamLocation.Standard)
-                    IOUtils.closeQuietly(outputStream);
-            }
-        }
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/runtime/InputStreamSettings.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/runtime/InputStreamSettings.java
deleted file mode 100644
index 56bfabd..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/runtime/InputStreamSettings.java
+++ /dev/null
@@ -1,116 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.runtime;
-
-import java.io.File;
-import java.util.Collections;
-import java.util.EnumSet;
-import java.util.Set;
-
-/**
- * Settings that define text to write to the process stdin.
- */
-public class InputStreamSettings {
-    private final EnumSet<StreamLocation> streamLocations = EnumSet.noneOf(StreamLocation.class);
-    private byte[] inputBuffer;
-    private File inputFile;
-
-    public InputStreamSettings() {
-    }
-
-    /**
-     * @param inputBuffer String to write to stdin.
-     */
-    public InputStreamSettings(String inputBuffer) {
-        setInputBuffer(inputBuffer);
-    }
-
-    /**
-     * @param inputFile File to write to stdin.
-     */
-    public InputStreamSettings(File inputFile) {
-        setInputFile(inputFile);
-    }
-
-    /**
-     * @param inputBuffer String to write to stdin.
-     * @param inputFile   File to write to stdin.
-     */
-    public InputStreamSettings(byte[] inputBuffer, File inputFile) {
-        setInputBuffer(inputBuffer);
-        setInputFile(inputFile);
-    }
-
-    public Set<StreamLocation> getStreamLocations() {
-        return Collections.unmodifiableSet(streamLocations);
-    }
-
-    public byte[] getInputBuffer() {
-        return inputBuffer;
-    }
-
-    public void setInputBuffer(String inputBuffer) {
-        if (inputBuffer == null)
-            throw new IllegalArgumentException("inputBuffer cannot be null");
-        this.streamLocations.add(StreamLocation.Buffer);
-        this.inputBuffer = inputBuffer.getBytes();
-    }
-
-    public void setInputBuffer(byte[] inputBuffer) {
-        if (inputBuffer == null)
-            throw new IllegalArgumentException("inputBuffer cannot be null");
-        this.streamLocations.add(StreamLocation.Buffer);
-        this.inputBuffer = inputBuffer;
-    }
-
-    public void clearInputBuffer() {
-        this.streamLocations.remove(StreamLocation.Buffer);
-        this.inputBuffer = null;
-    }
-
-    public File getInputFile() {
-        return inputFile;
-    }
-
-    public void setInputFile(File inputFile) {
-        if (inputFile == null)
-            throw new IllegalArgumentException("inputFile cannot be null");
-        this.streamLocations.add(StreamLocation.File);
-        this.inputFile = inputFile;
-    }
-
-    public void clearInputFile() {
-        this.streamLocations.remove(StreamLocation.File);
-        this.inputFile = null;
-    }
-
-    public void setInputStandard(boolean inputStandard) {
-        if (inputStandard)
-            this.streamLocations.add(StreamLocation.Standard);
-        else
-            this.streamLocations.remove(StreamLocation.Standard);
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/runtime/OutputStreamSettings.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/runtime/OutputStreamSettings.java
deleted file mode 100644
index bc92291..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/runtime/OutputStreamSettings.java
+++ /dev/null
@@ -1,127 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.runtime;
-
-import java.io.File;
-import java.util.Collections;
-import java.util.EnumSet;
-import java.util.Set;
-
-/**
- * Settings that define text to capture from a process stream.
- */
-public class OutputStreamSettings {
-    private final EnumSet<StreamLocation> streamLocations = EnumSet.noneOf(StreamLocation.class);
-    private int bufferSize;
-    private File outputFile;
-    private boolean appendFile;
-
-    public OutputStreamSettings() {
-    }
-
-    /**
-     * @param bufferSize The number of bytes to capture, or -1 for unlimited.
-     */
-    public OutputStreamSettings(int bufferSize) {
-        setBufferSize(bufferSize);
-    }
-
-    /**
-     * @param outputFile The file to write output to.
-     */
-    public OutputStreamSettings(File outputFile) {
-        setOutputFile(outputFile);
-    }
-
-    /**
-     * @param outputFile The file to write output to.
-     * @param append     true if the output file should be appended to.
-     */
-    public OutputStreamSettings(File outputFile, boolean append) {
-        setOutputFile(outputFile, append);
-    }
-
-    public OutputStreamSettings(int bufferSize, File outputFile, boolean appendFile) {
-        setBufferSize(bufferSize);
-        setOutputFile(outputFile, appendFile);
-    }
-
-    public Set<StreamLocation> getStreamLocations() {
-        return Collections.unmodifiableSet(streamLocations);
-    }
-
-    public int getBufferSize() {
-        return bufferSize;
-    }
-
-    public void setBufferSize(int bufferSize) {
-        this.streamLocations.add(StreamLocation.Buffer);
-        this.bufferSize = bufferSize;
-    }
-
-    public void clearBufferSize() {
-        this.streamLocations.remove(StreamLocation.Buffer);
-        this.bufferSize = 0;
-    }
-
-    public File getOutputFile() {
-        return outputFile;
-    }
-
-    public boolean isAppendFile() {
-        return appendFile;
-    }
-
-    /**
-     * Overwrites the outputFile with the process output.
-     *
-     * @param outputFile File to overwrite.
-     */
-    public void setOutputFile(File outputFile) {
-        setOutputFile(outputFile, false);
-    }
-
-    public void setOutputFile(File outputFile, boolean append) {
-        if (outputFile == null)
-            throw new IllegalArgumentException("outputFile cannot be null");
-        streamLocations.add(StreamLocation.File);
-        this.outputFile = outputFile;
-        this.appendFile = append;
-    }
-
-    public void clearOutputFile() {
-        streamLocations.remove(StreamLocation.File);
-        this.outputFile = null;
-        this.appendFile = false;
-    }
-
-    public void printStandard(boolean print) {
-        if (print)
-            this.streamLocations.add(StreamLocation.Standard);
-        else
-            this.streamLocations.remove(StreamLocation.Standard);
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/runtime/ProcessController.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/runtime/ProcessController.java
deleted file mode 100644
index 3955817..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/runtime/ProcessController.java
+++ /dev/null
@@ -1,387 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.runtime;
-
-import org.apache.commons.io.FileUtils;
-import org.apache.commons.io.IOUtils;
-import org.apache.commons.lang.StringUtils;
-import org.apache.log4j.Logger;
-import org.broadinstitute.gatk.utils.Utils;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-import org.broadinstitute.gatk.utils.exceptions.UserException;
-
-import java.io.ByteArrayInputStream;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
-import java.util.*;
-
-/**
- * Facade to Runtime.exec() and java.lang.Process.  Handles
- * running a process to completion and returns stdout and stderr
- * as strings.  Creates separate threads for reading stdout and stderr,
- * then reuses those threads for each process most efficient use is
- * to create one of these and use it repeatedly.  Instances are not
- * thread-safe, however.
- *
- * TODO: java.io sometimes zombies the backround threads locking up on read().
- * Supposedly NIO has better ways of interrupting a blocked stream but will
- * require a little bit of refactoring.
- *
- * @author Michael Koehrsen
- * @author Khalid Shakir
- */
-public class ProcessController {
-    private static Logger logger = Logger.getLogger(ProcessController.class);
-
-    private static enum ProcessStream {Stdout, Stderr}
-
-    // Tracks running processes.
-    private static final Set<ProcessController> running = Collections.synchronizedSet(new HashSet<ProcessController>());
-
-    // Tracks this running process.
-    private Process process;
-
-    // Threads that capture stdout and stderr
-    private final OutputCapture stdoutCapture;
-    private final OutputCapture stderrCapture;
-
-    // When a caller destroyes a controller a new thread local version will be created
-    private boolean destroyed = false;
-
-    // Communication channels with output capture threads
-
-    // Holds the stdout and stderr sent to the background capture threads
-    private final Map<ProcessStream, CapturedStreamOutput> toCapture =
-            new EnumMap<ProcessStream, CapturedStreamOutput>(ProcessStream.class);
-
-    // Holds the results of the capture from the background capture threads.
-    // May be the content via toCapture or an StreamOutput.EMPTY if the capture was interrupted.
-    private final Map<ProcessStream, StreamOutput> fromCapture =
-            new EnumMap<ProcessStream, StreamOutput>(ProcessStream.class);
-
-    // Useful for debugging if background threads have shut down correctly
-    private static int nextControllerId = 0;
-    private final int controllerId;
-
-    public ProcessController() {
-        // Start the background threads for this controller.
-        synchronized (running) {
-            controllerId = nextControllerId++;
-        }
-        stdoutCapture = new OutputCapture(ProcessStream.Stdout, controllerId);
-        stderrCapture = new OutputCapture(ProcessStream.Stderr, controllerId);
-        stdoutCapture.start();
-        stderrCapture.start();
-    }
-
-    /**
-     * Returns a thread local ProcessController.
-     * Should NOT be closed when finished so it can be reused by the thread.
-     *
-     * @return a thread local ProcessController.
-     */
-    public static ProcessController getThreadLocal() {
-        // If the local controller was destroyed get a fresh instance.
-        if (threadProcessController.get().destroyed)
-            threadProcessController.remove();
-        return threadProcessController.get();
-    }
-
-    /**
-     * Thread local process controller container.
-     */
-    private static final ThreadLocal<ProcessController> threadProcessController =
-            new ThreadLocal<ProcessController>() {
-                @Override
-                protected ProcessController initialValue() {
-                    return new ProcessController();
-                }
-            };
-
-    /**
-     * Similar to Runtime.exec() but drains the output and error streams.
-     *
-     * @param command Command to run.
-     * @return The result code.
-     */
-    public static int exec(String[] command) {
-        ProcessController controller = ProcessController.getThreadLocal();
-        return controller.exec(new ProcessSettings(command)).getExitValue();
-    }
-
-    /**
-     * Executes a command line program with the settings and waits for it to return,
-     * processing the output on a background thread.
-     *
-     * @param settings Settings to be run.
-     * @return The output of the command.
-     */
-    public ProcessOutput exec(ProcessSettings settings) {
-        if (destroyed)
-            throw new IllegalStateException("This controller was destroyed");
-
-        ProcessBuilder builder = new ProcessBuilder(settings.getCommand());
-        builder.directory(settings.getDirectory());
-
-        Map<String, String> settingsEnvironment = settings.getEnvironment();
-        if (settingsEnvironment != null) {
-            Map<String, String> builderEnvironment = builder.environment();
-            builderEnvironment.clear();
-            builderEnvironment.putAll(settingsEnvironment);
-        }
-
-        builder.redirectErrorStream(settings.isRedirectErrorStream());
-
-        StreamOutput stdout = null;
-        StreamOutput stderr = null;
-
-        // Start the process running.
-
-        try {
-            synchronized (toCapture) {
-                process = builder.start();
-            }
-            running.add(this);
-        } catch (IOException e) {
-            String message = String.format("Unable to start command: %s\nReason: %s",
-                    StringUtils.join(builder.command(), " "),
-                    e.getMessage());
-            throw new ReviewedGATKException(message);
-        }
-
-        int exitCode;
-
-        try {
-            // Notify the background threads to start capturing.
-            synchronized (toCapture) {
-                toCapture.put(ProcessStream.Stdout,
-                        new CapturedStreamOutput(settings.getStdoutSettings(), process.getInputStream(), System.out));
-                toCapture.put(ProcessStream.Stderr,
-                        new CapturedStreamOutput(settings.getStderrSettings(), process.getErrorStream(), System.err));
-                toCapture.notifyAll();
-            }
-
-            // Write stdin content
-            InputStreamSettings stdinSettings = settings.getStdinSettings();
-            Set<StreamLocation> streamLocations = stdinSettings.getStreamLocations();
-            if (!streamLocations.isEmpty()) {
-                try {
-                    OutputStream stdinStream = process.getOutputStream();
-                    for (StreamLocation location : streamLocations) {
-                        InputStream inputStream;
-                        switch (location) {
-                            case Buffer:
-                                inputStream = new ByteArrayInputStream(stdinSettings.getInputBuffer());
-                                break;
-                            case File:
-                                try {
-                                    inputStream = FileUtils.openInputStream(stdinSettings.getInputFile());
-                                } catch (IOException e) {
-                                    throw new UserException.BadInput(e.getMessage());
-                                }
-                                break;
-                            case Standard:
-                                inputStream = System.in;
-                                break;
-                            default:
-                                throw new ReviewedGATKException("Unexpected stream location: " + location);
-                        }
-                        try {
-                            IOUtils.copy(inputStream, stdinStream);
-                        } finally {
-                            if (location != StreamLocation.Standard)
-                                IOUtils.closeQuietly(inputStream);
-                        }
-                    }
-                    stdinStream.flush();
-                } catch (IOException e) {
-                    throw new ReviewedGATKException("Error writing to stdin on command: " + StringUtils.join(builder.command(), " "), e);
-                }
-            }
-
-            // Wait for the process to complete.
-            try {
-                process.getOutputStream().close();
-                process.waitFor();
-            } catch (IOException e) {
-                throw new ReviewedGATKException("Unable to close stdin on command: " + StringUtils.join(builder.command(), " "), e);
-            } catch (InterruptedException e) {
-                throw new ReviewedGATKException("Process interrupted", e);
-            } finally {
-                while (!destroyed && stdout == null || stderr == null) {
-                    synchronized (fromCapture) {
-                        if (fromCapture.containsKey(ProcessStream.Stdout))
-                            stdout = fromCapture.remove(ProcessStream.Stdout);
-                        if (fromCapture.containsKey(ProcessStream.Stderr))
-                            stderr = fromCapture.remove(ProcessStream.Stderr);
-                        try {
-                            if (stdout == null || stderr == null)
-                                fromCapture.wait();
-                        } catch (InterruptedException e) {
-                            // Log the error, ignore the interrupt and wait patiently
-                            // for the OutputCaptures to (via finally) return their
-                            // stdout and stderr.
-                            logger.error(e);
-                        }
-                    }
-                }
-
-                if (destroyed) {
-                    if (stdout == null)
-                        stdout = StreamOutput.EMPTY;
-                    if (stderr == null)
-                        stderr = StreamOutput.EMPTY;
-                }
-            }
-        } finally {
-            synchronized (toCapture) {
-                exitCode = process.exitValue();
-                process = null;
-            }
-            running.remove(this);
-        }
-
-        return new ProcessOutput(exitCode, stdout, stderr);
-    }
-
-    /**
-     * Executes a command line program with the settings and waits for it to return,
-     * processing the output on a background thread.
-     *
-     * Throws an IOException if the ProcessOutput exit code is nonzero
-     *
-     * @param settings Settings to be run.
-     */
-    public ProcessOutput execAndCheck(ProcessSettings settings) throws IOException {
-        ProcessOutput po = exec(settings);
-        if (po.getExitValue() != 0) {
-            String message = String.format("Process exited with %d\nCommand Line: %s",
-                    po.getExitValue(),
-                    Utils.join(" ", settings.getCommand()));
-            throw new IOException(message);
-        }
-        return po;
-    }
-
-    /**
-     * @return The set of still running processes.
-     */
-    public static Set<ProcessController> getRunning() {
-        synchronized (running) {
-            return new HashSet<ProcessController>(running);
-        }
-    }
-
-    /**
-     * Stops the process from running and tries to ensure process is cleaned up properly.
-     * NOTE: sub-processes started by process may be zombied with their parents set to pid 1.
-     * NOTE: capture threads may block on read.
-     * TODO: Try to use NIO to interrupt streams.
-     */
-    public void tryDestroy() {
-        destroyed = true;
-        synchronized (toCapture) {
-            if (process != null) {
-                process.destroy();
-                IOUtils.closeQuietly(process.getInputStream());
-                IOUtils.closeQuietly(process.getErrorStream());
-            }
-            stdoutCapture.interrupt();
-            stderrCapture.interrupt();
-            toCapture.notifyAll();
-        }
-    }
-
-    @Override
-    protected void finalize() throws Throwable {
-        try {
-            tryDestroy();
-        } catch (Exception e) {
-            logger.error(e);
-        }
-        super.finalize();
-    }
-
-    private class OutputCapture extends Thread {
-        private final int controllerId;
-        private final ProcessStream key;
-
-        /**
-         * Reads in the output of a stream on a background thread to keep the output pipe from backing up and freezing the called process.
-         *
-         * @param key The stdout or stderr key for this output capture.
-         * @param controllerId Unique id of the controller.
-         */
-        public OutputCapture(ProcessStream key, int controllerId) {
-            super(String.format("OutputCapture-%d-%s-%s-%d", controllerId, key.name().toLowerCase(),
-                    Thread.currentThread().getName(), Thread.currentThread().getId()));
-            this.controllerId = controllerId;
-            this.key = key;
-            setDaemon(true);
-        }
-
-        /**
-         * Runs the capture.
-         */
-        @Override
-        public void run() {
-            while (!destroyed) {
-                StreamOutput processStream = StreamOutput.EMPTY;
-                try {
-                    // Wait for a new input stream to be passed from this process controller.
-                    CapturedStreamOutput capturedProcessStream = null;
-                    while (!destroyed && capturedProcessStream == null) {
-                        synchronized (toCapture) {
-                            if (toCapture.containsKey(key)) {
-                                capturedProcessStream = toCapture.remove(key);
-                            } else {
-                                toCapture.wait();
-                            }
-                        }
-                    }
-
-                    if (!destroyed) {
-                        // Read in the input stream
-                        processStream = capturedProcessStream;
-                        capturedProcessStream.readAndClose();
-                    }
-                } catch (InterruptedException e) {
-                    logger.info("OutputCapture interrupted, exiting");
-                    break;
-                } catch (IOException e) {
-                    logger.error("Error reading process output", e);
-                } finally {
-                    // Send the string back to the process controller.
-                    synchronized (fromCapture) {
-                        fromCapture.put(key, processStream);
-                        fromCapture.notify();
-                    }
-                }
-            }
-        }
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/runtime/ProcessOutput.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/runtime/ProcessOutput.java
deleted file mode 100644
index 9276de7..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/runtime/ProcessOutput.java
+++ /dev/null
@@ -1,57 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.runtime;
-
-public class ProcessOutput {
-    private final int exitValue;
-    private final StreamOutput stdout;
-    private final StreamOutput stderr;
-
-    /**
-     * The output of a process.
-     *
-     * @param exitValue The exit value.
-     * @param stdout    The capture of stdout as defined by the stdout OutputStreamSettings.
-     * @param stderr    The capture of stderr as defined by the stderr OutputStreamSettings.
-     */
-    public ProcessOutput(int exitValue, StreamOutput stdout, StreamOutput stderr) {
-        this.exitValue = exitValue;
-        this.stdout = stdout;
-        this.stderr = stderr;
-    }
-
-    public int getExitValue() {
-        return exitValue;
-    }
-
-    public StreamOutput getStdout() {
-        return stdout;
-    }
-
-    public StreamOutput getStderr() {
-        return stderr;
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/runtime/ProcessSettings.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/runtime/ProcessSettings.java
deleted file mode 100644
index 7027b9d..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/runtime/ProcessSettings.java
+++ /dev/null
@@ -1,140 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.runtime;
-
-
-import java.io.File;
-import java.util.Map;
-
-public class ProcessSettings {
-    private String[] command;
-    private Map<String, String> environment;
-    private File directory;
-    private boolean redirectErrorStream;
-    private InputStreamSettings stdinSettings;
-    private OutputStreamSettings stdoutSettings;
-    private OutputStreamSettings stderrSettings;
-
-    /**
-     * @param command Command line to run.
-     */
-    public ProcessSettings(String[] command) {
-        this(command, false, null, null, null, null, null);
-    }
-
-    /**
-     * @param command             Command line to run.
-     * @param redirectErrorStream true if stderr should be sent to stdout.
-     * @param environment         Environment settings to override System.getEnv, or null to use System.getEnv.
-     * @param directory           The directory to run the command in, or null to run in the current directory.
-     * @param stdinSettings       Settings for writing to the process stdin.
-     * @param stdoutSettings      Settings for capturing the process stdout.
-     * @param stderrSettings      Setting for capturing the process stderr.
-     */
-    public ProcessSettings(String[] command, boolean redirectErrorStream, File directory, Map<String, String> environment,
-                           InputStreamSettings stdinSettings, OutputStreamSettings stdoutSettings, OutputStreamSettings stderrSettings) {
-        this.command = checkCommand(command);
-        this.redirectErrorStream = redirectErrorStream;
-        this.directory = directory;
-        this.environment = environment;
-        this.stdinSettings = checkSettings(stdinSettings);
-        this.stdoutSettings = checkSettings(stdoutSettings);
-        this.stderrSettings = checkSettings(stderrSettings);
-    }
-
-    public String[] getCommand() {
-        return command;
-    }
-
-    public void setCommand(String[] command) {
-        this.command = checkCommand(command);
-    }
-
-    public boolean isRedirectErrorStream() {
-        return redirectErrorStream;
-    }
-
-    public void setRedirectErrorStream(boolean redirectErrorStream) {
-        this.redirectErrorStream = redirectErrorStream;
-    }
-
-    public File getDirectory() {
-        return directory;
-    }
-
-    public void setDirectory(File directory) {
-        this.directory = directory;
-    }
-
-    public Map<String, String> getEnvironment() {
-        return environment;
-    }
-
-    public void setEnvironment(Map<String, String> environment) {
-        this.environment = environment;
-    }
-
-    public InputStreamSettings getStdinSettings() {
-        return stdinSettings;
-    }
-
-    public void setStdinSettings(InputStreamSettings stdinSettings) {
-        this.stdinSettings = checkSettings(stdinSettings);
-    }
-
-    public OutputStreamSettings getStdoutSettings() {
-        return stdoutSettings;
-    }
-
-    public void setStdoutSettings(OutputStreamSettings stdoutSettings) {
-        this.stdoutSettings = checkSettings(stdoutSettings);
-    }
-
-    public OutputStreamSettings getStderrSettings() {
-        return stderrSettings;
-    }
-
-    public void setStderrSettings(OutputStreamSettings stderrSettings) {
-        this.stderrSettings = checkSettings(stderrSettings);
-    }
-
-    protected String[] checkCommand(String[] command) {
-        if (command == null)
-            throw new IllegalArgumentException("Command is not allowed to be null");
-        for (String s: command)
-            if (s == null)
-                throw new IllegalArgumentException("Command is not allowed to contain nulls");
-        return command;
-    }
-
-    protected InputStreamSettings checkSettings(InputStreamSettings settings) {
-        return settings == null ? new InputStreamSettings() : settings;
-    }
-
-    protected OutputStreamSettings checkSettings(OutputStreamSettings settings) {
-        return settings == null ? new OutputStreamSettings() : settings;
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/runtime/RuntimeUtils.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/runtime/RuntimeUtils.java
deleted file mode 100644
index 7a982dd..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/runtime/RuntimeUtils.java
+++ /dev/null
@@ -1,77 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.runtime;
-
-import org.apache.commons.lang.StringUtils;
-
-import java.io.File;
-import java.util.ArrayList;
-import java.util.List;
-
-public class RuntimeUtils {
-    public static final String[] PATHS;
-
-    static {
-        String path = System.getenv("PATH");
-        if (path == null)
-            path = System.getenv("path");
-        if (path == null) {
-            PATHS = new String[0];
-        } else {
-            PATHS = StringUtils.split(path, File.pathSeparatorChar);
-        }
-    }
-
-    /**
-     * Returns the path to an executable or null if it doesn't exist.
-     * @param executable Relative path
-     * @return The absolute file path.
-     */
-    public static File which(String executable) {
-        for (String path: PATHS) {
-            File file = new File(path, executable);
-            if (file.exists())
-                return file.getAbsoluteFile();
-        }
-        return null;
-    }
-
-    /**
-     * Return the current classpath as a list of absolute paths
-     * @return
-     */
-    public static List<String> getAbsoluteClassPaths() {
-        final String[] relativeClassPaths = System.getProperty("java.class.path").split(File.pathSeparator);
-        final List<String> absoluteClassPaths = new ArrayList<>(relativeClassPaths.length);
-        for (String classPath : relativeClassPaths) {
-            File cp = new File(classPath);
-            if (cp.exists())
-                absoluteClassPaths.add(cp.getAbsolutePath());
-        }
-
-        return absoluteClassPaths;
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/runtime/StreamLocation.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/runtime/StreamLocation.java
deleted file mode 100644
index 37d66f0..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/runtime/StreamLocation.java
+++ /dev/null
@@ -1,33 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.runtime;
-
-/**
- * Where to read/write a stream
- */
-public enum StreamLocation {
-    Buffer, File, Standard
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/runtime/StreamOutput.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/runtime/StreamOutput.java
deleted file mode 100644
index 9ce039e..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/runtime/StreamOutput.java
+++ /dev/null
@@ -1,69 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.runtime;
-
-/**
- * The content of stdout or stderr.
- */
-public abstract class StreamOutput {
-    /**
-     * Empty stream output when no output is captured due to an error.
-     */
-    public static final StreamOutput EMPTY = new StreamOutput() {
-        @Override
-        public byte[] getBufferBytes() {
-            return new byte[0];
-        }
-
-        @Override
-        public boolean isBufferTruncated() {
-            return false;
-        }
-    };
-
-    /**
-     * Returns the content as a string.
-     *
-     * @return The content as a string.
-     */
-    public String getBufferString() {
-        return new String(getBufferBytes());
-    }
-
-    /**
-     * Returns the content as a string.
-     *
-     * @return The content as a string.
-     */
-    public abstract byte[] getBufferBytes();
-
-    /**
-     * Returns true if the buffer was truncated.
-     *
-     * @return true if the buffer was truncated.
-     */
-    public abstract boolean isBufferTruncated();
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/sam/AlignmentStartComparator.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/sam/AlignmentStartComparator.java
deleted file mode 100644
index 7e926d5..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/sam/AlignmentStartComparator.java
+++ /dev/null
@@ -1,50 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.sam;
-
-import htsjdk.samtools.SAMRecord;
-
-import java.util.Comparator;
-
-/**
- * Compares two SAMRecords only the basis on alignment start.  Note that
- * comparisons are performed ONLY on the basis of alignment start; any
- * two SAM records with the same alignment start will be considered equal.
- *
- * Unmapped alignments will all be considered equal.
- *
- * @author mhanna
- * @version 0.1
- */
-public class AlignmentStartComparator implements Comparator<SAMRecord> {
-    public int compare(SAMRecord lhs, SAMRecord rhs) {
-        if(!lhs.getReferenceIndex().equals(rhs.getReferenceIndex()))
-            return lhs.getReferenceIndex() - rhs.getReferenceIndex();
-
-        // Note: no integer overflow here because alignment starts are >= 0.
-        return lhs.getAlignmentStart() - rhs.getAlignmentStart();
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/sam/AlignmentStartWithNoTiesComparator.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/sam/AlignmentStartWithNoTiesComparator.java
deleted file mode 100644
index db3f458..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/sam/AlignmentStartWithNoTiesComparator.java
+++ /dev/null
@@ -1,73 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.sam;
-
-import com.google.java.contract.Ensures;
-import com.google.java.contract.Requires;
-import htsjdk.samtools.SAMRecord;
-
-import java.util.Comparator;
-
-public class AlignmentStartWithNoTiesComparator implements Comparator<SAMRecord> {
-    @Requires("c1 >= 0 && c2 >= 0")
-    @Ensures("result == 0 || result == 1 || result == -1")
-    private int compareContigs(int c1, int c2) {
-        if (c1 == c2)
-            return 0;
-        else if (c1 > c2)
-            return 1;
-        return -1;
-    }
-
-    @Requires("r1 != null && r2 != null")
-    @Ensures("result == 0 || result == 1 || result == -1")
-    public int compare(SAMRecord r1, SAMRecord r2) {
-        int result;
-
-        if (r1 == r2)
-            result = 0;
-
-        else if (r1.getReadUnmappedFlag())
-            result = 1;
-        else if (r2.getReadUnmappedFlag())
-            result = -1;
-        else {
-            final int cmpContig = compareContigs(r1.getReferenceIndex(), r2.getReferenceIndex());
-
-            if (cmpContig != 0)
-                result = cmpContig;
-
-            else {
-                if (r1.getAlignmentStart() < r2.getAlignmentStart())
-                    result = -1;
-                else
-                    result = 1;
-            }
-        }
-
-        return result;
-    }
-}
\ No newline at end of file
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/sam/AlignmentUtils.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/sam/AlignmentUtils.java
deleted file mode 100644
index 4c9a444..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/sam/AlignmentUtils.java
+++ /dev/null
@@ -1,1337 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.sam;
-
-import com.google.java.contract.Ensures;
-import com.google.java.contract.Requires;
-import htsjdk.samtools.Cigar;
-import htsjdk.samtools.CigarElement;
-import htsjdk.samtools.CigarOperator;
-import htsjdk.samtools.SAMRecord;
-import org.broadinstitute.gatk.utils.BaseUtils;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-import org.broadinstitute.gatk.utils.haplotype.Haplotype;
-import org.broadinstitute.gatk.utils.pileup.PileupElement;
-import org.broadinstitute.gatk.utils.recalibration.EventType;
-import org.broadinstitute.gatk.utils.smithwaterman.SWPairwiseAlignment;
-
-import java.util.*;
-
-
-public final class AlignmentUtils {
-    private final static EnumSet<CigarOperator> ALIGNED_TO_GENOME_OPERATORS = EnumSet.of(CigarOperator.M, CigarOperator.EQ, CigarOperator.X);
-    private final static EnumSet<CigarOperator> ALIGNED_TO_GENOME_PLUS_SOFTCLIPS = EnumSet.of(CigarOperator.M, CigarOperator.EQ, CigarOperator.X, CigarOperator.S);
-    public final static String HAPLOTYPE_TAG = "HC";
-
-    // cannot be instantiated
-    private AlignmentUtils() { }
-
-    /**
-     * Does cigar start or end with a deletion operation?
-     *
-     * @param cigar a non-null cigar to test
-     * @return true if the first or last operator of cigar is a D
-     */
-    public static boolean startsOrEndsWithInsertionOrDeletion(final Cigar cigar) {
-        if ( cigar == null ) throw new IllegalArgumentException("Cigar cannot be null");
-
-        if ( cigar.isEmpty() )
-            return false;
-
-        final CigarOperator first = cigar.getCigarElement(0).getOperator();
-        final CigarOperator last = cigar.getCigarElement(cigar.numCigarElements()-1).getOperator();
-        return first == CigarOperator.D || first == CigarOperator.I || last == CigarOperator.D || last == CigarOperator.I;
-    }
-
-    /**
-     * Aligns reads the haplotype, and then projects this alignment of read -> hap onto the reference
-     * via the alignment of haplotype (via its getCigar) method.
-     *
-     * @param originalRead the read we want to write aligned to the reference genome
-     * @param haplotype the haplotype that the read should be aligned to, before aligning to the reference
-     * @param referenceStart the start of the reference that haplotype is aligned to.  Provides global coordinate frame.
-     * @param isInformative true if the read is differentially informative for one of the haplotypes
-     *
-     * @throws IllegalArgumentException if {@code originalRead} is {@code null} or {@code haplotype} is {@code null} or it
-     *   does not have a Cigar or the {@code referenceStart} is invalid (less than 1).
-     *
-     * @return a GATKSAMRecord aligned to reference. Never {@code null}.
-     */
-    public static GATKSAMRecord createReadAlignedToRef(final GATKSAMRecord originalRead,
-                                                       final Haplotype haplotype,
-                                                       final int referenceStart,
-                                                       final boolean isInformative) {
-        if ( originalRead == null ) throw new IllegalArgumentException("originalRead cannot be null");
-        if ( haplotype == null ) throw new IllegalArgumentException("haplotype cannot be null");
-        if ( haplotype.getCigar() == null ) throw new IllegalArgumentException("Haplotype cigar not set " + haplotype);
-        if ( referenceStart < 1 ) throw new IllegalArgumentException("reference start much be >= 1 but got " + referenceStart);
-
-        // compute the smith-waterman alignment of read -> haplotype
-        final SWPairwiseAlignment swPairwiseAlignment = new SWPairwiseAlignment(haplotype.getBases(), originalRead.getReadBases(), CigarUtils.NEW_SW_PARAMETERS);
-        if ( swPairwiseAlignment.getAlignmentStart2wrt1() == -1 )
-            // sw can fail (reasons not clear) so if it happens just don't realign the read
-            return originalRead;
-        final Cigar swCigar = consolidateCigar(swPairwiseAlignment.getCigar());
-
-        // since we're modifying the read we need to clone it
-        final GATKSAMRecord read = (GATKSAMRecord)originalRead.clone();
-
-        // only informative reads are given the haplotype tag to enhance visualization
-        if ( isInformative )
-            read.setAttribute(HAPLOTYPE_TAG, haplotype.hashCode());
-
-        // compute here the read starts w.r.t. the reference from the SW result and the hap -> ref cigar
-        final Cigar extendedHaplotypeCigar = haplotype.getConsolidatedPaddedCigar(1000);
-        final int readStartOnHaplotype = calcFirstBaseMatchingReferenceInCigar(extendedHaplotypeCigar, swPairwiseAlignment.getAlignmentStart2wrt1());
-        final int readStartOnReference = referenceStart + haplotype.getAlignmentStartHapwrtRef() + readStartOnHaplotype;
-        read.setAlignmentStart(readStartOnReference);
-        read.resetSoftStartAndEnd();
-
-        // compute the read -> ref alignment by mapping read -> hap -> ref from the
-        // SW of read -> hap mapped through the given by hap -> ref
-        final Cigar haplotypeToRef = trimCigarByBases(extendedHaplotypeCigar, swPairwiseAlignment.getAlignmentStart2wrt1(), extendedHaplotypeCigar.getReadLength() - 1);
-        final Cigar readToRefCigarRaw = applyCigarToCigar(swCigar, haplotypeToRef);
-        final Cigar readToRefCigarClean = cleanUpCigar(readToRefCigarRaw);
-        final Cigar readToRefCigar = leftAlignIndel(readToRefCigarClean, haplotype.getBases(),
-                originalRead.getReadBases(), swPairwiseAlignment.getAlignmentStart2wrt1(), 0, true);
-
-        read.setCigar(readToRefCigar);
-
-        if ( readToRefCigar.getReadLength() != read.getReadLength() )
-            throw new IllegalStateException("Cigar " + readToRefCigar + " with read length " + readToRefCigar.getReadLength()
-                    + " != read length " + read.getReadLength() + " for read " + read.format() + "\nhapToRef " + haplotypeToRef + " length " + haplotypeToRef.getReadLength() + "/" + haplotypeToRef.getReferenceLength()
-                    + "\nreadToHap " + swCigar + " length " + swCigar.getReadLength() + "/" + swCigar.getReferenceLength());
-
-        return read;
-    }
-
-
-
-    /**
-     * Get the byte[] from bases that cover the reference interval refStart -> refEnd given the
-     * alignment of bases to the reference (basesToRefCigar) and the start offset of the bases on the reference
-     *
-     * refStart and refEnd are 0 based offsets that we want to obtain.  In the client code, if the reference
-     * bases start at position X and you want Y -> Z, refStart should be Y - X and refEnd should be Z - X.
-     *
-     * If refStart or refEnd would start or end the new bases within a deletion, this function will return null
-     *
-     * @param bases
-     * @param refStart
-     * @param refEnd
-     * @param basesStartOnRef where does the bases array start w.r.t. the reference start?  For example, bases[0] of
-     *                        could be at refStart == 0 if basesStartOnRef == 0, but it could just as easily be at
-     *                        10 (meaning bases doesn't fully span the reference), which would be indicated by basesStartOnRef == 10.
-     *                        It's not trivial to eliminate this parameter because it's tied up with the cigar
-     * @param basesToRefCigar the cigar that maps the bases to the reference genome
-     * @return a byte[] containing the bases covering this interval, or null if we would start or end within a deletion
-     */
-    public static byte[] getBasesCoveringRefInterval(final int refStart, final int refEnd, final byte[] bases, final int basesStartOnRef, final Cigar basesToRefCigar) {
-        if ( refStart < 0 || refEnd < refStart ) throw new IllegalArgumentException("Bad start " + refStart + " and/or stop " + refEnd);
-        if ( basesStartOnRef < 0 ) throw new IllegalArgumentException("BasesStartOnRef must be >= 0 but got " + basesStartOnRef);
-        if ( bases == null ) throw new IllegalArgumentException("Bases cannot be null");
-        if ( basesToRefCigar == null ) throw new IllegalArgumentException("basesToRefCigar cannot be null");
-        if ( bases.length != basesToRefCigar.getReadLength() ) throw new IllegalArgumentException("Mismatch in length between reference bases " + bases.length + " and cigar length " + basesToRefCigar);
-
-        int refPos = basesStartOnRef;
-        int basesPos = 0;
-        int basesStart = -1;
-        int basesStop = -1;
-        boolean done = false;
-
-        for ( int iii = 0; ! done && iii < basesToRefCigar.numCigarElements(); iii++ ) {
-            final CigarElement ce = basesToRefCigar.getCigarElement(iii);
-            switch ( ce.getOperator() ) {
-                case I:
-                    basesPos += ce.getLength();
-                    break;
-                case M: case X: case EQ:
-                    for ( int i = 0; i < ce.getLength(); i++ ) {
-                        if ( refPos == refStart )
-                            basesStart = basesPos;
-                        if ( refPos == refEnd ) {
-                            basesStop = basesPos;
-                            done = true;
-                            break;
-                        }
-                        refPos++;
-                        basesPos++;
-                    }
-                    break;
-                case D:
-                    for ( int i = 0; i < ce.getLength(); i++ ) {
-                        if ( refPos == refEnd || refPos == refStart ) {
-                            // if we ever reach a ref position that is either a start or an end, we fail
-                            return null;
-                        }
-                        refPos++;
-                    }
-                    break;
-                default:
-                    throw new IllegalStateException("Unsupported operator " + ce);
-            }
-        }
-
-        if ( basesStart == -1 || basesStop == -1 )
-            throw new IllegalStateException("Never found start " + basesStart + " or stop " + basesStop + " given cigar " + basesToRefCigar);
-
-        return Arrays.copyOfRange(bases, basesStart, basesStop + 1);
-    }
-
-    /**
-     * Get the number of bases at which refSeq and readSeq differ, given their alignment
-     *
-     * @param cigar the alignment of readSeq to refSeq
-     * @param refSeq the bases of the reference sequence
-     * @param readSeq the bases of the read sequence
-     * @return the number of bases that differ between refSeq and readSeq
-     */
-    public static int calcNumDifferentBases(final Cigar cigar, final byte[] refSeq, final byte[] readSeq) {
-        int refIndex = 0, readIdx = 0, delta = 0;
-
-        for (final CigarElement ce : cigar.getCigarElements()) {
-            final int elementLength = ce.getLength();
-            switch (ce.getOperator()) {
-                case X:case EQ:case M:
-                    for (int j = 0; j < elementLength; j++, refIndex++, readIdx++)
-                        delta += refSeq[refIndex] != readSeq[readIdx] ? 1 : 0;
-                    break;
-                case I:
-                    delta += elementLength;
-                case S:
-                    readIdx += elementLength;
-                    break;
-                case D:
-                    delta += elementLength;
-                case N:
-                    refIndex += elementLength;
-                    break;
-                case H:
-                case P:
-                    break;
-                default:
-                    throw new ReviewedGATKException("The " + ce.getOperator() + " cigar element is not currently supported");
-            }
-        }
-
-        return delta;
-    }
-
-    public static class MismatchCount {
-        public int numMismatches = 0;
-        public long mismatchQualities = 0;
-    }
-
-    public static long mismatchingQualities(GATKSAMRecord r, byte[] refSeq, int refIndex) {
-        return getMismatchCount(r, refSeq, refIndex).mismatchQualities;
-    }
-
-    /**
-     * @see #getMismatchCount(GATKSAMRecord, byte[], int, int, int) with startOnRead == 0 and nReadBases == read.getReadLength()
-     */
-    public static MismatchCount getMismatchCount(GATKSAMRecord r, byte[] refSeq, int refIndex) {
-        return getMismatchCount(r, refSeq, refIndex, 0, r.getReadLength());
-    }
-
-    // todo -- this code and mismatchesInRefWindow should be combined and optimized into a single
-    // todo -- high performance implementation.  We can do a lot better than this right now
-
-    /**
-     * Count how many bases mismatch the reference.  Indels are not considered mismatching.
-     *
-     * @param r                   the sam record to check against
-     * @param refSeq              the byte array representing the reference sequence
-     * @param refIndex            the index in the reference byte array of the read's first base (the reference index
-     *                            is matching the alignment start, there may be tons of soft-clipped bases before/after
-     *                            that so it's wrong to compare with getReadLength() here.).  Note that refIndex is
-     *                            zero based, not 1 based
-     * @param startOnRead         the index in the read's bases from which we start counting
-     * @param nReadBases          the number of bases after (but including) startOnRead that we check
-     * @return non-null object representing the mismatch count
-     */
-    @Ensures("result != null")
-    public static MismatchCount getMismatchCount(GATKSAMRecord r, byte[] refSeq, int refIndex, int startOnRead, int nReadBases) {
-        if ( r == null ) throw new IllegalArgumentException("attempting to calculate the mismatch count from a read that is null");
-        if ( refSeq == null ) throw new IllegalArgumentException("attempting to calculate the mismatch count with a reference sequence that is null");
-        if ( refIndex < 0 ) throw new IllegalArgumentException("attempting to calculate the mismatch count with a reference index that is negative");
-        if ( startOnRead < 0 ) throw new IllegalArgumentException("attempting to calculate the mismatch count with a read start that is negative");
-        if ( nReadBases < 0 ) throw new IllegalArgumentException("attempting to calculate the mismatch count for a negative number of read bases");
-        if ( refSeq.length - refIndex < (r.getAlignmentEnd() - r.getAlignmentStart()) )
-            throw new IllegalArgumentException("attempting to calculate the mismatch count against a reference string that is smaller than the read");
-
-        MismatchCount mc = new MismatchCount();
-
-        int readIdx = 0;
-        final int endOnRead = startOnRead + nReadBases - 1; // index of the last base on read we want to count (note we are including soft-clipped bases with this math)
-        final byte[] readSeq = r.getReadBases();
-        final Cigar c = r.getCigar();
-        final byte[] readQuals = r.getBaseQualities();
-        for (final CigarElement ce : c.getCigarElements()) {
-
-            if (readIdx > endOnRead)
-                break;
-
-            final int elementLength = ce.getLength();
-            switch (ce.getOperator()) {
-                case X:
-                    mc.numMismatches += elementLength;
-                    for (int j = 0; j < elementLength; j++)
-                        mc.mismatchQualities += readQuals[readIdx+j];
-                case EQ:
-                    refIndex += elementLength;
-                    readIdx += elementLength;
-                break;
-                case M:
-                    for (int j = 0; j < elementLength; j++, refIndex++, readIdx++) {
-                        if (refIndex >= refSeq.length)
-                            continue;                      // TODO : It should never happen, we should throw exception here
-                        if (readIdx < startOnRead) continue;
-                        if (readIdx > endOnRead) break;
-                        byte refChr = refSeq[refIndex];
-                        byte readChr = readSeq[readIdx];
-                        // Note: we need to count X/N's as mismatches because that's what SAM requires
-                        //if ( BaseUtils.simpleBaseToBaseIndex(readChr) == -1 ||
-                        //     BaseUtils.simpleBaseToBaseIndex(refChr)  == -1 )
-                        //    continue; // do not count Ns/Xs/etc ?
-                        if (readChr != refChr) {
-                            mc.numMismatches++;
-                            mc.mismatchQualities += readQuals[readIdx];
-                        }
-                    }
-                    break;
-                case I:
-                case S:
-                    readIdx += elementLength;
-                    break;
-                case D:
-                case N:
-                    refIndex += elementLength;
-                    break;
-                case H:
-                case P:
-                    break;
-                default:
-                    throw new ReviewedGATKException("The " + ce.getOperator() + " cigar element is not currently supported");
-            }
-
-        }
-        return mc;
-    }
-
-    /**
-     * Returns number of alignment blocks (continuous stretches of aligned bases) in the specified alignment.
-     * This method follows closely the SAMRecord::getAlignmentBlocks() implemented in samtools library, but
-     * it only counts blocks without actually allocating and filling the list of blocks themselves. Hence, this method is
-     * a much more efficient alternative to r.getAlignmentBlocks.size() in the situations when this number is all that is needed.
-     * Formally, this method simply returns the number of M elements in the cigar.
-     *
-     * @param r alignment
-     * @return number of continuous alignment blocks (i.e. 'M' elements of the cigar; all indel and clipping elements are ignored).
-     */
-    @Ensures("result >= 0")
-    public static int getNumAlignmentBlocks(final SAMRecord r) {
-        if ( r == null ) throw new IllegalArgumentException("read cannot be null");
-        final Cigar cigar = r.getCigar();
-        if (cigar == null) return 0;
-
-        int n = 0;
-        for (final CigarElement e : cigar.getCigarElements()) {
-            if (ALIGNED_TO_GENOME_OPERATORS.contains(e.getOperator()))
-                n++;
-        }
-
-        return n;
-    }
-
-
-    /**
-     * Get the number of bases aligned to the genome, including soft clips
-     *
-     * If read is not mapped (i.e., doesn't have a cigar) returns 0
-     *
-     * @param r a non-null GATKSAMRecord
-     * @return the number of bases aligned to the genome in R, including soft clipped bases
-     */
-    public static int getNumAlignedBasesCountingSoftClips(final GATKSAMRecord r) {
-        int n = 0;
-        final Cigar cigar = r.getCigar();
-        if (cigar == null) return 0;
-
-        for (final CigarElement e : cigar.getCigarElements())
-            if (ALIGNED_TO_GENOME_PLUS_SOFTCLIPS.contains(e.getOperator()))
-                n += e.getLength();
-
-        return n;
-    }
-
-    /**
-     * Count the number of bases hard clipped from read
-     *
-     * If read's cigar is null, return 0
-     *
-     * @param r a non-null read
-     * @return a positive integer
-     */
-    @Ensures("result >= 0")
-    public static int getNumHardClippedBases(final SAMRecord r) {
-        if ( r == null ) throw new IllegalArgumentException("Read cannot be null");
-
-        int n = 0;
-        final Cigar cigar = r.getCigar();
-        if (cigar == null) return 0;
-
-        for (final CigarElement e : cigar.getCigarElements())
-            if (e.getOperator() == CigarOperator.H)
-                n += e.getLength();
-
-        return n;
-    }
-
-    /**
-     * Calculate the number of bases that are soft clipped in read with quality score greater than threshold
-     *
-     * Handles the case where the cigar is null (i.e., the read is unmapped), returning 0
-     *
-     * @param read a non-null GATKSAMRecord.
-     * @param qualThreshold consider bases with quals > this value as high quality.  Must be >= 0
-     * @return positive integer
-     */
-    @Ensures("result >= 0")
-    public static int calcNumHighQualitySoftClips( final GATKSAMRecord read, final byte qualThreshold ) {
-        if ( read == null ) throw new IllegalArgumentException("Read cannot be null");
-        if ( qualThreshold < 0 ) throw new IllegalArgumentException("Expected qualThreshold to be a positive byte but saw " + qualThreshold);
-
-        if ( read.getCigar() == null ) // the read is unmapped
-            return 0;
-
-        final byte[] qual = read.getBaseQualities( EventType.BASE_SUBSTITUTION );
-
-        int numHQSoftClips = 0;
-        int alignPos = 0;
-        for ( final CigarElement ce : read.getCigar().getCigarElements() ) {
-            final int elementLength = ce.getLength();
-
-            switch( ce.getOperator() ) {
-                case S:
-                    for( int jjj = 0; jjj < elementLength; jjj++ ) {
-                        if( qual[alignPos++] > qualThreshold ) { numHQSoftClips++; }
-                    }
-                    break;
-                case M: case I: case EQ: case X:
-                    alignPos += elementLength;
-                    break;
-                case H: case P: case D: case N:
-                    break;
-                default:
-                    throw new IllegalStateException("Unsupported cigar operator: " + ce.getOperator());
-            }
-        }
-
-        return numHQSoftClips;
-    }
-
-    public static int calcAlignmentByteArrayOffset(final Cigar cigar, final PileupElement pileupElement, final int alignmentStart, final int refLocus) {
-        return calcAlignmentByteArrayOffset( cigar, pileupElement.getOffset(), pileupElement.isDeletion(), alignmentStart, refLocus );
-    }
-
-    /**
-     * Calculate the index into the read's bases of the beginning of the encompassing cigar element for a given cigar and offset
-     *
-     * @param cigar            the read's CIGAR -- cannot be null
-     * @param offset           the offset to use for the calculation or -1 if in the middle of a deletion
-     * @param isDeletion       are we in the middle of a deletion?
-     * @param alignmentStart   the alignment start of the read
-     * @param refLocus         the reference position of the offset
-     * @return a non-negative int index
-     */
-    @Ensures("result >= 0")
-    public static int calcAlignmentByteArrayOffset(final Cigar cigar, final int offset, final boolean isDeletion, final int alignmentStart, final int refLocus) {
-        if ( cigar == null ) throw new IllegalArgumentException("attempting to find the alignment position from a CIGAR that is null");
-        if ( offset < -1 ) throw new IllegalArgumentException("attempting to find the alignment position with an offset that is negative (and not -1)");
-        if ( alignmentStart < 0 ) throw new IllegalArgumentException("attempting to find the alignment position from an alignment start that is negative");
-        if ( refLocus < 0 ) throw new IllegalArgumentException("attempting to find the alignment position from a reference position that is negative");
-        if ( offset >= cigar.getReadLength() ) throw new IllegalArgumentException("attempting to find the alignment position of an offset than is larger than the read length");
-
-        int pileupOffset = offset;
-
-        // Reassign the offset if we are in the middle of a deletion because of the modified representation of the read bases
-        if (isDeletion) {
-            pileupOffset = refLocus - alignmentStart;
-            final CigarElement ce = cigar.getCigarElement(0);
-            if (ce.getOperator() == CigarOperator.S) {
-                pileupOffset += ce.getLength();
-            }
-        }
-
-        int pos = 0;
-        int alignmentPos = 0;
-
-        for (int iii = 0; iii < cigar.numCigarElements(); iii++) {
-            final CigarElement ce = cigar.getCigarElement(iii);
-            final int elementLength = ce.getLength();
-
-            switch (ce.getOperator()) {
-                case I:
-                case S: // TODO -- I don't think that soft clips should be treated the same as inserted bases here. Investigation needed.
-                    pos += elementLength;
-                    if (pos >= pileupOffset) {
-                        return alignmentPos;
-                    }
-                    break;
-                case D:
-                    if (!isDeletion) {
-                        alignmentPos += elementLength;
-                    } else {
-                        if (pos + elementLength - 1 >= pileupOffset) {
-                            return alignmentPos + (pileupOffset - pos);
-                        } else {
-                            pos += elementLength;
-                            alignmentPos += elementLength;
-                        }
-                    }
-                    break;
-                case M:
-                case EQ:
-                case X:
-                    if (pos + elementLength - 1 >= pileupOffset) {
-                        return alignmentPos + (pileupOffset - pos);
-                    } else {
-                        pos += elementLength;
-                        alignmentPos += elementLength;
-                    }
-                    break;
-                case H:
-                case P:
-                case N:
-                    break;
-                default:
-                    throw new ReviewedGATKException("Unsupported cigar operator: " + ce.getOperator());
-            }
-        }
-
-        return alignmentPos;
-    }
-
-    /**
-     * Generate an array of bases for just those that are aligned to the reference (i.e. no clips or insertions)
-     *
-     * @param cigar            the read's CIGAR -- cannot be null
-     * @param read             the read's base array
-     * @return a non-null array of bases (bytes)
-     */
-    @Ensures("result != null")
-    public static byte[] readToAlignmentByteArray(final Cigar cigar, final byte[] read) {
-        if ( cigar == null ) throw new IllegalArgumentException("attempting to generate an alignment from a CIGAR that is null");
-        if ( read == null ) throw new IllegalArgumentException("attempting to generate an alignment from a read sequence that is null");
-
-        final int alignmentLength = cigar.getReferenceLength();
-        final byte[] alignment = new byte[alignmentLength];
-        int alignPos = 0;
-        int readPos = 0;
-        for (int iii = 0; iii < cigar.numCigarElements(); iii++) {
-
-            final CigarElement ce = cigar.getCigarElement(iii);
-            final int elementLength = ce.getLength();
-
-            switch (ce.getOperator()) {
-                case I:
-                    if (alignPos > 0) {
-                        final int prevPos = alignPos - 1;
-                        if (alignment[prevPos] == BaseUtils.Base.A.base) {
-                            alignment[prevPos] = PileupElement.A_FOLLOWED_BY_INSERTION_BASE;
-                        } else if (alignment[prevPos] == BaseUtils.Base.C.base) {
-                            alignment[prevPos] = PileupElement.C_FOLLOWED_BY_INSERTION_BASE;
-                        } else if (alignment[prevPos] == BaseUtils.Base.T.base) {
-                            alignment[prevPos] = PileupElement.T_FOLLOWED_BY_INSERTION_BASE;
-                        } else if (alignment[prevPos] == BaseUtils.Base.G.base) {
-                            alignment[prevPos] = PileupElement.G_FOLLOWED_BY_INSERTION_BASE;
-                        }
-                    }
-                case S:
-                    readPos += elementLength;
-                    break;
-                case D:
-                case N:
-                    for (int jjj = 0; jjj < elementLength; jjj++) {
-                        alignment[alignPos++] = PileupElement.DELETION_BASE;
-                    }
-                    break;
-                case M:
-                case EQ:
-                case X:
-                    for (int jjj = 0; jjj < elementLength; jjj++) {
-                        alignment[alignPos++] = read[readPos++];
-                    }
-                    break;
-                case H:
-                case P:
-                    break;
-                default:
-                    throw new ReviewedGATKException("Unsupported cigar operator: " + ce.getOperator());
-            }
-        }
-        return alignment;
-    }
-
-    /**
-     * Returns true if the read does not belong to a contig, i.e. it's location is GenomeLoc.UNMAPPED.
-     * NOTE: A read can have a mapped GenomeLoc and still have an unmapped flag!
-     *
-     * @param r record
-     * @return true if read is unmapped to a genome loc
-     */
-    public static boolean isReadGenomeLocUnmapped(final SAMRecord r) {
-        return SAMRecord.NO_ALIGNMENT_REFERENCE_NAME.equals(r.getReferenceName());
-    }
-
-    /**
-     * Due to (unfortunate) multiple ways to indicate that read is unmapped allowed by SAM format
-     * specification, one may need this convenience shortcut. Checks both 'read unmapped' flag and
-     * alignment reference index/start.
-     *
-     * Our life would be so much easier if all sam files followed the specs. In reality,
-     * sam files (including those generated by maq or bwa) miss headers altogether. When
-     * reading such a SAM file, reference name is set, but since there is no sequence dictionary,
-     * null is always returned for referenceIndex. Let's be paranoid here, and make sure that
-     * we do not call the read "unmapped" when it has only reference name set with ref. index missing
-     * or vice versa.
-     *
-     * @param r a non-null record
-     * @return true if read is unmapped
-     */
-    public static boolean isReadUnmapped(final SAMRecord r) {
-        if ( r == null )
-            throw new IllegalArgumentException("Read cannot be null");
-
-        return r.getReadUnmappedFlag() ||
-               !((r.getReferenceIndex() != null && r.getReferenceIndex() != SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX ||
-                  r.getReferenceName() != null && !r.getReferenceName().equals(SAMRecord.NO_ALIGNMENT_REFERENCE_NAME)) &&
-                 r.getAlignmentStart() != SAMRecord.NO_ALIGNMENT_START);
-
-    }
-
-    /**
-     * Need a well-formed, consolidated Cigar string so that the left aligning code works properly.
-     * For example, 1M1M1M1D2M1M --> 3M1D3M
-     * If the given cigar is empty then the returned cigar will also be empty
-     *
-     * Note that this routine collapses cigar elements of size 0, so 2M0M => 2M
-     *
-     * @param c the cigar to consolidate
-     * @return  a non-null cigar with consecutive matching operators merged into single operators.
-     */
-    @Ensures({"result != null"})
-    public static Cigar consolidateCigar( final Cigar c ) {
-        if ( c == null ) { throw new IllegalArgumentException("Cigar cannot be null"); }
-
-        // fast check to determine if there's anything worth doing before we create new Cigar and actually do some work
-        if ( ! needsConsolidation(c) )
-            return c;
-
-        final Cigar returnCigar = new Cigar();
-        int sumLength = 0;
-        CigarElement lastElement = null;
-
-        for( final CigarElement cur : c.getCigarElements() ) {
-            if ( cur.getLength() == 0 )
-                continue; // don't add elements of 0 length
-
-            if ( lastElement != null && lastElement.getOperator() != cur.getOperator() ) {
-                returnCigar.add(new CigarElement(sumLength, lastElement.getOperator()));
-                sumLength = 0;
-            }
-
-            sumLength += cur.getLength();
-            lastElement = cur;
-        }
-
-        if ( sumLength > 0 ) {
-            returnCigar.add(new CigarElement(sumLength, lastElement.getOperator()));
-        }
-
-        return returnCigar;
-    }
-
-    /**
-     * Does the cigar C need to be consolidated?
-     *
-     * @param c a non-null cigar
-     * @return true if so
-     */
-    private static boolean needsConsolidation(final Cigar c) {
-        if ( c.numCigarElements() <= 1 )
-            return false; // fast path for empty or single cigar
-
-        CigarOperator lastOp = null;
-        for( final CigarElement cur : c.getCigarElements() ) {
-            if ( cur.getLength() == 0 || lastOp == cur.getOperator() )
-                return true;
-            lastOp = cur.getOperator();
-        }
-
-        return false;
-    }
-
-    /**
-     * Takes the alignment of the read sequence <code>readSeq</code> to the reference sequence <code>refSeq</code>
-     * starting at 0-based position <code>refIndex</code> on the <code>refSeq</code> and specified by its <code>cigar</code>.
-     * The last argument <code>readIndex</code> specifies 0-based position on the read where the alignment described by the
-     * <code>cigar</code> starts. Usually cigars specify alignments of the whole read to the ref, so that readIndex is normally 0.
-     * Use non-zero readIndex only when the alignment cigar represents alignment of a part of the read. The refIndex in this case
-     * should be the position where the alignment of that part of the read starts at. In other words, both refIndex and readIndex are
-     * always the positions where the cigar starts on the ref and on the read, respectively.
-     * <p/>
-     * If the alignment has one or more indels, this method attempts to move them left across a stretch of repetitive bases.
-     * For instance, if the original cigar specifies that (any) one AT is deleted from a repeat sequence TATATATA, the output
-     * cigar will always mark the leftmost AT as deleted. If there is no indel in the original cigar or if the indel position
-     * is determined unambiguously (i.e. inserted/deleted sequence is not repeated), the original cigar is returned.
-     *
-     * Note that currently we do not actually support the case where there is more than one indel in the alignment.  We will throw
-     * an exception if there is -- unless the
-     *
-     * @param cigar     structure of the original alignment
-     * @param refSeq    reference sequence the read is aligned to
-     * @param readSeq   read sequence
-     * @param refIndex  0-based alignment start position on ref
-     * @param readIndex 0-based alignment start position on read
-     * @param doNotThrowExceptionForMultipleIndels  if true we will not throw an exception if we encounter multiple indels in the alignment will instead will return the original cigar
-     * @return a non-null cigar, in which the indels are guaranteed to be placed at the leftmost possible position across a repeat (if any)
-     */
-    @Ensures("result != null")
-    public static Cigar leftAlignIndel(Cigar cigar, final byte[] refSeq, final byte[] readSeq, final int refIndex, final int readIndex, final boolean doNotThrowExceptionForMultipleIndels) {
-        ensureLeftAlignmentHasGoodArguments(cigar, refSeq, readSeq, refIndex, readIndex);
-
-        final int numIndels = countIndelElements(cigar);
-        if ( numIndels == 0 )
-            return cigar;
-        if ( numIndels == 1 )
-            return leftAlignSingleIndel(cigar, refSeq, readSeq, refIndex, readIndex, true);
-
-        // if we got here then there is more than 1 indel in the alignment
-        if ( doNotThrowExceptionForMultipleIndels )
-            return cigar;
-
-        throw new UnsupportedOperationException("attempting to left align a CIGAR that has more than 1 indel in its alignment but this functionality has not been implemented yet");
-    }
-
-    private static void ensureLeftAlignmentHasGoodArguments(final Cigar cigar, final byte[] refSeq, final byte[] readSeq, final int refIndex, final int readIndex) {
-        if ( cigar == null ) throw new IllegalArgumentException("attempting to left align a CIGAR that is null");
-        if ( refSeq == null ) throw new IllegalArgumentException("attempting to left align a reference sequence that is null");
-        if ( readSeq == null ) throw new IllegalArgumentException("attempting to left align a read sequence that is null");
-        if ( refIndex < 0 ) throw new IllegalArgumentException("attempting to left align with a reference index less than 0");
-        if ( readIndex < 0 ) throw new IllegalArgumentException("attempting to left align with a read index less than 0");
-    }
-
-    /**
-     * Counts the number of I/D operators
-     *
-     * @param cigar   cigar to check -- cannot be null
-     * @return  non-negative count of indel operators
-     */
-    @Requires("cigar != null")
-    @Ensures("result >= 0")
-    private static int countIndelElements(final Cigar cigar) {
-        int indelCount = 0;
-        for ( CigarElement ce : cigar.getCigarElements() ) {
-            if ( ce.getOperator() == CigarOperator.D || ce.getOperator() == CigarOperator.I )
-                indelCount++;
-        }
-        return indelCount;
-    }
-
-    /**
-     * See the documentation for AlignmentUtils.leftAlignIndel() for more details.
-     *
-     * This flavor of the left alignment works if and only if the alignment has one - and only one - indel.
-     * An exception is thrown if there are no indels or more than 1 indel in the alignment.
-     *
-     * @param cigar     structure of the original alignment -- cannot be null
-     * @param refSeq    reference sequence the read is aligned to
-     * @param readSeq   read sequence
-     * @param refIndex  0-based alignment start position on ref
-     * @param readIndex 0-based alignment start position on read
-     * @param cleanupCigar if true, we'll cleanup the resulting cigar element, removing 0 length elements and deletions from the first cigar position
-     * @return a non-null cigar, in which the single indel is guaranteed to be placed at the leftmost possible position across a repeat (if any)
-     */
-    @Ensures("result != null")
-    public static Cigar leftAlignSingleIndel(Cigar cigar, final byte[] refSeq, final byte[] readSeq, final int refIndex, final int readIndex, final boolean cleanupCigar) {
-        ensureLeftAlignmentHasGoodArguments(cigar, refSeq, readSeq, refIndex, readIndex);
-
-        int indexOfIndel = -1;
-        for (int i = 0; i < cigar.numCigarElements(); i++) {
-            CigarElement ce = cigar.getCigarElement(i);
-            if (ce.getOperator() == CigarOperator.D || ce.getOperator() == CigarOperator.I) {
-                // if there is more than 1 indel, exception out
-                if (indexOfIndel != -1)
-                    throw new IllegalArgumentException("attempting to left align a CIGAR that has more than 1 indel in its alignment");
-                indexOfIndel = i;
-            }
-        }
-
-        // if there is no indel, exception out
-        if ( indexOfIndel == -1 )
-            throw new IllegalArgumentException("attempting to left align a CIGAR that has no indels in its alignment");
-        // if the alignment starts with an insertion (so that there is no place on the read to move that insertion further left), we are done
-        if ( indexOfIndel == 0 )
-            return cigar;
-
-        final int indelLength = cigar.getCigarElement(indexOfIndel).getLength();
-
-        byte[] altString = createIndelString(cigar, indexOfIndel, refSeq, readSeq, refIndex, readIndex);
-        if (altString == null)
-            return cigar;
-
-        Cigar newCigar = cigar;
-        for (int i = 0; i < indelLength; i++) {
-            newCigar = moveCigarLeft(newCigar, indexOfIndel);
-            byte[] newAltString = createIndelString(newCigar, indexOfIndel, refSeq, readSeq, refIndex, readIndex);
-
-            // check to make sure we haven't run off the end of the read
-            boolean reachedEndOfRead = cigarHasZeroSizeElement(newCigar);
-
-            if (Arrays.equals(altString, newAltString)) {
-                cigar = newCigar;
-                i = -1;
-                if (reachedEndOfRead)
-                    cigar = cleanupCigar ? cleanUpCigar(cigar) : cigar;
-            }
-
-            if (reachedEndOfRead)
-                break;
-        }
-
-        return cigar;
-    }
-
-    /**
-     * Does one of the elements in cigar have a 0 length?
-     *
-     * @param c a non-null cigar
-     * @return true if any element has 0 size
-     */
-    @Requires("c != null")
-    protected static boolean cigarHasZeroSizeElement(final Cigar c) {
-        for (final CigarElement ce : c.getCigarElements()) {
-            if (ce.getLength() == 0)
-                return true;
-        }
-        return false;
-    }
-
-    /**
-     * Clean up the incoming cigar
-     *
-     * Removes elements with zero size
-     * Clips away beginning deletion operators
-     *
-     * @param c the cigar string we want to clean up
-     * @return a newly allocated, cleaned up Cigar
-     */
-    @Requires("c != null")
-    @Ensures("result != null")
-    public static Cigar cleanUpCigar(final Cigar c) {
-        final List<CigarElement> elements = new ArrayList<CigarElement>(c.numCigarElements() - 1);
-
-        for (final CigarElement ce : c.getCigarElements()) {
-            if (ce.getLength() != 0 && (! elements.isEmpty() || ce.getOperator() != CigarOperator.D)) {
-                elements.add(ce);
-            }
-        }
-
-        return new Cigar(elements);
-    }
-
-    /**
-     * Removing a trailing deletion from the incoming cigar if present
-     *
-     * @param c the cigar we want to update
-     * @return a non-null Cigar
-     */
-    @Requires("c != null")
-    @Ensures("result != null")
-    public static Cigar removeTrailingDeletions(final Cigar c) {
-
-        final List<CigarElement> elements = c.getCigarElements();
-        if ( elements.get(elements.size() - 1).getOperator() != CigarOperator.D )
-            return c;
-
-        return new Cigar(elements.subList(0, elements.size() - 1));
-    }
-
-    /**
-     * Move the indel in a given cigar string one base to the left
-     *
-     * @param cigar          original cigar
-     * @param indexOfIndel   the index of the indel cigar element
-     * @return non-null cigar with indel moved one base to the left
-     */
-    @Requires("cigar != null && indexOfIndel >= 0 && indexOfIndel < cigar.numCigarElements()")
-    @Ensures("result != null")
-    private static Cigar moveCigarLeft(Cigar cigar, int indexOfIndel) {
-        // get the first few elements
-        ArrayList<CigarElement> elements = new ArrayList<CigarElement>(cigar.numCigarElements());
-        for (int i = 0; i < indexOfIndel - 1; i++)
-            elements.add(cigar.getCigarElement(i));
-
-        // get the indel element and move it left one base
-        CigarElement ce = cigar.getCigarElement(indexOfIndel - 1);
-        elements.add(new CigarElement(Math.max(ce.getLength() - 1, 0), ce.getOperator()));
-        elements.add(cigar.getCigarElement(indexOfIndel));
-        if (indexOfIndel + 1 < cigar.numCigarElements()) {
-            ce = cigar.getCigarElement(indexOfIndel + 1);
-            elements.add(new CigarElement(ce.getLength() + 1, ce.getOperator()));
-        } else {
-            elements.add(new CigarElement(1, CigarOperator.M));
-        }
-
-        // get the last few elements
-        for (int i = indexOfIndel + 2; i < cigar.numCigarElements(); i++)
-            elements.add(cigar.getCigarElement(i));
-        return new Cigar(elements);
-    }
-
-    /**
-     * Create the string (really a byte array) representation of an indel-containing cigar against the reference.
-     *
-     * @param cigar             the indel-containing cigar
-     * @param indexOfIndel      the index of the indel cigar element
-     * @param refSeq            the reference sequence
-     * @param readSeq           the read sequence for the cigar
-     * @param refIndex          the starting reference index into refSeq
-     * @param readIndex         the starting read index into readSeq
-     * @return non-null byte array which is the indel representation against the reference
-     */
-    @Requires("cigar != null && indexOfIndel >= 0 && indexOfIndel < cigar.numCigarElements() && refSeq != null && readSeq != null && refIndex >= 0 && readIndex >= 0")
-    @Ensures("result != null")
-    private static byte[] createIndelString(final Cigar cigar, final int indexOfIndel, final byte[] refSeq, final byte[] readSeq, int refIndex, int readIndex) {
-        CigarElement indel = cigar.getCigarElement(indexOfIndel);
-        int indelLength = indel.getLength();
-
-        int totalRefBases = 0;
-        for (int i = 0; i < indexOfIndel; i++) {
-            CigarElement ce = cigar.getCigarElement(i);
-            int length = ce.getLength();
-
-            switch (ce.getOperator()) {
-                case M:
-                case EQ:
-                case X:
-                    readIndex += length;
-                    refIndex += length;
-                    totalRefBases += length;
-                    break;
-                case S:
-                    readIndex += length;
-                    break;
-                case N:
-                    refIndex += length;
-                    totalRefBases += length;
-                    break;
-                default:
-                    break;
-            }
-        }
-
-        // sometimes, when there are very large known indels, we won't have enough reference sequence to cover them
-        if (totalRefBases + indelLength > refSeq.length)
-            indelLength -= (totalRefBases + indelLength - refSeq.length);
-
-        // the indel-based reference string
-        byte[] alt = new byte[refSeq.length + (indelLength * (indel.getOperator() == CigarOperator.D ? -1 : 1))];
-
-        // add the bases before the indel, making sure it's not aligned off the end of the reference
-        if (refIndex > alt.length || refIndex > refSeq.length)
-            return null;
-        System.arraycopy(refSeq, 0, alt, 0, refIndex);
-        int currentPos = refIndex;
-
-        // take care of the indel
-        if (indel.getOperator() == CigarOperator.D) {
-            refIndex += indelLength;
-        } else {
-            System.arraycopy(readSeq, readIndex, alt, currentPos, indelLength);
-            currentPos += indelLength;
-        }
-
-        // add the bases after the indel, making sure it's not aligned off the end of the reference
-        if (refSeq.length - refIndex > alt.length - currentPos)
-            return null;
-        System.arraycopy(refSeq, refIndex, alt, currentPos, refSeq.length - refIndex);
-
-        return alt;
-    }
-
-
-    /**
-     * Trim cigar down to one that starts at start reference on the left and extends to end on the reference
-     *
-     * @param cigar a non-null Cigar to trim down
-     * @param start Where should we start keeping bases on the reference?  The first position is 0
-     * @param end Where should we stop keeping bases on the reference?  The maximum value is cigar.getReferenceLength()
-     * @return a new Cigar with reference length == start - end + 1
-     */
-    public static Cigar trimCigarByReference(final Cigar cigar, final int start, final int end) {
-        if ( start < 0 ) throw new IllegalArgumentException("Start must be >= 0 but got " + start);
-        if ( end < start ) throw new IllegalArgumentException("End " + end + " is < start start " + start);
-        if ( end > cigar.getReferenceLength() ) throw new IllegalArgumentException("End is beyond the cigar's reference length " + end + " for cigar " + cigar );
-
-        final Cigar result = trimCigar(cigar, start, end, true);
-
-        if ( result.getReferenceLength() != end - start + 1)
-            throw new IllegalStateException("trimCigarByReference failure: start " + start + " end " + end + " for " + cigar + " resulted in cigar with wrong size " + result);
-        return result;
-    }
-
-    /**
-     * Trim cigar down to one that starts at start base in the cigar and extends to (inclusive) end base
-     *
-     * @param cigar a non-null Cigar to trim down
-     * @param start Where should we start keeping bases in the cigar?  The first position is 0
-     * @param end Where should we stop keeping bases in the cigar?  The maximum value is cigar.getReadLength()
-     * @return a new Cigar containing == start - end + 1 reads
-     */
-    public static Cigar trimCigarByBases(final Cigar cigar, final int start, final int end) {
-        if ( start < 0 ) throw new IllegalArgumentException("Start must be >= 0 but got " + start);
-        if ( end < start ) throw new IllegalArgumentException("End " + end + " is < start = " + start);
-        if ( end > cigar.getReadLength() ) throw new IllegalArgumentException("End is beyond the cigar's read length " + end + " for cigar " + cigar );
-
-        final Cigar result = trimCigar(cigar, start, end, false);
-
-        final int expectedSize = end - start + 1;
-        if ( result.getReadLength() != expectedSize)
-            throw new IllegalStateException("trimCigarByBases failure: start " + start + " end " + end + " for " + cigar + " resulted in cigar with wrong size " + result + " with size " + result.getReadLength() + " expected " + expectedSize + " for input cigar " + cigar);
-        return result;
-    }
-
-
-    /**
-     * Workhorse for trimCigarByBases and trimCigarByReference
-     *
-     * @param cigar a non-null Cigar to trim down
-     * @param start Where should we start keeping bases in the cigar?  The first position is 0
-     * @param end Where should we stop keeping bases in the cigar?  The maximum value is cigar.getReadLength()
-     * @param byReference should start and end be intrepreted as position in the reference or the read to trim to/from?
-     * @return a non-null cigar
-     */
-    @Requires({"cigar != null", "start >= 0", "start <= end"})
-    @Ensures("result != null")
-    private static Cigar trimCigar(final Cigar cigar, final int start, final int end, final boolean byReference) {
-        final List<CigarElement> newElements = new LinkedList<CigarElement>();
-
-        int pos = 0;
-        for ( final CigarElement elt : cigar.getCigarElements() ) {
-            if ( pos > end && (byReference || elt.getOperator() != CigarOperator.D) ) break;
-
-            switch ( elt.getOperator() ) {
-                case D:
-                    if ( ! byReference ) {
-                        if ( pos >= start )
-                            newElements.add(elt);
-                        break;
-                    }
-                    // otherwise fall through to the next case
-                case EQ: case M: case X:
-                    pos = addCigarElements(newElements, pos, start, end, elt);
-                    break;
-                case S: case I:
-                    if ( byReference ) {
-                        if ( pos >= start )
-                            newElements.add(elt);
-                    } else {
-                        pos = addCigarElements(newElements, pos, start, end, elt);
-                    }
-                    break;
-                default:
-                    throw new IllegalStateException("Cannot handle " + elt);
-            }
-        }
-
-        return AlignmentUtils.consolidateCigar(new Cigar(newElements));
-    }
-
-    /**
-     * Helper function for trimCigar that adds cigar elements (of total length X) of elt.op to dest for
-     * X bases that fall between start and end, where the last position of the base is pos.
-     *
-     * The primary use of this function is to create a new cigar element list that contains only
-     * elements that occur between start and end bases in an initial cigar.
-     *
-     * Note that this function may return multiple cigar elements (1M1M etc) that are best consolidated
-     * after the fact into a single simpler representation.
-     *
-     * @param dest we will append our cigar elements to this list
-     * @param pos the position (0 indexed) where elt started
-     * @param start only include bases that occur >= this position
-     * @param end only include bases that occur <= this position
-     * @param elt the element we are slicing down
-     * @return the position after we've traversed all elt.length bases of elt
-     */
-   protected static int addCigarElements(final List<CigarElement> dest, int pos, final int start, final int end, final CigarElement elt) {
-        final int length = Math.min(pos + elt.getLength() - 1, end) - Math.max(pos, start) + 1;
-        if ( length > 0 )
-            dest.add(new CigarElement(length, elt.getOperator()));
-        return pos + elt.getLength();
-    }
-
-    /**
-     * Get the offset (base 0) of the first reference aligned base in Cigar that occurs after readStartByBaseOfCigar base of the cigar
-     *
-     * The main purpose of this routine is to find a good start position for a read given it's cigar.  The real
-     * challenge is that the starting base might be inside an insertion, in which case the read actually starts
-     * at the next M/EQ/X operator.
-     *
-     * @param cigar a non-null cigar
-     * @param readStartByBaseOfCigar finds the first base after this (0 indexed) that aligns to the reference genome (M, EQ, X)
-     * @throws IllegalStateException if no such base can be found
-     * @return an offset into cigar
-     */
-    public static int calcFirstBaseMatchingReferenceInCigar(final Cigar cigar, int readStartByBaseOfCigar) {
-        if ( cigar == null ) throw new IllegalArgumentException("cigar cannot be null");
-        if ( readStartByBaseOfCigar >= cigar.getReadLength() ) throw new IllegalArgumentException("readStartByBaseOfCigar " + readStartByBaseOfCigar + " must be <= readLength " + cigar.getReadLength());
-
-        int hapOffset = 0, refOffset = 0;
-        for ( final CigarElement ce : cigar.getCigarElements() ) {
-            for ( int i = 0; i < ce.getLength(); i++ ) {
-                switch ( ce.getOperator() ) {
-                    case M:case EQ:case X:
-                        if ( hapOffset >= readStartByBaseOfCigar )
-                            return refOffset;
-                        hapOffset++;
-                        refOffset++;
-                        break;
-                    case I: case S:
-                        hapOffset++;
-                        break;
-                    case D:
-                        refOffset++;
-                        break;
-                    default:
-                        throw new IllegalStateException("calcFirstBaseMatchingReferenceInCigar does not support cigar " + ce.getOperator() + " in cigar " + cigar);
-                }
-            }
-        }
-
-        throw new IllegalStateException("Never found appropriate matching state for cigar " + cigar + " given start of " + readStartByBaseOfCigar);
-    }
-
-    /**
-     * Generate a new Cigar that maps the operations of the first cigar through those in a second
-     *
-     * For example, if first is 5M and the second is 2M1I2M then the result is 2M1I2M.
-     * However, if first is 1M2D3M and second is 2M1I3M this results in a cigar X
-     *
-     * ref   : AC-GTA
-     * hap   : ACxGTA  - 2M1I3M
-     * read  : A--GTA  - 1M2D3M
-     * result: A--GTA => 1M1D3M
-     *
-     * ref   : ACxG-TA
-     * hap   : AC-G-TA  - 2M1D3M
-     * read  : AC-GxTA  - 3M1I2M
-     * result: AC-GxTA => 2M1D1M1I2M
-     *
-     * ref   : ACGTA
-     * hap   : ACGTA  - 5M
-     * read  : A-GTA  - 1M1I3M
-     * result: A-GTA => 1M1I3M
-     *
-     * ref   : ACGTAC
-     * hap   : AC---C  - 2M3D1M
-     * read  : AC---C  - 3M
-     * result: AG---C => 2M3D
-     *
-     * The constraint here is that both cigars should imply that the result have the same number of
-     * reference bases (i.e.g, cigar.getReferenceLength() are equals).
-     *
-     * @param firstToSecond the cigar mapping hap1 -> hap2
-     * @param secondToThird the cigar mapping hap2 -> hap3
-     * @return A cigar mapping hap1 -> hap3
-     */
-    public static Cigar applyCigarToCigar(final Cigar firstToSecond, final Cigar secondToThird) {
-        final boolean DEBUG = false;
-
-        final List<CigarElement> newElements = new LinkedList<CigarElement>();
-        final int nElements12 = firstToSecond.getCigarElements().size();
-        final int nElements23 = secondToThird.getCigarElements().size();
-
-        int cigar12I = 0, cigar23I = 0;
-        int elt12I = 0, elt23I = 0;
-
-        while ( cigar12I < nElements12 && cigar23I < nElements23 ) {
-            final CigarElement elt12 = firstToSecond.getCigarElement(cigar12I);
-            final CigarElement elt23 = secondToThird.getCigarElement(cigar23I);
-
-            final CigarPairTransform transform = getTransformer(elt12.getOperator(), elt23.getOperator());
-
-            if ( DEBUG )
-                System.out.printf("Transform %s => %s with elt1 = %d %s @ %d elt2 = %d %s @ %d with transform %s%n",
-                        firstToSecond, secondToThird, cigar12I, elt12.getOperator(), elt12I, cigar23I, elt23.getOperator(), elt23I, transform);
-
-            if ( transform.op13 != null ) // skip no ops
-                newElements.add(new CigarElement(1, transform.op13));
-
-            elt12I += transform.advance12;
-            elt23I += transform.advance23;
-
-            // if have exhausted our current element, advance to the next one
-            if ( elt12I == elt12.getLength() ) { cigar12I++; elt12I = 0; }
-            if ( elt23I == elt23.getLength() ) { cigar23I++; elt23I = 0; }
-        }
-
-        return AlignmentUtils.consolidateCigar(new Cigar(newElements));
-    }
-
-    private static CigarPairTransform getTransformer(final CigarOperator op12, final CigarOperator op23) {
-        for ( final CigarPairTransform transform : cigarPairTransformers) {
-            if ( transform.op12.contains(op12) && transform.op23.contains(op23) )
-                return transform;
-        }
-
-        throw new IllegalStateException("No transformer for operators " + op12 + " and " + op23);
-    }
-
-    /**
-     * transformations that project one alignment state through another
-     *
-     * Think about this as a state machine, where we have:
-     *
-     * bases3 : xxx A zzz
-     * bases2 : xxx B zzz
-     * bases1 : xxx C zzz
-     *
-     * where A, B and C are alignment states of a three way alignment.  We want to capture
-     * the transition from operation mapping 1 -> 2 and an operation mapping 2 -> 3 and its
-     * associated mapping from 1 -> 3 and the advancement of the cigar states of 1->2 and 2->3.
-     *
-     * Imagine that A, B, and C are all equivalent (so that op12 = M and op23 = M).  This implies
-     * a mapping of 1->3 of M, and in this case the next states to consider in the 3 way alignment
-     * are the subsequent states in 1 and 2 (so that advance12 and advance23 are both 1).
-     *
-     * Obviously not all of the states and their associated transitions are so simple.  Suppose instead
-     * that op12 = I, and op23 = M.  What does this look like:
-     *
-     * bases3 : xxx - A zzz
-     * bases2 : xxx - B zzz
-     * bases1 : xxx I C zzz
-     *
-     * It means that op13 must be an insertion (as we have an extra base in 1 thats not present in 2 and
-     * so not present in 3).  We advance the cigar in 1 by 1 (as we've consumed one base in 1 for the I)
-     * but we haven't yet found the base corresponding to the M of op23.  So we don't advance23.
-     */
-    private static class CigarPairTransform {
-        private final EnumSet<CigarOperator> op12, op23;
-        private final CigarOperator op13;
-        private final int advance12, advance23;
-
-        private CigarPairTransform(CigarOperator op12, CigarOperator op23, CigarOperator op13, int advance12, int advance23) {
-            this.op12 = getCigarSet(op12);
-            this.op23 = getCigarSet(op23);
-            this.op13 = op13;
-            this.advance12 = advance12;
-            this.advance23 = advance23;
-        }
-
-        private static EnumSet<CigarOperator> getCigarSet(final CigarOperator masterOp) {
-            switch ( masterOp ) {
-                case M: return EnumSet.of(CigarOperator.M, CigarOperator.EQ, CigarOperator.X);
-                case I: return EnumSet.of(CigarOperator.I, CigarOperator.S);
-                case D: return EnumSet.of(CigarOperator.D);
-                default: throw new IllegalStateException("Unexpected state " + masterOp);
-            }
-        }
-
-        @Override
-        public String toString() {
-            return "CigarPairTransform{" +
-                    "op12=" + op12 +
-                    ", op23=" + op23 +
-                    ", op13=" + op13 +
-                    ", advance12=" + advance12 +
-                    ", advance23=" + advance23 +
-                    '}';
-        }
-    }
-
-
-    private final static List<CigarPairTransform> cigarPairTransformers = Arrays.asList(
-            //
-            // op12 is a match
-            //
-            // 3: xxx B yyy
-            // ^^^^^^^^^^^^
-            // 2: xxx M yyy
-            // 1: xxx M yyy
-            new CigarPairTransform(CigarOperator.M, CigarOperator.M, CigarOperator.M, 1, 1),
-            // 3: xxx I yyy
-            // ^^^^^^^^^^^^
-            // 2: xxx I yyy
-            // 1: xxx M yyy
-            new CigarPairTransform(CigarOperator.M, CigarOperator.I, CigarOperator.I, 1, 1),
-            // 3: xxx D yyy
-            // ^^^^^^^^^^^^
-            // 2: xxx D yyy
-            // 1: xxx M yyy
-            new CigarPairTransform(CigarOperator.M, CigarOperator.D, CigarOperator.D, 0, 1),
-
-            //
-            // op12 is a deletion
-            //
-            // 3: xxx D M yyy
-            // ^^^^^^^^^^^^
-            // 2: xxx M yyy
-            // 1: xxx D yyy
-            new CigarPairTransform(CigarOperator.D, CigarOperator.M, CigarOperator.D, 1, 1),
-            // 3: xxx D1 D2 yyy
-            // ^^^^^^^^^^^^
-            // 2: xxx D2 yyy
-            // 1: xxx D1 yyy
-            new CigarPairTransform(CigarOperator.D, CigarOperator.D, CigarOperator.D, 1, 0),
-            // 3: xxx X yyy => no-op, we skip emitting anything here
-            // ^^^^^^^^^^^^
-            // 2: xxx I yyy
-            // 1: xxx D yyy
-            new CigarPairTransform(CigarOperator.D, CigarOperator.I, null, 1, 1),
-
-            //
-            // op12 is a insertion
-            //
-            // 3: xxx I M yyy
-            // ^^^^^^^^^^^^
-            // 2: xxx M yyy
-            // 1: xxx I yyy
-            new CigarPairTransform(CigarOperator.I, CigarOperator.M, CigarOperator.I, 1, 0),
-            // 3: xxx I D yyy
-            // ^^^^^^^^^^^^
-            // 2: xxx D yyy
-            // 1: xxx I yyy
-            new CigarPairTransform(CigarOperator.I, CigarOperator.D, CigarOperator.I, 1, 0),
-            // 3: xxx I1 I2 yyy
-            // ^^^^^^^^^^^^
-            // 2: xxx I2 yyy
-            // 1: xxx I1 yyy
-            new CigarPairTransform(CigarOperator.I, CigarOperator.I, CigarOperator.I, 1, 0)
-            );
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/sam/ArtificialBAMBuilder.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/sam/ArtificialBAMBuilder.java
deleted file mode 100644
index 8233252..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/sam/ArtificialBAMBuilder.java
+++ /dev/null
@@ -1,242 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.sam;
-
-import htsjdk.samtools.reference.IndexedFastaSequenceFile;
-import htsjdk.samtools.*;
-import org.broadinstitute.gatk.utils.GenomeLocParser;
-import org.broadinstitute.gatk.utils.NGSPlatform;
-
-import java.io.File;
-import java.io.IOException;
-import java.util.*;
-
-/**
- * Easy to use creator of artificial BAM files for testing
- *
- * Allows us to make a stream of reads or an index BAM file with read having the following properties
- *
- * - coming from n samples
- * - of fixed read length and aligned to the genome with M operator
- * - having N reads per alignment start
- * - skipping N bases between each alignment start
- * - starting at a given alignment start
- *
- * User: depristo
- * Date: 1/15/13
- * Time: 9:22 AM
- */
-public class ArtificialBAMBuilder {
-    public final static int BAM_SHARD_SIZE = 16384;
-
-    private final IndexedFastaSequenceFile reference;
-    private final GenomeLocParser parser;
-
-    final int nReadsPerLocus;
-    final int nLoci;
-
-    int skipNLoci = 0;
-    int alignmentStart = 1;
-    int readLength = 10;
-    private final ArrayList<String> samples = new ArrayList<String>();
-    private List<GATKSAMRecord> createdReads = null;
-
-    private LinkedList<GATKSAMRecord> additionalReads = new LinkedList<GATKSAMRecord>();
-
-    final SAMFileWriterFactory factory = new SAMFileWriterFactory();
-    {
-        factory.setCreateIndex(true);
-    }
-
-    SAMFileHeader header;
-
-    public ArtificialBAMBuilder(final IndexedFastaSequenceFile reference, int nReadsPerLocus, int nLoci) {
-        this.nReadsPerLocus = nReadsPerLocus;
-        this.nLoci = nLoci;
-
-        this.reference = reference;
-        this.parser = new GenomeLocParser(reference);
-        createAndSetHeader(1);
-    }
-
-    public ArtificialBAMBuilder(int nReadsPerLocus, int nLoci) {
-        this(ArtificialSAMUtils.createArtificialSamHeader(1, 1, 1000000).getSequenceDictionary(), nReadsPerLocus, nLoci);
-    }
-
-    public ArtificialBAMBuilder(final SAMSequenceDictionary dict, int nReadsPerLocus, int nLoci) {
-        this.nReadsPerLocus = nReadsPerLocus;
-        this.nLoci = nLoci;
-        this.reference = null;
-        this.parser = new GenomeLocParser(dict);
-        createAndSetHeader(1);
-    }
-
-    public IndexedFastaSequenceFile getReference() {
-        return reference;
-    }
-
-    public GenomeLocParser getGenomeLocParser() {
-        return parser;
-    }
-
-    public ArtificialBAMBuilder createAndSetHeader(final int nSamples) {
-        createdReads = null;
-        this.header = new SAMFileHeader();
-        header.setSortOrder(SAMFileHeader.SortOrder.coordinate);
-        header.setSequenceDictionary(parser.getContigs());
-        samples.clear();
-
-        for ( int i = 0; i < nSamples; i++ ) {
-            final GATKSAMReadGroupRecord rg = new GATKSAMReadGroupRecord("rg" + i);
-            final String sample = "sample" + i;
-            samples.add(sample);
-            rg.setSample(sample);
-            rg.setPlatform(NGSPlatform.ILLUMINA.getDefaultPlatform());
-            header.addReadGroup(rg);
-        }
-
-        return this;
-    }
-
-    public void addReads(final GATKSAMRecord readToAdd) {
-        createdReads = null;
-        additionalReads.add(readToAdd);
-    }
-
-    public void addReads(final Collection<GATKSAMRecord> readsToAdd) {
-        createdReads = null;
-        additionalReads.addAll(readsToAdd);
-    }
-
-    public List<String> getSamples() {
-        return samples;
-    }
-
-    /**
-     * Create a read stream based on the parameters.  The cigar string for each
-     * read will be *M, where * is the length of the read.
-     *
-     * Useful for testing things like LocusIteratorBystate
-     *
-     * @return a ordered list of reads
-     */
-    public List<GATKSAMRecord> makeReads() {
-        if ( createdReads == null ) {
-            final String baseName = "read";
-            final LinkedList<GATKSAMReadGroupRecord> readGroups = new LinkedList<GATKSAMReadGroupRecord>();
-            for ( final SAMReadGroupRecord rg : header.getReadGroups())
-                readGroups.add(new GATKSAMReadGroupRecord(rg));
-
-            List<GATKSAMRecord> reads = new ArrayList<GATKSAMRecord>(nReadsPerLocus*nLoci);
-            for ( int locusI = 0; locusI < nLoci; locusI++) {
-                final int locus = locusI * (skipNLoci + 1);
-                for ( int readI = 0; readI < nReadsPerLocus; readI++ ) {
-                    for ( final GATKSAMReadGroupRecord rg : readGroups ) {
-                        final String readName = String.format("%s.%d.%d.%s", baseName, locus, readI, rg.getId());
-                        final GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(header, readName, 0, alignmentStart + locus, readLength);
-                        read.setReadGroup(rg);
-                        reads.add(read);
-                    }
-                }
-            }
-
-            if ( ! additionalReads.isEmpty() ) {
-                reads.addAll(additionalReads);
-                Collections.sort(reads, new SAMRecordCoordinateComparator());
-            }
-
-            createdReads = new ArrayList<GATKSAMRecord>(reads);
-        }
-
-        return createdReads;
-    }
-
-    /**
-     * Make an indexed BAM file contains the reads in the builder, marking it for deleteOnExit()
-     * @return the BAM file
-     */
-    public File makeTemporarilyBAMFile() {
-        try {
-            final File file = File.createTempFile("tempBAM", ".bam");
-            file.deleteOnExit();
-
-            // Register the bam index file for deletion on exit as well:
-            new File(file.getAbsolutePath().replace(".bam", ".bai")).deleteOnExit();
-            new File(file.getAbsolutePath() + ".bai").deleteOnExit();
-
-            return makeBAMFile(file);
-        } catch ( IOException e ) {
-            throw new RuntimeException(e);
-        }
-    }
-
-    /**
-     * Write the reads from this builder to output, creating an index as well
-     * @param output the output BAM file we want to use
-     * @return
-     */
-    public File makeBAMFile(final File output) {
-        final SAMFileWriter writer = factory.makeBAMWriter(header, true, output, 0);
-        for ( final GATKSAMRecord read : makeReads() )
-            writer.addAlignment(read);
-        writer.close();
-        return output;
-    }
-
-    public int getnReadsPerLocus() { return nReadsPerLocus; }
-    public int getnLoci() { return nLoci; }
-    public int getSkipNLoci() { return skipNLoci; }
-    public ArtificialBAMBuilder setSkipNLoci(int skipNLoci) { this.skipNLoci = skipNLoci; createdReads = null; return this; }
-    public int getAlignmentStart() { return alignmentStart; }
-    public ArtificialBAMBuilder setAlignmentStart(int alignmentStart) { this.alignmentStart = alignmentStart; createdReads = null; return this; }
-    public int getReadLength() { return readLength; }
-    public ArtificialBAMBuilder setReadLength(int readLength) { this.readLength = readLength; createdReads = null; return this; }
-    public SAMFileHeader getHeader() { return header; }
-    public ArtificialBAMBuilder setHeader(SAMFileHeader header) { this.header = header; createdReads = null; return this; }
-
-    public int getAlignmentEnd() {
-        return alignmentStart + nLoci * (skipNLoci + 1) + readLength;
-    }
-
-
-    public int getNSamples() { return samples.size(); }
-
-    public int expectedNumberOfReads() {
-        return nLoci * nReadsPerLocus * header.getReadGroups().size();
-    }
-
-    @Override
-    public String toString() {
-        return "ArtificialBAMBuilder{" +
-                "samples=" + samples +
-                ", readLength=" + readLength +
-                ", alignmentStart=" + alignmentStart +
-                ", skipNLoci=" + skipNLoci +
-                ", nLoci=" + nLoci +
-                ", nReadsPerLocus=" + nReadsPerLocus +
-                '}';
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/sam/ArtificialGATKSAMFileWriter.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/sam/ArtificialGATKSAMFileWriter.java
deleted file mode 100644
index 0821f46..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/sam/ArtificialGATKSAMFileWriter.java
+++ /dev/null
@@ -1,130 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.sam;
-
-import htsjdk.samtools.SAMFileHeader;
-import htsjdk.samtools.SAMRecord;
-import htsjdk.samtools.util.ProgressLoggerInterface;
-import org.broadinstitute.gatk.engine.io.GATKSAMFileWriter;
-
-import java.util.ArrayList;
-import java.util.List;
-
-
-/*
- * Copyright (c) 2009 The Broad Institute
- *
- * Permission is hereby granted, free of charge, to any person
- * obtaining a copy of this software and associated documentation
- * files (the "Software"), to deal in the Software without
- * restriction, including without limitation the rights to use,
- * copy, modify, merge, publish, distribute, sublicense, and/or sell
- * copies of the Software, and to permit persons to whom the
- * Software is furnished to do so, subject to the following
- * conditions:
- *
- * The above copyright notice and this permission notice shall be
- * included in all copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
- * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
- * OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
- * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
- * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
- * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
- * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
- * OTHER DEALINGS IN THE SOFTWARE.
- */
-
-/**
- * @author aaron
- *         <p/>
- *         Class ArtificialGATKSAMFileWriter
- *         <p/>
- * generates a fake samwriter, that you can get the output reads
- * from when you're done.  
- */
-public class ArtificialGATKSAMFileWriter implements GATKSAMFileWriter {
-
-    // are we closed
-    private boolean closed = false;
-
-    // the SAMRecords we've added to this writer
-    List<SAMRecord> records = new ArrayList<SAMRecord>();
-
-    public void addAlignment( SAMRecord alignment ) {
-        records.add(alignment);
-    }
-
-    public SAMFileHeader getFileHeader() {
-        if (records.size() > 0) {
-            return records.get(0).getHeader();
-        }
-        return null;
-    }
-
-    /** not much to do when we're fake */
-    public void close() {
-        closed = true;
-    }
-
-    /**
-     * are we closed?
-     *
-     * @return true if we're closed
-     */
-    public boolean isClosed() {
-        return closed;
-    }
-
-    /**
-     * get the records we've seen
-     * @return
-     */
-    public List<SAMRecord> getRecords() {
-        return records;
-    }
-
-    @Override
-    public void writeHeader(SAMFileHeader header) {
-    }
-
-    @Override
-    public void setPresorted(boolean presorted) {
-    }
-
-    @Override
-    public void setMaxRecordsInRam(int maxRecordsInRam) {
-    }
-
-    /**
-     * @throws java.lang.UnsupportedOperationException No progress logging in this implementation.
-     */
-    @Override
-    public void setProgressLogger(final ProgressLoggerInterface logger) {
-        throw new UnsupportedOperationException("Progress logging not supported");
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/sam/ArtificialMultiSampleReadStream.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/sam/ArtificialMultiSampleReadStream.java
deleted file mode 100644
index 84978c1..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/sam/ArtificialMultiSampleReadStream.java
+++ /dev/null
@@ -1,87 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.sam;
-
-import htsjdk.samtools.MergingSamRecordIterator;
-import htsjdk.samtools.SamFileHeaderMerger;
-import htsjdk.samtools.SAMFileHeader;
-import htsjdk.samtools.SAMFileReader;
-import htsjdk.samtools.SAMRecord;
-import org.broadinstitute.gatk.engine.iterators.GATKSAMIterator;
-import org.broadinstitute.gatk.engine.iterators.GATKSAMIteratorAdapter;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-
-import java.util.*;
-
-/**
- * Simple wrapper class that multiplexes multiple ArtificialSingleSampleReadStreams into a single stream of reads
- *
- * @author David Roazen
- */
-public class ArtificialMultiSampleReadStream implements Iterable<SAMRecord> {
-
-    private Collection<ArtificialSingleSampleReadStream> perSampleArtificialReadStreams;
-    private MergingSamRecordIterator mergingIterator;
-
-    public ArtificialMultiSampleReadStream( Collection<ArtificialSingleSampleReadStream> perSampleArtificialReadStreams ) {
-        if ( perSampleArtificialReadStreams == null || perSampleArtificialReadStreams.isEmpty() ) {
-            throw new ReviewedGATKException("Can't create an ArtificialMultiSampleReadStream out of 0 ArtificialSingleSampleReadStreams");
-        }
-
-        this.perSampleArtificialReadStreams = perSampleArtificialReadStreams;
-    }
-
-    public Iterator<SAMRecord> iterator() {
-        // lazy initialization to prevent reads from being created until they're needed
-        initialize();
-
-        return mergingIterator;
-    }
-
-    public GATKSAMIterator getGATKSAMIterator() {
-        // lazy initialization to prevent reads from being created until they're needed
-        initialize();
-
-        return GATKSAMIteratorAdapter.adapt(mergingIterator);
-    }
-
-    private void initialize() {
-        Collection<SAMFileReader> perSampleSAMReaders = new ArrayList<SAMFileReader>(perSampleArtificialReadStreams.size());
-        Collection<SAMFileHeader> headers = new ArrayList<SAMFileHeader>(perSampleArtificialReadStreams.size());
-
-        for ( ArtificialSingleSampleReadStream readStream : perSampleArtificialReadStreams ) {
-            Collection<SAMRecord> thisStreamReads = readStream.makeReads();
-
-            SAMFileReader reader = new ArtificialSAMFileReader(readStream.getHeader(),
-                                                               thisStreamReads.toArray(new SAMRecord[thisStreamReads.size()]));
-            perSampleSAMReaders.add(reader);
-            headers.add(reader.getFileHeader());
-        }
-
-        SamFileHeaderMerger headerMerger = new SamFileHeaderMerger(SAMFileHeader.SortOrder.coordinate, headers, true);
-        mergingIterator = new MergingSamRecordIterator(headerMerger, perSampleSAMReaders, true);
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/sam/ArtificialPatternedSAMIterator.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/sam/ArtificialPatternedSAMIterator.java
deleted file mode 100644
index 8434e15..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/sam/ArtificialPatternedSAMIterator.java
+++ /dev/null
@@ -1,172 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.sam;
-
-import htsjdk.samtools.SAMFileHeader;
-import htsjdk.samtools.SAMRecord;
-
-
-/*
- * Copyright (c) 2009 The Broad Institute
- *
- * Permission is hereby granted, free of charge, to any person
- * obtaining a copy of this software and associated documentation
- * files (the "Software"), to deal in the Software without
- * restriction, including without limitation the rights to use,
- * copy, modify, merge, publish, distribute, sublicense, and/or sell
- * copies of the Software, and to permit persons to whom the
- * Software is furnished to do so, subject to the following
- * conditions:
- *
- * The above copyright notice and this permission notice shall be
- * included in all copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
- * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
- * OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
- * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
- * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
- * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
- * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
- * OTHER DEALINGS IN THE SOFTWARE.
- */
-
-/**
- * @author aaron
- *
- *  Class ArtificialPatternedSAMIterator
- *
- * This class allows you to pattern the artificial sam iterator, asking for reads
- * in order or out of order.
- */
-public class ArtificialPatternedSAMIterator extends ArtificialSAMIterator {
-
-    /** the pattern we're implementing */
-    public enum PATTERN {
-        RANDOM_READS, IN_ORDER_READS;
-    }
-
-    // our pattern
-    private final PATTERN mPattern;
-
-    /**
-     * this is pretty heavy (and it could be extremely heavy, given the amount of reads they request, but it
-     * allows us to give them each read once, reguardless of the order specified
-     */
-    private final int[] reads;
-    private final int readCount;
-
-    /**
-     * create the fake iterator, given the mapping of chromosomes and read counts.  If pattern
-     * is specified to be random, it will generate reads that are randomly placed on the current chromosome
-     *
-     * @param startingChr the starting chromosome
-     * @param endingChr   the ending chromosome
-     * @param readCount   the number of reads in each chromosome
-     * @param header      the associated header
-     * @param pattern     the pattern to implement
-     */
-    ArtificialPatternedSAMIterator( int startingChr, int endingChr, int readCount, int unmappedReadCount, SAMFileHeader header, PATTERN pattern ) {
-        super(startingChr, endingChr, readCount, unmappedReadCount, header);
-        mPattern = pattern;
-        this.readCount = readCount;
-        reads = new int[readCount];
-
-        for (int x = 0; x < readCount; x++) {
-            reads[x] = x+1;
-        }
-        if (pattern == PATTERN.RANDOM_READS) {
-            // scramble a bunch of the reads
-            for (int y = 0; y < readCount; y++) {
-                int ranOne = (int) Math.round(Math.random() * ( readCount - 1 ));
-                int ranTwo = (int) Math.round(Math.random() * ( readCount - 1 ));
-                int temp = reads[ranOne];
-                reads[ranOne] = reads[ranTwo];
-                reads[ranTwo] = temp;
-            }
-            /**
-             *  up to this point there's no garauntee that the random() has made the reads out of order (though it's
-             *  extremely extremely unlikely it's failed).  Let's make sure there at least out of order:
-             */
-            if (this.reads[0] < this.reads[reads.length - 1]) {
-                int temp = reads[0];
-                reads[0] = reads[reads.length - 1];
-                reads[reads.length - 1] = temp;
-            }
-
-        }
-
-    }
-
-    /**
-     * override the default ArtificialSAMIterator createNextRead method, which creates the next read
-     *
-     * @return
-     */
-    protected boolean createNextRead() {
-        if (currentRead > rCount) {
-            currentChromo++;
-            currentRead = 1;
-        }
-        // check for end condition, have we finished the chromosome listing, and have no unmapped reads
-        if (currentChromo >= eChromosomeCount) {
-            if (unmappedRemaining < 1) {
-                this.next = null;
-                return false;
-            } else {
-                ++totalReadCount;
-                this.next = ArtificialSAMUtils.createArtificialRead(this.header,
-                        String.valueOf(totalReadCount),
-                        SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX,
-                        SAMRecord.NO_ALIGNMENT_START,
-                        50);
-                --unmappedRemaining;
-                return true;
-            }
-        }
-        ++totalReadCount;
-        this.next = getNextRecord(currentRead);
-
-        ++currentRead;
-        return true;
-    }
-
-
-    /**
-     * get the next read, given it's index in the chromosome
-     *
-     * @param read the read index in the chromosome
-     *
-     * @return a SAMRecord
-     */
-    private SAMRecord getNextRecord( int read ) {
-        if (read > this.readCount) {
-            return ArtificialSAMUtils.createArtificialRead(this.header, String.valueOf(reads[readCount - 1]), currentChromo, reads[readCount - 1], 50);
-        }
-        return ArtificialSAMUtils.createArtificialRead(this.header, String.valueOf(reads[read-1]), currentChromo, reads[read-1], 50);
-    }
-
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/sam/ArtificialReadsTraversal.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/sam/ArtificialReadsTraversal.java
deleted file mode 100644
index 54c2b87..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/sam/ArtificialReadsTraversal.java
+++ /dev/null
@@ -1,140 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.sam;
-
-import htsjdk.samtools.SAMFileHeader;
-import htsjdk.samtools.SAMRecord;
-import org.apache.log4j.Logger;
-import org.broadinstitute.gatk.engine.contexts.ReferenceContext;
-import org.broadinstitute.gatk.engine.datasources.providers.ShardDataProvider;
-import org.broadinstitute.gatk.engine.traversals.TraversalEngine;
-import org.broadinstitute.gatk.engine.walkers.ReadWalker;
-import org.broadinstitute.gatk.engine.walkers.Walker;
-
-
-/*
- * Copyright (c) 2009 The Broad Institute
- *
- * Permission is hereby granted, free of charge, to any person
- * obtaining a copy of this software and associated documentation
- * files (the "Software"), to deal in the Software without
- * restriction, including without limitation the rights to use,
- * copy, modify, merge, publish, distribute, sublicense, and/or sell
- * copies of the Software, and to permit persons to whom the
- * Software is furnished to do so, subject to the following
- * conditions:
- *
- * The above copyright notice and this permission notice shall be
- * included in all copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
- * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
- * OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
- * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
- * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
- * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
- * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
- * OTHER DEALINGS IN THE SOFTWARE.
- */
-
-/**
- * @author aaron
- *
- * this class acts as a fake reads traversal engine for testing out reads based traversals.
- */
-public class ArtificialReadsTraversal<M,T> extends TraversalEngine<M,T,Walker<M,T>,ShardDataProvider> {
-
-    public int startingChr = 1;
-    public int endingChr = 5;
-    public int readsPerChr = 100;
-    public int unMappedReads = 1000;
-    private int DEFAULT_READ_LENGTH = ArtificialSAMUtils.DEFAULT_READ_LENGTH;
-    private ArtificialPatternedSAMIterator iter;
-    /** our log, which we want to capture anything from this class */
-    protected static Logger logger = Logger.getLogger(ArtificialReadsTraversal.class);
-
-    /** Creates a new, uninitialized ArtificialReadsTraversal */
-    public ArtificialReadsTraversal() {
-    }
-
-    // what read ordering are we using
-    private ArtificialPatternedSAMIterator.PATTERN readOrder = ArtificialPatternedSAMIterator.PATTERN.IN_ORDER_READS;
-
-
-    /**
-     * set the read ordering of the reads given to the walker
-     *
-     * @param readOrdering
-     */
-    public void setReadOrder( ArtificialPatternedSAMIterator.PATTERN readOrdering ) {
-        readOrder = readOrdering;
-    }
-
-    @Override
-    public String getTraversalUnits() {
-        return "reads";
-    }
-
-    /**
-     * Traverse by reads, given the data and the walker
-     *
-     * @param walker       the walker to traverse with
-     * @param dataProvider the provider of the reads data
-     * @param sum          the value of type T, specified by the walker, to feed to the walkers reduce function
-     *
-     * @return the reduce variable of the read walker
-     */
-    public T traverse( Walker<M, T> walker,
-                       ShardDataProvider dataProvider,
-                       T sum ) {
-
-        if (!( walker instanceof ReadWalker ))
-            throw new IllegalArgumentException("Walker isn't a read walker!");
-
-        ReadWalker<M, T> readWalker = (ReadWalker<M, T>) walker;
-        SAMFileHeader header = ArtificialSAMUtils.createArtificialSamHeader(( endingChr - startingChr ) + 1, startingChr, readsPerChr + DEFAULT_READ_LENGTH);
-        iter = new ArtificialPatternedSAMIterator(this.startingChr,
-                this.endingChr,
-                this.readsPerChr,
-                this.unMappedReads,
-                header,
-                this.readOrder);
-
-        // while we still have more reads
-        for (SAMRecord read : iter) {
-
-            // an array of characters that represent the reference
-            ReferenceContext refSeq = null;
-
-            final boolean keepMeP = readWalker.filter(refSeq, (GATKSAMRecord) read);
-            if (keepMeP) {
-                M x = readWalker.map(refSeq, (GATKSAMRecord) read, null);  // TODO: fix me at some point, it would be nice to fake out ROD data too
-                sum = readWalker.reduce(x, sum);
-            }
-        }
-        return sum;
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/sam/ArtificialSAMFileReader.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/sam/ArtificialSAMFileReader.java
deleted file mode 100644
index 427b12e..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/sam/ArtificialSAMFileReader.java
+++ /dev/null
@@ -1,156 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.sam;
-
-import htsjdk.samtools.*;
-import org.broadinstitute.gatk.utils.GenomeLoc;
-import org.broadinstitute.gatk.utils.GenomeLocParser;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-
-import java.io.ByteArrayInputStream;
-import java.io.InputStream;
-import java.io.UnsupportedEncodingException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Iterator;
-import java.util.List;
-/**
- * User: hanna
- * Date: Jun 11, 2009
- * Time: 9:35:31 AM
- * BROAD INSTITUTE SOFTWARE COPYRIGHT NOTICE AND AGREEMENT
- * Software and documentation are copyright 2005 by the Broad Institute.
- * All rights are reserved.
- *
- * Users acknowledge that this software is supplied without any warranty or support.
- * The Broad Institute is not responsible for its use, misuse, or
- * functionality.
- */
-
-/**
- * Pass specified reads into the given walker.
- */
-
-public class ArtificialSAMFileReader extends SAMFileReader {
-    /**
-     * The parser, for GenomeLocs.
-     */
-    private final GenomeLocParser genomeLocParser;
-
-    /**
-     * Backing data store of reads.
-     */
-    private final List<SAMRecord> reads;
-
-    private SAMFileHeader customHeader = null;
-
-    /**
-     * Construct an artificial SAM file reader.
-     * @param sequenceDictionary sequence dictionary used to initialize our GenomeLocParser
-     * @param reads Reads to use as backing data source.
-     */
-    public ArtificialSAMFileReader(SAMSequenceDictionary sequenceDictionary,SAMRecord... reads) {
-        super( createEmptyInputStream(),true );
-        this.genomeLocParser = new GenomeLocParser(sequenceDictionary);
-        this.reads = Arrays.asList(reads);
-    }
-
-    /**
-     * Construct an artificial SAM file reader with the given SAM file header
-     *
-     * @param customHeader Header that should be returned by calls to getFileHeader() on this reader
-     * @param reads Reads to use as backing data source.
-     */
-    public ArtificialSAMFileReader( SAMFileHeader customHeader, SAMRecord... reads ) {
-        super(createEmptyInputStream(),true);
-
-        this.customHeader = customHeader;
-        this.genomeLocParser = new GenomeLocParser(customHeader.getSequenceDictionary());
-        this.reads = Arrays.asList(reads);
-    }
-
-
-    @Override
-    public SAMFileHeader getFileHeader() {
-        if ( customHeader != null ) {
-            return customHeader;
-        }
-
-        return super.getFileHeader();
-    }
-
-    /**
-     * @{inheritDoc}
-     */
-    @Override
-    public SAMRecordIterator query(final String sequence, final int start, final int end, final boolean contained) {
-        GenomeLoc region = genomeLocParser.createGenomeLoc(sequence, start, end);
-        List<SAMRecord> coveredSubset = new ArrayList<SAMRecord>();
-
-        for( SAMRecord read: reads ) {
-            GenomeLoc readPosition = genomeLocParser.createGenomeLoc(read);
-            if( contained && region.containsP(readPosition) ) coveredSubset.add(read);
-            else if( !contained && readPosition.overlapsP(region) ) coveredSubset.add(read);
-        }
-
-        final Iterator<SAMRecord> iterator = coveredSubset.iterator();
-        return new SAMRecordIterator() {
-            public boolean hasNext() { return iterator.hasNext(); }
-            public SAMRecord next() { return iterator.next(); }
-            public void close() {}
-            public void remove() { iterator.remove(); }
-            public SAMRecordIterator assertSorted(SAMFileHeader.SortOrder sortOrder) { return this; }
-        };
-    }
-
-    @Override
-    public SAMRecordIterator iterator() {
-        return new SAMRecordIterator() {
-            private final Iterator<SAMRecord> iterator = reads.iterator();
-            public boolean hasNext() { return iterator.hasNext(); }
-            public SAMRecord next() { return iterator.next(); }
-            public void close() {}
-            public void remove() { iterator.remove(); }
-            public SAMRecordIterator assertSorted(SAMFileHeader.SortOrder sortOrder) { return this; }
-        };
-    }
-
-    /**
-     * Builds an empty input stream for faking out the sam file reader.
-     * Derive it from a string so that, in the future, it might be possible
-     * to fake the text of a sam file from samtools output, et.c
-     * @return Stream that returns no characters.
-     */
-    private static InputStream createEmptyInputStream() {
-        try {
-            byte[] byteArray = "".getBytes("ISO-8859-1");
-            return new ByteArrayInputStream(byteArray);
-        }
-        catch( UnsupportedEncodingException ex ) {
-            throw new ReviewedGATKException("Unable to build empty input stream",ex);
-        }
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/sam/ArtificialSAMIterator.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/sam/ArtificialSAMIterator.java
deleted file mode 100644
index b133e9c..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/sam/ArtificialSAMIterator.java
+++ /dev/null
@@ -1,212 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.sam;
-
-import htsjdk.samtools.SAMFileHeader;
-import htsjdk.samtools.SAMRecord;
-import org.broadinstitute.gatk.engine.iterators.GATKSAMIterator;
-
-import java.util.Iterator;
-
-
-/*
- * Copyright (c) 2009 The Broad Institute
- *
- * Permission is hereby granted, free of charge, to any person
- * obtaining a copy of this software and associated documentation
- * files (the "Software"), to deal in the Software without
- * restriction, including without limitation the rights to use,
- * copy, modify, merge, publish, distribute, sublicense, and/or sell
- * copies of the Software, and to permit persons to whom the
- * Software is furnished to do so, subject to the following
- * conditions:
- *
- * The above copyright notice and this permission notice shall be
- * included in all copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
- * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
- * OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
- * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
- * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
- * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
- * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
- * OTHER DEALINGS IN THE SOFTWARE.
- */
-
-/** this fake iterator allows us to look at how specific piles of reads are handled */
-public class ArtificialSAMIterator implements GATKSAMIterator {
-
-
-    protected int currentChromo = 0;
-    protected int currentRead = 1;
-    protected int totalReadCount = 0;
-    protected int unmappedRemaining = 0;
-    protected boolean done = false;
-    // the next record
-    protected SAMRecord next = null;
-    protected SAMFileHeader header = null;
-
-    // the passed in parameters
-    protected final int sChr;
-    protected final int eChromosomeCount;
-    protected final int rCount;
-    protected final int unmappedReadCount;
-
-    // let us know to make a read, we need this to help out the fake sam query iterator
-    private boolean initialized = false;
-
-    /**
-     * Is this iterator currently open or closed?  Closed iterators can be reused.
-     */    
-    protected boolean open = false;
-
-    /**
-     * create the fake iterator, given the mapping of chromosomes and read counts
-     *
-     * @param startingChr the starting chromosome
-     * @param endingChr   the ending chromosome
-     * @param readCount   the number of reads in each chromosome
-     * @param header      the associated header
-     */
-    ArtificialSAMIterator( int startingChr, int endingChr, int readCount, SAMFileHeader header ) {
-        sChr = startingChr;
-        eChromosomeCount = (endingChr - startingChr) + 1;
-        rCount = readCount;
-        this.header = header;
-        unmappedReadCount = 0;
-        reset();
-    }
-
-    protected void reset() {
-        this.currentChromo = 0;
-        this.currentRead = 1;
-        this.totalReadCount = 0;
-        this.done = false;
-        this.next = null;
-        this.initialized = false;
-        this.unmappedRemaining = unmappedReadCount;
-    }
-
-    /**
-     * create the fake iterator, given the mapping of chromosomes and read counts
-     *
-     * @param startingChr the starting chromosome
-     * @param endingChr   the ending chromosome
-     * @param readCount   the number of reads in each chromosome
-     * @param header      the associated header
-     */
-    ArtificialSAMIterator( int startingChr, int endingChr, int readCount, int unmappedReadCount, SAMFileHeader header ) {
-        sChr = startingChr;
-        eChromosomeCount = (endingChr - startingChr) + 1;
-        rCount = readCount;
-        this.header = header;
-        this.currentChromo = 0;
-        this.unmappedReadCount = unmappedReadCount;
-        reset();
-    }
-
-    public void close() {
-        open = false;
-    }
-
-    public boolean hasNext() {
-        open = true;
-
-        if (!initialized){
-            initialized = true;
-            createNextRead();
-        }
-        if (this.next != null) {
-            return true;
-        }
-        return false;
-    }
-
-    protected boolean createNextRead() {
-        if (currentRead > rCount) {
-            currentChromo++;
-            currentRead = 1;
-        }
-        // check for end condition, have we finished the chromosome listing, and have no unmapped reads
-        if (currentChromo >= eChromosomeCount) {
-            if (unmappedRemaining < 1) {
-                this.next = null;
-                return false;
-            } else {
-                ++totalReadCount;
-                this.next = ArtificialSAMUtils.createArtificialRead(this.header,
-                        String.valueOf(totalReadCount),
-                        SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX,
-                        SAMRecord.NO_ALIGNMENT_START,
-                        50);
-                --unmappedRemaining;
-                return true;
-            }
-        }
-        ++totalReadCount;
-        this.next = ArtificialSAMUtils.createArtificialRead(this.header, String.valueOf(totalReadCount), currentChromo, currentRead, 50);
-        ++currentRead;
-        return true;
-    }
-
-
-    public SAMRecord next() {
-        open = true;        
-
-        SAMRecord ret = next;
-        createNextRead();
-        return ret;
-    }
-
-    public void remove() {
-        throw new UnsupportedOperationException("You've tried to remove on a GATKSAMIterator (unsupported), not to mention that this is a fake iterator.");
-    }
-
-    /**
-     * return this iterator, for the iterable interface
-     * @return
-     */
-    public Iterator<SAMRecord> iterator() {
-        return this;
-    }
-
-    /**
-     * some instrumentation methods
-     */
-    public int readsTaken() {
-        return totalReadCount;
-    }
-
-    /**
-     * peek at the next sam record
-     *
-     * @return
-     */
-    public SAMRecord peek() {
-        return this.next;
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/sam/ArtificialSAMQueryIterator.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/sam/ArtificialSAMQueryIterator.java
deleted file mode 100644
index fe7f7b0..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/sam/ArtificialSAMQueryIterator.java
+++ /dev/null
@@ -1,259 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.sam;
-
-import htsjdk.samtools.SAMFileHeader;
-import htsjdk.samtools.SAMRecord;
-import htsjdk.samtools.SAMSequenceRecord;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-
-import java.util.List;
-
-
-/*
- * Copyright (c) 2009 The Broad Institute
- *
- * Permission is hereby granted, free of charge, to any person
- * obtaining a copy of this software and associated documentation
- * files (the "Software"), to deal in the Software without
- * restriction, including without limitation the rights to use,
- * copy, modify, merge, publish, distribute, sublicense, and/or sell
- * copies of the Software, and to permit persons to whom the
- * Software is furnished to do so, subject to the following
- * conditions:
- *
- * The above copyright notice and this permission notice shall be
- * included in all copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
- * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
- * OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
- * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
- * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
- * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
- * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
- * OTHER DEALINGS IN THE SOFTWARE.
- */
-
-/**
- * @author aaron
- *
- * allows query calls to the artificial sam iterator, which allows you
- * to test out classes that use specific itervals.  The reads returned will
- * all lie in order in the specified interval.
- */
-public class ArtificialSAMQueryIterator extends ArtificialSAMIterator {
-
-    // get the next positon
-    protected int finalPos = 0;
-    protected int startPos = 0;
-    protected int contigIndex = -1;
-    protected boolean overlapping = false;
-    protected int startingChr = 0;
-    protected boolean seeked = false;
-
-    /**
-     * create the fake iterator, given the mapping of chromosomes and read counts
-     *
-     * @param startingChr the starting chromosome
-     * @param endingChr   the ending chromosome
-     * @param readCount   the number of reads in each chromosome
-     * @param header      the associated header
-     */
-    ArtificialSAMQueryIterator( int startingChr, int endingChr, int readCount, int unmappedReadCount, SAMFileHeader header ) {
-        super(startingChr, endingChr, readCount, unmappedReadCount, header);
-        this.startingChr = startingChr;
-    }
-
-    @Override
-    protected void reset() {
-        this.startPos = 0;
-        this.finalPos = 0;
-        this.contigIndex = -1;
-        // Doesn't make sense to reset the overlapping flag, because we rely on its state later on.
-        // TODO: Make this a bit more direct.
-        //overlapping = false;
-        this.startingChr = 0;
-        this.seeked = false;
-        super.reset();
-    }
-
-    /**
-     * query containing - get reads contained by the specified interval
-     *
-     * @param contig the contig index string
-     * @param start  the start position
-     * @param stop   the stop position
-     */
-    public void queryContained( String contig, int start, int stop ) {
-        this.overlapping = false;
-        initialize(contig, start, stop);
-    }
-
-    /**
-     * query containing - get reads contained by the specified interval
-     *
-     * @param contig the contig index string
-     * @param start  the start position
-     * @param stop   the stop position
-     */
-    public void queryOverlapping( String contig, int start, int stop ) {
-        this.overlapping = true;
-        initialize(contig, start, stop);
-    }
-
-    public void query( String contig, int start, int stop, boolean contained ) {
-        if (contained)
-            queryContained(contig, start, stop);
-        else
-            queryOverlapping(contig, start, stop);
-    }
-
-    public void queryUnmappedReads() {
-        initializeUnmapped();
-    }
-
-    /**
-     * initialize the iterator to an unmapped read position
-     */
-    public void initializeUnmapped() {
-        // throw away data from the previous invocation, if one exists.
-        ensureUntouched();
-        reset();
-
-        while (super.hasNext() && this.peek().getReferenceIndex() >= 0) {
-            super.next();
-        }
-        // sanity check that we have an actual matching read next
-        SAMRecord rec = this.peek();
-        if (rec == null) {
-            throw new ReviewedGATKException("The next read doesn't match");
-        }
-        // set the seeked variable to true
-        seeked = true;
-    }
-
-
-
-
-    /**
-     * initialize the query iterator
-     *
-     * @param contig the contig
-     * @param start  the start position
-     * @param stop   the stop postition
-     */
-    private void initialize( String contig, int start, int stop ) {
-        // throw away data from the previous invocation, if one exists.
-        ensureUntouched();
-        reset();
-
-        finalPos = stop;
-        startPos = start;
-        if (finalPos < 0) {
-            finalPos = Integer.MAX_VALUE;
-        }
-        // sanity check that we have the contig
-        contigIndex = -1;
-        List<SAMSequenceRecord> list = header.getSequenceDictionary().getSequences();
-        for (SAMSequenceRecord rec : list) {
-            if (rec.getSequenceName().equals(contig)) {
-                contigIndex = rec.getSequenceIndex();
-            }
-        }
-        if (contigIndex < 0) { throw new IllegalArgumentException("ArtificialContig" + contig + " doesn't exist"); }
-        while (super.hasNext() && this.peek().getReferenceIndex() < contigIndex) {
-            super.next();
-        }
-        if (!super.hasNext()) {
-            throw new ReviewedGATKException("Unable to find the target chromosome");
-        }
-        while (super.hasNext() && this.peek().getAlignmentStart() < start) {
-            super.next();
-        }
-        // sanity check that we have an actual matching read next
-        SAMRecord rec = this.peek();
-        if (!matches(rec)) {
-            throw new ReviewedGATKException("The next read doesn't match");
-        }
-        // set the seeked variable to true
-        seeked = true;
-    }
-
-    /**
-     * given a read and the query type, check if it matches our regions
-     *
-     * @param rec the read
-     *
-     * @return true if it belongs in our region
-     */
-    public boolean matches( SAMRecord rec ) {
-        if (rec.getReferenceIndex() != this.contigIndex) {
-            return false;
-        }
-        // if we have an unmapped read, matching the contig is good enough for us
-        if (rec.getReferenceIndex() < 0) {
-            return true;    
-        }
-
-        if (!overlapping) {
-            // if the start or the end are somewhere within our range
-            if (( rec.getAlignmentStart() >= startPos && rec.getAlignmentEnd() <= finalPos )) {
-                return true;
-            }
-        } else {
-            if (( rec.getAlignmentStart() <= finalPos && rec.getAlignmentStart() >= startPos ) ||
-                    ( rec.getAlignmentEnd() <= finalPos && rec.getAlignmentEnd() >= startPos )) {
-                return true;
-            }
-        }
-        return false;
-    }
-
-
-    /**
-     * override the hasNext, to incorportate our limiting factor
-     *
-     * @return
-     */
-    public boolean hasNext() {
-        boolean res = super.hasNext();
-        if (!seeked) {
-            return res;
-        }
-        if (res && matches(this.next)) {
-            return true;
-        }
-        return false;
-    }
-
-    /** make sure we haven't been used as an iterator yet; this is to miror the MergingSamIterator2 action. */
-    public void ensureUntouched() {
-        if (open) {
-            throw new UnsupportedOperationException("We've already been used as an iterator; you can't query after that");
-        }
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/sam/ArtificialSAMUtils.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/sam/ArtificialSAMUtils.java
deleted file mode 100644
index 7fb43ef..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/sam/ArtificialSAMUtils.java
+++ /dev/null
@@ -1,484 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.sam;
-
-import htsjdk.samtools.*;
-import org.broadinstitute.gatk.engine.iterators.GATKSAMIterator;
-import org.broadinstitute.gatk.utils.GenomeLoc;
-import org.broadinstitute.gatk.utils.Utils;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-import org.broadinstitute.gatk.utils.locusiterator.LocusIteratorByState;
-import org.broadinstitute.gatk.utils.pileup.PileupElement;
-import org.broadinstitute.gatk.utils.pileup.ReadBackedPileup;
-import org.broadinstitute.gatk.utils.pileup.ReadBackedPileupImpl;
-
-import java.io.File;
-import java.util.*;
-
-/**
- * @author aaron
- * @version 1.0
- */
-public class ArtificialSAMUtils {
-    public static final int DEFAULT_READ_LENGTH = 50;
-
-    /**
-     * create an artificial sam file
-     *
-     * @param filename            the filename to write to
-     * @param numberOfChromosomes the number of chromosomes
-     * @param startingChromosome  where to start counting
-     * @param chromosomeSize      how large each chromosome is
-     * @param readsPerChomosome   how many reads to make in each chromosome.  They'll be aligned from position 1 to x (which is the number of reads)
-     */
-    public static void createArtificialBamFile(String filename, int numberOfChromosomes, int startingChromosome, int chromosomeSize, int readsPerChomosome) {
-        SAMFileHeader header = createArtificialSamHeader(numberOfChromosomes, startingChromosome, chromosomeSize);
-        File outFile = new File(filename);
-
-        SAMFileWriter out = new SAMFileWriterFactory().makeBAMWriter(header, true, outFile);
-
-        for (int x = startingChromosome; x < startingChromosome + numberOfChromosomes; x++) {
-            for (int readNumber = 1; readNumber < readsPerChomosome; readNumber++) {
-                out.addAlignment(createArtificialRead(header, "Read_" + readNumber, x - startingChromosome, readNumber, DEFAULT_READ_LENGTH));
-            }
-        }
-
-        out.close();
-    }
-
-    /**
-     * create an artificial sam file
-     *
-     * @param filename            the filename to write to
-     * @param numberOfChromosomes the number of chromosomes
-     * @param startingChromosome  where to start counting
-     * @param chromosomeSize      how large each chromosome is
-     * @param readsPerChomosome   how many reads to make in each chromosome.  They'll be aligned from position 1 to x (which is the number of reads)
-     */
-    public static void createArtificialSamFile(String filename, int numberOfChromosomes, int startingChromosome, int chromosomeSize, int readsPerChomosome) {
-        SAMFileHeader header = createArtificialSamHeader(numberOfChromosomes, startingChromosome, chromosomeSize);
-        File outFile = new File(filename);
-
-        SAMFileWriter out = new SAMFileWriterFactory().makeSAMWriter(header, false, outFile);
-
-        for (int x = startingChromosome; x < startingChromosome + numberOfChromosomes; x++) {
-            for (int readNumber = 1; readNumber <= readsPerChomosome; readNumber++) {
-                out.addAlignment(createArtificialRead(header, "Read_" + readNumber, x - startingChromosome, readNumber, 100));
-            }
-        }
-
-        out.close();
-    }
-
-    /**
-     * Creates an artificial sam header, matching the parameters, chromosomes which will be labeled chr1, chr2, etc
-     *
-     * @param numberOfChromosomes the number of chromosomes to create
-     * @param startingChromosome  the starting number for the chromosome (most likely set to 1)
-     * @param chromosomeSize      the length of each chromosome
-     * @return
-     */
-    public static SAMFileHeader createArtificialSamHeader(int numberOfChromosomes, int startingChromosome, int chromosomeSize) {
-        SAMFileHeader header = new SAMFileHeader();
-        header.setSortOrder(htsjdk.samtools.SAMFileHeader.SortOrder.coordinate);
-        SAMSequenceDictionary dict = new SAMSequenceDictionary();
-        // make up some sequence records
-        for (int x = startingChromosome; x < startingChromosome + numberOfChromosomes; x++) {
-            SAMSequenceRecord rec = new SAMSequenceRecord("chr" + (x), chromosomeSize /* size */);
-            rec.setSequenceLength(chromosomeSize);
-            dict.addSequence(rec);
-        }
-        header.setSequenceDictionary(dict);
-        return header;
-    }
-
-    /**
-     * Creates an artificial sam header based on the sequence dictionary dict
-     *
-     * @return a new sam header
-     */
-    public static SAMFileHeader createArtificialSamHeader(final SAMSequenceDictionary dict) {
-        SAMFileHeader header = new SAMFileHeader();
-        header.setSortOrder(htsjdk.samtools.SAMFileHeader.SortOrder.coordinate);
-        header.setSequenceDictionary(dict);
-        return header;
-    }
-
-    /**
-     * Creates an artificial sam header with standard test parameters
-     *
-     * @return the sam header
-     */
-    public static SAMFileHeader createArtificialSamHeader() {
-        return createArtificialSamHeader(1, 1, 1000000);
-    }
-
-    /**
-     * setup a default read group for a SAMFileHeader
-     *
-     * @param header      the header to set
-     * @param readGroupID the read group ID tag
-     * @param sampleName  the sample name
-     * @return the adjusted SAMFileHeader
-     */
-    public static SAMFileHeader createDefaultReadGroup(SAMFileHeader header, String readGroupID, String sampleName) {
-        SAMReadGroupRecord rec = new SAMReadGroupRecord(readGroupID);
-        rec.setSample(sampleName);
-        List<SAMReadGroupRecord> readGroups = new ArrayList<SAMReadGroupRecord>();
-        readGroups.add(rec);
-        header.setReadGroups(readGroups);
-        return header;
-    }
-
-    /**
-     * setup read groups for the specified read groups and sample names
-     *
-     * @param header       the header to set
-     * @param readGroupIDs the read group ID tags
-     * @param sampleNames  the sample names
-     * @return the adjusted SAMFileHeader
-     */
-    public static SAMFileHeader createEnumeratedReadGroups(SAMFileHeader header, List<String> readGroupIDs, List<String> sampleNames) {
-        if (readGroupIDs.size() != sampleNames.size()) {
-            throw new ReviewedGATKException("read group count and sample name count must be the same");
-        }
-
-        List<SAMReadGroupRecord> readGroups = new ArrayList<SAMReadGroupRecord>();
-
-        int x = 0;
-        for (; x < readGroupIDs.size(); x++) {
-            SAMReadGroupRecord rec = new SAMReadGroupRecord(readGroupIDs.get(x));
-            rec.setSample(sampleNames.get(x));
-            readGroups.add(rec);
-        }
-        header.setReadGroups(readGroups);
-        return header;
-    }
-
-
-    /**
-     * Create an artificial read based on the parameters.  The cigar string will be *M, where * is the length of the read
-     *
-     * @param header         the SAM header to associate the read with
-     * @param name           the name of the read
-     * @param refIndex       the reference index, i.e. what chromosome to associate it with
-     * @param alignmentStart where to start the alignment
-     * @param length         the length of the read
-     * @return the artificial read
-     */
-    public static GATKSAMRecord createArtificialRead(SAMFileHeader header, String name, int refIndex, int alignmentStart, int length) {
-        if ((refIndex == SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX && alignmentStart != SAMRecord.NO_ALIGNMENT_START) ||
-                (refIndex != SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX && alignmentStart == SAMRecord.NO_ALIGNMENT_START))
-            throw new ReviewedGATKException("Invalid alignment start for artificial read, start = " + alignmentStart);
-        GATKSAMRecord record = new GATKSAMRecord(header);
-        record.setReadName(name);
-        record.setReferenceIndex(refIndex);
-        record.setAlignmentStart(alignmentStart);
-        List<CigarElement> elements = new ArrayList<CigarElement>();
-        elements.add(new CigarElement(length, CigarOperator.characterToEnum('M')));
-        record.setCigar(new Cigar(elements));
-        record.setProperPairFlag(false);
-
-        // our reads and quals are all 'A's by default
-        byte[] c = new byte[length];
-        byte[] q = new byte[length];
-        for (int x = 0; x < length; x++)
-            c[x] = q[x] = 'A';
-        record.setReadBases(c);
-        record.setBaseQualities(q);
-
-        if (refIndex == SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX) {
-            record.setReadUnmappedFlag(true);
-        }
-
-        return record;
-    }
-
-    /**
-     * Create an artificial read based on the parameters.  The cigar string will be *M, where * is the length of the read
-     *
-     * @param header         the SAM header to associate the read with
-     * @param name           the name of the read
-     * @param refIndex       the reference index, i.e. what chromosome to associate it with
-     * @param alignmentStart where to start the alignment
-     * @param bases          the sequence of the read
-     * @param qual           the qualities of the read
-     * @return the artificial read
-     */
-    public static GATKSAMRecord createArtificialRead(SAMFileHeader header, String name, int refIndex, int alignmentStart, byte[] bases, byte[] qual) {
-        if (bases.length != qual.length) {
-            throw new ReviewedGATKException("Passed in read string is different length then the quality array");
-        }
-        GATKSAMRecord rec = createArtificialRead(header, name, refIndex, alignmentStart, bases.length);
-        rec.setReadBases(bases);
-        rec.setBaseQualities(qual);
-        rec.setReadGroup(new GATKSAMReadGroupRecord("x"));
-        if (refIndex == -1) {
-            rec.setReadUnmappedFlag(true);
-        }
-
-        return rec;
-    }
-
-    /**
-     * Create an artificial read based on the parameters
-     *
-     * @param header         the SAM header to associate the read with
-     * @param name           the name of the read
-     * @param refIndex       the reference index, i.e. what chromosome to associate it with
-     * @param alignmentStart where to start the alignment
-     * @param bases          the sequence of the read
-     * @param qual           the qualities of the read
-     * @param cigar          the cigar string of the read
-     * @return the artificial read
-     */
-    public static GATKSAMRecord createArtificialRead(SAMFileHeader header, String name, int refIndex, int alignmentStart, byte[] bases, byte[] qual, String cigar) {
-        GATKSAMRecord rec = createArtificialRead(header, name, refIndex, alignmentStart, bases, qual);
-        rec.setCigarString(cigar);
-        return rec;
-    }
-
-    /**
-     * Create an artificial read with the following default parameters :
-     * header:
-     * numberOfChromosomes = 1
-     * startingChromosome = 1
-     * chromosomeSize = 1000000
-     * read:
-     * name = "default_read"
-     * refIndex = 0
-     * alignmentStart = 1
-     *
-     * @param bases the sequence of the read
-     * @param qual  the qualities of the read
-     * @param cigar the cigar string of the read
-     * @return the artificial read
-     */
-    public static GATKSAMRecord createArtificialRead(byte[] bases, byte[] qual, String cigar) {
-        SAMFileHeader header = ArtificialSAMUtils.createArtificialSamHeader();
-        return ArtificialSAMUtils.createArtificialRead(header, "default_read", 0, 10000, bases, qual, cigar);
-    }
-
-    public static GATKSAMRecord createArtificialRead(Cigar cigar) {
-        int length = cigar.getReadLength();
-        byte [] base = {'A'};
-        byte [] qual = {30};
-        byte [] bases = Utils.arrayFromArrayWithLength(base, length);
-        byte [] quals = Utils.arrayFromArrayWithLength(qual, length);
-        SAMFileHeader header = ArtificialSAMUtils.createArtificialSamHeader();
-        return ArtificialSAMUtils.createArtificialRead(header, "default_read", 0, 10000, bases, quals, cigar.toString());
-    }
-
-    
-    public final static List<GATKSAMRecord> createPair(SAMFileHeader header, String name, int readLen, int leftStart, int rightStart, boolean leftIsFirst, boolean leftIsNegative) {
-        GATKSAMRecord left = ArtificialSAMUtils.createArtificialRead(header, name, 0, leftStart, readLen);
-        GATKSAMRecord right = ArtificialSAMUtils.createArtificialRead(header, name, 0, rightStart, readLen);
-
-        left.setReadPairedFlag(true);
-        right.setReadPairedFlag(true);
-
-        left.setProperPairFlag(true);
-        right.setProperPairFlag(true);
-
-        left.setFirstOfPairFlag(leftIsFirst);
-        right.setFirstOfPairFlag(!leftIsFirst);
-
-        left.setReadNegativeStrandFlag(leftIsNegative);
-        left.setMateNegativeStrandFlag(!leftIsNegative);
-        right.setReadNegativeStrandFlag(!leftIsNegative);
-        right.setMateNegativeStrandFlag(leftIsNegative);
-
-        left.setMateAlignmentStart(right.getAlignmentStart());
-        right.setMateAlignmentStart(left.getAlignmentStart());
-
-        left.setMateReferenceIndex(0);
-        right.setMateReferenceIndex(0);
-
-        int isize = rightStart + readLen - leftStart;
-        left.setInferredInsertSize(isize);
-        right.setInferredInsertSize(-isize);
-
-        return Arrays.asList(left, right);
-    }
-
-    /**
-     * Create a collection of identical artificial reads based on the parameters.  The cigar string for each
-     * read will be *M, where * is the length of the read.
-     *
-     * Useful for testing things like positional downsampling where you care only about the position and
-     * number of reads, and not the other attributes.
-     *
-     * @param stackSize      number of identical reads to create
-     * @param header         the SAM header to associate each read with
-     * @param name           name associated with each read
-     * @param refIndex       the reference index, i.e. what chromosome to associate them with
-     * @param alignmentStart where to start each alignment
-     * @param length         the length of each read
-     *
-     * @return a collection of stackSize reads all sharing the above properties
-     */
-    public static Collection<GATKSAMRecord> createStackOfIdenticalArtificialReads( int stackSize, SAMFileHeader header, String name, int refIndex, int alignmentStart, int length ) {
-        Collection<GATKSAMRecord> stack = new ArrayList<GATKSAMRecord>(stackSize);
-        for ( int i = 1; i <= stackSize; i++ ) {
-            stack.add(createArtificialRead(header, name, refIndex, alignmentStart, length));
-        }
-        return stack;
-    }
-
-    /**
-     * create an iterator containing the specified read piles
-     *
-     * @param startingChr the chromosome (reference ID) to start from
-     * @param endingChr   the id to end with
-     * @param readCount   the number of reads per chromosome
-     * @return GATKSAMIterator representing the specified amount of fake data
-     */
-    public static GATKSAMIterator mappedReadIterator(int startingChr, int endingChr, int readCount) {
-        SAMFileHeader header = createArtificialSamHeader((endingChr - startingChr) + 1, startingChr, readCount + DEFAULT_READ_LENGTH);
-
-        return new ArtificialSAMQueryIterator(startingChr, endingChr, readCount, 0, header);
-    }
-
-    /**
-     * create an iterator containing the specified read piles
-     *
-     * @param startingChr       the chromosome (reference ID) to start from
-     * @param endingChr         the id to end with
-     * @param readCount         the number of reads per chromosome
-     * @param unmappedReadCount the count of unmapped reads to place at the end of the iterator, like in a sorted bam file
-     * @return GATKSAMIterator representing the specified amount of fake data
-     */
-    public static GATKSAMIterator mappedAndUnmappedReadIterator(int startingChr, int endingChr, int readCount, int unmappedReadCount) {
-        SAMFileHeader header = createArtificialSamHeader((endingChr - startingChr) + 1, startingChr, readCount + DEFAULT_READ_LENGTH);
-
-        return new ArtificialSAMQueryIterator(startingChr, endingChr, readCount, unmappedReadCount, header);
-    }
-
-    /**
-     * create an ArtificialSAMQueryIterator containing the specified read piles
-     *
-     * @param startingChr the chromosome (reference ID) to start from
-     * @param endingChr   the id to end with
-     * @param readCount   the number of reads per chromosome
-     * @return GATKSAMIterator representing the specified amount of fake data
-     */
-    public static ArtificialSAMQueryIterator queryReadIterator(int startingChr, int endingChr, int readCount) {
-        SAMFileHeader header = createArtificialSamHeader((endingChr - startingChr) + 1, startingChr, readCount + DEFAULT_READ_LENGTH);
-
-        return new ArtificialSAMQueryIterator(startingChr, endingChr, readCount, 0, header);
-    }
-
-    /**
-     * create an ArtificialSAMQueryIterator containing the specified read piles
-     *
-     * @param startingChr       the chromosome (reference ID) to start from
-     * @param endingChr         the id to end with
-     * @param readCount         the number of reads per chromosome
-     * @param unmappedReadCount the count of unmapped reads to place at the end of the iterator, like in a sorted bam file
-     * @return GATKSAMIterator representing the specified amount of fake data
-     */
-    public static GATKSAMIterator queryReadIterator(int startingChr, int endingChr, int readCount, int unmappedReadCount) {
-        SAMFileHeader header = createArtificialSamHeader((endingChr - startingChr) + 1, startingChr, readCount + DEFAULT_READ_LENGTH);
-
-        return new ArtificialSAMQueryIterator(startingChr, endingChr, readCount, unmappedReadCount, header);
-    }
-
-    /**
-     * Create an iterator containing the specified reads
-     *
-     * @param reads the reads
-     * @return iterator for the reads
-     */
-    public static GATKSAMIterator createReadIterator(SAMRecord... reads) {
-        return createReadIterator(Arrays.asList(reads));
-    }
-
-    /**
-     * Create an iterator containing the specified reads
-     *
-     * @param reads the reads
-     * @return iterator for the reads
-     */
-    public static GATKSAMIterator createReadIterator(List<SAMRecord> reads) {
-        final Iterator<SAMRecord> iter = reads.iterator();
-        return new GATKSAMIterator() {
-            @Override public void close() {}
-            @Override public Iterator<SAMRecord> iterator() { return iter; }
-            @Override public boolean hasNext() { return iter.hasNext(); }
-            @Override public SAMRecord next() { return iter.next(); }
-            @Override public void remove() { iter.remove(); }
-        };
-    }
-
-    private final static int ranIntInclusive(Random ran, int start, int stop) {
-        final int range = stop - start;
-        return ran.nextInt(range) + start;
-    }
-
-    /**
-     * Creates a read backed pileup containing up to pileupSize reads at refID 0 from header at loc with
-     * reads created that have readLen bases.  Pairs are sampled from a gaussian distribution with mean insert
-     * size of insertSize and variation of insertSize / 10.  The first read will be in the pileup, and the second
-     * may be, depending on where this sampled insertSize puts it.
-     *
-     * @param header
-     * @param loc
-     * @param readLen
-     * @param insertSize
-     * @param pileupSize
-     * @return
-     */
-    public static ReadBackedPileup createReadBackedPileup(final SAMFileHeader header, final GenomeLoc loc, final int readLen, final int insertSize, final int pileupSize) {
-        final Random ran = new Random();
-        final boolean leftIsFirst = true;
-        final boolean leftIsNegative = false;
-        final int insertSizeVariation = insertSize / 10;
-        final int pos = loc.getStart();
-
-        final List<PileupElement> pileupElements = new ArrayList<PileupElement>();
-        for (int i = 0; i < pileupSize / 2; i++) {
-            final String readName = "read" + i;
-            final int leftStart = ranIntInclusive(ran, 1, pos);
-            final int fragmentSize = (int) (ran.nextGaussian() * insertSizeVariation + insertSize);
-            final int rightStart = leftStart + fragmentSize - readLen;
-
-            if (rightStart <= 0) continue;
-
-            List<GATKSAMRecord> pair = createPair(header, readName, readLen, leftStart, rightStart, leftIsFirst, leftIsNegative);
-            final GATKSAMRecord left = pair.get(0);
-            final GATKSAMRecord right = pair.get(1);
-
-            pileupElements.add(LocusIteratorByState.createPileupForReadAndOffset(left, pos - leftStart));
-
-            if (pos >= right.getAlignmentStart() && pos <= right.getAlignmentEnd()) {
-                pileupElements.add(LocusIteratorByState.createPileupForReadAndOffset(right, pos - rightStart));
-            }
-        }
-
-        Collections.sort(pileupElements);
-        return new ReadBackedPileupImpl(loc, pileupElements);
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/sam/ArtificialSingleSampleReadStream.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/sam/ArtificialSingleSampleReadStream.java
deleted file mode 100644
index 27e25d3..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/sam/ArtificialSingleSampleReadStream.java
+++ /dev/null
@@ -1,213 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.sam;
-
-import htsjdk.samtools.SAMFileHeader;
-import htsjdk.samtools.SAMRecord;
-import org.broadinstitute.gatk.engine.iterators.GATKSAMIterator;
-import org.broadinstitute.gatk.engine.iterators.GATKSAMIteratorAdapter;
-import org.broadinstitute.gatk.utils.MathUtils;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Iterator;
-
-/**
- * An artificial stream of reads from a single read group/sample with configurable characteristics
- * such as:
- *
- * -the number of contigs that the reads should be distributed across
- * -number of "stacks" of reads sharing the same alignment start position per contig
- * -the min/max number of reads in each stack (exact values chosen randomly from this range)
- * -the min/max distance between stack start positions (exact values chosen randomly from this range)
- * -the min/max length of each read (exact values chosen randomly from this range)
- * -the number of unmapped reads
- *
- * The cigar string for all reads will be *M, where * is the length of the read.
- *
- * @author David Roazen
- */
-public class ArtificialSingleSampleReadStream implements Iterable<SAMRecord> {
-    private SAMFileHeader header;
-    private String readGroupID;
-    private int numContigs;
-    private int numStacksPerContig;
-    private int minReadsPerStack;
-    private int maxReadsPerStack;
-    private int minDistanceBetweenStacks;
-    private int maxDistanceBetweenStacks;
-    private int minReadLength;
-    private int maxReadLength;
-    private int numUnmappedReads;
-
-    private static final String READ_GROUP_TAG = "RG";
-
-    public ArtificialSingleSampleReadStream( SAMFileHeader header,
-                                             String readGroupID,
-                                             int numContigs,
-                                             int numStacksPerContig,
-                                             int minReadsPerStack,
-                                             int maxReadsPerStack,
-                                             int minDistanceBetweenStacks,
-                                             int maxDistanceBetweenStacks,
-                                             int minReadLength,
-                                             int maxReadLength,
-                                             int numUnmappedReads ) {
-        this.header = header;
-        this.readGroupID = readGroupID;
-        this.numContigs = numContigs;
-        this.numStacksPerContig = numStacksPerContig;
-        this.minReadsPerStack = minReadsPerStack;
-        this.maxReadsPerStack = maxReadsPerStack;
-        this.minDistanceBetweenStacks = minDistanceBetweenStacks;
-        this.maxDistanceBetweenStacks = maxDistanceBetweenStacks;
-        this.minReadLength = minReadLength;
-        this.maxReadLength = maxReadLength;
-        this.numUnmappedReads = numUnmappedReads;
-
-        validateStreamParameters();
-    }
-
-    private void validateStreamParameters() {
-        if ( header == null || readGroupID == null ) {
-            throw new ReviewedGATKException("null SAMFileHeader or read group ID") ;
-        }
-
-        if ( header.getReadGroup(readGroupID) == null ) {
-            throw new ReviewedGATKException(String.format("Read group %s not found in SAMFileHeader", readGroupID));
-        }
-
-        if ( numContigs < 0 || numStacksPerContig < 0 || minReadsPerStack < 0 || maxReadsPerStack < 0 ||
-             minDistanceBetweenStacks < 0 || maxDistanceBetweenStacks < 0 || minReadLength < 0 || maxReadLength < 0 ||
-             numUnmappedReads < 0 ) {
-            throw new ReviewedGATKException("Read stream parameters must be >= 0");
-        }
-
-        if ( (numContigs == 0 && numStacksPerContig != 0) || (numContigs != 0 && numStacksPerContig == 0) ) {
-            throw new ReviewedGATKException("numContigs and numStacksPerContig must either both be > 0, or both be 0");
-        }
-
-        if ( minReadsPerStack > maxReadsPerStack ) {
-            throw new ReviewedGATKException("minReadsPerStack > maxReadsPerStack");
-        }
-
-        if ( minDistanceBetweenStacks > maxDistanceBetweenStacks ) {
-            throw new ReviewedGATKException("minDistanceBetweenStacks > maxDistanceBetweenStacks");
-        }
-
-        if ( minReadLength > maxReadLength ) {
-            throw new ReviewedGATKException("minReadLength > maxReadLength");
-        }
-    }
-
-    public Iterator<SAMRecord> iterator() {
-        return makeReads().iterator();
-    }
-
-    public GATKSAMIterator getGATKSAMIterator() {
-        return GATKSAMIteratorAdapter.adapt(iterator());
-    }
-
-    public Collection<SAMRecord> makeReads() {
-        Collection<SAMRecord> reads = new ArrayList<SAMRecord>(numContigs * numStacksPerContig * maxReadsPerStack);
-
-        for ( int contig = 0; contig < numContigs; contig++ ) {
-            int alignmentStart = 1;
-
-            for ( int stack = 0; stack < numStacksPerContig; stack++ ) {
-                reads.addAll(makeReadStack(contig, alignmentStart, MathUtils.randomIntegerInRange(minReadsPerStack, maxReadsPerStack)));
-                alignmentStart += MathUtils.randomIntegerInRange(minDistanceBetweenStacks, maxDistanceBetweenStacks);
-            }
-        }
-
-        if ( numUnmappedReads > 0 ) {
-            reads.addAll(makeReadStack(SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX, SAMRecord.NO_ALIGNMENT_START, numUnmappedReads));
-        }
-
-        return reads;
-    }
-
-    private Collection<SAMRecord> makeReadStack( int contig, int alignmentStart, int stackSize ) {
-        Collection<SAMRecord> readStack = new ArrayList<SAMRecord>(stackSize);
-
-        for ( int i = 0; i < stackSize; i++ ) {
-            SAMRecord read = ArtificialSAMUtils.createArtificialRead(header,
-                                                                     "foo",
-                                                                     contig,
-                                                                     alignmentStart,
-                                                                     MathUtils.randomIntegerInRange(minReadLength, maxReadLength));
-            read.setAttribute(READ_GROUP_TAG, readGroupID);
-            readStack.add(read);
-        }
-
-        return readStack;
-    }
-
-    public SAMFileHeader getHeader() {
-        return header;
-    }
-
-    public String getReadGroupID() {
-        return readGroupID;
-    }
-
-    public int getNumContigs() {
-        return numContigs;
-    }
-
-    public int getNumStacksPerContig() {
-        return numStacksPerContig;
-    }
-
-    public int getMinReadsPerStack() {
-        return minReadsPerStack;
-    }
-
-    public int getMaxReadsPerStack() {
-        return maxReadsPerStack;
-    }
-
-    public int getMinDistanceBetweenStacks() {
-        return minDistanceBetweenStacks;
-    }
-
-    public int getMaxDistanceBetweenStacks() {
-        return maxDistanceBetweenStacks;
-    }
-
-    public int getMinReadLength() {
-        return minReadLength;
-    }
-
-    public int getMaxReadLength() {
-        return maxReadLength;
-    }
-
-    public int getNumUnmappedReads() {
-        return numUnmappedReads;
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/sam/ArtificialSingleSampleReadStreamAnalyzer.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/sam/ArtificialSingleSampleReadStreamAnalyzer.java
deleted file mode 100644
index 196fa71..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/sam/ArtificialSingleSampleReadStreamAnalyzer.java
+++ /dev/null
@@ -1,282 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.sam;
-
-import htsjdk.samtools.SAMRecord;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-
-import java.util.ArrayList;
-import java.util.List;
-
-/**
- * A class for analyzing and validating the read stream produced by an ArtificialSingleSampleReadStream.
- *
- * Collects various statistics about the stream of reads it's fed, and validates the stream
- * by checking whether the collected statistics match the nominal properties of the stream.
- *
- * Subclasses are expected to override the validate() method in order to check whether an artificial
- * read stream has been *transformed* in some way (eg., by downsampling or some other process), rather
- * than merely checking whether the stream matches its original properties.
- *
- * Usage is simple:
- *
- * ArtificialSingleSampleReadStreamAnalyzer analyzer = new ArtificialSingleSampleReadStreamAnalyzer(originalStream);
- * analyzer.analyze(originalOrTransformedStream);
- * analyzer.validate();  // override this method if you want to check whether the stream has been transformed
- *                       // in a certain way relative to the original stream
- *
- * @author David Roazen
- */
-public class ArtificialSingleSampleReadStreamAnalyzer {
-    protected ArtificialSingleSampleReadStream originalStream;
-    protected SAMRecord lastRead;
-    protected int totalReads;
-    protected boolean allSamplesMatch;
-    protected int numContigs;
-    protected List<Integer> stacksPerContig;
-    protected Integer minReadsPerStack;
-    protected Integer maxReadsPerStack;
-    protected Integer minDistanceBetweenStacks;
-    protected Integer maxDistanceBetweenStacks;
-    protected Integer minReadLength;
-    protected Integer maxReadLength;
-    protected int numUnmappedReads;
-
-    protected int currentContigNumStacks;
-    protected int currentStackNumReads;
-
-    /**
-     * Construct a new read stream analyzer, providing an ArtificialSingleSampleReadStream that will
-     * serve as the basis for comparison after the analysis is complete.
-     *
-     * @param originalStream the original ArtificialSingleSampleReadStream upon which the stream
-     *                       that will be fed to the analyzer is based
-     */
-    public ArtificialSingleSampleReadStreamAnalyzer( ArtificialSingleSampleReadStream originalStream ) {
-        this.originalStream = originalStream;
-        reset();
-    }
-
-    /**
-     * Reset all read stream statistics collected by this analyzer to prepare for a fresh run
-     */
-    public void reset() {
-        lastRead = null;
-        totalReads = 0;
-        allSamplesMatch = true;
-        numContigs = 0;
-        stacksPerContig = new ArrayList<Integer>();
-        minReadsPerStack = null;
-        maxReadsPerStack = null;
-        minDistanceBetweenStacks = null;
-        maxDistanceBetweenStacks = null;
-        minReadLength = null;
-        maxReadLength = null;
-        numUnmappedReads = 0;
-        currentContigNumStacks = 0;
-        currentStackNumReads = 0;
-    }
-
-    /**
-     * Collect statistics on the stream of reads passed in
-     *
-     * @param stream the stream of reads to analyze
-     */
-    public void analyze( Iterable<SAMRecord> stream ) {
-        for ( SAMRecord read : stream ) {
-            update(read);
-        }
-        finalizeStats();
-    }
-
-    /**
-     * Validate the stream by checking whether our collected statistics match the properties of the
-     * original stream. Throws a ReviewedGATKException if the stream is invalid.
-     *
-     * Override this method if you want to check whether the stream has been transformed in some
-     * way relative to the original stream.
-     */
-    public void validate() {
-        if ( (originalStream.getNumContigs() == 0 || originalStream.getNumStacksPerContig() == 0) && originalStream.getNumUnmappedReads() == 0 ) {
-            if ( totalReads != 0 ) {
-                throw new ReviewedGATKException("got reads from the stream, but the stream was configured to have 0 reads");
-            }
-            return;  // no further validation needed for the 0-reads case
-        }
-        else if ( totalReads == 0 ) {
-            throw new ReviewedGATKException("got no reads from the stream, but the stream was configured to have > 0 reads");
-        }
-
-        if ( ! allSamplesMatch ) {
-            throw new ReviewedGATKException("some reads had the wrong sample");
-        }
-
-        if ( numContigs != originalStream.getNumContigs() ) {
-            throw new ReviewedGATKException("number of contigs not correct");
-        }
-
-        if ( stacksPerContig.size() != originalStream.getNumContigs() ) {
-            throw new ReviewedGATKException(String.format("bug in analyzer code: calculated sizes for %d contigs even though there were only %d contigs",
-                                                           stacksPerContig.size(), originalStream.getNumContigs()));
-        }
-
-        for ( int contigStackCount : stacksPerContig ) {
-            if ( contigStackCount != originalStream.getNumStacksPerContig() ) {
-                throw new ReviewedGATKException("contig had incorrect number of stacks");
-            }
-        }
-
-        if ( originalStream.getNumStacksPerContig() > 0 ) {
-            if ( minReadsPerStack < originalStream.getMinReadsPerStack() ) {
-                throw new ReviewedGATKException("stack had fewer than the minimum number of reads");
-            }
-            if ( maxReadsPerStack > originalStream.getMaxReadsPerStack() ) {
-                throw new ReviewedGATKException("stack had more than the maximum number of reads");
-            }
-        }
-        else if ( minReadsPerStack != null || maxReadsPerStack != null ) {
-            throw new ReviewedGATKException("bug in analyzer code: reads per stack was calculated even though 0 stacks per contig was specified");
-        }
-
-        if ( originalStream.getNumStacksPerContig() > 1 ) {
-            if ( minDistanceBetweenStacks < originalStream.getMinDistanceBetweenStacks() ) {
-                throw new ReviewedGATKException("stacks were separated by less than the minimum distance");
-            }
-            if ( maxDistanceBetweenStacks > originalStream.getMaxDistanceBetweenStacks() ) {
-                throw new ReviewedGATKException("stacks were separated by more than the maximum distance");
-            }
-        }
-        else if ( minDistanceBetweenStacks != null || maxDistanceBetweenStacks != null ) {
-            throw new ReviewedGATKException("bug in analyzer code: distance between stacks was calculated even though numStacksPerContig was <= 1");
-        }
-
-        if ( minReadLength < originalStream.getMinReadLength() ) {
-            throw new ReviewedGATKException("read was shorter than the minimum allowed length");
-        }
-        if ( maxReadLength > originalStream.getMaxReadLength() ) {
-            throw new ReviewedGATKException("read was longer than the maximum allowed length");
-        }
-
-        if ( numUnmappedReads != originalStream.getNumUnmappedReads() ) {
-            throw new ReviewedGATKException(String.format("wrong number of unmapped reads: requested %d but saw %d",
-                                                           originalStream.getNumUnmappedReads(), numUnmappedReads));
-        }
-
-        if ( (originalStream.getNumContigs() == 0 || originalStream.getNumStacksPerContig() == 0) &&
-             numUnmappedReads != totalReads ) {
-            throw new ReviewedGATKException("stream should have consisted only of unmapped reads, but saw some mapped reads");
-        }
-    }
-
-    public void update( SAMRecord read ) {
-        if ( read.getReadUnmappedFlag() ) {
-            numUnmappedReads++;
-
-            if ( numUnmappedReads == 1 && lastRead != null ) {
-                processContigChange();
-                numContigs--;
-            }
-        }
-        else if ( lastRead == null ) {
-            numContigs = 1;
-            currentContigNumStacks = 1;
-            currentStackNumReads = 1;
-        }
-        else if ( ! read.getReferenceIndex().equals(lastRead.getReferenceIndex()) ) {
-            processContigChange();
-        }
-        else if ( read.getAlignmentStart() != lastRead.getAlignmentStart() ) {
-            processStackChangeWithinContig(read);
-        }
-        else {
-            currentStackNumReads++;
-        }
-
-        updateReadLength(read.getReadLength());
-        allSamplesMatch = allSamplesMatch && readHasCorrectSample(read);
-        totalReads++;
-
-        lastRead = read;
-    }
-
-
-    private void processContigChange() {
-        numContigs++;
-
-        stacksPerContig.add(currentContigNumStacks);
-        currentContigNumStacks = 1;
-
-        updateReadsPerStack(currentStackNumReads);
-        currentStackNumReads = 1;
-    }
-
-    private void processStackChangeWithinContig( SAMRecord read ) {
-        currentContigNumStacks++;
-
-        updateReadsPerStack(currentStackNumReads);
-        currentStackNumReads = 1;
-
-        updateDistanceBetweenStacks(read.getAlignmentStart() - lastRead.getAlignmentStart());
-    }
-
-    private void updateReadsPerStack( int stackReadCount ) {
-        if ( minReadsPerStack == null || stackReadCount < minReadsPerStack ) {
-            minReadsPerStack = stackReadCount;
-        }
-        if ( maxReadsPerStack == null || stackReadCount > maxReadsPerStack ) {
-            maxReadsPerStack = stackReadCount;
-        }
-    }
-
-    private void updateDistanceBetweenStacks( int stackDistance ) {
-        if ( minDistanceBetweenStacks == null || stackDistance < minDistanceBetweenStacks ) {
-            minDistanceBetweenStacks = stackDistance;
-        }
-        if ( maxDistanceBetweenStacks == null || stackDistance > maxDistanceBetweenStacks ) {
-            maxDistanceBetweenStacks = stackDistance;
-        }
-    }
-
-    private void updateReadLength( int readLength ) {
-        if ( minReadLength == null || readLength < minReadLength ) {
-            minReadLength = readLength;
-        }
-        if ( maxReadLength == null || readLength > maxReadLength ) {
-            maxReadLength = readLength;
-        }
-    }
-
-    private boolean readHasCorrectSample( SAMRecord read ) {
-        return originalStream.getReadGroupID().equals(read.getAttribute("RG"));
-    }
-
-    public void finalizeStats() {
-        if ( lastRead != null && ! lastRead.getReadUnmappedFlag() ) {
-            stacksPerContig.add(currentContigNumStacks);
-            updateReadsPerStack(currentStackNumReads);
-        }
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/sam/BySampleSAMFileWriter.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/sam/BySampleSAMFileWriter.java
deleted file mode 100644
index e212bd9..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/sam/BySampleSAMFileWriter.java
+++ /dev/null
@@ -1,70 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.sam;
-
-import htsjdk.samtools.SAMFileHeader;
-import htsjdk.samtools.SAMProgramRecord;
-import htsjdk.samtools.SAMReadGroupRecord;
-import htsjdk.samtools.SAMRecord;
-import org.broadinstitute.gatk.engine.GenomeAnalysisEngine;
-import org.broadinstitute.gatk.engine.datasources.reads.SAMReaderID;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-
-import java.util.HashMap;
-import java.util.Map;
-
-/**
- * Created by IntelliJ IDEA.
- * User: carneiro
- * Date: Nov 13
- */
-public class BySampleSAMFileWriter extends NWaySAMFileWriter{
-
-    private final Map<String, SAMReaderID> sampleToWriterMap;
-
-    public BySampleSAMFileWriter(GenomeAnalysisEngine toolkit, String ext, SAMFileHeader.SortOrder order, boolean presorted, boolean indexOnTheFly, boolean generateMD5, SAMProgramRecord pRecord, boolean keep_records) {
-        super(toolkit, ext, order, presorted, indexOnTheFly, generateMD5, pRecord, keep_records);
-
-        sampleToWriterMap = new HashMap<String, SAMReaderID>(toolkit.getSAMFileHeader().getReadGroups().size() * 2);
-
-        for (SAMReaderID readerID : toolkit.getReadsDataSource().getReaderIDs()) {
-            for (SAMReadGroupRecord rg : toolkit.getReadsDataSource().getHeader(readerID).getReadGroups()) {
-                String sample = rg.getSample();
-                if (sampleToWriterMap.containsKey(sample) && sampleToWriterMap.get(sample) != readerID) {
-                    throw new ReviewedGATKException("The same sample appears in multiple files, this input cannot be multiplexed using the BySampleSAMFileWriter, try NWaySAMFileWriter instead.");
-                }
-                else {
-                    sampleToWriterMap.put(sample, readerID);
-                }
-            }
-        }
-    }
-
-    @Override
-    public void addAlignment(SAMRecord samRecord) {
-        super.addAlignment(samRecord, sampleToWriterMap.get(samRecord.getReadGroup().getSample()));
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/sam/CigarUtils.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/sam/CigarUtils.java
deleted file mode 100644
index cd492fe..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/sam/CigarUtils.java
+++ /dev/null
@@ -1,273 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.sam;
-
-import com.google.java.contract.Ensures;
-import htsjdk.samtools.Cigar;
-import htsjdk.samtools.CigarElement;
-import htsjdk.samtools.CigarOperator;
-import htsjdk.samtools.TextCigarCodec;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-import org.broadinstitute.gatk.utils.smithwaterman.Parameters;
-import org.broadinstitute.gatk.utils.smithwaterman.SWPairwiseAlignment;
-import org.broadinstitute.gatk.utils.smithwaterman.SmithWaterman;
-
-import java.util.Arrays;
-import java.util.Stack;
-
-/**
- * Created with IntelliJ IDEA.
- * User: ami
- * Date: 11/26/13
- * Time: 11:33 AM
- * To change this template use File | Settings | File Templates.
- */
-public class CigarUtils {
-
-    /**
-     * Combines equal adjacent elements of a Cigar object
-     *
-     * @param rawCigar the cigar object
-     * @return a combined cigar object
-     */
-    public static Cigar combineAdjacentCigarElements(Cigar rawCigar) {
-        Cigar combinedCigar = new Cigar();
-        CigarElement lastElement = null;
-        int lastElementLength = 0;
-        for (CigarElement cigarElement : rawCigar.getCigarElements()) {
-            if (lastElement != null &&
-                    ((lastElement.getOperator() == cigarElement.getOperator()) ||
-                            (lastElement.getOperator() == CigarOperator.I && cigarElement.getOperator() == CigarOperator.D) ||
-                            (lastElement.getOperator() == CigarOperator.D && cigarElement.getOperator() == CigarOperator.I)))
-                lastElementLength += cigarElement.getLength();
-            else
-            {
-                if (lastElement != null)
-                    combinedCigar.add(new CigarElement(lastElementLength, lastElement.getOperator()));
-
-                lastElement = cigarElement;
-                lastElementLength = cigarElement.getLength();
-            }
-        }
-        if (lastElement != null)
-            combinedCigar.add(new CigarElement(lastElementLength, lastElement.getOperator()));
-
-        return combinedCigar;
-    }
-
-    public static Cigar invertCigar (Cigar cigar) {
-        Stack<CigarElement> cigarStack = new Stack<CigarElement>();
-        for (CigarElement cigarElement : cigar.getCigarElements())
-            cigarStack.push(cigarElement);
-
-        Cigar invertedCigar = new Cigar();
-        while (!cigarStack.isEmpty())
-            invertedCigar.add(cigarStack.pop());
-
-        return invertedCigar;
-    }
-
-    /**
-     * Checks whether or not the read has any cigar element that is not H or S
-     *
-     * @param read the read
-     * @return true if it has any M, I or D, false otherwise
-     */
-    public static boolean readHasNonClippedBases(GATKSAMRecord read) {
-        for (CigarElement cigarElement : read.getCigar().getCigarElements())
-            if (cigarElement.getOperator() != CigarOperator.SOFT_CLIP && cigarElement.getOperator() != CigarOperator.HARD_CLIP)
-                return true;
-        return false;
-    }
-
-    public static Cigar cigarFromString(String cigarString) {
-        return TextCigarCodec.getSingleton().decode(cigarString);
-    }
-
-    /**
-    * A valid cigar object obeys the following rules:
-    *  - No Hard/Soft clips in the middle of the read
-    *  - No deletions in the beginning / end of the read
-    *  - No repeated adjacent element (e.g. 1M2M -> this should be 3M)
-    *  - No consecutive I/D elements
-    **/
-    public static boolean isCigarValid(Cigar cigar) {
-        if (cigar.isValid(null, -1) == null) {                                                                          // This should take care of most invalid Cigar Strings (picard's "exhaustive" implementation)
-
-            Stack<CigarElement> cigarElementStack = new Stack<CigarElement>();                                          // Stack to invert cigar string to find ending operator
-            CigarOperator startingOp = null;
-            CigarOperator endingOp = null;
-
-            // check if it doesn't start with deletions
-            boolean readHasStarted = false;                                                                             // search the list of elements for the starting operator
-            for (CigarElement cigarElement : cigar.getCigarElements()) {
-                if (!readHasStarted) {
-                    if (cigarElement.getOperator() != CigarOperator.SOFT_CLIP && cigarElement.getOperator() != CigarOperator.HARD_CLIP) {
-                        readHasStarted = true;
-                        startingOp = cigarElement.getOperator();
-                    }
-                }
-                cigarElementStack.push(cigarElement);
-            }
-
-            while (!cigarElementStack.empty()) {
-                CigarElement cigarElement = cigarElementStack.pop();
-                if (cigarElement.getOperator() != CigarOperator.SOFT_CLIP && cigarElement.getOperator() != CigarOperator.HARD_CLIP) {
-                    endingOp = cigarElement.getOperator();
-                    break;
-                }
-            }
-
-            if (startingOp != CigarOperator.DELETION && endingOp != CigarOperator.DELETION && startingOp != CigarOperator.SKIPPED_REGION && endingOp != CigarOperator.SKIPPED_REGION)
-                return true;                                                                                          // we don't accept reads starting or ending in deletions (add any other constraint here)
-        }
-
-        return false;
-    }
-
-    public static final int countRefBasesBasedOnCigar(final GATKSAMRecord read, final int cigarStartIndex, final int cigarEndIndex){
-        int result = 0;
-        for(int i = cigarStartIndex; i<cigarEndIndex;i++){
-            final CigarElement cigarElement = read.getCigar().getCigarElement(i);
-            switch (cigarElement.getOperator()) {
-                case M:
-                case S:
-                case D:
-                case N:
-                case H:
-                    result += cigarElement.getLength();
-                    break;
-                case I:
-                    break;
-                default:
-                    throw new ReviewedGATKException("Unsupported cigar operator: " + cigarElement.getOperator());
-            }
-        }
-        return result;
-    }
-
-    // used in the bubble state machine to apply Smith-Waterman to the bubble sequence
-    // these values were chosen via optimization against the NA12878 knowledge base
-    public static final Parameters NEW_SW_PARAMETERS = new Parameters(200, -150, -260, -11);
-
-    private final static String SW_PAD = "NNNNNNNNNN";
-
-    /**
-     * Calculate the cigar elements for this path against the reference sequence
-     *
-     * @param refSeq the reference sequence that all of the bases in this path should align to
-     * @return a Cigar mapping this path to refSeq, or null if no reasonable alignment could be found
-     */
-    public static Cigar calculateCigar(final byte[] refSeq, final byte[] altSeq) {
-        if ( altSeq.length == 0 ) {
-            // horrible edge case from the unit tests, where this path has no bases
-            return new Cigar(Arrays.asList(new CigarElement(refSeq.length, CigarOperator.D)));
-        }
-
-        final Cigar nonStandard;
-
-        final String paddedRef = SW_PAD + new String(refSeq) + SW_PAD;
-        final String paddedPath = SW_PAD + new String(altSeq) + SW_PAD;
-        final SmithWaterman alignment = new SWPairwiseAlignment( paddedRef.getBytes(), paddedPath.getBytes(), NEW_SW_PARAMETERS);
-
-        if ( isSWFailure(alignment) ) {
-            return null;
-        }
-
-
-        // cut off the padding bases
-        final int baseStart = SW_PAD.length();
-        final int baseEnd = paddedPath.length() - SW_PAD.length() - 1; // -1 because it's inclusive
-        nonStandard = AlignmentUtils.trimCigarByBases(alignment.getCigar(), baseStart, baseEnd);
-
-        if ( nonStandard.getReferenceLength() != refSeq.length ) {
-            nonStandard.add(new CigarElement(refSeq.length - nonStandard.getReferenceLength(), CigarOperator.D));
-        }
-
-        // finally, return the cigar with all indels left aligned
-        return leftAlignCigarSequentially(nonStandard, refSeq, altSeq, 0, 0);
-    }
-
-    /**
-     * Make sure that the SW didn't fail in some terrible way, and throw exception if it did
-     */
-    private static boolean isSWFailure(final SmithWaterman alignment) {
-        // check that the alignment starts at the first base, which it should given the padding
-        if ( alignment.getAlignmentStart2wrt1() > 0 ) {
-            return true;
-//          throw new IllegalStateException("SW failure ref " + paddedRef + " vs. " + paddedPath + " should always start at 0, but got " + alignment.getAlignmentStart2wrt1() + " with cigar " + alignment.getCigar());
-        }
-
-        // check that we aren't getting any S operators (which would be very bad downstream)
-        for ( final CigarElement ce : alignment.getCigar().getCigarElements() ) {
-            if ( ce.getOperator() == CigarOperator.S )
-                return true;
-            // soft clips at the end of the alignment are really insertions
-//                throw new IllegalStateException("SW failure ref " + paddedRef + " vs. " + paddedPath + " should never contain S operators but got cigar " + alignment.getCigar());
-        }
-
-        return false;
-    }
-
-    /**
-     * Left align the given cigar sequentially. This is needed because AlignmentUtils doesn't accept cigars with more than one indel in them.
-     * This is a target of future work to incorporate and generalize into AlignmentUtils for use by others.
-     * @param cigar     the cigar to left align
-     * @param refSeq    the reference byte array
-     * @param readSeq   the read byte array
-     * @param refIndex  0-based alignment start position on ref
-     * @param readIndex 0-based alignment start position on read
-     * @return          the left-aligned cigar
-     */
-    @Ensures({"cigar != null", "refSeq != null", "readSeq != null", "refIndex >= 0", "readIndex >= 0"})
-    public static Cigar leftAlignCigarSequentially(final Cigar cigar, final byte[] refSeq, final byte[] readSeq, int refIndex, int readIndex) {
-        final Cigar cigarToReturn = new Cigar();
-        Cigar cigarToAlign = new Cigar();
-        for (int i = 0; i < cigar.numCigarElements(); i++) {
-            final CigarElement ce = cigar.getCigarElement(i);
-            if (ce.getOperator() == CigarOperator.D || ce.getOperator() == CigarOperator.I) {
-                cigarToAlign.add(ce);
-                final Cigar leftAligned = AlignmentUtils.leftAlignSingleIndel(cigarToAlign, refSeq, readSeq, refIndex, readIndex, false);
-                for ( final CigarElement toAdd : leftAligned.getCigarElements() ) { cigarToReturn.add(toAdd); }
-                refIndex += cigarToAlign.getReferenceLength();
-                readIndex += cigarToAlign.getReadLength();
-                cigarToAlign = new Cigar();
-            } else {
-                cigarToAlign.add(ce);
-            }
-        }
-        if( !cigarToAlign.isEmpty() ) {
-            for( final CigarElement toAdd : cigarToAlign.getCigarElements() ) {
-                cigarToReturn.add(toAdd);
-            }
-        }
-
-        final Cigar result = AlignmentUtils.consolidateCigar(cigarToReturn);
-        if( result.getReferenceLength() != cigar.getReferenceLength() )
-            throw new IllegalStateException("leftAlignCigarSequentially failed to produce a valid CIGAR.  Reference lengths differ.  Initial cigar " + cigar + " left aligned into " + result);
-        return result;
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/sam/GATKSAMReadGroupRecord.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/sam/GATKSAMReadGroupRecord.java
deleted file mode 100644
index 6af9059..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/sam/GATKSAMReadGroupRecord.java
+++ /dev/null
@@ -1,116 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.sam;
-
-import htsjdk.samtools.SAMReadGroupRecord;
-import org.broadinstitute.gatk.utils.NGSPlatform;
-
-/**
- * @author ebanks
- * GATKSAMReadGroupRecord
- *
- * this class extends the samtools SAMReadGroupRecord class and caches important
- * (and oft-accessed) data that's not already cached by the SAMReadGroupRecord class
- *
- */
-public class GATKSAMReadGroupRecord extends SAMReadGroupRecord {
-    // the SAMReadGroupRecord data we're caching
-    private String mSample = null;
-    private String mPlatform = null;
-    private NGSPlatform mNGSPlatform = null;
-
-    // because some values can be null, we don't want to duplicate effort
-    private boolean retrievedSample = false;
-    private boolean retrievedPlatform = false;
-    private boolean retrievedNGSPlatform = false;
-
-    public GATKSAMReadGroupRecord(final String id) {
-        super(id);
-    }
-
-    public GATKSAMReadGroupRecord(SAMReadGroupRecord record) {
-        super(record.getReadGroupId(), record);
-    }
-
-    /**
-     * Get the NGSPlatform enum telling us the platform of this read group
-     *
-     * This function call is caching, so subsequent calls to it are free, while
-     * the first time it's called there's a bit of work to resolve the enum
-     *
-     * @return an NGSPlatform enum value
-     */
-    public NGSPlatform getNGSPlatform() {
-        if ( ! retrievedNGSPlatform ) {
-            mNGSPlatform = NGSPlatform.fromReadGroupPL(getPlatform());
-            retrievedNGSPlatform = true;
-        }
-
-        return mNGSPlatform;
-    }
-
-    @Override
-    public String toString() {
-        return "GATKSAMReadGroupRecord @RG:" + getReadGroupId();
-    }
-
-    ///////////////////////////////////////////////////////////////////////////////
-    // *** The following methods are overloaded to cache the appropriate data ***//
-    ///////////////////////////////////////////////////////////////////////////////
-
-    @Override
-    public String getSample() {
-        if ( !retrievedSample ) {
-            mSample = super.getSample();
-            retrievedSample = true;
-        }
-        return mSample;
-    }
-
-    @Override
-    public void setSample(String s) {
-        super.setSample(s);
-        mSample = s;
-        retrievedSample = true;
-    }
-
-    @Override
-    public String getPlatform() {
-        if ( !retrievedPlatform ) {
-            mPlatform = super.getPlatform();
-            retrievedPlatform = true;
-        }
-        return mPlatform;
-    }
-
-    @Override
-    public void setPlatform(String s) {
-        super.setPlatform(s);
-        mPlatform = s;
-        retrievedPlatform = true;
-        retrievedNGSPlatform = false; // recalculate the NGSPlatform
-    }
-}
\ No newline at end of file
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/sam/GATKSAMRecord.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/sam/GATKSAMRecord.java
deleted file mode 100644
index 0080f01..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/sam/GATKSAMRecord.java
+++ /dev/null
@@ -1,631 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.sam;
-
-import com.google.java.contract.Ensures;
-import htsjdk.samtools.*;
-import org.broadinstitute.gatk.utils.NGSPlatform;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-import org.broadinstitute.gatk.utils.recalibration.EventType;
-
-import java.util.*;
-
-/**
- * @author ebanks, depristo
- * GATKSAMRecord
- *
- * this class extends the samtools BAMRecord class (and SAMRecord) and caches important
- * (and oft-accessed) data that's not already cached by the SAMRecord class
- *
- * IMPORTANT NOTE: Because ReadGroups are not set through the SAMRecord,
- *   if they are ever modified externally then one must also invoke the
- *   setReadGroup() method here to ensure that the cache is kept up-to-date.
- *
- * WARNING -- GATKSAMRecords cache several values (that are expensive to compute)
- * that depending on the inferred insert size and alignment starts and stops of this read and its mate.
- * Changing these values in any way will invalidate the cached value. However, we do not monitor those setter
- * functions, so modifying a GATKSAMRecord in any way may result in stale cached values.
- */
-public class GATKSAMRecord extends BAMRecord implements Cloneable {
-    // Base Quality Score Recalibrator specific attribute tags
-    public static final String BQSR_BASE_INSERTION_QUALITIES = "BI";                // base qualities for insertions
-    public static final String BQSR_BASE_DELETION_QUALITIES = "BD";                 // base qualities for deletions
-
-    /**
-     * The default quality score for an insertion or deletion, if
-     * none are provided for this read.
-     */
-    public static final byte DEFAULT_INSERTION_DELETION_QUAL = (byte)45;
-
-    // the SAMRecord data we're caching
-    private String mReadString = null;
-    private GATKSAMReadGroupRecord mReadGroup = null;
-    private final static int UNINITIALIZED = -1;
-    private int softStart = UNINITIALIZED;
-    private int softEnd = UNINITIALIZED;
-    private Integer adapterBoundary = null;
-
-    private boolean isStrandlessRead = false;
-
-    // because some values can be null, we don't want to duplicate effort
-    private boolean retrievedReadGroup = false;
-
-    // These temporary attributes were added here to make life easier for
-    // certain algorithms by providing a way to label or attach arbitrary data to
-    // individual GATKSAMRecords.
-    // These attributes exist in memory only, and are never written to disk.
-    private Map<Object, Object> temporaryAttributes;
-
-    /**
-     * HACK TO CREATE GATKSAMRECORD WITH ONLY A HEADER FOR TESTING PURPOSES ONLY
-     * @param header
-     */
-    public GATKSAMRecord(final SAMFileHeader header) {
-        this(new SAMRecord(header));
-    }
-
-    /**
-     * HACK TO CREATE GATKSAMRECORD BASED ONLY A SAMRECORD FOR TESTING PURPOSES ONLY
-     * @param read
-     */
-    public GATKSAMRecord(final SAMRecord read) {
-        super(read.getHeader(),
-                read.getReferenceIndex(),
-                read.getAlignmentStart(),
-                read.getReadName() != null ? (short)read.getReadNameLength() : 0,
-                (short)read.getMappingQuality(),
-                0,
-                read.getCigarLength(),
-                read.getFlags(),
-                read.getReadLength(),
-                read.getMateReferenceIndex(),
-                read.getMateAlignmentStart(),
-                read.getInferredInsertSize(),
-                null);
-        SAMReadGroupRecord samRG = read.getReadGroup();
-        clearAttributes();
-        if (samRG != null) {
-            GATKSAMReadGroupRecord rg = new GATKSAMReadGroupRecord(samRG);
-            setReadGroup(rg);
-        }
-    }
-
-    public GATKSAMRecord(final SAMFileHeader header,
-                         final int referenceSequenceIndex,
-                         final int alignmentStart,
-                         final short readNameLength,
-                         final short mappingQuality,
-                         final int indexingBin,
-                         final int cigarLen,
-                         final int flags,
-                         final int readLen,
-                         final int mateReferenceSequenceIndex,
-                         final int mateAlignmentStart,
-                         final int insertSize,
-                         final byte[] variableLengthBlock) {
-        super(header, referenceSequenceIndex, alignmentStart, readNameLength, mappingQuality, indexingBin, cigarLen,
-                flags, readLen, mateReferenceSequenceIndex, mateAlignmentStart, insertSize, variableLengthBlock);
-    }
-
-    public static GATKSAMRecord createRandomRead(int length) {
-        List<CigarElement> cigarElements = new LinkedList<>();
-        cigarElements.add(new CigarElement(length, CigarOperator.M));
-        Cigar cigar = new Cigar(cigarElements);
-        return ArtificialSAMUtils.createArtificialRead(cigar);
-    }
-
-    ///////////////////////////////////////////////////////////////////////////////
-    // *** support for reads without meaningful strand information            ***//
-    ///////////////////////////////////////////////////////////////////////////////
-
-    /**
-     * Does this read have a meaningful strandedness value?
-     *
-     * Some advanced types of reads, such as reads coming from merged fragments,
-     * don't have meaningful strandedness values, as they are composites of multiple
-     * other reads.  Strandless reads need to be handled specially by code that cares about
-     * stranded information, such as FS.
-     *
-     * @return true if this read doesn't have meaningful strand information
-     */
-    public boolean isStrandless() {
-        return isStrandlessRead;
-    }
-
-    /**
-     * Set the strandless state of this read to isStrandless
-     * @param isStrandless true if this read doesn't have a meaningful strandedness value
-     */
-    public void setIsStrandless(final boolean isStrandless) {
-        this.isStrandlessRead = isStrandless;
-    }
-
-    @Override
-    public boolean getReadNegativeStrandFlag() {
-        return ! isStrandless() && super.getReadNegativeStrandFlag();
-    }
-
-    @Override
-    public void setReadNegativeStrandFlag(final boolean flag) {
-        if ( isStrandless() )
-            throw new IllegalStateException("Cannot set the strand of a strandless read");
-        super.setReadNegativeStrandFlag(flag);
-    }
-
-
-    ///////////////////////////////////////////////////////////////////////////////
-    // *** The following methods are overloaded to cache the appropriate data ***//
-    ///////////////////////////////////////////////////////////////////////////////
-
-    @Override
-    public String getReadString() {
-        if ( mReadString == null )
-            mReadString = super.getReadString();
-        return mReadString;
-    }
-
-    @Override
-    public void setReadString(String s) {
-        super.setReadString(s);
-        mReadString = s;
-    }
-
-    /**
-     * Get the GATKSAMReadGroupRecord of this read
-     * @return a non-null GATKSAMReadGroupRecord
-     */
-    @Override
-    public GATKSAMReadGroupRecord getReadGroup() {
-        if ( ! retrievedReadGroup ) {
-            final SAMReadGroupRecord rg = super.getReadGroup();
-
-            // three cases: rg may be null (no rg, rg may already be a GATKSAMReadGroupRecord, or it may be
-            // a regular SAMReadGroupRecord in which case we have to make it a GATKSAMReadGroupRecord
-            if ( rg == null )
-                mReadGroup = null;
-            else if ( rg instanceof GATKSAMReadGroupRecord )
-                mReadGroup = (GATKSAMReadGroupRecord)rg;
-            else
-                mReadGroup = new GATKSAMReadGroupRecord(rg);
-
-            retrievedReadGroup = true;
-        }
-        return mReadGroup;
-    }
-
-    public void setReadGroup( final GATKSAMReadGroupRecord readGroup ) {
-        mReadGroup = readGroup;
-        retrievedReadGroup = true;
-        setAttribute("RG", mReadGroup.getId()); // todo -- this should be standardized, but we don't have access to SAMTagUtils!
-    }
-
-
-    @Override
-    public int hashCode() {
-        return super.hashCode();
-    }
-
-    @Override
-    public boolean equals(Object o) {
-        if (this == o) return true;
-
-        if (!(o instanceof GATKSAMRecord)) return false;
-
-        // note that we do not consider the GATKSAMRecord internal state at all
-        return super.equals(o);
-    }
-
-    /**
-     * Setters and Accessors for base insertion and base deletion quality scores
-     */
-    public void setBaseQualities( final byte[] quals, final EventType errorModel ) {
-        switch( errorModel ) {
-            case BASE_SUBSTITUTION:
-                setBaseQualities(quals);
-                break;
-            case BASE_INSERTION:
-                setAttribute( GATKSAMRecord.BQSR_BASE_INSERTION_QUALITIES, quals == null ? null : SAMUtils.phredToFastq(quals) );
-                break;
-            case BASE_DELETION:
-                setAttribute( GATKSAMRecord.BQSR_BASE_DELETION_QUALITIES, quals == null ? null : SAMUtils.phredToFastq(quals) );
-                break;
-            default:
-                throw new ReviewedGATKException("Unrecognized Base Recalibration type: " + errorModel );
-        }
-    }
-
-    public byte[] getBaseQualities( final EventType errorModel ) {
-        switch( errorModel ) {
-            case BASE_SUBSTITUTION:
-                return getBaseQualities();
-            case BASE_INSERTION:
-                return getBaseInsertionQualities();
-            case BASE_DELETION:
-                return getBaseDeletionQualities();
-            default:
-                throw new ReviewedGATKException("Unrecognized Base Recalibration type: " + errorModel );
-        }
-    }
-
-    /**
-     * @return whether or not this read has base insertion or deletion qualities (one of the two is sufficient to return true)
-     */
-    public boolean hasBaseIndelQualities() {
-        return getAttribute( BQSR_BASE_INSERTION_QUALITIES ) != null || getAttribute( BQSR_BASE_DELETION_QUALITIES ) != null;
-    }
-
-    /**
-     * @return the base deletion quality or null if read doesn't have one
-     */
-    public byte[] getExistingBaseInsertionQualities() {
-        return SAMUtils.fastqToPhred( getStringAttribute(BQSR_BASE_INSERTION_QUALITIES));
-    }
-
-    /**
-     * @return the base deletion quality or null if read doesn't have one
-     */
-    public byte[] getExistingBaseDeletionQualities() {
-        return SAMUtils.fastqToPhred( getStringAttribute(BQSR_BASE_DELETION_QUALITIES));
-    }
-
-    /**
-     * Default utility to query the base insertion quality of a read. If the read doesn't have one, it creates an array of default qualities (currently Q45)
-     * and assigns it to the read.
-     *
-     * @return the base insertion quality array
-     */
-    public byte[] getBaseInsertionQualities() {
-        byte [] quals = getExistingBaseInsertionQualities();
-        if( quals == null ) {
-            quals = new byte[getBaseQualities().length];
-            Arrays.fill(quals, DEFAULT_INSERTION_DELETION_QUAL); // Some day in the future when base insertion and base deletion quals exist the samtools API will
-                                           // be updated and the original quals will be pulled here, but for now we assume the original quality is a flat Q45
-        }
-        return quals;
-    }
-
-    /**
-     * Default utility to query the base deletion quality of a read. If the read doesn't have one, it creates an array of default qualities (currently Q45)
-     * and assigns it to the read.
-     *
-     * @return the base deletion quality array
-     */
-    public byte[] getBaseDeletionQualities() {
-        byte[] quals = getExistingBaseDeletionQualities();
-        if( quals == null ) {
-            quals = new byte[getBaseQualities().length];
-            Arrays.fill(quals, DEFAULT_INSERTION_DELETION_QUAL);  // Some day in the future when base insertion and base deletion quals exist the samtools API will
-                                            // be updated and the original quals will be pulled here, but for now we assume the original quality is a flat Q45
-        }
-        return quals;
-    }
-
-    /**
-     * Efficient caching accessor that returns the GATK NGSPlatform of this read
-     * @return
-     */
-    public NGSPlatform getNGSPlatform() {
-        return getReadGroup().getNGSPlatform();
-    }
-
-    ///////////////////////////////////////////////////////////////////////////////
-    // *** GATKSAMRecord specific methods                                     ***//
-    ///////////////////////////////////////////////////////////////////////////////
-
-    /**
-     * Checks whether an attribute has been set for the given key.
-     *
-     * Temporary attributes provide a way to label or attach arbitrary data to
-     * individual GATKSAMRecords. These attributes exist in memory only,
-     * and are never written to disk.
-     *
-     * @param key key
-     * @return True if an attribute has been set for this key.
-     */
-    public boolean containsTemporaryAttribute(Object key) {
-        return temporaryAttributes != null && temporaryAttributes.containsKey(key);
-    }
-
-    /**
-     * Sets the key to the given value, replacing any previous value. The previous
-     * value is returned.
-     *
-     * Temporary attributes provide a way to label or attach arbitrary data to
-     * individual GATKSAMRecords. These attributes exist in memory only,
-     * and are never written to disk.
-     *
-     * @param key    key
-     * @param value  value
-     * @return attribute
-     */
-    public Object setTemporaryAttribute(Object key, Object value) {
-        if(temporaryAttributes == null) {
-            temporaryAttributes = new HashMap<>();
-        }
-        return temporaryAttributes.put(key, value);
-    }
-
-    /**
-     * Looks up the value associated with the given key.
-     *
-     * Temporary attributes provide a way to label or attach arbitrary data to
-     * individual GATKSAMRecords. These attributes exist in memory only,
-     * and are never written to disk.
-     *
-     * @param key key
-     * @return The value, or null.
-     */
-    public Object getTemporaryAttribute(Object key) {
-        if(temporaryAttributes != null) {
-            return temporaryAttributes.get(key);
-        }
-        return null;
-    }
-
-    /**
-     * Checks whether if the read has any bases.
-     *
-     * Empty reads can be dangerous as it may have no cigar strings, no read names and
-     * other missing attributes.
-     *
-     * @return true if the read has no bases
-     */
-    public boolean isEmpty() {
-        return super.getReadBases() == null || super.getReadLength() == 0;
-    }
-
-    /**
-     * Clears all attributes except ReadGroup of the read.
-     */
-    public GATKSAMRecord simplify () {
-        GATKSAMReadGroupRecord rg = getReadGroup(); // save the read group information
-        byte[] insQuals = (this.getAttribute(BQSR_BASE_INSERTION_QUALITIES) == null) ? null : getBaseInsertionQualities();
-        byte[] delQuals = (this.getAttribute(BQSR_BASE_DELETION_QUALITIES)  == null) ? null : getBaseDeletionQualities();
-        this.clearAttributes(); // clear all attributes from the read
-        this.setReadGroup(rg); // restore read group
-        if (insQuals != null)
-           this.setBaseQualities(insQuals, EventType.BASE_INSERTION); // restore base insertion if we had any
-        if (delQuals != null)
-            this.setBaseQualities(delQuals, EventType.BASE_DELETION); // restore base deletion if we had any
-        return this;
-    }
-
-    /**
-     * Calculates the reference coordinate for the beginning of the read taking into account soft clips but not hard clips.
-     *
-     * Note: getUnclippedStart() adds soft and hard clips, this function only adds soft clips.
-     *
-     * @return the unclipped start of the read taking soft clips (but not hard clips) into account
-     */
-    public int getSoftStart() {
-        if ( softStart == UNINITIALIZED ) {
-            softStart = getAlignmentStart();
-            for (final CigarElement cig : getCigar().getCigarElements()) {
-                final CigarOperator op = cig.getOperator();
-
-                if (op == CigarOperator.SOFT_CLIP)
-                    softStart -= cig.getLength();
-                else if (op != CigarOperator.HARD_CLIP)
-                    break;
-            }
-        }
-        return softStart;
-    }
-
-    /**
-     * Calculates the reference coordinate for the end of the read taking into account soft clips but not hard clips.
-     *
-     * Note: getUnclippedEnd() adds soft and hard clips, this function only adds soft clips.
-     *
-     * @return the unclipped end of the read taking soft clips (but not hard clips) into account
-     */
-    public int getSoftEnd() {
-        if ( softEnd == UNINITIALIZED ) {
-            boolean foundAlignedBase = false;
-            softEnd = getAlignmentEnd();
-            final List<CigarElement> cigs = getCigar().getCigarElements();
-            for (int i = cigs.size() - 1; i >= 0; --i) {
-                final CigarElement cig = cigs.get(i);
-                final CigarOperator op = cig.getOperator();
-
-                if (op == CigarOperator.SOFT_CLIP) // assumes the soft clip that we found is at the end of the aligned read
-                    softEnd += cig.getLength();
-                else if (op != CigarOperator.HARD_CLIP) {
-                    foundAlignedBase = true;
-                    break;
-                }
-            }
-            if( !foundAlignedBase ) { // for example 64H14S, the soft end is actually the same as the alignment end
-                softEnd = getAlignmentEnd();
-            }
-        }
-
-        return softEnd;
-    }
-
-    /**
-     * If the read is hard clipped, the soft start and end will change. You can set manually or just reset the cache
-     * so that the next call to getSoftStart/End will recalculate it lazily.
-     */
-    public void resetSoftStartAndEnd() {
-        softStart = -1;
-        softEnd = -1;
-    }
-
-    /**
-     * If the read is hard clipped, the soft start and end will change. You can set manually or just reset the cache
-     * so that the next call to getSoftStart/End will recalculate it lazily.
-     */
-    public void resetSoftStartAndEnd(int softStart, int softEnd) {
-        this.softStart = softStart;
-        this.softEnd = softEnd;
-    }
-
-    /**
-     * Determines the original alignment start of a previously clipped read.
-     * 
-     * This is useful for reads that have been trimmed to a variant region and lost the information of it's original alignment end
-     * 
-     * @return the alignment start of a read before it was clipped
-     */
-    public int getOriginalAlignmentStart() {
-        return getUnclippedStart();
-    }
-
-    /**
-     * Determines the original alignment end of a previously clipped read.
-     *
-     * This is useful for reads that have been trimmed to a variant region and lost the information of it's original alignment end
-     * 
-     * @return the alignment end of a read before it was clipped
-     */
-    public int getOriginalAlignmentEnd() {
-        return getUnclippedEnd();
-    }
-
-    /**
-     * Creates an empty GATKSAMRecord with the read's header, read group and mate
-     * information, but empty (not-null) fields:
-     *  - Cigar String
-     *  - Read Bases
-     *  - Base Qualities
-     *
-     * Use this method if you want to create a new empty GATKSAMRecord based on
-     * another GATKSAMRecord
-     *
-     * @param read a read to copy the header from
-     * @return a read with no bases but safe for the GATK
-     */
-    public static GATKSAMRecord emptyRead(GATKSAMRecord read) {
-        GATKSAMRecord emptyRead = new GATKSAMRecord(read.getHeader(),
-                read.getReferenceIndex(),
-                0,
-                (short) 0,
-                (short) 0,
-                0,
-                0,
-                read.getFlags(),
-                0,
-                read.getMateReferenceIndex(),
-                read.getMateAlignmentStart(),
-                read.getInferredInsertSize(),
-                null);
-
-        emptyRead.setCigarString("");
-        emptyRead.setReadBases(new byte[0]);
-        emptyRead.setBaseQualities(new byte[0]);
-
-        SAMReadGroupRecord samRG = read.getReadGroup();
-        emptyRead.clearAttributes();
-        if (samRG != null) {
-            GATKSAMReadGroupRecord rg = new GATKSAMReadGroupRecord(samRG);
-            emptyRead.setReadGroup(rg);
-        }
-
-        return emptyRead;
-    }
-
-    /**
-     * Creates a new GATKSAMRecord with the source read's header, read group and mate
-     * information, but with the following fields set to user-supplied values:
-     *  - Read Bases
-     *  - Base Qualities
-     *  - Base Insertion Qualities
-     *  - Base Deletion Qualities
-     *
-     *  Cigar string is empty (not-null)
-     *
-     * Use this method if you want to create a new GATKSAMRecord based on
-     * another GATKSAMRecord, but with modified bases and qualities
-     *
-     * @param read a read to copy the header from
-     * @param readBases an array containing the new bases you wish use in place of the originals
-     * @param baseQualities an array containing the new base qualities you wish use in place of the originals
-     * @param baseInsertionQualities an array containing the new base insertion qaulities
-     * @param baseDeletionQualities an array containing the new base deletion qualities
-     * @return a read with modified bases and qualities, safe for the GATK
-     */
-    public static GATKSAMRecord createQualityModifiedRead(final GATKSAMRecord read,
-                                                          final byte[] readBases,
-                                                          final byte[] baseQualities,
-                                                          final byte[] baseInsertionQualities,
-                                                          final byte[] baseDeletionQualities) {
-        if ( baseQualities.length != readBases.length || baseInsertionQualities.length != readBases.length || baseDeletionQualities.length != readBases.length )
-            throw new IllegalArgumentException("Read bases and read quality arrays aren't the same size: Bases:" + readBases.length
-                                                + " vs Base Q's:" + baseQualities.length
-                                                + " vs Insert Q's:" + baseInsertionQualities.length
-                                                + " vs Delete Q's:" + baseDeletionQualities.length);
-
-        final GATKSAMRecord processedRead = GATKSAMRecord.emptyRead(read);
-        processedRead.setReadBases(readBases);
-        processedRead.setBaseQualities(baseQualities, EventType.BASE_SUBSTITUTION);
-        processedRead.setBaseQualities(baseInsertionQualities, EventType.BASE_INSERTION);
-        processedRead.setBaseQualities(baseDeletionQualities, EventType.BASE_DELETION);
-
-        return processedRead;
-    }
-
-    /**
-     * Shallow copy of everything, except for the attribute list and the temporary attributes. 
-     * A new list of the attributes is created for both, but the attributes themselves are copied by reference.  
-     * This should be safe because callers should never modify a mutable value returned by any of the get() methods anyway.
-     * 
-     * @return a shallow copy of the GATKSAMRecord
-     */
-    @Override
-    public Object clone() {
-        try {
-            final GATKSAMRecord clone = (GATKSAMRecord) super.clone();
-            if (temporaryAttributes != null) {
-                clone.temporaryAttributes = new HashMap<>();
-                for (Object attribute : temporaryAttributes.keySet())
-                    clone.setTemporaryAttribute(attribute, temporaryAttributes.get(attribute));
-            }
-            return clone;
-        } catch (final CloneNotSupportedException e) {
-            throw new RuntimeException( e );
-        }
-    }
-
-    /**
-     * A caching version of ReadUtils.getAdaptorBoundary()
-     *
-     * see #ReadUtils.getAdaptorBoundary(SAMRecord) for more information about the meaning of this function
-     *
-     * WARNING -- this function caches a value depending on the inferred insert size and alignment starts
-     * and stops of this read and its mate.  Changing these values in any way will invalidate the cached value.
-     * However, we do not monitor those setter functions, so modifying a GATKSAMRecord in any way may
-     * result in stale cached values.
-     *
-     * @return the result of calling ReadUtils.getAdaptorBoundary on this read
-     */
-    @Ensures("result == ReadUtils.getAdaptorBoundary(this)")
-    public int getAdaptorBoundary() {
-        if ( adapterBoundary == null )
-            adapterBoundary = ReadUtils.getAdaptorBoundary(this);
-        return adapterBoundary;
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/sam/GATKSamRecordFactory.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/sam/GATKSamRecordFactory.java
deleted file mode 100644
index 1e5ad1e..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/sam/GATKSamRecordFactory.java
+++ /dev/null
@@ -1,75 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.sam;
-
-import htsjdk.samtools.SAMFileHeader;
-import htsjdk.samtools.SAMRecord;
-import htsjdk.samtools.SAMRecordFactory;
-import htsjdk.samtools.BAMRecord;
-import org.broadinstitute.gatk.utils.exceptions.UserException;
-
-/**
- * Factory interface implementation used to create GATKSamRecords
- * from SAMFileReaders with SAM-JDK
- *
- * @author Mark DePristo
- */
-public class GATKSamRecordFactory implements SAMRecordFactory {
-
-    /** Create a new SAMRecord to be filled in */
-    public SAMRecord createSAMRecord(SAMFileHeader header) {
-        throw new UserException.BadInput("The GATK now longer supports input SAM files");
-    }
-
-    /** Create a new BAM Record. */
-    public BAMRecord createBAMRecord(final SAMFileHeader header,
-                                     final int referenceSequenceIndex,
-                                     final int alignmentStart,
-                                     final short readNameLength,
-                                     final short mappingQuality,
-                                     final int indexingBin,
-                                     final int cigarLen,
-                                     final int flags,
-                                     final int readLen,
-                                     final int mateReferenceSequenceIndex,
-                                     final int mateAlignmentStart,
-                                     final int insertSize,
-                                     final byte[] variableLengthBlock) {
-        return new GATKSAMRecord(header,
-                referenceSequenceIndex,
-                alignmentStart,
-                readNameLength,
-                mappingQuality,
-                indexingBin,
-                cigarLen,
-                flags,
-                readLen,
-                mateReferenceSequenceIndex,
-                mateAlignmentStart,
-                insertSize,
-                variableLengthBlock);
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/sam/MisencodedBaseQualityReadTransformer.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/sam/MisencodedBaseQualityReadTransformer.java
deleted file mode 100644
index 35146f0..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/sam/MisencodedBaseQualityReadTransformer.java
+++ /dev/null
@@ -1,94 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.sam;
-
-import org.broadinstitute.gatk.engine.GenomeAnalysisEngine;
-import org.broadinstitute.gatk.engine.iterators.ReadTransformer;
-import org.broadinstitute.gatk.engine.walkers.Walker;
-import org.broadinstitute.gatk.utils.QualityUtils;
-import org.broadinstitute.gatk.utils.exceptions.UserException;
-
-/**
- * Checks for and errors out (or fixes if requested) when it detects reads with base qualities that are not encoded with
- * phred-scaled quality scores.  Q0 == ASCII 33 according to the SAM specification, whereas Illumina encoding starts at
- * Q64.  The idea here is simple: if we are asked to fix the scores then we just subtract 31 from every quality score.
- * Otherwise, we randomly sample reads (for efficiency) and error out if we encounter a qual that's too high.
- */
-public class MisencodedBaseQualityReadTransformer extends ReadTransformer {
-
-    private static final int samplingFrequency = 1000;  // sample 1 read for every 1000 encountered
-    private static final int encodingFixValue = 31;  // Illumina_64 - PHRED_33
-
-    private boolean disabled;
-    private boolean fixQuals;
-    protected static int currentReadCounter = 0;
-
-    @Override
-    public ApplicationTime initializeSub(final GenomeAnalysisEngine engine, final Walker walker) {
-        fixQuals = engine.getArguments().FIX_MISENCODED_QUALS;
-        disabled = !fixQuals && engine.getArguments().ALLOW_POTENTIALLY_MISENCODED_QUALS;
-
-        return ReadTransformer.ApplicationTime.ON_INPUT;
-    }
-
-    @Override
-    public boolean enabled() {
-        return !disabled;
-    }
-
-    @Override
-    public GATKSAMRecord apply(final GATKSAMRecord read) {
-        if ( fixQuals )
-            return fixMisencodedQuals(read);
-
-        checkForMisencodedQuals(read);
-        return read;
-    }
-
-    protected static GATKSAMRecord fixMisencodedQuals(final GATKSAMRecord read) {
-        final byte[] quals = read.getBaseQualities();
-        for ( int i = 0; i < quals.length; i++ ) {
-            quals[i] -= encodingFixValue;
-            if ( quals[i] < 0 )
-                throw new UserException.BadInput("while fixing mis-encoded base qualities we encountered a read that was correctly encoded; we cannot handle such a mixture of reads so unfortunately the BAM must be fixed with some other tool");
-        }
-        read.setBaseQualities(quals);
-        return read;
-    }
-
-    protected static void checkForMisencodedQuals(final GATKSAMRecord read) {
-        // sample reads randomly for checking
-        if ( ++currentReadCounter >= samplingFrequency ) {
-            currentReadCounter = 0;
-
-            final byte[] quals = read.getBaseQualities();
-            for ( final byte qual : quals ) {
-                if ( qual > QualityUtils.MAX_REASONABLE_Q_SCORE )
-                    throw new UserException.MisencodedBAM(read, "we encountered an extremely high quality score of " + (int)qual);
-            }
-        }
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/sam/NWaySAMFileWriter.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/sam/NWaySAMFileWriter.java
deleted file mode 100644
index abf70d5..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/sam/NWaySAMFileWriter.java
+++ /dev/null
@@ -1,185 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.sam;
-
-import htsjdk.samtools.*;
-import htsjdk.samtools.util.ProgressLoggerInterface;
-import org.broadinstitute.gatk.engine.GenomeAnalysisEngine;
-import org.broadinstitute.gatk.engine.datasources.reads.SAMReaderID;
-import org.broadinstitute.gatk.utils.Utils;
-import org.broadinstitute.gatk.utils.exceptions.GATKException;
-import org.broadinstitute.gatk.utils.exceptions.UserException;
-
-import java.io.File;
-import java.util.Collection;
-import java.util.HashMap;
-import java.util.Map;
-
-/**
- * Created by IntelliJ IDEA.
- * User: asivache
- * Date: May 31, 2011
- * Time: 3:52:49 PM
- * To change this template use File | Settings | File Templates.
- */
-public class NWaySAMFileWriter implements SAMFileWriter {
-
-    private Map<SAMReaderID,SAMFileWriter> writerMap = null;
-    private boolean presorted ;
-    GenomeAnalysisEngine toolkit;
-    boolean KEEP_ALL_PG_RECORDS = false;
-
-    public NWaySAMFileWriter(GenomeAnalysisEngine toolkit, Map<String,String> in2out, SAMFileHeader.SortOrder order,
-                             boolean presorted, boolean indexOnTheFly, boolean generateMD5, SAMProgramRecord pRecord, boolean keep_records) {
-        this.presorted = presorted;
-        this.toolkit = toolkit;
-        this.KEEP_ALL_PG_RECORDS = keep_records;
-        writerMap = new HashMap<SAMReaderID,SAMFileWriter>();
-        setupByReader(toolkit,in2out,order, presorted, indexOnTheFly, generateMD5, pRecord);
-    }
-
-    public NWaySAMFileWriter(GenomeAnalysisEngine toolkit, String ext, SAMFileHeader.SortOrder order,
-                              boolean presorted, boolean indexOnTheFly , boolean generateMD5, SAMProgramRecord pRecord, boolean keep_records) {
-        this.presorted = presorted;
-        this.toolkit = toolkit;
-        this.KEEP_ALL_PG_RECORDS = keep_records;
-        writerMap = new HashMap<SAMReaderID,SAMFileWriter>();
-        setupByReader(toolkit,ext,order, presorted, indexOnTheFly, generateMD5, pRecord);
-    }
-
-    public NWaySAMFileWriter(GenomeAnalysisEngine toolkit, Map<String,String> in2out, SAMFileHeader.SortOrder order,
-                             boolean presorted, boolean indexOnTheFly, boolean generateMD5) {
-        this(toolkit, in2out, order, presorted, indexOnTheFly, generateMD5, null,false);
-    }
-
-    public NWaySAMFileWriter(GenomeAnalysisEngine toolkit, String ext, SAMFileHeader.SortOrder order,
-                              boolean presorted, boolean indexOnTheFly , boolean generateMD5) {
-        this(toolkit, ext, order, presorted, indexOnTheFly, generateMD5, null,false);
-    }
-
-    /**
-     * Instantiates multiple underlying SAM writes, one per input SAM reader registered with GATK engine (those will be retrieved
-     * from <code>toolkit</code>). The <code>in2out</code> map must contain an entry for each input filename and map it
-     * onto a unique output file name.
-     * @param toolkit
-     * @param in2out
-     */
-    public void setupByReader(GenomeAnalysisEngine toolkit, Map<String,String> in2out, SAMFileHeader.SortOrder order,
-                              boolean presorted, boolean indexOnTheFly, boolean generateMD5, SAMProgramRecord pRecord) {
-        if ( in2out==null ) throw new GATKException("input-output bam filename map for n-way-out writing is NULL");
-        for ( SAMReaderID rid : toolkit.getReadsDataSource().getReaderIDs() ) {
-
-            String fName = toolkit.getReadsDataSource().getSAMFile(rid).getName();
-
-            String outName;
-            if ( ! in2out.containsKey(fName) )
-                    throw new UserException.BadInput("Input-output bam filename map does not contain an entry for the input file "+fName);
-            outName = in2out.get(fName);
-
-            if ( writerMap.containsKey( rid ) )
-                throw new GATKException("nWayOut mode: Reader id for input sam file "+fName+" is already registered; "+
-                        "map file likely contains multiple entries for this input file");
-
-            addWriter(rid,outName, order, presorted, indexOnTheFly, generateMD5, pRecord);
-        }
-
-    }
-
-    /**
-     * Instantiates multiple underlying SAM writes, one per input SAM reader registered with GATK engine (those will be retrieved
-     * from <code>toolkit</code>). The output file names will be generated automatically by stripping ".sam" or ".bam" off the
-     * input file name and adding ext instead (e.g. ".cleaned.bam").
-     * onto a unique output file name.
-     * @param toolkit
-     * @param ext
-     */
-    public void setupByReader(GenomeAnalysisEngine toolkit, String ext, SAMFileHeader.SortOrder order,
-                              boolean presorted, boolean indexOnTheFly, boolean generateMD5, SAMProgramRecord pRecord) {
-        for ( SAMReaderID rid : toolkit.getReadsDataSource().getReaderIDs() ) {
-
-            String fName = toolkit.getReadsDataSource().getSAMFile(rid).getName();
-
-            String outName;
-            int pos ;
-            if ( fName.toUpperCase().endsWith(".BAM") ) pos = fName.toUpperCase().lastIndexOf(".BAM");
-            else {
-                if ( fName.toUpperCase().endsWith(".SAM") ) pos = fName.toUpperCase().lastIndexOf(".SAM");
-                else throw new UserException.BadInput("Input file name "+fName+" does not end with .sam or .bam");
-            }
-            String prefix = fName.substring(0,pos);
-            outName = prefix+ext;
-
-            if ( writerMap.containsKey( rid ) )
-                throw new GATKException("nWayOut mode: Reader id for input sam file "+fName+" is already registered");
-            addWriter(rid,outName, order, presorted, indexOnTheFly, generateMD5, pRecord);
-        }
-
-    }
-
-    private void addWriter(SAMReaderID id , String outName, SAMFileHeader.SortOrder order, boolean presorted,
-                           boolean indexOnTheFly, boolean generateMD5, SAMProgramRecord programRecord) {
-        File f = new File(outName);
-        SAMFileHeader header = Utils.setupWriter(toolkit.getSAMFileHeader(id), programRecord);
-        SAMFileWriterFactory factory = new SAMFileWriterFactory();
-        factory.setCreateIndex(indexOnTheFly);
-        factory.setCreateMd5File(generateMD5);
-        SAMFileWriter sw = factory.makeSAMOrBAMWriter(header, presorted, f);
-        writerMap.put(id,sw);
-    }
-
-    public Collection<SAMFileWriter> getWriters() {
-        return writerMap.values();
-    }
-
-    public void addAlignment(SAMRecord samRecord) {
-        final SAMReaderID id = toolkit.getReaderIDForRead(samRecord);
-        String rg = samRecord.getStringAttribute("RG");
-        if ( rg != null ) {
-            String rg_orig = toolkit.getReadsDataSource().getOriginalReadGroupId(rg);
-            samRecord.setAttribute("RG",rg_orig);
-        }
-        addAlignment(samRecord, id);
-    }
-
-    public void addAlignment(SAMRecord samRecord, SAMReaderID readerID) {
-        writerMap.get(readerID).addAlignment(samRecord);
-    }
-
-    public SAMFileHeader getFileHeader() {
-        return toolkit.getSAMFileHeader();
-    }
-
-    public void close() {
-        for ( SAMFileWriter w : writerMap.values() ) w.close();
-    }
-
-    @Override
-    public void setProgressLogger(final ProgressLoggerInterface logger) {
-        for (final SAMFileWriter writer: writerMap.values()) {
-            writer.setProgressLogger(logger);
-        }
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/sam/ReadUnclippedStartWithNoTiesComparator.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/sam/ReadUnclippedStartWithNoTiesComparator.java
deleted file mode 100644
index 9d2a391..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/sam/ReadUnclippedStartWithNoTiesComparator.java
+++ /dev/null
@@ -1,73 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.sam;
-
-import com.google.java.contract.Ensures;
-import com.google.java.contract.Requires;
-import htsjdk.samtools.SAMRecord;
-
-import java.util.Comparator;
-
-public class ReadUnclippedStartWithNoTiesComparator implements Comparator<SAMRecord> {
-    @Requires("c1 >= 0 && c2 >= 0")
-    @Ensures("result == 0 || result == 1 || result == -1")
-    private int compareContigs(int c1, int c2) {
-        if (c1 == c2)
-            return 0;
-        else if (c1 > c2)
-            return 1;
-        return -1;
-    }
-
-    @Requires("r1 != null && r2 != null")
-    @Ensures("result == 0 || result == 1 || result == -1")
-    public int compare(SAMRecord r1, SAMRecord r2) {
-        int result;
-
-        if (r1 == r2)
-            result = 0;
-
-        else if (r1.getReadUnmappedFlag())
-            result = 1;
-        else if (r2.getReadUnmappedFlag())
-            result = -1;
-        else {
-            final int cmpContig = compareContigs(r1.getReferenceIndex(), r2.getReferenceIndex());
-
-            if (cmpContig != 0)
-                result = cmpContig;
-
-            else {
-                if (r1.getUnclippedStart() < r2.getUnclippedStart())
-                    result = -1;
-                else
-                    result = 1;
-            }
-        }
-
-        return result;
-    }
-}
\ No newline at end of file
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/sam/ReadUtils.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/sam/ReadUtils.java
deleted file mode 100644
index 7fc1b40..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/sam/ReadUtils.java
+++ /dev/null
@@ -1,964 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.sam;
-
-import com.google.java.contract.Ensures;
-import com.google.java.contract.Requires;
-import htsjdk.samtools.*;
-import org.apache.log4j.Logger;
-import org.broadinstitute.gatk.engine.GenomeAnalysisEngine;
-import org.broadinstitute.gatk.engine.io.stubs.SAMFileWriterStub;
-import org.broadinstitute.gatk.utils.*;
-import org.broadinstitute.gatk.utils.collections.Pair;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-
-import java.io.File;
-import java.util.*;
-
-/**
- * A miscellaneous collection of utilities for working with SAM files, headers, etc.
- * Static methods only, please.
- *
- * @author mhanna
- * @version 0.1
- */
-public class ReadUtils {
-    private final static Logger logger = Logger.getLogger(ReadUtils.class);
-    
-    private static final String OFFSET_OUT_OF_BOUNDS_EXCEPTION = "Offset cannot be greater than read length %d : %d";
-    private static final String OFFSET_NOT_ZERO_EXCEPTION = "We ran past the end of the read and never found the offset, something went wrong!";
-    
-    private ReadUtils() {
-    }
-
-    private static final int DEFAULT_ADAPTOR_SIZE = 100;
-    public static final int CLIPPING_GOAL_NOT_REACHED = -1;
-
-    /**
-     * A marker to tell which end of the read has been clipped
-     */
-    public enum ClippingTail {
-        LEFT_TAIL,
-        RIGHT_TAIL
-    }
-
-    /**
-     * A HashMap of the SAM spec read flag names
-     *
-     * Note: This is not being used right now, but can be useful in the future
-     */
-    private static final Map<Integer, String> readFlagNames = new HashMap<Integer, String>();
-
-    static {
-        readFlagNames.put(0x1, "Paired");
-        readFlagNames.put(0x2, "Proper");
-        readFlagNames.put(0x4, "Unmapped");
-        readFlagNames.put(0x8, "MateUnmapped");
-        readFlagNames.put(0x10, "Forward");
-        //readFlagNames.put(0x20, "MateForward");
-        readFlagNames.put(0x40, "FirstOfPair");
-        readFlagNames.put(0x80, "SecondOfPair");
-        readFlagNames.put(0x100, "NotPrimary");
-        readFlagNames.put(0x200, "NON-PF");
-        readFlagNames.put(0x400, "Duplicate");
-    }
-
-    /**
-     * This enum represents all the different ways in which a read can overlap an interval.
-     *
-     * NO_OVERLAP_CONTIG:
-     * read and interval are in different contigs.
-     *
-     * NO_OVERLAP_LEFT:
-     * the read does not overlap the interval.
-     *
-     *                        |----------------| (interval)
-     *   <---------------->                      (read)
-     *
-     * NO_OVERLAP_RIGHT:
-     * the read does not overlap the interval.
-     *
-     *   |----------------|                      (interval)
-     *                        <----------------> (read)
-     *
-     * OVERLAP_LEFT:
-     * the read starts before the beginning of the interval but ends inside of it
-     *
-     *          |----------------| (interval)
-     *   <---------------->        (read)
-     *
-     * OVERLAP_RIGHT:
-     * the read starts inside the interval but ends outside of it
-     *
-     *   |----------------|     (interval)
-     *       <----------------> (read)
-     *
-     * OVERLAP_LEFT_AND_RIGHT:
-     * the read starts before the interval and ends after the interval
-     *
-     *      |-----------|     (interval)
-     *  <-------------------> (read)
-     *
-     * OVERLAP_CONTAINED:
-     * the read starts and ends inside the interval
-     *
-     *  |----------------|     (interval)
-     *     <-------->          (read)
-     */
-    public enum ReadAndIntervalOverlap {NO_OVERLAP_CONTIG, NO_OVERLAP_LEFT, NO_OVERLAP_RIGHT, NO_OVERLAP_HARDCLIPPED_LEFT, NO_OVERLAP_HARDCLIPPED_RIGHT, OVERLAP_LEFT, OVERLAP_RIGHT, OVERLAP_LEFT_AND_RIGHT, OVERLAP_CONTAINED}
-
-    /**
-     * Creates a SAMFileWriter using all of the features currently set in the engine (command line arguments, ReadTransformers, etc)
-     * @param file the filename to write to
-     * @param engine the engine
-     * @return a SAMFileWriter with the correct options set
-     */
-    public static SAMFileWriter createSAMFileWriter(final String file, final GenomeAnalysisEngine engine) {
-        final SAMFileWriterStub output = new SAMFileWriterStub(engine, new File(file));
-        output.processArguments(engine.getArguments());
-        return output;
-    }
-
-    /**
-     *  As {@link #createSAMFileWriter(String, org.broadinstitute.gatk.engine.GenomeAnalysisEngine)}, but also sets the header
-     */
-    public static SAMFileWriter createSAMFileWriter(final String file, final GenomeAnalysisEngine engine, final SAMFileHeader header) {
-        final SAMFileWriterStub output = (SAMFileWriterStub) createSAMFileWriter(file, engine);
-        output.writeHeader(header);
-        return output;
-    }
-
-    /**
-     * is this base inside the adaptor of the read?
-     *
-     * There are two cases to treat here:
-     *
-     * 1) Read is in the negative strand => Adaptor boundary is on the left tail
-     * 2) Read is in the positive strand => Adaptor boundary is on the right tail
-     *
-     * Note: We return false to all reads that are UNMAPPED or have an weird big insert size (probably due to mismapping or bigger event)
-     *
-     * @param read the read to test
-     * @param basePos base position in REFERENCE coordinates (not read coordinates)
-     * @return whether or not the base is in the adaptor
-     */
-    public static boolean isBaseInsideAdaptor(final GATKSAMRecord read, long basePos) {
-        final int adaptorBoundary = read.getAdaptorBoundary();
-        if (adaptorBoundary == CANNOT_COMPUTE_ADAPTOR_BOUNDARY || read.getInferredInsertSize() > DEFAULT_ADAPTOR_SIZE)
-            return false;
-
-        return read.getReadNegativeStrandFlag() ? basePos <= adaptorBoundary : basePos >= adaptorBoundary;
-    }
-
-    /**
-     * Finds the adaptor boundary around the read and returns the first base inside the adaptor that is closest to
-     * the read boundary. If the read is in the positive strand, this is the first base after the end of the
-     * fragment (Picard calls it 'insert'), if the read is in the negative strand, this is the first base before the
-     * beginning of the fragment.
-     *
-     * There are two cases we need to treat here:
-     *
-     * 1) Our read is in the reverse strand :
-     *
-     *     <----------------------| *
-     *   |--------------------->
-     *
-     *   in these cases, the adaptor boundary is at the mate start (minus one)
-     *
-     * 2) Our read is in the forward strand :
-     *
-     *   |---------------------->   *
-     *     <----------------------|
-     *
-     *   in these cases the adaptor boundary is at the start of the read plus the inferred insert size (plus one)
-     *
-     * @param read the read being tested for the adaptor boundary
-     * @return the reference coordinate for the adaptor boundary (effectively the first base IN the adaptor, closest to the read).
-     * CANNOT_COMPUTE_ADAPTOR_BOUNDARY if the read is unmapped or the mate is mapped to another contig.
-     */
-    public static int getAdaptorBoundary(final SAMRecord read) {
-        if ( ! hasWellDefinedFragmentSize(read) ) {
-            return CANNOT_COMPUTE_ADAPTOR_BOUNDARY;
-        } else if ( read.getReadNegativeStrandFlag() ) {
-            return read.getMateAlignmentStart() - 1;           // case 1 (see header)
-        } else {
-            final int insertSize = Math.abs(read.getInferredInsertSize());    // the inferred insert size can be negative if the mate is mapped before the read (so we take the absolute value)
-            return read.getAlignmentStart() + insertSize + 1;  // case 2 (see header)
-        }
-    }
-
-    public static int CANNOT_COMPUTE_ADAPTOR_BOUNDARY = Integer.MIN_VALUE;
-
-    /**
-     * Can the adaptor sequence of read be reliably removed from the read based on the alignment of
-     * read and its mate?
-     *
-     * @param read the read to check
-     * @return true if it can, false otherwise
-     */
-    public static boolean hasWellDefinedFragmentSize(final SAMRecord read) {
-        if ( read.getInferredInsertSize() == 0 )
-            // no adaptors in reads with mates in another chromosome or unmapped pairs
-            return false;
-        if ( ! read.getReadPairedFlag() )
-            // only reads that are paired can be adaptor trimmed
-            return false;
-        if ( read.getReadUnmappedFlag() || read.getMateUnmappedFlag() )
-            // only reads when both reads are mapped can be trimmed
-            return false;
-//        if ( ! read.getProperPairFlag() )
-//            // note this flag isn't always set properly in BAMs, which can stop us from trimming some genuine proper pairs
-//            // reads that aren't part of a proper pair (i.e., have strange alignments) can't be trimmed
-//            return false;
-        if ( read.getReadNegativeStrandFlag() == read.getMateNegativeStrandFlag() )
-            // sanity check on getProperPairFlag to ensure that read1 and read2 aren't on the same strand
-            return false;
-
-        if ( read.getReadNegativeStrandFlag() ) {
-            // we're on the negative strand, so our read runs right to left
-            return read.getAlignmentEnd() > read.getMateAlignmentStart();
-        } else {
-            // we're on the positive strand, so our mate should be to our right (his start + insert size should be past our start)
-            return read.getAlignmentStart() <= read.getMateAlignmentStart() + read.getInferredInsertSize();
-        }
-    }
-
-    /**
-     * is the read a 454 read?
-     *
-     * @param read the read to test
-     * @return checks the read group tag PL for the default 454 tag
-     */
-    public static boolean is454Read(GATKSAMRecord read) {
-        return NGSPlatform.fromRead(read) == NGSPlatform.LS454;
-    }
-
-    /**
-     * is the read an IonTorrent read?
-     *
-     * @param read the read to test
-     * @return checks the read group tag PL for the default ion tag
-     */
-    public static boolean isIonRead(GATKSAMRecord read) {
-        return NGSPlatform.fromRead(read) == NGSPlatform.ION_TORRENT;
-    }
-
-    /**
-     * is the read a SOLiD read?
-     *
-     * @param read the read to test
-     * @return checks the read group tag PL for the default SOLiD tag
-     */
-    public static boolean isSOLiDRead(GATKSAMRecord read) {
-        return NGSPlatform.fromRead(read) == NGSPlatform.SOLID;
-    }
-
-    /**
-     * is the read a SLX read?
-     *
-     * @param read the read to test
-     * @return checks the read group tag PL for the default SLX tag
-     */
-    public static boolean isIlluminaRead(GATKSAMRecord read) {
-        return NGSPlatform.fromRead(read) == NGSPlatform.ILLUMINA;
-    }
-
-    /**
-     * checks if the read has a platform tag in the readgroup equal to 'name'.
-     * Assumes that 'name' is upper-cased.
-     *
-     * @param read the read to test
-     * @param name the upper-cased platform name to test
-     * @return whether or not name == PL tag in the read group of read
-     */
-    public static boolean isPlatformRead(GATKSAMRecord read, String name) {
-
-        SAMReadGroupRecord readGroup = read.getReadGroup();
-        if (readGroup != null) {
-            Object readPlatformAttr = readGroup.getAttribute("PL");
-            if (readPlatformAttr != null)
-                return readPlatformAttr.toString().toUpperCase().contains(name);
-        }
-        return false;
-    }
-
-
-    /**
-     * Returns the collections of reads sorted in coordinate order, according to the order defined
-     * in the reads themselves
-     *
-     * @param reads
-     * @return
-     */
-    public final static List<GATKSAMRecord> sortReadsByCoordinate(List<GATKSAMRecord> reads) {
-        final SAMRecordComparator comparer = new SAMRecordCoordinateComparator();
-        Collections.sort(reads, comparer);
-        return reads;
-    }
-
-    /**
-     * If a read starts in INSERTION, returns the first element length.
-     *
-     * Warning: If the read has Hard or Soft clips before the insertion this function will return 0.
-     *
-     * @param read
-     * @return the length of the first insertion, or 0 if there is none (see warning).
-     */
-    public final static int getFirstInsertionOffset(SAMRecord read) {
-        CigarElement e = read.getCigar().getCigarElement(0);
-        if ( e.getOperator() == CigarOperator.I )
-            return e.getLength();
-        else
-            return 0;
-    }
-
-    /**
-     * If a read ends in INSERTION, returns the last element length.
-     *
-     * Warning: If the read has Hard or Soft clips after the insertion this function will return 0.
-     *
-     * @param read
-     * @return the length of the last insertion, or 0 if there is none (see warning).
-     */
-    public final static int getLastInsertionOffset(SAMRecord read) {
-        CigarElement e = read.getCigar().getCigarElement(read.getCigarLength() - 1);
-        if ( e.getOperator() == CigarOperator.I )
-            return e.getLength();
-        else
-            return 0;
-    }
-
-    /**
-     * Determines what is the position of the read in relation to the interval.
-     * Note: This function uses the UNCLIPPED ENDS of the reads for the comparison.
-     * @param read the read
-     * @param interval the interval
-     * @return the overlap type as described by ReadAndIntervalOverlap enum (see above)
-     */
-    public static ReadAndIntervalOverlap getReadAndIntervalOverlapType(GATKSAMRecord read, GenomeLoc interval) {
-
-        int sStart = read.getSoftStart();
-        int sStop = read.getSoftEnd();
-        int uStart = read.getUnclippedStart();
-        int uStop = read.getUnclippedEnd();
-
-        if ( !read.getReferenceName().equals(interval.getContig()) )
-            return ReadAndIntervalOverlap.NO_OVERLAP_CONTIG;
-
-        else if ( uStop < interval.getStart() )
-            return ReadAndIntervalOverlap.NO_OVERLAP_LEFT;
-
-        else if ( uStart > interval.getStop() )
-            return ReadAndIntervalOverlap.NO_OVERLAP_RIGHT;
-
-        else if ( sStop < interval.getStart() )
-            return ReadAndIntervalOverlap.NO_OVERLAP_HARDCLIPPED_LEFT;
-
-        else if ( sStart > interval.getStop() )
-            return ReadAndIntervalOverlap.NO_OVERLAP_HARDCLIPPED_RIGHT;
-
-        else if ( (sStart >= interval.getStart()) &&
-                  (sStop <= interval.getStop()) )
-            return ReadAndIntervalOverlap.OVERLAP_CONTAINED;
-
-        else if ( (sStart < interval.getStart()) &&
-                  (sStop > interval.getStop()) )
-            return ReadAndIntervalOverlap.OVERLAP_LEFT_AND_RIGHT;
-
-        else if ( (sStart < interval.getStart()) )
-            return ReadAndIntervalOverlap.OVERLAP_LEFT;
-
-        else
-            return ReadAndIntervalOverlap.OVERLAP_RIGHT;
-    }
-
-    /**
-     * Pre-processes the results of getReadCoordinateForReferenceCoordinate(GATKSAMRecord, int) to take care of
-     * two corner cases:
-     * 
-     * 1. If clipping the right tail (end of the read) and getReadCoordinateForReferenceCoordinate falls inside
-     * a deletion, return the base after the deletion. If clipping the left tail (beginning of the read) it
-     * doesn't matter because it already returns the previous base by default.
-     * 
-     * 2. If clipping the left tail (beginning of the read), the read starts with an insertion, and you're
-     * requesting the first read-based coordinate, getReadCoordinateForReferenceCoordinate will skip
-     * the leading insertion (because it has the same reference coordinate as the following base).
-     *
-     * @param read
-     * @param refCoord
-     * @param tail
-     * @return the read coordinate corresponding to the requested reference coordinate for clipping.
-     */
-    @Requires({"refCoord >= read.getUnclippedStart()", "refCoord <= read.getUnclippedEnd() || (read.getUnclippedEnd() < read.getUnclippedStart())"})
-    @Ensures({"result >= 0", "result < read.getReadLength()"})
-    public static int getReadCoordinateForReferenceCoordinate(GATKSAMRecord read, int refCoord, ClippingTail tail) {
-        return getReadCoordinateForReferenceCoordinate(read.getSoftStart(), read.getCigar(), refCoord, tail, false);
-    }
-
-    public static int getReadCoordinateForReferenceCoordinateUpToEndOfRead(GATKSAMRecord read, int refCoord, ClippingTail tail) {
-        final int leftmostSafeVariantPosition = Math.max(read.getSoftStart(), refCoord);
-        return getReadCoordinateForReferenceCoordinate(read.getSoftStart(), read.getCigar(), leftmostSafeVariantPosition, tail, false);
-    }
-
-    public static int getReadCoordinateForReferenceCoordinate(final int alignmentStart, final Cigar cigar, final int refCoord, final ClippingTail tail, final boolean allowGoalNotReached) {
-        Pair<Integer, Boolean> result = getReadCoordinateForReferenceCoordinate(alignmentStart, cigar, refCoord, allowGoalNotReached);
-        int readCoord = result.getFirst();
-
-        // Corner case one: clipping the right tail and falls on deletion, move to the next
-        // read coordinate. It is not a problem for the left tail because the default answer
-        // from getReadCoordinateForReferenceCoordinate is to give the previous read coordinate.
-        if (result.getSecond() && tail == ClippingTail.RIGHT_TAIL)
-            readCoord++;
-
-        // clipping the left tail and first base is insertion, go to the next read coordinate
-        // with the same reference coordinate. Advance to the next cigar element, or to the
-        // end of the read if there is no next element.
-        final CigarElement firstElementIsInsertion = readStartsWithInsertion(cigar);
-        if (readCoord == 0 && tail == ClippingTail.LEFT_TAIL && firstElementIsInsertion != null)
-            readCoord = Math.min(firstElementIsInsertion.getLength(), cigar.getReadLength() - 1);
-
-        return readCoord;
-    }
-
-    /**
-     * Returns the read coordinate corresponding to the requested reference coordinate.
-     *
-     * WARNING: if the requested reference coordinate happens to fall inside or just before a deletion (or skipped region) in the read, this function
-     * will return the last read base before the deletion (or skipped region). This function returns a
-     * Pair(int readCoord, boolean fallsInsideOrJustBeforeDeletionOrSkippedRegion) so you can choose which readCoordinate to use when faced with
-     * a deletion (or skipped region).
-     *
-     * SUGGESTION: Use getReadCoordinateForReferenceCoordinate(GATKSAMRecord, int, ClippingTail) instead to get a
-     * pre-processed result according to normal clipping needs. Or you can use this function and tailor the
-     * behavior to your needs.
-     *
-     * @param read
-     * @param refCoord the requested reference coordinate
-     * @return the read coordinate corresponding to the requested reference coordinate. (see warning!)
-     */
-    @Requires({"refCoord >= read.getSoftStart()", "refCoord <= read.getSoftEnd()"})
-    @Ensures({"result.getFirst() >= 0", "result.getFirst() < read.getReadLength()"})
-    //TODO since we do not have contracts any more, should we check for the requirements in the method code?
-    public static Pair<Integer, Boolean> getReadCoordinateForReferenceCoordinate(GATKSAMRecord read, int refCoord) {
-        return getReadCoordinateForReferenceCoordinate(read.getSoftStart(), read.getCigar(), refCoord, false);
-    }
-
-    public static Pair<Integer, Boolean> getReadCoordinateForReferenceCoordinate(final int alignmentStart, final Cigar cigar, final int refCoord, final boolean allowGoalNotReached) {
-        int readBases = 0;
-        int refBases = 0;
-        boolean fallsInsideDeletionOrSkippedRegion = false;
-        boolean endJustBeforeDeletionOrSkippedRegion = false;
-        boolean fallsInsideOrJustBeforeDeletionOrSkippedRegion = false;
-
-        final int goal = refCoord - alignmentStart;  // The goal is to move this many reference bases
-        if (goal < 0) {
-            if (allowGoalNotReached) {
-                return new Pair<Integer, Boolean>(CLIPPING_GOAL_NOT_REACHED, false);
-            } else {
-                throw new ReviewedGATKException("Somehow the requested coordinate is not covered by the read. Too many deletions?");
-            }
-        }
-        boolean goalReached = refBases == goal;
-
-        Iterator<CigarElement> cigarElementIterator = cigar.getCigarElements().iterator();
-        while (!goalReached && cigarElementIterator.hasNext()) {
-            final CigarElement cigarElement = cigarElementIterator.next();
-            int shift = 0;
-
-            if (cigarElement.getOperator().consumesReferenceBases() || cigarElement.getOperator() == CigarOperator.SOFT_CLIP) {
-                if (refBases + cigarElement.getLength() < goal)
-                    shift = cigarElement.getLength();
-                else
-                    shift = goal - refBases;
-
-                refBases += shift;
-            }
-            goalReached = refBases == goal;
-
-            if (!goalReached && cigarElement.getOperator().consumesReadBases())
-                readBases += cigarElement.getLength();
-
-            if (goalReached) {
-                // Is this base's reference position within this cigar element? Or did we use it all?
-                final boolean endsWithinCigar = shift < cigarElement.getLength();
-
-                // If it isn't, we need to check the next one. There should *ALWAYS* be a next one
-                // since we checked if the goal coordinate is within the read length, so this is just a sanity check.
-                if (!endsWithinCigar && !cigarElementIterator.hasNext()) {
-                    if (allowGoalNotReached) {
-                        return new Pair<Integer, Boolean>(CLIPPING_GOAL_NOT_REACHED, false);
-                    } else {
-                        throw new ReviewedGATKException(String.format("Reference coordinate corresponds to a non-existent base in the read. This should never happen -- check read with alignment start: %s  and cigar: %s", alignmentStart, cigar));
-                    }
-                }
-
-                CigarElement nextCigarElement = null;
-
-                // if we end inside the current cigar element, we just have to check if it is a deletion (or skipped region)
-                if (endsWithinCigar)
-                    fallsInsideDeletionOrSkippedRegion = (cigarElement.getOperator() == CigarOperator.DELETION || cigarElement.getOperator() == CigarOperator.SKIPPED_REGION) ;
-
-                // if we end outside the current cigar element, we need to check if the next element is an insertion, deletion or skipped region.
-                else {
-                    nextCigarElement = cigarElementIterator.next();
-
-                    // if it's an insertion, we need to clip the whole insertion before looking at the next element
-                    if (nextCigarElement.getOperator() == CigarOperator.INSERTION) {
-                        readBases += nextCigarElement.getLength();
-                        if (!cigarElementIterator.hasNext()) {
-                            if (allowGoalNotReached) {
-                                return new Pair<Integer, Boolean>(CLIPPING_GOAL_NOT_REACHED, false);
-                            } else {
-                                throw new ReviewedGATKException(String.format("Reference coordinate corresponds to a non-existent base in the read. This should never happen -- check read with alignment start: %s  and cigar: %s", alignmentStart, cigar));
-                            }
-                        }
-
-                        nextCigarElement = cigarElementIterator.next();
-                    }
-
-                    // if it's a deletion (or skipped region), we will pass the information on to be handled downstream.
-                    endJustBeforeDeletionOrSkippedRegion = (nextCigarElement.getOperator() == CigarOperator.DELETION || nextCigarElement.getOperator() == CigarOperator.SKIPPED_REGION);
-                }
-
-                fallsInsideOrJustBeforeDeletionOrSkippedRegion = endJustBeforeDeletionOrSkippedRegion || fallsInsideDeletionOrSkippedRegion;
-
-                // If we reached our goal outside a deletion (or skipped region), add the shift
-                if (!fallsInsideOrJustBeforeDeletionOrSkippedRegion && cigarElement.getOperator().consumesReadBases())
-                    readBases += shift;
-
-                // If we reached our goal just before a deletion (or skipped region) we need
-        // to add the shift of the current cigar element but go back to its last element to return the last
-                // base before the deletion (or skipped region) (see warning in function contracts)
-                else if (endJustBeforeDeletionOrSkippedRegion && cigarElement.getOperator().consumesReadBases())
-                    readBases += shift - 1;
-
-                // If we reached our goal inside a deletion (or skipped region), or just between a deletion and a skipped region,
-                // then we must backtrack to the last base before the deletion (or skipped region)
-                else if (fallsInsideDeletionOrSkippedRegion ||
-                        (endJustBeforeDeletionOrSkippedRegion && nextCigarElement.getOperator().equals(CigarOperator.N)) ||
-                        (endJustBeforeDeletionOrSkippedRegion && nextCigarElement.getOperator().equals(CigarOperator.D)))
-                    readBases--;
-            }
-        }
-
-        if (!goalReached) {
-            if (allowGoalNotReached) {
-                return new Pair<Integer, Boolean>(CLIPPING_GOAL_NOT_REACHED, false);
-            } else {
-                throw new ReviewedGATKException("Somehow the requested coordinate is not covered by the read. Alignment " + alignmentStart + " | " + cigar);
-            }
-        }
-
-        return new Pair<Integer, Boolean>(readBases, fallsInsideOrJustBeforeDeletionOrSkippedRegion);
-    }
-
-    /**
-     * Compares two SAMRecords only on the basis of alignment start.  Note that
-     * comparisons are performed ONLY on the basis of alignment start; any
-     * two SAM records with the same alignment start will be considered equal.
-     *
-     * Unmapped alignments will all be considered equal.
-     */
-
-    @Requires({"read1 != null", "read2 != null"})
-    public static int compareSAMRecords(GATKSAMRecord read1, GATKSAMRecord read2) {
-        AlignmentStartComparator comp = new AlignmentStartComparator();
-        return comp.compare(read1, read2);
-    }
-
-    /**
-     * Is a base inside a read?
-     *
-     * @param read                the read to evaluate
-     * @param referenceCoordinate the reference coordinate of the base to test
-     * @return true if it is inside the read, false otherwise.
-     */
-    public static boolean isInsideRead(final GATKSAMRecord read, final int referenceCoordinate) {
-        return referenceCoordinate >= read.getAlignmentStart() && referenceCoordinate <= read.getAlignmentEnd();
-    }
-
-    /**
-     * Is this read all insertion?
-     *
-     * @param read
-     * @return whether or not the only element in the cigar string is an Insertion
-     */
-    public static boolean readIsEntirelyInsertion(GATKSAMRecord read) {
-        for (CigarElement cigarElement : read.getCigar().getCigarElements()) {
-            if (cigarElement.getOperator() != CigarOperator.INSERTION)
-                return false;
-        }
-        return true;
-    }
-
-    /**
-     * @see #readStartsWithInsertion(htsjdk.samtools.Cigar, boolean) with ignoreClipOps set to true
-     */
-    public static CigarElement readStartsWithInsertion(final Cigar cigarForRead) {
-        return readStartsWithInsertion(cigarForRead, true);
-    }
-
-    /**
-     * Checks if a read starts with an insertion.
-     *
-     * @param cigarForRead    the CIGAR to evaluate
-     * @param ignoreSoftClipOps   should we ignore S operators when evaluating whether an I operator is at the beginning?  Note that H operators are always ignored.
-     * @return the element if it's a leading insertion or null otherwise
-     */
-    public static CigarElement readStartsWithInsertion(final Cigar cigarForRead, final boolean ignoreSoftClipOps) {
-        for ( final CigarElement cigarElement : cigarForRead.getCigarElements() ) {
-            if ( cigarElement.getOperator() == CigarOperator.INSERTION )
-                return cigarElement;
-
-            else if ( cigarElement.getOperator() != CigarOperator.HARD_CLIP && ( !ignoreSoftClipOps || cigarElement.getOperator() != CigarOperator.SOFT_CLIP) )
-                break;
-        }
-        return null;
-    }
-
-    /**
-     * Returns the coverage distribution of a list of reads within the desired region.
-     *
-     * See getCoverageDistributionOfRead for information on how the coverage is calculated.
-     *
-     * @param list          the list of reads covering the region
-     * @param startLocation the first reference coordinate of the region (inclusive)
-     * @param stopLocation  the last reference coordinate of the region (inclusive)
-     * @return an array with the coverage of each position from startLocation to stopLocation
-     */
-    public static int [] getCoverageDistributionOfReads(List<GATKSAMRecord> list, int startLocation, int stopLocation) {
-        int [] totalCoverage = new int[stopLocation - startLocation + 1];
-
-        for (GATKSAMRecord read : list) {
-            int [] readCoverage = getCoverageDistributionOfRead(read, startLocation, stopLocation);
-            totalCoverage = MathUtils.addArrays(totalCoverage, readCoverage);
-        }
-
-        return totalCoverage;
-    }
-
-    /**
-     * Returns the coverage distribution of a single read within the desired region.
-     *
-     * Note: This function counts DELETIONS as coverage (since the main purpose is to downsample
-     * reads for variant regions, and deletions count as variants)
-     *
-     * @param read          the read to get the coverage distribution of
-     * @param startLocation the first reference coordinate of the region (inclusive)
-     * @param stopLocation  the last reference coordinate of the region (inclusive)
-     * @return an array with the coverage of each position from startLocation to stopLocation
-     */
-    public static int [] getCoverageDistributionOfRead(GATKSAMRecord read, int startLocation, int stopLocation) {
-        int [] coverage = new int[stopLocation - startLocation + 1];
-        int refLocation = read.getSoftStart();
-        for (CigarElement cigarElement : read.getCigar().getCigarElements()) {
-            switch (cigarElement.getOperator()) {
-                case S:
-                case M:
-                case EQ:
-                case N:
-                case X:
-                case D:
-                    for (int i = 0; i < cigarElement.getLength(); i++) {
-                        if (refLocation >= startLocation && refLocation <= stopLocation) {
-                            coverage[refLocation - startLocation]++;
-                        }
-                        refLocation++;
-                    }
-                    break;
-
-                case P:
-                case I:
-                case H:
-                    break;
-            }
-
-            if (refLocation > stopLocation)
-                break;
-        }
-        return coverage;
-    }
-
-    /**
-     * Makes association maps for the reads and loci coverage as described below :
-     *
-     *  - First: locusToReadMap -- a HashMap that describes for each locus, which reads contribute to its coverage.
-     *    Note: Locus is in reference coordinates.
-     *    Example: Locus => {read1, read2, ..., readN}
-     *
-     *  - Second: readToLocusMap -- a HashMap that describes for each read what loci it contributes to the coverage.
-     *    Note: Locus is a boolean array, indexed from 0 (= startLocation) to N (= stopLocation), with value==true meaning it contributes to the coverage.
-     *    Example: Read => {true, true, false, ... false}
-     *
-     * @param readList      the list of reads to generate the association mappings
-     * @param startLocation the first reference coordinate of the region (inclusive)
-     * @param stopLocation  the last reference coordinate of the region (inclusive)
-     * @return the two hashmaps described above
-     */
-    public static Pair<HashMap<Integer, HashSet<GATKSAMRecord>> , HashMap<GATKSAMRecord, Boolean[]>> getBothReadToLociMappings (List<GATKSAMRecord> readList, int startLocation, int stopLocation) {
-        int arraySize = stopLocation - startLocation + 1;
-
-        HashMap<Integer, HashSet<GATKSAMRecord>> locusToReadMap = new HashMap<Integer, HashSet<GATKSAMRecord>>(2*(stopLocation - startLocation + 1), 0.5f);
-        HashMap<GATKSAMRecord, Boolean[]> readToLocusMap = new HashMap<GATKSAMRecord, Boolean[]>(2*readList.size(), 0.5f);
-
-        for (int i = startLocation; i <= stopLocation; i++)
-            locusToReadMap.put(i, new HashSet<GATKSAMRecord>()); // Initialize the locusToRead map with empty lists
-
-        for (GATKSAMRecord read : readList) {
-            readToLocusMap.put(read, new Boolean[arraySize]);       // Initialize the readToLocus map with empty arrays
-
-            int [] readCoverage = getCoverageDistributionOfRead(read, startLocation, stopLocation);
-
-            for (int i = 0; i < readCoverage.length; i++) {
-                int refLocation = i + startLocation;
-                if (readCoverage[i] > 0) {
-                    // Update the hash for this locus
-                    HashSet<GATKSAMRecord> readSet = locusToReadMap.get(refLocation);
-                    readSet.add(read);
-
-                    // Add this locus to the read hash
-                    readToLocusMap.get(read)[refLocation - startLocation] = true;
-                }
-                else
-                    // Update the boolean array with a 'no coverage' from this read to this locus
-                    readToLocusMap.get(read)[refLocation-startLocation] = false;
-            }
-        }
-        return new Pair<HashMap<Integer, HashSet<GATKSAMRecord>>, HashMap<GATKSAMRecord, Boolean[]>>(locusToReadMap, readToLocusMap);
-    }
-
-    /**
-     * Create random read qualities
-     *
-     * @param length the length of the read
-     * @return an array with randomized base qualities between 0 and 50
-     */
-    public static byte[] createRandomReadQuals(int length) {
-        Random random = GenomeAnalysisEngine.getRandomGenerator();
-        byte[] quals = new byte[length];
-        for (int i = 0; i < length; i++)
-            quals[i] = (byte) random.nextInt(50);
-        return quals;
-    }
-
-    /**
-     * Create random read qualities
-     *
-     * @param length  the length of the read
-     * @param allowNs whether or not to allow N's in the read
-     * @return an array with randomized bases (A-N) with equal probability
-     */
-    public static byte[] createRandomReadBases(int length, boolean allowNs) {
-        Random random = GenomeAnalysisEngine.getRandomGenerator();
-        int numberOfBases = allowNs ? 5 : 4;
-        byte[] bases = new byte[length];
-        for (int i = 0; i < length; i++) {
-            switch (random.nextInt(numberOfBases)) {
-                case 0:
-                    bases[i] = 'A';
-                    break;
-                case 1:
-                    bases[i] = 'C';
-                    break;
-                case 2:
-                    bases[i] = 'G';
-                    break;
-                case 3:
-                    bases[i] = 'T';
-                    break;
-                case 4:
-                    bases[i] = 'N';
-                    break;
-                default:
-                    throw new ReviewedGATKException("Something went wrong, this is just impossible");
-            }
-        }
-        return bases;
-    }
-
-    public static GATKSAMRecord createRandomRead(int length) {
-        return createRandomRead(length, true);
-    }
-
-    public static GATKSAMRecord createRandomRead(int length, boolean allowNs) {
-        byte[] quals = ReadUtils.createRandomReadQuals(length);
-        byte[] bbases = ReadUtils.createRandomReadBases(length, allowNs);
-        return ArtificialSAMUtils.createArtificialRead(bbases, quals, bbases.length + "M");
-    }
-
-
-    public static String prettyPrintSequenceRecords ( SAMSequenceDictionary sequenceDictionary ) {
-        String[] sequenceRecordNames = new String[sequenceDictionary.size()];
-        int sequenceRecordIndex = 0;
-        for (SAMSequenceRecord sequenceRecord : sequenceDictionary.getSequences())
-            sequenceRecordNames[sequenceRecordIndex++] = sequenceRecord.getSequenceName();
-        return Arrays.deepToString(sequenceRecordNames);
-    }
-
-    /**
-     * Calculates the reference coordinate for a read coordinate
-     *
-     * @param read   the read
-     * @param offset the base in the read (coordinate in the read)
-     * @return the reference coordinate correspondent to this base
-     */
-    public static long getReferenceCoordinateForReadCoordinate(GATKSAMRecord read, int offset) {
-        if (offset > read.getReadLength()) 
-            throw new ReviewedGATKException(String.format(OFFSET_OUT_OF_BOUNDS_EXCEPTION, offset, read.getReadLength()));
-
-        long location = read.getAlignmentStart();
-        Iterator<CigarElement> cigarElementIterator = read.getCigar().getCigarElements().iterator();
-        while (offset > 0 && cigarElementIterator.hasNext()) {
-            CigarElement cigarElement = cigarElementIterator.next();
-            long move = 0;
-            if (cigarElement.getOperator().consumesReferenceBases())  
-                move = (long) Math.min(cigarElement.getLength(), offset);
-            location += move;
-            offset -= move;
-        }
-        if (offset > 0 && !cigarElementIterator.hasNext()) 
-            throw new ReviewedGATKException(OFFSET_NOT_ZERO_EXCEPTION);
-
-        return location;
-    }
-
-    /**
-     * Creates a map with each event in the read (cigar operator) and the read coordinate where it happened.
-     *
-     * Example:
-     *  D -> 2, 34, 75
-     *  I -> 55
-     *  S -> 0, 101
-     *  H -> 101
-     *
-     * @param read the read
-     * @return a map with the properties described above. See example
-     */
-    public static Map<CigarOperator, ArrayList<Integer>> getCigarOperatorForAllBases (GATKSAMRecord read) {
-        Map<CigarOperator, ArrayList<Integer>> events = new HashMap<CigarOperator, ArrayList<Integer>>();
-
-        int position = 0;
-        for (CigarElement cigarElement : read.getCigar().getCigarElements()) {
-            CigarOperator op = cigarElement.getOperator();
-            if (op.consumesReadBases()) {
-                ArrayList<Integer> list = events.get(op);
-                if (list == null) {
-                    list = new ArrayList<Integer>();
-                    events.put(op, list);
-                }
-                for (int i = position; i < cigarElement.getLength(); i++)
-                    list.add(position++);
-            }
-            else {
-                ArrayList<Integer> list = events.get(op);
-                if (list == null) {
-                    list = new ArrayList<Integer>();
-                    events.put(op, list);
-                }
-                list.add(position);
-            }
-        }
-        return events;
-    }
-
-    /**
-     * Given a read, outputs the read bases in a string format
-     *
-     * @param read the read
-     * @return a string representation of the read bases
-     */
-    public static String convertReadBasesToString(GATKSAMRecord read) {
-        String bases = "";
-        for (byte b : read.getReadBases()) {
-            bases += (char) b;
-        }
-        return bases.toUpperCase();
-    }
-
-    /**
-     * Given a read, outputs the base qualities in a string format
-     *
-     * @param quals the read qualities
-     * @return a string representation of the base qualities
-     */
-    public static String convertReadQualToString(byte[] quals) {
-        String result = "";
-        for (byte b : quals) {
-            result += (char) (33 + b);
-        }
-        return result;
-    }
-
-    /**
-     * Given a read, outputs the base qualities in a string format
-     *
-     * @param read the read
-     * @return a string representation of the base qualities
-     */
-    public static String convertReadQualToString(GATKSAMRecord read) {
-        return convertReadQualToString(read.getBaseQualities());
-    }
-
-    /**
-     * Returns the reverse complement of the read bases
-     *
-     * @param bases the read bases
-     * @return the reverse complement of the read bases
-     */
-    public static String getBasesReverseComplement(byte[] bases) {
-        String reverse = "";
-        for (int i = bases.length-1; i >=0; i--) {
-            reverse += (char) BaseUtils.getComplement(bases[i]);
-        }
-        return reverse;
-    }
-
-    /**
-     * Returns the reverse complement of the read bases
-     *
-     * @param read the read
-     * @return the reverse complement of the read bases
-     */
-    public static String getBasesReverseComplement(GATKSAMRecord read) {
-        return getBasesReverseComplement(read.getReadBases());
-    }
-
-    /**
-     * Calculate the maximum read length from the given list of reads.
-     * @param reads list of reads
-     * @return      non-negative integer
-     */
-    @Ensures({"result >= 0"})
-    public static int getMaxReadLength( final List<GATKSAMRecord> reads ) {
-        if( reads == null ) { throw new IllegalArgumentException("Attempting to check a null list of reads."); }
-
-        int maxReadLength = 0;
-        for( final GATKSAMRecord read : reads ) {
-            maxReadLength = Math.max(maxReadLength, read.getReadLength());
-        }
-        return maxReadLength;
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/sam/SAMFileReaderBuilder.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/sam/SAMFileReaderBuilder.java
deleted file mode 100644
index 2c5ea5f..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/sam/SAMFileReaderBuilder.java
+++ /dev/null
@@ -1,84 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.sam;
-
-import htsjdk.samtools.SAMFileReader;
-import htsjdk.samtools.ValidationStringency;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-
-import java.io.File;
-
-/**
- * Allows the user to steadily accumulate information about what
- * components go into a SAM file writer, ultimately using this
- * information to create a SAM file writer on demand.
- *
- * @author mhanna
- * @version 0.1
- */
-public class SAMFileReaderBuilder {
-    /**
-     * To which file should output be written?
-     */
-    private File samFile = null;
-
-    /**
-     * What compression level should be used when building this file?
-     */
-    private ValidationStringency validationStringency = null;
-
-    /**
-     * Sets the handle of the sam file to which data should be written.
-     * @param samFile The SAM file into which data should flow.
-     */
-    public void setSAMFile( File samFile ) {
-        this.samFile = samFile;
-    }
-
-    /**
-     * Sets the validation stringency to apply when reading this sam file.
-     * @param validationStringency Stringency to apply.  Must not be null.
-     */
-    public void setValidationStringency( ValidationStringency validationStringency ) {
-        this.validationStringency = validationStringency;
-    }
-
-    /**
-     * Create the SAM writer, given the constituent parts accrued.
-     * @return Newly minted SAM file writer.
-     */
-    public SAMFileReader build() {
-        if( samFile == null )
-            throw new ReviewedGATKException( "Filename for output sam file must be supplied.");
-        if( validationStringency == null )
-            throw new ReviewedGATKException( "Header for output sam file must be supplied.");
-
-        SAMFileReader reader = new SAMFileReader( samFile );
-        reader.setValidationStringency( validationStringency );
-
-        return reader;
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/sam/SimplifyingSAMFileWriter.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/sam/SimplifyingSAMFileWriter.java
deleted file mode 100644
index 4214619..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/sam/SimplifyingSAMFileWriter.java
+++ /dev/null
@@ -1,86 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.sam;
-
-import htsjdk.samtools.SAMFileHeader;
-import htsjdk.samtools.SAMFileWriter;
-import htsjdk.samtools.SAMRecord;
-import htsjdk.samtools.util.ProgressLoggerInterface;
-
-/**
- * XXX
- */
-public class SimplifyingSAMFileWriter implements SAMFileWriter {
-    final SAMFileWriter dest;
-
-    public SimplifyingSAMFileWriter(final SAMFileWriter finalDestination) {
-        this.dest = finalDestination;
-    }
-
-    public void addAlignment( SAMRecord read ) {
-        if ( keepRead(read) ) {
-            dest.addAlignment(simplifyRead(read));
-
-        }
-    }
-
-    /**
-     * Retrieves the header to use when creating the new SAM file.
-     * @return header to use when creating the new SAM file.
-     */
-    public SAMFileHeader getFileHeader() {
-        return dest.getFileHeader();
-    }
-
-    /**
-     * @{inheritDoc}
-     */
-    public void close() {
-        dest.close();
-    }
-
-
-    public static final boolean keepRead(SAMRecord read) {
-        return ! excludeRead(read);
-    }
-
-    public static final boolean excludeRead(SAMRecord read) {
-        return read.getReadUnmappedFlag() || read.getReadFailsVendorQualityCheckFlag() || read.getDuplicateReadFlag() || read.getNotPrimaryAlignmentFlag();
-    }
-
-    public static final SAMRecord simplifyRead(SAMRecord read) {
-        // the only attribute we keep is the RG
-        Object rg = read.getAttribute("RG");
-        read.clearAttributes();
-        read.setAttribute("RG", rg);
-        return read;
-    }
-
-    @Override
-    public void setProgressLogger(final ProgressLoggerInterface logger) {
-        dest.setProgressLogger(logger);
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/sam/package-info.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/sam/package-info.java
deleted file mode 100644
index ee2bcec..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/sam/package-info.java
+++ /dev/null
@@ -1,26 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.sam;
\ No newline at end of file
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/smithwaterman/GlobalEdgeGreedySWPairwiseAlignment.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/smithwaterman/GlobalEdgeGreedySWPairwiseAlignment.java
deleted file mode 100644
index 666ca8b..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/smithwaterman/GlobalEdgeGreedySWPairwiseAlignment.java
+++ /dev/null
@@ -1,208 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.smithwaterman;
-
-import htsjdk.samtools.Cigar;
-import htsjdk.samtools.CigarElement;
-import org.broadinstitute.gatk.utils.Utils;
-import org.broadinstitute.gatk.utils.sam.AlignmentUtils;
-
-import java.util.*;
-
-/**
- * Pairwise discrete Smith-Waterman alignment with an edge greedy implementation
- *
- * ************************************************************************
- * ****                    IMPORTANT NOTE:                             ****
- * ****  This class assumes that all bytes come from UPPERCASED chars! ****
- * ************************************************************************
- *
- * User: ebanks
- */
-public final class GlobalEdgeGreedySWPairwiseAlignment extends SWPairwiseAlignment {
-
-    private final static boolean DEBUG_MODE = false;
-
-    /**
-     * Create a new greedy SW pairwise aligner
-     *
-     * @param reference the reference sequence we want to align
-     * @param alternate the alternate sequence we want to align
-     * @param parameters the SW parameters to use
-     */
-    public GlobalEdgeGreedySWPairwiseAlignment(final byte[] reference, final byte[] alternate, final Parameters parameters) {
-        super(reference, alternate, parameters);
-    }
-
-    /**
-     * Create a new SW pairwise aligner
-     *
-     * After creating the object the two sequences are aligned with an internal call to align(seq1, seq2)
-     *
-     * @param reference the reference sequence we want to align
-     * @param alternate the alternate sequence we want to align
-     * @param namedParameters the named parameter set to get our parameters from
-     */
-    public GlobalEdgeGreedySWPairwiseAlignment(final byte[] reference, final byte[] alternate, final SWParameterSet namedParameters) {
-        this(reference, alternate, namedParameters.parameters);
-    }
-
-    /**
-     * @see #GlobalEdgeGreedySWPairwiseAlignment(byte[], byte[], SWParameterSet) with original default parameters
-     */
-    public GlobalEdgeGreedySWPairwiseAlignment(byte[] reference, byte[] alternate) {
-        this(reference, alternate, SWParameterSet.ORIGINAL_DEFAULT);
-    }
-
-    /**
-     * Aligns the alternate sequence to the reference sequence
-     *
-     * @param reference  ref sequence
-     * @param alternate  alt sequence
-     */
-    @Override
-    protected void align(final byte[] reference, final byte[] alternate) {
-        if ( reference == null || reference.length == 0 )
-            throw new IllegalArgumentException("Non-null, non-empty reference sequences are required for the Smith-Waterman calculation");
-        if ( alternate == null || alternate.length == 0 )
-            throw new IllegalArgumentException("Non-null, non-empty alternate sequences are required for the Smith-Waterman calculation");
-
-        final int forwardEdgeMatch = Utils.longestCommonPrefix(reference, alternate, Integer.MAX_VALUE);
-
-        // edge case: one sequence is a strict prefix of the other
-        if ( forwardEdgeMatch == reference.length || forwardEdgeMatch == alternate.length ) {
-            alignmentResult = new SWPairwiseAlignmentResult(makeCigarForStrictPrefixAndSuffix(reference, alternate, forwardEdgeMatch, 0), 0);
-            return;
-        }
-
-        int reverseEdgeMatch = Utils.longestCommonSuffix(reference, alternate, Integer.MAX_VALUE);
-
-        // edge case: one sequence is a strict suffix of the other
-        if ( reverseEdgeMatch == reference.length || reverseEdgeMatch == alternate.length ) {
-            alignmentResult = new SWPairwiseAlignmentResult(makeCigarForStrictPrefixAndSuffix(reference, alternate, 0, reverseEdgeMatch), 0);
-            return;
-        }
-
-        final int sizeOfRefToAlign = reference.length - forwardEdgeMatch - reverseEdgeMatch;
-        final int sizeOfAltToAlign = alternate.length - forwardEdgeMatch - reverseEdgeMatch;
-
-        // edge case: one sequence is a strict subset of the other accounting for both prefix and suffix
-        final int minSizeToAlign = Math.min(sizeOfRefToAlign, sizeOfAltToAlign);
-        if ( minSizeToAlign < 0 )
-            reverseEdgeMatch += minSizeToAlign;
-        if ( sizeOfRefToAlign <= 0 || sizeOfAltToAlign <= 0 ) {
-            alignmentResult = new SWPairwiseAlignmentResult(makeCigarForStrictPrefixAndSuffix(reference, alternate, forwardEdgeMatch, reverseEdgeMatch), 0);
-            return;
-        }
-
-        final byte[] refToAlign = Utils.trimArray(reference, forwardEdgeMatch, reverseEdgeMatch);
-        final byte[] altToAlign = Utils.trimArray(alternate, forwardEdgeMatch, reverseEdgeMatch);
-
-        final int[][] sw = new int[(sizeOfRefToAlign+1)][(sizeOfAltToAlign+1)];
-        if ( keepScoringMatrix ) SW = sw;
-        final int[][] btrack = new int[(sizeOfRefToAlign+1)][(sizeOfAltToAlign+1)];
-
-        calculateMatrix(refToAlign, altToAlign, sw, btrack, OVERHANG_STRATEGY.INDEL);
-
-        if ( DEBUG_MODE ) {
-            System.out.println(new String(refToAlign) + " vs. " + new String(altToAlign));
-            debugMatrix(sw);
-            System.out.println("----");
-            debugMatrix(btrack);
-            System.out.println();
-        }
-
-        alignmentResult = calculateCigar(forwardEdgeMatch, reverseEdgeMatch, sw, btrack);
-    }
-
-
-    private void debugMatrix(final int[][] matrix) {
-        for ( int i = 0; i < matrix.length; i++ ) {
-            int [] cur = matrix[i];
-            for ( int j = 0; j < cur.length; j++ )
-                System.out.print(cur[j] + " ");
-            System.out.println();
-        }
-    }
-
-    /**
-     * Creates a CIGAR for the case where the prefix/suffix match combination encompasses an entire sequence
-     *
-     * @param reference            the reference sequence
-     * @param alternate            the alternate sequence
-     * @param matchingPrefix       the prefix match size
-     * @param matchingSuffix       the suffix match size
-     * @return non-null CIGAR
-     */
-    private Cigar makeCigarForStrictPrefixAndSuffix(final byte[] reference, final byte[] alternate, final int matchingPrefix, final int matchingSuffix) {
-
-        final List<CigarElement> result = new ArrayList<CigarElement>();
-
-        // edge case: no D or I element
-        if ( reference.length == alternate.length ) {
-            result.add(makeElement(State.MATCH, matchingPrefix + matchingSuffix));
-        } else {
-            // add the first M element
-            if ( matchingPrefix > 0 )
-                result.add(makeElement(State.MATCH, matchingPrefix));
-
-            // add the D or I element
-            if ( alternate.length > reference.length )
-                result.add(makeElement(State.INSERTION, alternate.length - reference.length));
-            else // if ( reference.length > alternate.length )
-                result.add(makeElement(State.DELETION, reference.length - alternate.length));
-
-            // add the last M element
-            if ( matchingSuffix > 0 )
-                result.add(makeElement(State.MATCH, matchingSuffix));
-        }
-
-        return new Cigar(result);
-    }
-
-    /**
-     * Calculates the CIGAR for the alignment from the back track matrix
-     *
-     * @param matchingPrefix       the prefix match size
-     * @param matchingSuffix       the suffix match size
-     * @param sw                   the Smith-Waterman matrix to use
-     * @param btrack               the back track matrix to use
-     * @return non-null SWPairwiseAlignmentResult object
-     */
-    protected SWPairwiseAlignmentResult calculateCigar(final int matchingPrefix, final int matchingSuffix,
-                                                       final int[][] sw, final int[][] btrack) {
-
-        final SWPairwiseAlignmentResult SW_result = calculateCigar(sw, btrack, OVERHANG_STRATEGY.INDEL);
-
-        final LinkedList<CigarElement> lce = new LinkedList<CigarElement>(SW_result.cigar.getCigarElements());
-        if ( matchingPrefix > 0 )
-            lce.addFirst(makeElement(State.MATCH, matchingPrefix));
-        if ( matchingSuffix > 0 )
-            lce.addLast(makeElement(State.MATCH, matchingSuffix));
-
-        return new SWPairwiseAlignmentResult(AlignmentUtils.consolidateCigar(new Cigar(lce)), 0);
-    }
-}
\ No newline at end of file
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/smithwaterman/Parameters.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/smithwaterman/Parameters.java
deleted file mode 100644
index 46cb8be..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/smithwaterman/Parameters.java
+++ /dev/null
@@ -1,62 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.smithwaterman;
-
-/**
- * Holds the core Smith-Waterman alignment parameters of
- *
- * match value, and mismatch, gap open and gap extension penalties
- *
- * User: depristo
- * Date: 4/11/13
- * Time: 12:03 PM
- */
-public final class Parameters {
-    public final int w_match;
-    public final int w_mismatch;
-    public final int w_open;
-    public final int w_extend;
-
-    /**
-     * Create a new set of SW parameters
-     * @param w_match the match score
-     * @param w_mismatch the mismatch penalty
-     * @param w_open the gap open penalty
-     * @param w_extend the gap extension penalty
-
-     */
-    public Parameters(final int w_match, final int w_mismatch, final int w_open, final int w_extend) {
-        if ( w_mismatch > 0 ) throw new IllegalArgumentException("w_mismatch must be <= 0 but got " + w_mismatch);
-        if ( w_open> 0 ) throw new IllegalArgumentException("w_open must be <= 0 but got " + w_open);
-        if ( w_extend> 0 ) throw new IllegalArgumentException("w_extend must be <= 0 but got " + w_extend);
-
-        this.w_match = w_match;
-        this.w_mismatch = w_mismatch;
-        this.w_open = w_open;
-        this.w_extend = w_extend;
-    }
-
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/smithwaterman/SWPairwiseAlignment.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/smithwaterman/SWPairwiseAlignment.java
deleted file mode 100644
index aa38c06..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/smithwaterman/SWPairwiseAlignment.java
+++ /dev/null
@@ -1,599 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.smithwaterman;
-
-import htsjdk.samtools.Cigar;
-import htsjdk.samtools.CigarElement;
-import htsjdk.samtools.CigarOperator;
-import org.broadinstitute.gatk.utils.exceptions.GATKException;
-import org.broadinstitute.gatk.utils.sam.AlignmentUtils;
-
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.List;
-
-/**
- * Pairwise discrete smith-waterman alignment
- *
- * ************************************************************************
- * ****                    IMPORTANT NOTE:                             ****
- * ****  This class assumes that all bytes come from UPPERCASED chars! ****
- * ************************************************************************
- *
- * User: asivache
- * Date: Mar 23, 2009
- * Time: 1:54:54 PM
- */
-public class SWPairwiseAlignment implements SmithWaterman {
-
-    protected SWPairwiseAlignmentResult alignmentResult;
-
-    protected final Parameters parameters;
-
-    /**
-     * The state of a trace step through the matrix
-     */
-    protected enum State {
-        MATCH,
-        INSERTION,
-        DELETION,
-        CLIP
-    }
-
-    /**
-     * What strategy should we use when the best path does not start/end at the corners of the matrix?
-     */
-    public enum OVERHANG_STRATEGY {
-        /*
-         * Add softclips for the overhangs
-         */
-        SOFTCLIP,
-
-        /*
-         * Treat the overhangs as proper insertions/deletions
-         */
-        INDEL,
-
-        /*
-         * Treat the overhangs as proper insertions/deletions for leading (but not trailing) overhangs.
-         * This is useful e.g. when we want to merge dangling tails in an assembly graph: because we don't
-         * expect the dangling tail to reach the end of the reference path we are okay ignoring trailing
-         * deletions - but leading indels are still very much relevant.
-         */
-        LEADING_INDEL,
-
-        /*
-         * Just ignore the overhangs
-         */
-        IGNORE
-    }
-
-    protected static boolean cutoff = false;
-
-    protected OVERHANG_STRATEGY overhang_strategy = OVERHANG_STRATEGY.SOFTCLIP;
-
-    /**
-     * The SW scoring matrix, stored for debugging purposes if keepScoringMatrix is true
-     */
-    protected int[][] SW = null;
-
-    /**
-     * Only for testing purposes in the SWPairwiseAlignmentMain function
-     * set to true to keep SW scoring matrix after align call
-     */
-    protected static boolean keepScoringMatrix = false;
-
-    /**
-     * Create a new SW pairwise aligner.
-     *
-     * @deprecated in favor of constructors using the Parameter or ParameterSet class
-     */
-    @Deprecated
-    public SWPairwiseAlignment(byte[] seq1, byte[] seq2, int match, int mismatch, int open, int extend ) {
-        this(seq1, seq2, new Parameters(match, mismatch, open, extend));
-    }
-
-    /**
-     * Create a new SW pairwise aligner
-     *
-     * After creating the object the two sequences are aligned with an internal call to align(seq1, seq2)
-     *
-     * @param seq1 the first sequence we want to align
-     * @param seq2 the second sequence we want to align
-     * @param parameters the SW parameters to use
-     */
-    public SWPairwiseAlignment(byte[] seq1, byte[] seq2, Parameters parameters) {
-        this(parameters);
-        align(seq1,seq2);
-    }
-
-    /**
-     * Create a new SW pairwise aligner
-     *
-     * After creating the object the two sequences are aligned with an internal call to align(seq1, seq2)
-     *
-     * @param seq1 the first sequence we want to align
-     * @param seq2 the second sequence we want to align
-     * @param parameters the SW parameters to use
-     * @param strategy   the overhang strategy to use
-     */
-    public SWPairwiseAlignment(final byte[] seq1, final byte[] seq2, final SWParameterSet parameters, final OVERHANG_STRATEGY strategy) {
-        this(parameters.parameters);
-        overhang_strategy = strategy;
-        align(seq1, seq2);
-    }
-
-    /**
-     * Create a new SW pairwise aligner, without actually doing any alignment yet
-     *
-     * @param parameters the SW parameters to use
-     */
-    protected SWPairwiseAlignment(final Parameters parameters) {
-        this.parameters = parameters;
-    }
-
-    /**
-     * Create a new SW pairwise aligner
-     *
-     * After creating the object the two sequences are aligned with an internal call to align(seq1, seq2)
-     *
-     * @param seq1 the first sequence we want to align
-     * @param seq2 the second sequence we want to align
-     * @param namedParameters the named parameter set to get our parameters from
-     */
-    public SWPairwiseAlignment(byte[] seq1, byte[] seq2, SWParameterSet namedParameters) {
-        this(seq1, seq2, namedParameters.parameters);
-    }
-
-    public SWPairwiseAlignment(byte[] seq1, byte[] seq2) {
-        this(seq1,seq2,SWParameterSet.ORIGINAL_DEFAULT);
-    }
-
-    @Override
-    public Cigar getCigar() { return alignmentResult.cigar ; }
-
-    @Override
-    public int getAlignmentStart2wrt1() { return alignmentResult.alignment_offset; }
-
-    /**
-     * Aligns the alternate sequence to the reference sequence
-     *
-     * @param reference  ref sequence
-     * @param alternate  alt sequence
-     */
-    protected void align(final byte[] reference, final byte[] alternate) {
-        if ( reference == null || reference.length == 0 || alternate == null || alternate.length == 0 )
-            throw new IllegalArgumentException("Non-null, non-empty sequences are required for the Smith-Waterman calculation");
-
-        final int n = reference.length+1;
-        final int m = alternate.length+1;
-        int[][] sw = new int[n][m];
-        if ( keepScoringMatrix ) SW = sw;
-        int[][] btrack=new int[n][m];
-
-        calculateMatrix(reference, alternate, sw, btrack);
-        alignmentResult = calculateCigar(sw, btrack, overhang_strategy); // length of the segment (continuous matches, insertions or deletions)
-    }
-
-    /**
-     * Calculates the SW matrices for the given sequences
-     *
-     * @param reference  ref sequence
-     * @param alternate  alt sequence
-     * @param sw         the Smith-Waterman matrix to populate
-     * @param btrack     the back track matrix to populate
-     */
-    protected void calculateMatrix(final byte[] reference, final byte[] alternate, final int[][] sw, final int[][] btrack) {
-        calculateMatrix(reference, alternate, sw, btrack, overhang_strategy);
-    }
-
-    /**
-     * Calculates the SW matrices for the given sequences
-     *
-     * @param reference  ref sequence
-     * @param alternate  alt sequence
-     * @param sw         the Smith-Waterman matrix to populate
-     * @param btrack     the back track matrix to populate
-     * @param overhang_strategy    the strategy to use for dealing with overhangs
-     */
-    protected void calculateMatrix(final byte[] reference, final byte[] alternate, final int[][] sw, final int[][] btrack, final OVERHANG_STRATEGY overhang_strategy) {
-        if ( reference.length == 0 || alternate.length == 0 )
-            throw new IllegalArgumentException("Non-null, non-empty sequences are required for the Smith-Waterman calculation");
-
-        final int ncol = sw[0].length;//alternate.length+1; formerly m
-        final int nrow = sw.length;// reference.length+1; formerly n
-
-        final int MATRIX_MIN_CUTOFF;   // never let matrix elements drop below this cutoff
-        if ( cutoff ) MATRIX_MIN_CUTOFF = 0;
-        else MATRIX_MIN_CUTOFF = (int) -1e8;
-
-        int lowInitValue=Integer.MIN_VALUE/2;
-        final int[] best_gap_v = new int[ncol+1];
-        Arrays.fill(best_gap_v, lowInitValue);
-        final int[] gap_size_v = new int[ncol+1];
-        final int[] best_gap_h = new int[nrow+1];
-        Arrays.fill(best_gap_h,lowInitValue);
-        final int[] gap_size_h = new int[nrow+1];
-
-        // we need to initialize the SW matrix with gap penalties if we want to keep track of indels at the edges of alignments
-        if ( overhang_strategy == OVERHANG_STRATEGY.INDEL || overhang_strategy == OVERHANG_STRATEGY.LEADING_INDEL ) {
-            // initialize the first row
-            int[] topRow=sw[0];
-            topRow[1]=parameters.w_open;
-            int currentValue = parameters.w_open;
-            for ( int i = 2; i < topRow.length; i++ ) {
-                currentValue += parameters.w_extend;
-                topRow[i]=currentValue;
-            }
-            // initialize the first column
-            sw[1][0]=parameters.w_open;
-            currentValue = parameters.w_open;
-            for ( int i = 2; i < sw.length; i++ ) {
-                currentValue += parameters.w_extend;
-                sw[i][0]=currentValue;
-            }
-        }
-        // build smith-waterman matrix and keep backtrack info:
-        int[] curRow=sw[0];
-        for ( int i = 1; i <sw.length ; i++ ) {
-            final byte a_base = reference[i-1]; // letter in a at the current pos
-            final int[] lastRow=curRow;
-            curRow=sw[i];
-            final int[] curBackTrackRow=btrack[i];
-            for ( int j = 1; j < curRow.length; j++) {
-                final byte b_base = alternate[j-1]; // letter in b at the current pos
-                // in other words, step_diag = sw[i-1][j-1] + wd(a_base,b_base);
-                final int step_diag = lastRow[j-1] + wd(a_base,b_base);
-
-                // optimized "traversal" of all the matrix cells above the current one (i.e. traversing
-                // all 'step down' events that would end in the current cell. The optimized code
-                // does exactly the same thing as the commented out loop below. IMPORTANT:
-                // the optimization works ONLY for linear w(k)=wopen+(k-1)*wextend!!!!
-
-                // if a gap (length 1) was just opened above, this is the cost of arriving to the current cell:
-                int prev_gap = lastRow[j]+parameters.w_open;
-                best_gap_v[j] += parameters.w_extend; // for the gaps that were already opened earlier, extending them by 1 costs w_extend
-                 if (  prev_gap > best_gap_v[j]  ) {
-                    // opening a gap just before the current cell results in better score than extending by one
-                    // the best previously opened gap. This will hold for ALL cells below: since any gap
-                    // once opened always costs w_extend to extend by another base, we will always get a better score
-                    // by arriving to any cell below from the gap we just opened (prev_gap) rather than from the previous best gap
-                    best_gap_v[j] = prev_gap;
-                    gap_size_v[j] = 1; // remember that the best step-down gap from above has length 1 (we just opened it)
-                } else {
-                    // previous best gap is still the best, even after extension by another base, so we just record that extension:
-                    gap_size_v[j]++;
-                }
-
-                final int step_down = best_gap_v[j] ;
-                final int kd = gap_size_v[j];
-
-                // optimized "traversal" of all the matrix cells to the left of the current one (i.e. traversing
-                // all 'step right' events that would end in the current cell. The optimized code
-                // does exactly the same thing as the commented out loop below. IMPORTANT:
-                // the optimization works ONLY for linear w(k)=wopen+(k-1)*wextend!!!!
-
-                prev_gap =curRow[j-1]  + parameters.w_open; // what would it cost us to open length 1 gap just to the left from current cell
-                best_gap_h[i] += parameters.w_extend; // previous best gap would cost us that much if extended by another base
-                if ( prev_gap > best_gap_h[i] ) {
-                    // newly opened gap is better (score-wise) than any previous gap with the same row index i; since
-                    // gap penalty is linear with k, this new gap location is going to remain better than any previous ones
-                    best_gap_h[i] = prev_gap;
-                    gap_size_h[i] = 1;
-                } else {
-                    gap_size_h[i]++;
-                }
-
-                final int step_right = best_gap_h[i];
-                final int ki = gap_size_h[i];
-
-                //priority here will be step diagonal, step right, step down
-                final boolean diagHighestOrEqual = (step_diag >= step_down)
-                                                && (step_diag >= step_right);
-
-                if ( diagHighestOrEqual ) {
-                    curRow[j]=Math.max(MATRIX_MIN_CUTOFF,step_diag);
-                    curBackTrackRow[j]=0;
-                }
-                else if(step_right>=step_down) { //moving right is the highest
-                    curRow[j]=Math.max(MATRIX_MIN_CUTOFF,step_right);
-                    curBackTrackRow[j]=-ki; // negative = horizontal
-                }
-                else  {
-                    curRow[j]=Math.max(MATRIX_MIN_CUTOFF,step_down);
-                    curBackTrackRow[j]= kd; // positive=vertical
-                }
-            }
-        }
-    }
-
-    /*
-     * Class to store the result of calculating the CIGAR from the back track matrix
-     */
-    protected final class SWPairwiseAlignmentResult {
-        public final Cigar cigar;
-        public final int alignment_offset;
-        public SWPairwiseAlignmentResult(final Cigar cigar, final int alignment_offset) {
-            this.cigar = cigar;
-            this.alignment_offset = alignment_offset;
-        }
-    }
-
-    /**
-     * Calculates the CIGAR for the alignment from the back track matrix
-     *
-     * @param sw                   the Smith-Waterman matrix to use
-     * @param btrack               the back track matrix to use
-     * @param overhang_strategy    the strategy to use for dealing with overhangs
-     * @return non-null SWPairwiseAlignmentResult object
-     */
-    protected SWPairwiseAlignmentResult calculateCigar(final int[][] sw, final int[][] btrack, final OVERHANG_STRATEGY overhang_strategy) {
-        // p holds the position we start backtracking from; we will be assembling a cigar in the backwards order
-        int p1 = 0, p2 = 0;
-
-        int refLength = sw.length-1;
-        int altLength = sw[0].length-1;
-
-        int maxscore = Integer.MIN_VALUE; // sw scores are allowed to be negative
-        int segment_length = 0; // length of the segment (continuous matches, insertions or deletions)
-
-        // if we want to consider overhangs as legitimate operators, then just start from the corner of the matrix
-        if ( overhang_strategy == OVERHANG_STRATEGY.INDEL ) {
-            p1 = refLength;
-            p2 = altLength;
-        } else {
-            // look for the largest score on the rightmost column. we use >= combined with the traversal direction
-            // to ensure that if two scores are equal, the one closer to diagonal gets picked
-            //Note: this is not technically smith-waterman, as by only looking for max values on the right we are
-            //excluding high scoring local alignments
-            p2=altLength;
-
-            for(int i=1;i<sw.length;i++)  {
-               final int curScore = sw[i][altLength];
-               if (curScore >= maxscore ) {
-                    p1 = i;
-                    maxscore = curScore;
-               }
-            }
-            // now look for a larger score on the bottom-most row
-            if ( overhang_strategy != OVERHANG_STRATEGY.LEADING_INDEL ) {
-                final int[] bottomRow=sw[refLength];
-                for ( int j = 1 ; j < bottomRow.length; j++) {
-                    int curScore=bottomRow[j];
-                    // data_offset is the offset of [n][j]
-                    if ( curScore > maxscore ||
-                            (curScore == maxscore && Math.abs(refLength-j) < Math.abs(p1 - p2) ) ) {
-                        p1 = refLength;
-                        p2 = j ;
-                        maxscore = curScore;
-                        segment_length = altLength - j ; // end of sequence 2 is overhanging; we will just record it as 'M' segment
-                    }
-                }
-            }
-        }
-        final List<CigarElement> lce = new ArrayList<CigarElement>(5);
-        if ( segment_length > 0 && overhang_strategy == OVERHANG_STRATEGY.SOFTCLIP ) {
-            lce.add(makeElement(State.CLIP, segment_length));
-            segment_length = 0;
-        }
-
-        // we will be placing all insertions and deletions into sequence b, so the states are named w/regard
-        // to that sequence
-
-        State state = State.MATCH;
-        do {
-            int btr = btrack[p1][p2];
-            State new_state;
-            int step_length = 1;
-            if ( btr > 0 ) {
-                new_state = State.DELETION;
-                step_length = btr;
-            } else if ( btr < 0 ) {
-                new_state = State.INSERTION;
-                step_length = (-btr);
-            } else new_state = State.MATCH; // and step_length =1, already set above
-
-            // move to next best location in the sw matrix:
-            switch( new_state ) {
-                case MATCH:  p1--; p2--; break; // move back along the diag in the sw matrix
-                case INSERTION: p2 -= step_length; break; // move left
-                case DELETION:  p1 -= step_length; break; // move up
-            }
-
-            // now let's see if the state actually changed:
-            if ( new_state == state ) segment_length+=step_length;
-            else {
-                // state changed, lets emit previous segment, whatever it was (Insertion Deletion, or (Mis)Match).
-                lce.add(makeElement(state, segment_length));
-                segment_length = step_length;
-                state = new_state;
-            }
-        // next condition is equivalent to  while ( sw[p1][p2] != 0 ) (with modified p1 and/or p2:
-        } while ( p1 > 0 && p2 > 0 );
-
-        // post-process the last segment we are still keeping;
-        // NOTE: if reads "overhangs" the ref on the left (i.e. if p2>0) we are counting
-        // those extra bases sticking out of the ref into the first cigar element if DO_SOFTCLIP is false;
-        // otherwise they will be softclipped. For instance,
-        // if read length is 5 and alignment starts at offset -2 (i.e. read starts before the ref, and only
-        // last 3 bases of the read overlap with/align to the ref), the cigar will be still 5M if
-        // DO_SOFTCLIP is false or 2S3M if DO_SOFTCLIP is true.
-        // The consumers need to check for the alignment offset and deal with it properly.
-        final int alignment_offset;
-        if ( overhang_strategy == OVERHANG_STRATEGY.SOFTCLIP ) {
-            lce.add(makeElement(state, segment_length));
-            if ( p2 > 0 ) lce.add(makeElement(State.CLIP, p2));
-            alignment_offset = p1;
-        } else if ( overhang_strategy == OVERHANG_STRATEGY.IGNORE ) {
-            lce.add(makeElement(state, segment_length + p2));
-            alignment_offset = p1 - p2;
-        } else {  // overhang_strategy == OVERHANG_STRATEGY.INDEL || overhang_strategy == OVERHANG_STRATEGY.LEADING_INDEL
-
-            // take care of the actual alignment
-            lce.add(makeElement(state, segment_length));
-
-            // take care of overhangs at the beginning of the alignment
-            if ( p1 > 0 )
-                lce.add(makeElement(State.DELETION, p1));
-            else if ( p2 > 0 )
-                lce.add(makeElement(State.INSERTION, p2));
-
-            alignment_offset = 0;
-        }
-
-        Collections.reverse(lce);
-        return new SWPairwiseAlignmentResult(AlignmentUtils.consolidateCigar(new Cigar(lce)), alignment_offset);
-    }
-
-    protected CigarElement makeElement(final State state, final int length) {
-        CigarOperator op = null;
-        switch (state) {
-            case MATCH: op = CigarOperator.M; break;
-            case INSERTION: op = CigarOperator.I; break;
-            case DELETION: op = CigarOperator.D; break;
-            case CLIP: op = CigarOperator.S; break;
-        }
-        return new CigarElement(length, op);
-    }
-
-
-    private int wd(final byte x, final byte y) {
-        return (x == y ? parameters.w_match : parameters.w_mismatch);
-    }
-
-    public void printAlignment(byte[] ref, byte[] read) {
-        printAlignment(ref,read,100);
-    }
-    
-    public void printAlignment(byte[] ref, byte[] read, int width) {
-        StringBuilder bread = new StringBuilder();
-        StringBuilder bref = new StringBuilder();
-        StringBuilder match = new StringBuilder();
-
-        int i = 0;
-        int j = 0;
-
-        final int offset = getAlignmentStart2wrt1();
-
-        Cigar cigar = getCigar();
-
-        if ( overhang_strategy != OVERHANG_STRATEGY.SOFTCLIP ) {
-
-            // we need to go through all the hassle below only if we do not do softclipping;
-            // otherwise offset is never negative
-            if ( offset < 0 ) {
-                for (  ; j < (-offset) ; j++ ) {
-                    bread.append((char)read[j]);
-                    bref.append(' ');
-                    match.append(' ');
-                }
-                // at negative offsets, our cigar's first element carries overhanging bases
-                // that we have just printed above. Tweak the first element to
-                // exclude those bases. Here we create a new list of cigar elements, so the original
-                // list/original cigar are unchanged (they are unmodifiable anyway!)
-
-                List<CigarElement> tweaked = new ArrayList<CigarElement>();
-                tweaked.addAll(cigar.getCigarElements());
-                tweaked.set(0,new CigarElement(cigar.getCigarElement(0).getLength()+offset,
-                        cigar.getCigarElement(0).getOperator()));
-                cigar = new Cigar(tweaked);
-            }
-        }
-
-        if ( offset > 0 ) { // note: the way this implementation works, cigar will ever start from S *only* if read starts before the ref, i.e. offset = 0
-            for (  ; i < getAlignmentStart2wrt1() ; i++ ) {
-                bref.append((char)ref[i]);
-                bread.append(' ');
-                match.append(' ');
-            }
-        }
-        
-        for ( CigarElement e : cigar.getCigarElements() ) {
-            switch (e.getOperator()) {
-                case M :
-                    for ( int z = 0 ; z < e.getLength() ; z++, i++, j++  ) {
-                        bref.append((i<ref.length)?(char)ref[i]:' ');
-                        bread.append((j < read.length)?(char)read[j]:' ');
-                        match.append( ( i<ref.length && j < read.length ) ? (ref[i] == read[j] ? '.':'*' ) : ' ' );
-                    }
-                    break;
-                case I :
-                    for ( int z = 0 ; z < e.getLength(); z++, j++ ) {
-                        bref.append('-');
-                        bread.append((char)read[j]);
-                        match.append('I');
-                    }
-                    break;
-                case S :
-                    for ( int z = 0 ; z < e.getLength(); z++, j++ ) {
-                        bref.append(' ');
-                        bread.append((char)read[j]);
-                        match.append('S');
-                    }
-                    break;
-                case D:
-                    for ( int z = 0 ; z < e.getLength(); z++ , i++ ) {
-                        bref.append((char)ref[i]);
-                        bread.append('-');
-                        match.append('D');
-                    }
-                    break;
-                default:
-                    throw new GATKException("Unexpected Cigar element:" + e.getOperator());
-            }
-        }
-        for ( ; i < ref.length; i++ ) bref.append((char)ref[i]);
-        for ( ; j < read.length; j++ ) bread.append((char)read[j]);
-
-        int pos = 0 ;
-        int maxlength = Math.max(match.length(),Math.max(bread.length(),bref.length()));
-        while ( pos < maxlength ) {
-            print_cautiously(match,pos,width);
-            print_cautiously(bread,pos,width);
-            print_cautiously(bref,pos,width);
-            System.out.println();
-            pos += width;
-        }
-    }
-
-    /** String builder's substring is extremely stupid: instead of trimming and/or returning an empty
-     * string when one end/both ends of the interval are out of range, it crashes with an
-     * exception. This utility function simply prints the substring if the interval is within the index range
-     * or trims accordingly if it is not.
-     * @param s
-     * @param start
-     * @param width
-     */
-    private static void print_cautiously(StringBuilder s, int start, int width) {
-        if ( start >= s.length() ) {
-            System.out.println();
-            return;
-        }
-        int end = Math.min(start+width,s.length());
-        System.out.println(s.substring(start,end));
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/smithwaterman/SWPairwiseAlignmentMain.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/smithwaterman/SWPairwiseAlignmentMain.java
deleted file mode 100644
index 3d2ddd1..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/smithwaterman/SWPairwiseAlignmentMain.java
+++ /dev/null
@@ -1,221 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.smithwaterman;
-
-import org.broadinstitute.gatk.utils.Utils;
-import org.broadinstitute.gatk.utils.collections.Pair;
-
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-/**
- * Simple program to run SW performance test.
- *
- * // TODO -- should be replaced with Caliper before using again
- *
- * User: depristo
- * Date: 2/28/13
- * Time: 4:54 PM
- * To change this template use File | Settings | File Templates.
- */
-public class SWPairwiseAlignmentMain {
-    //    BELOW: main() method for testing; old implementations of the core methods are commented out below;
-//           uncomment everything through the end of the file if benchmarking of new vs old implementations is needed.
-
-    public static void main(String argv[]) {
-//        String ref="CACGAGCATATGTGTACATGAATTTGTATTGCACATGTGTTTAATGCGAACACGTGTCATGTGTATGTGTTCACATGCATGTGTGTCT";
-//        String read =   "GCATATGTTTACATGAATTTGTATTGCACATGTGTTTAATGCGAACACGTGTCATGTGTGTGTTCACATGCATGTG";
-
-        String ref = null;
-        String read = null;
-
-        Map<String,List<String>> args = processArgs(argv);
-
-        List<String> l = args.get("SEQ");
-        args.remove("SEQ");
-        if ( l == null ) {
-            System.err.println("SEQ argument is missing. Two input sequences must be provided");
-            System.exit(1);
-        }
-        if ( l.size() != 2 ) {
-            System.err.println("Two input sequences (SEQ arguments) must be provided. Found "+l.size()+" instead");
-            System.exit(1);
-        }
-
-        ref = l.get(0);
-        read = l.get(1);
-
-        Double m = extractSingleDoubleArg("MATCH",args);
-        Double mm = extractSingleDoubleArg("MISMATCH",args);
-        Double open = extractSingleDoubleArg("OPEN",args);
-        Double ext = extractSingleDoubleArg("EXTEND",args);
-
-        Boolean reverse = extractSingleBooleanArg("REVERSE",args);
-        if ( reverse != null && reverse.booleanValue() == true ) {
-            ref = Utils.reverse(ref);
-            read = Utils.reverse(read);
-        }
-
-        Boolean print_mat = extractSingleBooleanArg("PRINT_MATRIX",args);
-        Boolean cut = extractSingleBooleanArg("CUTOFF",args);
-        if ( cut != null ) SWPairwiseAlignment.cutoff = cut;
-
-        if ( args.size() != 0 ) {
-            System.err.println("Unknown argument on the command line: "+args.keySet().iterator().next());
-            System.exit(1);
-        }
-
-        final int w_match, w_mismatch, w_open, w_extend;
-
-        w_match = (m == null ? 30 : m.intValue());
-        w_mismatch = (mm == null ? -10 : mm.intValue());
-        w_open = (open == null ? -10 : open.intValue());
-        w_extend = (ext == null ? -2 : ext.intValue());
-
-
-        SWPairwiseAlignment.keepScoringMatrix = true;
-        SWPairwiseAlignment a = new SWPairwiseAlignment(ref.getBytes(),read.getBytes(),w_match,w_mismatch,w_open,w_extend);
-
-        System.out.println("start="+a.getAlignmentStart2wrt1()+", cigar="+a.getCigar()+
-                " length1="+ref.length()+" length2="+read.length());
-
-
-        System.out.println();
-        a.printAlignment(ref.getBytes(),read.getBytes());
-
-        System.out.println();
-        if ( print_mat != null && print_mat == true ) {
-            print(a.SW,ref.getBytes(),read.getBytes());
-        }
-    }
-
-    private static void print(final int[][] s, final byte[] a, final byte[] b) {
-        int n = a.length+1;
-        int m = b.length+1;
-        System.out.print("         ");
-        for ( int j = 1 ; j < m ; j++) System.out.printf(" %5c",(char)b[j-1]) ;
-        System.out.println();
-
-        for ( int i = 0 ; i < n ; i++) {
-            if ( i > 0 ) System.out.print((char)a[i-1]);
-            else System.out.print(' ');
-            System.out.print("  ");
-            for ( int j = 0; j < m ; j++ ) {
-                System.out.printf(" %5.1f",s[i][j]);
-            }
-            System.out.println();
-        }
-    }
-
-
-    static Pair<String,Integer> getArg(String prefix, String argv[], int i) {
-        String arg = null;
-        if ( argv[i].startsWith(prefix) ) {
-            arg = argv[i].substring(prefix.length());
-            if( arg.length() == 0 ) {
-                i++;
-                if ( i < argv.length ) arg = argv[i];
-                else {
-                    System.err.println("No value found after " + prefix + " argument tag");
-                    System.exit(1);
-                }
-            }
-            i++;
-        }
-        return new Pair<String,Integer>(arg,i);
-    }
-
-    static Map<String,List<String>> processArgs(String argv[]) {
-        Map<String,List<String>> args = new HashMap<String,List<String>>();
-
-        for ( int i = 0; i < argv.length ; i++ ) {
-            String arg = argv[i];
-            int pos = arg.indexOf('=');
-            if ( pos < 0 ) {
-                System.err.println("Argument "+arg+" is not of the form <ARG>=<VAL>");
-                System.exit(1);
-            }
-            String val = arg.substring(pos+1);
-            if ( val.length() == 0 ) {
-                // there was a space between '=' and the value
-                i++;
-                if ( i < argv.length ) val = argv[i];
-                else {
-                    System.err.println("No value found after " + arg + " argument tag");
-                    System.exit(1);
-                }
-            }
-            arg = arg.substring(0,pos);
-
-            List<String> l = args.get(arg);
-            if ( l == null ) {
-                l = new ArrayList<String>();
-                args.put(arg,l);
-            }
-            l.add(val);
-        }
-        return args;
-    }
-
-    static Double extractSingleDoubleArg(String argname, Map<String,List<String>> args) {
-        List<String> l = args.get(argname);
-        args.remove(argname);
-        if ( l == null ) return null;
-
-        if ( l.size() > 1 ) {
-            System.err.println("Only one "+argname+" argument is allowed");
-            System.exit(1);
-        }
-        double d=0;
-        try {
-            d = Double.parseDouble(l.get(0));
-        } catch ( NumberFormatException e) {
-            System.err.println("Can not parse value provided for "+argname+" argument ("+l.get(0)+")");
-            System.exit(1);
-        }
-        System.out.println("Argument "+argname+" set to "+d);
-        return new Double(d);
-    }
-
-
-    static Boolean extractSingleBooleanArg(String argname, Map<String,List<String>> args) {
-        List<String> l = args.get(argname);
-        args.remove(argname);
-        if ( l == null ) return null;
-
-        if ( l.size() > 1 ) {
-            System.err.println("Only one "+argname+" argument is allowed");
-            System.exit(1);
-        }
-        if ( l.get(0).equals("true") ) return Boolean.valueOf(true);
-        if ( l.get(0).equals("false") ) return Boolean.valueOf(false);
-        System.err.println("Can not parse value provided for "+argname+" argument ("+l.get(0)+"); true/false are allowed");
-        System.exit(1);
-        return Boolean.valueOf(false); // This value isn't used because it is preceded by System.exit(1)
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/smithwaterman/SWParameterSet.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/smithwaterman/SWParameterSet.java
deleted file mode 100644
index 7226a98..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/smithwaterman/SWParameterSet.java
+++ /dev/null
@@ -1,51 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.smithwaterman;
-
-/**
- * Handy named collection of common Smith-waterman parameter sets
- *
- * User: depristo
- * Date: 4/11/13
- * Time: 12:02 PM
- */
-public enum SWParameterSet {
-    // match=1, mismatch = -1/3, gap=-(1+k/3)
-    ORIGINAL_DEFAULT(new Parameters(3,-1,-4,-3)),
-
-    /**
-     * A standard set of values for NGS alignments
-     */
-    STANDARD_NGS(new Parameters(25, -50, -110, -6));
-
-    protected Parameters parameters;
-
-    SWParameterSet(final Parameters parameters) {
-        if ( parameters == null ) throw new IllegalArgumentException("parameters cannot be null");
-
-        this.parameters = parameters;
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/smithwaterman/SmithWaterman.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/smithwaterman/SmithWaterman.java
deleted file mode 100644
index c4184e1..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/smithwaterman/SmithWaterman.java
+++ /dev/null
@@ -1,57 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.smithwaterman;
-
-import htsjdk.samtools.Cigar;
-
-/**
- * Generic interface for SmithWaterman calculations
- *
- * This interface allows clients to use a generic SmithWaterman variable, without propagating the specific
- * implementation of SmithWaterman throughout their code:
- *
- * SmithWaterman sw = new SpecificSmithWatermanImplementation(ref, read, params)
- * sw.getCigar()
- * sw.getAlignmentStart2wrt1()
- *
- * User: depristo
- * Date: 4/26/13
- * Time: 8:24 AM
- */
-public interface SmithWaterman {
-
-    /**
-     * Get the cigar string for the alignment of this SmithWaterman class
-     * @return a non-null cigar
-     */
-    public Cigar getCigar();
-
-    /**
-     * Get the starting position of the read sequence in the reference sequence
-     * @return a positive integer >= 0
-     */
-    public int getAlignmentStart2wrt1();
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/text/ListFileUtils.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/text/ListFileUtils.java
deleted file mode 100644
index d6e1bcb..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/text/ListFileUtils.java
+++ /dev/null
@@ -1,344 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.text;
-
-import org.broadinstitute.gatk.utils.commandline.ParsingEngine;
-import org.broadinstitute.gatk.utils.commandline.RodBinding;
-import org.broadinstitute.gatk.utils.commandline.Tags;
-import org.broadinstitute.gatk.engine.datasources.reads.SAMReaderID;
-import org.broadinstitute.gatk.engine.refdata.tracks.FeatureManager;
-import org.broadinstitute.gatk.engine.refdata.utils.RMDTriplet;
-import org.broadinstitute.gatk.utils.exceptions.UserException;
-
-import java.io.File;
-import java.io.FileNotFoundException;
-import java.io.IOException;
-import java.util.*;
-import java.util.regex.Pattern;
-
-/**
- * A collection of convenience methods for working with list files.
- */
-public class ListFileUtils {
-    /**
-     * Lines starting with this String in .list files are considered comments.
-     */
-    public static final String LIST_FILE_COMMENT_START = "#";        
-
-    /**
-     * Unpack the bam files to be processed, given a list of files.  That list of files can
-     * itself contain entries which are lists of other files to be read (note: you cannot have lists
-     * of lists of lists). Lines in .list files containing only whitespace or which begin with
-     * LIST_FILE_COMMENT_START are ignored.
-     *
-     * @param samFiles The sam files, in string format.
-     * @param parser Parser
-     * @return a flattened list of the bam files provided
-     */
-    public static List<SAMReaderID> unpackBAMFileList(final List<String> samFiles, final ParsingEngine parser) {
-        List<SAMReaderID> unpackedReads = new ArrayList<SAMReaderID>();
-        for( String inputFileName: samFiles ) {
-            Tags inputFileNameTags = parser.getTags(inputFileName);
-            inputFileName = expandFileName(inputFileName);
-            if (inputFileName.toLowerCase().endsWith(".list") ) {
-                try {
-                    for ( String fileName : new XReadLines(new File(inputFileName), true, LIST_FILE_COMMENT_START) ) {
-                        unpackedReads.add(new SAMReaderID(fileName,parser.getTags(inputFileName)));
-                    }
-                }
-                catch( FileNotFoundException ex ) {
-                    throw new UserException.CouldNotReadInputFile(new File(inputFileName), "Unable to find file while unpacking reads", ex);
-                }
-            }
-            else if(inputFileName.toLowerCase().endsWith(".bam")) {
-                unpackedReads.add(new SAMReaderID(inputFileName,inputFileNameTags));
-            }
-            else if(inputFileName.endsWith("stdin")) {
-                unpackedReads.add(new SAMReaderID(inputFileName,inputFileNameTags));
-            }
-            else {
-                throw new UserException.CommandLineException(String.format("The GATK reads argument (-I, --input_file) supports only BAM files with the .bam extension and lists of BAM files " +
-                        "with the .list extension, but the file %s has neither extension.  Please ensure that your BAM file or list " +
-                        "of BAM files is in the correct format, update the extension, and try again.",inputFileName));
-            }
-        }
-        return unpackedReads;
-    }
-
-    /**
-     * Convert command-line argument representation of ROD bindings to something more easily understandable by the engine.
-     * @param RODBindings a text equivale
-     * @param parser Parser
-     * @return a list of expanded, bound RODs.
-     */
-    @Deprecated
-    @SuppressWarnings("unused") // TODO: Who is still using this? External walkers?
-    public static Collection<RMDTriplet> unpackRODBindingsOldStyle(final Collection<String> RODBindings, final ParsingEngine parser) {
-        // todo -- this is a strange home for this code.  Move into ROD system
-        Collection<RMDTriplet> rodBindings = new ArrayList<RMDTriplet>();
-
-        for (String fileName: RODBindings) {
-            final Tags tags = parser.getTags(fileName);
-            fileName = expandFileName(fileName);
-
-            List<String> positionalTags = tags.getPositionalTags();
-            if(positionalTags.size() != 2)
-                throw new UserException("Invalid syntax for -B (reference-ordered data) input flag.  " +
-                        "Please use the following syntax when providing reference-ordered " +
-                        "data: -B:<name>,<type> <filename>.");
-            // Assume that if tags are present, those tags are name and type.
-            // Name is always first, followed by type.
-            String name = positionalTags.get(0);
-            String type = positionalTags.get(1);
-
-            RMDTriplet.RMDStorageType storageType;
-            if(tags.getValue("storage") != null)
-                storageType = Enum.valueOf(RMDTriplet.RMDStorageType.class,tags.getValue("storage"));
-            else if(fileName.toLowerCase().endsWith("stdin"))
-                storageType = RMDTriplet.RMDStorageType.STREAM;
-            else
-                storageType = RMDTriplet.RMDStorageType.FILE;
-
-            rodBindings.add(new RMDTriplet(name,type,fileName,storageType,tags));
-        }
-
-        return rodBindings;
-    }
-
-    /**
-     * Convert command-line argument representation of ROD bindings to something more easily understandable by the engine.
-     * @param RODBindings a text equivale
-     * @param parser Parser
-     * @return a list of expanded, bound RODs.
-     */
-    @SuppressWarnings("unchecked")
-    public static Collection<RMDTriplet> unpackRODBindings(final Collection<RodBinding> RODBindings, @SuppressWarnings("unused") final ParsingEngine parser) {
-        // todo -- this is a strange home for this code.  Move into ROD system
-        Collection<RMDTriplet> rodBindings = new ArrayList<RMDTriplet>();
-        FeatureManager builderForValidation = new FeatureManager();
-
-        for (RodBinding rodBinding: RODBindings) {
-            String argValue = rodBinding.getSource();
-            String fileName = expandFileName(argValue);
-            String name = rodBinding.getName();
-            String type = rodBinding.getTribbleType();
-
-            RMDTriplet.RMDStorageType storageType;
-            if(rodBinding.getTags().getValue("storage") != null)
-                storageType = Enum.valueOf(RMDTriplet.RMDStorageType.class,rodBinding.getTags().getValue("storage"));
-            else if(fileName.toLowerCase().endsWith("stdin"))
-                storageType = RMDTriplet.RMDStorageType.STREAM;
-            else
-                storageType = RMDTriplet.RMDStorageType.FILE;
-
-            RMDTriplet triplet = new RMDTriplet(name,type,fileName,storageType,rodBinding.getTags());
-
-            // validate triplet type
-            FeatureManager.FeatureDescriptor descriptor = builderForValidation.getByTriplet(triplet);
-            if ( descriptor == null )
-                throw new UserException.UnknownTribbleType(rodBinding.getTribbleType(),
-                        String.format("Field %s had provided type %s but there's no such Tribble type.  The compatible types are: %n%s",
-                                rodBinding.getName(), rodBinding.getTribbleType(), builderForValidation.userFriendlyListOfAvailableFeatures(rodBinding.getType())));
-            if ( ! rodBinding.getType().isAssignableFrom(descriptor.getFeatureClass()) )
-                throw new UserException.BadArgumentValue(rodBinding.getName(),
-                        String.format("Field %s expects Features of type %s, but the input file produces Features of type %s. The compatible types are: %n%s",
-                                rodBinding.getName(), rodBinding.getType().getSimpleName(), descriptor.getSimpleFeatureName(),
-                                builderForValidation.userFriendlyListOfAvailableFeatures(rodBinding.getType())));
-
-
-            rodBindings.add(triplet);
-        }
-
-        return rodBindings;
-    }
-
-    /**
-     * Expand any special characters that appear in the filename.  Right now, '-' is expanded to
-     * '/dev/stdin' only, but in the future, special characters like '~' and '*' that are passed
-     * directly to the command line in some circumstances could be expanded as well.  Be careful
-     * when adding UNIX-isms.
-     * @param argument the text appearing on the command-line.
-     * @return An expanded string suitable for opening by Java/UNIX file handling utilities.
-     */
-    private static String expandFileName(String argument) {
-        if(argument.trim().equals("-"))
-            return "/dev/stdin";
-        return argument;
-    }
-
-    /**
-     * Returns a new set of values, containing a final set of values expanded from values
-     * <p/>
-     * Each element E of values can either be a literal string or a file ending in .list.
-     * For each E ending in .list we try to read a file named E from disk, and if possible
-     * all lines from that file are expanded into unique values.
-     *
-     * @param values Original values
-     * @return entries from values or the files listed in values
-     */
-    public static Set<String> unpackSet(Collection<String> values) {
-        if (values == null)
-            throw new NullPointerException("values cannot be null");
-        Set<String> unpackedValues = new LinkedHashSet<String>();
-        // Let's first go through the list and see if we were given any files.
-        // We'll add every entry in the file to our set, and treat the entries as
-        // if they had been specified on the command line.
-        for (String value : values) {
-            File file = new File(value);
-            if (value.toLowerCase().endsWith(".list") && file.exists()) {
-                try {
-                    unpackedValues.addAll(new XReadLines(file, true, LIST_FILE_COMMENT_START).readLines());
-                } catch (IOException e) {
-                    throw new UserException.CouldNotReadInputFile(file, e);
-                }
-            } else {
-                unpackedValues.add(value);
-            }
-        }
-        return unpackedValues;
-    }
-
-    /**
-     * Returns a new set of values including only values listed by filters
-     * <p/>
-     * Each element E of values can either be a literal string or a file.  For each E,
-     * we try to read a file named E from disk, and if possible all lines from that file are expanded
-     * into unique names.
-     * <p/>
-     * Filters may also be a file of filters.
-     *
-     * @param values     Values or files with values
-     * @param filters    Filters or files with filters
-     * @param exactMatch If true match filters exactly, otherwise use as both exact and regular expressions
-     * @return entries from values or the files listed in values, filtered by filters
-     */
-    public static Set<String> includeMatching(Collection<String> values, Collection<String> filters, boolean exactMatch) {
-        return includeMatching(values, IDENTITY_STRING_CONVERTER, filters, exactMatch);
-    }
-
-    /**
-     * Converts a type T to a String representation.
-     *
-     * @param <T> Type to convert to a String.
-     */
-    public static interface StringConverter<T> {
-        String convert(T value);
-    }
-
-    /**
-     * Returns a new set of values including only values matching filters
-     * <p/>
-     * Filters may also be a file of filters.
-     * <p/>
-     * The converter should convert T to a unique String for each value in the set.
-     *
-     * @param values     Values or files with values
-     * @param converter  Converts values to strings
-     * @param filters    Filters or files with filters
-     * @param exactMatch If true match filters exactly, otherwise use as both exact and regular expressions
-     * @return entries from values including only values matching filters
-     */
-    public static <T> Set<T> includeMatching(Collection<T> values, StringConverter<T> converter, Collection<String> filters, boolean exactMatch) {
-        if (values == null)
-            throw new NullPointerException("values cannot be null");
-        if (converter == null)
-            throw new NullPointerException("converter cannot be null");
-        if (filters == null)
-            throw new NullPointerException("filters cannot be null");
-
-        Set<String> unpackedFilters = unpackSet(filters);
-        Set<T> filteredValues = new LinkedHashSet<T>();
-        Collection<Pattern> patterns = null;
-        if (!exactMatch)
-            patterns = compilePatterns(unpackedFilters);
-        for (T value : values) {
-            String converted = converter.convert(value);
-            if (unpackedFilters.contains(converted)) {
-                filteredValues.add(value);
-            } else if (!exactMatch) {
-                for (Pattern pattern : patterns)
-                    if (pattern.matcher(converted).find())
-                        filteredValues.add(value);
-            }
-        }
-        return filteredValues;
-    }
-    
-    /**
-     * Returns a new set of values excluding any values matching filters.
-     * <p/>
-     * Filters may also be a file of filters.
-     * <p/>
-     * The converter should convert T to a unique String for each value in the set.
-     *
-     * @param values     Values or files with values
-     * @param converter  Converts values to strings
-     * @param filters    Filters or files with filters
-     * @param exactMatch If true match filters exactly, otherwise use as both exact and regular expressions
-     * @return entries from values exluding any values matching filters
-     */
-    public static <T> Set<T> excludeMatching(Collection<T> values, StringConverter<T> converter, Collection<String> filters, boolean exactMatch) {
-        if (values == null)
-            throw new NullPointerException("values cannot be null");
-        if (converter == null)
-            throw new NullPointerException("converter cannot be null");
-        if (filters == null)
-            throw new NullPointerException("filters cannot be null");
-
-        Set<String> unpackedFilters = unpackSet(filters);
-        Set<T> filteredValues = new LinkedHashSet<T>();
-        filteredValues.addAll(values);
-        Collection<Pattern> patterns = null;
-        if (!exactMatch)
-            patterns = compilePatterns(unpackedFilters);
-        for (T value : values) {
-            String converted = converter.convert(value);
-            if (unpackedFilters.contains(converted)) {
-                filteredValues.remove(value);
-            } else if (!exactMatch) {
-                for (Pattern pattern : patterns)
-                    if (pattern.matcher(converted).find())
-                        filteredValues.remove(value);
-            }
-        }
-        return filteredValues;
-    }
-
-    private static Collection<Pattern> compilePatterns(Collection<String> filters) {
-        Collection<Pattern> patterns = new ArrayList<Pattern>();
-        for (String filter: filters) {
-            patterns.add(Pattern.compile(filter));
-        }
-        return patterns;
-    }
-
-    protected static final StringConverter<String> IDENTITY_STRING_CONVERTER = new StringConverter<String>() {
-        @Override
-        public String convert(String value) {
-            return value;
-        }
-    };
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/text/TextFormattingUtils.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/text/TextFormattingUtils.java
deleted file mode 100644
index 65fb970..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/text/TextFormattingUtils.java
+++ /dev/null
@@ -1,172 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.text;
-
-import org.apache.log4j.Logger;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-
-import java.io.IOException;
-import java.io.StringReader;
-import java.util.*;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
-
-/**
- * Common utilities for dealing with text formatting.
- *
- * @author mhanna
- * @version 0.1
- */
-public class TextFormattingUtils {
-    /**
-     * our log, which we want to capture anything from this class
-     */
-    private static Logger logger = Logger.getLogger(TextFormattingUtils.class);    
-
-    /**
-     * The default line width, for GATK output written to the screen.
-     */
-    public static final int DEFAULT_LINE_WIDTH = 120;
-
-    /**
-     * Simple implementation of word-wrap for a line of text.  Idea and
-     * regexp shamelessly stolen from http://joust.kano.net/weblog/archives/000060.html.
-     * Regexp can probably be simplified for our application.
-     * @param text Text to wrap.
-     * @param width Maximum line width.
-     * @return A list of word-wrapped lines.
-     */
-    public static List<String> wordWrap( String text, int width ) {
-        Pattern wrapper = Pattern.compile( String.format(".{0,%d}(?:\\S(?:[\\s|]|$)|$)", width-1) );
-        Matcher matcher = wrapper.matcher( text );
-
-        List<String> wrapped = new ArrayList<String>();
-        while( matcher.find() ) {
-            // Regular expression is supersensitive to whitespace.
-            // Assert that content is present before adding the line.
-            String line = matcher.group().trim();
-            if( line.length() > 0 )
-                wrapped.add( matcher.group() );
-        }
-        return wrapped;
-    }
-
-    /**
-     * Compares two strings independently of case sensitivity.
-     */
-    public static class CaseInsensitiveComparator implements Comparator<String> {
-        /**
-         * Compares the order of lhs to rhs, not taking case into account.
-         * @param lhs First object to compare.
-         * @param rhs Second object to compare.
-         * @return 0 if objects are identical; -1 if lhs is before rhs, 1 if rhs is before lhs.  Nulls are treated as after everything else.
-         */
-        public int compare(String lhs, String rhs) {
-            if(lhs == null && rhs == null) return 0;
-            if(lhs == null) return 1;
-            if(rhs == null) return -1;
-            return lhs.toLowerCase().compareTo(rhs.toLowerCase());
-        }
-    }
-
-    /**
-     * Load the contents of a resource bundle with the given name.  If no such resource exists, warn the user
-     * and create an empty bundle.
-     * @param bundleName The name of the bundle to load.
-     * @return The best resource bundle that can be found matching the given name.
-     */
-    public static ResourceBundle loadResourceBundle(String bundleName) {
-        ResourceBundle bundle;
-        try {
-            bundle = ResourceBundle.getBundle(bundleName);
-        }
-        catch(MissingResourceException ex) {
-            //logger.warn("Unable to load help text.  Help output will be sparse.");
-            // Generate an empty resource bundle.
-            try {
-                bundle = new PropertyResourceBundle(new StringReader(""));
-            }
-            catch(IOException ioe) {
-                throw new ReviewedGATKException("No resource bundle found, and unable to create an empty placeholder.",ioe);
-            }
-        }
-        return bundle;
-    }
-
-
-    /**
-     * Returns the word starting positions within line, excluding the first position 0.
-     * The returned list is compatible with splitFixedWidth.
-     * @param line Text to parse.
-     * @return the word starting positions within line, excluding the first position 0.
-     */
-    public static List<Integer> getWordStarts(String line) {
-        if (line == null)
-            throw new ReviewedGATKException("line is null");
-        List<Integer> starts = new ArrayList<Integer>();
-        int stop = line.length();
-        for (int i = 1; i < stop; i++)
-            if (Character.isWhitespace(line.charAt(i-1)))
-                if(!Character.isWhitespace(line.charAt(i)))
-                    starts.add(i);
-        return starts;
-    }
-
-    /**
-     * Parses a fixed width line of text.
-     * @param line Text to parse.
-     * @param columnStarts the column starting positions within line, excluding the first position 0.
-     * @return The parsed string array with each entry trimmed.
-     */
-    public static String[] splitFixedWidth(String line, List<Integer> columnStarts) {
-        if (line == null)
-            throw new ReviewedGATKException("line is null");
-        if (columnStarts == null)
-            throw new ReviewedGATKException("columnStarts is null");
-        int startCount = columnStarts.size();
-        String[] row = new String[startCount + 1];
-        if (startCount == 0) {
-            row[0] = line.trim();
-        } else {
-            row[0] = line.substring(0, columnStarts.get(0)).trim();
-            for (int i = 1; i < startCount; i++)
-                row[i] = line.substring(columnStarts.get(i - 1), columnStarts.get(i)).trim();
-            row[startCount] = line.substring(columnStarts.get(startCount - 1)).trim();
-        }
-        return row;
-    }
-
-    /**
-     * Parses a line of text by whitespace.
-     * @param line Text to parse.
-     * @return The parsed string array.
-     */
-    public static String[] splitWhiteSpace(String line) {
-        if (line == null)
-            throw new ReviewedGATKException("line is null");
-        return line.trim().split("\\s+");
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/text/XReadLines.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/text/XReadLines.java
deleted file mode 100644
index f410156..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/text/XReadLines.java
+++ /dev/null
@@ -1,208 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.text;
-
-import java.io.*;
-import java.util.Iterator;
-import java.util.LinkedList;
-import java.util.List;
-
-/**
- * Support for Python-like xreadlines() function as a class.  This is an iterator and iterable over
- * Strings, each corresponding a line in the file (minus newline).  Enables the very simple accessing
- * of lines in a file as:
- *
- * xReadLines reader = new xReadLines(new File(file_name));
- * List<String> lines = reader.readLines();
- * reader.close();
- *
- * or
- *
- * for ( String line : new xReadLines(new File(file_name)) {
- *   doSomeWork(line);
- * }
- *
- * For the love of god, please use this system for reading lines in a file.
- */
-public class XReadLines implements Iterator<String>, Iterable<String> {
-    private final BufferedReader in;      // The stream we're reading from
-    private String nextLine = null;       // Return value of next call to next()
-    private final boolean trimWhitespace;
-    private final String commentPrefix;
-
-    public XReadLines(final File filename) throws FileNotFoundException {
-        this(new FileReader(filename), true, null);
-    }
-
-    public XReadLines(final File filename, final boolean trimWhitespace) throws FileNotFoundException {
-        this(new FileReader(filename), trimWhitespace, null);
-    }
-
-    /**
-     * Creates a new xReadLines object to read lines from filename
-     *
-     * @param filename file name
-     * @param trimWhitespace trim whitespace
-     * @param commentPrefix prefix for comments or null if no prefix is set
-     * @throws FileNotFoundException when the file is not found
-     */
-    public XReadLines(final File filename, final boolean trimWhitespace, final String commentPrefix) throws FileNotFoundException {
-        this(new FileReader(filename), trimWhitespace, commentPrefix);
-    }
-
-    public XReadLines(final InputStream inputStream) throws FileNotFoundException {
-        this(new InputStreamReader(inputStream), true, null);
-    }
-
-    public XReadLines(final InputStream inputStream, final boolean trimWhitespace) {
-        this(new InputStreamReader(inputStream), trimWhitespace, null);
-    }
-
-    /**
-     * Creates a new xReadLines object to read lines from an input stream
-     *
-     * @param inputStream input stream
-     * @param trimWhitespace trim whitespace
-     * @param commentPrefix prefix for comments or null if no prefix is set
-     */
-    public XReadLines(final InputStream inputStream, final boolean trimWhitespace, final String commentPrefix) {
-        this(new InputStreamReader(inputStream), trimWhitespace, commentPrefix);
-    }
-
-
-    /**
-     * Creates a new xReadLines object to read lines from a reader
-     *
-     * @param reader reader
-     */
-    public XReadLines(final Reader reader) {
-        this(reader, true, null);
-    }
-
-    /**
-     * Creates a new xReadLines object to read lines from an reader
-     *
-     * @param reader reader
-     * @param trimWhitespace trim whitespace
-     */
-    public XReadLines(final Reader reader, final boolean trimWhitespace) {
-        this(reader, trimWhitespace, null);
-    }
-
-    /**
-     * Creates a new xReadLines object to read lines from an bufferedReader
-     *
-     * @param reader file name
-     * @param trimWhitespace trim whitespace
-     * @param commentPrefix prefix for comments or null if no prefix is set
-     */
-    public XReadLines(final Reader reader, final boolean trimWhitespace, final String commentPrefix) {
-        this.in = (reader instanceof BufferedReader) ? (BufferedReader)reader : new BufferedReader(reader);
-        this.trimWhitespace = trimWhitespace;
-        this.commentPrefix = commentPrefix;
-        try {
-            this.nextLine = readNextLine();
-        } catch(IOException e) {
-            throw new IllegalArgumentException(e);
-        }
-    }
-
-    /**
-     * Reads all of the lines in the file, and returns them as a list of strings
-     *
-     * @return all of the lines in the file.
-     */
-    public List<String> readLines() {
-        List<String> lines = new LinkedList<String>();
-        for ( String line : this ) {
-            lines.add(line);
-        }
-        return lines;
-    }
-
-    /**
-     * I'm an iterator too...
-     * @return an iterator
-     */
-    public Iterator<String> iterator() {
-        return this;
-    }
-
-    public boolean hasNext() {
-        return this.nextLine != null;
-    }
-
-    /**
-     * Actually reads the next line from the stream, not accessible publicly
-     * @return the next line or null
-     * @throws IOException if an error occurs
-     */
-    private String readNextLine() throws IOException {
-        String nextLine;
-        while ((nextLine = this.in.readLine()) != null) {
-            if (this.trimWhitespace) {
-                nextLine = nextLine.trim();
-                if (nextLine.length() == 0)
-                    continue;
-            }
-            if (this.commentPrefix != null)
-                if (nextLine.startsWith(this.commentPrefix))
-                    continue;
-            break;
-        }
-        return nextLine;
-    }
-
-    /**
-     * Returns the next line (optionally minus whitespace)
-     * @return the next line
-     */
-    public String next() {
-        try {
-            String result = this.nextLine;
-            this.nextLine = readNextLine();
-
-            // If we haven't reached EOF yet
-            if (this.nextLine == null) {
-                in.close();             // And close on EOF
-            }
-
-            // Return the line we read last time through.
-            return result;
-        } catch(IOException e) {
-            throw new IllegalArgumentException(e);
-        }
-    }
-
-    // The file is read-only; we don't allow lines to be removed.
-    public void remove() {
-        throw new UnsupportedOperationException();
-    }
-
-    public void close() throws IOException {
-        this.in.close();
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/threading/EfficiencyMonitoringThreadFactory.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/threading/EfficiencyMonitoringThreadFactory.java
deleted file mode 100644
index 7282083..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/threading/EfficiencyMonitoringThreadFactory.java
+++ /dev/null
@@ -1,160 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.threading;
-
-import com.google.java.contract.Ensures;
-import com.google.java.contract.Invariant;
-import com.google.java.contract.Requires;
-import org.apache.log4j.Logger;
-import org.apache.log4j.Priority;
-import org.broadinstitute.gatk.utils.AutoFormattingTime;
-
-import java.lang.management.ManagementFactory;
-import java.lang.management.ThreadInfo;
-import java.lang.management.ThreadMXBean;
-import java.util.ArrayList;
-import java.util.EnumMap;
-import java.util.List;
-import java.util.concurrent.CountDownLatch;
-import java.util.concurrent.ThreadFactory;
-import java.util.concurrent.TimeUnit;
-
-/**
- * Creates threads that automatically monitor their efficiency via the parent ThreadEfficiencyMonitor
- *
- * User: depristo
- * Date: 8/14/12
- * Time: 8:47 AM
- */
- at Invariant({
-        "activeThreads.size() <= nThreadsToCreate",
-        "countDownLatch.getCount() <= nThreadsToCreate",
-        "nThreadsCreated <= nThreadsToCreate"
-})
-public class EfficiencyMonitoringThreadFactory extends ThreadEfficiencyMonitor implements ThreadFactory  {
-    final int nThreadsToCreate;
-    final List<Thread> activeThreads;
-
-    int nThreadsCreated = 0;
-
-    /**
-     * Counts down the number of active activeThreads whose runtime info hasn't been incorporated into
-     * times.  Counts down from nThreadsToCreate to 0, at which point any code waiting
-     * on the final times is freed to run.
-     */
-    final CountDownLatch countDownLatch;
-
-    /**
-     * Create a new factory generating threads whose runtime and contention
-     * behavior is tracked in this factory.
-     *
-     * @param nThreadsToCreate the number of threads we will create in the factory before it's considered complete
-     */
-    public EfficiencyMonitoringThreadFactory(final int nThreadsToCreate) {
-        super();
-        if ( nThreadsToCreate <= 0 ) throw new IllegalArgumentException("nThreadsToCreate <= 0: " + nThreadsToCreate);
-
-        this.nThreadsToCreate = nThreadsToCreate;
-        activeThreads = new ArrayList<Thread>(nThreadsToCreate);
-        countDownLatch = new CountDownLatch(nThreadsToCreate);
-    }
-
-    /**
-     * How many threads have been created by this factory so far?
-     * @return
-     */
-    @Ensures("result >= 0")
-    public int getNThreadsCreated() {
-        return nThreadsCreated;
-    }
-
-    /**
-     * Only useful for testing, so that we can wait for all of the threads in the factory to complete running
-     *
-     * @throws InterruptedException
-     */
-    protected void waitForAllThreadsToComplete() throws InterruptedException {
-        countDownLatch.await();
-    }
-
-    @Ensures({
-            "activeThreads.size() <= old(activeThreads.size())",
-            "! activeThreads.contains(thread)",
-            "countDownLatch.getCount() <= old(countDownLatch.getCount())"
-    })
-    @Override
-    public synchronized void threadIsDone(final Thread thread) {
-        nThreadsAnalyzed++;
-
-        if ( DEBUG ) logger.warn("  Countdown " + countDownLatch.getCount() + " in thread " + Thread.currentThread().getName());
-
-        super.threadIsDone(thread);
-
-        // remove the thread from the list of active activeThreads, if it's in there, and decrement the countdown latch
-        if ( activeThreads.remove(thread) ) {
-            // one less thread is live for those blocking on all activeThreads to be complete
-            countDownLatch.countDown();
-            if ( DEBUG ) logger.warn("  -> Countdown " + countDownLatch.getCount() + " in thread " + Thread.currentThread().getName());
-        }
-    }
-
-    /**
-     * Create a new thread from this factory
-     *
-     * @param runnable
-     * @return
-     */
-    @Override
-    @Ensures({
-            "activeThreads.size() > old(activeThreads.size())",
-            "activeThreads.contains(result)",
-            "nThreadsCreated == old(nThreadsCreated) + 1"
-    })
-    public synchronized Thread newThread(final Runnable runnable) {
-        if ( activeThreads.size() >= nThreadsToCreate)
-            throw new IllegalStateException("Attempting to create more activeThreads than allowed by constructor argument nThreadsToCreate " + nThreadsToCreate);
-
-        nThreadsCreated++;
-        final Thread myThread = new TrackingThread(runnable);
-        activeThreads.add(myThread);
-        return myThread;
-    }
-
-    /**
-     * A wrapper around Thread that tracks the runtime of the thread and calls threadIsDone() when complete
-     */
-    private class TrackingThread extends Thread {
-        private TrackingThread(Runnable runnable) {
-            super(runnable);
-        }
-
-        @Override
-        public void run() {
-            super.run();
-            threadIsDone(this);
-        }
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/threading/NamedThreadFactory.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/threading/NamedThreadFactory.java
deleted file mode 100644
index 6c84086..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/threading/NamedThreadFactory.java
+++ /dev/null
@@ -1,51 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.threading;
-
-import java.util.concurrent.ThreadFactory;
-
-/**
- * Thread factor that produces threads with a given name pattern
- *
- * User: depristo
- * Date: 9/5/12
- * Time: 9:22 PM
- *
- */
-public class NamedThreadFactory implements ThreadFactory {
-    static int id = 0;
-    final String format;
-
-    public NamedThreadFactory(String format) {
-        this.format = format;
-        String.format(format, id); // test the name
-    }
-
-    @Override
-    public Thread newThread(Runnable r) {
-        return new Thread(r, String.format(format, id++));
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/threading/ThreadEfficiencyMonitor.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/threading/ThreadEfficiencyMonitor.java
deleted file mode 100644
index cee91a4..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/threading/ThreadEfficiencyMonitor.java
+++ /dev/null
@@ -1,232 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.threading;
-
-import com.google.java.contract.Ensures;
-import com.google.java.contract.Invariant;
-import com.google.java.contract.Requires;
-import org.apache.log4j.Logger;
-import org.apache.log4j.Priority;
-import org.broadinstitute.gatk.utils.AutoFormattingTime;
-
-import java.lang.management.ManagementFactory;
-import java.lang.management.ThreadInfo;
-import java.lang.management.ThreadMXBean;
-import java.util.EnumMap;
-import java.util.concurrent.TimeUnit;
-
-/**
- * Uses an MXBean to monitor thread efficiency
- *
- * Once the monitor is created, calls to threadIsDone() can be used to add information
- * about the efficiency of the provided thread to this monitor.
- *
- * Provides simple print() for displaying efficiency information to a logger
- *
- * User: depristo
- * Date: 8/22/12
- * Time: 10:48 AM
- */
- at Invariant({"nThreadsAnalyzed >= 0"})
-public class ThreadEfficiencyMonitor {
-    protected static final boolean DEBUG = false;
-    protected static Logger logger = Logger.getLogger(EfficiencyMonitoringThreadFactory.class);
-    final EnumMap<State, Long> times = new EnumMap<State, Long>(State.class);
-
-    /**
-     * The number of threads we've included in our efficiency monitoring
-     */
-    int nThreadsAnalyzed = 0;
-
-    /**
-     * The bean used to get the thread info about blocked and waiting times
-     */
-    final ThreadMXBean bean;
-
-    public ThreadEfficiencyMonitor() {
-        bean = ManagementFactory.getThreadMXBean();
-
-        // get the bean, and start tracking
-        if ( bean.isThreadContentionMonitoringSupported() )
-            bean.setThreadContentionMonitoringEnabled(true);
-        else
-            logger.warn("Thread contention monitoring not supported, we cannot track GATK multi-threaded efficiency");
-        //bean.setThreadCpuTimeEnabled(true);
-
-        if ( bean.isThreadCpuTimeSupported() )
-            bean.setThreadCpuTimeEnabled(true);
-        else
-            logger.warn("Thread CPU monitoring not supported, we cannot track GATK multi-threaded efficiency");
-
-        // initialize times to 0
-        for ( final State state : State.values() )
-            times.put(state, 0l);
-    }
-
-    private static long nanoToMilli(final long timeInNano) {
-        return TimeUnit.NANOSECONDS.toMillis(timeInNano);
-    }
-
-    /**
-     * Get the time spent in state across all threads created by this factory
-     *
-     * @param state to get information about
-     * @return the time in milliseconds
-     */
-    @Ensures({"result >= 0"})
-    public synchronized long getStateTime(final State state) {
-        return times.get(state);
-    }
-
-    /**
-     * Get the total time spent in all states across all threads created by this factory
-     *
-     * @return the time in milliseconds
-     */
-    @Ensures({"result >= 0"})
-    public synchronized long getTotalTime() {
-        long total = 0;
-        for ( final long time : times.values() )
-            total += time;
-        return total;
-    }
-
-    /**
-     * Get the fraction of time spent in state across all threads created by this factory
-     *
-     * @return the percentage (0.0-100.0) of time spent in state over all state times of all threads
-     */
-    @Ensures({"result >= 0.0", "result <= 100.0"})
-    public synchronized double getStatePercent(final State state) {
-        return (100.0 * getStateTime(state)) / Math.max(getTotalTime(), 1);
-    }
-
-    public int getnThreadsAnalyzed() {
-        return nThreadsAnalyzed;
-    }
-
-    @Override
-    public synchronized String toString() {
-        final StringBuilder b = new StringBuilder();
-
-        b.append("total ").append(getTotalTime()).append(" ");
-        for ( final State state : State.values() ) {
-            b.append(state).append(" ").append(getStateTime(state)).append(" ");
-        }
-
-        return b.toString();
-    }
-
-    /**
-     * Print usage information about threads from this factory to logger
-     * with the INFO priority
-     *
-     * @param logger
-     */
-    public synchronized void printUsageInformation(final Logger logger) {
-        printUsageInformation(logger, Priority.INFO);
-    }
-
-    /**
-     * Print usage information about threads from this factory to logger
-     * with the provided priority
-     *
-     * @param logger
-     */
-    public synchronized void printUsageInformation(final Logger logger, final Priority priority) {
-        logger.debug("Number of threads monitored: " + getnThreadsAnalyzed());
-        logger.debug("Total runtime " + new AutoFormattingTime(TimeUnit.MILLISECONDS.toNanos(getTotalTime())));
-        for ( final State state : State.values() ) {
-            logger.debug(String.format("\tPercent of time spent %s is %.2f", state.getUserFriendlyName(), getStatePercent(state)));
-        }
-        logger.log(priority, String.format("CPU      efficiency : %6.2f%% of time spent %s", getStatePercent(State.USER_CPU), State.USER_CPU.getUserFriendlyName()));
-        logger.log(priority, String.format("Walker inefficiency : %6.2f%% of time spent %s", getStatePercent(State.BLOCKING), State.BLOCKING.getUserFriendlyName()));
-        logger.log(priority, String.format("I/O    inefficiency : %6.2f%% of time spent %s", getStatePercent(State.WAITING_FOR_IO), State.WAITING_FOR_IO.getUserFriendlyName()));
-        logger.log(priority, String.format("Thread inefficiency : %6.2f%% of time spent %s", getStatePercent(State.WAITING), State.WAITING.getUserFriendlyName()));
-    }
-
-    /**
-     * Update the information about completed thread that ran for runtime in milliseconds
-     *
-     * This method updates all of the key timing and tracking information in the factory so that
-     * thread can be retired.  After this call the factory shouldn't have a pointer to the thread any longer
-     *
-     * @param thread the thread whose information we are updating
-     */
-    @Ensures({
-            "getTotalTime() >= old(getTotalTime())"
-    })
-    public synchronized void threadIsDone(final Thread thread) {
-        nThreadsAnalyzed++;
-
-        if ( DEBUG ) logger.warn("UpdateThreadInfo called");
-
-        final long threadID = thread.getId();
-        final ThreadInfo info = bean.getThreadInfo(thread.getId());
-        final long totalTimeNano = bean.getThreadCpuTime(threadID);
-        final long userTimeNano = bean.getThreadUserTime(threadID);
-        final long systemTimeNano = totalTimeNano - userTimeNano;
-        final long userTimeInMilliseconds = nanoToMilli(userTimeNano);
-        final long systemTimeInMilliseconds = nanoToMilli(systemTimeNano);
-
-        if ( info != null ) {
-            if ( DEBUG ) logger.warn("Updating thread with user runtime " + userTimeInMilliseconds + " and system runtime " + systemTimeInMilliseconds + " of which blocked " + info.getBlockedTime() + " and waiting " + info.getWaitedTime());
-            incTimes(State.BLOCKING, info.getBlockedTime());
-            incTimes(State.WAITING, info.getWaitedTime());
-            incTimes(State.USER_CPU, userTimeInMilliseconds);
-            incTimes(State.WAITING_FOR_IO, systemTimeInMilliseconds);
-        }
-    }
-
-    /**
-     * Helper function that increments the times counter by by for state
-     *
-     * @param state
-     * @param by
-     */
-    @Requires({"state != null", "by >= 0"})
-    @Ensures("getTotalTime() == old(getTotalTime()) + by")
-    private synchronized void incTimes(final State state, final long by) {
-        times.put(state, times.get(state) + by);
-    }
-
-    public enum State {
-        BLOCKING("blocking on synchronized data structures"),
-        WAITING("waiting on some other thread"),
-        USER_CPU("doing productive CPU work"),
-        WAITING_FOR_IO("waiting for I/O");
-
-        private final String userFriendlyName;
-
-        private State(String userFriendlyName) {
-            this.userFriendlyName = userFriendlyName;
-        }
-
-        public String getUserFriendlyName() {
-            return userFriendlyName;
-        }
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/threading/ThreadLocalArray.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/threading/ThreadLocalArray.java
deleted file mode 100644
index b8dea06..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/threading/ThreadLocalArray.java
+++ /dev/null
@@ -1,65 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.threading;
-
-import java.lang.reflect.Array;
-
-/**
- * ThreadLocal implementation for arrays
- *
- * Example usage:
- *
- * private ThreadLocal<byte[]> threadLocalByteArray = new ThreadLocalArray<byte[]>(length, byte.class);
- * ....
- * byte[] byteArray = threadLocalByteArray.get();
- *
- * @param <T> the type of the array itself (eg., int[], double[], etc.)
- *
- * @author David Roazen
- */
-public class ThreadLocalArray<T> extends ThreadLocal<T> {
-    private int arraySize;
-    private Class arrayElementType;
-
-    /**
-     * Create a new ThreadLocalArray
-     *
-     * @param arraySize desired length of the array
-     * @param arrayElementType type of the elements within the array (eg., Byte.class, Integer.class, etc.)
-     */
-    public ThreadLocalArray( int arraySize, Class arrayElementType ) {
-        super();
-
-        this.arraySize = arraySize;
-        this.arrayElementType = arrayElementType;
-    }
-
-    @Override
-    @SuppressWarnings("unchecked")
-    protected T initialValue() {
-        return (T)Array.newInstance(arrayElementType, arraySize);
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/threading/ThreadPoolMonitor.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/threading/ThreadPoolMonitor.java
deleted file mode 100644
index 9261870..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/threading/ThreadPoolMonitor.java
+++ /dev/null
@@ -1,77 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.threading;
-
-import org.apache.log4j.Logger;
-/**
- * User: hanna
- * Date: Apr 29, 2009
- * Time: 4:27:58 PM
- * BROAD INSTITUTE SOFTWARE COPYRIGHT NOTICE AND AGREEMENT
- * Software and documentation are copyright 2005 by the Broad Institute.
- * All rights are reserved.
- *
- * Users acknowledge that this software is supplied without any warranty or support.
- * The Broad Institute is not responsible for its use, misuse, or
- * functionality.
- */
-
-/**
- * Waits for a signal to come through that the thread pool has run
- * a given task and therefore has a free slot.
- *
- * Make sure, that, when using, the submit and the run are both
- * protected by the same synchronized(monitor) lock.  See the test
- * case for an example.
- */
-public class ThreadPoolMonitor implements Runnable {
-    /**
-     * Logger for reporting interruptions, etc.
-     */
-    private static Logger logger = Logger.getLogger(ThreadPoolMonitor.class);
-
-    /**
-     * Watch the monitor
-     */
-    public synchronized void watch() {
-        try {
-            wait();
-        }
-        catch( InterruptedException ex ) {
-            logger.error("ThreadPoolMonitor interrupted:" + ex.getStackTrace());
-            throw new RuntimeException("ThreadPoolMonitor interrupted", ex);
-        }
-    }
-
-    /**
-     * Instruct the monitor that the thread pool has run for the class.
-     * Only the thread pool should execute this method.
-     */
-    public synchronized void run() {
-        notify();
-    }
-}
-
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/threading/package-info.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/threading/package-info.java
deleted file mode 100644
index 83093ba..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/threading/package-info.java
+++ /dev/null
@@ -1,26 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.threading;
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/variant/GATKVCFIndexType.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/variant/GATKVCFIndexType.java
deleted file mode 100644
index f142da2..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/variant/GATKVCFIndexType.java
+++ /dev/null
@@ -1,39 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.variant;
-
-import org.broadinstitute.gatk.utils.commandline.EnumerationArgumentDefault;
-
-/**
- * Choose the Tribble indexing strategy
- */
-public enum GATKVCFIndexType {
-    @EnumerationArgumentDefault
-    DYNAMIC_SEEK,       // use DynamicIndexCreator(IndexFactory.IndexBalanceApproach.FOR_SEEK_TIME)
-    DYNAMIC_SIZE,       // use DynamicIndexCreator(IndexFactory.IndexBalanceApproach.FOR_SIZE)
-    LINEAR,             // use LinearIndexCreator()
-    INTERVAL            // use IntervalIndexCreator()
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/variant/GATKVCFUtils.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/variant/GATKVCFUtils.java
deleted file mode 100644
index 6baa7b6..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/variant/GATKVCFUtils.java
+++ /dev/null
@@ -1,316 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.variant;
-
-import htsjdk.samtools.SAMSequenceDictionary;
-import org.apache.log4j.Logger;
-import htsjdk.tribble.Feature;
-import htsjdk.tribble.FeatureCodec;
-import htsjdk.tribble.FeatureCodecHeader;
-import htsjdk.tribble.index.DynamicIndexCreator;
-import htsjdk.tribble.index.IndexCreator;
-import htsjdk.tribble.index.IndexFactory;
-import htsjdk.tribble.index.interval.IntervalIndexCreator;
-import htsjdk.tribble.index.linear.LinearIndexCreator;
-import htsjdk.tribble.index.tabix.TabixFormat;
-import htsjdk.tribble.index.tabix.TabixIndexCreator;
-import htsjdk.tribble.readers.LineIterator;
-import htsjdk.tribble.readers.PositionalBufferedStream;
-import org.broadinstitute.gatk.utils.commandline.RodBinding;
-import org.broadinstitute.gatk.engine.CommandLineGATK;
-import org.broadinstitute.gatk.engine.GenomeAnalysisEngine;
-import org.broadinstitute.gatk.engine.datasources.rmd.ReferenceOrderedDataSource;
-import org.broadinstitute.gatk.engine.io.stubs.VCFWriterArgumentTypeDescriptor;
-import org.broadinstitute.gatk.utils.collections.Pair;
-import htsjdk.variant.variantcontext.VariantContext;
-import htsjdk.variant.vcf.*;
-
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.IOException;
-import java.util.*;
-
-
-/**
- * A set of GATK-specific static utility methods for common operations on VCF files/records.
- */
-public class GATKVCFUtils {
-
-    /**
-     * Constructor access disallowed...static utility methods only!
-     */
-    private GATKVCFUtils() { }
-
-    public static final Logger logger = Logger.getLogger(GATKVCFUtils.class);
-    public final static String GATK_COMMAND_LINE_KEY = "GATKCommandLine";
-
-    public final static GATKVCFIndexType DEFAULT_INDEX_TYPE = GATKVCFIndexType.DYNAMIC_SEEK;  // by default, optimize for seek time.  All indices prior to Nov 2013 used this type.
-    public final static Integer DEFAULT_INDEX_PARAMETER = -1;           // the default DYNAMIC_SEEK does not use a parameter
-
-    /**
-     * Gets the appropriately formatted header for a VCF file describing this GATK run
-     *
-     * @param engine the GATK engine that holds the walker name, GATK version, and other information
-     * @param argumentSources contains information on the argument values provided to the GATK for converting to a
-     *                        command line string.  Should be provided from the data in the parsing engine.  Can be
-     *                        empty in which case the command line will be the empty string.
-     * @return VCF header line describing this run of the GATK.
-     */
-    public static VCFHeaderLine getCommandLineArgumentHeaderLine(final GenomeAnalysisEngine engine, final Collection<Object> argumentSources) {
-        if ( engine == null ) throw new IllegalArgumentException("engine cannot be null");
-        if ( argumentSources == null ) throw new IllegalArgumentException("argumentSources cannot be null");
-
-        final Map<String, String> attributes = new LinkedHashMap<>();
-        attributes.put("ID", engine.getWalkerName());
-        attributes.put("Version", CommandLineGATK.getVersionNumber());
-        final Date date = new Date();
-        attributes.put("Date", date.toString());
-        attributes.put("Epoch", Long.toString(date.getTime()));
-        attributes.put("CommandLineOptions", engine.createApproximateCommandLineArgumentString(argumentSources.toArray()));
-        return new VCFSimpleHeaderLine(GATK_COMMAND_LINE_KEY, attributes);
-    }
-
-    public static <T extends Feature> Map<String, VCFHeader> getVCFHeadersFromRods(GenomeAnalysisEngine toolkit, List<RodBinding<T>> rodBindings) {
-        // Collect the eval rod names
-        final Set<String> names = new TreeSet<String>();
-        for ( final RodBinding<T> evalRod : rodBindings )
-            names.add(evalRod.getName());
-        return getVCFHeadersFromRods(toolkit, names);
-    }
-
-    public static Map<String, VCFHeader> getVCFHeadersFromRods(GenomeAnalysisEngine toolkit) {
-        return getVCFHeadersFromRods(toolkit, (Collection<String>)null);
-    }
-
-    public static Map<String, VCFHeader> getVCFHeadersFromRods(GenomeAnalysisEngine toolkit, Collection<String> rodNames) {
-        Map<String, VCFHeader> data = new HashMap<String, VCFHeader>();
-
-        // iterate to get all of the sample names
-        List<ReferenceOrderedDataSource> dataSources = toolkit.getRodDataSources();
-        for ( ReferenceOrderedDataSource source : dataSources ) {
-            // ignore the rod if it's not in our list
-            if ( rodNames != null && !rodNames.contains(source.getName()) )
-                continue;
-
-            if ( source.getHeader() != null && source.getHeader() instanceof VCFHeader )
-                data.put(source.getName(), (VCFHeader)source.getHeader());
-        }
-
-        return data;
-    }
-
-    public static Map<String,VCFHeader> getVCFHeadersFromRodPrefix(GenomeAnalysisEngine toolkit,String prefix) {
-        Map<String, VCFHeader> data = new HashMap<String, VCFHeader>();
-
-        // iterate to get all of the sample names
-        List<ReferenceOrderedDataSource> dataSources = toolkit.getRodDataSources();
-        for ( ReferenceOrderedDataSource source : dataSources ) {
-            // ignore the rod if lacks the prefix
-            if ( ! source.getName().startsWith(prefix) )
-                continue;
-
-            if ( source.getHeader() != null && source.getHeader() instanceof VCFHeader )
-                data.put(source.getName(), (VCFHeader)source.getHeader());
-        }
-
-        return data;
-    }
-
-    /**
-     * Gets the header fields from all VCF rods input by the user
-     *
-     * @param toolkit    GATK engine
-     *
-     * @return a set of all fields
-     */
-    public static Set<VCFHeaderLine> getHeaderFields(GenomeAnalysisEngine toolkit) {
-        return getHeaderFields(toolkit, null);
-    }
-
-    /**
-     * Gets the header fields from all VCF rods input by the user
-     *
-     * @param toolkit    GATK engine
-     * @param rodNames   names of rods to use, or null if we should use all possible ones
-     *
-     * @return a set of all fields
-     */
-    public static Set<VCFHeaderLine> getHeaderFields(GenomeAnalysisEngine toolkit, Collection<String> rodNames) {
-
-        // keep a map of sample name to occurrences encountered
-        TreeSet<VCFHeaderLine> fields = new TreeSet<VCFHeaderLine>();
-
-        // iterate to get all of the sample names
-        List<ReferenceOrderedDataSource> dataSources = toolkit.getRodDataSources();
-        for ( ReferenceOrderedDataSource source : dataSources ) {
-            // ignore the rod if it's not in our list
-            if ( rodNames != null && !rodNames.contains(source.getName()) )
-                continue;
-
-            if ( source.getRecordType().equals(VariantContext.class)) {
-                VCFHeader header = (VCFHeader)source.getHeader();
-                if ( header != null )
-                    fields.addAll(header.getMetaDataInSortedOrder());
-            }
-        }
-
-        return fields;
-    }
-
-    /**
-     * Add / replace the contig header lines in the VCFHeader with the information in the GATK engine
-     *
-     * @param header the header to update
-     * @param engine the GATK engine containing command line arguments and the master sequence dictionary
-     */
-    public static VCFHeader withUpdatedContigs(final VCFHeader header, final GenomeAnalysisEngine engine) {
-        return VCFUtils.withUpdatedContigs(header, engine.getArguments().referenceFile, engine.getMasterSequenceDictionary());
-    }
-
-    /**
-     * Create and return an IndexCreator
-     * @param type
-     * @param parameter
-     * @param outFile
-     * @return
-     */
-    public static IndexCreator getIndexCreator(GATKVCFIndexType type, int parameter, File outFile) {
-        return getIndexCreator(type, parameter, outFile, null);
-    }
-
-    /**
-     * Create and return an IndexCreator
-     * @param type
-     * @param parameter
-     * @param outFile
-     * @param sequenceDictionary
-     * @return
-     */
-    public static IndexCreator getIndexCreator(GATKVCFIndexType type, int parameter, File outFile, SAMSequenceDictionary sequenceDictionary) {
-        if (VCFWriterArgumentTypeDescriptor.isCompressed(outFile.toString())) {
-            if (type != GATKVCFUtils.DEFAULT_INDEX_TYPE || parameter != GATKVCFUtils.DEFAULT_INDEX_PARAMETER)
-                logger.warn("Creating Tabix index for " + outFile + ", ignoring user-specified index type and parameter");
-
-            if (sequenceDictionary == null)
-                return new TabixIndexCreator(TabixFormat.VCF);
-            else
-                return new TabixIndexCreator(sequenceDictionary, TabixFormat.VCF);
-        }
-
-        IndexCreator idxCreator;
-        switch (type) {
-            case DYNAMIC_SEEK: idxCreator = new DynamicIndexCreator(outFile, IndexFactory.IndexBalanceApproach.FOR_SEEK_TIME); break;
-            case DYNAMIC_SIZE: idxCreator = new DynamicIndexCreator(outFile, IndexFactory.IndexBalanceApproach.FOR_SIZE); break;
-            case LINEAR: idxCreator = new LinearIndexCreator(outFile, parameter); break;
-            case INTERVAL: idxCreator = new IntervalIndexCreator(outFile, parameter); break;
-            default: throw new IllegalArgumentException("Unknown IndexCreator type: " + type);
-        }
-
-        return idxCreator;
-    }
-
-    /**
-     * Utility class to read all of the VC records from a file
-     *
-     * @param file
-     * @param codec
-     * @return
-     * @throws IOException
-     */
-    public final static <SOURCE> Pair<VCFHeader, VCIterable<SOURCE>> readAllVCs( final File file, final FeatureCodec<VariantContext, SOURCE> codec) throws IOException {
-        // read in the features
-        SOURCE source = codec.makeSourceFromStream(new FileInputStream(file));
-        FeatureCodecHeader header = codec.readHeader(source);
-        final VCFHeader vcfHeader = (VCFHeader)header.getHeaderValue();
-        return new Pair<>(vcfHeader, new VCIterable<>(source, codec, vcfHeader));
-    }
-
-    public static class VCIterable<SOURCE> implements Iterable<VariantContext>, Iterator<VariantContext> {
-        final SOURCE source;
-        final FeatureCodec<VariantContext, SOURCE> codec;
-        final VCFHeader header;
-
-        private VCIterable(final SOURCE source, final FeatureCodec<VariantContext, SOURCE> codec, final VCFHeader header) {
-            this.source = source;
-            this.codec = codec;
-            this.header = header;
-        }
-
-        @Override
-        public Iterator<VariantContext> iterator() {
-            return this;
-        }
-
-        @Override
-        public boolean hasNext() {
-            return ! codec.isDone(source);
-        }
-
-        @Override
-        public VariantContext next() {
-            try {
-                final VariantContext vc = codec.decode(source);
-                return vc == null ? null : vc.fullyDecode(header, false);
-            } catch ( IOException e ) {
-                throw new RuntimeException(e);
-            }
-        }
-
-        @Override
-        public void remove() {
-        }
-    }
-
-    /**
-     * Read all of the VCF records from source into memory, returning the header and the VariantContexts
-     *
-     * SHOULD ONLY BE USED FOR UNIT/INTEGRATION TESTING PURPOSES!
-     *
-     * @param source the file to read, must be in VCF4 format
-     * @return
-     * @throws java.io.IOException
-     */
-    public static Pair<VCFHeader, List<VariantContext>> readVCF(final File source) throws IOException {
-        // read in the features
-        final List<VariantContext> vcs = new ArrayList<VariantContext>();
-        final VCFCodec codec = new VCFCodec();
-        PositionalBufferedStream pbs = new PositionalBufferedStream(new FileInputStream(source));
-        final LineIterator vcfSource = codec.makeSourceFromStream(pbs);
-        try {
-            final VCFHeader vcfHeader = (VCFHeader) codec.readActualHeader(vcfSource);
-
-            while (vcfSource.hasNext()) {
-                final VariantContext vc = codec.decode(vcfSource);
-                if ( vc != null )
-                    vcs.add(vc);
-            }
-
-            return new Pair<VCFHeader, List<VariantContext>>(vcfHeader, vcs);
-        } finally {
-            codec.close(vcfSource);
-        }
-    }
-}
\ No newline at end of file
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/variant/GATKVariantContextUtils.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/variant/GATKVariantContextUtils.java
deleted file mode 100644
index a099d8c..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/variant/GATKVariantContextUtils.java
+++ /dev/null
@@ -1,1960 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.variant;
-
-import com.google.java.contract.Ensures;
-import com.google.java.contract.Requires;
-import htsjdk.tribble.TribbleException;
-import htsjdk.tribble.util.popgen.HardyWeinbergCalculation;
-import htsjdk.variant.variantcontext.*;
-import htsjdk.variant.vcf.VCFConstants;
-import org.apache.commons.lang.ArrayUtils;
-import org.apache.log4j.Logger;
-import org.broadinstitute.gatk.utils.*;
-import org.broadinstitute.gatk.utils.collections.Pair;
-
-import java.io.Serializable;
-import java.util.*;
-
-public class GATKVariantContextUtils {
-
-    private static Logger logger = Logger.getLogger(GATKVariantContextUtils.class);
-
-    public static final int DEFAULT_PLOIDY = HomoSapiensConstants.DEFAULT_PLOIDY;
-
-    public static final double SUM_GL_THRESH_NOCALL = -0.1; // if sum(gl) is bigger than this threshold, we treat GL's as non-informative and will force a no-call.
-
-    /**
-     * Diploid NO_CALL allele list...
-     *
-     * @deprecated you should use {@link #noCallAlleles(int)} instead. It indicates the presence of a hardcoded diploid assumption which is bad.
-     */
-    @Deprecated
-    public final static List<Allele> NO_CALL_ALLELES = Arrays.asList(Allele.NO_CALL, Allele.NO_CALL);
-
-    public final static String NON_REF_SYMBOLIC_ALLELE_NAME = "NON_REF";
-    public final static Allele NON_REF_SYMBOLIC_ALLELE = Allele.create("<"+NON_REF_SYMBOLIC_ALLELE_NAME+">", false); // represents any possible non-ref allele at this site
-
-    public final static String MERGE_FILTER_PREFIX = "filterIn";
-    public final static String MERGE_REF_IN_ALL = "ReferenceInAll";
-    public final static String MERGE_FILTER_IN_ALL = "FilteredInAll";
-    public final static String MERGE_INTERSECTION = "Intersection";
-
-    /**
-     * Checks whether a variant-context overlaps with a region.
-     *
-     * <p>
-     *     No event overlaps an unmapped region.
-     * </p>
-     *
-     * @param variantContext variant-context to test the overlap with.
-     * @param region region to test the overlap with.
-     *
-     * @throws IllegalArgumentException if either region or event is {@code null}.
-     *
-     * @return {@code true} if there is an overlap between the event described and the active region provided.
-     */
-    public static boolean overlapsRegion(final VariantContext variantContext, final GenomeLoc region) {
-        if (region == null) throw new IllegalArgumentException("the active region provided cannot be null");
-        if (variantContext == null) throw new IllegalArgumentException("the variant context provided cannot be null");
-        if (region.isUnmapped())
-            return false;
-        if (variantContext.getEnd() < region.getStart())
-            return false;
-        if (variantContext.getStart() > region.getStop())
-            return false;
-        if (!variantContext.getChr().equals(region.getContig()))
-            return false;
-        return true;
-    }
-
-    /**
-     * Returns a homozygous call allele list given the only allele and the ploidy.
-     *
-     * @param allele the only allele in the allele list.
-     * @param ploidy the ploidy of the resulting allele list.
-     *
-     * @throws IllegalArgumentException if {@code allele} is {@code null} or ploidy is negative.
-     *
-     * @return never {@code null}.
-     */
-    public static List<Allele> homozygousAlleleList(final Allele allele, final int ploidy) {
-        if (allele == null || ploidy < 0)
-            throw new IllegalArgumentException();
-
-        // Use a tailored inner class to implement the list:
-        return Collections.nCopies(ploidy,allele);
-    }
-
-    private static boolean hasPLIncompatibleAlleles(final Collection<Allele> alleleSet1, final Collection<Allele> alleleSet2) {
-        final Iterator<Allele> it1 = alleleSet1.iterator();
-        final Iterator<Allele> it2 = alleleSet2.iterator();
-
-        while ( it1.hasNext() && it2.hasNext() ) {
-            final Allele a1 = it1.next();
-            final Allele a2 = it2.next();
-            if ( ! a1.equals(a2) )
-                return true;
-        }
-
-        // by this point, at least one of the iterators is empty.  All of the elements
-        // we've compared are equal up until this point.  But it's possible that the
-        // sets aren't the same size, which is indicated by the test below.  If they
-        // are of the same size, though, the sets are compatible
-        return it1.hasNext() || it2.hasNext();
-    }
-
-    /**
-     * Determines the common reference allele
-     *
-     * @param VCs    the list of VariantContexts
-     * @param loc    if not null, ignore records that do not begin at this start location
-     * @return possibly null Allele
-     */
-    protected static Allele determineReferenceAllele(final List<VariantContext> VCs, final GenomeLoc loc) {
-        Allele ref = null;
-
-        for ( final VariantContext vc : VCs ) {
-            if ( contextMatchesLoc(vc, loc) ) {
-                final Allele myRef = vc.getReference();
-                if ( ref == null || ref.length() < myRef.length() )
-                    ref = myRef;
-                else if ( ref.length() == myRef.length() && ! ref.equals(myRef) )
-                    throw new TribbleException(String.format("The provided variant file(s) have inconsistent references for the same position(s) at %s:%d, %s vs. %s", vc.getChr(), vc.getStart(), ref, myRef));
-            }
-        }
-
-        return ref;
-    }
-
-    /**
-     * Calculates the total ploidy of a variant context as the sum of all plodies across genotypes.
-     * @param vc the target variant context.
-     * @param defaultPloidy the default ploidy to be assume when there is no ploidy information for a genotype.
-     * @return never {@code null}.
-     */
-    public static int totalPloidy(final VariantContext vc, final int defaultPloidy) {
-        if (vc == null)
-            throw new IllegalArgumentException("the vc provided cannot be null");
-        if (defaultPloidy < 0)
-            throw new IllegalArgumentException("the default ploidy must 0 or greater");
-        int result = 0;
-        for (final Genotype genotype : vc.getGenotypes()) {
-            final int declaredPloidy = genotype.getPloidy();
-            result += declaredPloidy <= 0 ? defaultPloidy : declaredPloidy;
-        }
-
-        return result;
-    }
-
-    public enum GenotypeMergeType {
-        /**
-         * Make all sample genotypes unique by file. Each sample shared across RODs gets named sample.ROD.
-         */
-        UNIQUIFY,
-        /**
-         * Take genotypes in priority order (see the priority argument).
-         */
-        PRIORITIZE,
-        /**
-         * Take the genotypes in any order.
-         */
-        UNSORTED,
-        /**
-         * Require that all samples/genotypes be unique between all inputs.
-         */
-        REQUIRE_UNIQUE
-    }
-
-    public enum FilteredRecordMergeType {
-        /**
-         * Union - leaves the record if any record is unfiltered.
-         */
-        KEEP_IF_ANY_UNFILTERED,
-        /**
-         * Requires all records present at site to be unfiltered. VCF files that don't contain the record don't influence this.
-         */
-        KEEP_IF_ALL_UNFILTERED,
-        /**
-         * If any record is present at this site (regardless of possibly being filtered), then all such records are kept and the filters are reset.
-         */
-        KEEP_UNCONDITIONAL
-    }
-
-    public enum MultipleAllelesMergeType {
-        /**
-         * Combine only alleles of the same type (SNP, indel, etc.) into a single VCF record.
-         */
-        BY_TYPE,
-        /**
-         * Merge all allele types at the same start position into the same VCF record.
-         */
-        MIX_TYPES
-    }
-
-    /**
-     * Refactored out of the AverageAltAlleleLength annotation class
-     * @param vc the variant context
-     * @return the average length of the alt allele (a double)
-     */
-    public static double getMeanAltAlleleLength(VariantContext vc) {
-        double averageLength = 1.0;
-        if ( ! vc.isSNP() && ! vc.isSymbolic() ) {
-            // adjust for the event length
-            int averageLengthNum = 0;
-            int averageLengthDenom = 0;
-            int refLength = vc.getReference().length();
-            for ( final Allele a : vc.getAlternateAlleles() ) {
-                int numAllele = vc.getCalledChrCount(a);
-                int alleleSize;
-                if ( a.length() == refLength ) {
-                    // SNP or MNP
-                    byte[] a_bases = a.getBases();
-                    byte[] ref_bases = vc.getReference().getBases();
-                    int n_mismatch = 0;
-                    for ( int idx = 0; idx < a_bases.length; idx++ ) {
-                        if ( a_bases[idx] != ref_bases[idx] )
-                            n_mismatch++;
-                    }
-                    alleleSize = n_mismatch;
-                }
-                else if ( a.isSymbolic() ) {
-                    alleleSize = 1;
-                } else {
-                    alleleSize = Math.abs(refLength-a.length());
-                }
-                averageLengthNum += alleleSize*numAllele;
-                averageLengthDenom += numAllele;
-            }
-            averageLength = ( (double) averageLengthNum )/averageLengthDenom;
-        }
-
-        return averageLength;
-    }
-
-    /**
-     * create a genome location, given a variant context
-     * @param genomeLocParser parser
-     * @param vc the variant context
-     * @return the genomeLoc
-     */
-    public static final GenomeLoc getLocation(GenomeLocParser genomeLocParser,VariantContext vc) {
-        return genomeLocParser.createGenomeLoc(vc.getChr(), vc.getStart(), vc.getEnd(), true);
-    }
-
-    public static BaseUtils.BaseSubstitutionType getSNPSubstitutionType(VariantContext context) {
-        if (!context.isSNP() || !context.isBiallelic())
-            throw new IllegalStateException("Requested SNP substitution type for bialleic non-SNP " + context);
-        return BaseUtils.SNPSubstitutionType(context.getReference().getBases()[0], context.getAlternateAllele(0).getBases()[0]);
-    }
-
-    /**
-     * If this is a BiAllelic SNP, is it a transition?
-     */
-    public static boolean isTransition(VariantContext context) {
-        return getSNPSubstitutionType(context) == BaseUtils.BaseSubstitutionType.TRANSITION;
-    }
-
-    /**
-     * If this is a BiAllelic SNP, is it a transversion?
-     */
-    public static boolean isTransversion(VariantContext context) {
-        return getSNPSubstitutionType(context) == BaseUtils.BaseSubstitutionType.TRANSVERSION;
-    }
-
-    public static boolean isTransition(Allele ref, Allele alt) {
-        return BaseUtils.SNPSubstitutionType(ref.getBases()[0], alt.getBases()[0]) == BaseUtils.BaseSubstitutionType.TRANSITION;
-    }
-
-    public static boolean isTransversion(Allele ref, Allele alt) {
-        return BaseUtils.SNPSubstitutionType(ref.getBases()[0], alt.getBases()[0]) == BaseUtils.BaseSubstitutionType.TRANSVERSION;
-    }
-
-    /**
-     * Returns a context identical to this with the REF and ALT alleles reverse complemented.
-     *
-     * @param vc        variant context
-     * @return new vc
-     */
-    public static VariantContext reverseComplement(VariantContext vc) {
-        // create a mapping from original allele to reverse complemented allele
-        HashMap<Allele, Allele> alleleMap = new HashMap<>(vc.getAlleles().size());
-        for ( final Allele originalAllele : vc.getAlleles() ) {
-            Allele newAllele;
-            if ( originalAllele.isNoCall() )
-                newAllele = originalAllele;
-            else
-                newAllele = Allele.create(BaseUtils.simpleReverseComplement(originalAllele.getBases()), originalAllele.isReference());
-            alleleMap.put(originalAllele, newAllele);
-        }
-
-        // create new Genotype objects
-        GenotypesContext newGenotypes = GenotypesContext.create(vc.getNSamples());
-        for ( final Genotype genotype : vc.getGenotypes() ) {
-            List<Allele> newAlleles = new ArrayList<>();
-            for ( final Allele allele : genotype.getAlleles() ) {
-                Allele newAllele = alleleMap.get(allele);
-                if ( newAllele == null )
-                    newAllele = Allele.NO_CALL;
-                newAlleles.add(newAllele);
-            }
-            newGenotypes.add(new GenotypeBuilder(genotype).alleles(newAlleles).make());
-        }
-
-        return new VariantContextBuilder(vc).alleles(alleleMap.values()).genotypes(newGenotypes).make();
-    }
-
-    /**
-     * Returns true iff VC is an non-complex indel where every allele represents an expansion or
-     * contraction of a series of identical bases in the reference.
-     *
-     * For example, suppose the ref bases are CTCTCTGA, which includes a 3x repeat of CTCTCT
-     *
-     * If VC = -/CT, then this function returns true because the CT insertion matches exactly the
-     * upcoming reference.
-     * If VC = -/CTA then this function returns false because the CTA isn't a perfect match
-     *
-     * Now consider deletions:
-     *
-     * If VC = CT/- then again the same logic applies and this returns true
-     * The case of CTA/- makes no sense because it doesn't actually match the reference bases.
-     *
-     * The logic of this function is pretty simple.  Take all of the non-null alleles in VC.  For
-     * each insertion allele of n bases, check if that allele matches the next n reference bases.
-     * For each deletion allele of n bases, check if this matches the reference bases at n - 2 n,
-     * as it must necessarily match the first n bases.  If this test returns true for all
-     * alleles you are a tandem repeat, otherwise you are not.
-     *
-     * @param vc
-     * @param refBasesStartingAtVCWithPad not this is assumed to include the PADDED reference
-     * @return
-     */
-    @Requires({"vc != null", "refBasesStartingAtVCWithPad != null && refBasesStartingAtVCWithPad.length > 0"})
-    public static boolean isTandemRepeat(final VariantContext vc, final byte[] refBasesStartingAtVCWithPad) {
-        final String refBasesStartingAtVCWithoutPad = new String(refBasesStartingAtVCWithPad).substring(1);
-        if ( ! vc.isIndel() ) // only indels are tandem repeats
-            return false;
-
-        final Allele ref = vc.getReference();
-
-        for ( final Allele allele : vc.getAlternateAlleles() ) {
-            if ( ! isRepeatAllele(ref, allele, refBasesStartingAtVCWithoutPad) )
-                return false;
-        }
-
-        // we've passed all of the tests, so we are a repeat
-        return true;
-    }
-
-    /**
-     *
-     * @param vc
-     * @param refBasesStartingAtVCWithPad
-     * @return
-     */
-    @Requires({"vc != null", "refBasesStartingAtVCWithPad != null && refBasesStartingAtVCWithPad.length > 0"})
-    public static Pair<List<Integer>,byte[]> getNumTandemRepeatUnits(final VariantContext vc, final byte[] refBasesStartingAtVCWithPad) {
-        final boolean VERBOSE = false;
-        final String refBasesStartingAtVCWithoutPad = new String(refBasesStartingAtVCWithPad).substring(1);
-        if ( ! vc.isIndel() ) // only indels are tandem repeats
-            return null;
-
-        final Allele refAllele = vc.getReference();
-        final byte[] refAlleleBases = Arrays.copyOfRange(refAllele.getBases(), 1, refAllele.length());
-
-        byte[] repeatUnit = null;
-        final ArrayList<Integer> lengths = new ArrayList<>();
-
-        for ( final Allele allele : vc.getAlternateAlleles() ) {
-            Pair<int[],byte[]> result = getNumTandemRepeatUnits(refAlleleBases, Arrays.copyOfRange(allele.getBases(), 1, allele.length()), refBasesStartingAtVCWithoutPad.getBytes());
-
-            final int[] repetitionCount = result.first;
-            // repetition count = 0 means allele is not a tandem expansion of context
-            if (repetitionCount[0] == 0 || repetitionCount[1] == 0)
-                return null;
-
-            if (lengths.size() == 0) {
-                lengths.add(repetitionCount[0]); // add ref allele length only once
-            }
-            lengths.add(repetitionCount[1]);  // add this alt allele's length
-
-            repeatUnit = result.second;
-            if (VERBOSE) {
-                System.out.println("RefContext:"+refBasesStartingAtVCWithoutPad);
-                System.out.println("Ref:"+refAllele.toString()+" Count:" + String.valueOf(repetitionCount[0]));
-                System.out.println("Allele:"+allele.toString()+" Count:" + String.valueOf(repetitionCount[1]));
-                System.out.println("RU:"+new String(repeatUnit));
-            }
-        }
-
-        return new Pair<List<Integer>, byte[]>(lengths,repeatUnit);
-    }
-
-    public static Pair<int[],byte[]> getNumTandemRepeatUnits(final byte[] refBases, final byte[] altBases, final byte[] remainingRefContext) {
-         /* we can't exactly apply same logic as in basesAreRepeated() to compute tandem unit and number of repeated units.
-           Consider case where ref =ATATAT and we have an insertion of ATAT. Natural description is (AT)3 -> (AT)2.
-         */
-
-        byte[] longB;
-        // find first repeat unit based on either ref or alt, whichever is longer
-        if (altBases.length > refBases.length)
-            longB = altBases;
-        else
-            longB = refBases;
-
-        // see if non-null allele (either ref or alt, whichever is longer) can be decomposed into several identical tandem units
-        // for example, -*,CACA needs to first be decomposed into (CA)2
-        final int repeatUnitLength = findRepeatedSubstring(longB);
-        final byte[] repeatUnit = Arrays.copyOf(longB, repeatUnitLength);
-
-        final int[] repetitionCount = new int[2];
-        // look for repetitions forward on the ref bases (i.e. starting at beginning of ref bases)
-        int repetitionsInRef = findNumberOfRepetitions(repeatUnit, refBases, true);
-        repetitionCount[0] = findNumberOfRepetitions(repeatUnit, ArrayUtils.addAll(refBases, remainingRefContext), true)-repetitionsInRef;
-        repetitionCount[1] = findNumberOfRepetitions(repeatUnit, ArrayUtils.addAll(altBases, remainingRefContext), true)-repetitionsInRef;
-
-        return new Pair<>(repetitionCount, repeatUnit);
-
-    }
-
-    /**
-     * Find out if a string can be represented as a tandem number of substrings.
-     * For example ACTACT is a 2-tandem of ACT,
-     * but ACTACA is not.
-     *
-     * @param bases                 String to be tested
-     * @return                      Length of repeat unit, if string can be represented as tandem of substring (if it can't
-     *                              be represented as one, it will be just the length of the input string)
-     */
-    public static int findRepeatedSubstring(byte[] bases) {
-
-        int repLength;
-        for (repLength=1; repLength <=bases.length; repLength++) {
-            final byte[] candidateRepeatUnit = Arrays.copyOf(bases,repLength);
-            boolean allBasesMatch = true;
-            for (int start = repLength; start < bases.length; start += repLength ) {
-                // check that remaining of string is exactly equal to repeat unit
-                final byte[] basePiece = Arrays.copyOfRange(bases,start,start+candidateRepeatUnit.length);
-                if (!Arrays.equals(candidateRepeatUnit, basePiece)) {
-                    allBasesMatch = false;
-                    break;
-                }
-            }
-            if (allBasesMatch)
-                return repLength;
-        }
-
-        return repLength;
-    }
-
-    /**
-     * Helper routine that finds number of repetitions a string consists of.
-     * For example, for string ATAT and repeat unit AT, number of repetitions = 2
-     * @param repeatUnit             Substring
-     * @param testString             String to test
-     * @oaram lookForward            Look for repetitions forward (at beginning of string) or backward (at end of string)
-     * @return                       Number of repetitions (0 if testString is not a concatenation of n repeatUnit's
-     */
-    public static int findNumberOfRepetitions(byte[] repeatUnit, byte[] testString, boolean lookForward) {
-        int numRepeats = 0;
-        if (lookForward) {
-            // look forward on the test string
-            for (int start = 0; start < testString.length; start += repeatUnit.length) {
-                int end = start + repeatUnit.length;
-                byte[] unit = Arrays.copyOfRange(testString,start, end);
-                if(Arrays.equals(unit,repeatUnit))
-                    numRepeats++;
-                else
-                    break;
-            }
-            return numRepeats;
-        }
-
-        // look backward. For example, if repeatUnit = AT and testString = GATAT, number of repeat units is still 2
-        // look forward on the test string
-        for (int start = testString.length - repeatUnit.length; start >= 0; start -= repeatUnit.length) {
-            int end = start + repeatUnit.length;
-            byte[] unit = Arrays.copyOfRange(testString,start, end);
-            if(Arrays.equals(unit,repeatUnit))
-                numRepeats++;
-            else
-                break;
-        }
-        return numRepeats;
-    }
-
-    /**
-     * Helper function for isTandemRepeat that checks that allele matches somewhere on the reference
-     * @param ref
-     * @param alt
-     * @param refBasesStartingAtVCWithoutPad
-     * @return
-     */
-    protected static boolean isRepeatAllele(final Allele ref, final Allele alt, final String refBasesStartingAtVCWithoutPad) {
-        if ( ! Allele.oneIsPrefixOfOther(ref, alt) )
-            return false; // we require one allele be a prefix of another
-
-        if ( ref.length() > alt.length() ) { // we are a deletion
-            return basesAreRepeated(ref.getBaseString(), alt.getBaseString(), refBasesStartingAtVCWithoutPad, 2);
-        } else { // we are an insertion
-            return basesAreRepeated(alt.getBaseString(), ref.getBaseString(), refBasesStartingAtVCWithoutPad, 1);
-        }
-    }
-
-    protected static boolean basesAreRepeated(final String l, final String s, final String ref, final int minNumberOfMatches) {
-        final String potentialRepeat = l.substring(s.length()); // skip s bases
-
-        for ( int i = 0; i < minNumberOfMatches; i++) {
-            final int start = i * potentialRepeat.length();
-            final int end = (i+1) * potentialRepeat.length();
-            if ( ref.length() < end )
-                return false; // we ran out of bases to test
-            final String refSub = ref.substring(start, end);
-            if ( ! refSub.equals(potentialRepeat) )
-                return false; // repeat didn't match, fail
-        }
-
-        return true; // we passed all tests, we matched
-    }
-
-    public enum GenotypeAssignmentMethod {
-        /**
-         * set all of the genotype GT values to NO_CALL
-         */
-        SET_TO_NO_CALL,
-
-        /**
-         * Use the subsetted PLs to greedily assigned genotypes
-         */
-        USE_PLS_TO_ASSIGN,
-
-        /**
-         * Try to match the original GT calls, if at all possible
-         *
-         * Suppose I have 3 alleles: A/B/C and the following samples:
-         *
-         *       original_GT best_match to A/B best_match to A/C
-         * S1 => A/A A/A A/A
-         * S2 => A/B A/B A/A
-         * S3 => B/B B/B A/A
-         * S4 => B/C A/B A/C
-         * S5 => C/C A/A C/C
-         *
-         * Basically, all alleles not in the subset map to ref.  It means that het-alt genotypes
-         * when split into 2 bi-allelic variants will be het in each, which is good in some cases,
-         * rather than the undetermined behavior when using the PLs to assign, which could result
-         * in hom-var or hom-ref for each, depending on the exact PL values.
-         */
-        BEST_MATCH_TO_ORIGINAL,
-
-        /**
-         * do not even bother changing the GTs
-         */
-        DO_NOT_ASSIGN_GENOTYPES
-    }
-
-    /**
-     * subset the Variant Context to the specific set of alleles passed in (pruning the PLs appropriately)
-     *
-     * @param vc                 variant context with genotype likelihoods
-     * @param allelesToUse       which alleles from the vc are okay to use; *** must be in the same relative order as those in the original VC ***
-     * @param assignGenotypes    assignment strategy for the (subsetted) PLs
-     * @return a new non-null GenotypesContext
-     */
-    public static GenotypesContext subsetDiploidAlleles(final VariantContext vc,
-                                                        final List<Allele> allelesToUse,
-                                                        final GenotypeAssignmentMethod assignGenotypes) {
-        if ( allelesToUse.get(0).isNonReference() ) throw new IllegalArgumentException("First allele must be the reference allele");
-        if ( allelesToUse.size() == 1 ) throw new IllegalArgumentException("Cannot subset to only 1 alt allele");
-
-        // optimization: if no input genotypes, just exit
-        if (vc.getGenotypes().isEmpty()) return GenotypesContext.create();
-
-        // we need to determine which of the alternate alleles (and hence the likelihoods) to use and carry forward
-        final List<Integer> likelihoodIndexesToUse = determineLikelihoodIndexesToUse(vc, allelesToUse);
-
-        // create the new genotypes
-        return createGenotypesWithSubsettedLikelihoods(vc.getGenotypes(), vc, allelesToUse, likelihoodIndexesToUse, assignGenotypes);
-    }
-
-    /**
-     * Figure out which likelihood indexes to use for a selected down set of alleles
-     *
-     * @param originalVC        the original VariantContext
-     * @param allelesToUse      the subset of alleles to use
-     * @return a list of PL indexes to use or null if none
-     */
-    private static List<Integer> determineLikelihoodIndexesToUse(final VariantContext originalVC, final List<Allele> allelesToUse) {
-
-        // the bitset representing the allele indexes we want to keep
-        final boolean[] alleleIndexesToUse = getAlleleIndexBitset(originalVC, allelesToUse);
-
-        // an optimization: if we are supposed to use all (or none in the case of a ref call) of the alleles,
-        // then we can keep the PLs as is; otherwise, we determine which ones to keep
-        if ( MathUtils.countOccurrences(true, alleleIndexesToUse) == alleleIndexesToUse.length )
-            return null;
-
-        return getLikelihoodIndexes(originalVC, alleleIndexesToUse);
-    }
-
-    /**
-     * Get the actual likelihoods indexes to use given the corresponding allele indexes
-     *
-     * @param originalVC           the original VariantContext
-     * @param alleleIndexesToUse   the bitset representing the alleles to use (@see #getAlleleIndexBitset)
-     * @return a non-null List
-     */
-    private static List<Integer> getLikelihoodIndexes(final VariantContext originalVC, final boolean[] alleleIndexesToUse) {
-
-        final List<Integer> result = new ArrayList<>(30);
-
-        // numLikelihoods takes total # of alleles. Use default # of chromosomes (ploidy) = 2
-        final int numLikelihoods = GenotypeLikelihoods.numLikelihoods(originalVC.getNAlleles(), DEFAULT_PLOIDY);
-
-        for ( int PLindex = 0; PLindex < numLikelihoods; PLindex++ ) {
-            final GenotypeLikelihoods.GenotypeLikelihoodsAllelePair alleles = GenotypeLikelihoods.getAllelePair(PLindex);
-            // consider this entry only if both of the alleles are good
-            if ( alleleIndexesToUse[alleles.alleleIndex1] && alleleIndexesToUse[alleles.alleleIndex2] )
-                result.add(PLindex);
-        }
-
-        return result;
-    }
-
-    /**
-     * Given an original VariantContext and a list of alleles from that VC to keep,
-     * returns a bitset representing which allele indexes should be kept
-     *
-     * @param originalVC      the original VC
-     * @param allelesToKeep   the list of alleles to keep
-     * @return non-null bitset
-     */
-    private static boolean[] getAlleleIndexBitset(final VariantContext originalVC, final List<Allele> allelesToKeep) {
-        final int numOriginalAltAlleles = originalVC.getNAlleles() - 1;
-        final boolean[] alleleIndexesToKeep = new boolean[numOriginalAltAlleles + 1];
-
-        // the reference Allele is definitely still used
-        alleleIndexesToKeep[0] = true;
-        for ( int i = 0; i < numOriginalAltAlleles; i++ ) {
-            if ( allelesToKeep.contains(originalVC.getAlternateAllele(i)) )
-                alleleIndexesToKeep[i+1] = true;
-        }
-
-        return alleleIndexesToKeep;
-    }
-
-    /**
-     * Create the new GenotypesContext with the subsetted PLs and ADs
-     *
-     * @param originalGs               the original GenotypesContext
-     * @param vc                       the original VariantContext
-     * @param allelesToUse             the actual alleles to use with the new Genotypes
-     * @param likelihoodIndexesToUse   the indexes in the PL to use given the allelesToUse (@see #determineLikelihoodIndexesToUse())
-     * @param assignGenotypes          assignment strategy for the (subsetted) PLs
-     * @return a new non-null GenotypesContext
-     */
-    private static GenotypesContext createGenotypesWithSubsettedLikelihoods(final GenotypesContext originalGs,
-                                                                            final VariantContext vc,
-                                                                            final List<Allele> allelesToUse,
-                                                                            final List<Integer> likelihoodIndexesToUse,
-                                                                            final GenotypeAssignmentMethod assignGenotypes) {
-        // the new genotypes to create
-        final GenotypesContext newGTs = GenotypesContext.create(originalGs.size());
-
-        // make sure we are seeing the expected number of likelihoods per sample
-        final int expectedNumLikelihoods = GenotypeLikelihoods.numLikelihoods(vc.getNAlleles(), 2);
-
-        // the samples
-        final List<String> sampleIndices = originalGs.getSampleNamesOrderedByName();
-
-        // create the new genotypes
-        for ( int k = 0; k < originalGs.size(); k++ ) {
-            final Genotype g = originalGs.get(sampleIndices.get(k));
-            final GenotypeBuilder gb = new GenotypeBuilder(g);
-
-            // create the new likelihoods array from the alleles we are allowed to use
-            double[] newLikelihoods;
-            if ( !g.hasLikelihoods() ) {
-                // we don't have any likelihoods, so we null out PLs and make G ./.
-                newLikelihoods = null;
-                gb.noPL();
-            } else {
-                final double[] originalLikelihoods = g.getLikelihoods().getAsVector();
-                if ( likelihoodIndexesToUse == null ) {
-                    newLikelihoods = originalLikelihoods;
-                } else if ( originalLikelihoods.length != expectedNumLikelihoods ) {
-                    logger.debug("Wrong number of likelihoods in sample " + g.getSampleName() + " at " + vc + " got " + g.getLikelihoodsString() + " but expected " + expectedNumLikelihoods);
-                    newLikelihoods = null;
-                } else {
-                    newLikelihoods = new double[likelihoodIndexesToUse.size()];
-                    int newIndex = 0;
-                    for ( final int oldIndex : likelihoodIndexesToUse )
-                        newLikelihoods[newIndex++] = originalLikelihoods[oldIndex];
-
-                    // might need to re-normalize
-                    newLikelihoods = MathUtils.normalizeFromLog10(newLikelihoods, false, true);
-                }
-
-                if ( newLikelihoods == null || likelihoodsAreUninformative(newLikelihoods) )
-                    gb.noPL();
-                else
-                    gb.PL(newLikelihoods);
-            }
-
-            updateGenotypeAfterSubsetting(g.getAlleles(), gb, assignGenotypes, newLikelihoods, allelesToUse);
-            newGTs.add(gb.make());
-        }
-
-        return fixADFromSubsettedAlleles(newGTs, vc, allelesToUse);
-    }
-
-    private static boolean likelihoodsAreUninformative(final double[] likelihoods) {
-        return MathUtils.sum(likelihoods) > SUM_GL_THRESH_NOCALL;
-    }
-
-    /**
-     * Add the genotype call (GT) field to GenotypeBuilder using the requested algorithm assignmentMethod
-     *
-     * @param originalGT the original genotype calls, cannot be null
-     * @param gb the builder where we should put our newly called alleles, cannot be null
-     * @param assignmentMethod the method to use to do the assignment, cannot be null
-     * @param newLikelihoods a vector of likelihoods to use if the method requires PLs, should be log10 likelihoods, cannot be null
-     * @param allelesToUse the alleles we are using for our subsetting
-     */
-    public static void updateGenotypeAfterSubsetting(final List<Allele> originalGT,
-                                                     final GenotypeBuilder gb,
-                                                     final GenotypeAssignmentMethod assignmentMethod,
-                                                     final double[] newLikelihoods,
-                                                     final List<Allele> allelesToUse) {
-        switch ( assignmentMethod ) {
-            case DO_NOT_ASSIGN_GENOTYPES:
-                break;
-            case SET_TO_NO_CALL:
-                gb.alleles(NO_CALL_ALLELES);
-                gb.noGQ();
-                break;
-            case USE_PLS_TO_ASSIGN:
-                if ( newLikelihoods == null || likelihoodsAreUninformative(newLikelihoods) ) {
-                    // if there is no mass on the (new) likelihoods, then just no-call the sample
-                    gb.alleles(NO_CALL_ALLELES);
-                    gb.noGQ();
-                } else {
-                    // find the genotype with maximum likelihoods
-                    final int PLindex = MathUtils.maxElementIndex(newLikelihoods);
-                    GenotypeLikelihoods.GenotypeLikelihoodsAllelePair alleles = GenotypeLikelihoods.getAllelePair(PLindex);
-                    gb.alleles(Arrays.asList(allelesToUse.get(alleles.alleleIndex1), allelesToUse.get(alleles.alleleIndex2)));
-                    gb.log10PError(GenotypeLikelihoods.getGQLog10FromLikelihoods(PLindex, newLikelihoods));
-                }
-                break;
-            case BEST_MATCH_TO_ORIGINAL:
-                final List<Allele> best = new LinkedList<>();
-                final Allele ref = allelesToUse.get(0); // WARNING -- should be checked in input argument
-                for ( final Allele originalAllele : originalGT ) {
-                    best.add(allelesToUse.contains(originalAllele) ? originalAllele : ref);
-                }
-                gb.noGQ();
-                gb.noPL();
-                gb.alleles(best);
-                break;
-        }
-    }
-
-    /**
-     * Subset the samples in VC to reference only information with ref call alleles
-     *
-     * Preserves DP if present
-     *
-     * @param vc the variant context to subset down to
-     * @param ploidy ploidy to use if a genotype doesn't have any alleles
-     * @return a GenotypesContext
-     */
-    public static GenotypesContext subsetToRefOnly(final VariantContext vc, final int ploidy) {
-        if ( vc == null ) throw new IllegalArgumentException("vc cannot be null");
-        if ( ploidy < 1 ) throw new IllegalArgumentException("ploidy must be >= 1 but got " + ploidy);
-
-        // the genotypes with PLs
-        final GenotypesContext oldGTs = vc.getGenotypes();
-
-        // optimization: if no input genotypes, just exit
-        if (oldGTs.isEmpty()) return oldGTs;
-
-        // the new genotypes to create
-        final GenotypesContext newGTs = GenotypesContext.create(oldGTs.size());
-
-        final Allele ref = vc.getReference();
-        final List<Allele> diploidRefAlleles = Arrays.asList(ref, ref);
-
-        // create the new genotypes
-        for ( final Genotype g : vc.getGenotypes() ) {
-            final int gPloidy = g.getPloidy() == 0 ? ploidy : g.getPloidy();
-            final List<Allele> refAlleles = gPloidy == 2 ? diploidRefAlleles : Collections.nCopies(gPloidy, ref);
-            final GenotypeBuilder gb = new GenotypeBuilder(g.getSampleName(), refAlleles);
-            if ( g.hasDP() ) gb.DP(g.getDP());
-            if ( g.hasGQ() ) gb.GQ(g.getGQ());
-            newGTs.add(gb.make());
-        }
-
-        return newGTs;
-    }
-
-    /**
-     * Assign genotypes (GTs) to the samples in the Variant Context greedily based on the PLs
-     *
-     * @param vc            variant context with genotype likelihoods
-     * @return genotypes context
-     */
-    public static GenotypesContext assignDiploidGenotypes(final VariantContext vc) {
-        return subsetDiploidAlleles(vc, vc.getAlleles(), GenotypeAssignmentMethod.USE_PLS_TO_ASSIGN);
-    }
-
-    /**
-     * Split variant context into its biallelic components if there are more than 2 alleles
-     *
-     * For VC has A/B/C alleles, returns A/B and A/C contexts.
-     * Genotypes are all no-calls now (it's not possible to fix them easily)
-     * Alleles are right trimmed to satisfy VCF conventions
-     *
-     * If vc is biallelic or non-variant it is just returned
-     *
-     * Chromosome counts are updated (but they are by definition 0)
-     *
-     * @param vc a potentially multi-allelic variant context
-     * @return a list of bi-allelic (or monomorphic) variant context
-     */
-    public static List<VariantContext> splitVariantContextToBiallelics(final VariantContext vc) {
-        return splitVariantContextToBiallelics(vc, false, GenotypeAssignmentMethod.SET_TO_NO_CALL);
-    }
-
-    /**
-     * Split variant context into its biallelic components if there are more than 2 alleles
-     *
-     * For VC has A/B/C alleles, returns A/B and A/C contexts.
-     * Genotypes are all no-calls now (it's not possible to fix them easily)
-     * Alleles are right trimmed to satisfy VCF conventions
-     *
-     * If vc is biallelic or non-variant it is just returned
-     *
-     * Chromosome counts are updated (but they are by definition 0)
-     *
-     * @param vc a potentially multi-allelic variant context
-     * @param trimLeft if true, we will also left trim alleles, potentially moving the resulting vcs forward on the genome
-     * @return a list of bi-allelic (or monomorphic) variant context
-     */
-    public static List<VariantContext> splitVariantContextToBiallelics(final VariantContext vc, final boolean trimLeft, final GenotypeAssignmentMethod genotypeAssignmentMethod) {
-        if ( ! vc.isVariant() || vc.isBiallelic() )
-            // non variant or biallelics already satisfy the contract
-            return Collections.singletonList(vc);
-        else {
-            final List<VariantContext> biallelics = new LinkedList<>();
-
-            for ( final Allele alt : vc.getAlternateAlleles() ) {
-                VariantContextBuilder builder = new VariantContextBuilder(vc);
-                final List<Allele> alleles = Arrays.asList(vc.getReference(), alt);
-                builder.alleles(alleles);
-                builder.genotypes(subsetDiploidAlleles(vc, alleles, genotypeAssignmentMethod));
-                VariantContextUtils.calculateChromosomeCounts(builder, true);
-                final VariantContext trimmed = trimAlleles(builder.make(), trimLeft, true);
-                biallelics.add(trimmed);
-            }
-
-            return biallelics;
-        }
-    }
-
-    public static Genotype removePLsAndAD(final Genotype g) {
-        return ( g.hasLikelihoods() || g.hasAD() ) ? new GenotypeBuilder(g).noPL().noAD().make() : g;
-    }
-
-    //TODO consider refactor variant-context merging code so that we share as much as possible between
-    //TODO simpleMerge and referenceConfidenceMerge
-    //TODO likely using a separate helper class or hierarchy.
-    /**
-     * Merges VariantContexts into a single hybrid.  Takes genotypes for common samples in priority order, if provided.
-     * If uniquifySamples is true, the priority order is ignored and names are created by concatenating the VC name with
-     * the sample name
-     *
-     * @param unsortedVCs               collection of unsorted VCs
-     * @param priorityListOfVCs         priority list detailing the order in which we should grab the VCs
-     * @param filteredRecordMergeType   merge type for filtered records
-     * @param genotypeMergeOptions      merge option for genotypes
-     * @param annotateOrigin            should we annotate the set it came from?
-     * @param printMessages             should we print messages?
-     * @param setKey                    the key name of the set
-     * @param filteredAreUncalled       are filtered records uncalled?
-     * @param mergeInfoWithMaxAC        should we merge in info from the VC with maximum allele count?
-     * @return new VariantContext       representing the merge of unsortedVCs
-     */
-    public static VariantContext simpleMerge(final Collection<VariantContext> unsortedVCs,
-                                             final List<String> priorityListOfVCs,
-                                             final FilteredRecordMergeType filteredRecordMergeType,
-                                             final GenotypeMergeType genotypeMergeOptions,
-                                             final boolean annotateOrigin,
-                                             final boolean printMessages,
-                                             final String setKey,
-                                             final boolean filteredAreUncalled,
-                                             final boolean mergeInfoWithMaxAC ) {
-        int originalNumOfVCs = priorityListOfVCs == null ? 0 : priorityListOfVCs.size();
-        return simpleMerge(unsortedVCs, priorityListOfVCs, originalNumOfVCs, filteredRecordMergeType, genotypeMergeOptions, annotateOrigin, printMessages, setKey, filteredAreUncalled, mergeInfoWithMaxAC);
-    }
-
-    /**
-     * Merges VariantContexts into a single hybrid.  Takes genotypes for common samples in priority order, if provided.
-     * If uniquifySamples is true, the priority order is ignored and names are created by concatenating the VC name with
-     * the sample name.
-     * simpleMerge does not verify any more unique sample names EVEN if genotypeMergeOptions == GenotypeMergeType.REQUIRE_UNIQUE. One should use
-     * SampleUtils.verifyUniqueSamplesNames to check that before using simpleMerge.
-     *
-     * For more information on this method see: http://www.thedistractionnetwork.com/programmer-problem/
-     *
-     * @param unsortedVCs               collection of unsorted VCs
-     * @param priorityListOfVCs         priority list detailing the order in which we should grab the VCs
-     * @param filteredRecordMergeType   merge type for filtered records
-     * @param genotypeMergeOptions      merge option for genotypes
-     * @param annotateOrigin            should we annotate the set it came from?
-     * @param printMessages             should we print messages?
-     * @param setKey                    the key name of the set
-     * @param filteredAreUncalled       are filtered records uncalled?
-     * @param mergeInfoWithMaxAC        should we merge in info from the VC with maximum allele count?
-     * @return new VariantContext       representing the merge of unsortedVCs
-     */
-    public static VariantContext simpleMerge(final Collection<VariantContext> unsortedVCs,
-                                             final List<String> priorityListOfVCs,
-                                             final int originalNumOfVCs,
-                                             final FilteredRecordMergeType filteredRecordMergeType,
-                                             final GenotypeMergeType genotypeMergeOptions,
-                                             final boolean annotateOrigin,
-                                             final boolean printMessages,
-                                             final String setKey,
-                                             final boolean filteredAreUncalled,
-                                             final boolean mergeInfoWithMaxAC ) {
-        if ( unsortedVCs == null || unsortedVCs.size() == 0 )
-            return null;
-
-        if (priorityListOfVCs != null && originalNumOfVCs != priorityListOfVCs.size())
-            throw new IllegalArgumentException("the number of the original VariantContexts must be the same as the number of VariantContexts in the priority list");
-
-        if ( annotateOrigin && priorityListOfVCs == null && originalNumOfVCs == 0)
-            throw new IllegalArgumentException("Cannot merge calls and annotate their origins without a complete priority list of VariantContexts or the number of original VariantContexts");
-
-        final List<VariantContext> preFilteredVCs = sortVariantContextsByPriority(unsortedVCs, priorityListOfVCs, genotypeMergeOptions);
-        // Make sure all variant contexts are padded with reference base in case of indels if necessary
-        List<VariantContext> VCs = new ArrayList<>();
-
-        for (final VariantContext vc : preFilteredVCs) {
-            if ( ! filteredAreUncalled || vc.isNotFiltered() )
-                VCs.add(vc);
-        }
-
-        if ( VCs.size() == 0 ) // everything is filtered out and we're filteredAreUncalled
-            return null;
-
-        // establish the baseline info from the first VC
-        final VariantContext first = VCs.get(0);
-        final String name = first.getSource();
-        final Allele refAllele = determineReferenceAllele(VCs);
-
-        final LinkedHashSet<Allele> alleles = new LinkedHashSet<>();
-        final Set<String> filters = new HashSet<>();
-        final Map<String, Object> attributes = new LinkedHashMap<>();
-        final Set<String> inconsistentAttributes = new HashSet<>();
-        final Set<String> variantSources = new HashSet<>(); // contains the set of sources we found in our set of VCs that are variant
-        final Set<String> rsIDs = new LinkedHashSet<>(1); // most of the time there's one id
-
-        VariantContext longestVC = first;
-        int depth = 0;
-        int maxAC = -1;
-        final Map<String, Object> attributesWithMaxAC = new LinkedHashMap<>();
-        double log10PError = CommonInfo.NO_LOG10_PERROR;
-        boolean anyVCHadFiltersApplied = false;
-        VariantContext vcWithMaxAC = null;
-        GenotypesContext genotypes = GenotypesContext.create();
-
-        // counting the number of filtered and variant VCs
-        int nFiltered = 0;
-
-        boolean remapped = false;
-
-        // cycle through and add info from the other VCs, making sure the loc/reference matches
-        for ( final VariantContext vc : VCs ) {
-            if ( longestVC.getStart() != vc.getStart() )
-                throw new IllegalStateException("BUG: attempting to merge VariantContexts with different start sites: first="+ first.toString() + " second=" + vc.toString());
-
-            if ( VariantContextUtils.getSize(vc) > VariantContextUtils.getSize(longestVC) )
-                longestVC = vc; // get the longest location
-
-            nFiltered += vc.isFiltered() ? 1 : 0;
-            if ( vc.isVariant() ) variantSources.add(vc.getSource());
-
-            AlleleMapper alleleMapping = resolveIncompatibleAlleles(refAllele, vc, alleles);
-            remapped = remapped || alleleMapping.needsRemapping();
-
-            alleles.addAll(alleleMapping.values());
-
-            mergeGenotypes(genotypes, vc, alleleMapping, genotypeMergeOptions == GenotypeMergeType.UNIQUIFY);
-
-            // We always take the QUAL of the first VC with a non-MISSING qual for the combined value
-            if ( log10PError == CommonInfo.NO_LOG10_PERROR )
-                log10PError =  vc.getLog10PError();
-
-            filters.addAll(vc.getFilters());
-            anyVCHadFiltersApplied |= vc.filtersWereApplied();
-
-            //
-            // add attributes
-            //
-            // special case DP (add it up) and ID (just preserve it)
-            //
-            if (vc.hasAttribute(VCFConstants.DEPTH_KEY))
-                depth += vc.getAttributeAsInt(VCFConstants.DEPTH_KEY, 0);
-            if ( vc.hasID() ) rsIDs.add(vc.getID());
-            if (mergeInfoWithMaxAC && vc.hasAttribute(VCFConstants.ALLELE_COUNT_KEY)) {
-                String rawAlleleCounts = vc.getAttributeAsString(VCFConstants.ALLELE_COUNT_KEY, null);
-                // lets see if the string contains a "," separator
-                if (rawAlleleCounts.contains(VCFConstants.INFO_FIELD_ARRAY_SEPARATOR)) {
-                    final List<String> alleleCountArray = Arrays.asList(rawAlleleCounts.substring(1, rawAlleleCounts.length() - 1).split(VCFConstants.INFO_FIELD_ARRAY_SEPARATOR));
-                    for (final String alleleCount : alleleCountArray) {
-                        final int ac = Integer.valueOf(alleleCount.trim());
-                        if (ac > maxAC) {
-                            maxAC = ac;
-                            vcWithMaxAC = vc;
-                        }
-                    }
-                } else {
-                    final int ac = Integer.valueOf(rawAlleleCounts);
-                    if (ac > maxAC) {
-                        maxAC = ac;
-                        vcWithMaxAC = vc;
-                    }
-                }
-            }
-
-            for (final Map.Entry<String, Object> p : vc.getAttributes().entrySet()) {
-                final String key = p.getKey();
-                final Object value = p.getValue();
-                // only output annotations that have the same value in every input VC
-                // if we don't like the key already, don't go anywhere
-                if ( ! inconsistentAttributes.contains(key) ) {
-                    final boolean alreadyFound = attributes.containsKey(key);
-                    final Object boundValue = attributes.get(key);
-                    final boolean boundIsMissingValue = alreadyFound && boundValue.equals(VCFConstants.MISSING_VALUE_v4);
-
-                    if ( alreadyFound && ! boundValue.equals(value) && ! boundIsMissingValue ) {
-                        // we found the value but we're inconsistent, put it in the exclude list
-                        inconsistentAttributes.add(key);
-                        attributes.remove(key);
-                    } else if ( ! alreadyFound || boundIsMissingValue )  { // no value
-                        attributes.put(key, value);
-                    }
-                }
-            }
-        }
-
-        // if we have more alternate alleles in the merged VC than in one or more of the
-        // original VCs, we need to strip out the GL/PLs (because they are no longer accurate), as well as allele-dependent attributes like AC,AF, and AD
-        for ( final VariantContext vc : VCs ) {
-            if (vc.getAlleles().size() == 1)
-                continue;
-            if ( hasPLIncompatibleAlleles(alleles, vc.getAlleles())) {
-                if ( ! genotypes.isEmpty() ) {
-                    logger.debug(String.format("Stripping PLs at %s:%d-%d due to incompatible alleles merged=%s vs. single=%s",
-                            vc.getChr(), vc.getStart(), vc.getEnd(), alleles, vc.getAlleles()));
-                }
-                genotypes = stripPLsAndAD(genotypes);
-                // this will remove stale AC,AF attributed from vc
-                VariantContextUtils.calculateChromosomeCounts(vc, attributes, true);
-                break;
-            }
-        }
-
-        // take the VC with the maxAC and pull the attributes into a modifiable map
-        if ( mergeInfoWithMaxAC && vcWithMaxAC != null ) {
-            attributesWithMaxAC.putAll(vcWithMaxAC.getAttributes());
-        }
-
-        // if at least one record was unfiltered and we want a union, clear all of the filters
-        if ( (filteredRecordMergeType == FilteredRecordMergeType.KEEP_IF_ANY_UNFILTERED && nFiltered != VCs.size()) || filteredRecordMergeType == FilteredRecordMergeType.KEEP_UNCONDITIONAL )
-            filters.clear();
-
-
-        if ( annotateOrigin ) { // we care about where the call came from
-            String setValue;
-            if ( nFiltered == 0 && variantSources.size() == originalNumOfVCs ) // nothing was unfiltered
-                setValue = MERGE_INTERSECTION;
-            else if ( nFiltered == VCs.size() )     // everything was filtered out
-                setValue = MERGE_FILTER_IN_ALL;
-            else if ( variantSources.isEmpty() )    // everyone was reference
-                setValue = MERGE_REF_IN_ALL;
-            else {
-                final LinkedHashSet<String> s = new LinkedHashSet<>();
-                for ( final VariantContext vc : VCs )
-                    if ( vc.isVariant() )
-                        s.add( vc.isFiltered() ? MERGE_FILTER_PREFIX + vc.getSource() : vc.getSource() );
-                setValue = Utils.join("-", s);
-            }
-
-            if ( setKey != null ) {
-                attributes.put(setKey, setValue);
-                if( mergeInfoWithMaxAC && vcWithMaxAC != null ) {
-                    attributesWithMaxAC.put(setKey, setValue);
-                }
-            }
-        }
-
-        if ( depth > 0 )
-            attributes.put(VCFConstants.DEPTH_KEY, String.valueOf(depth));
-
-        final String ID = rsIDs.isEmpty() ? VCFConstants.EMPTY_ID_FIELD : Utils.join(",", rsIDs);
-
-        final VariantContextBuilder builder = new VariantContextBuilder().source(name).id(ID);
-        builder.loc(longestVC.getChr(), longestVC.getStart(), longestVC.getEnd());
-        builder.alleles(alleles);
-        builder.genotypes(genotypes);
-        builder.log10PError(log10PError);
-        if ( anyVCHadFiltersApplied ) {
-            builder.filters(filters.isEmpty() ? filters : new TreeSet<>(filters));
-        }
-        builder.attributes(new TreeMap<>(mergeInfoWithMaxAC ? attributesWithMaxAC : attributes));
-
-        // Trim the padded bases of all alleles if necessary
-        final VariantContext merged = builder.make();
-        if ( printMessages && remapped ) System.out.printf("Remapped => %s%n", merged);
-        return merged;
-    }
-
-    //TODO as part of a larger refactoring effort remapAlleles can be merged with createAlleleMapping.
-
-    public static GenotypesContext stripPLsAndAD(final GenotypesContext genotypes) {
-        final GenotypesContext newGs = GenotypesContext.create(genotypes.size());
-
-        for ( final Genotype g : genotypes ) {
-            newGs.add(removePLsAndAD(g));
-        }
-
-        return newGs;
-    }
-
-    /**
-     * Updates the PLs and AD of the Genotypes in the newly selected VariantContext to reflect the fact that some alleles
-     * from the original VariantContext are no longer present.
-     *
-     * @param selectedVC  the selected (new) VariantContext
-     * @param originalVC  the original VariantContext
-     * @return a new non-null GenotypesContext
-     */
-    public static GenotypesContext updatePLsAndAD(final VariantContext selectedVC, final VariantContext originalVC) {
-        final int numNewAlleles = selectedVC.getAlleles().size();
-        final int numOriginalAlleles = originalVC.getAlleles().size();
-
-        // if we have more alternate alleles in the selected VC than in the original VC, then something is wrong
-        if ( numNewAlleles > numOriginalAlleles )
-            throw new IllegalArgumentException("Attempting to fix PLs and AD from what appears to be a *combined* VCF and not a selected one");
-
-        final GenotypesContext oldGs = selectedVC.getGenotypes();
-
-        // if we have the same number of alternate alleles in the selected VC as in the original VC, then we don't need to fix anything
-        if ( numNewAlleles == numOriginalAlleles )
-            return oldGs;
-
-        return fixGenotypesFromSubsettedAlleles(oldGs, originalVC, selectedVC.getAlleles());
-    }
-
-    /**
-     * Fix the PLs and ADs for the GenotypesContext of a VariantContext that has been subset
-     *
-     * @param originalGs       the original GenotypesContext
-     * @param originalVC       the original VariantContext
-     * @param allelesToUse     the new (sub)set of alleles to use
-     * @return a new non-null GenotypesContext
-     */
-    static private GenotypesContext fixGenotypesFromSubsettedAlleles(final GenotypesContext originalGs, final VariantContext originalVC, final List<Allele> allelesToUse) {
-
-        // we need to determine which of the alternate alleles (and hence the likelihoods) to use and carry forward
-        final List<Integer> likelihoodIndexesToUse = determineLikelihoodIndexesToUse(originalVC, allelesToUse);
-
-        // create the new genotypes
-        return createGenotypesWithSubsettedLikelihoods(originalGs, originalVC, allelesToUse, likelihoodIndexesToUse, GenotypeAssignmentMethod.DO_NOT_ASSIGN_GENOTYPES);
-    }
-
-    /**
-     * Fix the AD for the GenotypesContext of a VariantContext that has been subset
-     *
-     * @param originalGs       the original GenotypesContext
-     * @param originalVC       the original VariantContext
-     * @param allelesToUse     the new (sub)set of alleles to use
-     * @return a new non-null GenotypesContext
-     */
-    static private GenotypesContext fixADFromSubsettedAlleles(final GenotypesContext originalGs, final VariantContext originalVC, final List<Allele> allelesToUse) {
-
-        // the bitset representing the allele indexes we want to keep
-        final boolean[] alleleIndexesToUse = getAlleleIndexBitset(originalVC, allelesToUse);
-
-        // the new genotypes to create
-        final GenotypesContext newGTs = GenotypesContext.create(originalGs.size());
-
-        // the samples
-        final List<String> sampleIndices = originalGs.getSampleNamesOrderedByName();
-
-        // create the new genotypes
-        for ( int k = 0; k < originalGs.size(); k++ ) {
-            final Genotype g = originalGs.get(sampleIndices.get(k));
-            newGTs.add(fixAD(g, alleleIndexesToUse, allelesToUse.size()));
-        }
-
-        return newGTs;
-    }
-
-    /**
-     * Fix the AD for the given Genotype
-     *
-     * @param genotype              the original Genotype
-     * @param alleleIndexesToUse    a bitset describing whether or not to keep a given index
-     * @param nAllelesToUse         how many alleles we are keeping
-     * @return a non-null Genotype
-     */
-    private static Genotype fixAD(final Genotype genotype, final boolean[] alleleIndexesToUse, final int nAllelesToUse) {
-        // if it ain't broke don't fix it
-        if ( !genotype.hasAD() )
-            return genotype;
-
-        final GenotypeBuilder builder = new GenotypeBuilder(genotype);
-
-        final int[] oldAD = genotype.getAD();
-        if ( oldAD.length != alleleIndexesToUse.length ) {
-            builder.noAD();
-        } else {
-            final int[] newAD = new int[nAllelesToUse];
-            int currentIndex = 0;
-            for ( int i = 0; i < oldAD.length; i++ ) {
-                if ( alleleIndexesToUse[i] )
-                    newAD[currentIndex++] = oldAD[i];
-            }
-            builder.AD(newAD);
-        }
-        return builder.make();
-    }
-
    /**
     * Convenience overload: determines the reference allele for the given contexts with no
     * restricting location (delegates to the two-argument overload passing a null loc).
     *
     * @param VCs the variant contexts to examine
     * @return whatever the two-argument overload returns for these contexts and a null location
     */
    private static Allele determineReferenceAllele(final List<VariantContext> VCs) {
        return determineReferenceAllele(VCs, null);
    }
-
-    public static boolean contextMatchesLoc(final VariantContext vc, final GenomeLoc loc) {
-        return loc == null || loc.getStart() == vc.getStart();
-    }
-
-    static private AlleleMapper resolveIncompatibleAlleles(final Allele refAllele, final VariantContext vc, final LinkedHashSet<Allele> allAlleles) {
-        if ( refAllele.equals(vc.getReference()) )
-            return new AlleleMapper(vc);
-        else {
-            final Map<Allele, Allele> map = createAlleleMapping(refAllele, vc, allAlleles);
-            map.put(vc.getReference(), refAllele);
-            return new AlleleMapper(map);
-        }
-    }
-
-    //TODO as part of a larger refactoring effort {@link #createAlleleMapping} can be merged with {@link ReferenceConfidenceVariantContextMerger#remapAlleles}.
-    /**
-     * Create an allele mapping for the given context where its reference allele must (potentially) be extended to the given allele
-     *
-     * The refAllele is the longest reference allele seen at this start site.
-     * So imagine it is:
-     * refAllele: ACGTGA
-     * myRef:     ACGT
-     * myAlt:     A
-     *
-     * We need to remap all of the alleles in vc to include the extra GA so that
-     * myRef => refAllele and myAlt => AGA
-     *
-     * @param refAllele          the new (extended) reference allele
-     * @param oneVC              the Variant Context to extend
-     * @param currentAlleles     the list of alleles already created
-     * @return a non-null mapping of original alleles to new (extended) ones
-     */
-    private static Map<Allele, Allele> createAlleleMapping(final Allele refAllele,
-                                                           final VariantContext oneVC,
-                                                           final Collection<Allele> currentAlleles) {
-        final Allele myRef = oneVC.getReference();
-        if ( refAllele.length() <= myRef.length() ) throw new IllegalStateException("BUG: myRef="+myRef+" is longer than refAllele="+refAllele);
-
-        final byte[] extraBases = Arrays.copyOfRange(refAllele.getBases(), myRef.length(), refAllele.length());
-
-        final Map<Allele, Allele> map = new HashMap<>();
-        for ( final Allele a : oneVC.getAlternateAlleles() ) {
-            if ( isUsableAlternateAllele(a) ) {
-                Allele extended = Allele.extend(a, extraBases);
-                for ( final Allele b : currentAlleles )
-                    if ( extended.equals(b) )
-                        extended = b;
-                map.put(a, extended);
-            }
-        }
-
-        return map;
-    }
-
-    static private boolean isUsableAlternateAllele(final Allele allele) {
-        return ! (allele.isReference() || allele.isSymbolic() );
-    }
-
-    public static List<VariantContext> sortVariantContextsByPriority(Collection<VariantContext> unsortedVCs, List<String> priorityListOfVCs, GenotypeMergeType mergeOption ) {
-        if ( mergeOption == GenotypeMergeType.PRIORITIZE && priorityListOfVCs == null )
-            throw new IllegalArgumentException("Cannot merge calls by priority with a null priority list");
-
-        if ( priorityListOfVCs == null || mergeOption == GenotypeMergeType.UNSORTED )
-            return new ArrayList<>(unsortedVCs);
-        else {
-            ArrayList<VariantContext> sorted = new ArrayList<>(unsortedVCs);
-            Collections.sort(sorted, new CompareByPriority(priorityListOfVCs));
-            return sorted;
-        }
-    }
-
-    private static void mergeGenotypes(GenotypesContext mergedGenotypes, VariantContext oneVC, AlleleMapper alleleMapping, boolean uniquifySamples) {
-        //TODO: should we add a check for cases when the genotypeMergeOption is REQUIRE_UNIQUE
-        for ( final Genotype g : oneVC.getGenotypes() ) {
-            final String name = mergedSampleName(oneVC.getSource(), g.getSampleName(), uniquifySamples);
-            if ( ! mergedGenotypes.containsSample(name) ) {
-                // only add if the name is new
-                Genotype newG = g;
-
-                if ( uniquifySamples || alleleMapping.needsRemapping() ) {
-                    final List<Allele> alleles = alleleMapping.needsRemapping() ? alleleMapping.remap(g.getAlleles()) : g.getAlleles();
-                    newG = new GenotypeBuilder(g).name(name).alleles(alleles).make();
-                }
-
-                mergedGenotypes.add(newG);
-            }
-        }
-    }
-
    /**
     * Cached NO_CALL immutable lists where the position ith contains the list with i elements.
     */
    // Grown on demand by ensureNoCallListsCapacity(); reads from noCallAlleles() are
    // unsynchronized, so the array reference may be replaced while readers hold the old one.
    private static List<Allele>[] NOCALL_LISTS = new List[] {
            Collections.emptyList(),
            Collections.singletonList(Allele.NO_CALL),
            Collections.nCopies(2,Allele.NO_CALL)
    };
-
    /**
     * Synchronized code to ensure that {@link #NOCALL_LISTS} has enough entries beyond the requested ploidy
     * @param capacity the requested ploidy.
     */
    private static synchronized void ensureNoCallListsCapacity(final int capacity) {
        final int currentCapacity = NOCALL_LISTS.length - 1;
        // another thread may have grown the array while we waited on the lock
        if (currentCapacity >= capacity)
            return;
        // grow to at least the requested capacity, at least doubling to amortize future growth
        NOCALL_LISTS = Arrays.copyOf(NOCALL_LISTS,Math.max(capacity,currentCapacity << 1) + 1);
        // fill the newly added slots with immutable NO_CALL lists of the matching ploidy
        for (int i = currentCapacity + 1; i < NOCALL_LISTS.length; i++)
            NOCALL_LISTS[i] = Collections.nCopies(i,Allele.NO_CALL);
    }
-
-    /**
-     * Returns a {@link Allele#NO_CALL NO_CALL} allele list provided the ploidy.
-     *
-     * @param ploidy the required ploidy.
-     *
-     * @return never {@code null}, but an empty list if {@code ploidy} is equal or less than 0. The returned list
-     *   might or might not be mutable.
-     */
-    public static List<Allele> noCallAlleles(final int ploidy) {
-        if (NOCALL_LISTS.length <= ploidy)
-            ensureNoCallListsCapacity(ploidy);
-        return NOCALL_LISTS[ploidy];
-    }
-
-
-    /**
-     * This is just a safe wrapper around GenotypeLikelihoods.calculatePLindex()
-     *
-     * @param originalIndex1   the index of the first allele
-     * @param originalIndex2   the index of the second allele
-     * @return the PL index
-     */
-    protected static int calculatePLindexFromUnorderedIndexes(final int originalIndex1, final int originalIndex2) {
-        // we need to make sure they are ordered correctly
-        return ( originalIndex2 < originalIndex1 ) ? GenotypeLikelihoods.calculatePLindex(originalIndex2, originalIndex1) : GenotypeLikelihoods.calculatePLindex(originalIndex1, originalIndex2);
-    }
-
-    public static String mergedSampleName(String trackName, String sampleName, boolean uniquify ) {
-        return uniquify ? sampleName + "." + trackName : sampleName;
-    }
-
    /**
     * Trim the alleles in inputVC from the reverse direction
     *
     * Equivalent to {@code trimAlleles(inputVC, false, true)}: only reverse (right-side)
     * trimming is performed; the forward direction is left untouched.
     *
     * @param inputVC a non-null input VC whose alleles might need a haircut
     * @return a non-null VariantContext (may be == to inputVC) with alleles trimmed up
     */
    public static VariantContext reverseTrimAlleles( final VariantContext inputVC ) {
        return trimAlleles(inputVC, false, true);
    }
-
    /**
     * Trim the alleles in inputVC from the forward direction
     *
     * Equivalent to {@code trimAlleles(inputVC, true, false)}: only forward (left-side)
     * trimming is performed; the reverse direction is left untouched.
     *
     * @param inputVC a non-null input VC whose alleles might need a haircut
     * @return a non-null VariantContext (may be == to inputVC) with alleles trimmed up
     */
    public static VariantContext forwardTrimAlleles( final VariantContext inputVC ) {
        return trimAlleles(inputVC, true, false);
    }
-
-    /**
-     * Trim the alleles in inputVC forward and reverse, as requested
-     *
-     * @param inputVC a non-null input VC whose alleles might need a haircut
-     * @param trimForward should we trim up the alleles from the forward direction?
-     * @param trimReverse should we trim up the alleles from the reverse direction?
-     * @return a non-null VariantContext (may be == to inputVC) with trimmed up alleles
-     */
-    @Ensures("result != null")
-    public static VariantContext trimAlleles(final VariantContext inputVC, final boolean trimForward, final boolean trimReverse) {
-        if ( inputVC == null ) throw new IllegalArgumentException("inputVC cannot be null");
-
-        if ( inputVC.getNAlleles() <= 1 || inputVC.isSNP() )
-            return inputVC;
-
-        // see whether we need to trim common reference base from all alleles
-        final int revTrim = trimReverse ? computeReverseClipping(inputVC.getAlleles(), inputVC.getReference().getDisplayString().getBytes()) : 0;
-        final VariantContext revTrimVC = trimAlleles(inputVC, -1, revTrim);
-        final int fwdTrim = trimForward ? computeForwardClipping(revTrimVC.getAlleles()) : -1;
-        final VariantContext vc= trimAlleles(revTrimVC, fwdTrim, 0);
-        return vc;
-    }
-
    /**
     * Trim up alleles in inputVC, cutting out all bases up to fwdTrimEnd inclusive and
     * the last revTrim bases from the end
     *
     * @param inputVC a non-null input VC
     * @param fwdTrimEnd bases up to this index (can be -1) will be removed from the start of all alleles
     * @param revTrim the last revTrim bases of each allele will be clipped off as well
     * @return a non-null VariantContext (may be == to inputVC) with trimmed up alleles
     */
    @Requires({"inputVC != null"})
    @Ensures("result != null")
    protected static VariantContext trimAlleles(final VariantContext inputVC,
                                                final int fwdTrimEnd,
                                                final int revTrim) {
        if( fwdTrimEnd == -1 && revTrim == 0 ) // nothing to do, so just return inputVC unmodified
            return inputVC;

        final List<Allele> alleles = new LinkedList<>();
        final Map<Allele, Allele> originalToTrimmedAlleleMap = new HashMap<>();

        for (final Allele a : inputVC.getAlleles()) {
            if (a.isSymbolic()) {
                // symbolic alleles carry no concrete bases, so they pass through untrimmed
                alleles.add(a);
                originalToTrimmedAlleleMap.put(a, a);
            } else {
                // get bases for current allele and create a new one with trimmed bases
                final byte[] newBases = Arrays.copyOfRange(a.getBases(), fwdTrimEnd+1, a.length()-revTrim);
                final Allele trimmedAllele = Allele.create(newBases, a.isReference());
                alleles.add(trimmedAllele);
                originalToTrimmedAlleleMap.put(a, trimmedAllele);
            }
        }

        // now we can recreate new genotypes with trimmed alleles
        final AlleleMapper alleleMapper = new AlleleMapper(originalToTrimmedAlleleMap);
        final GenotypesContext genotypes = updateGenotypesWithMappedAlleles(inputVC.getGenotypes(), alleleMapper);

        // forward trimming shifts the start coordinate by the number of bases removed from the front
        final int start = inputVC.getStart() + (fwdTrimEnd + 1);
        final VariantContextBuilder builder = new VariantContextBuilder(inputVC);
        builder.start(start);
        // stop is derived from alleles.get(0) — presumably the reference allele per getAlleles() ordering; verify against caller
        builder.stop(start + alleles.get(0).length() - 1);
        builder.alleles(alleles);
        builder.genotypes(genotypes);
        return builder.make();
    }
-
-    @Requires("originalGenotypes != null && alleleMapper != null")
-    protected static GenotypesContext updateGenotypesWithMappedAlleles(final GenotypesContext originalGenotypes, final AlleleMapper alleleMapper) {
-        final GenotypesContext updatedGenotypes = GenotypesContext.create(originalGenotypes.size());
-
-        for ( final Genotype genotype : originalGenotypes ) {
-            final List<Allele> updatedAlleles = alleleMapper.remap(genotype.getAlleles());
-            updatedGenotypes.add(new GenotypeBuilder(genotype).alleles(updatedAlleles).make());
-        }
-
-        return updatedGenotypes;
-    }
-
    /**
     * Computes how many trailing bases can safely be clipped off all alleles.
     *
     * Walks backward from the end of each allele, comparing against the corresponding trailing
     * reference base, and stops as soon as any allele disagrees with the reference or would be
     * consumed entirely.
     *
     * @param unclippedAlleles the alleles to examine; symbolic alleles are skipped
     * @param ref              the reference bases to compare trailing bases against
     * @return the number of trailing bases shared by all (non-symbolic) alleles and the
     *         reference; one less than that if clipping further would empty an allele; or -1
     *         if the entire reference would be clipped away
     */
    public static int computeReverseClipping(final List<Allele> unclippedAlleles, final byte[] ref) {
        int clipping = 0;
        boolean stillClipping = true;

        while ( stillClipping ) {
            for ( final Allele a : unclippedAlleles ) {
                if ( a.isSymbolic() )
                    continue;

                // we need to ensure that we don't reverse clip out all of the bases from an allele because we then will have the wrong
                // position set for the VariantContext (although it's okay to forward clip it all out, because the position will be fine).
                if ( a.length() - clipping == 0 )
                    return clipping - 1;

                if ( a.length() - clipping <= 0 || a.length() == 0 ) {
                    stillClipping = false;
                }
                else if ( ref.length == clipping ) {
                    // clipping any further would consume the whole reference
                    return -1;
                }
                else if ( a.getBases()[a.length()-clipping-1] != ref[ref.length-clipping-1] ) {
                    // this allele's trailing base diverges from the reference: stop here
                    stillClipping = false;
                }
            }
            if ( stillClipping )
                clipping++;
        }

        return clipping;
    }
-
-    /**
-     * Clip out any unnecessary bases off the front of the alleles
-     *
-     * The VCF spec represents alleles as block substitutions, replacing AC with A for a
-     * 1 bp deletion of the C.  However, it's possible that we'd end up with alleles that
-     * contain extra bases on the left, such as GAC/GA to represent the same 1 bp deletion.
-     * This routine finds an offset among all alleles that can be safely trimmed
-     * off the left of each allele and still represent the same block substitution.
-     *
-     * A/C => A/C
-     * AC/A => AC/A
-     * ACC/AC => CC/C
-     * AGT/CAT => AGT/CAT
-     * <DEL>/C => <DEL>/C
-     *
-     * @param unclippedAlleles a non-null list of alleles that we want to clip
-     * @return the offset into the alleles where we can safely clip, inclusive, or
-     *   -1 if no clipping is tolerated.  So, if the result is 0, then we can remove
-     *   the first base of every allele.  If the result is 1, we can remove the
-     *   second base.
-     */
-    public static int computeForwardClipping(final List<Allele> unclippedAlleles) {
-        // cannot clip unless there's at least 1 alt allele
-        if ( unclippedAlleles.size() <= 1 )
-            return -1;
-
-        // we cannot forward clip any set of alleles containing a symbolic allele
-        int minAlleleLength = Integer.MAX_VALUE;
-        for ( final Allele a : unclippedAlleles ) {
-            if ( a.isSymbolic() )
-                return -1;
-            minAlleleLength = Math.min(minAlleleLength, a.length());
-        }
-
-        final byte[] firstAlleleBases = unclippedAlleles.get(0).getBases();
-        int indexOflastSharedBase = -1;
-
-        // the -1 to the stop is that we can never clip off the right most base
-        for ( int i = 0; i < minAlleleLength - 1; i++) {
-            final byte base = firstAlleleBases[i];
-
-            for ( final Allele allele : unclippedAlleles ) {
-                if ( allele.getBases()[i] != base )
-                    return indexOflastSharedBase;
-            }
-
-            indexOflastSharedBase = i;
-        }
-
-        return indexOflastSharedBase;
-    }
-
-    public static double computeHardyWeinbergPvalue(VariantContext vc) {
-        if ( vc.getCalledChrCount() == 0 )
-            return 0.0;
-        return HardyWeinbergCalculation.hwCalculate(vc.getHomRefCount(), vc.getHetCount(), vc.getHomVarCount());
-    }
-
-    public static boolean requiresPaddingBase(final List<String> alleles) {
-
-        // see whether one of the alleles would be null if trimmed through
-
-        for ( final String allele : alleles ) {
-            if ( allele.isEmpty() )
-                return true;
-        }
-
-        int clipping = 0;
-        Character currentBase = null;
-
-        while ( true ) {
-            for ( final String allele : alleles ) {
-                if ( allele.length() - clipping == 0 )
-                    return true;
-
-                char myBase = allele.charAt(clipping);
-                if ( currentBase == null )
-                    currentBase = myBase;
-                else if ( currentBase != myBase )
-                    return false;
-            }
-
-            clipping++;
-            currentBase = null;
-        }
-    }
-
-    private final static Map<String, Object> subsetAttributes(final CommonInfo igc, final Collection<String> keysToPreserve) {
-        Map<String, Object> attributes = new HashMap<>(keysToPreserve.size());
-        for ( final String key : keysToPreserve  ) {
-            if ( igc.hasAttribute(key) )
-                attributes.put(key, igc.getAttribute(key));
-        }
-        return attributes;
-    }
-
    /**
     * @deprecated use variant context builder version instead, i.e.
     *             {@code pruneVariantContext(VariantContextBuilder, Collection)}
     * @param vc                  the variant context
     * @param keysToPreserve      the keys to preserve
     * @return a pruned version of the original variant context
     */
    @Deprecated
    public static VariantContext pruneVariantContext(final VariantContext vc, Collection<String> keysToPreserve ) {
        // thin wrapper: wraps vc in a builder and delegates to the builder-based overload
        return pruneVariantContext(new VariantContextBuilder(vc), keysToPreserve).make();
    }
-
-    public static VariantContextBuilder pruneVariantContext(final VariantContextBuilder builder, Collection<String> keysToPreserve ) {
-        final VariantContext vc = builder.make();
-        if ( keysToPreserve == null ) keysToPreserve = Collections.emptyList();
-
-        // VC info
-        final Map<String, Object> attributes = subsetAttributes(vc.getCommonInfo(), keysToPreserve);
-
-        // Genotypes
-        final GenotypesContext genotypes = GenotypesContext.create(vc.getNSamples());
-        for ( final Genotype g : vc.getGenotypes() ) {
-            final GenotypeBuilder gb = new GenotypeBuilder(g);
-            // remove AD, DP, PL, and all extended attributes, keeping just GT and GQ
-            gb.noAD().noDP().noPL().noAttributes();
-            genotypes.add(gb.make());
-        }
-
-        return builder.genotypes(genotypes).attributes(attributes);
-    }
-
-    public static boolean allelesAreSubset(VariantContext vc1, VariantContext vc2) {
-        // if all alleles of vc1 are a contained in alleles of vc2, return true
-        if (!vc1.getReference().equals(vc2.getReference()))
-            return false;
-
-        for (final Allele a :vc1.getAlternateAlleles()) {
-            if (!vc2.getAlternateAlleles().contains(a))
-                return false;
-        }
-
-        return true;
-    }
-
    /**
     * Partitions variant contexts by their {@code VariantContext.Type}, folding subset
     * relationships across types: a context whose alleles are a subset of a previously seen
     * context of a different type is filed under that other context's type instead of its own.
     *
     * @param VCs the contexts to separate; must not be null
     * @return a map from type to the contexts grouped under that type (no empty lists)
     * @throws IllegalArgumentException if VCs is null
     */
    public static Map<VariantContext.Type, List<VariantContext>> separateVariantContextsByType( final Collection<VariantContext> VCs ) {
        if( VCs == null ) { throw new IllegalArgumentException("VCs cannot be null."); }

        final HashMap<VariantContext.Type, List<VariantContext>> mappedVCs = new HashMap<>();
        for ( final VariantContext vc : VCs ) {
            VariantContext.Type vcType = vc.getType();

            // look at previous variant contexts of different type. If:
            // a) otherVC has alleles which are subset of vc, remove otherVC from its list and add otherVC to vc's list
            // b) vc has alleles which are subset of otherVC. Then, add vc to otherVC's type list (rather, do nothing since vc will be added automatically to its list)
            // c) neither: do nothing, just add vc to its own list
            boolean addtoOwnList = true;
            for (final VariantContext.Type type : VariantContext.Type.values()) {
                if (type.equals(vcType))
                    continue;

                if (!mappedVCs.containsKey(type))
                    continue;

                List<VariantContext> vcList = mappedVCs.get(type);
                for (int k=0; k <  vcList.size(); k++) {
                    VariantContext otherVC = vcList.get(k);
                    if (allelesAreSubset(otherVC,vc)) {
                        // otherVC has a type different than vc and its alleles are a subset of vc: remove otherVC from its list and add it to vc's type list
                        // (removing by index mid-loop is safe here only because we break right after)
                        vcList.remove(k);
                        // avoid having empty lists
                        if (vcList.size() == 0)
                            mappedVCs.remove(type);
                        if ( !mappedVCs.containsKey(vcType) )
                            mappedVCs.put(vcType, new ArrayList<VariantContext>());
                        mappedVCs.get(vcType).add(otherVC);
                        break;
                    }
                    else if (allelesAreSubset(vc,otherVC)) {
                        // vc has a type different than otherVC and its alleles are a subset of VC: add vc to otherVC's type list and don't add to its own
                        mappedVCs.get(type).add(vc);
                        addtoOwnList = false;
                        break;
                    }
                }
            }
            if (addtoOwnList) {
                if ( !mappedVCs.containsKey(vcType) )
                    mappedVCs.put(vcType, new ArrayList<VariantContext>());
                mappedVCs.get(vcType).add(vc);
            }
        }

        return mappedVCs;
    }
-
-    public static VariantContext purgeUnallowedGenotypeAttributes(VariantContext vc, Set<String> allowedAttributes) {
-        if ( allowedAttributes == null )
-            return vc;
-
-        final GenotypesContext newGenotypes = GenotypesContext.create(vc.getNSamples());
-        for ( final Genotype genotype : vc.getGenotypes() ) {
-            final Map<String, Object> attrs = new HashMap<>();
-            for ( final Map.Entry<String, Object> attr : genotype.getExtendedAttributes().entrySet() ) {
-                if ( allowedAttributes.contains(attr.getKey()) )
-                    attrs.put(attr.getKey(), attr.getValue());
-            }
-            newGenotypes.add(new GenotypeBuilder(genotype).attributes(attrs).make());
-        }
-
-        return new VariantContextBuilder(vc).genotypes(newGenotypes).make();
-    }
-
-    protected static class AlleleMapper {
-        private VariantContext vc = null;
-        private Map<Allele, Allele> map = null;
-        public AlleleMapper(VariantContext vc)          { this.vc = vc; }
-        public AlleleMapper(Map<Allele, Allele> map)    { this.map = map; }
-        public boolean needsRemapping()                 { return this.map != null; }
-        public Collection<Allele> values()              { return map != null ? map.values() : vc.getAlleles(); }
-        public Allele remap(Allele a)                   { return map != null && map.containsKey(a) ? map.get(a) : a; }
-
-        public List<Allele> remap(List<Allele> as) {
-            List<Allele> newAs = new ArrayList<>();
-            for ( final Allele a : as ) {
-                //System.out.printf("  Remapping %s => %s%n", a, remap(a));
-                newAs.add(remap(a));
-            }
-            return newAs;
-        }
-
-        /**
-         * @return the list of unique values
-         */
-        public List<Allele> getUniqueMappedAlleles() {
-            if ( map == null )
-                return Collections.emptyList();
-            return new ArrayList<>(new HashSet<>(map.values()));
-        }
-    }
-
-    private static class CompareByPriority implements Comparator<VariantContext>, Serializable {
-        List<String> priorityListOfVCs;
-        public CompareByPriority(List<String> priorityListOfVCs) {
-            this.priorityListOfVCs = priorityListOfVCs;
-        }
-
-        private int getIndex(VariantContext vc) {
-            int i = priorityListOfVCs.indexOf(vc.getSource());
-            if ( i == -1 ) throw new IllegalArgumentException("Priority list " + priorityListOfVCs + " doesn't contain variant context " + vc.getSource());
-            return i;
-        }
-
-        public int compare(VariantContext vc1, VariantContext vc2) {
-            return Integer.valueOf(getIndex(vc1)).compareTo(getIndex(vc2));
-        }
-    }
-
-    /**
-     * For testing purposes only.  Create a site-only VariantContext at contig:start containing alleles
-     *
-     * @param name the name of the VC
-     * @param contig the contig for the VC
-     * @param start the start of the VC
-     * @param alleleStrings a non-null, non-empty list of strings for the alleles.  The first will be the ref allele, and others the
-     *                      alt.  Will compute the stop of the VC from the length of the reference allele
-     * @return a non-null VariantContext
-     */
-    public static VariantContext makeFromAlleles(final String name, final String contig, final int start, final List<String> alleleStrings) {
-        if ( alleleStrings == null || alleleStrings.isEmpty() )
-            throw new IllegalArgumentException("alleleStrings must be non-empty, non-null list");
-
-        final List<Allele> alleles = new LinkedList<>();
-        final int length = alleleStrings.get(0).length();
-
-        boolean first = true;
-        for ( final String alleleString : alleleStrings ) {
-            alleles.add(Allele.create(alleleString, first));
-            first = false;
-        }
-      return new VariantContextBuilder(name, contig, start, start+length-1, alleles).make();
-    }
-
-    /**
-     * Splits the alleles for the provided variant context into its primitive parts.
-     * Requires that the input VC be bi-allelic, so calling methods should first call splitVariantContextToBiallelics() if needed.
-     * Currently works only for MNPs.
-     *
-     * @param vc  the non-null VC to split
-     * @return a non-empty list of VCs split into primitive parts or the original VC otherwise
-     */
-    public static List<VariantContext> splitIntoPrimitiveAlleles(final VariantContext vc) {
-        if ( vc == null )
-            throw new IllegalArgumentException("Trying to break a null Variant Context into primitive parts");
-
-        if ( !vc.isBiallelic() )
-            throw new IllegalArgumentException("Trying to break a multi-allelic Variant Context into primitive parts");
-
-        // currently only works for MNPs
-        if ( !vc.isMNP() )
-            return Arrays.asList(vc);
-
-        final byte[] ref = vc.getReference().getBases();
-        final byte[] alt = vc.getAlternateAllele(0).getBases();
-
-        if ( ref.length != alt.length )
-            throw new IllegalStateException("ref and alt alleles for MNP have different lengths");
-
-        final List<VariantContext> result = new ArrayList<>(ref.length);
-
-        for ( int i = 0; i < ref.length; i++ ) {
-
-            // if the ref and alt bases are different at a given position, create a new SNP record (otherwise do nothing)
-            if ( ref[i] != alt[i] ) {
-
-                // create the ref and alt SNP alleles
-                final Allele newRefAllele = Allele.create(ref[i], true);
-                final Allele newAltAllele = Allele.create(alt[i], false);
-
-                // create a new VariantContext with the new SNP alleles
-                final VariantContextBuilder newVC = new VariantContextBuilder(vc).start(vc.getStart() + i).stop(vc.getStart() + i).alleles(Arrays.asList(newRefAllele, newAltAllele));
-
-                // create new genotypes with updated alleles
-                final Map<Allele, Allele> alleleMap = new HashMap<>();
-                alleleMap.put(vc.getReference(), newRefAllele);
-                alleleMap.put(vc.getAlternateAllele(0), newAltAllele);
-                final GenotypesContext newGenotypes = updateGenotypesWithMappedAlleles(vc.getGenotypes(), new AlleleMapper(alleleMap));
-
-                result.add(newVC.genotypes(newGenotypes).make());
-            }
-        }
-
-        if ( result.isEmpty() )
-            result.add(vc);
-
-        return result;
-    }
-
-    /**
-     * Are vc1 and 2 equal including their position and alleles?
-     * @param vc1 non-null VariantContext
-     * @param vc2 non-null VariantContext
-     * @return true if vc1 and vc2 are equal, false otherwise
-     */
-    public static boolean equalSites(final VariantContext vc1, final VariantContext vc2) {
-        if ( vc1 == null ) throw new IllegalArgumentException("vc1 cannot be null");
-        if ( vc2 == null ) throw new IllegalArgumentException("vc2 cannot be null");
-
-        if ( vc1.getStart() != vc2.getStart() ) return false;
-        if ( vc1.getEnd() != vc2.getEnd() ) return false;
-        if ( ! vc1.getChr().equals(vc2.getChr())) return false;
-        if ( ! vc1.getAlleles().equals(vc2.getAlleles()) ) return false;
-        return true;
-    }
-
-    /**
-     * Returns the absolute 0-based index of an allele.
-     *
-     * <p/>
-     * If the allele is equal to the reference, the result is 0, if it equal to the first alternative the result is 1
-     * and so forth.
-     * <p/>
-     * Therefore if you want the 0-based index within the alternative alleles you need to do the following:
-     *
-     * <p/>
-     * You can indicate whether the Java object reference comparator {@code ==} can be safelly used by setting {@code useEquals} to {@code false}.
-     *
-     * @param vc the target variant context.
-     * @param allele the target allele.
-     * @param ignoreRefState whether the reference states of the allele is important at all. Has no effect if {@code useEquals} is {@code false}.
-     * @param considerRefAllele whether the reference allele should be considered. You should set it to {@code false} if you are only interested in alternative alleles.
-     * @param useEquals whether equal method should be used in the search: {@link Allele#equals(Allele,boolean)}.
-     *
-     * @throws IllegalArgumentException if {@code allele} is {@code null}.
-     * @return {@code -1} if there is no such allele that satify those criteria, a value between 0 and {@link VariantContext#getNAlleles()} {@code -1} otherwise.
-     */
-    public static int indexOfAllele(final VariantContext vc, final Allele allele, final boolean ignoreRefState, final boolean considerRefAllele, final boolean useEquals) {
-        if (allele == null) throw new IllegalArgumentException();
-        return useEquals ? indexOfEqualAllele(vc,allele,ignoreRefState,considerRefAllele) : indexOfSameAllele(vc,allele,considerRefAllele);
-    }
-
-    /**
-     * Returns the relative 0-based index of an alternative allele.
-     * <p/>
-     * The the query allele is the same as the first alternative allele, the result is 0,
-     * if it is equal to the second 1 and so forth.
-     *
-     *
-     * <p/>
-     * Notice that the ref-status of the query {@code allele} is ignored.
-     *
-     * @param vc the target variant context.
-     * @param allele the query allele.
-     * @param useEquals  whether equal method should be used in the search: {@link Allele#equals(Allele,boolean)}.
-     *
-     * @throws IllegalArgumentException if {@code allele} is {@code null}.
-     *
-     * @return {@code -1} if there is no such allele that satify those criteria, a value between 0 and the number
-     *  of alternative alleles - 1.
-     */
-    public static int indexOfAltAllele(final VariantContext vc, final Allele allele, final boolean useEquals) {
-        final int absoluteIndex = indexOfAllele(vc,allele,true,false,useEquals);
-        return absoluteIndex == -1 ? -1 : absoluteIndex - 1;
-    }
-
-    // Impements index search using equals.
-    private static int indexOfEqualAllele(final VariantContext vc, final Allele allele, final boolean ignoreRefState,
-                                          final boolean considerRefAllele) {
-        int i = 0;
-        for (final Allele a : vc.getAlleles())
-            if (a.equals(allele,ignoreRefState))
-                return i == 0 ? (considerRefAllele ? 0 : -1) : i;
-            else
-                i++;
-        return -1;
-    }
-
-    // Implements index search using ==.
-    private static int indexOfSameAllele(final VariantContext vc, final Allele allele, final boolean considerRefAllele) {
-        int i = 0;
-
-        for (final Allele a : vc.getAlleles())
-            if (a == allele)
-                return i == 0 ? (considerRefAllele ? 0 : -1) : i;
-            else
-                i++;
-
-        return -1;
-    }
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/variant/HomoSapiensConstants.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/variant/HomoSapiensConstants.java
deleted file mode 100644
index 0b1543b..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/variant/HomoSapiensConstants.java
+++ /dev/null
@@ -1,51 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.variant;
-
-/**
- * <i>Homo sapiens</i> genome constants.
- *
- * <p>NOTE: reference to these constants is an indication that your code is (human) species assumption dependant.</p>
- *
- * @author Valentin Ruano-Rubio <valentin at broadinstitute.org>
- */
-public class HomoSapiensConstants {
-
-    /**
-     * Standard heterozygous rate for SNP variation.
-     */
-    public static final double SNP_HETEROZYGOSITY = 1e-3;
-
-    /**
-     * Standard heterozygous rate for INDEL variation.
-     */
-    public static final double INDEL_HETEROZYGOSITY = 1.0/8000;
-
-    /**
-     * Standard ploidy for autosomal chromosomes.
-     */
-    public static final int DEFAULT_PLOIDY = 2;
-}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/wiggle/WiggleHeader.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/wiggle/WiggleHeader.java
deleted file mode 100644
index 85681c4..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/wiggle/WiggleHeader.java
+++ /dev/null
@@ -1,56 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.wiggle;
-
-/**
- * A class for defining the header values for a wiggle graph file (see UCSC). The optional fields are:
- * name, description, visibility, color, altColor, priority, autoscale, alwaysZero, gridDefault,
- * maxHeightPixels,graphType,viewLimits,yLineMark,yLineOnOff,windowingFunction,smoothingWindow
- *
- * For now only support name, description
- *
- * @Author chartl
- * @Date Jul 21, 2010
- */
-public class WiggleHeader {
-    static String type = "wiggle_0";
-    // defines the type of the track (for IGV or UCSC), wiggle_0 is the 'only' type of wiggle
-    private String name;
-    // a label for the track
-    private String description;
-    // a description of what the track is
-
-    public WiggleHeader(String name, String description) {
-        this.name = name;
-        this.description = description;
-    }
-
-    public String toString() {
-        return String.format("track type=%s name=\"%s\" description=\"%s\"",type,name,description);
-    }
-
-}
-
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/wiggle/WiggleWriter.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/wiggle/WiggleWriter.java
deleted file mode 100644
index af9268b..0000000
--- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/wiggle/WiggleWriter.java
+++ /dev/null
@@ -1,117 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.wiggle;
-
-import org.broadinstitute.gatk.utils.GenomeLoc;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-import org.broadinstitute.gatk.utils.exceptions.UserException;
-
-import java.io.*;
-
-/**
- * Manages the output of wiggle files. Due to the wiggle spec (each wiggle file must be one chromosome), this writer
- * will throw exceptions (or output multiple files?)
- *
- * todo -- currently no support for fixed step (special case of variable step)
- * todo -- currently no support for span, start, or step
- *
- * @Author chartl
- * @Date Jul 21, 2010
- */
-public class WiggleWriter {
-
-    enum StepType {
-        fixed("fixedStep"),variable("variableStep");
-
-        String repr;
-
-        StepType(String repr) {
-            this.repr = repr;
-        }
-
-        public String toString() {
-            return repr;
-        }
-    }
-
-    private WiggleHeader wHeader = null;
-    // the header that we need to write prior to the file; and on future files (if multiple outputs ??)
-    private BufferedWriter wWriter = null;
-    // the file to which we are writing
-    private GenomeLoc firstLoc = null;
-    // the first genome loc the writer saw; need to cache this to compare contigs to preserve spec
-    private StepType type = StepType.variable;
-    // the type of step for the wiggle file, todo -- allow this to change
-
-    private String myFile = "unknown";
-
-    public WiggleWriter(File outputFile) {
-        myFile = outputFile.getAbsolutePath();
-        FileOutputStream outputStream;
-        try {
-            outputStream = new FileOutputStream(outputFile);
-        } catch ( FileNotFoundException e ) {
-            throw new UserException.CouldNotCreateOutputFile(outputFile, "Unable to create a wiggle file ", e);
-        }
-
-        wWriter = new BufferedWriter(new OutputStreamWriter(outputStream));
-    }
-
-    public WiggleWriter(OutputStream out) {
-       wWriter = new BufferedWriter(new OutputStreamWriter(out)); 
-    }
-
-    public void writeHeader(WiggleHeader header) {
-        wHeader = header;
-        write(wWriter,header.toString());
-    }
-
-    public void writeData(GenomeLoc loc, Object dataPoint) {
-        if ( this.firstLoc == null ) {
-            firstLoc = loc;
-            write(wWriter,String.format("%n"));
-            write(wWriter,String.format("%s\tchrom=%s",type.toString(),firstLoc.getContig()));
-            write(wWriter,String.format("%n"));
-            write(wWriter,String.format("%d\t%s",loc.getStart(),dataPoint.toString()));
-        } else if ( loc.compareContigs(firstLoc) == 0 ) {
-            write(wWriter,String.format("%n"));
-            write(wWriter,String.format("%d\t%s",loc.getStart(),dataPoint.toString()));
-        } else {
-            // todo -- maybe allow this to open a new file for the new chromosome?
-            throw new ReviewedGATKException("Attempting to write multiple contigs into wiggle file, first contig was "+firstLoc.getContig()+" most recent "+loc.getContig());
-        }
-    }
-
-    private void write(BufferedWriter w, String s) {
-        try {
-            w.write(s);
-            w.flush();
-            // flush required so writing to output stream will work
-        } catch (IOException e) {
-            throw new UserException.CouldNotCreateOutputFile(myFile, String.format("Error writing the wiggle line %s", s), e);
-        }
-    }
-}
diff --git a/public/gatk-tools-public/src/main/resources/org/broadinstitute/gatk/tools/walkers/variantrecalibration/plot_Tranches.R b/public/gatk-tools-public/src/main/resources/org/broadinstitute/gatk/tools/walkers/variantrecalibration/plot_Tranches.R
deleted file mode 100755
index d96add7..0000000
--- a/public/gatk-tools-public/src/main/resources/org/broadinstitute/gatk/tools/walkers/variantrecalibration/plot_Tranches.R
+++ /dev/null
@@ -1,93 +0,0 @@
-#!/bin/env Rscript
-
-library(tools)
-
-args <- commandArgs(TRUE)
-verbose = TRUE
-
-tranchesFile = args[1]
-targetTITV = as.numeric(args[2])
-targetSensitivity = as.numeric(args[3])
-suppressLegend = ! is.na(args[4])
-
-# -----------------------------------------------------------------------------------------------
-# Useful general routines
-# -----------------------------------------------------------------------------------------------
-
-MIN_FP_RATE = 0.001 # 1 / 1000 is min error rate 
-
-titvFPEst <- function(titvExpected, titvObserved) { 
-    max(min(1 - (titvObserved - 0.5) / (titvExpected - 0.5), 1), MIN_FP_RATE) 
-}
-
-titvFPEstV <- function(titvExpected, titvs) {
-    sapply(titvs, function(x) titvFPEst(titvExpected, x))
-}
-
-nTPFP <- function(nVariants, FDR) {
-    return(list(TP = nVariants * (1 - FDR/100), FP = nVariants * (FDR / 100)))
-}
-
-leftShift <- function(x, leftValue = 0) {
-    r = rep(leftValue, length(x))
-    for ( i in 1:(length(x)-1) ) {
-        #print(list(i=i))
-        r[i] = x[i+1]
-    }
-    r
-}
-
-# -----------------------------------------------------------------------------------------------
-# Tranches plot
-# -----------------------------------------------------------------------------------------------
-data2 = read.table(tranchesFile,sep=",",head=T)
-data2 = data2[order(data2$novelTiTv, decreasing=F),]
-#data2 = data2[order(data2$FDRtranche, decreasing=T),]
-cols = c("cornflowerblue", "cornflowerblue", "darkorange", "darkorange")
-density=c(20, -1, -1, 20)
-outfile = paste(tranchesFile, ".pdf", sep="")
-pdf(outfile, height=5, width=8)
-par(mar = c(5, 5, 4, 2) + 0.1)
-novelTiTv = c(data2$novelTITV,data2$novelTiTv)
-alpha = 1 - titvFPEstV(targetTITV, novelTiTv)
-#print(alpha)
-
-numGood = round(alpha * data2$numNovel);
-
-#numGood = round(data2$numNovel * (1-data2$targetTruthSensitivity/100))
-numBad = data2$numNovel - numGood;
-
-numPrevGood = leftShift(numGood, 0)
-numNewGood = numGood - numPrevGood
-numPrevBad = leftShift(numBad, 0)
-numNewBad = numBad - numPrevBad
-
-d=matrix(c(numPrevGood,numNewGood, numNewBad, numPrevBad),4,byrow=TRUE)
-#print(d)
-barplot(d/1000,horiz=TRUE,col=cols,space=0.2,xlab="Number of Novel Variants (1000s)", density=density, cex.axis=1.25, cex.lab=1.25) # , xlim=c(250000,350000))
-#abline(v= d[2,dim(d)[2]], lty=2)
-#abline(v= d[1,3], lty=2)
-if ( ! suppressLegend ) 
-    legend(3, length(data2$targetTruthSensitivity)/3 +1, c('Cumulative TPs','Tranch-specific TPs', 'Tranch-specific FPs', 'Cumulative FPs' ), fill=cols, density=density, bg='white', cex=1.25)
-
-mtext("Ti/Tv",2,line=2.25,at=length(data2$targetTruthSensitivity)*1.2,las=1, cex=1)
-mtext("truth",2,line=0,at=length(data2$targetTruthSensitivity)*1.2,las=1, cex=1)
-axis(2,line=-1,at=0.7+(0:(length(data2$targetTruthSensitivity)-1))*1.2,tick=FALSE,labels=data2$targetTruthSensitivity, las=1, cex.axis=1.0)
-axis(2,line=1,at=0.7+(0:(length(data2$targetTruthSensitivity)-1))*1.2,tick=FALSE,labels=round(novelTiTv,3), las=1, cex.axis=1.0)
-
-# plot sensitivity vs. specificity
-sensitivity = data2$truthSensitivity
-if ( ! is.null(sensitivity) ) {
-    #specificity = titvFPEstV(targetTITV, novelTiTv)
-    specificity = novelTiTv
-    plot(sensitivity, specificity, type="b", col="cornflowerblue", xlab="Tranche truth sensitivity", ylab="Specificity (Novel Ti/Tv ratio)")
-    abline(h=targetTITV, lty=2)
-    abline(v=targetSensitivity, lty=2)
-    #text(max(sensitivity), targetTITV-0.05, labels="Expected novel Ti/Tv", pos=2)
-}
-
-dev.off()
-
-if (exists('compactPDF')) {
-  compactPDF(outfile)
-}
diff --git a/public/gatk-tools-public/src/test/java/htsjdk/samtools/GATKBAMFileSpanUnitTest.java b/public/gatk-tools-public/src/test/java/htsjdk/samtools/GATKBAMFileSpanUnitTest.java
deleted file mode 100644
index aaa20c0..0000000
--- a/public/gatk-tools-public/src/test/java/htsjdk/samtools/GATKBAMFileSpanUnitTest.java
+++ /dev/null
@@ -1,254 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package htsjdk.samtools;
-
-import org.testng.Assert;
-import org.testng.annotations.Test;
-
-/**
- * Tests of functionality of union, intersection operators.
- */
-public class GATKBAMFileSpanUnitTest {
-    @Test
-    public void testUnionOfEmptyFileSpans() {
-        GATKBAMFileSpan empty1 = new GATKBAMFileSpan();
-        GATKBAMFileSpan empty2 = new GATKBAMFileSpan();
-        GATKBAMFileSpan union = empty1.union(empty2);
-        Assert.assertEquals(union.getGATKChunks().size(),0,"Elements inserted in union of two empty sets");
-    }
-
-    @Test
-    public void testUnionOfNonOverlappingFileSpans() {
-        GATKBAMFileSpan regionOne = new GATKBAMFileSpan(new GATKChunk(0,65535));
-        GATKBAMFileSpan regionTwo = new GATKBAMFileSpan(new GATKChunk(1<<16,(1<<16)|65535));
-        GATKBAMFileSpan union = regionOne.union(regionTwo);
-        Assert.assertEquals(union.getGATKChunks().size(),2,"Discontiguous elements were merged");
-        Assert.assertEquals(union.getGATKChunks().get(0),regionOne.getGATKChunks().get(0),"Wrong chunk was first in list");
-        Assert.assertEquals(union.getGATKChunks().get(1),regionTwo.getGATKChunks().get(0),"Wrong chunk was second in list");
-    }
-
-    @Test
-    public void testUnionOfContiguousFileSpans() {
-        // Region 1 ends at position adjacent to Region 2 start:
-        // |---1----|---2----|
-
-        GATKBAMFileSpan regionOne = new GATKBAMFileSpan(new GATKChunk(0,1<<16));
-        GATKBAMFileSpan regionTwo = new GATKBAMFileSpan(new GATKChunk(1<<16,(1<<16)|65535));
-        GATKBAMFileSpan union = regionOne.union(regionTwo);
-        Assert.assertEquals(union.getGATKChunks().size(),1,"Elements to be merged were not.");
-        Assert.assertEquals(union.getGATKChunks().get(0),new GATKChunk(0,(1<<16)|65535));
-    }
-
-    @Test
-    public void testUnionOfFileSpansFirstRegionEndsWithinSecondRegion() {
-        // Region 1 ends within Region 2:
-        //        |---2----|
-        // |---1----|
-
-        GATKBAMFileSpan regionOne = new GATKBAMFileSpan(new GATKChunk(0,(1<<16)|32767));
-        GATKBAMFileSpan regionTwo = new GATKBAMFileSpan(new GATKChunk(1<<16,(1<<16)|65535));
-        GATKBAMFileSpan union = regionOne.union(regionTwo);
-        Assert.assertEquals(union.getGATKChunks().size(),1,"Elements to be merged were not.");
-        Assert.assertEquals(union.getGATKChunks().get(0),new GATKChunk(0,(1<<16)|65535));
-    }
-
-    @Test
-    public void testUnionOfFileSpansFirstRegionEndsAtSecondRegionEnd() {
-        // Region 1 ends at Region 2 end:
-        //        |---2----|
-        // |---1-----------|
-
-        GATKBAMFileSpan regionOne = new GATKBAMFileSpan(new GATKChunk(0,(1<<16)|65535));
-        GATKBAMFileSpan regionTwo = new GATKBAMFileSpan(new GATKChunk(1<<16,(1<<16)|65535));
-        GATKBAMFileSpan union = regionOne.union(regionTwo);
-        Assert.assertEquals(union.getGATKChunks().size(),1,"Elements to be merged were not.");
-        Assert.assertEquals(union.getGATKChunks().get(0),new GATKChunk(0,(1<<16)|65535));
-    }
-
-    @Test
-    public void testUnionOfFileSpansFirstRegionEndsAfterSecondRegionEnd() {
-        // Region 1 ends after Region 2 end:
-        //        |---2----|
-        // |---1---------------|
-
-        GATKBAMFileSpan regionOne = new GATKBAMFileSpan(new GATKChunk(0,(1<<16)|65535));
-        GATKBAMFileSpan regionTwo = new GATKBAMFileSpan(new GATKChunk(1<<16,(1<<16)|32767));
-        GATKBAMFileSpan union = regionOne.union(regionTwo);
-        Assert.assertEquals(union.getGATKChunks().size(),1,"Elements to be merged were not.");
-        Assert.assertEquals(union.getGATKChunks().get(0),new GATKChunk(0,(1<<16)|65535));
-    }
-
-    @Test
-    public void testUnionOfFileSpansFirstRegionStartsAtSecondRegionStart() {
-        // Region 1 starts at Region 2 start, but ends before Region 2:
-        // |---2--------|
-        // |---1----|
-
-        GATKBAMFileSpan regionOne = new GATKBAMFileSpan(new GATKChunk(1<<16,(1<<16)|32767));
-        GATKBAMFileSpan regionTwo = new GATKBAMFileSpan(new GATKChunk(1<<16,(1<<16)|65535));
-        GATKBAMFileSpan union = regionOne.union(regionTwo);
-        Assert.assertEquals(union.getGATKChunks().size(),1,"Elements to be merged were not.");
-        Assert.assertEquals(union.getGATKChunks().get(0),new GATKChunk(1<<16,(1<<16)|65535));
-    }
-
-    @Test
-    public void testUnionOfFileSpansFirstRegionEqualToSecondRegion() {
-        // Region 1 and Region 2 represent the same region:
-        // |---2----|
-        // |---1----|
-
-        GATKBAMFileSpan regionOne = new GATKBAMFileSpan(new GATKChunk(1<<16,(1<<16)|65535));
-        GATKBAMFileSpan regionTwo = new GATKBAMFileSpan(new GATKChunk(1<<16,(1<<16)|65535));
-        GATKBAMFileSpan union = regionOne.union(regionTwo);
-        Assert.assertEquals(union.getGATKChunks().size(),1,"Elements to be merged were not.");
-        Assert.assertEquals(union.getGATKChunks().get(0),new GATKChunk(1<<16,(1<<16)|65535));
-    }
-
-    @Test
-    public void testUnionOfStringOfFileSpans() {
-        GATKBAMFileSpan regionOne = new GATKBAMFileSpan(new GATKChunk[] { new GATKChunk(0,1<<16), new GATKChunk(2<<16,3<<16) });
-        GATKBAMFileSpan regionTwo = new GATKBAMFileSpan(new GATKChunk(1<<16,2<<16));
-        GATKBAMFileSpan union = regionOne.union(regionTwo);
-        Assert.assertEquals(union.getGATKChunks().size(),1,"Elements to be merged were not.");
-        Assert.assertEquals(union.getGATKChunks().get(0),new GATKChunk(0,3<<16));
-    }
-
-    @Test
-    public void testUnionAllFileSpansAdded() {
-        GATKBAMFileSpan regionOne = new GATKBAMFileSpan(new GATKChunk[] { new GATKChunk(0,1<<16), new GATKChunk(2<<16,3<<16), new GATKChunk(20<<16,21<<16) });
-        GATKBAMFileSpan regionTwo = new GATKBAMFileSpan(new GATKChunk(1<<16,2<<16));
-        GATKBAMFileSpan union = regionOne.union(regionTwo);
-        Assert.assertEquals(union.getGATKChunks().size(),2,"Elements to be merged were not.");
-        Assert.assertEquals(union.getGATKChunks().get(0),new GATKChunk(0,3<<16));
-        Assert.assertEquals(union.getGATKChunks().get(1),new GATKChunk(20<<16,21<<16));
-    }
-
-    @Test
-    public void testIntersectionOfEmptyFileSpans() {
-        GATKBAMFileSpan empty1 = new GATKBAMFileSpan();
-        GATKBAMFileSpan empty2 = new GATKBAMFileSpan();
-        GATKBAMFileSpan intersection = empty1.intersection(empty2);
-        Assert.assertEquals(intersection.getGATKChunks().size(),0,"Elements inserted in intersection of two empty sets");
-    }
-
-    @Test
-    public void testIntersectionOfNonOverlappingFileSpans() {
-        GATKBAMFileSpan regionOne = new GATKBAMFileSpan(new GATKChunk(0,1<<16));
-        GATKBAMFileSpan regionTwo = new GATKBAMFileSpan(new GATKChunk(1<<16,2<<16));
-        GATKBAMFileSpan intersection = regionOne.intersection(regionTwo);
-        Assert.assertEquals(intersection.getGATKChunks().size(),0,"Elements inserted in intersection of two non-intersecting filespans");
-    }
-
-    @Test
-    public void testIntersectionOfSmallOverlapInFileSpans() {
-        GATKBAMFileSpan regionOne = new GATKBAMFileSpan(new GATKChunk(0,1<<16));
-        GATKBAMFileSpan regionTwo = new GATKBAMFileSpan(new GATKChunk(65535,2<<16));
-        GATKBAMFileSpan intersection = regionOne.intersection(regionTwo);
-        Assert.assertEquals(intersection.getGATKChunks().size(),1,"No intersection found between two partially overlapping filespans");
-        Assert.assertEquals(intersection.getGATKChunks().get(0),new GATKChunk(65535,1<<16),"Determined intersection is incorrect.");
-    }
-
-    @Test
-    public void testIntersectionOfStrictSubset() {
-        GATKBAMFileSpan regionOne = new GATKBAMFileSpan(new GATKChunk(0,1<<16));
-        GATKBAMFileSpan regionTwo = new GATKBAMFileSpan(new GATKChunk(0,2<<16));
-        GATKBAMFileSpan intersection = regionOne.intersection(regionTwo);
-        Assert.assertEquals(intersection.getGATKChunks().size(),1,"No intersection found between two partially overlapping filespans");
-        Assert.assertEquals(intersection.getGATKChunks().get(0),new GATKChunk(0<<16,1<<16),"Determined intersection is incorrect.");
-
-        // Make sure intersection is symmetric
-        intersection = regionTwo.intersection(regionOne);
-        Assert.assertEquals(intersection.getGATKChunks().size(),1,"No intersection found between two partially overlapping filespans");
-        Assert.assertEquals(intersection.getGATKChunks().get(0),new GATKChunk(0<<16,1<<16),"Determined intersection is incorrect.");
-    }
-
-    @Test
-    public void testIntersectionOfPartialOverlap() {
-        GATKBAMFileSpan regionOne = new GATKBAMFileSpan(new GATKChunk(0,2<<16));
-        GATKBAMFileSpan regionTwo = new GATKBAMFileSpan(new GATKChunk(0<<16|32768,1<<16|32768));
-        GATKBAMFileSpan intersection = regionOne.intersection(regionTwo);
-        Assert.assertEquals(intersection.getGATKChunks().size(),1,"No intersection found between two partially overlapping filespans");
-        Assert.assertEquals(intersection.getGATKChunks().get(0),new GATKChunk(0<<16|32768,1<<16|32768),"Determined intersection is incorrect.");
-    }
-
-    @Test
-    public void testIntersectionOfChunkLists() {
-        GATKBAMFileSpan regionOne = new GATKBAMFileSpan(new GATKChunk(0,5<<16));
-        GATKBAMFileSpan regionTwo = new GATKBAMFileSpan(new GATKChunk[] { new GATKChunk(1<<16,2<<16), new GATKChunk(3<<16,4<<16) });
-        GATKBAMFileSpan intersection = regionOne.intersection(regionTwo);
-        Assert.assertEquals(intersection.getGATKChunks().size(),2,"Wrong number of intersections found.");
-        Assert.assertEquals(intersection.getGATKChunks().get(0),new GATKChunk(1<<16,2<<16),"Determined intersection is incorrect.");
-        Assert.assertEquals(intersection.getGATKChunks().get(1),new GATKChunk(3<<16,4<<16),"Determined intersection is incorrect.");
-
-        // Make sure intersection is symmetric
-        intersection = regionTwo.intersection(regionOne);
-        Assert.assertEquals(intersection.getGATKChunks().size(),2,"Wrong number of intersections found.");
-        Assert.assertEquals(intersection.getGATKChunks().get(0),new GATKChunk(1<<16,2<<16),"Determined intersection is incorrect.");
-        Assert.assertEquals(intersection.getGATKChunks().get(1),new GATKChunk(3<<16,4<<16),"Determined intersection is incorrect.");
-    }
-
-    @Test
-    public void testSubtractionOfEmptyChunkLists() {
-        GATKBAMFileSpan regionOne = new GATKBAMFileSpan();
-        GATKBAMFileSpan regionTwo = new GATKBAMFileSpan();
-        GATKBAMFileSpan subtraction = regionOne.minus(regionTwo);
-        Assert.assertEquals(subtraction.getGATKChunks().size(),0,"Elements inserted in subtraction of two empty sets");
-    }
-
-    @Test
-    public void testSingleIntervalSubtractedAway() {
-        GATKBAMFileSpan regionOne = new GATKBAMFileSpan(new GATKChunk(0,1<<16));
-        GATKBAMFileSpan regionTwo = new GATKBAMFileSpan(new GATKChunk(0,1<<16));
-        GATKBAMFileSpan subtraction = regionOne.minus(regionTwo);
-        Assert.assertEquals(subtraction.getGATKChunks().size(),0,"Elements inserted in complete subtraction of region");
-    }
-
-    @Test
-    public void testMultipleIntervalsSubtractedAway() {
-        GATKBAMFileSpan regionOne = new GATKBAMFileSpan(new GATKChunk[] { new GATKChunk(0,1<<16), new GATKChunk(2<<16,3<<16) });
-        GATKBAMFileSpan regionTwo = new GATKBAMFileSpan(new GATKChunk[] { new GATKChunk(0,1<<16), new GATKChunk(2<<16,3<<16) });
-        GATKBAMFileSpan subtraction = regionOne.minus(regionTwo);
-        Assert.assertEquals(subtraction.getGATKChunks().size(),0,"Elements inserted in complete subtraction of region");
-    }
-
-    @Test
-    public void testSubtractionOfStrictSubset() {
-        GATKBAMFileSpan regionOne = new GATKBAMFileSpan(new GATKChunk(0,2<<16));
-        GATKBAMFileSpan regionTwo = new GATKBAMFileSpan(new GATKChunk(0,1<<16));
-        GATKBAMFileSpan subtraction = regionOne.minus(regionTwo);
-        Assert.assertEquals(subtraction.getGATKChunks().size(),1,"Incorrect size in strict subset subtraction of region");
-        Assert.assertEquals(subtraction.getGATKChunks().get(0),new GATKChunk(1<<16,2<<16),"Determined subtraction is incorrect.");
-    }
-
-    @Test
-    public void testSubtractionOfPartialOverlap() {
-        GATKBAMFileSpan regionOne = new GATKBAMFileSpan(new GATKChunk(0,2<<16));
-        GATKBAMFileSpan regionTwo = new GATKBAMFileSpan(new GATKChunk(1<<16,3<<16));
-        GATKBAMFileSpan subtraction = regionOne.minus(regionTwo);
-        Assert.assertEquals(subtraction.getGATKChunks().size(),1,"Incorrect size in partial subset subtraction of region");
-        Assert.assertEquals(subtraction.getGATKChunks().get(0),new GATKChunk(0<<16,1<<16),"Determined subtraction is incorrect.");
-    }
-}
diff --git a/public/gatk-tools-public/src/test/java/htsjdk/samtools/GATKChunkUnitTest.java b/public/gatk-tools-public/src/test/java/htsjdk/samtools/GATKChunkUnitTest.java
deleted file mode 100644
index 2b08fc4..0000000
--- a/public/gatk-tools-public/src/test/java/htsjdk/samtools/GATKChunkUnitTest.java
+++ /dev/null
@@ -1,71 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package htsjdk.samtools;
-
-import org.testng.Assert;
-import org.testng.annotations.Test;
-
-/**
- * Test basic functionality of the GATK chunk, giving informative size capabilities, etc.
- */
-public class GATKChunkUnitTest {
-    private static final int FULL_BLOCK_COMPRESSED_SIZE = 25559;
-    private static final int FULL_BLOCK_UNCOMPRESSED_SIZE = 65536;
-    private static final int HALF_BLOCK_UNCOMPRESSED_SIZE = FULL_BLOCK_UNCOMPRESSED_SIZE/2;
-
-    @Test
-    public void testSizeOfEmptyChunk() {
-        GATKChunk chunk = new GATKChunk(0,0);
-        Assert.assertEquals(chunk.size(),0,"Empty chunk's size is not equal to 0.");
-    }
-
-    @Test
-    public void testSizeOfChunkWithinSingleBlock() {
-        GATKChunk chunk = new GATKChunk(0,FULL_BLOCK_UNCOMPRESSED_SIZE-1);
-        Assert.assertEquals(chunk.size(),FULL_BLOCK_UNCOMPRESSED_SIZE-1,"Chunk spanning limits of block is returning wrong size.");
-
-        chunk = new GATKChunk(0,HALF_BLOCK_UNCOMPRESSED_SIZE);
-        Assert.assertEquals(chunk.size(),HALF_BLOCK_UNCOMPRESSED_SIZE,"Chunk spanning 1/2 block is returning the wrong size.");
-    }
-
-    @Test
-    public void testSizeOfSingleBlock() {
-        GATKChunk chunk = new GATKChunk(0,FULL_BLOCK_COMPRESSED_SIZE<<16);
-        Assert.assertEquals(chunk.size(),FULL_BLOCK_UNCOMPRESSED_SIZE,"Chunk spanning complete block returns incorrect size.");
-    }
-
-    @Test
-    public void testSizeOfBlockAndAHalf() {
-        GATKChunk chunk = new GATKChunk(0,(FULL_BLOCK_COMPRESSED_SIZE<<16)+HALF_BLOCK_UNCOMPRESSED_SIZE);
-        Assert.assertEquals(chunk.size(),FULL_BLOCK_UNCOMPRESSED_SIZE+HALF_BLOCK_UNCOMPRESSED_SIZE,"Chunk spanning 1.5 blocks returns incorrect size.");
-    }
-
-    @Test
-    public void testSizeOfHalfBlock() {
-        GATKChunk chunk = new GATKChunk(HALF_BLOCK_UNCOMPRESSED_SIZE,FULL_BLOCK_COMPRESSED_SIZE<<16);
-        Assert.assertEquals(chunk.size(),HALF_BLOCK_UNCOMPRESSED_SIZE,"Chunk spanning 0.5 blocks returns incorrect size.");
-    }
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/CommandLineGATKUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/CommandLineGATKUnitTest.java
deleted file mode 100644
index dc3e996..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/CommandLineGATKUnitTest.java
+++ /dev/null
@@ -1,68 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine;
-
-import htsjdk.samtools.SAMFileReader;
-import org.broadinstitute.gatk.utils.BaseTest;
-import org.testng.Assert;
-import org.testng.annotations.Test;
-
-import java.io.File;
-
-/**
- * @author Eric Banks
- * @since 7/18/12
- */
-public class CommandLineGATKUnitTest extends BaseTest {
-
-    @Test(enabled = true)
-    public void testSamTextFileError1() {
-        final File samFile = new File(publicTestDir + "testfile.sam");
-        final File indexFile = new File(publicTestDir + "HiSeq.1mb.1RG.bai");
-        try {
-            final SAMFileReader reader = new SAMFileReader(samFile, indexFile, false);
-
-            // we shouldn't get here
-            Assert.fail("We should have exceptioned out when trying to create a reader with an index for a textual SAM file");
-        } catch (RuntimeException e) {
-            Assert.assertTrue(e.getMessage().indexOf(CommandLineGATK.PICARD_TEXT_SAM_FILE_ERROR_1) != -1);
-        }
-    }
-
-    @Test(enabled = true)
-    public void testSamTextFileError2() {
-        File samFile = new File(publicTestDir + "testfile.sam");
-        try {
-            final SAMFileReader reader = new SAMFileReader(samFile);
-            reader.getFilePointerSpanningReads();
-
-            // we shouldn't get here
-            Assert.fail("We should have exceptioned out when trying to call getFilePointerSpanningReads() for a textual SAM file");
-        } catch (RuntimeException e) {
-            Assert.assertTrue(e.getMessage().indexOf(CommandLineGATK.PICARD_TEXT_SAM_FILE_ERROR_2) != -1);
-        }
-    }
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/EngineFeaturesIntegrationTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/EngineFeaturesIntegrationTest.java
deleted file mode 100644
index 6596cf3..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/EngineFeaturesIntegrationTest.java
+++ /dev/null
@@ -1,736 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine;
-
-import htsjdk.samtools.*;
-import htsjdk.samtools.util.CloseableIterator;
-import htsjdk.tribble.readers.LineIterator;
-import org.broadinstitute.gatk.engine.walkers.WalkerTest;
-import org.broadinstitute.gatk.utils.commandline.*;
-import org.broadinstitute.gatk.engine.arguments.StandardVariantContextInputArgumentCollection;
-import org.broadinstitute.gatk.engine.contexts.AlignmentContext;
-import org.broadinstitute.gatk.engine.contexts.ReferenceContext;
-import org.broadinstitute.gatk.engine.datasources.reference.ReferenceDataSource;
-import org.broadinstitute.gatk.engine.filters.MappingQualityUnavailableFilter;
-import org.broadinstitute.gatk.engine.refdata.RefMetaDataTracker;
-import org.broadinstitute.gatk.engine.refdata.tracks.RMDTrack;
-import org.broadinstitute.gatk.engine.refdata.tracks.RMDTrackBuilder;
-import org.broadinstitute.gatk.engine.refdata.utils.GATKFeature;
-import org.broadinstitute.gatk.engine.walkers.ReadFilters;
-import org.broadinstitute.gatk.engine.walkers.ReadWalker;
-import org.broadinstitute.gatk.engine.walkers.RodWalker;
-import org.broadinstitute.gatk.tools.walkers.qc.ErrorThrowing;
-import org.broadinstitute.gatk.utils.GenomeLocParser;
-import org.broadinstitute.gatk.utils.collections.Pair;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-import org.broadinstitute.gatk.utils.exceptions.UserException;
-import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
-import org.broadinstitute.gatk.utils.sam.GATKSamRecordFactory;
-import org.broadinstitute.gatk.utils.variant.GATKVCFUtils;
-import htsjdk.variant.variantcontext.VariantContext;
-import htsjdk.variant.variantcontext.writer.VariantContextWriter;
-import htsjdk.variant.vcf.VCFCodec;
-import htsjdk.variant.vcf.VCFHeader;
-import htsjdk.variant.vcf.VCFHeaderLine;
-import org.testng.Assert;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
-
-import java.io.*;
-import java.util.*;
-
-/**
- *
- */
-public class EngineFeaturesIntegrationTest extends WalkerTest {
-    private void testBadRODBindingInput(String type, String name, Class c) {
-        WalkerTestSpec spec = new WalkerTestSpec("-T SelectVariants -L 1:1 --variant:variant," + type + " "
-                + b37dbSNP132 + " -R " + b37KGReference + " -o %s",
-                1, c);
-        executeTest(name, spec);
-    }
-
-    @Test() private void testBadRODBindingInputType1() {
-        testBadRODBindingInput("beagle", "BEAGLE input to VCF expecting walker", UserException.BadArgumentValue.class);
-    }
-
-    @Test() private void testBadRODBindingInputType3() {
-        testBadRODBindingInput("bed", "Bed input to VCF expecting walker", UserException.BadArgumentValue.class);
-    }
-
-    @Test() private void testBadRODBindingInputTypeUnknownType() {
-        testBadRODBindingInput("bedXXX", "Unknown input to VCF expecting walker", UserException.UnknownTribbleType.class);
-    }
-
-    private void testMissingFile(String name, String missingBinding) {
-        WalkerTestSpec spec = new WalkerTestSpec(missingBinding + " -R " + b37KGReference + " -o %s",
-                1, UserException.CouldNotReadInputFile.class);
-        executeTest(name, spec);
-    }
-
-    @Test() private void testMissingBAMnt1() {
-        testMissingFile("missing BAM", "-T PrintReads -I missing.bam -nt 1");
-    }
-    @Test() private void testMissingBAMnt4() {
-        testMissingFile("missing BAM", "-T PrintReads -I missing.bam -nt 4");
-    }
-    @Test() private void testMissingVCF() {
-        testMissingFile("missing VCF", "-T SelectVariants -V missing.vcf");
-    }
-    @Test() private void testMissingInterval() {
-        testMissingFile("missing interval", "-T PrintReads -L missing.interval_list -I " + b37GoodBAM);
-    }
-
-
-    // --------------------------------------------------------------------------------
-    //
-    // Test that our exceptions are coming back as we expect
-    //
-    // --------------------------------------------------------------------------------
-
-    private class EngineErrorHandlingTestProvider extends TestDataProvider {
-        final Class expectedException;
-        final String args;
-        final int iterationsToTest;
-
-        public EngineErrorHandlingTestProvider(Class exceptedException, final String args) {
-            super(EngineErrorHandlingTestProvider.class);
-            this.expectedException = exceptedException;
-            this.args = args;
-            this.iterationsToTest = args.equals("") ? 1 : 10;
-            setName(String.format("Engine error handling: expected %s with args %s", exceptedException, args));
-        }
-    }
-
-    @DataProvider(name = "EngineErrorHandlingTestProvider")
-    public Object[][] makeEngineErrorHandlingTestProvider() {
-        for ( final ErrorThrowing.FailMethod failMethod : ErrorThrowing.FailMethod.values() ) {
-            if ( failMethod == ErrorThrowing.FailMethod.TREE_REDUCE )
-                continue; // cannot reliably throw errors in TREE_REDUCE
-
-            final String failArg = " -fail " + failMethod.name();
-            for ( final String args : Arrays.asList("", " -nt 2", " -nct 2") ) {
-                new EngineErrorHandlingTestProvider(NullPointerException.class, failArg + args);
-                new EngineErrorHandlingTestProvider(UserException.class, failArg + args);
-                new EngineErrorHandlingTestProvider(ReviewedGATKException.class, failArg + args);
-            }
-        }
-
-        return EngineErrorHandlingTestProvider.getTests(EngineErrorHandlingTestProvider.class);
-    }
-
-    //
-    // Loop over errors to throw, make sure they are the errors we get back from the engine, regardless of NT type
-    //
-    @Test(enabled = true, dataProvider = "EngineErrorHandlingTestProvider", timeOut = 60 * 1000 )
-    public void testEngineErrorHandlingTestProvider(final EngineErrorHandlingTestProvider cfg) {
-        for ( int i = 0; i < cfg.iterationsToTest; i++ ) {
-            final String root = "-T ErrorThrowing -R " + exampleFASTA;
-            final String args = root + cfg.args + " -E " + cfg.expectedException.getSimpleName();
-            WalkerTestSpec spec = new WalkerTestSpec(args, 0, cfg.expectedException);
-
-            executeTest(cfg.toString(), spec);
-        }
-    }
-
-    // --------------------------------------------------------------------------------
-    //
-    // Test that read filters are being applied in the order we expect
-    //
-    // --------------------------------------------------------------------------------
-
-    @ReadFilters({MappingQualityUnavailableFilter.class})
-    public static class DummyReadWalkerWithMapqUnavailableFilter extends ReadWalker<Integer, Integer> {
-        @Output
-        PrintStream out;
-
-        @Override
-        public Integer map(ReferenceContext ref, GATKSAMRecord read, RefMetaDataTracker metaDataTracker) {
-            return 1;
-        }
-
-        @Override
-        public Integer reduceInit() {
-            return 0;
-        }
-
-        @Override
-        public Integer reduce(Integer value, Integer sum) {
-            return value + sum;
-        }
-
-        @Override
-        public void onTraversalDone(Integer result) {
-            out.println(result);
-        }
-    }
-
-    @Test(enabled = true)
-    public void testUserReadFilterAppliedBeforeWalker() {
-        WalkerTestSpec spec = new WalkerTestSpec("-R " + b37KGReference + " -I " + privateTestDir + "allMAPQ255.bam"
-                + " -T DummyReadWalkerWithMapqUnavailableFilter -o %s -L MT -rf ReassignMappingQuality",
-                1, Arrays.asList("ecf27a776cdfc771defab1c5d19de9ab"));
-        executeTest("testUserReadFilterAppliedBeforeWalker", spec);
-    }
-
-    @Test
-    public void testNegativeCompress() {
-        testBadCompressArgument(-1);
-    }
-
-    @Test
-    public void testTooBigCompress() {
-        testBadCompressArgument(100);
-    }
-
-    private void testBadCompressArgument(final int compress) {
-        WalkerTestSpec spec = new WalkerTestSpec("-T PrintReads -R " + b37KGReference + " -I " + privateTestDir + "NA12878.1_10mb_2_10mb.bam -o %s -compress " + compress,
-                1, UserException.class);
-        executeTest("badCompress " + compress, spec);
-    }
-
-    // --------------------------------------------------------------------------------
-    //
-    // Test that the VCF version key is what we expect
-    //
-    // --------------------------------------------------------------------------------
-    @Test(enabled = true)
-    public void testGATKVersionInVCF() throws Exception {
-        WalkerTestSpec spec = new WalkerTestSpec("-T SelectVariants -R " + b37KGReference +
-                " -V " + privateTestDir + "NA12878.WGS.b37.chr20.firstMB.vcf"
-                + " -o %s -L 20:61098",
-                1, Arrays.asList(""));
-        spec.disableShadowBCF();
-        final File vcf = executeTest("testGATKVersionInVCF", spec).first.get(0);
-        final VCFCodec codec = new VCFCodec();
-        final VCFHeader header = (VCFHeader) codec.readActualHeader(codec.makeSourceFromStream(new FileInputStream(vcf)));
-        final VCFHeaderLine versionLine = header.getMetaDataLine(GATKVCFUtils.GATK_COMMAND_LINE_KEY);
-        Assert.assertNotNull(versionLine);
-        Assert.assertTrue(versionLine.toString().contains("SelectVariants"));
-    }
-
-    @Test(enabled = true)
-    public void testMultipleGATKVersionsInVCF() throws Exception {
-        WalkerTestSpec spec = new WalkerTestSpec("-T SelectVariants -R " + b37KGReference +
-                " -V " + privateTestDir + "gatkCommandLineInHeader.vcf"
-                + " -o %s",
-                1, Arrays.asList(""));
-        spec.disableShadowBCF();
-        final File vcf = executeTest("testMultipleGATKVersionsInVCF", spec).first.get(0);
-        final VCFCodec codec = new VCFCodec();
-        final VCFHeader header = (VCFHeader) codec.readActualHeader(codec.makeSourceFromStream(new FileInputStream(vcf)));
-
-        boolean foundHC = false;
-        boolean foundSV = false;
-        for ( final VCFHeaderLine line : header.getMetaDataInInputOrder() ) {
-            if ( line.getKey().equals(GATKVCFUtils.GATK_COMMAND_LINE_KEY) ) {
-                if ( line.toString().contains("HaplotypeCaller") ) {
-                    Assert.assertFalse(foundHC);
-                    foundHC = true;
-                }
-                if ( line.toString().contains("SelectVariants") ) {
-                    Assert.assertFalse(foundSV);
-                    foundSV = true;
-                }
-            }
-        }
-
-        Assert.assertTrue(foundHC, "Didn't find HaplotypeCaller command line header field");
-        Assert.assertTrue(foundSV, "Didn't find SelectVariants command line header field");
-    }
-
-    // --------------------------------------------------------------------------------
-    //
-    // Test that defaultBaseQualities actually works
-    //
-    // --------------------------------------------------------------------------------
-
-    public WalkerTestSpec testDefaultBaseQualities(final Integer value, final String md5) {
-        return new WalkerTestSpec("-T PrintReads -R " + b37KGReference + " -I " + privateTestDir + "/baseQualitiesToFix.bam -o %s"
-                + (value != null ? " --defaultBaseQualities " + value : ""),
-                1, Arrays.asList(md5));
-    }
-
-    @Test()
-    public void testDefaultBaseQualities20() {
-        executeTest("testDefaultBaseQualities20", testDefaultBaseQualities(20, "7d254a9d0ec59c66ee3e137f56f4c78f"));
-    }
-
-    @Test()
-    public void testDefaultBaseQualities30() {
-        executeTest("testDefaultBaseQualities30", testDefaultBaseQualities(30, "0f50def6cbbbd8ccd4739e2b3998e503"));
-    }
-
-    @Test(expectedExceptions = Exception.class)
-    public void testDefaultBaseQualitiesNoneProvided() {
-        executeTest("testDefaultBaseQualitiesNoneProvided", testDefaultBaseQualities(null, ""));
-    }
-
-    // --------------------------------------------------------------------------------
-    //
-    // Test engine-level cigar consolidation
-    //
-    // --------------------------------------------------------------------------------
-
-    @Test
-    public void testGATKEngineConsolidatesCigars() {
-        final WalkerTestSpec spec = new WalkerTestSpec(" -T PrintReads" +
-                                                       " -R " + b37KGReference +
-                                                       " -I " + privateTestDir + "zero_length_cigar_elements.bam" +
-                                                       " -o %s",
-                                                       1, Arrays.asList(""));  // No MD5s; we only want to check the cigar
-
-        final File outputBam = executeTest("testGATKEngineConsolidatesCigars", spec).first.get(0);
-        final SAMFileReader reader = new SAMFileReader(outputBam);
-        reader.setValidationStringency(ValidationStringency.SILENT);
-        reader.setSAMRecordFactory(new GATKSamRecordFactory());
-
-        final SAMRecord read = reader.iterator().next();
-        reader.close();
-
-        // Original cigar was 0M3M0M8M. Check that it's been consolidated after running through the GATK engine:
-        Assert.assertEquals(read.getCigarString(), "11M", "Cigar 0M3M0M8M not consolidated correctly by the engine");
-    }
-
-    // --------------------------------------------------------------------------------
-    //
-    // Test on-the-fly sample renaming
-    //
-    // --------------------------------------------------------------------------------
-
-    // On-the-fly sample renaming test case: one single-sample bam with multiple read groups
-    @Test
-    public void testOnTheFlySampleRenamingWithSingleBamFile() throws IOException {
-        final File sampleRenameMapFile = createTestSampleRenameMapFile(
-                Arrays.asList(privateTestDir + "CEUTrio.HiSeq.WGS.b37.NA12878.HEADERONLY.bam  myNewSampleName"));
-
-        final WalkerTestSpec spec = new WalkerTestSpec(" -T PrintReads" +
-                                                       " -R " + b37KGReference +
-                                                       " -I " + privateTestDir + "CEUTrio.HiSeq.WGS.b37.NA12878.HEADERONLY.bam" +
-                                                       " --sample_rename_mapping_file " + sampleRenameMapFile.getAbsolutePath() +
-                                                       " -o %s",
-                                                       1, Arrays.asList(""));  // No MD5s; we only want to check the read groups
-
-        final File outputBam = executeTest("testOnTheFlySampleRenamingWithSingleBamFile", spec).first.get(0);
-        final SAMFileReader reader = new SAMFileReader(outputBam);
-
-        for ( final SAMReadGroupRecord readGroup : reader.getFileHeader().getReadGroups() ) {
-            Assert.assertEquals(readGroup.getSample(), "myNewSampleName", String.format("Sample for read group %s not renamed correctly", readGroup.getId()));
-        }
-
-        reader.close();
-    }
-
-    // On-the-fly sample renaming test case: three single-sample bams with multiple read groups per bam
-    @Test
-    public void testOnTheFlySampleRenamingWithMultipleBamFiles() throws IOException {
-        final File sampleRenameMapFile = createTestSampleRenameMapFile(
-                Arrays.asList(privateTestDir + "CEUTrio.HiSeq.WGS.b37.NA12878.HEADERONLY.bam  newSampleFor12878",
-                              privateTestDir + "CEUTrio.HiSeq.WGS.b37.NA12891.HEADERONLY.bam  newSampleFor12891",
-                              privateTestDir + "CEUTrio.HiSeq.WGS.b37.NA12892.HEADERONLY.bam  newSampleFor12892"));
-
-        final Map<String, String> readGroupToNewSampleMap = new HashMap<>();
-        for ( String inputBamID : Arrays.asList("12878", "12891", "12892") ) {
-            final File inputBam = new File(privateTestDir + String.format("CEUTrio.HiSeq.WGS.b37.NA%s.HEADERONLY.bam", inputBamID));
-            final SAMFileReader inputBamReader = new SAMFileReader(inputBam);
-            final String newSampleName = String.format("newSampleFor%s", inputBamID);
-            for ( final SAMReadGroupRecord readGroup : inputBamReader.getFileHeader().getReadGroups() ) {
-                readGroupToNewSampleMap.put(readGroup.getId(), newSampleName);
-            }
-            inputBamReader.close();
-        }
-
-        final WalkerTestSpec spec = new WalkerTestSpec(" -T PrintReads" +
-                                                       " -R " + b37KGReference +
-                                                       " -I " + privateTestDir + "CEUTrio.HiSeq.WGS.b37.NA12878.HEADERONLY.bam" +
-                                                       " -I " + privateTestDir + "CEUTrio.HiSeq.WGS.b37.NA12891.HEADERONLY.bam" +
-                                                       " -I " + privateTestDir + "CEUTrio.HiSeq.WGS.b37.NA12892.HEADERONLY.bam" +
-                                                       " --sample_rename_mapping_file " + sampleRenameMapFile.getAbsolutePath() +
-                                                       " -o %s",
-                                                       1, Arrays.asList(""));  // No MD5s; we only want to check the read groups
-
-        final File outputBam = executeTest("testOnTheFlySampleRenamingWithMultipleBamFiles", spec).first.get(0);
-        final SAMFileReader outputBamReader = new SAMFileReader(outputBam);
-
-        int totalReadGroupsSeen = 0;
-        for ( final SAMReadGroupRecord readGroup : outputBamReader.getFileHeader().getReadGroups() ) {
-            Assert.assertEquals(readGroup.getSample(), readGroupToNewSampleMap.get(readGroup.getId()),
-                                String.format("Wrong sample for read group %s after on-the-fly renaming", readGroup.getId()));
-            totalReadGroupsSeen++;
-        }
-
-        Assert.assertEquals(totalReadGroupsSeen, readGroupToNewSampleMap.size(), "Wrong number of read groups encountered in output bam file");
-
-        outputBamReader.close();
-    }
-
-    // On-the-fly sample renaming test case: three single-sample bams with multiple read groups per bam,
-    //                                       performing renaming in only SOME of the bams
-    @Test
-    public void testOnTheFlySampleRenamingWithMultipleBamFilesPartialRename() throws IOException {
-        // Rename samples for NA12878 and NA12892, but not for NA12891
-        final File sampleRenameMapFile = createTestSampleRenameMapFile(
-                Arrays.asList(privateTestDir + "CEUTrio.HiSeq.WGS.b37.NA12878.HEADERONLY.bam  newSampleFor12878",
-                              privateTestDir + "CEUTrio.HiSeq.WGS.b37.NA12892.HEADERONLY.bam  newSampleFor12892"));
-
-        final Map<String, String> readGroupToNewSampleMap = new HashMap<>();
-        for ( String inputBamID : Arrays.asList("12878", "12891", "12892") ) {
-            final File inputBam = new File(privateTestDir + String.format("CEUTrio.HiSeq.WGS.b37.NA%s.HEADERONLY.bam", inputBamID));
-            final SAMFileReader inputBamReader = new SAMFileReader(inputBam);
-
-            // Special-case NA12891, which we're not renaming:
-            final String newSampleName = inputBamID.equals("12891") ? "NA12891" : String.format("newSampleFor%s", inputBamID);
-
-            for ( final SAMReadGroupRecord readGroup : inputBamReader.getFileHeader().getReadGroups() ) {
-                readGroupToNewSampleMap.put(readGroup.getId(), newSampleName);
-            }
-            inputBamReader.close();
-        }
-
-        final WalkerTestSpec spec = new WalkerTestSpec(" -T PrintReads" +
-                                                       " -R " + b37KGReference +
-                                                       " -I " + privateTestDir + "CEUTrio.HiSeq.WGS.b37.NA12878.HEADERONLY.bam" +
-                                                       " -I " + privateTestDir + "CEUTrio.HiSeq.WGS.b37.NA12891.HEADERONLY.bam" +
-                                                       " -I " + privateTestDir + "CEUTrio.HiSeq.WGS.b37.NA12892.HEADERONLY.bam" +
-                                                       " --sample_rename_mapping_file " + sampleRenameMapFile.getAbsolutePath() +
-                                                       " -o %s",
-                                                       1, Arrays.asList(""));  // No MD5s; we only want to check the read groups
-
-        final File outputBam = executeTest("testOnTheFlySampleRenamingWithMultipleBamFilesPartialRename", spec).first.get(0);
-        final SAMFileReader outputBamReader = new SAMFileReader(outputBam);
-
-        int totalReadGroupsSeen = 0;
-        for ( final SAMReadGroupRecord readGroup : outputBamReader.getFileHeader().getReadGroups() ) {
-            Assert.assertEquals(readGroup.getSample(), readGroupToNewSampleMap.get(readGroup.getId()),
-                                String.format("Wrong sample for read group %s after on-the-fly renaming", readGroup.getId()));
-            totalReadGroupsSeen++;
-        }
-
-        Assert.assertEquals(totalReadGroupsSeen, readGroupToNewSampleMap.size(), "Wrong number of read groups encountered in output bam file");
-
-        outputBamReader.close();
-    }
-
-    // On-the-fly sample renaming test case: two single-sample bams with read group collisions
-    @Test
-    public void testOnTheFlySampleRenamingWithReadGroupCollisions() throws IOException {
-        final File sampleRenameMapFile = createTestSampleRenameMapFile(
-                Arrays.asList(privateTestDir + "CEUTrio.HiSeq.WGS.b37.NA12878.HEADERONLY.bam  newSampleFor12878",
-                              privateTestDir + "CEUTrio.HiSeq.WGS.b37.READ_GROUP_COLLISIONS_WITH_NA12878.HEADERONLY.bam  newSampleForNot12878"));
-
-        final Set<String> na12878ReadGroups = new HashSet<>();
-        final SAMFileReader inputBamReader = new SAMFileReader(new File(privateTestDir + "CEUTrio.HiSeq.WGS.b37.NA12878.HEADERONLY.bam"));
-        for ( final SAMReadGroupRecord readGroup : inputBamReader.getFileHeader().getReadGroups() ) {
-            na12878ReadGroups.add(readGroup.getId());
-        }
-        inputBamReader.close();
-
-        final WalkerTestSpec spec = new WalkerTestSpec(" -T PrintReads" +
-                                                       " -R " + b37KGReference +
-                                                       " -I " + privateTestDir + "CEUTrio.HiSeq.WGS.b37.NA12878.HEADERONLY.bam" +
-                                                       " -I " + privateTestDir + "CEUTrio.HiSeq.WGS.b37.READ_GROUP_COLLISIONS_WITH_NA12878.HEADERONLY.bam" +
-                                                       " --sample_rename_mapping_file " + sampleRenameMapFile.getAbsolutePath() +
-                                                       " -o %s",
-                                                       1, Arrays.asList(""));  // No MD5s; we only want to check the read groups
-
-        final File outputBam = executeTest("testOnTheFlySampleRenamingWithReadGroupCollisions", spec).first.get(0);
-        final SAMFileReader outputBamReader = new SAMFileReader(outputBam);
-
-        int totalReadGroupsSeen = 0;
-        for ( final SAMReadGroupRecord readGroup : outputBamReader.getFileHeader().getReadGroups() ) {
-            String expectedSampleName = "";
-            if ( na12878ReadGroups.contains(readGroup.getId()) ) {
-                expectedSampleName = "newSampleFor12878";
-            }
-            else {
-                expectedSampleName = "newSampleForNot12878";
-            }
-
-            Assert.assertEquals(readGroup.getSample(), expectedSampleName,
-                                String.format("Wrong sample for read group %s after on-the-fly renaming", readGroup.getId()));
-            totalReadGroupsSeen++;
-        }
-
-        Assert.assertEquals(totalReadGroupsSeen, na12878ReadGroups.size() * 2, "Wrong number of read groups encountered in output bam file");
-
-        outputBamReader.close();
-    }
-
-    // On-the-fly sample renaming test case: a multi-sample bam (this should generate a UserException)
-    @Test
-    public void testOnTheFlySampleRenamingWithMultiSampleBam() throws IOException {
-        final File sampleRenameMapFile = createTestSampleRenameMapFile(
-                Arrays.asList(privateTestDir + "CEUTrio.HiSeq.WGS.b37.MERGED.HEADERONLY.bam  myNewSampleName"));
-
-        final WalkerTestSpec spec = new WalkerTestSpec(" -T PrintReads" +
-                                                       " -R " + b37KGReference +
-                                                       " -I " + privateTestDir + "CEUTrio.HiSeq.WGS.b37.MERGED.HEADERONLY.bam" +
-                                                       " --sample_rename_mapping_file " + sampleRenameMapFile.getAbsolutePath() +
-                                                       " -o %s",
-                                                       1,
-                                                       UserException.class); // expecting a UserException here
-
-        executeTest("testOnTheFlySampleRenamingWithMultiSampleBam", spec);
-    }
-
-    // On-the-fly sample renaming test case: ensure that walkers can see the remapped sample names in individual reads
-    @Test
-    public void testOnTheFlySampleRenamingVerifyWalkerSeesNewSamplesInReads() throws IOException {
-        final File sampleRenameMapFile = createTestSampleRenameMapFile(
-                Arrays.asList(privateTestDir + "NA12878.HiSeq.b37.chr20.10_11mb.bam  myNewSampleName"));
-
-        final WalkerTestSpec spec = new WalkerTestSpec(" -T OnTheFlySampleRenamingVerifyingTestWalker" +
-                                                       " -R " + b37KGReference +
-                                                       " -I " + privateTestDir + "NA12878.HiSeq.b37.chr20.10_11mb.bam" +
-                                                       " --sample_rename_mapping_file " + sampleRenameMapFile.getAbsolutePath() +
-                                                       " --newSampleName myNewSampleName" +
-                                                       " -L 20:10000000-10001000",
-                                                       1, Arrays.asList(""));
-
-        // Test is a success if our custom walker doesn't throw an exception
-        executeTest("testOnTheFlySampleRenamingVerifyWalkerSeesNewSamplesInReads", spec);
-    }
-
-    @Test
-    public void testOnTheFlySampleRenamingSingleSampleVCF() throws IOException {
-        final File sampleRenameMapFile = createTestSampleRenameMapFile(
-                Arrays.asList(privateTestDir + "NA12878.WGS.b37.chr20.firstMB.vcf  newSampleForNA12878"));
-
-        final WalkerTestSpec spec = new WalkerTestSpec(" -T CombineVariants" +
-                " -R " + b37KGReference +
-                " -V " + privateTestDir + "NA12878.WGS.b37.chr20.firstMB.vcf" +
-                " --sample_rename_mapping_file " + sampleRenameMapFile.getAbsolutePath() +
-                " -o %s",
-                1,
-                Arrays.asList("")); // No MD5s -- we will inspect the output file manually
-
-        final File outputVCF = executeTest("testOnTheFlySampleRenamingSingleSampleVCF", spec).first.get(0);
-        verifySampleRenaming(outputVCF, "newSampleForNA12878");
-    }
-
-    private void verifySampleRenaming( final File outputVCF, final String newSampleName ) throws IOException {
-        final Pair<VCFHeader, GATKVCFUtils.VCIterable<LineIterator>> headerAndVCIter = GATKVCFUtils.readAllVCs(outputVCF, new VCFCodec());
-        final VCFHeader header = headerAndVCIter.getFirst();
-        final GATKVCFUtils.VCIterable<LineIterator> iter = headerAndVCIter.getSecond();
-
-        // Verify that sample renaming occurred at both the header and record levels (checking only the first 10 records):
-
-        Assert.assertEquals(header.getGenotypeSamples().size(), 1, "Wrong number of samples in output vcf header");
-        Assert.assertEquals(header.getGenotypeSamples().get(0), newSampleName, "Wrong sample name in output vcf header");
-
-        int recordCount = 0;
-        while ( iter.hasNext() && recordCount < 10 ) {
-            final VariantContext vcfRecord = iter.next();
-            Assert.assertEquals(vcfRecord.getSampleNames().size(), 1, "Wrong number of samples in output vcf record");
-            Assert.assertEquals(vcfRecord.getSampleNames().iterator().next(), newSampleName, "Wrong sample name in output vcf record");
-            recordCount++;
-        }
-    }
-
-    @Test
-    public void testOnTheFlySampleRenamingVerifyWalkerSeesNewSamplesInVCFRecords() throws Exception {
-        final File sampleRenameMapFile = createTestSampleRenameMapFile(
-                Arrays.asList(privateTestDir + "samplerenametest_single_sample_gvcf.vcf    FOOSAMPLE"));
-
-        final WalkerTestSpec spec = new WalkerTestSpec(" -T OnTheFlySampleRenamingVerifyingRodWalker" +
-                " -R " + hg19Reference +
-                " -V " + privateTestDir + "samplerenametest_single_sample_gvcf.vcf" +
-                " --sample_rename_mapping_file " + sampleRenameMapFile.getAbsolutePath() +
-                " --expectedSampleName FOOSAMPLE" +
-                " -o %s",
-                1,
-                Arrays.asList("")); // No MD5s -- custom walker will throw an exception if there's a problem
-
-        executeTest("testOnTheFlySampleRenamingVerifyWalkerSeesNewSamplesInVCFRecords", spec);
-    }
-
-    @Test
-    public void testOnTheFlySampleRenamingMultiSampleVCF() throws Exception {
-        final File sampleRenameMapFile = createTestSampleRenameMapFile(
-                Arrays.asList(privateTestDir + "vcf/vcfWithGenotypes.vcf  badSample"));
-
-        final WalkerTestSpec spec = new WalkerTestSpec(" -T CombineVariants" +
-                " -R " + b37KGReference +
-                " -V " + privateTestDir + "vcf/vcfWithGenotypes.vcf" +
-                " --sample_rename_mapping_file " + sampleRenameMapFile.getAbsolutePath() +
-                " -o %s",
-                1,
-                UserException.class); // expecting a UserException here
-
-        executeTest("testOnTheFlySampleRenamingMultiSampleVCF", spec);
-    }
-
-    @Test
-    public void testOnTheFlySampleRenamingSitesOnlyVCF() throws Exception {
-        final File sampleRenameMapFile = createTestSampleRenameMapFile(
-                Arrays.asList(privateTestDir + "vcf/vcfWithoutGenotypes.vcf  badSample"));
-
-        final WalkerTestSpec spec = new WalkerTestSpec(" -T CombineVariants" +
-                " -R " + b37KGReference +
-                " -V " + privateTestDir + "vcf/vcfWithoutGenotypes.vcf" +
-                " --sample_rename_mapping_file " + sampleRenameMapFile.getAbsolutePath() +
-                " -o %s",
-                1,
-                UserException.class); // expecting a UserException here
-
-        executeTest("testOnTheFlySampleRenamingSitesOnlyVCF", spec);
-    }
-
-    private File createTestSampleRenameMapFile( final List<String> contents ) throws IOException {
-        final File mapFile = createTempFile("TestSampleRenameMapFile", ".tmp");
-        final PrintWriter writer = new PrintWriter(mapFile);
-
-        for ( final String line : contents ) {
-            writer.println(line);
-        }
-        writer.close();
-
-        return mapFile;
-    }
-
-    public static class OnTheFlySampleRenamingVerifyingTestWalker extends ReadWalker<Integer, Integer> {
-        @Argument(fullName = "newSampleName", shortName = "newSampleName", doc = "", required = true)
-        String newSampleName = null;
-
-        public Integer map(ReferenceContext ref, GATKSAMRecord read, RefMetaDataTracker metaDataTracker) {
-            if ( ! newSampleName.equals(read.getReadGroup().getSample()) ) {
-                throw new IllegalStateException(String.format("Encountered read with the wrong sample name. Expected %s found %s",
-                                                              newSampleName, read.getReadGroup().getSample()));
-            }
-
-            return 1;
-        }
-
-        public Integer reduceInit() { return 0; }
-        public Integer reduce(Integer value, Integer sum) { return value + sum; }
-    }
-
-    public static class OnTheFlySampleRenamingVerifyingRodWalker extends RodWalker<Integer, Integer> {
-        @Argument(fullName = "expectedSampleName", shortName = "expectedSampleName", doc = "", required = true)
-        String expectedSampleName = null;
-
-        @Output
-        PrintStream out;
-
-        @Input(fullName="variant", shortName = "V", doc="Input VCF file", required=true)
-        public RodBinding<VariantContext> variants;
-
-        public Integer map( RefMetaDataTracker tracker, ReferenceContext ref, AlignmentContext context ) {
-            if ( tracker == null ) {
-                return 0;
-            }
-
-            for ( final VariantContext vc : tracker.getValues(variants, context.getLocation()) ) {
-                if ( vc.getSampleNames().size() != 1 ) {
-                    throw new IllegalStateException("Encountered a vcf record with num samples != 1");
-                }
-
-                final String actualSampleName = vc.getSampleNames().iterator().next();
-                if ( ! expectedSampleName.equals(actualSampleName)) {
-                    throw new IllegalStateException(String.format("Encountered vcf record with wrong sample name. Expected %s found %s",
-                                                                  expectedSampleName, actualSampleName));
-                }
-            }
-
-            return 1;
-        }
-
-        public Integer reduceInit() {
-            return 0;
-        }
-
-        public Integer reduce(Integer counter, Integer sum) {
-            return counter + sum;
-        }
-    }
-
-    // --------------------------------------------------------------------------------
-    //
-    // Test output file-specific options
-    //
-    // --------------------------------------------------------------------------------
-
-    //Returns the output file
-    private File testBAMFeatures(final String args, final String md5) {
-        WalkerTestSpec spec = new WalkerTestSpec("-T PrintReads -R " + b37KGReference +
-                " -I " + privateTestDir + "NA20313.highCoverageRegion.bam"
-                + " --no_pg_tag -o %s " + args,
-                1, Arrays.asList(".bam"), Arrays.asList(md5));
-        return executeTest("testBAMFeatures: "+args, spec).first.get(0);
-    }
-
-    @Test
-    public void testSAMWriterFeatures() {
-        testBAMFeatures("-compress 0", "bb4b55b1f80423970bb9384cbf0d8793");
-        testBAMFeatures("-compress 9", "b85ee1636d62e1bb8ed65a245c307167");
-        testBAMFeatures("-simplifyBAM", "38f9c30a27dfbc085a2ff52a1617d579");
-
-        //Validate MD5
-        final String expectedMD5 = "6627b9ea33293a0083983feb94948c1d";
-        final File md5Target = testBAMFeatures("--generate_md5", expectedMD5);
-        final File md5File = new File(md5Target.getAbsoluteFile() + ".md5");
-        md5File.deleteOnExit();
-        Assert.assertTrue(md5File.exists(), "MD5 wasn't created");
-        try {
-            String md5 = new BufferedReader(new FileReader(md5File)).readLine();
-            Assert.assertEquals(md5, expectedMD5, "Generated MD5 doesn't match expected");
-        } catch (IOException e) {
-            Assert.fail("Can't parse MD5 file", e);
-        }
-
-        //Validate that index isn't created
-        final String unindexedBAM = testBAMFeatures("--disable_bam_indexing", expectedMD5).getAbsolutePath();
-        Assert.assertTrue(!(new File(unindexedBAM+".bai").exists()) &&
-                          !(new File(unindexedBAM.replace(".bam", ".bai")).exists()),
-                          "BAM index was created even though it was disabled");
-    }
-
-    private void testVCFFeatures(final String args, final String md5) {
-        WalkerTestSpec spec = new WalkerTestSpec("-T SelectVariants -R " + b37KGReference +
-                " -V " + privateTestDir + "CEUtrioTest.vcf"
-                + " --no_cmdline_in_header -o %s " + args,
-                1, Arrays.asList(md5));
-        executeTest("testVCFFeatures: "+args, spec);
-    }
-
-    private void testVCFFormatHandling(final boolean writeFullFormat, final String md5) {
-        WalkerTestSpec spec = new WalkerTestSpec("-T SelectVariants -R " + b37KGReference +
-                " -V " + privateTestDir + "ILLUMINA.wex.broad_phase2_baseline.20111114.both.exome.genotypes.1000.vcf"
-                + " --no_cmdline_in_header -o %s "
-                + " --fullyDecode " //Without this parameter, the FORMAT fields will be emitted unchanged.  Oops
-                + (writeFullFormat ? "-writeFullFormat" : "") ,
-                1, Arrays.asList(md5));
-        executeTest("testVCFFormatHandling: "+(writeFullFormat ? "Untrimmed" : "Trimmed"), spec);
-    }
-
-    @Test
-    public void testVCFWriterFeatures() {
-        testVCFFeatures("--sites_only", "94bf1f2c0946e933515e4322323a5716");
-        testVCFFeatures("--bcf", "03f2d6988f54a332da48803c78f9c4b3");
-        testVCFFormatHandling(true, "2b0fa660b0cef4b0f45a10febb453b6c");
-        testVCFFormatHandling(false, "5960311fdd9ee6db88587efaaf4055a0");
-    }
-}
\ No newline at end of file
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/GenomeAnalysisEngineUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/GenomeAnalysisEngineUnitTest.java
deleted file mode 100644
index ff60ae3..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/GenomeAnalysisEngineUnitTest.java
+++ /dev/null
@@ -1,273 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine;
-
-import org.broadinstitute.gatk.utils.BaseTest;
-import org.broadinstitute.gatk.utils.commandline.Tags;
-import org.broadinstitute.gatk.engine.arguments.GATKArgumentCollection;
-import org.broadinstitute.gatk.engine.datasources.reads.SAMReaderID;
-import org.broadinstitute.gatk.engine.iterators.ReadTransformer;
-import org.broadinstitute.gatk.engine.walkers.Walker;
-import org.broadinstitute.gatk.tools.walkers.qc.CountReads;
-import org.broadinstitute.gatk.tools.walkers.readutils.PrintReads;
-import org.broadinstitute.gatk.utils.GenomeLocParser;
-import org.broadinstitute.gatk.utils.GenomeLocSortedSet;
-import org.broadinstitute.gatk.utils.exceptions.UserException;
-import org.broadinstitute.gatk.utils.sam.ArtificialSAMUtils;
-import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
-import org.testng.Assert;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
-
-import java.io.File;
-import java.io.IOException;
-import java.io.PrintWriter;
-import java.util.*;
-
-/**
- * Tests selected functionality in the GenomeAnalysisEngine class
- */
-public class GenomeAnalysisEngineUnitTest extends BaseTest {
-
-    @Test(expectedExceptions=UserException.class)
-    public void testEmptySamFileListHandling() throws Exception {
-        GenomeAnalysisEngine testEngine = new GenomeAnalysisEngine();
-        testEngine.setWalker(new CountReads()); //generalizable to any walker requiring reads
-
-        //supply command line args so validateSuppliedReads() knows whether reads were passed in
-        GATKArgumentCollection testArgs = new GATKArgumentCollection();
-        testArgs.samFiles.add("empty.list");
-        testEngine.setArguments(testArgs);
-
-        //represents the empty list of samFiles read in from empty.list by CommandLineExecutable
-        Collection<SAMReaderID> samFiles = new ArrayList<SAMReaderID>();
-
-        testEngine.setSAMFileIDs(samFiles);
-        testEngine.validateSuppliedReads();
-    }
-
-    @Test(expectedExceptions=UserException.class)
-    public void testDuplicateSamFileHandlingSingleDuplicate() throws Exception {
-        GenomeAnalysisEngine testEngine = new GenomeAnalysisEngine();
-
-        Collection<SAMReaderID> samFiles = new ArrayList<SAMReaderID>();
-        samFiles.add(new SAMReaderID(new File(publicTestDir + "exampleBAM.bam"), new Tags()));
-        samFiles.add(new SAMReaderID(new File(publicTestDir + "exampleBAM.bam"), new Tags()));
-
-        testEngine.setSAMFileIDs(samFiles);
-        testEngine.checkForDuplicateSamFiles();
-    }
-
-    @Test(expectedExceptions=UserException.class)
-    public void testDuplicateSamFileHandlingMultipleDuplicates() throws Exception {
-        GenomeAnalysisEngine testEngine = new GenomeAnalysisEngine();
-
-        Collection<SAMReaderID> samFiles = new ArrayList<SAMReaderID>();
-        samFiles.add(new SAMReaderID(new File(publicTestDir + "exampleBAM.bam"), new Tags()));
-        samFiles.add(new SAMReaderID(new File(publicTestDir + "exampleNORG.bam"), new Tags()));
-        samFiles.add(new SAMReaderID(new File(publicTestDir + "exampleBAM.bam"),  new Tags()));
-        samFiles.add(new SAMReaderID(new File(publicTestDir + "exampleNORG.bam"), new Tags()));
-
-        testEngine.setSAMFileIDs(samFiles);
-        testEngine.checkForDuplicateSamFiles();
-    }
-
-    @Test(expectedExceptions=UserException.class)
-    public void testDuplicateSamFileHandlingAbsoluteVsRelativePath() {
-        GenomeAnalysisEngine testEngine = new GenomeAnalysisEngine();
-
-        final File relativePathToBAMFile = new File(publicTestDir + "exampleBAM.bam");
-        final File absolutePathToBAMFile = new File(relativePathToBAMFile.getAbsolutePath());
-        Collection<SAMReaderID> samFiles = new ArrayList<SAMReaderID>();
-        samFiles.add(new SAMReaderID(relativePathToBAMFile, new Tags()));
-        samFiles.add(new SAMReaderID(absolutePathToBAMFile, new Tags()));
-
-        testEngine.setSAMFileIDs(samFiles);
-        testEngine.checkForDuplicateSamFiles();
-    }
-
-    @Test
-    public void testEmptyIntervalSetHandling() throws Exception {
-        GenomeLocParser genomeLocParser = new GenomeLocParser(ArtificialSAMUtils.createArtificialSamHeader(1, 1, 1000).getSequenceDictionary());
-
-        GenomeAnalysisEngine testEngine = new GenomeAnalysisEngine();
-
-        testEngine.setWalker(new PrintReads());
-        testEngine.setIntervals(new GenomeLocSortedSet(genomeLocParser));
-
-        testEngine.validateSuppliedIntervals();
-    }
-
-    @Test
-    public void testLoadWellFormedSampleRenameMapFile() throws IOException {
-        final File mapFile = createTestSampleRenameMapFile(Arrays.asList("/foo/bar/first.bam    newSample1",
-                                                                         "/foo/bar/second.bam        newSample2",
-                                                                         "/foo/bar2/third.bam newSample3",
-                                                                         "/foo/bar2/fourth.bam new sample    4",
-                                                                         "/foo/bar2/fifth.bam     new   sample     5    "));
-        final GenomeAnalysisEngine engine = new GenomeAnalysisEngine();
-        final Map<String, String> renameMap = engine.loadSampleRenameMap(mapFile);
-
-        Assert.assertEquals(renameMap.size(), 5, "Sample rename map was wrong size after loading from file");
-
-        final Iterator<String> expectedResultsIterator = Arrays.asList(
-                        "/foo/bar/first.bam",   "newSample1", 
-                        "/foo/bar/second.bam",  "newSample2", 
-                        "/foo/bar2/third.bam",  "newSample3",
-                        "/foo/bar2/fourth.bam", "new sample    4",
-                        "/foo/bar2/fifth.bam",  "new   sample     5"
-        ).iterator();
-        while ( expectedResultsIterator.hasNext() ) {
-            final String expectedKey = expectedResultsIterator.next();
-            final String expectedValue = expectedResultsIterator.next();
-
-            Assert.assertNotNull(renameMap.get(expectedKey), String.format("Entry for %s not found in sample rename map", expectedKey));
-            Assert.assertEquals(renameMap.get(expectedKey), expectedValue, "Wrong value in sample rename map for " + expectedKey);
-        }
-    }
-
-    @DataProvider(name = "MalformedSampleRenameMapFileDataProvider")
-    public Object[][] generateMalformedSampleRenameMapFiles() throws IOException {
-        final List<Object[]> tests = new ArrayList<Object[]>();
-
-        tests.add(new Object[]{"testLoadSampleRenameMapFileNonExistentFile",
-                               new File("/foo/bar/nonexistent")});
-        tests.add(new Object[]{"testLoadSampleRenameMapFileMalformedLine",
-                               createTestSampleRenameMapFile(Arrays.asList("/path/to/foo.bam"))});
-        tests.add(new Object[]{"testLoadSampleRenameMapFileNonAbsoluteBamPath",
-                               createTestSampleRenameMapFile(Arrays.asList("relative/path/to/foo.bam newSample"))});
-        tests.add(new Object[]{"testLoadSampleRenameMapFileDuplicateBamPath",
-                               createTestSampleRenameMapFile(Arrays.asList("/path/to/dupe.bam newSample1",
-                                                                           "/path/to/dupe.bam newSample2"))});
-        tests.add(new Object[]{"testLoadSampleRenameMapFileTabInSampleName",
-                               createTestSampleRenameMapFile(Arrays.asList("/path/to/stuff.bam some wonky\tsample   "))});
-        return tests.toArray(new Object[][]{});
-    }
-
-    @Test(dataProvider = "MalformedSampleRenameMapFileDataProvider", expectedExceptions = UserException.class)
-    public void testLoadMalformedSampleRenameMapFile( final String testName, final File mapFile ) {
-        logger.info("Executing test " + testName);
-
-        final GenomeAnalysisEngine engine = new GenomeAnalysisEngine();
-        final Map<String, String> renameMap = engine.loadSampleRenameMap(mapFile);
-    }
-
-    private File createTestSampleRenameMapFile( final List<String> contents ) throws IOException {
-        final File mapFile = createTempFile("TestSampleRenameMapFile", ".tmp");
-        final PrintWriter writer = new PrintWriter(mapFile);
-
-        for ( final String line : contents ) {
-            writer.println(line);
-        }
-        writer.close();
-
-        return mapFile;
-    }
-
-    ///////////////////////////////////////////////////
-    // Test the ReadTransformer ordering enforcement //
-    ///////////////////////////////////////////////////
-
-    public static class TestReadTransformer extends ReadTransformer {
-
-        private OrderingConstraint orderingConstraint = OrderingConstraint.DO_NOT_CARE;
-        private boolean enabled;
-
-        protected TestReadTransformer(final OrderingConstraint orderingConstraint) {
-            this.orderingConstraint = orderingConstraint;
-            enabled = true;
-        }
-
-        // need this because PackageUtils will pick up this class as a possible ReadTransformer
-        protected TestReadTransformer() {
-            enabled = false;
-        }
-
-        @Override
-        public OrderingConstraint getOrderingConstraint() { return orderingConstraint; }
-
-        @Override
-        public ApplicationTime initializeSub(final GenomeAnalysisEngine engine, final Walker walker) { return ApplicationTime.HANDLED_IN_WALKER; }
-
-        @Override
-        public boolean enabled() { return enabled; }
-
-        @Override
-        public GATKSAMRecord apply(final GATKSAMRecord read) { return read; }
-
-    }
-
-    @DataProvider(name = "ReadTransformerData")
-    public Object[][] makeReadTransformerData() {
-        List<Object[]> tests = new ArrayList<Object[]>();
-
-        for ( final ReadTransformer.OrderingConstraint orderingConstraint1 : ReadTransformer.OrderingConstraint.values() ) {
-            for ( final ReadTransformer.OrderingConstraint orderingConstraint2 : ReadTransformer.OrderingConstraint.values() ) {
-                for ( final ReadTransformer.OrderingConstraint orderingConstraint3 : ReadTransformer.OrderingConstraint.values() ) {
-                    tests.add(new Object[]{orderingConstraint1, orderingConstraint2, orderingConstraint3});
-                }
-            }
-        }
-
-        return tests.toArray(new Object[][]{});
-    }
-
-    @Test(dataProvider = "ReadTransformerData")
-    public void testReadTransformer(final ReadTransformer.OrderingConstraint oc1, final ReadTransformer.OrderingConstraint oc2, final ReadTransformer.OrderingConstraint oc3) {
-
-        final GenomeAnalysisEngine testEngine = new GenomeAnalysisEngine();
-        final List<ReadTransformer> readTransformers = new ArrayList<ReadTransformer>(3);
-        readTransformers.add(new TestReadTransformer(oc1));
-        readTransformers.add(new TestReadTransformer(oc2));
-        readTransformers.add(new TestReadTransformer(oc3));
-
-        final boolean shouldThrowException = numWithConstraint(ReadTransformer.OrderingConstraint.MUST_BE_FIRST, oc1, oc2, oc3) > 1 ||
-                numWithConstraint(ReadTransformer.OrderingConstraint.MUST_BE_LAST, oc1, oc2, oc3) > 1;
-
-        try {
-            testEngine.setReadTransformers(readTransformers);
-
-            Assert.assertFalse(shouldThrowException);
-            Assert.assertEquals(testEngine.getReadTransformers().size(), 3);
-
-            Assert.assertTrue(testEngine.getReadTransformers().get(1).getOrderingConstraint() != ReadTransformer.OrderingConstraint.MUST_BE_FIRST);
-            Assert.assertTrue(testEngine.getReadTransformers().get(2).getOrderingConstraint() != ReadTransformer.OrderingConstraint.MUST_BE_FIRST);
-            Assert.assertTrue(testEngine.getReadTransformers().get(0).getOrderingConstraint() != ReadTransformer.OrderingConstraint.MUST_BE_LAST);
-            Assert.assertTrue(testEngine.getReadTransformers().get(1).getOrderingConstraint() != ReadTransformer.OrderingConstraint.MUST_BE_LAST);
-        } catch (UserException.IncompatibleReadFiltersException e) {
-            Assert.assertTrue(shouldThrowException);
-        }
-    }
-
-    private int numWithConstraint(final ReadTransformer.OrderingConstraint target, final ReadTransformer.OrderingConstraint... constraints ) {
-        int count = 0;
-        for ( final ReadTransformer.OrderingConstraint constraint : constraints ) {
-            if ( constraint == target )
-                count++;
-        }
-        return count;
-    }
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/MaxRuntimeIntegrationTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/MaxRuntimeIntegrationTest.java
deleted file mode 100644
index 27b6c1c..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/MaxRuntimeIntegrationTest.java
+++ /dev/null
@@ -1,151 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine;
-
-import org.broadinstitute.gatk.engine.walkers.WalkerTest;
-import org.broadinstitute.gatk.utils.commandline.Argument;
-import org.broadinstitute.gatk.utils.commandline.Output;
-import org.broadinstitute.gatk.engine.contexts.AlignmentContext;
-import org.broadinstitute.gatk.engine.contexts.ReferenceContext;
-import org.broadinstitute.gatk.engine.refdata.RefMetaDataTracker;
-import org.broadinstitute.gatk.engine.walkers.LocusWalker;
-import org.broadinstitute.gatk.utils.SimpleTimer;
-import org.testng.Assert;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
-
-import java.io.BufferedReader;
-import java.io.File;
-import java.io.FileReader;
-import java.io.PrintStream;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.List;
-import java.util.concurrent.TimeUnit;
-
-/**
- *
- */
-public class MaxRuntimeIntegrationTest extends WalkerTest {
-    public static class SleepingWalker extends LocusWalker<Integer, Integer> {
-        @Output PrintStream out;
-
-        @Argument(fullName="sleepTime",shortName="sleepTime",doc="x", required=false)
-        public int sleepTime = 100;
-
-        @Override
-        public Integer map(RefMetaDataTracker tracker, ReferenceContext ref, AlignmentContext context) {
-            try {Thread.sleep(sleepTime);} catch (InterruptedException e) {};
-            return 1;
-        }
-
-        @Override public Integer reduceInit() { return 0; }
-        @Override public Integer reduce(Integer value, Integer sum) { return sum + value; }
-
-        @Override
-        public void onTraversalDone(Integer result) {
-            out.println(result);
-        }
-    }
-
-    private static final long STARTUP_TIME = TimeUnit.NANOSECONDS.convert(60, TimeUnit.SECONDS);
-
-    private class MaxRuntimeTestProvider extends TestDataProvider {
-        final long maxRuntime;
-        final TimeUnit unit;
-
-        public MaxRuntimeTestProvider(final long maxRuntime, final TimeUnit unit) {
-            super(MaxRuntimeTestProvider.class);
-            this.maxRuntime = maxRuntime;
-            this.unit = unit;
-            setName(String.format("Max runtime test : %d of %s", maxRuntime, unit));
-        }
-
-        public long expectedMaxRuntimeNano() {
-            return TimeUnit.NANOSECONDS.convert(maxRuntime, unit) + STARTUP_TIME;
-        }
-    }
-
-    @DataProvider(name = "MaxRuntimeProvider")
-    public Object[][] makeMaxRuntimeProvider() {
-        for ( final TimeUnit requestedUnits : Arrays.asList(TimeUnit.NANOSECONDS, TimeUnit.MILLISECONDS, TimeUnit.SECONDS, TimeUnit.MINUTES) )
-            new MaxRuntimeTestProvider(requestedUnits.convert(30, TimeUnit.SECONDS), requestedUnits);
-
-        return MaxRuntimeTestProvider.getTests(MaxRuntimeTestProvider.class);
-    }
-
-    //
-    // Loop over errors to throw, make sure they are the errors we get back from the engine, regardless of NT type
-    //
-    @Test(enabled = true, dataProvider = "MaxRuntimeProvider", timeOut = 120 * 1000)
-    public void testMaxRuntime(final MaxRuntimeTestProvider cfg) {
-        WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec(
-                "-T PrintReads -R " + hg18Reference
-                        + " -I " + validationDataLocation + "NA12878.WEx.downsampled20x.bam -o /dev/null"
-                        + " -maxRuntime " + cfg.maxRuntime + " -maxRuntimeUnits " + cfg.unit, 0,
-                Collections.<String>emptyList());
-        final SimpleTimer timer = new SimpleTimer().start();
-        executeTest("Max runtime " + cfg, spec);
-        final long actualRuntimeNano = timer.getElapsedTimeNano();
-
-        Assert.assertTrue(actualRuntimeNano < cfg.expectedMaxRuntimeNano(),
-                "Actual runtime " + TimeUnit.SECONDS.convert(actualRuntimeNano, TimeUnit.NANOSECONDS)
-                        + " exceeded max. tolerated runtime " + TimeUnit.SECONDS.convert(cfg.expectedMaxRuntimeNano(), TimeUnit.NANOSECONDS)
-                        + " given requested runtime " + cfg.maxRuntime + " " + cfg.unit);
-    }
-
-    @DataProvider(name = "SubshardProvider")
-    public Object[][] makeSubshardProvider() {
-        List<Object[]> tests = new ArrayList<Object[]>();
-
-        // this functionality can be adapted to provide input data for whatever you might want in your data
-        tests.add(new Object[]{10});
-        tests.add(new Object[]{100});
-        tests.add(new Object[]{500});
-        tests.add(new Object[]{1000});
-        tests.add(new Object[]{2000});
-
-        return tests.toArray(new Object[][]{});
-    }
-
-    @Test(enabled = true, dataProvider = "SubshardProvider", timeOut = 120 * 1000)
-    public void testSubshardTimeout(final int sleepTime) throws Exception {
-        final int maxRuntime = 5000;
-
-        WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec(
-                "-T SleepingWalker -R " + b37KGReference
-                        + " -I " + privateTestDir + "NA12878.100kb.BQSRv2.example.bam -o %s"
-                        + " -maxRuntime " + maxRuntime + " -maxRuntimeUnits MILLISECONDS -sleepTime " + sleepTime, 1,
-                Collections.singletonList(""));
-        final File result = executeTest("Subshard max runtime ", spec).getFirst().get(0);
-        final int cycle = Integer.valueOf(new BufferedReader(new FileReader(result)).readLine());
-
-        final int maxCycles = (int)Math.ceil((maxRuntime * 5) / sleepTime);
-        logger.warn(String.format("Max cycles %d saw %d in file %s with sleepTime %d and maxRuntime %d", maxCycles, cycle, result, sleepTime, maxRuntime));
-        Assert.assertTrue(cycle < maxCycles, "Too many cycles seen -- saw " + cycle + " in file " + result + " but max should have been " + maxCycles);
-    }
-}
\ No newline at end of file
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/ReadMetricsUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/ReadMetricsUnitTest.java
deleted file mode 100644
index 1153bcc..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/ReadMetricsUnitTest.java
+++ /dev/null
@@ -1,371 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine;
-
-import htsjdk.samtools.reference.IndexedFastaSequenceFile;
-import htsjdk.samtools.*;
-import org.broadinstitute.gatk.utils.BaseTest;
-import org.broadinstitute.gatk.utils.commandline.Tags;
-import org.broadinstitute.gatk.engine.arguments.ValidationExclusion;
-import org.broadinstitute.gatk.engine.contexts.AlignmentContext;
-import org.broadinstitute.gatk.engine.contexts.ReferenceContext;
-import org.broadinstitute.gatk.engine.datasources.providers.LocusShardDataProvider;
-import org.broadinstitute.gatk.engine.datasources.providers.ReadShardDataProvider;
-import org.broadinstitute.gatk.engine.datasources.reads.*;
-import org.broadinstitute.gatk.engine.datasources.rmd.ReferenceOrderedDataSource;
-import org.broadinstitute.gatk.engine.executive.WindowMaker;
-import org.broadinstitute.gatk.engine.filters.ReadFilter;
-import org.broadinstitute.gatk.engine.iterators.ReadTransformer;
-import org.broadinstitute.gatk.engine.refdata.RefMetaDataTracker;
-import org.broadinstitute.gatk.engine.resourcemanagement.ThreadAllocation;
-import org.broadinstitute.gatk.engine.traversals.*;
-import org.broadinstitute.gatk.engine.walkers.*;
-import org.broadinstitute.gatk.utils.GenomeLoc;
-import org.broadinstitute.gatk.utils.GenomeLocParser;
-import org.broadinstitute.gatk.utils.GenomeLocSortedSet;
-import org.broadinstitute.gatk.utils.SampleUtils;
-import org.broadinstitute.gatk.utils.activeregion.ActiveRegion;
-import org.broadinstitute.gatk.utils.activeregion.ActivityProfileState;
-import org.broadinstitute.gatk.utils.fasta.CachingIndexedFastaSequenceFile;
-import org.broadinstitute.gatk.utils.interval.IntervalMergingRule;
-import org.broadinstitute.gatk.utils.sam.*;
-import org.testng.Assert;
-import org.testng.annotations.BeforeClass;
-import org.testng.annotations.Test;
-
-import java.io.File;
-import java.io.IOException;
-import java.util.*;
-
-public class ReadMetricsUnitTest extends BaseTest {
-
-    @Test
-    public void testReadsSeenDoNotOverflowInt() {
-
-        final ReadMetrics metrics = new ReadMetrics();
-
-        final long moreThanMaxInt = ((long)Integer.MAX_VALUE) + 1L;
-
-        for ( long i = 0L; i < moreThanMaxInt; i++ ) {
-            metrics.incrementNumReadsSeen();
-        }
-
-        Assert.assertEquals(metrics.getNumReadsSeen(), moreThanMaxInt);
-        Assert.assertTrue(metrics.getNumReadsSeen() > (long) Integer.MAX_VALUE);
-
-        logger.warn(String.format("%d %d %d", Integer.MAX_VALUE, moreThanMaxInt, Long.MAX_VALUE));
-    }
-
-
-    // Test the accuracy of the read metrics
-
-    private IndexedFastaSequenceFile reference;
-    private SAMSequenceDictionary dictionary;
-    private SAMFileHeader header;
-    private GATKSAMReadGroupRecord readGroup;
-    private GenomeLocParser genomeLocParser;
-    private File testBAM;
-
-    private static final int numReadsPerContig = 250000;
-    private static final List<String> contigs = Arrays.asList("1", "2", "3");
-
-    @BeforeClass
-    private void init() throws IOException {
-        reference = new CachingIndexedFastaSequenceFile(new File(b37KGReference));
-        dictionary = reference.getSequenceDictionary();
-        genomeLocParser = new GenomeLocParser(dictionary);
-        header = ArtificialSAMUtils.createDefaultReadGroup(new SAMFileHeader(), "test", "test");
-        header.setSequenceDictionary(dictionary);
-        header.setSortOrder(SAMFileHeader.SortOrder.coordinate);
-        readGroup = new GATKSAMReadGroupRecord(header.getReadGroup("test"));
-
-        final List<GATKSAMRecord> reads = new ArrayList<>();
-        for ( final String contig : contigs ) {
-            for ( int i = 1; i <= numReadsPerContig; i++ ) {
-                reads.add(buildSAMRecord("read" + contig + "_" + i, contig, i));
-            }
-        }
-
-        createBAM(reads);
-    }
-
-    private void createBAM(final List<GATKSAMRecord> reads) throws IOException {
-        testBAM = createTempFile("TraverseActiveRegionsUnitTest", ".bam");
-
-        SAMFileWriter out = new SAMFileWriterFactory().setCreateIndex(true).makeBAMWriter(reads.get(0).getHeader(), true, testBAM);
-        for (GATKSAMRecord read : reads ) {
-            out.addAlignment(read);
-        }
-        out.close();
-
-        new File(testBAM.getAbsolutePath().replace(".bam", ".bai")).deleteOnExit();
-        new File(testBAM.getAbsolutePath() + ".bai").deleteOnExit();
-    }
-
-    // copied from LocusViewTemplate
-    protected GATKSAMRecord buildSAMRecord(final String readName, final String contig, final int alignmentStart) {
-        GATKSAMRecord record = new GATKSAMRecord(header);
-
-        record.setReadName(readName);
-        record.setReferenceIndex(dictionary.getSequenceIndex(contig));
-        record.setAlignmentStart(alignmentStart);
-
-        record.setCigarString("1M");
-        record.setReadString("A");
-        record.setBaseQualityString("A");
-        record.setReadGroup(readGroup);
-
-        return record;
-    }
-
-    @Test
-    public void testCountsFromReadTraversal() {
-        final GenomeAnalysisEngine engine = new GenomeAnalysisEngine();
-        engine.setGenomeLocParser(genomeLocParser);
-
-        final Collection<SAMReaderID> samFiles = new ArrayList<>();
-        final SAMReaderID readerID = new SAMReaderID(testBAM, new Tags());
-        samFiles.add(readerID);
-
-        final SAMDataSource dataSource = new SAMDataSource(samFiles, new ThreadAllocation(), null, genomeLocParser,
-                false,
-                ValidationStringency.STRICT,
-                null,
-                null,
-                new ValidationExclusion(),
-                new ArrayList<ReadFilter>(),
-                new ArrayList<ReadTransformer>(),
-                false, (byte)30, false, true, null, IntervalMergingRule.ALL);
-
-        engine.setReadsDataSource(dataSource);
-
-        final TraverseReadsNano traverseReadsNano = new TraverseReadsNano(1);
-        final DummyReadWalker walker = new DummyReadWalker();
-        traverseReadsNano.initialize(engine, walker, null);
-
-        for ( final Shard shard : dataSource.createShardIteratorOverAllReads(new ReadShardBalancer()) ) {
-            final ReadShardDataProvider dataProvider = new ReadShardDataProvider(shard, engine.getGenomeLocParser(), dataSource.seek(shard), reference, new ArrayList<ReferenceOrderedDataSource>());
-            traverseReadsNano.traverse(walker, dataProvider, 0);
-            dataProvider.close();
-        }
-
-        Assert.assertEquals(engine.getCumulativeMetrics().getNumReadsSeen(), contigs.size() * numReadsPerContig);
-        Assert.assertEquals(engine.getCumulativeMetrics().getNumIterations(), contigs.size() * numReadsPerContig);
-    }
-
-    @Test
-    public void testCountsFromLocusTraversal() {
-        final GenomeAnalysisEngine engine = new GenomeAnalysisEngine();
-        engine.setGenomeLocParser(genomeLocParser);
-
-        final Collection<SAMReaderID> samFiles = new ArrayList<>();
-        final SAMReaderID readerID = new SAMReaderID(testBAM, new Tags());
-        samFiles.add(readerID);
-
-        final SAMDataSource dataSource = new SAMDataSource(samFiles, new ThreadAllocation(), null, genomeLocParser,
-                false,
-                ValidationStringency.STRICT,
-                null,
-                null,
-                new ValidationExclusion(),
-                new ArrayList<ReadFilter>(),
-                new ArrayList<ReadTransformer>(),
-                false, (byte)30, false, true, null, IntervalMergingRule.ALL);
-
-        engine.setReadsDataSource(dataSource);
-        final Set<String> samples = SampleUtils.getSAMFileSamples(dataSource.getHeader());
-
-        final TraverseLociNano traverseLociNano = new TraverseLociNano(1);
-        final DummyLocusWalker walker = new DummyLocusWalker();
-        traverseLociNano.initialize(engine, walker, null);
-
-        for ( final Shard shard : dataSource.createShardIteratorOverAllReads(new LocusShardBalancer()) ) {
-            final WindowMaker windowMaker = new WindowMaker(shard, genomeLocParser, dataSource.seek(shard), shard.getGenomeLocs(), samples);
-            for ( WindowMaker.WindowMakerIterator window : windowMaker ) {
-                final LocusShardDataProvider dataProvider = new LocusShardDataProvider(shard, shard.getReadProperties(), genomeLocParser, window.getLocus(), window, reference, new ArrayList<ReferenceOrderedDataSource>());
-                traverseLociNano.traverse(walker, dataProvider, 0);
-                dataProvider.close();
-            }
-            windowMaker.close();
-        }
-
-        //dataSource.close();
-        Assert.assertEquals(engine.getCumulativeMetrics().getNumReadsSeen(), contigs.size() * numReadsPerContig);
-        Assert.assertEquals(engine.getCumulativeMetrics().getNumIterations(), contigs.size() * numReadsPerContig);
-    }
-
-    @Test
-    public void testCountsFromActiveRegionTraversal() {
-        final GenomeAnalysisEngine engine = new GenomeAnalysisEngine();
-        engine.setGenomeLocParser(genomeLocParser);
-
-        final Collection<SAMReaderID> samFiles = new ArrayList<>();
-        final SAMReaderID readerID = new SAMReaderID(testBAM, new Tags());
-        samFiles.add(readerID);
-
-        final SAMDataSource dataSource = new SAMDataSource(samFiles, new ThreadAllocation(), null, genomeLocParser,
-                false,
-                ValidationStringency.STRICT,
-                null,
-                null,
-                new ValidationExclusion(),
-                new ArrayList<ReadFilter>(),
-                new ArrayList<ReadTransformer>(),
-                false, (byte)30, false, true, null, IntervalMergingRule.ALL);
-
-        engine.setReadsDataSource(dataSource);
-        final Set<String> samples = SampleUtils.getSAMFileSamples(dataSource.getHeader());
-
-        final List<GenomeLoc> intervals = new ArrayList<>(contigs.size());
-        for ( final String contig : contigs )
-            intervals.add(genomeLocParser.createGenomeLoc(contig, 1, numReadsPerContig));
-
-        final TraverseActiveRegions traverseActiveRegions = new TraverseActiveRegions();
-        final DummyActiveRegionWalker walker = new DummyActiveRegionWalker();
-        traverseActiveRegions.initialize(engine, walker, null);
-
-        for ( final Shard shard : dataSource.createShardIteratorOverIntervals(new GenomeLocSortedSet(genomeLocParser, intervals), new ActiveRegionShardBalancer()) ) {
-            final WindowMaker windowMaker = new WindowMaker(shard, genomeLocParser, dataSource.seek(shard), shard.getGenomeLocs(), samples);
-            for ( WindowMaker.WindowMakerIterator window : windowMaker ) {
-                final LocusShardDataProvider dataProvider = new LocusShardDataProvider(shard, shard.getReadProperties(), genomeLocParser, window.getLocus(), window, reference, new ArrayList<ReferenceOrderedDataSource>());
-                traverseActiveRegions.traverse(walker, dataProvider, 0);
-                dataProvider.close();
-            }
-            windowMaker.close();
-        }
-
-        Assert.assertEquals(engine.getCumulativeMetrics().getNumReadsSeen(), contigs.size() * numReadsPerContig);
-        Assert.assertEquals(engine.getCumulativeMetrics().getNumIterations(), contigs.size() * numReadsPerContig);
-    }
-
-    @Test
-    public void testFilteredCounts() {
-        final GenomeAnalysisEngine engine = new GenomeAnalysisEngine();
-        engine.setGenomeLocParser(genomeLocParser);
-
-        final Collection<SAMReaderID> samFiles = new ArrayList<>();
-        final SAMReaderID readerID = new SAMReaderID(testBAM, new Tags());
-        samFiles.add(readerID);
-
-        final List<ReadFilter> filters = new ArrayList<>();
-        filters.add(new EveryTenthReadFilter());
-
-        final SAMDataSource dataSource = new SAMDataSource(samFiles, new ThreadAllocation(), null, genomeLocParser,
-                false,
-                ValidationStringency.STRICT,
-                null,
-                null,
-                new ValidationExclusion(),
-                filters,
-                new ArrayList<ReadTransformer>(),
-                false, (byte)30, false, true, null, IntervalMergingRule.ALL);
-
-        engine.setReadsDataSource(dataSource);
-
-        final TraverseReadsNano traverseReadsNano = new TraverseReadsNano(1);
-        final DummyReadWalker walker = new DummyReadWalker();
-        traverseReadsNano.initialize(engine, walker, null);
-
-        for ( final Shard shard : dataSource.createShardIteratorOverAllReads(new ReadShardBalancer()) ) {
-            final ReadShardDataProvider dataProvider = new ReadShardDataProvider(shard, engine.getGenomeLocParser(), dataSource.seek(shard), reference, new ArrayList<ReferenceOrderedDataSource>());
-            traverseReadsNano.traverse(walker, dataProvider, 0);
-            dataProvider.close();
-        }
-
-        Assert.assertEquals((long)engine.getCumulativeMetrics().getCountsByFilter().get(EveryTenthReadFilter.class.getSimpleName()), contigs.size() * numReadsPerContig / 10);
-    }
-
-    class DummyLocusWalker extends LocusWalker<Integer, Integer> {
-        @Override
-        public Integer map(RefMetaDataTracker tracker, ReferenceContext ref, AlignmentContext context) {
-            return 0;
-        }
-
-        @Override
-        public Integer reduceInit() {
-            return 0;
-        }
-
-        @Override
-        public Integer reduce(Integer value, Integer sum) {
-            return 0;
-        }
-    }
-
-    class DummyReadWalker extends ReadWalker<Integer, Integer> {
-        @Override
-        public Integer map(ReferenceContext ref, GATKSAMRecord read, RefMetaDataTracker metaDataTracker) {
-            return 0;
-        }
-
-        @Override
-        public Integer reduceInit() {
-            return 0;
-        }
-
-        @Override
-        public Integer reduce(Integer value, Integer sum) {
-            return 0;
-        }
-    }
-
-    class DummyActiveRegionWalker extends ActiveRegionWalker<Integer, Integer> {
-        @Override
-        public ActivityProfileState isActive(RefMetaDataTracker tracker, ReferenceContext ref, AlignmentContext context) {
-            return new ActivityProfileState(ref.getLocus(), 0.0);
-        }
-
-        @Override
-        public Integer map(ActiveRegion activeRegion, RefMetaDataTracker metaDataTracker) {
-            return 0;
-        }
-
-        @Override
-        public Integer reduceInit() {
-            return 0;
-        }
-
-        @Override
-        public Integer reduce(Integer value, Integer sum) {
-            return 0;
-        }
-    }
-
-    private final class EveryTenthReadFilter extends ReadFilter {
-
-        private int myCounter = 0;
-
-        @Override
-        public boolean filterOut(final SAMRecord record) {
-            if ( ++myCounter == 10 ) {
-                myCounter = 0;
-                return true;
-            }
-
-            return false;
-        }
-    }
-}
\ No newline at end of file
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/WalkerManagerUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/WalkerManagerUnitTest.java
deleted file mode 100644
index 62348ef..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/WalkerManagerUnitTest.java
+++ /dev/null
@@ -1,71 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine;
-
-import org.broadinstitute.gatk.utils.commandline.Hidden;
-import org.broadinstitute.gatk.engine.walkers.Walker;
-import org.broadinstitute.gatk.tools.walkers.qc.CountLoci;
-import org.broadinstitute.gatk.utils.exceptions.DynamicClassResolutionException;
-import org.broadinstitute.gatk.utils.exceptions.UserException;
-import org.testng.Assert;
-import org.testng.annotations.BeforeClass;
-import org.testng.annotations.Test;
-
-/**
- * Tests basic functionality of the walker manager.
- */
-public class WalkerManagerUnitTest {
-    private static WalkerManager walkerManager;
-
-    @BeforeClass
-    public void setUp() {
-        walkerManager = new WalkerManager();
-    }
-
-    @Test
-    public void testPresentWalker() {
-        Walker countLociWalker = walkerManager.createByName("CountLoci");
-        Assert.assertEquals(CountLoci.class,countLociWalker.getClass());
-    }
-
-    @Test(expectedExceptions=UserException.class)
-    public void testAbsentWalker() {
-        walkerManager.createByName("Missing");
-    }
-
-    @Test(expectedExceptions=DynamicClassResolutionException.class)
-    public void testUninstantiableWalker() {
-        walkerManager.createByName("UninstantiableWalker");
-    }
-}
-
- at Hidden
-class UninstantiableWalker extends Walker<Integer,Long> {
-    // Private constructor will generate uninstantiable message
-    private UninstantiableWalker() {}
-    public Long reduceInit() { return 0L; }
-    public Long reduce(Integer value, Long accum) { return 0L; }
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/datasources/providers/AllLocusViewUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/datasources/providers/AllLocusViewUnitTest.java
deleted file mode 100644
index f9d9dfe..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/datasources/providers/AllLocusViewUnitTest.java
+++ /dev/null
@@ -1,90 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.datasources.providers;
-
-
-import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
-import org.testng.Assert;
-import org.broadinstitute.gatk.utils.GenomeLoc;
-import org.broadinstitute.gatk.engine.contexts.AlignmentContext;
-
-import java.util.List;
-/**
- * User: hanna
- * Date: May 12, 2009
- * Time: 2:34:46 PM
- * BROAD INSTITUTE SOFTWARE COPYRIGHT NOTICE AND AGREEMENT
- * Software and documentation are copyright 2005 by the Broad Institute.
- * All rights are reserved.
- *
- * Users acknowledge that this software is supplied without any warranty or support.
- * The Broad Institute is not responsible for its use, misuse, or
- * functionality.
- */
-
-/**
- * Test the view of all loci.
- */
-public class AllLocusViewUnitTest extends LocusViewTemplate {
-
-    @Override
-    protected LocusView createView(LocusShardDataProvider provider) {
-        return new AllLocusView(provider);
-    }
-
-    /**
-     * Test the reads according to an independently derived context.
-     * @param view
-     * @param range
-     * @param reads
-     */
-    @Override
-    protected void testReadsInContext( LocusView view, List<GenomeLoc> range, List<GATKSAMRecord> reads ) {
-        AllLocusView allLocusView = (AllLocusView)view;
-
-        // TODO: Should skip over loci not in the given range.
-        GenomeLoc firstLoc = range.get(0);
-        GenomeLoc lastLoc = range.get(range.size()-1);
-        GenomeLoc bounds = genomeLocParser.createGenomeLoc(firstLoc.getContig(),firstLoc.getStart(),lastLoc.getStop());
-
-        for( int i = bounds.getStart(); i <= bounds.getStop(); i++ ) {
-            GenomeLoc site = genomeLocParser.createGenomeLoc("chr1",i);
-            AlignmentContext locusContext = allLocusView.next();
-            Assert.assertEquals(locusContext.getLocation(), site, "Locus context location is incorrect");
-            int expectedReadsAtSite = 0;
-
-            for( GATKSAMRecord read: reads ) {
-                if(genomeLocParser.createGenomeLoc(read).containsP(locusContext.getLocation())) {
-                    Assert.assertTrue(locusContext.getReads().contains(read),"Target locus context does not contain reads");
-                    expectedReadsAtSite++;
-                }
-            }
-
-            Assert.assertEquals(locusContext.getReads().size(), expectedReadsAtSite, "Found wrong number of reads at site");
-        }
-
-    }
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/datasources/providers/CoveredLocusViewUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/datasources/providers/CoveredLocusViewUnitTest.java
deleted file mode 100644
index 8914a48..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/datasources/providers/CoveredLocusViewUnitTest.java
+++ /dev/null
@@ -1,103 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.datasources.providers;
-
-
-import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
-import org.testng.Assert;
-import org.broadinstitute.gatk.utils.GenomeLoc;
-import org.broadinstitute.gatk.utils.GenomeLocParser;
-import org.broadinstitute.gatk.engine.contexts.AlignmentContext;
-
-import java.util.List;
-/**
- * User: hanna
- * Date: May 12, 2009
- * Time: 2:34:46 PM
- * BROAD INSTITUTE SOFTWARE COPYRIGHT NOTICE AND AGREEMENT
- * Software and documentation are copyright 2005 by the Broad Institute.
- * All rights are reserved.
- *
- * Users acknowledge that this software is supplied without any warranty or support.
- * The Broad Institute is not responsible for its use, misuse, or
- * functionality.
- */
-
-/**
- * Test the CoveredLocusView.
- */
-public class CoveredLocusViewUnitTest extends LocusViewTemplate {
-
-    /**
-     * Retrieve a covered locus view.
-     */
-    @Override
-    protected LocusView createView(LocusShardDataProvider provider) {
-        return new CoveredLocusView(provider);
-    }
-
-    /**
-     * Test the reads according to an independently derived context.
-     * @param view
-     * @param range
-     * @param reads
-     */
-    @Override
-    protected void testReadsInContext( LocusView view, List<GenomeLoc> range, List<GATKSAMRecord> reads ) {
-        CoveredLocusView coveredLocusView = (CoveredLocusView)view;
-
-        // TODO: Should skip over loci not in the given range.
-        GenomeLoc firstLoc = range.get(0);
-        GenomeLoc lastLoc = range.get(range.size()-1);
-        GenomeLoc bounds = genomeLocParser.createGenomeLoc(firstLoc.getContig(),firstLoc.getStart(),lastLoc.getStop());
-
-        for( int i = bounds.getStart(); i <= bounds.getStop(); i++ ) {
-            GenomeLoc site = genomeLocParser.createGenomeLoc("chr1",i);
-
-            int expectedReadsAtSite = 0;
-            for( GATKSAMRecord read: reads ) {
-                if( genomeLocParser.createGenomeLoc(read).containsP(site) )
-                    expectedReadsAtSite++;
-            }
-
-            if( expectedReadsAtSite < 1 )
-                continue;
-
-            Assert.assertTrue(coveredLocusView.hasNext(),"Incorrect number of loci in view");
-
-            AlignmentContext locusContext = coveredLocusView.next();
-            Assert.assertEquals(locusContext.getLocation(), site, "Target locus context location is incorrect");
-            Assert.assertEquals(locusContext.getReads().size(), expectedReadsAtSite, "Found wrong number of reads at site");
-
-            for( GATKSAMRecord read: reads ) {
-                if(genomeLocParser.createGenomeLoc(read).containsP(locusContext.getLocation()))
-                    Assert.assertTrue(locusContext.getReads().contains(read),"Target locus context does not contain reads");
-            }
-        }
-
-        Assert.assertFalse(coveredLocusView.hasNext(),"Iterator is not bounded at boundaries of shard");
-    }        
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/datasources/providers/IntervalReferenceOrderedViewUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/datasources/providers/IntervalReferenceOrderedViewUnitTest.java
deleted file mode 100644
index 29ccbd6..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/datasources/providers/IntervalReferenceOrderedViewUnitTest.java
+++ /dev/null
@@ -1,366 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.datasources.providers;
-
-import htsjdk.samtools.util.PeekableIterator;
-import htsjdk.samtools.SAMFileHeader;
-import htsjdk.tribble.BasicFeature;
-import htsjdk.tribble.Feature;
-import org.broadinstitute.gatk.utils.BaseTest;
-import org.broadinstitute.gatk.utils.commandline.RodBinding;
-import org.broadinstitute.gatk.engine.refdata.RODRecordListImpl;
-import org.broadinstitute.gatk.engine.refdata.RefMetaDataTracker;
-import org.broadinstitute.gatk.engine.refdata.utils.GATKFeature;
-import org.broadinstitute.gatk.engine.refdata.utils.RODRecordList;
-import org.broadinstitute.gatk.utils.GenomeLoc;
-import org.broadinstitute.gatk.utils.GenomeLocParser;
-import org.broadinstitute.gatk.utils.Utils;
-import org.broadinstitute.gatk.utils.sam.ArtificialSAMUtils;
-import org.testng.Assert;
-import org.testng.annotations.BeforeClass;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
-
-import java.util.*;
-
-/**
- * @author depristo
- */
-public class IntervalReferenceOrderedViewUnitTest extends BaseTest {
-    private static int startingChr = 1;
-    private static int endingChr = 2;
-    private static int readCount = 100;
-    private static int DEFAULT_READ_LENGTH = ArtificialSAMUtils.DEFAULT_READ_LENGTH;
-    private static String contig;
-    private static SAMFileHeader header;
-
-    private GenomeLocParser genomeLocParser;
-
-    @BeforeClass
-    public void beforeClass() {
-        header = ArtificialSAMUtils.createArtificialSamHeader((endingChr - startingChr) + 1, startingChr, readCount + DEFAULT_READ_LENGTH);
-        contig = header.getSequence(0).getSequenceName();
-        genomeLocParser = new GenomeLocParser(header.getSequenceDictionary());
-
-        initializeTests();
-    }
-
-    private class CompareFeatures implements Comparator<Feature> {
-        @Override
-        public int compare(Feature o1, Feature o2) {
-            return genomeLocParser.createGenomeLoc(o1).compareTo(genomeLocParser.createGenomeLoc(o2));
-        }
-    }
-
-    private class ReadMetaDataTrackerRODStreamTest extends TestDataProvider {
-        final List<Feature> allFeatures;
-        final List<GenomeLoc> intervals;
-
-        public ReadMetaDataTrackerRODStreamTest(final List<Feature> allFeatures, final GenomeLoc interval) {
-            this(allFeatures, Collections.singletonList(interval));
-        }
-
-        public ReadMetaDataTrackerRODStreamTest(final List<Feature> allFeatures, final List<GenomeLoc> intervals) {
-            super(ReadMetaDataTrackerRODStreamTest.class);
-            this.allFeatures = new ArrayList<Feature>(allFeatures);
-            Collections.sort(this.allFeatures, new CompareFeatures());
-            this.intervals = new ArrayList<GenomeLoc>(intervals);
-            Collections.sort(this.intervals);
-            setName(String.format("%s nFeatures %d intervals %s", getClass().getSimpleName(), allFeatures.size(),
-                    intervals.size() == 1 ? intervals.get(0) : "size " + intervals.size()));
-        }
-
-        public PeekableIterator<RODRecordList> getIterator(final String name) {
-            return new PeekableIterator<RODRecordList>(new TribbleIteratorFromCollection(name, genomeLocParser, allFeatures));
-        }
-
-        public Set<Feature> getExpectedOverlaps(final GenomeLoc interval) {
-            final Set<Feature> overlapping = new HashSet<Feature>();
-            for ( final Feature f : allFeatures )
-                if ( genomeLocParser.createGenomeLoc(f).overlapsP(interval) )
-                    overlapping.add(f);
-            return overlapping;
-        }
-    }
-
-    public void initializeTests() {
-        final List<Feature> handPickedFeatures = new ArrayList<Feature>();
-
-        handPickedFeatures.add(new BasicFeature(contig, 1, 1));
-        handPickedFeatures.add(new BasicFeature(contig, 2, 5));
-        handPickedFeatures.add(new BasicFeature(contig, 4, 4));
-        handPickedFeatures.add(new BasicFeature(contig, 6, 6));
-        handPickedFeatures.add(new BasicFeature(contig, 9, 10));
-        handPickedFeatures.add(new BasicFeature(contig, 10, 10));
-        handPickedFeatures.add(new BasicFeature(contig, 10, 11));
-        handPickedFeatures.add(new BasicFeature(contig, 13, 20));
-
-        createTestsForFeatures(handPickedFeatures);
-
-        // test in the present of a large spanning element
-        {
-            List<Feature> oneLargeSpan = new ArrayList<Feature>(handPickedFeatures);
-            oneLargeSpan.add(new BasicFeature(contig, 1, 30));
-            createTestsForFeatures(oneLargeSpan);
-        }
-
-        // test in the presence of a partially spanning element
-        {
-            List<Feature> partialSpanStart = new ArrayList<Feature>(handPickedFeatures);
-            partialSpanStart.add(new BasicFeature(contig, 1, 6));
-            createTestsForFeatures(partialSpanStart);
-        }
-
-        // test in the presence of a partially spanning element at the end
-        {
-            List<Feature> partialSpanEnd = new ArrayList<Feature>(handPickedFeatures);
-            partialSpanEnd.add(new BasicFeature(contig, 10, 30));
-            createTestsForFeatures(partialSpanEnd);
-        }
-
-        // no data at all
-        final GenomeLoc loc = genomeLocParser.createGenomeLoc(contig, 5, 5);
-        new ReadMetaDataTrackerRODStreamTest(Collections.<Feature>emptyList(), loc);
-    }
-
-    // --------------------------------------------------------------------------------
-    //
-    // tests for the lower level IntervalOverlappingRODsFromStream
-    //
-    // --------------------------------------------------------------------------------
-
-    @DataProvider(name = "ReadMetaDataTrackerRODStreamTest")
-    public Object[][] createReadMetaDataTrackerRODStreamTest() {
-        return ReadMetaDataTrackerRODStreamTest.getTests(ReadMetaDataTrackerRODStreamTest.class);
-    }
-
-    private GenomeLoc span(final List<GenomeLoc> features) {
-        int featuresStart = 1; for ( final GenomeLoc f : features ) featuresStart = Math.min(featuresStart, f.getStart());
-        int featuresStop = 1; for ( final GenomeLoc f : features ) featuresStop = Math.max(featuresStop, f.getStop());
-        return genomeLocParser.createGenomeLoc(contig, featuresStart, featuresStop);
-    }
-
-    private void createTestsForFeatures(final List<Feature> features) {
-        int featuresStart = 1; for ( final Feature f : features ) featuresStart = Math.min(featuresStart, f.getStart());
-        int featuresStop = 1; for ( final Feature f : features ) featuresStop = Math.max(featuresStop, f.getEnd());
-
-        for ( final int size : Arrays.asList(1, 5, 10, 100) ) {
-            final List<GenomeLoc> allIntervals = new ArrayList<GenomeLoc>();
-            // regularly spaced
-            for ( int start = featuresStart; start < featuresStop; start++) {
-                final GenomeLoc loc = genomeLocParser.createGenomeLoc(contig, start, start + size - 1);
-                allIntervals.add(loc);
-                new ReadMetaDataTrackerRODStreamTest(features, loc);
-            }
-
-            // starting and stopping at every feature
-            for ( final Feature f : features ) {
-                // just at the feature
-                allIntervals.add(genomeLocParser.createGenomeLoc(contig, f.getStart(), f.getEnd()));
-                new ReadMetaDataTrackerRODStreamTest(features, allIntervals.get(allIntervals.size() - 1));
-
-                // up to end
-                allIntervals.add(genomeLocParser.createGenomeLoc(contig, f.getStart() - 1, f.getEnd()));
-                new ReadMetaDataTrackerRODStreamTest(features, allIntervals.get(allIntervals.size() - 1));
-
-                // missing by 1
-                allIntervals.add(genomeLocParser.createGenomeLoc(contig, f.getStart() + 1, f.getEnd() + 1));
-                new ReadMetaDataTrackerRODStreamTest(features, allIntervals.get(allIntervals.size() - 1));
-
-                // just spanning
-                allIntervals.add(genomeLocParser.createGenomeLoc(contig, f.getStart() - 1, f.getEnd() + 1));
-                new ReadMetaDataTrackerRODStreamTest(features, allIntervals.get(allIntervals.size() - 1));
-            }
-
-            new ReadMetaDataTrackerRODStreamTest(features, allIntervals);
-        }
-    }
-
-    @Test(enabled = true, dataProvider = "ReadMetaDataTrackerRODStreamTest")
-    public void runReadMetaDataTrackerRODStreamTest_singleQuery(final ReadMetaDataTrackerRODStreamTest data) {
-        if ( data.intervals.size() == 1 ) {
-            final String name = "testName";
-            final PeekableIterator<RODRecordList> iterator = data.getIterator(name);
-            final IntervalOverlappingRODsFromStream stream = new IntervalOverlappingRODsFromStream(name, iterator);
-            testRODStream(data, stream, Collections.singletonList(data.intervals.get(0)));
-        }
-    }
-
-    @Test(enabled = true, dataProvider = "ReadMetaDataTrackerRODStreamTest", dependsOnMethods = "runReadMetaDataTrackerRODStreamTest_singleQuery")
-    public void runReadMetaDataTrackerRODStreamTest_multipleQueries(final ReadMetaDataTrackerRODStreamTest data) {
-        if ( data.intervals.size() > 1 ) {
-            final String name = "testName";
-            final PeekableIterator<RODRecordList> iterator = data.getIterator(name);
-            final IntervalOverlappingRODsFromStream stream = new IntervalOverlappingRODsFromStream(name, iterator);
-            testRODStream(data, stream, data.intervals);
-        }
-    }
-
-    private void testRODStream(final ReadMetaDataTrackerRODStreamTest test, final IntervalOverlappingRODsFromStream stream, final List<GenomeLoc> intervals) {
-        for ( final GenomeLoc interval : intervals ) {
-            final RODRecordList query = stream.getOverlapping(interval);
-            final HashSet<Feature> queryFeatures = new HashSet<Feature>();
-            for ( final GATKFeature f : query ) queryFeatures.add((Feature)f.getUnderlyingObject());
-            final Set<Feature> overlaps = test.getExpectedOverlaps(interval);
-
-            Assert.assertEquals(queryFeatures.size(), overlaps.size(), "IntervalOverlappingRODsFromStream didn't return the expected set of overlapping features." +
-                    " Expected size = " + overlaps.size() + " but saw " + queryFeatures.size());
-
-            BaseTest.assertEqualsSet(queryFeatures, overlaps, "IntervalOverlappingRODsFromStream didn't return the expected set of overlapping features." +
-                    " Expected = " + Utils.join(",", overlaps) + " but saw " + Utils.join(",", queryFeatures));
-        }
-    }
-
-    // --------------------------------------------------------------------------------
-    //
-    // tests for the higher level tracker itself
-    //
-    // --------------------------------------------------------------------------------
-
-    @DataProvider(name = "ReadMetaDataTrackerTests")
-    public Object[][] createTrackerTests() {
-        List<Object[]> tests = new ArrayList<Object[]>();
-
-        final Object[][] singleTests = ReadMetaDataTrackerRODStreamTest.getTests(ReadMetaDataTrackerRODStreamTest.class);
-        final List<ReadMetaDataTrackerRODStreamTest> multiSiteTests = new ArrayList<ReadMetaDataTrackerRODStreamTest>();
-        for ( final Object[] singleTest : singleTests ) {
-            if ( ((ReadMetaDataTrackerRODStreamTest)singleTest[0]).intervals.size() > 1 )
-                multiSiteTests.add((ReadMetaDataTrackerRODStreamTest)singleTest[0]);
-        }
-
-        for ( final boolean testStateless : Arrays.asList(true, false) ) {
-            // all pairwise tests
-            for ( List<ReadMetaDataTrackerRODStreamTest> singleTest : Utils.makePermutations(multiSiteTests, 2, false)) {
-                tests.add(new Object[]{singleTest, testStateless});
-            }
-
-            // all 3 way pairwise tests
-            //for ( List<ReadMetaDataTrackerRODStreamTest> singleTest : Utils.makePermutations(multiSiteTests, 3, false)) {
-            //    tests.add(new Object[]{singleTest, testStateless});
-            //}
-        }
-
-        logger.warn("Creating " + tests.size() + " tests for ReadMetaDataTrackerTests");
-        return tests.toArray(new Object[][]{});
-    }
-
-    @Test(enabled = true, dataProvider = "ReadMetaDataTrackerTests", dependsOnMethods = "runReadMetaDataTrackerRODStreamTest_multipleQueries")
-    public void runReadMetaDataTrackerTest(final List<ReadMetaDataTrackerRODStreamTest> RODs, final boolean testStateless) {
-        final List<String> names = new ArrayList<String>();
-        final List<PeekableIterator<RODRecordList>> iterators = new ArrayList<PeekableIterator<RODRecordList>>();
-        final List<GenomeLoc> intervals = new ArrayList<GenomeLoc>();
-        final List<RodBinding<Feature>> rodBindings = new ArrayList<RodBinding<Feature>>();
-
-        for ( int i = 0; i < RODs.size(); i++ ) {
-            final RodBinding<Feature> rodBinding = new RodBinding<Feature>(Feature.class, "name"+i);
-            rodBindings.add(rodBinding);
-            final String name = rodBinding.getName();
-            names.add(name);
-            iterators.add(RODs.get(i).getIterator(name));
-            intervals.addAll(RODs.get(i).intervals);
-        }
-
-        Collections.sort(intervals);
-        final GenomeLoc span = span(intervals);
-        final IntervalReferenceOrderedView view = new IntervalReferenceOrderedView(genomeLocParser, span, names, iterators);
-
-        if ( testStateless ) {
-            // test each tracker is well formed, as each is created
-            for ( final GenomeLoc interval : intervals ) {
-                final RefMetaDataTracker tracker = view.getReferenceOrderedDataForInterval(interval);
-                testMetaDataTrackerBindings(tracker, interval, RODs, rodBindings);
-            }
-        } else {
-            // tests all trackers are correct after reading them into an array
-            // this checks that the trackers are be safely stored away and analyzed later (critical for nano-scheduling)
-            final List<RefMetaDataTracker> trackers = new ArrayList<RefMetaDataTracker>();
-            for ( final GenomeLoc interval : intervals ) {
-                final RefMetaDataTracker tracker = view.getReferenceOrderedDataForInterval(interval);
-                trackers.add(tracker);
-            }
-
-            for ( int i = 0; i < trackers.size(); i++) {
-                testMetaDataTrackerBindings(trackers.get(i), intervals.get(i), RODs, rodBindings);
-            }
-        }
-    }
-
-    private void testMetaDataTrackerBindings(final RefMetaDataTracker tracker,
-                                             final GenomeLoc interval,
-                                             final List<ReadMetaDataTrackerRODStreamTest> RODs,
-                                             final List<RodBinding<Feature>> rodBindings) {
-        for ( int i = 0; i < RODs.size(); i++ ) {
-            final ReadMetaDataTrackerRODStreamTest test = RODs.get(i);
-            final List<Feature> queryFeaturesList = tracker.getValues(rodBindings.get(i));
-            final Set<Feature> queryFeatures = new HashSet<Feature>(queryFeaturesList);
-            final Set<Feature> overlaps = test.getExpectedOverlaps(interval);
-
-            Assert.assertEquals(queryFeatures.size(), overlaps.size(), "IntervalOverlappingRODsFromStream didn't return the expected set of overlapping features." +
-                    " Expected size = " + overlaps.size() + " but saw " + queryFeatures.size());
-
-            BaseTest.assertEqualsSet(queryFeatures, overlaps, "IntervalOverlappingRODsFromStream didn't return the expected set of overlapping features." +
-                    " Expected = " + Utils.join(",", overlaps) + " but saw " + Utils.join(",", queryFeatures));
-        }
-    }
-
-    static class TribbleIteratorFromCollection implements Iterator<RODRecordList> {
-        // current location
-        private final String name;
-        final Queue<GATKFeature> gatkFeatures;
-
-        public TribbleIteratorFromCollection(final String name, final GenomeLocParser genomeLocParser, final List<Feature> features) {
-            this.name = name;
-
-            this.gatkFeatures = new LinkedList<GATKFeature>();
-            for ( final Feature f : features )
-                gatkFeatures.add(new GATKFeature.TribbleGATKFeature(genomeLocParser, f, name));
-        }
-
-        @Override
-        public boolean hasNext() {
-            return ! gatkFeatures.isEmpty();
-        }
-
-        @Override
-        public RODRecordList next() {
-            final GATKFeature first = gatkFeatures.poll();
-            final Collection<GATKFeature> myFeatures = new LinkedList<GATKFeature>();
-            myFeatures.add(first);
-            while ( gatkFeatures.peek() != null && gatkFeatures.peek().getLocation().getStart() == first.getStart() )
-                myFeatures.add(gatkFeatures.poll());
-
-            GenomeLoc loc = first.getLocation();
-            for ( final GATKFeature feature : myFeatures )
-                loc = loc.merge(feature.getLocation());
-
-            return new RODRecordListImpl(name, myFeatures, loc); // is this safe?
-        }
-
-        @Override public void remove() { throw new IllegalStateException("GRRR"); }
-    }
-}
-
-
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/datasources/providers/LocusReferenceViewUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/datasources/providers/LocusReferenceViewUnitTest.java
deleted file mode 100644
index 5eb9c7a..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/datasources/providers/LocusReferenceViewUnitTest.java
+++ /dev/null
@@ -1,143 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.datasources.providers;
-
-import org.broadinstitute.gatk.engine.datasources.reads.Shard;
-import org.testng.Assert;
-import org.testng.annotations.Test;
-
-import org.broadinstitute.gatk.utils.GenomeLoc;
-import org.broadinstitute.gatk.engine.datasources.reads.MockLocusShard;
-import org.broadinstitute.gatk.engine.iterators.GenomeLocusIterator;
-import org.broadinstitute.gatk.engine.contexts.ReferenceContext;
-
-import htsjdk.samtools.reference.ReferenceSequence;
-import htsjdk.samtools.util.StringUtil;
-
-import java.util.Collections;
-/*
- * Copyright (c) 2009 The Broad Institute
- *
- * Permission is hereby granted, free of charge, to any person
- * obtaining a copy of this software and associated documentation
- * files (the "Software"), to deal in the Software without
- * restriction, including without limitation the rights to use,
- * copy, modify, merge, publish, distribute, sublicense, and/or sell
- * copies of the Software, and to permit persons to whom the
- * Software is furnished to do so, subject to the following
- * conditions:
- *
- * The above copyright notice and this permission notice shall be
- * included in all copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
- * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
- * OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
- * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
- * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
- * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
- * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
- * OTHER DEALINGS IN THE SOFTWARE.
- */
-
-/** Tests for viewing the reference from the perspective of a locus. */
-
-public class LocusReferenceViewUnitTest extends ReferenceViewTemplate {
-
-//
-//    /** Multiple-base pair queries should generate exceptions. */
-//    @Test(expectedExceptions=InvalidPositionException.class)
-//    public void testSingleBPFailure() {
-//        Shard shard = new LocusShard(GenomeLocParser.createGenomeLoc(0, 1, 50));
-//
-//        ShardDataProvider dataProvider = new ShardDataProvider(shard, null, sequenceFile, null);
-//        LocusReferenceView view = new LocusReferenceView(dataProvider);
-//
-//        view.getReferenceContext(shard.getGenomeLoc()).getBase();
-//    }
-
-    @Test
-    public void testOverlappingReferenceBases() {
-        Shard shard = new MockLocusShard(genomeLocParser,Collections.singletonList(genomeLocParser.createGenomeLoc(sequenceFile.getSequenceDictionary().getSequence(0).getSequenceName(),
-                                                                                                                   sequenceFile.getSequence("chrM").length() - 10,
-                                                                                                                   sequenceFile.getSequence("chrM").length())));
-        LocusShardDataProvider dataProvider = new LocusShardDataProvider(shard, null, genomeLocParser, shard.getGenomeLocs().get(0), null, sequenceFile, null);
-        LocusReferenceView view = new LocusReferenceView(dataProvider);
-
-        byte[] results = view.getReferenceBases(genomeLocParser.createGenomeLoc(sequenceFile.getSequenceDictionary().getSequence(0).getSequenceName(),
-                                                                                sequenceFile.getSequence("chrM").length() - 10,
-                                                                                sequenceFile.getSequence("chrM").length() + 9));
-        System.out.printf("results are %s%n", new String(results));
-        Assert.assertEquals(results.length, 20);
-        for (int x = 0; x < results.length; x++) {
-            if (x <= 10) Assert.assertTrue(results[x] != 'X');
-            else Assert.assertTrue(results[x] == 'X');
-        }
-    }
-
-
-    /** Queries outside the bounds of the shard should result in reference context window trimmed at the shard boundary. */
-    @Test
-    public void testBoundsFailure() {
-        Shard shard = new MockLocusShard(genomeLocParser,Collections.singletonList(genomeLocParser.createGenomeLoc(sequenceFile.getSequenceDictionary().getSequence(0).getSequenceName(), 1, 50)));
-
-        LocusShardDataProvider dataProvider = new LocusShardDataProvider(shard, null, genomeLocParser, shard.getGenomeLocs().get(0), null, sequenceFile, null);
-        LocusReferenceView view = new LocusReferenceView(dataProvider);
-
-        GenomeLoc locus = genomeLocParser.createGenomeLoc(sequenceFile.getSequenceDictionary().getSequence(0).getSequenceName(), 50, 51);
-
-        ReferenceContext rc = view.getReferenceContext(locus);
-        Assert.assertTrue(rc.getLocus().equals(locus));
-        Assert.assertTrue(rc.getWindow().equals(genomeLocParser.createGenomeLoc(sequenceFile.getSequenceDictionary().getSequence(0).getSequenceName(),50)));
-        Assert.assertTrue(rc.getBases().length == 1);
-    }
-
-
-    /**
-     * Compares the contents of the fasta and view at a specified location.
-     *
-     * @param loc
-     */
-    protected void validateLocation( GenomeLoc loc ) {
-        Shard shard = new MockLocusShard(genomeLocParser,Collections.singletonList(loc));
-        GenomeLocusIterator shardIterator = new GenomeLocusIterator(genomeLocParser,loc);
-
-        LocusShardDataProvider dataProvider = new LocusShardDataProvider(shard, null, genomeLocParser, loc, null, sequenceFile, null);
-        LocusReferenceView view = new LocusReferenceView(dataProvider);
-
-        while (shardIterator.hasNext()) {
-            GenomeLoc locus = shardIterator.next();
-
-            ReferenceSequence expectedAsSeq = sequenceFile.getSubsequenceAt(locus.getContig(), locus.getStart(), locus.getStop());
-            char expected = Character.toUpperCase(StringUtil.bytesToString(expectedAsSeq.getBases()).charAt(0));
-            char actual = view.getReferenceContext(locus).getBaseAsChar();
-
-            Assert.assertEquals(actual, expected, String.format("Value of base at position %s in shard %s does not match expected", locus.toString(), shard.getGenomeLocs())
-            );
-        }
-    }
-
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/datasources/providers/LocusViewTemplate.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/datasources/providers/LocusViewTemplate.java
deleted file mode 100644
index 650b146..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/datasources/providers/LocusViewTemplate.java
+++ /dev/null
@@ -1,405 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.datasources.providers;
-
-import htsjdk.samtools.reference.ReferenceSequence;
-import htsjdk.samtools.reference.ReferenceSequenceFile;
-import htsjdk.samtools.*;
-import org.broadinstitute.gatk.utils.BaseTest;
-import org.broadinstitute.gatk.engine.datasources.reads.MockLocusShard;
-import org.broadinstitute.gatk.engine.datasources.reads.SAMReaderID;
-import org.broadinstitute.gatk.engine.datasources.reads.Shard;
-import org.broadinstitute.gatk.engine.executive.WindowMaker;
-import org.broadinstitute.gatk.engine.datasources.reads.LocusShard;
-import org.broadinstitute.gatk.engine.datasources.reads.SAMDataSource;
-import org.broadinstitute.gatk.engine.iterators.GATKSAMIterator;
-import org.broadinstitute.gatk.engine.resourcemanagement.ThreadAllocation;
-import org.broadinstitute.gatk.utils.GenomeLoc;
-import org.broadinstitute.gatk.utils.GenomeLocParser;
-import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
-import org.testng.annotations.BeforeClass;
-import org.testng.annotations.Test;
-
-import java.io.FileNotFoundException;
-import java.io.IOException;
-import java.util.*;
-/**
- * User: hanna
- * Date: May 13, 2009
- * Time: 4:29:08 PM
- * BROAD INSTITUTE SOFTWARE COPYRIGHT NOTICE AND AGREEMENT
- * Software and documentation are copyright 2005 by the Broad Institute.
- * All rights are reserved.
- *
- * Users acknowledge that this software is supplied without any warranty or support.
- * The Broad Institute is not responsible for its use, misuse, or
- * functionality.
- */
-
-/** Base support for testing variants of the LocusView family of classes. */
-
-public abstract class LocusViewTemplate extends BaseTest {
-    protected static ReferenceSequenceFile sequenceSourceFile = null;
-    protected GenomeLocParser genomeLocParser = null;
-
-    @BeforeClass
-    public void setupGenomeLoc() throws FileNotFoundException {
-        sequenceSourceFile = fakeReferenceSequenceFile();
-        genomeLocParser = new GenomeLocParser(sequenceSourceFile);
-    }
-
-    @Test
-    public void emptyAlignmentContextTest() {
-        SAMRecordIterator iterator = new SAMRecordIterator();
-
-        GenomeLoc shardBounds = genomeLocParser.createGenomeLoc("chr1", 1, 5);
-        Shard shard = new LocusShard(genomeLocParser, new SAMDataSource(Collections.<SAMReaderID>emptyList(),new ThreadAllocation(),null,genomeLocParser),Collections.singletonList(shardBounds),Collections.<SAMReaderID,SAMFileSpan>emptyMap());
-        WindowMaker windowMaker = new WindowMaker(shard,genomeLocParser,iterator,shard.getGenomeLocs());
-        WindowMaker.WindowMakerIterator window = windowMaker.next();
-        LocusShardDataProvider dataProvider = new LocusShardDataProvider(shard, null, genomeLocParser, window.getLocus(), window, null, null);
-
-        LocusView view = createView(dataProvider);
-
-        testReadsInContext(view, shard.getGenomeLocs(), Collections.<GATKSAMRecord>emptyList());
-    }
-
-    @Test
-    public void singleReadTest() {
-        GATKSAMRecord read = buildSAMRecord("read1","chr1", 1, 5);
-        SAMRecordIterator iterator = new SAMRecordIterator(read);
-
-        GenomeLoc shardBounds = genomeLocParser.createGenomeLoc("chr1", 1, 5);
-        Shard shard = new MockLocusShard(genomeLocParser,Collections.singletonList(shardBounds));
-        WindowMaker windowMaker = new WindowMaker(shard,genomeLocParser,iterator,shard.getGenomeLocs());
-        WindowMaker.WindowMakerIterator window = windowMaker.next();
-        LocusShardDataProvider dataProvider = new LocusShardDataProvider(shard, window.getSourceInfo(), genomeLocParser, window.getLocus(), window, null, null);
-
-        LocusView view = createView(dataProvider);
-
-        testReadsInContext(view, shard.getGenomeLocs(), Collections.singletonList(read));
-    }
-
-    @Test
-    public void readCoveringFirstPartTest() {
-        GATKSAMRecord read = buildSAMRecord("read1","chr1", 1, 5);
-        SAMRecordIterator iterator = new SAMRecordIterator(read);
-
-        Shard shard = new MockLocusShard(genomeLocParser,Collections.singletonList(genomeLocParser.createGenomeLoc("chr1", 1, 10)));
-        WindowMaker windowMaker = new WindowMaker(shard,genomeLocParser,iterator,shard.getGenomeLocs());
-        WindowMaker.WindowMakerIterator window = windowMaker.next();
-        LocusShardDataProvider dataProvider = new LocusShardDataProvider(shard, window.getSourceInfo(), genomeLocParser, window.getLocus(), window, null, null);
-        LocusView view = createView(dataProvider);
-
-        testReadsInContext(view, shard.getGenomeLocs(), Collections.singletonList(read));
-    }
-
-    @Test
-    public void readCoveringLastPartTest() {
-        GATKSAMRecord read = buildSAMRecord("read1","chr1", 6, 10);
-        SAMRecordIterator iterator = new SAMRecordIterator(read);
-
-        Shard shard = new MockLocusShard(genomeLocParser,Collections.singletonList(genomeLocParser.createGenomeLoc("chr1", 1, 10)));
-        WindowMaker windowMaker = new WindowMaker(shard,genomeLocParser,iterator,shard.getGenomeLocs());
-        WindowMaker.WindowMakerIterator window = windowMaker.next();
-        LocusShardDataProvider dataProvider = new LocusShardDataProvider(shard, window.getSourceInfo(), genomeLocParser, window.getLocus(), window, null, null);
-        LocusView view = createView(dataProvider);
-
-        testReadsInContext(view, shard.getGenomeLocs(), Collections.singletonList(read));
-    }
-
-    @Test
-    public void readCoveringMiddleTest() {
-        GATKSAMRecord read = buildSAMRecord("read1","chr1", 3, 7);
-        SAMRecordIterator iterator = new SAMRecordIterator(read);
-
-        Shard shard = new MockLocusShard(genomeLocParser,Collections.singletonList(genomeLocParser.createGenomeLoc("chr1", 1, 10)));
-        WindowMaker windowMaker = new WindowMaker(shard,genomeLocParser,iterator,shard.getGenomeLocs());
-        WindowMaker.WindowMakerIterator window = windowMaker.next();
-        LocusShardDataProvider dataProvider = new LocusShardDataProvider(shard, window.getSourceInfo(), genomeLocParser, window.getLocus(), window, null, null);
-        LocusView view = createView(dataProvider);
-
-        testReadsInContext(view, shard.getGenomeLocs(), Collections.singletonList(read));
-    }
-
-    @Test
-    public void readAndLocusOverlapAtLastBase() {
-        GATKSAMRecord read = buildSAMRecord("read1","chr1", 1, 5);
-        SAMRecordIterator iterator = new SAMRecordIterator(read);
-
-        Shard shard = new MockLocusShard(genomeLocParser,Collections.singletonList(genomeLocParser.createGenomeLoc("chr1", 5, 5)));
-        WindowMaker windowMaker = new WindowMaker(shard,genomeLocParser,iterator,shard.getGenomeLocs());
-        WindowMaker.WindowMakerIterator window = windowMaker.next();
-        LocusShardDataProvider dataProvider = new LocusShardDataProvider(shard, window.getSourceInfo(), genomeLocParser, window.getLocus(), window, null, null);
-        LocusView view = createView(dataProvider);
-
-        testReadsInContext(view, shard.getGenomeLocs(), Collections.singletonList(read));
-    }
-
-    @Test
-    public void readOverlappingStartTest() {
-        GATKSAMRecord read = buildSAMRecord("read1","chr1", 1, 10);
-        SAMRecordIterator iterator = new SAMRecordIterator(read);
-
-        Shard shard = new MockLocusShard(genomeLocParser,Collections.singletonList(genomeLocParser.createGenomeLoc("chr1", 6, 15)));
-        WindowMaker windowMaker = new WindowMaker(shard,genomeLocParser,iterator,shard.getGenomeLocs());
-        WindowMaker.WindowMakerIterator window = windowMaker.next();
-        LocusShardDataProvider dataProvider = new LocusShardDataProvider(shard, window.getSourceInfo(), genomeLocParser, window.getLocus(), window, null, null);
-        LocusView view = createView(dataProvider);
-
-        testReadsInContext(view, shard.getGenomeLocs(), Collections.singletonList(read));
-    }
-
-    @Test
-    public void readOverlappingEndTest() {
-        GATKSAMRecord read = buildSAMRecord("read1","chr1", 6, 15);
-        SAMRecordIterator iterator = new SAMRecordIterator(read);
-
-        Shard shard = new MockLocusShard(genomeLocParser,Collections.singletonList(genomeLocParser.createGenomeLoc("chr1", 1, 10)));
-        WindowMaker windowMaker = new WindowMaker(shard,genomeLocParser,iterator,shard.getGenomeLocs());
-        WindowMaker.WindowMakerIterator window = windowMaker.next();
-        LocusShardDataProvider dataProvider = new LocusShardDataProvider(shard, window.getSourceInfo(), genomeLocParser, window.getLocus(), window, null, null);
-        LocusView view = createView(dataProvider);
-
-        testReadsInContext(view, shard.getGenomeLocs(), Collections.singletonList(read));
-    }
-
-    @Test
-    public void readsSpanningTest() {
-        GATKSAMRecord read1 = buildSAMRecord("read1","chr1", 1, 5);
-        GATKSAMRecord read2 = buildSAMRecord("read2","chr1", 6, 10);
-        SAMRecordIterator iterator = new SAMRecordIterator(read1, read2);
-
-        Shard shard = new MockLocusShard(genomeLocParser,Collections.singletonList(genomeLocParser.createGenomeLoc("chr1", 1, 10)));
-        WindowMaker windowMaker = new WindowMaker(shard,genomeLocParser,iterator,shard.getGenomeLocs());
-        WindowMaker.WindowMakerIterator window = windowMaker.next();
-        LocusShardDataProvider dataProvider = new LocusShardDataProvider(shard, window.getSourceInfo(), genomeLocParser, window.getLocus(), window, null, null);
-        LocusView view = createView(dataProvider);
-
-        List<GATKSAMRecord> expectedReads = new ArrayList<GATKSAMRecord>();
-        Collections.addAll(expectedReads, read1, read2);
-        testReadsInContext(view, shard.getGenomeLocs(), expectedReads);
-    }
-
-    @Test
-    public void duplicateReadsTest() {
-        GATKSAMRecord read1 = buildSAMRecord("read1","chr1", 1, 5);
-        GATKSAMRecord read2 = buildSAMRecord("read2","chr1", 1, 5);
-        GATKSAMRecord read3 = buildSAMRecord("read3","chr1", 6, 10);
-        GATKSAMRecord read4 = buildSAMRecord("read4","chr1", 6, 10);
-        SAMRecordIterator iterator = new SAMRecordIterator(read1, read2, read3, read4);
-
-        Shard shard = new MockLocusShard(genomeLocParser,Collections.singletonList(genomeLocParser.createGenomeLoc("chr1", 1, 10)));
-        WindowMaker windowMaker = new WindowMaker(shard,genomeLocParser,iterator,shard.getGenomeLocs());
-        WindowMaker.WindowMakerIterator window = windowMaker.next();
-        LocusShardDataProvider dataProvider = new LocusShardDataProvider(shard, window.getSourceInfo(), genomeLocParser, window.getLocus(), window, null, null);
-        LocusView view = createView(dataProvider);
-
-        List<GATKSAMRecord> expectedReads = new ArrayList<GATKSAMRecord>();
-        Collections.addAll(expectedReads, read1, read2, read3, read4);
-        testReadsInContext(view, shard.getGenomeLocs(), expectedReads);
-    }
-
-    @Test
-    public void cascadingReadsWithinBoundsTest() {
-        GATKSAMRecord read1 = buildSAMRecord("read1","chr1", 2, 6);
-        GATKSAMRecord read2 = buildSAMRecord("read2","chr1", 3, 7);
-        GATKSAMRecord read3 = buildSAMRecord("read3","chr1", 4, 8);
-        GATKSAMRecord read4 = buildSAMRecord("read4","chr1", 5, 9);
-        SAMRecordIterator iterator = new SAMRecordIterator(read1, read2, read3, read4);
-
-        Shard shard = new MockLocusShard(genomeLocParser,Collections.singletonList(genomeLocParser.createGenomeLoc("chr1", 1, 10)));
-        WindowMaker windowMaker = new WindowMaker(shard,genomeLocParser,iterator,shard.getGenomeLocs());
-        WindowMaker.WindowMakerIterator window = windowMaker.next();
-        LocusShardDataProvider dataProvider = new LocusShardDataProvider(shard, window.getSourceInfo(), genomeLocParser, window.getLocus(), window, null, null);
-        LocusView view = createView(dataProvider);
-
-        List<GATKSAMRecord> expectedReads = new ArrayList<GATKSAMRecord>();
-        Collections.addAll(expectedReads, read1, read2, read3, read4);
-        testReadsInContext(view, shard.getGenomeLocs(), expectedReads);
-    }
-
-    @Test
-    public void cascadingReadsAtBoundsTest() {
-        GATKSAMRecord read1 = buildSAMRecord("read1","chr1", 1, 5);
-        GATKSAMRecord read2 = buildSAMRecord("read2","chr1", 2, 6);
-        GATKSAMRecord read3 = buildSAMRecord("read3","chr1", 3, 7);
-        GATKSAMRecord read4 = buildSAMRecord("read4","chr1", 4, 8);
-        GATKSAMRecord read5 = buildSAMRecord("read5","chr1", 5, 9);
-        GATKSAMRecord read6 = buildSAMRecord("read6","chr1", 6, 10);
-        SAMRecordIterator iterator = new SAMRecordIterator(read1, read2, read3, read4, read5, read6);
-
-        Shard shard = new MockLocusShard(genomeLocParser,Collections.singletonList(genomeLocParser.createGenomeLoc("chr1", 1, 10)));
-        WindowMaker windowMaker = new WindowMaker(shard,genomeLocParser,iterator,shard.getGenomeLocs());
-        WindowMaker.WindowMakerIterator window = windowMaker.next();
-        LocusShardDataProvider dataProvider = new LocusShardDataProvider(shard, window.getSourceInfo(), genomeLocParser, window.getLocus(), window, null, null);
-        LocusView view = createView(dataProvider);
-
-        List<GATKSAMRecord> expectedReads = new ArrayList<GATKSAMRecord>();
-        Collections.addAll(expectedReads, read1, read2, read3, read4, read5, read6);
-        testReadsInContext(view, shard.getGenomeLocs(), expectedReads);
-    }
-
-    @Test
-    public void cascadingReadsOverlappingBoundsTest() {
-        GATKSAMRecord read01 = buildSAMRecord("read1","chr1", 1, 5);
-        GATKSAMRecord read02 = buildSAMRecord("read2","chr1", 2, 6);
-        GATKSAMRecord read03 = buildSAMRecord("read3","chr1", 3, 7);
-        GATKSAMRecord read04 = buildSAMRecord("read4","chr1", 4, 8);
-        GATKSAMRecord read05 = buildSAMRecord("read5","chr1", 5, 9);
-        GATKSAMRecord read06 = buildSAMRecord("read6","chr1", 6, 10);
-        GATKSAMRecord read07 = buildSAMRecord("read7","chr1", 7, 11);
-        GATKSAMRecord read08 = buildSAMRecord("read8","chr1", 8, 12);
-        GATKSAMRecord read09 = buildSAMRecord("read9","chr1", 9, 13);
-        GATKSAMRecord read10 = buildSAMRecord("read10","chr1", 10, 14);
-        GATKSAMRecord read11 = buildSAMRecord("read11","chr1", 11, 15);
-        GATKSAMRecord read12 = buildSAMRecord("read12","chr1", 12, 16);
-        SAMRecordIterator iterator = new SAMRecordIterator(read01, read02, read03, read04, read05, read06,
-                                                           read07, read08, read09, read10, read11, read12);
-
-        Shard shard = new MockLocusShard(genomeLocParser,Collections.singletonList(genomeLocParser.createGenomeLoc("chr1", 6, 15)));
-        WindowMaker windowMaker = new WindowMaker(shard,genomeLocParser,iterator,shard.getGenomeLocs());
-        WindowMaker.WindowMakerIterator window = windowMaker.next();
-        LocusShardDataProvider dataProvider = new LocusShardDataProvider(shard, window.getSourceInfo(), genomeLocParser, window.getLocus(), window, null, null);
-        LocusView view = createView(dataProvider);
-
-        List<GATKSAMRecord> expectedReads = new ArrayList<GATKSAMRecord>();
-        Collections.addAll(expectedReads, read01, read02, read03, read04, read05, read06,
-                           read07, read08, read09, read10, read11, read12);
-        testReadsInContext(view, shard.getGenomeLocs(), expectedReads);
-    }
-
-    /**
-     * Creates a view of the type required for testing.
-     *
-     * @return The correct view to test.
-     */
-    protected abstract LocusView createView(LocusShardDataProvider provider);
-
-    /**
-     * Test the reads according to an independently derived context.
-     *
-     * @param view
-     * @param bounds
-     * @param reads
-     */
-    protected abstract void testReadsInContext(LocusView view, List<GenomeLoc> bounds, List<GATKSAMRecord> reads);
-
-    /**
-     * Fake a reference sequence file.  Essentially, seek a header with a bunch of dummy data.
-     *
-     * @return A 'fake' reference sequence file
-     */
-    private static ReferenceSequenceFile fakeReferenceSequenceFile() {
-        return new ReferenceSequenceFile() {
-            public SAMSequenceDictionary getSequenceDictionary() {
-                SAMSequenceRecord sequenceRecord = new SAMSequenceRecord("chr1", 1000000);
-                SAMSequenceDictionary dictionary = new SAMSequenceDictionary(Collections.singletonList(sequenceRecord));
-                return dictionary;
-            }
-
-            public boolean isIndexed() { return false; }
-
-            public ReferenceSequence nextSequence() {
-                throw new UnsupportedOperationException("Fake implementation doesn't support a getter");
-            }
-
-            public ReferenceSequence getSequence( String contig ) {
-                throw new UnsupportedOperationException("Fake implementation doesn't support a getter");
-            }
-
-            public ReferenceSequence getSubsequenceAt( String contig, long start, long stop ) {
-                throw new UnsupportedOperationException("Fake implementation doesn't support a getter");
-            }
-
-            public void reset() {
-                return;
-            }
-
-            public void close() throws IOException {
-            }
-        };
-    }
-
-    /**
-     * Build a SAM record featuring the absolute minimum required dataset.
-     *
-     * @param contig         Contig to populate.
-     * @param alignmentStart start of alignment
-     * @param alignmentEnd   end of alignment
-     *
-     * @return New SAM Record
-     */
-    protected GATKSAMRecord buildSAMRecord(String readName, String contig, int alignmentStart, int alignmentEnd) {
-        SAMFileHeader header = new SAMFileHeader();
-        header.setSequenceDictionary(sequenceSourceFile.getSequenceDictionary());
-
-        GATKSAMRecord record = new GATKSAMRecord(header);
-
-        record.setReadName(readName);
-        record.setReferenceIndex(sequenceSourceFile.getSequenceDictionary().getSequenceIndex(contig));
-        record.setAlignmentStart(alignmentStart);
-        Cigar cigar = new Cigar();
-        int len = alignmentEnd - alignmentStart + 1;
-        cigar.add(new CigarElement(len, CigarOperator.M));
-        record.setCigar(cigar);
-        record.setReadBases(new byte[len]);
-        record.setBaseQualities(new byte[len]);
-        return record;
-    }
-
-    /** A simple iterator which iterates over a list of reads. */
-    protected class SAMRecordIterator implements GATKSAMIterator {
-        private Iterator<SAMRecord> backingIterator = null;
-
-        public SAMRecordIterator(SAMRecord... reads) {
-            List<SAMRecord> backingList = new ArrayList<SAMRecord>();
-            backingList.addAll(Arrays.asList(reads));
-            backingIterator = backingList.iterator();
-        }
-
-        public boolean hasNext() {
-            return backingIterator.hasNext();
-        }
-
-        public SAMRecord next() {
-            return backingIterator.next();
-        }
-
-        public Iterator<SAMRecord> iterator() {
-            return this;
-        }
-
-        public void close() {
-            // NO-OP.
-        }
-
-        public void remove() {
-            throw new UnsupportedOperationException("Can't remove from a read-only iterator");
-        }
-    }
-
-
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/datasources/providers/ReadReferenceViewUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/datasources/providers/ReadReferenceViewUnitTest.java
deleted file mode 100644
index 8bf4f41..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/datasources/providers/ReadReferenceViewUnitTest.java
+++ /dev/null
@@ -1,160 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.datasources.providers;
-
-import org.testng.Assert;
-import org.broadinstitute.gatk.utils.GenomeLoc;
-
-import org.testng.annotations.Test;
-
-import htsjdk.samtools.*;
-import htsjdk.samtools.reference.ReferenceSequence;
-
-/*
- * Copyright (c) 2009 The Broad Institute
- *
- * Permission is hereby granted, free of charge, to any person
- * obtaining a copy of this software and associated documentation
- * files (the "Software"), to deal in the Software without
- * restriction, including without limitation the rights to use,
- * copy, modify, merge, publish, distribute, sublicense, and/or sell
- * copies of the Software, and to permit persons to whom the
- * Software is furnished to do so, subject to the following
- * conditions:
- *
- * The above copyright notice and this permission notice shall be
- * included in all copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
- * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
- * OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
- * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
- * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
- * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
- * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
- * OTHER DEALINGS IN THE SOFTWARE.
- */
-
-/**
- * User: hanna
- * Date: May 27, 2009
- * Time: 1:04:27 PM
- *
- */
-
-/**
- * Test reading the reference for a given read.
- */
-
-public class ReadReferenceViewUnitTest extends ReferenceViewTemplate {
-
-
-    /**
-     * tests that the ReadReferenceView correctly generates X's when a read overhangs the
-     * end of a contig
-     */
-    @Test
-    public void testOverhangingRead() {
-        testOverhangingGivenSize(25,0);
-        testOverhangingGivenSize(25,12);
-        testOverhangingGivenSize(25,24);
-    }
-
-
-    /**
-     * a private method, that tests getting the read sequence for reads that overlap the end of the
-     * contig
-     * @param readLength the length of the read
-     * @param overlap the amount of overlap
-     */
-    private void testOverhangingGivenSize(int readLength, int overlap) {
-        SAMSequenceRecord selectedContig = sequenceFile.getSequenceDictionary().getSequences().get(sequenceFile.getSequenceDictionary().getSequences().size()-1);
-        final long contigStart = selectedContig.getSequenceLength() - (readLength - overlap - 1);
-        final long contigStop = selectedContig.getSequenceLength() + overlap;
-
-        ReadShardDataProvider dataProvider = new ReadShardDataProvider(null,genomeLocParser,null,sequenceFile,null);
-        ReadReferenceView view = new ReadReferenceView(dataProvider);
-
-        SAMRecord rec = buildSAMRecord(selectedContig.getSequenceName(),(int)contigStart,(int)contigStop);
-        ReferenceSequence expectedAsSeq = sequenceFile.getSubsequenceAt(selectedContig.getSequenceName(),(int)contigStart,selectedContig.getSequenceLength());
-        //char[] expected = StringUtil.bytesToString(expectedAsSeq.getBases()).toCharArray();
-        byte[] expected = expectedAsSeq.getBases();
-        byte[] actual = view.getReferenceBases(rec);
-
-        Assert.assertEquals((readLength - overlap), expected.length);
-        Assert.assertEquals(readLength, actual.length);
-        int xRange = 0;
-        for (; xRange < (readLength - overlap); xRange++) {
-            Assert.assertTrue(actual[xRange] != 'X');
-        }
-        for (; xRange < actual.length; xRange++) {
-            Assert.assertTrue(actual[xRange] == 'X');
-        }
-    }
-
-
-    /**
-     * Compares the contents of the fasta and view at a specified location.
-     * @param loc the location to validate
-     */
-    protected void validateLocation( GenomeLoc loc ) {
-        SAMRecord read = buildSAMRecord( loc.getContig(), (int)loc.getStart(), (int)loc.getStop() );
-
-        ReadShardDataProvider dataProvider = new ReadShardDataProvider(null,genomeLocParser,null,sequenceFile,null);
-        ReadReferenceView view = new ReadReferenceView(dataProvider);
-
-        ReferenceSequence expectedAsSeq = sequenceFile.getSubsequenceAt(loc.getContig(),loc.getStart(),loc.getStop());
-        byte[] expected = expectedAsSeq.getBases();
-        byte[] actual = view.getReferenceBases(read);
-
-        org.testng.Assert.assertEquals(actual,expected,String.format("Base array at  in shard %s does not match expected",loc.toString()));
-    }
-
-
-    /**
-     * Build a SAM record featuring the absolute minimum required dataset.
-     * TODO: Blatantly copied from LocusViewTemplate.  Refactor these into a set of tools.
-     * @param contig Contig to populate.
-     * @param alignmentStart start of alignment
-     * @param alignmentEnd end of alignment
-     * @return New SAM Record
-     */
-    protected SAMRecord buildSAMRecord( String contig, int alignmentStart, int alignmentEnd ) {
-        SAMFileHeader header = new SAMFileHeader();
-        header.setSequenceDictionary(sequenceFile.getSequenceDictionary());
-
-        SAMRecord record = new SAMRecord(header);
-
-        record.setReferenceIndex(sequenceFile.getSequenceDictionary().getSequenceIndex(contig));
-        record.setAlignmentStart(alignmentStart);
-        Cigar cigar = new Cigar();
-        cigar.add(new CigarElement(alignmentEnd-alignmentStart+1, CigarOperator.M));
-        record.setCigar(cigar);
-        return record;
-    }
-
-
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/datasources/providers/ReferenceOrderedViewUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/datasources/providers/ReferenceOrderedViewUnitTest.java
deleted file mode 100644
index fdec858..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/datasources/providers/ReferenceOrderedViewUnitTest.java
+++ /dev/null
@@ -1,157 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.datasources.providers;
-
-import htsjdk.tribble.Feature;
-import org.broadinstitute.gatk.utils.commandline.RodBinding;
-import org.broadinstitute.gatk.utils.commandline.Tags;
-import org.broadinstitute.gatk.engine.datasources.reads.MockLocusShard;
-import org.broadinstitute.gatk.engine.datasources.rmd.ReferenceOrderedDataSource;
-import org.broadinstitute.gatk.engine.refdata.tracks.RMDTrackBuilder;
-import org.broadinstitute.gatk.engine.refdata.utils.RMDTriplet;
-import org.testng.Assert;
-import org.broadinstitute.gatk.utils.BaseTest;
-import org.broadinstitute.gatk.engine.datasources.reads.Shard;
-import org.broadinstitute.gatk.engine.refdata.RefMetaDataTracker;
-import org.broadinstitute.gatk.utils.codecs.table.TableFeature;
-import org.broadinstitute.gatk.engine.refdata.utils.RMDTriplet.RMDStorageType;
-import org.broadinstitute.gatk.utils.GenomeLocParser;
-import org.broadinstitute.gatk.utils.fasta.CachingIndexedFastaSequenceFile;
-
-import org.testng.annotations.BeforeClass;
-import org.testng.annotations.Test;
-
-import java.io.File;
-import java.io.FileNotFoundException;
-import java.util.Arrays;
-import java.util.Collections;
-
-import htsjdk.samtools.reference.IndexedFastaSequenceFile;
-/**
- * User: hanna
- * Date: May 27, 2009
- * Time: 3:07:23 PM
- * BROAD INSTITUTE SOFTWARE COPYRIGHT NOTICE AND AGREEMENT
- * Software and documentation are copyright 2005 by the Broad Institute.
- * All rights are reserved.
- *
- * Users acknowledge that this software is supplied without any warranty or support.
- * The Broad Institute is not responsible for its use, misuse, or
- * functionality.
- */
-
-/**
- * Test the transparent view into the reference-ordered data.  At the moment, just do some basic bindings and make
- * sure the data comes through correctly.
- */
-public class ReferenceOrderedViewUnitTest extends BaseTest {
-    /**
-     * Sequence file.
-     */
-    private static IndexedFastaSequenceFile seq;
-    private GenomeLocParser genomeLocParser;
-
-    /**
-     * our track builder
-     */
-    RMDTrackBuilder builder = null;
-
-    @BeforeClass
-    public void init() throws FileNotFoundException {
-        // sequence
-        seq = new CachingIndexedFastaSequenceFile(new File(hg18Reference));
-        genomeLocParser = new GenomeLocParser(seq);
-        // disable auto-index creation/locking in the RMDTrackBuilder for tests
-        builder = new RMDTrackBuilder(seq.getSequenceDictionary(),genomeLocParser,null,true,null);
-    }
-
-    /**
-     * Make sure binding to an empty list produces an empty tracker.
-     */
-    @Test
-    public void testNoBindings() {
-        Shard shard = new MockLocusShard(genomeLocParser,Collections.singletonList(genomeLocParser.createGenomeLoc("chrM",1,30)));
-        LocusShardDataProvider provider = new LocusShardDataProvider(shard, null, genomeLocParser, shard.getGenomeLocs().get(0), null, seq, Collections.<ReferenceOrderedDataSource>emptyList());
-        ReferenceOrderedView view = new ManagingReferenceOrderedView( provider );
-
-        RefMetaDataTracker tracker = view.getReferenceOrderedDataAtLocus(genomeLocParser.createGenomeLoc("chrM",10));
-        Assert.assertEquals(tracker.getValues(Feature.class).size(), 0, "The tracker should not have produced any data");
-    }
-
-    /**
-     * Test a single ROD binding.
-     */
-    @Test
-    public void testSingleBinding() {
-        String fileName = privateTestDir + "TabularDataTest.dat";
-        RMDTriplet triplet = new RMDTriplet("tableTest","Table",fileName,RMDStorageType.FILE,new Tags());
-        ReferenceOrderedDataSource dataSource = new ReferenceOrderedDataSource(triplet,builder,seq.getSequenceDictionary(),genomeLocParser,false);
-
-        Shard shard = new MockLocusShard(genomeLocParser,Collections.singletonList(genomeLocParser.createGenomeLoc("chrM",1,30)));
-
-        LocusShardDataProvider provider = new LocusShardDataProvider(shard, null, genomeLocParser, shard.getGenomeLocs().get(0), null, seq, Collections.singletonList(dataSource));
-        ReferenceOrderedView view = new ManagingReferenceOrderedView( provider );
-
-        RefMetaDataTracker tracker = view.getReferenceOrderedDataAtLocus(genomeLocParser.createGenomeLoc("chrM",20));
-        TableFeature datum = tracker.getFirstValue(new RodBinding<TableFeature>(TableFeature.class, "tableTest"));
-
-        Assert.assertEquals(datum.get("COL1"),"C","datum parameter for COL1 is incorrect");
-        Assert.assertEquals(datum.get("COL2"),"D","datum parameter for COL2 is incorrect");
-        Assert.assertEquals(datum.get("COL3"),"E","datum parameter for COL3 is incorrect");
-    }
-
-    /**
-     * Make sure multiple bindings are visible from the view.
-     */
-    @Test
-    public void testMultipleBinding() {
-        File file = new File(privateTestDir + "TabularDataTest.dat");
-
-        RMDTriplet testTriplet1 = new RMDTriplet("tableTest1","Table",file.getAbsolutePath(),RMDStorageType.FILE,new Tags());
-        ReferenceOrderedDataSource dataSource1 = new ReferenceOrderedDataSource(testTriplet1,builder,seq.getSequenceDictionary(),genomeLocParser,false);
-
-        RMDTriplet testTriplet2 = new RMDTriplet("tableTest2","Table",file.getAbsolutePath(),RMDStorageType.FILE,new Tags());
-        ReferenceOrderedDataSource dataSource2 = new ReferenceOrderedDataSource(testTriplet2,builder,seq.getSequenceDictionary(),genomeLocParser,false);
-
-        Shard shard = new MockLocusShard(genomeLocParser,Collections.singletonList(genomeLocParser.createGenomeLoc("chrM",1,30)));
-
-        LocusShardDataProvider provider = new LocusShardDataProvider(shard, null, genomeLocParser, shard.getGenomeLocs().get(0), null, seq, Arrays.asList(dataSource1,dataSource2));
-        ReferenceOrderedView view = new ManagingReferenceOrderedView( provider );
-
-        RefMetaDataTracker tracker = view.getReferenceOrderedDataAtLocus(genomeLocParser.createGenomeLoc("chrM",20));
-        TableFeature datum1 = tracker.getFirstValue(new RodBinding<TableFeature>(TableFeature.class, "tableTest1"));
-
-        Assert.assertEquals(datum1.get("COL1"),"C","datum1 parameter for COL1 is incorrect");
-        Assert.assertEquals(datum1.get("COL2"),"D","datum1 parameter for COL2 is incorrect");
-        Assert.assertEquals(datum1.get("COL3"),"E","datum1 parameter for COL3 is incorrect");
-
-        TableFeature datum2 = tracker.getFirstValue(new RodBinding<TableFeature>(TableFeature.class, "tableTest2"));
-
-        Assert.assertEquals(datum2.get("COL1"),"C","datum2 parameter for COL1 is incorrect");
-        Assert.assertEquals(datum2.get("COL2"),"D","datum2 parameter for COL2 is incorrect");
-        Assert.assertEquals(datum2.get("COL3"),"E","datum2 parameter for COL3 is incorrect");
-    }
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/datasources/providers/ReferenceViewTemplate.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/datasources/providers/ReferenceViewTemplate.java
deleted file mode 100644
index bffd23d..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/datasources/providers/ReferenceViewTemplate.java
+++ /dev/null
@@ -1,122 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.datasources.providers;
-
-import htsjdk.samtools.SAMSequenceRecord;
-import htsjdk.samtools.reference.IndexedFastaSequenceFile;
-import org.broadinstitute.gatk.utils.BaseTest;
-import org.broadinstitute.gatk.utils.GenomeLoc;
-import org.broadinstitute.gatk.utils.GenomeLocParser;
-import org.broadinstitute.gatk.utils.fasta.CachingIndexedFastaSequenceFile;
-import org.testng.annotations.BeforeClass;
-import org.testng.annotations.Test;
-
-import java.io.File;
-import java.io.FileNotFoundException;
-/**
- * User: hanna
- * Date: May 27, 2009
- * Time: 1:12:35 PM
- * BROAD INSTITUTE SOFTWARE COPYRIGHT NOTICE AND AGREEMENT
- * Software and documentation are copyright 2005 by the Broad Institute.
- * All rights are reserved.
- *
- * Users acknowledge that this software is supplied without any warranty or support.
- * The Broad Institute is not responsible for its use, misuse, or
- * functionality.
- */
-
-/**
- * Template for testing reference views (ReadReferenceView and LocusReferenceView).
- */
-
-public abstract class ReferenceViewTemplate extends BaseTest {
-    /**
-     * The fasta, for comparison.
-     */
-    protected IndexedFastaSequenceFile sequenceFile = null;
-    protected GenomeLocParser genomeLocParser = null;
-
-    //
-    // The bulk of sequence retrieval is tested by IndexedFastaSequenceFile, but we'll run a few spot
-    // checks here to make sure that data is flowing through the LocusReferenceView.
-
-    /**
-     * Initialize the fasta.
-     */
-    @BeforeClass
-    public void initialize() throws FileNotFoundException {
-        sequenceFile = new CachingIndexedFastaSequenceFile( new File(hg18Reference) );
-        genomeLocParser = new GenomeLocParser(sequenceFile);
-    }
-
-    /**
-     * Test the initial fasta location.
-     */
-    @Test
-    public void testReferenceStart() {
-        validateLocation( genomeLocParser.createGenomeLoc(sequenceFile.getSequenceDictionary().getSequence(0).getSequenceName(),1,25) );
-    }
-
-    /**
-     * Test the end of a contig.
-     */
-    @Test
-    public void testReferenceEnd() {
-        // Test the last 25 bases of the first contig.
-        SAMSequenceRecord selectedContig = sequenceFile.getSequenceDictionary().getSequences().get(sequenceFile.getSequenceDictionary().getSequences().size()-1);
-        final int contigStart = selectedContig.getSequenceLength() - 24;
-        final int contigStop = selectedContig.getSequenceLength();
-        validateLocation( genomeLocParser.createGenomeLoc(selectedContig.getSequenceName(),contigStart,contigStop) );
-    }
-
-    /**
-     * Test the start of the middle contig.
-     */
-    @Test
-    public void testContigStart() {
-        // Test the last 25 bases of the first contig.
-        int contigPosition = sequenceFile.getSequenceDictionary().getSequences().size()/2;
-        SAMSequenceRecord selectedContig = sequenceFile.getSequenceDictionary().getSequences().get(contigPosition);
-        validateLocation( genomeLocParser.createGenomeLoc(selectedContig.getSequenceName(),1,25) );
-    }
-
-
-    /**
-     * Test the end of the middle contig.
-     */
-    @Test
-    public void testContigEnd() {
-        // Test the last 25 bases of the first contig.
-        int contigPosition = sequenceFile.getSequenceDictionary().getSequences().size()/2;
-        SAMSequenceRecord selectedContig = sequenceFile.getSequenceDictionary().getSequences().get(contigPosition);
-        final int contigStart = selectedContig.getSequenceLength() - 24;
-        final int contigStop = selectedContig.getSequenceLength();
-        validateLocation( genomeLocParser.createGenomeLoc(selectedContig.getSequenceName(),contigStart,contigStop) );
-    }
-
-    protected abstract void validateLocation( GenomeLoc loc );
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/datasources/providers/ShardDataProviderUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/datasources/providers/ShardDataProviderUnitTest.java
deleted file mode 100644
index 251eec4..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/datasources/providers/ShardDataProviderUnitTest.java
+++ /dev/null
@@ -1,152 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.datasources.providers;
-
-import org.testng.Assert;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-import org.testng.annotations.BeforeMethod;
-
-
-import org.testng.annotations.Test;
-import org.broadinstitute.gatk.utils.BaseTest;
-
-import java.util.Collection;
-import java.util.Collections;
-import java.util.Arrays;
-/**
- * User: hanna
- * Date: May 27, 2009
- * Time: 1:56:02 PM
- * BROAD INSTITUTE SOFTWARE COPYRIGHT NOTICE AND AGREEMENT
- * Software and documentation are copyright 2005 by the Broad Institute.
- * All rights are reserved.
- *
- * Users acknowledge that this software is supplied without any warranty or support.
- * The Broad Institute is not responsible for its use, misuse, or
- * functionality.
- */
-
-/**
- * Test basic functionality of the shard data provider.
- */
-
-public class ShardDataProviderUnitTest extends BaseTest {
-    /**
-     * Provider to test.  Should be recreated for every test.
-     */
-    private ShardDataProvider provider = null;
-
-    @BeforeMethod
-    public void createProvider() {
-        provider = new LocusShardDataProvider( null,null,null,null,null,null,null );
-    }
-
-    /**
-     * Test whether views are closed when the provider closes.
-     */
-    @Test
-    public void testClose() {
-        TestView testView = new TestView( provider );
-        Assert.assertFalse(testView.closed,"View is currently closed but should be open");
-
-        provider.close();
-        Assert.assertTrue(testView.closed,"View is currently open but should be closed");
-    }
-
-    /**
-     * Test whether multiple of the same view can be registered and all get a close method.
-     */
-    @Test
-    public void testMultipleClose() {
-        Collection<TestView> testViews = Arrays.asList(new TestView(provider),new TestView(provider));
-        for( TestView testView: testViews )
-            Assert.assertFalse(testView.closed,"View is currently closed but should be open");
-
-        provider.close();
-        for( TestView testView: testViews )
-            Assert.assertTrue(testView.closed,"View is currently open but should be closed");
-    }
-
-    /**
-     * Try adding a view which conflicts with some other view that's already been registered.
-     */
-    @Test(expectedExceptions= ReviewedGATKException.class)
-    public void testAddViewWithExistingConflict() {
-        View initial = new ConflictingTestView( provider );
-        View conflictsWithInitial = new TestView( provider );
-    }
-
-    /**
-     * Try adding a view which has a conflict with a previously registered view.
-     */
-    @Test(expectedExceptions= ReviewedGATKException.class)
-    public void testAddViewWithNewConflict() {
-        View conflictsWithInitial = new TestView( provider );
-        View initial = new ConflictingTestView( provider );
-    }
-
-    /**
-     * A simple view for testing interactions between views attached to the ShardDataProvider.
-     */
-    private class TestView implements View {
-        /**
-         * Is the test view currently closed.
-         */
-        private boolean closed = false;
-
-        /**
-         * Create a new test view wrapping the given provider.
-         * @param provider
-         */
-        public TestView( ShardDataProvider provider ) {
-            provider.register(this);            
-        }
-
-        /**
-         * Gets conflicting views.  In this case, none conflict.
-         * @return
-         */
-        public Collection<Class<? extends View>> getConflictingViews() { return Collections.emptyList(); }
-
-        /**
-         * Close this view.
-         */
-        public void close() { this.closed = true; }
-    }
-
-    /**
-     * Another view that conflicts with the one above.
-     */
-    private class ConflictingTestView implements View {
-        public ConflictingTestView( ShardDataProvider provider ) { provider.register(this); }
-
-        public Collection<Class<? extends View>> getConflictingViews() {
-            return Collections.<Class<? extends View>>singleton(TestView.class);
-        }
-
-        public void close() {}
-    }
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/datasources/reads/ActiveRegionShardBalancerUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/datasources/reads/ActiveRegionShardBalancerUnitTest.java
deleted file mode 100644
index f1ee6ab..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/datasources/reads/ActiveRegionShardBalancerUnitTest.java
+++ /dev/null
@@ -1,102 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.datasources.reads;
-
-import htsjdk.samtools.SAMFileHeader;
-import htsjdk.samtools.SAMFileSpan;
-import htsjdk.samtools.SAMSequenceRecord;
-import org.broadinstitute.gatk.utils.BaseTest;
-import org.broadinstitute.gatk.utils.GenomeLoc;
-import org.broadinstitute.gatk.utils.GenomeLocParser;
-import org.broadinstitute.gatk.utils.interval.IntervalMergingRule;
-import org.broadinstitute.gatk.utils.sam.ArtificialSAMUtils;
-import org.testng.Assert;
-import org.testng.annotations.BeforeClass;
-import org.testng.annotations.Test;
-
-import java.io.FileNotFoundException;
-import java.util.*;
-
-public class ActiveRegionShardBalancerUnitTest extends BaseTest {
-    // example genome loc parser for this test, can be deleted if you don't use the reference
-    private GenomeLocParser genomeLocParser;
-    protected SAMDataSource readsDataSource;
-
-    @BeforeClass
-    public void setup() throws FileNotFoundException {
-        // sequence
-        final SAMFileHeader header = ArtificialSAMUtils.createArtificialSamHeader(10, 0, 10000);
-        genomeLocParser = new GenomeLocParser(header.getSequenceDictionary());
-        readsDataSource = null;
-    }
-
-    @Test
-    public void testMergingManyContigs() {
-        executeTest(genomeLocParser.getContigs().getSequences());
-    }
-
-    @Test
-    public void testMergingAllPointersOnSingleContig() {
-        executeTest(Arrays.asList(genomeLocParser.getContigs().getSequences().get(1)));
-    }
-
-    @Test
-    public void testMergingMultipleDiscontinuousContigs() {
-        final List<SAMSequenceRecord> all = genomeLocParser.getContigs().getSequences();
-        executeTest(Arrays.asList(all.get(1), all.get(3)));
-    }
-
-    private void executeTest(final Collection<SAMSequenceRecord> records) {
-        final ActiveRegionShardBalancer balancer = new ActiveRegionShardBalancer();
-
-        final List<Set<GenomeLoc>> expectedLocs = new LinkedList<>();
-        final List<FilePointer> pointers = new LinkedList<>();
-
-        for ( final SAMSequenceRecord record : records ) {
-            final int size = 10;
-            int end = 0;
-            for ( int i = 0; i < record.getSequenceLength(); i += size) {
-                final int myEnd = i + size - 1;
-                end = myEnd;
-                final GenomeLoc loc = genomeLocParser.createGenomeLoc(record.getSequenceName(), i, myEnd);
-                final Map<SAMReaderID, SAMFileSpan> fileSpans = Collections.emptyMap();
-                final FilePointer fp = new FilePointer(fileSpans, IntervalMergingRule.ALL, Collections.singletonList(loc));
-                pointers.add(fp);
-            }
-            expectedLocs.add(Collections.singleton(genomeLocParser.createGenomeLoc(record.getSequenceName(), 0, end)));
-        }
-
-        balancer.initialize(readsDataSource, pointers.iterator(), genomeLocParser);
-
-        int i = 0;
-        int nShardsFound = 0;
-        for ( final Shard shard : balancer ) {
-            nShardsFound++;
-            Assert.assertEquals(new HashSet<>(shard.getGenomeLocs()), expectedLocs.get(i++));
-        }
-        Assert.assertEquals(nShardsFound, records.size(), "Didn't find exactly one shard for each contig in the sequence dictionary");
-    }
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/datasources/reads/DownsamplerBenchmark.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/datasources/reads/DownsamplerBenchmark.java
deleted file mode 100644
index 27c287c..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/datasources/reads/DownsamplerBenchmark.java
+++ /dev/null
@@ -1,94 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.datasources.reads;
-
-import com.google.caliper.Param;
-import org.broadinstitute.gatk.engine.WalkerManager;
-import org.broadinstitute.gatk.engine.downsampling.DownsamplingMethod;
-import org.broadinstitute.gatk.engine.walkers.LocusWalker;
-
-/**
- * Created by IntelliJ IDEA.
- * User: mhanna
- * Date: Apr 22, 2011
- * Time: 4:02:56 PM
- * To change this template use File | Settings | File Templates.
- */
-public class DownsamplerBenchmark extends ReadProcessingBenchmark {
-    @Param
-    private String bamFile;
-
-    @Param
-    private Integer maxReads;
-
-    @Override
-    public String getBAMFile() { return bamFile; }
-
-    @Override
-    public Integer getMaxReads() { return maxReads; }
-
-    @Param
-    private Downsampling downsampling;
-
-//    public void timeDownsampling(int reps) {
-//        for(int i = 0; i < reps; i++) {
-//            SAMFileReader reader = new SAMFileReader(inputFile);
-//            ReadProperties readProperties = new ReadProperties(Collections.<SAMReaderID>singletonList(new SAMReaderID(inputFile,new Tags())),
-//                    reader.getFileHeader(),
-//                    SAMFileHeader.SortOrder.coordinate,
-//                    false,
-//                    SAMFileReader.ValidationStringency.SILENT,
-//                    downsampling.create(),
-//                    new ValidationExclusion(Collections.singletonList(ValidationExclusion.TYPE.ALL)),
-//                    Collections.<ReadFilter>emptyList(),
-//                    Collections.<ReadTransformer>emptyList(),
-//                    false,
-//                    (byte)0,
-//                    false);
-//
-//            GenomeLocParser genomeLocParser = new GenomeLocParser(reader.getFileHeader().getSequenceDictionary());
-//            // Filter unmapped reads.  TODO: is this always strictly necessary?  Who in the GATK normally filters these out?
-//            Iterator<SAMRecord> readIterator = new FilteringIterator(reader.iterator(),new UnmappedReadFilter());
-//            LegacyLocusIteratorByState locusIteratorByState = new LegacyLocusIteratorByState(readIterator,readProperties,genomeLocParser, LegacyLocusIteratorByState.sampleListForSAMWithoutReadGroups());
-//            while(locusIteratorByState.hasNext()) {
-//                locusIteratorByState.next().getLocation();
-//            }
-//            reader.close();
-//        }
-//    }
-
-    private enum Downsampling {
-        NONE {
-            @Override
-            DownsamplingMethod create() { return DownsamplingMethod.NONE; }
-        },
-        PER_SAMPLE {
-            @Override
-            DownsamplingMethod create() { return WalkerManager.getDownsamplingMethod(LocusWalker.class); }
-        };
-        abstract DownsamplingMethod create();
-    }
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/datasources/reads/FilePointerUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/datasources/reads/FilePointerUnitTest.java
deleted file mode 100644
index e35f1d5..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/datasources/reads/FilePointerUnitTest.java
+++ /dev/null
@@ -1,129 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.datasources.reads;
-
-import htsjdk.samtools.reference.IndexedFastaSequenceFile;
-import htsjdk.samtools.GATKBAMFileSpan;
-import htsjdk.samtools.GATKChunk;
-import org.broadinstitute.gatk.utils.BaseTest;
-import org.broadinstitute.gatk.utils.commandline.Tags;
-import org.broadinstitute.gatk.utils.GenomeLocParser;
-import org.broadinstitute.gatk.utils.fasta.CachingIndexedFastaSequenceFile;
-import org.broadinstitute.gatk.utils.interval.IntervalMergingRule;
-import org.testng.Assert;
-import org.testng.annotations.BeforeMethod;
-import org.testng.annotations.Test;
-
-import java.io.File;
-import java.io.FileNotFoundException;
-
-/**
- *
- */
-public class FilePointerUnitTest extends BaseTest {
-    private IndexedFastaSequenceFile seq;
-    private GenomeLocParser genomeLocParser;
-    private SAMReaderID readerID = new SAMReaderID("samFile",new Tags());
-
-    /**
-     * This function does the setup of our parser, before each method call.
-     * <p/>
-     * Called before every test case method.
-     */
-    @BeforeMethod
-    public void doForEachTest() throws FileNotFoundException {
-        // sequence
-        seq = new CachingIndexedFastaSequenceFile(new File(hg18Reference));
-        genomeLocParser = new GenomeLocParser(seq.getSequenceDictionary());
-    }
-
-    @Test
-    public void testFilePointerCombineDisjoint() {
-        FilePointer one = new FilePointer(IntervalMergingRule.ALL, genomeLocParser.createGenomeLoc("chr1",1,5));
-        one.addFileSpans(readerID,new GATKBAMFileSpan(new GATKChunk(0,1)));
-        FilePointer two = new FilePointer(IntervalMergingRule.ALL, genomeLocParser.createGenomeLoc("chr1",6,10));
-        two.addFileSpans(readerID,new GATKBAMFileSpan(new GATKChunk(1,2)));
-
-        FilePointer result = new FilePointer(IntervalMergingRule.ALL, genomeLocParser.createGenomeLoc("chr1",1,10));
-        result.addFileSpans(readerID,new GATKBAMFileSpan(new GATKChunk(0,2)));
-
-        Assert.assertEquals(one.combine(genomeLocParser,two),result,"Combination of two file pointers is incorrect");
-        Assert.assertEquals(two.combine(genomeLocParser,one),result,"Combination of two file pointers is incorrect");
-
-        //Now test that adjacent (but disjoint) intervals are properly handled with OVERLAPPING_ONLY
-        one = new FilePointer(IntervalMergingRule.OVERLAPPING_ONLY, genomeLocParser.createGenomeLoc("chr1",1,5));
-        one.addFileSpans(readerID,new GATKBAMFileSpan(new GATKChunk(0,1)));
-        two = new FilePointer(IntervalMergingRule.OVERLAPPING_ONLY, genomeLocParser.createGenomeLoc("chr1",6,10));
-        two.addFileSpans(readerID,new GATKBAMFileSpan(new GATKChunk(1,2)));
-
-        result = new FilePointer(IntervalMergingRule.OVERLAPPING_ONLY,
-                genomeLocParser.createGenomeLoc("chr1",1,5),
-                genomeLocParser.createGenomeLoc("chr1",6,10));
-        result.addFileSpans(readerID,new GATKBAMFileSpan(new GATKChunk(0,2)));
-
-        Assert.assertEquals(one.combine(genomeLocParser,two),result,"Combination of two file pointers is incorrect");
-        Assert.assertEquals(two.combine(genomeLocParser,one),result,"Combination of two file pointers is incorrect");
-    }
-
-    @Test
-    public void testFilePointerCombineJoint() {
-        FilePointer one = new FilePointer(IntervalMergingRule.ALL, genomeLocParser.createGenomeLoc("chr1",1,5));
-        one.addFileSpans(readerID,new GATKBAMFileSpan(new GATKChunk(0,2)));
-        FilePointer two = new FilePointer(IntervalMergingRule.ALL, genomeLocParser.createGenomeLoc("chr1",2,6));
-        two.addFileSpans(readerID,new GATKBAMFileSpan(new GATKChunk(1,3)));
-
-        FilePointer result = new FilePointer(IntervalMergingRule.ALL, genomeLocParser.createGenomeLoc("chr1",1,6));
-        result.addFileSpans(readerID,new GATKBAMFileSpan(new GATKChunk(0,3)));        
-
-        Assert.assertEquals(one.combine(genomeLocParser,two),result,"Combination of two file pointers is incorrect");
-        Assert.assertEquals(two.combine(genomeLocParser,one),result,"Combination of two file pointers is incorrect");
-
-        //Repeat the tests for OVERLAPPING_ONLY
-        one = new FilePointer(IntervalMergingRule.OVERLAPPING_ONLY, genomeLocParser.createGenomeLoc("chr1",1,5));
-        one.addFileSpans(readerID,new GATKBAMFileSpan(new GATKChunk(0,2)));
-        two = new FilePointer(IntervalMergingRule.OVERLAPPING_ONLY, genomeLocParser.createGenomeLoc("chr1",2,6));
-        two.addFileSpans(readerID,new GATKBAMFileSpan(new GATKChunk(1,3)));
-
-        result = new FilePointer(IntervalMergingRule.OVERLAPPING_ONLY, genomeLocParser.createGenomeLoc("chr1",1,6));
-        result.addFileSpans(readerID,new GATKBAMFileSpan(new GATKChunk(0,3)));
-
-        Assert.assertEquals(one.combine(genomeLocParser,two),result,"Combination of two file pointers is incorrect");
-        Assert.assertEquals(two.combine(genomeLocParser,one),result,"Combination of two file pointers is incorrect");
-    }
-
-    @Test
-    public void testFilePointerCombineOneSided() {
-        FilePointer filePointer = new FilePointer(IntervalMergingRule.ALL, genomeLocParser.createGenomeLoc("chr1",1,5));
-        filePointer.addFileSpans(readerID,new GATKBAMFileSpan(new GATKChunk(0,1)));
-        FilePointer empty = new FilePointer(IntervalMergingRule.ALL, genomeLocParser.createGenomeLoc("chr1",6,10));
-        // Do not add file spans to empty result
-
-        FilePointer result = new FilePointer(IntervalMergingRule.ALL, genomeLocParser.createGenomeLoc("chr1",1,10));
-        result.addFileSpans(readerID,new GATKBAMFileSpan(new GATKChunk(0,1)));
-        Assert.assertEquals(filePointer.combine(genomeLocParser,empty),result,"Combination of two file pointers is incorrect");
-        Assert.assertEquals(empty.combine(genomeLocParser,filePointer),result,"Combination of two file pointers is incorrect");
-    }
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/datasources/reads/GATKBAMIndexUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/datasources/reads/GATKBAMIndexUnitTest.java
deleted file mode 100644
index 289a10c..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/datasources/reads/GATKBAMIndexUnitTest.java
+++ /dev/null
@@ -1,108 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.datasources.reads;
-
-import htsjdk.samtools.SAMFileReader;
-import htsjdk.samtools.SAMSequenceDictionary;
-import org.broadinstitute.gatk.utils.BaseTest;
-import org.broadinstitute.gatk.utils.exceptions.UserException;
-import org.testng.Assert;
-import org.testng.annotations.BeforeClass;
-import org.testng.annotations.Test;
-
-import java.io.File;
-import java.io.FileNotFoundException;
-
-/**
- * Test basic functionality in the GATK's implementation of the BAM index classes.
- */
-public class GATKBAMIndexUnitTest extends BaseTest {
-    private static File bamFile = new File(validationDataLocation+"MV1994.selected.bam");
-
-    /**
-     * Index file forming the source of all unit tests.
-     */
-    private static File bamIndexFile = new File(validationDataLocation+"MV1994.selected.bam.bai");
-
-    /**
-     * Storage for the index itself.
-     */
-    private GATKBAMIndex bamIndex;
-
-    
-    @BeforeClass
-    public void init() throws FileNotFoundException {
-        SAMFileReader reader = new SAMFileReader(bamFile);
-        SAMSequenceDictionary sequenceDictionary = reader.getFileHeader().getSequenceDictionary();
-        reader.close();
-        
-        bamIndex = new GATKBAMIndex(bamIndexFile);
-    }
-
-    @Test
-    public void testNumberAndSizeOfIndexLevels() {
-        // The correct values for this test are pulled directly from the
-        // SAM Format Specification v1.3-r882, Section 4.1.1, last paragraph.
-        Assert.assertEquals(GATKBAMIndex.getNumIndexLevels(),6,"Incorrect number of levels in BAM index");
-
-        // Level 0
-        Assert.assertEquals(GATKBAMIndex.getFirstBinInLevel(0),0);
-        Assert.assertEquals(bamIndex.getLevelSize(0),1);
-
-        // Level 1
-        Assert.assertEquals(GATKBAMIndex.getFirstBinInLevel(1),1);
-        Assert.assertEquals(bamIndex.getLevelSize(1),8-1+1);
-
-        // Level 2
-        Assert.assertEquals(GATKBAMIndex.getFirstBinInLevel(2),9);
-        Assert.assertEquals(bamIndex.getLevelSize(2),72-9+1);
-
-        // Level 3
-        Assert.assertEquals(GATKBAMIndex.getFirstBinInLevel(3),73);
-        Assert.assertEquals(bamIndex.getLevelSize(3),584-73+1);
-
-        // Level 4
-        Assert.assertEquals(GATKBAMIndex.getFirstBinInLevel(4),585);
-        Assert.assertEquals(bamIndex.getLevelSize(4),4680-585+1);
-
-        // Level 5                                
-        Assert.assertEquals(GATKBAMIndex.getFirstBinInLevel(5),4681);
-        Assert.assertEquals(bamIndex.getLevelSize(5),37448-4681+1);
-    }
-
-    @Test( expectedExceptions = UserException.MalformedFile.class )
-    public void testDetectTruncatedBamIndexWordBoundary() {
-        GATKBAMIndex index = new GATKBAMIndex(new File(privateTestDir + "truncated_at_word_boundary.bai"));
-        index.readReferenceSequence(0);
-    }
-
-    @Test( expectedExceptions = UserException.MalformedFile.class )
-    public void testDetectTruncatedBamIndexNonWordBoundary() {
-        GATKBAMIndex index = new GATKBAMIndex(new File(privateTestDir + "truncated_at_non_word_boundary.bai"));
-        index.readReferenceSequence(0);
-    }
-
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/datasources/reads/GATKWalkerBenchmark.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/datasources/reads/GATKWalkerBenchmark.java
deleted file mode 100644
index 30eaeb6..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/datasources/reads/GATKWalkerBenchmark.java
+++ /dev/null
@@ -1,141 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.datasources.reads;
-
-import com.google.caliper.Param;
-import org.broadinstitute.gatk.utils.commandline.Tags;
-import org.broadinstitute.gatk.engine.GenomeAnalysisEngine;
-import org.broadinstitute.gatk.engine.arguments.GATKArgumentCollection;
-import org.broadinstitute.gatk.engine.contexts.ReferenceContext;
-import org.broadinstitute.gatk.engine.filters.ReadFilter;
-import org.broadinstitute.gatk.engine.filters.UnmappedReadFilter;
-import org.broadinstitute.gatk.engine.refdata.RefMetaDataTracker;
-import org.broadinstitute.gatk.engine.refdata.utils.RMDTriplet;
-import org.broadinstitute.gatk.engine.walkers.ReadWalker;
-import org.broadinstitute.gatk.engine.walkers.Walker;
-import org.broadinstitute.gatk.tools.walkers.qc.CountLoci;
-import org.broadinstitute.gatk.tools.walkers.qc.CountReads;
-import org.broadinstitute.gatk.utils.classloader.JVMUtils;
-import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
-
-import java.io.File;
-import java.util.Collections;
-
-/**
- * Created by IntelliJ IDEA.
- * User: mhanna
- * Date: Feb 25, 2011
- * Time: 10:16:54 AM
- * To change this template use File | Settings | File Templates.
- */
-public class GATKWalkerBenchmark extends ReadProcessingBenchmark {
-    @Param
-    private String bamFile;
-
-    @Param
-    private Integer maxReads;
-
-    @Param
-    private String referenceFile;
-
-    @Param
-    private WalkerType walkerType;
-
-    @Override
-    public String getBAMFile() { return bamFile; }
-
-    @Override
-    public Integer getMaxReads() { return maxReads; }    
-
-    @Override
-    public void setUp() {
-        super.setUp();
-    }
-
-    public void timeWalkerPerformance(final int reps) {
-        for(int i = 0; i < reps; i++) {
-            GenomeAnalysisEngine engine = new GenomeAnalysisEngine();
-
-            // Establish the argument collection
-            GATKArgumentCollection argCollection = new GATKArgumentCollection();
-            argCollection.referenceFile = new File(referenceFile);
-            argCollection.samFiles = Collections.singletonList(inputFile.getAbsolutePath());
-
-            engine.setArguments(argCollection);
-            // Bugs in the engine mean that this has to be set twice.
-            engine.setSAMFileIDs(Collections.singletonList(new SAMReaderID(inputFile,new Tags())));
-            engine.setFilters(Collections.<ReadFilter>singletonList(new UnmappedReadFilter()));
-            engine.setReferenceMetaDataFiles(Collections.<RMDTriplet>emptyList());
-
-            // Create the walker
-            engine.setWalker(walkerType.create());
-
-            engine.execute();
-        }
-    }
-
-    private enum WalkerType {
-        COUNT_READS {
-            @Override
-            Walker create() { return new CountReads(); }
-        },
-        COUNT_BASES_IN_READ {
-            @Override
-            Walker create() { return new CountBasesInReadPerformanceWalker(); }
-        },
-        COUNT_LOCI {
-            @Override
-            Walker create() {
-                CountLoci walker = new CountLoci();
-                JVMUtils.setFieldValue(JVMUtils.findField(CountLoci.class,"out"),walker,System.out);
-                return walker;
-            }
-        };
-        abstract Walker create();
-    }
-}
-
-class CountBasesInReadPerformanceWalker extends ReadWalker<Integer,Long> {
-    private long As;
-    private long Cs;
-    private long Gs;
-    private long Ts;
-
-    public Integer map(ReferenceContext ref, GATKSAMRecord read, RefMetaDataTracker tracker) {
-        for(byte base: read.getReadBases()) {
-            switch(base) {
-                case 'A': As++; break;
-                case 'C': Cs++; break;
-                case 'G': Gs++; break;
-                case 'T': Ts++; break;
-            }
-        }
-        return 1;
-    }
-
-    public Long reduceInit() { return 0L; }
-    public Long reduce(Integer value, Long accum) { return value + accum; }
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/datasources/reads/IntervalOverlapFilteringIteratorUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/datasources/reads/IntervalOverlapFilteringIteratorUnitTest.java
deleted file mode 100644
index 90ac754..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/datasources/reads/IntervalOverlapFilteringIteratorUnitTest.java
+++ /dev/null
@@ -1,150 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.datasources.reads;
-
-import htsjdk.samtools.SAMFileHeader;
-import htsjdk.samtools.SAMRecord;
-import htsjdk.samtools.SAMSequenceRecord;
-import org.broadinstitute.gatk.utils.GenomeLoc;
-import org.broadinstitute.gatk.utils.GenomeLocParser;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-import org.broadinstitute.gatk.utils.sam.ArtificialSAMUtils;
-import org.testng.Assert;
-import org.testng.annotations.BeforeClass;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
-
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-
-public class IntervalOverlapFilteringIteratorUnitTest {
-
-    private SAMFileHeader header;
-    private GenomeLoc firstContig;
-    private GenomeLoc secondContig;
-
-    /** Basic aligned and mapped read. */
-    private SAMRecord readMapped;
-
-    /** Read with no contig specified in the read, -L UNMAPPED */
-    private SAMRecord readNoReference;
-
-    /** This read has a start position, but is flagged that it's not mapped. */
-    private SAMRecord readUnmappedFlag;
-
-    /** This read is from the second contig. */
-    private SAMRecord readSecondContig;
-
-    /** This read says it's aligned, but actually has an unknown start. */
-    private SAMRecord readUnknownStart;
-
-    /** The above reads in the order one would expect to find them in a sorted BAM. */
-    private List<SAMRecord> testReads;
-
-    @BeforeClass
-    public void init() {
-        header = ArtificialSAMUtils.createArtificialSamHeader(3, 1, ArtificialSAMUtils.DEFAULT_READ_LENGTH * 2);
-        GenomeLocParser genomeLocParser = new GenomeLocParser(header.getSequenceDictionary());
-        SAMSequenceRecord record;
-
-        record = header.getSequence(0);
-        firstContig = genomeLocParser.createGenomeLoc(record.getSequenceName(), 1, record.getSequenceLength());
-        record = header.getSequence(1);
-        secondContig = genomeLocParser.createGenomeLoc(record.getSequenceName(), 1, record.getSequenceLength());
-
-        readMapped = createMappedRead("mapped", 1);
-
-        readUnmappedFlag = createMappedRead("unmappedFlagged", 2);
-        readUnmappedFlag.setReadUnmappedFlag(true);
-
-        readSecondContig = createMappedRead("secondContig", 3);
-        readSecondContig.setReferenceName(secondContig.getContig());
-
-        /* This read says it's aligned, but to a contig not in the header. */
-        SAMRecord readUnknownContig = createMappedRead("unknownContig", 4);
-        readUnknownContig.setReferenceName("unknownContig");
-
-        readUnknownStart = createMappedRead("unknownStart", 1);
-        readUnknownStart.setAlignmentStart(SAMRecord.NO_ALIGNMENT_START);
-
-        readNoReference = createUnmappedRead("unmappedNoReference");
-
-        testReads = new ArrayList<SAMRecord>();
-        testReads.add(readMapped);
-        testReads.add(readUnmappedFlag);
-        testReads.add(readUnknownStart);
-        testReads.add(readSecondContig);
-        testReads.add(readUnknownContig);
-        testReads.add(readNoReference);
-    }
-
-    @DataProvider(name = "filteringIteratorTestData")
-    public Object[][] getFilteringIteratorTestData() {
-        return new Object[][] {
-                new Object[] {Arrays.asList(firstContig), Arrays.asList(readMapped, readUnmappedFlag, readUnknownStart)},
-                new Object[] {Arrays.asList(GenomeLoc.UNMAPPED), Arrays.asList(readNoReference)},
-                new Object[] {Arrays.asList(firstContig, secondContig), Arrays.asList(readMapped, readUnmappedFlag, readUnknownStart, readSecondContig)}
-        };
-    }
-
-    @Test(dataProvider = "filteringIteratorTestData")
-    public void testFilteringIterator(List<GenomeLoc> locs, List<SAMRecord> expected) {
-        IntervalOverlapFilteringIterator filterIter = new IntervalOverlapFilteringIterator(
-                ArtificialSAMUtils.createReadIterator(testReads), locs);
-
-        List<SAMRecord> actual = new ArrayList<SAMRecord>();
-        while (filterIter.hasNext()) {
-            actual.add(filterIter.next());
-        }
-        Assert.assertEquals(actual, expected);
-    }
-
-    @Test(expectedExceptions = ReviewedGATKException.class)
-    public void testMappedAndUnmapped() {
-        new IntervalOverlapFilteringIterator(
-                ArtificialSAMUtils.createReadIterator(testReads),
-                Arrays.asList(firstContig, GenomeLoc.UNMAPPED));
-    }
-
-    private SAMRecord createUnmappedRead(String name) {
-        return ArtificialSAMUtils.createArtificialRead(
-                header,
-                name,
-                SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX,
-                SAMRecord.NO_ALIGNMENT_START,
-                ArtificialSAMUtils.DEFAULT_READ_LENGTH);
-    }
-
-    private SAMRecord createMappedRead(String name, int start) {
-        return ArtificialSAMUtils.createArtificialRead(
-                header,
-                name,
-                0,
-                start,
-                ArtificialSAMUtils.DEFAULT_READ_LENGTH);
-    }
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/datasources/reads/MockLocusShard.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/datasources/reads/MockLocusShard.java
deleted file mode 100644
index eb3c894..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/datasources/reads/MockLocusShard.java
+++ /dev/null
@@ -1,51 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.datasources.reads;
-
-import org.broadinstitute.gatk.engine.datasources.reads.LocusShard;
-import org.broadinstitute.gatk.engine.datasources.reads.SAMReaderID;
-import org.broadinstitute.gatk.engine.resourcemanagement.ThreadAllocation;
-import org.broadinstitute.gatk.utils.GenomeLoc;
-import org.broadinstitute.gatk.engine.datasources.reads.SAMDataSource;
-import org.broadinstitute.gatk.utils.GenomeLocParser;
-
-import java.util.List;
-import java.util.Collections;
-
-/**
- * A mock locus shard, usable for infrastructure that requires a shard to behave properly.
- *
- * @author mhanna
- * @version 0.1
- */
-public class MockLocusShard extends LocusShard {
-    public MockLocusShard(final GenomeLocParser genomeLocParser,final List<GenomeLoc> intervals) {
-        super(  genomeLocParser,
-                new SAMDataSource(Collections.<SAMReaderID>emptyList(),new ThreadAllocation(),null,genomeLocParser),
-                intervals,
-                null);
-    }
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/datasources/reads/PicardBaselineBenchmark.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/datasources/reads/PicardBaselineBenchmark.java
deleted file mode 100644
index f96a35a..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/datasources/reads/PicardBaselineBenchmark.java
+++ /dev/null
@@ -1,101 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.datasources.reads;
-
-import com.google.caliper.Param;
-import com.google.caliper.SimpleBenchmark;
-import htsjdk.samtools.util.SamLocusIterator;
-import htsjdk.samtools.SAMFileReader;
-import htsjdk.samtools.SAMRecord;
-import htsjdk.samtools.util.CloseableIterator;
-
-import java.io.File;
-import java.util.Iterator;
-
-/**
- * Created by IntelliJ IDEA.
- * User: mhanna
- * Date: Apr 22, 2011
- * Time: 3:51:06 PM
- * To change this template use File | Settings | File Templates.
- */
-public class PicardBaselineBenchmark extends ReadProcessingBenchmark {
-    @Param
-    private String bamFile;
-
-    @Param
-    private Integer maxReads;
-
-    @Override
-    public String getBAMFile() { return bamFile; }
-
-    @Override
-    public Integer getMaxReads() { return maxReads; }
-    
-    public void timeDecompressBamFile(int reps) {
-        for(int i = 0; i < reps; i++) {
-            SAMFileReader reader = new SAMFileReader(inputFile);
-            CloseableIterator<SAMRecord> iterator = reader.iterator();
-            while(iterator.hasNext())
-                iterator.next();
-            iterator.close();
-            reader.close();
-        }
-    }
-
-    public void timeExtractTag(int reps) {
-        for(int i = 0; i < reps; i++) {
-            SAMFileReader reader = new SAMFileReader(inputFile);
-            CloseableIterator<SAMRecord> iterator = reader.iterator();
-            while(iterator.hasNext()) {
-                SAMRecord read = iterator.next();
-                read.getAttribute("OQ");
-            }
-            iterator.close();
-            reader.close();
-        }
-    }
-
-    public void timeSamLocusIterator(int reps) {
-        for(int i = 0; i < reps; i++) {
-            SAMFileReader reader = new SAMFileReader(inputFile);
-            long loci = 0;
-
-            SamLocusIterator samLocusIterator = new SamLocusIterator(reader);
-            samLocusIterator.setEmitUncoveredLoci(false);
-            Iterator<SamLocusIterator.LocusInfo> workhorseIterator = samLocusIterator.iterator();
-
-            while(workhorseIterator.hasNext()) {
-                SamLocusIterator.LocusInfo locusInfo = workhorseIterator.next();
-                // Use the value of locusInfo to avoid optimization.
-                if(locusInfo != null) loci++;
-            }
-            System.out.printf("Total loci = %d%n",loci);
-
-            reader.close();
-        }
-    }
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/datasources/reads/ReadProcessingBenchmark.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/datasources/reads/ReadProcessingBenchmark.java
deleted file mode 100644
index 71fc81a..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/datasources/reads/ReadProcessingBenchmark.java
+++ /dev/null
@@ -1,83 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.datasources.reads;
-
-import com.google.caliper.Param;
-import com.google.caliper.SimpleBenchmark;
-import htsjdk.samtools.SAMFileReader;
-import htsjdk.samtools.SAMFileWriter;
-import htsjdk.samtools.SAMFileWriterFactory;
-import htsjdk.samtools.SAMRecord;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-
-import java.io.File;
-import java.io.IOException;
-
-/**
- * Created by IntelliJ IDEA.
- * User: mhanna
- * Date: Apr 22, 2011
- * Time: 4:04:38 PM
- * To change this template use File | Settings | File Templates.
- */
-public abstract class ReadProcessingBenchmark extends SimpleBenchmark {
-    protected abstract String getBAMFile();
-    protected abstract Integer getMaxReads();
-
-    protected File inputFile;
-
-    @Override
-    public void setUp() {
-        SAMFileReader fullInputFile = new SAMFileReader(new File(getBAMFile()));
-
-        File tempFile = null;
-        try {
-            tempFile = File.createTempFile("testfile_"+getMaxReads(),".bam");
-        }
-        catch(IOException ex) {
-            throw new ReviewedGATKException("Unable to create temporary BAM",ex);
-        }
-        SAMFileWriterFactory factory = new SAMFileWriterFactory();
-        factory.setCreateIndex(true);
-        SAMFileWriter writer = factory.makeBAMWriter(fullInputFile.getFileHeader(),true,tempFile);
-
-        long numReads = 0;
-        for(SAMRecord read: fullInputFile) {
-            if(numReads++ >= getMaxReads())
-                break;
-            writer.addAlignment(read);
-        }
-
-        writer.close();
-
-        inputFile = tempFile;
-    }
-
-    @Override
-    public void tearDown() {
-        inputFile.delete();
-    }
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/datasources/reads/ReadShardBalancerUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/datasources/reads/ReadShardBalancerUnitTest.java
deleted file mode 100644
index be48194..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/datasources/reads/ReadShardBalancerUnitTest.java
+++ /dev/null
@@ -1,195 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.datasources.reads;
-
-import htsjdk.samtools.*;
-import org.broadinstitute.gatk.engine.arguments.ValidationExclusion;
-import org.broadinstitute.gatk.engine.downsampling.DownsampleType;
-import org.broadinstitute.gatk.engine.downsampling.DownsamplingMethod;
-import org.broadinstitute.gatk.engine.filters.ReadFilter;
-import org.broadinstitute.gatk.engine.resourcemanagement.ThreadAllocation;
-import org.broadinstitute.gatk.utils.BaseTest;
-import org.broadinstitute.gatk.utils.GenomeLocParser;
-import org.broadinstitute.gatk.utils.commandline.Tags;
-import org.broadinstitute.gatk.utils.sam.ArtificialSAMUtils;
-import org.broadinstitute.gatk.utils.sam.ArtificialSingleSampleReadStream;
-import org.testng.Assert;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
-
-import java.io.File;
-import java.util.ArrayList;
-import java.util.Arrays;
-
-public class ReadShardBalancerUnitTest extends BaseTest {
-
-    /**
-     * Tests to ensure that ReadShardBalancer works as expected and does not place shard boundaries
-     * at inappropriate places, such as within an alignment start position
-     */
-    private static class ReadShardBalancerTest extends TestDataProvider {
-        private int numContigs;
-        private int numStacksPerContig;
-        private int stackSize;
-        private int numUnmappedReads;
-        private DownsamplingMethod downsamplingMethod;
-        private int expectedReadCount;
-
-        private SAMFileHeader header;
-        private SAMReaderID testBAM;
-
-        public ReadShardBalancerTest( int numContigs,
-                                      int numStacksPerContig,
-                                      int stackSize,
-                                      int numUnmappedReads,
-                                      int downsamplingTargetCoverage ) {
-            super(ReadShardBalancerTest.class);
-
-            this.numContigs = numContigs;
-            this.numStacksPerContig = numStacksPerContig;
-            this.stackSize = stackSize;
-            this.numUnmappedReads = numUnmappedReads;
-
-            this.downsamplingMethod = new DownsamplingMethod(DownsampleType.BY_SAMPLE, downsamplingTargetCoverage, null);
-            this.expectedReadCount = Math.min(stackSize, downsamplingTargetCoverage) * numStacksPerContig * numContigs + numUnmappedReads;
-
-            setName(String.format("%s: numContigs=%d numStacksPerContig=%d stackSize=%d numUnmappedReads=%d downsamplingTargetCoverage=%d",
-                                  getClass().getSimpleName(), numContigs, numStacksPerContig, stackSize, numUnmappedReads, downsamplingTargetCoverage));
-        }
-
-        public void run() {
-            createTestBAM();
-
-            SAMDataSource dataSource = new SAMDataSource(Arrays.asList(testBAM),
-                                                         new ThreadAllocation(),
-                                                         null,
-                                                         new GenomeLocParser(header.getSequenceDictionary()),
-                                                         false,
-                                                         ValidationStringency.SILENT,
-                                                         ReadShard.DEFAULT_MAX_READS,  // reset ReadShard.MAX_READS to ReadShard.DEFAULT_MAX_READS for each test
-                                                         downsamplingMethod,
-                                                         new ValidationExclusion(),
-                                                         new ArrayList<ReadFilter>(),
-                                                         false);
-
-            Iterable<Shard> shardIterator = dataSource.createShardIteratorOverAllReads(new ReadShardBalancer());
-
-            SAMRecord readAtEndOfLastShard = null;
-            int totalReadsSeen = 0;
-
-            for ( Shard shard : shardIterator ) {
-                int numContigsThisShard = 0;
-                SAMRecord lastRead = null;
-
-                for ( SAMRecord read : shard.iterator() ) {
-                    totalReadsSeen++;
-
-                    if ( lastRead == null ) {
-                        numContigsThisShard = 1;
-                    }
-                    else if ( ! read.getReadUnmappedFlag() && ! lastRead.getReferenceIndex().equals(read.getReferenceIndex()) ) {
-                        numContigsThisShard++;
-                    }
-
-                    // If the last read from the previous shard is not unmapped, we have to make sure
-                    // that no reads in this shard start at the same position
-                    if ( readAtEndOfLastShard != null && ! readAtEndOfLastShard.getReadUnmappedFlag() ) {
-                        Assert.assertFalse(readAtEndOfLastShard.getReferenceIndex().equals(read.getReferenceIndex()) &&
-                                           readAtEndOfLastShard.getAlignmentStart() == read.getAlignmentStart(),
-                                           String.format("Reads from alignment start position %d:%d are split across multiple shards",
-                                                         read.getReferenceIndex(), read.getAlignmentStart()));
-                    }
-
-                    lastRead = read;
-                }
-
-                // There should never be reads from more than 1 contig in a shard (ignoring unmapped reads)
-                Assert.assertTrue(numContigsThisShard == 1, "found a shard with reads from multiple contigs");
-
-                readAtEndOfLastShard = lastRead;
-            }
-
-            Assert.assertEquals(totalReadsSeen, expectedReadCount, "did not encounter the expected number of reads");
-        }
-
-        private void createTestBAM() {
-            header = ArtificialSAMUtils.createArtificialSamHeader(numContigs, 1, 100000);
-            SAMReadGroupRecord readGroup = new SAMReadGroupRecord("foo");
-            readGroup.setSample("testSample");
-            header.addReadGroup(readGroup);
-            ArtificialSingleSampleReadStream artificialReads = new ArtificialSingleSampleReadStream(header,
-                                                                                                    "foo",
-                                                                                                    numContigs,
-                                                                                                    numStacksPerContig,
-                                                                                                    stackSize,
-                                                                                                    stackSize,
-                                                                                                    1,
-                                                                                                    100,
-                                                                                                    50,
-                                                                                                    150,
-                                                                                                    numUnmappedReads);
-
-            final File testBAMFile = createTempFile("SAMDataSourceFillShardBoundaryTest", ".bam");
-
-            SAMFileWriter bamWriter = new SAMFileWriterFactory().setCreateIndex(true).makeBAMWriter(header, true, testBAMFile);
-            for ( SAMRecord read : artificialReads ) {
-                bamWriter.addAlignment(read);
-            }
-            bamWriter.close();
-
-            testBAM =  new SAMReaderID(testBAMFile, new Tags());
-
-            new File(testBAM.getSamFilePath().replace(".bam", ".bai")).deleteOnExit();
-            new File(testBAM.getSamFilePath() + ".bai").deleteOnExit();
-        }
-    }
-
-    @DataProvider(name = "ReadShardBalancerTestDataProvider")
-    public Object[][] createReadShardBalancerTests() {
-        for ( int numContigs = 1; numContigs <= 3; numContigs++ ) {
-            for ( int numStacksPerContig : Arrays.asList(1, 2, 4) ) {
-                // Use crucial read shard boundary values as the stack sizes
-                for ( int stackSize : Arrays.asList(ReadShard.DEFAULT_MAX_READS / 2, ReadShard.DEFAULT_MAX_READS / 2 + 10, ReadShard.DEFAULT_MAX_READS, ReadShard.DEFAULT_MAX_READS - 1, ReadShard.DEFAULT_MAX_READS + 1, ReadShard.DEFAULT_MAX_READS * 2) ) {
-                    for ( int numUnmappedReads : Arrays.asList(0, ReadShard.DEFAULT_MAX_READS / 2, ReadShard.DEFAULT_MAX_READS * 2) ) {
-                        // The first value will result in no downsampling at all, the others in some downsampling
-                        for ( int downsamplingTargetCoverage : Arrays.asList(ReadShard.DEFAULT_MAX_READS * 10, ReadShard.DEFAULT_MAX_READS, ReadShard.DEFAULT_MAX_READS / 2) ) {
-                            new ReadShardBalancerTest(numContigs, numStacksPerContig, stackSize, numUnmappedReads, downsamplingTargetCoverage);
-                        }
-                    }
-                }
-            }
-        }
-
-        return ReadShardBalancerTest.getTests(ReadShardBalancerTest.class);
-    }
-
-    @Test(dataProvider = "ReadShardBalancerTestDataProvider")
-    public void runReadShardBalancerTest( ReadShardBalancerTest test ) {
-        logger.warn("Running test: " + test);
-
-        test.run();
-    }
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/datasources/reads/SAMDataSourceUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/datasources/reads/SAMDataSourceUnitTest.java
deleted file mode 100644
index 526b8ce..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/datasources/reads/SAMDataSourceUnitTest.java
+++ /dev/null
@@ -1,253 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.datasources.reads;
-
-import htsjdk.samtools.reference.IndexedFastaSequenceFile;
-import htsjdk.samtools.*;
-import org.broadinstitute.gatk.utils.BaseTest;
-import org.broadinstitute.gatk.utils.commandline.Tags;
-import org.broadinstitute.gatk.engine.arguments.ValidationExclusion;
-import org.broadinstitute.gatk.engine.filters.ReadFilter;
-import org.broadinstitute.gatk.engine.iterators.ReadTransformer;
-import org.broadinstitute.gatk.engine.iterators.GATKSAMIterator;
-import org.broadinstitute.gatk.engine.resourcemanagement.ThreadAllocation;
-import org.broadinstitute.gatk.utils.GenomeLoc;
-import org.broadinstitute.gatk.utils.GenomeLocParser;
-import org.broadinstitute.gatk.utils.exceptions.UserException;
-import org.broadinstitute.gatk.utils.fasta.CachingIndexedFastaSequenceFile;
-import org.broadinstitute.gatk.utils.interval.IntervalMergingRule;
-import org.testng.annotations.AfterMethod;
-import org.testng.annotations.BeforeMethod;
-import org.testng.annotations.Test;
-
-import java.io.File;
-import java.io.FileNotFoundException;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.List;
-
-import static org.testng.Assert.*;
-
-/**
- * <p/>
- * Class SAMDataSourceUnitTest
- * <p/>
- * The test of the SAMBAM simple data source.
- */
-public class SAMDataSourceUnitTest extends BaseTest {
-
-    // TODO: These legacy tests should really be replaced with a more comprehensive suite of tests for SAMDataSource
-
-    private List<SAMReaderID> readers;
-    private IndexedFastaSequenceFile seq;
-    private GenomeLocParser genomeLocParser;
-
-    /**
-     * This function does the setup of our parser, before each method call.
-     * <p/>
-     * Called before every test case method.
-     */
-    @BeforeMethod
-    public void doForEachTest() throws FileNotFoundException {
-        readers = new ArrayList<SAMReaderID>();
-
-        // sequence
-        seq = new CachingIndexedFastaSequenceFile(new File(b36KGReference));
-        genomeLocParser = new GenomeLocParser(seq.getSequenceDictionary());
-    }
-
-    /**
-     * Tears down the test fixture after each call.
-     * <p/>
-     * Called after every test case method.
-     */
-    @AfterMethod
-    public void undoForEachTest() {
-        seq = null;
-        readers.clear();
-    }
-
-
-    /** Test out that we can shard the file and iterate over every read */
-    @Test
-    public void testLinearBreakIterateAll() {
-        logger.warn("Executing testLinearBreakIterateAll");
-
-        // setup the data
-        readers.add(new SAMReaderID(new File(validationDataLocation+"/NA12878.chrom6.SLX.SRP000032.2009_06.selected.bam"),new Tags()));
-
-        // the sharding strat.
-        SAMDataSource data = new SAMDataSource(readers,
-                new ThreadAllocation(),
-                null,
-                genomeLocParser,
-                false,
-                ValidationStringency.SILENT,
-                null,
-                null,
-                new ValidationExclusion(),
-                new ArrayList<ReadFilter>(),
-                false);
-
-        Iterable<Shard> strat = data.createShardIteratorOverMappedReads(new LocusShardBalancer());
-        int count = 0;
-
-        try {
-            for (Shard sh : strat) {
-                int readCount = 0;
-                count++;
-
-                GenomeLoc firstLocus = sh.getGenomeLocs().get(0), lastLocus = sh.getGenomeLocs().get(sh.getGenomeLocs().size()-1);
-                logger.debug("Start : " + firstLocus.getStart() + " stop : " + lastLocus.getStop() + " contig " + firstLocus.getContig());
-                logger.debug("count = " + count);
-                GATKSAMIterator datum = data.seek(sh);
-
-                // for the first couple of shards make sure we can see the reads
-                if (count < 5) {
-                    for (SAMRecord r : datum) {
-                    }
-                    readCount++;
-                }
-                datum.close();
-
-                // if we're over 100 shards, break out
-                if (count > 100) {
-                    break;
-                }
-            }
-        }
-        catch (UserException.CouldNotReadInputFile e) {
-            e.printStackTrace();  //To change body of catch statement use File | Settings | File Templates.
-            fail("testLinearBreakIterateAll: We Should get a UserException.CouldNotReadInputFile exception");
-        }
-    }
-
-    /** Test that we clear program records when requested */
-    @Test
-    public void testRemoveProgramRecords() {
-        logger.warn("Executing testRemoveProgramRecords");
-
-        // setup the data
-        readers.add(new SAMReaderID(new File(b37GoodBAM),new Tags()));
-
-        // use defaults
-        SAMDataSource data = new SAMDataSource(readers,
-                new ThreadAllocation(),
-                null,
-                genomeLocParser,
-                false,
-                ValidationStringency.SILENT,
-                null,
-                null,
-                new ValidationExclusion(),
-                new ArrayList<ReadFilter>(),
-                false);
-
-        List<SAMProgramRecord> defaultProgramRecords = data.getHeader().getProgramRecords();
-        assertTrue(defaultProgramRecords.size() != 0, "testRemoveProgramRecords: No program records found when using default constructor");
-
-        boolean removeProgramRecords = false;
-        data = new SAMDataSource(readers,
-                new ThreadAllocation(),
-                null,
-                genomeLocParser,
-                false,
-                ValidationStringency.SILENT,
-                null,
-                null,
-                new ValidationExclusion(),
-                new ArrayList<ReadFilter>(),
-                Collections.<ReadTransformer>emptyList(),
-                false,
-                (byte) -1,
-                removeProgramRecords,
-                false,
-                null, IntervalMergingRule.ALL);
-
-        List<SAMProgramRecord> dontRemoveProgramRecords = data.getHeader().getProgramRecords();
-        assertEquals(dontRemoveProgramRecords, defaultProgramRecords, "testRemoveProgramRecords: default program records differ from removeProgramRecords = false");
-
-        removeProgramRecords = true;
-        data = new SAMDataSource(readers,
-                new ThreadAllocation(),
-                null,
-                genomeLocParser,
-                false,
-                ValidationStringency.SILENT,
-                null,
-                null,
-                new ValidationExclusion(),
-                new ArrayList<ReadFilter>(),
-                Collections.<ReadTransformer>emptyList(),
-                false,
-                (byte) -1,
-                removeProgramRecords,
-                false,
-                null, IntervalMergingRule.ALL);
-
-        List<SAMProgramRecord> doRemoveProgramRecords = data.getHeader().getProgramRecords();
-        assertTrue(doRemoveProgramRecords.isEmpty(), "testRemoveProgramRecords: program records not cleared when removeProgramRecords = true");
-    }
-
-    @Test(expectedExceptions = UserException.class)
-    public void testFailOnReducedReads() {
-        readers.add(new SAMReaderID(new File(privateTestDir + "old.reduced.bam"), new Tags()));
-
-        SAMDataSource data = new SAMDataSource(readers,
-                new ThreadAllocation(),
-                null,
-                genomeLocParser,
-                false,
-                ValidationStringency.SILENT,
-                null,
-                null,
-                new ValidationExclusion(),
-                new ArrayList<ReadFilter>(),
-                false);
-    }
-
-    @Test(expectedExceptions = UserException.class)
-    public void testFailOnReducedReadsRemovingProgramRecords() {
-        readers.add(new SAMReaderID(new File(privateTestDir + "old.reduced.bam"), new Tags()));
-
-        SAMDataSource data = new SAMDataSource(readers,
-                new ThreadAllocation(),
-                null,
-                genomeLocParser,
-                false,
-                ValidationStringency.SILENT,
-                null,
-                null,
-                new ValidationExclusion(),
-                new ArrayList<ReadFilter>(),
-                Collections.<ReadTransformer>emptyList(),
-                false,
-                (byte) -1,
-                true,
-                false,
-                null, IntervalMergingRule.ALL);
-    }
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/datasources/reads/SAMReaderIDUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/datasources/reads/SAMReaderIDUnitTest.java
deleted file mode 100644
index bb1cd75..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/datasources/reads/SAMReaderIDUnitTest.java
+++ /dev/null
@@ -1,49 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.datasources.reads;
-
-import org.broadinstitute.gatk.utils.BaseTest;
-import org.broadinstitute.gatk.utils.commandline.Tags;
-import org.testng.Assert;
-import org.testng.annotations.Test;
-
-import java.io.File;
-
-public class SAMReaderIDUnitTest extends BaseTest {
-
-    @Test
-    public void testSAMReaderIDHashingAndEquality() {
-        // Test to make sure that two SAMReaderIDs that point at the same file via an absolute vs. relative
-        // path are equal according to equals() and have the same hash code
-        final File relativePathToBAMFile = new File(publicTestDir + "exampleBAM.bam");
-        final File absolutePathToBAMFile = new File(relativePathToBAMFile.getAbsolutePath());
-        final SAMReaderID relativePathSAMReaderID = new SAMReaderID(relativePathToBAMFile, new Tags());
-        final SAMReaderID absolutePathSAMReaderID = new SAMReaderID(absolutePathToBAMFile, new Tags());
-
-        Assert.assertEquals(relativePathSAMReaderID, absolutePathSAMReaderID, "Absolute-path and relative-path SAMReaderIDs not equal according to equals()");
-        Assert.assertEquals(relativePathSAMReaderID.hashCode(), absolutePathSAMReaderID.hashCode(), "Absolute-path and relative-path SAMReaderIDs have different hash codes");
-    }
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/datasources/reads/SeekableBufferedStreamUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/datasources/reads/SeekableBufferedStreamUnitTest.java
deleted file mode 100644
index c24a21a..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/datasources/reads/SeekableBufferedStreamUnitTest.java
+++ /dev/null
@@ -1,101 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.datasources.reads;
-
-import htsjdk.samtools.seekablestream.SeekableBufferedStream;
-import htsjdk.samtools.seekablestream.SeekableFileStream;
-import org.broadinstitute.gatk.utils.BaseTest;
-import org.broadinstitute.gatk.utils.exceptions.GATKException;
-import org.testng.Assert;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
-
-import java.io.File;
-import java.io.IOException;
-
-/**
- * Test basic functionality in SeekableBufferedStream.
- */
-public class SeekableBufferedStreamUnitTest extends BaseTest {
-    private static File InputFile = new File(validationDataLocation + "megabyteZeros.dat");
-
-    final private int BUFFERED_STREAM_BUFFER_SIZE = 100;
-    private byte buffer[] = new byte[BUFFERED_STREAM_BUFFER_SIZE * 10];
-
-
-    @DataProvider(name = "BasicArgumentsDivisible")
-    public Integer[][] DivisableReads() {
-        return new Integer[][]{{1}, {4}, {5}, {10}, {20}, {50}, {100}};
-    }
-
-    @DataProvider(name = "BasicArgumentsIndivisibleAndSmall")
-    public Integer[][] InDivisableReadsSmall() {
-        return new Integer[][]{{3}, {11}, {31}, {51}, {77}, {99}};
-    }
-
-    @DataProvider(name = "BasicArgumentsIndivisibleYetLarge")
-    public Integer[][] InDivisableReadsLarge() {
-        return new Integer[][]{{101}, {151}, {205}, {251}, {301}};
-    }
-
-
-    private void testReadsLength(int length) throws IOException {
-        final int READ_SIZE=100000; //file is 10^6, so make this smaller to be safe.
-
-        SeekableFileStream fileStream = new SeekableFileStream(InputFile);
-        SeekableBufferedStream bufferedStream = new SeekableBufferedStream(fileStream, BUFFERED_STREAM_BUFFER_SIZE);
-
-        for (int i = 0; i < READ_SIZE / length; ++i) {
-            Assert.assertEquals(bufferedStream.read(buffer, 0, length), length);
-        }
-
-    }
-
-    // These tests fail because SeekableBuffered stream may return _less_ than the amount you are asking for.
-    // make sure that you wrap reads with while-loops.  If these test start failing (meaning that the reads work properly,
-    // the layer of protection built into GATKBamIndex can be removed.
-
-    @Test(dataProvider = "BasicArgumentsIndivisibleAndSmall", enabled = true, expectedExceptions = java.lang.AssertionError.class)
-    public void testIndivisableSmallReadsFAIL(Integer readLength) throws IOException {
-        testReadsLength(readLength);
-    }
-
-    //Evidently, if you ask for a read length that's larger than the inernal buffer,
-    //SeekableBufferedStreamdoes something else and gives you what you asked for
-
-    @Test(dataProvider = "BasicArgumentsIndivisibleYetLarge", enabled = true)
-    public void testIndivisableLargeReadsPASS(Integer readLength) throws IOException {
-        testReadsLength(readLength);
-    }
-
-    // if the readlength divides the buffer, there are no failures
-    @Test(dataProvider = "BasicArgumentsDivisible", enabled = true)
-    public void testDivisableReadsPASS(Integer readLength) throws IOException {
-        testReadsLength(readLength);
-    }
-
-
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/datasources/reads/TheoreticalMinimaBenchmark.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/datasources/reads/TheoreticalMinimaBenchmark.java
deleted file mode 100644
index aa66f17..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/datasources/reads/TheoreticalMinimaBenchmark.java
+++ /dev/null
@@ -1,114 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.datasources.reads;
-
-import com.google.caliper.Param;
-import htsjdk.samtools.Cigar;
-import htsjdk.samtools.CigarElement;
-import htsjdk.samtools.SAMFileReader;
-import htsjdk.samtools.SAMRecord;
-import htsjdk.samtools.util.CloseableIterator;
-
-import java.io.File;
-
-/**
- * Created by IntelliJ IDEA.
- * User: mhanna
- * Date: Apr 22, 2011
- * Time: 4:01:23 PM
- * To change this template use File | Settings | File Templates.
- */
-public class TheoreticalMinimaBenchmark extends ReadProcessingBenchmark {
-    @Param
-    private String bamFile;
-
-    @Param
-    private Integer maxReads;
-
-    @Override
-    public String getBAMFile() { return bamFile; }
-
-    @Override
-    public Integer getMaxReads() { return maxReads; }
-
-    public void timeIterateOverEachBase(int reps) {
-        System.out.printf("Processing " + inputFile);
-        for(int i = 0; i < reps; i++) {
-            SAMFileReader reader = new SAMFileReader(inputFile);
-            CloseableIterator<SAMRecord> iterator = reader.iterator();
-
-            long As=0,Cs=0,Gs=0,Ts=0;
-            while(iterator.hasNext()) {
-                SAMRecord read = iterator.next();
-                for(byte base: read.getReadBases()) {
-                    switch(base) {
-                        case 'A': As++; break;
-                        case 'C': Cs++; break;
-                        case 'G': Gs++; break;
-                        case 'T': Ts++; break;
-                    }
-                }
-            }
-            System.out.printf("As = %d; Cs = %d; Gs = %d; Ts = %d; total = %d%n",As,Cs,Gs,Ts,As+Cs+Gs+Ts);
-            iterator.close();
-            reader.close();
-        }
-    }
-
-    public void timeIterateOverCigarString(int reps) {
-        for(int i = 0; i < reps; i++) {
-            long matchMismatches = 0;
-            long insertions = 0;
-            long deletions = 0;
-            long others = 0;
-
-            SAMFileReader reader = new SAMFileReader(inputFile);
-            CloseableIterator<SAMRecord> iterator = reader.iterator();
-            while(iterator.hasNext()) {
-                SAMRecord read = iterator.next();
-
-                Cigar cigar = read.getCigar();
-                for(CigarElement cigarElement: cigar.getCigarElements()) {
-                    int elementSize = cigarElement.getLength();
-                    while(elementSize > 0) {
-                        switch(cigarElement.getOperator()) {
-                            case M: case EQ: case X: matchMismatches++; break;
-                            case I: insertions++; break;
-                            case D: deletions++; break;
-                            default: others++; break;
-                        }
-                        elementSize--;
-                    }
-                }
-            }
-            System.out.printf("Ms = %d; Is = %d; Ds = %d; others = %d; total = %d%n",matchMismatches,insertions,deletions,others,matchMismatches+insertions+deletions+others);
-
-            iterator.close();
-            reader.close();
-        }
-    }
-
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/datasources/reference/ReferenceDataSourceIntegrationTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/datasources/reference/ReferenceDataSourceIntegrationTest.java
deleted file mode 100644
index 46a4cb5..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/datasources/reference/ReferenceDataSourceIntegrationTest.java
+++ /dev/null
@@ -1,75 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.datasources.reference;
-
-import org.broadinstitute.gatk.engine.walkers.WalkerTest;
-import org.broadinstitute.gatk.utils.exceptions.UserException;
-import org.testng.annotations.Test;
-import org.testng.Assert;
-
-import java.io.File;
-import java.io.IOException;
-
-public class ReferenceDataSourceIntegrationTest extends WalkerTest {
-
-    @Test
-    public void testReferenceWithMissingFaiFile() throws IOException {
-        final File dummyReference = createTempFile("dummy", ".fasta");
-        final File dictFile = new File(dummyReference.getAbsolutePath().replace(".fasta", ".dict"));
-        dictFile.deleteOnExit();
-        Assert.assertTrue(dictFile.createNewFile());
-
-        final WalkerTestSpec spec = new WalkerTestSpec(
-            " -T PrintReads" +
-            " -R " + dummyReference.getAbsolutePath() +
-            " -I " + privateTestDir + "NA12878.4.snippet.bam" +
-            " -o %s",
-            1,
-            UserException.MissingReferenceFaiFile.class
-        );
-
-        executeTest("testReferenceWithMissingFaiFile", spec);
-    }
-
-    @Test
-    public void testReferenceWithMissingDictFile() throws IOException {
-        final File dummyReference = createTempFile("dummy", ".fasta");
-        final File faiFile = new File(dummyReference.getAbsolutePath() + ".fai");
-        faiFile.deleteOnExit();
-        Assert.assertTrue(faiFile.createNewFile());
-
-        final WalkerTestSpec spec = new WalkerTestSpec(
-                " -T PrintReads" +
-                " -R " + dummyReference.getAbsolutePath() +
-                " -I " + privateTestDir + "NA12878.4.snippet.bam" +
-                " -o %s",
-                1,
-                UserException.MissingReferenceDictFile.class
-        );
-
-        executeTest("testReferenceWithMissingDictFile", spec);
-    }
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/datasources/rmd/ReferenceOrderedDataPoolUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/datasources/rmd/ReferenceOrderedDataPoolUnitTest.java
deleted file mode 100644
index baa2af0..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/datasources/rmd/ReferenceOrderedDataPoolUnitTest.java
+++ /dev/null
@@ -1,208 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.datasources.rmd;
-
-import org.broadinstitute.gatk.utils.commandline.Tags;
-import org.broadinstitute.gatk.engine.refdata.tracks.RMDTrackBuilder;
-import org.testng.Assert;
-import org.broadinstitute.gatk.utils.BaseTest;
-import org.broadinstitute.gatk.utils.codecs.table.TableFeature;
-import org.broadinstitute.gatk.engine.refdata.utils.LocationAwareSeekableRODIterator;
-import org.broadinstitute.gatk.engine.refdata.utils.RMDTriplet;
-import org.broadinstitute.gatk.engine.refdata.utils.RMDTriplet.RMDStorageType;
-import org.broadinstitute.gatk.utils.GenomeLoc;
-import org.broadinstitute.gatk.utils.GenomeLocParser;
-import org.broadinstitute.gatk.utils.fasta.CachingIndexedFastaSequenceFile;
-
-import static org.testng.Assert.assertTrue;
-import org.testng.annotations.BeforeMethod;
-import org.testng.annotations.BeforeClass;
-import org.testng.annotations.Test;
-
-import java.io.File;
-import java.io.FileNotFoundException;
-
-import htsjdk.samtools.reference.IndexedFastaSequenceFile;
-/**
- * User: hanna
- * Date: May 21, 2009
- * Time: 11:03:04 AM
- * BROAD INSTITUTE SOFTWARE COPYRIGHT NOTICE AND AGREEMENT
- * Software and documentation are copyright 2005 by the Broad Institute.
- * All rights are reserved.
- *
- * Users acknowledge that this software is supplied without any warranty or support.
- * The Broad Institute is not responsible for its use, misuse, or
- * functionality.
- */
-
-/**
- * Test the contents and number of iterators in the pool.
- */
-
-public class ReferenceOrderedDataPoolUnitTest extends BaseTest {
-
-    private RMDTriplet triplet = null;
-    private RMDTrackBuilder builder = null;
-
-    private IndexedFastaSequenceFile seq;
-    private GenomeLocParser genomeLocParser;
-
-    private GenomeLoc testSite1;
-    private GenomeLoc testSite2;
-    private GenomeLoc testSite3;
-
-    private GenomeLoc testInterval1; // an interval matching testSite1 -> testSite2 for queries
-    private GenomeLoc testInterval2; // an interval matching testSite2 -> testSite3 for queries
-
-
-    @BeforeClass
-    public void init() throws FileNotFoundException {
-        seq = new CachingIndexedFastaSequenceFile(new File(hg18Reference));
-        genomeLocParser = new GenomeLocParser(seq);
-
-        testSite1 = genomeLocParser.createGenomeLoc("chrM",10);
-        testSite2 = genomeLocParser.createGenomeLoc("chrM",20);
-        testSite3 = genomeLocParser.createGenomeLoc("chrM",30);
-        testInterval1 = genomeLocParser.createGenomeLoc("chrM",10,20);
-        testInterval2 = genomeLocParser.createGenomeLoc("chrM",20,30);
-    }
-
-    @BeforeMethod
-    public void setUp() {
-        String fileName = privateTestDir + "TabularDataTest.dat";
-
-        triplet = new RMDTriplet("tableTest","Table",fileName,RMDStorageType.FILE,new Tags());
-        // disable auto-index creation/locking in the RMDTrackBuilder for tests
-        builder = new RMDTrackBuilder(seq.getSequenceDictionary(),genomeLocParser,null,true,null);
-    }
-
-    @Test
-    public void testCreateSingleIterator() {
-        ResourcePool iteratorPool = new ReferenceOrderedDataPool(triplet,builder,seq.getSequenceDictionary(),genomeLocParser,false);
-        LocationAwareSeekableRODIterator iterator = (LocationAwareSeekableRODIterator)iteratorPool.iterator( new MappedStreamSegment(testSite1) );
-
-        Assert.assertEquals(iteratorPool.numIterators(), 1, "Number of iterators in the pool is incorrect");
-        Assert.assertEquals(iteratorPool.numAvailableIterators(), 0, "Number of available iterators in the pool is incorrect");
-
-        TableFeature datum = (TableFeature)iterator.next().get(0).getUnderlyingObject();
-
-        assertTrue(datum.getLocation().equals(testSite1));
-        assertTrue(datum.get("COL1").equals("A"));
-        assertTrue(datum.get("COL2").equals("B"));
-        assertTrue(datum.get("COL3").equals("C"));
-
-        iteratorPool.release(iterator);
-
-        Assert.assertEquals(iteratorPool.numIterators(), 1, "Number of iterators in the pool is incorrect");
-        Assert.assertEquals(iteratorPool.numAvailableIterators(), 1, "Number of available iterators in the pool is incorrect");
-    }
-
-    @Test
-    public void testCreateMultipleIterators() {
-        ReferenceOrderedQueryDataPool iteratorPool = new ReferenceOrderedQueryDataPool(triplet,builder,seq.getSequenceDictionary(),genomeLocParser);
-        LocationAwareSeekableRODIterator iterator1 = iteratorPool.iterator( new MappedStreamSegment(testInterval1) );
-
-        // Create a new iterator at position 2.
-        LocationAwareSeekableRODIterator iterator2 = iteratorPool.iterator( new MappedStreamSegment(testInterval2) );
-
-        Assert.assertEquals(iteratorPool.numIterators(), 2, "Number of iterators in the pool is incorrect");
-        Assert.assertEquals(iteratorPool.numAvailableIterators(), 0, "Number of available iterators in the pool is incorrect");
-
-        // Test out-of-order access: first iterator2, then iterator1.
-        // Ugh...first call to a region needs to be a seek.
-        TableFeature datum = (TableFeature)iterator2.seekForward(testSite2).get(0).getUnderlyingObject();
-        assertTrue(datum.getLocation().equals(testSite2));
-        assertTrue(datum.get("COL1").equals("C"));
-        assertTrue(datum.get("COL2").equals("D"));
-        assertTrue(datum.get("COL3").equals("E"));
-
-        datum = (TableFeature)iterator1.next().get(0).getUnderlyingObject();
-        assertTrue(datum.getLocation().equals(testSite1));
-        assertTrue(datum.get("COL1").equals("A"));
-        assertTrue(datum.get("COL2").equals("B"));
-        assertTrue(datum.get("COL3").equals("C"));
-
-        // Advance iterator2, and make sure both iterator's contents are still correct.
-        datum = (TableFeature)iterator2.next().get(0).getUnderlyingObject();
-        assertTrue(datum.getLocation().equals(testSite3));
-        assertTrue(datum.get("COL1").equals("F"));
-        assertTrue(datum.get("COL2").equals("G"));
-        assertTrue(datum.get("COL3").equals("H"));
-
-        datum = (TableFeature)iterator1.next().get(0).getUnderlyingObject();
-        assertTrue(datum.getLocation().equals(testSite2));
-        assertTrue(datum.get("COL1").equals("C"));
-        assertTrue(datum.get("COL2").equals("D"));
-        assertTrue(datum.get("COL3").equals("E"));
-
-        // Cleanup, and make sure the number of iterators dies appropriately.
-        iteratorPool.release(iterator1);
-
-        Assert.assertEquals(iteratorPool.numIterators(), 2, "Number of iterators in the pool is incorrect");
-        Assert.assertEquals(iteratorPool.numAvailableIterators(), 1, "Number of available iterators in the pool is incorrect");
-
-        iteratorPool.release(iterator2);
-
-        Assert.assertEquals(iteratorPool.numIterators(), 2, "Number of iterators in the pool is incorrect");
-        Assert.assertEquals(iteratorPool.numAvailableIterators(), 2, "Number of available iterators in the pool is incorrect");
-    }
-
-    @Test
-    public void testIteratorConservation() {
-        ReferenceOrderedDataPool iteratorPool = new ReferenceOrderedDataPool(triplet,builder,seq.getSequenceDictionary(),genomeLocParser,false);
-        LocationAwareSeekableRODIterator iterator = iteratorPool.iterator( new MappedStreamSegment(testSite1) );
-
-        Assert.assertEquals(iteratorPool.numIterators(), 1, "Number of iterators in the pool is incorrect");
-        Assert.assertEquals(iteratorPool.numAvailableIterators(), 0, "Number of available iterators in the pool is incorrect");
-
-        TableFeature datum = (TableFeature)iterator.next().get(0).getUnderlyingObject();
-        assertTrue(datum.getLocation().equals(testSite1));
-        assertTrue(datum.get("COL1").equals("A"));
-        assertTrue(datum.get("COL2").equals("B"));
-        assertTrue(datum.get("COL3").equals("C"));
-
-        iteratorPool.release(iterator);
-
-        // Create another iterator after the current iterator.
-        iterator = iteratorPool.iterator( new MappedStreamSegment(testSite3) );
-
-        // Make sure that the previously acquired iterator was reused.
-        Assert.assertEquals(iteratorPool.numIterators(), 1, "Number of iterators in the pool is incorrect");
-        Assert.assertEquals(iteratorPool.numAvailableIterators(), 0, "Number of available iterators in the pool is incorrect");
-
-        datum = (TableFeature)iterator.seekForward(testSite3).get(0).getUnderlyingObject();
-        assertTrue(datum.getLocation().equals(testSite3));
-        assertTrue(datum.get("COL1").equals("F"));
-        assertTrue(datum.get("COL2").equals("G"));
-        assertTrue(datum.get("COL3").equals("H"));
-
-        iteratorPool.release(iterator);
-
-        Assert.assertEquals(iteratorPool.numIterators(), 1, "Number of iterators in the pool is incorrect");
-        Assert.assertEquals(iteratorPool.numAvailableIterators(), 1, "Number of available iterators in the pool is incorrect");
-    }
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/datasources/rmd/ReferenceOrderedQueryDataPoolUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/datasources/rmd/ReferenceOrderedQueryDataPoolUnitTest.java
deleted file mode 100644
index 6c403cd..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/datasources/rmd/ReferenceOrderedQueryDataPoolUnitTest.java
+++ /dev/null
@@ -1,89 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.datasources.rmd;
-
-import htsjdk.samtools.reference.IndexedFastaSequenceFile;
-import htsjdk.tribble.Feature;
-import org.broadinstitute.gatk.utils.BaseTest;
-import org.broadinstitute.gatk.utils.commandline.Tags;
-import org.broadinstitute.gatk.engine.refdata.utils.*;
-import org.broadinstitute.gatk.utils.GenomeLoc;
-import org.broadinstitute.gatk.utils.GenomeLocParser;
-import org.broadinstitute.gatk.utils.fasta.CachingIndexedFastaSequenceFile;
-import org.testng.Assert;
-import org.testng.annotations.Test;
-
-import java.io.File;
-import java.io.IOException;
-import java.util.List;
-
-public class ReferenceOrderedQueryDataPoolUnitTest extends BaseTest{
-    @Test
-    public void testCloseFilePointers() throws IOException {
-        // Build up query parameters
-        File file = new File(BaseTest.privateTestDir + "NA12878.hg19.example1.vcf");
-        RMDTriplet triplet = new RMDTriplet("test", "VCF", file.getAbsolutePath(), RMDTriplet.RMDStorageType.FILE, new Tags());
-        IndexedFastaSequenceFile seq = new CachingIndexedFastaSequenceFile(new File(BaseTest.hg19Reference));
-        GenomeLocParser parser = new GenomeLocParser(seq);
-        GenomeLoc loc = parser.createGenomeLoc("20", 1, 100000);
-        TestRMDTrackBuilder builder = new TestRMDTrackBuilder(seq.getSequenceDictionary(), parser);
-
-        // Create the query data pool
-        ReferenceOrderedQueryDataPool pool = new ReferenceOrderedQueryDataPool(triplet, builder, seq.getSequenceDictionary(), parser);
-
-        for (int i = 0; i < 3; i++) {
-            // Ensure our tribble iterators are closed.
-            CheckableCloseableTribbleIterator.clearThreadIterators();
-            Assert.assertTrue(CheckableCloseableTribbleIterator.getThreadIterators().isEmpty(), "Tribble iterators list was not cleared.");
-
-            // Request the the rodIterator
-            LocationAwareSeekableRODIterator rodIterator = pool.iterator(new MappedStreamSegment(loc));
-
-            // Run normal iteration over rodIterator
-            Assert.assertTrue(rodIterator.hasNext(), "Rod iterator does not have a next value.");
-            GenomeLoc rodIteratorLocation = rodIterator.next().getLocation();
-            Assert.assertEquals(rodIteratorLocation.getContig(), "20", "Instead of chr 20 rod iterator was at location " + rodIteratorLocation);
-
-            // Check that the underlying tribbleIterators are still open.
-            List<CheckableCloseableTribbleIterator<? extends Feature>> tribbleIterators = CheckableCloseableTribbleIterator.getThreadIterators();
-            Assert.assertFalse(tribbleIterators.isEmpty(), "Tribble iterators list is empty");
-            for (CheckableCloseableTribbleIterator<? extends Feature> tribbleIterator: tribbleIterators) {
-                Assert.assertFalse(tribbleIterator.isClosed(), "Tribble iterator is closed but should be still open.");
-            }
-
-            // Releasing the rodIterator should close the underlying tribbleIterator.
-            pool.release(rodIterator);
-
-            // Check that the underlying tribbleIterators are now closed.
-            for (CheckableCloseableTribbleIterator<? extends Feature> tribbleIterator: tribbleIterators) {
-                Assert.assertTrue(tribbleIterator.isClosed(), "Tribble iterator is open but should be now closed.");
-            }
-        }
-
-        // Extra cleanup.
-        CheckableCloseableTribbleIterator.clearThreadIterators();
-    }
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/downsampling/AlleleBiasedDownsamplingUtilsUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/downsampling/AlleleBiasedDownsamplingUtilsUnitTest.java
deleted file mode 100644
index 2d86f73..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/downsampling/AlleleBiasedDownsamplingUtilsUnitTest.java
+++ /dev/null
@@ -1,219 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.downsampling;
-
-import htsjdk.samtools.CigarElement;
-import htsjdk.samtools.CigarOperator;
-import htsjdk.samtools.SAMFileHeader;
-import org.apache.log4j.Logger;
-import org.broadinstitute.gatk.utils.BaseTest;
-import org.broadinstitute.gatk.utils.exceptions.UserException;
-import org.broadinstitute.gatk.utils.pileup.PileupElement;
-import org.broadinstitute.gatk.utils.sam.ArtificialSAMUtils;
-import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
-import org.testng.Assert;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
-
-import java.io.File;
-import java.util.*;
-
-
-/**
- * Basic unit test for AlleleBiasedDownsamplingUtils
- */
-public class AlleleBiasedDownsamplingUtilsUnitTest extends BaseTest {
-
-
-    @Test
-    public void testSmartDownsampling() {
-
-        final int[] idealHetAlleleCounts = new int[]{0, 50, 0, 50};
-        final int[] idealHomAlleleCounts = new int[]{0, 100, 0, 0};
-
-        // no contamination, no removal
-        testOneCase(0, 0, 0, 0, 0.1, 100, idealHetAlleleCounts, idealHetAlleleCounts);
-        testOneCase(0, 0, 0, 0, 0.1, 100, idealHomAlleleCounts, idealHomAlleleCounts);
-
-        // hom sample, het contaminant, different alleles
-        testOneCase(5, 0, 0, 0, 0.1, 100, idealHomAlleleCounts, idealHomAlleleCounts);
-        testOneCase(0, 0, 5, 0, 0.1, 100, idealHomAlleleCounts, idealHomAlleleCounts);
-        testOneCase(0, 0, 0, 5, 0.1, 100, idealHomAlleleCounts, idealHomAlleleCounts);
-
-        // hom sample, hom contaminant, different alleles
-        testOneCase(10, 0, 0, 0, 0.1, 100, idealHomAlleleCounts, idealHomAlleleCounts);
-        testOneCase(0, 0, 10, 0, 0.1, 100, idealHomAlleleCounts, idealHomAlleleCounts);
-        testOneCase(0, 0, 0, 10, 0.1, 100, idealHomAlleleCounts, idealHomAlleleCounts);
-
-        // het sample, het contaminant, different alleles
-        testOneCase(5, 0, 0, 0, 0.1, 100, idealHetAlleleCounts, idealHetAlleleCounts);
-        testOneCase(0, 0, 5, 0, 0.1, 100, idealHetAlleleCounts, idealHetAlleleCounts);
-
-        // het sample, hom contaminant, different alleles
-        testOneCase(10, 0, 0, 0, 0.1, 100, idealHetAlleleCounts, idealHetAlleleCounts);
-        testOneCase(0, 0, 10, 0, 0.1, 100, idealHetAlleleCounts, idealHetAlleleCounts);
-
-        // hom sample, het contaminant, overlapping alleles
-        final int[] enhancedHomAlleleCounts = new int[]{0, 105, 0, 0};
-        testOneCase(5, 5, 0, 0, 0.1, 100, idealHomAlleleCounts, enhancedHomAlleleCounts);
-        testOneCase(0, 5, 5, 0, 0.1, 100, idealHomAlleleCounts, enhancedHomAlleleCounts);
-        testOneCase(0, 5, 0, 5, 0.1, 100, idealHomAlleleCounts, enhancedHomAlleleCounts);
-
-        // hom sample, hom contaminant, overlapping alleles
-        testOneCase(0, 10, 0, 0, 0.1, 100, idealHomAlleleCounts, new int[]{0, 110, 0, 0});
-
-        // het sample, het contaminant, overlapping alleles
-        testOneCase(5, 5, 0, 0, 0.1, 100, idealHetAlleleCounts, idealHetAlleleCounts);
-        testOneCase(0, 5, 5, 0, 0.1, 100, idealHetAlleleCounts, idealHetAlleleCounts);
-        testOneCase(0, 5, 0, 5, 0.1, 100, idealHetAlleleCounts, new int[]{0, 55, 0, 55});
-        testOneCase(5, 0, 0, 5, 0.1, 100, idealHetAlleleCounts, idealHetAlleleCounts);
-        testOneCase(0, 0, 5, 5, 0.1, 100, idealHetAlleleCounts, idealHetAlleleCounts);
-
-        // het sample, hom contaminant, overlapping alleles
-        testOneCase(0, 10, 0, 0, 0.1, 100, idealHetAlleleCounts, idealHetAlleleCounts);
-        testOneCase(0, 0, 0, 10, 0.1, 100, idealHetAlleleCounts, idealHetAlleleCounts);
-    }
-
-    private static void testOneCase(final int addA, final int addC, final int addG, final int addT, final double contaminationFraction,
-                                    final int pileupSize, final int[] initialCounts, final int[] targetCounts) {
-
-        final int[] actualCounts = initialCounts.clone();
-        actualCounts[0] += addA;
-        actualCounts[1] += addC;
-        actualCounts[2] += addG;
-        actualCounts[3] += addT;
-
-        final int[] results = AlleleBiasedDownsamplingUtils.runSmartDownsampling(actualCounts, (int)(pileupSize * contaminationFraction));
-        Assert.assertTrue(countsAreEqual(results, targetCounts));
-    }
-
-    private static boolean countsAreEqual(final int[] counts1, final int[] counts2) {
-        for ( int i = 0; i < 4; i++ ) {
-            if ( counts1[i] != counts2[i] )
-                return false;
-        }
-        return true;
-    }
-
-    @DataProvider(name = "BiasedDownsamplingTest")
-    public Object[][] makeBiasedDownsamplingTest() {
-        final List<Object[]> tests = new LinkedList<Object[]>();
-
-        final SAMFileHeader header = ArtificialSAMUtils.createArtificialSamHeader(1, 1, 1000);
-
-        for ( final int originalCount : Arrays.asList(1, 2, 10, 1000) ) {
-            for ( final int toRemove : Arrays.asList(0, 1, 2, 10, 1000) ) {
-                if ( toRemove <= originalCount )
-                    tests.add(new Object[]{header, originalCount, toRemove});
-            }
-        }
-
-        return tests.toArray(new Object[][]{});
-    }
-
-    @Test(dataProvider = "BiasedDownsamplingTest")
-    public void testBiasedDownsampling(final SAMFileHeader header, final int originalCount, final int toRemove) {
-
-        final LinkedList<PileupElement> elements = new LinkedList<>();
-        for ( int i = 0; i < originalCount; i++ ) {
-            final GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(header, "read", 0, 1, 1);
-            elements.add(new PileupElement(read, 0, new CigarElement(1, CigarOperator.M), 0, 0));
-        }
-
-        final List<PileupElement> result = AlleleBiasedDownsamplingUtils.downsampleElements(elements, originalCount, toRemove);
-
-        Assert.assertEquals(result.size(), toRemove);
-    }
-
-    @Test
-    public void testLoadContaminationFileDetails(){
-        Logger logger=org.apache.log4j.Logger.getRootLogger();
-
-        final String ArtificalBAMLocation = privateTestDir + "ArtificallyContaminatedBams/";
-        final File ContamFile1=new File(ArtificalBAMLocation+"contamination.case.1.txt");
-
-        Map<String,Double> Contam1=new HashMap<String,Double>();
-        Set<String> Samples1=new HashSet<String>();
-
-        Contam1.put("NA11918",0.15);
-        Samples1.addAll(Contam1.keySet());
-        testLoadFile(ContamFile1,Samples1,Contam1,logger);
-
-        Contam1.put("NA12842",0.13);
-        Samples1.addAll(Contam1.keySet());
-        testLoadFile(ContamFile1,Samples1,Contam1,logger);
-
-        Samples1.add("DUMMY");
-        testLoadFile(ContamFile1,Samples1,Contam1,logger);
-   }
-
-    private static void testLoadFile(final File file, final Set<String> Samples, final Map<String,Double> map, Logger logger){
-        Map<String,Double> loadedMap = AlleleBiasedDownsamplingUtils.loadContaminationFile(file,0.0,Samples,logger);
-        Assert.assertTrue(loadedMap.equals(map));
-    }
-
-    @DataProvider(name = "goodContaminationFiles")
-    public Integer[][] goodContaminationFiles() {
-        return new Integer[][]{
-                {1, 2},
-                {2, 3},
-                {3, 2},
-                {4, 2},
-                {5, 3},
-                {6, 2},
-                {7, 2},
-                {8, 2}
-        };
-    }
-
-    @Test(dataProvider = "goodContaminationFiles")
-    public void testLoadContaminationFile(final Integer ArtificalBAMnumber, final Integer numberOfSamples) {
-        final String ArtificialBAM = String.format("ArtificallyContaminatedBams/contamination.case.%d.txt", ArtificalBAMnumber);
-        Logger logger = org.apache.log4j.Logger.getRootLogger();
-
-        File ContamFile = new File(privateTestDir, ArtificialBAM);
-        Assert.assertTrue(AlleleBiasedDownsamplingUtils.loadContaminationFile(ContamFile, 0.0, null, logger).size() == numberOfSamples);
-
-    }
-
-
-    @DataProvider(name = "badContaminationFiles")
-    public Integer[][] badContaminationFiles() {
-        return new Integer[][]{{1}, {2}, {3}, {4}, {5}};
-    }
-
-    @Test(dataProvider = "badContaminationFiles", expectedExceptions = UserException.MalformedFile.class)
-    public void testLoadBrokenContaminationFile(final int i) {
-        Logger logger = org.apache.log4j.Logger.getRootLogger();
-        final String ArtificalBAMLocation = privateTestDir + "ArtificallyContaminatedBams/";
-
-        File ContaminationFile = new File(ArtificalBAMLocation + String.format("contamination.case.broken.%d.txt", i));
-        AlleleBiasedDownsamplingUtils.loadContaminationFile(ContaminationFile, 0.0, null, logger);
-
-    }
-
-
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/downsampling/DownsamplingIntegrationTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/downsampling/DownsamplingIntegrationTest.java
deleted file mode 100644
index 2f171de..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/downsampling/DownsamplingIntegrationTest.java
+++ /dev/null
@@ -1,44 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.downsampling;
-
-import org.broadinstitute.gatk.engine.walkers.WalkerTest;
-import org.broadinstitute.gatk.utils.exceptions.UserException;
-import org.testng.annotations.Test;
-
-public class DownsamplingIntegrationTest extends WalkerTest {
-
-    @Test
-    public void testDetectLowDcovValueWithLocusTraversal() {
-        final WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec(
-            "-T CountLoci -R " + publicTestDir + "exampleFASTA.fasta -I " + publicTestDir + "exampleBAM.bam -o %s " +
-            "-dcov " + (DownsamplingMethod.MINIMUM_SAFE_COVERAGE_TARGET_FOR_LOCUS_BASED_TRAVERSALS - 1),
-            1,
-            UserException.class
-        );
-        executeTest("testDetectLowDcovValueWithLocusTraversal", spec);
-    }
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/downsampling/DownsamplingReadsIteratorUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/downsampling/DownsamplingReadsIteratorUnitTest.java
deleted file mode 100644
index 19eec62..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/downsampling/DownsamplingReadsIteratorUnitTest.java
+++ /dev/null
@@ -1,139 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.downsampling;
-
-import htsjdk.samtools.SAMFileHeader;
-import htsjdk.samtools.SAMReadGroupRecord;
-import htsjdk.samtools.SAMRecord;
-import org.broadinstitute.gatk.utils.BaseTest;
-import org.broadinstitute.gatk.engine.GenomeAnalysisEngine;
-import org.broadinstitute.gatk.utils.sam.ArtificialSAMUtils;
-import org.broadinstitute.gatk.utils.sam.ArtificialSingleSampleReadStream;
-import org.broadinstitute.gatk.utils.sam.ArtificialSingleSampleReadStreamAnalyzer;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
-
-import java.util.Arrays;
-
-public class DownsamplingReadsIteratorUnitTest extends BaseTest {
-
-    private static class DownsamplingReadsIteratorTest extends TestDataProvider {
-        private DownsamplingReadsIterator downsamplingIter;
-        private int targetCoverage;
-        private ArtificialSingleSampleReadStream stream;
-        private ArtificialSingleSampleReadStreamAnalyzer streamAnalyzer;
-
-        public DownsamplingReadsIteratorTest( ArtificialSingleSampleReadStream stream, int targetCoverage ) {
-            super(DownsamplingReadsIteratorTest.class);
-
-            this.stream = stream;
-            this.targetCoverage = targetCoverage;
-
-            setName(String.format("%s: targetCoverage=%d numContigs=%d stacksPerContig=%d readsPerStack=%d-%d distanceBetweenStacks=%d-%d readLength=%d-%d unmappedReads=%d",
-                    getClass().getSimpleName(),
-                    targetCoverage,
-                    stream.getNumContigs(),
-                    stream.getNumStacksPerContig(),
-                    stream.getMinReadsPerStack(),
-                    stream.getMaxReadsPerStack(),
-                    stream.getMinDistanceBetweenStacks(),
-                    stream.getMaxDistanceBetweenStacks(),
-                    stream.getMinReadLength(),
-                    stream.getMaxReadLength(),
-                    stream.getNumUnmappedReads()));
-        }
-
-        public void run() {
-            streamAnalyzer = new PositionallyDownsampledArtificialSingleSampleReadStreamAnalyzer(stream, targetCoverage);
-            downsamplingIter = new DownsamplingReadsIterator(stream.getGATKSAMIterator(), new SimplePositionalDownsampler<SAMRecord>(targetCoverage));
-
-            streamAnalyzer.analyze(downsamplingIter);
-
-            // Check whether the observed properties of the downsampled stream are what they should be
-            streamAnalyzer.validate();
-
-            // Allow memory used by this test to be reclaimed
-            stream = null;
-            streamAnalyzer = null;
-            downsamplingIter = null;
-        }
-    }
-
-    @DataProvider(name = "DownsamplingReadsIteratorTestDataProvider")
-    public Object[][] createDownsamplingReadsIteratorTests() {
-        SAMFileHeader header = ArtificialSAMUtils.createArtificialSamHeader(5, 1, 10000);
-        String readGroupID = "testReadGroup";
-        SAMReadGroupRecord readGroup = new SAMReadGroupRecord(readGroupID);
-        readGroup.setSample("testSample");
-        header.addReadGroup(readGroup);
-
-        // Values that don't vary across tests
-        int targetCoverage = 10;
-        int minReadLength = 50;
-        int maxReadLength = 100;
-        int minDistanceBetweenStacks = 1;
-        int maxDistanceBetweenStacks = maxReadLength + 1;
-
-        GenomeAnalysisEngine.resetRandomGenerator();
-
-        // brute force testing!
-        for ( int numContigs : Arrays.asList(1, 2, 5) ) {
-            for ( int stacksPerContig : Arrays.asList(1, 2, 10) ) {
-                for ( int minReadsPerStack : Arrays.asList(1, targetCoverage / 2, targetCoverage, targetCoverage - 1, targetCoverage + 1, targetCoverage * 2) ) {
-                    for ( int maxReadsPerStack : Arrays.asList(1, targetCoverage / 2, targetCoverage, targetCoverage - 1, targetCoverage + 1, targetCoverage * 2) ) {
-                        for ( int numUnmappedReads : Arrays.asList(0, 1, targetCoverage, targetCoverage * 2) ) {
-                            // Only interested in sane read stream configurations here
-                            if ( minReadsPerStack <= maxReadsPerStack ) {
-                                new DownsamplingReadsIteratorTest(new ArtificialSingleSampleReadStream(header,
-                                                                                                       readGroupID,
-                                                                                                       numContigs,
-                                                                                                       stacksPerContig,
-                                                                                                       minReadsPerStack,
-                                                                                                       maxReadsPerStack,
-                                                                                                       minDistanceBetweenStacks,
-                                                                                                       maxDistanceBetweenStacks,
-                                                                                                       minReadLength,
-                                                                                                       maxReadLength,
-                                                                                                       numUnmappedReads),
-                                                                  targetCoverage);
-                            }
-                        }
-                    }
-                }
-            }
-        }
-
-        return DownsamplingReadsIteratorTest.getTests(DownsamplingReadsIteratorTest.class);
-    }
-
-    @Test(dataProvider = "DownsamplingReadsIteratorTestDataProvider")
-    public void runDownsamplingReadsIteratorTest( DownsamplingReadsIteratorTest test ) {
-        logger.warn("Running test: " + test);
-
-        GenomeAnalysisEngine.resetRandomGenerator();
-        test.run();
-    }
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/downsampling/FractionalDownsamplerUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/downsampling/FractionalDownsamplerUnitTest.java
deleted file mode 100644
index 9185374..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/downsampling/FractionalDownsamplerUnitTest.java
+++ /dev/null
@@ -1,158 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.downsampling;
-
-import htsjdk.samtools.SAMFileHeader;
-import htsjdk.samtools.SAMRecord;
-import org.broadinstitute.gatk.utils.BaseTest;
-import org.broadinstitute.gatk.engine.GenomeAnalysisEngine;
-import org.broadinstitute.gatk.utils.sam.ArtificialSAMUtils;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
-import org.testng.Assert;
-
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.List;
-
-public class FractionalDownsamplerUnitTest extends BaseTest {
-
-    private static class FractionalDownsamplerTest extends TestDataProvider {
-        double fraction;
-        int totalReads;
-        int expectedMinNumReadsAfterDownsampling;
-        int expectedMaxNumReadsAfterDownsampling;
-        int expectedMinDiscardedItems;
-        int expectedMaxDiscardedItems;
-
-        private static final double EXPECTED_ACCURACY = 0.05; // should be accurate to within +/- this percent
-
-        public FractionalDownsamplerTest( double fraction, int totalReads ) {
-            super(FractionalDownsamplerTest.class);
-
-            this.fraction = fraction;
-            this.totalReads = totalReads;
-
-            calculateExpectations();
-
-            setName(String.format("%s: fraction=%.2f totalReads=%d expectedMinNumReadsAfterDownsampling=%d expectedMaxNumReadsAfterDownsampling=%d",
-                    getClass().getSimpleName(), fraction, totalReads, expectedMinNumReadsAfterDownsampling, expectedMaxNumReadsAfterDownsampling));
-        }
-
-        private void calculateExpectations() {
-            // Require an exact match in the 0% and 100% cases
-            if ( fraction == 0.0 ) {
-                expectedMinNumReadsAfterDownsampling = expectedMaxNumReadsAfterDownsampling = 0;
-                expectedMinDiscardedItems = expectedMaxDiscardedItems = totalReads;
-            }
-            else if ( fraction == 1.0 ) {
-                expectedMinNumReadsAfterDownsampling = expectedMaxNumReadsAfterDownsampling = totalReads;
-                expectedMinDiscardedItems = expectedMaxDiscardedItems = 0;
-            }
-            else {
-                expectedMinNumReadsAfterDownsampling = Math.max((int)((fraction - EXPECTED_ACCURACY) * totalReads), 0);
-                expectedMaxNumReadsAfterDownsampling = Math.min((int) ((fraction + EXPECTED_ACCURACY) * totalReads), totalReads);
-                expectedMinDiscardedItems = totalReads - expectedMaxNumReadsAfterDownsampling;
-                expectedMaxDiscardedItems = totalReads - expectedMinNumReadsAfterDownsampling;
-            }
-        }
-
-        public Collection<SAMRecord> createReads() {
-            Collection<SAMRecord> reads = new ArrayList<SAMRecord>(totalReads);
-
-            SAMFileHeader header = ArtificialSAMUtils.createArtificialSamHeader(1, 1, 1000000);
-            reads.addAll(ArtificialSAMUtils.createStackOfIdenticalArtificialReads(totalReads, header, "foo", 0, 1, 100));
-
-            return reads;
-        }
-    }
-
-    @DataProvider(name = "FractionalDownsamplerTestDataProvider")
-    public Object[][] createFractionalDownsamplerTestData() {
-        for ( double fraction : Arrays.asList(0.0, 0.25, 0.5, 0.75, 1.0) ) {
-            for ( int totalReads : Arrays.asList(0, 1000, 10000) ) {
-                new FractionalDownsamplerTest(fraction, totalReads);
-            }
-        }
-
-        return FractionalDownsamplerTest.getTests(FractionalDownsamplerTest.class);
-    }
-
-    @Test(dataProvider = "FractionalDownsamplerTestDataProvider")
-    public void runFractionalDownsamplerTest( FractionalDownsamplerTest test ) {
-        logger.warn("Running test: " + test);
-
-        GenomeAnalysisEngine.resetRandomGenerator();
-
-        ReadsDownsampler<SAMRecord> downsampler = new FractionalDownsampler<SAMRecord>(test.fraction);
-
-        downsampler.submit(test.createReads());
-
-        if ( test.totalReads > 0 ) {
-            if ( test.fraction > FractionalDownsamplerTest.EXPECTED_ACCURACY ) {
-                Assert.assertTrue(downsampler.hasFinalizedItems());
-                Assert.assertTrue(downsampler.peekFinalized() != null);
-            }
-            Assert.assertFalse(downsampler.hasPendingItems());
-            Assert.assertTrue(downsampler.peekPending() == null);
-        }
-        else {
-            Assert.assertFalse(downsampler.hasFinalizedItems() || downsampler.hasPendingItems());
-            Assert.assertTrue(downsampler.peekFinalized() == null && downsampler.peekPending() == null);
-        }
-
-        downsampler.signalEndOfInput();
-
-        if ( test.totalReads > 0 ) {
-            if ( test.fraction > FractionalDownsamplerTest.EXPECTED_ACCURACY ) {
-                Assert.assertTrue(downsampler.hasFinalizedItems());
-                Assert.assertTrue(downsampler.peekFinalized() != null);
-            }
-            Assert.assertFalse(downsampler.hasPendingItems());
-            Assert.assertTrue(downsampler.peekPending() == null);
-        }
-        else {
-            Assert.assertFalse(downsampler.hasFinalizedItems() || downsampler.hasPendingItems());
-            Assert.assertTrue(downsampler.peekFinalized() == null && downsampler.peekPending() == null);
-        }
-
-        List<SAMRecord> downsampledReads = downsampler.consumeFinalizedItems();
-        Assert.assertFalse(downsampler.hasFinalizedItems() || downsampler.hasPendingItems());
-        Assert.assertTrue(downsampler.peekFinalized() == null && downsampler.peekPending() == null);
-
-        Assert.assertTrue(downsampledReads.size() >= test.expectedMinNumReadsAfterDownsampling &&
-                          downsampledReads.size() <= test.expectedMaxNumReadsAfterDownsampling);
-
-        Assert.assertTrue(downsampler.getNumberOfDiscardedItems() >= test.expectedMinDiscardedItems &&
-                          downsampler.getNumberOfDiscardedItems() <= test.expectedMaxDiscardedItems);
-
-        Assert.assertEquals(downsampler.getNumberOfDiscardedItems(), test.totalReads - downsampledReads.size());
-
-        downsampler.resetStats();
-        Assert.assertEquals(downsampler.getNumberOfDiscardedItems(), 0);
-    }
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/downsampling/LevelingDownsamplerUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/downsampling/LevelingDownsamplerUnitTest.java
deleted file mode 100644
index 2544b72..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/downsampling/LevelingDownsamplerUnitTest.java
+++ /dev/null
@@ -1,163 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.downsampling;
-
-import org.broadinstitute.gatk.utils.BaseTest;
-import org.broadinstitute.gatk.engine.GenomeAnalysisEngine;
-import org.testng.annotations.Test;
-import org.testng.annotations.DataProvider;
-import org.testng.Assert;
-
-import java.util.*;
-
-public class LevelingDownsamplerUnitTest extends BaseTest {
-
-    private static class LevelingDownsamplerUniformStacksTest extends TestDataProvider {
-        public enum DataStructure { LINKED_LIST, ARRAY_LIST }
-
-        int targetSize;
-        int numStacks;
-        int stackSize;
-        DataStructure dataStructure;
-        int expectedSize;
-
-        public LevelingDownsamplerUniformStacksTest( int targetSize, int numStacks, int stackSize, DataStructure dataStructure ) {
-            super(LevelingDownsamplerUniformStacksTest.class);
-
-            this.targetSize = targetSize;
-            this.numStacks = numStacks;
-            this.stackSize = stackSize;
-            this.dataStructure = dataStructure;
-            expectedSize = calculateExpectedDownsampledStackSize();
-
-            setName(String.format("%s: targetSize=%d numStacks=%d stackSize=%d dataStructure=%s expectedSize=%d",
-                    getClass().getSimpleName(), targetSize, numStacks, stackSize, dataStructure, expectedSize));
-        }
-
-        public Collection<List<Object>> createStacks() {
-            Collection<List<Object>> stacks = new ArrayList<List<Object>>();
-
-            for ( int i = 1; i <= numStacks; i++ ) {
-                List<Object> stack = dataStructure == DataStructure.LINKED_LIST ? new LinkedList<Object>() : new ArrayList<Object>();
-
-                for ( int j = 1; j <= stackSize; j++ ) {
-                    stack.add(new Object());
-                }
-
-                stacks.add(stack);
-            }
-
-            return stacks;
-        }
-
-        private int calculateExpectedDownsampledStackSize() {
-            int numItemsToRemove = numStacks * stackSize - targetSize;
-
-            if ( numStacks == 0 ) {
-                return 0;
-            }
-            else if ( numItemsToRemove <= 0 ) {
-                return stackSize;
-            }
-
-            return Math.max(1, stackSize - (numItemsToRemove / numStacks));
-        }
-    }
-
-    @DataProvider(name = "UniformStacksDataProvider")
-    public Object[][] createUniformStacksTestData() {
-        for ( int targetSize = 1; targetSize <= 10000; targetSize *= 10 ) {
-            for ( int numStacks = 0; numStacks <= 10; numStacks++ ) {
-                for ( int stackSize = 1; stackSize <= 1000; stackSize *= 10 ) {
-                    for ( LevelingDownsamplerUniformStacksTest.DataStructure dataStructure : LevelingDownsamplerUniformStacksTest.DataStructure.values() ) {
-                        new LevelingDownsamplerUniformStacksTest(targetSize, numStacks, stackSize, dataStructure);
-                    }
-                }
-            }
-        }
-
-        return LevelingDownsamplerUniformStacksTest.getTests(LevelingDownsamplerUniformStacksTest.class);
-    }
-
-    @Test( dataProvider = "UniformStacksDataProvider" )
-    public void testLevelingDownsamplerWithUniformStacks( LevelingDownsamplerUniformStacksTest test ) {
-        logger.warn("Running test: " + test);
-
-        GenomeAnalysisEngine.resetRandomGenerator();
-
-        Downsampler<List<Object>> downsampler = new LevelingDownsampler<List<Object>, Object>(test.targetSize);
-
-        downsampler.submit(test.createStacks());
-
-        if ( test.numStacks > 0 ) {
-            Assert.assertFalse(downsampler.hasFinalizedItems());
-            Assert.assertTrue(downsampler.peekFinalized() == null);
-            Assert.assertTrue(downsampler.hasPendingItems());
-            Assert.assertTrue(downsampler.peekPending() != null);
-        }
-        else {
-            Assert.assertFalse(downsampler.hasFinalizedItems() || downsampler.hasPendingItems());
-            Assert.assertTrue(downsampler.peekFinalized() == null && downsampler.peekPending() == null);
-        }
-
-        downsampler.signalEndOfInput();
-
-        if ( test.numStacks > 0 ) {
-            Assert.assertTrue(downsampler.hasFinalizedItems());
-            Assert.assertTrue(downsampler.peekFinalized() != null);
-            Assert.assertFalse(downsampler.hasPendingItems());
-            Assert.assertTrue(downsampler.peekPending() == null);
-        }
-        else {
-            Assert.assertFalse(downsampler.hasFinalizedItems() || downsampler.hasPendingItems());
-            Assert.assertTrue(downsampler.peekFinalized() == null && downsampler.peekPending() == null);
-        }
-
-        final int sizeFromDownsampler = downsampler.size();
-        List<List<Object>> downsampledStacks = downsampler.consumeFinalizedItems();
-        Assert.assertFalse(downsampler.hasFinalizedItems() || downsampler.hasPendingItems());
-        Assert.assertTrue(downsampler.peekFinalized() == null && downsampler.peekPending() == null);
-
-        Assert.assertEquals(downsampledStacks.size(), test.numStacks);
-
-        int totalRemainingItems = 0;
-        for ( List<Object> stack : downsampledStacks ) {
-            Assert.assertTrue(Math.abs(stack.size() - test.expectedSize) <= 1);
-            totalRemainingItems += stack.size();
-        }
-
-        Assert.assertEquals(sizeFromDownsampler, totalRemainingItems);
-        int numItemsReportedDiscarded = downsampler.getNumberOfDiscardedItems();
-        int numItemsActuallyDiscarded = test.numStacks * test.stackSize - totalRemainingItems;
-
-        Assert.assertEquals(numItemsReportedDiscarded, numItemsActuallyDiscarded);
-
-        downsampler.resetStats();
-        Assert.assertEquals(downsampler.getNumberOfDiscardedItems(), 0);
-
-        Assert.assertTrue(totalRemainingItems <= Math.max(test.targetSize, test.numStacks));
-    }
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/downsampling/PerSampleDownsamplingReadsIteratorUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/downsampling/PerSampleDownsamplingReadsIteratorUnitTest.java
deleted file mode 100644
index 2606a01..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/downsampling/PerSampleDownsamplingReadsIteratorUnitTest.java
+++ /dev/null
@@ -1,299 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.downsampling;
-
-import htsjdk.samtools.SAMFileHeader;
-import htsjdk.samtools.SAMReadGroupRecord;
-import htsjdk.samtools.SAMRecord;
-import org.broadinstitute.gatk.utils.BaseTest;
-import org.broadinstitute.gatk.engine.GenomeAnalysisEngine;
-import org.broadinstitute.gatk.engine.iterators.GATKSAMIterator;
-import org.broadinstitute.gatk.engine.iterators.VerifyingSamIterator;
-import org.broadinstitute.gatk.utils.MathUtils;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-import org.broadinstitute.gatk.utils.sam.ArtificialMultiSampleReadStream;
-import org.broadinstitute.gatk.utils.sam.ArtificialSAMUtils;
-import org.broadinstitute.gatk.utils.sam.ArtificialSingleSampleReadStream;
-import org.broadinstitute.gatk.utils.sam.ArtificialSingleSampleReadStreamAnalyzer;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
-
-import java.util.*;
-
-public class PerSampleDownsamplingReadsIteratorUnitTest extends BaseTest {
-
-    private static class PerSampleDownsamplingReadsIteratorTest extends TestDataProvider {
-
-        // TODO: tests should distinguish between variance across samples and variance within a sample
-
-        private enum StreamDensity {
-            SPARSE         (MAX_READ_LENGTH,     MAX_READ_LENGTH * 2),
-            DENSE          (1,                   MIN_READ_LENGTH),
-            MIXED          (1,                   MAX_READ_LENGTH * 2),
-            UNIFORM_DENSE  (1,                   1),
-            UNIFORM_SPARSE (MAX_READ_LENGTH * 2, MAX_READ_LENGTH * 2);
-
-            int minDistanceBetweenStacks;
-            int maxDistanceBetweenStacks;
-
-            StreamDensity( int minDistanceBetweenStacks, int maxDistanceBetweenStacks ) {
-                this.minDistanceBetweenStacks = minDistanceBetweenStacks;
-                this.maxDistanceBetweenStacks = maxDistanceBetweenStacks;
-            }
-
-            public String toString() {
-                return String.format("StreamDensity:%d-%d", minDistanceBetweenStacks, maxDistanceBetweenStacks);
-            }
-        }
-
-        private enum StreamStackDepth {
-            NON_UNIFORM_LOW   (1,  5),
-            NON_UNIFORM_HIGH  (15, 20),
-            NON_UNIFORM_MIXED (1,  20),
-            UNIFORM_SINGLE    (1,  1),
-            UNIFORM_LOW       (2,  2),
-            UNIFORM_HIGH      (20, 20),
-            UNIFORM_MEDIUM    (10, 10);   // should set target coverage to this value for testing
-
-            int minReadsPerStack;
-            int maxReadsPerStack;
-
-            StreamStackDepth( int minReadsPerStack, int maxReadsPerStack ) {
-                this.minReadsPerStack = minReadsPerStack;
-                this.maxReadsPerStack = maxReadsPerStack;
-            }
-
-            public boolean isUniform() {
-                return minReadsPerStack == maxReadsPerStack;
-            }
-
-            public String toString() {
-                return String.format("StreamStackDepth:%d-%d", minReadsPerStack, maxReadsPerStack);
-            }
-        }
-
-        private enum StreamStacksPerContig {
-            UNIFORM(20, 20),
-            NON_UNIFORM(1, 30);
-
-            int minStacksPerContig;
-            int maxStacksPerContig;
-
-            StreamStacksPerContig( int minStacksPerContig, int maxStacksPerContig ) {
-                this.minStacksPerContig = minStacksPerContig;
-                this.maxStacksPerContig = maxStacksPerContig;
-            }
-
-            public boolean isUniform() {
-                return minStacksPerContig == maxStacksPerContig;
-            }
-
-            public String toString() {
-                return String.format("StreamStacksPerContig:%d-%d", minStacksPerContig, maxStacksPerContig);
-            }
-        }
-
-        // Not interested in testing multiple ranges for the read lengths, as none of our current
-        // downsamplers are affected by read length
-        private static final int MIN_READ_LENGTH = 50;
-        private static final int MAX_READ_LENGTH = 150;
-
-        private ReadsDownsamplerFactory<SAMRecord> downsamplerFactory;
-        private int targetCoverage;
-        private int numSamples;
-        private int minContigs;
-        private int maxContigs;
-        private StreamDensity streamDensity;
-        private StreamStackDepth streamStackDepth;
-        private StreamStacksPerContig streamStacksPerContig;
-        private double unmappedReadsFraction;
-        private int unmappedReadsCount;
-        private boolean verifySortedness;
-
-        private ArtificialMultiSampleReadStream mergedReadStream;
-        private Map<String, ArtificialSingleSampleReadStream> perSampleArtificialReadStreams;
-        private Map<String, ArtificialSingleSampleReadStreamAnalyzer> perSampleStreamAnalyzers;
-        private SAMFileHeader header;
-
-        public PerSampleDownsamplingReadsIteratorTest( ReadsDownsamplerFactory<SAMRecord> downsamplerFactory,
-                                                       int targetCoverage,
-                                                       int numSamples,
-                                                       int minContigs,
-                                                       int maxContigs,
-                                                       StreamDensity streamDensity,
-                                                       StreamStackDepth streamStackDepth,
-                                                       StreamStacksPerContig streamStacksPerContig,
-                                                       double unmappedReadsFraction,
-                                                       int unmappedReadsCount,
-                                                       boolean verifySortedness ) {
-            super(PerSampleDownsamplingReadsIteratorTest.class);
-
-            this.downsamplerFactory = downsamplerFactory;
-            this.targetCoverage = targetCoverage;
-            this.numSamples = numSamples;
-            this.minContigs = minContigs;
-            this.maxContigs = maxContigs;
-            this.streamDensity = streamDensity;
-            this.streamStackDepth = streamStackDepth;
-            this.streamStacksPerContig = streamStacksPerContig;
-            this.unmappedReadsFraction = unmappedReadsFraction;
-            this.unmappedReadsCount = unmappedReadsCount;
-            this.verifySortedness = verifySortedness;
-
-            header = createHeader();
-            createReadStreams();
-
-            setName(String.format("%s: targetCoverage=%d numSamples=%d minContigs=%d maxContigs=%d %s %s %s unmappedReadsFraction=%.2f unmappedReadsCount=%d verifySortedness=%b",
-                    getClass().getSimpleName(), targetCoverage, numSamples, minContigs, maxContigs, streamDensity, streamStackDepth, streamStacksPerContig, unmappedReadsFraction, unmappedReadsCount, verifySortedness));
-        }
-
-        private SAMFileHeader createHeader() {
-            SAMFileHeader header = ArtificialSAMUtils.createArtificialSamHeader(maxContigs, 1, (streamDensity.maxDistanceBetweenStacks + MAX_READ_LENGTH) * streamStacksPerContig.maxStacksPerContig + 100000);
-            List<String> readGroups = new ArrayList<String>(numSamples);
-            List<String> sampleNames = new ArrayList<String>(numSamples);
-
-            for ( int i = 0; i < numSamples; i++ ) {
-                readGroups.add("ReadGroup" + i);
-                sampleNames.add("Sample" + i);
-            }
-
-            return ArtificialSAMUtils.createEnumeratedReadGroups(header, readGroups, sampleNames);
-        }
-
-        private void createReadStreams() {
-            perSampleArtificialReadStreams = new HashMap<String, ArtificialSingleSampleReadStream>(numSamples);
-            perSampleStreamAnalyzers = new HashMap<String, ArtificialSingleSampleReadStreamAnalyzer>(numSamples);
-
-            for (SAMReadGroupRecord readGroup : header.getReadGroups() ) {
-                String readGroupID = readGroup.getReadGroupId();
-                String sampleName = readGroup.getSample();
-
-                int thisSampleNumContigs = MathUtils.randomIntegerInRange(minContigs, maxContigs);
-                int thisSampleStacksPerContig = MathUtils.randomIntegerInRange(streamStacksPerContig.minStacksPerContig, streamStacksPerContig.maxStacksPerContig);
-
-                int thisSampleNumUnmappedReads = GenomeAnalysisEngine.getRandomGenerator().nextDouble() < unmappedReadsFraction ? unmappedReadsCount : 0;
-
-                ArtificialSingleSampleReadStream thisSampleStream = new ArtificialSingleSampleReadStream(header,
-                                                                                                         readGroupID,
-                                                                                                         thisSampleNumContigs,
-                                                                                                         thisSampleStacksPerContig,
-                                                                                                         streamStackDepth.minReadsPerStack,
-                                                                                                         streamStackDepth.maxReadsPerStack,
-                                                                                                         streamDensity.minDistanceBetweenStacks,
-                                                                                                         streamDensity.maxDistanceBetweenStacks,
-                                                                                                         MIN_READ_LENGTH,
-                                                                                                         MAX_READ_LENGTH,
-                                                                                                         thisSampleNumUnmappedReads);
-                perSampleArtificialReadStreams.put(sampleName, thisSampleStream);
-                perSampleStreamAnalyzers.put(sampleName, new PositionallyDownsampledArtificialSingleSampleReadStreamAnalyzer(thisSampleStream, targetCoverage));
-            }
-
-            mergedReadStream = new ArtificialMultiSampleReadStream(perSampleArtificialReadStreams.values());
-        }
-
-        public void run() {
-            GATKSAMIterator downsamplingIter = new PerSampleDownsamplingReadsIterator(mergedReadStream.getGATKSAMIterator(), downsamplerFactory);
-
-            if ( verifySortedness ) {
-                downsamplingIter = new VerifyingSamIterator(downsamplingIter);
-            }
-
-            while ( downsamplingIter.hasNext() ) {
-                SAMRecord read = downsamplingIter.next();
-                String sampleName = read.getReadGroup() != null ? read.getReadGroup().getSample() : null;
-
-                ArtificialSingleSampleReadStreamAnalyzer analyzer = perSampleStreamAnalyzers.get(sampleName);
-                if ( analyzer != null ) {
-                    analyzer.update(read);
-                }
-                else {
-                    throw new ReviewedGATKException("bug: stream analyzer for sample " + sampleName + " not found");
-                }
-            }
-
-            for ( Map.Entry<String, ArtificialSingleSampleReadStreamAnalyzer> analyzerEntry : perSampleStreamAnalyzers.entrySet() ) {
-                ArtificialSingleSampleReadStreamAnalyzer analyzer = analyzerEntry.getValue();
-                analyzer.finalizeStats();
-
-                // Validate the downsampled read stream for each sample individually
-                analyzer.validate();
-            }
-
-            // Allow memory used by this test to be reclaimed:
-            mergedReadStream = null;
-            perSampleArtificialReadStreams = null;
-            perSampleStreamAnalyzers = null;
-        }
-    }
-
-    @DataProvider(name = "PerSampleDownsamplingReadsIteratorTestDataProvider")
-    public Object[][] createPerSampleDownsamplingReadsIteratorTests() {
-
-        GenomeAnalysisEngine.resetRandomGenerator();
-
-        // Some values don't vary across tests
-        int targetCoverage = PerSampleDownsamplingReadsIteratorTest.StreamStackDepth.UNIFORM_MEDIUM.minReadsPerStack;
-        ReadsDownsamplerFactory<SAMRecord> downsamplerFactory = new SimplePositionalDownsamplerFactory<SAMRecord>(targetCoverage);
-        int maxContigs = 3;
-        boolean verifySortedness = true;
-
-        for ( int numSamples : Arrays.asList(1, 2, 10) ) {
-            for ( int minContigs = 1; minContigs <= maxContigs; minContigs++ ) {
-                for ( PerSampleDownsamplingReadsIteratorTest.StreamDensity streamDensity : PerSampleDownsamplingReadsIteratorTest.StreamDensity.values() ) {
-                    for ( PerSampleDownsamplingReadsIteratorTest.StreamStackDepth streamStackDepth : PerSampleDownsamplingReadsIteratorTest.StreamStackDepth.values() ) {
-                        for (PerSampleDownsamplingReadsIteratorTest.StreamStacksPerContig streamStacksPerContig : PerSampleDownsamplingReadsIteratorTest.StreamStacksPerContig.values() ) {
-                            for ( double unmappedReadsFraction : Arrays.asList(0.0, 1.0, 0.5) ) {
-                                for ( int unmappedReadsCount : Arrays.asList(1, 50) ) {
-                                    new PerSampleDownsamplingReadsIteratorTest(downsamplerFactory,
-                                                                               targetCoverage,
-                                                                               numSamples,
-                                                                               minContigs,
-                                                                               maxContigs,
-                                                                               streamDensity,
-                                                                               streamStackDepth,
-                                                                               streamStacksPerContig,
-                                                                               unmappedReadsFraction,
-                                                                               unmappedReadsCount,
-                                                                               verifySortedness);
-                                }
-                            }
-                        }
-                    }
-                }
-            }
-        }
-
-        return PerSampleDownsamplingReadsIteratorTest.getTests(PerSampleDownsamplingReadsIteratorTest.class);
-    }
-
-    @Test(dataProvider = "PerSampleDownsamplingReadsIteratorTestDataProvider")
-    public void runPerSampleDownsamplingReadsIteratorTest( PerSampleDownsamplingReadsIteratorTest test ) {
-        logger.warn("Running test: " + test);
-
-        GenomeAnalysisEngine.resetRandomGenerator();
-        test.run();
-    }
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/downsampling/PositionallyDownsampledArtificialSingleSampleReadStreamAnalyzer.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/downsampling/PositionallyDownsampledArtificialSingleSampleReadStreamAnalyzer.java
deleted file mode 100644
index b8a57e7..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/downsampling/PositionallyDownsampledArtificialSingleSampleReadStreamAnalyzer.java
+++ /dev/null
@@ -1,127 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.downsampling;
-
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-import org.broadinstitute.gatk.utils.sam.ArtificialSingleSampleReadStream;
-import org.broadinstitute.gatk.utils.sam.ArtificialSingleSampleReadStreamAnalyzer;
-
-/**
- * Class for analyzing an artificial read stream that has been positionally downsampled, and verifying
- * that the downsampling was done correctly without changing the stream in unexpected ways.
- *
- * @author David Roazen
- */
-public class PositionallyDownsampledArtificialSingleSampleReadStreamAnalyzer extends ArtificialSingleSampleReadStreamAnalyzer {
-    private int targetCoverage;
-
-    public PositionallyDownsampledArtificialSingleSampleReadStreamAnalyzer( ArtificialSingleSampleReadStream originalStream, int targetCoverage ) {
-        super(originalStream);
-        this.targetCoverage = targetCoverage;
-    }
-
-    /**
-     * Overridden validate() method that checks for the effects of positional downsampling in addition to checking
-     * for whether the original properties of the stream not affected by downsampling have been preserved
-     */
-    @Override
-    public void validate() {
-        if ( (originalStream.getNumContigs() == 0 || originalStream.getNumStacksPerContig() == 0) && originalStream.getNumUnmappedReads() == 0 ) {
-            if ( totalReads != 0 ) {
-                throw new ReviewedGATKException("got reads from the stream, but the stream was configured to have 0 reads");
-            }
-            return;  // no further validation needed for the 0-reads case
-        }
-        else if ( totalReads == 0 ) {
-            throw new ReviewedGATKException("got no reads from the stream, but the stream was configured to have > 0 reads");
-        }
-
-        if ( ! allSamplesMatch ) {
-            throw new ReviewedGATKException("some reads had the wrong sample");
-        }
-
-        if ( numContigs != originalStream.getNumContigs() ) {
-            throw new ReviewedGATKException("number of contigs not correct");
-        }
-
-        if ( stacksPerContig.size() != originalStream.getNumContigs() ) {
-            throw new ReviewedGATKException(String.format("bug in analyzer code: calculated sizes for %d contigs even though there were only %d contigs",
-                                                           stacksPerContig.size(), originalStream.getNumContigs()));
-        }
-
-        for ( int contigStackCount : stacksPerContig ) {
-            if ( contigStackCount != originalStream.getNumStacksPerContig() ) {
-                throw new ReviewedGATKException("contig had incorrect number of stacks");
-            }
-        }
-
-        if ( originalStream.getNumStacksPerContig() > 0 ) {
-
-            // Check for the effects of positional downsampling:
-            int stackMinimumAfterDownsampling = Math.min(targetCoverage, originalStream.getMinReadsPerStack());
-            int stackMaximumAfterDownsampling = targetCoverage;
-
-            if ( minReadsPerStack < stackMinimumAfterDownsampling ) {
-                throw new ReviewedGATKException("stack had fewer than the minimum number of reads after downsampling");
-            }
-            if ( maxReadsPerStack > stackMaximumAfterDownsampling ) {
-                throw new ReviewedGATKException("stack had more than the maximum number of reads after downsampling");
-            }
-        }
-        else if ( minReadsPerStack != null || maxReadsPerStack != null ) {
-            throw new ReviewedGATKException("bug in analyzer code: reads per stack was calculated even though 0 stacks per contig was specified");
-        }
-
-        if ( originalStream.getNumStacksPerContig() > 1 ) {
-            if ( minDistanceBetweenStacks < originalStream.getMinDistanceBetweenStacks() ) {
-                throw new ReviewedGATKException("stacks were separated by less than the minimum distance");
-            }
-            if ( maxDistanceBetweenStacks > originalStream.getMaxDistanceBetweenStacks() ) {
-                throw new ReviewedGATKException("stacks were separated by more than the maximum distance");
-            }
-        }
-        else if ( minDistanceBetweenStacks != null || maxDistanceBetweenStacks != null ) {
-            throw new ReviewedGATKException("bug in analyzer code: distance between stacks was calculated even though numStacksPerContig was <= 1");
-        }
-
-        if ( minReadLength < originalStream.getMinReadLength() ) {
-            throw new ReviewedGATKException("read was shorter than the minimum allowed length");
-        }
-        if ( maxReadLength > originalStream.getMaxReadLength() ) {
-            throw new ReviewedGATKException("read was longer than the maximum allowed length");
-        }
-
-        if ( numUnmappedReads != originalStream.getNumUnmappedReads() ) {
-            throw new ReviewedGATKException(String.format("wrong number of unmapped reads: requested %d but saw %d",
-                                                           originalStream.getNumUnmappedReads(), numUnmappedReads));
-        }
-
-        if ( (originalStream.getNumContigs() == 0 || originalStream.getNumStacksPerContig() == 0) &&
-             numUnmappedReads != totalReads ) {
-            throw new ReviewedGATKException("stream should have consisted only of unmapped reads, but saw some mapped reads");
-        }
-    }
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/downsampling/ReservoirDownsamplerUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/downsampling/ReservoirDownsamplerUnitTest.java
deleted file mode 100644
index 4e6f157..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/downsampling/ReservoirDownsamplerUnitTest.java
+++ /dev/null
@@ -1,131 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.downsampling;
-
-import htsjdk.samtools.SAMFileHeader;
-import htsjdk.samtools.SAMRecord;
-import org.broadinstitute.gatk.utils.BaseTest;
-import org.broadinstitute.gatk.engine.GenomeAnalysisEngine;
-import org.broadinstitute.gatk.utils.sam.ArtificialSAMUtils;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
-import org.testng.Assert;
-
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.List;
-
-public class ReservoirDownsamplerUnitTest extends BaseTest {
-
-    private static class ReservoirDownsamplerTest extends TestDataProvider {
-        int reservoirSize;
-        int totalReads;
-        int expectedNumReadsAfterDownsampling;
-        int expectedNumDiscardedItems;
-
-        public ReservoirDownsamplerTest( int reservoirSize, int totalReads ) {
-            super(ReservoirDownsamplerTest.class);
-
-            this.reservoirSize = reservoirSize;
-            this.totalReads = totalReads;
-
-            expectedNumReadsAfterDownsampling = Math.min(reservoirSize, totalReads);
-            expectedNumDiscardedItems = totalReads <= reservoirSize ? 0 : totalReads - reservoirSize;
-
-            setName(String.format("%s: reservoirSize=%d totalReads=%d expectedNumReadsAfterDownsampling=%d expectedNumDiscardedItems=%d",
-                    getClass().getSimpleName(), reservoirSize, totalReads, expectedNumReadsAfterDownsampling, expectedNumDiscardedItems));
-        }
-
-        public Collection<SAMRecord> createReads() {
-            Collection<SAMRecord> reads = new ArrayList<SAMRecord>(totalReads);
-
-            SAMFileHeader header = ArtificialSAMUtils.createArtificialSamHeader(1, 1, 1000000);
-            reads.addAll(ArtificialSAMUtils.createStackOfIdenticalArtificialReads(totalReads, header, "foo", 0, 1, 100));
-
-            return reads;
-        }
-    }
-
-    @DataProvider(name = "ReservoirDownsamplerTestDataProvider")
-    public Object[][] createReservoirDownsamplerTestData() {
-        for ( int reservoirSize = 1; reservoirSize <= 10000; reservoirSize *= 10 ) {
-            new ReservoirDownsamplerTest(reservoirSize, 0);
-            for ( int totalReads = 1; totalReads <= 10000; totalReads *= 10 ) {
-                new ReservoirDownsamplerTest(reservoirSize, totalReads);
-            }
-        }
-
-        return ReservoirDownsamplerTest.getTests(ReservoirDownsamplerTest.class);
-    }
-
-    @Test(dataProvider = "ReservoirDownsamplerTestDataProvider")
-    public void testReservoirDownsampler( ReservoirDownsamplerTest test ) {
-        logger.warn("Running test: " + test);
-
-        GenomeAnalysisEngine.resetRandomGenerator();
-
-        ReadsDownsampler<SAMRecord> downsampler = new ReservoirDownsampler<SAMRecord>(test.reservoirSize);
-
-        downsampler.submit(test.createReads());
-
-        if ( test.totalReads > 0 ) {
-            Assert.assertTrue(downsampler.hasFinalizedItems());
-            Assert.assertTrue(downsampler.peekFinalized() != null);
-            Assert.assertFalse(downsampler.hasPendingItems());
-            Assert.assertTrue(downsampler.peekPending() == null);
-        }
-        else {
-            Assert.assertFalse(downsampler.hasFinalizedItems() || downsampler.hasPendingItems());
-            Assert.assertTrue(downsampler.peekFinalized() == null && downsampler.peekPending() == null);
-        }
-
-        downsampler.signalEndOfInput();
-
-        if ( test.totalReads > 0 ) {
-            Assert.assertTrue(downsampler.hasFinalizedItems());
-            Assert.assertTrue(downsampler.peekFinalized() != null);
-            Assert.assertFalse(downsampler.hasPendingItems());
-            Assert.assertTrue(downsampler.peekPending() == null);
-        }
-        else {
-            Assert.assertFalse(downsampler.hasFinalizedItems() || downsampler.hasPendingItems());
-            Assert.assertTrue(downsampler.peekFinalized() == null && downsampler.peekPending() == null);
-        }
-
-        Assert.assertEquals(downsampler.size(), test.expectedNumReadsAfterDownsampling);
-        List<SAMRecord> downsampledReads = downsampler.consumeFinalizedItems();
-        Assert.assertFalse(downsampler.hasFinalizedItems() || downsampler.hasPendingItems());
-        Assert.assertTrue(downsampler.peekFinalized() == null && downsampler.peekPending() == null);
-
-        Assert.assertEquals(downsampledReads.size(), test.expectedNumReadsAfterDownsampling);
-
-        Assert.assertEquals(downsampler.getNumberOfDiscardedItems(), test.expectedNumDiscardedItems);
-        Assert.assertEquals(test.totalReads - downsampledReads.size(), test.expectedNumDiscardedItems);
-
-        downsampler.resetStats();
-        Assert.assertEquals(downsampler.getNumberOfDiscardedItems(), 0);
-    }
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/downsampling/SimplePositionalDownsamplerUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/downsampling/SimplePositionalDownsamplerUnitTest.java
deleted file mode 100644
index e04c347..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/downsampling/SimplePositionalDownsamplerUnitTest.java
+++ /dev/null
@@ -1,331 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.downsampling;
-
-import htsjdk.samtools.SAMFileHeader;
-import htsjdk.samtools.SAMRecord;
-import org.broadinstitute.gatk.utils.BaseTest;
-import org.broadinstitute.gatk.engine.GenomeAnalysisEngine;
-import org.broadinstitute.gatk.utils.sam.ArtificialSAMUtils;
-import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
-import org.testng.Assert;
-
-import java.util.*;
-
-public class SimplePositionalDownsamplerUnitTest extends BaseTest {
-
-    private static class SimplePositionalDownsamplerTest extends TestDataProvider {
-        int targetCoverage;
-        int numStacks;
-        List<Integer> stackSizes;
-        List<Integer> expectedStackSizes;
-        boolean multipleContigs;
-        int totalInitialReads;
-
-        public SimplePositionalDownsamplerTest( int targetCoverage, List<Integer> stackSizes, boolean multipleContigs ) {
-            super(SimplePositionalDownsamplerTest.class);
-
-            this.targetCoverage = targetCoverage;
-            this.numStacks = stackSizes.size();
-            this.stackSizes = stackSizes;
-            this.multipleContigs = multipleContigs;
-
-            calculateExpectedDownsampledStackSizes();
-
-            totalInitialReads = 0;
-            for ( Integer stackSize : stackSizes ) {
-                totalInitialReads += stackSize;
-            }
-
-            setName(String.format("%s: targetCoverage=%d numStacks=%d stackSizes=%s expectedSizes=%s multipleContigs=%b",
-                    getClass().getSimpleName(), targetCoverage, numStacks, stackSizes, expectedStackSizes, multipleContigs));
-        }
-
-        public Collection<SAMRecord> createReads() {
-            Collection<SAMRecord> reads = new ArrayList<SAMRecord>();
-            SAMFileHeader header = multipleContigs ?
-                                   ArtificialSAMUtils.createArtificialSamHeader(2, 1, 1000000) :
-                                   ArtificialSAMUtils.createArtificialSamHeader(1, 1, 1000000);
-
-            int refIndex = 0;
-            int alignmentStart = 1;
-            int readLength = 100;
-
-            for ( int i = 0; i < numStacks; i++ ) {
-                if ( multipleContigs && refIndex == 0 && i >= numStacks / 2 ) {
-                    refIndex++;
-                }
-
-                reads.addAll(ArtificialSAMUtils.createStackOfIdenticalArtificialReads(stackSizes.get(i), header, "foo",
-                                                                                      refIndex, alignmentStart, readLength));
-
-                alignmentStart += 10;
-            }
-
-            return reads;
-        }
-
-        private void calculateExpectedDownsampledStackSizes() {
-            expectedStackSizes = new ArrayList<Integer>(numStacks);
-
-            for ( Integer stackSize : stackSizes ) {
-                int expectedSize = targetCoverage >= stackSize ? stackSize : targetCoverage;
-                expectedStackSizes.add(expectedSize);
-            }
-        }
-    }
-
-    @DataProvider(name = "SimplePositionalDownsamplerTestDataProvider")
-    public Object[][] createSimplePositionalDownsamplerTestData() {
-        GenomeAnalysisEngine.resetRandomGenerator();
-
-        for ( int targetCoverage = 1; targetCoverage <= 10000; targetCoverage *= 10 ) {
-            for ( int contigs = 1; contigs <= 2; contigs++ ) {
-                for ( int numStacks = 0; numStacks <= 10; numStacks++ ) {
-                    List<Integer> stackSizes = new ArrayList<Integer>(numStacks);
-                    for ( int stack = 1; stack <= numStacks; stack++ ) {
-                        stackSizes.add(GenomeAnalysisEngine.getRandomGenerator().nextInt(targetCoverage * 2) + 1);
-                    }
-                    new SimplePositionalDownsamplerTest(targetCoverage, stackSizes, contigs > 1);
-                }
-            }
-        }
-
-        return SimplePositionalDownsamplerTest.getTests(SimplePositionalDownsamplerTest.class);
-    }
-
-    @Test( dataProvider = "SimplePositionalDownsamplerTestDataProvider" )
-    public void testSimplePostionalDownsampler( SimplePositionalDownsamplerTest test ) {
-        logger.warn("Running test: " + test);
-
-        GenomeAnalysisEngine.resetRandomGenerator();
-
-        ReadsDownsampler<SAMRecord> downsampler = new SimplePositionalDownsampler<SAMRecord>(test.targetCoverage);
-
-        downsampler.submit(test.createReads());
-
-        if ( test.numStacks > 1 ) {
-            Assert.assertTrue(downsampler.hasFinalizedItems());
-            Assert.assertTrue(downsampler.peekFinalized() != null);
-            Assert.assertTrue(downsampler.hasPendingItems());
-            Assert.assertTrue(downsampler.peekPending() != null);
-        }
-        else if ( test.numStacks == 1 ) {
-            Assert.assertFalse(downsampler.hasFinalizedItems());
-            Assert.assertTrue(downsampler.peekFinalized() == null);
-            Assert.assertTrue(downsampler.hasPendingItems());
-            Assert.assertTrue(downsampler.peekPending() != null);
-        }
-        else {
-            Assert.assertFalse(downsampler.hasFinalizedItems() || downsampler.hasPendingItems());
-            Assert.assertTrue(downsampler.peekFinalized() == null && downsampler.peekPending() == null);
-        }
-
-        downsampler.signalEndOfInput();
-
-        if ( test.numStacks > 0 ) {
-            Assert.assertTrue(downsampler.hasFinalizedItems());
-            Assert.assertTrue(downsampler.peekFinalized() != null);
-            Assert.assertFalse(downsampler.hasPendingItems());
-            Assert.assertTrue(downsampler.peekPending() == null);
-        }
-        else {
-            Assert.assertFalse(downsampler.hasFinalizedItems() || downsampler.hasPendingItems());
-            Assert.assertTrue(downsampler.peekFinalized() == null && downsampler.peekPending() == null);
-        }
-
-        List<SAMRecord> downsampledReads = downsampler.consumeFinalizedItems();
-        Assert.assertFalse(downsampler.hasFinalizedItems() || downsampler.hasPendingItems());
-        Assert.assertTrue(downsampler.peekFinalized() == null && downsampler.peekPending() == null);
-
-        if ( test.numStacks == 0 ) {
-            Assert.assertTrue(downsampledReads.isEmpty());
-        }
-        else {
-            List<Integer> downsampledStackSizes = getDownsampledStackSizesAndVerifySortedness(downsampledReads);
-
-            Assert.assertEquals(downsampledStackSizes.size(), test.numStacks);
-            Assert.assertEquals(downsampledStackSizes, test.expectedStackSizes);
-
-            int numReadsActuallyEliminated = test.totalInitialReads - downsampledReads.size();
-            int numReadsReportedEliminated = downsampler.getNumberOfDiscardedItems();
-            Assert.assertEquals(numReadsActuallyEliminated, numReadsReportedEliminated);
-        }
-
-        downsampler.resetStats();
-        Assert.assertEquals(downsampler.getNumberOfDiscardedItems(), 0);
-    }
-
-    private List<Integer> getDownsampledStackSizesAndVerifySortedness( List<SAMRecord> downsampledReads ) {
-        List<Integer> stackSizes = new ArrayList<Integer>();
-
-        if ( downsampledReads.isEmpty() ) {
-            return stackSizes;
-        }
-
-        Iterator<SAMRecord> iter = downsampledReads.iterator();
-        Assert.assertTrue(iter.hasNext());
-
-        SAMRecord previousRead = iter.next();
-        int currentStackSize = 1;
-
-        while ( iter.hasNext() ) {
-            SAMRecord currentRead = iter.next();
-
-            if ( currentRead.getReferenceIndex() > previousRead.getReferenceIndex() || currentRead.getAlignmentStart() > previousRead.getAlignmentStart() ) {
-                stackSizes.add(currentStackSize);
-                currentStackSize = 1;
-            }
-            else if ( currentRead.getReferenceIndex() < previousRead.getReferenceIndex() || currentRead.getAlignmentStart() < previousRead.getAlignmentStart() ) {
-                Assert.fail(String.format("Reads are out of order: %s %s", previousRead, currentRead));
-            }
-            else {
-                currentStackSize++;
-            }
-
-            previousRead = currentRead;
-        }
-
-        stackSizes.add(currentStackSize);
-        return stackSizes;
-    }
-
-    @Test
-    public void testSimplePositionalDownsamplerSignalNoMoreReadsBefore() {
-        ReadsDownsampler<SAMRecord> downsampler = new SimplePositionalDownsampler<SAMRecord>(1000);
-
-        SAMFileHeader header = ArtificialSAMUtils.createArtificialSamHeader(1, 1, 1000000);
-
-        Collection<SAMRecord> readStack = new ArrayList<SAMRecord>();
-        readStack.addAll(ArtificialSAMUtils.createStackOfIdenticalArtificialReads(50, header, "foo", 0, 1, 100));
-        downsampler.submit(readStack);
-
-        Assert.assertFalse(downsampler.hasFinalizedItems());
-        Assert.assertTrue(downsampler.peekFinalized() == null);
-        Assert.assertTrue(downsampler.hasPendingItems());
-        Assert.assertTrue(downsampler.peekPending() != null);
-
-        SAMRecord laterRead = ArtificialSAMUtils.createArtificialRead(header, "foo", 0, 2, 100);
-        downsampler.signalNoMoreReadsBefore(laterRead);
-
-        Assert.assertTrue(downsampler.hasFinalizedItems());
-        Assert.assertTrue(downsampler.peekFinalized() != null);
-        Assert.assertFalse(downsampler.hasPendingItems());
-        Assert.assertTrue(downsampler.peekPending() == null);
-
-        List<SAMRecord> downsampledReads = downsampler.consumeFinalizedItems();
-
-        Assert.assertEquals(downsampledReads.size(), readStack.size());
-    }
-
-    @Test
-    public void testBasicUnmappedReadsSupport() {
-        ReadsDownsampler<SAMRecord> downsampler = new SimplePositionalDownsampler<SAMRecord>(100);
-
-        SAMFileHeader header = ArtificialSAMUtils.createArtificialSamHeader(1, 1, 1000000);
-
-        Collection<SAMRecord> readStack = new ArrayList<SAMRecord>();
-        readStack.addAll(ArtificialSAMUtils.createStackOfIdenticalArtificialReads(200, header, "foo", SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX,
-                                                                                  SAMRecord.NO_ALIGNMENT_START, 100));
-        for ( SAMRecord read : readStack ) {
-            Assert.assertTrue(read.getReadUnmappedFlag());
-        }
-
-        downsampler.submit(readStack);
-        downsampler.signalEndOfInput();
-
-        List<SAMRecord> downsampledReads = downsampler.consumeFinalizedItems();
-
-        // Unmapped reads should not get downsampled at all by the SimplePositionalDownsampler
-        Assert.assertEquals(downsampledReads.size(), readStack.size());
-
-        for ( SAMRecord read: downsampledReads ) {
-            Assert.assertTrue(read.getReadUnmappedFlag());
-        }
-    }
-
-    @Test
-    public void testMixedMappedAndUnmappedReadsSupport() {
-        ReadsDownsampler<SAMRecord> downsampler = new SimplePositionalDownsampler<SAMRecord>(100);
-
-        SAMFileHeader header = ArtificialSAMUtils.createArtificialSamHeader(1, 1, 1000000);
-
-        Collection<SAMRecord> mappedReadStack = new ArrayList<SAMRecord>();
-        mappedReadStack.addAll(ArtificialSAMUtils.createStackOfIdenticalArtificialReads(200, header, "foo", 0, 1, 100));
-        for ( SAMRecord read : mappedReadStack ) {
-            Assert.assertFalse(read.getReadUnmappedFlag());
-        }
-
-        Collection<SAMRecord> unmappedReadStack = new ArrayList<SAMRecord>();
-        unmappedReadStack.addAll(ArtificialSAMUtils.createStackOfIdenticalArtificialReads(200, header, "foo", SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX,
-                                                                                          SAMRecord.NO_ALIGNMENT_START, 100));
-        for ( SAMRecord read : unmappedReadStack ) {
-            Assert.assertTrue(read.getReadUnmappedFlag());
-        }
-
-        downsampler.submit(mappedReadStack);
-        downsampler.submit(unmappedReadStack);
-        downsampler.signalEndOfInput();
-
-        List<SAMRecord> downsampledReads = downsampler.consumeFinalizedItems();
-
-        // Unmapped reads should not get downsampled at all by the SimplePositionalDownsampler
-        Assert.assertEquals(downsampledReads.size(), 300);
-        Assert.assertEquals(downsampler.getNumberOfDiscardedItems(), 100);
-
-        int count = 1;
-        for ( SAMRecord read: downsampledReads ) {
-            if ( count <= 100 ) {
-                Assert.assertFalse(read.getReadUnmappedFlag());
-            }
-            else {
-                Assert.assertTrue(read.getReadUnmappedFlag());
-            }
-
-            count++;
-        }
-    }
-
-    @Test
-    public void testGATKSAMRecordSupport() {
-        ReadsDownsampler<GATKSAMRecord> downsampler = new SimplePositionalDownsampler<GATKSAMRecord>(1000);
-
-        SAMFileHeader header = ArtificialSAMUtils.createArtificialSamHeader(1, 1, 1000000);
-
-        List<GATKSAMRecord> reads = new ArrayList<GATKSAMRecord>();
-        for ( int i = 0; i < 10; i++ ) {
-            reads.add(ArtificialSAMUtils.createArtificialRead(header, "foo", 0, 10, 20 * i + 10));
-        }
-
-        downsampler.submit(reads);
-        downsampler.signalEndOfInput();
-        List<GATKSAMRecord> downsampledReads = downsampler.consumeFinalizedItems();
-
-        Assert.assertEquals(downsampledReads.size(), 10);
-    }
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/executive/ReduceTreeUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/executive/ReduceTreeUnitTest.java
deleted file mode 100644
index 50f21a6..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/executive/ReduceTreeUnitTest.java
+++ /dev/null
@@ -1,254 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.executive;
-
-
-import org.testng.Assert;
-import org.testng.annotations.AfterMethod;
-import org.testng.annotations.Test;
-import org.testng.annotations.BeforeMethod;
-
-import org.broadinstitute.gatk.utils.BaseTest;
-
-import java.util.concurrent.Callable;
-import java.util.concurrent.Future;
-import java.util.concurrent.FutureTask;
-import java.util.concurrent.ExecutionException;
-import java.util.List;
-import java.util.ArrayList;
-/**
- * User: hanna
- * Date: Apr 29, 2009
- * Time: 10:40:49 AM
- * BROAD INSTITUTE SOFTWARE COPYRIGHT NOTICE AND AGREEMENT
- * Software and documentation are copyright 2005 by the Broad Institute.
- * All rights are reserved.
- *
- * Users acknowledge that this software is supplied without any warranty or support.
- * The Broad Institute is not responsible for its use, misuse, or
- * functionality.
- */
-
-/**
- * Make sure the reduce tree organizes reduces in the correct way.
- */
-
-public class ReduceTreeUnitTest extends BaseTest implements ReduceTree.TreeReduceNotifier {
-
-    /**
-     * The tree indicating reduce order.
-     */
-    private ReduceTree reduceTree = null;
-
-    /**
-     * 
-     */
-    private List<List<Integer>> reduces = new ArrayList<List<Integer>>();
-
-    @BeforeMethod
-    public void createTree() {
-        reduceTree = new ReduceTree( this );
-    }
-
-    @AfterMethod
-    public void destroyTree() {
-        reduceTree = null;
-        reduces.clear();
-    }
-
-    @Test
-    public void testNoValueReduce()
-        throws InterruptedException, ExecutionException {
-        reduceTree.complete();
-        Assert.assertEquals(reduceTree.getResult(), null, "Single-value reduce failed");
-    }
-
-    @Test
-    public void testSingleValueReduce()
-            throws InterruptedException, ExecutionException {
-        reduceTree.addEntry( getReduceTestEntry(1) );
-        reduceTree.complete();
-        Assert.assertEquals(reduceTree.getResult().get(), 1, "Single-value reduce failed");
-    }
-
-    @Test(expectedExceptions=IllegalStateException.class)
-    public void testIncompleteReduce()
-            throws InterruptedException, ExecutionException {
-        reduceTree.addEntry( getReduceTestEntry(1) );
-        reduceTree.getResult().get();
-    }
-
-    @Test
-    public void testDualValueReduce()
-        throws InterruptedException, ExecutionException {
-        reduceTree.addEntry( getReduceTestEntry(1) );
-        reduceTree.addEntry( getReduceTestEntry(2) );
-        reduceTree.complete();
-
-        List<Integer> expected = new ArrayList<Integer>();
-        expected.add( 1 );
-        expected.add( 2 );
-
-        // Test the result
-        Assert.assertEquals(reduceTree.getResult().get(), expected, "Dual-value reduce failed");
-
-        // Test the intermediate steps
-        Assert.assertEquals(reduces.size(), 1, "Size of incoming tree reduces incorrect");
-        Assert.assertEquals(reduces.get(0), expected, "Incoming tree reduce incorrect");
-    }
-
-    @Test
-    public void testThreeValueReduce()
-        throws InterruptedException, ExecutionException {
-        List<Integer> firstExpected = new ArrayList<Integer>();
-        firstExpected.add(1);
-        firstExpected.add(2);
-
-        List<Integer> finalExpected = new ArrayList<Integer>();
-        finalExpected.addAll( firstExpected );
-        finalExpected.add(3);
-
-        reduceTree.addEntry( getReduceTestEntry(1) );
-
-        Assert.assertEquals(reduces.size(), 0, "Reduce queue should be empty after entering a single element");
-
-        reduceTree.addEntry( getReduceTestEntry(2) );
-
-        Assert.assertEquals(reduces.size(), 1, "Reduce queue should have one element after two entries");
-        Assert.assertEquals(reduces.get(0), firstExpected, "Reduce queue element is incorrect after two entries");
-
-        reduceTree.addEntry( getReduceTestEntry(3) );
-
-        Assert.assertEquals(reduces.size(), 1, "Reduce queue should have one element after three entries");
-        Assert.assertEquals(reduces.get(0), firstExpected, "Reduce queue element is incorrect after three entries");
-
-        reduceTree.complete();
-
-        // Test the result
-        Assert.assertEquals(reduceTree.getResult().get(), finalExpected, "Three value reduce failed");
-
-        Assert.assertEquals(reduces.size(), 2, "Reduce queue should have two elements after three entries (complete)");
-        Assert.assertEquals(reduces.get(0), firstExpected, "Reduce queue element is incorrect after three entries");
-        Assert.assertEquals(reduces.get(1), finalExpected, "Reduce queue element is incorrect after three entries");
-    }
-
-    @Test
-    public void testFourValueReduce()
-        throws InterruptedException, ExecutionException {
-        List<Integer> lhsExpected = new ArrayList<Integer>();
-        lhsExpected.add(1);
-        lhsExpected.add(2);
-
-        List<Integer> rhsExpected = new ArrayList<Integer>();
-        rhsExpected.add(3);
-        rhsExpected.add(4);
-
-        List<Integer> finalExpected = new ArrayList<Integer>();
-        finalExpected.addAll(lhsExpected);
-        finalExpected.addAll(rhsExpected);
-
-        reduceTree.addEntry( getReduceTestEntry(1) );
-
-        Assert.assertEquals(reduces.size(), 0, "Reduce queue should be empty after entering a single element");
-
-        reduceTree.addEntry( getReduceTestEntry(2) );
-
-        Assert.assertEquals(reduces.size(), 1, "Reduce queue should have one element after two entries");
-        Assert.assertEquals(reduces.get(0), lhsExpected, "Reduce queue element is incorrect after two entries");
-
-        reduceTree.addEntry( getReduceTestEntry(3) );
-
-        Assert.assertEquals(reduces.size(), 1, "Reduce queue should have one element after three entries");
-        Assert.assertEquals(reduces.get(0), lhsExpected, "Reduce queue element is incorrect after three entries");
-
-        reduceTree.addEntry( getReduceTestEntry(4) );
-
-        Assert.assertEquals(reduces.size(), 3, "Reduce queue should have three elements after four entries");
-        Assert.assertEquals(reduces.get(0), lhsExpected, "Reduce queue element 0 is incorrect after three entries");
-        Assert.assertEquals(reduces.get(1), rhsExpected, "Reduce queue element 1 is incorrect after three entries");
-        Assert.assertEquals(reduces.get(2), finalExpected, "Reduce queue element 2 is incorrect after three entries");
-
-        reduceTree.complete();
-
-                // Test the result
-        Assert.assertEquals(reduceTree.getResult().get(), finalExpected, "Four-valued reduce failed");
-
-        // Test the working tree
-        Assert.assertEquals(reduces.size(), 3, "Didn't see correct number of reduces");
-        Assert.assertEquals(reduces.get(0), lhsExpected, "lhs of four value reduce failed");
-        Assert.assertEquals(reduces.get(1), rhsExpected, "rhs of four value reduce failed");
-        Assert.assertEquals(reduces.get(2), finalExpected, "final value four value reduce failed");
-    }
-
-
-    private Future getReduceTestEntry( Object value ) {
-        // Create a task and run it, assuring that the tests won't block on a get.
-        FutureTask task = new FutureTask( new ReduceTestEntry( value ) );
-        task.run();
-        return task;
-    }
-
-    public Future notifyReduce( Future lhs, Future rhs )  {
-        List<Integer> reduce = new ArrayList<Integer>();
-
-        try {
-            if( lhs == null && rhs == null )
-                throw new IllegalStateException("lhs and rhs are null");
-
-            if( lhs.get() instanceof List )
-                reduce.addAll((List)lhs.get());
-            else
-                reduce.add((Integer)lhs.get());
-
-            if( rhs != null ) {
-                if( rhs.get() instanceof List )
-                    reduce.addAll((List)rhs.get());
-                else
-                    reduce.add((Integer)rhs.get());
-            }
-        }
-        catch( Exception ex ) {
-            // just rethrow any exceptions
-            throw new RuntimeException(ex);
-        }
-
-        reduces.add( reduce );
-
-        return getReduceTestEntry( reduce );
-    }
-
-    private class ReduceTestEntry implements Callable {
-        private Object data;
-
-        public ReduceTestEntry( Object data ) {
-            this.data = data;
-        }
-
-        public Object call() {
-            return data;
-        }
-    }
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/filters/AllowNCigarMalformedReadFilterUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/filters/AllowNCigarMalformedReadFilterUnitTest.java
deleted file mode 100644
index 7a01220..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/filters/AllowNCigarMalformedReadFilterUnitTest.java
+++ /dev/null
@@ -1,77 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.filters;
-
-
-import htsjdk.samtools.SAMRecord;
-import org.broadinstitute.gatk.engine.arguments.ValidationExclusion;
-import org.testng.Assert;
-import org.testng.annotations.Test;
-
-import java.util.Collections;
-
-
-/**
- * Tests for the {@link MalformedReadFilter} when the unsafe flag
- * {@link ValidationExclusion.TYPE#ALLOW_N_CIGAR_READS} is set.
- *
- * @author Valentin Ruano-Rubio
- * @since 6/6/13
- */
-public class AllowNCigarMalformedReadFilterUnitTest extends MalformedReadFilterUnitTest {
-
-
-    @Override
-    protected ValidationExclusion composeValidationExclusion() {
-        return new ValidationExclusion(Collections.singletonList(ValidationExclusion.TYPE.ALLOW_N_CIGAR_READS));
-    }
-
-
-    @Test(enabled = true,
-            dataProvider= "UnsupportedCigarOperatorDataProvider")
-    @CigarOperatorTest(CigarOperatorTest.Outcome.IGNORE)
-    public void testCigarNOperatorFilterIgnore(final String cigarString) {
-
-        final MalformedReadFilter filter = buildMalformedReadFilter(false);
-        final SAMRecord nContainingCigarRead = buildSAMRecord(cigarString);
-        Assert.assertFalse(filter.filterOut(nContainingCigarRead),
-                "filters out N containing Cigar when it should ignore the fact");
-    }
-
-    @Test(enabled = false)
-    @Override
-    public void testCigarNOperatorFilterException(final String cigarString) {
-        // Nothing to do here.
-        // Just deactivates the parents test case.
-    }
-
-
-
-
-
-
-
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/filters/BadCigarFilterUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/filters/BadCigarFilterUnitTest.java
deleted file mode 100644
index bdb194c..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/filters/BadCigarFilterUnitTest.java
+++ /dev/null
@@ -1,91 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.filters;
-
-import htsjdk.samtools.Cigar;
-import org.broadinstitute.gatk.utils.clipping.ReadClipperTestUtils;
-import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
-import org.testng.Assert;
-import org.testng.annotations.BeforeClass;
-import org.testng.annotations.Test;
-
-import java.util.List;
-
-/**
- * Checks that the Bad Cigar filter works for all kinds of wonky cigars
- *
- * @author Mauricio Carneiro
- * @since 3/20/12
- */
-public class BadCigarFilterUnitTest {
-
-    public static final String[] BAD_CIGAR_LIST = {
-            "2D4M",               // starting with multiple deletions
-            "4M2D",               // ending with multiple deletions
-            "3M1I1D",             // adjacent indels AND ends in deletion
-            "1M1I1D2M",           // adjacent indels I->D
-            "1M1D2I1M",           // adjacent indels D->I
-            "1M1I2M1D",           // ends in single deletion with insertion in the middle
-            "4M1D",               // ends in single deletion
-            "1D4M",               // starts with single deletion
-            "2M1D1D2M",           // adjacent D's
-            "1M1I1I1M",           // adjacent I's
-            "1H1D4M",             // starting with deletion after H
-            "1S1D3M",             // starting with deletion after S
-            "1H1S1D3M",           // starting with deletion after HS
-            "4M1D1H",             // ending with deletion before H
-            "3M1D1S",             // ending with deletion before S
-            "3M1D1S1H",           // ending with deletion before HS
-            "10M2H10M",           // H in the middle
-            "10M2S10M",           // S in the middle
-            "1H1S10M2S10M1S1H",    // deceiving S in the middle
-            "1H1S10M2H10M1S1H"    // deceiving H in the middle
-    };
-
-    BadCigarFilter filter;
-
-    @BeforeClass
-    public void init() {
-        filter = new BadCigarFilter();
-    }
-
-    @Test(enabled = true)
-    public void testWonkyCigars () {
-        for (String cigarString : BAD_CIGAR_LIST) {
-            GATKSAMRecord read = ReadClipperTestUtils.makeReadFromCigar(cigarString);
-            Assert.assertTrue(filter.filterOut(read), read.getCigarString());
-        }
-    }
-
-    @Test(enabled = true)
-    public void testGoodCigars() {
-        List<Cigar> cigarList = ReadClipperTestUtils.generateCigarList(10);
-        for (Cigar cigar : cigarList) {
-            GATKSAMRecord read = ReadClipperTestUtils.makeReadFromCigar(cigar);
-            Assert.assertFalse(filter.filterOut(read), read.getCigarString());
-        }
-    }
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/filters/BadReadGroupsIntegrationTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/filters/BadReadGroupsIntegrationTest.java
deleted file mode 100644
index 3ff8ed4..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/filters/BadReadGroupsIntegrationTest.java
+++ /dev/null
@@ -1,52 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.filters;
-
-import org.broadinstitute.gatk.engine.walkers.WalkerTest;
-import org.broadinstitute.gatk.utils.exceptions.UserException;
-import org.testng.annotations.Test;
-
-
-public class BadReadGroupsIntegrationTest extends WalkerTest {
-
-    @Test
-    public void testMissingReadGroup() {
-        WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec(
-                "-T PrintReads -R " + b36KGReference + " -I " + privateTestDir + "missingReadGroup.bam -o /dev/null",
-                0,
-                UserException.ReadMissingReadGroup.class);
-        executeTest("test Missing Read Group", spec);
-    }
-
-    @Test
-    public void testUndefinedReadGroup() {
-        WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec(
-                "-T PrintReads -R " + b36KGReference + " -I " + privateTestDir + "undefinedReadGroup.bam -o /dev/null",
-                0,
-                UserException.ReadHasUndefinedReadGroup.class);
-        executeTest("test Undefined Read Group", spec);
-    }
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/filters/MalformedReadFilterUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/filters/MalformedReadFilterUnitTest.java
deleted file mode 100644
index d25db50..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/filters/MalformedReadFilterUnitTest.java
+++ /dev/null
@@ -1,246 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.filters;
-
-
-import htsjdk.samtools.Cigar;
-import htsjdk.samtools.SAMFileHeader;
-import htsjdk.samtools.SAMRecord;
-import htsjdk.samtools.TextCigarCodec;
-import org.broadinstitute.gatk.engine.GenomeAnalysisEngine;
-import org.broadinstitute.gatk.engine.arguments.ValidationExclusion;
-import org.broadinstitute.gatk.engine.datasources.reads.SAMDataSource;
-import org.broadinstitute.gatk.utils.sam.ArtificialSAMUtils;
-import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
-import org.testng.Assert;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
-import org.broadinstitute.gatk.utils.exceptions.UserException;
-import org.broadinstitute.gatk.utils.exceptions.UserException.UnsupportedCigarOperatorException;
-
-import java.lang.annotation.*;
-import java.lang.reflect.Method;
-import java.util.*;
-
-
-/**
- * Tests for the MalformedReadFilter
- *
- * @author Eric Banks
- * @since 3/14/13
- */
-public class MalformedReadFilterUnitTest extends ReadFilterTest {
-
-    //////////////////////////////////////
-    // Test the checkSeqStored() method //
-    //////////////////////////////////////
-
-    @Test(enabled = true)
-    public void testCheckSeqStored () {
-
-        final GATKSAMRecord goodRead = ArtificialSAMUtils.createArtificialRead(new byte[]{(byte)'A'}, new byte[]{(byte)'A'}, "1M");
-        final GATKSAMRecord badRead = ArtificialSAMUtils.createArtificialRead(new byte[]{}, new byte[]{}, "1M");
-        badRead.setReadString("*");
-
-        Assert.assertTrue(MalformedReadFilter.checkSeqStored(goodRead, true));
-        Assert.assertFalse(MalformedReadFilter.checkSeqStored(badRead, true));
-
-        try {
-            MalformedReadFilter.checkSeqStored(badRead, false);
-            Assert.assertTrue(false, "We should have exceptioned out in the previous line");
-        } catch (UserException e) { }
-    }
-
-    @Test(enabled = true, dataProvider= "UnsupportedCigarOperatorDataProvider")
-    @CigarOperatorTest(CigarOperatorTest.Outcome.FILTER)
-    public void testCigarNOperatorFilterTruePositive(String cigarString) {
-
-       final MalformedReadFilter filter = buildMalformedReadFilter(true);
-       final SAMRecord nContainingCigarRead = buildSAMRecord(cigarString);
-       Assert.assertTrue(filter.filterOut(nContainingCigarRead),
-                  " Did not filtered out a N containing CIGAR read");
-    }
-
-    @Test(enabled = true, dataProvider= "UnsupportedCigarOperatorDataProvider")
-    @CigarOperatorTest(CigarOperatorTest.Outcome.ACCEPT)
-    public void testCigarNOperatorFilterTrueNegative(String cigarString) {
-
-        final MalformedReadFilter filter = buildMalformedReadFilter(true);
-        final SAMRecord nonNContainingCigarRead = buildSAMRecord(cigarString);
-        Assert.assertFalse(filter.filterOut(nonNContainingCigarRead),
-                    " Filtered out a non-N containing CIGAR read");
-    }
-
-    @Test(enabled = true,
-            expectedExceptions = UnsupportedCigarOperatorException.class,
-            dataProvider= "UnsupportedCigarOperatorDataProvider")
-    @CigarOperatorTest(CigarOperatorTest.Outcome.EXCEPTION)
-    public void testCigarNOperatorFilterException(final String cigarString) {
-
-        final MalformedReadFilter filter = buildMalformedReadFilter(false);
-        final SAMRecord nContainingCigarRead = buildSAMRecord(cigarString);
-
-        filter.filterOut(nContainingCigarRead);
-    }
-
-    @Test(enabled = true, dataProvider="UnsupportedCigarOperatorDataProvider")
-    @CigarOperatorTest(CigarOperatorTest.Outcome.ACCEPT)
-    public void testCigarNOperatorFilterControl(final String cigarString) {
-
-        final MalformedReadFilter filter = buildMalformedReadFilter(false);
-        final SAMRecord nonNContainingCigarRead = buildSAMRecord(cigarString);
-
-        Assert.assertFalse(filter.filterOut(nonNContainingCigarRead));
-    }
-
-    protected SAMRecord buildSAMRecord(final String cigarString) {
-        final Cigar nContainingCigar = TextCigarCodec.getSingleton().decode(cigarString);
-        return  this.createRead(nContainingCigar, 1, 0, 10);
-    }
-
-    protected MalformedReadFilter buildMalformedReadFilter(final boolean filterRNO) {
-        return buildMalformedReadFiter(filterRNO,new ValidationExclusion.TYPE[] {});
-    }
-
-    protected MalformedReadFilter buildMalformedReadFiter(boolean filterRNO, final ValidationExclusion.TYPE... excl) {
-        final ValidationExclusion ve = new ValidationExclusion(Arrays.asList(excl));
-
-        final MalformedReadFilter filter = new MalformedReadFilter();
-
-        final SAMFileHeader h = getHeader();
-        final SAMDataSource ds =  getDataSource();
-
-        final GenomeAnalysisEngine gae = new GenomeAnalysisEngine() {
-            @Override
-            public SAMFileHeader getSAMFileHeader() {
-                return h;
-            }
-
-            @Override
-            public SAMDataSource getReadsDataSource() {
-                return ds;
-            }
-        };
-        filter.initialize(gae);
-        filter.filterReadsWithNCigar = filterRNO;
-        return filter;
-    }
-
-    @Retention(RetentionPolicy.RUNTIME)
-    @Target(ElementType.METHOD)
-    @Inherited
-    protected @interface CigarOperatorTest {
-
-        enum Outcome {
-            ANY,ACCEPT,FILTER,EXCEPTION,IGNORE;
-
-            public boolean appliesTo (String cigar) {
-                boolean hasN = cigar.indexOf('N') != -1;
-                switch (this) {
-                    case ANY: return true;
-                    case ACCEPT: return !hasN;
-                    case IGNORE: return hasN;
-                    case FILTER:
-                    case EXCEPTION:
-                    default:
-                        return hasN;
-
-                }
-            }
-        }
-
-        Outcome value() default Outcome.ANY;
-    }
-
-    /**
-     * Cigar test data for unsupported operator test.
-     * Each element of this array corresponds to a test case. In turn the first element of the test case array is the
-     * Cigar string for that test case and the second indicates whether it should be filtered due to the presence of a
-     * unsupported operator
-     */
-    private static final String[] TEST_CIGARS =  {
-       "101M10D20I10M",
-       "6M14N5M",
-       "1N",
-       "101M",
-       "110N",
-       "2N4M",
-       "4M2N",
-       "3M1I1M",
-       "1M2I2M",
-       "1M10N1I1M",
-       "1M1I1D",
-       "11N12M1I34M12N"
-    };
-
-    @DataProvider(name= "UnsupportedCigarOperatorDataProvider")
-    public Iterator<Object[]> unsupportedOperatorDataProvider(final Method testMethod) {
-        final CigarOperatorTest a = resolveCigarOperatorTestAnnotation(testMethod);
-        final List<Object[]> result = new LinkedList<Object[]>();
-        for (final String cigarString : TEST_CIGARS) {
-            if (a == null || a.value().appliesTo(cigarString)) {
-                result.add(new Object[] { cigarString });
-            }
-        }
-        return result.iterator();
-    }
-
-    /**
-     * Gets the most specific {@link CigarOperatorTest} annotation for the
-     * signature of the test method provided.
-     * <p/>
-     * This in-house implementation is required due to the fact that method
-     * annotations do not have inheritance.
-     *
-     * @param m targeted test method.
-     * @return <code>null</code> if there is no {@link CigarOperatorTest}
-     * annotation in this or overridden methods.
-     */
-    private CigarOperatorTest resolveCigarOperatorTestAnnotation(final Method m) {
-       CigarOperatorTest res = m.getAnnotation(CigarOperatorTest.class);
-       if (res != null) {
-           return res;
-       }
-       Class<?> c = this.getClass();
-       Class<?> p = c.getSuperclass();
-       while (p != null && p != Object.class) {
-           try {
-             final Method met = p.getDeclaredMethod(m.getName(),
-                     m.getParameterTypes());
-             res = met.getAnnotation(CigarOperatorTest.class);
-             if (res != null) {
-                 break;
-             }
-           } catch (NoSuchMethodException e) {
-             // Its ok; nothing to do here, just keep looking.
-           }
-           c = p;
-           p = c.getSuperclass();
-       }
-       return res;
-    }
-
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/filters/NDNCigarReadTransformerUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/filters/NDNCigarReadTransformerUnitTest.java
deleted file mode 100644
index beb4123..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/filters/NDNCigarReadTransformerUnitTest.java
+++ /dev/null
@@ -1,70 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.filters;
-
-import htsjdk.samtools.Cigar;
-import org.broadinstitute.gatk.utils.sam.CigarUtils;
-import org.testng.Assert;
-import org.testng.annotations.BeforeClass;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
-
-/**
- * @author ami
- * @since 04/22/14
- */
-public class NDNCigarReadTransformerUnitTest {
-
-
-    @DataProvider(name = "filteringIteratorTestData")
-    public String[][] getFilteringIteratorTestData() {
-        return new String[][] {
-                {"1M1N1N1M","1M1N1N1M"},           // NN elements
-                {"1M1N1D4M","1M1N1D4M"},           // ND
-                {"1M1N3M","1M1N3M"},               // N
-                {"1M1N2I1N3M","1M1N2I1N3M"},       // NIN
-                {"1M1N3D2N1M","1M6N1M"},
-                {"1M2N2D2N1M1D3N1D1N1M2H","1M6N1M1D5N1M2H"},
-                {"1H2S1M1N3D2N1M","1H2S1M6N1M"},
-                {"10M628N2D203N90M","10M833N90M"}
-        };
-    }
-
-    NDNCigarReadTransformer filter;
-
-    @BeforeClass
-    public void init() {
-        filter = new NDNCigarReadTransformer();
-    }
-
-    @Test(dataProvider = "filteringIteratorTestData")
-    public void testCigarRefactoring (final String originalCigarString, final String expectedString) {
-        Cigar originalCigar = CigarUtils.cigarFromString(originalCigarString);
-        String actualString = filter.refactorNDNtoN(originalCigar).toString();
-        Assert.assertEquals(actualString, expectedString, "ciagr string "+ originalCigarString+" should become: "+expectedString+" but got: "+actualString);
-    }
-
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/filters/ReadFilterTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/filters/ReadFilterTest.java
deleted file mode 100644
index 0f61de2..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/filters/ReadFilterTest.java
+++ /dev/null
@@ -1,370 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.filters;
-
-import htsjdk.samtools.*;
-import org.broadinstitute.gatk.utils.BaseTest;
-import org.broadinstitute.gatk.engine.arguments.ValidationExclusion;
-import org.broadinstitute.gatk.engine.datasources.reads.SAMDataSource;
-import org.broadinstitute.gatk.engine.datasources.reads.SAMReaderID;
-import org.broadinstitute.gatk.engine.downsampling.DownsamplingMethod;
-import org.broadinstitute.gatk.engine.resourcemanagement.ThreadAllocation;
-import org.broadinstitute.gatk.utils.GenomeLocParser;
-import org.broadinstitute.gatk.utils.sam.ArtificialSAMUtils;
-import org.testng.annotations.AfterClass;
-import org.testng.annotations.BeforeClass;
-
-import java.util.*;
-
-/**
- * Class ReadBaseTest
- * <p/>
- * This is the base test class for read filter test classes.  All read
- * filter test cases should extend from this
- * class; it sets ups a header mock up to test read filtering.
- *
- * Feel free to override non-final method to modify the behavior
- * (i.e. change how read group id are formatted, or complete a header).
- *
- * <p/>
- * You can statically determine the number of read-group involved
- * in the test by calling {@link #ReadFilterTest(int)} in you constructor.
- * <p/>
- *
- * Notice that the same header object is shared by all test and
- * it is initialized by Junit (calling {@link #beforeClass()}.
- *
- * @author Valentin Ruano Rubio
- * @date May 23, 2013
- */
-public class ReadFilterTest extends BaseTest {
-
-    private static final int DEFAULT_READ_GROUP_COUNT = 5;
-    private static final int DEFAULT_READER_COUNT = 1;
-    private static final String DEFAULT_READ_GROUP_PREFIX = "ReadGroup";
-    private static final String DEFAULT_PLATFORM_UNIT_PREFIX = "Lane";
-    private static final String DEFAULT_SAMPLE_NAME_PREFIX = "Sample";
-    private static final String DEFAULT_PLATFORM_PREFIX = "Platform";
-    private static final int DEFAULT_CHROMOSOME_COUNT = 1;
-    private static final int DEFAULT_CHROMOSOME_START_INDEX = 1;
-    private static final int DEFAULT_CHROMOSOME_SIZE = 1000;
-    private static final String DEFAULT_SAM_FILE_FORMAT = "readfile-%3d.bam";
-
-    private final int groupCount;
-
-    private  SAMFileHeader header;
-
-    private SAMDataSource dataSource;
-
-    /**
-     * Constructs a new read-filter test providing the number of read
-     * groups in the file.
-     *
-     * @param groupCount number of read-group in the fictional SAM file,
-     *                   must be equal or greater than 1.
-     */
-    protected ReadFilterTest(final int groupCount) {
-        if (groupCount < 1) {
-            throw new IllegalArgumentException(
-                    "the read group count must at least be 1");
-        }
-        this.groupCount = groupCount;
-    }
-
-
-    /**
-     * Gets the data source.
-     *
-     * @throws IllegalStateException if the data source was not initialized
-     *          invoking {@link #beforeClass()}
-     * @return never <code>null</code>
-     */
-    protected final SAMDataSource getDataSource() {
-        checkDataSourceExists();
-        return dataSource;
-    }
-
-    /**
-     * Returns the mock-up SAM file header for testing.
-     *
-     * @throws IllegalStateException if the header was not initialized
-     *          invoking {@link #beforeClass()}
-     * @return never <code>null</code>
-     */
-    protected final SAMFileHeader getHeader() {
-        checkHeaderExists();
-        return header;
-    }
-
-    /**
-     * Construct a read filter test with the default number of groups
-     *  ({@link #DEFAULT_READ_GROUP_COUNT}.
-     */
-    public ReadFilterTest() {
-        this(DEFAULT_READ_GROUP_COUNT);
-    }
-
-    /**
-     * Return the number of read groups involved in the test
-     * @return <code>1</code> or greater.
-     */
-    protected final int getReadGroupCount() {
-        return groupCount;
-    }
-
-    /**
-     * Composes the Id for the read group given its index.
-     *
-     * This methods must return a unique distinct ID for each possible index and
-     * it must be the same value each time it is invoked.
-     *
-     * @param index the index of the targeted read group in the range
-     *              [1,{@link #getReadGroupCount()}]
-     * @return never <code>null</code> and must be unique to each possible
-     *         read group index.
-     */
-    protected String composeReadGroupId(final int index) {
-        checkReadGroupIndex(index);
-        return DEFAULT_READ_GROUP_PREFIX + index;
-    }
-
-    /**
-     * Composes the Platform name for the read group given its index.
-     *
-     * This method must always return the same value give an index.
-     *
-     * @param index the index of the targeted read group in the range
-     *              [1,{@link #getReadGroupCount()}]
-     * @return never <code>null</code>.
-     */
-    protected String composePlatformName(final int index) {
-        checkReadGroupIndex(index);
-        return DEFAULT_PLATFORM_PREFIX + (((index-1)%2)+1);
-    }
-
-
-    /**
-     * Composes the Platform unit name for the read group given its index.
-     *
-     * @param index the index of the targeted read group in the range
-     *              [1,{@link #getReadGroupCount()}]
-     * @return never <code>null</code>.
-     */
-    protected String composePlatformUnitName(final int index) {
-        checkReadGroupIndex(index);
-        return DEFAULT_PLATFORM_UNIT_PREFIX + (((index-1)%3)+1);
-    }
-
-
-
-    /**
-     * Checks the correctness of a given read group index.
-     *
-     * A correct index is any value in the range [1,{@link #getReadGroupCount()}].
-     *
-     * @param index the target index.
-     * @throws IllegalArgumentException if the input index is not correct.
-     */
-    protected final void checkReadGroupIndex(final int index) {
-        checkIndex(index,groupCount,"read group");
-    }
-
-
-    private void checkIndex(final int index, final int max, CharSequence name) {
-        if (index < 1 || index > max) {
-            throw new IllegalArgumentException(
-                    name + " index ("
-                    + index
-                    + ") is out of bounds [1," + max + "]");
-        }
-    }
-
-
-    /**
-     * Checks whether the header was initialized.
-     *
-     * @throws IllegalStateException if the header was not yet initialized.
-     */
-    protected final void checkHeaderExists() {
-        if (header == null) {
-            throw new IllegalArgumentException(
-                    "header has not been initialized;"
-                    + " beforeClass() was not invoked");
-        }
-    }
-
-    /**
-     * Checks whether the data source was initialized.
-     *
-     * @throws IllegalStateException if the data source was not yet initialized.
-     */
-    protected final void checkDataSourceExists() {
-        if (header == null) {
-            throw new IllegalArgumentException(
-                    "data source has not been initialized;"
-                            + " beforeClass() was not invoked");
-        }
-    }
-
-    /**
-     * Returns the ID for a read group given its index.
-     *
-     * @param index the index of the targeted read group in the range
-     *              [1,{@link #getReadGroupCount()}]
-     * @return never <code>null</code> and must be unique to each
-     *              possible read group index.
-     */
-    protected final String getReadGroupId(final int index) {
-        checkReadGroupIndex(index);
-        return getHeader().getReadGroups().get(index - 1).getReadGroupId();
-    }
-
-    /**
-     * Returns the platform name for a read group given its index.
-     *
-     * @param group the index of the targeted read group in the range
-     *              [1,{@link #getReadGroupCount()}]
-     * @return never <code>null</code>.
-     */
-    protected final String getPlatformName(final int group) {
-        checkReadGroupIndex(group);
-        return getHeader().getReadGroups().get(group - 1).getPlatform();
-    }
-
-    /**
-     * Returns the platform unit for a read group given its index.
-     *
-     * @param group the index of the targeted read group in the range
-     *              [1,{@link #getReadGroupCount()}]
-     * @return never <code>null</code>.
-     */
-    protected final String getPlatformUnit(final int group) {
-        checkReadGroupIndex(group);
-        return getHeader().getReadGroups().get(group - 1).getPlatformUnit();
-    }
-
-
-    /**
-     * Composes the mock up SAM file header.
-     *
-     * It must return an equivalent (equal) value each time it is invoked.
-     *
-     * @return never <code>null</code>.
-     */
-    protected SAMFileHeader composeHeader() {
-
-        return ArtificialSAMUtils.createArtificialSamHeader(
-                DEFAULT_CHROMOSOME_COUNT, DEFAULT_CHROMOSOME_START_INDEX,
-                DEFAULT_CHROMOSOME_SIZE);
-    }
-
-    @BeforeClass
-    public void beforeClass() {
-
-        header = composeHeader();
-        dataSource = composeDataSource();
-        final List<String> readGroupIDs = new ArrayList<String>();
-        final List<String> sampleNames = new ArrayList<String>();
-
-        for (int i = 1; i <= getReadGroupCount(); i++) {
-            final String readGroupId = composeReadGroupId(i);
-            readGroupIDs.add(readGroupId);
-            sampleNames.add(readGroupId);
-        }
-
-        ArtificialSAMUtils.createEnumeratedReadGroups(
-                header, readGroupIDs, sampleNames);
-
-        for (int i = 1; i <= getReadGroupCount(); i++) {
-            final String readGroupId = readGroupIDs.get(i-1);
-            final SAMReadGroupRecord groupRecord = header.getReadGroup(readGroupId);
-            groupRecord.setAttribute("PL", composePlatformName(i));
-            groupRecord.setAttribute("PU", composePlatformUnitName(i));
-        }
-
-    }
-
-    protected ValidationExclusion composeValidationExclusion() {
-        return new ValidationExclusion();
-    }
-
-    protected SAMDataSource composeDataSource() {
-        checkHeaderExists();
-        final Set<SAMReaderID> readerIDs = new HashSet<>(1);
-        final ThreadAllocation ta = new ThreadAllocation();
-        final Integer numFileHandles = 1; // I believe that any value would do but need to confirm.
-        final boolean useOriginalBaseQualities = true;
-        final ValidationStringency strictness = ValidationStringency.LENIENT;
-        final Integer readBufferSize = 1; // not relevant.
-        final DownsamplingMethod downsamplingMethod = DownsamplingMethod.NONE;
-        final ValidationExclusion exclusionList = composeValidationExclusion();
-        final Collection<ReadFilter> supplementalFilters = Collections.EMPTY_SET;
-        final boolean includeReadsWithDeletionAtLoci = true;
-
-        final GenomeLocParser glp = new GenomeLocParser(header.getSequenceDictionary());
-        final SAMDataSource res = new SAMDataSource(
-                readerIDs,
-                ta,
-                numFileHandles,
-                glp,
-                useOriginalBaseQualities,
-                strictness,
-                readBufferSize,
-                downsamplingMethod,
-                exclusionList,
-                supplementalFilters,
-                includeReadsWithDeletionAtLoci);
-
-        return res;
-    }
-
-    @AfterClass
-    public void afterClass() {
-        header = null;
-        dataSource = null;
-    }
-
-    /**
-     * Creates a read record.
-     *
-     * @param cigar the new record CIGAR.
-     * @param group the new record group index that must be in the range \
-     *              [1,{@link #getReadGroupCount()}]
-     * @param reference the reference sequence index (0-based)
-     * @param start the start position of the read alignment in the reference
-     *              (1-based)
-     * @return never <code>null</code>
-     */
-    protected SAMRecord createRead(final Cigar cigar, final int group, final int reference, final int start) {
-        final SAMRecord record = ArtificialSAMUtils.createArtificialRead(cigar);
-        record.setHeader(getHeader());
-        record.setAlignmentStart(start);
-        record.setReferenceIndex(reference);
-        record.setAttribute(SAMTag.RG.toString(), getReadGroupId(group));
-        return record;
-
-    }
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/filters/ReadGroupBlackListFilterUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/filters/ReadGroupBlackListFilterUnitTest.java
deleted file mode 100644
index 3a0fc6e..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/filters/ReadGroupBlackListFilterUnitTest.java
+++ /dev/null
@@ -1,247 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.filters;
-
-import org.testng.Assert;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-import org.broadinstitute.gatk.utils.sam.ArtificialSAMUtils;
-import org.testng.annotations.Test;
-
-import htsjdk.samtools.SAMRecord;
-import htsjdk.samtools.SAMReadGroupRecord;
-
-import java.util.List;
-import java.util.ArrayList;
-import java.util.Collections;
-
-public class ReadGroupBlackListFilterUnitTest extends ReadFilterTest {
-
-    @Test(expectedExceptions=ReviewedGATKException.class)
-    public void testBadFilter() {
-        List<String> badFilters = Collections.singletonList("bad");
-        new ReadGroupBlackListFilter(badFilters);
-    }
-    @Test(expectedExceptions=ReviewedGATKException.class)
-    public void testBadFilterTag() {
-        List<String> badFilters = Collections.singletonList("bad:filter");
-        new ReadGroupBlackListFilter(badFilters);
-    }
-
-    @Test(expectedExceptions=ReviewedGATKException.class)
-    public void testBadFilterFile() {
-        List<String> badFilters = Collections.singletonList("/foo/bar/rgbl.txt");
-        new ReadGroupBlackListFilter(badFilters);
-    }
-
-    @Test
-    public void testFilterReadGroup() {
-        SAMRecord filteredRecord = ArtificialSAMUtils.createArtificialRead(getHeader(), "readUno", 0, 1, 20);
-        filteredRecord.setAttribute("RG", getReadGroupId(1));
-
-        SAMRecord unfilteredRecord = ArtificialSAMUtils.createArtificialRead(getHeader(), "readDos", 0, 2, 20);
-        unfilteredRecord.setAttribute("RG", getReadGroupId(2));
-
-        List<String> filterList = new ArrayList<String>();
-        filterList.add("RG:" + getReadGroupId(1));
-
-        ReadGroupBlackListFilter filter = new ReadGroupBlackListFilter(filterList);
-        Assert.assertTrue(filter.filterOut(filteredRecord));
-        Assert.assertFalse(filter.filterOut(unfilteredRecord));
-    }
-
-    @Test
-    public void testFilterPlatformUnit() {
-        SAMRecord filteredRecord = ArtificialSAMUtils.createArtificialRead(getHeader(), "readUno", 0, 1, 20);
-        filteredRecord.setAttribute("RG", getReadGroupId(1));
-
-        SAMRecord unfilteredRecord = ArtificialSAMUtils.createArtificialRead(getHeader(), "readDos", 0, 2, 20);
-        unfilteredRecord.setAttribute("RG", getReadGroupId(2));
-
-        List<String> filterList = new ArrayList<String>();
-        filterList.add("PU:" + getPlatformUnit(1));
-
-        ReadGroupBlackListFilter filter = new ReadGroupBlackListFilter(filterList);
-        Assert.assertTrue(filter.filterOut(filteredRecord));
-        Assert.assertFalse(filter.filterOut(unfilteredRecord));
-    }
-
-    @Test
-    public void testFilterOutByReadGroup() {
-        int recordsPerGroup = 3;
-        List<SAMRecord> records = new ArrayList<SAMRecord>();
-        int alignmentStart = 0;
-        for (int x = 1; x <= getReadGroupCount(); x++) {
-            SAMReadGroupRecord groupRecord = getHeader().getReadGroup(getReadGroupId(x));
-            for (int y = 1; y <= recordsPerGroup; y++) {
-                SAMRecord record = ArtificialSAMUtils.createArtificialRead(getHeader(), "readUno", 0, ++alignmentStart, 20);
-                record.setAttribute("RG", groupRecord.getReadGroupId());
-                records.add(record);
-            }
-        }
-
-        List<String> filterList = new ArrayList<String>();
-        filterList.add("RG:" + getReadGroupId(1));
-        filterList.add("RG:" + getReadGroupId(3));
-
-        ReadGroupBlackListFilter filter = new ReadGroupBlackListFilter(filterList);
-        int filtered = 0;
-        int unfiltered = 0;
-        for (SAMRecord record : records) {
-            String readGroupName = record.getReadGroup().getReadGroupId();
-            if (filter.filterOut(record)) {
-                if (!filterList.contains("RG:" + readGroupName))
-                    Assert.fail("Read group " + readGroupName + " was filtered");
-                filtered++;
-            } else {
-                if (filterList.contains("RG:" + readGroupName))
-                    Assert.fail("Read group " + readGroupName + " was not filtered");
-                unfiltered++;
-            }
-        }
-
-        int filteredExpected = recordsPerGroup * 2;
-        int unfilteredExpected = recordsPerGroup * (getReadGroupCount() - 2);
-        Assert.assertEquals(filtered, filteredExpected, "Filtered");
-        Assert.assertEquals(unfiltered, unfilteredExpected, "Uniltered");
-    }
-
-    @Test
-    public void testFilterOutByAttribute() {
-        int recordsPerGroup = 3;
-        List<SAMRecord> records = new ArrayList<SAMRecord>();
-        int alignmentStart = 0;
-        for (int x = 1; x <= getReadGroupCount(); x++) {
-            SAMReadGroupRecord groupRecord = getHeader().getReadGroup(getReadGroupId(x));
-            for (int y = 1; y <= recordsPerGroup; y++) {
-                SAMRecord record = ArtificialSAMUtils.createArtificialRead(getHeader(), "readUno", 0, ++alignmentStart, 20);
-                record.setAttribute("RG", groupRecord.getReadGroupId());
-                records.add(record);
-            }
-        }
-
-        List<String> filterList = new ArrayList<String>();
-        filterList.add("PU:" + getPlatformUnit(1));
-
-        ReadGroupBlackListFilter filter = new ReadGroupBlackListFilter(filterList);
-        int filtered = 0;
-        int unfiltered = 0;
-        for (SAMRecord record : records) {
-            String platformUnit = (String) record.getReadGroup().getAttribute("PU");
-            if (filter.filterOut(record)) {
-                if (!filterList.contains("PU:" + platformUnit))
-                    Assert.fail("Platform unit " + platformUnit + " was filtered");
-                filtered++;
-            } else {
-                if (filterList.contains("PU:" + platformUnit))
-                    Assert.fail("Platform unit " + platformUnit + " was not filtered");
-                unfiltered++;
-            }
-        }
-
-        int filteredExpected = 6;
-        int unfilteredExpected = 9;
-        Assert.assertEquals(filtered, filteredExpected, "Filtered");
-        Assert.assertEquals(unfiltered, unfilteredExpected, "Uniltered");
-    }
-
-    @Test
-    public void testFilterOutByFile() {
-        int recordsPerGroup = 3;
-        List<SAMRecord> records = new ArrayList<SAMRecord>();
-        int alignmentStart = 0;
-        for (int x = 1; x <= getReadGroupCount(); x++) {
-            SAMReadGroupRecord groupRecord = getHeader().getReadGroup(getReadGroupId(x));
-            for (int y = 1; y <= recordsPerGroup; y++) {
-                SAMRecord record = ArtificialSAMUtils.createArtificialRead(getHeader(), "readUno", 0, ++alignmentStart, 20);
-                record.setAttribute("RG", groupRecord.getReadGroupId());
-                records.add(record);
-            }
-        }
-
-        List<String> filterList = new ArrayList<String>();
-        filterList.add(privateTestDir + "readgroupblacklisttest.txt");
-
-        ReadGroupBlackListFilter filter = new ReadGroupBlackListFilter(filterList);
-        int filtered = 0;
-        int unfiltered = 0;
-        for (SAMRecord record : records) {
-            String readGroup = record.getReadGroup().getReadGroupId();
-            if (filter.filterOut(record)) {
-                if (!("ReadGroup3".equals(readGroup) || "ReadGroup4".equals(readGroup)))
-                    Assert.fail("Read group " + readGroup + " was filtered");
-                filtered++;
-            } else {
-                if ("ReadGroup3".equals(readGroup) || "ReadGroup4".equals(readGroup))
-                    Assert.fail("Read group " + readGroup + " was not filtered");
-                unfiltered++;
-            }
-        }
-
-        int filteredExpected = recordsPerGroup * 2;
-        int unfilteredExpected = recordsPerGroup * (getReadGroupCount() - 2);
-        Assert.assertEquals(filtered, filteredExpected, "Filtered");
-        Assert.assertEquals(unfiltered, unfilteredExpected, "Uniltered");
-    }
-
-    @Test
-    public void testFilterOutByListFile() {
-        int recordsPerGroup = 3;
-        List<SAMRecord> records = new ArrayList<SAMRecord>();
-        int alignmentStart = 0;
-        for (int x = 1; x <= getReadGroupCount(); x++) {
-            SAMReadGroupRecord groupRecord = getHeader().getReadGroup(getReadGroupId(x));
-            for (int y = 1; y <= recordsPerGroup; y++) {
-                SAMRecord record = ArtificialSAMUtils.createArtificialRead(getHeader(), "readUno", 0, ++alignmentStart, 20);
-                record.setAttribute("RG", groupRecord.getReadGroupId());
-                records.add(record);
-            }
-        }
-
-        List<String> filterList = new ArrayList<String>();
-        filterList.add(privateTestDir + "readgroupblacklisttestlist.txt");
-
-        ReadGroupBlackListFilter filter = new ReadGroupBlackListFilter(filterList);
-        int filtered = 0;
-        int unfiltered = 0;
-        for (SAMRecord record : records) {
-            String readGroup = record.getReadGroup().getReadGroupId();
-            if (filter.filterOut(record)) {
-                if (!("ReadGroup3".equals(readGroup) || "ReadGroup4".equals(readGroup)))
-                    Assert.fail("Read group " + readGroup + " was filtered");
-                filtered++;
-            } else {
-                if ("ReadGroup3".equals(readGroup) || "ReadGroup4".equals(readGroup))
-                    Assert.fail("Read group " + readGroup + " was not filtered");
-                unfiltered++;
-            }
-        }
-
-        int filteredExpected = recordsPerGroup * 2;
-        int unfilteredExpected = recordsPerGroup * (getReadGroupCount() - 2);
-        Assert.assertEquals(filtered, filteredExpected, "Filtered");
-        Assert.assertEquals(unfiltered, unfilteredExpected, "Uniltered");
-    }
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/filters/UnsafeMalformedReadFilterUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/filters/UnsafeMalformedReadFilterUnitTest.java
deleted file mode 100644
index a00f0a0..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/filters/UnsafeMalformedReadFilterUnitTest.java
+++ /dev/null
@@ -1,50 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.filters;
-
-
-import org.broadinstitute.gatk.engine.arguments.ValidationExclusion;
-
-import java.util.Collections;
-
-
-/**
- * Tests for the {@link MalformedReadFilter} when the unsafe flag
- * {@link ValidationExclusion.TYPE#ALL} is set.
- *
- * @author Valentin Ruano-Rubio
- * @since 6/6/13
- */
-public class UnsafeMalformedReadFilterUnitTest extends AllowNCigarMalformedReadFilterUnitTest {
-
-
-    @Override
-    protected ValidationExclusion composeValidationExclusion() {
-        return new ValidationExclusion(Collections.singletonList(ValidationExclusion.TYPE.ALL));
-    }
-
-
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/io/OutputTrackerUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/io/OutputTrackerUnitTest.java
deleted file mode 100644
index 479e19e..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/io/OutputTrackerUnitTest.java
+++ /dev/null
@@ -1,84 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.io;
-
-import org.broadinstitute.gatk.engine.io.stubs.OutputStreamStub;
-import org.broadinstitute.gatk.utils.BaseTest;
-import org.broadinstitute.gatk.utils.exceptions.UserException;
-import org.testng.annotations.BeforeClass;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
-
-import java.io.File;
-
-public class OutputTrackerUnitTest extends BaseTest {
-
-    private final OutputTracker tracker = new DirectOutputTracker();
-    private File unwriteableDir = null;
-    private File untraversableDir = null;
-
-    @BeforeClass
-    public void createDirectories() {
-        unwriteableDir = new File("unwriteable");
-        unwriteableDir.deleteOnExit();
-        unwriteableDir.mkdir();
-        unwriteableDir.setWritable(false);
-
-        untraversableDir = new File("untraversable");
-        untraversableDir.deleteOnExit();
-        untraversableDir.mkdir();
-        untraversableDir.setExecutable(false);
-    }
-
-    @DataProvider(name = "BadOutputPaths")
-    public Object[][] makeBadOutputPaths() {
-        return new Object[][] {new String[] {"thisDirectoryDoesNotExist/stub.txt"},
-                new String[] {"/thisDirectoryDoesNotExist/dummy.txt"},
-                new String[] {unwriteableDir.getAbsolutePath()+"/dummy.txt"},
-                new String[] {untraversableDir.getAbsolutePath()+"/dummy.txt"}};
-    }
-
-    @DataProvider(name = "GoodOutputPaths")
-    public Object[][] makeGoodOutputPaths() {
-        return new Object[][] {new String[] {publicTestDir+"stub.txt"},
-                new String[] {"dummy.txt"}};
-    }
-
-    @Test(dataProvider = "BadOutputPaths", expectedExceptions = UserException.CouldNotCreateOutputFile.class)
-    public void testInvalidOutputPath(final String path) {
-        tracker.validateOutputPath(new OutputStreamStub(new File(path)));
-    }
-
-    @Test(dataProvider = "GoodOutputPaths")
-    public void testValidOutputPath(final String path) {
-        tracker.validateOutputPath(new OutputStreamStub(new File(path)));
-    }
-
-    @Test
-    public void testOutputPathWithNullFile() {
-        tracker.validateOutputPath(new OutputStreamStub(System.out));
-    }
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/iterators/BoundedReadIteratorUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/iterators/BoundedReadIteratorUnitTest.java
deleted file mode 100644
index 7c3aca2..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/iterators/BoundedReadIteratorUnitTest.java
+++ /dev/null
@@ -1,145 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.iterators;
-
-import static org.testng.Assert.fail;
-
-import htsjdk.samtools.SAMFileHeader;
-import htsjdk.samtools.SAMRecord;
-import htsjdk.samtools.reference.ReferenceSequenceFile;
-import htsjdk.samtools.reference.IndexedFastaSequenceFile;
-import org.broadinstitute.gatk.utils.BaseTest;
-import org.testng.Assert;
-import org.broadinstitute.gatk.utils.GenomeLocParser;
-
-import org.broadinstitute.gatk.utils.sam.ArtificialSAMUtils;
-
-import org.testng.annotations.BeforeMethod;
-
-import org.testng.annotations.Test;
-
-import java.io.File;
-import java.io.FileNotFoundException;
-import java.util.ArrayList;
-import java.util.Iterator;
-import java.util.List;
-
-
-
-/*
- * Copyright (c) 2009 The Broad Institute
- *
- * Permission is hereby granted, free of charge, to any person
- * obtaining a copy of this software and associated documentation
- * files (the "Software"), to deal in the Software without
- * restriction, including without limitation the rights to use,
- * copy, modify, merge, publish, distribute, sublicense, and/or sell
- * copies of the Software, and to permit persons to whom the
- * Software is furnished to do so, subject to the following
- * conditions:
- *
- * The above copyright notice and this permission notice shall be
- * included in all copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
- * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
- * OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
- * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
- * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
- * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
- * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
- * OTHER DEALINGS IN THE SOFTWARE.
- */
-
-/**
- * @author aaron
- * @version 1.0
- * @date Apr 14, 2009
- * <p/>
- * Class BoundedReadIteratorUnitTest
- * <p/>
- * tests for the bounded read iterator.
- */
-public class BoundedReadIteratorUnitTest extends BaseTest {
-
-    /** the file list and the fasta sequence */
-    private List<File> fl;
-    private ReferenceSequenceFile seq;
-
-    /**
-     * This function does the setup of our parser, before each method call.
-     * <p/>
-     * Called before every test case method.
-     */
-    @BeforeMethod
-    public void doForEachTest() throws FileNotFoundException {
-        fl = new ArrayList<File>();
-    }
-
-
-    /** Test out that we can shard the file and iterate over every read */
-    @Test
-    public void testBounding() {
-        logger.warn("Executing testBounding");
-        // total reads expected
-        final int expected = 20;
-        // bound by ten reads
-        BoundedReadIterator iter = new BoundedReadIterator(new testIterator(), expected);
-
-        int count = 0;
-        for (SAMRecord rec: iter) {
-            count++;
-        }
-
-        Assert.assertEquals(count, expected);
-    }
-}
-
-class testIterator implements GATKSAMIterator {
-    SAMFileHeader header;
-    testIterator() {
-        header = ArtificialSAMUtils.createArtificialSamHeader(1,1,2000);
-    }
-
-    public void close() {
-
-    }
-
-    public boolean hasNext() {
-        return true;
-    }
-
-    public SAMRecord next() {
-        return ArtificialSAMUtils.createArtificialRead(header,"blah",0,1,100);
-    }
-
-    public void remove() {
-    }
-
-    public Iterator<SAMRecord> iterator() {
-        return this;
-    }
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/iterators/GATKSAMIteratorAdapterUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/iterators/GATKSAMIteratorAdapterUnitTest.java
deleted file mode 100644
index 6cbd4fd..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/iterators/GATKSAMIteratorAdapterUnitTest.java
+++ /dev/null
@@ -1,176 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.iterators;
-
-import htsjdk.samtools.SAMRecord;
-import htsjdk.samtools.util.CloseableIterator;
-import org.broadinstitute.gatk.utils.BaseTest;
-import static org.testng.Assert.assertEquals;
-import org.testng.annotations.Test;
-
-import java.util.Iterator;
-
-/**
- *
- * User: aaron
- * Date: May 13, 2009
- * Time: 6:58:21 PM
- *
- * The Broad Institute
- * SOFTWARE COPYRIGHT NOTICE AGREEMENT 
- * This software and its documentation are copyright 2009 by the
- * Broad Institute/Massachusetts Institute of Technology. All rights are reserved.
- *
- * This software is supplied without any warranty or guaranteed support whatsoever. Neither
- * the Broad Institute nor MIT can be responsible for its use, misuse, or functionality.
- *
- */
-
-
-/**
- * @author aaron
- * @version 1.0
- * @date May 13, 2009
- * <p/>
- * Class GATKSAMIteratorTest
- * <p/>
- * Tests the GATKSAMIteratorAdapter class.
- */
-public class GATKSAMIteratorAdapterUnitTest extends BaseTest {
-
-    class MyTestIterator implements Iterator<SAMRecord> {
-
-        public int count = 0;
-
-        public MyTestIterator() {
-            count = 0;
-        }
-
-        public boolean hasNext() {
-            if (count < 100) {
-                ++count;
-                return true;
-            } else {
-                return false;
-            }
-        }
-
-        public SAMRecord next() {
-            return null;
-        }
-
-        public void remove() {
-            throw new UnsupportedOperationException("Unsupported");
-        }
-    }
-
-    class MyTestCloseableIterator implements CloseableIterator<SAMRecord> {
-        public int count = 0;
-
-        public MyTestCloseableIterator() {
-            count = 0;
-        }
-
-        public boolean hasNext() {
-            if (count < 100) {
-                ++count;
-                return true;
-            } else {
-                return false;
-            }
-        }
-
-        public SAMRecord next() {
-            return null;
-        }
-
-        public void remove() {
-            throw new UnsupportedOperationException("Unsupported");
-        }
-
-        public void close() {
-            count = -1;
-        }
-    }
-
-
-    @Test
-    public void testNormalIterator() {
-        final int COUNT = 100;
-        MyTestIterator it = new MyTestIterator();
-
-        GATKSAMIterator samIt = GATKSAMIteratorAdapter.adapt(it);
-        int countCheck = 0;
-        while (samIt.hasNext()) {
-            samIt.next();
-            ++countCheck;
-            //logger.warn("cnt = " + countCheck);
-        }
-
-        assertEquals(countCheck, COUNT);
-
-        assertEquals(countCheck, COUNT);
-    }
-
-    @Test
-    public void testCloseableIterator() {
-        final int COUNT = 100;
-
-        MyTestCloseableIterator it = new MyTestCloseableIterator();
-
-        GATKSAMIterator samIt = GATKSAMIteratorAdapter.adapt(it);
-
-        int countCheck = 0;
-        while (samIt.hasNext()) {
-            samIt.next();
-            ++countCheck;
-        }
-
-        assertEquals(countCheck, COUNT);
-    }
-
-    @Test
-    public void testCloseOnCloseableIterator() {
-        final int COUNT = 100;
-
-        MyTestCloseableIterator it = new MyTestCloseableIterator();
-        
-        GATKSAMIterator samIt = GATKSAMIteratorAdapter.adapt(it);
-
-
-        int countCheck = 0;
-        while (samIt.hasNext()) {
-            samIt.next();
-            ++countCheck;
-        }
-
-        assertEquals(countCheck, COUNT);
-
-        // check to see that the count get's set to -1
-        samIt.close();
-        assertEquals(it.count, -1);
-    }
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/iterators/ReadFormattingIteratorUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/iterators/ReadFormattingIteratorUnitTest.java
deleted file mode 100644
index c926d06..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/iterators/ReadFormattingIteratorUnitTest.java
+++ /dev/null
@@ -1,50 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.iterators;
-
-import htsjdk.samtools.*;
-import org.broadinstitute.gatk.utils.BaseTest;
-import org.broadinstitute.gatk.utils.sam.ArtificialSAMUtils;
-import org.testng.Assert;
-import org.testng.annotations.Test;
-
-import java.util.Arrays;
-
-
-public class ReadFormattingIteratorUnitTest extends BaseTest {
-
-    @Test
-    public void testIteratorConsolidatesCigars() {
-        final Cigar unconsolidatedCigar = TextCigarCodec.getSingleton().decode("3M0M5M0M");
-        final SAMRecord unconsolidatedRead = ArtificialSAMUtils.createArtificialRead(unconsolidatedCigar);
-
-        final GATKSAMIterator readIterator = GATKSAMIteratorAdapter.adapt(Arrays.asList(unconsolidatedRead).iterator());
-        final ReadFormattingIterator formattingIterator = new ReadFormattingIterator(readIterator, false, (byte)-1);
-        final SAMRecord postIterationRead = formattingIterator.next();
-
-        Assert.assertEquals(postIterationRead.getCigarString(), "8M", "Cigar 3M0M5M0M not consolidated correctly by ReadFormattingIterator");
-    }
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/iterators/VerifyingSamIteratorUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/iterators/VerifyingSamIteratorUnitTest.java
deleted file mode 100644
index 371f94f..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/iterators/VerifyingSamIteratorUnitTest.java
+++ /dev/null
@@ -1,128 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.iterators;
-
-import htsjdk.samtools.SAMFileHeader;
-import htsjdk.samtools.SAMRecord;
-import htsjdk.samtools.SAMSequenceDictionary;
-import htsjdk.samtools.SAMSequenceRecord;
-import org.broadinstitute.gatk.utils.exceptions.UserException;
-import org.broadinstitute.gatk.utils.sam.ArtificialSAMUtils;
-import org.testng.Assert;
-import org.testng.annotations.BeforeClass;
-import org.testng.annotations.Test;
-
-import java.util.Arrays;
-import java.util.List;
-
-/**
- * Created by IntelliJ IDEA.
- * User: mhanna
- * Date: Mar 2, 2011
- * Time: 9:48:10 PM
- * To change this template use File | Settings | File Templates.
- */
-public class VerifyingSamIteratorUnitTest {
-    private SAMFileHeader samFileHeader;
-
-    @BeforeClass
-    public void init() {
-        SAMSequenceDictionary sequenceDictionary = new SAMSequenceDictionary();
-        sequenceDictionary.addSequence(new SAMSequenceRecord("1",500));
-        sequenceDictionary.addSequence(new SAMSequenceRecord("2",500));
-
-        samFileHeader = new SAMFileHeader();
-        samFileHeader.setSequenceDictionary(sequenceDictionary);
-    }
-
-    @Test
-    public void testSortedReadsBasic() {
-        SAMRecord read1 = ArtificialSAMUtils.createArtificialRead(samFileHeader,"read1",getContig(0).getSequenceIndex(),1,10);
-        SAMRecord read2 = ArtificialSAMUtils.createArtificialRead(samFileHeader,"read2",getContig(0).getSequenceIndex(),2,10);
-        List<SAMRecord> reads = Arrays.asList(read1,read2);
-
-        VerifyingSamIterator iterator = new VerifyingSamIterator(GATKSAMIteratorAdapter.adapt(reads.iterator()));
-
-        Assert.assertTrue(iterator.hasNext(),"Insufficient reads");
-        Assert.assertSame(iterator.next(),read1,"Incorrect read in read 1 position");
-        Assert.assertTrue(iterator.hasNext(),"Insufficient reads");
-        Assert.assertSame(iterator.next(),read2,"Incorrect read in read 2 position");
-        Assert.assertFalse(iterator.hasNext(),"Too many reads in iterator");
-    }
-
-    @Test
-    public void testSortedReadsAcrossContigs() {
-        SAMRecord read1 = ArtificialSAMUtils.createArtificialRead(samFileHeader,"read1",getContig(0).getSequenceIndex(),2,10);
-        SAMRecord read2 = ArtificialSAMUtils.createArtificialRead(samFileHeader,"read2",getContig(1).getSequenceIndex(),1,10);
-        List<SAMRecord> reads = Arrays.asList(read1,read2);
-
-        VerifyingSamIterator iterator = new VerifyingSamIterator(GATKSAMIteratorAdapter.adapt(reads.iterator()));
-
-        Assert.assertTrue(iterator.hasNext(),"Insufficient reads");
-        Assert.assertSame(iterator.next(),read1,"Incorrect read in read 1 position");
-        Assert.assertTrue(iterator.hasNext(),"Insufficient reads");
-        Assert.assertSame(iterator.next(),read2,"Incorrect read in read 2 position");
-        Assert.assertFalse(iterator.hasNext(),"Too many reads in iterator");
-    }
-
-    @Test(expectedExceptions=UserException.MissortedBAM.class)
-    public void testImproperlySortedReads() {
-        SAMRecord read1 = ArtificialSAMUtils.createArtificialRead(samFileHeader,"read1",getContig(0).getSequenceIndex(),2,10);
-        SAMRecord read2 = ArtificialSAMUtils.createArtificialRead(samFileHeader,"read2",getContig(0).getSequenceIndex(),1,10);
-        List<SAMRecord> reads = Arrays.asList(read1,read2);
-
-        VerifyingSamIterator iterator = new VerifyingSamIterator(GATKSAMIteratorAdapter.adapt(reads.iterator()));
-
-        Assert.assertTrue(iterator.hasNext(),"Insufficient reads");
-        Assert.assertSame(iterator.next(),read1,"Incorrect read in read 1 position");
-        Assert.assertTrue(iterator.hasNext(),"Insufficient reads");
-
-        // Should trigger MissortedBAM exception.
-        iterator.next();
-    }
-
-    @Test(expectedExceptions=UserException.MalformedBAM.class)
-    public void testInvalidAlignment() {
-        // Create an invalid alignment state.
-        SAMRecord read1 = ArtificialSAMUtils.createArtificialRead(samFileHeader,"read1",getContig(0).getSequenceIndex(),1,10);
-        SAMRecord read2 = ArtificialSAMUtils.createArtificialRead(samFileHeader,"read1",getContig(0).getSequenceIndex(),2,10);
-        read1.setReferenceIndex(SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX);
-        List<SAMRecord> reads = Arrays.asList(read1,read2);
-
-        VerifyingSamIterator iterator = new VerifyingSamIterator(GATKSAMIteratorAdapter.adapt(reads.iterator()));
-
-        Assert.assertTrue(iterator.hasNext(),"Insufficient reads");
-        Assert.assertSame(iterator.next(),read1,"Incorrect read in read 1 position");
-        Assert.assertTrue(iterator.hasNext(),"Insufficient reads");
-
-        // Should trigger MalformedBAM exception.
-        iterator.next();
-    }
-
-    private SAMSequenceRecord getContig(final int contigIndex) {
-        return samFileHeader.getSequence(contigIndex);            
-    }
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/phonehome/GATKRunReportUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/phonehome/GATKRunReportUnitTest.java
deleted file mode 100644
index d7b9b3d..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/phonehome/GATKRunReportUnitTest.java
+++ /dev/null
@@ -1,310 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.phonehome;
-
-import org.broadinstitute.gatk.utils.BaseTest;
-import org.broadinstitute.gatk.engine.GenomeAnalysisEngine;
-import org.broadinstitute.gatk.engine.arguments.GATKArgumentCollection;
-import org.broadinstitute.gatk.engine.contexts.AlignmentContext;
-import org.broadinstitute.gatk.engine.contexts.ReferenceContext;
-import org.broadinstitute.gatk.engine.refdata.RefMetaDataTracker;
-import org.broadinstitute.gatk.engine.walkers.ActiveRegionWalker;
-import org.broadinstitute.gatk.engine.walkers.Walker;
-import org.broadinstitute.gatk.tools.walkers.qc.CountLoci;
-import org.broadinstitute.gatk.tools.walkers.qc.CountRODs;
-import org.broadinstitute.gatk.tools.walkers.qc.CountReads;
-import org.broadinstitute.gatk.utils.Utils;
-import org.broadinstitute.gatk.utils.activeregion.ActiveRegion;
-import org.broadinstitute.gatk.utils.activeregion.ActivityProfileState;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-import org.broadinstitute.gatk.utils.exceptions.UserException;
-import org.jets3t.service.S3Service;
-import org.jets3t.service.S3ServiceException;
-import org.jets3t.service.ServiceException;
-import org.jets3t.service.model.S3Object;
-import org.testng.Assert;
-import org.testng.annotations.BeforeClass;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
-
-import java.io.ByteArrayInputStream;
-import java.io.ByteArrayOutputStream;
-import java.io.FileInputStream;
-import java.io.InputStream;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-import java.util.Properties;
-
-public class GATKRunReportUnitTest extends BaseTest {
-    private final static boolean DEBUG = false;
-    private static final long S3_PUT_TIMEOUT_IN_MILLISECONDS_FOR_TESTING = 30 * 1000;
-    private static final String AWS_DOWNLOADER_CREDENTIALS_PROPERTIES_FILE = privateTestDir + "phonehome/awsDownloaderCredentials.properties";
-
-    private Walker walker;
-    private Exception exception;
-    private GenomeAnalysisEngine engine;
-    private String downloaderAccessKey;
-    private String downloaderSecretKey;
-
-    @BeforeClass
-    public void setup() throws Exception {
-        walker = new CountReads();
-        exception = new IllegalArgumentException("javaException");
-        engine = new GenomeAnalysisEngine();
-        engine.setArguments(new GATKArgumentCollection());
-
-        Properties awsProperties = new Properties();
-        awsProperties.load(new FileInputStream(AWS_DOWNLOADER_CREDENTIALS_PROPERTIES_FILE));
-        downloaderAccessKey = awsProperties.getProperty("accessKey");
-        downloaderSecretKey = awsProperties.getProperty("secretKey");
-    }
-
-    @Test(enabled = ! DEBUG)
-    public void testAWSKeysAreValid() {
-        // throws an exception if they aren't
-        GATKRunReport.checkAWSAreValid();
-    }
-
-    @Test(enabled = ! DEBUG)
-    public void testAccessKey() throws Exception {
-        testAWSKey(GATKRunReport.getAWSUploadAccessKey(), GATKRunReport.AWS_ACCESS_KEY_MD5);
-    }
-
-    @Test(enabled = ! DEBUG)
-    public void testSecretKey() throws Exception {
-        testAWSKey(GATKRunReport.getAWSUploadSecretKey(), GATKRunReport.AWS_SECRET_KEY_MD5);
-    }
-
-    private void testAWSKey(final String accessKey, final String expectedMD5) throws Exception {
-        Assert.assertNotNull(accessKey, "AccessKey should not be null");
-        final String actualmd5 = Utils.calcMD5(accessKey);
-        Assert.assertEquals(actualmd5, expectedMD5);
-    }
-
-    @DataProvider(name = "GATKReportCreationTest")
-    public Object[][] makeGATKReportCreationTest() {
-        List<Object[]> tests = new ArrayList<Object[]>();
-
-        final Walker readWalker = new CountReads();
-        final Walker lociWalker = new CountLoci();
-        final Walker rodWalker = new CountRODs();
-        final Walker artWalker = new RunReportDummyActiveRegionWalker();
-
-        final Exception noException = null;
-        final Exception javaException = new IllegalArgumentException("javaException");
-        final Exception stingException = new ReviewedGATKException("GATKException");
-        final Exception userException = new UserException("userException");
-
-        final GenomeAnalysisEngine engine = new GenomeAnalysisEngine();
-        engine.setArguments(new GATKArgumentCollection());
-
-        for ( final Walker walker : Arrays.asList(readWalker, lociWalker, rodWalker, artWalker) ) {
-            for ( final Exception exception : Arrays.asList(noException,  javaException, stingException, userException) ) {
-                tests.add(new Object[]{walker, exception, engine});
-            }
-        }
-
-        return tests.toArray(new Object[][]{});
-    }
-
-    @Test(enabled = !DEBUG, dataProvider = "GATKReportCreationTest")
-    public void testGATKReportCreationReadingAndWriting(final Walker walker, final Exception exception, final GenomeAnalysisEngine engine) throws Exception {
-        final GATKRunReport report = new GATKRunReport(walker, exception, engine, GATKRunReport.PhoneHomeOption.STDOUT);
-        final ByteArrayOutputStream captureStream = new ByteArrayOutputStream();
-        final boolean succeeded = report.postReportToStream(captureStream);
-        Assert.assertTrue(succeeded, "Failed to write report to stream");
-        Assert.assertFalse(report.exceptionOccurredDuringPost(), "Post succeeded but report says it failed");
-        Assert.assertNull(report.getErrorMessage(), "Post succeeded but there was an error message");
-        Assert.assertNull(report.getErrorThrown(), "Post succeeded but there was an error message");
-        final InputStream readStream = new ByteArrayInputStream(captureStream.toByteArray());
-
-        GATKRunReport deserialized = null;
-        try {
-            deserialized = GATKRunReport.deserializeReport(readStream);
-        } catch ( Exception e ) {
-            final String reportString = new String(captureStream.toByteArray());
-            Assert.fail("Failed to deserialize GATK report " + reportString + " with exception " + e);
-        }
-
-        if ( deserialized != null )
-            Assert.assertEquals(report, deserialized);
-    }
-
-    @DataProvider(name = "GATKAWSReportMode")
-    public Object[][] makeGATKAWSReportMode() {
-        List<Object[]> tests = new ArrayList<Object[]>();
-
-        for ( final GATKRunReport.AWSMode mode : GATKRunReport.AWSMode.values() ) {
-            tests.add(new Object[]{mode});
-        }
-
-        return tests.toArray(new Object[][]{});
-    }
-
-    // Will fail with timeout if AWS time out isn't working
-    // Will fail with exception if AWS doesn't protect itself from errors
-    @Test(enabled = ! DEBUG, dataProvider = "GATKAWSReportMode", timeOut = S3_PUT_TIMEOUT_IN_MILLISECONDS_FOR_TESTING * 2)
-    public void testAWS(final GATKRunReport.AWSMode awsMode) {
-        logger.warn("Starting testAWS mode=" + awsMode);
-
-        // Use a shorter timeout than usual when we're testing GATKRunReport.AWSMode.TIMEOUT
-        final long thisTestS3Timeout = awsMode == GATKRunReport.AWSMode.TIMEOUT ? 30 * 1000 : S3_PUT_TIMEOUT_IN_MILLISECONDS_FOR_TESTING;
-        final GATKRunReport report = new GATKRunReport(walker, exception, engine, GATKRunReport.PhoneHomeOption.AWS, thisTestS3Timeout);
-        report.sendAWSToTestBucket();
-        report.setAwsMode(awsMode);
-        final S3Object s3Object = report.postReportToAWSS3();
-
-        if ( awsMode == GATKRunReport.AWSMode.NORMAL ) {
-            Assert.assertNotNull(s3Object, "Upload to AWS failed, s3Object was null. error was " + report.formatError());
-            Assert.assertFalse(report.exceptionOccurredDuringPost(), "The upload should have succeeded but the report says it didn't.  Error was " + report.formatError());
-            Assert.assertNull(report.getErrorMessage(), "Report succeeded but an error message was found");
-            Assert.assertNull(report.getErrorThrown(), "Report succeeded but an thrown error was found");
-            try {
-                final GATKRunReport deserialized = GATKRunReport.deserializeReport(downloaderAccessKey, downloaderSecretKey, report.getS3ReportBucket(), s3Object);
-                Assert.assertEquals(report, deserialized);
-                deleteFromS3(report);
-            } catch ( Exception e ) {
-                Assert.fail("Failed to read, deserialize, or delete GATK report " + s3Object.getName() + " with exception " + e);
-            }
-        } else {
-            Assert.assertNull(s3Object, "AWS upload should have failed for mode " + awsMode + " but got non-null s3 object back " + s3Object + " error was " + report.formatError());
-            Assert.assertTrue(report.exceptionOccurredDuringPost(), "S3 object was null but the report says that the upload succeeded");
-            Assert.assertNotNull(report.getErrorMessage(), "Report succeeded but an error message wasn't found");
-            if ( awsMode == GATKRunReport.AWSMode.FAIL_WITH_EXCEPTION )
-                Assert.assertNotNull(report.getErrorThrown());
-        }
-    }
-
-    private void deleteFromS3(final GATKRunReport report) throws Exception {
-        final S3Service s3Service = GATKRunReport.initializeAWSService(downloaderAccessKey, downloaderSecretKey);
-        // Retrieve the whole data object we created previously
-        s3Service.deleteObject(report.getS3ReportBucket(), report.getReportFileName());
-    }
-
-    @DataProvider(name = "PostReportByType")
-    public Object[][] makePostReportByType() {
-        List<Object[]> tests = new ArrayList<Object[]>();
-
-        for ( final GATKRunReport.PhoneHomeOption et : GATKRunReport.PhoneHomeOption.values() ) {
-            tests.add(new Object[]{et});
-        }
-
-        return tests.toArray(new Object[][]{});
-    }
-
-    @Test(enabled = ! DEBUG, dataProvider = "PostReportByType", timeOut = S3_PUT_TIMEOUT_IN_MILLISECONDS_FOR_TESTING * 2)
-    public void testPostReportByType(final GATKRunReport.PhoneHomeOption type) {
-        final GATKRunReport report = new GATKRunReport(walker, exception, engine, GATKRunReport.PhoneHomeOption.AWS, S3_PUT_TIMEOUT_IN_MILLISECONDS_FOR_TESTING);
-        Assert.assertFalse(report.exceptionOccurredDuringPost(), "An exception occurred during posting the report");
-        final boolean succeeded = report.postReport(type);
-
-        if ( type == GATKRunReport.PhoneHomeOption.NO_ET )
-            Assert.assertFalse(succeeded, "NO_ET option shouldn't write a report");
-        else {
-            Assert.assertTrue(succeeded, "Any non NO_ET option should succeed in writing a report");
-
-            if ( type == GATKRunReport.PhoneHomeOption.STDOUT ) {
-                // nothing to do
-            } else {
-                // must have gone to AWS
-                try {
-                    Assert.assertTrue(report.wentToAWS(), "The report should have gone to AWS but the report says it wasn't");
-                    deleteFromS3(report);
-                } catch ( Exception e ) {
-                    Assert.fail("Failed delete GATK report " + report.getReportFileName() + " with exception " + e);
-                }
-            }
-        }
-    }
-
-    public interface S3Op {
-        public void apply() throws ServiceException;
-    }
-
-    // Will fail with timeout if AWS time out isn't working
-    // Will fail with exception if AWS doesn't protect itself from errors
-    @Test(timeOut = S3_PUT_TIMEOUT_IN_MILLISECONDS_FOR_TESTING * 2)
-    public void testAWSPublicKeyHasAccessControls() throws Exception {
-        final GATKRunReport report = new GATKRunReport(walker, exception, engine, GATKRunReport.PhoneHomeOption.AWS, S3_PUT_TIMEOUT_IN_MILLISECONDS_FOR_TESTING);
-        report.sendAWSToTestBucket();
-        final S3Object s3Object = report.postReportToAWSS3();
-        Assert.assertNotNull(s3Object, "Upload to AWS failed, s3Object was null. error was " + report.formatError());
-
-        // create a service with the public key, and make sure it cannot list or delete
-        final S3Service s3Service = GATKRunReport.initializeAWSService(GATKRunReport.getAWSUploadAccessKey(), GATKRunReport.getAWSUploadSecretKey());
-        assertOperationNotAllowed("listAllBuckets", new S3Op() {
-            @Override
-            public void apply() throws S3ServiceException {
-                s3Service.listAllBuckets();
-            }
-        });
-        assertOperationNotAllowed("listBucket", new S3Op() {
-            @Override
-            public void apply() throws S3ServiceException { s3Service.listObjects(report.getS3ReportBucket()); }
-        });
-        assertOperationNotAllowed("createBucket", new S3Op() {
-            @Override
-            public void apply() throws S3ServiceException { s3Service.createBucket("ShouldNotCreate"); }
-        });
-        assertOperationNotAllowed("deleteObject", new S3Op() {
-            @Override
-            public void apply() throws ServiceException { s3Service.deleteObject(report.getS3ReportBucket(), report.getReportFileName()); }
-        });
-    }
-
-    private void assertOperationNotAllowed(final String name, final S3Op op) {
-        try {
-            op.apply();
-            // only gets here if the operation was successful
-            Assert.fail("Operation " + name + " ran successfully but we expected to it fail");
-        } catch ( ServiceException e ) {
-            Assert.assertEquals(e.getErrorCode(), "AccessDenied");
-        }
-    }
-
-    class RunReportDummyActiveRegionWalker extends ActiveRegionWalker<Integer, Integer> {
-        @Override
-        public ActivityProfileState isActive(RefMetaDataTracker tracker, ReferenceContext ref, AlignmentContext context) {
-            return new ActivityProfileState(ref.getLocus(), 0.0);
-        }
-
-        @Override
-        public Integer map(ActiveRegion activeRegion, RefMetaDataTracker metaDataTracker) {
-            return 0;
-        }
-
-        @Override
-        public Integer reduceInit() {
-            return 0;
-        }
-
-        @Override
-        public Integer reduce(Integer value, Integer sum) {
-            return 0;
-        }
-    }
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/refdata/RefMetaDataTrackerUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/refdata/RefMetaDataTrackerUnitTest.java
deleted file mode 100644
index f25ab8d..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/refdata/RefMetaDataTrackerUnitTest.java
+++ /dev/null
@@ -1,290 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.refdata;
-
-import htsjdk.samtools.SAMFileHeader;
-import org.apache.log4j.Logger;
-import htsjdk.tribble.Feature;
-import org.broadinstitute.gatk.utils.BaseTest;
-import org.broadinstitute.gatk.utils.commandline.RodBinding;
-import org.broadinstitute.gatk.utils.commandline.Tags;
-import org.broadinstitute.gatk.engine.contexts.ReferenceContext;
-import org.broadinstitute.gatk.utils.codecs.table.TableFeature;
-import org.broadinstitute.gatk.engine.refdata.utils.GATKFeature;
-import org.broadinstitute.gatk.engine.refdata.utils.RODRecordList;
-import org.broadinstitute.gatk.utils.GenomeLoc;
-import org.broadinstitute.gatk.utils.GenomeLocParser;
-import org.broadinstitute.gatk.utils.sam.ArtificialSAMUtils;
-import htsjdk.variant.variantcontext.Allele;
-import htsjdk.variant.variantcontext.VariantContext;
-import htsjdk.variant.variantcontext.VariantContextBuilder;
-import org.testng.Assert;
-import org.testng.annotations.*;
-import java.util.*;
-import java.util.List;
-
-public class RefMetaDataTrackerUnitTest {
-    final protected static Logger logger = Logger.getLogger(RefMetaDataTrackerUnitTest.class);
-    private static SAMFileHeader header;
-    private ReferenceContext context;
-    private GenomeLocParser genomeLocParser;
-    private GenomeLoc locus;
-    private final static int START_POS = 10;
-    Allele A,C,G,T;
-    VariantContext AC_SNP, AG_SNP, AT_SNP;
-    TableFeature span10_10, span1_20, span10_20;
-
-    @BeforeClass
-    public void beforeClass() {
-        header = ArtificialSAMUtils.createArtificialSamHeader(1, 1, 100);
-        genomeLocParser = new GenomeLocParser(header.getSequenceDictionary());
-        locus = genomeLocParser.createGenomeLoc("chr1", START_POS, START_POS);
-        context = new ReferenceContext(genomeLocParser, locus, (byte)'A');
-        A = Allele.create("A", true);
-        C = Allele.create("C");
-        G = Allele.create("G");
-        T = Allele.create("T");
-        AC_SNP = new VariantContextBuilder("x", "chr1", START_POS, START_POS, Arrays.asList(A, C)).make();
-        AG_SNP = new VariantContextBuilder("x", "chr1", START_POS, START_POS, Arrays.asList(A, G)).make();
-        AT_SNP = new VariantContextBuilder("x", "chr1", START_POS, START_POS, Arrays.asList(A, T)).make();
-        span10_10 = makeSpan(10, 10);
-        span1_20 = makeSpan(1, 20);
-        span10_20 = makeSpan(10, 20);
-    }
-
-    @BeforeMethod
-    public void reset() {
-        RodBinding.resetNameCounter();
-    }
-
-    private class MyTest extends BaseTest.TestDataProvider {
-        public RODRecordList AValues, BValues;
-
-        private MyTest(Class c, final List<? extends Feature> AValues, final List<? extends Feature> BValues) {
-            super(c);
-            this.AValues = AValues == null ? null : makeRODRecord("A", AValues);
-            this.BValues = BValues == null ? null : makeRODRecord("B", BValues);
-        }
-
-        private MyTest(final List<? extends Feature> AValues, final List<? extends Feature> BValues) {
-            super(MyTest.class);
-            this.AValues = AValues == null ? null : makeRODRecord("A", AValues);
-            this.BValues = BValues == null ? null : makeRODRecord("B", BValues);
-        }
-
-        @Override
-        public String toString() {
-            return String.format("A=%s, B=%s", AValues, BValues);
-        }
-
-        private final RODRecordList makeRODRecord(String name, List<? extends Feature> features) {
-            List<GATKFeature> x = new ArrayList<GATKFeature>();
-            for ( Feature f : features )
-                x.add(new GATKFeature.TribbleGATKFeature(genomeLocParser, f, name));
-            return new RODRecordListImpl(name, x, locus);
-        }
-
-        public List<GATKFeature> expected(String name) {
-            if ( name.equals("A+B") ) return allValues();
-            if ( name.equals("A") ) return expectedAValues();
-            if ( name.equals("B") ) return expectedBValues();
-            throw new RuntimeException("FAIL");
-        }
-
-        public List<GATKFeature> allValues() {
-            List<GATKFeature> x = new ArrayList<GATKFeature>();
-            x.addAll(expectedAValues());
-            x.addAll(expectedBValues());
-            return x;
-        }
-
-        public List<GATKFeature> expectedAValues() {
-            return AValues == null ? Collections.<GATKFeature>emptyList() : AValues;
-        }
-
-        public List<GATKFeature> expectedBValues() {
-            return BValues == null ? Collections.<GATKFeature>emptyList() : BValues;
-        }
-
-        public RefMetaDataTracker makeTracker() {
-            List<RODRecordList> x = new ArrayList<RODRecordList>();
-            if ( AValues != null ) x.add(AValues);
-            if ( BValues != null ) x.add(BValues);
-            return new RefMetaDataTracker(x);
-        }
-
-        public int nBoundTracks() {
-            int n = 0;
-            if ( AValues != null ) n++;
-            if ( BValues != null ) n++;
-            return n;
-        }
-    }
-
-    private final TableFeature makeSpan(int start, int stop) {
-        return new TableFeature(genomeLocParser.createGenomeLoc("chr1", start, stop),
-                Collections.<String>emptyList(), Collections.<String>emptyList());
-    }
-
-    @DataProvider(name = "tests")
-    public Object[][] createTests() {
-        new MyTest(null, null);
-        new MyTest(Arrays.asList(AC_SNP), null);
-        new MyTest(Arrays.asList(AC_SNP, AT_SNP), null);
-        new MyTest(Arrays.asList(AC_SNP), Arrays.asList(AG_SNP));
-        new MyTest(Arrays.asList(AC_SNP, AT_SNP), Arrays.asList(AG_SNP));
-        new MyTest(Arrays.asList(AC_SNP, AT_SNP), Arrays.asList(span10_10));
-        new MyTest(Arrays.asList(AC_SNP, AT_SNP), Arrays.asList(span10_10, span10_20));
-        new MyTest(Arrays.asList(AC_SNP, AT_SNP), Arrays.asList(span10_10, span10_20, span1_20));
-
-        // for requires starts
-        new MyTest(Arrays.asList(span1_20), null);
-        new MyTest(Arrays.asList(span10_10, span10_20), null);
-        new MyTest(Arrays.asList(span10_10, span10_20, span1_20), null);
-
-        return MyTest.getTests(MyTest.class);
-    }
-
-    @Test(enabled = true, dataProvider = "tests")
-    public void testRawBindings(MyTest test) {
-        logger.warn("Testing " + test + " for number of bound tracks");
-        RefMetaDataTracker tracker = test.makeTracker();
-        Assert.assertEquals(tracker.getNTracksWithBoundFeatures(), test.nBoundTracks());
-
-        testSimpleBindings("A", tracker, test.AValues);
-        testSimpleBindings("B", tracker, test.BValues);
-    }
-
-    private <T> void testSimpleBindings(String name, RefMetaDataTracker tracker, RODRecordList expected) {
-        List<Feature> asValues = tracker.getValues(Feature.class, name);
-
-        Assert.assertEquals(tracker.hasValues(name), expected != null);
-        Assert.assertEquals(asValues.size(), expected == null ? 0 : expected.size());
-
-        if ( expected != null ) {
-            for ( GATKFeature e : expected ) {
-                boolean foundValue = false;
-                for ( Feature f : asValues ) {
-                    if ( e.getUnderlyingObject() == f ) foundValue = true;
-                }
-                Assert.assertTrue(foundValue, "Never found expected value of " + e.getUnderlyingObject() + " bound to " + name + " in " + tracker);
-            }
-        }
-    }
-
-    @Test(enabled = true, dataProvider = "tests")
-    public void testGettersAsString(MyTest test) {
-        logger.warn("Testing " + test + " for get() methods");
-        RefMetaDataTracker tracker = test.makeTracker();
-
-        for ( String name : Arrays.asList("A+B", "A", "B") ) {
-            List<Feature> v1 = name.equals("A+B") ? tracker.getValues(Feature.class) : tracker.getValues(Feature.class, name);
-            testGetter(name, v1, test.expected(name), true, tracker);
-
-            List<Feature> v2 = name.equals("A+B") ? tracker.getValues(Feature.class, locus) : tracker.getValues(Feature.class, name, locus);
-            testGetter(name, v2, startingHere(test.expected(name)), true, tracker);
-
-            Feature v3 = name.equals("A+B") ? tracker.getFirstValue(Feature.class) : tracker.getFirstValue(Feature.class, name);
-            testGetter(name, Arrays.asList(v3), test.expected(name), false, tracker);
-
-            Feature v4 = name.equals("A+B") ? tracker.getFirstValue(Feature.class, locus) : tracker.getFirstValue(Feature.class, name, locus);
-            testGetter(name, Arrays.asList(v4), startingHere(test.expected(name)), false, tracker);
-        }
-    }
-
-    @Test(enabled = true, dataProvider = "tests")
-    public void testGettersAsRodBindings(MyTest test) {
-        logger.warn("Testing " + test + " for get() methods as RodBindings");
-        RefMetaDataTracker tracker = test.makeTracker();
-
-        for ( String nameAsString : Arrays.asList("A", "B") ) {
-            RodBinding<Feature> binding = new RodBinding<Feature>(Feature.class, nameAsString, "none", "vcf", new Tags());
-            List<Feature> v1 = tracker.getValues(binding);
-            testGetter(nameAsString, v1, test.expected(nameAsString), true, tracker);
-
-            List<Feature> v2 = tracker.getValues(binding, locus);
-            testGetter(nameAsString, v2, startingHere(test.expected(nameAsString)), true, tracker);
-
-            Feature v3 = tracker.getFirstValue(binding);
-            testGetter(nameAsString, Arrays.asList(v3), test.expected(nameAsString), false, tracker);
-
-            Feature v4 = tracker.getFirstValue(binding, locus);
-            testGetter(nameAsString, Arrays.asList(v4), startingHere(test.expected(nameAsString)), false, tracker);
-        }
-    }
-
-    @Test(enabled = true, dataProvider = "tests")
-    public void testGettersAsListOfRodBindings(MyTest test) {
-        logger.warn("Testing " + test + " for get() methods for List<RodBindings>");
-        RefMetaDataTracker tracker = test.makeTracker();
-
-        String nameAsString = "A+B";
-        RodBinding<Feature> A = new RodBinding<Feature>(Feature.class, "A", "none", "vcf", new Tags());
-        RodBinding<Feature> B = new RodBinding<Feature>(Feature.class, "B", "none", "vcf", new Tags());
-        List<RodBinding<Feature>> binding = Arrays.asList(A, B);
-
-        List<Feature> v1 = tracker.getValues(binding);
-        testGetter(nameAsString, v1, test.expected(nameAsString), true, tracker);
-
-        List<Feature> v2 = tracker.getValues(binding, locus);
-        testGetter(nameAsString, v2, startingHere(test.expected(nameAsString)), true, tracker);
-
-        Feature v3 = tracker.getFirstValue(binding);
-        testGetter(nameAsString, Arrays.asList(v3), test.expected(nameAsString), false, tracker);
-
-        Feature v4 = tracker.getFirstValue(binding, locus);
-        testGetter(nameAsString, Arrays.asList(v4), startingHere(test.expected(nameAsString)), false, tracker);
-    }
-
-    private List<GATKFeature> startingHere(List<GATKFeature> l) {
-        List<GATKFeature> x = new ArrayList<GATKFeature>();
-        for ( GATKFeature f : l ) if ( f.getStart() == locus.getStart() ) x.add(f);
-        return x;
-    }
-
-    private void testGetter(String name, List<Feature> got, List<GATKFeature> expected, boolean requireExact, RefMetaDataTracker tracker) {
-        if ( got.size() == 1 && got.get(0) == null )
-            got = Collections.emptyList();
-
-        if ( requireExact )
-            Assert.assertEquals(got.size(), expected.size());
-
-        boolean foundAny = false;
-        for ( GATKFeature e : expected ) {
-            boolean found1 = false;
-            for ( Feature got1 : got ) {
-                if ( e.getUnderlyingObject() == got1 )
-                    found1 = true;
-            }
-            if ( requireExact )
-                Assert.assertTrue(found1, "Never found expected GATKFeature " + e + " bound to " + name + " in " + tracker);
-            foundAny = found1 || foundAny;
-        }
-
-        if ( ! requireExact && ! expected.isEmpty() )
-            Assert.assertTrue(foundAny, "Never found any got values matching one of the expected values bound to " + name + " in " + tracker);
-    }
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/refdata/tracks/FeatureManagerUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/refdata/tracks/FeatureManagerUnitTest.java
deleted file mode 100644
index ec3b470..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/refdata/tracks/FeatureManagerUnitTest.java
+++ /dev/null
@@ -1,163 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.refdata.tracks;
-
-
-import htsjdk.samtools.reference.IndexedFastaSequenceFile;
-import htsjdk.tribble.Feature;
-import htsjdk.tribble.FeatureCodec;
-import org.broadinstitute.gatk.utils.BaseTest;
-import org.broadinstitute.gatk.utils.codecs.table.BedTableCodec;
-import org.broadinstitute.gatk.utils.codecs.table.TableFeature;
-import org.broadinstitute.gatk.utils.GenomeLocParser;
-import htsjdk.variant.vcf.VCF3Codec;
-import htsjdk.variant.vcf.VCFCodec;
-import org.broadinstitute.gatk.utils.exceptions.UserException;
-import org.broadinstitute.gatk.utils.fasta.CachingIndexedFastaSequenceFile;
-import htsjdk.variant.variantcontext.VariantContext;
-import org.testng.Assert;
-import org.testng.annotations.BeforeMethod;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
-
-import java.io.*;
-import java.util.*;
-
-
-/**
- * @author depristo
- *
- * UnitTests for RMD FeatureManager
- */
-public class FeatureManagerUnitTest extends BaseTest {
-    private static final File RANDOM_FILE = new File(publicTestDir+ "exampleGATKReport.eval");
-    private static final File VCF3_FILE = new File(privateTestDir + "vcf3.vcf");
-    private static final File VCF4_FILE = new File(privateTestDir + "HiSeq.10000.vcf");
-    private static final File VCF4_FILE_GZ = new File(privateTestDir + "HiSeq.10000.vcf.gz");
-    private static final File VCF4_FILE_BGZIP = new File(privateTestDir + "HiSeq.10000.bgzip.vcf.gz");
-
-    private FeatureManager manager;
-    private GenomeLocParser genomeLocParser;
-
-    @BeforeMethod
-    public void setup() {
-        File referenceFile = new File(b36KGReference);
-        try {
-            IndexedFastaSequenceFile seq = new CachingIndexedFastaSequenceFile(referenceFile);
-            genomeLocParser = new GenomeLocParser(seq);
-            manager = new FeatureManager();
-        }
-        catch(FileNotFoundException ex) {
-            throw new UserException.CouldNotReadInputFile(referenceFile,ex);
-        }
-    }
-
-    @Test
-    public void testManagerCreation() {
-        Assert.assertTrue(manager.getFeatureDescriptors().size() > 0);
-    }
-
-    private class FMTest extends BaseTest.TestDataProvider {
-        public Class codec;
-        public Class<? extends Feature> feature;
-        public String name;
-        public File associatedFile;
-
-        private FMTest(final Class feature, final Class codec, final String name, final File file) {
-            super(FMTest.class);
-            this.codec = codec;
-            this.feature = feature;
-            this.name = name;
-            this.associatedFile = file;
-        }
-
-        public void assertExpected(FeatureManager.FeatureDescriptor featureDescriptor) {
-            Assert.assertEquals(featureDescriptor.getCodecClass(), codec);
-            Assert.assertEquals(featureDescriptor.getFeatureClass(), feature);
-            Assert.assertEquals(featureDescriptor.getName().toLowerCase(), name.toLowerCase());
-        }
-
-        public String toString() {
-            return String.format("FMTest name=%s codec=%s feature=%s file=%s",
-                    name, codec.getSimpleName(), feature.getSimpleName(), associatedFile);
-        }
-    }
-
-    @DataProvider(name = "tests")
-    public Object[][] createTests() {
-        new FMTest(VariantContext.class, VCF3Codec.class, "VCF3", VCF3_FILE);
-        new FMTest(VariantContext.class, VCFCodec.class, "VCF", VCF4_FILE);
-        new FMTest(VariantContext.class, VCFCodec.class, "VCF", VCF4_FILE_GZ);
-        new FMTest(VariantContext.class, VCFCodec.class, "VCF", VCF4_FILE_BGZIP);
-        new FMTest(TableFeature.class, BedTableCodec.class, "bedtable", null);
-        return FMTest.getTests(FMTest.class);
-    }
-
-    @Test(dataProvider = "tests")
-    public void testGetByFile(FMTest params) {
-        if ( params.associatedFile != null ) {
-            FeatureManager.FeatureDescriptor byFile = manager.getByFiletype(params.associatedFile);
-            Assert.assertNotNull(byFile, "Couldn't find any type associated with file " + params.associatedFile);
-            params.assertExpected(byFile);
-        }
-    }
-
-    @Test
-    public void testGetByFileNoMatch() {
-        FeatureManager.FeatureDescriptor byFile = manager.getByFiletype(RANDOM_FILE);
-        Assert.assertNull(byFile, "Found type " + byFile + " associated with RANDOM, non-Tribble file " + RANDOM_FILE);
-    }
-
-    @Test(dataProvider = "tests")
-    public void testGetters(FMTest params) {
-        params.assertExpected(manager.getByCodec(params.codec));
-        params.assertExpected(manager.getByName(params.name));
-        params.assertExpected(manager.getByName(params.name.toLowerCase()));
-        params.assertExpected(manager.getByName(params.name.toUpperCase()));
-
-        Collection<FeatureManager.FeatureDescriptor> descriptors = manager.getByFeature(params.feature);
-        Assert.assertTrue(descriptors.size() > 0, "Look up by FeatureClass failed");
-    }
-
-    @Test
-    public void testUserFriendlyList() {
-        Assert.assertTrue(manager.userFriendlyListOfAvailableFeatures().length() > 0, "Expected at least one codec to be listed");
-        Assert.assertTrue(manager.userFriendlyListOfAvailableFeatures().split(",").length > 0, "Expected at least two codecs, but only saw one");
-    }
-
-    @Test
-    public void testCodecCreation() {
-        FeatureManager.FeatureDescriptor descriptor = manager.getByName("vcf");
-        Assert.assertNotNull(descriptor, "Couldn't find VCF feature descriptor!");
-
-        FeatureCodec c = manager.createCodec(descriptor, "foo", genomeLocParser, null);
-        Assert.assertNotNull(c, "Couldn't create codec");
-        Assert.assertEquals(c.getClass(), descriptor.getCodecClass());
-        Assert.assertEquals(c.getFeatureType(), descriptor.getFeatureClass());
-    }
-
-}
-
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/refdata/tracks/RMDTrackBuilderUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/refdata/tracks/RMDTrackBuilderUnitTest.java
deleted file mode 100644
index a64773a..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/refdata/tracks/RMDTrackBuilderUnitTest.java
+++ /dev/null
@@ -1,190 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.refdata.tracks;
-
-
-import htsjdk.samtools.reference.IndexedFastaSequenceFile;
-import htsjdk.tribble.Tribble;
-import htsjdk.tribble.index.Index;
-import htsjdk.tribble.util.LittleEndianOutputStream;
-import htsjdk.variant.vcf.VCFCodec;
-import org.broadinstitute.gatk.utils.exceptions.UserException;
-import org.testng.Assert;
-import org.broadinstitute.gatk.utils.BaseTest;
-import org.broadinstitute.gatk.utils.GenomeLocParser;
-import org.broadinstitute.gatk.utils.fasta.CachingIndexedFastaSequenceFile;
-
-import org.testng.annotations.BeforeMethod;
-
-import org.testng.annotations.Test;
-
-import java.io.*;
-import java.nio.channels.FileChannel;
-
-
-/**
- * @author aaron
- *         <p/>
- *         Class RMDTrackBuilderUnitTest
- *         <p/>
- *         Testing out the builder for tribble Tracks
- */
-public class RMDTrackBuilderUnitTest extends BaseTest {
-    private RMDTrackBuilder builder;
-    private IndexedFastaSequenceFile seq;
-    private GenomeLocParser genomeLocParser;
-
-    @BeforeMethod
-    public void setup() {
-        File referenceFile = new File(b37KGReference);
-        try {
-            seq = new CachingIndexedFastaSequenceFile(referenceFile);
-        }
-        catch(FileNotFoundException ex) {
-            throw new UserException.CouldNotReadInputFile(referenceFile,ex);
-        }
-        genomeLocParser = new GenomeLocParser(seq);
-
-        // We have to disable auto-index creation/locking in the RMDTrackBuilder for tests,
-        // as the lock acquisition calls were intermittently hanging on our farm. This unfortunately
-        // means that we can't include tests for the auto-index creation feature.
-        builder = new RMDTrackBuilder(seq.getSequenceDictionary(),genomeLocParser,null,true,null);
-    }
-
-    @Test
-    public void testBuilder() {
-        Assert.assertTrue(builder.getFeatureManager().getFeatureDescriptors().size() > 0);
-    }
-
-    @Test
-    public void testDisableAutoIndexGeneration() throws IOException {
-        final File unindexedVCF = new File(privateTestDir + "unindexed.vcf");
-        final File unindexedVCFIndex = Tribble.indexFile(unindexedVCF);
-
-        Index index = builder.loadIndex(unindexedVCF, new VCFCodec());
-
-        Assert.assertFalse(unindexedVCFIndex.exists());
-        Assert.assertNotNull(index);
-    }
-
-    @Test
-    public void testLoadOnDiskIndex() {
-        final File originalVCF = new File(privateTestDir + "vcf4.1.example.vcf");
-        final File tempVCFWithCorrectIndex = createTempVCFFileAndIndex(originalVCF, false);
-        final File tempVCFIndexFile = Tribble.indexFile(tempVCFWithCorrectIndex);
-
-        final Index index = builder.loadFromDisk(tempVCFWithCorrectIndex, tempVCFIndexFile);
-
-        Assert.assertNotNull(index);
-        Assert.assertTrue(tempVCFIndexFile.exists());
-
-        final Index inMemoryIndex = builder.createIndexInMemory(tempVCFWithCorrectIndex, new VCFCodec());
-        Assert.assertTrue(index.equalsIgnoreProperties(inMemoryIndex));
-    }
-
-    @Test
-    public void testLoadOnDiskOutdatedIndex() {
-        final File originalVCF = new File(privateTestDir + "vcf4.1.example.vcf");
-        final File tempVCFWithOutdatedIndex = createTempVCFFileAndIndex(originalVCF, true);
-        final File tempVCFIndexFile = Tribble.indexFile(tempVCFWithOutdatedIndex);
-
-        final Index index = builder.loadFromDisk(tempVCFWithOutdatedIndex, tempVCFIndexFile);
-
-        // loadFromDisk() should return null to indicate that the index is outdated and should not be used,
-        // but should not delete the index since our builder has disableAutoIndexCreation set to true
-        Assert.assertNull(index);
-        Assert.assertTrue(tempVCFIndexFile.exists());
-    }
-
-    /**
-     * Create a temporary vcf file and an associated index file, which may be set to be out-of-date
-     * relative to the vcf
-     *
-     * @param vcfFile the vcf file
-     * @param createOutOfDateIndex if true, ensure that the temporary vcf file is modified after the index
-     * @return a file pointing to the new tmp location, with accompanying index
-     */
-    private File createTempVCFFileAndIndex( final File vcfFile, final boolean createOutOfDateIndex ) {
-        try {
-            final File tmpFile = createTempFile("RMDTrackBuilderUnitTest", "");
-            final File tmpIndex = Tribble.indexFile(tmpFile);
-            tmpIndex.deleteOnExit();
-
-            copyFile(vcfFile, tmpFile);
-            final Index inMemoryIndex = builder.createIndexInMemory(tmpFile, new VCFCodec());
-            final LittleEndianOutputStream indexOutputStream = new LittleEndianOutputStream(new FileOutputStream(tmpIndex));
-
-            // If requested, modify the tribble file after the index. Otherwise, modify the index last.
-            if ( createOutOfDateIndex ) {
-                inMemoryIndex.write(indexOutputStream);
-                indexOutputStream.close();
-                Thread.sleep(2000);
-                copyFile(vcfFile, tmpFile);
-            }
-            else {
-                copyFile(vcfFile, tmpFile);
-                Thread.sleep(2000);
-                inMemoryIndex.write(indexOutputStream);
-                indexOutputStream.close();
-            }
-
-            return tmpFile;
-        } catch (IOException e) {
-            Assert.fail("Unable to create temperary file");
-        } catch (InterruptedException e) {
-            Assert.fail("Somehow our thread got interrupted");
-        }
-        return null;
-    }
-
-    /**
-     * copy a file, from http://www.exampledepot.com/egs/java.nio/File2File.html
-     *
-     * @param srFile the source file
-     * @param dtFile the destination file
-     */
-    private static void copyFile(File srFile, File dtFile) {
-        try {
-            // Create channel on the source
-            FileChannel srcChannel = new FileInputStream(srFile).getChannel();
-
-            // Create channel on the destination
-            FileChannel dstChannel = new FileOutputStream(dtFile).getChannel();
-
-            // Copy file contents from source to destination
-            dstChannel.transferFrom(srcChannel, 0, srcChannel.size());
-
-            // Close the channels
-            srcChannel.close();
-            dstChannel.close();
-        } catch (IOException e) {
-            e.printStackTrace();
-            Assert.fail("Unable to process copy " + e.getMessage());
-        }
-    }
-
-}
-
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/refdata/utils/CheckableCloseableTribbleIterator.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/refdata/utils/CheckableCloseableTribbleIterator.java
deleted file mode 100644
index e77c079..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/refdata/utils/CheckableCloseableTribbleIterator.java
+++ /dev/null
@@ -1,90 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.refdata.utils;
-
-import htsjdk.tribble.CloseableTribbleIterator;
-import htsjdk.tribble.Feature;
-
-import java.util.ArrayList;
-import java.util.Iterator;
-import java.util.List;
-
-/**
- * Adapter to allow checking if the wrapped iterator was closed.
- * Creating an CCTI also adds it to the list returned from getThreadIterators().
- * @param <T> feature
- */
-public class CheckableCloseableTribbleIterator<T extends Feature> implements CloseableTribbleIterator<T> {
-    private final CloseableTribbleIterator<T> iterator;
-    private boolean closed = false;
-
-    private static ThreadLocal<List<CheckableCloseableTribbleIterator<? extends Feature>>> threadIterators =
-            new ThreadLocal<List<CheckableCloseableTribbleIterator<? extends Feature>>>() {
-                @Override
-                protected List<CheckableCloseableTribbleIterator<? extends Feature>> initialValue() {
-                    return new ArrayList<CheckableCloseableTribbleIterator<? extends Feature>>();
-                }
-            };
-
-    public CheckableCloseableTribbleIterator(CloseableTribbleIterator<T> iterator) {
-        this.iterator = iterator;
-        threadIterators.get().add(this);
-    }
-
-    /**
-     * Returns the list of iterators created on this thread since the last time clearCreatedIterators() was called.
-     * @return the list of iterators created on this thread since the last time clearCreatedIterators() was called.
-     */
-    public static List<CheckableCloseableTribbleIterator<? extends Feature>> getThreadIterators() {
-        return threadIterators.get();
-    }
-
-    /**
-     * Clears the tracked list of iterators created on this thread.
-     */
-    public static void clearThreadIterators() {
-        threadIterators.get().clear();
-    }
-
-    @Override
-    public void close() {
-        iterator.close();
-        this.closed = true;
-    }
-
-    /**
-     * Returns true if this iterator was properly closed.
-     * @return true if this iterator was properly closed.
-     */
-    public boolean isClosed() {
-        return closed;
-    }
-
-    @Override public Iterator<T> iterator() { return this; }
-    @Override public boolean hasNext() { return iterator.hasNext(); }
-    @Override public T next() { return iterator.next(); }
-    @Override public void remove() { iterator.remove(); }
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/refdata/utils/FeatureToGATKFeatureIteratorUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/refdata/utils/FeatureToGATKFeatureIteratorUnitTest.java
deleted file mode 100644
index d95c320..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/refdata/utils/FeatureToGATKFeatureIteratorUnitTest.java
+++ /dev/null
@@ -1,61 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.refdata.utils;
-
-import htsjdk.samtools.reference.IndexedFastaSequenceFile;
-import htsjdk.tribble.Feature;
-import org.broadinstitute.gatk.utils.BaseTest;
-import org.broadinstitute.gatk.utils.GenomeLoc;
-import org.broadinstitute.gatk.utils.GenomeLocParser;
-import htsjdk.variant.vcf.VCFCodec;
-import org.broadinstitute.gatk.utils.fasta.CachingIndexedFastaSequenceFile;
-import org.testng.Assert;
-import org.testng.annotations.Test;
-
-import java.io.File;
-import java.io.IOException;
-
-public class FeatureToGATKFeatureIteratorUnitTest extends BaseTest {
-    @Test
-    @SuppressWarnings("unchecked")
-    public void testCloseFilePointers() throws IOException {
-        final String chr = "20";
-        IndexedFastaSequenceFile seq = new CachingIndexedFastaSequenceFile(new File(BaseTest.hg19Reference));
-        GenomeLocParser parser = new GenomeLocParser(seq);
-        File file = new File(privateTestDir + "NA12878.hg19.example1.vcf");
-        VCFCodec codec = new VCFCodec();
-        TestFeatureReader reader = new TestFeatureReader(file.getAbsolutePath(), codec);
-        CheckableCloseableTribbleIterator<Feature> tribbleIterator = reader.query(chr, 1, 100000);
-        FeatureToGATKFeatureIterator gatkIterator = new FeatureToGATKFeatureIterator(parser, tribbleIterator, "test");
-        Assert.assertTrue(gatkIterator.hasNext(), "GATK feature iterator does not have a next value.");
-        GenomeLoc gatkLocation = gatkIterator.next().getLocation();
-        Assert.assertEquals(gatkLocation.getContig(), chr, "Instead of chr 20 rod iterator was at location " + gatkLocation);
-        Assert.assertFalse(tribbleIterator.isClosed(), "Tribble iterator is closed but should be still open.");
-        gatkIterator.close();
-        Assert.assertTrue(tribbleIterator.isClosed(), "Tribble iterator is open but should be now closed.");
-        reader.close();
-    }
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/refdata/utils/FlashBackIteratorUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/refdata/utils/FlashBackIteratorUnitTest.java
deleted file mode 100644
index 7aa07ef..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/refdata/utils/FlashBackIteratorUnitTest.java
+++ /dev/null
@@ -1,364 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.refdata.utils;
-
-import htsjdk.samtools.SAMFileHeader;
-import htsjdk.samtools.SAMSequenceDictionary;
-import org.testng.Assert;
-import org.broadinstitute.gatk.utils.BaseTest;
-import org.broadinstitute.gatk.engine.refdata.ReferenceOrderedDatum;
-import org.broadinstitute.gatk.utils.GenomeLoc;
-import org.broadinstitute.gatk.utils.GenomeLocParser;
-import org.broadinstitute.gatk.utils.sam.ArtificialSAMUtils;
-
-import org.testng.annotations.BeforeMethod;
-
-import org.testng.annotations.Test;
-
-import java.io.File;
-import java.io.FileNotFoundException;
-import java.io.IOException;
-import java.util.AbstractList;
-import java.util.ArrayList;
-import java.util.List;
-
-
-/**
- * @author aaron
- *         <p/>
- *         Class FlashBackIteratorUnitTest
- *         <p/>
- *         just like a greatful dead show...this will be prone to flashbacks
- */
-public class FlashBackIteratorUnitTest extends BaseTest {
-    private SAMFileHeader header = ArtificialSAMUtils.createArtificialSamHeader(NUMBER_OF_CHROMOSOMES, STARTING_CHROMOSOME, CHROMOSOME_SIZE);
-    private static final int NUMBER_OF_CHROMOSOMES = 5;
-    private static final int STARTING_CHROMOSOME = 1;
-    private static final int CHROMOSOME_SIZE = 1000;
-
-    private String firstContig;
-    private GenomeLocParser genomeLocParser;
-
-    @BeforeMethod
-    public void setup() {
-        genomeLocParser = new GenomeLocParser(header.getSequenceDictionary());
-        firstContig = header.getSequenceDictionary().getSequence(0).getSequenceName();
-    }
-
-    @Test
-    public void testBasicIteration() {
-        GenomeLoc loc = genomeLocParser.createGenomeLoc(firstContig, 0, 0);
-        FlashBackIterator iter = new FlashBackIterator(new FakeSeekableRODIterator(genomeLocParser,loc));
-        GenomeLoc lastLocation = null;
-        for (int x = 0; x < 10; x++) {
-            iter.next();
-            GenomeLoc cur = iter.position();
-            if (lastLocation != null) {
-                Assert.assertTrue(lastLocation.isBefore(cur));
-            }
-            lastLocation = cur;
-        }
-    }
-
-    @Test
-    public void testBasicIterationThenFlashBack() {
-        GenomeLoc loc = genomeLocParser.createGenomeLoc(firstContig, 0, 0);
-        FlashBackIterator iter = new FlashBackIterator(new FakeSeekableRODIterator(genomeLocParser,loc));
-        GenomeLoc lastLocation = null;
-        for (int x = 0; x < 10; x++) {
-            iter.next();
-            GenomeLoc cur = iter.position();
-            if (lastLocation != null) {
-                Assert.assertTrue(lastLocation.isBefore(cur));
-            }
-            lastLocation = cur;
-        }
-        iter.flashBackTo(genomeLocParser.createGenomeLoc(firstContig, 2));
-    }
-
-    @Test
-    public void testBasicIterationThenFlashBackThenIterate() {
-        GenomeLoc loc = genomeLocParser.createGenomeLoc(firstContig, 0, 0);
-        FlashBackIterator iter = new FlashBackIterator(new FakeSeekableRODIterator(genomeLocParser,loc));
-        GenomeLoc lastLocation = null;
-        for (int x = 0; x < 10; x++) {
-            iter.next();
-            GenomeLoc cur = iter.position();
-            if (lastLocation != null) {
-                Assert.assertTrue(lastLocation.isBefore(cur));
-            }
-            lastLocation = cur;
-        }
-        iter.flashBackTo(genomeLocParser.createGenomeLoc(firstContig, 1));
-        int count = 0;
-        while (iter.hasNext()) {
-            count++;
-            iter.next();
-        }
-        Assert.assertEquals(count, 10);
-    }
-
-
-    @Test
-    public void testFlashBackTruth() {
-        GenomeLoc loc = genomeLocParser.createGenomeLoc(firstContig, 0, 0);
-        LocationAwareSeekableRODIterator backIter = new FakeSeekableRODIterator(genomeLocParser,loc);
-        // remove the first three records
-        backIter.next();
-        backIter.next();
-        backIter.next();
-        FlashBackIterator iter = new FlashBackIterator(backIter);
-        GenomeLoc lastLocation = null;
-        for (int x = 0; x < 10; x++) {
-            iter.next();
-            GenomeLoc cur = iter.position();
-            if (lastLocation != null) {
-                Assert.assertTrue(lastLocation.isBefore(cur));
-            }
-            lastLocation = cur;
-        }
-        Assert.assertTrue(iter.canFlashBackTo(genomeLocParser.createGenomeLoc(firstContig, 5)));
-        Assert.assertTrue(iter.canFlashBackTo(genomeLocParser.createGenomeLoc(firstContig, 15)));
-        Assert.assertTrue(!iter.canFlashBackTo(genomeLocParser.createGenomeLoc(firstContig, 2)));
-        Assert.assertTrue(!iter.canFlashBackTo(genomeLocParser.createGenomeLoc(firstContig, 1)));
-    }
-
-    @Test
-    public void testBasicIterationThenFlashBackHalfWayThenIterate() {
-        GenomeLoc loc = genomeLocParser.createGenomeLoc(firstContig, 0, 0);
-        FlashBackIterator iter = new FlashBackIterator(new FakeSeekableRODIterator(genomeLocParser,loc));
-        GenomeLoc lastLocation = null;
-        for (int x = 0; x < 10; x++) {
-            iter.next();
-            GenomeLoc cur = iter.position();
-            if (lastLocation != null) {
-                Assert.assertTrue(lastLocation.isBefore(cur));
-            }
-            lastLocation = cur;
-        }
-        iter.flashBackTo(genomeLocParser.createGenomeLoc(firstContig, 5));
-        int count = 0;
-        while (iter.hasNext()) {
-            count++;
-            iter.next();
-        }
-        Assert.assertEquals(count, 6); // chr1:5, 6, 7, 8, 9, and 10
-    }
-}
-
-
-class FakeSeekableRODIterator implements LocationAwareSeekableRODIterator {
-    private GenomeLocParser genomeLocParser;
-
-    // current location
-    private GenomeLoc location;
-    private FakeRODatum curROD;
-    private int recordCount = 10;
-
-    public FakeSeekableRODIterator(GenomeLocParser genomeLocParser,GenomeLoc startingLoc) {
-        this.genomeLocParser = genomeLocParser;
-        this.location = genomeLocParser.createGenomeLoc(startingLoc.getContig(), startingLoc.getStart() + 1, startingLoc.getStop() + 1);
-    }
-
-    /**
-     * Gets the header associated with the backing input stream.
-     * @return the ROD header.
-     */
-    @Override
-    public Object getHeader() {
-        return null;
-    }
-
-    /**
-     * Gets the sequence dictionary associated with the backing input stream.
-     * @return sequence dictionary from the ROD header.
-     */
-    @Override
-    public SAMSequenceDictionary getSequenceDictionary() {
-        return null;
-    }
-
-
-    @Override
-    public GenomeLoc peekNextLocation() {
-        System.err.println("Peek Next -> " + location);
-        return location;
-    }
-
-    @Override
-    public GenomeLoc position() {
-        return location;
-    }
-
-    @Override
-    public RODRecordList seekForward(GenomeLoc interval) {
-        this.location = interval;
-        return next();
-    }
-
-    @Override
-    public boolean hasNext() {
-        return (recordCount > 0);
-    }
-
-    @Override
-    public RODRecordList next() {
-        RODRecordList list = new FakeRODRecordList();
-        curROD = new FakeRODatum("STUPIDNAME", location);
-        location = genomeLocParser.createGenomeLoc(location.getContig(), location.getStart() + 1, location.getStop() + 1);
-        list.add(curROD);
-        recordCount--;
-        return list;
-    }
-
-    @Override
-    public void remove() {
-        throw new IllegalStateException("GRRR");
-    }
-
-    @Override
-    public void close() {
-        // nothing to do
-    }
-}
-
-
-/** for testing only */
-class FakeRODatum extends GATKFeature implements ReferenceOrderedDatum {
-
-    final GenomeLoc location;
-
-    public FakeRODatum(String name, GenomeLoc location) {
-        super(name);
-        this.location = location;
-    }
-
-    @Override
-    public String getName() {
-        return "false";
-    }
-
-    @Override
-    public boolean parseLine(Object header, String[] parts) throws IOException {
-        return false;
-    }
-
-    @Override
-    public String toSimpleString() {
-        return "";
-    }
-
-    @Override
-    public String repl() {
-        return "";
-    }
-
-    /**
-     * Used by the ROD system to determine how to split input lines
-     *
-     * @return Regex string delimiter separating fields
-     */
-    @Override
-    public String delimiterRegex() {
-        return "";
-    }
-
-    @Override
-    public GenomeLoc getLocation() {
-        return location;
-    }
-
-    @Override
-    public Object getUnderlyingObject() {
-        return this;
-    }
-
-    @Override
-    public int compareTo(ReferenceOrderedDatum that) {
-        return location.compareTo(that.getLocation());
-    }
-
-    /**
-     * Backdoor hook to read header, meta-data, etc. associated with the file.  Will be
-     * called by the ROD system before streaming starts
-     *
-     * @param source source data file on disk from which this rod stream will be pulled
-     *
-     * @return a header object that will be passed to parseLine command
-     */
-    @Override
-    public Object initialize(File source) throws FileNotFoundException {
-        return null;
-    }
-
-    @Override
-    public String getChr() {
-        return location.getContig();
-    }
-
-    @Override
-    public int getStart() {
-        return (int)location.getStart();
-    }
-
-    @Override
-    public int getEnd() {
-        return (int)location.getStop();
-    }
-}
-
-class FakeRODRecordList extends AbstractList<GATKFeature> implements RODRecordList {
-    private final List<GATKFeature> list = new ArrayList<GATKFeature>();
-
-    public boolean add(GATKFeature data) {
-        return list.add(data);
-    }
-
-    @Override
-    public GATKFeature get(int i) {
-        return list.get(i);
-    }
-
-    @Override
-    public int size() {
-        return list.size();
-    }
-
-    @Override
-    public GenomeLoc getLocation() {
-        return list.get(0).getLocation();
-    }
-
-    @Override
-    public String getName() {
-        return "test";
-    }
-
-    @Override
-    public int compareTo(RODRecordList rodRecordList) {
-        return this.list.get(0).getLocation().compareTo(rodRecordList.getLocation());
-    }
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/refdata/utils/TestFeatureReader.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/refdata/utils/TestFeatureReader.java
deleted file mode 100644
index 90b5e7a..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/refdata/utils/TestFeatureReader.java
+++ /dev/null
@@ -1,53 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.refdata.utils;
-
-import htsjdk.tribble.Feature;
-import htsjdk.tribble.FeatureCodec;
-import htsjdk.tribble.TribbleIndexedFeatureReader;
-
-import java.io.IOException;
-
-/**
- * Feature reader with additional test utilities. The iterators can be checked to see if they are closed.
- */
-public class TestFeatureReader extends TribbleIndexedFeatureReader<Feature, Object> {
-    public TestFeatureReader(String featurePath, FeatureCodec codec) throws IOException {
-        super(featurePath, codec, true);
-    }
-
-    @Override
-    @SuppressWarnings("unchecked")
-    public CheckableCloseableTribbleIterator<Feature> iterator() throws IOException {
-        return new CheckableCloseableTribbleIterator<Feature>(super.iterator());
-    }
-
-    @Override
-    @SuppressWarnings("unchecked")
-    public CheckableCloseableTribbleIterator<Feature> query(String chr, int start, int end) throws IOException {
-        return new CheckableCloseableTribbleIterator<Feature>(super.query(chr, start, end));
-    }
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/refdata/utils/TestRMDTrackBuilder.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/refdata/utils/TestRMDTrackBuilder.java
deleted file mode 100644
index 17179f3..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/refdata/utils/TestRMDTrackBuilder.java
+++ /dev/null
@@ -1,71 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.refdata.utils;
-
-import htsjdk.samtools.SAMSequenceDictionary;
-import htsjdk.tribble.FeatureCodec;
-import htsjdk.tribble.Tribble;
-import htsjdk.tribble.index.Index;
-import org.broadinstitute.gatk.engine.refdata.tracks.FeatureManager;
-import org.broadinstitute.gatk.engine.refdata.tracks.IndexDictionaryUtils;
-import org.broadinstitute.gatk.engine.refdata.tracks.RMDTrack;
-import org.broadinstitute.gatk.engine.refdata.tracks.RMDTrackBuilder;
-import org.broadinstitute.gatk.utils.GenomeLocParser;
-
-import java.io.File;
-import java.io.IOException;
-
-/**
- * Extension of RMDTrackBuilder that creates TestFeatureReader's which in turn create CheckableCloseableTribbleIterator's.
- */
-public class TestRMDTrackBuilder extends RMDTrackBuilder {
-    private GenomeLocParser genomeLocParser;
-
-    public TestRMDTrackBuilder(SAMSequenceDictionary dict, GenomeLocParser genomeLocParser) {
-        // disable auto-index creation/locking in the RMDTrackBuilder for tests
-        super(dict, genomeLocParser, null, true, null);
-        this.genomeLocParser = genomeLocParser;
-    }
-
-    @Override
-    public RMDTrack createInstanceOfTrack(RMDTriplet fileDescriptor) {
-        String name = fileDescriptor.getName();
-        File inputFile = new File(fileDescriptor.getFile());
-        FeatureManager.FeatureDescriptor descriptor = getFeatureManager().getByTriplet(fileDescriptor);
-        FeatureCodec codec = getFeatureManager().createCodec(descriptor, name, genomeLocParser, null);
-        TestFeatureReader featureReader;
-        Index index;
-        try {
-            // Create a feature reader that creates checkable tribble iterators.
-            index = loadIndex(inputFile, codec);
-            featureReader = new TestFeatureReader(inputFile.getAbsolutePath(), codec);
-        } catch (IOException e) {
-            throw new RuntimeException(e);
-        }
-        SAMSequenceDictionary sequenceDictionary = IndexDictionaryUtils.getSequenceDictionaryFromProperties(index);
-        return new RMDTrack(descriptor.getCodecClass(), name, inputFile, featureReader, sequenceDictionary, genomeLocParser, codec);
-    }
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/report/GATKReportUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/report/GATKReportUnitTest.java
deleted file mode 100644
index c28e901..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/report/GATKReportUnitTest.java
+++ /dev/null
@@ -1,285 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.report;
-
-import org.broadinstitute.gatk.utils.BaseTest;
-import org.testng.Assert;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
-
-import java.io.File;
-import java.io.IOException;
-import java.io.PrintStream;
-import java.util.Random;
-import java.io.FileInputStream;
-import java.io.DataInputStream;
-import java.io.BufferedReader;
-import java.io.InputStreamReader;
-import java.util.ArrayList;
-
-
-public class GATKReportUnitTest extends BaseTest {
-    @Test
-    public void testParse() throws Exception {
-        String reportPath = publicTestDir + "exampleGATKReportv2.tbl";
-        GATKReport report = new GATKReport(reportPath);
-        Assert.assertEquals(report.getVersion(), GATKReportVersion.V1_1);
-        Assert.assertEquals(report.getTables().size(), 5);
-
-        GATKReportTable countVariants = report.getTable("CountVariants");
-        Assert.assertEquals(countVariants.get(0, "nProcessedLoci"), "63025520");
-        Assert.assertEquals(countVariants.get(0, "nNoCalls"), "0");
-        Assert.assertEquals(countVariants.get(0, "heterozygosity"), 4.73e-06);
-
-        GATKReportTable validationReport = report.getTable("ValidationReport");
-        Assert.assertEquals(validationReport.get(2, "PPV"), Double.NaN);
-    }
-
-    @DataProvider(name = "rightAlignValues")
-    public Object[][] getRightAlignValues() {
-        return new Object[][]{
-                new Object[]{null, true},
-                new Object[]{"null", true},
-                new Object[]{"NA", true},
-                new Object[]{"0", true},
-                new Object[]{"0.0", true},
-                new Object[]{"-0", true},
-                new Object[]{"-0.0", true},
-                new Object[]{String.valueOf(Long.MAX_VALUE), true},
-                new Object[]{String.valueOf(Long.MIN_VALUE), true},
-                new Object[]{String.valueOf(Float.MIN_NORMAL), true},
-                new Object[]{String.valueOf(Double.MAX_VALUE), true},
-                new Object[]{String.valueOf(Double.MIN_VALUE), true},
-                new Object[]{String.valueOf(Double.POSITIVE_INFINITY), true},
-                new Object[]{String.valueOf(Double.NEGATIVE_INFINITY), true},
-                new Object[]{String.valueOf(Double.NaN), true},
-                new Object[]{"hello", false}
-        };
-    }
-
-    @Test(dataProvider = "rightAlignValues")
-    public void testIsRightAlign(String value, boolean expected) {
-        Assert.assertEquals(GATKReportColumn.isRightAlign(value), expected, "right align of '" + value + "'");
-    }
-
-    private GATKReportTable getTableWithRandomValues() {
-        Random number = new Random(123L);
-        final int VALUESRANGE = 10;
-
-        GATKReport report = GATKReport.newSimpleReport("TableName", "col1", "col2", "col3");
-        GATKReportTable table = new GATKReportTable("testSortingTable", "table with random values sorted by columns", 3, GATKReportTable.TableSortingWay.SORT_BY_COLUMN );
-
-        final int NUMROWS = 100;
-        for (int x = 0; x < NUMROWS; x++) {
-            report.addRow(number.nextInt(VALUESRANGE), number.nextInt(VALUESRANGE), number.nextInt(VALUESRANGE));
-        }
-        return table;
-    }
-
-    @Test(enabled = true)
-    public void testSortingByColumn() {
-        Assert.assertEquals(isSorted(getTableWithRandomValues()), true);
-    }
-
-    private boolean isSorted(GATKReportTable table) {
-        boolean result = true;
-        File testingSortingTableFile = new File("testSortingFile.txt");
-
-        try {
-            // Connect print stream to the output stream
-            PrintStream ps = new PrintStream(testingSortingTableFile);
-            table.write(ps);
-            ps.close();
-        }
-        catch (Exception e){
-            System.err.println ("Error: " + e.getMessage());
-        }
-
-        ArrayList<int[]> rows = new ArrayList<int[]>();
-        try {
-            // Open the file
-            FileInputStream fStream = new FileInputStream(testingSortingTableFile);
-            // Get the object of DataInputStream
-            DataInputStream in = new DataInputStream(fStream);
-            BufferedReader br = new BufferedReader(new InputStreamReader(in));
-            String strLine;
-            //Read File Line By Line
-            while ((strLine = br.readLine()) != null) {
-
-                String[] parts = strLine.split(" ");
-                int l = parts.length;
-                int[] row = new int[l];
-                for(int n = 0; n < l; n++) {
-                    row[n] = Integer.parseInt(parts[n]);
-                }
-                rows.add(row);
-            }
-            //Close the input stream
-            in.close();
-        } catch (Exception e){//Catch exception if any
-            System.err.println("Error: " + e.getMessage());
-        }
-        for (int x = 1; x < rows.size() && result; x++)    {
-            result = checkRowOrder(rows.get(x - 1), rows.get(x));
-        }
-        return result;
-    }
-
-    private boolean checkRowOrder(int[] row1, int[] row2) {
-        int l = row1.length;
-        final int EQUAL = 0;
-
-        int result = EQUAL;
-
-        for(int x = 0; x < l && ( result <= EQUAL); x++) {
-            result = ((Integer)row1[x]).compareTo(row2[x]);
-        }
-        if (result <= EQUAL) {
-            return true;
-        } else {
-            return false;
-        }
-    }
-
-    private GATKReportTable makeBasicTable() {
-        GATKReport report = GATKReport.newSimpleReport("TableName", "sample", "value");
-        GATKReportTable table = report.getTable("TableName");
-        report.addRow("foo.1", "hello");
-        report.addRow("foo.2", "world");
-        return table;
-    }
-
-    @Test
-    public void testDottedSampleName() {
-        GATKReportTable table = makeBasicTable();
-        Assert.assertEquals(table.get(0, "value"), "hello");
-        Assert.assertEquals(table.get(1, "value"), "world");
-    }
-
-    @Test
-    public void testSimpleGATKReport() {
-        // Create a new simple GATK report named "TableName" with columns: Roger, is, and Awesome
-        GATKReport report = GATKReport.newSimpleReport("TableName", "Roger", "is", "Awesome");
-
-        // Add data to simple GATK report
-        report.addRow(12, 23.45, true);
-        report.addRow("ans", '3', 24.5);
-        report.addRow("hi", "", 2.3);
-
-        // Print the report to console
-        //report.print(System.out);
-
-        try {
-            File file = createTempFile("GATKReportGatherer-UnitTest", ".tbl");
-            //System.out.format("The temporary file" + " has been created: %s%n", file);
-            PrintStream ps = new PrintStream(file);
-            report.print(ps);
-            //System.out.println("File succesfully outputed!");
-            GATKReport inputRead = new GATKReport(file);
-            //System.out.println("File succesfully read!");
-            //inputRead.print(System.out);
-            Assert.assertTrue(report.isSameFormat(inputRead));
-
-        } catch (IOException x) {
-            System.err.format("IOException: %s%n", x);
-        }
-
-    }
-
-    @Test
-    public void testGATKReportGatherer() {
-
-        GATKReport report1, report2, report3;
-        report1 = new GATKReport();
-        report1.addTable("TableName", "Description", 2);
-        report1.getTable("TableName").addColumn("colA", "%s");
-        report1.getTable("TableName").addColumn("colB", "%c");
-        report1.getTable("TableName").set(0, "colA", "NotNum");
-        report1.getTable("TableName").set(0, "colB", (char) 64);
-
-        report2 = new GATKReport();
-        report2.addTable("TableName", "Description", 2);
-        report2.getTable("TableName").addColumn("colA", "%s");
-        report2.getTable("TableName").addColumn("colB", "%c");
-        report2.getTable("TableName").set(0, "colA", "df3");
-        report2.getTable("TableName").set(0, "colB", 'A');
-
-        report3 = new GATKReport();
-        report3.addTable("TableName", "Description", 2);
-        report3.getTable("TableName").addColumn("colA", "%s");
-        report3.getTable("TableName").addColumn("colB", "%c");
-        report3.getTable("TableName").set(0, "colA", "df5f");
-        report3.getTable("TableName").set(0, "colB", 'c');
-
-        report1.concat(report2);
-        report1.concat(report3);
-
-        report1.addTable("Table2", "To contain some more data types", 3);
-        GATKReportTable table = report1.getTable("Table2");
-        table.addColumn("SomeInt", "%d");
-        table.addColumn("SomeFloat", "%.16E");
-        table.addColumn("TrueFalse", "%B");
-        table.addRowIDMapping("12df", 0);
-        table.addRowIDMapping("5f", 1);
-        table.addRowIDMapping("RZ", 2);
-        table.set("12df", "SomeInt", Byte.MAX_VALUE);
-        table.set("12df", "SomeFloat", 34.0);
-        table.set("12df", "TrueFalse", true);
-        table.set("5f", "SomeInt", Short.MAX_VALUE);
-        table.set("5f", "SomeFloat", Double.MAX_VALUE);
-        table.set("5f", "TrueFalse", false);
-        table.set("RZ", "SomeInt", Long.MAX_VALUE);
-        table.set("RZ", "SomeFloat", 535646345.657453464576);
-        table.set("RZ", "TrueFalse", true);
-
-        report1.addTable("Table3", "blah", 1, GATKReportTable.TableSortingWay.SORT_BY_ROW);
-        report1.getTable("Table3").addColumn("a");
-        report1.getTable("Table3").addRowIDMapping("q", 2);
-        report1.getTable("Table3").addRowIDMapping("5", 3);
-        report1.getTable("Table3").addRowIDMapping("573s", 0);
-        report1.getTable("Table3").addRowIDMapping("ZZZ", 1);
-        report1.getTable("Table3").set("q", "a", "34");
-        report1.getTable("Table3").set("5", "a", "c4g34");
-        report1.getTable("Table3").set("573s", "a", "fDlwueg");
-        report1.getTable("Table3").set("ZZZ", "a", "Dfs");
-
-        try {
-            File file = createTempFile("GATKReportGatherer-UnitTest", ".tbl");
-            //System.out.format("The temporary file" + " has been created: %s%n", file);
-            PrintStream ps = new PrintStream(file);
-            report1.print(ps);
-            //System.out.println("File succesfully outputed!");
-            GATKReport inputRead = new GATKReport(file);
-            //System.out.println("File succesfully read!");
-            //inputRead.print(System.out);
-            Assert.assertTrue(report1.isSameFormat(inputRead));
-            Assert.assertTrue(report1.equals(inputRead));
-
-        } catch (IOException x) {
-            System.err.format("IOException: %s%n", x);
-        }
-    }
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/samples/PedReaderUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/samples/PedReaderUnitTest.java
deleted file mode 100644
index cd6014b..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/samples/PedReaderUnitTest.java
+++ /dev/null
@@ -1,354 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.samples;
-
-import org.apache.log4j.Logger;
-import org.broadinstitute.gatk.utils.BaseTest;
-import org.broadinstitute.gatk.utils.Utils;
-import org.broadinstitute.gatk.utils.exceptions.UserException;
-import org.testng.Assert;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
-
-import java.io.StringReader;
-import java.util.*;
-
-/**
- * UnitTest for PedReader
- *
- * @author Mark DePristo
- * @since 2011
- */
-public class PedReaderUnitTest extends BaseTest {
-    private static Logger logger = Logger.getLogger(PedReaderUnitTest.class);
-
-    private class PedReaderTest extends TestDataProvider {
-        public String fileContents;
-        public List<Sample> expectedSamples;
-        EnumSet<PedReader.MissingPedField> missing;
-
-        private PedReaderTest(final String name, final List<Sample> expectedSamples, final String fileContents) {
-            super(PedReaderTest.class, name);
-            this.fileContents = fileContents;
-            this.expectedSamples = expectedSamples;
-        }
-    }
-
-//     Family ID
-//     Individual ID
-//     Paternal ID
-//     Maternal ID
-//     Sex (1=male; 2=female; other=unknown)
-//     Phenotype
-//
-//     -9 missing
-//     0 missing
-//     1 unaffected
-//     2 affected
-
-    @DataProvider(name = "readerTest")
-    public Object[][] createPEDFiles() {
-        new PedReaderTest("singleRecordMale",
-                Arrays.asList(new Sample("kid", "fam1", null, null, Gender.MALE, Affection.UNAFFECTED)),
-                "fam1 kid 0 0 1 1");
-
-        new PedReaderTest("singleRecordFemale",
-                Arrays.asList(new Sample("kid", "fam1", null, null, Gender.FEMALE, Affection.UNAFFECTED)),
-                "fam1 kid 0 0 2 1");
-
-        new PedReaderTest("singleRecordMissingGender",
-                Arrays.asList(new Sample("kid", "fam1", null, null, Gender.UNKNOWN, Affection.UNKNOWN)),
-                "fam1 kid 0 0 0 0");
-
-        // Affection
-        new PedReaderTest("singleRecordAffected",
-                Arrays.asList(new Sample("kid", "fam1", null, null, Gender.MALE, Affection.AFFECTED)),
-                "fam1 kid 0 0 1 2");
-
-        new PedReaderTest("singleRecordUnaffected",
-                Arrays.asList(new Sample("kid", "fam1", null, null, Gender.MALE, Affection.UNAFFECTED)),
-                "fam1 kid 0 0 1 1");
-
-        new PedReaderTest("singleRecordMissingAffection-9",
-                Arrays.asList(new Sample("kid", "fam1", null, null, Gender.MALE, Affection.UNKNOWN)),
-                "fam1 kid 0 0 1 -9");
-
-        new PedReaderTest("singleRecordMissingAffection0",
-                Arrays.asList(new Sample("kid", "fam1", null, null, Gender.MALE, Affection.UNKNOWN)),
-                "fam1 kid 0 0 1 0");
-
-        new PedReaderTest("multipleUnrelated",
-                Arrays.asList(
-                        new Sample("s1", "fam1", null, null, Gender.MALE,   Affection.UNAFFECTED),
-                        new Sample("s2", "fam2", null, null, Gender.FEMALE, Affection.AFFECTED)),
-                String.format("%s%n%s",
-                        "fam1 s1 0 0 1 1",
-                        "fam2 s2 0 0 2 2"));
-
-        new PedReaderTest("multipleUnrelatedExtraLine",
-                Arrays.asList(
-                        new Sample("s1", "fam1", null, null, Gender.MALE,   Affection.UNAFFECTED),
-                        new Sample("s2", "fam2", null, null, Gender.FEMALE, Affection.AFFECTED)),
-                String.format("%s%n%s%n  %n", // note extra newlines and whitespace
-                        "fam1 s1 0 0 1 1",
-                        "fam2 s2 0 0 2 2"));
-
-        new PedReaderTest("explicitTrio",
-                Arrays.asList(
-                        new Sample("kid", "fam1", "dad", "mom", Gender.MALE,   Affection.AFFECTED),
-                        new Sample("dad", "fam1", null, null,   Gender.MALE,   Affection.UNAFFECTED),
-                        new Sample("mom", "fam1", null, null,   Gender.FEMALE, Affection.AFFECTED)),
-                String.format("%s%n%s%n%s",
-                        "fam1 kid dad mom 1 2",
-                        "fam1 dad 0   0   1 1",
-                        "fam1 mom 0   0   2 2"));
-
-        new PedReaderTest("implicitTrio",
-                Arrays.asList(
-                        new Sample("kid", "fam1", "dad", "mom", Gender.MALE,   Affection.AFFECTED),
-                        new Sample("dad", "fam1", null, null,   Gender.MALE,   Affection.UNKNOWN),
-                        new Sample("mom", "fam1", null, null,   Gender.FEMALE, Affection.UNKNOWN)),
-                "fam1 kid dad mom 1 2");
-
-        new PedReaderTest("partialTrio",
-                Arrays.asList(
-                        new Sample("kid", "fam1", "dad", "mom", Gender.MALE,   Affection.AFFECTED),
-                        new Sample("dad", "fam1", null, null,   Gender.MALE,   Affection.UNAFFECTED),
-                        new Sample("mom", "fam1", null, null,   Gender.FEMALE, Affection.UNKNOWN)),
-                String.format("%s%n%s",
-                        "fam1 kid dad mom 1 2",
-                        "fam1 dad 0   0   1 1"));
-
-        new PedReaderTest("bigPedigree",
-                Arrays.asList(
-                        new Sample("kid", "fam1", "dad",       "mom",      Gender.MALE,   Affection.AFFECTED),
-                        new Sample("dad", "fam1", "granddad1", "grandma1", Gender.MALE,   Affection.UNAFFECTED),
-                        new Sample("granddad1", "fam1", null, null,        Gender.MALE,   Affection.UNKNOWN),
-                        new Sample("grandma1",  "fam1", null, null,        Gender.FEMALE,   Affection.UNKNOWN),
-                        new Sample("mom", "fam1", "granddad2", "grandma2", Gender.FEMALE, Affection.AFFECTED),
-                        new Sample("granddad2", "fam1", null, null,        Gender.MALE,   Affection.UNKNOWN),
-                        new Sample("grandma2",  "fam1", null, null,        Gender.FEMALE,   Affection.UNKNOWN)),
-                String.format("%s%n%s%n%s",
-                        "fam1 kid dad       mom      1 2",
-                        "fam1 dad granddad1 grandma1 1 1",
-                        "fam1 mom granddad2 grandma2 2 2"));
-
-        // Quantitative trait
-        new PedReaderTest("OtherPhenotype",
-                Arrays.asList(
-                        new Sample("s1", "fam1", null, null, Gender.MALE,   Affection.OTHER, "1"),
-                        new Sample("s2", "fam2", null, null, Gender.FEMALE, Affection.OTHER, "10.0")),
-                String.format("%s%n%s",
-                        "fam1 s1 0 0 1 1",
-                        "fam2 s2 0 0 2 10.0"));
-
-        new PedReaderTest("OtherPhenotypeWithMissing",
-                Arrays.asList(
-                        new Sample("s1", "fam1", null, null, Gender.MALE,   Affection.UNKNOWN, Sample.UNSET_QT),
-                        new Sample("s2", "fam2", null, null, Gender.FEMALE, Affection.OTHER, "10.0")),
-                String.format("%s%n%s",
-                        "fam1 s1 0 0 1 -9",
-                        "fam2 s2 0 0 2 10.0"));
-
-        new PedReaderTest("OtherPhenotypeOnlyInts",
-                Arrays.asList(
-                        new Sample("s1", "fam1", null, null, Gender.MALE,   Affection.OTHER, "1"),
-                        new Sample("s2", "fam2", null, null, Gender.FEMALE, Affection.OTHER, "10")),
-                String.format("%s%n%s",
-                        "fam1 s1 0 0 1 1",
-                        "fam2 s2 0 0 2 10"));
-
-        return PedReaderTest.getTests(PedReaderTest.class);
-    }
-
-    private static final void runTest(PedReaderTest test, String myFileContents, EnumSet<PedReader.MissingPedField> missing) {
-        logger.warn("Test " + test);
-        PedReader reader = new PedReader();
-        SampleDB sampleDB = new SampleDB();
-        List<Sample> readSamples = reader.parse(myFileContents, missing, sampleDB);
-        Assert.assertEquals(new HashSet<Sample>(test.expectedSamples), new HashSet<Sample>(readSamples));
-    }
-
-    @Test(enabled = true, dataProvider = "readerTest")
-    public void testPedReader(PedReaderTest test) {
-        runTest(test, test.fileContents, EnumSet.noneOf(PedReader.MissingPedField.class));
-    }
-
-    @Test(enabled = true, dataProvider = "readerTest")
-    public void testPedReaderWithComments(PedReaderTest test) {
-        runTest(test, String.format("#comment%n%s", test.fileContents), EnumSet.noneOf(PedReader.MissingPedField.class));
-    }
-
-    @Test(enabled = true, dataProvider = "readerTest")
-    public void testPedReaderWithSemicolons(PedReaderTest test) {
-        runTest(test,
-                test.fileContents.replace(String.format("%n"), ";"),
-                EnumSet.noneOf(PedReader.MissingPedField.class));
-    }
-
-    // -----------------------------------------------------------------
-    // missing format field tests
-    // -----------------------------------------------------------------
-
-    private class PedReaderTestMissing extends TestDataProvider {
-        public EnumSet<PedReader.MissingPedField> missingDesc;
-        public EnumSet<PedReader.Field> missingFields;
-        public final String fileContents;
-        public Sample expected;
-
-
-        private PedReaderTestMissing(final String name, final String fileContents,
-                                     EnumSet<PedReader.MissingPedField> missingDesc,
-                                     EnumSet<PedReader.Field> missingFields,
-                                     final Sample expected) {
-            super(PedReaderTestMissing.class, name);
-            this.fileContents = fileContents;
-            this.missingDesc = missingDesc;
-            this.missingFields = missingFields;
-            this.expected = expected;
-        }
-    }
-
-    @DataProvider(name = "readerTestMissing")
-    public Object[][] createPEDFilesWithMissing() {
-        new PedReaderTestMissing("missingFam",
-                "fam1 kid dad mom 1 2",
-                EnumSet.of(PedReader.MissingPedField.NO_FAMILY_ID),
-                EnumSet.of(PedReader.Field.FAMILY_ID),
-                new Sample("kid", null, "dad", "mom", Gender.MALE, Affection.AFFECTED));
-
-        new PedReaderTestMissing("missingParents",
-                "fam1 kid dad mom 1 2",
-                EnumSet.of(PedReader.MissingPedField.NO_PARENTS),
-                EnumSet.of(PedReader.Field.PATERNAL_ID, PedReader.Field.MATERNAL_ID),
-                new Sample("kid", "fam1", null, null, Gender.MALE, Affection.AFFECTED));
-
-        new PedReaderTestMissing("missingSex",
-                "fam1 kid dad mom 1 2",
-                EnumSet.of(PedReader.MissingPedField.NO_SEX),
-                EnumSet.of(PedReader.Field.GENDER),
-                new Sample("kid", "fam1", "dad", "mom", Gender.UNKNOWN, Affection.AFFECTED));
-
-        new PedReaderTestMissing("missingPhenotype",
-                "fam1 kid dad mom 1 2",
-                EnumSet.of(PedReader.MissingPedField.NO_PHENOTYPE),
-                EnumSet.of(PedReader.Field.PHENOTYPE),
-                new Sample("kid", "fam1", "dad", "mom", Gender.MALE, Affection.UNKNOWN));
-
-        new PedReaderTestMissing("missingEverythingButGender",
-                "fam1 kid dad mom 1 2",
-                EnumSet.of(PedReader.MissingPedField.NO_PHENOTYPE, PedReader.MissingPedField.NO_PARENTS, PedReader.MissingPedField.NO_FAMILY_ID),
-                EnumSet.of(PedReader.Field.FAMILY_ID, PedReader.Field.PATERNAL_ID, PedReader.Field.MATERNAL_ID, PedReader.Field.PHENOTYPE),
-                new Sample("kid", null, null, null, Gender.MALE, Affection.UNKNOWN));
-
-
-        return PedReaderTestMissing.getTests(PedReaderTestMissing.class);
-    }
-
-    @Test(enabled = true, dataProvider = "readerTestMissing")
-    public void testPedReaderWithMissing(PedReaderTestMissing test) {
-        final String contents = sliceContents(test.missingFields, test.fileContents);
-        logger.warn("Test " + test);
-        PedReader reader = new PedReader();
-        SampleDB sampleDB = new SampleDB();
-        reader.parse(new StringReader(contents), test.missingDesc, sampleDB);
-        final Sample missingSample = sampleDB.getSample("kid");
-        Assert.assertEquals(test.expected, missingSample, "Missing field value not expected value for " + test);
-    }
-
-    private final static String sliceContents(EnumSet<PedReader.Field> missingFieldsSet, String full) {
-        List<String> parts = new ArrayList<String>(Arrays.asList(full.split("\\s+")));
-        final List<PedReader.Field> missingFields = new ArrayList<PedReader.Field>(missingFieldsSet);
-        Collections.reverse(missingFields);
-        for ( PedReader.Field field : missingFields )
-            parts.remove(field.ordinal());
-        return Utils.join("\t", parts);
-    }
-
-    // -----------------------------------------------------------------
-    // parsing tags
-    // -----------------------------------------------------------------
-
-    private class PedReaderTestTagParsing extends TestDataProvider {
-        public EnumSet<PedReader.MissingPedField> expected;
-        public final List<String> tags;
-
-        private PedReaderTestTagParsing(final List<String> tags, EnumSet<PedReader.MissingPedField> missingDesc) {
-            super(PedReaderTestTagParsing.class);
-            this.tags = tags;
-            this.expected = missingDesc;
-        }
-    }
-
-    @DataProvider(name = "readerTestTagParsing")
-    public Object[][] createReaderTestTagParsing() {
-        new PedReaderTestTagParsing(
-                Collections.<String>emptyList(),
-                EnumSet.noneOf(PedReader.MissingPedField.class));
-
-        new PedReaderTestTagParsing(
-                Arrays.asList("NO_FAMILY_ID"),
-                EnumSet.of(PedReader.MissingPedField.NO_FAMILY_ID));
-
-        new PedReaderTestTagParsing(
-                Arrays.asList("NO_PARENTS"),
-                EnumSet.of(PedReader.MissingPedField.NO_PARENTS));
-
-        new PedReaderTestTagParsing(
-                Arrays.asList("NO_PHENOTYPE"),
-                EnumSet.of(PedReader.MissingPedField.NO_PHENOTYPE));
-
-        new PedReaderTestTagParsing(
-                Arrays.asList("NO_SEX"),
-                EnumSet.of(PedReader.MissingPedField.NO_SEX));
-
-        new PedReaderTestTagParsing(
-                Arrays.asList("NO_SEX", "NO_PHENOTYPE"),
-                EnumSet.of(PedReader.MissingPedField.NO_SEX, PedReader.MissingPedField.NO_PHENOTYPE));
-
-        new PedReaderTestTagParsing(
-                Arrays.asList("NO_SEX", "NO_PHENOTYPE", "NO_PARENTS"),
-                EnumSet.of(PedReader.MissingPedField.NO_SEX, PedReader.MissingPedField.NO_PHENOTYPE, PedReader.MissingPedField.NO_PARENTS));
-
-        return PedReaderTestTagParsing.getTests(PedReaderTestTagParsing.class);
-    }
-
-    @Test(enabled = true, dataProvider = "readerTestTagParsing")
-    public void testPedReaderTagParsing(PedReaderTestTagParsing test) {
-        EnumSet<PedReader.MissingPedField> parsed = PedReader.parseMissingFieldTags("test", test.tags);
-        Assert.assertEquals(test.expected, parsed, "Failed to properly parse tags " + test.tags);
-    }
-
-    @Test(enabled = true, expectedExceptions = UserException.class)
-    public void testPedReaderTagParsing1() {
-        EnumSet<PedReader.MissingPedField> parsed = PedReader.parseMissingFieldTags("test", Arrays.asList("XXX"));
-    }
-
-    @Test(enabled = true, expectedExceptions = UserException.class)
-    public void testPedReaderTagParsing2() {
-        EnumSet<PedReader.MissingPedField> parsed = PedReader.parseMissingFieldTags("test", Arrays.asList("NO_SEX", "XXX"));
-    }
-}
\ No newline at end of file
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/samples/SampleDBUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/samples/SampleDBUnitTest.java
deleted file mode 100644
index fc934ef..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/samples/SampleDBUnitTest.java
+++ /dev/null
@@ -1,251 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.samples;
-
-import htsjdk.samtools.SAMFileHeader;
-import org.broadinstitute.gatk.utils.BaseTest;
-import org.broadinstitute.gatk.utils.exceptions.UserException;
-import org.broadinstitute.gatk.utils.sam.ArtificialSAMUtils;
-import org.testng.Assert;
-import org.testng.annotations.BeforeMethod;
-import org.testng.annotations.Test;
-
-import java.io.File;
-import java.util.*;
-
-/**
- * Created by IntelliJ IDEA.
- * User: brett
- * Date: Sep 9, 2010
- * Time: 8:21:00 AM
- */
-public class SampleDBUnitTest extends BaseTest {
-    private static SampleDBBuilder builder;
-    // all the test sample files are located here
-    private File testPED = new File(privateTestDir +  "testtrio.ped");
-
-    private static final Set<Sample> testPEDSamples = new HashSet<Sample>(Arrays.asList(
-            new Sample("kid", "fam1", "dad", "mom", Gender.MALE,   Affection.AFFECTED),
-            new Sample("dad", "fam1", null, null,   Gender.MALE,   Affection.UNAFFECTED),
-            new Sample("mom", "fam1", null, null,   Gender.FEMALE, Affection.AFFECTED)));
-
-    private static final Set<Sample> testPEDFamilyF2 = new HashSet<Sample>(Arrays.asList(
-            new Sample("s2", "fam2", "d2", "m2", Gender.FEMALE, Affection.AFFECTED),
-            new Sample("d2", "fam2", null, null, Gender.MALE, Affection.UNKNOWN),
-            new Sample("m2", "fam2", null, null, Gender.FEMALE, Affection.UNKNOWN)
-            ));
-
-    private static final Set<Sample> testPEDFamilyF3 = new HashSet<Sample>(Arrays.asList(
-            new Sample("s1", "fam3", "d1", "m1", Gender.FEMALE, Affection.AFFECTED),
-            new Sample("d1", "fam3", null, null, Gender.MALE, Affection.UNKNOWN),
-            new Sample("m1", "fam3", null, null, Gender.FEMALE, Affection.UNKNOWN)
-            ));
-
-    private static final Set<Sample> testSAMSamples = new HashSet<Sample>(Arrays.asList(
-            new Sample("kid", null, null, null, Gender.UNKNOWN,   Affection.UNKNOWN),
-            new Sample("mom", null, null, null, Gender.UNKNOWN,   Affection.UNKNOWN),
-            new Sample("dad", null, null, null, Gender.UNKNOWN,   Affection.UNKNOWN)));
-
-    private static final HashMap<String, Set<Sample>> testGetFamilies = new HashMap<String,Set<Sample>>();
-    static {
-        testGetFamilies.put("fam1", testPEDSamples);
-        testGetFamilies.put("fam2", testPEDFamilyF2);
-        testGetFamilies.put("fam3", testPEDFamilyF3);
-    }
-
-    private static final Set<Sample> testKidsWithParentsFamilies2 = new HashSet<Sample>(Arrays.asList(
-            new Sample("kid", "fam1", "dad", "mom", Gender.MALE,   Affection.AFFECTED),
-            new Sample("kid3", "fam5", "dad2", "mom2", Gender.MALE,   Affection.AFFECTED),
-            new Sample("kid2", "fam5", "dad2", "mom2", Gender.MALE,   Affection.AFFECTED)));
-
-    private static final HashSet<String> testGetPartialFamiliesIds =   new HashSet<String>(Arrays.asList("kid","s1"));
-    private static final HashMap<String, Set<Sample>> testGetPartialFamilies = new HashMap<String,Set<Sample>>();
-    static {
-        testGetPartialFamilies.put("fam1", new HashSet<Sample>(Arrays.asList(new Sample("kid", "fam1", "dad", "mom", Gender.MALE,   Affection.AFFECTED))));
-        testGetPartialFamilies.put("fam3", new HashSet<Sample>(Arrays.asList(new Sample("s1", "fam3", "d1", "m1", Gender.FEMALE, Affection.AFFECTED))));
-    }
-
-    private static final String testPEDString =
-            String.format("%s%n%s%n%s",
-                    "fam1 kid dad mom 1 2",
-                    "fam1 dad 0   0   1 1",
-                    "fam1 mom 0   0   2 2");
-
-    private static final String testPEDMultipleFamilies =
-            String.format("%s%n%s%n%s%n%s%n%s",
-                    "fam1 kid dad mom 1 2",
-                    "fam1 dad 0   0   1 1",
-                    "fam1 mom 0   0   2 2",
-                    "fam3 s1  d1  m1  2 2",
-                    "fam2 s2  d2  m2  2 2");
-
-    private static final String testPEDMultipleFamilies2 =
-            String.format("%s%n%s%n%s%n%s%n%s%n%s%n%s%n%s%n%s",
-                    "fam1 kid dad mom 1 2",
-                    "fam1 dad 0   0   1 1",
-                    "fam1 mom 0   0   2 2",
-                    "fam4 kid4 dad4 0 1 2",
-                    "fam4 dad4 0   0   1 1",
-                    "fam5 kid2 dad2 mom2 1 2",
-                    "fam5 kid3 dad2 mom2 1 2",
-                    "fam5 dad2 0   0   1 1",
-                    "fam5 mom2 0   0   2 2");
-
-    private static final String testPEDStringInconsistentGender =
-            "fam1 kid 0   0   2 2";
-
-    private static final Set<Sample> testPEDSamplesAsSet =
-            new HashSet<Sample>(testPEDSamples);
-
-
-    @BeforeMethod
-    public void before() {
-        builder = new SampleDBBuilder(PedigreeValidationType.STRICT);
-    }
-
-    @Test()
-    public void loadPEDFile() {
-        builder.addSamplesFromPedigreeFiles(Arrays.asList(testPED));
-        SampleDB db = builder.getFinalSampleDB();
-        Assert.assertEquals(testPEDSamplesAsSet, db.getSamples());
-    }
-
-    @Test()
-    public void loadPEDString() {
-        builder.addSamplesFromPedigreeStrings(Arrays.asList(testPEDString));
-        SampleDB db = builder.getFinalSampleDB();
-        Assert.assertEquals(testPEDSamplesAsSet, db.getSamples());
-    }
-
-    private static final void addSAMHeader() {
-        SAMFileHeader header = ArtificialSAMUtils.createArtificialSamHeader(1, 1, 10);
-        ArtificialSAMUtils.createEnumeratedReadGroups(header, Arrays.asList("1", "2", "3"),
-                Arrays.asList("kid", "mom", "dad"));
-        builder.addSamplesFromSAMHeader(header);
-    }
-
-    @Test()
-    public void loadSAMHeader() {
-        addSAMHeader();
-        SampleDB db = builder.getFinalSampleDB();
-        Assert.assertEquals(testSAMSamples, db.getSamples());
-    }
-
-    @Test()
-    public void loadSAMHeaderPlusPED() {
-        addSAMHeader();
-        builder.addSamplesFromPedigreeFiles(Arrays.asList(testPED));
-        SampleDB db = builder.getFinalSampleDB();
-        Assert.assertEquals(testPEDSamples, db.getSamples());
-    }
-
-    @Test()
-    public void loadDuplicateData() {
-        builder.addSamplesFromPedigreeFiles(Arrays.asList(testPED));
-        builder.addSamplesFromPedigreeFiles(Arrays.asList(testPED));
-        SampleDB db = builder.getFinalSampleDB();
-        Assert.assertEquals(testPEDSamples, db.getSamples());
-    }
-
-    @Test(expectedExceptions = UserException.class)
-    public void loadNonExistentFile() {
-        builder.addSamplesFromPedigreeFiles(Arrays.asList(new File("non-existence-file.txt")));
-        SampleDB db = builder.getFinalSampleDB();
-        Assert.assertEquals(testSAMSamples, db.getSamples());
-    }
-
-    @Test(expectedExceptions = UserException.class)
-    public void loadInconsistentData() {
-        builder = new SampleDBBuilder(PedigreeValidationType.STRICT);
-        builder.addSamplesFromPedigreeFiles(Arrays.asList(testPED));
-        builder.addSamplesFromPedigreeStrings(Arrays.asList(testPEDStringInconsistentGender));
-        builder.getFinalSampleDB();
-    }
-
-    @Test(expectedExceptions = UserException.class)
-    public void sampleInSAMHeaderNotInSamplesDB() {
-        addSAMHeader();
-        builder.addSamplesFromPedigreeStrings(Arrays.asList(testPEDStringInconsistentGender));
-        builder.getFinalSampleDB();
-    }
-
-    @Test()
-    public void getFamilyIDs() {
-        builder.addSamplesFromPedigreeStrings(Arrays.asList(testPEDMultipleFamilies));
-        SampleDB db = builder.getFinalSampleDB();
-        Assert.assertEquals(db.getFamilyIDs(), new TreeSet<String>(Arrays.asList("fam1", "fam2", "fam3")));
-    }
-
-    @Test()
-    public void getFamily() {
-        builder.addSamplesFromPedigreeStrings(Arrays.asList(testPEDMultipleFamilies));
-        SampleDB db = builder.getFinalSampleDB();
-        Assert.assertEquals(db.getFamily("fam1"), testPEDSamplesAsSet);
-    }
-
-    @Test()
-    public void getFamilies(){
-        builder.addSamplesFromPedigreeStrings(Arrays.asList(testPEDMultipleFamilies));
-        SampleDB db = builder.getFinalSampleDB();
-        Assert.assertEquals(db.getFamilies(),testGetFamilies);
-        Assert.assertEquals(db.getFamilies(null),testGetFamilies);
-        Assert.assertEquals(db.getFamilies(testGetPartialFamiliesIds),testGetPartialFamilies);
-    }
-
-    @Test()
-    public void testGetChildrenWithParents()
-    {
-        builder.addSamplesFromPedigreeStrings(Arrays.asList(testPEDMultipleFamilies2));
-        SampleDB db = builder.getFinalSampleDB();
-        Assert.assertEquals(db.getChildrenWithParents(), testKidsWithParentsFamilies2);
-        Assert.assertEquals(db.getChildrenWithParents(false), testKidsWithParentsFamilies2);
-        Assert.assertEquals(db.getChildrenWithParents(true), new HashSet<Sample>(Arrays.asList(new Sample("kid", "fam1", "dad", "mom", Gender.MALE,   Affection.AFFECTED))));
-    }
-
-    @Test()
-    public void testGetFounderIds(){
-        builder.addSamplesFromPedigreeStrings(Arrays.asList(testPEDMultipleFamilies2));
-        SampleDB db = builder.getFinalSampleDB();
-        Assert.assertEquals(db.getFounderIds(), new HashSet<String>(Arrays.asList("dad","mom","dad2","mom2","dad4")));
-    }
-
-    @Test()
-    public void loadFamilyIDs() {
-        builder.addSamplesFromPedigreeStrings(Arrays.asList(testPEDMultipleFamilies));
-        SampleDB db = builder.getFinalSampleDB();
-        Map<String, Set<Sample>> families = db.getFamilies();
-        Assert.assertEquals(families.size(), 3);
-        Assert.assertEquals(families.keySet(), new TreeSet<String>(Arrays.asList("fam1", "fam2", "fam3")));
-
-        for ( final String famID : families.keySet() ) {
-            final Set<Sample> fam = families.get(famID);
-            Assert.assertEquals(fam.size(), 3);
-            for ( final Sample sample : fam ) {
-                Assert.assertEquals(sample.getFamilyID(), famID);
-            }
-        }
-    }
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/samples/SampleUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/samples/SampleUnitTest.java
deleted file mode 100644
index b1b09db..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/samples/SampleUnitTest.java
+++ /dev/null
@@ -1,89 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.samples;
-
-import org.broadinstitute.gatk.utils.BaseTest;
-import org.testng.Assert;
-import org.testng.annotations.BeforeClass;
-import org.testng.annotations.Test;
-
-/**
- *
- */
-public class SampleUnitTest extends BaseTest {
-    SampleDB db;
-    static Sample fam1A, fam1B, fam1C;
-    static Sample s1, s2;
-    static Sample trait1, trait2, trait3, trait4, trait5;
-
-    @BeforeClass
-    public void init() {
-        db = new SampleDB();
-
-        fam1A = new Sample("1A", db, "fam1", "1B", "1C", Gender.UNKNOWN);
-        fam1B = new Sample("1B", db, "fam1", null, null, Gender.MALE);
-        fam1C = new Sample("1C", db, "fam1", null, null, Gender.FEMALE);
-
-        s1 = new Sample("s1", db);
-        s2 = new Sample("s2", db);
-
-        trait1 = new Sample("t1", db, Affection.AFFECTED, Sample.UNSET_QT);
-        trait2 = new Sample("t2", db, Affection.UNAFFECTED, Sample.UNSET_QT);
-        trait3 = new Sample("t3", db, Affection.UNKNOWN, Sample.UNSET_QT);
-        trait4 = new Sample("t4", db, Affection.OTHER, "1.0");
-        trait5 = new Sample("t4", db, Affection.OTHER, "CEU");
-    }
-
-    /**
-     * Now basic getters
-     */
-    @Test()
-    public void normalGettersTest() {
-        Assert.assertEquals("1A", fam1A.getID());
-        Assert.assertEquals("fam1", fam1A.getFamilyID());
-        Assert.assertEquals("1B", fam1A.getPaternalID());
-        Assert.assertEquals("1C", fam1A.getMaternalID());
-        Assert.assertEquals(null, fam1B.getPaternalID());
-        Assert.assertEquals(null, fam1B.getMaternalID());
-
-        Assert.assertEquals(Affection.AFFECTED, trait1.getAffection());
-        Assert.assertEquals(Sample.UNSET_QT, trait1.getOtherPhenotype());
-        Assert.assertEquals(Affection.UNAFFECTED, trait2.getAffection());
-        Assert.assertEquals(Sample.UNSET_QT, trait2.getOtherPhenotype());
-        Assert.assertEquals(Affection.UNKNOWN, trait3.getAffection());
-        Assert.assertEquals(Sample.UNSET_QT, trait3.getOtherPhenotype());
-        Assert.assertEquals(Affection.OTHER, trait4.getAffection());
-        Assert.assertEquals("1.0", trait4.getOtherPhenotype());
-        Assert.assertEquals("CEU", trait5.getOtherPhenotype());
-    }
-
-    @Test()
-    public void testGenders() {
-        Assert.assertTrue(fam1A.getGender() == Gender.UNKNOWN);
-        Assert.assertTrue(fam1B.getGender() == Gender.MALE);
-        Assert.assertTrue(fam1C.getGender() == Gender.FEMALE);
-    }
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/traversals/DummyActiveRegionWalker.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/traversals/DummyActiveRegionWalker.java
deleted file mode 100644
index e1d81b5..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/traversals/DummyActiveRegionWalker.java
+++ /dev/null
@@ -1,116 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.traversals;
-
-import org.broadinstitute.gatk.engine.contexts.AlignmentContext;
-import org.broadinstitute.gatk.engine.contexts.ReferenceContext;
-import org.broadinstitute.gatk.engine.refdata.RefMetaDataTracker;
-import org.broadinstitute.gatk.engine.walkers.ActiveRegionWalker;
-import org.broadinstitute.gatk.utils.GenomeLoc;
-import org.broadinstitute.gatk.utils.GenomeLocSortedSet;
-import org.broadinstitute.gatk.utils.activeregion.ActiveRegion;
-import org.broadinstitute.gatk.utils.activeregion.ActiveRegionReadState;
-import org.broadinstitute.gatk.utils.activeregion.ActivityProfileState;
-
-import java.util.*;
-
-/**
- * ActiveRegionWalker for unit testing
- *
- * User: depristo
- * Date: 1/15/13
- * Time: 1:28 PM
- */
-class DummyActiveRegionWalker extends ActiveRegionWalker<Integer, Integer> {
-    private final double prob;
-    private EnumSet<ActiveRegionReadState> states = super.desiredReadStates();
-    private GenomeLocSortedSet activeRegions = null;
-
-    protected List<GenomeLoc> isActiveCalls = new ArrayList<GenomeLoc>();
-    protected Map<GenomeLoc, ActiveRegion> mappedActiveRegions = new LinkedHashMap<GenomeLoc, ActiveRegion>();
-    private boolean declareHavingPresetRegions = false;
-
-    public DummyActiveRegionWalker() {
-        this(1.0);
-    }
-
-    public DummyActiveRegionWalker(double constProb) {
-        this.prob = constProb;
-    }
-
-    public DummyActiveRegionWalker(GenomeLocSortedSet activeRegions, EnumSet<ActiveRegionReadState> wantStates, final boolean declareHavingPresetRegions) {
-        this(activeRegions, declareHavingPresetRegions);
-        this.states = wantStates;
-    }
-
-    public DummyActiveRegionWalker(GenomeLocSortedSet activeRegions, final boolean declareHavingPresetRegions) {
-        this(1.0);
-        this.activeRegions = activeRegions;
-        this.declareHavingPresetRegions = declareHavingPresetRegions;
-    }
-
-    public void setStates(EnumSet<ActiveRegionReadState> states) {
-        this.states = states;
-    }
-
-    @Override
-    public boolean hasPresetActiveRegions() {
-        return declareHavingPresetRegions;
-    }
-
-    @Override
-    public GenomeLocSortedSet getPresetActiveRegions() {
-        return declareHavingPresetRegions ? activeRegions : null;
-    }
-
-    @Override
-    public EnumSet<ActiveRegionReadState> desiredReadStates() {
-        return states;
-    }
-
-    @Override
-    public ActivityProfileState isActive(RefMetaDataTracker tracker, ReferenceContext ref, AlignmentContext context) {
-        isActiveCalls.add(ref.getLocus());
-        final double p = activeRegions == null || activeRegions.overlaps(ref.getLocus()) ? prob : 0.0;
-        return new ActivityProfileState(ref.getLocus(), p);
-    }
-
-    @Override
-    public Integer map(ActiveRegion activeRegion, RefMetaDataTracker metaDataTracker) {
-        mappedActiveRegions.put(activeRegion.getLocation(), activeRegion);
-        return 0;
-    }
-
-    @Override
-    public Integer reduceInit() {
-        return 0;
-    }
-
-    @Override
-    public Integer reduce(Integer value, Integer sum) {
-        return 0;
-    }
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/traversals/TAROrderedReadCacheUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/traversals/TAROrderedReadCacheUnitTest.java
deleted file mode 100644
index 75c669c..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/traversals/TAROrderedReadCacheUnitTest.java
+++ /dev/null
@@ -1,111 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.traversals;
-
-import htsjdk.samtools.reference.IndexedFastaSequenceFile;
-import org.broadinstitute.gatk.utils.BaseTest;
-import org.broadinstitute.gatk.utils.fasta.CachingIndexedFastaSequenceFile;
-import org.broadinstitute.gatk.utils.sam.ArtificialBAMBuilder;
-import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
-import org.testng.Assert;
-import org.testng.annotations.BeforeClass;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
-
-import java.io.File;
-import java.io.FileNotFoundException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-
-public class TAROrderedReadCacheUnitTest extends BaseTest {
-    // example fasta index file, can be deleted if you don't use the reference
-    private IndexedFastaSequenceFile seq;
-
-    @BeforeClass
-    public void setup() throws FileNotFoundException {
-        // sequence
-        seq = new CachingIndexedFastaSequenceFile(new File(b37KGReference));
-    }
-
-    @DataProvider(name = "ReadCacheTestData")
-    public Object[][] makeReadCacheTestData() {
-        List<Object[]> tests = new ArrayList<Object[]>();
-
-        for ( final int nReadsPerLocus : Arrays.asList(0, 1, 10, 100) ) {
-            for ( final int nLoci : Arrays.asList(1, 10, 100) ) {
-                for ( final int max : Arrays.asList(10, 50, 1000) ) {
-                    for ( final boolean addAllAtOnce : Arrays.asList(true, false) ) {
-                        tests.add(new Object[]{nReadsPerLocus, nLoci, max, addAllAtOnce});
-                    }
-                }
-            }
-        }
-
-        return tests.toArray(new Object[][]{});
-    }
-
-    @Test(dataProvider = "ReadCacheTestData")
-    public void testReadCache(final int nReadsPerLocus, final int nLoci, final int max, final boolean addAllAtOnce) {
-        final TAROrderedReadCache cache = new TAROrderedReadCache(max);
-
-        Assert.assertEquals(cache.getMaxCapacity(), max);
-        Assert.assertEquals(cache.getNumDiscarded(), 0);
-        Assert.assertEquals(cache.size(), 0);
-
-        final ArtificialBAMBuilder bamBuilder = new ArtificialBAMBuilder(seq, nReadsPerLocus, nLoci);
-        final List<GATKSAMRecord> reads = bamBuilder.makeReads();
-
-        if ( addAllAtOnce ) {
-            cache.addAll(reads);
-        } else {
-            for ( final GATKSAMRecord read : reads ) {
-                cache.add(read);
-            }
-        }
-
-        final int nTotalReads = reads.size();
-        final int nExpectedToKeep = Math.min(nTotalReads, max);
-        final int nExpectedToDiscard = nTotalReads - nExpectedToKeep;
-        Assert.assertEquals(cache.getNumDiscarded(), nExpectedToDiscard, "wrong number of reads discarded");
-        Assert.assertEquals(cache.size(), nExpectedToKeep, "wrong number of reads kept");
-
-        final List<GATKSAMRecord> cacheReads = cache.popCurrentReads();
-        Assert.assertEquals(cache.size(), 0, "Should be no reads left");
-        Assert.assertEquals(cache.getNumDiscarded(), 0, "should have reset stats");
-        Assert.assertEquals(cacheReads.size(), nExpectedToKeep, "should have 1 read for every read we expected to keep");
-
-        verifySortednessOfReads(cacheReads);
-    }
-
-    private void verifySortednessOfReads( final List<GATKSAMRecord> reads) {
-        int lastStart = -1;
-        for ( GATKSAMRecord read : reads ) {
-            Assert.assertTrue(lastStart <= read.getAlignmentStart(), "Reads should be sorted but weren't.  Found read with start " + read.getAlignmentStart() + " while last was " + lastStart);
-            lastStart = read.getAlignmentStart();
-        }
-    }
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/traversals/TraverseActiveRegionsUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/traversals/TraverseActiveRegionsUnitTest.java
deleted file mode 100644
index 50eb496..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/traversals/TraverseActiveRegionsUnitTest.java
+++ /dev/null
@@ -1,679 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.traversals;
-
-import com.google.java.contract.PreconditionError;
-import htsjdk.samtools.*;
-import org.broadinstitute.gatk.utils.commandline.Tags;
-import org.broadinstitute.gatk.engine.arguments.ValidationExclusion;
-import org.broadinstitute.gatk.engine.datasources.reads.*;
-import org.broadinstitute.gatk.engine.filters.ReadFilter;
-import org.broadinstitute.gatk.engine.iterators.ReadTransformer;
-import org.broadinstitute.gatk.engine.resourcemanagement.ThreadAllocation;
-import org.broadinstitute.gatk.engine.walkers.Walker;
-import org.broadinstitute.gatk.utils.GenomeLocSortedSet;
-import org.broadinstitute.gatk.utils.SampleUtils;
-import org.broadinstitute.gatk.utils.activeregion.ActiveRegionReadState;
-import org.broadinstitute.gatk.utils.interval.IntervalMergingRule;
-import org.broadinstitute.gatk.utils.interval.IntervalUtils;
-import org.broadinstitute.gatk.utils.sam.*;
-import htsjdk.samtools.reference.IndexedFastaSequenceFile;
-import org.broadinstitute.gatk.utils.BaseTest;
-import org.broadinstitute.gatk.engine.GenomeAnalysisEngine;
-import org.broadinstitute.gatk.engine.datasources.providers.LocusShardDataProvider;
-import org.broadinstitute.gatk.engine.datasources.rmd.ReferenceOrderedDataSource;
-import org.broadinstitute.gatk.engine.executive.WindowMaker;
-import org.broadinstitute.gatk.utils.GenomeLoc;
-import org.broadinstitute.gatk.utils.GenomeLocParser;
-import org.broadinstitute.gatk.utils.activeregion.ActiveRegion;
-import org.broadinstitute.gatk.utils.fasta.CachingIndexedFastaSequenceFile;
-import org.testng.Assert;
-import org.testng.annotations.BeforeClass;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
-
-
-import java.io.File;
-import java.io.IOException;
-import java.util.*;
-
-/**
- * Created with IntelliJ IDEA.
- * User: thibault
- * Date: 11/13/12
- * Time: 2:47 PM
- *
- * Test the Active Region Traversal Contract
- * http://iwww.broadinstitute.org/gsa/wiki/index.php/Active_Region_Traversal_Contract
- */
-public class TraverseActiveRegionsUnitTest extends BaseTest {
-    private final static boolean ENFORCE_CONTRACTS = false;
-    private final static boolean DEBUG = false;
-
-    @DataProvider(name = "TraversalEngineProvider")
-    public Object[][] makeTraversals() {
-        final List<Object[]> traversals = new LinkedList<Object[]>();
-        traversals.add(new Object[]{new TraverseActiveRegions<>()});
-        return traversals.toArray(new Object[][]{});
-    }
-
-    private IndexedFastaSequenceFile reference;
-    private SAMSequenceDictionary dictionary;
-    private GenomeLocParser genomeLocParser;
-
-    private List<GenomeLoc> intervals;
-
-    private File testBAM;
-
-    @BeforeClass
-    private void init() throws IOException {
-        //reference = new CachingIndexedFastaSequenceFile(new File("/Users/depristo/Desktop/broadLocal/localData/human_g1k_v37.fasta")); // hg19Reference));
-        reference = new CachingIndexedFastaSequenceFile(new File(hg19Reference));
-        dictionary = reference.getSequenceDictionary();
-        genomeLocParser = new GenomeLocParser(dictionary);
-
-        // TODO: reads with indels
-        // TODO: reads which span many regions
-        // TODO: reads which are partially between intervals (in/outside extension)
-        // TODO: duplicate reads
-        // TODO: read at the end of a contig
-        // TODO: reads which are completely outside intervals but within extension
-        // TODO: test the extension itself
-        // TODO: unmapped reads
-
-        intervals = new ArrayList<GenomeLoc>();
-        intervals.add(genomeLocParser.createGenomeLoc("1", 10, 20));
-        intervals.add(genomeLocParser.createGenomeLoc("1", 1, 999));
-        intervals.add(genomeLocParser.createGenomeLoc("1", 1000, 1999));
-        intervals.add(genomeLocParser.createGenomeLoc("1", 2000, 2999));
-        intervals.add(genomeLocParser.createGenomeLoc("1", 10000, 20000));
-        intervals.add(genomeLocParser.createGenomeLoc("2", 1, 100));
-        intervals.add(genomeLocParser.createGenomeLoc("20", 10000, 10100));
-        intervals = IntervalUtils.sortAndMergeIntervals(genomeLocParser, intervals, IntervalMergingRule.OVERLAPPING_ONLY).toList();
-
-        List<GATKSAMRecord> reads = new ArrayList<GATKSAMRecord>();
-        reads.add(buildSAMRecord("simple", "1", 100, 200));
-        reads.add(buildSAMRecord("overlap_equal", "1", 10, 20));
-        reads.add(buildSAMRecord("overlap_unequal", "1", 10, 21));
-        reads.add(buildSAMRecord("boundary_equal", "1", 1990, 2009));
-        reads.add(buildSAMRecord("boundary_unequal", "1", 1990, 2008));
-        reads.add(buildSAMRecord("boundary_1_pre", "1", 1950, 2000));
-        reads.add(buildSAMRecord("boundary_1_post", "1", 1999, 2050));
-        reads.add(buildSAMRecord("extended_and_np", "1", 990, 1990));
-        reads.add(buildSAMRecord("outside_intervals", "1", 5000, 6000));
-        reads.add(buildSAMRecord("shard_boundary_1_pre", "1", 16300, 16385));
-        reads.add(buildSAMRecord("shard_boundary_1_post", "1", 16384, 16400));
-        reads.add(buildSAMRecord("shard_boundary_equal", "1", 16355, 16414));
-        reads.add(buildSAMRecord("simple20", "20", 10025, 10075));
-
-        createBAM(reads);
-    }
-
-    private void createBAM(List<GATKSAMRecord> reads) throws IOException {
-        testBAM = createTempFile("TraverseActiveRegionsUnitTest", ".bam");
-
-        SAMFileWriter out = new SAMFileWriterFactory().setCreateIndex(true).makeBAMWriter(reads.get(0).getHeader(), true, testBAM);
-        for (GATKSAMRecord read : ReadUtils.sortReadsByCoordinate(reads)) {
-            out.addAlignment(read);
-        }
-        out.close();
-
-        new File(testBAM.getAbsolutePath().replace(".bam", ".bai")).deleteOnExit();
-        new File(testBAM.getAbsolutePath() + ".bai").deleteOnExit();
-    }
-
-    @Test(enabled = true && ! DEBUG, dataProvider = "TraversalEngineProvider")
-    public void testAllBasesSeen(TraverseActiveRegions t) {
-        DummyActiveRegionWalker walker = new DummyActiveRegionWalker();
-
-        List<GenomeLoc> activeIntervals = getIsActiveIntervals(t, walker, intervals);
-        // Contract: Every genome position in the analysis interval(s) is processed by the walker's isActive() call
-        verifyEqualIntervals(intervals, activeIntervals);
-    }
-
-    private List<GenomeLoc> getIsActiveIntervals(final TraverseActiveRegions t, DummyActiveRegionWalker walker, List<GenomeLoc> intervals) {
-        List<GenomeLoc> activeIntervals = new ArrayList<GenomeLoc>();
-        for (LocusShardDataProvider dataProvider : createDataProviders(t, walker, intervals, testBAM)) {
-            t.traverse(walker, dataProvider, 0);
-            activeIntervals.addAll(walker.isActiveCalls);
-        }
-
-        return activeIntervals;
-    }
-
-    @Test (enabled = ENFORCE_CONTRACTS, dataProvider = "TraversalEngineProvider", expectedExceptions = PreconditionError.class)
-    public void testIsActiveRangeLow (TraverseActiveRegions t) {
-        DummyActiveRegionWalker walker = new DummyActiveRegionWalker(-0.1);
-        getActiveRegions(t, walker, intervals).values();
-    }
-
-    @Test (enabled = ENFORCE_CONTRACTS, dataProvider = "TraversalEngineProvider", expectedExceptions = PreconditionError.class)
-    public void testIsActiveRangeHigh (TraverseActiveRegions t) {
-        DummyActiveRegionWalker walker = new DummyActiveRegionWalker(1.1);
-        getActiveRegions(t, walker, intervals).values();
-    }
-
-    @Test(enabled = true && ! DEBUG, dataProvider = "TraversalEngineProvider")
-    public void testActiveRegionCoverage(TraverseActiveRegions t) {
-        DummyActiveRegionWalker walker = new DummyActiveRegionWalker(new GenomeLocSortedSet(genomeLocParser, intervals), true);
-
-        Collection<ActiveRegion> activeRegions = getActiveRegions(t, walker, intervals).values();
-        verifyActiveRegionCoverage(intervals, activeRegions);
-    }
-
-    private void verifyActiveRegionCoverage(List<GenomeLoc> intervals, Collection<ActiveRegion> activeRegions) {
-        List<GenomeLoc> intervalStarts = new ArrayList<GenomeLoc>();
-        List<GenomeLoc> intervalStops = new ArrayList<GenomeLoc>();
-
-        for (GenomeLoc interval : intervals) {
-            intervalStarts.add(interval.getStartLocation());
-            intervalStops.add(interval.getStopLocation());
-        }
-
-        Map<GenomeLoc, ActiveRegion> baseRegionMap = new HashMap<GenomeLoc, ActiveRegion>();
-
-        for (ActiveRegion activeRegion : activeRegions) {
-            for (GenomeLoc activeLoc : toSingleBaseLocs(activeRegion.getLocation())) {
-                // Contract: Regions do not overlap
-                Assert.assertFalse(baseRegionMap.containsKey(activeLoc), "Genome location " + activeLoc + " is assigned to more than one region");
-                baseRegionMap.put(activeLoc, activeRegion);
-            }
-
-            GenomeLoc start = activeRegion.getLocation().getStartLocation();
-            if (intervalStarts.contains(start))
-                intervalStarts.remove(start);
-
-            GenomeLoc stop = activeRegion.getLocation().getStopLocation();
-            if (intervalStops.contains(stop))
-                intervalStops.remove(stop);
-        }
-
-        for (GenomeLoc baseLoc : toSingleBaseLocs(intervals)) {
-            // Contract: Each location in the interval(s) is in exactly one region
-            // Contract: The total set of regions exactly matches the analysis interval(s)
-            Assert.assertTrue(baseRegionMap.containsKey(baseLoc), "Genome location " + baseLoc + " is not assigned to any region");
-            baseRegionMap.remove(baseLoc);
-        }
-
-        // Contract: The total set of regions exactly matches the analysis interval(s)
-        Assert.assertEquals(baseRegionMap.size(), 0, "Active regions contain base(s) outside of the given intervals");
-
-        // Contract: All explicit interval boundaries must also be region boundaries
-        Assert.assertEquals(intervalStarts.size(), 0, "Interval start location does not match an active region start location");
-        Assert.assertEquals(intervalStops.size(), 0, "Interval stop location does not match an active region stop location");
-    }
-
-    @Test(enabled = true && ! DEBUG, dataProvider = "TraversalEngineProvider")
-    public void testActiveRegionExtensionOnContig(TraverseActiveRegions t) {
-        DummyActiveRegionWalker walker = new DummyActiveRegionWalker();
-
-        Collection<ActiveRegion> activeRegions = getActiveRegions(t, walker, intervals).values();
-        for (ActiveRegion activeRegion : activeRegions) {
-            GenomeLoc loc = activeRegion.getExtendedLoc();
-
-            // Contract: active region extensions must stay on the contig
-            Assert.assertTrue(loc.getStart() > 0, "Active region extension begins at location " + loc.getStart() + ", past the left end of the contig");
-            int refLen = dictionary.getSequence(loc.getContigIndex()).getSequenceLength();
-            Assert.assertTrue(loc.getStop() <= refLen, "Active region extension ends at location " + loc.getStop() + ", past the right end of the contig");
-        }
-    }
-
-    @Test(enabled = true && !DEBUG, dataProvider = "TraversalEngineProvider")
-    public void testPrimaryReadMapping(TraverseActiveRegions t) {
-        DummyActiveRegionWalker walker = new DummyActiveRegionWalker(new GenomeLocSortedSet(genomeLocParser, intervals),
-                EnumSet.of(ActiveRegionReadState.PRIMARY),
-                true);
-
-        // Contract: Each read has the Primary state in a single region (or none)
-        // This is the region of maximum overlap for the read (earlier if tied)
-
-        // simple: Primary in 1:1-999
-        // overlap_equal: Primary in 1:1-999
-        // overlap_unequal: Primary in 1:1-999
-        // boundary_equal: Primary in 1:1000-1999, Non-Primary in 1:2000-2999
-        // boundary_unequal: Primary in 1:1000-1999, Non-Primary in 1:2000-2999
-        // boundary_1_pre: Primary in 1:1000-1999, Non-Primary in 1:2000-2999
-        // boundary_1_post: Primary in 1:1000-1999, Non-Primary in 1:2000-2999
-        // extended_and_np: Primary in 1:1-999, Non-Primary in 1:1000-1999, Extended in 1:2000-2999
-        // outside_intervals: none
-        // shard_boundary_1_pre: Primary in 1:14908-16384, Non-Primary in 1:16385-16927
-        // shard_boundary_1_post: Primary in 1:14908-16384, Non-Primary in 1:16385-16927
-        // shard_boundary_equal: Primary in 1:14908-16384, Non-Primary in 1:16385-16927
-        // simple20: Primary in 20:10000-10100
-
-        Map<GenomeLoc, ActiveRegion> activeRegions = getActiveRegions(t, walker, intervals);
-        ActiveRegion region;
-
-        region = activeRegions.get(genomeLocParser.createGenomeLoc("1", 1, 999));
-        verifyReadMapping(region, "simple", "overlap_equal", "overlap_unequal", "extended_and_np");
-
-        region = activeRegions.get(genomeLocParser.createGenomeLoc("1", 1000, 1999));
-        verifyReadMapping(region, "boundary_unequal", "boundary_1_pre", "boundary_equal", "boundary_1_post");
-
-        region = activeRegions.get(genomeLocParser.createGenomeLoc("1", 2000, 2999));
-        verifyReadMapping(region);
-
-        region = activeRegions.get(genomeLocParser.createGenomeLoc("1", 10000, 20000));
-        verifyReadMapping(region, "shard_boundary_1_pre", "shard_boundary_1_post", "shard_boundary_equal");
-
-        region = activeRegions.get(genomeLocParser.createGenomeLoc("20", 10000, 10100));
-        verifyReadMapping(region, "simple20");
-    }
-
-    @Test(enabled = true && ! DEBUG, dataProvider = "TraversalEngineProvider")
-    public void testNonPrimaryReadMapping(TraverseActiveRegions t) {
-        DummyActiveRegionWalker walker = new DummyActiveRegionWalker(new GenomeLocSortedSet(genomeLocParser, intervals),
-                EnumSet.of(ActiveRegionReadState.PRIMARY, ActiveRegionReadState.NONPRIMARY),
-                true);
-
-        // Contract: Each read has the Primary state in a single region (or none)
-        // This is the region of maximum overlap for the read (earlier if tied)
-
-        // Contract: Each read has the Non-Primary state in all other regions it overlaps
-
-        // simple: Primary in 1:1-999
-        // overlap_equal: Primary in 1:1-999
-        // overlap_unequal: Primary in 1:1-999
-        // boundary_equal: Primary in 1:1000-1999, Non-Primary in 1:2000-2999
-        // boundary_unequal: Primary in 1:1000-1999, Non-Primary in 1:2000-2999
-        // boundary_1_pre: Primary in 1:1000-1999, Non-Primary in 1:2000-2999
-        // boundary_1_post: Primary in 1:1000-1999, Non-Primary in 1:2000-2999
-        // extended_and_np: Primary in 1:1-999, Non-Primary in 1:1000-1999, Extended in 1:2000-2999
-        // outside_intervals: none
-        // shard_boundary_1_pre: Primary in 1:14908-16384, Non-Primary in 1:16385-16927
-        // shard_boundary_1_post: Primary in 1:14908-16384, Non-Primary in 1:16385-16927
-        // shard_boundary_equal: Primary in 1:14908-16384, Non-Primary in 1:16385-16927
-        // simple20: Primary in 20:10000-10100
-
-        Map<GenomeLoc, ActiveRegion> activeRegions = getActiveRegions(t, walker, intervals);
-        ActiveRegion region;
-
-        region = activeRegions.get(genomeLocParser.createGenomeLoc("1", 1, 999));
-        verifyReadMapping(region, "simple", "overlap_equal", "overlap_unequal", "extended_and_np");
-
-        region = activeRegions.get(genomeLocParser.createGenomeLoc("1", 1000, 1999));
-        verifyReadMapping(region, "boundary_equal", "boundary_unequal", "extended_and_np", "boundary_1_pre", "boundary_1_post");
-
-        region = activeRegions.get(genomeLocParser.createGenomeLoc("1", 2000, 2999));
-        verifyReadMapping(region, "boundary_equal", "boundary_unequal", "boundary_1_pre", "boundary_1_post");
-
-        region = activeRegions.get(genomeLocParser.createGenomeLoc("1", 10000, 20000));
-        verifyReadMapping(region, "shard_boundary_1_pre", "shard_boundary_1_post", "shard_boundary_equal");
-
-        region = activeRegions.get(genomeLocParser.createGenomeLoc("20", 10000, 10100));
-        verifyReadMapping(region, "simple20");
-    }
-
-    @Test(enabled = true && ! DEBUG, dataProvider = "TraversalEngineProvider")
-    public void testExtendedReadMapping(TraverseActiveRegions t) {
-        DummyActiveRegionWalker walker = new DummyActiveRegionWalker(new GenomeLocSortedSet(genomeLocParser, intervals),
-                EnumSet.of(ActiveRegionReadState.PRIMARY, ActiveRegionReadState.NONPRIMARY, ActiveRegionReadState.EXTENDED),
-                true);
-
-        // Contract: Each read has the Primary state in a single region (or none)
-        // This is the region of maximum overlap for the read (earlier if tied)
-
-        // Contract: Each read has the Non-Primary state in all other regions it overlaps
-        // Contract: Each read has the Extended state in regions where it only overlaps if the region is extended
-
-        // simple: Primary in 1:1-999
-        // overlap_equal: Primary in 1:1-999
-        // overlap_unequal: Primary in 1:1-999
-        // boundary_equal: Non-Primary in 1:1000-1999, Primary in 1:2000-2999
-        // boundary_unequal: Primary in 1:1000-1999, Non-Primary in 1:2000-2999
-        // boundary_1_pre: Primary in 1:1000-1999, Non-Primary in 1:2000-2999
-        // boundary_1_post: Non-Primary in 1:1000-1999, Primary in 1:2000-2999
-        // extended_and_np: Non-Primary in 1:1-999, Primary in 1:1000-1999, Extended in 1:2000-2999
-        // outside_intervals: none
-        // shard_boundary_1_pre: Primary in 1:14908-16384, Non-Primary in 1:16385-16927
-        // shard_boundary_1_post: Non-Primary in 1:14908-16384, Primary in 1:16385-16927
-        // shard_boundary_equal: Non-Primary in 1:14908-16384, Primary in 1:16385-16927
-        // simple20: Primary in 20:10000-10100
-
-        Map<GenomeLoc, ActiveRegion> activeRegions = getActiveRegions(t, walker, intervals);
-        ActiveRegion region;
-
-        region = activeRegions.get(genomeLocParser.createGenomeLoc("1", 1, 999));
-        verifyReadMapping(region, "simple", "overlap_equal", "overlap_unequal", "extended_and_np");
-
-        region = activeRegions.get(genomeLocParser.createGenomeLoc("1", 1000, 1999));
-        verifyReadMapping(region, "boundary_equal", "boundary_unequal", "extended_and_np", "boundary_1_pre", "boundary_1_post");
-
-        region = activeRegions.get(genomeLocParser.createGenomeLoc("1", 2000, 2999));
-        verifyReadMapping(region, "boundary_equal", "boundary_unequal", "extended_and_np", "boundary_1_pre", "boundary_1_post");
-
-        region = activeRegions.get(genomeLocParser.createGenomeLoc("1", 10000, 20000));
-        verifyReadMapping(region, "shard_boundary_1_pre", "shard_boundary_1_post", "shard_boundary_equal");
-
-        region = activeRegions.get(genomeLocParser.createGenomeLoc("20", 10000, 10100));
-        verifyReadMapping(region, "simple20");
-    }
-
-    @Test(enabled = true && ! DEBUG, dataProvider = "TraversalEngineProvider")
-    public void testUnmappedReads(TraverseActiveRegions t) {
-        // TODO
-    }
-
-    private void verifyReadMapping(ActiveRegion region, String... reads) {
-        Assert.assertNotNull(region, "Region was unexpectedly null");
-        final Set<String> regionReads = new HashSet<String>();
-        for (SAMRecord read : region.getReads()) {
-            Assert.assertFalse(regionReads.contains(read.getReadName()), "Duplicate reads detected in region " + region + " read " + read.getReadName());
-            regionReads.add(read.getReadName());
-        }
-
-        Collection<String> wantReads = new ArrayList<String>(Arrays.asList(reads));
-        for (SAMRecord read : region.getReads()) {
-            String regionReadName = read.getReadName();
-            Assert.assertTrue(wantReads.contains(regionReadName), "Read " + regionReadName + " incorrectly assigned to active region " + region);
-            wantReads.remove(regionReadName);
-        }
-
-        Assert.assertTrue(wantReads.isEmpty(), "Reads missing in active region " + region + ", wanted " + (wantReads.isEmpty() ? "" : wantReads.iterator().next()));
-    }
-
-    private Map<GenomeLoc, ActiveRegion> getActiveRegions(TraverseActiveRegions t, DummyActiveRegionWalker walker, List<GenomeLoc> intervals) {
-        return getActiveRegions(t, walker, intervals, testBAM);
-    }
-
-    private Map<GenomeLoc, ActiveRegion> getActiveRegions(TraverseActiveRegions t, DummyActiveRegionWalker walker, List<GenomeLoc> intervals, final File bam) {
-        for (LocusShardDataProvider dataProvider : createDataProviders(t, walker, intervals, bam))
-            t.traverse(walker, dataProvider, 0);
-
-        return walker.mappedActiveRegions;
-    }
-
-    private Collection<GenomeLoc> toSingleBaseLocs(GenomeLoc interval) {
-        List<GenomeLoc> bases = new ArrayList<GenomeLoc>();
-        if (interval.size() == 1)
-            bases.add(interval);
-        else {
-            for (int location = interval.getStart(); location <= interval.getStop(); location++)
-                bases.add(genomeLocParser.createGenomeLoc(interval.getContig(), location, location));
-        }
-
-        return bases;
-    }
-
-    private Collection<GenomeLoc> toSingleBaseLocs(List<GenomeLoc> intervals) {
-        Set<GenomeLoc> bases = new TreeSet<GenomeLoc>();    // for sorting and uniqueness
-        for (GenomeLoc interval : intervals)
-            bases.addAll(toSingleBaseLocs(interval));
-
-        return bases;
-    }
-
-    private void verifyEqualIntervals(List<GenomeLoc> aIntervals, List<GenomeLoc> bIntervals) {
-        Collection<GenomeLoc> aBases = toSingleBaseLocs(aIntervals);
-        Collection<GenomeLoc> bBases = toSingleBaseLocs(bIntervals);
-
-        Assert.assertTrue(aBases.size() == bBases.size(), "Interval lists have a differing number of bases: " + aBases.size() + " vs. " + bBases.size());
-
-        Iterator<GenomeLoc> aIter = aBases.iterator();
-        Iterator<GenomeLoc> bIter = bBases.iterator();
-        while (aIter.hasNext() && bIter.hasNext()) {
-            GenomeLoc aLoc = aIter.next();
-            GenomeLoc bLoc = bIter.next();
-            Assert.assertTrue(aLoc.equals(bLoc), "Interval locations do not match: " + aLoc + " vs. " + bLoc);
-        }
-    }
-
-    // copied from LocusViewTemplate
-    protected GATKSAMRecord buildSAMRecord(String readName, String contig, int alignmentStart, int alignmentEnd) {
-        SAMFileHeader header = ArtificialSAMUtils.createDefaultReadGroup(new SAMFileHeader(), "test", "test");
-        header.setSequenceDictionary(dictionary);
-        header.setSortOrder(SAMFileHeader.SortOrder.coordinate);
-        GATKSAMRecord record = new GATKSAMRecord(header);
-
-        record.setReadName(readName);
-        record.setReferenceIndex(dictionary.getSequenceIndex(contig));
-        record.setAlignmentStart(alignmentStart);
-
-        Cigar cigar = new Cigar();
-        int len = alignmentEnd - alignmentStart + 1;
-        cigar.add(new CigarElement(len, CigarOperator.M));
-        record.setCigar(cigar);
-        record.setReadString(new String(new char[len]).replace("\0", "A"));
-        record.setBaseQualities(new byte[len]);
-        record.setReadGroup(new GATKSAMReadGroupRecord(header.getReadGroup("test")));
-
-        return record;
-    }
-
-    private List<LocusShardDataProvider> createDataProviders(TraverseActiveRegions traverseActiveRegions, final Walker walker, List<GenomeLoc> intervals, File bamFile) {
-        GenomeAnalysisEngine engine = new GenomeAnalysisEngine();
-        engine.setGenomeLocParser(genomeLocParser);
-
-        Collection<SAMReaderID> samFiles = new ArrayList<SAMReaderID>();
-        SAMReaderID readerID = new SAMReaderID(bamFile, new Tags());
-        samFiles.add(readerID);
-
-        SAMDataSource dataSource = new SAMDataSource(samFiles, new ThreadAllocation(), null, genomeLocParser,
-                false,
-                ValidationStringency.STRICT,
-                null,
-                null,
-                new ValidationExclusion(),
-                new ArrayList<ReadFilter>(),
-                new ArrayList<ReadTransformer>(),
-                false, (byte)30, false, true, null, IntervalMergingRule.ALL);
-
-        engine.setReadsDataSource(dataSource);
-        final Set<String> samples = SampleUtils.getSAMFileSamples(dataSource.getHeader());
-
-        traverseActiveRegions.initialize(engine, walker);
-        List<LocusShardDataProvider> providers = new ArrayList<LocusShardDataProvider>();
-        for (Shard shard : dataSource.createShardIteratorOverIntervals(new GenomeLocSortedSet(genomeLocParser, intervals), new ActiveRegionShardBalancer())) {
-            for (WindowMaker.WindowMakerIterator window : new WindowMaker(shard, genomeLocParser, dataSource.seek(shard), shard.getGenomeLocs(), samples)) {
-                providers.add(new LocusShardDataProvider(shard, shard.getReadProperties(), genomeLocParser, window.getLocus(), window, reference, new ArrayList<ReferenceOrderedDataSource>()));
-            }
-        }
-
-        return providers;
-    }
-
-    // ---------------------------------------------------------------------------------------------------------
-    //
-    // Combinatorial tests to ensure reads are going into the right regions
-    //
-    // ---------------------------------------------------------------------------------------------------------
-
-    @DataProvider(name = "CombinatorialARTTilingProvider")
-    public Object[][] makeCombinatorialARTTilingProvider() {
-        final List<Object[]> tests = new LinkedList<Object[]>();
-
-        final List<Integer> starts = Arrays.asList(
-                1, // very start of the chromosome
-                ArtificialBAMBuilder.BAM_SHARD_SIZE - 100, // right before the shard boundary
-                ArtificialBAMBuilder.BAM_SHARD_SIZE + 100 // right after the shard boundary
-        );
-
-        final List<EnumSet<ActiveRegionReadState>> allReadStates = Arrays.asList(
-                EnumSet.of(ActiveRegionReadState.PRIMARY),
-                EnumSet.of(ActiveRegionReadState.PRIMARY, ActiveRegionReadState.NONPRIMARY),
-                EnumSet.of(ActiveRegionReadState.PRIMARY, ActiveRegionReadState.NONPRIMARY, ActiveRegionReadState.EXTENDED)
-        );
-
-        final int maxTests = Integer.MAX_VALUE;
-        int nTests = 0;
-        for ( final int readLength : Arrays.asList(100) ) {
-            for ( final int skips : Arrays.asList(0, 10) ) {
-                for ( final int start : starts ) {
-                    for ( final int nReadsPerLocus : Arrays.asList(1, 2) ) {
-                        for ( final int nLoci : Arrays.asList(1, 1000) ) {
-                            final ArtificialBAMBuilder bamBuilder = new ArtificialBAMBuilder(reference, nReadsPerLocus, nLoci);
-                            bamBuilder.setReadLength(readLength);
-                            bamBuilder.setSkipNLoci(skips);
-                            bamBuilder.setAlignmentStart(start);
-                            for ( EnumSet<ActiveRegionReadState> readStates : allReadStates ) {
-                                for ( final GenomeLocSortedSet activeRegions : enumerateActiveRegions(bamBuilder.getAlignmentStart(), bamBuilder.getAlignmentEnd())) {
-                                    nTests++;
-                                    if ( nTests < maxTests ) // && nTests == 1238 )
-                                        tests.add(new Object[]{new TraverseActiveRegions<>(), nTests, activeRegions, readStates, bamBuilder});
-                                }
-                            }
-                        }
-                    }
-                }
-            }
-        }
-
-        return tests.toArray(new Object[][]{});
-    }
-
-    private Collection<GenomeLocSortedSet> enumerateActiveRegions(final int start, final int stop) {
-        // should basically cut up entire region into equal sized chunks, of
-        // size 10, 20, 50, 100, etc, alternating skipping pieces so they are inactive
-        // Need to make sure we include some edge cases:
-        final List<GenomeLocSortedSet> activeRegions = new LinkedList<GenomeLocSortedSet>();
-
-        for ( final int stepSize : Arrays.asList(11, 29, 53, 97) ) {
-            for ( final boolean startWithActive : Arrays.asList(true, false) ) {
-                activeRegions.add(makeActiveRegionMask(start, stop, stepSize,  startWithActive));
-            }
-        }
-
-        // active region is the whole interval
-        activeRegions.add(new GenomeLocSortedSet(genomeLocParser, genomeLocParser.createGenomeLoc("1", start, stop)));
-
-        // active region extends up to the end of the data, but doesn't include start
-        activeRegions.add(new GenomeLocSortedSet(genomeLocParser, genomeLocParser.createGenomeLoc("1", start+10, stop)));
-
-        return activeRegions;
-    }
-
-    private GenomeLocSortedSet makeActiveRegionMask(final int start, final int stop, final int stepSize, final boolean startWithActive) {
-        final GenomeLocSortedSet active = new GenomeLocSortedSet(genomeLocParser);
-
-        boolean includeRegion = startWithActive;
-        for ( int left = start; left < stop; left += stepSize) {
-            final int right = left + stepSize;
-            final GenomeLoc region = genomeLocParser.createGenomeLoc("1", left, right);
-            if ( includeRegion )
-                active.add(region);
-            includeRegion = ! includeRegion;
-        }
-
-        return active;
-    }
-
-
-    @Test(enabled = true && ! DEBUG, dataProvider = "CombinatorialARTTilingProvider")
-    public void testARTReadsInActiveRegions(final TraverseActiveRegions<Integer, Integer> traversal, final int id, final GenomeLocSortedSet activeRegions, final EnumSet<ActiveRegionReadState> readStates, final ArtificialBAMBuilder bamBuilder) {
-        logger.warn("Running testARTReadsInActiveRegions id=" + id + " locs " + activeRegions + " against bam " + bamBuilder);
-        final List<GenomeLoc> intervals = Arrays.asList(
-                genomeLocParser.createGenomeLoc("1", bamBuilder.getAlignmentStart(), bamBuilder.getAlignmentEnd())
-        );
-
-        final DummyActiveRegionWalker walker = new DummyActiveRegionWalker(activeRegions, false);
-        walker.setStates(readStates);
-
-        final Map<GenomeLoc, ActiveRegion> activeRegionsMap = getActiveRegions(traversal, walker, intervals, bamBuilder.makeTemporarilyBAMFile());
-
-        final Set<String> alreadySeenReads = new HashSet<String>(); // for use with the primary / non-primary
-        for ( final ActiveRegion region : activeRegionsMap.values() ) {
-            final Set<String> readNamesInRegion = readNamesInRegion(region);
-            int nReadsExpectedInRegion = 0;
-            for ( final GATKSAMRecord read : bamBuilder.makeReads() ) {
-                final GenomeLoc readLoc = genomeLocParser.createGenomeLoc(read);
-
-                boolean shouldBeInRegion = readStates.contains(ActiveRegionReadState.EXTENDED)
-                        ? region.getExtendedLoc().overlapsP(readLoc)
-                        : region.getLocation().overlapsP(readLoc);
-
-                if ( ! readStates.contains(ActiveRegionReadState.NONPRIMARY) ) {
-                    if ( alreadySeenReads.contains(read.getReadName()) )
-                        shouldBeInRegion = false;
-                    else if ( shouldBeInRegion )
-                        alreadySeenReads.add(read.getReadName());
-                }
-
-                String msg = readNamesInRegion.contains(read.getReadName()) == shouldBeInRegion ? "" : "Region " + region +
-                        " failed contains read check: read " + read + " with span " + readLoc + " should be in region is " + shouldBeInRegion + " but I got the opposite";
-                Assert.assertEquals(readNamesInRegion.contains(read.getReadName()), shouldBeInRegion, msg);
-
-                nReadsExpectedInRegion += shouldBeInRegion ? 1 : 0;
-            }
-
-            Assert.assertEquals(region.size(), nReadsExpectedInRegion, "There are more reads in active region " + region + "than expected");
-        }
-    }
-
-    private Set<String> readNamesInRegion(final ActiveRegion region) {
-        final Set<String> readNames = new LinkedHashSet<String>(region.getReads().size());
-        for ( final SAMRecord read : region.getReads() )
-            readNames.add(read.getReadName());
-        return readNames;
-    }
-
-    // ---------------------------------------------------------------------------------------------------------
-    //
-    // Make sure all insertion reads are properly included in the active regions
-    //
-    // ---------------------------------------------------------------------------------------------------------
-
-    @Test(dataProvider = "TraversalEngineProvider", enabled = true && ! DEBUG)
-    public void ensureAllInsertionReadsAreInActiveRegions(final TraverseActiveRegions<Integer, Integer> traversal) {
-
-        final int readLength = 10;
-        final int start = 20;
-        final int nReadsPerLocus = 10;
-        final int nLoci = 3;
-
-        final ArtificialBAMBuilder bamBuilder = new ArtificialBAMBuilder(reference, nReadsPerLocus, nLoci);
-        bamBuilder.setReadLength(readLength);
-        bamBuilder.setAlignmentStart(start);
-
-        // note that the position must be +1 as the read's all I cigar puts the end 1 bp before start, leaving it out of the region
-        GATKSAMRecord allI = ArtificialSAMUtils.createArtificialRead(bamBuilder.getHeader(),"allI",0,start+1,readLength);
-        allI.setCigarString(readLength + "I");
-        allI.setReadGroup(new GATKSAMReadGroupRecord(bamBuilder.getHeader().getReadGroups().get(0)));
-
-        bamBuilder.addReads(allI);
-
-        final GenomeLocSortedSet activeRegions = new GenomeLocSortedSet(bamBuilder.getGenomeLocParser());
-        activeRegions.add(bamBuilder.getGenomeLocParser().createGenomeLoc("1", 10, 30));
-        final List<GenomeLoc> intervals = Arrays.asList(
-                genomeLocParser.createGenomeLoc("1", bamBuilder.getAlignmentStart(), bamBuilder.getAlignmentEnd())
-        );
-
-        final DummyActiveRegionWalker walker = new DummyActiveRegionWalker(activeRegions, false);
-
-        final Map<GenomeLoc, ActiveRegion> activeRegionsMap = getActiveRegions(traversal, walker, intervals, bamBuilder.makeTemporarilyBAMFile());
-
-        final ActiveRegion region = activeRegionsMap.values().iterator().next();
-        int nReadsExpectedInRegion = 0;
-
-        final Set<String> readNamesInRegion = readNamesInRegion(region);
-        for ( final GATKSAMRecord read : bamBuilder.makeReads() ) {
-            Assert.assertTrue(readNamesInRegion.contains(read.getReadName()),
-                    "Region " + region + " should contain read " + read + " with cigar " + read.getCigarString() + " but it wasn't");
-            nReadsExpectedInRegion++;
-        }
-
-        Assert.assertEquals(region.size(), nReadsExpectedInRegion, "There are more reads in active region " + region + "than expected");
-    }
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/traversals/TraverseDuplicatesUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/traversals/TraverseDuplicatesUnitTest.java
deleted file mode 100644
index a332be1..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/traversals/TraverseDuplicatesUnitTest.java
+++ /dev/null
@@ -1,162 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.traversals;
-
-import htsjdk.samtools.SAMFileHeader;
-import htsjdk.samtools.SAMRecord;
-import org.broadinstitute.gatk.engine.GenomeAnalysisEngine;
-import org.testng.Assert;
-import org.broadinstitute.gatk.utils.BaseTest;
-import org.broadinstitute.gatk.utils.GenomeLocParser;
-import org.broadinstitute.gatk.utils.sam.ArtificialSAMUtils;
-
-import org.testng.annotations.BeforeMethod;
-
-import org.testng.annotations.Test;
-
-import java.io.File;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Set;
-
-
-/**
- * @author aaron
- *         <p/>
- *         Class TraverseDuplicatesUnitTest
- *         <p/>
- *         test the meat of the traverse dupplicates.
- */
-public class TraverseDuplicatesUnitTest extends BaseTest {
-
-    private TraverseDuplicates obj = new TraverseDuplicates();
-    private SAMFileHeader header;
-    private GenomeLocParser genomeLocParser;
-    private GenomeAnalysisEngine engine;
-    private File refFile = new File(validationDataLocation + "Homo_sapiens_assembly17.fasta");
-
-
-    @BeforeMethod
-    public void doBefore() {
-        header = ArtificialSAMUtils.createArtificialSamHeader(1, 1, 1000);
-        genomeLocParser =new GenomeLocParser(header.getSequenceDictionary());
-
-        engine = new GenomeAnalysisEngine();
-        engine.setReferenceDataSource(refFile);
-        engine.setGenomeLocParser(genomeLocParser);
-        
-        obj.initialize(engine, null);
-    }
-
-    @Test
-    public void testAllDuplicatesNoPairs() {
-        List<SAMRecord> list = new ArrayList<SAMRecord>();
-        for (int x = 0; x < 10; x++) {
-            SAMRecord read = ArtificialSAMUtils.createArtificialRead(header, "SWEET_READ" + x, 0, 1, 100);
-            read.setDuplicateReadFlag(true);
-            list.add(read);
-        }
-        Set<List<SAMRecord>> myPairings = obj.uniqueReadSets(list);
-        Assert.assertEquals(myPairings.size(), 1);
-        Assert.assertEquals(myPairings.iterator().next().size(), 10); // dup's
-    }
-
-    @Test
-    public void testNoDuplicatesNoPairs() {
-        List<SAMRecord> list = new ArrayList<SAMRecord>();
-        for (int x = 0; x < 10; x++) {
-            SAMRecord read = ArtificialSAMUtils.createArtificialRead(header, "SWEET_READ" + x, 0, 1, 100);
-            read.setDuplicateReadFlag(false);
-            list.add(read);
-        }
-
-        Set<List<SAMRecord>> myPairing = obj.uniqueReadSets(list);
-        Assert.assertEquals(myPairing.size(), 10); // unique
-    }
-
-    @Test
-    public void testFiftyFiftyNoPairs() {
-        List<SAMRecord> list = new ArrayList<SAMRecord>();
-        for (int x = 0; x < 5; x++) {
-            SAMRecord read = ArtificialSAMUtils.createArtificialRead(header, "SWEET_READ" + x, 0, 1, 100);
-            read.setDuplicateReadFlag(true);
-            list.add(read);
-        }
-        for (int x = 10; x < 15; x++)
-            list.add(ArtificialSAMUtils.createArtificialRead(header, String.valueOf(x), 0, x, 100));
-
-        Set<List<SAMRecord>> myPairing = obj.uniqueReadSets(list);
-        Assert.assertEquals(myPairing.size(), 6);  // unique
-    }
-
-    @Test
-    public void testAllDuplicatesAllPairs() {
-        List<SAMRecord> list = new ArrayList<SAMRecord>();
-        for (int x = 0; x < 10; x++) {
-            SAMRecord read = ArtificialSAMUtils.createArtificialRead(header, "SWEET_READ"+ x, 0, 1, 100);
-            read.setDuplicateReadFlag(true);
-            read.setMateAlignmentStart(100);
-            read.setMateReferenceIndex(0);
-            read.setReadPairedFlag(true);
-            list.add(read);
-        }
-
-        Set<List<SAMRecord>> myPairing = obj.uniqueReadSets(list);
-        Assert.assertEquals(myPairing.size(), 1);  // unique
-    }
-
-    @Test
-    public void testNoDuplicatesAllPairs() {
-        List<SAMRecord> list = new ArrayList<SAMRecord>();
-        for (int x = 0; x < 10; x++) {
-            SAMRecord read = ArtificialSAMUtils.createArtificialRead(header, "SWEET_READ"+ x, 0, 1, 100);
-            if (x == 0) read.setDuplicateReadFlag(true); // one is a dup but (next line)
-            read.setMateAlignmentStart(100); // they all have a shared start and mate start so they're dup's
-            read.setMateReferenceIndex(0);
-            read.setReadPairedFlag(true);
-            list.add(read);
-        }
-
-        Set<List<SAMRecord>> myPairing = obj.uniqueReadSets(list);
-        Assert.assertEquals(myPairing.size(), 1);  // unique
-    }
-
-    @Test
-    public void testAllDuplicatesAllPairsDifferentPairedEnd() {
-        List<SAMRecord> list = new ArrayList<SAMRecord>();
-        for (int x = 0; x < 10; x++) {
-            SAMRecord read = ArtificialSAMUtils.createArtificialRead(header, "SWEET_READ" + x, 0, 1, 100);
-            if (x == 0) read.setDuplicateReadFlag(true); // one is a dup
-            read.setMateAlignmentStart(100 + x);
-            read.setMateReferenceIndex(0);
-            read.setReadPairedFlag(true);
-            list.add(read);
-        }
-
-        Set<List<SAMRecord>> myPairing = obj.uniqueReadSets(list);
-        Assert.assertEquals(myPairing.size(), 10);  // unique
-    }
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/traversals/TraverseReadsUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/traversals/TraverseReadsUnitTest.java
deleted file mode 100644
index 70336a2..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/traversals/TraverseReadsUnitTest.java
+++ /dev/null
@@ -1,166 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.traversals;
-
-import htsjdk.samtools.reference.IndexedFastaSequenceFile;
-import htsjdk.samtools.reference.ReferenceSequenceFile;
-import org.broadinstitute.gatk.utils.BaseTest;
-import org.broadinstitute.gatk.utils.commandline.Tags;
-import org.broadinstitute.gatk.engine.GenomeAnalysisEngine;
-import org.broadinstitute.gatk.engine.datasources.providers.ReadShardDataProvider;
-import org.broadinstitute.gatk.engine.datasources.reads.*;
-import org.broadinstitute.gatk.engine.datasources.rmd.ReferenceOrderedDataSource;
-import org.broadinstitute.gatk.engine.resourcemanagement.ThreadAllocation;
-import org.broadinstitute.gatk.engine.walkers.ReadWalker;
-import org.broadinstitute.gatk.tools.walkers.qc.CountReads;
-import org.broadinstitute.gatk.utils.GenomeLocParser;
-import org.broadinstitute.gatk.utils.exceptions.UserException;
-import org.broadinstitute.gatk.utils.fasta.CachingIndexedFastaSequenceFile;
-import org.testng.annotations.BeforeClass;
-import org.testng.annotations.BeforeMethod;
-import org.testng.annotations.Test;
-
-import java.io.File;
-import java.io.FileNotFoundException;
-import java.io.FileOutputStream;
-import java.io.PrintStream;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.List;
-
-import static org.testng.Assert.fail;
-
-/**
- *
- * User: aaron
- * Date: Apr 24, 2009
- * Time: 3:42:16 PM
- *
- * The Broad Institute
- * SOFTWARE COPYRIGHT NOTICE AGREEMENT 
- * This software and its documentation are copyright 2009 by the
- * Broad Institute/Massachusetts Institute of Technology. All rights are reserved.
- *
- * This software is supplied without any warranty or guaranteed support whatsoever. Neither
- * the Broad Institute nor MIT can be responsible for its use, misuse, or functionality.
- *
- */
-
-
-/**
- * @author aaron
- * @version 1.0
- * @date Apr 24, 2009
- * <p/>
- * Class TraverseReadsUnitTest
- * <p/>
- * test traversing reads
- */
-public class TraverseReadsUnitTest extends BaseTest {
-
-    private ReferenceSequenceFile seq;
-    private SAMReaderID bam = new SAMReaderID(new File(validationDataLocation + "index_test.bam"),new Tags()); // TCGA-06-0188.aligned.duplicates_marked.bam");
-    private File refFile = new File(validationDataLocation + "Homo_sapiens_assembly17.fasta");
-    private List<SAMReaderID> bamList;
-    private ReadWalker countReadWalker;
-    private File output;
-    private TraverseReadsNano traversalEngine = null;
-
-    private IndexedFastaSequenceFile ref = null;
-    private GenomeLocParser genomeLocParser = null;
-    private GenomeAnalysisEngine engine = null;
-
-    @BeforeClass
-    public void doOnce() {
-        try {
-            ref = new CachingIndexedFastaSequenceFile(refFile);
-        }
-        catch(FileNotFoundException ex) {
-            throw new UserException.CouldNotReadInputFile(refFile,ex);
-        }
-        genomeLocParser = new GenomeLocParser(ref);
-
-        engine = new GenomeAnalysisEngine();
-        engine.setReferenceDataSource(refFile);
-        engine.setGenomeLocParser(genomeLocParser);
-    }
-
-    /**
-     * This function does the setup of our parser, before each method call.
-     * <p/>
-     * Called before every test case method.
-     */
-    @BeforeMethod
-    public void doForEachTest() {
-        output = new File("testOut.txt");
-        FileOutputStream out = null;
-        PrintStream ps; // declare a print stream object
-
-        try {
-            out = new FileOutputStream(output);
-        } catch (FileNotFoundException e) {
-            e.printStackTrace();  //To change body of catch statement use File | Settings | File Templates.
-            fail("Couldn't open the output file");
-        }
-
-        bamList = new ArrayList<SAMReaderID>();
-        bamList.add(bam);
-        countReadWalker = new CountReads();
-        
-        traversalEngine = new TraverseReadsNano(1);
-        traversalEngine.initialize(engine, countReadWalker);
-    }
-
-    /** Test out that we can shard the file and iterate over every read */
-    @Test
-    public void testUnmappedReadCount() {
-        SAMDataSource dataSource = new SAMDataSource(bamList,new ThreadAllocation(),null,genomeLocParser);
-        Iterable<Shard> shardStrategy = dataSource.createShardIteratorOverAllReads(new ReadShardBalancer());
-
-        countReadWalker.initialize();
-        Object accumulator = countReadWalker.reduceInit();
-
-        for(Shard shard: shardStrategy) {
-            if (shard == null) {
-                fail("Shard == null");
-            }
-
-            ReadShardDataProvider dataProvider = new ReadShardDataProvider(shard,genomeLocParser,dataSource.seek(shard),null, Collections.<ReferenceOrderedDataSource>emptyList());
-            accumulator = traversalEngine.traverse(countReadWalker, dataProvider, accumulator);
-            dataProvider.close();
-        }
-
-        countReadWalker.onTraversalDone(accumulator);
-
-        if (!(accumulator instanceof Long)) {
-            fail("Count read walker should return a Long.");
-        }
-        if (!accumulator.equals(new Long(10000))) {
-            fail("there should be 10000 mapped reads in the index file, there was " + (accumulator));
-        }
-    }
-
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/walkers/WalkerTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/walkers/WalkerTest.java
deleted file mode 100644
index a4c896e..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/engine/walkers/WalkerTest.java
+++ /dev/null
@@ -1,455 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.engine.walkers;
-
-import htsjdk.tribble.Tribble;
-import htsjdk.tribble.index.Index;
-import htsjdk.tribble.index.IndexFactory;
-import htsjdk.variant.bcf2.BCF2Utils;
-import htsjdk.variant.vcf.VCFCodec;
-import org.apache.commons.lang.StringUtils;
-import org.broadinstitute.gatk.engine.CommandLineExecutable;
-import org.broadinstitute.gatk.engine.CommandLineGATK;
-import org.broadinstitute.gatk.engine.GenomeAnalysisEngine;
-import org.broadinstitute.gatk.engine.phonehome.GATKRunReport;
-import org.broadinstitute.gatk.utils.BaseTest;
-import org.broadinstitute.gatk.utils.MD5DB;
-import org.broadinstitute.gatk.utils.MD5Mismatch;
-import org.broadinstitute.gatk.utils.Utils;
-import org.broadinstitute.gatk.utils.classloader.JVMUtils;
-import org.broadinstitute.gatk.utils.collections.Pair;
-import org.broadinstitute.gatk.utils.exceptions.GATKException;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-import org.testng.Assert;
-import org.testng.annotations.AfterSuite;
-import org.testng.annotations.BeforeMethod;
-
-import java.io.ByteArrayOutputStream;
-import java.io.File;
-import java.io.PrintStream;
-import java.text.SimpleDateFormat;
-import java.util.*;
-
-public class WalkerTest extends BaseTest {
-    private static final boolean GENERATE_SHADOW_BCF = true;
-    private static final boolean ENABLE_PHONE_HOME_FOR_TESTS = false;
-    private static final boolean ENABLE_ON_THE_FLY_CHECK_FOR_VCF_INDEX = false;
-    private static final boolean ENABLE_AUTO_INDEX_CREATION_AND_LOCKING_FOR_TESTS = false;
-
-    private static MD5DB md5DB = new MD5DB();
-
-    @BeforeMethod
-    public void initializeWalkerTests() {
-        logger.debug("Initializing walker tests");
-        GenomeAnalysisEngine.resetRandomGenerator();
-    }
-
-    @AfterSuite
-    public void finalizeWalkerTests() {
-        logger.debug("Finalizing walker tests");
-        md5DB.close();
-    }
-
-    public static MD5DB getMd5DB() {
-        return md5DB;
-    }
-
-    public void validateOutputBCFIfPossible(final String name, final File resultFile) {
-        final File bcfFile = BCF2Utils.shadowBCF(resultFile);
-        if ( bcfFile != null && bcfFile.exists() ) {
-            logger.warn("Checking shadow BCF output file " + bcfFile + " against VCF file " + resultFile);
-            try {
-                assertVCFandBCFFilesAreTheSame(resultFile, bcfFile);
-                logger.warn("  Shadow BCF PASSED!");
-            } catch ( Exception e ) {
-                Assert.fail("Exception received reading shadow BCFFile " + bcfFile + " for test " + name, e);
-            }
-        }
-    }
-
-    public void validateOutputIndex(final String name, final File resultFile) {
-        if ( !ENABLE_ON_THE_FLY_CHECK_FOR_VCF_INDEX )
-            return;
-
-        File indexFile = Tribble.indexFile(resultFile);
-        //System.out.println("Putative index file is " + indexFile);
-        if ( indexFile.exists() ) {
-            if ( resultFile.getAbsolutePath().contains(".vcf") ) {
-                // todo -- currently we only understand VCF files! Blow up since we can't test them
-                throw new GATKException("Found an index created for file " + resultFile + " but we can only validate VCF files.  Extend this code!");
-            }
-
-            System.out.println("Verifying on-the-fly index " + indexFile + " for test " + name + " using file " + resultFile);
-            Index indexFromOutputFile = IndexFactory.createDynamicIndex(resultFile, new VCFCodec());
-            Index dynamicIndex = IndexFactory.loadIndex(indexFile.getAbsolutePath());
-
-            if ( ! indexFromOutputFile.equalsIgnoreProperties(dynamicIndex) ) {
-                Assert.fail(String.format("Index on disk from indexing on the fly not equal to the index created after the run completed.  FileIndex %s vs. on-the-fly %s%n",
-                        indexFromOutputFile.getProperties(),
-                        dynamicIndex.getProperties()));
-            }
-        }
-    }
-
-    public List<String> assertMatchingMD5s(final String testName, final String testClassName, List<File> resultFiles, List<String> expectedMD5s) {
-        List<String> md5s = new ArrayList<String>();
-        List<MD5DB.MD5Match> fails = new ArrayList<MD5DB.MD5Match>();
-
-        for (int i = 0; i < resultFiles.size(); i++) {
-            MD5DB.MD5Match result = getMd5DB().testFileMD5(testName, testClassName, resultFiles.get(i), expectedMD5s.get(i), parameterize());
-            validateOutputBCFIfPossible(testName, resultFiles.get(i));
-            if ( ! result.failed ) {
-                validateOutputIndex(testName, resultFiles.get(i));
-                md5s.add(result.expectedMD5);
-            } else {
-                fails.add(result);
-            }
-        }
-
-        if ( ! fails.isEmpty() ) {
-            List<String> actuals = new ArrayList<String>();
-            List<String> expecteds = new ArrayList<String>();
-            List<String> diffEngineOutputs = new ArrayList<String>();
-
-            for ( final MD5DB.MD5Match fail : fails ) {
-                actuals.add(fail.actualMD5);
-                expecteds.add(fail.expectedMD5);
-                diffEngineOutputs.add(fail.diffEngineOutput);
-                logger.warn("Fail: " + fail.failMessage);
-            }
-
-            final MD5Mismatch failure = new MD5Mismatch(actuals, expecteds, diffEngineOutputs);
-            Assert.fail(failure.toString());
-        }
-
-        return md5s;
-    }
-
-    public String buildCommandLine(String... arguments) {
-        String cmdline = "";
-
-        for ( int argIndex = 0; argIndex < arguments.length; argIndex++ ) {
-            cmdline += arguments[argIndex];
-
-            if (argIndex < arguments.length - 1) {
-                cmdline += " ";
-            }
-        }
-
-        return cmdline;
-    }
-
-    public class WalkerTestSpec {
-        // Arguments implicitly included in all Walker command lines, unless explicitly
-        // disabled using the disableImplicitArgs() method below.
-        String args = "";
-        int nOutputFiles = -1;
-        List<String> md5s = null;
-        List<String> exts = null;
-        Class expectedException = null;
-        boolean includeImplicitArgs = true;
-        boolean includeShadowBCF = true;
-
-        // Name of the test class that created this test case
-        private Class testClass;
-
-        // the default output path for the integration test
-        private File outputFileLocation = null;
-
-        protected Map<String, File> auxillaryFiles = new HashMap<String, File>();
-
-        public WalkerTestSpec(String args, List<String> md5s) {
-            this(args, -1, md5s);
-        }
-
-        public WalkerTestSpec(String args, int nOutputFiles, List<String> md5s) {
-            this.args = args;
-            this.nOutputFiles = md5s.size();
-            this.md5s = md5s;
-            this.testClass = getCallingTestClass();
-        }
-
-        public WalkerTestSpec(String args, List<String> exts, List<String> md5s) {
-            this(args, -1, exts, md5s);
-        }
-
-        public WalkerTestSpec(String args, int nOutputFiles, List<String> exts, List<String> md5s) {
-            this.args = args;
-            this.nOutputFiles = md5s.size();
-            this.md5s = md5s;
-            this.exts = exts;
-            this.testClass = getCallingTestClass();
-        }
-
-        // @Test(expectedExceptions) doesn't work in integration tests, so use this instead
-        public WalkerTestSpec(String args, int nOutputFiles, Class expectedException) {
-            this.args = args;
-            this.nOutputFiles = nOutputFiles;
-            this.expectedException = expectedException;
-            this.testClass = getCallingTestClass();
-        }
-
-        private Class getCallingTestClass() {
-            return JVMUtils.getCallingClass(getClass());
-        }
-
-        public String getTestClassName() {
-            return testClass.getSimpleName();
-        }
-
-        public String getArgsWithImplicitArgs() {
-            String args = this.args;
-            if ( includeImplicitArgs ) {
-                args = args + (ENABLE_PHONE_HOME_FOR_TESTS ?
-                        String.format(" -et %s ", GATKRunReport.PhoneHomeOption.AWS) :
-                        String.format(" -et %s -K %s ", GATKRunReport.PhoneHomeOption.NO_ET, gatkKeyFile));
-                if ( includeShadowBCF && GENERATE_SHADOW_BCF )
-                    args = args + " --generateShadowBCF ";
-                if ( ! ENABLE_AUTO_INDEX_CREATION_AND_LOCKING_FOR_TESTS )
-                    args = args + " --disable_auto_index_creation_and_locking_when_reading_rods ";
-            }
-
-            return args;
-        }
-
-        /**
-         * In the case where the input VCF files are malformed and cannot be fixed
-         * this function tells the engine to not try to generate a shadow BCF
-         * which will ultimately blow up...
-         */
-        public void disableShadowBCF() { this.includeShadowBCF = false; }
-        public void setOutputFileLocation(File outputFileLocation) {
-            this.outputFileLocation = outputFileLocation;
-        }        
-
-        protected File getOutputFileLocation() {
-            return outputFileLocation;
-        }
-        
-        public boolean expectsException() {
-            return expectedException != null;
-        }
-
-        public Class getExpectedException() {
-            if ( ! expectsException() ) throw new ReviewedGATKException("Tried to get expection for walker test that doesn't expect one");
-            return expectedException;
-        }
-
-        public void addAuxFile(String expectededMD5sum, File outputfile) {
-            auxillaryFiles.put(expectededMD5sum, outputfile);
-        }
-
-        public void disableImplicitArgs() {
-            includeImplicitArgs = false;
-        }
-    }
-
-    protected boolean parameterize() {
-        return false;
-    }
-
-    public enum ParallelTestType {
-        TREE_REDUCIBLE,
-        NANO_SCHEDULED,
-        BOTH
-    }
-
-    protected Pair<List<File>, List<String>> executeTestParallel(final String name, WalkerTestSpec spec, ParallelTestType testType) {
-        final List<Integer> ntThreads  = testType == ParallelTestType.TREE_REDUCIBLE || testType == ParallelTestType.BOTH ? Arrays.asList(1, 4) : Collections.<Integer>emptyList();
-        final List<Integer> cntThreads = testType == ParallelTestType.NANO_SCHEDULED || testType == ParallelTestType.BOTH ? Arrays.asList(1, 4) : Collections.<Integer>emptyList();
-
-        return executeTest(name, spec, ntThreads, cntThreads);
-    }
-
-    protected Pair<List<File>, List<String>> executeTestParallel(final String name, WalkerTestSpec spec) {
-        return executeTestParallel(name, spec, ParallelTestType.TREE_REDUCIBLE);
-    }
-
-    protected Pair<List<File>, List<String>> executeTest(final String name, WalkerTestSpec spec, List<Integer> ntThreads, List<Integer> cpuThreads) {
-        String originalArgs = spec.args;
-        Pair<List<File>, List<String>> results = null;
-
-        boolean ran1 = false;
-        for ( int nt : ntThreads ) {
-            String extra = nt == 1 ? "" : (" -nt " + nt);
-            ran1 = ran1 || nt == 1;
-            spec.args = originalArgs + extra;
-            results = executeTest(name + "-nt-" + nt, spec);
-        }
-
-        for ( int nct : cpuThreads ) {
-            if ( nct != 1 ) {
-                String extra = " -nct " + nct;
-                spec.args = originalArgs + extra;
-                results = executeTest(name + "-cnt-" + nct, spec);
-            }
-        }
-
-        return results;
-    }
-
-    protected Pair<List<File>, List<String>> executeTest(final String name, WalkerTestSpec spec) {
-        List<File> tmpFiles = new ArrayList<File>();
-        for (int i = 0; i < spec.nOutputFiles; i++) {
-            String ext = spec.exts == null ? ".tmp" : "." + spec.exts.get(i);
-            File fl = createTempFile(String.format("walktest.tmp_param.%d", i), ext);
-
-            // Cleanup any potential shadow BCFs on exit too, if we're generating them
-            if ( spec.includeShadowBCF && GENERATE_SHADOW_BCF ) {
-                final File potentalShadowBCFFile = BCF2Utils.shadowBCF(fl);
-                potentalShadowBCFFile.deleteOnExit();
-                new File(potentalShadowBCFFile.getAbsolutePath() + Tribble.STANDARD_INDEX_EXTENSION).deleteOnExit();
-            }
-
-            tmpFiles.add(fl);
-        }
-
-        final String args = String.format(spec.getArgsWithImplicitArgs(), tmpFiles.toArray());
-        System.out.println(Utils.dupString('-', 80));
-
-        if ( spec.expectsException() ) {
-            // this branch handles the case were we are testing that a walker will fail as expected
-            return executeTest(name, spec.getTestClassName(), spec.getOutputFileLocation(), null, tmpFiles, args, spec.getExpectedException());
-        } else {
-            List<String> md5s = new LinkedList<String>();
-            md5s.addAll(spec.md5s);
-
-            // check to see if they included any auxillary files, if so add them to the list and set them to be deleted on exit
-            for (String md5 : spec.auxillaryFiles.keySet()) {
-                md5s.add(md5);
-                final File auxFile = spec.auxillaryFiles.get(md5);
-                auxFile.deleteOnExit();
-                tmpFiles.add(auxFile);
-            }
-            return executeTest(name, spec.getTestClassName(), spec.getOutputFileLocation(), md5s, tmpFiles, args, null);
-        }
-    }
-
-    private void qcMD5s(String name, List<String> md5s) {
-        final String exampleMD5 = "709a1f482cce68992c637da3cff824a8";
-        for (String md5 : md5s) {
-            if ( md5 == null )
-                throw new IllegalArgumentException("Null MD5 found in test " + name);
-            if ( md5.equals("") ) // ok
-                continue;
-            if ( ! StringUtils.isAlphanumeric(md5) )
-                throw new IllegalArgumentException("MD5 contains non-alphanumeric characters test " + name + " md5=" + md5);
-            if ( md5.length() != exampleMD5.length() )
-                throw new IllegalArgumentException("Non-empty MD5 of unexpected number of characters test " + name + " md5=" + md5);
-        }
-    }
-
-
-    /**
-     * execute the test, given the following:
-     * @param testName     the name of the test
-     * @param testClassName the name of the class that contains the test
-     * @param md5s     the list of md5s
-     * @param tmpFiles the temp file corresponding to the md5 list
-     * @param args     the argument list
-     * @param expectedException the expected exception or null
-     * @return a pair of file and string lists
-     */
-    private Pair<List<File>, List<String>> executeTest(String testName, String testClassName, File outputFileLocation, List<String> md5s, List<File> tmpFiles, String args, Class expectedException) {
-        if ( md5s != null ) qcMD5s(testName, md5s);
-
-        if (outputFileLocation != null)
-            args += " -o " + outputFileLocation.getAbsolutePath();
-        executeTest(testName, testClassName, args, expectedException);
-
-        if ( expectedException != null ) {
-            return null;
-        } else {
-            // we need to check MD5s
-            return new Pair<List<File>, List<String>>(tmpFiles, assertMatchingMD5s(testName, testClassName, tmpFiles, md5s));
-        }
-    }
-    
-    /**
-     * execute the test, given the following:
-     * @param testName      the name of the test
-     * @param testClassName the name of the class that contains the test
-     * @param args          the argument list
-     * @param expectedException the expected exception or null
-     */
-    private void executeTest(String testName, String testClassName, String args, Class expectedException) {
-        CommandLineGATK instance = new CommandLineGATK();
-        String[] command = Utils.escapeExpressions(args);
-        // run the executable
-        boolean gotAnException = false;
-        try {
-            final String now = new SimpleDateFormat("HH:mm:ss").format(new Date());
-            final String cmdline = Utils.join(" ",command);
-            System.out.println(String.format("[%s] Executing test %s:%s with GATK arguments: %s", now, testClassName, testName, cmdline));
-            // also write the command line to the HTML log for convenient follow-up
-            // do the replaceAll so paths become relative to the current
-            BaseTest.log(cmdline.replaceAll(publicTestDirRoot, "").replaceAll(privateTestDirRoot, ""));
-            CommandLineExecutable.start(instance, command);
-        } catch (Exception e) {
-            gotAnException = true;
-            if ( expectedException != null ) {
-                // we expect an exception
-                //System.out.println(String.format("Wanted exception %s, saw %s", expectedException, e.getClass()));
-                if ( expectedException.isInstance(e) ) {
-                    // it's the type we expected
-                    //System.out.println(String.format("  => %s PASSED", name));
-                } else {
-                    final String message = String.format("Test %s:%s expected exception %s but instead got %s with error message %s",
-                            testClassName, testName, expectedException, e.getClass(), e.getMessage());
-                    if ( e.getCause() != null ) {
-                        final ByteArrayOutputStream baos = new ByteArrayOutputStream();
-                        final PrintStream ps = new PrintStream(baos);
-                        e.getCause().printStackTrace(ps);
-                        BaseTest.log(message);
-                        BaseTest.log(baos.toString());
-                    }
-                    Assert.fail(message);
-                }
-            } else {
-                // we didn't expect an exception but we got one :-(
-                throw new RuntimeException(e);
-            }
-        }
-
-        // catch failures from the integration test
-        if ( expectedException != null ) {
-            if ( ! gotAnException )
-                // we expected an exception but didn't see it
-                Assert.fail(String.format("Test %s:%s expected exception %s but none was thrown", testClassName, testName, expectedException.toString()));
-        } else {
-            if ( CommandLineExecutable.result != 0) {
-                throw new RuntimeException("Error running the GATK with arguments: " + args);
-            }
-        }
-    }
-
-
-    protected File createTempFileFromBase(final String name) {
-        File fl = new File(name);
-        fl.deleteOnExit();
-        return fl;
-    }
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/tools/CatVariantsIntegrationTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/tools/CatVariantsIntegrationTest.java
index f1b8d6e..9e6ad82 100644
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/tools/CatVariantsIntegrationTest.java
+++ b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/tools/CatVariantsIntegrationTest.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -24,21 +24,31 @@
 */
 
 package org.broadinstitute.gatk.tools;
-
-import org.apache.commons.lang.StringUtils;
+import htsjdk.tribble.index.Index;
+import htsjdk.tribble.index.IndexFactory;
+import htsjdk.variant.vcf.VCFCodec;
 import htsjdk.tribble.AbstractFeatureReader;
+import org.apache.commons.lang.StringUtils;
+import org.broadinstitute.gatk.engine.GATKVCFUtils;
 import org.broadinstitute.gatk.utils.BaseTest;
 import org.broadinstitute.gatk.utils.MD5DB;
 import org.broadinstitute.gatk.utils.MD5Mismatch;
+import org.broadinstitute.gatk.utils.Utils;
 import org.broadinstitute.gatk.utils.runtime.ProcessController;
 import org.broadinstitute.gatk.utils.runtime.ProcessSettings;
 import org.broadinstitute.gatk.utils.runtime.RuntimeUtils;
+import org.broadinstitute.gatk.utils.variant.GATKVCFIndexType;
 import org.testng.Assert;
+import org.testng.TestException;
 import org.testng.annotations.DataProvider;
 import org.testng.annotations.Test;
 
 import java.io.File;
 import java.io.IOException;
+import java.util.ArrayList;
+import java.util.LinkedHashSet;
+import java.util.List;
+import java.util.Set;
 
 public class CatVariantsIntegrationTest {
     private final MD5DB md5db = new MD5DB();
@@ -47,6 +57,8 @@ public class CatVariantsIntegrationTest {
     private final File CatVariantsVcf2 = new File(CatVariantsDir, "CatVariantsTest2.vcf");
     private final File CatVariantsBcf1 = new File(CatVariantsDir, "CatVariantsTest1.bcf");
     private final File CatVariantsBcf2 = new File(CatVariantsDir, "CatVariantsTest2.bcf");
+    private final File CatVariantsVcf3 = new File(CatVariantsDir, "CatVariantsTest3.vcf");
+    private final File CatVariantsVcf4 = new File(CatVariantsDir, "CatVariantsTest4.vcf");
 
     private class CatVariantsTestProvider extends BaseTest.TestDataProvider {
         private final File file1;
@@ -64,13 +76,13 @@ public class CatVariantsIntegrationTest {
         }
 
         public final String getCmdLine() {
-            return String.format("java -cp %s %s -R %s -V %s -V %s -out %s",
+            return String.format("java -cp \"%s\" %s -R %s -V %s -V %s -out %s",
                     StringUtils.join(RuntimeUtils.getAbsoluteClassPaths(), File.pathSeparatorChar),
                     CatVariants.class.getCanonicalName(), BaseTest.b37KGReference, file1, file2, outputFile);
         }
 
         public String toString() {
-            return "CatVariantsTestProvider " + outputFile;
+            return String.format("CatVariantsTestProvider %s + %s -> %s", file1.getName(), file2.getName(), outputFile.getName());
         }
     }
 
@@ -79,20 +91,25 @@ public class CatVariantsIntegrationTest {
         final File catVariantsTempList1 = BaseTest.createTempListFile("CatVariantsTest1", CatVariantsVcf1.getAbsolutePath());
         final File catVariantsTempList2 = BaseTest.createTempListFile("CatVariantsTest2", CatVariantsVcf2.getAbsolutePath());
 
-        new CatVariantsTestProvider(CatVariantsVcf1, CatVariantsVcf2, BaseTest.createTempFile("CatVariantsTest", ".vcf"), "d0d81eb7fd3905256c4ac7c0fc480094");
-        new CatVariantsTestProvider(CatVariantsBcf1, CatVariantsBcf2, BaseTest.createTempFile("CatVariantsTest", ".bcf"), "6a57fcbbf3cae490896d13a288670d83");
+        new CatVariantsTestProvider(CatVariantsVcf1, CatVariantsVcf2, BaseTest.createTempFile("CatVariantsTest", ".vcf"), "c055705e0606f4fe89d339d416c182e1");
+        new CatVariantsTestProvider(CatVariantsBcf1, CatVariantsBcf2, BaseTest.createTempFile("CatVariantsTest", ".bcf"), "2a82e959b3b07b461d64bd5ed7298aa3");
 
-        for (String extension : AbstractFeatureReader.BLOCK_COMPRESSED_EXTENSIONS) {
-            final File file1 = new File(CatVariantsDir, "CatVariantsTest1.vcf" + extension);
-            final File file2 = new File(CatVariantsDir, "CatVariantsTest2.vcf" + extension);
-            final File outputFile = BaseTest.createTempFile("CatVariantsTest", ".vcf" + extension);
-            new CatVariantsTestProvider(file1, file2, outputFile, "33f728ac5c70ce2994f3619a27f47088");
+        for (String extension1 : AbstractFeatureReader.BLOCK_COMPRESSED_EXTENSIONS) {
+            for (String extension2 : AbstractFeatureReader.BLOCK_COMPRESSED_EXTENSIONS) {
+                final File file1 = new File(CatVariantsDir, "CatVariantsTest1.vcf" + extension1);
+                final File file2 = new File(CatVariantsDir, "CatVariantsTest2.vcf" + extension2);
+                new CatVariantsTestProvider(file1, file2, BaseTest.createTempFile("CatVariantsTest.", ".vcf"), "c055705e0606f4fe89d339d416c182e1");
+                new CatVariantsTestProvider(file1, file2, BaseTest.createTempFile("CatVariantsTest.", ".bcf"), "2a82e959b3b07b461d64bd5ed7298aa3");
+                new CatVariantsTestProvider(file1, file2, BaseTest.createTempFile("CatVariantsTest.", ".vcf" + extension1), "3beb2c58fb795fcdc485de9868eda576");
+            }
+            new CatVariantsTestProvider(CatVariantsVcf1, CatVariantsVcf2, BaseTest.createTempFile("CatVariantsTest.", ".vcf" + extension1), "3beb2c58fb795fcdc485de9868eda576");
+            new CatVariantsTestProvider(CatVariantsBcf1, CatVariantsBcf2, BaseTest.createTempFile("CatVariantsTest.", ".vcf" + extension1), "b9f31b6a00226c58181c19d421503693");
         }
 
         //Test list parsing functionality
-        new CatVariantsTestProvider(catVariantsTempList1, CatVariantsVcf2, BaseTest.createTempFile("CatVariantsTest", ".vcf"), "d0d81eb7fd3905256c4ac7c0fc480094");
-        new CatVariantsTestProvider(CatVariantsVcf1, catVariantsTempList2, BaseTest.createTempFile("CatVariantsTest", ".vcf"), "d0d81eb7fd3905256c4ac7c0fc480094");
-        new CatVariantsTestProvider(catVariantsTempList1, catVariantsTempList2, BaseTest.createTempFile("CatVariantsTest", ".vcf"), "d0d81eb7fd3905256c4ac7c0fc480094");
+        new CatVariantsTestProvider(catVariantsTempList1, CatVariantsVcf2, BaseTest.createTempFile("CatVariantsTest.", ".vcf"), "c055705e0606f4fe89d339d416c182e1");
+        new CatVariantsTestProvider(CatVariantsVcf1, catVariantsTempList2, BaseTest.createTempFile("CatVariantsTest.", ".vcf"), "c055705e0606f4fe89d339d416c182e1");
+        new CatVariantsTestProvider(catVariantsTempList1, catVariantsTempList2, BaseTest.createTempFile("CatVariantsTest.", ".vcf"), "c055705e0606f4fe89d339d416c182e1");
 
         return CatVariantsTestProvider.getTests(CatVariantsTestProvider.class);
     }
@@ -101,7 +118,7 @@ public class CatVariantsIntegrationTest {
     public void testExtensions(final CatVariantsTestProvider cfg) throws IOException {
 
         ProcessController pc = ProcessController.getThreadLocal();
-        ProcessSettings ps = new ProcessSettings(cfg.getCmdLine().split("\\s+"));
+        ProcessSettings ps = new ProcessSettings(Utils.escapeExpressions(cfg.getCmdLine()));
         pc.execAndCheck(ps);
 
         MD5DB.MD5Match result = md5db.testFileMD5("testExtensions", "CatVariantsTestProvider", cfg.outputFile, cfg.md5, false);
@@ -111,35 +128,161 @@ public class CatVariantsIntegrationTest {
         }
     }
 
-    @Test(expectedExceptions = IOException.class)
-    public void testMismatchedExtensions1() throws IOException {
+    @DataProvider(name = "SortOrderTest")
+    public Object[][] makeSortOrderTestProvider() {
+        new CatVariantsTestProvider(CatVariantsVcf3, CatVariantsVcf4, BaseTest.createTempFile("CatVariantsSortOrderTest", ".vcf"), "fb0b4ebe98ca23862b45fcd672fbfc3e");
+
+        return CatVariantsTestProvider.getTests(CatVariantsTestProvider.class);
+    }
+
+    @Test(dataProvider = "SortOrderTest")
+    public void testSortOrder(final CatVariantsTestProvider cfg) throws IOException {
+
+        ProcessController pc = ProcessController.getThreadLocal();
+        ProcessSettings ps = new ProcessSettings(Utils.escapeExpressions(cfg.getCmdLine()));
+        pc.execAndCheck(ps);
+
+        MD5DB.MD5Match result = md5db.testFileMD5("testSortOrder", "CatVariantsTestProvider", cfg.outputFile, cfg.md5, false);
+        if(result.failed) {
+            final MD5Mismatch failure = new MD5Mismatch(result.actualMD5, result.expectedMD5, result.diffEngineOutput);
+            Assert.fail(failure.toString());
+        }
+    }
+
+    @DataProvider(name = "MismatchedExtensionsTest")
+    public Object[][] makeMismatchedExtensionsTestProvider() {
+        return new Object[][]{
+                {".vcf", ".vcf.gz"},
+                {".vcf.gz", ".vcf"},
+                {".bcf", ".vcf.gz"},
+                {".vcf.gz", ".bcf"},
+                {".vcf", ".bcf"},
+                {".bcf", ".vcf"}
+        };
+    }
+
+    @Test(dataProvider = "MismatchedExtensionsTest", expectedExceptions = IOException.class)
+    public void testMismatchedExtensions1(final String extension1, final String extension2) throws IOException {
+        String cmdLine = String.format("java -cp \"%s\" %s -R %s -V %s -V %s -out %s",
+                StringUtils.join(RuntimeUtils.getAbsoluteClassPaths(), File.pathSeparatorChar),
+                CatVariants.class.getCanonicalName(),
+                BaseTest.b37KGReference,
+                new File(CatVariantsDir, "CatVariantsTest1" + extension1),
+                new File(CatVariantsDir, "CatVariantsTest2" + extension2),
+                BaseTest.createTempFile("CatVariantsTest", ".bcf"));
+
+        ProcessController pc = ProcessController.getThreadLocal();
+        ProcessSettings ps = new ProcessSettings(Utils.escapeExpressions(cmdLine));
+        pc.execAndCheck(ps);
+    }
+
+    @Test(dataProvider = "MismatchedExtensionsTest", expectedExceptions = IOException.class)
+    public void testMismatchedExtensions2(final String extension1, final String extension2) throws IOException {
+
+        String cmdLine = String.format("java -cp \"%s\" %s -R %s -V %s -V %s -out %s",
+                StringUtils.join(RuntimeUtils.getAbsoluteClassPaths(), File.pathSeparatorChar),
+                CatVariants.class.getCanonicalName(),
+                BaseTest.b37KGReference,
+                new File(CatVariantsDir, "CatVariantsTest1" + extension1),
+                new File(CatVariantsDir, "CatVariantsTest2" + extension2),
+                BaseTest.createTempFile("CatVariantsTest", ".vcf"));
+
+        ProcessController pc = ProcessController.getThreadLocal();
+        ProcessSettings ps = new ProcessSettings(Utils.escapeExpressions(cmdLine));
+        pc.execAndCheck(ps);
+    }
+
+    //
+    //
+    // IndexCreator tests
+    //
+    //
+
+    private class VCFIndexCreatorTest extends BaseTest.TestDataProvider {
+        private final GATKVCFIndexType type;
+        private final int parameter;
+
+        private VCFIndexCreatorTest(GATKVCFIndexType type, int parameter) {
+            super(VCFIndexCreatorTest.class);
+
+            this.type = type;
+            this.parameter = parameter;
+        }
+
+        public String toString() {
+            return String.format("Index Type %s, Index Parameter %s", type, parameter);
+        }
+
+        public Index getIndex(final File vcfFile) {
+            switch (type) {
+                case DYNAMIC_SEEK : return IndexFactory.createDynamicIndex(vcfFile, new VCFCodec(), IndexFactory.IndexBalanceApproach.FOR_SEEK_TIME);
+                case DYNAMIC_SIZE : return IndexFactory.createDynamicIndex(vcfFile, new VCFCodec(), IndexFactory.IndexBalanceApproach.FOR_SIZE);
+                case LINEAR : return IndexFactory.createLinearIndex(vcfFile, new VCFCodec(), parameter);
+                case INTERVAL : return IndexFactory.createIntervalIndex(vcfFile, new VCFCodec(), parameter);
+                default : throw new TestException("Invalid index type");
+            }
+        }
+    }
+
+    @DataProvider(name = "IndexDataProvider")
+    public Object[][] indexCreatorData() {
+        new VCFIndexCreatorTest(GATKVCFIndexType.DYNAMIC_SEEK, 0);
+        new VCFIndexCreatorTest(GATKVCFIndexType.DYNAMIC_SIZE, 0);
+        new VCFIndexCreatorTest(GATKVCFIndexType.LINEAR, 100);
+        new VCFIndexCreatorTest(GATKVCFIndexType.LINEAR, 10000);
+        new VCFIndexCreatorTest(GATKVCFIndexType.INTERVAL, 20);
+        new VCFIndexCreatorTest(GATKVCFIndexType.INTERVAL, 2000);
 
-        String cmdLine = String.format("java -cp %s %s -R %s -V %s -V %s -out %s",
+        return BaseTest.TestDataProvider.getTests(VCFIndexCreatorTest.class);
+    }
+
+    @Test(dataProvider = "IndexDataProvider")
+    public void testCatVariantsVCFIndexCreation(VCFIndexCreatorTest testSpec) throws IOException{
+
+        String cmdLine = String.format("java -cp \"%s\" %s -R %s -V %s -V %s --variant_index_type %s --variant_index_parameter %s -out %s",
                 StringUtils.join(RuntimeUtils.getAbsoluteClassPaths(), File.pathSeparatorChar),
                 CatVariants.class.getCanonicalName(),
                 BaseTest.b37KGReference,
                 CatVariantsVcf1,
                 CatVariantsVcf2,
-                BaseTest.createTempFile("CatVariantsTest", ".bcf"));
+                testSpec.type,
+                testSpec.parameter,
+                BaseTest.createTempFile("CatVariantsVCFIndexCreationTest", ".vcf"));
 
         ProcessController pc = ProcessController.getThreadLocal();
-        ProcessSettings ps = new ProcessSettings(cmdLine.split("\\s+"));
+        ProcessSettings ps = new ProcessSettings(Utils.escapeExpressions(cmdLine));
         pc.execAndCheck(ps);
     }
 
-    @Test(expectedExceptions = IOException.class)
-    public void testMismatchedExtensions2() throws IOException {
+    @Test()
+    public void testCatVariantsGVCFIndexCreation() throws IOException{
 
-        String cmdLine = String.format("java -cp %s %s -R %s -V %s -V %s -out %s",
+        String cmdLine = String.format("java -cp \"%s\" %s -R %s -V %s -V %s -out %s",
                 StringUtils.join(RuntimeUtils.getAbsoluteClassPaths(), File.pathSeparatorChar),
                 CatVariants.class.getCanonicalName(),
                 BaseTest.b37KGReference,
                 CatVariantsVcf1,
-                CatVariantsBcf2,
-                BaseTest.createTempFile("CatVariantsTest", ".vcf"));
+                CatVariantsVcf2,
+                BaseTest.createTempFile("CatVariantsGVCFIndexCreationTest", "." + GATKVCFUtils.GVCF_EXT));
+
+        ProcessController pc = ProcessController.getThreadLocal();
+        ProcessSettings ps = new ProcessSettings(Utils.escapeExpressions(cmdLine));
+        pc.execAndCheck(ps);
+    }
+
+    @Test()
+    public void testCatVariantsGVCFGzIndexCreation() throws IOException{
+
+        String cmdLine = String.format("java -cp \"%s\" %s -R %s -V %s -V %s -out %s",
+                StringUtils.join(RuntimeUtils.getAbsoluteClassPaths(), File.pathSeparatorChar),
+                CatVariants.class.getCanonicalName(),
+                BaseTest.b37KGReference,
+                CatVariantsVcf1,
+                CatVariantsVcf2,
+                BaseTest.createTempFile("CatVariantsGVCFIndexCreationTest", "." + GATKVCFUtils.GVCF_GZ_EXT));
 
         ProcessController pc = ProcessController.getThreadLocal();
-        ProcessSettings ps = new ProcessSettings(cmdLine.split("\\s+"));
+        ProcessSettings ps = new ProcessSettings(Utils.escapeExpressions(cmdLine));
         pc.execAndCheck(ps);
     }
 }
\ No newline at end of file
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/tools/walkers/BAQIntegrationTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/tools/walkers/BAQIntegrationTest.java
index 68451ef..f26f6a9 100644
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/tools/walkers/BAQIntegrationTest.java
+++ b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/tools/walkers/BAQIntegrationTest.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -43,13 +43,13 @@ public class BAQIntegrationTest extends WalkerTest {
     // --------------------------------------------------------------------------------------------------------------
     @Test
     public void testPrintReadsNoBAQ() {
-        WalkerTestSpec spec = new WalkerTestSpec( baseCommand +" -baq OFF",  1, Arrays.asList("d1f74074e718c82810512bf40dbc7f72"));
+        WalkerTestSpec spec = new WalkerTestSpec( baseCommand +" -baq OFF",  1, Arrays.asList("e33187ca383c7f5c75c5d547ec79e1cb"));
         executeTest(String.format("testPrintReadsNoBAQ"), spec);
     }
 
     @Test
     public void testPrintReadsRecalBAQ() {
-        WalkerTestSpec spec = new WalkerTestSpec( baseCommand +" -baq RECALCULATE",  1, Arrays.asList("96ec97cf92f1f660bd5244c6b44539b3"));
+        WalkerTestSpec spec = new WalkerTestSpec( baseCommand +" -baq RECALCULATE",  1, Arrays.asList("a25043edfbfa4f21a13cc21064b460df"));
         executeTest(String.format("testPrintReadsRecalBAQ"), spec);
     }
 }
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/tools/walkers/CNV/SymbolicAllelesIntegrationTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/tools/walkers/CNV/SymbolicAllelesIntegrationTest.java
index 16e47cd..4e11d0d 100644
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/tools/walkers/CNV/SymbolicAllelesIntegrationTest.java
+++ b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/tools/walkers/CNV/SymbolicAllelesIntegrationTest.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -48,7 +48,7 @@ public class SymbolicAllelesIntegrationTest extends WalkerTest {
         WalkerTestSpec spec = new WalkerTestSpec(
                 baseTestString(b36KGReference, "symbolic_alleles_1.vcf"),
                 1,
-                Arrays.asList("5bafc5a99ea839e686e55de93f91fd5c"));
+                Arrays.asList("a1de53ac340f4ca02367c40680628251"));
         executeTest("Test symbolic alleles", spec);
     }
 
@@ -57,7 +57,7 @@ public class SymbolicAllelesIntegrationTest extends WalkerTest {
         WalkerTestSpec spec = new WalkerTestSpec(
                 baseTestString(b36KGReference, "symbolic_alleles_2.vcf"),
                 1,
-                Arrays.asList("30f66a097987330d42e87da8bcd6be21"));
+                Arrays.asList("c8b294089832bb1a2c450b550318a471"));
         executeTest("Test symbolic alleles mixed in with non-symbolic alleles", spec);
     }
 }
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/tools/walkers/annotator/CompressedDataListUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/tools/walkers/annotator/CompressedDataListUnitTest.java
new file mode 100644
index 0000000..521f4c2
--- /dev/null
+++ b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/tools/walkers/annotator/CompressedDataListUnitTest.java
@@ -0,0 +1,118 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.tools.walkers.annotator;
+
+import org.testng.Assert;
+import org.testng.annotations.Test;
+
+public class CompressedDataListUnitTest {
+
+    @Test
+    public void testAddSingly(){
+        CompressedDataList<Integer> intList = new CompressedDataList<>();
+        intList.add(2);
+        intList.add(5);
+        intList.add(5);
+        intList.add(2);
+        intList.add(2);
+        intList.add(3);
+        intList.add(2);
+        intList.add(2);
+        intList.add(4);
+        intList.add(4);
+        intList.add(4);
+        intList.add(2);
+
+        Assert.assertEquals(intList.isEmpty(), false);
+        Assert.assertEquals(intList.toString(), "2,6,3,1,4,3,5,2");
+    }
+
+    @Test
+    public void testAddValueCounts(){
+        CompressedDataList<Integer> intList = new CompressedDataList<>();
+        intList.add(5,2);
+        intList.add(2,6);
+        intList.add(3,1);
+        intList.add(4,3);
+
+        Assert.assertEquals(intList.isEmpty(), false);
+        Assert.assertEquals(intList.toString(), "2,6,3,1,4,3,5,2");
+    }
+
+    @Test
+    public void testAddBothWays(){
+        CompressedDataList<Integer> intList = new CompressedDataList<>();
+        intList.add(2);
+        intList.add(5,2);
+        intList.add(2);
+        intList.add(2);
+        intList.add(3);
+        intList.add(2);
+        intList.add(2);
+        intList.add(4,2);
+        intList.add(2);
+        intList.add(4,1);
+
+        Assert.assertEquals(intList.toString(), "2,6,3,1,4,3,5,2");
+    }
+
+    @Test
+    public void testCombineLists(){
+        CompressedDataList<Integer> intList1 = new CompressedDataList<>();
+        intList1.add(5,2);
+        intList1.add(2,6);
+        intList1.add(3,1);
+        intList1.add(4,3);
+
+        CompressedDataList<Integer> intList2 = new CompressedDataList<>();
+        intList2.add(2,5);
+        intList2.add(6,2);
+        intList2.add(1,3);
+        intList2.add(3,4);
+
+        intList1.add(intList2);
+
+        Assert.assertEquals(intList1.toString(), "1,3,2,11,3,5,4,3,5,2,6,2");
+
+    }
+
+    @Test
+    public void testIterator(){
+        CompressedDataList<Integer> intList1 = new CompressedDataList<>();
+        intList1.add(5,2);
+        intList1.add(2,6);
+        intList1.add(3,1);
+        intList1.add(4,3);
+
+        CompressedDataList<Integer> intList2 = new CompressedDataList<>();
+        for(Integer i : intList1) {
+            intList2.add(i);
+        }
+
+        Assert.assertEquals(intList1.toString(),intList2.toString());
+    }
+
+}
\ No newline at end of file
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/tools/walkers/annotator/SnpEffUtilUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/tools/walkers/annotator/SnpEffUtilUnitTest.java
index ec10d7d..15279a0 100644
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/tools/walkers/annotator/SnpEffUtilUnitTest.java
+++ b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/tools/walkers/annotator/SnpEffUtilUnitTest.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/tools/walkers/coverage/CallableLociIntegrationTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/tools/walkers/coverage/CallableLociIntegrationTest.java
index b597947..3ac2a3d 100644
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/tools/walkers/coverage/CallableLociIntegrationTest.java
+++ b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/tools/walkers/coverage/CallableLociIntegrationTest.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/tools/walkers/coverage/CompareCallableLociWalkerIntegrationTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/tools/walkers/coverage/CompareCallableLociWalkerIntegrationTest.java
index ccfd743..e24564a 100644
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/tools/walkers/coverage/CompareCallableLociWalkerIntegrationTest.java
+++ b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/tools/walkers/coverage/CompareCallableLociWalkerIntegrationTest.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/tools/walkers/coverage/DepthOfCoverageB36IntegrationTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/tools/walkers/coverage/DepthOfCoverageB36IntegrationTest.java
index 447515d..1f5e398 100644
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/tools/walkers/coverage/DepthOfCoverageB36IntegrationTest.java
+++ b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/tools/walkers/coverage/DepthOfCoverageB36IntegrationTest.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/tools/walkers/coverage/DepthOfCoverageIntegrationTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/tools/walkers/coverage/DepthOfCoverageIntegrationTest.java
index 61785e8..f25ac3a 100644
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/tools/walkers/coverage/DepthOfCoverageIntegrationTest.java
+++ b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/tools/walkers/coverage/DepthOfCoverageIntegrationTest.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -27,8 +27,12 @@ package org.broadinstitute.gatk.tools.walkers.coverage;
 
 import org.broadinstitute.gatk.engine.walkers.WalkerTest;
 import org.testng.annotations.Test;
+import org.testng.Assert;
+
+import org.apache.commons.io.FileUtils;
 
 import java.io.File;
+import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
@@ -178,4 +182,33 @@ public class DepthOfCoverageIntegrationTest extends WalkerTest {
 
     @Test public void testRefNWithNs() { testRefNHandling(true, "24cd2da2e4323ce6fd76217ba6dc2834"); }
     @Test public void testRefNWithoutNs() { testRefNHandling(false, "4fc0f1a2e968f777d693abcefd4fb7af"); }
+
+
+    @Test
+    public void testIncompatibleArgs() throws IOException {
+        final String[] intervals = {"/humgen/gsa-hpprojects/GATK/data/Validation_Data/fhs_jhs_30_targts.interval_list"};
+        final String[] bams = {"/humgen/gsa-hpprojects/GATK/data/Validation_Data/FHS_indexed_subset.bam"};
+        final String refSeqGeneListFile = privateTestDir + "geneTrackHg18Chr1Interval.refSeq";
+
+        final String logFileName = new String("testIncompatibleArgs.log");
+        final String cmd = buildRootCmd(hg18Reference,new ArrayList<>(Arrays.asList(bams)),new ArrayList<>(Arrays.asList(intervals))) + " --omitIntervalStatistics --calculateCoverageOverGenes " + refSeqGeneListFile + " -log " + logFileName;
+        final WalkerTestSpec spec = new WalkerTestSpec(cmd,0, new ArrayList<String>());
+
+        // output file
+        final File outputFile = createTempFile("DepthOfCoverageIncompatibleArgs",".tmp");
+        spec.setOutputFileLocation(outputFile);
+
+        execute("testIncompatibleArgs",spec);
+
+        // check that only the sample gene summary output file is empty
+        Assert.assertEquals( createTempFileFromBase(outputFile.getAbsolutePath()+".sample_gene_summary").length(), 0 );
+        Assert.assertNotEquals( createTempFileFromBase(outputFile.getAbsolutePath()+".sample_cumulative_coverage_counts").length(), 0 );
+        Assert.assertNotEquals( createTempFileFromBase(outputFile.getAbsolutePath()+".sample_cumulative_coverage_proportions").length(), 0 );
+        Assert.assertNotEquals( createTempFileFromBase(outputFile.getAbsolutePath()+".sample_statistics").length(), 0 );
+        Assert.assertNotEquals( createTempFileFromBase(outputFile.getAbsolutePath()+".sample_summary").length(), 0 );
+
+        // check the log for the warning message
+        File file = new File(logFileName);
+        Assert.assertTrue(FileUtils.readFileToString(file).contains(DepthOfCoverage.incompatibleArgsMsg()));
+    }
 }
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/tools/walkers/diffengine/DiffObjectsIntegrationTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/tools/walkers/diffengine/DiffObjectsIntegrationTest.java
new file mode 100644
index 0000000..92b3d86
--- /dev/null
+++ b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/tools/walkers/diffengine/DiffObjectsIntegrationTest.java
@@ -0,0 +1,76 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.tools.walkers.diffengine;
+
+import org.broadinstitute.gatk.engine.walkers.WalkerTest;
+import org.broadinstitute.gatk.utils.BaseTest;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+import java.io.File;
+import java.util.Arrays;
+
+public class DiffObjectsIntegrationTest extends WalkerTest {
+    private class TestParams extends TestDataProvider {
+        public File master, test;
+        public String MD5;
+        public boolean doPairwise;
+
+        private TestParams(String master, String test, final boolean doPairwise, String MD5) {
+            super(TestParams.class);
+            this.master = new File(master);
+            this.test = new File(test);
+            this.MD5 = MD5;
+            this.doPairwise = doPairwise;
+        }
+
+        public String toString() {
+            return String.format("master=%s,test=%s,md5=%s", master, test, MD5);
+        }
+    }
+
+    @DataProvider(name = "data")
+    public Object[][] createData() {
+        new TestParams(privateTestDir + "diffTestMaster.vcf", privateTestDir + "diffTestTest.vcf", true, "71869ddf9665773a842a9def4cc5f3c8");
+        new TestParams(publicTestDir + "exampleBAM.bam", publicTestDir + "exampleBAM.simple.bam", true, "cec7c644c84ef9c96aacaed604d9ec9b");
+        new TestParams(privateTestDir + "diffTestMaster.vcf", privateTestDir + "diffTestTest.vcf", false, "47546e03344103020e49d8037a7e0727");
+        new TestParams(publicTestDir + "exampleBAM.bam", publicTestDir + "exampleBAM.simple.bam", false, "d27b37f7a366c8dacca5cd2590d3c6ce");
+        return TestParams.getTests(TestParams.class);
+    }
+
+    @Test(enabled = true, dataProvider = "data")
+    public void testDiffs(TestParams params) {
+        WalkerTestSpec spec = new WalkerTestSpec(
+                "-T DiffObjects -R " + publicTestDir + "exampleFASTA.fasta "
+                        + " -m " + params.master
+                        + " -t " + params.test
+                        + (params.doPairwise ? " -doPairwise " : "")
+                        + " -o %s",
+                Arrays.asList(params.MD5));
+        executeTest("testDiffObjects:"+params, spec).getFirst();
+    }
+}
+
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/tools/walkers/filters/VariantFiltrationUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/tools/walkers/filters/VariantFiltrationUnitTest.java
new file mode 100644
index 0000000..4d3223f
--- /dev/null
+++ b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/tools/walkers/filters/VariantFiltrationUnitTest.java
@@ -0,0 +1,107 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.tools.walkers.filters;
+
+import htsjdk.samtools.reference.IndexedFastaSequenceFile;
+import org.broadinstitute.gatk.utils.BaseTest;
+import org.broadinstitute.gatk.utils.GenomeLoc;
+import org.broadinstitute.gatk.utils.GenomeLocParser;
+import org.broadinstitute.gatk.utils.Utils;
+import htsjdk.variant.variantcontext.Allele;
+import htsjdk.variant.variantcontext.VariantContext;
+import htsjdk.variant.variantcontext.VariantContextBuilder;
+import org.broadinstitute.gatk.utils.fasta.CachingIndexedFastaSequenceFile;
+
+import java.io.File;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import org.testng.Assert;
+import org.testng.annotations.*;
+
+public class VariantFiltrationUnitTest extends BaseTest {
+
+    private String chr1 = null;
+    private GenomeLoc genomeLoc = null;
+    private String vcFilter = "testFilter";
+
+    @BeforeTest
+    public void before() {
+        // Create GenomeLoc
+        IndexedFastaSequenceFile fasta = CachingIndexedFastaSequenceFile.checkAndCreate(new File(privateTestDir + "iupacFASTA.fasta"));
+        GenomeLocParser genomeLocParser = new GenomeLocParser(fasta);
+        chr1 = fasta.getSequenceDictionary().getSequence(0).getSequenceName();
+        genomeLoc = genomeLocParser.createGenomeLoc(chr1, 5, 10);
+    }
+
+    @DataProvider(name = "VariantMaskData")
+    public Object[][] DoesMaskCoverVariantTestData() {
+
+        final String maskName = "testMask";
+
+        List<Object[]> tests = Arrays.asList(new Object[]{chr1, 0, 0, maskName, 10, true, true},
+                                             new Object[]{"chr2", 0, 0, maskName, 10, true, false},
+                                             new Object[]{chr1, 0, 0, null, 10, true, true},
+                                             new Object[]{chr1, 0, 0, maskName, 10, true, true},
+                                             new Object[]{chr1, 0, 0, vcFilter, 10, true, false},
+                                             new Object[]{chr1, 0, 0, maskName, 1, true, false},
+                                             new Object[]{chr1, 15, 15, maskName, 10, false, true},
+                                             new Object[]{chr1, 15, 15, maskName, 1, false, false}
+                                            );
+        return tests.toArray(new Object[][]{});
+    }
+
+    /**
+     * Test doesMaskCoverVariant() logic
+     *
+     * @param contig chromosome or contig name
+     * @param start  variant context start
+     * @param stop variant context stop
+     * @param maskName mask or filter name
+     * @param maskExtension bases beyond the mask
+     * @param vcBeforeLoc if true, variant context is before the genome location; if false, the converse is true.
+     * @param expectedValue  return the expected return value from doesMaskCoverVariant()
+     */
+    @Test(dataProvider = "VariantMaskData")
+    public void TestDoesMaskCoverVariant(final String contig, final int start, final int stop, final String maskName, final int maskExtension,
+                                         final boolean vcBeforeLoc, final boolean expectedValue) {
+
+        // Build VariantContext
+        final byte[] allele1 = Utils.dupBytes((byte) 'A', 1);
+        final byte[] allele2 = Utils.dupBytes((byte) 'T', 2);
+
+        final List<Allele> alleles = new ArrayList<Allele>(2);
+        final Allele ref = Allele.create(allele1, true);
+        final Allele alt = Allele.create(allele2, false);
+        alleles.add(ref);
+        alleles.add(alt);
+
+        final VariantContext vc = new VariantContextBuilder("test", contig, start, stop, alleles).filter(vcFilter).make();
+
+        boolean coversVariant = VariantFiltration.doesMaskCoverVariant(vc, genomeLoc, maskName, maskExtension, vcBeforeLoc);
+        Assert.assertEquals(coversVariant, expectedValue);
+    }
+}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/tools/walkers/qc/CheckPileupIntegrationTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/tools/walkers/qc/CheckPileupIntegrationTest.java
index eae4dec..2648ed8 100644
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/tools/walkers/qc/CheckPileupIntegrationTest.java
+++ b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/tools/walkers/qc/CheckPileupIntegrationTest.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/tools/walkers/qc/CountReadsUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/tools/walkers/qc/CountReadsUnitTest.java
index e79edfd..8896460 100644
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/tools/walkers/qc/CountReadsUnitTest.java
+++ b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/tools/walkers/qc/CountReadsUnitTest.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/tools/walkers/qc/DictionaryConsistencyIntegrationTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/tools/walkers/qc/DictionaryConsistencyIntegrationTest.java
index 69e623e..f29cb80 100644
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/tools/walkers/qc/DictionaryConsistencyIntegrationTest.java
+++ b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/tools/walkers/qc/DictionaryConsistencyIntegrationTest.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/tools/walkers/qc/FlagStatIntegrationTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/tools/walkers/qc/FlagStatIntegrationTest.java
index 7a88681..e86c330 100644
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/tools/walkers/qc/FlagStatIntegrationTest.java
+++ b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/tools/walkers/qc/FlagStatIntegrationTest.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/tools/walkers/qc/PileupWalkerIntegrationTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/tools/walkers/qc/PileupWalkerIntegrationTest.java
index 38154bd..a64efaf 100644
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/tools/walkers/qc/PileupWalkerIntegrationTest.java
+++ b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/tools/walkers/qc/PileupWalkerIntegrationTest.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/tools/walkers/readutils/ClipReadsWalkersIntegrationTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/tools/walkers/readutils/ClipReadsWalkersIntegrationTest.java
index 29f4621..53acd51 100644
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/tools/walkers/readutils/ClipReadsWalkersIntegrationTest.java
+++ b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/tools/walkers/readutils/ClipReadsWalkersIntegrationTest.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -47,22 +47,22 @@ public class ClipReadsWalkersIntegrationTest extends WalkerTest {
     }
 
     final static String Q10ClipOutput = "b29c5bc1cb9006ed9306d826a11d444f";
-    @Test public void testQClip0() { testClipper("clipQSum0", "-QT 0", "117a4760b54308f81789c39b1c9de578", "12be03c817d94bab88457e5afe74256a"); }
-    @Test public void testQClip2() { testClipper("clipQSum2", "-QT 2", Q10ClipOutput, "1cfc9da4867765c1e5b5bd6326984634"); }
-    @Test public void testQClip10() { testClipper("clipQSum10", "-QT 10", "b29c5bc1cb9006ed9306d826a11d444f", "1cfc9da4867765c1e5b5bd6326984634"); }
-    @Test public void testQClip20() { testClipper("clipQSum20", "-QT 20", "6c3434dce66ae5c9eeea502f10fb9bee", "0bcfd177fe4be422898eda8e161ebd6c"); }
+    @Test public void testQClip0() { testClipper("clipQSum0", "-QT 0", "117a4760b54308f81789c39b1c9de578", "bcf0d1e13537f764f006ef6d9b401ea7"); }
+    @Test public void testQClip2() { testClipper("clipQSum2", "-QT 2", Q10ClipOutput, "27847d330b962e60650df23b6efc8c3c"); }
+    @Test public void testQClip10() { testClipper("clipQSum10", "-QT 10", "b29c5bc1cb9006ed9306d826a11d444f", "27847d330b962e60650df23b6efc8c3c"); }
+    @Test public void testQClip20() { testClipper("clipQSum20", "-QT 20", "6c3434dce66ae5c9eeea502f10fb9bee", "f89ec5439e88f5a75433150da0069034"); }
 
-    @Test public void testClipRange1() { testClipper("clipRange1", "-CT 1-5", "b5acd753226e25b1e088838c1aab9117", "aed836c97c6383dd80e39a093cc25e08"); }
-    @Test public void testClipRange2() { testClipper("clipRange2", "-CT 1-5,11-15", "be4fcad5b666a5540028b774169cbad7", "5f6e08bd44d6faf5b85cde5d4ec1a36f"); }
+    @Test public void testClipRange1() { testClipper("clipRange1", "-CT 1-5", "b5acd753226e25b1e088838c1aab9117", "987007f6e430cad4cb4a8d1cc1f45d91"); }
+    @Test public void testClipRange2() { testClipper("clipRange2", "-CT 1-5,11-15", "be4fcad5b666a5540028b774169cbad7", "ec4cf54ed50a6baf69dbf98782c19aeb"); }
 
-    @Test public void testClipSeq() { testClipper("clipSeqX", "-X CCCCC", "db199bd06561c9f2122f6ffb07941fbc", "f3cb42759428df80d06e9789f9f9f762"); }
-    @Test public void testClipSeqFile() { testClipper("clipSeqXF", "-XF " + privateTestDir + "seqsToClip.fasta", "d011a3152b31822475afbe0281491f8d", "44658c018378467f809b443d047d5778"); }
+    @Test public void testClipSeq() { testClipper("clipSeqX", "-X CCCCC", "db199bd06561c9f2122f6ffb07941fbc", "a9cf540e4ed2514061248a878e09a09c"); }
+    @Test public void testClipSeqFile() { testClipper("clipSeqXF", "-XF " + privateTestDir + "seqsToClip.fasta", "d011a3152b31822475afbe0281491f8d", "906871df304dd966682e5798d59fc86b"); }
 
-    @Test public void testClipMulti() { testClipper("clipSeqMulti", "-QT 10 -CT 1-5 -XF " + privateTestDir + "seqsToClip.fasta -X CCCCC", "a23187bd9bfb06557f799706d98441de", "bae38f83eb9b63857f5e6e3c6e62f80c"); }
+    @Test public void testClipMulti() { testClipper("clipSeqMulti", "-QT 10 -CT 1-5 -XF " + privateTestDir + "seqsToClip.fasta -X CCCCC", "a23187bd9bfb06557f799706d98441de", "b41995fea04034ca0427c4a71504ef83"); }
 
-    @Test public void testClipNs() { testClipper("testClipNs", "-QT 10 -CR WRITE_NS", Q10ClipOutput, "1cfc9da4867765c1e5b5bd6326984634"); }
-    @Test public void testClipQ0s() { testClipper("testClipQs", "-QT 10 -CR WRITE_Q0S", Q10ClipOutput, "3b32da2eaab7a2d4729fdb486cedbb2f"); }
-    @Test public void testClipSoft() { testClipper("testClipSoft", "-QT 10 -CR SOFTCLIP_BASES", Q10ClipOutput, "9d355b0f6d2076178e92bd7fcd8f5adb"); }
+    @Test public void testClipNs() { testClipper("testClipNs", "-QT 10 -CR WRITE_NS", Q10ClipOutput, "27847d330b962e60650df23b6efc8c3c"); }
+    @Test public void testClipQ0s() { testClipper("testClipQs", "-QT 10 -CR WRITE_Q0S", Q10ClipOutput, "195b8bdfc0186fdca742764aa9b06363"); }
+    @Test public void testClipSoft() { testClipper("testClipSoft", "-QT 10 -CR SOFTCLIP_BASES", Q10ClipOutput, "08d16051be0b3fa3453eb1e6ca48b098"); }
 
     @Test
     public void testUseOriginalQuals() {
@@ -74,7 +74,7 @@ public class ClipReadsWalkersIntegrationTest extends WalkerTest {
                         " -OQ -QT 4 -CR WRITE_Q0S" +
                         " -o %s -os %s",
                 2,
-                Arrays.asList("c83b4e2ade8654a2818fe9d405f07662", "55c01ccc2e84481b22d3632cdb06c8ba"));
+                Arrays.asList("a2819d54b2110150e38511f5a55db91d", "55c01ccc2e84481b22d3632cdb06c8ba"));
         executeTest("clipOriginalQuals", spec);
     }
 }
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/tools/walkers/readutils/PrintReadsIntegrationTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/tools/walkers/readutils/PrintReadsIntegrationTest.java
index 0213940..cc436e1 100644
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/tools/walkers/readutils/PrintReadsIntegrationTest.java
+++ b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/tools/walkers/readutils/PrintReadsIntegrationTest.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -32,6 +32,7 @@ import org.testng.annotations.Test;
 
 import java.util.ArrayList;
 import java.util.Arrays;
+import java.util.Collections;
 import java.util.List;
 
 public class PrintReadsIntegrationTest extends WalkerTest {
@@ -58,26 +59,27 @@ public class PrintReadsIntegrationTest extends WalkerTest {
     @DataProvider(name = "PRTest")
     public Object[][] createPrintReadsTestData() {
         return new Object[][]{
-                {new PRTest(hg18Reference, new String[]{"HiSeq.1mb.bam"}, "", "fa9c66f66299fe5405512ac36ec9d0f2")},
-                {new PRTest(hg18Reference, new String[]{"HiSeq.1mb.bam"}, " -compress 0", "488eb22abc31c6af7cbb1a3d41da1507")},
-                {new PRTest(hg18Reference, new String[]{"HiSeq.1mb.bam"}, " -simplifyBAM", "1510dc4429f3ed49caf96da41e8ed396")},
-                {new PRTest(hg18Reference, new String[]{"HiSeq.1mb.bam"}, " -n 10", "0e3d1748ad1cb523e3295cab9d09d8fc")},
+                {new PRTest(hg18Reference, new String[]{"HiSeq.1mb.bam"}, "", "0aa3505ba61e05663e629011dd54e423")},
+                {new PRTest(hg18Reference, new String[]{"HiSeq.1mb.bam"}, " -compress 0", "0aec10d19e0dbdfe1d0cbb3eddaf623a")},
+                {new PRTest(hg18Reference, new String[]{"HiSeq.1mb.bam"}, " -simplifyBAM", "c565d9cd4838a313e7bdb30530c0cf71")},
+                {new PRTest(hg18Reference, new String[]{"HiSeq.1mb.bam"}, " -n 10", "917440a38aba707ec0e012168590981a")},
                 // See: GATKBAMIndex.getStartOfLastLinearBin(), BAMScheduler.advance(), IntervalOverlapFilteringIterator.advance()
-                {new PRTest(b37KGReference, new String[]{"unmappedFlagReadsInLastLinearBin.bam"}, "", "d7f23fd77d7dc7cb50d3397f644c6d8a")},
-                {new PRTest(b37KGReference, new String[]{"unmappedFlagReadsInLastLinearBin.bam"}, " -L 1", "c601db95b20248d012b0085347fcb6d1")},
-                {new PRTest(b37KGReference, new String[]{"unmappedFlagReadsInLastLinearBin.bam"}, " -L unmapped", "2d32440e47e8d9d329902fe573ad94ce")},
-                {new PRTest(b37KGReference, new String[]{"unmappedFlagReadsInLastLinearBin.bam"}, " -L 1 -L unmapped", "c601db95b20248d012b0085347fcb6d1")},
-                {new PRTest(b37KGReference, new String[]{"oneReadAllInsertion.bam"}, "",  "349650b6aa9e574b48a2a62627f37c7d")},
-                {new PRTest(b37KGReference, new String[]{"NA12878.1_10mb_2_10mb.bam"}, "",  "0c1cbe67296637a85e80e7a182f828ab")},
+                {new PRTest(b37KGReference, new String[]{"unmappedFlagReadsInLastLinearBin.bam"}, "", "0b58c903f54e8543a8b2ce1439aa769b")},
+                {new PRTest(b37KGReference, new String[]{"unmappedFlagReadsInLastLinearBin.bam"}, " -L 1", "5b1154cc81dba6bcfe76188e4df8d79c")},
+                {new PRTest(b37KGReference, new String[]{"unmappedFlagReadsInLastLinearBin.cram"}, " -L 1:10001 -L GL000192.1:500204", "e9caf8a0e6ec947cdcbdfc48a4292eb5")},
+                {new PRTest(b37KGReference, new String[]{"unmappedFlagReadsInLastLinearBin.bam"}, " -L unmapped", "cbd3d1d50c8674f79033aa8c36aa3cd1")},
+                {new PRTest(b37KGReference, new String[]{"unmappedFlagReadsInLastLinearBin.bam"}, " -L 1 -L unmapped", "5b1154cc81dba6bcfe76188e4df8d79c")},
+                {new PRTest(b37KGReference, new String[]{"oneReadAllInsertion.bam"}, "",  "e212d1799ae797e781b17e630656a9a1")},
+                {new PRTest(b37KGReference, new String[]{"NA12878.1_10mb_2_10mb.bam"}, "",  "0387c61303140d8899fcbfdd3e72ed80")},
                 // Tests for filtering options
                 {new PRTest(b37KGReference, new String[]{"NA12878.1_10mb_2_10mb.bam", "NA20313.highCoverageRegion.bam"},
-                        "",  "b3ae15c8af33fd5badc1a29e089bdaac")},
+                        "",  "ad56da66be0bdab5a8992de9617ae6a5")},
                 {new PRTest(b37KGReference, new String[]{"NA12878.1_10mb_2_10mb.bam", "NA20313.highCoverageRegion.bam"},
-                        " -readGroup SRR359098",  "8bd867b30539524daa7181efd9835a8f")},
+                        " -readGroup SRR359098",  "c3bfe28722a665e666098dbb7048a9f1")},
                 {new PRTest(b37KGReference, new String[]{"NA12878.1_10mb_2_10mb.bam", "NA20313.highCoverageRegion.bam"},
-                        " -readGroup 20FUK.3 -sn NA12878",  "93a7bc1b2b1cd27815ed1666cbb4d0cb")},
+                        " -readGroup 20FUK.3 -sn NA12878",  "8191f8d635d00b1f4d0993b785cc46c5")},
                 {new PRTest(b37KGReference, new String[]{"NA12878.1_10mb_2_10mb.bam", "NA20313.highCoverageRegion.bam"},
-                        " -sn na12878",  "52e99cfcf03ff46285d1ba302f8df964")},
+                        " -sn na12878",  "92a85b4223ec45e114f12a1fe6ebbaeb")},
         };
     }
 
@@ -98,7 +100,7 @@ public class PrintReadsIntegrationTest extends WalkerTest {
                         params.args +
                         " --no_pg_tag" +
                         " -o %s",
-                Arrays.asList(params.md5));
+                Collections.singletonList(params.md5));
         executeTest("testPrintReads-"+params.args, spec).getFirst();
     }
 
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/tools/walkers/readutils/PrintReadsLargeScaleTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/tools/walkers/readutils/PrintReadsLargeScaleTest.java
index 956d70f..5d1b6c0 100644
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/tools/walkers/readutils/PrintReadsLargeScaleTest.java
+++ b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/tools/walkers/readutils/PrintReadsLargeScaleTest.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/tools/walkers/readutils/PrintReadsUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/tools/walkers/readutils/PrintReadsUnitTest.java
index 53b6d42..c0aec43 100644
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/tools/walkers/readutils/PrintReadsUnitTest.java
+++ b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/tools/walkers/readutils/PrintReadsUnitTest.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -28,8 +28,8 @@ package org.broadinstitute.gatk.tools.walkers.readutils;
 import htsjdk.samtools.SAMFileHeader;
 import htsjdk.samtools.SAMRecord;
 import org.broadinstitute.gatk.utils.BaseTest;
-import org.broadinstitute.gatk.engine.contexts.ReferenceContext;
-import org.broadinstitute.gatk.utils.sam.ArtificialReadsTraversal;
+import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
+import org.broadinstitute.gatk.engine.traversals.ArtificialReadsTraversal;
 import org.broadinstitute.gatk.utils.sam.ArtificialGATKSAMFileWriter;
 import org.broadinstitute.gatk.utils.sam.ArtificialSAMUtils;
 import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/tools/walkers/readutils/ReadAdaptorTrimmerIntegrationTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/tools/walkers/readutils/ReadAdaptorTrimmerIntegrationTest.java
deleted file mode 100644
index 65ca2e7..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/tools/walkers/readutils/ReadAdaptorTrimmerIntegrationTest.java
+++ /dev/null
@@ -1,60 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.tools.walkers.readutils;
-
-import org.broadinstitute.gatk.engine.walkers.WalkerTest;
-import org.testng.annotations.Test;
-
-import java.util.Arrays;
-import java.util.Collections;
-
-/**
- * Created with IntelliJ IDEA.
- * User: delangel
- * Date: 4/13/13
- * Time: 7:28 AM
- * To change this template use File | Settings | File Templates.
- */
-public class ReadAdaptorTrimmerIntegrationTest extends WalkerTest {
-    private String getBaseCommand(final String BAM) {
-        return  "-T ReadAdaptorTrimmer -R " + b37KGReference +
-                " -I " + privateTestDir + BAM +
-                " -o %s";
-    }
-
-    @Test
-    public void testBasicTrimmer() {
-        WalkerTestSpec spec = new WalkerTestSpec( getBaseCommand("shortInsertTest.bam"),  1, Arrays.asList("1d42414e12b45d44e6f396d97d0f60fe"));
-        executeTest(String.format("testBasicTrimmer"), spec);
-    }
-
-    @Test
-    public void testSkippingBadPairs() {
-        WalkerTestSpec spec = new WalkerTestSpec( getBaseCommand("shortInsertTest2.bam")+" -removeUnpairedReads",  1, Arrays.asList("5e796345502fbfc31134f7736ce68868"));
-        executeTest(String.format("testSkippingBadPairs"), spec);
-    }
-
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/tools/walkers/readutils/SplitSamFileIntegrationTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/tools/walkers/readutils/SplitSamFileIntegrationTest.java
new file mode 100644
index 0000000..87589a5
--- /dev/null
+++ b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/tools/walkers/readutils/SplitSamFileIntegrationTest.java
@@ -0,0 +1,60 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.tools.walkers.readutils;
+
+import org.broadinstitute.gatk.engine.walkers.WalkerTest;
+import org.testng.annotations.Test;
+
+import java.io.File;
+import java.util.Collections;
+
+public class SplitSamFileIntegrationTest extends WalkerTest {
+
+    @Test
+    public void testSplitSamFile() {
+        final String prefix = "splitsam";
+        WalkerTestSpec spec = new WalkerTestSpec(
+                "-T SplitSamFile" +
+                        " -R " + b37KGReference +
+                        " -I " + privateTestDir+"/CEUTrio.HiSeq.b37.MT.1_50.bam" +
+                        " --outputRoot " + prefix,
+                Collections.<String>emptyList()
+        );
+        addSplitOutput(spec, prefix, "NA12878", "3e28b666fb673be138eca9bd3db9520b");
+        addSplitOutput(spec, prefix, "NA12891", "af01069bc3da4252ce8417a03d11f48b");
+        addSplitOutput(spec, prefix, "NA12892", "cfa1cb6aaca03900160bd5627f4f698b");
+        executeTest("testSplitSamFile", spec);
+    }
+
+    private void addSplitOutput(final WalkerTestSpec spec, final String outputPrefix, final String sample, final String md5) {
+        final File outputFile = new File(outputPrefix + sample + ".bam");
+        spec.addAuxFile(md5, outputFile);
+
+        //The AuxFile mechanism will ensure the bam is deleted, but it doesn't know about indices
+        new File(outputFile.getAbsolutePath() + ".bai").deleteOnExit();
+        new File(outputFile.getAbsolutePath().replaceAll("bam$", ".bai")).deleteOnExit();
+    }
+}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/tools/walkers/rnaseq/ASEReadCounterIntegrationTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/tools/walkers/rnaseq/ASEReadCounterIntegrationTest.java
new file mode 100644
index 0000000..d656aba
--- /dev/null
+++ b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/tools/walkers/rnaseq/ASEReadCounterIntegrationTest.java
@@ -0,0 +1,112 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.tools.walkers.rnaseq;
+
+import org.broadinstitute.gatk.engine.walkers.WalkerTest;
+import org.testng.annotations.Test;
+
+import java.util.Arrays;
+
+/**
+ * Test all different parameters.
+ */
+public class ASEReadCounterIntegrationTest extends WalkerTest {
+
+    @Test
+    public void testASEReadCounterWithHighMQ() {
+        WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec(
+                "-T ASEReadCounter -R " + b37KGReference + " -I " + privateTestDir + "NA12878.RNAseq.bam -sites "+privateTestDir +"NA12878.chr20_2444518_2637800.RNAseq.SYNONYMOUS_CODING.vcf -mmq 60 -o %s -U ALLOW_N_CIGAR_READS", 1,
+                Arrays.asList("eec421405a4a570751821d158734020e"));
+        executeTest("test high mq with no read passing", spec);
+    }
+
+    @Test
+    public void testASEReadCounterWithLowMQ() {
+        WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec(
+                "-T ASEReadCounter -R " + b37KGReference + " -I " + privateTestDir + "NA12878.RNAseq.bam -sites "+privateTestDir +"NA12878.chr20_2444518_2637800.RNAseq.SYNONYMOUS_CODING.vcf -mmq 1 -o %s -U ALLOW_N_CIGAR_READS", 1,
+                Arrays.asList("4f1144be0bb2c4adeae00625afd04cda"));
+        executeTest("test high mq with no read passing", spec);
+    }
+
+    @Test
+    public void testASEReadCounterWithLowMQNoDedup() {
+        WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec(
+                "-T ASEReadCounter -R " + b37KGReference + " -I " + privateTestDir + "NA12878.RNAseq.bam -sites "+privateTestDir +"NA12878.chr20_2444518_2637800.RNAseq.SYNONYMOUS_CODING.vcf -mmq 10 -o %s -U ALLOW_N_CIGAR_READS -drf DuplicateRead", 1,
+                Arrays.asList("226021673310f28d6520d7f3cfe8cb4b"));
+        executeTest("test high mq with no read passing", spec);
+    }
+
+    @Test
+    public void testASEReadCounterWithHighMQLowBQ() {
+        WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec(
+                "-T ASEReadCounter -R " + b37KGReference + " -I " + privateTestDir + "NA12878.RNAseq.bam -sites "+privateTestDir +"NA12878.chr20_2444518_2637800.RNAseq.SYNONYMOUS_CODING.vcf -mmq 60 -mbq 10 -o %s -U ALLOW_N_CIGAR_READS", 1,
+                Arrays.asList("f86bf14ca3a2cc0114d6a11de0cd9448"));
+        executeTest("test high mq with no read passing", spec);
+    }
+
+    @Test
+    public void testASEReadCounterWithCountOverlaps() {
+        WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec(
+                "-T ASEReadCounter -R " + b37KGReference + " -I " + privateTestDir + "NA12878.RNAseq.bam -sites "+privateTestDir +"NA12878.chr20_2444518_2637800.RNAseq.SYNONYMOUS_CODING.vcf -mmq 60 -mbq 10 -o %s -U ALLOW_N_CIGAR_READS -overlap COUNT_FRAGMENTS", 1,
+                Arrays.asList("bfaeaaa8c000eca82f703225bf2257a1"));
+        executeTest("test high mq with no read passing", spec);
+    }
+
+    @Test
+    public void testASEReadCounterWithCountReads() {
+        WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec(
+                "-T ASEReadCounter -R " + b37KGReference + " -I " + privateTestDir + "NA12878.RNAseq.bam -sites "+privateTestDir +"NA12878.chr20_2444518_2637800.RNAseq.SYNONYMOUS_CODING.vcf -mmq 60 -mbq 10 -o %s -U ALLOW_N_CIGAR_READS -overlap COUNT_READS", 1,
+                Arrays.asList("a9b420fd12a9162b31a48842c4081a1f"));
+        executeTest("test high mq with no read passing", spec);
+    }
+
+
+    @Test
+    public void testASEReadCounterMinDepth70() {
+        WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec(
+                "-T ASEReadCounter -R " + b37KGReference + " -I " + privateTestDir + "NA12878.RNAseq.bam -sites "+privateTestDir +"NA12878.chr20_2444518_2637800.RNAseq.SYNONYMOUS_CODING.vcf -mmq 60 -mbq 10 -o %s -U ALLOW_N_CIGAR_READS -minDepth 70", 1,
+                Arrays.asList("79b99bc8a1c1c58ac860f79a11f93086"));
+        executeTest("test high mq with no read passing", spec);
+    }
+
+    @Test
+    public void testASEReadCounterFileFormat() {
+        WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec(
+                "-T ASEReadCounter -R " + b37KGReference + " -I " + privateTestDir + "NA12878.RNAseq.bam -sites "+privateTestDir +"NA12878.chr20_2444518_2637800.RNAseq.SYNONYMOUS_CODING.vcf -mmq 60 -mbq 10 -o %s -U ALLOW_N_CIGAR_READS --outputFormat csv", 1,
+                Arrays.asList("2c7c531018ab353e6874ee2da7980986"));
+        executeTest("test high mq with no read passing", spec);
+    }
+
+
+
+
+
+
+
+
+
+
+}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/tools/walkers/variantutils/FilterLiftedVariantsUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/tools/walkers/variantutils/FilterLiftedVariantsUnitTest.java
deleted file mode 100644
index 847c8f1..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/tools/walkers/variantutils/FilterLiftedVariantsUnitTest.java
+++ /dev/null
@@ -1,54 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.tools.walkers.variantutils;
-
-import org.broadinstitute.gatk.utils.BaseTest;
-import htsjdk.variant.variantcontext.Allele;
-import htsjdk.variant.variantcontext.VariantContext;
-import htsjdk.variant.variantcontext.VariantContextBuilder;
-import org.testng.Assert;
-import org.testng.annotations.Test;
-
-import java.util.ArrayList;
-import java.util.List;
-
-
-public class FilterLiftedVariantsUnitTest extends BaseTest {
-
-    @Test
-    public void testIndelAtEndOfContig() {
-
-        final List<Allele> alleles = new ArrayList<>(2);
-        alleles.add(Allele.create("AAAAA", true));
-        alleles.add(Allele.create("A", false));
-        final VariantContext vc = new VariantContextBuilder("test", "1", 10, 14, alleles).make();
-
-        final FilterLiftedVariants filter = new FilterLiftedVariants();
-
-        Assert.assertFalse(filter.filterOrWrite(new byte[]{'A'}, vc));
-    }
-
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/tools/walkers/variantutils/SelectVariantsUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/tools/walkers/variantutils/SelectVariantsUnitTest.java
index 117f507..df61b7c 100644
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/tools/walkers/variantutils/SelectVariantsUnitTest.java
+++ b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/tools/walkers/variantutils/SelectVariantsUnitTest.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -41,20 +41,22 @@ import java.util.List;
 
 public class SelectVariantsUnitTest extends BaseTest {
 
-    //////////////////////////////////////////
-    // Tests for maxIndelSize functionality //
-    //////////////////////////////////////////
+    ///////////////////////////////////////////////////////////
+    // Tests for maxIndelSize and minIndelSize functionality //
+    ///////////////////////////////////////////////////////////
 
-    @DataProvider(name = "MaxIndelSize")
-    public Object[][] MaxIndelSizeTestData() {
+    @DataProvider(name = "MaxMinIndelSize")
+    public Object[][] MaxMinIndelSizeTestData() {
 
         List<Object[]> tests = new ArrayList<Object[]>();
 
         for ( final int size : Arrays.asList(1, 3, 10, 100) ) {
             for ( final int otherSize : Arrays.asList(0, 1) ) {
                 for ( final int max : Arrays.asList(0, 1, 5, 50, 100000) ) {
-                    for ( final String op : Arrays.asList("D", "I") ) {
-                        tests.add(new Object[]{size, otherSize, max, op});
+                    for ( final int min : Arrays.asList(0, 1, 5, 50) ) {
+                        for (final String op : Arrays.asList("D", "I")) {
+                            tests.add(new Object[]{size, otherSize, max, min, op});
+                        }
                     }
                 }
             }
@@ -63,8 +65,8 @@ public class SelectVariantsUnitTest extends BaseTest {
         return tests.toArray(new Object[][]{});
     }
 
-    @Test(dataProvider = "MaxIndelSize")
-    public void maxIndelSizeTest(final int size, final int otherSize, final int max, final String op) {
+    @Test(dataProvider = "MaxMinIndelSize")
+    public void maxIndelSizeTest(final int size, final int otherSize, final int max, final int min, final String op) {
 
         final byte[] largerAllele = Utils.dupBytes((byte) 'A', size+1);
         final byte[] smallerAllele = Utils.dupBytes((byte) 'A', 1);
@@ -74,15 +76,11 @@ public class SelectVariantsUnitTest extends BaseTest {
         final Allele alt = Allele.create(op.equals("D") ? smallerAllele : largerAllele, false);
         alleles.add(ref);
         alleles.add(alt);
-        if ( otherSize > 0 && otherSize != size ) {
-            final Allele otherAlt = Allele.create(op.equals("D") ? Utils.dupBytes((byte) 'A', size-otherSize+1) : Utils.dupBytes((byte) 'A', otherSize+1), false);
-            alleles.add(otherAlt);
-        }
 
         final VariantContext vc = new VariantContextBuilder("test", "1", 10, 10 + ref.length() - 1, alleles).make();
 
-        boolean hasTooLargeIndel = SelectVariants.containsIndelLargerThan(vc, max);
-        Assert.assertEquals(hasTooLargeIndel, size > max);
+        boolean hasIndelTooLargeOrSmall = SelectVariants.containsIndelLargerOrSmallerThan(vc, max, min);
+        Assert.assertEquals(hasIndelTooLargeOrSmall, size > max || size < min);
     }
 
 }
\ No newline at end of file
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/tools/walkers/variantutils/VCFIntegrationTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/tools/walkers/variantutils/VCFIntegrationTest.java
new file mode 100644
index 0000000..9e893a0
--- /dev/null
+++ b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/tools/walkers/variantutils/VCFIntegrationTest.java
@@ -0,0 +1,395 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.tools.walkers.variantutils;
+
+import htsjdk.tribble.AbstractFeatureReader;
+import htsjdk.tribble.Tribble;
+import htsjdk.tribble.index.AbstractIndex;
+import htsjdk.tribble.index.Index;
+import htsjdk.tribble.index.IndexFactory;
+import htsjdk.tribble.index.interval.IntervalTreeIndex;
+import htsjdk.tribble.index.linear.LinearIndex;
+import htsjdk.tribble.index.tabix.TabixIndex;
+import htsjdk.tribble.util.TabixUtils;
+import htsjdk.variant.vcf.VCFCodec;
+import org.apache.commons.io.FileUtils;
+import org.broadinstitute.gatk.engine.GATKVCFUtils;
+import org.broadinstitute.gatk.engine.walkers.WalkerTest;
+import org.broadinstitute.gatk.utils.BaseTest;
+import org.broadinstitute.gatk.utils.variant.GATKVCFIndexType;
+import org.testng.Assert;
+import org.testng.TestException;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.List;
+
+public class VCFIntegrationTest extends WalkerTest {
+
+    @Test(enabled = true)
+    public void testReadingAndWritingWitHNoChanges() {
+
+        String md5ofInputVCF = "3dc9ac85f2c0541df9bc57b4d81f480b";
+        String testVCF = privateTestDir + "vcf4.1.example.vcf";
+
+        String baseCommand = "-R " + b37KGReference + " --no_cmdline_in_header -o %s ";
+
+        String test1 = baseCommand + "-T VariantAnnotator --variant " + testVCF + " -L " + testVCF;
+        WalkerTestSpec spec1 = new WalkerTestSpec(test1, 1, Arrays.asList(md5ofInputVCF));
+        List<File> result = executeTest("Test Variant Annotator with no changes", spec1).getFirst();
+
+        String test2 = baseCommand + "-T VariantsToVCF --variant " + result.get(0).getAbsolutePath();
+        WalkerTestSpec spec2 = new WalkerTestSpec(test2, 1, Arrays.asList(md5ofInputVCF));
+        executeTest("Test Variants To VCF from new output", spec2);
+    }
+
+    @Test(enabled = true)
+    public void testReadingAndWritingBreakpointAlleles() {
+        String testVCF = privateTestDir + "breakpoint-example.vcf";
+
+        String baseCommand = "-R " + b37KGReference + " --no_cmdline_in_header -o %s ";
+
+        String test1 = baseCommand + "-T SelectVariants -V " + testVCF;
+        WalkerTestSpec spec1 = new WalkerTestSpec(test1, 1, Arrays.asList("b8b18a9a015cba1f3a44af532bf45338"));
+        executeTest("Test reading and writing breakpoint VCF", spec1);
+    }
+
+    @Test(enabled = true)
+    public void testReadingLowerCaseBases() {
+        String testVCF = privateTestDir + "lowercaseBases.vcf";
+
+        String baseCommand = "-R " + b37KGReference + " --no_cmdline_in_header -o %s ";
+
+        String test1 = baseCommand + "-T SelectVariants -V " + testVCF;
+        WalkerTestSpec spec1 = new WalkerTestSpec(test1, 1, Arrays.asList("89daaa81f64e96cf4d9aa0abf9be0b76"));
+        executeTest("Test reading VCF with lower-case bases", spec1);
+    }
+
+    @Test(enabled = true)
+    public void testReadingAndWriting1000GSVs() {
+        String testVCF = privateTestDir + "1000G_SVs.chr1.vcf";
+
+        String baseCommand = "-R " + b37KGReference + " --no_cmdline_in_header -o %s ";
+
+        String test1 = baseCommand + "-T SelectVariants -V " + testVCF;
+        WalkerTestSpec spec1 = new WalkerTestSpec(test1, 1, Arrays.asList("5d91e685c760f7e869cb06596d741116"));
+        executeTest("Test reading and writing 1000G Phase I SVs", spec1);
+    }
+
+    @Test
+    public void testReadingAndWritingSamtools() {
+        String testVCF = privateTestDir + "samtools.vcf";
+
+        String baseCommand = "-R " + b37KGReference + " --no_cmdline_in_header -o %s ";
+
+        String test1 = baseCommand + "-T SelectVariants -V " + testVCF;
+        WalkerTestSpec spec1 = new WalkerTestSpec(test1, 1, Arrays.asList("122340e3dc333d2b4b79c5c0c443a3fe"));
+        executeTest("Test reading and writing samtools vcf", spec1);
+    }
+
+    @Test
+    public void testWritingSamtoolsWExBCFExample() {
+        String testVCF = privateTestDir + "ex2.vcf";
+        String baseCommand = "-R " + b36KGReference + " --no_cmdline_in_header -o %s ";
+        String test1 = baseCommand + "-T SelectVariants -V " + testVCF;
+        WalkerTestSpec spec1 = new WalkerTestSpec(test1, 1, Arrays.asList("db565efb14b2fe5f00a11762751d2476"));
+        executeTest("Test writing samtools WEx BCF example", spec1);
+    }
+
+    @Test(enabled = true)
+    public void testReadingSamtoolsWExBCFExample() {
+        String testVCF = privateTestDir + "ex2.bcf";
+        String baseCommand = "-R " + b36KGReference + " --no_cmdline_in_header -o %s ";
+        String test1 = baseCommand + "-T SelectVariants -V " + testVCF;
+        WalkerTestSpec spec1 = new WalkerTestSpec(test1, 1, Arrays.asList("0ca1a078d4801886ef4abac327df7104"));
+        executeTest("Test reading samtools WEx BCF example", spec1);
+    }
+
+    //
+    //
+    // Tests to ensure that -U LENIENT_VCF_PROCESSING permits reading VCFs without headers
+    //
+    //
+
+    @Test
+    public void testFailingOnVCFWithoutHeaders() {
+        runVCFWithoutHeaders("", "", IllegalStateException.class, false);
+    }
+
+    @Test
+    public void testPassingOnVCFWithoutHeadersWithLenientProcessing() {
+        runVCFWithoutHeaders("-U LENIENT_VCF_PROCESSING", "a8f4be8ad9820286ea13a28a675133f1", null, true);
+    }
+
+    private void runVCFWithoutHeaders(final String moreArgs, final String expectedMD5, final Class expectedException, final boolean disableBCF) {
+        final String testVCF = privateTestDir + "vcfexample2.noHeader.vcf";
+        final String baseCommand = "-R " + b37KGReference
+                + " --no_cmdline_in_header -o %s "
+                + "-T VariantsToVCF -V " + testVCF + " " + moreArgs;
+        WalkerTestSpec spec1 = expectedException != null
+                ? new WalkerTestSpec(baseCommand, 1, expectedException)
+                : new WalkerTestSpec(baseCommand, 1, Arrays.asList(expectedMD5));
+        if ( disableBCF )
+            spec1.disableShadowBCF();
+        executeTest("Test reading VCF without header lines with additional args " + moreArgs, spec1);
+    }
+
+    //
+    //
+    // IndexCreator tests
+    //
+    //
+
+    private class VCFIndexCreatorTest extends TestDataProvider {
+        private final GATKVCFIndexType type;
+        private final int parameter;
+
+        private VCFIndexCreatorTest(GATKVCFIndexType type, int parameter) {
+            super(VCFIndexCreatorTest.class);
+
+            this.type = type;
+            this.parameter = parameter;
+        }
+
+        public String toString() {
+            return String.format("Index Type %s, Index Parameter %s", type, parameter);
+        }
+
+        public Index getIndex(final File vcfFile) {
+            switch (type) {
+                case DYNAMIC_SEEK : return IndexFactory.createDynamicIndex(vcfFile, new VCFCodec(), IndexFactory.IndexBalanceApproach.FOR_SEEK_TIME);
+                case DYNAMIC_SIZE : return IndexFactory.createDynamicIndex(vcfFile, new VCFCodec(), IndexFactory.IndexBalanceApproach.FOR_SIZE);
+                case LINEAR : return IndexFactory.createLinearIndex(vcfFile, new VCFCodec(), parameter);
+                case INTERVAL : return IndexFactory.createIntervalIndex(vcfFile, new VCFCodec(), parameter);
+                default : throw new TestException("Invalid index type");
+            }
+        }
+    }
+
+    @DataProvider(name = "IndexDataProvider")
+    public Object[][] indexCreatorData() {
+        new VCFIndexCreatorTest(GATKVCFIndexType.DYNAMIC_SEEK, 0);
+        new VCFIndexCreatorTest(GATKVCFIndexType.DYNAMIC_SIZE, 0);
+        new VCFIndexCreatorTest(GATKVCFIndexType.LINEAR, 100);
+        new VCFIndexCreatorTest(GATKVCFIndexType.LINEAR, 10000);
+        new VCFIndexCreatorTest(GATKVCFIndexType.INTERVAL, 20);
+        new VCFIndexCreatorTest(GATKVCFIndexType.INTERVAL, 2000);
+
+        return TestDataProvider.getTests(VCFIndexCreatorTest.class);
+    }
+
+    @Test(dataProvider = "IndexDataProvider")
+    public void testVCFIndexCreation(VCFIndexCreatorTest testSpec) throws NoSuchFieldException, IllegalAccessException, IOException {
+
+        final String logFileName = new String("testVCFIndexCreation.log");
+        final String chromosome = "20";
+        final String commandLine = " -T SelectVariants" +
+                " -R " + b37KGReference +
+                " --no_cmdline_in_header" +
+                " -L " + chromosome +
+                " -V " + b37_NA12878_OMNI +
+                " --variant_index_type " + testSpec.type +
+                " --variant_index_parameter " + testSpec.parameter +
+                " -log " + logFileName +
+                " -o %s";
+        final WalkerTestSpec spec = new WalkerTestSpec(commandLine, 1, Arrays.asList(""));
+        spec.disableShadowBCF();
+        final String name = "testVCFIndexCreation: " + testSpec.toString();
+
+        // execute that test and check if the actual and expected indices are the same
+        executeTestAndCheckIndices(name, chromosome, testSpec, spec);
+
+        // check the log for the warning message
+        File file = new File(logFileName);
+        Assert.assertTrue(FileUtils.readFileToString(file).contains(GATKVCFUtils.DEPRECATED_INDEX_ARGS_MSG));
+    }
+
+    @Test
+    public void testVCFIndexCreationNoArgs() throws NoSuchFieldException, IllegalAccessException {
+
+        final String chromosome = "20";
+        final String commandLine = " -T SelectVariants" +
+                " -R " + b37KGReference +
+                " --no_cmdline_in_header" +
+                " -L " + chromosome +
+                " -V " + b37_NA12878_OMNI +
+                " -o %s";
+        final String name = "testVCFIndexCreationNoArgs";
+        VCFIndexCreatorTest testSpec = new VCFIndexCreatorTest(GATKVCFUtils.DEFAULT_INDEX_TYPE, GATKVCFUtils.DEFAULT_INDEX_PARAMETER);
+        final WalkerTestSpec spec = new WalkerTestSpec(commandLine, 1, Arrays.asList(""));
+        spec.disableShadowBCF();
+
+        // execute that test and check if the actual and expected indices are the same
+        executeTestAndCheckIndices(name, chromosome, testSpec, spec);
+    }
+
+    @Test
+    public void testGVCFIndexCreation() throws NoSuchFieldException, IllegalAccessException {
+
+        final String chromosome = "20";
+        final String commandLine = " -T SelectVariants" +
+                " -R " + b37KGReference +
+                " --no_cmdline_in_header" +
+                " -L " + chromosome +
+                " -V " + b37_NA12878_OMNI +
+                " -o %s";
+        final String name = "testGVCFIndexCreation";
+        VCFIndexCreatorTest testSpec = new VCFIndexCreatorTest(GATKVCFUtils.DEFAULT_GVCF_INDEX_TYPE, GATKVCFUtils.DEFAULT_GVCF_INDEX_PARAMETER);
+        final WalkerTestSpec spec = new WalkerTestSpec(commandLine, Arrays.asList(GATKVCFUtils.GVCF_EXT), Arrays.asList(""));
+        spec.disableShadowBCF();
+
+        // execute that test and check if the actual and expected indices are the same
+        executeTestAndCheckIndices(name, chromosome, testSpec, spec);
+    }
+
+    //
+    //
+    // Block-Compressed Tabix Index Tests
+    //
+    //
+
+    private class BlockCompressedIndexCreatorTest extends TestDataProvider {
+        private final String extension;
+
+        private BlockCompressedIndexCreatorTest(String extension) {
+            super(BlockCompressedIndexCreatorTest.class);
+
+            this.extension = extension;
+        }
+
+        public String toString() {
+            return String.format("File extension %s", extension);
+        }
+    }
+
+    @DataProvider(name = "BlockCompressedIndexDataProvider")
+    public Object[][] blockCompressedIndexCreatorData() {
+        for (final String extension : AbstractFeatureReader.BLOCK_COMPRESSED_EXTENSIONS)
+            new BlockCompressedIndexCreatorTest(".vcf" + extension);
+
+        return TestDataProvider.getTests(BlockCompressedIndexCreatorTest.class);
+    }
+
+    @Test(dataProvider = "BlockCompressedIndexDataProvider")
+    public void testBlockCompressedIndexCreation(BlockCompressedIndexCreatorTest testSpec) throws NoSuchFieldException, IllegalAccessException {
+
+        final String commandLine = " -T SelectVariants" +
+                " -R " + b37KGReference +
+                " --no_cmdline_in_header" +
+                " -L 20" +
+                " -V " + b37_NA12878_OMNI;
+        final String name = "testBlockCompressedIndexCreation: " + testSpec.toString();
+
+        File outVCF = createTempFile("testBlockCompressedIndexCreation", testSpec.extension);
+        final WalkerTestSpec spec = new WalkerTestSpec(commandLine, 1, Arrays.asList(""));
+        spec.disableShadowBCF();
+        spec.setOutputFileLocation(outVCF);
+
+        executeTest(name, spec);
+
+        File outTribbleIdx = new File(outVCF.getAbsolutePath() + Tribble.STANDARD_INDEX_EXTENSION);
+        Assert.assertFalse(outTribbleIdx.exists(), "testBlockCompressedIndexCreation: Want Tabix index but Tribble index exists: " + outTribbleIdx);
+
+        File outTabixIdx = new File(outVCF.getAbsolutePath() + TabixUtils.STANDARD_INDEX_EXTENSION);
+        final Index actualIndex = IndexFactory.loadIndex(outTabixIdx.toString());
+        Assert.assertTrue(actualIndex instanceof TabixIndex, "testBlockCompressedIndexCreation: Want Tabix index but index is not Tabix: " + outTabixIdx);
+    }
+
+    //
+    //
+    // Block-Compressed Input Tests
+    //
+    //
+
+    private class BlockCompressedInputTest extends TestDataProvider {
+        private final String extension;
+
+        private BlockCompressedInputTest(String extension) {
+            super(BlockCompressedInputTest.class);
+
+            this.extension = extension;
+        }
+
+        public String toString() {
+            return String.format("File extension %s", extension);
+        }
+    }
+
+    @DataProvider(name = "BlockCompressedInputDataProvider")
+    public Object[][] blockCompressedInputData() {
+        for (final String extension : AbstractFeatureReader.BLOCK_COMPRESSED_EXTENSIONS)
+            new BlockCompressedInputTest(".vcf" + extension);
+
+        return TestDataProvider.getTests(BlockCompressedInputTest.class);
+    }
+
+    @Test(dataProvider = "BlockCompressedInputDataProvider")
+    public void testBlockCompressedInput(BlockCompressedInputTest testSpec) {
+
+        File inputFile = new File(BaseTest.privateTestDir, "block_compressed_input_test" + testSpec.extension);
+        final String commandLine = " -T SelectVariants" +
+                " -R " + b37KGReference +
+                " --no_cmdline_in_header" +
+                " -V " + inputFile +
+                " -o %s ";
+        final String name = "testBlockCompressedInput: " + testSpec.toString();
+
+        final WalkerTestSpec spec = new WalkerTestSpec(commandLine, 1, Arrays.asList("ce9c0bf31ee9452ac4a12a59d5814545"));
+
+        executeTest(name, spec);
+    }
+
+    private void executeTestAndCheckIndices(final String name, final String chr, final VCFIndexCreatorTest testSpec, final WalkerTestSpec walkerTestSpec)
+            throws NoSuchFieldException, IllegalAccessException {
+
+        File outVCF = executeTest(name, walkerTestSpec).first.get(0);
+        File outIdx = new File(outVCF.getAbsolutePath() + Tribble.STANDARD_INDEX_EXTENSION);
+
+        final Index actualIndex = IndexFactory.loadIndex(outIdx.getAbsolutePath());
+        final Index expectedIndex = testSpec.getIndex(outVCF);
+
+        if (testSpec.type.equals("LINEAR"))
+            Assert.assertTrue(actualIndex instanceof LinearIndex, "Index is not a LinearIndex");
+        else if (testSpec.type.equals("INTERVAL"))
+            Assert.assertTrue(actualIndex instanceof IntervalTreeIndex, "Index is not a IntervalTreeIndex");
+        // dynamic indices ultimately resolve to one of LinearIndex or IntervalTreeIndex
+
+        Assert.assertTrue(GATKVCFUtils.equivalentAbstractIndices((AbstractIndex) actualIndex, (AbstractIndex) expectedIndex), "Indices are not equivalent");
+
+        if (actualIndex instanceof LinearIndex && expectedIndex instanceof LinearIndex) {
+            Assert.assertTrue(GATKVCFUtils.equivalentLinearIndices((LinearIndex) actualIndex, (LinearIndex) expectedIndex, chr), "Linear indices are not equivalent");
+        }
+        else if (actualIndex instanceof IntervalTreeIndex && expectedIndex instanceof IntervalTreeIndex) {
+            Assert.assertTrue(GATKVCFUtils.equivalentIntervalIndices((IntervalTreeIndex) actualIndex, (IntervalTreeIndex) expectedIndex, chr), "Interval indices are not equivalent");
+        }
+        else {
+            Assert.fail("Indices are not of the same type");
+        }
+    }
+}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/AutoFormattingTimeUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/AutoFormattingTimeUnitTest.java
deleted file mode 100644
index 22e9517..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/AutoFormattingTimeUnitTest.java
+++ /dev/null
@@ -1,118 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils;
-
-import org.broadinstitute.gatk.utils.BaseTest;
-import org.broadinstitute.gatk.utils.AutoFormattingTime;
-import org.testng.Assert;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
-
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-import java.util.concurrent.TimeUnit;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
-
-/**
- * UnitTests for the AutoFormatting
- *
- * User: depristo
- * Date: 8/24/12
- * Time: 11:25 AM
- * To change this template use File | Settings | File Templates.
- */
-public class AutoFormattingTimeUnitTest extends BaseTest {
-    @DataProvider(name = "AutoFormattingTimeUnitSelection")
-    public Object[][] makeTimeData() {
-        List<Object[]> tests = new ArrayList<Object[]>();
-        tests.add(new Object[]{TimeUnit.SECONDS.toNanos(10), "s"});
-        tests.add(new Object[]{TimeUnit.MINUTES.toNanos(10), "m"});
-        tests.add(new Object[]{TimeUnit.HOURS.toNanos(10), "h"});
-        tests.add(new Object[]{TimeUnit.DAYS.toNanos(10), "d"});
-        tests.add(new Object[]{TimeUnit.DAYS.toNanos(1000), "w"});
-        return tests.toArray(new Object[][]{});
-    }
-
-    @Test(dataProvider = "AutoFormattingTimeUnitSelection")
-    public void testUnitSelection(final long nano, final String expectedUnit) throws InterruptedException {
-        final AutoFormattingTime time = new AutoFormattingTime(nano);
-        testBasic(time, nano, time.getWidth(), time.getPrecision());
-        Assert.assertTrue(time.toString().endsWith(expectedUnit), "TimeUnit " + time.toString() + " didn't contain expected time unit " + expectedUnit);
-    }
-
-    @Test(dataProvider = "AutoFormattingTimeUnitSelection")
-    public void testSecondsAsDouble(final long nano, final String expectedUnit) throws InterruptedException {
-        final double inSeconds = nano * 1e-9;
-        final long nanoFromSeconds = (long)(inSeconds * 1e9);
-        final AutoFormattingTime time = new AutoFormattingTime(inSeconds);
-        testBasic(time, nanoFromSeconds, time.getWidth(), time.getPrecision());
-    }
-
-    @DataProvider(name = "AutoFormattingTimeWidthAndPrecision")
-    public Object[][] makeTimeWidthAndPrecision() {
-        List<Object[]> tests = new ArrayList<Object[]>();
-        for ( final int width : Arrays.asList(-1, 1, 2, 6, 20) ) {
-            for ( final int precision : Arrays.asList(1, 2) ) {
-                tests.add(new Object[]{100.123456 * 1e9, width, precision});
-                tests.add(new Object[]{0.123456 * 1e9, width, precision});
-            }
-        }
-        return tests.toArray(new Object[][]{});
-    }
-
-    @Test(dataProvider = "AutoFormattingTimeWidthAndPrecision")
-    public void testWidthAndPrecision(final double inSeconds, final int width, final int precision) throws InterruptedException {
-        final AutoFormattingTime time = new AutoFormattingTime(inSeconds, width, precision);
-        final long nanoFromSeconds = (long)(inSeconds * 1e9);
-        testBasic(time, nanoFromSeconds, width, precision);
-        final Matcher match = matchToString(time);
-        match.matches();
-        final String widthString = match.group(1);
-        final String precisionString = match.group(2);
-        if ( width != -1 ) {
-            final int actualWidth = widthString.length() + 1 + precisionString.length();
-            Assert.assertTrue(actualWidth >= width, "width string '" + widthString + "' not >= the expected width " + width);
-        }
-        Assert.assertEquals(precisionString.length(), precision, "precision string '" + precisionString + "' not the expected precision " + precision);
-    }
-
-    private static Matcher matchToString(final AutoFormattingTime time) {
-        Pattern pattern = Pattern.compile("(\\s*\\d*)\\.(\\d*) \\w");
-        return pattern.matcher(time.toString());
-    }
-
-    private static void testBasic(final AutoFormattingTime aft, final long nano, final int expectedWidth, final int expectedPrecision) {
-        Assert.assertEquals(aft.getTimeInNanoSeconds(), nano);
-        assertEqualsDoubleSmart(aft.getTimeInSeconds(), nano * 1e-9, 1e-3, "Time in seconds not within tolerance of nanoSeconds");
-        Assert.assertEquals(aft.getWidth(), expectedWidth);
-        Assert.assertEquals(aft.getPrecision(), expectedPrecision);
-        Assert.assertNotNull(aft.toString(), "TimeUnit toString returned null");
-        final Matcher match = matchToString(aft);
-        Assert.assertTrue(match.matches(), "toString " + aft.toString() + " doesn't match our expected format");
-    }
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/BaseTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/BaseTest.java
deleted file mode 100644
index ca57922..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/BaseTest.java
+++ /dev/null
@@ -1,568 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils;
-
-import htsjdk.tribble.Tribble;
-import htsjdk.tribble.readers.LineIterator;
-import htsjdk.tribble.readers.PositionalBufferedStream;
-import htsjdk.tribble.util.TabixUtils;
-import htsjdk.variant.bcf2.BCF2Codec;
-import htsjdk.variant.variantcontext.Genotype;
-import htsjdk.variant.variantcontext.VariantContext;
-import htsjdk.variant.vcf.VCFCodec;
-import htsjdk.variant.vcf.VCFConstants;
-import htsjdk.variant.vcf.VCFHeader;
-import htsjdk.variant.vcf.VCFHeaderLine;
-import org.apache.log4j.AppenderSkeleton;
-import org.apache.log4j.Level;
-import org.apache.log4j.Logger;
-import org.apache.log4j.PatternLayout;
-import org.apache.log4j.spi.LoggingEvent;
-import org.broadinstitute.gatk.utils.collections.Pair;
-import org.broadinstitute.gatk.utils.commandline.CommandLineUtils;
-import org.broadinstitute.gatk.utils.crypt.CryptUtils;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-import org.broadinstitute.gatk.utils.io.IOUtils;
-import org.broadinstitute.gatk.utils.variant.GATKVCFUtils;
-import org.testng.Assert;
-import org.testng.Reporter;
-import org.testng.SkipException;
-
-import java.io.File;
-import java.io.IOException;
-import java.io.PrintWriter;
-import java.util.*;
-
-/**
- *
- * User: aaron
- * Date: Apr 14, 2009
- * Time: 10:24:30 AM
- *
- * The Broad Institute
- * SOFTWARE COPYRIGHT NOTICE AGREEMENT 
- * This software and its documentation are copyright 2009 by the
- * Broad Institute/Massachusetts Institute of Technology. All rights are reserved.
- *
- * This software is supplied without any warranty or guaranteed support whatsoever. Neither
- * the Broad Institute nor MIT can be responsible for its use, misuse, or functionality.
- *
- */
-
-
-/**
- * @author aaron
- * @version 1.0
- * @date Apr 14, 2009
- * <p/>
- * Class BaseTest
- * <p/>
- * This is the base test class for all of our test cases.  All test cases should extend from this
- * class; it sets up the logger, and resolves the location of directories that we rely on.
- */
- at SuppressWarnings("unchecked")
-public abstract class BaseTest {
-    /** our log, which we want to capture anything from org.broadinstitute.sting */
-    public static final Logger logger = CommandLineUtils.getStingLogger();
-
-    private static final String CURRENT_DIRECTORY = System.getProperty("user.dir");
-    public static final String gatkDirectory = System.getProperty("gatkdir", CURRENT_DIRECTORY) + "/";
-    public static final String baseDirectory = System.getProperty("basedir", CURRENT_DIRECTORY) + "/";
-    public static final String testType = System.getProperty("testType"); // May be null
-    public static final String testTypeSubDirectory = testType == null ? "" : ("/" + testType); // May be empty
-
-    public static final String hg18Reference = "/seq/references/Homo_sapiens_assembly18/v0/Homo_sapiens_assembly18.fasta";
-    public static final String hg19Reference = "/seq/references/Homo_sapiens_assembly19/v1/Homo_sapiens_assembly19.fasta";
-    public static final String b36KGReference = "/humgen/1kg/reference/human_b36_both.fasta";
-    //public static final String b37KGReference = "/Users/depristo/Desktop/broadLocal/localData/human_g1k_v37.fasta";
-    public static final String b37KGReference = "/humgen/1kg/reference/human_g1k_v37.fasta";
-    public static final String b37KGReferenceWithDecoy = "/humgen/gsa-hpprojects/GATK/bundle/current/b37/human_g1k_v37_decoy.fasta";
-    public static final String hg19RefereneWithChrPrefixInChromosomeNames = "/humgen/gsa-hpprojects/GATK/bundle/current/hg19/ucsc.hg19.fasta";
-    public static final String GATKDataLocation = "/humgen/gsa-hpprojects/GATK/data/";
-    public static final String validationDataLocation = GATKDataLocation + "Validation_Data/";
-    public static final String evaluationDataLocation = GATKDataLocation + "Evaluation_Data/";
-    public static final String comparisonDataLocation = GATKDataLocation + "Comparisons/";
-    public static final String annotationDataLocation = GATKDataLocation + "Annotations/";
-
-    public static final String b37GoodBAM = validationDataLocation + "/CEUTrio.HiSeq.b37.chr20.10_11mb.bam";
-    public static final String b37GoodNA12878BAM = validationDataLocation + "/NA12878.HiSeq.WGS.bwa.cleaned.recal.hg19.20.bam";
-    public static final String b37_NA12878_OMNI = validationDataLocation + "/NA12878.omni.vcf";
-
-    public static final String dbsnpDataLocation = GATKDataLocation;
-    public static final String b36dbSNP129 = dbsnpDataLocation + "dbsnp_129_b36.vcf";
-    public static final String b37dbSNP129 = dbsnpDataLocation + "dbsnp_129_b37.vcf";
-    public static final String b37dbSNP132 = dbsnpDataLocation + "dbsnp_132_b37.vcf";
-    public static final String b37dbSNP138 = "/humgen/gsa-hpprojects/GATK/bundle/current/b37/dbsnp_138.b37.vcf";
-    public static final String hg18dbSNP132 = dbsnpDataLocation + "dbsnp_132.hg18.vcf";
-
-    public static final String hapmapDataLocation = comparisonDataLocation + "Validated/HapMap/3.3/";
-    public static final String b37hapmapGenotypes = hapmapDataLocation + "genotypes_r27_nr.b37_fwd.vcf";
-
-    public static final String intervalsLocation = "/seq/references/HybSelOligos/whole_exome_agilent_1.1_refseq_plus_3_boosters/";
-    public static final String hg19Intervals = intervalsLocation + "whole_exome_agilent_1.1_refseq_plus_3_boosters.Homo_sapiens_assembly19.targets.interval_list";
-    public static final String hg19Chr20Intervals = GATKDataLocation + "whole_exome_agilent_1.1_refseq_plus_3_boosters.Homo_sapiens_assembly19.targets.chr20.interval_list";
-
-    public static final boolean REQUIRE_NETWORK_CONNECTION = false;
-    private static final String networkTempDirRoot = "/broad/hptmp/";
-    private static final boolean networkTempDirRootExists = new File(networkTempDirRoot).exists();
-    private static final File networkTempDirFile;
-
-    private static final String privateTestDirRelative = "private/gatk-tools-private/src/test/resources/";
-    public static final String privateTestDir = new File(gatkDirectory, privateTestDirRelative).getAbsolutePath() + "/";
-    protected static final String privateTestDirRoot = privateTestDir.replace(privateTestDirRelative, "");
-
-    private static final String publicTestDirRelative = "public/gatk-engine/src/test/resources/";
-    public static final String publicTestDir = new File(gatkDirectory, publicTestDirRelative).getAbsolutePath() + "/";
-    protected static final String publicTestDirRoot = publicTestDir.replace(publicTestDirRelative, "");
-
-    public static final String keysDataLocation = validationDataLocation + "keys/";
-
-    public static final String gatkKeyFile = CryptUtils.GATK_USER_KEY_DIRECTORY + "gsamembers_broadinstitute.org.key";
-
-    public static final String exampleFASTA = publicTestDir + "exampleFASTA.fasta";
-
-    public final static String NA12878_PCRFREE = privateTestDir + "PCRFree.2x250.Illumina.20_10_11.bam";
-    public final static String NA12878_WEx = privateTestDir + "CEUTrio.HiSeq.WEx.b37_decoy.NA12878.20_10_11mb.bam";
-
-    public static final boolean queueTestRunModeIsSet = System.getProperty("queuetest.run", "").equals("true");
-
-    /** before the class starts up */
-    static {
-        // setup a basic log configuration
-        CommandLineUtils.configureConsoleLogging();
-
-        // setup our log layout
-        PatternLayout layout = new PatternLayout();
-        layout.setConversionPattern("TEST %C{1}.%M - %d{HH:mm:ss,SSS} - %m%n");
-
-        // now set the layout of all the loggers to our layout
-        CommandLineUtils.setLayout(logger, layout);
-
-        // Set the Root logger to only output warnings.
-        logger.setLevel(Level.WARN);
-
-        if (networkTempDirRootExists) {
-            networkTempDirFile = IOUtils.tempDir("temp.", ".dir", new File(networkTempDirRoot + System.getProperty("user.name")));
-            networkTempDirFile.deleteOnExit();
-        } else {
-            networkTempDirFile = null;
-        }
-
-
-        if ( REQUIRE_NETWORK_CONNECTION ) {
-            // find our file sources
-            if (!fileExist(hg18Reference) || !fileExist(hg19Reference) || !fileExist(b36KGReference)) {
-                logger.fatal("We can't locate the reference directories.  Aborting!");
-                throw new RuntimeException("BaseTest setup failed: unable to locate the reference directories");
-            }
-        }
-    }
-
-    /**
-     * Simple generic utility class to creating TestNG data providers:
-     *
-     * 1: inherit this class, as in
-     *
-     *      private class SummarizeDifferenceTest extends TestDataProvider {
-     *         public SummarizeDifferenceTest() {
-     *           super(SummarizeDifferenceTest.class);
-     *         }
-     *         ...
-     *      }
-     *
-     * Provide a reference to your class to the TestDataProvider constructor.
-     *
-     * 2: Create instances of your subclass.  Return from it the call to getTests, providing
-     * the class type of your test
-     *
-     * <code>
-     * {@literal @}DataProvider(name = "summaries")
-     * public Object[][] createSummaries() {
-     *   new SummarizeDifferenceTest().addDiff("A", "A").addSummary("A:2");
-     *   new SummarizeDifferenceTest().addDiff("A", "B").addSummary("A:1", "B:1");
-     *   return SummarizeDifferenceTest.getTests(SummarizeDifferenceTest.class);
-     * }
-     * </code>
-     *
-     * This class magically tracks created objects of this
-     */
-    public static class TestDataProvider {
-        private static final Map<Class, List<Object>> tests = new HashMap<>();
-        protected String name;
-
-        /**
-         * Create a new TestDataProvider instance bound to the class variable C
-         */
-        public TestDataProvider(Class c, String name) {
-            if ( ! tests.containsKey(c) )
-                tests.put(c, new ArrayList<>());
-            tests.get(c).add(this);
-            this.name = name;
-        }
-
-        public TestDataProvider(Class c) {
-            this(c, "");
-        }
-
-        public void setName(final String name) {
-            this.name = name;
-        }
-
-        /**
-         * Return all of the data providers in the form expected by TestNG of type class C
-         * @param c
-         * @return
-         */
-        public static Object[][] getTests(Class c) {
-            List<Object[]> params2 = new ArrayList<Object[]>();
-            for ( Object x : tests.get(c) ) params2.add(new Object[]{x});
-            return params2.toArray(new Object[][]{});
-        }
-
-        @Override
-        public String toString() {
-            return "TestDataProvider("+name+")";
-        }
-    }
-
-    /**
-     * test if the file exists
-     *
-     * @param file name as a string
-     * @return true if it exists
-     */
-    public static boolean fileExist(String file) {
-        File temp = new File(file);
-        return temp.exists();
-    }
-    
-    /**
-     * this appender looks for a specific message in the log4j stream.
-     * It can be used to verify that a specific message was generated to the logging system.
-     */
-    public static class ValidationAppender extends AppenderSkeleton {
-
-        private boolean foundString = false;
-        private String targetString = "";
-
-        public ValidationAppender(String target) {
-            targetString = target;
-        }
-
-        @Override
-        protected void append(LoggingEvent loggingEvent) {
-            if (loggingEvent.getMessage().equals(targetString))
-                foundString = true;
-        }
-
-        public void close() {
-            // do nothing
-        }
-
-        public boolean requiresLayout() {
-            return false;
-        }
-
-        public boolean foundString() {
-            return foundString;
-        }
-    }
-
-    /**
-     * Creates a temp file that will be deleted on exit after tests are complete.
-     * @param name Prefix of the file.
-     * @param extension Extension to concat to the end of the file.
-     * @return A file in the temporary directory starting with name, ending with extension, which will be deleted after the program exits.
-     */
-    public static File createTempFile(final String name, final String extension) {
-        try {
-            final File file = File.createTempFile(name, extension);
-            file.deleteOnExit();
-
-            // Mark corresponding indices for deletion on exit as well just in case an index is created for the temp file:
-            new File(file.getAbsolutePath() + Tribble.STANDARD_INDEX_EXTENSION).deleteOnExit();
-            new File(file.getAbsolutePath() + TabixUtils.STANDARD_INDEX_EXTENSION).deleteOnExit();
-            new File(file.getAbsolutePath() + ".bai").deleteOnExit();
-            new File(file.getAbsolutePath().replaceAll(extension + "$", ".bai")).deleteOnExit();
-
-            return file;
-        } catch (IOException ex) {
-            throw new ReviewedGATKException("Cannot create temp file: " + ex.getMessage(), ex);
-        }
-    }
-
-    /**
-     * Creates a temp list file that will be deleted on exit after tests are complete.
-     * @param tempFilePrefix Prefix of the file.
-     * @param lines lines to write to the file.
-     * @return A list file in the temporary directory starting with tempFilePrefix, which will be deleted after the program exits.
-     */
-    public static File createTempListFile(final String tempFilePrefix, final String... lines) {
-        try {
-            final File tempListFile = createTempFile(tempFilePrefix, ".list");
-
-            final PrintWriter out = new PrintWriter(tempListFile);
-            for (final String line : lines) {
-                out.println(line);
-            }
-            out.close();
-
-            return tempListFile;
-        } catch (IOException ex) {
-            throw new ReviewedGATKException("Cannot create temp file: " + ex.getMessage(), ex);
-        }
-    }
-
-    /**
-     * Creates a temp file that will be deleted on exit after tests are complete.
-     * @param name Name of the file.
-     * @return A file in the network temporary directory with name, which will be deleted after the program exits.
-     * @throws SkipException when the network is not available.
-     */
-    public static File tryCreateNetworkTempFile(String name) {
-        if (!networkTempDirRootExists)
-            throw new SkipException("Network temporary directory does not exist: " + networkTempDirRoot);
-        File file = new File(networkTempDirFile, name);
-        file.deleteOnExit();
-        return file;
-    }
-
-    /**
-     * Log this message so that it shows up inline during output as well as in html reports
-     *
-     * @param message
-     */
-    public static void log(final String message) {
-        Reporter.log(message, true);
-    }
-
-    private static final double DEFAULT_FLOAT_TOLERANCE = 1e-1;
-
-    public static final void assertEqualsDoubleSmart(final Object actual, final Double expected) {
-        Assert.assertTrue(actual instanceof Double, "Not a double");
-        assertEqualsDoubleSmart((double)(Double)actual, (double)expected);
-    }
-
-    public static final void assertEqualsDoubleSmart(final Object actual, final Double expected, final double tolerance) {
-        Assert.assertTrue(actual instanceof Double, "Not a double");
-        assertEqualsDoubleSmart((double)(Double)actual, (double)expected, tolerance);
-    }
-
-    public static final void assertEqualsDoubleSmart(final double actual, final double expected) {
-        assertEqualsDoubleSmart(actual, expected, DEFAULT_FLOAT_TOLERANCE);
-    }
-
-    public static final <T> void assertEqualsSet(final Set<T> actual, final Set<T> expected, final String info) {
-        final Set<T> actualSet = new HashSet<T>(actual);
-        final Set<T> expectedSet = new HashSet<T>(expected);
-        Assert.assertTrue(actualSet.equals(expectedSet), info); // note this is necessary due to testng bug for set comps
-    }
-
-    public static void assertEqualsDoubleSmart(final double actual, final double expected, final double tolerance) {
-        assertEqualsDoubleSmart(actual, expected, tolerance, null);
-    }
-
-    public static void assertEqualsDoubleSmart(final double actual, final double expected, final double tolerance, final String message) {
-        if ( Double.isNaN(expected) ) // NaN == NaN => false unfortunately
-            Assert.assertTrue(Double.isNaN(actual), "expected is nan, actual is not");
-        else if ( Double.isInfinite(expected) ) // NaN == NaN => false unfortunately
-            Assert.assertTrue(Double.isInfinite(actual), "expected is infinite, actual is not");
-        else {
-            final double delta = Math.abs(actual - expected);
-            final double ratio = Math.abs(actual / expected - 1.0);
-            Assert.assertTrue(delta < tolerance || ratio < tolerance, "expected = " + expected + " actual = " + actual
-                    + " not within tolerance " + tolerance
-                    + (message == null ? "" : "message: " + message));
-        }
-    }
-
-    public static void assertVariantContextsAreEqual( final VariantContext actual, final VariantContext expected ) {
-        Assert.assertNotNull(actual, "VariantContext expected not null");
-        Assert.assertEquals(actual.getChr(), expected.getChr(), "chr");
-        Assert.assertEquals(actual.getStart(), expected.getStart(), "start");
-        Assert.assertEquals(actual.getEnd(), expected.getEnd(), "end");
-        Assert.assertEquals(actual.getID(), expected.getID(), "id");
-        Assert.assertEquals(actual.getAlleles(), expected.getAlleles(), "alleles for " + expected + " vs " + actual);
-
-        assertAttributesEquals(actual.getAttributes(), expected.getAttributes());
-        Assert.assertEquals(actual.filtersWereApplied(), expected.filtersWereApplied(), "filtersWereApplied");
-        Assert.assertEquals(actual.isFiltered(), expected.isFiltered(), "isFiltered");
-        assertEqualsSet(actual.getFilters(), expected.getFilters(), "filters");
-        assertEqualsDoubleSmart(actual.getPhredScaledQual(), expected.getPhredScaledQual());
-
-        Assert.assertEquals(actual.hasGenotypes(), expected.hasGenotypes(), "hasGenotypes");
-        if ( expected.hasGenotypes() ) {
-            assertEqualsSet(actual.getSampleNames(), expected.getSampleNames(), "sample names set");
-            Assert.assertEquals(actual.getSampleNamesOrderedByName(), expected.getSampleNamesOrderedByName(), "sample names");
-            final Set<String> samples = expected.getSampleNames();
-            for ( final String sample : samples ) {
-                assertGenotypesAreEqual(actual.getGenotype(sample), expected.getGenotype(sample));
-            }
-        }
-    }
-
-    public static void assertVariantContextStreamsAreEqual(final Iterable<VariantContext> actual, final Iterable<VariantContext> expected) {
-        final Iterator<VariantContext> actualIT = actual.iterator();
-        final Iterator<VariantContext> expectedIT = expected.iterator();
-
-        while ( expectedIT.hasNext() ) {
-            final VariantContext expectedVC = expectedIT.next();
-            if ( expectedVC == null )
-                continue;
-
-            VariantContext actualVC;
-            do {
-                Assert.assertTrue(actualIT.hasNext(), "Too few records found in actual");
-                actualVC = actualIT.next();
-            } while ( actualIT.hasNext() && actualVC == null );
-
-            if ( actualVC == null )
-                Assert.fail("Too few records in actual");
-
-            assertVariantContextsAreEqual(actualVC, expectedVC);
-        }
-        Assert.assertTrue(! actualIT.hasNext(), "Too many records found in actual");
-    }
-
-
-    public static void assertGenotypesAreEqual(final Genotype actual, final Genotype expected) {
-        Assert.assertEquals(actual.getSampleName(), expected.getSampleName(), "Genotype names");
-        Assert.assertEquals(actual.getAlleles(), expected.getAlleles(), "Genotype alleles");
-        Assert.assertEquals(actual.getGenotypeString(), expected.getGenotypeString(), "Genotype string");
-        Assert.assertEquals(actual.getType(), expected.getType(), "Genotype type");
-
-        // filters are the same
-        Assert.assertEquals(actual.getFilters(), expected.getFilters(), "Genotype fields");
-        Assert.assertEquals(actual.isFiltered(), expected.isFiltered(), "Genotype isFiltered");
-
-        // inline attributes
-        Assert.assertEquals(actual.getDP(), expected.getDP(), "Genotype dp");
-        Assert.assertTrue(Arrays.equals(actual.getAD(), expected.getAD()));
-        Assert.assertEquals(actual.getGQ(), expected.getGQ(), "Genotype gq");
-        Assert.assertEquals(actual.hasPL(), expected.hasPL(), "Genotype hasPL");
-        Assert.assertEquals(actual.hasAD(), expected.hasAD(), "Genotype hasAD");
-        Assert.assertEquals(actual.hasGQ(), expected.hasGQ(), "Genotype hasGQ");
-        Assert.assertEquals(actual.hasDP(), expected.hasDP(), "Genotype hasDP");
-
-        Assert.assertEquals(actual.hasLikelihoods(), expected.hasLikelihoods(), "Genotype haslikelihoods");
-        Assert.assertEquals(actual.getLikelihoodsString(), expected.getLikelihoodsString(), "Genotype getlikelihoodsString");
-        Assert.assertEquals(actual.getLikelihoods(), expected.getLikelihoods(), "Genotype getLikelihoods");
-        Assert.assertTrue(Arrays.equals(actual.getPL(), expected.getPL()));
-
-        Assert.assertEquals(actual.getPhredScaledQual(), expected.getPhredScaledQual(), "Genotype phredScaledQual");
-        assertAttributesEquals(actual.getExtendedAttributes(), expected.getExtendedAttributes());
-        Assert.assertEquals(actual.isPhased(), expected.isPhased(), "Genotype isPhased");
-        Assert.assertEquals(actual.getPloidy(), expected.getPloidy(), "Genotype getPloidy");
-    }
-
-    public static void assertVCFHeadersAreEqual(final VCFHeader actual, final VCFHeader expected) {
-        Assert.assertEquals(actual.getMetaDataInSortedOrder().size(), expected.getMetaDataInSortedOrder().size(), "No VCF header lines");
-
-        // for some reason set.equals() is returning false but all paired elements are .equals().  Perhaps compare to is busted?
-        //Assert.assertEquals(actual.getMetaDataInInputOrder(), expected.getMetaDataInInputOrder());
-        final List<VCFHeaderLine> actualLines = new ArrayList<VCFHeaderLine>(actual.getMetaDataInSortedOrder());
-        final List<VCFHeaderLine> expectedLines = new ArrayList<VCFHeaderLine>(expected.getMetaDataInSortedOrder());
-        for ( int i = 0; i < actualLines.size(); i++ ) {
-            Assert.assertEquals(actualLines.get(i), expectedLines.get(i), "VCF header lines");
-        }
-    }
-
-    public static void assertVCFandBCFFilesAreTheSame(final File vcfFile, final File bcfFile) throws IOException {
-        final Pair<VCFHeader, GATKVCFUtils.VCIterable<LineIterator>> vcfData = GATKVCFUtils.readAllVCs(vcfFile, new VCFCodec());
-        final Pair<VCFHeader, GATKVCFUtils.VCIterable<PositionalBufferedStream>> bcfData = GATKVCFUtils.readAllVCs(bcfFile, new BCF2Codec());
-        assertVCFHeadersAreEqual(bcfData.getFirst(), vcfData.getFirst());
-        assertVariantContextStreamsAreEqual(bcfData.getSecond(), vcfData.getSecond());
-    }
-
-    private static void assertAttributeEquals(final String key, final Object actual, final Object expected) {
-        if ( expected instanceof Double ) {
-            // must be very tolerant because doubles are being rounded to 2 sig figs
-            assertEqualsDoubleSmart(actual, (Double) expected, 1e-2);
-        } else
-            Assert.assertEquals(actual, expected, "Attribute " + key);
-    }
-
-    private static void assertAttributesEquals(final Map<String, Object> actual, Map<String, Object> expected) {
-        final Set<String> expectedKeys = new HashSet<String>(expected.keySet());
-
-        for ( final Map.Entry<String, Object> act : actual.entrySet() ) {
-            final Object actualValue = act.getValue();
-            if ( expected.containsKey(act.getKey()) && expected.get(act.getKey()) != null ) {
-                final Object expectedValue = expected.get(act.getKey());
-                if ( expectedValue instanceof List ) {
-                    final List<Object> expectedList = (List<Object>)expectedValue;
-                    Assert.assertTrue(actualValue instanceof List, act.getKey() + " should be a list but isn't");
-                    final List<Object> actualList = (List<Object>)actualValue;
-                    Assert.assertEquals(actualList.size(), expectedList.size(), act.getKey() + " size");
-                    for ( int i = 0; i < expectedList.size(); i++ )
-                        assertAttributeEquals(act.getKey(), actualList.get(i), expectedList.get(i));
-                } else
-                    assertAttributeEquals(act.getKey(), actualValue, expectedValue);
-            } else {
-                // it's ok to have a binding in x -> null that's absent in y
-                Assert.assertNull(actualValue, act.getKey() + " present in one but not in the other");
-            }
-            expectedKeys.remove(act.getKey());
-        }
-
-        // now expectedKeys contains only the keys found in expected but not in actual,
-        // and they must all be null
-        for ( final String missingExpected : expectedKeys ) {
-            final Object value = expected.get(missingExpected);
-            Assert.assertTrue(isMissing(value), "Attribute " + missingExpected + " missing in one but not in other" );
-        }
-    }
-
-    private static final boolean isMissing(final Object value) {
-        if ( value == null ) return true;
-        else if ( value.equals(VCFConstants.MISSING_VALUE_v4) ) return true;
-        else if ( value instanceof List ) {
-            // handles the case where all elements are null or the list is empty
-            for ( final Object elt : (List)value)
-                if ( elt != null )
-                    return false;
-            return true;
-        } else
-            return false;
-    }
-
-    /**
-     * Checks whether two double array contain the same values or not.
-     * @param actual actual produced array.
-     * @param expected expected array.
-     * @param tolerance maximum difference between double value to be consider equivalent.
-     */
-    protected static void assertEqualsDoubleArray(final double[] actual, final double[] expected, final double tolerance) {
-        if (expected == null)
-            Assert.assertNull(actual);
-        else {
-            Assert.assertNotNull(actual);
-            Assert.assertEquals(actual.length,expected.length,"array length");
-        }
-        for (int i = 0; i < actual.length; i++)
-            Assert.assertEquals(actual[i],expected[i],tolerance,"array position " + i);
-    }
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/BaseUtilsUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/BaseUtilsUnitTest.java
deleted file mode 100644
index b532baf..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/BaseUtilsUnitTest.java
+++ /dev/null
@@ -1,179 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils;
-
-import org.broadinstitute.gatk.utils.BaseTest;
-import org.broadinstitute.gatk.engine.GenomeAnalysisEngine;
-import org.testng.Assert;
-import org.testng.annotations.BeforeClass;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
-
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.Random;
-
-
-public class BaseUtilsUnitTest extends BaseTest {
-    @BeforeClass
-    public void init() { }
-
-    @Test
-    public void testMostFrequentBaseFraction() {
-        logger.warn("Executing testMostFrequentBaseFraction");
-
-        compareFrequentBaseFractionToExpected("AAAAA", 1.0);
-        compareFrequentBaseFractionToExpected("ACCG", 0.5);
-        compareFrequentBaseFractionToExpected("ACCCCTTTTG", 4.0/10.0);
-    }
-
-    private void compareFrequentBaseFractionToExpected(String sequence, double expected) {
-        double fraction = BaseUtils.mostFrequentBaseFraction(sequence.getBytes());
-        Assert.assertTrue(MathUtils.compareDoubles(fraction, expected) == 0);
-    }
-
-    @Test
-    public void testConvertIUPACtoN() {
-
-        checkBytesAreEqual(BaseUtils.convertIUPACtoN(new byte[]{'A', 'A', 'A'}, false, false), new byte[]{'A', 'A', 'A'});
-        checkBytesAreEqual(BaseUtils.convertIUPACtoN(new byte[]{'W', 'A', 'A'}, false, false), new byte[]{'N', 'A', 'A'});
-        checkBytesAreEqual(BaseUtils.convertIUPACtoN(new byte[]{'A', 'M', 'A'}, false, false), new byte[]{'A', 'N', 'A'});
-        checkBytesAreEqual(BaseUtils.convertIUPACtoN(new byte[]{'A', 'A', 'K'}, false, false), new byte[]{'A', 'A', 'N'});
-        checkBytesAreEqual(BaseUtils.convertIUPACtoN(new byte[]{'M', 'M', 'M'}, false, false), new byte[]{'N', 'N', 'N'});
-    }
-
-    private void checkBytesAreEqual(final byte[] b1, final byte[] b2) {
-        for ( int i = 0; i < b1.length; i++ )
-            Assert.assertEquals(b1[i], b2[i]);
-    }
-
-    @Test
-    public void testConvertBasesToIUPAC() {
-
-        for ( final BaseUtils.Base b : BaseUtils.Base.values() ) {
-            if ( BaseUtils.isRegularBase(b.base) )
-                Assert.assertEquals(BaseUtils.basesToIUPAC(b.base, b.base), b.base, "testing same base");
-        }
-
-        Assert.assertEquals(BaseUtils.basesToIUPAC((byte)'A', (byte)'X'), 'N', "testing non-standard base");
-        Assert.assertEquals(BaseUtils.basesToIUPAC((byte)'X', (byte)'A'), 'N', "testing non-standard base");
-        Assert.assertEquals(BaseUtils.basesToIUPAC((byte)'X', (byte)'X'), 'N', "testing non-standard base");
-
-        Assert.assertEquals(BaseUtils.basesToIUPAC((byte)'A', (byte)'T'), 'W', "testing A/T=W");
-        Assert.assertEquals(BaseUtils.basesToIUPAC((byte)'T', (byte)'A'), 'W', "testing T/A=W");
-        Assert.assertEquals(BaseUtils.basesToIUPAC((byte) 'G', (byte) 'T'), 'K', "testing G/T=K");
-        Assert.assertEquals(BaseUtils.basesToIUPAC((byte) 'T', (byte) 'G'), 'K', "testing T/G=K");
-    }
-
-    @Test
-    public void testTransitionTransversion() {
-        logger.warn("Executing testTransitionTransversion");
-
-        Assert.assertTrue( BaseUtils.SNPSubstitutionType( (byte)'A', (byte)'T' ) == BaseUtils.BaseSubstitutionType.TRANSVERSION );
-        Assert.assertTrue( BaseUtils.SNPSubstitutionType( (byte)'A', (byte)'C' ) == BaseUtils.BaseSubstitutionType.TRANSVERSION );
-        Assert.assertTrue( BaseUtils.SNPSubstitutionType( (byte)'A', (byte)'G' ) == BaseUtils.BaseSubstitutionType.TRANSITION );
-        Assert.assertTrue( BaseUtils.SNPSubstitutionType( (byte)'C', (byte)'A' ) == BaseUtils.BaseSubstitutionType.TRANSVERSION );
-        Assert.assertTrue( BaseUtils.SNPSubstitutionType( (byte)'C', (byte)'T' ) == BaseUtils.BaseSubstitutionType.TRANSITION );
-        Assert.assertTrue( BaseUtils.SNPSubstitutionType( (byte)'C', (byte)'G' ) == BaseUtils.BaseSubstitutionType.TRANSVERSION );
-        Assert.assertTrue( BaseUtils.SNPSubstitutionType( (byte)'T', (byte)'A' ) == BaseUtils.BaseSubstitutionType.TRANSVERSION );
-        Assert.assertTrue( BaseUtils.SNPSubstitutionType( (byte)'T', (byte)'C' ) == BaseUtils.BaseSubstitutionType.TRANSITION );
-        Assert.assertTrue( BaseUtils.SNPSubstitutionType( (byte)'T', (byte)'G' ) == BaseUtils.BaseSubstitutionType.TRANSVERSION );
-        Assert.assertTrue( BaseUtils.SNPSubstitutionType( (byte)'G', (byte)'A' ) == BaseUtils.BaseSubstitutionType.TRANSITION );
-        Assert.assertTrue( BaseUtils.SNPSubstitutionType( (byte)'G', (byte)'T' ) == BaseUtils.BaseSubstitutionType.TRANSVERSION );
-        Assert.assertTrue( BaseUtils.SNPSubstitutionType( (byte)'G', (byte)'C' ) == BaseUtils.BaseSubstitutionType.TRANSVERSION );
-
-        Assert.assertTrue( BaseUtils.SNPSubstitutionType( (byte)'a', (byte)'T' ) == BaseUtils.BaseSubstitutionType.TRANSVERSION );
-        Assert.assertTrue( BaseUtils.SNPSubstitutionType( (byte)'a', (byte)'C' ) == BaseUtils.BaseSubstitutionType.TRANSVERSION );
-        Assert.assertTrue( BaseUtils.SNPSubstitutionType( (byte)'A', (byte)'T' ) == BaseUtils.BaseSubstitutionType.TRANSVERSION );
-        Assert.assertTrue( BaseUtils.SNPSubstitutionType( (byte)'A', (byte)'C' ) == BaseUtils.BaseSubstitutionType.TRANSVERSION );
-        Assert.assertTrue( BaseUtils.SNPSubstitutionType( (byte)'A', (byte)'t' ) == BaseUtils.BaseSubstitutionType.TRANSVERSION );
-        Assert.assertTrue( BaseUtils.SNPSubstitutionType( (byte)'A', (byte)'c' ) == BaseUtils.BaseSubstitutionType.TRANSVERSION );
-        Assert.assertTrue( BaseUtils.SNPSubstitutionType( (byte)'a', (byte)'t' ) == BaseUtils.BaseSubstitutionType.TRANSVERSION );
-        Assert.assertTrue( BaseUtils.SNPSubstitutionType( (byte)'a', (byte)'c' ) == BaseUtils.BaseSubstitutionType.TRANSVERSION );
-    }
-
-    @Test
-    public void testReverseComplementString() {
-        logger.warn("Executing testReverseComplementString");
-
-        compareRCStringToExpected("ACGGT", "ACCGT");
-        compareRCStringToExpected("TCGTATATCTCGCTATATATATATAGCTCTAGTATA", "TATACTAGAGCTATATATATATAGCGAGATATACGA");
-        compareRCStringToExpected("AAAN", "NTTT");
-    }
-
-    private void compareRCStringToExpected(String fw, String rcExp) {
-        String rcObs = BaseUtils.simpleReverseComplement(fw);
-
-        Assert.assertTrue(rcObs.equals(rcExp));
-    }
-
-    @Test(dataProvider="baseComparatorData")
-    public void testBaseComparator(final Collection<byte[]> basesToSort) {
-        final ArrayList<byte[]> sorted = new ArrayList<>(basesToSort);
-        Collections.sort(sorted, BaseUtils.BASES_COMPARATOR);
-        for (int i = 0; i < sorted.size(); i++)   {
-            Assert.assertEquals(BaseUtils.BASES_COMPARATOR.compare(sorted.get(i),sorted.get(i)),0);
-            final String iString = new String(sorted.get(i));
-            for (int j = i; j < sorted.size(); j++) {
-                final String jString = new String(sorted.get(j));
-                if (iString.compareTo(jString) == 0)
-                    Assert.assertEquals(BaseUtils.BASES_COMPARATOR.compare(sorted.get(i),sorted.get(j)),0);
-                else
-                    Assert.assertTrue(BaseUtils.BASES_COMPARATOR.compare(sorted.get(i),sorted.get(j)) * iString.compareTo(jString) > 0);
-                Assert.assertTrue(BaseUtils.BASES_COMPARATOR.compare(sorted.get(i),sorted.get(j)) <= 0);
-            }
-        }
-    }
-
-    @DataProvider(name="baseComparatorData")
-    public Object[][] baseComparatorData() {
-        final int testCount = 10;
-        final int testSizeAverage = 10;
-        final int testSizeDeviation = 10;
-        final int haplotypeSizeAverage = 100;
-        final int haplotypeSizeDeviation = 100;
-
-        final Object[][] result = new Object[testCount][];
-
-        GenomeAnalysisEngine.resetRandomGenerator();
-        final Random rnd = GenomeAnalysisEngine.getRandomGenerator();
-
-        for (int i = 0; i < testCount; i++) {
-            final int size = (int) Math.max(0,rnd.nextDouble() * testSizeDeviation + testSizeAverage);
-            final ArrayList<byte[]> bases = new ArrayList<>(size);
-            for (int j = 0; j < size; j++) {
-                final int jSize = (int) Math.max(0,rnd.nextDouble() * haplotypeSizeDeviation + haplotypeSizeAverage);
-                final byte[] b = new byte[jSize];
-                for (int k = 0; k < jSize; k++)
-                    b[k] = BaseUtils.baseIndexToSimpleBase(rnd.nextInt(4));
-                bases.add(b);
-            }
-            result[i] = new Object[] { bases };
-        }
-        return result;
-    }
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/BitSetUtilsUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/BitSetUtilsUnitTest.java
deleted file mode 100644
index 87a5914..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/BitSetUtilsUnitTest.java
+++ /dev/null
@@ -1,85 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils;
-
-import org.broadinstitute.gatk.engine.GenomeAnalysisEngine;
-import org.testng.Assert;
-import org.testng.annotations.BeforeClass;
-import org.testng.annotations.Test;
-
-import java.util.Random;
-
-/**
- * @author Mauricio Carneiro
- * @since 3/5/12
- */
-
-public class BitSetUtilsUnitTest {
-    private static int RANDOM_NUMBERS_TO_TRY = 87380;
-    private static Random random;
-
-    @BeforeClass
-    public void init() {
-        random = GenomeAnalysisEngine.getRandomGenerator();
-    }
-
-    @Test(enabled = true)
-    public void testLongBitSet() {
-        long[] numbers = {0L, 1L, 428L, 65536L, 239847L, 4611686018427387903L, Long.MAX_VALUE, Long.MIN_VALUE, -1L, -2L, -7L, -128L, -65536L, -100000L};
-        for (long n : numbers)
-            Assert.assertEquals(BitSetUtils.longFrom(BitSetUtils.bitSetFrom(n)), n);
-
-        for (int i = 0; i < RANDOM_NUMBERS_TO_TRY; i++) {
-            long n = random.nextLong();
-            Assert.assertEquals(BitSetUtils.longFrom(BitSetUtils.bitSetFrom(n)), n);    // Because class Random uses a seed with only 48 bits, this algorithm will not return all possible long values.
-        }
-    }
-
-    @Test(enabled = true)
-    public void testShortBitSet() {
-        short[] numbers = {0, 1, 428, 25934, 23847, 16168, Short.MAX_VALUE, Short.MIN_VALUE, -1, -2, -7, -128, -12312, -31432};
-        for (long n : numbers)
-            Assert.assertEquals(BitSetUtils.shortFrom(BitSetUtils.bitSetFrom(n)), n);
-
-        for (int i = 0; i < RANDOM_NUMBERS_TO_TRY; i++) {
-            short n = (short) random.nextInt();
-            Assert.assertEquals(BitSetUtils.shortFrom(BitSetUtils.bitSetFrom(n)), n);
-        }
-    }
-
-    @Test(enabled = false)
-    public void testDNAAndBitSetConversion() {
-        String[] dna = {"AGGTGTTGT", "CCCCCCCCCCCCCC", "GGGGGGGGGGGGGG", "TTTTTTTTTTTTTT", "GTAGACCGATCTCAGCTAGT", "AACGTCAATGCAGTCAAGTCAGACGTGGGTT", "TTTTTTTTTTTTTTTTTTTTTTTTTTTTTT", "TTTTTTTTTTTTTTTTTTTTTTTTTTTTTTT"};
-
-        // Test all contexts of size 1-8.
-        //for (long n = 0; n < RANDOM_NUMBERS_TO_TRY; n++)
-        //    Assert.assertEquals(BitSetUtils.longFrom(BitSetUtils.bitSetFrom(ContextCovariate.contextFromKey(BitSetUtils.bitSetFrom(n)))), n);
-
-        // Test the special cases listed in the dna array
-        //for (String d : dna)
-        //    Assert.assertEquals(BitSetUtils.dnaFrom(BitSetUtils.bitSetFrom(d)), d);
-    }
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/ExampleToCopyUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/ExampleToCopyUnitTest.java
deleted file mode 100644
index 8c6e24c..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/ExampleToCopyUnitTest.java
+++ /dev/null
@@ -1,241 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils;
-
-
-// the imports for unit testing.
-
-
-import htsjdk.samtools.reference.IndexedFastaSequenceFile;
-import htsjdk.samtools.SAMFileHeader;
-import htsjdk.samtools.SAMFileReader;
-import htsjdk.samtools.SAMRecord;
-import org.broadinstitute.gatk.utils.fasta.CachingIndexedFastaSequenceFile;
-import org.broadinstitute.gatk.utils.pileup.PileupElement;
-import org.broadinstitute.gatk.utils.pileup.ReadBackedPileup;
-import org.broadinstitute.gatk.utils.pileup.ReadBackedPileupImpl;
-import org.broadinstitute.gatk.utils.sam.ArtificialBAMBuilder;
-import org.broadinstitute.gatk.utils.sam.ArtificialSAMUtils;
-import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
-import org.broadinstitute.gatk.utils.sam.GATKSamRecordFactory;
-import htsjdk.variant.variantcontext.Allele;
-import htsjdk.variant.variantcontext.VariantContext;
-import htsjdk.variant.variantcontext.VariantContextBuilder;
-import org.testng.Assert;
-import org.testng.annotations.BeforeClass;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
-
-import java.io.File;
-import java.io.FileNotFoundException;
-import java.util.*;
-
-public class ExampleToCopyUnitTest extends BaseTest {
-    // example genome loc parser for this test, can be deleted if you don't use the reference
-    private GenomeLocParser genomeLocParser;
-
-    // example fasta index file, can be deleted if you don't use the reference
-    private IndexedFastaSequenceFile seq;
-
-    @BeforeClass
-    public void setup() throws FileNotFoundException {
-        // sequence
-        seq = new CachingIndexedFastaSequenceFile(new File(b37KGReference));
-        genomeLocParser = new GenomeLocParser(seq);
-    }
-
-    /**
-     * Combinatorial unit test data provider example.
-     *
-     * Creates data for testMyData test function, containing two arguments, start and size at each value
-     *
-     * @return Object[][] for testng DataProvider
-     */
-    @DataProvider(name = "MyDataProvider")
-    public Object[][] makeMyDataProvider() {
-        List<Object[]> tests = new ArrayList<Object[]>();
-
-        // this functionality can be adapted to provide input data for whatever you might want in your data
-        for ( final int start : Arrays.asList(1, 10, 100) ) {
-            for ( final int size : Arrays.asList(1, 10, 100, 1000) ) {
-                tests.add(new Object[]{start, size});
-            }
-        }
-
-        return tests.toArray(new Object[][]{});
-    }
-
-    /**
-     * Example testng test using MyDataProvider
-     */
-    @Test(dataProvider = "MyDataProvider")
-    public void testMyData(final int start, final int size) {
-        // adaptor this code to do whatever testing you want given the arguments start and size
-        Assert.assertTrue(start >= 0);
-        Assert.assertTrue(size >= 0);
-    }
-
-    /**
-     * DataProvider example using a class-based data structure
-     */
-    private class MyDataProviderClass extends TestDataProvider {
-        private int start;
-        private int size;
-
-        private MyDataProviderClass(int start, int size) {
-            super(MyDataProviderClass.class);
-            this.start = start;
-            this.size = size;
-        }
-    }
-
-    @DataProvider(name = "MyClassBasedDataProvider")
-    public Object[][] makeMyDataProviderClass() {
-        // this functionality can be adapted to provide input data for whatever you might want in your data
-        for ( final int start : Arrays.asList(1, 10, 100) ) {
-            for ( final int size : Arrays.asList(1, 10, 100, 1000) ) {
-                new MyDataProviderClass(start, size);
-            }
-        }
-
-        return TestDataProvider.getTests(MyDataProviderClass.class);
-    }
-
-    /**
-     * Example testng test using MyClassBasedDataProvider
-     */
-    @Test(dataProvider = "MyClassBasedDataProvider")
-    public void testMyDataProviderClass(MyDataProviderClass testSpec) {
-        // adaptor this code to do whatever testing you want given the arguments start and size
-        Assert.assertTrue(testSpec.start >= 0);
-        Assert.assertTrue(testSpec.size >= 0);
-    }
-
-    /**
-     * A unit test that creates an artificial read for testing some code that uses reads
-     */
-    @Test()
-    public void testWithARead() {
-        final SAMFileHeader header = ArtificialSAMUtils.createArtificialSamHeader(seq.getSequenceDictionary());
-        final GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(header, "myRead", 0, 1, 10);
-        Assert.assertEquals(read.getReadLength(), 10);
-        // TODO -- add some tests here using read
-    }
-
-    /**
-     * A unit test that creates a GenomeLoc for testing
-     */
-    @Test()
-    public void testWithAGenomeLoc() {
-        final GenomeLoc loc = genomeLocParser.createGenomeLoc("1", 1, 10);
-        Assert.assertEquals(loc.size(), 10);
-        // TODO -- add some tests here using the loc
-    }
-
-    /**
-     * A unit test that creates an artificial read for testing some code that uses reads
-     *
-     * Note that effective creation of RBPs isn't so good.  If you need pileups of specific properties, you shoud
-     * look into building them yourself as in the example below
-     */
-    @Test()
-    public void testWithAPileup() {
-        final SAMFileHeader header = ArtificialSAMUtils.createArtificialSamHeader(seq.getSequenceDictionary());
-        final GenomeLoc myLocation = genomeLocParser.createGenomeLoc("1", 10);
-        final ReadBackedPileup pileup = ArtificialSAMUtils.createReadBackedPileup(header, myLocation, 10, 400, 10);
-        Assert.assertFalse(pileup.isEmpty());
-        // TODO -- add some tests here using pileup
-    }
-
-    /**
-     * A unit test that creates an artificial read for testing some code that uses reads
-     *
-     * Builds the pileup from scratch to have specific properties
-     */
-    @Test()
-    public void testBuildingAPileupWithSpecificProperties() {
-        final SAMFileHeader header = ArtificialSAMUtils.createArtificialSamHeader(seq.getSequenceDictionary());
-        final GenomeLoc myLocation = genomeLocParser.createGenomeLoc("1", 10);
-
-        final int pileupSize = 100;
-        final int readLength = 10;
-        final List<GATKSAMRecord> reads = new LinkedList<GATKSAMRecord>();
-        for ( int i = 0; i < pileupSize; i++ ) {
-            final GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(header, "myRead" + i, 0, 1, readLength);
-            final byte[] bases = Utils.dupBytes((byte)'A', readLength);
-            bases[0] = (byte)(i % 2 == 0 ? 'A' : 'C'); // every other base is a C
-
-            // set the read's bases and quals
-            read.setReadBases(bases);
-            read.setBaseQualities(Utils.dupBytes((byte)30, readLength));
-            reads.add(read);
-        }
-
-        // create a pileup with all reads having offset 0
-        final ReadBackedPileup pileup = new ReadBackedPileupImpl(myLocation, reads, 0);
-        // TODO -- add some tests here using pileup
-
-        // this code ensures that the pileup example is correct.  Can be deleted
-        Assert.assertEquals(pileup.getNumberOfElements(), pileupSize);
-        int nA = 0, nC = 0;
-        for ( final PileupElement p : pileup ) {
-            if ( p.getBase() == 'A' ) nA++;
-            if ( p.getBase() == 'C' ) nC++;
-        }
-        Assert.assertEquals(nA, pileupSize / 2);
-        Assert.assertEquals(nC, pileupSize / 2);
-
-    }
-
-    /**
-     * A unit test that creates an artificial read for testing some code that uses reads
-     */
-    @Test()
-    public void testWithBAMFile() {
-        // create a fake BAM file, and iterate through it
-        final ArtificialBAMBuilder bamBuilder = new ArtificialBAMBuilder(seq, 20, 10);
-        final File bam = bamBuilder.makeTemporarilyBAMFile();
-        final SAMFileReader reader = new SAMFileReader(bam);
-        reader.setSAMRecordFactory(new GATKSamRecordFactory());
-
-        final Iterator<SAMRecord> bamIt = reader.iterator();
-        while ( bamIt.hasNext() ) {
-            final GATKSAMRecord read = (GATKSAMRecord)bamIt.next(); // all reads are actually GATKSAMRecords
-            // TODO -- add some tests that use reads from a BAM
-        }
-    }
-
-    /**
-     * Test code that creates VariantContexts
-     */
-    @Test()
-    public void testWithVariantContext() throws Exception {
-        final List<Allele> alleles = Arrays.asList(Allele.create("A", true), Allele.create("C"));
-        final VariantContext vc = new VariantContextBuilder("test", "1", 10, 10, alleles).make();
-        Assert.assertTrue(vc.getAlleles().size() >= 0);
-        // TODO -- add some tests that use VariantContext
-    }
-}
\ No newline at end of file
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/GATKTextReporter.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/GATKTextReporter.java
deleted file mode 100644
index 957ccd2..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/GATKTextReporter.java
+++ /dev/null
@@ -1,41 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils;
-
-import org.testng.reporters.TextReporter;
-
-/**
- * HACK: Create a variant of the TestNG TextReporter that can be run with no
- *       arguments, and can therefore be added to the TestNG listener list.
- *
- * @author hanna
- * @version 0.1
- */
-public class GATKTextReporter extends TextReporter {
-    public GATKTextReporter() {
-        super("GATK test suite",2);
-    }
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/GenomeLocParserBenchmark.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/GenomeLocParserBenchmark.java
deleted file mode 100644
index 7f19879..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/GenomeLocParserBenchmark.java
+++ /dev/null
@@ -1,81 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils;
-
-import com.google.caliper.Param;
-import com.google.caliper.SimpleBenchmark;
-import htsjdk.samtools.reference.IndexedFastaSequenceFile;
-import org.broadinstitute.gatk.utils.fasta.CachingIndexedFastaSequenceFile;
-
-import java.io.File;
-
-/**
- * Caliper microbenchmark of genome loc parser
- */
-public class GenomeLocParserBenchmark extends SimpleBenchmark {
-    private IndexedFastaSequenceFile seq;
-    private final int ITERATIONS = 1000000;
-
-    @Param({"NEW", "NONE"})
-    GenomeLocParser.ValidationLevel validationLevel; // set automatically by framework
-
-    @Param({"true", "false"})
-    boolean useContigIndex; // set automatically by framework
-
-    @Override protected void setUp() throws Exception {
-        seq = new CachingIndexedFastaSequenceFile(new File("/Users/depristo/Desktop/broadLocal/localData/human_g1k_v37.fasta"));
-    }
-//
-//    public void timeSequentialCreationFromGenomeLoc(int rep) {
-//        final GenomeLocParser genomeLocParser = new GenomeLocParser(seq.getSequenceDictionary(), validationLevel);
-//        GenomeLoc last = genomeLocParser.createGenomeLoc("1", 1, 1);
-//        for ( int i = 0; i < rep; i++ ) {
-//            for ( int j = 1; j < ITERATIONS; j++ ) {
-//                if ( useContigIndex )
-//                    last = genomeLocParser.createGenomeLoc(last.getContig(), last.getContigIndex(), last.getStart() + 1);
-//                else
-//                    last = genomeLocParser.createGenomeLoc(last.getContig(), last.getStart() + 1);
-//            }
-//        }
-//    }
-//
-//    public void timeSequentialCreationFromGenomeLocOriginal(int rep) {
-//        final GenomeLocParserOriginal genomeLocParser = new GenomeLocParserOriginal(seq.getSequenceDictionary());
-//        GenomeLoc last = genomeLocParser.createGenomeLoc("1", 1, 1);
-//        for ( int i = 0; i < rep; i++ ) {
-//            for ( int j = 1; j < ITERATIONS; j++ ) {
-//                if ( useContigIndex )
-//                    last = genomeLocParser.createGenomeLoc(last.getContig(), last.getContigIndex(), last.getStart() + 1);
-//                else
-//                    last = genomeLocParser.createGenomeLoc(last.getContig(), last.getStart() + 1);
-//            }
-//        }
-//    }
-
-    public static void main(String[] args) {
-        com.google.caliper.Runner.main(GenomeLocParserBenchmark.class, args);
-    }
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/GenomeLocParserUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/GenomeLocParserUnitTest.java
deleted file mode 100644
index d413633..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/GenomeLocParserUnitTest.java
+++ /dev/null
@@ -1,509 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils;
-
-
-import htsjdk.samtools.SAMFileHeader;
-import htsjdk.samtools.SAMSequenceDictionary;
-import htsjdk.samtools.SAMSequenceRecord;
-import htsjdk.tribble.BasicFeature;
-import htsjdk.tribble.Feature;
-import org.broadinstitute.gatk.utils.BaseTest;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-import org.broadinstitute.gatk.utils.exceptions.UserException;
-import org.broadinstitute.gatk.utils.fasta.CachingIndexedFastaSequenceFile;
-import org.broadinstitute.gatk.utils.sam.ArtificialSAMUtils;
-import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
-import htsjdk.variant.variantcontext.Allele;
-import htsjdk.variant.variantcontext.VariantContext;
-import htsjdk.variant.variantcontext.VariantContextBuilder;
-import org.testng.Assert;
-import org.testng.annotations.BeforeClass;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
-
-import java.io.File;
-import java.io.FileNotFoundException;
-import java.util.Arrays;
-import java.util.LinkedList;
-import java.util.List;
-
-import static org.testng.Assert.assertEquals;
-import static org.testng.Assert.assertTrue;
-
-/**
- * @author aaron
- *         <p/>
- *         Class GenomeLocParserUnitTest
- *         <p/>
- *         Test out the functionality of the new genome loc parser
- */
-public class GenomeLocParserUnitTest extends BaseTest {
-    private GenomeLocParser genomeLocParser;
-    private SAMFileHeader header;
-
-    @BeforeClass
-         public void init() {
-        header = ArtificialSAMUtils.createArtificialSamHeader(1, 1, 10);
-        genomeLocParser = new GenomeLocParser(header.getSequenceDictionary());
-    }
-
-    @Test(expectedExceptions=UserException.MalformedGenomeLoc.class)
-    public void testGetContigIndex() {
-        assertEquals(genomeLocParser.getContigIndex("blah"), -1); // should not be in the reference
-    }                
-
-    @Test
-    public void testGetContigIndexValid() {
-        assertEquals(genomeLocParser.getContigIndex("chr1"), 0); // should be in the reference
-    }
-
-    @Test(expectedExceptions=UserException.class)
-    public void testGetContigInfoUnknownContig1() {
-        assertEquals(null, genomeLocParser.getContigInfo("blah")); // should *not* be in the reference
-    }
-
-    @Test(expectedExceptions=UserException.class)
-    public void testGetContigInfoUnknownContig2() {
-        assertEquals(null, genomeLocParser.getContigInfo(null)); // should *not* be in the reference
-    }
-
-    @Test()
-    public void testHasContigInfoUnknownContig1() {
-        assertEquals(false, genomeLocParser.contigIsInDictionary("blah")); // should *not* be in the reference
-    }
-
-    @Test()
-    public void testHasContigInfoUnknownContig2() {
-        assertEquals(false, genomeLocParser.contigIsInDictionary(null)); // should *not* be in the reference
-    }
-
-    @Test()
-    public void testHasContigInfoKnownContig() {
-        assertEquals(true, genomeLocParser.contigIsInDictionary("chr1")); // should be in the reference
-    }
-
-    @Test
-    public void testGetContigInfoKnownContig() {
-        assertEquals(0, "chr1".compareTo(genomeLocParser.getContigInfo("chr1").getSequenceName())); // should be in the reference
-    }
-
-    @Test(expectedExceptions=ReviewedGATKException.class)
-    public void testParseBadString() {
-        genomeLocParser.parseGenomeLoc("Bad:0-1");
-    }
-
-    @Test
-    public void testContigHasColon() {
-        SAMFileHeader header = new SAMFileHeader();
-        header.setSortOrder(htsjdk.samtools.SAMFileHeader.SortOrder.coordinate);
-        SAMSequenceDictionary dict = new SAMSequenceDictionary();
-        SAMSequenceRecord rec = new SAMSequenceRecord("c:h:r1", 10);
-        rec.setSequenceLength(10);
-        dict.addSequence(rec);
-        header.setSequenceDictionary(dict);
-
-        final GenomeLocParser myGenomeLocParser = new GenomeLocParser(header.getSequenceDictionary());
-        GenomeLoc loc = myGenomeLocParser.parseGenomeLoc("c:h:r1:4-5");
-        assertEquals(0, loc.getContigIndex());
-        assertEquals(loc.getStart(), 4);
-        assertEquals(loc.getStop(), 5);
-    }
-
-    @Test
-    public void testParseGoodString() {
-        GenomeLoc loc = genomeLocParser.parseGenomeLoc("chr1:1-10");
-        assertEquals(0, loc.getContigIndex());
-        assertEquals(loc.getStop(), 10);
-        assertEquals(loc.getStart(), 1);
-    }
-
-    @Test
-    public void testCreateGenomeLoc1() {
-        GenomeLoc loc = genomeLocParser.createGenomeLoc("chr1", 1, 100);
-        assertEquals(0, loc.getContigIndex());
-        assertEquals(loc.getStop(), 100);
-        assertEquals(loc.getStart(), 1);
-    }
-
-    @Test
-    public void testCreateGenomeLoc1point5() { // in honor of VAAL!
-        GenomeLoc loc = genomeLocParser.parseGenomeLoc("chr1:1");
-        assertEquals(0, loc.getContigIndex());
-        assertEquals(loc.getStop(), 1);
-        assertEquals(loc.getStart(), 1);
-    }
-
-    @Test
-    public void testCreateGenomeLoc2() {
-        GenomeLoc loc = genomeLocParser.createGenomeLoc("chr1", 1, 100);
-        assertEquals("chr1", loc.getContig());
-        assertEquals(loc.getStop(), 100);
-        assertEquals(loc.getStart(), 1);
-    }
-
-    @Test
-    public void testCreateGenomeLoc3() {
-        GenomeLoc loc = genomeLocParser.createGenomeLoc("chr1", 1);
-        assertEquals("chr1", loc.getContig());
-        assertEquals(loc.getStop(), 1);
-        assertEquals(loc.getStart(), 1);
-    }
-
-    @Test
-    public void testCreateGenomeLoc4() {
-        GenomeLoc loc = genomeLocParser.createGenomeLoc("chr1", 1);
-        assertEquals(0, loc.getContigIndex());
-        assertEquals(loc.getStop(), 1);
-        assertEquals(loc.getStart(), 1);
-    }
-
-    @Test
-    public void testCreateGenomeLoc5() {
-        GenomeLoc loc = genomeLocParser.createGenomeLoc("chr1", 1, 100);
-        GenomeLoc copy = genomeLocParser.createGenomeLoc(loc.getContig(),loc.getStart(),loc.getStop());
-        assertEquals(0, copy.getContigIndex());
-        assertEquals(copy.getStop(), 100);
-        assertEquals(copy.getStart(), 1);
-    }
-
-    @Test
-    public void testGenomeLocPlusSign() {
-        GenomeLoc loc = genomeLocParser.parseGenomeLoc("chr1:1+");
-        assertEquals(loc.getContigIndex(), 0);
-        assertEquals(loc.getStop(), 10); // the size
-        assertEquals(loc.getStart(), 1);
-    }
-
-    @Test
-    public void testGenomeLocParseOnlyChrome() {
-        GenomeLoc loc = genomeLocParser.parseGenomeLoc("chr1");
-        assertEquals(loc.getContigIndex(), 0);
-        assertEquals(loc.getStop(), 10); // the size
-        assertEquals(loc.getStart(), 1);
-    }
-
-    @Test(expectedExceptions=ReviewedGATKException.class)
-    public void testGenomeLocParseOnlyBadChrome() {
-        GenomeLoc loc = genomeLocParser.parseGenomeLoc("chr12");
-        assertEquals(loc.getContigIndex(), 0);
-        assertEquals(loc.getStop(), 10); // the size
-        assertEquals(loc.getStart(), 1);
-    }
-
-    @Test(expectedExceptions=ReviewedGATKException.class)
-    public void testGenomeLocBad() {
-        GenomeLoc loc = genomeLocParser.parseGenomeLoc("chr1:1-");
-        assertEquals(loc.getContigIndex(), 0);
-        assertEquals(loc.getStop(), 10); // the size
-        assertEquals(loc.getStart(), 1);
-    }
-
-    @Test(expectedExceptions=UserException.class)
-    public void testGenomeLocBad2() {
-        GenomeLoc loc = genomeLocParser.parseGenomeLoc("chr1:1-500-0");
-        assertEquals(loc.getContigIndex(), 0);
-        assertEquals(loc.getStop(), 10); // the size
-        assertEquals(loc.getStart(), 1);
-    }
-
-    @Test(expectedExceptions=UserException.class)
-    public void testGenomeLocBad3() {
-        GenomeLoc loc = genomeLocParser.parseGenomeLoc("chr1:1--0");
-        assertEquals(loc.getContigIndex(), 0);
-        assertEquals(loc.getStop(), 10); // the size
-        assertEquals(loc.getStart(), 1);
-    }
-
-    // test out the validating methods
-    @Test
-    public void testValidationOfGenomeLocs() {
-        assertTrue(genomeLocParser.isValidGenomeLoc("chr1",1,1));
-        assertTrue(!genomeLocParser.isValidGenomeLoc("chr2",1,1)); // shouldn't have an entry
-        assertTrue(!genomeLocParser.isValidGenomeLoc("chr1",1,11)); // past the end of the contig
-        assertTrue(!genomeLocParser.isValidGenomeLoc("chr1",-1,10)); // bad start
-        assertTrue(!genomeLocParser.isValidGenomeLoc("chr1",1,-2)); // bad stop
-        assertTrue( genomeLocParser.isValidGenomeLoc("chr1",-1,2, false)); // bad stop
-        assertTrue(!genomeLocParser.isValidGenomeLoc("chr1",10,11)); // bad start, past end
-        assertTrue( genomeLocParser.isValidGenomeLoc("chr1",10,11, false)); // bad start, past end
-        assertTrue(!genomeLocParser.isValidGenomeLoc("chr1",2,1)); // stop < start
-    }
-
-    @Test(expectedExceptions = ReviewedGATKException.class)
-    public void testValidateGenomeLoc() {
-        // bad contig index
-        genomeLocParser.validateGenomeLoc("chr1", 1, 1, 2, false);
-    }
-
-    private static class FlankingGenomeLocTestData extends TestDataProvider {
-        final GenomeLocParser parser;
-        final int basePairs;
-        final GenomeLoc original, flankStart, flankStop;
-
-        private FlankingGenomeLocTestData(String name, GenomeLocParser parser, int basePairs, String original, String flankStart, String flankStop) {
-            super(FlankingGenomeLocTestData.class, name);
-            this.parser = parser;
-            this.basePairs = basePairs;
-            this.original = parse(parser, original);
-            this.flankStart = flankStart == null ? null : parse(parser, flankStart);
-            this.flankStop = flankStop == null ? null : parse(parser, flankStop);
-        }
-
-        private static GenomeLoc parse(GenomeLocParser parser, String str) {
-            return "unmapped".equals(str) ? GenomeLoc.UNMAPPED : parser.parseGenomeLoc(str);
-        }
-    }
-
-    @DataProvider(name = "flankingGenomeLocs")
-    public Object[][] getFlankingGenomeLocs() {
-        int contigLength = 10000;
-        SAMFileHeader header = ArtificialSAMUtils.createArtificialSamHeader(1, 1, contigLength);
-        GenomeLocParser parser = new GenomeLocParser(header.getSequenceDictionary());
-
-        new FlankingGenomeLocTestData("atStartBase1", parser, 1,
-                "chr1:1", null, "chr1:2");
-
-        new FlankingGenomeLocTestData("atStartBase50", parser, 50,
-                "chr1:1", null, "chr1:2-51");
-
-        new FlankingGenomeLocTestData("atStartRange50", parser, 50,
-                "chr1:1-10", null, "chr1:11-60");
-
-        new FlankingGenomeLocTestData("atEndBase1", parser, 1,
-                "chr1:" + contigLength, "chr1:" + (contigLength - 1), null);
-
-        new FlankingGenomeLocTestData("atEndBase50", parser, 50,
-                "chr1:" + contigLength, String.format("chr1:%d-%d", contigLength - 50, contigLength - 1), null);
-
-        new FlankingGenomeLocTestData("atEndRange50", parser, 50,
-                String.format("chr1:%d-%d", contigLength - 10, contigLength),
-                String.format("chr1:%d-%d", contigLength - 60, contigLength - 11),
-                null);
-
-        new FlankingGenomeLocTestData("nearStartBase1", parser, 1,
-                "chr1:2", "chr1:1", "chr1:3");
-
-        new FlankingGenomeLocTestData("nearStartRange50", parser, 50,
-                "chr1:21-30", "chr1:1-20", "chr1:31-80");
-
-        new FlankingGenomeLocTestData("nearEndBase1", parser, 1,
-                "chr1:" + (contigLength - 1), "chr1:" + (contigLength - 2), "chr1:" + contigLength);
-
-        new FlankingGenomeLocTestData("nearEndRange50", parser, 50,
-                String.format("chr1:%d-%d", contigLength - 30, contigLength - 21),
-                String.format("chr1:%d-%d", contigLength - 80, contigLength - 31),
-                String.format("chr1:%d-%d", contigLength - 20, contigLength));
-
-        new FlankingGenomeLocTestData("beyondStartBase1", parser, 1,
-                "chr1:3", "chr1:2", "chr1:4");
-
-        new FlankingGenomeLocTestData("beyondStartRange50", parser, 50,
-                "chr1:101-200", "chr1:51-100", "chr1:201-250");
-
-        new FlankingGenomeLocTestData("beyondEndBase1", parser, 1,
-                "chr1:" + (contigLength - 3),
-                "chr1:" + (contigLength - 4),
-                "chr1:" + (contigLength - 2));
-
-        new FlankingGenomeLocTestData("beyondEndRange50", parser, 50,
-                String.format("chr1:%d-%d", contigLength - 200, contigLength - 101),
-                String.format("chr1:%d-%d", contigLength - 250, contigLength - 201),
-                String.format("chr1:%d-%d", contigLength - 100, contigLength - 51));
-
-        new FlankingGenomeLocTestData("unmapped", parser, 50,
-                "unmapped", null, null);
-
-        new FlankingGenomeLocTestData("fullContig", parser, 50,
-                "chr1", null, null);
-
-        return FlankingGenomeLocTestData.getTests(FlankingGenomeLocTestData.class);
-    }
-
-    @Test(dataProvider = "flankingGenomeLocs")
-    public void testCreateGenomeLocAtStart(FlankingGenomeLocTestData data) {
-        GenomeLoc actual = data.parser.createGenomeLocAtStart(data.original, data.basePairs);
-        String description = String.format("%n      name: %s%n  original: %s%n    actual: %s%n  expected: %s%n",
-                data.toString(), data.original, actual, data.flankStart);
-        assertEquals(actual, data.flankStart, description);
-    }
-
-    @Test(dataProvider = "flankingGenomeLocs")
-    public void testCreateGenomeLocAtStop(FlankingGenomeLocTestData data) {
-        GenomeLoc actual = data.parser.createGenomeLocAtStop(data.original, data.basePairs);
-        String description = String.format("%n      name: %s%n  original: %s%n    actual: %s%n  expected: %s%n",
-                data.toString(), data.original, actual, data.flankStop);
-        assertEquals(actual, data.flankStop, description);
-    }
-
-    @DataProvider(name = "parseGenomeLoc")
-    public Object[][] makeParsingTest() {
-        final List<Object[]> tests = new LinkedList<Object[]>();
-
-        tests.add(new Object[]{ "chr1:10", "chr1", 10 });
-        tests.add(new Object[]{ "chr1:100", "chr1", 100 });
-        tests.add(new Object[]{ "chr1:1000", "chr1", 1000 });
-        tests.add(new Object[]{ "chr1:1,000", "chr1", 1000 });
-        tests.add(new Object[]{ "chr1:10000", "chr1", 10000 });
-        tests.add(new Object[]{ "chr1:10,000", "chr1", 10000 });
-        tests.add(new Object[]{ "chr1:100000", "chr1", 100000 });
-        tests.add(new Object[]{ "chr1:100,000", "chr1", 100000 });
-        tests.add(new Object[]{ "chr1:1000000", "chr1", 1000000 });
-        tests.add(new Object[]{ "chr1:1,000,000", "chr1", 1000000 });
-        tests.add(new Object[]{ "chr1:1000,000", "chr1", 1000000 });
-        tests.add(new Object[]{ "chr1:1,000000", "chr1", 1000000 });
-
-        return tests.toArray(new Object[][]{});
-    }
-
-    @Test( dataProvider = "parseGenomeLoc")
-    public void testParsingPositions(final String string, final String contig, final int start) {
-        SAMFileHeader header = ArtificialSAMUtils.createArtificialSamHeader(1, 1, 10000000);
-        GenomeLocParser genomeLocParser = new GenomeLocParser(header.getSequenceDictionary());
-        final GenomeLoc loc = genomeLocParser.parseGenomeLoc(string);
-        Assert.assertEquals(loc.getContig(), contig);
-        Assert.assertEquals(loc.getStart(), start);
-        Assert.assertEquals(loc.getStop(), start);
-    }
-
-    @Test( )
-    public void testCreationFromSAMRecord() {
-        final GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(header, "foo", 0, 1, 5);
-        final GenomeLoc loc = genomeLocParser.createGenomeLoc(read);
-        Assert.assertEquals(loc.getContig(), read.getReferenceName());
-        Assert.assertEquals(loc.getContigIndex(), (int)read.getReferenceIndex());
-        Assert.assertEquals(loc.getStart(), read.getAlignmentStart());
-        Assert.assertEquals(loc.getStop(), read.getAlignmentEnd());
-    }
-
-    @Test( )
-    public void testCreationFromSAMRecordUnmapped() {
-        final GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(header, "foo", 0, 1, 5);
-        read.setReadUnmappedFlag(true);
-        read.setReferenceIndex(-1);
-        final GenomeLoc loc = genomeLocParser.createGenomeLoc(read);
-        Assert.assertTrue(loc.isUnmapped());
-    }
-
-    @Test( )
-    public void testCreationFromSAMRecordUnmappedButOnGenome() {
-        final GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(header, "foo", 0, 1, 5);
-        read.setReadUnmappedFlag(true);
-        read.setCigarString("*");
-        final GenomeLoc loc = genomeLocParser.createGenomeLoc(read);
-        Assert.assertEquals(loc.getContig(), read.getReferenceName());
-        Assert.assertEquals(loc.getContigIndex(), (int)read.getReferenceIndex());
-        Assert.assertEquals(loc.getStart(), read.getAlignmentStart());
-        Assert.assertEquals(loc.getStop(), read.getAlignmentStart());
-    }
-
-    @Test
-    public void testCreationFromFeature() {
-        final Feature feature = new BasicFeature("chr1", 1, 5);
-        final GenomeLoc loc = genomeLocParser.createGenomeLoc(feature);
-        Assert.assertEquals(loc.getContig(), feature.getChr());
-        Assert.assertEquals(loc.getStart(), feature.getStart());
-        Assert.assertEquals(loc.getStop(), feature.getEnd());
-    }
-
-    @Test
-    public void testCreationFromVariantContext() {
-        final VariantContext feature = new VariantContextBuilder("x", "chr1", 1, 5, Arrays.asList(Allele.create("AAAAA", true))).make();
-        final GenomeLoc loc = genomeLocParser.createGenomeLoc(feature);
-        Assert.assertEquals(loc.getContig(), feature.getChr());
-        Assert.assertEquals(loc.getStart(), feature.getStart());
-        Assert.assertEquals(loc.getStop(), feature.getEnd());
-    }
-
-    @Test
-    public void testcreateGenomeLocOnContig() throws FileNotFoundException {
-        final CachingIndexedFastaSequenceFile seq = new CachingIndexedFastaSequenceFile(new File(b37KGReference));
-        final SAMSequenceDictionary dict = seq.getSequenceDictionary();
-        final GenomeLocParser genomeLocParser = new GenomeLocParser(dict);
-
-        for ( final SAMSequenceRecord rec : dict.getSequences() ) {
-            final GenomeLoc loc = genomeLocParser.createOverEntireContig(rec.getSequenceName());
-            Assert.assertEquals(loc.getContig(), rec.getSequenceName());
-            Assert.assertEquals(loc.getStart(), 1);
-            Assert.assertEquals(loc.getStop(), rec.getSequenceLength());
-        }
-    }
-
-    @DataProvider(name = "GenomeLocOnContig")
-    public Object[][] makeGenomeLocOnContig() {
-        final List<Object[]> tests = new LinkedList<Object[]>();
-
-        final int contigLength = header.getSequence(0).getSequenceLength();
-        for ( int start = -10; start < contigLength + 10; start++ ) {
-            for ( final int len : Arrays.asList(1, 10, 20) ) {
-                tests.add(new Object[]{ "chr1", start, start + len });
-            }
-        }
-
-        return tests.toArray(new Object[][]{});
-    }
-
-    @Test( dataProvider = "GenomeLocOnContig")
-    public void testGenomeLocOnContig(final String contig, final int start, final int stop) {
-        final int contigLength = header.getSequence(0).getSequenceLength();
-        final GenomeLoc loc = genomeLocParser.createGenomeLocOnContig(contig, start, stop);
-
-        if ( stop < 1 || start > contigLength )
-            Assert.assertNull(loc, "GenomeLoc should be null if the start/stops are not meaningful");
-        else {
-            Assert.assertNotNull(loc);
-            Assert.assertEquals(loc.getContig(), contig);
-            Assert.assertEquals(loc.getStart(), Math.max(start, 1));
-            Assert.assertEquals(loc.getStop(), Math.min(stop, contigLength));
-        }
-    }
-
-    @DataProvider(name = "GenomeLocPadding")
-    public Object[][] makeGenomeLocPadding() {
-        final List<Object[]> tests = new LinkedList<Object[]>();
-
-        final int contigLength = header.getSequence(0).getSequenceLength();
-        for ( int pad = 0; pad < contigLength + 1; pad++) {
-            for ( int start = 1; start < contigLength; start++ ) {
-                for ( int stop = start; stop < contigLength; stop++ ) {
-                    tests.add(new Object[]{ genomeLocParser.createGenomeLoc("chr1", start, stop), pad});
-                }
-            }
-        }
-
-        return tests.toArray(new Object[][]{});
-    }
-
-    @Test( dataProvider = "GenomeLocPadding")
-    public void testGenomeLocPadding(final GenomeLoc input, final int pad) {
-        final int contigLength = header.getSequence(0).getSequenceLength();
-        final GenomeLoc padded = genomeLocParser.createPaddedGenomeLoc(input, pad);
-
-        Assert.assertNotNull(padded);
-        Assert.assertEquals(padded.getContig(), input.getContig());
-        Assert.assertEquals(padded.getStart(), Math.max(input.getStart() - pad, 1));
-        Assert.assertEquals(padded.getStop(), Math.min(input.getStop() + pad, contigLength));
-    }
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/GenomeLocSortedSetUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/GenomeLocSortedSetUnitTest.java
deleted file mode 100644
index 6553120..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/GenomeLocSortedSetUnitTest.java
+++ /dev/null
@@ -1,405 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils;
-
-import htsjdk.samtools.SAMFileHeader;
-import org.broadinstitute.gatk.utils.BaseTest;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-import org.broadinstitute.gatk.utils.fasta.CachingIndexedFastaSequenceFile;
-import org.broadinstitute.gatk.utils.sam.ArtificialSAMUtils;
-
-import static org.testng.Assert.assertEquals;
-import static org.testng.Assert.assertFalse;
-import static org.testng.Assert.assertTrue;
-
-import org.testng.Assert;
-import org.testng.annotations.BeforeClass;
-import org.testng.annotations.BeforeMethod;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
-
-import java.io.File;
-import java.util.*;
-
-/**
- *
- * User: aaron
- * Date: May 22, 2009
- * Time: 2:14:07 PM
- *
- * The Broad Institute
- * SOFTWARE COPYRIGHT NOTICE AGREEMENT 
- * This software and its documentation are copyright 2009 by the
- * Broad Institute/Massachusetts Institute of Technology. All rights are reserved.
- *
- * This software is supplied without any warranty or guaranteed support whatsoever. Neither
- * the Broad Institute nor MIT can be responsible for its use, misuse, or functionality.
- *
- */
-
-
-/**
- * @author aaron
- * @version 1.0
- * <p/>
- * Class GenomeLocSetTest
- * <p/>
- * This tests the functions of the GenomeLocSet
- */
-public class GenomeLocSortedSetUnitTest extends BaseTest {
-
-    private GenomeLocSortedSet mSortedSet = null;
-    private SAMFileHeader header = ArtificialSAMUtils.createArtificialSamHeader(NUMBER_OF_CHROMOSOMES, STARTING_CHROMOSOME, CHROMOSOME_SIZE);
-    private static final int NUMBER_OF_CHROMOSOMES = 5;
-    private static final int STARTING_CHROMOSOME = 1;
-    private static final int CHROMOSOME_SIZE = 1000;
-
-    private GenomeLocParser genomeLocParser;
-    private String contigOneName;
-
-    @BeforeClass
-    public void setup() {
-        genomeLocParser = new GenomeLocParser(header.getSequenceDictionary());
-        contigOneName = header.getSequenceDictionary().getSequence(1).getSequenceName();
-    }
-
-    @BeforeMethod
-    public void initializeSortedSet() {
-        mSortedSet = new GenomeLocSortedSet(genomeLocParser);        
-    }
-
-    @Test
-    public void testAdd() {
-        GenomeLoc g = genomeLocParser.createGenomeLoc(contigOneName, 0, 0);
-        assertTrue(mSortedSet.size() == 0);
-        mSortedSet.add(g);
-        assertTrue(mSortedSet.size() == 1);
-    }
-
-    @Test
-    public void testRemove() {
-        assertTrue(mSortedSet.size() == 0);
-        GenomeLoc g = genomeLocParser.createGenomeLoc(contigOneName, 0, 0);
-        mSortedSet.add(g);
-        assertTrue(mSortedSet.size() == 1);
-        mSortedSet.remove(g);
-        assertTrue(mSortedSet.size() == 0);
-    }
-
-    @Test
-    public void addRegion() {
-        assertTrue(mSortedSet.size() == 0);
-        GenomeLoc g = genomeLocParser.createGenomeLoc(contigOneName, 1, 50);
-        mSortedSet.add(g);
-        GenomeLoc f = genomeLocParser.createGenomeLoc(contigOneName, 30, 80);
-        mSortedSet.addRegion(f);
-        assertTrue(mSortedSet.size() == 1);
-    }
-
-    @Test
-    public void addRegionsOutOfOrder() {
-        final String contigTwoName = header.getSequenceDictionary().getSequence(2).getSequenceName();
-        assertTrue(mSortedSet.size() == 0);
-        GenomeLoc g = genomeLocParser.createGenomeLoc(contigTwoName, 1, 50);
-        mSortedSet.add(g);
-        GenomeLoc f = genomeLocParser.createGenomeLoc(contigOneName, 30, 80);
-        mSortedSet.addRegion(f);
-        assertTrue(mSortedSet.size() == 2);
-        assertTrue(mSortedSet.toList().get(0).getContig().equals(contigOneName));
-        assertTrue(mSortedSet.toList().get(1).getContig().equals(contigTwoName));
-    }
-
-    @Test(expectedExceptions = IllegalArgumentException.class)
-    public void addThrowsException() {
-        assertTrue(mSortedSet.size() == 0);
-        GenomeLoc g = genomeLocParser.createGenomeLoc(contigOneName, 1, 50);
-        mSortedSet.add(g);
-        GenomeLoc f = genomeLocParser.createGenomeLoc(contigOneName, 30, 80);
-        mSortedSet.add(f);
-    }
-
-    @Test(expectedExceptions=IllegalArgumentException.class)
-    public void testAddDuplicate() {
-        assertTrue(mSortedSet.size() == 0);
-        GenomeLoc g = genomeLocParser.createGenomeLoc(contigOneName, 0, 0);
-        mSortedSet.add(g);
-        assertTrue(mSortedSet.size() == 1);
-        mSortedSet.add(g);
-    }
-
-    @Test
-    public void mergingOverlappingBelow() {
-        GenomeLoc g = genomeLocParser.createGenomeLoc(contigOneName, 0, 50);
-        GenomeLoc e = genomeLocParser.createGenomeLoc(contigOneName, 49, 100);
-        assertTrue(mSortedSet.size() == 0);
-        mSortedSet.add(g);
-        assertTrue(mSortedSet.size() == 1);
-        mSortedSet.addRegion(e);
-        assertTrue(mSortedSet.size() == 1);
-        Iterator<GenomeLoc> iter = mSortedSet.iterator();
-        GenomeLoc loc = iter.next();
-        assertEquals(loc.getStart(), 0);
-        assertEquals(loc.getStop(), 100);
-        assertEquals(loc.getContigIndex(), 1);
-    }
-
-    @Test
-    public void overlap() {
-        for ( int i = 1; i < 6; i++ ) {
-            final int start = i * 10;
-            mSortedSet.add(genomeLocParser.createGenomeLoc(contigOneName, start, start + 1));
-        }
-
-        // test matches in and around interval
-        assertFalse(mSortedSet.overlaps(genomeLocParser.createGenomeLoc(contigOneName, 9, 9)));
-        assertTrue(mSortedSet.overlaps(genomeLocParser.createGenomeLoc(contigOneName, 10, 10)));
-        assertTrue(mSortedSet.overlaps(genomeLocParser.createGenomeLoc(contigOneName, 11, 11)));
-        assertFalse(mSortedSet.overlaps(genomeLocParser.createGenomeLoc(contigOneName, 12, 12)));
-
-        // test matches spanning intervals
-        assertTrue(mSortedSet.overlaps(genomeLocParser.createGenomeLoc(contigOneName, 14, 20)));
-        assertTrue(mSortedSet.overlaps(genomeLocParser.createGenomeLoc(contigOneName, 11, 15)));
-        assertTrue(mSortedSet.overlaps(genomeLocParser.createGenomeLoc(contigOneName, 30, 40)));
-        assertTrue(mSortedSet.overlaps(genomeLocParser.createGenomeLoc(contigOneName, 51, 53)));
-
-        // test miss
-        assertFalse(mSortedSet.overlaps(genomeLocParser.createGenomeLoc(contigOneName, 12, 19)));
-
-        // test exact match after miss
-        assertTrue(mSortedSet.overlaps(genomeLocParser.createGenomeLoc(contigOneName, 40, 41)));
-
-        // test matches at beginning of intervals
-        assertFalse(mSortedSet.overlaps(genomeLocParser.createGenomeLoc(contigOneName, 5, 6)));
-        assertTrue(mSortedSet.overlaps(genomeLocParser.createGenomeLoc(contigOneName, 0, 10)));
-
-        // test matches at end of intervals
-        assertFalse(mSortedSet.overlaps(genomeLocParser.createGenomeLoc(contigOneName, 52, 53)));
-        assertTrue(mSortedSet.overlaps(genomeLocParser.createGenomeLoc(contigOneName, 51, 53)));
-        assertFalse(mSortedSet.overlaps(genomeLocParser.createGenomeLoc(contigOneName, 52, 53)));
-    }
-
-    @Test
-    public void mergingOverlappingAbove() {
-        GenomeLoc e = genomeLocParser.createGenomeLoc(contigOneName, 0, 50);
-        GenomeLoc g = genomeLocParser.createGenomeLoc(contigOneName, 49, 100);
-        assertTrue(mSortedSet.size() == 0);
-        mSortedSet.add(g);
-        assertTrue(mSortedSet.size() == 1);
-        mSortedSet.addRegion(e);
-        assertTrue(mSortedSet.size() == 1);
-        Iterator<GenomeLoc> iter = mSortedSet.iterator();
-        GenomeLoc loc = iter.next();
-        assertEquals(loc.getStart(), 0);
-        assertEquals(loc.getStop(), 100);
-        assertEquals(loc.getContigIndex(), 1);
-    }
-
-    @Test
-    public void deleteAllByRegion() {
-        GenomeLoc e = genomeLocParser.createGenomeLoc(contigOneName, 1, 100);
-        mSortedSet.add(e);
-        for (int x = 1; x < 101; x++) {
-            GenomeLoc del = genomeLocParser.createGenomeLoc(contigOneName,x,x);
-            mSortedSet = mSortedSet.subtractRegions(new GenomeLocSortedSet(genomeLocParser,del));
-        }
-        assertTrue(mSortedSet.isEmpty());
-    }
-
-    @Test
-    public void deleteSomeByRegion() {
-        GenomeLoc e = genomeLocParser.createGenomeLoc(contigOneName, 1, 100);
-        mSortedSet.add(e);
-        for (int x = 1; x < 50; x++) {
-            GenomeLoc del = genomeLocParser.createGenomeLoc(contigOneName,x,x);
-            mSortedSet = mSortedSet.subtractRegions(new GenomeLocSortedSet(genomeLocParser,del));
-        }
-        assertTrue(!mSortedSet.isEmpty());
-        assertTrue(mSortedSet.size() == 1);
-        GenomeLoc loc = mSortedSet.iterator().next();        
-        assertTrue(loc.getStop() == 100);
-        assertTrue(loc.getStart() == 50);
-
-    }
-
-    @Test
-    public void deleteSuperRegion() {
-        GenomeLoc e = genomeLocParser.createGenomeLoc(contigOneName, 10, 20);
-        GenomeLoc g = genomeLocParser.createGenomeLoc(contigOneName, 70, 100);
-        mSortedSet.add(g);
-        mSortedSet.addRegion(e);
-        assertTrue(mSortedSet.size() == 2);
-        // now delete a region
-        GenomeLoc d = genomeLocParser.createGenomeLoc(contigOneName, 15, 75);
-        mSortedSet = mSortedSet.subtractRegions(new GenomeLocSortedSet(genomeLocParser,d));
-        Iterator<GenomeLoc> iter = mSortedSet.iterator();
-        GenomeLoc loc = iter.next();
-        assertTrue(loc.getStart() == 10);
-        assertTrue(loc.getStop() == 14);
-        assertTrue(loc.getContigIndex() == 1);
-
-        loc = iter.next();
-        assertTrue(loc.getStart() == 76);
-        assertTrue(loc.getStop() == 100);
-        assertTrue(loc.getContigIndex() == 1);
-    }
-
-    @Test
-    public void substractComplexExample() {
-        GenomeLoc e = genomeLocParser.createGenomeLoc(contigOneName, 1, 20);
-        mSortedSet.add(e);
-
-        GenomeLoc r1 = genomeLocParser.createGenomeLoc(contigOneName, 3, 5);
-        GenomeLoc r2 = genomeLocParser.createGenomeLoc(contigOneName, 10, 12);
-        GenomeLoc r3 = genomeLocParser.createGenomeLoc(contigOneName, 16, 18);
-        GenomeLocSortedSet toExclude = new GenomeLocSortedSet(genomeLocParser,Arrays.asList(r1, r2, r3));
-
-        GenomeLocSortedSet remaining = mSortedSet.subtractRegions(toExclude);
-//        logger.debug("Initial   " + mSortedSet);
-//        logger.debug("Exclude   " + toExclude);
-//        logger.debug("Remaining " + remaining);
-
-        assertEquals(mSortedSet.coveredSize(), 20);
-        assertEquals(toExclude.coveredSize(), 9);
-        assertEquals(remaining.coveredSize(), 11);
-
-        Iterator<GenomeLoc> it = remaining.iterator();
-        GenomeLoc p1 = it.next();
-        GenomeLoc p2 = it.next();
-        GenomeLoc p3 = it.next();
-        GenomeLoc p4 = it.next();
-
-        assertEquals(genomeLocParser.createGenomeLoc(contigOneName, 1, 2), p1);
-        assertEquals(genomeLocParser.createGenomeLoc(contigOneName, 6, 9), p2);
-        assertEquals(genomeLocParser.createGenomeLoc(contigOneName, 13, 15), p3);
-        assertEquals(genomeLocParser.createGenomeLoc(contigOneName, 19, 20), p4);
-    }
-
-    private void testSizeBeforeLocX(int pos, int size) {
-        GenomeLoc test = genomeLocParser.createGenomeLoc(contigOneName, pos, pos);
-        assertEquals(mSortedSet.sizeBeforeLoc(test), size, String.format("X pos=%d size=%d", pos, size));
-    }
-
-    @Test
-    public void testSizeBeforeLoc() {
-        GenomeLoc r1 = genomeLocParser.createGenomeLoc(contigOneName, 3, 5);
-        GenomeLoc r2 = genomeLocParser.createGenomeLoc(contigOneName, 10, 12);
-        GenomeLoc r3 = genomeLocParser.createGenomeLoc(contigOneName, 16, 18);
-        mSortedSet.addAll(Arrays.asList(r1,r2,r3));
-
-        testSizeBeforeLocX(2, 0);
-        testSizeBeforeLocX(3, 0);
-        testSizeBeforeLocX(4, 1);
-        testSizeBeforeLocX(5, 2);
-        testSizeBeforeLocX(6, 3);
-
-        testSizeBeforeLocX(10, 3);
-        testSizeBeforeLocX(11, 4);
-        testSizeBeforeLocX(12, 5);
-        testSizeBeforeLocX(13, 6);
-        testSizeBeforeLocX(15, 6);
-
-        testSizeBeforeLocX(16, 6);
-        testSizeBeforeLocX(17, 7);
-        testSizeBeforeLocX(18, 8);
-        testSizeBeforeLocX(19, 9);
-        testSizeBeforeLocX(50, 9);
-        testSizeBeforeLocX(50, (int)mSortedSet.coveredSize());
-    }
-
-
-    @Test
-    public void fromSequenceDictionary() {
-        mSortedSet = GenomeLocSortedSet.createSetFromSequenceDictionary(this.header.getSequenceDictionary());
-        // we should have sequence
-        assertTrue(mSortedSet.size() == GenomeLocSortedSetUnitTest.NUMBER_OF_CHROMOSOMES);
-        int seqNumber = 0;
-        for (GenomeLoc loc : mSortedSet) {
-            assertTrue(loc.getStart() == 1);
-            assertTrue(loc.getStop() == GenomeLocSortedSetUnitTest.CHROMOSOME_SIZE);
-            assertTrue(loc.getContigIndex() == seqNumber);
-            ++seqNumber;
-        }
-        assertTrue(seqNumber == GenomeLocSortedSetUnitTest.NUMBER_OF_CHROMOSOMES);
-    }
-
-    // -----------------------------------------------------------------------------------------------
-    //
-    // Test getOverlapping
-    //
-    // -----------------------------------------------------------------------------------------------
-
-    @DataProvider(name = "GetOverlapping")
-    public Object[][] makeGetOverlappingTest() throws Exception {
-        final GenomeLocParser genomeLocParser = new GenomeLocParser(new CachingIndexedFastaSequenceFile(new File(b37KGReference)));
-
-        List<Object[]> tests = new ArrayList<Object[]>();
-
-        final GenomeLoc prev1 = genomeLocParser.createGenomeLoc("19", 1, 10);
-        final GenomeLoc prev2 = genomeLocParser.createGenomeLoc("19", 20, 50);
-        final GenomeLoc post1 = genomeLocParser.createGenomeLoc("21", 1, 10);
-        final GenomeLoc post2 = genomeLocParser.createGenomeLoc("21", 20, 50);
-
-        final int chr20Length = genomeLocParser.getContigs().getSequence("20").getSequenceLength();
-        for ( final int regionStart : Arrays.asList(1, 10, chr20Length - 10, chr20Length) ) {
-            for ( final int regionSize : Arrays.asList(1, 10, 100) ) {
-                final GenomeLoc region = genomeLocParser.createGenomeLocOnContig("20", regionStart, regionStart + regionSize);
-                final GenomeLoc spanning = genomeLocParser.createGenomeLocOnContig("20", regionStart - 10, region.getStop() + 10);
-                final GenomeLoc before_into = genomeLocParser.createGenomeLocOnContig("20", regionStart - 10, regionStart + 1);
-                final GenomeLoc middle = genomeLocParser.createGenomeLocOnContig("20", regionStart + 1, regionStart + 2);
-                final GenomeLoc middle_past = genomeLocParser.createGenomeLocOnContig("20", region.getStop()-1, region.getStop()+10);
-
-                final List<GenomeLoc> potentials = new LinkedList<GenomeLoc>();
-                potentials.add(region);
-                if ( spanning != null ) potentials.add(spanning);
-                if ( before_into != null ) potentials.add(before_into);
-                if ( middle != null ) potentials.add(middle);
-                if ( middle_past != null ) potentials.add(middle_past);
-
-                for ( final int n : Arrays.asList(1, 2, 3) ) {
-                    for ( final List<GenomeLoc> regions : Utils.makePermutations(potentials, n, false) ) {
-                        tests.add(new Object[]{new GenomeLocSortedSet(genomeLocParser, regions), region});
-                        tests.add(new Object[]{new GenomeLocSortedSet(genomeLocParser, Utils.append(regions, prev1)), region});
-                        tests.add(new Object[]{new GenomeLocSortedSet(genomeLocParser, Utils.append(regions, prev1, prev2)), region});
-                        tests.add(new Object[]{new GenomeLocSortedSet(genomeLocParser, Utils.append(regions, post1)), region});
-                        tests.add(new Object[]{new GenomeLocSortedSet(genomeLocParser, Utils.append(regions, post1, post2)), region});
-                        tests.add(new Object[]{new GenomeLocSortedSet(genomeLocParser, Utils.append(regions, prev1, post1)), region});
-                        tests.add(new Object[]{new GenomeLocSortedSet(genomeLocParser, Utils.append(regions, prev1, prev2, post1, post2)), region});
-                    }
-                }
-            }
-        }
-
-        return tests.toArray(new Object[][]{});
-    }
-
-    @Test(dataProvider = "GetOverlapping")
-    public void testGetOverlapping(final GenomeLocSortedSet intervals, final GenomeLoc region) {
-        final List<GenomeLoc> expectedOverlapping = intervals.getOverlappingFullSearch(region);
-        final List<GenomeLoc> actualOverlapping = intervals.getOverlapping(region);
-        Assert.assertEquals(actualOverlapping, expectedOverlapping);
-        Assert.assertEquals(intervals.overlaps(region), ! expectedOverlapping.isEmpty(), "GenomeLocSortedSet.overlaps didn't return expected result");
-    }
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/GenomeLocUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/GenomeLocUnitTest.java
deleted file mode 100644
index ae86ca5..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/GenomeLocUnitTest.java
+++ /dev/null
@@ -1,386 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils;
-
-
-// the imports for unit testing.
-
-
-import htsjdk.samtools.reference.ReferenceSequenceFile;
-import htsjdk.samtools.SAMFileHeader;
-import org.broadinstitute.gatk.utils.BaseTest;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-import org.broadinstitute.gatk.utils.fasta.CachingIndexedFastaSequenceFile;
-import org.broadinstitute.gatk.utils.interval.IntervalMergingRule;
-import org.broadinstitute.gatk.utils.interval.IntervalUtils;
-import org.broadinstitute.gatk.utils.sam.ArtificialSAMUtils;
-import org.testng.Assert;
-import org.testng.annotations.BeforeClass;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
-
-import java.io.File;
-import java.io.FileNotFoundException;
-import java.util.*;
-
-/**
- * Basic unit test for GenomeLoc
- */
-public class GenomeLocUnitTest extends BaseTest {
-    private static ReferenceSequenceFile seq;
-    private GenomeLocParser genomeLocParser;
-
-    @BeforeClass
-    public void init() throws FileNotFoundException {
-        // sequence
-        seq = new CachingIndexedFastaSequenceFile(new File(hg18Reference));
-        genomeLocParser = new GenomeLocParser(seq);
-    }
-
-    /**
-     * Tests that we got a string parameter in correctly
-     */
-    @Test
-    public void testIsBetween() {
-        logger.warn("Executing testIsBetween");
-
-        GenomeLoc locMiddle = genomeLocParser.createGenomeLoc("chr1", 3, 3);
-
-        GenomeLoc locLeft = genomeLocParser.createGenomeLoc("chr1", 1, 1);
-        GenomeLoc locRight = genomeLocParser.createGenomeLoc("chr1", 5, 5);
-
-        Assert.assertTrue(locMiddle.isBetween(locLeft, locRight));
-        Assert.assertFalse(locLeft.isBetween(locMiddle, locRight));
-        Assert.assertFalse(locRight.isBetween(locLeft, locMiddle));
-
-    }
-    @Test
-    public void testContigIndex() {
-        logger.warn("Executing testContigIndex");
-        GenomeLoc locOne = genomeLocParser.createGenomeLoc("chr1",1,1);
-        Assert.assertEquals(1, locOne.getContigIndex());
-        Assert.assertEquals("chr1", locOne.getContig());
-
-        GenomeLoc locX = genomeLocParser.createGenomeLoc("chrX",1,1);
-        Assert.assertEquals(23, locX.getContigIndex());
-        Assert.assertEquals("chrX", locX.getContig());
-
-        GenomeLoc locNumber = genomeLocParser.createGenomeLoc(seq.getSequenceDictionary().getSequence(1).getSequenceName(),1,1);
-        Assert.assertEquals(1, locNumber.getContigIndex());
-        Assert.assertEquals("chr1", locNumber.getContig());
-        Assert.assertEquals(0, locOne.compareTo(locNumber));
-
-    }
-
-    @Test
-    public void testCompareTo() {
-        logger.warn("Executing testCompareTo");
-        GenomeLoc twoOne = genomeLocParser.createGenomeLoc("chr2", 1);
-        GenomeLoc twoFive = genomeLocParser.createGenomeLoc("chr2", 5);
-        GenomeLoc twoOtherFive = genomeLocParser.createGenomeLoc("chr2", 5);
-        Assert.assertEquals(twoFive.compareTo(twoOtherFive), 0);
-
-        Assert.assertEquals(twoOne.compareTo(twoFive), -1);
-        Assert.assertEquals(twoFive.compareTo(twoOne), 1);
-
-        GenomeLoc oneOne = genomeLocParser.createGenomeLoc("chr1", 5);
-        Assert.assertEquals(oneOne.compareTo(twoOne), -1);
-        Assert.assertEquals(twoOne.compareTo(oneOne), 1);
-    }
-
-
-    @Test
-    public void testUnmappedSort() {
-        GenomeLoc chr1 = genomeLocParser.createGenomeLoc("chr1",1,10000000);
-        GenomeLoc chr2 = genomeLocParser.createGenomeLoc("chr2",1,10000000);
-        GenomeLoc unmapped = GenomeLoc.UNMAPPED;
-
-        List<GenomeLoc> unmappedOnly = Arrays.asList(unmapped);
-        Collections.sort(unmappedOnly);
-        Assert.assertEquals(unmappedOnly.size(),1,"Wrong number of elements in unmapped-only list.");
-        Assert.assertEquals(unmappedOnly.get(0),unmapped,"List sorted in wrong order");
-
-        List<GenomeLoc> chr1Presorted = Arrays.asList(chr1,unmapped);
-        Collections.sort(chr1Presorted);
-        Assert.assertEquals(chr1Presorted.size(),2,"Wrong number of elements in chr1,unmapped list.");
-        Assert.assertEquals(chr1Presorted,Arrays.asList(chr1,unmapped),"List sorted in wrong order");
-
-        List<GenomeLoc> chr1Inverted = Arrays.asList(unmapped,chr1);
-        Collections.sort(chr1Inverted);
-        Assert.assertEquals(chr1Inverted.size(),2,"Wrong number of elements in chr1,unmapped list.");
-        Assert.assertEquals(chr1Inverted,Arrays.asList(chr1,unmapped),"List sorted in wrong order");
-
-        List<GenomeLoc> chr1and2Presorted = Arrays.asList(chr1,chr2,unmapped);
-        Collections.sort(chr1and2Presorted);
-        Assert.assertEquals(chr1and2Presorted.size(),3,"Wrong number of elements in chr1,chr2,unmapped list.");
-        Assert.assertEquals(chr1and2Presorted,Arrays.asList(chr1,chr2,unmapped),"List sorted in wrong order");
-
-        List<GenomeLoc> chr1and2UnmappedInFront = Arrays.asList(unmapped,chr1,chr2);
-        Collections.sort(chr1and2UnmappedInFront);
-        Assert.assertEquals(chr1and2UnmappedInFront.size(),3,"Wrong number of elements in unmapped,chr1,chr2 list.");
-        Assert.assertEquals(chr1and2UnmappedInFront,Arrays.asList(chr1,chr2,unmapped),"List sorted in wrong order");
-
-        List<GenomeLoc> chr1and2UnmappedSandwiched = Arrays.asList(chr1,unmapped,chr2);
-        Collections.sort(chr1and2UnmappedSandwiched);
-        Assert.assertEquals(chr1and2UnmappedSandwiched.size(),3,"Wrong number of elements in chr1,unmapped,chr2 list.");
-        Assert.assertEquals(chr1and2UnmappedSandwiched,Arrays.asList(chr1,chr2,unmapped),"List sorted in wrong order");
-    }
-
-    @Test
-    public void testUnmappedMerge() {
-        GenomeLoc chr1 = genomeLocParser.createGenomeLoc("chr1",1,10000000);
-        GenomeLoc unmapped = GenomeLoc.UNMAPPED;
-
-        List<GenomeLoc> oneUnmappedOnly = Arrays.asList(unmapped);
-        oneUnmappedOnly = IntervalUtils.sortAndMergeIntervals(genomeLocParser,oneUnmappedOnly, IntervalMergingRule.OVERLAPPING_ONLY).toList();
-        Assert.assertEquals(oneUnmappedOnly.size(),1,"Wrong number of elements in list.");
-        Assert.assertEquals(oneUnmappedOnly.get(0),unmapped,"List sorted in wrong order");
-
-        List<GenomeLoc> twoUnmapped = Arrays.asList(unmapped,unmapped);
-        twoUnmapped = IntervalUtils.sortAndMergeIntervals(genomeLocParser,twoUnmapped,IntervalMergingRule.OVERLAPPING_ONLY).toList();
-        Assert.assertEquals(twoUnmapped.size(),1,"Wrong number of elements in list.");
-        Assert.assertEquals(twoUnmapped.get(0),unmapped,"List sorted in wrong order");
-
-        List<GenomeLoc> twoUnmappedAtEnd = Arrays.asList(chr1,unmapped,unmapped);
-        twoUnmappedAtEnd = IntervalUtils.sortAndMergeIntervals(genomeLocParser,twoUnmappedAtEnd,IntervalMergingRule.OVERLAPPING_ONLY).toList();
-        Assert.assertEquals(twoUnmappedAtEnd.size(),2,"Wrong number of elements in list.");
-        Assert.assertEquals(twoUnmappedAtEnd,Arrays.asList(chr1,unmapped),"List sorted in wrong order");
-
-        List<GenomeLoc> twoUnmappedMixed = Arrays.asList(unmapped,chr1,unmapped);
-        twoUnmappedMixed = IntervalUtils.sortAndMergeIntervals(genomeLocParser,twoUnmappedMixed,IntervalMergingRule.OVERLAPPING_ONLY).toList();
-        Assert.assertEquals(twoUnmappedMixed.size(),2,"Wrong number of elements in list.");
-        Assert.assertEquals(twoUnmappedMixed,Arrays.asList(chr1,unmapped),"List sorted in wrong order");
-    }
-
-    // -------------------------------------------------------------------------------------
-    //
-    // testing overlap detection
-    //
-    // -------------------------------------------------------------------------------------
-
-    private class ReciprocalOverlapProvider extends TestDataProvider {
-        GenomeLoc gl1, gl2;
-        int overlapSize;
-        double overlapFraction;
-
-        private ReciprocalOverlapProvider(int start1, int stop1, int start2, int stop2) {
-            super(ReciprocalOverlapProvider.class);
-            gl1 = genomeLocParser.createGenomeLoc("chr1", start1, stop1);
-            gl2 = genomeLocParser.createGenomeLoc("chr1", start2, stop2);
-
-            int shared = 0;
-            for ( int i = start1; i <= stop1; i++ ) {
-                if ( i >= start2 && i <= stop2 )
-                    shared++;
-            }
-
-            this.overlapSize = shared;
-            this.overlapFraction = Math.min((1.0*shared)/gl1.size(), (1.0*shared)/gl2.size());
-            super.setName(String.format("%d-%d / %d-%d overlap=%d / %.2f", start1, stop1, start2, stop2, overlapSize, overlapFraction));
-        }
-    }
-
-    @DataProvider(name = "ReciprocalOverlapProvider")
-    public Object[][] makeReciprocalOverlapProvider() {
-        for ( int start1 = 1; start1 <= 10; start1++ ) {
-            for ( int stop1 = start1; stop1 <= 10; stop1++ ) {
-                new ReciprocalOverlapProvider(start1, stop1, 1, 10);
-                new ReciprocalOverlapProvider(start1, stop1, 5, 10);
-                new ReciprocalOverlapProvider(start1, stop1, 5, 7);
-                new ReciprocalOverlapProvider(start1, stop1, 5, 15);
-                new ReciprocalOverlapProvider(start1, stop1, 11, 20);
-
-                new ReciprocalOverlapProvider(1, 10, start1, stop1);
-                new ReciprocalOverlapProvider(5, 10, start1, stop1);
-                new ReciprocalOverlapProvider(5, 7, start1, stop1);
-                new ReciprocalOverlapProvider(5, 15, start1, stop1);
-                new ReciprocalOverlapProvider(11, 20, start1, stop1);
-            }
-        }
-
-        return ReciprocalOverlapProvider.getTests(ReciprocalOverlapProvider.class);
-    }
-
-    @Test(dataProvider = "ReciprocalOverlapProvider")
-    public void testReciprocalOverlapProvider(ReciprocalOverlapProvider cfg) {
-        if ( cfg.overlapSize == 0 ) {
-            Assert.assertFalse(cfg.gl1.overlapsP(cfg.gl2));
-        } else {
-            Assert.assertTrue(cfg.gl1.overlapsP(cfg.gl2));
-            Assert.assertEquals(cfg.gl1.intersect(cfg.gl2).size(), cfg.overlapSize);
-            Assert.assertEquals(cfg.gl1.reciprocialOverlapFraction(cfg.gl2), cfg.overlapFraction);
-        }
-    }
-
-    // -------------------------------------------------------------------------------------
-    //
-    // testing comparison, hashcode, and equals
-    //
-    // -------------------------------------------------------------------------------------
-
-    @DataProvider(name = "GenomeLocComparisons")
-    public Object[][] createGenomeLocComparisons() {
-        List<Object[]> tests = new ArrayList<Object[]>();
-
-        final int start = 10;
-        for ( int stop = start; stop < start + 3; stop++ ) {
-            final GenomeLoc g1 = genomeLocParser.createGenomeLoc("chr2", start, stop);
-            for ( final String contig : Arrays.asList("chr1", "chr2", "chr3")) {
-                for ( int start2 = start - 1; start2 <= stop + 1; start2++ ) {
-                    for ( int stop2 = start2; stop2 < stop + 2; stop2++ ) {
-                        final GenomeLoc g2 = genomeLocParser.createGenomeLoc(contig, start2, stop2);
-
-                        ComparisonResult cmp = ComparisonResult.EQUALS;
-                        if ( contig.equals("chr3") ) cmp = ComparisonResult.LESS_THAN;
-                        else if ( contig.equals("chr1") ) cmp = ComparisonResult.GREATER_THAN;
-                        else if ( start < start2 ) cmp = ComparisonResult.LESS_THAN;
-                        else if ( start > start2 ) cmp = ComparisonResult.GREATER_THAN;
-                        else if ( stop < stop2 ) cmp = ComparisonResult.LESS_THAN;
-                        else if ( stop > stop2 ) cmp = ComparisonResult.GREATER_THAN;
-
-                        tests.add(new Object[]{g1, g2, cmp});
-                    }
-                }
-            }
-        }
-
-        return tests.toArray(new Object[][]{});
-    }
-
-    private enum ComparisonResult {
-        LESS_THAN(-1),
-        EQUALS(0),
-        GREATER_THAN(1);
-
-        final int cmp;
-
-        private ComparisonResult(int cmp) {
-            this.cmp = cmp;
-        }
-    }
-
-    @Test(dataProvider = "GenomeLocComparisons")
-    public void testGenomeLocComparisons(GenomeLoc g1, GenomeLoc g2, ComparisonResult expected) {
-        Assert.assertEquals(g1.compareTo(g2), expected.cmp, "Comparing genome locs failed");
-        Assert.assertEquals(g1.equals(g2), expected == ComparisonResult.EQUALS);
-        if ( expected == ComparisonResult.EQUALS )
-            Assert.assertEquals(g1.hashCode(), g2.hashCode(), "Equal genome locs don't have the same hash code");
-    }
-
-    // -------------------------------------------------------------------------------------
-    //
-    // testing merging functionality
-    //
-    // -------------------------------------------------------------------------------------
-
-    private static final GenomeLoc loc1 = new GenomeLoc("1", 0, 10, 20);
-    private static final GenomeLoc loc2 = new GenomeLoc("1", 0, 21, 30);
-    private static final GenomeLoc loc3 = new GenomeLoc("1", 0, 31, 40);
-
-    private class MergeTest {
-        public List<GenomeLoc> locs;
-
-        private MergeTest(final List<GenomeLoc> locs) {
-            this.locs = locs;
-        }
-    }
-
-    @DataProvider(name = "SGLtest")
-    public Object[][] createFindVariantRegionsData() {
-        List<Object[]> tests = new ArrayList<Object[]>();
-
-        tests.add(new Object[]{new MergeTest(Arrays.<GenomeLoc>asList(loc1))});
-        tests.add(new Object[]{new MergeTest(Arrays.<GenomeLoc>asList(loc1, loc2))});
-        tests.add(new Object[]{new MergeTest(Arrays.<GenomeLoc>asList(loc1, loc2, loc3))});
-
-        return tests.toArray(new Object[][]{});
-    }
-
-    @Test(dataProvider = "SGLtest", enabled = true)
-    public void testSimpleGenomeLoc(MergeTest test) {
-        testMerge(test.locs);
-    }
-
-    @Test(expectedExceptions = ReviewedGATKException.class)
-    public void testNotContiguousLocs() {
-        final List<GenomeLoc> locs = new ArrayList<GenomeLoc>(1);
-        locs.add(loc1);
-        locs.add(loc3);
-        testMerge(locs);
-    }
-
-    private void testMerge(final List<GenomeLoc> locs) {
-        GenomeLoc result1 = locs.get(0);
-        for ( int i = 1; i < locs.size(); i++ )
-            result1 = GenomeLoc.merge(result1, locs.get(i));
-
-        GenomeLoc result2 = GenomeLoc.merge(new TreeSet<GenomeLoc>(locs));
-        Assert.assertEquals(result1, result2);
-        Assert.assertEquals(result1.getStart(), locs.get(0).getStart());
-        Assert.assertEquals(result1.getStop(), locs.get(locs.size() - 1).getStop());
-    }
-
-    // -------------------------------------------------------------------------------------
-    //
-    // testing distance functionality
-    //
-    // -------------------------------------------------------------------------------------
-
-    @Test(enabled=true)
-    public void testDistanceAcrossContigs() {
-        final int chrSize = 1000;
-        SAMFileHeader header = ArtificialSAMUtils.createArtificialSamHeader(10, 0, chrSize);
-        GenomeLocParser parser = new GenomeLocParser(header.getSequenceDictionary());
-        GenomeLoc loc1 = parser.createGenomeLoc("chr3", 500);  // to check regular case
-        GenomeLoc loc2 = parser.createGenomeLoc("chr7", 200);  // to check regular case
-        GenomeLoc loc3 = parser.createGenomeLoc("chr0", 1);    // to check corner case
-        GenomeLoc loc4 = parser.createGenomeLoc("chr9", 1000);// to check corner case
-        GenomeLoc loc5 = parser.createGenomeLoc("chr7", 500);  // to make sure it does the right thing when in the same chromosome
-
-        GenomeLoc loc6 = parser.createGenomeLoc("chr7", 200, 300);
-        GenomeLoc loc7 = parser.createGenomeLoc("chr7", 500, 600);
-        GenomeLoc loc8 = parser.createGenomeLoc("chr9", 500, 600);
-
-        // Locus comparisons
-        Assert.assertEquals(loc1.distanceAcrossContigs(loc2, header), 3*chrSize + chrSize-loc1.getStop() + loc2.getStart()); // simple case, smaller first
-        Assert.assertEquals(loc2.distanceAcrossContigs(loc1, header), 3*chrSize + chrSize-loc1.getStop() + loc2.getStart()); // simple case, bigger first
-
-        Assert.assertEquals(loc3.distanceAcrossContigs(loc4, header), 10*chrSize - 1); // corner case, smaller first
-        Assert.assertEquals(loc4.distanceAcrossContigs(loc3, header), 10*chrSize - 1); // corner case, bigger first
-
-        Assert.assertEquals(loc2.distanceAcrossContigs(loc5, header), 300); // same contig, smaller first
-        Assert.assertEquals(loc5.distanceAcrossContigs(loc2, header), 300); // same contig, bigger first
-
-        // Interval comparisons
-        Assert.assertEquals(loc6.distanceAcrossContigs(loc7, header), 200); // same contig, smaller first
-        Assert.assertEquals(loc7.distanceAcrossContigs(loc6, header), 200); // same contig, bigger first
-
-        Assert.assertEquals(loc7.distanceAcrossContigs(loc8, header), chrSize + chrSize-loc7.stop + loc8.getStart()); // across contigs, smaller first
-        Assert.assertEquals(loc8.distanceAcrossContigs(loc7, header), chrSize + chrSize-loc7.stop + loc8.getStart()); // across congits, bigger first
-
-    }
-
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/MD5DB.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/MD5DB.java
deleted file mode 100644
index d7c9929..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/MD5DB.java
+++ /dev/null
@@ -1,312 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils;
-
-import org.apache.commons.io.FileUtils;
-import org.apache.log4j.Logger;
-import org.broadinstitute.gatk.engine.walkers.diffengine.DiffEngine;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-
-import java.io.*;
-import java.util.Arrays;
-
-/**
- * Created by IntelliJ IDEA.
- * User: depristo
- * Date: 7/18/11
- * Time: 9:10 AM
- *
- * Utilities for manipulating the MD5 database of previous results
- */
-public class MD5DB {
-    public static final Logger logger = Logger.getLogger(MD5DB.class);
-
-    /**
-     * Subdirectory under the ant build directory where we store integration test md5 results
-     */
-    private static final int MAX_RECORDS_TO_READ = 1000000;
-    private static final int MAX_RAW_DIFFS_TO_SUMMARIZE = -1;
-    public static final String LOCAL_MD5_DB_DIR = "integrationtests";
-    public static final String GLOBAL_MD5_DB_DIR = "/humgen/gsa-hpprojects/GATK/data/integrationtests";
-
-    // tracking and emitting a data file of origina and new md5s
-    private final File MD5MismatchesFile;
-    private final PrintStream md5MismatchStream;
-
-    public MD5DB() {
-        this(new File(MD5DB.LOCAL_MD5_DB_DIR + "/md5mismatches.txt"));
-    }
-
-    public MD5DB(final File MD5MismatchesFile) {
-        this.MD5MismatchesFile = MD5MismatchesFile;
-
-        ensureMd5DbDirectory();
-
-        logger.debug("Creating md5 mismatch db at " + MD5MismatchesFile);
-        try {
-            md5MismatchStream = new PrintStream(new FileOutputStream(MD5MismatchesFile));
-            md5MismatchStream.printf("%s\t%s\t%s%n", "expected", "observed", "test");
-        } catch ( FileNotFoundException e ) {
-            throw new ReviewedGATKException("Failed to open md5 mismatch file", e);
-        }
-
-    }
-
-    public void close() {
-        if ( md5MismatchStream != null ) {
-            logger.debug("Closeing md5 mismatch db at " + MD5MismatchesFile);
-            md5MismatchStream.close();
-        }
-    }
-
-    // ----------------------------------------------------------------------
-    //
-    // MD5 DB stuff
-    //
-    // ----------------------------------------------------------------------
-
-    /**
-     * Create the MD5 file directories if necessary
-     */
-    private void ensureMd5DbDirectory() {
-        File dir = new File(LOCAL_MD5_DB_DIR);
-        if ( ! dir.exists() ) {
-            System.out.printf("##### Creating MD5 db %s%n", LOCAL_MD5_DB_DIR);
-            if ( ! dir.mkdir() ) {
-                // Need to check AGAIN whether the dir exists, because we might be doing multi-process parallelism
-                // within the same working directory, and another GATK instance may have come along and created the
-                // directory between the calls to exists() and mkdir() above.
-                if ( ! dir.exists() ) {
-                    throw new ReviewedGATKException("Infrastructure failure: failed to create md5 directory " + LOCAL_MD5_DB_DIR);
-                }
-            }
-        }
-    }
-
-    /**
-     * Returns the path to an already existing file with the md5 contents, or valueIfNotFound
-     * if no such file exists in the db.
-     *
-     * @param md5
-     * @param valueIfNotFound
-     * @return
-     */
-    public String getMD5FilePath(final String md5, final String valueIfNotFound) {
-        // we prefer the global db to the local DB, so match it first
-        for ( String dir : Arrays.asList(GLOBAL_MD5_DB_DIR, LOCAL_MD5_DB_DIR)) {
-            File f = getFileForMD5(md5, dir);
-            if ( f.exists() && f.canRead() )
-                return f.getAbsolutePath();
-        }
-
-        return valueIfNotFound;
-    }
-
-    /**
-     * Utility function that given a file's md5 value and the path to the md5 db,
-     * returns the canonical name of the file. For example, if md5 is XXX and db is YYY,
-     * this will return YYY/XXX.integrationtest
-     *
-     * @param md5
-     * @param dbPath
-     * @return
-     */
-    private File getFileForMD5(final String md5, final String dbPath) {
-        final String basename = String.format("%s.integrationtest", md5);
-        return new File(dbPath + "/" + basename);
-    }
-
-    /**
-     * Copies the results file with md5 value to its canonical file name and db places
-     *
-     * @param md5
-     * @param resultsFile
-     */
-    private void updateMD5Db(final String md5, final File resultsFile) {
-        copyFileToDB(getFileForMD5(md5, LOCAL_MD5_DB_DIR), resultsFile);
-        copyFileToDB(getFileForMD5(md5, GLOBAL_MD5_DB_DIR), resultsFile);
-    }
-
-    /**
-     * Low-level utility routine that copies resultsFile to dbFile
-     * @param dbFile
-     * @param resultsFile
-     */
-    private void copyFileToDB(File dbFile, final File resultsFile) {
-        if ( ! dbFile.exists() ) {
-            // the file isn't already in the db, copy it over
-            System.out.printf("##### Updating MD5 file: %s%n", dbFile.getPath());
-            try {
-                FileUtils.copyFile(resultsFile, dbFile);
-            } catch ( IOException e ) {
-                System.out.printf("##### Skipping update, cannot write file %s%n", dbFile);
-            }
-        } else {
-            //System.out.printf("##### MD5 file is up to date: %s%n", dbFile.getPath());
-        }
-    }
-
-    /**
-     * Returns the byte[] of the entire contents of file, for md5 calculations
-     * @param file
-     * @return
-     * @throws IOException
-     */
-    private static byte[] getBytesFromFile(File file) throws IOException {
-        InputStream is = new FileInputStream(file);
-
-        // Get the size of the file
-        long length = file.length();
-
-        if (length > Integer.MAX_VALUE) {
-            // File is too large
-        }
-
-        // Create the byte array to hold the data
-        byte[] bytes = new byte[(int) length];
-
-        // Read in the bytes
-        int offset = 0;
-        int numRead = 0;
-        while (offset < bytes.length
-                && (numRead = is.read(bytes, offset, bytes.length - offset)) >= 0) {
-            offset += numRead;
-        }
-
-        // Ensure all the bytes have been read in
-        if (offset < bytes.length) {
-            throw new IOException("Could not completely read file " + file.getName());
-        }
-
-        // Close the input stream and return bytes
-        is.close();
-        return bytes;
-    }
-
-    public static class MD5Match {
-        public final String actualMD5, expectedMD5;
-        public final String failMessage;
-        public final String diffEngineOutput;
-        public final boolean failed;
-
-        public MD5Match(final String actualMD5, final String expectedMD5, final String failMessage, final String diffEngineOutput, final boolean failed) {
-            this.actualMD5 = actualMD5;
-            this.expectedMD5 = expectedMD5;
-            this.failMessage = failMessage;
-            this.diffEngineOutput = diffEngineOutput;
-            this.failed = failed;
-        }
-    }
-
-    /**
-     * Tests a file MD5 against an expected value, returning an MD5Match object containing a description of the
-     * match or mismatch. In case of a mismatch, outputs a description of the mismatch to various log files/streams.
-     *
-     * NOTE: This function WILL NOT throw an exception if the MD5s are different.
-     *
-     * @param testName Name of the test.
-     * @param testClassName Name of the class that contains the test.
-     * @param resultsFile File to MD5.
-     * @param expectedMD5 Expected MD5 value.
-     * @param parameterize If true or if expectedMD5 is an empty string, will print out the calculated MD5 instead of error text.
-     * @return an MD5Match object containing a description of the match/mismatch. Will have its "failed" field set
-     *         to true if there was a mismatch (unless we're using the "parameterize" argument)
-     */
-    public MD5Match testFileMD5(final String testName, final String testClassName, final File resultsFile, final String expectedMD5, final boolean parameterize) {
-        final String actualMD5 = calculateFileMD5(resultsFile);
-        String diffEngineOutput = "";
-        String failMessage = "";
-        boolean failed = false;
-
-        // copy md5 to integrationtests
-        updateMD5Db(actualMD5, resultsFile);
-
-        if (parameterize || expectedMD5.equals("")) {
-            BaseTest.log(String.format("PARAMETERIZATION: file %s has md5 = %s", resultsFile, actualMD5));
-        } else if ( ! expectedMD5.equals(actualMD5) ) {
-            failed = true;
-            failMessage = String.format("%s:%s has mismatching MD5s: expected=%s observed=%s", testClassName, testName, expectedMD5, actualMD5);
-            diffEngineOutput = logMD5MismatchAndGetDiffEngineOutput(testName, testClassName, expectedMD5, actualMD5);
-        }
-
-        return new MD5Match(actualMD5, expectedMD5, failMessage, diffEngineOutput, failed);
-    }
-
-    /**
-     * Calculates the MD5 for the specified file and returns it as a String
-     *
-     * @param file file whose MD5 to calculate
-     * @return file's MD5 in String form
-     * @throws RuntimeException if the file could not be read
-     */
-    public String calculateFileMD5( final File file ) {
-        try {
-            return Utils.calcMD5(getBytesFromFile(file));
-        }
-        catch ( Exception e ) {
-            throw new RuntimeException("Failed to read bytes from file: " + file + " for MD5 calculation", e);
-        }
-    }
-
-    /**
-     * Logs a description (including diff engine output) of the MD5 mismatch between the expectedMD5
-     * and actualMD5 to a combination of BaseTest.log(), the md5MismatchStream, and stdout, then returns
-     * the diff engine output.
-     *
-     * @param testName name of the test that generated the mismatch
-     * @param testClassName name of the class containing the test that generated the mismatch
-     * @param expectedMD5 the MD5 we were expecting from this test
-     * @param actualMD5 the MD5 we actually calculated from the test output
-     * @return the diff engine output produced while logging the description of the mismatch
-     */
-    private String logMD5MismatchAndGetDiffEngineOutput(final String testName, final String testClassName, final String expectedMD5, final String actualMD5) {
-        System.out.printf("##### Test %s:%s is going to fail #####%n", testClassName, testName);
-        String pathToExpectedMD5File = getMD5FilePath(expectedMD5, "[No DB file found]");
-        String pathToFileMD5File = getMD5FilePath(actualMD5, "[No DB file found]");
-        BaseTest.log(String.format("expected   %s", expectedMD5));
-        BaseTest.log(String.format("calculated %s", actualMD5));
-        BaseTest.log(String.format("diff %s %s", pathToExpectedMD5File, pathToFileMD5File));
-
-        md5MismatchStream.printf("%s\t%s\t%s%n", expectedMD5, actualMD5, testName);
-        md5MismatchStream.flush();
-
-        // inline differences
-        String diffEngineOutput = "";
-        final ByteArrayOutputStream baos = new ByteArrayOutputStream();
-        final PrintStream ps = new PrintStream(baos);
-        DiffEngine.SummaryReportParams params = new DiffEngine.SummaryReportParams(ps, 20, 10, 0, MAX_RAW_DIFFS_TO_SUMMARIZE, false);
-        boolean success = DiffEngine.simpleDiffFiles(new File(pathToExpectedMD5File), new File(pathToFileMD5File), MAX_RECORDS_TO_READ, params);
-        if ( success ) {
-            diffEngineOutput = baos.toString();
-            BaseTest.log(diffEngineOutput);
-            System.out.printf("Note that the above list is not comprehensive.  At most 20 lines of output, and 10 specific differences will be listed.  Please use -T DiffObjects -R " + BaseTest.publicTestDir + "exampleFASTA.fasta -m %s -t %s to explore the differences more freely%n",
-                    pathToExpectedMD5File, pathToFileMD5File);
-        }
-        ps.close();
-
-        return diffEngineOutput;
-    }
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/MD5Mismatch.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/MD5Mismatch.java
deleted file mode 100644
index 11064d1..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/MD5Mismatch.java
+++ /dev/null
@@ -1,67 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils;
-
-import java.util.Collections;
-import java.util.List;
-
-/**
- * Assertion failure representing an MD5 mismatch between expected and actual
- *
- * @author Your Name
- * @since Date created
- */
-public class MD5Mismatch extends Exception {
-    final List<String> actuals, expecteds, diffEngineOutputs;
-
-    public MD5Mismatch(final String actual, final String expected, final String diffEngineOutput) {
-        this(Collections.singletonList(actual), Collections.singletonList(expected), Collections.singletonList(diffEngineOutput));
-    }
-
-    public MD5Mismatch(final List<String> actuals, final List<String> expecteds, final List<String> diffEngineOutputs) {
-        super(formatMessage(actuals, expecteds, diffEngineOutputs));
-        this.actuals = actuals;
-        this.expecteds = expecteds;
-        this.diffEngineOutputs = diffEngineOutputs;
-    }
-
-    @Override
-    public String toString() {
-        return formatMessage(actuals, expecteds, diffEngineOutputs);
-    }
-
-    private static String formatMessage(final List<String> actuals, final List<String> expecteds, final List<String> diffEngineOutputs) {
-        final StringBuilder b = new StringBuilder("MD5 mismatch: ");
-        for ( int i = 0; i < actuals.size(); i++ ) {
-            if ( i >= 1 ) b.append("\t\t\n\n");
-            b.append("actual ").append(actuals.get(i));
-            b.append(" expected ").append(expecteds.get(i));
-            b.append("\nDiff Engine Output:\n");
-            b.append(diffEngineOutputs.get(i));
-        }
-        return b.toString();
-    }
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/MRUCachingSAMSequencingDictionaryUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/MRUCachingSAMSequencingDictionaryUnitTest.java
deleted file mode 100644
index 978a9a7..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/MRUCachingSAMSequencingDictionaryUnitTest.java
+++ /dev/null
@@ -1,97 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils;
-
-
-import htsjdk.samtools.reference.ReferenceSequenceFile;
-import htsjdk.samtools.SAMFileHeader;
-import htsjdk.samtools.SAMSequenceDictionary;
-import htsjdk.samtools.SAMSequenceRecord;
-import org.broadinstitute.gatk.utils.BaseTest;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-import org.broadinstitute.gatk.utils.exceptions.UserException;
-import org.broadinstitute.gatk.utils.fasta.CachingIndexedFastaSequenceFile;
-import org.broadinstitute.gatk.utils.sam.ArtificialSAMUtils;
-import org.testng.Assert;
-import org.testng.annotations.BeforeClass;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
-
-import java.io.File;
-import java.io.FileNotFoundException;
-import java.util.LinkedList;
-import java.util.List;
-
-import static org.testng.Assert.assertEquals;
-import static org.testng.Assert.assertTrue;
-
-public class MRUCachingSAMSequencingDictionaryUnitTest extends BaseTest {
-    private static ReferenceSequenceFile seq;
-    private static SAMSequenceDictionary dict;
-
-    @BeforeClass
-    public void init() throws FileNotFoundException {
-        // sequence
-        seq = new CachingIndexedFastaSequenceFile(new File(b37KGReference));
-        dict = seq.getSequenceDictionary();
-    }
-
-    @Test
-    public void testBasic() {
-        final MRUCachingSAMSequenceDictionary caching = new MRUCachingSAMSequenceDictionary(dict);
-
-        Assert.assertEquals(caching.getDictionary(), dict, "Dictionary not the one I expected");
-
-        for ( final SAMSequenceRecord rec : dict.getSequences() ) {
-            Assert.assertFalse(caching.isCached(rec.getSequenceIndex()), "Expected index to not be cached");
-            Assert.assertFalse(caching.isCached(rec.getSequenceName()), "Expected contig to not be cached");
-
-            Assert.assertEquals(caching.getSequence(rec.getSequenceName()), rec, "Couldn't query for sequence");
-            Assert.assertEquals(caching.getSequence(rec.getSequenceIndex()), rec, "Couldn't query for sequence index");
-            Assert.assertEquals(caching.hasContig(rec.getSequenceName()), true, "hasContig query for sequence");
-            Assert.assertEquals(caching.hasContigIndex(rec.getSequenceIndex()), true, "hasContigIndex query for sequence");
-            Assert.assertEquals(caching.getSequenceIndex(rec.getSequenceName()), rec.getSequenceIndex(), "Couldn't query for sequence");
-
-            Assert.assertEquals(caching.hasContig(rec.getSequenceName() + "asdfadsfa"), false, "hasContig query for unknown sequence");
-            Assert.assertEquals(caching.hasContigIndex(dict.getSequences().size()), false, "hasContigIndex query for unknown index");
-
-            Assert.assertTrue(caching.isCached(rec.getSequenceIndex()), "Expected index to be cached");
-            Assert.assertTrue(caching.isCached(rec.getSequenceName()), "Expected contig to be cached");
-        }
-    }
-
-    @Test(expectedExceptions = ReviewedGATKException.class)
-    public void testBadGetSequence() {
-        final MRUCachingSAMSequenceDictionary caching = new MRUCachingSAMSequenceDictionary(dict);
-        caching.getSequence("notInDictionary");
-    }
-
-    @Test(expectedExceptions = ReviewedGATKException.class)
-    public void testBadGetSequenceIndex() {
-        final MRUCachingSAMSequenceDictionary caching = new MRUCachingSAMSequenceDictionary(dict);
-        caching.getSequence(dict.getSequences().size());
-    }
-}
\ No newline at end of file
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/MWUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/MWUnitTest.java
deleted file mode 100644
index c148dc9..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/MWUnitTest.java
+++ /dev/null
@@ -1,131 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils;
-
-import org.broadinstitute.gatk.utils.BaseTest;
-import org.broadinstitute.gatk.utils.collections.Pair;
-
-import org.testng.annotations.BeforeClass;
-import org.testng.annotations.Test;
-import org.testng.Assert;
-
-/**
- * Created by IntelliJ IDEA.
- * User: Ghost
- * Date: 3/5/11
- * Time: 2:06 PM
- * To change this template use File | Settings | File Templates.
- */
-public class MWUnitTest extends BaseTest {
-    @BeforeClass
-    public void init() { }
-
-    @Test
-    private void testMWU() {
-        logger.warn("Testing MWU");
-        MannWhitneyU mwu = new MannWhitneyU();
-        mwu.add(0, MannWhitneyU.USet.SET1);
-        mwu.add(1,MannWhitneyU.USet.SET2);
-        mwu.add(2,MannWhitneyU.USet.SET2);
-        mwu.add(3,MannWhitneyU.USet.SET2);
-        mwu.add(4,MannWhitneyU.USet.SET2);
-        mwu.add(5,MannWhitneyU.USet.SET2);
-        mwu.add(6,MannWhitneyU.USet.SET1);
-        mwu.add(7,MannWhitneyU.USet.SET1);
-        mwu.add(8,MannWhitneyU.USet.SET1);
-        mwu.add(9,MannWhitneyU.USet.SET1);
-        mwu.add(10,MannWhitneyU.USet.SET1);
-        mwu.add(11,MannWhitneyU.USet.SET2);
-        Assert.assertEquals(MannWhitneyU.calculateOneSidedU(mwu.getObservations(), MannWhitneyU.USet.SET1),25L);
-        Assert.assertEquals(MannWhitneyU.calculateOneSidedU(mwu.getObservations(),MannWhitneyU.USet.SET2),11L);
-
-        MannWhitneyU mwu2 = new MannWhitneyU();
-        MannWhitneyU mwuNoDither = new MannWhitneyU(false);
-        for ( int dp : new int[]{2,4,5,6,8} ) {
-            mwu2.add(dp,MannWhitneyU.USet.SET1);
-            mwuNoDither.add(dp,MannWhitneyU.USet.SET1);
-        }
-
-        for ( int dp : new int[]{1,3,7,9,10,11,12,13} ) {
-            mwu2.add(dp,MannWhitneyU.USet.SET2);
-            mwuNoDither.add(dp,MannWhitneyU.USet.SET2);
-        }
-
-        MannWhitneyU.ExactMode pm = MannWhitneyU.ExactMode.POINT;
-        MannWhitneyU.ExactMode cm = MannWhitneyU.ExactMode.CUMULATIVE;
-
-        // tests using the hypothesis that set 2 dominates set 1 (U value = 10)
-        Assert.assertEquals(MannWhitneyU.calculateOneSidedU(mwu2.getObservations(),MannWhitneyU.USet.SET1),10L);
-        Assert.assertEquals(MannWhitneyU.calculateOneSidedU(mwu2.getObservations(),MannWhitneyU.USet.SET2),30L);
-        Assert.assertEquals(MannWhitneyU.calculateOneSidedU(mwuNoDither.getObservations(),MannWhitneyU.USet.SET1),10L);
-        Assert.assertEquals(MannWhitneyU.calculateOneSidedU(mwuNoDither.getObservations(),MannWhitneyU.USet.SET2),30L);
-
-        Pair<Integer,Integer> sizes = mwu2.getSetSizes();
-
-        Assert.assertEquals(MannWhitneyU.calculatePUniformApproximation(sizes.first,sizes.second,10L),0.4180519701814064,1e-14);
-        Assert.assertEquals(MannWhitneyU.calculatePRecursively(sizes.first,sizes.second,10L,false,pm).second,0.021756021756021756,1e-14);
-        Assert.assertEquals(MannWhitneyU.calculatePNormalApproximation(sizes.first,sizes.second,10L,false).second,0.06214143703127617,1e-14);
-        logger.warn("Testing two-sided");
-        Assert.assertEquals((double)mwu2.runTwoSidedTest().second,2*0.021756021756021756,1e-8);
-
-        // tests using the hypothesis that set 1 dominates set 2 (U value = 30) -- empirical should be identical, normall approx close, uniform way off
-        Assert.assertEquals(MannWhitneyU.calculatePNormalApproximation(sizes.second,sizes.first,30L,true).second,2.0*0.08216463976903321,1e-14);
-        Assert.assertEquals(MannWhitneyU.calculatePUniformApproximation(sizes.second,sizes.first,30L),0.0023473625009559074,1e-14);
-        Assert.assertEquals(MannWhitneyU.calculatePRecursively(sizes.second,sizes.first,30L,false,pm).second,0.021756021756021756,1e-14); // note -- exactly same value as above
-        Assert.assertEquals(MannWhitneyU.calculatePRecursively(sizes.second,sizes.first,29L,false,cm).second,1.0-0.08547008547008,1e-14); // r does a correction, subtracting 1 from U
-        Assert.assertEquals(MannWhitneyU.calculatePRecursively(sizes.second,sizes.first,11L,false,cm).second,0.08547008547008,1e-14); // r does a correction, subtracting 1 from U
-        Assert.assertEquals(MannWhitneyU.calculatePRecursively(sizes.second,sizes.first,11L,false,cm).first,-1.36918910442,1e-2); // apache inversion set to be good only to 1e-2
-        Assert.assertEquals(MannWhitneyU.calculatePRecursively(sizes.second,sizes.first,29L,false,cm).first,1.36918910442,1e-2); // apache inversion set to be good only to 1e-2
-        Assert.assertEquals(MannWhitneyU.calculatePRecursively(sizes.second,sizes.first,29L,false,pm).first,1.2558754796642067,1e-8); // PDF should be similar
-        Assert.assertEquals(MannWhitneyU.calculatePRecursively(sizes.second,sizes.first,11L,false,pm).first,-1.2558754796642067,1e-8); // PDF should be similar
-        Assert.assertEquals(MannWhitneyU.calculatePRecursively(4,5,10L,false,pm).second,0.0952381,1e-5);
-        Assert.assertEquals(MannWhitneyU.calculatePRecursively(4,5,10L,false,pm).first,0.0,1e-14);
-
-        logger.warn("Set 1");
-        Assert.assertEquals((double)mwu2.runOneSidedTest(MannWhitneyU.USet.SET1).second,0.021756021756021756,1e-8);
-        logger.warn("Set 2");
-        Assert.assertEquals((double)mwu2.runOneSidedTest(MannWhitneyU.USet.SET2).second,0.021756021756021756,1e-8);
-
-        MannWhitneyU mwu3 = new MannWhitneyU();
-        for ( int dp : new int[]{0,2,4} ) {
-            mwu3.add(dp,MannWhitneyU.USet.SET1);
-        }
-        for ( int dp : new int[]{1,5,6,7,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34} ) {
-            mwu3.add(dp,MannWhitneyU.USet.SET2);
-        }
-        long u = MannWhitneyU.calculateOneSidedU(mwu3.getObservations(),MannWhitneyU.USet.SET1);
-        //logger.warn(String.format("U is: %d",u));
-        Pair<Integer,Integer> nums = mwu3.getSetSizes();
-        //logger.warn(String.format("Corrected p is: %.4e",MannWhitneyU.calculatePRecursivelyDoNotCheckValuesEvenThoughItIsSlow(nums.first,nums.second,u)));
-        //logger.warn(String.format("Counted sequences: %d",MannWhitneyU.countSequences(nums.first, nums.second, u)));
-        //logger.warn(String.format("Possible sequences: %d", (long) Arithmetic.binomial(nums.first+nums.second,nums.first)));
-        //logger.warn(String.format("Ratio: %.4e",MannWhitneyU.countSequences(nums.first,nums.second,u)/Arithmetic.binomial(nums.first+nums.second,nums.first)));
-        Assert.assertEquals(MannWhitneyU.calculatePRecursivelyDoNotCheckValuesEvenThoughItIsSlow(nums.first, nums.second, u), 3.665689149560116E-4, 1e-14);
-        Assert.assertEquals(MannWhitneyU.calculatePNormalApproximation(nums.first,nums.second,u,false).second,0.0032240865760884696,1e-14);
-        Assert.assertEquals(MannWhitneyU.calculatePUniformApproximation(nums.first,nums.second,u),0.0026195003025784036,1e-14);
-
-    }
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/MathUtilsUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/MathUtilsUnitTest.java
deleted file mode 100644
index 4e2fd31..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/MathUtilsUnitTest.java
+++ /dev/null
@@ -1,913 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils;
-
-import cern.jet.random.Normal;
-import org.apache.commons.lang.ArrayUtils;
-import org.broadinstitute.gatk.utils.BaseTest;
-import org.testng.Assert;
-import org.testng.annotations.BeforeClass;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
-
-import java.util.*;
-
-/**
- * Basic unit test for MathUtils
- */
-public class MathUtilsUnitTest extends BaseTest {
-
-    @BeforeClass
-    public void init() {
-    }
-
-    /**
-     * Tests that we get unique values for the valid (non-null-producing) input space for {@link MathUtils#fastGenerateUniqueHashFromThreeIntegers(int, int, int)}.
-     */
-    @Test
-    public void testGenerateUniqueHashFromThreePositiveIntegers() {
-        logger.warn("Executing testGenerateUniqueHashFromThreePositiveIntegers");
-
-        final Set<Long> observedLongs = new HashSet<>();
-        for (short i = 0; i < Byte.MAX_VALUE; i++) {
-            for (short j = 0; j < Byte.MAX_VALUE; j++) {
-                for (short k = 0; k < Byte.MAX_VALUE; k++) {
-                    final Long aLong = MathUtils.fastGenerateUniqueHashFromThreeIntegers(i, j, k);
-                    //System.out.println(String.format("%s, %s, %s: %s", i, j, k, aLong));
-                    Assert.assertTrue(observedLongs.add(aLong));
-                }
-            }
-        }
-
-        for (short i = Byte.MAX_VALUE; i <= Short.MAX_VALUE && i > 0; i += 128) {
-            for (short j = Byte.MAX_VALUE; j <= Short.MAX_VALUE && j > 0; j += 128) {
-                for (short k = Byte.MAX_VALUE; k <= Short.MAX_VALUE && k > 0; k += 128) {
-                    final Long aLong = MathUtils.fastGenerateUniqueHashFromThreeIntegers(i, j, k);
-                    // System.out.println(String.format("%s, %s, %s: %s", i, j, k, aLong));
-                    Assert.assertTrue(observedLongs.add(aLong));
-                }
-            }
-        }
-    }
-
-    @Test(dataProvider = "log10OneMinusPow10Data")
-    public void testLog10OneMinusPow10(final double x, final double expected) {
-        final double actual = MathUtils.log10OneMinusPow10(x);
-        if (Double.isNaN(expected))
-            Assert.assertTrue(Double.isNaN(actual));
-        else
-            Assert.assertEquals(actual,expected,1E-9);
-    }
-
-    @Test(dataProvider = "log1mexpData")
-    public void testLog1mexp(final double x, final double expected) {
-        final double actual = MathUtils.log1mexp(x);
-        if (Double.isNaN(expected))
-            Assert.assertTrue(Double.isNaN(actual));
-        else
-            Assert.assertEquals(actual,expected,1E-9);
-    }
-
-    @DataProvider(name = "log10OneMinusPow10Data")
-    public Iterator<Object[]> log10OneMinusPow10Data() {
-
-          final double[] inValues = new double[] { Double.NaN, 10, 1, 0, -1, -3, -10, -30, -100, -300, -1000, -3000 };
-          return new Iterator<Object[]>() {
-
-              private int i = 0;
-
-              @Override
-              public boolean hasNext() {
-                return i < inValues.length;
-
-              }
-
-              @Override
-              public Object[] next() {
-                  final double input = inValues[i++];
-                  final double output = Math.log10( 1 - Math.pow(10,input));
-                  return new Object[] { input, output };
-              }
-
-              @Override
-              public void remove() {
-                  throw new UnsupportedOperationException();
-              }
-          };
-    }
-
-    @DataProvider(name = "log1mexpData")
-    public Iterator<Object[]> log1mexpData() {
-
-        final double[] inValues = new double[] { Double.NaN, 10, 1, 0, -1, -3, -10, -30, -100, -300, -1000, -3000 };
-        return new Iterator<Object[]>() {
-
-            private int i = 0;
-
-            @Override
-            public boolean hasNext() {
-                return i < inValues.length;
-
-            }
-
-            @Override
-            public Object[] next() {
-                final double input = inValues[i++];
-                final double output = Math.log( 1 - Math.exp(input));
-                return new Object[] { input, output };
-            }
-
-            @Override
-            public void remove() {
-                throw new UnsupportedOperationException();
-            }
-        };
-    }
-
-    /**
-     * Tests that we get the right values from the binomial distribution
-     */
-    @Test
-    public void testBinomialProbability() {
-        logger.warn("Executing testBinomialProbability");
-
-        Assert.assertEquals(MathUtils.binomialProbability(3, 2, 0.5), 0.375, 0.0001);
-        Assert.assertEquals(MathUtils.binomialProbability(100, 10, 0.5), 1.365543e-17, 1e-18);
-        Assert.assertEquals(MathUtils.binomialProbability(217, 73, 0.02), 4.521904e-67, 1e-68);
-        Assert.assertEquals(MathUtils.binomialProbability(300, 100, 0.02), 9.27097e-91, 1e-92);
-        Assert.assertEquals(MathUtils.binomialProbability(300, 150, 0.98), 6.462892e-168, 1e-169);
-        Assert.assertEquals(MathUtils.binomialProbability(300, 120, 0.98), 3.090054e-221, 1e-222);
-        Assert.assertEquals(MathUtils.binomialProbability(300, 112, 0.98), 2.34763e-236, 1e-237);
-    }
-
-    /**
-     * Tests that we get the right values from the binomial distribution
-     */
-    @Test
-    public void testCumulativeBinomialProbability() {
-        logger.warn("Executing testCumulativeBinomialProbability");
-
-        for (int j = 0; j < 2; j++) { // Test memoizing functionality, as well.
-            final int numTrials = 10;
-            for ( int i = 0; i < numTrials; i++ )
-                Assert.assertEquals(MathUtils.binomialCumulativeProbability(numTrials, i, i), MathUtils.binomialProbability(numTrials, i), 1e-10, String.format("k=%d, n=%d", i, numTrials));
-
-            Assert.assertEquals(MathUtils.binomialCumulativeProbability(10, 0, 2), 0.05468750, 1e-7);
-            Assert.assertEquals(MathUtils.binomialCumulativeProbability(10, 0, 5), 0.62304687, 1e-7);
-            Assert.assertEquals(MathUtils.binomialCumulativeProbability(10, 0, 10), 1.0, 1e-7);
-        }
-    }
-
-    /**
-     * Tests that we get the right values from the multinomial distribution
-     */
-    @Test
-    public void testMultinomialProbability() {
-        logger.warn("Executing testMultinomialProbability");
-
-        int[] counts0 = {2, 0, 1};
-        double[] probs0 = {0.33, 0.33, 0.34};
-        Assert.assertEquals(MathUtils.multinomialProbability(counts0, probs0), 0.111078, 1e-6);
-
-        int[] counts1 = {10, 20, 30};
-        double[] probs1 = {0.25, 0.25, 0.50};
-        Assert.assertEquals(MathUtils.multinomialProbability(counts1, probs1), 0.002870301, 1e-9);
-
-        int[] counts2 = {38, 82, 50, 36};
-        double[] probs2 = {0.25, 0.25, 0.25, 0.25};
-        Assert.assertEquals(MathUtils.multinomialProbability(counts2, probs2), 1.88221e-09, 1e-10);
-
-        int[] counts3 = {1, 600, 1};
-        double[] probs3 = {0.33, 0.33, 0.34};
-        Assert.assertEquals(MathUtils.multinomialProbability(counts3, probs3), 5.20988e-285, 1e-286);
-    }
-
-    /**
-     * Tests that the random index selection is working correctly
-     */
-    @Test
-    public void testRandomIndicesWithReplacement() {
-        logger.warn("Executing testRandomIndicesWithReplacement");
-
-        // Check that the size of the list returned is correct
-        Assert.assertTrue(MathUtils.sampleIndicesWithReplacement(5, 0).size() == 0);
-        Assert.assertTrue(MathUtils.sampleIndicesWithReplacement(5, 1).size() == 1);
-        Assert.assertTrue(MathUtils.sampleIndicesWithReplacement(5, 5).size() == 5);
-        Assert.assertTrue(MathUtils.sampleIndicesWithReplacement(5, 1000).size() == 1000);
-
-        // Check that the list contains only the k element range that as asked for - no more, no less
-        List<Integer> Five = new ArrayList<>();
-        Collections.addAll(Five, 0, 1, 2, 3, 4);
-        List<Integer> BigFive = MathUtils.sampleIndicesWithReplacement(5, 10000);
-        Assert.assertTrue(BigFive.containsAll(Five));
-        Assert.assertTrue(Five.containsAll(BigFive));
-    }
-
-    /**
-     * Tests that we get the right values from the multinomial distribution
-     */
-    @Test
-    public void testSliceListByIndices() {
-        logger.warn("Executing testSliceListByIndices");
-
-        // Check that the list contains only the k element range that as asked for - no more, no less but now
-        // use the index list to pull elements from another list using sliceListByIndices
-        List<Integer> Five = new ArrayList<>();
-        Collections.addAll(Five, 0, 1, 2, 3, 4);
-        List<Character> FiveAlpha = new ArrayList<>();
-        Collections.addAll(FiveAlpha, 'a', 'b', 'c', 'd', 'e');
-        List<Integer> BigFive = MathUtils.sampleIndicesWithReplacement(5, 10000);
-        List<Character> BigFiveAlpha = MathUtils.sliceListByIndices(BigFive, FiveAlpha);
-        Assert.assertTrue(BigFiveAlpha.containsAll(FiveAlpha));
-        Assert.assertTrue(FiveAlpha.containsAll(BigFiveAlpha));
-    }
-
-    /**
-     * Tests that we correctly compute mean and standard deviation from a stream of numbers
-     */
-    @Test
-    public void testRunningAverage() {
-        logger.warn("Executing testRunningAverage");
-
-        int[] numbers = {1, 2, 4, 5, 3, 128, 25678, -24};
-        MathUtils.RunningAverage r = new MathUtils.RunningAverage();
-
-        for (final double b : numbers)
-            r.add(b);
-
-        Assert.assertEquals((long) numbers.length, r.observationCount());
-        Assert.assertTrue(r.mean() - 3224.625 < 2e-10);
-        Assert.assertTrue(r.stddev() - 9072.6515881128 < 2e-10);
-    }
-
-    @Test
-    public void testLog10Gamma() {
-        logger.warn("Executing testLog10Gamma");
-
-        Assert.assertEquals(MathUtils.log10Gamma(4.0), 0.7781513, 1e-6);
-        Assert.assertEquals(MathUtils.log10Gamma(10), 5.559763, 1e-6);
-        Assert.assertEquals(MathUtils.log10Gamma(10654), 38280.53, 1e-2);
-    }
-
-    @Test
-    public void testLog10BinomialCoefficient() {
-        logger.warn("Executing testLog10BinomialCoefficient");
-        // note that we can test the binomial coefficient calculation indirectly via Newton's identity
-        // (1+z)^m = sum (m choose k)z^k
-        double[] z_vals = new double[]{0.999,0.9,0.8,0.5,0.2,0.01,0.0001};
-        int[] exponent = new int[]{5,15,25,50,100};
-        for ( double z : z_vals ) {
-            double logz = Math.log10(z);
-            for ( int exp : exponent ) {
-                double expected_log = exp*Math.log10(1+z);
-                double[] newtonArray_log = new double[1+exp];
-                for ( int k = 0 ; k <= exp; k++ ) {
-                    newtonArray_log[k] = MathUtils.log10BinomialCoefficient(exp,k)+k*logz;
-                }
-                Assert.assertEquals(MathUtils.log10sumLog10(newtonArray_log),expected_log,1e-6);
-            }
-        }
-
-        Assert.assertEquals(MathUtils.log10BinomialCoefficient(4, 2), 0.7781513, 1e-6);
-        Assert.assertEquals(MathUtils.log10BinomialCoefficient(10, 3), 2.079181, 1e-6);
-        Assert.assertEquals(MathUtils.log10BinomialCoefficient(103928, 119), 400.2156, 1e-4);
-    }
-
-    @Test
-    public void testFactorial() {
-        logger.warn("Executing testFactorial");
-        Assert.assertEquals((int) MathUtils.factorial(4), 24);
-        Assert.assertEquals((int) MathUtils.factorial(10), 3628800);
-        Assert.assertEquals((int) MathUtils.factorial(12), 479001600);
-    }
-
-    @Test
-    public void testLog10Factorial() {
-        logger.warn("Executing testLog10Factorial");
-        Assert.assertEquals(MathUtils.log10Factorial(4), 1.380211, 1e-6);
-        Assert.assertEquals(MathUtils.log10Factorial(10), 6.559763, 1e-6);
-        Assert.assertEquals(MathUtils.log10Factorial(12), 8.680337, 1e-6);
-        Assert.assertEquals(MathUtils.log10Factorial(200), 374.8969, 1e-3);
-        Assert.assertEquals(MathUtils.log10Factorial(12342), 45138.26, 1e-1);
-        double log10factorial_small = 0;
-        double log10factorial_middle = 374.8969;
-        double log10factorial_large = 45138.26;
-        int small_start = 1;
-        int med_start = 200;
-        int large_start = 12342;
-        for ( int i = 1; i < 1000; i++ ) {
-            log10factorial_small += Math.log10(i+small_start);
-            log10factorial_middle += Math.log10(i+med_start);
-            log10factorial_large += Math.log10(i+large_start);
-            Assert.assertEquals(MathUtils.log10Factorial(small_start+i),log10factorial_small,1e-6);
-            Assert.assertEquals(MathUtils.log10Factorial(med_start+i),log10factorial_middle,1e-3);
-            Assert.assertEquals(MathUtils.log10Factorial(large_start+i),log10factorial_large,1e-1);
-        }
-    }
-
-    @Test
-    public void testApproximateLog10SumLog10() {
-
-        final double requiredPrecision = 1E-4;
-
-        Assert.assertEquals(MathUtils.approximateLog10SumLog10(new double[] {0.0}), 0.0, requiredPrecision);
-        Assert.assertEquals(MathUtils.approximateLog10SumLog10(new double[] {-5.15}), -5.15, requiredPrecision);
-        Assert.assertEquals(MathUtils.approximateLog10SumLog10(new double[] {130.0}), 130.0, requiredPrecision);
-        Assert.assertEquals(MathUtils.approximateLog10SumLog10(new double[] {-0.145}), -0.145, requiredPrecision);
-
-        Assert.assertEquals(MathUtils.approximateLog10SumLog10(0.0, 0.0), Math.log10(Math.pow(10.0, 0.0) + Math.pow(10.0, 0.0)), requiredPrecision);
-        Assert.assertEquals(MathUtils.approximateLog10SumLog10(-1.0, 0.0), Math.log10(Math.pow(10.0, -1.0) + Math.pow(10.0, 0.0)), requiredPrecision);
-        Assert.assertEquals(MathUtils.approximateLog10SumLog10(0.0, -1.0), Math.log10(Math.pow(10.0, 0.0) + Math.pow(10.0, -1.0)), requiredPrecision);
-        Assert.assertEquals(MathUtils.approximateLog10SumLog10(-2.2, -3.5), Math.log10(Math.pow(10.0, -2.2) + Math.pow(10.0, -3.5)), requiredPrecision);
-        Assert.assertEquals(MathUtils.approximateLog10SumLog10(-1.0, -7.1), Math.log10(Math.pow(10.0, -1.0) + Math.pow(10.0, -7.1)), requiredPrecision);
-        Assert.assertEquals(MathUtils.approximateLog10SumLog10(5.0, 6.2), Math.log10(Math.pow(10.0, 5.0) + Math.pow(10.0, 6.2)), requiredPrecision);
-        Assert.assertEquals(MathUtils.approximateLog10SumLog10(38.1, 16.2), Math.log10(Math.pow(10.0, 38.1) + Math.pow(10.0, 16.2)), requiredPrecision);
-        Assert.assertEquals(MathUtils.approximateLog10SumLog10(-38.1, 6.2), Math.log10(Math.pow(10.0, -38.1) + Math.pow(10.0, 6.2)), requiredPrecision);
-        Assert.assertEquals(MathUtils.approximateLog10SumLog10(-19.1, -37.1), Math.log10(Math.pow(10.0, -19.1) + Math.pow(10.0, -37.1)), requiredPrecision);
-        Assert.assertEquals(MathUtils.approximateLog10SumLog10(-29.1, -27.6), Math.log10(Math.pow(10.0, -29.1) + Math.pow(10.0, -27.6)), requiredPrecision);
-        Assert.assertEquals(MathUtils.approximateLog10SumLog10(-0.12345, -0.23456), Math.log10(Math.pow(10.0, -0.12345) + Math.pow(10.0, -0.23456)), requiredPrecision);
-        Assert.assertEquals(MathUtils.approximateLog10SumLog10(-15.7654, -17.0101), Math.log10(Math.pow(10.0, -15.7654) + Math.pow(10.0, -17.0101)), requiredPrecision);
-        Assert.assertEquals(MathUtils.approximateLog10SumLog10(-0.12345, Double.NEGATIVE_INFINITY), -0.12345, requiredPrecision);
-        Assert.assertEquals(MathUtils.approximateLog10SumLog10(-15.7654, Double.NEGATIVE_INFINITY), -15.7654, requiredPrecision);
-
-        Assert.assertEquals(MathUtils.approximateLog10SumLog10(new double[] {0.0, 0.0}), Math.log10(Math.pow(10.0, 0.0) + Math.pow(10.0, 0.0)), requiredPrecision);
-        Assert.assertEquals(MathUtils.approximateLog10SumLog10(new double[] {-1.0, 0.0}), Math.log10(Math.pow(10.0, -1.0) + Math.pow(10.0, 0.0)), requiredPrecision);
-        Assert.assertEquals(MathUtils.approximateLog10SumLog10(new double[] {0.0, -1.0}), Math.log10(Math.pow(10.0, 0.0) + Math.pow(10.0, -1.0)), requiredPrecision);
-        Assert.assertEquals(MathUtils.approximateLog10SumLog10(new double[] {-2.2, -3.5}), Math.log10(Math.pow(10.0, -2.2) + Math.pow(10.0, -3.5)), requiredPrecision);
-        Assert.assertEquals(MathUtils.approximateLog10SumLog10(new double[] {-1.0, -7.1}), Math.log10(Math.pow(10.0, -1.0) + Math.pow(10.0, -7.1)), requiredPrecision);
-        Assert.assertEquals(MathUtils.approximateLog10SumLog10(new double[] {5.0, 6.2}), Math.log10(Math.pow(10.0, 5.0) + Math.pow(10.0, 6.2)), requiredPrecision);
-        Assert.assertEquals(MathUtils.approximateLog10SumLog10(new double[] {38.1, 16.2}), Math.log10(Math.pow(10.0, 38.1) + Math.pow(10.0, 16.2)), requiredPrecision);
-        Assert.assertEquals(MathUtils.approximateLog10SumLog10(new double[] {-38.1, 6.2}), Math.log10(Math.pow(10.0, -38.1) + Math.pow(10.0, 6.2)), requiredPrecision);
-        Assert.assertEquals(MathUtils.approximateLog10SumLog10(new double[] {-19.1, -37.1}), Math.log10(Math.pow(10.0, -19.1) + Math.pow(10.0, -37.1)), requiredPrecision);
-        Assert.assertEquals(MathUtils.approximateLog10SumLog10(new double[] {-29.1, -27.6}), Math.log10(Math.pow(10.0, -29.1) + Math.pow(10.0, -27.6)), requiredPrecision);
-        Assert.assertEquals(MathUtils.approximateLog10SumLog10(new double[] {-0.12345, -0.23456}), Math.log10(Math.pow(10.0, -0.12345) + Math.pow(10.0, -0.23456)), requiredPrecision);
-        Assert.assertEquals(MathUtils.approximateLog10SumLog10(new double[] {-15.7654, -17.0101}), Math.log10(Math.pow(10.0, -15.7654) + Math.pow(10.0, -17.0101)), requiredPrecision);
-
-        Assert.assertEquals(MathUtils.approximateLog10SumLog10(new double[] {0.0, 0.0, 0.0}), Math.log10(Math.pow(10.0, 0.0) + Math.pow(10.0, 0.0) + Math.pow(10.0, 0.0)), requiredPrecision);
-        Assert.assertEquals(MathUtils.approximateLog10SumLog10(new double[] {-1.0, 0.0, 0.0}), Math.log10(Math.pow(10.0, -1.0) + Math.pow(10.0, 0.0) + Math.pow(10.0, 0.0)), requiredPrecision);
-        Assert.assertEquals(MathUtils.approximateLog10SumLog10(new double[] {0.0, -1.0, -2.5}), Math.log10(Math.pow(10.0, 0.0) + Math.pow(10.0, -1.0) + Math.pow(10.0, -2.5)), requiredPrecision);
-        Assert.assertEquals(MathUtils.approximateLog10SumLog10(new double[] {-2.2, -3.5, -1.1}), Math.log10(Math.pow(10.0, -2.2) + Math.pow(10.0, -3.5) + Math.pow(10.0, -1.1)), requiredPrecision);
-        Assert.assertEquals(MathUtils.approximateLog10SumLog10(new double[] {-1.0, -7.1, 0.5}), Math.log10(Math.pow(10.0, -1.0) + Math.pow(10.0, -7.1) + Math.pow(10.0, 0.5)), requiredPrecision);
-        Assert.assertEquals(MathUtils.approximateLog10SumLog10(new double[] {5.0, 6.2, 1.3}), Math.log10(Math.pow(10.0, 5.0) + Math.pow(10.0, 6.2) + Math.pow(10.0, 1.3)), requiredPrecision);
-        Assert.assertEquals(MathUtils.approximateLog10SumLog10(new double[] {38.1, 16.2, 18.1}), Math.log10(Math.pow(10.0, 38.1) + Math.pow(10.0, 16.2) + Math.pow(10.0, 18.1)), requiredPrecision);
-        Assert.assertEquals(MathUtils.approximateLog10SumLog10(new double[] {-38.1, 6.2, 26.6}), Math.log10(Math.pow(10.0, -38.1) + Math.pow(10.0, 6.2) + Math.pow(10.0, 26.6)), requiredPrecision);
-        Assert.assertEquals(MathUtils.approximateLog10SumLog10(new double[] {-19.1, -37.1, -45.1}), Math.log10(Math.pow(10.0, -19.1) + Math.pow(10.0, -37.1) + Math.pow(10.0, -45.1)), requiredPrecision);
-        Assert.assertEquals(MathUtils.approximateLog10SumLog10(new double[] {-29.1, -27.6, -26.2}), Math.log10(Math.pow(10.0, -29.1) + Math.pow(10.0, -27.6) + Math.pow(10.0, -26.2)), requiredPrecision);
-        Assert.assertEquals(MathUtils.approximateLog10SumLog10(new double[] {-0.12345, -0.23456, -0.34567}), Math.log10(Math.pow(10.0, -0.12345) + Math.pow(10.0, -0.23456) + Math.pow(10.0, -0.34567)), requiredPrecision);
-        Assert.assertEquals(MathUtils.approximateLog10SumLog10(new double[] {-15.7654, -17.0101, -17.9341}), Math.log10(Math.pow(10.0, -15.7654) + Math.pow(10.0, -17.0101) + Math.pow(10.0, -17.9341)), requiredPrecision);
-
-        Assert.assertEquals(MathUtils.approximateLog10SumLog10(0.0, 0.0, 0.0), Math.log10(Math.pow(10.0, 0.0) + Math.pow(10.0, 0.0) + Math.pow(10.0, 0.0)), requiredPrecision);
-        Assert.assertEquals(MathUtils.approximateLog10SumLog10(-1.0, 0.0, 0.0), Math.log10(Math.pow(10.0, -1.0) + Math.pow(10.0, 0.0) + Math.pow(10.0, 0.0)), requiredPrecision);
-        Assert.assertEquals(MathUtils.approximateLog10SumLog10(0.0, -1.0, -2.5), Math.log10(Math.pow(10.0, 0.0) + Math.pow(10.0, -1.0) + Math.pow(10.0, -2.5)), requiredPrecision);
-        Assert.assertEquals(MathUtils.approximateLog10SumLog10(-2.2, -3.5, -1.1), Math.log10(Math.pow(10.0, -2.2) + Math.pow(10.0, -3.5) + Math.pow(10.0, -1.1)), requiredPrecision);
-        Assert.assertEquals(MathUtils.approximateLog10SumLog10(-1.0, -7.1, 0.5), Math.log10(Math.pow(10.0, -1.0) + Math.pow(10.0, -7.1) + Math.pow(10.0, 0.5)), requiredPrecision);
-        Assert.assertEquals(MathUtils.approximateLog10SumLog10(5.0, 6.2, 1.3), Math.log10(Math.pow(10.0, 5.0) + Math.pow(10.0, 6.2) + Math.pow(10.0, 1.3)), requiredPrecision);
-        Assert.assertEquals(MathUtils.approximateLog10SumLog10(38.1, 16.2, 18.1), Math.log10(Math.pow(10.0, 38.1) + Math.pow(10.0, 16.2) + Math.pow(10.0, 18.1)), requiredPrecision);
-        Assert.assertEquals(MathUtils.approximateLog10SumLog10(-38.1, 6.2, 26.6), Math.log10(Math.pow(10.0, -38.1) + Math.pow(10.0, 6.2) + Math.pow(10.0, 26.6)), requiredPrecision);
-        Assert.assertEquals(MathUtils.approximateLog10SumLog10(-19.1, -37.1, -45.1), Math.log10(Math.pow(10.0, -19.1) + Math.pow(10.0, -37.1) + Math.pow(10.0, -45.1)), requiredPrecision);
-        Assert.assertEquals(MathUtils.approximateLog10SumLog10(-29.1, -27.6, -26.2), Math.log10(Math.pow(10.0, -29.1) + Math.pow(10.0, -27.6) + Math.pow(10.0, -26.2)), requiredPrecision);
-        Assert.assertEquals(MathUtils.approximateLog10SumLog10(-0.12345, -0.23456, -0.34567), Math.log10(Math.pow(10.0, -0.12345) + Math.pow(10.0, -0.23456) + Math.pow(10.0, -0.34567)), requiredPrecision);
-        Assert.assertEquals(MathUtils.approximateLog10SumLog10(-15.7654, -17.0101, -17.9341), Math.log10(Math.pow(10.0, -15.7654) + Math.pow(10.0, -17.0101) + Math.pow(10.0, -17.9341)), requiredPrecision);
-
-        // magnitude of the sum doesn't matter, so we can combinatorially test this via partitions of unity
-        double[] mult_partitionFactor = new double[]{0.999,0.98,0.95,0.90,0.8,0.5,0.3,0.1,0.05,0.001};
-        int[] n_partitions = new int[] {2,4,8,16,32,64,128,256,512,1028};
-        for ( double alpha : mult_partitionFactor ) {
-            double log_alpha = Math.log10(alpha);
-            double log_oneMinusAlpha = Math.log10(1-alpha);
-            for ( int npart : n_partitions ) {
-                double[] multiplicative = new double[npart];
-                double[] equal = new double[npart];
-                double remaining_log = 0.0;  // realspace = 1
-                for ( int i = 0 ; i < npart-1; i++ ) {
-                    equal[i] = -Math.log10(npart);
-                    double piece = remaining_log + log_alpha; // take a*remaining, leaving remaining-a*remaining = (1-a)*remaining
-                    multiplicative[i] = piece;
-                    remaining_log = remaining_log + log_oneMinusAlpha;
-                }
-                equal[npart-1] = -Math.log10(npart);
-                multiplicative[npart-1] = remaining_log;
-                Assert.assertEquals(MathUtils.approximateLog10SumLog10(equal),0.0,requiredPrecision,String.format("Did not sum to one: k=%d equal partitions.",npart));
-                Assert.assertEquals(MathUtils.approximateLog10SumLog10(multiplicative),0.0,requiredPrecision, String.format("Did not sum to one: k=%d multiplicative partitions with alpha=%f",npart,alpha));
-            }
-        }
-    }
-
-    @Test
-    public void testLog10sumLog10() {
-        final double requiredPrecision = 1E-14;
-
-        final double log3 = 0.477121254719662;
-        Assert.assertEquals(MathUtils.log10sumLog10(new double[]{0.0, 0.0, 0.0}), log3, requiredPrecision);
-        Assert.assertEquals(MathUtils.log10sumLog10(new double[] {0.0, 0.0, 0.0}, 0), log3, requiredPrecision);
-        Assert.assertEquals(MathUtils.log10sumLog10(new double[]{0.0, 0.0, 0.0}, 0, 3), log3, requiredPrecision);
-
-        final double log2 = 0.301029995663981;
-        Assert.assertEquals(MathUtils.log10sumLog10(new double[] {0.0, 0.0, 0.0}, 0, 2), log2, requiredPrecision);
-        Assert.assertEquals(MathUtils.log10sumLog10(new double[] {0.0, 0.0, 0.0}, 0, 1), 0.0, requiredPrecision);
-
-        Assert.assertEquals(MathUtils.log10sumLog10(new double[] {0.0}), 0.0, requiredPrecision);
-        Assert.assertEquals(MathUtils.log10sumLog10(new double[] {-5.15}), -5.15, requiredPrecision);
-        Assert.assertEquals(MathUtils.log10sumLog10(new double[] {130.0}), 130.0, requiredPrecision);
-        Assert.assertEquals(MathUtils.log10sumLog10(new double[] {-0.145}), -0.145, requiredPrecision);
-
-        Assert.assertEquals(MathUtils.log10sumLog10(new double[] {0.0, 0.0}), Math.log10(Math.pow(10.0, 0.0) + Math.pow(10.0, 0.0)), requiredPrecision);
-        Assert.assertEquals(MathUtils.log10sumLog10(new double[] {-1.0, 0.0}), Math.log10(Math.pow(10.0, -1.0) + Math.pow(10.0, 0.0)), requiredPrecision);
-        Assert.assertEquals(MathUtils.log10sumLog10(new double[] {0.0, -1.0}), Math.log10(Math.pow(10.0, 0.0) + Math.pow(10.0, -1.0)), requiredPrecision);
-        Assert.assertEquals(MathUtils.log10sumLog10(new double[] {-2.2, -3.5}), Math.log10(Math.pow(10.0, -2.2) + Math.pow(10.0, -3.5)), requiredPrecision);
-        Assert.assertEquals(MathUtils.log10sumLog10(new double[] {-1.0, -7.1}), Math.log10(Math.pow(10.0, -1.0) + Math.pow(10.0, -7.1)), requiredPrecision);
-        Assert.assertEquals(MathUtils.log10sumLog10(new double[] {5.0, 6.2}), Math.log10(Math.pow(10.0, 5.0) + Math.pow(10.0, 6.2)), requiredPrecision);
-        Assert.assertEquals(MathUtils.log10sumLog10(new double[] {38.1, 16.2}), Math.log10(Math.pow(10.0, 38.1) + Math.pow(10.0, 16.2)), requiredPrecision);
-        Assert.assertEquals(MathUtils.log10sumLog10(new double[] {-38.1, 6.2}), Math.log10(Math.pow(10.0, -38.1) + Math.pow(10.0, 6.2)), requiredPrecision);
-        Assert.assertEquals(MathUtils.log10sumLog10(new double[] {-19.1, -37.1}), Math.log10(Math.pow(10.0, -19.1) + Math.pow(10.0, -37.1)), requiredPrecision);
-        Assert.assertEquals(MathUtils.log10sumLog10(new double[] {-29.1, -27.6}), Math.log10(Math.pow(10.0, -29.1) + Math.pow(10.0, -27.6)), requiredPrecision);
-        Assert.assertEquals(MathUtils.log10sumLog10(new double[] {-0.12345, -0.23456}), Math.log10(Math.pow(10.0, -0.12345) + Math.pow(10.0, -0.23456)), requiredPrecision);
-        Assert.assertEquals(MathUtils.log10sumLog10(new double[] {-15.7654, -17.0101}), Math.log10(Math.pow(10.0, -15.7654) + Math.pow(10.0, -17.0101)), requiredPrecision);
-
-        Assert.assertEquals(MathUtils.log10sumLog10(new double[] {0.0, 0.0, 0.0}), Math.log10(Math.pow(10.0, 0.0) + Math.pow(10.0, 0.0) + Math.pow(10.0, 0.0)), requiredPrecision);
-        Assert.assertEquals(MathUtils.log10sumLog10(new double[] {-1.0, 0.0, 0.0}), Math.log10(Math.pow(10.0, -1.0) + Math.pow(10.0, 0.0) + Math.pow(10.0, 0.0)), requiredPrecision);
-        Assert.assertEquals(MathUtils.log10sumLog10(new double[] {0.0, -1.0, -2.5}), Math.log10(Math.pow(10.0, 0.0) + Math.pow(10.0, -1.0) + Math.pow(10.0, -2.5)), requiredPrecision);
-        Assert.assertEquals(MathUtils.log10sumLog10(new double[] {-2.2, -3.5, -1.1}), Math.log10(Math.pow(10.0, -2.2) + Math.pow(10.0, -3.5) + Math.pow(10.0, -1.1)), requiredPrecision);
-        Assert.assertEquals(MathUtils.log10sumLog10(new double[] {-1.0, -7.1, 0.5}), Math.log10(Math.pow(10.0, -1.0) + Math.pow(10.0, -7.1) + Math.pow(10.0, 0.5)), requiredPrecision);
-        Assert.assertEquals(MathUtils.log10sumLog10(new double[] {5.0, 6.2, 1.3}), Math.log10(Math.pow(10.0, 5.0) + Math.pow(10.0, 6.2) + Math.pow(10.0, 1.3)), requiredPrecision);
-        Assert.assertEquals(MathUtils.log10sumLog10(new double[] {38.1, 16.2, 18.1}), Math.log10(Math.pow(10.0, 38.1) + Math.pow(10.0, 16.2) + Math.pow(10.0, 18.1)), requiredPrecision);
-        Assert.assertEquals(MathUtils.log10sumLog10(new double[] {-38.1, 6.2, 26.6}), Math.log10(Math.pow(10.0, -38.1) + Math.pow(10.0, 6.2) + Math.pow(10.0, 26.6)), requiredPrecision);
-        Assert.assertEquals(MathUtils.log10sumLog10(new double[] {-19.1, -37.1, -45.1}), Math.log10(Math.pow(10.0, -19.1) + Math.pow(10.0, -37.1) + Math.pow(10.0, -45.1)), requiredPrecision);
-        Assert.assertEquals(MathUtils.log10sumLog10(new double[] {-29.1, -27.6, -26.2}), Math.log10(Math.pow(10.0, -29.1) + Math.pow(10.0, -27.6) + Math.pow(10.0, -26.2)), requiredPrecision);
-        Assert.assertEquals(MathUtils.log10sumLog10(new double[] {-0.12345, -0.23456, -0.34567}), Math.log10(Math.pow(10.0, -0.12345) + Math.pow(10.0, -0.23456) + Math.pow(10.0, -0.34567)), requiredPrecision);
-        Assert.assertEquals(MathUtils.log10sumLog10(new double[] {-15.7654, -17.0101, -17.9341}), Math.log10(Math.pow(10.0, -15.7654) + Math.pow(10.0, -17.0101) + Math.pow(10.0, -17.9341)), requiredPrecision);
-
-        // magnitude of the sum doesn't matter, so we can combinatorially test this via partitions of unity
-        double[] mult_partitionFactor = new double[]{0.999,0.98,0.95,0.90,0.8,0.5,0.3,0.1,0.05,0.001};
-        int[] n_partitions = new int[] {2,4,8,16,32,64,128,256,512,1028};
-        for ( double alpha : mult_partitionFactor ) {
-            double log_alpha = Math.log10(alpha);
-            double log_oneMinusAlpha = Math.log10(1-alpha);
-            for ( int npart : n_partitions ) {
-                double[] multiplicative = new double[npart];
-                double[] equal = new double[npart];
-                double remaining_log = 0.0;  // realspace = 1
-                for ( int i = 0 ; i < npart-1; i++ ) {
-                    equal[i] = -Math.log10(npart);
-                    double piece = remaining_log + log_alpha; // take a*remaining, leaving remaining-a*remaining = (1-a)*remaining
-                    multiplicative[i] = piece;
-                    remaining_log = remaining_log + log_oneMinusAlpha;
-                }
-                equal[npart-1] = -Math.log10(npart);
-                multiplicative[npart-1] = remaining_log;
-                Assert.assertEquals(MathUtils.log10sumLog10(equal),0.0,requiredPrecision);
-                Assert.assertEquals(MathUtils.log10sumLog10(multiplicative),0.0,requiredPrecision,String.format("Did not sum to one: nPartitions=%d, alpha=%f",npart,alpha));
-            }
-        }
-    }
-
-    @Test
-    public void testLogDotProduct() {
-        Assert.assertEquals(MathUtils.logDotProduct(new double[]{-5.0,-3.0,2.0}, new double[]{6.0,7.0,8.0}),10.0,1e-3);
-        Assert.assertEquals(MathUtils.logDotProduct(new double[]{-5.0}, new double[]{6.0}),1.0,1e-3);
-    }
-
-    @Test
-    public void testNormalDistribution() {
-        final double requiredPrecision = 1E-10;
-
-        final Normal n = new Normal(0.0, 1.0, null);
-        for( final double mu : new double[]{-5.0, -3.2, -1.5, 0.0, 1.2, 3.0, 5.8977} ) {
-            for( final double sigma : new double[]{1.2, 3.0, 5.8977} ) {
-                for( final double x : new double[]{-5.0, -3.2, -1.5, 0.0, 1.2, 3.0, 5.8977} ) {
-                    n.setState(mu, sigma);
-                    Assert.assertEquals(n.pdf(x), MathUtils.normalDistribution(mu, sigma, x), requiredPrecision);
-                    Assert.assertEquals(Math.log10(n.pdf(x)), MathUtils.normalDistributionLog10(mu, sigma, x), requiredPrecision);
-                }
-            }
-        }
-    }
-
-    @DataProvider(name = "ArrayMinData")
-    public Object[][] makeArrayMinData() {
-        List<Object[]> tests = new ArrayList<>();
-
-        // this functionality can be adapted to provide input data for whatever you might want in your data
-        tests.add(new Object[]{Arrays.asList(10), 10});
-        tests.add(new Object[]{Arrays.asList(-10), -10});
-
-        for ( final List<Integer> values : Utils.makePermutations(Arrays.asList(1,2,3), 3, false) ) {
-            tests.add(new Object[]{values, 1});
-        }
-
-        for ( final List<Integer> values : Utils.makePermutations(Arrays.asList(1,2,-3), 3, false) ) {
-            tests.add(new Object[]{values, -3});
-        }
-
-
-        return tests.toArray(new Object[][]{});
-    }
-
-    @Test(dataProvider = "ArrayMinData")
-    public void testArrayMinList(final List<Integer> values, final int expected) {
-        final int actual = MathUtils.arrayMin(values);
-        Assert.assertEquals(actual, expected, "Failed with " + values);
-    }
-
-    @Test(dataProvider = "ArrayMinData")
-    public void testArrayMinIntArray(final List<Integer> values, final int expected) {
-        final int[] asArray = ArrayUtils.toPrimitive(values.toArray(new Integer[values.size()]));
-        final int actual = MathUtils.arrayMin(asArray);
-        Assert.assertEquals(actual, expected, "Failed with " + values);
-    }
-
-    @Test(dataProvider = "ArrayMinData")
-    public void testArrayMinByteArray(final List<Integer> values, final int expected) {
-        final byte[] asArray = new byte[values.size()];
-        for ( int i = 0; i < values.size(); i++ ) asArray[i] = (byte)(values.get(i) & 0xFF);
-        final byte actual = MathUtils.arrayMin(asArray);
-        Assert.assertEquals(actual, (byte)(expected & 0xFF), "Failed with " + values);
-    }
-
-    @Test(dataProvider = "ArrayMinData")
-    public void testArrayMinDoubleArray(final List<Integer> values, final int expected) {
-        final double[] asArray = new double[values.size()];
-        for ( int i = 0; i < values.size(); i++ ) asArray[i] = (double)(values.get(i));
-        final double actual = MathUtils.arrayMin(asArray);
-        Assert.assertEquals(actual, (double)expected, "Failed with " + values);
-    }
-
-    @DataProvider(name = "MedianData")
-    public Object[][] makeMedianData() {
-        final List<Object[]> tests = new ArrayList<>();
-
-        // this functionality can be adapted to provide input data for whatever you might want in your data
-        tests.add(new Object[]{Arrays.asList(10), 10});
-        tests.add(new Object[]{Arrays.asList(1, 10), 10});
-
-        for ( final List<Integer> values : Utils.makePermutations(Arrays.asList(1,2,-3), 3, false) ) {
-            tests.add(new Object[]{values, 1});
-        }
-
-        for ( final List<Double> values : Utils.makePermutations(Arrays.asList(1.1,2.1,-3.1), 3, false) ) {
-            tests.add(new Object[]{values, 1.1});
-        }
-
-        return tests.toArray(new Object[][]{});
-    }
-
-    @Test(dataProvider = "MedianData")
-    public void testMedian(final List<Comparable> values, final Comparable expected) {
-        final Comparable actual = MathUtils.median(values);
-        Assert.assertEquals(actual, expected, "Failed with " + values);
-    }
-
-
-
-    // man. All this to test dirichlet.
-
-    private double[] unwrap(List<Double> stuff) {
-        double[] unwrapped = new double[stuff.size()];
-        int idx = 0;
-        for ( Double d : stuff ) {
-            unwrapped[idx++] = d == null ? 0.0 : d;
-        }
-
-        return unwrapped;
-    }
-
-    /**
-     * The PartitionGenerator generates all of the partitions of a number n, e.g.
-     * 5 + 0
-     * 4 + 1
-     * 3 + 2
-     * 3 + 1 + 1
-     * 2 + 2 + 1
-     * 2 + 1 + 1 + 1
-     * 1 + 1 + 1 + 1 + 1
-     *
-     * This is used to help enumerate the state space over which the Dirichlet-Multinomial is defined,
-     * to ensure that the distribution function is properly implemented
-     */
-    class PartitionGenerator implements Iterator<List<Integer>> {
-        // generate the partitions of an integer, each partition sorted numerically
-        int n;
-        List<Integer> a;
-
-        int y;
-        int k;
-        int state;
-
-        int x;
-        int l;
-
-        public PartitionGenerator(int n) {
-            this.n = n;
-            this.y = n - 1;
-            this.k = 1;
-            this.a = new ArrayList<>();
-            for ( int i = 0; i < n; i++ ) {
-                this.a.add(i);
-            }
-            this.state = 0;
-        }
-
-        public void remove()  { /* do nothing */ }
-
-        public boolean hasNext() { return ! ( this.k == 0 && state == 0 ); }
-
-        private String dataStr()  {
-            return String.format("a = [%s]  k = %d  y = %d  state = %d  x = %d  l = %d",
-                    Utils.join(",",a), k, y, state, x, l);
-        }
-
-        public List<Integer> next() {
-            if ( this.state == 0 ) {
-                this.x = a.get(k-1)+1;
-                k -= 1;
-                this.state = 1;
-            }
-
-            if ( this.state == 1 ) {
-                while ( 2 * x <= y ) {
-                    this.a.set(k,x);
-                    this.y -= (int) x;
-                    this.k++;
-                }
-                this.l = 1+this.k;
-                this.state = 2;
-            }
-
-            if ( this.state == 2 ) {
-                if ( x <= y ) {
-                    this.a.set(k,x);
-                    this.a.set(l,y);
-                    x += 1;
-                    y -= 1;
-                    return this.a.subList(0, this.k + 2);
-                } else {
-                    this.state =3;
-                }
-            }
-
-            if ( this.state == 3 ) {
-                this.a.set(k,x+y);
-                this.y = x + y - 1;
-                this.state = 0;
-                return a.subList(0, k + 1);
-            }
-
-            throw new IllegalStateException("Cannot get here");
-        }
-
-        public String toString() {
-            final StringBuilder buf = new StringBuilder();
-            buf.append("{ ");
-            while ( hasNext() ) {
-                buf.append("[");
-                buf.append(Utils.join(",",next()));
-                buf.append("],");
-            }
-            buf.deleteCharAt(buf.lastIndexOf(","));
-            buf.append(" }");
-            return buf.toString();
-        }
-
-    }
-
-    /**
-     * NextCounts is the enumerator over the state space of the multinomial dirichlet.
-     *
-     * It filters the partition of the total sum to only those with a number of terms
-     * equal to the number of categories.
-     *
-     * It then generates all permutations of that partition.
-     *
-     * In so doing it enumerates over the full state space.
-     */
-    class NextCounts implements Iterator<int[]> {
-
-        private PartitionGenerator partitioner;
-        private int numCategories;
-        private int[] next;
-
-        public NextCounts(int numCategories, int totalCounts) {
-            partitioner = new PartitionGenerator(totalCounts);
-            this.numCategories = numCategories;
-            next = nextFromPartitioner();
-        }
-
-        public void remove() { /* do nothing */ }
-
-        public boolean hasNext() { return next != null; }
-
-        public int[] next() {
-            int[] toReturn = clone(next);
-            next = nextPermutation();
-            if ( next == null ) {
-                next = nextFromPartitioner();
-            }
-
-            return toReturn;
-        }
-
-        private int[] clone(int[] arr) {
-            return Arrays.copyOf(arr, arr.length);
-        }
-
-        private int[] nextFromPartitioner() {
-            if ( partitioner.hasNext() ) {
-                List<Integer> nxt = partitioner.next();
-                while ( partitioner.hasNext() && nxt.size() > numCategories ) {
-                    nxt = partitioner.next();
-                }
-
-                if ( nxt.size() > numCategories ) {
-                    return null;
-                } else {
-                    int[] buf = new int[numCategories];
-                    for ( int idx = 0; idx < nxt.size(); idx++ ) {
-                        buf[idx] = nxt.get(idx);
-                    }
-                    Arrays.sort(buf);
-                    return buf;
-                }
-            }
-
-            return null;
-        }
-
-        public int[] nextPermutation() {
-            return MathUtilsUnitTest.nextPermutation(next);
-        }
-
-    }
-
-    public static int[] nextPermutation(int[] next) {
-        // the counts can swap among each other. The int[] is originally in ascending order
-        // this generates the next array in lexicographic order descending
-
-        // locate the last occurrence where next[k] < next[k+1]
-        int gt = -1;
-        for ( int idx = 0; idx < next.length-1; idx++) {
-            if ( next[idx] < next[idx+1] ) {
-                gt = idx;
-            }
-        }
-
-        if ( gt == -1 ) {
-            return null;
-        }
-
-        int largestLessThan = gt+1;
-        for ( int idx = 1 + largestLessThan; idx < next.length; idx++) {
-            if ( next[gt] < next[idx] ) {
-                largestLessThan = idx;
-            }
-        }
-
-        int val = next[gt];
-        next[gt] = next[largestLessThan];
-        next[largestLessThan] = val;
-
-        // reverse the tail of the array
-        int[] newTail = new int[next.length-gt-1];
-        int ctr = 0;
-        for ( int idx = next.length-1; idx > gt; idx-- ) {
-            newTail[ctr++] = next[idx];
-        }
-
-        for ( int idx = 0; idx < newTail.length; idx++) {
-            next[gt+idx+1] = newTail[idx];
-        }
-
-        return next;
-    }
-
-
-    // before testing the dirichlet multinomial, we need to test the
-    // classes used to test the dirichlet multinomial
-
-    @Test
-    public void testPartitioner() {
-        int[] numsToTest = new int[]{1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20};
-        int[] expectedSizes = new int[]{1, 2, 3, 5, 7, 11, 15, 22, 30, 42, 56, 77, 101, 135, 176, 231, 297, 385, 490, 627};
-        for ( int testNum = 0; testNum < numsToTest.length; testNum++ ) {
-            PartitionGenerator gen = new PartitionGenerator(numsToTest[testNum]);
-            int size = 0;
-            while ( gen.hasNext() ) {
-                logger.debug(gen.dataStr());
-                size += 1;
-                gen.next();
-            }
-            Assert.assertEquals(size,expectedSizes[testNum],
-                    String.format("Expected %d partitions, observed %s",expectedSizes[testNum],new PartitionGenerator(numsToTest[testNum]).toString()));
-        }
-    }
-
-    @Test
-    public void testNextPermutation() {
-        int[] arr = new int[]{1,2,3,4};
-        int[][] gens = new int[][] {
-                new int[]{1,2,3,4},
-                new int[]{1,2,4,3},
-                new int[]{1,3,2,4},
-                new int[]{1,3,4,2},
-                new int[]{1,4,2,3},
-                new int[]{1,4,3,2},
-                new int[]{2,1,3,4},
-                new int[]{2,1,4,3},
-                new int[]{2,3,1,4},
-                new int[]{2,3,4,1},
-                new int[]{2,4,1,3},
-                new int[]{2,4,3,1},
-                new int[]{3,1,2,4},
-                new int[]{3,1,4,2},
-                new int[]{3,2,1,4},
-                new int[]{3,2,4,1},
-                new int[]{3,4,1,2},
-                new int[]{3,4,2,1},
-                new int[]{4,1,2,3},
-                new int[]{4,1,3,2},
-                new int[]{4,2,1,3},
-                new int[]{4,2,3,1},
-                new int[]{4,3,1,2},
-                new int[]{4,3,2,1} };
-        for ( int gen = 0; gen < gens.length; gen ++ ) {
-            for ( int idx = 0; idx < 3; idx++ ) {
-                Assert.assertEquals(arr[idx],gens[gen][idx],
-                 String.format("Error at generation %d, expected %s, observed %s",gen,Arrays.toString(gens[gen]),Arrays.toString(arr)));
-            }
-            arr = nextPermutation(arr);
-        }
-    }
-
-    private double[] addEpsilon(double[] counts) {
-        double[] d = new double[counts.length];
-        for ( int i = 0; i < counts.length; i ++ ) {
-            d[i] = counts[i] + 1e-3;
-        }
-        return d;
-    }
-
-    @Test
-    public void testDirichletMultinomial() {
-        List<double[]> testAlleles = Arrays.asList(
-                new double[]{80,240},
-                new double[]{1,10000},
-                new double[]{0,500},
-                new double[]{5140,20480},
-                new double[]{5000,800,200},
-                new double[]{6,3,1000},
-                new double[]{100,400,300,800},
-                new double[]{8000,100,20,80,2},
-                new double[]{90,20000,400,20,4,1280,720,1}
-        );
-
-        Assert.assertTrue(! Double.isInfinite(MathUtils.log10Gamma(1e-3)) && ! Double.isNaN(MathUtils.log10Gamma(1e-3)));
-
-        int[] numAlleleSampled = new int[]{2,5,10,20,25};
-        for ( double[] alleles : testAlleles ) {
-            for ( int count : numAlleleSampled ) {
-                // test that everything sums to one. Generate all multinomial draws
-                List<Double> likelihoods = new ArrayList<>(100000);
-                NextCounts generator = new NextCounts(alleles.length,count);
-                double maxLog = Double.MIN_VALUE;
-                //List<String> countLog = new ArrayList<String>(200);
-                while ( generator.hasNext() ) {
-                    int[] thisCount = generator.next();
-                    //countLog.add(Arrays.toString(thisCount));
-                    Double likelihood = MathUtils.dirichletMultinomial(addEpsilon(alleles),thisCount);
-                    Assert.assertTrue(! Double.isNaN(likelihood) && ! Double.isInfinite(likelihood),
-                            String.format("Likelihood for counts %s and nAlleles %d was %s",
-                                    Arrays.toString(thisCount),alleles.length,Double.toString(likelihood)));
-                    if ( likelihood > maxLog )
-                        maxLog = likelihood;
-                    likelihoods.add(likelihood);
-                }
-                //System.out.printf("%d likelihoods and max is (probability) %e\n",likelihoods.size(),Math.pow(10,maxLog));
-                Assert.assertEquals(MathUtils.sumLog10(unwrap(likelihoods)),1.0,1e-7,
-                        String.format("Counts %d and alleles %d have nLikelihoods %d. \n Counts: %s",
-                                count,alleles.length,likelihoods.size(), "NODEBUG"/*,countLog*/));
-            }
-        }
-    }
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/MedianUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/MedianUnitTest.java
deleted file mode 100644
index 21f3d89..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/MedianUnitTest.java
+++ /dev/null
@@ -1,115 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils;
-
-
-// the imports for unit testing.
-
-
-import org.broadinstitute.gatk.utils.BaseTest;
-import org.testng.Assert;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
-
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-
-
-public class MedianUnitTest extends BaseTest {
-
-    // --------------------------------------------------------------------------------
-    //
-    // Provider
-    //
-    // --------------------------------------------------------------------------------
-
-    private class MedianTestProvider extends TestDataProvider {
-        final List<Integer> values = new ArrayList<Integer>();
-        final int cap;
-        final Integer expected;
-
-        public MedianTestProvider(int expected, int cap, Integer ... values) {
-            super(MedianTestProvider.class);
-            this.expected = expected;
-            this.cap = cap;
-            this.values.addAll(Arrays.asList(values));
-            this.name = String.format("values=%s expected=%d cap=%d", this.values, expected, cap);
-        }
-    }
-
-    @DataProvider(name = "MedianTestProvider")
-    public Object[][] makeMedianTestProvider() {
-        new MedianTestProvider(1, 1000, 0, 1, 2);
-        new MedianTestProvider(1, 1000, 1, 0, 1, 2);
-        new MedianTestProvider(1, 1000, 0, 1, 2, 3);
-        new MedianTestProvider(2, 1000, 0, 1, 2, 3, 4);
-        new MedianTestProvider(2, 1000, 4, 1, 2, 3, 0);
-        new MedianTestProvider(1, 1000, 1);
-        new MedianTestProvider(2, 1000, 2);
-        new MedianTestProvider(1, 1000, 1, 2);
-
-        new MedianTestProvider(1, 3, 1);
-        new MedianTestProvider(1, 3, 1, 2);
-        new MedianTestProvider(2, 3, 1, 2, 3);
-        new MedianTestProvider(2, 3, 1, 2, 3, 4);
-        new MedianTestProvider(2, 3, 1, 2, 3, 4, 5);
-
-        new MedianTestProvider(1, 3, 1);
-        new MedianTestProvider(1, 3, 1, 2);
-        new MedianTestProvider(2, 3, 3, 2, 1);
-        new MedianTestProvider(3, 3, 4, 3, 2, 1);
-        new MedianTestProvider(4, 3, 5, 4, 3, 2, 1);
-
-        return MedianTestProvider.getTests(MedianTestProvider.class);
-    }
-
-    @Test(dataProvider = "MedianTestProvider")
-    public void testBasicLikelihoods(MedianTestProvider cfg) {
-        final Median<Integer> median = new Median<Integer>(cfg.cap);
-
-        int nAdded = 0;
-        for ( final int value : cfg.values )
-            if ( median.add(value) )
-                nAdded++;
-
-        Assert.assertEquals(nAdded, median.size());
-
-        Assert.assertEquals(cfg.values.isEmpty(), median.isEmpty());
-        Assert.assertEquals(cfg.values.size() >= cfg.cap, median.isFull());
-        Assert.assertEquals(median.getMedian(), cfg.expected, cfg.toString());
-    }
-
-    @Test(expectedExceptions = IllegalStateException.class)
-    public void testEmptyMedian() {
-        final Median<Integer> median = new Median<Integer>();
-        Assert.assertTrue(median.isEmpty());
-        final Integer d = 100;
-        Assert.assertEquals(median.getMedian(d), d);
-        median.getMedian();
-    }
-
-}
\ No newline at end of file
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/NGSPlatformUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/NGSPlatformUnitTest.java
deleted file mode 100644
index b247f59..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/NGSPlatformUnitTest.java
+++ /dev/null
@@ -1,167 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils;
-
-
-// the imports for unit testing.
-
-
-import htsjdk.samtools.reference.IndexedFastaSequenceFile;
-import htsjdk.samtools.SAMFileHeader;
-import htsjdk.samtools.SAMReadGroupRecord;
-import org.broadinstitute.gatk.utils.BaseTest;
-import org.broadinstitute.gatk.utils.fasta.CachingIndexedFastaSequenceFile;
-import org.broadinstitute.gatk.utils.sam.ArtificialSAMUtils;
-import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
-import org.testng.Assert;
-import org.testng.annotations.BeforeClass;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
-
-import java.io.File;
-import java.io.FileNotFoundException;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-public class NGSPlatformUnitTest extends BaseTest {
-    // example genome loc parser for this test, can be deleted if you don't use the reference
-    private GenomeLocParser genomeLocParser;
-
-    // example fasta index file, can be deleted if you don't use the reference
-    private IndexedFastaSequenceFile seq;
-
-    @BeforeClass
-    public void setup() throws FileNotFoundException {
-        // sequence
-        seq = new CachingIndexedFastaSequenceFile(new File(b37KGReference));
-        genomeLocParser = new GenomeLocParser(seq);
-    }
-
-    @DataProvider(name = "TestPrimary")
-    public Object[][] makeTestPrimary() {
-        List<Object[]> tests = new ArrayList<Object[]>();
-
-        for ( final NGSPlatform pl : NGSPlatform.values() ) {
-            tests.add(new Object[]{pl, pl.BAM_PL_NAMES[0]});
-        }
-
-        return tests.toArray(new Object[][]{});
-    }
-
-    @Test(dataProvider = "TestPrimary")
-    public void testPrimary(final NGSPlatform pl, final String expectedPrimaryName) {
-        Assert.assertEquals(pl.getDefaultPlatform(), expectedPrimaryName, "Failed primary test for " + pl);
-    }
-
-    // make sure common names in BAMs are found
-    @DataProvider(name = "TestMappings")
-    public Object[][] makeTestMappings() {
-        List<Object[]> tests = new ArrayList<Object[]>();
-
-        final Map<String, NGSPlatform> expected = new HashMap<String, NGSPlatform>();
-        // VALID VALUES ACCORDING TO SAM SPEC: https://www.google.com/url?sa=t&rct=j&q=&esrc=s&source=web&cd=1&ved=0CC8QFjAA&url=http%3A%2F%2Fsamtools.sourceforge.net%2FSAM1.pdf&ei=Dm8WUbXAEsi10QHYqoDwDQ&usg=AFQjCNFkMtvEi6LeiKgpxQGtHTlqWKw2yw&bvm=bv.42080656,d.dmQ
-        expected.put("CAPILLARY", NGSPlatform.CAPILLARY);
-        expected.put("LS454", NGSPlatform.LS454);
-        expected.put("ILLUMINA", NGSPlatform.ILLUMINA);
-        expected.put("SOLID", NGSPlatform.SOLID);
-        expected.put("HELICOS", NGSPlatform.HELICOS);
-        expected.put("IONTORRENT", NGSPlatform.ION_TORRENT);
-        expected.put("PACBIO", NGSPlatform.PACBIO);
-        // other commonly seen values out in the wild
-        expected.put("SLX", NGSPlatform.ILLUMINA);
-        expected.put("SOLEXA", NGSPlatform.ILLUMINA);
-        expected.put("454", NGSPlatform.LS454);
-        expected.put("COMPLETE", NGSPlatform.COMPLETE_GENOMICS);
-        // unknown platforms should map to unknown
-        expected.put("MARKS_GENOMICS_TECH", NGSPlatform.UNKNOWN);
-        expected.put("RANDOM_PL_VALUE", NGSPlatform.UNKNOWN);
-        // critical -- a null platform maps to unknown
-        expected.put(null, NGSPlatform.UNKNOWN);
-
-        for ( final Map.Entry<String,NGSPlatform> one : expected.entrySet() ) {
-            tests.add(new Object[]{one.getKey(), one.getValue()});
-
-            if ( one.getKey() != null ) {
-                // make sure we're case insensitive
-                tests.add(new Object[]{one.getKey().toLowerCase(), one.getValue()});
-                tests.add(new Object[]{one.getKey().toUpperCase(), one.getValue()});
-
-                // make sure appending GENOMICS works (required for COMPLETE mapping
-                tests.add(new Object[]{one.getKey() + " GENOMICS", one.getValue()});
-                // make sure that random junk works correctly
-                tests.add(new Object[]{one.getKey() + " asdfa", one.getValue()});
-            }
-        }
-
-        return tests.toArray(new Object[][]{});
-    }
-
-    @Test(dataProvider = "TestMappings")
-    public void testMappings(final String plField, final NGSPlatform expected) {
-        Assert.assertEquals(NGSPlatform.fromReadGroupPL(plField), expected, "Failed primary test for " + plField + " mapping to " + expected);
-    }
-
-    @Test(dataProvider = "TestMappings")
-    public void testKnown(final String plField, final NGSPlatform expected) {
-        Assert.assertEquals(NGSPlatform.isKnown(plField), expected != NGSPlatform.UNKNOWN, "Failed isKnown test for " + plField + " mapping to " + expected);
-    }
-
-    /**
-     * A unit test that creates an artificial read for testing some code that uses reads
-     */
-    @Test(dataProvider = "TestMappings")
-    public void testPLFromReadWithRG(final String plField, final NGSPlatform expected) {
-        final SAMFileHeader header = ArtificialSAMUtils.createArtificialSamHeader(seq.getSequenceDictionary());
-        final String rgID = "ID";
-        final SAMReadGroupRecord rg = new SAMReadGroupRecord(rgID);
-        if ( plField != null )
-            rg.setPlatform(plField);
-        header.addReadGroup(rg);
-        final GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(header, "myRead", 0, 1, 10);
-        read.setAttribute("RG", rgID);
-        Assert.assertEquals(NGSPlatform.fromRead(read), expected);
-    }
-
-    @Test()
-    public void testPLFromReadWithRGButNoPL() {
-        final SAMFileHeader header = ArtificialSAMUtils.createArtificialSamHeader(seq.getSequenceDictionary());
-        final String rgID = "ID";
-        final SAMReadGroupRecord rg = new SAMReadGroupRecord(rgID);
-        header.addReadGroup(rg);
-        final GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(header, "myRead", 0, 1, 10);
-        read.setAttribute("RG", rgID);
-        Assert.assertEquals(NGSPlatform.fromRead(read), NGSPlatform.UNKNOWN);
-    }
-
-    @Test
-    public void testReadWithoutRG() {
-        final SAMFileHeader header = ArtificialSAMUtils.createArtificialSamHeader(seq.getSequenceDictionary());
-        final GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(header, "myRead", 0, 1, 10);
-        Assert.assertEquals(NGSPlatform.fromRead(read), NGSPlatform.UNKNOWN);
-    }
-}
\ No newline at end of file
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/PathUtilsUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/PathUtilsUnitTest.java
deleted file mode 100644
index 00cc0dc..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/PathUtilsUnitTest.java
+++ /dev/null
@@ -1,65 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils;
-
-import org.broadinstitute.gatk.utils.BaseTest;
-import org.testng.Assert;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-import org.testng.annotations.BeforeClass;
-import org.testng.annotations.Test;
-
-
-import java.io.File;
-
-public class PathUtilsUnitTest extends BaseTest {
-    @BeforeClass
-    public void init() { }
-
-    /**
-     * Tests that we can successfully refresh a volume
-     */
-    @Test
-    public void testRefreshVolume() {
-        logger.warn("Executing testRefreshVolume");
-
-        Assert.assertTrue(successfullyRefreshedVolume(System.getProperty("java.io.tmpdir")));
-        Assert.assertFalse(successfullyRefreshedVolume("/a/made/up/file.txt"));
-    }
-
-    private boolean successfullyRefreshedVolume(String filename) {
-        boolean result = true;
-
-        try {
-            PathUtils.refreshVolume(new File(filename));
-        } catch (ReviewedGATKException e) {
-            result = false;
-        }
-
-        logger.warn(filename + " is accessible : " + result);
-
-        return result;
-    }
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/QualityUtilsUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/QualityUtilsUnitTest.java
deleted file mode 100644
index 86b436b..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/QualityUtilsUnitTest.java
+++ /dev/null
@@ -1,189 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils;
-
-/**
- * Created by IntelliJ IDEA.
- * User: rpoplin
- * Date: 3/21/12
- */
-
-import org.broadinstitute.gatk.utils.BaseTest;
-import org.testng.Assert;
-import org.testng.annotations.BeforeClass;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
-
-import java.util.ArrayList;
-import java.util.List;
-
-/**
- * Basic unit test for QualityUtils class
- */
-public class QualityUtilsUnitTest extends BaseTest {
-    final private static double TOLERANCE = 1e-9;
-
-    @BeforeClass
-    public void init() {
-    }
-
-    @DataProvider(name = "QualTest")
-    public Object[][] makeMyDataProvider() {
-        final List<Object[]> tests = new ArrayList<>();
-
-        for ( int qual = 0; qual < 255; qual++ ) {
-            tests.add(new Object[]{(byte)(qual & 0xFF), Math.pow(10.0, ((double)qual)/-10.0)});
-        }
-
-        return tests.toArray(new Object[][]{});
-    }
-
-    /**
-     * Example testng test using MyDataProvider
-     */
-    @Test(dataProvider = "QualTest")
-    public void testMyData(final byte qual, final double errorRate) {
-        final double trueRate = 1 - errorRate;
-
-        final double actualErrorRate = QualityUtils.qualToErrorProb(qual);
-        Assert.assertEquals(actualErrorRate, errorRate, TOLERANCE);
-        final double actualTrueRate = QualityUtils.qualToProb(qual);
-        Assert.assertEquals(actualTrueRate, trueRate, TOLERANCE);
-
-        // log10 tests
-        final double actualLog10ErrorRate = QualityUtils.qualToErrorProbLog10(qual);
-        Assert.assertEquals(actualLog10ErrorRate, Math.log10(errorRate), TOLERANCE);
-        final double actualLog10TrueRate = QualityUtils.qualToProbLog10(qual);
-        Assert.assertEquals(actualLog10TrueRate, Math.log10(trueRate), TOLERANCE);
-
-        // test that we can convert our error rates to quals, accounting for boundaries
-        final int expectedQual = Math.max(Math.min(qual & 0xFF, QualityUtils.MAX_SAM_QUAL_SCORE), 1);
-        final byte actualQual = QualityUtils.trueProbToQual(trueRate);
-        Assert.assertEquals(actualQual, expectedQual & 0xFF);
-        final byte actualQualFromErrorRate = QualityUtils.errorProbToQual(errorRate);
-        Assert.assertEquals(actualQualFromErrorRate, expectedQual & 0xFF);
-
-        for ( int maxQual = 10; maxQual < QualityUtils.MAX_SAM_QUAL_SCORE; maxQual++ ) {
-            final byte maxAsByte = (byte)(maxQual & 0xFF);
-            final byte expectedQual2 = (byte)(Math.max(Math.min(qual & 0xFF, maxQual), 1) & 0xFF);
-            final byte actualQual2 = QualityUtils.trueProbToQual(trueRate, maxAsByte);
-            Assert.assertEquals(actualQual2, expectedQual2, "Failed with max " + maxQual);
-            final byte actualQualFromErrorRate2 = QualityUtils.errorProbToQual(errorRate, maxAsByte);
-            Assert.assertEquals(actualQualFromErrorRate2, expectedQual2, "Failed with max " + maxQual);
-
-            // test the integer routines
-            final byte actualQualInt2 = QualityUtils.trueProbToQual(trueRate, maxQual);
-            Assert.assertEquals(actualQualInt2, expectedQual2, "Failed with max " + maxQual);
-            final byte actualQualFromErrorRateInt2 = QualityUtils.errorProbToQual(errorRate, maxQual);
-            Assert.assertEquals(actualQualFromErrorRateInt2, expectedQual2, "Failed with max " + maxQual);
-        }
-    }
-
-    @Test
-    public void testTrueProbWithMinDouble() {
-        final byte actual = QualityUtils.trueProbToQual(Double.MIN_VALUE);
-        Assert.assertEquals(actual, 1, "Failed to convert true prob of min double to 1 qual");
-    }
-
-    @Test
-    public void testTrueProbWithVerySmallValue() {
-        final byte actual = QualityUtils.trueProbToQual(1.7857786272673852E-19);
-        Assert.assertEquals(actual, 1, "Failed to convert true prob of very small value 1.7857786272673852E-19 to 1 qual");
-    }
-
-    @Test
-    public void testQualCaches() {
-        Assert.assertEquals(QualityUtils.qualToErrorProb((byte) 20), 0.01, 1e-6);
-        Assert.assertEquals(QualityUtils.qualToErrorProbLog10((byte) 20), -2.0, 1e-6);
-        Assert.assertEquals(QualityUtils.qualToProb((byte) 20), 0.99, 1e-6);
-        Assert.assertEquals(QualityUtils.qualToProbLog10((byte) 20), -0.0043648054, 1e-6);
-
-        Assert.assertEquals(QualityUtils.qualToErrorProb((byte) 30), 0.001, 1e-6);
-        Assert.assertEquals(QualityUtils.qualToErrorProbLog10((byte) 30), -3.0, 1e-6);
-        Assert.assertEquals(QualityUtils.qualToProb((byte) 30), 0.999, 1e-6);
-        Assert.assertEquals(QualityUtils.qualToProbLog10((byte) 30), -0.000434511774, 1e-6);
-
-        Assert.assertEquals(QualityUtils.qualToErrorProb((byte) 40), 0.0001, 1e-6);
-        Assert.assertEquals(QualityUtils.qualToErrorProbLog10((byte) 40), -4.0, 1e-6);
-        Assert.assertEquals(QualityUtils.qualToProb((byte) 40), 0.9999, 1e-6);
-        Assert.assertEquals(QualityUtils.qualToProbLog10((byte) 40), -4.34316198e-5, 1e-6);
-    }
-
-    @Test()
-    public void testBoundingDefault() {
-        for ( int qual = 0; qual < 1000; qual++ ) {
-            final byte expected = (byte)Math.max(Math.min(qual, QualityUtils.MAX_SAM_QUAL_SCORE), 1);
-            Assert.assertEquals(QualityUtils.boundQual(qual), expected);
-        }
-    }
-
-    @Test()
-    public void testBoundingWithMax() {
-        for ( int max = 10; max < 255; max += 50 ) {
-            for ( int qual = 0; qual < 1000; qual++ ) {
-                final int expected = Math.max(Math.min(qual, max), 1);
-                Assert.assertEquals(QualityUtils.boundQual(qual, (byte)(max & 0xFF)) & 0xFF, expected & 0xFF, "qual " + qual + " max " + max);
-            }
-        }
-    }
-
-    @DataProvider(name = "PhredScaleDoubleOps")
-    public Object[][] makePhredDoubleTest() {
-        final List<Object[]> tests = new ArrayList<>();
-
-        tests.add(new Object[]{0.0, -10 * Math.log10(Double.MIN_VALUE)});
-        tests.add(new Object[]{1.0, 0.0});
-        for ( int pow = 1; pow < 20; pow++ ) {
-            tests.add(new Object[]{Math.pow(10.0, -1.0 * pow), pow * 10});
-            tests.add(new Object[]{Math.pow(10.0, -1.5 * pow), pow * 15});
-        }
-
-        return tests.toArray(new Object[][]{});
-    }
-
-    @Test()
-    public void testQualToErrorProbDouble() {
-        for ( double qual = 3.0; qual < 255.0; qual += 0.1 ) {
-            final double expected = Math.pow(10.0, qual / -10.0);
-            Assert.assertEquals(QualityUtils.qualToErrorProb(qual), expected, TOLERANCE, "failed qual->error prob for double qual " + qual);
-        }
-    }
-
-
-    @Test(dataProvider = "PhredScaleDoubleOps")
-    public void testPhredScaleDoubleOps(final double errorRate, final double expectedPhredScaled) {
-        final double actualError = QualityUtils.phredScaleErrorRate(errorRate);
-        Assert.assertEquals(actualError, expectedPhredScaled, TOLERANCE);
-        final double trueRate = 1 - errorRate;
-        final double actualTrue = QualityUtils.phredScaleCorrectRate(trueRate);
-        if ( trueRate == 1.0 ) {
-            Assert.assertEquals(actualTrue, QualityUtils.MIN_PHRED_SCALED_QUAL);
-        } else {
-            final double tol = errorRate < 1e-10 ? 10.0 : 1e-3;
-            Assert.assertEquals(actualTrue, expectedPhredScaled, tol);
-        }
-    }
-}
\ No newline at end of file
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/R/RScriptExecutorUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/R/RScriptExecutorUnitTest.java
deleted file mode 100644
index 7a56b99..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/R/RScriptExecutorUnitTest.java
+++ /dev/null
@@ -1,110 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.R;
-
-import org.apache.commons.io.FileUtils;
-import org.broadinstitute.gatk.utils.BaseTest;
-import org.broadinstitute.gatk.utils.io.IOUtils;
-import org.testng.Assert;
-import org.testng.annotations.Test;
-
-import java.io.File;
-
-/**
- * Basic unit test for RScriptExecutor in reduced reads
- */
-public class RScriptExecutorUnitTest extends BaseTest {
-
-    private static final String HELLO_WORLD_SCRIPT = "print('hello, world')";
-    private static final String GSALIB_LOADED_SCRIPT = "if (!'package:gsalib' %in% search()) stop('gsalib not loaded')";
-
-    @Test
-    public void testRscriptExists() {
-        Assert.assertTrue(RScriptExecutor.RSCRIPT_EXISTS, "Rscript not found in environment ${PATH}");
-    }
-
-    @Test(dependsOnMethods = "testRscriptExists")
-    public void testExistingScript() {
-        File script = writeScript(HELLO_WORLD_SCRIPT);
-        try {
-            RScriptExecutor executor = new RScriptExecutor();
-            executor.addScript(script);
-            executor.setExceptOnError(true);
-            Assert.assertTrue(executor.exec(), "Exec failed");
-        } finally {
-            FileUtils.deleteQuietly(script);
-        }
-    }
-
-    @Test(dependsOnMethods = "testRscriptExists", expectedExceptions = RScriptExecutorException.class)
-    public void testNonExistantScriptException() {
-        RScriptExecutor executor = new RScriptExecutor();
-        executor.setExceptOnError(true);
-        executor.addScript(new File("does_not_exists.R"));
-        executor.exec();
-    }
-
-    @Test(dependsOnMethods = "testRscriptExists")
-    public void testNonExistantScriptNoException() {
-        logger.warn("Testing that warning is printed an no exception thrown for missing script.");
-        RScriptExecutor executor = new RScriptExecutor();
-        executor.setExceptOnError(false);
-        executor.addScript(new File("does_not_exists.R"));
-        Assert.assertFalse(executor.exec(), "Exec should have returned false when the job failed");
-    }
-
-    @Test(dependsOnMethods = "testRscriptExists")
-    public void testLibrary() {
-        File script = writeScript(GSALIB_LOADED_SCRIPT);
-        try {
-            RScriptExecutor executor = new RScriptExecutor();
-            executor.addScript(script);
-            executor.addLibrary(RScriptLibrary.GSALIB);
-            executor.setExceptOnError(true);
-            Assert.assertTrue(executor.exec(), "Exec failed");
-        } finally {
-            FileUtils.deleteQuietly(script);
-        }
-    }
-
-    @Test(dependsOnMethods = "testRscriptExists", expectedExceptions = RScriptExecutorException.class)
-    public void testLibraryMissing() {
-        File script = writeScript(GSALIB_LOADED_SCRIPT);
-        try {
-            RScriptExecutor executor = new RScriptExecutor();
-            executor.addScript(script);
-            // GSALIB is not added nor imported in the script
-            executor.setExceptOnError(true);
-            executor.exec();
-        } finally {
-            FileUtils.deleteQuietly(script);
-        }
-    }
-
-    private File writeScript(String content) {
-        return IOUtils.writeTempFile(content, "myTestScript", ".R");
-    }
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/R/RScriptLibraryUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/R/RScriptLibraryUnitTest.java
deleted file mode 100644
index b89686c..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/R/RScriptLibraryUnitTest.java
+++ /dev/null
@@ -1,47 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.R;
-
-import org.apache.commons.io.FileUtils;
-import org.testng.Assert;
-import org.testng.annotations.Test;
-
-import java.io.File;
-
-public class RScriptLibraryUnitTest {
-    @Test
-    public void testProperties() {
-        Assert.assertEquals(RScriptLibrary.GSALIB.getLibraryName(), "gsalib");
-        Assert.assertEquals(RScriptLibrary.GSALIB.getResourcePath(), "gsalib.tar.gz");
-    }
-
-    @Test
-    public void testWriteTemp() {
-        File file = RScriptLibrary.GSALIB.writeTemp();
-        Assert.assertTrue(file.exists(), "R library was not written to temp file: " + file);
-        FileUtils.deleteQuietly(file);
-    }
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/R/RUtilsUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/R/RUtilsUnitTest.java
deleted file mode 100644
index 51ab6f7..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/R/RUtilsUnitTest.java
+++ /dev/null
@@ -1,65 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.R;
-
-import org.testng.Assert;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
-
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.List;
-
-public class RUtilsUnitTest {
-    @DataProvider(name = "stringLists")
-    public Object[][] getStringLists() {
-        return new Object[][] {
-                new Object[] { null, "NA" },
-                new Object[] { Collections.EMPTY_LIST, "c()" },
-                new Object[] { Arrays.asList("1", "2", "3"), "c('1','2','3')" }
-        };
-    }
-
-    @Test(dataProvider = "stringLists")
-    public void testToStringList(List<? extends CharSequence> actual, String expected) {
-        Assert.assertEquals(RUtils.toStringList(actual), expected);
-    }
-
-    @DataProvider(name = "numberLists")
-    public Object[][] getNumberLists() {
-        return new Object[][] {
-                new Object[] { null, "NA" },
-                new Object[] { Collections.EMPTY_LIST, "c()" },
-                new Object[] { Arrays.asList(1, 2, 3), "c(1,2,3)" },
-                new Object[] { Arrays.asList(1D, 2D, 3D), "c(1.0,2.0,3.0)" }
-        };
-    }
-
-    @Test(dataProvider = "numberLists")
-    public void testToNumberList(List<? extends Number> actual, String expected) {
-        Assert.assertEquals(RUtils.toNumberList(actual), expected);
-    }
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/SampleUtilsUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/SampleUtilsUnitTest.java
deleted file mode 100644
index d11c4bf..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/SampleUtilsUnitTest.java
+++ /dev/null
@@ -1,52 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils;
-
-import org.apache.commons.io.FileUtils;
-import org.broadinstitute.gatk.utils.exceptions.UserException;
-import org.broadinstitute.gatk.utils.io.IOUtils;
-import org.testng.Assert;
-import org.broadinstitute.gatk.utils.BaseTest;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
-
-import java.io.File;
-import java.util.*;
-
-/**
- * Testing framework for sample utilities class.
- *
- * @author gauthier
- */
-
-public class SampleUtilsUnitTest extends BaseTest {
-    @Test(expectedExceptions=UserException.class)
-    public void testBadSampleFiles() throws Exception {
-        Set<File> sampleFiles = new HashSet<File>(0);
-        sampleFiles.add(new File("fileNotHere.samples"));
-        Collection<String> samplesFromFile = SampleUtils.getSamplesFromFiles(sampleFiles);
-    }
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/SequenceDictionaryUtilsUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/SequenceDictionaryUtilsUnitTest.java
deleted file mode 100644
index 6ccb0c9..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/SequenceDictionaryUtilsUnitTest.java
+++ /dev/null
@@ -1,241 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils;
-
-import htsjdk.samtools.SAMSequenceDictionary;
-import htsjdk.samtools.SAMSequenceRecord;
-import org.apache.log4j.Logger;
-import org.broadinstitute.gatk.utils.BaseTest;
-import org.broadinstitute.gatk.engine.arguments.ValidationExclusion;
-import org.broadinstitute.gatk.utils.exceptions.UserException;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
-import org.testng.Assert;
-
-import static org.broadinstitute.gatk.utils.SequenceDictionaryUtils.*;
-import static org.broadinstitute.gatk.utils.SequenceDictionaryUtils.SequenceDictionaryCompatibility.*;
-
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-
-public class SequenceDictionaryUtilsUnitTest extends BaseTest {
-
-    private static Logger logger = Logger.getLogger(SequenceDictionaryUtilsUnitTest.class);
-
-
-    @DataProvider( name = "SequenceDictionaryDataProvider" )
-    public Object[][] generateSequenceDictionaryTestData() {
-        final SAMSequenceRecord CHRM_HG19 = new SAMSequenceRecord("chrM", 16571);
-        final SAMSequenceRecord CHR_NONSTANDARD1 = new SAMSequenceRecord("NonStandard1", 8675309);
-        final SAMSequenceRecord CHR_NONSTANDARD2 = new SAMSequenceRecord("NonStandard2", 8675308);
-
-        final Class NO_COMMON_CONTIGS_EXCEPTION = UserException.IncompatibleSequenceDictionaries.class;
-        final Class UNEQUAL_COMMON_CONTIGS_EXCEPTION = UserException.IncompatibleSequenceDictionaries.class;
-        final Class NON_CANONICAL_HUMAN_ORDER_EXCEPTION = UserException.LexicographicallySortedSequenceDictionary.class;
-        final Class OUT_OF_ORDER_EXCEPTION = UserException.IncompatibleSequenceDictionaries.class;
-        final Class DIFFERENT_INDICES_EXCEPTION = UserException.IncompatibleSequenceDictionaries.class;
-
-        final List<SAMSequenceRecord> hg19Sequences = Arrays.asList(CHRM_HG19, CHR1_HG19, CHR2_HG19, CHR10_HG19);
-        final GenomeLocParser hg19GenomeLocParser = new GenomeLocParser(new SAMSequenceDictionary(hg19Sequences));
-        final List<GenomeLoc> hg19AllContigsIntervals = Arrays.asList(hg19GenomeLocParser.createGenomeLoc("chrM", 0, 1),
-                                                                      hg19GenomeLocParser.createGenomeLoc("chr1", 0, 1),
-                                                                      hg19GenomeLocParser.createGenomeLoc("chr2", 0, 1),
-                                                                      hg19GenomeLocParser.createGenomeLoc("chr10", 0, 1));
-        final List<GenomeLoc> hg19PartialContigsIntervals = Arrays.asList(hg19GenomeLocParser.createGenomeLoc("chrM", 0, 1),
-                                                                          hg19GenomeLocParser.createGenomeLoc("chr1", 0, 1));
-        final GenomeLocSortedSet hg19AllContigsIntervalSet = new GenomeLocSortedSet(hg19GenomeLocParser, hg19AllContigsIntervals);
-        final GenomeLocSortedSet hg19PartialContigsIntervalSet = new GenomeLocSortedSet(hg19GenomeLocParser, hg19PartialContigsIntervals);
-
-        return new Object[][]  {
-            // Identical dictionaries:
-            { Arrays.asList(CHR1_HG19),                        Arrays.asList(CHR1_HG19),                        null, IDENTICAL, null, false, null },
-            { Arrays.asList(CHR1_HG19, CHR2_HG19, CHR10_HG19), Arrays.asList(CHR1_HG19, CHR2_HG19, CHR10_HG19), null, IDENTICAL, null, false, null },
-            { Arrays.asList(CHR1_B37),                         Arrays.asList(CHR1_B37),                         null, IDENTICAL, null, false, null },
-            { Arrays.asList(CHR1_B37, CHR2_B37, CHR10_B37),    Arrays.asList(CHR1_B37, CHR2_B37, CHR10_B37),    null, IDENTICAL, null, false, null },
-
-            // Dictionaries with a common subset:
-            { Arrays.asList(CHR1_HG19),                                          Arrays.asList(CHR1_HG19, CHR_NONSTANDARD1),                                   null, COMMON_SUBSET, null, false, null },
-            { Arrays.asList(CHR1_HG19, CHR_NONSTANDARD1),                        Arrays.asList(CHR1_HG19, CHR_NONSTANDARD2),                                   null, COMMON_SUBSET, null, false, null },
-            { Arrays.asList(CHR_NONSTANDARD1, CHR1_HG19),                        Arrays.asList(CHR_NONSTANDARD2, CHR1_HG19),                                   null, COMMON_SUBSET, null, false, null },
-            { Arrays.asList(CHR_NONSTANDARD1, CHR1_HG19),                        Arrays.asList(CHR_NONSTANDARD2, CHR1_HG19, CHRM_HG19),                        null, COMMON_SUBSET, null, false, null },
-            { Arrays.asList(CHR1_HG19, CHR2_HG19, CHR10_HG19, CHR_NONSTANDARD1), Arrays.asList(CHR1_HG19, CHR2_HG19, CHR10_HG19, CHR_NONSTANDARD2),            null, COMMON_SUBSET, null, false, null },
-            { Arrays.asList(CHR1_HG19, CHR2_HG19, CHR10_HG19, CHR_NONSTANDARD1), Arrays.asList(CHR1_HG19, CHR2_HG19, CHR10_HG19),                              null, COMMON_SUBSET, null, false, null },
-            { Arrays.asList(CHR1_HG19, CHR2_HG19, CHR10_HG19),                   Arrays.asList(CHR1_HG19, CHR2_HG19, CHR10_HG19, CHR_NONSTANDARD1),            null, COMMON_SUBSET, null, false, null },
-            { Arrays.asList(CHR_NONSTANDARD1, CHR1_HG19, CHR2_HG19, CHR10_HG19), Arrays.asList(CHR_NONSTANDARD2, CHR1_HG19, CHR2_HG19, CHR10_HG19),            null, COMMON_SUBSET, null, false, null },
-            { Arrays.asList(CHR_NONSTANDARD1, CHR1_HG19, CHR2_HG19, CHR10_HG19), Arrays.asList(CHR_NONSTANDARD2, CHR1_HG19, CHR2_HG19, CHR10_HG19, CHRM_HG19), null, COMMON_SUBSET, null, false, null },
-            { Arrays.asList(CHR1_B37, CHR2_B37, CHR10_B37, CHR_NONSTANDARD1),    Arrays.asList(CHR1_B37, CHR2_B37, CHR10_B37, CHR_NONSTANDARD2),               null, COMMON_SUBSET, null, false, null },
-            { Arrays.asList(CHR1_HG19, CHR2_HG19),                               Arrays.asList(CHR1_HG19, CHR2_HG19, CHR10_HG19),                              null, COMMON_SUBSET, null, false, null },
-
-            // Dictionaries with no common contigs:
-            { Arrays.asList(CHR1_HG19),                        Arrays.asList(CHR2_HG19),                     null, NO_COMMON_CONTIGS, NO_COMMON_CONTIGS_EXCEPTION, false, null },
-            { Arrays.asList(CHR1_HG19),                        Arrays.asList(CHR1_B37),                      null, NO_COMMON_CONTIGS, NO_COMMON_CONTIGS_EXCEPTION, false, null },
-            { Arrays.asList(CHR1_HG19, CHR2_HG19, CHR10_HG19), Arrays.asList(CHR1_B37, CHR2_B37, CHR10_B37), null, NO_COMMON_CONTIGS, NO_COMMON_CONTIGS_EXCEPTION, false, null },
-            { Arrays.asList(CHR1_HG19, CHR2_HG19),             Arrays.asList(CHR1_B37, CHR2_B37, CHR10_B37), null, NO_COMMON_CONTIGS, NO_COMMON_CONTIGS_EXCEPTION, false, null },
-
-            // Dictionaries with unequal common contigs:
-            { Arrays.asList(CHR1_HG19),                                          Arrays.asList(CHR1_HG18),                                          null, UNEQUAL_COMMON_CONTIGS, UNEQUAL_COMMON_CONTIGS_EXCEPTION, false, null },
-            { Arrays.asList(CHR1_B36),                                           Arrays.asList(CHR1_B37),                                           null, UNEQUAL_COMMON_CONTIGS, UNEQUAL_COMMON_CONTIGS_EXCEPTION, false, null },
-            { Arrays.asList(CHR1_HG19, CHR2_HG19, CHR10_HG19),                   Arrays.asList(CHR1_HG18, CHR2_HG18, CHR10_HG18),                   null, UNEQUAL_COMMON_CONTIGS, UNEQUAL_COMMON_CONTIGS_EXCEPTION, false, null },
-            { Arrays.asList(CHR1_B37, CHR2_B37, CHR10_B37),                      Arrays.asList(CHR1_B36, CHR2_B36, CHR10_B36),                      null, UNEQUAL_COMMON_CONTIGS, UNEQUAL_COMMON_CONTIGS_EXCEPTION, false, null },
-            { Arrays.asList(CHR1_HG19, CHR2_HG19, CHR10_HG19, CHR_NONSTANDARD1), Arrays.asList(CHR1_HG18, CHR2_HG18, CHR10_HG18, CHR_NONSTANDARD2), null, UNEQUAL_COMMON_CONTIGS, UNEQUAL_COMMON_CONTIGS_EXCEPTION, false, null },
-            { Arrays.asList(CHR_NONSTANDARD1, CHR1_HG19, CHR2_HG19, CHR10_HG19), Arrays.asList(CHR_NONSTANDARD2, CHR1_HG18, CHR2_HG18, CHR10_HG18), null, UNEQUAL_COMMON_CONTIGS, UNEQUAL_COMMON_CONTIGS_EXCEPTION, false, null },
-            { Arrays.asList(CHR1_HG19, CHR2_HG19),                               Arrays.asList(CHR1_HG18, CHR2_HG18, CHR10_HG18),                   null, UNEQUAL_COMMON_CONTIGS, UNEQUAL_COMMON_CONTIGS_EXCEPTION, false, null },
-
-            // One or both dictionaries in non-canonical human order:
-            { Arrays.asList(CHR1_HG19, CHR10_HG19, CHR2_HG19), Arrays.asList(CHR1_HG19, CHR10_HG19, CHR2_HG19), null, NON_CANONICAL_HUMAN_ORDER, NON_CANONICAL_HUMAN_ORDER_EXCEPTION, false, null },
-            { Arrays.asList(CHR1_HG18, CHR10_HG18, CHR2_HG18), Arrays.asList(CHR1_HG18, CHR10_HG18, CHR2_HG18), null, NON_CANONICAL_HUMAN_ORDER, NON_CANONICAL_HUMAN_ORDER_EXCEPTION, false, null },
-            { Arrays.asList(CHR1_HG19, CHR10_HG19, CHR2_HG19), Arrays.asList(CHR1_HG19, CHR2_HG19, CHR10_HG19), null, NON_CANONICAL_HUMAN_ORDER, NON_CANONICAL_HUMAN_ORDER_EXCEPTION, false, null },
-            { Arrays.asList(CHR1_HG19, CHR2_HG19, CHR10_HG19), Arrays.asList(CHR1_HG19, CHR10_HG19, CHR2_HG19), null, NON_CANONICAL_HUMAN_ORDER, NON_CANONICAL_HUMAN_ORDER_EXCEPTION, false, null },
-            { Arrays.asList(CHR1_B37, CHR10_B37, CHR2_B37),    Arrays.asList(CHR1_B37, CHR10_B37, CHR2_B37),    null, NON_CANONICAL_HUMAN_ORDER, NON_CANONICAL_HUMAN_ORDER_EXCEPTION, false, null },
-            { Arrays.asList(CHR1_B36, CHR10_B36, CHR2_B36),    Arrays.asList(CHR1_B36, CHR10_B36, CHR2_B36),    null, NON_CANONICAL_HUMAN_ORDER, NON_CANONICAL_HUMAN_ORDER_EXCEPTION, false, null },
-
-            // Dictionaries with a common subset, but different relative ordering within that subset:
-            { Arrays.asList(CHR1_HG19, CHR2_HG19),            Arrays.asList(CHR2_HG19, CHR1_HG19),                              null, OUT_OF_ORDER, OUT_OF_ORDER_EXCEPTION, false, null },
-            { Arrays.asList(CHRM_HG19, CHR1_HG19, CHR2_HG19), Arrays.asList(CHR2_HG19, CHR1_HG19, CHRM_HG19),                   null, OUT_OF_ORDER, OUT_OF_ORDER_EXCEPTION, false, null },
-            { Arrays.asList(CHRM_HG19, CHR1_HG19, CHR2_HG19), Arrays.asList(CHRM_HG19, CHR2_HG19, CHR1_HG19),                   null, OUT_OF_ORDER, OUT_OF_ORDER_EXCEPTION, false, null },
-            { Arrays.asList(CHRM_HG19, CHR1_HG19, CHR2_HG19), Arrays.asList(CHR2_HG19, CHRM_HG19, CHR1_HG19),                   null, OUT_OF_ORDER, OUT_OF_ORDER_EXCEPTION, false, null },
-            { Arrays.asList(CHR1_B37, CHR2_B37),              Arrays.asList(CHR2_B37, CHR1_B37),                                null, OUT_OF_ORDER, OUT_OF_ORDER_EXCEPTION, false, null },
-
-
-            // Dictionaries with a common subset in the same relative order, but with different indices.
-            // This will only throw an exception during validation if isReadsToReferenceComparison is true,
-            // and there are intervals overlapping the misindexed contigs:
-
-            // These have isReadsToReferenceComparison == true and overlapping intervals, so we expect an exception:
-            { Arrays.asList(CHRM_HG19, CHR1_HG19),                                                 Arrays.asList(CHR1_HG19),                                          null, DIFFERENT_INDICES, DIFFERENT_INDICES_EXCEPTION, true, hg19AllContigsIntervalSet },
-            { Arrays.asList(CHR1_HG19, CHR2_HG19),                                                 Arrays.asList(CHRM_HG19, CHR1_HG19, CHR2_HG19),                    null, DIFFERENT_INDICES, DIFFERENT_INDICES_EXCEPTION, true, hg19AllContigsIntervalSet },
-            { Arrays.asList(CHR1_HG19, CHR2_HG19),                                                 Arrays.asList(CHRM_HG19, CHR1_HG19, CHR2_HG19, CHR_NONSTANDARD1),  null, DIFFERENT_INDICES, DIFFERENT_INDICES_EXCEPTION, true, hg19AllContigsIntervalSet },
-            { Arrays.asList(CHR1_HG19, CHR2_HG19),                                                 Arrays.asList(CHRM_HG19, CHR_NONSTANDARD1, CHR1_HG19, CHR2_HG19),  null, DIFFERENT_INDICES, DIFFERENT_INDICES_EXCEPTION, true, hg19AllContigsIntervalSet },
-            { Arrays.asList(CHR1_HG19, CHR2_HG19, CHR_NONSTANDARD1, CHRM_HG19 ),                   Arrays.asList(CHR1_HG19, CHR2_HG19, CHRM_HG19),                    null, DIFFERENT_INDICES, DIFFERENT_INDICES_EXCEPTION, true, hg19AllContigsIntervalSet },
-            { Arrays.asList(CHR1_HG19, CHR2_HG19, CHR_NONSTANDARD1, CHRM_HG19, CHR_NONSTANDARD2 ), Arrays.asList(CHR1_HG19, CHR2_HG19, CHRM_HG19, CHR_NONSTANDARD2 ), null, DIFFERENT_INDICES, DIFFERENT_INDICES_EXCEPTION, true, hg19AllContigsIntervalSet },
-            { Arrays.asList(CHR1_HG19, CHR_NONSTANDARD1, CHR2_HG19, CHRM_HG19, CHR_NONSTANDARD2 ), Arrays.asList(CHR1_HG19, CHR2_HG19, CHRM_HG19, CHR_NONSTANDARD2 ), null, DIFFERENT_INDICES, DIFFERENT_INDICES_EXCEPTION, true, hg19AllContigsIntervalSet },
-
-            // These have isReadsToReferenceComparison == true but no overlapping intervals, so we don't expect an exception:
-            { Arrays.asList(CHR2_HG19, CHR10_HG19),                              Arrays.asList(CHR10_HG19),                       null, DIFFERENT_INDICES, null, true, hg19PartialContigsIntervalSet },
-            { Arrays.asList(CHR1_HG19, CHR_NONSTANDARD1, CHR2_HG19),             Arrays.asList(CHR1_HG19, CHR2_HG19),             null, DIFFERENT_INDICES, null, true, hg19PartialContigsIntervalSet },
-            { Arrays.asList(CHR1_HG19, CHR_NONSTANDARD1, CHR2_HG19, CHR10_HG19), Arrays.asList(CHR1_HG19, CHR2_HG19, CHR10_HG19), null, DIFFERENT_INDICES, null, true, hg19PartialContigsIntervalSet },
-
-            // These have isReadsToReferenceComparison == false, so we don't expect an exception:
-            { Arrays.asList(CHRM_HG19, CHR1_HG19),                              Arrays.asList(CHR1_HG19),                       null, DIFFERENT_INDICES, null, false, hg19AllContigsIntervalSet },
-            { Arrays.asList(CHR1_HG19, CHR_NONSTANDARD1, CHR2_HG19, CHRM_HG19), Arrays.asList(CHR1_HG19, CHR2_HG19, CHRM_HG19), null, DIFFERENT_INDICES, null, false, hg19AllContigsIntervalSet },
-
-
-            // Tests for validation exclusions. Note that errors resulting from NO_COMMON_CONTIGs cannot be suppressed
-            { Arrays.asList(CHR1_HG19),                        Arrays.asList(CHR2_HG19),                        ValidationExclusion.TYPE.ALLOW_SEQ_DICT_INCOMPATIBILITY, NO_COMMON_CONTIGS,         NO_COMMON_CONTIGS_EXCEPTION, false, null },
-            { Arrays.asList(CHR1_HG19),                        Arrays.asList(CHR2_HG19),                        ValidationExclusion.TYPE.ALL,                            NO_COMMON_CONTIGS,         NO_COMMON_CONTIGS_EXCEPTION, false, null },
-            { Arrays.asList(CHR1_HG19),                        Arrays.asList(CHR1_HG18),                        ValidationExclusion.TYPE.ALLOW_SEQ_DICT_INCOMPATIBILITY, UNEQUAL_COMMON_CONTIGS,    null, false, null },
-            { Arrays.asList(CHR1_HG19),                        Arrays.asList(CHR1_HG18),                        ValidationExclusion.TYPE.ALL,                            UNEQUAL_COMMON_CONTIGS,    null, false, null },
-            { Arrays.asList(CHR1_HG19, CHR10_HG19, CHR2_HG19), Arrays.asList(CHR1_HG19, CHR10_HG19, CHR2_HG19), ValidationExclusion.TYPE.ALLOW_SEQ_DICT_INCOMPATIBILITY, NON_CANONICAL_HUMAN_ORDER, null, false, null },
-            { Arrays.asList(CHR1_HG19, CHR10_HG19, CHR2_HG19), Arrays.asList(CHR1_HG19, CHR10_HG19, CHR2_HG19), ValidationExclusion.TYPE.ALL,                            NON_CANONICAL_HUMAN_ORDER, null, false, null },
-            { Arrays.asList(CHR1_HG19, CHR2_HG19),             Arrays.asList(CHR2_HG19, CHR1_HG19),             ValidationExclusion.TYPE.ALLOW_SEQ_DICT_INCOMPATIBILITY, OUT_OF_ORDER,              null, false, null },
-            { Arrays.asList(CHR1_HG19, CHR2_HG19),             Arrays.asList(CHR2_HG19, CHR1_HG19),             ValidationExclusion.TYPE.ALL,                            OUT_OF_ORDER,              null, false, null },
-            { Arrays.asList(CHRM_HG19, CHR1_HG19),             Arrays.asList(CHR1_HG19),                        ValidationExclusion.TYPE.ALLOW_SEQ_DICT_INCOMPATIBILITY, DIFFERENT_INDICES,         null, true, hg19AllContigsIntervalSet },
-            { Arrays.asList(CHRM_HG19, CHR1_HG19),             Arrays.asList(CHR1_HG19),                        ValidationExclusion.TYPE.ALL,                            DIFFERENT_INDICES,         null, true, hg19AllContigsIntervalSet }
-        };
-    }
-
-    @Test( dataProvider = "SequenceDictionaryDataProvider" )
-    public void testSequenceDictionaryValidation( final List<SAMSequenceRecord> firstDictionaryContigs,
-                                                  final List<SAMSequenceRecord> secondDictionaryContigs,
-                                                  final ValidationExclusion.TYPE validationExclusions,
-                                                  final SequenceDictionaryUtils.SequenceDictionaryCompatibility dictionaryCompatibility,
-                                                  final Class expectedExceptionUponValidation,
-                                                  final boolean isReadsToReferenceComparison,
-                                                  final GenomeLocSortedSet intervals ) {
-
-        final SAMSequenceDictionary firstDictionary = createSequenceDictionary(firstDictionaryContigs);
-        final SAMSequenceDictionary secondDictionary = createSequenceDictionary(secondDictionaryContigs);
-        final String testDescription = String.format("First dictionary: %s  Second dictionary: %s  Validation exclusions: %s",
-                                                     SequenceDictionaryUtils.getDictionaryAsString(firstDictionary),
-                                                     SequenceDictionaryUtils.getDictionaryAsString(secondDictionary),
-                                                     validationExclusions);
-
-        Exception exceptionThrown = null;
-        try {
-            SequenceDictionaryUtils.validateDictionaries(logger,
-                                                         validationExclusions,
-                                                         "firstDictionary",
-                                                         firstDictionary,
-                                                         "secondDictionary",
-                                                         secondDictionary,
-                                                         isReadsToReferenceComparison,
-                                                         intervals);
-        }
-        catch ( Exception e ) {
-            exceptionThrown = e;
-        }
-
-        if ( expectedExceptionUponValidation != null ) {
-            Assert.assertTrue(exceptionThrown != null && expectedExceptionUponValidation.isInstance(exceptionThrown),
-                              String.format("Expected exception %s but saw %s instead. %s",
-                                            expectedExceptionUponValidation.getSimpleName(),
-                                            exceptionThrown == null ? "no exception" : exceptionThrown.getClass().getSimpleName(),
-                                            testDescription));
-        }
-        else {
-            Assert.assertTrue(exceptionThrown == null,
-                              String.format("Expected no exception but saw exception %s instead. %s",
-                                            exceptionThrown != null ? exceptionThrown.getClass().getSimpleName() : "none",
-                                            testDescription));
-        }
-    }
-
-    @Test( dataProvider = "SequenceDictionaryDataProvider" )
-    public void testSequenceDictionaryComparison( final List<SAMSequenceRecord> firstDictionaryContigs,
-                                                  final List<SAMSequenceRecord> secondDictionaryContigs,
-                                                  final ValidationExclusion.TYPE validationExclusions,
-                                                  final SequenceDictionaryUtils.SequenceDictionaryCompatibility dictionaryCompatibility,
-                                                  final Class expectedExceptionUponValidation,
-                                                  final boolean isReadsToReferenceComparison,
-                                                  final GenomeLocSortedSet intervals ) {
-
-        final SAMSequenceDictionary firstDictionary = createSequenceDictionary(firstDictionaryContigs);
-        final SAMSequenceDictionary secondDictionary = createSequenceDictionary(secondDictionaryContigs);
-        final String testDescription = String.format("First dictionary: %s  Second dictionary: %s",
-                                                     SequenceDictionaryUtils.getDictionaryAsString(firstDictionary),
-                                                     SequenceDictionaryUtils.getDictionaryAsString(secondDictionary));
-
-        final SequenceDictionaryUtils.SequenceDictionaryCompatibility reportedCompatibility =
-              SequenceDictionaryUtils.compareDictionaries(firstDictionary, secondDictionary);
-
-        Assert.assertTrue(reportedCompatibility == dictionaryCompatibility,
-                          String.format("Dictionary comparison should have returned %s but instead returned %s. %s",
-                                        dictionaryCompatibility, reportedCompatibility, testDescription));
-    }
-
-    private SAMSequenceDictionary createSequenceDictionary( final List<SAMSequenceRecord> contigs ) {
-        final List<SAMSequenceRecord> clonedContigs = new ArrayList<SAMSequenceRecord>(contigs.size());
-
-        // Clone the individual SAMSequenceRecords to avoid contig-index issues with shared objects
-        // across multiple dictionaries in tests
-        for ( SAMSequenceRecord contig : contigs ) {
-            clonedContigs.add(contig.clone());
-        }
-
-        return new SAMSequenceDictionary(clonedContigs);
-    }
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/SimpleTimerUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/SimpleTimerUnitTest.java
deleted file mode 100644
index 85aec81..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/SimpleTimerUnitTest.java
+++ /dev/null
@@ -1,179 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils;
-
-import org.broadinstitute.gatk.utils.BaseTest;
-import org.testng.Assert;
-import org.testng.annotations.Test;
-
-import java.lang.reflect.Field;
-
-import java.util.Arrays;
-import java.util.List;
-import java.util.concurrent.TimeUnit;
-
-public class SimpleTimerUnitTest extends BaseTest {
-    private final static String NAME = "unit.test.timer";
-
-    @Test
-    public void testSimpleTimer() {
-        SimpleTimer t = new SimpleTimer(NAME);
-        Assert.assertEquals(t.getName(), NAME, "Name is not the provided one");
-        Assert.assertFalse(t.isRunning(), "Initial state of the timer is running");
-        Assert.assertEquals(t.getElapsedTime(), 0.0, "New timer elapsed time should be 0");
-        Assert.assertEquals(t.getElapsedTimeNano(), 0l, "New timer elapsed time nano should be 0");
-
-        t.start();
-        Assert.assertTrue(t.isRunning(), "Started timer isn't running");
-        Assert.assertTrue(t.getElapsedTime() >= 0.0, "Elapsed time should be >= 0");
-        Assert.assertTrue(t.getElapsedTimeNano() >= 0.0, "Elapsed time nano should be >= 0");
-        long n1 = t.getElapsedTimeNano();
-        double t1 = t.getElapsedTime();
-        idleLoop(); // idle loop to wait a tiny bit of time
-        long n2 = t.getElapsedTimeNano();
-        double t2 = t.getElapsedTime();
-        Assert.assertTrue(t2 >= t1, "T2 >= T1 for a running time");
-        Assert.assertTrue(n2 >= n1, "T2 >= T1 nano for a running time");
-
-        t.stop();
-        Assert.assertFalse(t.isRunning(), "Stopped timer still running");
-        long n3 = t.getElapsedTimeNano();
-        double t3 = t.getElapsedTime();
-        idleLoop(); // idle loop to wait a tiny bit of time
-        double t4 = t.getElapsedTime();
-        long n4 = t.getElapsedTimeNano();
-        Assert.assertTrue(t4 == t3, "Elapsed times for two calls of stop timer not the same");
-        Assert.assertTrue(n4 == n3, "Elapsed times for two calls of stop timer not the same");
-
-        t.restart();
-        idleLoop(); // idle loop to wait a tiny bit of time
-        double t5 = t.getElapsedTime();
-        long n5 = t.getElapsedTimeNano();
-        Assert.assertTrue(t.isRunning(), "Restarted timer should be running");
-        idleLoop(); // idle loop to wait a tiny bit of time
-        double t6 = t.getElapsedTime();
-        long n6 = t.getElapsedTimeNano();
-        Assert.assertTrue(t5 >= t4, "Restarted timer elapsed time should be after elapsed time preceding the restart");
-        Assert.assertTrue(t6 >= t5, "Second elapsed time not after the first in restarted timer");
-        Assert.assertTrue(n5 >= n4, "Restarted timer elapsed time nano should be after elapsed time preceding the restart");
-        Assert.assertTrue(n6 >= n5, "Second elapsed time nano not after the first in restarted timer");
-
-        final List<Double> secondTimes = Arrays.asList(t1, t2, t3, t4, t5, t6);
-        final List<Long> nanoTimes     = Arrays.asList(n1, n2, n3, n4, n5, n6);
-        for ( int i = 0; i < nanoTimes.size(); i++ )
-            Assert.assertEquals(
-                    SimpleTimer.nanoToSecondsAsDouble(nanoTimes.get(i)),
-                    secondTimes.get(i), 1e-1, "Nanosecond and second timer disagree");
-    }
-
-    @Test
-    public void testNanoResolution() {
-        SimpleTimer t = new SimpleTimer(NAME);
-
-        // test the nanosecond resolution
-        long n7 = t.currentTimeNano();
-        int sum = 0;
-        for ( int i = 0; i < 100; i++) sum += i;
-        long n8 = t.currentTimeNano();
-        final long delta = n8 - n7;
-        final long oneMilliInNano = TimeUnit.MILLISECONDS.toNanos(1);
-        logger.warn("nanoTime before nano operation " + n7);
-        logger.warn("nanoTime after nano operation of summing 100 ints " + n8 + ", sum = " + sum + " time delta " + delta + " vs. 1 millsecond in nano " + oneMilliInNano);
-        Assert.assertTrue(n8 > n7, "SimpleTimer doesn't appear to have nanoSecond resolution: n8 " + n8 + " <= n7 " + n7);
-        Assert.assertTrue(delta < oneMilliInNano,
-                "SimpleTimer doesn't appear to have nanoSecond resolution: time delta is " + delta + " vs 1 millisecond in nano " + oneMilliInNano);
-    }
-
-    @Test
-    public void testMeaningfulTimes() {
-        SimpleTimer t = new SimpleTimer(NAME);
-
-        t.start();
-        for ( int i = 0; i < 100; i++ ) ;
-        long nano = t.getElapsedTimeNano();
-        double secs = t.getElapsedTime();
-
-        Assert.assertTrue(secs > 0, "Seconds timer doesn't appear to count properly: elapsed time is " + secs);
-        Assert.assertTrue(secs < 0.01, "Fast operation said to take longer than 10 milliseconds: elapsed time in seconds " + secs);
-
-        Assert.assertTrue(nano > 0, "Nanosecond timer doesn't appear to count properly: elapsed time is " + nano);
-        final long maxTimeInMicro = 10000;
-        final long maxTimeInNano = TimeUnit.MICROSECONDS.toNanos(maxTimeInMicro);
-        Assert.assertTrue(nano < maxTimeInNano, "Fast operation said to take longer than " + maxTimeInMicro + " microseconds: elapsed time in nano " + nano + " micro " + TimeUnit.NANOSECONDS.toMicros(nano));
-    }
-
-    @Test
-    public void testCheckpointRestart() throws Exception {
-        SimpleTimer t = new SimpleTimer(NAME);
-        
-        final Field offsetField = t.getClass().getDeclaredField("nanoTimeOffset");
-        offsetField.setAccessible(true);
-        long offset = ((Long) offsetField.get(t)).longValue();
-
-        t.start();
-        idleLoop();
-        // Make it as if clock has jumped into the past
-        offsetField.set(t, offset + TimeUnit.SECONDS.toNanos(10));
-        t.stop();
-        offset = ((Long) offsetField.get(t)).longValue();
-        Assert.assertEquals(t.getElapsedTime(), 0.0, "Time over restart is not zero.");
-
-        t.start();
-        idleLoop();
-        t.stop();
-        offset = ((Long) offsetField.get(t)).longValue();
-        double elapsed = t.getElapsedTime();
-        Assert.assertTrue(elapsed >= 0.0, "Elapsed time is zero.");
-        t.restart();
-        // Make the clock jump again by just a little
-        offsetField.set(t, offset + TimeUnit.SECONDS.toNanos(1));
-        idleLoop();
-        t.stop();
-        offset = ((Long) offsetField.get(t)).longValue();
-        Assert.assertTrue(t.getElapsedTime() > elapsed, "Small clock drift causing reset.");
-        elapsed = t.getElapsedTime();
-        // Now a bigger jump, into the future this time.
-        t.restart();
-        // Make the clock jump again by a lot
-        offsetField.set(t, offset - TimeUnit.SECONDS.toNanos(10));
-        t.stop();
-        Assert.assertEquals(t.getElapsedTime(), elapsed, "Time added over checkpoint/restart.");
-
-        // Test without stopping
-        t.start();
-        offset = ((Long) offsetField.get(t)).longValue();
-        // Make it as if clock has jumped into the past
-        offsetField.set(t, offset + TimeUnit.SECONDS.toNanos(10));       
-        Assert.assertEquals(t.getElapsedTime(), 0.0, "Elapsed time after C/R is not zero.");
-        idleLoop();
-        Assert.assertTrue(t.getElapsedTime() > 0.0, "Elapsed time zero after re-sync.");
-
-    }
-
-    private static void idleLoop() {
-        for ( int i = 0; i < 100000; i++ ) ; // idle loop to wait a tiny bit of time
-    }
-}
\ No newline at end of file
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/TestNGTestTransformer.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/TestNGTestTransformer.java
deleted file mode 100644
index e804e70..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/TestNGTestTransformer.java
+++ /dev/null
@@ -1,62 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils;
-
-import org.apache.log4j.Logger;
-import org.testng.IAnnotationTransformer;
-import org.testng.annotations.ITestAnnotation;
-
-import java.lang.reflect.Constructor;
-import java.lang.reflect.Method;
-
-/**
- * Provide default @Test values for GATK testng tests.
- *
- * Currently only sets the maximum runtime to 40 minutes, if it's not been specified.
- *
- * See http://beust.com/weblog/2006/10/18/annotation-transformers-in-java/
- *
- * @author depristo
- * @since 10/31/12
- * @version 0.1
- */
-public class TestNGTestTransformer implements IAnnotationTransformer {
-    public static final long DEFAULT_TIMEOUT = 1000 * 60 * 40; // 40 minutes max per test
-
-    final static Logger logger = Logger.getLogger(TestNGTestTransformer.class);
-
-    public void transform(ITestAnnotation annotation,
-                          Class testClass,
-                          Constructor testConstructor,
-                          Method testMethod)
-    {
-        if ( annotation.getTimeOut() == 0 ) {
-            logger.warn("test " + (testMethod == null ? "<null>" : testMethod.toString()) + " has no specified timeout, adding default timeout " + DEFAULT_TIMEOUT / 1000 / 60 + " minutes");
-            annotation.setTimeOut(DEFAULT_TIMEOUT);
-        }
-    }
-}
-
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/UtilsUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/UtilsUnitTest.java
deleted file mode 100644
index a303f2c..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/UtilsUnitTest.java
+++ /dev/null
@@ -1,363 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils;
-
-import org.apache.commons.io.FileUtils;
-import org.broadinstitute.gatk.engine.GenomeAnalysisEngine;
-import org.broadinstitute.gatk.utils.io.IOUtils;
-import org.testng.Assert;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
-
-import java.io.File;
-import java.util.*;
-
-/**
- * Testing framework for general purpose utilities class.
- *
- * @author hanna
- * @version 0.1
- */
-
-public class UtilsUnitTest extends BaseTest {
-    @Test
-    public void testAppend() {
-        for ( int leftSize : Arrays.asList(0, 1, 2, 3) ) {
-            for ( final int rightSize : Arrays.asList(0, 1, 2) ) {
-                final List<Integer> left = new LinkedList<Integer>();
-                for ( int i = 0; i < leftSize; i++ ) left.add(i);
-                final List<Integer> total = new LinkedList<Integer>();
-                for ( int i = 0; i < leftSize + rightSize; i++ ) total.add(i);
-
-                if ( rightSize == 0 )
-                    Assert.assertEquals(Utils.append(left), total);
-                if ( rightSize == 1 )
-                    Assert.assertEquals(Utils.append(left, leftSize), total);
-                if ( rightSize == 2 )
-                    Assert.assertEquals(Utils.append(left, leftSize, leftSize + 1), total);
-            }
-        }
-
-    }
-
-    @Test
-    public void testDupStringNoChars() {
-        String duped = Utils.dupString('a',0);
-        Assert.assertEquals(duped.length(), 0, "dupString did not produce zero-length string");
-    }
-
-    @Test
-    public void testDupStringOneChar() {
-        String duped = Utils.dupString('b',1);
-        Assert.assertEquals(duped.length(), 1, "dupString did not produce single character string");
-        Assert.assertEquals(duped.charAt(0), 'b', "dupString character was incorrect");
-    }
-
-    @Test
-    public void testXor() {
-        Assert.assertEquals(Utils.xor(false, false), false, "xor F F failed");
-        Assert.assertEquals(Utils.xor(false, true), true, "xor F T failed");
-        Assert.assertEquals(Utils.xor(true, false), true, "xor T F failed");
-        Assert.assertEquals(Utils.xor(true, true), false, "xor T T failed");
-    }
-
-    @Test
-    public void testDupStringMultiChar() {
-        String duped = Utils.dupString('c',5);
-        Assert.assertEquals(duped.length(), 5, "dupString did not produce five character string");
-        Assert.assertEquals(duped,"ccccc","dupString string was incorrect");
-    }
-
-    @Test
-    public void testJoinMap() {
-        Map<String,Integer> map = new LinkedHashMap<String,Integer>();
-        map.put("one",1);
-        map.put("two",2);
-        String joined = Utils.joinMap("-",";",map);
-        Assert.assertTrue("one-1;two-2".equals(joined));
-    }
-
-    @Test
-    public void testJoinMapLargerSet() {
-        Map<String,Integer> map = new LinkedHashMap<String,Integer>();
-        map.put("one",1);
-        map.put("two",2);
-        map.put("three",1);
-        map.put("four",2);
-        map.put("five",1);
-        map.put("six",2);
-        String joined = Utils.joinMap("-",";",map);
-        Assert.assertTrue("one-1;two-2;three-1;four-2;five-1;six-2".equals(joined));
-    }
-
-    @Test
-    public void testConcat() {
-        final String s1 = "A";
-        final String s2 = "CC";
-        final String s3 = "TTT";
-        final String s4 = "GGGG";
-        Assert.assertEquals(new String(Utils.concat()), "");
-        Assert.assertEquals(new String(Utils.concat(s1.getBytes())), s1);
-        Assert.assertEquals(new String(Utils.concat(s1.getBytes(), s2.getBytes())), s1 + s2);
-        Assert.assertEquals(new String(Utils.concat(s1.getBytes(), s2.getBytes(), s3.getBytes())), s1 + s2 + s3);
-        Assert.assertEquals(new String(Utils.concat(s1.getBytes(), s2.getBytes(), s3.getBytes(), s4.getBytes())), s1 + s2 + s3 + s4);
-    }
-
-    @Test
-    public void testEscapeExpressions() {
-        String[] expected, actual;
-
-        expected = new String[] {"one", "two", "three"};
-        actual = Utils.escapeExpressions("one two three");
-        Assert.assertEquals(actual, expected);
-        actual = Utils.escapeExpressions(" one two three");
-        Assert.assertEquals(actual, expected);
-        actual = Utils.escapeExpressions("one two three ");
-        Assert.assertEquals(actual, expected);
-        actual = Utils.escapeExpressions(" one two three ");
-        Assert.assertEquals(actual, expected);
-        actual = Utils.escapeExpressions("  one  two  three  ");
-        Assert.assertEquals(actual, expected);
-
-        expected = new String[] {"one", "two", "three four", "five", "six"};
-        actual = Utils.escapeExpressions("one two 'three four' five six");
-        Assert.assertEquals(actual, expected);
-        actual = Utils.escapeExpressions(" one two 'three four' five six");
-        Assert.assertEquals(actual, expected);
-        actual = Utils.escapeExpressions("one two 'three four' five six ");
-        Assert.assertEquals(actual, expected);
-        actual = Utils.escapeExpressions(" one two 'three four' five six ");
-        Assert.assertEquals(actual, expected);
-        actual = Utils.escapeExpressions("  one  two  'three four'  five  six  ");
-        Assert.assertEquals(actual, expected);
-
-        expected = new String[] {"one two", "three", "four"};
-        actual = Utils.escapeExpressions("'one two' three four");
-        Assert.assertEquals(actual, expected);
-        actual = Utils.escapeExpressions(" 'one two' three four");
-        Assert.assertEquals(actual, expected);
-        actual = Utils.escapeExpressions("'one two' three four ");
-        Assert.assertEquals(actual, expected);
-        actual = Utils.escapeExpressions(" 'one two' three four ");
-        Assert.assertEquals(actual, expected);
-        actual = Utils.escapeExpressions("  'one two'  three  four  ");
-        Assert.assertEquals(actual, expected);
-
-        expected = new String[] {"one", "two", "three four"};
-        actual = Utils.escapeExpressions("one two 'three four'");
-        Assert.assertEquals(actual, expected);
-        actual = Utils.escapeExpressions(" one two 'three four'");
-        Assert.assertEquals(actual, expected);
-        actual = Utils.escapeExpressions("one two 'three four' ");
-        Assert.assertEquals(actual, expected);
-        actual = Utils.escapeExpressions(" one two 'three four' ");
-        Assert.assertEquals(actual, expected);
-        actual = Utils.escapeExpressions("  one  two  'three four'  ");
-        Assert.assertEquals(actual, expected);
-    }
-
-    @Test(dataProvider = "asIntegerListData")
-    public void testAsIntegerList(final int[] values) {
-        if (values == null) {
-            try {
-                Utils.asList((int[]) null);
-                Assert.fail("Should have thrown an exception");
-            } catch (final IllegalArgumentException ex) {
-                // good.
-            }
-        } else {
-            final Random rdn = GenomeAnalysisEngine.getRandomGenerator();
-            final int[] valuesClone = values.clone();
-            final List<Integer> list = Utils.asList(valuesClone);
-            Assert.assertNotNull(list);
-            Assert.assertEquals(list.size(),values.length);
-            for (int i = 0; i < values.length; i++)
-                Assert.assertEquals((int) list.get(i),values[i]);
-            for (int i = 0; i < values.length; i++)
-                valuesClone[rdn.nextInt(values.length)] = rdn.nextInt(1000);
-            for (int i = 0; i < values.length; i++)
-                Assert.assertEquals((int) list.get(i),valuesClone[i]);
-        }
-    }
-
-    @Test(dataProvider = "asDoubleListData")
-    public void testAsDoubleList(final double[] values) {
-        if (values == null) {
-            try {
-                Utils.asList((int[]) null);
-                Assert.fail("Should have thrown an exception");
-            } catch (final IllegalArgumentException ex) {
-                // good.
-            }
-        } else {
-            final Random rdn = GenomeAnalysisEngine.getRandomGenerator();
-            final double[] valuesClone = values.clone();
-            final List<Double> list = Utils.asList(valuesClone);
-            Assert.assertNotNull(list);
-            Assert.assertEquals(list.size(),values.length);
-            for (int i = 0; i < values.length; i++)
-                Assert.assertEquals((double) list.get(i),values[i]);
-            for (int i = 0; i < values.length; i++)
-                valuesClone[rdn.nextInt(values.length)] = rdn.nextDouble() * 1000;
-            for (int i = 0; i < values.length; i++)
-                Assert.assertEquals((double) list.get(i),valuesClone[i]);
-        }
-    }
-
-    @Test
-    public void testCalcMD5() throws Exception {
-        final File source = new File(publicTestDir + "exampleFASTA.fasta");
-        final String sourceMD5 = "36880691cf9e4178216f7b52e8d85fbe";
-
-        final byte[] sourceBytes = IOUtils.readFileIntoByteArray(source);
-        Assert.assertEquals(Utils.calcMD5(sourceBytes), sourceMD5);
-
-        final String sourceString = FileUtils.readFileToString(source);
-        Assert.assertEquals(Utils.calcMD5(sourceString), sourceMD5);
-    }
-
-    @Test
-    public void testLongestCommonOps() {
-        for ( int prefixLen = 0; prefixLen < 20; prefixLen++ ) {
-            for ( int extraSeq1Len = 0; extraSeq1Len < 10; extraSeq1Len++ ) {
-                for ( int extraSeq2Len = 0; extraSeq2Len < 10; extraSeq2Len++ ) {
-                    for ( int max = 0; max < 50; max++ ) {
-                        final String prefix = Utils.dupString("A", prefixLen);
-                        final int expected = Math.min(prefixLen, max);
-
-                        {
-                            final String seq1 = prefix + Utils.dupString("C", extraSeq1Len);
-                            final String seq2 = prefix + Utils.dupString("G", extraSeq1Len);
-                            Assert.assertEquals(Utils.longestCommonPrefix(seq1.getBytes(), seq2.getBytes(), max), expected, "LongestCommonPrefix failed: seq1 " + seq1 + " seq2 " + seq2 + " max " + max);
-                        }
-
-                        {
-                            final String seq1 = Utils.dupString("C", extraSeq1Len) + prefix;
-                            final String seq2 = Utils.dupString("G", extraSeq1Len) + prefix;
-                            Assert.assertEquals(Utils.longestCommonSuffix(seq1.getBytes(), seq2.getBytes(), max), expected, "longestCommonSuffix failed: seq1 " + seq1 + " seq2 " + seq2 + " max " + max);
-                        }
-                    }
-                }
-            }
-        }
-    }
-
-    @DataProvider(name = "trim")
-    public Object[][] createTrimTestData() {
-        List<Object[]> tests = new ArrayList<Object[]>();
-
-        final String s = "AAAA";
-        for ( int front = 0; front < s.length(); front++ ) {
-            for ( int back = 0; back < s.length(); back++ ) {
-                if ( front + back <= s.length() )
-                    tests.add(new Object[]{s, front, back});
-            }
-        }
-
-        return tests.toArray(new Object[][]{});
-    }
-
-    @Test(dataProvider = "trim", enabled = true)
-    public void testTrim(final String s, final int frontTrim, final int backTrim) {
-        Assert.assertEquals(s.length() - frontTrim - backTrim, Utils.trimArray(s.getBytes(), frontTrim, backTrim).length);
-    }
-
-    @Test(dataProvider = "equalRangeData", enabled = true)
-    public void testEqualRange(final byte[] array1, final byte[] array2, final int offset1, final int offset2, final int length, final boolean expected) {
-        Assert.assertEquals(Utils.equalRange(array1,offset1,array2,offset2,length),expected);
-        Assert.assertTrue(Utils.equalRange(array1,offset1,array1,offset1,length));
-        Assert.assertTrue(Utils.equalRange(array2,offset2,array2,offset2,length));
-
-    }
-
-    @DataProvider(name = "equalRangeData")
-    public Object[][] equalRangeData() {
-        return new Object[][] {
-                new Object[] { new byte[0] , new byte[0], 0, 0, 0, true},
-                new Object[]  {      "ABCF".getBytes(), "BC".getBytes(), 1,0,2, true },
-                new Object[]  { "ABCF".getBytes(), "".getBytes(), 1,0,0, true },
-                new Object[]  { "ABCF".getBytes(), "ACBF".getBytes(), 0,0, 4, false}
-        };
-
-    }
-
-    @Test(dataProvider = "skimArrayData")
-    public void testSkimArray(final String original, final String remove) {
-        final StringBuilder resultBuilder = new StringBuilder();
-        final boolean[] removeBoolean = new boolean[remove.length()];
-        for (int i = 0; i < original.length(); i++)
-            if (remove.charAt(i) == '1') {
-                resultBuilder.append(original.charAt(i));
-                removeBoolean[i] = false;
-            } else
-                removeBoolean[i] = true;
-
-        final String expected = resultBuilder.toString();
-        final byte[] resultBytes = Utils.skimArray(original.getBytes(),removeBoolean);
-        final String resultString = new String(resultBytes);
-        Assert.assertEquals(resultString,expected);
-    }
-
-    @DataProvider(name = "skimArrayData")
-    public Object[][] skimArrayData() {
-        return new Object[][] {
-                {"romeo+juliette" , "11111111111111" },
-                {"romeo+juliette" , "11111011111111" },
-                {"romeo+juliette" , "00000011111111" },
-                {"romeo+juliette" , "11111100000000" },
-                {"romeo+juliette" , "11111011111111" },
-                {"romeo+juliette" , "01111010000001" },
-                {"romeo+juliette" , "01100110000110" },
-                {"romeo+juliette" , "10101010101010" },
-                {"romeo+juliette" , "01010101010101" },
-                {"romeo+juliette" , "01111010111001" },
-        };
-    }
-
-
-    @DataProvider(name = "asIntegerListData")
-    public Object[][] asIntegerListData() {
-        return new Object[][] {
-                { null },
-                {new int[0]},
-                {new int[]{1, 2, 3, 4, 5}},
-                {new int[]{2}},
-                {new int[]{3,4}}
-        };
-    }
-
-    @DataProvider(name = "asDoubleListData")
-    public Object[][] asDoubleListData() {
-        return new Object[][] {
-                { null },
-                {new double[0]},
-                {new double[]{1, 2, 3, 4, 5}},
-                {new double[]{2}},
-                {new double[]{3,4}},
-                {new double[]{Double.NaN, Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY}}
-        };
-    }
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/activeregion/ActiveRegionUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/activeregion/ActiveRegionUnitTest.java
deleted file mode 100644
index 41f7a76..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/activeregion/ActiveRegionUnitTest.java
+++ /dev/null
@@ -1,395 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.activeregion;
-
-
-// the imports for unit testing.
-
-
-import htsjdk.samtools.reference.IndexedFastaSequenceFile;
-import htsjdk.samtools.SAMFileHeader;
-import org.broadinstitute.gatk.utils.BaseTest;
-import org.broadinstitute.gatk.utils.GenomeLoc;
-import org.broadinstitute.gatk.utils.GenomeLocParser;
-import org.broadinstitute.gatk.utils.GenomeLocSortedSet;
-import org.broadinstitute.gatk.utils.fasta.CachingIndexedFastaSequenceFile;
-import org.broadinstitute.gatk.utils.sam.ArtificialSAMUtils;
-import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
-import org.testng.Assert;
-import org.testng.annotations.BeforeClass;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
-
-import java.io.File;
-import java.io.FileNotFoundException;
-import java.util.*;
-
-
-public class ActiveRegionUnitTest extends BaseTest {
-    private final static boolean DEBUG = false;
-    private GenomeLocParser genomeLocParser;
-    private IndexedFastaSequenceFile seq;
-    private String contig;
-    private int contigLength;
-
-    @BeforeClass
-    public void init() throws FileNotFoundException {
-        // sequence
-        seq = new CachingIndexedFastaSequenceFile(new File(b37KGReference));
-        genomeLocParser = new GenomeLocParser(seq);
-        contig = "1";
-        contigLength = genomeLocParser.getContigInfo(contig).getSequenceLength();
-    }
-
-    @DataProvider(name = "ActionRegionCreationTest")
-    public Object[][] makePollingData() {
-        List<Object[]> tests = new ArrayList<Object[]>();
-        for ( final int start : Arrays.asList(1, 10, 100, contigLength - 10, contigLength - 1) ) {
-            for ( final int size : Arrays.asList(1, 10, 100, 1000) ) {
-                for ( final int ext : Arrays.asList(0, 1, 10, 100) ) {
-                    for ( final boolean isActive : Arrays.asList(true, false) ) {
-                        for ( final boolean addStates : Arrays.asList(true, false) ) {
-                            List<ActivityProfileState> states = null;
-                            if ( addStates ) {
-                                states = new LinkedList<ActivityProfileState>();
-                                for ( int i = start; i < start + size; i++ ) {
-                                    states.add(new ActivityProfileState(genomeLocParser.createGenomeLoc(contig, i + start), isActive ? 1.0 : 0.0));
-                                }
-                            }
-                            final GenomeLoc loc = genomeLocParser.createGenomeLoc(contig, start, start + size - 1);
-                            tests.add(new Object[]{loc, states, isActive, ext});
-                        }
-                    }
-                }
-            }
-        }
-
-        return tests.toArray(new Object[][]{});
-    }
-
-    @Test(enabled = !DEBUG, dataProvider = "ActionRegionCreationTest")
-    public void testCreatingActiveRegions(final GenomeLoc loc, final List<ActivityProfileState> supportingStates, final boolean isActive, final int extension) {
-        final ActiveRegion region = new ActiveRegion(loc, supportingStates, isActive, genomeLocParser, extension);
-        Assert.assertEquals(region.getLocation(), loc);
-        Assert.assertEquals(region.getExtendedLoc().getStart(), Math.max(loc.getStart() - extension, 1));
-        Assert.assertEquals(region.getExtendedLoc().getStop(), Math.min(loc.getStop() + extension, contigLength));
-        Assert.assertEquals(region.getReadSpanLoc().getStart(), Math.max(loc.getStart() - extension, 1));
-        Assert.assertEquals(region.getReadSpanLoc().getStop(), Math.min(loc.getStop() + extension, contigLength));
-        Assert.assertEquals(region.isActive(), isActive);
-        Assert.assertEquals(region.getExtension(), extension);
-        Assert.assertEquals(region.getReads(), Collections.emptyList());
-        Assert.assertEquals(region.size(), 0);
-        Assert.assertEquals(region.getSupportingStates(), supportingStates == null ? Collections.emptyList() : supportingStates);
-        Assert.assertNotNull(region.toString());
-
-        assertGoodReferenceGetter(region.getActiveRegionReference(seq), region.getExtendedLoc(), 0);
-        assertGoodReferenceGetter(region.getActiveRegionReference(seq, 10), region.getExtendedLoc(), 10);
-        assertGoodReferenceGetter(region.getFullReference(seq), region.getReadSpanLoc(), 0);
-        assertGoodReferenceGetter(region.getFullReference(seq, 10), region.getReadSpanLoc(), 10);
-    }
-
-    private void assertGoodReferenceGetter(final byte[] actualBytes, final GenomeLoc span, final int padding) {
-        final int expectedStart = Math.max(span.getStart() - padding, 1);
-        final int expectedStop = Math.min(span.getStop() + padding, contigLength);
-        final byte[] expectedBytes = seq.getSubsequenceAt(span.getContig(), expectedStart, expectedStop).getBases();
-        Assert.assertEquals(actualBytes, expectedBytes);
-    }
-
-    @DataProvider(name = "ActiveRegionReads")
-    public Object[][] makeActiveRegionReads() {
-        List<Object[]> tests = new ArrayList<Object[]>();
-        final SAMFileHeader header = ArtificialSAMUtils.createArtificialSamHeader(seq.getSequenceDictionary());
-        for ( final int start : Arrays.asList(1, 10, 100, contigLength - 10, contigLength - 1) ) {
-            for ( final int readStartOffset : Arrays.asList(-100, -10, 0, 10, 100) ) {
-                for ( final int readSize : Arrays.asList(10, 100, 1000) ) {
-                    final GenomeLoc loc = genomeLocParser.createGenomeLocOnContig(contig, start, start + 10);
-
-                    final int readStart = Math.max(start + readStartOffset, 1);
-                    final int readStop = Math.min(readStart + readSize, contigLength);
-                    final int readLength = readStop - readStart + 1;
-                    if ( readLength > 0 ) {
-                        GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(header, "read", 0, readStart, readLength);
-                        final GenomeLoc readLoc = genomeLocParser.createGenomeLoc(read);
-                        if ( readLoc.overlapsP(loc) )
-                            tests.add(new Object[]{loc, read});
-                    }
-                }
-            }
-        }
-
-        return tests.toArray(new Object[][]{});
-    }
-
-    @Test(enabled = !DEBUG, dataProvider = "ActiveRegionReads")
-    public void testActiveRegionReads(final GenomeLoc loc, final GATKSAMRecord read) throws Exception {
-        final GenomeLoc expectedSpan = loc.union(genomeLocParser.createGenomeLoc(read));
-
-        final ActiveRegion region = new ActiveRegion(loc, null, true, genomeLocParser, 0);
-        final ActiveRegion region2 = new ActiveRegion(loc, null, true, genomeLocParser, 0);
-        Assert.assertEquals(region.getReads(), Collections.emptyList());
-        Assert.assertEquals(region.size(), 0);
-        Assert.assertEquals(region.getExtendedLoc(), loc);
-        Assert.assertEquals(region.getReadSpanLoc(), loc);
-        Assert.assertTrue(region.equalExceptReads(region2));
-
-        region.add(read);
-        Assert.assertEquals(region.getReads(), Collections.singletonList(read));
-        Assert.assertEquals(region.size(), 1);
-        Assert.assertEquals(region.getExtendedLoc(), loc);
-        Assert.assertEquals(region.getReadSpanLoc(), expectedSpan);
-        Assert.assertTrue(region.equalExceptReads(region2));
-
-        region.clearReads();
-        Assert.assertEquals(region.getReads(), Collections.emptyList());
-        Assert.assertEquals(region.size(), 0);
-        Assert.assertEquals(region.getExtendedLoc(), loc);
-        Assert.assertEquals(region.getReadSpanLoc(), loc);
-        Assert.assertTrue(region.equalExceptReads(region2));
-
-        region.addAll(Collections.singleton(read));
-        Assert.assertEquals(region.getReads(), Collections.singletonList(read));
-        Assert.assertEquals(region.size(), 1);
-        Assert.assertEquals(region.getExtendedLoc(), loc);
-        Assert.assertEquals(region.getReadSpanLoc(), expectedSpan);
-        Assert.assertTrue(region.equalExceptReads(region2));
-
-        region.removeAll(Collections.<GATKSAMRecord>emptySet());
-        Assert.assertEquals(region.getReads(), Collections.singletonList(read));
-        Assert.assertEquals(region.size(), 1);
-        Assert.assertEquals(region.getExtendedLoc(), loc);
-        Assert.assertEquals(region.getReadSpanLoc(), expectedSpan);
-        Assert.assertTrue(region.equalExceptReads(region2));
-
-        region.removeAll(Collections.singleton(read));
-        Assert.assertEquals(region.getReads(), Collections.emptyList());
-        Assert.assertEquals(region.size(), 0);
-        Assert.assertEquals(region.getExtendedLoc(), loc);
-        Assert.assertEquals(region.getReadSpanLoc(), loc);
-        Assert.assertTrue(region.equalExceptReads(region2));
-
-        final GATKSAMRecord read2 = (GATKSAMRecord)read.clone();
-        read2.setReadName(read.getReadName() + ".clone");
-
-        for ( final GATKSAMRecord readToKeep : Arrays.asList(read, read2)) {
-            region.addAll(Arrays.asList(read, read2));
-            final GATKSAMRecord readToDiscard = readToKeep == read ? read2 : read;
-            region.removeAll(Collections.singleton(readToDiscard));
-            Assert.assertEquals(region.getReads(), Arrays.asList(readToKeep));
-            Assert.assertEquals(region.size(), 1);
-            Assert.assertEquals(region.getExtendedLoc(), loc);
-        }
-    }
-
-    // -----------------------------------------------------------------------------------------------
-    //
-    // Make sure bad inputs are properly detected
-    //
-    // -----------------------------------------------------------------------------------------------
-
-    @DataProvider(name = "BadReadsTest")
-    public Object[][] makeBadReadsTest() {
-        List<Object[]> tests = new ArrayList<Object[]>();
-        final SAMFileHeader header = ArtificialSAMUtils.createArtificialSamHeader(seq.getSequenceDictionary());
-        tests.add(new Object[]{
-                ArtificialSAMUtils.createArtificialRead(header, "read1", 0, 10, 10),
-                ArtificialSAMUtils.createArtificialRead(header, "read2", 0, 9, 10)});
-        tests.add(new Object[]{
-                ArtificialSAMUtils.createArtificialRead(header, "read1", 0, 10, 10),
-                ArtificialSAMUtils.createArtificialRead(header, "read2", 1, 9, 10)});
-        tests.add(new Object[]{
-                ArtificialSAMUtils.createArtificialRead(header, "read1", 1, 10, 10),
-                ArtificialSAMUtils.createArtificialRead(header, "read2", 0, 9, 10)});
-        return tests.toArray(new Object[][]{});
-    }
-
-    @Test(enabled = !DEBUG, dataProvider = "BadReadsTest", expectedExceptions = IllegalArgumentException.class)
-    public void testBadReads(final GATKSAMRecord read1, final GATKSAMRecord read2) {
-        final GenomeLoc loc = genomeLocParser.createGenomeLoc(read1);
-        final ActiveRegion region = new ActiveRegion(loc, null, true, genomeLocParser, 0);
-        region.add(read1);
-        region.add(read2);
-    }
-
-    // -----------------------------------------------------------------------------------------------
-    //
-    // Make sure we can properly cut up an active region based on engine intervals
-    //
-    // -----------------------------------------------------------------------------------------------
-
-    @DataProvider(name = "SplitActiveRegion")
-    public Object[][] makeSplitActiveRegion() {
-        List<Object[]> tests = new ArrayList<Object[]>();
-
-        final GenomeLoc whole_span = genomeLocParser.createGenomeLoc("20", 1, 500);
-        final GenomeLoc gl_before = genomeLocParser.createGenomeLoc("20", 1, 9);
-        final GenomeLoc gl_after = genomeLocParser.createGenomeLoc("20", 250, 500);
-        final GenomeLoc gl_diff_contig = genomeLocParser.createGenomeLoc("19", 40, 50);
-
-        final int regionStart = 10;
-        final int regionStop = 100;
-        final GenomeLoc region = genomeLocParser.createGenomeLoc("20", regionStart, regionStop);
-
-        for ( final GenomeLoc noEffect : Arrays.asList(whole_span) )
-            tests.add(new Object[]{
-                    region,
-                    Arrays.asList(noEffect),
-                    Arrays.asList(region)});
-
-        for ( final GenomeLoc noOverlap : Arrays.asList(gl_before, gl_after, gl_diff_contig) )
-            tests.add(new Object[]{
-                    region,
-                    Arrays.asList(noOverlap),
-                    Arrays.asList()});
-
-        tests.add(new Object[]{region,
-                Arrays.asList(genomeLocParser.createGenomeLoc("20", 5, 50)),
-                Arrays.asList(genomeLocParser.createGenomeLoc("20", regionStart, 50))});
-
-        tests.add(new Object[]{region,
-                Arrays.asList(genomeLocParser.createGenomeLoc("20", 50, 200)),
-                Arrays.asList(genomeLocParser.createGenomeLoc("20", 50, regionStop))});
-
-        tests.add(new Object[]{region,
-                Arrays.asList(genomeLocParser.createGenomeLoc("20", 40, 50)),
-                Arrays.asList(genomeLocParser.createGenomeLoc("20", 40, 50))});
-
-        tests.add(new Object[]{region,
-                Arrays.asList(genomeLocParser.createGenomeLoc("20", 20, 30), genomeLocParser.createGenomeLoc("20", 40, 50)),
-                Arrays.asList(genomeLocParser.createGenomeLoc("20", 20, 30), genomeLocParser.createGenomeLoc("20", 40, 50))});
-
-        tests.add(new Object[]{region,
-                Arrays.asList(genomeLocParser.createGenomeLoc("20", 1, 30), genomeLocParser.createGenomeLoc("20", 40, 50)),
-                Arrays.asList(genomeLocParser.createGenomeLoc("20", regionStart, 30), genomeLocParser.createGenomeLoc("20", 40, 50))});
-
-        tests.add(new Object[]{region,
-                Arrays.asList(genomeLocParser.createGenomeLoc("20", 1, 30), genomeLocParser.createGenomeLoc("20", 70, 200)),
-                Arrays.asList(genomeLocParser.createGenomeLoc("20", regionStart, 30), genomeLocParser.createGenomeLoc("20", 70, regionStop))});
-
-        tests.add(new Object[]{region,
-                Arrays.asList(genomeLocParser.createGenomeLoc("20", 1, 30), genomeLocParser.createGenomeLoc("20", 40, 50), genomeLocParser.createGenomeLoc("20", 70, 200)),
-                Arrays.asList(genomeLocParser.createGenomeLoc("20", regionStart, 30), genomeLocParser.createGenomeLoc("20", 40, 50), genomeLocParser.createGenomeLoc("20", 70, regionStop))});
-
-        return tests.toArray(new Object[][]{});
-    }
-
-    @Test(dataProvider = "SplitActiveRegion")
-    public void testSplitActiveRegion(final GenomeLoc regionLoc, final List<GenomeLoc> intervalLocs, final List<GenomeLoc> expectedRegionLocs) {
-        for ( final boolean addSubstates : Arrays.asList(true, false) ) {
-            final List<ActivityProfileState> states;
-            if ( addSubstates ) {
-                states = new LinkedList<ActivityProfileState>();
-                for ( int i = 0; i < regionLoc.size(); i++ )
-                    states.add(new ActivityProfileState(genomeLocParser.createGenomeLoc(regionLoc.getContig(), regionLoc.getStart() + i), 1.0));
-            } else {
-                states = null;
-            }
-
-            final ActiveRegion region = new ActiveRegion(regionLoc, states, true, genomeLocParser, 0);
-            final GenomeLocSortedSet intervals = new GenomeLocSortedSet(genomeLocParser,  intervalLocs);
-            final List<ActiveRegion> regions = region.splitAndTrimToIntervals(intervals);
-
-            Assert.assertEquals(regions.size(), expectedRegionLocs.size(), "Wrong number of split locations");
-            for ( int i = 0; i < expectedRegionLocs.size(); i++ ) {
-                final GenomeLoc expected = expectedRegionLocs.get(i);
-                final ActiveRegion actual = regions.get(i);
-                Assert.assertEquals(actual.getLocation(), expected, "Bad region after split");
-                Assert.assertEquals(actual.isActive(), region.isActive());
-                Assert.assertEquals(actual.getExtension(), region.getExtension());
-            }
-        }
-    }
-
-    // -----------------------------------------------------------------------------------------------
-    //
-    // Make sure we can properly cut up an active region based on engine intervals
-    //
-    // -----------------------------------------------------------------------------------------------
-
-    @DataProvider(name = "TrimActiveRegionData")
-    public Object[][] makeTrimActiveRegionData() {
-        List<Object[]> tests = new ArrayList<Object[]>();
-
-        // fully enclosed within active region
-        tests.add(new Object[]{
-                genomeLocParser.createGenomeLoc("20", 10, 20), 10,
-                genomeLocParser.createGenomeLoc("20", 15, 16),
-                genomeLocParser.createGenomeLoc("20", 15, 16), 0});
-
-        tests.add(new Object[]{
-                genomeLocParser.createGenomeLoc("20", 10, 20), 10,
-                genomeLocParser.createGenomeLoc("20", 10, 15),
-                genomeLocParser.createGenomeLoc("20", 10, 15), 0});
-
-        tests.add(new Object[]{
-                genomeLocParser.createGenomeLoc("20", 10, 20), 10,
-                genomeLocParser.createGenomeLoc("20", 15, 20),
-                genomeLocParser.createGenomeLoc("20", 15, 20), 0});
-
-        // needs extra padding on the right
-        tests.add(new Object[]{
-                genomeLocParser.createGenomeLoc("20", 10, 20), 10,
-                genomeLocParser.createGenomeLoc("20", 15, 25),
-                genomeLocParser.createGenomeLoc("20", 15, 20), 5});
-
-        // needs extra padding on the left
-        tests.add(new Object[]{
-                genomeLocParser.createGenomeLoc("20", 10, 20), 10,
-                genomeLocParser.createGenomeLoc("20", 5, 15),
-                genomeLocParser.createGenomeLoc("20", 10, 15), 5});
-
-        // needs extra padding on both
-        tests.add(new Object[]{
-                genomeLocParser.createGenomeLoc("20", 10, 20), 10,
-                genomeLocParser.createGenomeLoc("20", 7, 21),
-                genomeLocParser.createGenomeLoc("20", 10, 20), 3});
-        tests.add(new Object[]{
-                genomeLocParser.createGenomeLoc("20", 10, 20), 10,
-                genomeLocParser.createGenomeLoc("20", 9, 23),
-                genomeLocParser.createGenomeLoc("20", 10, 20), 3});
-
-        // desired span captures everything, so we're returning everything.  Tests that extension is set correctly
-        tests.add(new Object[]{
-                genomeLocParser.createGenomeLoc("20", 10, 20), 10,
-                genomeLocParser.createGenomeLoc("20", 1, 50),
-                genomeLocParser.createGenomeLoc("20", 10, 20), 10});
-
-        // At the start of the chromosome, potentially a bit weird
-        tests.add(new Object[]{
-                genomeLocParser.createGenomeLoc("20", 1, 10), 10,
-                genomeLocParser.createGenomeLoc("20", 1, 50),
-                genomeLocParser.createGenomeLoc("20", 1, 10), 10});
-
-        return tests.toArray(new Object[][]{});
-    }
-
-    @Test(dataProvider = "TrimActiveRegionData")
-    public void testTrimActiveRegion(final GenomeLoc regionLoc, final int extension, final GenomeLoc desiredSpan, final GenomeLoc expectedActiveRegion, final int expectedExtension) {
-        final ActiveRegion region = new ActiveRegion(regionLoc, Collections.<ActivityProfileState>emptyList(), true, genomeLocParser, extension);
-        final ActiveRegion trimmed = region.trim(desiredSpan);
-        Assert.assertEquals(trimmed.getLocation(), expectedActiveRegion, "Incorrect region");
-        Assert.assertEquals(trimmed.getExtension(), expectedExtension, "Incorrect region");
-    }
-}
\ No newline at end of file
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/activeregion/ActivityProfileStateUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/activeregion/ActivityProfileStateUnitTest.java
deleted file mode 100644
index 75e9d9a..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/activeregion/ActivityProfileStateUnitTest.java
+++ /dev/null
@@ -1,92 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.activeregion;
-
-import htsjdk.samtools.SAMFileHeader;
-import org.broadinstitute.gatk.utils.GenomeLoc;
-import org.broadinstitute.gatk.utils.GenomeLocParser;
-import org.broadinstitute.gatk.utils.sam.ArtificialSAMUtils;
-import org.testng.Assert;
-import org.testng.annotations.BeforeClass;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
-
-import java.io.FileNotFoundException;
-import java.util.Arrays;
-import java.util.LinkedList;
-import java.util.List;
-
-/**
- * Created with IntelliJ IDEA.
- * User: depristo
- * Date: 1/17/13
- * Time: 2:30 PM
- * To change this template use File | Settings | File Templates.
- */
-public class ActivityProfileStateUnitTest {
-    private GenomeLocParser genomeLocParser;
-
-    @BeforeClass
-    public void init() throws FileNotFoundException {
-        // sequence
-        final SAMFileHeader header = ArtificialSAMUtils.createArtificialSamHeader(1, 1, 100);
-        genomeLocParser = new GenomeLocParser(header.getSequenceDictionary());
-    }
-
-    @DataProvider(name = "ActiveProfileResultProvider")
-    public Object[][] makeActiveProfileResultProvider() {
-        final List<Object[]> tests = new LinkedList<Object[]>();
-
-        final String chr = genomeLocParser.getContigs().getSequence(0).getSequenceName();
-        for ( final GenomeLoc loc : Arrays.asList(
-                genomeLocParser.createGenomeLoc(chr, 10, 10),
-                genomeLocParser.createGenomeLoc(chr, 100, 100) )) {
-            for ( final double prob : Arrays.asList(0.0, 0.5, 1.0) ) {
-                for ( final ActivityProfileState.Type state : ActivityProfileState.Type.values() ) {
-                    for ( final Number value : Arrays.asList(1, 2, 4) ) {
-                        tests.add(new Object[]{ loc, prob, state, value});
-                    }
-                }
-                tests.add(new Object[]{ loc, prob, null, null});
-            }
-        }
-
-        return tests.toArray(new Object[][]{});
-    }
-
-    @Test(dataProvider = "ActiveProfileResultProvider")
-    public void testActiveProfileResultProvider(GenomeLoc loc, final double prob, ActivityProfileState.Type maybeState, final Number maybeNumber) {
-        final ActivityProfileState apr = maybeState == null
-                ? new ActivityProfileState(loc, prob)
-                : new ActivityProfileState(loc, prob, maybeState, maybeNumber);
-
-        Assert.assertEquals(apr.getLoc(), loc);
-        Assert.assertNotNull(apr.toString());
-        Assert.assertEquals(apr.isActiveProb, prob);
-        Assert.assertEquals(apr.resultState, maybeState == null ? ActivityProfileState.Type.NONE : maybeState);
-        Assert.assertEquals(apr.resultValue, maybeState == null ? null : maybeNumber);
-    }
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/activeregion/ActivityProfileUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/activeregion/ActivityProfileUnitTest.java
deleted file mode 100644
index b3442b3..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/activeregion/ActivityProfileUnitTest.java
+++ /dev/null
@@ -1,491 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.activeregion;
-
-
-// the imports for unit testing.
-
-
-import htsjdk.samtools.reference.ReferenceSequenceFile;
-import org.broadinstitute.gatk.utils.BaseTest;
-import org.broadinstitute.gatk.utils.GenomeLoc;
-import org.broadinstitute.gatk.utils.GenomeLocParser;
-import org.broadinstitute.gatk.utils.MathUtils;
-import org.broadinstitute.gatk.utils.Utils;
-import org.broadinstitute.gatk.utils.fasta.CachingIndexedFastaSequenceFile;
-import org.testng.Assert;
-import org.testng.annotations.BeforeClass;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
-
-import java.io.File;
-import java.io.FileNotFoundException;
-import java.util.*;
-
-
-public class ActivityProfileUnitTest extends BaseTest {
-    private final static boolean DEBUG = false;
-    private GenomeLocParser genomeLocParser;
-    private GenomeLoc startLoc;
-
-    private final static int MAX_PROB_PROPAGATION_DISTANCE = 50;
-    private final static double ACTIVE_PROB_THRESHOLD= 0.002;
-
-    @BeforeClass
-    public void init() throws FileNotFoundException {
-        // sequence
-        ReferenceSequenceFile seq = new CachingIndexedFastaSequenceFile(new File(hg18Reference));
-        genomeLocParser = new GenomeLocParser(seq);
-        startLoc = genomeLocParser.createGenomeLoc("chr1", 1, 1, 100);
-    }
-
-    // --------------------------------------------------------------------------------
-    //
-    // Basic tests Provider
-    //
-    // --------------------------------------------------------------------------------
-
-    private class BasicActivityProfileTestProvider extends TestDataProvider {
-        List<Double> probs;
-        List<ActiveRegion> expectedRegions;
-        int extension = 0;
-        GenomeLoc regionStart = startLoc;
-        final ProfileType type;
-
-        public BasicActivityProfileTestProvider(final ProfileType type, final List<Double> probs, boolean startActive, int ... startsAndStops) {
-            super(BasicActivityProfileTestProvider.class);
-            this.type = type;
-            this.probs = probs;
-            this.expectedRegions = toRegions(startActive, startsAndStops);
-            setName(getName());
-        }
-
-        private String getName() {
-            return String.format("type=%s probs=%s expectedRegions=%s", type, Utils.join(",", probs), Utils.join(",", expectedRegions));
-        }
-
-        public ActivityProfile makeProfile() {
-            switch ( type ) {
-                case Base: return new ActivityProfile(genomeLocParser, MAX_PROB_PROPAGATION_DISTANCE, ACTIVE_PROB_THRESHOLD);
-                case BandPass:
-                    // zero size => equivalent to ActivityProfile
-                    return new BandPassActivityProfile(genomeLocParser, null, MAX_PROB_PROPAGATION_DISTANCE, ACTIVE_PROB_THRESHOLD, 0, 0.01, false);
-                default: throw new IllegalStateException(type.toString());
-            }
-        }
-
-        private List<ActiveRegion> toRegions(boolean isActive, int[] startsAndStops) {
-            List<ActiveRegion> l = new ArrayList<ActiveRegion>();
-            for ( int i = 0; i < startsAndStops.length - 1; i++) {
-                int start = regionStart.getStart() + startsAndStops[i];
-                int end = regionStart.getStart() + startsAndStops[i+1] - 1;
-                GenomeLoc activeLoc = genomeLocParser.createGenomeLoc(regionStart.getContig(), start, end);
-                ActiveRegion r = new ActiveRegion(activeLoc, Collections.<ActivityProfileState>emptyList(), isActive, genomeLocParser, extension);
-                l.add(r);
-                isActive = ! isActive;
-            }
-            return l;
-        }
-    }
-
-    private enum ProfileType {
-        Base, BandPass
-    }
-
-    @DataProvider(name = "BasicActivityProfileTestProvider")
-    public Object[][] makeQualIntervalTestProvider() {
-        for ( final ProfileType type : ProfileType.values() ) {
-            new BasicActivityProfileTestProvider(type, Arrays.asList(1.0), true, 0, 1);
-            new BasicActivityProfileTestProvider(type, Arrays.asList(1.0, 0.0), true, 0, 1, 2);
-            new BasicActivityProfileTestProvider(type, Arrays.asList(0.0, 1.0), false, 0, 1, 2);
-            new BasicActivityProfileTestProvider(type, Arrays.asList(1.0, 0.0, 1.0), true, 0, 1, 2, 3);
-            new BasicActivityProfileTestProvider(type, Arrays.asList(1.0, 1.0, 1.0), true, 0, 3);
-        }
-
-        return BasicActivityProfileTestProvider.getTests(BasicActivityProfileTestProvider.class);
-    }
-
-    @Test(enabled = ! DEBUG, dataProvider = "BasicActivityProfileTestProvider")
-    public void testBasicActivityProfile(BasicActivityProfileTestProvider cfg) {
-        ActivityProfile profile = cfg.makeProfile();
-
-        Assert.assertTrue(profile.isEmpty());
-
-        Assert.assertEquals(profile.parser, genomeLocParser);
-
-        for ( int i = 0; i < cfg.probs.size(); i++ ) {
-            double p = cfg.probs.get(i);
-            GenomeLoc loc = genomeLocParser.createGenomeLoc(cfg.regionStart.getContig(), cfg.regionStart.getStart() + i, cfg.regionStart.getStart() + i);
-            profile.add(new ActivityProfileState(loc, p));
-            Assert.assertFalse(profile.isEmpty(), "Profile shouldn't be empty after adding a state");
-        }
-        Assert.assertEquals(profile.regionStartLoc, genomeLocParser.createGenomeLoc(cfg.regionStart.getContig(), cfg.regionStart.getStart(), cfg.regionStart.getStart() ), "Start loc should be the start of the region");
-
-        Assert.assertEquals(profile.size(), cfg.probs.size(), "Should have exactly the number of states we expected to add");
-        assertProbsAreEqual(profile.stateList, cfg.probs);
-
-        // TODO -- reanble tests
-        //assertRegionsAreEqual(profile.createActiveRegions(0, 100), cfg.expectedRegions);
-    }
-
-    private void assertRegionsAreEqual(List<ActiveRegion> actual, List<ActiveRegion> expected) {
-        Assert.assertEquals(actual.size(), expected.size());
-        for ( int i = 0; i < actual.size(); i++ ) {
-            Assert.assertTrue(actual.get(i).equalExceptReads(expected.get(i)));
-        }
-    }
-
-    private void assertProbsAreEqual(List<ActivityProfileState> actual, List<Double> expected) {
-        Assert.assertEquals(actual.size(), expected.size());
-        for ( int i = 0; i < actual.size(); i++ ) {
-            Assert.assertEquals(actual.get(i).isActiveProb, expected.get(i));
-        }
-    }
-
-    // -------------------------------------------------------------------------------------
-    //
-    // Hardcore tests for adding to the profile and constructing active regions
-    //
-    // -------------------------------------------------------------------------------------
-
-    private static class SizeToStringList<T> extends ArrayList<T> {
-        @Override public String toString() { return "List[" + size() + "]"; }
-    }
-
-    @DataProvider(name = "RegionCreationTests")
-    public Object[][] makeRegionCreationTests() {
-        final List<Object[]> tests = new LinkedList<Object[]>();
-
-        final int contigLength = genomeLocParser.getContigs().getSequences().get(0).getSequenceLength();
-        for ( int start : Arrays.asList(1, 10, 100, contigLength - 100, contigLength - 10) ) {
-            for ( int regionSize : Arrays.asList(1, 10, 100, 1000, 10000) ) {
-                for ( int maxRegionSize : Arrays.asList(10, 50, 200) ) {
-                    for ( final boolean waitUntilEnd : Arrays.asList(false, true) ) {
-                        for ( final boolean forceConversion : Arrays.asList(false, true) ) {
-                            // what do I really want to test here?  I'd like to test a few cases:
-                            // -- region is all active (1.0)
-                            // -- region is all inactive (0.0)
-                            // -- cut the interval into 1, 2, 3, 4, 5 ... 10 regions, each with alternating activity values
-                            for ( final boolean startWithActive : Arrays.asList(true, false) ) {
-                                for ( int nParts : Arrays.asList(1, 2, 3, 4, 5, 7, 10, 11, 13) ) {
-
-//        for ( int start : Arrays.asList(1) ) {
-//            for ( int regionSize : Arrays.asList(100) ) {
-//                for ( int maxRegionSize : Arrays.asList(10) ) {
-//                    for ( final boolean waitUntilEnd : Arrays.asList(true) ) {
-//                        for ( final boolean forceConversion : Arrays.asList(false) ) {
-//                            for ( final boolean startWithActive : Arrays.asList(true) ) {
-//                                for ( int nParts : Arrays.asList(3) ) {
-                                    regionSize = Math.min(regionSize, contigLength - start);
-                                    final List<Boolean> regions = makeRegions(regionSize, startWithActive, nParts);
-                                    tests.add(new Object[]{ start, regions, maxRegionSize, nParts, forceConversion, waitUntilEnd });
-                                }
-                            }
-                        }
-                    }
-                }
-            }
-        }
-
-        return tests.toArray(new Object[][]{});
-    }
-
-    private List<Boolean> makeRegions(final int totalRegionSize,
-                                      final boolean startWithActive,
-                                      final int nParts) {
-        final List<Boolean> regions = new SizeToStringList<Boolean>();
-
-        boolean isActive = startWithActive;
-        final int activeRegionSize = Math.max(totalRegionSize / nParts, 1);
-        for ( int i = 0; i < totalRegionSize; i += activeRegionSize ) {
-            for ( int j = 0; j < activeRegionSize && j + i < totalRegionSize; j++ ) {
-                regions.add(isActive);
-            }
-            isActive = ! isActive;
-        }
-
-        return regions;
-    }
-
-
-    @Test(enabled = !DEBUG, dataProvider = "RegionCreationTests")
-    public void testRegionCreation(final int start, final List<Boolean> probs, int maxRegionSize, final int nParts, final boolean forceConversion, final boolean waitUntilEnd) {
-        final ActivityProfile profile = new ActivityProfile(genomeLocParser, MAX_PROB_PROPAGATION_DISTANCE, ACTIVE_PROB_THRESHOLD);
-        Assert.assertNotNull(profile.toString());
-
-        final String contig = genomeLocParser.getContigs().getSequences().get(0).getSequenceName();
-        final List<Boolean> seenSites = new ArrayList<Boolean>(Collections.nCopies(probs.size(), false));
-        ActiveRegion lastRegion = null;
-        for ( int i = 0; i < probs.size(); i++ ) {
-            final boolean isActive = probs.get(i);
-            final GenomeLoc loc = genomeLocParser.createGenomeLoc(contig, i + start);
-            final ActivityProfileState state = new ActivityProfileState(loc, isActive ? 1.0 : 0.0);
-            profile.add(state);
-            Assert.assertNotNull(profile.toString());
-
-            if ( ! waitUntilEnd ) {
-                final List<ActiveRegion> regions = profile.popReadyActiveRegions(0, 1, maxRegionSize, false);
-                lastRegion = assertGoodRegions(start, regions, maxRegionSize, lastRegion, probs, seenSites);
-            }
-        }
-
-        if ( waitUntilEnd || forceConversion ) {
-            final List<ActiveRegion> regions = profile.popReadyActiveRegions(0, 1, maxRegionSize, forceConversion);
-            lastRegion = assertGoodRegions(start, regions, maxRegionSize, lastRegion, probs, seenSites);
-        }
-
-        for ( int i = 0; i < probs.size(); i++ ) {
-            if ( forceConversion || (i + maxRegionSize + profile.getMaxProbPropagationDistance() < probs.size()))
-                // only require a site to be seen if we are forcing conversion or the site is more than maxRegionSize from the end
-                Assert.assertTrue(seenSites.get(i), "Missed site " + i);
-        }
-
-        Assert.assertNotNull(profile.toString());
-    }
-
-    private ActiveRegion assertGoodRegions(final int start, final List<ActiveRegion> regions, final int maxRegionSize, ActiveRegion lastRegion, final List<Boolean> probs, final List<Boolean> seenSites) {
-        for ( final ActiveRegion region : regions ) {
-            Assert.assertTrue(region.getLocation().size() > 0, "Region " + region + " has a bad size");
-            Assert.assertTrue(region.getLocation().size() <= maxRegionSize, "Region " + region + " has a bad size: it's big than the max region size " + maxRegionSize);
-            if ( lastRegion != null ) {
-                Assert.assertTrue(region.getLocation().getStart() == lastRegion.getLocation().getStop() + 1, "Region " + region + " doesn't start immediately after previous region" + lastRegion);
-            }
-
-            // check that all active bases are actually active
-            final int regionOffset = region.getLocation().getStart() - start;
-            Assert.assertTrue(regionOffset >= 0 && regionOffset < probs.size(), "Region " + region + " has a bad offset w.r.t. start");
-            for ( int j = 0; j < region.getLocation().size(); j++ ) {
-                final int siteOffset = j + regionOffset;
-                Assert.assertEquals(region.isActive(), probs.get(siteOffset).booleanValue());
-                Assert.assertFalse(seenSites.get(siteOffset), "Site " + j + " in " + region + " was seen already");
-                seenSites.set(siteOffset, true);
-            }
-
-            lastRegion = region;
-        }
-
-        return lastRegion;
-    }
-
-    // -------------------------------------------------------------------------------------
-    //
-    // Hardcore tests for adding to the profile and constructing active regions
-    //
-    // -------------------------------------------------------------------------------------
-
-    @DataProvider(name = "SoftClipsTest")
-    public Object[][] makeSoftClipsTest() {
-        final List<Object[]> tests = new LinkedList<Object[]>();
-
-        final int contigLength = genomeLocParser.getContigs().getSequences().get(0).getSequenceLength();
-        for ( int start : Arrays.asList(1, 10, 100, contigLength - 100, contigLength - 10, contigLength - 1) ) {
-            for ( int precedingSites: Arrays.asList(0, 1, 10) ) {
-                if ( precedingSites + start < contigLength ) {
-                    for ( int softClipSize : Arrays.asList(1, 2, 10, 100) ) {
-//        for ( int start : Arrays.asList(10) ) {
-//            for ( int precedingSites: Arrays.asList(10) ) {
-//                for ( int softClipSize : Arrays.asList(1) ) {
-                        tests.add(new Object[]{ start, precedingSites, softClipSize });
-                    }
-                }
-            }
-        }
-
-        return tests.toArray(new Object[][]{});
-    }
-
-    @Test(enabled = ! DEBUG, dataProvider = "SoftClipsTest")
-    public void testSoftClips(final int start, int nPrecedingSites, final int softClipSize) {
-        final ActivityProfile profile = new ActivityProfile(genomeLocParser, MAX_PROB_PROPAGATION_DISTANCE, ACTIVE_PROB_THRESHOLD);
-
-        final int contigLength = genomeLocParser.getContigs().getSequences().get(0).getSequenceLength();
-        final String contig = genomeLocParser.getContigs().getSequences().get(0).getSequenceName();
-        for ( int i = 0; i < nPrecedingSites; i++ ) {
-            final GenomeLoc loc = genomeLocParser.createGenomeLoc(contig, i + start);
-            final ActivityProfileState state = new ActivityProfileState(loc, 0.0);
-            profile.add(state);
-        }
-
-        final GenomeLoc softClipLoc = genomeLocParser.createGenomeLoc(contig, nPrecedingSites + start);
-        profile.add(new ActivityProfileState(softClipLoc, 1.0, ActivityProfileState.Type.HIGH_QUALITY_SOFT_CLIPS, softClipSize));
-
-        final int actualNumOfSoftClips = Math.min(softClipSize, profile.getMaxProbPropagationDistance());
-        if ( nPrecedingSites == 0 ) {
-            final int profileSize = Math.min(start + actualNumOfSoftClips, contigLength) - start + 1;
-            Assert.assertEquals(profile.size(), profileSize, "Wrong number of states in the profile");
-        }
-
-        for ( int i = 0; i < profile.size(); i++ ) {
-            final ActivityProfileState state = profile.getStateList().get(i);
-            final boolean withinSCRange = state.getLoc().distance(softClipLoc) <= actualNumOfSoftClips;
-            if ( withinSCRange ) {
-                Assert.assertTrue(state.isActiveProb > 0.0, "active prob should be changed within soft clip size");
-            } else {
-                Assert.assertEquals(state.isActiveProb, 0.0, "active prob shouldn't be changed outside of clip size");
-            }
-        }
-    }
-
-    // -------------------------------------------------------------------------------------
-    //
-    // Tests to ensure we cut large active regions in the right place
-    //
-    // -------------------------------------------------------------------------------------
-
-    private void addProb(final List<Double> l, final double v) {
-        l.add(v);
-    }
-
-    @DataProvider(name = "ActiveRegionCutTests")
-    public Object[][] makeActiveRegionCutTests() {
-        final List<Object[]> tests = new LinkedList<Object[]>();
-
-//        for ( final int activeRegionSize : Arrays.asList(30) ) {
-//            for ( final int minRegionSize : Arrays.asList(5) ) {
-        for ( final int activeRegionSize : Arrays.asList(10, 12, 20, 30, 40) ) {
-            for ( final int minRegionSize : Arrays.asList(1, 5, 10) ) {
-                final int maxRegionSize = activeRegionSize * 2 / 3;
-                if ( minRegionSize >= maxRegionSize ) continue;
-                { // test flat activity profile
-                    final List<Double> probs = Collections.nCopies(activeRegionSize, 1.0);
-                    tests.add(new Object[]{minRegionSize, maxRegionSize, maxRegionSize, probs});
-                }
-
-                { // test point profile is properly handled
-                    for ( int end = 1; end < activeRegionSize; end++ ) {
-                        final List<Double> probs = Collections.nCopies(end, 1.0);
-                        tests.add(new Object[]{minRegionSize, maxRegionSize, Math.min(end, maxRegionSize), probs});
-                    }
-                }
-
-                { // test increasing activity profile
-                    final List<Double> probs = new ArrayList<Double>(activeRegionSize);
-                    for ( int i = 0; i < activeRegionSize; i++ ) {
-                        addProb(probs, (1.0*(i+1))/ activeRegionSize);
-                    }
-                    tests.add(new Object[]{minRegionSize, maxRegionSize, maxRegionSize, probs});
-                }
-
-                { // test decreasing activity profile
-                    final List<Double> probs = new ArrayList<Double>(activeRegionSize);
-                    for ( int i = 0; i < activeRegionSize; i++ ) {
-                        addProb(probs, 1 - (1.0*(i+1))/ activeRegionSize);
-                    }
-                    tests.add(new Object[]{minRegionSize, maxRegionSize, maxRegionSize, probs});
-                }
-
-                { // test two peaks
-//                    for ( final double rootSigma : Arrays.asList(2.0) ) {
-//                        int maxPeak1 = 9; {
-//                            int maxPeak2 = 16; {
-                    for ( final double rootSigma : Arrays.asList(1.0, 2.0, 3.0) ) {
-                        for ( int maxPeak1 = 0; maxPeak1 < activeRegionSize / 2; maxPeak1++ ) {
-                            for ( int maxPeak2 = activeRegionSize / 2 + 1; maxPeak2 < activeRegionSize; maxPeak2++ ) {
-                                final double[] gauss1 = makeGaussian(maxPeak1, activeRegionSize, rootSigma);
-                                final double[] gauss2 = makeGaussian(maxPeak2, activeRegionSize, rootSigma+1);
-                                final List<Double> probs = new ArrayList<Double>(activeRegionSize);
-                                for ( int i = 0; i < activeRegionSize; i++ ) {
-                                    addProb(probs, gauss1[i] + gauss2[i]);
-                                }
-                                final int cutSite = findCutSiteForTwoMaxPeaks(probs, minRegionSize);
-                                if ( cutSite != -1 && cutSite < maxRegionSize )
-                                    tests.add(new Object[]{minRegionSize, maxRegionSize, Math.max(cutSite, minRegionSize), probs});
-                            }
-                        }
-                    }
-                }
-
-                { // test that the lowest of two minima is taken
-                    // looks like a bunch of 1s, 0.5, some 1.0s, 0.75, some more 1s
-//                    int firstMin = 0; {
-//                    int secondMin = 4; {
-                    for ( int firstMin = 1; firstMin < activeRegionSize; firstMin++ ) {
-                        for ( int secondMin = firstMin + 1; secondMin < activeRegionSize; secondMin++ ) {
-                            final List<Double> probs = new ArrayList<Double>(Collections.nCopies(activeRegionSize, 1.0));
-                            probs.set(firstMin, 0.5);
-                            probs.set(secondMin, 0.75);
-                            final int expectedCut;
-                            if ( firstMin + 1 < minRegionSize ) {
-                                if ( firstMin == secondMin - 1 ) // edge case for non-min at minRegionSize
-                                    expectedCut = maxRegionSize;
-                                else
-                                    expectedCut = secondMin + 1 > maxRegionSize ? maxRegionSize : ( secondMin + 1 < minRegionSize ? maxRegionSize : secondMin + 1);
-                            } else if ( firstMin + 1 > maxRegionSize )
-                                expectedCut = maxRegionSize;
-                            else {
-                                expectedCut = firstMin + 1;
-                            }
-
-                            Math.min(firstMin + 1, maxRegionSize);
-                            tests.add(new Object[]{minRegionSize, maxRegionSize, expectedCut, probs});
-                        }
-                    }
-                }
-            }
-        }
-
-        return tests.toArray(new Object[][]{});
-    }
-
-    private double[] makeGaussian(final int mean, final int range, final double sigma) {
-        final double[] gauss = new double[range];
-        for( int iii = 0; iii < range; iii++ ) {
-            gauss[iii] = MathUtils.normalDistribution(mean, sigma, iii) + ACTIVE_PROB_THRESHOLD;
-        }
-        return gauss;
-    }
-
-    private int findCutSiteForTwoMaxPeaks(final List<Double> probs, final int minRegionSize) {
-        for ( int i = probs.size() - 2; i > minRegionSize; i-- ) {
-            double prev = probs.get(i - 1);
-            double next = probs.get(i + 1);
-            double cur = probs.get(i);
-            if ( cur < next && cur < prev )
-                return i + 1;
-        }
-
-        return -1;
-    }
-
-    @Test(dataProvider = "ActiveRegionCutTests")
-    public void testActiveRegionCutTests(final int minRegionSize, final int maxRegionSize, final int expectedRegionSize, final List<Double> probs) {
-        final ActivityProfile profile = new ActivityProfile(genomeLocParser, MAX_PROB_PROPAGATION_DISTANCE, ACTIVE_PROB_THRESHOLD);
-
-        final String contig = genomeLocParser.getContigs().getSequences().get(0).getSequenceName();
-        for ( int i = 0; i <= maxRegionSize + profile.getMaxProbPropagationDistance(); i++ ) {
-            final GenomeLoc loc = genomeLocParser.createGenomeLoc(contig, i + 1);
-            final double prob = i < probs.size() ? probs.get(i) : 0.0;
-            final ActivityProfileState state = new ActivityProfileState(loc, prob);
-            profile.add(state);
-        }
-
-        final List<ActiveRegion> regions = profile.popReadyActiveRegions(0, minRegionSize, maxRegionSize, false);
-        Assert.assertTrue(regions.size() >= 1, "Should only be one regions for this test");
-        final ActiveRegion region = regions.get(0);
-        Assert.assertEquals(region.getLocation().getStart(), 1, "Region should start at 1");
-        Assert.assertEquals(region.getLocation().size(), expectedRegionSize, "Incorrect region size; cut must have been incorrect");
-    }
-}
\ No newline at end of file
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/activeregion/BandPassActivityProfileUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/activeregion/BandPassActivityProfileUnitTest.java
deleted file mode 100644
index 2087d9a..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/activeregion/BandPassActivityProfileUnitTest.java
+++ /dev/null
@@ -1,339 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.activeregion;
-
-
-// the imports for unit testing.
-
-
-import htsjdk.samtools.reference.ReferenceSequenceFile;
-import org.apache.commons.lang.ArrayUtils;
-import htsjdk.tribble.readers.LineIterator;
-import org.broadinstitute.gatk.utils.BaseTest;
-import org.broadinstitute.gatk.utils.GenomeLoc;
-import org.broadinstitute.gatk.utils.GenomeLocParser;
-import org.broadinstitute.gatk.utils.MathUtils;
-import org.broadinstitute.gatk.utils.collections.Pair;
-import org.broadinstitute.gatk.utils.fasta.CachingIndexedFastaSequenceFile;
-import org.broadinstitute.gatk.utils.variant.GATKVCFUtils;
-import htsjdk.variant.variantcontext.VariantContext;
-import htsjdk.variant.vcf.VCFCodec;
-import htsjdk.variant.vcf.VCFHeader;
-import org.testng.Assert;
-import org.testng.annotations.BeforeClass;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
-
-import java.io.File;
-import java.io.FileNotFoundException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.LinkedList;
-import java.util.List;
-
-
-public class BandPassActivityProfileUnitTest extends BaseTest {
-    private final static boolean DEBUG = false;
-    private GenomeLocParser genomeLocParser;
-
-    private final static int MAX_PROB_PROPAGATION_DISTANCE = 50;
-    private final static double ACTIVE_PROB_THRESHOLD= 0.002;
-
-    @BeforeClass
-    public void init() throws FileNotFoundException {
-        // sequence
-        ReferenceSequenceFile seq = new CachingIndexedFastaSequenceFile(new File(b37KGReference));
-        genomeLocParser = new GenomeLocParser(seq);
-    }
-
-    @DataProvider(name = "BandPassBasicTest")
-    public Object[][] makeBandPassTest() {
-        final List<Object[]> tests = new LinkedList<Object[]>();
-
-        for ( int start : Arrays.asList(1, 10, 100, 1000) ) {
-            for ( boolean precedingIsActive : Arrays.asList(true, false) ) {
-                for ( int precedingSites: Arrays.asList(0, 1, 10, 100) ) {
-                    for ( int bandPassSize : Arrays.asList(0, 1, 10, 100) ) {
-                        for ( double sigma : Arrays.asList(1.0, 2.0, BandPassActivityProfile.DEFAULT_SIGMA) ) {
-//        for ( int start : Arrays.asList(10) ) {
-//            for ( boolean precedingIsActive : Arrays.asList(false) ) {
-//                for ( int precedingSites: Arrays.asList(0) ) {
-//                    for ( int bandPassSize : Arrays.asList(1) ) {
-                            tests.add(new Object[]{ start, precedingIsActive, precedingSites, bandPassSize, sigma });
-                        }
-                    }
-                }
-            }
-        }
-
-        return tests.toArray(new Object[][]{});
-    }
-
-    @Test(enabled = ! DEBUG, dataProvider = "BandPassBasicTest")
-    public void testBandPass(final int start, final boolean precedingIsActive, final int nPrecedingSites, final int bandPassSize, final double sigma) {
-        final BandPassActivityProfile profile = new BandPassActivityProfile(genomeLocParser, null, MAX_PROB_PROPAGATION_DISTANCE, ACTIVE_PROB_THRESHOLD, bandPassSize, sigma, false);
-
-        final int expectedBandSize = bandPassSize * 2 + 1;
-        Assert.assertEquals(profile.getFilteredSize(), bandPassSize, "Wrong filter size");
-        Assert.assertEquals(profile.getSigma(), sigma, "Wrong sigma");
-        Assert.assertEquals(profile.getBandSize(), expectedBandSize, "Wrong expected band size");
-
-        final String contig = genomeLocParser.getContigs().getSequences().get(0).getSequenceName();
-        final double precedingProb = precedingIsActive ? 1.0 : 0.0;
-        for ( int i = 0; i < nPrecedingSites; i++ ) {
-            final GenomeLoc loc = genomeLocParser.createGenomeLoc(contig, i + start);
-            final ActivityProfileState state = new ActivityProfileState(loc, precedingProb);
-            profile.add(state);
-        }
-
-        final GenomeLoc nextLoc = genomeLocParser.createGenomeLoc(contig, nPrecedingSites + start);
-        profile.add(new ActivityProfileState(nextLoc, 1.0));
-
-        if ( precedingIsActive == false && nPrecedingSites >= bandPassSize && bandPassSize < start ) {
-            // we have enough space that all probs fall on the genome
-            final double[] probs = profile.getProbabilitiesAsArray();
-            Assert.assertEquals(MathUtils.sum(probs), 1.0 * (nPrecedingSites * precedingProb + 1), 1e-3, "Activity profile doesn't sum to number of non-zero prob states");
-        }
-    }
-
-    private double[] bandPassInOnePass(final BandPassActivityProfile profile, final double[] activeProbArray) {
-        final double[] bandPassProbArray = new double[activeProbArray.length];
-
-        // apply the band pass filter for activeProbArray into filteredProbArray
-        final double[] GaussianKernel = profile.getKernel();
-        for( int iii = 0; iii < activeProbArray.length; iii++ ) {
-            final double[] kernel = ArrayUtils.subarray(GaussianKernel, Math.max(profile.getFilteredSize() - iii, 0), Math.min(GaussianKernel.length, profile.getFilteredSize() + activeProbArray.length - iii));
-            final double[] activeProbSubArray = ArrayUtils.subarray(activeProbArray, Math.max(0,iii - profile.getFilteredSize()), Math.min(activeProbArray.length,iii + profile.getFilteredSize() + 1));
-            bandPassProbArray[iii] = dotProduct(activeProbSubArray, kernel);
-        }
-
-        return bandPassProbArray;
-    }
-
-    public static double dotProduct(double[] v1, double[] v2) {
-        Assert.assertEquals(v1.length,v2.length,"Array lengths do not mach in dotProduct");
-        double result = 0.0;
-        for (int k = 0; k < v1.length; k++)
-            result += v1[k] * v2[k];
-
-        return result;
-    }
-
-    @DataProvider(name = "BandPassComposition")
-    public Object[][] makeBandPassComposition() {
-        final List<Object[]> tests = new LinkedList<Object[]>();
-
-        for ( int bandPassSize : Arrays.asList(0, 1, 10, 100, BandPassActivityProfile.MAX_FILTER_SIZE) ) {
-            for ( int integrationLength : Arrays.asList(1, 10, 100, 1000) ) {
-                tests.add(new Object[]{ bandPassSize, integrationLength });
-            }
-        }
-
-        return tests.toArray(new Object[][]{});
-    }
-
-    @Test( enabled = ! DEBUG, dataProvider = "BandPassComposition")
-    public void testBandPassComposition(final int bandPassSize, final int integrationLength) {
-        final int start = 1;
-        final BandPassActivityProfile profile = new BandPassActivityProfile(genomeLocParser, null, MAX_PROB_PROPAGATION_DISTANCE,
-                ACTIVE_PROB_THRESHOLD, bandPassSize, BandPassActivityProfile.DEFAULT_SIGMA);
-        final double[] rawActiveProbs = new double[integrationLength + bandPassSize * 2];
-
-        // add a buffer so that we can get all of the band pass values
-        final String contig = genomeLocParser.getContigs().getSequences().get(0).getSequenceName();
-        int pos = start;
-        int rawProbsOffset = 0;
-        for ( int i = 0; i < bandPassSize; i++ ) {
-            final GenomeLoc loc = genomeLocParser.createGenomeLoc(contig, pos++);
-            final ActivityProfileState state = new ActivityProfileState(loc, 0.0);
-            profile.add(state);
-            rawActiveProbs[rawProbsOffset++] = 0.0;
-            rawActiveProbs[rawActiveProbs.length - rawProbsOffset] = 0.0;
-        }
-
-        for ( int i = 0; i < integrationLength; i++ ) {
-            final GenomeLoc nextLoc = genomeLocParser.createGenomeLoc(contig, pos++);
-            profile.add(new ActivityProfileState(nextLoc, 1.0));
-            rawActiveProbs[rawProbsOffset++] = 1.0;
-
-            for ( int j = 0; j < profile.size(); j++ ) {
-                Assert.assertTrue(profile.getStateList().get(j).isActiveProb >= 0.0, "State probability < 0 at " + j);
-                Assert.assertTrue(profile.getStateList().get(j).isActiveProb <= 1.0 + 1e-3, "State probability > 1 at " + j);
-            }
-        }
-
-        final double[] expectedProbs = bandPassInOnePass(profile, rawActiveProbs);
-        for ( int j = 0; j < profile.size(); j++ ) {
-            Assert.assertEquals(profile.getStateList().get(j).isActiveProb, expectedProbs[j], "State probability not expected at " + j);
-        }
-    }
-
-    // ------------------------------------------------------------------------------------
-    //
-    // Code to test the creation of the kernels
-    //
-    // ------------------------------------------------------------------------------------
-
-    /**
-
-     kernel <- function(sd, pThres) {
-     raw = dnorm(-80:81, mean=0, sd=sd)
-     norm = raw / sum(raw)
-     bad = norm < pThres
-     paste(norm[! bad], collapse=", ")
-     }
-
-     print(kernel(0.01, 1e-5))
-     print(kernel(1, 1e-5))
-     print(kernel(5, 1e-5))
-     print(kernel(17, 1e-5))
-
-     * @return
-     */
-
-    @DataProvider(name = "KernelCreation")
-    public Object[][] makeKernelCreation() {
-        final List<Object[]> tests = new LinkedList<Object[]>();
-
-        tests.add(new Object[]{ 0.01, 1000, new double[]{1.0}});
-        tests.add(new Object[]{ 1.0, 1000, new double[]{0.0001338302, 0.004431848, 0.053990966, 0.241970723, 0.398942278, 0.241970723, 0.053990966, 0.004431848, 0.0001338302}});
-        tests.add(new Object[]{ 1.0, 0, new double[]{1.0}});
-        tests.add(new Object[]{ 1.0, 1, new double[]{0.2740686, 0.4518628, 0.2740686}});
-        tests.add(new Object[]{ 1.0, 2, new double[]{0.05448868, 0.24420134, 0.40261995, 0.24420134, 0.05448868}});
-        tests.add(new Object[]{ 1.0, 1000, new double[]{0.0001338302, 0.004431848, 0.053990966, 0.241970723, 0.398942278, 0.241970723, 0.053990966, 0.004431848, 0.0001338302}});
-        tests.add(new Object[]{ 5.0, 1000, new double[]{1.1788613551308e-05, 2.67660451529771e-05, 5.83893851582921e-05, 0.000122380386022754, 0.000246443833694604, 0.000476817640292968, 0.000886369682387602, 0.00158309031659599, 0.00271659384673712, 0.00447890605896858, 0.00709491856924629, 0.0107981933026376, 0.0157900316601788, 0.0221841669358911, 0.029945493127149, 0.0388372109966426, 0.0483941449038287, 0.0579383105522965, 0.0666449205783599, 0.0736540280606647, 0.0782085387950912,  [...]
-        tests.add(new Object[]{17.0, 1000, new double[]{1.25162575710745e-05, 1.57001772728555e-05, 1.96260034693739e-05, 2.44487374842009e-05, 3.03513668801384e-05, 3.75489089511911e-05, 4.62928204154855e-05, 5.68757597480354e-05, 6.96366758708924e-05, 8.49661819944029e-05, 0.000103312156275406, 0.000125185491708561, 0.000151165896477646, 0.000181907623161359, 0.000218144981137171, 0.000260697461819069, 0.000310474281706066, 0.000368478124457557, 0.000435807841336874, 0.0005136598504885 [...]
-
-        return tests.toArray(new Object[][]{});
-    }
-
-    @Test( enabled = ! DEBUG, dataProvider = "KernelCreation")
-    public void testKernelCreation(final double sigma, final int maxSize, final double[] expectedKernel) {
-        final BandPassActivityProfile profile = new BandPassActivityProfile(genomeLocParser, null, MAX_PROB_PROPAGATION_DISTANCE, ACTIVE_PROB_THRESHOLD,
-                maxSize, sigma, true);
-
-        final double[] kernel = profile.getKernel();
-        Assert.assertEquals(kernel.length, expectedKernel.length);
-        for ( int i = 0; i < kernel.length; i++ )
-            Assert.assertEquals(kernel[i], expectedKernel[i], 1e-3, "Kernels not equal at " + i);
-    }
-
-    // ------------------------------------------------------------------------------------
-    //
-    // Large-scale test, reading in 1000G Phase I chr20 calls and making sure that
-    // the regions returned are the same if you run on the entire profile vs. doing it
-    // incremental
-    //
-    // ------------------------------------------------------------------------------------
-
-    @DataProvider(name = "VCFProfile")
-    public Object[][] makeVCFProfile() {
-        final List<Object[]> tests = new LinkedList<Object[]>();
-
-        //tests.add(new Object[]{ privateTestDir + "ALL.chr20.phase1_release_v3.20101123.snps_indels_svs.sites.vcf", "20", 60470, 61000});
-        //tests.add(new Object[]{ privateTestDir + "ALL.chr20.phase1_release_v3.20101123.snps_indels_svs.sites.vcf", "20", 60470, 100000});
-        //tests.add(new Object[]{ privateTestDir + "ALL.chr20.phase1_release_v3.20101123.snps_indels_svs.sites.vcf", "20", 60470, 1000000});
-        tests.add(new Object[]{ privateTestDir + "ALL.chr20.phase1_release_v3.20101123.snps_indels_svs.sites.vcf", "20", 60470, 1000000});
-        tests.add(new Object[]{ privateTestDir + "NA12878.WGS.b37.chr20.firstMB.vcf", "20", 1, 1000000});
-
-        return tests.toArray(new Object[][]{});
-    }
-
-    @Test( dataProvider = "VCFProfile")
-    public void testVCFProfile(final String path, final String contig, final int start, final int end) throws Exception {
-        final int extension = 50;
-        final int minRegionSize = 50;
-        final int maxRegionSize = 300;
-
-        final File file = new File(path);
-        final VCFCodec codec = new VCFCodec();
-        final Pair<VCFHeader, GATKVCFUtils.VCIterable<LineIterator>> reader = GATKVCFUtils.readAllVCs(file, codec);
-
-        final List<ActiveRegion> incRegions = new ArrayList<ActiveRegion>();
-        final BandPassActivityProfile incProfile = new BandPassActivityProfile(genomeLocParser, null, MAX_PROB_PROPAGATION_DISTANCE, ACTIVE_PROB_THRESHOLD);
-        final BandPassActivityProfile fullProfile = new BandPassActivityProfile(genomeLocParser, null, MAX_PROB_PROPAGATION_DISTANCE, ACTIVE_PROB_THRESHOLD);
-        int pos = start;
-        for ( final VariantContext vc : reader.getSecond() ) {
-            if ( vc == null ) continue;
-            while ( pos < vc.getStart() ) {
-                final GenomeLoc loc = genomeLocParser.createGenomeLoc(contig, pos);
-                //logger.warn("Adding 0.0 at " + loc + " because vc.getStart is " + vc.getStart());
-                incProfile.add(new ActivityProfileState(loc, 0.0));
-                fullProfile.add(new ActivityProfileState(loc, 0.0));
-                pos++;
-            }
-            if ( vc.getStart() >= start && vc.getEnd() <= end ) {
-                final GenomeLoc loc = genomeLocParser.createGenomeLoc(contig, pos);
-                //logger.warn("Adding 1.0 at " + loc);
-                ActivityProfileState.Type type = ActivityProfileState.Type.NONE;
-                Number value = null;
-                if ( vc.isBiallelic() && vc.isIndel() ) {
-                    type = ActivityProfileState.Type.HIGH_QUALITY_SOFT_CLIPS;
-                    value = Math.abs(vc.getIndelLengths().get(0));
-                }
-                final ActivityProfileState state = new ActivityProfileState(loc, 1.0, type, value);
-                incProfile.add(state);
-                fullProfile.add(state);
-                pos++;
-            }
-
-            incRegions.addAll(incProfile.popReadyActiveRegions(extension, minRegionSize, maxRegionSize, false));
-
-            if ( vc.getStart() > end )
-                break;
-        }
-
-        incRegions.addAll(incProfile.popReadyActiveRegions(extension, minRegionSize, maxRegionSize, true));
-
-        final List<ActiveRegion> fullRegions = fullProfile.popReadyActiveRegions(extension, minRegionSize, maxRegionSize, true);
-        assertGoodRegions(fullRegions, start, end, maxRegionSize);
-        assertGoodRegions(incRegions, start, end, maxRegionSize);
-
-        Assert.assertEquals(incRegions.size(),  fullRegions.size(), "incremental and full region sizes aren't the same");
-        for ( int i = 0; i < fullRegions.size(); i++ ) {
-            final ActiveRegion incRegion = incRegions.get(i);
-            final ActiveRegion fullRegion = fullRegions.get(i);
-            Assert.assertTrue(incRegion.equalExceptReads(fullRegion), "Full and incremental regions are not equal: full = " + fullRegion + " inc = " + incRegion);
-        }
-    }
-
-    private void assertGoodRegions(final List<ActiveRegion> regions, final int start, final int end, final int maxRegionSize) {
-        int lastPosSeen = start - 1;
-        for ( int regionI = 0; regionI < regions.size(); regionI++ ) {
-            final ActiveRegion region = regions.get(regionI);
-            Assert.assertEquals(region.getLocation().getStart(), lastPosSeen + 1, "discontinuous with previous region.  lastPosSeen " + lastPosSeen + " but region is " + region);
-            Assert.assertTrue(region.getLocation().size() <= maxRegionSize, "Region is too big: " + region);
-            lastPosSeen = region.getLocation().getStop();
-
-            for ( final ActivityProfileState state : region.getSupportingStates() ) {
-                Assert.assertEquals(state.isActiveProb > ACTIVE_PROB_THRESHOLD, region.isActive(),
-                        "Region is active=" + region.isActive() + " but contains a state " + state + " with prob "
-                                + state.isActiveProb + " not within expected values given threshold for activity of "
-                                + ACTIVE_PROB_THRESHOLD);
-            }
-        }
-    }
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/baq/BAQUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/baq/BAQUnitTest.java
deleted file mode 100644
index 7ea26ee..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/baq/BAQUnitTest.java
+++ /dev/null
@@ -1,257 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.baq;
-
-
-import org.broadinstitute.gatk.utils.exceptions.UserException;
-import org.testng.Assert;
-import org.testng.annotations.Test;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.BeforeMethod;
-import org.broadinstitute.gatk.utils.BaseTest;
-import org.broadinstitute.gatk.utils.sam.ArtificialSAMUtils;
-import org.broadinstitute.gatk.utils.Utils;
-
-import java.io.File;
-import java.io.FileNotFoundException;
-import java.io.PrintStream;
-import java.util.List;
-import java.util.ArrayList;
-
-import htsjdk.samtools.reference.IndexedFastaSequenceFile;
-import htsjdk.samtools.*;
-
-/**
- * Basic unit test for BAQ calculation
- */
-public class BAQUnitTest extends BaseTest {
-    private SAMFileHeader header;
-    private final int startChr = 1;
-    private final int numChr = 2;
-    private final int chrSize = 1000;
-    IndexedFastaSequenceFile fasta = null;
-
-    @BeforeMethod
-    public void before() {
-        header = ArtificialSAMUtils.createArtificialSamHeader(numChr, startChr, chrSize);
-        File referenceFile = new File(hg18Reference);
-        try {
-            fasta = new IndexedFastaSequenceFile(referenceFile);
-        }
-        catch(FileNotFoundException ex) {
-            throw new UserException.CouldNotReadInputFile(referenceFile,ex);
-        }
-    }
-
-    private class BAQTest {
-        String readBases, refBases;
-        byte[] quals, expected;
-        String cigar;
-        int refOffset;
-        int pos;
-
-        public BAQTest(String _refBases, String _readBases, String _quals, String _expected) {
-            this(0, -1, null, _readBases, _refBases, _quals, _expected);
-        }
-
-        public BAQTest(int refOffset, String _refBases, String _readBases, String _quals, String _expected) {
-            this(refOffset, -1, null, _refBases, _readBases, _quals, _expected);
-        }
-
-        public BAQTest(long pos, String cigar, String _readBases, String _quals, String _expected) {
-            this(0, pos, cigar, null, _readBases, _quals, _expected);
-        }
-
-
-        public BAQTest(int _refOffset, long _pos, String _cigar, String _refBases, String _readBases, String _quals, String _expected) {
-            refOffset = _refOffset;
-            pos = (int)_pos;
-            cigar = _cigar;
-            readBases = _readBases;
-            refBases = _refBases;
-
-            quals = new byte[_quals.getBytes().length];
-            expected = new byte[_quals.getBytes().length];
-            for ( int i = 0; i < quals.length; i++) {
-                quals[i] = (byte)(_quals.getBytes()[i] - 33);
-                expected[i] = (byte)(_expected.getBytes()[i] - 33);
-            }
-        }
-
-        public String toString() { return readBases; }
-
-        public SAMRecord createRead() {
-            SAMRecord read = ArtificialSAMUtils.createArtificialRead(header, "foo", 0, pos > 0 ? pos + (refOffset > 0 ? refOffset : 0): 1, readBases.getBytes(), quals);
-            //if ( cigar != null ) read.setAlignmentEnd(readBases.getBytes().length + pos);
-            read.setCigarString( cigar == null ? String.format("%dM", quals.length) : cigar);
-            return read;
-        }
-    }
-
-
-    @DataProvider(name = "data")
-    public Object[][] createData1() {
-        List<BAQTest> params = new ArrayList<BAQTest>();
-
-        params.add(new BAQTest("GCTGCTCCTGGTACTGCTGGATGAGGGCCTCGATGAAGCTAAGCTTTTTCTCCTGCTCCTGCGTGATCCGCTGCAG",
-                               "GCTGCTCCTGGTACTGCTGGATGAGGGCCTCGATGAAGCTAAGCTTTTCCTCCTGCTCCTGCGTGATCCGCTGCAG",
-                               "?BACCBDDDFFBCFFHHFIHFEIFHIGHHGHBFEIFGIIGEGIIHGGGIHHIIHIIHIIHGICCIGEII at IGIHCG",
-                               "?BACCBDDDFFBCFFHHFIHFEIFHIGHHGHBFEIFGIIGEGII410..0HIIHIIHIIHGICCIGEII at IGIHCE"));
-
-        params.add(new BAQTest("GCTTTTTCTCCTCCTG",
-                               "GCTTTTCCTCCTCCTG",
-                               "IIHGGGIHHIIHHIIH",
-                               "EI410..0HIIHHIIE"));
-
-        // big and complex, also does a cap from 3 to 4!
-        params.add(new BAQTest(-3, 9999810l, "49M1I126M1I20M1I25M",
-                                "AAATTCAAGATTTCAAAGGCTCTTAACTGCTCAAGATAATTTTTTTTTTTTGAGACAGAGTCTTGCTGTGTTGCCCAGGCTGGAGTGCAGTGGCGTGATCTTGGCTCACTGCAAGCTCCGCCTCCCGGGTTCACGCCATTCTCCTGCCTCAGCCTCCCGAGTAGCTGGGACTACAGGCACCCACCACCACGCCTGGCCAATTTTTTTGTATTTTTAGTAGAGATAG",
-                                "TTCAAGATTTCAAAGGCTCTTAACTGCTCAAGATAATTTTTTTTTTTTGTAGACAGAGTCTTGCTGTGTTGCCCAGGCTGGAGTGCAGTGGCGTGATCTTGGCTCACTGCAAGCTCCGCCTCCCGGGTTCACGCCATTCTCCTGCCTCAGCCTCCCGAGTAGCTGGGACTACAGGCCACCCACCACCACGCCTGGCCTAATTTTTTTGTATTTTTAGTAGAGA",
-                                ">IHFECEBDBBCBCABABAADBD?AABBACEABABC?>?B>@A@@>A?B3BBC?CBDBAABBBBBAABAABBABDACCCBCDAACBCBABBB:ABDBACBBDCCCCABCDCCBCC@@;?<B at BC;CBBBAB=;A>ACBABBBABBCA@@<?>>AAA<CA at AABBABCC?BB8@<@C<>5;<A5=A;>=64>???B>=6497<<;;<;>2?>BA@??A6<<A59",
-                                ">EHFECEBDBBCBCABABAADBD?AABBACEABABC?>?B>@A@@>A?838BC?CBDBAABBBBBAABAABBABDACCCBCDAACBCBABBB:ABDBACBBDCCCCABCDCCBCC@@;?<B at BC;CBBBAB=;A>ACBABBBABBCA@@<?>>AAA<CA at AABBABCC?BB8@<@%<>5;<A5=A;>=64>???B;86497<<;;<;>2?>BA@??A6<<A59"));
-
-        // now changes
-        params.add(new BAQTest(-3, 9999966l, "36M",
-                                "CCGAGTAGCTGGGACTACAGGCACCCACCACCACGCCTGGCC",
-                                "AGTAGCTGGGACTACAGGCACCCACCACCACGCCTG",
-                                "A?>>@>AA?@@>A?>A@?>@>>?=>?'>?=>7=?A9",
-                                "A?>>@>AA?@@>A?>A@?>@>>?=>?'>?=>7=?A9"));
-
-        // raw base qualities are low -- but they shouldn't be capped
-        params.add(new BAQTest(-3, 9999993l, "36M",
-                                "CCACCACGCCTGGCCAATTTTTTTGTATTTTTAGTAGAGATA",
-                                "CCACGCTTGGCAAAGTTTTCCGTACGTTTAGCCGAG",
-                                "33'/(7+270&4),(&&-)$&,%7$',-/61(,6?8",
-                                "33'/(7+270&4),(&&-)$&,%7$',-/61(,6?8"));
-
-        // soft clipping
-        // todo soft clip testing just doesn't work right now!
-
-//        params.add(new BAQTest(29, 10000109l, "29S190M",
-//                                null, "GAAGGTTGAATCAAACCTTCGGTTCCAACGGATTACAGGTGTGAGCCACCGCGACCGGCCTGCTCAAGATAATTTTTAGGGCTAACTATGACATGAACCCCAAAATTCCTGTCCTCTAGATGGCAGAAACCAAGATAAAGTATCCCCACATGGCCACAAGGTTAAGCTCTTATGGACACAAAACAAGGCAGAGAAATGTCATTTGGCATTGGTTTCAGG",
-//                                "3737088:858278273772:3<=;:?;5=9@>@?>@=<>8?>@=>>?>4=5>?=5====A==@?A@=@6 at A><?B:A;:;>@A?>?AA>@?AA>A?>==?AAA@@A>=A<A>>A=?A>AA==@A?AA?>?AA?A@@C@:?A@<;::??AA==>@@?BB=<A?BA>>A>A?AB=???@?BBA@?BA==?A>A?BB=A:@?ABAB>>?ABB>8A at BAIGA",
-//                                "3737088:858278273772:3<=;:?;5=9@>@?>@=<>8?>@=>>?>4=5>?=5====A==@?A@=@6 at A><?B:A;:;>@A?>?AA>@?AA>A?>==?AAA@@A>=A<A>>A=?A>AA==@A?AA?>?AA?A@@C@:?A@<;::??AA==>@@?BB=<A?BA>>A>A?AB=???@?BBA@?BA==?A>A?BB=A:@?ABAB>>?ABB>8A at BAI>;"));
-
-//        params.add(new BAQTest(30, 10000373l, "30S69M1D2M",
-//                                null, "TGAAATCCTGCCTTATAGTTCCCCTAAACCCACGTTCTATCCCCAGATACTCCCCTCTTCATTACAGAACAACAAAGAAAGACAAATTCTTAGCATCAATG",
-//                                "###############################=89>B;6<;96*>.1799>++66=:=:8=<-.9>><;9<':-+;*+::=;8=;;.::<:;=/2=70<=?-",
-//                                "###############################=89>B;6<;96*>.1799>++66=:=:8=<-.9>><;9<':-+;*+::=;8=;;.::<:;=/2=7000%%"));
-
-
-//        params.add(new BAQTest(5, 10000109l, "5S5M",
-//                                "GAAGGTTGAA",
-//                                null,
-//                                "HHHHHHHHHH",
-//                                "HHHHHHHHHE"));
-
-//        params.add(new BAQTest(10009480l, "102M1I18M1I16M1I43M1I10M1D9M1I7M1I7M1I16M1I9M1I8M1I14M2I18M",
-//                                "AGAGATGGGGTTTCGCCATGTTGTCCAGGCTGGTCTTGAACTCCTGACCTCAAGTGATCTGCCCACCTCGGCCTCCCAAAGTGCTGGGATTACACGTGTGAAACCACCATGCCTGGTCTCTTAATTTTTCNGATTCTAATAAAATTACATTCTATTTGCTGAAAGNGTACTTTAGAGTTGAAAGAAAAAGAAAGGNGTGGAACTTCCCCTAGTAAACAAGGAAAAACNTCCATGTTATTTATTGGACCTTAAAAATAGTGAAACATCTTAAGAAAAAAAATCAATCCTA",
-//                                "@HI at BA<?C@?CA>7>=AA>9@==??C???@?>:?BB at BA>B?=A@@<=B?AB???@@@@@?=?A==B at 7<<?@>==>=<=>???>=@@A?<=B:5?413577/675;><;==@=<>>968;6;>????:#;=?>:3072077726/6;3719;9A=9;774771#30532676??=8::97<7144448/4425#65688821515986255/5601548355551#218>96/5/8<4/.2344/914/55553)1047;:30312:4:63556565631=:62610",
-//                                "@HI at BA<?C@?CA>7>=AA>9@==??C???@?>:?BB at BA>B?=A@@<=B?AB???@@@@@?=?A==B at 7<<?@>==>=<=>???>=@@A?<=B:5?413&!7/675;><;==@=<>>96!;6;>????:#;=?>:3!72077726/6;3719;9A=9;774771#30532676??=8::&!<7144448'$!25#65687421515986255/560!548355551#218>96!5/8<4/.2344/614(%!!53)1047;:30312:4:63556565631=:62610"));
-
-        List<Object[]> params2 = new ArrayList<Object[]>();
-        for ( BAQTest x : params ) params2.add(new Object[]{x});
-        return params2.toArray(new Object[][]{});
-    }
-
-
-
-    @Test(dataProvider = "data", enabled = true)
-    public void testBAQWithProvidedReference(BAQTest test) {
-        if ( test.refBases != null ) {
-            testBAQ(test, false);
-        }
-    }
-
-    @Test(dataProvider = "data", enabled = true)
-    public void testBAQWithCigarAndRefLookup(BAQTest test) {
-        if ( test.cigar != null ) {
-            testBAQ(test, true);
-        }
-    }
-
-    @Test(enabled = true)
-    public void testBAQQualRange() {
-        BAQ baq = new BAQ(1e-3, 0.1, 7, (byte)4, false);         // matches current samtools parameters
-        final byte ref = (byte)'A';
-        final byte alt = (byte)'A';
-
-        for ( int i = 0; i <= SAMUtils.MAX_PHRED_SCORE; i++ )
-            Assert.assertTrue(baq.calcEpsilon( ref, alt, (byte)i) >= 0.0, "Failed to get baq epsilon range");
-    }
-
-    @Test(enabled = true)
-    public void testBAQOverwritesExistingTagWithNull() {
-
-        // create a read with a single base off the end of the contig, which cannot be BAQed
-        final SAMRecord read = ArtificialSAMUtils.createArtificialRead(header, "foo", 0, fasta.getSequenceDictionary().getSequence("chr1").getSequenceLength() + 1, 1);
-        read.setReadBases(new byte[] {(byte) 'A'});
-        read.setBaseQualities(new byte[] {(byte) 20});
-        read.setCigarString("1M");
-        read.setAttribute("BQ", "A");
-
-        // try to BAQ and tell it to RECALCULATE AND ADD_TAG
-        BAQ baq = new BAQ(1e-3, 0.1, 7, (byte)4, false);
-        baq.baqRead(read, fasta, BAQ.CalculationMode.RECALCULATE, BAQ.QualityMode.ADD_TAG);
-
-        // did we remove the existing tag?
-        Assert.assertTrue(read.getAttribute("BQ") == null);
-    }
-
-    public void testBAQ(BAQTest test, boolean lookupWithFasta) {
-        BAQ baqHMM = new BAQ(1e-3, 0.1, 7, (byte)4, false);         // matches current samtools parameters
-
-        SAMRecord read = test.createRead();
-        BAQ.BAQCalculationResult result;
-        if ( lookupWithFasta && test.cigar != null )
-            result = baqHMM.calcBAQFromHMM(read, fasta);
-        else
-            result = baqHMM.calcBAQFromHMM(read, test.refBases.getBytes(), test.refOffset);
-
-        System.out.println(Utils.dupString('-', 40));
-        System.out.println("reads   : " + new String(test.readBases));
-        printQuals(System.out, "in-quals:", test.quals, false);
-        printQuals(System.out, "bq-quals:", result.bq, false);
-        for (int i = 0; i < test.quals.length; i++) {
-            //result.bq[i] = baqHMM.capBaseByBAQ(result.rawQuals[i], result.bq[i], result.state[i], i);
-            Assert.assertTrue(result.bq[i] >= baqHMM.getMinBaseQual() || test.expected[i] < baqHMM.getMinBaseQual(), "BQ < min base quality");
-            Assert.assertEquals(result.bq[i], test.expected[i], "Did not see the expected BAQ value at " + i);
-        }
-
-    }
-
-    public final static void printQuals( PrintStream out, String prefix, byte[] quals, boolean asInt ) {
-        out.print(prefix);
-        for ( int i = 0; i < quals.length; i++) {
-            if ( asInt ) {
-                out.printf("%2d", (int)quals[i]);
-                if ( i+1 != quals.length ) out.print(",");
-            } else
-                out.print((char)(quals[i]+33));
-        }
-        out.println();
-    }
-}
\ No newline at end of file
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/classloader/JVMUtilsUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/classloader/JVMUtilsUnitTest.java
deleted file mode 100644
index c232e1c..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/classloader/JVMUtilsUnitTest.java
+++ /dev/null
@@ -1,75 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.classloader;
-
-import org.testng.Assert;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
-
-public class JVMUtilsUnitTest {
-
-    // Test classes used by the tests for JVMUtils.getCallingClass():
-    private static class DummyTestClass1 {
-        public static Class getCaller( final Class callee ) {
-            return DummyTestClass2.getCaller(callee);
-        }
-    }
-
-    private static class DummyTestClass2 {
-        public static Class getCaller( final Class callee ) {
-            return DummyTestClass3.getCaller(callee);
-        }
-    }
-
-    private static class DummyTestClass3 {
-        public static Class getCaller( final Class callee ) {
-            return JVMUtils.getCallingClass(callee);
-        }
-    }
-
-    @DataProvider( name = "TestGetCallingClassDataProvider" )
-    public Object[][] getTestCallingClassTestData() {
-        return new Object[][] {
-            { DummyTestClass1.class, JVMUtilsUnitTest.class },
-            { DummyTestClass2.class, DummyTestClass1.class },
-            { DummyTestClass3.class, DummyTestClass2.class }
-        };
-    }
-
-    @Test( dataProvider = "TestGetCallingClassDataProvider" )
-    public void testGetCallingClass( final Class callee, final Class expectedCaller ) {
-        final Class reportedCaller = DummyTestClass1.getCaller(callee);
-
-        Assert.assertEquals(reportedCaller, expectedCaller,
-                            String.format("Wrong calling class returned from DummyTestClass1.getCaller(%s)", callee.getSimpleName()));
-    }
-
-    @Test( expectedExceptions = IllegalArgumentException.class )
-    public void testGetCallingClassCalleeNotFound() {
-        // Trying to get the calling class of a class not on the runtime stack should produce an exception.
-        JVMUtils.getCallingClass(DummyTestClass1.class);
-    }
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/clipping/ReadClipperTestUtils.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/clipping/ReadClipperTestUtils.java
deleted file mode 100644
index 8ce0a9e..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/clipping/ReadClipperTestUtils.java
+++ /dev/null
@@ -1,144 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.clipping;
-
-import htsjdk.samtools.Cigar;
-import htsjdk.samtools.CigarElement;
-import htsjdk.samtools.CigarOperator;
-import htsjdk.samtools.TextCigarCodec;
-import org.broadinstitute.gatk.utils.Utils;
-import org.broadinstitute.gatk.utils.sam.ArtificialSAMUtils;
-import org.broadinstitute.gatk.utils.sam.CigarUtils;
-import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
-import org.testng.Assert;
-
-import java.util.LinkedList;
-import java.util.List;
-import java.util.Stack;
-
-public class ReadClipperTestUtils {
-    //Should contain all the utils needed for tests to mass produce
-    //reads, cigars, and other needed classes
-
-    final static byte [] BASES = {'A', 'C', 'T', 'G'};
-    final static byte [] QUALS = {2, 15, 25, 30};
-    final static String CIGAR = "4M";
-    final static CigarElement[] cigarElements = { new CigarElement(1, CigarOperator.HARD_CLIP),
-                                                  new CigarElement(1, CigarOperator.SOFT_CLIP),
-                                                  new CigarElement(1, CigarOperator.INSERTION),
-                                                  new CigarElement(1, CigarOperator.DELETION),
-                                                  new CigarElement(1, CigarOperator.MATCH_OR_MISMATCH)};
-
-
-    public static GATKSAMRecord makeReadFromCigar(Cigar cigar) {
-        return ArtificialSAMUtils.createArtificialRead(Utils.arrayFromArrayWithLength(BASES, cigar.getReadLength()), Utils.arrayFromArrayWithLength(QUALS, cigar.getReadLength()), cigar.toString());
-    }
-
-    public static GATKSAMRecord makeReadFromCigar(String cigarString) {
-        return makeReadFromCigar(CigarUtils.cigarFromString(cigarString));
-    }
-
-    public static List<Cigar> generateCigarList(int maximumLength) {
-        return generateCigarList(maximumLength, cigarElements);
-    }
-
-        /**
-        * This function generates every valid permutation of cigar strings (with a given set of cigarElement) with a given length.
-        *
-        * A valid cigar object obeys the following rules:
-        *  - No Hard/Soft clips in the middle of the read
-        *  - No deletions in the beginning / end of the read
-        *  - No repeated adjacent element (e.g. 1M2M -> this should be 3M)
-        *  - No consecutive I/D elements
-        *
-        * @param maximumLength the maximum number of elements in the cigar
-        * @return a list with all valid Cigar objects
-        */
-    public static List<Cigar> generateCigarList(int maximumLength, CigarElement[] cigarElements) {
-        int numCigarElements = cigarElements.length;
-        LinkedList<Cigar> cigarList = new LinkedList<Cigar>();
-        byte [] cigarCombination = new byte[maximumLength];
-
-        Utils.fillArrayWithByte(cigarCombination, (byte) 0);               // we start off with all 0's in the combination array.
-        int currentIndex = 0;
-        while (true) {
-            Cigar cigar = createCigarFromCombination(cigarCombination, cigarElements);    // create the cigar
-            cigar = CigarUtils.combineAdjacentCigarElements(cigar);                   // combine adjacent elements
-            if (CigarUtils.isCigarValid(cigar)) {                                     // check if it's valid
-                cigarList.add(cigar);                                      // add it
-            }
-
-            boolean currentIndexChanged = false;
-            while (currentIndex < maximumLength && cigarCombination[currentIndex] == numCigarElements - 1) {
-                currentIndex++;                                            // find the next index to increment
-                currentIndexChanged = true;                                // keep track of the fact that we have changed indices!
-            }
-
-            if (currentIndex == maximumLength)                             // if we hit the end of the array, we're done.
-                break;
-
-            cigarCombination[currentIndex]++;                              // otherwise advance the current index
-
-            if (currentIndexChanged) {                                     // if we have changed index, then...
-                for (int i = 0; i < currentIndex; i++)
-                    cigarCombination[i] = 0;                               // reset everything from 0->currentIndex
-                currentIndex = 0;                                          // go back to the first index
-            }
-        }
-
-        return cigarList;
-    }
-
-    private static Cigar createCigarFromCombination(byte[] cigarCombination, CigarElement[] cigarElements) {
-        Cigar cigar = new Cigar();
-        for (byte i : cigarCombination) {
-            cigar.add(cigarElements[i]);
-        }
-        return cigar;
-    }
-
-    public static GATKSAMRecord makeRead() {
-        return ArtificialSAMUtils.createArtificialRead(BASES, QUALS, CIGAR);
-    }
-
-    /**
-     * Asserts that the two reads have the same bases, qualities and cigar strings
-     *
-     * @param actual the calculated read
-     * @param expected the expected read
-     */
-    public static void assertEqualReads(GATKSAMRecord actual, GATKSAMRecord expected) {
-        // If they're both not empty, test their contents
-        if(!actual.isEmpty() && !expected.isEmpty()) {
-            Assert.assertEquals(actual.getReadBases(), expected.getReadBases());
-            Assert.assertEquals(actual.getBaseQualities(), expected.getBaseQualities());
-            Assert.assertEquals(actual.getCigarString(), expected.getCigarString());
-        }
-        // Otherwise test if they're both empty
-        else
-            Assert.assertEquals(actual.isEmpty(), expected.isEmpty());
-     }
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/clipping/ReadClipperUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/clipping/ReadClipperUnitTest.java
deleted file mode 100644
index 400e984..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/clipping/ReadClipperUnitTest.java
+++ /dev/null
@@ -1,421 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.clipping;
-
-import htsjdk.samtools.Cigar;
-import htsjdk.samtools.CigarElement;
-import htsjdk.samtools.CigarOperator;
-import org.broadinstitute.gatk.utils.BaseTest;
-import org.broadinstitute.gatk.utils.Utils;
-import org.broadinstitute.gatk.utils.sam.CigarUtils;
-import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
-import org.testng.Assert;
-import org.testng.annotations.BeforeClass;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
-
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.HashMap;
-import java.util.List;
-
-/**
- * User: roger
- * Date: 9/28/11
- */
-public class ReadClipperUnitTest extends BaseTest {
-    private final static boolean DEBUG = false;
-
-    List<Cigar> cigarList;
-    int maximumCigarSize = 10;                                                                                           // 6 is the minimum necessary number to try all combinations of cigar types with guarantee of clipping an element with length = 2
-
-    @BeforeClass
-    public void init() {
-        cigarList = ReadClipperTestUtils.generateCigarList(maximumCigarSize);
-    }
-
-    @Test(enabled = !DEBUG)
-    public void testHardClipBothEndsByReferenceCoordinates() {
-        for (Cigar cigar : cigarList) {
-            GATKSAMRecord read = ReadClipperTestUtils.makeReadFromCigar(cigar);
-            int alnStart = read.getAlignmentStart();
-            int alnEnd = read.getAlignmentEnd();
-            int readLength = alnStart - alnEnd;
-            for (int i = 0; i < readLength / 2; i++) {
-                GATKSAMRecord clippedRead = ReadClipper.hardClipBothEndsByReferenceCoordinates(read, alnStart + i, alnEnd - i);
-                Assert.assertTrue(clippedRead.getAlignmentStart() >= alnStart + i, String.format("Clipped alignment start is less than original read (minus %d): %s -> %s", i, read.getCigarString(), clippedRead.getCigarString()));
-                Assert.assertTrue(clippedRead.getAlignmentEnd() <= alnEnd + i, String.format("Clipped alignment end is greater than original read (minus %d): %s -> %s", i, read.getCigarString(), clippedRead.getCigarString()));
-                assertUnclippedLimits(read, clippedRead);
-            }
-        }
-    }
-
-    @Test(enabled = !DEBUG)
-    public void testHardClipByReadCoordinates() {
-        for (Cigar cigar : cigarList) {
-            GATKSAMRecord read = ReadClipperTestUtils.makeReadFromCigar(cigar);
-            int readLength = read.getReadLength();
-            for (int i = 0; i < readLength; i++) {
-                GATKSAMRecord clipLeft = ReadClipper.hardClipByReadCoordinates(read, 0, i);
-                Assert.assertTrue(clipLeft.getReadLength() <= readLength - i, String.format("Clipped read length is greater than original read length (minus %d): %s -> %s", i, read.getCigarString(), clipLeft.getCigarString()));
-                assertUnclippedLimits(read, clipLeft);
-
-                GATKSAMRecord clipRight = ReadClipper.hardClipByReadCoordinates(read, i, readLength - 1);
-                Assert.assertTrue(clipRight.getReadLength() <= i, String.format("Clipped read length is greater than original read length (minus %d): %s -> %s", i, read.getCigarString(), clipRight.getCigarString()));
-                assertUnclippedLimits(read, clipRight);
-            }
-        }
-    }
-
-    @DataProvider(name = "ClippedReadLengthData")
-    public Object[][] makeClippedReadLengthData() {
-        List<Object[]> tests = new ArrayList<Object[]>();
-
-        // this functionality can be adapted to provide input data for whatever you might want in your data
-        final int originalReadLength = 50;
-        for ( int nToClip = 1; nToClip < originalReadLength - 1; nToClip++ ) {
-            tests.add(new Object[]{originalReadLength, nToClip});
-        }
-
-        return tests.toArray(new Object[][]{});
-    }
-
-    @Test(dataProvider = "ClippedReadLengthData", enabled = !DEBUG)
-    public void testHardClipReadLengthIsRight(final int originalReadLength, final int nToClip) {
-        GATKSAMRecord read = ReadClipperTestUtils.makeReadFromCigar(originalReadLength + "M");
-        read.getReadLength(); // provoke the caching of the read length
-        final int expectedReadLength = originalReadLength - nToClip;
-        GATKSAMRecord clipped = ReadClipper.hardClipByReadCoordinates(read, 0, nToClip - 1);
-        Assert.assertEquals(clipped.getReadLength(), expectedReadLength,
-                String.format("Clipped read length %d with cigar %s not equal to the expected read length %d after clipping %d bases from the left from a %d bp read with cigar %s",
-                        clipped.getReadLength(), clipped.getCigar(), expectedReadLength, nToClip, read.getReadLength(), read.getCigar()));
-    }
-
-    @Test(enabled = !DEBUG)
-    public void testHardClipByReferenceCoordinates() {
-        for (Cigar cigar : cigarList) {
-            GATKSAMRecord read = ReadClipperTestUtils.makeReadFromCigar(cigar);
-            int start = read.getSoftStart();
-            int stop = read.getSoftEnd();
-
-            for (int i = start; i <= stop; i++) {
-                GATKSAMRecord clipLeft = (new ReadClipper(read)).hardClipByReferenceCoordinates(-1, i);
-                if (!clipLeft.isEmpty()) {
-                    Assert.assertTrue(clipLeft.getAlignmentStart() >= Math.min(read.getAlignmentEnd(), i + 1), String.format("Clipped alignment start (%d) is less the expected (%d): %s -> %s", clipLeft.getAlignmentStart(), i + 1, read.getCigarString(), clipLeft.getCigarString()));
-                    assertUnclippedLimits(read, clipLeft);
-                }
-
-                GATKSAMRecord clipRight = (new ReadClipper(read)).hardClipByReferenceCoordinates(i, -1);
-                if (!clipRight.isEmpty() && clipRight.getAlignmentStart() <= clipRight.getAlignmentEnd()) {             // alnStart > alnEnd if the entire read is a soft clip now. We can't test those.
-                    Assert.assertTrue(clipRight.getAlignmentEnd() <= Math.max(read.getAlignmentStart(), i - 1), String.format("Clipped alignment end (%d) is greater than expected (%d): %s -> %s", clipRight.getAlignmentEnd(), i - 1, read.getCigarString(), clipRight.getCigarString()));
-                    assertUnclippedLimits(read, clipRight);
-                }
-            }
-        }
-    }
-
-    @Test(enabled = !DEBUG)
-    public void testHardClipByReferenceCoordinatesLeftTail() {
-        for (Cigar cigar : cigarList) {
-            GATKSAMRecord read = ReadClipperTestUtils.makeReadFromCigar(cigar);
-            int alnStart = read.getAlignmentStart();
-            int alnEnd = read.getAlignmentEnd();
-            if (read.getSoftStart() == alnStart) {                                                                      // we can't test left clipping if the read has hanging soft clips on the left side
-                for (int i = alnStart; i <= alnEnd; i++) {
-                    GATKSAMRecord clipLeft = ReadClipper.hardClipByReferenceCoordinatesLeftTail(read, i);
-
-                    if (!clipLeft.isEmpty()) {
-                        Assert.assertTrue(clipLeft.getAlignmentStart() >= i + 1, String.format("Clipped alignment start (%d) is less the expected (%d): %s -> %s", clipLeft.getAlignmentStart(), i + 1, read.getCigarString(), clipLeft.getCigarString()));
-                        assertUnclippedLimits(read, clipLeft);
-                    }
-                }
-            }
-        }
-    }
-
-    @Test(enabled = !DEBUG)
-    public void testHardClipByReferenceCoordinatesRightTail() {
-        for (Cigar cigar : cigarList) {
-            GATKSAMRecord read = ReadClipperTestUtils.makeReadFromCigar(cigar);
-            int alnStart = read.getAlignmentStart();
-            int alnEnd = read.getAlignmentEnd();
-            if (read.getSoftEnd() == alnEnd) {                                                                          // we can't test right clipping if the read has hanging soft clips on the right side
-                for (int i = alnStart; i <= alnEnd; i++) {
-                    GATKSAMRecord clipRight = ReadClipper.hardClipByReferenceCoordinatesRightTail(read, i);
-                    if (!clipRight.isEmpty() && clipRight.getAlignmentStart() <= clipRight.getAlignmentEnd()) {         // alnStart > alnEnd if the entire read is a soft clip now. We can't test those.
-                        Assert.assertTrue(clipRight.getAlignmentEnd() <= i - 1, String.format("Clipped alignment end (%d) is greater than expected (%d): %s -> %s", clipRight.getAlignmentEnd(), i - 1, read.getCigarString(), clipRight.getCigarString()));
-                        assertUnclippedLimits(read, clipRight);
-                    }
-                }
-            }
-        }
-    }
-
-    @Test(enabled = !DEBUG)
-    public void testHardClipLowQualEnds() {
-        final byte LOW_QUAL = 2;
-        final byte HIGH_QUAL = 30;
-
-        /** create a read for every cigar permutation */
-        for (Cigar cigar : cigarList) {
-            GATKSAMRecord read = ReadClipperTestUtils.makeReadFromCigar(cigar);
-            int readLength = read.getReadLength();
-            byte[] quals = new byte[readLength];
-
-            for (int nLowQualBases = 0; nLowQualBases < readLength; nLowQualBases++) {
-
-                /**  create a read with nLowQualBases in the left tail */
-                Utils.fillArrayWithByte(quals, HIGH_QUAL);
-                for (int addLeft = 0; addLeft < nLowQualBases; addLeft++)
-                    quals[addLeft] = LOW_QUAL;
-                read.setBaseQualities(quals);
-                GATKSAMRecord clipLeft = ReadClipper.hardClipLowQualEnds(read, LOW_QUAL);
-                checkClippedReadsForLowQualEnds(read, clipLeft, LOW_QUAL, nLowQualBases);
-
-                /** create a read with nLowQualBases in the right tail */
-                Utils.fillArrayWithByte(quals, HIGH_QUAL);
-                for (int addRight = 0; addRight < nLowQualBases; addRight++)
-                    quals[readLength - addRight - 1] = LOW_QUAL;
-                read.setBaseQualities(quals);
-                GATKSAMRecord clipRight = ReadClipper.hardClipLowQualEnds(read, LOW_QUAL);
-                checkClippedReadsForLowQualEnds(read, clipRight, LOW_QUAL, nLowQualBases);
-
-                /** create a read with nLowQualBases on both tails */
-                if (nLowQualBases <= readLength / 2) {
-                    Utils.fillArrayWithByte(quals, HIGH_QUAL);
-                    for (int addBoth = 0; addBoth < nLowQualBases; addBoth++) {
-                        quals[addBoth] = LOW_QUAL;
-                        quals[readLength - addBoth - 1] = LOW_QUAL;
-                    }
-                    read.setBaseQualities(quals);
-                    GATKSAMRecord clipBoth = ReadClipper.hardClipLowQualEnds(read, LOW_QUAL);
-                    checkClippedReadsForLowQualEnds(read, clipBoth, LOW_QUAL, 2*nLowQualBases);
-                }
-            }
-        }
-    }
-
-    @Test(enabled = !DEBUG)
-    public void testHardClipSoftClippedBases() {
-        for (Cigar cigar : cigarList) {
-            GATKSAMRecord read = ReadClipperTestUtils.makeReadFromCigar(cigar);
-            GATKSAMRecord clippedRead = ReadClipper.hardClipSoftClippedBases(read);
-            CigarCounter original = new CigarCounter(read);
-            CigarCounter clipped = new CigarCounter(clippedRead);
-
-            assertUnclippedLimits(read, clippedRead);                                                                   // Make sure limits haven't changed
-            original.assertHardClippingSoftClips(clipped);                                                              // Make sure we have only clipped SOFT_CLIPS
-        }
-    }
-
-    @Test(enabled = false)
-    public void testHardClipLeadingInsertions() {
-        for (Cigar cigar : cigarList) {
-            if (startsWithInsertion(cigar)) {
-                GATKSAMRecord read = ReadClipperTestUtils.makeReadFromCigar(cigar);
-                GATKSAMRecord clippedRead = ReadClipper.hardClipLeadingInsertions(read);
-
-                assertUnclippedLimits(read, clippedRead);        // Make sure limits haven't changed
-
-                int expectedLength = read.getReadLength() - leadingCigarElementLength(read.getCigar(), CigarOperator.INSERTION);
-                if (cigarHasElementsDifferentThanInsertionsAndHardClips(read.getCigar()))
-                    expectedLength -= leadingCigarElementLength(CigarUtils.invertCigar(read.getCigar()), CigarOperator.INSERTION);
-
-                if (!clippedRead.isEmpty()) {
-                    Assert.assertEquals(expectedLength, clippedRead.getReadLength(), String.format("%s -> %s", read.getCigarString(), clippedRead.getCigarString()));  // check that everything else is still there
-                    Assert.assertFalse(startsWithInsertion(clippedRead.getCigar()));                                                                                   // check that the insertions are gone
-                } else
-                    Assert.assertTrue(expectedLength == 0, String.format("expected length: %d", expectedLength));                                                      // check that the read was expected to be fully clipped
-            }
-        }
-    }
-
-    @Test(enabled = !DEBUG)
-    public void testRevertSoftClippedBases() {
-        for (Cigar cigar : cigarList) {
-            final int leadingSoftClips = leadingCigarElementLength(cigar, CigarOperator.SOFT_CLIP);
-            final int tailSoftClips = leadingCigarElementLength(CigarUtils.invertCigar(cigar), CigarOperator.SOFT_CLIP);
-
-            final GATKSAMRecord read = ReadClipperTestUtils.makeReadFromCigar(cigar);
-            final GATKSAMRecord unclipped = ReadClipper.revertSoftClippedBases(read);
-
-            assertUnclippedLimits(read, unclipped);                                                                     // Make sure limits haven't changed
-
-            if (leadingSoftClips > 0 || tailSoftClips > 0) {
-                final int expectedStart = read.getAlignmentStart() - leadingSoftClips;
-                final int expectedEnd = read.getAlignmentEnd() + tailSoftClips;
-
-                Assert.assertEquals(unclipped.getAlignmentStart(), expectedStart);
-                Assert.assertEquals(unclipped.getAlignmentEnd(), expectedEnd);
-            } else
-                Assert.assertEquals(read.getCigarString(), unclipped.getCigarString());
-        }
-    }
-
-    @Test(enabled = !DEBUG)
-    public void testRevertSoftClippedBasesWithThreshold() {
-        for (Cigar cigar : cigarList) {
-            final int leadingSoftClips = leadingCigarElementLength(cigar, CigarOperator.SOFT_CLIP);
-            final int tailSoftClips = leadingCigarElementLength(CigarUtils.invertCigar(cigar), CigarOperator.SOFT_CLIP);
-
-            final GATKSAMRecord read = ReadClipperTestUtils.makeReadFromCigar(cigar);
-            final GATKSAMRecord unclipped = ReadClipper.revertSoftClippedBases(read);
-
-            assertUnclippedLimits(read, unclipped);                                                                     // Make sure limits haven't changed
-            Assert.assertNull(read.getCigar().isValid(null, -1));
-            Assert.assertNull(unclipped.getCigar().isValid(null, -1));
-
-            if (!(leadingSoftClips > 0 || tailSoftClips > 0))
-                Assert.assertEquals(read.getCigarString(), unclipped.getCigarString());
-
-        }
-    }
-
-    @DataProvider(name = "RevertSoftClipsBeforeContig")
-    public Object[][] makeRevertSoftClipsBeforeContig() {
-        List<Object[]> tests = new ArrayList<>();
-
-        // this functionality can be adapted to provide input data for whatever you might want in your data
-        for ( int softStart : Arrays.asList(-10, -1, 0) ) {
-            for ( int alignmentStart : Arrays.asList(1, 10) ) {
-                tests.add(new Object[]{softStart, alignmentStart});
-            }
-        }
-
-        return tests.toArray(new Object[][]{});
-    }
-
-    @Test(enabled = true, dataProvider = "RevertSoftClipsBeforeContig")
-    public void testRevertSoftClippedBasesBeforeStartOfContig(final int softStart, final int alignmentStart) {
-        final int nMatches = 10;
-        final int nSoft = -1 * (softStart - alignmentStart);
-        final String cigar = nSoft + "S" + nMatches + "M";
-        final GATKSAMRecord read = ReadClipperTestUtils.makeReadFromCigar(cigar);
-        read.setAlignmentStart(alignmentStart);
-
-        Assert.assertEquals(read.getSoftStart(), softStart);
-        Assert.assertEquals(read.getAlignmentStart(), alignmentStart);
-        Assert.assertEquals(read.getCigarString(), cigar);
-
-        final GATKSAMRecord reverted = ReadClipper.revertSoftClippedBases(read);
-
-        final int expectedAlignmentStart = 1;
-        final String expectedCigar = (1 - softStart) + "H" + read.getAlignmentEnd() + "M";
-        Assert.assertEquals(reverted.getSoftStart(), expectedAlignmentStart);
-        Assert.assertEquals(reverted.getAlignmentStart(), expectedAlignmentStart);
-        Assert.assertEquals(reverted.getCigarString(), expectedCigar);
-    }
-
-    private void assertNoLowQualBases(GATKSAMRecord read, byte low_qual) {
-        if (!read.isEmpty()) {
-            byte[] quals = read.getBaseQualities();
-            for (int i = 0; i < quals.length; i++)
-                Assert.assertFalse(quals[i] <= low_qual, String.format("Found low qual (%d) base after hard clipping. Position: %d -- %s", low_qual, i, read.getCigarString()));
-        }
-    }
-
-    private void checkClippedReadsForLowQualEnds(GATKSAMRecord read, GATKSAMRecord clippedRead, byte lowQual, int nLowQualBases) {
-        assertUnclippedLimits(read, clippedRead);                                                                       // Make sure limits haven't changed
-        assertNoLowQualBases(clippedRead, lowQual);                                                                     // Make sure the low qualities are gone
-    }
-
-    /**
-     * Asserts that clipping doesn't change the getUnclippedStart / getUnclippedEnd
-     *
-     * @param original original read
-     * @param clipped clipped read
-     */
-    private void assertUnclippedLimits(GATKSAMRecord original, GATKSAMRecord clipped) {
-        if (CigarUtils.readHasNonClippedBases(clipped)) {
-            Assert.assertEquals(original.getUnclippedStart(), clipped.getUnclippedStart());
-            Assert.assertEquals(original.getUnclippedEnd(), clipped.getUnclippedEnd());
-        }
-    }
-
-    private boolean startsWithInsertion(Cigar cigar) {
-        return leadingCigarElementLength(cigar, CigarOperator.INSERTION) > 0;
-    }
-
-    private int leadingCigarElementLength(Cigar cigar, CigarOperator operator) {
-        for (CigarElement cigarElement : cigar.getCigarElements()) {
-            if (cigarElement.getOperator() == operator)
-                return cigarElement.getLength();
-            if (cigarElement.getOperator() != CigarOperator.HARD_CLIP)
-                break;
-        }
-        return 0;
-    }
-
-    private boolean cigarHasElementsDifferentThanInsertionsAndHardClips(Cigar cigar) {
-        for (CigarElement cigarElement : cigar.getCigarElements())
-            if (cigarElement.getOperator() != CigarOperator.INSERTION && cigarElement.getOperator() != CigarOperator.HARD_CLIP)
-                return true;
-        return false;
-    }
-
-    private class CigarCounter {
-        private HashMap<CigarOperator, Integer> counter;
-
-        public Integer getCounterForOp(CigarOperator operator) {
-            return counter.get(operator);
-        }
-
-        public CigarCounter(GATKSAMRecord read) {
-            CigarOperator[] operators = CigarOperator.values();
-            counter = new HashMap<CigarOperator, Integer>(operators.length);
-
-            for (CigarOperator op : operators)
-                counter.put(op, 0);
-
-            for (CigarElement cigarElement : read.getCigar().getCigarElements())
-                counter.put(cigarElement.getOperator(), counter.get(cigarElement.getOperator()) + cigarElement.getLength());
-        }
-
-        public boolean assertHardClippingSoftClips(CigarCounter clipped) {
-            for (CigarOperator op : counter.keySet()) {
-                if (op == CigarOperator.HARD_CLIP || op == CigarOperator.SOFT_CLIP) {
-                    int counterTotal = counter.get(CigarOperator.HARD_CLIP) + counter.get(CigarOperator.SOFT_CLIP);
-                    int clippedHard = clipped.getCounterForOp(CigarOperator.HARD_CLIP);
-                    int clippedSoft = clipped.getCounterForOp(CigarOperator.SOFT_CLIP);
-
-                    Assert.assertEquals(counterTotal, clippedHard);
-                    Assert.assertTrue(clippedSoft == 0);
-                } else
-                    Assert.assertEquals(counter.get(op), clipped.getCounterForOp(op));
-            }
-            return true;
-        }
-
-    }
-
-    @Test(enabled = !DEBUG)
-    public void testRevertEntirelySoftclippedReads() {
-        GATKSAMRecord read = ReadClipperTestUtils.makeReadFromCigar("2H1S3H");
-        GATKSAMRecord clippedRead = ReadClipper.revertSoftClippedBases(read);
-        Assert.assertEquals(clippedRead.getAlignmentStart(), read.getSoftStart());
-    }
-
-}
\ No newline at end of file
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/codecs/hapmap/HapMapUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/codecs/hapmap/HapMapUnitTest.java
deleted file mode 100644
index 0ff50d7..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/codecs/hapmap/HapMapUnitTest.java
+++ /dev/null
@@ -1,164 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.codecs.hapmap;
-
-import htsjdk.tribble.annotation.Strand;
-import htsjdk.tribble.readers.LineIterator;
-import htsjdk.tribble.readers.LineIteratorImpl;
-import htsjdk.tribble.readers.LineReaderUtil;
-import htsjdk.tribble.readers.PositionalBufferedStream;
-import org.broadinstitute.gatk.utils.BaseTest;
-import org.testng.Assert;
-import org.testng.annotations.Test;
-
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileNotFoundException;
-import java.io.IOException;
-
-/**
- * Unit tests for the HapMap codec
- */
-public class HapMapUnitTest extends BaseTest {
-    // our sample hapmap file
-    private final static File hapMapFile = new File(privateTestDir + "genotypes_chr1_ASW_phase3.3_first500.hapmap");
-    private final static String knownLine = "rs2185539 C/T chr1 556738 + ncbi_b36 bbs urn:lsid:bbs.hapmap.org:Protocol:Phase3.r3:1 urn:lsid:bbs.hapmap.org:Assay:Phase3.r3_r" +
-            "s2185539:1 urn:lsid:dcc.hapmap.org:Panel:US_African-30-trios:4 QC+ CC TC TT CT CC CC CC CC CC CC CC CC CC";
-    /**
-     * test reading the header off of the file.  We take in the file, read off the first line,
-     * close the reader, and then ask the HapMap decoder for the header with a new reader.  These should
-     * be equal (i.e. they return the same object).
-     */
-    @Test
-    public void testReadHeader() {
-        RawHapMapCodec codec = new RawHapMapCodec();
-        final LineIterator reader = getLineIterator();
-        try {
-            String header = reader.next();
-            Assert.assertTrue(header.equals(codec.readActualHeader(getLineIterator())));
-        } finally {
-            codec.close(reader);
-        }
-    }
-
-    @Test
-    public void testKnownRecordConversion() {
-        RawHapMapCodec codec = new RawHapMapCodec();
-        RawHapMapFeature feature = (RawHapMapFeature)codec.decode(knownLine);
-
-
-        // check that the alleles are right
-        Assert.assertEquals(feature.getAlleles().length,2);
-        Assert.assertTrue("C".equals(feature.getAlleles()[0]));
-        Assert.assertTrue("T".equals(feature.getAlleles()[1]));
-
-        // check the name
-        Assert.assertTrue("rs2185539".equals(feature.getName()));
-
-        // check the position
-        Assert.assertEquals(feature.getStart(),556738);
-        Assert.assertEquals(feature.getEnd(),556738);
-
-        // check the contig
-        Assert.assertTrue("chr1".equals(feature.getChr()));
-                
-        // check the assembly, center, protLSID, assayLSID, panelLSID, and qccode
-        Assert.assertTrue("ncbi_b36".equals(feature.getAssembly()));
-        Assert.assertTrue("bbs".equals(feature.getCenter()));
-        Assert.assertTrue("urn:lsid:bbs.hapmap.org:Protocol:Phase3.r3:1".equals(feature.getProtLSID()));
-        Assert.assertTrue("urn:lsid:bbs.hapmap.org:Assay:Phase3.r3_rs2185539:1".equals(feature.getAssayLSID()));
-        Assert.assertTrue("urn:lsid:dcc.hapmap.org:Panel:US_African-30-trios:4".equals(feature.getPanelLSID()));
-        Assert.assertTrue("QC+".equals(feature.getQCCode()));
-
-        // check the strand
-        Assert.assertEquals(feature.getStrand(),Strand.POSITIVE);
-
-        // check the genotypes
-        int x = 0;
-        for (; x < feature.getGenotypes().length; x++) {
-            switch (x) {
-                case 1: Assert.assertTrue("TC".equals(feature.getGenotypes()[x])); break;
-                case 2: Assert.assertTrue("TT".equals(feature.getGenotypes()[x])); break;
-                case 3: Assert.assertTrue("CT".equals(feature.getGenotypes()[x])); break;
-                default: Assert.assertTrue("CC".equals(feature.getGenotypes()[x])); break;
-            }
-        }
-        // assert that we found the correct number of records
-        Assert.assertEquals(x,13);
-    }
-
-    @Test
-    public void testReadCorrectNumberOfRecords() {
-        // setup the record for reading our 500 line file (499 records, 1 header line)
-        RawHapMapCodec codec = new RawHapMapCodec();
-        final LineIterator reader = getLineIterator();
-
-        int count = 0;
-        try {
-            codec.readHeader(reader);
-            while (reader.hasNext()) {
-                codec.decode(reader.next());
-                ++count;
-            }
-        } catch (IOException e) {
-            Assert.fail("IOException " + e.getMessage());
-        } finally {
-            codec.close(reader);
-        }
-        Assert.assertEquals(count,499);
-    }
-
-    @Test
-    public void testGetSampleNames() {
-        // setup the record for reading our 500 line file (499 records, 1 header line)
-        RawHapMapCodec codec = new RawHapMapCodec();
-        final LineIterator reader = getLineIterator();
-
-        String line;
-        try {
-            codec.readHeader(reader);
-            line = reader.next();
-            RawHapMapFeature feature = (RawHapMapFeature) codec.decode(line);
-            Assert.assertEquals(feature.getSampleIDs().length,87);
-
-        } catch (IOException e) {
-            Assert.fail("IOException " + e.getMessage());
-        } finally {
-            codec.close(reader);
-        }
-    }
-
-
-    public LineIterator getLineIterator() {
-        try {
-            return new LineIteratorImpl(LineReaderUtil.fromBufferedStream(new PositionalBufferedStream(new FileInputStream(hapMapFile))));
-        } catch (FileNotFoundException e) {
-            Assert.fail("Unable to open hapmap file : " + hapMapFile);
-        }
-        return null; // for intellij, it doesn't know that assert.fail is fatal
-    }
-
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/collections/DefaultHashMapUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/collections/DefaultHashMapUnitTest.java
deleted file mode 100755
index a87aeba..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/collections/DefaultHashMapUnitTest.java
+++ /dev/null
@@ -1,159 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.collections;
-
-
-// the imports for unit testing.
-
-import org.broadinstitute.gatk.utils.BaseTest;
-import org.testng.Assert;
-import org.testng.annotations.BeforeMethod;
-import org.testng.annotations.Test;
-
-
-/**
- * Basic unit test for DefaultHashMap
- */
-public class DefaultHashMapUnitTest extends BaseTest {
-    DefaultHashMap<String, Double> empty, hasOne, hasTen;
-    Double initialDefault = 10.0;
-
-    @BeforeMethod
-    public void before() {
-        empty = new DefaultHashMap<String, Double>(initialDefault);
-
-        hasOne = new DefaultHashMap<String, Double>(initialDefault);
-        hasOne.put("1", .1);
-
-        hasTen = new DefaultHashMap<String, Double>(initialDefault);
-        for (Integer i = 1; i <= 10; i++) {
-            hasTen.put(i.toString(), i.doubleValue() / 10);
-        }
-    }
-
-    @Test
-    public void testBasicSizes() {
-        logger.warn("Executing testBasicSizes");
-
-        Assert.assertEquals(0, empty.size());
-        Assert.assertEquals(1, hasOne.size());
-        Assert.assertEquals(10, hasTen.size());
-    }
-
-    @Test
-    public void testTenElements() {
-        logger.warn("Executing testTenElements");
-
-        for (Integer i = 1; i <= 10; i++) {
-            Assert.assertEquals(i.doubleValue() / 10, hasTen.get(i.toString()));
-        }
-        Assert.assertEquals(initialDefault, hasTen.get("0"));
-    }
-
-    @Test
-    public void testClear() {
-        logger.warn("Executing testClear");
-
-        empty.clear();
-        hasOne.clear();
-        hasTen.clear();
-
-        Assert.assertEquals(0, empty.size());
-        Assert.assertEquals(0, hasOne.size());
-        Assert.assertEquals(0, hasTen.size());
-    }
-
-
-    @Test
-    public void testSettingTenElements() {
-        logger.warn("Executing testSettingTenElements");
-
-        Assert.assertEquals(10, hasTen.size());
-        for (Integer i = 1; i <= 10; i++) {
-            hasTen.put(i.toString(), i.doubleValue());
-        }
-
-        Assert.assertEquals(10, hasTen.size());
-        for (Integer i = 1; i <= 10; i++) {
-            Assert.assertEquals(i.doubleValue(), hasTen.get(i.toString()));
-        }
-    }
-
-    @Test
-    public void testSettingDefault() {
-        logger.warn("Executing testSettingDefault");
-
-        Assert.assertEquals(initialDefault, empty.get("0"));
-        Assert.assertEquals(initialDefault, hasOne.get("0"));
-        Assert.assertEquals(initialDefault, hasTen.get("0"));
-
-        empty.setDefaultValue(2 * initialDefault);
-        hasOne.setDefaultValue(2 * initialDefault);
-        hasTen.setDefaultValue(2 * initialDefault);
-
-        Assert.assertEquals(2 * initialDefault, empty.get("0"));
-        Assert.assertEquals(2 * initialDefault, hasOne.get("0"));
-        Assert.assertEquals(2 * initialDefault, hasTen.get("0"));
-
-    }
-
-    @Test
-    public void testAdd() {
-        logger.warn("Executing testAdd");
-
-        Assert.assertEquals(0, empty.size());
-
-        Double x = 1.0;
-        empty.put(x.toString(), x / 10);
-        Assert.assertEquals(1, empty.size());
-        Assert.assertEquals(.1, empty.get(x.toString()));
-
-        x = 2.0;
-        empty.put(x.toString(), x / 10);
-        Assert.assertEquals(2, empty.size());
-        Assert.assertEquals(.2, empty.get(x.toString()));
-
-    }
-
-    @Test
-    public void testUnset() {
-        logger.warn("Executing testUnset1");
-
-        Assert.assertEquals(10, hasTen.size());
-        Assert.assertEquals(.9, hasTen.get("9"));
-
-        hasTen.remove("9");
-
-        Assert.assertEquals(9, hasTen.size());
-        Assert.assertEquals(initialDefault, hasTen.get("9"));
-
-        hasTen.remove("1");
-
-        Assert.assertEquals(8, hasTen.size());
-        Assert.assertEquals(initialDefault, hasTen.get("1"));
-
-    }
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/collections/ExpandingArrayListUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/collections/ExpandingArrayListUnitTest.java
deleted file mode 100644
index 7f9d808..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/collections/ExpandingArrayListUnitTest.java
+++ /dev/null
@@ -1,177 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.collections;
-
-
-// the imports for unit testing.
-
-
-import org.testng.Assert;
-import org.testng.annotations.Test;
-import org.testng.annotations.BeforeMethod;
-
-import org.broadinstitute.gatk.utils.BaseTest;
-
-import java.util.Arrays;
-
-/**
- * Basic unit test for RecalData
- */
-public class ExpandingArrayListUnitTest extends BaseTest {
-    ExpandingArrayList<Integer> empty, initCap10, hasOne, hasTen;
-
-    @BeforeMethod
-    public void before() {
-        empty = new ExpandingArrayList<Integer>();
-
-        initCap10 = new ExpandingArrayList<Integer>(10);
-
-        hasOne = new ExpandingArrayList<Integer>();
-        hasOne.add(1);
-
-        hasTen = new ExpandingArrayList<Integer>();
-        hasTen.addAll(Arrays.asList(1, 2, 3, 4, 5, 6, 7, 8, 9, 10));
-    }
-
-    @Test
-    public void testBasicSizes() {
-        logger.warn("Executing testBasicSizes");
-
-        Assert.assertEquals(0, empty.size());
-        Assert.assertEquals(0, initCap10.size());
-        Assert.assertEquals(1, hasOne.size());
-        Assert.assertEquals(10, hasTen.size());
-    }
-
-    @Test
-    public void testTenElements() {
-        logger.warn("Executing testTenElements");
-
-        for ( int i = 0; i < 10; i++ ) {
-            Assert.assertEquals(i+1, (int)hasTen.get(i));
-        }
-    }
-
-    @Test
-    public void testSettingTenElements() {
-        logger.warn("Executing testSettingTenElements");
-
-        for ( int i = 0; i < 10; i++ ) {
-            Assert.assertEquals(i+1, (int)hasTen.set(i, 2*i));
-        }
-
-        Assert.assertEquals(10, hasTen.size());
-        for ( int i = 0; i < 10; i++ ) {
-            Assert.assertEquals(2*i, (int)hasTen.get(i));
-        }
-    }
-
-    @Test
-    public void testAdd() {
-        logger.warn("Executing testAdd");
-
-        Assert.assertEquals(0, empty.size());
-        empty.add(1);
-        Assert.assertEquals(1, empty.size());
-        Assert.assertEquals(1, (int)empty.get(0));
-        empty.add(2);
-        Assert.assertEquals(2, empty.size());
-        Assert.assertEquals(2, (int)empty.get(1));
-    }
-
-    @Test
-    public void testSet1() {
-        logger.warn("Executing testSet1");
-
-        Assert.assertEquals(0, empty.size());
-        empty.set(0, 1);
-        Assert.assertEquals(1, empty.size());
-        Assert.assertEquals(1, (int)empty.get(0));
-
-        empty.set(1, 2);
-        Assert.assertEquals(2, empty.size());
-        Assert.assertEquals(2, (int)empty.get(1));
-
-        // doesn't expand
-        empty.set(0, 3);
-        Assert.assertEquals(2, empty.size());
-        Assert.assertEquals(3, (int)empty.get(0));
-    }
-
-    @Test
-    public void testSetExpanding() {
-        logger.warn("Executing testSetExpanding");
-
-        Assert.assertEquals(0, empty.size());
-        empty.set(3, 1);
-        Assert.assertEquals(4, empty.size());
-        Assert.assertEquals(empty.get(0), null);
-        Assert.assertEquals(empty.get(1), null);
-        Assert.assertEquals(empty.get(2), null);
-        Assert.assertEquals(1, (int)empty.get(3));
-    }
-
-    @Test
-    public void testSetExpandingReset() {
-        logger.warn("Executing testSetExpandingReset");
-
-        Assert.assertEquals(0, empty.size());
-        empty.set(3, 3);
-        empty.set(2, 2);
-        empty.set(1, 1);
-        empty.set(0, 0);
-        Assert.assertEquals(4, empty.size());
-        for ( int i = 0; i < 4; i++ )
-            Assert.assertEquals(i, (int)empty.get(i));
-    }
-
-    @Test
-    public void testSetExpandingBig() {
-        logger.warn("Executing testSetExpandingBig");
-
-        Assert.assertEquals(0, empty.size());
-        empty.set(1000, 1000);
-        Assert.assertEquals(1001, empty.size());
-        for ( int i = 0; i < 1000; i++ )
-            Assert.assertEquals(empty.get(i), null);
-        Assert.assertEquals(1000, (int)empty.get(1000));
-    }
-
-    @Test (expectedExceptions=IndexOutOfBoundsException.class )
-    public void testSetBadGetNegative() {
-        logger.warn("Executing testSetBadGetNegative");
-        empty.get(-1);
-    }
-
-    @Test
-    public void testSetBadGetPost() {
-        logger.warn("Executing testSetBadGetPost");
-        empty.set(1, 1);
-        Assert.assertEquals(empty.get(0), null);
-        Assert.assertEquals(1, (int)empty.get(1));
-        Assert.assertEquals(empty.get(2), null);
-    }
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/commandline/ArgumentMatchSiteUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/commandline/ArgumentMatchSiteUnitTest.java
deleted file mode 100644
index b1ba784..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/commandline/ArgumentMatchSiteUnitTest.java
+++ /dev/null
@@ -1,80 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.commandline;
-
-import org.testng.Assert;
-import org.testng.annotations.Test;
-
-import java.io.File;
-
-public class ArgumentMatchSiteUnitTest {
-    @Test
-    public void testCommandLine() {
-        ArgumentMatchSite site = new ArgumentMatchSite(ArgumentMatchSource.COMMAND_LINE, 1);
-        Assert.assertEquals(site.getSource(), ArgumentMatchSource.COMMAND_LINE);
-        Assert.assertEquals(site.getIndex(), 1);
-    }
-
-    @Test
-    public void testFile() {
-        ArgumentMatchSource source = new ArgumentMatchFileSource(new File("test"));
-        ArgumentMatchSite site = new ArgumentMatchSite(source, 1);
-        Assert.assertEquals(site.getSource(), source);
-        Assert.assertEquals(site.getIndex(), 1);
-    }
-
-    @Test
-    public void testEquals() {
-        ArgumentMatchSource cmdLine = ArgumentMatchSource.COMMAND_LINE;
-        ArgumentMatchSite site1 = new ArgumentMatchSite(cmdLine, 1);
-        ArgumentMatchSite site2 = new ArgumentMatchSite(cmdLine, 2);
-
-        Assert.assertFalse(site1.equals(null));
-
-        Assert.assertTrue(site1.equals(site1));
-        Assert.assertFalse(site1.equals(site2));
-
-        Assert.assertFalse(site2.equals(site1));
-        Assert.assertTrue(site2.equals(site2));
-    }
-
-    @Test
-    public void testCompareTo() {
-        ArgumentMatchSource cmdLine = ArgumentMatchSource.COMMAND_LINE;
-        ArgumentMatchSite site1 = new ArgumentMatchSite(cmdLine, 1);
-        ArgumentMatchSite site2 = new ArgumentMatchSite(cmdLine, 2);
-
-        Assert.assertTrue(site1.compareTo(site1) == 0);
-        Assert.assertTrue(site1.compareTo(site2) < 0);
-        Assert.assertTrue(site2.compareTo(site1) > 0);
-        Assert.assertTrue(site2.compareTo(site2) == 0);
-    }
-
-    @Test(expectedExceptions = NullPointerException.class)
-    public void testCompareToNull() {
-        new ArgumentMatchSite(ArgumentMatchSource.COMMAND_LINE, 0).compareTo(null);
-    }
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/commandline/ArgumentMatchSourceUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/commandline/ArgumentMatchSourceUnitTest.java
deleted file mode 100644
index 8837f4b..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/commandline/ArgumentMatchSourceUnitTest.java
+++ /dev/null
@@ -1,99 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.commandline;
-
-import org.broadinstitute.gatk.utils.BaseTest;
-import org.testng.Assert;
-import org.testng.annotations.Test;
-
-import java.io.File;
-
-public class ArgumentMatchSourceUnitTest extends BaseTest {
-    @Test
-    public void testCommandLine() {
-        ArgumentMatchSource source = ArgumentMatchSource.COMMAND_LINE;
-        Assert.assertEquals(source.getType(), ArgumentMatchSourceType.CommandLine);
-        Assert.assertNull(source.getDescription());
-    }
-
-    @Test
-    public void testFile() {
-        File f = new File("test");
-        ArgumentMatchSource source = new ArgumentMatchFileSource(f);
-        Assert.assertEquals(source.getType(), ArgumentMatchSourceType.Provider);
-        Assert.assertEquals(source.getDescription(), "file " + f.getAbsolutePath());
-    }
-
-    @Test(expectedExceptions = IllegalArgumentException.class)
-    public void testNullFile() {
-        new ArgumentMatchSource(null);
-    }
-
-    @Test
-    public void testEquals() {
-        ArgumentMatchSource cmdLine = ArgumentMatchSource.COMMAND_LINE;
-        ArgumentMatchSource fileA = new ArgumentMatchFileSource(new File("a"));
-        ArgumentMatchSource fileB = new ArgumentMatchFileSource(new File("b"));
-
-        Assert.assertFalse(cmdLine.equals(null));
-
-        Assert.assertTrue(cmdLine.equals(cmdLine));
-        Assert.assertFalse(cmdLine.equals(fileA));
-        Assert.assertFalse(cmdLine.equals(fileB));
-
-        Assert.assertFalse(fileA.equals(cmdLine));
-        Assert.assertTrue(fileA.equals(fileA));
-        Assert.assertFalse(fileA.equals(fileB));
-
-        Assert.assertFalse(fileB.equals(cmdLine));
-        Assert.assertFalse(fileB.equals(fileA));
-        Assert.assertTrue(fileB.equals(fileB));
-    }
-
-    @Test
-    public void testCompareTo() {
-        ArgumentMatchSource cmdLine = ArgumentMatchSource.COMMAND_LINE;
-        ArgumentMatchSource fileA = new ArgumentMatchFileSource(new File("a"));
-        ArgumentMatchSource fileB = new ArgumentMatchFileSource(new File("b"));
-
-        Assert.assertTrue(cmdLine.compareTo(cmdLine) == 0);
-        Assert.assertTrue(cmdLine.compareTo(fileA) < 0);
-        Assert.assertTrue(cmdLine.compareTo(fileB) < 0);
-
-        Assert.assertTrue(fileA.compareTo(cmdLine) > 0);
-        Assert.assertTrue(fileA.compareTo(fileA) == 0);
-        Assert.assertTrue(fileA.compareTo(fileB) < 0);
-
-        Assert.assertTrue(fileB.compareTo(cmdLine) > 0);
-        Assert.assertTrue(fileB.compareTo(fileA) > 0);
-        Assert.assertTrue(fileB.compareTo(fileB) == 0);
-    }
-
-    @Test(expectedExceptions = NullPointerException.class)
-    public void testCompareToNull() {
-        ArgumentMatchSource.COMMAND_LINE.compareTo(null);
-    }
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/commandline/ArgumentTypeDescriptorUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/commandline/ArgumentTypeDescriptorUnitTest.java
deleted file mode 100644
index 1dfffa3..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/commandline/ArgumentTypeDescriptorUnitTest.java
+++ /dev/null
@@ -1,233 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.commandline;
-
-import htsjdk.variant.variantcontext.VariantContext;
-import it.unimi.dsi.fastutil.objects.ObjectArrayList;
-import htsjdk.samtools.SAMFileWriter;
-import org.broadinstitute.gatk.utils.BaseTest;
-import org.broadinstitute.gatk.engine.GenomeAnalysisEngine;
-import org.broadinstitute.gatk.engine.io.stubs.*;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-import htsjdk.variant.variantcontext.writer.VariantContextWriter;
-import org.testng.Assert;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
-
-import java.io.File;
-import java.io.IOException;
-import java.io.PrintStream;
-import java.util.Arrays;
-import java.util.Collection;
-
-
-public class ArgumentTypeDescriptorUnitTest extends BaseTest {
-
-    ////////////////////////////////////////////////////////////////////
-    // This section tests the functionality of the @Output annotation //
-    ////////////////////////////////////////////////////////////////////
-
-    private class ATDTestCommandLineProgram extends CommandLineProgram {
-        public int execute() { return 0; }
-
-        @Override
-        public Collection<ArgumentTypeDescriptor> getArgumentTypeDescriptors() {
-            final GenomeAnalysisEngine engine = new GenomeAnalysisEngine();
-            return Arrays.asList( new SAMFileWriterArgumentTypeDescriptor(engine, System.out),
-                    new OutputStreamArgumentTypeDescriptor(engine, System.out),
-                    new VCFWriterArgumentTypeDescriptor(engine, System.out, null));
-        }
-
-        protected abstract class ATDTestOutputArgumentSource {
-            public abstract Object getOut();
-        }
-
-        protected class OutputRequiredSamArgumentSource extends ATDTestOutputArgumentSource {
-            @Output(shortName="o", doc="output file", required = true)
-            public SAMFileWriter out;
-            public Object getOut() { return out; }
-        }
-
-        protected class OutputRequiredVcfArgumentSource extends ATDTestOutputArgumentSource {
-            @Output(shortName="o", doc="output file", required = true)
-            public VariantContextWriter out;
-            public Object getOut() { return out; }
-        }
-
-        protected class OutputRequiredStreamArgumentSource extends ATDTestOutputArgumentSource {
-            @Output(shortName="o", doc="output file", required = true)
-            public PrintStream out;
-            public Object getOut() { return out; }
-        }
-
-        protected class OutputNotRequiredNoDefaultSamArgumentSource extends ATDTestOutputArgumentSource {
-            @Output(shortName="o", doc="output file", required = false, defaultToStdout = false)
-            public SAMFileWriter out;
-            public Object getOut() { return out; }
-        }
-
-        protected class OutputNotRequiredNoDefaultVcfArgumentSource extends ATDTestOutputArgumentSource {
-            @Output(shortName="o", doc="output file", required = false, defaultToStdout = false)
-            public VariantContextWriter out;
-            public Object getOut() { return out; }
-        }
-
-        protected class OutputNotRequiredNoDefaultStreamArgumentSource extends ATDTestOutputArgumentSource {
-            @Output(shortName="o", doc="output file", required = false, defaultToStdout = false)
-            public PrintStream out;
-            public Object getOut() { return out; }
-        }
-
-        protected class OutputNotRequiredSamArgumentSource extends ATDTestOutputArgumentSource {
-            @Output(shortName="o", doc="output file", required = false)
-            public SAMFileWriter out;
-            public Object getOut() { return out; }
-        }
-
-        protected class OutputNotRequiredVcfArgumentSource extends ATDTestOutputArgumentSource {
-            @Output(shortName="o", doc="output file", required = false)
-            public VariantContextWriter out;
-            public Object getOut() { return out; }
-        }
-
-        protected class OutputNotRequiredStreamArgumentSource extends ATDTestOutputArgumentSource {
-            @Output(shortName="o", doc="output file", required = false)
-            public PrintStream out;
-            public Object getOut() { return out; }
-        }
-    }
-
-    @DataProvider(name = "OutputProvider")
-    public Object[][] OutputProvider() {
-
-        ObjectArrayList<Object[]> tests = new ObjectArrayList<Object[]>();
-
-        final ATDTestCommandLineProgram clp = new ATDTestCommandLineProgram();
-
-        for ( final Object obj : Arrays.asList(clp.new OutputRequiredSamArgumentSource(), clp.new OutputRequiredVcfArgumentSource(), clp.new OutputRequiredStreamArgumentSource()) ) {
-            for ( final boolean provided : Arrays.asList(true, false) ) {
-                tests.add(new Object[]{obj, true, true, provided});
-            }
-        }
-
-        for ( final Object obj : Arrays.asList(clp.new OutputNotRequiredSamArgumentSource(), clp.new OutputNotRequiredVcfArgumentSource(), clp.new OutputNotRequiredStreamArgumentSource()) ) {
-            for ( final boolean provided : Arrays.asList(true, false) ) {
-                tests.add(new Object[]{obj, false, true, provided});
-            }
-        }
-
-        for ( final Object obj : Arrays.asList(clp.new OutputNotRequiredNoDefaultSamArgumentSource(), clp.new OutputNotRequiredNoDefaultVcfArgumentSource(), clp.new OutputNotRequiredNoDefaultStreamArgumentSource()) ) {
-            for ( final boolean provided : Arrays.asList(true, false) ) {
-                tests.add(new Object[]{obj, false, false, provided});
-            }
-        }
-
-        return tests.toArray(new Object[][]{});
-    }
-
-    @Test(dataProvider = "OutputProvider")
-    public void testOutput(final ATDTestCommandLineProgram.ATDTestOutputArgumentSource argumentSource, final boolean required, final boolean hasDefault, final boolean provided) {
-
-        final ParsingEngine parser = new ParsingEngine(new ATDTestCommandLineProgram());
-        parser.addArgumentSource(argumentSource.getClass());
-        parser.parse(provided ? new String[] {"out", "foo"} : new String[] {});
-
-        try {
-            parser.loadArgumentsIntoObject(argumentSource);
-
-            if ( !provided && (required || !hasDefault) )
-                Assert.assertEquals(argumentSource.getOut(), null);
-            else if ( !provided )
-                Assert.assertNotEquals(argumentSource.getOut(), null);
-            else if ( argumentSource.getOut() == null || !(argumentSource.getOut() instanceof SAMFileWriterStub) ) // can't test this one case
-                Assert.assertEquals(!provided, outputIsStdout(argumentSource.getOut()));
-
-        } catch (Exception e) {
-            throw new ReviewedGATKException(e.getMessage());
-        }
-    }
-
-    @Test
-    public void testRodBindingsCollection() {
-
-        final ParsingEngine parser = new ParsingEngine(new ATDTestCommandLineProgram());
-
-        //A list file containing a single VCF
-        final File listFile = createTempListFile("oneVCF", privateTestDir + "empty.vcf");
-
-        try {
-            Object result = ArgumentTypeDescriptor.getRodBindingsCollection(listFile,
-                    parser,
-                    VariantContext.class,
-                    "variant",
-                    new Tags(),
-                    "variantTest");
-            if (!(result instanceof RodBindingCollection))
-                throw new ReviewedGATKException("getRodBindingsCollection did not return a RodBindingCollection");
-            RodBindingCollection<?> rbc = (RodBindingCollection) result;
-
-            Assert.assertEquals(rbc.getType(), VariantContext.class);
-            Assert.assertEquals(rbc.getRodBindings().size(), 1);
-
-        } catch (IOException e) {
-            throw new ReviewedGATKException(e.getMessage(), e);
-        }
-
-        //The same file, now with an extra blank line
-        final File listFileWithBlank = createTempListFile("oneVCFwithBlankLine", privateTestDir + "empty.vcf", "");
-        try {
-            Object result = ArgumentTypeDescriptor.getRodBindingsCollection(listFileWithBlank,
-                    parser,
-                    VariantContext.class,
-                    "variant",
-                    new Tags(),
-                    "variantTest");
-            if (!(result instanceof RodBindingCollection))
-                throw new ReviewedGATKException("getRodBindingsCollection did not return a RodBindingCollection");
-            RodBindingCollection<?> rbc = (RodBindingCollection) result;
-
-            Assert.assertEquals(rbc.getType(), VariantContext.class);
-            Assert.assertEquals(rbc.getRodBindings().size(), 1);
-
-        } catch (IOException e) {
-            throw new ReviewedGATKException(e.getMessage(), e);
-        }
-    }
-
-    private static boolean outputIsStdout(final Object out) {
-        if ( out == null ) {
-            return false;
-        } else if ( out instanceof SAMFileWriterStub ) {
-            return ((SAMFileWriterStub)out).getOutputStream() != System.out;
-        } else if ( out instanceof VariantContextWriterStub ) {
-            return ((VariantContextWriterStub)out).getOutputStream() == System.out;
-        } else if ( out instanceof OutputStreamStub ) {
-            return ((OutputStreamStub)out).getOutputStream() == System.out;
-        }
-        return false;
-    }
-
-}
\ No newline at end of file
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/commandline/InvalidArgumentIntegrationTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/commandline/InvalidArgumentIntegrationTest.java
deleted file mode 100644
index 8ab2159..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/commandline/InvalidArgumentIntegrationTest.java
+++ /dev/null
@@ -1,66 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.commandline;
-
-import org.broadinstitute.gatk.utils.BaseTest;
-import org.broadinstitute.gatk.engine.walkers.WalkerTest;
-import org.broadinstitute.gatk.utils.exceptions.UserException;
-
-import org.testng.annotations.Test;
-import org.testng.annotations.DataProvider;
-
-/**
- * Created by IntelliJ IDEA.
- * User: chartl
- * Date: 8/31/12
- * Time: 11:03 AM
- * To change this template use File | Settings | File Templates.
- */
-public class InvalidArgumentIntegrationTest extends WalkerTest {
-    private static final String callsB36  = BaseTest.validationDataLocation + "lowpass.N3.chr1.raw.vcf";
-
-    private WalkerTest.WalkerTestSpec baseTest(String flag, String arg, Class exeption) {
-        return new WalkerTest.WalkerTestSpec("-T VariantsToTable -M 10 --variant:vcf "
-                + callsB36 + " -F POS,CHROM -R "
-                + b36KGReference +  " -o %s " + flag + " " + arg,
-                1, exeption);
-
-    }
-
-    @Test
-    public void testUnknownReadFilter() {
-        executeTest("UnknownReadFilter",baseTest("-rf","TestUnknownReadFilter", UserException.MalformedReadFilterException.class));
-    }
-
-    @Test
-    public void testMalformedWalkerArgs() {
-        executeTest("MalformedWalkerArgs",
-                new WalkerTest.WalkerTestSpec("-T UnknownWalkerName -M 10 --variant:vcf "
-                + callsB36 + " -F POS,CHROM -R "
-                + b36KGReference +  " -o %s ",
-                1, UserException.MalformedWalkerArgumentsException.class));
-    }
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/commandline/LoggingIntegrationTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/commandline/LoggingIntegrationTest.java
deleted file mode 100644
index d690f68..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/commandline/LoggingIntegrationTest.java
+++ /dev/null
@@ -1,117 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.commandline;
-
-import java.io.File;
-import java.io.IOException;
-import java.util.Arrays;
-
-import org.apache.commons.lang.StringUtils;
-import org.apache.log4j.Level;
-
-import org.testng.Assert;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
-
-import org.broadinstitute.gatk.utils.BaseTest;
-import org.broadinstitute.gatk.utils.MD5DB;
-import org.broadinstitute.gatk.utils.MD5Mismatch;
-import org.broadinstitute.gatk.engine.CommandLineGATK;
-import org.broadinstitute.gatk.utils.runtime.*;
-
-public class LoggingIntegrationTest {
-    private final MD5DB md5db = new MD5DB();
-
-    private class LoggingTestProvider extends BaseTest.TestDataProvider {
-
-        private final String baseCmdLine;
-
-        private final Level logLevel;
-        private final String logFileStr;
-        public final File argumentOutputFile;
-        public final File pipedOutputFile;
-
-        private LoggingTestProvider(final Level logLevel, final boolean explicitLogfile) throws IOException {
-            super(LoggingTestProvider.class);
-
-            // TODO: a better command line that exercises log levels besides INFO
-            this.baseCmdLine = String.format("java -cp %s %s -T SelectVariants -R %s -V %s -L 1:1000000-2000000 --no_cmdline_in_header",
-                    StringUtils.join(RuntimeUtils.getAbsoluteClassPaths(), File.pathSeparatorChar),
-                    CommandLineGATK.class.getCanonicalName(), BaseTest.b37KGReference, BaseTest.b37_NA12878_OMNI);
-
-            this.logLevel = logLevel;
-            this.logFileStr = explicitLogfile ? " -log " + BaseTest.createTempFile(logLevel.toString(), "log") : "";
-            this.argumentOutputFile = BaseTest.createTempFile(logLevel.toString(), "vcf");
-            this.pipedOutputFile = BaseTest.createTempFile(logLevel.toString(), "vcf");
-        }
-
-        public final String getCmdLine(boolean redirectStdout) {
-            String command = String.format("%s -l %s %s", baseCmdLine, logLevel, logFileStr);
-            return redirectStdout ? command : command + " -o " + argumentOutputFile;
-        }
-
-        public String toString() {
-            return String.format("LoggingTestProvider logLevel=%s", logLevel);
-        }
-    }
-
-    @DataProvider(name = "LoggingTest")
-    public Object[][] makeLoggingTestProvider() throws IOException {
-        for (Boolean explicitLogFile : Arrays.asList(true, false)) {
-            // TODO: enable other logging levels when tests for those exist
-            new LoggingTestProvider(Level.DEBUG, explicitLogFile);
-        }
-
-        return LoggingTestProvider.getTests(LoggingTestProvider.class);
-    }
-
-    /**
-     * test that using an output argument produces the same output as stdout
-     */
-    @Test(dataProvider = "LoggingTest")
-    public void testStdoutEquivalence(final LoggingTestProvider cfg) throws IOException {
-
-        ProcessController pc = ProcessController.getThreadLocal();
-
-        // output argument
-
-        ProcessSettings ps = new ProcessSettings(cfg.getCmdLine(false).split("\\s+"));
-        pc.execAndCheck(ps);
-        String output_argument_md5 = md5db.calculateFileMD5(cfg.argumentOutputFile);
-
-        // pipe to stdout
-
-        ps = new ProcessSettings(cfg.getCmdLine(true).split("\\s+"));
-        ps.setStdoutSettings(new OutputStreamSettings(cfg.pipedOutputFile));
-        pc.execAndCheck(ps);
-
-        MD5DB.MD5Match result = md5db.testFileMD5("LoggingIntegrationTest", "LoggingIntegrationTest", cfg.pipedOutputFile, output_argument_md5, false);
-        if(result.failed) {
-            final MD5Mismatch failure = new MD5Mismatch(result.actualMD5, result.expectedMD5, result.diffEngineOutput);
-            Assert.fail(failure.toString());
-        }
-    }
-}
\ No newline at end of file
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/commandline/ParsingEngineUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/commandline/ParsingEngineUnitTest.java
deleted file mode 100644
index d3c85b6..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/commandline/ParsingEngineUnitTest.java
+++ /dev/null
@@ -1,1140 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.commandline;
-
-import org.apache.commons.io.FileUtils;
-import htsjdk.tribble.Feature;
-import org.broadinstitute.gatk.utils.exceptions.UserException;
-import htsjdk.variant.variantcontext.VariantContext;
-import org.testng.Assert;
-import org.broadinstitute.gatk.utils.BaseTest;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-import org.testng.annotations.BeforeMethod;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
-
-import java.io.File;
-import java.io.IOException;
-import java.util.List;
-import java.util.EnumSet;
-import java.util.Set;
-
-/**
- * Test suite for the parsing engine.
- */
-public class ParsingEngineUnitTest extends BaseTest {
-    /** we absolutely cannot have this file existing, or we'll fail the UnitTest */
-    private final static String NON_EXISTANT_FILENAME_VCF = "this_file_should_not_exist_on_disk_123456789.vcf";
-    private ParsingEngine parsingEngine;
-
-    @BeforeMethod
-    public void setUp() {
-        parsingEngine = new ParsingEngine(null);
-        RodBinding.resetNameCounter();
-    }
-
-    private class InputFileArgProvider {
-        @Argument(fullName="input_file",doc="input file",shortName="I")
-        public String inputFile;
-    }
-
-    @Test
-    public void shortNameArgumentTest() {
-        final String[] commandLine = new String[] {"-I","na12878.bam"};
-
-        parsingEngine.addArgumentSource( InputFileArgProvider.class );
-        parsingEngine.parse( commandLine );
-        parsingEngine.validate();
-
-        InputFileArgProvider argProvider = new InputFileArgProvider();
-        parsingEngine.loadArgumentsIntoObject( argProvider );
-
-        Assert.assertEquals(argProvider.inputFile,"na12878.bam","Argument is not correctly initialized");
-    }
-
-    @Test
-    public void multiCharShortNameArgumentTest() {
-        final String[] commandLine = new String[] {"-out","out.txt"};
-
-        parsingEngine.addArgumentSource( MultiCharShortNameArgProvider.class );
-        parsingEngine.parse( commandLine );
-        parsingEngine.validate();
-
-        MultiCharShortNameArgProvider argProvider = new MultiCharShortNameArgProvider();
-        parsingEngine.loadArgumentsIntoObject( argProvider );
-
-        Assert.assertEquals(argProvider.outputFile,"out.txt","Argument is not correctly initialized");
-    }
-
-
-    private class MultiCharShortNameArgProvider {
-        @Argument(shortName="out", doc="output file")
-        public String outputFile;
-    }
-
-    @Test
-    public void longNameArgumentTest() {
-        final String[] commandLine = new String[] {"--input_file", "na12878.bam"};
-
-        parsingEngine.addArgumentSource( InputFileArgProvider.class );
-        parsingEngine.parse( commandLine );
-        parsingEngine.validate();
-
-        InputFileArgProvider argProvider = new InputFileArgProvider();
-        parsingEngine.loadArgumentsIntoObject( argProvider );
-
-        Assert.assertEquals(argProvider.inputFile,"na12878.bam","Argument is not correctly initialized");
-    }
-
-    @Test
-    public void extraWhitespaceTest() {
-        final String[] commandLine = new String[] {"  --input_file ", "na12878.bam"};
-
-        parsingEngine.addArgumentSource( InputFileArgProvider.class );
-        parsingEngine.parse( commandLine );
-        parsingEngine.validate();
-
-        InputFileArgProvider argProvider = new InputFileArgProvider();
-        parsingEngine.loadArgumentsIntoObject( argProvider );
-
-        Assert.assertEquals(argProvider.inputFile,"na12878.bam","Argument is not correctly initialized");
-    }
-
-    @Test
-    public void primitiveArgumentTest() {
-        final String[] commandLine = new String[] {"--foo", "5"};
-
-        parsingEngine.addArgumentSource( PrimitiveArgProvider.class );
-        parsingEngine.parse( commandLine );
-        parsingEngine.validate();
-
-        PrimitiveArgProvider argProvider = new PrimitiveArgProvider();
-        parsingEngine.loadArgumentsIntoObject( argProvider );
-
-        Assert.assertEquals(argProvider.foo, 5, "Argument is not correctly initialized");
-    }
-
-    @Test(expectedExceptions=InvalidArgumentValueException.class)
-    public void primitiveArgumentNoValueTest() {
-        final String[] commandLine = new String[] {"--foo"};
-
-        parsingEngine.addArgumentSource( PrimitiveArgProvider.class );
-        parsingEngine.parse( commandLine );
-        parsingEngine.validate();
-
-        PrimitiveArgProvider argProvider = new PrimitiveArgProvider();
-        parsingEngine.loadArgumentsIntoObject( argProvider );
-
-        Assert.assertEquals(argProvider.foo, 5, "Argument is not correctly initialized");
-    }
-
-    private class PrimitiveArgProvider {
-        @Argument(doc="simple integer")
-        int foo;
-    }
-
-    @Test
-    public void flagTest() {
-        final String[] commandLine = new String[] {"--all_loci"};
-
-        parsingEngine.addArgumentSource( AllLociArgProvider.class );
-        parsingEngine.parse( commandLine );
-        parsingEngine.validate();
-
-        AllLociArgProvider argProvider = new AllLociArgProvider();
-        parsingEngine.loadArgumentsIntoObject( argProvider );
-
-        Assert.assertTrue(argProvider.allLoci,"Argument is not correctly initialized");
-    }
-
-    private class AllLociArgProvider {
-        @Argument(fullName="all_loci",shortName="A", doc="all loci")
-        public boolean allLoci = false;
-    }
-
-    @Test
-    public void arrayTest() {
-        final String[] commandLine = new String[] {"-I", "foo.txt", "--input_file", "bar.txt"};
-
-        parsingEngine.addArgumentSource( MultiValueArgProvider.class );
-        parsingEngine.parse( commandLine );
-        parsingEngine.validate();
-
-        MultiValueArgProvider argProvider = new MultiValueArgProvider();
-        parsingEngine.loadArgumentsIntoObject( argProvider );
-
-        Assert.assertEquals(argProvider.inputFile.length, 2, "Argument array is of incorrect length");
-        Assert.assertEquals(argProvider.inputFile[0],"foo.txt","1st filename is incorrect");
-        Assert.assertEquals(argProvider.inputFile[1],"bar.txt","2nd filename is incorrect");
-    }
-
-    private class MultiValueArgProvider {
-        @Argument(fullName="input_file",shortName="I", doc="input file")
-        public String[] inputFile;
-    }
-
-    @Test
-    public void enumTest() {
-        final String[] commandLine = new String[] {  "--test_enum", "TWO" };
-
-        parsingEngine.addArgumentSource( EnumArgProvider.class );
-        parsingEngine.parse( commandLine );
-        parsingEngine.validate();
-
-        EnumArgProvider argProvider = new EnumArgProvider();
-        parsingEngine.loadArgumentsIntoObject( argProvider );
-
-        Assert.assertEquals(argProvider.testEnum, TestEnum.TWO, "Enum value is not correct");
-    }
-
-    @Test
-    public void enumMixedCaseTest() {
-        final String[] commandLine = new String[] {  "--test_enum", "oNe" };
-
-        parsingEngine.addArgumentSource( EnumArgProvider.class );
-        parsingEngine.parse( commandLine );
-        parsingEngine.validate();
-
-        EnumArgProvider argProvider = new EnumArgProvider();
-        parsingEngine.loadArgumentsIntoObject( argProvider );
-
-        Assert.assertEquals(argProvider.testEnum, TestEnum.ONE, "Enum value is not correct");
-    }
-
-    @Test
-    public void enumDefaultTest() {
-        final String[] commandLine = new String[] {};
-
-        parsingEngine.addArgumentSource( EnumArgProvider.class );
-        parsingEngine.parse( commandLine );
-        parsingEngine.validate();
-
-        EnumArgProvider argProvider = new EnumArgProvider();
-        parsingEngine.loadArgumentsIntoObject( argProvider );
-
-        Assert.assertEquals(argProvider.testEnum, TestEnum.THREE, "Enum value is not correct");
-    }
-
-    public enum TestEnum { ONE, TWO, THREE }
-
-    private class EnumArgProvider {
-        @Argument(fullName="test_enum",shortName="ti",doc="test enum",required=false)
-        public TestEnum testEnum = TestEnum.THREE;
-    }
-
-    @Test
-    public void typedCollectionTest() {
-        final String[] commandLine = new String[] { "-N","2","-N","4","-N","6","-N","8","-N","10" };
-
-        parsingEngine.addArgumentSource( IntegerListArgProvider.class );
-        parsingEngine.parse( commandLine );
-        parsingEngine.validate();
-
-        IntegerListArgProvider argProvider = new IntegerListArgProvider();
-        parsingEngine.loadArgumentsIntoObject( argProvider );
-
-        Assert.assertNotNull(argProvider.integers, "Argument array is null");
-        Assert.assertEquals(argProvider.integers.size(), 5, "Argument array is of incorrect length");
-        Assert.assertEquals(argProvider.integers.get(0).intValue(), 2, "1st integer is incorrect");
-        Assert.assertEquals(argProvider.integers.get(1).intValue(), 4, "2nd integer is incorrect");
-        Assert.assertEquals(argProvider.integers.get(2).intValue(), 6, "3rd integer is incorrect");
-        Assert.assertEquals(argProvider.integers.get(3).intValue(), 8, "4th integer is incorrect");
-        Assert.assertEquals(argProvider.integers.get(4).intValue(), 10, "5th integer is incorrect");
-    }
-
-    private class IntegerListArgProvider {
-        @Argument(fullName="integer_list",shortName="N",doc="integer list")
-        public List<Integer> integers;
-    }
-
-    @Test
-    public void untypedCollectionTest() {
-        final String[] commandLine = new String[] { "-N","2","-N","4","-N","6","-N","8","-N","10" };
-
-        parsingEngine.addArgumentSource( UntypedListArgProvider.class );
-        parsingEngine.parse( commandLine );
-        parsingEngine.validate();
-
-        UntypedListArgProvider argProvider = new UntypedListArgProvider();
-        parsingEngine.loadArgumentsIntoObject( argProvider );
-
-        Assert.assertNotNull(argProvider.integers, "Argument array is null");
-        Assert.assertEquals(argProvider.integers.size(), 5, "Argument array is of incorrect length");
-        Assert.assertEquals(argProvider.integers.get(0), "2", "1st integer is incorrect");
-        Assert.assertEquals(argProvider.integers.get(1), "4", "2nd integer is incorrect");
-        Assert.assertEquals(argProvider.integers.get(2), "6", "3rd integer is incorrect");
-        Assert.assertEquals(argProvider.integers.get(3), "8", "4th integer is incorrect");
-        Assert.assertEquals(argProvider.integers.get(4), "10", "5th integer is incorrect");
-    }
-
-    private class UntypedListArgProvider {
-        @Argument(fullName="untyped_list",shortName="N", doc="untyped list")
-        public List integers;
-    }
-
-    @Test(expectedExceptions=MissingArgumentException.class)
-    public void requiredArgTest() {
-        final String[] commandLine = new String[0];
-
-        parsingEngine.addArgumentSource( RequiredArgProvider.class );
-        parsingEngine.parse( commandLine );
-        parsingEngine.validate();
-    }
-
-    private class RequiredArgProvider {
-        @Argument(required=true,doc="value")
-        public Integer value;
-    }
-
-    @Test
-    public void defaultValueTest() {
-        // First try getting the default.
-        String[] commandLine = new String[0];
-
-        parsingEngine.addArgumentSource( DefaultValueArgProvider.class );
-        parsingEngine.parse( commandLine );
-        parsingEngine.validate();
-
-        DefaultValueArgProvider argProvider = new DefaultValueArgProvider();
-        parsingEngine.loadArgumentsIntoObject( argProvider );
-
-        Assert.assertEquals(argProvider.value.intValue(), 42, "Default value is not correctly initialized");
-
-        // Then try to override it.
-        commandLine = new String[] { "--value", "27" };
-
-        parsingEngine.parse( commandLine );
-        parsingEngine.validate();
-
-        parsingEngine.loadArgumentsIntoObject( argProvider );
-
-        Assert.assertEquals(argProvider.value.intValue(), 27, "Default value is not correctly initialized");
-    }
-
-    private class DefaultValueArgProvider {
-        @Argument(doc="value",required=false)
-        public Integer value = 42;
-    }
-
-    @Test
-    public void disableValidationOfRequiredArgTest() {
-        final String[] commandLine = new String[0];
-
-        parsingEngine.addArgumentSource( RequiredArgProvider.class );
-        parsingEngine.parse( commandLine );
-        parsingEngine.validate( EnumSet.of(ParsingEngine.ValidationType.MissingRequiredArgument) );
-
-        RequiredArgProvider argProvider = new RequiredArgProvider();
-        parsingEngine.loadArgumentsIntoObject(argProvider );
-
-        Assert.assertNull(argProvider.value, "Value should have remain unset");
-    }
-
-    @Test
-    public void unrequiredArgTest() {
-        final String[] commandLine = new String[0];
-
-        parsingEngine.addArgumentSource( UnrequiredArgProvider.class );
-        parsingEngine.parse( commandLine );
-        parsingEngine.validate();
-
-        UnrequiredArgProvider argProvider = new UnrequiredArgProvider();
-        parsingEngine.loadArgumentsIntoObject( argProvider );
-
-        Assert.assertNull(argProvider.value, "Value was unrequired and unspecified; contents should be null");
-    }
-
-    private class UnrequiredArgProvider {
-        @Argument(required=false,doc="unrequired value")
-        public Integer value;
-    }
-
-    @Test(expectedExceptions=InvalidArgumentException.class)
-    public void invalidArgTest() {
-        final String[] commandLine = new String[] { "--foo" };
-
-        parsingEngine.addArgumentSource( UnrequiredArgProvider.class );
-        parsingEngine.parse( commandLine );
-        parsingEngine.validate();
-    }
-
-    @Test(expectedExceptions= ReviewedGATKException.class)
-    public void duplicateLongNameTest() {
-        parsingEngine.addArgumentSource( DuplicateLongNameProvider.class );
-    }
-
-    private class DuplicateLongNameProvider {
-        @Argument(fullName="myarg",doc="my arg")
-        public Integer foo;
-
-        @Argument(fullName="myarg", doc="my arg")
-        public Integer bar;
-    }
-
-    @Test(expectedExceptions= ReviewedGATKException.class)
-    public void duplicateShortNameTest() {
-        parsingEngine.addArgumentSource( DuplicateShortNameProvider.class );
-    }
-
-
-    private class DuplicateShortNameProvider {
-        @Argument(shortName="myarg", doc="my arg")
-        public Integer foo;
-
-        @Argument(shortName="myarg", doc="my arg")
-        public Integer bar;
-    }
-
-    @Test(expectedExceptions=UnmatchedArgumentException.class)
-    public void missingArgumentNameTest() {
-        final String[] commandLine = new String[] {"foo.txt"};
-
-        parsingEngine.addArgumentSource( NoArgProvider.class );
-        parsingEngine.parse( commandLine );
-        parsingEngine.validate();
-    }
-
-    private class NoArgProvider {
-
-    }
-
-    @Test(expectedExceptions=UnmatchedArgumentException.class)
-    public void extraValueTest() {
-        final String[] commandLine = new String[] {"-I", "foo.txt", "bar.txt"};
-
-        parsingEngine.addArgumentSource( InputFileArgProvider.class );
-        parsingEngine.parse( commandLine );
-        parsingEngine.validate();
-    }
-
-    @Test(expectedExceptions=MissingArgumentException.class)
-    public void multipleInvalidArgTest() {
-        final String[] commandLine = new String[] {"-N1", "-N2", "-N3"};
-
-        parsingEngine.addArgumentSource( RequiredArgProvider.class );
-        parsingEngine.parse( commandLine );
-        parsingEngine.validate();
-    }
-
-    @Test(expectedExceptions=TooManyValuesForArgumentException.class)
-    public void invalidArgCountTest() {
-        final String[] commandLine = new String[] {"--value","1","--value","2","--value","3"};
-
-        parsingEngine.addArgumentSource( RequiredArgProvider.class );
-        parsingEngine.parse( commandLine );
-        parsingEngine.validate();
-    }
-
-    @Test
-    public void packageProtectedArgTest() {
-        final String[] commandLine = new String[] {"--foo", "1"};
-
-        parsingEngine.addArgumentSource( PackageProtectedArgProvider.class );
-        parsingEngine.parse( commandLine );
-        parsingEngine.validate();
-
-        PackageProtectedArgProvider argProvider = new PackageProtectedArgProvider();
-        parsingEngine.loadArgumentsIntoObject(argProvider);
-
-        Assert.assertEquals(argProvider.foo.intValue(), 1, "Argument is not correctly initialized");
-    }
-
-    private class PackageProtectedArgProvider {
-        @Argument(doc="foo")
-        Integer foo;
-    }
-
-    @Test
-    public void derivedArgTest() {
-        final String[] commandLine = new String[] {"--bar", "5"};
-
-        parsingEngine.addArgumentSource( DerivedArgProvider.class );
-        parsingEngine.parse( commandLine );
-        parsingEngine.validate();
-
-        DerivedArgProvider argProvider = new DerivedArgProvider();
-        parsingEngine.loadArgumentsIntoObject(argProvider);
-
-        Assert.assertEquals(argProvider.bar.intValue(), 5, "Argument is not correctly initialized");
-    }
-
-    private class DerivedArgProvider extends BaseArgProvider {
-    }
-
-    private class BaseArgProvider {
-        @Argument(doc="bar")
-        public Integer bar;
-    }
-
-    @Test
-    public void correctDefaultArgNameTest() {
-        parsingEngine.addArgumentSource( CamelCaseArgProvider.class );
-
-        DefinitionMatcher matcher = ArgumentDefinitions.FullNameDefinitionMatcher;
-        ArgumentDefinition definition = parsingEngine.argumentDefinitions.findArgumentDefinition("myarg", matcher);
-
-        Assert.assertNotNull(definition, "Invalid default argument name assigned");
-    }
-
-    @SuppressWarnings("unused")
-    private class CamelCaseArgProvider {
-        @Argument(doc="my arg")
-        Integer myArg;
-    }
-
-    @Test(expectedExceptions=UnmatchedArgumentException.class)
-    public void booleanWithParameterTest() {
-        final String[] commandLine = new String[] {"--mybool", "true"};
-
-        parsingEngine.addArgumentSource( BooleanArgProvider.class );
-        parsingEngine.parse( commandLine );
-        parsingEngine.validate();
-    }
-
-    @SuppressWarnings("unused")
-    private class BooleanArgProvider {
-        @Argument(doc="my bool")
-        boolean myBool;
-    }
-
-    @Test
-    public void validParseForAnalysisTypeTest() {
-        final String[] commandLine = new String[] {"--analysis_type", "Pileup" };
-
-        parsingEngine.addArgumentSource( AnalysisTypeArgProvider.class );
-        parsingEngine.parse( commandLine );
-        parsingEngine.validate( EnumSet.of(ParsingEngine.ValidationType.MissingRequiredArgument) );
-
-        AnalysisTypeArgProvider argProvider = new AnalysisTypeArgProvider();
-        parsingEngine.loadArgumentsIntoObject( argProvider );
-
-        Assert.assertEquals(argProvider.Analysis_Name,"Pileup","Argument is not correctly initialized");
-    }
-
-    private class AnalysisTypeArgProvider {
-        @Argument(fullName="analysis_type", shortName="T", doc="Type of analysis to run")
-        public String Analysis_Name = null;
-    }
-
-    @Test(expectedExceptions=TooManyValuesForArgumentException.class)
-    public void invalidParseForAnalysisTypeTest() {
-        final String[] commandLine = new String[] {"--analysis_type", "Pileup", "-T", "CountReads" };
-
-        parsingEngine.addArgumentSource( AnalysisTypeArgProvider.class );
-        parsingEngine.parse( commandLine );
-        parsingEngine.validate( EnumSet.of(ParsingEngine.ValidationType.MissingRequiredArgument) );
-    }
-
-    @Test(expectedExceptions=ArgumentsAreMutuallyExclusiveException.class)
-    public void mutuallyExclusiveArgumentsTest() {
-        // Passing only foo should work fine...
-        String[] commandLine = new String[] {"--foo","5"};
-
-        parsingEngine.addArgumentSource( MutuallyExclusiveArgProvider.class );
-        parsingEngine.parse( commandLine );
-        parsingEngine.validate();
-
-        MutuallyExclusiveArgProvider argProvider = new MutuallyExclusiveArgProvider();
-        parsingEngine.loadArgumentsIntoObject( argProvider );
-
-        Assert.assertEquals(argProvider.foo.intValue(), 5, "Argument is not correctly initialized");
-
-        // But when foo and bar come together, danger!
-        commandLine = new String[] {"--foo","5","--bar","6"};
-
-        parsingEngine.parse( commandLine );
-        parsingEngine.validate();
-    }
-
-    @SuppressWarnings("unused")
-    private class MutuallyExclusiveArgProvider {
-        @Argument(doc="foo",exclusiveOf="bar")
-        Integer foo;
-
-        @Argument(doc="bar",required=false)
-        Integer bar;
-    }
-
-    @Test(expectedExceptions=InvalidArgumentValueException.class)
-    public void argumentValidationTest() {
-        // Passing only foo should work fine...
-        String[] commandLine = new String[] {"--value","521"};
-
-        parsingEngine.addArgumentSource( ValidatingArgProvider.class );
-        parsingEngine.parse( commandLine );
-        parsingEngine.validate();
-
-        ValidatingArgProvider argProvider = new ValidatingArgProvider();
-        parsingEngine.loadArgumentsIntoObject( argProvider );
-
-        Assert.assertEquals(argProvider.value.intValue(), 521, "Argument is not correctly initialized");
-
-        // Try some invalid arguments
-        commandLine = new String[] {"--value","foo"};
-        parsingEngine.parse( commandLine );
-        parsingEngine.validate();
-    }
-
-    private class ValidatingArgProvider {
-        @Argument(doc="value",validation="\\d+")
-        Integer value;
-    }
-
-    @Test
-    public void argumentCollectionTest() {
-        String[] commandLine = new String[] { "--value", "5" };
-
-        parsingEngine.addArgumentSource( ArgumentCollectionProvider.class );
-        parsingEngine.parse( commandLine );
-        parsingEngine.validate();
-
-        ArgumentCollectionProvider argProvider = new ArgumentCollectionProvider();
-        parsingEngine.loadArgumentsIntoObject(argProvider);
-
-        Assert.assertEquals(argProvider.rap.value.intValue(), 5, "Argument is not correctly initialized");
-    }
-
-    private class ArgumentCollectionProvider {
-        @ArgumentCollection
-        RequiredArgProvider rap = new RequiredArgProvider();
-    }
-
-    @Test(expectedExceptions= ReviewedGATKException.class)
-    public void multipleArgumentCollectionTest() {
-        parsingEngine.addArgumentSource( MultipleArgumentCollectionProvider.class );
-    }
-
-    @SuppressWarnings("unused")
-    private class MultipleArgumentCollectionProvider {
-        @ArgumentCollection
-        RequiredArgProvider rap1 = new RequiredArgProvider();
-        @ArgumentCollection
-        RequiredArgProvider rap2 = new RequiredArgProvider();
-    }
-
-    // --------------------------------------------------------------------------------
-    //
-    // Tests of the RodBinding<T> system
-    //
-    // --------------------------------------------------------------------------------
-
-    private class SingleRodBindingArgProvider {
-        @Input(fullName="binding", shortName="V", required=true)
-        public RodBinding<Feature> binding;
-    }
-
-    @Test
-    public void basicRodBindingArgumentTest() {
-        final String[] commandLine = new String[] {"-V:vcf",NON_EXISTANT_FILENAME_VCF};
-
-        parsingEngine.addArgumentSource( SingleRodBindingArgProvider.class );
-        parsingEngine.parse( commandLine );
-        parsingEngine.validate();
-
-        SingleRodBindingArgProvider argProvider = new SingleRodBindingArgProvider();
-        parsingEngine.loadArgumentsIntoObject( argProvider );
-
-        Assert.assertEquals(argProvider.binding.getName(), "binding", "Name isn't set properly");
-        Assert.assertEquals(argProvider.binding.getSource(), NON_EXISTANT_FILENAME_VCF, "Source isn't set to its expected value");
-        Assert.assertEquals(argProvider.binding.getType(), Feature.class, "Type isn't set to its expected value");
-        Assert.assertEquals(argProvider.binding.isBound(), true, "Bound() isn't returning its expected value");
-        Assert.assertEquals(argProvider.binding.getTags().getPositionalTags().size(), 1, "Tags aren't correctly set");
-    }
-
-    private class ShortNameOnlyRodBindingArgProvider {
-        @Input(shortName="short", required=false)
-        public RodBinding<Feature> binding; // = RodBinding.makeUnbound(Feature.class);
-    }
-
-    @Test
-    public void shortNameOnlyRodBindingArgumentTest() {
-        final String[] commandLine = new String[] {"-short:vcf",NON_EXISTANT_FILENAME_VCF};
-
-        parsingEngine.addArgumentSource( ShortNameOnlyRodBindingArgProvider.class );
-        parsingEngine.parse( commandLine );
-        parsingEngine.validate();
-
-        ShortNameOnlyRodBindingArgProvider argProvider = new ShortNameOnlyRodBindingArgProvider();
-        parsingEngine.loadArgumentsIntoObject( argProvider );
-
-        Assert.assertEquals(argProvider.binding.getName(), "binding", "Name isn't set properly");
-        Assert.assertEquals(argProvider.binding.getSource(), NON_EXISTANT_FILENAME_VCF, "Source isn't set to its expected value");
-        Assert.assertEquals(argProvider.binding.getType(), Feature.class, "Type isn't set to its expected value");
-        Assert.assertEquals(argProvider.binding.isBound(), true, "Bound() isn't returning its expected value");
-        Assert.assertEquals(argProvider.binding.getTags().getPositionalTags().size(), 1, "Tags aren't correctly set");
-    }
-
-    private class OptionalRodBindingArgProvider {
-        @Input(fullName="binding", shortName="V", required=false)
-        public RodBinding<Feature> binding;
-
-        @Input(fullName="bindingNull", shortName="VN", required=false)
-        public RodBinding<VariantContext> bindingNull = null;
-    }
-
-    @Test
-    public void optionalRodBindingArgumentTest() {
-        final String[] commandLine = new String[] {};
-
-        parsingEngine.addArgumentSource( OptionalRodBindingArgProvider.class );
-        parsingEngine.parse( commandLine );
-        parsingEngine.validate();
-
-        OptionalRodBindingArgProvider argProvider = new OptionalRodBindingArgProvider();
-        parsingEngine.loadArgumentsIntoObject( argProvider );
-
-        Assert.assertNotNull(argProvider.binding, "Default value not applied corrected to RodBinding");
-        Assert.assertEquals(argProvider.binding.getName(), RodBinding.UNBOUND_VARIABLE_NAME, "Name isn't set properly");
-        Assert.assertEquals(argProvider.binding.getSource(), RodBinding.UNBOUND_SOURCE, "Source isn't set to its expected value");
-        Assert.assertEquals(argProvider.binding.getType(), Feature.class, "Type isn't set to its expected value");
-        Assert.assertEquals(argProvider.binding.isBound(), false, "Bound() isn't returning its expected value");
-        Assert.assertEquals(argProvider.binding.getTags().getPositionalTags().size(), 0, "Tags aren't correctly set");
-
-        Assert.assertNotNull(argProvider.bindingNull, "Default value not applied corrected to RodBinding");
-        Assert.assertEquals(argProvider.bindingNull.getName(), RodBinding.UNBOUND_VARIABLE_NAME, "Name isn't set properly");
-        Assert.assertEquals(argProvider.bindingNull.getSource(), RodBinding.UNBOUND_SOURCE, "Source isn't set to its expected value");
-        Assert.assertEquals(argProvider.bindingNull.getType(), VariantContext.class, "Type isn't set to its expected value");
-        Assert.assertEquals(argProvider.bindingNull.isBound(), false, "Bound() isn't returning its expected value");
-        Assert.assertEquals(argProvider.bindingNull.getTags().getPositionalTags().size(), 0, "Tags aren't correctly set");
-    }
-
-    @Test(expectedExceptions = UserException.class)
-    public void rodBindingArgumentTestMissingType() {
-        final String[] commandLine = new String[] {"-V",NON_EXISTANT_FILENAME_VCF};
-
-        parsingEngine.addArgumentSource( SingleRodBindingArgProvider.class );
-        parsingEngine.parse( commandLine );
-        parsingEngine.validate();
-
-        SingleRodBindingArgProvider argProvider = new SingleRodBindingArgProvider();
-        parsingEngine.loadArgumentsIntoObject(argProvider);
-    }
-
-    @Test(expectedExceptions = UserException.class)
-    public void rodBindingArgumentTestTooManyTags() {
-        final String[] commandLine = new String[] {"-V:x,y,z",NON_EXISTANT_FILENAME_VCF};
-
-        parsingEngine.addArgumentSource( SingleRodBindingArgProvider.class );
-        parsingEngine.parse( commandLine );
-        parsingEngine.validate();
-
-        SingleRodBindingArgProvider argProvider = new SingleRodBindingArgProvider();
-        parsingEngine.loadArgumentsIntoObject(argProvider);
-    }
-
-    private class VariantContextRodBindingArgProvider {
-        @Input(fullName = "binding", shortName="V")
-        public RodBinding<VariantContext> binding;
-    }
-
-    @Test
-    public void variantContextBindingArgumentTest() {
-        final String[] commandLine = new String[] {"-V:vcf",NON_EXISTANT_FILENAME_VCF};
-
-        parsingEngine.addArgumentSource( VariantContextRodBindingArgProvider.class );
-        parsingEngine.parse( commandLine );
-        parsingEngine.validate();
-
-        VariantContextRodBindingArgProvider argProvider = new VariantContextRodBindingArgProvider();
-        parsingEngine.loadArgumentsIntoObject( argProvider );
-
-        Assert.assertEquals(argProvider.binding.getName(), "binding", "Name isn't set properly");
-        Assert.assertEquals(argProvider.binding.getSource(), NON_EXISTANT_FILENAME_VCF, "Source isn't set to its expected value");
-        Assert.assertEquals(argProvider.binding.getType(), VariantContext.class, "Type isn't set to its expected value");
-        Assert.assertEquals(argProvider.binding.getTags().getPositionalTags().size(), 1, "Tags aren't correctly set");
-    }
-
-    private class ListRodBindingArgProvider {
-        @Input(fullName = "binding", shortName="V", required=false)
-        public List<RodBinding<Feature>> bindings;
-    }
-
-    @Test
-    public void listRodBindingArgumentTest() {
-        final String[] commandLine = new String[] {"-V:vcf",NON_EXISTANT_FILENAME_VCF};
-
-        parsingEngine.addArgumentSource( ListRodBindingArgProvider.class );
-        parsingEngine.parse( commandLine );
-        parsingEngine.validate();
-
-        ListRodBindingArgProvider argProvider = new ListRodBindingArgProvider();
-        parsingEngine.loadArgumentsIntoObject( argProvider );
-
-        Assert.assertEquals(argProvider.bindings.size(), 1, "Unexpected number of bindings");
-        RodBinding<Feature> binding = argProvider.bindings.get(0);
-        Assert.assertEquals(binding.getName(), "binding", "Name isn't set properly");
-        Assert.assertEquals(binding.getSource(), NON_EXISTANT_FILENAME_VCF, "Source isn't set to its expected value");
-        Assert.assertEquals(binding.getType(), Feature.class, "Type isn't set to its expected value");
-        Assert.assertEquals(binding.getTags().getPositionalTags().size(), 1, "Tags aren't correctly set");
-    }
-
-    @Test
-    public void listRodBindingArgumentTest2Args() {
-        final String[] commandLine = new String[] {"-V:vcf",NON_EXISTANT_FILENAME_VCF, "-V:vcf", "bar.vcf"};
-
-        parsingEngine.addArgumentSource( ListRodBindingArgProvider.class );
-        parsingEngine.parse( commandLine );
-        parsingEngine.validate();
-
-        ListRodBindingArgProvider argProvider = new ListRodBindingArgProvider();
-        parsingEngine.loadArgumentsIntoObject( argProvider );
-
-        Assert.assertEquals(argProvider.bindings.size(), 2, "Unexpected number of bindings");
-
-        RodBinding<Feature> binding = argProvider.bindings.get(0);
-        Assert.assertEquals(binding.getName(), "binding", "Name isn't set properly");
-        Assert.assertEquals(binding.getSource(), NON_EXISTANT_FILENAME_VCF, "Source isn't set to its expected value");
-        Assert.assertEquals(binding.getType(), Feature.class, "Type isn't set to its expected value");
-        Assert.assertEquals(binding.getTags().getPositionalTags().size(), 1, "Tags aren't correctly set");
-
-        RodBinding<Feature> binding2 = argProvider.bindings.get(1);
-        Assert.assertEquals(binding2.getName(), "binding2", "Name isn't set properly");
-        Assert.assertEquals(binding2.getSource(), "bar.vcf", "Source isn't set to its expected value");
-        Assert.assertEquals(binding2.getType(), Feature.class, "Type isn't set to its expected value");
-        Assert.assertEquals(binding2.getTags().getPositionalTags().size(), 1, "Tags aren't correctly set");
-    }
-
-    @Test
-    public void listRodBindingArgumentTest0Args() {
-        final String[] commandLine = new String[] {};
-
-        parsingEngine.addArgumentSource( ListRodBindingArgProvider.class );
-        parsingEngine.parse( commandLine );
-        parsingEngine.validate();
-
-        ListRodBindingArgProvider argProvider = new ListRodBindingArgProvider();
-        parsingEngine.loadArgumentsIntoObject( argProvider );
-
-        Assert.assertNull(argProvider.bindings, "Bindings were not null");
-    }
-
-    @Test
-    public void listRodBindingArgumentTestExplicitlyNamed() {
-        final String[] commandLine = new String[] {"-V:foo,vcf",NON_EXISTANT_FILENAME_VCF, "-V:foo,vcf", "bar.vcf"};
-
-        parsingEngine.addArgumentSource( ListRodBindingArgProvider.class );
-        parsingEngine.parse( commandLine );
-        parsingEngine.validate();
-
-        ListRodBindingArgProvider argProvider = new ListRodBindingArgProvider();
-        parsingEngine.loadArgumentsIntoObject( argProvider );
-
-        Assert.assertEquals(argProvider.bindings.size(), 2, "Unexpected number of bindings");
-        Assert.assertEquals(argProvider.bindings.get(0).getName(), "foo", "Name isn't set properly");
-        Assert.assertEquals(argProvider.bindings.get(1).getName(), "foo2", "Name isn't set properly");
-    }
-
-    private final static String HISEQ_VCF = privateTestDir + "HiSeq.10000.vcf";
-    private final static String TRANCHES_FILE = privateTestDir + "tranches.6.txt";
-
-    @Test
-    public void variantContextBindingTestDynamicTyping1() {
-        final String[] commandLine = new String[] {"-V", HISEQ_VCF};
-
-        parsingEngine.addArgumentSource( VariantContextRodBindingArgProvider.class );
-        parsingEngine.parse( commandLine );
-        parsingEngine.validate();
-
-        VariantContextRodBindingArgProvider argProvider = new VariantContextRodBindingArgProvider();
-        parsingEngine.loadArgumentsIntoObject( argProvider );
-
-        Assert.assertEquals(argProvider.binding.getName(), "binding", "Name isn't set properly");
-        Assert.assertEquals(argProvider.binding.getSource(), HISEQ_VCF, "Source isn't set to its expected value");
-        Assert.assertEquals(argProvider.binding.getType(), VariantContext.class, "Type isn't set to its expected value");
-        Assert.assertEquals(argProvider.binding.getTags().getPositionalTags().size(), 0, "Tags aren't correctly set");
-    }
-
-    @Test
-    public void variantContextBindingTestDynamicTypingNameAsSingleArgument() {
-        final String[] commandLine = new String[] {"-V:name", HISEQ_VCF};
-
-        parsingEngine.addArgumentSource( VariantContextRodBindingArgProvider.class );
-        parsingEngine.parse( commandLine );
-        parsingEngine.validate();
-
-        VariantContextRodBindingArgProvider argProvider = new VariantContextRodBindingArgProvider();
-        parsingEngine.loadArgumentsIntoObject( argProvider );
-
-        Assert.assertEquals(argProvider.binding.getName(), "name", "Name isn't set properly");
-        Assert.assertEquals(argProvider.binding.getSource(), HISEQ_VCF, "Source isn't set to its expected value");
-        Assert.assertEquals(argProvider.binding.getType(), VariantContext.class, "Type isn't set to its expected value");
-        Assert.assertEquals(argProvider.binding.getTags().getPositionalTags().size(), 1, "Tags aren't correctly set");
-    }
-
-    @Test()
-    public void variantContextBindingTestDynamicTypingTwoTagsPassing() {
-        final String[] commandLine = new String[] {"-V:name,vcf", HISEQ_VCF};
-
-        parsingEngine.addArgumentSource( VariantContextRodBindingArgProvider.class );
-        parsingEngine.parse( commandLine );
-        parsingEngine.validate();
-
-        VariantContextRodBindingArgProvider argProvider = new VariantContextRodBindingArgProvider();
-        parsingEngine.loadArgumentsIntoObject( argProvider );
-
-        Assert.assertEquals(argProvider.binding.getName(), "name", "Name isn't set properly");
-        Assert.assertEquals(argProvider.binding.getSource(), HISEQ_VCF, "Source isn't set to its expected value");
-        Assert.assertEquals(argProvider.binding.getType(), VariantContext.class, "Type isn't set to its expected value");
-        Assert.assertEquals(argProvider.binding.getTags().getPositionalTags().size(), 2, "Tags aren't correctly set");
-    }
-
-    @Test()
-    public void variantContextBindingTestDynamicTypingTwoTagsCausingTypeFailure() {
-        final String[] commandLine = new String[] {"-V:name,beagle", HISEQ_VCF};
-
-        parsingEngine.addArgumentSource( VariantContextRodBindingArgProvider.class );
-        parsingEngine.parse( commandLine );
-        parsingEngine.validate();
-
-        VariantContextRodBindingArgProvider argProvider = new VariantContextRodBindingArgProvider();
-        parsingEngine.loadArgumentsIntoObject(argProvider);
-
-        Assert.assertEquals(argProvider.binding.getName(), "name", "Name isn't set properly");
-        Assert.assertEquals(argProvider.binding.getSource(), HISEQ_VCF, "Source isn't set to its expected value");
-        Assert.assertEquals(argProvider.binding.getType(), VariantContext.class, "Type isn't set to its expected value");
-        Assert.assertEquals(argProvider.binding.getTribbleType(), "beagle", "Type isn't set to its expected value");
-        Assert.assertEquals(argProvider.binding.getTags().getPositionalTags().size(), 2, "Tags aren't correctly set");
-    }
-
-    @Test(expectedExceptions = UserException.class)
-    public void variantContextBindingTestDynamicTypingUnknownTribbleType() {
-        final String[] commandLine = new String[] {"-V", TRANCHES_FILE};
-
-        parsingEngine.addArgumentSource( VariantContextRodBindingArgProvider.class );
-        parsingEngine.parse( commandLine );
-        parsingEngine.validate();
-
-        VariantContextRodBindingArgProvider argProvider = new VariantContextRodBindingArgProvider();
-        parsingEngine.loadArgumentsIntoObject( argProvider );
-    }
-
-    @Test
-    public void argumentListTest() throws IOException {
-        File argsFile = BaseTest.createTempListFile("args.", "-I na12878.bam");
-        try {
-            final String[] commandLine = new String[] {"-args", argsFile.getPath()};
-            parsingEngine.addArgumentSource(InputFileArgProvider.class);
-            parsingEngine.parse(commandLine);
-            parsingEngine.validate();
-
-            InputFileArgProvider argProvider = new InputFileArgProvider();
-            parsingEngine.loadArgumentsIntoObject(argProvider);
-
-            Assert.assertEquals(argProvider.inputFile, "na12878.bam", "Argument is not correctly initialized");
-        } finally {
-            FileUtils.deleteQuietly(argsFile);
-        }
-    }
-
-    @SuppressWarnings("unused")
-    private class NumericRangeArgProvider {
-        @Argument(fullName = "intWithHardMinAndMax", minValue = 5, maxValue = 10)
-        public int intWithHardMinAndMax;
-
-        @Argument(fullName = "intWithHardMin", minValue = 5)
-        public int intWithHardMin;
-
-        @Argument(fullName = "intWithHardMax", maxValue = 10)
-        public int intWithHardMax;
-
-        @Argument(fullName = "intWithSoftMinAndMax", minRecommendedValue = 5, maxRecommendedValue = 10)
-        public int intWithSoftMinAndMax;
-
-        @Argument(fullName = "intWithSoftMin", minRecommendedValue = 5)
-        public int intWithSoftMin;
-
-        @Argument(fullName = "intWithSoftMax", maxRecommendedValue = 10)
-        public int intWithSoftMax;
-
-        @Argument(fullName = "intWithHardAndSoftMinAndMax", minValue = 5, minRecommendedValue = 7, maxValue = 10, maxRecommendedValue = 9)
-        public int intWithHardAndSoftMinAndMax;
-
-        @Argument(fullName = "intWithHardAndSoftMin", minValue = 5, minRecommendedValue = 7)
-        public int intWithHardAndSoftMin;
-
-        @Argument(fullName = "intWithHardAndSoftMax", maxValue = 10, maxRecommendedValue = 8)
-        public int intWithHardAndSoftMax;
-
-        @Argument(fullName = "intWithHardMinAndMaxDefaultOutsideRange", minValue = 5, maxValue = 10)
-        public int intWithHardMinAndMaxDefaultOutsideRange = -1;
-
-        @Argument(fullName = "integerWithHardMinAndMax", minValue = 5, maxValue = 10)
-        public Integer integerWithHardMinAndMax;
-
-        @Argument(fullName = "byteWithHardMinAndMax", minValue = 5, maxValue = 10)
-        public byte byteWithHardMinAndMax;
-
-        @Argument(fullName = "byteWithHardMin", minValue = 5)
-        public byte byteWithHardMin;
-
-        @Argument(fullName = "byteWithHardMax", maxValue = 10)
-        public byte byteWithHardMax;
-
-        @Argument(fullName = "doubleWithHardMinAndMax", minValue = 5.5, maxValue = 10.0)
-        public double doubleWithHardMinAndMax;
-
-        @Argument(fullName = "doubleWithHardMin", minValue = 5.5)
-        public double doubleWithHardMin;
-
-        @Argument(fullName = "doubleWithHardMax", maxValue = 10.0)
-        public double doubleWithHardMax;
-    }
-
-    @DataProvider(name = "NumericRangeConstraintViolationDataProvider")
-    public Object[][] numericRangeConstraintViolationDataProvider() {
-        return new Object[][] {
-                { new String[]{"--intWithHardMinAndMax", "11"} },
-                { new String[]{"--intWithHardMinAndMax", "4"} },
-                { new String[]{"--intWithHardMin", "4"} },
-                { new String[]{"--intWithHardMax", "11"} },
-                { new String[]{"--intWithHardAndSoftMinAndMax", "11"} },
-                { new String[]{"--intWithHardAndSoftMinAndMax", "4"} },
-                { new String[]{"--intWithHardAndSoftMin", "4"} },
-                { new String[]{"--intWithHardAndSoftMax", "11"} },
-                { new String[]{"--intWithHardMinAndMaxDefaultOutsideRange", "11"} },
-                { new String[]{"--intWithHardMinAndMaxDefaultOutsideRange", "4"} },
-                { new String[]{"--integerWithHardMinAndMax", "11"} },
-                { new String[]{"--integerWithHardMinAndMax", "4"} },
-                { new String[]{"--byteWithHardMinAndMax", "11"} },
-                { new String[]{"--byteWithHardMinAndMax", "4"} },
-                { new String[]{"--byteWithHardMin", "4"} },
-                { new String[]{"--byteWithHardMax", "11"} },
-                { new String[]{"--doubleWithHardMinAndMax", "5.4"} },
-                { new String[]{"--doubleWithHardMinAndMax", "10.1"} },
-                { new String[]{"--doubleWithHardMin", "5.4"} },
-                { new String[]{"--doubleWithHardMax", "10.1"} }
-        };
-    }
-
-    @Test(dataProvider = "NumericRangeConstraintViolationDataProvider",
-          expectedExceptions = ArgumentValueOutOfRangeException.class)
-    public void testNumericRangeWithConstraintViolation( final String[] commandLine ) {
-        runNumericArgumentRangeTest(commandLine);
-    }
-
-    @DataProvider(name = "NumericRangeWithoutConstraintViolationDataProvider")
-    public Object[][] numericRangeWithoutConstraintViolationDataProvider() {
-        return new Object[][] {
-                { new String[]{"--intWithHardMinAndMax", "10"} },
-                { new String[]{"--intWithHardMinAndMax", "5"} },
-                { new String[]{"--intWithHardMinAndMax", "7"} },
-                { new String[]{"--intWithHardMin", "11"} },
-                { new String[]{"--intWithHardMax", "4"} },
-                { new String[]{"--intWithSoftMinAndMax", "11"} },
-                { new String[]{"--intWithSoftMinAndMax", "4"} },
-                { new String[]{"--intWithSoftMin", "4"} },
-                { new String[]{"--intWithSoftMax", "11"} },
-                { new String[]{"--intWithHardAndSoftMinAndMax", "5"} },
-                { new String[]{"--intWithHardAndSoftMinAndMax", "7"} },
-                { new String[]{"--intWithHardAndSoftMinAndMax", "8"} },
-                { new String[]{"--intWithHardAndSoftMinAndMax", "9"} },
-                { new String[]{"--intWithHardAndSoftMinAndMax", "10"} },
-                { new String[]{"--intWithHardAndSoftMin", "5"} },
-                { new String[]{"--intWithHardAndSoftMin", "6"} },
-                { new String[]{"--intWithHardAndSoftMin", "7"} },
-                { new String[]{"--intWithHardAndSoftMax", "10"} },
-                { new String[]{"--intWithHardAndSoftMax", "9"} },
-                { new String[]{"--intWithHardAndSoftMax", "8"} },
-                { new String[]{"--intWithHardMinAndMaxDefaultOutsideRange", "10"} },
-                { new String[]{"--intWithHardMinAndMaxDefaultOutsideRange", "5"} },
-                { new String[]{"--intWithHardMinAndMaxDefaultOutsideRange", "7"} },
-                { new String[]{"--integerWithHardMinAndMax", "10"} },
-                { new String[]{"--integerWithHardMinAndMax", "5"} },
-                { new String[]{"--byteWithHardMinAndMax", "10"} },
-                { new String[]{"--byteWithHardMinAndMax", "5"} },
-                { new String[]{"--byteWithHardMinAndMax", "7"} },
-                { new String[]{"--byteWithHardMin", "5"} },
-                { new String[]{"--byteWithHardMax", "10"} },
-                { new String[]{"--doubleWithHardMinAndMax", "5.5"} },
-                { new String[]{"--doubleWithHardMinAndMax", "10.0"} },
-                { new String[]{"--doubleWithHardMinAndMax", "7.5"} },
-                { new String[]{"--doubleWithHardMin", "5.5"} },
-                { new String[]{"--doubleWithHardMin", "15.5"} },
-                { new String[]{"--doubleWithHardMax", "10.0"} },
-                { new String[]{"--doubleWithHardMax", "7.5"} }
-        };
-    }
-
-    @Test(dataProvider = "NumericRangeWithoutConstraintViolationDataProvider")
-    public void testNumericRangeWithoutConstraintViolation( final String[] commandLine ) {
-        // These tests succeed if no exception is thrown, since no constraints have been violated
-        runNumericArgumentRangeTest(commandLine);
-    }
-
-    private void runNumericArgumentRangeTest( final String[] commandLine ) {
-        parsingEngine.addArgumentSource(NumericRangeArgProvider.class);
-        parsingEngine.parse(commandLine);
-
-        NumericRangeArgProvider argProvider = new NumericRangeArgProvider();
-        parsingEngine.loadArgumentsIntoObject(argProvider);
-    }
-
-    @SuppressWarnings("unused")
-    private class VariedTypeArgProvider {
-        @Argument(fullName = "intVal", required=false)
-        private int anInt;
-
-        @Argument(fullName = "stringVal", required=false)
-        private String aString;
-
-        @Argument(fullName = "enumVal", required=false)
-        private TestEnum anEnum;
-
-        @Argument(fullName = "fileVal", required=false)
-        private File aFile;
-
-        @Argument(fullName = "stringSet", required=false)
-        private Set<String> someStrings;
-
-        @Argument(fullName = "intervalVal", required=false)
-        private IntervalBinding<Feature> anInterval;
-    }
-
-    @DataProvider(name = "MissingArgumentValueDataProvider")
-    public Object[][] missingArgumentDataProvider() {
-        return new Object[][]{
-                { new String[]{"--intVal"} },
-                { new String[]{"--stringVal"} },
-                { new String[]{"--enumVal"} },
-                { new String[]{"--fileVal"} },
-                { new String[]{"--stringSet"} },
-                { new String[]{"--stringSet", "aha", "--stringSet"} },
-                { new String[]{"--intervalVal"} }
-        };
-    }
-
-    @Test(dataProvider = "MissingArgumentValueDataProvider",
-          expectedExceptions = {InvalidArgumentValueException.class, MissingArgumentValueException.class})
-    public void testMissingArguments( final String[] commandLine ) {
-        parsingEngine.addArgumentSource( VariedTypeArgProvider.class );
-        parsingEngine.parse( commandLine );
-        parsingEngine.validate();
-
-        VariedTypeArgProvider argProvider = new VariedTypeArgProvider();
-        parsingEngine.loadArgumentsIntoObject(argProvider);
-    }
-
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/commandline/RodBindingCollectionUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/commandline/RodBindingCollectionUnitTest.java
deleted file mode 100644
index a846384..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/commandline/RodBindingCollectionUnitTest.java
+++ /dev/null
@@ -1,133 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.commandline;
-
-import org.broadinstitute.gatk.utils.BaseTest;
-import org.broadinstitute.gatk.utils.exceptions.UserException;
-import htsjdk.variant.variantcontext.VariantContext;
-import org.testng.Assert;
-import org.testng.annotations.BeforeMethod;
-import org.testng.annotations.Test;
-
-import java.io.File;
-import java.io.FileWriter;
-import java.io.IOException;
-import java.util.Collection;
-
-public class RodBindingCollectionUnitTest extends BaseTest {
-
-    private ParsingEngine parsingEngine;
-    private Tags mytags;
-
-    private static final String defaultTagString = "VCF";
-    private static final String testVCFFileName = privateTestDir + "empty.vcf";
-    private static final String testListFileName = createTempListFile("oneVCF", testVCFFileName).getAbsolutePath();
-
-    @BeforeMethod
-    public void setUp() {
-        parsingEngine = new ParsingEngine(null);
-        RodBinding.resetNameCounter();
-        mytags = new Tags();
-        mytags.addPositionalTag(defaultTagString);
-    }
-
-    private class RodBindingCollectionArgProvider {
-        @Argument(fullName="input",doc="input",shortName="V")
-        public RodBindingCollection<VariantContext> input;
-    }
-
-    @Test
-    public void testStandardVCF() {
-        final String[] commandLine = new String[] {"-V", testVCFFileName};
-
-        parsingEngine.addArgumentSource( RodBindingCollectionArgProvider.class );
-        parsingEngine.parse( commandLine );
-        parsingEngine.validate();
-
-        final RodBindingCollectionArgProvider argProvider = new RodBindingCollectionArgProvider();
-        parsingEngine.loadArgumentsIntoObject( argProvider );
-
-        Assert.assertEquals(argProvider.input.getRodBindings().iterator().next().getSource(), testVCFFileName, "Argument is not correctly initialized");
-    }
-
-    @Test
-    public void testList() {
-        final String[] commandLine = new String[] {"-V", testListFileName};
-
-        parsingEngine.addArgumentSource(RodBindingCollectionArgProvider.class);
-        parsingEngine.parse( commandLine );
-        parsingEngine.validate();
-
-        final RodBindingCollectionArgProvider argProvider = new RodBindingCollectionArgProvider();
-        parsingEngine.loadArgumentsIntoObject( argProvider );
-
-        Assert.assertEquals(argProvider.input.getRodBindings().iterator().next().getSource(), testVCFFileName, "Argument is not correctly initialized");
-    }
-
-    @Test
-    public void testDefaultTagsInFile() throws IOException {
-
-        final File testFile = createTempListFile("RodBindingCollectionUnitTest.defaultTags", testVCFFileName);
-
-        ArgumentTypeDescriptor.getRodBindingsCollection(testFile, parsingEngine, VariantContext.class, "foo", mytags, "input");
-
-        final Collection<RodBinding> bindings = parsingEngine.getRodBindings();
-        Assert.assertNotNull(bindings);
-        Assert.assertEquals(bindings.size(), 1);
-
-        final RodBinding binding = bindings.iterator().next();
-        Assert.assertEquals(parsingEngine.getTags(binding), mytags);
-    }
-
-    @Test(expectedExceptions = UserException.BadArgumentValue.class)
-    public void testDuplicateEntriesInFile() throws IOException {
-
-        final File testFile = createTempListFile("RodBindingCollectionUnitTest.variantListWithDuplicates", testVCFFileName, testVCFFileName);
-
-        ArgumentTypeDescriptor.getRodBindingsCollection(testFile, parsingEngine, VariantContext.class, "foo", mytags, "input");
-    }
-
-    @Test(expectedExceptions = UserException.BadArgumentValue.class)
-    public void testValidateEmptyFile() throws IOException {
-        final File testFile = createTempListFile("RodBindingCollectionUnitTest.emptyVCFList");
-
-        ArgumentTypeDescriptor.getRodBindingsCollection(testFile, parsingEngine, VariantContext.class, "foo", mytags, "input");
-    }
-
-    @Test
-    public void testOverrideTagsInFile() throws IOException {
-        final File testFile = createTempListFile("RodBindingCollectionUnitTest.overrideTags", "foo " + testVCFFileName);
-
-        ArgumentTypeDescriptor.getRodBindingsCollection(testFile, parsingEngine, VariantContext.class, "foo", mytags, "input");
-
-        final Collection<RodBinding> bindings = parsingEngine.getRodBindings();
-        Assert.assertNotNull(bindings);
-        Assert.assertEquals(bindings.size(), 1);
-
-        final RodBinding binding = bindings.iterator().next();
-        Assert.assertNotEquals(parsingEngine.getTags(binding), mytags);
-    }
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/commandline/RodBindingUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/commandline/RodBindingUnitTest.java
deleted file mode 100644
index bffb1d2..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/commandline/RodBindingUnitTest.java
+++ /dev/null
@@ -1,82 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.commandline;
-
-import org.broadinstitute.gatk.utils.BaseTest;
-import htsjdk.variant.variantcontext.VariantContext;
-import org.testng.Assert;
-import org.testng.annotations.Test;
-import org.testng.annotations.BeforeMethod;
-
-/**
- * Test suite for the parsing engine.
- */
-public class RodBindingUnitTest extends BaseTest {
-    Tags mytags = new Tags();
-
-    @BeforeMethod
-    public void setUp() {
-        RodBinding.resetNameCounter();
-    }
-
-    @Test
-    public void testStandardRodBinding() {
-        RodBinding<VariantContext> b = new RodBinding<VariantContext>(VariantContext.class, "b", "foo", "vcf", mytags);
-        Assert.assertEquals(b.getName(), "b");
-        Assert.assertEquals(b.getType(), VariantContext.class);
-        Assert.assertEquals(b.getSource(), "foo");
-        Assert.assertEquals(b.getTribbleType(), "vcf");
-        Assert.assertEquals(b.isBound(), true);
-    }
-
-    @Test
-    public void testUnboundRodBinding() {
-        RodBinding<VariantContext> u = RodBinding.makeUnbound(VariantContext.class);
-        Assert.assertEquals(u.getName(), RodBinding.UNBOUND_VARIABLE_NAME);
-        Assert.assertEquals(u.getSource(), RodBinding.UNBOUND_SOURCE);
-        Assert.assertEquals(u.getType(), VariantContext.class);
-        Assert.assertEquals(u.getTribbleType(), RodBinding.UNBOUND_TRIBBLE_TYPE);
-        Assert.assertEquals(u.isBound(), false);
-    }
-
-    @Test
-    public void testMultipleBindings() {
-        String name = "binding";
-        RodBinding<VariantContext> b1 = new RodBinding<VariantContext>(VariantContext.class, name, "foo", "vcf", mytags);
-        Assert.assertEquals(b1.getName(), name);
-        Assert.assertEquals(b1.getType(), VariantContext.class);
-        Assert.assertEquals(b1.getSource(), "foo");
-        Assert.assertEquals(b1.getTribbleType(), "vcf");
-        Assert.assertEquals(b1.isBound(), true);
-
-        RodBinding<VariantContext> b2 = new RodBinding<VariantContext>(VariantContext.class, name, "foo", "vcf", mytags);
-        Assert.assertEquals(b2.getName(), name + "2");
-        Assert.assertEquals(b2.getType(), VariantContext.class);
-        Assert.assertEquals(b2.getSource(), "foo");
-        Assert.assertEquals(b2.getTribbleType(), "vcf");
-        Assert.assertEquals(b2.isBound(), true);
-    }
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/crypt/CryptUtilsUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/crypt/CryptUtilsUnitTest.java
deleted file mode 100644
index c44bfdc..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/crypt/CryptUtilsUnitTest.java
+++ /dev/null
@@ -1,199 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.crypt;
-
-import org.broadinstitute.gatk.utils.BaseTest;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-import org.broadinstitute.gatk.utils.exceptions.UserException;
-import org.testng.SkipException;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
-import org.testng.Assert;
-
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.IOException;
-import java.security.Key;
-import java.security.KeyPair;
-import java.security.PrivateKey;
-import java.security.PublicKey;
-import java.util.Arrays;
-
-public class CryptUtilsUnitTest extends BaseTest {
-
-    @Test
-    public void testGenerateValidKeyPairWithDefaultSettings() {
-        KeyPair keyPair = CryptUtils.generateKeyPair();
-        Assert.assertTrue(CryptUtils.keysDecryptEachOther(keyPair.getPrivate(), keyPair.getPublic()));
-    }
-
-    @DataProvider( name = "InvalidKeyPairSettings" )
-    public Object[][] invalidKeyPairSettingsDataProvider() {
-        return new Object[][] {
-            { -1, CryptUtils.DEFAULT_ENCRYPTION_ALGORITHM, CryptUtils.DEFAULT_RANDOM_NUMBER_GENERATION_ALGORITHM},
-            { CryptUtils.DEFAULT_KEY_LENGTH, "Made-up algorithm", CryptUtils.DEFAULT_RANDOM_NUMBER_GENERATION_ALGORITHM},
-            { CryptUtils.DEFAULT_KEY_LENGTH, CryptUtils.DEFAULT_ENCRYPTION_ALGORITHM, "Made-up algorithm"}
-        };
-    }
-
-    @Test( dataProvider = "InvalidKeyPairSettings", expectedExceptions = ReviewedGATKException.class )
-    public void testGenerateKeyPairWithInvalidSettings( int keyLength, String encryptionAlgorithm, String randomNumberGenerationAlgorithm ) {
-        KeyPair keyPair = CryptUtils.generateKeyPair(keyLength, encryptionAlgorithm, randomNumberGenerationAlgorithm);
-    }
-
-    @Test
-    public void testGATKMasterKeyPairMutualDecryption() {
-        if ( gatkPrivateKeyExistsButReadPermissionDenied() ) {
-            throw new SkipException(String.format("Skipping test %s because we do not have permission to read the GATK private key",
-                                    "testGATKMasterKeyPairMutualDecryption"));
-        }
-
-        Assert.assertTrue(CryptUtils.keysDecryptEachOther(CryptUtils.loadGATKMasterPrivateKey(), CryptUtils.loadGATKMasterPublicKey()));
-    }
-
-    @Test
-    public void testGATKMasterPrivateKeyWithDistributedPublicKeyMutualDecryption() {
-        if ( gatkPrivateKeyExistsButReadPermissionDenied() ) {
-            throw new SkipException(String.format("Skipping test %s because we do not have permission to read the GATK private key",
-                                    "testGATKMasterPrivateKeyWithDistributedPublicKeyMutualDecryption"));
-        }
-
-        Assert.assertTrue(CryptUtils.keysDecryptEachOther(CryptUtils.loadGATKMasterPrivateKey(), CryptUtils.loadGATKDistributedPublicKey()));
-    }
-
-    @Test
-    public void testKeyPairWriteThenRead() {
-        KeyPair keyPair = CryptUtils.generateKeyPair();
-        File privateKeyFile = createTempFile("testKeyPairWriteThenRead_private", "key");
-        File publicKeyFile = createTempFile("testKeyPairWriteThenRead_public", "key");
-
-        CryptUtils.writeKeyPair(keyPair, privateKeyFile, publicKeyFile);
-
-        assertKeysAreEqual(keyPair.getPrivate(), CryptUtils.readPrivateKey(privateKeyFile));
-        assertKeysAreEqual(keyPair.getPublic(), CryptUtils.readPublicKey(publicKeyFile));
-    }
-
-    @Test
-    public void testPublicKeyWriteThenReadFromFile() {
-        File keyFile = createTempFile("testPublicKeyWriteThenReadFromFile", "key");
-        PublicKey publicKey = CryptUtils.generateKeyPair().getPublic();
-
-        CryptUtils.writeKey(publicKey, keyFile);
-
-        assertKeysAreEqual(publicKey, CryptUtils.readPublicKey(keyFile));
-    }
-
-    @Test
-    public void testPublicKeyWriteThenReadFromStream() throws IOException {
-        File keyFile = createTempFile("testPublicKeyWriteThenReadFromStream", "key");
-        PublicKey publicKey = CryptUtils.generateKeyPair().getPublic();
-
-        CryptUtils.writeKey(publicKey, keyFile);
-
-        assertKeysAreEqual(publicKey, CryptUtils.readPublicKey(new FileInputStream(keyFile)));
-    }
-
-    @Test
-    public void testPrivateKeyWriteThenReadFromFile() {
-        File keyFile = createTempFile("testPrivateKeyWriteThenReadFromFile", "key");
-        PrivateKey privateKey = CryptUtils.generateKeyPair().getPrivate();
-
-        CryptUtils.writeKey(privateKey, keyFile);
-
-        assertKeysAreEqual(privateKey, CryptUtils.readPrivateKey(keyFile));
-    }
-
-    @Test
-    public void testPrivateKeyWriteThenReadFromStream() throws IOException {
-        File keyFile = createTempFile("testPrivateKeyWriteThenReadFromStream", "key");
-        PrivateKey privateKey = CryptUtils.generateKeyPair().getPrivate();
-
-        CryptUtils.writeKey(privateKey, keyFile);
-
-        assertKeysAreEqual(privateKey, CryptUtils.readPrivateKey(new FileInputStream(keyFile)));
-    }
-
-    @Test( expectedExceptions = UserException.CouldNotReadInputFile.class )
-    public void testReadNonExistentPublicKey() {
-        File nonExistentFile = new File("jdshgkdfhg.key");
-        Assert.assertFalse(nonExistentFile.exists());
-
-        CryptUtils.readPublicKey(nonExistentFile);
-    }
-
-    @Test( expectedExceptions = UserException.CouldNotReadInputFile.class )
-    public void testReadNonExistentPrivateKey() {
-        File nonExistentFile = new File("jdshgkdfhg.key");
-        Assert.assertFalse(nonExistentFile.exists());
-
-        CryptUtils.readPrivateKey(nonExistentFile);
-    }
-
-    @Test
-    public void testDecodePublicKey() {
-        PublicKey originalKey = CryptUtils.generateKeyPair().getPublic();
-        PublicKey decodedKey = CryptUtils.decodePublicKey(originalKey.getEncoded(), CryptUtils.DEFAULT_ENCRYPTION_ALGORITHM);
-        assertKeysAreEqual(originalKey, decodedKey);
-    }
-
-    @Test
-    public void testDecodePrivateKey() {
-        PrivateKey originalKey = CryptUtils.generateKeyPair().getPrivate();
-        PrivateKey decodedKey = CryptUtils.decodePrivateKey(originalKey.getEncoded(), CryptUtils.DEFAULT_ENCRYPTION_ALGORITHM);
-        assertKeysAreEqual(originalKey, decodedKey);
-    }
-
-    @Test
-    public void testLoadGATKMasterPrivateKey() {
-        if ( gatkPrivateKeyExistsButReadPermissionDenied() ) {
-            throw new SkipException(String.format("Skipping test %s because we do not have permission to read the GATK private key",
-                                    "testLoadGATKMasterPrivateKey"));
-        }
-
-        PrivateKey gatkMasterPrivateKey = CryptUtils.loadGATKMasterPrivateKey();
-    }
-
-    @Test
-    public void testLoadGATKMasterPublicKey() {
-        PublicKey gatkMasterPublicKey = CryptUtils.loadGATKMasterPublicKey();
-    }
-
-    @Test
-    public void testLoadGATKDistributedPublicKey() {
-        PublicKey gatkDistributedPublicKey = CryptUtils.loadGATKDistributedPublicKey();
-    }
-
-    private void assertKeysAreEqual( Key originalKey, Key keyFromDisk ) {
-        Assert.assertTrue(Arrays.equals(originalKey.getEncoded(), keyFromDisk.getEncoded()));
-        Assert.assertEquals(originalKey.getAlgorithm(), keyFromDisk.getAlgorithm());
-        Assert.assertEquals(originalKey.getFormat(), keyFromDisk.getFormat());
-    }
-
-    private boolean gatkPrivateKeyExistsButReadPermissionDenied() {
-        File gatkPrivateKey = new File(CryptUtils.GATK_MASTER_PRIVATE_KEY_FILE);
-        return gatkPrivateKey.exists() && ! gatkPrivateKey.canRead();
-    }
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/crypt/GATKKeyIntegrationTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/crypt/GATKKeyIntegrationTest.java
deleted file mode 100644
index 9cafd61..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/crypt/GATKKeyIntegrationTest.java
+++ /dev/null
@@ -1,157 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.crypt;
-
-import org.broadinstitute.gatk.engine.walkers.WalkerTest;
-import org.broadinstitute.gatk.engine.phonehome.GATKRunReport;
-import org.broadinstitute.gatk.utils.exceptions.UserException;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
-
-import java.util.Arrays;
-
-public class GATKKeyIntegrationTest extends WalkerTest {
-
-    public static final String BASE_COMMAND = String.format("-T PrintReads -R %s -I %s -o %%s",
-                                                            publicTestDir + "exampleFASTA.fasta",
-                                                            publicTestDir + "exampleBAM.bam");
-    public static final String MD5_UPON_SUCCESSFUL_RUN = "e7b4a5b62f9d4badef1cd07040011b2b";
-
-
-    private void runGATKKeyTest ( String testName, String etArg, String keyArg, Class expectedException, String md5 ) {
-        String command = BASE_COMMAND + String.format(" %s %s", etArg, keyArg);
-
-        WalkerTestSpec spec = expectedException != null ?
-                              new WalkerTestSpec(command, 1, expectedException) :
-                              new WalkerTestSpec(command, 1, Arrays.asList(md5));
-
-        spec.disableImplicitArgs(); // Turn off automatic inclusion of -et/-K args by WalkerTest
-        executeTest(testName, spec);
-    }
-
-    @Test
-    public void testValidKeyNoET() {
-        runGATKKeyTest("testValidKeyNoET",
-                       "-et " + GATKRunReport.PhoneHomeOption.NO_ET,
-                       "-K " + keysDataLocation + "valid.key",
-                       null,
-                       MD5_UPON_SUCCESSFUL_RUN);
-    }
-
-    @Test
-    public void testValidKeyETStdout() {
-        runGATKKeyTest("testValidKeyETStdout",
-                       "-et " + GATKRunReport.PhoneHomeOption.STDOUT,
-                       "-K " + keysDataLocation + "valid.key",
-                       null,
-                       MD5_UPON_SUCCESSFUL_RUN);
-    }
-
-    @Test
-    public void testValidKeyETStandard() {
-        runGATKKeyTest("testValidKeyETStandard",
-                       "",
-                       "-K " + keysDataLocation + "valid.key",
-                       null,
-                       MD5_UPON_SUCCESSFUL_RUN);
-    }
-
-    @Test
-    public void testNoKeyNoET() {
-        runGATKKeyTest("testNoKeyNoET",
-                       "-et " + GATKRunReport.PhoneHomeOption.NO_ET,
-                       "",
-                       UserException.class,
-                       null);
-    }
-
-    @Test
-    public void testNoKeyETStdout() {
-        runGATKKeyTest("testNoKeyETStdout",
-                       "-et " + GATKRunReport.PhoneHomeOption.STDOUT,
-                       "",
-                       UserException.class,
-                       null);
-    }
-
-    @Test
-    public void testNoKeyETStandard() {
-        runGATKKeyTest("testNoKeyETStandard",
-                       "",
-                       "",
-                       null,
-                       MD5_UPON_SUCCESSFUL_RUN);
-    }
-
-    @Test
-    public void testRevokedKey() {
-        runGATKKeyTest("testRevokedKey",
-                       "-et " + GATKRunReport.PhoneHomeOption.NO_ET,
-                       "-K " + keysDataLocation + "revoked.key",
-                       UserException.KeySignatureVerificationException.class,
-                       null);
-    }
-
-    @DataProvider(name = "CorruptKeyTestData")
-    public Object[][] corruptKeyDataProvider() {
-        return new Object[][] {
-            { "corrupt_empty.key",                  UserException.UnreadableKeyException.class },
-            { "corrupt_single_byte_file.key",       UserException.UnreadableKeyException.class },
-            { "corrupt_random_contents.key",        UserException.UnreadableKeyException.class },
-            { "corrupt_single_byte_deletion.key",   UserException.UnreadableKeyException.class },
-            { "corrupt_single_byte_insertion.key",  UserException.UnreadableKeyException.class },
-            { "corrupt_single_byte_change.key",     UserException.UnreadableKeyException.class },
-            { "corrupt_multi_byte_deletion.key",    UserException.UnreadableKeyException.class },
-            { "corrupt_multi_byte_insertion.key",   UserException.UnreadableKeyException.class },
-            { "corrupt_multi_byte_change.key",      UserException.UnreadableKeyException.class },
-            { "corrupt_bad_isize_field.key",        UserException.UnreadableKeyException.class },
-            { "corrupt_bad_crc.key",                UserException.UnreadableKeyException.class },
-            { "corrupt_no_email_address.key",       UserException.UnreadableKeyException.class },
-            { "corrupt_no_sectional_delimiter.key", UserException.UnreadableKeyException.class },
-            { "corrupt_no_signature.key",           UserException.UnreadableKeyException.class },
-            { "corrupt_bad_signature.key",          UserException.KeySignatureVerificationException.class },
-            { "corrupt_non_gzipped_valid_key.key",  UserException.UnreadableKeyException.class }
-        };
-    }
-
-    @Test(dataProvider = "CorruptKeyTestData")
-    public void testCorruptKey ( String corruptKeyName, Class expectedException ) {
-        runGATKKeyTest(String.format("testCorruptKey (%s)", corruptKeyName),
-                       "-et " + GATKRunReport.PhoneHomeOption.NO_ET,
-                       "-K " + keysDataLocation + corruptKeyName,
-                       expectedException,
-                       null);
-    }
-
-    @Test
-    public void testCorruptButNonRequiredKey() {
-        runGATKKeyTest("testCorruptButNonRequiredKey",
-                       "",
-                       "-K " + keysDataLocation + "corrupt_random_contents.key",
-                       null,
-                       MD5_UPON_SUCCESSFUL_RUN);
-    }
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/crypt/GATKKeyUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/crypt/GATKKeyUnitTest.java
deleted file mode 100644
index 5fd6475..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/crypt/GATKKeyUnitTest.java
+++ /dev/null
@@ -1,129 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.crypt;
-
-import org.broadinstitute.gatk.utils.BaseTest;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-import org.broadinstitute.gatk.utils.exceptions.UserException;
-import org.testng.SkipException;
-import org.testng.annotations.Test;
-import org.testng.Assert;
-
-import java.io.File;
-import java.security.KeyPair;
-import java.security.PrivateKey;
-import java.security.PublicKey;
-
-public class GATKKeyUnitTest extends BaseTest {
-
-    @Test
-    public void testCreateGATKKeyUsingMasterKeyPair() {
-        if ( gatkPrivateKeyExistsButReadPermissionDenied() ) {
-            throw new SkipException(String.format("Skipping test %s because we do not have permission to read the GATK private key",
-                                    "testCreateGATKKeyUsingMasterKeyPair"));
-        }
-
-        PrivateKey masterPrivateKey = CryptUtils.loadGATKMasterPrivateKey();
-        PublicKey masterPublicKey = CryptUtils.loadGATKMasterPublicKey();
-
-        // We should be able to create a valid GATKKey using our master key pair:
-        GATKKey key = new GATKKey(masterPrivateKey, masterPublicKey, "foo at bar.com");
-        Assert.assertTrue(key.isValid());
-    }
-
-    @Test
-    public void testCreateGATKKeyUsingMasterPrivateKeyAndDistributedPublicKey() {
-        if ( gatkPrivateKeyExistsButReadPermissionDenied() ) {
-            throw new SkipException(String.format("Skipping test %s because we do not have permission to read the GATK private key",
-                                    "testCreateGATKKeyUsingMasterPrivateKeyAndDistributedPublicKey"));
-        }
-
-        PrivateKey masterPrivateKey = CryptUtils.loadGATKMasterPrivateKey();
-        PublicKey distributedPublicKey = CryptUtils.loadGATKDistributedPublicKey();
-
-        // We should also be able to create a valid GATKKey using our master private
-        // key and the public key we distribute with the GATK:
-        GATKKey key = new GATKKey(masterPrivateKey, distributedPublicKey, "foo at bar.com");
-        Assert.assertTrue(key.isValid());
-    }
-
-    @Test( expectedExceptions = ReviewedGATKException.class )
-    public void testKeyPairMismatch() {
-        KeyPair firstKeyPair = CryptUtils.generateKeyPair();
-        KeyPair secondKeyPair = CryptUtils.generateKeyPair();
-
-        // Attempting to create a GATK Key with private and public keys that aren't part of the
-        // same key pair should immediately trigger a validation failure:
-        GATKKey key = new GATKKey(firstKeyPair.getPrivate(), secondKeyPair.getPublic(), "foo at bar.com");
-    }
-
-    @Test( expectedExceptions = ReviewedGATKException.class )
-    public void testEncryptionAlgorithmMismatch() {
-        KeyPair keyPair = CryptUtils.generateKeyPair(CryptUtils.DEFAULT_KEY_LENGTH, "DSA", CryptUtils.DEFAULT_RANDOM_NUMBER_GENERATION_ALGORITHM);
-
-        // Attempting to use a DSA private key to create an RSA signature should throw an error:
-        GATKKey key = new GATKKey(keyPair.getPrivate(), keyPair.getPublic(), "foo at bar.com", "SHA1withRSA");
-    }
-
-    @Test( expectedExceptions = UserException.class )
-    public void testInvalidEmailAddress() {
-        String emailAddressWithNulByte = new String(new byte[] { 0 });
-        KeyPair keyPair = CryptUtils.generateKeyPair();
-
-        // Email addresses cannot contain the NUL byte, since it's used as a sectional delimiter in the key file:
-        GATKKey key = new GATKKey(keyPair.getPrivate(), keyPair.getPublic(), emailAddressWithNulByte);
-    }
-
-    @Test
-    public void testCreateGATKKeyFromValidKeyFile() {
-        GATKKey key = new GATKKey(CryptUtils.loadGATKDistributedPublicKey(), new File(keysDataLocation + "valid.key"));
-        Assert.assertTrue(key.isValid());
-    }
-
-    @Test( expectedExceptions = UserException.UnreadableKeyException.class )
-    public void testCreateGATKKeyFromCorruptKeyFile() {
-        GATKKey key = new GATKKey(CryptUtils.loadGATKDistributedPublicKey(), new File(keysDataLocation + "corrupt_random_contents.key"));
-    }
-
-    @Test
-    public void testCreateGATKKeyFromRevokedKeyFile() {
-        GATKKey key = new GATKKey(CryptUtils.loadGATKDistributedPublicKey(), new File(keysDataLocation + "revoked.key"));
-        Assert.assertFalse(key.isValid());
-    }
-
-    @Test( expectedExceptions = UserException.CouldNotReadInputFile.class )
-    public void testCreateGATKKeyFromNonExistentFile() {
-        File nonExistentFile = new File("ghfdkgsdhg.key");
-        Assert.assertFalse(nonExistentFile.exists());
-
-        GATKKey key = new GATKKey(CryptUtils.loadGATKDistributedPublicKey(), nonExistentFile);
-    }
-
-    private boolean gatkPrivateKeyExistsButReadPermissionDenied() {
-        File gatkPrivateKey = new File(CryptUtils.GATK_MASTER_PRIVATE_KEY_FILE);
-        return gatkPrivateKey.exists() && ! gatkPrivateKey.canRead();
-    }
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/fasta/CachingIndexedFastaSequenceFileUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/fasta/CachingIndexedFastaSequenceFileUnitTest.java
deleted file mode 100644
index 3e877b9..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/fasta/CachingIndexedFastaSequenceFileUnitTest.java
+++ /dev/null
@@ -1,264 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.fasta;
-
-
-// the imports for unit testing.
-
-
-import htsjdk.samtools.reference.IndexedFastaSequenceFile;
-import htsjdk.samtools.reference.ReferenceSequence;
-import htsjdk.samtools.SAMSequenceRecord;
-import org.apache.commons.lang.StringUtils;
-import org.apache.log4j.Priority;
-import org.broadinstitute.gatk.utils.BaseTest;
-import org.broadinstitute.gatk.utils.exceptions.UserException;
-import org.testng.Assert;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
-
-import java.io.File;
-import java.io.FileNotFoundException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.List;
-import java.util.concurrent.Callable;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Executors;
-
-/**
- * Basic unit test for CachingIndexedFastaSequenceFile
- */
-public class CachingIndexedFastaSequenceFileUnitTest extends BaseTest {
-    private File simpleFasta = new File(publicTestDir + "/exampleFASTA.fasta");
-    private static final int STEP_SIZE = 1;
-    private final static boolean DEBUG = false;
-
-    //private static final List<Integer> QUERY_SIZES = Arrays.asList(1);
-    private static final List<Integer> QUERY_SIZES = Arrays.asList(1, 10, 100);
-    private static final List<Integer> CACHE_SIZES = Arrays.asList(-1, 100, 1000);
-
-    @DataProvider(name = "fastas")
-    public Object[][] createData1() {
-        List<Object[]> params = new ArrayList<Object[]>();
-        for ( File fasta : Arrays.asList(simpleFasta) ) {
-            for ( int cacheSize : CACHE_SIZES ) {
-                for ( int querySize : QUERY_SIZES ) {
-                    params.add(new Object[]{fasta, cacheSize, querySize});
-                }
-            }
-        }
-
-        return params.toArray(new Object[][]{});
-    }
-
-    private static long getCacheSize(final long cacheSizeRequested) {
-        return cacheSizeRequested == -1 ? CachingIndexedFastaSequenceFile.DEFAULT_CACHE_SIZE : cacheSizeRequested;
-    }
-
-    @Test(dataProvider = "fastas", enabled = true && ! DEBUG)
-    public void testCachingIndexedFastaReaderSequential1(File fasta, int cacheSize, int querySize) throws FileNotFoundException {
-        final CachingIndexedFastaSequenceFile caching = new CachingIndexedFastaSequenceFile(fasta, getCacheSize(cacheSize), true, false);
-
-        SAMSequenceRecord contig = caching.getSequenceDictionary().getSequence(0);
-        logger.warn(String.format("Checking contig %s length %d with cache size %d and query size %d",
-                contig.getSequenceName(), contig.getSequenceLength(), cacheSize, querySize));
-        testSequential(caching, fasta, querySize);
-    }
-
-    private void testSequential(final CachingIndexedFastaSequenceFile caching, final File fasta, final int querySize) throws FileNotFoundException {
-        Assert.assertTrue(caching.isPreservingCase(), "testSequential only works for case preserving CachingIndexedFastaSequenceFile readers");
-
-        final IndexedFastaSequenceFile uncached = new IndexedFastaSequenceFile(fasta);
-
-        SAMSequenceRecord contig = uncached.getSequenceDictionary().getSequence(0);
-        for ( int i = 0; i < contig.getSequenceLength(); i += STEP_SIZE ) {
-            int start = i;
-            int stop = start + querySize;
-            if ( stop <= contig.getSequenceLength() ) {
-                ReferenceSequence cachedVal = caching.getSubsequenceAt(contig.getSequenceName(), start, stop);
-                ReferenceSequence uncachedVal = uncached.getSubsequenceAt(contig.getSequenceName(), start, stop);
-
-                Assert.assertEquals(cachedVal.getName(), uncachedVal.getName());
-                Assert.assertEquals(cachedVal.getContigIndex(), uncachedVal.getContigIndex());
-                Assert.assertEquals(cachedVal.getBases(), uncachedVal.getBases());
-            }
-        }
-
-        // asserts for efficiency.  We are going to make contig.length / STEP_SIZE queries
-        // at each of range: start -> start + querySize against a cache with size of X.
-        // we expect to hit the cache each time range falls within X.  We expect a hit
-        // on the cache if range is within X.  Which should happen at least (X - query_size * 2) / STEP_SIZE
-        // times.
-        final int minExpectedHits = (int)Math.floor((Math.min(caching.getCacheSize(), contig.getSequenceLength()) - querySize * 2.0) / STEP_SIZE);
-        caching.printEfficiency(Priority.WARN);
-        Assert.assertTrue(caching.getCacheHits() >= minExpectedHits, "Expected at least " + minExpectedHits + " cache hits but only got " + caching.getCacheHits());
-
-    }
-
-    // Tests grabbing sequences around a middle cached value.
-    @Test(dataProvider = "fastas", enabled = true && ! DEBUG)
-    public void testCachingIndexedFastaReaderTwoStage(File fasta, int cacheSize, int querySize) throws FileNotFoundException {
-        final IndexedFastaSequenceFile uncached = new IndexedFastaSequenceFile(fasta);
-        final CachingIndexedFastaSequenceFile caching = new CachingIndexedFastaSequenceFile(fasta, getCacheSize(cacheSize), true, false);
-
-        SAMSequenceRecord contig = uncached.getSequenceDictionary().getSequence(0);
-
-        int middleStart = (contig.getSequenceLength() - querySize) / 2;
-        int middleStop = middleStart + querySize;
-
-        logger.warn(String.format("Checking contig %s length %d with cache size %d and query size %d with intermediate query",
-                contig.getSequenceName(), contig.getSequenceLength(), cacheSize, querySize));
-
-        for ( int i = 0; i < contig.getSequenceLength(); i += 10 ) {
-            int start = i;
-            int stop = start + querySize;
-            if ( stop <= contig.getSequenceLength() ) {
-                ReferenceSequence grabMiddle = caching.getSubsequenceAt(contig.getSequenceName(), middleStart, middleStop);
-                ReferenceSequence cachedVal = caching.getSubsequenceAt(contig.getSequenceName(), start, stop);
-                ReferenceSequence uncachedVal = uncached.getSubsequenceAt(contig.getSequenceName(), start, stop);
-
-                Assert.assertEquals(cachedVal.getName(), uncachedVal.getName());
-                Assert.assertEquals(cachedVal.getContigIndex(), uncachedVal.getContigIndex());
-                Assert.assertEquals(cachedVal.getBases(), uncachedVal.getBases());
-            }
-        }
-    }
-
-    @DataProvider(name = "ParallelFastaTest")
-    public Object[][] createParallelFastaTest() {
-        List<Object[]> params = new ArrayList<Object[]>();
-
-        for ( File fasta : Arrays.asList(simpleFasta) ) {
-            for ( int cacheSize : CACHE_SIZES ) {
-                for ( int querySize : QUERY_SIZES ) {
-                    for ( int nt : Arrays.asList(1, 2, 3, 4) ) {
-                        params.add(new Object[]{fasta, cacheSize, querySize, nt});
-                    }
-                }
-            }
-        }
-
-        return params.toArray(new Object[][]{});
-    }
-
-
-    @Test(dataProvider = "ParallelFastaTest", enabled = true && ! DEBUG, timeOut = 60000)
-    public void testCachingIndexedFastaReaderParallel(final File fasta, final int cacheSize, final int querySize, final int nt) throws FileNotFoundException, InterruptedException {
-        final CachingIndexedFastaSequenceFile caching = new CachingIndexedFastaSequenceFile(fasta, getCacheSize(cacheSize), true, false);
-
-        logger.warn(String.format("Parallel caching index fasta reader test cacheSize %d querySize %d nt %d", caching.getCacheSize(), querySize, nt));
-        for ( int iterations = 0; iterations < 1; iterations++ ) {
-            final ExecutorService executor = Executors.newFixedThreadPool(nt);
-            final Collection<Callable<Object>> tasks = new ArrayList<Callable<Object>>(nt);
-            for ( int i = 0; i < nt; i++ )
-                tasks.add(new Callable<Object>() {
-                    @Override
-                    public Object call() throws Exception {
-                        testSequential(caching, fasta, querySize);
-                        return null;
-                    }
-                });
-            executor.invokeAll(tasks);
-            executor.shutdownNow();
-        }
-    }
-
-    // make sure some bases are lower case and some are upper case
-    @Test(enabled = true)
-    public void testMixedCasesInExample() throws FileNotFoundException, InterruptedException {
-        final IndexedFastaSequenceFile original = new IndexedFastaSequenceFile(new File(exampleFASTA));
-        final CachingIndexedFastaSequenceFile casePreserving = new CachingIndexedFastaSequenceFile(new File(exampleFASTA), true);
-        final CachingIndexedFastaSequenceFile allUpper = new CachingIndexedFastaSequenceFile(new File(exampleFASTA));
-
-        int nMixedCase = 0;
-        for ( SAMSequenceRecord contig : original.getSequenceDictionary().getSequences() ) {
-            nMixedCase += testCases(original, casePreserving, allUpper, contig.getSequenceName(), -1, -1);
-
-            final int step = 100;
-            for ( int lastPos = step; lastPos < contig.getSequenceLength(); lastPos += step ) {
-                testCases(original, casePreserving, allUpper, contig.getSequenceName(), lastPos - step, lastPos);
-            }
-        }
-
-        Assert.assertTrue(nMixedCase > 0, "No mixed cases sequences found in file.  Unexpected test state");
-    }
-
-    private int testCases(final IndexedFastaSequenceFile original,
-                          final IndexedFastaSequenceFile casePreserving,
-                          final IndexedFastaSequenceFile allUpper,
-                          final String contig, final int start, final int stop ) {
-        final String orig = fetchBaseString(original, contig, start, stop);
-        final String keptCase = fetchBaseString(casePreserving, contig, start, stop);
-        final String upperCase = fetchBaseString(allUpper, contig, start, stop).toUpperCase();
-
-        final String origToUpper = orig.toUpperCase();
-        if ( ! orig.equals(origToUpper) ) {
-            Assert.assertEquals(keptCase, orig, "Case preserving operation not equal to the original case for contig " + contig);
-            Assert.assertEquals(upperCase, origToUpper, "All upper case reader not equal to the uppercase of original case for contig " + contig);
-            return 1;
-        } else {
-            return 0;
-        }
-    }
-
-    private String fetchBaseString(final IndexedFastaSequenceFile reader, final String contig, final int start, final int stop) {
-        if ( start == -1 )
-            return new String(reader.getSequence(contig).getBases());
-        else
-            return new String(reader.getSubsequenceAt(contig, start, stop).getBases());
-    }
-
-    @Test(enabled = true)
-    public void testIupacChanges() throws FileNotFoundException, InterruptedException {
-        final String testFasta = privateTestDir + "iupacFASTA.fasta";
-        final CachingIndexedFastaSequenceFile iupacPreserving = new CachingIndexedFastaSequenceFile(new File(testFasta), CachingIndexedFastaSequenceFile.DEFAULT_CACHE_SIZE, false, true);
-        final CachingIndexedFastaSequenceFile makeNs = new CachingIndexedFastaSequenceFile(new File(testFasta));
-
-        int preservingNs = 0;
-        int changingNs = 0;
-        for ( SAMSequenceRecord contig : iupacPreserving.getSequenceDictionary().getSequences() ) {
-            final String sPreserving = fetchBaseString(iupacPreserving, contig.getSequenceName(), 0, 15000);
-            preservingNs += StringUtils.countMatches(sPreserving, "N");
-
-            final String sChanging = fetchBaseString(makeNs, contig.getSequenceName(), 0, 15000);
-            changingNs += StringUtils.countMatches(sChanging, "N");
-        }
-
-        Assert.assertEquals(changingNs, preservingNs + 4);
-    }
-
-    @Test(enabled = true, expectedExceptions = {UserException.class})
-    public void testFailOnBadBase() throws FileNotFoundException, InterruptedException {
-        final String testFasta = privateTestDir + "problematicFASTA.fasta";
-        final CachingIndexedFastaSequenceFile fasta = new CachingIndexedFastaSequenceFile(new File(testFasta));
-
-        for ( SAMSequenceRecord contig : fasta.getSequenceDictionary().getSequences() ) {
-            fetchBaseString(fasta, contig.getSequenceName(), -1, -1);
-        }
-    }
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/file/FSLockWithSharedUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/file/FSLockWithSharedUnitTest.java
deleted file mode 100644
index 63d98a2..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/file/FSLockWithSharedUnitTest.java
+++ /dev/null
@@ -1,60 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.file;
-
-import org.broadinstitute.gatk.utils.BaseTest;
-import org.broadinstitute.gatk.utils.exceptions.UserException;
-import org.testng.annotations.Test;
-
-import java.io.File;
-
-public class FSLockWithSharedUnitTest extends BaseTest {
-
-    private static final int MAX_EXPECTED_LOCK_ACQUISITION_TIME = FSLockWithShared.DEFAULT_LOCK_ACQUISITION_TIMEOUT_IN_MILLISECONDS +
-                                                                  FSLockWithShared.THREAD_TERMINATION_TIMEOUT_IN_MILLISECONDS;
-
-    /**
-     * Test to ensure that we're never spending more than the maximum configured amount of time in lock acquisition calls.
-     */
-    @Test( timeOut = MAX_EXPECTED_LOCK_ACQUISITION_TIME + 10 * 1000 )
-    public void testLockAcquisitionTimeout() {
-        final File lockFile = createTempFile("FSLockWithSharedUnitTest", ".lock");
-        final FSLockWithShared lock = new FSLockWithShared(lockFile);
-        boolean lockAcquisitionSucceeded = false;
-
-        try {
-            lockAcquisitionSucceeded = lock.sharedLock();
-        }
-        catch ( UserException e ) {
-            logger.info("Caught UserException from lock acquisition call: lock acquisition must have timed out. Message: " + e.getMessage());
-        }
-        finally {
-            if ( lockAcquisitionSucceeded ) {
-                lock.unlock();
-            }
-        }
-    }
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/fragments/FragmentUtilsBenchmark.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/fragments/FragmentUtilsBenchmark.java
deleted file mode 100644
index f388d14..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/fragments/FragmentUtilsBenchmark.java
+++ /dev/null
@@ -1,81 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.fragments;
-
-import com.google.caliper.Param;
-import com.google.caliper.SimpleBenchmark;
-import htsjdk.samtools.SAMFileHeader;
-import org.broadinstitute.gatk.utils.GenomeLoc;
-import org.broadinstitute.gatk.utils.GenomeLocParser;
-import org.broadinstitute.gatk.utils.pileup.ReadBackedPileup;
-import org.broadinstitute.gatk.utils.sam.ArtificialSAMUtils;
-
-import java.util.ArrayList;
-import java.util.List;
-
-/**
- * Caliper microbenchmark of fragment pileup
- */
-public class FragmentUtilsBenchmark extends SimpleBenchmark {
-    List<ReadBackedPileup> pileups;
-
-    @Param({"0", "4", "30", "150", "1000"})
-    int pileupSize; // set automatically by framework
-
-    @Param({"200", "400"})
-    int insertSize; // set automatically by framework
-
-    @Override protected void setUp() {
-        final int nPileupsToGenerate = 100;
-        pileups = new ArrayList<ReadBackedPileup>(nPileupsToGenerate);
-        SAMFileHeader header = ArtificialSAMUtils.createArtificialSamHeader(1, 1, 1000);
-        GenomeLocParser genomeLocParser;
-        genomeLocParser = new GenomeLocParser(header.getSequenceDictionary());
-        GenomeLoc loc = genomeLocParser.createGenomeLoc("chr1", 50);
-        final int readLen = 100;
-
-        for ( int pileupN = 0; pileupN < nPileupsToGenerate; pileupN++ ) {
-            ReadBackedPileup rbp = ArtificialSAMUtils.createReadBackedPileup(header, loc, readLen, insertSize, pileupSize);
-            pileups.add(rbp);
-        }
-    }
-
-//    public void timeOriginal(int rep) {
-//        run(rep, FragmentUtils.FragmentMatchingAlgorithm.ORIGINAL);
-//    }
-
-    public void timeSkipNonOverlapping(int rep) {
-        int nFrags = 0;
-        for ( int i = 0; i < rep; i++ ) {
-            for ( ReadBackedPileup rbp : pileups )
-                nFrags += FragmentUtils.create(rbp).getOverlappingPairs().size();
-        }
-    }
-
-    public static void main(String[] args) {
-        com.google.caliper.Runner.main(FragmentUtilsBenchmark.class, args);
-    }
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/fragments/FragmentUtilsUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/fragments/FragmentUtilsUnitTest.java
deleted file mode 100644
index f9f9ba4..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/fragments/FragmentUtilsUnitTest.java
+++ /dev/null
@@ -1,390 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.fragments;
-
-import htsjdk.samtools.SAMFileHeader;
-import htsjdk.samtools.TextCigarCodec;
-import org.broadinstitute.gatk.utils.BaseTest;
-import org.broadinstitute.gatk.utils.Utils;
-import org.broadinstitute.gatk.utils.pileup.PileupElement;
-import org.broadinstitute.gatk.utils.pileup.ReadBackedPileup;
-import org.broadinstitute.gatk.utils.pileup.ReadBackedPileupImpl;
-import org.broadinstitute.gatk.utils.recalibration.EventType;
-import org.broadinstitute.gatk.utils.sam.ArtificialSAMUtils;
-import org.broadinstitute.gatk.utils.sam.GATKSAMReadGroupRecord;
-import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
-import org.testng.Assert;
-import org.testng.annotations.BeforeTest;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
-
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-
-/**
- * Test routines for read-backed pileup.
- */
-public class FragmentUtilsUnitTest extends BaseTest {
-    private static SAMFileHeader header;
-    private static GATKSAMReadGroupRecord rgForMerged;
-    private final static boolean DEBUG = false;
-
-    private class FragmentUtilsTest extends TestDataProvider {
-        List<TestState> statesForPileup = new ArrayList<TestState>();
-        List<TestState> statesForReads = new ArrayList<TestState>();
-
-        private FragmentUtilsTest(String name, int readLen, int leftStart, int rightStart,
-                                  boolean leftIsFirst, boolean leftIsNegative) {
-            super(FragmentUtilsTest.class, String.format("%s-leftIsFirst:%b-leftIsNegative:%b", name, leftIsFirst, leftIsNegative));
-
-            List<GATKSAMRecord> pair = ArtificialSAMUtils.createPair(header, "readpair", readLen, leftStart, rightStart, leftIsFirst, leftIsNegative);
-            GATKSAMRecord left = pair.get(0);
-            GATKSAMRecord right = pair.get(1);
-
-            for ( int pos = leftStart; pos < rightStart + readLen; pos++) {
-                boolean posCoveredByLeft = pos >= left.getAlignmentStart() && pos <= left.getAlignmentEnd();
-                boolean posCoveredByRight = pos >= right.getAlignmentStart() && pos <= right.getAlignmentEnd();
-
-                if ( posCoveredByLeft || posCoveredByRight ) {
-                    List<GATKSAMRecord> reads = new ArrayList<GATKSAMRecord>();
-                    List<Integer> offsets = new ArrayList<Integer>();
-
-                    if ( posCoveredByLeft ) {
-                        reads.add(left);
-                        offsets.add(pos - left.getAlignmentStart());
-                    }
-
-                    if ( posCoveredByRight ) {
-                        reads.add(right);
-                        offsets.add(pos - right.getAlignmentStart());
-                    }
-
-                    boolean shouldBeFragment = posCoveredByLeft && posCoveredByRight;
-                    ReadBackedPileup pileup = new ReadBackedPileupImpl(null, reads, offsets);
-                    TestState testState = new TestState(shouldBeFragment ? 0 : 1, shouldBeFragment ? 1 : 0, pileup, null);
-                    statesForPileup.add(testState);
-                }
-
-                TestState testState = left.getAlignmentEnd() >= right.getAlignmentStart() ? new TestState(0, 1, null, pair) : new TestState(2, 0, null, pair);
-                statesForReads.add(testState);
-            }
-        }
-    }
-
-    private class TestState {
-        int expectedSingletons, expectedPairs;
-        ReadBackedPileup pileup;
-        List<GATKSAMRecord> rawReads;
-
-        private TestState(final int expectedSingletons, final int expectedPairs, final ReadBackedPileup pileup, final List<GATKSAMRecord> rawReads) {
-            this.expectedSingletons = expectedSingletons;
-            this.expectedPairs = expectedPairs;
-            this.pileup = pileup;
-            this.rawReads = rawReads;
-        }
-    }
-
-    @DataProvider(name = "fragmentUtilsTest")
-    public Object[][] createTests() {
-        for ( boolean leftIsFirst : Arrays.asList(true, false) ) {
-            for ( boolean leftIsNegative : Arrays.asList(true, false) ) {
-                // Overlapping pair
-                // ---->        [first]
-                //   <---       [second]
-                new FragmentUtilsTest("overlapping-pair", 10, 1, 5, leftIsFirst, leftIsNegative);
-
-                // Non-overlapping pair
-                // ---->
-                //          <----
-                new FragmentUtilsTest("nonoverlapping-pair", 10, 1, 15, leftIsFirst, leftIsNegative);
-            }
-        }
-
-        return FragmentUtilsTest.getTests(FragmentUtilsTest.class);
-    }
-
-    @Test(enabled = !DEBUG, dataProvider = "fragmentUtilsTest")
-    public void testAsPileup(FragmentUtilsTest test) {
-        for ( TestState testState : test.statesForPileup ) {
-            ReadBackedPileup rbp = testState.pileup;
-            FragmentCollection<PileupElement> fp = FragmentUtils.create(rbp);
-            Assert.assertEquals(fp.getOverlappingPairs().size(), testState.expectedPairs);
-            Assert.assertEquals(fp.getSingletonReads().size(), testState.expectedSingletons);
-        }
-    }
-
-    @Test(enabled = !DEBUG, dataProvider = "fragmentUtilsTest")
-    public void testAsListOfReadsFromPileup(FragmentUtilsTest test) {
-        for ( TestState testState : test.statesForPileup ) {
-            FragmentCollection<GATKSAMRecord> fp = FragmentUtils.create(testState.pileup.getReads());
-            Assert.assertEquals(fp.getOverlappingPairs().size(), testState.expectedPairs);
-            Assert.assertEquals(fp.getSingletonReads().size(), testState.expectedSingletons);
-        }
-    }
-
-    @Test(enabled = !DEBUG, dataProvider = "fragmentUtilsTest")
-    public void testAsListOfReads(FragmentUtilsTest test) {
-        for ( TestState testState : test.statesForReads ) {
-            FragmentCollection<GATKSAMRecord> fp = FragmentUtils.create(testState.rawReads);
-            Assert.assertEquals(fp.getOverlappingPairs().size(), testState.expectedPairs);
-            Assert.assertEquals(fp.getSingletonReads().size(), testState.expectedSingletons);
-        }
-    }
-
-    @Test(enabled = !DEBUG, expectedExceptions = IllegalArgumentException.class)
-    public void testOutOfOrder() {
-        final List<GATKSAMRecord> pair = ArtificialSAMUtils.createPair(header, "readpair", 100, 1, 50, true, true);
-        final GATKSAMRecord left = pair.get(0);
-        final GATKSAMRecord right = pair.get(1);
-        final List<GATKSAMRecord> reads = Arrays.asList(right, left); // OUT OF ORDER!
-        final List<Integer> offsets = Arrays.asList(0, 50);
-        final ReadBackedPileup pileup = new ReadBackedPileupImpl(null, reads, offsets);
-        FragmentUtils.create(pileup); // should throw exception
-    }
-
-    @BeforeTest
-    public void setup() {
-        header = ArtificialSAMUtils.createArtificialSamHeader(1,1,1000);
-        rgForMerged = new GATKSAMReadGroupRecord("RG1");
-    }
-
-    @DataProvider(name = "MergeFragmentsTest")
-    public Object[][] createMergeFragmentsTest() throws Exception {
-        List<Object[]> tests = new ArrayList<Object[]>();
-
-        final String leftFlank = "CCC";
-        final String rightFlank = "AAA";
-        final String allOverlappingBases = "ACGTACGTGGAACCTTAG";
-        for ( int overlapSize = 1; overlapSize < allOverlappingBases.length(); overlapSize++ ) {
-            final String overlappingBases = allOverlappingBases.substring(0, overlapSize);
-            final byte[] overlappingBaseQuals = new byte[overlapSize];
-            for ( int i = 0; i < overlapSize; i++ ) overlappingBaseQuals[i] = (byte)(i + 30);
-            final GATKSAMRecord read1  = makeOverlappingRead(leftFlank, 20, overlappingBases, overlappingBaseQuals, "", 30, 1);
-            final GATKSAMRecord read2  = makeOverlappingRead("", 20, overlappingBases, overlappingBaseQuals, rightFlank, 30, leftFlank.length() + 1);
-            final GATKSAMRecord merged = makeOverlappingRead(leftFlank, 20, overlappingBases, overlappingBaseQuals, rightFlank, 30, 1);
-            tests.add(new Object[]{"equalQuals", read1, read2, merged});
-
-            // test that the merged read base quality is the
-            tests.add(new Object[]{"lowQualLeft", modifyBaseQualities(read1, leftFlank.length(), overlapSize), read2, merged});
-            tests.add(new Object[]{"lowQualRight", read1, modifyBaseQualities(read2, 0, overlapSize), merged});
-        }
-
-        return tests.toArray(new Object[][]{});
-    }
-
-    private GATKSAMRecord modifyBaseQualities(final GATKSAMRecord read, final int startOffset, final int length) throws Exception {
-        final GATKSAMRecord readWithLowQuals = (GATKSAMRecord)read.clone();
-        final byte[] withLowQuals = Arrays.copyOf(read.getBaseQualities(), read.getBaseQualities().length);
-        for ( int i = startOffset; i < startOffset + length; i++ )
-            withLowQuals[i] = (byte)(read.getBaseQualities()[i] + (i % 2 == 0 ? -1 : 0));
-        readWithLowQuals.setBaseQualities(withLowQuals);
-        return readWithLowQuals;
-    }
-
-    private GATKSAMRecord makeOverlappingRead(final String leftFlank, final int leftQual, final String overlapBases,
-                                              final byte[] overlapQuals, final String rightFlank, final int rightQual,
-                                              final int alignmentStart) {
-        final String bases = leftFlank + overlapBases + rightFlank;
-        final int readLength = bases.length();
-        final GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(header, "myRead", 0, alignmentStart, readLength);
-        final byte[] leftQuals = Utils.dupBytes((byte) leftQual, leftFlank.length());
-        final byte[] rightQuals = Utils.dupBytes((byte) rightQual, rightFlank.length());
-        final byte[] quals = Utils.concat(leftQuals, overlapQuals, rightQuals);
-        read.setCigarString(readLength + "M");
-        read.setReadBases(bases.getBytes());
-        for ( final EventType type : EventType.values() )
-            read.setBaseQualities(quals, type);
-        read.setReadGroup(rgForMerged);
-        read.setMappingQuality(60);
-        return read;
-    }
-
-    @Test(enabled = !DEBUG, dataProvider = "MergeFragmentsTest")
-    public void testMergingTwoReads(final String name, final GATKSAMRecord read1, final GATKSAMRecord read2, final GATKSAMRecord expectedMerged) {
-        final GATKSAMRecord actual = FragmentUtils.mergeOverlappingPairedFragments(read1, read2);
-
-        if ( expectedMerged == null ) {
-            Assert.assertNull(actual, "Expected reads not to merge, but got non-null result from merging");
-        } else {
-            Assert.assertTrue(actual.isStrandless(), "Merged reads should be strandless");
-            Assert.assertNotNull(actual, "Expected reads to merge, but got null result from merging");
-            // I really care about the bases, the quals, the CIGAR, and the read group tag
-            Assert.assertEquals(actual.getCigarString(), expectedMerged.getCigarString());
-            Assert.assertEquals(actual.getReadBases(), expectedMerged.getReadBases());
-            Assert.assertEquals(actual.getReadGroup(), expectedMerged.getReadGroup());
-            Assert.assertEquals(actual.getMappingQuality(), expectedMerged.getMappingQuality());
-            for ( final EventType type : EventType.values() )
-                Assert.assertEquals(actual.getBaseQualities(type), expectedMerged.getBaseQualities(type), "Failed base qualities for event type " + type);
-        }
-    }
-
-    @Test(enabled = !DEBUG)
-    public void testHardClippingBeforeMerge() {
-        final String common = Utils.dupString("A", 10);
-        final byte[] commonQuals = Utils.dupBytes((byte)30, common.length());
-        final String adapter    = "NNNN";
-
-        final GATKSAMRecord read1 = makeOverlappingRead(adapter, 30, common, commonQuals, "", 30, 10);
-        final GATKSAMRecord read2 = makeOverlappingRead("", 30, common, commonQuals, adapter, 30, 10);
-        final GATKSAMRecord expectedMerged = makeOverlappingRead("", 30, common, commonQuals, "", 30, 10);
-        read1.setCigarString("4S" + common.length() + "M");
-        read1.setProperPairFlag(true);
-        read1.setReadPairedFlag(true);
-        read1.setFirstOfPairFlag(true);
-        read1.setReadNegativeStrandFlag(true);
-        read1.setMateNegativeStrandFlag(false);
-        read1.setMateAlignmentStart(read2.getAlignmentStart());
-        read2.setCigarString(common.length() + "M4S");
-        read2.setProperPairFlag(true);
-        read2.setReadPairedFlag(true);
-        read2.setFirstOfPairFlag(false);
-        read2.setReadNegativeStrandFlag(false);
-        read2.setMateNegativeStrandFlag(true);
-        read2.setMateAlignmentStart(read1.getAlignmentStart());
-
-        final int insertSize = common.length() - 1;
-        read1.setInferredInsertSize(-insertSize);
-        read2.setInferredInsertSize(insertSize);
-
-        final GATKSAMRecord actual = FragmentUtils.mergeOverlappingPairedFragments(read1, read2);
-        Assert.assertEquals(actual.getCigarString(), expectedMerged.getCigarString());
-        Assert.assertEquals(actual.getReadBases(), expectedMerged.getReadBases());
-        Assert.assertEquals(actual.getReadGroup(), expectedMerged.getReadGroup());
-        Assert.assertEquals(actual.getMappingQuality(), expectedMerged.getMappingQuality());
-        for ( final EventType type : EventType.values() )
-            Assert.assertEquals(actual.getBaseQualities(type), expectedMerged.getBaseQualities(type), "Failed base qualities for event type " + type);
-    }
-
-    @Test(enabled = true)
-    public void testHardClippingBeforeMergeResultingInCompletelyContainedSecondRead() {
-        final String adapter    = "NNNN";
-
-        final GATKSAMRecord read1 = makeOverlappingRead(adapter, 30, Utils.dupString("A", 10), Utils.dupBytes((byte)30, 10), "", 30, 10);
-        final GATKSAMRecord read2 = makeOverlappingRead("", 30, Utils.dupString("A", 7), Utils.dupBytes((byte)30, 7), adapter, 30, 10);
-        read1.setCigarString("4S10M");
-        read1.setProperPairFlag(true);
-        read1.setFirstOfPairFlag(true);
-        read1.setReadNegativeStrandFlag(true);
-        read1.setMateAlignmentStart(10);
-        read2.setCigarString("7M4S");
-        read2.setProperPairFlag(true);
-        read2.setFirstOfPairFlag(false);
-        read2.setReadNegativeStrandFlag(false);
-
-        final int insertSize = 7 - 1;
-        read1.setInferredInsertSize(insertSize);
-        read2.setInferredInsertSize(-insertSize);
-
-        final GATKSAMRecord actual = FragmentUtils.mergeOverlappingPairedFragments(read1, read2);
-        Assert.assertNull(actual);
-    }
-
-    @DataProvider(name = "MergeFragmentsOffContig")
-    public Object[][] makeMergeFragmentsOffContig() throws Exception {
-        List<Object[]> tests = new ArrayList<>();
-
-        for ( final int pre1 : Arrays.asList(0, 50)) {
-            for ( final int post1 : Arrays.asList(0, 50)) {
-                for ( final int pre2 : Arrays.asList(0, 50)) {
-                    for ( final int post2 : Arrays.asList(0, 50)) {
-                        tests.add(new Object[]{pre1, post1, pre2, post2});
-                    }
-                }
-            }
-        }
-
-        return tests.toArray(new Object[][]{});
-    }
-
-    @Test(dataProvider = "MergeFragmentsOffContig")
-    public void testMergeFragmentsOffContig(final int pre1, final int post1, final int pre2, final int post2) {
-        final int contigSize = 10;
-        final SAMFileHeader header = ArtificialSAMUtils.createArtificialSamHeader(1, 0, contigSize);
-
-        final GATKSAMRecord read1 = createReadOffContig(header, false, pre1, post1);
-        final GATKSAMRecord read2 = createReadOffContig(header, true, pre2, post2);
-
-        final GATKSAMRecord merged = FragmentUtils.mergeOverlappingPairedFragments(read1, read2);
-    }
-
-    private GATKSAMRecord createReadOffContig(final SAMFileHeader header, final boolean negStrand, final int pre, final int post) {
-        final int contigLen = header.getSequence(0).getSequenceLength();
-        final int readLen = pre + contigLen + post;
-        final GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(header, "read1", 0, 1, readLen);
-        read.setAlignmentStart(1);
-        read.setCigar(TextCigarCodec.getSingleton().decode(pre + "S" + contigLen + "M" + post + "S"));
-        read.setBaseQualities(Utils.dupBytes((byte) 30, readLen));
-        read.setReadBases(Utils.dupBytes((byte)'A', readLen));
-        read.setMappingQuality(60);
-        read.setMateAlignmentStart(1);
-        read.setProperPairFlag(true);
-        read.setReadPairedFlag(true);
-        read.setInferredInsertSize(30);
-        read.setReadNegativeStrandFlag(negStrand);
-        read.setMateNegativeStrandFlag(! negStrand);
-        read.setReadGroup(new GATKSAMReadGroupRecord("foo"));
-        return read;
-    }
-
-
-    private static final byte highQuality = 30;
-    private static final byte overlappingQuality = 20;
-
-    @DataProvider(name = "AdjustFragmentsTest")
-    public Object[][] createAdjustFragmentsTest() throws Exception {
-        List<Object[]> tests = new ArrayList<Object[]>();
-
-        final String leftFlank = "CCC";
-        final String rightFlank = "AAA";
-        final String allOverlappingBases = "ACGTACGTGGAACCTTAG";
-        for ( int overlapSize = 1; overlapSize < allOverlappingBases.length(); overlapSize++ ) {
-            final String overlappingBases = allOverlappingBases.substring(0, overlapSize);
-            final byte[] overlappingBaseQuals = new byte[overlapSize];
-            for ( int i = 0; i < overlapSize; i++ ) overlappingBaseQuals[i] = highQuality;
-            final GATKSAMRecord read1  = makeOverlappingRead(leftFlank, highQuality, overlappingBases, overlappingBaseQuals, "", highQuality, 1);
-            final GATKSAMRecord read2  = makeOverlappingRead("", highQuality, overlappingBases, overlappingBaseQuals, rightFlank, highQuality, leftFlank.length() + 1);
-            tests.add(new Object[]{read1, read2, overlapSize});
-        }
-
-        return tests.toArray(new Object[][]{});
-    }
-
-    @Test(enabled = !DEBUG, dataProvider = "AdjustFragmentsTest")
-    public void testAdjustingTwoReads(final GATKSAMRecord read1, final GATKSAMRecord read2, final int overlapSize) {
-        FragmentUtils.adjustQualsOfOverlappingPairedFragments(read1, read2);
-
-        for ( int i = 0; i < read1.getReadLength() - overlapSize; i++ )
-            Assert.assertEquals(read1.getBaseQualities()[i], highQuality);
-        for ( int i = read1.getReadLength() - overlapSize; i < read1.getReadLength(); i++ )
-            Assert.assertEquals(read1.getBaseQualities()[i], overlappingQuality);
-
-        for ( int i = 0; i < overlapSize; i++ )
-            Assert.assertEquals(read2.getBaseQualities()[i], overlappingQuality);
-        for ( int i = overlapSize; i < read2.getReadLength(); i++ )
-            Assert.assertEquals(read2.getBaseQualities()[i], highQuality);
-    }
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/haplotype/EventMapUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/haplotype/EventMapUnitTest.java
deleted file mode 100644
index 6baf6ef..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/haplotype/EventMapUnitTest.java
+++ /dev/null
@@ -1,203 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.haplotype;
-
-import htsjdk.samtools.TextCigarCodec;
-import org.broadinstitute.gatk.utils.BaseTest;
-import org.broadinstitute.gatk.utils.GenomeLoc;
-import org.broadinstitute.gatk.utils.UnvalidatingGenomeLoc;
-import org.broadinstitute.gatk.utils.Utils;
-import org.broadinstitute.gatk.utils.variant.GATKVariantContextUtils;
-import htsjdk.variant.variantcontext.VariantContext;
-import org.testng.Assert;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
-
-import java.util.*;
-
-public class EventMapUnitTest extends BaseTest {
-    private final static String CHR = "20";
-    private final static String NAME = "foo";
-    
-    @DataProvider(name = "MyDataProvider")
-         public Object[][] makeMyDataProvider() {
-        List<Object[]> tests = new ArrayList<Object[]>();
-
-        final List<String> SNP_ALLELES = Arrays.asList("A", "C");
-        final List<String> INS_ALLELES = Arrays.asList("A", "ACGTGA");
-        final List<String> DEL_ALLELES = Arrays.asList("ACGTA", "C");
-        final List<List<String>> allAlleles = Arrays.asList(SNP_ALLELES, INS_ALLELES, DEL_ALLELES);
-        for ( final int leftNotClump : Arrays.asList(-1, 3) ) {
-            for ( final int middleNotClump : Arrays.asList(-1, 10, 500) ) {
-                for ( final int rightNotClump : Arrays.asList(-1, 1000) ) {
-                    for ( final int nClumped : Arrays.asList(3, 4) ) {
-                        for ( final List<List<String>> alleles : Utils.makePermutations(allAlleles, nClumped, true)) {
-                            final List<VariantContext> allVCS = new LinkedList<VariantContext>();
-
-                            if ( leftNotClump != -1 ) allVCS.add(GATKVariantContextUtils.makeFromAlleles(NAME, CHR, leftNotClump, SNP_ALLELES));
-                            if ( middleNotClump != -1 ) allVCS.add(GATKVariantContextUtils.makeFromAlleles(NAME, CHR, middleNotClump, SNP_ALLELES));
-                            if ( rightNotClump != -1 ) allVCS.add(GATKVariantContextUtils.makeFromAlleles(NAME, CHR, rightNotClump, SNP_ALLELES));
-
-                            int clumpStart = 50;
-                            final List<VariantContext> vcs = new LinkedList<VariantContext>();
-                            for ( final List<String> myAlleles : alleles ) {
-                                final VariantContext vc = GATKVariantContextUtils.makeFromAlleles(NAME, CHR, clumpStart, myAlleles);
-                                clumpStart = vc.getEnd() + 3;
-                                vcs.add(vc);
-                            }
-
-                            tests.add(new Object[]{new EventMap(new LinkedList<VariantContext>(allVCS)), Collections.emptyList()});
-                            allVCS.addAll(vcs);
-                            tests.add(new Object[]{new EventMap(allVCS), vcs});
-                        }
-                    }
-                }
-            }
-        }
-
-        return tests.toArray(new Object[][]{});
-    }
-
-    /**
-     * Example testng test using MyDataProvider
-     */
-    @Test(dataProvider = "MyDataProvider", enabled = true)
-    public void testGetNeighborhood(final EventMap eventMap, final List<VariantContext> expectedNeighbors) {
-        final VariantContext leftOfNeighors = expectedNeighbors.isEmpty() ? null : expectedNeighbors.get(0);
-
-        for ( final VariantContext vc : eventMap.getVariantContexts() ) {
-            final List<VariantContext> n = eventMap.getNeighborhood(vc, 5);
-            if ( leftOfNeighors == vc )
-                Assert.assertEquals(n, expectedNeighbors);
-            else if ( ! expectedNeighbors.contains(vc) )
-                Assert.assertEquals(n, Collections.singletonList(vc), "Should only contain the original vc but " + n);
-        }
-    }
-
-    @DataProvider(name = "BlockSubstitutionsData")
-    public Object[][] makeBlockSubstitutionsData() {
-        List<Object[]> tests = new ArrayList<Object[]>();
-
-        for ( int size = EventMap.MIN_NUMBER_OF_EVENTS_TO_COMBINE_INTO_BLOCK_SUBSTITUTION; size < 10; size++ ) {
-            final String ref = Utils.dupString("A", size);
-            final String alt = Utils.dupString("C", size);
-            tests.add(new Object[]{ref, alt, size + "M", GATKVariantContextUtils.makeFromAlleles(NAME, CHR, 1, Arrays.asList(ref, alt))});
-        }
-
-        tests.add(new Object[]{"AAAAAA", "GAGAGA", "6M", GATKVariantContextUtils.makeFromAlleles(NAME, CHR, 1, Arrays.asList("AAAAA", "GAGAG"))});
-        tests.add(new Object[]{"AAAAAA", "GAGAGG", "6M", GATKVariantContextUtils.makeFromAlleles(NAME, CHR, 1, Arrays.asList("AAAAAA", "GAGAGG"))});
-
-        for ( int len = 0; len < 10; len++ ) {
-            final String s = len == 0 ? "" : Utils.dupString("A", len);
-            tests.add(new Object[]{s + "AACCCCAA", s + "GAAG", len + 2 + "M4D2M", GATKVariantContextUtils.makeFromAlleles(NAME, CHR, 1 + len,   Arrays.asList("AACCCCAA", "GAAG"))});
-            tests.add(new Object[]{s + "AAAA", s + "GACCCCAG", len + 2 + "M4I2M", GATKVariantContextUtils.makeFromAlleles(NAME, CHR, 1 + len, Arrays.asList("AAAA", "GACCCCAG"))});
-
-            tests.add(new Object[]{"AACCCCAA" + s, "GAAG" + s, "2M4D" + (len + 2) + "M", GATKVariantContextUtils.makeFromAlleles(NAME, CHR, 1,   Arrays.asList("AACCCCAA", "GAAG"))});
-            tests.add(new Object[]{"AAAA" + s, "GACCCCAG" + s, "2M4I" + (len + 2) + "M", GATKVariantContextUtils.makeFromAlleles(NAME, CHR, 1, Arrays.asList("AAAA", "GACCCCAG"))});
-        }
-
-        return tests.toArray(new Object[][]{});
-    }
-
-    /**
-     * Example testng test using MyDataProvider
-     */
-    @Test(dataProvider = "BlockSubstitutionsData")
-    public void testBlockSubstitutionsData(final String refBases, final String haplotypeBases, final String cigar, final VariantContext expectedBlock) {
-        final Haplotype hap = new Haplotype(haplotypeBases.getBytes(), false, 0, TextCigarCodec.getSingleton().decode(cigar));
-        final GenomeLoc loc = new UnvalidatingGenomeLoc(CHR, 0, 1, refBases.length());
-        final EventMap ee = new EventMap(hap, refBases.getBytes(), loc, NAME);
-        ee.replaceClumpedEventsWithBlockSubstitutions();
-        Assert.assertEquals(ee.getNumberOfEvents(), 1);
-        final VariantContext actual = ee.getVariantContexts().iterator().next();
-        Assert.assertTrue(GATKVariantContextUtils.equalSites(actual, expectedBlock), "Failed with " + actual);
-    }
-
-    @DataProvider(name = "AdjacentSNPIndelTest")
-    public Object[][] makeAdjacentSNPIndelTest() {
-        List<Object[]> tests = new ArrayList<Object[]>();
-
-        tests.add(new Object[]{"TT", "GCT", "1M1I1M", Arrays.asList(Arrays.asList("T", "GC"))});
-        tests.add(new Object[]{"GCT", "TT", "1M1D1M", Arrays.asList(Arrays.asList("GC", "T"))});
-        tests.add(new Object[]{"TT", "GCCT", "1M2I1M", Arrays.asList(Arrays.asList("T", "GCC"))});
-        tests.add(new Object[]{"GCCT", "TT", "1M2D1M", Arrays.asList(Arrays.asList("GCC", "T"))});
-        tests.add(new Object[]{"AAGCCT", "AATT", "3M2D1M", Arrays.asList(Arrays.asList("GCC", "T"))});
-        tests.add(new Object[]{"AAGCCT", "GATT", "3M2D1M", Arrays.asList(Arrays.asList("A", "G"), Arrays.asList("GCC", "T"))});
-        tests.add(new Object[]{"AAAAA", "AGACA", "5M", Arrays.asList(Arrays.asList("A", "G"), Arrays.asList("A", "C"))});
-
-        return tests.toArray(new Object[][]{});
-    }
-
-    /**
-     * Example testng test using MyDataProvider
-     */
-    @Test(dataProvider = "AdjacentSNPIndelTest")
-    public void testAdjacentSNPIndelTest(final String refBases, final String haplotypeBases, final String cigar, final List<List<String>> expectedAlleles) {
-        final Haplotype hap = new Haplotype(haplotypeBases.getBytes(), false, 0, TextCigarCodec.getSingleton().decode(cigar));
-        final GenomeLoc loc = new UnvalidatingGenomeLoc(CHR, 0, 1, refBases.length());
-        final EventMap ee = new EventMap(hap, refBases.getBytes(), loc, NAME);
-        ee.replaceClumpedEventsWithBlockSubstitutions();
-        Assert.assertEquals(ee.getNumberOfEvents(), expectedAlleles.size());
-        final List<VariantContext> actuals = new ArrayList<VariantContext>(ee.getVariantContexts());
-        for ( int i = 0; i < ee.getNumberOfEvents(); i++ ) {
-            final VariantContext actual = actuals.get(i);
-            Assert.assertEquals(actual.getReference().getDisplayString(), expectedAlleles.get(i).get(0));
-            Assert.assertEquals(actual.getAlternateAllele(0).getDisplayString(), expectedAlleles.get(i).get(1));
-        }
-    }
-
-    @DataProvider(name = "MakeBlockData")
-    public Object[][] makeMakeBlockData() {
-        List<Object[]> tests = new ArrayList<Object[]>();
-
-        tests.add(new Object[]{Arrays.asList("A", "G"), Arrays.asList("AGT", "A"), Arrays.asList("AGT", "G")});
-        tests.add(new Object[]{Arrays.asList("A", "G"), Arrays.asList("A", "AGT"), Arrays.asList("A", "GGT")});
-
-        tests.add(new Object[]{Arrays.asList("AC", "A"), Arrays.asList("A", "AGT"), Arrays.asList("AC", "AGT")});
-        tests.add(new Object[]{Arrays.asList("ACGTA", "A"), Arrays.asList("A", "AG"), Arrays.asList("ACGTA", "AG")});
-        tests.add(new Object[]{Arrays.asList("AC", "A"), Arrays.asList("A", "AGCGT"), Arrays.asList("AC", "AGCGT")});
-        tests.add(new Object[]{Arrays.asList("A", "ACGTA"), Arrays.asList("AG", "A"), Arrays.asList("AG", "ACGTA")});
-        tests.add(new Object[]{Arrays.asList("A", "AC"), Arrays.asList("AGCGT", "A"), Arrays.asList("AGCGT", "AC")});
-
-        return tests.toArray(new Object[][]{});
-    }
-
-    /**
-     * Example testng test using MyDataProvider
-     */
-    @Test(dataProvider = "MakeBlockData", enabled = true)
-    public void testGetNeighborhood(final List<String> firstAlleles, final List<String> secondAlleles, final List<String> expectedAlleles) {
-        final VariantContext vc1 = GATKVariantContextUtils.makeFromAlleles("x", "20", 10, firstAlleles);
-        final VariantContext vc2 = GATKVariantContextUtils.makeFromAlleles("x", "20", 10, secondAlleles);
-        final VariantContext expected = GATKVariantContextUtils.makeFromAlleles("x", "20", 10, expectedAlleles);
-
-        final EventMap eventMap = new EventMap(Collections.<VariantContext>emptyList());
-        final VariantContext block = eventMap.makeBlock(vc1, vc2);
-
-        Assert.assertEquals(block.getStart(), expected.getStart());
-        Assert.assertEquals(block.getAlleles(), expected.getAlleles());
-    }
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/haplotype/HaplotypeUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/haplotype/HaplotypeUnitTest.java
deleted file mode 100644
index b0087dc..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/haplotype/HaplotypeUnitTest.java
+++ /dev/null
@@ -1,249 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.haplotype;
-
-
-import htsjdk.samtools.Cigar;
-import htsjdk.samtools.CigarElement;
-import htsjdk.samtools.CigarOperator;
-import htsjdk.samtools.TextCigarCodec;
-import org.broadinstitute.gatk.utils.BaseTest;
-import org.broadinstitute.gatk.utils.GenomeLoc;
-import org.broadinstitute.gatk.utils.UnvalidatingGenomeLoc;
-import org.broadinstitute.gatk.utils.haplotype.Haplotype;
-import htsjdk.variant.variantcontext.Allele;
-import htsjdk.variant.variantcontext.VariantContext;
-import htsjdk.variant.variantcontext.VariantContextBuilder;
-import org.testng.Assert;
-import org.testng.annotations.BeforeClass;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
-
-import java.util.*;
-
-/**
- * Basic unit test for Haplotype Class
- */
-public class HaplotypeUnitTest extends BaseTest {
-    @Test
-    public void testSimpleInsertionAllele() {
-        final String bases = "ACTGGTCAACTGGTCAACTGGTCAACTGGTCA";
-
-        final ArrayList<CigarElement> h1CigarList = new ArrayList<CigarElement>();
-        h1CigarList.add(new CigarElement(bases.length(), CigarOperator.M));
-        final Cigar h1Cigar = new Cigar(h1CigarList);
-        String h1bases = "AACTTCTGGTCAACTGGTCAACTGGTCAACTGGTCA";
-        basicInsertTest("A", "AACTT", 0, h1Cigar, bases, h1bases);
-        h1bases = "ACTGGTCAACTTACTGGTCAACTGGTCAACTGGTCA";
-        basicInsertTest("A", "AACTT", 7, h1Cigar, bases, h1bases);
-        h1bases = "ACTGGTCAACTGGTCAAACTTCTGGTCAACTGGTCA";
-        basicInsertTest("A", "AACTT", 16, h1Cigar, bases, h1bases);
-    }
-
-    @Test
-    public void testSimpleDeletionAllele() {
-        final String bases = "ACTGGTCAACTGGTCAACTGGTCAACTGGTCA";
-
-        final ArrayList<CigarElement> h1CigarList = new ArrayList<CigarElement>();
-        h1CigarList.add(new CigarElement(bases.length(), CigarOperator.M));
-        final Cigar h1Cigar = new Cigar(h1CigarList);
-        String h1bases = "ATCAACTGGTCAACTGGTCAACTGGTCA";
-        basicInsertTest("ACTGG", "A", 0, h1Cigar, bases, h1bases);
-        h1bases = "ACTGGTCAGTCAACTGGTCAACTGGTCA";
-        basicInsertTest("AACTG", "A", 7, h1Cigar, bases, h1bases);
-        h1bases = "ACTGGTCAACTGGTCAATCAACTGGTCA";
-        basicInsertTest("ACTGG", "A", 16, h1Cigar, bases, h1bases);
-    }
-
-    @Test
-    public void testSimpleSNPAllele() {
-        final String bases = "ACTGGTCAACTGGTCAACTGGTCAACTGGTCA";
-
-        final ArrayList<CigarElement> h1CigarList = new ArrayList<CigarElement>();
-        h1CigarList.add(new CigarElement(bases.length(), CigarOperator.M));
-        final Cigar h1Cigar = new Cigar(h1CigarList);
-        String h1bases = "AGTGGTCAACTGGTCAACTGGTCAACTGGTCA";
-        basicInsertTest("C", "G", 1, h1Cigar, bases, h1bases);
-        h1bases = "ACTGGTCTACTGGTCAACTGGTCAACTGGTCA";
-        basicInsertTest("A", "T", 7, h1Cigar, bases, h1bases);
-        h1bases = "ACTGGTCAACTGGTCAAATGGTCAACTGGTCA";
-        basicInsertTest("C", "A", 17, h1Cigar, bases, h1bases);
-    }
-
-    @Test
-    public void testComplexInsertionAllele() {
-        final String bases = "ATCG" + "CCGGCCGGCC" + "ATCGATCG" + "AGGGGGA" + "AGGC";
-
-        final ArrayList<CigarElement> h1CigarList = new ArrayList<CigarElement>();
-        h1CigarList.add(new CigarElement(4, CigarOperator.M));
-        h1CigarList.add(new CigarElement(10, CigarOperator.I));
-        h1CigarList.add(new CigarElement(8, CigarOperator.M));
-        h1CigarList.add(new CigarElement(3, CigarOperator.D));
-        h1CigarList.add(new CigarElement(7 + 4, CigarOperator.M));
-        final Cigar h1Cigar = new Cigar(h1CigarList);
-        String h1bases = "AACTTTCG" + "CCGGCCGGCC" + "ATCGATCG" + "AGGGGGA" + "AGGC";
-        basicInsertTest("A", "AACTT", 0, h1Cigar, bases, h1bases);
-        h1bases = "ATCG" + "CCGGCCGGCC" + "ATCACTTGATCG" + "AGGGGGA" + "AGGC";
-        basicInsertTest("C", "CACTT", 6, h1Cigar, bases, h1bases);
-        h1bases = "ATCG" + "CCGGCCGGCC" + "ATCGATCG" + "AGACTTGGGGA" + "AGGC";
-        basicInsertTest("G", "GACTT", 16, h1Cigar, bases, h1bases);
-    }
-
-    @Test
-    public void testComplexDeletionAllele() {
-        final String bases = "ATCG" + "CCGGCCGGCC" + "ATCGATCG" + "AGGGGGA" + "AGGC";
-
-        final ArrayList<CigarElement> h1CigarList = new ArrayList<CigarElement>();
-        h1CigarList.add(new CigarElement(4, CigarOperator.M));
-        h1CigarList.add(new CigarElement(10, CigarOperator.I));
-        h1CigarList.add(new CigarElement(8, CigarOperator.M));
-        h1CigarList.add(new CigarElement(3, CigarOperator.D));
-        h1CigarList.add(new CigarElement(7 + 4, CigarOperator.M));
-        final Cigar h1Cigar = new Cigar(h1CigarList);
-        String h1bases = "A" + "CCGGCCGGCC" + "ATCGATCG" + "AGGGGGA" + "AGGC";
-        basicInsertTest("ATCG", "A", 0, h1Cigar, bases, h1bases);
-        h1bases = "ATCG" + "CCGGCCGGCC" + "ATAAAG" + "AGGGGGA" + "AGGC";
-        basicInsertTest("CGATC", "AAA", 6, h1Cigar, bases, h1bases);
-        h1bases = "ATCG" + "CCGGCCGGCC" + "ATCGATCG" + "AGA" + "AGGC";
-        basicInsertTest("GGGGG", "G", 16, h1Cigar, bases, h1bases);
-    }
-
-    @Test
-    public void testComplexSNPAllele() {
-        final String bases = "ATCG" + "CCGGCCGGCC" + "ATCGATCG" + "AGGGGGA" + "AGGC";
-
-        final ArrayList<CigarElement> h1CigarList = new ArrayList<CigarElement>();
-        h1CigarList.add(new CigarElement(4, CigarOperator.M));
-        h1CigarList.add(new CigarElement(10, CigarOperator.I));
-        h1CigarList.add(new CigarElement(8, CigarOperator.M));
-        h1CigarList.add(new CigarElement(3, CigarOperator.D));
-        h1CigarList.add(new CigarElement(7 + 4, CigarOperator.M));
-        final Cigar h1Cigar = new Cigar(h1CigarList);
-        String h1bases = "AGCG" + "CCGGCCGGCC" + "ATCGATCG" + "AGGGGGA" + "AGGC";
-        basicInsertTest("T", "G", 1, h1Cigar, bases, h1bases);
-        h1bases = "ATCG" + "CCGGCCGGCC" + "ATCTATCG" + "AGGGGGA" + "AGGC";
-        basicInsertTest("G", "T", 7, h1Cigar, bases, h1bases);
-        h1bases = "ATCG" + "CCGGCCGGCC" + "ATCGATCG" + "AGCGGGA" + "AGGC";
-        basicInsertTest("G", "C", 17, h1Cigar, bases, h1bases);
-    }
-
-    private void basicInsertTest(String ref, String alt, int loc, Cigar cigar, String hap, String newHap) {
-        final Haplotype h = new Haplotype(hap.getBytes());
-        final Allele h1refAllele = Allele.create(ref, true);
-        final Allele h1altAllele = Allele.create(alt, false);
-        final ArrayList<Allele> alleles = new ArrayList<Allele>();
-        alleles.add(h1refAllele);
-        alleles.add(h1altAllele);
-        final VariantContext vc = new VariantContextBuilder().alleles(alleles).loc("1", loc, loc + h1refAllele.getBases().length - 1).make();
-        h.setAlignmentStartHapwrtRef(0);
-        h.setCigar(cigar);
-        final Haplotype h1 = h.insertAllele(vc.getReference(), vc.getAlternateAllele(0), loc, vc.getStart());
-        final Haplotype h1expected = new Haplotype(newHap.getBytes());
-        Assert.assertEquals(h1, h1expected);
-    }
-
-    private Haplotype makeHCForCigar(final String bases, final String cigar) {
-        final Haplotype h = new Haplotype(bases.getBytes());
-        h.setCigar(TextCigarCodec.getSingleton().decode(cigar));
-        return h;
-    }
-
-    @Test
-    public void testConsolidateCigar() throws Exception {
-        Assert.assertEquals(makeHCForCigar("AGCT", "4M").getConsolidatedPaddedCigar(0).toString(), "4M");
-        Assert.assertEquals(makeHCForCigar("AGCT", "4M").getConsolidatedPaddedCigar(1).toString(), "5M");
-        Assert.assertEquals(makeHCForCigar("AGCT", "1M1I1I1M").getConsolidatedPaddedCigar(0).toString(), "1M2I1M");
-        Assert.assertEquals(makeHCForCigar("AGCT", "1M1I1I1M").getConsolidatedPaddedCigar(1).toString(), "1M2I2M");
-        Assert.assertEquals(makeHCForCigar("AGCT", "1M1I1I1M").getConsolidatedPaddedCigar(2).toString(), "1M2I3M");
-        Assert.assertEquals(makeHCForCigar("AGCT", "1M1I1I1I").getConsolidatedPaddedCigar(0).toString(), "1M3I");
-        Assert.assertEquals(makeHCForCigar("AGCT", "1M1I1I1I").getConsolidatedPaddedCigar(1).toString(), "1M3I1M");
-        Assert.assertEquals(makeHCForCigar("AGCT", "1M1I1I1I").getConsolidatedPaddedCigar(2).toString(), "1M3I2M");
-    }
-
-    @DataProvider(name = "TrimmingData")
-    public Object[][] makeTrimmingData() {
-        List<Object[]> tests = new ArrayList<Object[]>();
-
-        // this functionality can be adapted to provide input data for whatever you might want in your data
-        final GenomeLoc loc = new UnvalidatingGenomeLoc("20", 0, 10, 20);
-        final String fullBases = "ACGTAACCGGT";
-        for ( int trimStart = loc.getStart(); trimStart < loc.getStop(); trimStart++ ) {
-            for ( int trimStop = trimStart; trimStop <= loc.getStop(); trimStop++ ) {
-                final int start = trimStart - loc.getStart();
-                final int stop = start + (trimStop - trimStart) + 1;
-                final GenomeLoc trimmedLoc = new UnvalidatingGenomeLoc("20", 0, start + loc.getStart(), stop + loc.getStart() - 1);
-                final String expectedBases = fullBases.substring(start, stop);
-                final Haplotype full = new Haplotype(fullBases.getBytes(), loc);
-                final Haplotype trimmed = new Haplotype(expectedBases.getBytes(), trimmedLoc);
-
-                final int hapStart = 10;
-                full.setAlignmentStartHapwrtRef(hapStart);
-                full.setCigar(TextCigarCodec.getSingleton().decode(full.length() + "M"));
-
-                trimmed.setAlignmentStartHapwrtRef(hapStart + start);
-                trimmed.setCigar(TextCigarCodec.getSingleton().decode(trimmed.length() + "M"));
-
-                tests.add(new Object[]{full, trimmedLoc, trimmed});
-            }
-        }
-
-        final Haplotype full = new Haplotype("ACT".getBytes(), new UnvalidatingGenomeLoc("20", 0, 10, 14));
-        full.setAlignmentStartHapwrtRef(10);
-        full.setCigar(TextCigarCodec.getSingleton().decode("1M2D2M"));
-        tests.add(new Object[]{full, new UnvalidatingGenomeLoc("20", 0, 11, 12), null});
-        tests.add(new Object[]{full, new UnvalidatingGenomeLoc("20", 0, 10, 12), null});
-        tests.add(new Object[]{full, new UnvalidatingGenomeLoc("20", 0, 11, 13), null});
-
-        return tests.toArray(new Object[][]{});
-    }
-
-    @Test(dataProvider = "TrimmingData")
-    public void testTrim(final Haplotype full, final GenomeLoc trimTo, final Haplotype expected) {
-        final Haplotype actual = full.trim(trimTo);
-        if ( expected != null ) {
-            Assert.assertEquals(actual.getBases(), expected.getBases());
-            Assert.assertEquals(actual.getStartPosition(), trimTo.getStart());
-            Assert.assertEquals(actual.getStopPosition(), trimTo.getStop());
-            Assert.assertEquals(actual.getCigar(), expected.getCigar());
-            Assert.assertEquals(actual.getAlignmentStartHapwrtRef(), expected.getAlignmentStartHapwrtRef());
-        } else {
-            Assert.assertNull(actual);
-        }
-    }
-
-    @Test(expectedExceptions = IllegalArgumentException.class)
-    public void testBadTrimLoc() {
-        final GenomeLoc loc = new UnvalidatingGenomeLoc("20", 0, 10, 20);
-        final Haplotype hap = new Haplotype("ACGTAACCGGT".getBytes(), loc);
-        hap.trim(new UnvalidatingGenomeLoc("20", 0, 1, 20));
-    }
-
-    @Test(expectedExceptions = IllegalStateException.class)
-    public void testBadTrimNoLoc() {
-        final Haplotype hap = new Haplotype("ACGTAACCGGT".getBytes());
-        hap.trim(new UnvalidatingGenomeLoc("20", 0, 1, 20));
-    }
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/interval/IntervalIntegrationTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/interval/IntervalIntegrationTest.java
deleted file mode 100644
index 8d8f7d2..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/interval/IntervalIntegrationTest.java
+++ /dev/null
@@ -1,304 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.interval;
-
-import org.broadinstitute.gatk.engine.walkers.WalkerTest;
-import org.testng.annotations.Test;
-
-import java.io.File;
-import java.util.Arrays;
-import java.util.Collections;
-
-/**
- * Test the GATK core interval parsing mechanism.
- */
-public class IntervalIntegrationTest extends WalkerTest {
-    @Test(enabled = true)
-    public void testAllImplicitIntervalParsing() {
-        String md5 = "7821db9e14d4f8e07029ff1959cd5a99";
-        WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec(
-                "-T CountLoci" +
-                        " -I " + validationDataLocation + "OV-0930.normal.chunk.bam" +
-                        " -R " + hg18Reference +
-                        " -o %s",
-                        1, // just one output file
-                        Arrays.asList(md5));
-        executeTest("testAllIntervalsImplicit",spec);
-    }
-
-// '-L all' is no longer supported
-//    @Test(enabled = true)
-//    public void testAllExplicitIntervalParsing() {
-//        String md5 = "7821db9e14d4f8e07029ff1959cd5a99";
-//        WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec(
-//                "-T CountLoci" +
-//                        " -I " + validationDataLocation + "OV-0930.normal.chunk.bam" +
-//                        " -R " + hg18Reference +
-//                        " -L all" +
-//                        " -o %s",
-//                        1, // just one output file
-//                        Arrays.asList(md5));
-//        executeTest("testAllIntervalsExplicit",spec);
-//    }
-
-    @Test
-    public void testUnmappedReadInclusion() {
-        WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec(
-                "-T PrintReads" +
-                        " -I " + validationDataLocation + "MV1994.bam" +
-                        " -R " + validationDataLocation + "Escherichia_coli_K12_MG1655.fasta" +
-                        " -L unmapped" +
-                        " -U",
-                        0, // two output files
-                        Collections.<String>emptyList());
-
-        // our base file
-        File baseOutputFile = createTempFile("testUnmappedReadInclusion",".bam");
-        spec.setOutputFileLocation(baseOutputFile);
-        spec.addAuxFile("95e98192e5b90cf80eaa87a4ace263da",createTempFileFromBase(baseOutputFile.getAbsolutePath()));
-        spec.addAuxFile("fadcdf88597b9609c5f2a17f4c6eb455", createTempFileFromBase(baseOutputFile.getAbsolutePath().substring(0,baseOutputFile.getAbsolutePath().indexOf(".bam"))+".bai"));
-
-        executeTest("testUnmappedReadInclusion",spec);
-    }
-
-    @Test
-    public void testMixedMappedAndUnmapped() {
-        WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec(
-                "-T PrintReads" +
-                        " -I " + validationDataLocation + "MV1994.bam" +
-                        " -R " + validationDataLocation + "Escherichia_coli_K12_MG1655.fasta" +
-                        " -L Escherichia_coli_K12:4630000-4639675" +
-                        " -L unmapped" +
-                        " -U",
-                        0, // two output files
-                        Collections.<String>emptyList());
-
-        // our base file
-        File baseOutputFile = createTempFile("testUnmappedReadInclusion",".bam");
-        spec.setOutputFileLocation(baseOutputFile);
-        spec.addAuxFile("3944b5a6bfc06277ed3afb928a20d588",createTempFileFromBase(baseOutputFile.getAbsolutePath()));
-        spec.addAuxFile("fa90ff91ac0cc689c71a3460a3530b8b", createTempFileFromBase(baseOutputFile.getAbsolutePath().substring(0,baseOutputFile.getAbsolutePath().indexOf(".bam"))+".bai"));
-
-        executeTest("testUnmappedReadInclusion",spec);
-    }
-
-
-    @Test(enabled = false)
-    public void testUnmappedReadExclusion() {
-        WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec(
-                "-T PrintReads" +
-                        " -I " + validationDataLocation + "MV1994.bam" +
-                        " -R " + validationDataLocation + "Escherichia_coli_K12_MG1655.fasta" +
-                        " -XL unmapped" +
-                        " -U",
-                        0, // two output files
-                        Collections.<String>emptyList());
-
-        // our base file
-        File baseOutputFile = createTempFile("testUnmappedReadExclusion",".bam");
-        spec.setOutputFileLocation(baseOutputFile);
-        spec.addAuxFile("80887ba488e53dabd9596ff93070ae75",createTempFileFromBase(baseOutputFile.getAbsolutePath()));
-        spec.addAuxFile("b341d808ecc33217f37c0c0cde2a3e2f", createTempFileFromBase(baseOutputFile.getAbsolutePath().substring(0,baseOutputFile.getAbsolutePath().indexOf(".bam"))+".bai"));
-
-        executeTest("testUnmappedReadExclusion",spec);
-    }
-
-    @Test(enabled = true)
-    public void testIntervalParsingFromFile() {
-        String md5 = "48a24b70a0b376535542b996af517398";
-        WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec(
-                "-T CountLoci" +
-                        " -I " + validationDataLocation + "OV-0930.normal.chunk.bam" +
-                        " -R " + hg18Reference +
-                        " -o %s" +
-                        " -L " + validationDataLocation + "intervalTest.1.vcf",
-                        1, // just one output file
-                        Arrays.asList(md5));
-        executeTest("testIntervalParsingFromFile", spec);
-    }
-
-    @Test(enabled = true)
-    public void testIntervalMergingFromFiles() {
-        String md5 = "9ae0ea9e3c9c6e1b9b6252c8395efdc1";
-        WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec(
-                "-T CountLoci" +
-                        " -I " + validationDataLocation + "OV-0930.normal.chunk.bam" +
-                        " -R " + hg18Reference +
-                        " -o %s" +
-                        " -L " + validationDataLocation + "intervalTest.1.vcf" +
-                        " -L " + validationDataLocation + "intervalTest.2.vcf",
-                        1, // just one output file
-                        Arrays.asList(md5));
-        executeTest("testIntervalMergingFromFiles", spec);
-    }
-
-    @Test(enabled = true)
-    public void testIntervalExclusionsFromFiles() {
-        String md5 = "26ab0db90d72e28ad0ba1e22ee510510";
-        WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec(
-                "-T CountLoci" +
-                        " -I " + validationDataLocation + "OV-0930.normal.chunk.bam" +
-                        " -R " + hg18Reference +
-                        " -o %s" +
-                        " -L " + validationDataLocation + "intervalTest.1.vcf" +
-                        " -XL " + validationDataLocation + "intervalTest.2.vcf",
-                        1, // just one output file
-                        Arrays.asList(md5));
-        executeTest("testIntervalExclusionsFromFiles", spec);
-    }
-
-    @Test(enabled = true)
-    public void testMixedIntervalMerging() {
-        String md5 = "7c5aba41f53293b712fd86d08ed5b36e";
-        WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec(
-                "-T CountLoci" +
-                        " -I " + validationDataLocation + "OV-0930.normal.chunk.bam" +
-                        " -R " + hg18Reference +
-                        " -o %s" +
-                        " -L " + validationDataLocation + "intervalTest.1.vcf" +
-                        " -L chr1:1677524-1677528",
-                        1, // just one output file
-                        Arrays.asList(md5));
-        executeTest("testMixedIntervalMerging", spec);
-    }
-
-    @Test(enabled = true)
-    public void testBed() {
-        String md5 = "cf4278314ef8e4b996e1b798d8eb92cf";
-        WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec(
-                "-T CountLoci" +
-                        " -I " + validationDataLocation + "OV-0930.normal.chunk.bam" +
-                        " -R " + hg18Reference +
-                        " -o %s" +
-                        " -L " + validationDataLocation + "intervalTest.bed",
-                        1, // just one output file
-                        Arrays.asList(md5));
-        executeTest("testBed", spec);
-    }
-
-    @Test(enabled = true)
-    public void testComplexVCF() {
-        String md5 = "166d77ac1b46a1ec38aa35ab7e628ab5";
-        WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec(
-                "-T CountLoci" +
-                        " -I " + validationDataLocation + "OV-0930.normal.chunk.bam" +
-                        " -R " + hg18Reference +
-                        " -o %s" +
-                        " -L " + validationDataLocation + "intervalTest.3.vcf",
-                1, // just one output file
-                Arrays.asList(md5));
-        executeTest("testComplexVCF", spec);
-    }
-
-    @Test(enabled = true)
-    public void testComplexVCFWithPadding() {
-        String md5 = "649ee93d50739c656e94ec88a32c7ffe";
-        WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec(
-                "-T CountLoci" +
-                        " --interval_padding 2" +
-                        " -I " + validationDataLocation + "OV-0930.normal.chunk.bam" +
-                        " -R " + hg18Reference +
-                        " -o %s" +
-                        " -L " + validationDataLocation + "intervalTest.3.vcf",
-                1, // just one output file
-                Arrays.asList(md5));
-        executeTest("testComplexVCFWithPadding", spec);
-    }
-
-    @Test(enabled = true)
-    public void testMergingWithComplexVCF() {
-        String md5 = "6d7fce9fee471194aa8b5b6e47267f03";
-        WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec(
-                "-T CountLoci" +
-                        " -I " + validationDataLocation + "OV-0930.normal.chunk.bam" +
-                        " -R " + hg18Reference +
-                        " -o %s" +
-                        " -L " + validationDataLocation + "intervalTest.1.vcf" +
-                        " -XL " + validationDataLocation + "intervalTest.3.vcf",
-                        1, // just one output file
-                        Arrays.asList(md5));
-        executeTest("testMergingWithComplexVCF", spec);
-    }
-
-    @Test(enabled = true)
-    public void testEmptyVCF() {
-        String md5 = "897316929176464ebc9ad085f31e7284";
-        WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec(
-                "-T CountLoci" +
-                        " -I " + validationDataLocation + "OV-0930.normal.chunk.bam" +
-                        " -R " + hg18Reference +
-                        " -o %s" +
-                        " -L " + validationDataLocation + "intervalTest.empty.vcf",
-                        1, // just one output file
-                        Arrays.asList(md5));
-        executeTest("testEmptyVCFWarning", spec);
-    }
-
-    @Test(enabled = true)
-    public void testIncludeExcludeIsTheSame() {
-        String md5 = "897316929176464ebc9ad085f31e7284";
-        WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec(
-                "-T CountLoci" +
-                        " -I " + validationDataLocation + "OV-0930.normal.chunk.bam" +
-                        " -R " + hg18Reference +
-                        " -o %s" +
-                        " -L " + validationDataLocation + "intervalTest.1.vcf" +
-                        " -XL " + validationDataLocation + "intervalTest.1.vcf",
-                        1, // just one output file
-                        Arrays.asList(md5));
-        executeTest("testIncludeExcludeIsTheSame", spec);
-    }
-
-    @Test(enabled = true)
-    public void testSymbolicAlleles() {
-        String md5 = "52745056d2fd5904857bbd4984c08098";
-        WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec(
-                "-T CountLoci" +
-                        " -I " + validationDataLocation + "NA12878.chrom1.SLX.SRP000032.2009_06.bam" +
-                        " -R " + b36KGReference +
-                        " -o %s" +
-                        " -L " + privateTestDir + "symbolic_alleles_1.vcf",
-                1, // just one output file
-                Arrays.asList(md5));
-        executeTest("testSymbolicAlleles", spec);
-    }
-
-    @Test
-    public void testIntersectionOfLexicographicallySortedIntervals() {
-        final String md5 = "18be9375e5a753f766616a51eb6131f0";
-        WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec(
-                " -T CountLoci" +
-                " -I " + privateTestDir + "NA12878.4.snippet.bam" +
-                " -R " + b37KGReference +
-                " -L " + privateTestDir + "lexicographicallySortedIntervals.bed" +
-                " -L 4" +
-                " -isr INTERSECTION" +
-                " -o %s",
-                1, // just one output file
-                Arrays.asList(md5));
-        executeTest("testIntersectionOfLexicographicallySortedIntervals", spec);
-    }
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/interval/IntervalUtilsUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/interval/IntervalUtilsUnitTest.java
deleted file mode 100644
index e9846da..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/interval/IntervalUtilsUnitTest.java
+++ /dev/null
@@ -1,1110 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.interval;
-
-import htsjdk.samtools.reference.IndexedFastaSequenceFile;
-import htsjdk.samtools.reference.ReferenceSequenceFile;
-import htsjdk.samtools.util.Interval;
-import htsjdk.samtools.util.IntervalList;
-import htsjdk.samtools.SAMFileHeader;
-import org.apache.commons.io.FileUtils;
-import htsjdk.tribble.Feature;
-import org.broadinstitute.gatk.utils.BaseTest;
-import org.broadinstitute.gatk.utils.commandline.IntervalBinding;
-import org.broadinstitute.gatk.engine.GenomeAnalysisEngine;
-import org.broadinstitute.gatk.engine.arguments.GATKArgumentCollection;
-import org.broadinstitute.gatk.engine.datasources.reference.ReferenceDataSource;
-import org.broadinstitute.gatk.utils.GenomeLoc;
-import org.broadinstitute.gatk.utils.GenomeLocParser;
-import org.broadinstitute.gatk.utils.GenomeLocSortedSet;
-import org.broadinstitute.gatk.utils.exceptions.UserException;
-import org.broadinstitute.gatk.utils.fasta.CachingIndexedFastaSequenceFile;
-import org.testng.Assert;
-import org.testng.annotations.BeforeClass;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
-
-import java.io.File;
-import java.io.FileNotFoundException;
-import java.util.*;
-
-/**
- * test out the interval utility methods
- */
-public class IntervalUtilsUnitTest extends BaseTest {
-    // used to seed the genome loc parser with a sequence dictionary
-    private SAMFileHeader hg18Header;
-    private GenomeLocParser hg18GenomeLocParser;
-    private List<GenomeLoc> hg18ReferenceLocs;
-    private SAMFileHeader hg19Header;
-    private GenomeLocParser hg19GenomeLocParser;
-    private List<GenomeLoc> hg19ReferenceLocs;
-    private List<GenomeLoc> hg19exomeIntervals;
-
-    private List<GenomeLoc> getLocs(String... intervals) {
-        return getLocs(Arrays.asList(intervals));
-    }
-
-    private List<GenomeLoc> getLocs(List<String> intervals) {
-        if (intervals.size() == 0)
-            return hg18ReferenceLocs;
-        List<GenomeLoc> locs = new ArrayList<GenomeLoc>();
-        for (String interval: intervals)
-            locs.add(hg18GenomeLocParser.parseGenomeLoc(interval));
-        return Collections.unmodifiableList(locs);
-    }
-
-    @BeforeClass
-    public void init() {
-        File hg18Ref = new File(BaseTest.hg18Reference);
-        try {
-            ReferenceDataSource referenceDataSource = new ReferenceDataSource(hg18Ref);
-            hg18Header = new SAMFileHeader();
-            hg18Header.setSequenceDictionary(referenceDataSource.getReference().getSequenceDictionary());
-            ReferenceSequenceFile seq = new CachingIndexedFastaSequenceFile(hg18Ref);
-            hg18GenomeLocParser = new GenomeLocParser(seq);
-            hg18ReferenceLocs = Collections.unmodifiableList(GenomeLocSortedSet.createSetFromSequenceDictionary(referenceDataSource.getReference().getSequenceDictionary()).toList()) ;
-        }
-        catch(FileNotFoundException ex) {
-            throw new UserException.CouldNotReadInputFile(hg18Ref,ex);
-        }
-
-        File hg19Ref = new File(BaseTest.hg19Reference);
-        try {
-            ReferenceDataSource referenceDataSource = new ReferenceDataSource(hg19Ref);
-            hg19Header = new SAMFileHeader();
-            hg19Header.setSequenceDictionary(referenceDataSource.getReference().getSequenceDictionary());
-            ReferenceSequenceFile seq = new CachingIndexedFastaSequenceFile(hg19Ref);
-            hg19GenomeLocParser = new GenomeLocParser(seq);
-            hg19ReferenceLocs = Collections.unmodifiableList(GenomeLocSortedSet.createSetFromSequenceDictionary(referenceDataSource.getReference().getSequenceDictionary()).toList()) ;
-
-            hg19exomeIntervals = Collections.unmodifiableList(IntervalUtils.parseIntervalArguments(hg19GenomeLocParser, Arrays.asList(hg19Intervals)));
-        }
-        catch(FileNotFoundException ex) {
-            throw new UserException.CouldNotReadInputFile(hg19Ref,ex);
-        }
-    }
-
-    // -------------------------------------------------------------------------------------
-    //
-    // tests to ensure the quality of the interval cuts of the interval cutting functions
-    //
-    // -------------------------------------------------------------------------------------
-
-    private class IntervalSlicingTest extends TestDataProvider {
-        public int parts;
-        public double maxAllowableVariance;
-
-        private IntervalSlicingTest(final int parts, final double maxAllowableVariance) {
-            super(IntervalSlicingTest.class);
-            this.parts = parts;
-            this.maxAllowableVariance = maxAllowableVariance;
-        }
-
-        public String toString() {
-            return String.format("IntervalSlicingTest parts=%d maxVar=%.2f", parts, maxAllowableVariance);
-        }
-    }
-
-    @DataProvider(name = "intervalslicingdata")
-    public Object[][] createTrees() {
-        new IntervalSlicingTest(1, 0);
-        new IntervalSlicingTest(2, 1);
-        new IntervalSlicingTest(5, 1);
-        new IntervalSlicingTest(10, 1);
-        new IntervalSlicingTest(67, 1);
-        new IntervalSlicingTest(100, 1);
-        new IntervalSlicingTest(500, 1);
-        new IntervalSlicingTest(1000, 1);
-        return IntervalSlicingTest.getTests(IntervalSlicingTest.class);
-    }
-
-    @Test(enabled = true, dataProvider = "intervalslicingdata")
-    public void testFixedScatterIntervalsAlgorithm(IntervalSlicingTest test) {
-        List<List<GenomeLoc>> splits = IntervalUtils.splitFixedIntervals(hg19exomeIntervals, test.parts);
-
-        long totalSize = IntervalUtils.intervalSize(hg19exomeIntervals);
-        long idealSplitSize = totalSize / test.parts;
-
-        long sumOfSplitSizes = 0;
-        int counter = 0;
-        for ( final List<GenomeLoc> split : splits ) {
-            long splitSize = IntervalUtils.intervalSize(split);
-            double sigma = (splitSize - idealSplitSize) / (1.0 * idealSplitSize);
-            //logger.warn(String.format("Split %d size %d ideal %d sigma %.2f", counter, splitSize, idealSplitSize, sigma));
-            counter++;
-            sumOfSplitSizes += splitSize;
-            Assert.assertTrue(Math.abs(sigma) <= test.maxAllowableVariance, String.format("Interval %d (size %d ideal %d) has a variance %.2f outside of the tolerated range %.2f", counter, splitSize, idealSplitSize, sigma, test.maxAllowableVariance));
-        }
-
-        Assert.assertEquals(totalSize, sumOfSplitSizes, "Split intervals don't contain the exact number of bases in the origianl intervals");
-    }
-
-    // -------------------------------------------------------------------------------------
-    //
-    // splitLocusIntervals tests
-    //
-    // -------------------------------------------------------------------------------------
-
-    /** large scale tests for many intervals */
-    private class SplitLocusIntervalsTest extends TestDataProvider {
-        final List<GenomeLoc> originalIntervals;
-        final public int parts;
-
-        private SplitLocusIntervalsTest(final String name, List<GenomeLoc> originalIntervals, final int parts) {
-            super(SplitLocusIntervalsTest.class, name);
-            this.parts = parts;
-            this.originalIntervals = originalIntervals;
-        }
-
-        public String toString() {
-            return String.format("%s parts=%d", super.toString(), parts);
-        }
-    }
-
-    @DataProvider(name = "IntervalRepartitionTest")
-    public Object[][] createIntervalRepartitionTest() {
-        for ( int parts : Arrays.asList(1, 2, 3, 10, 13, 100, 151, 1000, 10000) ) {
-        //for ( int parts : Arrays.asList(10) ) {
-            new SplitLocusIntervalsTest("hg19RefLocs", hg19ReferenceLocs, parts);
-            new SplitLocusIntervalsTest("hg19ExomeLocs", hg19exomeIntervals, parts);
-        }
-
-        return SplitLocusIntervalsTest.getTests(SplitLocusIntervalsTest.class);
-    }
-
-    @Test(enabled = true, dataProvider = "IntervalRepartitionTest")
-    public void testIntervalRepartition(SplitLocusIntervalsTest test) {
-        List<List<GenomeLoc>> splitByLocus = IntervalUtils.splitLocusIntervals(test.originalIntervals, test.parts);
-        Assert.assertEquals(splitByLocus.size(), test.parts, "SplitLocusIntervals failed to generate correct number of intervals");
-        List<GenomeLoc> flat = IntervalUtils.flattenSplitIntervals(splitByLocus);
-
-        // test overall size
-        final long originalSize = IntervalUtils.intervalSize(test.originalIntervals);
-        final long flatSize = IntervalUtils.intervalSize(flat);
-        Assert.assertEquals(flatSize, originalSize, "SplitLocusIntervals locs cover an incorrect number of bases");
-
-        // test size of each split
-        final long ideal = (long)Math.floor(originalSize / (1.0 * test.parts));
-        final long maxSize = ideal + (originalSize % test.parts) * test.parts; // no more than N * rounding error in size
-        for ( final List<GenomeLoc> split : splitByLocus ) {
-            final long splitSize = IntervalUtils.intervalSize(split);
-            Assert.assertTrue(splitSize >= ideal && splitSize <= maxSize,
-                    String.format("SplitLocusIntervals interval (start=%s) has size %d outside of bounds ideal=%d, max=%d",
-                            split.get(0), splitSize, ideal, maxSize));
-        }
-
-        // test that every base in original is covered once by a base in split by locus intervals
-        String diff = IntervalUtils.equateIntervals(test.originalIntervals, flat);
-        Assert.assertNull(diff, diff);
-    }
-
-    /** small scale tests where the expected cuts are enumerated upfront for testing */
-    private class SplitLocusIntervalsSmallTest extends TestDataProvider {
-        final List<GenomeLoc> original;
-        final public int parts;
-        final public int expectedParts;
-        final List<GenomeLoc> expected;
-
-        private SplitLocusIntervalsSmallTest(final String name, List<GenomeLoc> originalIntervals, final int parts, List<GenomeLoc> expected) {
-            this(name, originalIntervals, parts,  expected, parts);
-        }
-
-        private SplitLocusIntervalsSmallTest(final String name, List<GenomeLoc> originalIntervals, final int parts, List<GenomeLoc> expected, int expectedParts) {
-            super(SplitLocusIntervalsSmallTest.class, name);
-            this.parts = parts;
-            this.expectedParts = expectedParts;
-            this.original = originalIntervals;
-            this.expected = expected;
-        }
-
-        public String toString() {
-            return String.format("%s parts=%d", super.toString(), parts);
-        }
-    }
-
-    @DataProvider(name = "SplitLocusIntervalsSmallTest")
-    public Object[][] createSplitLocusIntervalsSmallTest() {
-        GenomeLoc bp01_10 = hg19GenomeLocParser.createGenomeLoc("1", 1, 10);
-
-        GenomeLoc bp1_5 = hg19GenomeLocParser.createGenomeLoc("1", 1, 5);
-        GenomeLoc bp6_10 = hg19GenomeLocParser.createGenomeLoc("1", 6, 10);
-        new SplitLocusIntervalsSmallTest("cut into two", Arrays.asList(bp01_10), 2, Arrays.asList(bp1_5, bp6_10));
-
-        GenomeLoc bp20_30 = hg19GenomeLocParser.createGenomeLoc("1", 20, 30);
-        new SplitLocusIntervalsSmallTest("two in two", Arrays.asList(bp01_10, bp20_30), 2, Arrays.asList(bp01_10, bp20_30));
-
-        GenomeLoc bp1_7 = hg19GenomeLocParser.createGenomeLoc("1", 1, 7);
-        GenomeLoc bp8_10 = hg19GenomeLocParser.createGenomeLoc("1", 8, 10);
-        GenomeLoc bp20_23 = hg19GenomeLocParser.createGenomeLoc("1", 20, 23);
-        GenomeLoc bp24_30 = hg19GenomeLocParser.createGenomeLoc("1", 24, 30);
-        new SplitLocusIntervalsSmallTest("two in three", Arrays.asList(bp01_10, bp20_30), 3,
-                Arrays.asList(bp1_7, bp8_10, bp20_23, bp24_30));
-
-        GenomeLoc bp1_2 = hg19GenomeLocParser.createGenomeLoc("1", 1, 2);
-        GenomeLoc bp1_1 = hg19GenomeLocParser.createGenomeLoc("1", 1, 1);
-        GenomeLoc bp2_2 = hg19GenomeLocParser.createGenomeLoc("1", 2, 2);
-        new SplitLocusIntervalsSmallTest("too many pieces", Arrays.asList(bp1_2), 5, Arrays.asList(bp1_1, bp2_2), 2);
-
-        new SplitLocusIntervalsSmallTest("emptyList", Collections.<GenomeLoc>emptyList(), 5, Collections.<GenomeLoc>emptyList(), 0);
-
-        return SplitLocusIntervalsSmallTest.getTests(SplitLocusIntervalsSmallTest.class);
-    }
-
-    @Test(enabled = true, dataProvider = "SplitLocusIntervalsSmallTest")
-    public void splitLocusIntervalsSmallTest(SplitLocusIntervalsSmallTest test) {
-        List<List<GenomeLoc>> splitByLocus = IntervalUtils.splitLocusIntervals(test.original, test.parts);
-        Assert.assertEquals(splitByLocus.size(), test.expectedParts, "SplitLocusIntervals failed to generate correct number of intervals");
-        List<GenomeLoc> flat = IntervalUtils.flattenSplitIntervals(splitByLocus);
-
-        // test sizes
-        final long originalSize = IntervalUtils.intervalSize(test.original);
-        final long splitSize = IntervalUtils.intervalSize(flat);
-        Assert.assertEquals(splitSize, originalSize, "SplitLocusIntervals locs cover an incorrect number of bases");
-
-        Assert.assertEquals(flat, test.expected, "SplitLocusIntervals locs not expected intervals");
-    }
-
-    //
-    // Misc. tests
-    //
-
-    @Test(expectedExceptions=UserException.class)
-    public void testMergeListsBySetOperatorNoOverlap() {
-        // a couple of lists we'll use for the testing
-        List<GenomeLoc> listEveryTwoFromOne = new ArrayList<GenomeLoc>();
-        List<GenomeLoc> listEveryTwoFromTwo = new ArrayList<GenomeLoc>();
-
-        // create the two lists we'll use
-        for (int x = 1; x < 101; x++) {
-            if (x % 2 == 0)
-                listEveryTwoFromTwo.add(hg18GenomeLocParser.createGenomeLoc("chr1",x,x));
-            else
-                listEveryTwoFromOne.add(hg18GenomeLocParser.createGenomeLoc("chr1",x,x));
-        }
-
-        List<GenomeLoc> ret;
-        ret = IntervalUtils.mergeListsBySetOperator(listEveryTwoFromTwo, listEveryTwoFromOne, IntervalSetRule.UNION);
-        Assert.assertEquals(ret.size(), 100);
-        ret = IntervalUtils.mergeListsBySetOperator(listEveryTwoFromTwo, listEveryTwoFromOne, null);
-        Assert.assertEquals(ret.size(), 100);
-        ret = IntervalUtils.mergeListsBySetOperator(listEveryTwoFromTwo, listEveryTwoFromOne, IntervalSetRule.INTERSECTION);
-        Assert.assertEquals(ret.size(), 0);
-    }
-
-    @Test
-    public void testMergeListsBySetOperatorAllOverlap() {
-        // a couple of lists we'll use for the testing
-        List<GenomeLoc> allSites = new ArrayList<GenomeLoc>();
-        List<GenomeLoc> listEveryTwoFromTwo = new ArrayList<GenomeLoc>();
-
-        // create the two lists we'll use
-        for (int x = 1; x < 101; x++) {
-            if (x % 2 == 0)
-                listEveryTwoFromTwo.add(hg18GenomeLocParser.createGenomeLoc("chr1",x,x));
-            allSites.add(hg18GenomeLocParser.createGenomeLoc("chr1",x,x));
-        }
-
-        List<GenomeLoc> ret;
-        ret = IntervalUtils.mergeListsBySetOperator(listEveryTwoFromTwo, allSites, IntervalSetRule.UNION);
-        Assert.assertEquals(ret.size(), 150);
-        ret = IntervalUtils.mergeListsBySetOperator(listEveryTwoFromTwo, allSites, null);
-        Assert.assertEquals(ret.size(), 150);
-        ret = IntervalUtils.mergeListsBySetOperator(listEveryTwoFromTwo, allSites, IntervalSetRule.INTERSECTION);
-        Assert.assertEquals(ret.size(), 50);
-    }
-
-    @Test
-    public void testMergeListsBySetOperator() {
-        // a couple of lists we'll use for the testing
-        List<GenomeLoc> allSites = new ArrayList<GenomeLoc>();
-        List<GenomeLoc> listEveryTwoFromTwo = new ArrayList<GenomeLoc>();
-
-        // create the two lists we'll use
-        for (int x = 1; x < 101; x++) {
-            if (x % 5 == 0) {
-                listEveryTwoFromTwo.add(hg18GenomeLocParser.createGenomeLoc("chr1",x,x));
-                allSites.add(hg18GenomeLocParser.createGenomeLoc("chr1",x,x));
-            }
-        }
-
-        List<GenomeLoc> ret;
-        ret = IntervalUtils.mergeListsBySetOperator(listEveryTwoFromTwo, allSites, IntervalSetRule.UNION);
-        Assert.assertEquals(ret.size(), 40);
-        ret = IntervalUtils.mergeListsBySetOperator(listEveryTwoFromTwo, allSites, null);
-        Assert.assertEquals(ret.size(), 40);
-        ret = IntervalUtils.mergeListsBySetOperator(listEveryTwoFromTwo, allSites, IntervalSetRule.INTERSECTION);
-        Assert.assertEquals(ret.size(), 20);
-    }
-
-    @Test
-    public void testOverlappingIntervalsFromSameSourceWithIntersection() {
-        // a couple of lists we'll use for the testing
-        List<GenomeLoc> source1 = new ArrayList<GenomeLoc>();
-        List<GenomeLoc> source2 = new ArrayList<GenomeLoc>();
-
-        source1.add(hg18GenomeLocParser.createGenomeLoc("chr1", 10, 20));
-        source1.add(hg18GenomeLocParser.createGenomeLoc("chr1", 15, 25));
-
-        source2.add(hg18GenomeLocParser.createGenomeLoc("chr1", 16, 18));
-        source2.add(hg18GenomeLocParser.createGenomeLoc("chr1", 22, 24));
-
-        List<GenomeLoc> ret = IntervalUtils.mergeListsBySetOperator(source1, source2, IntervalSetRule.INTERSECTION);
-        Assert.assertEquals(ret.size(), 2);
-    }
-
-    @Test
-    public void testGetContigLengths() {
-        Map<String, Integer> lengths = IntervalUtils.getContigSizes(new File(BaseTest.hg18Reference));
-        Assert.assertEquals((long)lengths.get("chr1"), 247249719);
-        Assert.assertEquals((long)lengths.get("chr2"), 242951149);
-        Assert.assertEquals((long)lengths.get("chr3"), 199501827);
-        Assert.assertEquals((long)lengths.get("chr20"), 62435964);
-        Assert.assertEquals((long)lengths.get("chrX"), 154913754);
-    }
-
-    @Test
-    public void testParseIntervalArguments() {
-        Assert.assertEquals(getLocs().size(), 45);
-        Assert.assertEquals(getLocs("chr1", "chr2", "chr3").size(), 3);
-        Assert.assertEquals(getLocs("chr1:1-2", "chr1:4-5", "chr2:1-1", "chr3:2-2").size(), 4);
-    }
-
-    @Test
-    public void testIsIntervalFile() {
-        Assert.assertTrue(IntervalUtils.isIntervalFile(BaseTest.privateTestDir + "empty_intervals.list"));
-        Assert.assertTrue(IntervalUtils.isIntervalFile(BaseTest.privateTestDir + "empty_intervals.list", true));
-
-        List<String> extensions = Arrays.asList("bed", "interval_list", "intervals", "list", "picard");
-        for (String extension: extensions) {
-            Assert.assertTrue(IntervalUtils.isIntervalFile("test_intervals." + extension, false), "Tested interval file extension: " + extension);
-        }
-    }
-
-    @Test(expectedExceptions = UserException.CouldNotReadInputFile.class)
-    public void testMissingIntervalFile() {
-        IntervalUtils.isIntervalFile(BaseTest.privateTestDir + "no_such_intervals.list");
-    }
-
-    @Test
-    public void testFixedScatterIntervalsBasic() {
-        GenomeLoc chr1 = hg18GenomeLocParser.parseGenomeLoc("chr1");
-        GenomeLoc chr2 = hg18GenomeLocParser.parseGenomeLoc("chr2");
-        GenomeLoc chr3 = hg18GenomeLocParser.parseGenomeLoc("chr3");
-
-        List<File> files = testFiles("basic.", 3, ".intervals");
-
-        List<GenomeLoc> locs = getLocs("chr1", "chr2", "chr3");
-        List<List<GenomeLoc>> splits = IntervalUtils.splitFixedIntervals(locs, files.size());
-        IntervalUtils.scatterFixedIntervals(hg18Header, splits, files);
-
-        List<GenomeLoc> locs1 = IntervalUtils.parseIntervalArguments(hg18GenomeLocParser, Arrays.asList(files.get(0).toString()));
-        List<GenomeLoc> locs2 = IntervalUtils.parseIntervalArguments(hg18GenomeLocParser, Arrays.asList(files.get(1).toString()));
-        List<GenomeLoc> locs3 = IntervalUtils.parseIntervalArguments(hg18GenomeLocParser, Arrays.asList(files.get(2).toString()));
-
-        Assert.assertEquals(locs1.size(), 1);
-        Assert.assertEquals(locs2.size(), 1);
-        Assert.assertEquals(locs3.size(), 1);
-
-        Assert.assertEquals(locs1.get(0), chr1);
-        Assert.assertEquals(locs2.get(0), chr2);
-        Assert.assertEquals(locs3.get(0), chr3);
-    }
-
-    @Test
-    public void testScatterFixedIntervalsLessFiles() {
-        GenomeLoc chr1 = hg18GenomeLocParser.parseGenomeLoc("chr1");
-        GenomeLoc chr2 = hg18GenomeLocParser.parseGenomeLoc("chr2");
-        GenomeLoc chr3 = hg18GenomeLocParser.parseGenomeLoc("chr3");
-        GenomeLoc chr4 = hg18GenomeLocParser.parseGenomeLoc("chr4");
-
-        List<File> files = testFiles("less.", 3, ".intervals");
-
-        List<GenomeLoc> locs = getLocs("chr1", "chr2", "chr3", "chr4");
-        List<List<GenomeLoc>> splits = IntervalUtils.splitFixedIntervals(locs, files.size());
-        IntervalUtils.scatterFixedIntervals(hg18Header, splits, files);
-
-        List<GenomeLoc> locs1 = IntervalUtils.parseIntervalArguments(hg18GenomeLocParser, Arrays.asList(files.get(0).toString()));
-        List<GenomeLoc> locs2 = IntervalUtils.parseIntervalArguments(hg18GenomeLocParser, Arrays.asList(files.get(1).toString()));
-        List<GenomeLoc> locs3 = IntervalUtils.parseIntervalArguments(hg18GenomeLocParser, Arrays.asList(files.get(2).toString()));
-
-        Assert.assertEquals(locs1.size(), 1);
-        Assert.assertEquals(locs2.size(), 1);
-        Assert.assertEquals(locs3.size(), 2);
-
-        Assert.assertEquals(locs1.get(0), chr1);
-        Assert.assertEquals(locs2.get(0), chr2);
-        Assert.assertEquals(locs3.get(0), chr3);
-        Assert.assertEquals(locs3.get(1), chr4);
-    }
-
-    @Test(expectedExceptions=UserException.BadArgumentValue.class)
-    public void testSplitFixedIntervalsMoreFiles() {
-        List<File> files = testFiles("more.", 3, ".intervals");
-        List<GenomeLoc> locs = getLocs("chr1", "chr2");
-        IntervalUtils.splitFixedIntervals(locs, files.size());
-    }
-
-    @Test(expectedExceptions=UserException.BadArgumentValue.class)
-    public void testScatterFixedIntervalsMoreFiles() {
-        List<File> files = testFiles("more.", 3, ".intervals");
-        List<GenomeLoc> locs = getLocs("chr1", "chr2");
-        List<List<GenomeLoc>> splits = IntervalUtils.splitFixedIntervals(locs, locs.size()); // locs.size() instead of files.size()
-        IntervalUtils.scatterFixedIntervals(hg18Header, splits, files);
-    }
-    @Test
-    public void testScatterFixedIntervalsStart() {
-        List<String> intervals = Arrays.asList("chr1:1-2", "chr1:4-5", "chr2:1-1", "chr3:2-2");
-        GenomeLoc chr1a = hg18GenomeLocParser.parseGenomeLoc("chr1:1-2");
-        GenomeLoc chr1b = hg18GenomeLocParser.parseGenomeLoc("chr1:4-5");
-        GenomeLoc chr2 = hg18GenomeLocParser.parseGenomeLoc("chr2:1-1");
-        GenomeLoc chr3 = hg18GenomeLocParser.parseGenomeLoc("chr3:2-2");
-
-        List<File> files = testFiles("split.", 3, ".intervals");
-
-        List<GenomeLoc> locs = getLocs(intervals);
-        List<List<GenomeLoc>> splits = IntervalUtils.splitFixedIntervals(locs, files.size());
-        IntervalUtils.scatterFixedIntervals(hg18Header, splits, files);
-
-        List<GenomeLoc> locs1 = IntervalUtils.parseIntervalArguments(hg18GenomeLocParser, Arrays.asList(files.get(0).toString()));
-        List<GenomeLoc> locs2 = IntervalUtils.parseIntervalArguments(hg18GenomeLocParser, Arrays.asList(files.get(1).toString()));
-        List<GenomeLoc> locs3 = IntervalUtils.parseIntervalArguments(hg18GenomeLocParser, Arrays.asList(files.get(2).toString()));
-
-        Assert.assertEquals(locs1.size(), 1);
-        Assert.assertEquals(locs2.size(), 1);
-        Assert.assertEquals(locs3.size(), 2);
-
-        Assert.assertEquals(locs1.get(0), chr1a);
-        Assert.assertEquals(locs2.get(0), chr1b);
-        Assert.assertEquals(locs3.get(0), chr2);
-        Assert.assertEquals(locs3.get(1), chr3);
-    }
-
-    @Test
-    public void testScatterFixedIntervalsMiddle() {
-        List<String> intervals = Arrays.asList("chr1:1-1", "chr2:1-2", "chr2:4-5", "chr3:2-2");
-        GenomeLoc chr1 = hg18GenomeLocParser.parseGenomeLoc("chr1:1-1");
-        GenomeLoc chr2a = hg18GenomeLocParser.parseGenomeLoc("chr2:1-2");
-        GenomeLoc chr2b = hg18GenomeLocParser.parseGenomeLoc("chr2:4-5");
-        GenomeLoc chr3 = hg18GenomeLocParser.parseGenomeLoc("chr3:2-2");
-
-        List<File> files = testFiles("split.", 3, ".intervals");
-
-        List<GenomeLoc> locs = getLocs(intervals);
-        List<List<GenomeLoc>> splits = IntervalUtils.splitFixedIntervals(locs, files.size());
-        IntervalUtils.scatterFixedIntervals(hg18Header, splits, files);
-
-        List<GenomeLoc> locs1 = IntervalUtils.parseIntervalArguments(hg18GenomeLocParser, Arrays.asList(files.get(0).toString()));
-        List<GenomeLoc> locs2 = IntervalUtils.parseIntervalArguments(hg18GenomeLocParser, Arrays.asList(files.get(1).toString()));
-        List<GenomeLoc> locs3 = IntervalUtils.parseIntervalArguments(hg18GenomeLocParser, Arrays.asList(files.get(2).toString()));
-
-        Assert.assertEquals(locs1.size(), 1);
-        Assert.assertEquals(locs2.size(), 1);
-        Assert.assertEquals(locs3.size(), 2);
-
-        Assert.assertEquals(locs1.get(0), chr1);
-        Assert.assertEquals(locs2.get(0), chr2a);
-        Assert.assertEquals(locs3.get(0), chr2b);
-        Assert.assertEquals(locs3.get(1), chr3);
-    }
-
-    @Test
-    public void testScatterFixedIntervalsEnd() {
-        List<String> intervals = Arrays.asList("chr1:1-1", "chr2:2-2", "chr3:1-2", "chr3:4-5");
-        GenomeLoc chr1 = hg18GenomeLocParser.parseGenomeLoc("chr1:1-1");
-        GenomeLoc chr2 = hg18GenomeLocParser.parseGenomeLoc("chr2:2-2");
-        GenomeLoc chr3a = hg18GenomeLocParser.parseGenomeLoc("chr3:1-2");
-        GenomeLoc chr3b = hg18GenomeLocParser.parseGenomeLoc("chr3:4-5");
-
-        List<File> files = testFiles("split.", 3, ".intervals");
-
-        List<GenomeLoc> locs = getLocs(intervals);
-        List<List<GenomeLoc>> splits = IntervalUtils.splitFixedIntervals(locs, files.size());
-        IntervalUtils.scatterFixedIntervals(hg18Header, splits, files);
-
-        List<GenomeLoc> locs1 = IntervalUtils.parseIntervalArguments(hg18GenomeLocParser, Arrays.asList(files.get(0).toString()));
-        List<GenomeLoc> locs2 = IntervalUtils.parseIntervalArguments(hg18GenomeLocParser, Arrays.asList(files.get(1).toString()));
-        List<GenomeLoc> locs3 = IntervalUtils.parseIntervalArguments(hg18GenomeLocParser, Arrays.asList(files.get(2).toString()));
-
-        Assert.assertEquals(locs1.size(), 2);
-        Assert.assertEquals(locs2.size(), 1);
-        Assert.assertEquals(locs3.size(), 1);
-
-        Assert.assertEquals(locs1.get(0), chr1);
-        Assert.assertEquals(locs1.get(1), chr2);
-        Assert.assertEquals(locs2.get(0), chr3a);
-        Assert.assertEquals(locs3.get(0), chr3b);
-    }
-
-    @Test
-    public void testScatterFixedIntervalsFile() {
-        List<File> files = testFiles("sg.", 20, ".intervals");
-        List<GenomeLoc> locs = IntervalUtils.parseIntervalArguments(hg18GenomeLocParser, Arrays.asList(BaseTest.GATKDataLocation + "whole_exome_agilent_designed_120.targets.hg18.chr20.interval_list"));
-        List<List<GenomeLoc>> splits = IntervalUtils.splitFixedIntervals(locs, files.size());
-
-        int[] counts = {
-                125, 138, 287, 291, 312, 105, 155, 324,
-                295, 298, 141, 121, 285, 302, 282, 88,
-                116, 274, 282, 248
-//                5169, 5573, 10017, 10567, 10551,
-//                5087, 4908, 10120, 10435, 10399,
-//                5391, 4735, 10621, 10352, 10654,
-//                5227, 5256, 10151, 9649, 9825
-        };
-
-        //String splitCounts = "";
-        for (int i = 0; i < splits.size(); i++) {
-            int splitCount = splits.get(i).size();
-            Assert.assertEquals(splitCount, counts[i], "Num intervals in split " + i);
-        }
-        //System.out.println(splitCounts.substring(2));
-
-        IntervalUtils.scatterFixedIntervals(hg18Header, splits, files);
-
-        int locIndex = 0;
-        for (int i = 0; i < files.size(); i++) {
-            String file = files.get(i).toString();
-            List<GenomeLoc> parsedLocs = IntervalUtils.parseIntervalArguments(hg18GenomeLocParser, Arrays.asList(file));
-            Assert.assertEquals(parsedLocs.size(), counts[i], "Intervals in " + file);
-            for (GenomeLoc parsedLoc: parsedLocs)
-                Assert.assertEquals(parsedLoc, locs.get(locIndex), String.format("Genome loc %d from file %d", locIndex++, i));
-        }
-        Assert.assertEquals(locIndex, locs.size(), "Total number of GenomeLocs");
-    }
-
-    @Test
-    public void testScatterFixedIntervalsMax() {
-        List<File> files = testFiles("sg.", 85, ".intervals");
-        List<List<GenomeLoc>> splits = IntervalUtils.splitFixedIntervals(hg19ReferenceLocs, files.size());
-        IntervalUtils.scatterFixedIntervals(hg19Header, splits, files);
-
-        for (int i = 0; i < files.size(); i++) {
-            String file = files.get(i).toString();
-            List<GenomeLoc> parsedLocs = IntervalUtils.parseIntervalArguments(hg19GenomeLocParser, Arrays.asList(file));
-            Assert.assertEquals(parsedLocs.size(), 1, "parsedLocs[" + i + "].size()");
-            Assert.assertEquals(parsedLocs.get(0), hg19ReferenceLocs.get(i), "parsedLocs[" + i + "].get()");
-        }
-    }
-
-    @Test
-    public void testScatterContigIntervalsOrder() {
-        List<String> intervals = Arrays.asList("chr2:1-1", "chr1:1-1", "chr3:2-2");
-        GenomeLoc chr1 = hg18GenomeLocParser.parseGenomeLoc("chr1:1-1");
-        GenomeLoc chr2 = hg18GenomeLocParser.parseGenomeLoc("chr2:1-1");
-        GenomeLoc chr3 = hg18GenomeLocParser.parseGenomeLoc("chr3:2-2");
-
-        List<File> files = testFiles("split.", 3, ".intervals");
-
-        IntervalUtils.scatterContigIntervals(hg18Header, getLocs(intervals), files);
-
-        List<GenomeLoc> locs1 = IntervalUtils.parseIntervalArguments(hg18GenomeLocParser, Arrays.asList(files.get(0).toString()));
-        List<GenomeLoc> locs2 = IntervalUtils.parseIntervalArguments(hg18GenomeLocParser, Arrays.asList(files.get(1).toString()));
-        List<GenomeLoc> locs3 = IntervalUtils.parseIntervalArguments(hg18GenomeLocParser, Arrays.asList(files.get(2).toString()));
-
-        Assert.assertEquals(locs1.size(), 1);
-        Assert.assertEquals(locs2.size(), 1);
-        Assert.assertEquals(locs3.size(), 1);
-
-        Assert.assertEquals(locs1.get(0), chr2);
-        Assert.assertEquals(locs2.get(0), chr1);
-        Assert.assertEquals(locs3.get(0), chr3);
-    }
-
-    @Test
-    public void testScatterContigIntervalsBasic() {
-        GenomeLoc chr1 = hg18GenomeLocParser.parseGenomeLoc("chr1");
-        GenomeLoc chr2 = hg18GenomeLocParser.parseGenomeLoc("chr2");
-        GenomeLoc chr3 = hg18GenomeLocParser.parseGenomeLoc("chr3");
-
-        List<File> files = testFiles("contig_basic.", 3, ".intervals");
-
-        IntervalUtils.scatterContigIntervals(hg18Header, getLocs("chr1", "chr2", "chr3"), files);
-
-        List<GenomeLoc> locs1 = IntervalUtils.parseIntervalArguments(hg18GenomeLocParser, Arrays.asList(files.get(0).toString()));
-        List<GenomeLoc> locs2 = IntervalUtils.parseIntervalArguments(hg18GenomeLocParser, Arrays.asList(files.get(1).toString()));
-        List<GenomeLoc> locs3 = IntervalUtils.parseIntervalArguments(hg18GenomeLocParser, Arrays.asList(files.get(2).toString()));
-
-        Assert.assertEquals(locs1.size(), 1);
-        Assert.assertEquals(locs2.size(), 1);
-        Assert.assertEquals(locs3.size(), 1);
-
-        Assert.assertEquals(locs1.get(0), chr1);
-        Assert.assertEquals(locs2.get(0), chr2);
-        Assert.assertEquals(locs3.get(0), chr3);
-    }
-
-    @Test
-    public void testScatterContigIntervalsLessFiles() {
-        GenomeLoc chr1 = hg18GenomeLocParser.parseGenomeLoc("chr1");
-        GenomeLoc chr2 = hg18GenomeLocParser.parseGenomeLoc("chr2");
-        GenomeLoc chr3 = hg18GenomeLocParser.parseGenomeLoc("chr3");
-        GenomeLoc chr4 = hg18GenomeLocParser.parseGenomeLoc("chr4");
-
-        List<File> files = testFiles("contig_less.", 3, ".intervals");
-
-        IntervalUtils.scatterContigIntervals(hg18Header, getLocs("chr1", "chr2", "chr3", "chr4"), files);
-
-        List<GenomeLoc> locs1 = IntervalUtils.parseIntervalArguments(hg18GenomeLocParser, Arrays.asList(files.get(0).toString()));
-        List<GenomeLoc> locs2 = IntervalUtils.parseIntervalArguments(hg18GenomeLocParser, Arrays.asList(files.get(1).toString()));
-        List<GenomeLoc> locs3 = IntervalUtils.parseIntervalArguments(hg18GenomeLocParser, Arrays.asList(files.get(2).toString()));
-
-        Assert.assertEquals(locs1.size(), 2);
-        Assert.assertEquals(locs2.size(), 1);
-        Assert.assertEquals(locs3.size(), 1);
-
-        Assert.assertEquals(locs1.get(0), chr1);
-        Assert.assertEquals(locs1.get(1), chr2);
-        Assert.assertEquals(locs2.get(0), chr3);
-        Assert.assertEquals(locs3.get(0), chr4);
-    }
-
-    @Test(expectedExceptions=UserException.BadInput.class)
-    public void testScatterContigIntervalsMoreFiles() {
-        List<File> files = testFiles("contig_more.", 3, ".intervals");
-        IntervalUtils.scatterContigIntervals(hg18Header, getLocs("chr1", "chr2"), files);
-    }
-
-    @Test
-    public void testScatterContigIntervalsStart() {
-        List<String> intervals = Arrays.asList("chr1:1-2", "chr1:4-5", "chr2:1-1", "chr3:2-2");
-        GenomeLoc chr1a = hg18GenomeLocParser.parseGenomeLoc("chr1:1-2");
-        GenomeLoc chr1b = hg18GenomeLocParser.parseGenomeLoc("chr1:4-5");
-        GenomeLoc chr2 = hg18GenomeLocParser.parseGenomeLoc("chr2:1-1");
-        GenomeLoc chr3 = hg18GenomeLocParser.parseGenomeLoc("chr3:2-2");
-
-        List<File> files = testFiles("contig_split_start.", 3, ".intervals");
-
-        IntervalUtils.scatterContigIntervals(hg18Header, getLocs(intervals), files);
-
-        List<GenomeLoc> locs1 = IntervalUtils.parseIntervalArguments(hg18GenomeLocParser, Arrays.asList(files.get(0).toString()));
-        List<GenomeLoc> locs2 = IntervalUtils.parseIntervalArguments(hg18GenomeLocParser, Arrays.asList(files.get(1).toString()));
-        List<GenomeLoc> locs3 = IntervalUtils.parseIntervalArguments(hg18GenomeLocParser, Arrays.asList(files.get(2).toString()));
-
-        Assert.assertEquals(locs1.size(), 2);
-        Assert.assertEquals(locs2.size(), 1);
-        Assert.assertEquals(locs3.size(), 1);
-
-        Assert.assertEquals(locs1.get(0), chr1a);
-        Assert.assertEquals(locs1.get(1), chr1b);
-        Assert.assertEquals(locs2.get(0), chr2);
-        Assert.assertEquals(locs3.get(0), chr3);
-    }
-
-    @Test
-    public void testScatterContigIntervalsMiddle() {
-        List<String> intervals = Arrays.asList("chr1:1-1", "chr2:1-2", "chr2:4-5", "chr3:2-2");
-        GenomeLoc chr1 = hg18GenomeLocParser.parseGenomeLoc("chr1:1-1");
-        GenomeLoc chr2a = hg18GenomeLocParser.parseGenomeLoc("chr2:1-2");
-        GenomeLoc chr2b = hg18GenomeLocParser.parseGenomeLoc("chr2:4-5");
-        GenomeLoc chr3 = hg18GenomeLocParser.parseGenomeLoc("chr3:2-2");
-
-        List<File> files = testFiles("contig_split_middle.", 3, ".intervals");
-
-        IntervalUtils.scatterContigIntervals(hg18Header, getLocs(intervals), files);
-
-        List<GenomeLoc> locs1 = IntervalUtils.parseIntervalArguments(hg18GenomeLocParser, Arrays.asList(files.get(0).toString()));
-        List<GenomeLoc> locs2 = IntervalUtils.parseIntervalArguments(hg18GenomeLocParser, Arrays.asList(files.get(1).toString()));
-        List<GenomeLoc> locs3 = IntervalUtils.parseIntervalArguments(hg18GenomeLocParser, Arrays.asList(files.get(2).toString()));
-
-        Assert.assertEquals(locs1.size(), 1);
-        Assert.assertEquals(locs2.size(), 2);
-        Assert.assertEquals(locs3.size(), 1);
-
-        Assert.assertEquals(locs1.get(0), chr1);
-        Assert.assertEquals(locs2.get(0), chr2a);
-        Assert.assertEquals(locs2.get(1), chr2b);
-        Assert.assertEquals(locs3.get(0), chr3);
-    }
-
-    @Test
-    public void testScatterContigIntervalsEnd() {
-        List<String> intervals = Arrays.asList("chr1:1-1", "chr2:2-2", "chr3:1-2", "chr3:4-5");
-        GenomeLoc chr1 = hg18GenomeLocParser.parseGenomeLoc("chr1:1-1");
-        GenomeLoc chr2 = hg18GenomeLocParser.parseGenomeLoc("chr2:2-2");
-        GenomeLoc chr3a = hg18GenomeLocParser.parseGenomeLoc("chr3:1-2");
-        GenomeLoc chr3b = hg18GenomeLocParser.parseGenomeLoc("chr3:4-5");
-
-        List<File> files = testFiles("contig_split_end.", 3 ,".intervals");
-
-        IntervalUtils.scatterContigIntervals(hg18Header, getLocs(intervals), files);
-
-        List<GenomeLoc> locs1 = IntervalUtils.parseIntervalArguments(hg18GenomeLocParser, Arrays.asList(files.get(0).toString()));
-        List<GenomeLoc> locs2 = IntervalUtils.parseIntervalArguments(hg18GenomeLocParser, Arrays.asList(files.get(1).toString()));
-        List<GenomeLoc> locs3 = IntervalUtils.parseIntervalArguments(hg18GenomeLocParser, Arrays.asList(files.get(2).toString()));
-
-        Assert.assertEquals(locs1.size(), 1);
-        Assert.assertEquals(locs2.size(), 1);
-        Assert.assertEquals(locs3.size(), 2);
-
-        Assert.assertEquals(locs1.get(0), chr1);
-        Assert.assertEquals(locs2.get(0), chr2);
-        Assert.assertEquals(locs3.get(0), chr3a);
-        Assert.assertEquals(locs3.get(1), chr3b);
-    }
-
-    @Test
-    public void testScatterContigIntervalsMax() {
-        List<File> files = testFiles("sg.", 85, ".intervals");
-        IntervalUtils.scatterContigIntervals(hg19Header, hg19ReferenceLocs, files);
-
-        for (int i = 0; i < files.size(); i++) {
-            String file = files.get(i).toString();
-            List<GenomeLoc> parsedLocs = IntervalUtils.parseIntervalArguments(hg19GenomeLocParser, Arrays.asList(file));
-            Assert.assertEquals(parsedLocs.size(), 1, "parsedLocs[" + i + "].size()");
-            Assert.assertEquals(parsedLocs.get(0), hg19ReferenceLocs.get(i), "parsedLocs[" + i + "].get()");
-        }
-    }
-
-    private List<File> testFiles(String prefix, int count, String suffix) {
-        ArrayList<File> files = new ArrayList<File>();
-        for (int i = 1; i <= count; i++) {
-            files.add(createTempFile(prefix + i, suffix));
-        }
-        return files;
-    }
-
-    @DataProvider(name="unmergedIntervals")
-    public Object[][] getUnmergedIntervals() {
-        return new Object[][] {
-                new Object[] {"small_unmerged_picard_intervals.list"},
-                new Object[] {"small_unmerged_gatk_intervals.list"}
-        };
-    }
-
-    @Test(dataProvider="unmergedIntervals")
-    public void testUnmergedIntervals(String unmergedIntervals) {
-        List<GenomeLoc> locs = IntervalUtils.parseIntervalArguments(hg18GenomeLocParser, Collections.singletonList(privateTestDir + unmergedIntervals));
-        Assert.assertEquals(locs.size(), 2);
-
-        List<GenomeLoc> merged;
-
-        merged = IntervalUtils.mergeIntervalLocations(locs, IntervalMergingRule.ALL);
-        Assert.assertEquals(merged.size(), 1);
-
-        // Test that null means the same as ALL
-        merged = IntervalUtils.mergeIntervalLocations(locs, null);
-        Assert.assertEquals(merged.size(), 1);
-    }
-
-    /*
-    Split into tests that can be written to files and tested by writeFlankingIntervals,
-    and lists that cannot but are still handled by getFlankingIntervals.
-    */
-    private static abstract class FlankingIntervalsTestData extends TestDataProvider {
-        final public File referenceFile;
-        final public GenomeLocParser parser;
-        final int basePairs;
-        final List<GenomeLoc> original;
-        final List<GenomeLoc> expected;
-
-        protected FlankingIntervalsTestData(Class<?> clazz, String name, File referenceFile, GenomeLocParser parser,
-                                          int basePairs, List<String> original, List<String> expected) {
-            super(clazz, name);
-            this.referenceFile = referenceFile;
-            this.parser = parser;
-            this.basePairs = basePairs;
-            this.original = parse(parser, original);
-            this.expected = parse(parser, expected);
-        }
-
-        private static List<GenomeLoc> parse(GenomeLocParser parser, List<String> locs) {
-            List<GenomeLoc> parsed = new ArrayList<GenomeLoc>();
-            for (String loc: locs)
-                parsed.add("unmapped".equals(loc) ? GenomeLoc.UNMAPPED : parser.parseGenomeLoc(loc));
-            return parsed;
-        }
-    }
-
-    private static class FlankingIntervalsFile extends FlankingIntervalsTestData {
-        public FlankingIntervalsFile(String name, File referenceFile, GenomeLocParser parser,
-                                     int basePairs, List<String> original, List<String> expected) {
-            super(FlankingIntervalsFile.class, name, referenceFile, parser, basePairs, original, expected);
-        }
-    }
-
-    private static class FlankingIntervalsList extends FlankingIntervalsTestData {
-        public FlankingIntervalsList(String name, File referenceFile, GenomeLocParser parser,
-                                     int basePairs, List<String> original, List<String> expected) {
-            super(FlankingIntervalsList.class, name, referenceFile, parser, basePairs, original, expected);
-        }
-    }
-
-    /* Intervals where the original and the flanks can be written to files. */
-    @DataProvider(name = "flankingIntervalsFiles")
-    public Object[][] getFlankingIntervalsFiles() {
-        File hg19ReferenceFile = new File(BaseTest.hg19Reference);
-        int hg19Length1 = hg19GenomeLocParser.getContigInfo("1").getSequenceLength();
-
-        new FlankingIntervalsFile("atStartBase1", hg19ReferenceFile, hg19GenomeLocParser, 1,
-                Arrays.asList("1:1"),
-                Arrays.asList("1:2"));
-
-        new FlankingIntervalsFile("atStartBase50", hg19ReferenceFile, hg19GenomeLocParser, 50,
-                Arrays.asList("1:1"),
-                Arrays.asList("1:2-51"));
-
-        new FlankingIntervalsFile("atStartRange50", hg19ReferenceFile, hg19GenomeLocParser, 50,
-                Arrays.asList("1:1-10"),
-                Arrays.asList("1:11-60"));
-
-        new FlankingIntervalsFile("atEndBase1", hg19ReferenceFile, hg19GenomeLocParser, 1,
-                Arrays.asList("1:" + hg19Length1),
-                Arrays.asList("1:" + (hg19Length1 - 1)));
-
-        new FlankingIntervalsFile("atEndBase50", hg19ReferenceFile, hg19GenomeLocParser, 50,
-                Arrays.asList("1:" + hg19Length1),
-                Arrays.asList(String.format("1:%d-%d", hg19Length1 - 50, hg19Length1 - 1)));
-
-        new FlankingIntervalsFile("atEndRange50", hg19ReferenceFile, hg19GenomeLocParser, 50,
-                Arrays.asList(String.format("1:%d-%d", hg19Length1 - 10, hg19Length1)),
-                Arrays.asList(String.format("1:%d-%d", hg19Length1 - 60, hg19Length1 - 11)));
-
-        new FlankingIntervalsFile("nearStartBase1", hg19ReferenceFile, hg19GenomeLocParser, 1,
-                Arrays.asList("1:2"),
-                Arrays.asList("1:1", "1:3"));
-
-        new FlankingIntervalsFile("nearStartRange50", hg19ReferenceFile, hg19GenomeLocParser, 50,
-                Arrays.asList("1:21-30"),
-                Arrays.asList("1:1-20", "1:31-80"));
-
-        new FlankingIntervalsFile("nearEndBase1", hg19ReferenceFile, hg19GenomeLocParser, 1,
-                Arrays.asList("1:" + (hg19Length1 - 1)),
-                Arrays.asList("1:" + (hg19Length1 - 2), "1:" + hg19Length1));
-
-        new FlankingIntervalsFile("nearEndRange50", hg19ReferenceFile, hg19GenomeLocParser, 50,
-                Arrays.asList(String.format("1:%d-%d", hg19Length1 - 30, hg19Length1 - 21)),
-                Arrays.asList(
-                        String.format("1:%d-%d", hg19Length1 - 80, hg19Length1 - 31),
-                        String.format("1:%d-%d", hg19Length1 - 20, hg19Length1)));
-
-        new FlankingIntervalsFile("beyondStartBase1", hg19ReferenceFile, hg19GenomeLocParser, 1,
-                Arrays.asList("1:3"),
-                Arrays.asList("1:2", "1:4"));
-
-        new FlankingIntervalsFile("beyondStartRange50", hg19ReferenceFile, hg19GenomeLocParser, 50,
-                Arrays.asList("1:101-200"),
-                Arrays.asList("1:51-100", "1:201-250"));
-
-        new FlankingIntervalsFile("beyondEndBase1", hg19ReferenceFile, hg19GenomeLocParser, 1,
-                Arrays.asList("1:" + (hg19Length1 - 3)),
-                Arrays.asList("1:" + (hg19Length1 - 4), "1:" + (hg19Length1 - 2)));
-
-        new FlankingIntervalsFile("beyondEndRange50", hg19ReferenceFile, hg19GenomeLocParser, 50,
-                Arrays.asList(String.format("1:%d-%d", hg19Length1 - 200, hg19Length1 - 101)),
-                Arrays.asList(
-                        String.format("1:%d-%d", hg19Length1 - 250, hg19Length1 - 201),
-                        String.format("1:%d-%d", hg19Length1 - 100, hg19Length1 - 51)));
-
-        new FlankingIntervalsFile("betweenFar50", hg19ReferenceFile, hg19GenomeLocParser, 50,
-                Arrays.asList("1:101-200", "1:401-500"),
-                Arrays.asList("1:51-100", "1:201-250", "1:351-400", "1:501-550"));
-
-        new FlankingIntervalsFile("betweenSpan50", hg19ReferenceFile, hg19GenomeLocParser, 50,
-                Arrays.asList("1:101-200", "1:301-400"),
-                Arrays.asList("1:51-100", "1:201-300", "1:401-450"));
-
-        new FlankingIntervalsFile("betweenOverlap50", hg19ReferenceFile, hg19GenomeLocParser, 50,
-                Arrays.asList("1:101-200", "1:271-400"),
-                Arrays.asList("1:51-100", "1:201-270", "1:401-450"));
-
-        new FlankingIntervalsFile("betweenShort50", hg19ReferenceFile, hg19GenomeLocParser, 50,
-                Arrays.asList("1:101-200", "1:221-400"),
-                Arrays.asList("1:51-100", "1:201-220", "1:401-450"));
-
-        new FlankingIntervalsFile("betweenNone50", hg19ReferenceFile, hg19GenomeLocParser, 50,
-                Arrays.asList("1:101-200", "1:121-400"),
-                Arrays.asList("1:51-100", "1:401-450"));
-
-        new FlankingIntervalsFile("twoContigs", hg19ReferenceFile, hg19GenomeLocParser, 50,
-                Arrays.asList("1:101-200", "2:301-400"),
-                Arrays.asList("1:51-100", "1:201-250", "2:251-300", "2:401-450"));
-
-        // Explicit testing a problematic agilent target pair
-        new FlankingIntervalsFile("badAgilent", hg19ReferenceFile, hg19GenomeLocParser, 50,
-                Arrays.asList("2:74756257-74756411", "2:74756487-74756628"),
-                // wrong!    ("2:74756206-74756256", "2:74756412-74756462", "2:74756436-74756486", "2:74756629-74756679")
-                Arrays.asList("2:74756207-74756256", "2:74756412-74756486", "2:74756629-74756678"));
-
-        return TestDataProvider.getTests(FlankingIntervalsFile.class);
-    }
-
-    /* Intervals where either the original and/or the flanks cannot be written to a file. */
-    @DataProvider(name = "flankingIntervalsLists")
-    public Object[][] getFlankingIntervalsLists() {
-        File hg19ReferenceFile = new File(BaseTest.hg19Reference);
-        List<String> empty = Collections.emptyList();
-
-        new FlankingIntervalsList("empty", hg19ReferenceFile, hg19GenomeLocParser, 50,
-                empty,
-                empty);
-
-        new FlankingIntervalsList("unmapped", hg19ReferenceFile, hg19GenomeLocParser, 50,
-                Arrays.asList("unmapped"),
-                empty);
-
-        new FlankingIntervalsList("fullContig", hg19ReferenceFile, hg19GenomeLocParser, 50,
-                Arrays.asList("1"),
-                empty);
-
-        new FlankingIntervalsList("fullContigs", hg19ReferenceFile, hg19GenomeLocParser, 50,
-                Arrays.asList("1", "2", "3"),
-                empty);
-
-        new FlankingIntervalsList("betweenWithUnmapped", hg19ReferenceFile, hg19GenomeLocParser, 50,
-                Arrays.asList("1:101-200", "1:301-400", "unmapped"),
-                Arrays.asList("1:51-100", "1:201-300", "1:401-450"));
-
-        return TestDataProvider.getTests(FlankingIntervalsList.class);
-    }
-
-    @Test(dataProvider = "flankingIntervalsFiles")
-    public void testWriteFlankingIntervals(FlankingIntervalsTestData data) throws Exception {
-        File originalFile = createTempFile("original.", ".intervals");
-        File flankingFile = createTempFile("flanking.", ".intervals");
-        try {
-            List<String> lines = new ArrayList<String>();
-            for (GenomeLoc loc: data.original)
-                lines.add(loc.toString());
-            FileUtils.writeLines(originalFile, lines);
-
-            IntervalUtils.writeFlankingIntervals(data.referenceFile, originalFile, flankingFile, data.basePairs);
-
-            List<GenomeLoc> actual = IntervalUtils.intervalFileToList(data.parser, flankingFile.getAbsolutePath());
-
-            String description = String.format("%n      name: %s%n  original: %s%n    actual: %s%n  expected: %s%n",
-                    data.toString(), data.original, actual, data.expected);
-            Assert.assertEquals(actual, data.expected, description);
-        } finally {
-            FileUtils.deleteQuietly(originalFile);
-            FileUtils.deleteQuietly(flankingFile);
-        }
-    }
-
-    @Test(dataProvider = "flankingIntervalsLists", expectedExceptions = UserException.class)
-    public void testWritingBadFlankingIntervals(FlankingIntervalsTestData data) throws Exception {
-        File originalFile = createTempFile("original.", ".intervals");
-        File flankingFile = createTempFile("flanking.", ".intervals");
-        try {
-            List<String> lines = new ArrayList<String>();
-            for (GenomeLoc loc: data.original)
-                lines.add(loc.toString());
-            FileUtils.writeLines(originalFile, lines);
-
-            // Should throw a user exception on bad input if either the original
-            // intervals are empty or if the flanking intervals are empty
-            IntervalUtils.writeFlankingIntervals(data.referenceFile, originalFile, flankingFile, data.basePairs);
-        } finally {
-            FileUtils.deleteQuietly(originalFile);
-            FileUtils.deleteQuietly(flankingFile);
-        }
-    }
-
-    @Test(dataProvider = "flankingIntervalsLists")
-    public void testGetFlankingIntervals(FlankingIntervalsTestData data) {
-        List<GenomeLoc> actual = IntervalUtils.getFlankingIntervals(data.parser, data.original, data.basePairs);
-        String description = String.format("%n      name: %s%n  original: %s%n    actual: %s%n  expected: %s%n",
-                data.toString(), data.original, actual, data.expected);
-        Assert.assertEquals(actual, data.expected, description);
-    }
-
-    @Test(expectedExceptions=UserException.BadArgumentValue.class)
-    public void testExceptionUponLegacyIntervalSyntax() throws Exception {
-        GenomeAnalysisEngine toolkit = new GenomeAnalysisEngine();
-        toolkit.setGenomeLocParser(new GenomeLocParser(new CachingIndexedFastaSequenceFile(new File(BaseTest.hg19Reference))));
-
-        // Attempting to use the legacy -L "interval1;interval2" syntax should produce an exception:
-        IntervalBinding<Feature> binding = new IntervalBinding<Feature>("1;2");
-        binding.getIntervals(toolkit);
-    }
-
-    @DataProvider(name="invalidIntervalTestData")
-    public Object[][] invalidIntervalDataProvider() throws Exception {
-        GATKArgumentCollection argCollection = new GATKArgumentCollection();
-        File fastaFile = new File(publicTestDir + "exampleFASTA.fasta");
-        GenomeLocParser genomeLocParser = new GenomeLocParser(new IndexedFastaSequenceFile(fastaFile));
-
-        return new Object[][] {
-                new Object[] {argCollection, genomeLocParser, "chr1", 10000000, 20000000},
-                new Object[] {argCollection, genomeLocParser, "chr2", 1, 2},
-                new Object[] {argCollection, genomeLocParser, "chr1", -1, 50}
-        };
-    }
-
-    @Test(dataProvider="invalidIntervalTestData")
-    public void testInvalidPicardIntervalHandling(GATKArgumentCollection argCollection, GenomeLocParser genomeLocParser,
-                                                  String contig, int intervalStart, int intervalEnd ) throws Exception {
-
-        SAMFileHeader picardFileHeader = new SAMFileHeader();
-        picardFileHeader.addSequence(genomeLocParser.getContigInfo("chr1"));
-        IntervalList picardIntervals = new IntervalList(picardFileHeader);
-        picardIntervals.add(new Interval(contig, intervalStart, intervalEnd, true, "dummyname"));
-
-        File picardIntervalFile = createTempFile("testInvalidPicardIntervalHandling", ".intervals");
-        picardIntervals.write(picardIntervalFile);
-
-        List<IntervalBinding<Feature>> intervalArgs = new ArrayList<IntervalBinding<Feature>>(1);
-        intervalArgs.add(new IntervalBinding<Feature>(picardIntervalFile.getAbsolutePath()));
-
-        IntervalUtils.loadIntervals(intervalArgs, argCollection.intervalArguments.intervalSetRule, argCollection.intervalArguments.intervalMerging, argCollection.intervalArguments.intervalPadding, genomeLocParser);
-    }
-
-    @Test(expectedExceptions=UserException.class, dataProvider="invalidIntervalTestData")
-    public void testInvalidGATKFileIntervalHandling(GATKArgumentCollection argCollection, GenomeLocParser genomeLocParser,
-                                                    String contig, int intervalStart, int intervalEnd ) throws Exception {
-
-        File gatkIntervalFile = createTempFile("testInvalidGATKFileIntervalHandling", ".intervals",
-                String.format("%s:%d-%d", contig, intervalStart, intervalEnd));
-
-        List<IntervalBinding<Feature>> intervalArgs = new ArrayList<IntervalBinding<Feature>>(1);
-        intervalArgs.add(new IntervalBinding<Feature>(gatkIntervalFile.getAbsolutePath()));
-
-        IntervalUtils.loadIntervals(intervalArgs, argCollection.intervalArguments.intervalSetRule, argCollection.intervalArguments.intervalMerging, argCollection.intervalArguments.intervalPadding, genomeLocParser);
-    }
-
-    private File createTempFile( String tempFilePrefix, String tempFileExtension, String... lines ) throws Exception {
-        File tempFile = BaseTest.createTempFile(tempFilePrefix, tempFileExtension);
-        FileUtils.writeLines(tempFile, Arrays.asList(lines));
-        return tempFile;
-    }
-
-    @DataProvider(name = "sortAndMergeIntervals")
-    public Object[][] getSortAndMergeIntervals() {
-        return new Object[][] {
-                new Object[] { IntervalMergingRule.OVERLAPPING_ONLY, getLocs("chr1:1", "chr1:3", "chr1:2"), getLocs("chr1:1", "chr1:2", "chr1:3") },
-                new Object[] { IntervalMergingRule.ALL, getLocs("chr1:1", "chr1:3", "chr1:2"), getLocs("chr1:1-3") },
-                new Object[] { IntervalMergingRule.OVERLAPPING_ONLY, getLocs("chr1:1", "chr1:3", "chr2:2"), getLocs("chr1:1", "chr1:3", "chr2:2") },
-                new Object[] { IntervalMergingRule.ALL, getLocs("chr1:1", "chr1:3", "chr2:2"), getLocs("chr1:1", "chr1:3", "chr2:2") },
-                new Object[] { IntervalMergingRule.OVERLAPPING_ONLY, getLocs("chr1:1", "chr1"), getLocs("chr1") },
-                new Object[] { IntervalMergingRule.ALL, getLocs("chr1:1", "chr1"), getLocs("chr1") }
-        };
-    }
-
-    @Test(dataProvider = "sortAndMergeIntervals")
-    public void testSortAndMergeIntervals(IntervalMergingRule merge, List<GenomeLoc> unsorted, List<GenomeLoc> expected) {
-        List<GenomeLoc> sorted = IntervalUtils.sortAndMergeIntervals(hg18GenomeLocParser, unsorted, merge).toList();
-        Assert.assertEquals(sorted, expected);
-    }
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/io/IOUtilsUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/io/IOUtilsUnitTest.java
deleted file mode 100644
index 13a2e8a..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/io/IOUtilsUnitTest.java
+++ /dev/null
@@ -1,326 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.io;
-
-import org.apache.commons.io.FileUtils;
-import org.broadinstitute.gatk.utils.BaseTest;
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileOutputStream;
-import java.io.IOException;
-import java.util.Arrays;
-import java.util.List;
-import java.util.Random;
-
-import org.broadinstitute.gatk.engine.GenomeAnalysisEngine;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-import org.broadinstitute.gatk.utils.exceptions.UserException;
-import org.testng.Assert;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
-
-public class IOUtilsUnitTest extends BaseTest {
-  @Test
-  public void testGoodTempDir() {
-    IOUtils.checkTempDir(new File("/tmp/queue"));
-  }
-
-  @Test(expectedExceptions=UserException.BadTmpDir.class)
-  public void testBadTempDir() {
-    IOUtils.checkTempDir(new File("/tmp"));
-  }
-
-  @Test
-  public void testAbsoluteSubDir() {
-    File subDir = IOUtils.absolute(new File("."), new File("/path/to/file"));
-    Assert.assertEquals(subDir, new File("/path/to/file"));
-
-    subDir = IOUtils.absolute(new File("/different/path"), new File("/path/to/file"));
-    Assert.assertEquals(subDir, new File("/path/to/file"));
-
-    subDir = IOUtils.absolute(new File("/different/path"), new File("."));
-    Assert.assertEquals(subDir, new File("/different/path"));
-  }
-
-  @Test
-  public void testRelativeSubDir() throws IOException {
-    File subDir = IOUtils.absolute(new File("."), new File("path/to/file"));
-    Assert.assertEquals(subDir.getCanonicalFile(), new File("path/to/file").getCanonicalFile());
-
-    subDir = IOUtils.absolute(new File("/different/path"), new File("path/to/file"));
-    Assert.assertEquals(subDir, new File("/different/path/path/to/file"));
-  }
-
-  @Test
-  public void testDottedSubDir() throws IOException {
-    File subDir = IOUtils.absolute(new File("."), new File("path/../to/file"));
-    Assert.assertEquals(subDir.getCanonicalFile(), new File("path/../to/./file").getCanonicalFile());
-
-    subDir = IOUtils.absolute(new File("."), new File("/path/../to/file"));
-    Assert.assertEquals(subDir, new File("/path/../to/file"));
-
-    subDir = IOUtils.absolute(new File("/different/../path"), new File("path/to/file"));
-    Assert.assertEquals(subDir, new File("/different/../path/path/to/file"));
-
-    subDir = IOUtils.absolute(new File("/different/./path"), new File("/path/../to/file"));
-    Assert.assertEquals(subDir, new File("/path/../to/file"));
-  }
-
-  @Test
-  public void testTempDir() {
-    File tempDir = IOUtils.tempDir("Q-Unit-Test", "", new File("queueTempDirToDelete"));
-    Assert.assertTrue(tempDir.exists());
-    Assert.assertFalse(tempDir.isFile());
-    Assert.assertTrue(tempDir.isDirectory());
-    boolean deleted = IOUtils.tryDelete(tempDir);
-    Assert.assertTrue(deleted);
-    Assert.assertFalse(tempDir.exists());
-  }
-
-  @Test
-  public void testDirLevel() {
-    File dir = IOUtils.dirLevel(new File("/path/to/directory"), 1);
-    Assert.assertEquals(dir, new File("/path"));
-
-    dir = IOUtils.dirLevel(new File("/path/to/directory"), 2);
-    Assert.assertEquals(dir, new File("/path/to"));
-
-    dir = IOUtils.dirLevel(new File("/path/to/directory"), 3);
-    Assert.assertEquals(dir, new File("/path/to/directory"));
-
-    dir = IOUtils.dirLevel(new File("/path/to/directory"), 4);
-    Assert.assertEquals(dir, new File("/path/to/directory"));
-  }
-
-  @Test
-  public void testAbsolute() {
-    File dir = IOUtils.absolute(new File("/path/./to/./directory/."));
-    Assert.assertEquals(dir, new File("/path/to/directory"));
-
-    dir = IOUtils.absolute(new File("/"));
-    Assert.assertEquals(dir, new File("/"));
-
-    dir = IOUtils.absolute(new File("/."));
-    Assert.assertEquals(dir, new File("/"));
-
-    dir = IOUtils.absolute(new File("/././."));
-    Assert.assertEquals(dir, new File("/"));
-
-    dir = IOUtils.absolute(new File("/./directory/."));
-    Assert.assertEquals(dir, new File("/directory"));
-
-    dir = IOUtils.absolute(new File("/./directory/./"));
-    Assert.assertEquals(dir, new File("/directory"));
-
-    dir = IOUtils.absolute(new File("/./directory./"));
-    Assert.assertEquals(dir, new File("/directory."));
-
-    dir = IOUtils.absolute(new File("/./.directory/"));
-    Assert.assertEquals(dir, new File("/.directory"));
-  }
-
-  @Test
-  public void testTail() throws IOException {
-    List<String> lines = Arrays.asList(
-            "chr18_random	4262	3154410390	50	51",
-            "chr19_random	301858	3154414752	50	51",
-            "chr21_random	1679693	3154722662	50	51",
-            "chr22_random	257318	3156435963	50	51",
-            "chrX_random	1719168	3156698441	50	51");
-    List<String> tail = IOUtils.tail(new File(BaseTest.hg18Reference + ".fai"), 5);
-    Assert.assertEquals(tail.size(), 5);
-    for (int i = 0; i < 5; i++)
-      Assert.assertEquals(tail.get(i), lines.get(i));
-  }
-
-    @Test
-    public void testWriteSystemFile() throws IOException {
-        File temp = createTempFile("temp.", ".properties");
-        try {
-            IOUtils.writeResource(new Resource("testProperties.properties", null), temp);
-        } finally {
-            FileUtils.deleteQuietly(temp);
-        }
-    }
-
-    @Test
-    public void testWriteSystemTempFile() throws IOException {
-        File temp = IOUtils.writeTempResource(new Resource("testProperties.properties", null));
-        try {
-            Assert.assertTrue(temp.getName().startsWith("testProperties"), "File does not start with 'testProperties.': " + temp);
-            Assert.assertTrue(temp.getName().endsWith(".properties"), "File does not end with '.properties': " + temp);
-        } finally {
-            FileUtils.deleteQuietly(temp);
-        }
-    }
-
-    @Test(expectedExceptions = IllegalArgumentException.class)
-    public void testMissingSystemFile() throws IOException {
-        File temp = createTempFile("temp.", ".properties");
-        try {
-            IOUtils.writeResource(new Resource("MissingStingText.properties", null), temp);
-        } finally {
-            FileUtils.deleteQuietly(temp);
-        }
-    }
-
-    @Test
-    public void testWriteRelativeFile() throws IOException {
-        File temp = createTempFile("temp.", ".properties");
-        try {
-            IOUtils.writeResource(new Resource("/testProperties.properties", IOUtils.class), temp);
-        } finally {
-            FileUtils.deleteQuietly(temp);
-        }
-    }
-
-    @Test
-    public void testWriteRelativeTempFile() throws IOException {
-        File temp = IOUtils.writeTempResource(new Resource("/testProperties.properties", IOUtils.class));
-        try {
-            Assert.assertTrue(temp.getName().startsWith("testProperties"), "File does not start with 'testProperties.': " + temp);
-            Assert.assertTrue(temp.getName().endsWith(".properties"), "File does not end with '.properties': " + temp);
-        } finally {
-            FileUtils.deleteQuietly(temp);
-        }
-    }
-
-    @Test(expectedExceptions = IllegalArgumentException.class)
-    public void testMissingRelativeFile() throws IOException {
-        File temp = createTempFile("temp.", ".properties");
-        try {
-            // Looking for /org/broadinstitute/gatk/utils/file/GATKText.properties
-            IOUtils.writeResource(new Resource("GATKText.properties", IOUtils.class), temp);
-        } finally {
-            FileUtils.deleteQuietly(temp);
-        }
-    }
-
-    @Test
-    public void testResourceProperties() {
-        Resource resource = new Resource("foo", Resource.class);
-        Assert.assertEquals(resource.getPath(), "foo");
-        Assert.assertEquals(resource.getRelativeClass(), Resource.class);
-    }
-
-    @Test
-    public void testIsSpecialFile() {
-        Assert.assertTrue(IOUtils.isSpecialFile(new File("/dev")));
-        Assert.assertTrue(IOUtils.isSpecialFile(new File("/dev/null")));
-        Assert.assertTrue(IOUtils.isSpecialFile(new File("/dev/full")));
-        Assert.assertTrue(IOUtils.isSpecialFile(new File("/dev/stdout")));
-        Assert.assertTrue(IOUtils.isSpecialFile(new File("/dev/stderr")));
-        Assert.assertFalse(IOUtils.isSpecialFile(null));
-        Assert.assertFalse(IOUtils.isSpecialFile(new File("/home/user/my.file")));
-        Assert.assertFalse(IOUtils.isSpecialFile(new File("/devfake/null")));
-    }
-
-    @DataProvider( name = "ByteArrayIOTestData")
-    public Object[][] byteArrayIOTestDataProvider() {
-        return new Object[][] {
-            // file size, read buffer size
-            { 0,     4096 },
-            { 1,     4096 },
-            { 2000,  4096 },
-            { 4095,  4096 },
-            { 4096,  4096 },
-            { 4097,  4096 },
-            { 6000,  4096 },
-            { 8191,  4096 },
-            { 8192,  4096 },
-            { 8193,  4096 },
-            { 10000, 4096 }
-        };
-    }
-
-    @Test( dataProvider = "ByteArrayIOTestData" )
-    public void testWriteThenReadFileIntoByteArray ( int fileSize, int readBufferSize ) throws Exception {
-        File tempFile = createTempFile(String.format("testWriteThenReadFileIntoByteArray_%d_%d", fileSize, readBufferSize), "tmp");
-
-        byte[] dataWritten = getDeterministicRandomData(fileSize);
-        IOUtils.writeByteArrayToFile(dataWritten, tempFile);
-        byte[] dataRead = IOUtils.readFileIntoByteArray(tempFile, readBufferSize);
-
-        Assert.assertEquals(dataRead.length, dataWritten.length);
-        Assert.assertTrue(Arrays.equals(dataRead, dataWritten));
-    }
-
-    @Test( dataProvider = "ByteArrayIOTestData" )
-    public void testWriteThenReadStreamIntoByteArray ( int fileSize, int readBufferSize ) throws Exception {
-        File tempFile = createTempFile(String.format("testWriteThenReadStreamIntoByteArray_%d_%d", fileSize, readBufferSize), "tmp");
-
-        byte[] dataWritten = getDeterministicRandomData(fileSize);
-        IOUtils.writeByteArrayToStream(dataWritten, new FileOutputStream(tempFile));
-        byte[] dataRead = IOUtils.readStreamIntoByteArray(new FileInputStream(tempFile), readBufferSize);
-
-        Assert.assertEquals(dataRead.length, dataWritten.length);
-        Assert.assertTrue(Arrays.equals(dataRead, dataWritten));
-    }
-
-    @Test( expectedExceptions = UserException.CouldNotReadInputFile.class )
-    public void testReadNonExistentFileIntoByteArray() {
-        File nonExistentFile = new File("djfhsdkjghdfk");
-        Assert.assertFalse(nonExistentFile.exists());
-
-        IOUtils.readFileIntoByteArray(nonExistentFile);
-    }
-
-    @Test( expectedExceptions = ReviewedGATKException.class )
-    public void testReadNullStreamIntoByteArray() {
-        IOUtils.readStreamIntoByteArray(null);
-    }
-
-    @Test( expectedExceptions = ReviewedGATKException.class )
-    public void testReadStreamIntoByteArrayInvalidBufferSize() throws Exception {
-        IOUtils.readStreamIntoByteArray(new FileInputStream(createTempFile("testReadStreamIntoByteArrayInvalidBufferSize", "tmp")),
-                                        -1);
-    }
-
-    @Test( expectedExceptions = UserException.CouldNotCreateOutputFile.class )
-    public void testWriteByteArrayToUncreatableFile() {
-        IOUtils.writeByteArrayToFile(new byte[]{0}, new File("/dev/foo/bar"));
-    }
-
-    @Test( expectedExceptions = ReviewedGATKException.class )
-    public void testWriteNullByteArrayToFile() {
-        IOUtils.writeByteArrayToFile(null, createTempFile("testWriteNullByteArrayToFile", "tmp"));
-    }
-
-    @Test( expectedExceptions = ReviewedGATKException.class )
-    public void testWriteByteArrayToNullStream() {
-        IOUtils.writeByteArrayToStream(new byte[]{0}, null);
-    }
-
-    private byte[] getDeterministicRandomData ( int size ) {
-        GenomeAnalysisEngine.resetRandomGenerator();
-        Random rand = GenomeAnalysisEngine.getRandomGenerator();
-
-        byte[] randomData = new byte[size];
-        rand.nextBytes(randomData);
-
-        return randomData;
-    }
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/jna/clibrary/LibCUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/jna/clibrary/LibCUnitTest.java
deleted file mode 100644
index f695d89..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/jna/clibrary/LibCUnitTest.java
+++ /dev/null
@@ -1,70 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.jna.clibrary;
-
-import com.sun.jna.NativeLong;
-import com.sun.jna.ptr.NativeLongByReference;
-import org.broadinstitute.gatk.utils.BaseTest;
-import org.testng.Assert;
-import org.testng.annotations.Test;
-
-public class LibCUnitTest extends BaseTest {
-
-    @Test
-    public void testEnvironment() {
-        String testProperty = "test_property";
-        String testValue = "value";
-        Assert.assertEquals(LibC.getenv(testProperty), null);
-        Assert.assertEquals(LibC.setenv(testProperty, testValue, 1), 0);
-        Assert.assertEquals(LibC.getenv(testProperty), testValue);
-        Assert.assertEquals(LibC.unsetenv(testProperty), 0);
-        Assert.assertEquals(LibC.getenv(testProperty), null);
-    }
-
-    @Test
-    public void testDifftime() throws Exception {
-        // Pointer to hold the times
-        NativeLongByReference ref = new NativeLongByReference();
-
-        // time() returns -1 on error.
-        NativeLong err = new NativeLong(-1L);
-
-        LibC.time(ref);
-        NativeLong time0 = ref.getValue();
-        Assert.assertNotSame(time0, err, "Time 0 returned an error (-1).");
-
-        Thread.sleep(5000L);
-
-        LibC.time(ref);
-        NativeLong time1 = ref.getValue();
-        Assert.assertNotSame(time1, err, "Time 1 returned an error (-1).");
-
-        Assert.assertNotSame(time1, time0, "Time 1 returned same time as Time 0.");
-
-        double diff = LibC.difftime(time1, time0);
-        Assert.assertTrue(diff >= 5, "Time difference was not greater than 5 seconds: " + diff);
-    }
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/jna/drmaa/v1_0/JnaSessionQueueTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/jna/drmaa/v1_0/JnaSessionQueueTest.java
deleted file mode 100644
index e683f4b..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/jna/drmaa/v1_0/JnaSessionQueueTest.java
+++ /dev/null
@@ -1,165 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.jna.drmaa.v1_0;
-
-import org.apache.commons.io.FileUtils;
-import org.broadinstitute.gatk.utils.BaseTest;
-import org.ggf.drmaa.*;
-import org.testng.Assert;
-import org.testng.SkipException;
-import org.testng.annotations.Test;
-
-import java.io.File;
-import java.util.*;
-
-public class JnaSessionQueueTest extends BaseTest {
-    private String implementation = null;
-    private static final SessionFactory factory = new JnaSessionFactory();
-
-    @Test
-    public void testDrmaa() throws Exception {
-        Session session = factory.getSession();
-        Version version = session.getVersion();
-        System.out.println(String.format("DRMAA version: %d.%d", version.getMajor(), version.getMinor()));
-        System.out.println(String.format("DRMAA contact(s): %s", session.getContact()));
-        System.out.println(String.format("DRM system(s): %s", session.getDrmSystem()));
-        System.out.println(String.format("DRMAA implementation(s): %s", session.getDrmaaImplementation()));
-        this.implementation = session.getDrmaaImplementation();
-    }
-
-    @Test(dependsOnMethods = { "testDrmaa" })
-    public void testSubmitEcho() throws Exception {
-        if ( ! queueTestRunModeIsSet ) {
-            throw new SkipException("Skipping testSubmitEcho because we are in queue test dry run mode");
-        }
-
-        if (implementation.contains("LSF")) {
-            System.err.println("    ***********************************************************");
-            System.err.println("   *************************************************************");
-            System.err.println("   ****                                                     ****");
-            System.err.println("  ****  Skipping JnaSessionQueueTest.testSubmitEcho()        ****");
-            System.err.println("  ****      Are you using the dotkit .combined_LSF_SGE?      ****");
-            System.err.println("   ****                                                     ****");
-            System.err.println("   *************************************************************");
-            System.err.println("    ***********************************************************");
-            throw new SkipException("Skipping testSubmitEcho because correct DRMAA implementation not found");
-        }
-
-        File outFile = tryCreateNetworkTempFile("JnaSessionQueueTest.out");
-        Session session = factory.getSession();
-        session.init(null);
-        try {
-            JobTemplate template = session.createJobTemplate();
-            template.setRemoteCommand("sh");
-            template.setOutputPath(":" + outFile.getAbsolutePath());
-            template.setJoinFiles(true);
-            template.setArgs(Arrays.asList("-c", "echo \"Hello world.\""));
-
-            String jobId = session.runJob(template);
-            System.out.println(String.format("Job id %s", jobId));
-            session.deleteJobTemplate(template);
-
-            System.out.println("Waiting for job to run: " + jobId);
-            int remotePs = Session.QUEUED_ACTIVE;
-
-            List<Integer> runningStatuses = Arrays.asList(Session.QUEUED_ACTIVE, Session.RUNNING);
-
-            while (runningStatuses.contains(remotePs)) {
-                Thread.sleep(30 * 1000L);
-                remotePs = session.getJobProgramStatus(jobId);
-            }
-
-            Assert.assertEquals(remotePs, Session.DONE, "Job status is not DONE.");
-
-            JobInfo jobInfo = session.wait(jobId, Session.TIMEOUT_NO_WAIT);
-
-            Assert.assertTrue(jobInfo.hasExited(), String.format("Job did not exit cleanly: %s", jobId));
-            Assert.assertEquals(jobInfo.getExitStatus(), 0, String.format("Exit status for jobId %s is non-zero", jobId));
-            if (jobInfo.hasSignaled())
-                Assert.fail(String.format("JobId %s exited with signal %s and core dump flag %s", jobId, jobInfo.getTerminatingSignal(), jobInfo.hasCoreDump()));
-            Assert.assertFalse(jobInfo.wasAborted(), String.format("Job was aborted: %s", jobId));
-        } finally {
-            session.exit();
-        }
-
-        Assert.assertTrue(FileUtils.waitFor(outFile, 120), "File not found: " + outFile.getAbsolutePath());
-        System.out.println("--- output ---");
-        System.out.println(FileUtils.readFileToString(outFile));
-        System.out.println("--- output ---");
-        Assert.assertTrue(outFile.delete(), "Unable to delete " + outFile.getAbsolutePath());
-        System.out.println("Validating that we reached the end of the test without exit.");
-    }
-
-    @Test
-    public void testCollectionConversions() {
-        Collection<String> list = Arrays.asList("a=1", "foo=bar", "empty=");
-        Map<String, String> map = new LinkedHashMap<String, String>();
-        map.put("a", "1");
-        map.put("foo", "bar");
-        map.put("empty", "");
-
-        Assert.assertEquals(JnaSession.collectionToMap(list), map);
-        Assert.assertEquals(JnaSession.mapToCollection(map), list);
-    }
-
-    @Test
-    public void testLimitConversions() {
-        Assert.assertEquals(JnaSession.formatLimit(0), "0:00:00");
-        Assert.assertEquals(JnaSession.formatLimit(59), "0:00:59");
-        Assert.assertEquals(JnaSession.formatLimit(60), "0:01:00");
-        Assert.assertEquals(JnaSession.formatLimit(3540), "0:59:00");
-        Assert.assertEquals(JnaSession.formatLimit(3599), "0:59:59");
-        Assert.assertEquals(JnaSession.formatLimit(7200), "2:00:00");
-        Assert.assertEquals(JnaSession.formatLimit(7260), "2:01:00");
-        Assert.assertEquals(JnaSession.formatLimit(7261), "2:01:01");
-
-        Assert.assertEquals(JnaSession.parseLimit("0"), 0);
-        Assert.assertEquals(JnaSession.parseLimit("00"), 0);
-        Assert.assertEquals(JnaSession.parseLimit("0:00"), 0);
-        Assert.assertEquals(JnaSession.parseLimit("00:00"), 0);
-        Assert.assertEquals(JnaSession.parseLimit("0:00:00"), 0);
-
-        Assert.assertEquals(JnaSession.parseLimit("1"), 1);
-        Assert.assertEquals(JnaSession.parseLimit("01"), 1);
-        Assert.assertEquals(JnaSession.parseLimit("0:01"), 1);
-        Assert.assertEquals(JnaSession.parseLimit("00:01"), 1);
-        Assert.assertEquals(JnaSession.parseLimit("0:00:01"), 1);
-
-        Assert.assertEquals(JnaSession.parseLimit("10"), 10);
-        Assert.assertEquals(JnaSession.parseLimit("0:10"), 10);
-        Assert.assertEquals(JnaSession.parseLimit("00:10"), 10);
-        Assert.assertEquals(JnaSession.parseLimit("0:00:10"), 10);
-
-        Assert.assertEquals(JnaSession.parseLimit("1:0"), 60);
-        Assert.assertEquals(JnaSession.parseLimit("1:00"), 60);
-        Assert.assertEquals(JnaSession.parseLimit("01:00"), 60);
-        Assert.assertEquals(JnaSession.parseLimit("0:01:00"), 60);
-
-        Assert.assertEquals(JnaSession.parseLimit("1:00:00"), 3600);
-
-        Assert.assertEquals(JnaSession.parseLimit("1:02:03"), 3723);
-    }
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/jna/drmaa/v1_0/LibDrmaaQueueTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/jna/drmaa/v1_0/LibDrmaaQueueTest.java
deleted file mode 100644
index accc0fe..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/jna/drmaa/v1_0/LibDrmaaQueueTest.java
+++ /dev/null
@@ -1,257 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.jna.drmaa.v1_0;
-
-import com.sun.jna.Memory;
-import com.sun.jna.NativeLong;
-import com.sun.jna.Pointer;
-import com.sun.jna.StringArray;
-import com.sun.jna.ptr.IntByReference;
-import com.sun.jna.ptr.PointerByReference;
-import org.apache.commons.io.FileUtils;
-import org.broadinstitute.gatk.utils.BaseTest;
-import org.testng.Assert;
-import org.testng.SkipException;
-import org.testng.annotations.Test;
-
-import java.io.File;
-import java.util.Arrays;
-import java.util.List;
-
-public class LibDrmaaQueueTest extends BaseTest {
-    private String implementation = null;
-
-    @Test
-    public void testDrmaa() throws Exception {
-        Memory error = new Memory(LibDrmaa.DRMAA_ERROR_STRING_BUFFER);
-        int errnum;
-
-        IntByReference major = new IntByReference();
-        IntByReference minor = new IntByReference();
-        Memory contact = new Memory(LibDrmaa.DRMAA_CONTACT_BUFFER);
-        Memory drmSystem = new Memory(LibDrmaa.DRMAA_DRM_SYSTEM_BUFFER);
-        Memory drmaaImplementation = new Memory(LibDrmaa.DRMAA_DRMAA_IMPLEMENTATION_BUFFER);
-
-        errnum = LibDrmaa.drmaa_version(major, minor, error, LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN);
-
-        if (errnum != LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_SUCCESS)
-            Assert.fail(String.format("Could not get version from the DRMAA library: %s", error.getString(0)));
-
-        System.out.println(String.format("DRMAA version: %d.%d", major.getValue(), minor.getValue()));
-
-        errnum = LibDrmaa.drmaa_get_contact(contact, LibDrmaa.DRMAA_CONTACT_BUFFER_LEN, error, LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN);
-
-        if (errnum != LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_SUCCESS)
-            Assert.fail(String.format("Could not get contacts from the DRMAA library: %s", error.getString(0)));
-
-        System.out.println(String.format("DRMAA contact(s): %s", contact.getString(0)));
-
-        errnum = LibDrmaa.drmaa_get_DRM_system(drmSystem, LibDrmaa.DRMAA_DRM_SYSTEM_BUFFER_LEN, error, LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN);
-
-        if (errnum != LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_SUCCESS)
-            Assert.fail(String.format("Could not get DRM system from the DRMAA library: %s", error.getString(0)));
-
-        System.out.println(String.format("DRM system(s): %s", drmSystem.getString(0)));
-
-        errnum = LibDrmaa.drmaa_get_DRMAA_implementation(drmaaImplementation, LibDrmaa.DRMAA_DRMAA_IMPLEMENTATION_BUFFER_LEN, error, LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN);
-
-        if (errnum != LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_SUCCESS)
-            Assert.fail(String.format("Could not get DRMAA implementation from the DRMAA library: %s", error.getString(0)));
-
-        System.out.println(String.format("DRMAA implementation(s): %s", drmaaImplementation.getString(0)));
-
-        this.implementation = drmaaImplementation.getString(0);
-    }
-
-    @Test(dependsOnMethods = { "testDrmaa" })
-    public void testSubmitEcho() throws Exception {
-        if ( ! queueTestRunModeIsSet ) {
-            throw new SkipException("Skipping testSubmitEcho because we are in pipeline test dry run mode");
-        }
-
-        if (implementation.contains("LSF")) {
-            System.err.println("    *********************************************************");
-            System.err.println("   ***********************************************************");
-            System.err.println("   ****                                                   ****");
-            System.err.println("  ****  Skipping LibDrmaaQueueTest.testSubmitEcho()        ****");
-            System.err.println("  ****     Are you using the dotkit .combined_LSF_SGE?     ****");
-            System.err.println("   ****                                                   ****");
-            System.err.println("   ***********************************************************");
-            System.err.println("    *********************************************************");
-            throw new SkipException("Skipping testSubmitEcho because correct DRMAA implementation not found");
-        }
-
-        Memory error = new Memory(LibDrmaa.DRMAA_ERROR_STRING_BUFFER);
-        int errnum;
-
-    File outFile = tryCreateNetworkTempFile("LibDrmaaQueueTest.out");
-
-        errnum = LibDrmaa.drmaa_init(null, error, LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN);
-
-        if (errnum != LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_SUCCESS)
-            Assert.fail(String.format("Could not initialize the DRMAA library: %s", error.getString(0)));
-
-        try {
-            PointerByReference jtRef = new PointerByReference();
-            Pointer jt;
-            Memory jobIdMem = new Memory(LibDrmaa.DRMAA_JOBNAME_BUFFER);
-            String jobId;
-            IntByReference remotePs = new IntByReference();
-            IntByReference stat = new IntByReference();
-            PointerByReference rusage = new PointerByReference();
-
-            errnum = LibDrmaa.drmaa_allocate_job_template(jtRef, error, LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN);
-
-            if (errnum != LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_SUCCESS)
-                Assert.fail(String.format("Could not create job template: %s", error.getString(0)));
-
-            jt = jtRef.getValue();
-
-            errnum = LibDrmaa.drmaa_set_attribute(jt, LibDrmaa.DRMAA_REMOTE_COMMAND, "sh", error, LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN);
-
-            if (errnum != LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_SUCCESS)
-                Assert.fail(String.format("Could not set attribute \"%s\": %s", LibDrmaa.DRMAA_REMOTE_COMMAND, error.getString(0)));
-
-            errnum = LibDrmaa.drmaa_set_attribute(jt, LibDrmaa.DRMAA_OUTPUT_PATH, ":" + outFile.getAbsolutePath(), error, LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN);
-
-            if (errnum != LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_SUCCESS)
-                Assert.fail(String.format("Could not set attribute \"%s\": %s", LibDrmaa.DRMAA_OUTPUT_PATH, error.getString(0)));
-
-            errnum = LibDrmaa.drmaa_set_attribute(jt, LibDrmaa.DRMAA_JOIN_FILES, "y", error, LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN);
-
-            if (errnum != LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_SUCCESS)
-                Assert.fail(String.format("Could not set attribute \"%s\": %s", LibDrmaa.DRMAA_JOIN_FILES, error.getString(0)));
-
-            StringArray args = new StringArray(new String[] { "-c", "echo \"Hello world.\"" });
-
-            errnum = LibDrmaa.drmaa_set_vector_attribute(jt, LibDrmaa.DRMAA_V_ARGV, args, error, LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN);
-
-            if (errnum != LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_SUCCESS)
-                Assert.fail(String.format("Could not set attribute \"%s\": %s", LibDrmaa.DRMAA_V_ARGV, error.getString(0)));
-
-            errnum = LibDrmaa.drmaa_run_job(jobIdMem, LibDrmaa.DRMAA_JOBNAME_BUFFER_LEN, jt, error, LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN);
-
-            if (errnum != LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_SUCCESS)
-                Assert.fail(String.format("Could not submit job: %s", error.getString(0)));
-
-            jobId = jobIdMem.getString(0);
-
-            System.out.println(String.format("Job id %s", jobId));
-
-            errnum = LibDrmaa.drmaa_delete_job_template(jt, error, LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN);
-
-            if (errnum != LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_SUCCESS)
-                Assert.fail(String.format("Could not delete job template: %s", error.getString(0)));
-
-            System.out.println("Waiting for job to run: " + jobId);
-            remotePs.setValue(LibDrmaa.DRMAA_PS.DRMAA_PS_QUEUED_ACTIVE);
-
-            List<Integer> runningStatuses = Arrays.asList(
-                    LibDrmaa.DRMAA_PS.DRMAA_PS_QUEUED_ACTIVE, LibDrmaa.DRMAA_PS.DRMAA_PS_RUNNING);
-
-            while (runningStatuses.contains(remotePs.getValue())) {
-                Thread.sleep(30 * 1000L);
-
-                errnum = LibDrmaa.drmaa_job_ps(jobId, remotePs, error, LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN);
-
-                if (errnum != LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_SUCCESS)
-                    Assert.fail(String.format("Could not get status for jobId %s: %s", jobId, error.getString(0)));
-            }
-
-            Assert.assertEquals(remotePs.getValue(), LibDrmaa.DRMAA_PS.DRMAA_PS_DONE, "Job status is not DONE.");
-
-            errnum = LibDrmaa.drmaa_wait(jobId, Pointer.NULL, new NativeLong(0), stat, LibDrmaa.DRMAA_TIMEOUT_NO_WAIT,
-                    rusage, error, LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN);
-
-            if (errnum != LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_SUCCESS)
-                Assert.fail(String.format("Wait failed for jobId %s: %s", jobId, error.getString(0)));
-
-            IntByReference exited = new IntByReference();
-            IntByReference exitStatus = new IntByReference();
-            IntByReference signaled = new IntByReference();
-            Memory signal = new Memory(LibDrmaa.DRMAA_SIGNAL_BUFFER);
-            IntByReference coreDumped = new IntByReference();
-            IntByReference aborted = new IntByReference();
-
-            errnum = LibDrmaa.drmaa_wifexited(exited, stat.getValue(), error, LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN);
-
-            if (errnum != LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_SUCCESS)
-                Assert.fail(String.format("Exit check failed for jobId %s: %s", jobId, error.getString(0)));
-
-            Assert.assertTrue(exited.getValue() != 0, String.format("Job did not exit cleanly: %s", jobId));
-
-            errnum = LibDrmaa.drmaa_wexitstatus(exitStatus, stat.getValue(), error, LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN);
-
-            if (errnum != LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_SUCCESS)
-                Assert.fail(String.format("Exit status failed for jobId %s: %s", jobId, error.getString(0)));
-
-            Assert.assertEquals(exitStatus.getValue(), 0, String.format("Exit status for jobId %s is non-zero", jobId));
-
-            errnum = LibDrmaa.drmaa_wifsignaled(signaled, stat.getValue(), error, LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN);
-
-            if (errnum != LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_SUCCESS)
-                Assert.fail(String.format("Signaled check failed for jobId %s: %s", jobId, error.getString(0)));
-
-            if (signaled.getValue() != 0) {
-                errnum = LibDrmaa.drmaa_wtermsig(signal, LibDrmaa.DRMAA_SIGNAL_BUFFER_LEN, stat.getValue(), error, LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN);
-
-                if (errnum != LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_SUCCESS)
-                    Assert.fail(String.format("Signal lookup failed for jobId %s: %s", jobId, error.getString(0)));
-
-                errnum = LibDrmaa.drmaa_wcoredump(coreDumped, stat.getValue(), error, LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN);
-
-                if (errnum != LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_SUCCESS)
-                    Assert.fail(String.format("Core dump check failed for jobId %s: %s", jobId, error.getString(0)));
-
-                Assert.fail(String.format("JobId %s exited with signal %s and core dump flag %d", jobId, signal.getString(0), coreDumped.getValue()));
-            }
-
-            errnum = LibDrmaa.drmaa_wifaborted(aborted, stat.getValue(), error, LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN);
-
-            if (errnum != LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_SUCCESS)
-                Assert.fail(String.format("Aborted check failed for jobId %s: %s", jobId, error.getString(0)));
-
-            Assert.assertTrue(aborted.getValue() == 0, String.format("Job was aborted: %s", jobId));
-
-        } finally {
-            if (errnum != LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_SUCCESS) {
-                LibDrmaa.drmaa_exit(error, LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN);
-            } else {
-                errnum = LibDrmaa.drmaa_exit(error, LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN);
-
-                if (errnum != LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_SUCCESS)
-                    Assert.fail(String.format("Could not shut down the DRMAA library: %s", error.getString(0)));
-            }
-        }
-
-        Assert.assertTrue(FileUtils.waitFor(outFile, 120), "File not found: " + outFile.getAbsolutePath());
-        System.out.println("--- output ---");
-        System.out.println(FileUtils.readFileToString(outFile));
-        System.out.println("--- output ---");
-        Assert.assertTrue(outFile.delete(), "Unable to delete " + outFile.getAbsolutePath());
-        System.out.println("Validating that we reached the end of the test without exit.");
-    }
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/jna/lsf/v7_0_6/LibBatQueueTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/jna/lsf/v7_0_6/LibBatQueueTest.java
deleted file mode 100644
index 4af2bf7..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/jna/lsf/v7_0_6/LibBatQueueTest.java
+++ /dev/null
@@ -1,162 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.jna.lsf.v7_0_6;
-
-import com.sun.jna.*;
-import com.sun.jna.ptr.IntByReference;
-import org.apache.commons.io.FileUtils;
-import org.broadinstitute.gatk.utils.Utils;
-import org.testng.Assert;
-import org.testng.SkipException;
-import org.testng.annotations.BeforeClass;
-import org.testng.annotations.Test;
-import org.broadinstitute.gatk.utils.BaseTest;
-import org.broadinstitute.gatk.utils.jna.lsf.v7_0_6.LibBat.*;
-
-import java.io.File;
-
-/**
- * Really unit tests, but these tests will only run on systems with LSF set up.
- */
-public class LibBatQueueTest extends BaseTest {
-    @BeforeClass
-    public void initLibBat() {
-        Assert.assertFalse(LibBat.lsb_init("LibBatQueueTest") < 0, LibBat.lsb_sperror("lsb_init() failed"));
-    }
-
-    @Test
-    public void testClusterName() {
-        String clusterName = LibLsf.ls_getclustername();
-        System.out.println("Cluster name: " + clusterName);
-        Assert.assertNotNull(clusterName);
-    }
-
-    @Test
-    public void testReadConfEnv() {
-        LibLsf.config_param[] configParams = (LibLsf.config_param[]) new LibLsf.config_param().toArray(4);
-
-        configParams[0].paramName = "LSF_UNIT_FOR_LIMITS";
-        configParams[1].paramName = "LSF_CONFDIR";
-        configParams[2].paramName = "MADE_UP_PARAMETER";
-
-        Structure.autoWrite(configParams);
-
-        if (LibLsf.ls_readconfenv(configParams[0], null) != 0) {
-            Assert.fail(LibLsf.ls_sysmsg());
-        }
-
-        Structure.autoRead(configParams);
-
-        System.out.println("LSF_UNIT_FOR_LIMITS: " + configParams[0].paramValue);
-        Assert.assertNotNull(configParams[1].paramValue);
-        Assert.assertNull(configParams[2].paramValue);
-        Assert.assertNull(configParams[3].paramName);
-        Assert.assertNull(configParams[3].paramValue);
-    }
-
-    @Test
-    public void testReadQueueLimits() {
-        String queue = "hour";
-        StringArray queues = new StringArray(new String[] {queue});
-        IntByReference numQueues = new IntByReference(1);
-        queueInfoEnt queueInfo = LibBat.lsb_queueinfo(queues, numQueues, null, null, 0);
-
-        Assert.assertEquals(numQueues.getValue(), 1);
-        Assert.assertNotNull(queueInfo);
-        Assert.assertEquals(queueInfo.queue, queue);
-
-        int runLimit = queueInfo.rLimits[LibLsf.LSF_RLIMIT_RUN];
-        Assert.assertTrue(runLimit > 0, "LSF run limit is not greater than zero: " + runLimit);
-    }
-
-    @Test
-    public void testSubmitEcho() throws Exception {
-        if ( ! queueTestRunModeIsSet ) {
-            throw new SkipException("Skipping testSubmitEcho because we are in queue test dry run mode");
-        }
-
-        String queue = "hour";
-        File outFile = tryCreateNetworkTempFile("LibBatQueueTest.out");
-
-        submit req = new submit();
-
-        for (int i = 0; i < LibLsf.LSF_RLIM_NLIMITS; i++)
-            req.rLimits[i] = LibLsf.DEFAULT_RLIMIT;
-
-        req.projectName = "LibBatQueueTest";
-        req.options |= LibBat.SUB_PROJECT_NAME;
-
-        req.queue = queue;
-        req.options |= LibBat.SUB_QUEUE;
-
-        req.outFile = outFile.getPath();
-        req.options |= LibBat.SUB_OUT_FILE;
-
-        req.userPriority = 100;
-        req.options2 |= LibBat.SUB2_JOB_PRIORITY;
-
-        req.command = "echo \"Hello world.\"";
-
-        String[] argv = {"", "-a", "tv"};
-        int setOptionResult = LibBat.setOption_(argv.length, new StringArray(argv), "a:", req, ~0, ~0, ~0, null);
-        Assert.assertTrue(setOptionResult != -1, "setOption_ returned -1");
-
-        submitReply reply = new submitReply();
-        long jobId = LibBat.lsb_submit(req, reply);
-
-        Assert.assertFalse(jobId < 0, LibBat.lsb_sperror("Error dispatching"));
-
-        System.out.println("Waiting for job to run: " + jobId);
-        int jobStatus = LibBat.JOB_STAT_PEND;
-        while (Utils.isFlagSet(jobStatus, LibBat.JOB_STAT_PEND) || Utils.isFlagSet(jobStatus, LibBat.JOB_STAT_RUN)) {
-            Thread.sleep(30 * 1000L);
-
-            int numJobs = LibBat.lsb_openjobinfo(jobId, null, null, null, null, LibBat.ALL_JOB);
-            try {
-                Assert.assertEquals(numJobs, 1);
-    
-                IntByReference more = new IntByReference();
-
-                jobInfoEnt jobInfo = LibBat.lsb_readjobinfo(more);
-                Assert.assertNotNull(jobInfo, "Job info is null");
-                Assert.assertEquals(more.getValue(), 0, "More job info results than expected");
-
-                jobStatus = jobInfo.status;
-            } finally {
-                LibBat.lsb_closejobinfo();
-            }
-        }
-        Assert.assertTrue(Utils.isFlagSet(jobStatus, LibBat.JOB_STAT_DONE), String.format("Unexpected job status: 0x%02x", jobStatus));
-
-        Assert.assertTrue(FileUtils.waitFor(outFile, 120), "File not found: " + outFile.getAbsolutePath());
-        System.out.println("--- output ---");
-        System.out.println(FileUtils.readFileToString(outFile));
-        System.out.println("--- output ---");
-        Assert.assertTrue(outFile.delete(), "Unable to delete " + outFile.getAbsolutePath());
-        Assert.assertEquals(reply.queue, req.queue, "LSF reply queue does not match requested queue.");
-        System.out.println("Validating that we reached the end of the test without exit.");
-    }
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/locusiterator/AlignmentStateMachineUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/locusiterator/AlignmentStateMachineUnitTest.java
deleted file mode 100644
index d8c39d3..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/locusiterator/AlignmentStateMachineUnitTest.java
+++ /dev/null
@@ -1,110 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.locusiterator;
-
-import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
-import org.testng.Assert;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
-
-import java.util.Arrays;
-
-/**
- * testing of the new (non-legacy) version of LocusIteratorByState
- */
-public class AlignmentStateMachineUnitTest extends LocusIteratorByStateBaseTest {
-    @DataProvider(name = "AlignmentStateMachineTest")
-    public Object[][] makeAlignmentStateMachineTest() {
-//        return new Object[][]{{new LIBSTest("2M2D2X", 2)}};
-//        return createLIBSTests(
-//                Arrays.asList(2),
-//                Arrays.asList(2));
-        return createLIBSTests(
-                Arrays.asList(1, 2),
-                Arrays.asList(1, 2, 3, 4));
-    }
-
-    @Test(dataProvider = "AlignmentStateMachineTest")
-    public void testAlignmentStateMachineTest(LIBSTest params) {
-        final GATKSAMRecord read = params.makeRead();
-        final AlignmentStateMachine state = new AlignmentStateMachine(read);
-        final LIBS_position tester = new LIBS_position(read);
-
-        // min is one because always visit something, even for 10I reads
-        final int expectedBpToVisit = read.getAlignmentEnd() - read.getAlignmentStart() + 1;
-
-        Assert.assertSame(state.getRead(), read);
-        Assert.assertNotNull(state.toString());
-
-        int bpVisited = 0;
-        int lastOffset = -1;
-
-        // TODO -- more tests about test state machine state before first step?
-        Assert.assertTrue(state.isLeftEdge());
-        Assert.assertNull(state.getCigarOperator());
-        Assert.assertNotNull(state.toString());
-        Assert.assertEquals(state.getReadOffset(), -1);
-        Assert.assertEquals(state.getGenomeOffset(), -1);
-        Assert.assertEquals(state.getCurrentCigarElementOffset(), -1);
-        Assert.assertEquals(state.getCurrentCigarElement(), null);
-
-        while ( state.stepForwardOnGenome() != null ) {
-            Assert.assertNotNull(state.toString());
-
-            tester.stepForwardOnGenome();
-
-            Assert.assertTrue(state.getReadOffset() >= lastOffset, "Somehow read offsets are decreasing: lastOffset " + lastOffset + " current " + state.getReadOffset());
-            Assert.assertEquals(state.getReadOffset(), tester.getCurrentReadOffset(), "Read offsets are wrong at " + bpVisited);
-
-            Assert.assertFalse(state.isLeftEdge());
-
-            Assert.assertEquals(state.getCurrentCigarElement(), read.getCigar().getCigarElement(tester.currentOperatorIndex), "CigarElement index failure");
-            Assert.assertEquals(state.getOffsetIntoCurrentCigarElement(), tester.getCurrentPositionOnOperatorBase0(), "CigarElement index failure");
-
-            Assert.assertEquals(read.getCigar().getCigarElement(state.getCurrentCigarElementOffset()), state.getCurrentCigarElement(), "Current cigar element isn't what we'd get from the read itself");
-
-            Assert.assertTrue(state.getOffsetIntoCurrentCigarElement() >= 0, "Offset into current cigar too small");
-            Assert.assertTrue(state.getOffsetIntoCurrentCigarElement() < state.getCurrentCigarElement().getLength(), "Offset into current cigar too big");
-
-            Assert.assertEquals(state.getGenomeOffset(), tester.getCurrentGenomeOffsetBase0(), "Offset from alignment start is bad");
-            Assert.assertEquals(state.getGenomePosition(), tester.getCurrentGenomeOffsetBase0() + read.getAlignmentStart(), "GenomePosition start is bad");
-            Assert.assertEquals(state.getLocation(genomeLocParser).size(), 1, "GenomeLoc position should have size == 1");
-            Assert.assertEquals(state.getLocation(genomeLocParser).getStart(), state.getGenomePosition(), "GenomeLoc position is bad");
-
-            // most tests of this functionality are in LIBS
-            Assert.assertNotNull(state.makePileupElement());
-
-            lastOffset = state.getReadOffset();
-            bpVisited++;
-        }
-
-        Assert.assertEquals(bpVisited, expectedBpToVisit, "Didn't visit the expected number of bp");
-        Assert.assertEquals(state.getReadOffset(), read.getReadLength());
-        Assert.assertEquals(state.getCurrentCigarElementOffset(), read.getCigarLength());
-        Assert.assertEquals(state.getCurrentCigarElement(), null);
-        Assert.assertNotNull(state.toString());
-    }
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/locusiterator/LIBS_position.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/locusiterator/LIBS_position.java
deleted file mode 100644
index 92680a7..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/locusiterator/LIBS_position.java
+++ /dev/null
@@ -1,155 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.locusiterator;
-
-import htsjdk.samtools.Cigar;
-import htsjdk.samtools.CigarElement;
-import htsjdk.samtools.CigarOperator;
-import htsjdk.samtools.SAMRecord;
-
-/**
-* Created with IntelliJ IDEA.
-* User: depristo
-* Date: 1/5/13
-* Time: 8:42 PM
-* To change this template use File | Settings | File Templates.
-*/
-public final class LIBS_position {
-
-    SAMRecord read;
-
-    final int numOperators;
-    int currentOperatorIndex = 0;
-    int currentPositionOnOperator = 0;
-    int currentReadOffset = 0;
-    int currentGenomeOffset = 0;
-
-    public boolean isBeforeDeletionStart = false;
-    public boolean isBeforeDeletedBase = false;
-    public boolean isAfterDeletionEnd = false;
-    public boolean isAfterDeletedBase = false;
-    public boolean isBeforeInsertion = false;
-    public boolean isAfterInsertion = false;
-    public boolean isNextToSoftClip = false;
-
-    boolean sawMop = false;
-
-    public LIBS_position(final SAMRecord read) {
-        this.read = read;
-        numOperators = read.getCigar().numCigarElements();
-    }
-
-    public int getCurrentReadOffset() {
-        return Math.max(0, currentReadOffset - 1);
-    }
-
-    public int getCurrentPositionOnOperatorBase0() {
-        return currentPositionOnOperator - 1;
-    }
-
-    public int getCurrentGenomeOffsetBase0() {
-        return currentGenomeOffset - 1;
-    }
-
-    /**
-     * Steps forward on the genome.  Returns false when done reading the read, true otherwise.
-     */
-    public boolean stepForwardOnGenome() {
-        if ( currentOperatorIndex == numOperators )
-            return false;
-
-        CigarElement curElement = read.getCigar().getCigarElement(currentOperatorIndex);
-        if ( currentPositionOnOperator >= curElement.getLength() ) {
-            if ( ++currentOperatorIndex == numOperators )
-                return false;
-
-            curElement = read.getCigar().getCigarElement(currentOperatorIndex);
-            currentPositionOnOperator = 0;
-        }
-
-        switch ( curElement.getOperator() ) {
-            case I: // insertion w.r.t. the reference
-//                if ( !sawMop )
-//                    break;
-            case S: // soft clip
-                currentReadOffset += curElement.getLength();
-            case H: // hard clip
-            case P: // padding
-                currentOperatorIndex++;
-                return stepForwardOnGenome();
-
-            case D: // deletion w.r.t. the reference
-            case N: // reference skip (looks and gets processed just like a "deletion", just different logical meaning)
-                currentPositionOnOperator++;
-                currentGenomeOffset++;
-                break;
-
-            case M:
-            case EQ:
-            case X:
-                sawMop = true;
-                currentReadOffset++;
-                currentPositionOnOperator++;
-                currentGenomeOffset++;
-                break;
-            default:
-                throw new IllegalStateException("No support for cigar op: " + curElement.getOperator());
-        }
-
-        final boolean isFirstOp = currentOperatorIndex == 0;
-        final boolean isLastOp = currentOperatorIndex == numOperators - 1;
-        final boolean isFirstBaseOfOp = currentPositionOnOperator == 1;
-        final boolean isLastBaseOfOp = currentPositionOnOperator == curElement.getLength();
-
-        isBeforeDeletionStart = isBeforeOp(read.getCigar(), currentOperatorIndex, CigarOperator.D, isLastOp, isLastBaseOfOp);
-        isBeforeDeletedBase = isBeforeDeletionStart || (!isLastBaseOfOp && curElement.getOperator() == CigarOperator.D);
-        isAfterDeletionEnd = isAfterOp(read.getCigar(), currentOperatorIndex, CigarOperator.D, isFirstOp, isFirstBaseOfOp);
-        isAfterDeletedBase  = isAfterDeletionEnd || (!isFirstBaseOfOp && curElement.getOperator() == CigarOperator.D);
-        isBeforeInsertion = isBeforeOp(read.getCigar(), currentOperatorIndex, CigarOperator.I, isLastOp, isLastBaseOfOp)
-                || (!sawMop && curElement.getOperator() == CigarOperator.I);
-        isAfterInsertion = isAfterOp(read.getCigar(), currentOperatorIndex, CigarOperator.I, isFirstOp, isFirstBaseOfOp);
-        isNextToSoftClip = isBeforeOp(read.getCigar(), currentOperatorIndex, CigarOperator.S, isLastOp, isLastBaseOfOp)
-                || isAfterOp(read.getCigar(), currentOperatorIndex, CigarOperator.S, isFirstOp, isFirstBaseOfOp);
-
-        return true;
-    }
-
-    private static boolean isBeforeOp(final Cigar cigar,
-                                      final int currentOperatorIndex,
-                                      final CigarOperator op,
-                                      final boolean isLastOp,
-                                      final boolean isLastBaseOfOp) {
-        return  !isLastOp && isLastBaseOfOp && cigar.getCigarElement(currentOperatorIndex+1).getOperator() == op;
-    }
-
-    private static boolean isAfterOp(final Cigar cigar,
-                                     final int currentOperatorIndex,
-                                     final CigarOperator op,
-                                     final boolean isFirstOp,
-                                     final boolean isFirstBaseOfOp) {
-        return  !isFirstOp && isFirstBaseOfOp && cigar.getCigarElement(currentOperatorIndex-1).getOperator() == op;
-    }
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/locusiterator/LocusIteratorBenchmark.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/locusiterator/LocusIteratorBenchmark.java
deleted file mode 100644
index 1f02a68..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/locusiterator/LocusIteratorBenchmark.java
+++ /dev/null
@@ -1,142 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.locusiterator;
-
-import com.google.caliper.Param;
-import com.google.caliper.SimpleBenchmark;
-import htsjdk.samtools.SAMFileHeader;
-import org.broadinstitute.gatk.engine.contexts.AlignmentContext;
-import org.broadinstitute.gatk.utils.GenomeLocParser;
-import org.broadinstitute.gatk.utils.QualityUtils;
-import org.broadinstitute.gatk.utils.Utils;
-import org.broadinstitute.gatk.utils.sam.ArtificialSAMUtils;
-import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
-
-import java.util.LinkedList;
-import java.util.List;
-
-/**
- * Caliper microbenchmark of fragment pileup
- */
-public class LocusIteratorBenchmark extends SimpleBenchmark {
-    protected SAMFileHeader header;
-    protected GenomeLocParser genomeLocParser;
-
-    List<GATKSAMRecord> reads = new LinkedList<GATKSAMRecord>();
-    final int readLength = 101;
-    final int nReads = 10000;
-    final int locus = 1;
-
-    @Param({"101M", "50M10I40M", "50M10D40M"})
-    String cigar; // set automatically by framework
-
-    @Override protected void setUp() {
-        header = ArtificialSAMUtils.createArtificialSamHeader(1, 1, 1000);
-        genomeLocParser = new GenomeLocParser(header.getSequenceDictionary());
-
-        for ( int j = 0; j < nReads; j++ ) {
-            GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(header, "read", 0, locus, readLength);
-            read.setReadBases(Utils.dupBytes((byte) 'A', readLength));
-            final byte[] quals = new byte[readLength];
-            for ( int i = 0; i < readLength; i++ )
-                quals[i] = (byte)(i % QualityUtils.MAX_SAM_QUAL_SCORE);
-            read.setBaseQualities(quals);
-            read.setCigarString(cigar);
-            reads.add(read);
-        }
-    }
-
-//    public void timeOriginalLIBS(int rep) {
-//        for ( int i = 0; i < rep; i++ ) {
-//            final org.broadinstitute.gatk.utils.locusiterator.old.LocusIteratorByState libs =
-//                    new org.broadinstitute.gatk.utils.locusiterator.old.LocusIteratorByState(
-//                            new LocusIteratorByStateBaseTest.FakeCloseableIterator<SAMRecord>(reads.iterator()),
-//                            LocusIteratorByStateBaseTest.createTestReadProperties(),
-//                            genomeLocParser,
-//                            LocusIteratorByState.sampleListForSAMWithoutReadGroups());
-//
-//            while ( libs.hasNext() ) {
-//                AlignmentContext context = libs.next();
-//            }
-//        }
-//    }
-//
-//    public void timeLegacyLIBS(int rep) {
-//        for ( int i = 0; i < rep; i++ ) {
-//            final org.broadinstitute.gatk.utils.locusiterator.legacy.LegacyLocusIteratorByState libs =
-//                    new org.broadinstitute.gatk.utils.locusiterator.legacy.LegacyLocusIteratorByState(
-//                            new LocusIteratorByStateBaseTest.FakeCloseableIterator<SAMRecord>(reads.iterator()),
-//                            LocusIteratorByStateBaseTest.createTestReadProperties(),
-//                            genomeLocParser,
-//                            LocusIteratorByState.sampleListForSAMWithoutReadGroups());
-//
-//            while ( libs.hasNext() ) {
-//                AlignmentContext context = libs.next();
-//            }
-//        }
-//    }
-
-    public void timeNewLIBS(int rep) {
-        for ( int i = 0; i < rep; i++ ) {
-            final org.broadinstitute.gatk.utils.locusiterator.LocusIteratorByState libs =
-                    new org.broadinstitute.gatk.utils.locusiterator.LocusIteratorByState(
-                            new LocusIteratorByStateBaseTest.FakeCloseableIterator<GATKSAMRecord>(reads.iterator()),
-                            LocusIteratorByStateBaseTest.createTestReadProperties(),
-                            genomeLocParser,
-                            LocusIteratorByState.sampleListForSAMWithoutReadGroups());
-
-            while ( libs.hasNext() ) {
-                AlignmentContext context = libs.next();
-            }
-        }
-    }
-
-//    public void timeOriginalLIBSStateMachine(int rep) {
-//        for ( int i = 0; i < rep; i++ ) {
-//            for ( final SAMRecord read : reads ) {
-//                final SAMRecordAlignmentState alignmentStateMachine = new SAMRecordAlignmentState(read);
-//                while ( alignmentStateMachine.stepForwardOnGenome() != null ) {
-//                    alignmentStateMachine.getGenomeOffset();
-//                }
-//            }
-//        }
-//    }
-
-    public void timeAlignmentStateMachine(int rep) {
-        for ( int i = 0; i < rep; i++ ) {
-            for ( final GATKSAMRecord read : reads ) {
-                final AlignmentStateMachine alignmentStateMachine = new AlignmentStateMachine(read);
-                while ( alignmentStateMachine.stepForwardOnGenome() != null ) {
-                    ;
-                }
-            }
-        }
-    }
-
-    public static void main(String[] args) {
-        com.google.caliper.Runner.main(LocusIteratorBenchmark.class, args);
-    }
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/locusiterator/LocusIteratorByStateBaseTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/locusiterator/LocusIteratorByStateBaseTest.java
deleted file mode 100644
index 286c712..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/locusiterator/LocusIteratorByStateBaseTest.java
+++ /dev/null
@@ -1,252 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.locusiterator;
-
-import htsjdk.samtools.*;
-import htsjdk.samtools.util.CloseableIterator;
-import org.broadinstitute.gatk.utils.BaseTest;
-import org.broadinstitute.gatk.engine.ReadProperties;
-import org.broadinstitute.gatk.engine.arguments.ValidationExclusion;
-import org.broadinstitute.gatk.engine.datasources.reads.SAMReaderID;
-import org.broadinstitute.gatk.engine.downsampling.DownsamplingMethod;
-import org.broadinstitute.gatk.engine.filters.ReadFilter;
-import org.broadinstitute.gatk.engine.iterators.ReadTransformer;
-import org.broadinstitute.gatk.utils.GenomeLocParser;
-import org.broadinstitute.gatk.utils.QualityUtils;
-import org.broadinstitute.gatk.utils.Utils;
-import org.broadinstitute.gatk.utils.sam.ArtificialSAMUtils;
-import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
-import org.testng.annotations.BeforeClass;
-import org.testng.annotations.DataProvider;
-
-import java.util.*;
-
-/**
- * testing of the new (non-legacy) version of LocusIteratorByState
- */
-public class LocusIteratorByStateBaseTest extends BaseTest {
-    protected static SAMFileHeader header;
-    protected GenomeLocParser genomeLocParser;
-
-    @BeforeClass
-    public void beforeClass() {
-        header = ArtificialSAMUtils.createArtificialSamHeader(1, 1, 1000);
-        genomeLocParser = new GenomeLocParser(header.getSequenceDictionary());
-    }
-
-    protected LocusIteratorByState makeLTBS(List<GATKSAMRecord> reads,
-                                            ReadProperties readAttributes) {
-        return new LocusIteratorByState(new FakeCloseableIterator<GATKSAMRecord>(reads.iterator()),
-                readAttributes,
-                genomeLocParser,
-                LocusIteratorByState.sampleListForSAMWithoutReadGroups());
-    }
-
-    public static ReadProperties createTestReadProperties() {
-        return createTestReadProperties(null, false);
-    }
-
-    public static ReadProperties createTestReadProperties( DownsamplingMethod downsamplingMethod, final boolean keepReads ) {
-        return new ReadProperties(
-                Collections.<SAMReaderID>emptyList(),
-                new SAMFileHeader(),
-                SAMFileHeader.SortOrder.coordinate,
-                false,
-                ValidationStringency.STRICT,
-                downsamplingMethod,
-                new ValidationExclusion(),
-                Collections.<ReadFilter>emptyList(),
-                Collections.<ReadTransformer>emptyList(),
-                true,
-                (byte) -1,
-                keepReads);
-    }
-
-    public static class FakeCloseableIterator<T> implements CloseableIterator<T> {
-        Iterator<T> iterator;
-
-        public FakeCloseableIterator(Iterator<T> it) {
-            iterator = it;
-        }
-
-        @Override
-        public void close() {}
-
-        @Override
-        public boolean hasNext() {
-            return iterator.hasNext();
-        }
-
-        @Override
-        public T next() {
-            return iterator.next();
-        }
-
-        @Override
-        public void remove() {
-            throw new UnsupportedOperationException("Don't remove!");
-        }
-    }
-
-    protected static class LIBSTest {
-        public static final int locus = 44367788;
-        final String cigarString;
-        final int readLength;
-        final private List<CigarElement> elements;
-
-        public LIBSTest(final String cigarString) {
-            final Cigar cigar = TextCigarCodec.getSingleton().decode(cigarString);
-            this.cigarString = cigarString;
-            this.elements = cigar.getCigarElements();
-            this.readLength = cigar.getReadLength();
-        }
-
-        @Override
-        public String toString() {
-            return "LIBSTest{" +
-                    "cigar='" + cigarString + '\'' +
-                    ", readLength=" + readLength +
-                    '}';
-        }
-
-        public List<CigarElement> getElements() {
-            return elements;
-        }
-
-        public GATKSAMRecord makeRead() {
-            GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(header, "read", 0, locus, readLength);
-            read.setReadBases(Utils.dupBytes((byte) 'A', readLength));
-            final byte[] quals = new byte[readLength];
-            for ( int i = 0; i < readLength; i++ )
-                quals[i] = (byte)(i % QualityUtils.MAX_SAM_QUAL_SCORE);
-            read.setBaseQualities(quals);
-            read.setCigarString(cigarString);
-            return read;
-        }
-    }
-
-    private boolean isIndel(final CigarElement ce) {
-        return ce.getOperator() == CigarOperator.D || ce.getOperator() == CigarOperator.I;
-    }
-
-    private boolean startsWithDeletion(final List<CigarElement> elements) {
-        for ( final CigarElement element : elements ) {
-            switch ( element.getOperator() ) {
-                case M:
-                case I:
-                case EQ:
-                case X:
-                    return false;
-                case D:
-                    return true;
-                default:
-                    // keep looking
-            }
-        }
-
-        return false;
-    }
-
-    private LIBSTest makePermutationTest(final List<CigarElement> elements) {
-        CigarElement last = null;
-        boolean hasMatch = false;
-
-        // starts with D => bad
-        if ( startsWithDeletion(elements) )
-            return null;
-
-        // ends with D => bad
-        if ( elements.get(elements.size()-1).getOperator() == CigarOperator.D )
-            return null;
-
-        // make sure it's valid
-        String cigar = "";
-        int len = 0;
-        for ( final CigarElement ce : elements ) {
-            if ( ce.getOperator() == CigarOperator.N )
-                return null; // TODO -- don't support N
-
-            // abort on a bad cigar
-            if ( last != null ) {
-                if ( ce.getOperator() == last.getOperator() )
-                    return null;
-                if ( isIndel(ce) && isIndel(last) )
-                    return null;
-            }
-
-            cigar += ce.getLength() + ce.getOperator().toString();
-            len += ce.getLength();
-            last = ce;
-            hasMatch = hasMatch || ce.getOperator() == CigarOperator.M;
-        }
-
-        if ( ! hasMatch && elements.size() == 1 &&
-                ! (last.getOperator() == CigarOperator.I || last.getOperator() == CigarOperator.S))
-            return null;
-
-        return new LIBSTest(cigar);
-    }
-
-    @DataProvider(name = "LIBSTest")
-    public Object[][] createLIBSTests(final List<Integer> cigarLengths, final List<Integer> combinations) {
-        final List<Object[]> tests = new LinkedList<Object[]>();
-
-        final List<CigarOperator> allOps = Arrays.asList(CigarOperator.values());
-
-        final List<CigarElement> singleCigars = new LinkedList<CigarElement>();
-        for ( final int len : cigarLengths )
-            for ( final CigarOperator op : allOps )
-                singleCigars.add(new CigarElement(len, op));
-
-        for ( final int complexity : combinations ) {
-            for ( final List<CigarElement> elements : Utils.makePermutations(singleCigars, complexity, true) ) {
-                final LIBSTest test = makePermutationTest(elements);
-                if ( test != null ) tests.add(new Object[]{test});
-            }
-        }
-
-        return tests.toArray(new Object[][]{});
-    }
-
-    /**
-     * Work around inadequate tests that aren't worth fixing.
-     *
-     * Look at the CIGAR 2M2P2D2P2M.  Both M states border a deletion, separated by P (padding elements).  So
-     * the right answer for deletions here is true for isBeforeDeletion() and isAfterDeletion() for the first
-     * and second M.  But the LIBS_position doesn't say so.
-     *
-     * @param elements
-     * @return
-     */
-    protected static boolean hasNeighboringPaddedOps(final List<CigarElement> elements, final int elementI) {
-        return (elementI - 1 >= 0 && isPadding(elements.get(elementI-1))) ||
-                (elementI + 1 < elements.size() && isPadding(elements.get(elementI+1)));
-    }
-
-    private static boolean isPadding(final CigarElement elt) {
-        return elt.getOperator() == CigarOperator.P || elt.getOperator() == CigarOperator.H || elt.getOperator() == CigarOperator.S;
-    }
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/locusiterator/LocusIteratorByStateUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/locusiterator/LocusIteratorByStateUnitTest.java
deleted file mode 100644
index 08cbeca..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/locusiterator/LocusIteratorByStateUnitTest.java
+++ /dev/null
@@ -1,753 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.locusiterator;
-
-import htsjdk.samtools.CigarOperator;
-import htsjdk.samtools.SAMFileHeader;
-import htsjdk.samtools.SAMReadGroupRecord;
-import org.broadinstitute.gatk.engine.ReadProperties;
-import org.broadinstitute.gatk.engine.contexts.AlignmentContext;
-import org.broadinstitute.gatk.engine.downsampling.DownsampleType;
-import org.broadinstitute.gatk.engine.downsampling.DownsamplingMethod;
-import org.broadinstitute.gatk.utils.NGSPlatform;
-import org.broadinstitute.gatk.utils.QualityUtils;
-import org.broadinstitute.gatk.utils.Utils;
-import org.broadinstitute.gatk.utils.pileup.PileupElement;
-import org.broadinstitute.gatk.utils.pileup.ReadBackedPileup;
-import org.broadinstitute.gatk.utils.sam.ArtificialBAMBuilder;
-import org.broadinstitute.gatk.utils.sam.ArtificialSAMUtils;
-import org.broadinstitute.gatk.utils.sam.GATKSAMReadGroupRecord;
-import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
-import org.testng.Assert;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
-
-import java.util.*;
-
-/**
- * testing of the new (non-legacy) version of LocusIteratorByState
- */
-public class LocusIteratorByStateUnitTest extends LocusIteratorByStateBaseTest {
-    private static final boolean DEBUG = false;
-    protected LocusIteratorByState li;
-
-    @Test(enabled = !DEBUG)
-    public void testUnmappedAndAllIReadsPassThrough() {
-        final int readLength = 10;
-        GATKSAMRecord mapped1 = ArtificialSAMUtils.createArtificialRead(header,"mapped1",0,1,readLength);
-        GATKSAMRecord mapped2 = ArtificialSAMUtils.createArtificialRead(header,"mapped2",0,1,readLength);
-        GATKSAMRecord unmapped = ArtificialSAMUtils.createArtificialRead(header,"unmapped",0,1,readLength);
-        GATKSAMRecord allI = ArtificialSAMUtils.createArtificialRead(header,"allI",0,1,readLength);
-
-        unmapped.setReadUnmappedFlag(true);
-        unmapped.setCigarString("*");
-        allI.setCigarString(readLength + "I");
-
-        List<GATKSAMRecord> reads = Arrays.asList(mapped1, unmapped, allI, mapped2);
-
-        // create the iterator by state with the fake reads and fake records
-        li = makeLTBS(reads,createTestReadProperties(DownsamplingMethod.NONE, true));
-
-        Assert.assertTrue(li.hasNext());
-        AlignmentContext context = li.next();
-        ReadBackedPileup pileup = context.getBasePileup();
-        Assert.assertEquals(pileup.depthOfCoverage(), 2, "Should see only 2 reads in pileup, even with unmapped and all I reads");
-
-        final List<GATKSAMRecord> rawReads = li.transferReadsFromAllPreviousPileups();
-        Assert.assertEquals(rawReads, reads, "Input and transferred read lists should be the same, and include the unmapped and all I reads");
-    }
-
-    @Test(enabled = true && ! DEBUG)
-    public void testXandEQOperators() {
-        final byte[] bases1 = new byte[] {'A','A','A','A','A','A','A','A','A','A'};
-        final byte[] bases2 = new byte[] {'A','A','A','C','A','A','A','A','A','C'};
-
-        // create a test version of the Reads object
-        ReadProperties readAttributes = createTestReadProperties();
-
-        GATKSAMRecord r1 = ArtificialSAMUtils.createArtificialRead(header,"r1",0,1,10);
-        r1.setReadBases(bases1);
-        r1.setBaseQualities(new byte[] {20,20,20,20,20,20,20,20,20,20});
-        r1.setCigarString("10M");
-
-        GATKSAMRecord r2 = ArtificialSAMUtils.createArtificialRead(header,"r2",0,1,10);
-        r2.setReadBases(bases2);
-        r2.setBaseQualities(new byte[] {20,20,20,20,20,20,20,20,20,20,20,20});
-        r2.setCigarString("3=1X5=1X");
-
-        GATKSAMRecord r3 = ArtificialSAMUtils.createArtificialRead(header,"r3",0,1,10);
-        r3.setReadBases(bases2);
-        r3.setBaseQualities(new byte[] {20,20,20,20,20,20,20,20,20,20,20,20});
-        r3.setCigarString("3=1X5M1X");
-
-        GATKSAMRecord r4  = ArtificialSAMUtils.createArtificialRead(header,"r4",0,1,10);
-        r4.setReadBases(bases2);
-        r4.setBaseQualities(new byte[] {20,20,20,20,20,20,20,20,20,20});
-        r4.setCigarString("10M");
-
-        List<GATKSAMRecord> reads = Arrays.asList(r1, r2, r3, r4);
-
-        // create the iterator by state with the fake reads and fake records
-        li = makeLTBS(reads,readAttributes);
-
-        while (li.hasNext()) {
-            AlignmentContext context = li.next();
-            ReadBackedPileup pileup = context.getBasePileup();
-            Assert.assertEquals(pileup.depthOfCoverage(), 4);
-        }
-    }
-
-    @Test(enabled = true && ! DEBUG)
-    public void testIndelsInRegularPileup() {
-        final byte[] bases = new byte[] {'A','A','A','A','A','A','A','A','A','A'};
-        final byte[] indelBases = new byte[] {'A','A','A','A','C','T','A','A','A','A','A','A'};
-
-        // create a test version of the Reads object
-        ReadProperties readAttributes = createTestReadProperties();
-
-        GATKSAMRecord before = ArtificialSAMUtils.createArtificialRead(header,"before",0,1,10);
-        before.setReadBases(bases);
-        before.setBaseQualities(new byte[] {20,20,20,20,20,20,20,20,20,20});
-        before.setCigarString("10M");
-
-        GATKSAMRecord during = ArtificialSAMUtils.createArtificialRead(header,"during",0,2,10);
-        during.setReadBases(indelBases);
-        during.setBaseQualities(new byte[] {20,20,20,20,20,20,20,20,20,20,20,20});
-        during.setCigarString("4M2I6M");
-
-        GATKSAMRecord after  = ArtificialSAMUtils.createArtificialRead(header,"after",0,3,10);
-        after.setReadBases(bases);
-        after.setBaseQualities(new byte[] {20,20,20,20,20,20,20,20,20,20});
-        after.setCigarString("10M");
-
-        List<GATKSAMRecord> reads = Arrays.asList(before, during, after);
-
-        // create the iterator by state with the fake reads and fake records
-        li = makeLTBS(reads,readAttributes);
-
-        boolean foundIndel = false;
-        while (li.hasNext()) {
-            AlignmentContext context = li.next();
-            ReadBackedPileup pileup = context.getBasePileup().getBaseFilteredPileup(10);
-            for (PileupElement p : pileup) {
-                if (p.isBeforeInsertion()) {
-                    foundIndel = true;
-                    Assert.assertEquals(p.getLengthOfImmediatelyFollowingIndel(), 2, "Wrong event length");
-                    Assert.assertEquals(p.getBasesOfImmediatelyFollowingInsertion(), "CT", "Inserted bases are incorrect");
-                    break;
-               }
-            }
-
-         }
-
-         Assert.assertTrue(foundIndel,"Indel in pileup not found");
-    }
-
-    @Test(enabled = false && ! DEBUG)
-    public void testWholeIndelReadInIsolation() {
-        final int firstLocus = 44367789;
-
-        // create a test version of the Reads object
-        ReadProperties readAttributes = createTestReadProperties();
-
-        GATKSAMRecord indelOnlyRead = ArtificialSAMUtils.createArtificialRead(header, "indelOnly", 0, firstLocus, 76);
-        indelOnlyRead.setReadBases(Utils.dupBytes((byte)'A',76));
-        indelOnlyRead.setBaseQualities(Utils.dupBytes((byte) '@', 76));
-        indelOnlyRead.setCigarString("76I");
-
-        List<GATKSAMRecord> reads = Arrays.asList(indelOnlyRead);
-
-        // create the iterator by state with the fake reads and fake records
-        li = makeLTBS(reads, readAttributes);
-
-        // Traditionally, reads that end with indels bleed into the pileup at the following locus.  Verify that the next pileup contains this read
-        // and considers it to be an indel-containing read.
-        Assert.assertTrue(li.hasNext(),"Should have found a whole-indel read in the normal base pileup without extended events enabled");
-        AlignmentContext alignmentContext = li.next();
-        Assert.assertEquals(alignmentContext.getLocation().getStart(), firstLocus, "Base pileup is at incorrect location.");
-        ReadBackedPileup basePileup = alignmentContext.getBasePileup();
-        Assert.assertEquals(basePileup.getReads().size(),1,"Pileup is of incorrect size");
-        Assert.assertSame(basePileup.getReads().get(0), indelOnlyRead, "Read in pileup is incorrect");
-    }
-
-    /**
-     * Test to make sure that reads supporting only an indel (example cigar string: 76I) do
-     * not negatively influence the ordering of the pileup.
-     */
-    @Test(enabled = true && ! DEBUG)
-    public void testWholeIndelRead() {
-        final int firstLocus = 44367788, secondLocus = firstLocus + 1;
-
-        GATKSAMRecord leadingRead = ArtificialSAMUtils.createArtificialRead(header,"leading",0,firstLocus,76);
-        leadingRead.setReadBases(Utils.dupBytes((byte)'A',76));
-        leadingRead.setBaseQualities(Utils.dupBytes((byte)'@',76));
-        leadingRead.setCigarString("1M75I");
-
-        GATKSAMRecord indelOnlyRead = ArtificialSAMUtils.createArtificialRead(header,"indelOnly",0,secondLocus,76);
-        indelOnlyRead.setReadBases(Utils.dupBytes((byte) 'A', 76));
-        indelOnlyRead.setBaseQualities(Utils.dupBytes((byte)'@',76));
-        indelOnlyRead.setCigarString("76I");
-
-        GATKSAMRecord fullMatchAfterIndel = ArtificialSAMUtils.createArtificialRead(header,"fullMatch",0,secondLocus,76);
-        fullMatchAfterIndel.setReadBases(Utils.dupBytes((byte)'A',76));
-        fullMatchAfterIndel.setBaseQualities(Utils.dupBytes((byte)'@',76));
-        fullMatchAfterIndel.setCigarString("75I1M");
-
-        List<GATKSAMRecord> reads = Arrays.asList(leadingRead, indelOnlyRead, fullMatchAfterIndel);
-
-        // create the iterator by state with the fake reads and fake records
-        li = makeLTBS(reads, createTestReadProperties());
-        int currentLocus = firstLocus;
-        int numAlignmentContextsFound = 0;
-
-        while(li.hasNext()) {
-            AlignmentContext alignmentContext = li.next();
-            Assert.assertEquals(alignmentContext.getLocation().getStart(),currentLocus,"Current locus returned by alignment context is incorrect");
-
-            if(currentLocus == firstLocus) {
-                List<GATKSAMRecord> readsAtLocus = alignmentContext.getBasePileup().getReads();
-                Assert.assertEquals(readsAtLocus.size(),1,"Wrong number of reads at locus " + currentLocus);
-                Assert.assertSame(readsAtLocus.get(0),leadingRead,"leadingRead absent from pileup at locus " + currentLocus);
-            }
-            else if(currentLocus == secondLocus) {
-                List<GATKSAMRecord> readsAtLocus = alignmentContext.getBasePileup().getReads();
-                Assert.assertEquals(readsAtLocus.size(),1,"Wrong number of reads at locus " + currentLocus);
-                Assert.assertSame(readsAtLocus.get(0),fullMatchAfterIndel,"fullMatchAfterIndel absent from pileup at locus " + currentLocus);
-            }
-
-            currentLocus++;
-            numAlignmentContextsFound++;
-        }
-
-        Assert.assertEquals(numAlignmentContextsFound, 2, "Found incorrect number of alignment contexts");
-    }
-
-    /**
-     * Test to make sure that reads supporting only an indel (example cigar string: 76I) are represented properly
-     */
-    @Test(enabled = false && ! DEBUG)
-    public void testWholeIndelReadRepresentedTest() {
-        final int firstLocus = 44367788, secondLocus = firstLocus + 1;
-
-        GATKSAMRecord read1 = ArtificialSAMUtils.createArtificialRead(header,"read1",0,secondLocus,1);
-        read1.setReadBases(Utils.dupBytes((byte) 'A', 1));
-        read1.setBaseQualities(Utils.dupBytes((byte) '@', 1));
-        read1.setCigarString("1I");
-
-        List<GATKSAMRecord> reads = Arrays.asList(read1);
-
-        // create the iterator by state with the fake reads and fake records
-        li = makeLTBS(reads, createTestReadProperties());
-
-        while(li.hasNext()) {
-            AlignmentContext alignmentContext = li.next();
-            ReadBackedPileup p = alignmentContext.getBasePileup();
-            Assert.assertTrue(p.getNumberOfElements() == 1);
-            // TODO -- fix tests
-//            PileupElement pe = p.iterator().next();
-//            Assert.assertTrue(pe.isBeforeInsertion());
-//            Assert.assertFalse(pe.isAfterInsertion());
-//            Assert.assertEquals(pe.getBasesOfImmediatelyFollowingInsertion(), "A");
-        }
-
-        GATKSAMRecord read2 = ArtificialSAMUtils.createArtificialRead(header,"read2",0,secondLocus,10);
-        read2.setReadBases(Utils.dupBytes((byte) 'A', 10));
-        read2.setBaseQualities(Utils.dupBytes((byte) '@', 10));
-        read2.setCigarString("10I");
-
-        reads = Arrays.asList(read2);
-
-        // create the iterator by state with the fake reads and fake records
-        li = makeLTBS(reads, createTestReadProperties());
-
-        while(li.hasNext()) {
-            AlignmentContext alignmentContext = li.next();
-            ReadBackedPileup p = alignmentContext.getBasePileup();
-            Assert.assertTrue(p.getNumberOfElements() == 1);
-            // TODO -- fix tests
-//            PileupElement pe = p.iterator().next();
-//            Assert.assertTrue(pe.isBeforeInsertion());
-//            Assert.assertFalse(pe.isAfterInsertion());
-//            Assert.assertEquals(pe.getBasesOfImmediatelyFollowingInsertion(), "AAAAAAAAAA");
-        }
-    }
-
-
-    /////////////////////////////////////////////
-    // get event length and bases calculations //
-    /////////////////////////////////////////////
-
-    @DataProvider(name = "IndelLengthAndBasesTest")
-    public Object[][] makeIndelLengthAndBasesTest() {
-        final String EVENT_BASES = "ACGTACGTACGT";
-        final List<Object[]> tests = new LinkedList<Object[]>();
-
-        for ( int eventSize = 1; eventSize < 10; eventSize++ ) {
-            for ( final CigarOperator indel : Arrays.asList(CigarOperator.D, CigarOperator.I) ) {
-                final String cigar = String.format("2M%d%s1M", eventSize, indel.toString());
-                final String eventBases = indel == CigarOperator.D ? "" : EVENT_BASES.substring(0, eventSize);
-                final int readLength = 3 + eventBases.length();
-
-                GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(header, "read", 0, 1, readLength);
-                read.setReadBases(("TT" + eventBases + "A").getBytes());
-                final byte[] quals = new byte[readLength];
-                for ( int i = 0; i < readLength; i++ )
-                    quals[i] = (byte)(i % QualityUtils.MAX_SAM_QUAL_SCORE);
-                read.setBaseQualities(quals);
-                read.setCigarString(cigar);
-
-                tests.add(new Object[]{read, indel, eventSize, eventBases.equals("") ? null : eventBases});
-            }
-        }
-
-        return tests.toArray(new Object[][]{});
-    }
-
-    @Test(enabled = true && ! DEBUG, dataProvider = "IndelLengthAndBasesTest")
-    public void testIndelLengthAndBasesTest(GATKSAMRecord read, final CigarOperator op, final int eventSize, final String eventBases) {
-        // create the iterator by state with the fake reads and fake records
-        li = makeLTBS(Arrays.asList((GATKSAMRecord)read), createTestReadProperties());
-
-        Assert.assertTrue(li.hasNext());
-
-        final PileupElement firstMatch = getFirstPileupElement(li.next());
-
-        Assert.assertEquals(firstMatch.getLengthOfImmediatelyFollowingIndel(), 0, "Length != 0 for site not adjacent to indel");
-        Assert.assertEquals(firstMatch.getBasesOfImmediatelyFollowingInsertion(), null, "Getbases of following event should be null at non-adajenct event");
-
-        Assert.assertTrue(li.hasNext());
-
-        final PileupElement pe = getFirstPileupElement(li.next());
-
-        if ( op == CigarOperator.D )
-            Assert.assertTrue(pe.isBeforeDeletionStart());
-        else
-            Assert.assertTrue(pe.isBeforeInsertion());
-
-        Assert.assertEquals(pe.getLengthOfImmediatelyFollowingIndel(), eventSize, "Length of event failed");
-        Assert.assertEquals(pe.getBasesOfImmediatelyFollowingInsertion(), eventBases, "Getbases of following event failed");
-    }
-
-    private PileupElement getFirstPileupElement(final AlignmentContext context) {
-        final ReadBackedPileup p = context.getBasePileup();
-        Assert.assertEquals(p.getNumberOfElements(), 1);
-        return p.iterator().next();
-    }
-
-    ////////////////////////////////////////////
-    // comprehensive LIBS/PileupElement tests //
-    ////////////////////////////////////////////
-
-    @DataProvider(name = "MyLIBSTest")
-    public Object[][] makeLIBSTest() {
-        final List<Object[]> tests = new LinkedList<Object[]>();
-
-//        tests.add(new Object[]{new LIBSTest("2=2D2=2X", 1)});
-//        return tests.toArray(new Object[][]{});
-
-        return createLIBSTests(
-                Arrays.asList(1, 2),
-                Arrays.asList(1, 2, 3, 4));
-
-//        return createLIBSTests(
-//                Arrays.asList(2),
-//                Arrays.asList(3));
-    }
-
-    @Test(enabled = ! DEBUG, dataProvider = "MyLIBSTest")
-    public void testLIBS(LIBSTest params) {
-        // create the iterator by state with the fake reads and fake records
-        final GATKSAMRecord read = params.makeRead();
-        li = makeLTBS(Arrays.asList((GATKSAMRecord)read), createTestReadProperties());
-        final LIBS_position tester = new LIBS_position(read);
-
-        int bpVisited = 0;
-        int lastOffset = 0;
-        while ( li.hasNext() ) {
-            bpVisited++;
-
-            AlignmentContext alignmentContext = li.next();
-            ReadBackedPileup p = alignmentContext.getBasePileup();
-            Assert.assertEquals(p.getNumberOfElements(), 1);
-            PileupElement pe = p.iterator().next();
-
-            Assert.assertEquals(p.getNumberOfDeletions(), pe.isDeletion() ? 1 : 0, "wrong number of deletions in the pileup");
-            Assert.assertEquals(p.getNumberOfMappingQualityZeroReads(), pe.getRead().getMappingQuality() == 0 ? 1 : 0, "wront number of mapq reads in the pileup");
-
-            tester.stepForwardOnGenome();
-
-            if ( ! hasNeighboringPaddedOps(params.getElements(), pe.getCurrentCigarOffset()) ) {
-                Assert.assertEquals(pe.isBeforeDeletionStart(), tester.isBeforeDeletionStart, "before deletion start failure");
-                Assert.assertEquals(pe.isAfterDeletionEnd(), tester.isAfterDeletionEnd, "after deletion end failure");
-            }
-
-            Assert.assertEquals(pe.isBeforeInsertion(), tester.isBeforeInsertion, "before insertion failure");
-            Assert.assertEquals(pe.isAfterInsertion(), tester.isAfterInsertion, "after insertion failure");
-            Assert.assertEquals(pe.isNextToSoftClip(), tester.isNextToSoftClip, "next to soft clip failure");
-
-            Assert.assertTrue(pe.getOffset() >= lastOffset, "Somehow read offsets are decreasing: lastOffset " + lastOffset + " current " + pe.getOffset());
-            Assert.assertEquals(pe.getOffset(), tester.getCurrentReadOffset(), "Read offsets are wrong at " + bpVisited);
-
-            Assert.assertEquals(pe.getCurrentCigarElement(), read.getCigar().getCigarElement(tester.currentOperatorIndex), "CigarElement index failure");
-            Assert.assertEquals(pe.getOffsetInCurrentCigar(), tester.getCurrentPositionOnOperatorBase0(), "CigarElement index failure");
-
-            Assert.assertEquals(read.getCigar().getCigarElement(pe.getCurrentCigarOffset()), pe.getCurrentCigarElement(), "Current cigar element isn't what we'd get from the read itself");
-
-            Assert.assertTrue(pe.getOffsetInCurrentCigar() >= 0, "Offset into current cigar too small");
-            Assert.assertTrue(pe.getOffsetInCurrentCigar() < pe.getCurrentCigarElement().getLength(), "Offset into current cigar too big");
-
-            Assert.assertEquals(pe.getOffset(), tester.getCurrentReadOffset(), "Read offset failure");
-            lastOffset = pe.getOffset();
-        }
-
-        final int expectedBpToVisit = read.getAlignmentEnd() - read.getAlignmentStart() + 1;
-        Assert.assertEquals(bpVisited, expectedBpToVisit, "Didn't visit the expected number of bp");
-    }
-
-    // ------------------------------------------------------------
-    //
-    // Tests for keeping reads
-    //
-    // ------------------------------------------------------------
-
-    @DataProvider(name = "LIBS_ComplexPileupTests")
-    public Object[][] makeLIBS_ComplexPileupTests() {
-        final List<Object[]> tests = new LinkedList<Object[]>();
-
-        for ( final int downsampleTo : Arrays.asList(-1, 1, 2, 5, 10, 30)) {
-            for ( final int nReadsPerLocus : Arrays.asList(1, 10, 60) ) {
-                for ( final int nLoci : Arrays.asList(1, 10, 25) ) {
-                    for ( final int nSamples : Arrays.asList(1, 2, 10) ) {
-                        for ( final boolean keepReads : Arrays.asList(true, false) ) {
-                            for ( final boolean grabReadsAfterEachCycle : Arrays.asList(true, false) ) {
-//        for ( final int downsampleTo : Arrays.asList(1)) {
-//            for ( final int nReadsPerLocus : Arrays.asList(1) ) {
-//                for ( final int nLoci : Arrays.asList(1) ) {
-//                    for ( final int nSamples : Arrays.asList(1) ) {
-//                        for ( final boolean keepReads : Arrays.asList(true) ) {
-//                            for ( final boolean grabReadsAfterEachCycle : Arrays.asList(true) ) {
-                                tests.add(new Object[]{nReadsPerLocus, nLoci, nSamples,
-                                        keepReads, grabReadsAfterEachCycle,
-                                        downsampleTo});
-                            }
-                        }
-                    }
-                }
-            }
-        }
-
-        return tests.toArray(new Object[][]{});
-    }
-
-    @Test(enabled = true && ! DEBUG, dataProvider = "LIBS_ComplexPileupTests")
-    public void testLIBS_ComplexPileupTests(final int nReadsPerLocus,
-                                            final int nLoci,
-                                            final int nSamples,
-                                            final boolean keepReads,
-                                            final boolean grabReadsAfterEachCycle,
-                                            final int downsampleTo) {
-        //logger.warn(String.format("testLIBSKeepSubmittedReads %d %d %d %b %b %b", nReadsPerLocus, nLoci, nSamples, keepReads, grabReadsAfterEachCycle, downsample));
-        final int readLength = 10;
-
-        final boolean downsample = downsampleTo != -1;
-        final DownsamplingMethod downsampler = downsample
-                ? new DownsamplingMethod(DownsampleType.BY_SAMPLE, downsampleTo, null)
-                : new DownsamplingMethod(DownsampleType.NONE, null, null);
-
-        final ArtificialBAMBuilder bamBuilder = new ArtificialBAMBuilder(header.getSequenceDictionary(), nReadsPerLocus, nLoci);
-        bamBuilder.createAndSetHeader(nSamples).setReadLength(readLength).setAlignmentStart(1);
-
-        final List<GATKSAMRecord> reads = bamBuilder.makeReads();
-        li = new LocusIteratorByState(new FakeCloseableIterator<GATKSAMRecord>(reads.iterator()),
-                createTestReadProperties(downsampler, keepReads),
-                genomeLocParser,
-                bamBuilder.getSamples());
-
-        final Set<GATKSAMRecord> seenSoFar = new HashSet<GATKSAMRecord>();
-        final Set<GATKSAMRecord> keptReads = new HashSet<GATKSAMRecord>();
-        int bpVisited = 0;
-        while ( li.hasNext() ) {
-            bpVisited++;
-            final AlignmentContext alignmentContext = li.next();
-            final ReadBackedPileup p = alignmentContext.getBasePileup();
-
-            AssertWellOrderedPileup(p);
-
-            if ( downsample ) {
-                // just not a safe test
-                //Assert.assertTrue(p.getNumberOfElements() <= maxDownsampledCoverage * nSamples, "Too many reads at locus after downsampling");
-            } else {
-                final int minPileupSize = nReadsPerLocus * nSamples;
-                Assert.assertTrue(p.getNumberOfElements() >= minPileupSize);
-            }
-
-            // the number of reads starting here
-            int nReadsStartingHere = 0;
-            for ( final GATKSAMRecord read : p.getReads() )
-                if ( read.getAlignmentStart() == alignmentContext.getPosition() )
-                    nReadsStartingHere++;
-
-            // we can have no more than maxDownsampledCoverage per sample
-            final int maxCoveragePerLocus = downsample ? downsampleTo : nReadsPerLocus;
-            Assert.assertTrue(nReadsStartingHere <= maxCoveragePerLocus * nSamples);
-
-            seenSoFar.addAll(p.getReads());
-            if ( keepReads && grabReadsAfterEachCycle ) {
-                final List<GATKSAMRecord> locusReads = li.transferReadsFromAllPreviousPileups();
-
-
-                if ( downsample ) {
-                    // with downsampling we might have some reads here that were downsampled away
-                    // in the pileup.  We want to ensure that no more than the max coverage per sample is added
-                    Assert.assertTrue(locusReads.size() >= nReadsStartingHere);
-                    Assert.assertTrue(locusReads.size() <= maxCoveragePerLocus * nSamples);
-                } else {
-                    Assert.assertEquals(locusReads.size(), nReadsStartingHere);
-                }
-                keptReads.addAll(locusReads);
-
-                // check that all reads we've seen so far are in our keptReads
-                for ( final GATKSAMRecord read : seenSoFar ) {
-                    Assert.assertTrue(keptReads.contains(read), "A read that appeared in a pileup wasn't found in the kept reads: " + read);
-                }
-            }
-
-            if ( ! keepReads )
-                Assert.assertTrue(li.getReadsFromAllPreviousPileups().isEmpty(), "Not keeping reads but the underlying list of reads isn't empty");
-        }
-
-        if ( keepReads && ! grabReadsAfterEachCycle )
-            keptReads.addAll(li.transferReadsFromAllPreviousPileups());
-
-        if ( ! downsample ) { // downsampling may drop loci
-            final int expectedBpToVisit = nLoci + readLength - 1;
-            Assert.assertEquals(bpVisited, expectedBpToVisit, "Didn't visit the expected number of bp");
-        }
-
-        if ( keepReads ) {
-            // check we have the right number of reads
-            final int totalReads = nLoci * nReadsPerLocus * nSamples;
-            if ( ! downsample ) { // downsampling may drop reads
-                Assert.assertEquals(keptReads.size(), totalReads, "LIBS didn't keep the right number of reads during the traversal");
-
-                // check that the order of reads is the same as in our read list
-                for ( int i = 0; i < reads.size(); i++ ) {
-                    final GATKSAMRecord inputRead = reads.get(i);
-                    final GATKSAMRecord keptRead = reads.get(i);
-                    Assert.assertSame(keptRead, inputRead, "Input reads and kept reads differ at position " + i);
-                }
-            } else {
-                Assert.assertTrue(keptReads.size() <= totalReads, "LIBS didn't keep the right number of reads during the traversal");
-            }
-
-            // check uniqueness
-            final Set<String> readNames = new HashSet<String>();
-            for ( final GATKSAMRecord read : keptReads ) {
-                Assert.assertFalse(readNames.contains(read.getReadName()), "Found duplicate reads in the kept reads");
-                readNames.add(read.getReadName());
-            }
-
-            // check that all reads we've seen are in our keptReads
-            for ( final GATKSAMRecord read : seenSoFar ) {
-                Assert.assertTrue(keptReads.contains(read), "A read that appeared in a pileup wasn't found in the kept reads: " + read);
-            }
-
-            if ( ! downsample ) {
-                // check that every read in the list of keep reads occurred at least once in one of the pileups
-                for ( final GATKSAMRecord keptRead : keptReads ) {
-                    Assert.assertTrue(seenSoFar.contains(keptRead), "There's a read " + keptRead + " in our keptReads list that never appeared in any pileup");
-                }
-            }
-        }
-    }
-
-    private void AssertWellOrderedPileup(final ReadBackedPileup pileup) {
-        if ( ! pileup.isEmpty() ) {
-            int leftMostPos = -1;
-
-            for ( final PileupElement pe : pileup ) {
-                Assert.assertTrue(pileup.getLocation().getContig().equals(pe.getRead().getReferenceName()), "ReadBackedPileup contains an element " + pe + " that's on a different contig than the pileup itself");
-                Assert.assertTrue(pe.getRead().getAlignmentStart() >= leftMostPos,
-                        "ReadBackedPileup contains an element " + pe + " whose read's alignment start " + pe.getRead().getAlignmentStart()
-                                + " occurs before the leftmost position we've seen previously " + leftMostPos);
-            }
-        }
-    }
-
-    // ---------------------------------------------------------------------------
-    // make sure that downsampling isn't holding onto a bazillion reads
-    //
-    @DataProvider(name = "LIBS_NotHoldingTooManyReads")
-    public Object[][] makeLIBS_NotHoldingTooManyReads() {
-        final List<Object[]> tests = new LinkedList<Object[]>();
-
-        for ( final int downsampleTo : Arrays.asList(1, 10)) {
-            for ( final int nReadsPerLocus : Arrays.asList(100, 1000, 10000, 100000) ) {
-                for ( final int payloadInBytes : Arrays.asList(0, 1024, 1024*1024) ) {
-                    tests.add(new Object[]{nReadsPerLocus, downsampleTo, payloadInBytes});
-                }
-            }
-        }
-
-        return tests.toArray(new Object[][]{});
-    }
-
-    @Test(enabled = true && ! DEBUG, dataProvider = "LIBS_NotHoldingTooManyReads")
-//    @Test(enabled = true, dataProvider = "LIBS_NotHoldingTooManyReads", timeOut = 100000)
-    public void testLIBS_NotHoldingTooManyReads(final int nReadsPerLocus, final int downsampleTo, final int payloadInBytes) {
-        logger.warn(String.format("testLIBS_NotHoldingTooManyReads %d %d %d", nReadsPerLocus, downsampleTo, payloadInBytes));
-        final int readLength = 10;
-
-        final SAMFileHeader header = ArtificialSAMUtils.createArtificialSamHeader(1, 1, 100000);
-        final int nSamples = 1;
-        final List<String> samples = new ArrayList<String>(nSamples);
-        for ( int i = 0; i < nSamples; i++ ) {
-            final GATKSAMReadGroupRecord rg = new GATKSAMReadGroupRecord("rg" + i);
-            final String sample = "sample" + i;
-            samples.add(sample);
-            rg.setSample(sample);
-            rg.setPlatform(NGSPlatform.ILLUMINA.getDefaultPlatform());
-            header.addReadGroup(rg);
-        }
-
-        final boolean downsample = downsampleTo != -1;
-        final DownsamplingMethod downsampler = downsample
-                ? new DownsamplingMethod(DownsampleType.BY_SAMPLE, downsampleTo, null)
-                : new DownsamplingMethod(DownsampleType.NONE, null, null);
-
-        // final List<GATKSAMRecord> reads = ArtificialSAMUtils.createReadStream(nReadsPerLocus, nLoci, header, 1, readLength);
-
-        final WeakReadTrackingIterator iterator = new WeakReadTrackingIterator(nReadsPerLocus, readLength, payloadInBytes, header);
-
-        li = new LocusIteratorByState(iterator,
-                createTestReadProperties(downsampler, false),
-                genomeLocParser,
-                samples);
-
-        while ( li.hasNext() ) {
-            final AlignmentContext next = li.next();
-            Assert.assertTrue(next.getBasePileup().getNumberOfElements() <= downsampleTo, "Too many elements in pileup " + next);
-            // TODO -- assert that there are <= X reads in memory after GC for some X
-        }
-    }
-
-    private static class WeakReadTrackingIterator implements Iterator<GATKSAMRecord> {
-        final int nReads, readLength, payloadInBytes;
-        int readI = 0;
-        final SAMFileHeader header;
-
-        private WeakReadTrackingIterator(int nReads, int readLength, final int payloadInBytes, final SAMFileHeader header) {
-            this.nReads = nReads;
-            this.readLength = readLength;
-            this.header = header;
-            this.payloadInBytes = payloadInBytes;
-        }
-
-        @Override public boolean hasNext() { return readI < nReads; }
-        @Override public void remove() { throw new UnsupportedOperationException("no remove"); }
-
-        @Override
-        public GATKSAMRecord next() {
-            readI++;
-            return makeRead();
-        }
-
-        private GATKSAMRecord makeRead() {
-            final SAMReadGroupRecord rg = header.getReadGroups().get(0);
-            final String readName = String.format("%s.%d.%s", "read", readI, rg.getId());
-            final GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(header, readName, 0, 1, readLength);
-            read.setReadGroup(new GATKSAMReadGroupRecord(rg));
-            if ( payloadInBytes > 0 )
-                // add a payload byte array to push memory use per read even higher
-                read.setAttribute("PL", new byte[payloadInBytes]);
-            return read;
-        }
-    }
-
-    // ---------------------------------------------------------------------------
-    //
-    // make sure that adapter clipping is working properly in LIBS
-    //
-    // ---------------------------------------------------------------------------
-    @DataProvider(name = "AdapterClippingTest")
-    public Object[][] makeAdapterClippingTest() {
-        final List<Object[]> tests = new LinkedList<Object[]>();
-
-        final int start = 10;
-        for ( final int goodBases : Arrays.asList(10, 20, 30) ) {
-            for ( final int nClips : Arrays.asList(0, 1, 2, 10)) {
-                for ( final boolean onLeft : Arrays.asList(true, false) ) {
-                    final int readLength = nClips + goodBases;
-                    GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(header, "read1" , 0, start, readLength);
-                    read.setProperPairFlag(true);
-                    read.setReadPairedFlag(true);
-                    read.setReadUnmappedFlag(false);
-                    read.setMateUnmappedFlag(false);
-                    read.setReadBases(Utils.dupBytes((byte) 'A', readLength));
-                    read.setBaseQualities(Utils.dupBytes((byte) '@', readLength));
-                    read.setCigarString(readLength + "M");
-
-                    if ( onLeft ) {
-                        read.setReadNegativeStrandFlag(true);
-                        read.setMateNegativeStrandFlag(false);
-                        read.setMateAlignmentStart(start + nClips);
-                        read.setInferredInsertSize(readLength);
-                        tests.add(new Object[]{nClips, goodBases, 0, read});
-                    } else {
-                        read.setReadNegativeStrandFlag(false);
-                        read.setMateNegativeStrandFlag(true);
-                        read.setMateAlignmentStart(start - 1);
-                        read.setInferredInsertSize(goodBases - 1);
-                        tests.add(new Object[]{0, goodBases, nClips, read});
-                    }
-                }
-            }
-        }
-
-        return tests.toArray(new Object[][]{});
-    }
-
-    @Test(enabled = true, dataProvider = "AdapterClippingTest")
-    public void testAdapterClipping(final int nClipsOnLeft, final int nReadContainingPileups, final int nClipsOnRight, final GATKSAMRecord read) {
-
-        li = new LocusIteratorByState(new FakeCloseableIterator<>(Collections.singletonList(read).iterator()),
-                createTestReadProperties(DownsamplingMethod.NONE, false),
-                genomeLocParser,
-                LocusIteratorByState.sampleListForSAMWithoutReadGroups());
-
-        int expectedPos = read.getAlignmentStart() + nClipsOnLeft;
-        int nPileups = 0;
-        while ( li.hasNext() ) {
-            final AlignmentContext next = li.next();
-            Assert.assertEquals(next.getLocation().getStart(), expectedPos);
-            nPileups++;
-            expectedPos++;
-        }
-
-        final int nExpectedPileups = nReadContainingPileups;
-        Assert.assertEquals(nPileups, nExpectedPileups, "Wrong number of pileups seen");
-    }
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/locusiterator/PerSampleReadStateManagerUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/locusiterator/PerSampleReadStateManagerUnitTest.java
deleted file mode 100644
index 4a760b5..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/locusiterator/PerSampleReadStateManagerUnitTest.java
+++ /dev/null
@@ -1,188 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.locusiterator;
-
-import htsjdk.samtools.SAMRecord;
-import org.broadinstitute.gatk.utils.MathUtils;
-import org.broadinstitute.gatk.utils.sam.ArtificialSAMUtils;
-import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
-import org.testng.Assert;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
-
-import java.util.*;
-
-/**
- * testing of the new (non-legacy) version of LocusIteratorByState
- */
-public class PerSampleReadStateManagerUnitTest extends LocusIteratorByStateBaseTest {
-    private class PerSampleReadStateManagerTest extends TestDataProvider {
-        private List<Integer> readCountsPerAlignmentStart;
-        private List<SAMRecord> reads;
-        private List<ArrayList<AlignmentStateMachine>> recordStatesByAlignmentStart;
-        private int removalInterval;
-
-        public PerSampleReadStateManagerTest( List<Integer> readCountsPerAlignmentStart, int removalInterval ) {
-            super(PerSampleReadStateManagerTest.class);
-
-            this.readCountsPerAlignmentStart = readCountsPerAlignmentStart;
-            this.removalInterval = removalInterval;
-
-            reads = new ArrayList<SAMRecord>();
-            recordStatesByAlignmentStart = new ArrayList<ArrayList<AlignmentStateMachine>>();
-
-            setName(String.format("%s: readCountsPerAlignmentStart: %s  removalInterval: %d",
-                    getClass().getSimpleName(), readCountsPerAlignmentStart, removalInterval));
-        }
-
-        public void run() {
-            PerSampleReadStateManager perSampleReadStateManager = new PerSampleReadStateManager(LocusIteratorByState.NO_DOWNSAMPLING);
-
-            makeReads();
-
-            for ( ArrayList<AlignmentStateMachine> stackRecordStates : recordStatesByAlignmentStart ) {
-                perSampleReadStateManager.addStatesAtNextAlignmentStart(new LinkedList<AlignmentStateMachine>(stackRecordStates));
-            }
-
-            // read state manager should have the right number of reads
-            Assert.assertEquals(reads.size(), perSampleReadStateManager.size());
-
-            Iterator<SAMRecord> originalReadsIterator = reads.iterator();
-            Iterator<AlignmentStateMachine> recordStateIterator = perSampleReadStateManager.iterator();
-            int recordStateCount = 0;
-            int numReadStatesRemoved = 0;
-
-            // Do a first-pass validation of the record state iteration by making sure we get back everything we
-            // put in, in the same order, doing any requested removals of read states along the way
-            while ( recordStateIterator.hasNext() ) {
-                AlignmentStateMachine readState = recordStateIterator.next();
-                recordStateCount++;
-                SAMRecord readFromPerSampleReadStateManager = readState.getRead();
-
-                Assert.assertTrue(originalReadsIterator.hasNext());
-                SAMRecord originalRead = originalReadsIterator.next();
-
-                // The read we get back should be literally the same read in memory as we put in
-                Assert.assertTrue(originalRead == readFromPerSampleReadStateManager);
-
-                // If requested, remove a read state every removalInterval states
-                if ( removalInterval > 0 && recordStateCount % removalInterval == 0 ) {
-                    recordStateIterator.remove();
-                    numReadStatesRemoved++;
-                }
-            }
-
-            Assert.assertFalse(originalReadsIterator.hasNext());
-
-            // If we removed any read states, do a second pass through the read states to make sure the right
-            // states were removed
-            if ( numReadStatesRemoved > 0 ) {
-                Assert.assertEquals(perSampleReadStateManager.size(), reads.size() - numReadStatesRemoved);
-
-                originalReadsIterator = reads.iterator();
-                recordStateIterator = perSampleReadStateManager.iterator();
-                int readCount = 0;
-                int readStateCount = 0;
-
-                // Match record states with the reads that should remain after removal
-                while ( recordStateIterator.hasNext() ) {
-                    AlignmentStateMachine readState = recordStateIterator.next();
-                    readStateCount++;
-                    SAMRecord readFromPerSampleReadStateManager = readState.getRead();
-
-                    Assert.assertTrue(originalReadsIterator.hasNext());
-
-                    SAMRecord originalRead = originalReadsIterator.next();
-                    readCount++;
-
-                    if ( readCount % removalInterval == 0 ) {
-                        originalRead = originalReadsIterator.next(); // advance to next read, since the previous one should have been discarded
-                        readCount++;
-                    }
-
-                    // The read we get back should be literally the same read in memory as we put in (after accounting for removals)
-                    Assert.assertTrue(originalRead == readFromPerSampleReadStateManager);
-                }
-
-                Assert.assertEquals(readStateCount, reads.size() - numReadStatesRemoved);
-            }
-
-            // Allow memory used by this test to be reclaimed
-            readCountsPerAlignmentStart = null;
-            reads = null;
-            recordStatesByAlignmentStart = null;
-        }
-
-        private void makeReads() {
-            int alignmentStart = 1;
-
-            for ( int readsThisStack : readCountsPerAlignmentStart ) {
-                ArrayList<GATKSAMRecord> stackReads = new ArrayList<GATKSAMRecord>(ArtificialSAMUtils.createStackOfIdenticalArtificialReads(readsThisStack, header, "foo", 0, alignmentStart, MathUtils.randomIntegerInRange(50, 100)));
-                ArrayList<AlignmentStateMachine> stackRecordStates = new ArrayList<AlignmentStateMachine>();
-
-                for ( GATKSAMRecord read : stackReads ) {
-                    stackRecordStates.add(new AlignmentStateMachine(read));
-                }
-
-                reads.addAll(stackReads);
-                recordStatesByAlignmentStart.add(stackRecordStates);
-            }
-        }
-    }
-
-    @DataProvider(name = "PerSampleReadStateManagerTestDataProvider")
-    public Object[][] createPerSampleReadStateManagerTests() {
-        for ( List<Integer> thisTestReadStateCounts : Arrays.asList( Arrays.asList(1),
-                Arrays.asList(2),
-                Arrays.asList(10),
-                Arrays.asList(1, 1),
-                Arrays.asList(2, 2),
-                Arrays.asList(10, 10),
-                Arrays.asList(1, 10),
-                Arrays.asList(10, 1),
-                Arrays.asList(1, 1, 1),
-                Arrays.asList(2, 2, 2),
-                Arrays.asList(10, 10, 10),
-                Arrays.asList(1, 1, 1, 1, 1, 1),
-                Arrays.asList(10, 10, 10, 10, 10, 10),
-                Arrays.asList(1, 2, 10, 1, 2, 10)
-        ) ) {
-
-            for ( int removalInterval : Arrays.asList(0, 2, 3) ) {
-                new PerSampleReadStateManagerTest(thisTestReadStateCounts, removalInterval);
-            }
-        }
-
-        return PerSampleReadStateManagerTest.getTests(PerSampleReadStateManagerTest.class);
-    }
-
-    @Test(dataProvider = "PerSampleReadStateManagerTestDataProvider")
-    public void runPerSampleReadStateManagerTest( PerSampleReadStateManagerTest test ) {
-        logger.warn("Running test: " + test);
-
-        test.run();
-    }
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/nanoScheduler/InputProducerUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/nanoScheduler/InputProducerUnitTest.java
deleted file mode 100644
index d99a079..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/nanoScheduler/InputProducerUnitTest.java
+++ /dev/null
@@ -1,94 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.nanoScheduler;
-
-import org.broadinstitute.gatk.utils.BaseTest;
-import org.testng.Assert;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
-
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-
-/**
-* UnitTests for the InputProducer
-*
-* User: depristo
-* Date: 8/24/12
-* Time: 11:25 AM
-* To change this template use File | Settings | File Templates.
-*/
-public class InputProducerUnitTest extends BaseTest {
-    @DataProvider(name = "InputProducerTest")
-    public Object[][] createInputProducerTest() {
-        List<Object[]> tests = new ArrayList<Object[]>();
-
-        for ( final int nElements : Arrays.asList(0, 1, 10, 100, 1000, 10000, 100000) ) {
-            for ( final int queueSize : Arrays.asList(1, 10, 100) ) {
-                tests.add(new Object[]{ nElements, queueSize });
-            }
-        }
-
-        return tests.toArray(new Object[][]{});
-    }
-
-    @Test(enabled = true, dataProvider = "InputProducerTest", timeOut = NanoSchedulerUnitTest.NANO_SCHEDULE_MAX_RUNTIME)
-    public void testInputProducer(final int nElements, final int queueSize) throws InterruptedException {
-        final List<Integer> elements = new ArrayList<Integer>(nElements);
-        for ( int i = 0; i < nElements; i++ ) elements.add(i);
-
-        final InputProducer<Integer> ip = new InputProducer<Integer>(elements.iterator());
-
-        Assert.assertFalse(ip.allInputsHaveBeenRead(), "InputProvider said that all inputs have been read, but I haven't started reading yet");
-        Assert.assertEquals(ip.getNumInputValues(), -1, "InputProvider told me that the queue was done, but I haven't started reading yet");
-
-        int lastValue = -1;
-        int nRead = 0;
-        while ( ip.hasNext() ) {
-            final int nTotalElements = ip.getNumInputValues();
-
-            if ( nRead < nElements )
-                Assert.assertEquals(nTotalElements, -1, "getNumInputValues should have returned -1 with not all elements read");
-            // note, cannot test else case because elements input could have emptied between calls
-
-            final InputProducer<Integer>.InputValue value = ip.next();
-            if ( value.isEOFMarker() ) {
-                Assert.assertEquals(nRead, nElements, "Number of input values " + nRead + " not all that are expected " + nElements);
-                break;
-            } else {
-                Assert.assertTrue(lastValue < value.getValue(), "Read values coming out of order!");
-                final int expected = lastValue + 1;
-                Assert.assertEquals((int)value.getValue(), expected, "Value observed " + value.getValue() + " not equal to the expected value " + expected);
-                nRead++;
-                lastValue = value.getValue();
-            }
-        }
-
-        Assert.assertTrue(ip.allInputsHaveBeenRead(), "InputProvider said that all inputs haven't been read, but I read them all");
-        Assert.assertEquals(ip.getNumInputValues(), nElements, "Wrong number of total elements getNumInputValues");
-    }
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/nanoScheduler/MapResultUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/nanoScheduler/MapResultUnitTest.java
deleted file mode 100644
index 93105cd..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/nanoScheduler/MapResultUnitTest.java
+++ /dev/null
@@ -1,65 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.nanoScheduler;
-
-import org.broadinstitute.gatk.utils.BaseTest;
-import org.testng.Assert;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
-
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-
-/**
-* UnitTests for the InputProducer
-*
-* User: depristo
-* Date: 8/24/12
-* Time: 11:25 AM
-* To change this template use File | Settings | File Templates.
-*/
-public class MapResultUnitTest {
-    @DataProvider(name = "CompareTester")
-    public Object[][] createCompareTester() {
-        List<Object[]> tests = new ArrayList<Object[]>();
-
-        for ( int id1 = 0; id1 < 10; id1++ ) {
-            for ( int id2 = 0; id2 < 10; id2++ ) {
-                tests.add(new Object[]{ id1, id2, Integer.valueOf(id1).compareTo(id2)});
-            }
-        }
-
-        return tests.toArray(new Object[][]{});
-    }
-
-    @Test(enabled = true, dataProvider = "CompareTester")
-    public void testInputProducer(final int id1, final int id2, final int comp ) throws InterruptedException {
-        final MapResult<Integer> mr1 = new MapResult<Integer>(id1, id1);
-        final MapResult<Integer> mr2 = new MapResult<Integer>(id2, id2);
-        Assert.assertEquals(mr1.compareTo(mr2), comp, "Compare MapResultsUnitTest failed");
-    }
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/nanoScheduler/NanoSchedulerUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/nanoScheduler/NanoSchedulerUnitTest.java
deleted file mode 100644
index 72636f0..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/nanoScheduler/NanoSchedulerUnitTest.java
+++ /dev/null
@@ -1,343 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.nanoScheduler;
-
-import org.apache.log4j.BasicConfigurator;
-import org.broadinstitute.gatk.utils.BaseTest;
-import org.broadinstitute.gatk.utils.SimpleTimer;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-import org.testng.Assert;
-import org.testng.annotations.BeforeSuite;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
-
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Iterator;
-import java.util.List;
-
-/**
- * UnitTests for the NanoScheduler
- *
- * User: depristo
- * Date: 8/24/12
- * Time: 11:25 AM
- * To change this template use File | Settings | File Templates.
- */
-public class NanoSchedulerUnitTest extends BaseTest {
-    private final static boolean DEBUG = false;
-    private final static boolean debug = false;
-    public static final int NANO_SCHEDULE_MAX_RUNTIME = 30000;
-    public static final int EXCEPTION_THROWING_TEST_TIMEOUT = 10000;
-
-    private static class Map2x implements NSMapFunction<Integer, Integer> {
-        @Override public Integer apply(Integer input) { return input * 2; }
-    }
-
-    private static void maybeDelayMe(final int input) {
-        try {
-            if ( input % 7 == 0 ) {
-                final int milliToSleep = (input % 10);
-                //System.out.printf("Sleeping %d millseconds%n", milliToSleep);
-                Thread.sleep(milliToSleep);
-            }
-        } catch ( InterruptedException ex ) {
-            throw new RuntimeException(ex);
-        }
-    }
-
-    private static class Map2xWithDelays extends Map2x {
-        @Override public Integer apply(Integer input) {
-            maybeDelayMe(input);
-            return input * 2;
-        }
-    }
-
-    private static class ReduceSum implements NSReduceFunction<Integer, Integer> {
-        int prevOne = Integer.MIN_VALUE;
-
-        @Override public Integer apply(Integer one, Integer sum) {
-            Assert.assertTrue(prevOne < one, "Reduce came in out of order.  Prev " + prevOne + " cur " + one);
-            return one + sum;
-        }
-    }
-
-    private static class ProgressCallback implements NSProgressFunction<Integer> {
-        int callBacks = 0;
-
-        @Override
-        public void progress(Integer lastMapInput) {
-            callBacks++;
-        }
-    }
-
-
-    private static int sum2x(final int start, final int end) {
-        int sum = 0;
-        for ( int i = start; i < end; i++ )
-            sum += 2 * i;
-        return sum;
-    }
-
-    private static class NanoSchedulerBasicTest extends TestDataProvider {
-        final int bufferSize, nThreads, start, end, expectedResult;
-        final boolean addDelays;
-
-        public NanoSchedulerBasicTest(final int bufferSize, final int nThreads, final int start, final int end, final boolean addDelays) {
-            super(NanoSchedulerBasicTest.class);
-            this.bufferSize = bufferSize;
-            this.nThreads = nThreads;
-            this.start = start;
-            this.end = end;
-            this.expectedResult = sum2x(start, end);
-            this.addDelays = addDelays;
-            setName(String.format("%s nt=%d buf=%d start=%d end=%d sum=%d delays=%b",
-                    getClass().getSimpleName(), nThreads, bufferSize, start, end, expectedResult, addDelays));
-        }
-
-        public Iterator<Integer> makeReader() {
-            final List<Integer> ints = new ArrayList<Integer>();
-            for ( int i = start; i < end; i++ )
-                ints.add(i);
-            return ints.iterator();
-        }
-
-        public int nExpectedCallbacks() {
-            int nElements = Math.max(end - start, 0);
-            return nElements / bufferSize / NanoScheduler.UPDATE_PROGRESS_FREQ;
-        }
-
-        public Map2x makeMap() { return addDelays ? new Map2xWithDelays() : new Map2x(); }
-        public Integer initReduce() { return 0; }
-        public ReduceSum makeReduce() { return new ReduceSum(); }
-
-        public NanoScheduler<Integer, Integer, Integer> makeScheduler() {
-            final NanoScheduler <Integer, Integer, Integer> nano;
-            if ( bufferSize == -1 )
-                nano = new NanoScheduler<Integer, Integer, Integer>(nThreads);
-            else
-                nano = new NanoScheduler<Integer, Integer, Integer>(bufferSize, nThreads);
-
-            nano.setDebug(debug);
-            return nano;
-        }
-    }
-
-    static NanoSchedulerBasicTest exampleTest = null;
-    static NanoSchedulerBasicTest exampleTestWithDelays = null;
-
-    @BeforeSuite
-    public void setUp() throws Exception {
-        exampleTest = new NanoSchedulerBasicTest(10, 2, 1, 10, false);
-        exampleTestWithDelays = new NanoSchedulerBasicTest(10, 2, 1, 10, true);
-    }
-
-    @DataProvider(name = "NanoSchedulerBasicTest")
-    public Object[][] createNanoSchedulerBasicTest() {
-//        for ( final int bufferSize : Arrays.asList(1, 10) ) {
-//            for ( final int nt : Arrays.asList(1, 2, 4) ) {
-//                for ( final int start : Arrays.asList(0) ) {
-//                    for ( final int end : Arrays.asList(0, 1, 2) ) {
-//                        exampleTest = new NanoSchedulerBasicTest(bufferSize, nt, start, end, false);
-//                    }
-//                }
-//            }
-//        }
-
-        for ( final int bufferSize : Arrays.asList(-1, 1, 10, 100) ) {
-            for ( final int nt : Arrays.asList(1, 2, 4) ) {
-                for ( final int start : Arrays.asList(0) ) {
-                    for ( final int end : Arrays.asList(0, 1, 2, 11, 100, 10000, 100000) ) {
-                        for ( final boolean addDelays : Arrays.asList(true, false) ) {
-                            if ( end < 1000 )
-                                new NanoSchedulerBasicTest(bufferSize, nt, start, end, addDelays);
-                        }
-                    }
-                }
-            }
-        }
-
-        return NanoSchedulerBasicTest.getTests(NanoSchedulerBasicTest.class);
-    }
-
-    @Test(enabled = true && ! DEBUG, dataProvider = "NanoSchedulerBasicTest", timeOut = NANO_SCHEDULE_MAX_RUNTIME)
-    public void testSingleThreadedNanoScheduler(final NanoSchedulerBasicTest test) throws InterruptedException {
-        logger.warn("Running " + test);
-        if ( test.nThreads == 1 )
-            testNanoScheduler(test);
-    }
-
-    @Test(enabled = true && ! DEBUG, dataProvider = "NanoSchedulerBasicTest", timeOut = NANO_SCHEDULE_MAX_RUNTIME, dependsOnMethods = "testSingleThreadedNanoScheduler")
-    public void testMultiThreadedNanoScheduler(final NanoSchedulerBasicTest test) throws InterruptedException {
-        logger.warn("Running " + test);
-        if ( test.nThreads >= 1 )
-            testNanoScheduler(test);
-    }
-
-    private void testNanoScheduler(final NanoSchedulerBasicTest test) throws InterruptedException {
-        final SimpleTimer timer = new SimpleTimer().start();
-        final NanoScheduler<Integer, Integer, Integer> nanoScheduler = test.makeScheduler();
-
-        final ProgressCallback callback = new ProgressCallback();
-        nanoScheduler.setProgressFunction(callback);
-
-        if ( test.bufferSize > -1 )
-            Assert.assertEquals(nanoScheduler.getBufferSize(), test.bufferSize, "bufferSize argument");
-        Assert.assertEquals(nanoScheduler.getnThreads(), test.nThreads, "nThreads argument");
-
-        final Integer sum = nanoScheduler.execute(test.makeReader(), test.makeMap(), test.initReduce(), test.makeReduce());
-        Assert.assertNotNull(sum);
-        Assert.assertEquals((int)sum, test.expectedResult, "NanoScheduler sum not the same as calculated directly");
-
-        Assert.assertTrue(callback.callBacks >= test.nExpectedCallbacks(), "Not enough callbacks detected.  Expected at least " + test.nExpectedCallbacks() + " but saw only " + callback.callBacks);
-        nanoScheduler.shutdown();
-    }
-
-    @Test(enabled = true && ! DEBUG, dataProvider = "NanoSchedulerBasicTest", dependsOnMethods = "testMultiThreadedNanoScheduler", timeOut = 2 * NANO_SCHEDULE_MAX_RUNTIME)
-    public void testNanoSchedulerInLoop(final NanoSchedulerBasicTest test) throws InterruptedException {
-        if ( test.bufferSize > 1) {
-            logger.warn("Running " + test);
-
-            final NanoScheduler<Integer, Integer, Integer> nanoScheduler = test.makeScheduler();
-
-            // test reusing the scheduler
-            for ( int i = 0; i < 10; i++ ) {
-                final Integer sum = nanoScheduler.execute(test.makeReader(), test.makeMap(), test.initReduce(), test.makeReduce());
-                Assert.assertNotNull(sum);
-                Assert.assertEquals((int)sum, test.expectedResult, "NanoScheduler sum not the same as calculated directly");
-            }
-
-            nanoScheduler.shutdown();
-        }
-    }
-
-    @Test(enabled = true && ! DEBUG, timeOut = NANO_SCHEDULE_MAX_RUNTIME)
-    public void testShutdown() throws InterruptedException {
-        final NanoScheduler<Integer, Integer, Integer> nanoScheduler = new NanoScheduler<Integer, Integer, Integer>(1, 2);
-        Assert.assertFalse(nanoScheduler.isShutdown(), "scheduler should be alive");
-        nanoScheduler.shutdown();
-        Assert.assertTrue(nanoScheduler.isShutdown(), "scheduler should be dead");
-    }
-
-    @Test(enabled = true && ! DEBUG, expectedExceptions = IllegalStateException.class, timeOut = NANO_SCHEDULE_MAX_RUNTIME)
-    public void testShutdownExecuteFailure() throws InterruptedException {
-        final NanoScheduler<Integer, Integer, Integer> nanoScheduler = new NanoScheduler<Integer, Integer, Integer>(1, 2);
-        nanoScheduler.shutdown();
-        nanoScheduler.execute(exampleTest.makeReader(), exampleTest.makeMap(), exampleTest.initReduce(), exampleTest.makeReduce());
-    }
-
-    @DataProvider(name = "NanoSchedulerInputExceptionTest")
-    public Object[][] createNanoSchedulerInputExceptionTest() {
-        List<Object[]> tests = new ArrayList<Object[]>();
-
-
-        for ( final int bufSize : Arrays.asList(100) ) {
-            for ( final int nThreads : Arrays.asList(8) ) {
-                for ( final boolean addDelays : Arrays.asList(true, false) ) {
-                    final NanoSchedulerBasicTest test = new NanoSchedulerBasicTest(bufSize, nThreads, 1, 1000000, false);
-                    final int maxN = addDelays ? 1000 : 10000;
-                    for ( int nElementsBeforeError = 0; nElementsBeforeError < maxN; nElementsBeforeError += Math.max(nElementsBeforeError / 10, 1) ) {
-                        tests.add(new Object[]{nElementsBeforeError, test, addDelays});
-                    }
-                }
-            }
-        }
-
-        return tests.toArray(new Object[][]{});
-    }
-
-    @Test(enabled = true, expectedExceptions = NullPointerException.class, timeOut = EXCEPTION_THROWING_TEST_TIMEOUT)
-    public void testInputErrorIsThrown_NPE() throws InterruptedException {
-        executeTestErrorThrowingInput(10, new NullPointerException(), exampleTest, false);
-    }
-
-    @Test(enabled = true, expectedExceptions = ReviewedGATKException.class, timeOut = EXCEPTION_THROWING_TEST_TIMEOUT)
-    public void testInputErrorIsThrown_RSE() throws InterruptedException {
-        executeTestErrorThrowingInput(10, new ReviewedGATKException("test"), exampleTest, false);
-    }
-
-    @Test(enabled = true, expectedExceptions = NullPointerException.class, dataProvider = "NanoSchedulerInputExceptionTest", timeOut = EXCEPTION_THROWING_TEST_TIMEOUT, invocationCount = 1)
-    public void testInputRuntimeExceptionDoesntDeadlock(final int nElementsBeforeError, final NanoSchedulerBasicTest test, final boolean addDelays ) throws InterruptedException {
-        executeTestErrorThrowingInput(nElementsBeforeError, new NullPointerException(), test, addDelays);
-    }
-
-    @Test(enabled = true, expectedExceptions = ReviewedGATKException.class, dataProvider = "NanoSchedulerInputExceptionTest", timeOut = EXCEPTION_THROWING_TEST_TIMEOUT, invocationCount = 1)
-    public void testInputErrorDoesntDeadlock(final int nElementsBeforeError, final NanoSchedulerBasicTest test, final boolean addDelays ) throws InterruptedException {
-        executeTestErrorThrowingInput(nElementsBeforeError, new Error(), test, addDelays);
-    }
-
-    private void executeTestErrorThrowingInput(final int nElementsBeforeError, final Throwable ex, final NanoSchedulerBasicTest test, final boolean addDelays) {
-        logger.warn("executeTestErrorThrowingInput " + nElementsBeforeError + " ex=" + ex + " test=" + test + " addInputDelays=" + addDelays);
-        final NanoScheduler<Integer, Integer, Integer> nanoScheduler = test.makeScheduler();
-        nanoScheduler.execute(new ErrorThrowingIterator(nElementsBeforeError, ex, addDelays), test.makeMap(), test.initReduce(), test.makeReduce());
-    }
-
-    private static class ErrorThrowingIterator implements Iterator<Integer> {
-        final int nElementsBeforeError;
-        final boolean addDelays;
-        int i = 0;
-        final Throwable ex;
-
-        private ErrorThrowingIterator(final int nElementsBeforeError, Throwable ex, boolean addDelays) {
-            this.nElementsBeforeError = nElementsBeforeError;
-            this.ex = ex;
-            this.addDelays = addDelays;
-        }
-
-        @Override public boolean hasNext() { return true; }
-        @Override public Integer next() {
-            if ( i++ > nElementsBeforeError ) {
-                if ( ex instanceof Error )
-                    throw (Error)ex;
-                else if ( ex instanceof RuntimeException )
-                    throw (RuntimeException)ex;
-                else
-                    throw new RuntimeException("Bad exception " + ex);
-            } else if ( addDelays ) {
-                maybeDelayMe(i);
-                return i;
-            } else {
-                return i;
-            }
-        }
-        @Override public void remove() { throw new UnsupportedOperationException("x"); }
-    }
-
-    public static void main(String [ ] args) {
-        org.apache.log4j.Logger logger = org.apache.log4j.Logger.getRootLogger();
-        BasicConfigurator.configure();
-        logger.setLevel(org.apache.log4j.Level.DEBUG);
-
-        final NanoSchedulerBasicTest test = new NanoSchedulerBasicTest(1000, Integer.valueOf(args[0]), 0, Integer.valueOf(args[1]), false);
-        final NanoScheduler<Integer, Integer, Integer> nanoScheduler =
-                new NanoScheduler<Integer, Integer, Integer>(test.bufferSize, test.nThreads);
-        nanoScheduler.setDebug(true);
-
-        final Integer sum = nanoScheduler.execute(test.makeReader(), test.makeMap(), test.initReduce(), test.makeReduce());
-        System.out.printf("Sum = %d, expected =%d%n", sum, test.expectedResult);
-        nanoScheduler.shutdown();
-    }
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/nanoScheduler/ReducerUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/nanoScheduler/ReducerUnitTest.java
deleted file mode 100644
index 987d13f..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/nanoScheduler/ReducerUnitTest.java
+++ /dev/null
@@ -1,236 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.nanoScheduler;
-
-import org.broadinstitute.gatk.utils.BaseTest;
-import org.broadinstitute.gatk.utils.MultiThreadedErrorTracker;
-import org.broadinstitute.gatk.utils.Utils;
-import org.testng.Assert;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
-
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.List;
-import java.util.concurrent.*;
-
-/**
- * UnitTests for Reducer
- *
- * User: depristo
- * Date: 8/24/12
- * Time: 11:25 AM
- * To change this template use File | Settings | File Templates.
- */
-public class ReducerUnitTest extends BaseTest {
-    @DataProvider(name = "ReducerThreadTest")
-    public Object[][] createReducerThreadTest() {
-        List<Object[]> tests = new ArrayList<Object[]>();
-
-        for ( final int groupSize : Arrays.asList(-1, 1, 5, 50, 500, 5000, 50000) ) {
-            for ( final int nElements : Arrays.asList(0, 1, 3, 5) ) {
-                if ( groupSize < nElements ) {
-                    for ( final List<MapResult<Integer>> jobs : Utils.makePermutations(makeJobs(nElements), nElements, false) ) {
-                        tests.add(new Object[]{ new ListOfJobs(jobs), groupSize });
-                    }
-                }
-            }
-
-            for ( final int nElements : Arrays.asList(10, 100, 1000, 10000, 100000, 1000000) ) {
-                if ( groupSize < nElements ) {
-                    tests.add(new Object[]{ new ListOfJobs(makeJobs(nElements)), groupSize });
-                }
-            }
-        }
-
-        return tests.toArray(new Object[][]{});
-    }
-
-    private static class ListOfJobs extends ArrayList<MapResult<Integer>> {
-        private ListOfJobs(Collection<? extends MapResult<Integer>> c) {
-            super(c);
-        }
-
-        @Override
-        public String toString() {
-            if ( size() < 10 )
-                return super.toString();
-            else
-                return "JobList of " + size();
-        }
-    }
-
-    private static List<MapResult<Integer>> makeJobs(final int nElements) {
-        List<MapResult<Integer>> jobs = new ArrayList<MapResult<Integer>>(nElements);
-        for ( int i = 0; i < nElements; i++ ) {
-            jobs.add(new MapResult<Integer>(i, i));
-        }
-        return jobs;
-    }
-
-    private int expectedSum(final List<MapResult<Integer>> jobs) {
-        int sum = 0;
-        for ( final MapResult<Integer> job : jobs )
-            sum += job.getValue();
-        return sum;
-    }
-
-    @Test(enabled = true, dataProvider = "ReducerThreadTest", timeOut = NanoSchedulerUnitTest.NANO_SCHEDULE_MAX_RUNTIME)
-    public void testReducerThread(final List<MapResult<Integer>> allJobs, int groupSize) throws Exception {
-        if ( groupSize == -1 )
-            groupSize = allJobs.size();
-
-        final MapResultsQueue<Integer> mapResultsQueue = new MapResultsQueue<Integer>();
-
-        final List<List<MapResult<Integer>>> jobGroups = Utils.groupList(allJobs, groupSize);
-        final ReduceSumTest reduce = new ReduceSumTest();
-        final Reducer<Integer, Integer> reducer = new Reducer<Integer, Integer>(reduce, new MultiThreadedErrorTracker(), 0);
-
-        final TestWaitingForFinalReduce waitingThread = new TestWaitingForFinalReduce(reducer, expectedSum(allJobs));
-        final ExecutorService es = Executors.newSingleThreadExecutor();
-        es.submit(waitingThread);
-
-        int lastJobID = -1;
-        int nJobsSubmitted = 0;
-        int jobGroupCount = 0;
-        final int lastJobGroupCount = jobGroups.size() - 1;
-
-        for ( final List<MapResult<Integer>> jobs : jobGroups ) {
-            //logger.warn("Processing job group " + jobGroupCount + " with " + jobs.size() + " jobs");
-            for ( final MapResult<Integer> job : jobs ) {
-                lastJobID = Math.max(lastJobID, job.getJobID());
-                mapResultsQueue.put(job);
-                nJobsSubmitted++;
-            }
-
-            if ( jobGroupCount == lastJobGroupCount ) {
-                mapResultsQueue.put(new MapResult<Integer>(lastJobID+1));
-                nJobsSubmitted++;
-            }
-
-            final int nReduced = reducer.reduceAsMuchAsPossible(mapResultsQueue, true);
-            Assert.assertTrue(nReduced <= nJobsSubmitted, "Somehow reduced more jobs than submitted");
-
-            jobGroupCount++;
-        }
-
-        Assert.assertEquals(reduce.nRead, allJobs.size(), "number of read values not all of the values in the reducer queue");
-        es.shutdown();
-        es.awaitTermination(1, TimeUnit.HOURS);
-    }
-
-    @Test(timeOut = 1000, invocationCount = 100)
-    private void testNonBlockingReduce() throws Exception {
-        final Reducer<Integer, Integer> reducer = new Reducer<Integer, Integer>(new ReduceSumTest(), new MultiThreadedErrorTracker(), 0);
-        final MapResultsQueue<Integer> mapResultsQueue = new MapResultsQueue<Integer>();
-        mapResultsQueue.put(new MapResult<Integer>(0, 0));
-        mapResultsQueue.put(new MapResult<Integer>(1, 1));
-
-        final CountDownLatch latch = new CountDownLatch(1);
-        final ExecutorService es = Executors.newSingleThreadExecutor();
-
-        es.submit(new Runnable() {
-            @Override
-            public void run() {
-                reducer.acquireReduceLock(true);
-                latch.countDown();
-            }
-        });
-
-        latch.await();
-        final int nReduced = reducer.reduceAsMuchAsPossible(mapResultsQueue, false);
-        Assert.assertEquals(nReduced, 0, "The reducer lock was already held but we did some work");
-        es.shutdown();
-        es.awaitTermination(1, TimeUnit.HOURS);
-    }
-
-    @Test(timeOut = 10000, invocationCount = 100)
-    private void testBlockingReduce() throws Exception {
-        final Reducer<Integer, Integer> reducer = new Reducer<Integer, Integer>(new ReduceSumTest(), new MultiThreadedErrorTracker(), 0);
-        final MapResultsQueue<Integer> mapResultsQueue = new MapResultsQueue<Integer>();
-        mapResultsQueue.put(new MapResult<Integer>(0, 0));
-        mapResultsQueue.put(new MapResult<Integer>(1, 1));
-
-        final CountDownLatch latch = new CountDownLatch(1);
-        final ExecutorService es = Executors.newSingleThreadExecutor();
-
-        es.submit(new Runnable() {
-            @Override
-            public void run() {
-                reducer.acquireReduceLock(true);
-                latch.countDown();
-                try {
-                    Thread.sleep(100);
-                } catch ( InterruptedException e ) {
-                    ;
-                } finally {
-                    reducer.releaseReduceLock();
-                }
-            }
-        });
-
-        latch.await();
-        final int nReduced = reducer.reduceAsMuchAsPossible(mapResultsQueue, true);
-        Assert.assertEquals(nReduced, 2, "The reducer should have blocked until the lock was freed and reduced 2 values");
-        es.shutdown();
-        es.awaitTermination(1, TimeUnit.HOURS);
-    }
-
-
-    public class ReduceSumTest implements NSReduceFunction<Integer, Integer> {
-        int nRead = 0;
-        int lastValue = -1;
-
-        @Override public Integer apply(Integer one, Integer sum) {
-            Assert.assertTrue(lastValue < one, "Reduce came in out of order.  Prev " + lastValue + " cur " + one);
-
-            Assert.assertTrue(lastValue < one, "Read values coming out of order!");
-            final int expected = lastValue + 1;
-            Assert.assertEquals((int)one, expected, "Value observed " + one + " not equal to the expected value " + expected);
-            nRead++;
-            lastValue = expected;
-
-            return one + sum;
-        }
-    }
-
-    final static class TestWaitingForFinalReduce implements Runnable {
-        final Reducer<Integer, Integer> reducer;
-        final int expectedSum;
-
-        TestWaitingForFinalReduce(Reducer<Integer, Integer> reducer, final int expectedSum) {
-            this.reducer = reducer;
-            this.expectedSum = expectedSum;
-        }
-
-        @Override
-        public void run() {
-            final int observedSum = reducer.getReduceResult();
-            Assert.assertEquals(observedSum, expectedSum, "Reduce didn't sum to expected value");
-        }
-    }
-}
\ No newline at end of file
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/pileup/PileupElementUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/pileup/PileupElementUnitTest.java
deleted file mode 100644
index 90d235f..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/pileup/PileupElementUnitTest.java
+++ /dev/null
@@ -1,189 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.pileup;
-
-import htsjdk.samtools.CigarElement;
-import htsjdk.samtools.CigarOperator;
-import org.broadinstitute.gatk.utils.Utils;
-import org.broadinstitute.gatk.utils.locusiterator.AlignmentStateMachine;
-import org.broadinstitute.gatk.utils.locusiterator.LIBS_position;
-import org.broadinstitute.gatk.utils.locusiterator.LocusIteratorByStateBaseTest;
-import org.broadinstitute.gatk.utils.sam.ArtificialSAMUtils;
-import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
-import org.testng.Assert;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
-
-import java.util.Arrays;
-import java.util.LinkedList;
-import java.util.List;
-
-/**
- * testing of the new (non-legacy) version of LocusIteratorByState
- */
-public class PileupElementUnitTest extends LocusIteratorByStateBaseTest {
-    @DataProvider(name = "PileupElementTest")
-    public Object[][] makePileupElementTest() {
-//        return new Object[][]{{new LIBSTest("2X2D2P2X")}};
-//        return createLIBSTests(
-//                Arrays.asList(2),
-//                Arrays.asList(2));
-        return createLIBSTests(
-                Arrays.asList(1, 2),
-                Arrays.asList(1, 2, 3, 4));
-    }
-
-    @Test(dataProvider = "PileupElementTest")
-    public void testPileupElementTest(LIBSTest params) {
-        final GATKSAMRecord read = params.makeRead();
-        final AlignmentStateMachine state = new AlignmentStateMachine(read);
-        final LIBS_position tester = new LIBS_position(read);
-
-        while ( state.stepForwardOnGenome() != null ) {
-            tester.stepForwardOnGenome();
-            final PileupElement pe = state.makePileupElement();
-
-            Assert.assertEquals(pe.getRead(), read);
-            Assert.assertEquals(pe.getMappingQual(), read.getMappingQuality());
-            Assert.assertEquals(pe.getOffset(), state.getReadOffset());
-
-            Assert.assertEquals(pe.isDeletion(), state.getCigarOperator() == CigarOperator.D);
-            Assert.assertEquals(pe.isAfterInsertion(), tester.isAfterInsertion);
-            Assert.assertEquals(pe.isBeforeInsertion(), tester.isBeforeInsertion);
-            Assert.assertEquals(pe.isNextToSoftClip(), tester.isNextToSoftClip);
-
-            if ( ! hasNeighboringPaddedOps(params.getElements(), pe.getCurrentCigarOffset()) ) {
-                Assert.assertEquals(pe.isAfterDeletionEnd(), tester.isAfterDeletionEnd);
-                Assert.assertEquals(pe.isBeforeDeletionStart(), tester.isBeforeDeletionStart);
-            }
-
-
-
-            Assert.assertEquals(pe.atEndOfCurrentCigar(), state.getOffsetIntoCurrentCigarElement() == state.getCurrentCigarElement().getLength() - 1, "atEndOfCurrentCigar failed");
-            Assert.assertEquals(pe.atStartOfCurrentCigar(), state.getOffsetIntoCurrentCigarElement() == 0, "atStartOfCurrentCigar failed");
-
-            Assert.assertEquals(pe.getBase(), pe.isDeletion() ? PileupElement.DELETION_BASE : read.getReadBases()[state.getReadOffset()]);
-            Assert.assertEquals(pe.getQual(), pe.isDeletion() ? PileupElement.DELETION_QUAL : read.getBaseQualities()[state.getReadOffset()]);
-
-            Assert.assertEquals(pe.getCurrentCigarElement(), state.getCurrentCigarElement());
-            Assert.assertEquals(pe.getCurrentCigarOffset(), state.getCurrentCigarElementOffset());
-
-            // tested in libs
-            //pe.getLengthOfImmediatelyFollowingIndel();
-            //pe.getBasesOfImmediatelyFollowingInsertion();
-
-            // Don't test -- pe.getBaseIndex();
-            if ( pe.atEndOfCurrentCigar() && state.getCurrentCigarElementOffset() < read.getCigarLength() - 1 ) {
-                final CigarElement nextElement = read.getCigar().getCigarElement(state.getCurrentCigarElementOffset() + 1);
-                if ( nextElement.getOperator() == CigarOperator.I ) {
-                    Assert.assertTrue(pe.getBetweenNextPosition().size() >= 1);
-                    Assert.assertEquals(pe.getBetweenNextPosition().get(0), nextElement);
-                }
-                if ( nextElement.getOperator() == CigarOperator.M ) {
-                    Assert.assertTrue(pe.getBetweenNextPosition().isEmpty());
-                }
-            } else {
-                Assert.assertTrue(pe.getBetweenNextPosition().isEmpty());
-            }
-
-            if ( pe.atStartOfCurrentCigar() && state.getCurrentCigarElementOffset() > 0 ) {
-                final CigarElement prevElement = read.getCigar().getCigarElement(state.getCurrentCigarElementOffset() - 1);
-                if ( prevElement.getOperator() == CigarOperator.I ) {
-                    Assert.assertTrue(pe.getBetweenPrevPosition().size() >= 1);
-                    Assert.assertEquals(pe.getBetweenPrevPosition().getLast(), prevElement);
-                }
-                if ( prevElement.getOperator() == CigarOperator.M ) {
-                    Assert.assertTrue(pe.getBetweenPrevPosition().isEmpty());
-                }
-            } else {
-                Assert.assertTrue(pe.getBetweenPrevPosition().isEmpty());
-            }
-
-            // TODO -- add meaningful tests
-            pe.getBaseInsertionQual();
-            pe.getBaseDeletionQual();
-        }
-    }
-
-
-    @DataProvider(name = "PrevAndNextTest")
-    public Object[][] makePrevAndNextTest() {
-        final List<Object[]> tests = new LinkedList<Object[]>();
-
-        final List<CigarOperator> operators = Arrays.asList(CigarOperator.I, CigarOperator.P, CigarOperator.S);
-
-        for ( final CigarOperator firstOp : Arrays.asList(CigarOperator.M) ) {
-            for ( final CigarOperator lastOp : Arrays.asList(CigarOperator.M, CigarOperator.D) ) {
-                for ( final int nIntermediate : Arrays.asList(1, 2, 3) ) {
-                    for ( final List<CigarOperator> combination : Utils.makePermutations(operators, nIntermediate, false) ) {
-                        final int readLength = 2 + combination.size();
-                        GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(header, "read", 0, 1, readLength);
-                        read.setReadBases(Utils.dupBytes((byte) 'A', readLength));
-                        read.setBaseQualities(Utils.dupBytes((byte) 30, readLength));
-
-                        String cigar = "1" + firstOp;
-                        for ( final CigarOperator op : combination ) cigar += "1" + op;
-                        cigar += "1" + lastOp;
-                        read.setCigarString(cigar);
-
-                        tests.add(new Object[]{read, firstOp, lastOp, combination});
-                    }
-                }
-            }
-        }
-
-        return tests.toArray(new Object[][]{});
-    }
-
-    @Test(dataProvider = "PrevAndNextTest")
-    public void testPrevAndNextTest(final GATKSAMRecord read, final CigarOperator firstOp, final CigarOperator lastOp, final List<CigarOperator> ops) {
-        final AlignmentStateMachine state = new AlignmentStateMachine(read);
-
-        state.stepForwardOnGenome();
-        final PileupElement pe = state.makePileupElement();
-        Assert.assertEquals(pe.getBetweenNextPosition().size(), ops.size());
-        Assert.assertEquals(pe.getBetweenPrevPosition().size(), 0);
-        assertEqualsOperators(pe.getBetweenNextPosition(), ops);
-        Assert.assertEquals(pe.getPreviousOnGenomeCigarElement(), null);
-        Assert.assertNotNull(pe.getNextOnGenomeCigarElement());
-        Assert.assertEquals(pe.getNextOnGenomeCigarElement().getOperator(), lastOp);
-
-        state.stepForwardOnGenome();
-        final PileupElement pe2 = state.makePileupElement();
-        Assert.assertEquals(pe2.getBetweenPrevPosition().size(), ops.size());
-        Assert.assertEquals(pe2.getBetweenNextPosition().size(), 0);
-        assertEqualsOperators(pe2.getBetweenPrevPosition(), ops);
-        Assert.assertNotNull(pe2.getPreviousOnGenomeCigarElement());
-        Assert.assertEquals(pe2.getPreviousOnGenomeCigarElement().getOperator(), firstOp);
-        Assert.assertEquals(pe2.getNextOnGenomeCigarElement(), null);
-    }
-
-    private void assertEqualsOperators(final List<CigarElement> elements, final List<CigarOperator> ops) {
-        for ( int i = 0; i < elements.size(); i++ ) {
-            Assert.assertEquals(elements.get(i).getOperator(), ops.get(i), "elements doesn't have expected operator at position " + i);
-        }
-    }
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/pileup/ReadBackedPileupUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/pileup/ReadBackedPileupUnitTest.java
deleted file mode 100644
index 9b3b3b8..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/pileup/ReadBackedPileupUnitTest.java
+++ /dev/null
@@ -1,328 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.pileup;
-
-import htsjdk.samtools.CigarElement;
-import htsjdk.samtools.SAMFileHeader;
-import htsjdk.samtools.SAMReadGroupRecord;
-import org.broadinstitute.gatk.utils.GenomeLoc;
-import org.broadinstitute.gatk.utils.GenomeLocParser;
-import org.broadinstitute.gatk.utils.Utils;
-import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
-import org.testng.Assert;
-import org.broadinstitute.gatk.utils.sam.ArtificialSAMUtils;
-
-import org.testng.annotations.BeforeClass;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
-
-import java.util.*;
-
-/**
- * Test routines for read-backed pileup.
- */
-public class ReadBackedPileupUnitTest {
-    protected static SAMFileHeader header;
-    protected GenomeLocParser genomeLocParser;
-    private GenomeLoc loc;
-
-    @BeforeClass
-    public void beforeClass() {
-        header = ArtificialSAMUtils.createArtificialSamHeader(1, 1, 1000);
-        genomeLocParser = new GenomeLocParser(header.getSequenceDictionary());
-        loc = genomeLocParser.createGenomeLoc("chr1", 1);
-    }
-
-    /**
-     * Ensure that basic read group splitting works.
-     */
-    @Test
-    public void testSplitByReadGroup() {
-        SAMReadGroupRecord readGroupOne = new SAMReadGroupRecord("rg1");
-        SAMReadGroupRecord readGroupTwo = new SAMReadGroupRecord("rg2");
-
-        SAMFileHeader header = ArtificialSAMUtils.createArtificialSamHeader(1,1,1000);
-        header.addReadGroup(readGroupOne);
-        header.addReadGroup(readGroupTwo);
-
-        GATKSAMRecord read1 = ArtificialSAMUtils.createArtificialRead(header,"read1",0,1,10);
-        read1.setAttribute("RG",readGroupOne.getId());
-        GATKSAMRecord read2 = ArtificialSAMUtils.createArtificialRead(header,"read2",0,1,10);
-        read2.setAttribute("RG",readGroupTwo.getId());
-        GATKSAMRecord read3 = ArtificialSAMUtils.createArtificialRead(header,"read3",0,1,10);
-        read3.setAttribute("RG",readGroupOne.getId());
-        GATKSAMRecord read4 = ArtificialSAMUtils.createArtificialRead(header,"read4",0,1,10);
-        read4.setAttribute("RG",readGroupTwo.getId());
-        GATKSAMRecord read5 = ArtificialSAMUtils.createArtificialRead(header,"read5",0,1,10);
-        read5.setAttribute("RG",readGroupTwo.getId());
-        GATKSAMRecord read6 = ArtificialSAMUtils.createArtificialRead(header,"read6",0,1,10);
-        read6.setAttribute("RG",readGroupOne.getId());
-        GATKSAMRecord read7 = ArtificialSAMUtils.createArtificialRead(header,"read7",0,1,10);
-        read7.setAttribute("RG",readGroupOne.getId());
-
-        ReadBackedPileup pileup = new ReadBackedPileupImpl(null, Arrays.asList(read1,read2,read3,read4,read5,read6,read7), Arrays.asList(1,1,1,1,1,1,1));
-
-        ReadBackedPileup rg1Pileup = pileup.getPileupForReadGroup("rg1");
-        List<GATKSAMRecord> rg1Reads = rg1Pileup.getReads();
-        Assert.assertEquals(rg1Reads.size(), 4, "Wrong number of reads in read group rg1");
-        Assert.assertEquals(rg1Reads.get(0), read1, "Read " + read1.getReadName() + " should be in rg1 but isn't");
-        Assert.assertEquals(rg1Reads.get(1), read3, "Read " + read3.getReadName() + " should be in rg1 but isn't");
-        Assert.assertEquals(rg1Reads.get(2), read6, "Read " + read6.getReadName() + " should be in rg1 but isn't");
-        Assert.assertEquals(rg1Reads.get(3), read7, "Read " + read7.getReadName() + " should be in rg1 but isn't");
-
-        ReadBackedPileup rg2Pileup = pileup.getPileupForReadGroup("rg2");
-        List<GATKSAMRecord> rg2Reads = rg2Pileup.getReads();        
-        Assert.assertEquals(rg2Reads.size(), 3, "Wrong number of reads in read group rg2");
-        Assert.assertEquals(rg2Reads.get(0), read2, "Read " + read2.getReadName() + " should be in rg2 but isn't");
-        Assert.assertEquals(rg2Reads.get(1), read4, "Read " + read4.getReadName() + " should be in rg2 but isn't");
-        Assert.assertEquals(rg2Reads.get(2), read5, "Read " + read5.getReadName() + " should be in rg2 but isn't");
-    }
-
-    /**
-     * Ensure that splitting read groups still works when dealing with null read groups.
-     */
-    @Test
-    public void testSplitByNullReadGroups() {
-        SAMFileHeader header = ArtificialSAMUtils.createArtificialSamHeader(1,1,1000);
-
-        GATKSAMRecord read1 = ArtificialSAMUtils.createArtificialRead(header,"read1",0,1,10);
-        GATKSAMRecord read2 = ArtificialSAMUtils.createArtificialRead(header,"read2",0,1,10);
-        GATKSAMRecord read3 = ArtificialSAMUtils.createArtificialRead(header,"read3",0,1,10);
-
-        ReadBackedPileup pileup = new ReadBackedPileupImpl(null,
-                                                           Arrays.asList(read1,read2,read3),
-                                                           Arrays.asList(1,1,1));
-
-        ReadBackedPileup nullRgPileup = pileup.getPileupForReadGroup(null);
-        List<GATKSAMRecord> nullRgReads = nullRgPileup.getReads();
-        Assert.assertEquals(nullRgPileup.getNumberOfElements(), 3, "Wrong number of reads in null read group");
-        Assert.assertEquals(nullRgReads.get(0), read1, "Read " + read1.getReadName() + " should be in null rg but isn't");
-        Assert.assertEquals(nullRgReads.get(1), read2, "Read " + read2.getReadName() + " should be in null rg but isn't");
-        Assert.assertEquals(nullRgReads.get(2), read3, "Read " + read3.getReadName() + " should be in null rg but isn't");
-
-        ReadBackedPileup rg1Pileup = pileup.getPileupForReadGroup("rg1");
-        Assert.assertNull(rg1Pileup, "Pileup for non-existent read group should return null");
-    }
-
-    /**
-     * Ensure that splitting read groups still works when dealing with a sample-split pileup.
-     */
-    @Test
-    public void testSplitBySample() {
-        SAMReadGroupRecord readGroupOne = new SAMReadGroupRecord("rg1");
-        readGroupOne.setSample("sample1");
-        SAMReadGroupRecord readGroupTwo = new SAMReadGroupRecord("rg2");
-        readGroupTwo.setSample("sample2");
-
-        SAMFileHeader header = ArtificialSAMUtils.createArtificialSamHeader(1,1,1000);
-        header.addReadGroup(readGroupOne);
-        header.addReadGroup(readGroupTwo);
-
-        GATKSAMRecord read1 = ArtificialSAMUtils.createArtificialRead(header,"read1",0,1,10);
-        read1.setAttribute("RG",readGroupOne.getId());
-        GATKSAMRecord read2 = ArtificialSAMUtils.createArtificialRead(header,"read2",0,1,10);
-        read2.setAttribute("RG",readGroupTwo.getId());
-        GATKSAMRecord read3 = ArtificialSAMUtils.createArtificialRead(header,"read3",0,1,10);
-        read3.setAttribute("RG",readGroupOne.getId());
-        GATKSAMRecord read4 = ArtificialSAMUtils.createArtificialRead(header,"read4",0,1,10);
-        read4.setAttribute("RG",readGroupTwo.getId());
-
-        ReadBackedPileupImpl sample1Pileup = new ReadBackedPileupImpl(null,
-                                                                      Arrays.asList(read1,read3),
-                                                                      Arrays.asList(1,1));
-        ReadBackedPileupImpl sample2Pileup = new ReadBackedPileupImpl(null,
-                                                                      Arrays.asList(read2,read4),
-                                                                      Arrays.asList(1,1));
-        Map<String,ReadBackedPileupImpl> sampleToPileupMap = new HashMap<String,ReadBackedPileupImpl>();
-        sampleToPileupMap.put(readGroupOne.getSample(),sample1Pileup);
-        sampleToPileupMap.put(readGroupTwo.getSample(),sample2Pileup);
-
-        ReadBackedPileup compositePileup = new ReadBackedPileupImpl(null,sampleToPileupMap);
-
-        ReadBackedPileup rg1Pileup = compositePileup.getPileupForReadGroup("rg1");
-        List<GATKSAMRecord> rg1Reads = rg1Pileup.getReads();
-
-        Assert.assertEquals(rg1Reads.size(), 2, "Wrong number of reads in read group rg1");
-        Assert.assertEquals(rg1Reads.get(0), read1, "Read " + read1.getReadName() + " should be in rg1 but isn't");
-        Assert.assertEquals(rg1Reads.get(1), read3, "Read " + read3.getReadName() + " should be in rg1 but isn't");
-
-        ReadBackedPileup rg2Pileup = compositePileup.getPileupForReadGroup("rg2");
-        List<GATKSAMRecord> rg2Reads = rg2Pileup.getReads();
-
-        Assert.assertEquals(rg1Reads.size(), 2, "Wrong number of reads in read group rg2");
-        Assert.assertEquals(rg2Reads.get(0), read2, "Read " + read2.getReadName() + " should be in rg2 but isn't");
-        Assert.assertEquals(rg2Reads.get(1), read4, "Read " + read4.getReadName() + " should be in rg2 but isn't");
-    }
-
-    @Test
-    public void testGetPileupForSample() {
-        String sample1 = "sample1";
-        String sample2 = "sample2";
-
-        SAMReadGroupRecord readGroupOne = new SAMReadGroupRecord("rg1");
-        readGroupOne.setSample(sample1);
-        SAMReadGroupRecord readGroupTwo = new SAMReadGroupRecord("rg2");
-        readGroupTwo.setSample(sample2);
-
-        SAMFileHeader header = ArtificialSAMUtils.createArtificialSamHeader(1,1,1000);
-        header.addReadGroup(readGroupOne);
-        header.addReadGroup(readGroupTwo);
-
-        GATKSAMRecord read1 = ArtificialSAMUtils.createArtificialRead(header,"read1",0,1,10);
-        read1.setAttribute("RG",readGroupOne.getId());
-        GATKSAMRecord read2 = ArtificialSAMUtils.createArtificialRead(header,"read2",0,1,10);
-        read2.setAttribute("RG",readGroupTwo.getId());
-
-        Map<String,ReadBackedPileupImpl> sampleToPileupMap = new HashMap<String,ReadBackedPileupImpl>();
-        sampleToPileupMap.put(sample1,new ReadBackedPileupImpl(null,Collections.singletonList(read1),0));
-        sampleToPileupMap.put(sample2,new ReadBackedPileupImpl(null,Collections.singletonList(read2),0));
-
-        ReadBackedPileup pileup = new ReadBackedPileupImpl(null,sampleToPileupMap);
-
-        ReadBackedPileup sample2Pileup = pileup.getPileupForSample(sample2);
-        Assert.assertEquals(sample2Pileup.getNumberOfElements(),1,"Sample 2 pileup has wrong number of elements");
-        Assert.assertEquals(sample2Pileup.getReads().get(0),read2,"Sample 2 pileup has incorrect read");
-
-        ReadBackedPileup missingSamplePileup = pileup.getPileupForSample("missing");
-        Assert.assertNull(missingSamplePileup,"Pileup for sample 'missing' should be null but isn't");
-
-        missingSamplePileup = pileup.getPileupForSample("not here");
-        Assert.assertNull(missingSamplePileup,"Pileup for sample 'not here' should be null but isn't");
-    }
-
-    private static int sampleI = 0;
-    private class RBPCountTest {
-        final String sample;
-        final int nReads, nMapq0, nDeletions;
-
-        private RBPCountTest(int nReads, int nMapq0, int nDeletions) {
-            this.sample = "sample" + sampleI++;
-            this.nReads = nReads;
-            this.nMapq0 = nMapq0;
-            this.nDeletions = nDeletions;
-        }
-
-        private List<PileupElement> makeReads( final int n, final int mapq, final String op ) {
-            final int readLength = 3;
-
-            final List<PileupElement> elts = new LinkedList<PileupElement>();
-            for ( int i = 0; i < n; i++ ) {
-                GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(header, "read", 0, 1, readLength);
-                read.setReadBases(Utils.dupBytes((byte) 'A', readLength));
-                read.setBaseQualities(Utils.dupBytes((byte) 30, readLength));
-                read.setCigarString("1M1" + op + "1M");
-                read.setMappingQuality(mapq);
-                final int baseOffset = op.equals("M") ? 1 : 0;
-                final CigarElement cigarElement = read.getCigar().getCigarElement(1);
-                elts.add(new PileupElement(read, baseOffset, cigarElement, 1, 0));
-            }
-
-            return elts;
-        }
-
-        private ReadBackedPileupImpl makePileup() {
-            final List<PileupElement> elts = new LinkedList<PileupElement>();
-
-            elts.addAll(makeReads(nMapq0, 0, "M"));
-            elts.addAll(makeReads(nDeletions, 30, "D"));
-            elts.addAll(makeReads(nReads - nMapq0 - nDeletions, 30, "M"));
-
-            return new ReadBackedPileupImpl(loc, elts);
-        }
-
-        @Override
-        public String toString() {
-            return "RBPCountTest{" +
-                    "sample='" + sample + '\'' +
-                    ", nReads=" + nReads +
-                    ", nMapq0=" + nMapq0 +
-                    ", nDeletions=" + nDeletions +
-                    '}';
-        }
-    }
-
-    @DataProvider(name = "RBPCountingTest")
-    public Object[][] makeRBPCountingTest() {
-        final List<Object[]> tests = new LinkedList<Object[]>();
-
-        for ( final int nMapq : Arrays.asList(0, 10, 20) ) {
-            for ( final int nDeletions : Arrays.asList(0, 10, 20) ) {
-                for ( final int nReg : Arrays.asList(0, 10, 20) ) {
-                    final int total = nMapq + nDeletions + nReg;
-                    if ( total > 0 )
-                        tests.add(new Object[]{new RBPCountTest(total, nMapq, nDeletions)});
-                }
-            }
-        }
-
-        return tests.toArray(new Object[][]{});
-    }
-
-    @Test(dataProvider = "RBPCountingTest")
-    public void testRBPCountingTestSinglePileup(RBPCountTest params) {
-        testRBPCounts(params.makePileup(), params);
-    }
-
-    @Test(dataProvider = "RBPCountingTest")
-    public void testRBPCountingTestMultiSample(RBPCountTest params) {
-        final RBPCountTest newSample = new RBPCountTest(2, 1, 1);
-        final Map<String, ReadBackedPileupImpl> pileupsBySample = new HashMap<String, ReadBackedPileupImpl>();
-        pileupsBySample.put(newSample.sample, newSample.makePileup());
-        pileupsBySample.put(params.sample, params.makePileup());
-        final ReadBackedPileup pileup = new ReadBackedPileupImpl(loc, pileupsBySample);
-        testRBPCounts(pileup, new RBPCountTest(params.nReads + 2, params.nMapq0 + 1, params.nDeletions + 1));
-    }
-
-    private void testRBPCounts(final ReadBackedPileup rbp, RBPCountTest expected) {
-        for ( int cycles = 0; cycles < 3; cycles++ ) {
-            // multiple cycles to make sure caching is working
-            Assert.assertEquals(rbp.getNumberOfElements(), expected.nReads);
-            Assert.assertEquals(rbp.depthOfCoverage(), expected.nReads);
-            Assert.assertEquals(rbp.getNumberOfDeletions(), expected.nDeletions);
-            Assert.assertEquals(rbp.getNumberOfMappingQualityZeroReads(), expected.nMapq0);
-        }
-    }
-
-    @Test
-    public void testRBPMappingQuals() {
-
-        // create a read with high MQ
-        final GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(header, "read", 0, 1, 10);
-        read.setReadBases(Utils.dupBytes((byte) 'A', 10));
-        read.setBaseQualities(Utils.dupBytes((byte) 30, 10));
-        read.setCigarString("10M");
-        read.setMappingQuality(200); // set a MQ higher than max signed byte
-
-        // now create the RBP
-        final List<PileupElement> elts = new LinkedList<>();
-        elts.add(new PileupElement(read, 0, read.getCigar().getCigarElement(0), 0, 0));
-        final Map<String, ReadBackedPileupImpl> pileupsBySample = new HashMap<>();
-        pileupsBySample.put("foo", new ReadBackedPileupImpl(loc, elts));
-        final ReadBackedPileup pileup = new ReadBackedPileupImpl(loc, pileupsBySample);
-
-        Assert.assertEquals(pileup.getMappingQuals()[0], 200);
-    }
-}
\ No newline at end of file
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/progressmeter/ProgressMeterDaemonUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/progressmeter/ProgressMeterDaemonUnitTest.java
deleted file mode 100644
index 9d549ea..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/progressmeter/ProgressMeterDaemonUnitTest.java
+++ /dev/null
@@ -1,121 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.progressmeter;
-
-import org.broadinstitute.gatk.utils.BaseTest;
-import org.broadinstitute.gatk.utils.GenomeLocParser;
-import org.broadinstitute.gatk.utils.GenomeLocSortedSet;
-import org.broadinstitute.gatk.utils.fasta.CachingIndexedFastaSequenceFile;
-import org.testng.Assert;
-import org.testng.annotations.BeforeClass;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
-
-import java.io.File;
-import java.io.FileNotFoundException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.LinkedList;
-import java.util.List;
-
-/**
- * UnitTests for the ProgressMeterDaemon
- *
- * User: depristo
- * Date: 8/24/12
- * Time: 11:25 AM
- * To change this template use File | Settings | File Templates.
- */
-public class ProgressMeterDaemonUnitTest extends BaseTest {
-    private GenomeLocParser genomeLocParser;
-
-    @BeforeClass
-    public void init() throws FileNotFoundException {
-        genomeLocParser = new GenomeLocParser(new CachingIndexedFastaSequenceFile(new File(b37KGReference)));
-    }
-
-    // capture and count calls to progress
-    private class TestingProgressMeter extends ProgressMeter {
-        final List<Long> progressCalls = new LinkedList<Long>();
-
-        private TestingProgressMeter(final long poll) {
-            super(null, "test", new GenomeLocSortedSet(genomeLocParser), poll);
-            super.start();
-        }
-
-        @Override
-        protected synchronized void printProgress(boolean mustPrint) {
-            progressCalls.add(System.currentTimeMillis());
-        }
-    }
-
-    @DataProvider(name = "PollingData")
-    public Object[][] makePollingData() {
-        List<Object[]> tests = new ArrayList<Object[]>();
-        for ( final int ticks : Arrays.asList(1, 5, 10) ) {
-            for ( final int poll : Arrays.asList(10, 100) ) {
-                tests.add(new Object[]{poll, ticks});
-            }
-        }
-
-        return tests.toArray(new Object[][]{});
-    }
-
-    @Test
-    public void testPeriodUpdateNano() {
-        final ProgressMeter meter = new TestingProgressMeter(10);
-        final long currentTime = meter.getRuntimeInNanoseconds();
-        meter.updateElapsedTimeInNanoseconds();
-        Assert.assertTrue( meter.getRuntimeInNanosecondsUpdatedPeriodically() > currentTime, "Updating the periodic runtime failed" );
-    }
-
-    @Test(dataProvider = "PollingData", invocationCount = 10, successPercentage = 90, enabled = false)
-    public void testProgressMeterDaemon(final long poll, final int ticks) throws InterruptedException {
-        final TestingProgressMeter meter = new TestingProgressMeter(poll);
-        final ProgressMeterDaemon daemon = meter.getProgressMeterDaemon();
-
-        Assert.assertTrue(daemon.isDaemon());
-
-        Assert.assertFalse(daemon.isDone());
-        Thread.sleep(ticks * poll);
-        Assert.assertFalse(daemon.isDone());
-
-        daemon.done();
-        Assert.assertTrue(daemon.isDone());
-
-        // wait for the thread to actually finish
-        daemon.join();
-
-        Assert.assertTrue(meter.progressCalls.size() >= 1,
-                "Expected at least one progress update call from daemon thread, but only got " + meter.progressCalls.size() + " with exact calls " + meter.progressCalls);
-
-        final int tolerance = (int)Math.ceil(0.8 * meter.progressCalls.size());
-        Assert.assertTrue(Math.abs(meter.progressCalls.size() - ticks) <= tolerance,
-                "Expected " + ticks + " progress calls from daemon thread, but got " + meter.progressCalls.size() + " and a tolerance of only " + tolerance);
-
-        Assert.assertTrue(meter.getRuntimeInNanosecondsUpdatedPeriodically() > 0, "Daemon should have updated our periodic runtime");
-    }
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/progressmeter/ProgressMeterDataUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/progressmeter/ProgressMeterDataUnitTest.java
deleted file mode 100644
index 0c97377..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/progressmeter/ProgressMeterDataUnitTest.java
+++ /dev/null
@@ -1,86 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.progressmeter;
-
-import org.broadinstitute.gatk.utils.BaseTest;
-import org.broadinstitute.gatk.utils.AutoFormattingTime;
-import org.testng.Assert;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
-
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-import java.util.concurrent.TimeUnit;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
-
-/**
- * UnitTests for the ProgressMeterData
- *
- * User: depristo
- * Date: 8/24/12
- * Time: 11:25 AM
- * To change this template use File | Settings | File Templates.
- */
-public class ProgressMeterDataUnitTest extends BaseTest {
-    @Test
-    public void testBasic() {
-        Assert.assertEquals(new ProgressMeterData(1.0, 2, 3).getElapsedSeconds(), 1.0);
-        Assert.assertEquals(new ProgressMeterData(1.0, 2, 3).getUnitsProcessed(), 2);
-        Assert.assertEquals(new ProgressMeterData(1.0, 2, 3).getBpProcessed(), 3);
-    }
-
-    @Test
-    public void testFraction() {
-        final double TOL = 1e-3;
-        Assert.assertEquals(new ProgressMeterData(1.0, 1, 1).calculateFractionGenomeTargetCompleted(10), 0.1, TOL);
-        Assert.assertEquals(new ProgressMeterData(1.0, 1, 2).calculateFractionGenomeTargetCompleted(10), 0.2, TOL);
-        Assert.assertEquals(new ProgressMeterData(1.0, 1, 1).calculateFractionGenomeTargetCompleted(100), 0.01, TOL);
-        Assert.assertEquals(new ProgressMeterData(1.0, 1, 2).calculateFractionGenomeTargetCompleted(100), 0.02, TOL);
-        Assert.assertEquals(new ProgressMeterData(1.0, 1, 1).calculateFractionGenomeTargetCompleted(0), 1.0, TOL);
-    }
-
-    @Test
-    public void testSecondsPerBP() {
-        final double TOL = 1e-3;
-        final long M = 1000000;
-        Assert.assertEquals(new ProgressMeterData(1.0, 1, M).secondsPerMillionBP(), 1.0, TOL);
-        Assert.assertEquals(new ProgressMeterData(1.0, 1, M/10).secondsPerMillionBP(), 10.0, TOL);
-        Assert.assertEquals(new ProgressMeterData(2.0, 1, M).secondsPerMillionBP(), 2.0, TOL);
-        Assert.assertEquals(new ProgressMeterData(1.0, 1, 0).secondsPerMillionBP(), 1e6, TOL);
-    }
-
-    @Test
-    public void testSecondsPerElement() {
-        final double TOL = 1e-3;
-        final long M = 1000000;
-        Assert.assertEquals(new ProgressMeterData(1.0, M, 1).secondsPerMillionElements(), 1.0, TOL);
-        Assert.assertEquals(new ProgressMeterData(1.0, M/10, 1).secondsPerMillionElements(), 10.0, TOL);
-        Assert.assertEquals(new ProgressMeterData(2.00, M, 1).secondsPerMillionElements(), 2.0, TOL);
-        Assert.assertEquals(new ProgressMeterData(1.0, 0, 1).secondsPerMillionElements(), 1e6, TOL);
-    }
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/recalibration/EventTypeUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/recalibration/EventTypeUnitTest.java
deleted file mode 100644
index 7749fb2..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/recalibration/EventTypeUnitTest.java
+++ /dev/null
@@ -1,61 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.recalibration;
-
-import org.broadinstitute.gatk.utils.BaseTest;
-import org.testng.Assert;
-import org.testng.annotations.Test;
-
-import java.util.*;
-
-public final class EventTypeUnitTest extends BaseTest {
-    @Test
-    public void testEventTypes() {
-        for ( final EventType et : EventType.values() ) {
-            Assert.assertNotNull(et.toString());
-            Assert.assertNotNull(et.prettyPrint());
-            Assert.assertFalse("".equals(et.toString()));
-            Assert.assertFalse("".equals(et.prettyPrint()));
-            Assert.assertEquals(EventType.eventFrom(et.ordinal()), et);
-            Assert.assertEquals(EventType.eventFrom(et.toString()), et);
-        }
-    }
-
-    @Test
-    public void testEventTypesEnumItself() {
-        final Set<String> shortReps = new HashSet<String>();
-        for ( final EventType et : EventType.values() ) {
-            Assert.assertFalse(shortReps.contains(et.toString()), "Short representative for EventType has duplicates for " + et);
-            shortReps.add(et.toString());
-        }
-        Assert.assertEquals(shortReps.size(), EventType.values().length, "Short representatives for EventType aren't unique");
-    }
-
-    @Test(expectedExceptions = IllegalArgumentException.class)
-    public void testBadString() {
-        EventType.eventFrom("asdfhalsdjfalkjsdf");
-    }
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/report/ReportMarshallerUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/report/ReportMarshallerUnitTest.java
deleted file mode 100644
index ebeb158..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/report/ReportMarshallerUnitTest.java
+++ /dev/null
@@ -1,64 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.report;
-
-import org.broadinstitute.gatk.utils.BaseTest;
-import org.testng.annotations.Test;
-
-
-/**
- * @author aaron
- *         <p/>
- *         Class ReportMarshallerUnitTest
- *         <p/>
- *         test out the marshaller
- */
-public class ReportMarshallerUnitTest extends BaseTest {
-    @Test
-    public void testMarshalling() {
-        /*Configuration cfg = new Configuration();
-        try {
-            cfg.setDirectoryForTemplateLoading(new File("templates"));
-        } catch (IOException e) {
-            e.printStackTrace(); 
-        }
-        cfg.setObjectWrapper(new DefaultObjectWrapper());
-        Template temp = null;
-        try {
-            temp = cfg.createMarhsaller("myTestTemp.ftl");
-        } catch (IOException e) {
-            e.printStackTrace();
-        }
-        FakeAnalysis fa = new FakeAnalysis();
-        File fl = new File("testFile.out");
-        fl.deleteOnExit();
-        ReportMarshaller marsh = new ReportMarshaller("report",fl,temp);
-        marsh.write(fa);
-        marsh.write(fa);
-        marsh.write(fa);
-        marsh.close();*/
-    }
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/runtime/ProcessControllerUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/runtime/ProcessControllerUnitTest.java
deleted file mode 100644
index 4fa7ef5..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/runtime/ProcessControllerUnitTest.java
+++ /dev/null
@@ -1,518 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.runtime;
-
-import org.apache.commons.io.FileUtils;
-import org.apache.commons.lang.StringUtils;
-import org.broadinstitute.gatk.utils.BaseTest;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-import org.broadinstitute.gatk.utils.exceptions.UserException;
-import org.broadinstitute.gatk.utils.io.IOUtils;
-import org.testng.Assert;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
-
-import java.io.File;
-import java.io.IOException;
-import java.util.Arrays;
-import java.util.HashMap;
-import java.util.Map;
-
-public class ProcessControllerUnitTest extends BaseTest {
-    private static final String NL = String.format("%n");
-
-    @Test(timeOut = 60 * 1000)
-    public void testDestroyThreadLocal() throws InterruptedException {
-        for (int i = 0; i < 3; i++) {
-            final ProcessController controller = ProcessController.getThreadLocal();
-            final ProcessSettings job = new ProcessSettings(
-                    new String[] {"sh", "-c", "echo Hello World && sleep 600 && echo Goodbye"});
-            job.getStdoutSettings().setBufferSize(-1);
-
-            Thread t = new Thread(new Runnable() {
-                @Override
-                public void run() {
-                    System.out.println("BACK: Starting on background thread");
-                    ProcessOutput result = controller.exec(job);
-                    // Assert in background thread doesn't make it to main thread but does print a trace.
-                    Assert.assertTrue(result.getExitValue() != 0, "Destroy-attempted job returned zero exit status");
-                    System.out.println("BACK: Background thread exiting");
-                }
-            });
-
-            System.out.println("MAIN: Starting background thread");
-            t.start();
-            System.out.println("MAIN: Sleeping main thread 3s");
-            Thread.sleep(3000);
-            System.out.println("MAIN: Destroying job");
-            controller.tryDestroy();
-            System.out.println("MAIN: Not waiting on background thread to exit");
-            // Using standard java.io this was blocking on linux.
-            // TODO: try again with NIO.
-            //t.join();
-            //System.out.println("MAIN: Background thread exited");
-        }
-    }
-
-    @Test
-    public void testReuseAfterError() {
-        ProcessController controller = new ProcessController();
-
-        ProcessSettings job;
-
-        for (int i = 0; i < 3; i++) {
-            // Test bad command
-            job = new ProcessSettings(new String[] {"no_such_command"});
-            try {
-                controller.exec(job);
-            } catch (ReviewedGATKException e) {
-                /* Was supposed to throw an exception */
-            }
-
-            // Test exit != 0
-            job = new ProcessSettings(new String[] {"cat", "non_existent_file"});
-            int exitValue = controller.exec(job).getExitValue();
-            Assert.assertTrue(exitValue != 0, "'cat' non existent file returned 0");
-
-            // Text success
-            job = new ProcessSettings(new String[] {"echo", "Hello World"});
-            exitValue = controller.exec(job).getExitValue();
-            Assert.assertEquals(exitValue, 0, "Echo failed");
-        }
-    }
-
-    @Test
-    public void testEnvironment() {
-        String key = "MY_NEW_VAR";
-        String value = "value is here";
-
-        ProcessSettings job = new ProcessSettings(new String[] {"sh", "-c", "echo $"+key});
-        job.getStdoutSettings().setBufferSize(-1);
-        job.setRedirectErrorStream(true);
-
-        Map<String, String> env = new HashMap<String, String>(System.getenv());
-        env.put(key, value);
-        job.setEnvironment(env);
-
-        ProcessController controller = new ProcessController();
-        ProcessOutput result = controller.exec(job);
-        int exitValue = result.getExitValue();
-
-        Assert.assertEquals(exitValue, 0, "Echo environment variable failed");
-        Assert.assertEquals(result.getStdout().getBufferString(), value + NL, "Echo environment returned unexpected output");
-    }
-
-    @Test
-    public void testDirectory() throws IOException {
-        File dir = null;
-        try {
-            dir = IOUtils.tempDir("temp.", "").getCanonicalFile();
-
-            ProcessSettings job = new ProcessSettings(new String[] {"pwd"});
-            job.getStdoutSettings().setBufferSize(-1);
-            job.setRedirectErrorStream(true);
-            job.setDirectory(dir);
-
-            ProcessController controller = new ProcessController();
-            ProcessOutput result = controller.exec(job);
-            int exitValue = result.getExitValue();
-
-            Assert.assertEquals(exitValue, 0, "Getting working directory failed");
-
-            Assert.assertEquals(result.getStdout().getBufferString(), dir.getAbsolutePath() + NL,
-                    "Setting/getting working directory returned unexpected output");
-        } finally {
-            FileUtils.deleteQuietly(dir);
-        }
-    }
-
-    @Test
-    public void testReadStdInBuffer() {
-        String bufferText = "Hello from buffer";
-        ProcessSettings job = new ProcessSettings(new String[] {"cat"});
-        job.getStdoutSettings().setBufferSize(-1);
-        job.setRedirectErrorStream(true);
-        job.getStdinSettings().setInputBuffer(bufferText);
-
-        ProcessController controller = new ProcessController();
-        ProcessOutput output = controller.exec(job);
-
-        Assert.assertEquals(output.getStdout().getBufferString(), bufferText,
-                "Unexpected output from cat stdin buffer");
-    }
-
-    @Test
-    public void testReadStdInFile() {
-        File input = null;
-        try {
-            String fileText = "Hello from file";
-            input = IOUtils.writeTempFile(fileText, "stdin.", ".txt");
-
-            ProcessSettings job = new ProcessSettings(new String[] {"cat"});
-            job.getStdoutSettings().setBufferSize(-1);
-            job.setRedirectErrorStream(true);
-            job.getStdinSettings().setInputFile(input);
-
-            ProcessController controller = new ProcessController();
-            ProcessOutput output = controller.exec(job);
-
-            Assert.assertEquals(output.getStdout().getBufferString(), fileText,
-                    "Unexpected output from cat stdin file");
-        } finally {
-            FileUtils.deleteQuietly(input);
-        }
-    }
-
-    @Test
-    public void testWriteStdOut() {
-        ProcessSettings job = new ProcessSettings(new String[] {"echo", "Testing to stdout"});
-        // Not going to call the System.setOut() for now. Just running a basic visual test.
-        job.getStdoutSettings().printStandard(true);
-        job.setRedirectErrorStream(true);
-
-        System.out.println("testWriteStdOut: Writing two lines to std out...");
-        ProcessController controller = new ProcessController();
-        controller.exec(job);
-        job.setCommand(new String[]{"cat", "non_existent_file"});
-        controller.exec(job);
-        System.out.println("testWriteStdOut: ...two lines should have been printed to std out");
-    }
-
-    @Test
-    public void testErrorToOut() throws IOException {
-        File outFile = null;
-        File errFile = null;
-        try {
-            outFile = BaseTest.createTempFile("temp", "");
-            errFile = BaseTest.createTempFile("temp", "");
-
-            ProcessSettings job = new ProcessSettings(new String[]{"cat", "non_existent_file"});
-            job.getStdoutSettings().setOutputFile(outFile);
-            job.getStdoutSettings().setBufferSize(-1);
-            job.getStderrSettings().setOutputFile(errFile);
-            job.getStderrSettings().setBufferSize(-1);
-            job.setRedirectErrorStream(true);
-
-            ProcessOutput result = new ProcessController().exec(job);
-            int exitValue = result.getExitValue();
-
-            Assert.assertTrue(exitValue != 0, "'cat' non existent file returned 0");
-
-            String fileString, bufferString;
-
-            fileString = FileUtils.readFileToString(outFile);
-            Assert.assertTrue(fileString.length() > 0, "Out file was length 0");
-
-            bufferString = result.getStdout().getBufferString();
-            Assert.assertTrue(bufferString.length() > 0, "Out buffer was length 0");
-
-            Assert.assertFalse(result.getStdout().isBufferTruncated(), "Out buffer was truncated");
-            Assert.assertEquals(bufferString.length(), fileString.length(), "Out buffer length did not match file length");
-
-            fileString = FileUtils.readFileToString(errFile);
-            Assert.assertEquals(fileString, "", "Unexpected output to err file");
-
-            bufferString = result.getStderr().getBufferString();
-            Assert.assertEquals(bufferString, "", "Unexepected output to err buffer");
-        } finally {
-            FileUtils.deleteQuietly(outFile);
-            FileUtils.deleteQuietly(errFile);
-        }
-    }
-
-    @Test
-    public void testErrorToErr() throws IOException {
-        File outFile = null;
-        File errFile = null;
-        try {
-            outFile = BaseTest.createTempFile("temp", "");
-            errFile = BaseTest.createTempFile("temp", "");
-
-            ProcessSettings job = new ProcessSettings(new String[]{"cat", "non_existent_file"});
-            job.getStdoutSettings().setOutputFile(outFile);
-            job.getStdoutSettings().setBufferSize(-1);
-            job.getStderrSettings().setOutputFile(errFile);
-            job.getStderrSettings().setBufferSize(-1);
-            job.setRedirectErrorStream(false);
-
-            ProcessOutput result = new ProcessController().exec(job);
-            int exitValue = result.getExitValue();
-
-            Assert.assertTrue(exitValue != 0, "'cat' non existent file returned 0");
-
-            String fileString, bufferString;
-
-            fileString = FileUtils.readFileToString(errFile);
-            Assert.assertTrue(fileString.length() > 0, "Err file was length 0");
-
-            bufferString = result.getStderr().getBufferString();
-            Assert.assertTrue(bufferString.length() > 0, "Err buffer was length 0");
-
-            Assert.assertFalse(result.getStderr().isBufferTruncated(), "Err buffer was truncated");
-            Assert.assertEquals(bufferString.length(), fileString.length(), "Err buffer length did not match file length");
-
-            fileString = FileUtils.readFileToString(outFile);
-            Assert.assertEquals(fileString, "", "Unexpected output to out file");
-
-            bufferString = result.getStdout().getBufferString();
-            Assert.assertEquals(bufferString, "", "Unexepected output to out buffer");
-        } finally {
-            FileUtils.deleteQuietly(outFile);
-            FileUtils.deleteQuietly(errFile);
-        }
-    }
-
-    private static final String TRUNCATE_TEXT = "Hello World";
-    private static final byte[] TRUNCATE_OUTPUT_BYTES = (TRUNCATE_TEXT + NL).getBytes();
-
-    /**
-     * @return Test truncating content vs. not truncating (run at -1/+1 size)
-     */
-    @DataProvider(name = "truncateSizes")
-    public Object[][] getTruncateBufferSizes() {
-        int l = TRUNCATE_OUTPUT_BYTES.length;
-        return new Object[][]{
-                new Object[]{0, 0},
-                new Object[]{l, l},
-                new Object[]{l + 1, l},
-                new Object[]{l - 1, l - 1}
-        };
-    }
-
-    @Test(dataProvider = "truncateSizes")
-    public void testTruncateBuffer(int truncateLen, int expectedLen) {
-        byte[] expected = Arrays.copyOf(TRUNCATE_OUTPUT_BYTES, expectedLen);
-
-        String[] command = {"echo", TRUNCATE_TEXT};
-        ProcessController controller = new ProcessController();
-
-        ProcessSettings job = new ProcessSettings(command);
-        job.getStdoutSettings().setBufferSize(truncateLen);
-        ProcessOutput result = controller.exec(job);
-
-        int exitValue = result.getExitValue();
-
-        Assert.assertEquals(exitValue, 0,
-                String.format("Echo returned %d: %s", exitValue, TRUNCATE_TEXT));
-
-        byte[] bufferBytes = result.getStdout().getBufferBytes();
-
-        Assert.assertEquals(bufferBytes, expected,
-                String.format("Output buffer didn't match (%d vs %d)", expected.length, bufferBytes.length));
-
-        boolean truncated = result.getStdout().isBufferTruncated();
-
-        Assert.assertEquals(truncated, TRUNCATE_OUTPUT_BYTES.length > truncateLen,
-                "Unexpected buffer truncation result");
-    }
-
-    private static final String[] LONG_COMMAND = getLongCommand();
-    private static final String LONG_COMMAND_STRING = StringUtils.join(LONG_COMMAND, " ");
-    private static final String LONG_COMMAND_DESCRIPTION = "<long command>";
-
-    @DataProvider(name = "echoCommands")
-    public Object[][] getEchoCommands() {
-
-        new EchoCommand(new String[]{"echo", "Hello", "World"}, "Hello World" + NL);
-        new EchoCommand(new String[]{"echo", "'Hello", "World"}, "'Hello World" + NL);
-        new EchoCommand(new String[]{"echo", "Hello", "World'"}, "Hello World'" + NL);
-        new EchoCommand(new String[]{"echo", "'Hello", "World'"}, "'Hello World'" + NL);
-
-        String[] longCommand = new String[LONG_COMMAND.length + 1];
-        longCommand[0] = "echo";
-        System.arraycopy(LONG_COMMAND, 0, longCommand, 1, LONG_COMMAND.length);
-        new EchoCommand(longCommand, LONG_COMMAND_STRING + NL) {
-            @Override
-            public String toString() {
-                return LONG_COMMAND_DESCRIPTION;
-            }
-        };
-
-        return TestDataProvider.getTests(EchoCommand.class);
-    }
-
-    @Test(dataProvider = "echoCommands")
-    public void testEcho(EchoCommand script) throws IOException {
-        File outputFile = null;
-        try {
-            outputFile = BaseTest.createTempFile("temp", "");
-
-            ProcessSettings job = new ProcessSettings(script.command);
-            if (script.output != null) {
-                job.getStdoutSettings().setOutputFile(outputFile);
-                job.getStdoutSettings().setBufferSize(script.output.getBytes().length);
-            }
-
-            ProcessOutput result = new ProcessController().exec(job);
-            int exitValue = result.getExitValue();
-
-            Assert.assertEquals(exitValue, 0,
-                    String.format("Echo returned %d: %s", exitValue, script));
-
-            if (script.output != null) {
-
-                String fileString = FileUtils.readFileToString(outputFile);
-                Assert.assertEquals(fileString, script.output,
-                        String.format("Output file didn't match (%d vs %d): %s",
-                                fileString.length(), script.output.length(), script));
-
-                String bufferString = result.getStdout().getBufferString();
-                Assert.assertEquals(bufferString, script.output,
-                        String.format("Output content didn't match (%d vs %d): %s",
-                                bufferString.length(), script.output.length(), script));
-
-                Assert.assertFalse(result.getStdout().isBufferTruncated(),
-                        "Output content was truncated: " + script);
-            }
-        } finally {
-            FileUtils.deleteQuietly(outputFile);
-        }
-    }
-
-    @Test(expectedExceptions = ReviewedGATKException.class)
-    public void testUnableToStart() {
-        ProcessSettings job = new ProcessSettings(new String[]{"no_such_command"});
-        new ProcessController().exec(job);
-    }
-
-    @DataProvider(name = "scriptCommands")
-    public Object[][] getScriptCommands() {
-        new ScriptCommand(true, "echo Hello World", "Hello World" + NL);
-        new ScriptCommand(false, "echo 'Hello World", null);
-        new ScriptCommand(false, "echo Hello World'", null);
-        new ScriptCommand(true, "echo 'Hello World'", "Hello World" + NL);
-        new ScriptCommand(true, "echo \"Hello World\"", "Hello World" + NL);
-        new ScriptCommand(false, "no_such_echo Hello World", null);
-        new ScriptCommand(true, "echo #", NL);
-        new ScriptCommand(true, "echo \\#", "#" + NL);
-        new ScriptCommand(true, "echo \\\\#", "\\#" + NL);
-
-        new ScriptCommand(true, "echo " + LONG_COMMAND_STRING, LONG_COMMAND_STRING + NL) {
-            @Override
-            public String toString() {
-                return LONG_COMMAND_DESCRIPTION;
-            }
-        };
-
-        return TestDataProvider.getTests(ScriptCommand.class);
-    }
-
-    @Test(dataProvider = "scriptCommands")
-    public void testScript(ScriptCommand script) throws IOException {
-        File scriptFile = null;
-        File outputFile = null;
-        try {
-            scriptFile = writeScript(script.content);
-            outputFile = BaseTest.createTempFile("temp", "");
-
-            ProcessSettings job = new ProcessSettings(new String[]{"sh", scriptFile.getAbsolutePath()});
-            if (script.output != null) {
-                job.getStdoutSettings().setOutputFile(outputFile);
-                job.getStdoutSettings().setBufferSize(script.output.getBytes().length);
-            }
-
-            ProcessOutput result = new ProcessController().exec(job);
-            int exitValue = result.getExitValue();
-
-            Assert.assertEquals(exitValue == 0, script.succeed,
-                    String.format("Script returned %d: %s", exitValue, script));
-
-            if (script.output != null) {
-
-                String fileString = FileUtils.readFileToString(outputFile);
-                Assert.assertEquals(fileString, script.output,
-                        String.format("Output file didn't match (%d vs %d): %s",
-                                fileString.length(), script.output.length(), script));
-
-                String bufferString = result.getStdout().getBufferString();
-                Assert.assertEquals(bufferString, script.output,
-                        String.format("Output content didn't match (%d vs %d): %s",
-                                bufferString.length(), script.output.length(), script));
-
-                Assert.assertFalse(result.getStdout().isBufferTruncated(),
-                        "Output content was truncated: " + script);
-            }
-        } finally {
-            FileUtils.deleteQuietly(scriptFile);
-            FileUtils.deleteQuietly(outputFile);
-        }
-    }
-
-    private static String[] getLongCommand() {
-        // This command fails on some systems with a 4096 character limit when run via the old sh -c "echo ...",
-        // but works on the same systems when run via sh <script>
-        int cnt = 500;
-        String[] command = new String[cnt];
-        for (int i = 1; i <= cnt; i++) {
-            command[i - 1] = String.format("%03d______", i);
-        }
-        return command;
-    }
-
-    private static File writeScript(String contents) {
-        try {
-            File file = BaseTest.createTempFile("temp", "");
-            FileUtils.writeStringToFile(file, contents);
-            return file;
-        } catch (IOException e) {
-            throw new UserException.BadTmpDir(e.getMessage());
-        }
-    }
-
-    private static class EchoCommand extends TestDataProvider {
-        public final String[] command;
-        public final String output;
-
-        public EchoCommand(String[] command, String output) {
-            super(EchoCommand.class);
-            this.command = command;
-            this.output = output;
-        }
-
-        @Override
-        public String toString() {
-            return StringUtils.join(command, " ");
-        }
-    }
-
-    public static class ScriptCommand extends TestDataProvider {
-        public final boolean succeed;
-        public final String content;
-        public final String output;
-
-        public ScriptCommand(boolean succeed, String content, String output) {
-            super(ScriptCommand.class);
-            this.succeed = succeed;
-            this.content = content;
-            this.output = output;
-        }
-
-        @Override
-        public String toString() {
-            return content;
-        }
-    }
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/runtime/RuntimeUtilsUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/runtime/RuntimeUtilsUnitTest.java
deleted file mode 100644
index 9573774..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/runtime/RuntimeUtilsUnitTest.java
+++ /dev/null
@@ -1,42 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.runtime;
-
-import org.broadinstitute.gatk.utils.BaseTest;
-import org.testng.Assert;
-import org.testng.annotations.Test;
-
-public class RuntimeUtilsUnitTest extends BaseTest {
-    @Test
-    public void testWhichExists() {
-        Assert.assertNotNull(RuntimeUtils.which("ls"), "Unable to locate ls");
-    }
-
-    @Test
-    public void testWhichNotExists() {
-        Assert.assertNull(RuntimeUtils.which("does_not_exist"), "Found nonexistent binary: does_not_exist");
-    }
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/sam/AlignmentUtilsUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/sam/AlignmentUtilsUnitTest.java
deleted file mode 100644
index 3281570..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/sam/AlignmentUtilsUnitTest.java
+++ /dev/null
@@ -1,1044 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.sam;
-
-import htsjdk.samtools.*;
-import org.apache.commons.lang.ArrayUtils;
-import org.broadinstitute.gatk.utils.Utils;
-import org.broadinstitute.gatk.utils.haplotype.Haplotype;
-import org.broadinstitute.gatk.utils.pileup.PileupElement;
-import org.testng.Assert;
-import org.testng.annotations.BeforeClass;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
-
-import java.util.*;
-
-public class AlignmentUtilsUnitTest {
-    private final static boolean DEBUG = false;
-    private SAMFileHeader header;
-
-    /** Basic aligned and mapped read. */
-    private SAMRecord readMapped;
-
-    /** Read with no contig specified in the read, -L UNMAPPED */
-    private SAMRecord readNoReference;
-
-    /** This read has a start position, but is flagged that it's not mapped. */
-    private SAMRecord readUnmappedFlag;
-
-    /** This read says it's aligned, but to a contig not in the header. */
-    private SAMRecord readUnknownContig;
-
-    /** This read says it's aligned, but actually has an unknown start. */
-    private SAMRecord readUnknownStart;
-
-    @BeforeClass
-    public void init() {
-        header = ArtificialSAMUtils.createArtificialSamHeader(3, 1, ArtificialSAMUtils.DEFAULT_READ_LENGTH * 2);
-
-        readMapped = createMappedRead("mapped", 1);
-
-        readNoReference = createUnmappedRead("unmappedNoReference");
-
-        readUnmappedFlag = createMappedRead("unmappedFlagged", 2);
-        readUnmappedFlag.setReadUnmappedFlag(true);
-
-        readUnknownContig = createMappedRead("unknownContig", 3);
-        readUnknownContig.setReferenceName("unknownContig");
-
-        readUnknownStart = createMappedRead("unknownStart", 1);
-        readUnknownStart.setAlignmentStart(SAMRecord.NO_ALIGNMENT_START);
-    }
-
-    /**
-     * Test for -L UNMAPPED
-     */
-    @DataProvider(name = "genomeLocUnmappedReadTests")
-    public Object[][] getGenomeLocUnmappedReadTests() {
-        return new Object[][] {
-                new Object[] {readNoReference, true},
-                new Object[] {readMapped, false},
-                new Object[] {readUnmappedFlag, false},
-                new Object[] {readUnknownContig, false},
-                new Object[] {readUnknownStart, false}
-        };
-    }
-    @Test(enabled = !DEBUG, dataProvider = "genomeLocUnmappedReadTests")
-    public void testIsReadGenomeLocUnmapped(SAMRecord read, boolean expected) {
-        Assert.assertEquals(AlignmentUtils.isReadGenomeLocUnmapped(read), expected);
-    }
-
-    /**
-     * Test for read being truly unmapped
-     */
-    @DataProvider(name = "unmappedReadTests")
-    public Object[][] getUnmappedReadTests() {
-        return new Object[][] {
-                new Object[] {readNoReference, true},
-                new Object[] {readMapped, false},
-                new Object[] {readUnmappedFlag, true},
-                new Object[] {readUnknownContig, false},
-                new Object[] {readUnknownStart, true}
-        };
-    }
-    @Test(enabled = !DEBUG, dataProvider = "unmappedReadTests")
-    public void testIsReadUnmapped(SAMRecord read, boolean expected) {
-        Assert.assertEquals(AlignmentUtils.isReadUnmapped(read), expected);
-    }
-
-    private SAMRecord createUnmappedRead(String name) {
-        return ArtificialSAMUtils.createArtificialRead(
-                header,
-                name,
-                SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX,
-                SAMRecord.NO_ALIGNMENT_START,
-                ArtificialSAMUtils.DEFAULT_READ_LENGTH);
-    }
-
-    private SAMRecord createMappedRead(String name, int start) {
-        return ArtificialSAMUtils.createArtificialRead(
-                header,
-                name,
-                0,
-                start,
-                ArtificialSAMUtils.DEFAULT_READ_LENGTH);
-    }
-
-    private final List<List<CigarElement>> makeCigarElementCombinations() {
-        // this functionality can be adapted to provide input data for whatever you might want in your data
-        final List<CigarElement> cigarElements = new LinkedList<CigarElement>();
-        for ( final int size : Arrays.asList(0, 10) ) {
-            for ( final CigarOperator op : CigarOperator.values() ) {
-                cigarElements.add(new CigarElement(size, op));
-            }
-        }
-
-        final List<List<CigarElement>> combinations = new LinkedList<List<CigarElement>>();
-        for ( final int nElements : Arrays.asList(1, 2, 3) ) {
-            combinations.addAll(Utils.makePermutations(cigarElements, nElements, true));
-        }
-
-        return combinations;
-    }
-
-
-    @DataProvider(name = "CalcNumDifferentBasesData")
-    public Object[][] makeCalcNumDifferentBasesData() {
-        List<Object[]> tests = new ArrayList<Object[]>();
-
-        tests.add(new Object[]{"5M", "ACGTA", "ACGTA", 0});
-        tests.add(new Object[]{"5M", "ACGTA", "ACGTT", 1});
-        tests.add(new Object[]{"5M", "ACGTA", "TCGTT", 2});
-        tests.add(new Object[]{"5M", "ACGTA", "TTGTT", 3});
-        tests.add(new Object[]{"5M", "ACGTA", "TTTTT", 4});
-        tests.add(new Object[]{"5M", "ACGTA", "TTTCT", 5});
-        tests.add(new Object[]{"2M3I3M", "ACGTA", "ACNNNGTA", 3});
-        tests.add(new Object[]{"2M3I3M", "ACGTA", "ACNNNGTT", 4});
-        tests.add(new Object[]{"2M3I3M", "ACGTA", "TCNNNGTT", 5});
-        tests.add(new Object[]{"2M2D1M", "ACGTA", "ACA", 2});
-        tests.add(new Object[]{"2M2D1M", "ACGTA", "ACT", 3});
-        tests.add(new Object[]{"2M2D1M", "ACGTA", "TCT", 4});
-        tests.add(new Object[]{"2M2D1M", "ACGTA", "TGT", 5});
-
-        return tests.toArray(new Object[][]{});
-    }
-
-    @Test(enabled = true, dataProvider = "CalcNumDifferentBasesData")
-    public void testCalcNumDifferentBases(final String cigarString, final String ref, final String read, final int expectedDifferences) {
-        final Cigar cigar = TextCigarCodec.getSingleton().decode(cigarString);
-        Assert.assertEquals(AlignmentUtils.calcNumDifferentBases(cigar, ref.getBytes(), read.getBytes()), expectedDifferences);
-    }
-
-    @DataProvider(name = "NumAlignedBasesCountingSoftClips")
-    public Object[][] makeNumAlignedBasesCountingSoftClips() {
-        List<Object[]> tests = new ArrayList<Object[]>();
-
-        final EnumSet<CigarOperator> alignedToGenome = EnumSet.of(CigarOperator.M, CigarOperator.EQ, CigarOperator.X, CigarOperator.S);
-        for ( final List<CigarElement> elements : makeCigarElementCombinations() ) {
-            int n = 0;
-            for ( final CigarElement elt : elements ) n += alignedToGenome.contains(elt.getOperator()) ? elt.getLength() : 0;
-            tests.add(new Object[]{new Cigar(elements), n});
-        }
-
-        tests.add(new Object[]{null, 0});
-
-        return tests.toArray(new Object[][]{});
-    }
-
-    @Test(enabled = !DEBUG, dataProvider = "NumAlignedBasesCountingSoftClips")
-    public void testNumAlignedBasesCountingSoftClips(final Cigar cigar, final int expected) {
-        final GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(header, "myRead", 0, 1, cigar == null ? 10 : cigar.getReadLength());
-        read.setCigar(cigar);
-        Assert.assertEquals(AlignmentUtils.getNumAlignedBasesCountingSoftClips(read), expected, "Cigar " + cigar + " failed NumAlignedBasesCountingSoftClips");
-    }
-
-    @DataProvider(name = "CigarHasZeroElement")
-    public Object[][] makeCigarHasZeroElement() {
-        List<Object[]> tests = new ArrayList<Object[]>();
-
-        for ( final List<CigarElement> elements : makeCigarElementCombinations() ) {
-            boolean hasZero = false;
-            for ( final CigarElement elt : elements ) hasZero = hasZero || elt.getLength() == 0;
-            tests.add(new Object[]{new Cigar(elements), hasZero});
-        }
-
-        return tests.toArray(new Object[][]{});
-    }
-
-    @Test(enabled = !DEBUG, dataProvider = "CigarHasZeroElement")
-    public void testCigarHasZeroSize(final Cigar cigar, final boolean hasZero) {
-        Assert.assertEquals(AlignmentUtils.cigarHasZeroSizeElement(cigar), hasZero, "Cigar " + cigar.toString() + " failed cigarHasZeroSizeElement");
-    }
-
-    @DataProvider(name = "NumHardClipped")
-    public Object[][] makeNumHardClipped() {
-        List<Object[]> tests = new ArrayList<Object[]>();
-
-        for ( final List<CigarElement> elements : makeCigarElementCombinations() ) {
-            int n = 0;
-            for ( final CigarElement elt : elements ) n += elt.getOperator() == CigarOperator.H ? elt.getLength() : 0;
-            tests.add(new Object[]{new Cigar(elements), n});
-        }
-
-        tests.add(new Object[]{null, 0});
-
-        return tests.toArray(new Object[][]{});
-    }
-
-    @Test(enabled = !DEBUG, dataProvider = "NumHardClipped")
-    public void testNumHardClipped(final Cigar cigar, final int expected) {
-        final GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(header, "myRead", 0, 1, cigar == null ? 10 : cigar.getReadLength());
-        read.setCigar(cigar);
-        Assert.assertEquals(AlignmentUtils.getNumHardClippedBases(read), expected, "Cigar " + cigar + " failed num hard clips");
-    }
-
-    @DataProvider(name = "NumAlignedBlocks")
-    public Object[][] makeNumAlignedBlocks() {
-        List<Object[]> tests = new ArrayList<Object[]>();
-
-        for ( final List<CigarElement> elements : makeCigarElementCombinations() ) {
-            int n = 0;
-            for ( final CigarElement elt : elements ) {
-                switch ( elt.getOperator() ) {
-                    case M:case X:case EQ: n++; break;
-                    default: break;
-                }
-            }
-            tests.add(new Object[]{new Cigar(elements), n});
-        }
-
-        tests.add(new Object[]{null, 0});
-
-        return tests.toArray(new Object[][]{});
-    }
-
-    @Test(enabled = !DEBUG, dataProvider = "NumAlignedBlocks")
-    public void testNumAlignedBlocks(final Cigar cigar, final int expected) {
-        final GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(header, "myRead", 0, 1, cigar == null ? 10 : cigar.getReadLength());
-        read.setCigar(cigar);
-        Assert.assertEquals(AlignmentUtils.getNumAlignmentBlocks(read), expected, "Cigar " + cigar + " failed NumAlignedBlocks");
-    }
-
-    @DataProvider(name = "ConsolidateCigarData")
-    public Object[][] makeConsolidateCigarData() {
-        List<Object[]> tests = new ArrayList<Object[]>();
-
-        // this functionality can be adapted to provide input data for whatever you might want in your data
-        tests.add(new Object[]{"1M1M", "2M"});
-        tests.add(new Object[]{"2M", "2M"});
-        tests.add(new Object[]{"2M0M", "2M"});
-        tests.add(new Object[]{"0M2M", "2M"});
-        tests.add(new Object[]{"0M2M0M0I0M1M", "3M"});
-        tests.add(new Object[]{"2M0M1M", "3M"});
-        tests.add(new Object[]{"1M1M1M1D2M1M", "3M1D3M"});
-        tests.add(new Object[]{"6M6M6M", "18M"});
-
-        final List<CigarElement> elements = new LinkedList<CigarElement>();
-        int i = 1;
-        for ( final CigarOperator op : CigarOperator.values() ) {
-            elements.add(new CigarElement(i++, op));
-        }
-        for ( final List<CigarElement> ops : Utils.makePermutations(elements,  3, false) ) {
-            final String expected = new Cigar(ops).toString();
-            final List<CigarElement> cutElements = new LinkedList<CigarElement>();
-            for ( final CigarElement elt : ops ) {
-                for ( int j = 0; j < elt.getLength(); j++ ) {
-                    cutElements.add(new CigarElement(1, elt.getOperator()));
-                }
-            }
-
-            final String actual = new Cigar(cutElements).toString();
-            tests.add(new Object[]{actual, expected});
-        }
-
-        return tests.toArray(new Object[][]{});
-    }
-
-    @Test(enabled = !DEBUG, dataProvider = "ConsolidateCigarData")
-    public void testConsolidateCigarWithData(final String testCigarString, final String expectedCigarString) {
-        final Cigar testCigar = TextCigarCodec.getSingleton().decode(testCigarString);
-        final Cigar expectedCigar = TextCigarCodec.getSingleton().decode(expectedCigarString);
-        final Cigar actualCigar = AlignmentUtils.consolidateCigar(testCigar);
-        Assert.assertEquals(actualCigar, expectedCigar);
-    }
-
-    @DataProvider(name = "SoftClipsDataProvider")
-    public Object[][] makeSoftClipsDataProvider() {
-        List<Object[]> tests = new ArrayList<Object[]>();
-
-        // this functionality can be adapted to provide input data for whatever you might want in your data
-        for ( final int lengthOfLeftClip : Arrays.asList(0, 1, 10) ) {
-            for ( final int lengthOfRightClip : Arrays.asList(0, 1, 10) ) {
-                for ( final int qualThres : Arrays.asList(10, 20, 30) ) {
-                    for ( final String middleOp : Arrays.asList("M", "D") ) {
-                        for ( final int matchSize : Arrays.asList(0, 1, 10) ) {
-                            final byte[] left = makeQualArray(lengthOfLeftClip, qualThres);
-                            final byte[] right = makeQualArray(lengthOfRightClip, qualThres);
-                            int n = 0;
-                            for ( int i = 0; i < left.length; i++ ) n += left[i] > qualThres ? 1 : 0;
-                            for ( int i = 0; i < right.length; i++ ) n += right[i] > qualThres ? 1 : 0;
-                            tests.add(new Object[]{left, matchSize, middleOp, right, qualThres, n});
-                        }
-                    }
-                }
-            }
-        }
-
-        return tests.toArray(new Object[][]{});
-    }
-
-    private byte[] makeQualArray(final int length, final int qualThreshold) {
-        final byte[] array = new byte[length];
-        for ( int i = 0; i < array.length; i++ )
-            array[i] = (byte)(qualThreshold + ( i % 2 == 0 ? 1 : - 1 ));
-        return array;
-    }
-
-    @Test(enabled = !DEBUG, dataProvider = "SoftClipsDataProvider")
-    public void testSoftClipsData(final byte[] qualsOfSoftClipsOnLeft, final int middleSize, final String middleOp, final byte[] qualOfSoftClipsOnRight, final int qualThreshold, final int numExpected) {
-        final int readLength = (middleOp.equals("D") ? 0 : middleSize) + qualOfSoftClipsOnRight.length + qualsOfSoftClipsOnLeft.length;
-        final GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(header, "myRead", 0, 1, readLength);
-        final byte[] bases = Utils.dupBytes((byte) 'A', readLength);
-        final byte[] matchBytes = middleOp.equals("D") ? new byte[]{} : Utils.dupBytes((byte)30, middleSize);
-        final byte[] quals = ArrayUtils.addAll(ArrayUtils.addAll(qualsOfSoftClipsOnLeft, matchBytes), qualOfSoftClipsOnRight);
-
-        // set the read's bases and quals
-        read.setReadBases(bases);
-        read.setBaseQualities(quals);
-
-        final StringBuilder cigar = new StringBuilder();
-        if (qualsOfSoftClipsOnLeft.length > 0 ) cigar.append(qualsOfSoftClipsOnLeft.length + "S");
-        if (middleSize > 0 ) cigar.append(middleSize + middleOp);
-        if (qualOfSoftClipsOnRight.length > 0 ) cigar.append(qualOfSoftClipsOnRight.length + "S");
-
-        read.setCigarString(cigar.toString());
-
-        final int actual = AlignmentUtils.calcNumHighQualitySoftClips(read, (byte) qualThreshold);
-        Assert.assertEquals(actual, numExpected, "Wrong number of soft clips detected for read " + read.getSAMString());
-    }
-
-    ////////////////////////////////////////////
-    // Test AlignmentUtils.getMismatchCount() //
-    ////////////////////////////////////////////
-
-    @DataProvider(name = "MismatchCountDataProvider")
-    public Object[][] makeMismatchCountDataProvider() {
-        List<Object[]> tests = new ArrayList<Object[]>();
-
-        final int readLength = 20;
-        final int lengthOfIndel = 2;
-        final int locationOnReference = 10;
-        final byte[] reference = Utils.dupBytes((byte)'A', readLength);
-        final byte[] quals = Utils.dupBytes((byte)'A', readLength);
-
-
-        for ( int startOnRead = 0; startOnRead <= readLength; startOnRead++ ) {
-            for ( int basesToRead = 0; basesToRead <= readLength; basesToRead++ ) {
-                for ( final int lengthOfSoftClip : Arrays.asList(0, 1, 10) ) {
-                    for ( final int lengthOfFirstM : Arrays.asList(0, 3) ) {
-                        for ( final char middleOp : Arrays.asList('M', 'D', 'I') ) {
-                            for ( final int mismatchLocation : Arrays.asList(-1, 0, 5, 10, 15, 19) ) {
-
-                                final GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(header, "myRead", 0, locationOnReference, readLength);
-
-                                // set the read's bases and quals
-                                final byte[] readBases = reference.clone();
-                                // create the mismatch if requested
-                                if ( mismatchLocation != -1 )
-                                    readBases[mismatchLocation] = (byte)'C';
-                                read.setReadBases(readBases);
-                                read.setBaseQualities(quals);
-
-                                // create the CIGAR string
-                                read.setCigarString(buildTestCigarString(middleOp, lengthOfSoftClip, lengthOfFirstM, lengthOfIndel, readLength));
-
-                                // now, determine whether or not there's a mismatch
-                                final boolean isMismatch;
-                                if ( mismatchLocation < startOnRead || mismatchLocation >= startOnRead + basesToRead || mismatchLocation < lengthOfSoftClip ) {
-                                    isMismatch = false;
-                                } else if ( middleOp == 'M' || middleOp == 'D' || mismatchLocation < lengthOfSoftClip + lengthOfFirstM || mismatchLocation >= lengthOfSoftClip + lengthOfFirstM + lengthOfIndel ) {
-                                    isMismatch = true;
-                                } else {
-                                    isMismatch = false;
-                                }
-
-                                tests.add(new Object[]{read, locationOnReference, startOnRead, basesToRead, isMismatch});
-                            }
-                        }
-                    }
-                }
-            }
-        }
-
-        // Adding test to make sure soft-clipped reads go through the exceptions thrown at the beginning of the getMismatchCount method
-        // todo: incorporate cigars with right-tail soft-clips in the systematic tests above.
-        GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(header, "myRead", 0, 10, 20);
-        read.setReadBases(reference);
-        read.setBaseQualities(quals);
-        read.setCigarString("10S5M5S");
-        tests.add(new Object[]{read, 10, read.getAlignmentStart(), read.getReadLength(), false});
-
-        return tests.toArray(new Object[][]{});
-    }
-
-    @Test(enabled = !DEBUG, dataProvider = "MismatchCountDataProvider")
-    public void testMismatchCountData(final GATKSAMRecord read, final int refIndex, final int startOnRead, final int basesToRead, final boolean isMismatch) {
-        final byte[] reference = Utils.dupBytes((byte)'A', 100);
-        final int actual = AlignmentUtils.getMismatchCount(read, reference, refIndex, startOnRead, basesToRead).numMismatches;
-        Assert.assertEquals(actual, isMismatch ? 1 : 0, "Wrong number of mismatches detected for read " + read.getSAMString());
-    }
-
-    private static String buildTestCigarString(final char middleOp, final int lengthOfSoftClip, final int lengthOfFirstM, final int lengthOfIndel, final int readLength) {
-        final StringBuilder cigar = new StringBuilder();
-        int remainingLength = readLength;
-
-        // add soft clips to the beginning of the read
-        if (lengthOfSoftClip > 0 ) {
-            cigar.append(lengthOfSoftClip).append("S");
-            remainingLength -= lengthOfSoftClip;
-        }
-
-        if ( middleOp == 'M' ) {
-            cigar.append(remainingLength).append("M");
-        } else {
-            if ( lengthOfFirstM > 0 ) {
-                cigar.append(lengthOfFirstM).append("M");
-                remainingLength -= lengthOfFirstM;
-            }
-
-            if ( middleOp == 'D' ) {
-                cigar.append(lengthOfIndel).append("D");
-            } else {
-                cigar.append(lengthOfIndel).append("I");
-                remainingLength -= lengthOfIndel;
-            }
-            cigar.append(remainingLength).append("M");
-        }
-
-        return cigar.toString();
-    }
-
-    ////////////////////////////////////////////////////////
-    // Test AlignmentUtils.calcAlignmentByteArrayOffset() //
-    ////////////////////////////////////////////////////////
-
-    @DataProvider(name = "AlignmentByteArrayOffsetDataProvider")
-    public Object[][] makeAlignmentByteArrayOffsetDataProvider() {
-        List<Object[]> tests = new ArrayList<Object[]>();
-
-        final int readLength = 20;
-        final int lengthOfIndel = 2;
-        final int locationOnReference = 20;
-
-        for ( int offset = 0; offset < readLength; offset++ ) {
-            for ( final int lengthOfSoftClip : Arrays.asList(0, 1, 10) ) {
-                for ( final int lengthOfFirstM : Arrays.asList(0, 3) ) {
-                    for ( final char middleOp : Arrays.asList('M', 'D', 'I') ) {
-
-                        final GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(header, "myRead", 0, locationOnReference, readLength);
-                        // create the CIGAR string
-                        read.setCigarString(buildTestCigarString(middleOp, lengthOfSoftClip, lengthOfFirstM, lengthOfIndel, readLength));
-
-                        // now, determine the expected alignment offset
-                        final int expected;
-                        boolean isDeletion = false;
-                        if ( offset < lengthOfSoftClip ) {
-                            expected = 0;
-                        } else if ( middleOp == 'M' || offset < lengthOfSoftClip + lengthOfFirstM ) {
-                            expected = offset - lengthOfSoftClip;
-                        } else if ( offset < lengthOfSoftClip + lengthOfFirstM + lengthOfIndel ) {
-                            if ( middleOp == 'D' ) {
-                                isDeletion = true;
-                                expected = offset - lengthOfSoftClip;
-                            } else {
-                                expected = lengthOfFirstM;
-                            }
-                        } else {
-                            expected = offset - lengthOfSoftClip - (middleOp == 'I' ? lengthOfIndel : -lengthOfIndel);
-                        }
-
-                        tests.add(new Object[]{read.getCigar(), offset, expected, isDeletion, lengthOfSoftClip});
-                    }
-                }
-            }
-        }
-
-        return tests.toArray(new Object[][]{});
-    }
-
-    @Test(enabled = !DEBUG, dataProvider = "AlignmentByteArrayOffsetDataProvider")
-    public void testAlignmentByteArrayOffsetData(final Cigar cigar, final int offset, final int expectedResult, final boolean isDeletion, final int lengthOfSoftClip) {
-        final int actual = AlignmentUtils.calcAlignmentByteArrayOffset(cigar, isDeletion ? -1 : offset, isDeletion, 20, 20 + offset - lengthOfSoftClip);
-        Assert.assertEquals(actual, expectedResult, "Wrong alignment offset detected for cigar " + cigar.toString());
-    }
-
-    ////////////////////////////////////////////////////
-    // Test AlignmentUtils.readToAlignmentByteArray() //
-    ////////////////////////////////////////////////////
-
-    @DataProvider(name = "ReadToAlignmentByteArrayDataProvider")
-    public Object[][] makeReadToAlignmentByteArrayDataProvider() {
-        List<Object[]> tests = new ArrayList<Object[]>();
-
-        final int readLength = 20;
-        final int lengthOfIndel = 2;
-        final int locationOnReference = 20;
-
-        for ( final int lengthOfSoftClip : Arrays.asList(0, 1, 10) ) {
-            for ( final int lengthOfFirstM : Arrays.asList(0, 3) ) {
-                for ( final char middleOp : Arrays.asList('M', 'D', 'I') ) {
-
-                    final GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(header, "myRead", 0, locationOnReference, readLength);
-                    // create the CIGAR string
-                    read.setCigarString(buildTestCigarString(middleOp, lengthOfSoftClip, lengthOfFirstM, lengthOfIndel, readLength));
-
-                    // now, determine the byte array size
-                    final int expected = readLength - lengthOfSoftClip - (middleOp == 'I' ? lengthOfIndel : (middleOp == 'D' ? -lengthOfIndel : 0));
-                    final int indelBasesStart = middleOp != 'M' ? lengthOfFirstM : -1;
-
-                    tests.add(new Object[]{read.getCigar(), expected, middleOp, indelBasesStart, lengthOfIndel});
-                }
-            }
-        }
-
-        return tests.toArray(new Object[][]{});
-    }
-
-    @Test(enabled = !DEBUG, dataProvider = "ReadToAlignmentByteArrayDataProvider")
-    public void testReadToAlignmentByteArrayData(final Cigar cigar, final int expectedLength, final char middleOp, final int startOfIndelBases, final int lengthOfDeletion) {
-        final byte[] read = Utils.dupBytes((byte)'A', cigar.getReadLength());
-        final byte[] alignment = AlignmentUtils.readToAlignmentByteArray(cigar, read);
-
-        Assert.assertEquals(alignment.length, expectedLength, "Wrong alignment length detected for cigar " + cigar.toString());
-
-        for ( int i = 0; i < alignment.length; i++ ) {
-            final byte expectedBase;
-            if ( middleOp == 'D' && i >= startOfIndelBases && i < startOfIndelBases + lengthOfDeletion )
-                expectedBase = PileupElement.DELETION_BASE;
-            else if ( middleOp == 'I' && i == startOfIndelBases - 1 )
-                expectedBase = PileupElement.A_FOLLOWED_BY_INSERTION_BASE;
-            else
-                expectedBase = (byte)'A';
-            Assert.assertEquals(alignment[i], expectedBase, "Wrong base detected at position " + i);
-        }
-    }
-
-    //////////////////////////////////////////
-    // Test AlignmentUtils.leftAlignIndel() //
-    //////////////////////////////////////////
-
-    @DataProvider(name = "LeftAlignIndelDataProvider")
-    public Object[][] makeLeftAlignIndelDataProvider() {
-        List<Object[]> tests = new ArrayList<Object[]>();
-
-        final byte[] repeat1Reference = "ABCDEFGHIJKLMNOPXXXXXXXXXXABCDEFGHIJKLMNOP".getBytes();
-        final byte[] repeat2Reference = "ABCDEFGHIJKLMNOPXYXYXYXYXYABCDEFGHIJKLMNOP".getBytes();
-        final byte[] repeat3Reference = "ABCDEFGHIJKLMNOPXYZXYZXYZXYZABCDEFGHIJKLMN".getBytes();
-        final int referenceLength = repeat1Reference.length;
-
-        for ( int indelStart = 0; indelStart < repeat1Reference.length; indelStart++ ) {
-            for ( final int indelSize : Arrays.asList(0, 1, 2, 3, 4) ) {
-                for ( final char indelOp : Arrays.asList('D', 'I') ) {
-
-                    if ( indelOp == 'D' && indelStart + indelSize >= repeat1Reference.length )
-                        continue;
-
-                    final int readLength = referenceLength - (indelOp == 'D' ? indelSize : -indelSize);
-
-                    // create the original CIGAR string
-                    final GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(header, "myRead", 0, 1, readLength);
-                    read.setCigarString(buildTestCigarString(indelSize == 0 ? 'M' : indelOp, 0, indelStart, indelSize, readLength));
-                    final Cigar originalCigar = read.getCigar();
-
-                    final Cigar expectedCigar1 = makeExpectedCigar1(originalCigar, indelOp, indelStart, indelSize, readLength);
-                    final byte[] readString1 = makeReadString(repeat1Reference, indelOp, indelStart, indelSize, readLength, 1);
-                    tests.add(new Object[]{originalCigar, expectedCigar1, repeat1Reference, readString1, 1});
-
-                    final Cigar expectedCigar2 = makeExpectedCigar2(originalCigar, indelOp, indelStart, indelSize, readLength);
-                    final byte[] readString2 = makeReadString(repeat2Reference, indelOp, indelStart, indelSize, readLength, 2);
-                    tests.add(new Object[]{originalCigar, expectedCigar2, repeat2Reference, readString2, 2});
-
-                    final Cigar expectedCigar3 = makeExpectedCigar3(originalCigar, indelOp, indelStart, indelSize, readLength);
-                    final byte[] readString3 = makeReadString(repeat3Reference, indelOp, indelStart, indelSize, readLength, 3);
-                    tests.add(new Object[]{originalCigar, expectedCigar3, repeat3Reference, readString3, 3});
-                }
-            }
-        }
-
-        return tests.toArray(new Object[][]{});
-    }
-
-    private Cigar makeExpectedCigar1(final Cigar originalCigar, final char indelOp, final int indelStart, final int indelSize, final int readLength) {
-        if ( indelSize == 0 || indelStart < 17 || indelStart > (26 - (indelOp == 'D' ? indelSize : 0)) )
-            return originalCigar;
-
-        final GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(header, "myRead", 0, 1, readLength);
-        read.setCigarString(buildTestCigarString(indelOp, 0, 16, indelSize, readLength));
-        return read.getCigar();
-    }
-
-    private Cigar makeExpectedCigar2(final Cigar originalCigar, final char indelOp, final int indelStart, final int indelSize, final int readLength) {
-        if ( indelStart < 17 || indelStart > (26 - (indelOp == 'D' ? indelSize : 0)) )
-            return originalCigar;
-
-        final GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(header, "myRead", 0, 1, readLength);
-
-        if ( indelOp == 'I' && (indelSize == 1 || indelSize == 3) && indelStart % 2 == 1 )
-            read.setCigarString(buildTestCigarString(indelOp, 0, Math.max(indelStart - indelSize, 16), indelSize, readLength));
-        else if ( (indelSize == 2 || indelSize == 4) && (indelOp == 'D' || indelStart % 2 == 0) )
-            read.setCigarString(buildTestCigarString(indelOp, 0, 16, indelSize, readLength));
-        else
-            return originalCigar;
-
-        return read.getCigar();
-    }
-
-    private Cigar makeExpectedCigar3(final Cigar originalCigar, final char indelOp, final int indelStart, final int indelSize, final int readLength) {
-        if ( indelStart < 17 || indelStart > (28 - (indelOp == 'D' ? indelSize : 0)) )
-            return originalCigar;
-
-        final GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(header, "myRead", 0, 1, readLength);
-
-        if ( indelSize == 3 && (indelOp == 'D' || indelStart % 3 == 1) )
-            read.setCigarString(buildTestCigarString(indelOp, 0, 16, indelSize, readLength));
-        else if ( (indelOp == 'I' && indelSize == 4 && indelStart % 3 == 2) ||
-                (indelOp == 'I' && indelSize == 2 && indelStart % 3 == 0) ||
-                (indelOp == 'I' && indelSize == 1 && indelStart < 28 && indelStart % 3 == 2) )
-            read.setCigarString(buildTestCigarString(indelOp, 0, Math.max(indelStart - indelSize, 16), indelSize, readLength));
-        else
-            return originalCigar;
-
-        return read.getCigar();
-    }
-
-    private static byte[] makeReadString(final byte[] reference, final char indelOp, final int indelStart, final int indelSize, final int readLength, final int repeatLength) {
-        final byte[] readString = new byte[readLength];
-
-        if ( indelOp == 'D' && indelSize > 0 ) {
-            System.arraycopy(reference, 0, readString, 0, indelStart);
-            System.arraycopy(reference, indelStart + indelSize, readString, indelStart, readLength - indelStart);
-        } else if ( indelOp == 'I' && indelSize > 0 ) {
-            System.arraycopy(reference, 0, readString, 0, indelStart);
-            for ( int i = 0; i < indelSize; i++ ) {
-                if ( i % repeatLength == 0 )
-                    readString[indelStart + i] = 'X';
-                else if ( i % repeatLength == 1 )
-                    readString[indelStart + i] = 'Y';
-                else
-                    readString[indelStart + i] = 'Z';
-            }
-            System.arraycopy(reference, indelStart, readString, indelStart + indelSize, readLength - indelStart - indelSize);
-        } else {
-            System.arraycopy(reference, 0, readString, 0, readLength);
-        }
-
-        return readString;
-    }
-
-    @Test(enabled = !DEBUG, dataProvider = "LeftAlignIndelDataProvider")
-    public void testLeftAlignIndelData(final Cigar originalCigar, final Cigar expectedCigar, final byte[] reference, final byte[] read, final int repeatLength) {
-        final Cigar actualCigar = AlignmentUtils.leftAlignIndel(originalCigar, reference, read, 0, 0, true);
-        Assert.assertTrue(expectedCigar.equals(actualCigar), "Wrong left alignment detected for cigar " + originalCigar.toString() + " to " + actualCigar.toString() + " but expected " + expectedCigar.toString() + " with repeat length " + repeatLength);
-    }
-
-    //////////////////////////////////////////
-    // Test AlignmentUtils.trimCigarByReference() //
-    //////////////////////////////////////////
-
-    @DataProvider(name = "TrimCigarData")
-    public Object[][] makeTrimCigarData() {
-        List<Object[]> tests = new ArrayList<Object[]>();
-
-        for ( final CigarOperator op : Arrays.asList(CigarOperator.D, CigarOperator.EQ, CigarOperator.X, CigarOperator.M) ) {
-            for ( int myLength = 1; myLength < 6; myLength++ ) {
-                for ( int start = 0; start < myLength - 1; start++ ) {
-                    for ( int end = start; end < myLength; end++ ) {
-                        final int length = end - start + 1;
-
-                        final List<CigarOperator> padOps = Arrays.asList(CigarOperator.D, CigarOperator.M);
-                        for ( final CigarOperator padOp: padOps) {
-                            for ( int leftPad = 0; leftPad < 2; leftPad++ ) {
-                                for ( int rightPad = 0; rightPad < 2; rightPad++ ) {
-                                    tests.add(new Object[]{
-                                            (leftPad > 0 ? leftPad + padOp.toString() : "") + myLength + op.toString() + (rightPad > 0 ? rightPad + padOp.toString() : ""),
-                                            start + leftPad,
-                                            end + leftPad,
-                                            length + op.toString()});
-                                }
-                            }
-                        }
-                    }
-                }
-            }
-        }
-
-        for ( final int leftPad : Arrays.asList(0, 1, 2, 5) ) {
-            for ( final int rightPad : Arrays.asList(0, 1, 2, 5) ) {
-                final int length = leftPad + rightPad;
-                if ( length > 0 ) {
-                    for ( final int insSize : Arrays.asList(1, 10) ) {
-                        for ( int start = 0; start <= leftPad; start++ ) {
-                            for ( int stop = leftPad; stop < length; stop++ ) {
-                                final int leftPadRemaining = leftPad - start;
-                                final int rightPadRemaining = stop - leftPad + 1;
-                                final String insC = insSize + "I";
-                                tests.add(new Object[]{
-                                        leftPad + "M" + insC + rightPad + "M",
-                                        start,
-                                        stop,
-                                        (leftPadRemaining > 0 ? leftPadRemaining + "M" : "") + insC + (rightPadRemaining > 0 ? rightPadRemaining + "M" : "")
-                                });
-                            }
-                        }
-                    }
-                }
-            }
-        }
-
-        tests.add(new Object[]{"3M2D4M", 0, 8, "3M2D4M"});
-        tests.add(new Object[]{"3M2D4M", 2, 8, "1M2D4M"});
-        tests.add(new Object[]{"3M2D4M", 2, 6, "1M2D2M"});
-        tests.add(new Object[]{"3M2D4M", 3, 6, "2D2M"});
-        tests.add(new Object[]{"3M2D4M", 4, 6, "1D2M"});
-        tests.add(new Object[]{"3M2D4M", 5, 6, "2M"});
-        tests.add(new Object[]{"3M2D4M", 6, 6, "1M"});
-
-        tests.add(new Object[]{"2M3I4M", 0, 5, "2M3I4M"});
-        tests.add(new Object[]{"2M3I4M", 1, 5, "1M3I4M"});
-        tests.add(new Object[]{"2M3I4M", 1, 4, "1M3I3M"});
-        tests.add(new Object[]{"2M3I4M", 2, 4, "3I3M"});
-        tests.add(new Object[]{"2M3I4M", 2, 3, "3I2M"});
-        tests.add(new Object[]{"2M3I4M", 2, 2, "3I1M"});
-        tests.add(new Object[]{"2M3I4M", 3, 4, "2M"});
-        tests.add(new Object[]{"2M3I4M", 3, 3, "1M"});
-        tests.add(new Object[]{"2M3I4M", 4, 4, "1M"});
-
-        // this doesn't work -- but I'm not sure it should
-        //        tests.add(new Object[]{"2M3I4M", 2, 1, "3I"});
-
-        return tests.toArray(new Object[][]{});
-    }
-
-    @Test(dataProvider = "TrimCigarData", enabled = ! DEBUG)
-    public void testTrimCigar(final String cigarString, final int start, final int length, final String expectedCigarString) {
-        final Cigar cigar = TextCigarCodec.getSingleton().decode(cigarString);
-        final Cigar expectedCigar = TextCigarCodec.getSingleton().decode(expectedCigarString);
-        final Cigar actualCigar = AlignmentUtils.trimCigarByReference(cigar, start, length);
-        Assert.assertEquals(actualCigar, expectedCigar);
-    }
-
-    @DataProvider(name = "TrimCigarByBasesData")
-    public Object[][] makeTrimCigarByBasesData() {
-        List<Object[]> tests = new ArrayList<Object[]>();
-
-        tests.add(new Object[]{"2M3I4M", 0, 8, "2M3I4M"});
-        tests.add(new Object[]{"2M3I4M", 1, 8, "1M3I4M"});
-        tests.add(new Object[]{"2M3I4M", 2, 8, "3I4M"});
-        tests.add(new Object[]{"2M3I4M", 3, 8, "2I4M"});
-        tests.add(new Object[]{"2M3I4M", 4, 8, "1I4M"});
-        tests.add(new Object[]{"2M3I4M", 4, 7, "1I3M"});
-        tests.add(new Object[]{"2M3I4M", 4, 6, "1I2M"});
-        tests.add(new Object[]{"2M3I4M", 4, 5, "1I1M"});
-        tests.add(new Object[]{"2M3I4M", 4, 4, "1I"});
-        tests.add(new Object[]{"2M3I4M", 5, 5, "1M"});
-
-        tests.add(new Object[]{"2M2D2I", 0, 3, "2M2D2I"});
-        tests.add(new Object[]{"2M2D2I", 1, 3, "1M2D2I"});
-        tests.add(new Object[]{"2M2D2I", 2, 3, "2D2I"});
-        tests.add(new Object[]{"2M2D2I", 3, 3, "1I"});
-        tests.add(new Object[]{"2M2D2I", 2, 2, "2D1I"});
-        tests.add(new Object[]{"2M2D2I", 1, 2, "1M2D1I"});
-        tests.add(new Object[]{"2M2D2I", 0, 1, "2M2D"});
-        tests.add(new Object[]{"2M2D2I", 1, 1, "1M2D"});
-
-        return tests.toArray(new Object[][]{});
-    }
-
-    @Test(dataProvider = "TrimCigarByBasesData", enabled = !DEBUG)
-    public void testTrimCigarByBase(final String cigarString, final int start, final int length, final String expectedCigarString) {
-        final Cigar cigar = TextCigarCodec.getSingleton().decode(cigarString);
-        final Cigar expectedCigar = TextCigarCodec.getSingleton().decode(expectedCigarString);
-        final Cigar actualCigar = AlignmentUtils.trimCigarByBases(cigar, start, length);
-        Assert.assertEquals(actualCigar, expectedCigar);
-    }
-
-    //////////////////////////////////////////
-    // Test AlignmentUtils.applyCigarToCigar() //
-    //////////////////////////////////////////
-
-    @DataProvider(name = "ApplyCigarToCigarData")
-    public Object[][] makeApplyCigarToCigarData() {
-        List<Object[]> tests = new ArrayList<Object[]>();
-
-        for ( int i = 1; i < 5; i++ )
-            tests.add(new Object[]{i + "M", i + "M", i + "M"});
-
-//        * ref   : ACGTAC
-//        * hap   : AC---C  - 2M3D1M
-//        * read  : AC---C  - 3M
-//        * result: AG---C => 2M3D
-        tests.add(new Object[]{"3M", "2M3D1M", "2M3D1M"});
-
-//        * ref   : ACxG-TA
-//        * hap   : AC-G-TA  - 2M1D3M
-//        * read  : AC-GxTA  - 3M1I2M
-//        * result: AC-GxTA => 2M1D1M1I2M
-        tests.add(new Object[]{"3M1I2M", "2M1D3M", "2M1D1M1I2M"});
-
-//        * ref   : A-CGTA
-//        * hap   : A-CGTA  - 5M
-//        * read  : AxCGTA  - 1M1I4M
-//        * result: AxCGTA => 1M1I4M
-        tests.add(new Object[]{"1M1I4M", "5M", "1M1I4M"});
-
-//        * ref   : ACGTA
-//        * hap   : ACGTA  - 5M
-//        * read  : A--TA  - 1M2D2M
-//        * result: A--TA => 1M2D2M
-        tests.add(new Object[]{"1M2D2M", "5M", "1M2D2M"});
-
-//        * ref   : AC-GTA
-//        * hap   : ACxGTA  - 2M1I3M
-//        * read  : A--GTA  - 1M2D3M
-//        * result: A--GTA => 1M1D3M
-        tests.add(new Object[]{"108M14D24M2M18I29M92M1000M", "2M1I3M", "2M1I3M"});
-
-        return tests.toArray(new Object[][]{});
-    }
-
-    @Test(dataProvider = "ApplyCigarToCigarData", enabled = !DEBUG)
-    public void testApplyCigarToCigar(final String firstToSecondString, final String secondToThirdString, final String expectedCigarString) {
-        final Cigar firstToSecond = TextCigarCodec.getSingleton().decode(firstToSecondString);
-        final Cigar secondToThird = TextCigarCodec.getSingleton().decode(secondToThirdString);
-        final Cigar expectedCigar = TextCigarCodec.getSingleton().decode(expectedCigarString);
-        final Cigar actualCigar = AlignmentUtils.applyCigarToCigar(firstToSecond, secondToThird);
-        Assert.assertEquals(actualCigar, expectedCigar);
-    }
-
-    //////////////////////////////////////////
-    // Test AlignmentUtils.applyCigarToCigar() //
-    //////////////////////////////////////////
-
-    @DataProvider(name = "ReadOffsetFromCigarData")
-    public Object[][] makeReadOffsetFromCigarData() {
-        List<Object[]> tests = new ArrayList<Object[]>();
-
-        final int SIZE = 10;
-        for ( int i = 0; i < SIZE; i++ ) {
-            tests.add(new Object[]{SIZE + "M", i, i});
-        }
-
-        //          0123ii45
-        // ref    : ACGT--AC
-        // hap    : AC--xxAC (2M2D2I2M)
-        // ref.pos: 01    45
-        tests.add(new Object[]{"2M2D2I2M", 0, 0});
-        tests.add(new Object[]{"2M2D2I2M", 1, 1});
-        tests.add(new Object[]{"2M2D2I2M", 2, 4});
-        tests.add(new Object[]{"2M2D2I2M", 3, 4});
-        tests.add(new Object[]{"2M2D2I2M", 4, 4});
-        tests.add(new Object[]{"2M2D2I2M", 5, 5});
-
-        // 10132723 - 10132075 - 500 = 148
-        // what's the offset of the first match after the I?
-        // 108M + 14D + 24M + 2M = 148
-        // What's the offset of the first base that is after the I?
-        // 108M + 24M + 2M + 18I = 134M + 18I = 152 - 1 = 151
-        tests.add(new Object[]{"108M14D24M2M18I29M92M", 0, 0});
-        tests.add(new Object[]{"108M14D24M2M18I29M92M", 107, 107});
-        tests.add(new Object[]{"108M14D24M2M18I29M92M", 108, 108 + 14}); // first base after the deletion
-
-        tests.add(new Object[]{"108M14D24M2M18I29M92M", 132, 132+14}); // 2 before insertion
-        tests.add(new Object[]{"108M14D24M2M18I29M92M", 133, 133+14}); // last base before insertion
-
-        // entering into the insertion
-        for ( int i = 0; i < 18; i++ ) {
-            tests.add(new Object[]{"108M14D24M2M18I29M92M", 134+i, 148}); // inside insertion
-        }
-        tests.add(new Object[]{"108M14D24M2M18I29M92M", 134+18, 148}); // first base after insertion matches at same as insertion
-        tests.add(new Object[]{"108M14D24M2M18I29M92M", 134+18+1, 149});
-        tests.add(new Object[]{"108M14D24M2M18I29M92M", 134+18+2, 150});
-
-        return tests.toArray(new Object[][]{});
-    }
-
-    @Test(dataProvider = "ReadOffsetFromCigarData", enabled = !DEBUG)
-    public void testReadOffsetFromCigar(final String cigarString, final int startOnCigar, final int expectedOffset) {
-        final Cigar cigar = TextCigarCodec.getSingleton().decode(cigarString);
-        final int actualOffset = AlignmentUtils.calcFirstBaseMatchingReferenceInCigar(cigar, startOnCigar);
-        Assert.assertEquals(actualOffset, expectedOffset);
-    }
-
-    //////////////////////////////////////////
-    // Test AlignmentUtils.addCigarElements() //
-    //////////////////////////////////////////
-
-    @DataProvider(name = "AddCigarElementsData")
-    public Object[][] makeAddCigarElementsData() {
-        List<Object[]> tests = new ArrayList<Object[]>();
-
-        final int SIZE = 10;
-        for ( final CigarOperator op : Arrays.asList(CigarOperator.I, CigarOperator.M, CigarOperator.S, CigarOperator.EQ, CigarOperator.X)) {
-            for ( int start = 0; start < SIZE; start++ ) {
-                for ( int end = start; end < SIZE * 2; end ++ ) {
-                    for ( int pos = 0; pos < SIZE * 3; pos++ ) {
-                        int length = 0;
-                        for ( int i = 0; i < SIZE; i++ ) length += (i+pos) >= start && (i+pos) <= end ? 1 : 0;
-                        tests.add(new Object[]{SIZE + op.toString(), pos, start, end, length > 0 ? length + op.toString() : "*"});
-                    }
-                }
-            }
-        }
-
-        return tests.toArray(new Object[][]{});
-    }
-
-    @Test(dataProvider = "AddCigarElementsData", enabled = !DEBUG)
-    public void testAddCigarElements(final String cigarString, final int pos, final int start, final int end, final String expectedCigarString) {
-        final Cigar cigar = TextCigarCodec.getSingleton().decode(cigarString);
-        final CigarElement elt = cigar.getCigarElement(0);
-        final Cigar expectedCigar = TextCigarCodec.getSingleton().decode(expectedCigarString);
-
-        final List<CigarElement> elts = new LinkedList<CigarElement>();
-        final int actualEndPos = AlignmentUtils.addCigarElements(elts, pos, start, end, elt);
-
-        Assert.assertEquals(actualEndPos, pos + elt.getLength());
-        Assert.assertEquals(AlignmentUtils.consolidateCigar(new Cigar(elts)), expectedCigar);
-    }
-
-    @DataProvider(name = "GetBasesCoveringRefIntervalData")
-    public Object[][] makeGetBasesCoveringRefIntervalData() {
-        List<Object[]> tests = new ArrayList<Object[]>();
-
-        // matches
-        // 0123
-        // ACGT
-        tests.add(new Object[]{"ACGT", 0, 3, "4M", "ACGT"});
-        tests.add(new Object[]{"ACGT", 1, 3, "4M", "CGT"});
-        tests.add(new Object[]{"ACGT", 1, 2, "4M", "CG"});
-        tests.add(new Object[]{"ACGT", 1, 1, "4M", "C"});
-
-        // deletions
-        // 012345
-        // AC--GT
-        tests.add(new Object[]{"ACGT", 0, 5, "2M2D2M", "ACGT"});
-        tests.add(new Object[]{"ACGT", 1, 5, "2M2D2M", "CGT"});
-        tests.add(new Object[]{"ACGT", 2, 5, "2M2D2M", null});
-        tests.add(new Object[]{"ACGT", 3, 5, "2M2D2M", null});
-        tests.add(new Object[]{"ACGT", 4, 5, "2M2D2M", "GT"});
-        tests.add(new Object[]{"ACGT", 5, 5, "2M2D2M", "T"});
-        tests.add(new Object[]{"ACGT", 0, 4, "2M2D2M", "ACG"});
-        tests.add(new Object[]{"ACGT", 0, 3, "2M2D2M", null});
-        tests.add(new Object[]{"ACGT", 0, 2, "2M2D2M", null});
-        tests.add(new Object[]{"ACGT", 0, 1, "2M2D2M", "AC"});
-        tests.add(new Object[]{"ACGT", 0, 0, "2M2D2M", "A"});
-
-        // insertions
-        // 01--23
-        // ACTTGT
-        tests.add(new Object[]{"ACTTGT", 0, 3, "2M2I2M", "ACTTGT"});
-        tests.add(new Object[]{"ACTTGT", 1, 3, "2M2I2M", "CTTGT"});
-        tests.add(new Object[]{"ACTTGT", 2, 3, "2M2I2M", "GT"});
-        tests.add(new Object[]{"ACTTGT", 3, 3, "2M2I2M", "T"});
-        tests.add(new Object[]{"ACTTGT", 0, 2, "2M2I2M", "ACTTG"});
-        tests.add(new Object[]{"ACTTGT", 0, 1, "2M2I2M", "AC"});
-        tests.add(new Object[]{"ACTTGT", 1, 2, "2M2I2M", "CTTG"});
-        tests.add(new Object[]{"ACTTGT", 2, 2, "2M2I2M", "G"});
-        tests.add(new Object[]{"ACTTGT", 1, 1, "2M2I2M", "C"});
-
-        tests.add(new Object[]{"ACGT", 0, 1, "2M2I", "AC"});
-        tests.add(new Object[]{"ACGT", 1, 1, "2M2I", "C"});
-        tests.add(new Object[]{"ACGT", 0, 0, "2M2I", "A"});
-
-        return tests.toArray(new Object[][]{});
-    }
-
-    @Test(dataProvider = "GetBasesCoveringRefIntervalData", enabled = true)
-    public void testGetBasesCoveringRefInterval(final String basesString, final int refStart, final int refEnd, final String cigarString, final String expected) {
-        final byte[] actualBytes = AlignmentUtils.getBasesCoveringRefInterval(refStart, refEnd, basesString.getBytes(), 0, TextCigarCodec.getSingleton().decode(cigarString));
-        if ( expected == null )
-            Assert.assertNull(actualBytes);
-        else
-            Assert.assertEquals(new String(actualBytes), expected);
-    }
-
-    @DataProvider(name = "StartsOrEndsWithInsertionOrDeletionData")
-    public Object[][] makeStartsOrEndsWithInsertionOrDeletionData() {
-        List<Object[]> tests = new ArrayList<Object[]>();
-
-        tests.add(new Object[]{"2M", false});
-        tests.add(new Object[]{"1D2M", true});
-        tests.add(new Object[]{"2M1D", true});
-        tests.add(new Object[]{"2M1I", true});
-        tests.add(new Object[]{"1I2M", true});
-        tests.add(new Object[]{"1M1I2M", false});
-        tests.add(new Object[]{"1M1D2M", false});
-        tests.add(new Object[]{"1M1I2M1I", true});
-        tests.add(new Object[]{"1M1I2M1D", true});
-        tests.add(new Object[]{"1D1M1I2M", true});
-        tests.add(new Object[]{"1I1M1I2M", true});
-        tests.add(new Object[]{"1M1I2M1I1M", false});
-        tests.add(new Object[]{"1M1I2M1D1M", false});
-        tests.add(new Object[]{"1M1D2M1D1M", false});
-
-        return tests.toArray(new Object[][]{});
-    }
-
-    @Test(dataProvider = "StartsOrEndsWithInsertionOrDeletionData", enabled = true)
-    public void testStartsOrEndsWithInsertionOrDeletion(final String cigar, final boolean expected) {
-        Assert.assertEquals(AlignmentUtils.startsOrEndsWithInsertionOrDeletion(TextCigarCodec.getSingleton().decode(cigar)), expected);
-    }
-
-    @Test(dataProvider = "StartsOrEndsWithInsertionOrDeletionData", enabled = true)
-    public void testRemoveTrailingDeletions(final String cigar, final boolean expected) {
-
-        final Cigar originalCigar = TextCigarCodec.getSingleton().decode(cigar);
-        final Cigar newCigar = AlignmentUtils.removeTrailingDeletions(originalCigar);
-
-        Assert.assertEquals(originalCigar.equals(newCigar), !cigar.endsWith("D"));
-    }
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/sam/ArtificialBAMBuilderUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/sam/ArtificialBAMBuilderUnitTest.java
deleted file mode 100644
index b7042a6..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/sam/ArtificialBAMBuilderUnitTest.java
+++ /dev/null
@@ -1,121 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.sam;
-
-import htsjdk.samtools.SAMFileReader;
-import htsjdk.samtools.SAMRecord;
-import org.broadinstitute.gatk.utils.BaseTest;
-import org.testng.Assert;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
-
-import java.io.File;
-import java.util.Arrays;
-import java.util.Iterator;
-import java.util.LinkedList;
-import java.util.List;
-
-/**
- * Created with IntelliJ IDEA.
- * User: depristo
- * Date: 1/15/13
- * Time: 3:49 PM
- * To change this template use File | Settings | File Templates.
- */
-public class ArtificialBAMBuilderUnitTest extends BaseTest {
-    @DataProvider(name = "ArtificialBAMBuilderUnitTestProvider")
-    public Object[][] makeArtificialBAMBuilderUnitTestProvider() {
-        final List<Object[]> tests = new LinkedList<Object[]>();
-
-        final List<Integer> starts = Arrays.asList(
-                1, // very start of the chromosome
-                ArtificialBAMBuilder.BAM_SHARD_SIZE - 100, // right before the shard boundary
-                ArtificialBAMBuilder.BAM_SHARD_SIZE + 100 // right after the shard boundary
-        );
-
-        for ( final int readLength : Arrays.asList(10, 20) ) {
-            for ( final int skips : Arrays.asList(0, 1, 10) ) {
-                for ( final int start : starts ) {
-                    for ( final int nSamples : Arrays.asList(1, 2) ) {
-                        for ( final int nReadsPerLocus : Arrays.asList(1, 10) ) {
-                            for ( final int nLoci : Arrays.asList(10, 100, 1000) ) {
-                                final ArtificialBAMBuilder bamBuilder = new ArtificialBAMBuilder(nReadsPerLocus, nLoci);
-                                bamBuilder.setReadLength(readLength);
-                                bamBuilder.setSkipNLoci(skips);
-                                bamBuilder.setAlignmentStart(start);
-                                bamBuilder.createAndSetHeader(nSamples);
-                                tests.add(new Object[]{bamBuilder, readLength, skips, start, nSamples, nReadsPerLocus, nLoci});
-                            }
-                        }
-                    }
-                }
-            }
-        }
-
-        return tests.toArray(new Object[][]{});
-    }
-
-    @Test(dataProvider = "ArtificialBAMBuilderUnitTestProvider")
-    public void testBamProvider(final ArtificialBAMBuilder bamBuilder, int readLength, int skips, int start, int nSamples, int nReadsPerLocus, int nLoci) {
-        Assert.assertEquals(bamBuilder.getReadLength(), readLength);
-        Assert.assertEquals(bamBuilder.getSkipNLoci(), skips);
-        Assert.assertEquals(bamBuilder.getAlignmentStart(), start);
-        Assert.assertEquals(bamBuilder.getNSamples(), nSamples);
-        Assert.assertEquals(bamBuilder.getnReadsPerLocus(), nReadsPerLocus);
-        Assert.assertEquals(bamBuilder.getnLoci(), nLoci);
-
-        final List<GATKSAMRecord> reads = bamBuilder.makeReads();
-        Assert.assertEquals(reads.size(), bamBuilder.expectedNumberOfReads());
-        for ( final GATKSAMRecord read : reads ) {
-            assertGoodRead(read, bamBuilder);
-        }
-
-        final File bam = bamBuilder.makeTemporarilyBAMFile();
-        final SAMFileReader reader = new SAMFileReader(bam);
-        Assert.assertTrue(reader.hasIndex());
-        final Iterator<SAMRecord> bamIt = reader.iterator();
-        int nReadsFromBam = 0;
-        int lastStart = -1;
-        while ( bamIt.hasNext() ) {
-            final SAMRecord read = bamIt.next();
-            assertGoodRead(read, bamBuilder);
-            nReadsFromBam++;
-            Assert.assertTrue(read.getAlignmentStart() >= lastStart);
-            lastStart = read.getAlignmentStart();
-        }
-        Assert.assertEquals(nReadsFromBam, bamBuilder.expectedNumberOfReads());
-    }
-
-    private void assertGoodRead(final SAMRecord read, final ArtificialBAMBuilder bamBuilder) {
-        Assert.assertEquals(read.getReadLength(), bamBuilder.getReadLength());
-        Assert.assertEquals(read.getReadBases().length, bamBuilder.getReadLength());
-        Assert.assertEquals(read.getBaseQualities().length, bamBuilder.getReadLength());
-        Assert.assertTrue(read.getAlignmentStart() >= bamBuilder.getAlignmentStart());
-        Assert.assertNotNull(read.getReadGroup());
-    }
-}
-
-
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/sam/ArtificialPatternedSAMIteratorUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/sam/ArtificialPatternedSAMIteratorUnitTest.java
deleted file mode 100644
index fe5fba7..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/sam/ArtificialPatternedSAMIteratorUnitTest.java
+++ /dev/null
@@ -1,122 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.sam;
-
-import org.broadinstitute.gatk.utils.BaseTest;
-import static org.testng.Assert.assertTrue;
-import static org.testng.Assert.fail;
-import org.testng.annotations.BeforeMethod;
-import org.testng.annotations.Test;
-import htsjdk.samtools.SAMFileHeader;
-import htsjdk.samtools.SAMRecord;
-
-
-/*
- * Copyright (c) 2009 The Broad Institute
- *
- * Permission is hereby granted, free of charge, to any person
- * obtaining a copy of this software and associated documentation
- * files (the "Software"), to deal in the Software without
- * restriction, including without limitation the rights to use,
- * copy, modify, merge, publish, distribute, sublicense, and/or sell
- * copies of the Software, and to permit persons to whom the
- * Software is furnished to do so, subject to the following
- * conditions:
- *
- * The above copyright notice and this permission notice shall be
- * included in all copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
- * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
- * OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
- * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
- * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
- * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
- * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
- * OTHER DEALINGS IN THE SOFTWARE.
- */
-
-/**
- * 
- * @author aaron 
- * 
- * Class ArtificialPatternedSAMIteratorUnitTest
- *
- * tests ArtificialPatternedSAMIterator, making sure that if you specify in order
- * you get reads in order, and if you specify out of order you get them out of order.  
- */
-public class ArtificialPatternedSAMIteratorUnitTest extends BaseTest {
-
-    // our artifical patterned iterator
-    ArtificialPatternedSAMIterator iter;
-
-    private int startingChr = 1;
-    private int endingChr = 2;
-    private int readCount = 100;
-    private int DEFAULT_READ_LENGTH = ArtificialSAMUtils.DEFAULT_READ_LENGTH;
-    SAMFileHeader header;
-
-    @BeforeMethod
-    public void before() {
-        header = ArtificialSAMUtils.createArtificialSamHeader(( endingChr - startingChr ) + 1, startingChr, readCount + DEFAULT_READ_LENGTH);
-
-    }
-    @Test
-    public void testInOrder() {
-        iter = new ArtificialPatternedSAMIterator(startingChr,endingChr,readCount,0,header, ArtificialPatternedSAMIterator.PATTERN.IN_ORDER_READS);
-        if (!iter.hasNext()) {
-            fail("no reads in the ArtificialPatternedSAMIterator");
-        }
-        SAMRecord last = iter.next();
-        while (iter.hasNext()) {
-            SAMRecord rec = iter.next();
-            if (!(rec.getReferenceIndex() > last.getReferenceIndex()) && (rec.getAlignmentStart() <= last.getAlignmentStart())) {
-                fail("read " + rec.getReadName() + " out of order compared to last read, " + last.getReadName());
-            }
-            last = rec;
-        }
-
-    }
-    @Test
-    public void testOutOfOrder() {
-        int outOfOrderCount = 0;
-        iter = new ArtificialPatternedSAMIterator(startingChr,endingChr,readCount,0,header, ArtificialPatternedSAMIterator.PATTERN.RANDOM_READS);
-        if (!iter.hasNext()) {
-            fail("no reads in the ArtificialPatternedSAMIterator");
-        }
-        SAMRecord last = iter.next();
-        while (iter.hasNext()) {
-            SAMRecord rec = iter.next();
-            if (!(rec.getReferenceIndex() > last.getReferenceIndex()) && (rec.getAlignmentStart() <= last.getAlignmentStart())) {
-                ++outOfOrderCount;
-            }
-            last = rec;
-        }
-        assertTrue(outOfOrderCount > 0);
-    }
-
-
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/sam/ArtificialSAMFileWriterUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/sam/ArtificialSAMFileWriterUnitTest.java
deleted file mode 100644
index d527624..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/sam/ArtificialSAMFileWriterUnitTest.java
+++ /dev/null
@@ -1,120 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.sam;
-
-import static org.testng.Assert.assertEquals;
-import static org.testng.Assert.assertTrue;
-import org.testng.annotations.BeforeMethod;
-import org.testng.annotations.Test;
-import org.broadinstitute.gatk.utils.BaseTest;
-import htsjdk.samtools.SAMRecord;
-import htsjdk.samtools.SAMFileHeader;
-
-import java.util.ArrayList;
-import java.util.List;
-
-
-/*
- * Copyright (c) 2009 The Broad Institute
- *
- * Permission is hereby granted, free of charge, to any person
- * obtaining a copy of this software and associated documentation
- * files (the "Software"), to deal in the Software without
- * restriction, including without limitation the rights to use,
- * copy, modify, merge, publish, distribute, sublicense, and/or sell
- * copies of the Software, and to permit persons to whom the
- * Software is furnished to do so, subject to the following
- * conditions:
- *
- * The above copyright notice and this permission notice shall be
- * included in all copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
- * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
- * OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
- * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
- * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
- * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
- * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
- * OTHER DEALINGS IN THE SOFTWARE.
- */
-
-/**
- * @author aaron
- *         <p/>
- *         Class ArtificialGATKSAMFileWriter
- *         <p/>
- *         Test out the ArtificialGATKSAMFileWriter class
- */
-public class ArtificialSAMFileWriterUnitTest extends BaseTest {
-
-    /** the artificial sam writer */
-    private ArtificialGATKSAMFileWriter writer;
-    private SAMFileHeader header;
-    private final int startChr = 1;
-    private final int numChr = 2;
-    private final int chrSize = 100;
-
-    @BeforeMethod
-    public void before() {
-        writer = new ArtificialGATKSAMFileWriter();
-        header = ArtificialSAMUtils.createArtificialSamHeader(numChr, startChr, chrSize);
-    }
-
-    @Test
-    public void testBasicCount() {
-        for (int x = 1; x <= 100; x++) {
-            SAMRecord rec = ArtificialSAMUtils.createArtificialRead(header, String.valueOf(x), 1, x, ArtificialSAMUtils.DEFAULT_READ_LENGTH);
-            writer.addAlignment(rec);
-        }
-        assertEquals(writer.getRecords().size(), 100);
-
-    }
-
-    @Test
-    public void testReadName() {
-        List<String> names = new ArrayList<String>();
-
-        for (int x = 1; x <= 100; x++) {
-            names.add(String.valueOf(x));
-            SAMRecord rec = ArtificialSAMUtils.createArtificialRead(header, String.valueOf(x), 1, x, ArtificialSAMUtils.DEFAULT_READ_LENGTH);
-            writer.addAlignment(rec);
-        }
-        assertEquals(writer.getRecords().size(), 100);
-
-        // check the names
-        for (int x = 0; x < 100; x++) {
-            assertTrue(names.get(x).equals(writer.getRecords().get(x).getReadName()));
-        }
-
-    }
-
-    @Test
-    public void testClose() {
-        writer.close();
-        assertTrue(writer.isClosed());
-    }
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/sam/ArtificialSAMQueryIteratorUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/sam/ArtificialSAMQueryIteratorUnitTest.java
deleted file mode 100644
index 32409c6..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/sam/ArtificialSAMQueryIteratorUnitTest.java
+++ /dev/null
@@ -1,138 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.sam;
-
-import org.broadinstitute.gatk.utils.BaseTest;
-import static org.testng.Assert.assertEquals;
-import org.testng.annotations.Test;
-import htsjdk.samtools.SAMRecord;
-
-
-/*
- * Copyright (c) 2009 The Broad Institute
- *
- * Permission is hereby granted, free of charge, to any person
- * obtaining a copy of this software and associated documentation
- * files (the "Software"), to deal in the Software without
- * restriction, including without limitation the rights to use,
- * copy, modify, merge, publish, distribute, sublicense, and/or sell
- * copies of the Software, and to permit persons to whom the
- * Software is furnished to do so, subject to the following
- * conditions:
- *
- * The above copyright notice and this permission notice shall be
- * included in all copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
- * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
- * OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
- * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
- * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
- * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
- * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
- * OTHER DEALINGS IN THE SOFTWARE.
- */
-
-/**
- * @author aaron
- *         <p/>
- *         Class ArtificialSAMQueryIteratorUnitTest
- *         <p/>
- *         a test for the ArtificialSAMQueryIterator class.
- */
-public class ArtificialSAMQueryIteratorUnitTest extends BaseTest {
-
-    @Test
-    public void testWholeChromosomeQuery() {
-        ArtificialSAMQueryIterator iter = ArtificialSAMUtils.queryReadIterator(1, 2, 100);
-        iter.queryContained("chr1", 1, -1);
-        int count = 0;
-        while (iter.hasNext()) {
-            SAMRecord rec = iter.next();
-            count++;
-        }
-        assertEquals(count, 100);
-
-    }
-
-    @Test
-    public void testContainedQueryStart() {
-        ArtificialSAMQueryIterator iter = ArtificialSAMUtils.queryReadIterator(1, 2, 100);
-        iter.queryContained("chr1", 1, 50);
-        int count = 0;
-        while (iter.hasNext()) {
-            SAMRecord rec = iter.next();
-            count++;
-        }
-        assertEquals(count, 1);
-
-    }
-
-    @Test
-    public void testOverlappingQueryStart() {
-        ArtificialSAMQueryIterator iter = ArtificialSAMUtils.queryReadIterator(1, 2, 100);
-        iter.queryOverlapping("chr1", 1, 50);
-        int count = 0;
-        while (iter.hasNext()) {
-            SAMRecord rec = iter.next();
-            count++;
-        }
-        assertEquals(count, 50);
-
-    }
-
-    @Test
-    public void testContainedQueryMiddle() {
-        ArtificialSAMQueryIterator iter = ArtificialSAMUtils.queryReadIterator(1, 2, 100);
-        iter.queryContained("chr1", 25, 74);
-        int count = 0;
-        while (iter.hasNext()) {
-            SAMRecord rec = iter.next();
-            count++;
-        }
-        assertEquals(count, 1);
-
-    }
-
-    @Test
-    public void testOverlappingQueryMiddle() {
-        ArtificialSAMQueryIterator iter = ArtificialSAMUtils.queryReadIterator(1, 2, 100);
-        iter.queryOverlapping("chr1", 25, 74);
-        int count = 0;
-        while (iter.hasNext()) {
-            SAMRecord rec = iter.next();
-            count++;
-        }
-        assertEquals(count, 50);
-
-    }
-
-    @Test(expectedExceptions=IllegalArgumentException.class)
-    public void testUnknownChromosome() {
-        ArtificialSAMQueryIterator iter = ArtificialSAMUtils.queryReadIterator(1, 2, 100);
-        iter.queryOverlapping("chr621", 25, 74);         
-    }
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/sam/ArtificialSAMUtilsUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/sam/ArtificialSAMUtilsUnitTest.java
deleted file mode 100644
index 48ad212..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/sam/ArtificialSAMUtilsUnitTest.java
+++ /dev/null
@@ -1,108 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.sam;
-
-import org.broadinstitute.gatk.utils.BaseTest;
-import org.broadinstitute.gatk.engine.iterators.GATKSAMIterator;
-import static org.testng.Assert.assertEquals;
-import static org.testng.Assert.assertTrue;
-import static org.testng.Assert.fail;
-import org.testng.annotations.Test;
-import htsjdk.samtools.SAMRecord;
-
-/**
- * Created by IntelliJ IDEA.
- * User: aaronmckenna
- * Date: Jun 3, 2009
- * Time: 3:09:34 AM
- * To change this template use File | Settings | File Templates.
- */
-public class ArtificialSAMUtilsUnitTest extends BaseTest {
-
-
-    @Test
-    public void basicReadIteratorTest() {
-        GATKSAMIterator iter = ArtificialSAMUtils.mappedReadIterator(1, 100, 100);
-        int count = 0;
-        while (iter.hasNext()) {
-            SAMRecord rec = iter.next();
-            count++;
-        }
-        assertEquals(count, 100 * 100);
-    }
-
-    @Test
-    public void tenPerChromosome() {
-        GATKSAMIterator iter = ArtificialSAMUtils.mappedReadIterator(1, 100, 10);
-        int count = 0;
-        while (iter.hasNext()) {
-            SAMRecord rec = iter.next();
-
-            assertEquals(Integer.valueOf(Math.round(count / 10)), rec.getReferenceIndex());
-            count++;
-        }
-        assertEquals(count, 100 * 10);
-    }
-
-    @Test
-    public void onePerChromosome() {
-        GATKSAMIterator iter = ArtificialSAMUtils.mappedReadIterator(1, 100, 1);
-        int count = 0;
-        while (iter.hasNext()) {
-            SAMRecord rec = iter.next();
-
-            assertEquals(Integer.valueOf(count), rec.getReferenceIndex());
-            count++;
-        }
-        assertEquals(count, 100 * 1);
-    }
-
-    @Test
-    public void basicUnmappedIteratorTest() {
-        GATKSAMIterator iter = ArtificialSAMUtils.mappedAndUnmappedReadIterator(1, 100, 100, 1000);
-        int count = 0;
-        for (int x = 0; x < (100* 100); x++ ) {
-            if (!iter.hasNext()) {
-                fail ("we didn't get the expected number of reads");
-            }
-            SAMRecord rec = iter.next();
-            assertTrue(rec.getReferenceIndex() >= 0);
-            count++;
-        }
-        assertEquals(100 * 100, count);
-
-        // now we should have 1000 unmapped reads
-        count = 0;
-        while (iter.hasNext()) {
-            SAMRecord rec = iter.next();
-            assertTrue(rec.getReferenceIndex() < 0);
-            count++;
-        }
-        assertEquals(count, 1000);
-    }
-
-  
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/sam/ArtificialSingleSampleReadStreamUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/sam/ArtificialSingleSampleReadStreamUnitTest.java
deleted file mode 100644
index 271a75a..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/sam/ArtificialSingleSampleReadStreamUnitTest.java
+++ /dev/null
@@ -1,186 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.sam;
-
-import htsjdk.samtools.SAMFileHeader;
-import htsjdk.samtools.SAMReadGroupRecord;
-import org.broadinstitute.gatk.engine.GenomeAnalysisEngine;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-import org.testng.annotations.Test;
-import org.testng.annotations.DataProvider;
-
-import org.broadinstitute.gatk.utils.BaseTest;
-
-public class ArtificialSingleSampleReadStreamUnitTest extends BaseTest {
-
-    private static class ArtificialSingleSampleReadStreamTest extends TestDataProvider {
-        private ArtificialSingleSampleReadStream stream;
-        private ArtificialSingleSampleReadStreamAnalyzer streamAnalyzer;
-
-        public ArtificialSingleSampleReadStreamTest( ArtificialSingleSampleReadStream stream ) {
-            super(ArtificialSingleSampleReadStreamTest.class);
-
-            this.stream = stream;
-
-            setName(String.format("%s: numContigs=%d stacksPerContig=%d readsPerStack=%d-%d distanceBetweenStacks=%d-%d readLength=%d-%d unmappedReads=%d",
-                    getClass().getSimpleName(),
-                    stream.getNumContigs(),
-                    stream.getNumStacksPerContig(),
-                    stream.getMinReadsPerStack(),
-                    stream.getMaxReadsPerStack(),
-                    stream.getMinDistanceBetweenStacks(),
-                    stream.getMaxDistanceBetweenStacks(),
-                    stream.getMinReadLength(),
-                    stream.getMaxReadLength(),
-                    stream.getNumUnmappedReads()));
-        }
-
-        public void run() {
-            streamAnalyzer= new ArtificialSingleSampleReadStreamAnalyzer(stream);
-
-            streamAnalyzer.analyze(stream);
-
-            // Check whether the observed properties of the stream match its nominal properties
-            streamAnalyzer.validate();
-        }
-    }
-
-    @DataProvider(name = "ArtificialSingleSampleReadStreamTestDataProvider")
-    public Object[][] createArtificialSingleSampleReadStreamTests() {
-        SAMFileHeader header = ArtificialSAMUtils.createArtificialSamHeader(3, 1, 10000);
-        String readGroupID = "testReadGroup";
-        SAMReadGroupRecord readGroup = new SAMReadGroupRecord(readGroupID);
-        readGroup.setSample("testSample");
-        header.addReadGroup(readGroup);
-
-        GenomeAnalysisEngine.resetRandomGenerator();
-
-        // brute force testing!
-        for ( int numContigs = 0; numContigs <= 2; numContigs++ ) {
-            for ( int stacksPerContig = 0; stacksPerContig <= 2; stacksPerContig++ ) {
-                for ( int minReadsPerStack = 1; minReadsPerStack <= 2; minReadsPerStack++ ) {
-                    for ( int maxReadsPerStack = 1; maxReadsPerStack <= 3; maxReadsPerStack++ ) {
-                        for ( int minDistanceBetweenStacks = 1; minDistanceBetweenStacks <= 2; minDistanceBetweenStacks++ ) {
-                            for ( int maxDistanceBetweenStacks = 1; maxDistanceBetweenStacks <= 3; maxDistanceBetweenStacks++ ) {
-                                for ( int minReadLength = 1; minReadLength <= 2; minReadLength++ ) {
-                                    for ( int maxReadLength = 1; maxReadLength <= 3; maxReadLength++ ) {
-                                        for ( int numUnmappedReads = 0; numUnmappedReads <= 2; numUnmappedReads++ ) {
-                                            // Only test sane combinations here
-                                            if ( minReadsPerStack <= maxReadsPerStack &&
-                                                 minDistanceBetweenStacks <= maxDistanceBetweenStacks &&
-                                                 minReadLength <= maxReadLength &&
-                                                 ((numContigs > 0 && stacksPerContig > 0) || (numContigs == 0 && stacksPerContig == 0)) ) {
-
-                                                new ArtificialSingleSampleReadStreamTest(new ArtificialSingleSampleReadStream(header,
-                                                                                                                              readGroupID,
-                                                                                                                              numContigs,
-                                                                                                                              stacksPerContig,
-                                                                                                                              minReadsPerStack,
-                                                                                                                              maxReadsPerStack,
-                                                                                                                              minDistanceBetweenStacks,
-                                                                                                                              maxDistanceBetweenStacks,
-                                                                                                                              minReadLength,
-                                                                                                                              maxReadLength,
-                                                                                                                              numUnmappedReads));
-                                            }
-                                        }
-                                    }
-                                }
-                            }
-                        }
-                    }
-                }
-            }
-        }
-
-        return ArtificialSingleSampleReadStreamTest.getTests(ArtificialSingleSampleReadStreamTest.class);
-    }
-
-    @Test(dataProvider = "ArtificialSingleSampleReadStreamTestDataProvider")
-    public void testArtificialSingleSampleReadStream( ArtificialSingleSampleReadStreamTest test ) {
-        logger.warn("Running test: " + test);
-
-        GenomeAnalysisEngine.resetRandomGenerator();
-        test.run();
-    }
-
-    @DataProvider(name = "ArtificialSingleSampleReadStreamInvalidArgumentsTestDataProvider")
-    public Object[][] createInvalidArgumentsTests() {
-        SAMFileHeader header = ArtificialSAMUtils.createArtificialSamHeader(3, 1, 10000);
-        String readGroupID = "testReadGroup";
-        header.addReadGroup(new SAMReadGroupRecord(readGroupID));
-
-        return new Object[][] {
-            {"testNullHeader", null, readGroupID, 1, 1, 1, 2, 1, 2, 1, 2, 0},
-            {"testNullReadGroup", header, null, 1, 1, 1, 2, 1, 2, 1, 2, 0},
-            {"testInvalidReadGroup", header, "foo", 1, 1, 1, 2, 1, 2, 1, 2, 0},
-            {"testInvalidNumContigs", header, readGroupID, -1, 1, 1, 2, 1, 2, 1, 2, 0},
-            {"testInvalidNumStacksPerContig", header, readGroupID, 1, -1, 1, 2, 1, 2, 1, 2, 0},
-            {"test0ContigsNon0StacksPerContig", header, readGroupID, 0, 1, 1, 2, 1, 2, 1, 2, 0},
-            {"testNon0Contigs0StacksPerContig", header, readGroupID, 1, 0, 1, 2, 1, 2, 1, 2, 0},
-            {"testInvalidMinReadsPerStack", header, readGroupID, 1, 1, -1, 2, 1, 2, 1, 2, 0},
-            {"testInvalidMaxReadsPerStack", header, readGroupID, 1, 1, 1, -2, 1, 2, 1, 2, 0},
-            {"testInvalidMinDistanceBetweenStacks", header, readGroupID, 1, 1, 1, 2, -1, 2, 1, 2, 0},
-            {"testInvalidMaxDistanceBetweenStacks", header, readGroupID, 1, 1, 1, 2, 1, -2, 1, 2, 0},
-            {"testInvalidMinReadLength", header, readGroupID, 1, 1, 1, 2, 1, 2, -1, 2, 0},
-            {"testInvalidMaxReadLength", header, readGroupID, 1, 1, 1, 2, 1, 2, 1, -2, 0},
-            {"testInvalidReadsPerStackRange", header, readGroupID, 1, 1, 2, 1, 1, 2, 1, 2, 0},
-            {"testInvalidDistanceBetweenStacksRange", header, readGroupID, 1, 1, 1, 2, 2, 1, 1, 2, 0},
-            {"testInvalidReadLengthRange", header, readGroupID, 1, 1, 1, 2, 1, 2, 2, 1, 0},
-            {"testInvalidNumUnmappedReads", header, readGroupID, 1, 1, 1, 2, 1, 2, 1, 2, -1},
-        };
-    }
-
-    @Test(dataProvider = "ArtificialSingleSampleReadStreamInvalidArgumentsTestDataProvider",
-          expectedExceptions = ReviewedGATKException.class)
-    public void testInvalidArguments( String testName,
-                                      SAMFileHeader header,
-                                      String readGroupID,
-                                      int numContigs,
-                                      int numStacksPerContig,
-                                      int minReadsPerStack,
-                                      int maxReadsPerStack,
-                                      int minDistanceBetweenStacks,
-                                      int maxDistanceBetweenStacks,
-                                      int minReadLength,
-                                      int maxReadLength,
-                                      int numUnmappedReads ) {
-
-        logger.warn("Running test: " + testName);
-
-        ArtificialSingleSampleReadStream stream = new ArtificialSingleSampleReadStream(header,
-                                                                                       readGroupID,
-                                                                                       numContigs,
-                                                                                       numStacksPerContig,
-                                                                                       minReadsPerStack,
-                                                                                       maxReadsPerStack,
-                                                                                       minDistanceBetweenStacks,
-                                                                                       maxDistanceBetweenStacks,
-                                                                                       minReadLength,
-                                                                                       maxReadLength,
-                                                                                       numUnmappedReads);
-    }
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/sam/GATKSAMRecordUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/sam/GATKSAMRecordUnitTest.java
deleted file mode 100644
index e703c52..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/sam/GATKSAMRecordUnitTest.java
+++ /dev/null
@@ -1,78 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.sam;
-
-import htsjdk.samtools.SAMFileHeader;
-import org.broadinstitute.gatk.utils.BaseTest;
-import org.testng.Assert;
-import org.testng.annotations.BeforeClass;
-import org.testng.annotations.Test;
-
-
-public class GATKSAMRecordUnitTest extends BaseTest {
-    GATKSAMRecord read;
-    final static String BASES = "ACTG";
-    final static String QUALS = "!+5?";
-
-    @BeforeClass
-    public void init() {
-        SAMFileHeader header = ArtificialSAMUtils.createArtificialSamHeader(1, 1, 1000);
-        read = ArtificialSAMUtils.createArtificialRead(header, "read1", 0, 1, BASES.length());
-        read.setReadUnmappedFlag(true);
-        read.setReadBases(new String(BASES).getBytes());
-        read.setBaseQualityString(new String(QUALS));
-    }
-
-    @Test
-    public void testStrandlessReads() {
-        final byte [] bases = {'A', 'A', 'A', 'A', 'A', 'A', 'A', 'A'};
-        final byte [] quals = {20 , 20 , 20 , 20 , 20 , 20 , 20 , 20 };
-        GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(bases, quals, "6M");
-        Assert.assertEquals(read.isStrandless(), false);
-
-        read.setReadNegativeStrandFlag(false);
-        Assert.assertEquals(read.isStrandless(), false);
-        Assert.assertEquals(read.getReadNegativeStrandFlag(), false);
-
-        read.setReadNegativeStrandFlag(true);
-        Assert.assertEquals(read.isStrandless(), false);
-        Assert.assertEquals(read.getReadNegativeStrandFlag(), true);
-
-        read.setReadNegativeStrandFlag(true);
-        read.setIsStrandless(true);
-        Assert.assertEquals(read.isStrandless(), true);
-        Assert.assertEquals(read.getReadNegativeStrandFlag(), false, "negative strand flag should return false even through its set for a strandless read");
-    }
-
-    @Test(expectedExceptions = IllegalStateException.class)
-    public void testStrandlessReadsFailSetStrand() {
-        final byte [] bases = {'A', 'A', 'A', 'A', 'A', 'A', 'A', 'A'};
-        final byte [] quals = {20 , 20 , 20 , 20 , 20 , 20 , 20 , 20 };
-        GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(bases, quals, "6M");
-        read.setIsStrandless(true);
-        read.setReadNegativeStrandFlag(true);
-    }
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/sam/MisencodedBaseQualityUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/sam/MisencodedBaseQualityUnitTest.java
deleted file mode 100644
index 207e01a..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/sam/MisencodedBaseQualityUnitTest.java
+++ /dev/null
@@ -1,96 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.sam;
-
-
-import htsjdk.samtools.SAMFileHeader;
-import org.broadinstitute.gatk.utils.BaseTest;
-import org.broadinstitute.gatk.utils.exceptions.UserException;
-import org.testng.Assert;
-import org.testng.annotations.BeforeMethod;
-import org.testng.annotations.Test;
-
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-
-/**
- * Basic unit test for misencoded quals
- */
-public class MisencodedBaseQualityUnitTest extends BaseTest {
-
-    private static final String readBases = "AAAAAAAAAA";
-    private static final byte[] badQuals = { 59, 60, 62, 63, 64, 61, 62, 58, 57, 56 };
-    private static final byte[] goodQuals = { 60, 60, 60, 60, 60, 60, 60, 60, 60, 60 };
-    private static final byte[] fixedQuals = { 28, 29, 31, 32, 33, 30, 31, 27, 26, 25 };
-    private SAMFileHeader header;
-
-    @BeforeMethod
-    public void before() {
-        // reset the read counter so that we are deterministic
-        MisencodedBaseQualityReadTransformer.currentReadCounter = 0;
-        header = ArtificialSAMUtils.createArtificialSamHeader(1, 1, 1000);
-    }
-
-    private GATKSAMRecord createRead(final boolean useGoodBases) {
-        GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(header, "foo", 0, 10, readBases.getBytes(),
-                                                                     useGoodBases ? Arrays.copyOf(goodQuals, goodQuals.length) :
-                                                                                    Arrays.copyOf(badQuals, badQuals.length));
-        read.setCigarString("10M");
-        return read;
-    }
-
-    @Test(enabled = true)
-    public void testGoodQuals() {
-        final List<GATKSAMRecord> reads = new ArrayList<GATKSAMRecord>(10000);
-        for ( int i = 0; i < 10000; i++ )
-            reads.add(createRead(true));
-
-        testEncoding(reads);
-    }
-
-    @Test(enabled = true, expectedExceptions = {UserException.class})
-    public void testBadQualsThrowsError() {
-        final List<GATKSAMRecord> reads = new ArrayList<GATKSAMRecord>(10000);
-        for ( int i = 0; i < 10000; i++ )
-            reads.add(createRead(false));
-
-        testEncoding(reads);
-    }
-
-    @Test(enabled = true)
-    public void testFixBadQuals() {
-        final GATKSAMRecord read = createRead(false);
-        final GATKSAMRecord fixedRead = MisencodedBaseQualityReadTransformer.fixMisencodedQuals(read);
-        for ( int i = 0; i < fixedQuals.length; i++ )
-            Assert.assertEquals(fixedQuals[i], fixedRead.getBaseQualities()[i]);
-    }
-
-    private void testEncoding(final List<GATKSAMRecord> reads) {
-        for ( final GATKSAMRecord read : reads )
-            MisencodedBaseQualityReadTransformer.checkForMisencodedQuals(read);
-    }
-}
\ No newline at end of file
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/sam/ReadUtilsUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/sam/ReadUtilsUnitTest.java
deleted file mode 100644
index c7ceea1..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/sam/ReadUtilsUnitTest.java
+++ /dev/null
@@ -1,340 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.sam;
-
-import htsjdk.samtools.reference.IndexedFastaSequenceFile;
-import htsjdk.samtools.SAMFileHeader;
-import org.broadinstitute.gatk.utils.BaseTest;
-import org.broadinstitute.gatk.engine.GenomeAnalysisEngine;
-import org.broadinstitute.gatk.utils.BaseUtils;
-import org.broadinstitute.gatk.utils.Utils;
-import org.broadinstitute.gatk.utils.fasta.CachingIndexedFastaSequenceFile;
-import org.testng.Assert;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
-
-import java.io.File;
-import java.io.FileNotFoundException;
-import java.util.*;
-
-
-public class ReadUtilsUnitTest extends BaseTest {
-    private interface GetAdaptorFunc {
-        public int getAdaptor(final GATKSAMRecord record);
-    }
-
-    @DataProvider(name = "AdaptorGetter")
-    public Object[][] makeActiveRegionCutTests() {
-        final List<Object[]> tests = new LinkedList<Object[]>();
-
-        tests.add( new Object[]{ new GetAdaptorFunc() {
-            @Override public int getAdaptor(final GATKSAMRecord record) { return ReadUtils.getAdaptorBoundary(record); }
-        }});
-
-        tests.add( new Object[]{ new GetAdaptorFunc() {
-            @Override public int getAdaptor(final GATKSAMRecord record) { return record.getAdaptorBoundary(); }
-        }});
-
-        return tests.toArray(new Object[][]{});
-    }
-
-    private GATKSAMRecord makeRead(final int fragmentSize, final int mateStart) {
-        final byte[] bases = {'A', 'C', 'G', 'T', 'A', 'C', 'G', 'T'};
-        final byte[] quals = {30, 30, 30, 30, 30, 30, 30, 30};
-        final String cigar = "8M";
-        GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(bases, quals, cigar);
-        read.setProperPairFlag(true);
-        read.setReadPairedFlag(true);
-        read.setMateAlignmentStart(mateStart);
-        read.setInferredInsertSize(fragmentSize);
-        return read;
-    }
-
-    @Test(dataProvider = "AdaptorGetter")
-    public void testGetAdaptorBoundary(final GetAdaptorFunc get) {
-        final int fragmentSize = 10;
-        final int mateStart = 1000;
-        final int BEFORE = mateStart - 2;
-        final int AFTER = mateStart + 2;
-        int myStart, boundary;
-        GATKSAMRecord read;
-
-        // Test case 1: positive strand, first read
-        read = makeRead(fragmentSize, mateStart);
-        myStart = BEFORE;
-        read.setAlignmentStart(myStart);
-        read.setReadNegativeStrandFlag(false);
-        read.setMateNegativeStrandFlag(true);
-        boundary = get.getAdaptor(read);
-        Assert.assertEquals(boundary, myStart + fragmentSize + 1);
-
-        // Test case 2: positive strand, second read
-        read = makeRead(fragmentSize, mateStart);
-        myStart = AFTER;
-        read.setAlignmentStart(myStart);
-        read.setReadNegativeStrandFlag(false);
-        read.setMateNegativeStrandFlag(true);
-        boundary = get.getAdaptor(read);
-        Assert.assertEquals(boundary, myStart + fragmentSize + 1);
-
-        // Test case 3: negative strand, second read
-        read = makeRead(fragmentSize, mateStart);
-        myStart = AFTER;
-        read.setAlignmentStart(myStart);
-        read.setReadNegativeStrandFlag(true);
-        read.setMateNegativeStrandFlag(false);
-        boundary = get.getAdaptor(read);
-        Assert.assertEquals(boundary, mateStart - 1);
-
-        // Test case 4: negative strand, first read
-        read = makeRead(fragmentSize, mateStart);
-        myStart = BEFORE;
-        read.setAlignmentStart(myStart);
-        read.setReadNegativeStrandFlag(true);
-        read.setMateNegativeStrandFlag(false);
-        boundary = get.getAdaptor(read);
-        Assert.assertEquals(boundary, mateStart - 1);
-
-        // Test case 5: mate is mapped to another chromosome (test both strands)
-        read = makeRead(fragmentSize, mateStart);
-        read.setInferredInsertSize(0);
-        read.setReadNegativeStrandFlag(true);
-        read.setMateNegativeStrandFlag(false);
-        boundary = get.getAdaptor(read);
-        Assert.assertEquals(boundary, ReadUtils.CANNOT_COMPUTE_ADAPTOR_BOUNDARY);
-        read.setReadNegativeStrandFlag(false);
-        read.setMateNegativeStrandFlag(true);
-        boundary = get.getAdaptor(read);
-        Assert.assertEquals(boundary, ReadUtils.CANNOT_COMPUTE_ADAPTOR_BOUNDARY);
-        read.setInferredInsertSize(10);
-
-        // Test case 6: read is unmapped
-        read = makeRead(fragmentSize, mateStart);
-        read.setReadUnmappedFlag(true);
-        boundary = get.getAdaptor(read);
-        Assert.assertEquals(boundary, ReadUtils.CANNOT_COMPUTE_ADAPTOR_BOUNDARY);
-        read.setReadUnmappedFlag(false);
-
-        // Test case 7:  reads don't overlap and look like this:
-        //    <--------|
-        //                 |------>
-        // first read:
-        read = makeRead(fragmentSize, mateStart);
-        myStart = 980;
-        read.setAlignmentStart(myStart);
-        read.setInferredInsertSize(20);
-        read.setReadNegativeStrandFlag(true);
-        boundary = get.getAdaptor(read);
-        Assert.assertEquals(boundary, ReadUtils.CANNOT_COMPUTE_ADAPTOR_BOUNDARY);
-
-        // second read:
-        read = makeRead(fragmentSize, mateStart);
-        myStart = 1000;
-        read.setAlignmentStart(myStart);
-        read.setInferredInsertSize(20);
-        read.setMateAlignmentStart(980);
-        read.setReadNegativeStrandFlag(false);
-        boundary = get.getAdaptor(read);
-        Assert.assertEquals(boundary, ReadUtils.CANNOT_COMPUTE_ADAPTOR_BOUNDARY);
-
-        // Test case 8: read doesn't have proper pair flag set
-        read = makeRead(fragmentSize, mateStart);
-        read.setReadPairedFlag(true);
-        read.setProperPairFlag(false);
-        Assert.assertEquals(get.getAdaptor(read), ReadUtils.CANNOT_COMPUTE_ADAPTOR_BOUNDARY);
-
-        // Test case 9: read and mate have same negative flag setting
-        for ( final boolean negFlag: Arrays.asList(true, false) ) {
-            read = makeRead(fragmentSize, mateStart);
-            read.setAlignmentStart(BEFORE);
-            read.setReadPairedFlag(true);
-            read.setProperPairFlag(true);
-            read.setReadNegativeStrandFlag(negFlag);
-            read.setMateNegativeStrandFlag(!negFlag);
-            Assert.assertTrue(get.getAdaptor(read) != ReadUtils.CANNOT_COMPUTE_ADAPTOR_BOUNDARY, "Get adaptor should have succeeded");
-
-            read = makeRead(fragmentSize, mateStart);
-            read.setAlignmentStart(BEFORE);
-            read.setReadPairedFlag(true);
-            read.setProperPairFlag(true);
-            read.setReadNegativeStrandFlag(negFlag);
-            read.setMateNegativeStrandFlag(negFlag);
-            Assert.assertEquals(get.getAdaptor(read), ReadUtils.CANNOT_COMPUTE_ADAPTOR_BOUNDARY, "Get adaptor should have failed for reads with bad alignment orientation");
-        }
-    }
-
-    @Test (enabled = true)
-    public void testGetBasesReverseComplement() {
-        int iterations = 1000;
-        Random random = GenomeAnalysisEngine.getRandomGenerator();
-        while(iterations-- > 0) {
-            final int l = random.nextInt(1000);
-            GATKSAMRecord read = GATKSAMRecord.createRandomRead(l);
-            byte [] original = read.getReadBases();
-            byte [] reconverted = new byte[l];
-            String revComp = ReadUtils.getBasesReverseComplement(read);
-            for (int i=0; i<l; i++) {
-                reconverted[l-1-i] = BaseUtils.getComplement((byte) revComp.charAt(i));
-            }
-            Assert.assertEquals(reconverted, original);
-        }
-    }
-
-    @Test (enabled = true)
-    public void testGetMaxReadLength() {
-        for( final int minLength : Arrays.asList( 5, 30, 50 ) ) {
-            for( final int maxLength : Arrays.asList( 50, 75, 100 ) ) {
-                final List<GATKSAMRecord> reads = new ArrayList<GATKSAMRecord>();
-                for( int readLength = minLength; readLength <= maxLength; readLength++ ) {
-                    reads.add( ReadUtils.createRandomRead( readLength ) );
-                }
-                Assert.assertEquals(ReadUtils.getMaxReadLength(reads), maxLength, "max length does not match");
-            }
-        }
-
-        final List<GATKSAMRecord> reads = new LinkedList<GATKSAMRecord>();
-        Assert.assertEquals(ReadUtils.getMaxReadLength(reads), 0, "Empty list should have max length of zero");
-    }
-
-    @Test (enabled = true)
-    public void testReadWithNsRefIndexInDeletion() throws FileNotFoundException {
-
-        final IndexedFastaSequenceFile seq = new CachingIndexedFastaSequenceFile(new File(b37KGReference));
-        final SAMFileHeader header = ArtificialSAMUtils.createArtificialSamHeader(seq.getSequenceDictionary());
-        final int readLength = 76;
-
-        final GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(header, "myRead", 0, 8975, readLength);
-        read.setReadBases(Utils.dupBytes((byte) 'A', readLength));
-        read.setBaseQualities(Utils.dupBytes((byte)30, readLength));
-        read.setCigarString("3M414N1D73M");
-
-        final int result = ReadUtils.getReadCoordinateForReferenceCoordinateUpToEndOfRead(read, 9392, ReadUtils.ClippingTail.LEFT_TAIL);
-        Assert.assertEquals(result, 2);
-    }
-
-    @Test (enabled = true)
-    public void testReadWithNsRefAfterDeletion() throws FileNotFoundException {
-
-        final IndexedFastaSequenceFile seq = new CachingIndexedFastaSequenceFile(new File(b37KGReference));
-        final SAMFileHeader header = ArtificialSAMUtils.createArtificialSamHeader(seq.getSequenceDictionary());
-        final int readLength = 76;
-
-        final GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(header, "myRead", 0, 8975, readLength);
-        read.setReadBases(Utils.dupBytes((byte) 'A', readLength));
-        read.setBaseQualities(Utils.dupBytes((byte)30, readLength));
-        read.setCigarString("3M414N1D73M");
-
-        final int result = ReadUtils.getReadCoordinateForReferenceCoordinateUpToEndOfRead(read, 9393, ReadUtils.ClippingTail.LEFT_TAIL);
-        Assert.assertEquals(result, 3);
-    }
-
-    @DataProvider(name = "HasWellDefinedFragmentSizeData")
-    public Object[][] makeHasWellDefinedFragmentSizeData() throws Exception {
-        final List<Object[]> tests = new LinkedList<Object[]>();
-
-        // setup a basic read that will work
-        final SAMFileHeader header = ArtificialSAMUtils.createArtificialSamHeader();
-        final GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(header, "read1", 0, 10, 10);
-        read.setReadPairedFlag(true);
-        read.setProperPairFlag(true);
-        read.setReadUnmappedFlag(false);
-        read.setMateUnmappedFlag(false);
-        read.setAlignmentStart(100);
-        read.setCigarString("50M");
-        read.setMateAlignmentStart(130);
-        read.setInferredInsertSize(80);
-        read.setFirstOfPairFlag(true);
-        read.setReadNegativeStrandFlag(false);
-        read.setMateNegativeStrandFlag(true);
-
-        tests.add( new Object[]{ "basic case", read.clone(), true });
-
-        {
-            final GATKSAMRecord bad1 = (GATKSAMRecord)read.clone();
-            bad1.setReadPairedFlag(false);
-            tests.add( new Object[]{ "not paired", bad1, false });
-        }
-
-        {
-            final GATKSAMRecord bad = (GATKSAMRecord)read.clone();
-            bad.setProperPairFlag(false);
-            // we currently don't require the proper pair flag to be set
-            tests.add( new Object[]{ "not proper pair", bad, true });
-//            tests.add( new Object[]{ "not proper pair", bad, false });
-        }
-
-        {
-            final GATKSAMRecord bad = (GATKSAMRecord)read.clone();
-            bad.setReadUnmappedFlag(true);
-            tests.add( new Object[]{ "read is unmapped", bad, false });
-        }
-
-        {
-            final GATKSAMRecord bad = (GATKSAMRecord)read.clone();
-            bad.setMateUnmappedFlag(true);
-            tests.add( new Object[]{ "mate is unmapped", bad, false });
-        }
-
-        {
-            final GATKSAMRecord bad = (GATKSAMRecord)read.clone();
-            bad.setMateNegativeStrandFlag(false);
-            tests.add( new Object[]{ "read and mate both on positive strand", bad, false });
-        }
-
-        {
-            final GATKSAMRecord bad = (GATKSAMRecord)read.clone();
-            bad.setReadNegativeStrandFlag(true);
-            tests.add( new Object[]{ "read and mate both on negative strand", bad, false });
-        }
-
-        {
-            final GATKSAMRecord bad = (GATKSAMRecord)read.clone();
-            bad.setInferredInsertSize(0);
-            tests.add( new Object[]{ "insert size is 0", bad, false });
-        }
-
-        {
-            final GATKSAMRecord bad = (GATKSAMRecord)read.clone();
-            bad.setAlignmentStart(1000);
-            tests.add( new Object[]{ "positve read starts after mate end", bad, false });
-        }
-
-        {
-            final GATKSAMRecord bad = (GATKSAMRecord)read.clone();
-            bad.setReadNegativeStrandFlag(true);
-            bad.setMateNegativeStrandFlag(false);
-            bad.setMateAlignmentStart(1000);
-            tests.add( new Object[]{ "negative strand read ends before mate starts", bad, false });
-        }
-
-        return tests.toArray(new Object[][]{});
-    }
-
-    @Test(dataProvider = "HasWellDefinedFragmentSizeData")
-    private void testHasWellDefinedFragmentSize(final String name, final GATKSAMRecord read, final boolean expected) {
-        Assert.assertEquals(ReadUtils.hasWellDefinedFragmentSize(read), expected);
-    }
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/smithwaterman/SmithWatermanBenchmark.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/smithwaterman/SmithWatermanBenchmark.java
deleted file mode 100644
index 44ba64c..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/smithwaterman/SmithWatermanBenchmark.java
+++ /dev/null
@@ -1,87 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.smithwaterman;
-
-import com.google.caliper.Param;
-import com.google.caliper.SimpleBenchmark;
-import org.broadinstitute.gatk.utils.Utils;
-
-/**
- * Caliper microbenchmark of parsing a VCF file
- */
-public class SmithWatermanBenchmark extends SimpleBenchmark {
-
-    @Param({"Original"})
-    String version; // set automatically by framework
-
-    @Param({"10", "50", "100", "500"})
-    int sizeOfMiddleRegion; // set automatically by framework
-
-    @Param({"10", "50", "100", "500"})
-    int sizeOfEndRegions; // set automatically by framework
-
-    String refString;
-    String hapString;
-
-    @Override protected void setUp() {
-        final StringBuilder ref = new StringBuilder();
-        final StringBuilder hap = new StringBuilder();
-
-        ref.append(Utils.dupString('A', sizeOfEndRegions));
-        hap.append(Utils.dupString('A', sizeOfEndRegions));
-
-        // introduce a SNP
-        ref.append("X");
-        hap.append("Y");
-
-        ref.append(Utils.dupString('A', sizeOfMiddleRegion));
-        hap.append(Utils.dupString('A', sizeOfMiddleRegion));
-
-        // introduce a SNP
-        ref.append("X");
-        hap.append("Y");
-
-        ref.append(Utils.dupString('A', sizeOfEndRegions));
-        hap.append(Utils.dupString('A', sizeOfEndRegions));
-
-        refString = ref.toString();
-        hapString = hap.toString();
-    }
-
-    public void timeSW(int rep) {
-        for ( int i = 0; i < rep; i++ ) {
-            final SmithWaterman sw;
-            if ( version.equals("Greedy") )
-                throw new IllegalArgumentException("Unsupported implementation");
-            sw = new SWPairwiseAlignment(refString.getBytes(), hapString.getBytes());
-            sw.getCigar();
-        }
-    }
-
-    public static void main(String[] args) {
-        com.google.caliper.Runner.main(SmithWatermanBenchmark.class, args);
-    }
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/text/ListFileUtilsUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/text/ListFileUtilsUnitTest.java
deleted file mode 100644
index 086cefe..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/text/ListFileUtilsUnitTest.java
+++ /dev/null
@@ -1,159 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.text;
-
-import org.broadinstitute.gatk.utils.BaseTest;
-import org.broadinstitute.gatk.utils.commandline.ParsingEngine;
-import org.broadinstitute.gatk.utils.commandline.Tags;
-import org.broadinstitute.gatk.engine.CommandLineGATK;
-import org.broadinstitute.gatk.engine.datasources.reads.SAMReaderID;
-import org.testng.Assert;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
-
-import java.io.File;
-import java.io.PrintWriter;
-import java.util.*;
-
-/**
- * Tests selected functionality in the CommandLineExecutable class
- */
-public class ListFileUtilsUnitTest extends BaseTest {
-
-    @Test
-    public void testIgnoreBlankLinesInBAMListFiles() throws Exception {
-        File tempListFile = createTempListFile("testIgnoreBlankLines",
-                                               "",
-                                               publicTestDir + "exampleBAM.bam",
-                                               "         "
-                                              );
-
-        List<SAMReaderID> expectedBAMFileListAfterUnpacking = new ArrayList<SAMReaderID>();
-        expectedBAMFileListAfterUnpacking.add(new SAMReaderID(new File(publicTestDir + "exampleBAM.bam"), new Tags()));
-
-        performBAMListFileUnpackingTest(tempListFile, expectedBAMFileListAfterUnpacking);
-    }
-
-    @Test
-    public void testCommentSupportInBAMListFiles() throws Exception {
-        File tempListFile = createTempListFile("testCommentSupport",
-                                               "#",
-                                               publicTestDir + "exampleBAM.bam",
-                                               "#" + publicTestDir + "foo.bam",
-                                               "      # " + publicTestDir + "bar.bam"
-                                              );
-
-        List<SAMReaderID> expectedBAMFileListAfterUnpacking = new ArrayList<SAMReaderID>();
-        expectedBAMFileListAfterUnpacking.add(new SAMReaderID(new File(publicTestDir + "exampleBAM.bam"), new Tags()));
-
-        performBAMListFileUnpackingTest(tempListFile, expectedBAMFileListAfterUnpacking);
-    }
-
-    @Test
-    public void testUnpackSet() throws Exception {
-        Set<String> expected = new HashSet<String>(Arrays.asList(publicTestDir + "exampleBAM.bam"));
-        Set<String> actual;
-
-        actual = ListFileUtils.unpackSet(Arrays.asList(publicTestDir + "exampleBAM.bam"));
-        Assert.assertEquals(actual, expected);
-
-        File tempListFile = createTempListFile("testUnpackSet",
-                "#",
-                publicTestDir + "exampleBAM.bam",
-                "#" + publicTestDir + "foo.bam",
-                "      # " + publicTestDir + "bar.bam"
-        );
-        actual = ListFileUtils.unpackSet(Arrays.asList(tempListFile.getAbsolutePath()));
-        Assert.assertEquals(actual, expected);
-    }
-
-    @DataProvider(name="includeMatchingTests")
-    public Object[][] getIncludeMatchingTests() {
-        return new Object[][] {
-                new Object[] { asSet("a", "ab", "abc"), Arrays.asList("a"), true, asSet("a") },
-                new Object[] { asSet("a", "ab", "abc"), Arrays.asList("a"), false, asSet("a", "ab", "abc") },
-                new Object[] { asSet("a", "ab", "abc"), Arrays.asList("b"), true, Collections.EMPTY_SET },
-                new Object[] { asSet("a", "ab", "abc"), Arrays.asList("b"), false, asSet("ab", "abc") },
-                new Object[] { asSet("a", "ab", "abc"), Arrays.asList("a", "b"), true, asSet("a") },
-                new Object[] { asSet("a", "ab", "abc"), Arrays.asList("a", "b"), false, asSet("a", "ab", "abc") },
-                new Object[] { asSet("a", "ab", "abc"), Arrays.asList("a", "ab"), true, asSet("a", "ab") },
-                new Object[] { asSet("a", "ab", "abc"), Arrays.asList("a", "ab"), false, asSet("a", "ab", "abc") },
-                new Object[] { asSet("a", "ab", "abc"), Arrays.asList(".*b.*"), true, Collections.EMPTY_SET },
-                new Object[] { asSet("a", "ab", "abc"), Arrays.asList(".*b.*"), false, asSet("ab", "abc") },
-                new Object[] { asSet("a", "ab", "abc"), Arrays.asList(".*"), true, Collections.EMPTY_SET },
-                new Object[] { asSet("a", "ab", "abc"), Arrays.asList(".*"), false, asSet("a", "ab", "abc") }
-        };
-    }
-
-    @Test(dataProvider = "includeMatchingTests")
-    public void testIncludeMatching(Set<String> values, Collection<String> filters, boolean exactMatch, Set<String> expected) {
-        Set<String> actual = ListFileUtils.includeMatching(values, ListFileUtils.IDENTITY_STRING_CONVERTER, filters, exactMatch);
-        Assert.assertEquals(actual, expected);
-    }
-
-    @DataProvider(name="excludeMatchingTests")
-    public Object[][] getExcludeMatchingTests() {
-        return new Object[][] {
-                new Object[] { asSet("a", "ab", "abc"), Arrays.asList("a"), true, asSet("ab", "abc") },
-                new Object[] { asSet("a", "ab", "abc"), Arrays.asList("a"), false, Collections.EMPTY_SET },
-                new Object[] { asSet("a", "ab", "abc"), Arrays.asList("b"), true, asSet("a", "ab", "abc") },
-                new Object[] { asSet("a", "ab", "abc"), Arrays.asList("b"), false, asSet("a") },
-                new Object[] { asSet("a", "ab", "abc"), Arrays.asList("a", "b"), true, asSet("ab", "abc") },
-                new Object[] { asSet("a", "ab", "abc"), Arrays.asList("a", "b"), false, Collections.EMPTY_SET },
-                new Object[] { asSet("a", "ab", "abc"), Arrays.asList("a", "ab"), true, asSet("abc") },
-                new Object[] { asSet("a", "ab", "abc"), Arrays.asList("a", "ab"), false, Collections.EMPTY_SET },
-                new Object[] { asSet("a", "ab", "abc"), Arrays.asList(".*b.*"), true, asSet("a", "ab", "abc") },
-                new Object[] { asSet("a", "ab", "abc"), Arrays.asList(".*b.*"), false, asSet("a") },
-                new Object[] { asSet("a", "ab", "abc"), Arrays.asList(".*"), true, asSet("a", "ab", "abc") },
-                new Object[] { asSet("a", "ab", "abc"), Arrays.asList(".*"), false, Collections.EMPTY_SET }
-        };
-    }
-
-    @Test(dataProvider = "excludeMatchingTests")
-    public void testExcludeMatching(Set<String> values, Collection<String> filters, boolean exactMatch, Set<String> expected) {
-        Set<String> actual = ListFileUtils.excludeMatching(values, ListFileUtils.IDENTITY_STRING_CONVERTER, filters, exactMatch);
-        Assert.assertEquals(actual, expected);
-    }
-
-    private static <T> Set<T> asSet(T... args){
-        return new HashSet<T>(Arrays.asList(args));
-    }
-
-    private void performBAMListFileUnpackingTest( File tempListFile, List<SAMReaderID> expectedUnpackedFileList ) throws Exception {
-        List<String> bamFiles = new ArrayList<String>();
-        bamFiles.add(tempListFile.getAbsolutePath());
-
-        CommandLineGATK testInstance = new CommandLineGATK();
-        testInstance.setParser(new ParsingEngine(testInstance));
-
-        List<SAMReaderID> unpackedBAMFileList = ListFileUtils.unpackBAMFileList(bamFiles,new ParsingEngine(testInstance));
-
-        Assert.assertEquals(unpackedBAMFileList.size(), expectedUnpackedFileList.size(),
-                            "Unpacked BAM file list contains extraneous lines");
-        Assert.assertEquals(unpackedBAMFileList, expectedUnpackedFileList,
-                            "Unpacked BAM file list does not contain correct BAM file names");
-    }
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/text/TextFormattingUtilsUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/text/TextFormattingUtilsUnitTest.java
deleted file mode 100644
index 5457310..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/text/TextFormattingUtilsUnitTest.java
+++ /dev/null
@@ -1,89 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.text;
-
-import org.broadinstitute.gatk.utils.BaseTest;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-import org.testng.Assert;
-import org.testng.annotations.Test;
-
-import java.util.Arrays;
-import java.util.Collections;
-
-public class TextFormattingUtilsUnitTest extends BaseTest {
-    @Test(expectedExceptions = ReviewedGATKException.class)
-    public void testSplitWhiteSpaceNullLine() {
-        TextFormattingUtils.splitWhiteSpace(null);
-    }
-
-    @Test
-    public void testSplitWhiteSpace() {
-        Assert.assertEquals(TextFormattingUtils.splitWhiteSpace("foo bar baz"), new String[] { "foo", "bar", "baz" });
-        Assert.assertEquals(TextFormattingUtils.splitWhiteSpace("foo  bar  baz"), new String[] { "foo", "bar", "baz" });
-        Assert.assertEquals(TextFormattingUtils.splitWhiteSpace(" foo bar baz"), new String[] { "foo", "bar", "baz" });
-        Assert.assertEquals(TextFormattingUtils.splitWhiteSpace(" foo bar baz "), new String[] { "foo", "bar", "baz" });
-        Assert.assertEquals(TextFormattingUtils.splitWhiteSpace("foo bar baz "), new String[] { "foo", "bar", "baz" });
-        Assert.assertEquals(TextFormattingUtils.splitWhiteSpace("\tfoo\tbar\tbaz\t"), new String[]{"foo", "bar", "baz"});
-    }
-
-    @Test(expectedExceptions = ReviewedGATKException.class)
-    public void testGetWordStartsNullLine() {
-        TextFormattingUtils.getWordStarts(null);
-    }
-
-    @Test
-    public void testGetWordStarts() {
-        Assert.assertEquals(TextFormattingUtils.getWordStarts("foo bar baz"), Arrays.asList(4, 8));
-        Assert.assertEquals(TextFormattingUtils.getWordStarts("foo  bar  baz"), Arrays.asList(5, 10));
-        Assert.assertEquals(TextFormattingUtils.getWordStarts(" foo bar baz"), Arrays.asList(1, 5, 9));
-        Assert.assertEquals(TextFormattingUtils.getWordStarts(" foo bar baz "), Arrays.asList(1, 5, 9));
-        Assert.assertEquals(TextFormattingUtils.getWordStarts("foo bar baz "), Arrays.asList(4, 8));
-        Assert.assertEquals(TextFormattingUtils.getWordStarts("\tfoo\tbar\tbaz\t"), Arrays.asList(1, 5, 9));
-    }
-
-    @Test(expectedExceptions = ReviewedGATKException.class)
-    public void testSplitFixedWidthNullLine() {
-        TextFormattingUtils.splitFixedWidth(null, Collections.<Integer>emptyList());
-    }
-
-    @Test(expectedExceptions = ReviewedGATKException.class)
-    public void testSplitFixedWidthNullColumnStarts() {
-        TextFormattingUtils.splitFixedWidth("foo bar baz", null);
-    }
-
-    @Test
-    public void testSplitFixedWidth() {
-        Assert.assertEquals(TextFormattingUtils.splitFixedWidth("foo bar baz", Arrays.asList(4, 8)), new String[] { "foo", "bar", "baz" });
-        Assert.assertEquals(TextFormattingUtils.splitFixedWidth("foo  bar  baz", Arrays.asList(5, 10)), new String[] { "foo", "bar", "baz" });
-        Assert.assertEquals(TextFormattingUtils.splitFixedWidth(" foo bar baz", Arrays.asList(5, 9)), new String[] { "foo", "bar", "baz" });
-        Assert.assertEquals(TextFormattingUtils.splitFixedWidth(" foo bar baz ", Arrays.asList(5, 9)), new String[] { "foo", "bar", "baz" });
-        Assert.assertEquals(TextFormattingUtils.splitFixedWidth("foo bar baz ", Arrays.asList(4, 8)), new String[] { "foo", "bar", "baz" });
-        Assert.assertEquals(TextFormattingUtils.splitFixedWidth("\tfoo\tbar\tbaz\t", Arrays.asList(5, 9)), new String[] { "foo", "bar", "baz" });
-        Assert.assertEquals(TextFormattingUtils.splitFixedWidth("f o b r b z", Arrays.asList(4, 8)), new String[] { "f o", "b r", "b z" });
-        Assert.assertEquals(TextFormattingUtils.splitFixedWidth(" f o b r b z", Arrays.asList(4, 8)), new String[] { "f o", "b r", "b z" });
-        Assert.assertEquals(TextFormattingUtils.splitFixedWidth("  f o b r b z", Arrays.asList(4, 8)), new String[] { "f", "o b", "r b z" });
-    }
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/threading/EfficiencyMonitoringThreadFactoryUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/threading/EfficiencyMonitoringThreadFactoryUnitTest.java
deleted file mode 100644
index 0c98813..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/threading/EfficiencyMonitoringThreadFactoryUnitTest.java
+++ /dev/null
@@ -1,189 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.threading;
-
-import org.apache.log4j.Priority;
-import org.broadinstitute.gatk.utils.BaseTest;
-import org.broadinstitute.gatk.utils.Utils;
-import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
-import org.testng.Assert;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
-
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.List;
-import java.util.concurrent.Callable;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Executors;
-import java.util.concurrent.TimeUnit;
-
-/**
- * Tests for the state monitoring thread factory.
- */
-public class EfficiencyMonitoringThreadFactoryUnitTest extends BaseTest {
-    // the duration of the tests -- 100 ms is tolerable given the number of tests we are doing
-    private final static long THREAD_TARGET_DURATION_IN_MILLISECOND = 100000;
-    private final static int MAX_THREADS = 4;
-    final static Object GLOBAL_LOCK = new Object();
-
-    private class StateTest extends TestDataProvider {
-        private final double TOLERANCE = 0.1; // willing to tolerate a 10% error
-
-        final List<EfficiencyMonitoringThreadFactory.State> statesForThreads;
-
-        public StateTest(final List<EfficiencyMonitoringThreadFactory.State> statesForThreads) {
-            super(StateTest.class);
-            this.statesForThreads = statesForThreads;
-            setName("StateTest " + Utils.join(",", statesForThreads));
-        }
-
-        public List<EfficiencyMonitoringThreadFactory.State> getStatesForThreads() {
-            return statesForThreads;
-        }
-
-        public int getNStates() { return statesForThreads.size(); }
-
-        public double maxStatePercent(final EfficiencyMonitoringThreadFactory.State state) { return 100*(fraction(state) + TOLERANCE); }
-        public double minStatePercent(final EfficiencyMonitoringThreadFactory.State state) { return 100*(fraction(state) - TOLERANCE); }
-
-        private double fraction(final EfficiencyMonitoringThreadFactory.State state) {
-            return Collections.frequency(statesForThreads, state) / (1.0 * statesForThreads.size());
-        }
-    }
-
-    /**
-     * Test helper threading class that puts the thread into RUNNING, BLOCKED, or WAITING state as
-     * requested for input argument
-     */
-    private static class StateTestThread implements Callable<Double> {
-        private final EfficiencyMonitoringThreadFactory.State stateToImplement;
-
-        private StateTestThread(final EfficiencyMonitoringThreadFactory.State stateToImplement) {
-            this.stateToImplement = stateToImplement;
-        }
-
-        @Override
-        public Double call() throws Exception {
-            switch ( stateToImplement ) {
-                case USER_CPU:
-                    // do some work until we get to THREAD_TARGET_DURATION_IN_MILLISECOND
-                    double sum = 0.0;
-                    final long startTime = System.currentTimeMillis();
-                    for ( int i = 1; System.currentTimeMillis() - startTime < (THREAD_TARGET_DURATION_IN_MILLISECOND - 1); i++ ) {
-                        sum += Math.log10(i);
-                    }
-                    return sum;
-                case WAITING:
-                    Thread.currentThread().sleep(THREAD_TARGET_DURATION_IN_MILLISECOND);
-                    return 0.0;
-                case BLOCKING:
-                    if ( EfficiencyMonitoringThreadFactory.DEBUG ) logger.warn("Blocking...");
-                    synchronized (GLOBAL_LOCK) {
-                        // the GLOBAL_LOCK must be held by the unit test itself for this to properly block
-                        if ( EfficiencyMonitoringThreadFactory.DEBUG ) logger.warn("  ... done blocking");
-                    }
-                    return 0.0;
-                case WAITING_FOR_IO:
-                    // TODO -- implement me
-                    // shouldn't ever get here, throw an exception
-                    throw new ReviewedGATKException("WAITING_FOR_IO testing currently not implemented, until we figure out how to force a system call block");
-                default:
-                    throw new ReviewedGATKException("Unexpected thread test state " + stateToImplement);
-            }
-        }
-    }
-
-    @DataProvider(name = "StateTest")
-    public Object[][] createStateTest() {
-        for ( final int nThreads : Arrays.asList(3) ) {
-            //final List<EfficiencyMonitoringThreadFactory.State> allStates = Arrays.asList(EfficiencyMonitoringThreadFactory.State.WAITING_FOR_IO);
-            final List<EfficiencyMonitoringThreadFactory.State> allStates = Arrays.asList(EfficiencyMonitoringThreadFactory.State.USER_CPU, EfficiencyMonitoringThreadFactory.State.WAITING, EfficiencyMonitoringThreadFactory.State.BLOCKING);
-            //final List<EfficiencyMonitoringThreadFactory.State> allStates = Arrays.asList(EfficiencyMonitoringThreadFactory.State.values());
-            for (final List<EfficiencyMonitoringThreadFactory.State> states : Utils.makePermutations(allStates, nThreads, true) ) {
-                //if ( Collections.frequency(states, Thread.State.BLOCKED) > 0)
-                    new StateTest(states);
-            }
-        }
-
-        return StateTest.getTests(StateTest.class);
-    }
-
-    // NOTE this test takes an unreasonably long time to run, and so it's been disabled as these monitoring threads
-    // aren't a core GATK feature any longer.  Should be reabled if we come to care about this capability again
-    // in the future, or we can run these in parallel
-    @Test(enabled = false, dataProvider = "StateTest", timeOut = MAX_THREADS * THREAD_TARGET_DURATION_IN_MILLISECOND)
-    public void testStateTest(final StateTest test) throws InterruptedException {
-        // allows us to test blocking
-        final EfficiencyMonitoringThreadFactory factory = new EfficiencyMonitoringThreadFactory(test.getNStates());
-        final ExecutorService threadPool = Executors.newFixedThreadPool(test.getNStates(), factory);
-
-        logger.warn("Running " + test);
-        synchronized (GLOBAL_LOCK) {
-            //logger.warn("  Have lock");
-            for ( final EfficiencyMonitoringThreadFactory.State threadToRunState : test.getStatesForThreads() )
-            threadPool.submit(new StateTestThread(threadToRunState));
-
-            // lock has to be here for the whole running of the activeThreads but end before the sleep so the blocked activeThreads
-            // can block for their allotted time
-            threadPool.shutdown();
-            Thread.sleep(THREAD_TARGET_DURATION_IN_MILLISECOND);
-        }
-        //logger.warn("  Releasing lock");
-        threadPool.awaitTermination(10, TimeUnit.SECONDS);
-        //logger.warn("  done awaiting termination");
-        //logger.warn("  waiting for all activeThreads to complete");
-        factory.waitForAllThreadsToComplete();
-        //logger.warn("  done waiting for activeThreads");
-
-        // make sure we counted everything properly
-        final long totalTime = factory.getTotalTime();
-        final long minTime = (long)(THREAD_TARGET_DURATION_IN_MILLISECOND * 0.5) * test.getNStates();
-        final long maxTime = (long)(THREAD_TARGET_DURATION_IN_MILLISECOND * 1.5) * test.getNStates();
-        //logger.warn("Testing total time");
-        Assert.assertTrue(totalTime >= minTime, "Factory results not properly accumulated: totalTime = " + totalTime + " < minTime = " + minTime);
-        Assert.assertTrue(totalTime <= maxTime, "Factory results not properly accumulated: totalTime = " + totalTime + " > maxTime = " + maxTime);
-
-        for (final EfficiencyMonitoringThreadFactory.State state : EfficiencyMonitoringThreadFactory.State.values() ) {
-            final double min = test.minStatePercent(state);
-            final double max = test.maxStatePercent(state);
-            final double obs = factory.getStatePercent(state);
-//            logger.warn("  Checking " + state
-//                    + " min " + String.format("%.2f", min)
-//                    + " max " + String.format("%.2f", max)
-//                    + " obs " + String.format("%.2f", obs)
-//                    + " factor = " + factory);
-            Assert.assertTrue(obs >= min, "Too little time spent in state " + state + " obs " + obs + " min " + min);
-            Assert.assertTrue(obs <= max, "Too much time spent in state " + state + " obs " + obs + " max " + min);
-        }
-
-        // we actually ran the expected number of activeThreads
-        Assert.assertEquals(factory.getNThreadsCreated(), test.getNStates());
-
-        // should be called to ensure we don't format / NPE on output
-        factory.printUsageInformation(logger, Priority.WARN);
-    }
-}
\ No newline at end of file
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/threading/ThreadPoolMonitorUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/threading/ThreadPoolMonitorUnitTest.java
deleted file mode 100644
index a73cb26..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/threading/ThreadPoolMonitorUnitTest.java
+++ /dev/null
@@ -1,64 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.threading;
-
-import org.testng.annotations.Test;
-import org.broadinstitute.gatk.utils.BaseTest;
-
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Executors; 
-/**
- * User: hanna
- * Date: Apr 29, 2009
- * Time: 4:30:55 PM
- * BROAD INSTITUTE SOFTWARE COPYRIGHT NOTICE AND AGREEMENT
- * Software and documentation are copyright 2005 by the Broad Institute.
- * All rights are reserved.
- *
- * Users acknowledge that this software is supplied without any warranty or support.
- * The Broad Institute is not responsible for its use, misuse, or
- * functionality.
- */
-
-/**
- * Tests for the thread pool monitor class.
- */
-
-public class ThreadPoolMonitorUnitTest extends BaseTest {
-    private ExecutorService threadPool = Executors.newFixedThreadPool(1);
-
-    /**
-     * Test to make sure the thread pool wait works properly. 
-     */
-    @Test(timeOut=2000)
-    public void testThreadPoolMonitor() {
-        ThreadPoolMonitor monitor = new ThreadPoolMonitor();
-        synchronized(monitor) {
-            threadPool.submit(monitor);
-            monitor.watch();
-        }
-    }
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/variant/GATKVCFUtilsUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/variant/GATKVCFUtilsUnitTest.java
deleted file mode 100644
index ab547b7..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/variant/GATKVCFUtilsUnitTest.java
+++ /dev/null
@@ -1,138 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.variant;
-
-import htsjdk.tribble.index.DynamicIndexCreator;
-import htsjdk.tribble.index.IndexCreator;
-import htsjdk.tribble.index.interval.IntervalIndexCreator;
-import htsjdk.tribble.index.linear.LinearIndexCreator;
-import org.broadinstitute.gatk.utils.BaseTest;
-import org.broadinstitute.gatk.engine.GenomeAnalysisEngine;
-import org.broadinstitute.gatk.engine.contexts.AlignmentContext;
-import org.broadinstitute.gatk.engine.contexts.ReferenceContext;
-import org.broadinstitute.gatk.engine.refdata.RefMetaDataTracker;
-import org.broadinstitute.gatk.engine.walkers.RodWalker;
-import org.broadinstitute.gatk.engine.walkers.Walker;
-import htsjdk.variant.vcf.VCFHeader;
-import htsjdk.variant.vcf.VCFHeaderLine;
-import org.testng.Assert;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
-
-import java.io.File;
-import java.lang.reflect.Method;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.Set;
-
-public class GATKVCFUtilsUnitTest extends BaseTest {
-    public static class VCFHeaderTestWalker extends RodWalker<Integer, Integer> {
-        public Integer map(RefMetaDataTracker tracker, ReferenceContext ref, AlignmentContext context) { return null; }
-        public Integer reduceInit() { return 0; }
-        public Integer reduce(Integer value, Integer sum) { return value + sum; }
-    }
-
-    public static class VCFHeaderTest2Walker extends VCFHeaderTestWalker {}
-
-    @Test
-    public void testAddingVCFHeaderInfo() {
-        final VCFHeader header = new VCFHeader();
-
-        final Walker walker1 = new VCFHeaderTestWalker();
-        final Walker walker2 = new VCFHeaderTest2Walker();
-
-        final GenomeAnalysisEngine testEngine1 = new GenomeAnalysisEngine();
-        testEngine1.setWalker(walker1);
-
-        final GenomeAnalysisEngine testEngine2 = new GenomeAnalysisEngine();
-        testEngine2.setWalker(walker2);
-
-        final VCFHeaderLine line1 = GATKVCFUtils.getCommandLineArgumentHeaderLine(testEngine1, Collections.EMPTY_LIST);
-        logger.warn(line1);
-        Assert.assertNotNull(line1);
-        Assert.assertEquals(line1.getKey(), GATKVCFUtils.GATK_COMMAND_LINE_KEY);
-        for ( final String field : Arrays.asList("Version", "ID", "Date", "CommandLineOptions"))
-            Assert.assertTrue(line1.toString().contains(field), "Couldn't find field " + field + " in " + line1.getValue());
-        Assert.assertTrue(line1.toString().contains("ID=" + testEngine1.getWalkerName()));
-
-        final VCFHeaderLine line2 = GATKVCFUtils.getCommandLineArgumentHeaderLine(testEngine2, Collections.EMPTY_LIST);
-        logger.warn(line2);
-
-        header.addMetaDataLine(line1);
-        final Set<VCFHeaderLine> lines1 = header.getMetaDataInInputOrder();
-        Assert.assertTrue(lines1.contains(line1));
-
-        header.addMetaDataLine(line2);
-        final Set<VCFHeaderLine> lines2 = header.getMetaDataInInputOrder();
-        Assert.assertTrue(lines2.contains(line1));
-        Assert.assertTrue(lines2.contains(line2));
-    }
-
-    private class IndexCreatorTest extends TestDataProvider {
-        private final GATKVCFIndexType type;
-        private final int parameter;
-        private final Class expectedClass;
-        private final Integer expectedDimension;
-        private final Method dimensionGetter;
-
-        private IndexCreatorTest(GATKVCFIndexType type, int parameter, Class expectedClass, Integer expectedDimension,
-                                 String dimensionGetterName) {
-            super(IndexCreatorTest.class);
-
-            this.type = type;
-            this.parameter = parameter;
-            this.expectedClass = expectedClass;
-            this.expectedDimension = expectedDimension;
-            try {
-                // Conditional matches testGetIndexCreator's if-statement
-                this.dimensionGetter = this.expectedDimension == null ? null : expectedClass.getDeclaredMethod(dimensionGetterName);
-            } catch (NoSuchMethodException e) {
-                throw new RuntimeException(e);
-            }
-        }
-    }
-
-    @DataProvider(name = "indexCreator")
-    public Object[][] indexCreatorData() {
-        new IndexCreatorTest(GATKVCFIndexType.DYNAMIC_SEEK, 0, DynamicIndexCreator.class, null, null);
-        new IndexCreatorTest(GATKVCFIndexType.DYNAMIC_SIZE, 0, DynamicIndexCreator.class, null, null);
-        new IndexCreatorTest(GATKVCFIndexType.LINEAR, 100, LinearIndexCreator.class, 100, "getBinSize");
-        new IndexCreatorTest(GATKVCFIndexType.INTERVAL, 200, IntervalIndexCreator.class, 200, "getFeaturesPerInterval");
-
-        return IndexCreatorTest.getTests(IndexCreatorTest.class);
-    }
-
-    @Test(dataProvider = "indexCreator")
-    public void testGetIndexCreator(IndexCreatorTest spec) throws Exception{
-        File dummy = new File("");
-        IndexCreator ic = GATKVCFUtils.getIndexCreator(spec.type, spec.parameter, dummy);
-        Assert.assertEquals(ic.getClass(), spec.expectedClass, "Wrong IndexCreator type");
-        if (spec.expectedDimension != null) {
-            Integer dimension = (int)spec.dimensionGetter.invoke(ic);
-            Assert.assertEquals(dimension, spec.expectedDimension, "Wrong dimension");
-        }
-    }
-}
\ No newline at end of file
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/variant/GATKVariantContextUtilsUnitTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/variant/GATKVariantContextUtilsUnitTest.java
deleted file mode 100644
index feb10a7..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/variant/GATKVariantContextUtilsUnitTest.java
+++ /dev/null
@@ -1,1612 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.variant;
-
-import htsjdk.variant.variantcontext.*;
-import org.broadinstitute.gatk.engine.GenomeAnalysisEngine;
-import org.broadinstitute.gatk.utils.*;
-import org.broadinstitute.gatk.utils.collections.Pair;
-import org.broadinstitute.gatk.utils.fasta.CachingIndexedFastaSequenceFile;
-import org.testng.Assert;
-import org.testng.annotations.BeforeSuite;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
-
-import java.io.File;
-import java.io.IOException;
-import java.util.*;
-
-public class GATKVariantContextUtilsUnitTest extends BaseTest {
-    private final static boolean DEBUG = false;
-
-    Allele Aref, T, C, G, Cref, ATC, ATCATC;
-    Allele ATCATCT;
-    Allele ATref;
-    Allele Anoref;
-    Allele GT;
-
-    private GenomeLocParser genomeLocParser;
-
-    @BeforeSuite
-    public void setup() throws IOException {
-        // alleles
-        Aref = Allele.create("A", true);
-        Cref = Allele.create("C", true);
-        T = Allele.create("T");
-        C = Allele.create("C");
-        G = Allele.create("G");
-        ATC = Allele.create("ATC");
-        ATCATC = Allele.create("ATCATC");
-        ATCATCT = Allele.create("ATCATCT");
-        ATref = Allele.create("AT",true);
-        Anoref = Allele.create("A",false);
-        GT = Allele.create("GT",false);
-        genomeLocParser = new GenomeLocParser(new CachingIndexedFastaSequenceFile(new File(hg18Reference)));
-    }
-
-    private Genotype makeG(String sample, Allele a1, Allele a2, double log10pError, int... pls) {
-        return new GenotypeBuilder(sample, Arrays.asList(a1, a2)).log10PError(log10pError).PL(pls).make();
-    }
-
-
-    private Genotype makeG(String sample, Allele a1, Allele a2, double log10pError) {
-        return new GenotypeBuilder(sample, Arrays.asList(a1, a2)).log10PError(log10pError).make();
-    }
-
-    private VariantContext makeVC(String source, List<Allele> alleles) {
-        return makeVC(source, alleles, null, null);
-    }
-
-    private VariantContext makeVC(String source, List<Allele> alleles, Genotype... g1) {
-        return makeVC(source, alleles, Arrays.asList(g1));
-    }
-
-    private VariantContext makeVC(String source, List<Allele> alleles, String filter) {
-        return makeVC(source, alleles, filter.equals(".") ? null : new HashSet<String>(Arrays.asList(filter)));
-    }
-
-    private VariantContext makeVC(String source, List<Allele> alleles, Set<String> filters) {
-        return makeVC(source, alleles, null, filters);
-    }
-
-    private VariantContext makeVC(String source, List<Allele> alleles, Collection<Genotype> genotypes) {
-        return makeVC(source, alleles, genotypes, null);
-    }
-
-    private VariantContext makeVC(String source, List<Allele> alleles, Collection<Genotype> genotypes, Set<String> filters) {
-        int start = 10;
-        int stop = start + alleles.get(0).length() - 1; // alleles.contains(ATC) ? start + 3 : start;
-        return new VariantContextBuilder(source, "1", start, stop, alleles).genotypes(genotypes).filters(filters).make();
-    }
-
-    // --------------------------------------------------------------------------------
-    //
-    // Test allele merging
-    //
-    // --------------------------------------------------------------------------------
-
-    private class MergeAllelesTest extends TestDataProvider {
-        List<List<Allele>> inputs;
-        List<Allele> expected;
-
-        private MergeAllelesTest(List<Allele>... arg) {
-            super(MergeAllelesTest.class);
-            LinkedList<List<Allele>> all = new LinkedList<>(Arrays.asList(arg));
-            expected = all.pollLast();
-            inputs = all;
-        }
-
-        public String toString() {
-            return String.format("MergeAllelesTest input=%s expected=%s", inputs, expected);
-        }
-    }
-    @DataProvider(name = "mergeAlleles")
-    public Object[][] mergeAllelesData() {
-        // first, do no harm
-        new MergeAllelesTest(Arrays.asList(Aref),
-                Arrays.asList(Aref));
-
-        new MergeAllelesTest(Arrays.asList(Aref),
-                Arrays.asList(Aref),
-                Arrays.asList(Aref));
-
-        new MergeAllelesTest(Arrays.asList(Aref),
-                Arrays.asList(Aref, T),
-                Arrays.asList(Aref, T));
-
-        new MergeAllelesTest(Arrays.asList(Aref, C),
-                Arrays.asList(Aref, T),
-                Arrays.asList(Aref, C, T));
-
-        new MergeAllelesTest(Arrays.asList(Aref, T),
-                Arrays.asList(Aref, C),
-                Arrays.asList(Aref, T, C)); // in order of appearence
-
-        new MergeAllelesTest(Arrays.asList(Aref, C, T),
-                Arrays.asList(Aref, C),
-                Arrays.asList(Aref, C, T));
-
-        new MergeAllelesTest(Arrays.asList(Aref, C, T), Arrays.asList(Aref, C, T));
-
-        new MergeAllelesTest(Arrays.asList(Aref, T, C), Arrays.asList(Aref, T, C));
-
-        new MergeAllelesTest(Arrays.asList(Aref, T, C),
-                Arrays.asList(Aref, C),
-                Arrays.asList(Aref, T, C)); // in order of appearence
-
-        new MergeAllelesTest(Arrays.asList(Aref),
-                Arrays.asList(Aref, ATC),
-                Arrays.asList(Aref, ATC));
-
-        new MergeAllelesTest(Arrays.asList(Aref),
-                Arrays.asList(Aref, ATC, ATCATC),
-                Arrays.asList(Aref, ATC, ATCATC));
-
-        // alleles in the order we see them
-        new MergeAllelesTest(Arrays.asList(Aref, ATCATC),
-                Arrays.asList(Aref, ATC, ATCATC),
-                Arrays.asList(Aref, ATCATC, ATC));
-
-        // same
-        new MergeAllelesTest(Arrays.asList(Aref, ATC),
-                Arrays.asList(Aref, ATCATC),
-                Arrays.asList(Aref, ATC, ATCATC));
-
-        new MergeAllelesTest(Arrays.asList(ATref, ATC, Anoref, G),
-                Arrays.asList(Aref, ATCATC, G),
-                Arrays.asList(ATref, ATC, Anoref, G, ATCATCT, GT));
-
-        return MergeAllelesTest.getTests(MergeAllelesTest.class);
-    }
-
-    @Test(enabled = !DEBUG, dataProvider = "mergeAlleles")
-    public void testMergeAlleles(MergeAllelesTest cfg) {
-        final List<VariantContext> inputs = new ArrayList<VariantContext>();
-
-        int i = 0;
-        for ( final List<Allele> alleles : cfg.inputs ) {
-            final String name = "vcf" + ++i;
-            inputs.add(makeVC(name, alleles));
-        }
-
-        final List<String> priority = vcs2priority(inputs);
-
-        final VariantContext merged = GATKVariantContextUtils.simpleMerge(
-                inputs, priority,
-                GATKVariantContextUtils.FilteredRecordMergeType.KEEP_IF_ANY_UNFILTERED,
-                GATKVariantContextUtils.GenotypeMergeType.PRIORITIZE, false, false, "set", false, false);
-
-        Assert.assertEquals(merged.getAlleles().size(),cfg.expected.size());
-        Assert.assertEquals(merged.getAlleles(), cfg.expected);
-    }
-
-    // --------------------------------------------------------------------------------
-    //
-    // Test rsID merging
-    //
-    // --------------------------------------------------------------------------------
-
-    private class SimpleMergeRSIDTest extends TestDataProvider {
-        List<String> inputs;
-        String expected;
-
-        private SimpleMergeRSIDTest(String... arg) {
-            super(SimpleMergeRSIDTest.class);
-            LinkedList<String> allStrings = new LinkedList<String>(Arrays.asList(arg));
-            expected = allStrings.pollLast();
-            inputs = allStrings;
-        }
-
-        public String toString() {
-            return String.format("SimpleMergeRSIDTest vc=%s expected=%s", inputs, expected);
-        }
-    }
-
-    @DataProvider(name = "simplemergersiddata")
-    public Object[][] createSimpleMergeRSIDData() {
-        new SimpleMergeRSIDTest(".", ".");
-        new SimpleMergeRSIDTest(".", ".", ".");
-        new SimpleMergeRSIDTest("rs1", "rs1");
-        new SimpleMergeRSIDTest("rs1", "rs1", "rs1");
-        new SimpleMergeRSIDTest(".", "rs1", "rs1");
-        new SimpleMergeRSIDTest("rs1", ".", "rs1");
-        new SimpleMergeRSIDTest("rs1", "rs2", "rs1,rs2");
-        new SimpleMergeRSIDTest("rs1", "rs2", "rs1", "rs1,rs2"); // duplicates
-        new SimpleMergeRSIDTest("rs2", "rs1", "rs2,rs1");
-        new SimpleMergeRSIDTest("rs2", "rs1", ".", "rs2,rs1");
-        new SimpleMergeRSIDTest("rs2", ".", "rs1", "rs2,rs1");
-        new SimpleMergeRSIDTest("rs1", ".", ".", "rs1");
-        new SimpleMergeRSIDTest("rs1", "rs2", "rs3", "rs1,rs2,rs3");
-
-        return SimpleMergeRSIDTest.getTests(SimpleMergeRSIDTest.class);
-    }
-
-    @Test(enabled = !DEBUG, dataProvider = "simplemergersiddata")
-    public void testRSIDMerge(SimpleMergeRSIDTest cfg) {
-        VariantContext snpVC1 = makeVC("snpvc1", Arrays.asList(Aref, T));
-        final List<VariantContext> inputs = new ArrayList<VariantContext>();
-
-        for ( final String id : cfg.inputs ) {
-            inputs.add(new VariantContextBuilder(snpVC1).id(id).make());
-        }
-
-        final VariantContext merged = GATKVariantContextUtils.simpleMerge(
-                inputs, null,
-                GATKVariantContextUtils.FilteredRecordMergeType.KEEP_IF_ANY_UNFILTERED,
-                GATKVariantContextUtils.GenotypeMergeType.UNSORTED, false, false, "set", false, false);
-        Assert.assertEquals(merged.getID(), cfg.expected);
-    }
-
-    // --------------------------------------------------------------------------------
-    //
-    // Test filtered merging
-    //
-    // --------------------------------------------------------------------------------
-
-    private class MergeFilteredTest extends TestDataProvider {
-        List<VariantContext> inputs;
-        VariantContext expected;
-        String setExpected;
-        GATKVariantContextUtils.FilteredRecordMergeType type;
-
-
-        private MergeFilteredTest(String name, VariantContext input1, VariantContext input2, VariantContext expected, String setExpected) {
-            this(name, input1, input2, expected, GATKVariantContextUtils.FilteredRecordMergeType.KEEP_IF_ANY_UNFILTERED, setExpected);
-        }
-
-        private MergeFilteredTest(String name, VariantContext input1, VariantContext input2, VariantContext expected, GATKVariantContextUtils.FilteredRecordMergeType type, String setExpected) {
-            super(MergeFilteredTest.class, name);
-            LinkedList<VariantContext> all = new LinkedList<VariantContext>(Arrays.asList(input1, input2));
-            this.expected = expected;
-            this.type = type;
-            inputs = all;
-            this.setExpected = setExpected;
-        }
-
-        public String toString() {
-            return String.format("%s input=%s expected=%s", super.toString(), inputs, expected);
-        }
-    }
-
-    @DataProvider(name = "mergeFiltered")
-    public Object[][] mergeFilteredData() {
-        new MergeFilteredTest("AllPass",
-                makeVC("1", Arrays.asList(Aref, T), VariantContext.PASSES_FILTERS),
-                makeVC("2", Arrays.asList(Aref, T), VariantContext.PASSES_FILTERS),
-                makeVC("3", Arrays.asList(Aref, T), VariantContext.PASSES_FILTERS),
-                GATKVariantContextUtils.MERGE_INTERSECTION);
-
-        new MergeFilteredTest("noFilters",
-                makeVC("1", Arrays.asList(Aref, T), "."),
-                makeVC("2", Arrays.asList(Aref, T), "."),
-                makeVC("3", Arrays.asList(Aref, T), "."),
-                GATKVariantContextUtils.MERGE_INTERSECTION);
-
-        new MergeFilteredTest("oneFiltered",
-                makeVC("1", Arrays.asList(Aref, T), "."),
-                makeVC("2", Arrays.asList(Aref, T), "FAIL"),
-                makeVC("3", Arrays.asList(Aref, T), "."),
-                String.format("1-%s2", GATKVariantContextUtils.MERGE_FILTER_PREFIX));
-
-        new MergeFilteredTest("onePassOneFail",
-                makeVC("1", Arrays.asList(Aref, T), VariantContext.PASSES_FILTERS),
-                makeVC("2", Arrays.asList(Aref, T), "FAIL"),
-                makeVC("3", Arrays.asList(Aref, T), VariantContext.PASSES_FILTERS),
-                String.format("1-%s2", GATKVariantContextUtils.MERGE_FILTER_PREFIX));
-
-        new MergeFilteredTest("AllFiltered",
-                makeVC("1", Arrays.asList(Aref, T), "FAIL"),
-                makeVC("2", Arrays.asList(Aref, T), "FAIL"),
-                makeVC("3", Arrays.asList(Aref, T), "FAIL"),
-                GATKVariantContextUtils.MERGE_FILTER_IN_ALL);
-
-        // test ALL vs. ANY
-        new MergeFilteredTest("FailOneUnfiltered",
-                makeVC("1", Arrays.asList(Aref, T), "FAIL"),
-                makeVC("2", Arrays.asList(Aref, T), "."),
-                makeVC("3", Arrays.asList(Aref, T), "."),
-                GATKVariantContextUtils.FilteredRecordMergeType.KEEP_IF_ANY_UNFILTERED,
-                String.format("%s1-2", GATKVariantContextUtils.MERGE_FILTER_PREFIX));
-
-        new MergeFilteredTest("OneFailAllUnfilteredArg",
-                makeVC("1", Arrays.asList(Aref, T), "FAIL"),
-                makeVC("2", Arrays.asList(Aref, T), "."),
-                makeVC("3", Arrays.asList(Aref, T), "FAIL"),
-                GATKVariantContextUtils.FilteredRecordMergeType.KEEP_IF_ALL_UNFILTERED,
-                String.format("%s1-2", GATKVariantContextUtils.MERGE_FILTER_PREFIX));
-
-        // test excluding allele in filtered record
-        new MergeFilteredTest("DontIncludeAlleleOfFilteredRecords",
-                makeVC("1", Arrays.asList(Aref, T), "."),
-                makeVC("2", Arrays.asList(Aref, T), "FAIL"),
-                makeVC("3", Arrays.asList(Aref, T), "."),
-                String.format("1-%s2", GATKVariantContextUtils.MERGE_FILTER_PREFIX));
-
-        // promotion of site from unfiltered to PASSES
-        new MergeFilteredTest("UnfilteredPlusPassIsPass",
-                makeVC("1", Arrays.asList(Aref, T), "."),
-                makeVC("2", Arrays.asList(Aref, T), VariantContext.PASSES_FILTERS),
-                makeVC("3", Arrays.asList(Aref, T), VariantContext.PASSES_FILTERS),
-                GATKVariantContextUtils.MERGE_INTERSECTION);
-
-        new MergeFilteredTest("RefInAll",
-                makeVC("1", Arrays.asList(Aref), VariantContext.PASSES_FILTERS),
-                makeVC("2", Arrays.asList(Aref), VariantContext.PASSES_FILTERS),
-                makeVC("3", Arrays.asList(Aref), VariantContext.PASSES_FILTERS),
-                GATKVariantContextUtils.MERGE_REF_IN_ALL);
-
-        new MergeFilteredTest("RefInOne",
-                makeVC("1", Arrays.asList(Aref), VariantContext.PASSES_FILTERS),
-                makeVC("2", Arrays.asList(Aref, T), VariantContext.PASSES_FILTERS),
-                makeVC("3", Arrays.asList(Aref, T), VariantContext.PASSES_FILTERS),
-                "2");
-
-        return MergeFilteredTest.getTests(MergeFilteredTest.class);
-    }
-
-    @Test(enabled = !DEBUG, dataProvider = "mergeFiltered")
-    public void testMergeFiltered(MergeFilteredTest cfg) {
-        final List<String> priority = vcs2priority(cfg.inputs);
-        final VariantContext merged = GATKVariantContextUtils.simpleMerge(
-                cfg.inputs, priority, cfg.type, GATKVariantContextUtils.GenotypeMergeType.PRIORITIZE, true, false, "set", false, false);
-
-        // test alleles are equal
-        Assert.assertEquals(merged.getAlleles(), cfg.expected.getAlleles());
-
-        // test set field
-        Assert.assertEquals(merged.getAttribute("set"), cfg.setExpected);
-
-        // test filter field
-        Assert.assertEquals(merged.getFilters(), cfg.expected.getFilters());
-    }
-
-    // --------------------------------------------------------------------------------
-    //
-    // Test genotype merging
-    //
-    // --------------------------------------------------------------------------------
-
-    private class MergeGenotypesTest extends TestDataProvider {
-        List<VariantContext> inputs;
-        VariantContext expected;
-        List<String> priority;
-
-        private MergeGenotypesTest(String name, String priority, VariantContext... arg) {
-            super(MergeGenotypesTest.class, name);
-            LinkedList<VariantContext> all = new LinkedList<VariantContext>(Arrays.asList(arg));
-            this.expected = all.pollLast();
-            inputs = all;
-            this.priority = Arrays.asList(priority.split(","));
-        }
-
-        public String toString() {
-            return String.format("%s input=%s expected=%s", super.toString(), inputs, expected);
-        }
-    }
-
-    @DataProvider(name = "mergeGenotypes")
-    public Object[][] mergeGenotypesData() {
-        new MergeGenotypesTest("TakeGenotypeByPriority-1,2", "1,2",
-                makeVC("1", Arrays.asList(Aref, T), makeG("s1", Aref, T, -1)),
-                makeVC("2", Arrays.asList(Aref, T), makeG("s1", Aref, T, -2)),
-                makeVC("3", Arrays.asList(Aref, T), makeG("s1", Aref, T, -1)));
-
-        new MergeGenotypesTest("TakeGenotypeByPriority-1,2-nocall", "1,2",
-                makeVC("1", Arrays.asList(Aref, T), makeG("s1", Allele.NO_CALL, Allele.NO_CALL, -1)),
-                makeVC("2", Arrays.asList(Aref, T), makeG("s1", Aref, T, -2)),
-                makeVC("3", Arrays.asList(Aref, T), makeG("s1", Allele.NO_CALL, Allele.NO_CALL, -1)));
-
-        new MergeGenotypesTest("TakeGenotypeByPriority-2,1", "2,1",
-                makeVC("1", Arrays.asList(Aref, T), makeG("s1", Aref, T, -1)),
-                makeVC("2", Arrays.asList(Aref, T), makeG("s1", Aref, T, -2)),
-                makeVC("3", Arrays.asList(Aref, T), makeG("s1", Aref, T, -2)));
-
-        new MergeGenotypesTest("NonOverlappingGenotypes", "1,2",
-                makeVC("1", Arrays.asList(Aref, T), makeG("s1", Aref, T, -1)),
-                makeVC("2", Arrays.asList(Aref, T), makeG("s2", Aref, T, -2)),
-                makeVC("3", Arrays.asList(Aref, T), makeG("s1", Aref, T, -1), makeG("s2", Aref, T, -2)));
-
-        new MergeGenotypesTest("PreserveNoCall", "1,2",
-                makeVC("1", Arrays.asList(Aref, T), makeG("s1", Allele.NO_CALL, Allele.NO_CALL, -1)),
-                makeVC("2", Arrays.asList(Aref, T), makeG("s2", Aref, T, -2)),
-                makeVC("3", Arrays.asList(Aref, T), makeG("s1", Allele.NO_CALL, Allele.NO_CALL, -1), makeG("s2", Aref, T, -2)));
-
-        new MergeGenotypesTest("PerserveAlleles", "1,2",
-                makeVC("1", Arrays.asList(Aref, T), makeG("s1", Aref, T, -1)),
-                makeVC("2", Arrays.asList(Aref, C), makeG("s2", Aref, C, -2)),
-                makeVC("3", Arrays.asList(Aref, T, C), makeG("s1", Aref, T, -1), makeG("s2", Aref, C, -2)));
-
-        new MergeGenotypesTest("TakeGenotypePartialOverlap-1,2", "1,2",
-                makeVC("1", Arrays.asList(Aref, T), makeG("s1", Aref, T, -1)),
-                makeVC("2", Arrays.asList(Aref, T), makeG("s1", Aref, T, -2), makeG("s3", Aref, T, -3)),
-                makeVC("3", Arrays.asList(Aref, T), makeG("s1", Aref, T, -1), makeG("s3", Aref, T, -3)));
-
-        new MergeGenotypesTest("TakeGenotypePartialOverlap-2,1", "2,1",
-                makeVC("1", Arrays.asList(Aref, T), makeG("s1", Aref, T, -1)),
-                makeVC("2", Arrays.asList(Aref, T), makeG("s1", Aref, T, -2), makeG("s3", Aref, T, -3)),
-                makeVC("3", Arrays.asList(Aref, T), makeG("s1", Aref, T, -2), makeG("s3", Aref, T, -3)));
-
-        //
-        // merging genothpes with PLs
-        //
-
-        // first, do no harm
-        new MergeGenotypesTest("OrderedPLs", "1",
-                makeVC("1", Arrays.asList(Aref, T), makeG("s1", Aref, T, -1, 1, 2, 3)),
-                makeVC("1", Arrays.asList(Aref, T), makeG("s1", Aref, T, -1, 1, 2, 3)));
-
-        // first, do no harm
-        new MergeGenotypesTest("OrderedPLs-3Alleles", "1",
-                makeVC("1", Arrays.asList(Aref, C, T), makeG("s1", Aref, T, -1, 1, 2, 3, 4, 5, 6)),
-                makeVC("1", Arrays.asList(Aref, C, T), makeG("s1", Aref, T, -1, 1, 2, 3, 4, 5, 6)));
-
-        // first, do no harm
-        new MergeGenotypesTest("OrderedPLs-3Alleles-2", "1",
-                makeVC("1", Arrays.asList(Aref, T, C), makeG("s1", Aref, T, -1, 1, 2, 3, 4, 5, 6)),
-                makeVC("1", Arrays.asList(Aref, T, C), makeG("s1", Aref, T, -1, 1, 2, 3, 4, 5, 6)));
-
-        // first, do no harm
-        new MergeGenotypesTest("OrderedPLs-3Alleles-2", "1",
-                makeVC("1", Arrays.asList(Aref, T, C), makeG("s1", Aref, T, -1, 1, 2, 3, 4, 5, 6)),
-                makeVC("1", Arrays.asList(Aref, T, C), makeG("s2", Aref, C, -1, 1, 2, 3, 4, 5, 6)),
-                makeVC("1", Arrays.asList(Aref, T, C), makeG("s1", Aref, T, -1, 1, 2, 3, 4, 5, 6), makeG("s2", Aref, C, -1, 1, 2, 3, 4, 5, 6)));
-
-        new MergeGenotypesTest("TakeGenotypePartialOverlapWithPLs-2,1", "2,1",
-                makeVC("1", Arrays.asList(Aref, T), makeG("s1", Aref, T, -1,5,0,3)),
-                makeVC("2", Arrays.asList(Aref, T), makeG("s1", Aref, T, -2,4,0,2), makeG("s3", Aref, T, -3,3,0,2)),
-                makeVC("3", Arrays.asList(Aref, T), makeG("s1", Aref, T, -2,4,0,2), makeG("s3", Aref, T, -3,3,0,2)));
-
-        new MergeGenotypesTest("TakeGenotypePartialOverlapWithPLs-1,2", "1,2",
-                makeVC("1", Arrays.asList(Aref,ATC), makeG("s1", Aref, ATC, -1,5,0,3)),
-                makeVC("2", Arrays.asList(Aref, T), makeG("s1", Aref, T, -2,4,0,2), makeG("s3", Aref, T, -3,3,0,2)),
-                // no likelihoods on result since type changes to mixed multiallelic
-                makeVC("3", Arrays.asList(Aref, ATC, T), makeG("s1", Aref, ATC, -1), makeG("s3", Aref, T, -3)));
-
-        new MergeGenotypesTest("MultipleSamplePLsDifferentOrder", "1,2",
-                makeVC("1", Arrays.asList(Aref, C, T), makeG("s1", Aref, C, -1, 1, 2, 3, 4, 5, 6)),
-                makeVC("2", Arrays.asList(Aref, T, C), makeG("s2", Aref, T, -2, 6, 5, 4, 3, 2, 1)),
-                // no likelihoods on result since type changes to mixed multiallelic
-                makeVC("3", Arrays.asList(Aref, C, T), makeG("s1", Aref, C, -1), makeG("s2", Aref, T, -2)));
-
-        return MergeGenotypesTest.getTests(MergeGenotypesTest.class);
-    }
-
-    @Test(enabled = !DEBUG, dataProvider = "mergeGenotypes")
-    public void testMergeGenotypes(MergeGenotypesTest cfg) {
-        final VariantContext merged = GATKVariantContextUtils.simpleMerge(
-                cfg.inputs, cfg.priority, GATKVariantContextUtils.FilteredRecordMergeType.KEEP_IF_ANY_UNFILTERED,
-                GATKVariantContextUtils.GenotypeMergeType.PRIORITIZE, true, false, "set", false, false);
-
-        // test alleles are equal
-        Assert.assertEquals(merged.getAlleles(), cfg.expected.getAlleles());
-
-        // test genotypes
-        assertGenotypesAreMostlyEqual(merged.getGenotypes(), cfg.expected.getGenotypes());
-    }
-
-    // necessary to not overload equals for genotypes
-    private void assertGenotypesAreMostlyEqual(GenotypesContext actual, GenotypesContext expected) {
-        if (actual == expected) {
-            return;
-        }
-
-        if (actual == null || expected == null) {
-            Assert.fail("Maps not equal: expected: " + expected + " and actual: " + actual);
-        }
-
-        if (actual.size() != expected.size()) {
-            Assert.fail("Maps do not have the same size:" + actual.size() + " != " + expected.size());
-        }
-
-        for (Genotype value : actual) {
-            Genotype expectedValue = expected.get(value.getSampleName());
-
-            Assert.assertEquals(value.getAlleles(), expectedValue.getAlleles(), "Alleles in Genotype aren't equal");
-            Assert.assertEquals(value.getGQ(), expectedValue.getGQ(), "GQ values aren't equal");
-            Assert.assertEquals(value.hasLikelihoods(), expectedValue.hasLikelihoods(), "Either both have likelihoods or both not");
-            if ( value.hasLikelihoods() )
-                Assert.assertEquals(value.getLikelihoods().getAsVector(), expectedValue.getLikelihoods().getAsVector(), "Genotype likelihoods aren't equal");
-        }
-    }
-
-    @Test(enabled = !DEBUG)
-    public void testMergeGenotypesUniquify() {
-        final VariantContext vc1 = makeVC("1", Arrays.asList(Aref, T), makeG("s1", Aref, T, -1));
-        final VariantContext vc2 = makeVC("2", Arrays.asList(Aref, T), makeG("s1", Aref, T, -2));
-
-        final VariantContext merged = GATKVariantContextUtils.simpleMerge(
-                Arrays.asList(vc1, vc2), null, GATKVariantContextUtils.FilteredRecordMergeType.KEEP_IF_ANY_UNFILTERED,
-                GATKVariantContextUtils.GenotypeMergeType.UNIQUIFY, false, false, "set", false, false);
-
-        // test genotypes
-        Assert.assertEquals(merged.getSampleNames(), new HashSet<>(Arrays.asList("s1.1", "s1.2")));
-    }
-
-// TODO: remove after testing
-//    @Test(expectedExceptions = IllegalStateException.class)
-//    public void testMergeGenotypesRequireUnique() {
-//        final VariantContext vc1 = makeVC("1", Arrays.asList(Aref, T), makeG("s1", Aref, T, -1));
-//        final VariantContext vc2 = makeVC("2", Arrays.asList(Aref, T), makeG("s1", Aref, T, -2));
-//
-//        final VariantContext merged = VariantContextUtils.simpleMerge(
-//                Arrays.asList(vc1, vc2), null, VariantContextUtils.FilteredRecordMergeType.KEEP_IF_ANY_UNFILTERED,
-//                VariantContextUtils.GenotypeMergeType.REQUIRE_UNIQUE, false, false, "set", false, false, false);
-//    }
-
-    // --------------------------------------------------------------------------------
-    //
-    // Misc. tests
-    //
-    // --------------------------------------------------------------------------------
-
-    @Test(enabled = !DEBUG)
-    public void testAnnotationSet() {
-        for ( final boolean annotate : Arrays.asList(true, false)) {
-            for ( final String set : Arrays.asList("set", "combine", "x")) {
-                final List<String> priority = Arrays.asList("1", "2");
-                VariantContext vc1 = makeVC("1", Arrays.asList(Aref, T), VariantContext.PASSES_FILTERS);
-                VariantContext vc2 = makeVC("2", Arrays.asList(Aref, T), VariantContext.PASSES_FILTERS);
-
-                final VariantContext merged = GATKVariantContextUtils.simpleMerge(
-                        Arrays.asList(vc1, vc2), priority, GATKVariantContextUtils.FilteredRecordMergeType.KEEP_IF_ANY_UNFILTERED,
-                        GATKVariantContextUtils.GenotypeMergeType.PRIORITIZE, annotate, false, set, false, false);
-
-                if ( annotate )
-                    Assert.assertEquals(merged.getAttribute(set), GATKVariantContextUtils.MERGE_INTERSECTION);
-                else
-                    Assert.assertFalse(merged.hasAttribute(set));
-            }
-        }
-    }
-
-    private static final List<String> vcs2priority(final Collection<VariantContext> vcs) {
-        final List<String> priority = new ArrayList<>();
-
-        for ( final VariantContext vc : vcs ) {
-            priority.add(vc.getSource());
-        }
-
-        return priority;
-    }
-
-    // --------------------------------------------------------------------------------
-    //
-    // basic allele clipping test
-    //
-    // --------------------------------------------------------------------------------
-
-    private class ReverseClippingPositionTestProvider extends TestDataProvider {
-        final String ref;
-        final List<Allele> alleles = new ArrayList<Allele>();
-        final int expectedClip;
-
-        private ReverseClippingPositionTestProvider(final int expectedClip, final String ref, final String... alleles) {
-            super(ReverseClippingPositionTestProvider.class);
-            this.ref = ref;
-            for ( final String allele : alleles )
-                this.alleles.add(Allele.create(allele));
-            this.expectedClip = expectedClip;
-        }
-
-        @Override
-        public String toString() {
-            return String.format("ref=%s allele=%s reverse clip %d", ref, alleles, expectedClip);
-        }
-    }
-
-    @DataProvider(name = "ReverseClippingPositionTestProvider")
-    public Object[][] makeReverseClippingPositionTestProvider() {
-        // pair clipping
-        new ReverseClippingPositionTestProvider(0, "ATT", "CCG");
-        new ReverseClippingPositionTestProvider(1, "ATT", "CCT");
-        new ReverseClippingPositionTestProvider(2, "ATT", "CTT");
-        new ReverseClippingPositionTestProvider(2, "ATT", "ATT");  // cannot completely clip allele
-
-        // triplets
-        new ReverseClippingPositionTestProvider(0, "ATT", "CTT", "CGG");
-        new ReverseClippingPositionTestProvider(1, "ATT", "CTT", "CGT"); // the T can go
-        new ReverseClippingPositionTestProvider(2, "ATT", "CTT", "CTT"); // both Ts can go
-
-        return ReverseClippingPositionTestProvider.getTests(ReverseClippingPositionTestProvider.class);
-    }
-
-    @Test(enabled = !DEBUG, dataProvider = "ReverseClippingPositionTestProvider")
-    public void testReverseClippingPositionTestProvider(ReverseClippingPositionTestProvider cfg) {
-        int result = GATKVariantContextUtils.computeReverseClipping(cfg.alleles, cfg.ref.getBytes());
-        Assert.assertEquals(result, cfg.expectedClip);
-    }
-
-
-    // --------------------------------------------------------------------------------
-    //
-    // test splitting into bi-allelics
-    //
-    // --------------------------------------------------------------------------------
-
-    @DataProvider(name = "SplitBiallelics")
-    public Object[][] makeSplitBiallelics() throws CloneNotSupportedException {
-        List<Object[]> tests = new ArrayList<Object[]>();
-
-        final VariantContextBuilder root = new VariantContextBuilder("x", "20", 10, 10, Arrays.asList(Aref, C));
-
-        // biallelic -> biallelic
-        tests.add(new Object[]{root.make(), Arrays.asList(root.make())});
-
-        // monos -> monos
-        root.alleles(Arrays.asList(Aref));
-        tests.add(new Object[]{root.make(), Arrays.asList(root.make())});
-
-        root.alleles(Arrays.asList(Aref, C, T));
-        tests.add(new Object[]{root.make(),
-                Arrays.asList(
-                        root.alleles(Arrays.asList(Aref, C)).make(),
-                        root.alleles(Arrays.asList(Aref, T)).make())});
-
-        root.alleles(Arrays.asList(Aref, C, T, G));
-        tests.add(new Object[]{root.make(),
-                Arrays.asList(
-                        root.alleles(Arrays.asList(Aref, C)).make(),
-                        root.alleles(Arrays.asList(Aref, T)).make(),
-                        root.alleles(Arrays.asList(Aref, G)).make())});
-
-        final Allele C      = Allele.create("C");
-        final Allele CA      = Allele.create("CA");
-        final Allele CAA     = Allele.create("CAA");
-        final Allele CAAAA   = Allele.create("CAAAA");
-        final Allele CAAAAA  = Allele.create("CAAAAA");
-        final Allele Cref      = Allele.create("C", true);
-        final Allele CAref     = Allele.create("CA", true);
-        final Allele CAAref    = Allele.create("CAA", true);
-        final Allele CAAAref   = Allele.create("CAAA", true);
-
-        root.alleles(Arrays.asList(Cref, CA, CAA));
-        tests.add(new Object[]{root.make(),
-                Arrays.asList(
-                        root.alleles(Arrays.asList(Cref, CA)).make(),
-                        root.alleles(Arrays.asList(Cref, CAA)).make())});
-
-        root.alleles(Arrays.asList(CAAref, C, CA)).stop(12);
-        tests.add(new Object[]{root.make(),
-                Arrays.asList(
-                        root.alleles(Arrays.asList(CAAref, C)).make(),
-                        root.alleles(Arrays.asList(CAref, C)).stop(11).make())});
-
-        root.alleles(Arrays.asList(CAAAref, C, CA, CAA)).stop(13);
-        tests.add(new Object[]{root.make(),
-                Arrays.asList(
-                        root.alleles(Arrays.asList(CAAAref, C)).make(),
-                        root.alleles(Arrays.asList(CAAref, C)).stop(12).make(),
-                        root.alleles(Arrays.asList(CAref, C)).stop(11).make())});
-
-        root.alleles(Arrays.asList(CAAAref, CAAAAA, CAAAA, CAA, C)).stop(13);
-        tests.add(new Object[]{root.make(),
-                Arrays.asList(
-                        root.alleles(Arrays.asList(Cref, CAA)).stop(10).make(),
-                        root.alleles(Arrays.asList(Cref, CA)).stop(10).make(),
-                        root.alleles(Arrays.asList(CAref, C)).stop(11).make(),
-                        root.alleles(Arrays.asList(CAAAref, C)).stop(13).make())});
-
-        final Allele threeCopies = Allele.create("GTTTTATTTTATTTTA", true);
-        final Allele twoCopies = Allele.create("GTTTTATTTTA", true);
-        final Allele zeroCopies = Allele.create("G", false);
-        final Allele oneCopies = Allele.create("GTTTTA", false);
-        tests.add(new Object[]{root.alleles(Arrays.asList(threeCopies, zeroCopies, oneCopies)).stop(25).make(),
-                Arrays.asList(
-                        root.alleles(Arrays.asList(threeCopies, zeroCopies)).stop(25).make(),
-                        root.alleles(Arrays.asList(twoCopies, zeroCopies)).stop(20).make())});
-
-        return tests.toArray(new Object[][]{});
-    }
-
-    @Test(enabled = !DEBUG, dataProvider = "SplitBiallelics")
-    public void testSplitBiallelicsNoGenotypes(final VariantContext vc, final List<VariantContext> expectedBiallelics) {
-        final List<VariantContext> biallelics = GATKVariantContextUtils.splitVariantContextToBiallelics(vc);
-        Assert.assertEquals(biallelics.size(), expectedBiallelics.size());
-        for ( int i = 0; i < biallelics.size(); i++ ) {
-            final VariantContext actual = biallelics.get(i);
-            final VariantContext expected = expectedBiallelics.get(i);
-            assertVariantContextsAreEqual(actual, expected);
-        }
-    }
-
-    @Test(enabled = !DEBUG, dataProvider = "SplitBiallelics", dependsOnMethods = "testSplitBiallelicsNoGenotypes")
-    public void testSplitBiallelicsGenotypes(final VariantContext vc, final List<VariantContext> expectedBiallelics) {
-        final List<Genotype> genotypes = new ArrayList<Genotype>();
-
-        int sampleI = 0;
-        for ( final List<Allele> alleles : Utils.makePermutations(vc.getAlleles(), 2, true) ) {
-            genotypes.add(GenotypeBuilder.create("sample" + sampleI++, alleles));
-        }
-        genotypes.add(GenotypeBuilder.createMissing("missing", 2));
-
-        final VariantContext vcWithGenotypes = new VariantContextBuilder(vc).genotypes(genotypes).make();
-
-        final List<VariantContext> biallelics = GATKVariantContextUtils.splitVariantContextToBiallelics(vcWithGenotypes);
-        for ( int i = 0; i < biallelics.size(); i++ ) {
-            final VariantContext actual = biallelics.get(i);
-            Assert.assertEquals(actual.getNSamples(), vcWithGenotypes.getNSamples()); // not dropping any samples
-
-            for ( final Genotype inputGenotype : genotypes ) {
-                final Genotype actualGenotype = actual.getGenotype(inputGenotype.getSampleName());
-                Assert.assertNotNull(actualGenotype);
-                if ( ! vc.isVariant() || vc.isBiallelic() )
-                    Assert.assertEquals(actualGenotype, vcWithGenotypes.getGenotype(inputGenotype.getSampleName()));
-                else
-                    Assert.assertTrue(actualGenotype.isNoCall());
-            }
-        }
-    }
-
-    // --------------------------------------------------------------------------------
-    //
-    // Test repeats
-    //
-    // --------------------------------------------------------------------------------
-
-    private class RepeatDetectorTest extends TestDataProvider {
-        String ref;
-        boolean isTrueRepeat;
-        VariantContext vc;
-
-        private RepeatDetectorTest(boolean isTrueRepeat, String ref, String refAlleleString, String ... altAlleleStrings) {
-            super(RepeatDetectorTest.class);
-            this.isTrueRepeat = isTrueRepeat;
-            this.ref = ref;
-
-            List<Allele> alleles = new LinkedList<Allele>();
-            final Allele refAllele = Allele.create(refAlleleString, true);
-            alleles.add(refAllele);
-            for ( final String altString: altAlleleStrings) {
-                final Allele alt = Allele.create(altString, false);
-                alleles.add(alt);
-            }
-
-            VariantContextBuilder builder = new VariantContextBuilder("test", "chr1", 1, refAllele.length(), alleles);
-            this.vc = builder.make();
-        }
-
-        public String toString() {
-            return String.format("%s refBases=%s trueRepeat=%b vc=%s", super.toString(), ref, isTrueRepeat, vc);
-        }
-    }
-
-    @DataProvider(name = "RepeatDetectorTest")
-    public Object[][] makeRepeatDetectorTest() {
-        new RepeatDetectorTest(true,  "NAAC", "N", "NA");
-        new RepeatDetectorTest(true,  "NAAC", "NA", "N");
-        new RepeatDetectorTest(false, "NAAC", "NAA", "N");
-        new RepeatDetectorTest(false, "NAAC", "N", "NC");
-        new RepeatDetectorTest(false, "AAC", "A", "C");
-
-        // running out of ref bases => false
-        new RepeatDetectorTest(false, "NAAC", "N", "NCAGTA");
-
-        // complex repeats
-        new RepeatDetectorTest(true,  "NATATATC", "N", "NAT");
-        new RepeatDetectorTest(true,  "NATATATC", "N", "NATA");
-        new RepeatDetectorTest(true,  "NATATATC", "N", "NATAT");
-        new RepeatDetectorTest(true,  "NATATATC", "NAT", "N");
-        new RepeatDetectorTest(false, "NATATATC", "NATA", "N");
-        new RepeatDetectorTest(false, "NATATATC", "NATAT", "N");
-
-        // multi-allelic
-        new RepeatDetectorTest(true,  "NATATATC", "N", "NAT", "NATAT");
-        new RepeatDetectorTest(true,  "NATATATC", "N", "NAT", "NATA");
-        new RepeatDetectorTest(true,  "NATATATC", "NAT", "N", "NATAT");
-        new RepeatDetectorTest(true,  "NATATATC", "NAT", "N", "NATA"); // two As
-        new RepeatDetectorTest(false, "NATATATC", "NAT", "N", "NATC"); // false
-        new RepeatDetectorTest(false, "NATATATC", "NAT", "N", "NCC"); // false
-        new RepeatDetectorTest(false, "NATATATC", "NAT", "NATAT", "NCC"); // false
-
-        return RepeatDetectorTest.getTests(RepeatDetectorTest.class);
-    }
-
-    @Test(enabled = !DEBUG, dataProvider = "RepeatDetectorTest")
-    public void testRepeatDetectorTest(RepeatDetectorTest cfg) {
-
-        // test alleles are equal
-        Assert.assertEquals(GATKVariantContextUtils.isTandemRepeat(cfg.vc, cfg.ref.getBytes()), cfg.isTrueRepeat);
-    }
-
-    @Test(enabled = !DEBUG)
-    public void testRepeatAllele() {
-        Allele nullR = Allele.create("A", true);
-        Allele nullA = Allele.create("A", false);
-        Allele atc   = Allele.create("AATC", false);
-        Allele atcatc   = Allele.create("AATCATC", false);
-        Allele ccccR = Allele.create("ACCCC", true);
-        Allele cc   = Allele.create("ACC", false);
-        Allele cccccc   = Allele.create("ACCCCCC", false);
-        Allele gagaR   = Allele.create("AGAGA", true);
-        Allele gagagaga   = Allele.create("AGAGAGAGA", false);
-
-        // - / ATC [ref] from 20-22
-        String delLoc = "chr1";
-        int delLocStart = 20;
-        int delLocStop = 22;
-
-        // - [ref] / ATC from 20-20
-        String insLoc = "chr1";
-        int insLocStart = 20;
-        int insLocStop = 20;
-
-        Pair<List<Integer>,byte[]> result;
-        byte[] refBytes = "TATCATCATCGGA".getBytes();
-
-        Assert.assertEquals(GATKVariantContextUtils.findNumberOfRepetitions("ATG".getBytes(), "ATGATGATGATG".getBytes(), true),4);
-        Assert.assertEquals(GATKVariantContextUtils.findNumberOfRepetitions("G".getBytes(), "ATGATGATGATG".getBytes(), true),0);
-        Assert.assertEquals(GATKVariantContextUtils.findNumberOfRepetitions("T".getBytes(), "T".getBytes(), true),1);
-        Assert.assertEquals(GATKVariantContextUtils.findNumberOfRepetitions("AT".getBytes(), "ATGATGATCATG".getBytes(), true),1);
-        Assert.assertEquals(GATKVariantContextUtils.findNumberOfRepetitions("CCC".getBytes(), "CCCCCCCC".getBytes(), true),2);
-
-        Assert.assertEquals(GATKVariantContextUtils.findRepeatedSubstring("ATG".getBytes()),3);
-        Assert.assertEquals(GATKVariantContextUtils.findRepeatedSubstring("AAA".getBytes()),1);
-        Assert.assertEquals(GATKVariantContextUtils.findRepeatedSubstring("CACACAC".getBytes()),7);
-        Assert.assertEquals(GATKVariantContextUtils.findRepeatedSubstring("CACACA".getBytes()),2);
-        Assert.assertEquals(GATKVariantContextUtils.findRepeatedSubstring("CATGCATG".getBytes()),4);
-        Assert.assertEquals(GATKVariantContextUtils.findRepeatedSubstring("AATAATA".getBytes()),7);
-
-
-        // A*,ATC, context = ATC ATC ATC : (ATC)3 -> (ATC)4
-        VariantContext vc = new VariantContextBuilder("foo", insLoc, insLocStart, insLocStop, Arrays.asList(nullR,atc)).make();
-        result = GATKVariantContextUtils.getNumTandemRepeatUnits(vc, refBytes);
-        Assert.assertEquals(result.getFirst().toArray()[0],3);
-        Assert.assertEquals(result.getFirst().toArray()[1],4);
-        Assert.assertEquals(result.getSecond().length,3);
-
-        // ATC*,A,ATCATC
-        vc = new VariantContextBuilder("foo", insLoc, insLocStart, insLocStart+3, Arrays.asList(Allele.create("AATC", true),nullA,atcatc)).make();
-        result = GATKVariantContextUtils.getNumTandemRepeatUnits(vc, refBytes);
-        Assert.assertEquals(result.getFirst().toArray()[0],3);
-        Assert.assertEquals(result.getFirst().toArray()[1],2);
-        Assert.assertEquals(result.getFirst().toArray()[2],4);
-        Assert.assertEquals(result.getSecond().length,3);
-
-        // simple non-tandem deletion: CCCC*, -
-        refBytes = "TCCCCCCCCATG".getBytes();
-        vc = new VariantContextBuilder("foo", delLoc, 10, 14, Arrays.asList(ccccR,nullA)).make();
-        result = GATKVariantContextUtils.getNumTandemRepeatUnits(vc, refBytes);
-        Assert.assertEquals(result.getFirst().toArray()[0],8);
-        Assert.assertEquals(result.getFirst().toArray()[1],4);
-        Assert.assertEquals(result.getSecond().length,1);
-
-        // CCCC*,CC,-,CCCCCC, context = CCC: (C)7 -> (C)5,(C)3,(C)9
-        refBytes = "TCCCCCCCAGAGAGAG".getBytes();
-        vc = new VariantContextBuilder("foo", insLoc, insLocStart, insLocStart+4, Arrays.asList(ccccR,cc, nullA,cccccc)).make();
-        result = GATKVariantContextUtils.getNumTandemRepeatUnits(vc, refBytes);
-        Assert.assertEquals(result.getFirst().toArray()[0],7);
-        Assert.assertEquals(result.getFirst().toArray()[1],5);
-        Assert.assertEquals(result.getFirst().toArray()[2],3);
-        Assert.assertEquals(result.getFirst().toArray()[3],9);
-        Assert.assertEquals(result.getSecond().length,1);
-
-        // GAGA*,-,GAGAGAGA
-        refBytes = "TGAGAGAGAGATTT".getBytes();
-        vc = new VariantContextBuilder("foo", insLoc, insLocStart, insLocStart+4, Arrays.asList(gagaR, nullA,gagagaga)).make();
-        result = GATKVariantContextUtils.getNumTandemRepeatUnits(vc, refBytes);
-        Assert.assertEquals(result.getFirst().toArray()[0],5);
-        Assert.assertEquals(result.getFirst().toArray()[1],3);
-        Assert.assertEquals(result.getFirst().toArray()[2],7);
-        Assert.assertEquals(result.getSecond().length,2);
-
-    }
-
-    // --------------------------------------------------------------------------------
-    //
-    // test forward clipping
-    //
-    // --------------------------------------------------------------------------------
-
-    @DataProvider(name = "ForwardClippingData")
-    public Object[][] makeForwardClippingData() {
-        List<Object[]> tests = new ArrayList<Object[]>();
-
-        // this functionality can be adapted to provide input data for whatever you might want in your data
-        tests.add(new Object[]{Arrays.asList("A"), -1});
-        tests.add(new Object[]{Arrays.asList("<DEL>"), -1});
-        tests.add(new Object[]{Arrays.asList("A", "C"), -1});
-        tests.add(new Object[]{Arrays.asList("AC", "C"), -1});
-        tests.add(new Object[]{Arrays.asList("A", "G"), -1});
-        tests.add(new Object[]{Arrays.asList("A", "T"), -1});
-        tests.add(new Object[]{Arrays.asList("GT", "CA"), -1});
-        tests.add(new Object[]{Arrays.asList("GT", "CT"), -1});
-        tests.add(new Object[]{Arrays.asList("ACC", "AC"), 0});
-        tests.add(new Object[]{Arrays.asList("ACGC", "ACG"), 1});
-        tests.add(new Object[]{Arrays.asList("ACGC", "ACG"), 1});
-        tests.add(new Object[]{Arrays.asList("ACGC", "ACGA"), 2});
-        tests.add(new Object[]{Arrays.asList("ACGC", "AGC"), 0});
-        tests.add(new Object[]{Arrays.asList("A", "<DEL>"), -1});
-        for ( int len = 0; len < 50; len++ )
-            tests.add(new Object[]{Arrays.asList("A" + new String(Utils.dupBytes((byte)'C', len)), "C"), -1});
-
-        tests.add(new Object[]{Arrays.asList("A", "T", "C"), -1});
-        tests.add(new Object[]{Arrays.asList("AT", "AC", "AG"), 0});
-        tests.add(new Object[]{Arrays.asList("AT", "AC", "A"), -1});
-        tests.add(new Object[]{Arrays.asList("AT", "AC", "ACG"), 0});
-        tests.add(new Object[]{Arrays.asList("AC", "AC", "ACG"), 0});
-        tests.add(new Object[]{Arrays.asList("AC", "ACT", "ACG"), 0});
-        tests.add(new Object[]{Arrays.asList("ACG", "ACGT", "ACGTA"), 1});
-        tests.add(new Object[]{Arrays.asList("ACG", "ACGT", "ACGCA"), 1});
-
-        return tests.toArray(new Object[][]{});
-    }
-
-    @Test(enabled = !DEBUG, dataProvider = "ForwardClippingData")
-    public void testForwardClipping(final List<String> alleleStrings, final int expectedClip) {
-        final List<Allele> alleles = new LinkedList<Allele>();
-        for ( final String alleleString : alleleStrings )
-            alleles.add(Allele.create(alleleString));
-
-        for ( final List<Allele> myAlleles : Utils.makePermutations(alleles, alleles.size(), false)) {
-            final int actual = GATKVariantContextUtils.computeForwardClipping(myAlleles);
-            Assert.assertEquals(actual, expectedClip);
-        }
-    }
-
-    @DataProvider(name = "ClipAlleleTest")
-    public Object[][] makeClipAlleleTest() {
-        List<Object[]> tests = new ArrayList<Object[]>();
-
-        // this functionality can be adapted to provide input data for whatever you might want in your data
-        tests.add(new Object[]{Arrays.asList("ACC", "AC"), Arrays.asList("AC", "A"), 0});
-        tests.add(new Object[]{Arrays.asList("ACGC", "ACG"), Arrays.asList("GC", "G"), 2});
-        tests.add(new Object[]{Arrays.asList("ACGC", "ACGA"), Arrays.asList("C", "A"), 3});
-        tests.add(new Object[]{Arrays.asList("ACGC", "AGC"), Arrays.asList("AC", "A"), 0});
-        tests.add(new Object[]{Arrays.asList("AT", "AC", "AG"), Arrays.asList("T", "C", "G"), 1});
-        tests.add(new Object[]{Arrays.asList("AT", "AC", "ACG"), Arrays.asList("T", "C", "CG"), 1});
-        tests.add(new Object[]{Arrays.asList("AC", "ACT", "ACG"), Arrays.asList("C", "CT", "CG"), 1});
-        tests.add(new Object[]{Arrays.asList("ACG", "ACGT", "ACGTA"), Arrays.asList("G", "GT", "GTA"), 2});
-        tests.add(new Object[]{Arrays.asList("ACG", "ACGT", "ACGCA"), Arrays.asList("G", "GT", "GCA"), 2});
-
-        // trims from left and right
-        tests.add(new Object[]{Arrays.asList("ACGTT", "ACCTT"), Arrays.asList("G", "C"), 2});
-        tests.add(new Object[]{Arrays.asList("ACGTT", "ACCCTT"), Arrays.asList("G", "CC"), 2});
-        tests.add(new Object[]{Arrays.asList("ACGTT", "ACGCTT"), Arrays.asList("G", "GC"), 2});
-
-        return tests.toArray(new Object[][]{});
-    }
-
-    @Test(enabled = !DEBUG, dataProvider = "ClipAlleleTest")
-    public void testClipAlleles(final List<String> alleleStrings, final List<String> expected, final int numLeftClipped) {
-        final int start = 10;
-        final VariantContext unclipped = GATKVariantContextUtils.makeFromAlleles("test", "20", start, alleleStrings);
-        final VariantContext clipped = GATKVariantContextUtils.trimAlleles(unclipped, true, true);
-
-        Assert.assertEquals(clipped.getStart(), unclipped.getStart() + numLeftClipped);
-        for ( int i = 0; i < unclipped.getAlleles().size(); i++ ) {
-            final Allele trimmed = clipped.getAlleles().get(i);
-            Assert.assertEquals(trimmed.getBaseString(), expected.get(i));
-        }
-    }
-
-    // --------------------------------------------------------------------------------
-    //
-    // test primitive allele splitting
-    //
-    // --------------------------------------------------------------------------------
-
-    @DataProvider(name = "PrimitiveAlleleSplittingData")
-    public Object[][] makePrimitiveAlleleSplittingData() {
-        List<Object[]> tests = new ArrayList<>();
-
-        // no split
-        tests.add(new Object[]{"A", "C", 0, null});
-        tests.add(new Object[]{"A", "AC", 0, null});
-        tests.add(new Object[]{"AC", "A", 0, null});
-
-        // one split
-        tests.add(new Object[]{"ACA", "GCA", 1, Arrays.asList(0)});
-        tests.add(new Object[]{"ACA", "AGA", 1, Arrays.asList(1)});
-        tests.add(new Object[]{"ACA", "ACG", 1, Arrays.asList(2)});
-
-        // two splits
-        tests.add(new Object[]{"ACA", "GGA", 2, Arrays.asList(0, 1)});
-        tests.add(new Object[]{"ACA", "GCG", 2, Arrays.asList(0, 2)});
-        tests.add(new Object[]{"ACA", "AGG", 2, Arrays.asList(1, 2)});
-
-        // three splits
-        tests.add(new Object[]{"ACA", "GGG", 3, Arrays.asList(0, 1, 2)});
-
-        return tests.toArray(new Object[][]{});
-    }
-
-    @Test(enabled = !DEBUG, dataProvider = "PrimitiveAlleleSplittingData")
-    public void testPrimitiveAlleleSplitting(final String ref, final String alt, final int expectedSplit, final List<Integer> variantPositions) {
-
-        final int start = 10;
-        final VariantContext vc = GATKVariantContextUtils.makeFromAlleles("test", "20", start, Arrays.asList(ref, alt));
-
-        final List<VariantContext> result = GATKVariantContextUtils.splitIntoPrimitiveAlleles(vc);
-
-        if ( expectedSplit > 0 ) {
-            Assert.assertEquals(result.size(), expectedSplit);
-            for ( int i = 0; i < variantPositions.size(); i++ ) {
-                Assert.assertEquals(result.get(i).getStart(), start + variantPositions.get(i));
-            }
-        } else {
-            Assert.assertEquals(result.size(), 1);
-            Assert.assertEquals(vc, result.get(0));
-        }
-    }
-
-    // --------------------------------------------------------------------------------
-    //
-    // test allele remapping
-    //
-    // --------------------------------------------------------------------------------
-
-    @DataProvider(name = "AlleleRemappingData")
-    public Object[][] makeAlleleRemappingData() {
-        List<Object[]> tests = new ArrayList<>();
-
-        final Allele originalBase1 = Allele.create((byte)'A');
-        final Allele originalBase2 = Allele.create((byte)'T');
-
-        for ( final byte base1 : BaseUtils.BASES ) {
-            for ( final byte base2 : BaseUtils.BASES ) {
-                for ( final int numGenotypes : Arrays.asList(0, 1, 2, 5) ) {
-                    Map<Allele, Allele> map = new HashMap<>(2);
-                    map.put(originalBase1, Allele.create(base1));
-                    map.put(originalBase2, Allele.create(base2));
-
-                    tests.add(new Object[]{map, numGenotypes});
-                }
-            }
-        }
-
-        return tests.toArray(new Object[][]{});
-    }
-
-    @Test(enabled = !DEBUG, dataProvider = "AlleleRemappingData")
-    public void testAlleleRemapping(final Map<Allele, Allele> alleleMap, final int numGenotypes) {
-
-        final GATKVariantContextUtils.AlleleMapper alleleMapper = new GATKVariantContextUtils.AlleleMapper(alleleMap);
-
-        final GenotypesContext originalGC = createGenotypesContext(numGenotypes, new ArrayList(alleleMap.keySet()));
-
-        final GenotypesContext remappedGC = GATKVariantContextUtils.updateGenotypesWithMappedAlleles(originalGC, alleleMapper);
-
-        for ( int i = 0; i < numGenotypes; i++ ) {
-            final Genotype originalG = originalGC.get(String.format("%d", i));
-            final Genotype remappedG = remappedGC.get(String.format("%d", i));
-
-            Assert.assertEquals(originalG.getAlleles().size(), remappedG.getAlleles().size());
-            for ( int j = 0; j < originalG.getAlleles().size(); j++ )
-                Assert.assertEquals(remappedG.getAllele(j), alleleMap.get(originalG.getAllele(j)));
-        }
-    }
-
-    private static GenotypesContext createGenotypesContext(final int numGenotypes, final List<Allele> alleles) {
-        GenomeAnalysisEngine.resetRandomGenerator();
-        final Random random = GenomeAnalysisEngine.getRandomGenerator();
-
-        final GenotypesContext gc = GenotypesContext.create();
-        for ( int i = 0; i < numGenotypes; i++ ) {
-            // choose alleles at random
-            final List<Allele> myAlleles = new ArrayList<Allele>();
-            myAlleles.add(alleles.get(random.nextInt(2)));
-            myAlleles.add(alleles.get(random.nextInt(2)));
-
-            final Genotype g = new GenotypeBuilder(String.format("%d", i)).alleles(myAlleles).make();
-            gc.add(g);
-        }
-
-        return gc;
-    }
-
-    // --------------------------------------------------------------------------------
-    //
-    // Test subsetDiploidAlleles
-    //
-    // --------------------------------------------------------------------------------
-
-    @DataProvider(name = "subsetDiploidAllelesData")
-    public Object[][] makesubsetDiploidAllelesData() {
-        List<Object[]> tests = new ArrayList<>();
-
-        final Allele A = Allele.create("A", true);
-        final Allele C = Allele.create("C");
-        final Allele G = Allele.create("G");
-
-        final List<Allele> AA = Arrays.asList(A,A);
-        final List<Allele> AC = Arrays.asList(A,C);
-        final List<Allele> CC = Arrays.asList(C,C);
-        final List<Allele> AG = Arrays.asList(A,G);
-        final List<Allele> CG = Arrays.asList(C,G);
-        final List<Allele> GG = Arrays.asList(G,G);
-        final List<Allele> ACG = Arrays.asList(A,C,G);
-
-        final VariantContext vcBase = new VariantContextBuilder("test", "20", 10, 10, AC).make();
-
-        final double[] homRefPL = MathUtils.normalizeFromRealSpace(new double[]{0.9, 0.09, 0.01});
-        final double[] hetPL = MathUtils.normalizeFromRealSpace(new double[]{0.09, 0.9, 0.01});
-        final double[] homVarPL = MathUtils.normalizeFromRealSpace(new double[]{0.01, 0.09, 0.9});
-        final double[] uninformative = new double[]{0, 0, 0};
-
-        final Genotype base = new GenotypeBuilder("NA12878").DP(10).GQ(50).make();
-
-        // make sure we don't screw up the simple case
-        final Genotype aaGT = new GenotypeBuilder(base).alleles(AA).AD(new int[]{10,2}).PL(homRefPL).GQ(8).make();
-        final Genotype acGT = new GenotypeBuilder(base).alleles(AC).AD(new int[]{10,2}).PL(hetPL).GQ(8).make();
-        final Genotype ccGT = new GenotypeBuilder(base).alleles(CC).AD(new int[]{10,2}).PL(homVarPL).GQ(8).make();
-
-        tests.add(new Object[]{new VariantContextBuilder(vcBase).genotypes(aaGT).make(), AC, Arrays.asList(new GenotypeBuilder(aaGT).make())});
-        tests.add(new Object[]{new VariantContextBuilder(vcBase).genotypes(acGT).make(), AC, Arrays.asList(new GenotypeBuilder(acGT).make())});
-        tests.add(new Object[]{new VariantContextBuilder(vcBase).genotypes(ccGT).make(), AC, Arrays.asList(new GenotypeBuilder(ccGT).make())});
-
-        // uninformative test case
-        final Genotype uninformativeGT = new GenotypeBuilder(base).alleles(CC).PL(uninformative).GQ(0).make();
-        final Genotype emptyGT = new GenotypeBuilder(base).alleles(GATKVariantContextUtils.NO_CALL_ALLELES).noPL().noGQ().make();
-        tests.add(new Object[]{new VariantContextBuilder(vcBase).genotypes(uninformativeGT).make(), AC, Arrays.asList(emptyGT)});
-
-        // actually subsetting down from multiple alt values
-        final double[] homRef3AllelesPL = new double[]{0, -10, -20, -30, -40, -50};
-        final double[] hetRefC3AllelesPL = new double[]{-10, 0, -20, -30, -40, -50};
-        final double[] homC3AllelesPL = new double[]{-20, -10, 0, -30, -40, -50};
-        final double[] hetRefG3AllelesPL = new double[]{-20, -10, -30, 0, -40, -50};
-        final double[] hetCG3AllelesPL = new double[]{-20, -10, -30, -40, 0, -50}; // AA, AC, CC, AG, CG, GG
-        final double[] homG3AllelesPL = new double[]{-20, -10, -30, -40, -50, 0};  // AA, AC, CC, AG, CG, GG
-        tests.add(new Object[]{
-                new VariantContextBuilder(vcBase).alleles(ACG).genotypes(new GenotypeBuilder(base).alleles(AA).PL(homRef3AllelesPL).make()).make(),
-                AC,
-                Arrays.asList(new GenotypeBuilder(base).alleles(AA).PL(new double[]{0, -10, -20}).GQ(100).make())});
-
-        tests.add(new Object[]{
-                new VariantContextBuilder(vcBase).alleles(ACG).genotypes(new GenotypeBuilder(base).alleles(AA).PL(hetRefC3AllelesPL).make()).make(),
-                AC,
-                Arrays.asList(new GenotypeBuilder(base).alleles(AC).PL(new double[]{-10, 0, -20}).GQ(100).make())});
-
-        tests.add(new Object[]{
-                new VariantContextBuilder(vcBase).alleles(ACG).genotypes(new GenotypeBuilder(base).alleles(AA).PL(homC3AllelesPL).make()).make(),
-                AC,
-                Arrays.asList(new GenotypeBuilder(base).alleles(CC).PL(new double[]{-20, -10, 0}).GQ(100).make())});
-        tests.add(new Object[]{
-                new VariantContextBuilder(vcBase).alleles(ACG).genotypes(new GenotypeBuilder(base).alleles(AA).PL(hetRefG3AllelesPL).make()).make(),
-                AG,
-                Arrays.asList(new GenotypeBuilder(base).alleles(AG).PL(new double[]{-20, 0, -50}).GQ(200).make())});
-
-        // wow, scary -- bad output but discussed with Eric and we think this is the only thing that can be done
-        tests.add(new Object[]{
-                new VariantContextBuilder(vcBase).alleles(ACG).genotypes(new GenotypeBuilder(base).alleles(AA).PL(hetCG3AllelesPL).make()).make(),
-                AG,
-                Arrays.asList(new GenotypeBuilder(base).alleles(AA).PL(new double[]{0, -20, -30}).GQ(200).make())});
-
-        tests.add(new Object[]{
-                new VariantContextBuilder(vcBase).alleles(ACG).genotypes(new GenotypeBuilder(base).alleles(AA).PL(homG3AllelesPL).make()).make(),
-                AG,
-                Arrays.asList(new GenotypeBuilder(base).alleles(GG).PL(new double[]{-20, -40, 0}).GQ(200).make())});
-
-        return tests.toArray(new Object[][]{});
-    }
-
-    @Test(dataProvider = "subsetDiploidAllelesData")
-    public void testsubsetDiploidAllelesData(final VariantContext inputVC,
-                                             final List<Allele> allelesToUse,
-                                             final List<Genotype> expectedGenotypes) {
-        final GenotypesContext actual = GATKVariantContextUtils.subsetDiploidAlleles(inputVC, allelesToUse, GATKVariantContextUtils.GenotypeAssignmentMethod.USE_PLS_TO_ASSIGN);
-
-        Assert.assertEquals(actual.size(), expectedGenotypes.size());
-        for ( final Genotype expected : expectedGenotypes ) {
-            final Genotype actualGT = actual.get(expected.getSampleName());
-            Assert.assertNotNull(actualGT);
-            assertGenotypesAreEqual(actualGT, expected);
-        }
-    }
-
-    @DataProvider(name = "UpdateGenotypeAfterSubsettingData")
-    public Object[][] makeUpdateGenotypeAfterSubsettingData() {
-        List<Object[]> tests = new ArrayList<Object[]>();
-
-        final Allele A = Allele.create("A", true);
-        final Allele C = Allele.create("C");
-        final Allele G = Allele.create("G");
-
-        final List<Allele> AA = Arrays.asList(A,A);
-        final List<Allele> AC = Arrays.asList(A,C);
-        final List<Allele> CC = Arrays.asList(C,C);
-        final List<Allele> AG = Arrays.asList(A,G);
-        final List<Allele> CG = Arrays.asList(C,G);
-        final List<Allele> GG = Arrays.asList(G,G);
-        final List<Allele> ACG = Arrays.asList(A,C,G);
-        final List<List<Allele>> allSubsetAlleles = Arrays.asList(AC,AG,ACG);
-
-        final double[] homRefPL = new double[]{0.9, 0.09, 0.01};
-        final double[] hetPL = new double[]{0.09, 0.9, 0.01};
-        final double[] homVarPL = new double[]{0.01, 0.09, 0.9};
-        final double[] uninformative = new double[]{0.33, 0.33, 0.33};
-        final List<double[]> allPLs = Arrays.asList(homRefPL, hetPL, homVarPL, uninformative);
-
-        for ( final List<Allele> alleles : allSubsetAlleles ) {
-            for ( final double[] pls : allPLs ) {
-                tests.add(new Object[]{GATKVariantContextUtils.GenotypeAssignmentMethod.SET_TO_NO_CALL, pls, AA, alleles, GATKVariantContextUtils.NO_CALL_ALLELES});
-            }
-        }
-
-        for ( final List<Allele> originalGT : Arrays.asList(AA, AC, CC, AG, CG, GG) ) {
-            tests.add(new Object[]{GATKVariantContextUtils.GenotypeAssignmentMethod.USE_PLS_TO_ASSIGN, homRefPL, originalGT, AC, AA});
-            tests.add(new Object[]{GATKVariantContextUtils.GenotypeAssignmentMethod.USE_PLS_TO_ASSIGN, hetPL, originalGT, AC, AC});
-            tests.add(new Object[]{GATKVariantContextUtils.GenotypeAssignmentMethod.USE_PLS_TO_ASSIGN, homVarPL, originalGT, AC, CC});
-//        tests.add(new Object[]{GATKVariantContextUtils.GenotypeAssignmentMethod.USE_PLS_TO_ASSIGN, uninformative, AA, AC, GATKVariantContextUtils.NO_CALL_ALLELES});
-        }
-
-        for ( final double[] pls : allPLs ) {
-            tests.add(new Object[]{GATKVariantContextUtils.GenotypeAssignmentMethod.BEST_MATCH_TO_ORIGINAL, pls, AA, AC, AA});
-            tests.add(new Object[]{GATKVariantContextUtils.GenotypeAssignmentMethod.BEST_MATCH_TO_ORIGINAL, pls, AC, AC, AC});
-            tests.add(new Object[]{GATKVariantContextUtils.GenotypeAssignmentMethod.BEST_MATCH_TO_ORIGINAL, pls, CC, AC, CC});
-            tests.add(new Object[]{GATKVariantContextUtils.GenotypeAssignmentMethod.BEST_MATCH_TO_ORIGINAL, pls, CG, AC, AC});
-
-            tests.add(new Object[]{GATKVariantContextUtils.GenotypeAssignmentMethod.BEST_MATCH_TO_ORIGINAL, pls, AA, AG, AA});
-            tests.add(new Object[]{GATKVariantContextUtils.GenotypeAssignmentMethod.BEST_MATCH_TO_ORIGINAL, pls, AC, AG, AA});
-            tests.add(new Object[]{GATKVariantContextUtils.GenotypeAssignmentMethod.BEST_MATCH_TO_ORIGINAL, pls, CC, AG, AA});
-            tests.add(new Object[]{GATKVariantContextUtils.GenotypeAssignmentMethod.BEST_MATCH_TO_ORIGINAL, pls, CG, AG, AG});
-
-            tests.add(new Object[]{GATKVariantContextUtils.GenotypeAssignmentMethod.BEST_MATCH_TO_ORIGINAL, pls, AA, ACG, AA});
-            tests.add(new Object[]{GATKVariantContextUtils.GenotypeAssignmentMethod.BEST_MATCH_TO_ORIGINAL, pls, AC, ACG, AC});
-            tests.add(new Object[]{GATKVariantContextUtils.GenotypeAssignmentMethod.BEST_MATCH_TO_ORIGINAL, pls, CC, ACG, CC});
-            tests.add(new Object[]{GATKVariantContextUtils.GenotypeAssignmentMethod.BEST_MATCH_TO_ORIGINAL, pls, AG, ACG, AG});
-            tests.add(new Object[]{GATKVariantContextUtils.GenotypeAssignmentMethod.BEST_MATCH_TO_ORIGINAL, pls, CG, ACG, CG});
-            tests.add(new Object[]{GATKVariantContextUtils.GenotypeAssignmentMethod.BEST_MATCH_TO_ORIGINAL, pls, GG, ACG, GG});
-        }
-
-        return tests.toArray(new Object[][]{});
-    }
-
-    @Test(enabled = !DEBUG, dataProvider = "UpdateGenotypeAfterSubsettingData")
-    public void testUpdateGenotypeAfterSubsetting(final GATKVariantContextUtils.GenotypeAssignmentMethod mode,
-                                                  final double[] likelihoods,
-                                                  final List<Allele> originalGT,
-                                                  final List<Allele> allelesToUse,
-                                                  final List<Allele> expectedAlleles) {
-        final GenotypeBuilder gb = new GenotypeBuilder("test");
-        final double[] log10Likelhoods = MathUtils.normalizeFromLog10(likelihoods, true, false);
-        GATKVariantContextUtils.updateGenotypeAfterSubsetting(originalGT, gb, mode, log10Likelhoods, allelesToUse);
-        final Genotype g = gb.make();
-        Assert.assertEquals(new HashSet<>(g.getAlleles()), new HashSet<>(expectedAlleles));
-    }
-
-    @Test(enabled = !DEBUG)
-    public void testSubsetToRef() {
-        final Map<Genotype, Genotype> tests = new LinkedHashMap<>();
-
-        for ( final List<Allele> alleles : Arrays.asList(Arrays.asList(Aref), Arrays.asList(C), Arrays.asList(Aref, C), Arrays.asList(Aref, C, C) ) ) {
-            for ( final String name : Arrays.asList("test1", "test2") ) {
-                final GenotypeBuilder builder = new GenotypeBuilder(name, alleles);
-                builder.DP(10);
-                builder.GQ(30);
-                builder.AD(alleles.size() == 1 ? new int[]{1} : (alleles.size() == 2 ? new int[]{1, 2} : new int[]{1, 2, 3}));
-                builder.PL(alleles.size() == 1 ? new int[]{1} : (alleles.size() == 2 ? new int[]{1,2} : new int[]{1,2,3}));
-                final List<Allele> refs = Collections.nCopies(alleles.size(), Aref);
-                tests.put(builder.make(), builder.alleles(refs).noAD().noPL().make());
-            }
-        }
-
-        for ( final int n : Arrays.asList(1, 2, 3) ) {
-            for ( final List<Genotype> genotypes : Utils.makePermutations(new ArrayList<>(tests.keySet()), n, false) ) {
-                final VariantContext vc = new VariantContextBuilder("test", "20", 1, 1, Arrays.asList(Aref, C)).genotypes(genotypes).make();
-                final GenotypesContext gc = GATKVariantContextUtils.subsetToRefOnly(vc, 2);
-
-                Assert.assertEquals(gc.size(), genotypes.size());
-                for ( int i = 0; i < genotypes.size(); i++ ) {
-//                    logger.warn("Testing " + genotypes.get(i) + " => " + gc.get(i) + " " + tests.get(genotypes.get(i)));
-                    assertGenotypesAreEqual(gc.get(i), tests.get(genotypes.get(i)));
-                }
-            }
-        }
-    }
-
-    // --------------------------------------------------------------------------------
-    //
-    // Test updatePLsAndAD
-    //
-    // --------------------------------------------------------------------------------
-
-    @DataProvider(name = "updatePLsAndADData")
-    public Object[][] makeUpdatePLsAndADData() {
-        List<Object[]> tests = new ArrayList<>();
-
-        final Allele A = Allele.create("A", true);
-        final Allele C = Allele.create("C");
-        final Allele G = Allele.create("G");
-
-        final List<Allele> AA = Arrays.asList(A,A);
-        final List<Allele> AC = Arrays.asList(A,C);
-        final List<Allele> CC = Arrays.asList(C,C);
-        final List<Allele> AG = Arrays.asList(A,G);
-        final List<Allele> CG = Arrays.asList(C,G);
-        final List<Allele> GG = Arrays.asList(G,G);
-        final List<Allele> ACG = Arrays.asList(A,C,G);
-
-        final VariantContext vcBase = new VariantContextBuilder("test", "20", 10, 10, AC).make();
-
-        final double[] homRefPL = MathUtils.normalizeFromRealSpace(new double[]{0.9, 0.09, 0.01});
-        final double[] hetPL = MathUtils.normalizeFromRealSpace(new double[]{0.09, 0.9, 0.01});
-        final double[] homVarPL = MathUtils.normalizeFromRealSpace(new double[]{0.01, 0.09, 0.9});
-        final double[] uninformative = new double[]{0, 0, 0};
-
-        final Genotype base = new GenotypeBuilder("NA12878").DP(10).GQ(100).make();
-
-        // make sure we don't screw up the simple case where no selection happens
-        final Genotype aaGT = new GenotypeBuilder(base).alleles(AA).AD(new int[]{10,2}).PL(homRefPL).GQ(8).make();
-        final Genotype acGT = new GenotypeBuilder(base).alleles(AC).AD(new int[]{10,2}).PL(hetPL).GQ(8).make();
-        final Genotype ccGT = new GenotypeBuilder(base).alleles(CC).AD(new int[]{10,2}).PL(homVarPL).GQ(8).make();
-
-        tests.add(new Object[]{new VariantContextBuilder(vcBase).genotypes(aaGT).make(), new VariantContextBuilder(vcBase).alleles(AC).make(), Arrays.asList(new GenotypeBuilder(aaGT).make())});
-        tests.add(new Object[]{new VariantContextBuilder(vcBase).genotypes(acGT).make(), new VariantContextBuilder(vcBase).alleles(AC).make(), Arrays.asList(new GenotypeBuilder(acGT).make())});
-        tests.add(new Object[]{new VariantContextBuilder(vcBase).genotypes(ccGT).make(), new VariantContextBuilder(vcBase).alleles(AC).make(), Arrays.asList(new GenotypeBuilder(ccGT).make())});
-
-        // uninformative test cases
-        final Genotype uninformativeGT = new GenotypeBuilder(base).alleles(CC).noAD().PL(uninformative).GQ(0).make();
-        tests.add(new Object[]{new VariantContextBuilder(vcBase).genotypes(uninformativeGT).make(), new VariantContextBuilder(vcBase).alleles(AC).make(), Arrays.asList(uninformativeGT)});
-        final Genotype emptyGT = new GenotypeBuilder(base).alleles(GATKVariantContextUtils.NO_CALL_ALLELES).noAD().noPL().noGQ().make();
-        tests.add(new Object[]{new VariantContextBuilder(vcBase).genotypes(emptyGT).make(), new VariantContextBuilder(vcBase).alleles(AC).make(), Arrays.asList(emptyGT)});
-
-        // actually subsetting down from multiple alt values
-        final double[] homRef3AllelesPL = new double[]{0, -10, -20, -30, -40, -50};
-        final double[] hetRefC3AllelesPL = new double[]{-10, 0, -20, -30, -40, -50};
-        final double[] homC3AllelesPL = new double[]{-20, -10, 0, -30, -40, -50};
-        final double[] hetRefG3AllelesPL = new double[]{-20, -10, -30, 0, -40, -50};
-        final double[] hetCG3AllelesPL = new double[]{-20, -10, -30, -40, 0, -50}; // AA, AC, CC, AG, CG, GG
-        final double[] homG3AllelesPL = new double[]{-20, -10, -30, -40, -50, 0};  // AA, AC, CC, AG, CG, GG
-
-        final int[] homRef3AllelesAD = new int[]{20, 0, 1};
-        final int[] hetRefC3AllelesAD = new int[]{10, 10, 1};
-        final int[] homC3AllelesAD = new int[]{0, 20, 1};
-        final int[] hetRefG3AllelesAD = new int[]{10, 0, 11};
-        final int[] hetCG3AllelesAD = new int[]{0, 12, 11}; // AA, AC, CC, AG, CG, GG
-        final int[] homG3AllelesAD = new int[]{0, 1, 21};  // AA, AC, CC, AG, CG, GG
-
-        tests.add(new Object[]{
-                new VariantContextBuilder(vcBase).alleles(ACG).genotypes(new GenotypeBuilder(base).alleles(AA).AD(homRef3AllelesAD).PL(homRef3AllelesPL).make()).make(),
-                new VariantContextBuilder(vcBase).alleles(AC).make(),
-                Arrays.asList(new GenotypeBuilder(base).alleles(AA).PL(new double[]{0, -10, -20}).AD(new int[]{20, 0}).GQ(100).make())});
-
-        tests.add(new Object[]{
-                new VariantContextBuilder(vcBase).alleles(ACG).genotypes(new GenotypeBuilder(base).alleles(AA).AD(hetRefC3AllelesAD).PL(hetRefC3AllelesPL).make()).make(),
-                new VariantContextBuilder(vcBase).alleles(AC).make(),
-                Arrays.asList(new GenotypeBuilder(base).alleles(AA).PL(new double[]{-10, 0, -20}).AD(new int[]{10, 10}).GQ(100).make())});
-
-        tests.add(new Object[]{
-                new VariantContextBuilder(vcBase).alleles(ACG).genotypes(new GenotypeBuilder(base).alleles(AA).AD(homC3AllelesAD).PL(homC3AllelesPL).make()).make(),
-                new VariantContextBuilder(vcBase).alleles(AC).make(),
-                Arrays.asList(new GenotypeBuilder(base).alleles(AA).PL(new double[]{-20, -10, 0}).AD(new int[]{0, 20}).GQ(100).make())});
-        tests.add(new Object[]{
-                new VariantContextBuilder(vcBase).alleles(ACG).genotypes(new GenotypeBuilder(base).alleles(AA).AD(hetRefG3AllelesAD).PL(hetRefG3AllelesPL).make()).make(),
-                new VariantContextBuilder(vcBase).alleles(AG).make(),
-                Arrays.asList(new GenotypeBuilder(base).alleles(AA).PL(new double[]{-20, 0, -50}).AD(new int[]{10, 11}).GQ(100).make())});
-
-        tests.add(new Object[]{
-                new VariantContextBuilder(vcBase).alleles(ACG).genotypes(new GenotypeBuilder(base).alleles(AA).AD(hetCG3AllelesAD).PL(hetCG3AllelesPL).make()).make(),
-                new VariantContextBuilder(vcBase).alleles(AG).make(),
-                Arrays.asList(new GenotypeBuilder(base).alleles(AA).PL(new double[]{0, -20, -30}).AD(new int[]{0, 11}).GQ(100).make())});
-
-        tests.add(new Object[]{
-                new VariantContextBuilder(vcBase).alleles(ACG).genotypes(new GenotypeBuilder(base).alleles(AA).AD(homG3AllelesAD).PL(homG3AllelesPL).make()).make(),
-                new VariantContextBuilder(vcBase).alleles(AG).make(),
-                Arrays.asList(new GenotypeBuilder(base).alleles(AA).PL(new double[]{-20, -40, 0}).AD(new int[]{0, 21}).GQ(100).make())});
-
-        return tests.toArray(new Object[][]{});
-    }
-
-    @Test(dataProvider = "updatePLsAndADData")
-    public void testUpdatePLsAndADData(final VariantContext originalVC,
-                                       final VariantContext selectedVC,
-                                       final List<Genotype> expectedGenotypes) {
-        final VariantContext selectedVCwithGTs = new VariantContextBuilder(selectedVC).genotypes(originalVC.getGenotypes()).make();
-        final GenotypesContext actual = GATKVariantContextUtils.updatePLsAndAD(selectedVCwithGTs, originalVC);
-
-        Assert.assertEquals(actual.size(), expectedGenotypes.size());
-        for ( final Genotype expected : expectedGenotypes ) {
-            final Genotype actualGT = actual.get(expected.getSampleName());
-            Assert.assertNotNull(actualGT);
-            assertGenotypesAreEqual(actualGT, expected);
-        }
-    }
-
-    // --------------------------------------------------------------------------------
-    //
-    // Test methods for merging reference confidence VCs
-    //
-    // --------------------------------------------------------------------------------
-
-
-    @Test(dataProvider = "indexOfAlleleData")
-    public void testIndexOfAllele(final Allele reference, final List<Allele> altAlleles, final List<Allele> otherAlleles) {
-        final List<Allele> alleles = new ArrayList<>(altAlleles.size() + 1);
-        alleles.add(reference);
-        alleles.addAll(altAlleles);
-        final VariantContext vc = makeVC("Source", alleles);
-
-        for (int i = 0; i < alleles.size(); i++) {
-            Assert.assertEquals(GATKVariantContextUtils.indexOfAllele(vc,alleles.get(i),true,true,true),i);
-            Assert.assertEquals(GATKVariantContextUtils.indexOfAllele(vc,alleles.get(i),false,true,true),i);
-            Assert.assertEquals(GATKVariantContextUtils.indexOfAllele(vc,alleles.get(i),true,true,false),i);
-            Assert.assertEquals(GATKVariantContextUtils.indexOfAllele(vc,alleles.get(i),false,true,false),i);
-            Assert.assertEquals(GATKVariantContextUtils.indexOfAllele(vc,Allele.create(alleles.get(i),true),true,true,true),i);
-            Assert.assertEquals(GATKVariantContextUtils.indexOfAllele(vc,Allele.create(alleles.get(i),true),true,true,false),-1);
-            if (i == 0) {
-                Assert.assertEquals(GATKVariantContextUtils.indexOfAllele(vc,alleles.get(i),true,false,true),-1);
-                Assert.assertEquals(GATKVariantContextUtils.indexOfAllele(vc,alleles.get(i),false,false,true),-1);
-                Assert.assertEquals(GATKVariantContextUtils.indexOfAllele(vc,alleles.get(i),true,false,false),-1);
-                Assert.assertEquals(GATKVariantContextUtils.indexOfAllele(vc,alleles.get(i),false,false,false),-1);
-                Assert.assertEquals(GATKVariantContextUtils.indexOfAllele(vc,Allele.create(alleles.get(i).getBases(),true),false,true,true),i);
-                Assert.assertEquals(GATKVariantContextUtils.indexOfAllele(vc,Allele.create(alleles.get(i).getBases(),false),false,true,true),-1);
-            } else {
-                Assert.assertEquals(GATKVariantContextUtils.indexOfAltAllele(vc,alleles.get(i),true),i - 1);
-                Assert.assertEquals(GATKVariantContextUtils.indexOfAltAllele(vc,alleles.get(i),false), i - 1);
-                Assert.assertEquals(GATKVariantContextUtils.indexOfAltAllele(vc,Allele.create(alleles.get(i),true),true),i-1);
-                Assert.assertEquals(GATKVariantContextUtils.indexOfAltAllele(vc,Allele.create(alleles.get(i),true),false),-1);
-            }
-        }
-
-        for (final Allele other : otherAlleles) {
-            Assert.assertEquals(GATKVariantContextUtils.indexOfAllele(vc, other, true, true, true), -1);
-            Assert.assertEquals(GATKVariantContextUtils.indexOfAllele(vc,other,false,true,true),-1);
-            Assert.assertEquals(GATKVariantContextUtils.indexOfAllele(vc,other,true,true,false),-1);
-            Assert.assertEquals(GATKVariantContextUtils.indexOfAllele(vc,other,false,true,false),-1);
-            Assert.assertEquals(GATKVariantContextUtils.indexOfAllele(vc,other,true,false,true),-1);
-            Assert.assertEquals(GATKVariantContextUtils.indexOfAllele(vc,other,false,false,true),-1);
-            Assert.assertEquals(GATKVariantContextUtils.indexOfAllele(vc,other,true,false,false),-1);
-            Assert.assertEquals(GATKVariantContextUtils.indexOfAllele(vc, other, false, false, false),-1);
-        }
-    }
-
-    @DataProvider(name = "indexOfAlleleData")
-    public Iterator<Object[]> indexOfAlleleData() {
-
-        final Allele[] ALTERNATIVE_ALLELES = new Allele[] { T, C, G, ATC, ATCATC};
-
-        final int lastMask = 0x1F;
-
-        return new Iterator<Object[]>() {
-
-            int nextMask = 0;
-
-            @Override
-            public boolean hasNext() {
-                return nextMask <= lastMask;
-            }
-
-            @Override
-            public Object[] next() {
-
-                int mask = nextMask++;
-                final List<Allele> includedAlleles = new ArrayList<>(5);
-                final List<Allele> excludedAlleles = new ArrayList<>(5);
-                for (int i = 0; i < ALTERNATIVE_ALLELES.length; i++) {
-                    ((mask & 1) == 1 ? includedAlleles : excludedAlleles).add(ALTERNATIVE_ALLELES[i]);
-                    mask >>= 1;
-                }
-                return new Object[] { Aref , includedAlleles, excludedAlleles};
-            }
-
-            @Override
-            public void remove() {
-                throw new UnsupportedOperationException();
-            }
-        };
-    }
-
-    @Test(dataProvider="overlapWithData")
-    public void testOverlapsWith(final VariantContext vc, final GenomeLoc genomeLoc) {
-        final boolean expected;
-
-        if (genomeLoc.isUnmapped())
-            expected = false;
-        else if (vc.getStart() > genomeLoc.getStop())
-            expected = false;
-        else if (vc.getEnd() < genomeLoc.getStart())
-            expected = false;
-        else if (!vc.getChr().equals(genomeLoc.getContig()))
-            expected = false;
-        else
-            expected = true;
-
-        Assert.assertEquals(GATKVariantContextUtils.overlapsRegion(vc, genomeLoc), expected);
-    }
-
-
-    private final String[] OVERLAP_WITH_CHROMOSOMES =  { "chr1", "chr20" };
-    private final int[] OVERLAP_WITH_EVENT_SIZES =  { -10, -1, 0, 1, 10 }; // 0 == SNP , -X xbp deletion, +X xbp insertion.
-    private final int[] OVERLAP_WITH_EVENT_STARTS = { 10000000, 10000001,
-                                                      10000005, 10000010,
-                                                      10000009, 10000011,
-                                                      20000000 };
-
-    @DataProvider(name="overlapWithData")
-    public Object[][] overlapWithData() {
-
-        final int totalLocations = OVERLAP_WITH_CHROMOSOMES.length * OVERLAP_WITH_EVENT_SIZES.length * OVERLAP_WITH_EVENT_STARTS.length + 1;
-        final int totalEvents = OVERLAP_WITH_CHROMOSOMES.length * OVERLAP_WITH_EVENT_SIZES.length * OVERLAP_WITH_EVENT_STARTS.length;
-        final GenomeLoc[] locs = new GenomeLoc[totalLocations];
-        final VariantContext[] events = new VariantContext[totalEvents];
-
-        generateAllLocationsAndVariantContextCombinations(OVERLAP_WITH_CHROMOSOMES, OVERLAP_WITH_EVENT_SIZES,
-                OVERLAP_WITH_EVENT_STARTS, locs, events);
-
-        return generateAllParameterCombinationsForOverlapWithData(locs, events);
-    }
-
-    private Object[][] generateAllParameterCombinationsForOverlapWithData(GenomeLoc[] locs, VariantContext[] events) {
-        final List<Object[]> result = new LinkedList<>();
-        for (final GenomeLoc loc : locs)
-            for (final VariantContext event : events)
-               result.add(new Object[] { event , loc });
-
-        return result.toArray(new Object[result.size()][]);
-    }
-
-    private void generateAllLocationsAndVariantContextCombinations(final String[] chrs, final int[] eventSizes,
-                                                                   final int[] eventStarts, final GenomeLoc[] locs,
-                                                                   final VariantContext[] events) {
-        int nextIndex = 0;
-        for (final String chr : chrs )
-            for (final int size : eventSizes )
-                for (final int starts : eventStarts ) {
-                    locs[nextIndex] = genomeLocParser.createGenomeLoc(chr,starts,starts + Math.max(0,size));
-                    events[nextIndex++] = new VariantContextBuilder().source("test").loc(chr,starts,starts + Math.max(0,size)).alleles(Arrays.asList(
-                            Allele.create(randomBases(size <= 0 ? 1 : size + 1, true), true), Allele.create(randomBases(size < 0 ? -size + 1 : 1, false), false))).make();
-                }
-
-        locs[nextIndex++]  = GenomeLoc.UNMAPPED;
-    }
-
-    @Test(dataProvider = "totalPloidyData")
-    public void testTotalPloidy(final int[] ploidies, final int defaultPloidy, final int expected) {
-        final Genotype[] genotypes = new Genotype[ploidies.length];
-        final List<Allele> vcAlleles = Arrays.asList(Aref,C);
-        for (int i = 0; i < genotypes.length; i++)
-            genotypes[i] = new GenotypeBuilder().alleles(GATKVariantContextUtils.noCallAlleles(ploidies[i])).make();
-        final VariantContext vc = new VariantContextBuilder().chr("seq1").genotypes(genotypes).alleles(vcAlleles).make();
-        Assert.assertEquals(GATKVariantContextUtils.totalPloidy(vc,defaultPloidy),expected," " + defaultPloidy + " " + Arrays.toString(ploidies));
-    }
-
-    @DataProvider(name="totalPloidyData")
-    public Object[][] totalPloidyData() {
-        final Random rdn = GenomeAnalysisEngine.getRandomGenerator();
-        final List<Object[]> resultList = new ArrayList<>();
-        for (int i = 0; i < 100; i++) {
-            final int sampleCount = rdn.nextInt(10);
-
-            int expected = 0;
-            final int defaultPloidy = rdn.nextInt(10) + 1;
-            final int[] plodies = new int[sampleCount];
-            for (int j = 0; j < sampleCount; j++) {
-                plodies[j] = rdn.nextInt(10);
-                expected += plodies[j] == 0 ? defaultPloidy : plodies[j];
-            }
-            resultList.add(new Object[] { plodies, defaultPloidy, expected });
-        }
-        return resultList.toArray(new Object[100][]);
-    }
-
-    private byte[] randomBases(final int length, final boolean reference) {
-        final byte[] bases = new byte[length];
-        bases[0] = (byte) (reference  ? 'A' : 'C');
-        BaseUtils.fillWithRandomBases(bases, 1, bases.length);
-        return bases;
-    }
-}
-
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/variant/VCFIntegrationTest.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/variant/VCFIntegrationTest.java
deleted file mode 100644
index 4a08702..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/variant/VCFIntegrationTest.java
+++ /dev/null
@@ -1,377 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.variant;
-
-import htsjdk.tribble.AbstractFeatureReader;
-import htsjdk.tribble.Tribble;
-import htsjdk.tribble.index.AbstractIndex;
-import htsjdk.tribble.index.ChrIndex;
-import htsjdk.tribble.index.Index;
-import htsjdk.tribble.index.IndexFactory;
-import htsjdk.tribble.index.interval.IntervalTreeIndex;
-import htsjdk.tribble.index.linear.LinearIndex;
-import htsjdk.tribble.index.tabix.TabixIndex;
-import htsjdk.tribble.util.TabixUtils;
-import org.broadinstitute.gatk.utils.BaseTest;
-import org.broadinstitute.gatk.engine.walkers.WalkerTest;
-import htsjdk.variant.vcf.VCFCodec;
-import org.testng.Assert;
-import org.testng.TestException;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
-
-import java.io.File;
-import java.lang.reflect.Field;
-import java.util.Arrays;
-import java.util.LinkedHashMap;
-import java.util.List;
-
-public class VCFIntegrationTest extends WalkerTest {
-
-    @Test(enabled = true)
-    public void testReadingAndWritingWitHNoChanges() {
-
-        String md5ofInputVCF = "d991abe6c6a7a778a60a667717903be0";
-        String testVCF = privateTestDir + "vcf4.1.example.vcf";
-
-        String baseCommand = "-R " + b37KGReference + " --no_cmdline_in_header -o %s ";
-
-        String test1 = baseCommand + "-T VariantAnnotator --variant " + testVCF + " -L " + testVCF;
-        WalkerTestSpec spec1 = new WalkerTestSpec(test1, 1, Arrays.asList(md5ofInputVCF));
-        List<File> result = executeTest("Test Variant Annotator with no changes", spec1).getFirst();
-
-        String test2 = baseCommand + "-T VariantsToVCF --variant " + result.get(0).getAbsolutePath();
-        WalkerTestSpec spec2 = new WalkerTestSpec(test2, 1, Arrays.asList(md5ofInputVCF));
-        executeTest("Test Variants To VCF from new output", spec2);
-    }
-
-    @Test(enabled = true)
-    public void testReadingAndWritingBreakpointAlleles() {
-        String testVCF = privateTestDir + "breakpoint-example.vcf";
-
-        String baseCommand = "-R " + b37KGReference + " --no_cmdline_in_header -o %s ";
-
-        String test1 = baseCommand + "-T SelectVariants -V " + testVCF;
-        WalkerTestSpec spec1 = new WalkerTestSpec(test1, 1, Arrays.asList("13329ba7360a8beb3afc02569e5a20c4"));
-        executeTest("Test reading and writing breakpoint VCF", spec1);
-    }
-
-    @Test(enabled = true)
-    public void testReadingLowerCaseBases() {
-        String testVCF = privateTestDir + "lowercaseBases.vcf";
-
-        String baseCommand = "-R " + b37KGReference + " --no_cmdline_in_header -o %s ";
-
-        String test1 = baseCommand + "-T SelectVariants -V " + testVCF;
-        WalkerTestSpec spec1 = new WalkerTestSpec(test1, 1, Arrays.asList("e0e308a25e56bde1c664139bb44ed19d"));
-        executeTest("Test reading VCF with lower-case bases", spec1);
-    }
-
-    @Test(enabled = true)
-    public void testReadingAndWriting1000GSVs() {
-        String testVCF = privateTestDir + "1000G_SVs.chr1.vcf";
-
-        String baseCommand = "-R " + b37KGReference + " --no_cmdline_in_header -o %s ";
-
-        String test1 = baseCommand + "-T SelectVariants -V " + testVCF;
-        WalkerTestSpec spec1 = new WalkerTestSpec(test1, 1, Arrays.asList("bdab26dd7648a806dbab01f64db2bdab"));
-        executeTest("Test reading and writing 1000G Phase I SVs", spec1);
-    }
-
-    @Test
-    public void testReadingAndWritingSamtools() {
-        String testVCF = privateTestDir + "samtools.vcf";
-
-        String baseCommand = "-R " + b37KGReference + " --no_cmdline_in_header -o %s ";
-
-        String test1 = baseCommand + "-T SelectVariants -V " + testVCF;
-        WalkerTestSpec spec1 = new WalkerTestSpec(test1, 1, Arrays.asList("38697c195e7abf18d95dcc16c8e6d284"));
-        executeTest("Test reading and writing samtools vcf", spec1);
-    }
-
-    @Test
-    public void testWritingSamtoolsWExBCFExample() {
-        String testVCF = privateTestDir + "ex2.vcf";
-        String baseCommand = "-R " + b36KGReference + " --no_cmdline_in_header -o %s ";
-        String test1 = baseCommand + "-T SelectVariants -V " + testVCF;
-        WalkerTestSpec spec1 = new WalkerTestSpec(test1, 1, Arrays.asList("e8f721ce81e4fdadba13c5291027057f"));
-        executeTest("Test writing samtools WEx BCF example", spec1);
-    }
-
-    @Test(enabled = true)
-    public void testReadingSamtoolsWExBCFExample() {
-        String testVCF = privateTestDir + "ex2.bcf";
-        String baseCommand = "-R " + b36KGReference + " --no_cmdline_in_header -o %s ";
-        String test1 = baseCommand + "-T SelectVariants -V " + testVCF;
-        WalkerTestSpec spec1 = new WalkerTestSpec(test1, 1, Arrays.asList("0439e2b4ccc63bb4ba7c283cd9ab1b25"));
-        executeTest("Test reading samtools WEx BCF example", spec1);
-    }
-
-    //
-    //
-    // Tests to ensure that -U LENIENT_VCF_PROCESS
-    //
-    //
-
-    @Test
-    public void testFailingOnVCFWithoutHeaders() {
-        runVCFWithoutHeaders("", "", IllegalStateException.class, false);
-    }
-
-    @Test
-    public void testPassingOnVCFWithoutHeadersWithLenientProcessing() {
-        runVCFWithoutHeaders("-U LENIENT_VCF_PROCESSING", "6de8cb7457154dd355aa55befb943f88", null, true);
-    }
-
-    private void runVCFWithoutHeaders(final String moreArgs, final String expectedMD5, final Class expectedException, final boolean disableBCF) {
-        final String testVCF = privateTestDir + "vcfexample2.noHeader.vcf";
-        final String baseCommand = "-R " + b37KGReference
-                + " --no_cmdline_in_header -o %s "
-                + "-T VariantsToVCF -V " + testVCF + " " + moreArgs;
-        WalkerTestSpec spec1 = expectedException != null
-                ? new WalkerTestSpec(baseCommand, 1, expectedException)
-                : new WalkerTestSpec(baseCommand, 1, Arrays.asList(expectedMD5));
-        if ( disableBCF )
-            spec1.disableShadowBCF();
-        executeTest("Test reading VCF without header lines with additional args " + moreArgs, spec1);
-    }
-
-    //
-    //
-    // IndexCreator tests
-    //
-    //
-
-    private class VCFIndexCreatorTest extends TestDataProvider {
-        private final GATKVCFIndexType type;
-        private final int parameter;
-
-        private VCFIndexCreatorTest(GATKVCFIndexType type, int parameter) {
-            super(VCFIndexCreatorTest.class);
-
-            this.type = type;
-            this.parameter = parameter;
-        }
-
-        public String toString() {
-            return String.format("Index Type %s, Index Parameter %s", type, parameter);
-        }
-
-        public Index getIndex(final File vcfFile) {
-            switch (type) {
-                case DYNAMIC_SEEK : return IndexFactory.createDynamicIndex(vcfFile, new VCFCodec(), IndexFactory.IndexBalanceApproach.FOR_SEEK_TIME);
-                case DYNAMIC_SIZE : return IndexFactory.createDynamicIndex(vcfFile, new VCFCodec(), IndexFactory.IndexBalanceApproach.FOR_SIZE);
-                case LINEAR : return IndexFactory.createLinearIndex(vcfFile, new VCFCodec(), parameter);
-                case INTERVAL : return IndexFactory.createIntervalIndex(vcfFile, new VCFCodec(), parameter);
-                default : throw new TestException("Invalid index type");
-            }
-        }
-    }
-
-    @DataProvider(name = "IndexDataProvider")
-    public Object[][] indexCreatorData() {
-        new VCFIndexCreatorTest(GATKVCFIndexType.DYNAMIC_SEEK, 0);
-        new VCFIndexCreatorTest(GATKVCFIndexType.DYNAMIC_SIZE, 0);
-        new VCFIndexCreatorTest(GATKVCFIndexType.LINEAR, 100);
-        new VCFIndexCreatorTest(GATKVCFIndexType.LINEAR, 10000);
-        new VCFIndexCreatorTest(GATKVCFIndexType.INTERVAL, 20);
-        new VCFIndexCreatorTest(GATKVCFIndexType.INTERVAL, 2000);
-
-        return TestDataProvider.getTests(VCFIndexCreatorTest.class);
-    }
-
-    @Test(dataProvider = "IndexDataProvider")
-    public void testVCFIndexCreation(VCFIndexCreatorTest testSpec) throws NoSuchFieldException, IllegalAccessException {
-
-        final String commandLine = " -T SelectVariants" +
-                " -R " + b37KGReference +
-                " --no_cmdline_in_header" +
-                " -L 20" +
-                " -V " + b37_NA12878_OMNI +
-                " --variant_index_type " + testSpec.type +
-                " --variant_index_parameter " + testSpec.parameter +
-                " -o %s ";
-        final String name = "testVCFIndexCreation: " + testSpec.toString();
-
-        final WalkerTestSpec spec = new WalkerTestSpec(commandLine, 1, Arrays.asList(""));
-        spec.disableShadowBCF();
-
-        File outVCF = executeTest(name, spec).first.get(0);
-        File outIdx = new File(outVCF.getAbsolutePath() + Tribble.STANDARD_INDEX_EXTENSION);
-
-        final Index actualIndex = IndexFactory.loadIndex(outIdx.getAbsolutePath());
-        final Index expectedIndex = testSpec.getIndex(outVCF);
-
-        if (testSpec.type.equals("LINEAR"))
-            Assert.assertTrue(actualIndex instanceof LinearIndex, "Index is not a LinearIndex");
-        else if (testSpec.type.equals("INTERVAL"))
-            Assert.assertTrue(actualIndex instanceof IntervalTreeIndex, "Index is not a IntervalTreeIndex");
-        // dynamic indices ultimately resolve to one of LinearIndex or IntervalTreeIndex
-
-        Assert.assertTrue(equivalentAbstractIndices((AbstractIndex)actualIndex, (AbstractIndex)expectedIndex), "Indices are not equivalent");
-
-        if (actualIndex instanceof LinearIndex && expectedIndex instanceof LinearIndex) {
-            Assert.assertTrue(equivalentLinearIndices((LinearIndex)actualIndex, (LinearIndex)expectedIndex, "20"), "Linear indices are not equivalent");
-        }
-        else if (actualIndex instanceof IntervalTreeIndex && expectedIndex instanceof IntervalTreeIndex) {
-            Assert.assertTrue(equivalentIntervalIndices((IntervalTreeIndex)actualIndex, (IntervalTreeIndex)expectedIndex, "20"), "Interval indices are not equivalent");
-        }
-        else {
-            Assert.fail("Indices are not of the same type");
-        }
-    }
-
-    private static boolean equivalentAbstractIndices(AbstractIndex thisIndex, AbstractIndex otherIndex){
-        return thisIndex.getVersion() == otherIndex.getVersion() &&
-                thisIndex.getIndexedFile().equals(otherIndex.getIndexedFile()) &&
-                thisIndex.getIndexedFileSize() == otherIndex.getIndexedFileSize() &&
-                thisIndex.getIndexedFileMD5().equals(otherIndex.getIndexedFileMD5()) &&
-                thisIndex.getFlags() == otherIndex.getFlags();
-     }
-
-    private static boolean equivalentLinearIndices(LinearIndex thisIndex, LinearIndex otherIndex, String chr) throws NoSuchFieldException, IllegalAccessException {
-        htsjdk.tribble.index.linear.LinearIndex.ChrIndex thisChr = (htsjdk.tribble.index.linear.LinearIndex.ChrIndex)getChrIndex(thisIndex, chr);
-        htsjdk.tribble.index.linear.LinearIndex.ChrIndex otherChr = (htsjdk.tribble.index.linear.LinearIndex.ChrIndex)getChrIndex(otherIndex, chr);
-
-        return  thisChr.getName().equals(otherChr.getName()) &&
-                //thisChr.getTotalSize() == otherChr.getTotalSize() &&      TODO: why does this differ?
-                thisChr.getNFeatures() == otherChr.getNFeatures() &&
-                thisChr.getNBlocks() == otherChr.getNBlocks();
-    }
-
-    private static boolean equivalentIntervalIndices(IntervalTreeIndex thisIndex, IntervalTreeIndex otherIndex, String chr) throws NoSuchFieldException, IllegalAccessException {
-        htsjdk.tribble.index.interval.IntervalTreeIndex.ChrIndex thisChr = (htsjdk.tribble.index.interval.IntervalTreeIndex.ChrIndex)getChrIndex(thisIndex, chr);
-        htsjdk.tribble.index.interval.IntervalTreeIndex.ChrIndex otherChr = (htsjdk.tribble.index.interval.IntervalTreeIndex.ChrIndex)getChrIndex(otherIndex, chr);
-
-        // TODO: compare trees?
-        return thisChr.getName().equals(otherChr.getName());
-    }
-
-    private static ChrIndex getChrIndex(AbstractIndex index, String chr) throws NoSuchFieldException, IllegalAccessException {
-        Field f = AbstractIndex.class.getDeclaredField("chrIndices");
-        f.setAccessible(true);
-        LinkedHashMap<String, ChrIndex> chrIndices = (LinkedHashMap<String, ChrIndex>) f.get(index);
-        return chrIndices.get(chr);
-    }
-
-    //
-    //
-    // Block-Compressed Tabix Index Tests
-    //
-    //
-
-    private class BlockCompressedIndexCreatorTest extends TestDataProvider {
-        private final String extension;
-
-        private BlockCompressedIndexCreatorTest(String extension) {
-            super(BlockCompressedIndexCreatorTest.class);
-
-            this.extension = extension;
-        }
-
-        public String toString() {
-            return String.format("File extension %s", extension);
-        }
-    }
-
-    @DataProvider(name = "BlockCompressedIndexDataProvider")
-    public Object[][] blockCompressedIndexCreatorData() {
-        for (final String extension : AbstractFeatureReader.BLOCK_COMPRESSED_EXTENSIONS)
-            new BlockCompressedIndexCreatorTest(".vcf" + extension);
-
-        return TestDataProvider.getTests(BlockCompressedIndexCreatorTest.class);
-    }
-
-    @Test(dataProvider = "BlockCompressedIndexDataProvider")
-    public void testBlockCompressedIndexCreation(BlockCompressedIndexCreatorTest testSpec) throws NoSuchFieldException, IllegalAccessException {
-
-        final String commandLine = " -T SelectVariants" +
-                " -R " + b37KGReference +
-                " --no_cmdline_in_header" +
-                " -L 20" +
-                " -V " + b37_NA12878_OMNI;
-        final String name = "testBlockCompressedIndexCreation: " + testSpec.toString();
-
-        File outVCF = createTempFile("testBlockCompressedIndexCreation", testSpec.extension);
-        final WalkerTestSpec spec = new WalkerTestSpec(commandLine, 1, Arrays.asList(""));
-        spec.disableShadowBCF();
-        spec.setOutputFileLocation(outVCF);
-
-        executeTest(name, spec);
-
-        File outTribbleIdx = new File(outVCF.getAbsolutePath() + Tribble.STANDARD_INDEX_EXTENSION);
-        Assert.assertFalse(outTribbleIdx.exists(), "testBlockCompressedIndexCreation: Want Tabix index but Tribble index exists: " + outTribbleIdx);
-
-        File outTabixIdx = new File(outVCF.getAbsolutePath() + TabixUtils.STANDARD_INDEX_EXTENSION);
-        final Index actualIndex = IndexFactory.loadIndex(outTabixIdx.toString());
-        Assert.assertTrue(actualIndex instanceof TabixIndex, "testBlockCompressedIndexCreation: Want Tabix index but index is not Tabix: " + outTabixIdx);
-    }
-
-    //
-    //
-    // Block-Compressed Input Tests
-    //
-    //
-
-    private class BlockCompressedInputTest extends TestDataProvider {
-        private final String extension;
-
-        private BlockCompressedInputTest(String extension) {
-            super(BlockCompressedInputTest.class);
-
-            this.extension = extension;
-        }
-
-        public String toString() {
-            return String.format("File extension %s", extension);
-        }
-    }
-
-    @DataProvider(name = "BlockCompressedInputDataProvider")
-    public Object[][] blockCompressedInputData() {
-        for (final String extension : AbstractFeatureReader.BLOCK_COMPRESSED_EXTENSIONS)
-            new BlockCompressedInputTest(".vcf" + extension);
-
-        return TestDataProvider.getTests(BlockCompressedInputTest.class);
-    }
-
-    @Test(dataProvider = "BlockCompressedInputDataProvider")
-    public void testBlockCompressedInput(BlockCompressedInputTest testSpec) {
-
-        File inputFile = new File(BaseTest.privateTestDir, "block_compressed_input_test" + testSpec.extension);
-        final String commandLine = " -T SelectVariants" +
-                " -R " + b37KGReference +
-                " --no_cmdline_in_header" +
-                " -V " + inputFile +
-                " -o %s ";
-        final String name = "testBlockCompressedInput: " + testSpec.toString();
-
-        final WalkerTestSpec spec = new WalkerTestSpec(commandLine, 1, Arrays.asList("3b60668bd973e43783d0406de80d2ed2"));
-
-        executeTest(name, spec);
-    }
-
-}
diff --git a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/variant/VariantContextBenchmark.java b/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/variant/VariantContextBenchmark.java
deleted file mode 100644
index 7c1b202..0000000
--- a/public/gatk-tools-public/src/test/java/org/broadinstitute/gatk/utils/variant/VariantContextBenchmark.java
+++ /dev/null
@@ -1,377 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package org.broadinstitute.gatk.utils.variant;
-
-import com.google.caliper.Param;
-import com.google.caliper.SimpleBenchmark;
-import htsjdk.tribble.Feature;
-import htsjdk.tribble.FeatureCodec;
-import htsjdk.variant.variantcontext.*;
-import htsjdk.variant.vcf.VCFCodec;
-
-import java.util.ArrayList;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Set;
-
-/**
- * Caliper microbenchmark of parsing a VCF file
- */
-public class VariantContextBenchmark extends SimpleBenchmark {
-    @Param({"/Users/depristo/Desktop/broadLocal/localData/ALL.chr20.merged_beagle_mach.20101123.snps_indels_svs.genotypes.vcf"})
-    String vcfFile;
-
-    @Param({"1000"})
-    int linesToRead; // set automatically by framework
-
-    @Param({"100"})
-    int nSamplesToTake; // set automatically by framework
-
-    @Param({"10"})
-    int dupsToMerge; // set automatically by framework
-
-    @Param
-    Operation operation; // set automatically by framework
-
-    private String INPUT_STRING;
-
-    public enum Operation {
-        READ,
-        SUBSET_TO_SAMPLES,
-        GET_TYPE,
-        GET_ID,
-        GET_GENOTYPES,
-        GET_ATTRIBUTE_STRING,
-        GET_ATTRIBUTE_INT,
-        GET_N_SAMPLES,
-        GET_GENOTYPES_FOR_SAMPLES,
-        GET_GENOTYPES_IN_ORDER_OF_NAME,
-        CALC_GENOTYPE_COUNTS,
-        MERGE
-    }
-
-    @Override protected void setUp() {
-        // TODO -- update for new tribble interface
-//        try {
-//            ReferenceSequenceFile seq = new CachingIndexedFastaSequenceFile(new File(BaseTest.b37KGReference));
-//            b37GenomeLocParser = new GenomeLocParser(seq);
-//        } catch ( FileNotFoundException e) {
-//            throw new RuntimeException(e);
-//        }
-//
-//        // read it into a String so that we don't try to benchmark IO issues
-//        try {
-//            FileInputStream s = new FileInputStream(new File(vcfFile));
-//            AsciiLineReader lineReader = new AsciiLineReader(s);
-//            int counter = 0;
-//            StringBuffer sb = new StringBuffer();
-//            while (counter++ < linesToRead ) {
-//                String line = lineReader.readLine();
-//                if ( line == null )
-//                    break;
-//                sb.append(line + "\n");
-//            }
-//            s.close();
-//            INPUT_STRING = sb.toString();
-//        } catch (IOException e) {
-//            throw new RuntimeException(e);
-//        }
-    }
-
-    private interface FunctionToBenchmark<T extends Feature> {
-        public void run(T vc);
-    }
-
-    private <T extends Feature> void runBenchmark(FeatureCodec codec, FunctionToBenchmark<T> func) {
-        // TODO -- update for new Tribble interface
-//        try {
-//            InputStream is = new ByteArrayInputStream(INPUT_STRING.getBytes());
-//            AsciiLineReader lineReader = new AsciiLineReader(is);
-//            codec.readHeader(lineReader);
-//
-//            int counter = 0;
-//            while (counter++ < linesToRead ) {
-//                String line = lineReader.readLine();
-//                if ( line == null )
-//                    break;
-//
-//                T vc = codec.decode(line);
-//                func.run(vc);
-//            }
-//        } catch (Exception e) {
-//            System.out.println("Benchmarking run failure because of " + e.getMessage());
-//        }
-    }
-
-    public void timeV14(int rep) {
-        for ( int i = 0; i < rep; i++ ) {
-            FunctionToBenchmark<VariantContext> func = getV14FunctionToBenchmark();
-            final VCFCodec codec = new VCFCodec();
-            runBenchmark(codec, func);
-        }
-    }
-
-    public FunctionToBenchmark<VariantContext> getV14FunctionToBenchmark() {
-        switch ( operation ) {
-            case READ:
-                return new FunctionToBenchmark<VariantContext>() {
-                    public void run(final VariantContext vc) {
-                        ; // empty operation
-                    }
-                };
-            case SUBSET_TO_SAMPLES:
-                return new FunctionToBenchmark<VariantContext>() {
-                    Set<String> samples;
-                    public void run(final VariantContext vc) {
-                        if ( samples == null )
-                            samples = new HashSet<>(new ArrayList<>(vc.getSampleNames()).subList(0, nSamplesToTake));
-                        VariantContext sub = vc.subContextFromSamples(samples);
-                        sub.getNSamples();
-                    }
-                };
-            case GET_TYPE:
-                return new FunctionToBenchmark<VariantContext>() {
-                    public void run(final VariantContext vc) {
-                        vc.getType();
-                    }
-                };
-            case GET_ID:
-                return new FunctionToBenchmark<VariantContext>() {
-                    public void run(final VariantContext vc) {
-                        vc.getID();
-                    }
-                };
-            case GET_GENOTYPES:
-                return new FunctionToBenchmark<VariantContext>() {
-                    public void run(final VariantContext vc) {
-                        vc.getGenotypes().size();
-                    }
-                };
-
-            case GET_GENOTYPES_FOR_SAMPLES:
-                return new FunctionToBenchmark<VariantContext>() {
-                    Set<String> samples;
-                    public void run(final VariantContext vc) {
-                        if ( samples == null )
-                            samples = new HashSet<>(new ArrayList<>(vc.getSampleNames()).subList(0, nSamplesToTake));
-                        vc.getGenotypes(samples).size();
-                    }
-                };
-
-            case GET_ATTRIBUTE_STRING:
-                return new FunctionToBenchmark<VariantContext>() {
-                    public void run(final VariantContext vc) {
-                        vc.getAttribute("AN", null);
-                    }
-                };
-
-            case GET_ATTRIBUTE_INT:
-                return new FunctionToBenchmark<VariantContext>() {
-                    public void run(final VariantContext vc) {
-                        vc.getAttributeAsInt("AC", 0);
-                    }
-                };
-
-            case GET_N_SAMPLES:
-                return new FunctionToBenchmark<VariantContext>() {
-                    public void run(final VariantContext vc) {
-                        vc.getNSamples();
-                    }
-                };
-
-            case GET_GENOTYPES_IN_ORDER_OF_NAME:
-                return new FunctionToBenchmark<VariantContext>() {
-                    public void run(final VariantContext vc) {
-                        ; // TODO - TEST IS BROKEN
-//                        int n = 0;
-//                        for ( final Genotype g: vc.getGenotypesOrderedByName() ) n++;
-                    }
-                };
-
-            case CALC_GENOTYPE_COUNTS:
-                return new FunctionToBenchmark<VariantContext>() {
-                    public void run(final VariantContext vc) {
-                        vc.getHetCount();
-                    }
-                };
-
-            case MERGE:
-                return new FunctionToBenchmark<VariantContext>() {
-                    public void run(final VariantContext vc) {
-                        List<VariantContext> toMerge = new ArrayList<>();
-
-                        for ( int i = 0; i < dupsToMerge; i++ ) {
-                            GenotypesContext gc = GenotypesContext.create(vc.getNSamples());
-                            for ( final Genotype g : vc.getGenotypes() ) {
-                                gc.add(new GenotypeBuilder(g).name(g.getSampleName()+"_"+i).make());
-                            }
-                            toMerge.add(new VariantContextBuilder(vc).genotypes(gc).make());
-                        }
-
-                        GATKVariantContextUtils.simpleMerge(toMerge, null,
-                                GATKVariantContextUtils.FilteredRecordMergeType.KEEP_IF_ANY_UNFILTERED,
-                                GATKVariantContextUtils.GenotypeMergeType.UNSORTED,
-                                true, false, "set", false, true);
-                    }
-                };
-
-            default: throw new IllegalArgumentException("Unexpected operation " + operation);
-        }
-    }
-
-    // --------------------------------------------------------------------------------
-    //
-    // V13
-    //
-    // In order to use this, you must move the v13 version from archive and uncomment
-    //
-    // git mv private/archive/java/src/org/broadinstitute/sting/utils/variantcontext/v13 public/java/test/org/broadinstitute/sting/utils/variantcontext/v13
-    //
-    // --------------------------------------------------------------------------------
-
-//    public void timeV13(int rep) {
-//        for ( int i = 0; i < rep; i++ ) {
-//            FunctionToBenchmark<htsjdk.variant.variantcontext.v13.VariantContext> func = getV13FunctionToBenchmark();
-//            FeatureCodec<htsjdk.variant.variantcontext.v13.VariantContext> codec = new htsjdk.variant.variantcontext.v13.VCFCodec();
-//            runBenchmark(codec, func);
-//        }
-//    }
-//
-//    public FunctionToBenchmark<htsjdk.variant.variantcontext.v13.VariantContext> getV13FunctionToBenchmark() {
-//        switch ( operation ) {
-//            case READ:
-//                return new FunctionToBenchmark<htsjdk.variant.variantcontext.v13.VariantContext>() {
-//                    public void run(final htsjdk.variant.variantcontext.v13.VariantContext vc) {
-//                        ; // empty operation
-//                    }
-//                };
-//            case SUBSET_TO_SAMPLES:
-//                return new FunctionToBenchmark<htsjdk.variant.variantcontext.v13.VariantContext>() {
-//                    List<String> samples;
-//                    public void run(final htsjdk.variant.variantcontext.v13.VariantContext vc) {
-//                        if ( samples == null )
-//                            samples = new ArrayList<String>(vc.getSampleNames()).subList(0, nSamplesToTake);
-//                        htsjdk.variant.variantcontext.v13.VariantContext sub = vc.subContextFromGenotypes(vc.getGenotypes(samples).values());
-//                        sub.getNSamples();
-//                    }
-//                };
-//
-//            case GET_TYPE:
-//                return new FunctionToBenchmark<htsjdk.variant.variantcontext.v13.VariantContext>() {
-//                    public void run(final htsjdk.variant.variantcontext.v13.VariantContext vc) {
-//                        vc.getType();
-//                    }
-//                };
-//            case GET_ID:
-//                return new FunctionToBenchmark<htsjdk.variant.variantcontext.v13.VariantContext>() {
-//                    public void run(final htsjdk.variant.variantcontext.v13.VariantContext vc) {
-//                        vc.getID();
-//                    }
-//                };
-//            case GET_GENOTYPES:
-//                return new FunctionToBenchmark<htsjdk.variant.variantcontext.v13.VariantContext>() {
-//                    public void run(final htsjdk.variant.variantcontext.v13.VariantContext vc) {
-//                        vc.getGenotypes().size();
-//                    }
-//                };
-//
-//            case GET_GENOTYPES_FOR_SAMPLES:
-//                return new FunctionToBenchmark<htsjdk.variant.variantcontext.v13.VariantContext>() {
-//                    Set<String> samples;
-//                    public void run(final htsjdk.variant.variantcontext.v13.VariantContext vc) {
-//                        if ( samples == null )
-//                            samples = new HashSet<String>(new ArrayList<String>(vc.getSampleNames()).subList(0, nSamplesToTake));
-//                        vc.getGenotypes(samples).size();
-//                    }
-//                };
-//
-//            case GET_ATTRIBUTE_STRING:
-//                return new FunctionToBenchmark<htsjdk.variant.variantcontext.v13.VariantContext>() {
-//                    public void run(final htsjdk.variant.variantcontext.v13.VariantContext vc) {
-//                        vc.getExtendedAttribute("AN", null);
-//                    }
-//                };
-//
-//            case GET_ATTRIBUTE_INT:
-//                return new FunctionToBenchmark<htsjdk.variant.variantcontext.v13.VariantContext>() {
-//                    public void run(final htsjdk.variant.variantcontext.v13.VariantContext vc) {
-//                        vc.getAttributeAsInt("AC", 0);
-//                    }
-//                };
-//
-//            case GET_N_SAMPLES:
-//                return new FunctionToBenchmark<htsjdk.variant.variantcontext.v13.VariantContext>() {
-//                    public void run(final htsjdk.variant.variantcontext.v13.VariantContext vc) {
-//                        vc.getNSamples();
-//                    }
-//                };
-//
-//            case GET_GENOTYPES_IN_ORDER_OF_NAME:
-//                return new FunctionToBenchmark<htsjdk.variant.variantcontext.v13.VariantContext>() {
-//                    public void run(final htsjdk.variant.variantcontext.v13.VariantContext vc) {
-//                        ; // TODO - TEST IS BROKEN
-//                        //vc.getGenotypesOrderedByName();
-//                    }
-//                };
-//
-//            case CALC_GENOTYPE_COUNTS:
-//                return new FunctionToBenchmark<htsjdk.variant.variantcontext.v13.VariantContext>() {
-//                    public void run(final htsjdk.variant.variantcontext.v13.VariantContext vc) {
-//                        vc.getHetCount();
-//                    }
-//                };
-//
-//            case MERGE:
-//                return new FunctionToBenchmark<htsjdk.variant.variantcontext.v13.VariantContext>() {
-//                    public void run(final htsjdk.variant.variantcontext.v13.VariantContext vc) {
-//                        List<htsjdk.variant.variantcontext.v13.VariantContext> toMerge = new ArrayList<htsjdk.variant.variantcontext.v13.VariantContext>();
-//
-//                        for ( int i = 0; i < dupsToMerge; i++ ) {
-//                            Map<String, htsjdk.variant.variantcontext.v13.Genotype> gc = new HashMap<String, htsjdk.variant.variantcontext.v13.Genotype>();
-//                            for ( final htsjdk.variant.variantcontext.v13.Genotype g : vc.getGenotypes().values() ) {
-//                                String name = g.getSampleName()+"_"+i;
-//                                gc.put(name, new htsjdk.variant.variantcontext.v13.Genotype(name,
-//                                        g.getAlleles(), g.getLog10PError(), g.getFilters(), g.getAttributes(), g.isPhased(), g.getLikelihoods().getAsVector()));
-//                                toMerge.add(htsjdk.variant.variantcontext.v13.VariantContext.modifyGenotypes(vc, gc));
-//                            }
-//                        }
-//
-//                        htsjdk.variant.variantcontext.v13.VariantContextUtils.simpleMerge(b37GenomeLocParser,
-//                                toMerge, null,
-//                                htsjdk.variant.variantcontext.v13.VariantContextUtils.FilteredRecordMergeType.KEEP_IF_ANY_UNFILTERED,
-//                                htsjdk.variant.variantcontext.v13.VariantContextUtils.GenotypeMergeType.UNSORTED,
-//                                true, false, "set", false, true, false);
-//                    }
-//                };
-//
-//            default: throw new IllegalArgumentException("Unexpected operation " + operation);
-//        }
-//    }
-
-    public static void main(String[] args) {
-        com.google.caliper.Runner.main(VariantContextBenchmark.class, args);
-    }
-}
diff --git a/public/gatk-utils/pom.xml b/public/gatk-utils/pom.xml
index f656394..75884d7 100644
--- a/public/gatk-utils/pom.xml
+++ b/public/gatk-utils/pom.xml
@@ -5,7 +5,7 @@
     <parent>
         <groupId>org.broadinstitute.gatk</groupId>
         <artifactId>gatk-aggregator</artifactId>
-        <version>3.3</version>
+        <version>3.5</version>
         <relativePath>../..</relativePath>
     </parent>
 
@@ -22,14 +22,10 @@
 
     <dependencies>
         <dependency>
-            <groupId>samtools</groupId>
+            <groupId>com.github.samtools</groupId>
             <artifactId>htsjdk</artifactId>
         </dependency>
         <dependency>
-            <groupId>picard</groupId>
-            <artifactId>picard</artifactId>
-        </dependency>
-        <dependency>
             <groupId>log4j</groupId>
             <artifactId>log4j</artifactId>
         </dependency>
@@ -42,26 +38,19 @@
             <artifactId>fastutil</artifactId>
         </dependency>
         <dependency>
-            <groupId>org.simpleframework</groupId>
-            <artifactId>simple-xml</artifactId>
-        </dependency>
-        <dependency>
             <groupId>org.reflections</groupId>
             <artifactId>reflections</artifactId>
         </dependency>
+        <!-- slf4j bindings must only be at the package level: http://www.slf4j.org/manual.html -->
         <dependency>
             <groupId>org.slf4j</groupId>
-            <artifactId>slf4j-log4j12</artifactId>
+            <artifactId>slf4j-api</artifactId>
         </dependency>
         <dependency>
             <groupId>org.freemarker</groupId>
             <artifactId>freemarker</artifactId>
         </dependency>
         <dependency>
-            <groupId>org.apache.commons</groupId>
-            <artifactId>commons-jexl</artifactId>
-        </dependency>
-        <dependency>
             <groupId>commons-lang</groupId>
             <artifactId>commons-lang</artifactId>
         </dependency>
@@ -82,10 +71,6 @@
             <artifactId>jna</artifactId>
         </dependency>
         <dependency>
-            <groupId>net.java.dev.jets3t</groupId>
-            <artifactId>jets3t</artifactId>
-        </dependency>
-        <dependency>
             <groupId>us.levk</groupId>
             <artifactId>drmaa-gridengine</artifactId>
         </dependency>
@@ -119,6 +104,16 @@
         <plugins>
             <plugin>
                 <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-assembly-plugin</artifactId>
+                <executions>
+                    <execution>
+                        <id>example-resources</id>
+                        <phase>${gatk.generate-resources.phase}</phase>
+                    </execution>
+                </executions>
+            </plugin>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
                 <artifactId>maven-dependency-plugin</artifactId>
                 <executions>
                     <execution>
@@ -144,18 +139,6 @@
             </plugin>
             <plugin>
                 <groupId>org.apache.maven.plugins</groupId>
-                <artifactId>maven-resources-plugin</artifactId>
-                <executions>
-                    <execution>
-                        <id>copy-resource-bundle-log4j</id>
-                        <phase>prepare-package</phase>
-                    </execution>
-                </executions>
-            </plugin>
-            <!--
-            TODO: Refactor ResourceBundleExtractorDoclet.isWalker() and move the RBED to utils.
-            <plugin>
-                <groupId>org.apache.maven.plugins</groupId>
                 <artifactId>maven-javadoc-plugin</artifactId>
                 <executions>
                     <execution>
@@ -164,7 +147,6 @@
                     </execution>
                 </executions>
             </plugin>
-            -->
             <plugin>
                 <groupId>org.apache.maven.plugins</groupId>
                 <artifactId>maven-invoker-plugin</artifactId>
diff --git a/public/gatk-engine/src/main/assembly/example-resources.xml b/public/gatk-utils/src/main/assembly/example-resources.xml
similarity index 100%
rename from public/gatk-engine/src/main/assembly/example-resources.xml
rename to public/gatk-utils/src/main/assembly/example-resources.xml
diff --git a/public/gatk-utils/src/main/config/org/broadinstitute/gatk/utils/help/log4j.properties b/public/gatk-utils/src/main/config/org/broadinstitute/gatk/utils/help/log4j.properties
deleted file mode 100644
index 38c8335..0000000
--- a/public/gatk-utils/src/main/config/org/broadinstitute/gatk/utils/help/log4j.properties
+++ /dev/null
@@ -1,7 +0,0 @@
-# Root logger option
-log4j.rootLogger=INFO, stdout
-
-# Direct log messages to stdout
-log4j.appender.stdout=org.apache.log4j.ConsoleAppender
-log4j.appender.stdout.Target=System.out
-log4j.appender.stdout.layout=org.apache.log4j.SimpleLayout
diff --git a/public/gatk-utils/src/main/java/htsjdk/samtools/GATKBAMFileSpan.java b/public/gatk-utils/src/main/java/htsjdk/samtools/GATKBAMFileSpan.java
new file mode 100644
index 0000000..be38c06
--- /dev/null
+++ b/public/gatk-utils/src/main/java/htsjdk/samtools/GATKBAMFileSpan.java
@@ -0,0 +1,308 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package htsjdk.samtools;
+
+import htsjdk.samtools.util.PeekableIterator;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.Iterator;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Queue;
+
+/**
+ * A temporary solution to work around Java access rights issues:
+ * override BAMFileSpan and make it public.
+ * TODO: Eliminate once we determine the final fate of the BAM index reading code.
+ */
+public class GATKBAMFileSpan extends BAMFileSpan {
+    /**
+     * Create a new empty list of chunks.
+     */
+    public GATKBAMFileSpan() {
+        super();
+    }
+
+    /**
+     * Create a new GATKBAMFileSpan from an existing BAMFileSpan.
+     * @param sourceFileSpan
+     */
+    public GATKBAMFileSpan(SAMFileSpan sourceFileSpan) {
+        if(!(sourceFileSpan instanceof BAMFileSpan))
+            throw new SAMException("Unable to create GATKBAMFileSpan from a SAMFileSpan. Please submit a BAMFileSpan instead");
+        BAMFileSpan sourceBAMFileSpan = (BAMFileSpan)sourceFileSpan;
+        for(Chunk chunk: sourceBAMFileSpan.getChunks())
+            add(chunk instanceof GATKChunk ? chunk : new GATKChunk(chunk));
+    }
+
+    /**
+     * Convenience constructor to construct a BAM file span from
+     * a single chunk.
+     * @param chunk Chunk to use as the sole region in this span.
+     */
+    public GATKBAMFileSpan(final Chunk chunk) {
+        super(chunk);
+    }
+
+    /**
+     * Create a new chunk list from the given list of chunks.
+     * @param chunks Constituent chunks.
+     */
+    public GATKBAMFileSpan(final GATKChunk[] chunks) {
+        super(Arrays.<Chunk>asList(chunks));
+    }
+
+    @Override
+    public boolean equals(final Object other) {
+        if(!(other instanceof BAMFileSpan))
+            return false;
+
+        List<Chunk> theseChunks = getChunks();
+        List<Chunk> otherChunks = ((BAMFileSpan)other).getChunks();
+
+        if(theseChunks.size() != otherChunks.size())
+            return false;
+        for(int i = 0; i < theseChunks.size(); i++) {
+            if(!theseChunks.get(i).equals(otherChunks.get(i)))
+                return false;
+        }
+
+        return true;
+    }
+
+    /**
+     * Gets the constituent chunks stored in this span.
+     * @return An unmodifiable list of chunks.
+     */
+    public List<GATKChunk> getGATKChunks() {
+        List<GATKChunk> gatkChunks = new ArrayList<GATKChunk>();
+        for(Chunk chunk: getChunks())
+            gatkChunks.add(new GATKChunk(chunk));
+        return gatkChunks;
+    }
+
+    public String toString() {
+        StringBuilder builder = new StringBuilder();
+        for(GATKChunk chunk: getGATKChunks())
+            builder.append(String.format("%s;",chunk));
+        return builder.toString();
+    }
+
+    /**
+     * Returns an approximation of the number of uncompressed bytes in this
+     * file span.
+     * @return Approximation of uncompressed bytes in filespan.
+     */
+    public long size() {
+        long size = 0L;
+        for(GATKChunk chunk: getGATKChunks())
+            size += chunk.size();
+        return size;
+    }
+
+    /**
+     * Get a GATKChunk representing the "extent" of this file span, from the start of the first
+     * chunk to the end of the last chunk.The chunks list must be sorted in order to use this method.
+     *
+     * @return a GATKChunk representing the extent of this file span, or a GATKChunk representing
+     *         a span of size 0 if there are no chunks
+     */
+    public GATKChunk getExtent() {
+        validateSorted();   // TODO: defensive measure: may be unnecessary
+
+        List<Chunk> chunks = getChunks();
+        if ( chunks.isEmpty() ) {
+            return new GATKChunk(0L, 0L);
+        }
+
+        return new GATKChunk(chunks.get(0).getChunkStart(), chunks.get(chunks.size() - 1).getChunkEnd());
+    }
+
+    /**
+     * Validates the list of chunks to ensure that they appear in sorted order.
+     */
+    private void validateSorted() {
+        List<Chunk> chunks = getChunks();
+        for ( int i = 1; i < chunks.size(); i++ ) {
+            if ( chunks.get(i).getChunkStart() < chunks.get(i-1).getChunkEnd() ) {
+                throw new ReviewedGATKException(String.format("Chunk list is unsorted; chunk %s is before chunk %s", chunks.get(i-1), chunks.get(i)));
+
+            }
+        }
+    }
+
+    /**
+     * Computes the union of two FileSpans.
+     * @param other FileSpan to union with this one.
+     * @return A file span that's been unioned.
+     */
+    public GATKBAMFileSpan union(final GATKBAMFileSpan other) {
+        // No data?  Return an empty file span.
+        if(getGATKChunks().size() == 0 && other.getGATKChunks().size() == 0)
+            return new GATKBAMFileSpan();
+
+        LinkedList<GATKChunk> unmergedUnion = new LinkedList<GATKChunk>();
+        unmergedUnion.addAll(getGATKChunks());
+        unmergedUnion.addAll(other.getGATKChunks());
+        Collections.sort(unmergedUnion);
+
+        List<GATKChunk> mergedUnion = new ArrayList<GATKChunk>();
+        GATKChunk currentChunk = unmergedUnion.remove();
+        while(!unmergedUnion.isEmpty()) {
+
+            // While the current chunk can be merged with the next chunk:
+            while( ! unmergedUnion.isEmpty() &&
+                   (currentChunk.overlaps(unmergedUnion.peek()) || currentChunk.isAdjacentTo(unmergedUnion.peek())) ) {
+
+                // Merge the current chunk with the next chunk:
+                GATKChunk nextChunk = unmergedUnion.remove();
+                currentChunk = currentChunk.merge(nextChunk);
+            }
+            // Add the accumulated range.
+            mergedUnion.add(currentChunk);
+            currentChunk = !unmergedUnion.isEmpty() ? unmergedUnion.remove() : null;
+        }
+
+        // At end of the loop above, the last chunk will be contained in currentChunk and will not yet have been added.  Add it.
+        if(currentChunk !=null)
+            mergedUnion.add(currentChunk);
+
+        return new GATKBAMFileSpan(mergedUnion.toArray(new GATKChunk[mergedUnion.size()]));
+    }
+
+    /**
+     * Intersects two BAM file spans.
+     * @param other File span to intersect with this one.
+     * @return The intersected BAM file span.
+     */
+    public GATKBAMFileSpan intersection(final GATKBAMFileSpan other) {
+        Iterator<GATKChunk> thisIterator = getGATKChunks().iterator();
+        Iterator<GATKChunk> otherIterator = other.getGATKChunks().iterator();
+
+        if(!thisIterator.hasNext() || !otherIterator.hasNext())
+            return new GATKBAMFileSpan();
+
+        GATKChunk thisChunk = thisIterator.next();
+        GATKChunk otherChunk = otherIterator.next();
+
+        List<GATKChunk> intersected = new ArrayList<GATKChunk>();
+
+        while(thisChunk != null && otherChunk != null) {
+            // If this iterator is before other, skip this ahead.
+            if(thisChunk.getChunkEnd() <= otherChunk.getChunkStart()) {
+                thisChunk = thisIterator.hasNext() ? thisIterator.next() : null;
+                continue;
+            }
+
+            // If other iterator is before this, skip other ahead.
+            if(thisChunk.getChunkStart() >= otherChunk.getChunkEnd()) {
+                otherChunk = otherIterator.hasNext() ? otherIterator.next() : null;
+                continue;
+            }
+
+            // If these two chunks overlap, pull out intersection of data and truncated current chunks to point after
+            // the intersection (or next chunk if no such overlap exists).
+            if(thisChunk.overlaps(otherChunk)) {
+                // Determine the chunk constraints
+                GATKChunk firstChunk = thisChunk.getChunkStart() < otherChunk.getChunkStart() ? thisChunk : otherChunk;
+                GATKChunk secondChunk = thisChunk==firstChunk ? otherChunk : thisChunk;
+                GATKChunk intersectedChunk = new GATKChunk(secondChunk.getChunkStart(),Math.min(firstChunk.getChunkEnd(),secondChunk.getChunkEnd()));
+                intersected.add(intersectedChunk);
+
+                if(thisChunk.getChunkEnd() > intersectedChunk.getChunkEnd())
+                    thisChunk = new GATKChunk(intersectedChunk.getChunkEnd(),thisChunk.getChunkEnd());
+                else
+                    thisChunk = thisIterator.hasNext() ? thisIterator.next() : null;
+
+                if(otherChunk.getChunkEnd() > intersectedChunk.getChunkEnd())
+                    otherChunk = new GATKChunk(intersectedChunk.getChunkEnd(),otherChunk.getChunkEnd());
+                else
+                    otherChunk = otherIterator.hasNext() ? otherIterator.next() : null;
+            }
+
+        }
+
+        return new GATKBAMFileSpan(intersected.toArray(new GATKChunk[intersected.size()]));
+    }
+
+    /**
+     * Substracts other file span from this file span.
+     * @param other File span to strike out.
+     * @return This file span minuse the other file span.
+     */
+
+    public GATKBAMFileSpan minus(final GATKBAMFileSpan other) {
+        Iterator<GATKChunk> thisIterator = getGATKChunks().iterator();
+        Iterator<GATKChunk> otherIterator = other.getGATKChunks().iterator();
+
+        if(!thisIterator.hasNext() || !otherIterator.hasNext())
+            return this;
+
+        GATKChunk thisChunk = thisIterator.next();
+        GATKChunk otherChunk = otherIterator.next();
+
+        List<GATKChunk> subtracted = new ArrayList<GATKChunk>();
+
+        while(thisChunk != null && otherChunk != null) {
+            // If this iterator is before the other, add this to the subtracted list and forge ahead.
+            if(thisChunk.getChunkEnd() <= otherChunk.getChunkStart()) {
+                subtracted.add(thisChunk);
+                thisChunk = thisIterator.hasNext() ? thisIterator.next() : null;
+                continue;
+            }
+
+            // If other iterator is before this, skip other ahead.
+            if(thisChunk.getChunkStart() >= otherChunk.getChunkEnd()) {
+                otherChunk = otherIterator.hasNext() ? otherIterator.next() : null;
+                continue;
+            }
+
+            // If these two chunks overlap, pull out intersection of data and truncated current chunks to point after
+            // the intersection (or next chunk if no such overlap exists).
+            if(thisChunk.overlaps(otherChunk)) {
+                // Add in any sort of prefix that this chunk might have over the other.
+                if(thisChunk.getChunkStart() < otherChunk.getChunkStart())
+                    subtracted.add(new GATKChunk(thisChunk.getChunkStart(),otherChunk.getChunkStart()));
+
+                if(thisChunk.getChunkEnd() > otherChunk.getChunkEnd())
+                    thisChunk = new GATKChunk(otherChunk.getChunkEnd(),thisChunk.getChunkEnd());
+                else
+                    thisChunk = thisIterator.hasNext() ? thisIterator.next() : null;
+            }
+        }
+
+        // Finish up any remaining contents of this that didn't make it into the subtracted array.
+        if(thisChunk != null)
+            subtracted.add(thisChunk);
+        while(thisIterator.hasNext())
+            subtracted.add(thisIterator.next());
+
+        return new GATKBAMFileSpan(subtracted.toArray(new GATKChunk[subtracted.size()]));
+    }
+}
diff --git a/public/gatk-utils/src/main/java/htsjdk/samtools/GATKBin.java b/public/gatk-utils/src/main/java/htsjdk/samtools/GATKBin.java
new file mode 100644
index 0000000..d9698c3
--- /dev/null
+++ b/public/gatk-utils/src/main/java/htsjdk/samtools/GATKBin.java
@@ -0,0 +1,146 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package htsjdk.samtools;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+
+/**
+ * A temporary solution to work around Java access rights issues:
+ * override GATKBin and make it public.
+ * TODO: Eliminate once we determine the final fate of the BAM index reading code.
+ */
+public class GATKBin implements Comparable<GATKBin> {
+    /**
+     * The reference sequence associated with this bin.
+     */
+    private final int referenceSequence;
+
+    /**
+     * The number of this bin within the BAM file.
+     */
+    private final int binNumber;
+
+    /**
+     * The chunks associated with this bin.
+     */
+    private GATKChunk[] chunkList;
+
+    public GATKBin(Bin bin) {
+        this(bin.getReferenceSequence(),bin.getBinNumber());
+    }
+
+    public GATKBin(final int referenceSequence, final int binNumber) {
+        this.referenceSequence = referenceSequence;
+        this.binNumber = binNumber;
+    }
+
+    public int getReferenceSequence() {
+        return referenceSequence;
+    }
+
+    public int getBinNumber() {
+        return binNumber;
+    }
+
+    /**
+     * Convert this GATKBin to a normal bin, for processing with the standard BAM query interface.
+     * @return
+     */
+    public Bin toBin() {
+        return new Bin(referenceSequence,binNumber);
+    }
+
+    /**
+     * See whether two bins are equal.  If the ref seq and the bin number
+     * are equal, assume equality of the chunk list.
+     * @param other The other Bin to which to compare this.
+     * @return True if the two bins are equal.  False otherwise.
+     */
+    @Override
+    public boolean equals(Object other) {
+        if(other == null) return false;
+        if(!(other instanceof GATKBin)) return false;
+
+        GATKBin otherBin = (GATKBin)other;
+        return this.referenceSequence == otherBin.referenceSequence && this.binNumber == otherBin.binNumber;
+    }
+
+    /**
+     * Compute a unique hash code for the given reference sequence and bin number.
+     * @return A unique hash code.
+     */
+    @Override
+    public int hashCode() {
+        return ((Integer)referenceSequence).hashCode() ^ ((Integer)binNumber).hashCode();
+    }
+
+    /**
+     * Compare two bins to see what ordering they should appear in.
+     * @param other Other bin to which this bin should be compared.
+     * @return -1 if this < other, 0 if this == other, 1 if this > other.
+     */
+    public int compareTo(GATKBin other) {
+        if(other == null)
+            throw new ClassCastException("Cannot compare to a null object");
+
+        // Check the reference sequences first.
+        if(this.referenceSequence != other.referenceSequence)
+            return referenceSequence - other.referenceSequence;
+
+        // Then check the bin ordering.
+        return binNumber - other.binNumber;
+    }
+
+    /**
+     * Sets the chunks associated with this bin
+     */
+    public void setChunkList(GATKChunk[] list){
+        chunkList = list;
+    }
+
+    /**
+     * Gets the list of chunks associated with this bin.
+     * @return the chunks in this bin.  If no chunks are associated, an empty list will be returned.
+     */
+    public GATKChunk[] getChunkList(){
+        if(chunkList == null)
+            return new GATKChunk[0];
+        return chunkList;
+    }
+
+    // HACK: Using this classes package permissions to further hack the CRAM created SAMRecord's indexing bin and binary attributes.
+    public static Integer getReadIndexingBin(final SAMRecord read) {
+        return read.getIndexingBin();
+    }
+    public static void setReadIndexingBin(final SAMRecord read, final Integer indexingBin) {
+        read.setIndexingBin(indexingBin);
+    }
+    public static SAMBinaryTagAndValue getReadBinaryAttributes(final SAMRecord read) {
+        return read.getBinaryAttributes();
+    }
+}
diff --git a/public/gatk-utils/src/main/java/htsjdk/samtools/GATKChunk.java b/public/gatk-utils/src/main/java/htsjdk/samtools/GATKChunk.java
new file mode 100644
index 0000000..1a10819
--- /dev/null
+++ b/public/gatk-utils/src/main/java/htsjdk/samtools/GATKChunk.java
@@ -0,0 +1,116 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package htsjdk.samtools;
+
+/**
+ * A temporary solution to work around Java access rights issues:
+ * override chunk and make it public.
+ * TODO: Eliminate once we determine the final fate of the BAM index reading code.
+ */
+public class GATKChunk extends Chunk {
+    /**
+     * The average ratio of compressed block size / uncompressed block size, computed empirically
+     * using the output of org.broadinstitute.gatk.engine.datasources.reads.utilities.PrintBGZFBounds.
+     */
+    private static final double AVERAGE_BAM_COMPRESSION_RATIO = 0.39;
+
+    public GATKChunk(final long start, final long stop) {
+        super(start,stop);
+    }
+
+    public GATKChunk(final long blockStart, final int blockOffsetStart, final long blockEnd, final int blockOffsetEnd) {
+        super(blockStart << 16 | blockOffsetStart,blockEnd << 16 | blockOffsetEnd);
+    }
+
+    public GATKChunk(final Chunk chunk) {
+        super(chunk.getChunkStart(),chunk.getChunkEnd());
+    }
+
+    @Override
+    public GATKChunk clone() {
+        return new GATKChunk(getChunkStart(),getChunkEnd());
+    }
+
+    @Override
+    public long getChunkStart() {
+        return super.getChunkStart();
+    }
+
+    @Override
+    public void setChunkStart(final long value) {
+        super.setChunkStart(value);
+    }
+
+    @Override
+    public long getChunkEnd() {
+        return super.getChunkEnd();
+    }
+
+    @Override
+    public void setChunkEnd(final long value) {
+        super.setChunkEnd(value);
+    }
+
+    public long getBlockStart() {
+        return getChunkStart() >>> 16;
+    }
+
+    public int getBlockOffsetStart() {
+        return (int)(getChunkStart() & 0xFFFF);
+    }
+
+    public long getBlockEnd() {
+        return getChunkEnd() >>> 16;
+    }
+
+    public int getBlockOffsetEnd() {
+        return ((int)getChunkEnd() & 0xFFFF);
+    }
+
+    /**
+     * Computes an approximation of the uncompressed size of the
+     * chunk, in bytes.  Can be used to determine relative weights
+     * of chunk size.
+     * @return An approximation of the chunk size in bytes.
+     */
+    public long size() {
+        final long chunkSpan = Math.round(((getChunkEnd()>>16)-(getChunkStart()>>16))/AVERAGE_BAM_COMPRESSION_RATIO);
+        final int offsetSpan = (int)((getChunkEnd()&0xFFFF)-(getChunkStart()&0xFFFF));
+        return chunkSpan + offsetSpan;
+    }
+
+    /**
+     * Merges two chunks together. The caller is responsible for testing whether the
+     * chunks overlap/are adjacent before calling this method!
+     *
+     * @param other the chunk to merge with this chunk
+     * @return a new chunk representing the union of the two chunks (provided the chunks were
+     *         overlapping/adjacent)
+     */
+    public GATKChunk merge ( GATKChunk other ) {
+        return new GATKChunk(Math.min(getChunkStart(), other.getChunkStart()), Math.max(getChunkEnd(), other.getChunkEnd()));
+    }
+}
diff --git a/public/gatk-utils/src/main/java/htsjdk/samtools/PicardNamespaceUtils.java b/public/gatk-utils/src/main/java/htsjdk/samtools/PicardNamespaceUtils.java
new file mode 100644
index 0000000..f861ccb
--- /dev/null
+++ b/public/gatk-utils/src/main/java/htsjdk/samtools/PicardNamespaceUtils.java
@@ -0,0 +1,40 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package htsjdk.samtools;
+
+/**
+ * Utils that insist on being in the same package as Picard.
+ */
public class PicardNamespaceUtils {
    /**
     * Private constructor only.  Do not instantiate.
     */
    private PicardNamespaceUtils() {}

    /**
     * Attaches a file source to a SAMRecord by delegating to the
     * package-private SAMRecord.setFileSource(); this class exists in the
     * htsjdk.samtools package solely to gain that access.
     */
    public static void setFileSource(final SAMRecord read, final SAMFileSource fileSource) {
        read.setFileSource(fileSource);
    }
}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/AutoFormattingTime.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/AutoFormattingTime.java
new file mode 100644
index 0000000..e77ff64
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/AutoFormattingTime.java
@@ -0,0 +1,185 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils;
+
+import java.util.concurrent.TimeUnit;
+
+/**
+ * Conveniently print a time with an automatically determined time unit
+ *
+ * For example, if the amount of time is 10^6 seconds, instead of printing
+ * out 10^6 seconds, prints out 11.57 days instead.
+ *
+ * Dynamically uses time units:
+ *
+ *   - seconds: s
+ *   - minutes: m
+ *   - hours  : h
+ *   - days   : d
+ *   - weeks  : w
+ *
+ * @author depristo
+ * @since 2009
+ */
public class AutoFormattingTime {
    private static final double NANOSECONDS_PER_SECOND = 1e9;

    /**
     * Width a la format's %WIDTH.PRECISIONf, or -1 for no explicit width
     */
    private final int width; // for format

    /**
     * Precision a la format's %WIDTH.PRECISIONf
     */
    private final int precision;      // for format

    /**
     * The elapsed time in nanoseconds
     */
    private final long nanoTime;

    /**
     * Create a new autoformatting time with elapsed time nanoTime in nanoseconds
     * @param nanoTime the elapsed time in nanoseconds
     * @param width the width >= 0 (a la format's %WIDTH.PRECISIONf) to use to display the format, or -1 if none is required
     * @param precision the precision to display the time at.  Must be >= 0
     * @throws IllegalArgumentException if width < -1 or precision < 0
     */
    public AutoFormattingTime(final long nanoTime, final int width, final int precision) {
        if ( width < -1 ) throw new IllegalArgumentException("Width " + width + " must be >= -1");
        if ( precision < 0 ) throw new IllegalArgumentException("Precision " + precision + " must be >= 0");

        this.width = width;
        this.nanoTime = nanoTime;
        this.precision = precision;
    }

    /**
     * @see #AutoFormattingTime(long, int, int) but with default width and precision
     * @param nanoTime the elapsed time in nanoseconds
     */
    public AutoFormattingTime(final long nanoTime) {
        this(nanoTime, 6, 1);
    }

    /**
     * @see #AutoFormattingTime(long, int, int) but with time specified as a double in seconds
     */
    public AutoFormattingTime(final double timeInSeconds, final int width, final int precision) {
        this(secondsToNano(timeInSeconds), width, precision);
    }

    /**
     * @see #AutoFormattingTime(long) but with time specified as a double in seconds
     */
    public AutoFormattingTime(double timeInSeconds) {
        this(timeInSeconds, 6, 1);
    }

    /**
     * Precomputed format string suitable for String.format with the required width and precision,
     * e.g. "%6.1f %s" for width 6 / precision 1, or "%.1f %s" when width is -1.
     */
    private String getFormatString() {
        final StringBuilder b = new StringBuilder("%");
        if ( width != -1 )
            b.append(width);
        b.append(".").append(precision).append("f %s");
        return b.toString();
    }

    /**
     * Get the time associated with this object in nanoseconds
     * @return the time in nanoseconds
     */
    public long getTimeInNanoSeconds() {
        return nanoTime;
    }

    /**
     * Get the time associated with this object in seconds, as a double
     * @return time in seconds as a double, including the fractional part
     */
    public double getTimeInSeconds() {
        // Divide directly rather than using TimeUnit.NANOSECONDS.toSeconds(),
        // which truncates to a whole number of seconds and would silently
        // discard the sub-second fraction (e.g. 1.5s would report as 1.0s).
        return nanoTime / NANOSECONDS_PER_SECOND;
    }

    /**
     * @return the width (a la format's %WIDTH.PRECISIONf), or -1 if none was requested
     */
    public int getWidth() {
        return width;
    }

    /**
     * @return the precision (a la format's %WIDTH.PRECISIONf)
     */
    public int getPrecision() {
        return precision;
    }

    /**
     * Get a string representation of this time, automatically converting the time
     * to a human readable unit with width and precision provided during construction.
     *
     * The unit thresholds are deliberately loose (e.g. minutes only kick in past
     * 120 seconds) so that values like "90 s" are not shown as "1.5 m".
     *
     * @return a non-null string
     */
    @Override
    public String toString() {
        double unitTime = getTimeInSeconds();
        String unit = "s";

        if ( unitTime > 120 ) {
            unitTime /= 60; // minutes
            unit = "m";

            if ( unitTime > 120 ) {
                unitTime /= 60; // hours
                unit = "h";

                if ( unitTime > 100 ) {
                    unitTime /= 24; // days
                    unit = "d";

                    if ( unitTime > 20 ) {
                        unitTime /= 7; // weeks
                        unit = "w";
                    }
                }
            }
        }

        return String.format(getFormatString(), unitTime, unit);
    }

    /**
     * Convert a time in seconds as a double into nanoseconds as a long
     * @param timeInSeconds an elapsed time in seconds, as a double
     * @return an equivalent value in nanoseconds as a long
     */
    private static long secondsToNano(final double timeInSeconds) {
        return (long)(NANOSECONDS_PER_SECOND * timeInSeconds);
    }

}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/BaseUtils.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/BaseUtils.java
new file mode 100644
index 0000000..f0ed568
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/BaseUtils.java
@@ -0,0 +1,671 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils;
+
+import htsjdk.samtools.util.StringUtil;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+import org.broadinstitute.gatk.utils.exceptions.UserException;
+
+import java.util.Arrays;
+import java.util.Comparator;
+import java.util.Random;
+
+/**
+ * BaseUtils contains some basic utilities for manipulating nucleotides.
+ */
+public class BaseUtils {
+
    /**
     * Canonical bases plus 'N' (unknown) and 'D' (deletion).  The ordinal
     * order (A=0, C=1, G=2, T=3, N=4, D=5) is relied upon by the index maps
     * and index-based helpers below.
     */
    public enum Base {
        A ('A'),
        C ('C'),
        G ('G'),
        T ('T'),
        N ('N'),
        D ('D');

        // The base character as an ASCII byte.
        // NOTE(review): this field is public and mutable; it should ideally be
        // final — confirm no caller assigns to it before changing.
        public byte base;

        private Base(final char base) {
            this.base = (byte)base;
        }
    }
+
    // todo -- add this to the generalized base abstraction using the Base enum.
    // The canonical bases, and the extended set including N (unknown) and D (deletion).
    // NOTE(review): these arrays are public and mutable, so a caller could
    // corrupt them; consider defensive exposure if this code is kept.
    public final static byte[] BASES = {'A', 'C', 'G', 'T'};
    public final static byte[] EXTENDED_BASES = {'A', 'C', 'G', 'T', 'N', 'D'};

    // Maps an ASCII base character (used directly as the array index) to its
    // Base ordinal; every unlisted character maps to -1.
    static private final int[] baseIndexMap = new int[256];
    static {
        Arrays.fill(baseIndexMap, -1);
        baseIndexMap['A'] = Base.A.ordinal();
        baseIndexMap['a'] = Base.A.ordinal();
        baseIndexMap['*'] = Base.A.ordinal();    // the wildcard character counts as an A
        baseIndexMap['C'] = Base.C.ordinal();
        baseIndexMap['c'] = Base.C.ordinal();
        baseIndexMap['G'] = Base.G.ordinal();
        baseIndexMap['g'] = Base.G.ordinal();
        baseIndexMap['T'] = Base.T.ordinal();
        baseIndexMap['t'] = Base.T.ordinal();
    }

    // Same as baseIndexMap, but additionally maps every IUPAC ambiguity code
    // (either case) to Base.N's ordinal; unlike the simple map, '*' is invalid.
    static private final int[] baseIndexWithIupacMap = baseIndexMap.clone();
    static {
        baseIndexWithIupacMap['*'] = -1;    // the wildcard character is bad
        baseIndexWithIupacMap['N'] = Base.N.ordinal();
        baseIndexWithIupacMap['n'] = Base.N.ordinal();
        baseIndexWithIupacMap['R'] = Base.N.ordinal();
        baseIndexWithIupacMap['r'] = Base.N.ordinal();
        baseIndexWithIupacMap['Y'] = Base.N.ordinal();
        baseIndexWithIupacMap['y'] = Base.N.ordinal();
        baseIndexWithIupacMap['M'] = Base.N.ordinal();
        baseIndexWithIupacMap['m'] = Base.N.ordinal();
        baseIndexWithIupacMap['K'] = Base.N.ordinal();
        baseIndexWithIupacMap['k'] = Base.N.ordinal();
        baseIndexWithIupacMap['W'] = Base.N.ordinal();
        baseIndexWithIupacMap['w'] = Base.N.ordinal();
        baseIndexWithIupacMap['S'] = Base.N.ordinal();
        baseIndexWithIupacMap['s'] = Base.N.ordinal();
        baseIndexWithIupacMap['B'] = Base.N.ordinal();
        baseIndexWithIupacMap['b'] = Base.N.ordinal();
        baseIndexWithIupacMap['D'] = Base.N.ordinal();
        baseIndexWithIupacMap['d'] = Base.N.ordinal();
        baseIndexWithIupacMap['H'] = Base.N.ordinal();
        baseIndexWithIupacMap['h'] = Base.N.ordinal();
        baseIndexWithIupacMap['V'] = Base.N.ordinal();
        baseIndexWithIupacMap['v'] = Base.N.ordinal();
    }
+
    // In genetics, a transition is a mutation changing a purine to another purine nucleotide (A <-> G) or
    // a pyrimidine to another pyrimidine nucleotide (C <-> T).
    // Approximately two out of every three single nucleotide polymorphisms (SNPs) are transitions.
    public enum BaseSubstitutionType {
        TRANSITION,         // A <-> G or C <-> T
        TRANSVERSION        // any other substitution (purine <-> pyrimidine)
    }
+
+    /**
+     * Returns the base substitution type of the 2 state SNP
+     *
+     * @param base1
+     * @param base2
+     * @return
+     */
+    public static BaseSubstitutionType SNPSubstitutionType(byte base1, byte base2) {
+        BaseSubstitutionType t = isTransition(base1, base2) ? BaseSubstitutionType.TRANSITION : BaseSubstitutionType.TRANSVERSION;
+        //System.out.printf("SNPSubstitutionType( char %c, char %c ) => %s%n", base1, base2, t);
+        return t;
+    }
+
+    public static boolean isTransition(byte base1, byte base2) {
+        final int b1 = simpleBaseToBaseIndex(base1);
+        final int b2 = simpleBaseToBaseIndex(base2);
+        return b1 == Base.A.ordinal() && b2 == Base.G.ordinal() || b1 == Base.G.ordinal() && b2 == Base.A.ordinal() ||
+                b1 == Base.C.ordinal() && b2 == Base.T.ordinal() || b1 == Base.T.ordinal() && b2 == Base.C.ordinal();
+    }
+
    /**
     * Tests whether the substitution between two bases is a transversion.
     * Note: defined simply as "not a transition", so identical bases and
     * unrecognized bases also report true here.
     */
    public static boolean isTransversion(byte base1, byte base2) {
        return !isTransition(base1, base2);
    }

    /**
     * Private constructor.  No instantiating this class!
     */
    private BaseUtils() {}

    /**
     * Tests whether two bases map to the same canonical base index, which makes
     * the comparison case-insensitive ('a' equals 'A').  Note that any two
     * non-canonical bases compare equal (both map to -1), and '*' counts as 'A'.
     */
    static public boolean basesAreEqual(byte base1, byte base2) {
        return simpleBaseToBaseIndex(base1) == simpleBaseToBaseIndex(base2);
    }
+
+    /**
+     * Checks whether to bases are the same in fact ignore ambiguous 'N' bases.
+     *
+     * @param base1 first base to compare.
+     * @param base2 second base to compare.
+     * @return true if {@code base1 == base2} or either is an 'N', false otherwise.
+     */
+    static public boolean basesAreEqualIgnoreAmbiguous(final byte base1, final byte base2) {
+        if (base1 == base2) return true;
+        else if (base1 == 'n' || base1 == 'N' || base2 == 'N' || base2 == 'n') return true;
+        else return false;
+    }
+
    /**
     * Compares two base array ranges, checking whether they contain the same
     * bases position by position (with 'N' matching anything).
     *
     * <p>
     *     Two empty ranges are trivially equal, i.e. {@code length == 0} results in {@code true}.
     * </p>
     *
     * @param bases1 first base array to compare.
     * @param offset1 position of the first base in bases1 to compare.
     * @param bases2 second base array to compare.
     * @param offset2 position of the first base in bases2 to compare.
     * @param length number of bases to compare.
     *
     * @throws NullPointerException if {@code bases1} or {@code bases2} is {@code null}.
     * @throws ArrayIndexOutOfBoundsException if:
     * <ul>
     *      <li>{@code offset1} is not within the range [0,{@code bases1.length}) or</li>
     *     <li>{@code offset2} is not within the range [0,{@code bases2.length}) or</li>
     *     <li>{@code offset1 + length} is not within the range [0,{@code bases1.length}) or </li>
     *     <li>{@code offset2 + length} is not within the range [0,{@code bases2.length})</li>
     * </ul>
     * @return true if all {@code length} compared positions match, false otherwise.
     */
    static public boolean basesAreEqualIgnoreAmbiguous(final byte[] bases1, final int offset1, final byte[] bases2, final int offset2, final int length) {
        for (int i = 0; i < length; i++)
            if (!basesAreEqualIgnoreAmbiguous(bases1[offset1 + i],bases2[offset2 + i])) return false;
        return true;
    }
+
    /**
     * Case-insensitive equality over the extended alphabet (A/C/G/T/N/D) via
     * extendedBaseToBaseIndex; any two unrecognized bases compare equal (-1 == -1).
     */
    static public boolean extendedBasesAreEqual(byte base1, byte base2) {
        return extendedBaseToBaseIndex(base1) == extendedBaseToBaseIndex(base2);
    }
+
+    /**
+     * @return true iff the bases array contains at least one instance of base
+     */
+    static public boolean containsBase(final byte[] bases, final byte base) {
+        for ( final byte b : bases ) {
+            if ( b == base )
+                return true;
+        }
+        return false;
+    }
+
+    public static boolean isUpperCase(final byte[] bases) {
+        for ( byte base : bases )
+            if ( ! isUpperCase(base) )
+                return false;
+        return true;
+    }
+
+    public static boolean isUpperCase(final byte base) {
+        return base >= 'A' && base <= 'Z';
+    }
+
+    public static byte[] convertIUPACtoN(final byte[] bases, final boolean errorOnBadReferenceBase, final boolean ignoreConversionOfFirstByte) {
+        final int length = bases.length;
+        final int start = ignoreConversionOfFirstByte ? 1 : 0;
+
+        for ( int i = start; i < length; i++ ) {
+            final int baseIndex = baseIndexWithIupacMap[bases[i]];
+            if ( baseIndex == Base.N.ordinal() ) {
+                bases[i] = 'N';
+            } else if ( errorOnBadReferenceBase && baseIndex == -1 ) {
+                throw new UserException.BadInput("We encountered a non-standard non-IUPAC base in the provided reference: '" + bases[i] + "'");
+            }
+        }
+        return bases;
+    }
+
    /**
     * Converts a IUPAC nucleotide code to a pair of bases
     *
     * @param code the IUPAC code, case-insensitive ('*' counts as 'A')
     * @return a two-element array holding the pair of bases the code denotes;
     *         {'N','N'} for any unrecognized code
     */
    @Deprecated
    static public char[] iupacToBases(char code) {
        char[] bases = new char[2];
        switch (code) {
            case '*':               // the wildcard character counts as an A
            case 'A':
            case 'a':
                bases[0] = bases[1] = 'A';
                break;
            case 'C':
            case 'c':
                bases[0] = bases[1] = 'C';
                break;
            case 'G':
            case 'g':
                bases[0] = bases[1] = 'G';
                break;
            case 'T':
            case 't':
                bases[0] = bases[1] = 'T';
                break;
            case 'R':
            case 'r':
                bases[0] = 'A';
                bases[1] = 'G';
                break;
            case 'Y':
            case 'y':
                bases[0] = 'C';
                bases[1] = 'T';
                break;
            case 'S':
            case 's':
                bases[0] = 'G';
                bases[1] = 'C';
                break;
            case 'W':
            case 'w':
                bases[0] = 'A';
                bases[1] = 'T';
                break;
            case 'K':
            case 'k':
                bases[0] = 'G';
                bases[1] = 'T';
                break;
            case 'M':
            case 'm':
                bases[0] = 'A';
                bases[1] = 'C';
                break;
            default:
                // Unknown codes (including the three-base codes B/D/H/V) fall
                // through to the fully ambiguous pair.
                bases[0] = bases[1] = 'N';
        }
        return bases;
    }
+
    /**
     * Converts a pair of bases to their IUPAC ambiguity code
     *
     * @param base1  1st base
     * @param base2  2nd base
     * @return the IUPAC code for the pair; 'N' if either base is not a regular
     *         base, or the base itself if the two are equal
     */
    static public byte basesToIUPAC(final byte base1, final byte base2) {
        // ensure that the bases come in order (single recursive swap); the
        // branches below rely on base1 <= base2 byte-wise.
        if ( base2 < base1 )
            return basesToIUPAC(base2, base1);

        // ensure that the bases are regular ones
        if ( !isRegularBase(base1) || !isRegularBase(base2) )
            return Base.N.base;

        // IUPAC codes are not needed if the bases are identical
        if ( basesAreEqual(base1, base2) )
            return base1;

        // With the pair ordered, base1 determines the family:
        // A+{C,G,T} -> M/R/W
        if ( base1 == Base.A.base )
            return (byte)(base2 == Base.C.base ? 'M' : (base2 == Base.G.base ? 'R' : 'W'));

        // C+{G,T} -> S/Y
        if ( base1 == Base.C.base )
            return (byte)(base2 == Base.G.base ? 'S' : 'Y');

        // the only possibility left is G/T
        return 'K';
    }
+
+    /**
+     * Converts a simple base to a base index
+     *
+     * @param base [AaCcGgTt]
+     * @return 0, 1, 2, 3, or -1 if the base can't be understood
+     */
+    static public int simpleBaseToBaseIndex(final byte base) {
+        if ( base < 0 || base >= 256 )
+            throw new UserException.BadInput("Non-standard bases were encountered in either the input reference or BAM file(s)");
+        return baseIndexMap[base];
+    }
+
    /**
     * Converts a simple base to a base index
     *
     * @param base [AaCcGgTt]
     * @return 0, 1, 2, 3, or -1 if the base can't be understood
     * @deprecated use the byte overload instead.  NOTE(review): unlike the byte
     *             overload this does no range check, so a char above 255 would
     *             index past the 256-entry map and throw — confirm callers
     *             only pass ASCII.
     */
    @Deprecated
    static public int simpleBaseToBaseIndex(char base) {
        return baseIndexMap[base];
    }
+
+    static public int extendedBaseToBaseIndex(byte base) {
+        switch (base) {
+            case 'd':
+            case 'D':
+                return Base.D.ordinal();
+            case 'n':
+            case 'N':
+                return Base.N.ordinal();
+
+            default:
+                return simpleBaseToBaseIndex(base);
+        }
+    }
+
    /** char variant; prefer the byte overload. */
    @Deprecated
    static public boolean isRegularBase( final char base ) {
        return simpleBaseToBaseIndex(base) != -1;
    }

    /** @return true iff base is one of [AaCcGgTt] (or '*', which maps to 'A'). */
    static public boolean isRegularBase( final byte base ) {
        return simpleBaseToBaseIndex(base) != -1;
    }
+
+    static public boolean isAllRegularBases( final byte[] bases ) {
+        for( final byte base : bases) {
+            if( !isRegularBase(base) ) { return false; }
+        }
+        return true;
+    }
+
    /** @return true iff base is the ambiguity code 'N', in either case. */
    static public boolean isNBase(byte base) {
        return base == 'N' || base == 'n';
    }
+
+    /**
+     * Converts a base index to a simple base
+     *
+     * @param baseIndex 0, 1, 2, 3
+     * @return A, C, G, T, or '.' if the index can't be understood
+     */
+    static public byte baseIndexToSimpleBase(int baseIndex) {
+        switch (baseIndex) {
+            case 0:
+                return 'A';
+            case 1:
+                return 'C';
+            case 2:
+                return 'G';
+            case 3:
+                return 'T';
+            default:
+                return '.';
+        }
+    }
+
+    /**
+     * Return the complement (A <-> T or C <-> G) of a base, or the specified base if it can't be complemented (i.e. an ambiguous base).
+     *
+     * @param base the base [AaCcGgTt]
+     * @return the complementary base, or the input base if it's not one of the understood ones
+     */
+    static public byte simpleComplement(byte base) {
+        switch (base) {
+            case 'A':
+            case 'a':
+                return 'T';
+            case 'C':
+            case 'c':
+                return 'G';
+            case 'G':
+            case 'g':
+                return 'C';
+            case 'T':
+            case 't':
+                return 'A';
+            default:
+                return base;
+        }
+    }
+
    /**
     * Char-based variant of {@link #simpleComplement(byte)}: returns the complement of a base,
     * or the input itself if it is not one of [AaCcGgTt].
     *
     * @param base the base to complement
     * @return the complementary base (upper case for regular bases), via a cast through the byte overload
     * @deprecated work with bytes instead
     */
    @Deprecated
    static private char simpleComplement(char base) {
        return (char) simpleComplement((byte) base);
    }
+
+    /**
+     * Reverse complement a byte array of bases (that is, chars casted to bytes, *not* base indices in byte form)
+     *
+     * @param bases the byte array of bases
+     * @return the reverse complement of the base byte array
+     */
+    static public byte[] simpleReverseComplement(byte[] bases) {
+        byte[] rcbases = new byte[bases.length];
+
+        for (int i = 0; i < bases.length; i++) {
+            rcbases[i] = simpleComplement(bases[bases.length - 1 - i]);
+        }
+
+        return rcbases;
+    }
+
+    /**
+     * Reverse complement a char array of bases
+     *
+     * @param bases the char array of bases
+     * @return the reverse complement of the char byte array
+     */
+    @Deprecated
+    static public char[] simpleReverseComplement(char[] bases) {
+        char[] rcbases = new char[bases.length];
+
+        for (int i = 0; i < bases.length; i++) {
+            rcbases[i] = simpleComplement(bases[bases.length - 1 - i]);
+        }
+
+        return rcbases;
+    }
+
+    /**
+     * Reverse complement a String of bases.  Preserves ambiguous bases.
+     *
+     * @param bases the String of bases
+     * @return the reverse complement of the String
+     */
+    @Deprecated
+    static public String simpleReverseComplement(String bases) {
+        return new String(simpleReverseComplement(bases.getBytes()));
+    }
+
    /**
     * Upper-cases the given bases; nothing is returned (the original javadoc's "@return the
     * upper cased version" was wrong -- the method is void).
     *
     * NOTE(review): delegates to {@link StringUtil#toUpperCase(byte[])} and is presumed to
     * mutate {@code bases} in place, since no value is returned -- confirm against htsjdk docs.
     *
     * @param bases   the bases to upper-case
     */
    static public void convertToUpperCase(final byte[] bases) {
        StringUtil.toUpperCase(bases);
    }
+
+    /**
+     * Returns the index of the most common base in the basecounts array. To be used with
+     * pileup.getBaseCounts.
+     *
+     * @param baseCounts counts of a,c,g,t in order.
+     * @return the index of the most common base
+     */
+    static public int mostFrequentBaseIndex(int[] baseCounts) {
+        int mostFrequentBaseIndex = 0;
+        for (int baseIndex = 1; baseIndex < 4; baseIndex++) {
+            if (baseCounts[baseIndex] > baseCounts[mostFrequentBaseIndex]) {
+                mostFrequentBaseIndex = baseIndex;
+            }
+        }
+        return mostFrequentBaseIndex;
+    }
+
+    static public int mostFrequentBaseIndexNotRef(int[] baseCounts, int refBaseIndex) {
+        int tmp = baseCounts[refBaseIndex];
+        baseCounts[refBaseIndex] = -1;
+        int result = mostFrequentBaseIndex(baseCounts);
+        baseCounts[refBaseIndex] = tmp;
+        return result;
+    }
+
    /**
     * Returns the index of the most common base, excluding the given reference base.
     *
     * @param baseCounts    counts of a,c,g,t in order
     * @param refSimpleBase the reference base (one of [AaCcGgTt]) to exclude
     * @return the index of the most common non-reference base
     */
    static public int mostFrequentBaseIndexNotRef(int[] baseCounts, byte refSimpleBase) {
        return mostFrequentBaseIndexNotRef(baseCounts, simpleBaseToBaseIndex(refSimpleBase));
    }
+
    /**
     * Returns the most common base in the basecounts array. To be used with pileup.getBaseCounts.
     *
     * @param baseCounts counts of a,c,g,t in order.
     * @return the most common base as a byte character (A, C, G or T)
     */
    static public byte mostFrequentSimpleBase(int[] baseCounts) {
        return baseIndexToSimpleBase(mostFrequentBaseIndex(baseCounts));
    }
+
+    /**
+     * For the most frequent base in the sequence, return the percentage of the read it constitutes.
+     *
+     * @param sequence the read sequence
+     * @return the percentage of the read that's made up of the most frequent base
+     */
+    static public double mostFrequentBaseFraction(byte[] sequence) {
+        int[] baseCounts = new int[4];
+
+        for (byte base : sequence) {
+            int baseIndex = simpleBaseToBaseIndex(base);
+
+            if (baseIndex >= 0) {
+                baseCounts[baseIndex]++;
+            }
+        }
+
+        int mostFrequentBaseIndex = mostFrequentBaseIndex(baseCounts);
+
+        return ((double) baseCounts[mostFrequentBaseIndex]) / ((double) sequence.length);
+    }
+
+    // --------------------------------------------------------------------------------
+    //
+    // random bases
+    //
+    // --------------------------------------------------------------------------------
+
    /**
     * Return a random base index (A=0, C=1, G=2, T=3).
     *
     * @return a random base index (A=0, C=1, G=2, T=3)
     */
    static public int getRandomBaseIndex() {
        // -1 excludes nothing: any of the four indices may be returned.
        return getRandomBaseIndex(-1);
    }
+
+    /**
+     * Return random bases.
+     *
+     * @param length base count and length of returned array.
+     *
+     * @throws IllegalArgumentException if {@code length} is less than 0.
+     *
+     * @return never {@code null}
+     */
+    @SuppressWarnings("unused")
+    public static byte[] getRandomBases(final int length) {
+        if (length < 0)
+            throw new IllegalArgumentException("length must zero or greater");
+        final byte[] result = new byte[length];
+        fillWithRandomBases(result);
+        return result;
+    }
+
+    /**
+     * Fills an array with random bases.
+     *
+     * @param dest the array to fill.
+     *
+     * @throws IllegalArgumentException if {@code result} is {@code null}.
+     */
+    public static void fillWithRandomBases(final byte[] dest) {
+        fillWithRandomBases(dest,0,dest.length);
+    }
+
+    /**
+     * Fill an array section with random bases.
+     *
+     * @param dest array to fill.
+     * @param fromIndex first index to be filled (inclusive).
+     * @param toIndex index after last to be filled (exclusive).
+     *
+     * @throws IllegalArgumentException if {@code dest} is {@code null},
+     *              {@code fromIndex} or {@code toIndex} is negative,
+     *              {@code fromIndex} or {@code toIndex} are greater than {@code dest} length,
+     *              or {@code fromIndex} greater than {@code toIndex}.
+     */
+    public static void fillWithRandomBases(final byte[] dest, final int fromIndex, final int toIndex) {
+        final Random rnd = Utils.getRandomGenerator();
+        if (dest == null)
+            throw new IllegalArgumentException("the dest array cannot be null");
+        if (fromIndex > toIndex)
+            throw new IllegalArgumentException("fromIndex cannot be larger than toIndex");
+        if (fromIndex < 0)
+            throw new IllegalArgumentException("both indexes must be positive");
+        if (toIndex > dest.length)
+            throw new IllegalArgumentException("both indexes must be less or equal to the destination array length");
+
+        for (int i = fromIndex; i < toIndex; i++)
+            dest[i] = baseIndexToSimpleBase(rnd.nextInt(4));
+    }
+
+    /**
+     * Return a random base index, excluding some base index.
+     *
+     * @param excludeBaseIndex the base index to exclude
+     * @return a random base index, excluding the one specified (A=0, C=1, G=2, T=3)
+     */
+    static public int getRandomBaseIndex(int excludeBaseIndex) {
+        int randomBaseIndex = excludeBaseIndex;
+
+        while (randomBaseIndex == excludeBaseIndex) {
+            randomBaseIndex = Utils.getRandomGenerator().nextInt(4);
+        }
+
+        return randomBaseIndex;
+    }
+
+    public static byte getComplement(byte base) {
+        switch(base) {
+            case 'a':
+            case 'A':
+                return 'T';
+            case 'c':
+            case 'C':
+                return 'G';
+            case 'g':
+            case 'G':
+                return 'C';
+            case 't':
+            case 'T':
+                return 'A';
+            case 'n':
+            case 'N':
+                return 'N';
+            default:
+                throw new ReviewedGATKException("base must be A, C, G or T. " + (char) base + " is not a valid base.");
+        }
+    }
+
+
+    /**
+     * Lexicographical sorting of base arrays {@link Comparator}.
+     */
+    public static final Comparator<byte[]> BASES_COMPARATOR = new Comparator<byte[]> (){
+
+        @Override
+        public int compare(final byte[] o1,final byte[] o2) {
+            final int minLength = Math.min(o1.length,o2.length);
+            for (int i = 0; i < minLength; i++) {
+                final int cmp = Byte.compare(o1[i],o2[i]);
+                if (cmp != 0) return cmp;
+            }
+            if (o1.length == o2.length)
+                return 0;
+            else if (o1.length == minLength)
+                return -1;
+            else
+                return 1;
+        }
+    };
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/BitSetUtils.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/BitSetUtils.java
new file mode 100644
index 0000000..20b6026
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/BitSetUtils.java
@@ -0,0 +1,134 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils;
+
+import java.util.BitSet;
+import java.util.HashMap;
+import java.util.Map;
+
/**
 * Utilities for bitset conversion
 *
 * @author Mauricio Carneiro
 * @since 3/5/12
 */
public class BitSetUtils {

    // Bit widths of the two supported representations; the width matters so that negative
    // numbers get a two's complement encoding of the correct size.
    static final private byte NBITS_LONG_REPRESENTATION = 64;
    static final private byte NBITS_SHORT_REPRESENTATION = 16;

    /**
     * Creates a long out of a bitset
     *
     * @param bitSet the bitset
     * @return a long from the bitset representation
     */
    public static long longFrom(final BitSet bitSet) {
        return longFrom(bitSet, NBITS_LONG_REPRESENTATION);
    }

    /**
     * Creates a short integer from a bitset
     *
     * @param bitSet the bitset
     * @return a short from the bitset representation
     */
    public static short shortFrom(final BitSet bitSet) {
        return (short) longFrom(bitSet, NBITS_SHORT_REPRESENTATION);
    }

    /**
     * Creates an integer with any number of bits (up to 64 -- long precision) from a bitset
     *
     * @param bitSet the bitset
     * @param nBits  the number of bits to be used for this representation
     * @return an integer with nBits from the bitset representation
     */
    public static long longFrom(final BitSet bitSet, final int nBits) {
        long number = 0;
        // Valid bit indices for an nBits-wide value are 0 .. nBits-1.  The original bound
        // ("bitIndex <= nBits") was off by one: for nBits == 64 a set bit 64 would be shifted
        // by 64 places, which Java masks to a shift of 0 and so would corrupt bit 0.
        for (int bitIndex = bitSet.nextSetBit(0); bitIndex >= 0 && bitIndex < nBits; bitIndex = bitSet.nextSetBit(bitIndex + 1))
            number |= 1L << bitIndex;

        return number;
    }

    /**
     * Creates a BitSet representation of a given long
     *
     * @param number the number to turn into a bitset
     * @return a bitset representation of the long
     */
    public static BitSet bitSetFrom(long number) {
        return bitSetFrom(number, NBITS_LONG_REPRESENTATION);
    }

    /**
     * Creates a BitSet representation of a given short.  Results are cached, so repeated
     * conversions of the same value return the same (shared) BitSet instance -- callers must
     * not mutate it.
     *
     * @param number the number to turn into a bitset
     * @return a bitset representation of the short
     */
    public static BitSet bitSetFrom(short number) {
        BitSet result = shortCache.get(number);
        if (result == null) {
            result = bitSetFrom(number, NBITS_SHORT_REPRESENTATION);
            shortCache.put(number, result);
        }
        return result;
    }
    // Static cache for shorts (but not for longs, because there could be a lot of entries).
    // NOTE(review): a plain HashMap, so not safe for concurrent callers -- confirm this class
    // is only used single-threaded, or switch to ConcurrentHashMap.
    private static final Map<Short, BitSet> shortCache = new HashMap<Short, BitSet>(2 * Short.MAX_VALUE);

    /**
     * Creates a BitSet representation of an arbitrary integer (number of bits capped at 64 -- long precision)
     *
     * @param number the number to turn into a bitset
     * @param nBits  the number of bits to use as precision for this conversion
     * @return a bitset representation of the integer
     */
    public static BitSet bitSetFrom(long number, int nBits) {
        BitSet bitSet = new BitSet(nBits);
        boolean isNegative = number < 0;
        int bitIndex = 0;
        // Record the bits of the magnitude.  For negative inputs Java's truncating division
        // walks the magnitude's bits, because number % 2 is -1 for odd negative values.
        while (number != 0) {
            if (number % 2 != 0)
                bitSet.set(bitIndex);
            bitIndex++;
            number /= 2;
        }
        if (isNegative) {
            // Two's complement: keep everything up to and including the lowest set bit, then
            // flip every higher bit out to the representation width (nBits).
            boolean foundFirstSetBit = false;
            for (int i = bitSet.nextSetBit(0); i < nBits && i >= 0; i++) {
                boolean bit = bitSet.get(i);
                if (!foundFirstSetBit && bit)
                    foundFirstSetBit = true;    // maintain all bits until the first 1 is found (inclusive)
                else if (foundFirstSetBit)
                    bitSet.flip(i);             // flip every other bit up to nBits
            }
        }
        return bitSet;
    }
}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/ContigComparator.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/ContigComparator.java
new file mode 100644
index 0000000..a069415
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/ContigComparator.java
@@ -0,0 +1,80 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils;
+
+import htsjdk.samtools.SAMSequenceDictionary;
+
+import java.util.Comparator;
+import java.util.Set;
+import java.util.TreeSet;
+
+/**
+ * Created by IntelliJ IDEA.
+ * User: carneiro
+ * Date: 7/23/11
+ * Time: 6:07 PM
+ *
+ * Contig comparator -- sorting contigs like Picard
+ *
+ *   This is very useful if you want to output your text files or manipulate data in the usual chromosome ordering :
+ *    1
+ *    2
+ *    3
+ *    ...
+ *    21
+ *    22
+ *    X
+ *    Y
+ *    GL***
+ *    ...
+ * Just use this comparator in any SortedSet class constructor and your data will be sorted like in the BAM file.
+ */
+public class ContigComparator implements Comparator<String> {
+    final SAMSequenceDictionary dict;
+
+    public ContigComparator(final SAMSequenceDictionary dict) {
+        if ( dict == null ) throw new IllegalArgumentException("dict cannot be null");
+        this.dict = dict;
+    }
+
+    @Override
+    public int compare(final String chr1, final String chr2) {
+        final int index1 = getIndex(chr1);
+        final int index2 = getIndex(chr2);
+        return Integer.valueOf(index1).compareTo(index2);
+    }
+
+    /**
+     * Convert contig to its index in the dict, or throw an exception if it's not found or is null
+     * @param chr the contig
+     */
+    private int getIndex(final String chr) {
+        if ( chr == null ) throw new IllegalArgumentException("chr is null");
+        final int index = dict.getSequenceIndex(chr);
+        if ( index == -1 ) throw new IllegalArgumentException("Unknown contig " + chr);
+        return index;
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/DeprecatedToolChecks.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/DeprecatedToolChecks.java
new file mode 100644
index 0000000..53613ca
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/DeprecatedToolChecks.java
@@ -0,0 +1,107 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils;
+
+import it.unimi.dsi.fastutil.objects.Object2ObjectMap;
+import it.unimi.dsi.fastutil.objects.Object2ObjectOpenHashMap;
+
+import java.util.*;
+
/**
 * Utility class for handling deprecated tools gracefully
 *
 * @author vdauwera
 * @since 3/11/13
 */
public class DeprecatedToolChecks {

    // Mapping from walker name to the major version where the walker first disappeared, plus an
    // optional recommended replacement in parentheses.  Uses a typed java.util Map instead of
    // the former raw fastutil Object2ObjectMap: this removes raw-type warnings and an
    // unnecessary third-party dependency for two small, effectively read-only lookup tables.
    private static final Map<String, String> deprecatedGATKWalkers = new HashMap<String, String>();
    static {
        // Indicate recommended replacement in parentheses if applicable
        deprecatedGATKWalkers.put("ReduceReads", "3.0 (use recommended best practices pipeline with the HaplotypeCaller)");
        deprecatedGATKWalkers.put("CountCovariates", "2.0 (use BaseRecalibrator instead; see documentation for usage)");
        deprecatedGATKWalkers.put("TableRecalibration", "2.0 (use PrintReads with -BQSR instead; see documentation for usage)");
        deprecatedGATKWalkers.put("AlignmentWalker", "2.2 (no replacement)");
        deprecatedGATKWalkers.put("CountBestAlignments", "2.2 (no replacement)");
        deprecatedGATKWalkers.put("SomaticIndelDetector", "2.0 (replaced by MuTect2; see documentation for usage)");
        // Version below fixed from "3,4" to "3.4" to match the other entries' format.
        deprecatedGATKWalkers.put("BeagleOutputToVCF", "3.4 (replaced by Beagle native functions; see Beagle 4 documentation at https://faculty.washington.edu/browning/beagle/beagle.html)");
        deprecatedGATKWalkers.put("VariantsToBeagleUnphased", "3.4 (replaced by Beagle native functions; see Beagle 4 documentation at https://faculty.washington.edu/browning/beagle/beagle.html)");
        deprecatedGATKWalkers.put("ProduceBeagleInput", "3.4 (replaced by Beagle native functions; see Beagle 4 documentation at https://faculty.washington.edu/browning/beagle/beagle.html)");
        deprecatedGATKWalkers.put("ReadAdaptorTrimmer","3.5 (this tool was unsound and untested -- no specific replacement, see Picard tools for alternatives)");
        deprecatedGATKWalkers.put("BaseCoverageDistribution","3.5 (use DiagnoseTargets instead; see documentation for usage)");
        deprecatedGATKWalkers.put("CoveredByNSamplesSites","3.5 (use DiagnoseTargets instead; see documentation for usage)");
        deprecatedGATKWalkers.put("VariantValidationAssessor","3.5 (this tool was unsound and untested -- no replacement)");
        deprecatedGATKWalkers.put("LiftOverVariants","3.5 (use Picard LiftoverVCF instead; see documentation for usage)");
        deprecatedGATKWalkers.put("FilterLiftedVariants","3.5 (use Picard LiftoverVCF instead; see documentation for usage)");
        deprecatedGATKWalkers.put("ListAnnotations","3.5 (this tool was impractical; see the online documentation instead)");
    }

    // Mapping from annotation name to the major version where the annotation first disappeared
    // and optional replacement options.
    private static final Map<String, String> deprecatedGATKAnnotations = new HashMap<String, String>();
    static {
        // Same comments as for walkers
        deprecatedGATKAnnotations.put("DepthOfCoverage", "2.4 (renamed to Coverage)");
    }

    /**
     * Utility method to check whether a given walker has been deprecated in a previous GATK release
     *
     * @param walkerName   the walker class name (not the full package) to check
     * @return true if the walker is deprecated
     */
    public static boolean isDeprecatedWalker(final String walkerName) {
        return deprecatedGATKWalkers.containsKey(walkerName);
    }

    /**
     * Utility method to check whether a given annotation has been deprecated in a previous GATK release
     *
     * @param annotationName   the annotation class name (not the full package) to check
     * @return true if the annotation is deprecated
     */
    public static boolean isDeprecatedAnnotation(final String annotationName) {
        return deprecatedGATKAnnotations.containsKey(annotationName);
    }

    /**
     * Utility method to pull up the version number at which a walker was deprecated and the suggested replacement, if any
     *
     * @param walkerName   the walker class name (not the full package) to check
     * @return the deprecation info string, or null if the walker was never deprecated
     *         (previously this threw a NullPointerException via .toString() on a missing entry;
     *         callers should check {@link #isDeprecatedWalker(String)} first)
     */
    public static String getWalkerDeprecationInfo(final String walkerName) {
        return deprecatedGATKWalkers.get(walkerName);
    }

    /**
     * Utility method to pull up the version number at which an annotation was deprecated and the suggested replacement, if any
     *
     * @param annotationName   the annotation class name (not the full package) to check
     * @return the deprecation info string, or null if the annotation was never deprecated
     */
    public static String getAnnotationDeprecationInfo(final String annotationName) {
        return deprecatedGATKAnnotations.get(annotationName);
    }

}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/GenomeLoc.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/GenomeLoc.java
index 101796b..d41072a 100644
--- a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/GenomeLoc.java
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/GenomeLoc.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
@@ -62,6 +62,7 @@ public class GenomeLoc implements Comparable<GenomeLoc>, Serializable, HasGenome
     // TODO - WARNING WARNING WARNING code somehow depends on the name of the contig being null!
     public static final GenomeLoc UNMAPPED = new GenomeLoc((String)null);
     public static final GenomeLoc WHOLE_GENOME = new GenomeLoc("all");
+    public static final GenomeLoc END_OF_GENOME = new GenomeLoc("Y", 23, 59347566, 59347566);
 
     public static final boolean isUnmapped(GenomeLoc loc) {
         return loc == UNMAPPED;
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/GenomeLocParser.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/GenomeLocParser.java
new file mode 100644
index 0000000..9cac5d5
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/GenomeLocParser.java
@@ -0,0 +1,622 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils;
+
+import com.google.java.contract.Ensures;
+import com.google.java.contract.Requires;
+import com.google.java.contract.ThrowEnsures;
+import htsjdk.samtools.reference.ReferenceSequenceFile;
+import htsjdk.samtools.SAMRecord;
+import htsjdk.samtools.SAMSequenceDictionary;
+import htsjdk.samtools.SAMSequenceRecord;
+import org.apache.log4j.Logger;
+import htsjdk.tribble.Feature;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+import org.broadinstitute.gatk.utils.exceptions.UserException;
+
+/**
+ * Factory class for creating GenomeLocs
+ */
+public final class GenomeLocParser {
+    // NOTE(review): could be private static final
+    private static Logger logger = Logger.getLogger(GenomeLocParser.class);
+
+    /**
+     * How much validation should we do at runtime with this parser?
+     */
+    public enum ValidationLevel {
+        /** Do the standard amount of validation */
+        STANDARD,
+        /** Don't do any real checking at all */
+        NONE
+    }
+
+    // --------------------------------------------------------------------------------------------------------------
+    //
+    // Ugly global variable defining the optional ordering of contig elements
+    //
+    // --------------------------------------------------------------------------------------------------------------
+
+    /**
+     * This single variable holds the underlying SAMSequenceDictionary used by the GATK.  We assume
+     * it is thread safe.
+     */
+    final private SAMSequenceDictionary SINGLE_MASTER_SEQUENCE_DICTIONARY;
+
+    /**
+     * A thread-local MRUCachingSAMSequenceDictionary: one cache per thread, each wrapping the
+     * single master dictionary above.  NOTE(review): per-thread copies are presumably needed
+     * because the MRU cache itself is not thread safe -- confirm against
+     * MRUCachingSAMSequenceDictionary.
+     */
+    private final ThreadLocal<MRUCachingSAMSequenceDictionary> contigInfoPerThread =
+            new ThreadLocal<MRUCachingSAMSequenceDictionary>() {
+                @Override
+                protected MRUCachingSAMSequenceDictionary initialValue() {
+                    return new MRUCachingSAMSequenceDictionary(SINGLE_MASTER_SEQUENCE_DICTIONARY);
+                }
+            };
+
+    /**
+     * How much validation are we doing at runtime with this GenomeLocParser?
+     */
+    private final ValidationLevel validationLevel;
+
+    /**
+     * @return a caching sequence dictionary appropriate for this thread
+     */
+    private MRUCachingSAMSequenceDictionary getContigInfo() {
+        return contigInfoPerThread.get();
+    }
+
+    /**
+     * set our internal reference contig order
+     * @param refFile the reference file; its sequence dictionary supplies the contig names, indices and lengths
+     */
+    @Requires("refFile != null")
+    public GenomeLocParser(final ReferenceSequenceFile refFile) {
+        this(refFile.getSequenceDictionary());
+    }
+
+    /**
+     * Create a new GenomeLocParser based on seqDictionary with the standard validation level
+     * @param seqDict a non-null sequence dictionary
+     */
+    public GenomeLocParser(SAMSequenceDictionary seqDict) {
+        this(seqDict, ValidationLevel.STANDARD);
+    }
+
+    /**
+     * Create a genome loc parser based on seqDict with the specified level of validation
+     * @param seqDict the sequence dictionary to use when creating genome locs
+     * @param validationLevel how much validation should we do of the genome locs at runtime? Purely for testing purposes
+     * @throws IllegalArgumentException if validationLevel is null
+     * @throws UserException.CommandLineException if seqDict is null (i.e. the reference dictionary could not be loaded)
+     */
+    protected GenomeLocParser(SAMSequenceDictionary seqDict, final ValidationLevel validationLevel) {
+        if (validationLevel == null)
+            throw new IllegalArgumentException("validation level cannot be null");
+        if (seqDict == null) { // we couldn't load the reference dictionary
+            throw new UserException.CommandLineException("Failed to load reference dictionary");
+        }
+
+        this.validationLevel = validationLevel;
+        this.SINGLE_MASTER_SEQUENCE_DICTIONARY = seqDict;
+        if ( logger.isDebugEnabled() ) {
+            // only walk the (potentially large) dictionary when debug logging is actually enabled
+            logger.debug(String.format("Prepared reference sequence contig dictionary"));
+            for (SAMSequenceRecord contig : seqDict.getSequences()) {
+                logger.debug(String.format(" %s (%d bp)", contig.getSequenceName(), contig.getSequenceLength()));
+            }
+        }
+    }
+
+    /**
+     * Determines whether the given contig is valid with respect to the sequence dictionary
+     * already installed in the GenomeLoc.
+     *
+     * @param contig a potentially null string name for the contig
+     * @return True if the contig is valid.  False otherwise (including when contig is null).
+     */
+    public final boolean contigIsInDictionary(final String contig) {
+        return contig != null && getContigInfo().hasContig(contig);
+    }
+
+    /**
+     * get the contig's SAMSequenceRecord
+     *
+     * @param contig the string name of the contig
+     *
+     * @return the sam sequence record, never null
+     * @throws UserException.MalformedGenomeLoc if contig is null or not in the sequence dictionary
+     */
+    @Ensures("result != null")
+    @ThrowEnsures({"UserException.MalformedGenomeLoc", "!contigIsInDictionary(contig) || contig == null"})
+    public final SAMSequenceRecord getContigInfo(final String contig) {
+        if ( contig == null || ! contigIsInDictionary(contig) )
+            throw new UserException.MalformedGenomeLoc(String.format("Contig %s given as location, but this contig isn't present in the Fasta sequence dictionary", contig));
+        return getContigInfo().getSequence(contig);
+    }
+
+    /**
+     * Returns the contig index of a specified string version of the contig
+     *
+     * @param contig the contig string
+     *
+     * @return the contig index (the original doc claimed "-1 if not found", but this variant
+     *         throws instead; see getContigIndexWithoutException for the -1-on-failure version)
+     * @throws UserException.MalformedGenomeLoc if contig is null or not in the sequence dictionary
+     */
+    @Ensures("result >= 0")
+    @ThrowEnsures({"UserException.MalformedGenomeLoc", "!contigIsInDictionary(contig) || contig == null"})
+    public final int getContigIndex(final String contig) {
+        return getContigInfo(contig).getSequenceIndex();
+    }
+
+    /**
+     * Returns the contig index of contig, or -1 if contig is null or not in the dictionary.
+     * NOTE(review): the @Requires("contig != null") contract contradicts the explicit null
+     * check in the body; one of the two should be removed.
+     */
+    @Requires("contig != null")
+    protected int getContigIndexWithoutException(final String contig) {
+        if ( contig == null || ! getContigInfo().hasContig(contig) )
+            return -1;
+        return getContigInfo().getSequenceIndex(contig);
+    }
+
+    /**
+     * Return the master sequence dictionary used within this GenomeLocParser
+     * @return the SAMSequenceDictionary this parser was constructed around
+     */
+    public final SAMSequenceDictionary getContigs() {
+        return getContigInfo().getDictionary();
+    }
+
+    // --------------------------------------------------------------------------------------------------------------
+    //
+    // Low-level creation functions
+    //
+    // --------------------------------------------------------------------------------------------------------------
+
+    /**
+     * @see #createGenomeLoc(String, int, int, int, boolean) for exact details of the creation.
+     *
+     * Note that because this function doesn't take the contig index as an argument for contig, it
+     * has a slight performance penalty over the version that does take the contig index.  Does not
+     * require the created genome loc on the reference genome (delegates with mustBeOnReference == false).
+     *
+     * NOTE(review): the @ThrowEnsures contract below uses the 3-arg isValidGenomeLoc, which checks
+     * with mustBeOnReference == true -- that looks stricter than the actual delegation; confirm.
+     */
+    @Ensures("result != null")
+    @ThrowEnsures({"UserException.MalformedGenomeLoc", "!isValidGenomeLoc(contig, start, stop)"})
+    public GenomeLoc createGenomeLoc(String contig, final int start, final int stop) {
+        return createGenomeLoc(contig, getContigIndex(contig), start, stop);
+    }
+
+    /**
+     * @see #createGenomeLoc(String, int, int, int, boolean) for exact details of the creation.
+     *
+     * Note that because this function doesn't take the contig index as an argument for contig, it
+     * has a slight performance penalty over the version that does take the contig index.
+     */
+    public GenomeLoc createGenomeLoc(final String contig, final int start, final int stop, boolean mustBeOnReference) {
+        return createGenomeLoc(contig, getContigIndex(contig), start, stop, mustBeOnReference);
+    }
+
+    /**
+     * @see #createGenomeLoc(String, int, int, int, boolean) for exact details of the creation.
+     *
+     * Doesn't require the start and stop to be on the genome
+     */
+    @ThrowEnsures({"UserException.MalformedGenomeLoc", "!isValidGenomeLoc(contig, start, stop, false)"})
+    public GenomeLoc createGenomeLoc(String contig, int index, final int start, final int stop) {
+        return createGenomeLoc(contig, index, start, stop, false);
+    }
+
+    /**
+     * Create a GenomeLoc on contig, starting at start and ending (inclusive) at stop.
+     *
+     * @param contig the contig name
+     * @param index the index into the GATK's SAMSequencingDictionary of contig (passed for efficiency to avoid the lookup)
+     * @param start the starting position
+     * @param stop  the stop position of this loc, inclusive
+     * @param mustBeOnReference if true, this factory will throw a UserException.MalformedGenomeLoc if start or stop isn't on the contig
+     *
+     * @return a non-null GenomeLoc
+     */
+    @ThrowEnsures({"UserException.MalformedGenomeLoc", "!isValidGenomeLoc(contig, start, stop,mustBeOnReference)"})
+    @Ensures("result != null")
+    public GenomeLoc createGenomeLoc(final String contig, int index, final int start, final int stop, boolean mustBeOnReference) {
+        // optimization: validateGenomeLoc returns the String instance held by the sequence
+        // dictionary itself, so future contig comparisons can use == instead of a full string
+        // comparison.  NOTE(review): when validationLevel == NONE the caller's string is
+        // returned unchanged, so the == shortcut is not guaranteed in that mode.
+        final String interned = validateGenomeLoc(contig, index, start, stop, mustBeOnReference);
+        return new GenomeLoc(interned, index, start, stop);
+    }
+
+    /**
+     * Create a new GenomeLoc, on contig, including the single position pos.
+     *
+     * Pos is not required to be on the reference
+     *
+     * NOTE(review): the @ThrowEnsures contract below checks isValidGenomeLoc(..., true), but the
+     * delegated call uses the overload that passes mustBeOnReference == false, matching the prose
+     * above; the contract annotation looks stale -- confirm.
+     *
+     * @see #createGenomeLoc(String, int, int, int, boolean) for exact details of the creation.
+     *
+     * @param contig the contig name
+     * @param pos    the start and stop of the created genome loc
+     *
+     * @return a genome loc representing a single base at the specified position on the contig
+     */
+    @Ensures("result != null")
+    @ThrowEnsures({"UserException.MalformedGenomeLoc", "!isValidGenomeLoc(contig, pos, pos, true)"})
+    public GenomeLoc createGenomeLoc(final String contig, final int pos) {
+        return createGenomeLoc(contig, getContigIndex(contig), pos, pos);
+    }
+
+    /**
+     * validate a position or interval on the genome as valid
+     *
+     * Requires that contig exist in the master sequence dictionary, and that contig index be valid as well.  Requires
+     * that start <= stop.
+     *
+     * if mustBeOnReference is true,
+     * performs boundary validation for genome loc INTERVALS:
+     * start and stop are on contig and start <= stop
+     *
+     * @param contig the contig name
+     * @param contigIndex the expected index of contig within the sequence dictionary
+     * @param start  the start position
+     * @param stop   the stop position
+     * @param mustBeOnReference if true, additionally require 1 <= start and stop <= contig length
+     *
+     * @return the interned contig name, an optimization that ensures that contig == the string in the sequence dictionary
+     */
+    protected String validateGenomeLoc(final String contig, final int contigIndex, final int start, final int stop, final boolean mustBeOnReference) {
+        // with ValidationLevel.NONE we skip every check AND skip canonicalizing the contig name
+        if ( validationLevel == ValidationLevel.NONE )
+            return contig;
+        else {
+            if (stop < start)
+                vglHelper(String.format("The stop position %d is less than start %d in contig %s", stop, start, contig));
+
+            // NOTE(review): assumes getSequence throws for unknown contigs -- confirm in
+            // MRUCachingSAMSequenceDictionary
+            final SAMSequenceRecord contigInfo = getContigInfo().getSequence(contig);
+            if ( contigInfo.getSequenceIndex() != contigIndex )
+                vglHelper(String.format("The contig index %d is bad, doesn't equal the contig index %d of the contig from a string %s",
+                        contigIndex, contigInfo.getSequenceIndex(), contig));
+
+            if ( mustBeOnReference ) {
+                if (start < 1)
+                    vglHelper(String.format("The start position %d is less than 1", start));
+
+                if (stop < 1)
+                    vglHelper(String.format("The stop position %d is less than 1", stop));
+
+                final int contigSize = contigInfo.getSequenceLength();
+                if (start > contigSize || stop > contigSize)
+                    vglHelper(String.format("The genome loc coordinates %d-%d exceed the contig size (%d)", start, stop, contigSize));
+            }
+
+            // return the dictionary's own String instance for this contig (see the "interned" doc above)
+            return contigInfo.getSequenceName();
+        }
+    }
+
+    /**
+     * Would a genome loc created with the given parameters be valid w.r.t. the master sequence dictionary?
+     * @param contig the contig we'd use
+     * @param start the start position
+     * @param stop the stop
+     * @param mustBeOnReference should we require the resulting genome loc to be completely on the reference genome?
+     * @return true if this would produce a valid genome loc, false otherwise
+     */
+    public boolean isValidGenomeLoc(String contig, int start, int stop, boolean mustBeOnReference ) {
+        try {
+            validateGenomeLoc(contig, getContigIndexWithoutException(contig), start, stop, mustBeOnReference);
+            return true;
+        } catch ( ReviewedGATKException e) {
+            // any validation failure (NOTE(review): assumes UserException.MalformedGenomeLoc, thrown
+            // by vglHelper, is a ReviewedGATKException in this hierarchy -- confirm) becomes "invalid"
+            return false;
+        }
+    }
+
+    /**
+     * @see #isValidGenomeLoc(String, int, int, boolean) with mustBeOnReference == true
+     */
+    public boolean isValidGenomeLoc(String contig, int start, int stop ) {
+        return isValidGenomeLoc(contig, start, stop, true);
+    }
+
+    /** Throws a UserException.MalformedGenomeLoc with a standard prefix in front of msg. */
+    private void vglHelper(final String msg) {
+        // fixed: the prefix previously ran together with msg ("incorrect:The stop position...")
+        throw new UserException.MalformedGenomeLoc("Parameters to GenomeLocParser are incorrect: " + msg);
+    }
+
+    // --------------------------------------------------------------------------------------------------------------
+    //
+    // Parsing genome locs
+    //
+    // --------------------------------------------------------------------------------------------------------------
+
+    /**
+     * parse a genome interval, from a location string
+     *
+     * Accepted forms: 'chr2' (whole contig), 'chr2:1000000' (single position),
+     * 'chr2:1000000+' (position through end of contig), or 'chr2:1,000,000-2,000,000'
+     * (interval; commas within positions are permitted).
+     *
+     * Performs interval-style validation:
+     *
+     * contig is valid; start and stop less than the end; start <= stop, and start/stop are on the contig
+     * @param str the string to parse
+     *
+     * @return a GenomeLoc representing the String
+     *
+     * @throws UserException if the positions can't be parsed as numbers
+     * @throws UserException.MalformedGenomeLoc if the contig isn't in the sequence dictionary
+     */
+    @Requires("str != null")
+    @Ensures("result != null")
+    public GenomeLoc parseGenomeLoc(final String str) {
+        String contig = null;
+        int start = 1;
+        int stop = -1;
+
+        // use the LAST colon so contig names that themselves contain ':' still parse
+        final int colonIndex = str.lastIndexOf(":");
+        if(colonIndex == -1) {
+            contig = str.substring(0, str.length());  // chr1
+            stop = Integer.MAX_VALUE;  // sentinel: replaced by the contig length below
+        } else {
+            contig = str.substring(0, colonIndex);
+            final int dashIndex = str.indexOf('-', colonIndex);
+            try {
+                if(dashIndex == -1) {
+                    if(str.charAt(str.length() - 1) == '+') {
+                        start = parsePosition(str.substring(colonIndex + 1, str.length() - 1));  // chr:1+
+                        stop = Integer.MAX_VALUE;
+                    } else {
+                        start = parsePosition(str.substring(colonIndex + 1));   // chr1:1
+                        stop = start;
+                    }
+                } else {
+                    start = parsePosition(str.substring(colonIndex + 1, dashIndex));  // chr1:1-1
+                    stop = parsePosition(str.substring(dashIndex + 1));
+                }
+            } catch(Exception e) {
+                throw new UserException("Failed to parse Genome Location string: " + str, e);
+            }
+        }
+
+        // is the contig valid?
+        if (!contigIsInDictionary(contig))
+            throw new UserException.MalformedGenomeLoc("Contig '" + contig + "' does not match any contig in the GATK sequence dictionary derived from the reference; are you sure you are using the correct reference fasta file?");
+
+        if (stop == Integer.MAX_VALUE)
+            // lookup the actually stop position!
+            stop = getContigInfo(contig).getSequenceLength();
+
+        return createGenomeLoc(contig, getContigIndex(contig), start, stop, true);
+    }
+
+    /**
+     * Parses a 1-based position like "1,000,000" (commas permitted) into an int.
+     * (The original doc said "into a long", but the return type is int.)
+     *
+     * @param pos the non-null position string
+     * @return the parsed, non-negative position
+     * @throws NumberFormatException if pos contains '-', any non-digit character other than ',',
+     *         or otherwise fails to parse as an int
+     */
+    @Requires("pos != null")
+    @Ensures("result >= 0")
+    protected int parsePosition(final String pos) {
+        if(pos.indexOf('-') != -1) {
+            throw new NumberFormatException("Position: '" + pos + "' can't contain '-'." );
+        }
+
+        if(pos.indexOf(',') != -1) {
+            // strip commas, rejecting anything that isn't a digit
+            final StringBuilder buffer = new StringBuilder();
+            for(int i = 0; i < pos.length(); i++) {
+                final char c = pos.charAt(i);
+
+                if(c == ',') {
+                    continue;
+                } else if(c < '0' || c > '9') {
+                    throw new NumberFormatException("Position: '" + pos + "' contains invalid chars." );
+                } else {
+                    buffer.append(c);
+                }
+            }
+            return Integer.parseInt(buffer.toString());
+        } else {
+            return Integer.parseInt(pos);
+        }
+    }
+
+    // --------------------------------------------------------------------------------------------------------------
+    //
+    // Parsing string representations
+    //
+    // --------------------------------------------------------------------------------------------------------------
+
+    /**
+     * Create a genome loc, given a read. If the read is unmapped, *and* yet the read has a contig and start position,
+     * then a GenomeLoc is returned for contig:start-start, otherwise an UNMAPPED GenomeLoc is returned.
+     *
+     * @param read the read from which to create a genome loc
+     *
+     * @return the GenomeLoc that was created
+     */
+    @Requires("read != null")
+    @Ensures("result != null")
+    public GenomeLoc createGenomeLoc(final SAMRecord read) {
+        if ( read.getReadUnmappedFlag() && read.getReferenceIndex() == -1 )
+            // read is unmapped and not placed anywhere on the genome
+            return GenomeLoc.UNMAPPED;
+        else {
+            // Use Math.max to ensure that end >= start (Picard assigns the end to reads that are entirely within an insertion as start-1)
+            // unmapped-but-placed reads get the single-base loc contig:start-start
+            final int end = read.getReadUnmappedFlag() ? read.getAlignmentStart() : Math.max(read.getAlignmentEnd(), read.getAlignmentStart());
+            return createGenomeLoc(read.getReferenceName(), read.getReferenceIndex(), read.getAlignmentStart(), end, false);
+        }
+    }
+
+    /**
+     * Create a genome loc, given a read using its unclipped alignment. If the read is unmapped, *and* yet the read has a contig and start position,
+     * then a GenomeLoc is returned for contig:start-start, otherwise an UNMAPPED GenomeLoc is returned.
+     *
+     * NOTE(review): for unmapped-but-placed reads this uses getUnclippedEnd() as the end, whereas
+     * the clipped variant above uses the start -- confirm the asymmetry is intended.
+     *
+     * @param read the read from which to create a genome loc
+     *
+     * @return the GenomeLoc that was created
+     */
+    @Requires("read != null")
+    @Ensures("result != null")
+    public GenomeLoc createGenomeLocUnclipped(final SAMRecord read) {
+        if ( read.getReadUnmappedFlag() && read.getReferenceIndex() == -1 )
+            // read is unmapped and not placed anywhere on the genome
+            return GenomeLoc.UNMAPPED;
+        else {
+            // Use Math.max to ensure that end >= start (Picard assigns the end to reads that are entirely within an insertion as start-1)
+            final int end = read.getReadUnmappedFlag() ? read.getUnclippedEnd() : Math.max(read.getUnclippedEnd(), read.getUnclippedStart());
+            return createGenomeLoc(read.getReferenceName(), read.getReferenceIndex(), read.getUnclippedStart(), end, false);
+        }
+    }
+
+    /**
+     * Creates a GenomeLoc from a Tribble feature
+     * @param feature a non-null Feature; its chr/start/end coordinates define the loc
+     * @return a GenomeLoc spanning the feature (built via the 3-arg createGenomeLoc, so the
+     *         contig is validated against the sequence dictionary)
+     */
+    public GenomeLoc createGenomeLoc(final Feature feature) {
+        return createGenomeLoc(feature.getChr(), feature.getStart(), feature.getEnd());
+    }
+
+    /**
+     * Returns a copy of loc with its start replaced by start.
+     * @see GenomeLoc#setStart
+     * @deprecated retained for legacy callers; prefer building the desired loc directly via a createGenomeLoc factory
+     */
+    @Deprecated
+    public GenomeLoc setStart(final GenomeLoc loc, final int start) {
+        return createGenomeLoc(loc.getContig(), loc.getContigIndex(), start, loc.getStop());
+    }
+
+    /**
+     * Returns a copy of loc with its stop replaced by stop.
+     * @see GenomeLoc#setStop
+     * @deprecated retained for legacy callers; prefer building the desired loc directly via a createGenomeLoc factory
+     */
+    @Deprecated
+    public GenomeLoc setStop(final GenomeLoc loc, final int stop) {
+        return createGenomeLoc(loc.getContig(), loc.getContigIndex(), loc.start, stop);
+    }
+
+    /**
+     * Returns a copy of loc shifted one base to the right.
+     * @see GenomeLoc#incPos
+     * @deprecated retained for legacy callers
+     */
+    @Deprecated
+    public GenomeLoc incPos(final GenomeLoc loc) {
+        return incPos(loc, 1);
+    }
+
+    /**
+     * Returns a copy of loc with both start and stop shifted right by "by".
+     * No bounds checking is applied (delegates with mustBeOnReference == false).
+     * @see GenomeLoc#incPos
+     * @deprecated retained for legacy callers
+     */
+    @Deprecated
+    public GenomeLoc incPos(final GenomeLoc loc, final int by) {
+        return createGenomeLoc(loc.getContig(), loc.getContigIndex(), loc.start + by, loc.stop + by);
+    }
+
+    /**
+     * Creates a GenomeLoc than spans the entire contig.
+     * @param contigName Name of the contig.
+     * @return A locus spanning the entire contig.
+     */
+    @Requires("contigName != null")
+    @Ensures("result != null")
+    public GenomeLoc createOverEntireContig(final String contigName) {
+        SAMSequenceRecord contig = getContigInfo().getSequence(contigName);
+        return createGenomeLoc(contigName,contig.getSequenceIndex(),1,contig.getSequenceLength(), true);
+    }
+
+    /**
+     * Creates a loc immediately to the left of loc (ending at loc's start - 1) of up to
+     * maxBasePairs in size.
+     * @param loc The original loc
+     * @param maxBasePairs The maximum number of basePairs
+     * @return The contiguous loc of up to maxBasePairs length or null if the loc is already at the start of the contig.
+     */
+    @Requires({"loc != null", "maxBasePairs > 0"})
+    public GenomeLoc createGenomeLocAtStart(final GenomeLoc loc, final int maxBasePairs) {
+        if (GenomeLoc.isUnmapped(loc))
+            return null;
+        final String contigName = loc.getContig();
+        final SAMSequenceRecord contig = getContigInfo().getSequence(contigName);
+        final int contigIndex = contig.getSequenceIndex();
+
+        int start = loc.getStart() - maxBasePairs;
+        int stop = loc.getStart() - 1;
+
+        if (start < 1)
+            start = 1;  // clamp to the first base of the contig
+        if (stop < 1)
+            return null;  // loc already begins at the contig start; nothing lies to its left
+
+        return createGenomeLoc(contigName, contigIndex, start, stop, true);
+    }
+
+    /**
+     * Creates a loc padded in both directions by maxBasePairs size (if possible).
+     * @param loc      The original loc
+     * @param padding  The number of base pairs to pad on either end
+     * @return The contiguous loc of length up to the original length + 2*padding (depending on the start/end of the contig).
+     */
+    @Requires({"loc != null", "padding >= 0"})
+    public GenomeLoc createPaddedGenomeLoc(final GenomeLoc loc, final int padding) {
+        if (GenomeLoc.isUnmapped(loc) || padding == 0)
+            return loc;  // nothing to pad: unmapped locs have no coordinates, zero padding is a no-op
+        else
+            return createGenomeLocOnContig(loc.getContig(), loc.getContigIndex(), loc.getStart() - padding, loc.getStop() + padding);
+    }
+
+    /**
+     * Creates a loc to the right (starting at the loc stop + 1) of maxBasePairs size.
+     * @param loc The original loc
+     * @param maxBasePairs The maximum number of basePairs
+     * @return The contiguous loc of up to maxBasePairs length or null if the loc is already at the end of the contig.
+     */
+    @Requires({"loc != null", "maxBasePairs > 0"})
+    public GenomeLoc createGenomeLocAtStop(final GenomeLoc loc, final int maxBasePairs) {
+        if (GenomeLoc.isUnmapped(loc))
+            return null;
+        String contigName = loc.getContig();
+        SAMSequenceRecord contig = getContigInfo().getSequence(contigName);
+        int contigIndex = contig.getSequenceIndex();
+        int contigLength = contig.getSequenceLength();
+
+        int start = loc.getStop() + 1;
+        int stop = loc.getStop() + maxBasePairs;
+
+        if (start > contigLength)
+            return null;  // loc already ends at the last base of the contig
+        if (stop > contigLength)
+            stop = contigLength;  // clamp to the end of the contig
+
+        return createGenomeLoc(contigName, contigIndex, start, stop, true);
+    }
+
+    /**
+     * @see #createGenomeLocOnContig(String, int, int, int) with the contig index looked up from contig
+     */
+    public GenomeLoc createGenomeLocOnContig(final String contig, final int start, final int stop) {
+        return createGenomeLocOnContig(contig, getContigIndex(contig), start, stop);
+    }
+
+    /**
+     * Create a new genome loc, bounding start and stop by the start and end of contig
+     *
+     * This function will return null if start and stop cannot be adjusted in any reasonable way
+     * to be on the contig.  For example, if start and stop are both past the end of the contig,
+     * there's no way to fix this, and null will be returned.
+     *
+     * NOTE(review): an interval that is inverted even after clamping (e.g. start 5, stop 3, both
+     * on the contig) is not "fixed" here -- createGenomeLoc's validation will throw
+     * MalformedGenomeLoc rather than returning null; confirm callers expect that.
+     *
+     * @param contig our contig
+     * @param contigIndex the index of contig within the sequence dictionary (passed to avoid a lookup)
+     * @param start our start as an arbitrary integer (may be negative, etc)
+     * @param stop our stop as an arbitrary integer (may be negative, etc)
+     * @return a valid genome loc over contig, or null if a meaningful genome loc cannot be created
+     */
+    public GenomeLoc createGenomeLocOnContig(final String contig, final int contigIndex, final int start, final int stop) {
+        final int contigLength = getContigInfo().getSequence(contigIndex).getSequenceLength();
+        final int boundedStart = Math.max(1, start);
+        final int boundedStop = Math.min(contigLength, stop);
+
+        if ( boundedStart > contigLength || boundedStop < 1 )
+            // there's no meaningful way to create this genome loc, as the start and stop are off the contig
+            return null;
+        else
+            return createGenomeLoc(contig, contigIndex, boundedStart, boundedStop);
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/GenomeLocSortedSet.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/GenomeLocSortedSet.java
new file mode 100644
index 0000000..6e777df
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/GenomeLocSortedSet.java
@@ -0,0 +1,476 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils;
+
+import htsjdk.samtools.SAMSequenceDictionary;
+import htsjdk.samtools.SAMSequenceRecord;
+import org.apache.log4j.Logger;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+import org.broadinstitute.gatk.utils.interval.IntervalMergingRule;
+import org.broadinstitute.gatk.utils.interval.IntervalUtils;
+
+import java.util.*;
+
/**
 * A set of genome locations kept in sorted order.
 *
 * This collection is self sorting, and will merge genome locations that are
 * overlapping.  The remove function will also remove a region from the list;
 * if the region to remove is a partial interval of a region in the collection
 * it will remove the region from that element.
 *
 * NOTE(review): overlaps() mutates a cached search index, so even read-only
 * queries are stateful — this class is NOT safe for concurrent use.
 *
 * @author aaron
 * Date: May 22, 2009
 * Time: 10:54:40 AM
 */
public class GenomeLocSortedSet extends AbstractSet<GenomeLoc> {
    private static Logger logger = Logger.getLogger(GenomeLocSortedSet.class);

    // parser used to create GenomeLocs (e.g. for the deep copies made by clone())
    private GenomeLocParser genomeLocParser;

    // our private storage for the GenomeLoc's; kept sorted, with overlaps merged on add
    private final List<GenomeLoc> mArray = new ArrayList<GenomeLoc>();

    // cache this to make overlap checking much more efficient
    private int previousOverlapSearchIndex = -1;

    /**
     * Create a new, empty GenomeLocSortedSet
     *
     * @param parser the non-null parser we use to create genome locs
     */
    public GenomeLocSortedSet(final GenomeLocParser parser) {
        if ( parser == null ) throw new IllegalArgumentException("parser cannot be null");
        this.genomeLocParser = parser;
    }

    /**
     * Create a new GenomeLocSortedSet containing location e
     *
     * @param parser the non-null parser we use to create genome locs
     * @param e a single genome loc to add to this set
     */
    public GenomeLocSortedSet(final GenomeLocParser parser, final GenomeLoc e) {
        this(parser);
        add(e);
    }

    /**
     * Create a new GenomeLocSortedSet containing locations l
     *
     * The elements in l can be in any order, and can be overlapping.  They will be sorted first and
     * overlapping (but not contiguous) elements will be merged
     *
     * @param parser the non-null parser we use to create genome locs
     * @param l a collection of genome locs to add to this set
     */
    public GenomeLocSortedSet(final GenomeLocParser parser, final Collection<GenomeLoc> l) {
        this(parser);

        // sort first, then merge overlapping intervals so mArray's invariant holds
        final ArrayList<GenomeLoc> sorted = new ArrayList<GenomeLoc>(l);
        Collections.sort(sorted);
        mArray.addAll(IntervalUtils.mergeIntervalLocations(sorted, IntervalMergingRule.OVERLAPPING_ONLY));
    }

    /**
     * Gets the GenomeLocParser used to create this sorted set.
     * @return The parser.  Will never be null.
     */
    public GenomeLocParser getGenomeLocParser() {
        return genomeLocParser;
    }

    /**
     * get an iterator over this collection
     *
     * @return an Iterator over the GenomeLocs, in sorted order
     */
    public Iterator<GenomeLoc> iterator() {
        return mArray.iterator();
    }

    /**
     * return the size of the collection
     *
     * @return the number of intervals in the collection
     */
    public int size() {
        return mArray.size();
    }

    /**
     * Return the size, in bp, of the genomic regions covered by all of the regions in this set
     * @return size in bp of the covered regions
     */
    public long coveredSize() {
        long s = 0;
        for ( GenomeLoc e : this )
            s += e.size();
        return s;
    }

    /**
     * Return the number of bps before loc in the sorted set
     *
     * @param loc the location before which we are counting bases
     * @return the number of base pairs over all previous intervals
     */
    public long sizeBeforeLoc(GenomeLoc loc) {
        long s = 0;

        for ( GenomeLoc e : this ) {
            if ( e.isBefore(loc) )
                s += e.size();
            else if ( e.isPast(loc) )
                break; // we are done
            else // loc is inside of e: count only the bases of e that precede loc's start
                s += loc.getStart() - e.getStart();
        }

        return s;
    }

    /**
     * determine if the collection is empty
     *
     * @return true if we have no elements
     */
    public boolean isEmpty() {
        return mArray.isEmpty();
    }

    /**
     * Determine if the given loc overlaps any loc in the sorted set
     *
     * NOTE(review): updates previousOverlapSearchIndex as a side effect, so this
     * query is not thread-safe; the cache speeds up runs of nearby queries.
     *
     * @param loc the location to test
     * @return true if the location overlaps any loc
     */
    public boolean overlaps(final GenomeLoc loc) {
        // edge condition
        if ( mArray.isEmpty() )
            return false;

        // use the cached version first
        if ( previousOverlapSearchIndex != -1 && overlapsAtOrImmediatelyAfterCachedIndex(loc, true) )
            return true;

        // update the cached index
        previousOverlapSearchIndex = Collections.binarySearch(mArray, loc);

        // if it matches an interval exactly, we are done
        if ( previousOverlapSearchIndex >= 0 )
            return true;

        // check whether it overlaps the interval before or after the insertion point;
        // binarySearch returns (-(insertion point) - 1), so -index - 2 is the element
        // immediately before the insertion point (clamped to 0)
        previousOverlapSearchIndex = Math.max(0, -1 * previousOverlapSearchIndex - 2);
        return overlapsAtOrImmediatelyAfterCachedIndex(loc, false);
    }

    /**
     * Check whether loc overlaps the interval at the cached index or the one right after it.
     *
     * @param loc               the location to test
     * @param updateCachedIndex if true, advance the cache when the following entry is checked
     * @return true if loc overlaps either candidate interval
     */
    private boolean overlapsAtOrImmediatelyAfterCachedIndex(final GenomeLoc loc, final boolean updateCachedIndex) {
        // check the cached entry
        if ( mArray.get(previousOverlapSearchIndex).overlapsP(loc) )
            return true;

        // check the entry after the cached entry since we may have moved to it
        boolean returnValue = false;
        if ( previousOverlapSearchIndex < mArray.size() - 1 ) {
            returnValue = mArray.get(previousOverlapSearchIndex + 1).overlapsP(loc);
            if ( updateCachedIndex )
                previousOverlapSearchIndex++;
        }

        return returnValue;
    }

    /**
     * Return a list of intervals overlapping loc
     *
     * @param loc the location we want overlapping intervals
     * @return a non-null list of locations that overlap loc
     */
    public List<GenomeLoc> getOverlapping(final GenomeLoc loc) {
        // the max ensures that if loc would be the first element, that we start searching at the first element
        final int index = Collections.binarySearch(mArray, loc);
        if ( index >= 0 )
            // we can safely return a singleton because overlapping regions are merged and loc is exactly in
            // the set already
            return Collections.singletonList(loc);

        // if loc isn't in the list index is (-(insertion point) - 1). The insertion point is defined as the point at
        // which the key would be inserted into the list: the index of the first element greater than the key, or list.size()
        // -ins - 1 = index => -ins = index + 1 => ins = -(index + 1)
        // Note that we look one before the index in this case, as loc might occur after the previous overlapping interval
        final int start = Math.max(-(index + 1) - 1, 0);
        final int size = mArray.size();

        final List<GenomeLoc> overlapping = new LinkedList<GenomeLoc>();
        for ( int i = start; i < size; i++ ) {
            final GenomeLoc myLoc = mArray.get(i);
            if ( loc.overlapsP(myLoc) )
                overlapping.add(myLoc);
            else if ( myLoc.isPast(loc) )
                // since mArray is ordered, if myLoc is past loc that means all future
                // intervals cannot overlap loc either.  So we can safely abort the search
                // note that we need to be a bit conservative on our tests since index needs to start
                // at -1 the position of index, so it's possible that myLoc and loc don't overlap but the next
                // position might
                break;
        }

        return overlapping;
    }

    /**
     * Return a list of intervals overlapping loc by enumerating all locs and testing for overlap
     *
     * Purely for testing purposes -- this is way too slow for any production code
     *
     * @param loc the location we want overlapping intervals
     * @return a non-null list of locations that overlap loc
     */
    protected List<GenomeLoc> getOverlappingFullSearch(final GenomeLoc loc) {
        final List<GenomeLoc> overlapping = new LinkedList<GenomeLoc>();

        // super slow, but definitely works
        for ( final GenomeLoc myLoc : mArray ) {
            if ( loc.overlapsP(myLoc) )
                overlapping.add(myLoc);
        }

        return overlapping;
    }

    /**
     * Adds a GenomeLoc to the collection, inserting at the correct sorted position into the set.
     * Throws an exception if the loc overlaps another loc already in the set.
     *
     * @param loc the GenomeLoc to add
     *
     * @return true if the loc was added or false otherwise (if the loc was null)
     */
    public boolean add(final GenomeLoc loc) {
        return add(loc, false);
    }

    /**
     * Adds a GenomeLoc to the collection, merging it if it overlaps another region.
     * If it's not overlapping then we insert it at the correct sorted position into the set.
     *
     * @param loc the GenomeLoc to add
     *
     * @return true if the loc was added or false otherwise (if the loc was null)
     */
    public boolean addRegion(final GenomeLoc loc) {
        return add(loc, true);
    }

    /**
     * Adds a GenomeLoc to the collection, inserting at the correct sorted position into the set.
     *
     * @param loc                      the GenomeLoc to add
     * @param mergeIfIntervalOverlaps  if true we merge the interval if it overlaps another one already in the set, otherwise we throw an exception
     *
     * @return true if the loc was added or false otherwise (if the loc was null or an exact duplicate)
     */
    public boolean add(final GenomeLoc loc, final boolean mergeIfIntervalOverlaps) {
        if ( loc == null )
            return false;

        // if we have no other intervals yet or if the new loc is past the last one in the list (which is usually the
        // case because locs are generally added in order) then be extra efficient and just add the loc to the end
        if ( mArray.size() == 0 || loc.isPast(mArray.get(mArray.size() - 1)) ) {
            return mArray.add(loc);
        }

        // find where in the list the new loc belongs
        final int binarySearchIndex = Collections.binarySearch(mArray,loc);

        // if it already exists in the list, return or throw an exception as needed
        if ( binarySearchIndex >= 0 ) {
            if ( mergeIfIntervalOverlaps )
                return false;
            throw new IllegalArgumentException("GenomeLocSortedSet already contains the GenomeLoc " + loc);
        }

        // if it overlaps a loc already in the list merge or throw an exception as needed
        final int insertionIndex = -1 * (binarySearchIndex + 1);
        if ( ! mergeOverlappingIntervalsFromAdd(loc, insertionIndex, !mergeIfIntervalOverlaps) ) {
            // it does not overlap any current intervals, so add it to the set
            mArray.add(insertionIndex, loc);
        }

        return true;
    }

    /*
     * If the provided GenomeLoc overlaps another already in the set, merge them (or throw an exception if requested)
     *
     * @param loc                          the GenomeLoc to add
     * @param insertionIndex               the index in the sorted set to add the new loc
     * @param throwExceptionIfOverlapping  if true we throw an exception if there's overlap, otherwise we merge them
     *
     * @return true if the loc was merged into an existing interval, false if there was no overlap
     */
    private boolean mergeOverlappingIntervalsFromAdd(final GenomeLoc loc, final int insertionIndex, final boolean throwExceptionIfOverlapping) {
        // try merging with the previous index
        if ( insertionIndex != 0 && loc.overlapsP(mArray.get(insertionIndex - 1)) ) {
            if ( throwExceptionIfOverlapping )
                throw new IllegalArgumentException(String.format("GenomeLocSortedSet contains a GenomeLoc (%s) that overlaps with the provided one (%s)", mArray.get(insertionIndex - 1).toString(), loc.toString()));
            mArray.set(insertionIndex - 1, mArray.get(insertionIndex - 1).merge(loc));
            return true;
        }

        // try merging with the following index
        if ( insertionIndex < mArray.size() && loc.overlapsP(mArray.get(insertionIndex)) ) {
            if ( throwExceptionIfOverlapping )
                throw new IllegalArgumentException(String.format("GenomeLocSortedSet contains a GenomeLoc (%s) that overlaps with the provided one (%s)", mArray.get(insertionIndex).toString(), loc.toString()));
            mArray.set(insertionIndex, mArray.get(insertionIndex).merge(loc));
            return true;
        }

        return false;
    }

    /**
     * Return a new set containing this set's regions minus toRemoveSet's regions.
     *
     * Implemented as a two-stack sweep: intervals are processed in sorted order,
     * and any interval overlapping an exclusion is split via subtract() and its
     * pieces pushed back for re-processing.  Neither input set is modified.
     *
     * @param toRemoveSet the regions to subtract from this set
     * @return a new GenomeLocSortedSet with the remaining regions
     */
    public GenomeLocSortedSet subtractRegions(GenomeLocSortedSet toRemoveSet) {
        LinkedList<GenomeLoc> good = new LinkedList<GenomeLoc>();
        Stack<GenomeLoc> toProcess = new Stack<GenomeLoc>();
        Stack<GenomeLoc> toExclude = new Stack<GenomeLoc>();

        // initialize the stacks (reversed so the earliest interval is on top)
        toProcess.addAll(mArray);
        Collections.reverse(toProcess);
        toExclude.addAll(toRemoveSet.mArray);
        Collections.reverse(toExclude);

        int i = 0;
        while ( ! toProcess.empty() ) {    // while there's still stuff to process
            if ( toExclude.empty() ) {
                good.addAll(toProcess);         // no more excludes, all the processing stuff is good
                break;
            }

            GenomeLoc p = toProcess.peek();
            GenomeLoc e = toExclude.peek();

            if ( p.overlapsP(e) ) {
                toProcess.pop();
                // split p around e and re-process the remaining pieces
                for ( GenomeLoc newP : p.subtract(e) )
                    toProcess.push(newP);
            } else if ( p.compareContigs(e) < 0 ) {
                good.add(toProcess.pop());         // p is now good
            } else if ( p.compareContigs(e) > 0 ) {
                toExclude.pop();                 // e can't effect anything
            } else if ( p.getStop() < e.getStart() ) {
                good.add(toProcess.pop());         // p stops before e starts, p is good
            } else if ( e.getStop() < p.getStart() ) {
                toExclude.pop();                 // p starts after e stops, e is done
            } else {
                throw new ReviewedGATKException("BUG: unexpected condition: p=" + p + ", e=" + e);
            }

            // progress logging (note: also fires on the very first iteration, i == 0)
            if ( i++ % 10000 == 0 )
                logger.debug("removeRegions operation: i = " + i);
        }

        return createSetFromList(genomeLocParser,good);
    }


    /**
     * a simple removal of an interval contained in this list.  The interval must be identical to one in the list (no partial locations or overlapping)
     *
     * NOTE(review): contains() followed by remove() scans the list twice; a single
     * indexOf() would suffice.
     *
     * @param location the GenomeLoc to remove
     */
    public void remove(GenomeLoc location) {
        if (!mArray.contains(location)) throw new IllegalArgumentException("Unable to remove location: " + location + ", not in the list");
        mArray.remove(location);
    }

    /**
     * create a list of genomic locations, given a reference sequence
     *
     * @param dict the sequence dictionary to create a collection from
     *
     * @return the GenomeLocSet of all reference sequences as GenomeLoc's
     */
    public static GenomeLocSortedSet createSetFromSequenceDictionary(final SAMSequenceDictionary dict) {
        final GenomeLocParser parser = new GenomeLocParser(dict);
        final GenomeLocSortedSet returnSortedSet = new GenomeLocSortedSet(parser);
        for ( final SAMSequenceRecord sequence : dict.getSequences() ) {
            returnSortedSet.add(parser.createOverEntireContig(sequence.getSequenceName()));
        }
        return returnSortedSet;
    }

    /**
     * Create a sorted genome location set from a list of GenomeLocs.
     *
     * @param parser the parser used to build the resulting set
     * @param locs the list<GenomeLoc>
     *
     * @return the sorted genome loc list
     */
    public static GenomeLocSortedSet createSetFromList(GenomeLocParser parser,List<GenomeLoc> locs) {
        GenomeLocSortedSet set = new GenomeLocSortedSet(parser);
        set.addAll(locs);
        return set;
    }


    /**
     * return a deep copy of this collection.
     *
     * NOTE(review): deliberately does not call super.clone(); each element is
     * re-created through the parser so the copy shares no GenomeLoc instances.
     *
     * @return a new GenomeLocSortedSet, identical to the current GenomeLocSortedSet.
     */
    public GenomeLocSortedSet clone() {
        GenomeLocSortedSet ret = new GenomeLocSortedSet(genomeLocParser);
        for (GenomeLoc loc : this.mArray) {
            // ensure a deep copy
            ret.mArray.add(genomeLocParser.createGenomeLoc(loc.getContig(), loc.getStart(), loc.getStop()));
        }
        return ret;
    }

    /**
     * convert this object to a list
     *
     * NOTE(review): returns the internal backing list, not a copy — mutating the
     * returned list mutates this set.
     *
     * @return the list of GenomeLocs backing this set
     */
    public List<GenomeLoc> toList() {
        return this.mArray;
    }

    /**
     * @return a bracketed, space-separated rendering of the intervals, e.g. "[ chr1:1-10 chr2:5-6]"
     */
    public String toString() {
        StringBuilder s = new StringBuilder();
        s.append("[");
        for ( GenomeLoc e : this ) {
            s.append(" ");
            s.append(e.toString());
        }
        s.append("]");

        return s.toString();
    }
}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/HasGenomeLocation.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/HasGenomeLocation.java
index d080d5b..d030919 100644
--- a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/HasGenomeLocation.java
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/HasGenomeLocation.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/HeapSizeMonitor.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/HeapSizeMonitor.java
new file mode 100644
index 0000000..b158794
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/HeapSizeMonitor.java
@@ -0,0 +1,107 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils;
+
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+
+import java.lang.management.ManagementFactory;
+import java.lang.management.MemoryMXBean;
+
+/**
+ * Monitor the current heap size, allowing the application to programmatically
+ * access the data.
+ *
+ * @author mhanna
+ * @version 0.1
+ */
+public class HeapSizeMonitor {
+    private final int monitorFrequencyMillis;
+    private final MonitorRunnable monitorRunnable;
+
+    private Thread monitorThread;
+
+    public HeapSizeMonitor() {
+        this(1000);
+    }
+
+    public HeapSizeMonitor(final int monitorFrequencyMillis) {
+        this.monitorFrequencyMillis = monitorFrequencyMillis;
+        this.monitorRunnable = new MonitorRunnable();
+    }
+
+    public long getMaxMemoryUsed() {
+        return monitorRunnable.getMaxMemoryUsed();
+    }
+
+    public void start() {
+        monitorThread = new Thread(monitorRunnable);
+        monitorThread.start();
+    }
+
+    public void stop() {
+        monitorRunnable.stop = true;
+        try {
+            monitorThread.join();
+        }
+        catch(InterruptedException ex) {
+            throw new ReviewedGATKException("Unable to connect to monitor thread");
+        }
+        monitorThread = null;        
+    }
+
+    private class MonitorRunnable implements Runnable {
+        private MemoryMXBean monitor;
+
+        private long maxMemoryUsed;
+        private boolean stop;
+
+        public MonitorRunnable() {
+            monitor = ManagementFactory.getMemoryMXBean();   
+        }
+
+        public void reset() {
+            maxMemoryUsed = 0L;
+            stop = false;
+        }
+
+        public long getMaxMemoryUsed() {
+            return maxMemoryUsed;
+        }
+
+        public void run() {
+            while(!stop) {
+                System.gc();
+                maxMemoryUsed = Math.max(monitor.getHeapMemoryUsage().getUsed(),maxMemoryUsed);
+                try {
+                    Thread.sleep(monitorFrequencyMillis);
+                }
+                catch(InterruptedException ex) {
+                    throw new ReviewedGATKException("Unable to continue monitoring heap consumption",ex);
+                }
+            }
+        }
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/IndelUtils.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/IndelUtils.java
new file mode 100644
index 0000000..83003d2
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/IndelUtils.java
@@ -0,0 +1,262 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils;
+
+import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+import htsjdk.variant.variantcontext.VariantContext;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+
+/**
+ * Created by IntelliJ IDEA.
+ * User: delangel
+ * Date: Feb 3, 2011
+ * Time: 2:44:22 PM
+ * To change this template use File | Settings | File Templates.
+ */
+public class IndelUtils {
+    protected final static String[] COLUMN_KEYS;
+
+
+
+    static {
+        COLUMN_KEYS= new String[51];
+        COLUMN_KEYS[0] = "Novel_A";
+        COLUMN_KEYS[1] = "Novel_C";
+        COLUMN_KEYS[2] = "Novel_G";
+        COLUMN_KEYS[3] = "Novel_T";
+        COLUMN_KEYS[4]  = "NOVEL_1";
+        COLUMN_KEYS[5]  = "NOVEL_2";
+        COLUMN_KEYS[6]  = "NOVEL_3";
+        COLUMN_KEYS[7]  = "NOVEL_4";
+        COLUMN_KEYS[8]  = "NOVEL_5";
+        COLUMN_KEYS[9]  = "NOVEL_6";
+        COLUMN_KEYS[10] = "NOVEL_7";
+        COLUMN_KEYS[11] = "NOVEL_8";
+        COLUMN_KEYS[12] = "NOVEL_9";
+        COLUMN_KEYS[13] = "NOVEL_10orMore";
+        COLUMN_KEYS[14] = "RepeatExpansion_A";
+        COLUMN_KEYS[15] = "RepeatExpansion_C";
+        COLUMN_KEYS[16] = "RepeatExpansion_G";
+        COLUMN_KEYS[17] = "RepeatExpansion_T";
+        COLUMN_KEYS[18] = "RepeatExpansion_AC";
+        COLUMN_KEYS[19] = "RepeatExpansion_AG";
+        COLUMN_KEYS[20] = "RepeatExpansion_AT";
+        COLUMN_KEYS[21] = "RepeatExpansion_CA";
+        COLUMN_KEYS[22] = "RepeatExpansion_CG";
+        COLUMN_KEYS[23] = "RepeatExpansion_CT";
+        COLUMN_KEYS[24] = "RepeatExpansion_GA";
+        COLUMN_KEYS[25] = "RepeatExpansion_GC";
+        COLUMN_KEYS[26] = "RepeatExpansion_GT";
+        COLUMN_KEYS[27] = "RepeatExpansion_TA";
+        COLUMN_KEYS[28] = "RepeatExpansion_TC";
+        COLUMN_KEYS[29] = "RepeatExpansion_TG";
+        COLUMN_KEYS[30] = "EventLength_1";
+        COLUMN_KEYS[31] = "EventLength_2";
+        COLUMN_KEYS[32] = "EventLength_3";
+        COLUMN_KEYS[33] = "EventLength_4";
+        COLUMN_KEYS[34] = "EventLength_5";
+        COLUMN_KEYS[35] = "EventLength_6";
+        COLUMN_KEYS[36] = "EventLength_7";
+        COLUMN_KEYS[37] = "EventLength_8";
+        COLUMN_KEYS[38] = "EventLength_9";
+        COLUMN_KEYS[39] = "EventLength_10orMore";
+        COLUMN_KEYS[40] = "NumRepetitions_1";
+        COLUMN_KEYS[41] = "NumRepetitions_2";
+        COLUMN_KEYS[42] = "NumRepetitions_3";
+        COLUMN_KEYS[43] = "NumRepetitions_4";
+        COLUMN_KEYS[44] = "NumRepetitions_5";
+        COLUMN_KEYS[45] = "NumRepetitions_6";
+        COLUMN_KEYS[46] = "NumRepetitions_7";
+        COLUMN_KEYS[47] = "NumRepetitions_8";
+        COLUMN_KEYS[48] = "NumRepetitions_9";
+        COLUMN_KEYS[49] = "NumRepetitions_10orMore";
+        COLUMN_KEYS[50] = "Other";
+
+    }
+
+    private static final int START_IND_NOVEL = 4;
+    private static final int STOP_IND_NOVEL = 13;
+    private static final int START_IND_FOR_REPEAT_EXPANSION_1 = 14;
+    private static final int IND_FOR_REPEAT_EXPANSION_A = 14;
+    private static final int IND_FOR_REPEAT_EXPANSION_C = 15;
+    private static final int IND_FOR_REPEAT_EXPANSION_G = 16;
+    private static final int IND_FOR_REPEAT_EXPANSION_T = 17;
+    private static final int STOP_IND_FOR_REPEAT_EXPANSION_2 = 29;
+    private static final int START_IND_FOR_REPEAT_EXPANSION_COUNTS = 30;
+    private static final int STOP_IND_FOR_REPEAT_EXPANSION_COUNTS = 39;
+    private static final int START_IND_FOR_NUM_REPETITION_COUNTS = 40;
+    private static final int STOP_IND_FOR_NUM_REPETITION_COUNTS = 49;
+    private static final int IND_FOR_OTHER_EVENT = 50;
+    private static final int START_IND_NOVEL_PER_BASE = 0;
+    private static final int STOP_IND_NOVEL_PER_BASE = 3;
+
+    private static String findMinimalEvent(String eventString) {
+
+        // for each length up to given string length, see if event string is a repetition of units of size N
+        String minEvent = eventString;
+        for (int k=1; k < eventString.length(); k++) {
+            if (eventString.length() % k > 0)
+                continue;
+            String str = eventString.substring(0,k);
+            // now see if event string is a repetition of str
+            int numReps = eventString.length() / k;
+            String r = "";
+            for (int j=0; j < numReps; j++)
+                r = r.concat(str);
+
+            if (r.matches(eventString)) {
+                minEvent = str;
+                break;
+            }
+
+        }
+        return minEvent;
+    }
+
+    public static ArrayList<Integer> findEventClassificationIndex(VariantContext vc, ReferenceContext ref) {
+        int eventLength;
+
+        String indelAlleleString;
+        boolean done = false;
+
+        ArrayList<Integer> inds = new ArrayList<Integer>();
+        if ( vc.isSimpleInsertion() ) {
+            indelAlleleString = vc.getAlternateAllele(0).getDisplayString().substring(1);
+        } else if ( vc.isSimpleDeletion() ) {
+            indelAlleleString = vc.getReference().getDisplayString().substring(1);
+        }
+        else {
+            inds.add(IND_FOR_OTHER_EVENT);
+            return inds;
+        }
+
+        byte[] refBases = ref.getBases();
+
+        indelAlleleString = findMinimalEvent(indelAlleleString);
+        eventLength = indelAlleleString.length();
+
+        // See first if indel is a repetition of bases before current
+        int indStart = refBases.length/2-eventLength+1;
+
+        int numRepetitions = 0;
+        while (!done) {
+            if (indStart < 0)
+                done = true;
+            else {
+                String refPiece = new String(Arrays.copyOfRange(refBases,indStart,indStart+eventLength));
+                if (refPiece.matches(indelAlleleString))
+                {
+                    numRepetitions++;
+                    indStart = indStart - eventLength;
+                }
+                else
+                    done = true;
+
+            }
+        }
+
+        // now do it forward
+        done = false;
+        indStart = refBases.length/2+1;
+        while (!done) {
+            if (indStart + eventLength >= refBases.length)
+                break;
+            else {
+                String refPiece = new String(Arrays.copyOfRange(refBases,indStart,indStart+eventLength));
+                if (refPiece.matches(indelAlleleString))
+                {
+                    numRepetitions++;
+                    indStart = indStart + eventLength;
+                }
+                else
+                    done = true;
+
+            }
+        }
+
+        if (numRepetitions == 0) {
+            //unrepeated sequence from surroundings
+            int ind = START_IND_NOVEL + (eventLength-1);
+            if (ind > STOP_IND_NOVEL)
+                ind = STOP_IND_NOVEL;
+            inds.add(ind);
+
+            if (eventLength == 1) {
+                // log single base indels additionally by base
+                String keyStr = "Novel_" + indelAlleleString;
+                int k;
+                for (k=START_IND_NOVEL_PER_BASE; k <= STOP_IND_NOVEL_PER_BASE; k++) {
+                    if (keyStr.matches(COLUMN_KEYS[k]))
+                        break;
+                }
+                inds.add(k);
+            }
+        }
+        else {
+            // log number of repetition counts
+            int ind = START_IND_FOR_NUM_REPETITION_COUNTS + (numRepetitions-1);
+            if (ind > STOP_IND_FOR_NUM_REPETITION_COUNTS)
+                ind = STOP_IND_FOR_NUM_REPETITION_COUNTS;
+            inds.add(ind);
+
+            ind = START_IND_FOR_REPEAT_EXPANSION_COUNTS + (eventLength - 1);
+            if (ind > STOP_IND_FOR_REPEAT_EXPANSION_COUNTS)
+                    ind = STOP_IND_FOR_REPEAT_EXPANSION_COUNTS;
+            inds.add(ind);
+            
+            // log event length
+            if (eventLength<=2) {
+                // for single or dinucleotide indels, we further log the base in which they occurred
+                String keyStr = "RepeatExpansion_" + indelAlleleString;
+                int k;
+                for (k=START_IND_FOR_REPEAT_EXPANSION_1; k <= STOP_IND_FOR_REPEAT_EXPANSION_2; k++) {
+                    if (keyStr.matches(COLUMN_KEYS[k]))
+                        break;
+                }
+                // log now event
+                inds.add(k);
+            }
+
+
+        }
+
+        return inds;
+    }
+
+    public static String getIndelClassificationName(int k) {
+        if (k >=0 && k < COLUMN_KEYS.length)
+            return COLUMN_KEYS[k];
+        else
+            throw new ReviewedGATKException("Invalid index when trying to get indel classification name");
+    }
+
+    public static boolean isInsideExtendedIndel(VariantContext vc, ReferenceContext ref) {
+        return (vc.getStart() != ref.getLocus().getStart());
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/LRUCache.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/LRUCache.java
new file mode 100644
index 0000000..ed985c4
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/LRUCache.java
@@ -0,0 +1,45 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils;
+
+import java.util.LinkedHashMap;
+import java.util.Map;
+
/**
 * A fixed-capacity least-recently-used cache implemented as a thin extension of
 * {@link LinkedHashMap} in access order: whenever an insertion pushes the map past
 * {@code capacity} entries, the least recently accessed entry is evicted.
 *
 * Not thread-safe; callers must synchronize externally if the cache is shared.
 *
 * @param <K> key type
 * @param <V> value type
 */
public class LRUCache<K,V> extends LinkedHashMap<K,V> {
    private final int capacity; // Maximum number of items retained in the cache.

    /**
     * @param capacity maximum number of entries retained before eviction kicks in
     */
    public LRUCache(final int capacity) {
        super(capacity+1, 1.0f, true); // Pass 'true' for accessOrder so iteration order tracks recency of use.
        this.capacity = capacity;
    }

    /**
     * Called by LinkedHashMap after every insertion; returning true evicts the
     * eldest (least recently accessed) entry once we exceed capacity.
     */
    @Override
    protected boolean removeEldestEntry(final Map.Entry<K,V> entry) {
        return (size() > this.capacity);
    }
}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/MRUCachingSAMSequenceDictionary.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/MRUCachingSAMSequenceDictionary.java
new file mode 100644
index 0000000..b2c2d1a
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/MRUCachingSAMSequenceDictionary.java
@@ -0,0 +1,186 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils;
+
+import com.google.java.contract.Ensures;
+import com.google.java.contract.Requires;
+import htsjdk.samtools.SAMSequenceDictionary;
+import htsjdk.samtools.SAMSequenceRecord;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+
+/**
+ * A wrapper class that provides efficient most recently used caching for the global
+ * SAMSequenceDictionary underlying all of the GATK engine capabilities.  It is essential
+ * that these class be as efficient as possible.  It doesn't need to be thread-safe, as
+ * GenomeLocParser uses a thread-local variable to ensure that each thread gets its own MRU
+ * cache.
+ *
+ * The MRU elements are the SAMSequenceRecord, the lastContig, and the lastIndex.  The
+ * cached value is the actual SAMSequenceRecord of the most recently accessed value from
+ * getSequence, along with local variables for the contig index and contig string.
+ */
+final class MRUCachingSAMSequenceDictionary {
+    /**
+     * Our sequence dictionary
+     */
+    private final SAMSequenceDictionary dict;
+
+    SAMSequenceRecord lastSSR = null;
+    String lastContig = "";
+    int lastIndex = -1;
+
+    /**
+     * Create a new MRUCachingSAMSequenceDictionary that provides information about sequences in dict
+     * @param dict a non-null, non-empty sequencing dictionary
+     */
+    @Ensures("lastSSR == null")
+    public MRUCachingSAMSequenceDictionary(final SAMSequenceDictionary dict) {
+        if ( dict == null ) throw new IllegalArgumentException("Dictionary cannot be null");
+        if ( dict.size() == 0 ) throw new IllegalArgumentException("Dictionary cannot have size zero");
+
+        this.dict = dict;
+    }
+
+    /**
+     * Get our sequence dictionary
+     * @return a non-null SAMSequenceDictionary
+     */
+    @Ensures("result != null")
+    public SAMSequenceDictionary getDictionary() {
+        return dict;
+    }
+
+    /**
+     * Is contig present in the dictionary?  Efficiently caching.
+     * @param contig a non-null contig we want to test
+     * @return true if contig is in dictionary, false otherwise
+     */
+    @Requires("contig != null")
+    public final boolean hasContig(final String contig) {
+        return contig.equals(lastContig) || dict.getSequence(contig) != null;
+    }
+
+    /**
+     * Is contig index present in the dictionary?  Efficiently caching.
+     * @param contigIndex an integer offset that might map to a contig in this dictionary
+     * @return true if contigIndex is in dictionary, false otherwise
+     */
+    @Requires("contigIndex >= 0")
+    public final boolean hasContigIndex(final int contigIndex) {
+        return lastIndex == contigIndex || dict.getSequence(contigIndex) != null;
+    }
+
+    /**
+     * Same as SAMSequenceDictionary.getSequence but uses a MRU cache for efficiency
+     *
+     * @param contig the contig name we want to get the sequence record of
+     * @throws ReviewedGATKException if contig isn't present in the dictionary
+     * @return the sequence record for contig
+     */
+    @Requires("contig != null")
+    @Ensures("result != null")
+    public final SAMSequenceRecord getSequence(final String contig) {
+        if ( isCached(contig) )
+            return lastSSR;
+        else
+            return updateCache(contig, -1);
+    }
+
+    /**
+     * Same as SAMSequenceDictionary.getSequence but uses a MRU cache for efficiency
+     *
+     * @param index the contig index we want to get the sequence record of
+     * @throws ReviewedGATKException if contig isn't present in the dictionary
+     * @return the sequence record for contig
+     */
+    @Requires("index >= 0")
+    @Ensures("result != null")
+    public final SAMSequenceRecord getSequence(final int index) {
+        if ( isCached(index) )
+            return lastSSR;
+        else
+            return updateCache(null, index);
+    }
+
+    /**
+     * Same as SAMSequenceDictionary.getSequenceIndex but uses a MRU cache for efficiency
+     *
+     * @param contig the contig we want to get the sequence record of
+     * @throws ReviewedGATKException if index isn't present in the dictionary
+     * @return the sequence record index for contig
+     */
+    @Requires("contig != null")
+    @Ensures("result >= 0")
+    public final int getSequenceIndex(final String contig) {
+        if ( ! isCached(contig) ) {
+            updateCache(contig, -1);
+        }
+
+        return lastIndex;
+    }
+
+    /**
+     * Is contig the MRU cached contig?
+     * @param contig the contig to test
+     * @return true if contig is the currently cached contig, false otherwise
+     */
+    @Requires({"contig != null"})
+    protected boolean isCached(final String contig) {
+        return contig.equals(lastContig);
+    }
+
+    /**
+     * Is the contig index index the MRU cached index?
+     * @param index the contig index to test
+     * @return true if contig index is the currently cached contig index, false otherwise
+     */
+    protected boolean isCached(final int index) {
+        return lastIndex == index;
+    }
+
+    /**
+     * The key algorithm.  Given a new record, update the last used record, contig
+     * name, and index.
+     *
+     * @param contig the contig we want to look up.  If null, index is used instead
+     * @param index the contig index we want to look up.  Only used if contig is null
+     * @throws ReviewedGATKException if index isn't present in the dictionary
+     * @return the SAMSequenceRecord for contig / index
+     */
+    @Requires("contig != null || index >= 0")
+    @Ensures("result != null")
+    private SAMSequenceRecord updateCache(final String contig, int index ) {
+        SAMSequenceRecord rec = contig == null ? dict.getSequence(index) : dict.getSequence(contig);
+        if ( rec == null ) {
+            throw new ReviewedGATKException("BUG: requested unknown contig=" + contig + " index=" + index);
+        } else {
+            lastSSR = rec;
+            lastContig = rec.getSequenceName();
+            lastIndex = rec.getSequenceIndex();
+            return rec;
+        }
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/MannWhitneyU.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/MannWhitneyU.java
new file mode 100644
index 0000000..a768a1c
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/MannWhitneyU.java
@@ -0,0 +1,507 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils;
+
+import cern.jet.math.Arithmetic;
+import cern.jet.random.Normal;
+import com.google.java.contract.Ensures;
+import com.google.java.contract.Requires;
+import org.apache.commons.math.MathException;
+import org.apache.commons.math.distribution.NormalDistribution;
+import org.apache.commons.math.distribution.NormalDistributionImpl;
+import org.broadinstitute.gatk.utils.collections.Pair;
+import org.broadinstitute.gatk.utils.exceptions.GATKException;
+
+import java.io.Serializable;
+import java.util.Comparator;
+import java.util.TreeSet;
+
+/**
+ * Created by IntelliJ IDEA.
+ * User: chartl
+ */
+public class MannWhitneyU {
+
+    private static Normal STANDARD_NORMAL = new Normal(0.0,1.0,null);
+    private static NormalDistribution APACHE_NORMAL = new NormalDistributionImpl(0.0,1.0,1e-2);
+    private static double LNSQRT2PI = Math.log(Math.sqrt(2.0*Math.PI));
+
+    private TreeSet<Pair<Number,USet>> observations;
+    private int sizeSet1;
+    private int sizeSet2;
+    private ExactMode exactMode;
+
+    public MannWhitneyU(ExactMode mode, boolean dither) {
+        if ( dither )
+            observations = new TreeSet<Pair<Number,USet>>(new DitheringComparator());
+        else
+            observations = new TreeSet<Pair<Number,USet>>(new NumberedPairComparator());
+        sizeSet1 = 0;
+        sizeSet2 = 0;
+        exactMode = mode;
+    }
+
+    public MannWhitneyU() {
+        this(ExactMode.POINT,true);
+    }
+
+    public MannWhitneyU(boolean dither) {
+        this(ExactMode.POINT,dither);
+    }
+
+    public MannWhitneyU(ExactMode mode) {
+        this(mode,true);
+    }
+
+    /**
+     * Add an observation into the observation tree
+     * @param n: the observation (a number)
+     * @param set: whether the observation comes from set 1 or set 2
+     */
+    public void add(Number n, USet set) {
+        observations.add(new Pair<Number,USet>(n,set));
+        if ( set == USet.SET1 ) {
+            ++sizeSet1;
+        } else {
+            ++sizeSet2;
+        }
+    }
+
+    public Pair<Long,Long> getR1R2() {
+        long u1 = calculateOneSidedU(observations,MannWhitneyU.USet.SET1);
+        long n1 = sizeSet1*(sizeSet1+1)/2;
+        long r1 = u1 + n1;
+        long n2 = sizeSet2*(sizeSet2+1)/2;
+        long u2 = n1*n2-u1;
+        long r2 = u2 + n2;
+
+        return new Pair<Long,Long>(r1,r2);
+    }
+
+    /**
+     * Runs the one-sided test under the hypothesis that the data in set "lessThanOther" stochastically
+     * dominates the other set
+     * @param lessThanOther - either Set1 or Set2
+     * @return - u-based z-approximation, and p-value associated with the test (p-value is exact for small n,m)
+     */
+    @Requires({"lessThanOther != null"})
+    @Ensures({"validateObservations(observations) || Double.isNaN(result.getFirst())","result != null", "! Double.isInfinite(result.getFirst())", "! Double.isInfinite(result.getSecond())"})
+    public Pair<Double,Double> runOneSidedTest(USet lessThanOther) {
+        long u = calculateOneSidedU(observations, lessThanOther);
+        int n = lessThanOther == USet.SET1 ? sizeSet1 : sizeSet2;
+        int m = lessThanOther == USet.SET1 ? sizeSet2 : sizeSet1;
+        if ( n == 0 || m == 0 ) {
+            // test is uninformative as one or both sets have no observations
+            return new Pair<Double,Double>(Double.NaN,Double.NaN);
+        }
+
+        // the null hypothesis is that {N} is stochastically less than {M}, so U has counted
+        // occurrences of {M}s before {N}s. We would expect that this should be less than (n*m+1)/2 under
+        // the null hypothesis, so we want to integrate from K=0 to K=U for cumulative cases. Always.
+        return calculateP(n, m, u, false, exactMode);
+    }
+
+    /**
+     * Runs the standard two-sided test,
+     * returns the u-based z-approximate and p values.
+     * @return a pair holding the u and p-value.
+     */
+    @Ensures({"result != null", "! Double.isInfinite(result.getFirst())", "! Double.isInfinite(result.getSecond())"})
+    //@Requires({"validateObservations(observations)"})
+    public Pair<Double,Double> runTwoSidedTest() {
+        Pair<Long,USet> uPair = calculateTwoSidedU(observations);
+        long u = uPair.first;
+        int n = uPair.second == USet.SET1 ? sizeSet1 : sizeSet2;
+        int m = uPair.second == USet.SET1 ? sizeSet2 : sizeSet1;
+        if ( n == 0 || m == 0 ) {
+            // test is uninformative as one or both sets have no observations
+            return new Pair<Double,Double>(Double.NaN,Double.NaN);
+        }
+        return calculateP(n, m, u, true, exactMode);
+    }
+
+    /**
+     * Given a u statistic, calculate the p-value associated with it, dispatching to approximations where appropriate
+     * @param n - The number of entries in the stochastically smaller (dominant) set
+     * @param m - The number of entries in the stochastically larger (dominated) set
+     * @param u - the Mann-Whitney U value
+     * @param twoSided - is the test twosided
+     * @return the (possibly approximate) p-value associated with the MWU test, and the (possibly approximate) z-value associated with it
+     * todo -- there must be an approximation for small m and large n
+     */
+    @Requires({"m > 0","n > 0"})
+    @Ensures({"result != null", "! Double.isInfinite(result.getFirst())", "! Double.isInfinite(result.getSecond())"})
+    protected static Pair<Double,Double> calculateP(int n, int m, long u, boolean twoSided, ExactMode exactMode) {
+        Pair<Double,Double> zandP;
+        if ( n > 8 && m > 8 ) {
+            // large m and n - normal approx
+            zandP = calculatePNormalApproximation(n,m,u, twoSided);
+        } else if ( n > 5 && m > 7 ) {
+            // large m, small n - sum uniform approx
+            // todo -- find the appropriate regimes where this approximation is actually better enough to merit slowness
+            // pval = calculatePUniformApproximation(n,m,u);
+            zandP = calculatePNormalApproximation(n, m, u, twoSided);
+        } else if ( n > 8 || m > 8 ) {
+            zandP = calculatePFromTable(n, m, u, twoSided);
+        } else {
+            // small m and n - full approx
+            zandP = calculatePRecursively(n,m,u,twoSided,exactMode);
+        }
+
+        return zandP;
+    }
+
+    public static Pair<Double,Double> calculatePFromTable(int n, int m, long u, boolean twoSided) {
+        // todo -- actually use a table for:
+        // todo      - n large, m small
+        return calculatePNormalApproximation(n,m,u, twoSided);
+    }
+
+    /**
+     * Uses a normal approximation to the U statistic in order to return a cdf p-value. See Mann, Whitney [1947]
+     * @param n - The number of entries in the stochastically smaller (dominant) set
+     * @param m - The number of entries in the stochastically larger (dominated) set
+     * @param u - the Mann-Whitney U value
+     * @param twoSided - whether the test should be two sided
+     * @return p-value associated with the normal approximation
+     */
+    @Requires({"m > 0","n > 0"})
+    @Ensures({"result != null", "! Double.isInfinite(result.getFirst())", "! Double.isInfinite(result.getSecond())"})
+    public static Pair<Double,Double> calculatePNormalApproximation(int n,int m,long u, boolean twoSided) {
+        double z = getZApprox(n,m,u);
+        if ( twoSided ) {
+            return new Pair<Double,Double>(z,2.0*(z < 0 ? STANDARD_NORMAL.cdf(z) : 1.0-STANDARD_NORMAL.cdf(z)));
+        } else {
+            return new Pair<Double,Double>(z,STANDARD_NORMAL.cdf(z));
+        }
+    }
+
+    /**
+     * Calculates the Z-score approximation of the u-statistic
+     * @param n - The number of entries in the stochastically smaller (dominant) set
+     * @param m - The number of entries in the stochastically larger (dominated) set
+     * @param u - the Mann-Whitney U value
+     * @return the asymptotic z-approximation corresponding to the MWU p-value for n < m
+     */
+    @Requires({"m > 0","n > 0"})
+    @Ensures({"! Double.isNaN(result)", "! Double.isInfinite(result)"})
+    private static double getZApprox(int n, int m, long u) {
+        double mean = ( ((long)m)*n+1.0)/2;
+        double var = (((long) n)*m*(n+m+1.0))/12;
+        double z = ( u - mean )/Math.sqrt(var);
+        return z;
+    }
+
+    /**
+     * Uses a sum-of-uniform-0-1 random variable approximation to the U statistic in order to return an approximate
+     * p-value. See Buckle, Kraft, van Eeden [1969] (approx) and Billingsly [1995] or Stephens, MA [1966, biometrika] (sum of uniform CDF)
+     * @param n - The number of entries in the stochastically smaller (dominant) set
+     * @param m - The number of entries in the stochastically larger (dominated) set
+     * @param u - mann-whitney u value
+     * @return p-value according to sum of uniform approx
+     * todo -- this is currently not called due to not having a good characterization of where it is significantly more accurate than the
+     * todo -- normal approxmation (e.g. enough to merit the runtime hit)
+     */
+    public static double calculatePUniformApproximation(int n, int m, long u) {
+        long R = u + (n*(n+1))/2;
+        double a = Math.sqrt(m*(n+m+1));
+        double b = (n/2.0)*(1-Math.sqrt((n+m+1)/m));
+        double z = b + ((double)R)/a;
+        if ( z < 0 ) { return 1.0; }
+        else if ( z > n ) { return 0.0; }
+        else {
+            if ( z > ((double) n) /2 ) {
+                return 1.0-1/(Arithmetic.factorial(n))*uniformSumHelper(z, (int) Math.floor(z), n, 0);
+            } else {
+                return 1/(Arithmetic.factorial(n))*uniformSumHelper(z, (int) Math.floor(z), n, 0);
+            }
+        }
+    }
+
+    /**
+     * Helper function for the sum of n uniform random variables
+     * @param z - value at which to compute the (un-normalized) cdf
+     * @param m - a cutoff integer (defined by m <= z < m + 1)
+     * @param n - the number of uniform random variables
+     * @param k - holder variable for the recursion (alternatively, the index of the term in the sequence)
+     * @return the (un-normalized) cdf for the sum of n random variables
+     */
+    private static double uniformSumHelper(double z, int m, int n, int k) {
+        if ( k > m ) { return 0; }
+        int coef = (k % 2 == 0) ? 1 : -1;
+        return coef*Arithmetic.binomial(n,k)*Math.pow(z-k,n) + uniformSumHelper(z,m,n,k+1);
+    }
+
+    /**
+     * Calculates the U-statistic associated with a two-sided test (e.g. the RV from which one set is drawn
+     * stochastically dominates the RV from which the other set is drawn); two-sidedness is accounted for
+     * later on simply by multiplying the p-value by 2.
+     *
+     * Recall: If X stochastically dominates Y, the test is for occurrences of Y before X, so the lower value of u is chosen
+     * @param observed - the observed data
+     * @return the minimum of the U counts (set1 dominates 2, set 2 dominates 1)
+     */
+    @Requires({"observed != null", "observed.size() > 0"})
+    @Ensures({"result != null","result.first > 0"})
+    public static Pair<Long,USet> calculateTwoSidedU(TreeSet<Pair<Number,USet>> observed) {
+        int set1SeenSoFar = 0;
+        int set2SeenSoFar = 0;
+        long uSet1DomSet2 = 0;
+        long uSet2DomSet1 = 0;
+        USet previous = null;
+        for ( Pair<Number,USet> dataPoint : observed ) {
+
+            if ( dataPoint.second == USet.SET1 ) {
+                ++set1SeenSoFar;
+            } else {
+                ++set2SeenSoFar;
+            }
+
+            if ( previous != null ) {
+                if ( dataPoint.second == USet.SET1 ) {
+                    uSet2DomSet1 += set2SeenSoFar;
+                } else {
+                    uSet1DomSet2 += set1SeenSoFar;
+                }
+            }
+
+            previous = dataPoint.second;
+        }
+
+        return uSet1DomSet2 < uSet2DomSet1 ? new Pair<Long,USet>(uSet1DomSet2,USet.SET1) : new Pair<Long,USet>(uSet2DomSet1,USet.SET2);
+    }
+
+    /**
+     * Calculates the U-statistic associated with the one-sided hypothesis that "dominator" stochastically dominates
+     * the other U-set. Note that if S1 dominates S2, we want to count the occurrences of points in S2 coming before points in S1.
+     * @param observed - the observed data points, tagged by each set
+     * @param dominator - the set that is hypothesized to be stochastically dominating
+     * @return the u-statistic associated with the hypothesis that dominator stochastically dominates the other set
+     */
+    @Requires({"observed != null","dominator != null","observed.size() > 0"})
+    @Ensures({"result >= 0"})
+    public static long calculateOneSidedU(TreeSet<Pair<Number,USet>> observed,USet dominator) {
+        long otherBeforeDominator = 0l;
+        int otherSeenSoFar = 0;
+        for ( Pair<Number,USet> dataPoint : observed ) {
+            if ( dataPoint.second != dominator ) {
+                ++otherSeenSoFar;
+            } else {
+                otherBeforeDominator += otherSeenSoFar;
+            }
+        }
+
+        return otherBeforeDominator;
+    }
+
+    /**
+     * The Mann-Whitney U statistic follows a recursive equation (that enumerates the proportion of possible
+     * binary strings of "n" zeros, and "m" ones, where a one precedes a zero "u" times). This accessor
+     * calls into that recursive calculation.
+     * @param n: number of set-one entries (hypothesis: set one is stochastically less than set two)
+     * @param m: number of set-two entries
+     * @param u: number of set-two entries that precede set-one entries (e.g. 0,1,0,1,0 -> 3 )
+     * @param twoSided: whether the test is two sided or not. The recursive formula is symmetric, multiply by two for two-sidedness.
+     * @param  mode: whether the mode is a point probability, or a cumulative distribution
+     * @return the probability under the hypothesis that all sequences are equally likely of finding a set-two entry preceding a set-one entry "u" times.
+     */
+    @Requires({"m > 0","n > 0","u >= 0"})
+    @Ensures({"result != null","! Double.isInfinite(result.getFirst())", "! Double.isInfinite(result.getSecond())"})
+    public static Pair<Double,Double> calculatePRecursively(int n, int m, long u, boolean twoSided, ExactMode mode) {
+        if ( m > 8 && n > 5 ) { throw new GATKException(String.format("Please use the appropriate (normal or sum of uniform) approximation. Values n: %d, m: %d",n,m)); }
+        double p = mode == ExactMode.POINT ? cpr(n,m,u) : cumulativeCPR(n,m,u);
+        //p *= twoSided ? 2.0 : 1.0;
+        double z;
+        try {
+
+            if ( mode == ExactMode.CUMULATIVE ) {
+                z = APACHE_NORMAL.inverseCumulativeProbability(p);
+            } else {
+                double sd = Math.sqrt((1.0+1.0/(1+n+m))*(n*m)*(1.0+n+m)/12); // biased variance empirically better fit to distribution then asymptotic variance
+                //System.out.printf("SD is %f and Max is %f and prob is %f%n",sd,1.0/Math.sqrt(sd*sd*2.0*Math.PI),p);
+                if ( p > 1.0/Math.sqrt(sd*sd*2.0*Math.PI) ) { // possible for p-value to be outside the range of the normal. Happens at the mean, so z is 0.
+                    z = 0.0;
+                } else {
+                    if ( u >= n*m/2 ) {
+                        z = Math.sqrt(-2.0*(Math.log(sd)+Math.log(p)+LNSQRT2PI));
+                    } else {
+                        z = -Math.sqrt(-2.0*(Math.log(sd)+Math.log(p)+LNSQRT2PI));
+                    }
+                }
+            }
+
+        } catch (MathException me) {
+            throw new GATKException("A math exception occurred in inverting the probability",me);
+        }
+
+        return new Pair<Double,Double>(z,(twoSided ? 2.0*p : p));
+    }
+
+    /**
+     * Hook into CPR with sufficient warning (for testing purposes)
+     * calls into that recursive calculation.
+     * @param n: number of set-one entries (hypothesis: set one is stochastically less than set two)
+     * @param m: number of set-two entries
+     * @param u: number of set-two entries that precede set-one entries (e.g. 0,1,0,1,0 -> 3 )
+     * @return same as cpr
+     */
+    protected static double calculatePRecursivelyDoNotCheckValuesEvenThoughItIsSlow(int n, int m, long u) {
+        return cpr(n,m,u);
+    }
+
+    /**
+     * For testing
+     *
+     * @param n: number of set-one entries (hypothesis: set one is stochastically less than set two)
+     * @param m: number of set-two entries
+     * @param u: number of set-two entries that precede set-one entries (e.g. 0,1,0,1,0 -> 3 )
+     */
+    protected static long countSequences(int n, int m, long u) {
+        if ( u < 0 ) { return 0; }
+        if ( m == 0 || n == 0 ) { return u == 0 ? 1 : 0; }
+
+        return countSequences(n-1,m,u-m) + countSequences(n,m-1,u);
+    }
+
+    /**
+     * : just a shorter name for calculatePRecursively. See Mann, Whitney, [1947]
+     * @param n: number of set-1 entries
+     * @param m: number of set-2 entries
+     * @param u: number of times a set-2 entry as preceded a set-1 entry
+     * @return recursive p-value
+     */
+    private static double cpr(int n, int m, long u) {
+        if ( u < 0 ) {
+            return 0.0;
+        }
+        if ( m == 0 || n == 0 ) {
+            // there are entries in set 1 or set 2, so no set-2 entry can precede a set-1 entry; thus u must be zero.
+            // note that this exists only for edification, as when we reach this point, the coefficient on this term is zero anyway
+            return ( u == 0 ) ? 1.0 : 0.0;
+        }
+
+
+        return (((double)n)/(n+m))*cpr(n-1,m,u-m) + (((double)m)/(n+m))*cpr(n,m-1,u);
+    }
+
+    private static double cumulativeCPR(int n, int m, long u ) {
+        // from above:
+        // the null hypothesis is that {N} is stochastically less than {M}, so U has counted
+        // occurrences of {M}s before {N}s. We would expect that this should be less than (n*m+1)/2 under
+        // the null hypothesis, so we want to integrate from K=0 to K=U for cumulative cases. Always.
+        double p = 0.0;
+        // optimization using symmetry, use the least amount of sums possible
+        long uSym = ( u <= n*m/2 ) ? u : ((long)n)*m-u;
+        for ( long uu = 0; uu < uSym; uu++ ) {
+            p += cpr(n,m,uu);
+        }
+        // correct by 1.0-p if the optimization above was used (e.g. 1-right tail = left tail)
+        return (u <= n*m/2) ? p : 1.0-p;
+    }
+
+    /**
+     * hook into the data tree, for testing purposes only
+     * @return  observations
+     */
+    protected TreeSet<Pair<Number,USet>> getObservations() {
+        return observations;
+    }
+
+    /**
+     * hook into the set sizes, for testing purposes only
+     * @return size set 1, size set 2
+     */
+    protected Pair<Integer,Integer> getSetSizes() {
+        return new Pair<Integer,Integer>(sizeSet1,sizeSet2);
+    }
+
+    /**
+     * Validates that observations are in the correct format for a MWU test -- this is only called by the contracts API during testing
+     * @param tree - the collection of labeled observations
+     * @return true iff the tree set is valid (no INFs or NaNs, at least one data point in each set)
+     */
+    protected static boolean validateObservations(TreeSet<Pair<Number,USet>> tree) {
+        boolean seen1 = false;
+        boolean seen2 = false;
+        boolean seenInvalid = false;
+        for ( Pair<Number,USet> p : tree) {
+            if ( ! seen1 && p.getSecond() == USet.SET1 ) {
+                seen1 = true;
+            }
+
+            if ( ! seen2 && p.getSecond() == USet.SET2 ) {
+                seen2 = true;
+            }
+
+            if ( Double.isNaN(p.getFirst().doubleValue()) || Double.isInfinite(p.getFirst().doubleValue())) {
+                seenInvalid = true;
+            }
+
+        }
+
+            return ! seenInvalid && seen1 && seen2;
+    }
+
+    /**
+     * A comparator class which uses dithering on tie-breaking to ensure that the internal treeset drops no values
+     * and to ensure that rank ties are broken at random.
+     */
+    private static class DitheringComparator implements Comparator<Pair<Number,USet>>, Serializable {
+
+        public DitheringComparator() {}
+
+        @Override
+        public boolean equals(Object other) { return false; }
+
+        @Override
+        public int compare(Pair<Number,USet> left, Pair<Number,USet> right) {
+            double comp = Double.compare(left.first.doubleValue(),right.first.doubleValue());
+            if ( comp > 0 ) { return 1; }
+            if ( comp < 0 ) { return -1; }
+            return Utils.getRandomGenerator().nextBoolean() ? -1 : 1;
+        }
+    }
+
+    /**
+     * A comparator that reaches into the pair and compares numbers without tie-braking.
+     */
+    private static class NumberedPairComparator implements Comparator<Pair<Number,USet>>, Serializable {
+
+        public NumberedPairComparator() {}
+
+        @Override
+        public boolean equals(Object other) { return false; }
+
+        @Override
+        public int compare(Pair<Number,USet> left, Pair<Number,USet> right ) {
+            return Double.compare(left.first.doubleValue(),right.first.doubleValue());
+        }
+    }
+
+    public enum USet { SET1, SET2 }
+    public enum ExactMode { POINT, CUMULATIVE }
+
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/MathUtils.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/MathUtils.java
new file mode 100644
index 0000000..fb8e8d9
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/MathUtils.java
@@ -0,0 +1,1689 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils;
+
+import com.google.java.contract.Ensures;
+import com.google.java.contract.Requires;
+import org.apache.commons.math.distribution.ExponentialDistribution;
+import org.apache.commons.math.distribution.ExponentialDistributionImpl;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+
+import java.math.BigDecimal;
+import java.util.*;
+
+/**
+ * MathUtils is a static class (no instantiation allowed!) with some useful math methods.
+ *
+ * @author Kiran Garimella
+ */
+public class MathUtils {
+
+    /**
+     * Private constructor.  No instantiating this class!
+     */
+    private MathUtils() {
+    }
+
+    /**
+     * The smallest log10 value we'll emit from normalizeFromLog10 and other functions
+     * where the real-space value is 0.0.
+     */
+    public static final double LOG10_P_OF_ZERO = -1000000.0;
+    public static final double FAIR_BINOMIAL_PROB_LOG10_0_5 = Math.log10(0.5);
+    public static final double LOG_ONE_HALF = -Math.log10(2.0);
+    public static final double LOG_ONE_THIRD = -Math.log10(3.0);
+    private static final double NATURAL_LOG_OF_TEN = Math.log(10.0);
+    private static final double SQUARE_ROOT_OF_TWO_TIMES_PI = Math.sqrt(2.0 * Math.PI);
+
+    /**
+     * A helper class to maintain a cache of log10 values
+     *
+     * Thread-safety: the cache array is only ever replaced wholesale (never mutated in place)
+     * inside the synchronized ensureCacheContains, so an unsynchronized read in get() can at
+     * worst see an older, shorter array whose entries are still correct.
+     */
+    public static class Log10Cache {
+        /**
+         * Get the value of log10(n), expanding the cache as necessary
+         * @param n operand
+         * @return log10(n)
+         */
+        public static double get(final int n) {
+            if (n < 0)
+                throw new ReviewedGATKException(String.format("Can't take the log of a negative number: %d", n));
+            if (n >= cache.length)
+                ensureCacheContains(Math.max(n+10, 2*cache.length));
+            /*
+               Array lookups are not atomic.  It's possible that the reference to cache could be
+               changed between the time the reference is loaded and the data is fetched from the correct
+               offset.  However, the value retrieved can't change, and it's guaranteed to be present in the
+               old reference by the conditional above.
+             */
+            return cache[n];
+        }
+
+        /**
+         * Ensures that the cache contains a value for n.  After completion of ensureCacheContains(n),
+         * #get(n) is guaranteed to return without causing a cache expansion
+         * @param n desired value to be precomputed
+         */
+        public static synchronized void ensureCacheContains(final int n) {
+            if (n < cache.length)
+                return;
+            final double[] newCache = new double[n + 1];
+            System.arraycopy(cache, 0, newCache, 0, cache.length);
+            for (int i=cache.length; i < newCache.length; i++)
+                newCache[i] = Math.log10(i);
+            cache = newCache;
+        }
+
+        //initialize with the special case: log10(0) = NEGATIVE_INFINITY
+        private static double[] cache = new double[] { Double.NEGATIVE_INFINITY };
+    }
+
+    /**
+     * Get a random int between min and max (inclusive) using the global GATK random number generator
+     *
+     * @param min lower bound of the range
+     * @param max upper bound of the range
+     * @return a random int >= min and <= max
+     */
+    public static int randomIntegerInRange( final int min, final int max ) {
+        final int span = max - min + 1;
+        return min + Utils.getRandomGenerator().nextInt(span);
+    }
+
+    /**
+     * Encapsulates the second term of Jacobian log identity for differences up to MAX_TOLERANCE
+     *
+     * The table stores log10(1 + 10^-d) sampled every TABLE_STEP in d; get() quantizes the
+     * requested difference to the nearest table index.
+     */
+    private static class JacobianLogTable {
+
+        // callers skip the lookup entirely for differences >= this value (see the approximate sums above)
+        public static final double MAX_TOLERANCE = 8.0;
+
+        public static double get(final double difference) {
+            // lazy initialization: the first caller pays the one-time table construction cost
+            if (cache == null)
+                initialize();
+            final int index = fastRound(difference * INV_STEP);
+            return cache[index];
+        }
+
+        private static synchronized void initialize() {
+            // re-check under the lock in case another thread initialized while we were waiting
+            if (cache == null) {
+                final int tableSize = (int) (MAX_TOLERANCE / TABLE_STEP) + 1;
+                cache = new double[tableSize];
+                for (int k = 0; k < cache.length; k++)
+                    cache[k] = Math.log10(1.0 + Math.pow(10.0, -((double) k) * TABLE_STEP));
+            }
+        }
+
+        private static final double TABLE_STEP = 0.0001;
+        private static final double INV_STEP = 1.0 / TABLE_STEP;
+        private static double[] cache = null;
+    }
+
+    // A fast implementation of the Math.round() method.  This method does not perform
+    // under/overflow checking, so this shouldn't be used in the general case (but is fine
+    // if one is already making those checks before calling in to the rounding).
+    public static int fastRound(final double d) {
+        if (d > 0.0) {
+            return (int) (d + 0.5d);
+        }
+        return (int) (d - 0.5d);
+    }
+
+    /** Approximate log10 of sum(10^vals[i]) over the entire array; see the indexed overloads. */
+    public static double approximateLog10SumLog10(final double[] vals) {
+        return approximateLog10SumLog10(vals, vals.length);
+    }
+
+    /**
+     * Calculate the approximate log10 sum of an array range.
+     * @param vals the input values.
+     * @param fromIndex the first inclusive index in the input array.
+     * @param toIndex index following the last element to sum in the input array (exclusive).
+     * @return the approximate sum.
+     * @throws IllegalArgumentException if {@code vals} is {@code null} or  {@code fromIndex} is out of bounds
+     * or if {@code toIndex} is larger than
+     * the length of the input array or {@code fromIndex} is larger than {@code toIndex}.
+     */
+    public static double approximateLog10SumLog10(final double[] vals, final int fromIndex, final int toIndex) {
+        // enforce the contract documented above instead of relying on downstream
+        // ArrayIndexOutOfBoundsExceptions from the max-element scan
+        if (vals == null)
+            throw new IllegalArgumentException("vals: cannot be null");
+        if (fromIndex < 0 || toIndex > vals.length || fromIndex > toIndex)
+            throw new IllegalArgumentException(String.format("invalid range [%d,%d) for array of length %d", fromIndex, toIndex, vals.length));
+        if (fromIndex == toIndex) return Double.NEGATIVE_INFINITY;
+        final int maxElementIndex = MathUtils.maxElementIndex(vals,fromIndex,toIndex);
+        double approxSum = vals[maxElementIndex];
+
+        for (int i = fromIndex; i < toIndex; i++) {
+            final double val;
+            if (i == maxElementIndex || (val = vals[i]) == Double.NEGATIVE_INFINITY)
+                continue;
+            final double diff = approxSum - val;
+            if (diff < JacobianLogTable.MAX_TOLERANCE)
+                approxSum += JacobianLogTable.get(diff);
+        }
+        return approxSum;
+    }
+
+    /**
+     * Approximate log10 of sum(10^vals[i]) for i in [0, endIndex), using the Jacobian
+     * log-table correction around the maximum element.
+     */
+    public static double approximateLog10SumLog10(final double[] vals, final int endIndex) {
+
+        final int maxElementIndex = MathUtils.maxElementIndex(vals, endIndex);
+        double approxSum = vals[maxElementIndex];
+
+        for (int i = 0; i < endIndex; i++) {
+            if (i == maxElementIndex || vals[i] == Double.NEGATIVE_INFINITY)
+                continue;
+
+            final double diff = approxSum - vals[i];
+            if (diff < JacobianLogTable.MAX_TOLERANCE) {
+                // See notes from the 2-input implementation below
+                approxSum += JacobianLogTable.get(diff);
+            }
+        }
+
+        return approxSum;
+    }
+
+    /** Approximate log10(10^a + 10^b + 10^c) by folding the two-argument version. */
+    public static double approximateLog10SumLog10(final double a, final double b, final double c) {
+        return approximateLog10SumLog10(a, approximateLog10SumLog10(b, c));
+    }
+
+    /** Approximate log10(10^a + 10^b) via the precomputed Jacobian-logarithm table. */
+    public static double approximateLog10SumLog10(double small, double big) {
+        // order the operands so that "big" really is the larger value
+        if (small > big) {
+            final double swap = big;
+            big = small;
+            small = swap;
+        }
+
+        // if either term is log10(0), the sum is simply the other term
+        if (small == Double.NEGATIVE_INFINITY || big == Double.NEGATIVE_INFINITY)
+            return big;
+
+        // beyond the table tolerance the correction term is negligible
+        final double diff = big - small;
+        if (diff >= JacobianLogTable.MAX_TOLERANCE)
+            return big;
+
+        // Jacobian logarithm identity: log10(10^x + 10^y) = max(x,y) + log10(1 + 10^-|x-y|);
+        // the second term is a table lookup with integer quantization
+        // (corrections pre-stored for 0, 0.0001, 0.0002, ... up to the tolerance)
+        return big + JacobianLogTable.get(diff);
+    }
+
+    /** Sum of a double array; 0.0 for an empty array. */
+    public static double sum(final double[] values) {
+        double total = 0.0;
+        for (int i = 0; i < values.length; i++)
+            total += values[i];
+        return total;
+    }
+
+    /** Sum of an int array, accumulated in a long to avoid overflow. */
+    public static long sum(final int[] x) {
+        long total = 0L;
+        for (int i = 0; i < x.length; i++)
+            total += x[i];
+        return total;
+    }
+
+    /** Sum of a byte array as an int. */
+    public static int sum(final byte[] x) {
+        int total = 0;
+        for (int i = 0; i < x.length; i++)
+            total += x[i];
+        return total;
+    }
+
+    /** x as a percentage of base; returns 0 whenever base is not strictly positive. */
+    public static double percentage(int x, int base) {
+        if (base > 0) {
+            return ((double) x / (double) base) * 100.0;
+        }
+        return 0;
+    }
+
+    public static double ratio(final int num, final int denom) {
+        if ( denom > 0 ) {
+            return ((double) num)/denom;
+        } else {
+            if ( num == 0 && denom == 0) {
+                return 0.0;
+            } else {
+                throw new ReviewedGATKException(String.format("The denominator of a ratio cannot be zero or less than zero: %d/%d",num,denom));
+            }
+        }
+    }
+
+    public static double ratio(final long num, final long denom) {
+        if ( denom > 0L ) {
+            return ((double) num)/denom;
+        } else {
+            if ( num == 0L && denom == 0L ) {
+                return 0.0;
+            } else {
+                throw new ReviewedGATKException(String.format("The denominator of a ratio cannot be zero or less than zero: %d/%d",num,denom));
+            }
+        }
+    }
+
+    /**
+     * Converts a real space array of numbers (typically probabilities) into a log10 array
+     *
+     * @param prRealSpace real-space values; 0 maps to -Infinity and negative values to NaN
+     * @return a newly allocated array of the element-wise log10 values
+     */
+    public static double[] toLog10(final double[] prRealSpace) {
+        double[] log10s = new double[prRealSpace.length];
+        for (int i = 0; i < prRealSpace.length; i++) {
+            log10s[i] = Math.log10(prRealSpace[i]);
+        }
+        return log10s;
+    }
+
+    /** Exact log10 of the sum of 10^log10p[i] from {@code start} to the end of the array. */
+    public static double log10sumLog10(final double[] log10p, final int start) {
+        return log10sumLog10(log10p, start, log10p.length);
+    }
+
+    public static double log10sumLog10(final double[] log10p, final int start, final int finish) {
+
+        if (start >= finish)
+            return Double.NEGATIVE_INFINITY;
+        final int maxElementIndex = MathUtils.maxElementIndex(log10p, start, finish);
+        final double maxValue = log10p[maxElementIndex];
+        if(maxValue == Double.NEGATIVE_INFINITY)
+            return maxValue;
+        double sum = 1.0;
+        for (int i = start; i < finish; i++) {
+            double curVal = log10p[i];
+            double scaled_val = curVal - maxValue;
+            if (i == maxElementIndex || curVal == Double.NEGATIVE_INFINITY) {
+                continue;
+            }
+            else {
+                sum += Math.pow(10.0, scaled_val);
+            }
+        }
+        if ( Double.isNaN(sum) || sum == Double.POSITIVE_INFINITY ) {
+            throw new IllegalArgumentException("log10p: Values must be non-infinite and non-NAN");
+        }
+        return maxValue + (sum != 1.0 ? Math.log10(sum) : 0.0);
+    }
+
+    /** Real-space sum of an array of log10 values: 10^log10sumLog10(values). */
+    public static double sumLog10(final double[] log10values) {
+        return Math.pow(10.0, log10sumLog10(log10values));
+    }
+
+    /** Exact log10 of the sum over the whole array; see the ranged overload. */
+    public static double log10sumLog10(final double[] log10values) {
+        return log10sumLog10(log10values, 0);
+    }
+
+    /** True iff val is neither infinite nor NaN. */
+    public static boolean wellFormedDouble(final double val) {
+        return !(Double.isInfinite(val) || Double.isNaN(val));
+    }
+
+    /** Clamp value into [minBoundary, maxBoundary]; minBoundary wins if the boundaries cross. */
+    public static double bound(final double value, final double minBoundary, final double maxBoundary) {
+        final double capped = Math.min(value, maxBoundary);
+        return Math.max(capped, minBoundary);
+    }
+
+    /** True iff lower <= val <= upper; false for NaN. */
+    public static boolean isBounded(final double val, final double lower, final double upper) {
+        return lower <= val && val <= upper;
+    }
+
+    /** True iff val > 0.  Note: NaN also yields true, since isNegativeOrZero(NaN) is false. */
+    public static boolean isPositive(final double val) {
+        return !isNegativeOrZero(val);
+    }
+
+    /** True iff 0 <= val (including +Infinity); false for NaN. */
+    public static boolean isPositiveOrZero(final double val) {
+        return isBounded(val, 0.0, Double.POSITIVE_INFINITY);
+    }
+
+    /** True iff val <= 0 (including -Infinity); false for NaN. */
+    public static boolean isNegativeOrZero(final double val) {
+        return isBounded(val, Double.NEGATIVE_INFINITY, 0.0);
+    }
+
+    /** True iff val < 0.  Note: NaN also yields true, since isPositiveOrZero(NaN) is false. */
+    public static boolean isNegative(final double val) {
+        return !isPositiveOrZero(val);
+    }
+
+    /**
+     * Compares double values for equality (within 1e-6), or inequality.
+     *
+     * @param a the first double value
+     * @param b the second double value
+     * @return -1 if a is greater than b, 0 if a is equal to b within 1e-6, 1 if b is greater than a.
+     */
+    public static byte compareDoubles(final double a, final double b) {
+        return compareDoubles(a, b, 1e-6);
+    }
+
+    /**
+     * Compares double values for equality (within epsilon), or inequality.
+     *
+     * Note the inverted sign convention relative to Double.compare: a > b yields -1.
+     *
+     * @param a       the first double value
+     * @param b       the second double value
+     * @param epsilon the precision within which two double values will be considered equal
+     * @return -1 if a is greater than b, 0 if a is equal to b within epsilon, 1 if b is greater than a.
+     */
+    public static byte compareDoubles(final double a, final double b, final double epsilon) {
+        if (Math.abs(a - b) < epsilon) {
+            return 0;
+        }
+        if (a > b) {
+            return -1;
+        }
+        return 1;
+    }
+
+    /**
+     * Calculate f(x) = Normal(x | mu = mean, sigma = sd)
+     * @param mean the desired mean of the Normal distribution
+     * @param sd the desired standard deviation of the Normal distribution
+     * @param x the value to evaluate
+     * @return a well-formed double
+     */
+    public static double normalDistribution(final double mean, final double sd, final double x) {
+        // NOTE(review): the message demands sd > 0 but the check permits sd == 0, which
+        // yields an infinite density -- confirm whether sd == 0 should also be rejected
+        if( sd < 0 )
+            throw new IllegalArgumentException("sd: Standard deviation of normal must be >0");
+        if ( ! wellFormedDouble(mean) || ! wellFormedDouble(sd) || ! wellFormedDouble(x) )
+            throw new IllegalArgumentException("mean, sd, or, x : Normal parameters must be well formatted (non-INF, non-NAN)");
+        // reuse the cached sqrt(2*pi) constant, consistent with normalDistributionLog10
+        final double a = 1.0 / (sd * SQUARE_ROOT_OF_TWO_TIMES_PI);
+        final double b = Math.exp(-1.0 * (Math.pow(x - mean, 2.0) / (2.0 * sd * sd)));
+        return a * b;
+    }
+
+    /**
+     * Calculate f(x) = log10 ( Normal(x | mu = mean, sigma = sd) )
+     * @param mean the desired mean of the Normal distribution
+     * @param sd the desired standard deviation of the Normal distribution
+     * @param x the value to evaluate
+     * @return a well-formed double
+     */
+
+    public static double normalDistributionLog10(final double mean, final double sd, final double x) {
+        // NOTE(review): the check permits sd == 0 although the message demands > 0 -- confirm intent
+        if( sd < 0 )
+            throw new IllegalArgumentException("sd: Standard deviation of normal must be >0");
+        if ( ! wellFormedDouble(mean) || ! wellFormedDouble(sd) || ! wellFormedDouble(x) )
+            throw new IllegalArgumentException("mean, sd, or, x : Normal parameters must be well formatted (non-INF, non-NAN)");
+        // log10 of the normalization term, then the log10 of the exponential term (natural log -> log10)
+        final double a = -1.0 * Math.log10(sd * SQUARE_ROOT_OF_TWO_TIMES_PI);
+        final double b = -1.0 * (square(x - mean) / (2.0 * square(sd))) / NATURAL_LOG_OF_TEN;
+        return a + b;
+    }
+
+    /**
+     * Calculate f(x) = x^2
+     * @param x the value to square
+     * @return x * x
+     */
+    public static double square(final double x) {
+        final double result = x * x;
+        return result;
+    }
+
+    /**
+     * Calculates the binomial coefficient in real space.  Computed via its log10 form to
+     * prevent overflow in the intermediate factorials; the real-space result itself may
+     * still overflow to +Infinity for very large n.
+     *
+     * @param n total number of trials
+     * @param k number of successes
+     * @return the binomial coefficient C(n, k)
+     */
+    public static double binomialCoefficient(final int n, final int k) {
+        return Math.pow(10, log10BinomialCoefficient(n, k));
+    }
+
+    /**
+     * @see #binomialCoefficient(int, int) with log10 applied to result
+     *
+     * @throws IllegalArgumentException if {@code n < 0}, {@code k < 0} or {@code k > n}
+     */
+    public static double log10BinomialCoefficient(final int n, final int k) {
+        if ( n < 0 ) {
+            throw new IllegalArgumentException("n: Must have non-negative number of trials");
+        }
+        if ( k > n || k < 0 ) {
+            throw new IllegalArgumentException("k: Must have non-negative number of successes, and no more successes than number of trials");
+        }
+
+        return log10Factorial(n) - log10Factorial(k) - log10Factorial(n - k);
+    }
+
+    /**
+     * Computes a binomial probability.  This is computed using the formula
+     * <p/>
+     * B(k; n; p) = [ n! / ( k! (n - k)! ) ] (p^k)( (1-p)^(n - k) )
+     * <p/>
+     * where n is the number of trials, k is the number of successes, and p is the probability of success
+     *
+     * @param n number of Bernoulli trials
+     * @param k number of successes
+     * @param p probability of success
+     * @return the binomial probability of the specified configuration.  Computes values down to about 1e-237.
+     */
+    public static double binomialProbability(final int n, final int k, final double p) {
+        return Math.pow(10, log10BinomialProbability(n, k, Math.log10(p)));
+    }
+
+    /**
+     * @see #binomialProbability(int, int, double) with log10 applied to result
+     */
+    public static double log10BinomialProbability(final int n, final int k, final double log10p) {
+        if ( log10p > 1e-18 )
+            throw new IllegalArgumentException("log10p: Log-probability must be 0 or less");
+        // use log10OneMinusPow10 instead of log10(1 - 10^log10p) to avoid catastrophic
+        // cancellation when log10p is very close to 0 (i.e. p close to 1)
+        final double log10OneMinusP = log10OneMinusPow10(log10p);
+        return log10BinomialCoefficient(n, k) + log10p * k + log10OneMinusP * (n - k);
+    }
+
+    /**
+     * @see #binomialProbability(int, int, double) with p=0.5
+     *
+     * Fair-coin special case.
+     */
+    public static double binomialProbability(final int n, final int k) {
+        return Math.pow(10, log10BinomialProbability(n, k));
+    }
+
+    /**
+     * @see #binomialProbability(int, int, double) with p=0.5 and log10 applied to result
+     *
+     * Since p = 1-p = 0.5, the probability term collapses to n * log10(0.5).
+     */
+    public static double log10BinomialProbability(final int n, final int k) {
+        return log10BinomialCoefficient(n, k) + (n * FAIR_BINOMIAL_PROB_LOG10_0_5);
+    }
+
+    /** A memoization container for {@link #binomialCumulativeProbability(int, int, int)}.  Synchronized to accomodate multithreading. */
+    private static final Map<Long, Double> BINOMIAL_CUMULATIVE_PROBABILITY_MEMOIZATION_CACHE = 
+            Collections.synchronizedMap(new LRUCache<Long, Double>(10_000)); 
+    
+    /**
+     * Primitive integer-triplet bijection into long.  Returns null when the bijection function fails (in lieu of an exception), which will
+     * happen when: any value is negative or larger than a short.  This method is optimized for speed; it is not intended to serve as a 
+     * utility function.
+     */
+    static Long fastGenerateUniqueHashFromThreeIntegers(final int one, final int two, final int three) {
+        if (one < 0 || two < 0 || three < 0 || Short.MAX_VALUE < one || Short.MAX_VALUE < two || Short.MAX_VALUE < three) {
+            return null;
+        } else {
+            long result = 0;
+            result += (short) one;
+            result <<= 16;
+            result += (short) two;
+            result <<= 16;
+            result += (short) three;
+            return result;
+        }
+    }
+    
+    /**
+     * Performs the cumulative sum of binomial probabilities, where the probability calculation is done in log space.
+     * Assumes that the probability of a successful hit is fair (i.e. 0.5).
+     * 
+     * This pure function is memoized because of its expensive BigDecimal calculations.
+     *
+     * @param n         number of attempts for the number of hits
+     * @param k_start   start (inclusive) of the cumulant sum (over hits)
+     * @param k_end     end (inclusive) of the cumulant sum (over hits)
+     * @return - returns the cumulative probability
+     * @throws IllegalArgumentException if k_end is greater than n
+     */
+    public static double binomialCumulativeProbability(final int n, final int k_start, final int k_end) {
+        if ( k_end > n )
+            throw new IllegalArgumentException(String.format("Value for k_end (%d) is greater than n (%d)", k_end, n));
+
+        // Fetch cached value, if applicable.  The key is null when any argument exceeds
+        // Short.MAX_VALUE, in which case memoization is skipped entirely.
+        final Long memoizationKey = fastGenerateUniqueHashFromThreeIntegers(n, k_start, k_end);
+        final Double memoizationCacheResult;
+        if (memoizationKey != null) {
+            memoizationCacheResult = BINOMIAL_CUMULATIVE_PROBABILITY_MEMOIZATION_CACHE.get(memoizationKey);
+        } else {
+            memoizationCacheResult = null;
+        }
+
+        final double result;
+        if (memoizationCacheResult != null) {
+            result = memoizationCacheResult;
+        } else {
+            double cumProb = 0.0;
+            double prevProb;
+            BigDecimal probCache = BigDecimal.ZERO;
+
+            for (int hits = k_start; hits <= k_end; hits++) {
+                prevProb = cumProb;
+                final double probability = binomialProbability(n, hits);
+                cumProb += probability;
+                if (probability > 0 && cumProb - prevProb < probability / 2) { // loss of precision
+                    // bank the accumulated sum into the BigDecimal and restart the double accumulator
+                    probCache = probCache.add(new BigDecimal(prevProb));
+                    cumProb = 0.0;
+                    hits--; // repeat loop
+                    // prevProb changes at start of loop
+                }
+            }
+
+            result = probCache.add(new BigDecimal(cumProb)).doubleValue();
+            if (memoizationKey != null) {
+                BINOMIAL_CUMULATIVE_PROBABILITY_MEMOIZATION_CACHE.put(memoizationKey, result);
+            }
+        }
+        return result;
+    }
+
+    private static final double LOG1MEXP_THRESHOLD = Math.log(0.5);
+
+    private static final double LN_10 = Math.log(10);
+
+    /**
+     * Calculates {@code log(1-exp(a))} without losing precision.
+     *
+     * <p>
+     *     This is based on the approach described in:
+     *
+     * </p>
+     * <p>
+     *     Maechler M, Accurately Computing log(1-exp(-|a|)) Assessed by the Rmpfr package, 2012 <br/>
+     *     <a href="http://cran.r-project.org/web/packages/Rmpfr/vignettes/log1mexp-note.pdf">Online document</a>.
+     *
+     * </p>
+     *
+     * @param a the input exponent.
+     * @return {@link Double#NaN NaN} if {@code a > 0}, otherwise the corresponding value.
+     */
+    public static double log1mexp(final double a) {
+        if (a > 0) return Double.NaN;
+        if (a == 0) return Double.NEGATIVE_INFINITY;
+
+        // below log(0.5) log1p(-exp(a)) is the numerically stable form; above it, log(-expm1(a)) is
+        return (a < LOG1MEXP_THRESHOLD) ? Math.log1p(-Math.exp(a)) : Math.log(-Math.expm1(a));
+    }
+
+    /**
+     * Calculates {@code log10(1-10^a)} without losing precision.
+     *
+     * <p>
+     *     This is based on the approach described in:
+     *
+     * </p>
+     * <p>
+     *     Maechler M, Accurately Computing log(1-exp(-|a|)) Assessed by the Rmpfr package, 2012 <br/>
+     *     <a href="http://cran.r-project.org/web/packages/Rmpfr/vignettes/log1mexp-note.pdf">Online document</a>.
+     * </p>
+     *
+     * @param a the input exponent.
+     * @return {@link Double#NaN NaN} if {@code a > 0}, otherwise the corresponding value.
+     */
+    public static double log10OneMinusPow10(final double a) {
+        if (a > 0) return Double.NaN;
+        if (a == 0) return Double.NEGATIVE_INFINITY;
+        // change of base: delegate to the natural-log implementation and convert back
+        final double b = a * LN_10;
+        return log1mexp(b) / LN_10;
+    }
+
+    /**
+     * Calculates the log10 of the multinomial coefficient. Designed to prevent
+     * overflows even with very large numbers.
+     *
+     * @param n total number of trials
+     * @param k array of any size with the number of successes for each grouping (k1, k2, k3, ..., km)
+     * @return the log10 of the multinomial coefficient n! / (k1! k2! ... km!)
+     */
+    public static double log10MultinomialCoefficient(final int n, final int[] k) {
+        if ( n < 0 )
+            throw new IllegalArgumentException("n: Must have non-negative number of trials");
+        double denominator = 0.0;
+        int sum = 0;
+        for (int x : k) {
+            if ( x < 0 )
+                throw new IllegalArgumentException("x element of k: Must have non-negative observations of group");
+            if ( x > n )
+                throw new IllegalArgumentException("x element of k, n: Group observations must be bounded by n");
+            denominator += log10Factorial(x);
+            sum += x;
+        }
+        if ( sum != n )
+            throw new IllegalArgumentException("k and n: Sum of observations in multinomial must sum to total number of trials");
+        return log10Factorial(n) - denominator;
+    }
+
+    /**
+     * Computes the log10 of the multinomial distribution probability given a vector
+     * of log10 probabilities. Designed to prevent overflows even with very large numbers.
+     *
+     * @param n      number of trials
+     * @param k      array of number of successes for each possibility
+     * @param log10p array of log10 probabilities
+     * @return log10MultinomialCoefficient(n, k) plus the sum over i of k[i] * log10p[i]
+     */
+    public static double log10MultinomialProbability(final int n, final int[] k, final double[] log10p) {
+        if (log10p.length != k.length)
+            throw new IllegalArgumentException("p and k: Array of log10 probabilities must have the same size as the array of number of sucesses: " + log10p.length + ", " + k.length);
+        double log10Prod = 0.0;
+        for (int i = 0; i < log10p.length; i++) {
+            if ( log10p[i] > 1e-18 )
+                throw new IllegalArgumentException("log10p: Log-probability must be <= 0");
+            log10Prod += log10p[i] * k[i];
+        }
+        return log10MultinomialCoefficient(n, k) + log10Prod;
+    }
+
+    /**
+     * Computes a multinomial coefficient efficiently avoiding overflow even for large numbers.
+     * This is computed using the formula:
+     * <p/>
+     * M(x1,x2,...,xk; n) = [ n! / (x1! x2! ... xk!) ]
+     * <p/>
+     * where xi represents the number of times outcome i was observed, n is the number of total observations.
+     * In this implementation, the value of n is inferred as the sum over i of xi.
+     *
+     * @param k an int[] of counts, where each element represents the number of times a certain outcome was observed
+     * @return the multinomial of the specified configuration.
+     */
+    public static double multinomialCoefficient(final int[] k) {
+        // n is inferred as the total number of observations (int accumulation -- assumes no overflow)
+        int n = 0;
+        for (int xi : k) {
+            n += xi;
+        }
+
+        return Math.pow(10, log10MultinomialCoefficient(n, k));
+    }
+
+    /**
+     * Computes a multinomial probability efficiently avoiding overflow even for large numbers.
+     * This is computed using the formula:
+     * <p/>
+     * M(x1,x2,...,xk; n; p1,p2,...,pk) = [ n! / (x1! x2! ... xk!) ] (p1^x1)(p2^x2)(...)(pk^xk)
+     * <p/>
+     * where xi represents the number of times outcome i was observed, n is the number of total observations, and
+     * pi represents the probability of the i-th outcome to occur.  In this implementation, the value of n is
+     * inferred as the sum over i of xi.
+     *
+     * @param k an int[] of counts, where each element represents the number of times a certain outcome was observed
+     * @param p a double[] of probabilities, where each element represents the probability a given outcome can occur
+     * @return the multinomial probability of the specified configuration.
+     */
+    public static double multinomialProbability(final int[] k, final double[] p) {
+        if (p.length != k.length)
+            throw new IllegalArgumentException("p and k: Array of probabilities must have the same size as the array of number of sucesses: " + p.length + ", " + k.length);
+
+        // convert the real-space probabilities to log10 space and infer n as the total count
+        int n = 0;
+        final double[] log10P = new double[p.length];
+        for (int i = 0; i < p.length; i++) {
+            log10P[i] = Math.log10(p[i]);
+            n += k[i];
+        }
+        return Math.pow(10, log10MultinomialProbability(n, k, log10P));
+    }
+
+    /**
+     * calculate the Root Mean Square of an array of bytes
+     *
+     * @param x a byte[] of numbers
+     * @return the RMS of the specified numbers; 0.0 for an empty array.
+     */
+    public static double rms(final byte[] x) {
+        if (x.length == 0)
+            return 0.0;
+
+        double rms = 0.0;
+        for (int i : x)
+            rms += i * i;
+        rms /= x.length;
+        return Math.sqrt(rms);
+    }
+
+    /**
+     * calculate the Root Mean Square of an array of integers
+     *
+     * @param x an int[] of numbers
+     * @return the RMS of the specified numbers; 0.0 for an empty array.
+     */
+    public static double rms(final int[] x) {
+        if (x.length == 0)
+            return 0.0;
+
+        double rms = 0.0;
+        for (int i : x)
+            rms += (double) i * i;  // promote before squaring: i * i overflows int for |i| > 46340
+        rms /= x.length;
+        return Math.sqrt(rms);
+    }
+
+    /**
+     * calculate the Root Mean Square of an array of doubles
+     *
+     * @param x a double[] of numbers; elements are unboxed, so a null entry throws
+     *          NullPointerException -- TODO confirm callers never pass nulls
+     * @return the RMS of the specified numbers; 0.0 for an empty array.
+     */
+    public static double rms(final Double[] x) {
+        if (x.length == 0)
+            return 0.0;
+
+        double rms = 0.0;
+        for (Double i : x)
+            rms += i * i;
+        rms /= x.length;
+        return Math.sqrt(rms);
+    }
+
+    /**
+     * calculate the Root Mean Square of a collection of integers
+     *
+     * @param l the values; 0.0 is returned for an empty collection
+     * @return the RMS of the specified numbers.
+     */
+    public static double rms(final Collection<Integer> l) {
+        if (l.size() == 0)
+            return 0.0;
+
+        double rms = 0.0;
+        for (int i : l)
+            rms += (double) i * i;  // promote before squaring: i * i overflows int for |i| > 46340
+        rms /= l.size();
+        return Math.sqrt(rms);
+    }
+
+    /** Squared Euclidean distance between x and y (lengths assumed equal -- not checked). */
+    public static double distanceSquared(final double[] x, final double[] y) {
+        double total = 0.0;
+        for (int i = 0; i < x.length; i++) {
+            final double delta = x[i] - y[i];
+            total += delta * delta;
+        }
+        return total;
+    }
+
+    /** Round num to the given number of decimal digits. */
+    public static double round(final double num, final int digits) {
+        final double scale = Math.pow(10.0, (double) digits);
+        return Math.round(num * scale) / scale;
+    }
+
+    /**
+     * normalizes the log10-based array.  ASSUMES THAT ALL ARRAY ENTRIES ARE <= 0 (<= 1 IN REAL-SPACE).
+     *
+     * Delegates to the three-argument overload with keepInLogSpace = false, so the input
+     * array is never modified and a fresh array is always returned.
+     *
+     * @param array             the array to be normalized
+     * @param takeLog10OfOutput if true, the output will be transformed back into log10 units
+     * @return a newly allocated array corresponding the normalized values in array, maybe log10 transformed
+     */
+    public static double[] normalizeFromLog10(final double[] array, final boolean takeLog10OfOutput) {
+        return normalizeFromLog10(array, takeLog10OfOutput, false);
+    }
+
    /**
     * See #normalizeFromLog10 but with the additional option to use an approximation that keeps the calculation always in log-space
     *
     * @param array             the log10 values to normalize.  WARNING: when keepInLogSpace is
     *                          true this array is modified in place and returned, not copied.
     * @param takeLog10OfOutput if true, the normalized output is converted back into log10 units
     * @param keepInLogSpace    if true, only subtract the max entry from each element (an
     *                          approximation that never leaves log space)
     *
     * @return the normalized values; a newly allocated array unless keepInLogSpace is true
     */
    public static double[] normalizeFromLog10(final double[] array, final boolean takeLog10OfOutput, final boolean keepInLogSpace) {
        // for precision purposes, we need to add (or really subtract, since they're
        // all negative) the largest value; also, we need to convert to normal-space.
        double maxValue = arrayMax(array);

        // we may decide to just normalize in log space without converting to linear space
        if (keepInLogSpace) {
            for (int i = 0; i < array.length; i++) {
                array[i] -= maxValue;  // NOTE: in-place mutation of the caller's array
            }
            return array;
        }

        // default case: go to linear space
        double[] normalized = new double[array.length];

        for (int i = 0; i < array.length; i++)
            normalized[i] = Math.pow(10, array[i] - maxValue);

        // normalize
        double sum = 0.0;
        for (int i = 0; i < array.length; i++)
            sum += normalized[i];
        for (int i = 0; i < array.length; i++) {
            double x = normalized[i] / sum;
            if (takeLog10OfOutput) {
                x = Math.log10(x);
                // if the log10 underflowed (below LOG10_P_OF_ZERO or -Infinity), fall
                // back to the max-shifted input value instead of the degenerate result
                if ( x < LOG10_P_OF_ZERO || Double.isInfinite(x) )
                    x = array[i] - maxValue;
            }

            normalized[i] = x;
        }

        return normalized;
    }
+
    /**
     * normalizes the log10-based array.  ASSUMES THAT ALL ARRAY ENTRIES ARE <= 0 (<= 1 IN REAL-SPACE).
     *
     * Convenience overload returning real-space (non-log) normalized values.
     *
     * @param array the array to be normalized
     * @return a newly allocated array corresponding the normalized values in array
     */
    public static double[] normalizeFromLog10(final double[] array) {
        return normalizeFromLog10(array, false);
    }
+
+    /**
+     * normalizes the real-space probability array.
+     *
+     * Does not assume anything about the values in the array, beyond that no elements are below 0.  It's ok
+     * to have values in the array of > 1, or have the sum go above 0.
+     *
+     * @param array the array to be normalized
+     * @return a newly allocated array corresponding the normalized values in array
+     */
+    @Requires("array != null")
+    @Ensures({"result != null"})
+    public static double[] normalizeFromRealSpace(final double[] array) {
+        if ( array.length == 0 )
+            return array;
+
+        final double sum = sum(array);
+        final double[] normalized = new double[array.length];
+        if ( sum < 0.0 ) throw new IllegalArgumentException("Values in probability array sum to a negative number " + sum);
+        for ( int i = 0; i < array.length; i++ ) {
+            normalized[i] = array[i] / sum;
+        }
+        return normalized;
+    }
+
    /** Returns the index of the largest element of the whole array (ties -> lowest index). */
    public static int maxElementIndex(final double[] array) {
        return maxElementIndex(array, array.length);
    }
+
+    public static int maxElementIndex(final double[] array, final int start, final int endIndex) {
+        if (array == null || array.length == 0)
+            throw new IllegalArgumentException("Array cannot be null!");
+
+        if (start > endIndex) {
+           throw new IllegalArgumentException("Start cannot be after end.");
+        }
+
+        int maxI = start;
+        for (int i = (start+1); i < endIndex; i++) {
+            if (array[i] > array[maxI])
+                maxI = i;
+        }
+        return maxI;
+    }
+
    /** Returns the index of the largest element among array[0 .. endIndex-1]. */
    public static int maxElementIndex(final double[] array, final int endIndex) {
        return maxElementIndex(array, 0, endIndex);
    }
+
    /** Returns the index of the largest element of the whole int array (ties -> lowest index). */
    public static int maxElementIndex(final int[] array) {
        return maxElementIndex(array, array.length);
    }
+
    /** Returns the index of the largest element of the whole byte array (ties -> lowest index). */
    public static int maxElementIndex(final byte[] array) {
        return maxElementIndex(array, array.length);
    }
+
+    public static int maxElementIndex(final int[] array, final int endIndex) {
+        if (array == null || array.length == 0)
+            throw new IllegalArgumentException("Array cannot be null!");
+
+        int maxI = 0;
+        for (int i = 1; i < endIndex; i++) {
+            if (array[i] > array[maxI])
+                maxI = i;
+        }
+        return maxI;
+    }
+
+    public static int maxElementIndex(final byte[] array, final int endIndex) {
+        if (array == null || array.length == 0)
+            throw new IllegalArgumentException("Array cannot be null!");
+
+        int maxI = 0;
+        for (int i = 1; i < endIndex; i++) {
+            if (array[i] > array[maxI])
+                maxI = i;
+        }
+
+        return maxI;
+    }
+
    /** Returns the largest value in array (rejects null/empty via maxElementIndex). */
    public static int arrayMax(final int[] array) {
        return array[maxElementIndex(array)];
    }
+
+
    /** Returns the largest value in array (rejects null/empty via maxElementIndex). */
    public static double arrayMax(final double[] array) {
        return array[maxElementIndex(array)];
    }
+
    /** Returns the largest value among array[0 .. endIndex-1]. */
    public static double arrayMax(final double[] array, final int endIndex) {
        return array[maxElementIndex(array, endIndex)];
    }
+
    /** Returns the smallest value in array (rejects null/empty via minElementIndex). */
    public static double arrayMin(final double[] array) {
        return array[minElementIndex(array)];
    }
+
    /** Returns the smallest value in array (rejects null/empty via minElementIndex). */
    public static int arrayMin(final int[] array) {
        return array[minElementIndex(array)];
    }
+
    /** Returns the smallest value in array (rejects null/empty via minElementIndex). */
    public static byte arrayMin(final byte[] array) {
        return array[minElementIndex(array)];
    }
+
+    /**
+     * Compute the min element of a List<Integer>
+     * @param array a non-empty list of integer
+     * @return the min
+     */
+    public static int arrayMin(final List<Integer> array) {
+        if ( array == null || array.isEmpty() ) throw new IllegalArgumentException("Array must be non-null and non-empty");
+        int min = array.get(0);
+        for ( final int i : array )
+            if ( i < min ) min = i;
+        return min;
+    }
+
+    /**
+     * Compute the median element of the list of integers
+     * @param array a list of integers
+     * @return the median element
+     */
+    public static <T extends Comparable<? super T>> T median(final List<T> array) {
+         /* TODO -- from Valentin
+        the current implementation is not the usual median when the input is of even length. More concretely it returns the ith element of the list where i = floor(input.size() / 2).
+
+        But actually that is not the "usual" definition of a median, as it is supposed to return the average of the two middle values when the sample length is an even number (i.e. median(1,2,3,4,5,6) == 3.5). [Sources: R and wikipedia]
+
+        My suggestion for a solution is then:
+
+        unify median and medianDoubles to public static <T extends Number> T median(Collection<T>)
+        check on null elements and throw an exception if there are any or perhaps return a null; documented in the javadoc.
+        relocate, rename and refactor MathUtils.median(X) to Utils.ithElement(X,X.size()/2)
+        In addition, the current median implementation sorts the whole input list witch is O(n log n). However find out the ith element (thus calculate the median) can be done in O(n)
+        */
+        if ( array == null ) throw new IllegalArgumentException("Array must be non-null");
+        final int size = array.size();
+        if ( size == 0 ) throw new IllegalArgumentException("Array cannot have size 0");
+        else if ( size == 1 ) return array.get(0);
+        else {
+            final ArrayList<T> sorted = new ArrayList<>(array);
+            Collections.sort(sorted);
+            return sorted.get(size / 2);
+        }
+    }
+
+    public static int minElementIndex(final double[] array) {
+        if (array == null || array.length == 0)
+            throw new IllegalArgumentException("Array cannot be null!");
+
+        int minI = 0;
+        for (int i = 1; i < array.length; i++) {
+            if (array[i] < array[minI])
+                minI = i;
+        }
+
+        return minI;
+    }
+
+    public static int minElementIndex(final byte[] array) {
+        if (array == null || array.length == 0)
+            throw new IllegalArgumentException("Array cannot be null!");
+
+        int minI = 0;
+        for (int i = 1; i < array.length; i++) {
+            if (array[i] < array[minI])
+                minI = i;
+        }
+
+        return minI;
+    }
+
+    public static int minElementIndex(final int[] array) {
+        if (array == null || array.length == 0)
+            throw new IllegalArgumentException("Array cannot be null!");
+
+        int minI = 0;
+        for (int i = 1; i < array.length; i++) {
+            if (array[i] < array[minI])
+                minI = i;
+        }
+
+        return minI;
+    }
+
+    public static int arrayMaxInt(final List<Integer> array) {
+        if (array == null)
+            throw new IllegalArgumentException("Array cannot be null!");
+        if (array.size() == 0)
+            throw new IllegalArgumentException("Array size cannot be 0!");
+
+        int m = array.get(0);
+        for (int e : array)
+            m = Math.max(m, e);
+        return m;
+    }
+
+    public static int sum(final List<Integer> list ) {
+        int sum = 0;
+        for ( Integer i : list ) {
+          sum += i;
+        }
+        return sum;
+    }
+
+    public static double average(final List<Long> vals, final int maxI) {
+        long sum = 0L;
+
+        int i = 0;
+        for (long x : vals) {
+            if (i > maxI)
+                break;
+            sum += x;
+            i++;
+        }
+
+        return (1.0 * sum) / i;
+    }
+
    /** Averages all values in the list (NaN for an empty list, from 0/0). */
    public static double average(final List<Long> vals) {
        return average(vals, vals.size());
    }
+
+    public static int countOccurrences(final char c, final String s) {
+        int count = 0;
+        for (int i = 0; i < s.length(); i++) {
+            count += s.charAt(i) == c ? 1 : 0;
+        }
+        return count;
+    }
+
+    public static <T> int countOccurrences(T x, List<T> l) {
+        int count = 0;
+        for (T y : l) {
+            if (x.equals(y))
+                count++;
+        }
+
+        return count;
+    }
+
+    public static int countOccurrences(byte element, byte[] array) {
+        int count = 0;
+        for (byte y : array) {
+            if (element == y)
+                count++;
+        }
+
+        return count;
+    }
+
+    public static int countOccurrences(final boolean element, final boolean[] array) {
+        int count = 0;
+        for (final boolean b : array) {
+            if (element == b)
+                count++;
+        }
+
+        return count;
+    }
+
+
    /**
     * Returns k random indices drawn with replacement from the range 0..(n-1)
     * (the previous javadoc said 0..(k-1), which contradicted the code's nextInt(n)).
     *
     * @param n the total number of indices sampled from
     * @param k the number of random indices to draw (with replacement)
     * @return a list of k random indices ranging from 0 to (n-1) with possible duplicates
     */
    static public ArrayList<Integer> sampleIndicesWithReplacement(final int n, final int k) {

        ArrayList<Integer> chosen_balls = new ArrayList<Integer>(k);
        for (int i = 0; i < k; i++) {
            // draw uniformly from [0, n) using the shared project RNG
            chosen_balls.add(Utils.getRandomGenerator().nextInt(n));
        }

        return chosen_balls;
    }
+
    /**
     * Returns k random indices drawn without replacement from the range 0..(n-1)
     * (the previous javadoc said 0..(k-1), which contradicted the code).
     *
     * Note: assumes k <= n; subList throws IndexOutOfBoundsException otherwise.
     *
     * @param n the total number of indices sampled from
     * @param k the number of random indices to draw (without replacement)
     * @return a list of k random indices ranging from 0 to (n-1) without duplicates
     */
    static public ArrayList<Integer> sampleIndicesWithoutReplacement(final int n, final int k) {
        ArrayList<Integer> chosen_balls = new ArrayList<Integer>(k);

        for (int i = 0; i < n; i++) {
            chosen_balls.add(i);
        }

        Collections.shuffle(chosen_balls, Utils.getRandomGenerator());

        // subList is only a view of the shuffled list; copy so the result is independent
        return new ArrayList<Integer>(chosen_balls.subList(0, k));
    }
+
+    /**
+     * Given a list of indices into a list, return those elements of the list with the possibility of drawing list elements multiple times
+     *
+     * @param indices the list of indices for elements to extract
+     * @param list    the list from which the elements should be extracted
+     * @param <T>     the template type of the ArrayList
+     * @return a new ArrayList consisting of the elements at the specified indices
+     */
+    static public <T> ArrayList<T> sliceListByIndices(final List<Integer> indices, final List<T> list) {
+        ArrayList<T> subset = new ArrayList<T>();
+
+        for (int i : indices) {
+            subset.add(list.get(i));
+        }
+
+        return subset;
+    }
+
+    /**
+     * Given two log-probability vectors, compute log of vector product of them:
+     * in Matlab notation, return log10(10.*x'*10.^y)
+     * @param x vector 1
+     * @param y vector 2
+     * @return a double representing log (dotProd(10.^x,10.^y)
+     */
+    public static double logDotProduct(final double [] x, final double[] y) {
+        if (x.length != y.length)
+            throw new ReviewedGATKException("BUG: Vectors of different lengths");
+
+        double tmpVec[] = new double[x.length];
+
+        for (int k=0; k < tmpVec.length; k++ ) {
+            tmpVec[k] = x[k]+y[k];
+        }
+
+        return log10sumLog10(tmpVec);
+
+
+
+    }
+
    /**
     * Check that the log10 prob vector vector is well formed
     *
     * @param vector         the log10 probabilities to validate
     * @param expectedSize   the required length of vector
     * @param shouldSumToOne if true, additionally require the probabilities to sum to 1
     *                       within 1e-4 (as computed by sumLog10 -- presumably the
     *                       real-space sum of the log10 entries; confirm against sumLog10)
     *
     * @return true if vector is well-formed, false otherwise
     */
    public static boolean goodLog10ProbVector(final double[] vector, final int expectedSize, final boolean shouldSumToOne) {
        // a vector of the wrong length is malformed regardless of its content
        if ( vector.length != expectedSize ) return false;

        // each entry must individually be a valid log10 probability (<= 0, not NaN)
        for ( final double pr : vector ) {
            if ( ! goodLog10Probability(pr) )
                return false;
        }

        if ( shouldSumToOne && compareDoubles(sumLog10(vector), 1.0, 1e-4) != 0 )
            return false;

        return true; // everything is good
    }
+
    /**
     * Checks that the result is a well-formed log10 probability
     *
     * Convenience overload of the two-argument version with allowNegativeInfinity == true.
     *
     * @param result a supposedly well-formed log10 probability value.  By default allows
     *               -Infinity values, as log10(0.0) == -Infinity.
     * @return true if result is really well formed
     */
    public static boolean goodLog10Probability(final double result) {
        return goodLog10Probability(result, true);
    }
+
+    /**
+     * Checks that the result is a well-formed log10 probability
+     *
+     * @param result a supposedly well-formed log10 probability value
+     * @param allowNegativeInfinity should we consider a -Infinity value ok?
+     * @return true if result is really well formed
+     */
+    public static boolean goodLog10Probability(final double result, final boolean allowNegativeInfinity) {
+        return result <= 0.0 && result != Double.POSITIVE_INFINITY && (allowNegativeInfinity || result != Double.NEGATIVE_INFINITY) && ! Double.isNaN(result);
+    }
+
+    /**
+     * Checks that the result is a well-formed probability
+     *
+     * @param result a supposedly well-formed probability value
+     * @return true if result is really well formed
+     */
+    public static boolean goodProbability(final double result) {
+        return result >= 0.0 && result <= 1.0 && ! Double.isInfinite(result) && ! Double.isNaN(result);
+    }
+
+    /**
+     * A utility class that computes on the fly average and standard deviation for a stream of numbers.
+     * The number of observations does not have to be known in advance, and can be also very big (so that
+     * it could overflow any naive summation-based scheme or cause loss of precision).
+     * Instead, adding a new number <code>observed</code>
+     * to a sample with <code>add(observed)</code> immediately updates the instance of this object so that
+     * it contains correct mean and standard deviation for all the numbers seen so far. Source: Knuth, vol.2
+     * (see also e.g. http://www.johndcook.com/standard_deviation.html for online reference).
+     */
+    public static class RunningAverage {
+        private double mean = 0.0;
+        private double s = 0.0;
+        private long obs_count = 0;
+
+        public void add(double obs) {
+            obs_count++;
+            double oldMean = mean;
+            mean += (obs - mean) / obs_count; // update mean
+            s += (obs - oldMean) * (obs - mean);
+        }
+
+        public void addAll(Collection<Number> col) {
+            for (Number o : col) {
+                add(o.doubleValue());
+            }
+        }
+
+        public double mean() {
+            return mean;
+        }
+
+        public double stddev() {
+            return Math.sqrt(s / (obs_count - 1));
+        }
+
+        public double var() {
+            return s / (obs_count - 1);
+        }
+
+        public long observationCount() {
+            return obs_count;
+        }
+
+        public RunningAverage clone() {
+            RunningAverage ra = new RunningAverage();
+            ra.mean = this.mean;
+            ra.s = this.s;
+            ra.obs_count = this.obs_count;
+            return ra;
+        }
+
+        public void merge(RunningAverage other) {
+            if (this.obs_count > 0 || other.obs_count > 0) { // if we have any observations at all
+                this.mean = (this.mean * this.obs_count + other.mean * other.obs_count) / (this.obs_count + other.obs_count);
+                this.s += other.s;
+            }
+            this.obs_count += other.obs_count;
+        }
+    }
+
+    //
+    // useful common utility routines
+    //
+
+    static public double max(double x0, double x1, double x2) {
+        double a = Math.max(x0, x1);
+        return Math.max(a, x2);
+    }
+
    /**
     * Converts LN to LOG10
     *
     * Uses the identity log10(x) = ln(x) * log10(e).
     *
     * @param ln log(x)
     * @return log10(x)
     */
    public static double lnToLog10(final double ln) {
        return ln * Math.log10(Math.E);
    }
+
+    /**
+     * Constants to simplify the log gamma function calculation.
+     */
+    private static final double zero = 0.0, one = 1.0, half = .5, a0 = 7.72156649015328655494e-02, a1 = 3.22467033424113591611e-01, a2 = 6.73523010531292681824e-02, a3 = 2.05808084325167332806e-02, a4 = 7.38555086081402883957e-03, a5 = 2.89051383673415629091e-03, a6 = 1.19270763183362067845e-03, a7 = 5.10069792153511336608e-04, a8 = 2.20862790713908385557e-04, a9 = 1.08011567247583939954e-04, a10 = 2.52144565451257326939e-05, a11 = 4.48640949618915160150e-05, tc = 1.46163214496836224576e [...]
+
    /**
     * Extracts the high-order 32 bits of the IEEE-754 bit pattern of x
     * (sign, exponent and top of the mantissa), as used by the FDLIBM
     * lnGamma range-reduction tests below.
     */
    private static final int HI(final double x) {
        return (int) (Double.doubleToLongBits(x) >> 32);
    }
+
    /**
     * Extracts the low-order 32 bits of the IEEE-754 bit pattern of x
     * (bottom of the mantissa), the companion of HI() for the FDLIBM
     * lnGamma implementation.
     */
    private static final int LO(final double x) {
        return (int) Double.doubleToLongBits(x);
    }
+
    /**
     * Most efficent implementation of the lnGamma (FDLIBM)
     * Use via the log10Gamma wrapper method.
     *
     * Port of FDLIBM's lgamma: operates on the raw IEEE-754 bits of x (via HI/LO)
     * to select between special cases (inf/NaN/zero/negative -> +Inf or NaN),
     * tiny |x|, polynomial approximations on [0, 2), a recurrence for [2, 8),
     * an asymptotic series up to 2^58, and x*(ln(x)-1) beyond.  Relies on the
     * polynomial coefficient constants (a*, t*, u*, v*, s*, r*, w*, tc, tf, tt)
     * declared above; do not reorder or "simplify" the arithmetic -- the exact
     * evaluation order is part of the accuracy guarantee.
     *
     * @param x the argument; NaN is returned for x <= 0 and NaN input
     * @return ln(Gamma(x))
     */
    private static double lnGamma(final double x) {
        double t, y, z, p, p1, p2, p3, q, r, w;
        int i;

        int hx = HI(x);
        int lx = LO(x);

        /* purge off +-inf, NaN, +-0, and negative arguments */
        int ix = hx & 0x7fffffff;
        if (ix >= 0x7ff00000)
            return Double.POSITIVE_INFINITY;
        if ((ix | lx) == 0 || hx < 0)
            return Double.NaN;
        if (ix < 0x3b900000) {    /* |x|<2**-70, return -log(|x|) */
            return -Math.log(x);
        }

        /* purge off 1 and 2 */
        if ((((ix - 0x3ff00000) | lx) == 0) || (((ix - 0x40000000) | lx) == 0))
            r = 0;
            /* for x < 2.0 */
        else if (ix < 0x40000000) {
            if (ix <= 0x3feccccc) {     /* lgamma(x) = lgamma(x+1)-log(x) */
                r = -Math.log(x);
                if (ix >= 0x3FE76944) {
                    y = one - x;
                    i = 0;
                }
                else if (ix >= 0x3FCDA661) {
                    y = x - (tc - one);
                    i = 1;
                }
                else {
                    y = x;
                    i = 2;
                }
            }
            else {
                r = zero;
                if (ix >= 0x3FFBB4C3) {
                    y = 2.0 - x;
                    i = 0;
                } /* [1.7316,2] */
                else if (ix >= 0x3FF3B4C4) {
                    y = x - tc;
                    i = 1;
                } /* [1.23,1.73] */
                else {
                    y = x - one;
                    i = 2;
                }
            }

            /* three polynomial approximations, chosen by the interval selected above */
            switch (i) {
                case 0:
                    z = y * y;
                    p1 = a0 + z * (a2 + z * (a4 + z * (a6 + z * (a8 + z * a10))));
                    p2 = z * (a1 + z * (a3 + z * (a5 + z * (a7 + z * (a9 + z * a11)))));
                    p = y * p1 + p2;
                    r += (p - 0.5 * y);
                    break;
                case 1:
                    z = y * y;
                    w = z * y;
                    p1 = t0 + w * (t3 + w * (t6 + w * (t9 + w * t12)));    /* parallel comp */
                    p2 = t1 + w * (t4 + w * (t7 + w * (t10 + w * t13)));
                    p3 = t2 + w * (t5 + w * (t8 + w * (t11 + w * t14)));
                    p = z * p1 - (tt - w * (p2 + y * p3));
                    r += (tf + p);
                    break;
                case 2:
                    p1 = y * (u0 + y * (u1 + y * (u2 + y * (u3 + y * (u4 + y * u5)))));
                    p2 = one + y * (v1 + y * (v2 + y * (v3 + y * (v4 + y * v5))));
                    r += (-0.5 * y + p1 / p2);
            }
        }
        else if (ix < 0x40200000) {             /* x < 8.0 */
            i = (int) x;
            t = zero;
            y = x - (double) i;
            p = y * (s0 + y * (s1 + y * (s2 + y * (s3 + y * (s4 + y * (s5 + y * s6))))));
            q = one + y * (r1 + y * (r2 + y * (r3 + y * (r4 + y * (r5 + y * r6)))));
            r = half * y + p / q;
            z = one;    /* lgamma(1+s) = log(s) + lgamma(s) */
            switch (i) {
                case 7:
                    z *= (y + 6.0);    /* FALLTHRU */
                case 6:
                    z *= (y + 5.0);    /* FALLTHRU */
                case 5:
                    z *= (y + 4.0);    /* FALLTHRU */
                case 4:
                    z *= (y + 3.0);    /* FALLTHRU */
                case 3:
                    z *= (y + 2.0);    /* FALLTHRU */
                    r += Math.log(z);
                    break;
            }
            /* 8.0 <= x < 2**58 */
        }
        else if (ix < 0x43900000) {
            t = Math.log(x);
            z = one / x;
            y = z * z;
            w = w0 + z * (w1 + y * (w2 + y * (w3 + y * (w4 + y * (w5 + y * w6)))));
            r = (x - half) * (t - one) + w;
        }
        else
            /* 2**58 <= x <= inf */
            r = x * (Math.log(x) - one);
        return r;
    }
+
    /**
     * Calculates the log10 of the gamma function for x using the efficient FDLIBM
     * implementation to avoid overflows and guarantees high accuracy even for large
     * numbers.
     *
     * Simply converts lnGamma's natural-log result to base 10 via lnToLog10.
     *
     * @param x the x parameter
     * @return the log10 of the gamma function at x.
     */
    public static double log10Gamma(final double x) {
        return lnToLog10(lnGamma(x));
    }
+
    /**
     * Computes x! as a double, by exponentiating log10Factorial(x) and rounding.
     * For x large enough that x! is not exactly representable the result is
     * only the nearest double.
     */
    public static double factorial(final int x) {
        // avoid rounding errors caused by fact that 10^log(x) might be slightly lower than x and flooring may produce 1 less than real value
        return (double)Math.round(Math.pow(10, log10Factorial(x)));
    }
+
    /**
     * Computes log10(x!): served from the precomputed cache for 0 <= x < cache size,
     * otherwise via log10Gamma(x + 1).  Negative x falls through to the gamma
     * function's behavior for non-positive arguments.
     */
    public static double log10Factorial(final int x) {
        if (x >= Log10FactorialCache.size() || x < 0)
            return log10Gamma(x + 1);
        else
            return Log10FactorialCache.get(x);
    }
+
+    /**
+     * Wrapper class so that the log10Factorial array is only calculated if it's used
+     */
+    private static class Log10FactorialCache {
+
+        /**
+         * The size of the precomputed cache.  Must be a positive number!
+         */
+        private static final int CACHE_SIZE = 10_000;
+
+        public static int size() { return CACHE_SIZE; }
+
+        public static double get(final int n) {
+            if (cache == null)
+                initialize();
+            return cache[n];
+        }
+
+        private static synchronized void initialize() {
+            if (cache == null) {
+                Log10Cache.ensureCacheContains(CACHE_SIZE);
+                cache = new double[CACHE_SIZE];
+                cache[0] = 0.0;
+                for (int k = 1; k < cache.length; k++)
+                    cache[k] = cache[k-1] + Log10Cache.get(k);
+            }
+        }
+
+        private static double[] cache = null;
+    }
+
+    /**
+     * Adds two arrays together and returns a new array with the sum.
+     *
+     * @param a one array
+     * @param b another array
+     * @return a new array with the sum of a and b
+     */
+    @Requires("a.length == b.length")
+    @Ensures("result.length == a.length")
+    public static int[] addArrays(final int[] a, final int[] b) {
+        int[] c = new int[a.length];
+        for (int i = 0; i < a.length; i++)
+            c[i] = a[i] + b[i];
+        return c;
+    }
+
+    /** Same routine, unboxed types for efficiency
+     *
+     * @param x                 First vector
+     * @param y                 Second vector
+     * @return Vector of same length as x and y so that z[k] = x[k]+y[k]
+     */
+    public static double[] vectorSum(final double[]x, final double[] y) {
+        if (x.length != y.length)
+            throw new ReviewedGATKException("BUG: Lengths of x and y must be the same");
+
+        double[] result = new double[x.length];
+        for (int k=0; k <x.length; k++)
+            result[k] = x[k]+y[k];
+
+        return result;
+    }
+
+    /** Compute Z=X-Y for two numeric vectors X and Y
+     *
+     * @param x                 First vector
+     * @param y                 Second vector
+     * @return Vector of same length as x and y so that z[k] = x[k]-y[k]
+     */
+    public static int[] vectorDiff(final int[]x, final int[] y) {
+        if (x.length != y.length)
+            throw new ReviewedGATKException("BUG: Lengths of x and y must be the same");
+
+        int[] result = new int[x.length];
+        for (int k=0; k <x.length; k++)
+            result[k] = x[k]-y[k];
+
+        return result;
+    }
+
+    /**
+     * Returns a series of integer values between start and stop, inclusive,
+     * expontentially distributed between the two.  That is, if there are
+     * ten values between 0-10 there will be 10 between 10-100.
+     *
+     * WARNING -- BADLY TESTED
+     * @param start
+     * @param stop
+     * @param eps
+     * @return
+     */
+    public static List<Integer> log10LinearRange(final int start, final int stop, final double eps) {
+        final LinkedList<Integer> values = new LinkedList<>();
+        final double log10range = Math.log10(stop - start);
+
+        if ( start == 0 )
+            values.add(0);
+
+        double i = 0.0;
+        while ( i <= log10range ) {
+            final int index = (int)Math.round(Math.pow(10, i)) + start;
+            if ( index < stop && (values.peekLast() == null || values.peekLast() != index ) )
+                values.add(index);
+            i += eps;
+        }
+
+        if ( values.peekLast() == null || values.peekLast() != stop )
+            values.add(stop);
+
+        return values;
+    }
+
+    /**
+     * Compute in a numerical correct way the quantity log10(1-x)
+     *
+     * Uses the approximation log10(1-x) = log10(1/x - 1) + log10(x) to avoid very quick underflow
+     * in 1-x when x is very small
+     *
+     * @param x a positive double value between 0.0 and 1.0
+     * @return an estimate of log10(1-x)
+     */
+    @Requires("x >= 0.0 && x <= 1.0")
+    @Ensures("result <= 0.0")
+    public static double log10OneMinusX(final double x) {
+        if ( x == 1.0 )
+            return Double.NEGATIVE_INFINITY;
+        else if ( x == 0.0 )
+            return 0.0;
+        else {
+            final double d = Math.log10(1 / x - 1) + Math.log10(x);
+            return Double.isInfinite(d) || d > 0.0 ? 0.0 : d;
+        }
+    }
+
    /**
     * Draw N random elements from list
     * @param list - the list from which to draw randomly
     * @param N - the number of elements to draw
     *
     * NOTE: when list.size() <= N the input list itself is returned (no copy is
     * made), so the caller may alias the original; otherwise a new list is built.
     * Randomness comes from Utils.getRandomGenerator().
     */
    public static <T> List<T> randomSubset(final List<T> list, final int N) {
        if (list.size() <= N) {
            return list;
        }

        return sliceListByIndices(sampleIndicesWithoutReplacement(list.size(),N),list);
    }
+
    /**
    * Draw N random elements from list with replacement
    * @param list - the list from which to draw randomly
    * @param N - the number of elements to draw
    *
    * NOTE: an empty input is returned as-is (same instance); otherwise a new list
    * of exactly N (possibly repeated) elements is built using Utils.getRandomGenerator().
    */
    public static <T> List<T> randomSample(final List<T> list, final int N) {
        if (list.isEmpty() ) {
            return list;
        }
        return sliceListByIndices(sampleIndicesWithReplacement(list.size(),N),list);
    }
+
    /**
     * Return the likelihood of observing the counts of categories having sampled a population
     * whose categorial frequencies are distributed according to a Dirichlet distribution
     *
     * Computed in log10 space: the multinomial coefficient plus the ratio of
     * gamma terms from integrating out the Dirichlet prior.
     *
     * @param dirichletParams - params of the prior dirichlet distribution
     * @param dirichletSum - the sum of those parameters
     * @param counts - the counts of observation in each category
     * @param countSum - the sum of counts (number of trials)
     * @return - associated likelihood, in log10 space
     * @throws IllegalStateException if the parameter and count arrays differ in length
     */
    public static double dirichletMultinomial(final double[] dirichletParams, final double dirichletSum,
                                              final int[] counts, final int countSum) {
        if ( dirichletParams.length != counts.length ) {
            throw new IllegalStateException("The number of dirichlet parameters must match the number of categories");
        }
        // todo -- lots of lnGammas here. At some point we can safely switch to x * ( ln(x) - 1)
        double likelihood = log10MultinomialCoefficient(countSum,counts);
        likelihood += log10Gamma(dirichletSum);
        likelihood -= log10Gamma(dirichletSum+countSum);
        for ( int idx = 0; idx < counts.length; idx++ ) {
            likelihood += log10Gamma(counts[idx] + dirichletParams[idx]);
            likelihood -= log10Gamma(dirichletParams[idx]);
        }

        return likelihood;
    }
+
    /** Convenience overload that derives the parameter sum and count sum itself. */
    public static double dirichletMultinomial(double[] params, int[] counts) {
        return dirichletMultinomial(params,sum(params),counts,(int) sum(counts));
    }
+
    /** Creates an Apache Commons Math ExponentialDistribution with the given mean. */
    public static ExponentialDistribution exponentialDistribution( final double mean ) {
        return new ExponentialDistributionImpl(mean);
    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/Median.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/Median.java
new file mode 100644
index 0000000..9483322
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/Median.java
@@ -0,0 +1,94 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils;
+
+import java.util.*;
+
/**
 * Utility class for calculating the median of a stream of values, optionally capping the
 * number of values retained at a fixed maximum.  Values offered after the cap is reached
 * are silently dropped ({@link #add} returns false).
 *
 * Not thread-safe.
 */
public class Median<T extends Comparable> {
    // NOTE(review): the raw Comparable bound is kept for source compatibility with existing
    // callers; ideally this would be T extends Comparable<? super T>.

    /** Values retained so far; sorted lazily the first time a median is requested. */
    final List<T> values;
    /** Cap on the number of values retained. */
    final int maxValuesToKeep;
    /** True when {@link #values} is known to be in sorted order. */
    boolean sorted = false;

    /** Creates a median tracker with no practical limit on the number of values kept. */
    public Median() {
        this(Integer.MAX_VALUE);
    }

    /**
     * Creates a median tracker that keeps at most maxValuesToKeep values.
     *
     * @param maxValuesToKeep maximum number of values to retain; further adds are ignored
     */
    public Median(final int maxValuesToKeep) {
        this.maxValuesToKeep = maxValuesToKeep;
        this.values = new ArrayList<>();
    }

    /** @return true if the tracker has reached capacity and will ignore further adds */
    public boolean isFull() {
        return values.size() >= maxValuesToKeep;
    }

    /** @return the number of values currently retained */
    public int size() {
        return values.size();
    }

    /** @return true if no values have been added */
    public boolean isEmpty() {
        return values.isEmpty();
    }

    /**
     * Returns the median of the retained values.
     *
     * @return the median value
     * @throws IllegalStateException if no values have been added
     */
    public T getMedian() {
        if ( isEmpty() ) {
            throw new IllegalStateException("Cannot get median value from empty array");
        }
        return getMedian(null);  // the default is never used: the list is non-empty here
    }

    /**
     * Returns the floor((n + 1) / 2)-th smallest of the n retained values -- i.e., the
     * lower median when n is even -- or defaultValue if the list is empty.
     *
     * Sorts the retained values on first use and caches the sorted order until the next add.
     *
     * @param defaultValue value to return when no values have been added; may be null
     * @return the median, or defaultValue if empty
     */
    public T getMedian(final T defaultValue) {
        if ( isEmpty() ) {
            return defaultValue;
        }

        if ( ! sorted ) {
            sorted = true;
            Collections.sort(values);
        }

        final int offset = (int)Math.floor((values.size() + 1) * 0.5) - 1;
        return values.get(offset);
    }

    /**
     * Offers a value to the tracker.
     *
     * @param value the value to add
     * @return true if the value was retained, false if the tracker was already full
     */
    public boolean add(final T value) {
        if ( ! isFull() ) {
            sorted = false;  // invalidate any cached sort order
            return values.add(value);
        } else {
            return false;
        }
    }
}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/MultiThreadedErrorTracker.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/MultiThreadedErrorTracker.java
new file mode 100644
index 0000000..3dfe13c
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/MultiThreadedErrorTracker.java
@@ -0,0 +1,105 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils;
+
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+
+/**
+ * A utility to track exceptions that occur across threads.
+ *
+ * Uses a notify mechanism so that multiple threads can tell the tracker that an
+ * error has occurred, and a master thread can monitor this object for an error
+ * occurring and take appropriate action.  Only maintains the first
+ * error to reach the tracker.
+ *
+ * Refactored from HierarchicalMicroScheduler
+ *
+ * User: depristo
+ * Date: 9/19/12
+ * Time: 11:20 AM
+ */
+public class MultiThreadedErrorTracker {
+    /**
+     * An exception that's occurred.  If null, no exception has occurred.
+     */
+    private RuntimeException error = null;
+
+    /**
+     * Convenience function to check, and throw, an error is one is pending
+     */
+    public synchronized void throwErrorIfPending() {
+        if (hasAnErrorOccurred())
+            throw getError();
+    }
+
+    /**
+     * Detects whether an execution error has occurred.
+     * @return True if an error has occurred.  False otherwise.
+     */
+    public synchronized boolean hasAnErrorOccurred() {
+        return error != null;
+    }
+
+    /**
+     * Retrieve the error that has occurred.
+     *
+     * @throws ReviewedGATKException if no error has occurred.
+     * @return
+     */
+    public synchronized RuntimeException getError() {
+        if(!hasAnErrorOccurred())
+            throw new ReviewedGATKException("User has attempted to retrieve a traversal error when none exists");
+        return error;
+    }
+
+    /**
+     * Notify this error tracker that an error has occurs.  Only updates the tracked
+     * error if it is currently null (i.e., no error has been already reported).  So
+     * calling this successively with multiple errors only keeps the first, which is the
+     * right thing to do as the initial failure is usually the meaningful one, but
+     * generates a cascade of failures as other subsystems fail.
+     */
+    public synchronized RuntimeException notifyOfError(Throwable error) {
+        if ( this.error == null )
+            this.error = toRuntimeException(error);
+
+        return this.error;
+    }
+
+    /**
+     * Convert error to a Runtime exception, or keep as is if it already is one
+     *
+     * @param error the error that has occurred
+     * @return the potentially converted error
+     */
+    private RuntimeException toRuntimeException(final Throwable error) {
+        // If the error is already a Runtime, pass it along as is.  Otherwise, wrap it.
+        if (error instanceof RuntimeException)
+            return (RuntimeException)error;
+        else
+            return new ReviewedGATKException("An error occurred during the traversal.  Message=" + error.getMessage(), error);
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/NGSPlatform.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/NGSPlatform.java
new file mode 100644
index 0000000..1b3af70
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/NGSPlatform.java
@@ -0,0 +1,146 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils;
+
+import org.broadinstitute.gatk.utils.sam.GATKSAMReadGroupRecord;
+import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
+
+import java.util.Arrays;
+import java.util.LinkedList;
+import java.util.List;
+
+/**
+ * A canonical, master list of the standard NGS platforms.  These values
+ * can be obtained (efficiently) from a GATKSAMRecord object with the
+ * getNGSPlatform method.
+ *
+ * @author Mark DePristo
+ * @since 2011
+ */
+public enum NGSPlatform {
+    // note the order of elements here determines the order of matching operations, and therefore the
+    // efficiency of getting a NGSPlatform from a string.
+    ILLUMINA(SequencerFlowClass.DISCRETE, "ILLUMINA", "SLX", "SOLEXA"),
+    SOLID(SequencerFlowClass.DISCRETE, "SOLID"),
+    LS454(SequencerFlowClass.FLOW, "454", "LS454"),
+    COMPLETE_GENOMICS(SequencerFlowClass.DISCRETE, "COMPLETE"),
+    PACBIO(SequencerFlowClass.DISCRETE, "PACBIO"),
+    ION_TORRENT(SequencerFlowClass.FLOW, "IONTORRENT"),
+    CAPILLARY(SequencerFlowClass.OTHER, "CAPILLARY"),
+    HELICOS(SequencerFlowClass.OTHER, "HELICOS"),
+    UNKNOWN(SequencerFlowClass.OTHER, "UNKNOWN");
+
+    /**
+     * Array of the prefix names in a BAM file for each of the platforms.
+     */
+    protected final String[] BAM_PL_NAMES;
+    protected final SequencerFlowClass sequencerType;
+
+    NGSPlatform(final SequencerFlowClass type, final String... BAM_PL_NAMES) {
+        if ( BAM_PL_NAMES.length == 0 ) throw new IllegalStateException("Platforms must have at least one name");
+
+        for ( int i = 0; i < BAM_PL_NAMES.length; i++ )
+            BAM_PL_NAMES[i] = BAM_PL_NAMES[i].toUpperCase();
+
+        this.BAM_PL_NAMES = BAM_PL_NAMES;
+        this.sequencerType = type;
+    }
+
+    /**
+     * Returns a representative PL string for this platform
+     * @return a representative PL string
+     */
+    public final String getDefaultPlatform() {
+        return BAM_PL_NAMES[0];
+    }
+
+    /**
+     * The broad "type" of sequencer this platform represents (discrete or flow)
+     * @return a SequencerFlowClass
+     */
+    public final SequencerFlowClass getSequencerType() {
+        return sequencerType;
+    }
+
+    /**
+     * Convenience get -- get the NGSPlatform from a GATKSAMRecord.
+     *
+     * Just gets the platform from the GATKReadGroupRecord associated with this read.
+     *
+     * @param read a non-null GATKSAMRecord
+     * @return an NGSPlatform object matching the PL field of the header, of UNKNOWN if there was no match,
+     *         if there is no read group for read, or there's no PL field for the read group
+     */
+    public static NGSPlatform fromRead(final GATKSAMRecord read) {
+        if ( read == null ) throw new IllegalArgumentException("read cannot be null");
+        final GATKSAMReadGroupRecord rg = read.getReadGroup();
+        return rg == null ? UNKNOWN : rg.getNGSPlatform();
+    }
+
+    /**
+     * Returns the NGSPlatform corresponding to the PL tag in the read group
+     * @param plFromRG -- the PL field (or equivalent) in a ReadGroup object.  Can be null => UNKNOWN
+     * @return an NGSPlatform object matching the PL field of the header, or UNKNOWN if there was no match or plFromRG is null
+     */
+    public static NGSPlatform fromReadGroupPL(final String plFromRG) {
+        if ( plFromRG == null ) return UNKNOWN;
+
+        // todo -- algorithm could be implemented more efficiently, as the list of all
+        // todo -- names is known upfront, so a decision tree could be used to identify
+        // todo -- a prefix common to PL
+        final String pl = plFromRG.toUpperCase();
+        for ( final NGSPlatform ngsPlatform : NGSPlatform.values() ) {
+            for ( final String bamPLName : ngsPlatform.BAM_PL_NAMES ) {
+                if ( pl.contains(bamPLName) )
+                    return ngsPlatform;
+            }
+        }
+
+        return UNKNOWN;
+    }
+
+    /**
+     * checks whether or not the requested platform is listed in the set (and is not unknown)
+     *
+     * @param platform the read group string that describes the platform used.  can be null
+     * @return true if the platform is known (i.e. it's in the list and is not UNKNOWN)
+     */
+    public static boolean isKnown(final String platform) {
+        return fromReadGroupPL(platform) != UNKNOWN;
+    }
+
+    /**
+     * Get a human-readable list of platform names
+     * @return the list of platform names
+     */
+    public static String knownPlatformsString() {
+        final List<String> names = new LinkedList<>();
+        for ( final NGSPlatform pl : values() ) {
+            names.addAll(Arrays.asList(pl.BAM_PL_NAMES));
+        }
+        return Utils.join(",", names);
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/PathUtils.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/PathUtils.java
new file mode 100644
index 0000000..f2a8340
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/PathUtils.java
@@ -0,0 +1,195 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils;
+
+import org.apache.commons.io.comparator.LastModifiedFileComparator;
+import org.apache.log4j.Logger;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+
+import java.io.File;
+import java.io.FilenameFilter;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.List;
+
+/**
+ * Created by IntelliJ IDEA.
+ * User: hanna
+ * Date: Mar 30, 2009
+ * Time: 5:43:39 PM
+ * To change this template use File | Settings | File Templates.
+ *
+ * A set of static utility methods for common operations on paths.
+ */
+public class PathUtils {
+    private static Logger logger = Logger.getLogger(PathUtils.class);
+
+    /**
+     * Constructor access disallowed...static utility methods only!
+     */
+    private PathUtils() { }
+
+    /**
+     * Find the files in the given directory matching the given extension.
+     *
+     * @param basePath       Path to search.
+     * @param relativePrefix What directory should the given files be presented relative to?
+     * @param extension      Extension for which to search.
+     * @param recursive      Search recursively.  Beware of symlinks!
+     * @return A list of files matching the specified criteria.
+     *         TODO: Test recursive traversal in the presence of a symlink.
+     */
+    public static List<String> findFilesInPath(final File basePath, final String relativePrefix, final String extension, boolean recursive) {
+        List<String> filesInPath = new ArrayList<String>();
+
+        FilenameFilter filter = new OrFilenameFilter(new DirectoryFilter(),
+                new ExtensionFilter(extension));
+        File[] contents = basePath.listFiles( filter );
+        for (File content : contents) {
+            String relativeFileName = relativePrefix.trim().length() != 0 ?
+                    relativePrefix + File.separator + content.getName() :
+                    content.getName();
+            if (relativeFileName.endsWith(extension))
+                filesInPath.add(relativeFileName);
+            else if (content.isDirectory() && recursive)
+                filesInPath.addAll(findFilesInPath(content, relativeFileName, extension, recursive));
+        }
+
+        return filesInPath;
+    }
+
+    /**
+     * Filter files by extension.
+     */
+    public static class ExtensionFilter implements FilenameFilter {
+        private String extensionName = null;
+
+        public ExtensionFilter(String extensionName) {
+            this.extensionName = extensionName;
+        }
+
+        public boolean accept(File f, String s) {
+            return s.endsWith("." + extensionName);
+        }
+    }
+
+    /**
+     * Filter directories from list of files.
+     */
+    public static class DirectoryFilter implements FilenameFilter {
+        public boolean accept(File f, String s) {
+            return new File(f, s).isDirectory();
+        }
+    }
+
+    /**
+     * Join two FilenameFilters together in a logical 'or' operation.
+     */
+    public static class OrFilenameFilter implements FilenameFilter {
+        private FilenameFilter lhs = null, rhs = null;
+
+        public OrFilenameFilter(FilenameFilter lhs, FilenameFilter rhs) {
+            this.lhs = lhs;
+            this.rhs = rhs;
+        }
+
+        public boolean accept(File f, String s) {
+            return lhs.accept(f, s) || rhs.accept(f, s);
+        }
+    }
+
+    /**
+     * Refreshes the volume associated with a given file or directory by attempting to access it
+     * a few times before giving up.  The file need not exist, though the parent directory must.
+     * This method is particularly useful when your job has been dispatched to LSF and you need to
+     * ensure an NSF-mounted volume is actually accessible (many times it isn't for a few seconds,
+     * just enough to cause your program to come crashing down).
+     *
+     * @param file  the file or directory that resides in the volume to be refreshed.
+     */
+    public static void refreshVolume(File file) {
+        File dir = file.isDirectory() ? file : file.getParentFile();
+
+        int sleepCount = 0;
+        while (sleepCount < 3 && dir.listFiles() == null) {
+            try {
+                Thread.sleep((sleepCount + 1)*3000);
+            } catch (InterruptedException e) {
+            }
+
+            sleepCount++;
+        }
+
+        if (dir.listFiles() == null) {
+            throw new ReviewedGATKException("The volume '" + dir.getAbsolutePath() + "' could not be accessed.");
+        }
+    }
+
+
+    /**
+     * Walk over the GATK released directories to find the most recent JAR files corresponding
+     * to the version prefix.  For example, providing input "1.2" will
+     * return the full path to the most recent GenomeAnalysisTK.jar in the GATK_RELEASE_DIR
+     * in directories that match gatkReleaseDir/GenomeAnalysisTK-1.2*
+     *
+     * @param gatkReleaseDir Path to directory containing GATK release binaries (e.g., /humgen/gsa-hpprojects/GATK/bin/)
+     * @param releaseVersionNumber Desired GATK version number (e.g., 1.2)
+     * @return A file pointing to the most recent GATK file in the release directory with GATK release number
+     */
+    public static File findMostRecentGATKVersion(final File gatkReleaseDir, final String releaseVersionNumber) {
+        final String versionString = "GenomeAnalysisTK-" + releaseVersionNumber;
+
+        final List<File> gatkJars = new ArrayList<File>();
+        for ( final String path : gatkReleaseDir.list(new isGATKVersion(versionString)) ) {
+            gatkJars.add(new File(gatkReleaseDir.getAbsolutePath() + "/" + path + "/GenomeAnalysisTK.jar"));
+        }
+
+        if ( gatkJars.isEmpty() )
+            return null;
+        else {
+            Collections.sort(gatkJars, LastModifiedFileComparator.LASTMODIFIED_REVERSE);
+            //for ( File jar : gatkJars ) logger.info(String.format("%s => %d", jar, jar.lastModified()));
+            final File last = gatkJars.get(0);
+            logger.debug(String.format("findMostRecentGATKVersion: Found %d jars for %s, keeping last one %s",
+                    gatkJars.size(), releaseVersionNumber, last));
+            return last;
+        }
+    }
+
+    private final static class isGATKVersion implements FilenameFilter {
+        private final String versionString;
+
+        private isGATKVersion(final String versionString) {
+            this.versionString = versionString;
+        }
+
+        @Override
+        public boolean accept(final File file, final String s) {
+            return s.contains(versionString);
+        }
+    }
+}
\ No newline at end of file
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/QualityUtils.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/QualityUtils.java
new file mode 100644
index 0000000..d67e5ab
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/QualityUtils.java
@@ -0,0 +1,397 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils;
+
+import com.google.java.contract.Ensures;
+import htsjdk.samtools.SAMUtils;
+
+/**
+ * QualityUtils is a static class (no instantiation allowed!) with some utility methods for manipulating
+ * quality scores.
+ *
+ * @author Kiran Garimella, Mark DePristo
+ * @since Way back
+ */
+public class QualityUtils {
    /**
     * Maximum quality score that can be encoded in a SAM/BAM file
     */
    public final static byte MAX_SAM_QUAL_SCORE = SAMUtils.MAX_PHRED_SCORE;


    // log10 of the smallest positive double; used to derive the phred-scaled bound below
    private final static double RAW_MIN_PHRED_SCALED_QUAL = Math.log10(Double.MIN_VALUE);
    // NOTE(review): despite the MIN_ prefix this is a large positive value (-10 * log10(Double.MIN_VALUE)),
    // the highest phred-scaled qual whose error probability is still representable as a double
    protected final static double MIN_PHRED_SCALED_QUAL = -10.0 * RAW_MIN_PHRED_SCALED_QUAL;

    /**
     * bams containing quals above this value are extremely suspicious and we should warn the user
     */
    public final static byte MAX_REASONABLE_Q_SCORE = 60;

    /**
     * The lowest quality score for a base that is considered reasonable for statistical analysis.  This is
     * because Q 6 => you stand a 25% of being right, which means all bases are equally likely
     */
    public final static byte MIN_USABLE_Q_SCORE = 6;
    // 255 is the SAM convention for "mapping quality unavailable"
    public final static int MAPPING_QUALITY_UNAVAILABLE = 255;

    /**
     * Maximum sense quality value.
     */
    public static final int MAX_QUAL = 254;

    /**
     * Cached values for qual as byte calculations so they are very fast
     */
    private static double qualToErrorProbCache[] = new double[MAX_QUAL + 1];
    private static double qualToProbLog10Cache[] = new double[MAX_QUAL + 1];


    // Precompute error probability and log10(1 - error) for every qual in [0, MAX_QUAL] so
    // the byte-based conversions below are simple array lookups.
    static {
        for (int i = 0; i <= MAX_QUAL; i++) {
            qualToErrorProbCache[i] = qualToErrorProb((double) i);
            qualToProbLog10Cache[i] = Math.log10(1.0 - qualToErrorProbCache[i]);
        }
    }

    /**
     * Private constructor.  No instantiating this class!
     */
    private QualityUtils() {}
+
    // ----------------------------------------------------------------------
    //
    // These are all functions to convert a phred-scaled quality score to a probability
    //
    // ----------------------------------------------------------------------

    /**
     * Convert a phred-scaled quality score to its probability of being true (Q30 => 0.999)
     *
     * This is the Phred-style conversion, *not* the Illumina-style conversion.
     *
     * Because the input is a discretized byte value, this function uses a cache so is very efficient
     *
     * WARNING -- because this function takes a byte for qual, you must be careful in converting
     * integers to byte.  The appropriate way to do this is ((byte)(myInt & 0xFF))
     *
     * @param qual a quality score (0-255)
     * @return a probability (0.0-1.0)
     */
    @Ensures("result >= 0.0 && result <= 1.0")
    public static double qualToProb(final byte qual) {
        return 1.0 - qualToErrorProb(qual);
    }
+
    /**
     * Convert a phred-scaled quality score to its probability of being true (Q30 => 0.999)
     *
     * This is the Phred-style conversion, *not* the Illumina-style conversion.
     *
     * Because the input is a double value, this function must call Math.pow so can be quite expensive
     *
     * @param qual a phred-scaled quality score encoded as a double.  Can be non-integer values (30.5)
     * @return a probability (0.0-1.0)
     * @throws IllegalArgumentException if qual is negative
     */
    @Ensures("result >= 0.0 && result <= 1.0")
    public static double qualToProb(final double qual) {
        if ( qual < 0.0 ) throw new IllegalArgumentException("qual must be >= 0.0 but got " + qual);
        return 1.0 - qualToErrorProb(qual);
    }
+
    /**
     * Convert a phred-scaled quality score to its log10 probability of being true (Q30 => log10(0.999))
     *
     * This is the Phred-style conversion, *not* the Illumina-style conversion.
     *
     * Because the input is a discretized byte value, this function uses a cache so is very efficient
     *
     * WARNING -- because this function takes a byte for qual, you must be careful in converting
     * integers to byte.  The appropriate way to do this is ((byte)(myInt & 0xFF))
     *
     * @param qual a quality score (0-255)
     * @return a log10 probability (<= 0.0)
     */
    @Ensures("result <= 0.0")
    public static double qualToProbLog10(final byte qual) {
        return qualToProbLog10Cache[(int)qual & 0xff]; // Map: 127 -> 127; -128 -> 128; -1 -> 255; etc.
    }
+
    /**
     * Convert a phred-scaled quality score to its probability of being wrong (Q30 => 0.001)
     *
     * This is the Phred-style conversion, *not* the Illumina-style conversion.
     *
     * Because the input is a double value, this function must call Math.pow so can be quite expensive
     *
     * @param qual a phred-scaled quality score encoded as a double.  Can be non-integer values (30.5)
     * @return a probability (0.0-1.0)
     * @throws IllegalArgumentException if qual is negative
     */
    @Ensures("result >= 0.0 && result <= 1.0")
    public static double qualToErrorProb(final double qual) {
        if ( qual < 0.0 ) throw new IllegalArgumentException("qual must be >= 0.0 but got " + qual);
        return Math.pow(10.0, qual / -10.0);
    }
+
    /**
     * Convert a phred-scaled quality score to its probability of being wrong (Q30 => 0.001)
     *
     * This is the Phred-style conversion, *not* the Illumina-style conversion.
     *
     * Because the input is a byte value, this function uses a cache so is very efficient
     *
     * WARNING -- because this function takes a byte for qual, you must be careful in converting
     * integers to byte.  The appropriate way to do this is ((byte)(myInt & 0xFF))
     *
     * @param qual a phred-scaled quality score encoded as a byte
     * @return a probability (0.0-1.0)
     */
    @Ensures("result >= 0.0 && result <= 1.0")
    public static double qualToErrorProb(final byte qual) {
        return qualToErrorProbCache[(int)qual & 0xff]; // Map: 127 -> 127; -128 -> 128; -1 -> 255; etc.
    }
+
+
    /**
     * Convert a phred-scaled quality score to its log10 probability of being wrong (Q30 => log10(0.001))
     *
     * This is the Phred-style conversion, *not* the Illumina-style conversion.
     *
     * The calculation is extremely efficient
     *
     * WARNING -- because this function takes a byte for qual, you must be careful in converting
     * integers to byte.  The appropriate way to do this is ((byte)(myInt & 0xFF))
     *
     * @param qual a phred-scaled quality score encoded as a byte
     * @return a log10 probability (<= 0.0)
     */
    @Ensures("result <= 0.0")
    public static double qualToErrorProbLog10(final byte qual) {
        return qualToErrorProbLog10((double)(qual & 0xFF));
    }
+
    /**
     * Convert a phred-scaled quality score to its log10 probability of being wrong (Q30 => log10(0.001))
     *
     * This is the Phred-style conversion, *not* the Illumina-style conversion.
     *
     * The calculation is extremely efficient: log10(10^(-qual/10)) is simply -qual/10
     *
     * @param qual a phred-scaled quality score encoded as a double
     * @return a log10 probability (<= 0.0)
     * @throws IllegalArgumentException if qual is negative
     */
    @Ensures("result <= 0.0")
    public static double qualToErrorProbLog10(final double qual) {
        if ( qual < 0.0 ) throw new IllegalArgumentException("qual must be >= 0.0 but got " + qual);
        return qual * -0.1;
    }
+
    // ----------------------------------------------------------------------
    //
    // Functions to convert a probability to a phred-scaled quality score
    //
    // ----------------------------------------------------------------------

    /**
     * Convert a probability of being wrong to a phred-scaled quality score (0.01 => 20).
     *
     * Note, this function caps the resulting quality score by the public static value MAX_SAM_QUAL_SCORE
     * (low-end bounding is delegated to boundQual -- see that method).
     *
     * @param errorRate a probability (0.0-1.0) of being wrong (i.e., 0.01 is 1% change of being wrong)
     * @return a quality score (0-MAX_SAM_QUAL_SCORE)
     */
    public static byte errorProbToQual(final double errorRate) {
        return errorProbToQual(errorRate, MAX_SAM_QUAL_SCORE);
    }
+
    /**
     * Convert a probability of being wrong to a phred-scaled quality score (0.01 => 20).
     *
     * Note, this function caps the resulting quality score at maxQual (low-end bounding is
     * delegated to boundQual -- see that method).
     *
     * WARNING -- because this function takes a byte for maxQual, you must be careful in converting
     * integers to byte.  The appropriate way to do this is ((byte)(myInt & 0xFF))
     *
     * @param errorRate a probability (0.0-1.0) of being wrong (i.e., 0.01 is 1% change of being wrong)
     * @param maxQual the highest quality score this function may return
     * @return a quality score (0-maxQual)
     * @throws IllegalArgumentException if errorRate is not a valid probability
     */
    public static byte errorProbToQual(final double errorRate, final byte maxQual) {
        if ( ! MathUtils.goodProbability(errorRate) ) throw new IllegalArgumentException("errorRate must be good probability but got " + errorRate);
        final double d = Math.round(-10.0*Math.log10(errorRate));
        return boundQual((int)d, maxQual);
    }
+
+    /**
+     * @see #errorProbToQual(double, byte) with proper conversion of maxQual integer to a byte
+     */
+    public static byte errorProbToQual(final double prob, final int maxQual) {
+        if ( maxQual < 0 || maxQual > 255 ) throw new IllegalArgumentException("maxQual must be between 0-255 but got " + maxQual);
+        return errorProbToQual(prob, (byte)(maxQual & 0xFF));
+    }
+
    /**
     * Convert a probability of being right to a phred-scaled quality score (0.99 => 20).
     *
     * Note, this function caps the resulting quality score by the public static value MAX_SAM_QUAL_SCORE
     * at the high-end and by 1 at the low-end (see boundQual).
     *
     * @param prob a probability (0.0-1.0) of being right
     * @return a quality score (1-MAX_SAM_QUAL_SCORE)
     */
    public static byte trueProbToQual(final double prob) {
        return trueProbToQual(prob, MAX_SAM_QUAL_SCORE);
    }
+
+    /**
+     * Convert a probability of being right to a phred-scaled quality score (0.99 => 20).
+     *
+     * Note, this function caps the resulting quality score by the min probability allowed (EPS).
+     * So for example, if prob is 1e-6, which would imply a Q-score of 60, and EPS is 1e-4,
+     * the result of this function is actually Q40.
+     *
+     * Note that the resulting quality score, regardless of EPS, is capped by MAX_SAM_QUAL_SCORE and
+     * bounded on the low-side by 1.
+     *
+     * WARNING -- because this function takes a byte for maxQual, you must be careful in converting
+     * integers to byte.  The appropriate way to do this is ((byte)(myInt & 0xFF))
+     *
+     * @param trueProb a probability (0.0-1.0) of being right
+     * @param maxQual the maximum quality score we are allowed to emit here, regardless of the error rate
+     * @return a phred-scaled quality score (0-maxQualScore) as a byte
+     */
+    @Ensures("(result & 0xFF) >= 1 && (result & 0xFF) <= (maxQual & 0xFF)")
+    public static byte trueProbToQual(final double trueProb, final byte maxQual) {
+        if ( ! MathUtils.goodProbability(trueProb) ) throw new IllegalArgumentException("trueProb must be good probability but got " + trueProb);
+        final double lp = Math.round(-10.0*MathUtils.log10OneMinusX(trueProb));
+        return boundQual((int)lp, maxQual);
+    }
+
+    /**
+     * @see #trueProbToQual(double, byte) with proper conversion of maxQual to a byte
+     */
+    public static byte trueProbToQual(final double prob, final int maxQual) {
+        if ( maxQual < 0 || maxQual > 255 ) throw new IllegalArgumentException("maxQual must be between 0-255 but got " + maxQual);
+        return trueProbToQual(prob, (byte)(maxQual & 0xFF));
+    }
+
    /**
     * Convert a probability of being right to a phred-scaled quality score of being wrong as a double
     *
     * This is a very generic method, that simply computes a phred-scaled double quality
     * score given an error rate.  It has the same precision as a normal double operation
     *
     * The error rate is derived as 1 - trueRate, computed in log space via
     * MathUtils.log10OneMinusX (presumably for numerical stability when trueRate is
     * close to 1.0 -- confirm against MathUtils).
     *
     * @param trueRate the probability of being right (0.0-1.0)
     * @return a phred-scaled version of the error rate implied by trueRate (always >= 0.0)
     */
    @Ensures("result >= 0.0")
    public static double phredScaleCorrectRate(final double trueRate) {
        return phredScaleLog10ErrorRate(MathUtils.log10OneMinusX(trueRate));
    }
+
    /**
     * Convert a log10 probability of being right to a phred-scaled quality score of being wrong as a double
     *
     * This is a very generic method, that simply computes a phred-scaled double quality
     * score given an error rate.  It has the same precision as a normal double operation
     *
     * @param trueRateLog10 the log10 probability of being right (a value <= 0.0).  Can be -Infinity,
     *                      in which case the true rate is treated as 0.0 (Math.pow(10.0, -Inf) == 0.0,
     *                      i.e., certainly wrong)
     * @return a phred-scaled version of the error rate implied by trueRateLog10 (always >= 0.0)
     */
    @Ensures("result >= 0.0")
    public static double phredScaleLog10CorrectRate(final double trueRateLog10) {
        return phredScaleCorrectRate(Math.pow(10.0, trueRateLog10));
    }
+
    /**
     * Convert a probability of being wrong to a phred-scaled quality score of being wrong as a double
     *
     * This is a very generic method, that simply computes a phred-scaled double quality
     * score given an error rate.  It has the same precision as a normal double operation
     *
     * @param errorRate the probability of being wrong (0.0-1.0).  An errorRate of 0.0 yields a
     *                  log10 of -Infinity, whose handling is delegated to phredScaleLog10ErrorRate
     *                  (floored there at RAW_MIN_PHRED_SCALED_QUAL, assuming its validity check
     *                  accepts -Infinity -- TODO confirm against MathUtils.goodLog10Probability)
     * @return a phred-scaled version of the error rate (always >= 0.0)
     */
    @Ensures("result >= 0.0")
    public static double phredScaleErrorRate(final double errorRate) {
        return phredScaleLog10ErrorRate(Math.log10(errorRate));
    }
+
    /**
     * Convert a log10 probability of being wrong to a phred-scaled quality score of being wrong as a double
     *
     * This is a very generic method, that simply computes a phred-scaled double quality
     * score given an error rate.  It has the same precision as a normal double operation
     *
     * @param errorRateLog10 the log10 probability of being wrong; must satisfy
     *                       MathUtils.goodLog10Probability.  Values below RAW_MIN_PHRED_SCALED_QUAL
     *                       are floored to it, which caps the maximum phred score this function
     *                       can return
     * @return a phred-scaled version of the error rate (always >= 0.0)
     * @throws IllegalArgumentException if errorRateLog10 fails MathUtils.goodLog10Probability
     */
    @Ensures("result >= 0.0")
    public static double phredScaleLog10ErrorRate(final double errorRateLog10) {
        if ( ! MathUtils.goodLog10Probability(errorRateLog10) ) throw new IllegalArgumentException("errorRateLog10 must be good probability but got " + errorRateLog10);
        // abs is necessary for the edge case errorRateLog10 = 0, where -10.0 * 0.0 produces -0.0
        return Math.abs(-10.0 * Math.max(errorRateLog10, RAW_MIN_PHRED_SCALED_QUAL));
    }
+
+    // ----------------------------------------------------------------------
+    //
+    // Routines to bound a quality score to a reasonable range
+    //
+    // ----------------------------------------------------------------------
+
    /**
     * Return a quality score bounded by MAX_SAM_QUAL_SCORE at the high-end and 1 at the low-end
     *
     * @param qual the uncapped quality score as an integer
     * @return the bounded quality score in [1, MAX_SAM_QUAL_SCORE]
     */
    @Ensures("(result & 0xFF) >= 1 && (result & 0xFF) <= (MAX_SAM_QUAL_SCORE & 0xFF)")
    public static byte boundQual(int qual) {
        return boundQual(qual, MAX_SAM_QUAL_SCORE);
    }
+
+    /**
+     * Return a quality score that bounds qual by maxQual and 1
+     *
+     * WARNING -- because this function takes a byte for maxQual, you must be careful in converting
+     * integers to byte.  The appropriate way to do this is ((byte)(myInt & 0xFF))
+     *
+     * @param qual the uncapped quality score as an integer.  Can be < 0 (which may indicate an error in the
+     *             client code), which will be brought back to 1, but this isn't an error, as some
+     *             routines may use this functionality (BaseRecalibrator, for example)
+     * @param maxQual the maximum quality score, must be less < 255
+     * @return the bounded quality score
+     */
+    @Ensures("(result & 0xFF) >= 1 && (result & 0xFF) <= (maxQual & 0xFF)")
+    public static byte boundQual(final int qual, final byte maxQual) {
+        return (byte) (Math.max(Math.min(qual, maxQual & 0xFF), 1) & 0xFF);
+    }
+
+    }
+
+
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/R/RScriptExecutor.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/R/RScriptExecutor.java
new file mode 100644
index 0000000..1ee2798
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/R/RScriptExecutor.java
@@ -0,0 +1,191 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.R;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.commons.lang.StringUtils;
+import org.apache.log4j.Logger;
+import org.broadinstitute.gatk.utils.Utils;
+import org.broadinstitute.gatk.utils.exceptions.GATKException;
+import org.broadinstitute.gatk.utils.exceptions.UserException;
+import org.broadinstitute.gatk.utils.io.IOUtils;
+import org.broadinstitute.gatk.utils.io.Resource;
+import org.broadinstitute.gatk.utils.runtime.ProcessController;
+import org.broadinstitute.gatk.utils.runtime.ProcessSettings;
+import org.broadinstitute.gatk.utils.runtime.RuntimeUtils;
+
+import java.io.File;
+import java.util.ArrayList;
+import java.util.List;
+
/**
 * Generic service for executing RScripts
 *
 * Usage: register R libraries, script resources and/or script files, plus trailing
 * command-line arguments, then call exec().  Everything runs in a single
 * "Rscript -e &lt;expression&gt;" process: the generated expression installs the
 * requested libraries into a temporary directory, loads them, and source()s each
 * script in registration order.  All temporary files are deleted afterwards.
 */
public class RScriptExecutor {
    private static final String RSCRIPT_BINARY = "Rscript";
    // Location of the Rscript binary on the PATH, resolved once at class-load time (null if absent).
    private static final File RSCRIPT_PATH = RuntimeUtils.which(RSCRIPT_BINARY);
    public static final boolean RSCRIPT_EXISTS = (RSCRIPT_PATH != null);
    private static final String RSCRIPT_MISSING_MESSAGE = "Please add the Rscript directory to your environment ${PATH}";

    /**
     * our log
     */
    private static Logger logger = Logger.getLogger(RScriptExecutor.class);

    // When false (the default), failures are logged and exec() returns false;
    // when true, failures are rethrown to the caller.
    private boolean exceptOnError = false;
    private final List<RScriptLibrary> libraries = new ArrayList<RScriptLibrary>();
    private final List<Resource> scriptResources = new ArrayList<Resource>();
    private final List<File> scriptFiles = new ArrayList<File>();
    private final List<String> args = new ArrayList<String>();

    /** Sets whether a failed execution throws (true) or is only logged, with exec() returning false (false). */
    public void setExceptOnError(boolean exceptOnError) {
        this.exceptOnError = exceptOnError;
    }

    /** Registers an R library to be installed into a temp dir and loaded before the scripts run. */
    public void addLibrary(RScriptLibrary library) {
        this.libraries.add(library);
    }

    /** Registers a classpath resource to be extracted to a temp file and source()d. */
    public void addScript(Resource script) {
        this.scriptResources.add(script);
    }

    /** Registers a script file on disk to be source()d. */
    public void addScript(File script) {
        this.scriptFiles.add(script);
    }

    /**
     * Adds args to the end of the Rscript command line.
     * @param args the args.
     * @throws NullPointerException if any of the args are null.
     */
    public void addArgs(Object... args) {
        for (Object arg: args)
            this.args.add(arg.toString());
    }

    /**
     * Builds a human-readable approximation of the command line, for log messages.
     * Not the exact command executed: script resources are shown by their classpath
     * location and the generated -e expression is omitted.
     * @return an approximate command line, for display purposes only
     */
    public String getApproximateCommandLine() {
        StringBuilder command = new StringBuilder("Rscript");
        for (Resource script: this.scriptResources)
            command.append(" (resource)").append(script.getFullPath());
        for (File script: this.scriptFiles)
            command.append(" ").append(script.getAbsolutePath());
        for (String arg: this.args)
            command.append(" ").append(arg);
        return command.toString();
    }

    /**
     * Executes Rscript over the registered libraries, scripts, and arguments.
     *
     * If Rscript is not on the PATH, either throws UserException.CannotExecuteRScript
     * (when exceptOnError is set) or logs a warning and returns false.  Temporary files
     * created for library sources, installed libraries, and extracted script resources
     * are deleted in a finally block, even on failure.
     *
     * @return true if the process ran and exited with status 0; false on failure when
     *         exceptOnError is disabled
     */
    public boolean exec() {
        // Bail out early when Rscript isn't installed; behavior depends on exceptOnError.
        if (!RSCRIPT_EXISTS) {
            if (exceptOnError) {
                throw new UserException.CannotExecuteRScript(RSCRIPT_MISSING_MESSAGE);
            } else {
                logger.warn("Skipping: " + getApproximateCommandLine());
                return false;
            }
        }

        // Every temp file/dir created below is recorded here and deleted in the finally block.
        List<File> tempFiles = new ArrayList<File>();
        try {
            File tempLibSourceDir  = IOUtils.tempDir("RlibSources.", "");
            File tempLibInstallationDir = IOUtils.tempDir("Rlib.", "");
            tempFiles.add(tempLibSourceDir);
            tempFiles.add(tempLibInstallationDir);

            // Build one R expression that installs/loads the libraries and then sources each script.
            StringBuilder expression = new StringBuilder("tempLibDir = '").append(tempLibInstallationDir).append("';");

            if (this.libraries.size() > 0) {
                List<String> tempLibraryPaths = new ArrayList<String>();
                for (RScriptLibrary library: this.libraries) {
                    File tempLibrary = library.writeLibrary(tempLibSourceDir);
                    tempFiles.add(tempLibrary);
                    tempLibraryPaths.add(tempLibrary.getAbsolutePath());
                }

                expression.append("install.packages(");
                expression.append("pkgs=c('").append(StringUtils.join(tempLibraryPaths, "', '")).append("'), lib=tempLibDir, repos=NULL, type='source', ");
                // Install faster by eliminating cruft.
                expression.append("INSTALL_opts=c('--no-libs', '--no-data', '--no-help', '--no-demo', '--no-exec')");
                expression.append(");");

                for (RScriptLibrary library: this.libraries) {
                    expression.append("library('").append(library.getLibraryName()).append("', lib.loc=tempLibDir);");
                }
            }

            // Script resources are extracted to temp files so R can source() them from disk.
            for (Resource script: this.scriptResources) {
                File tempScript = IOUtils.writeTempResource(script);
                tempFiles.add(tempScript);
                expression.append("source('").append(tempScript.getAbsolutePath()).append("');");
            }

            for (File script: this.scriptFiles) {
                expression.append("source('").append(script.getAbsolutePath()).append("');");
            }

            // Final command line: Rscript -e <expression> <user args...>
            String[] cmd = new String[this.args.size() + 3];
            int i = 0;
            cmd[i++] = RSCRIPT_BINARY;
            cmd[i++] = "-e";
            cmd[i++] = expression.toString();
            for (String arg: this.args)
                cmd[i++] = arg;

            ProcessSettings processSettings = new ProcessSettings(cmd);
            // Echo the subprocess stdout/stderr only when debug logging is enabled.
            if (logger.isDebugEnabled()) {
                processSettings.getStdoutSettings().printStandard(true);
                processSettings.getStderrSettings().printStandard(true);
            }

            ProcessController controller = ProcessController.getThreadLocal();

            if (logger.isDebugEnabled()) {
                logger.debug("Executing:");
                for (String arg: cmd)
                    logger.debug("  " + arg);
            }
            int exitValue = controller.exec(processSettings).getExitValue();
            logger.debug("Result: " + exitValue);

            if (exitValue != 0)
                throw new RScriptExecutorException(
                        "RScript exited with " + exitValue +
                                (logger.isDebugEnabled() ? "" : ". Run with -l DEBUG for more info."));

            return true;
        } catch (GATKException e) {
            if (exceptOnError) {
                throw e;
            } else {
                logger.warn(e.getMessage());
                return false;
            }
        } finally {
            // Best-effort cleanup of everything we created; deleteQuietly never throws.
            for (File temp: tempFiles)
                FileUtils.deleteQuietly(temp);
        }
    }
}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/R/RScriptExecutorException.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/R/RScriptExecutorException.java
new file mode 100644
index 0000000..53ad3ac
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/R/RScriptExecutorException.java
@@ -0,0 +1,34 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.R;
+
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+
/**
 * Thrown when an Rscript process run by RScriptExecutor exits with a non-zero status.
 */
public class RScriptExecutorException extends ReviewedGATKException {
    public RScriptExecutorException(String msg) {
        super(msg);
    }
}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/R/RScriptLibrary.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/R/RScriptLibrary.java
new file mode 100644
index 0000000..56f80e3
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/R/RScriptLibrary.java
@@ -0,0 +1,66 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.R;
+
+import org.broadinstitute.gatk.utils.io.IOUtils;
+import org.broadinstitute.gatk.utils.io.Resource;
+
+import java.io.File;
+
+/**
+ * Libraries embedded in the StingUtils package.
+ */
+public enum RScriptLibrary {
+    GSALIB("gsalib");
+
+    private final String name;
+
+    private RScriptLibrary(String name) {
+        this.name = name;
+    }
+
+    public String getLibraryName() {
+        return this.name;
+    }
+
+    public String getResourcePath() {
+        return name + ".tar.gz";
+    }
+
+    /**
+     * Writes the library source code to a temporary tar.gz file and returns the path.
+     * @return The path to the library source code. The caller must delete the code when done.
+     */
+    public File writeTemp() {
+        return IOUtils.writeTempResource(new Resource(getResourcePath(), RScriptLibrary.class));
+    }
+
+    public File writeLibrary(File tempDir) {
+        File libraryFile = new File(tempDir, getLibraryName());
+        IOUtils.writeResource(new Resource(getResourcePath(), RScriptLibrary.class), libraryFile);
+        return libraryFile;
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/R/RUtils.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/R/RUtils.java
new file mode 100644
index 0000000..982cfa3
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/R/RUtils.java
@@ -0,0 +1,91 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.R;
+
+import org.apache.commons.lang.StringUtils;
+
+import java.text.SimpleDateFormat;
+import java.util.Collection;
+import java.util.Date;
+
/**
 * Helpers for rendering Java collections as R literal expressions.
 *
 * All converters map a null collection to the R value NA and a non-null collection
 * to a c(...) vector literal.  Note: none of the converters escape embedded single
 * quotes, so string values containing ' will produce invalid R code.
 *
 * This class is intentionally stdlib-only: the previous dependency on
 * commons-lang StringUtils.join is replaced by the private join() helper, which
 * all three converters now share for consistency.
 */
public class RUtils {
    /**
     * Converts a collection of values to an R compatible list. A null list will return NA,
     * otherwise the values will be wrapped in single quotes and combined with c().
     * @param list Collection of values
     * @return The R representation of the list
     */
    public static String toStringList(Collection<? extends CharSequence> list) {
        if (list == null)
            return "NA";
        if (list.size() == 0)
            return "c()";
        return "c('" + join(list, "','") + "')";
    }

    /**
     * Converts a collection of values to an R compatible list. A null list will return NA,
     * otherwise the values will be combined with c().
     * @param list Collection of values
     * @return The R representation of the list
     */
    public static String toNumberList(Collection<? extends Number> list) {
        return list == null ? "NA" : "c(" + join(list, ",") + ")";
    }

    /**
     * Converts a collection of dates to an R compatible list using the default pattern
     * ''yyyy-MM-dd'' (each date rendered inside literal single quotes). A null list
     * will return NA.
     * @param list Collection of values
     * @return The R representation of the list
     */
    public static String toDateList(Collection<? extends Date> list) {
        return toDateList(list, "''yyyy-MM-dd''");
    }

    /**
     * Converts a collection of dates to an R compatible list formatted by pattern.
     * A null list will return NA.
     * @param list Collection of values
     * @param pattern SimpleDateFormat pattern string for each date; note that '' in the
     *                pattern emits a literal single quote in the output
     * @return The R representation of the list
     */
    public static String toDateList(Collection<? extends Date> list, String pattern) {
        if (list == null)
            return "NA";
        SimpleDateFormat format = new SimpleDateFormat(pattern);
        StringBuilder sb = new StringBuilder("c(");
        boolean first = true;
        for (Date date : list) {
            if (!first) sb.append(",");
            sb.append(format.format(date));
            first = false;
        }
        return sb.append(")").toString();
    }

    /**
     * Joins the string form of each element with separator.
     * @param list non-null collection of values
     * @param separator separator inserted between consecutive elements
     * @return the joined string ("" for an empty collection)
     */
    private static String join(Collection<?> list, String separator) {
        StringBuilder sb = new StringBuilder();
        boolean first = true;
        for (Object value : list) {
            if (!first) sb.append(separator);
            sb.append(value);
            first = false;
        }
        return sb.toString();
    }
}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/SequenceDictionaryUtils.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/SequenceDictionaryUtils.java
new file mode 100644
index 0000000..71d8740
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/SequenceDictionaryUtils.java
@@ -0,0 +1,540 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils;
+
+import java.math.BigInteger;
+import htsjdk.samtools.SAMSequenceDictionary;
+import htsjdk.samtools.SAMSequenceRecord;
+import org.apache.log4j.Logger;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+import org.broadinstitute.gatk.utils.exceptions.UserException;
+
+import java.util.*;
+
+/**
+ * Created by IntelliJ IDEA.
+ * User: depristo
+ * Date: Sep 10, 2010
+ * Time: 1:56:24 PM
+ *
+ * A series of utility functions that enable the GATK to compare two sequence dictionaries -- from the reference,
+ * from BAMs, or from RODs -- for consistency.  The system supports two basic modes: get an enum state that
+ * describes at a high level the consistency between two dictionaries, or a validateDictionaries that will
+ * blow up with a UserException if the dicts are too incompatible.
+ *
+ * Dictionaries are tested for contig name overlaps, consistency in ordering in this overlap set, and length,
+ * if available.  Examines the Engine arguments to decide if the -U option to allow dangerous seq dict inconsistency
+ * is enabled before it blows up.
+ */
+public class SequenceDictionaryUtils {
+    //
+    // for detecting lexicographically sorted human references
+    //
+    private static final boolean ENABLE_LEXICOGRAPHIC_REQUIREMENT_FOR_HUMAN = true;
+
+    // hg18
+    protected static final SAMSequenceRecord CHR1_HG18 = new SAMSequenceRecord("chr1", 247249719);
+    protected static final SAMSequenceRecord CHR2_HG18 = new SAMSequenceRecord("chr2", 242951149);
+    protected static final SAMSequenceRecord CHR10_HG18 = new SAMSequenceRecord("chr10", 135374737);
+
+    // hg19
+    protected static final SAMSequenceRecord CHR1_HG19 = new SAMSequenceRecord("chr1", 249250621);
+    protected static final SAMSequenceRecord CHR2_HG19 = new SAMSequenceRecord("chr2", 243199373);
+    protected static final SAMSequenceRecord CHR10_HG19 = new SAMSequenceRecord("chr10", 135534747);
+
+    // b36
+    protected static final SAMSequenceRecord CHR1_B36 = new SAMSequenceRecord("1", 247249719);
+    protected static final SAMSequenceRecord CHR2_B36 = new SAMSequenceRecord("2", 242951149);
+    protected static final SAMSequenceRecord CHR10_B36 = new SAMSequenceRecord("10", 135374737);
+
+    // b37
+    protected static final SAMSequenceRecord CHR1_B37 = new SAMSequenceRecord("1", 249250621);
+    protected static final SAMSequenceRecord CHR2_B37 = new SAMSequenceRecord("2", 243199373);
+    protected static final SAMSequenceRecord CHR10_B37 = new SAMSequenceRecord("10", 135534747);
+
+
+    /**
+     * High-level classification of how two sequence dictionaries relate to one another,
+     * as computed by {@link #compareDictionaries}.
+     */
+    public enum SequenceDictionaryCompatibility {
+        IDENTICAL,                      // the dictionaries are identical
+        COMMON_SUBSET,                  // there exists a common subset of equivalent contigs
+        NO_COMMON_CONTIGS,              // no overlap between dictionaries
+        UNEQUAL_COMMON_CONTIGS,         // common subset has contigs that have the same name but different lengths and/or MD5s
+        NON_CANONICAL_HUMAN_ORDER,      // human reference detected but the order of the contigs is non-standard (lexicographic, for example)
+        OUT_OF_ORDER,                   // the two dictionaries overlap but the overlapping contigs occur in different
+                                        // orders with respect to each other
+        DIFFERENT_INDICES               // the two dictionaries overlap and the overlapping contigs occur in the same
+                                        // order with respect to each other, but one or more of them have different
+                                        // indices in the two dictionaries. Eg., { chrM, chr1, chr2 } vs. { chr1, chr2 }
+    }
+
+    /**
+     * @param validationExclusion exclusions to validation
+     * @return Returns true if the engine is in tolerant mode and we'll let through dangerous but not fatal dictionary inconsistency
+     */
+    // NOTE(review): method name misspells "Incompatibilities"; renaming would touch every call site.
+    private static boolean allowNonFatalIncompabilities(final ValidationExclusion.TYPE validationExclusion) {
+        return ( validationExclusion == ValidationExclusion.TYPE.ALLOW_SEQ_DICT_INCOMPATIBILITY ||
+                        validationExclusion == ValidationExclusion.TYPE.ALL );
+    }
+
+    /**
+     * Tests for compatibility between two sequence dictionaries.  If the dictionaries are incompatible, then
+     * UserExceptions are thrown with detailed error messages.  If the engine is in permissive mode, then
+     * logger warnings are generated instead.
+     *
+     * IDENTICAL and COMMON_SUBSET are accepted silently; NO_COMMON_CONTIGS is always fatal; the remaining
+     * cases are fatal unless allowNonFatalIncompabilities(validationExclusion) is true (then they only warn).
+     *
+     * @param logger for warnings
+     * @param validationExclusion exclusions to validation
+     * @param name1 name associated with dict1
+     * @param dict1 the sequence dictionary dict1
+     * @param name2 name associated with dict2
+     * @param dict2 the sequence dictionary dict2
+     * @param isReadsToReferenceComparison true if one of the dictionaries comes from a reads data source (eg., a BAM),
+     *                                     and the other from a reference data source
+     * @param intervals the user-specified genomic intervals: only required when isReadsToReferenceComparison is true,
+     *                  otherwise can be null
+     */
+    public static void validateDictionaries( final Logger logger,
+                                             final ValidationExclusion.TYPE validationExclusion,
+                                             final String name1,
+                                             final SAMSequenceDictionary dict1,
+                                             final String name2,
+                                             final SAMSequenceDictionary dict2,
+                                             final boolean isReadsToReferenceComparison,
+                                             final GenomeLocSortedSet intervals ) {
+
+        final SequenceDictionaryCompatibility type = compareDictionaries(dict1, dict2);
+
+        switch ( type ) {
+            case IDENTICAL:
+                return;
+            case COMMON_SUBSET:
+                 return;
+            case NO_COMMON_CONTIGS:
+                throw new UserException.IncompatibleSequenceDictionaries("No overlapping contigs found", name1, dict1, name2, dict2);
+
+            case UNEQUAL_COMMON_CONTIGS: {
+                // Build a detailed message describing the first mismatched pair of records.
+                final List<SAMSequenceRecord> x = findNotEqualCommonContigs(getCommonContigsByName(dict1, dict2), dict1, dict2);
+                final SAMSequenceRecord elt1 = x.get(0);
+                final SAMSequenceRecord elt2 = x.get(1);
+
+                String msg = "Found contigs with the same name but different lengths";
+                String contig1  = "  contig  " + name1 + " is named " + elt1.getSequenceName()  + " with length " + Integer.toString(elt1.getSequenceLength());
+                if ( elt1.getMd5() != null )
+                    contig1 += " and MD5 " + elt1.getMd5();
+                String contig2  = "  contig  " + name2 + " is named " + elt2.getSequenceName()  + " with length " + Integer.toString(elt2.getSequenceLength());
+                if ( elt2.getMd5() != null )
+                    contig2 += " and MD5 " + elt2.getMd5();
+                if ( elt1.getMd5() != null ||  elt2.getMd5() != null )
+                    msg += " or MD5s:";
+                msg += "\n" + contig1 + "\n" + contig2;
+
+                // todo -- replace with toString when SAMSequenceRecord has a nice toString routine
+                final UserException ex = new UserException.IncompatibleSequenceDictionaries(msg, name1, dict1, name2, dict2);
+
+                if ( allowNonFatalIncompabilities(validationExclusion) )
+                    logger.warn(ex.getMessage());
+                else
+                    throw ex;
+                break;
+            }
+
+            case NON_CANONICAL_HUMAN_ORDER: {
+                // Attribute the lexicographic-sort error to whichever dictionary is actually mis-ordered.
+                UserException ex;
+                if ( nonCanonicalHumanContigOrder(dict1) )
+                    ex = new UserException.LexicographicallySortedSequenceDictionary(name1, dict1);
+                else
+                    ex = new UserException.LexicographicallySortedSequenceDictionary(name2, dict2);
+                
+                if ( allowNonFatalIncompabilities(validationExclusion) )
+                    logger.warn(ex.getMessage());
+                else
+                    throw ex;
+                break;
+            }
+
+            case OUT_OF_ORDER: {
+                // NOTE(review): name2 + "is not" lacks a separating space, so the message renders as
+                // "<name2>is not the same"; fixing it would change a runtime string in this patch.
+                UserException ex = new UserException.IncompatibleSequenceDictionaries(
+			"The contig order in " + name1 + " and " + name2 + "is not "
+			+ "the same; to fix this please see: "
+			+ "(https://www.broadinstitute.org/gatk/guide/article?id=1328), "
+			+ " which describes reordering contigs in BAM and VCF files.",
+			name1, dict1, name2, dict2);
+                if ( allowNonFatalIncompabilities(validationExclusion) )
+                    logger.warn(ex.getMessage());
+                else
+                    throw ex;
+                break;
+            }
+
+            case DIFFERENT_INDICES: {
+                // This is currently only known to be problematic when the index mismatch is between a bam and the
+                // reference AND when the user's intervals actually include one or more of the contigs that are
+                // indexed differently from the reference. In this case, the engine will fail to correctly serve
+                // up the reads from those contigs, so throw an exception unless unsafe operations are enabled.
+                if ( isReadsToReferenceComparison && intervals != null ) {
+
+                     final Set<String> misindexedContigs = findMisindexedContigsInIntervals(intervals, dict1, dict2);
+
+                     if ( ! misindexedContigs.isEmpty() ) {
+                         final String msg = String.format("The following contigs included in the intervals to process have " +
+                                                          "different indices in the sequence dictionaries for the reads vs. " +
+                                                          "the reference: %s.  As a result, the GATK engine will not correctly " +
+                                                          "process reads from these contigs. You should either fix the sequence " +
+                                                          "dictionaries for your reads so that these contigs have the same indices " +
+                                                          "as in the sequence dictionary for your reference, or exclude these contigs " +
+                                                          "from your intervals. This error can be disabled via -U %s, " +
+                                                          "however this is not recommended as the GATK engine will not behave correctly.",
+                                                          misindexedContigs, ValidationExclusion.TYPE.ALLOW_SEQ_DICT_INCOMPATIBILITY);
+                         final UserException ex = new UserException.IncompatibleSequenceDictionaries(msg, name1, dict1, name2, dict2);
+
+                         if ( allowNonFatalIncompabilities(validationExclusion) )
+                             logger.warn(ex.getMessage());
+                         else
+                             throw ex;
+                     }
+                }
+                break;
+            }
+
+            default:
+                throw new ReviewedGATKException("Unexpected SequenceDictionaryComparison type: " + type);
+        }
+    }
+
+    /**
+     * Workhorse routine that takes two dictionaries and returns their compatibility.
+     *
+     * Checks are ordered: a non-canonical human contig order in either dictionary is reported
+     * before any overlap-based comparison; then no-overlap, length mismatches, relative-order
+     * mismatches, exact identity, and index mismatches, in that priority.
+     *
+     * @param dict1 first sequence dictionary
+     * @param dict2 second sequence dictionary
+     * @return A SequenceDictionaryCompatibility enum value describing the compatibility of the two dictionaries
+     */
+    public static SequenceDictionaryCompatibility compareDictionaries( final SAMSequenceDictionary dict1, final SAMSequenceDictionary dict2) {
+        if ( nonCanonicalHumanContigOrder(dict1) || nonCanonicalHumanContigOrder(dict2) )
+            return SequenceDictionaryCompatibility.NON_CANONICAL_HUMAN_ORDER;
+
+        final Set<String> commonContigs = getCommonContigsByName(dict1, dict2);
+
+        if (commonContigs.isEmpty())
+            return SequenceDictionaryCompatibility.NO_COMMON_CONTIGS;
+        else if ( ! commonContigsHaveSameLengths(commonContigs, dict1, dict2) )
+            return SequenceDictionaryCompatibility.UNEQUAL_COMMON_CONTIGS;
+        else if ( ! commonContigsAreInSameRelativeOrder(commonContigs, dict1, dict2) )
+            return SequenceDictionaryCompatibility.OUT_OF_ORDER;
+        else if ( commonContigs.size() == dict1.size() && commonContigs.size() == dict2.size() )
+            return SequenceDictionaryCompatibility.IDENTICAL;
+        else if ( ! commonContigsAreAtSameIndices(commonContigs, dict1, dict2) )
+            return SequenceDictionaryCompatibility.DIFFERENT_INDICES;
+        else {
+            return SequenceDictionaryCompatibility.COMMON_SUBSET;
+        }
+    }
+
+    /**
+     * Utility function that tests whether the commonContigs in both dicts are equivalent.  Equivalence means
+     * that the seq records have the same length, if both are non-zero.
+     *
+     * @param commonContigs names of the contigs present in both dictionaries
+     * @param dict1 first sequence dictionary
+     * @param dict2 second sequence dictionary
+     * @return true if all of the common contigs are equivalent
+     */
+    private static boolean commonContigsHaveSameLengths(final Set<String> commonContigs, final SAMSequenceDictionary dict1, final SAMSequenceDictionary dict2) {
+        return findNotEqualCommonContigs(commonContigs, dict1, dict2) == null;
+    }
+
+    /**
+     * Returns a List(x,y) that contains two sequence records that are not equal among the common contigs in both dicts.  Returns
+     * null if all common contigs are equivalent
+     *
+     * @param commonContigs names of the contigs present in both dictionaries
+     * @param dict1 first sequence dictionary
+     * @param dict2 second sequence dictionary
+     * @return a two-element list (record from dict1, record from dict2) for the first mismatch found, or null if none
+     */
+    private static List<SAMSequenceRecord> findNotEqualCommonContigs(final Set<String> commonContigs, final SAMSequenceDictionary dict1, final SAMSequenceDictionary dict2) {
+        for ( String name : commonContigs ) {
+            SAMSequenceRecord elt1 = dict1.getSequence(name);
+            SAMSequenceRecord elt2 = dict2.getSequence(name);
+            if ( ! sequenceRecordsAreEquivalent(elt1, elt2) )
+                return Arrays.asList(elt1,elt2);
+        }
+
+        return null;
+    }
+
+    /**
+     * Helper routine that determines if two sequence records are equivalent, defined as having the same name,
+     * lengths (if both are non-zero) and MD5 (if present)
+     *
+     * MD5s are compared numerically after parsing as base-16 BigIntegers, so the comparison
+     * is insensitive to hex case and leading zeros.
+     *
+     * @param record1  a SAMSequenceRecord
+     * @param record2  a SAMSequenceRecord
+     * @return true if the records are equivalent, false otherwise
+     */
+    private static boolean sequenceRecordsAreEquivalent(final SAMSequenceRecord record1, final SAMSequenceRecord record2) {
+        if ( record1 == record2 ) return true;
+        if ( record1 == null || record2 == null ) return false;
+
+        // compare length; a zero length means "unknown" and matches anything
+        if ( record1.getSequenceLength() != 0 && record2.getSequenceLength() != 0 && record1.getSequenceLength() != record2.getSequenceLength() )
+            return false;
+
+        // compare name
+        if ( !record1.getSequenceName().equals(record2.getSequenceName() ))
+            return false;
+
+         // compare MD5, only when both records carry one
+         if ( record1.getMd5() != null && record2.getMd5() != null ){
+            final BigInteger firstMd5 = new BigInteger(record1.getMd5(), 16);
+            final BigInteger secondMd5 = new BigInteger(record2.getMd5(), 16);
+            if ( !firstMd5.equals(secondMd5) )
+                return false;
+       }
+
+        return true;
+    }
+
+    /**
+     * A very simple (and naive) algorithm to determine (1) if the dict is a human reference (hg18/hg19) and if it's
+     * lexicographically sorted.  Works by matching lengths of the static chr1, chr10, and chr2, and then if these
+     * are all matched, requiring that the order be chr1, chr2, chr10.
+     *
+     * @param dict the sequence dictionary to examine
+     * @return true only if all three human chromosomes were identified by length AND their
+     *         indices are not in the canonical chr1 &lt; chr2 &lt; chr10 order; false otherwise
+     */
+    private static boolean nonCanonicalHumanContigOrder(final SAMSequenceDictionary dict) {
+        if ( ! ENABLE_LEXICOGRAPHIC_REQUIREMENT_FOR_HUMAN ) // if we don't want to enable this test, just return false
+            return false;
+
+        SAMSequenceRecord chr1 = null, chr2 = null, chr10 = null;
+
+        // identify the three probe chromosomes purely by sequence length (hg18 or hg19 lengths)
+        for ( final SAMSequenceRecord elt : dict.getSequences() ) {
+            if ( isHumanSeqRecord(elt, CHR1_HG18, CHR1_HG19 ) ) chr1 = elt;
+            if ( isHumanSeqRecord(elt, CHR2_HG18, CHR2_HG19 ) ) chr2 = elt;
+            if ( isHumanSeqRecord(elt, CHR10_HG18, CHR10_HG19 ) ) chr10 = elt;
+        }
+
+        if ( chr1 != null && chr2 != null && chr10 != null) {
+            // we found them all
+            return ! ( chr1.getSequenceIndex() < chr2.getSequenceIndex() && chr2.getSequenceIndex() < chr10.getSequenceIndex() );
+        } else {
+            return false;
+        }
+    }
+
+    /**
+     * Trivial helper that returns true if elt has the same length as rec1 or rec2
+     * @param elt record to test
+     * @param rec1 first record to test for length equivalence
+     * @param rec2 second record to test for length equivalence
+     * @return true if elt has the same length as either rec1 or rec2
+     */
+    private static boolean isHumanSeqRecord(SAMSequenceRecord elt, SAMSequenceRecord rec1, SAMSequenceRecord rec2 ) {
+        return elt.getSequenceLength() == rec1.getSequenceLength() || elt.getSequenceLength() == rec2.getSequenceLength();
+    }
+
+    /**
+     * Returns true if the common contigs in dict1 and dict2 are in the same relative order, without regard to
+     * absolute index position. This is accomplished by getting the common contigs in both dictionaries, sorting
+     * these according to their indices, and then walking through the sorted list to ensure that each ordered contig
+     * is equivalent
+     *
+     * @param commonContigs names of the contigs common to both dictionaries
+     * @param dict1 first SAMSequenceDictionary
+     * @param dict2 second SAMSequenceDictionary
+     * @return true if the common contigs occur in the same relative order in both dict1 and dict2, otherwise false
+     */
+    private static boolean commonContigsAreInSameRelativeOrder(final Set<String> commonContigs, final SAMSequenceDictionary dict1, final SAMSequenceDictionary dict2) {
+        // both lists contain exactly the commonContigs records, so they have equal size
+        List<SAMSequenceRecord> list1 = sortSequenceListByIndex(getSequencesOfName(commonContigs, dict1));
+        List<SAMSequenceRecord> list2 = sortSequenceListByIndex(getSequencesOfName(commonContigs, dict2));
+
+        for ( int i = 0; i < list1.size(); i++ ) {
+            SAMSequenceRecord elt1 = list1.get(i);
+            SAMSequenceRecord elt2 = list2.get(i);
+            if ( ! elt1.getSequenceName().equals(elt2.getSequenceName()) )
+                return false;
+        }
+
+        return true;
+    }
+
+    /**
+     * Gets the subset of SAMSequenceRecords in commonContigs in dict
+     *
+     * @param commonContigs names of the contigs to extract; each must be present in dict
+     * @param dict the dictionary to pull records from
+     * @return a new mutable List of the records for commonContigs, in the Set's iteration order
+     */
+    private static List<SAMSequenceRecord> getSequencesOfName(final Set<String> commonContigs, final SAMSequenceDictionary dict) {
+        final List<SAMSequenceRecord> l = new ArrayList<SAMSequenceRecord>(commonContigs.size());
+        for ( String name : commonContigs ) {
+            l.add(dict.getSequence(name) );
+        }
+
+        return l;
+    }
+
+    /**
+     * Compares sequence records by their order (i.e., their index within their own dictionary)
+     */
+    private static class CompareSequenceRecordsByIndex implements Comparator<SAMSequenceRecord> {
+        public int compare(SAMSequenceRecord x, SAMSequenceRecord y) {
+            return Integer.valueOf(x.getSequenceIndex()).compareTo(y.getSequenceIndex());
+        }
+    }
+
+    /**
+     * Returns a sorted list of SAMSequenceRecords sorted by their indices.  Note that the
+     * list is modified in place, so the returned list is == to the unsorted list.
+     *
+     * @param unsorted the list to sort; mutated by this call
+     * @return the same list instance, now ordered by ascending sequence index
+     */
+    private static List<SAMSequenceRecord> sortSequenceListByIndex(final List<SAMSequenceRecord> unsorted) {
+        Collections.sort(unsorted, new CompareSequenceRecordsByIndex());
+        return unsorted;
+    }
+
+    /**
+     * Checks whether the common contigs in the given sequence dictionaries occur at the same indices
+     * in both dictionaries
+     *
+     * @param commonContigs Set of names of the contigs that occur in both dictionaries
+     * @param dict1 first sequence dictionary
+     * @param dict2 second sequence dictionary
+     * @return true if the contigs common to dict1 and dict2 occur at the same indices in both dictionaries,
+     *         otherwise false
+     */
+    private static boolean commonContigsAreAtSameIndices( final Set<String> commonContigs, final SAMSequenceDictionary dict1, final SAMSequenceDictionary dict2 ) {
+        for ( String commonContig : commonContigs ) {
+            final SAMSequenceRecord dict1Record = dict1.getSequence(commonContig);
+            final SAMSequenceRecord dict2Record = dict2.getSequence(commonContig);
+
+            // Each common contig must have the same index in both dictionaries
+            if ( dict1Record.getSequenceIndex() != dict2Record.getSequenceIndex() ) {
+                return false;  // short-circuit on the first mismatch
+            }
+        }
+
+        return true;
+    }
+
+    /**
+     * Gets the set of names of the contigs found in both sequence dictionaries that have different indices
+     * in the two dictionaries.
+     *
+     * @param commonContigs Set of names of the contigs common to both dictionaries
+     * @param dict1 first sequence dictionary
+     * @param dict2 second sequence dictionary
+     * @return a Set containing the names of the common contigs indexed differently in dict1 vs. dict2,
+     *         or an empty Set if there are no such contigs
+     */
+    private static Set<String> getDifferentlyIndexedCommonContigs( final Set<String> commonContigs,
+                                                                   final SAMSequenceDictionary dict1,
+                                                                   final SAMSequenceDictionary dict2 ) {
+
+        // LinkedHashSet preserves the iteration order of commonContigs in the result
+        final Set<String> differentlyIndexedCommonContigs = new LinkedHashSet<String>(Utils.optimumHashSize(commonContigs.size()));
+
+        for ( String commonContig : commonContigs ) {
+            if ( dict1.getSequence(commonContig).getSequenceIndex() != dict2.getSequence(commonContig).getSequenceIndex() ) {
+                differentlyIndexedCommonContigs.add(commonContig);
+            }
+        }
+
+        return differentlyIndexedCommonContigs;
+    }
+
+    /**
+     * Finds the names of any contigs indexed differently in the two sequence dictionaries that also
+     * occur in the provided set of intervals.
+     *
+     * @param intervals GenomeLocSortedSet containing the intervals to check
+     * @param dict1 first sequence dictionary
+     * @param dict2 second sequence dictionary
+     * @return a Set of the names of the contigs indexed differently in dict1 vs dict2 that also
+     *         occur in the provided intervals, or an empty Set if there are no such contigs
+     */
+    private static Set<String> findMisindexedContigsInIntervals( final GenomeLocSortedSet intervals,
+                                                                 final SAMSequenceDictionary dict1,
+                                                                 final SAMSequenceDictionary dict2 ) {
+
+        final Set<String> differentlyIndexedCommonContigs = getDifferentlyIndexedCommonContigs(getCommonContigsByName(dict1, dict2), dict1, dict2);
+        final Set<String> misindexedContigsInIntervals = new LinkedHashSet<String>(Utils.optimumHashSize(differentlyIndexedCommonContigs.size()));
+
+        // differentlyIndexedCommonContigs is a LinkedHashSet, so contains() is O(1) and this loop is O(intervals)
+        for ( GenomeLoc interval : intervals ) {
+            if ( differentlyIndexedCommonContigs.contains(interval.getContig()) ) {
+                misindexedContigsInIntervals.add(interval.getContig());
+            }
+        }
+
+        return misindexedContigsInIntervals;
+    }
+
+    /**
+     * Returns the set of contig names found in both dicts.
+     * @param dict1 first sequence dictionary
+     * @param dict2 second sequence dictionary
+     * @return a new mutable Set containing the contig names common to both dictionaries
+     */
+    public static Set<String> getCommonContigsByName(SAMSequenceDictionary dict1, SAMSequenceDictionary dict2) {
+        final Set<String> intersectingSequenceNames = getContigNames(dict1);
+        intersectingSequenceNames.retainAll(getContigNames(dict2));
+        return intersectingSequenceNames;
+    }
+
+    /**
+     * Returns a new mutable Set of all contig names in the given dictionary.
+     * @param dict the sequence dictionary to read names from
+     * @return a HashSet containing every sequence name in dict (iteration order unspecified)
+     */
+    public static Set<String> getContigNames(SAMSequenceDictionary dict) {
+        final Set<String> contigNames = new HashSet<String>(Utils.optimumHashSize(dict.size()));
+        for (SAMSequenceRecord dictionaryEntry : dict.getSequences())
+            contigNames.add(dictionaryEntry.getSequenceName());
+        return contigNames;
+    }
+
+    /**
+     * Returns a compact String representation of the sequence dictionary it's passed
+     *
+     * The format of the returned String is:
+     * [ contig1Name(length: contig1Length) contig2Name(length: contig2Length) ... ]
+     *
+     * @param dict a non-null SAMSequenceDictionary
+     * @return A String containing all of the contig names and lengths from the sequence dictionary it's passed
+     * @throws IllegalArgumentException if dict is null
+     */
+    public static String getDictionaryAsString( final SAMSequenceDictionary dict ) {
+        if ( dict == null ) {
+            throw new IllegalArgumentException("Sequence dictionary must be non-null");
+        }
+
+        final StringBuilder s = new StringBuilder("[ ");
+
+        // each entry is followed by a space, so a trailing space precedes the closing bracket
+        for ( SAMSequenceRecord dictionaryEntry : dict.getSequences() ) {
+            s.append(dictionaryEntry.getSequenceName());
+            s.append("(length:");
+            s.append(dictionaryEntry.getSequenceLength());
+            s.append(") ");
+        }
+
+        s.append("]");
+
+        return s.toString();
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/SequencerFlowClass.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/SequencerFlowClass.java
new file mode 100644
index 0000000..466f42b
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/SequencerFlowClass.java
@@ -0,0 +1,38 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils;
+
+/**
+ * In broad terms, each sequencing platform can be classified by whether it flows nucleotides in some order
+ * such that homopolymers get sequenced in a single event (ie 454 or Ion) or it reads each position in the
+ * sequence one at a time, regardless of base composition (Illumina or Solid).  This information is primarily
+ * useful in the BQSR process
+ */
+public enum SequencerFlowClass {
+    DISCRETE, // each position read independently of base composition (e.g. Illumina, SOLiD)
+    FLOW,     // homopolymers sequenced in a single flow event (e.g. 454, Ion)
+    OTHER //Catch-all for unknown platforms, as well as relics that GATK doesn't handle well (Capillary, Helicos)
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/SimpleTimer.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/SimpleTimer.java
index 39d6fa6..5101064 100644
--- a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/SimpleTimer.java
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/SimpleTimer.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/UnvalidatingGenomeLoc.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/UnvalidatingGenomeLoc.java
new file mode 100644
index 0000000..1b7ac79
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/UnvalidatingGenomeLoc.java
@@ -0,0 +1,50 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils;
+
+import com.google.java.contract.Requires;
+
+/**
+ * GenomeLocs are very useful objects to keep track of genomic locations and perform set operations
+ * with them.
+ *
+ * However, GenomeLocs are bound to strict validation through the GenomeLocParser and cannot
+ * be created easily for small tasks that do not require the rigors of the GenomeLocParser validation
+ *
+ * UnvalidatingGenomeLoc is a simple utility to create GenomeLocs without going through the parser.
+ *
+ * WARNING: SHOULD BE USED ONLY BY EXPERT USERS WHO KNOW WHAT THEY ARE DOING!
+ *
+ * User: carneiro
+ * Date: 10/16/12
+ * Time: 2:07 PM
+ */
+public class UnvalidatingGenomeLoc extends GenomeLoc {
+
+    /**
+     * Constructs a GenomeLoc directly from its raw components, bypassing all
+     * GenomeLocParser validation (no contig/bounds checking is performed).
+     *
+     * @param contigName  name of the contig
+     * @param contigIndex index of the contig within its sequence dictionary
+     * @param start       start position (inclusive)
+     * @param stop        stop position (inclusive)
+     */
+    public UnvalidatingGenomeLoc(String contigName, int contigIndex, int start, int stop) {
+        super(contigName, contigIndex, start, stop);
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/Utils.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/Utils.java
new file mode 100644
index 0000000..ce54e8f
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/Utils.java
@@ -0,0 +1,1174 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils;
+
import com.google.java.contract.Ensures;
import com.google.java.contract.Requires;

import org.apache.log4j.Logger;

import java.lang.reflect.Array;
import java.math.BigInteger;
import java.net.InetAddress;
import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.*;
+
+/**
+ * Created by IntelliJ IDEA.
+ * User: depristo
+ * Date: Feb 24, 2009
+ * Time: 10:12:31 AM
+ * To change this template use File | Settings | File Templates.
+ */
+public class Utils {
    /**
     *  Static random number generator and seed.
     */
    private static final long GATK_RANDOM_SEED = 47382911L;
    // Shared mutable RNG, seeded with a fixed constant for reproducible runs.
    // NOTE(review): shared across callers and not synchronized — confirm single-threaded use.
    private static Random randomGenerator = new Random(GATK_RANDOM_SEED);
    public static Random getRandomGenerator() { return randomGenerator; }
    // Restores the default fixed seed (reproducibility for tests/runs).
    public static void resetRandomGenerator() { randomGenerator.setSeed(GATK_RANDOM_SEED); }
    // Reseeds the shared RNG with a caller-supplied seed.
    public static void resetRandomGenerator(long seed) { randomGenerator.setSeed(seed); }

    // Layout of the warnUser() banner: wrapped-text width, per-line prefix,
    // and a border of '*' characters spanning the full line width.
    private static final int TEXT_WARNING_WIDTH = 68;
    private static final String TEXT_WARNING_PREFIX = "* ";
    private static final String TEXT_WARNING_BORDER = dupString('*', TEXT_WARNING_PREFIX.length() + TEXT_WARNING_WIDTH);
    private static final char ESCAPE_CHAR = '\u001B';
    // ANSI terminal escape sequences: start blinking text / reset attributes
    public static final String TEXT_BLINK = ESCAPE_CHAR + "[5m";
    public static final String TEXT_RESET = ESCAPE_CHAR + "[m";

    /** our log, which we want to capture anything from this class */
    private static Logger logger = Logger.getLogger(Utils.class);

    // Default load factor of java.util.HashMap; used by optimumHashSize().
    public static final float JAVA_DEFAULT_HASH_LOAD_FACTOR = 0.75f;
+
+    /**
+     * Boolean xor operation.  Only true if x != y.
+     *
+     * @param x a boolean
+     * @param y a boolean
+     * @return true if x != y
+     */
+    public static boolean xor(final boolean x, final boolean y) {
+        return x != y;
+    }
+
+    /**
+     * Invert logic if specified
+     *
+     * @param logic boolean logical operation value
+     * @param invert whether to invert logic
+     * @return invert logic if invert flag is true, otherwise leave the logic
+     */
+    public static boolean invertLogic(final boolean logic, final boolean invert){
+            return logic ^ invert;
+    }
+
+    /**
+     * Calculates the optimum initial size for a hash table given the maximum number
+     * of elements it will need to hold. The optimum size is the smallest size that
+     * is guaranteed not to result in any rehash/table-resize operations.
+     *
+     * @param maxElements  The maximum number of elements you expect the hash table
+     *                     will need to hold
+     * @return             The optimum initial size for the table, given maxElements
+     */
+    public static int optimumHashSize ( int maxElements ) {
+        return (int)(maxElements / JAVA_DEFAULT_HASH_LOAD_FACTOR) + 2;
+    }
+
+    /**
+     * Compares two objects, either of which might be null.
+     *
+     * @param lhs One object to compare.
+     * @param rhs The other object to compare.
+     *
+     * @return True if the two objects are equal, false otherwise.
+     */
+    public static boolean equals(Object lhs, Object rhs) {
+        return lhs == null && rhs == null || lhs != null && lhs.equals(rhs);
+    }
+
+    public static <T> List<T> cons(final T elt, final List<T> l) {
+        List<T> l2 = new ArrayList<T>();
+        l2.add(elt);
+        if (l != null) l2.addAll(l);
+        return l2;
+    }
+
    /** Logs msg as a prominent multi-line warning banner via this class's logger. */
    public static void warnUser(final String msg) {
        warnUser(logger, msg);
    }
+    
    /**
     * Logs msg as a prominent multi-line warning banner.
     *
     * @param logger destination logger; each banner line is logged at WARN level
     * @param msg    the warning text; may contain embedded newlines
     */
    public static void warnUser(final Logger logger, final String msg) {
        for (final String line: warnUserLines(msg))
            logger.warn(line);
    }
+
+    public static List<String> warnUserLines(final String msg) {
+        List<String> results = new ArrayList<>();
+        results.add(String.format(TEXT_WARNING_BORDER));
+        results.add(String.format(TEXT_WARNING_PREFIX + "WARNING:"));
+        results.add(String.format(TEXT_WARNING_PREFIX));
+        prettyPrintWarningMessage(results, msg);
+        results.add(String.format(TEXT_WARNING_BORDER));
+        return results;
+    }
+
    /**
     * pretty print the warning message supplied
     *
     * Wraps each input line to TEXT_WARNING_WIDTH columns, breaking at the last
     * whitespace when one exists, and prepends TEXT_WARNING_PREFIX to every
     * emitted line.
     *
     * @param results the pretty printed message lines are appended to this list
     * @param message the message; may contain "\n" or "\r\n" line breaks
     */
    private static void prettyPrintWarningMessage(final List<String> results, final String message) {
        for (final String line: message.split("\\r?\\n")) {
            final StringBuilder builder = new StringBuilder(line);
            while (builder.length() > TEXT_WARNING_WIDTH) {
                // prefer to break at whitespace; fall back to a hard break at the width
                int space = getLastSpace(builder, TEXT_WARNING_WIDTH);
                if (space <= 0) space = TEXT_WARNING_WIDTH;
                results.add(String.format("%s%s", TEXT_WARNING_PREFIX, builder.substring(0, space)));
                // drops the character at 'space' (the break point) from the output.
                // NOTE(review): on a hard break (no whitespace found) this silently
                // discards a real character — verify that is intended.
                builder.delete(0, space + 1);
            }
            results.add(String.format("%s%s", TEXT_WARNING_PREFIX, builder));
        }
    }
+
    /**
     * Returns the last whitespace location in string, before width characters.
     *
     * ANSI escape sequences (ESC ... terminated by a letter) are treated as
     * zero-width: each character that is part of an escape sequence extends the
     * scan window by one so that invisible formatting codes do not count
     * against the width budget.
     *
     * @param message The message to break.
     * @param width The width of the line.
     * @return The index of the last whitespace within the (escape-adjusted)
     *         width, or -1 if none was found.
     */
    private static int getLastSpace(final CharSequence message, int width) {
        final int length = message.length();
        int stopPos = width;            // effective scan limit; grows past escape chars
        int currPos = 0;
        int lastSpace = -1;
        boolean inEscape = false;       // true while inside an ESC sequence
        while (currPos < stopPos && currPos < length) {
            final char c = message.charAt(currPos);
            if (c == ESCAPE_CHAR) {
                stopPos++;
                inEscape = true;
            } else if (inEscape) {
                stopPos++;
                if (Character.isLetter(c))  // a letter terminates the escape sequence
                    inEscape = false;
            } else if (Character.isWhitespace(c)) {
                lastSpace = currPos;
            }
            currPos++;
        }
        return lastSpace;
    }
+
+    /**
+     * join the key value pairs of a map into one string, i.e. myMap = [A->1,B->2,C->3] with a call of:
+     * joinMap("-","*",myMap) -> returns A-1*B-2*C-3
+     *
+     * Be forewarned, if you're not using a map that is aware of the ordering (i.e. HashMap instead of LinkedHashMap)
+     * the ordering of the string you get back might not be what you expect! (i.e. C-3*A-1*B-2 vrs A-1*B-2*C-3)
+     *
+     * @param keyValueSeperator the string to seperate the key-value pairs
+     * @param recordSeperator the string to use to seperate each key-value pair from other key-value pairs
+     * @param map the map to draw from
+     * @param <L> the map's key type
+     * @param <R> the map's value type
+     * @return a string representing the joined map
+     */
+    public static <L,R> String joinMap(String keyValueSeperator, String recordSeperator, Map<L,R> map) {
+        if (map.size() < 1) { return null; }
+        String joinedKeyValues[] = new String[map.size()];
+        int index = 0;
+        for (L key : map.keySet()) {
+           joinedKeyValues[index++] = String.format("%s%s%s",key.toString(),keyValueSeperator,map.get(key).toString());
+        }
+        return join(recordSeperator,joinedKeyValues);
+    }
+
    /**
     * Splits a String using indexOf instead of regex to speed things up.
     *
     * @param str the string to split.
     * @param delimiter the delimiter used to split the string.
     * @return an array of tokens.
     */
    public static ArrayList<String> split(String str, String delimiter) {
        return split(str, delimiter, 10); // 10 = default expected token count
    }
+
+    /**
+     * Splits a String using indexOf instead of regex to speed things up.
+     *
+     * @param str the string to split.
+     * @param delimiter the delimiter used to split the string.
+     * @param expectedNumTokens The number of tokens expected. This is used to initialize the ArrayList.
+     * @return an array of tokens.
+     */
+    public static ArrayList<String> split(String str, String delimiter, int expectedNumTokens) {
+        final ArrayList<String> result =  new ArrayList<String>(expectedNumTokens);
+
+        int delimiterIdx = -1;
+        do {
+            final int tokenStartIdx = delimiterIdx + 1;
+            delimiterIdx = str.indexOf(delimiter, tokenStartIdx);
+            final String token = (delimiterIdx != -1 ? str.substring(tokenStartIdx, delimiterIdx) : str.substring(tokenStartIdx) );
+            result.add(token);
+        } while( delimiterIdx != -1 );
+
+        return result;
+    }
+
+
    /**
     * join an array of strings given a separator
     * @param separator the string to insert between each array element
     * @param strings the array of strings
     * @return a string, which is the joining of all array values with the separator
     */
    public static String join(String separator, String[] strings) {
        return join(separator, strings, 0, strings.length);
    }
+
+    public static String join(String separator, String[] strings, int start, int end) {
+        if ((end - start) == 0) {
+            return "";
+        }
+        StringBuilder ret = new StringBuilder(strings[start]);
+        for (int i = start + 1; i < end; ++i) {
+            ret.append(separator);
+            ret.append(strings[i]);
+        }
+        return ret.toString();
+    }
+
+    public static String join(String separator, int[] ints) {
+        if ( ints == null || ints.length == 0)
+            return "";
+        else {
+            StringBuilder ret = new StringBuilder();
+            ret.append(ints[0]);
+            for (int i = 1; i < ints.length; ++i) {
+                ret.append(separator);
+                ret.append(ints[i]);
+            }
+            return ret.toString();
+        }
+    }
+
+    /**
+     * Create a new list that contains the elements of left along with elements elts
+     * @param left a non-null list of elements
+     * @param elts a varargs vector for elts to append in order to left
+     * @return A newly allocated linked list containing left followed by elts
+     */
+    public static <T> List<T> append(final List<T> left, T ... elts) {
+        final List<T> l = new LinkedList<T>(left);
+        l.addAll(Arrays.asList(elts));
+        return l;
+    }
+
+    /**
+     * Returns a string of the values in joined by separator, such as A,B,C
+     *
+     * @param separator separator character
+     * @param doubles   the array with values
+     * @return a string with the values separated by the separator
+     */
+    public static String join(String separator, double[] doubles) {
+        if ( doubles == null || doubles.length == 0)
+            return "";
+        else {
+            StringBuilder ret = new StringBuilder();
+            ret.append(doubles[0]);
+            for (int i = 1; i < doubles.length; ++i) {
+                ret.append(separator);
+                ret.append(doubles[i]);
+            }
+            return ret.toString();
+        }
+    }
+
+    /**
+     * Returns a string of the form elt1.toString() [sep elt2.toString() ... sep elt.toString()] for a collection of
+     * elti objects (note there's no actual space between sep and the elti elements).  Returns
+     * "" if collection is empty.  If collection contains just elt, then returns elt.toString()
+     *
+     * @param separator the string to use to separate objects
+     * @param objects a collection of objects.  the element order is defined by the iterator over objects
+     * @param <T> the type of the objects
+     * @return a non-null string
+     */
+    public static <T> String join(final String separator, final Collection<T> objects) {
+        if (objects.isEmpty()) { // fast path for empty collection
+            return "";
+        } else {
+            final Iterator<T> iter = objects.iterator();
+            final T first = iter.next();
+
+            if ( ! iter.hasNext() ) // fast path for singleton collections
+                return first.toString();
+            else { // full path for 2+ collection that actually need a join
+                final StringBuilder ret = new StringBuilder(first.toString());
+                while(iter.hasNext()) {
+                    ret.append(separator);
+                    ret.append(iter.next().toString());
+                }
+                return ret.toString();
+            }
+        }
+    }
+
+    /**
+     * Returns a {@link List List<Integer>} representation of an primitive int array.
+     * @param values the primitive int array to represent.
+     * @return never code {@code null}. The returned list will be unmodifiable yet it will reflect changes in values in the original array yet
+     *   you cannot change the values
+     */
+    public static List<Integer> asList(final int ... values) {
+        if (values == null)
+            throw new IllegalArgumentException("the input array cannot be null");
+        return new AbstractList<Integer>() {
+
+            @Override
+            public Integer get(final int index) {
+                return values[index];
+            }
+
+            @Override
+            public int size() {
+                return values.length;
+            }
+        };
+    }
+
+    /**
+     * Returns a {@link List List<Double>} representation of an primitive double array.
+     * @param values the primitive int array to represent.
+     * @return never code {@code null}. The returned list will be unmodifiable yet it will reflect changes in values in the original array yet
+     *   you cannot change the values.
+     */
+    public static List<Double> asList(final double ... values) {
+        if (values == null)
+            throw new IllegalArgumentException("the input array cannot be null");
+        return new AbstractList<Double>() {
+
+            @Override
+            public Double get(final int index) {
+                return values[index];
+            }
+
+            @Override
+            public int size() {
+                return values.length;
+            }
+        };
+    }
+
    /**
     * Varargs convenience overload: joins objects with separator between them.
     *
     * @param separator inserted between consecutive elements
     * @param objects the objects to join, in order
     * @return the joined string; "" if objects is empty
     */
    public static <T> String join(final String separator, final T ... objects) {
        return join(separator, Arrays.asList(objects));
    }
+
+    /**
+     * Create a new string thats a n duplicate copies of s
+     * @param s the string to duplicate
+     * @param nCopies how many copies?
+     * @return a string
+     */
+    public static String dupString(final String s, int nCopies) {
+        if ( s == null || s.equals("") ) throw new IllegalArgumentException("Bad s " + s);
+        if ( nCopies < 0 ) throw new IllegalArgumentException("nCopies must be >= 0 but got " + nCopies);
+
+        final StringBuilder b = new StringBuilder();
+        for ( int i = 0; i < nCopies; i++ )
+            b.append(s);
+        return b.toString();
+    }
+
+    public static String dupString(char c, int nCopies) {
+        char[] chars = new char[nCopies];
+        Arrays.fill(chars, c);
+        return new String(chars);
+    }
+
+    public static byte[] dupBytes(byte b, int nCopies) {
+        byte[] bytes = new byte[nCopies];
+        Arrays.fill(bytes, b);
+        return bytes;
+    }
+
+    // trim a string for the given character (i.e. not just whitespace)
+    public static String trim(String str, char ch) {
+        char[] array = str.toCharArray();
+
+
+        int start = 0;
+        while ( start < array.length && array[start] == ch )
+            start++;
+
+        int end = array.length - 1;
+        while ( end > start && array[end] == ch )
+            end--;
+
+        return str.substring(start, end+1);
+    }
+
    /**
     * Splits expressions in command args by spaces and returns the array of expressions.
     * Expressions may use single or double quotes to group any individual expression, but not both.
     *
     * @param args Arguments to parse.
     * @return Parsed expressions.
     */
    public static String[] escapeExpressions(String args) {
        // special case for ' and " so we can allow expressions.
        // Single quotes take precedence: if any ' appears anywhere, it is used
        // as the grouping delimiter for the whole string.
        if (args.indexOf('\'') != -1)
            return escapeExpressions(args, "'");
        else if (args.indexOf('\"') != -1)
            return escapeExpressions(args, "\"");
        else
            return args.trim().split(" +"); // no quoting: split on runs of spaces
    }
+
    /**
     * Splits expressions in command args by spaces and the supplied delimiter and returns the array of expressions.
     *
     * After splitting on the delimiter, even-indexed fragments are unquoted text
     * (split further on runs of spaces) and odd-indexed fragments were inside
     * quotes (kept verbatim as single tokens).
     *
     * @param args Arguments to parse.
     * @param delimiter Delimiter for grouping expressions.
     * @return Parsed expressions.
     */
    private static String[] escapeExpressions(String args, String delimiter) {
        String[] command = {};
        String[] split = args.split(delimiter);
        String arg;
        // Walk fragments two at a time: unquoted text, then the quoted expression after it.
        for (int i = 0; i < split.length - 1; i += 2) {
            arg = split[i].trim();
            if (arg.length() > 0) // if the unescaped arg has a size
                command = Utils.concatArrays(command, arg.split(" +"));
            command = Utils.concatArrays(command, new String[]{split[i + 1]});
        }
        // An odd fragment count means the string ended outside a quoted section,
        // so the trailing fragment is plain unquoted text.
        arg = split[split.length - 1].trim();
        if (split.length % 2 == 1) // if the command ends with a delimiter
            if (arg.length() > 0) // if the last unescaped arg has a size
                command = Utils.concatArrays(command, arg.split(" +"));
        return command;
    }
+
+    /**
+     * Concatenates two String arrays.
+     * @param A First array.
+     * @param B Second array.
+     * @return Concatenation of A then B.
+     */
+    public static String[] concatArrays(String[] A, String[] B) {
+       String[] C = new String[A.length + B.length];
+       System.arraycopy(A, 0, C, 0, A.length);
+       System.arraycopy(B, 0, C, A.length, B.length);
+       return C;
+    }
+
+    /**
+     * Concatenates byte arrays
+     * @return a concat of all bytes in allBytes in order
+     */
+    public static byte[] concat(final byte[] ... allBytes) {
+        int size = 0;
+        for ( final byte[] bytes : allBytes ) size += bytes.length;
+
+        final byte[] c = new byte[size];
+        int offset = 0;
+        for ( final byte[] bytes : allBytes ) {
+            System.arraycopy(bytes, 0, c, offset, bytes.length);
+            offset += bytes.length;
+        }
+
+        return c;
+    }
+
    /**
     * Appends String(s) B to array A.
     *
     * Thin varargs alias for concatArrays; inputs are not modified.
     *
     * @param A First array.
     * @param B Strings to append.
     * @return A new array: A with B(s) appended.
     */
    public static String[] appendArray(String[] A, String... B) {
        return concatArrays(A, B);
    }
+
    /** Returns a new list with the elements of c in natural (ascending) order. */
    public static <T extends Comparable<T>> List<T> sorted(Collection<T> c) {
        return sorted(c, false);
    }
+
+    public static <T extends Comparable<T>> List<T> sorted(Collection<T> c, boolean reverse) {
+        List<T> l = new ArrayList<T>(c);
+        Collections.sort(l);
+        if ( reverse ) Collections.reverse(l);
+        return l;
+    }
+
    /** Returns the map's values ordered by natural (ascending) key order. */
    public static <T extends Comparable<T>, V> List<V> sorted(Map<T,V> c) {
        return sorted(c, false);
    }
+
+    public static <T extends Comparable<T>, V> List<V> sorted(Map<T,V> c, boolean reverse) {
+        List<T> t = new ArrayList<T>(c.keySet());
+        Collections.sort(t);
+        if ( reverse ) Collections.reverse(t);
+
+        List<V> l = new ArrayList<V>();
+        for ( T k : t ) {
+            l.add(c.get(k));
+        }
+        return l;
+    }
+
+    /**
+     * Reverse a byte array of bases
+     *
+     * @param bases  the byte array of bases
+     * @return the reverse of the base byte array
+     */
+    static public byte[] reverse(byte[] bases) {
+        byte[] rcbases = new byte[bases.length];
+
+        for (int i = 0; i < bases.length; i++) {
+            rcbases[i] = bases[bases.length - i - 1];
+        }
+
+        return rcbases;
+    }
+
+    static public <T> List<T> reverse(final List<T> l) {
+        final List<T> newL = new ArrayList<T>(l);
+        Collections.reverse(newL);
+        return newL;
+    }
+
+    /**
+     * Reverse an int array of bases
+     *
+     * @param bases  the int array of bases
+     * @return the reverse of the base int array
+     */
+    static public int[] reverse(int[] bases) {
+        int[] rcbases = new int[bases.length];
+
+        for (int i = 0; i < bases.length; i++) {
+            rcbases[i] = bases[bases.length - i - 1];
+        }
+
+        return rcbases;
+    }
+
+    /**
+     * Reverse (NOT reverse-complement!!) a string
+     *
+     * @param bases  input string
+     * @return the reversed string
+     */
+    static public String reverse(String bases) {
+        return new String( reverse( bases.getBytes() )) ;
+    }
+
+    public static boolean isFlagSet(int value, int flag) {
+        return ((value & flag) == flag);
+    }
+
+    /**
+     * Helper utility that calls into the InetAddress system to resolve the hostname.  If this fails,
+     * unresolvable gets returned instead.
+     */
+    public static String resolveHostname() {
+        try {
+            return InetAddress.getLocalHost().getCanonicalHostName();
+        }
+        catch (java.net.UnknownHostException uhe) { // [beware typo in code sample -dmw]
+            return "unresolvable";
+            // handle exception
+        }
+    }
+
+
+    public static byte [] arrayFromArrayWithLength(byte[] array, int length) {
+        byte [] output = new byte[length];
+        for (int j = 0; j < length; j++)
+            output[j] = array[(j % array.length)];
+        return output;
+    }
+
+    public static void fillArrayWithByte(byte[] array, byte value) {
+        for (int i=0; i<array.length; i++)
+            array[i] = value;
+    }
+
+
    /**
     * Returns the number of combinations represented by this collection
     * of collection of options.
     *
     * For example, if this is [[A, B], [C, D], [E, F, G]] returns 2 * 2 * 3 = 12
     *
     * An empty options array yields 1 (the empty product).
     * NOTE(review): the product is computed in int arithmetic and can overflow
     * for large option sets — confirm inputs stay small.
     */
    @Requires("options != null")
    public static <T> int nCombinations(final Collection<T>[] options) {
        int nStates = 1;
        for ( Collection<T> states : options ) {
            nStates *= states.size();
        }
        return nStates;
    }
+
    /**
     * Returns the number of combinations represented by the list of option lists.
     * Unlike the array overload, an empty input yields 0 rather than 1.
     */
    @Requires("options != null")
    public static <T> int nCombinations(final List<List<T>> options) {
        if ( options.isEmpty() )
            return 0;
        else {
            int nStates = 1;
            for ( Collection<T> states : options ) {
                nStates *= states.size(); // NOTE(review): may overflow int for large inputs
            }
            return nStates;
        }
    }
+
    /**
     * Make all combinations of N size of objects
     *
     * if objects = [A, B, C]
     * if N = 1 => [[A], [B], [C]]
     * if N = 2 => [[A, A], [B, A], [C, A], [A, B], [B, B], [C, B], [A, C], [B, C], [C, C]]
     *
     * Returns an empty list when n < 1.
     *
     * @param objects         list of objects
     * @param n               size of each combination
     * @param withReplacement if false, the resulting permutations will only contain unique objects from objects
     * @return a list with all combinations with size n of objects.
     */
    public static <T> List<List<T>> makePermutations(final List<T> objects, final int n, final boolean withReplacement) {
        final List<List<T>> combinations = new ArrayList<List<T>>();

        if ( n == 1 ) {
            // base case: each object by itself
            for ( final T o : objects )
                combinations.add(Collections.singletonList(o));
        } else if (n > 1) {
            // recursive case: prepend each object to every (n-1)-sized permutation
            final List<List<T>> sub = makePermutations(objects, n - 1, withReplacement);
            for ( List<T> subI : sub ) {
                for ( final T a : objects ) {
                    if ( withReplacement || ! subI.contains(a) )
                        combinations.add(Utils.cons(a, subI));
                }
            }
        }

        return combinations;
    }
+
    /**
     * Convenience function that formats the novelty rate as a %.2f string
     *
     * @param known number of variants from all that are known
     * @param all number of all variants
     * @return a String novelty rate (percentage of all that are NOT known), or NA if all == 0
     */
    public static String formattedNoveltyRate(final int known, final int all) {
        return formattedPercent(all - known, all);
    }
+
+    /**
+     * Convenience function that formats the novelty rate as a %.2f string
+     *
+     * @param x number of objects part of total that meet some criteria
+     * @param total count of all objects, including x
+     * @return a String percent rate, or NA if total == 0
+     */
+    public static String formattedPercent(final long x, final long total) {
+        return total == 0 ? "NA" : String.format("%.2f", (100.0*x) / total);
+    }
+
+    /**
+     * Convenience function that formats a ratio as a %.2f string
+     *
+     * @param num  number of observations in the numerator
+     * @param denom number of observations in the denumerator
+     * @return a String formatted ratio, or NA if all == 0
+     */
+    public static String formattedRatio(final long num, final long denom) {
+        return denom == 0 ? "NA" : String.format("%.2f", num / (1.0 * denom));
+    }
+
+    /**
+     * Adds element from an array into a collection.
+     *
+     * In the event of exception being throw due to some element, <code>dest</code> might have been modified by
+     * the successful addition of element before that one.
+     *
+     * @param dest the destination collection which cannot be <code>null</code> and should be able to accept
+     *             the input elements.
+     * @param elements the element to add to <code>dest</code>
+     * @param <T>  collection type element.
+     * @throws UnsupportedOperationException if the <tt>add</tt> operation
+     *         is not supported by <code>dest</code>.
+     * @throws ClassCastException if the class of any of the elements
+     *         prevents it from being added to <code>dest</code>.
+     * @throws NullPointerException if any of the elements is <code>null</code> and <code>dest</code>
+     *         does not permit <code>null</code> elements
+     * @throws IllegalArgumentException if some property of any of the elements
+     *         prevents it from being added to this collection
+     * @throws IllegalStateException if any of the elements cannot be added at this
+     *         time due to insertion restrictions.
+     * @return <code>true</code> if the collection was modified as a result.
+     */
+    public static <T> boolean addAll(Collection<T> dest, T ... elements) {
+        boolean result = false;
+        for (final T e : elements) {
+            result = dest.add(e) | result;
+        }
+        return result;
+    }
+
+    /**
+     * Create a constant map that maps each value in values to itself
+     */
+    public static <T> Map<T, T> makeIdentityFunctionMap(Collection<T> values) {
+        Map<T,T> map = new HashMap<T, T>(values.size());
+        for ( final T value : values )
+            map.put(value, value);
+        return Collections.unmodifiableMap(map);
+    }
+
+    /**
+     * Divides the input list into a list of sublists, which contains group size elements (except potentially the last one)
+     *
+     * list = [A, B, C, D, E]
+     * groupSize = 2
+     * result = [[A, B], [C, D], [E]]
+     *
+     */
+    public static <T> List<List<T>> groupList(final List<T> list, final int groupSize) {
+        if ( groupSize < 1 ) throw new IllegalArgumentException("groupSize >= 1");
+
+        final List<List<T>> subLists = new LinkedList<List<T>>();
+        int n = list.size();
+        for ( int i = 0; i < n; i += groupSize ) {
+            subLists.add(list.subList(i, Math.min(i + groupSize, n)));
+        }
+        return subLists;
+    }
+
+    /**
+     * @see #calcMD5(byte[])
+     */
+    public static String calcMD5(final String s) {
+        return calcMD5(s.getBytes());
+    }
+
+    /**
+     * Calculate the md5 for bytes, and return the result as a 32 character string
+     *
+     * @param bytes the bytes to calculate the md5 of
+     * @return the md5 of bytes, as a 32-character long string
+     */
+    @Ensures({"result != null", "result.length() == 32"})
+    public static String calcMD5(final byte[] bytes) {
+        if ( bytes == null ) throw new IllegalArgumentException("bytes cannot be null");
+        try {
+            final byte[] thedigest = MessageDigest.getInstance("MD5").digest(bytes);
+            final BigInteger bigInt = new BigInteger(1, thedigest);
+
+            String md5String = bigInt.toString(16);
+            while (md5String.length() < 32) md5String = "0" + md5String; // pad to length 32
+            return md5String;
+        }
+        catch ( NoSuchAlgorithmException e ) {
+            throw new IllegalStateException("MD5 digest algorithm not present");
+        }
+    }
+
+    /**
+     * Does big end with the exact sequence of bytes in suffix?
+     *
+     * @param big a non-null byte[] to test if it a prefix + suffix
+     * @param suffix a non-null byte[] to test if it's a suffix of big
+     * @return true if big is proper byte[] composed of some prefix + suffix
+     */
+    public static boolean endsWith(final byte[] big, final byte[] suffix) {
+        if ( big == null ) throw new IllegalArgumentException("big cannot be null");
+        if ( suffix == null ) throw new IllegalArgumentException("suffix cannot be null");
+        return new String(big).endsWith(new String(suffix));
+    }
+
+    /**
+     * Get the length of the longest common prefix of seq1 and seq2
+     * @param seq1 non-null byte array
+     * @param seq2 non-null byte array
+     * @param maxLength the maximum allowed length to return
+     * @return the length of the longest common prefix of seq1 and seq2, >= 0
+     */
+    public static int longestCommonPrefix(final byte[] seq1, final byte[] seq2, final int maxLength) {
+        if ( seq1 == null ) throw new IllegalArgumentException("seq1 is null");
+        if ( seq2 == null ) throw new IllegalArgumentException("seq2 is null");
+        if ( maxLength < 0 ) throw new IllegalArgumentException("maxLength < 0 " + maxLength);
+
+        final int end = Math.min(seq1.length, Math.min(seq2.length, maxLength));
+        for ( int i = 0; i < end; i++ ) {
+            if ( seq1[i] != seq2[i] )
+                return i;
+        }
+        return end;
+    }
+
+    /**
+     * Get the length of the longest common suffix of seq1 and seq2
+     * @param seq1 non-null byte array
+     * @param seq2 non-null byte array
+     * @param maxLength the maximum allowed length to return
+     * @return the length of the longest common suffix of seq1 and seq2, >= 0
+     */
+    public static int longestCommonSuffix(final byte[] seq1, final byte[] seq2, final int maxLength) {
+        if ( seq1 == null ) throw new IllegalArgumentException("seq1 is null");
+        if ( seq2 == null ) throw new IllegalArgumentException("seq2 is null");
+        if ( maxLength < 0 ) throw new IllegalArgumentException("maxLength < 0 " + maxLength);
+
+        final int end = Math.min(seq1.length, Math.min(seq2.length, maxLength));
+        for ( int i = 0; i < end; i++ ) {
+            if ( seq1[seq1.length - i - 1] != seq2[seq2.length - i - 1] )
+                return i;
+        }
+        return end;
+    }
+
+    /**
+     * Trim any number of bases from the front and/or back of an array
+     *
+     * @param seq                the non-null sequence to trim
+     * @param trimFromFront      how much to trim from the front, >= 0
+     * @param trimFromBack       how much to trim from the back, >= 0
+     * @return a non-null array; can be the original array (i.e. not a copy)
+     * @throws IllegalArgumentException if seq is null, a trim amount is negative, or the
+     *         combined trim exceeds the array length
+     */
+    public static byte[] trimArray(final byte[] seq, final int trimFromFront, final int trimFromBack) {
+        // validate explicitly, consistent with the other helpers in this class; previously a
+        // null seq produced an NPE and a negative trim an ArrayIndexOutOfBoundsException
+        if ( seq == null )
+            throw new IllegalArgumentException("seq cannot be null");
+        if ( trimFromFront < 0 || trimFromBack < 0 )
+            throw new IllegalArgumentException("trim amounts cannot be negative");
+        if ( trimFromFront + trimFromBack > seq.length )
+            throw new IllegalArgumentException("trimming total is larger than the original array");
+
+        // don't perform array copies if we need to copy everything anyways
+        return  ( trimFromFront == 0 && trimFromBack == 0 ) ? seq : Arrays.copyOfRange(seq, trimFromFront, seq.length - trimFromBack);
+    }
+
+    /**
+     * Simple wrapper for sticking elements of a int[] array into a List<Integer>
+     * @param ar - the non-null array whose elements should be listified
+     * @return - a List<Integer> where each element has the same value as the corresponding index in @ar
+     * @throws IllegalArgumentException if {@code ar} is {@code null}
+     */
+    public static List<Integer> listFromPrimitives(final int[] ar) {
+        // validate explicitly, consistent with the other helpers in this class
+        if ( ar == null ) throw new IllegalArgumentException("ar cannot be null");
+
+        // presize to avoid intermediate re-allocations
+        final ArrayList<Integer> lst = new ArrayList<>(ar.length);
+        for ( final int d : ar ) {
+            lst.add(d);
+        }
+
+        return lst;
+    }
+
+    /**
+     * Compares sections from to byte arrays to verify whether they contain the same values.
+     *
+     * @param left first array to compare.
+     * @param leftOffset first position of the first array to compare.
+     * @param right second array to compare.
+     * @param rightOffset first position of the second array to compare.
+     * @param length number of positions to compare.
+     *
+     * @throws IllegalArgumentException if <ul>
+     *     <li>either {@code left} or {@code right} is {@code null} or</li>
+     *     <li>any off the offset or length combine point outside any of the two arrays</li>
+     * </ul>
+     * @return {@code true} iff {@code length} is 0 or all the bytes in both ranges are the same two-by-two.
+     */
+    public static boolean equalRange(final byte[] left, final int leftOffset, byte[] right, final int rightOffset, final int length) {
+        if (left == null) throw new IllegalArgumentException("left cannot be null");
+        if (right == null) throw new IllegalArgumentException("right cannot be null");
+        if (length < 0) throw new IllegalArgumentException("the length cannot be negative");
+        if (leftOffset < 0) throw new IllegalArgumentException("left offset cannot be negative");
+        if (leftOffset + length > left.length) throw new IllegalArgumentException("length goes beyond end of left array");
+        if (rightOffset < 0) throw new IllegalArgumentException("right offset cannot be negative");
+        if (rightOffset + length > right.length) throw new IllegalArgumentException("length goes beyond end of right array");
+
+        for (int i = 0; i < length; i++)
+            if (left[leftOffset + i] != right[rightOffset + i])
+                return false;
+        return true;
+    }
+
+    /**
+     * Skims out positions of an array returning a shorter one with the remaining positions in the same order.
+     *
+     * @param original the original array to splice.
+     * @param remove for each position in {@code original} indicates whether it should be spliced away ({@code true}),
+     *               or retained ({@code false})
+     *
+     * @param <T> the array type.
+     *
+     * @throws IllegalArgumentException if either {@code original} or {@code remove} is {@code null},
+     *    or {@code remove}'s length differs from {@code original}'s, or {@code original} is not in
+     *    fact an array.
+     *
+     * @return never {@code null}.
+     */
+    public static <T> T skimArray(final T original, final boolean[] remove) {
+        // delegate to the fully-parameterized overload: no destination array, all offsets at 0
+        return skimArray(original, 0, null, 0, remove, 0);
+    }
+
+    /**
+     * Skims out positions of an array returning a shorter one with the remaining positions in the same order.
+     *
+     * <p>
+     *     If the {@code dest} array provided is not long enough a new one will be created and returned with the
+     *     same component type. All elements before {@code destOffset} will be copied from the input to the
+     *     result array. If {@code dest} is {@code null}, a brand-new array large enough will be created where
+     *     the positions preceding {@code destOffset} will be left with the default value. The component type
+     *     will match the one of the {@code source} array.
+     * </p>
+     *
+     * @param source the original array to splice.
+     * @param sourceOffset the first position to skim.
+     * @param dest the destination array.
+     * @param destOffset the first position where to copy the skimmed array values.
+     * @param remove for each position in {@code source} indicates whether it should be spliced away ({@code true}),
+     *               or retained ({@code false})
+     * @param removeOffset the first position in the remove index array to consider.
+     *
+     * @param <T> the array type.
+     *
+     * @throws IllegalArgumentException if either {@code source} or {@code remove} is {@code null},
+     *    or {@code source} is not in fact an array, or the offset falls beyond its end.
+     *
+     * @return never {@code null}.
+     */
+    public static <T> T skimArray(final T source, final int sourceOffset, final T dest, final int destOffset, final boolean[] remove, final int removeOffset) {
+        if (source == null)
+            throw new IllegalArgumentException("the source array cannot be null");
+        @SuppressWarnings("unchecked")
+        final Class<T> srcClass = (Class<T>) source.getClass();
+
+        if (!srcClass.isArray())
+            throw new IllegalArgumentException("the source array is not in fact an array instance");
+        // skim everything from sourceOffset through the end of the source array
+        final int remaining = Array.getLength(source) - sourceOffset;
+        if (remaining < 0)
+            throw new IllegalArgumentException("the source offset goes beyond the source array length");
+        return skimArray(source, sourceOffset, dest, destOffset, remove, removeOffset, remaining);
+    }
+
+    /**
+     * Skims out positions of an array returning a shorter one with the remaining positions in the same order.
+     *
+     * <p>
+     *     If the {@code dest} array provided is not long enough a new one will be created and returned with the
+     *     same component type. All elements before {@code destOffset} will be copied from the input to the
+     *     result array. If {@code dest} is {@code null}, a brand-new array large enough will be created where
+     *     the positions preceding {@code destOffset} will be left with the default value. The component type
+     *     will match the one of the {@code source} array.
+     * </p>
+     *
+     * @param source the original array to splice.
+     * @param sourceOffset the first position to skim.
+     * @param dest the destination array.
+     * @param destOffset the first position where to copy the skimmed array values.
+     * @param remove for each position in {@code source} indicates whether it should be spliced away ({@code true}),
+     *               or retained ({@code false})
+     * @param removeOffset the first position in the remove index array to consider.
+     * @param length the total number of positions in {@code source} to consider. Thus only the {@code sourceOffset} to
+     *               {@code sourceOffset + length - 1} region will be skimmed.
+     *
+     * @param <T> the array type.
+     *
+     * @throws IllegalArgumentException if either {@code source} or {@code remove} is {@code null},
+     *    or any offset or length is negative or falls beyond the corresponding array's end, or
+     *    {@code source} is not in fact an array.
+     *
+     * @return never {@code null}.
+     */
+    public static <T> T skimArray(final T source, final int sourceOffset, final T dest, final int destOffset,
+                                  final boolean[] remove, final int removeOffset, final int length) {
+        // ---- argument validation -------------------------------------------------------------
+        if (source == null)
+            throw new IllegalArgumentException("the source array cannot be null");
+        if (remove == null)
+            throw new IllegalArgumentException("the remove array cannot be null");
+        if (sourceOffset < 0)
+            throw new IllegalArgumentException("the source array offset cannot be negative");
+        if (destOffset < 0)
+            throw new IllegalArgumentException("the destination array offset cannot be negative");
+        if (removeOffset < 0)
+            throw new IllegalArgumentException("the remove array offset cannot be negative");
+        if (length < 0)
+            throw new IllegalArgumentException("the length provided cannot be negative");
+
+        // the remove mask may be shorter than length; positions beyond it are implicitly retained
+        final int removeLength = Math.min(remove.length - removeOffset,length);
+
+        if (removeLength < 0)
+            throw new IllegalArgumentException("the remove offset provided falls beyond the remove array end");
+
+
+        @SuppressWarnings("unchecked")
+        final Class<T> sourceClazz = (Class<T>) source.getClass();
+
+        if (!sourceClazz.isArray())
+            throw new IllegalArgumentException("the source array is not in fact an array instance");
+
+        // resolves to the dest's class when compatible, otherwise the source's class (or throws)
+        final Class<T> destClazz = skimArrayDetermineDestArrayClass(dest, sourceClazz);
+
+        final int sourceLength = Array.getLength(source);
+
+        if (sourceLength < length + sourceOffset)
+            throw new IllegalArgumentException("the source array is too small considering length and offset");
+
+        // count how many positions are to be removed.
+
+        int removeCount = 0;
+
+        final int removeEnd = removeLength + removeOffset;
+        for (int i = removeOffset; i < removeEnd; i++)
+            if  (remove[i]) removeCount++;
+
+
+        final int newLength = length - removeCount;
+
+
+        // reuses dest if large enough, otherwise allocates a fresh array of destClazz's component type
+        @SuppressWarnings("unchecked")
+        final T result = skimArrayBuildResultArray(dest, destOffset, destClazz, newLength);
+        // No removals, just copy the whole thing.
+
+        if (removeCount == 0)
+            System.arraycopy(source,sourceOffset,result,destOffset,length);
+        else if (length > 0) {  // if length == 0 nothing to do.
+            // copy the retained stretches between removed positions in bulk via arraycopy
+            int nextOriginalIndex = 0;
+            int nextNewIndex = 0;
+            int nextRemoveIndex = removeOffset;
+            while (nextOriginalIndex < length && nextNewIndex < newLength) {
+                while (nextRemoveIndex < removeEnd && remove[nextRemoveIndex++]) { nextOriginalIndex++; } // skip positions to be spliced.
+                // Since we make the nextNewIndex < newLength check in the while condition
+                // there is no need to include the following break, as is guaranteed not to be true:
+                // if (nextOriginalIndex >= length) break; // we reach the final (last positions are to be spliced.
+                final int copyStart = nextOriginalIndex;
+                // NOTE(review): the inner scan advances nextRemoveIndex inside the body but tests it
+                // before incrementing in the condition; the alignment between nextRemoveIndex and
+                // nextOriginalIndex is subtle here — verify with a mask that mixes runs of true/false.
+                while (++nextOriginalIndex < length && (nextRemoveIndex >= removeEnd || !remove[nextRemoveIndex])) { nextRemoveIndex++; }
+                final int copyEnd = nextOriginalIndex;
+                final int copyLength = copyEnd - copyStart;
+                System.arraycopy(source, sourceOffset + copyStart, result, destOffset + nextNewIndex, copyLength);
+                nextNewIndex += copyLength;
+            }
+        }
+        return result;
+    }
+
+    /**
+     * Picks or allocates the result array for {@link #skimArray}: reuses {@code dest} when it is
+     * large enough, otherwise creates a new array of {@code destClazz}'s component type, preserving
+     * the first {@code destOffset} elements of an undersized non-null {@code dest}.
+     */
+    private static <T> T skimArrayBuildResultArray(final T dest, final int destOffset, final Class<T> destClazz, final int newLength) {
+        // total capacity the result must provide
+        final int required = newLength + destOffset;
+        if (dest != null && Array.getLength(dest) >= required)
+            return dest;
+        @SuppressWarnings("unchecked")
+        final T fresh = (T) Array.newInstance(destClazz.getComponentType(), required);
+        // keep the prefix of an undersized destination array; a null dest leaves defaults in place
+        if (dest != null && destOffset > 0)
+            System.arraycopy(dest, 0, fresh, 0, destOffset);
+        return fresh;
+    }
+
+    /**
+     * Determines the class of the result array for {@link #skimArray}, validating that a
+     * non-null destination can actually store elements coming from the source array.
+     *
+     * @param dest the candidate destination array; may be {@code null}, in which case the source class is used.
+     * @param sourceClazz the (array) class of the source.
+     * @return never {@code null}.
+     * @throws IllegalArgumentException if {@code dest} is non-null but not an array, or its component
+     *    type cannot hold the source's component type.
+     */
+    private static <T> Class<T> skimArrayDetermineDestArrayClass(final T dest, Class<T> sourceClazz) {
+        final Class<T> destClazz;
+        if (dest == null)
+            destClazz = sourceClazz;
+        else {
+            @SuppressWarnings("unchecked")
+            final Class<T> destClass = (Class<T>) dest.getClass();
+            destClazz = destClass;
+            if (destClazz != sourceClazz) {
+                if (!destClazz.isArray())
+                    throw new IllegalArgumentException("the destination array class must be an array");
+                // BUG FIX: the original test was inverted — to copy source elements into dest, the
+                // destination component type must be assignable FROM the source component type;
+                // the old check also let completely unrelated component types pass.
+                if (!destClazz.getComponentType().isAssignableFrom(sourceClazz.getComponentType()))
+                    throw new IllegalArgumentException("the provided destination array class cannot contain values from the source due to type incompatibility");
+            }
+        }
+        return destClazz;
+    }
+
+    /**
+     * Makes a deep clone of the array provided.
+     *
+     * <p>
+     *     Whereas {@link Arrays#copyOf} or an array's {@link Object#clone()} create a shallow copy,
+     *     here, if the input is multi-dimensional, each sub-array or matrix is cloned as well.
+     * </p>
+     *
+     * <p>
+     *     Notice however that if the base type is an Object type, the base elements themselves won't be cloned.
+     * </p>
+     *
+     * @param array the array to deep-clone.
+     * @param <T> type of the array.
+     *
+     * @return a deep clone of {@code array}; never {@code null}.
+     *
+     * @throws IllegalArgumentException if {@code array} is {@code null} or is not an array.
+     */
+    public static <T>  T deepCloneArray(final T array) {
+
+        // the original threw with an empty, uninformative message here
+        if (array == null)
+            throw new IllegalArgumentException("the array cannot be null");
+
+        @SuppressWarnings("unchecked")
+        final Class<T> clazz = (Class<T>) array.getClass();
+
+
+        if (!clazz.isArray())
+            throw new IllegalArgumentException("the input is not an array");
+
+        // the number of dimensions drives the recursion depth of the clone
+        final int dimension = calculateArrayDimensions(clazz);
+
+        return deepCloneArrayUnchecked(array,clazz, dimension);
+    }
+
+    // Counts the number of array dimensions of clazz (0 when clazz is not an array at all).
+    private static int calculateArrayDimensions(final Class<?> clazz) {
+        int dimensions = 0;
+        for (Class<?> current = clazz; current.isArray(); current = current.getComponentType())
+            dimensions++;
+        return dimensions;
+    }
+
+    /**
+     * Recursive work-horse for {@link #deepCloneArray}; assumes {@code array} is a non-null array
+     * of class {@code clazz} with exactly {@code dimension} array dimensions.
+     */
+    @SuppressWarnings("unchecked")
+    private static <T> T deepCloneArrayUnchecked(final T array, final Class<T> clazz, final int dimension) {
+
+        final int length = Array.getLength(array);
+
+        // was a raw Class; Class<?> keeps the reflective code type-safe without changing behavior
+        final Class<?> componentClass = clazz.getComponentType();
+
+        final T result = (T) Array.newInstance(componentClass,length);
+
+        if (dimension <= 1) {
+            // one-dimensional: a flat copy is already a deep clone of the leaf elements
+            System.arraycopy(array, 0, result, 0, length);
+            return result;
+        }
+
+        // multi-dimensional: clone each sub-array recursively
+        final int dimensionMinus1 = dimension - 1;
+
+        for (int i = 0; i < length; i++)
+            Array.set(result,i,deepCloneArrayUnchecked(Array.get(array,i),(Class<Object>)componentClass,dimensionMinus1));
+
+        return result;
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/ValidationExclusion.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/ValidationExclusion.java
new file mode 100644
index 0000000..c09ebe4
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/ValidationExclusion.java
@@ -0,0 +1,71 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils;
+
+import org.broadinstitute.gatk.utils.commandline.EnumerationArgumentDefault;
+
+import java.util.ArrayList;
+import java.util.List;
+
+
+/**
+ * Holds the set of engine validation checks the user has asked to be skipped
+ * (populated from the "-U" command-line argument; see the TYPE enum values).
+ */
+public class ValidationExclusion {
+    // our validation options
+
+    public enum TYPE {
+        ALLOW_N_CIGAR_READS,    // ignore the presence of N operators in CIGARs: do not blow up and process reads that contain one or more N operators.
+                                // This exclusion does not have effect on reads that get filtered {@see MalformedReadFilter}.
+        ALLOW_UNINDEXED_BAM,        // allow bam files that do not have an index; we'll traverse them using monolithic shard
+        ALLOW_UNSET_BAM_SORT_ORDER, // assume that the bam is sorted, even if the SO (sort-order) flag is not set
+        NO_READ_ORDER_VERIFICATION, // do not validate that the reads are in order as we take them from the bam file
+        ALLOW_SEQ_DICT_INCOMPATIBILITY, // allow dangerous, but not fatal, sequence dictionary incompatibilities
+        LENIENT_VCF_PROCESSING,         // allow non-standard values for standard VCF header lines.  Don't worry about size differences between header and values, etc.
+        @EnumerationArgumentDefault // set the ALL value to the default value, so if they specify just -U, we get the ALL
+        ALL                         // do not check for all of the above conditions, DEFAULT
+    }
+
+    // a storage for the passed in exclusions
+    List<TYPE> exclusions = new ArrayList<TYPE>();
+
+    // copies the provided exclusions into this object's own list
+    public ValidationExclusion(List<TYPE> exclusionsList) {
+        exclusions.addAll(exclusionsList);
+    }
+
+    // no exclusions: every validation check remains active
+    public ValidationExclusion() {}
+    
+    /**
+     * do we contain the exclusion specified, or were we set to ALL
+     * @param t the exclusion case to test for
+     * @return true if we contain the exclusion or if we're set to ALL, false otherwise
+     */
+    public boolean contains(TYPE t) {
+        return (exclusions.contains(TYPE.ALL) || exclusions.contains(t));
+    }
+
+    // static variant of contains() for the LENIENT_VCF_PROCESSING case, usable on a single TYPE value
+    public static boolean lenientVCFProcessing(final TYPE val) {
+        return val == TYPE.ALL
+                || val == TYPE.LENIENT_VCF_PROCESSING;
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/activeregion/ActiveRegion.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/activeregion/ActiveRegion.java
new file mode 100644
index 0000000..8a4deb5
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/activeregion/ActiveRegion.java
@@ -0,0 +1,500 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.activeregion;
+
+import com.google.java.contract.Ensures;
+import com.google.java.contract.Invariant;
+import htsjdk.samtools.reference.IndexedFastaSequenceFile;
+import org.broadinstitute.gatk.utils.GenomeLoc;
+import org.broadinstitute.gatk.utils.GenomeLocParser;
+import org.broadinstitute.gatk.utils.GenomeLocSortedSet;
+import org.broadinstitute.gatk.utils.HasGenomeLocation;
+import org.broadinstitute.gatk.utils.clipping.ReadClipper;
+import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
+import org.broadinstitute.gatk.utils.sam.ReadUtils;
+
+import java.util.*;
+
+/**
+ * Represents a single active region created by the Active Region Traversal for processing
+ *
+ * An active region is a single contiguous span of bases on the genome that should be operated
+ * on as a single unit for the active region traversal.  The action may contains a list of
+ * reads that overlap the region (may because there may be no reads in the region).  The region
+ * is tagged as being either active or inactive, depending on the probabilities provided by
+ * the isActiveProb results from the ART walker.  Each region carries with it the
+ * exact span of the region (bases which are the core of the isActiveProbs from the walker) as
+ * well as an extended size, that includes the ART walker's extension size.  Reads in the region
+ * provided by ART include all reads overlapping the extended span, not the raw span.
+ *
+ * User: rpoplin
+ * Date: 1/4/12
+ */
+@Invariant({
+        "extension >= 0",
+        "activeRegionLoc != null",
+        "genomeLocParser != null",
+        "spanIncludingReads != null",
+        "extendedLoc != null"
+})
+public class ActiveRegion implements HasGenomeLocation {
+    /**
+     * The reads included in this active region.  May be empty upon creation, and expand / contract
+     * as reads are added or removed from this region.
+     */
+    private final List<GATKSAMRecord> reads = new ArrayList<GATKSAMRecord>();
+
+    /**
+     * An ordered list (by genomic coordinate) of the ActivityProfileStates that went
+     * into this active region.  May be empty, which says that no supporting states were
+     * provided when this region was created.
+     */
+    private final List<ActivityProfileState> supportingStates;
+
+    /**
+     * The raw span of this active region, not including the active region extension
+     */
+    private final GenomeLoc activeRegionLoc;
+
+    /**
+     * The span of this active region on the genome, including the active region extension
+     */
+    private final GenomeLoc extendedLoc;
+
+    /**
+     * The extension, in bp, of this active region.
+     */
+    private final int extension;
+
+    /**
+     * A genomeLocParser so we can create genomeLocs
+     */
+    private final GenomeLocParser genomeLocParser;
+
+    /**
+     * Does this region represent an active region (all isActiveProbs above threshold) or
+     * an inactive region (all isActiveProbs below threshold)?
+     */
+    private final boolean isActive;
+
+    /**
+     * The span of this active region, including the bp covered by all reads in this
+     * region.  This union of extensionLoc and the loc of all reads in this region.
+     *
+     * Must be at least as large as extendedLoc, but may be larger when reads
+     * partially overlap this region.
+     */
+    private GenomeLoc spanIncludingReads;
+
+
+    /**
+     * Indicates whether the active region has been finalized
+     */
+    private boolean hasBeenFinalized;
+
+    /**
+     * Create a new ActiveRegion containing no reads
+     *
+     * @param activeRegionLoc the span of this active region
+     * @param supportingStates the states that went into creating this region, or null / empty if none are available.
+     *                         If not empty, must have exactly one state for each bp in activeRegionLoc
+     * @param isActive indicates whether this is an active region, or an inactive one
+     * @param genomeLocParser a non-null parser to let us create new genome locs
+     * @param extension the active region extension to use for this active region
+     * @throws IllegalArgumentException if activeRegionLoc is null or empty, genomeLocParser is null,
+     *         extension is negative, or supportingStates is non-empty but not one contiguous state per bp
+     */
+    public ActiveRegion( final GenomeLoc activeRegionLoc, final List<ActivityProfileState> supportingStates, final boolean isActive, final GenomeLocParser genomeLocParser, final int extension ) {
+        if ( activeRegionLoc == null ) throw new IllegalArgumentException("activeRegionLoc cannot be null");
+        if ( activeRegionLoc.size() == 0 ) throw new IllegalArgumentException("Active region cannot be of zero size, but got " + activeRegionLoc);
+        if ( genomeLocParser == null ) throw new IllegalArgumentException("genomeLocParser cannot be null");
+        if ( extension < 0 ) throw new IllegalArgumentException("extension cannot be < 0 but got " + extension);
+
+        this.activeRegionLoc = activeRegionLoc;
+        // defensive copy so later mutation of the caller's list cannot affect this region
+        this.supportingStates = supportingStates == null ? Collections.<ActivityProfileState>emptyList() : new ArrayList<ActivityProfileState>(supportingStates);
+        this.isActive = isActive;
+        this.genomeLocParser = genomeLocParser;
+        this.extension = extension;
+        // createGenomeLocOnContig clips the extended span to the contig's boundaries
+        this.extendedLoc = genomeLocParser.createGenomeLocOnContig(activeRegionLoc.getContig(), activeRegionLoc.getStart() - extension, activeRegionLoc.getStop() + extension);
+        this.spanIncludingReads = extendedLoc;
+
+        if ( ! this.supportingStates.isEmpty() ) {
+            // enforce the one-state-per-bp contract: same count, same contig, consecutive starts
+            if ( this.supportingStates.size() != activeRegionLoc.size() )
+                throw new IllegalArgumentException("Supporting states wasn't empty but it doesn't have exactly one state per bp in the active region: states " + this.supportingStates.size() + " vs. bp in region = " + activeRegionLoc.size());
+            GenomeLoc lastStateLoc = null;
+            for ( final ActivityProfileState state : this.supportingStates ) {
+                if ( lastStateLoc != null ) {
+                    if ( state.getLoc().getStart() != lastStateLoc.getStart() + 1 || state.getLoc().getContigIndex() != lastStateLoc.getContigIndex())
+                        throw new IllegalArgumentException("Supporting state has an invalid sequence: last state was " + lastStateLoc + " but next state was " + state);
+                }
+                lastStateLoc = state.getLoc();
+            }
+        }
+    }
+
+    /**
+     * Simple interface to create an active region that isActive without any profile state
+     *
+     * @param activeRegionLoc the span of this active region
+     * @param genomeLocParser a non-null parser to let us create new genome locs
+     * @param extension the active region extension to use for this active region
+     */
+    public ActiveRegion( final GenomeLoc activeRegionLoc, final GenomeLocParser genomeLocParser, final int extension ) {
+        this(activeRegionLoc, Collections.<ActivityProfileState>emptyList(), true, genomeLocParser, extension);
+    }
+
+    /** One-line summary of this region: its location, activity flag and read count. */
+    @Override
+    public String toString() {
+        final StringBuilder summary = new StringBuilder("ActiveRegion ");
+        summary.append(activeRegionLoc.toString());
+        summary.append(" active?=").append(isActive());
+        summary.append(" nReads=").append(reads.size());
+        return summary.toString();
+    }
+
+    /**
+     * See #getActiveRegionReference but with padding == 0
+     *
+     * @param referenceReader the source of the reference genome bases
+     * @return a non-null array of bytes holding the reference bases spanning this region's extended location
+     */
+    public byte[] getActiveRegionReference( final IndexedFastaSequenceFile referenceReader ) {
+        return getActiveRegionReference(referenceReader, 0);
+    }
+
+    /**
+     * Get the reference bases from referenceReader spanned by the extended location of this active region,
+     * including additional padding bp on either side.  If this expanded region would exceed the boundaries
+     * of the active region's contig, the returned result will be truncated to only include on-genome reference
+     * bases
+     * @param referenceReader the source of the reference genome bases
+     * @param padding the padding, in BP, we want to add to either side of this active region extended region
+     * @return a non-null array of bytes holding the reference bases in referenceReader
+     */
+    @Ensures("result != null")
+    public byte[] getActiveRegionReference( final IndexedFastaSequenceFile referenceReader, final int padding ) {
+        // delegate to getReference over the extended span; it handles clamping to the contig boundaries
+        return getReference(referenceReader, padding, extendedLoc);
+    }
+
+    /**
+     * See #getActiveRegionReference but using the full read span (spanIncludingReads), not the extended span,
+     * and with padding == 0
+     */
+    public byte[] getFullReference( final IndexedFastaSequenceFile referenceReader ) {
+        return getFullReference(referenceReader, 0);
+    }
+
+    /**
+     * See #getActiveRegionReference but using the full read span (spanIncludingReads), not the extended span
+     */
+    public byte[] getFullReference( final IndexedFastaSequenceFile referenceReader, final int padding ) {
+        return getReference(referenceReader, padding, spanIncludingReads);
+    }
+
+    /**
+     * Get the reference bases from referenceReader spanned by genomeLoc,
+     * including additional padding bp on either side.  If this expanded region would exceed the boundaries
+     * of the contig, the returned result will be truncated to only include on-genome reference
+     * bases
+     * @param referenceReader the source of the reference genome bases
+     * @param padding the padding, in BP (may be 0), we want to add to either side of the requested span
+     * @param genomeLoc a non-null genome loc indicating the base span of the bp we'd like to get the reference for
+     * @return a non-null array of bytes holding the reference bases in referenceReader
+     */
+    @Ensures("result != null")
+    public byte[] getReference( final IndexedFastaSequenceFile referenceReader, final int padding, final GenomeLoc genomeLoc ) {
+        if ( referenceReader == null ) throw new IllegalArgumentException("referenceReader cannot be null");
+        // padding of 0 is valid; the previous message incorrectly demanded a "positive" value
+        if ( padding < 0 ) throw new IllegalArgumentException("padding must be >= 0 but got " + padding);
+        if ( genomeLoc == null ) throw new IllegalArgumentException("genomeLoc cannot be null");
+        if ( genomeLoc.size() == 0 ) throw new IllegalArgumentException("GenomeLoc must have size > 0 but got " + genomeLoc);
+
+        // clamp the padded interval to [1, contig length] so we never read off the end of the contig
+        return referenceReader.getSubsequenceAt( genomeLoc.getContig(),
+                Math.max(1, genomeLoc.getStart() - padding),
+                Math.min(referenceReader.getSequenceDictionary().getSequence(genomeLoc.getContig()).getSequenceLength(), genomeLoc.getStop() + padding) ).getBases();
+    }
+
+    /**
+     * Get the raw span of this active region (excluding the extension)
+     *
+     * This is the original region span as supplied at construction, not the extended one.
+     * @return a non-null genome loc
+     */
+    @Override
+    @Ensures("result != null")
+    public GenomeLoc getLocation() { return activeRegionLoc; }
+
+    /**
+     * Get the span of this active region including the extension value
+     *
+     * Computed once at construction by padding the raw span with the extension on both sides.
+     * @return a non-null GenomeLoc
+     */
+    @Ensures("result != null")
+    public GenomeLoc getExtendedLoc() { return extendedLoc; }
+
+    /**
+     * Get the span of this active region including the extension and the projections on the
+     * genome of all reads in this active region.  That is, returns the bp covered by this
+     * region and all reads in the region.
+     * @return a non-null genome loc
+     */
+    @Ensures("result != null")
+    public GenomeLoc getReadSpanLoc() { return spanIncludingReads; }
+
+    /**
+     * Get the active profile states that went into creating this region, if possible
+     * @return an unmodifiable list of states that led to the creation of this region, or an empty
+     *         list if none were provided
+     */
+    @Ensures("result != null")
+    public List<ActivityProfileState> getSupportingStates() {
+        return Collections.unmodifiableList(supportingStates);
+    }
+
+    /**
+     * Get the active region extension applied to this region
+     *
+     * The extension is >= 0 bp in size, and indicates how much padding this art walker wanted for its regions
+     *
+     * @return the size in bp of the region extension
+     */
+    @Ensures("result >= 0")
+    public int getExtension() { return extension; }
+
+    /**
+     * Get an unmodifiable list of reads currently in this active region.
+     *
+     * The reads are sorted by their coordinate position
+     *
+     * @return an unmodifiable list of reads in this active region
+     */
+    @Ensures("result != null")
+    public List<GATKSAMRecord> getReads() {
+        return Collections.unmodifiableList(reads);
+    }
+
+    /**
+     * Get the number of reads currently in this active region
+     * @return an integer >= 0
+     */
+    @Ensures("result >= 0")
+    public int size() { return reads.size(); }
+
    /**
     * Add read to this active region.
     *
     * Reads must be added in coordinate-sorted order: the read's alignment start must be >= the
     * alignment start of the last read currently in this active region, and on the same contig.
     *
     * @param read a non-null GATKSAMRecord that overlaps this region's extended span
     * @throws IllegalArgumentException if read is null, doesn't overlap the extended region,
     *         is on a different contig than the existing reads, or is out of coordinate order
     */
    @Ensures("reads.size() == old(reads.size()) + 1")
    public void add( final GATKSAMRecord read ) {
        if ( read == null ) throw new IllegalArgumentException("Read cannot be null");

        final GenomeLoc readLoc = genomeLocParser.createGenomeLoc( read );
        if ( ! readOverlapsRegion(read) )
            throw new IllegalArgumentException("Read location " + readLoc + " doesn't overlap with active region extended span " + extendedLoc);

        // NOTE(review): the read-inclusive span is widened before the ordering checks below,
        // so a rejected read still leaves the span widened -- confirm this is intended
        spanIncludingReads = spanIncludingReads.union( readLoc );

        if ( ! reads.isEmpty() ) {
            // enforce same-contig and coordinate-sorted invariants against the current last read
            final GATKSAMRecord lastRead = reads.get(size() - 1);
            if ( ! lastRead.getReferenceIndex().equals(read.getReferenceIndex()) )
                throw new IllegalArgumentException("Attempting to add a read to ActiveRegion not on the same contig as other reads: lastRead " + lastRead + " attempting to add " + read);

            if ( read.getAlignmentStart() < lastRead.getAlignmentStart() )
                throw new IllegalArgumentException("Attempting to add a read to ActiveRegion out of order w.r.t. other reads: lastRead " + lastRead + " at " + lastRead.getAlignmentStart() + " attempting to add " + read + " at " + read.getAlignmentStart());
        }

        reads.add( read );
    }
+
+    /**
+     * Returns true if read would overlap the extended extent of this region
+     * @param read the read we want to test
+     * @return true if read can be added to this region, false otherwise
+     */
+    public boolean readOverlapsRegion(final GATKSAMRecord read) {
+        final GenomeLoc readLoc = genomeLocParser.createGenomeLoc( read );
+        return readLoc.overlapsP(extendedLoc);
+    }
+
+    /**
+     * Add all reads to this active region
+     * @param reads a collection of reads to add to this active region
+     */
+    public void addAll(final Collection<GATKSAMRecord> reads) {
+        if ( reads == null ) throw new IllegalArgumentException("reads cannot be null");
+        for ( final GATKSAMRecord read : reads )
+            add(read);
+    }
+
+    /**
+     * Clear all of the reads currently in this active region
+     */
+    @Ensures("size() == 0")
+    public void clearReads() {
+        spanIncludingReads = extendedLoc;
+        reads.clear();
+    }
+
    /**
     * Remove all of the reads in readsToRemove from this active region.
     *
     * Also rebuilds spanIncludingReads from scratch: it is reset to the extended span and then
     * re-unioned with the loc of each read that survives the removal.
     *
     * @param readsToRemove the set of reads we want to remove
     */
    public void removeAll( final Set<GATKSAMRecord> readsToRemove ) {
        final Iterator<GATKSAMRecord> it = reads.iterator();
        spanIncludingReads = extendedLoc;
        while ( it.hasNext() ) {
            final GATKSAMRecord read = it.next();
            if ( readsToRemove.contains(read) )
                it.remove();   // iterator removal is safe during iteration
            else
                spanIncludingReads = spanIncludingReads.union( genomeLocParser.createGenomeLoc(read) );
        }
    }
+
    /**
     * Is this region equal to other, excluding any reads in either region in the comparison?
     *
     * Compares the active span, the active flag, the parser, the extension size and the
     * extended span; reads and values derived from them (e.g. spanIncludingReads) are ignored.
     *
     * @param other the other active region we want to test
     * @return true if this region is equal, excluding any reads and derived values, to other
     */
    protected boolean equalExceptReads(final ActiveRegion other) {
        if ( activeRegionLoc.compareTo(other.activeRegionLoc) != 0 ) return false;
        if ( isActive() != other.isActive()) return false;
        // NOTE(review): reference (==) comparison -- assumes both regions share the same
        // GenomeLocParser instance; confirm that's the intended contract
        if ( genomeLocParser != other.genomeLocParser ) return false;
        if ( extension != other.extension ) return false;
        if ( extendedLoc.compareTo(other.extendedLoc) != 0 ) return false;
        return true;
    }
+
+    /**
+     * Does this region represent an active region (all isActiveProbs above threshold) or
+     * an inactive region (all isActiveProbs below threshold)?
+     */
+    public boolean isActive() {
+        return isActive;
+    }
+
+    /**
+     * Intersect this active region with the allowed intervals, returning a list of active regions
+     * that only contain locations present in intervals
+     *
+     * Note that the returned list may be empty, if this active region doesn't overlap the set at all
+     *
+     * Note that the resulting regions are all empty, regardless of whether the current active region has reads
+     *
+     * @param intervals a non-null set of intervals that are allowed
+     * @return an ordered list of active region where each interval is contained within intervals
+     */
+    @Ensures("result != null")
+    protected List<ActiveRegion> splitAndTrimToIntervals(final GenomeLocSortedSet intervals) {
+        final List<GenomeLoc> allOverlapping = intervals.getOverlapping(getLocation());
+        final List<ActiveRegion> clippedRegions = new LinkedList<ActiveRegion>();
+
+        for ( final GenomeLoc overlapping : allOverlapping ) {
+            clippedRegions.add(trim(overlapping, extension));
+        }
+
+        return clippedRegions;
+    }
+
+    /**
+     * Trim this active to just the span, producing a new active region without any reads that has only
+     * the extent of newExtend intersected with the current extent
+     * @param span the new extend of the active region we want
+     * @param extension the extension size we want for the newly trimmed active region
+     * @return a non-null, empty active region
+     */
+    public ActiveRegion trim(final GenomeLoc span, final int extension) {
+        if ( span == null ) throw new IllegalArgumentException("Active region extent cannot be null");
+        if ( extension < 0) throw new IllegalArgumentException("the extension size must be 0 or greater");
+        final int extendStart = Math.max(1,span.getStart() - extension);
+        final int maxStop = genomeLocParser.getContigs().getSequence(span.getContigIndex()).getSequenceLength();
+        final int extendStop = Math.min(span.getStop() + extension, maxStop);
+        final GenomeLoc extendedSpan = genomeLocParser.createGenomeLoc(span.getContig(), extendStart, extendStop);
+        return trim(span, extendedSpan);
+
+//TODO - Inconsiste support of substates trimming. Check lack of consistency!!!!
+//        final GenomeLoc subLoc = getLocation().intersect(span);
+//        final int subStart = subLoc.getStart() - getLocation().getStart();
+//        final int subEnd = subStart + subLoc.size();
+//        final List<ActivityProfileState> subStates = supportingStates.isEmpty() ? supportingStates : supportingStates.subList(subStart, subEnd);
+//        return new ActiveRegion( subLoc, subStates, isActive, genomeLocParser, extension );
+
+    }
+
    /**
     * Trim this active region to the given span, using span itself as the extended span
     * (i.e. no extra extension beyond span).
     *
     * @param span the new extent of the active region we want
     * @return a non-null active region
     */
    public ActiveRegion trim(final GenomeLoc span) {
        return trim(span,span);
    }
+
    /**
     * Trim this active region to no more than the span, producing a new active region with properly trimmed reads that
     * attempts to provide the best possible representation of this active region covering the span.
     *
     * The challenge here is that span may (1) be larger than can be represented by this active region
     * + its original extension and (2) the extension must be symmetric on both sides.  This algorithm
     * therefore determines how best to represent span as a subset of the span of this
     * region with a padding value that captures as much of the span as possible.
     *
     * For example, suppose this active region is
     *
     * Active:    100-200 with extension of 50, so that the true span is 50-250
     * NewExtent: 150-225 saying that we'd ideally like to just have bases 150-225
     *
     * Here we represent the active region as an active region from 150-200 with 25 bp of padding.
     *
     * The overall constraint is that the active region can never exceed the original active region, and
     * the extension is chosen to maximize overlap with the desired region
     *
     * @param span the new extent of the active region we want
     * @param extendedSpan the desired span including any extension; must fully contain span
     * @return a non-null active region holding the reads of this region hard-clipped to the new
     *         extended span (reads that no longer overlap it, or are empty after clipping, are dropped)
     * @throws IllegalArgumentException if either argument is null or extendedSpan doesn't contain span
     */
    public ActiveRegion trim(final GenomeLoc span, final GenomeLoc extendedSpan) {
        if ( span == null ) throw new IllegalArgumentException("Active region extent cannot be null");
        if ( extendedSpan == null ) throw new IllegalArgumentException("Active region extended span cannot be null");
        if ( ! extendedSpan.containsP(span))
            throw new IllegalArgumentException("The requested extended must fully contain the requested span");

        final GenomeLoc subActive = getLocation().intersect(span);
        // smallest symmetric extension, capped at our own extension, that covers as much
        // of the requested extended span as possible
        final int requiredOnRight = Math.max(extendedSpan.getStop() - subActive.getStop(), 0);
        final int requiredOnLeft = Math.max(subActive.getStart() - extendedSpan.getStart(), 0);
        final int requiredExtension = Math.min(Math.max(requiredOnLeft, requiredOnRight), getExtension());

        final ActiveRegion result = new ActiveRegion( subActive, Collections.<ActivityProfileState>emptyList(), isActive, genomeLocParser, requiredExtension );

        final List<GATKSAMRecord> myReads = getReads();
        final GenomeLoc resultExtendedLoc = result.getExtendedLoc();
        final int resultExtendedLocStart = resultExtendedLoc.getStart();
        final int resultExtendedLocStop = resultExtendedLoc.getStop();

        // hard-clip each read to the new extended span, keeping only non-empty reads that still overlap it
        final List<GATKSAMRecord> trimmedReads = new ArrayList<>(myReads.size());
        for( final GATKSAMRecord read : myReads ) {
            final GATKSAMRecord clippedRead = ReadClipper.hardClipToRegion(read,
                    resultExtendedLocStart, resultExtendedLocStop);
            if( result.readOverlapsRegion(clippedRead) && clippedRead.getReadLength() > 0 )
                trimmedReads.add(clippedRead);
        }
        result.clearReads();
        // re-sort before adding, since add() requires coordinate-sorted order
        result.addAll(ReadUtils.sortReadsByCoordinate(trimmedReads));
        return result;
    }
+
    /**
     * Set whether this region has been finalized.
     *
     * @param value the new finalized flag
     */
    public void setFinalized(final boolean value) {
        hasBeenFinalized = value;
    }
+
    /**
     * Has this region been finalized (as set via setFinalized)?
     *
     * @return true if setFinalized(true) was called on this region
     */
    public boolean isFinalized() {
        return hasBeenFinalized;
    }
+
+}
\ No newline at end of file
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/activeregion/ActiveRegionReadState.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/activeregion/ActiveRegionReadState.java
new file mode 100644
index 0000000..57fd359
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/activeregion/ActiveRegionReadState.java
@@ -0,0 +1,40 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.activeregion;
+
/**
 * Describes how a read relates to an assigned ActiveRegion
 *
 * User: thibault
 * Date: 11/26/12
 * Time: 2:35 PM
 */
public enum ActiveRegionReadState {
    PRIMARY,        // the assigned region is the read's primary region
    NONPRIMARY,     // the region overlaps the read, but it is not the read's primary region
    EXTENDED,       // the region would overlap the read only if the region were extended
    UNMAPPED        // the read is not mapped to the reference
}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/activeregion/ActivityProfile.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/activeregion/ActivityProfile.java
new file mode 100644
index 0000000..cdc7cb8
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/activeregion/ActivityProfile.java
@@ -0,0 +1,520 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.activeregion;
+
+import com.google.java.contract.Ensures;
+import com.google.java.contract.Requires;
+import org.broadinstitute.gatk.utils.GenomeLoc;
+import org.broadinstitute.gatk.utils.GenomeLocParser;
+import org.broadinstitute.gatk.utils.GenomeLocSortedSet;
+
+import java.util.*;
+
+/**
+ * Class holding information about per-base activity scores for the
+ * active region traversal
+ *
+ * @author Mark DePristo
+ * @since Date created
+ */
+public class ActivityProfile {
    // per-position activity states accumulated so far, in strict coordinate order
    protected final List<ActivityProfileState> stateList;
    // used to create GenomeLocs and to look up contig lengths
    protected final GenomeLocParser parser;
    // if non-null, popped regions are split/trimmed to these intervals; null means no restriction
    protected final GenomeLocSortedSet restrictToIntervals;

    // upper bound, in bp, on how far probability mass may be propagated between states
    protected final int maxProbPropagationDistance;
    // states with isActiveProb strictly above this threshold are considered active
    protected final double activeProbThreshold;

    // single-bp locs of the first and last states added; null while the profile is empty
    protected GenomeLoc regionStartLoc = null;
    protected GenomeLoc regionStopLoc = null;

    /**
     * A cached value of the regionStartLoc contig length, to make calls to
     * getCurrentContigLength efficient; -1 until the first state is added
     */
    protected int contigLength = -1;
+
    /**
     * Create a new empty ActivityProfile with no interval restriction
     * @param parser the parser we can use to create genome locs, cannot be null
     * @param maxProbPropagationDistance region probability propagation distance beyond its maximum size
     * @param activeProbThreshold threshold for the probability of an active profile state being active
     */
    public ActivityProfile(final GenomeLocParser parser, final int maxProbPropagationDistance, final double activeProbThreshold) {
        this(parser, maxProbPropagationDistance, activeProbThreshold, null);
    }
+
+    /**
+     * Create a empty ActivityProfile, restricting output to profiles overlapping intervals, if not null
+     * @param parser the parser we can use to create genome locs, cannot be null
+     * @param maxProbPropagationDistance region probability propagation distance beyond it's maximum size
+     * @param activeProbThreshold threshold for the probability of a profile state being active
+     * @param intervals only include states that are within these intervals, if not null
+     */
+    public ActivityProfile(final GenomeLocParser parser, final int maxProbPropagationDistance, final double activeProbThreshold, final GenomeLocSortedSet intervals) {
+        if ( parser == null ) throw new IllegalArgumentException("parser cannot be null");
+
+        this.parser = parser;
+        this.stateList = new ArrayList<ActivityProfileState>();
+        this.restrictToIntervals = intervals;
+        this.maxProbPropagationDistance = maxProbPropagationDistance;
+        this.activeProbThreshold = activeProbThreshold;
+    }
+
+    @Override
+    public String toString() {
+        return "ActivityProfile{" +
+                "start=" + regionStartLoc +
+                ", stop=" + regionStopLoc +
+                '}';
+    }
+
+    /**
+     * How far away can probability mass be moved around in this profile?
+     *
+     * This distance puts an upper limit on how far, in bp, we will ever propagate probability max around
+     * when adding a new ActivityProfileState.  For example, if the value of this function is
+     * 10, and you are looking at a state at bp 5, and we know that no states beyond 5 + 10 will have
+     * their probability propagated back to that state.
+     *
+     * @return a positive integer distance in bp
+     */
+    @Ensures("result >= 0")
+    public int getMaxProbPropagationDistance() {
+        return maxProbPropagationDistance;
+    }
+
+    /**
+     * How many profile results are in this profile?
+     * @return the number of profile results
+     */
+    @Ensures("result >= 0")
+    public int size() {
+        return stateList.size();
+    }
+
+    /**
+     * Is this profile empty?
+     * @return true if the profile is empty
+     */
+    @Ensures("isEmpty() == (size() == 0)")
+    public boolean isEmpty() {
+        return stateList.isEmpty();
+    }
+
+    /**
+     * Get the span of this activity profile, which is from the start of the first state to the stop of the last
+     * @return a potentially null GenomeLoc.  Will be null if this profile is empty
+     */
+    public GenomeLoc getSpan() {
+        return isEmpty() ? null : regionStartLoc.endpointSpan(regionStopLoc);
+    }
+
    /**
     * Get the contig index of the contig this profile lies on.
     * @return the contig index of the profile's start loc; undefined if the profile is empty
     */
    @Requires("! isEmpty()")
    public int getContigIndex() {
        return regionStartLoc.getContigIndex();
    }
+
    /**
     * Get the stop position of the last state in this profile.
     * @return the stop of the profile's last loc; undefined if the profile is empty
     */
    @Requires("! isEmpty()")
    public int getStop() {
        return regionStopLoc.getStop();
    }
+
    /**
     * Get the list of active profile results in this object.
     *
     * Note this returns the live internal list, not a copy -- mutations are visible to
     * this profile.
     *
     * @return a non-null, ordered list of active profile results
     */
    @Ensures("result != null")
    protected List<ActivityProfileState> getStateList() {
        return stateList;
    }
+
+    /**
+     * Get the probabilities of the states as a single linear array of doubles
+     * @return a non-null array
+     */
+    @Ensures("result != null")
+    protected double[] getProbabilitiesAsArray() {
+        final double[] probs = new double[getStateList().size()];
+        int i = 0;
+        for ( final ActivityProfileState state : getStateList() )
+            probs[i++] = state.isActiveProb;
+        return probs;
+    }
+
    /**
     * Helper function that gets the genome loc for a site offset from relativeLoc, protecting ourselves from
     * falling off the edge of the contig.
     *
     * Note the returned loc is always a single-bp loc on regionStartLoc's contig, not relativeLoc's.
     *
     * @param relativeLoc the location offset is relative to
     * @param offset the offset from relativeLoc where we'd like to create a GenomeLoc
     * @return a genome loc with relativeLoc.start + offset, if this is on the contig, null otherwise
     */
    @Requires("relativeLoc != null")
    protected GenomeLoc getLocForOffset(final GenomeLoc relativeLoc, final int offset) {
        final int start = relativeLoc.getStart() + offset;
        // NOTE(review): guards start < 0 but not start == 0; if coordinates are 1-based a
        // start of 0 slips through -- confirm whether parser.createGenomeLoc rejects it
        if ( start < 0 || start > getCurrentContigLength() ) {
            return null;
        } else {
            return parser.createGenomeLoc(regionStartLoc.getContig(), regionStartLoc.getContigIndex(), start, start);
        }
    }
+
    /**
     * Get the length of the current contig.
     *
     * Returns the value cached in contigLength, which add() refreshes whenever a new
     * region start is established.
     *
     * @return the length in bp
     */
    @Requires("regionStartLoc != null")
    @Ensures("result > 0")
    private int getCurrentContigLength() {
        return contigLength;
    }
+
+    // --------------------------------------------------------------------------------
+    //
+    // routines to add states to a profile
+    //
+    // --------------------------------------------------------------------------------
+
    /**
     * Add the next ActivityProfileState to this profile.
     *
     * Must be contiguous with the previously added result, or an IllegalArgumentException will be thrown
     *
     * @param state a well-formed ActivityProfileState result to incorporate into this profile
     * @throws IllegalArgumentException if state's loc isn't immediately after the last added loc
     */
    @Requires("state != null")
    public void add(final ActivityProfileState state) {
        final GenomeLoc loc = state.getLoc();

        if ( regionStartLoc == null ) {
            // first state: establish the profile's span and cache the contig length
            regionStartLoc = loc;
            regionStopLoc = loc;
            contigLength = parser.getContigInfo(regionStartLoc.getContig()).getSequenceLength();
        } else {
            // enforce strict 1-bp contiguity with the previously added state
            if ( regionStopLoc.getStart() != loc.getStart() - 1 )
                throw new IllegalArgumentException("Bad add call to ActivityProfile: loc " + loc + " not immediately after last loc " + regionStopLoc );
            regionStopLoc = loc;
        }

        // a state may expand into several derived states (see processState); fold each one in
        final Collection<ActivityProfileState> processedStates = processState(state);
        for ( final ActivityProfileState processedState : processedStates ) {
            incorporateSingleState(processedState);
        }
    }
+
    /**
     * Incorporate a single activity profile state into the current list of states
     *
     * If state's position occurs immediately after the last position in this profile, then
     * the state is appended to the state list.  If it's within the existing states list,
     * the prob of stateToAdd is added to its corresponding state in the list.  If the
     * position would be before the start of this profile, stateToAdd is simply ignored.
     *
     * @param stateToAdd the state we want to add to the states list
     * @throws IllegalArgumentException if stateToAdd isn't contiguous with the existing states
     */
    @Requires("stateToAdd != null")
    private void incorporateSingleState(final ActivityProfileState stateToAdd) {
        // offset of the state relative to the profile start; may be negative
        final int position = stateToAdd.getOffset(regionStartLoc);

        if ( position > size() )
            // should we allow this?  probably not
            throw new IllegalArgumentException("Must add state contiguous to existing states: adding " + stateToAdd);

        if ( position >= 0 ) {
            // ignore states starting before this region's start
            if ( position < size() ) {
                // merge into the existing state by summing the active probabilities
                stateList.get(position).isActiveProb += stateToAdd.isActiveProb;
            } else {
                // position == size() here: append; the check below is a defensive sanity check
                if ( position != size() ) throw new IllegalStateException("position == size but it wasn't");
                stateList.add(stateToAdd);
            }
        }
    }
+
    /**
     * Process justAddedState, returning a collection of derived states that should actually be added to the stateList
     *
     * The purpose of this function is to transform justAddedState, if needed, into a series of atomic states
     * that we actually want to track.  For example, if state is for soft clips, we transform that single
     * state into a list of states that surround the state up to the distance of the soft clip.
     *
     * Can be overridden by subclasses to transform states in any way
     *
     * There's no particular contract for the output states, except that they can never refer to states
     * beyond the current end of the stateList unless they explicitly include preceding states before
     * the reference.  So for example if the current state list is [1, 2, 3] this function could return
     * [1,2,3,4,5] but not [1,2,3,5].
     *
     * @param justAddedState the state our client provided to use to add to the list
     * @return a list of derived states that should actually be added to this profile's state list
     */
    protected Collection<ActivityProfileState> processState(final ActivityProfileState justAddedState) {
        if ( justAddedState.resultState.equals(ActivityProfileState.Type.HIGH_QUALITY_SOFT_CLIPS) ) {
            // special code to deal with the problem that high quality soft clipped bases aren't added to pileups
            final List<ActivityProfileState> states = new LinkedList<ActivityProfileState>();
            // add no more than the max prob propagation distance num HQ clips
            final int numHQClips = Math.min(justAddedState.resultValue.intValue(), getMaxProbPropagationDistance());
            // spread this state's probability symmetrically over +/- numHQClips positions,
            // skipping any position off the contig (getLocForOffset returns null there)
            for( int jjj = - numHQClips; jjj <= numHQClips; jjj++ ) {
                final GenomeLoc loc = getLocForOffset(justAddedState.getLoc(), jjj);
                if ( loc != null )
                    states.add(new ActivityProfileState(loc, justAddedState.isActiveProb));
            }

            return states;
        } else {
            // all other state types pass through unchanged
            return Collections.singletonList(justAddedState);
        }
    }
+
+    // --------------------------------------------------------------------------------
+    //
+    // routines to get active regions from the profile
+    //
+    // --------------------------------------------------------------------------------
+
+    /**
+     * Get the next completed active regions from this profile, and remove all states supporting them from this profile
+     *
+     * Takes the current profile and finds all of the active / inactive from the start of the profile that are
+     * ready.  By ready we mean unable to have their probability modified any longer by future additions to the
+     * profile.  The regions that are popped off the profile take their states with them, so the start of this
+     * profile will always be after the end of the last region returned here.
+     *
+     * The regions are returned sorted by genomic position.
+     *
+     * This function may not return anything in the list, if no regions are ready
+     *
+     * No returned region will be larger than maxRegionSize.
+     *
+     * @param activeRegionExtension the extension value to provide to the constructed regions
+     * @param minRegionSize the minimum region size, in the case where we have to cut up regions that are too large
+     * @param maxRegionSize the maximize size of the returned region
+     * @param forceConversion if true, we'll return a region whose end isn't sufficiently far from the end of the
+     *                        stateList.  Used to close out the active region when we've hit some kind of end (such
+     *                        as the end of the contig)
+     * @return a non-null list of active regions
+     */
+    @Ensures("result != null")
+    public List<ActiveRegion> popReadyActiveRegions(final int activeRegionExtension, final int minRegionSize, final int maxRegionSize, final boolean forceConversion) {
+        if ( activeRegionExtension < 0 ) throw new IllegalArgumentException("activeRegionExtension must be >= 0 but got " + activeRegionExtension);
+        if ( minRegionSize < 1 ) throw new IllegalArgumentException("minRegionSize must be >= 1 but got " + minRegionSize);
+        if ( maxRegionSize < 1 ) throw new IllegalArgumentException("maxRegionSize must be >= 1 but got " + maxRegionSize);
+
+        final LinkedList<ActiveRegion> regions = new LinkedList<ActiveRegion>();
+
+        while ( true ) {
+            final ActiveRegion nextRegion = popNextReadyActiveRegion(activeRegionExtension, minRegionSize, maxRegionSize, forceConversion);
+            if ( nextRegion == null )
+                return regions;
+            else {
+                if ( restrictToIntervals == null )
+                    regions.add(nextRegion);
+                else
+                    regions.addAll(nextRegion.splitAndTrimToIntervals(restrictToIntervals));
+            }
+        }
+    }
+
    /**
     * Helper function for popReadyActiveRegions that pops the first ready region off the front of this profile
     *
     * If a region is returned, modifies the state of this profile so that states used to make the region are
     * no longer part of the profile.  Associated information (like the region start position) of this profile
     * are also updated.
     *
     * @param activeRegionExtension the extension value to provide to the constructed regions
     * @param minRegionSize the minimum region size, in the case where we have to cut up regions that are too large
     * @param maxRegionSize the maximum size of the returned region
     * @param forceConversion if true, we'll return a region whose end isn't sufficiently far from the end of the
     *                        stateList.  Used to close out the active region when we've hit some kind of end (such
     *                        as the end of the contig)
     * @return a fully formed active region, or null if none can be made
     */
    private ActiveRegion popNextReadyActiveRegion(final int activeRegionExtension, final int minRegionSize, final int maxRegionSize, final boolean forceConversion) {
        if ( stateList.isEmpty() )
            return null;

        // If we are flushing the activity profile we need to trim off the excess states so that we don't create regions outside of our current processing interval
        if( forceConversion ) {
            final List<ActivityProfileState> statesToTrimAway = new ArrayList<ActivityProfileState>(stateList.subList(getSpan().size(), stateList.size()));
            stateList.removeAll(statesToTrimAway);
        }

        final ActivityProfileState first = stateList.get(0);
        // active means strictly above the threshold
        final boolean isActiveRegion = first.isActiveProb > activeProbThreshold;
        final int offsetOfNextRegionEnd = findEndOfRegion(isActiveRegion, minRegionSize, maxRegionSize, forceConversion);
        if ( offsetOfNextRegionEnd == -1 )
            // couldn't find a valid ending offset, so we return null
            return null;

        // we need to create the active region, and clip out the states we're extracting from this profile;
        // clearing the subList view removes those elements from stateList itself
        final List<ActivityProfileState> sub = stateList.subList(0, offsetOfNextRegionEnd + 1);
        final List<ActivityProfileState> supportingStates = new ArrayList<ActivityProfileState>(sub);
        sub.clear();

        // update the start and stop locations as necessary
        if ( stateList.isEmpty() ) {
            regionStartLoc = regionStopLoc = null;
        } else {
            regionStartLoc = stateList.get(0).getLoc();
        }
        final GenomeLoc regionLoc = parser.createGenomeLoc(first.getLoc().getContig(), first.getLoc().getStart(), first.getLoc().getStart() + offsetOfNextRegionEnd);
        return new ActiveRegion(regionLoc, supportingStates, isActiveRegion, parser, activeRegionExtension);
    }
+
+    /**
+     * Find the end of the current region, returning the index into the element isActive element, or -1 if the region isn't done
+     *
+     * The current region is defined from the start of the stateList, looking for elements that have the same isActiveRegion
+     * flag (i.e., if isActiveRegion is true we are looking for states with isActiveProb > threshold, or alternatively
+     * for states < threshold).  The maximize size of the returned region is maxRegionSize.  If forceConversion is
+     * true, then we'll return the region end even if this isn't safely beyond the max prob propagation distance.
+     *
+     * Note that if isActiveRegion is true, and we can construct a active region > maxRegionSize in bp, we
+     * find the further local minimum within that max region, and cut the region there, under the constraint
+     * that the resulting region must be at least minRegionSize in bp.
+     *
+     * @param isActiveRegion is the region we're looking for an active region or inactive region?
+     * @param minRegionSize the minimum region size, in the case where we have to cut up regions that are too large
+     * @param maxRegionSize the maximize size of the returned region
+     * @param forceConversion if true, we'll return a region whose end isn't sufficiently far from the end of the
+     *                        stateList.  Used to close out the active region when we've hit some kind of end (such
+     *                        as the end of the contig)
+     * @return the index into stateList of the last element of this region, or -1 if it cannot be found
+     */
+    @Ensures({
+            "result >= -1",
+            "result == -1 || result < maxRegionSize",
+            "! (result == -1 && forceConversion)"})
+    private int findEndOfRegion(final boolean isActiveRegion, final int minRegionSize, final int maxRegionSize, final boolean forceConversion) {
+        if ( ! forceConversion && stateList.size() < maxRegionSize + getMaxProbPropagationDistance() ) {
+            // we really haven't finalized at the probability mass that might affect our decision, so keep
+            // waiting until we do before we try to make any decisions
+            return -1;
+        }
+
+        int endOfActiveRegion = findFirstActivityBoundary(isActiveRegion, maxRegionSize);
+
+        if ( isActiveRegion && endOfActiveRegion == maxRegionSize )
+            // we've run to the end of the region, let's find a good place to cut
+            endOfActiveRegion = findBestCutSite(endOfActiveRegion, minRegionSize);
+
+        // we're one past the end, so i must be decremented
+        return endOfActiveRegion - 1;
+    }
+
+    /**
+     * Find the the local minimum within 0 - endOfActiveRegion where we should divide region
+     *
+     * This algorithm finds the global minimum probability state within the region [minRegionSize, endOfActiveRegion)
+     * (exclusive of endOfActiveRegion), and returns the state index of that state.
+     * that it
+     *
+     * @param endOfActiveRegion the last state of the current active region (exclusive)
+     * @param minRegionSize the minimum of the left-most region, after cutting
+     * @return the index of state after the cut site (just like endOfActiveRegion)
+     */
+    @Requires({"endOfActiveRegion >= minRegionSize", "minRegionSize >= 0"})
+    @Ensures({"result >= minRegionSize", "result <= endOfActiveRegion"})
+    private int findBestCutSite(final int endOfActiveRegion, final int minRegionSize) {
+        int minI = endOfActiveRegion - 1;
+        double minP = Double.MAX_VALUE;
+
+        for ( int i = minI; i >= minRegionSize - 1; i-- ) {
+            double cur = getProb(i);
+            if ( cur < minP && isMinimum(i) ) {
+                minP = cur;
+                minI = i;
+            }
+        }
+
+        return minI + 1;
+    }
+
+    /**
+     * Find the first index into the state list where the state is considered ! isActiveRegion
+     *
+     * Note that each state has a probability of being active, and this function thresholds that
+     * value on activeProbThreshold, coloring each state as active or inactive.  Finds the
+     * largest contiguous stretch of states starting at the first state (index 0) with the same isActive
+     * state as isActiveRegion.  If the entire state list has the same isActive value, then returns
+     * maxRegionSize
+     *
+     * @param isActiveRegion are we looking for a stretch of active states, or inactive ones?
+     * @param maxRegionSize don't look for a boundary that would yield a region of size > maxRegionSize
+     * @return the index of the first state in the state list with isActive value != isActiveRegion, or maxRegionSize
+     *         if no such element exists
+     */
+    @Requires({"maxRegionSize > 0"})
+    @Ensures({"result >= 0", "result <= stateList.size()"})
+    private int findFirstActivityBoundary(final boolean isActiveRegion, final int maxRegionSize) {
+        final int nStates = stateList.size();
+        int endOfActiveRegion = 0;
+
+        while ( endOfActiveRegion < nStates && endOfActiveRegion < maxRegionSize ) {
+            if ( getProb(endOfActiveRegion) > activeProbThreshold != isActiveRegion ) {
+                break;
+            }
+            endOfActiveRegion++;
+        }
+
+        return endOfActiveRegion;
+    }
+
+    /**
+     * Helper function to get the probability of the state at offset index
+     * @param index a valid offset into the state list
+     * @return the isActiveProb of the state at index
+     */
+    @Requires({"index >= 0", "index < stateList.size()"})
+    private double getProb(final int index) {
+        return stateList.get(index).isActiveProb;
+    }
+
+    /**
+     * Is the probability at index in a local minimum?
+     *
+     * Checks that the probability at index is <= both the probabilities to either side.
+     * Returns false if index is at the end or the start of the state list.
+     *
+     * @param index the index of the state we want to test
+     * @return true if prob at state is a minimum, false otherwise
+     */
+    @Requires({"index >= 0", "index < stateList.size()"})
+    private boolean isMinimum(final int index) {
+        if ( index == stateList.size() - 1 )
+            // we cannot be at a minimum if the current position is the last in the state list
+            return false;
+        else if ( index < 1 )
+            // we cannot be at a minimum if the current position is the first or second
+            return false;
+        else {
+            final double indexP = getProb(index);
+            return indexP <= getProb(index+1) && indexP < getProb(index-1);
+        }
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/activeregion/ActivityProfileState.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/activeregion/ActivityProfileState.java
new file mode 100644
index 0000000..a1df0ba
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/activeregion/ActivityProfileState.java
@@ -0,0 +1,112 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.activeregion;
+
+import com.google.java.contract.Ensures;
+import com.google.java.contract.Requires;
+import org.broadinstitute.gatk.utils.GenomeLoc;
+
+/**
+ * The state of an active region walker's isActive call at a specific locus in the genome
+ *
+ * User: rpoplin
+ * Date: 7/27/12
+ */
+public class ActivityProfileState {
+    final private GenomeLoc loc;
+    public double isActiveProb;
+    public Type resultState;
+    public Number resultValue;
+
+    public enum Type {
+        NONE,
+        HIGH_QUALITY_SOFT_CLIPS
+    }
+
+    /**
+     * Create a new ActivityProfileState at loc with probability of being active of isActiveProb
+     *
+     * @param loc the position of the result profile (for debugging purposes)
+     * @param isActiveProb the probability of being active (between 0 and 1)
+     */
+    @Requires({"loc != null", "isActiveProb >= 0.0 && isActiveProb <= 1.0"})
+    public ActivityProfileState(final GenomeLoc loc, final double isActiveProb) {
+        this(loc, isActiveProb, Type.NONE, null);
+    }
+
+    /**
+     * Create a new ActivityProfileState at loc with probability of being active of isActiveProb that maintains some
+     * information about the result state and value
+     *
+     * The only state value in use is HIGH_QUALITY_SOFT_CLIPS, and here the value is interpreted as the number
+     * of bp affected by the soft clips.
+     *
+     * @param loc the position of the result profile (for debugging purposes)
+     * @param isActiveProb the probability of being active (between 0 and 1)
+     */
+    @Requires({"loc != null", "isActiveProb >= 0.0 && isActiveProb <= 1.0"})
+    public ActivityProfileState(final GenomeLoc loc, final double isActiveProb, final Type resultState, final Number resultValue) {
+        // make sure the location of that activity profile is 1
+        if ( loc.size() != 1 )
+            throw new IllegalArgumentException("Location for an ActivityProfileState must have to size 1 bp but saw " + loc);
+        if ( resultValue != null && resultValue.doubleValue() < 0 )
+            throw new IllegalArgumentException("Result value isn't null and its < 0, which is illegal: " + resultValue);
+
+        this.loc = loc;
+        this.isActiveProb = isActiveProb;
+        this.resultState = resultState;
+        this.resultValue = resultValue;
+    }
+
+    /**
+     * The offset of state w.r.t. our current region's start location
+     * @param regionStartLoc the start of the region, as a genome loc
+     * @return the position of this profile relative to the start of this region
+     */
+    public int getOffset(final GenomeLoc regionStartLoc) {
+        return getLoc().getStart() - regionStartLoc.getStart();
+    }
+
+
+    /**
+     * Get the genome loc associated with the ActivityProfileState
+     * @return the location of this result
+     */
+    @Ensures("result != null")
+    public GenomeLoc getLoc() {
+        return loc;
+    }
+
+    @Override
+    public String toString() {
+        return "ActivityProfileState{" +
+                "loc=" + loc +
+                ", isActiveProb=" + isActiveProb +
+                ", resultState=" + resultState +
+                ", resultValue=" + resultValue +
+                '}';
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/activeregion/BandPassActivityProfile.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/activeregion/BandPassActivityProfile.java
new file mode 100644
index 0000000..8458b3a
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/activeregion/BandPassActivityProfile.java
@@ -0,0 +1,194 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.activeregion;
+
+import com.google.java.contract.Ensures;
+import org.broadinstitute.gatk.utils.GenomeLoc;
+import org.broadinstitute.gatk.utils.GenomeLocParser;
+import org.broadinstitute.gatk.utils.GenomeLocSortedSet;
+import org.broadinstitute.gatk.utils.MathUtils;
+
+import java.util.Collection;
+import java.util.LinkedList;
+
+/**
+ * A band pass filtering version of the activity profile
+ *
+ * Applies a band pass filter with a Gaussian kernel to the input state probabilities to smooth
+ * them out of an interval
+ *
+ * @author Mark DePristo
+ * @since 2011
+ */
+public class BandPassActivityProfile extends ActivityProfile {
+    public static final int MAX_FILTER_SIZE = 50;
+    private final static double MIN_PROB_TO_KEEP_IN_FILTER = 1e-5;
+    public static final double DEFAULT_SIGMA = 17.0;
+
+    private final int filterSize;
+    private final double sigma;
+    private final double[] GaussianKernel;
+
+    /**
+     * Create a new BandPassActivityProfile with default sigma and filter sizes
+     *
+     * @see #BandPassActivityProfile(org.broadinstitute.gatk.utils.GenomeLocParser, org.broadinstitute.gatk.utils.GenomeLocSortedSet, int, double, int, double, boolean)
+     */
+    public BandPassActivityProfile(final GenomeLocParser parser, final GenomeLocSortedSet restrictToIntervals,
+                                   final int maxProbPropagationDistance, final double activeProbThreshold) {
+        this(parser, restrictToIntervals, maxProbPropagationDistance, activeProbThreshold, MAX_FILTER_SIZE, DEFAULT_SIGMA);
+    }
+
+    /**
+     * @see #BandPassActivityProfile(org.broadinstitute.gatk.utils.GenomeLocParser, org.broadinstitute.gatk.utils.GenomeLocSortedSet, int, double, int, double, boolean)
+     *
+     * sets adaptiveFilterSize to true
+     */
+    public BandPassActivityProfile(final GenomeLocParser parser, final GenomeLocSortedSet restrictToIntervals,
+                                   final int maxProbPropagationDistance, final double activeProbThreshold,
+                                   final int maxFilterSize, final double sigma) {
+        this(parser, restrictToIntervals, maxProbPropagationDistance, activeProbThreshold, maxFilterSize, sigma, true);
+    }
+
+    /**
+     * Create an activity profile that implements a band pass filter on the states
+     *
+     * @param parser our genome loc parser
+     * @param restrictToIntervals only include states that are within these intervals, if not null
+     * @param maxProbPropagationDistance region probability propagation distance beyond it's maximum size
+     * @param activeProbThreshold  threshold for the probability of a profile state being active
+     * @param maxFilterSize the maximum size of the band pass filter we are allowed to create, regardless of sigma
+     * @param sigma the variance of the Gaussian kernel for this band pass filter
+     * @param adaptiveFilterSize if true, use the kernel itself to determine the best filter size
+     */
+    public BandPassActivityProfile(final GenomeLocParser parser, final GenomeLocSortedSet restrictToIntervals, final int maxProbPropagationDistance,
+                                   final double activeProbThreshold, final int maxFilterSize, final double sigma, final boolean adaptiveFilterSize) {
+        super(parser, maxProbPropagationDistance, activeProbThreshold, restrictToIntervals);
+
+        if ( sigma < 0 ) throw new IllegalArgumentException("Sigma must be greater than or equal to 0 but got " + sigma);
+
+        // setup the Gaussian kernel for the band pass filter
+        this.sigma = sigma;
+        final double[] fullKernel = makeKernel(maxFilterSize, sigma);
+        this.filterSize = adaptiveFilterSize ? determineFilterSize(fullKernel, MIN_PROB_TO_KEEP_IN_FILTER) : maxFilterSize;
+        this.GaussianKernel = makeKernel(this.filterSize, sigma);
+    }
+
+    protected static int determineFilterSize(final double[] kernel, final double minProbToKeepInFilter) {
+        final int middle = (kernel.length - 1) / 2;
+        int filterEnd = middle;
+        while ( filterEnd > 0 ) {
+            if ( kernel[filterEnd - 1] < minProbToKeepInFilter ) {
+                break;
+            }
+            filterEnd--;
+        }
+        return middle - filterEnd;
+    }
+
+    protected static double[] makeKernel(final int filterSize, final double sigma) {
+        final int bandSize = 2 * filterSize + 1;
+        final double[] kernel = new double[bandSize];
+        for( int iii = 0; iii < bandSize; iii++ ) {
+            kernel[iii] = MathUtils.normalDistribution(filterSize, sigma, iii);
+        }
+        return MathUtils.normalizeFromRealSpace(kernel);
+    }
+
+    /**
+     * Our maximize propagation distance is whatever our parent's is, plus our filter size
+     *
+     * Stops the profile from interpreting sites that aren't yet fully determined due to
+     * propagation of the probabilities.
+     *
+     * @return the distance in bp we might move our probabilities around for some site i
+     */
+    @Override
+    public int getMaxProbPropagationDistance() {
+        return super.getMaxProbPropagationDistance() + filterSize;
+    }
+
+    /**
+     * Get the size (in bp) of the band pass filter
+     * @return a positive integer
+     */
+    @Ensures("result >= 1")
+    public int getBandSize() {
+        return 2 * filterSize + 1;
+    }
+
+    /**
+     * Get the filter size (which is the size of each wing of the band, minus the center point)
+     * @return a positive integer
+     */
+    @Ensures("result >= 0")
+    public int getFilteredSize() {
+        return filterSize;
+    }
+
+    /**
+     * Get the Gaussian kernel sigma value
+     * @return a positive double
+     */
+    @Ensures("result >= 0")
+    public double getSigma() {
+        return sigma;
+    }
+
+    /**
+     * Get the kernel of this band pass filter.  Do not modify returned result
+     * @return the kernel used in this band pass filter
+     */
+    @Ensures({"result != null", "result.length == getBandSize()"})
+    protected double[] getKernel() {
+        return GaussianKernel;
+    }
+
+    /**
+     * Band pass the probabilities in the ActivityProfile, producing a new profile that's band pass filtered
+     * @return a new double[] that's the band-pass filtered version of this profile
+     */
+    @Override
+    protected Collection<ActivityProfileState> processState(final ActivityProfileState justAddedState) {
+        final Collection<ActivityProfileState> states = new LinkedList<ActivityProfileState>();
+
+        for ( final ActivityProfileState superState : super.processState(justAddedState) ) {
+            if ( superState.isActiveProb > 0.0 ) {
+                for( int jjj = -filterSize; jjj <= filterSize; jjj++ ) {
+                    final GenomeLoc loc = getLocForOffset(justAddedState.getLoc(), jjj);
+                    if ( loc != null ) {
+                        final double newProb = superState.isActiveProb * GaussianKernel[jjj + filterSize];
+                        states.add(new ActivityProfileState(loc, newProb));
+                    }
+                }
+            } else {
+                states.add(justAddedState);
+            }
+        }
+
+        return states;
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/analysis/AminoAcid.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/analysis/AminoAcid.java
new file mode 100644
index 0000000..af61415
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/analysis/AminoAcid.java
@@ -0,0 +1,114 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.analysis;
+
+/*
+ * Copyright (c) 2010 The Broad Institute
+ *
+ * Permission is hereby granted, free of charge, to any person
+ * obtaining a copy of this software and associated documentation
+ * files (the "Software"), to deal in the Software without
+ * restriction, including without limitation the rights to use,
+ * copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the
+ * Software is furnished to do so, subject to the following
+ * conditions:
+ *
+ * The above copyright notice and this permission notice shall be
+ * included in all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+ * OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+ * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+ * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+ * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+ * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+ * OTHER DEALINGS IN THE SOFTWARE.
+ */
+
/**
 * Table of the twenty standard amino acids (plus the stop signal), each carrying its full
 * display name, three-letter code, one-letter code, and the set of DNA codons encoding it
 * under the standard genetic code.
 *
 * @author chartl
 * @since June 28, 2010
 */

public enum AminoAcid {

    Alanine("Alanine","Ala","A",new String[]{"GCA","GCC","GCG","GCT"}),
    // NOTE(review): constant and display name are misspelled ("Arganine" vs "Arginine");
    // kept as-is because callers reference the constant and its name string by these spellings
    Arganine("Arganine","Arg","R",new String[]{"AGA","AGG","CGA","CGC","CGG","CGT"}),
    Asparagine("Asparagine","Asn","N",new String[]{"AAC","AAT"}),
    Aspartic_acid("Aspartic acid","Asp","D",new String[]{"GAT","GAC"}),
    // fixed: was {"TGC","TGC"} — "TGC" listed twice and "TGT" missing entirely
    Cysteine("Cysteine","Cys","C",new String[]{"TGC","TGT"}),
    Glutamic_acid("Glutamic acid","Glu","E",new String[]{"GAA","GAG"}),
    Glutamine("Glutamine","Gln","Q",new String[]{"CAA","CAG"}),
    Glycine("Glycine","Gly","G",new String[]{"GGA","GGC","GGG","GGT"}),
    Histidine("Histidine","His","H",new String[]{"CAC","CAT"}),
    Isoleucine("Isoleucine","Ile","I",new String[]{"ATA","ATC","ATT"}),
    Leucine("Leucine","Leu","L",new String[]{"CTA","CTC","CTG","CTT","TTA","TTG"}),
    Lysine("Lysine","Lys","K", new String[]{"AAA","AAG"}),
    Methionine("Methionine","Met","M",new String[]{"ATG"}),
    Phenylalanine("Phenylalanine","Phe","F",new String[]{"TTC","TTT"}),
    Proline("Proline","Pro","P",new String[]{"CCA","CCC","CCG","CCT"}),
    Serine("Serine","Ser","S",new String[]{"AGC","AGT","TCA","TCC","TCG","TCT"}),
    Stop_codon("Stop codon","Stop","*",new String[]{"TAA","TAG","TGA"}),
    Threonine("Threonine","Thr","T",new String[]{"ACA","ACC","ACG","ACT"}),
    Tryptophan("Tryptophan","Trp","W",new String[]{"TGG"}),
    Tyrosine("Tyrosine","Tyr","Y",new String[]{"TAC","TAT"}),
    Valine("Valine","Val","V",new String[]{"GTA","GTC","GTG","GTT"});

    // the DNA codons translating to this amino acid (package-visible, read by AminoAcidTable)
    final String[] codons;
    // full display name, e.g. "Alanine"
    final String fullName;
    // three-letter code, e.g. "Ala"
    final String code;
    // one-letter code, e.g. "A"
    final String letter;

    AminoAcid(String name, String shortName, String abbrev, String[] myCodons) {
        codons = myCodons;
        fullName = name;
        code = shortName;
        letter = abbrev;
    }

    /** @return the full display name of this amino acid, e.g. "Alanine" */
    public String getName() {
        return fullName;
    }

    /** @return the one-letter code of this amino acid, e.g. "A" */
    public String getLetter() {
        return letter;
    }

    /** @return the three-letter code of this amino acid, e.g. "Ala" */
    public String getCode() {
        return code;
    }

    /** @return true if this value represents the stop signal rather than an amino acid */
    public boolean isStop() {
        return this == Stop_codon;
    }

    @Override
    public String toString() {
        return getName();
    }

}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/analysis/AminoAcidTable.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/analysis/AminoAcidTable.java
new file mode 100644
index 0000000..e8bd5d1
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/analysis/AminoAcidTable.java
@@ -0,0 +1,94 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.analysis;
+
+import java.util.HashMap;
+
+/*
+ * Copyright (c) 2010 The Broad Institute
+ *
+ * Permission is hereby granted, free of charge, to any person
+ * obtaining a copy of this software and associated documentation
+ * files (the "Software"), to deal in the Software without
+ * restriction, including without limitation the rights to use,
+ * copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the
+ * Software is furnished to do so, subject to the following
+ * conditions:
+ *
+ * The above copyright notice and this permission notice shall be
+ * included in all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+ * OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+ * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+ * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+ * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+ * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+ * OTHER DEALINGS IN THE SOFTWARE.
+ */
+
+/**
+ * @author chartl
+ * @since June 28, 2010
+ */
+
+public class AminoAcidTable {
+    public HashMap<String,AminoAcid> tableByCodon = new HashMap<String,AminoAcid>(21);
+    public HashMap<String,AminoAcid> tableByCode = new HashMap<String,AminoAcid>(21);
+    public AminoAcidTable() {
+        for ( AminoAcid acid : AminoAcid.values() ) {
+            tableByCode.put(acid.getCode(),acid);
+            for ( String codon : acid.codons ) {
+                tableByCodon.put(codon,acid);
+            }
+        }
+    }
+
+    // todo -- these functions are for the genomic annotator and are named too generally -- they are
+    // todo -- actually accessors by codon; thus should be more specific.
+    public AminoAcid getEukaryoticAA(String codon) {
+        return tableByCodon.get(codon.toUpperCase());
+    }
+
+    public AminoAcid getMitochondrialAA(String codon, boolean isFirst) {
+        String upperCodon = codon.toUpperCase();
+        if ( isFirst && upperCodon.equals("ATT") || upperCodon.equals("ATA") ) {
+            return AminoAcid.Methionine;
+        } else if ( upperCodon.equals("AGA") || upperCodon.equals("AGG") ) {
+            return AminoAcid.Stop_codon;
+        } else if ( upperCodon.equals("TGA") ) {
+            return AminoAcid.Tryptophan;
+        } else {
+            return tableByCodon.get(upperCodon);
+        }
+    }
+
+    public AminoAcid getAminoAcidByCode(String code) {
+        return tableByCode.get(code);
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/analysis/AminoAcidUtils.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/analysis/AminoAcidUtils.java
new file mode 100644
index 0000000..175ea36
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/analysis/AminoAcidUtils.java
@@ -0,0 +1,77 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.analysis;
+
+/*
+ * Copyright (c) 2010 The Broad Institute
+ *
+ * Permission is hereby granted, free of charge, to any person
+ * obtaining a copy of this software and associated documentation
+ * files (the "Software"), to deal in the Software without
+ * restriction, including without limitation the rights to use,
+ * copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the
+ * Software is furnished to do so, subject to the following
+ * conditions:
+ *
+ * The above copyright notice and this permission notice shall be
+ * included in all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+ * OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+ * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+ * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+ * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+ * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+ * OTHER DEALINGS IN THE SOFTWARE.
+ */
+
+/**
+ * @author chartl
+ * @since June 28, 2010
+ */
+
+public class AminoAcidUtils {
+
+    /**
+     * Returns the full names of all amino acids, indexed by enum ordinal.
+     * AminoAcid.values() is in ordinal order, so names[i] == values()[i].getName().
+     */
+    public static String[] getAminoAcidNames() {
+        final AminoAcid[] acids = AminoAcid.values();
+        final String[] names = new String[acids.length];
+        for ( int i = 0; i < acids.length; i++ ) {
+            names[i] = acids[i].getName();
+        }
+        return names;
+    }
+
+    /**
+     * Returns the codes of all amino acids, indexed by enum ordinal.
+     * AminoAcid.values() is in ordinal order, so codes[i] == values()[i].getCode().
+     */
+    public static String[] getAminoAcidCodes() {
+        final AminoAcid[] acids = AminoAcid.values();
+        final String[] codes = new String[acids.length];
+        for ( int i = 0; i < acids.length; i++ ) {
+            codes[i] = acids[i].getCode();
+        }
+        return codes;
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/baq/BAQ.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/baq/BAQ.java
new file mode 100644
index 0000000..91562b6
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/baq/BAQ.java
@@ -0,0 +1,713 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.baq;
+
+import htsjdk.samtools.reference.IndexedFastaSequenceFile;
+import htsjdk.samtools.reference.ReferenceSequence;
+import htsjdk.samtools.CigarElement;
+import htsjdk.samtools.CigarOperator;
+import htsjdk.samtools.SAMRecord;
+import htsjdk.samtools.SAMUtils;
+import org.apache.log4j.Logger;
+import org.broadinstitute.gatk.utils.collections.Pair;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+import org.broadinstitute.gatk.utils.exceptions.UserException;
+import org.broadinstitute.gatk.utils.sam.ReadUtils;
+
+/*
+  The topology of the profile HMM:
+
+           /\             /\        /\             /\
+           I[1]           I[k-1]    I[k]           I[L]
+            ^   \      \    ^    \   ^   \      \   ^
+            |    \      \   |     \  |    \      \  |
+    M[0]   M[1] -> ... -> M[k-1] -> M[k] -> ... -> M[L]   M[L+1]
+                \      \/        \/      \/      /
+                 \     /\        /\      /\     /
+                       -> D[k-1] -> D[k] ->
+
+   M[0] points to every {M,I}[k] and every {M,I}[k] points M[L+1].
+
+   On input, _ref is the reference sequence and _query is the query
+   sequence. Both are sequences of 0/1/2/3/4 where 4 stands for an
+   ambiguous residue. iqual is the base quality. c sets the gap open
+   probability, gap extension probability and band width.
+
+   On output, state and q are arrays of length l_query. The higher 30
+   bits give the reference position the query base is matched to and the
+   lower two bits can be 0 (an alignment match) or 1 (an
+   insertion). q[i] gives the phred scaled posterior probability of
+   state[i] being wrong.
+ */
+public class BAQ {
+    private final static Logger logger = Logger.getLogger(BAQ.class);
+    private final static boolean DEBUG = false;
+
+    public enum CalculationMode {
+        OFF,                        // don't apply BAQ at all, the default
+        CALCULATE_AS_NECESSARY,     // do HMM BAQ calculation on the fly, as necessary, if there's no tag
+        RECALCULATE                 // do HMM BAQ calculation on the fly, regardless of whether there's a tag present
+    }
+
+    /** these are features that only the walker can override */
+    public enum QualityMode {
+        ADD_TAG,                    // calculate the BAQ, but write it into the reads as the BAQ tag, leaving QUAL field alone
+        OVERWRITE_QUALS,            // overwrite the quality field directly
+        DONT_MODIFY                 // do the BAQ, but don't modify the quality scores themselves, just return them in the function.
+    }
+
+    public static final String BAQ_TAG = "BQ";
+
+    // qual2prob[q] = 10^(-q/10): phred-scaled quality -> error probability, precomputed for all byte values
+    private static double[] qual2prob = new double[256];
+    static {
+        for (int i = 0; i < 256; ++i)
+            qual2prob[i] = Math.pow(10, -i/10.);
+    }
+
+    // Phred scaled now (changed 1/10/2011)
+    public static final double DEFAULT_GOP = 40;
+
+    /*  Takes a Phred Scale quality score and returns the error probability.
+     *
+     *  Quick conversion function to maintain internal structure of BAQ calculation on
+     *  probability scale, but take the user entered parameter in phred-scale.
+     *
+     *  @param x phred scaled score
+     *  @return probability of incorrect base call
+     */
+    private double convertFromPhredScale(double x) { return (Math.pow(10,(-x)/10.));}
+
+    public double cd = -1;      // gap open probability [1e-3]
+    private double ce = 0.1;    // gap extension probability [0.1]
+	private int cb = 7;         // band width [7]
+    private boolean includeClippedBases = false;
+
+    public byte getMinBaseQual() {
+        return minBaseQual;
+    }
+
+    /**
+     * Any bases with Q < MIN_BASE_QUAL are raised up to this base quality
+     */
+    private byte minBaseQual = 4;
+
+    public double getGapOpenProb() {
+        return cd;
+    }
+
+    public double getGapExtensionProb() {
+        return ce;
+    }
+
+    public int getBandWidth() {
+        return cb;
+    }
+
+    /**
+     * Use defaults for everything
+     */
+    public BAQ() {
+        this(DEFAULT_GOP);
+    }
+
+    /**
+     * Use defaults for everything except the (phred-scaled) gap open penalty.
+     */
+    public BAQ(final double gapOpenPenalty) {
+        cd = convertFromPhredScale(gapOpenPenalty);
+        initializeCachedData();
+    }
+
+
+
+    /**
+     * Create a new HmmGlocal object with specified parameters
+     *
+     * @param d gap open prob (not phred scaled!).
+     * @param e gap extension prob.
+     * @param b band width
+     * @param minBaseQual All bases with Q < minBaseQual are up'd to this value
+     */
+	public BAQ(final double d, final double e, final int b, final byte minBaseQual, boolean includeClippedBases) {
+		cd = d; ce = e; cb = b;
+        this.minBaseQual = minBaseQual;
+        this.includeClippedBases = includeClippedBases;
+        initializeCachedData();
+	}
+
+    private final static double EM = 0.33333333333;
+    private final static double EI = 0.25;
+
+    // EPSILONS[refBase][readBase][qual] = emission probability of readBase given refBase at that
+    // quality; 1.0 for any base pair outside ACGT/acgt (e.g. N), filled in by initializeCachedData()
+    private double[][][] EPSILONS = new double[256][256][SAMUtils.MAX_PHRED_SCORE+1];
+
+    private void initializeCachedData() {
+        for ( int i = 0; i < 256; i++ )
+            for ( int j = 0; j < 256; j++ )
+                for ( int q = 0; q <= SAMUtils.MAX_PHRED_SCORE; q++ ) {
+                    EPSILONS[i][j][q] = 1.0;
+                }
+
+        for ( char b1 : "ACGTacgt".toCharArray() ) {
+            for ( char b2 : "ACGTacgt".toCharArray() ) {
+                for ( int q = 0; q <= SAMUtils.MAX_PHRED_SCORE; q++ ) {
+                    // qualities below minBaseQual are raised to minBaseQual before conversion
+                    double qual = qual2prob[q < minBaseQual ? minBaseQual : q];
+                    // case-insensitive match: 1 - p(err) on match, p(err) * EM on mismatch
+                    double e = Character.toLowerCase(b1) == Character.toLowerCase(b2) ? 1 - qual : qual * EM;
+                    EPSILONS[(byte)b1][(byte)b2][q] = e;
+                }
+            }
+        }
+    }
+
+    /** Cached emission probability lookup; see EPSILONS. */
+    protected double calcEpsilon( byte ref, byte read, byte qualB ) {
+        return EPSILONS[ref][read][qualB];
+    }
+
+    // ####################################################################################################
+    //
+    // NOTE -- THIS CODE IS SYNCHRONIZED WITH CODE IN THE SAMTOOLS REPOSITORY.  CHANGES TO THIS CODE SHOULD BE
+    // NOTE -- PUSHED BACK TO HENG LI
+    //
+    // ####################################################################################################
+    /**
+     * Run the banded glocal profile-HMM forward/backward algorithm (see the topology comment
+     * above the class) and fill in the per-base MAP state and quality arrays.
+     *
+     * @param ref     reference bases covering the read window (plus band padding)
+     * @param query   full read bases
+     * @param qstart  offset into query/_iqual where the portion to process starts
+     * @param l_query number of query bases to process
+     * @param _iqual  base qualities for the full read (must be same length as query)
+     * @param state   output, may be null: per-base encoded state, (refPos << 2) | indelBits --
+     *                decode with stateIsIndel()/stateAlignedPosition()
+     * @param q       output, may be null: per-base phred-scaled probability that state[i] is wrong,
+     *                capped at 99 and floored at minBaseQual
+     * @return always 0
+     */
+    public int hmm_glocal(final byte[] ref, final byte[] query, int qstart, int l_query, final byte[] _iqual, int[] state, byte[] q) {
+        if ( ref == null ) throw new ReviewedGATKException("BUG: ref sequence is null");
+        if ( query == null ) throw new ReviewedGATKException("BUG: query sequence is null");
+        if ( _iqual == null ) throw new ReviewedGATKException("BUG: query quality vector is null");
+        if ( query.length != _iqual.length ) throw new ReviewedGATKException("BUG: read sequence length != qual length");
+        if ( l_query < 1 ) throw new ReviewedGATKException("BUG: length of query sequence < 0: " + l_query);
+        if ( qstart < 0 ) throw new ReviewedGATKException("BUG: query sequence start < 0: " + qstart);
+
+        //if ( q != null && q.length != state.length ) throw new ReviewedGATKException("BUG: BAQ quality length != read sequence length");
+        //if ( state != null && state.length != l_query ) throw new ReviewedGATKException("BUG: state length != read sequence length");
+
+		int i, k;
+
+        /*** initialization ***/
+		// change coordinates
+		final int l_ref = ref.length;
+
+		// set band width
+		int bw2, bw = l_ref > l_query? l_ref : l_query;
+        if (cb < Math.abs(l_ref - l_query)) {
+            bw = Math.abs(l_ref - l_query) + 3;
+            //System.out.printf("SC  cb=%d, bw=%d%n", cb, bw);
+        }
+        if (bw > cb) bw = cb;
+		if (bw < Math.abs(l_ref - l_query)) {
+            //int bwOld = bw;
+            bw = Math.abs(l_ref - l_query);
+            //System.out.printf("old bw is %d, new is %d%n", bwOld, bw);
+        }
+        //System.out.printf("c->bw = %d, bw = %d, l_ref = %d, l_query = %d\n", cb, bw, l_ref, l_query);
+		bw2 = bw * 2 + 1;
+
+        // allocate the forward and backward matrices f[][] and b[][] and the scaling array s[]
+		double[][] f = new double[l_query+1][bw2*3 + 6];
+		double[][] b = new double[l_query+1][bw2*3 + 6];
+		double[] s = new double[l_query+2];
+
+		// initialize transition probabilities
+		double sM, sI, bM, bI;
+		sM = sI = 1. / (2 * l_query + 2);
+        bM = (1 - cd) / l_ref; bI = cd / l_ref; // (bM+bI)*l_ref==1
+
+		// m[from*3 + to] transition matrix over the states M=0, I=1, D=2
+		double[] m = new double[9];
+		m[0*3+0] = (1 - cd - cd) * (1 - sM); m[0*3+1] = m[0*3+2] = cd * (1 - sM);
+		m[1*3+0] = (1 - ce) * (1 - sI); m[1*3+1] = ce * (1 - sI); m[1*3+2] = 0.;
+		m[2*3+0] = 1 - ce; m[2*3+1] = 0.; m[2*3+2] = ce;
+
+
+		/*** forward ***/
+		// f[0]
+		f[0][set_u(bw, 0, 0)] = s[0] = 1.;
+		{ // f[1]
+			double[] fi = f[1];
+			double sum;
+			int beg = 1, end = l_ref < bw + 1? l_ref : bw + 1, _beg, _end;
+			for (k = beg, sum = 0.; k <= end; ++k) {
+				int u;
+                double e = calcEpsilon(ref[k-1], query[qstart], _iqual[qstart]);
+				u = set_u(bw, 1, k);
+				fi[u+0] = e * bM; fi[u+1] = EI * bI;
+				sum += fi[u] + fi[u+1];
+			}
+			// rescale
+			s[1] = sum;
+			_beg = set_u(bw, 1, beg); _end = set_u(bw, 1, end); _end += 2;
+			for (k = _beg; k <= _end; ++k) fi[k] /= sum;
+		}
+
+		// f[2..l_query]
+		for (i = 2; i <= l_query; ++i) {
+			double[] fi = f[i], fi1 = f[i-1];
+			double sum;
+			int beg = 1, end = l_ref, x, _beg, _end;
+			byte qyi = query[qstart+i-1];
+			x = i - bw; beg = beg > x? beg : x; // band start
+			x = i + bw; end = end < x? end : x; // band end
+			for (k = beg, sum = 0.; k <= end; ++k) {
+				int u, v11, v01, v10;
+                double e = calcEpsilon(ref[k-1], qyi, _iqual[qstart+i-1]);
+				u = set_u(bw, i, k); v11 = set_u(bw, i-1, k-1); v10 = set_u(bw, i-1, k); v01 = set_u(bw, i, k-1);
+				fi[u+0] = e * (m[0] * fi1[v11+0] + m[3] * fi1[v11+1] + m[6] * fi1[v11+2]);
+				fi[u+1] = EI * (m[1] * fi1[v10+0] + m[4] * fi1[v10+1]);
+				fi[u+2] = m[2] * fi[v01+0] + m[8] * fi[v01+2];
+				sum += fi[u] + fi[u+1] + fi[u+2];
+				//System.out.println("("+i+","+k+";"+u+"): "+fi[u]+","+fi[u+1]+","+fi[u+2]);
+			}
+			// rescale
+			s[i] = sum;
+			_beg = set_u(bw, i, beg); _end = set_u(bw, i, end); _end += 2;
+			for (k = _beg, sum = 1./sum; k <= _end; ++k) fi[k] *= sum;
+		}
+		{ // f[l_query+1]
+			double sum;
+			for (k = 1, sum = 0.; k <= l_ref; ++k) {
+				int u = set_u(bw, l_query, k);
+				if (u < 3 || u >= bw2*3+3) continue;
+				sum += f[l_query][u+0] * sM + f[l_query][u+1] * sI;
+			}
+			s[l_query+1] = sum; // the last scaling factor
+		}
+
+		/*** backward ***/
+		// b[l_query] (b[l_query+1][0]=1 and thus \tilde{b}[][]=1/s[l_query+1]; this is where s[l_query+1] comes from)
+		for (k = 1; k <= l_ref; ++k) {
+			int u = set_u(bw, l_query, k);
+			double[] bi = b[l_query];
+			if (u < 3 || u >= bw2*3+3) continue;
+			bi[u+0] = sM / s[l_query] / s[l_query+1]; bi[u+1] = sI / s[l_query] / s[l_query+1];
+		}
+		// b[l_query-1..1]
+		for (i = l_query - 1; i >= 1; --i) {
+			int beg = 1, end = l_ref, x, _beg, _end;
+			double[] bi = b[i], bi1 = b[i+1];
+			double y = (i > 1)? 1. : 0.;
+			byte qyi1 = query[qstart+i];
+			x = i - bw; beg = beg > x? beg : x;
+			x = i + bw; end = end < x? end : x;
+			for (k = end; k >= beg; --k) {
+				int u, v11, v01, v10;
+				u = set_u(bw, i, k); v11 = set_u(bw, i+1, k+1); v10 = set_u(bw, i+1, k); v01 = set_u(bw, i, k+1);
+                final double e = (k >= l_ref? 0 : calcEpsilon(ref[k], qyi1, _iqual[qstart+i])) * bi1[v11];
+                bi[u+0] = e * m[0] + EI * m[1] * bi1[v10+1] + m[2] * bi[v01+2]; // bi1[v11] has been folded into e.
+				bi[u+1] = e * m[3] + EI * m[4] * bi1[v10+1];
+				bi[u+2] = (e * m[6] + m[8] * bi[v01+2]) * y;
+			}
+			// rescale
+			_beg = set_u(bw, i, beg); _end = set_u(bw, i, end); _end += 2;
+			for (k = _beg, y = 1./s[i]; k <= _end; ++k) bi[k] *= y;
+		}
+
+ 		double pb;
+		{ // b[0]
+			int beg = 1, end = l_ref < bw + 1? l_ref : bw + 1;
+			double sum = 0.;
+			for (k = end; k >= beg; --k) {
+				int u = set_u(bw, 1, k);
+                double e = calcEpsilon(ref[k-1], query[qstart], _iqual[qstart]);
+                if (u < 3 || u >= bw2*3+3) continue;
+				sum += e * b[1][u+0] * bM + EI * b[1][u+1] * bI;
+			}
+			pb = b[0][set_u(bw, 0, 0)] = sum / s[0]; // if everything works as is expected, pb == 1.0
+		}
+
+
+		/*** MAP ***/
+		for (i = 1; i <= l_query; ++i) {
+			double sum = 0., max = 0.;
+			final double[] fi = f[i], bi = b[i];
+			int beg = 1, end = l_ref, x, max_k = -1;
+			x = i - bw; beg = beg > x? beg : x;
+			x = i + bw; end = end < x? end : x;
+			for (k = beg; k <= end; ++k) {
+				final int u = set_u(bw, i, k);
+				double z;
+				sum += (z = fi[u+0] * bi[u+0]); if (z > max) { max = z; max_k = (k-1)<<2 | 0; }
+				sum += (z = fi[u+1] * bi[u+1]); if (z > max) { max = z; max_k = (k-1)<<2 | 1; }
+			}
+			max /= sum; sum *= s[i]; // if everything works as is expected, sum == 1.0
+			if (state != null) state[qstart+i-1] = max_k;
+			if (q != null) {
+				k = (int)(-4.343 * Math.log(1. - max) + .499); // = 10*log10(1-max)
+				q[qstart+i-1] = (byte)(k > 100? 99 : (k < minBaseQual ? minBaseQual : k));
+			}
+			//System.out.println("("+pb+","+sum+")"+" ("+(i-1)+","+(max_k>>2)+","+(max_k&3)+","+max+")");
+		}
+
+		return 0;
+	}
+
+    // ---------------------------------------------------------------------------------------------------------------
+    //
+    // Helper routines
+    //
+    // ---------------------------------------------------------------------------------------------------------------
+
+    /** decode the bit encoded state array values */
+    public static boolean stateIsIndel(int state) {
+        return (state & 3) != 0;
+    }
+
+    /** decode the bit encoded state array values */
+    public static int stateAlignedPosition(int state) {
+        return state >> 2;
+    }
+
+    /**
+     * helper routine for hmm_glocal: maps (query row i, reference column k) to the index of the
+     * corresponding 3-element (M,I,D) cell within the banded row array.
+     *
+     * @param b band width
+     * @param i query position (row)
+     * @param k reference position (column)
+     * @return index of the cell's first (M) entry in the banded row
+     */
+    private static int set_u(final int b, final int i, final int k) {
+		int x = i - b;
+		x = x > 0 ? x : 0;
+		return (k + 1 - x) * 3;
+	}
+
+    // ---------------------------------------------------------------------------------------------------------------
+    //
+    // Actually working with the BAQ tag now
+    //
+    // ---------------------------------------------------------------------------------------------------------------
+
+    /**
+     * Get the BAQ attribute from the tag in read.  Returns null if no BAQ tag is present.
+     * @param read the read to query
+     * @return the BQ tag bytes, or null if absent
+     */
+    public static byte[] getBAQTag(SAMRecord read) {
+        String s = read.getStringAttribute(BAQ_TAG);
+        return s != null ? s.getBytes() : null;
+    }
+
+    /**
+     * Encode the per-base BAQ values into the String form of the BQ tag for this read.
+     * Throws if a BAQ value exceeds the base quality (internal error) or if the encoded
+     * value overflows a byte (symptom of a mis-encoded BAM quality string).
+     */
+    public static String encodeBQTag(SAMRecord read, byte[] baq) {
+        // Offset to base alignment quality (BAQ), of the same length as the read sequence.
+        // At the i-th read base, BAQi = Qi - (BQi - 64) where Qi is the i-th base quality.
+        // so BQi = Qi - BAQi + 64
+        byte[] bqTag = new byte[baq.length];
+        for ( int i = 0; i < bqTag.length; i++) {
+            final int bq = (int)read.getBaseQualities()[i] + 64;
+            final int baq_i = (int)baq[i];
+            final int tag = bq - baq_i;
+            // problem with the calculation of the correction factor; this is our problem
+            if ( tag < 0 )
+                throw new ReviewedGATKException("BAQ tag calculation error.  BAQ value above base quality at " + read);
+            // the original quality is too high, almost certainly due to using the wrong encoding in the BAM file
+            if ( tag > Byte.MAX_VALUE )
+                throw new UserException.MisencodedBAM(read, "we encountered an extremely high quality score (" + (int)read.getBaseQualities()[i] + ") with BAQ correction factor of " + baq_i);
+            bqTag[i] = (byte)tag;
+        }
+        return new String(bqTag);
+    }
+
+    /** Writes the encoded BQ tag onto the read (see encodeBQTag). */
+    public static void addBAQTag(SAMRecord read, byte[] baq) {
+        read.setAttribute(BAQ_TAG, encodeBQTag(read, baq));
+    }
+
+
+    /**
+      * Returns true if the read has a BAQ tag, or false otherwise
+      * @param read the read to query
+      * @return true iff the BQ attribute is present
+      */
+    public static boolean hasBAQTag(SAMRecord read) {
+        return read.getStringAttribute(BAQ_TAG) != null;
+    }
+
+    /**
+     * Returns a new qual array for read that includes the BAQ adjustment.  Does not support on-the-fly BAQ calculation
+     *
+     * @param read the SAMRecord to operate on
+     * @param overwriteOriginalQuals If true, we replace the original qualities scores in the read with their BAQ'd version
+     * @param useRawQualsIfNoBAQTag If useRawQualsIfNoBAQTag is true, then if there's no BAQ annotation we just use the raw quality scores.  Throws IllegalStateException is false and no BAQ tag is present
+     * @return the BAQ-adjusted qualities (aliases the read's quals when overwriteOriginalQuals is true)
+     */
+    public static byte[] calcBAQFromTag(SAMRecord read, boolean overwriteOriginalQuals, boolean useRawQualsIfNoBAQTag) {
+        byte[] rawQuals = read.getBaseQualities();
+        byte[] newQuals = rawQuals;
+        byte[] baq = getBAQTag(read);
+
+        if ( baq != null ) {
+            // Offset to base alignment quality (BAQ), of the same length as the read sequence.
+            // At the i-th read base, BAQi = Qi - (BQi - 64) where Qi is the i-th base quality.
+            newQuals = overwriteOriginalQuals ? rawQuals : new byte[rawQuals.length];
+            for ( int i = 0; i < rawQuals.length; i++) {
+                int rawQual = (int)rawQuals[i];
+                int baq_delta = (int)baq[i] - 64;
+                int newval =  rawQual - baq_delta;
+                if ( newval < 0 )
+                    throw new UserException.MalformedBAM(read, "BAQ tag error: the BAQ value is larger than the base quality");
+                newQuals[i] = (byte)newval;
+            }
+        } else if ( ! useRawQualsIfNoBAQTag ) {
+            throw new IllegalStateException("Required BAQ tag to be present, but none was on read " + read.getReadName());
+        }
+
+        return newQuals;
+    }
+
+    /**
+     * Returns the BAQ adjusted quality score for this read at this offset.  Does not support on-the-fly BAQ calculation
+     *
+     * @param read the SAMRecord to operate on
+     * @param offset the offset of operate on
+     * @param useRawQualsIfNoBAQTag If useRawQualsIfNoBAQTag is true, then if there's no BAQ annotation we just use the raw quality scores.  Throws IllegalStateException is false and no BAQ tag is present
+     * @return the BAQ-adjusted quality for the base at offset
+     */
+    public static byte calcBAQFromTag(SAMRecord read, int offset, boolean useRawQualsIfNoBAQTag) {
+        byte rawQual = read.getBaseQualities()[offset];
+        byte newQual = rawQual;
+        byte[] baq = getBAQTag(read);
+
+        if ( baq != null ) {
+            // Offset to base alignment quality (BAQ), of the same length as the read sequence.
+            // At the i-th read base, BAQi = Qi - (BQi - 64) where Qi is the i-th base quality.
+            int baq_delta = (int)baq[offset] - 64;
+            int newval =  rawQual - baq_delta;
+            if ( newval < 0 )
+                throw new UserException.MalformedBAM(read, "BAQ tag error: the BAQ value is larger than the base quality");
+            newQual = (byte)newval;
+
+        } else if ( ! useRawQualsIfNoBAQTag ) {
+            throw new IllegalStateException("Required BAQ tag to be present, but none was on read " + read.getReadName());
+        }
+
+        return newQual;
+    }
+
+    /** Holder for the inputs and outputs of one hmm_glocal run over a read. */
+    public static class BAQCalculationResult {
+        // refBases/readBases/rawQuals are the inputs; bq and state are filled in by hmm_glocal
+        public byte[] refBases, rawQuals, readBases, bq;
+        public int[] state;
+
+        public BAQCalculationResult(SAMRecord read, byte[] ref) {
+            this(read.getBaseQualities(), read.getReadBases(), ref);
+        }
+
+        public BAQCalculationResult(byte[] bases, byte[] quals, byte[] ref) {
+            // prepares data for calculation
+            rawQuals = quals;
+            readBases = bases;
+
+            // now actually prepare the data structures, and fire up the hmm
+            bq = new byte[rawQuals.length];
+            state = new int[rawQuals.length];
+            this.refBases = ref;
+        }
+    }
+
+    /**
+     * Fetch the padded reference window for the read from refReader and run the HMM.
+     * Returns null if the padded window would run past the end of the read's contig.
+     */
+     public BAQCalculationResult calcBAQFromHMM(SAMRecord read, IndexedFastaSequenceFile refReader) {
+        // start is alignment start - band width / 2 - size of first I element, if there is one.  Stop is similar
+        int offset = getBandWidth() / 2;
+        long readStart = includeClippedBases ? read.getUnclippedStart() : read.getAlignmentStart();
+        long start = Math.max(readStart - offset - ReadUtils.getFirstInsertionOffset(read), 1);
+        long stop = (includeClippedBases ? read.getUnclippedEnd() : read.getAlignmentEnd()) + offset + ReadUtils.getLastInsertionOffset(read);
+
+        if ( stop > refReader.getSequenceDictionary().getSequence(read.getReferenceName()).getSequenceLength() ) {
+            return null;
+        } else {
+            // now that we have the start and stop, get the reference sequence covering it
+            ReferenceSequence refSeq = refReader.getSubsequenceAt(read.getReferenceName(), start, stop);
+            return calcBAQFromHMM(read, refSeq.getBases(), (int)(start - readStart));
+        }
+    }
+
+//    final SimpleTimer total = new SimpleTimer();
+//    final SimpleTimer local = new SimpleTimer();
+//    int n = 0;
+    /** Run the HMM over the [queryStart, queryEnd) slice of the read bases against ref. */
+    public BAQCalculationResult calcBAQFromHMM(byte[] ref, byte[] query, byte[] quals, int queryStart, int queryEnd ) {
+//        total.restart();
+        if ( queryStart < 0 ) throw new ReviewedGATKException("BUG: queryStart < 0: " + queryStart);
+        if ( queryEnd < 0 ) throw new ReviewedGATKException("BUG: queryEnd < 0: " + queryEnd);
+        if ( queryEnd < queryStart ) throw new ReviewedGATKException("BUG: queryStart < queryEnd : " + queryStart + " end =" + queryEnd);
+
+        // note -- assumes ref is offset from the *CLIPPED* start
+        BAQCalculationResult baqResult = new BAQCalculationResult(query, quals, ref);
+        int queryLen = queryEnd - queryStart;
+//        local.restart();
+        hmm_glocal(baqResult.refBases, baqResult.readBases, queryStart, queryLen, baqResult.rawQuals, baqResult.state, baqResult.bq);
+//        local.stop();
+//        total.stop();
+//        if ( n++ % 100000 == 0 )
+//            logger.info("n = " + n + ": Total " + total.getElapsedTimeNano() + " local " + local.getElapsedTimeNano());
+        return baqResult;
+    }
+
+
+    /**
+     * Determine the appropriate start and stop offsets in the reads for the bases given the cigar string
+     * @param read the read whose CIGAR is walked
+     * @return (start, stop) read offsets, or null if the read has an N element or is fully clipped away
+     */
+    private final Pair<Integer,Integer> calculateQueryRange(SAMRecord read) {
+        int queryStart = -1, queryStop = -1;
+        int readI = 0;
+
+        // iterate over the cigar elements to determine the start and stop of the read bases for the BAQ calculation
+        for ( CigarElement elt : read.getCigar().getCigarElements() ) {
+            switch (elt.getOperator()) {
+                case N:  return null; // cannot handle these
+                case H : case P : case D: break; // ignore pads, hard clips, and deletions
+                case I : case S: case M: case EQ: case X:
+                    int prev = readI;
+                    readI += elt.getLength();
+                    if ( includeClippedBases || elt.getOperator() != CigarOperator.S) {
+                        if ( queryStart == -1 )
+                            queryStart = prev;
+                        queryStop = readI;
+                    }
+                    // in the else case we aren't including soft clipped bases, so we don't update
+                    // queryStart or queryStop
+                    break;
+                default: throw new ReviewedGATKException("BUG: Unexpected CIGAR element " + elt + " in read " + read.getReadName());
+            }
+        }
+
+        if ( queryStop == queryStart ) {
+            // this read is completely clipped away, and yet is present in the file for some reason
+            // usually they are flagged as non-PF, but it's possible to push them through the BAM
+            //System.err.printf("WARNING -- read is completely clipped away: " + read.format());
+            return null;
+        }
+
+        return new Pair<Integer, Integer>(queryStart, queryStop);
+    }
+
+    // we need to pad ref by at least the bandwidth / 2 on either side
+    public BAQCalculationResult calcBAQFromHMM(SAMRecord read, byte[] ref, int refOffset) {
+        // todo -- need to handle the case where the cigar sum of lengths doesn't cover the whole read
+        Pair<Integer, Integer> queryRange = calculateQueryRange(read);
+        if ( queryRange == null ) return null; // read has Ns, or is completely clipped away
+
+        int queryStart = queryRange.getFirst();
+        int queryEnd = queryRange.getSecond();
+
+        BAQCalculationResult baqResult = calcBAQFromHMM(ref, read.getReadBases(), read.getBaseQualities(), queryStart, queryEnd);
+
+        // cap quals: walk the CIGAR and apply capBaseByBAQ to aligned bases, pass raw quals
+        // through for inserted/soft-clipped bases
+        int readI = 0, refI = 0;
+        for ( CigarElement elt : read.getCigar().getCigarElements() ) {
+            int l = elt.getLength();
+            switch (elt.getOperator()) {
+                case N: // cannot handle these
+                    return null;
+                case H : case P : // ignore pads and hard clips
+                    break;
+                // NOTE: deliberate fall-through from S into I below
+                case S : refI += l; // move the reference too, in addition to I
+                case I :
+                    // todo -- is it really the case that we want to treat I and S the same?
+                    for ( int i = readI; i < readI + l; i++ ) baqResult.bq[i] = baqResult.rawQuals[i];
+                    readI += l;
+                    break;
+                case D : refI += l; break;
+                case M : case EQ: case X:
+                    for (int i = readI; i < readI + l; i++) {
+                        int expectedPos = refI - refOffset + (i - readI);
+                        baqResult.bq[i] = capBaseByBAQ( baqResult.rawQuals[i], baqResult.bq[i], baqResult.state[i], expectedPos );
+                    }
+                    readI += l; refI += l;
+                    break;
+                default:
+                    throw new ReviewedGATKException("BUG: Unexpected CIGAR element " + elt + " in read " + read.getReadName());
+            }
+        }
+        if ( readI != read.getReadLength() ) // odd cigar string
+            System.arraycopy(baqResult.rawQuals, 0, baqResult.bq, 0, baqResult.bq.length);
+
+        return baqResult;
+    }
+
+    /**
+     * Cap a single base's quality by its BAQ result: if the MAP state is an indel, or the base
+     * does not align to its expected reference position, drop to minBaseQual; otherwise take
+     * min(bq, oq).
+     */
+    public byte capBaseByBAQ( byte oq, byte bq, int state, int expectedPos ) {
+        byte b;
+        boolean isIndel = stateIsIndel(state);
+        int pos = stateAlignedPosition(state);
+        if ( isIndel || pos != expectedPos ) // we are an indel or we don't align to our best current position
+            b = minBaseQual; // just take b = minBaseQuality
+        else
+            b = bq < oq ? bq : oq;
+
+        return b;
+    }
+
+    /**
+     * Modifies read in place so that the base quality scores are capped by the BAQ calculation.  Uses the BAQ
+     * tag if present already and alwaysRecalculate is false, otherwise fires up the HMM and does the BAQ on the fly
+     * using the refReader to obtain the reference bases as needed.
+     *
+     * @param read the read to (possibly) modify
+     * @param refReader source of reference bases for on-the-fly calculation
+     * @param calculationType OFF / CALCULATE_AS_NECESSARY / RECALCULATE
+     * @param qmode where the BAQ result goes: tag, quals, or neither
+     * @return BQ qualities for use, in case qmode is DONT_MODIFY
+     */
+    public byte[] baqRead(SAMRecord read, IndexedFastaSequenceFile refReader, CalculationMode calculationType, QualityMode qmode ) {
+        if ( DEBUG ) System.out.printf("BAQ %s read %s%n", calculationType, read.getReadName());
+
+        byte[] BAQQuals = read.getBaseQualities();      // in general we are overwriting quals, so just get a pointer to them
+        if ( calculationType == CalculationMode.OFF) { // we don't want to do anything
+            ; // just fall though
+        } else if ( excludeReadFromBAQ(read) ) {
+            ; // just fall through
+        } else {
+            final boolean readHasBAQTag = hasBAQTag(read);
+
+            if ( calculationType == CalculationMode.RECALCULATE || ! readHasBAQTag ) {
+                if ( DEBUG ) System.out.printf("  Calculating BAQ on the fly%n");
+                BAQCalculationResult hmmResult = calcBAQFromHMM(read, refReader);
+                if ( hmmResult != null ) {
+                    switch ( qmode ) {
+                        case ADD_TAG:         addBAQTag(read, hmmResult.bq); break;
+                        case OVERWRITE_QUALS: System.arraycopy(hmmResult.bq, 0, read.getBaseQualities(), 0, hmmResult.bq.length); break;
+                        case DONT_MODIFY:     BAQQuals = hmmResult.bq; break;
+                        default:              throw new ReviewedGATKException("BUG: unexpected qmode " + qmode);
+                    }
+                } else if ( readHasBAQTag ) {
+                    // remove the BAQ tag if it's there because we cannot trust it
+                    read.setAttribute(BAQ_TAG, null);
+                }
+            } else if ( qmode == QualityMode.OVERWRITE_QUALS ) { // only makes sense if we are overwriting quals
+                if ( DEBUG ) System.out.printf("  Taking BAQ from tag%n");
+                // this overwrites the original qualities
+                calcBAQFromTag(read, true, false);
+            }
+        }
+
+        return BAQQuals;
+    }
+
+    /**
+     * Returns true if we don't think this read is eligible for the BAQ calculation.  Examples include non-PF reads,
+     * duplicates, or unmapped reads.  Used by baqRead to determine if a read should fall through the calculation.
+     *
+     * @param read the read to test
+     * @return true iff the read is unmapped, fails vendor QC, or is a duplicate
+     */
+    public boolean excludeReadFromBAQ(SAMRecord read) {
+        // keeping mapped reads, regardless of pairing status, or primary alignment status.
+        return read.getReadUnmappedFlag() || read.getReadFailsVendorQualityCheckFlag() || read.getDuplicateReadFlag();
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/classloader/JVMUtils.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/classloader/JVMUtils.java
new file mode 100644
index 0000000..329dfa1
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/classloader/JVMUtils.java
@@ -0,0 +1,309 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.classloader;
+
+import org.broadinstitute.gatk.utils.Utils;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+import org.broadinstitute.gatk.utils.exceptions.GATKException;
+import org.reflections.util.ClasspathHelper;
+
+import java.io.File;
+import java.io.IOException;
+import java.lang.reflect.*;
+import java.net.URL;
+import java.util.*;
+
+/**
+ * Created by IntelliJ IDEA.
+ * User: hanna
+ * Date: Mar 30, 2009
+ * Time: 5:38:05 PM
+ *
+ * A set of static utility methods for determining information about this runtime environment.
+ * Introspects classes, loads jars, etc.
+ */
+public class JVMUtils {
+    /**
+     * Constructor access disallowed...static utility methods only!
+     */
+    private JVMUtils() { }
+
+    /**
+     * Determines which location contains the specified class.
+     * @param clazz The specified class.
+     * @return Location (either jar file or directory) of path containing class.
+     * @throws IOException when the URI cannot be found.
+     */
+    public static File getLocationFor( Class clazz ) throws IOException {
+        try {
+            java.net.URI locationURI = clazz.getProtectionDomain().getCodeSource().getLocation().toURI();
+            return new File(locationURI);
+        }
+        catch (java.net.URISyntaxException ex) {
+            // a URISyntaxException here must be an IO error; wrap as such.
+            throw new IOException(ex);
+        }
+        catch ( NullPointerException ne ) {
+        	throw new IOException("Can not extract code source location for "+clazz.getName());
+        }
+    }    
+
+    /**
+     * Is the specified class a concrete implementation of baseClass?
+     * @param clazz Class to check.
+     * @return True if clazz is concrete.  False otherwise.
+     */
+    public static boolean isConcrete( Class clazz ) {
+        return !Modifier.isAbstract(clazz.getModifiers()) &&
+               !Modifier.isInterface(clazz.getModifiers());
+    }
+
+    /**
+     * Is the specified class anonymous?  The plugin manager (for one) generally requires that
+     * plugin classes be named so that they can easily be specified from the command line.
+     * @param clazz The class on which to perform the anonymous check.
+     * @return True if the class is anonymous; false otherwise.
+     */
+    public static boolean isAnonymous(Class clazz) {
+        return clazz.isAnonymousClass();
+    }
+
+    /**
+     * Retrieve all fields available in this object, regardless of where they are declared or
+     * whether they're accessible.
+     * @param type Type to inspect for fields.
+     * @return A list of all available fields.
+     */
+    public static List<Field> getAllFields(Class type) {
+        List<Field> allFields = new ArrayList<Field>();
+        while( type != null ) {
+            allFields.addAll(Arrays.asList(type.getDeclaredFields()));
+            type = type.getSuperclass();
+        }
+        return allFields;
+    }
+
+    /**
+     * Find the field with the given name in the class.  Will inspect all fields, independent
+     * of access level.
+     * @param type Class in which to search for the given field.
+     * @param fieldName Name of the field for which to search.
+     * @return The field, or null if no such field exists.
+     */
+    public static Field findField( Class type, String fieldName ) {
+        while( type != null ) {
+            Field[] fields = type.getDeclaredFields();
+            for( Field field: fields ) {
+                if( field.getName().equals(fieldName) )
+                    return field;
+            }
+            type = type.getSuperclass();
+        }
+        return null;
+    }
+
+    /**
+     * Sets the provided field in the given instance to the given value.  Circumvents access restrictions:
+     * a field can be private and still set successfully by this function.
+     * @param field Field to set in the given object.
+     * @param instance Instance in which to set the field.
+     * @param value The value to which to set the given field in the given instance.
+     */
+    public static void setFieldValue( Field field, Object instance, Object value ) {
+        try {
+            field.setAccessible(true);
+            field.set(instance, value);
+        }
+        catch( IllegalAccessException ex ) {
+            throw new ReviewedGATKException(String.format("Could not set %s in instance %s to %s",field.getName(),instance.getClass().getName(),value.toString()));
+        }
+    }
+
+    /**
+     * Gets the value stored in the provided field in the given instance.
+     * @param field Field to set in the given object.
+     * @param instance Instance in which to set the field.
+     * @return Value currently stored in the given field.
+     */
+    public static Object getFieldValue( Field field, Object instance ) {
+        try {
+            field.setAccessible(true);
+            return field.get(instance);
+        }
+        catch( IllegalAccessException ex ) {
+            throw new ReviewedGATKException(String.format("Could not retrieve %s in instance %s",field.getName(),instance.getClass().getName()));
+        }
+    }
+
+    /**
+     * Gets a single object in the list matching or type-compatible with the given type.  Exceptions out if multiple objects match. 
+     * @param objectsToFilter objects to filter.
+     * @param type The desired type.
+     * @param <T> The selected type.
+     * @return A collection of the given arguments with the specified type.
+     */
+    public static <T> T getObjectOfType(Collection<Object> objectsToFilter, Class<T> type) {
+        // TODO: Make JVM utils.
+        Collection<T> selectedObjects = getObjectsOfType(objectsToFilter,type);
+        if(selectedObjects.size() > 1)
+            throw new ReviewedGATKException("User asked for a single instance of the type, multiple were present");
+        if(selectedObjects.size() == 0)
+            throw new ReviewedGATKException("User asked for a single instance of the type, but none were present");
+        return selectedObjects.iterator().next();
+    }
+
+    /**
+     * Gets a collection of all objects in the list matching or type-compatible with the given type.
+     * @param objectsToFilter objects to filter.
+     * @param type The desired type.
+     * @param <T> Again, the desired type.  Used so that clients can ignore type safety.
+     * @return A collection of the given arguments with the specified type.
+     */
+    @SuppressWarnings("unchecked")
+    public static <T> Collection<T> getObjectsOfType(Collection<Object> objectsToFilter, Class<T> type) {
+        Collection<T> selectedObjects = new ArrayList<T>();
+        for(Object object: objectsToFilter) {
+            if(type.isAssignableFrom(object.getClass()))
+                selectedObjects.add((T)object);
+        }
+        return selectedObjects;
+    }
+
+    /**
+     * Returns the list of class path urls.
+     * @return the list of class path urls.
+     */
+    public static Set<URL> getClasspathURLs() {
+        return ClasspathHelper.forManifest();
+    }
+
+    /**
+     * Adds all the generic types from a class definition to the collection.
+     * Does not inspect the methods or fields, only the class.
+     * @param classes Set to collect the classes.
+     * @param type Type to inspect.
+     */
+    public static void addGenericTypes(Set<Class<?>> classes, Type type) {
+        if (type instanceof ParameterizedType) {
+            ParameterizedType parameterizedType = (ParameterizedType)type;
+            for (Type actualType: parameterizedType.getActualTypeArguments())
+                addGenericTypes(classes, actualType);
+        } else if (type instanceof GenericArrayType) {
+            addGenericTypes(classes, ((GenericArrayType)type).getGenericComponentType());
+        } else if (type instanceof WildcardType) {
+            WildcardType wildcardType = (WildcardType)type;
+            for (Type upperType: wildcardType.getUpperBounds())
+                addGenericTypes(classes, upperType);
+            for (Type lowerType: wildcardType.getLowerBounds())
+                addGenericTypes(classes, lowerType);
+        } else if (type instanceof Class<?>) {
+            classes.add((Class<?>) type);
+        } else {
+            throw new GATKException("Unknown type: " + type + " (" + type.getClass().getName() + ")");
+        }
+    }
+
+    public static Class getParameterizedTypeClass(Type t) {
+        if ( t instanceof ParameterizedType ) {
+            ParameterizedType parameterizedType = (ParameterizedType)t;
+            if ( parameterizedType.getActualTypeArguments().length != 1 )
+                throw new ReviewedGATKException("BUG: more than 1 generic type found on class" + t);
+            return (Class)parameterizedType.getActualTypeArguments()[0];
+        } else
+            throw new ReviewedGATKException("BUG: could not find generic type on class " + t);
+    }
+
+    /**
+     * Returns a comma-separated list of the names of the interfaces implemented by this class
+     *
+     * @param covClass class
+     * @return names of interfaces
+     */
+    public static String classInterfaces(final Class covClass) {
+        final List<String> interfaces = new ArrayList<String>();
+        for ( final Class interfaceClass : covClass.getInterfaces() )
+            interfaces.add(interfaceClass.getSimpleName());
+        return Utils.join(", ", interfaces);
+    }
+
+    /**
+     * Returns the Class that invoked the specified "callee" class by examining the runtime stack.
+     * The calling class is defined as the first class below the callee class on the stack.
+     *
+     * For example, given callee == MyClass and the following runtime stack:
+     *
+     * JVMUtils.getCallingClass(MyClass) <-- top
+     * MyClass.foo()
+     * MyClass.bar()
+     * OtherClass.foo()
+     * OtherClass.bar()
+     * etc.
+     *
+     * this method would return OtherClass, since its methods invoked the methods in MyClass.
+     *
+     * Considers only the occurrence of the callee class on the stack that is closest to the top
+     * (even if there are multiple, non-contiguous occurrences).
+     *
+     * @param callee Class object for the class whose calling class we want to locate
+     * @return Class object for the class that invoked the callee class, or null if
+     *         no calling class was found
+     * @throws IllegalArgumentException if the callee class is not found on the runtime stack
+     * @throws IllegalStateException if we get an error while trying to load the Class object for the calling
+     *                               class reported on the runtime stack
+     */
+    public static Class getCallingClass( final Class callee ) {
+        final StackTraceElement[] stackTrace = new Throwable().getStackTrace();
+        final String calleeClassName = callee.getName();
+
+        // Start examining the stack at the second-from-the-top position, to remove
+        // this method call (ie., the call to getCallingClass() itself) from consideration.
+        int stackTraceIndex = 1;
+
+        // Find the first occurrence of the callee on the runtime stack. Need to use String comparison
+        // unfortunately, due to limitations of the StackTraceElement class.
+        while ( stackTraceIndex < stackTrace.length && ! stackTrace[stackTraceIndex].getClassName().equals(calleeClassName) ) {
+            stackTraceIndex++;
+        }
+
+        // Make sure we actually found the callee class on the stack
+        if ( stackTraceIndex == stackTrace.length ) {
+            throw new IllegalArgumentException(String.format("Specified callee %s is not present on the call stack", callee.getSimpleName()));
+        }
+
+        // Now find the caller class, which will be the class below the callee on the stack
+        while ( stackTraceIndex < stackTrace.length && stackTrace[stackTraceIndex].getClassName().equals(calleeClassName) ) {
+            stackTraceIndex++;
+        }
+
+        try {
+            return stackTraceIndex < stackTrace.length ? Class.forName(stackTrace[stackTraceIndex].getClassName()) : null;
+        }
+        catch ( ClassNotFoundException e ) {
+            throw new IllegalStateException(String.format("Could not find caller class %s from the runtime stack in the classpath",
+                                                          stackTrace[stackTraceIndex].getClassName()));
+        }
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/classloader/PluginManager.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/classloader/PluginManager.java
new file mode 100644
index 0000000..7ef7a15
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/classloader/PluginManager.java
@@ -0,0 +1,356 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.classloader;
+
+import org.apache.log4j.Level;
+import org.apache.log4j.Logger;
+import org.broadinstitute.gatk.utils.exceptions.DynamicClassResolutionException;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+import org.broadinstitute.gatk.utils.exceptions.UserException;
+import org.reflections.Reflections;
+import org.reflections.scanners.SubTypesScanner;
+import org.reflections.util.ConfigurationBuilder;
+
+import java.io.File;
+import java.lang.reflect.Constructor;
+import java.lang.reflect.Method;
+import java.net.MalformedURLException;
+import java.net.URL;
+import java.net.URLClassLoader;
+import java.util.*;
+
+/**
+ * Manage plugins and plugin configuration.
+ * @author mhanna
+ * @version 0.1
+ */
+public class PluginManager<PluginType> {
+    /**
+     * A reference into our introspection utility.
+     */
+    private static final Reflections defaultReflections;
+
+    static {
+        // turn off logging in the reflections library - they talk too much
+        Reflections.log = null;
+
+        Set<URL> classPathUrls = new LinkedHashSet<URL>();
+
+        URL cwd;
+        try {
+            cwd = new File("").getAbsoluteFile().toURI().toURL();
+        } catch (MalformedURLException e) {
+            throw new RuntimeException(e);
+        }
+
+        // NOTE: Reflections also scans directories for classes.
+        // Meanwhile some of the jar MANIFEST.MF Bundle-ClassPath properties contain "."
+        // Do NOT let reflections scan the CWD where it often picks up test classes when
+        // they weren't explicitly in the classpath, for example the UninstantiableWalker
+        for (URL url: JVMUtils.getClasspathURLs())
+            if (!url.equals(cwd))
+                classPathUrls.add(url);
+
+        defaultReflections = new Reflections( new ConfigurationBuilder()
+            .setUrls(classPathUrls)
+            .setScanners(new SubTypesScanner()));
+    }
+
+    /**
+     * Defines the category of plugin defined by the subclass.
+     */
+    protected final String pluginCategory;
+
+    /**
+     * Define common strings to trim off the end of the name.
+     */
+    protected final String pluginSuffix;
+    
+    /**
+     * Plugins stored based on their name.
+     */
+    private final SortedMap<String, Class<? extends PluginType>> pluginsByName;
+
+    private final List<Class<? extends PluginType>> plugins;
+    private final List<Class<? extends PluginType>> interfaces;
+
+    /**
+     * Create a new plugin manager.
+     * @param pluginType Core type for a plugin.
+     */
+    public PluginManager(Class pluginType) {
+        this(pluginType, pluginType.getSimpleName().toLowerCase(), pluginType.getSimpleName(), null);
+    }
+
+    /**
+     * Create a new plugin manager.
+     * @param pluginType Core type for a plugin.
+     * @param classpath Custom class path to search for classes.
+     */
+    public PluginManager(Class pluginType, List<URL> classpath) {
+        this(pluginType, pluginType.getSimpleName().toLowerCase(), pluginType.getSimpleName(), classpath);
+    }
+
+    /**
+     * Create a new plugin manager.
+     * @param pluginType Core type for a plugin.
+     * @param pluginCategory Provides a category name to the plugin.  Must not be null.
+     * @param pluginSuffix Provides a suffix that will be trimmed off when converting to a plugin name.  Can be null.
+     */
+    public PluginManager(Class pluginType, String pluginCategory, String pluginSuffix) {
+        this(pluginType, pluginCategory, pluginSuffix, null);
+    }
+
+    /**
+     * Create a new plugin manager.
+     * @param pluginType Core type for a plugin.
+     * @param pluginCategory Provides a category name to the plugin.  Must not be null.
+     * @param pluginSuffix Provides a suffix that will be trimmed off when converting to a plugin name.  Can be null.
+     * @param classpath Custom class path to search for classes.
+     */
+    public PluginManager(Class pluginType, String pluginCategory, String pluginSuffix, List<URL> classpath) {
+        this.pluginCategory = pluginCategory;
+        this.pluginSuffix = pluginSuffix;
+
+        this.plugins = new ArrayList<Class<? extends PluginType>>();
+        this.interfaces = new ArrayList<Class<? extends PluginType>>();
+
+        Reflections reflections;
+        if (classpath == null) {
+            reflections = defaultReflections;
+        } else {
+            addClasspath(classpath);
+            reflections = new Reflections( new ConfigurationBuilder()
+                .setUrls(classpath)
+                .setScanners(new SubTypesScanner()));
+        }
+
+        // Load all classes types filtering them by concrete.
+        @SuppressWarnings("unchecked")
+        Set<Class<? extends PluginType>> allTypes = reflections.getSubTypesOf(pluginType);
+        for( Class<? extends PluginType> type: allTypes ) {
+            // The plugin manager does not support anonymous classes; to be a plugin, a class must have a name.
+            if(JVMUtils.isAnonymous(type))
+                continue;
+
+            if( JVMUtils.isConcrete(type) )
+                plugins.add(type);
+            else
+                interfaces.add(type);
+        }
+
+        pluginsByName = new TreeMap<String, Class<? extends PluginType>>();
+        for (Class<? extends PluginType> pluginClass : plugins) {
+            String pluginName = getName(pluginClass);
+            pluginsByName.put(pluginName, pluginClass);
+        }
+
+        // sort the plugins so the order of elements is deterministic
+        sortPlugins(plugins);
+        sortPlugins(interfaces);
+    }
+
+    /**
+     * Sorts, in place, the list of plugins according to getName() on each element
+     *
+     * @param unsortedPlugins unsorted plugins
+     */
+    private void sortPlugins(final List<Class<? extends PluginType>> unsortedPlugins) {
+        Collections.sort(unsortedPlugins, new ComparePluginsByName());
+    }
+
+    private final class ComparePluginsByName implements Comparator<Class<? extends PluginType>> {
+        @Override
+        public int compare(final Class<? extends PluginType> aClass, final Class<? extends PluginType> aClass1) {
+            String pluginName1 = getName(aClass);
+            String pluginName2 = getName(aClass1);
+            return pluginName1.compareTo(pluginName2);
+        }
+    }
+
+    /**
+     * Adds the URL to the system class loader classpath using reflection.
+     * HACK: Uses reflection to modify the class path, and assumes loader is a URLClassLoader.
+     * @param urls URLs to add to the system class loader classpath.
+     */
+    private static void addClasspath(List<URL> urls) {
+      Set<URL> existing = JVMUtils.getClasspathURLs();
+      for (URL url : urls) {
+          if (existing.contains(url))
+            continue;
+          try {
+              Method method = URLClassLoader.class.getDeclaredMethod("addURL", URL.class);
+              if (!method.isAccessible())
+                  method.setAccessible(true);
+              method.invoke(ClassLoader.getSystemClassLoader(), url);
+          } catch (Exception e) {
+              throw new ReviewedGATKException("Error adding url to the current classloader.", e);
+          }
+      }
+    }
+    
+    public Map<String, Class<? extends PluginType>> getPluginsByName() {
+        return Collections.unmodifiableMap(pluginsByName);
+    }
+
+    /**
+     * Does a plugin with the given name exist?
+     *
+     * @param pluginName Name of the plugin for which to search.
+     * @return True if the plugin exists, false otherwise.
+     */
+    public boolean exists(String pluginName) {
+        return pluginsByName.containsKey(pluginName);
+    }
+
+    /**
+     * Does a plugin with the given name exist?
+     *
+     * @param plugin Name of the plugin for which to search.
+     * @return True if the plugin exists, false otherwise.
+     */
+    public boolean exists(Class<? extends PluginType> plugin) {
+        return pluginsByName.containsValue(plugin);
+    }
+
+    /**
+     * Returns the plugin classes
+     * @return the plugin classes
+     */
+    public List<Class<? extends PluginType>> getPlugins() {
+        return plugins;
+    }
+
+    /**
+     * Returns the interface classes
+     * @return the interface classes
+     */
+    public List<Class<? extends PluginType>> getInterfaces() {
+        return interfaces;
+    }
+
+    /**
+     * Returns the plugin classes implementing interface or base clase
+     * @param type type of interface or base class
+     * @return the plugin classes implementing interface or base class
+     */
+    public List<Class<? extends PluginType>> getPluginsImplementing(Class<?> type) {
+        List<Class<? extends PluginType>> implementing = new ArrayList<Class<? extends PluginType>>();
+        for (Class<? extends PluginType> plugin: getPlugins())
+            if (type.isAssignableFrom(plugin))
+                implementing.add(plugin);
+        return implementing;
+    }
+
+
+
+    /**
+     * Gets a plugin with the given name
+     *
+     * @param pluginName Name of the plugin to retrieve.
+     * @return The plugin object if found; null otherwise.
+     */
+    public PluginType createByName(String pluginName) {
+        Class<? extends PluginType> plugin = pluginsByName.get(pluginName);
+        if( plugin == null ) {
+            String errorMessage = formatErrorMessage(pluginCategory,pluginName);
+            throw createMalformedArgumentException(errorMessage);
+        }
+        try {
+            return plugin.newInstance();
+        } catch (Exception e) {
+            throw new DynamicClassResolutionException(plugin, e);
+        }
+    }
+
+    /**
+     * create a plugin with the given type
+     *
+     * @param pluginType type of the plugin to create.
+     * @return The plugin object if created; null otherwise.
+     */
+    public PluginType createByType(Class<? extends PluginType> pluginType) {
+        Logger logger = Logger.getLogger(PluginManager.class);
+        logger.setLevel(Level.ERROR);
+        try {
+            Constructor<? extends PluginType> noArgsConstructor = pluginType.getDeclaredConstructor((Class[])null);
+            noArgsConstructor.setAccessible(true);
+            return noArgsConstructor.newInstance();
+        } catch (Exception e) {
+            logger.error("Couldn't initialize the plugin. Typically this is because of wrong global class variable initializations.");
+            throw new DynamicClassResolutionException(pluginType, e);
+        }
+    }
+
+    /**
+     * Returns concrete instances of the plugins
+     * @return concrete instances of the plugins
+     */
+    public List<PluginType> createAllTypes() {
+        List<PluginType> instances = new ArrayList<PluginType>();
+        for ( Class<? extends PluginType> c : getPlugins() ) {
+            instances.add(createByType(c));
+        }
+        return instances;
+    }
+
+    /**
+     * Create a name for this type of plugin.
+     *
+     * @param pluginType The type of plugin.
+     * @return A name for this type of plugin.
+     */
+    public String getName(Class pluginType) {
+        String pluginName = "";
+
+        if (pluginName.length() == 0) {
+            pluginName = pluginType.getSimpleName();
+            if (pluginSuffix != null && pluginName.endsWith(pluginSuffix))
+                pluginName = pluginName.substring(0, pluginName.lastIndexOf(pluginSuffix));
+        }
+
+        return pluginName;
+    }
+
+    /**
+     * Generate the error message for the plugin manager. The message is allowed to depend on the class.
+     * @param pluginCategory - string, the category of the plugin (e.g. read filter)
+     * @param pluginName - string, what we were trying to match (but failed to)
+     * @return error message text describing the error
+     */
+    protected String formatErrorMessage(String pluginCategory, String pluginName ) {
+        return String.format("Could not find %s with name: %s", pluginCategory,pluginName);
+    }
+
+    /**
+     * Creates a UserException with the appropriate message for this instance.
+     * @param errorMessage formatted error message from formatErrorMessage().
+     * @return A UserException with the error message.
+     */
+    protected UserException createMalformedArgumentException(final String errorMessage) {
+        throw new UserException.CommandLineException(errorMessage);
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/classloader/ProtectedPackageSource.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/classloader/ProtectedPackageSource.java
new file mode 100644
index 0000000..7a2b536
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/classloader/ProtectedPackageSource.java
@@ -0,0 +1,28 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.classloader;
+
/**
 * Marker interface (declares no members).
 * NOTE(review): presumably tags classes that belong to GATK's "protected" package tier,
 * mirroring PublicPackageSource — confirm against the code that scans for implementors.
 */
public interface ProtectedPackageSource {}
\ No newline at end of file
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/classloader/PublicPackageSource.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/classloader/PublicPackageSource.java
new file mode 100644
index 0000000..dd2068b
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/classloader/PublicPackageSource.java
@@ -0,0 +1,28 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.classloader;
+
/**
 * Marker interface (declares no members).
 * NOTE(review): presumably tags classes that belong to GATK's "public" package tier,
 * mirroring ProtectedPackageSource — confirm against the code that scans for implementors.
 */
public interface PublicPackageSource {}
\ No newline at end of file
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/clipping/ClippingOp.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/clipping/ClippingOp.java
new file mode 100644
index 0000000..ad0de1d
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/clipping/ClippingOp.java
@@ -0,0 +1,617 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.clipping;
+
+import com.google.java.contract.Requires;
+import htsjdk.samtools.Cigar;
+import htsjdk.samtools.CigarElement;
+import htsjdk.samtools.CigarOperator;
+import org.broadinstitute.gatk.utils.recalibration.EventType;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+import org.broadinstitute.gatk.utils.exceptions.UserException;
+import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
+
+import java.util.Iterator;
+import java.util.List;
+import java.util.Stack;
+import java.util.Vector;
+
+/**
+ * Represents a clip on a read.  It has a type (see the enum) along with a start and stop in the bases
+ * of the read, plus an option extraInfo (useful for carrying info where needed).
+ * <p/>
+ * Also holds the critical apply function that actually execute the clipping operation on a provided read,
+ * according to the wishes of the supplied ClippingAlgorithm enum.
+ */
+public class ClippingOp {
+    public final int start, stop; // inclusive
+
+    public ClippingOp(int start, int stop) {
+        this.start = start;
+        this.stop = stop;
+    }
+
+
+    public int getLength() {
+        return stop - start + 1;
+    }
+
+    /**
+     * Clips the bases in read according to this operation's start and stop.  Uses the clipping
+     * representation used is the one provided by algorithm argument.
+     *
+     * @param algorithm    clipping algorithm to use
+     * @param originalRead the read to be clipped
+     */
+    public GATKSAMRecord apply(ClippingRepresentation algorithm, GATKSAMRecord originalRead) {
+        GATKSAMRecord read = (GATKSAMRecord) originalRead.clone();
+        byte[] quals = read.getBaseQualities();
+        byte[] bases = read.getReadBases();
+        byte[] newBases = new byte[bases.length];
+        byte[] newQuals = new byte[quals.length];
+
+        switch (algorithm) {
+            // important note:
+            //   it's not safe to call read.getReadBases()[i] = 'N' or read.getBaseQualities()[i] = 0
+            //   because you're not guaranteed to get a pointer to the actual array of bytes in the GATKSAMRecord
+            case WRITE_NS:
+                for (int i = 0; i < bases.length; i++) {
+                    if (i >= start && i <= stop) {
+                        newBases[i] = 'N';
+                    }
+                    else {
+                        newBases[i] = bases[i];
+                    }
+                }
+                read.setReadBases(newBases);
+                break;
+            case WRITE_Q0S:
+                for (int i = 0; i < quals.length; i++) {
+                    if (i >= start && i <= stop) {
+                        newQuals[i] = 0;
+                    }
+                    else {
+                        newQuals[i] = quals[i];
+                    }
+                }
+                read.setBaseQualities(newQuals);
+                break;
+            case WRITE_NS_Q0S:
+                for (int i = 0; i < bases.length; i++) {
+                    if (i >= start && i <= stop) {
+                        newQuals[i] = 0;
+                        newBases[i] = 'N';
+                    }
+                    else {
+                        newQuals[i] = quals[i];
+                        newBases[i] = bases[i];
+                    }
+                }
+                read.setBaseQualities(newBases);
+                read.setReadBases(newBases);
+                break;
+            case HARDCLIP_BASES:
+                read = hardClip(read, start, stop);
+                break;
+
+            case SOFTCLIP_BASES:
+                if (read.getReadUnmappedFlag()) {
+                    // we can't process unmapped reads
+                    throw new UserException("Read Clipper cannot soft clip unmapped reads");
+                }
+
+                //System.out.printf("%d %d %d%n", stop, start, read.getReadLength());
+                int myStop = stop;
+                if ((stop + 1 - start) == read.getReadLength()) {
+                    // BAM representation issue -- we can't SOFTCLIP away all bases in a read, just leave it alone
+                    //Walker.logger.info(String.format("Warning, read %s has all bases clip but this can't be represented with SOFTCLIP_BASES, just leaving it alone", read.getReadName()));
+                    //break;
+                    myStop--; // just decrement stop
+                }
+
+                if (start > 0 && myStop != read.getReadLength() - 1)
+                    throw new RuntimeException(String.format("Cannot apply soft clipping operator to the middle of a read: %s to be clipped at %d-%d", read.getReadName(), start, myStop));
+
+                Cigar oldCigar = read.getCigar();
+
+                int scLeft = 0, scRight = read.getReadLength();
+                if (start == 0)
+                    scLeft = myStop + 1;
+                else
+                    scRight = start;
+
+                Cigar newCigar = softClip(oldCigar, scLeft, scRight);
+                read.setCigar(newCigar);
+
+                int newClippedStart = getNewAlignmentStartOffset(newCigar, oldCigar);
+                int newStart = read.getAlignmentStart() + newClippedStart;
+                read.setAlignmentStart(newStart);
+
+                break;
+
+            case REVERT_SOFTCLIPPED_BASES:
+                read = revertSoftClippedBases(read);
+                break;
+
+            default:
+                throw new IllegalStateException("Unexpected Clipping operator type " + algorithm);
+        }
+
+        return read;
+    }
+
+    private GATKSAMRecord revertSoftClippedBases(GATKSAMRecord read) {
+        GATKSAMRecord unclipped = (GATKSAMRecord) read.clone();
+
+        Cigar unclippedCigar = new Cigar();
+        int matchesCount = 0;
+        for (CigarElement element : read.getCigar().getCigarElements()) {
+            if (element.getOperator() == CigarOperator.SOFT_CLIP || element.getOperator() == CigarOperator.MATCH_OR_MISMATCH)
+                matchesCount += element.getLength();
+            else if (matchesCount > 0) {
+                unclippedCigar.add(new CigarElement(matchesCount, CigarOperator.MATCH_OR_MISMATCH));
+                matchesCount = 0;
+                unclippedCigar.add(element);
+            } else
+                unclippedCigar.add(element);
+        }
+        if (matchesCount > 0)
+            unclippedCigar.add(new CigarElement(matchesCount, CigarOperator.MATCH_OR_MISMATCH));
+
+        unclipped.setCigar(unclippedCigar);
+        final int newStart = read.getAlignmentStart() + calculateAlignmentStartShift(read.getCigar(), unclippedCigar);
+        unclipped.setAlignmentStart(newStart);
+
+        if ( newStart <= 0 ) {
+            // if the start of the unclipped read occurs before the contig,
+            // we must hard clip away the bases since we cannot represent reads with
+            // negative or 0 alignment start values in the SAMRecord (e.g., 0 means unaligned)
+            return hardClip(unclipped, 0, - newStart);
+        } else {
+            return unclipped;
+        }
+    }
+
+    /**
+     * Given a cigar string, get the number of bases hard or soft clipped at the start
+     */
+    private int getNewAlignmentStartOffset(final Cigar __cigar, final Cigar __oldCigar) {
+        int num = 0;
+        for (CigarElement e : __cigar.getCigarElements()) {
+            if (!e.getOperator().consumesReferenceBases()) {
+                if (e.getOperator().consumesReadBases()) {
+                    num += e.getLength();
+                }
+            } else {
+                break;
+            }
+        }
+
+        int oldNum = 0;
+        int curReadCounter = 0;
+
+        for (CigarElement e : __oldCigar.getCigarElements()) {
+            int curRefLength = e.getLength();
+            int curReadLength = e.getLength();
+            if (!e.getOperator().consumesReadBases()) {
+                curReadLength = 0;
+            }
+
+            boolean truncated = false;
+            if (curReadCounter + curReadLength > num) {
+                curReadLength = num - curReadCounter;
+                curRefLength = num - curReadCounter;
+                truncated = true;
+            }
+
+            if (!e.getOperator().consumesReferenceBases()) {
+                curRefLength = 0;
+            }
+
+            curReadCounter += curReadLength;
+            oldNum += curRefLength;
+
+            if (curReadCounter > num || truncated) {
+                break;
+            }
+        }
+
+        return oldNum;
+    }
+
+    /**
+     * Given a cigar string, soft clip up to startClipEnd and soft clip starting at endClipBegin
+     */
+    private Cigar softClip(final Cigar __cigar, final int __startClipEnd, final int __endClipBegin) {
+        if (__endClipBegin <= __startClipEnd) {
+            //whole thing should be soft clipped
+            int cigarLength = 0;
+            for (CigarElement e : __cigar.getCigarElements()) {
+                cigarLength += e.getLength();
+            }
+
+            Cigar newCigar = new Cigar();
+            newCigar.add(new CigarElement(cigarLength, CigarOperator.SOFT_CLIP));
+            assert newCigar.isValid(null, -1) == null;
+            return newCigar;
+        }
+
+        int curLength = 0;
+        Vector<CigarElement> newElements = new Vector<CigarElement>();
+        for (CigarElement curElem : __cigar.getCigarElements()) {
+            if (!curElem.getOperator().consumesReadBases()) {
+                if (curElem.getOperator() == CigarOperator.HARD_CLIP || curLength > __startClipEnd && curLength < __endClipBegin) {
+                    newElements.add(new CigarElement(curElem.getLength(), curElem.getOperator()));
+                }
+                continue;
+            }
+
+            int s = curLength;
+            int e = curLength + curElem.getLength();
+            if (e <= __startClipEnd || s >= __endClipBegin) {
+                //must turn this entire thing into a clip
+                newElements.add(new CigarElement(curElem.getLength(), CigarOperator.SOFT_CLIP));
+            } else if (s >= __startClipEnd && e <= __endClipBegin) {
+                //same thing
+                newElements.add(new CigarElement(curElem.getLength(), curElem.getOperator()));
+            } else {
+                //we are clipping in the middle of this guy
+                CigarElement newStart = null;
+                CigarElement newMid = null;
+                CigarElement newEnd = null;
+
+                int midLength = curElem.getLength();
+                if (s < __startClipEnd) {
+                    newStart = new CigarElement(__startClipEnd - s, CigarOperator.SOFT_CLIP);
+                    midLength -= newStart.getLength();
+                }
+
+                if (e > __endClipBegin) {
+                    newEnd = new CigarElement(e - __endClipBegin, CigarOperator.SOFT_CLIP);
+                    midLength -= newEnd.getLength();
+                }
+                assert midLength >= 0;
+                if (midLength > 0) {
+                    newMid = new CigarElement(midLength, curElem.getOperator());
+                }
+                if (newStart != null) {
+                    newElements.add(newStart);
+                }
+                if (newMid != null) {
+                    newElements.add(newMid);
+                }
+                if (newEnd != null) {
+                    newElements.add(newEnd);
+                }
+            }
+            curLength += curElem.getLength();
+        }
+
+        Vector<CigarElement> finalNewElements = new Vector<CigarElement>();
+        CigarElement lastElement = null;
+        for (CigarElement elem : newElements) {
+            if (lastElement == null || lastElement.getOperator() != elem.getOperator()) {
+                if (lastElement != null) {
+                    finalNewElements.add(lastElement);
+                }
+                lastElement = elem;
+            } else {
+                lastElement = new CigarElement(lastElement.getLength() + elem.getLength(), lastElement.getOperator());
+            }
+        }
+        if (lastElement != null) {
+            finalNewElements.add(lastElement);
+        }
+
+        Cigar newCigar = new Cigar(finalNewElements);
+        assert newCigar.isValid(null, -1) == null;
+        return newCigar;
+    }
+
+    /**
+     * Hard clip bases from read, from start to stop in base coordinates
+     *
+     * If start == 0, then we will clip from the front of the read, otherwise we clip
+     * from the right.  If start == 0 and stop == 10, this would clip out the first
+     * 10 bases of the read.
+     *
+     * Note that this function works with reads with negative alignment starts, in order to
+     * allow us to hardClip reads that have had their soft clips reverted and so might have
+     * negative alignment starts
+     *
+     * Works properly with reduced reads and insertion/deletion base qualities
+     *
+     * @param read a non-null read
+     * @param start a start >= 0 and < read.length
+     * @param stop a stop >= 0 and < read.length.
+     * @return a cloned version of read that has been properly trimmed down
+     */
+    private GATKSAMRecord hardClip(GATKSAMRecord read, int start, int stop) {
+
+        // If the read is unmapped there is no Cigar string and neither should we create a new cigar string
+        final CigarShift cigarShift = (read.getReadUnmappedFlag()) ? new CigarShift(new Cigar(), 0, 0) : hardClipCigar(read.getCigar(), start, stop);
+
+        // the cigar may force a shift left or right (or both) in case we are left with insertions
+        // starting or ending the read after applying the hard clip on start/stop.
+        final int newLength = read.getReadLength() - (stop - start + 1) - cigarShift.shiftFromStart - cigarShift.shiftFromEnd;
+        final byte[] newBases = new byte[newLength];
+        final byte[] newQuals = new byte[newLength];
+        final int copyStart = (start == 0) ? stop + 1 + cigarShift.shiftFromStart : cigarShift.shiftFromStart;
+
+        System.arraycopy(read.getReadBases(), copyStart, newBases, 0, newLength);
+        System.arraycopy(read.getBaseQualities(), copyStart, newQuals, 0, newLength);
+
+        final GATKSAMRecord hardClippedRead = (GATKSAMRecord) read.clone();
+
+        hardClippedRead.resetSoftStartAndEnd();                                                                         // reset the cached soft start and end because they may have changed now that the read was hard clipped. No need to calculate them now. They'll be lazily calculated on the next call to getSoftStart()/End()
+        hardClippedRead.setBaseQualities(newQuals);
+        hardClippedRead.setReadBases(newBases);
+        hardClippedRead.setCigar(cigarShift.cigar);
+        if (start == 0)
+            hardClippedRead.setAlignmentStart(read.getAlignmentStart() + calculateAlignmentStartShift(read.getCigar(), cigarShift.cigar));
+
+        if (read.hasBaseIndelQualities()) {
+            final byte[] newBaseInsertionQuals = new byte[newLength];
+            final byte[] newBaseDeletionQuals = new byte[newLength];
+            System.arraycopy(read.getBaseInsertionQualities(), copyStart, newBaseInsertionQuals, 0, newLength);
+            System.arraycopy(read.getBaseDeletionQualities(), copyStart, newBaseDeletionQuals, 0, newLength);
+            hardClippedRead.setBaseQualities(newBaseInsertionQuals, EventType.BASE_INSERTION);
+            hardClippedRead.setBaseQualities(newBaseDeletionQuals, EventType.BASE_DELETION);
+        }
+
+        return hardClippedRead;
+
+    }
+
+    @Requires({"!cigar.isEmpty()"})
+    private CigarShift hardClipCigar(Cigar cigar, int start, int stop) {
+        Cigar newCigar = new Cigar();
+        int index = 0;
+        int totalHardClipCount = stop - start + 1;
+        int alignmentShift = 0; // caused by hard clipping deletions
+
+        // hard clip the beginning of the cigar string
+        if (start == 0) {
+            Iterator<CigarElement> cigarElementIterator = cigar.getCigarElements().iterator();
+            CigarElement cigarElement = cigarElementIterator.next();
+            // Skip all leading hard clips
+            while (cigarElement.getOperator() == CigarOperator.HARD_CLIP) {
+                totalHardClipCount += cigarElement.getLength();
+                if (cigarElementIterator.hasNext())
+                    cigarElement = cigarElementIterator.next();
+                else
+                    throw new ReviewedGATKException("Read is entirely hardclipped, shouldn't be trying to clip it's cigar string");
+            }
+            // keep clipping until we hit stop
+            while (index <= stop) {
+                int shift = 0;
+                if (cigarElement.getOperator().consumesReadBases())
+                    shift = cigarElement.getLength();
+
+                // we're still clipping or just finished perfectly
+                if (index + shift == stop + 1) {
+                    alignmentShift += calculateHardClippingAlignmentShift(cigarElement, cigarElement.getLength());
+                    newCigar.add(new CigarElement(totalHardClipCount + alignmentShift, CigarOperator.HARD_CLIP));
+                }
+                // element goes beyond what we need to clip
+                else if (index + shift > stop + 1) {
+                    int elementLengthAfterChopping = cigarElement.getLength() - (stop - index + 1);
+                    alignmentShift += calculateHardClippingAlignmentShift(cigarElement, stop - index + 1);
+                    newCigar.add(new CigarElement(totalHardClipCount + alignmentShift, CigarOperator.HARD_CLIP));
+                    newCigar.add(new CigarElement(elementLengthAfterChopping, cigarElement.getOperator()));
+                }
+                index += shift;
+                alignmentShift += calculateHardClippingAlignmentShift(cigarElement, shift);
+
+                if (index <= stop && cigarElementIterator.hasNext())
+                    cigarElement = cigarElementIterator.next();
+                else
+                    break;
+            }
+
+            // add the remaining cigar elements
+            while (cigarElementIterator.hasNext()) {
+                cigarElement = cigarElementIterator.next();
+                newCigar.add(new CigarElement(cigarElement.getLength(), cigarElement.getOperator()));
+            }
+        }
+
+        // hard clip the end of the cigar string
+        else {
+            Iterator<CigarElement> cigarElementIterator = cigar.getCigarElements().iterator();
+            CigarElement cigarElement = cigarElementIterator.next();
+
+            // Keep marching on until we find the start
+            while (index < start) {
+                int shift = 0;
+                if (cigarElement.getOperator().consumesReadBases())
+                    shift = cigarElement.getLength();
+
+                // we haven't gotten to the start yet, keep everything as is.
+                if (index + shift < start)
+                    newCigar.add(new CigarElement(cigarElement.getLength(), cigarElement.getOperator()));
+
+                    // element goes beyond our clip starting position
+                else {
+                    int elementLengthAfterChopping = start - index;
+                    alignmentShift += calculateHardClippingAlignmentShift(cigarElement, cigarElement.getLength() - (start - index));
+
+                    // if this last element is a HARD CLIP operator, just merge it with our hard clip operator to be added later
+                    if (cigarElement.getOperator() == CigarOperator.HARD_CLIP)
+                        totalHardClipCount += elementLengthAfterChopping;
+                        // otherwise, maintain what's left of this last operator
+                    else
+                        newCigar.add(new CigarElement(elementLengthAfterChopping, cigarElement.getOperator()));
+                }
+                index += shift;
+                if (index < start && cigarElementIterator.hasNext())
+                    cigarElement = cigarElementIterator.next();
+                else
+                    break;
+            }
+
+            // check if we are hard clipping indels
+            while (cigarElementIterator.hasNext()) {
+                cigarElement = cigarElementIterator.next();
+                alignmentShift += calculateHardClippingAlignmentShift(cigarElement, cigarElement.getLength());
+
+                // if the read had a HardClip operator in the end, combine it with the Hard Clip we are adding
+                if (cigarElement.getOperator() == CigarOperator.HARD_CLIP)
+                    totalHardClipCount += cigarElement.getLength();
+            }
+            newCigar.add(new CigarElement(totalHardClipCount + alignmentShift, CigarOperator.HARD_CLIP));
+        }
+        return cleanHardClippedCigar(newCigar);
+    }
+
+    /**
+     * Checks if a hard clipped cigar left a read starting or ending with deletions or gap (N)
+     * and cleans it up accordingly.
+     *
+     * @param cigar the original cigar
+     * @return an object with the shifts (see CigarShift class)
+     */
+    private CigarShift cleanHardClippedCigar(final Cigar cigar) {
+        final Cigar cleanCigar = new Cigar();
+        int shiftFromStart = 0;
+        int shiftFromEnd = 0;
+        Stack<CigarElement> cigarStack = new Stack<CigarElement>();
+        final Stack<CigarElement> inverseCigarStack = new Stack<CigarElement>();
+
+        for (final CigarElement cigarElement : cigar.getCigarElements())
+            cigarStack.push(cigarElement);
+
+        for (int i = 1; i <= 2; i++) {
+            int shift = 0;
+            int totalHardClip = 0;
+            boolean readHasStarted = false;
+            boolean addedHardClips = false;
+
+            while (!cigarStack.empty()) {
+                CigarElement cigarElement = cigarStack.pop();
+
+                if (!readHasStarted &&
+                        cigarElement.getOperator() != CigarOperator.DELETION &&
+                        cigarElement.getOperator() != CigarOperator.SKIPPED_REGION &&
+                        cigarElement.getOperator() != CigarOperator.HARD_CLIP)
+                    readHasStarted = true;
+
+                else if (!readHasStarted && cigarElement.getOperator() == CigarOperator.HARD_CLIP)
+                    totalHardClip += cigarElement.getLength();
+
+                else if (!readHasStarted && cigarElement.getOperator() == CigarOperator.DELETION)
+                    totalHardClip += cigarElement.getLength();
+
+                else if (!readHasStarted && cigarElement.getOperator() == CigarOperator.SKIPPED_REGION)
+                    totalHardClip += cigarElement.getLength();
+
+                if (readHasStarted) {
+                    if (i == 1) {
+                        if (!addedHardClips) {
+                            if (totalHardClip > 0)
+                                inverseCigarStack.push(new CigarElement(totalHardClip, CigarOperator.HARD_CLIP));
+                            addedHardClips = true;
+                        }
+                        inverseCigarStack.push(cigarElement);
+                    } else {
+                        if (!addedHardClips) {
+                            if (totalHardClip > 0)
+                                cleanCigar.add(new CigarElement(totalHardClip, CigarOperator.HARD_CLIP));
+                            addedHardClips = true;
+                        }
+                        cleanCigar.add(cigarElement);
+                    }
+                }
+            }
+            // first pass  (i=1) is from end to start of the cigar elements
+            if (i == 1) {
+                shiftFromEnd = shift;
+                cigarStack = inverseCigarStack;
+            }
+            // second pass (i=2) is from start to end with the end already cleaned
+            else {
+                shiftFromStart = shift;
+            }
+        }
+        return new CigarShift(cleanCigar, shiftFromStart, shiftFromEnd);
+    }
+
+    /**
+     * Compute the offset of the first "real" position in the cigar on the genome
+     *
+     * This is defined as a first position after a run of Hs followed by a run of Ss
+     *
+     * @param cigar A non-null cigar
+     * @return the offset (from 0) of the first on-genome base
+     */
+    private int calcHardSoftOffset(final Cigar cigar) {
+        final List<CigarElement> elements = cigar.getCigarElements();
+
+        int size = 0;
+        int i = 0;
+        while ( i < elements.size() && elements.get(i).getOperator() == CigarOperator.HARD_CLIP ) {
+            size += elements.get(i).getLength();
+            i++;
+        }
+        while ( i < elements.size() && elements.get(i).getOperator() == CigarOperator.SOFT_CLIP ) {
+            size += elements.get(i).getLength();
+            i++;
+        }
+
+        return size;
+    }
+
+    private int calculateAlignmentStartShift(Cigar oldCigar, Cigar newCigar) {
+        final int newShift = calcHardSoftOffset(newCigar);
+        final int oldShift = calcHardSoftOffset(oldCigar);
+        return newShift - oldShift;
+    }
+
+    private int calculateHardClippingAlignmentShift(CigarElement cigarElement, int clippedLength) {
+        // Insertions should be discounted from the total hard clip count
+        if (cigarElement.getOperator() == CigarOperator.INSERTION)
+            return -clippedLength;
+
+            // Deletions and Ns should be added to the total hard clip count (because we want to maintain the original alignment start)
+        else if (cigarElement.getOperator() == CigarOperator.DELETION || cigarElement.getOperator() == CigarOperator.SKIPPED_REGION)
+            return cigarElement.getLength();
+
+        // There is no shift if we are not clipping an indel
+        return 0;
+    }
+
+    private static class CigarShift {
+        private Cigar cigar;
+        private int shiftFromStart;
+        private int shiftFromEnd;
+
+        private CigarShift(Cigar cigar, int shiftFromStart, int shiftFromEnd) {
+            this.cigar = cigar;
+            this.shiftFromStart = shiftFromStart;
+            this.shiftFromEnd = shiftFromEnd;
+        }
+    }
+}
\ No newline at end of file
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/clipping/ClippingRepresentation.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/clipping/ClippingRepresentation.java
new file mode 100644
index 0000000..8d55651
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/clipping/ClippingRepresentation.java
@@ -0,0 +1,63 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.clipping;
+
+/**
+ * How should we represent a clipped bases in a read?
+ */
/**
 * How should we represent clipped bases in a read?
 */
public enum ClippingRepresentation {
    /** Clipped bases are changed to Ns */
    WRITE_NS,

    /** Clipped bases are changed to have Q0 quality score */
    WRITE_Q0S,

    /** Clipped bases are changed to have both an N base and a Q0 quality score */
    WRITE_NS_Q0S,

    /**
     * Change the read's cigar string to soft clip (S, see sam-spec) away the bases.
     * Note that this can only be applied to cases where the clipped bases occur
     * at the start or end of a read.
     */
    SOFTCLIP_BASES,

    /**
     * WARNING: THIS OPTION IS STILL UNDER DEVELOPMENT AND IS NOT SUPPORTED.
     *
     * Change the read's cigar string to hard clip (H, see sam-spec) away the bases.
     * Hard clipping, unlike soft clipping, actually removes bases from the read,
     * reducing the resulting file's size but introducing an irreversible (i.e.,
     * lossy) operation.  Note that this can only be applied to cases where the clipped
     * bases occur at the start or end of a read.
     */
    HARDCLIP_BASES,

    /**
     * Turn all soft-clipped bases into matches
     */
    REVERT_SOFTCLIPPED_BASES,
}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/clipping/ReadClipper.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/clipping/ReadClipper.java
new file mode 100644
index 0000000..3710698
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/clipping/ReadClipper.java
@@ -0,0 +1,568 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.clipping;
+
+import com.google.java.contract.Requires;
+import htsjdk.samtools.CigarElement;
+import htsjdk.samtools.CigarOperator;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+import org.broadinstitute.gatk.utils.recalibration.EventType;
+import org.broadinstitute.gatk.utils.sam.CigarUtils;
+import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
+import org.broadinstitute.gatk.utils.sam.ReadUtils;
+
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * A comprehensive clipping tool.
+ *
+ * General Contract:
+ *  - All clipping operations return a new read with the clipped bases requested, it never modifies the original read.
+ *  - If a read is fully clipped, return an empty GATKSAMRecord, never null.
+ *  - When hard clipping, add cigar operator H for every *reference base* removed (i.e. Matches, SoftClips and Deletions, but *not* insertions). See Hard Clipping notes for details.
+ *
+ *
+ * There are several types of clipping to use:
+ *
+ * Write N's:
+ *   Change the bases to N's in the desired region. This can be applied anywhere in the read.
+ *
+ * Write Q0's:
+ *   Change the quality of the bases in the desired region to Q0. This can be applied anywhere in the read.
+ *
+ * Write both N's and Q0's:
+ *   Same as the two independent operations, put together.
+ *
+ * Soft Clipping:
+ *   Do not change the read, just mark the reads as soft clipped in the Cigar String
+ *   and adjust the alignment start and end of the read.
+ *
+ * Hard Clipping:
+ *   Creates a new read without the hard clipped bases (and base qualities). The cigar string
+ *   will be updated with the cigar operator H for every reference base removed (i.e. Matches,
+ *   Soft clipped bases and deletions, but *not* insertions). This contract with the cigar
+ *   is necessary to allow read.getUnclippedStart() / End() to recover the original alignment
+ *   of the read (before clipping).
+ *
+ */
+public class ReadClipper {
+    final GATKSAMRecord read;     // the original read; clipping operations never modify it (see class contract)
+    boolean wasClipped;           // set to true once clipRead() has been executed
+    List<ClippingOp> ops = null;  // pending clipping operations; lazily created by addOp(), cleared by clipRead()
+
+    /**
+     * Initializes a ReadClipper object.
+     *
+     * You can set up your clipping operations using the addOp method. When you're ready to
+     * generate a new read with all the clipping operations, use clipRead().
+     *
+     * Note: Use this if you want to set up multiple operations on the read using the ClippingOp
+     * class. If you just want to apply one of the typical modes of clipping, use the static
+     * clipping functions available in this class instead.
+     *
+     * @param read the read to clip
+     */
+    public ReadClipper(final GATKSAMRecord read) {
+        this.read = read;
+        this.wasClipped = false;
+    }
+
+    /**
+     * Add clipping operation to the read.
+     *
+     * You can add as many operations as necessary to this read before clipping. Beware that the
+     * order in which you add these operations matter. For example, if you hard clip the beginning
+     * of a read first then try to hard clip the end, the indices will have changed. Make sure you
+     * know what you're doing, otherwise just use the static functions below that take care of the
+     * ordering for you.
+     *
+     * Note: You only choose the clipping mode when you use clipRead()
+     *
+     * @param op a ClippingOp object describing the area you want to clip.
+     */
+    public void addOp(ClippingOp op) {
+        if (ops == null) ops = new ArrayList<ClippingOp>();  // lazy init: most reads never accumulate ops
+        ops.add(op);
+    }
+
+    /**
+     * Check the list of operations set up for this read.
+     *
+     * @return a list of the operations set up for this read, or null if no operation has been added.
+     */
+    public List<ClippingOp> getOps() {
+        return ops;
+    }
+
+    /**
+     * Check whether or not this read has been clipped.
+     * @return true if this read has produced a clipped read, false otherwise.
+     */
+    public boolean wasClipped() {
+        return wasClipped;
+    }
+
+    /**
+     * The original read.
+     *
+     * @return  returns the read to be clipped (original)
+     */
+    public GATKSAMRecord getRead() {
+        return read;
+    }
+
+    /**
+     * Clips a read according to ops and the chosen algorithm.
+     *
+     * Applies the queued ops in insertion order, then clears the queue and marks this
+     * clipper as having clipped.
+     *
+     * @param algorithm What mode of clipping do you want to apply for the stacked operations.
+     * @return the read with the clipping applied; an empty read (never null) if everything was clipped away.
+     */
+    public GATKSAMRecord clipRead(ClippingRepresentation algorithm) {
+        if (ops == null)
+            return getRead();
+
+        GATKSAMRecord clippedRead = read;
+        for (ClippingOp op : getOps()) {
+            final int readLength = clippedRead.getReadLength();
+            //check if the clipped read can still be clipped in the range requested
+            if (op.start < readLength) {
+                ClippingOp fixedOperation = op;
+                if (op.stop >= readLength)
+                    fixedOperation = new ClippingOp(op.start, readLength - 1);  // truncate the op to the current read length
+
+                clippedRead = fixedOperation.apply(algorithm, clippedRead);
+            }
+        }
+        wasClipped = true;
+        ops.clear();
+        if ( clippedRead.isEmpty() )
+            return GATKSAMRecord.emptyRead(clippedRead);  // class contract: never return null for a fully-clipped read
+        return clippedRead;
+    }
+
+
+    /**
+     * Hard clips the left tail of a read up to (and including) refStop using reference
+     * coordinates.
+     *
+     * @param refStop the last base to be hard clipped in the left tail of the read.
+     * @return a new read, without the left tail.
+     */
+    @Requires("!read.getReadUnmappedFlag()")  // can't handle unmapped reads, as we're using reference coordinates to clip
+    private GATKSAMRecord hardClipByReferenceCoordinatesLeftTail(int refStop) {
+        return hardClipByReferenceCoordinates(-1, refStop);
+    }
+    public static GATKSAMRecord hardClipByReferenceCoordinatesLeftTail(GATKSAMRecord read, int refStop) {
+        return (new ReadClipper(read)).hardClipByReferenceCoordinates(-1, refStop);
+    }
+
+
+
+    /**
+     * Hard clips the right tail of a read starting at (and including) refStart using reference
+     * coordinates.
+     *
+     * @param refStart the first base to be hard clipped in the right tail of the read.
+     * @return a new read, without the right tail.
+     */
+    @Requires("!read.getReadUnmappedFlag()")  // can't handle unmapped reads, as we're using reference coordinates to clip
+    private GATKSAMRecord hardClipByReferenceCoordinatesRightTail(int refStart) {
+        return hardClipByReferenceCoordinates(refStart, -1);
+    }
+    public static GATKSAMRecord hardClipByReferenceCoordinatesRightTail(GATKSAMRecord read, int refStart) {
+        return (new ReadClipper(read)).hardClipByReferenceCoordinates(refStart, -1);
+    }
+
+    /**
+     * Hard clips a read using read coordinates.
+     *
+     * @param start the first base to clip (inclusive)
+     * @param stop the last base to clip (inclusive)
+     * @return a new read, without the clipped bases
+     */
+    @Requires({"start >= 0 && stop <= read.getReadLength() - 1",   // start and stop have to be within the read
+               "start == 0 || stop == read.getReadLength() - 1"})  // cannot clip the middle of the read
+    private GATKSAMRecord hardClipByReadCoordinates(int start, int stop) {
+        if (read.isEmpty() || (start == 0 && stop == read.getReadLength() - 1))  // clipping the whole read yields an empty read
+            return GATKSAMRecord.emptyRead(read);
+
+        this.addOp(new ClippingOp(start, stop));
+        return clipRead(ClippingRepresentation.HARDCLIP_BASES);
+    }
+    public static GATKSAMRecord hardClipByReadCoordinates(GATKSAMRecord read, int start, int stop) {
+        return (new ReadClipper(read)).hardClipByReadCoordinates(start, stop);
+    }
+
+
+    /**
+     * Hard clips both tails of a read.
+     *   Left tail goes from the beginning to the 'left' coordinate (inclusive)
+     *   Right tail goes from the 'right' coordinate (inclusive) until the end of the read
+     *
+     * @param left the coordinate of the last base to be clipped in the left tail (inclusive)
+     * @param right the coordinate of the first base to be clipped in the right tail (inclusive)
+     * @return a new read, without the clipped bases
+     */
+    @Requires({"left <= right",                    // tails cannot overlap
+               "left >= read.getAlignmentStart()", // coordinate has to be within the mapped read
+               "right <= read.getAlignmentEnd()"}) // coordinate has to be within the mapped read
+    private GATKSAMRecord hardClipBothEndsByReferenceCoordinates(int left, int right) {
+        if (read.isEmpty() || left == right)
+            return GATKSAMRecord.emptyRead(read);
+        GATKSAMRecord leftTailRead = hardClipByReferenceCoordinates(right, -1);  // clip the right tail first so the 'left' reference coordinate stays valid
+
+        // after clipping one tail, it is possible that the consequent hard clipping of adjacent deletions
+        // make the left cut index no longer part of the read. In that case, clip the read entirely.
+        if (left > leftTailRead.getAlignmentEnd())
+            return GATKSAMRecord.emptyRead(read);
+
+        ReadClipper clipper = new ReadClipper(leftTailRead);
+        return clipper.hardClipByReferenceCoordinatesLeftTail(left);
+    }
+    public static GATKSAMRecord hardClipBothEndsByReferenceCoordinates(GATKSAMRecord read, int left, int right) {
+        return (new ReadClipper(read)).hardClipBothEndsByReferenceCoordinates(left, right);
+    }
+
+
+    /**
+     * Clips any contiguous tail (left, right or both) with base quality lower than lowQual using the desired algorithm.
+     *
+     * This function will look for low quality tails and hard clip them away. A low quality tail
+     * ends when a base has base quality greater than lowQual.
+     *
+     * @param algorithm the algorithm to use (HardClip, SoftClip, Write N's,...)
+     * @param lowQual every base quality lower than or equal to this in the tail of the read will be hard clipped
+     * @return a new read without low quality tails
+     */
+    private GATKSAMRecord clipLowQualEnds(ClippingRepresentation algorithm, byte lowQual) {
+        if (read.isEmpty())
+            return read;
+
+        final byte [] quals = read.getBaseQualities();
+        final int readLength = read.getReadLength();
+        int leftClipIndex = 0;
+        int rightClipIndex = readLength - 1;
+
+        // check how far we can clip both sides
+        while (rightClipIndex >= 0 && quals[rightClipIndex] <= lowQual) rightClipIndex--;
+        while (leftClipIndex < readLength && quals[leftClipIndex] <= lowQual) leftClipIndex++;
+
+        // if the entire read should be clipped, then return an empty read.
+        if (leftClipIndex > rightClipIndex)
+            return GATKSAMRecord.emptyRead(read);
+
+        // queue the right-tail op before the left-tail op: ops are applied in order and clipping
+        // the right side first keeps the left-side read coordinates valid
+        if (rightClipIndex < readLength - 1) {
+            this.addOp(new ClippingOp(rightClipIndex + 1, readLength - 1));
+        }
+        if (leftClipIndex > 0 ) {
+            this.addOp(new ClippingOp(0, leftClipIndex - 1));
+        }
+        return this.clipRead(algorithm);
+    }
+
+    private GATKSAMRecord hardClipLowQualEnds(byte lowQual) {
+        return this.clipLowQualEnds(ClippingRepresentation.HARDCLIP_BASES, lowQual);
+    }
+    public static GATKSAMRecord hardClipLowQualEnds(GATKSAMRecord read, byte lowQual) {
+        return (new ReadClipper(read)).hardClipLowQualEnds(lowQual);
+    }
+    public static GATKSAMRecord clipLowQualEnds(GATKSAMRecord read, byte lowQual, ClippingRepresentation algorithm) {
+        return (new ReadClipper(read)).clipLowQualEnds(algorithm, lowQual);
+    }
+
+
+    /**
+     * Will hard clip every soft clipped bases in the read.
+     *
+     * @return a new read without the soft clipped bases
+     */
+    private GATKSAMRecord hardClipSoftClippedBases () {
+        if (read.isEmpty())
+            return read;
+
+        int readIndex = 0;
+        int cutLeft = -1;            // last position of the left-tail clip (inclusive); left tail is clipped as [0, cutLeft]
+        int cutRight = -1;           // first position of the right-tail clip (inclusive); right tail is clipped as [cutRight, readLength - 1]
+        boolean rightTail = false;   // trigger to stop clipping the left tail and start cutting the right tail
+
+        for (CigarElement cigarElement : read.getCigar().getCigarElements()) {
+            if (cigarElement.getOperator() == CigarOperator.SOFT_CLIP) {
+                if (rightTail) {
+                    cutRight = readIndex;
+                }
+                else {
+                    cutLeft = readIndex + cigarElement.getLength() - 1;
+                }
+            }
+            else if (cigarElement.getOperator() != CigarOperator.HARD_CLIP)
+                rightTail = true;    // any non-clip operator means we've passed the left tail
+
+            if (cigarElement.getOperator().consumesReadBases())
+                readIndex += cigarElement.getLength();
+        }
+
+        // It is extremely important that we cut the end first otherwise the read coordinates change.
+        if (cutRight >= 0)
+            this.addOp(new ClippingOp(cutRight, read.getReadLength() - 1));
+        if (cutLeft >= 0)
+            this.addOp(new ClippingOp(0, cutLeft));
+
+        return clipRead(ClippingRepresentation.HARDCLIP_BASES);
+    }
+    public static GATKSAMRecord hardClipSoftClippedBases (GATKSAMRecord read) {
+        return (new ReadClipper(read)).hardClipSoftClippedBases();
+    }
+
+
+    /**
+     * Hard clip the read to the variable region (from refStart to refStop)
+     *
+     * @param read     the read to be clipped
+     * @param refStart the beginning of the variant region (inclusive)
+     * @param refStop  the end of the variant region (inclusive)
+     * @return the read hard clipped to the variant region
+     */
+    public static GATKSAMRecord hardClipToRegion( final GATKSAMRecord read, final int refStart, final int refStop ) {
+        final int start = read.getAlignmentStart();
+        final int stop = read.getAlignmentEnd();
+        return hardClipToRegion(read, refStart, refStop,start,stop);
+    }
+
+    /**
+     * Hard clip the read to the variable region (from refStart to refStop) processing also the clipped bases
+     *
+     * @param read     the read to be clipped
+     * @param refStart the beginning of the variant region (inclusive)
+     * @param refStop  the end of the variant region (inclusive)
+     * @return the read hard clipped to the variant region
+     */
+    public static GATKSAMRecord hardClipToRegionIncludingClippedBases( final GATKSAMRecord read, final int refStart, final int refStop ) {
+        // use the original (pre-clipping) alignment start and span the full cigar so clipped bases count
+        final int start = read.getOriginalAlignmentStart();
+        final int stop = start + CigarUtils.countRefBasesBasedOnCigar(read,0,read.getCigarLength()) - 1;
+        return hardClipToRegion(read, refStart, refStop,start,stop);
+    }
+
+    private static GATKSAMRecord hardClipToRegion( final GATKSAMRecord read, final int refStart, final int refStop, final int alignmentStart, final int alignmentStop){
+        // check if the read is contained in region
+        if (alignmentStart <= refStop && alignmentStop >= refStart) {
+            if (alignmentStart < refStart && alignmentStop > refStop)
+                return hardClipBothEndsByReferenceCoordinates(read, refStart - 1, refStop + 1);
+            else if (alignmentStart < refStart)
+                return hardClipByReferenceCoordinatesLeftTail(read, refStart - 1);
+            else if (alignmentStop > refStop)
+                return hardClipByReferenceCoordinatesRightTail(read, refStop + 1);
+            return read;   // read is fully contained in the region: nothing to clip
+        } else
+            return GATKSAMRecord.emptyRead(read);   // read lies entirely outside the region
+
+    }
+
+    public static List<GATKSAMRecord> hardClipToRegion( final List<GATKSAMRecord> reads, final int refStart, final int refStop ) {
+        final List<GATKSAMRecord> returnList = new ArrayList<GATKSAMRecord>( reads.size() );
+        for( final GATKSAMRecord read : reads ) {
+            final GATKSAMRecord clippedRead = hardClipToRegion( read, refStart, refStop );
+            if( !clippedRead.isEmpty() ) {
+                returnList.add( clippedRead );   // fully-clipped reads are dropped from the result
+            }
+        }
+        return returnList;
+    }
+
+    /**
+     * Checks if a read contains adaptor sequences. If it does, hard clips them out.
+     *
+     * Note: To see how a read is checked for adaptor sequence see ReadUtils.getAdaptorBoundary()
+     *
+     * @return a new read without adaptor sequence
+     */
+    private GATKSAMRecord hardClipAdaptorSequence () {
+        final int adaptorBoundary = ReadUtils.getAdaptorBoundary(read);
+
+        if (adaptorBoundary == ReadUtils.CANNOT_COMPUTE_ADAPTOR_BOUNDARY || !ReadUtils.isInsideRead(read, adaptorBoundary))
+            return read;   // no computable boundary, or boundary falls outside the read: nothing to clip
+
+        // which tail is clipped depends on strand; see ReadUtils.getAdaptorBoundary() for how the boundary is defined
+        return read.getReadNegativeStrandFlag() ? hardClipByReferenceCoordinatesLeftTail(adaptorBoundary) : hardClipByReferenceCoordinatesRightTail(adaptorBoundary);
+    }
+    public static GATKSAMRecord hardClipAdaptorSequence (GATKSAMRecord read) {
+        return (new ReadClipper(read)).hardClipAdaptorSequence();
+    }
+
+
+    /**
+     * Hard clips any leading insertions in the read. Only looks at the beginning of the read, not the end.
+     *
+     * @return a new read without leading insertions
+     */
+    private GATKSAMRecord hardClipLeadingInsertions() {
+        if (read.isEmpty())
+            return read;
+
+        // walk the cigar from the start; stop at the first operator that is neither a clip nor an insertion
+        for(CigarElement cigarElement : read.getCigar().getCigarElements()) {
+            if (cigarElement.getOperator() != CigarOperator.HARD_CLIP && cigarElement.getOperator() != CigarOperator.SOFT_CLIP &&
+                cigarElement.getOperator() != CigarOperator.INSERTION)
+                break;
+
+            else if (cigarElement.getOperator() == CigarOperator.INSERTION)
+                this.addOp(new ClippingOp(0, cigarElement.getLength() - 1));
+
+        }
+        return clipRead(ClippingRepresentation.HARDCLIP_BASES);
+    }
+    public static GATKSAMRecord hardClipLeadingInsertions(GATKSAMRecord read) {
+        return (new ReadClipper(read)).hardClipLeadingInsertions();
+    }
+
+
+    /**
+     * Turns soft clipped bases into matches
+     * @return a new read with every soft clip turned into a match
+     */
+    private GATKSAMRecord revertSoftClippedBases() {
+        if (read.isEmpty())
+            return read;
+
+        // NOTE(review): the (0,0) coordinates look like a placeholder; ClippingOp.apply defines the
+        // actual semantics for REVERT_SOFTCLIPPED_BASES -- confirm the coordinates are unused there
+        this.addOp(new ClippingOp(0, 0));
+        return this.clipRead(ClippingRepresentation.REVERT_SOFTCLIPPED_BASES);
+    }
+
+    /**
+     * Reverts ALL soft-clipped bases
+     *
+     * @param read the read
+     * @return the read with all soft-clipped bases turned into matches
+     */
+    public static GATKSAMRecord revertSoftClippedBases(GATKSAMRecord read) {
+        return (new ReadClipper(read)).revertSoftClippedBases();
+    }
+
+    /**
+     * Reverts only soft clipped bases with quality score greater than or equal to minQual
+     *
+     * todo -- Note: Will write a temporary field with the number of soft clips that were undone on each side (left: 'SL', right: 'SR') -- THIS HAS BEEN REMOVED TEMPORARILY SHOULD HAPPEN INSIDE THE CLIPPING ROUTINE!
+     *
+     * @param read    the read
+     * @param minQual the minimum base quality score to revert the base (inclusive)
+     * @return a new read with high quality soft clips reverted
+     */
+    public static GATKSAMRecord revertSoftClippedBases(GATKSAMRecord read, byte minQual) {
+        return revertSoftClippedBases(hardClipLowQualitySoftClips(read, minQual));
+    }
+
+    /**
+     * Hard clips away soft clipped bases that are below the given quality threshold
+     *
+     * @param read    the read
+     * @param minQual the minimum base quality score to revert the base (inclusive)
+     * @return a new read without low quality soft clipped bases
+     */
+    public static GATKSAMRecord hardClipLowQualitySoftClips(GATKSAMRecord read, byte minQual) {
+        int nLeadingSoftClips = read.getAlignmentStart() - read.getSoftStart();
+        if (read.isEmpty() || nLeadingSoftClips > read.getReadLength())
+            return GATKSAMRecord.emptyRead(read);
+
+        byte [] quals = read.getBaseQualities(EventType.BASE_SUBSTITUTION);
+        int left = -1;   // first base of the contiguous run with quality >= minQual at the inner end of the leading soft clip, or -1
+
+        // scan the leading soft clip from its inner edge outward; stop at the first low-quality base
+        if (nLeadingSoftClips > 0) {
+            for (int i = nLeadingSoftClips - 1; i >= 0; i--) {
+                if (quals[i] >= minQual)
+                    left = i;
+                else
+                    break;
+            }
+        }
+
+        int right = -1;  // last base of the contiguous run with quality >= minQual at the inner end of the trailing soft clip, or -1
+        int nTailingSoftClips = read.getSoftEnd() - read.getAlignmentEnd();
+        // scan the trailing soft clip from its inner edge outward; stop at the first low-quality base
+        if (nTailingSoftClips > 0) {
+            for (int i = read.getReadLength() - nTailingSoftClips; i < read.getReadLength() ; i++) {
+                if (quals[i] >= minQual)
+                    right = i;
+                else
+                    break;
+            }
+        }
+
+        GATKSAMRecord clippedRead = read;
+        if (right >= 0 && right + 1 < clippedRead.getReadLength())                                                      // only clip if there are softclipped bases (right >= 0) and the first high quality soft clip is not the last base (right+1 < readlength)
+                clippedRead = hardClipByReadCoordinates(clippedRead, right+1, clippedRead.getReadLength()-1);           // first we hard clip the low quality soft clips on the right tail
+        if (left >= 0 && left - 1 > 0)                                                                                  // only clip if there are softclipped bases (left >= 0) and the first high quality soft clip is not the last base (left-1 > 0)
+                clippedRead = hardClipByReadCoordinates(clippedRead, 0, left-1);                                        // then we hard clip the low quality soft clips on the left tail
+        // NOTE(review): 'left - 1 > 0' skips the left == 1 case, leaving base 0 unclipped -- confirm this is intended
+
+        return clippedRead;
+    }
+
+    /**
+     * Generic functionality to hard clip a read, used internally by hardClipByReferenceCoordinatesLeftTail
+     * and hardClipByReferenceCoordinatesRightTail. Should not be used directly.
+     *
+     * Note, it REQUIRES you to give the directionality of your hard clip (i.e. whether you're clipping the
+     * left or right tail) by specifying either refStart < 0 or refStop < 0.
+     *
+     * @param refStart  first base to clip (inclusive)
+     * @param refStop last base to clip (inclusive)
+     * @return a new read, without the clipped bases
+     */
+    @Requires({"!read.getReadUnmappedFlag()", "refStart < 0 || refStop < 0"})  // can't handle unmapped reads, as we're using reference coordinates to clip
+    protected GATKSAMRecord hardClipByReferenceCoordinates(int refStart, int refStop) {
+        if (read.isEmpty())
+            return read;
+
+        int start;
+        int stop;
+
+        // Determine the read coordinate to start and stop hard clipping
+        if (refStart < 0) {
+            // left-tail clip: from the start of the read to the read coordinate of refStop
+            if (refStop < 0)
+                throw new ReviewedGATKException("Only one of refStart or refStop must be < 0, not both (" + refStart + ", " + refStop + ")");
+            start = 0;
+            stop = ReadUtils.getReadCoordinateForReferenceCoordinate(read, refStop, ReadUtils.ClippingTail.LEFT_TAIL);
+        }
+        else {
+            // right-tail clip: from the read coordinate of refStart to the end of the read
+            if (refStop >= 0)
+                throw new ReviewedGATKException("Either refStart or refStop must be < 0 (" + refStart + ", " + refStop + ")");
+            start = ReadUtils.getReadCoordinateForReferenceCoordinate(read, refStart, ReadUtils.ClippingTail.RIGHT_TAIL);
+            stop = read.getReadLength() - 1;
+        }
+
+        if (start < 0 || stop > read.getReadLength() - 1)
+            throw new ReviewedGATKException("Trying to clip before the start or after the end of a read");
+
+        if ( start > stop )
+            throw new ReviewedGATKException(String.format("START (%d) > (%d) STOP -- this should never happen, please check read: %s (CIGAR: %s)", start, stop, read, read.getCigarString()));
+
+        if ( start > 0 && stop < read.getReadLength() - 1)
+            throw new ReviewedGATKException(String.format("Trying to clip the middle of the read: start %d, stop %d, cigar: %s", start, stop, read.getCigarString()));
+
+        this.addOp(new ClippingOp(start, stop));
+        GATKSAMRecord clippedRead = clipRead(ClippingRepresentation.HARDCLIP_BASES);
+        this.ops = null;   // reset so this clipper can be reused for another operation
+        return clippedRead;
+    }
+
+
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/codecs/beagle/BeagleCodec.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/codecs/beagle/BeagleCodec.java
new file mode 100644
index 0000000..8eaa316
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/codecs/beagle/BeagleCodec.java
@@ -0,0 +1,287 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.codecs.beagle;
+/*
+ * Copyright (c) 2010 The Broad Institute
+ *
+ * Permission is hereby granted, free of charge, to any person
+ * obtaining a copy of this software and associated documentation
+ * files (the "Software"), to deal in the Software without
+ * restriction, including without limitation the rights to use,
+ * copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the
+ * Software is furnished to do so, subject to the following
+ * conditions:
+ *
+ * The above copyright notice and this permission notice shall be
+ * included in all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+ * OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+ * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+ * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+ * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+ * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+ * THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+ */
+
+
+import htsjdk.tribble.AsciiFeatureCodec;
+import htsjdk.tribble.exception.CodecLineParsingException;
+import htsjdk.tribble.readers.LineIterator;
+import org.broadinstitute.gatk.utils.refdata.ReferenceDependentFeatureCodec;
+import org.broadinstitute.gatk.utils.GenomeLoc;
+import org.broadinstitute.gatk.utils.GenomeLocParser;
+
+import java.io.IOException;
+import java.util.*;
+import java.util.regex.Pattern;
+
+/**
+ * Codec for Beagle imputation engine
+ *
+ * <p>
+ * Reads in tabular files with site markers and genotype posteriors, genotypes and phasing that Beagle produced
+ * </p>
+ *
+ * <p>
+ * See also: @see <a href="http://faculty.washington.edu/browning/beagle/beagle.html">BEAGLE home page</a><br>
+ * </p>
+ *
+ *
+ * <h2>File format example for phased genotypes file</h2>
+ * <pre>
+ *     dummy header
+ *      20:60251 T T T T T T
+ *      20:60321 G G G G G G
+ *      20:60467 G G G G G G
+ * </pre>
+ *
+ * <h2>File format example for genotype posteriors</h2>
+ * <pre>
+ *     marker alleleA alleleB NA07056 NA07056 NA07056
+ *     20:60251 T C 0.9962 0.0038 0 0.99245 0.00755 0 0.99245 0.00755 0
+ *     20:60321 G T 0.98747 0.01253 0 0.99922 0.00078 0 0.99368 0.00632 0
+ *     20:60467 G C 0.97475 0.02525 0 0.98718 0.01282 0 0.98718 0.01282 0
+ * </pre>
+ *
+ * <h2>File format example for r2 file</h2>
+ * <pre>
+ *      20:60251        0.747
+ *      20:60321        0.763
+ *      20:60467        0.524
+ * </pre>
+ * @author Mark DePristo
+ * @since 2010
+ */
+public class BeagleCodec extends AsciiFeatureCodec<BeagleFeature> implements ReferenceDependentFeatureCodec {
+    private String[] header;                    // tokens of the header line, as produced by readActualHeader()
+    public enum BeagleReaderType {PROBLIKELIHOOD, GENOTYPES, R2};
+    private BeagleReaderType readerType;        // which Beagle file flavor this codec is decoding
+    private int valuesPerSample;                // value columns per sample: 2 for genotypes, 3 for probabilities/likelihoods, 0 for r2
+    private int initialSampleIndex;             // index of the first sample column in the header
+    private int markerPosition;                 // index of the marker ("contig:position") token on each line
+    private ArrayList<String> sampleNames;      // one entry per sample, extracted from the header
+    private int expectedTokensPerLine;          // used by decode() to validate each data line
+    private final static Set<String> HEADER_IDs = new HashSet<String>(Arrays.asList("marker", "I"));
+
+    private static final String delimiterRegex = "\\s+";
+    // codec file extension
+    protected static final String FILE_EXT = "beagle";
+
+    /**
+     * The parser to use when resolving genome-wide locations.
+     */
+    private GenomeLocParser genomeLocParser;
+
+    /** Constructs a codec that emits {@link BeagleFeature} records. */
+    public BeagleCodec() {
+        super(BeagleFeature.class);
+    }
+
+    /**
+     * Set the parser to use when resolving genome-wide locations.
+     * @param genomeLocParser The supplied parser.
+     */
+    public void setGenomeLocParser(GenomeLocParser genomeLocParser) {
+        this.genomeLocParser =  genomeLocParser;
+    }
+
+    @Override
+    public Object readActualHeader(LineIterator reader) {
+        int[] lineCounter = new int[1];   // out-parameter: number of lines readHeader consumed
+        try {
+            header = readHeader(reader, lineCounter);
+
+            Boolean getSamples = true;    // NOTE(review): boxed Boolean where a primitive boolean would do
+            markerPosition = 0; //default value for all readers
+
+            if (header[0].matches("I")) {
+                // Phased genotype Beagle files start with "I"
+                readerType = BeagleReaderType.GENOTYPES;
+                valuesPerSample = 2;      // two allele columns per sample
+                initialSampleIndex = 2;
+                markerPosition = 1;
+            }
+            else if (header[0].matches("marker")) {
+                readerType = BeagleReaderType.PROBLIKELIHOOD;
+                valuesPerSample = 3;      // three probability/likelihood columns per sample
+                initialSampleIndex = 3;
+            }
+            else {
+                // anything else is treated as an r2 file, which has no real header
+                readerType = BeagleReaderType.R2;
+                getSamples = false;
+                // signal we don't have a header
+                lineCounter[0] = 0;
+                // not needed, but for consistency:
+                valuesPerSample = 0;
+                initialSampleIndex = 0;
+            }
+
+            sampleNames = new ArrayList<String>();
+
+            if (getSamples) {
+                // the sample name is repeated once per value column (see the class javadoc example),
+                // so take every valuesPerSample-th token
+                for (int k = initialSampleIndex; k < header.length; k += valuesPerSample)
+                    sampleNames.add(header[k]);
+
+                expectedTokensPerLine = sampleNames.size()*valuesPerSample+initialSampleIndex;
+
+            } else {
+                expectedTokensPerLine = 2;   // r2 lines: marker + value
+            }
+
+
+        } catch(IOException e) {
+            throw new IllegalArgumentException("Unable to read from file.", e);
+        }
+        return header;
+    }
+
+    /**
+     * Scans forward to the first non-empty line and splits it on whitespace.
+     *
+     * @param source      the line iterator to read from
+     * @param lineCounter optional one-element out-parameter; on return element 0
+     *                    holds the number of lines consumed (may be null)
+     * @return the whitespace-delimited tokens of the first non-empty line
+     * @throws IOException declared for callers; not thrown directly in this body
+     * @throws IllegalArgumentException if no non-empty line exists
+     */
+    private static String[] readHeader(final LineIterator source, int[] lineCounter) throws IOException {
+
+        String[] header = null;
+        int numLines = 0;
+
+        // find the 1st non-empty line
+        // NOTE(review): comment lines are NOT skipped here, only blank lines
+        while(source.hasNext()) {
+            final String line = source.next();
+            numLines++;
+            if ( line.trim().isEmpty() ) {
+                continue;
+            }
+
+            //parse the header
+            header = line.split(delimiterRegex);
+            break;
+        }
+
+        // check that we found the header
+        if ( header == null ) {
+            throw new IllegalArgumentException("No header in " + source);
+        }
+
+        if(lineCounter != null) {
+            lineCounter[0] = numLines;
+        }
+
+        return header;
+    }
+
+    private static Pattern MARKER_PATTERN = Pattern.compile("(.+):([0-9]+)");
+
+    /**
+     * Decodes one Beagle line into a {@link BeagleFeature}.
+     *
+     * Requires {@link #readActualHeader} to have run first (it sets the reader
+     * type, marker column and expected token count) and a parser to have been
+     * supplied via {@link #setGenomeLocParser}.
+     *
+     * @param line the input line
+     * @return the parsed feature, or null when the line is a repeated header
+     *         line (leading "marker" or "I")
+     * @throws CodecLineParsingException if the field count does not match what
+     *         the header implied
+     */
+    @Override
+    public BeagleFeature decode(String line) {
+        String[] tokens;
+
+        // split the line
+        tokens = line.split(delimiterRegex);
+        if (tokens.length != expectedTokensPerLine)
+            throw new CodecLineParsingException("Incorrect number of fields in Beagle input on line "+line);
+
+        // skip header lines embedded in the data (e.g. concatenated files)
+        if ( HEADER_IDs.contains(tokens[0]) )
+            return null;
+
+        BeagleFeature bglFeature = new BeagleFeature();
+
+        final GenomeLoc loc = genomeLocParser.parseGenomeLoc(tokens[markerPosition]); //GenomeLocParser.parseGenomeLoc(values.get(0)); - TODO switch to this
+
+        //parse the location: common to all readers
+        // NOTE(review): GenomeLoc coordinates are narrowed from long to int here
+        bglFeature.setChr(loc.getContig());
+        bglFeature.setStart((int) loc.getStart());
+        bglFeature.setEnd((int) loc.getStop());
+
+        // Parse R2 if needed
+        if (readerType == BeagleReaderType.R2) {
+            bglFeature.setR2value(Double.valueOf(tokens[1]));
+        }
+        else if (readerType == BeagleReaderType.GENOTYPES) {
+            // read phased Genotype pairs
+            HashMap<String, ArrayList<String>> sampleGenotypes = new HashMap<String, ArrayList<String>>();
+
+            // two tokens per sample, samples start at column 2
+            for ( int i = 2; i < tokens.length; i+=2 ) {
+                String sampleName = sampleNames.get(i/2-1);
+                if ( ! sampleGenotypes.containsKey(sampleName) ) {
+                    sampleGenotypes.put(sampleName, new ArrayList<String>());
+                }
+
+                sampleGenotypes.get(sampleName).add(tokens[i]);
+                sampleGenotypes.get(sampleName).add(tokens[i+1]);
+            }
+
+            bglFeature.setGenotypes(sampleGenotypes);
+        }
+        else {
+            // read probabilities/likelihood trios and alleles
+            // columns 1 and 2 are the alleles; allele A is flagged as reference
+            bglFeature.setAlleleA(tokens[1], true);
+            bglFeature.setAlleleB(tokens[2], false);
+            HashMap<String, ArrayList<String>> sampleProbLikelihoods = new HashMap<String, ArrayList<String>>();
+
+            // three tokens per sample, samples start at column 3
+            for ( int i = 3; i < tokens.length; i+=3 ) {
+                String sampleName = sampleNames.get(i/3-1);
+                if ( ! sampleProbLikelihoods.containsKey(sampleName) ) {
+                    sampleProbLikelihoods.put(sampleName, new ArrayList<String>());
+                }
+
+                sampleProbLikelihoods.get(sampleName).add(tokens[i]);
+                sampleProbLikelihoods.get(sampleName).add(tokens[i+1]);
+                sampleProbLikelihoods.get(sampleName).add(tokens[i+2]);
+            }
+            bglFeature.setProbLikelihoods(sampleProbLikelihoods);
+        }
+
+        return bglFeature;
+    }
+
+    /**
+     * Can the file be decoded?
+     * Decided purely by filename suffix ("." + {@code FILE_EXT}); the content
+     * is not inspected.
+     *
+     * @param path path the file to test for parsability with this codec
+     * @return true if the path has the correct file extension, false otherwise
+     */
+    @Override
+    public boolean canDecode(final String path) { return path.endsWith("." + FILE_EXT); }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/codecs/beagle/BeagleFeature.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/codecs/beagle/BeagleFeature.java
new file mode 100644
index 0000000..3990836
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/codecs/beagle/BeagleFeature.java
@@ -0,0 +1,118 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.codecs.beagle;
+
+import htsjdk.tribble.Feature;
+import htsjdk.variant.variantcontext.Allele;
+
+import java.util.ArrayList;
+import java.util.Map;
+
+/**
+ * A Tribble {@link Feature} holding one record parsed from a Beagle file:
+ * a location plus, depending on the file flavor, either per-sample phased
+ * genotype pairs, per-sample probability/likelihood trios with the two
+ * alleles, or an R2 value. Setters are protected: instances are populated
+ * by the codec in this package.
+ */
+public class BeagleFeature implements Feature {
+
+    // location of the marker
+    private String chr;
+    private int start;
+    private int end;
+
+    // sample name -> list of genotype strings (pairs, in input order)
+    Map<String, ArrayList<String>> sampleGenotypes;
+    // R2 value for R2-flavored files; null otherwise
+    private Double r2Value;
+    // sample name -> list of probability/likelihood strings (trios, in input order)
+    Map<String, ArrayList<String>> probLikelihoods;
+
+    // AlleleA is created as the reference allele by the codec
+    Allele AlleleA;
+    Allele AlleleB;
+
+    @Override
+    public String getChr() {
+        return getContig();
+    }
+
+    @Override
+    public String getContig() {
+        return chr;
+    }
+
+    @Override
+    public int getStart() {
+        return start;
+    }
+
+    @Override
+    public int getEnd() {
+        return end;
+    }
+
+    public Double getR2value() {
+        return r2Value;
+    }
+
+    public Allele getAlleleA() {
+        return AlleleA;
+    }
+
+    public Allele getAlleleB() {
+        return AlleleB;
+    }
+
+    public Map<String, ArrayList<String>> getProbLikelihoods() {
+        return probLikelihoods;
+    }
+
+    public Map<String, ArrayList<String>> getGenotypes() {
+        return sampleGenotypes;        
+    }
+
+    protected void setChr(String chr) {
+       this.chr = chr;
+    }
+
+    protected void setStart(int start) {
+        this.start = start;
+    }
+
+    protected void setEnd(int end) {
+        this.end = end;
+    }
+
+    protected void setR2value(double r2) {
+        this.r2Value = r2;
+    }
+
+    protected void setAlleleA(String a, boolean isRef) {
+        this.AlleleA = Allele.create(a, isRef);
+    }
+
+    protected void setAlleleB(String a, boolean isRef) {
+        this.AlleleB = Allele.create(a, isRef);
+    }
+
+    protected void setProbLikelihoods(Map<String, ArrayList<String>> p) {
+        this.probLikelihoods = p;
+    }
+
+    protected void setGenotypes(Map<String, ArrayList<String>> p) {
+        this.sampleGenotypes = p;
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/codecs/hapmap/RawHapMapCodec.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/codecs/hapmap/RawHapMapCodec.java
new file mode 100644
index 0000000..8f49dd0
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/codecs/hapmap/RawHapMapCodec.java
@@ -0,0 +1,135 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.codecs.hapmap;
+
+import htsjdk.tribble.AsciiFeatureCodec;
+import htsjdk.tribble.FeatureCodecHeader;
+import htsjdk.tribble.annotation.Strand;
+import htsjdk.tribble.readers.LineIterator;
+
+import java.io.IOException;
+import java.util.Arrays;
+
+/**
+ * A codec for the file types produced by the HapMap consortium
+ *
+ * <p>
+ *     The format includes eleven standard fields, plus genotypes for each of the samples included
+ *     in the file:
+ *
+ * <pre>
+ *     Col1: refSNP rs# identifier at the time of release (NB might merge with another rs# in the future)
+ *     Col2: SNP alleles according to dbSNP
+ *     Col3: chromosome that SNP maps to
+ *     Col4: chromosome position of SNP, in basepairs on reference sequence
+ *     Col5: strand of reference sequence that SNP maps to
+ *     Col6: version of reference sequence assembly
+ *     Col7: HapMap genotype center that produced the genotypes
+ *     Col8: LSID for HapMap protocol used for genotyping
+ *     Col9: LSID for HapMap assay used for genotyping
+ *     Col10: LSID for panel of individuals genotyped
+ *     Col11: QC-code, currently 'QC+' for all entries (for future use)
+ *     Col12 and on: observed genotypes of samples, one per column, sample identifiers in column headers (Coriell catalog numbers, example: NA10847). Duplicate samples have .dup suffix.
+ * </pre>
+ * </p>
+ *
+ * <p>
+ *  See also: @See <a href="http://hapmap.ncbi.nlm.nih.gov/downloads/genotypes/">HapMap genotypes download</a>
+ * </p>
+ *
+ * <h2>File format example</h2>
+ * From <a href="http://hapmap.ncbi.nlm.nih.gov/downloads/genotypes/latest/forward/non-redundant/genotypes_chr1_ASW_r27_nr.b36_fwd.txt.gz">genotypes_chr1_ASW_r27_nr.b36_fwd.txt.gz</a>:
+ * <pre>
+ *     rs# alleles chrom pos strand assembly# center protLSID assayLSID panelLSID QCcode NA19625 NA19700 NA19701 NA19702 NA19703 NA19704 NA19705 NA19708 NA19712 NA19711 NA19818 NA19819 NA19828 NA19835 NA19834 NA19836 NA19902 NA19901 NA19900 NA19904 NA19919 NA19908 NA19909 NA19914 NA19915 NA19916 NA19917 NA19918 NA19921 NA20129 NA19713 NA19982 NA19983 NA19714 NA19985 NA20128 NA20126 NA20127 NA20277 NA20276 NA20279 NA20282 NA20281 NA20284 NA20287 NA20288 NA20290 NA20289 NA20291 NA20292 NA2 [...]
+ *     rs9629043 C/T chr1 554636 + ncbi_b36 broad urn:LSID:affymetrix.hapmap.org:Protocol:GenomeWideSNP_6.0:3 urn:LSID:broad.hapmap.org:Assay:SNP_A-8575115:3 urn:lsid:dcc.hapmap.org:Panel:US_African-30-trios:3 QC+ CC CC CC CC CC CC CC CC CC CC CC CC NN CC CC CC CT CT CC CC CC CC CC CC CC CC CC CC CC CC CC CC CC CC CC CC CC CC CC CC CC CC CC CC CC CC CC CC CC CC CC CC CC CC CT CC CC CC CC CC CC CC CC CC CC CC CC CC CC CC CC CC CC CC CC CC CC CC CC CC CC CC CC
+ *     rs28446478 G/T chr1 576058 + ncbi_b36 sanger urn:LSID:illumina.hapmap.org:Protocol:Human_1M_BeadChip:3 urn:LSID:sanger.hapmap.org:Assay:H1Mrs28446478:3 urn:lsid:dcc.hapmap.org:Panel:US_African-30-trios:3 QC+ GT TT GT TT TT TT TT GT GT TT TT TT TT GT GT GT GT TT GT TT GT GT TT GT GT TT TT TT GT GT TT TT TT GT TT GT TT GT GT GT GT GT TT GT TT TT GT GT TT TT TT TT TT TT GT GT GT GT TT TT TT TT GT TT GT TT TT GT TT TT TT GT TT TT TT GT GT TT GT TT GT TT TT
+ *     rs12565286 C/G chr1 711153 + ncbi_b36 broad urn:LSID:affymetrix.hapmap.org:Protocol:GenomeWideSNP_6.0:3 urn:LSID:broad.hapmap.org:Assay:SNP_A-8709646:3 urn:lsid:dcc.hapmap.org:Panel:US_African-30-trios:3 QC+ GG GG GG GG GG GG GG GG CG GG GG GG GG GG GG GG GG GG GG CG GG GG GG GG GG GG GG GG GG GG GG GG GG GG GG GG GG GG GG GG CG GG GG GG GG GG GG GG CG CG GG GG GG GG GG GG GG GG GG CG CG GG GG GG GG GG GG GG GG GG GG CG NN GG GG GG GG GG GG NN GG NN NN
+ * </pre>
+ *
+ * @author Mark DePristo
+ * @since 2010
+ */
+public class RawHapMapCodec extends AsciiFeatureCodec<RawHapMapFeature> {
+    // the minimum number of features in the HapMap file line
+    // (the 11 fixed columns; genotype columns, if any, follow from index 11 on)
+    private static final int minimumFeatureCount = 11;
+
+    // raw header line, retained so features can recover the sample names
+    private String headerLine;
+    // codec file extension
+    protected static final String FILE_EXT = "hapmap";
+
+    public RawHapMapCodec() {
+        super(RawHapMapFeature.class);
+    }
+
+    /**
+     * decode the hapmap record
+     * Splits on whitespace; columns 0-10 are the fixed fields described in the
+     * class Javadoc, and any remaining columns are per-sample genotypes.
+     *
+     * @param line the input line to decode
+     * @return a HapMapFeature, with the given fields
+     * @throws IllegalArgumentException if fewer than {@code minimumFeatureCount}
+     *         fields are present
+     */
+    public RawHapMapFeature decode(String line) {
+        String[] array = line.split("\\s+");
+
+        // make sure the split was successful - that we got an appropriate number of fields
+        if (array.length < minimumFeatureCount)
+            throw new IllegalArgumentException("Unable to parse line " + line + ", the length of split features is less than the minimum of " + minimumFeatureCount);
+
+        // create a new feature given the array
+        return new RawHapMapFeature(array[0],
+                array[1].split("/"),
+                array[2],
+                Long.valueOf(array[3]),
+                Strand.toStrand(array[4]),
+                array[5],
+                array[6],
+                array[7],
+                array[8],
+                array[9],
+                array[10],
+                Arrays.copyOfRange(array,11,array.length),
+                headerLine);
+    }
+
+    /**
+     * Can the file be decoded?
+     * Decided purely by filename suffix; the content is not inspected.
+     *
+     * @param path path the file to test for parsability with this codec
+     * @return true if the path has the correct file extension, false otherwise
+     */
+    @Override
+    public boolean canDecode(final String path) { return path.endsWith("." + FILE_EXT); }
+
+    /**
+     * Consumes the first line as the header and stashes it for later use by
+     * {@link #decode(String)} when building features.
+     */
+    @Override
+    public Object readActualHeader(final LineIterator lineIterator) {
+        this.headerLine = lineIterator.next();
+        return headerLine;
+    }
+
+    @Override
+    public FeatureCodecHeader readHeader(final LineIterator lineIterator) throws IOException {
+        final String header = (String) readActualHeader(lineIterator);
+        // header end offset assumes a single '\n' terminator
+        // TODO: This approach may cause issues with files formatted with \r\n-style line-endings.
+        return new FeatureCodecHeader(header, header.length() + 1);
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/codecs/hapmap/RawHapMapFeature.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/codecs/hapmap/RawHapMapFeature.java
new file mode 100644
index 0000000..2dc23fc
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/codecs/hapmap/RawHapMapFeature.java
@@ -0,0 +1,204 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.codecs.hapmap;
+
+import htsjdk.tribble.Feature;
+import htsjdk.tribble.annotation.Strand;
+import htsjdk.variant.variantcontext.Allele;
+
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * a feature returned by the HapMap Codec - it represents contig, position, name,
+ * alleles, other hapmap information, and genotypes for specified samples
+ */
+/**
+ * a feature returned by the HapMap Codec - it represents contig, position, name,
+ * alleles, other hapmap information, and genotypes for specified samples
+ */
+public class RawHapMapFeature implements Feature {
+
+    // special allele spellings used in HapMap files
+    public static final String NULL_ALLELE_STRING = "-";
+    public static final String INSERTION = "I";
+    public static final String DELETION = "D";
+
+    // the variables we store internally in the class
+    private final String name;
+    private final String[] alleles;
+    // lazily populated mapping from allele string to actual Allele; null until set
+    private Map<String, Allele> actualAlleles = null;
+    private final String contig ;
+    // mutable: may be corrected later via updatePosition()
+    private long position;
+    private final Strand strand;
+    private final String assembly;
+    private final String center;
+    private final String protLSID;
+    private final String assayLSID;
+    private final String panelLSID;
+    private final String qccode;
+    private final String[] genotypes;
+
+    // we store the header line, if they'd like to get the samples
+    private final String headerLine;
+
+    /**
+     * create a HapMap Feature, based on all the records available in the hapmap file
+     * @param name the refSNP rs# identifier
+     * @param alleles the SNP alleles (already split on '/')
+     * @param contig the contig name
+     * @param position the position
+     * @param strand the strand enum
+     * @param assembly what assembly this feature is from
+     * @param center the center that provided this SNP
+     * @param protLSID LSID of the genotyping protocol (column 8 of the file; stored, not interpreted)
+     * @param assayLSID LSID of the genotyping assay (column 9; stored, not interpreted)
+     * @param panelLSID LSID of the genotyped individuals panel (column 10; stored, not interpreted)
+     * @param qccode the QC code column (stored, not interpreted)
+     * @param genotypes a list of strings, representing the genotypes for the list of samples
+     * @param headerLine the raw header line, used by getSampleIDs() to recover sample names
+     */
+    public RawHapMapFeature(String name,
+                            String[] alleles,
+                            String contig,
+                            Long position,
+                            Strand strand,
+                            String assembly,
+                            String center,
+                            String protLSID,
+                            String assayLSID,
+                            String panelLSID,
+                            String qccode,
+                            String[] genotypes,
+                            String headerLine) {
+        this.name = name;
+        this.alleles = alleles;
+        this.contig = contig;
+        this.position = position;
+        this.strand = strand;
+        this.assembly =  assembly;
+        this.center =  center;
+        this.protLSID = protLSID ;
+        this.assayLSID = assayLSID ;
+        this.panelLSID = panelLSID ;
+        this.qccode = qccode;
+        this.genotypes = genotypes;
+        this.headerLine = headerLine;
+    }
+
+    /**
+     * get the contig value
+     * @return a string representing the contig
+     */
+    public String getChr() {
+        return getContig();
+    }
+
+    /**
+     * get the contig value
+     * @return a string representing the contig
+     */
+    public String getContig() {
+        return contig;
+    }
+
+    /**
+     * get the start position, as an integer
+     * note: the stored long position is narrowed to int here
+     * @return an int, representing the start position
+     */
+    public int getStart() {
+        return (int)position;
+    }
+
+    /**
+     * get the end position (same as start: features span a single base)
+     * @return get the end position as an int
+     */
+    public int getEnd() {
+        return (int)position;
+    }
+
+    /**
+     * Getter methods
+     */
+
+    public Strand getStrand() {
+        return strand;
+    }
+
+    public String getAssembly() {
+        return assembly;
+    }
+
+    public String getCenter() {
+        return center;
+    }
+
+    public String getProtLSID() {
+        return protLSID;
+    }
+
+    public String getAssayLSID() {
+        return assayLSID;
+    }
+
+    public String getPanelLSID() {
+        return panelLSID;
+    }
+
+    public String getQCCode() {
+        return qccode;
+    }
+
+    public String getName() {
+        return name;
+    }
+
+    public String[] getAlleles() {
+        return alleles;
+    }
+
+    public String[] getGenotypes() {
+        return genotypes;
+    }
+
+    // This is necessary because HapMap places insertions in the incorrect position
+    public void updatePosition(long position) {
+        this.position = position;
+    }
+
+    // stores a defensive copy of the supplied map
+    public void setActualAlleles(Map<String, Allele> alleleMap) {
+        actualAlleles = new HashMap<String, Allele>(alleleMap);
+    }
+
+    // may be null if setActualAlleles() was never called
+    public Map<String, Allele> getActualAlleles() {
+        return actualAlleles;
+    }
+    
+    /**
+     * get a list of the samples from the header (in order)
+     * sample names are the header tokens after the 11 fixed columns
+     * @return a string array of sample names
+     */
+    public String[] getSampleIDs() {
+		String[] header = headerLine.split("\\s+");
+		String[] sample_ids = new String[header.length-11];
+		for (int i = 11; i < header.length; i++)
+			sample_ids[i-11] = header[i];
+		return sample_ids;
+	}
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/codecs/refseq/RefSeqCodec.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/codecs/refseq/RefSeqCodec.java
new file mode 100644
index 0000000..4a2d15c
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/codecs/refseq/RefSeqCodec.java
@@ -0,0 +1,181 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.codecs.refseq;
+
+import htsjdk.tribble.AsciiFeatureCodec;
+import htsjdk.tribble.Feature;
+import htsjdk.tribble.TribbleException;
+import htsjdk.tribble.readers.LineIterator;
+import org.broadinstitute.gatk.utils.refdata.ReferenceDependentFeatureCodec;
+import org.broadinstitute.gatk.utils.GenomeLoc;
+import org.broadinstitute.gatk.utils.GenomeLocParser;
+import org.broadinstitute.gatk.utils.Utils;
+import org.broadinstitute.gatk.utils.exceptions.UserException;
+
+import java.util.ArrayList;
+
+/**
+ * Allows for reading in RefSeq information
+ *
+ * <p>
+ * Parses a sorted UCSC RefSeq file (see below) into relevant features: the gene name, the unique gene name (if multiple transcripts get separate entries), exons, gene start/stop, coding start/stop,
+ * strandedness of transcription.
+ * </p>
+ *
+ * <p>
+ * Instructions for generating a RefSeq file for use with the RefSeq codec can be found on the documentation guide here
+ * <a href="http://www.broadinstitute.org/gatk/guide/article?id=1329">http://www.broadinstitute.org/gatk/guide/article?id=1329</a>
+ * </p>
+ * <h2> Usage </h2>
+ * The RefSeq Rod can be bound as any other rod, and is specified by REFSEQ, for example
+ * <pre>
+ * -refSeqBinding:REFSEQ /path/to/refSeq.txt
+ * </pre>
+ *
+ * You will need to consult individual walkers for the binding name ("refSeqBinding", above)
+ *
+ * <h2>File format example</h2>
+ * If you want to define your own file for use, the format is (tab delimited):
+ * bin, name, chrom, strand, transcription start, transcription end, coding start, coding end, num exons, exon starts, exon ends, id, alt. name, coding start status (complete/incomplete), coding end status (complete,incomplete)
+ * and exon frames, for example:
+ * <pre>
+ * 76 NM_001011874 1 - 3204562 3661579 3206102 3661429 3 3204562,3411782,3660632, 3207049,3411982,3661579, 0 Xkr4 cmpl cmpl 1,2,0,
+ * </pre>
+ * for more information see <a href="http://skip.ucsc.edu/cgi-bin/hgTables?hgsid=5651&hgta_doSchemaDb=mm8&hgta_doSchemaTable=refGene">here</a>
+ * <p>
+ *     
+ * </p>
+ *
+ * @author Mark DePristo
+ * @since 2010
+ */
+public class RefSeqCodec extends AsciiFeatureCodec<RefSeqFeature> implements ReferenceDependentFeatureCodec {
+
+    // codec file extension
+    protected static final String FILE_EXT = "refseq";
+    /**
+     * The parser to use when resolving genome-wide locations.
+     */
+    private GenomeLocParser genomeLocParser;
+    // ensures the zero-coding-length warning is emitted at most once
+    private boolean zero_coding_length_user_warned = false;
+
+    public RefSeqCodec() {
+        super(RefSeqFeature.class);
+    }
+
+    /**
+     * Set the parser to use when resolving genetic data.
+     * Must be supplied before decoding; both decode methods dereference it.
+     * @param genomeLocParser The supplied parser.
+     */
+    @Override
+    public void setGenomeLocParser(GenomeLocParser genomeLocParser) {
+        this.genomeLocParser =  genomeLocParser;
+    }
+
+    /**
+     * Decodes only the location (transcript interval) of the next line.
+     * UCSC starts are 0-based, hence the +1 on the start coordinate.
+     *
+     * @return a minimal feature carrying just the location, or null for
+     *         comment lines and malformed (negative-length) intervals
+     */
+    @Override
+    public Feature decodeLoc(final LineIterator lineIterator) {
+        final String line = lineIterator.next();
+        if (line.startsWith("#")) return null;
+        String fields[] = line.split("\t");
+        if (fields.length < 3) throw new TribbleException("RefSeq (decodeLoc) : Unable to parse line -> " + line + ", we expected at least 3 columns, we saw " + fields.length);
+        String contig_name = fields[2];
+        try {
+            return new RefSeqFeature(genomeLocParser.createGenomeLoc(contig_name, Integer.parseInt(fields[4])+1, Integer.parseInt(fields[5])));
+        } catch ( UserException.MalformedGenomeLoc e ) {
+            // NOTE(review): the warning prints the contig (fields[2]), not the offending coordinates
+            Utils.warnUser("RefSeq file is potentially incorrect, as some transcripts or exons have a negative length ("+fields[2]+")");
+            return null;
+        } catch ( NumberFormatException e ) {
+            throw new UserException.MalformedFile("Could not parse location from line: " + line);
+        }
+    }
+
+    /** Fills this object from a text line in RefSeq (UCSC) text dump file.
+     * Returns null for comment lines and for transcripts with zero coding
+     * length (warned about once). UCSC start coordinates are 0-based, hence
+     * the +1 adjustments below. */
+    @Override
+    public RefSeqFeature decode(String line) {
+        if (line.startsWith("#")) return null;
+        String fields[] = line.split("\t");
+
+        // we reference position 15 in the split array below, make sure we have at least that many columns
+        if (fields.length < 16) throw new TribbleException("RefSeq (decode) : Unable to parse line -> " + line + ", we expected at least 16 columns, we saw " + fields.length);
+        String contig_name = fields[2];
+        RefSeqFeature feature = new RefSeqFeature(genomeLocParser.createGenomeLoc(contig_name, Integer.parseInt(fields[4])+1, Integer.parseInt(fields[5])));
+
+        feature.setTranscript_id(fields[1]);
+        if ( fields[3].length()==1 && fields[3].charAt(0)=='+') feature.setStrand(1);
+        else if ( fields[3].length()==1 && fields[3].charAt(0)=='-') feature.setStrand(-1);
+        else throw new UserException.MalformedFile("Expected strand symbol (+/-), found: "+fields[3] + " for line=" + line);
+
+        int coding_start = Integer.parseInt(fields[6])+1;
+        int coding_stop = Integer.parseInt(fields[7]);
+
+        if ( coding_start > coding_stop ) {
+            if ( ! zero_coding_length_user_warned ) {
+                Utils.warnUser("RefSeq file contains transcripts with zero coding length. "+
+                        "Such transcripts will be ignored (this warning is printed only once)");
+                zero_coding_length_user_warned = true;
+            }
+            return null;
+        }
+
+        feature.setTranscript_interval(genomeLocParser.createGenomeLoc(contig_name, Integer.parseInt(fields[4])+1, Integer.parseInt(fields[5])));
+        feature.setTranscript_coding_interval(genomeLocParser.createGenomeLoc(contig_name, coding_start, coding_stop));
+        feature.setGene_name(fields[12]);
+        // exon starts/stops/frames are comma-separated lists of equal length
+        String[] exon_starts = fields[9].split(",");
+        String[] exon_stops = fields[10].split(",");
+        String[] eframes = fields[15].split(",");
+
+        if ( exon_starts.length != exon_stops.length )
+            throw new UserException.MalformedFile("Data format error: numbers of exon start and stop positions differ for line=" + line);
+        if ( exon_starts.length != eframes.length )
+            throw new UserException.MalformedFile("Data format error: numbers of exons and exon frameshifts differ for line=" + line);
+
+        ArrayList<GenomeLoc> exons = new ArrayList<GenomeLoc>(exon_starts.length);
+        ArrayList<Integer> exon_frames = new ArrayList<Integer>(eframes.length);
+
+        for ( int i = 0 ; i < exon_starts.length  ; i++ ) {
+            exons.add(genomeLocParser.createGenomeLoc(contig_name, Integer.parseInt(exon_starts[i])+1, Integer.parseInt(exon_stops[i]) ) );
+            exon_frames.add(Integer.decode(eframes[i]));
+        }
+
+        feature.setExons(exons);
+        feature.setExon_frames(exon_frames);
+        return feature;
+    }
+
+    /**
+     * Can the file be decoded?
+     * Decided purely by filename suffix; the content is not inspected.
+     * @param path path the file to test for parsability with this codec
+     * @return true if the path has the correct file extension, false otherwise
+     */
+    @Override
+    public boolean canDecode(final String path) { return path.endsWith("." + FILE_EXT); }
+
+    @Override
+    public Object readActualHeader(LineIterator lineIterator) {
+        // No header for this format
+        return null;
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/codecs/refseq/RefSeqFeature.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/codecs/refseq/RefSeqFeature.java
new file mode 100644
index 0000000..62d4b05
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/codecs/refseq/RefSeqFeature.java
@@ -0,0 +1,331 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.codecs.refseq;
+
+import htsjdk.tribble.Feature;
+import org.broadinstitute.gatk.utils.refdata.utils.GATKFeature;
+import org.broadinstitute.gatk.utils.refdata.utils.RODRecordList;
+import org.broadinstitute.gatk.utils.GenomeLoc;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+
+import java.util.*;
+
+/**
+ * the ref seq feature
+ */
+public class RefSeqFeature implements Transcript, Feature {
+
+    private String transcript_id;
+    private int strand;
+    private GenomeLoc transcript_interval;
+    private GenomeLoc transcript_coding_interval;
+    private List<GenomeLoc> exons;
+    private String gene_name;
+    private List<Integer> exon_frames;
+    private String name;
+
+    public RefSeqFeature(GenomeLoc genomeLoc) {
+        this.transcript_interval = genomeLoc;
+    }
+
+    /** Returns id of the transcript (RefSeq NM_* id) */
+    public String getTranscriptId() { return transcript_id; }
+
+    /** Returns coding strand of the transcript, 1 or -1 for positive or negative strand, respectively */
+    public int getStrand() { return strand; }
+
+    /** Returns transcript's full genomic interval (includes all exons with UTRs) */
+    public GenomeLoc getLocation() {
+        return transcript_interval;
+    }
+
+    /** Returns genomic interval of the coding sequence (does not include UTRs, but still includes introns, since it's a single interval on the DNA) */
+    public GenomeLoc getCodingLocation() { return transcript_coding_interval; }
+
+    /** Name of the gene this transcript corresponds to (NOT gene id such as Entrez etc) */
+    public String getGeneName() { return gene_name; }
+
+    /** Number of exons in this transcript */
+    public int getNumExons() { return exons.size(); }
+
+    /** Genomic location of the n-th exon; throws an exception if n is out of bounds */
+    public GenomeLoc getExonLocation(int n) {
+        if ( n >= exons.size() || n < 0 ) throw new ReviewedGATKException("Index out-of-bounds. Transcript has " + exons.size() +" exons; requested: "+n);
+        return exons.get(n);
+    }
+
+    /** Returns the list of all exons in this transcript, as genomic intervals */
+    public List<GenomeLoc> getExons() { return exons; }
+
+    /** Returns all exons falling ::entirely:: inside an interval **/
+    public List<GenomeLoc> getExonsInInterval( GenomeLoc interval ) {
+        List<GenomeLoc> relevantExons = new ArrayList<GenomeLoc>(exons.size());
+        for ( GenomeLoc exon : getExons() ) {
+            if ( interval.containsP(exon) ) {
+                relevantExons.add(exon);
+            }
+        }
+
+        return relevantExons;
+    }
+
+    /** convenience method; returns the numbers of the exons in the interval **/
+    public List<Integer> getExonNumbersInInterval( GenomeLoc interval ) {
+        List<Integer> numbers = new ArrayList<Integer>();
+        int iNo = 0;
+        for ( GenomeLoc exon : getExons() ) {
+            if ( interval.containsP(exon) ) {
+                numbers.add(iNo);
+            }
+            iNo++;
+        }
+
+        return numbers;
+    }
+
+    public String getTranscriptUniqueGeneName() {
+        return String.format("%s(%s)",getGeneName(),getTranscriptId());
+    }
+
+    public String getOverlapString(GenomeLoc position) {
+        boolean is_exon = false;
+        StringBuilder overlapString = new StringBuilder();
+        int exonNo = 1;
+
+        for ( GenomeLoc exon : exons ) {
+            if ( exon.containsP(position) ) {
+                overlapString.append(String.format("exon_%d",exonNo));
+                is_exon = true;
+                break;
+            }
+            exonNo ++;
+        }
+
+        if ( ! is_exon ) {
+            if ( overlapsCodingP(position) ) {
+                overlapString.append("Intron");
+            } else {
+                overlapString.append("UTR");
+            }
+        }
+
+        return overlapString.toString();
+    }
+
+    ArrayList<GenomeLoc> exonInRefOrderCache = null;
+
+    public Integer getSortedOverlapInteger(GenomeLoc position) {
+        int exonNo = -1;
+        ArrayList<GenomeLoc> exonsInReferenceOrder = exonInRefOrderCache != null ? exonInRefOrderCache : new ArrayList<GenomeLoc>(exons);
+        if ( exonInRefOrderCache == null ) {
+            Collections.sort(exonsInReferenceOrder);
+        }
+        exonInRefOrderCache = exonsInReferenceOrder;
+        for ( GenomeLoc exon : exonsInReferenceOrder ) {
+            if ( exon.overlapsP(position) ) {
+                return ++exonNo;
+            }
+            ++exonNo;
+        }
+
+        return -1;
+    }
+
+    public GenomeLoc getSortedExonLoc(int offset) {
+        ArrayList<GenomeLoc> exonsInReferenceOrder = exonInRefOrderCache != null ? exonInRefOrderCache : new ArrayList<GenomeLoc>(exons);
+        if ( exonInRefOrderCache == null ) {
+            Collections.sort(exonsInReferenceOrder);
+        }
+        exonInRefOrderCache = exonsInReferenceOrder;
+        return exonsInReferenceOrder.get(offset);
+    }
+
+    /** Returns true if the specified interval 'that' overlaps with the full genomic interval of this transcript */
+    public boolean overlapsP (GenomeLoc that) {
+        return getLocation().overlapsP(that);
+    }
+
+    /** Returns true if the specified interval 'that' overlaps with the coding genomic interval of this transcript.
+     * NOTE: since "coding interval" is still a single genomic interval, it will not contain UTRs of the outermost exons,
+     * but it will still contain introns and/or exons internal to this genomic locus that are not spliced into this transcript.
+     * @see #overlapsExonP
+     */
+    public boolean overlapsCodingP (GenomeLoc that) {
+        return transcript_coding_interval.overlapsP(that);
+    }
+
+    /** Returns true if the specified interval 'that' overlaps with any of the exons actually spliced into this transcript */
+    public boolean overlapsExonP (GenomeLoc that) {
+        for ( GenomeLoc e : exons ) {
+            if ( e.overlapsP(that) ) return true;
+        }
+        return false;
+    }
+    public String toString() {
+            StringBuilder b = new StringBuilder("000\t"); // first field is unused but required in th ecurrent format; just set to something
+            b.append(transcript_id);   // #1
+            b.append('\t');
+            b.append(getLocation().getContig()); // #2
+            b.append('\t');
+            b.append( (strand==1?'+':'-') ); // #3
+            b.append('\t');
+            b.append( (getLocation().getStart() - 1) ); // #4
+            b.append('\t');
+            b.append( getLocation().getStop());  // #5
+            b.append('\t');
+            b.append( (transcript_coding_interval.getStart() - 1) ); // #6
+            b.append('\t');
+            b.append( transcript_coding_interval.getStop());  // #7
+            b.append('\t');
+            b.append(exons.size()); // #8
+            b.append('\t');
+            for ( GenomeLoc loc : exons ) { b.append( (loc.getStart()-1) ); b.append(','); } // #9
+            b.append('\t');
+            for ( GenomeLoc loc : exons ) { b.append( loc.getStop() ); b.append(','); } // #10
+            b.append("\t0\t"); // # 11 - unused?
+            b.append(gene_name); // # 12
+            b.append("\tcmpl\tcmpl\t"); // #13, #14 - unused?
+            for ( Integer f : exon_frames ) { b.append( f ); b.append(','); } // #15
+
+
+            return b.toString();
+        }
+
+        /** Convenience method, which is packaged here for a lack of better place; it is indeed closely related to
+         * rodRefSeq though: takes list of rods (transcripts) overlapping with a given position and determines whether
+         * this position is fully whithin an exon of <i>any</i> of those transcripts. Passing null is safe (will return false).
+         * NOTE: position can be still within a UTR, see #isCoding
+         * @return true if it's an exon
+         */
+        public static boolean isExon(RODRecordList l) {
+
+            if ( l == null ) return false;
+
+            GenomeLoc loc = l.getLocation();
+
+            for ( GATKFeature t : l ) {
+                if ( ((RefSeqFeature)t.getUnderlyingObject()).overlapsExonP(loc) ) return true;
+            }
+            return false;
+
+        }
+
+        /** Convenience method, which is packaged here for a lack of better place; it is indeed closely related to
+         * rodRefSeq though: takes list of rods (transcripts) overlapping with a given position and determines whether
+         * this position is fully whithin a coding region of <i>any</i> of those transcripts.
+         * Passing null is safe (will return false).
+         * NOTE: "coding" interval is defined as a single genomic interval, so it
+         * does not include the UTRs of the outermost exons, but it includes introns between exons spliced into a
+         * transcript, or internal exons that are not spliced into a given transcript. To check that a position is
+         * indeed within an exon but not in UTR, use #isCodingExon().
+         * @return
+         */
+        public static boolean isCoding(RODRecordList l) {
+
+            if ( l == null ) return false;
+
+            GenomeLoc loc = l.getLocation();
+
+            for ( GATKFeature t : l ) {
+                if ( ((RefSeqFeature)t.getUnderlyingObject()).overlapsCodingP(loc) ) return true;
+            }
+            return false;
+
+        }
+
+        /** Convenience method, which is packaged here for a lack of better place; it is indeed closely related to
+         * rodRefSeq though: takes list of rods (transcripts) overlapping with a given position and determines whether
+         * this position is fully whithin a coding exon portion (i.e. true coding sequence) of <i>any</i> of those transcripts.
+         * Passing null is safe (will return false). In other words, this method returns true if the list contains a transcript,
+         * for which the current position is within an exon <i>and</i> within a coding interval simultaneously.
+         * @return
+         */
+        public static boolean isCodingExon(RODRecordList l) {
+
+            if ( l == null ) return false;
+
+            GenomeLoc loc = l.getLocation();
+
+            for ( GATKFeature t : l ) {
+                if ( ((RefSeqFeature)t.getUnderlyingObject()).overlapsCodingP(loc) && ((RefSeqFeature)t.getUnderlyingObject()).overlapsExonP(loc) ) return true;
+            }
+            return false;
+
+        }
+
+
+    public void setTranscript_id(String transcript_id) {
+        this.transcript_id = transcript_id;
+    }
+
+    public void setStrand(int strand) {
+        this.strand = strand;
+    }
+
+    public void setTranscript_interval(GenomeLoc transcript_interval) {
+        this.transcript_interval = transcript_interval;
+    }
+
+    public void setTranscript_coding_interval(GenomeLoc transcript_coding_interval) {
+        this.transcript_coding_interval = transcript_coding_interval;
+    }
+
+    public void setExons(List<GenomeLoc> exons) {
+        this.exons = exons;
+    }
+
+    public void setGene_name(String gene_name) {
+        this.gene_name = gene_name;
+    }
+
+    public void setExon_frames(List<Integer> exon_frames) {
+        this.exon_frames = exon_frames;
+    }
+
+    public void setName(String name) {
+        this.name = name;
+    }
+
+    @Override
+    public String getChr() {
+        return getContig();
+    }
+
+    @Override
+    public String getContig() {
+        return transcript_interval.getContig();
+    }
+
+    @Override
+    public int getStart() {
+        return transcript_interval.getStart();
+    }
+
+    @Override
+    public int getEnd() {
+        return transcript_interval.getStop();
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/codecs/refseq/Transcript.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/codecs/refseq/Transcript.java
new file mode 100644
index 0000000..b5bca03
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/codecs/refseq/Transcript.java
@@ -0,0 +1,78 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.codecs.refseq;
+
+import org.broadinstitute.gatk.utils.GenomeLoc;
+import org.broadinstitute.gatk.utils.HasGenomeLocation;
+
+import java.util.List;
+
+/**
+ * Minimal read-only view of a gene transcript: its genomic extent, coding
+ * interval, strand, exon structure, and overlap queries against genomic
+ * intervals. Implemented by {@link RefSeqFeature}.
+ */
+public interface Transcript extends HasGenomeLocation {
+
+    /** Returns id of the transcript (RefSeq NM_* id) */
+    public String getTranscriptId();
+    /** Returns coding strand of the transcript, 1 or -1 for positive or negative strand, respectively */
+    public int getStrand();
+    /** Returns transcript's full genomic interval (includes all exons with UTRs) */
+    public GenomeLoc getLocation();
+    /** Returns genomic interval of the coding sequence (does not include
+     * UTRs, but still includes introns, since it's a single interval on the DNA)
+     */
+    public GenomeLoc getCodingLocation();
+    /** Name of the gene this transcript corresponds to (typically NOT gene id such as Entrez etc,
+     * but the implementation can decide otherwise)
+     */
+    public String getGeneName();
+    /** Number of exons in this transcript */
+    public int getNumExons();
+    /** Genomic location of the n-th exon; expected to throw an exception (runtime) if n is out of bounds */
+    public GenomeLoc getExonLocation(int n);
+
+    /** Returns the list of all exons in this transcript, as genomic intervals */
+    public List<GenomeLoc> getExons();
+
+    /** Returns true if the specified interval 'that' overlaps with the full genomic interval of this transcript */
+    public boolean overlapsP (GenomeLoc that);
+
+    /** Returns true if the specified interval 'that' overlaps with the coding genomic interval of this transcript.
+      * NOTE: since "coding interval" is still a single genomic interval, it will not contain UTRs of the outermost exons,
+      * but it will still contain introns and/or exons internal to this genomic locus that are not spliced into this transcript.
+      * @see #overlapsExonP
+      */
+    public boolean overlapsCodingP (GenomeLoc that);
+
+    /** Returns true if the specified interval 'that' overlaps with any of the exons actually spliced into this transcript */
+    public boolean overlapsExonP (GenomeLoc that);
+
+
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/codecs/sampileup/SAMPileupCodec.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/codecs/sampileup/SAMPileupCodec.java
new file mode 100644
index 0000000..407cf89
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/codecs/sampileup/SAMPileupCodec.java
@@ -0,0 +1,365 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.codecs.sampileup;
+
+import htsjdk.tribble.AsciiFeatureCodec;
+import htsjdk.tribble.exception.CodecLineParsingException;
+import htsjdk.tribble.readers.LineIterator;
+import htsjdk.tribble.util.ParsingUtils;
+
+import java.util.ArrayList;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import static org.broadinstitute.gatk.utils.codecs.sampileup.SAMPileupFeature.VariantType;
+
+/**
+ * Decoder for SAM pileup data.
+ *
+ * <p>
+ *     From the <a href="http://samtools.sourceforge.net/">SAMTools project documentation</a>:
+ * </p>
+ * <p>The Pileup format was first used by Tony Cox and Zemin Ning at
+ *     the Sanger Institute. It describes the base-pair information at each chromosomal position. This format
+ *     facilitates SNP/indel calling and brief alignment viewing by eye. Note that the pileup program has been replaced
+ *     in Samtools by mpileup, which produces a slightly different output format by default.
+ * </p>
+
+ * <h3>Format</h3>
+ * <p>There are two versions of the original pileup format: the current 6-column format produced by Samtools, and the old
+ * 10/13-column "consensus" format which could be obtained by using the -c argument, now deprecated. </p>
+ * <h4>Simple pileup: 6-column format</h4>
+ * <p>
+ *     Each line consists of chromosome, 1-based coordinate, reference base, the
+ *     number of reads covering the site, read bases and base qualities. At the
+ *     read base column, a dot stands for a match to the reference base on the
+ *     forward strand, a comma for a match on the reverse strand, `ACGTN' for a mismatch
+ *     on the forward strand and `acgtn' for a mismatch on the reverse strand.
+ *     A pattern `\+[0-9]+[ACGTNacgtn]+' indicates there is an insertion between
+ *     this reference position and the next reference position. The length of the
+ *     insertion is given by the integer in the pattern, followed by the inserted sequence.
+ * </p>
+ * <pre>
+ *     seq1 272 T 24  ,.$.....,,.,.,...,,,.,..^+. <<<+;<<<<<<<<<<<=<;<;7<&
+ *     seq1 273 T 23  ,.....,,.,.,...,,,.,..A <<<;<<<<<<<<<3<=<<<;<<+
+ *     seq1 274 T 23  ,.$....,,.,.,...,,,.,...    7<7;<;<<<<<<<<<=<;<;<<6
+ *     seq1 275 A 23  ,$....,,.,.,...,,,.,...^l.  <+;9*<<<<<<<<<=<<:;<<<<
+ *     seq1 276 G 22  ...T,,.,.,...,,,.,....  33;+<<7=7<<7<&<<1;<<6<
+ *     seq1 277 T 22  ....,,.,.,.C.,,,.,..G.  +7<;<<<<<<<&<=<<:;<<&<
+ *     seq1 278 G 23  ....,,.,.,...,,,.,....^k.   %38*<<;<7<<7<=<<<;<<<<<
+ *     seq1 279 C 23  A..T,,.,.,...,,,.,..... ;75&<<<<<<<<<=<<<9<<:<<
+ * </pre>
+ * <p>
+ *     See the <a href="http://samtools.sourceforge.net/pileup.shtml">Pileup format documentation</a> for more details.
+ * </p>
+ *
+ * <h4>Consensus pileup: 10/13-column format</h4>
+ * <p>The "consensus" or extended pileup consists of the following:
+ *  <ul>
+ *      <li>original 6 columns as described above</li>
+ *      <li>4 extra columns representing consensus values (consensus base, consensus quality, variant quality and maximum mapping quality of the
+ * reads covering the sites) for all sites, inserted before the bases and quality strings</li>
+ *      <li>3 extra columns indicating counts of reads supporting indels (just for indel sites)</li>
+ *  </ul>
+ * </p>
+ * <h4>Example of consensus pileup for SNP or non-variant sites</h4>
+ * <pre>
+ *     seq1  60  T  T  66  0  99  13  ...........^~.^~.   9<<55<;<<<<<<
+ *     seq1  61  G  G  72  0  99  15  .............^~.^y. (;975&;<<<<<<<<
+ *     seq1  62  T  T  72  0  99  15  .$..............    <;;,55;<<<<<<<<
+ *     seq1  63  G  G  72  0  99  15  .$.............^~.  4;2;<7:+<<<<<<<
+ *     seq1  64  G  G  69  0  99  14  ..............  9+5<;;;<<<<<<<
+ *     seq1  65  A  A  69  0  99  14  .$............. <5-2<;;<<<<<<;
+ *     seq1  66  C  C  66  0  99  13  .............   &*<;;<<<<<<8<
+ *     seq1  67  C  C  69  0  99  14  .............^~.    ,75<.4<<<<<-<<
+ *     seq1  68  C  C  69  0  99  14  ..............  576<;7<<<<<8<< *
+ * </pre>
+ *
+ * <h4>Example of consensus pileup for indels</h4>
+ * <pre>
+ *     Escherichia_coli_K12	3995037	*	*\/*	430	0	37	144	*	+A	143	1	0
+ *     Escherichia_coli_K12	3995279	*	*\/*	202	0	36	68	*	+A	67	1	0
+ *     Escherichia_coli_K12	3995281	*	*\/*	239	0	36	67	*	-CG	66	1	0
+ * </pre>
+ * <p>
+ *     See <a href="http://samtools.sourceforge.net/cns0.shtml/">Consensus pileup format (deprecated)</a> for more details.
+ * </p>
+ *
+ * <h3>Caveat</h3>
+ * <p>Handling of indels is questionable at the moment. Proceed with care.</p>
+ *
+ *
+ * @author Matt Hanna, Geraldine VdAuwera
+ * @since 2014
+ */
+public class SAMPileupCodec extends AsciiFeatureCodec<SAMPileupFeature> {
+    // number of tokens expected (6, 10 or 13 are valid; anything else is wrong)
+    private static final int basicTokenCount = 6;           // simple pileup
+    private static final int consensusSNPTokenCount = 10;   // consensus pileup, SNP/non-variant site
+    private static final int consensusIndelTokenCount = 13; // consensus pileup, indel site
+    private static final char fldDelim = '\t';
+    // allocate once and don't ever bother creating them again:
+    private static final String baseA = "A";
+    private static final String baseC = "C";
+    private static final String baseG = "G";
+    private static final String baseT = "T";
+    private static final String emptyStr = ""; // we will use this for "reference" allele in insertions
+
+    // codec file extension
+    protected static final String FILE_EXT = "samp";
+
+    public SAMPileupCodec() {
+        super(SAMPileupFeature.class);
+    }
+
+    /**
+     * Parses one pileup line into a SAMPileupFeature.
+     *
+     * Accepts either the basic 6-column format or the deprecated 10/13-column
+     * "consensus" format; the token count alone selects the parsing branch.
+     *
+     * @param line one tab-separated pileup line
+     * @return the decoded feature
+     * @throws CodecLineParsingException if the token count or field contents are inconsistent
+     */
+    public SAMPileupFeature decode(String line) {
+        //+1 because we want to know if we have more than the max
+        String[] tokens = new String[consensusIndelTokenCount+1];
+
+        // split the line
+        final int count = ParsingUtils.split(line,tokens,fldDelim);
+
+        SAMPileupFeature feature = new SAMPileupFeature();
+
+        /*
+         * Tokens 0, 1, 2 are the same for both formats so they will be interpreted without differentiation.
+         * The 10/13-format has 4 tokens inserted after token 2 compared to the 6-format, plus 3 more tokens added at
+         * the end for indels. We are currently not making any use of the extra indel tokens.
+         *
+         * Any token count other than basicTokenCount, consensusSNPTokenCount or consensusIndelTokenCount is wrong.
+         */
+        final String observedString, bases, quals;
+
+        // tokens common to all formats: chromosome, 1-based position, reference base
+        feature.setChr(tokens[0]);
+        feature.setStart(Integer.parseInt(tokens[1]));
+
+        if(tokens[2].length() != 1)  {
+            throw new CodecLineParsingException("The SAM pileup line had unexpected base " + tokens[2] + " on line = " + line);
+        }
+        feature.setRef(tokens[2].charAt(0));
+
+        switch (count) {
+            case basicTokenCount:
+                bases = tokens[4];
+                quals = tokens[5];
+                // parsing is pretty straightforward for 6-col format
+                if ( feature.getRef() == '*' ) {   // this indicates an indel -- but it shouldn't occur with vanilla 6-col format
+                    throw new CodecLineParsingException("Found an indel on line = " + line + " but it shouldn't happen in simple pileup format");
+                } else {
+                    parseBasesAndQuals(feature, bases, quals);
+                    feature.setRefBases(tokens[2].toUpperCase());
+                    feature.setEnd(feature.getStart());
+                }
+                break;
+            case consensusSNPTokenCount: // pileup called a SNP or a reference base
+                observedString = tokens[3].toUpperCase();
+                feature.setFWDAlleles(new ArrayList<String>(2));
+                feature.setConsensusConfidence(Double.parseDouble(tokens[4]));
+                feature.setVariantConfidence(Double.parseDouble(tokens[5]));
+                bases = tokens[8];
+                quals = tokens[9];
+                // confirm that we have a non-variant, not a mis-parsed indel
+                if ( feature.getRef() == '*' ) {
+                    throw new CodecLineParsingException("Line parsing of " + line + " says we have a SNP or non-variant but the ref base is '*', which indicates an indel");
+                }
+                // Parse the SNP or non-variant
+                parseBasesAndQuals(feature, bases, quals);
+                if ( observedString.length() != 1 ) {
+                    throw new CodecLineParsingException( "Line parsing of " + line + " says we have a SNP or non-variant but the genotype token is not a single letter: " + observedString);
+                }
+                feature.setRefBases(tokens[2].toUpperCase());
+                feature.setEnd(feature.getStart());
+
+                char ch = observedString.charAt(0);
+
+                switch ( ch ) {  // record alleles (decompose ambiguous base codes)
+                    case 'A': feature.getFWDAlleles().add(baseA); feature.getFWDAlleles().add(baseA); break;
+                    case 'C': feature.getFWDAlleles().add(baseC); feature.getFWDAlleles().add(baseC); break;
+                    case 'G': feature.getFWDAlleles().add(baseG); feature.getFWDAlleles().add(baseG); break;
+                    case 'T': feature.getFWDAlleles().add(baseT); feature.getFWDAlleles().add(baseT); break;
+                    case 'M': feature.getFWDAlleles().add(baseA); feature.getFWDAlleles().add(baseC); break;
+                    case 'R': feature.getFWDAlleles().add(baseA); feature.getFWDAlleles().add(baseG); break;
+                    case 'W': feature.getFWDAlleles().add(baseA); feature.getFWDAlleles().add(baseT); break;
+                    case 'S': feature.getFWDAlleles().add(baseC); feature.getFWDAlleles().add(baseG); break;
+                    case 'Y': feature.getFWDAlleles().add(baseC); feature.getFWDAlleles().add(baseT); break;
+                    case 'K': feature.getFWDAlleles().add(baseG); feature.getFWDAlleles().add(baseT); break;
+                }
+                if ( feature.getFWDAlleles().get(0).charAt(0) == feature.getRef() && feature.getFWDAlleles().get(1).charAt(0) == feature.getRef() ) feature.setVariantType(VariantType.NONE);
+                else {
+                    // 	we know that at least one allele is non-ref;
+                    // if one is ref and the other is non-ref, or if both are non ref but they are the same (i.e.
+                    // homozygous non-ref), we still have 2 allelic variants at the site (e.g. one ref and one nonref)
+                    feature.setVariantType(VariantType.SNP);
+                    if ( feature.getFWDAlleles().get(0).charAt(0) == feature.getRef() ||
+                            feature.getFWDAlleles().get(1).charAt(0) == feature.getRef() ||
+                            feature.getFWDAlleles().get(0).equals(feature.getFWDAlleles().get(1))
+                            ) feature.setNumNonRef(1);
+                    else feature.setNumNonRef(2); // if both observations differ from ref and they are not equal to one another, then we get multiallelic site...
+                }
+                break;
+            case consensusIndelTokenCount:
+                observedString = tokens[3].toUpperCase();
+                feature.setFWDAlleles(new ArrayList<String>(2));
+                feature.setConsensusConfidence(Double.parseDouble(tokens[4]));
+                feature.setVariantConfidence(Double.parseDouble(tokens[5]));
+                // confirm that we have an indel, not a mis-parsed SNP or non-variant
+                if ( feature.getRef() != '*' ) {
+                    throw new CodecLineParsingException("Line parsing of " + line + " says we have an indel but the ref base is not '*'");
+                }
+                // Parse the indel
+                parseIndels(observedString,feature) ;
+                if ( feature.isDeletion() ) feature.setEnd(feature.getStart()+feature.length()-1);
+                else feature.setEnd(feature.getStart()); // if it's not a deletion and we are biallelic, this has got to be an insertion; otherwise the state is inconsistent!!!!
+                break;
+            default:
+                throw new CodecLineParsingException("The SAM pileup line didn't have the expected number of tokens " +
+                    "(expected = " + basicTokenCount + " (basic pileup), " + consensusSNPTokenCount +
+                    " (consensus pileup for a SNP or non-variant site) or " + consensusIndelTokenCount +
+                    " (consensus pileup for an indel); saw = " + count + " on line = " + line + ")");
+        }
+        return feature;
+    }
+
+    /**
+     * Can the file be decoded?
+     * @param path path the file to test for parsability with this codec
+     * @return true if the path has the correct file extension, false otherwise
+     */
+    @Override
+    public boolean canDecode(final String path) { return path.endsWith("." + FILE_EXT); }
+
+    /**
+     * Reads the header from the input; the pileup format defines no header.
+     *
+     * @param lineIterator iterator positioned at the start of the input (not consumed)
+     * @return always null, since this format has no header
+     */
+    @Override
+    public Object readActualHeader(LineIterator lineIterator) {
+        // No header for this format
+        return null;
+    }
+
+    private void parseIndels(String genotype,SAMPileupFeature feature) {
+        String [] obs = genotype.split("/"); // get observations, now need to tinker with them a bit
+
+        // if reference allele is among the observed alleles, we will need to take special care of it since we do not have direct access to the reference;
+        // if we have an insertion, the "reference" allele is going to be empty; if it is a deletion, we will deduce the "reference allele" bases
+        // from what we have recorded for the deletion allele (e.g. "-CAC")
+        boolean hasRefAllele = false;
+
+        for ( int i = 0 ; i < obs.length ; i++ ) {
+            if ( obs[i].length() == 1 && obs[i].charAt(0) == '*'  ) {
+                hasRefAllele = true;
+                feature.getFWDAlleles().add(emptyStr);
+                continue;
+            }
+
+            String varBases = obs[i].toUpperCase();
+
+            switch ( obs[i].charAt(0) )  {
+                case '+':
+                    if (!feature.isReference() && !feature.isInsertion()) feature.setVariantType(VariantType.INDEL);
+                    else feature.setVariantType(VariantType.INSERTION);
+                    feature.setRefBases(emptyStr);
+                    break;
+                case '-' :
+                    if (!feature.isReference() && !feature.isDeletion()) feature.setVariantType(VariantType.INDEL);
+                    else feature.setVariantType(VariantType.DELETION);
+                    feature.setRefBases(varBases); // remember what was deleted, this will be saved as "reference allele"
+                    break;
+                default: throw new CodecLineParsingException("Can not interpret observed indel allele record: "+genotype);
+            }
+            feature.getFWDAlleles().add(varBases);
+            feature.setLength(obs[i].length()-1); // inconsistent for non-biallelic indels!!
+        }
+        if ( hasRefAllele ) {
+            // we got at least one ref. allele (out of two recorded)
+            if (feature.isReference()) { // both top theories are actually ref allele;
+                feature.setNumNonRef(0); // no observations of non-reference allele at all
+                feature.setRefBases(emptyStr);
+            } else {
+                feature.setNumNonRef(1); // hasRefAllele = true, so one allele was definitely ref, hence there is only one left
+            }
+        } else {
+            // we observe two non-ref alleles; they better be the same variant, otherwise the site is not bi-allelic and at the moment we
+            // fail to set data in a consistent way.
+            if ( feature.getFWDAlleles().get(0).equals(feature.getFWDAlleles().get(1))) feature.setNumNonRef(1);
+            else feature.setNumNonRef(2);
+        }
+        // DONE with indels
+
+    }
+
+    private void parseBasesAndQuals(SAMPileupFeature feature, final String bases, final String quals)
+    {
+        //System.out.printf("%s%n%s%n", bases, quals);
+
+        // needs to convert the base string with its . and , to the ref base
+        StringBuilder baseBuilder = new StringBuilder();
+        StringBuilder qualBuilder = new StringBuilder();
+        boolean done = false;
+        for ( int i = 0, j = 0; i < bases.length() && ! done; i++ ) {
+            //System.out.printf("%d %d%n", i, j);
+            char c = (char)bases.charAt(i);
+
+            switch ( c ) {
+                case '.':   // matches reference
+                case ',':   // matches reference
+                    baseBuilder.append(feature.getRef());
+                    qualBuilder.append(quals.charAt(j++));
+                    break;
+                case '$':   // end of read
+                    break;
+                case '*':   // end of indel?
+                    j++;
+                    break;
+                case '^':   // mapping quality
+                    i++;
+                    break;
+                case '+':   // start of indel
+                case '-':   // start of indel
+                    final Pattern regex = Pattern.compile("([0-9]+).*");             // matches case 1
+                    final String rest = bases.substring(i+1);
+                    //System.out.printf("sub is %s%n", rest);
+                    Matcher match = regex.matcher(rest);
+                    if ( ! match.matches() ) {
+                        if ( feature.getRef() != '*' )
+                            throw new CodecLineParsingException("Bad pileup format: " + bases + " at position " + i);
+                        done = true;
+                    }
+                    else {
+                        String g = match.group(1);
+                        //System.out.printf("group is %d, match is %s%n", match.groupCount(), g);
+                        int l = Integer.parseInt(g);
+                        i += l + g.length();    // length of number + that many bases + +/- at the start (included in the next i++)
+                        //System.out.printf("remaining is %d => %s%n", l, bases.substring(i+1));
+                    }
+                    break;
+                default:   // non reference base
+                    baseBuilder.append(c);
+                    qualBuilder.append(quals.charAt(j++));
+            }
+        }
+
+        feature.setPileupBases(baseBuilder.toString());
+        feature.setPileupQuals(qualBuilder.toString());
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/codecs/sampileup/SAMPileupFeature.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/codecs/sampileup/SAMPileupFeature.java
new file mode 100644
index 0000000..1768c13
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/codecs/sampileup/SAMPileupFeature.java
@@ -0,0 +1,284 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.codecs.sampileup;
+
+import htsjdk.samtools.util.StringUtil;
+import htsjdk.tribble.Feature;
+
+import java.util.List;
+
+/**
+ * A tribble feature representing a SAM pileup.
+ *
+ * Allows intake of both simple (6-column) or extended/consensus (10/13-column) pileups. Simple pileup features will
+ * contain only basic information, no observed alleles or variant/genotype inferences, and so shouldn't be used as
+ * input for analysis that requires that information.
+ *
+ * @author mhanna
+ * @version 0.1
+ */
public class SAMPileupFeature implements Feature {
    // Kind of event recorded at this site. NONE means the site matches the reference;
    // INDEL marks a mixed indel site (e.g. one insertion and one deletion observed).
    public enum VariantType { NONE, SNP, INSERTION, DELETION, INDEL };

    private String contig;            // genomic location of this genotyped site
    private int start;
    private int stop;

    private char refBaseChar; // what we have set for the reference base (is set to a '*' for indel!)
    private String refBases;        // the reference base sequence according to NCBI; single base for point mutations, deleted bases for  deletions, empty string for insertions

    private String pileupQuals;     // the read base qualities
    private String pileupBases;     // the read bases themselves

    private List<String> observedAlleles = null;    // The sequences of the observed alleles (e.g. {"A","C"} for point mutation or {"","+CC"} for het. insertion
    private VariantType varType = VariantType.NONE;
    private int nNonref = 0; // number of non-reference alleles observed
    private int eventLength = 0; // number of inserted or deleted bases

    private double consensusScore = 0; // consensus confidence reported by the pileup
    private double variantScore = 0;   // variant confidence reported by the pileup

    /**
     * create the pileup feature.  Default protection so that only other classes in this package can create it.
     */
    SAMPileupFeature() {}

    /** Alias for {@link #getContig()}, kept for the older Feature interface method. */
    @Override
    public String getChr() {
        return getContig();
    }

    /** @return name of the contig this site lies on */
    @Override
    public String getContig() {
        return contig;
    }

    /** Sets the contig name of this site. */
    protected void setChr(String chr) {
        this.contig = chr;
    }

    /** @return start position of this site */
    @Override
    public int getStart() {
        return start;
    }

    protected void setStart(int start) {
        this.start = start;
    }

    /** @return end position of this site (equal to start except for deletions) */
    @Override
    public int getEnd() {
        return stop;
    }

    protected void setEnd(int end) {
        this.stop = end;
    }

    /** @return the raw quality string; see {@link #getQuals()} for the numeric form */
    public String getQualsAsString()        { return pileupQuals; }

    protected void setPileupQuals(String pileupQuals) {
        this.pileupQuals = pileupQuals;
    }

    /** Returns reference base for point genotypes or '*' for indel genotypes, as a char.
     *
     */
    public char getRef()            { return refBaseChar; }

    protected void setRef(char ref) {
        this.refBaseChar = ref;
    }

    /** @return depth of the pile, i.e. the length of the quality string */
    public int size()               { return pileupQuals.length(); }

    /** Returns pile of observed bases over the current genomic location.
     *
     */
    public String getBasesAsString()        { return pileupBases; }

    protected void setPileupBases(String pileupBases) {
        this.pileupBases = pileupBases;
    }

    /** Returns formatted pileup string for the current genomic location as
     * "location: reference_base observed_base_pile observed_qual_pile"
     */
    public String getPileupString()
    {
        if(start == stop)
            return String.format("%s:%d: %s %s %s", getChr(), getStart(), getRef(), getBasesAsString(), getQualsAsString());
        else
            return String.format("%s:%d-%d: %s %s %s", getChr(), getStart(), getEnd(), getRef(), getBasesAsString(), getQualsAsString());
    }

    /**
     * Gets the bases in byte array form.
     * @return byte array of the available bases.
     */
    public byte[] getBases() {
        return StringUtil.stringToBytes(getBasesAsString());
    }

    /**
     * Gets the Phred base qualities without ASCII offset.
     * @return Phred base qualities (printable chars minus the 33 offset).
     */
    public byte[] getQuals() {
        byte[] quals = StringUtil.stringToBytes(getQualsAsString());
        for(int i = 0; i < quals.length; i++) quals[i] -= 33;
        return quals;
    }

    /** Returns bases in the reference allele as a String. For point genotypes, the string consists of a single
     * character (reference base). For indel genotypes, the string is empty for insertions into
     * the reference, or consists of deleted bases for deletions.
     *
     * @return reference allele, forward strand
     */
    public String getFWDRefBases() {
        return refBases;
    }

    protected void setRefBases(String refBases) {
        this.refBases = refBases;
    }

    /** @return the observed alleles, forward strand (live list, mutated by the codec during parsing) */
    public List<String> getFWDAlleles()  {
        return observedAlleles;
    }

    protected void setFWDAlleles(List<String> alleles) {
        this.observedAlleles = alleles;
    }

    // ----------------------------------------------------------------------
    //
    // What kind of variant are we?
    //
    // ----------------------------------------------------------------------
    public boolean isSNP() { return varType == VariantType.SNP; }
    public boolean isInsertion() { return varType == VariantType.INSERTION; }
    public boolean isDeletion() { return varType == VariantType.DELETION ; }
    public boolean isIndel() { return isInsertion() || isDeletion() || varType == VariantType.INDEL; }
    public boolean isReference()  { return varType == VariantType.NONE; }

    protected void setVariantType(VariantType variantType) {
        this.varType = variantType;
    }

    public boolean isHom() {
    	// implementation-dependent: here we use the fact that for ref and snps we actually use fixed static strings to remember the genotype
    	if ( ! isIndel() ) return ( observedAlleles.get(0).equals(observedAlleles.get(1)) );
    	// NOTE(review): mixed-type INDEL sites always report false here (neither pure
    	// insertion nor pure deletion) — confirm this is the intended contract.
    	return ( isInsertion() || isDeletion() ) && observedAlleles.get(0).equals(observedAlleles.get(1) );
    }

    public boolean isHet() {
    	// implementation-dependent: here we use the fact that for ref and snps we actually use fixed static strings to remember the genotype
    	if ( ! isIndel() ) return ( !(observedAlleles.get(0).equals(observedAlleles.get(1))) );
    	// NOTE(review): on this path isIndel() is always true, so every indel genotype
    	// reports as het — hom indels are then both isHom() and isHet(); confirm intended.
    	return isIndel() || ( ! observedAlleles.get(0).equals(observedAlleles.get(1) ) );
    }

    /** @return the variant confidence score reported by the consensus pileup */
    public double getVariantConfidence() {
        return variantScore;
    }

    protected void setVariantConfidence(double variantScore) {
        this.variantScore = variantScore;
    }

    /** @return true if at most one non-reference allele was observed at this site */
    public boolean isBiallelic() {
        return nNonref  < 2;
    }

    protected void setNumNonRef(int nNonref) {
        this.nNonref = nNonref;
    }

    /** @return the consensus confidence score reported by the consensus pileup */
    public double getConsensusConfidence() {
        return consensusScore;
    }

    protected void setConsensusConfidence(double consensusScore) {
        this.consensusScore = consensusScore;
    }

    /** @return number of inserted or deleted bases (0 for point genotypes) */
    public int length() {
        return eventLength;
    }

    protected void setLength(int eventLength) {
        this.eventLength = eventLength;
    }

	/** @return true if this site was recorded as an indel pileup (reference base '*') */
	public boolean isIndelGenotype() {
		return refBaseChar == '*';
	}


	/** @return true if this site was recorded as a point (non-indel) pileup */
	public boolean isPointGenotype() {
		return ! isIndelGenotype();
	}

	/** Implements method required by GenotypeList interface. If this object represents
	 * an indel genotype, then it returns itself through this method. If this object is a
	 * point genotype, this method returns null.
	 * @return this feature if it is an indel genotype, otherwise null
	 */
	public SAMPileupFeature getIndelGenotype() {
		if ( isIndelGenotype() ) return this;
		else return null;
	}

	/** Implements method required by GenotypeList interface. If this object represents
	 * a point genotype, then it returns itself through this method. If this object is an
	 * indel genotype, this method returns null.
	 * @return this feature if it is a point genotype, otherwise null
	 */
	public SAMPileupFeature getPointGenotype() {
		if ( isPointGenotype() ) return this;
		else return null;
	}

	/** Returns true if this object <em>is</em> an indel genotype (and thus
	 * an indel genotype is all it has).
	 * @return same value as {@link #isIndelGenotype()}
	 */
	public boolean hasIndelGenotype() {
		return isIndelGenotype();
	}

	/** Returns true if this object <em>is</em> a point genotype (and thus
	 * a point genotype is all it has).
	 * @return same value as {@link #isPointGenotype()}
	 */
	public boolean hasPointGenotype() {
		return isPointGenotype();
	}



}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/codecs/samread/SAMReadCodec.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/codecs/samread/SAMReadCodec.java
new file mode 100644
index 0000000..447aa47
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/codecs/samread/SAMReadCodec.java
@@ -0,0 +1,134 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.codecs.samread;
+
+import htsjdk.samtools.Cigar;
+import htsjdk.samtools.TextCigarCodec;
+import htsjdk.samtools.util.StringUtil;
+import htsjdk.tribble.AsciiFeatureCodec;
+import htsjdk.tribble.exception.CodecLineParsingException;
+import htsjdk.tribble.readers.LineIterator;
+import htsjdk.tribble.util.ParsingUtils;
+
+/**
+ * Decodes a simple SAM text string.
+ *
+ * <p>
+ * Reads in the SAM text version of a BAM file as a ROD.  For testing only
+ * </p>
+ *
+ * <p>
+ * See also: @see <a href="http://samtools.sourceforge.net">SAMTools</a> for format specification
+ * </p>
+ *
+ * <h2>File format example</h2>
+ * <pre>
+ *     SL-XBC:1:10:628:923#0	16	Escherichia_coli_K12	1	37	76M	=	1	0	AGCTTTTCATTCTGACTGCAACGGGCAATATGTCTCTGTGTGGATTAAAAAAAGAGTGTCTGATAGCAGCTTCTGA	B@>87<;A@?@957:>>@AA at B>@A9AB at B>@A@@@@@A;=AAB at BBBBBCBBBB@>A>:ABB at BAABCB=CA at CB
+ * </pre>
+ *
+ * @author Matt Hanna
+ * @since 2009
+ */
+public class SAMReadCodec extends AsciiFeatureCodec<SAMReadFeature> {
+    /* SL-XBC:1:10:628:923#0	16	Escherichia_coli_K12	1	37	76M	=	1	0	AGCTTTTCATTCTGACTGCAACGGGCAATATGTCTCTGTGTGGATTAAAAAAAGAGTGTCTGATAGCAGCTTCTGA	B@>87<;A@?@957:>>@AA at B>@A9AB at B>@A@@@@@A;=AAB at BBBBBCBBBB@>A>:ABB at BAABCB=CA at CB */
+
+    // the number of tokens we expect to parse from a read line
+    private static final int expectedTokenCount = 11;
+    // codec file extension
+    protected static final String FILE_EXT = "samr";
+
+    public SAMReadCodec() {
+        super(SAMReadFeature.class);
+    }
+
+    /**
+     * Decode a single line in a SAM text file.
+     * @param line line to decode.
+     * @return A SAMReadFeature modeling that line.
+     */
+    @Override
+    public SAMReadFeature decode(String line) {
+        // we may be asked to process a header line; ignore it
+        if (line.startsWith("@")) return null;        
+
+        String[] tokens = new String[expectedTokenCount];
+
+        // split the line
+        int count = ParsingUtils.splitWhitespace(line,tokens);
+
+        // check to see if we've parsed the string into the right number of tokens (expectedTokenCount)
+        if (count != expectedTokenCount)
+            throw new CodecLineParsingException("the SAM read line didn't have the expected number of tokens " +
+                                                "(expected = " + expectedTokenCount + ", saw = " + count + " on " +
+                                                "line = " + line + ")");
+
+        final String readName = tokens[0];
+        final int flags = Integer.parseInt(tokens[1]);
+        final String contigName = tokens[2];
+        final int alignmentStart = Integer.parseInt(tokens[3]);
+        final int mapQ = Integer.parseInt(tokens[4]);
+        final String cigarString = tokens[5];
+        final String mateContigName = tokens[6];
+        final int mateAlignmentStart = Integer.parseInt(tokens[7]);
+        final int inferredInsertSize = Integer.parseInt(tokens[8]);
+        final byte[] bases = StringUtil.stringToBytes(tokens[9]);
+        final byte[] qualities = StringUtil.stringToBytes(tokens[10]);
+
+        // Infer the alignment end.
+        Cigar cigar = TextCigarCodec.decode(cigarString);
+        int alignmentEnd = alignmentStart + cigar.getReferenceLength() - 1;
+
+        // Remove printable character conversion from the qualities.
+        for(byte quality: qualities) quality -= 33;
+
+        return new SAMReadFeature(readName,
+                                  flags,
+                                  contigName,
+                                  alignmentStart,
+                                  alignmentEnd,
+                                  mapQ,
+                                  cigarString,
+                                  mateContigName,
+                                  mateAlignmentStart,
+                                  inferredInsertSize,
+                                  bases,
+                                  qualities);
+    }
+
+    /**
+     * Can the file be decoded?
+     * @param path path the file to test for parsability with this codec
+     * @return true if the path has the correct file extension, false otherwise
+     */
+    @Override
+    public boolean canDecode(final String path) { return path.endsWith("." + FILE_EXT); }
+
+    @Override
+    public Object readActualHeader(LineIterator lineIterator) {
+        // No header for this format
+        return null;
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/codecs/samread/SAMReadFeature.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/codecs/samread/SAMReadFeature.java
new file mode 100644
index 0000000..fda2e69
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/codecs/samread/SAMReadFeature.java
@@ -0,0 +1,207 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.codecs.samread;
+
+import htsjdk.tribble.Feature;
+
+/**
+ * Represents a SAM record read from a SAM text format file. 
+ *
+ * @author mhanna
+ * @version 0.1
+ */
public class SAMReadFeature implements Feature {
    /**
     * Name of this read.
     */
    private final String readName;

    /**
     * Flags associated with this read.
     */
    private final int flags;

    /**
     * Contig to which this read is aligned.
     */
    private final String contig;

    /**
     * Position on contig to which this read is aligned.
     */
    private final int alignmentStart;

    /**
     * Position on contig at which this alignment ends.
     */
    private final int alignmentEnd;

    /**
     * Mapping quality for the read.
     */
    private final int mapQ;

    /**
     * Cigar string matching read to reference.
     */
    private final String cigarString;

    /**
     * Contig to which this read's pair is aligned.
     */
    private final String mateContig;

    /**
     * Position in contig to which this read's pair is aligned.
     */
    private final int mateAlignmentStart;

    /**
     * Size between pairs.
     */
    private final int insertSize;

    /**
     * Bases in this read.
     */
    private final byte[] bases;

    /**
     * Qualities constituting this read.
     */
    private final byte[] qualities;

    // Tags are not currently supported.

    /**
     * create the read feature.  Default protection so that only other classes in this package can create it.
     */
    SAMReadFeature(final String readName,
                   final int flags,
                   final String contig,
                   final int alignmentStart,
                   final int alignmentEnd,
                   final int mapQ,
                   final String cigarString,
                   final String mateContig,
                   final int mateAlignmentStart,
                   final int insertSize,
                   final byte[] bases,
                   final byte[] qualities) {
        this.readName = readName;
        this.flags = flags;
        this.contig = contig;
        this.alignmentStart = alignmentStart;
        this.alignmentEnd = alignmentEnd;
        this.mapQ = mapQ;
        this.cigarString = cigarString;
        this.mateContig = mateContig;
        this.mateAlignmentStart = mateAlignmentStart;
        this.insertSize = insertSize;
        this.bases = bases;
        this.qualities = qualities;
    }

    /** @return the name of this read */
    public String getReadName() {
        return readName;
    }

    /** @return the SAM flags field for this read */
    public int getFlags() {
        return flags;
    }

    /** @return name of the contig this read is aligned to */
    public String getReferenceName() {
        return contig;
    }

    /** @return aligned start position on the contig */
    public int getAlignmentStart() {
        return alignmentStart;
    }

    /** @return aligned end position on the contig */
    public int getAlignmentEnd() {
        return alignmentEnd;
    }

    /**
     * An alias for getReferenceName, required by Feature interface.
     * @return Aligned contig name.
     */
    public String getChr() {
        return getContig();
    }

    /**
     * An alias for getReferenceName, required by Feature interface.
     * @return Aligned contig name.
     */
    public String getContig() {
        return getReferenceName();
    }

    /**
     * An alias for getAlignmentStart(), required by Feature interface.
     * (Original javadoc said getAlignmentEnd(); the start/end docs were swapped.)
     * @return Start of alignment, 1-based.
     */
    public int getStart() {
        return getAlignmentStart();
    }

    /**
     * An alias for getAlignmentEnd(), required by Feature interface.
     * (Original javadoc said getAlignmentStart(); the start/end docs were swapped.)
     * @return End of alignment, inclusive.
     */
    public int getEnd() {
        return getAlignmentEnd();
    }

    /** @return mapping quality of the read */
    public int getMappingQuality() {
        return mapQ;
    }

    /** @return the CIGAR string matching read to reference */
    public String getCigarString() {
        return cigarString;
    }

    /** @return name of the contig the mate is aligned to */
    public String getMateReferenceName() {
        return mateContig;
    }

    /** @return aligned start position of the mate */
    public int getMateAlignmentStart() {
        return mateAlignmentStart;
    }

    /** @return inferred insert size between this read and its mate */
    public int getInferredInsertSize() {
        return insertSize;
    }

    /** @return the read bases; NOTE: the internal array is returned without copying — callers must not mutate it */
    public byte[] getReadBases() {
        return bases;
    }

    /** @return the read qualities; NOTE: the internal array is returned without copying — callers must not mutate it */
    public byte[] getReadQualities() {
        return qualities;
    }
}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/codecs/table/BedTableCodec.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/codecs/table/BedTableCodec.java
new file mode 100644
index 0000000..84c071f
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/codecs/table/BedTableCodec.java
@@ -0,0 +1,58 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.codecs.table;
+
+import org.broadinstitute.gatk.utils.refdata.ReferenceDependentFeatureCodec;
+
+import java.util.Arrays;
+
+/**
+ * The standard table codec that expects loci as contig start stop, not contig:start-stop
+ *
+ * <p>
+ * The standard table codec with a slightly different parsing convention
+ * (expects loci as contig start stop, not contig:start-stop)
+ * </p>
+ *
+ * <p>
+ * See also: TableCodec
+ * </p>
+ *
+ * @author Chris Hartl
+ * @since 2010
+ */
+public class BedTableCodec extends TableCodec implements ReferenceDependentFeatureCodec {
+
+    @Override
+    public TableFeature decode(String line) {
+        if (line.startsWith(headerDelimiter) || line.startsWith(commentDelimiter) || line.startsWith(igvHeaderDelimiter))
+            return null;
+        String[] split = line.split(delimiterRegex);
+        if (split.length < 1)
+            throw new IllegalArgumentException("TableCodec line = " + line + " doesn't appear to be a valid table format");
+        return new TableFeature(genomeLocParser.createGenomeLoc(split[0],Integer.parseInt(split[1])-1,Integer.parseInt(split[2])), Arrays.asList(split),header);
+    }
+}
\ No newline at end of file
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/codecs/table/TableCodec.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/codecs/table/TableCodec.java
new file mode 100644
index 0000000..9908ed6
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/codecs/table/TableCodec.java
@@ -0,0 +1,136 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.codecs.table;
+
+import htsjdk.tribble.AsciiFeatureCodec;
+import htsjdk.tribble.readers.LineIterator;
+import org.broadinstitute.gatk.utils.refdata.ReferenceDependentFeatureCodec;
+import org.broadinstitute.gatk.utils.GenomeLocParser;
+import org.broadinstitute.gatk.utils.exceptions.UserException;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+
+/**
+ * Reads tab deliminated tabular text files
+ *
+ * <p>
+ *     <ul>
+ *     <li>Header: must begin with line HEADER or track (for IGV), followed by any number of column names,
+ *     separated by whitespace.</li>
+ *     <li>Comment lines starting with # are ignored</li>
+ *     <li>Each non-header and non-comment line is split into parts by whitespace,
+ *     and these parts are assigned as a map to their corresponding column name in the header.
+ *     Note that the first element (corresponding to the HEADER column) must be a valid genome loc
+ *     such as 1, 1:1 or 1:1-10, which is the position of the Table element on the genome.  TableCodec
+ *     requires that there be one value for each column in the header, and no more, on all lines.</li>
+ *     </ul>
+ * </p>
+ *
+ * </p>
+ *
+ * <h2>File format example</h2>
+ * <pre>
+ *     HEADER a b c
+ *     1:1  1   2   3
+ *     1:2  4   5   6
+ *     1:3  7   8   9
+ * </pre>
+ *
+ * @author Mark DePristo
+ * @since 2009
+ */
+public class TableCodec extends AsciiFeatureCodec<TableFeature> implements ReferenceDependentFeatureCodec {
+    protected final static String delimiterRegex = "\\s+";
+    protected final static String headerDelimiter = "HEADER";
+    protected final static String igvHeaderDelimiter = "track";
+    protected final static String commentDelimiter = "#";
+    // codec file extension
+    protected final static String FILE_EXT = "tbl";
+
+    protected ArrayList<String> header = new ArrayList<String>();
+
+    /**
+     * The parser to use when resolving genome-wide locations.
+     */
+    protected GenomeLocParser genomeLocParser;
+
+    public TableCodec() {
+        super(TableFeature.class);
+    }
+
+    /**
+     * Set the parser to use when resolving genetic data.
+     * @param genomeLocParser The supplied parser.
+     */
+    @Override
+    public void setGenomeLocParser(GenomeLocParser genomeLocParser) {
+        this.genomeLocParser =  genomeLocParser;
+    }
+
+    @Override
+    public TableFeature decode(String line) {
+        if (line.startsWith(headerDelimiter) || line.startsWith(commentDelimiter) || line.startsWith(igvHeaderDelimiter))
+            return null;
+        String[] split = line.split(delimiterRegex);
+        if (split.length < 1)
+            throw new IllegalArgumentException("TableCodec line = " + line + " doesn't appear to be a valid table format");
+        return new TableFeature(genomeLocParser.parseGenomeLoc(split[0]),Arrays.asList(split), header);
+    }
+
+    /**
+     * Can the file be decoded?
+     * @param path path the file to test for parsability with this codec
+     * @return true if the path has the correct file extension, false otherwise
+     */
+    @Override
+    public boolean canDecode(final String path) { return path.endsWith("." + FILE_EXT); }
+
+    @Override
+    public Object readActualHeader(final LineIterator reader) {
+        boolean isFirst = true;
+        while (reader.hasNext()) {
+            final String line = reader.peek(); // Peek to avoid reading non-header data
+            if ( isFirst && ! line.startsWith(headerDelimiter) && ! line.startsWith(commentDelimiter)) {
+                throw new UserException.MalformedFile("TableCodec file does not have a header");
+            }
+            isFirst &= line.startsWith(commentDelimiter);
+            if (line.startsWith(headerDelimiter)) {
+                reader.next(); // "Commit" the peek
+                if (header.size() > 0) throw new IllegalStateException("Input table file seems to have two header lines.  The second is = " + line);
+                final String spl[] = line.split(delimiterRegex);
+                Collections.addAll(header, spl);
+                return header;
+            } else if (line.startsWith(commentDelimiter)) {
+                reader.next(); // "Commit" the peek
+            } else {
+                break;
+            }
+        }
+        return header;
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/codecs/table/TableFeature.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/codecs/table/TableFeature.java
new file mode 100644
index 0000000..7960e1f
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/codecs/table/TableFeature.java
@@ -0,0 +1,104 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.codecs.table;
+
+
+import htsjdk.tribble.Feature;
+import org.broadinstitute.gatk.utils.GenomeLoc;
+import org.broadinstitute.gatk.utils.Utils;
+
+import java.util.List;
+
+/**
+ * A feature representing a single row out of a text table
+ */
+public class TableFeature implements Feature {
+    // stores the values for the columns seperated out
+    private final List<String> values;
+
+    // if we have column names, we store them here
+    private final List<String> keys;
+
+    // our location
+    private final GenomeLoc position;
+
+    public TableFeature(GenomeLoc position, List<String> values, List<String> keys) {
+        this.values = values;
+        this.keys = keys;
+        this.position = position;
+    }
+
+    @Override
+    public String getChr() {
+        return getContig();
+    }
+
+    @Override
+    public String getContig() {
+        return position.getContig();
+    }
+
+    @Override
+    public int getStart() {
+        return (int)position.getStart();
+    }
+
+    @Override
+    public int getEnd() {
+        return (int)position.getStop();
+    }
+
+    public String getValue(int columnPosition) {
+        if (columnPosition >= values.size()) throw new IllegalArgumentException("We only have " + values.size() + "columns, the requested column = " + columnPosition);
+        return values.get(columnPosition);
+    }
+
+    public String toString() {
+        return String.format("%s\t%s",position.toString(), Utils.join("\t",values));
+    }
+
+    public String get(String columnName) {
+        int position = keys.indexOf(columnName);
+        if (position < 0) throw new IllegalArgumentException("We don't have a column named " + columnName);
+        return values.get(position);
+    }
+
+    public GenomeLoc getLocation() {
+        return this.position;
+    }
+
+    public List<String> getAllValues() {
+        return getValuesTo(values.size());
+    }
+
+    public List<String> getValuesTo(int columnPosition) {
+        return values.subList(0,columnPosition);
+    }
+
+    public List<String> getHeader() {
+        return keys;
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/collections/DefaultHashMap.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/collections/DefaultHashMap.java
new file mode 100644
index 0000000..0ae2264
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/collections/DefaultHashMap.java
@@ -0,0 +1,56 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.collections;
+
+import java.util.HashMap;
+
+/**
+ * Created with IntelliJ IDEA.
+ * User: farjoun
+ * Date: 10/30/12
+ * Time: 3:20 PM
+ * To change this template use File | Settings | File Templates.
+ */
+
+//lifted from http://stackoverflow.com/questions/7519339
+//could also use org.apache.commons.collections.map.DefaultedMap http://commons.apache.org/collections/apidocs/org/apache/commons/collections/map/DefaultedMap.html
+public class DefaultHashMap<K,V> extends HashMap<K,V> {
+
+    public void setDefaultValue(V defaultValue) {
+        this.defaultValue = defaultValue;
+    }
+    protected V defaultValue;
+    public DefaultHashMap(V defaultValue) {
+        this.defaultValue = defaultValue;
+    }
+    @Override
+    public V get(Object k) {
+        V v = super.get(k);
+        return ((v == null) && !this.containsKey(k)) ? this.defaultValue : v;
+    }
+
+}
+
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/collections/ExpandingArrayList.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/collections/ExpandingArrayList.java
new file mode 100644
index 0000000..2f8adc3
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/collections/ExpandingArrayList.java
@@ -0,0 +1,69 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.collections;
+
+import java.util.ArrayList;
+import java.util.Collection;
+
+public class ExpandingArrayList<E> extends ArrayList<E> {
+    public ExpandingArrayList() { super(); }
+    public ExpandingArrayList(Collection<? extends E> c) { super(c); }
+    public ExpandingArrayList(int initialCapacity) { super(initialCapacity); }
+
+    /**
+     * Returns the element at the specified position in this list.  If index > size,
+     * returns null.  Otherwise tries to access the array
+     * @param index
+     * @return
+     * @throws IndexOutOfBoundsException in index < 0
+     */
+    public E get(int index) throws IndexOutOfBoundsException {
+        if ( index < size() )
+            return super.get(index);
+        else
+            return null;
+    }
+
+    public E expandingGet(int index, E default_value) throws IndexOutOfBoundsException {
+        maybeExpand(index, default_value);
+        return super.get(index);
+    }
+
+    private void maybeExpand(int index, E value) {
+        if ( index >= size() ) {
+            ensureCapacity(index+1); // make sure we have space to hold at least index + 1 elements
+            // We need to add null items until we can safely set index to element
+            for ( int i = size(); i <= index; i++ )
+                add(value);
+        }
+    }
+
+
+    public E set(int index, E element) {
+        maybeExpand(index, null);
+        return super.set(index, element);
+    }
+}
\ No newline at end of file
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/collections/IndexedSet.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/collections/IndexedSet.java
new file mode 100644
index 0000000..671548e
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/collections/IndexedSet.java
@@ -0,0 +1,342 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.collections;
+
+import it.unimi.dsi.fastutil.objects.Object2IntMap;
+import it.unimi.dsi.fastutil.objects.Object2IntOpenHashMap;
+
+import java.util.*;
+
+/**
+* Set set where each element can be reference by a unique integer index that runs from
+*     0 to the size of the set - 1.
+*
+* @author Valentin Ruano-Rubio <valentin at broadinstitute.org>
+*/
+public class IndexedSet<E> extends AbstractSet<E> implements Set<E> {
+
+    /**
+     * Elements stored in an array-list by their index.
+     */
+    private final ArrayList<E> elements;
+
+    /**
+     * A unmodifiable view to the element list. Initially {@code null} it is thread-unsafe lazy instantiated
+     * when requested first time through {@link #asList}. Therefore typically it is shared by invoking code but
+     * there could be some extra copies (rare though) in multi-thread runs.
+     */
+    private transient List<E> unmodifiableElementsListView;
+
+    /**
+     * Quick element to index lookup map.
+     * <p>
+     *  Uses a primitive int value map for efficiency sake.
+     * </p>
+     */
+    private final Object2IntMap<E> indexByElement;
+
+    /**
+     * Creates an empty indexed set indicating the expected number of elements.
+     *
+     * @param initialCapacity the initial number of elements.
+     */
+    public IndexedSet(final int initialCapacity) {
+        elements = new ArrayList<>(initialCapacity);
+        indexByElement = new Object2IntOpenHashMap<>(initialCapacity);
+    }
+
+    /**
+     * Creates a new sample list from a existing collection of elements.
+     *
+     * <p>
+     *     Elements will be indexed as they appear in the input array. Repeats will be ignored.
+     * </p>
+     *
+     * @param values the original sample list.
+     *
+     * @throws IllegalArgumentException
+     * if {@code values} array is {@code null} itself, or it contains {@code null}.
+     */
+    @SuppressWarnings("unchecked")
+    public IndexedSet(final Collection<E> values) {
+        if (values == null)
+            throw new IllegalArgumentException("input values cannot be null");
+
+        final int initialCapacity = values.size();
+        elements = new ArrayList<>(initialCapacity);
+        indexByElement = new Object2IntOpenHashMap<>(initialCapacity);
+        int nextIndex = 0;
+        for (final E value : values) {
+            if (value == null)
+                throw new IllegalArgumentException("null element not allowed: index == " + nextIndex);
+            if (indexByElement.containsKey(value))
+                continue;
+            indexByElement.put(value, nextIndex++);
+            elements.add(value);
+        }
+    }
+
+    /**
+     * Creates a new sample list from a existing array of elements.
+     *
+     * <p>
+     *     Elements will be indexed as they appear in the collection. Repeats will be ignored.
+     * </p>
+     *
+     * @param values the original sample list.
+     *
+     * @throws IllegalArgumentException
+     * if {@code values} collection is {@code null} itself, or it contains {@code null}.
+     */
+    @SuppressWarnings("unchecked")
+    public IndexedSet(final E ... values) {
+        if (values == null)
+            throw new IllegalArgumentException("input values cannot be null");
+
+        final int initialCapacity = values.length;
+        elements = new ArrayList<>(initialCapacity);
+        indexByElement = new Object2IntOpenHashMap<>(initialCapacity);
+        int nextIndex = 0;
+        for (final E value : values) {
+            if (value == null)
+                throw new IllegalArgumentException("null element not allowed: index == " + nextIndex);
+            if (indexByElement.containsKey(value))
+                continue;
+            indexByElement.put(value, nextIndex++);
+            elements.add(value);
+        }
+    }
+
+    /**
+     * Returns a list view of the elements in the set.
+     *
+     * <p>
+     *     Elements are sorted by their index within the set.
+     * </p>
+     *
+     * <p>
+     *     This view changes as the indexed set changes but it cannot be used to update its contents.
+     *     In such case a {@link UnsupportedOperationException} exception will be thrown if the calling
+     *     code tries to tho just that.
+     * </p>
+     *
+     * @return never {@code null}.
+     */
+    public List<E> asList() {
+        if (unmodifiableElementsListView == null)
+            unmodifiableElementsListView = Collections.unmodifiableList(elements);
+        return unmodifiableElementsListView;
+    }
+
+    /**
+     * Throws an exception if an index is out of bounds.
+     *
+     * <p>
+     *     An element index is valid iff is within [0,{@link #size()}).
+     * </p>
+     *
+     * @param index the query index.
+     *
+     * @throws IllegalArgumentException {@code index} is out of bounds.
+     */
+    protected void checkIndex(final int index) {
+        if (index < 0)
+            throw new IllegalArgumentException("the index cannot be negative: " + index);
+        if (index >= size())
+            throw new IllegalArgumentException("the index is equal or larger than the list length: " + index + " >= " + size());
+    }
+
+    @Override
+    public Iterator<E> iterator() {
+        return asList().iterator();
+    }
+
+    /**
+     * Returns number of elements in the set.
+     * @return never {@code null}.
+     */
+    @Override
+    public int size() {
+        return elements.size();
+    }
+
+    /**
+     *
+     * @param o
+     * @return {@code true} iff {@code o} is in
+     */
+    @Override
+    @SuppressWarnings("all")
+    public boolean contains(final Object o) {
+        return o != null && indexByElement.containsKey(o);
+    }
+
+    /**
+     * Adds a new element to the set.
+     *
+     * <p>
+     *     If the element was already in th set nothing will happen and the method will return {@code false}. However,
+     *     if the element is new to this set, it will assigned the next index available (equal to the size before addition).
+     *     The method will return {@code true} in this case.
+     * </p>
+     *
+     * @param o the object to add.
+     *
+     * @throw IllegalArgumentException if {@code o} is {@code null}.
+     *
+     * @return {@code true} iff the set was modified by this operation.
+     */
+    @Override
+    public boolean add(final E o) {
+        if (o == null)
+            throw new IllegalArgumentException("the input argument cannot be null");
+        if (contains(o))
+            return false;
+        final int nextIndex = size();
+        elements.add(o);
+        indexByElement.put(o, nextIndex);
+        return true;
+    }
+
+    /**
+     * Removes an element from the set.
+     *
+     * <p>
+     *     If the element was not present in the set, nothing happens and the method return false. However,
+     *     if the element is new to this set, it will be assigned the next index available (equal to the size
+     *     before addition).
+     *     The method will return {@code true} in this case.
+     * </p>
+     *
+     * @param o the object to add.
+     *
+     * @throw IllegalArgumentException if {@code o} is {@code null}.
+     *
+     * @return {@code true} iff the set was modified by this operation.
+     */   @Override
+    public boolean remove(final Object o) {
+        final int index = indexByElement.removeInt(o);
+        if (index == -1)
+            return false;
+        elements.remove(index);
+        indexByElement.remove(o);
+        final ListIterator<E> it = elements.listIterator(index);
+        int nextIndex = index;
+        while (it.hasNext())
+            indexByElement.put(it.next(),nextIndex++);
+        return true;
+    }
+
+    /**
+     * Removes all elements in the set.
+     */
+    @Override
+    public void clear() {
+        elements.clear();
+        indexByElement.clear();
+    }
+
+    /**
+     * Compares this with another indexed set.
+     * @param o the other object to compare to.
+     * @return {@code false} unless {@code o} is a indexed-set that contains the same elements in the same order.
+     */
+    @Override
+    public boolean equals(final Object o) {
+        if (o == this)
+            return true;
+        if (o == null)
+            return false;
+        if (!(o instanceof IndexedSet<?>))
+            return false;
+
+        final IndexedSet<?> other = (IndexedSet<?>)o;
+
+        return equals(other);
+    }
+
+    /**
+     * Compare to another indexed set.
+     *
+     * @param other the target indexed set.
+     *
+     * @throws java.lang.IllegalArgumentException if {@code other} is {@code null}.
+     *
+     * @return {@code true} iff {@other} is not {@code null}, and contains exactly the same elements
+     * (as compared using {@link Object#equals} a this set with matching indices.
+     */
+    public boolean equals(final IndexedSet<?> other) {
+        if (other == null)
+            throw new IllegalArgumentException("other cannot be null");
+        final ArrayList<?> otherElements = other.elements;
+
+        final int elementCount = elements.size();
+        if (otherElements.size() != elementCount)
+            return false;
+        for (int i = 0; i < elementCount; i++)
+            if (!elements.get(i).equals(otherElements.get(i)))
+                return false;
+        return true;
+    }
+
+    @Override
+    public int hashCode() {
+        int result = 1;
+
+        for (final E element : elements)
+            result = 31 * result + (element == null ? 0 : element.hashCode());
+        return result;
+    }
+
+    /**
+     * Returns the element given its index within the set.
+     * @param index the target element's index.
+     *
+     * @throws IllegalArgumentException if {@code index} is not valid; in [0,{@link #size()}).
+     *
+     * @return never {@code null}; as null is not a valid element.
+     */
+    public E get(final int index) {
+        checkIndex(index);
+        return elements.get(index);
+    }
+
+    /**
+     * Returns the index of an object.
+     * @param o the object of interest.
+     *
+     * @throws IllegalArgumentException if {@code o} is {@code null}.
+     *
+     * @return {@code -1} if such an object is not an element of this set, otherwise is index in the set thus a
+     * values within [0,{@link #size()}).
+     */
+    public int indexOf(final E o) {
+        if (o == null)
+            throw new IllegalArgumentException("the query object cannot be null");
+        return indexByElement.containsKey(o) ? indexByElement.getInt(o) : -1;
+    }
+
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/collections/LoggingNestedIntegerArray.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/collections/LoggingNestedIntegerArray.java
new file mode 100644
index 0000000..746bb2c
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/collections/LoggingNestedIntegerArray.java
@@ -0,0 +1,120 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.collections;
+
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+
+import java.io.PrintStream;
+
+/**
+ * Wrapper around the basic NestedIntegerArray class that logs all updates (ie., all calls to put())
+ * to the provided output stream. For testing/debugging purposes.
+ *
+ * Log entries are of the following form (fields are tab-separated):
+ * LABEL    OPERATION    VALUE   KEY1    KEY2    ...     KEY_N
+ *
+ * A header line is written before the log entries giving the dimensions of this NestedIntegerArray.
+ * It has the form:
+ *
+ * # LABEL    SIZE_OF_FIRST_DIMENSION    SIZE_OF_SECOND_DIMENSION    ...    SIZE_OF_NTH_DIMENSION
+ *
+ * @author David Roazen
+ */
+public class LoggingNestedIntegerArray<T> extends NestedIntegerArray<T> {
+
+    private PrintStream log;
+    private String logEntryLabel;
+
+    public static final String HEADER_LINE_PREFIX = "# ";
+    public enum NestedIntegerArrayOperation { GET, PUT };
+
+    /**
+     *
+     * @param log output stream to which to log update operations
+     * @param logEntryLabel String that should be prefixed to each log entry
+     * @param dimensions
+     */
+    public LoggingNestedIntegerArray( PrintStream log, String logEntryLabel, final int... dimensions ) {
+        super(dimensions);
+
+        if ( log == null ) {
+            throw new ReviewedGATKException("Log output stream must not be null");
+        }
+        this.log = log;
+        this.logEntryLabel = logEntryLabel != null ? logEntryLabel : "";
+
+        // Write the header line recording the dimensions of this NestedIntegerArray:
+        StringBuilder logHeaderLine = new StringBuilder();
+
+        logHeaderLine.append(HEADER_LINE_PREFIX);
+        logHeaderLine.append(this.logEntryLabel);
+        for ( int dimension : dimensions ) {
+            logHeaderLine.append("\t");
+            logHeaderLine.append(dimension);
+        }
+
+        this.log.println(logHeaderLine.toString());
+    }
+
+    @Override
+    public T get( final int... keys ) {
+        StringBuilder logEntry = new StringBuilder();
+
+        logEntry.append(logEntryLabel);
+        logEntry.append("\t");
+        logEntry.append(NestedIntegerArrayOperation.GET);
+        logEntry.append("\t");  // empty field for the datum value
+
+        for ( int key : keys ) {
+            logEntry.append("\t");
+            logEntry.append(key);
+        }
+
+        log.println(logEntry.toString());
+
+        return super.get(keys);
+    }
+
+    @Override
+    public boolean put( final T value, final int... keys ) {
+        StringBuilder logEntry = new StringBuilder();
+
+        logEntry.append(logEntryLabel);
+        logEntry.append("\t");
+        logEntry.append(NestedIntegerArrayOperation.PUT);
+        logEntry.append("\t");
+        logEntry.append(value);
+        for ( int key : keys ) {
+            logEntry.append("\t");
+            logEntry.append(key);
+        }
+
+        // PrintStream methods all use synchronized blocks internally, so our logging is thread-safe
+        log.println(logEntry.toString());
+
+        return super.put(value, keys);
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/collections/NestedIntegerArray.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/collections/NestedIntegerArray.java
new file mode 100644
index 0000000..495f3dd
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/collections/NestedIntegerArray.java
@@ -0,0 +1,221 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.collections;
+
+import org.apache.log4j.Logger;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
+/**
+ * An N-dimensional array of values of type T, addressed by non-negative integer
+ * keys and backed by a tree of Object[] nodes with the T values at the leaves.
+ *
+ * The shape is fixed at construction time.  Only the first
+ * NUM_DIMENSIONS_TO_PREALLOCATE levels of the tree are allocated eagerly; deeper
+ * branches are created lazily by put(), so sparsely populated key spaces stay cheap.
+ *
+ * Thread-safety: put() synchronizes on individual tree nodes while allocating
+ * branches and writing leaves, and so supports concurrent writers.  get() performs
+ * no locking.  The bulk accessors getAllValues()/getAllLeaves() also perform no
+ * locking, so their results may be inconsistent if put()s run concurrently.
+ *
+ * User: ebanks
+ * Date: July 1, 2012
+ */
+
+public class NestedIntegerArray<T> {
+
+    private static Logger logger = Logger.getLogger(NestedIntegerArray.class);
+
+    // Root level of the tree: one slot per possible value of the first key.
+    // Interior slots hold Object[] sub-arrays; bottom-level slots hold T values.
+    protected final Object[] data;
+
+    protected final int numDimensions;
+    protected final int[] dimensions;
+
+    // Preallocate the first two dimensions to limit contention during tree traversals in put()
+    private static final int NUM_DIMENSIONS_TO_PREALLOCATE = 2;
+
+    /**
+     * Creates an array with the given shape.
+     *
+     * @param dimensions size of each dimension; valid keys for dimension i are 0 .. dimensions[i]-1
+     * @throws ReviewedGATKException if no dimensions are provided
+     */
+    public NestedIntegerArray(final int... dimensions) {
+        numDimensions = dimensions.length;
+        if ( numDimensions == 0 )
+            throw new ReviewedGATKException("There must be at least one dimension to an NestedIntegerArray");
+        this.dimensions = dimensions.clone();
+
+        int dimensionsToPreallocate = Math.min(dimensions.length, NUM_DIMENSIONS_TO_PREALLOCATE);
+
+        if ( logger.isDebugEnabled() ) logger.debug(String.format("Creating NestedIntegerArray with dimensions %s", Arrays.toString(dimensions)));
+        if ( logger.isDebugEnabled() ) logger.debug(String.format("Pre-allocating first %d dimensions", dimensionsToPreallocate));
+
+        data = new Object[dimensions[0]];
+        preallocateArray(data, 0, dimensionsToPreallocate);
+
+        if ( logger.isDebugEnabled() ) logger.debug(String.format("Done pre-allocating first %d dimensions", dimensionsToPreallocate));
+    }
+
+    /**
+     * @return the dimensions of this nested integer array.  DO NOT MODIFY
+     */
+    public int[] getDimensions() {
+        return dimensions;
+    }
+
+    /**
+     * Recursively allocate the first dimensionsToPreallocate dimensions of the tree
+     *
+     * Pre-allocating the first few dimensions helps limit contention during tree traversals in put()
+     *
+     * @param subarray current node in the tree
+     * @param dimension current level in the tree
+     * @param dimensionsToPreallocate preallocate only this many dimensions (starting from the first)
+     */
+    private void preallocateArray( Object[] subarray, int dimension, int dimensionsToPreallocate ) {
+        // The deepest preallocated level needs no children of its own, so stop one level early.
+        if ( dimension >= dimensionsToPreallocate - 1 ) {
+            return;
+        }
+
+        for ( int i = 0; i < subarray.length; i++ ) {
+            subarray[i] = new Object[dimensions[dimension + 1]];
+            preallocateArray((Object[])subarray[i], dimension + 1, dimensionsToPreallocate);
+        }
+    }
+
+    /**
+     * Looks up the value stored at the position given by the keys.  No locking is performed.
+     *
+     * Unlike put(), this does NOT validate the number of keys: passing fewer than
+     * numDimensions keys raises ArrayIndexOutOfBoundsException, and extra keys are ignored.
+     *
+     * @param keys one key per dimension
+     * @return the stored value, or null if a key is out of range or nothing is stored there
+     */
+    public T get(final int... keys) {
+        final int numNestedDimensions = numDimensions - 1;
+        Object[] myData = data;
+
+        for( int i = 0; i < numNestedDimensions; i++ ) {
+            if ( keys[i] >= dimensions[i] )
+                return null;
+
+            myData = (Object[])myData[keys[i]];
+            if ( myData == null )
+                return null;    // branch was never created by put(), so nothing is stored below it
+        }
+
+        return (T)myData[keys[numNestedDimensions]];    // unchecked: leaves are stored as Object
+    }
+
+    /**
+     * Insert a value at the position specified by the given keys.
+     *
+     * This method is thread-safe, however the caller MUST check the
+     * return value to see if the put succeeded. This method RETURNS FALSE if
+     * the value could not be inserted because there already was a value present
+     * at the specified location. In this case the caller should do a get() to get
+     * the already-existing value and (potentially) update it.
+     *
+     * @param value value to insert
+     * @param keys keys specifying the location of the value in the tree
+     * @throws ReviewedGATKException if the number of keys is wrong or a key is out of range
+     * @return true if the value was inserted, false if it could not be inserted because there was already
+     *         a value at the specified position
+     */
+    public boolean put(final T value, final int... keys) { // WARNING! value comes before the keys!
+        if ( keys.length != numDimensions )
+            throw new ReviewedGATKException("Exactly " + numDimensions + " keys should be passed to this NestedIntegerArray but " + keys.length + " were provided");
+
+        final int numNestedDimensions = numDimensions - 1;
+        Object[] myData = data;
+        for ( int i = 0; i < numNestedDimensions; i++ ) {
+            if ( keys[i] >= dimensions[i] )
+                throw new ReviewedGATKException("Key " + keys[i] + " is too large for dimension " + i + " (max is " + (dimensions[i]-1) + ")");
+
+            // If we're at or beyond the last dimension that was pre-allocated, we need to do a synchronized
+            // check to see if the next branch exists, and if it doesn't, create it
+            if ( i >= NUM_DIMENSIONS_TO_PREALLOCATE - 1 ) {
+                synchronized ( myData ) {
+                    if ( myData[keys[i]] == null ) {
+                        myData[keys[i]] = new Object[dimensions[i + 1]];
+                    }
+                }
+            }
+
+            myData = (Object[])myData[keys[i]];
+        }
+
+        synchronized ( myData ) {   // lock the bottom row while we examine and (potentially) update it
+
+            // Insert the new value only if there still isn't any existing value in this position
+            if ( myData[keys[numNestedDimensions]] == null ) {
+                myData[keys[numNestedDimensions]] = value;
+            }
+            else {
+                // Already have a value for this leaf (perhaps another thread came along and inserted one
+                // while we traversed the tree), so return false to notify the caller that we didn't put
+                // the item
+                return false;
+            }
+        }
+
+        return true;
+    }
+
+    /**
+     * @return all values currently stored in the tree, in depth-first key order.  No locking is performed.
+     */
+    public List<T> getAllValues() {
+        final List<T> result = new ArrayList<T>();
+        fillAllValues(data, result);
+        return result;
+    }
+
+    // Depth-first collection of leaf values.  Distinguishes branches from leaves
+    // via instanceof Object[] -- assumes T itself is never Object[] (TODO confirm).
+    private void fillAllValues(final Object[] array, final List<T> result) {
+        for ( Object value : array ) {
+            if ( value == null )
+                continue;
+            if ( value instanceof Object[] )
+                fillAllValues((Object[])value, result);
+            else
+                result.add((T)value);
+        }
+    }
+
+    /** A (keys, value) pairing describing one populated position in the tree. */
+    public static class Leaf<T> {
+        public final int[] keys;
+        public final T value;
+
+        public Leaf(final int[] keys, final T value) {
+            this.keys = keys;
+            this.value = value;
+        }
+    }
+
+    /**
+     * @return one Leaf (full key path plus value) per stored value, in depth-first
+     *         key order.  No locking is performed.
+     */
+    public List<Leaf<T>> getAllLeaves() {
+        final List<Leaf<T>> result = new ArrayList<Leaf<T>>();
+        fillAllLeaves(data, new int[0], result);
+        return result;
+    }
+
+    // Depth-first traversal accumulating the key path taken so far; same
+    // branch/leaf discrimination caveat as fillAllValues().
+    private void fillAllLeaves(final Object[] array, final int[] path, final List<Leaf<T>> result) {
+        for ( int key = 0; key < array.length; key++ ) {
+            final Object value = array[key];
+            if ( value == null )
+                continue;
+            final int[] newPath = appendToPath(path, key);
+            if ( value instanceof Object[] ) {
+                fillAllLeaves((Object[]) value, newPath, result);
+            } else {
+                result.add(new Leaf<T>(newPath, (T)value));
+            }
+        }
+    }
+
+    // Returns a fresh array equal to path with newKey appended (path itself is not modified).
+    private int[] appendToPath(final int[] path, final int newKey) {
+        final int[] newPath = new int[path.length + 1];
+        for ( int i = 0; i < path.length; i++ )
+            newPath[i] = path[i];
+        newPath[path.length] = newKey;
+        return newPath;
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/collections/Pair.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/collections/Pair.java
new file mode 100644
index 0000000..11229cb
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/collections/Pair.java
@@ -0,0 +1,93 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.collections;
+
+
/**
 * A simple mutable 2-tuple.
 *
 * Fields are public, STL-style, for easy and efficient direct access; Java-style
 * getters are also provided.  Because instances are mutable, they should not be
 * used as keys in hash-based collections while subject to modification.
 *
 * @param <X> type of the first element
 * @param <Y> type of the second element
 */
public class Pair<X,Y> {
    // declare public, STL-style for easier and more efficient access:
    public X first;
    public Y second;

    /** Creates a pair holding (x, y). */
    public Pair(X x, Y y) { first = x; second = y; }

    /** Overwrites both elements at once. */
    public void set(X x, Y y) { first = x; second = y; }

    /** Java-style getter; note that we currently allow direct access to the member field. */
    public X getFirst() { return first; }

    /** Java-style getter; note that we currently allow direct access to the member field. */
    public Y getSecond() { return second; }

    /**
     * Calculate whether this pair object is equal to another object.
     *
     * @param o The other object (hopefully a pair).
     * @return True if o is a Pair whose elements are (null-safely) equal to this pair's; false otherwise.
     */
    @Override
    public boolean equals( Object o ) {
        if ( !(o instanceof Pair) )     // also rejects o == null
            return false;

        // Wildcard cast rather than a raw type, so the field accesses below stay checked.
        final Pair<?,?> other = (Pair<?,?>)o;

        // Null-safe comparison of each slot (equivalent to Objects.equals(),
        // written out to keep this class free of imports).
        if ( first == null ? other.first != null : !first.equals(other.first) )
            return false;
        return second == null ? other.second == null : second.equals(other.second);
    }

    /**
     * Basic hashcode function.  Assume hashcodes of first and second are
     * randomly distributed and return the XOR of the two.
     *
     * @return Randomly distributed hashcode of the pair (0 when both elements are null).
     */
    @Override
    public int hashCode() {
        if( second == null && first == null )
            return 0;
        if( second == null )
            return first.hashCode();
        if( first == null )
            return second.hashCode();
        return first.hashCode() ^ second.hashCode();
    }

    /** @return "first,second", using each element's own toString(). */
    @Override
    public String toString() {
        return first+","+second;
    }
}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/collections/Permutation.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/collections/Permutation.java
new file mode 100644
index 0000000..522847e
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/collections/Permutation.java
@@ -0,0 +1,103 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.collections;
+
+import java.util.List;
+
+/**
+ * Represents a permutation of an ordered set or list of elements.
+ *
+ * @author Valentin Ruano-Rubio <valentin at broadinstitute.org>
+ */
+public interface Permutation<E> {
+
+    /**
+     * Checks whether this permutation is a partial one of the original list.
+     *
+     * <p>
+     *     A partial permutation is one in which not all of the original elements take part.
+     * </p>
+     *
+     * @return {@code true} iff this is a partial permutation.
+     */
+    public boolean isPartial();
+
+    /**
+     * Checks whether this is a trivial permutation where the resulting element list is the same as original.
+     *
+     * @return {@code true} iff the resulting element list is the same as the original.
+     */
+    public boolean isNonPermuted();
+
+    /**
+     * Given an index on the original list, returns the position of that element in the resulting list.
+     *
+     * @param fromIndex the query original element index.
+     *
+     * @throws IllegalArgumentException if {@code fromIndex} is not a valid index within the original list.
+     *
+     * @return -1 if that element is not part of the result (partial) permutation, otherwise some number between
+     *   0 and {@link #toSize()} - 1.
+     */
+    public int toIndex(final int fromIndex);
+
+    /**
+     * Given an index on the resulting list, it gives you the index of that element on the original list.
+     * @param toIndex the query resulting list index.
+     *
+     * @throws IllegalArgumentException if {@code toIndex} is not a valid index within the resulting list,
+     *   i.e. not in [0,{@link #toSize()}).
+     *
+     * @return a value between 0 and {@link #fromSize()} - 1.
+     */
+    public int fromIndex(final int toIndex);
+
+    /**
+     * Length of the original element list.
+     *
+     * @return 0 or greater.
+     */
+    public int fromSize();
+
+    /**
+     * Length of the resulting element list.
+     *
+     * @return 0 or greater.
+     */
+    public int toSize();
+
+    /**
+     * Returns an unmodifiable view of the original element list.
+     * @return never {@code null}.
+     */
+    public List<E> fromList();
+
+    /**
+     * Returns an unmodifiable view of the resulting (permuted) element list.
+     *
+     * @return never {@code null}.
+     */
+    public List<E> toList();
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/collections/PrimitivePair.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/collections/PrimitivePair.java
new file mode 100644
index 0000000..563578f
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/collections/PrimitivePair.java
@@ -0,0 +1,200 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.collections;
+
+
+/** This class is used to group together multiple Pair classes for
+ *  primitive types (thanks to generics shortcomings, these implementations
+ *  are more efficient then generic ones). This class contains no methods and
+ *  no fields, but only declarations of inner classes.
+ */
+ 
+public class PrimitivePair {
+
+   /** Pair of two integers */
+  public static class Int {
+    // declare public, STL-style for easier and more efficient access:
+    public int first; 
+    public int second;
+
+    public Int(int x, int y) { first = x; second = y; }
+    public Int() { first = second = 0; }
+
+    public void set(int x, int y) { first = x; second = y; }
+
+    /** Java-style getter; note that we currently allow direct access to 
+        the member field.
+    */
+    public int getFirst() { return first; }
+
+    /** Java-style getter; note that we currently allow direct access to 
+        the member field.
+    */
+    public int getSecond() { return second; }
+
+       /** Increments the elements of this pair by the
+        * corresponding elements of the pair <code>p</code> and returns this
+        * pair (modified). This method does not allocate a new pair, but changes
+        * in place the values stored in the object the method is invoked from. The
+        * method is unsafe: if p is null, a runtime exception will be thrown.
+        * @param p
+        * @return
+        */
+    public PrimitivePair.Int add(PrimitivePair.Int p) {
+        first += p.first;
+        second += p.second;
+        return this;
+    }
+
+       /** Decrements the elements of this pair by the
+        * corresponding elements of the pair <code>p</code> and returns this
+        * pair (modified). This method does not allocate a new pair, but changes
+        * in place the values stored in the object the method is invoked from. The
+        * method is unsafe: if p is null, a runtime exception will be thrown.
+        * @param p
+        * @return
+        */
+    public PrimitivePair.Int subtract(PrimitivePair.Int p) {
+        first -= p.first;
+        second -= p.second;
+        return this;
+    }
+
+       /** Copies values from the argument <code>p</code> into the corresponding
+        * elements of this pair and returns this pair (modified).
+        * @param p
+        * @return
+        */
+    public PrimitivePair.Int assignFrom(PrimitivePair.Int p ) {
+        first = p.first;
+        second = p.second;
+        return this;
+    }
+
+
+  }
+
+    public static class Long {
+      // declare public, STL-style for easier and more efficient access:
+      public long first;
+      public long second;
+
+      public Long(long x, long y) { first = x; second = y; }
+      public Long() { first = second = 0; }
+
+      public void set(long x, long y) { first = x; second = y; }
+
+      /** Java-style getter; note that we currently allow direct access to
+          the member field.
+      */
+      public long getFirst() { return first; }
+
+      /** Java-style getter; note that we currently allow direct access to
+          the member field.
+      */
+      public long getSecond() { return second; }
+
+        /** Increments the elements of this pair by the
+         * corresponding elements of the pair <code>p</code> and returns this
+         * pair (modified). This method does not allocate a new pair, but changes
+         * in place the values stored in the object the method is invoked from. The
+         * method is unsafe: if p is null, a runtime exception will be thrown.
+         * @param p
+         * @return
+         */
+     public PrimitivePair.Long add(PrimitivePair.Int p) {
+         first += p.first;
+         second += p.second;
+         return this;
+     }
+
+        /** Increments the elements of this pair by the
+         * corresponding elements of the pair <code>p</code> and returns this
+         * pair (modified). This method does not allocate a new pair, but changes
+         * in place the values stored in the object the method is invoked from. The
+         * method is unsafe: if p is null, a runtime exception will be thrown.
+         * @param p
+         * @return
+         */
+     public PrimitivePair.Long add(PrimitivePair.Long p) {
+         first += p.first;
+         second += p.second;
+         return this;
+     }
+
+        /** Decrements the elements of this pair by the
+         * corresponding elements of the pair <code>p</code> and returns this
+         * pair (modified). This method does not allocate a new pair, but changes
+         * in place the values stored in the object the method is invoked from. The
+         * method is unsafe: if p is null, a runtime exception will be thrown.
+         * @param p
+         * @return
+         */
+     public PrimitivePair.Long subtract(PrimitivePair.Int p) {
+         first -= p.first;
+         second -= p.second;
+         return this;
+     }
+
+        /** Decrements the elements of this pair by the
+         * corresponding elements of the pair <code>p</code> and returns this
+         * pair (modified). This method does not allocate a new pair, but changes
+         * in place the values stored in the object the method is invoked from. The
+         * method is unsafe: if p is null, a runtime exception will be thrown.
+         * @param p
+         * @return
+         */
+     public PrimitivePair.Long subtract(PrimitivePair.Long p) {
+         first -= p.first;
+         second -= p.second;
+         return this;
+     }
+
+     /** Copies values from the argument <code>p</code> into the corresponding
+       * elements of this pair and returns this pair (modified).
+       * @param p
+       * @return
+     */
+     public PrimitivePair.Long assignFrom(PrimitivePair.Long p ) {
+         first = p.first;
+         second = p.second;
+         return this;
+     }
+
+     /** Copies values from the argument <code>p</code> into the corresponding
+       * elements of this pair and returns this pair (modified).
+       * @param p
+       * @return
+     */
+     public PrimitivePair.Long assignFrom(PrimitivePair.Int p ) {
+            first = p.first;
+            second = p.second;
+            return this;
+     }
+
+    }
+
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/collections/RODMergingIterator.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/collections/RODMergingIterator.java
new file mode 100644
index 0000000..634fb36
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/collections/RODMergingIterator.java
@@ -0,0 +1,160 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.collections;
+
+import org.broadinstitute.gatk.utils.refdata.utils.LocationAwareSeekableRODIterator;
+import org.broadinstitute.gatk.utils.refdata.utils.RODRecordList;
+import org.broadinstitute.gatk.utils.GenomeLoc;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+
+import java.util.Collection;
+import java.util.Iterator;
+import java.util.LinkedList;
+import java.util.PriorityQueue;
+
+/**
+ * Merges several reference-ordered-data (ROD) iterators into a single stream of
+ * RODRecordList records ordered by genomic location.
+ *
+ * Each underlying iterator is wrapped in an Element that caches the location of
+ * the record it will return next; a priority queue keyed on that cached location
+ * always surfaces the track with the earliest upcoming record.  Exhausted
+ * iterators drop out of the queue.  remove() is not supported.
+ */
+public class RODMergingIterator implements Iterator<RODRecordList>, Iterable<RODRecordList> {
+    // Min-heap of wrapped iterators, prioritized by each one's cached next location.
+    PriorityQueue<Element> queue = new PriorityQueue<Element>();
+
+    /**
+     * Pairs one underlying iterator with a cached copy of the location of its
+     * next record; the cached location drives the queue ordering.
+     */
+    private class Element implements Comparable<Element> {
+        public LocationAwareSeekableRODIterator it = null;
+        public GenomeLoc nextLoc = null;
+
+        /**
+         * @param it must be a non-empty LocationAwareSeekableRODIterator
+         * @throws ReviewedGATKException if it is empty or of the wrong runtime type
+         */
+        public Element(Iterator<RODRecordList> it) {
+            if ( it instanceof LocationAwareSeekableRODIterator) {
+                this.it = (LocationAwareSeekableRODIterator)it;
+                if ( ! it.hasNext() ) throw new ReviewedGATKException("Iterator is empty")
;
+                update();
+            } else {
+                throw new ReviewedGATKException("Iterator passed to RODMergingIterator is not LocationAwareSeekableRODIterator");
+            }
+        }
+
+        // Refreshes the cached next location from the underlying iterator.
+        public Element update() {
+ //           E prev = value;
+            nextLoc = it.peekNextLocation(); // will return null if there is no next location
+            return this;
+        }
+
+        // Orders elements by cached next location; a null nextLoc (no more data)
+        // sorts after every non-null position.  NOTE(review): this ordering is
+        // used only for queue priority, not for equality.
+        public int compareTo(Element other) {
+            if ( nextLoc == null ) {
+                if ( other.nextLoc != null ) return 1; // null means no more data available, so its after any non-null position
+                return 0;
+            }
+            if ( other.nextLoc == null ) return -1; // we can get to this point only if this.nextLoc != null
+
+            return nextLoc.compareTo(other.nextLoc);
+        }
+
+        // Returns the underlying iterator's next record and refreshes the cached location.
+        public RODRecordList next() {
+            RODRecordList value = it.next();
+            update();
+            return value;
+        }
+    }
+
+    public Iterator<RODRecordList> iterator() {
+        return this;
+    }
+
+    /** Creates an empty merger; tracks can be attached later via add(). */
+    public RODMergingIterator() {
+        ;
+    }
+
+    /** Creates a merger over a single track. */
+    public RODMergingIterator(Iterator<RODRecordList> it) {
+         add(it);
+    }
+
+    /** Creates a merger over a collection of tracks. */
+    public RODMergingIterator(Collection<Iterator<RODRecordList>> its) {
+        for ( Iterator<RODRecordList> it : its ) {
+            add(it);
+        }
+    }
+
+    /** If the iterator is non-empty (hasNext() is true), put it into the queue. The next location the iterator
+     * will be after a call to next() is peeked into and cached as queue's priority value.
+     * @param it must be a LocationAwareSeekableRODIterator (enforced by Element's constructor)
+     */
+    public void add(Iterator<RODRecordList> it) {
+        if ( it.hasNext() )
+            queue.add(new Element(it));
+    }
+
+    public boolean hasNext() {
+        return ! queue.isEmpty();
+    }
+
+    /** Returns the record with the earliest next location across all merged tracks. */
+    public RODRecordList next() {
+        Element e = queue.poll();
+        RODRecordList value = e.next(); // next() will also update next location cached by the Element
+
+        if ( e.nextLoc != null ) // we have more data in the track
+            queue.add(e); // add the element back to queue (note: its next location, on which priority is based, was updated
+
+        //System.out.printf("Element is %s%n", e.value);
+        return value;
+    }
+
+    /** Peeks into the genomic location of the record this iterator will return next.
+     *
+     * NOTE(review): throws NullPointerException if the queue is empty (queue.peek()
+     * returns null) -- callers must check hasNext() first.
+     *
+     * @return location of the next record to be returned
+     */
+    public GenomeLoc peekLocation() {
+        return queue.peek().nextLoc;
+    }
+
+    /** Drains and returns every queued record whose location compares <= elt's location, including elt itself. */
+    public Collection<RODRecordList> allElementsLTE(RODRecordList elt) {
+        return allElementsLTE(elt, true);
+    }
+
+    /**
+     * Drains and returns every queued record whose location compares <= elt's location.
+     *
+     * @param elt the reference record providing the cutoff location
+     * @param includeElt whether elt itself is included at the front of the result
+     */
+    public Collection<RODRecordList> allElementsLTE(RODRecordList elt, boolean includeElt) {
+        LinkedList<RODRecordList> all = new LinkedList<RODRecordList>();
+
+        if ( includeElt ) all.add(elt);
+        
+        while ( hasNext() ) {
+            Element x = queue.peek();
+            //System.out.printf("elt.compareTo(x) == %d%n", elt.compareTo(x));
+            //System.out.printf("In allElementLTE%n");
+            int cmp = elt.getLocation().compareTo(x.nextLoc);
+            //System.out.printf("x=%s%n  elt=%s%n  => elt.compareTo(x) == %d%n", x, elt, cmp);
+            if ( cmp >= 0 ) {
+                //System.out.printf("  Adding element x=%s, size = %d%n", x, all.size());
+                all.add(next());
+                //System.out.printf("  Added size = %d%n", all.size());
+            }
+            else {
+                //System.out.printf("breaking...%n");
+                break;
+            }
+        }
+
+        return all;
+    }
+
+    /** Not supported; records cannot be removed from the merged stream. */
+    public void remove() {
+        throw new UnsupportedOperationException();
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/commandline/Advanced.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/commandline/Advanced.java
new file mode 100644
index 0000000..a7a98c7
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/commandline/Advanced.java
@@ -0,0 +1,41 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.commandline;
+
+import java.lang.annotation.*;
+
/**
 * Indicates that a walker argument is considered an advanced option.
 *
 * (Note: the mailing-list archiver mangled the leading '@' of the
 * meta-annotations below into " at "; restored here to valid Java.)
 *
 * @author Mark DePristo
 * @version 0.1
 */
@Documented
@Inherited
@Retention(RetentionPolicy.RUNTIME)
@Target({ElementType.TYPE,ElementType.FIELD})
public @interface Advanced {
}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/commandline/Argument.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/commandline/Argument.java
new file mode 100644
index 0000000..300d04c
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/commandline/Argument.java
@@ -0,0 +1,125 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.commandline;
+
+import java.lang.annotation.*;
+
/**
 * Annotates fields in objects that should be used as command-line arguments.
 * Any field annotated with {@code @Argument} can appear as a command-line parameter.
 *
 * @author hanna
 */
@Documented
@Inherited
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.FIELD)
public @interface Argument {
    /**
     * The full name of the command-line argument.  Full names should be
     * prefixed on the command-line with a double dash (--).
     * @return Selected full name, or "" to use the default.
     */
    String fullName() default "";

    /**
     * Specified short name of the command.  Short names should be prefixed
     * with a single dash.  Argument values can directly abut single-char
     * short names or be separated from them by a space.
     * @return Selected short name, or "" for none.
     */
    String shortName() default "";

    /**
     * Documentation for the command-line argument.  Should appear when the
     * --help argument is specified.
     * @return Doc string associated with this command-line argument.
     */
    String doc() default "Undocumented option";

    /**
     * Is this argument required.  If true, the command-line argument system will
     * make a best guess for populating this argument based on the type descriptor,
     * and will fail if the type can't be populated.
     * @return True if the argument is required.  False otherwise.
     */
    boolean required() default true;

    /**
     * Should this command-line argument be exclusive of others.  Should be
     * a comma-separated list of names of arguments of which this should be
     * independent.
     * @return A comma-separated string listing other arguments of which this
     *         argument should be independent.
     */
    String exclusiveOf() default "";

    /**
     * Provide a regexp-based validation string.
     * @return Non-empty regexp for validation, blank otherwise.
     */
    String validation() default "";

    /**
     * Hard lower bound on the allowed value for the annotated argument -- generates an exception if violated.
     * Enforced only for numeric types whose values are explicitly specified on the command line.
     *
     * @return Hard lower bound on the allowed value for the annotated argument, or Double.NEGATIVE_INFINITY
     *         if there is none.
     */
    double minValue() default Double.NEGATIVE_INFINITY;

    /**
     * Hard upper bound on the allowed value for the annotated argument -- generates an exception if violated.
     * Enforced only for numeric types whose values are explicitly specified on the command line.
     *
     * @return Hard upper bound on the allowed value for the annotated argument, or Double.POSITIVE_INFINITY
     *         if there is none.
     */
    double maxValue() default Double.POSITIVE_INFINITY;

    /**
     * Soft lower bound on the allowed value for the annotated argument -- generates a warning if violated.
     * Enforced only for numeric types whose values are explicitly specified on the command line.
     *
     * @return Soft lower bound on the allowed value for the annotated argument, or Double.NEGATIVE_INFINITY
     *         if there is none.
     */
    double minRecommendedValue() default Double.NEGATIVE_INFINITY;

    /**
     * Soft upper bound on the allowed value for the annotated argument -- generates a warning if violated.
     * Enforced only for numeric types whose values are explicitly specified on the command line.
     *
     * @return Soft upper bound on the allowed value for the annotated argument, or Double.POSITIVE_INFINITY
     *         if there is none.
     */
    double maxRecommendedValue() default Double.POSITIVE_INFINITY;
}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/commandline/ArgumentCollection.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/commandline/ArgumentCollection.java
new file mode 100644
index 0000000..30ba236
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/commandline/ArgumentCollection.java
@@ -0,0 +1,45 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.commandline;
+
+import java.lang.annotation.*;
+
/**
 * Marks a field whose type is a class bundling together a collection of
 * command-line arguments, so the arguments it contains can be shared across
 * multiple argument sources.
 *
 * @author aaron
 * @version 1.0
 * @since May 8, 2009
 */
@Documented
@Inherited
@Retention(RetentionPolicy.RUNTIME)
@Target({ElementType.FIELD})
public @interface ArgumentCollection {

}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/commandline/ArgumentDefinition.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/commandline/ArgumentDefinition.java
new file mode 100644
index 0000000..f5b5424
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/commandline/ArgumentDefinition.java
@@ -0,0 +1,297 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.commandline;
+
+import org.broadinstitute.gatk.utils.Utils;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+
+import java.lang.annotation.Annotation;
+import java.util.List;
+
+/**
+ * A specific argument definition.  Maps one-to-one with a field in some class.
+ */
+public class ArgumentDefinition {
+    /**
+     * Whether an argument is an input or an output.
+     */
+    public final ArgumentIOType ioType;
+
+    /**
+     * The class of the argument.
+     */
+    public final Class argumentType;
+
+    /**
+     * Full name of the argument.  Must have a value.
+     */
+    public final String fullName;
+
+    /**
+     * Short name of the argument.  Can be null.
+     */
+    public final String shortName;
+
+    /**
+     * Doc string for the argument.  Displayed in help.
+     */
+    public final String doc;
+
+    /**
+     * Must this argument be specified on the command-line?  Note that there's a
+     * critical difference between the meaning of a required argument from the
+     * perspective of the argument source and the perspective of the argument
+     * definition: the argument source's required field indicates that the field
+     * should somehow be populated by the GATK (and fail if there's an error).
+     * The ArgumentDefinition required element means that the required element
+     * must be specified on the command-line.
+     */
+    public final boolean required;
+
+    /**
+     * Is this argument a flag?  Users can't specify a value for a flag.
+     */
+    public final boolean isFlag;
+
+    /**
+     * Does this argument support multiple values (repeated "-arg value1 -arg value2"-style structures).
+     */
+    public final boolean isMultiValued;
+
+    /**
+     * The class of the componentType.  Not used for scalars.
+     */
+    public final Class componentType;
+
+    /**
+     * Is this argument hidden from the help system?
+     */
+    public final boolean isHidden;
+
+    /**
+     * Is this argument exclusive of other arguments?
+     */
+    public final String exclusiveOf;
+
+    /**
+     * Can we validate this regular expression?
+     */
+    public final String validation;
+
+    /**
+     * A list of valid options for this argument, if there is a compelling subset.
+     */
+    public final List<String> validOptions;
+
+    /**
+     * Creates a new argument definition.
+     * @param ioType Whether the argument is an input or an output.
+     * @param argumentType The class of the field.
+     * @param fullName Full name for this argument definition.
+     * @param shortName Short name for this argument definition.
+     * @param doc Doc string for this argument.
+     * @param required Whether or not this argument is required.
+     * @param isFlag Whether or not this argument should be treated as a flag.
+     * @param isMultiValued Whether or not this argument supports multiple values.
+     * @param isHidden Whether or not this argument should be hidden from the command-line argument system.
+     * @param componentType For multivalued arguments the type of the components.
+     * @param exclusiveOf Whether this command line argument is mutually exclusive of other arguments.
+     * @param validation A regular expression for command-line argument validation.
+     * @param validOptions is there a particular list of options that's valid for this argument definition?  List them if so, otherwise set this to null. 
+     */
+    public ArgumentDefinition( ArgumentIOType ioType,
+                               Class argumentType,
+                               String fullName,
+                               String shortName,
+                               String doc,
+                               boolean required,
+                               boolean isFlag,
+                               boolean isMultiValued,
+                               boolean isHidden,
+                               Class componentType,
+                               String exclusiveOf,
+                               String validation,
+                               List<String> validOptions) {
+        this.ioType = ioType;
+        this.argumentType = argumentType;
+        this.fullName = fullName;
+        this.shortName = shortName;
+        this.doc = doc;
+        this.required = required;
+        this.isFlag = isFlag;
+        this.isMultiValued = isMultiValued;
+        this.isHidden = isHidden;
+        this.componentType = componentType;
+        this.exclusiveOf = exclusiveOf;
+        this.validation = validation;
+        this.validOptions = validOptions;
+
+        validateName(shortName);
+        validateName(fullName);
+    }
+
+    /**
+     * Creates a new argument definition.
+     * @param annotation The annotation on the field.
+     * @param argumentType The class of the field.
+     * @param defaultFullName Default full name for this argument definition.
+     * @param defaultShortName Default short name for this argument definition.
+     * @param isFlag Whether or not this argument should be treated as a flag.
+     * @param isMultiValued Whether or not this argument supports multiple values.
+     * @param componentType For multivalued arguments the type of the components.
+     * @param isHidden Whether or not this argument should be hidden from the command-line argument system.
+     * @param validOptions is there a particular list of options that's valid for this argument definition?  List them if so, otherwise set this to null.
+     */
+    public ArgumentDefinition( Annotation annotation,
+                               ArgumentIOType ioType,
+                               Class argumentType,
+                               String defaultFullName,
+                               String defaultShortName,
+                               String doc,
+                               boolean isRequired,
+                               boolean isFlag,
+                               boolean isMultiValued,
+                               boolean isHidden,
+                               Class componentType,
+                               String exclusiveOf,
+                               String validation,
+                               List<String> validOptions) {
+
+        String fullName = (String)CommandLineUtils.getValue(annotation, "fullName");
+        String shortName = (String)CommandLineUtils.getValue(annotation, "shortName");
+        boolean isFullNameProvided = fullName.trim().length() > 0;
+        boolean isShortNameProvided = shortName.trim().length() > 0;
+
+        fullName = isFullNameProvided ? fullName.trim() : defaultFullName;
+
+        // If the short name is provided, use that.  If the user hasn't provided any names at all, use
+        // the default.  If somewhere in the middle, leave the short name blank.
+        if( isShortNameProvided )
+            shortName = shortName.trim();
+        else if( !isFullNameProvided )
+            shortName = defaultShortName;
+        else
+            shortName = null;
+
+        validateName(shortName);
+        validateName(fullName);
+
+        this.ioType = ioType;
+        this.argumentType = argumentType;
+        this.fullName = fullName;
+        this.shortName = shortName;
+        this.doc = doc;
+        this.required = isRequired;
+        this.isFlag = isFlag;
+        this.isMultiValued = isMultiValued;
+        this.isHidden = isHidden;
+        this.componentType = componentType;
+        this.exclusiveOf = exclusiveOf;
+        this.validation = validation;
+        this.validOptions = validOptions;
+    }
+    
+    @Override
+    public int hashCode() {
+        int hashCode = fullName.hashCode();
+        if(shortName != null) hashCode ^= shortName.hashCode();
+        return hashCode;
+    }
+
+    public boolean equals( Object o ) {
+        if( o == null )
+            return false;
+        if( !(o instanceof ArgumentDefinition) )
+            return false;
+
+        ArgumentDefinition other = (ArgumentDefinition)o;
+
+        return Utils.equals(fullName,other.fullName) &&
+               Utils.equals(shortName,other.shortName);
+    }
+
+    /**
+     * Retrieves the full name of the argument, specifiable with the '--' prefix.  The full name can be
+     * either specified explicitly with the fullName annotation parameter or implied by the field name.
+     * @param annotation Original field annotation.
+     * @param fieldName Original field name.
+     * @return full name of the argument.  Never null.
+     */
+    public static String getFullName( Annotation annotation, String fieldName ) {
+        String fullName = (String)CommandLineUtils.getValue(annotation, "fullName");
+        return fullName.trim().length() > 0 ? fullName.trim() : fieldName.toLowerCase();
+    }
+
+    /**
+     * Retrieves the short name of the argument, specifiable with the '-' prefix.  The short name can
+     * be specified or not; if left unspecified, no short name will be present.
+     * @param annotation Original field annotation.
+     * @return short name of the argument.  Null if no short name exists.
+     */
+    public static String getShortName( Annotation annotation ) {
+        String shortName = (String)CommandLineUtils.getValue(annotation, "shortName");
+        return shortName.trim().length() > 0 ? shortName.trim() : null;
+    }
+
+    /**
+     * Documentation for this argument.  Mandatory field.
+     * @param annotation Original field annotation.
+     * @return Documentation for this argument.
+     */
+    public static String getDoc( Annotation annotation ) {
+        return (String)CommandLineUtils.getValue(annotation, "doc");
+    }
+
+    /**
+     * Specifies other arguments which cannot be used in conjunction with this argument.  Comma-separated list.
+     * @param annotation Original field annotation.
+     * @return A comma-separated list of exclusive arguments, or null if none are present.
+     */
+    public static String getExclusiveOf( Annotation annotation ) {
+        String exclusiveOf = (String)CommandLineUtils.getValue(annotation, "exclusiveOf");
+        return exclusiveOf.trim().length() > 0 ? exclusiveOf.trim() : null;
+    }
+
+    /**
+     * A regular expression which can be used for validation.
+     * @param annotation Original field annotation.
+     * @return a JVM regex-compatible regular expression, or null to permit any possible value.
+     */
+    public static String getValidationRegex( Annotation annotation ) {
+        String validation = (String)CommandLineUtils.getValue(annotation, "validation");
+        return validation.trim().length() > 0 ? validation.trim() : null;
+    }
+
+    /**
+     * Make sure the argument's name is valid
+     *
+     * @param name
+     */
+    private void validateName(final String name) {
+        if ( name != null && name.startsWith("-") )
+            throw new ReviewedGATKException("Invalid argument definition: " + name + " begins with a -");
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/commandline/ArgumentDefinitionGroup.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/commandline/ArgumentDefinitionGroup.java
new file mode 100644
index 0000000..d6e2cb5
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/commandline/ArgumentDefinitionGroup.java
@@ -0,0 +1,99 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.commandline;
+
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.Iterator;
+import java.util.List;
+
+/**
+ * A group of argument definitions.
+ */
+public class ArgumentDefinitionGroup implements Iterable<ArgumentDefinition> {
+    /**
+     * Name of this group.
+     */
+    public final String groupName;
+
+    /**
+     * The argument definitions associated with this group.
+     */
+    public final List<ArgumentDefinition> argumentDefinitions;
+
+    public ArgumentDefinitionGroup( String groupName, List<ArgumentDefinition> argumentDefinitions ) {
+        this.groupName = groupName;
+        this.argumentDefinitions = Collections.unmodifiableList( argumentDefinitions );
+    }
+
+    /**
+     * Does the name of this argument group match the name of another?
+     */
+    public boolean groupNameMatches( ArgumentDefinitionGroup other ) {
+        if( this.groupName == null )
+            return other.groupName == null;
+        return this.groupName.equals(other.groupName);
+    }
+
+    /**
+     * Merges another argument group into this argument group.  Return a new
+     * group since argument groups are supposed to be immutable. Asserts that
+     * both argument groups have the same name.
+     */
+    public ArgumentDefinitionGroup merge( ArgumentDefinitionGroup other ) {
+        if( !groupNameMatches(other) )
+            throw new ReviewedGATKException("Unable to merge two argument groups with differing names.");
+
+        // Create a merged definition group.
+        List<ArgumentDefinition> mergedDefinitions = new ArrayList<ArgumentDefinition>();
+        mergedDefinitions.addAll(this.argumentDefinitions);
+        mergedDefinitions.addAll(other.argumentDefinitions);
+
+        return new ArgumentDefinitionGroup(groupName,mergedDefinitions);
+    }
+
+    /**
+     * Iterate over the arguments in an argument definition group.
+     * @return
+     */
+    public Iterator<ArgumentDefinition> iterator() {
+        return argumentDefinitions.iterator();
+    }
+
+    /**
+     * Reports whether all the arguments in this group are hidden.
+     * @return True if all are hidden, false if some or none are hidden.
+     */
+    public boolean allHidden() {
+        for(ArgumentDefinition argumentDefinition: argumentDefinitions) {
+            if(!argumentDefinition.isHidden)
+                return false;
+        }
+        return true;
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/commandline/ArgumentDefinitions.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/commandline/ArgumentDefinitions.java
new file mode 100644
index 0000000..77c747f
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/commandline/ArgumentDefinitions.java
@@ -0,0 +1,195 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.commandline;
+
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+
+import java.util.Collection;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.Set;
+
+/**
+ * A collection of argument definitions.
+ */
+public class ArgumentDefinitions implements Iterable<ArgumentDefinition> {
+    /**
+     * Backing data set of argument stored by short name and long name.
+     */
+    private Set<ArgumentDefinition> argumentDefinitions = new HashSet<ArgumentDefinition>();
+
+    /**
+     * The groupings of argument definitions.  Used mainly for help output.
+     */
+    private Set<ArgumentDefinitionGroup> argumentDefinitionGroups = new HashSet<ArgumentDefinitionGroup>();
+
+    /**
+     * Adds an argument to the this argument definition list.
+     * @param argumentDefinitionGroup The group of arguments to add.
+     */
+    public void add( ArgumentDefinitionGroup argumentDefinitionGroup ) {
+        for( ArgumentDefinition definition: argumentDefinitionGroup ) {
+            // Do some basic validation before adding the definition. 
+            if( definition.fullName.length() == 0 )
+                throw new IllegalArgumentException( "Argument cannot have 0-length fullname." );
+            if( hasArgumentDefinition( definition.fullName, FullNameDefinitionMatcher ) )
+                throw new ReviewedGATKException("Duplicate definition of argument with full name: " + definition.fullName);
+            if( definition.shortName != null && hasArgumentDefinition( definition.shortName, ShortNameDefinitionMatcher ) )
+                throw new ReviewedGATKException("Duplicate definition of argument with short name: " + definition.shortName);
+
+            argumentDefinitions.add( definition );
+        }
+
+        // Find an existing argument definition group with this name.
+        // If one exists, merge this group into the other.
+        Iterator<ArgumentDefinitionGroup> definitionGroupIterator = argumentDefinitionGroups.iterator();
+        while( definitionGroupIterator.hasNext() ) {
+            ArgumentDefinitionGroup candidate = definitionGroupIterator.next();            
+            if( candidate.groupNameMatches(argumentDefinitionGroup) ) {
+                argumentDefinitionGroup = candidate.merge(argumentDefinitionGroup);
+                definitionGroupIterator.remove();
+            }
+        }
+
+        // Otherwise, add the new group.
+        argumentDefinitionGroups.add( argumentDefinitionGroup );
+    }
+
+    /**
+     * Are there any argument definitions matching the given property?
+     * @param property Property to find.
+     * @param matcher Method of matching a given property.
+     * @return True if one or multiple argument definitions match; false otherwise.
+     */
+    public boolean hasArgumentDefinition( Object property, DefinitionMatcher matcher ) {
+        return findArgumentDefinitions( property, matcher ).size() > 0;
+    }
+
+    /**
+     * Find the given definition matching this property.
+     * @param property Property to find.
+     * @param matcher Method of matching a given property.
+     * @return The ArgumentDefinition matching the given property.  Null if none matches.
+     * @throws IllegalArgumentException if multiple arguments match this definition.
+     */
+    public ArgumentDefinition findArgumentDefinition( Object property, DefinitionMatcher matcher ) {
+        Collection<ArgumentDefinition> selectedDefinitions = findArgumentDefinitions( property, matcher );
+        if( selectedDefinitions.size() > 1 )
+            throw new IllegalArgumentException("Multiple argument definitions match the selected property: " + property);
+
+        if( selectedDefinitions.size() == 0 )
+            return null;
+
+        return selectedDefinitions.iterator().next();
+    }
+
+    /**
+     * Find all argument definitions matching a certain category.
+     * @param property Property to inspect.
+     * @param matcher Test to see whether property matches.
+     * @return All argument definitions matching a certain object.
+     */
+    public Collection<ArgumentDefinition> findArgumentDefinitions( Object property, DefinitionMatcher matcher ) {
+        Set<ArgumentDefinition> selectedArgumentDefinitions = new HashSet<ArgumentDefinition>();
+        for( ArgumentDefinition argumentDefinition: argumentDefinitions ) {
+            if( matcher.matches( argumentDefinition, property ) )
+                selectedArgumentDefinitions.add( argumentDefinition );
+        }
+        return selectedArgumentDefinitions;
+    }
+
+    /**
+     * Return a list of the available argument groups.
+     * @return All the argument groups that have been added.
+     */
+    public Collection<ArgumentDefinitionGroup> getArgumentDefinitionGroups() {
+        return argumentDefinitionGroups;
+    }
+
+    /**
+     * Iterates through all command-line arguments.
+     * @return an iterator over command-line arguments.
+     */
+    public Iterator<ArgumentDefinition> iterator() {
+        return argumentDefinitions.iterator();
+    }
+
+    /**
+     * Match the full name of a definition.
+     */
+    static DefinitionMatcher FullNameDefinitionMatcher = new DefinitionMatcher() {
+        public boolean matches( ArgumentDefinition definition, Object key ) {
+            if( definition.fullName == null )
+                return key == null;
+            else
+                return definition.fullName.equals( key );
+        }        
+    };
+
+    /**
+     * Match the short name of a definition.
+     */
+    static DefinitionMatcher ShortNameDefinitionMatcher = new DefinitionMatcher() {
+        public boolean matches( ArgumentDefinition definition, Object key ) {
+            if( definition.shortName == null )
+                return key == null;
+            else
+                return definition.shortName.equals( key );
+        }
+    };
+
+    /**
+     * Find all required definitions.
+     */
+    static DefinitionMatcher RequiredDefinitionMatcher = new DefinitionMatcher() {
+        public boolean matches( ArgumentDefinition definition, Object key ) {
+            if( !(key instanceof Boolean) )
+                throw new IllegalArgumentException("RequiredDefinitionMatcher requires boolean key");
+            return definition.required == (Boolean)key;
+        }
+    };
+
+    static DefinitionMatcher VerifiableDefinitionMatcher = new DefinitionMatcher() {
+        public boolean matches( ArgumentDefinition definition, Object key ) {
+            // We can perform some sort of validation for anything that isn't a flag or enum.
+            // Because enums can have a default value, it might be valid to specify an enum argument with no value
+            return !definition.isFlag  && !definition.argumentType.isEnum();
+        }        
+    };
+}
+
/**
 * A Comparator-esque interface for finding argument definitions within a collection.
 * Implementations act as stateless predicates; see the static matcher instances
 * declared in ArgumentDefinitions for the standard ones (full name, short name,
 * required flag, verifiability).
 */
interface DefinitionMatcher {
    /**
     * Does the given definition match the provided key?
     * @param definition The definition to inspect.
     * @param key The value to match.  Interpretation is implementation-specific;
     *            some matchers may ignore it entirely.
     * @return True if the key matches the definition, false otherwise.
     */
    boolean matches( ArgumentDefinition definition, Object key );
}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/commandline/ArgumentException.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/commandline/ArgumentException.java
new file mode 100644
index 0000000..e098598
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/commandline/ArgumentException.java
@@ -0,0 +1,38 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.commandline;
+
+import org.broadinstitute.gatk.utils.exceptions.UserException;
+
+/**
+ * Generic exception for miscellaneous command-line argument parsing errors.
+ * Extends UserException so parsing problems are reported to the user as
+ * usage errors rather than internal failures.
+ */
+public class ArgumentException extends UserException {
+    public ArgumentException( String message ) {
+        super( message );
+    }
+}
+
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/commandline/ArgumentIOType.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/commandline/ArgumentIOType.java
new file mode 100644
index 0000000..b510234
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/commandline/ArgumentIOType.java
@@ -0,0 +1,52 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.commandline;
+
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+
+import java.lang.annotation.Annotation;
+
+/**
+ * The I/O direction of a command-line argument, keyed by the annotation
+ * (@Input, @Output, or @Argument) used to declare it.
+ */
+public enum ArgumentIOType {
+    INPUT(Input.class), OUTPUT(Output.class), ARGUMENT(Argument.class);
+
+    /** The annotation interface that declares arguments of this I/O type. */
+    public final Class<? extends Annotation> annotationClass;
+
+    ArgumentIOType(Class<? extends Annotation> annotationClass) {
+        this.annotationClass = annotationClass;
+    }
+
+    /**
+     * Returns the ArgumentIOType corresponding to the given annotation.
+     * @param annotation An @Input, @Output, or @Argument annotation instance.
+     * @return The matching ArgumentIOType.
+     * @throws ReviewedGATKException if the annotation is none of the known types.
+     */
+    public static ArgumentIOType getIOType(Annotation annotation) {
+        for (ArgumentIOType ioType: ArgumentIOType.values())
+            // annotationType() yields the declared annotation interface directly,
+            // rather than comparing against the runtime proxy class from getClass().
+            if (ioType.annotationClass.equals(annotation.annotationType()))
+                return ioType;
+        throw new ReviewedGATKException("Unknown annotation type: " + annotation);
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/commandline/ArgumentMatch.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/commandline/ArgumentMatch.java
new file mode 100644
index 0000000..bbabd5e
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/commandline/ArgumentMatch.java
@@ -0,0 +1,292 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.commandline;
+
+import java.util.*;
+
+/**
+ * A mapping of all the sites where an argument definition maps to a site on the command line.
+ */
+public class ArgumentMatch implements Iterable<ArgumentMatch> {
+    /**
+     * The argument definition that's been matched.  May be null for an invalid argument.
+     */
+    public final ArgumentDefinition definition;
+
+    /**
+     * The text that's been matched, as it appears in the command line arguments.
+     */
+    public final String label;
+
+    /**
+     * Maps indices of command line arguments to values paired with that argument.
+     */
+    public final SortedMap<ArgumentMatchSite,List<ArgumentMatchValue>> sites = new TreeMap<ArgumentMatchSite,List<ArgumentMatchValue>>();
+
+    /**
+     * An ordered, freeform collection of tags.
+     */
+    public final Tags tags;
+
+    /**
+     * Create a new argument match, defining its properties later.  Used to create invalid arguments.
+     */
+    public ArgumentMatch() {
+        this(null,null);
+    }
+
+    /**
+     * Minimal constructor for transform function.
+     * @param label Label of the argument match.  Must not be null.
+     * @param definition The associated definition, if one exists.  May be null.
+     */
+    private ArgumentMatch(final String label, final ArgumentDefinition definition) {
+        this.label = label;
+        this.definition = definition;
+        this.tags = new Tags();
+    }
+
+    /**
+     * A simple way of indicating that an argument with the given label and definition exists at this site.
+     * @param label Label of the argument match.  Must not be null.
+     * @param definition The associated definition, if one exists.  May be null.
+     * @param site Position of the argument.  Must not be null.
+     * @param tags ordered freeform text tags associated with this argument.
+     */
+    public ArgumentMatch(final String label, final ArgumentDefinition definition, final ArgumentMatchSite site, final Tags tags) {
+        this( label, definition, site, null, tags );
+    }
+
+    /**
+     * A simple way of indicating that an argument with the given label and definition exists at this site.
+     * @param label Label of the argument match.  Must not be null.
+     * @param definition The associated definition, if one exists.  May be null.
+     * @param site Position of the argument.  Must not be null.
+     * @param value Value for the argument at this position.
+     * @param tags ordered freeform text tags associated with this argument.
+     */
+    private ArgumentMatch(final String label, final ArgumentDefinition definition, final ArgumentMatchSite site, final ArgumentMatchValue value, final Tags tags) {
+        this.label = label;
+        this.definition = definition;
+
+        ArrayList<ArgumentMatchValue> values = new ArrayList<ArgumentMatchValue>();
+        if( value != null )
+            values.add(value);
+        sites.put(site,values );
+
+        this.tags = tags;
+    }
+
+    /**
+     * Check to see whether two ArgumentMatch objects are equal.
+     * @param other Object to which this should be compared.
+     * @return True if objects are equal, false if objects are not equal or incomparable.
+     */
+    @Override
+    public boolean equals(Object other) {
+        // this clearly isn't null, since this.equals() when this == null would result in an NPE.
+        if(other == null)
+            return false;
+        if(!(other instanceof ArgumentMatch))
+            return false;
+        ArgumentMatch otherArgumentMatch = (ArgumentMatch)other;
+        // Null-safe comparisons: the no-arg constructor deliberately builds
+        // "invalid argument" matches whose label and definition are null, and a
+        // direct field.equals() call would throw NullPointerException on them.
+        return Objects.equals(this.definition, otherArgumentMatch.definition) &&
+                Objects.equals(this.label, otherArgumentMatch.label) &&
+                Objects.equals(this.sites, otherArgumentMatch.sites) &&
+                Objects.equals(this.tags, otherArgumentMatch.tags);
+    }
+
+    /**
+     * Hash code consistent with equals(), computed null-safely from the same fields.
+     * Required by the Object contract so matches behave correctly in hash-based collections.
+     * @return Hash code of this match.
+     */
+    @Override
+    public int hashCode() {
+        return Objects.hash(definition, label, sites, tags);
+    }
+
+    /**
+     * Reformat the given entries with the given multiplexer and key.
+     * TODO: Generify this.
+     * @param multiplexer Multiplexer that controls the transformation process.
+     * @param key Key which specifies the transform.
+     * @return A variant of this ArgumentMatch with all keys transformed.
+     */
+    @SuppressWarnings("unchecked")
+    ArgumentMatch transform(Multiplexer multiplexer, Object key) {
+        SortedMap<ArgumentMatchSite,List<ArgumentMatchValue>> newIndices = new TreeMap<ArgumentMatchSite,List<ArgumentMatchValue>>();
+        for(Map.Entry<ArgumentMatchSite,List<ArgumentMatchValue>> site: sites.entrySet()) {
+            List<ArgumentMatchValue> newEntries = new ArrayList<ArgumentMatchValue>();
+            for(ArgumentMatchValue entry: site.getValue())
+                newEntries.add(new ArgumentMatchStringValue(multiplexer.transformArgument(key,entry.asString())));
+            newIndices.put(site.getKey(),newEntries);
+        }
+        ArgumentMatch newArgumentMatch = new ArgumentMatch(label,definition);
+        newArgumentMatch.sites.putAll(newIndices);
+        return newArgumentMatch;
+    }
+
+    /**
+     * Return a string representation of the given argument match, for debugging purposes.
+     * @return String representation of the match.
+     */
+    @Override
+    public String toString() {
+        return label;
+    }
+
+    /**
+     * Creates an iterator that walks over each individual match at each position of a given argument.
+     * @return An iterator over the individual matches in this argument.  Will not be null.
+     */
+    public Iterator<ArgumentMatch> iterator() {
+        return new Iterator<ArgumentMatch>() {
+            /**
+             * Iterate over each the available site.
+             */
+            private Iterator<ArgumentMatchSite> siteIterator = null;
+
+            /**
+             * Iterate over each available token.
+             */
+            private Iterator<ArgumentMatchValue> tokenIterator = null;
+
+            /**
+             * The next site to return.  Null if none remain.
+             */
+            ArgumentMatchSite nextSite = null;
+
+            /**
+             * The next token to return.  Null if none remain.
+             */
+            ArgumentMatchValue nextToken = null;
+
+            {
+                siteIterator = sites.keySet().iterator();
+                prepareNext();
+            }
+
+            /**
+             * Is there a nextToken available to return?
+             * @return True if there's another token waiting in the wings.  False otherwise.
+             */
+            public boolean hasNext() {
+                return nextSite != null;
+            }
+
+            /**
+             * Get the next token, if one exists.  If not, throw an IllegalStateException.
+             * @return The next ArgumentMatch in the series.  Should never be null.
+             */
+            public ArgumentMatch next() {
+                if( nextSite == null )
+                    throw new IllegalStateException( "No more ArgumentMatches are available" );
+
+                ArgumentMatch match = new ArgumentMatch( label, definition, nextSite, nextToken, tags );
+                prepareNext();
+                return match;
+            }
+
+            /**
+             * Initialize the next ArgumentMatch to return.  If no ArgumentMatches are available,
+             * initialize nextSite / nextToken to null.
+             */
+            private void prepareNext() {
+                if( tokenIterator != null && tokenIterator.hasNext() ) {
+                    nextToken = tokenIterator.next();
+                }
+                else {
+                    nextSite = null;
+                    nextToken = null;
+
+                    // Do a nested loop.  While more data is present in the inner loop, grab that data.
+                    // Otherwise, troll the outer iterator looking for more data.
+                    while( siteIterator.hasNext() ) {
+                        nextSite = siteIterator.next();
+                        if( sites.get(nextSite) != null ) {
+                            tokenIterator = sites.get(nextSite).iterator();
+                            nextToken = tokenIterator.hasNext() ? tokenIterator.next() : null;
+                            break;
+                        }
+                    }
+                }
+
+            }
+
+            /**
+             * Remove is unsupported in this context.
+             */
+            public void remove() {
+                throw new UnsupportedOperationException("Cannot remove an argument match from the collection while iterating.");
+            }
+        };
+    }
+
+    /**
+     * Merge two ArgumentMatches, so that the values for all arguments go into the
+     * same data structure.
+     * @param other The other match whose sites are merged into this match.
+     */
+    public void mergeInto( ArgumentMatch other ) {
+        sites.putAll(other.sites);
+    }
+
+    /**
+     * Associate a value with this merge mapping.
+     * @param site site of the command-line argument to which this value is mated.
+     * @param value Text representation of value to add.
+     */
+    public void addValue( ArgumentMatchSite site, ArgumentMatchValue value ) {
+        if( !sites.containsKey(site) || sites.get(site) == null )
+            sites.put(site, new ArrayList<ArgumentMatchValue>() );
+        sites.get(site).add(value);
+    }
+
+    /**
+     * Does this argument already have a value at the given site?
+     * Arguments are only allowed to be single-valued per site, and
+     * flags aren't allowed a value at all.
+     * @param site Site at which to check for values.
+     * @return True if the argument has a value at the given site.  False otherwise.
+     */
+    public boolean hasValueAtSite( ArgumentMatchSite site ) {
+        return (sites.get(site) != null && sites.get(site).size() >= 1) || isArgumentFlag();
+    }
+
+    /**
+     * Return the values associated with this argument match.
+     * @return A collection of the string representation of these values.
+     */
+    public List<ArgumentMatchValue> values() {
+        final List<ArgumentMatchValue> values = new ArrayList<ArgumentMatchValue>();
+        for ( final List<ArgumentMatchValue> siteValue : sites.values() ) {
+            if ( siteValue != null )
+                values.addAll(siteValue);
+            else
+                values.add(null);
+        }
+        return values;
+    }
+
+    /**
+     * Convenience method returning true if the definition is a flag.
+     * @return True if definition is known to be a flag; false if not known to be a flag.
+     */
+    private boolean isArgumentFlag() {
+        return definition != null && definition.isFlag;
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/commandline/ArgumentMatchFileValue.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/commandline/ArgumentMatchFileValue.java
new file mode 100644
index 0000000..db54fb5
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/commandline/ArgumentMatchFileValue.java
@@ -0,0 +1,52 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.commandline;
+
+import java.io.File;
+
+/**
+ * Holds a reference to a file as an argument match value.
+ *
+ * This is useful when the type of the stored file may be a subclass of java.io.File,
+ * for example a Queue RemoteFile.
+ */
+public class ArgumentMatchFileValue extends ArgumentMatchValue {
+    private final File file;
+
+    public ArgumentMatchFileValue(File file) {
+        this.file = file;
+    }
+
+    @Override
+    public String asString() {
+        return file == null ? null : file.getAbsolutePath();
+    }
+
+    @Override
+    public File asFile() {
+        return file;
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/commandline/ArgumentMatchSite.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/commandline/ArgumentMatchSite.java
new file mode 100644
index 0000000..095ea9e
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/commandline/ArgumentMatchSite.java
@@ -0,0 +1,77 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.commandline;
+
+/**
+ * Which source and the index within the source where an argument match was found.
+ */
+public class ArgumentMatchSite implements Comparable<ArgumentMatchSite> {
+    private final ArgumentMatchSource source;
+    private final int index;
+
+    /**
+     * @param source Where the argument originated (command line or provider).
+     * @param index Position of the argument within that source.
+     */
+    public ArgumentMatchSite(ArgumentMatchSource source, int index) {
+        this.source = source;
+        this.index = index;
+    }
+
+    public ArgumentMatchSource getSource() {
+        return source;
+    }
+
+    public int getIndex() {
+        return index;
+    }
+
+    @Override
+    public boolean equals(Object o) {
+        if (this == o) return true;
+        if (o == null || getClass() != o.getClass()) return false;
+
+        ArgumentMatchSite that = (ArgumentMatchSite) o;
+
+        return (index == that.index) && (source == null ? that.source == null : source.equals(that.source));
+    }
+
+    @Override
+    public int hashCode() {
+        int result = source != null ? source.hashCode() : 0;
+        // Generated by intellij. No other special reason to this implementation. -ks
+        result = 31 * result + index;
+        return result;
+    }
+
+    /**
+     * Orders sites by source first, then by index within the source.
+     * NOTE(review): assumes source is non-null here even though equals/hashCode
+     * tolerate a null source -- confirm callers never sort null-source sites.
+     */
+    @Override
+    public int compareTo(ArgumentMatchSite that) {
+        int comp = this.source.compareTo(that.source);
+        if (comp != 0)
+            return comp;
+
+        // Both sources are the same; fall back to an overflow-safe index comparison.
+        return Integer.compare(this.index, that.index);
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/commandline/ArgumentMatchSource.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/commandline/ArgumentMatchSource.java
new file mode 100644
index 0000000..229af98
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/commandline/ArgumentMatchSource.java
@@ -0,0 +1,97 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.commandline;
+
+/**
+ * Where an argument match originated: either directly from the command line
+ * or from a named custom provider (such as an argument file).
+ */
+public class ArgumentMatchSource implements Comparable<ArgumentMatchSource> {
+    /** The singleton source representing arguments typed on the command line. */
+    public static final ArgumentMatchSource COMMAND_LINE = new ArgumentMatchSource(ArgumentMatchSourceType.CommandLine, null);
+
+    private final ArgumentMatchSourceType type;
+    private final String description;
+
+    /**
+     * Creates a provider-typed argument match source.
+     * @param description Where the arguments originated.  Must not be null.
+     */
+    public ArgumentMatchSource(String description) {
+        this(ArgumentMatchSourceType.Provider, description);
+    }
+
+    private ArgumentMatchSource(ArgumentMatchSourceType type, String description) {
+        // Only the command-line singleton may omit a description.
+        if (description == null && type == ArgumentMatchSourceType.Provider)
+            throw new IllegalArgumentException("An argument match source provider cannot have a null description.");
+        this.type = type;
+        this.description = description;
+    }
+
+    public ArgumentMatchSourceType getType() {
+        return type;
+    }
+
+    public String getDescription() {
+        return description;
+    }
+
+    @Override
+    public boolean equals(Object o) {
+        if (this == o)
+            return true;
+        if (o == null || getClass() != o.getClass())
+            return false;
+
+        final ArgumentMatchSource other = (ArgumentMatchSource) o;
+        if (type != other.type)
+            return false;
+        return description == null ? other.description == null : description.equals(other.description);
+    }
+
+    @Override
+    public int hashCode() {
+        int hash = (type == null) ? 0 : type.hashCode();
+        hash = 31 * hash + ((description == null) ? 0 : description.hashCode());
+        return hash;
+    }
+
+    /**
+     * Compares two sources: first by type (command line sorts before providers),
+     * then by description, with a null description ordered before any non-null one.
+     */
+    @Override
+    public int compareTo(ArgumentMatchSource that) {
+        final int byType = this.type.compareTo(that.type);
+        if (byType != 0)
+            return byType;
+
+        if (this.description == null && that.description == null)
+            return 0;
+        if (this.description == null)
+            return -1;
+        if (that.description == null)
+            return 1;
+        return this.description.compareTo(that.description);
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/commandline/ArgumentMatchSourceType.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/commandline/ArgumentMatchSourceType.java
new file mode 100644
index 0000000..d799048
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/commandline/ArgumentMatchSourceType.java
@@ -0,0 +1,33 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.commandline;
+
+/**
+ * Type describing where an argument match originated: via the command line or some other provider.
+ */
+public enum ArgumentMatchSourceType {
+    CommandLine, // argument was supplied directly on the command line
+    Provider     // argument came from some other provider
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/commandline/ArgumentMatchStringValue.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/commandline/ArgumentMatchStringValue.java
new file mode 100644
index 0000000..b663b59
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/commandline/ArgumentMatchStringValue.java
@@ -0,0 +1,49 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.commandline;
+
+import java.io.File;
+
+/**
+ * Argument values that originated from a string.
+ */
+public class ArgumentMatchStringValue extends ArgumentMatchValue {
+    private final String value;
+
+    public ArgumentMatchStringValue(String value) {
+        this.value = value;
+    }
+
+    @Override
+    public String asString() {
+        return value;
+    }
+
+    @Override
+    public File asFile() {
+        return value == null ? null : new File(value);
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/commandline/ArgumentMatchValue.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/commandline/ArgumentMatchValue.java
new file mode 100644
index 0000000..b6ebdcb
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/commandline/ArgumentMatchValue.java
@@ -0,0 +1,43 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.commandline;
+
+import java.io.File;
+
+/**
+ * Abstract representation of a single value supplied for a command-line
+ * argument, retrievable either as a String or as a File.
+ */
+public abstract class ArgumentMatchValue {
+    /**
+     * @return the value of this argument as a String object.  May be null.
+     */
+    public abstract String asString();
+
+    /**
+     * @return the value of this argument as a File object.  May be null.
+     */
+    public abstract File asFile();
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/commandline/ArgumentMatches.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/commandline/ArgumentMatches.java
new file mode 100644
index 0000000..6edd75f
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/commandline/ArgumentMatches.java
@@ -0,0 +1,209 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.commandline;
+
+import java.util.*;
+/**
+ * Represents a list of potential matches between the arguments defined
+ * by the argument sources and the arguments passed in via the command line.
+ */
+public class ArgumentMatches implements Iterable<ArgumentMatch> {
+    /**
+     * Collection matches from argument definition to argument value.
+     * Package protected access is deliberate.
+     */
+    Map<ArgumentMatchSite,ArgumentMatch> argumentMatches = new TreeMap<ArgumentMatchSite,ArgumentMatch>();
+
+    /**
+     * Provide a place to put command-line argument values that don't seem to belong to
+     * any particular command-line option.
+     */
+    ArgumentMatch MissingArgument = new ArgumentMatch();
+
+    /**
+     * Get an iterator cycling through *unique* command-line argument <-> definition matches.
+     * @return Iterator over all argument matches.
+     */
+    public Iterator<ArgumentMatch> iterator() {
+        return getUniqueMatches().iterator();
+    }
+
+    /**
+     * Create an empty ArgumentMatches object.
+     */
+    public ArgumentMatches() {
+    }
+
+    /**
+     * Create a singleton ArgumentMatches object.
+     * @param match Match to incorporate.
+     */
+    public ArgumentMatches( ArgumentMatch match ) {
+        mergeInto( match );
+    }
+
+    /**
+     * Returns the number of matches in this structure.
+     * @return Count of the matches in this structure.
+     */
+    public int size() {
+        return argumentMatches.size();
+    }
+
+    /**
+     * Indicates whether the site contains a matched argument.
+     * @param site Site at which to check.
+     * @return True if the site has a match.  False otherwise.
+     */
+    boolean hasMatch( ArgumentMatchSite site ) {
+        return argumentMatches.containsKey( site );
+    }
+
+    /**
+     * Gets the match at a given site.
+     * @param site Site at which to look for a match.
+     * @return The match present at the given site.
+     * @throws IllegalArgumentException if site does not contain a match.
+     */
+    ArgumentMatch getMatch( ArgumentMatchSite site ) {
+        if( !argumentMatches.containsKey(site) )
+            throw new IllegalArgumentException( "Site does not contain an argument: " + site );
+        return argumentMatches.get(site);
+    }
+
+    /**
+     * Does the match collection have a match for this argument definition.
+     * @param definition Definition to match.
+     * @return True if a match exists; false otherwise.
+     */
+    boolean hasMatch( ArgumentDefinition definition ) {
+        return findMatches( definition ).size() > 0;
+    }
+
+    /**
+     * Return all argument matches of this source.
+     * @param parsingEngine Parsing engine.
+     * @param argumentSource Argument source to match.
+     * @return List of all matches.
+     */
+
+    ArgumentMatches findMatches(ParsingEngine parsingEngine, ArgumentSource argumentSource) {
+        List<ArgumentDefinition> sourceDefinitions = parsingEngine.selectBestTypeDescriptor(argumentSource.field.getType()).createArgumentDefinitions(argumentSource);
+
+        ArgumentMatches matches = new ArgumentMatches();
+        for( ArgumentMatch argumentMatch: getUniqueMatches() ) {
+            if( sourceDefinitions.contains(argumentMatch.definition) )
+                matches.mergeInto( argumentMatch );
+        }
+        return matches;
+    }
+
+    /**
+     * Return all argument matches of this definition.
+     * @param definition Argument definition to match.
+     * @return List of all matches.
+     */
+    ArgumentMatches findMatches( ArgumentDefinition definition ) {
+        ArgumentMatches matches = new ArgumentMatches();
+        for( ArgumentMatch argumentMatch: argumentMatches.values() ) {
+            if( argumentMatch.definition == definition )
+                matches.mergeInto( argumentMatch );
+        }
+        return matches;
+    }
+
+    /**
+     * Find all successful matches (a 'successful' match is one paired with a definition).
+     * @return All successful matches.
+     */
+    ArgumentMatches findSuccessfulMatches() {
+        ArgumentMatches matches = new ArgumentMatches();
+        for( ArgumentMatch argumentMatch: argumentMatches.values() ) {
+            if( argumentMatch.definition != null )
+                matches.mergeInto( argumentMatch );
+        }
+        return matches;
+    }
+
+    /**
+     * Find arguments that are unmatched to any definition.
+     * @return Set of matches that have no associated definition.
+     */
+    ArgumentMatches findUnmatched() {
+        ArgumentMatches matches = new ArgumentMatches();
+        for( ArgumentMatch argumentMatch: argumentMatches.values() ) {
+            if( argumentMatch.definition == null )
+                matches.mergeInto( argumentMatch );
+        }
+        return matches;
+    }
+
+    /**
+     * Reformat the given entries with the given multiplexer and key.
+     * TODO: Generify this.
+     * @param multiplexer Multiplexer that controls the transformation process.
+     * @param key Key which specifies the transform.
+     * @return new argument matches.
+     */
+    ArgumentMatches transform(Multiplexer multiplexer, Object key) {
+        ArgumentMatches newArgumentMatches = new ArgumentMatches();
+        for(ArgumentMatch match: argumentMatches.values())
+            newArgumentMatches.mergeInto(match.transform(multiplexer,key));
+        return newArgumentMatches;
+    }
+
+    /**
+     * Merges the given argument match into the set of existing argument matches.
+     * If multiple arguments are present, those arguments will end up grouped.
+     * @param match The match to merge into.
+     */
+    void mergeInto( ArgumentMatch match ) {
+        boolean definitionExists = false;
+
+        // Clone the list of argument matches to avoid ConcurrentModificationExceptions.
+        for( ArgumentMatch argumentMatch: getUniqueMatches() ) {
+            if( argumentMatch.definition == match.definition && argumentMatch.tags.equals(match.tags) ) {
+                argumentMatch.mergeInto( match );
+                for( ArgumentMatchSite site: match.sites.keySet() )
+                    argumentMatches.put( site, argumentMatch );
+                definitionExists = true;
+            }
+        }
+
+        if( !definitionExists ) {
+            for( ArgumentMatchSite site: match.sites.keySet() )
+                argumentMatches.put( site, match );
+        }
+    }    
+
+    /**
+     * Determines, of the argument matches by position, which are unique and returns that list.
+     * @return A unique set of matches.
+     */
+    private Set<ArgumentMatch> getUniqueMatches() {
+        return new LinkedHashSet<ArgumentMatch>( argumentMatches.values() );
+    }    
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/commandline/ArgumentSource.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/commandline/ArgumentSource.java
new file mode 100644
index 0000000..a8c4e3f
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/commandline/ArgumentSource.java
@@ -0,0 +1,243 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.commandline;
+
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+
+import java.lang.reflect.Field;
+import java.util.Arrays;
+import java.util.List;
+
+/**
+ * Describes the source field which defines a command-line argument.
+ * A parsed-object version of the command-line argument will be
+ * injected into an object containing this field.
+ *
+ * @author mhanna
+ * @version 0.1
+ */
+public class ArgumentSource {
+    /**
+     * Field into which to inject command-line arguments.
+     */
+    public final Field[] parentFields;
+
+    /**
+     * Field into which to inject command-line arguments.
+     */
+    public final Field field;
+
+    /**
+     * Type descriptor to use when parsing new argument types.
+     */
+    private final ArgumentTypeDescriptor typeDescriptor;
+
+    /**
+     * Create a new command-line argument target.
+     * @param parentFields Parent fields containing the the field.  Field must be annotated with 'ArgumentCollection'.
+     * @param field Field containing the argument.  Field must be annotated with 'Input' or 'Output'.
+     * @param typeDescriptor custom type descriptor to use when parsing.
+     */
+    protected ArgumentSource( Field[] parentFields, Field field, ArgumentTypeDescriptor typeDescriptor) {
+        this.parentFields = parentFields;
+        this.field = field;
+        this.typeDescriptor = typeDescriptor;
+    }
+
+    /**
+     * Somewhat hackish copy constructor to track fields with a custom type descriptor.
+     * TODO: Separate type descriptor from ArgumentSource in general usage.
+     * @param typeDescriptor New type descriptor for the object.
+     */
+    public ArgumentSource copyWithCustomTypeDescriptor(final ArgumentTypeDescriptor typeDescriptor) {
+        return new ArgumentSource(parentFields,field,typeDescriptor);
+    }
+
+    /**
+     * True if this argument source equals other.
+     * @param other Another object, possibly an argument source, to test for equality.  Any object can
+     *              be tested, but only instances of ArgumentSource will result in equals returning true.
+     * @return True if this argument source matches other.  False otherwise.
+     */
+    @Override
+    public boolean equals( Object other ) {
+        if( other == null )
+            return false;
+        if( !(other instanceof ArgumentSource) )
+            return false;
+
+        ArgumentSource otherArgumentSource = (ArgumentSource)other;
+        return this.field == otherArgumentSource.field && Arrays.equals(this.parentFields, otherArgumentSource.parentFields);
+    }
+
+    /**
+     * Returns an appropriate hash code for this argument source.
+     * @return A uniformly distributed hashcode representing this argument source.
+     */
+    @Override
+    public int hashCode() {
+        return field.hashCode();
+    }
+
+    /**
+     * Generate a list of all argument definitions to which this argument source maps.
+     * @return A non-null, non-empty list of argument definitions.
+     */
+    public List<ArgumentDefinition> createArgumentDefinitions() {
+        return typeDescriptor.createArgumentDefinitions( this );
+    }
+
+    /**
+     * Parses the specified value based on the specified type.
+     * @param values String representation of all values passed.
+     * @return the parsed value of the object.
+     */
+    public Object parse( ParsingEngine parsingEngine, ArgumentMatches values ) {
+        return typeDescriptor.parse( parsingEngine, this, values );
+    }
+
+    /**
+     * Returns whether this field is required.  Note that flag fields are always forced to 'not required'.
+     * @return True if the field is mandatory and not a boolean flag.  False otherwise.
+     */
+    public boolean isRequired() {
+        return (Boolean)CommandLineUtils.getValue(ArgumentTypeDescriptor.getArgumentAnnotation(this),"required");
+    }
+
+    /**
+     * Returns true if the argument is a flag (a 0-valued argument).
+     * @return True if argument is a flag; false otherwise.
+     */
+    public boolean isFlag() {
+        return (field.getType() == Boolean.class) || (field.getType() == Boolean.TYPE);
+    }
+
+    /**
+     * Can this argument support multiple values, or just one?
+     * @return True if the argument supports multiple values.
+     */
+    public boolean isMultiValued() {
+        return typeDescriptor.isMultiValued( this );
+    }
+
+    /**
+     * Should the given class be hidden from the command-line argument system.
+     * @return True if so.  False otherwise.
+     */
+    public boolean isHidden() {
+        return field.isAnnotationPresent(Hidden.class) || field.isAnnotationPresent(Deprecated.class);
+    }
+
+    /**
+     * Is the given argument considered an advanced option when displaying on the command-line argument system.
+     * @return True if so.  False otherwise.
+     */
+    public boolean isAdvanced() {
+        return field.isAnnotationPresent(Advanced.class);
+    }
+
+    /**
+     * Is the given argument an output.
+     * @return True if so. False otherwise.
+     */
+    public boolean isOutput() {
+        return field.isAnnotationPresent(Output.class);
+    }
+
+    /**
+     * Is the given argument an input.
+     * @return True if so. False otherwise.
+     */
+    public boolean isInput() {
+        return field.isAnnotationPresent(Input.class);
+    }
+
+    /**
+     * Is this command-line argument dependent on some primitive argument types?
+     * @return True if this command-line argument depends on other arguments; false otherwise.
+     */
+    public boolean isDependent() {
+        return typeDescriptor instanceof MultiplexArgumentTypeDescriptor;
+    }
+
+    /**
+     * Returns whether the field has been deprecated and should no longer be used.
+     * @return True if field has been deprecated.
+     */
+    public boolean isDeprecated() {
+        return field.isAnnotationPresent(Deprecated.class);
+    }
+
+    /**
+     * Returns whether the field should default to stdout if not provided explicitly on the command-line.
+     * @return True if field should default to stdout.
+     */
+    public boolean defaultsToStdout() {
+        return field.isAnnotationPresent(Output.class) && (Boolean)CommandLineUtils.getValue(ArgumentTypeDescriptor.getArgumentAnnotation(this),"defaultToStdout");
+    }
+
+    /**
+     * Returns false if a type-specific default can be employed.
+     * @return True to throw in a type specific default.  False otherwise.
+     */
+    public boolean createsTypeDefault() {
+        return typeDescriptor.createsTypeDefault(this);
+    }
+
+    public String typeDefaultDocString() {
+        return typeDescriptor.typeDefaultDocString(this);
+    }
+
+    /**
+     * Generates a default for the given type.
+     * @param parsingEngine the parsing engine used to validate this argument type descriptor.
+     * @return A default value for the given type.
+     */
+    public Object createTypeDefault(ParsingEngine parsingEngine) {
+        return typeDescriptor.createTypeDefault(parsingEngine,this,field.getGenericType());
+    }
+
+    /**
+     * Builds out a new type descriptor for the given dependent argument as a function
+     * of the containing object.
+     * @param parsingEngine the parsing engine to use when building out this custom type descriptor.
+     * @param containingObject The containing object.
+     * @return An argument type descriptor for the custom derivative field.
+     */
+    public MultiplexArgumentTypeDescriptor createDependentTypeDescriptor(ParsingEngine parsingEngine,Object containingObject) {
+        if(!isDependent())
+            throw new ReviewedGATKException("Field " + field.getName() + " is independent; no dependent type descriptor can be derived.");
+        return ((MultiplexArgumentTypeDescriptor)typeDescriptor).createCustomTypeDescriptor(parsingEngine,this,containingObject);
+    }
+
+    /**
+     * Gets a string representation of the argument source for debugging.
+     * @return String representation of the argument source.
+     */
+    public String toString() {
+        return field.getDeclaringClass().getSimpleName() + ": " + field.getName();
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/commandline/ArgumentTypeDescriptor.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/commandline/ArgumentTypeDescriptor.java
new file mode 100644
index 0000000..c583fb9
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/commandline/ArgumentTypeDescriptor.java
@@ -0,0 +1,1038 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.commandline;
+
+import htsjdk.tribble.AbstractFeatureReader;
+import htsjdk.tribble.Feature;
+import org.apache.log4j.Logger;
+import org.broadinstitute.gatk.utils.classloader.JVMUtils;
+import org.broadinstitute.gatk.utils.exceptions.DynamicClassResolutionException;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+import org.broadinstitute.gatk.utils.exceptions.UserException;
+import org.broadinstitute.gatk.utils.refdata.tracks.FeatureManager;
+import org.broadinstitute.gatk.utils.text.XReadLines;
+
+import java.io.File;
+import java.io.IOException;
+import java.lang.annotation.Annotation;
+import java.lang.reflect.*;
+import java.util.*;
+
+/**
+ * An descriptor capable of providing parsers that can parse any type
+ * of supported command-line argument.
+ *
+ * @author mhanna
+ * @version 0.1
+ */
+public abstract class ArgumentTypeDescriptor {
+    private static Class[] ARGUMENT_ANNOTATIONS = {Input.class, Output.class, Argument.class};
+
+    /**
+     * our log, which we want to capture anything from org.broadinstitute.gatk
+     */
+    protected static final Logger logger = Logger.getLogger(ArgumentTypeDescriptor.class);
+
+    /**
+     * Fetch the given descriptor from the descriptor repository.
+     * @param descriptors the descriptors from which to select a good match.
+     * @param type Class for which to specify a descriptor.
+     * @return descriptor for the given type.
+     */
+    public static ArgumentTypeDescriptor selectBest( Collection<ArgumentTypeDescriptor> descriptors, Class type ) {
+        for( ArgumentTypeDescriptor descriptor: descriptors ) {
+            if( descriptor.supports(type) )
+                return descriptor;
+        }
+        throw new ReviewedGATKException("Can't process command-line arguments of type: " + type.getName());
+    }
+
+    /**
+     * Returns true if the file will be compressed.
+     * @param writerFileName Name of the file
+     * @return true if the file will be compressed.
+     */
+    public static boolean isCompressed(String writerFileName) {
+        return writerFileName != null && AbstractFeatureReader.hasBlockCompressedExtension(writerFileName);
+    }
+
    /**
     * Does this descriptor support classes of the given type?
     * Concrete descriptors use this to claim argument fields during parsing.
     * @param type The type to check.
     * @return true if this descriptor supports the given type, false otherwise.
     */
    public abstract boolean supports( Class type );
+
    /**
     * Indicates whether this descriptor can manufacture a type-specific default
     * value for an argument not supplied on the command line.  When true, the
     * argument is not considered required (see createDefaultArgumentDefinition).
     * @param source Source of the command-line argument.
     * @return True if a type-specific default will be created.  False otherwise (the base behavior).
     */
    public boolean createsTypeDefault(ArgumentSource source) { return false; }
+
    /**
     * Returns a documentation-friendly value for the default of a type descriptor.
     * Must be overridden if createsTypeDefault returns true; cannot be called otherwise.
     * @param source Source of the command-line argument.
     * @return Friendly string of the default value, for documentation.  If the descriptor
     * doesn't create a default, throws an UnsupportedOperationException.
     */
    public String typeDefaultDocString(ArgumentSource source) {
        throw new UnsupportedOperationException();
    }
+
    /**
     * Generates a default for the given type.
     * Base implementation always throws; descriptors that report
     * createsTypeDefault == true must override this.
     *
     * @param parsingEngine the parsing engine used to validate this argument type descriptor.
     * @param source Source of the command-line argument.
     * @param type Type of value to create, in case the command-line argument system wants influence.
     * @return A default value for the given type.
     * @throws UnsupportedOperationException unless overridden.
     */
    public Object createTypeDefault(ParsingEngine parsingEngine,ArgumentSource source, Type type) { throw new UnsupportedOperationException("Unable to create default for type " + getClass()); }
+
+    /**
+     * Given the given argument source and attributes, synthesize argument definitions for command-line arguments.
+     * @param source Source class and field for the given argument.
+     * @return A list of command-line argument definitions supporting this field.
+     */
+    public List<ArgumentDefinition> createArgumentDefinitions( ArgumentSource source ) {
+        return Collections.singletonList(createDefaultArgumentDefinition(source));
+    }
+
    /**
     * Parses an argument source to an object.
     * WARNING!  Mandatory side effect of parsing!  Each parse routine should register the tags it finds with the proper CommandLineProgram.
     * TODO: Fix this, perhaps with an event model indicating that a new argument has been created.
     *
     * @param parsingEngine The engine responsible for parsing.
     * @param source The source used to find the matches.
     * @param matches The matches for the source.
     * @return The parsed object.
     */
    public Object parse(ParsingEngine parsingEngine, ArgumentSource source, ArgumentMatches matches) {
        // Delegates to the type-aware overload using the field's full generic type.
        return parse(parsingEngine, source, source.field.getGenericType(), matches);
    }
+
+    /**
+     * Returns true if the field is a collection or an array.
+     * @param source The argument source to check.
+     * @return true if the field is a collection or an array.
+     */
+    public boolean isMultiValued( ArgumentSource source ) {
+        Class argumentType = source.field.getType();
+        return Collection.class.isAssignableFrom(argumentType) || argumentType.isArray();
+    }
+
    /**
     * By default, argument sources create argument definitions with a set of default values.
     * Use this method to create the one simple argument definition.
     * Note: the ArgumentDefinition constructor is positional; keep the argument
     * order below in sync with its declaration.
     * @param source argument source for which to create a default definition.
     * @return The default definition for this argument source.
     */
    protected ArgumentDefinition createDefaultArgumentDefinition( ArgumentSource source ) {
        Annotation argumentAnnotation = getArgumentAnnotation(source);
        return new ArgumentDefinition( ArgumentIOType.getIOType(argumentAnnotation),
                source.field.getType(),
                ArgumentDefinition.getFullName(argumentAnnotation, source.field.getName()),
                ArgumentDefinition.getShortName(argumentAnnotation),
                ArgumentDefinition.getDoc(argumentAnnotation),
                // required only when mandatory AND no other mechanism (type default,
                // flag semantics, deprecation) can stand in for an explicit value
                source.isRequired() && !createsTypeDefault(source) && !source.isFlag() && !source.isDeprecated(),
                source.isFlag(),
                source.isMultiValued(),
                source.isHidden(),
                makeRawTypeIfNecessary(getCollectionComponentType(source.field)),
                ArgumentDefinition.getExclusiveOf(argumentAnnotation),
                ArgumentDefinition.getValidationRegex(argumentAnnotation),
                getValidOptions(source) );
    }
+
    /**
     * Return the component type of a field.
     * NOTE(review): the base implementation always returns null (meaning "no
     * parameterized component type"); subclasses handling collections are expected
     * to override it.  The historical claim that String.class is returned as a
     * fallback does not match this implementation — confirm against the
     * overriding descriptors before relying on it.
     * @param field The reflected field to inspect.
     * @return The parameterized component type, or null in this base implementation.
     */
    protected Type getCollectionComponentType( Field field ) {
        return null;
    }
+
    /**
     * Parses the argument matches for a class type into an object.
     * @param parsingEngine The engine responsible for parsing and tag registration.
     * @param source The original argument source used to find the matches.
     * @param type The current class type being inspected.  May not match the argument source.field.getType() if this is a collection, for example.
     * @param matches The argument matches for the argument source, or the individual argument match for a scalar if this is being called to help parse a collection.
     * @return The individual parsed object matching the argument match with Class type.
     */
    public abstract Object parse( ParsingEngine parsingEngine, ArgumentSource source, Type type, ArgumentMatches matches );
+
+    /**
+     * If the argument source only accepts a small set of options, populate the returned list with
+     * those options.  Otherwise, leave the list empty.
+     * @param source Original field specifying command-line arguments.
+     * @return A list of valid options.
+     */
+    protected List<String> getValidOptions( ArgumentSource source ) {
+        if(!source.field.getType().isEnum())
+            return null;
+        List<String> validOptions = new ArrayList<String>();
+        for(Object constant: source.field.getType().getEnumConstants())
+            validOptions.add(constant.toString());
+        return validOptions;
+    }
+
+    /**
+     * Returns true if the argument with the given full name exists in the collection of ArgumentMatches.
+     * @param definition Definition of the argument for which to find matches.
+     * @param matches The matches for the given argument.
+     * @return true if the argument is present, or false if not present.
+     */
+    protected boolean argumentIsPresent( ArgumentDefinition definition, ArgumentMatches matches ) {
+        for( ArgumentMatch match: matches ) {
+            if( match.definition.equals(definition) )
+                return true;
+        }
+        return false;
+    }
+
+    /**
+     * Gets the value of an argument with the given full name, from the collection of ArgumentMatches.
+     * If the argument matches multiple values, an exception will be thrown.
+     * @param definition Definition of the argument for which to find matches.
+     * @param matches The matches for the given argument.
+     * @return The value of the argument if available, or null if not present.
+     */
+    protected ArgumentMatchValue getArgumentValue( ArgumentDefinition definition, ArgumentMatches matches ) {
+        Collection<ArgumentMatchValue> argumentValues = getArgumentValues( definition, matches );
+        if( argumentValues.size() > 1 )
+            throw new UserException.CommandLineException("Multiple values associated with given definition, but this argument expects only one: " + definition.fullName);
+        return argumentValues.size() > 0 ? argumentValues.iterator().next() : null;
+    }
+
    /**
     * Gets the tags associated with a given command-line argument.
     * If multiple matches carry non-empty tag sets, an exception will be thrown.
     * NOTE(review): the accumulator is overwritten unconditionally on every
     * iteration, so a later match with EMPTY tags clobbers tags collected from an
     * earlier match — effectively only the last match's tags survive.  Confirm
     * this last-wins behavior is intended before changing it.
     * @param matches The matches for the given argument.
     * @return The tags of the last match visited, possibly empty.
     */
    protected Tags getArgumentTags(ArgumentMatches matches) {
        Tags tags = new Tags();
        for(ArgumentMatch match: matches) {
            if(!tags.isEmpty() && !match.tags.isEmpty())
                throw new ReviewedGATKException("BUG: multiple conflicting sets of tags are available, and the type descriptor specifies no way of resolving the conflict.");
            tags = match.tags;
        }
        return tags;
    }
+
+    /**
+     * Gets the values of an argument with the given full name, from the collection of ArgumentMatches.
+     * @param definition Definition of the argument for which to find matches.
+     * @param matches The matches for the given argument.
+     * @return The value of the argument if available, or an empty collection if not present.
+     */
+    protected Collection<ArgumentMatchValue> getArgumentValues( ArgumentDefinition definition, ArgumentMatches matches ) {
+        Collection<ArgumentMatchValue> values = new ArrayList<ArgumentMatchValue>();
+        for( ArgumentMatch match: matches ) {
+            if( match.definition.equals(definition) )
+                values.addAll(match.values());
+        }
+        return values;
+    }
+
+    /**
+     * Retrieves the argument description from the given argument source.  Will throw an exception if
+     * the given ArgumentSource
+     * @param source source of the argument.
+     * @return Argument description annotation associated with the given field.
+     */
+    @SuppressWarnings("unchecked")
+    protected static Annotation getArgumentAnnotation( ArgumentSource source ) {
+        for (Class annotation: ARGUMENT_ANNOTATIONS)
+            if (source.field.isAnnotationPresent(annotation))
+                return source.field.getAnnotation(annotation);
+        throw new ReviewedGATKException("ArgumentAnnotation is not present for the argument field: " + source.field.getName());
+    }
+
+    /**
+     * Returns true if an argument annotation is present
+     * @param field The field to check for an annotation.
+     * @return True if an argument annotation is present on the field.
+     */
+    @SuppressWarnings("unchecked")
+    public static boolean isArgumentAnnotationPresent(Field field) {
+        for (Class annotation: ARGUMENT_ANNOTATIONS)
+            if (field.isAnnotationPresent(annotation))
+                return true;
+        return false;
+    }
+
+    /**
+     * Reports whether the given field is marked as hidden from the help system.
+     * @param field Field to test.
+     * @return True if argument should be hidden.  False otherwise.
+     */
+    public static boolean isArgumentHidden(Field field) {
+        final boolean hidden = field.isAnnotationPresent(Hidden.class);
+        return hidden;
+    }
+
+    public static Class makeRawTypeIfNecessary(Type t) {
+        if ( t == null )
+            return null;
+        else if ( t instanceof ParameterizedType )
+            return (Class)((ParameterizedType) t).getRawType();
+        else if ( t instanceof Class ) {
+            return (Class)t;
+        } else {
+            throw new IllegalArgumentException("Unable to determine Class-derived component type of field: " + t);
+        }
+    }
+
+    /**
+     * The actual argument parsing method.
+     * @param source             source
+     * @param type               type to check
+     * @param matches            matches
+     * @param tags               argument tags
+     * @return the RodBinding/IntervalBinding object depending on the value of createIntervalBinding.
+     */
+    protected Object parseBinding(ArgumentSource source, Type type, ArgumentMatches matches, Tags tags) {
+        ArgumentDefinition defaultDefinition = createDefaultArgumentDefinition(source);
+        ArgumentMatchValue value = getArgumentValue(defaultDefinition, matches);
+        @SuppressWarnings("unchecked")
+        Class<? extends Feature> parameterType = JVMUtils.getParameterizedTypeClass(type);
+        String name = defaultDefinition.fullName;
+
+        return parseBinding(value, parameterType, type, name, tags, source.field.getName());
+    }
+
+    /**
+     * Builds a binding object (e.g. RodBinding or IntervalBinding) from a raw argument value, resolving
+     * the tribble type from explicit positional tags or, failing that, by inspecting the file itself.
+     *
+     * @param value The source of the binding
+     * @param parameterType The Tribble Feature parameter type
+     * @param bindingClass The class type for the binding (ex: RodBinding, IntervalBinding, etc.) Must have the correct constructor for creating the binding.
+     * @param bindingName The name of the binding passed to the constructor.
+     * @param tags Tags for the binding used for parsing and passed to the constructor.
+     * @param fieldName The name of the field that was parsed. Used for error reporting.
+     * @return The newly created binding object of type bindingClass.
+     * @throws UserException.CommandLineException if more than two positional tags are given or construction fails.
+     * @throws UserException.UnknownTribbleType if an explicitly named tribble type is not registered.
+     * @throws UserException.CouldNotReadInputFile if the type must be inferred but the file is unreadable or absent.
+     */
+    public static Object parseBinding(ArgumentMatchValue value, Class<? extends Feature> parameterType, Type bindingClass,
+                                      String bindingName, Tags tags, String fieldName) {
+        try {
+            String tribbleType = null;
+            // must have one or two tag values here
+            if ( tags.getPositionalTags().size() > 2 ) {
+                // NOTE(review): the format arguments look swapped — the "argument %s" slot receives the
+                // value and the second %s the field name; confirm the intended ordering.
+                throw new UserException.CommandLineException(
+                        String.format("Unexpected number of positional tags for argument %s : %s. " +
+                                "Rod bindings only support -X:type and -X:name,type argument styles",
+                                value.asString(), fieldName));
+            } else if ( tags.getPositionalTags().size() == 2 ) {
+                // -X:name,type style
+                bindingName = tags.getPositionalTags().get(0);
+                tribbleType = tags.getPositionalTags().get(1);
+
+                // An explicitly supplied type must name a registered tribble feature type.
+                FeatureManager manager = new FeatureManager();
+                if ( manager.getByName(tribbleType) == null )
+                    throw new UserException.UnknownTribbleType(
+                            tribbleType,
+                            String.format("Unable to find tribble type '%s' provided on the command line. " +
+                                    "Please select a correct type from among the supported types:%n%s",
+                                    tribbleType, manager.userFriendlyListOfAvailableFeatures(parameterType)));
+
+            } else {
+                // case with 0 or 1 positional tags
+                FeatureManager manager = new FeatureManager();
+
+                // -X:type style is a type when we cannot determine the type dynamically
+                String tag1 = tags.getPositionalTags().size() == 1 ? tags.getPositionalTags().get(0) : null;
+                if ( tag1 != null ) {
+                    // A single positional tag is ambiguous: treat it as a type if registered, else as a name.
+                    if ( manager.getByName(tag1) != null ) // this a type
+                        tribbleType = tag1;
+                    else
+                        bindingName = tag1;
+                }
+
+                if ( tribbleType == null ) {
+                    // try to determine the file type dynamically
+                    File file = value.asFile();
+                    if ( file.canRead() && file.isFile() ) {
+                        FeatureManager.FeatureDescriptor featureDescriptor = manager.getByFiletype(file);
+                        if ( featureDescriptor != null ) {
+                            tribbleType = featureDescriptor.getName();
+                            logger.debug("Dynamically determined type of " + file + " to be " + tribbleType);
+                        }
+                    }
+
+                    if ( tribbleType == null ) {
+                        // IntervalBinding can be created from a normal String
+                        Class rawType = (makeRawTypeIfNecessary(bindingClass));
+                        try {
+                            return rawType.getConstructor(String.class).newInstance(value.asString());
+                        } catch (NoSuchMethodException e) {
+                            /* ignore */
+                        }
+
+                        // No String constructor either: report the most specific failure we can.
+                        if ( ! file.exists() ) {
+                            throw new UserException.CouldNotReadInputFile(file, "file \'"+ file + "\' does not exist");
+                        } else if ( ! file.canRead() || ! file.isFile() ) {
+                            throw new UserException.CouldNotReadInputFile(file, "file \'"+ file + "\' could not be read");
+                        } else {
+                            throw new UserException.CommandLineException(
+                                    String.format("No tribble type was provided on the command line and the type of the file \'"+ file + "\' could not be determined dynamically. " +
+                                                    "Please add an explicit type tag :NAME listing the correct type from among the supported types:%n%s",
+                                            manager.userFriendlyListOfAvailableFeatures(parameterType)));
+                        }
+                    }
+                }
+            }
+
+            // All bindings are built via the (Class, String, String, String, Tags) constructor of bindingClass.
+            Constructor ctor = (makeRawTypeIfNecessary(bindingClass)).getConstructor(Class.class, String.class, String.class, String.class, Tags.class);
+            return ctor.newInstance(parameterType, bindingName, value.asString(), tribbleType, tags);
+        } catch (Exception e) {
+            // NOTE(review): the cause is not chained into the rethrown exception — only e.getMessage() survives.
+            if ( e instanceof UserException )
+                throw ((UserException)e);
+            else
+                throw new UserException.CommandLineException(
+                        String.format("Failed to parse value %s for argument %s. Message: %s",
+                                value, fieldName, e.getMessage()));
+        }
+    }
+
+    /**
+     * Parse the source of a RodBindingCollection, which can be either a file of RodBindings or an actual RodBinding.
+     *
+     * @param parsingEngine the parsing engine used to validate this argument type descriptor
+     * @param source             source
+     * @param type               type
+     * @param matches            matches
+     * @param tags               argument tags
+     * @return the newly created binding object
+     */
+    public Object parseRodBindingCollectionSource(final ParsingEngine parsingEngine,
+                                                  final ArgumentSource source,
+                                                  final Type type,
+                                                  final ArgumentMatches matches,
+                                                  final Tags tags) {
+
+        final ArgumentDefinition definition = createDefaultArgumentDefinition(source);
+        final ArgumentMatchValue matchedValue = getArgumentValue(definition, matches);
+        @SuppressWarnings("unchecked")
+        final Class<? extends Feature> featureType = JVMUtils.getParameterizedTypeClass(type);
+        final String name = definition.fullName;
+        final String fieldName = source.field.getName();
+
+        // A ".list" file names many bindings, one per line; delegate to the list parser.
+        final File sourceFile = matchedValue.asFile();
+        if (sourceFile.getAbsolutePath().endsWith(".list")) {
+            try {
+                return getRodBindingsCollection(sourceFile, parsingEngine, featureType, name, tags, fieldName);
+            } catch (IOException e) {
+                throw new UserException.CouldNotReadInputFile(sourceFile, e);
+            }
+        }
+
+        // Otherwise treat the value as a single binding and wrap it in a one-element collection.
+        final RodBinding single = (RodBinding)parseBinding(matchedValue, featureType, RodBinding.class, name, tags, fieldName);
+        parsingEngine.addTags(single, tags);
+        parsingEngine.addRodBinding(single);
+        return RodBindingCollection.createRodBindingCollectionOfType(featureType, Arrays.asList(single));
+    }
+
+    /**
+     * Retrieve and parse a collection of RodBindings from the given file.
+     *
+     * Each non-blank line holds either a single file path, or a tags token followed by a file path.
+     * If the file contains duplicate entries or no entries at all, an exception will be thrown.
+     *
+     * @param file             the source file
+     * @param parsingEngine    the engine responsible for parsing
+     * @param parameterType    the Tribble Feature parameter type
+     * @param bindingName      the name of the binding passed to the constructor.
+     * @param defaultTags      general tags for the binding used for parsing and passed to the constructor.
+     * @param fieldName        the name of the field that was parsed. Used for error reporting.
+     * @return the newly created collection of binding objects.
+     * @throws IOException if the list file cannot be opened for reading.
+     */
+    public static Object getRodBindingsCollection(final File file,
+                                                  final ParsingEngine parsingEngine,
+                                                  final Class<? extends Feature> parameterType,
+                                                  final String bindingName,
+                                                  final Tags defaultTags,
+                                                  final String fieldName) throws IOException {
+        final List<RodBinding> bindings = new ArrayList<>();
+
+        // Keep track of the files in this list so that we can check for duplicates and empty files
+        final Set<String> fileValues = new HashSet<>();
+
+        // NOTE(review): XReadLines is never closed here; if it holds an open handle, consider
+        // closing it explicitly — confirm against its implementation.
+        // parse each line separately using the given Tags if none are provided on each line
+        for ( final String line: new XReadLines(file) ) {
+            // BUGFIX: "".split("\\s+") yields [""] — never a zero-length array — so the old
+            // `tokens.length == 0` guard was dead: blank lines were parsed as an empty file name,
+            // and leading whitespace produced a spurious empty first token.  Trim first and skip
+            // blank lines explicitly.
+            final String trimmedLine = line.trim();
+            if ( trimmedLine.isEmpty() ) {
+                continue; // blank line, so do nothing
+            }
+
+            final String[] tokens = trimmedLine.split("\\s+");
+            final RodBinding binding;
+
+            // use the default tags if none are provided for this binding
+            if ( tokens.length == 1 ) {
+                final ArgumentMatchValue value = parseAndValidateArgumentMatchValue(tokens[0], fileValues, fieldName, file.getName());
+                binding = (RodBinding)parseBinding(value, parameterType, RodBinding.class, bindingName, defaultTags, fieldName);
+                parsingEngine.addTags(binding, defaultTags);
+            }
+            // use the new tags if provided
+            else if ( tokens.length == 2 ) {
+                final Tags tags = ParsingMethod.parseTags(fieldName, tokens[0]);
+                final ArgumentMatchValue value = parseAndValidateArgumentMatchValue(tokens[1], fileValues, fieldName, file.getName());
+                binding = (RodBinding)parseBinding(value, parameterType, RodBinding.class, bindingName, tags, fieldName);
+                parsingEngine.addTags(binding, tags);
+            } else {
+                throw new UserException.BadArgumentValue(fieldName, "data lines should consist of an optional set of tags along with a path to a file; too many tokens are present for line: " + line);
+            }
+
+            bindings.add(binding);
+            parsingEngine.addRodBinding(binding);
+        }
+
+        if (fileValues.isEmpty()) {
+            throw new UserException.BadArgumentValue(fieldName, "The input list " + file.getName() + " is empty.");
+        }
+
+        return RodBindingCollection.createRodBindingCollectionOfType(parameterType, bindings);
+    }
+
+    /**
+     * Validates the resource file name and constructs an ArgumentMatchValue from it.
+     *
+     * If the list name has already been processed in the current list, throws a UserException, otherwise
+     * creates an ArgumentMatchValue to represent the list.
+     *
+     * @param token Name of the ROD resource file.
+     * @param fileValues Set of names of ROD files that have already been processed; token is added to it as a side effect.
+     * @param fieldName Name of the argument field being populated.
+     * @param listFileName Name of the list file being processed.
+     * @return An ArgumentMatchValue wrapping the (previously unseen) token.
+     */
+    private static ArgumentMatchValue parseAndValidateArgumentMatchValue(final String token, final Set<String> fileValues, final String fieldName,
+                                                                         final String listFileName) {
+        checkForDuplicateFileName(token, fileValues, fieldName, listFileName);
+        return new ArgumentMatchStringValue(token);
+    }
+
+    /**
+     * Checks to make sure that the current file name to be processed has not already been processed.
+     *
+     * Checks the name of the current file against the names that have already been processed, throwing
+     * an informative BadArgumentValue exception if it has already been seen. As a side effect adds the
+     * current file name to the set of filenames that have already been processed.
+     *
+     * @param currentFile Name of the current file to process
+     * @param processedFiles Set of file names that have already been processed
+     * @param fieldName Name of the argument that is being populated
+     * @param listName Filename of the list that is being processed
+     */
+    protected static void checkForDuplicateFileName(final String currentFile, final Set<String> processedFiles,
+                                                    final String fieldName, final String listName) {
+        if (processedFiles.contains(currentFile)) {
+            throw new UserException.BadArgumentValue(fieldName, "The input list " + listName + " contains file " + currentFile +
+                                                     " multiple times, which isn't allowed. If you are intentionally trying to " +
+                                                     "include the same file more than once, you will need to specify it in separate file lists.");
+        }
+        processedFiles.add(currentFile);
+    }
+}
+
+/**
+ * Parser for RodBinding objects
+ */
+class RodBindingArgumentTypeDescriptor extends ArgumentTypeDescriptor {
+    /**
+     * Accepts only RodBinding class objects.
+     * @param type The type to check.
+     * @return true if the provided class is a RodBinding.class
+     */
+    @Override
+    public boolean supports( Class type ) {
+        return isRodBinding(type);
+    }
+
+    public static boolean isRodBinding( Class type ) {
+        return RodBinding.class.isAssignableFrom(type);
+    }
+
+    @Override
+    public boolean createsTypeDefault(ArgumentSource source) {
+        // Only optional rod bindings receive an unbound default.
+        return !source.isRequired();
+    }
+
+    @Override
+    @SuppressWarnings("unchecked")
+    public Object createTypeDefault(ParsingEngine parsingEngine, ArgumentSource source, Type type) {
+        final Class<? extends Feature> featureType =
+                (Class<? extends Feature>) JVMUtils.getParameterizedTypeClass(type);
+        return RodBinding.makeUnbound(featureType);
+    }
+
+    @Override
+    public String typeDefaultDocString(ArgumentSource source) {
+        return "none";
+    }
+
+    @Override
+    public Object parse(ParsingEngine parsingEngine, ArgumentSource source, Type type, ArgumentMatches matches) {
+        final Tags parsedTags = getArgumentTags(matches);
+        final RodBinding binding = (RodBinding)parseBinding(source, type, matches, parsedTags);
+        // WARNING: side effects — register the binding and its tags with the parsing engine.
+        parsingEngine.addTags(binding, parsedTags);
+        parsingEngine.addRodBinding(binding);
+        return binding;
+    }
+}
+
+/**
+ * Parser for IntervalBinding objects
+ */
+class IntervalBindingArgumentTypeDescriptor extends ArgumentTypeDescriptor {
+    /**
+     * Accepts only IntervalBinding class objects.
+     * @param type The type to check.
+     * @return true if the provided class is an IntervalBinding.class
+     */
+    @Override
+    public boolean supports( Class type ) {
+        return isIntervalBinding(type);
+    }
+
+    public static boolean isIntervalBinding( Class type ) {
+        return IntervalBinding.class.isAssignableFrom(type);
+    }
+
+    /**
+     * Parses an IntervalBinding from the matches.  See note from RodBindingArgumentTypeDescriptor.parse();
+     * unlike that method, nothing is registered with the parsing engine here.
+     *
+     * @param parsingEngine      parsing engine
+     * @param source             source
+     * @param type               type to check
+     * @param matches            matches
+     * @return the IntervalBinding object.
+     */
+    @Override
+    public Object parse(ParsingEngine parsingEngine, ArgumentSource source, Type type, ArgumentMatches matches) {
+        final Tags parsedTags = getArgumentTags(matches);
+        return parseBinding(source, type, matches, parsedTags);
+    }
+}
+
+/**
+ * Parser for RodBindingCollection objects
+ */
+class RodBindingCollectionArgumentTypeDescriptor extends ArgumentTypeDescriptor {
+    /**
+     * We only want RodBindingCollection class objects
+     * @param type The type to check.
+     * @return true if the provided class is a RodBindingCollection.class
+     */
+    @Override
+    public boolean supports( final Class type ) {
+        return isRodBindingCollection(type);
+    }
+
+    public static boolean isRodBindingCollection( final Class type ) {
+        return RodBindingCollection.class.isAssignableFrom(type);
+    }
+
+    /**
+     * See note from RodBindingArgumentTypeDescriptor.parse().
+     *
+     * @param parsingEngine      parsing engine
+     * @param source             source
+     * @param type               type to check
+     * @param matches            matches
+     * @return the RodBindingCollection object.
+     */
+    @Override
+    public Object parse(final ParsingEngine parsingEngine, final ArgumentSource source, final Type type, final ArgumentMatches matches) {
+        final Tags tags = getArgumentTags(matches);
+        return parseRodBindingCollectionSource(parsingEngine, source, type, matches, tags);
+    }
+}
+
+/**
+ * Parse simple argument types: java primitives, wrapper classes, and anything that has
+ * a simple String constructor.
+ */
+class SimpleArgumentTypeDescriptor extends ArgumentTypeDescriptor {
+
+    /**
+     * Determines whether the type is handled by one of the dedicated binding descriptors.
+     * @param type  the class type
+     * @return true if this class is a binding type, false otherwise
+     */
+    private boolean isBinding(final Class type) {
+        return RodBindingArgumentTypeDescriptor.isRodBinding(type) ||
+                IntervalBindingArgumentTypeDescriptor.isIntervalBinding(type) ||
+                RodBindingCollectionArgumentTypeDescriptor.isRodBindingCollection(type);
+    }
+
+
+    /**
+     * Supports any non-binding type that is a primitive, an enum, a primitive wrapper,
+     * or exposes a public single-String constructor.
+     * @param type The type to check.
+     * @return true if this descriptor can parse the type.
+     */
+    @Override
+    public boolean supports( Class type ) {
+        if ( isBinding(type) ) return false;
+        if ( type.isPrimitive() ) return true;
+        if ( type.isEnum() ) return true;
+        if ( primitiveToWrapperMap.containsValue(type) ) return true;
+
+        try {
+            type.getConstructor(String.class);
+            return true;
+        }
+        catch( Exception ex ) {
+            // An exception thrown above means that the String constructor either doesn't
+            // exist or can't be accessed.  In either case, this descriptor doesn't support this type.
+            return false;
+        }
+    }
+
+    /**
+     * Parses a simple value: flags become true; primitives go through the wrapper's valueOf;
+     * enums are matched case-insensitively (falling back to an @EnumerationArgumentDefault);
+     * File gets null-tolerant handling; anything else is built via its String constructor.
+     */
+    @Override
+    public Object parse(ParsingEngine parsingEngine, ArgumentSource source, Type fulltype, ArgumentMatches matches) {
+        Class type = makeRawTypeIfNecessary(fulltype);
+        // A flag's mere presence on the command line means true; no value is consumed.
+        if (source.isFlag())
+            return true;
+
+        ArgumentDefinition defaultDefinition = createDefaultArgumentDefinition(source);
+        ArgumentMatchValue value = getArgumentValue(defaultDefinition, matches);
+        Object result;
+        Tags tags = getArgumentTags(matches);
+
+        // lets go through the types we support
+        try {
+            if (type.isPrimitive()) {
+                // Parse via the wrapper class's static valueOf(String), e.g. Integer.valueOf.
+                Method valueOf = primitiveToWrapperMap.get(type).getMethod("valueOf",String.class);
+                if(value == null)
+                    throw new MissingArgumentValueException(createDefaultArgumentDefinition(source));
+                result = valueOf.invoke(null,value.asString().trim());
+            } else if (type.isEnum()) {
+                Object[] vals = type.getEnumConstants();
+                Object defaultEnumeration = null;  // as we look at options, record the default option if it exists
+                for (Object val : vals) {
+                    // NOTE(review): a successful match returns here directly, skipping the
+                    // parsingEngine.addTags side effect at the bottom of this method — confirm intended.
+                    if (String.valueOf(val).equalsIgnoreCase(value == null ? null : value.asString())) return val;
+                    // NOTE(review): the message below is missing a space before "doesn't".
+                    try { if (type.getField(val.toString()).isAnnotationPresent(EnumerationArgumentDefault.class)) defaultEnumeration = val; }
+                    catch (NoSuchFieldException e) { throw new ReviewedGATKException("parsing " + type.toString() + "doesn't contain the field " + val.toString()); }
+                }
+                // if their argument has no value (null), and there's a default, return that default for the enum value
+                if (defaultEnumeration != null && value == null)
+                    result = defaultEnumeration;
+                    // if their argument has no value and there's no default, throw a missing argument value exception.
+                    // TODO: Clean this up so that null values never make it to this point.  To fix this, we'll have to clean up the implementation of -U.
+                else if (value == null)
+                    throw new MissingArgumentValueException(createDefaultArgumentDefinition(source));
+                else
+                    throw new UnknownEnumeratedValueException(createDefaultArgumentDefinition(source),value.asString());
+            } else if (type.equals(File.class)) {
+                // Files may legitimately be absent; a missing value yields null rather than an error.
+                result = value == null ? null : value.asFile();
+            } else {
+                if (value == null)
+                    throw new MissingArgumentValueException(createDefaultArgumentDefinition(source));
+                // Fall back to the type's single-String constructor (existence was verified in supports()).
+                Constructor ctor = type.getConstructor(String.class);
+                result = ctor.newInstance(value.asString());
+            }
+        } catch (UserException e) {
+            throw e;
+        } catch (InvocationTargetException e) {
+            // The target constructor/valueOf rejected the text, most commonly a data-type mismatch.
+            throw new UserException.CommandLineException(String.format("Failed to parse value %s for argument %s.  This is most commonly caused by providing an incorrect data type (e.g. a double when an int is required)",
+                    value, source.field.getName()));
+        } catch (Exception e) {
+            throw new DynamicClassResolutionException(String.class, e);
+        }
+
+        // TODO FIXME!
+
+        // WARNING: Side effect!
+        parsingEngine.addTags(result,tags);
+
+        return result;
+    }
+
+
+    /**
+     * A mapping of the primitive types to their associated wrapper classes.  Is there really no way to infer
+     * this association available in the JRE?
+     */
+    private static Map<Class,Class> primitiveToWrapperMap = new HashMap<Class,Class>() {
+        {
+            put( Boolean.TYPE, Boolean.class );
+            put( Character.TYPE, Character.class );
+            put( Byte.TYPE, Byte.class );
+            put( Short.TYPE, Short.class );
+            put( Integer.TYPE, Integer.class );
+            put( Long.TYPE, Long.class );
+            put( Float.TYPE, Float.class );
+            put( Double.TYPE, Double.class );
+        }
+    };
+}
+
+/**
+ * Process compound argument types: arrays, and typed and untyped collections.
+ */
+class CompoundArgumentTypeDescriptor extends ArgumentTypeDescriptor {
+    /**
+     * Supports any Collection subtype or any array type.
+     * @param type The type to check.
+     * @return true if this descriptor can parse the type.
+     */
+    @Override
+    public boolean supports( Class type ) {
+        return ( Collection.class.isAssignableFrom(type) || type.isArray() );
+    }
+
+    /**
+     * Parses each individual argument match with the component type's own descriptor and
+     * aggregates the results into a collection or array of the declared type.
+     */
+    @Override
+    @SuppressWarnings("unchecked")
+    public Object parse(ParsingEngine parsingEngine,ArgumentSource source, Type fulltype, ArgumentMatches matches) {
+        Class type = makeRawTypeIfNecessary(fulltype);
+        Type componentType;
+        Object result;
+
+        if( Collection.class.isAssignableFrom(type) ) {
+
+            // If this is a generic interface, pick a concrete implementation to create and pass back.
+            // Because of type erasure, don't worry about creating one of exactly the correct type.
+            if( Modifier.isInterface(type.getModifiers()) || Modifier.isAbstract(type.getModifiers()) )
+            {
+                if( java.util.List.class.isAssignableFrom(type) ) type = ArrayList.class;
+                else if( java.util.Queue.class.isAssignableFrom(type) ) type = java.util.ArrayDeque.class;
+                else if( java.util.Set.class.isAssignableFrom(type) ) type = java.util.TreeSet.class;
+            }
+
+            componentType = getCollectionComponentType( source.field );
+            ArgumentTypeDescriptor componentArgumentParser = parsingEngine.selectBestTypeDescriptor(makeRawTypeIfNecessary(componentType));
+
+            // Instantiate the (now concrete) collection type reflectively.
+            Collection collection;
+            try {
+                collection = (Collection)type.newInstance();
+            }
+            catch (InstantiationException e) {
+                logger.fatal("ArgumentParser: InstantiationException: cannot convert field " + source.field.getName());
+                throw new ReviewedGATKException("constructFromString:InstantiationException: Failed conversion " + e.getMessage());
+            }
+            catch (IllegalAccessException e) {
+                logger.fatal("ArgumentParser: IllegalAccessException: cannot convert field " + source.field.getName());
+                throw new ReviewedGATKException("constructFromString:IllegalAccessException: Failed conversion " + e.getMessage());
+            }
+
+            // Each individual value is parsed independently by the component type's descriptor.
+            for( ArgumentMatch match: matches ) {
+                for( ArgumentMatch value: match ) {
+                    Object object = componentArgumentParser.parse(parsingEngine,source,componentType,new ArgumentMatches(value));
+                    collection.add( object );
+                    // WARNING: Side effect!
+                    parsingEngine.addTags(object,value.tags);
+                }
+            }
+
+            result = collection;
+
+        }
+        else if( type.isArray() ) {
+            componentType = type.getComponentType();
+            ArgumentTypeDescriptor componentArgumentParser = parsingEngine.selectBestTypeDescriptor(makeRawTypeIfNecessary(componentType));
+
+            // Assemble a collection of individual values used in this computation.
+            Collection<ArgumentMatch> values = new ArrayList<ArgumentMatch>();
+            for( ArgumentMatch match: matches )
+                for( ArgumentMatch value: match )
+                    values.add(value);
+
+            // Size the array to the exact number of values, then fill it element by element.
+            result = Array.newInstance(makeRawTypeIfNecessary(componentType),values.size());
+
+            int i = 0;
+            for( ArgumentMatch value: values ) {
+                Object object = componentArgumentParser.parse(parsingEngine,source,componentType,new ArgumentMatches(value));
+                Array.set(result,i++,object);
+                // WARNING: Side effect!
+                parsingEngine.addTags(object,value.tags);
+            }
+        }
+        else
+            throw new ReviewedGATKException("Unsupported compound argument type: " + type);
+
+        return result;
+    }
+
+    /**
+     * Return the component type of a field, or String.class if the type cannot be found.
+     * @param field The reflected field to inspect.
+     * @return The parameterized component type, or String.class if the parameterized type could not be found.
+     * @throws IllegalArgumentException If more than one parameterized type is found on the field.
+     */
+    @Override
+    protected Type getCollectionComponentType( Field field ) {
+        // If this is a parameterized collection, find the contained type.  If blow up if more than one type exists.
+        if( field.getGenericType() instanceof ParameterizedType) {
+            ParameterizedType parameterizedType = (ParameterizedType)field.getGenericType();
+            if( parameterizedType.getActualTypeArguments().length > 1 )
+                throw new IllegalArgumentException("Unable to determine collection type of field: " + field.toString());
+            return parameterizedType.getActualTypeArguments()[0];
+        }
+        else
+            return String.class;
+    }
+}
+
+class MultiplexArgumentTypeDescriptor extends ArgumentTypeDescriptor {
+    /**
+     * The multiplexer controlling how data is split.
+     */
+    private final Multiplexer multiplexer;
+
+    /**
+     * The set of identifiers for the multiplexed entries.
+     */
+    private final Collection<?> multiplexedIds;
+
+    /**
+     * Creates an "unbound" descriptor with no multiplexer or ids; such an instance cannot create
+     * type defaults or parse until a derivative descriptor with concrete ids is created.
+     */
+    public MultiplexArgumentTypeDescriptor() {
+        this.multiplexer = null;
+        this.multiplexedIds = null;
+    }
+
+    /**
+     * Private constructor to use in creating a closure of the MultiplexArgumentTypeDescriptor specific to the
+     * given set of multiplexed ids.
+     * @param multiplexer The multiplexer controlling how the matches are split per id.
+     * @param multiplexedIds The collection of multiplexed entries
+     */
+    private MultiplexArgumentTypeDescriptor(final Multiplexer multiplexer, final Collection<?> multiplexedIds) {
+        this.multiplexer = multiplexer;
+        this.multiplexedIds = multiplexedIds;
+    }
+
+    @Override
+    public boolean supports( Class type ) {
+        // Multiplexed arguments are represented as Map types.
+        return Map.class.isAssignableFrom(type);
+    }
+
+    @Override
+    public boolean createsTypeDefault(ArgumentSource source) {
+        // A multiplexed argument always supplies a default (see createTypeDefault).
+        return true;
+    }
+
+    @Override
+    public Object createTypeDefault(ParsingEngine parsingEngine,ArgumentSource source, Type type) {
+        if(multiplexer == null || multiplexedIds == null)
+            throw new ReviewedGATKException("No multiplexed ids available");
+
+        final Class elementType = makeRawTypeIfNecessary(getCollectionComponentType(source.field));
+        final ArgumentTypeDescriptor elementDescriptor = parsingEngine.selectBestTypeDescriptor(elementType);
+
+        // Build one default entry per multiplexed id; ids whose element type has no default map to null.
+        final Map<Object,Object> defaults = new HashMap<Object,Object>();
+        for (final Object id : multiplexedIds) {
+            final Object defaultValue = elementDescriptor.createsTypeDefault(source)
+                    ? elementDescriptor.createTypeDefault(parsingEngine, source, elementType)
+                    : null;
+            defaults.put(id, defaultValue);
+        }
+        return defaults;
+    }
+
+    @Override
+    public String typeDefaultDocString(ArgumentSource source) {
+        // Help-text description of the default produced by createTypeDefault.
+        return "None";
+    }
+
+    @Override
+    public Object parse(ParsingEngine parsingEngine, ArgumentSource source, Type type, ArgumentMatches matches) {
+        if(multiplexedIds == null)
+            throw new ReviewedGATKException("Cannot directly parse a MultiplexArgumentTypeDescriptor; must create a derivative type descriptor first.");
+
+        final Class elementType = makeRawTypeIfNecessary(getCollectionComponentType(source.field));
+
+        // Parse each id's slice of the matches independently and collect the results by id.
+        final Map<Object,Object> parsedById = new HashMap<Object,Object>();
+        for (final Object id : multiplexedIds) {
+            final Object parsedValue = parsingEngine.selectBestTypeDescriptor(elementType)
+                    .parse(parsingEngine, source, elementType, matches.transform(multiplexer, id));
+            parsedById.put(id, parsedValue);
+        }
+
+        // WARNING: side effect — register the resulting map's tags with the parsing engine.
+        parsingEngine.addTags(parsedById, getArgumentTags(matches));
+
+        return parsedById;
+    }
+
+    /**
+     * Builds a derivative descriptor bound to a concrete Multiplexer instance.
+     * Reads the @Multiplex annotation on the dependent field, resolves the named source
+     * fields on the containing object, reflectively constructs the Multiplexer with their
+     * current values, and captures the resulting set of multiplexed ids.
+     * @param parsingEngine engine used to enumerate the containing object's argument sources.
+     * @param dependentArgument the field annotated with @Multiplex.
+     * @param containingObject the object from which source-field values are read.
+     * @return a new derivative MultiplexArgumentTypeDescriptor that can parse/default this field.
+     * @throws ReviewedGATKException if a source field is missing, is itself multiplexed,
+     *         or the Multiplexer cannot be constructed.
+     */
+    public MultiplexArgumentTypeDescriptor createCustomTypeDescriptor(ParsingEngine parsingEngine,ArgumentSource dependentArgument,Object containingObject) {
+        String[] sourceFields = dependentArgument.field.getAnnotation(Multiplex.class).arguments();
+
+        List<ArgumentSource> allSources = parsingEngine.extractArgumentSources(containingObject.getClass());
+        Class[] sourceTypes = new Class[sourceFields.length];
+        Object[] sourceValues = new Object[sourceFields.length];
+        int currentField = 0;
+
+        // Resolve each named source field to its declared type and current value, preserving
+        // the order declared in the annotation (which must match the Multiplexer constructor).
+        for(String sourceField: sourceFields) {
+            boolean fieldFound = false;
+            for(ArgumentSource source: allSources) {
+                if(!source.field.getName().equals(sourceField))
+                    continue;
+                // A multiplexed field cannot feed another multiplexer: dependencies must be independent.
+                if(source.field.isAnnotationPresent(Multiplex.class))
+                    throw new ReviewedGATKException("Command-line arguments can only depend on independent fields");
+                sourceTypes[currentField] = source.field.getType();
+                sourceValues[currentField] = JVMUtils.getFieldValue(source.field,containingObject);
+                currentField++;
+                fieldFound = true;
+            }
+            if(!fieldFound)
+                throw new ReviewedGATKException(String.format("Unable to find source field %s, referred to by dependent field %s",sourceField,dependentArgument.field.getName()));
+        }
+
+        // Instantiate the Multiplexer declared by the annotation via its matching constructor.
+        Class<? extends Multiplexer> multiplexerType = dependentArgument.field.getAnnotation(Multiplex.class).value();
+        Constructor<? extends Multiplexer> multiplexerConstructor;
+        try {
+            multiplexerConstructor = multiplexerType.getConstructor(sourceTypes);
+            multiplexerConstructor.setAccessible(true);
+        }
+        catch(NoSuchMethodException ex) {
+            throw new ReviewedGATKException(String.format("Unable to find constructor for class %s with parameters %s",multiplexerType.getName(),Arrays.deepToString(sourceFields)),ex);
+        }
+
+        Multiplexer multiplexer;
+        try {
+            multiplexer = multiplexerConstructor.newInstance(sourceValues);
+        }
+        catch(IllegalAccessException ex) {
+            throw new ReviewedGATKException(String.format("Constructor for class %s with parameters %s is inaccessible",multiplexerType.getName(),Arrays.deepToString(sourceFields)),ex);
+        }
+        catch(InstantiationException ex) {
+            throw new ReviewedGATKException(String.format("Can't create class %s with parameters %s",multiplexerType.getName(),Arrays.deepToString(sourceFields)),ex);
+        }
+        catch(InvocationTargetException ex) {
+            throw new ReviewedGATKException(String.format("Can't invoke constructor of class %s with parameters %s",multiplexerType.getName(),Arrays.deepToString(sourceFields)),ex);
+        }
+
+        return new MultiplexArgumentTypeDescriptor(multiplexer,multiplexer.multiplex());
+    }
+
+    /**
+     * Return the component type of a multiplexed field, or String.class if the type cannot be found.
+     * For a parameterized Map&lt;K,V&gt; field this is the VALUE type V (type argument index 1);
+     * the key type is the multiplexed id and is not returned here.
+     * @param field The reflected field to inspect.
+     * @return The map's value type, or String.class if the field is not parameterized.
+     * @throws IllegalArgumentException If the parameterized type does not have exactly two type arguments.
+     */
+    @Override
+    protected Type getCollectionComponentType( Field field ) {
+        // Multiplex arguments must resolve to maps from which the clp should extract the second type.
+        if( field.getGenericType() instanceof ParameterizedType) {
+            ParameterizedType parameterizedType = (ParameterizedType)field.getGenericType();
+            if( parameterizedType.getActualTypeArguments().length != 2 )
+                throw new IllegalArgumentException("Unable to determine collection type of field: " + field.toString());
+            return (Class)parameterizedType.getActualTypeArguments()[1];
+        }
+        else
+            return String.class;
+    }
+}
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/commandline/ClassType.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/commandline/ClassType.java
new file mode 100644
index 0000000..2753c8f
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/commandline/ClassType.java
@@ -0,0 +1,40 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.commandline;
+
+import java.lang.annotation.*;
+
+/**
+ * Annotates generic fields where the parameterized type is not specified or erased.
+ * Primarily used for Queue traits.  Defined in java since scala does not support RetentionPolicy.RUNTIME.
+ */
+@Documented
+@Inherited
+@Retention(RetentionPolicy.RUNTIME)
+@Target({ElementType.FIELD})
+public @interface ClassType {
+    /** The concrete class standing in for the erased/unspecified parameterized type. */
+    Class value();
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/commandline/CommandLineProgram.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/commandline/CommandLineProgram.java
new file mode 100644
index 0000000..f29978e
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/commandline/CommandLineProgram.java
@@ -0,0 +1,460 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.commandline;
+
+import org.apache.log4j.FileAppender;
+import org.apache.log4j.Level;
+import org.apache.log4j.Logger;
+import org.apache.log4j.PatternLayout;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+import org.broadinstitute.gatk.utils.help.ApplicationDetails;
+import org.broadinstitute.gatk.utils.help.HelpConstants;
+import org.broadinstitute.gatk.utils.help.HelpFormatter;
+import org.broadinstitute.gatk.utils.text.TextFormattingUtils;
+
+import java.io.IOException;
+import java.util.*;
+
+/**
+ * Base class for GATK command-line tools. Owns the argument-parsing lifecycle
+ * (including dynamically added argument sources), log4j configuration, --help
+ * and --version handling, and the standard error-reporting/System.exit paths.
+ */
+public abstract class CommandLineProgram {
+
+    /** The command-line program and the arguments it returned. */
+    public ParsingEngine parser = null;
+
+    /**
+     * Setting INFO gets you INFO up to FATAL, setting ERROR gets you ERROR and FATAL level logging, and so on.
+     */
+    @Argument(fullName = "logging_level", shortName = "l", doc = "Set the minimum level of logging", required = false)
+    protected String logging_level = "INFO";
+
+    /**
+     * File to save the logging output.
+     */
+    @Output(fullName = "log_to_file", shortName = "log", doc = "Set the logging location", required = false)
+    protected String toFile = null;
+
+    /**
+     * This will produce a help message in the terminal with general usage information, listing available arguments
+     * as well as tool-specific information if applicable.
+     */
+    @Argument(fullName = "help", shortName = "h", doc = "Generate the help message", required = false)
+    public Boolean help = false;
+
+    /**
+     * Use this to check the version number of the GATK executable you are invoking. Note that the version number is
+     * always included in the output at the start of every run as well as any error message.
+     */
+    @Argument(fullName = "version", shortName = "version", doc ="Output version information", required = false)
+    public Boolean version = false;
+
+
+    /** our logging output patterns */
+    private static final String patternString = "%-5p %d{HH:mm:ss,SSS} %C{1} - %m %n";
+
+    static {
+        /**
+         * The very first thing that any GATK application does is force the JVM locale into US English, so that we don't have
+         * to think about number formatting issues.
+         */
+        forceJVMLocaleToUSEnglish();
+        // setup a basic log configuration
+        CommandLineUtils.configureConsoleLogging();
+    }
+
+
+    /**
+     * Allows a given application to return a brief description of itself.
+     *
+     * @return An ApplicationDetails object describing the current application.  Should not be null.
+     */
+    protected ApplicationDetails getApplicationDetails() {
+        return new ApplicationDetails(ApplicationDetails.createDefaultHeader(getClass()),
+                                      Collections.<String>emptyList(),
+                                      ApplicationDetails.createDefaultRunningInstructions(getClass()),
+                                      null);
+    }
+
+    /**
+     * Subclasses of CommandLinePrograms can provide their own types of command-line arguments.
+     * @return A collection of type descriptors generating implementation-dependent placeholders.
+     */
+    protected Collection<ArgumentTypeDescriptor> getArgumentTypeDescriptors() {
+        return Collections.emptyList();
+    }
+
+    /**
+     * Will this application want to vary its argument list dynamically?
+     * If so, parse the command-line options and then prompt the subclass to return
+     * a list of argument providers.
+     *
+     * @return Whether the application should vary command-line arguments dynamically.
+     */
+    protected boolean canAddArgumentsDynamically() { return false; }
+
+    /**
+     * Provide a list of objects to inspect, looking for additional command-line arguments.
+     *
+     * @return A list of objects to inspect.
+     */
+    protected Class[] getArgumentSources() {
+        return new Class[]{};
+    }
+
+    /**
+     * Name this argument source.  Provides the (full) class name as a default.
+     *
+     * @param source The argument source.
+     *
+     * @return a name for the argument source.
+     */
+    protected String getArgumentSourceName( Class source ) { return source.toString(); }
+
+    /**
+     * Sets the command-line parsing engine. Necessary for unit testing purposes.
+     * @param parser the new command-line parsing engine
+     */
+    public void setParser( ParsingEngine parser ) {
+        this.parser = parser;
+    }
+
+    /**
+     * this is the function that the inheriting class can expect to have called
+     * when all the argument processing is done
+     *
+     * @return the return code to exit the program with
+     * @throws Exception when an exception occurs
+     */
+    protected abstract int execute() throws Exception;
+
+    // NOTE(review): mutable public static state; records the return value of the last execute() call.
+    public static int result = -1;
+
+    @SuppressWarnings("unchecked")
+    public static void start(CommandLineProgram clp, String[] args) throws Exception {
+        start(clp, args, false);
+    }
+
+    /**
+     * This function is called to start processing the command line, and kick
+     * off the execute method of the program.
+     *
+     * Note: the --help and --version paths call System.exit and never return;
+     * with dryRun=true, validation and execute() are skipped entirely.
+     *
+     * @param clp  the command line program to execute
+     * @param args the command line arguments passed in
+     * @param dryRun dry run
+     * @throws Exception when an exception occurs
+     */
+    @SuppressWarnings("unchecked")
+    public static void start(CommandLineProgram clp, String[] args, boolean dryRun) throws Exception {
+
+        try {
+            // setup our log layout
+            PatternLayout layout = new PatternLayout();
+
+            Logger logger = CommandLineUtils.getStingLogger();
+
+            // now set the layout of all the loggers to our layout
+            CommandLineUtils.setLayout(logger, layout);
+
+            // Initialize the logger using the defaults.
+            clp.setupLoggerLevel(layout);
+
+            // setup the parser
+            ParsingEngine parser = clp.parser = new ParsingEngine(clp);
+            parser.addArgumentSource(clp.getClass());
+
+            Map<ArgumentMatchSource, ParsedArgs> parsedArgs;
+
+            // process the args
+            if (clp.canAddArgumentsDynamically()) {
+                // if the command-line program can toss in extra args, fetch them and reparse the arguments.
+                parser.parse(args);
+
+                // Allow invalid and missing required arguments to pass this validation step.
+                //   - InvalidArgument in case these arguments are specified by plugins.
+                //   - MissingRequiredArgument in case the user requested help.  Handle that later, once we've
+                //                             determined the full complement of arguments.
+                if ( ! dryRun )
+                    parser.validate(EnumSet.of(ParsingEngine.ValidationType.MissingRequiredArgument,
+                            ParsingEngine.ValidationType.InvalidArgument));
+                parser.loadArgumentsIntoObject(clp);
+
+                // Initialize the logger using the loaded command line.
+                clp.setupLoggerLevel(layout);
+
+                // Register the dynamically-discovered argument sources, then reparse with the full set.
+                Class[] argumentSources = clp.getArgumentSources();
+                    for (Class argumentSource : argumentSources)
+                    parser.addArgumentSource(clp.getArgumentSourceName(argumentSource), argumentSource);
+                parsedArgs = parser.parse(args);
+
+                if (isVersionPresent(parser))
+                    printVersionAndExit();
+
+                if (isHelpPresent(parser))
+                    printHelpAndExit(clp, parser);
+
+                if ( ! dryRun ) parser.validate();
+            } else {
+                parsedArgs = parser.parse(args);
+
+                if ( ! dryRun ) {
+                    if (isHelpPresent(parser))
+                        printHelpAndExit(clp, parser);
+
+                    parser.validate();
+                }
+                parser.loadArgumentsIntoObject(clp);
+
+                // Initialize the logger using the loaded command line.
+                clp.setupLoggerLevel(layout);
+            }
+
+            if ( ! dryRun ) {
+                // if they specify a log location, output our data there
+                if (clp.toFile != null) {
+                    FileAppender appender;
+                    try {
+                        appender = new FileAppender(layout, clp.toFile, false);
+                        logger.addAppender(appender);
+                    } catch (IOException e) {
+                        throw new RuntimeException("Unable to re-route log output to " + clp.toFile + " make sure the destination exists");
+                    }
+                }
+
+                // regardless of what happens next, generate the header information
+                HelpFormatter.generateHeaderInformation(clp.getApplicationDetails(), parsedArgs);
+
+                // call the execute
+                CommandLineProgram.result = clp.execute();
+            }
+        }
+        catch (ArgumentException e) {
+            //clp.parser.printHelp(clp.getApplicationDetails());
+            // Rethrow the exception to exit with an error.
+            throw e;
+        }
+    }
+
+    /**
+     * Find fields in the object obj that look like command-line arguments, and put command-line
+     * arguments into them.
+     *
+     * @param obj Object to inspect for command line arguments.
+     */
+    public void loadArgumentsIntoObject(Object obj) {
+        parser.loadArgumentsIntoObject(obj);
+    }
+
+    /**
+     * this function checks the logger level passed in on the command line, taking the lowest
+     * level that was provided.
+     * @param layout Pattern layout to format based on the logger level.
+     * @throws ArgumentException if logging_level does not name a recognized log4j level.
+     */
+    private void setupLoggerLevel(PatternLayout layout) {
+        layout.setConversionPattern(patternString);
+
+        // set the default logger level
+        // (toUpperCase is safe here: the static initializer forces Locale.US before any parsing)
+        Level par;
+        if (logging_level.toUpperCase().equals("DEBUG")) {
+            par = Level.DEBUG;
+        } else if (logging_level.toUpperCase().equals("INFO")) {
+            par = Level.INFO;
+        } else if (logging_level.toUpperCase().equals("WARN")) {
+            par = Level.WARN;
+        } else if (logging_level.toUpperCase().equals("ERROR")) {
+            par = Level.ERROR;
+        } else if (logging_level.toUpperCase().equals("FATAL")) {
+            par = Level.FATAL;
+        } else if (logging_level.toUpperCase().equals("OFF")) {
+            par = Level.OFF;
+        } else {
+            // we don't understand the logging level, let's get out of here
+            throw new ArgumentException("Unable to match: " + logging_level + " to a logging level, make sure it's a valid level (DEBUG, INFO, WARN, ERROR, FATAL, OFF)");
+        }
+
+        Logger.getRootLogger().setLevel(par);
+    }
+
+    /** @return the GATK version string from the resource bundle, or "&lt;unknown&gt;" when absent. */
+    public static String getVersionNumber() {
+        ResourceBundle headerInfo = TextFormattingUtils.GATK_RESOURCE_BUNDLE;
+        return headerInfo.containsKey("org.broadinstitute.gatk.utils.version") ? headerInfo.getString("org.broadinstitute.gatk.utils.version") : "<unknown>";
+    }
+
+    /** @return the build timestamp from the resource bundle, or "&lt;unknown&gt;" when absent. */
+    public static String getBuildTime() {
+        ResourceBundle headerInfo = TextFormattingUtils.GATK_RESOURCE_BUNDLE;
+        return headerInfo.containsKey("build.timestamp") ? headerInfo.getString("build.timestamp") : "<unknown>";
+    }
+
+    /**
+     * a function used to indicate an error occurred in the command line tool
+     */
+    private static void printDocumentationReference() {
+        errorPrintf("Visit our website and forum for extensive documentation and answers to %n");
+        errorPrintf("commonly asked questions " + HelpConstants.BASE_GATK_URL + "%n");
+    }
+
+
+    /**
+     * Do a cursory search for the given argument.
+     *
+     * @param parser Parser
+     *
+     * @return True if help is present; false otherwise.
+     */
+    private static boolean isHelpPresent(ParsingEngine parser) {
+        return parser.isArgumentPresent("help");
+    }
+
+    /**
+     * Print help and exit.
+     *
+     * @param clp    Instance of the command-line program.
+     * @param parser The parsing engine used to render the help message.
+     */
+    private static void printHelpAndExit(CommandLineProgram clp, ParsingEngine parser) {
+        parser.printHelp(clp.getApplicationDetails());
+        System.exit(0);
+    }
+
+    /**
+     * Do a cursory search for the argument "version".
+     *
+     * @param parser Parser
+     *
+     * @return True if version is present; false otherwise.
+     */
+    private static boolean isVersionPresent(ParsingEngine parser) {
+        return parser.isArgumentPresent("version");
+    }
+
+    /**
+     * Print the version number and exit.
+     */
+    private static void printVersionAndExit() {
+        System.out.println(getVersionNumber().toString());
+        System.exit(0);
+    }
+
+
+    // Writes each line of the formatted message to stderr, prefixed with "##### ERROR".
+    private static void errorPrintf(String format, Object... s) {
+        String formatted = String.format(format, s);
+
+        if ( formatted.trim().equals("") )
+            System.err.println("##### ERROR");
+        else {
+            for ( String part : formatted.split("\n") ) {
+                System.err.println("##### ERROR " + part);
+            }
+        }
+    }
+
+
+    /**
+     * used to indicate an error occurred; prints a banner and stack trace, then exits with status 1
+     *
+     * @param msg the message
+     * @param t   the error
+     */
+    public static void exitSystemWithError(String msg, final Throwable t) {
+        errorPrintf("------------------------------------------------------------------------------------------%n");
+        errorPrintf("stack trace %n");
+        t.printStackTrace();
+
+        errorPrintf("------------------------------------------------------------------------------------------%n");
+        errorPrintf("A GATK RUNTIME ERROR has occurred (version %s):%n", getVersionNumber());
+        errorPrintf("%n");
+        errorPrintf("This might be a bug. Please check the documentation guide to see if this is a known problem.%n");
+        errorPrintf("If not, please post the error message, with stack trace, to the GATK forum.%n");
+        printDocumentationReference();
+        if ( msg == null ) // some exceptions don't have detailed messages
+            msg = "Code exception (see stack trace for error itself)";
+        errorPrintf("%n");
+        errorPrintf("MESSAGE: %s%n", msg.trim());
+        errorPrintf("------------------------------------------------------------------------------------------%n");
+        System.exit(1);
+    }
+
+    /** Reports a user-caused error (bad arguments/inputs) and exits with status 1. */
+    public static void exitSystemWithUserError(final Exception e) {
+        if ( e.getMessage() == null )
+            throw new ReviewedGATKException("UserException found with no message!", e);
+
+        errorPrintf("------------------------------------------------------------------------------------------%n");
+        errorPrintf("A USER ERROR has occurred (version %s): %n", getVersionNumber());
+        errorPrintf("%n");
+        errorPrintf("This means that one or more arguments or inputs in your command are incorrect.%n");
+        errorPrintf("The error message below tells you what is the problem.%n");
+        errorPrintf("%n");
+        errorPrintf("If the problem is an invalid argument, please check the online documentation guide%n");
+        errorPrintf("(or rerun your command with --help) to view allowable command-line arguments for this tool.%n");
+        errorPrintf("%n");
+        printDocumentationReference();
+        errorPrintf("%n");
+        errorPrintf("Please do NOT post this error to the GATK forum unless you have really tried to fix it yourself.%n");
+        errorPrintf("%n");
+        errorPrintf("MESSAGE: %s%n", e.getMessage().trim());
+        errorPrintf("------------------------------------------------------------------------------------------%n");
+        System.exit(1);
+    }
+
+    /** Reports a malformed BAM/CRAM input error and exits with status 1. */
+    public static void exitSystemWithSamError(final Throwable t) {
+        if ( t.getMessage() == null )
+            throw new ReviewedGATKException("SamException found with no message!", t);
+
+        errorPrintf("------------------------------------------------------------------------------------------%n");
+        errorPrintf("A BAM/CRAM ERROR has occurred (version %s): %n", getVersionNumber());
+        errorPrintf("%n");
+        errorPrintf("This means that there is something wrong with the BAM/CRAM file(s) you provided.%n");
+        errorPrintf("The error message below tells you what is the problem.%n");
+        errorPrintf("%n");
+        printDocumentationReference();
+        errorPrintf("%n");
+        errorPrintf("Please do NOT post this error to the GATK forum until you have followed these instructions:%n");
+        errorPrintf("- Make sure that your BAM file is well-formed by running Picard's validator on it%n");
+        errorPrintf("(see http://picard.sourceforge.net/command-line-overview.shtml#ValidateSamFile for details)%n");
+        errorPrintf("- Ensure that your BAM index is not corrupted: delete the current one and regenerate it with 'samtools index'%n");
+        errorPrintf("- Ensure that your CRAM index is not corrupted: delete the current one and regenerate it with%n");
+        errorPrintf("'java -jar cramtools-3.0.jar index --bam-style-index --input-file <input cram file> --reference-fasta-file <reference fasta file>'%n");
+        errorPrintf("(see https://github.com/enasequence/cramtools/tree/v3.0 for details)%n");
+        errorPrintf("%n");
+        errorPrintf("MESSAGE: %s%n", t.getMessage().trim());
+        errorPrintf("------------------------------------------------------------------------------------------%n");
+        System.exit(1);
+    }
+
+
+    /**
+     * used to indicate an error occurred
+     *
+     * @param t the exception that occurred
+     */
+    public static void exitSystemWithError(Throwable t) {
+        exitSystemWithError(t.getMessage(), t);
+    }
+
+    /**
+     * A hack to ensure that numbers are always formatted in the US style.
+     */
+    protected static void forceJVMLocaleToUSEnglish() {
+        Locale.setDefault(Locale.US);
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/commandline/CommandLineUtils.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/commandline/CommandLineUtils.java
new file mode 100644
index 0000000..62139fb
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/commandline/CommandLineUtils.java
@@ -0,0 +1,192 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.commandline;
+
+import org.apache.log4j.Appender;
+import org.apache.log4j.ConsoleAppender;
+import org.apache.log4j.Logger;
+import org.apache.log4j.PatternLayout;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+
+import java.lang.annotation.Annotation;
+import java.util.Collections;
+import java.util.Enumeration;
+import java.util.LinkedHashMap;
+import java.util.Map;
+
+/**
+ * Static utility methods for working with command-line arguments.
+ *
+ * @author mhanna
+ * @version 0.1
+ */
+public class CommandLineUtils {
+    // NOTE(review): utility class with only static members; consider a private constructor upstream.
+
+    /**
+     * Returns a key-value mapping of the command-line arguments passed into the GATK.
+     * Will be approximate; this class doesn't have all the required data to completely
+     * reconstruct the list of command-line arguments from the given objects.
+     *
+     * @param parsingEngine      The parsing engine
+     * @param argumentProviders  The providers of command-line arguments.
+     * @return A key-value mapping of argument full names to argument values.  Produces best string representation
+     *         possible given the information available.
+     */
+    public static Map<String,String> getApproximateCommandLineArguments(ParsingEngine parsingEngine, Object... argumentProviders) {
+        return getApproximateCommandLineArguments(parsingEngine, false, argumentProviders);
+    }
+
+    /**
+     * Returns a key-value mapping of the command-line arguments passed into the GATK.
+     * Will be approximate; this class doesn't have all the required data to completely
+     * reconstruct the list of command-line arguments from the given objects.
+     *
+     * @param parsingEngine      The parsing engine
+     * @param skipObjectPointers Should we skip arguments whose values are pointers (and don't print nicely)?
+     * @param argumentProviders  The providers of command-line arguments.
+     * @return A key-value mapping of argument full names to argument values.  Produces best string representation
+     *         possible given the information available.
+     */
+    public static Map<String,String> getApproximateCommandLineArguments(ParsingEngine parsingEngine, boolean skipObjectPointers, Object... argumentProviders) {
+        // LinkedHashMap preserves the order in which argument bindings were discovered.
+        Map<String,String> commandLineArguments = new LinkedHashMap<String,String>();
+
+        for(Object argumentProvider: argumentProviders) {
+            Map<ArgumentSource, Object> argBindings = parsingEngine.extractArgumentBindings(argumentProvider);
+            for(Map.Entry<ArgumentSource, Object> elt: argBindings.entrySet()) {
+                Object argumentValue = elt.getValue();
+
+                String argumentValueString = argumentValue != null ? argumentValue.toString() : null;
+                // Values whose toString() is just "Class@hexhash" are noise; optionally drop them.
+                if ( skipObjectPointers && isObjectPointer(argumentValueString) )
+                    continue;
+
+                for(ArgumentDefinition definition: elt.getKey().createArgumentDefinitions()) {
+                    String argumentName = definition.fullName;
+                    commandLineArguments.put(argumentName,argumentValueString);
+                }
+            }
+        }
+
+        return commandLineArguments;
+    }
+
+    /**
+     * Create an approximate list of command-line arguments based on the given argument providers.
+     * Object-pointer values are skipped by default.
+     * @param parsingEngine      The parsing engine
+     * @param argumentProviders  Argument providers to inspect.
+     * @return A string representing the given command-line arguments.
+     */
+    public static String createApproximateCommandLineArgumentString(ParsingEngine parsingEngine, Object... argumentProviders) {
+        return createApproximateCommandLineArgumentString(parsingEngine, true, argumentProviders);
+    }
+
+    /**
+     * Create an approximate list of command-line arguments based on the given argument providers,
+     * rendered as space-separated "name=value" pairs.
+     * @param parsingEngine      The parsing engine
+     * @param skipObjectPointers Should we skip arguments whose values are pointers (and don't print nicely)?
+     * @param argumentProviders  Argument providers to inspect.
+     * @return A string representing the given command-line arguments.
+     */
+    public static String createApproximateCommandLineArgumentString(ParsingEngine parsingEngine, boolean skipObjectPointers, Object... argumentProviders) {
+        Map<String,String> commandLineArgs = getApproximateCommandLineArguments(parsingEngine, skipObjectPointers, argumentProviders);
+        StringBuffer sb = new StringBuffer();
+
+        boolean first = true;
+        for ( Map.Entry<String, String> commandLineArg : commandLineArgs.entrySet() ) {
+            if ( !first )
+                sb.append(" ");
+            sb.append(commandLineArg.getKey());
+            sb.append("=");
+            sb.append(commandLineArg.getValue());
+            first = false;
+        }
+
+        return sb.toString();
+    }
+
+    /**
+     * A hack to get around the fact that Java doesn't like inheritance in Annotations.
+     * @param annotation to run the method on
+     * @param method the method to invoke
+     * @return the return value of the method
+     * @throws ReviewedGATKException if the method cannot be found or invoked.
+     */
+    public static Object getValue(Annotation annotation, String method) {
+        try {
+            return annotation.getClass().getMethod(method).invoke(annotation);
+        } catch (Exception e) {
+            throw new ReviewedGATKException("Unable to access method " + method + " on annotation " + annotation.getClass(), e);
+        }
+    }
+
+    // The problem here is that some of the fields being output are Objects - and those
+    //  Objects don't overload toString() so that the output is just the memory pointer
+    //  to the Object.  Because those values are non-deterministic, they don't merge well
+    //  into BAM/VCF headers (plus, it's just damn ugly).  Perhaps there's a better way to
+    //  do this, but at least this one works for the moment.
+    private static final String pointerRegexp = ".+@[0-9a-fA-F]+$";
+    // True when the string looks like the default Object.toString() form "Class@hexhash".
+    private static boolean isObjectPointer(String s) {
+        return s != null && s.matches(pointerRegexp);
+    }
+
+    /**
+     * Returns the root logger for all GATK code.
+     * @return the root logger for all GATK  code.
+     */
+    public static Logger getStingLogger() {
+        return Logger.getLogger("org.broadinstitute.gatk");
+    }
+
+    /**
+     * Enables console logging.
+     * No-op if any logger from the GATK logger up through its parents already
+     * has a ConsoleAppender attached.
+     */
+    @SuppressWarnings("unchecked")
+    public static void configureConsoleLogging() {
+        // Check to see if a console logger has already been enabled.
+        for (Logger logger = getStingLogger(); logger != null; logger = (Logger)logger.getParent()) {
+            Enumeration<Appender> e = (Enumeration<Appender>) logger.getAllAppenders();
+            for (Appender appender: Collections.list(e)) {
+                if (appender instanceof ConsoleAppender)
+                    return;
+            }
+        }
+        // Extracted from BasicConfigurator.configure(), but only applied to the GATK logger.
+        Logger.getRootLogger().addAppender(new ConsoleAppender(
+                    new PatternLayout(PatternLayout.TTCC_CONVERSION_PATTERN), ConsoleAppender.SYSTEM_ERR));
+    }
+
+    /**
+     * Sets the layout of the logger.
+     * Walks from the given logger up through all parent loggers, applying the
+     * layout to every attached appender.
+     * @param logger The logger.
+     * @param layout The layout.
+     */
+    @SuppressWarnings("unchecked")
+    public static void setLayout(Logger logger, PatternLayout layout) {
+        for (; logger != null; logger = (Logger)logger.getParent()) {
+            Enumeration<Appender> e = (Enumeration<Appender>) logger.getAllAppenders();
+            for (Appender appender: Collections.list(e))
+                appender.setLayout(layout);
+        }
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/commandline/EnumerationArgumentDefault.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/commandline/EnumerationArgumentDefault.java
new file mode 100644
index 0000000..be7a65b
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/commandline/EnumerationArgumentDefault.java
@@ -0,0 +1,65 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.commandline;
+
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+/**
+ * @author aaron
+ * <p/>
+ * Annotation EnumerationArgumentDefault
+ * <p/>
+ * Allows the default argument value to be set for an enum; this allows us to treat enums as
+ * booleans on the command line. I.e.
+ *
+ * if we're using an enum Shape,
+ *
+ * enum shape {
+ *  SQUARE,
+ *  CIRCLE,
+ *  @EnumerationArgumentDefault
+ *  TRIANGLE
+ * }
+ *
+ * and a command line option -shape, the EnumerationArgumentDefault would allow you to say:
+ * -shape
+ * or
+ * -shape TRIANGLE
+ *
+ * would get -shape set to TRIANGLE, where:
+ *
+ * -shape SQUARE
+ *
+ * would set shape to SQUARE
+ *
+ */
+@Retention(RetentionPolicy.RUNTIME)
+@Target(ElementType.FIELD)
+public @interface EnumerationArgumentDefault {
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/commandline/Gather.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/commandline/Gather.java
new file mode 100644
index 0000000..e8e8cde
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/commandline/Gather.java
@@ -0,0 +1,41 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.commandline;
+
+import java.lang.annotation.*;
+
+/**
+ * Specifies the class type to gather an @Output
+ */
+@Documented
+@Inherited
+@Retention(RetentionPolicy.RUNTIME)
+@Target({ElementType.FIELD})
+public @interface Gather {
+    Class value() default Gather.class;
+    String className() default "";
+    boolean enabled() default true;
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/commandline/Gatherer.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/commandline/Gatherer.java
new file mode 100644
index 0000000..158095a
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/commandline/Gatherer.java
@@ -0,0 +1,47 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.commandline;
+
+import java.io.File;
+import java.util.List;
+
+/**
+ * Combines a list of files into a single output.
+ */
+public abstract class Gatherer {
+    /**
+     * Gathers a list of files into a single output.
+     * @param inputs Files to combine.
+     * @param output Path to output file.
+     */
+    public abstract void gather(List<File> inputs, File output);
+
+    /**
+     * Returns true if the caller should wait for the input files to propagate over NFS before running gather().
+     * @return true if the caller should wait for the input files to propagate over NFS before running gather().
+     */
+    public boolean waitForInputs() { return true; }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/commandline/Hidden.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/commandline/Hidden.java
new file mode 100644
index 0000000..f5a0202
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/commandline/Hidden.java
@@ -0,0 +1,41 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.commandline;
+
+import java.lang.annotation.*;
+
+/**
+ * Indicates that a walker or walker argument should not be presented in the help system.
+ *
+ * @author mhanna
+ * @version 0.1
+ */
+@Documented
+@Inherited
+@Retention(RetentionPolicy.RUNTIME)
+@Target({ElementType.TYPE,ElementType.FIELD})
+public @interface Hidden {
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/commandline/Input.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/commandline/Input.java
new file mode 100644
index 0000000..9fd4978
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/commandline/Input.java
@@ -0,0 +1,83 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.commandline;
+
+import java.lang.annotation.*;
+
+/**
+ * Annotates fields in objects that should be used as command-line arguments.
+ * Any field annotated with @Input can appear as a command-line parameter.
+ */
+@Documented
+@Inherited
+@Retention(RetentionPolicy.RUNTIME)
+@Target(ElementType.FIELD)
+public @interface Input {
+    /**
+     * The full name of the command-line argument.  Full names should be
+     * prefixed on the command-line with a double dash (--).
+     * @return Selected full name, or "" to use the default.
+     */
+    String fullName() default "";
+
+    /**
+     * Specified short name of the command.  Short names should be prefixed
+     * with a single dash.  Argument values can directly abut single-char
+     * short names or be separated from them by a space.
+     * @return Selected short name, or "" for none.
+     */
+    String shortName() default "";
+
+    /**
+     * Documentation for the command-line argument.  Should appear when the
+     * --help argument is specified.
+     * @return Doc string associated with this command-line argument.
+     */
+    String doc() default "Undocumented option";
+
+    /**
+     * Is this argument required.  If true, the command-line argument system will
+     * make a best guess for populating this argument based on the type descriptor,
+     * and will fail if the type can't be populated.
+     * @return True if the argument is required.  False otherwise.
+     */
+    boolean required() default true;
+
+    /**
+     * Should this command-line argument be exclusive of others.  Should be
+     * a comma-separated list of names of arguments of which this should be
+     * independent.
+     * @return A comma-separated string listing other arguments of which this
+     *         argument should be independent.
+     */
+    String exclusiveOf() default "";
+
+    /**
+     * Provide a regexp-based validation string.
+     * @return Non-empty regexp for validation, blank otherwise.
+     */
+    String validation() default "";
+}
\ No newline at end of file
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/commandline/IntervalArgumentCollection.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/commandline/IntervalArgumentCollection.java
new file mode 100644
index 0000000..8d18177
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/commandline/IntervalArgumentCollection.java
@@ -0,0 +1,88 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.commandline;
+
+import htsjdk.tribble.Feature;
+import org.broadinstitute.gatk.utils.interval.IntervalMergingRule;
+import org.broadinstitute.gatk.utils.interval.IntervalSetRule;
+
+import java.util.List;
+
+public class IntervalArgumentCollection {
+    /**
+     * Use this option to perform the analysis over only part of the genome. This argument can be specified multiple times.
+     * You can use samtools-style intervals either explicitly on the command line (e.g. -L chr1 or -L chr1:100-200) or
+     * by loading in a file containing a list of intervals (e.g. -L myFile.intervals).
+     *
+     * Additionally, you can also specify a ROD file (such as a VCF file) in order to perform the analysis at specific
+     * positions based on the records present in the file (e.g. -L file.vcf).
+     *
+     * Finally, you can also use this to perform the analysis on the reads that are completely unmapped in the BAM file
+     * (i.e. those without a reference contig) by specifying -L unmapped.
+     */
+    @Input(fullName = "intervals", shortName = "L", doc = "One or more genomic intervals over which to operate", required = false)
+    public List<IntervalBinding<Feature>> intervals = null;
+
+    /**
+     * Use this option to exclude certain parts of the genome from the analysis (like -L, but the opposite).
+     * This argument can be specified multiple times. You can use samtools-style intervals either explicitly on the
+     * command line (e.g. -XL chr1 or -XL chr1:100-200) or by loading in a file containing a list of intervals
+     * (e.g. -XL myFile.intervals).
+     *
+     * Additionally, you can also specify a ROD file (such as a VCF file) in order to exclude specific
+     * positions from the analysis based on the records present in the file (e.g. -XL file.vcf).
+     * */
+    @Input(fullName = "excludeIntervals", shortName = "XL", doc = "One or more genomic intervals to exclude from processing", required = false)
+    public List<IntervalBinding<Feature>> excludeIntervals = null;
+
+    /**
+     * By default, the program will take the UNION of all intervals specified using -L and/or -XL. However, you can
+     * change this setting for -L, for example if you want to take the INTERSECTION of the sets instead. E.g. to perform the
+     * analysis on positions for which there is a record in a VCF, but restrict this to just those on chromosome 20,
+     * you would do -L chr20 -L file.vcf -isr INTERSECTION. However, it is not possible to modify the merging approach
+     * for intervals passed using -XL (they will always be merged using UNION).
+     *
+     * Note that if you specify both -L and -XL, the -XL interval set will be subtracted from the -L interval set.
+     */
+    @Argument(fullName = "interval_set_rule", shortName = "isr", doc = "Set merging approach to use for combining interval inputs", required = false)
+    public IntervalSetRule intervalSetRule = IntervalSetRule.UNION;
+
+    /**
+     * By default, the program merges abutting intervals (i.e. intervals that are directly side-by-side but do not
+     * actually overlap) into a single continuous interval. However you can change this behavior if you want them to be
+     * treated as separate intervals instead.
+     */
+    @Argument(fullName = "interval_merging", shortName = "im", doc = "Interval merging rule for abutting intervals", required = false)
+    public IntervalMergingRule intervalMerging = IntervalMergingRule.ALL;
+
+    /**
+     * Use this to add padding to the intervals specified using -L and/or -XL. For example, '-L chr1:100' with a
+     * padding value of 20 would turn into '-L chr1:80-120'. This is typically used to add padding around exons when
+     * analyzing exomes. The general Broad exome calling pipeline uses 100 bp padding by default.
+     */
+    @Argument(fullName = "interval_padding", shortName = "ip", doc = "Amount of padding (in bp) to add to each interval", required = false, minValue = 0)
+    public int intervalPadding = 0;
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/commandline/IntervalBinding.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/commandline/IntervalBinding.java
new file mode 100644
index 0000000..da7fa6e
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/commandline/IntervalBinding.java
@@ -0,0 +1,101 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.commandline;
+
+import com.google.java.contract.Requires;
+import htsjdk.tribble.AbstractFeatureReader;
+import htsjdk.tribble.Feature;
+import htsjdk.tribble.FeatureCodec;
+import htsjdk.tribble.FeatureReader;
+import org.broadinstitute.gatk.utils.refdata.ReferenceDependentFeatureCodec;
+import org.broadinstitute.gatk.utils.refdata.tracks.FeatureManager;
+import org.broadinstitute.gatk.utils.GenomeLoc;
+import org.broadinstitute.gatk.utils.GenomeLocParser;
+import org.broadinstitute.gatk.utils.exceptions.UserException;
+import org.broadinstitute.gatk.utils.interval.IntervalUtils;
+
+import java.util.*;
+
+/**
+ * An IntervalBinding representing a walker argument that gets bound to either a ROD track or interval string.
+ *
+ * The IntervalBinding<T> is a formal GATK argument that bridges between a walker and
+ * the engine to construct intervals for traversal at runtime.  The IntervalBinding can
+ * either be a RodBinding<T>, a string of one interval, or a file with interval strings.
+ * The GATK Engine takes care of initializing the binding when appropriate and determining intervals from it.
+ *
+ * Note that this class is immutable.
+ */
+public final class IntervalBinding<T extends Feature> {
+
+    private RodBinding<T> featureIntervals;
+    private String stringIntervals;
+
+    @Requires({"type != null", "rawName != null", "source != null", "tribbleType != null", "tags != null"})
+    public IntervalBinding(Class<T> type, final String rawName, final String source, final String tribbleType, final Tags tags) {
+        featureIntervals = new RodBinding<>(type, rawName, source, tribbleType, tags);
+    }
+
+    @Requires({"intervalArgument != null"})
+    public IntervalBinding(String intervalArgument) {
+        stringIntervals = intervalArgument;
+    }
+
+    public String getSource() {
+        return ( featureIntervals != null ? featureIntervals.getSource() : stringIntervals );
+    }
+
+    public List<GenomeLoc> getIntervals(final GenomeLocParser genomeLocParser) {
+        List<GenomeLoc> intervals;
+
+        if ( featureIntervals != null ) {
+            intervals = new ArrayList<>();
+
+            // TODO -- after ROD system cleanup, go through the ROD system so that we can handle things like gzipped files
+
+            final FeatureCodec codec = new FeatureManager().getByName(featureIntervals.getTribbleType()).getCodec();
+            if ( codec instanceof ReferenceDependentFeatureCodec )
+                ((ReferenceDependentFeatureCodec)codec).setGenomeLocParser(genomeLocParser);
+            try {
+                FeatureReader<Feature> reader = AbstractFeatureReader.getFeatureReader(featureIntervals.getSource(), codec, false);
+                for ( Feature feature : reader.iterator() )
+                    intervals.add(genomeLocParser.createGenomeLoc(feature));
+            } catch (Exception e) {
+                throw new UserException.MalformedFile(featureIntervals.getSource(), "Problem reading the interval file", e);
+            }
+
+        } else {
+            intervals = IntervalUtils.parseIntervalArguments(genomeLocParser, stringIntervals);
+        }
+
+        Collections.sort(intervals);
+        return intervals;
+    }
+
+    public String toString() {
+        return getSource();
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/commandline/MissingArgumentValueException.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/commandline/MissingArgumentValueException.java
new file mode 100644
index 0000000..f71aec7
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/commandline/MissingArgumentValueException.java
@@ -0,0 +1,50 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.commandline;
+
+import org.broadinstitute.gatk.utils.Utils;
+
+/**
+ * Specifies that a value was missing when attempting to populate an argument.
+ */
+public class MissingArgumentValueException extends ArgumentException {
+    public MissingArgumentValueException( ArgumentDefinition... missingArguments ) {
+        super( formatArguments(missingArguments) );
+    }
+
+    private static String formatArguments( ArgumentDefinition... missingArguments ) {
+        StringBuilder sb = new StringBuilder();
+        for( ArgumentDefinition missingArgument: missingArguments ) {
+            if( missingArgument.shortName != null )
+                sb.append( String.format("%nValue for argument with name '--%s' (-%s) is missing.", missingArgument.fullName, missingArgument.shortName) );
+            else
+                sb.append( String.format("%nValue for argument with name '--%s' is missing.", missingArgument.fullName) );
+            if(missingArgument.validOptions != null)
+                sb.append( String.format("  Valid options are (%s).", Utils.join(",",missingArgument.validOptions)));
+        }
+        return sb.toString();
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/commandline/Multiplex.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/commandline/Multiplex.java
new file mode 100644
index 0000000..199b71e
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/commandline/Multiplex.java
@@ -0,0 +1,44 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.commandline;
+
+import java.lang.annotation.*;
+
+/**
+ * Indicates that the class should be multiplexed according to the rules
+ * specified in the multiplexer.
+ *
+ * @author mhanna
+ * @version 0.1
+ */
+@Documented
+@Inherited
+@Retention(RetentionPolicy.RUNTIME)
+@Target({ElementType.FIELD})
+public @interface Multiplex {
+    public Class<? extends Multiplexer> value();
+    public String[] arguments() default {};
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/commandline/Multiplexer.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/commandline/Multiplexer.java
new file mode 100644
index 0000000..643eae1
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/commandline/Multiplexer.java
@@ -0,0 +1,52 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.commandline;
+
+import java.util.Collection;
+
+/**
+ * An interface for multiplexing output streams.
+ *
+ * @author mhanna
+ * @version 0.1
+ */
+public interface Multiplexer<T> {
+    /**
+     * Generate a list of the potential outputs that can be created as a function of the other
+     * command-line arguments in this class.
+     * @return A collection of unique identifiers for the file multiplex.
+     */
+    public Collection<T> multiplex();
+
+    /**
+     * Transform the given command-line argument into a suitable form specific to this filename.
+     * @param multiplexedEntry Identifies the individual component of the multiplex.  Will be a value in the collection
+     *        passed back by multiplex().
+     * @param argument The actual command-line argument, supplied for transformation.
+     * @return A transformed representation of the command-line argument.
+     */
+    public String transformArgument(final T multiplexedEntry, final String argument);
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/commandline/Output.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/commandline/Output.java
new file mode 100644
index 0000000..d7512f6
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/commandline/Output.java
@@ -0,0 +1,90 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.commandline;
+
+import java.lang.annotation.*;
+
+/**
+ * Annotates fields in objects that should be used as command-line arguments.
+ * Any field annotated with @Argument can appear as a command-line parameter.
+ */
+@Documented
+@Inherited
+@Retention(RetentionPolicy.RUNTIME)
+@Target(ElementType.FIELD)
+public @interface Output {
+    /**
+     * The full name of the command-line argument.  Full names should be
+     * prefixed on the command-line with a double dash (--).
+     * @return Selected full name, or "" to use the default.
+     */
+    String fullName() default "out";
+
+    /**
+     * Specified short name of the command.  Short names should be prefixed
+     * with a single dash.  Argument values can directly abut single-char
+     * short names or be separated from them by a space.
+     * @return Selected short name, or "" for none.
+     */
+    String shortName() default "o";
+
+    /**
+     * Documentation for the command-line argument.  Should appear when the
+     * --help argument is specified.
+     * @return Doc string associated with this command-line argument.
+     */
+    String doc() default "An output file created by the walker.  Will overwrite contents if file exists";
+
+    /**
+     * Is this argument required.  If true, the command-line argument system will
+     * make a best guess for populating this argument based on the type, and will
+     * fail if the type can't be populated.
+     * @return True if the argument is required.  False otherwise.
+     */
+    boolean required() default false;
+
+    /**
+     * If this argument is not required, should it default to use stdout if no
+     * output file is explicitly provided on the command-line?
+     * @return True if the argument should default to stdout.  False otherwise.
+     */
+    boolean defaultToStdout() default true;
+
+    /**
+     * Should this command-line argument be exclusive of others.  Should be
+     * a comma-separated list of names of arguments of which this should be
+     * independent.
+     * @return A comma-separated string listing other arguments of which this
+     *         argument should be independent.
+     */
+    String exclusiveOf() default "";
+
+    /**
+     * Provide a regexp-based validation string.
+     * @return Non-empty regexp for validation, blank otherwise.
+     */
+    String validation() default "";
+}
\ No newline at end of file
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/commandline/ParsedArgs.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/commandline/ParsedArgs.java
new file mode 100644
index 0000000..ba403a1
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/commandline/ParsedArgs.java
@@ -0,0 +1,38 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.commandline;
+
+/**
+ * Represents a collection of parsed arguments for an argument source.
+ *
+ * Useful for printing out help documents.
+ */
+public abstract class ParsedArgs {
+    /**
+     * @return A compact description of the arguments from a provider/source.
+     */
+    public abstract String getDescription();
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/commandline/ParsedListArgs.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/commandline/ParsedListArgs.java
new file mode 100644
index 0000000..0265c8f
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/commandline/ParsedListArgs.java
@@ -0,0 +1,55 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.commandline;
+
+import org.apache.commons.lang.StringUtils;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
+/**
+ * A list of string arguments, usually from the command line or an args list file.
+ */
+public class ParsedListArgs extends ParsedArgs {
+    // Backing store; contents are copied in, never aliased to caller lists.
+    private final List<String> args = new ArrayList<String>();
+
+    /**
+     * Creates an empty argument list.
+     */
+    public ParsedListArgs() {
+    }
+
+    /**
+     * Creates an argument list seeded with a copy of the supplied arguments.
+     * @param args Initial arguments; copied, so later caller mutation has no effect here.
+     */
+    public ParsedListArgs(List<String> args) {
+        this.args.addAll(args);
+    }
+
+    /**
+     * Appends the given arguments to this list, in order.
+     * @param args Arguments to append.
+     */
+    public void add(String... args) {
+        this.args.addAll(Arrays.asList(args));
+    }
+
+    @Override
+    public String getDescription() {
+        // Render all arguments as a single space-separated string.
+        return StringUtils.join(this.args, " ");
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/commandline/ParsingEngine.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/commandline/ParsingEngine.java
new file mode 100644
index 0000000..e0fc361
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/commandline/ParsingEngine.java
@@ -0,0 +1,829 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.commandline;
+
+import com.google.java.contract.Requires;
+import org.apache.commons.io.FileUtils;
+import org.apache.log4j.Logger;
+import org.broadinstitute.gatk.utils.Utils;
+import org.broadinstitute.gatk.utils.classloader.JVMUtils;
+import org.broadinstitute.gatk.utils.classloader.PluginManager;
+import org.broadinstitute.gatk.utils.collections.Pair;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+import org.broadinstitute.gatk.utils.exceptions.UserException;
+import org.broadinstitute.gatk.utils.help.ApplicationDetails;
+import org.broadinstitute.gatk.utils.help.HelpFormatter;
+
+import java.io.File;
+import java.io.IOException;
+import java.lang.annotation.Annotation;
+import java.lang.reflect.Field;
+import java.util.*;
+
+/**
+ * A parser for command-line arguments.
+ */
+public class ParsingEngine {
+
+    /**
+     * The loaded argument sources along with their back definitions.
+     */
+    private Map<ArgumentDefinition,ArgumentSource> argumentSourcesByDefinition = new HashMap<ArgumentDefinition,ArgumentSource>();
+
+    /**
+     * A list of defined arguments against which command lines are matched.
+     * Package protected for testing access.
+     */
+    public ArgumentDefinitions argumentDefinitions = new ArgumentDefinitions();
+
+    /**
+     * A list of matches from defined arguments to command-line text.
+     * Indicates as best as possible where command-line text remains unmatched
+     * to existing arguments.
+     */
+    private ArgumentMatches argumentMatches = null;
+
+    /**
+     * Techniques for parsing and for argument lookup.
+     */
+    private List<ParsingMethod> parsingMethods = new ArrayList<ParsingMethod>();
+
+    /**
+     * All of the RodBinding objects we've seen while parsing
+     */
+    private List<RodBinding> rodBindings = new ArrayList<RodBinding>();
+
+    /**
+     * Class reference to the different types of descriptors that the create method can create.
+     * The type of set used must be ordered (but not necessarily sorted).
+     */
+    private static final Set<ArgumentTypeDescriptor> STANDARD_ARGUMENT_TYPE_DESCRIPTORS = new LinkedHashSet<ArgumentTypeDescriptor>( Arrays.asList(new SimpleArgumentTypeDescriptor(),
+            new IntervalBindingArgumentTypeDescriptor(),
+            new RodBindingArgumentTypeDescriptor(),
+            new RodBindingCollectionArgumentTypeDescriptor(),
+            new CompoundArgumentTypeDescriptor(),
+            new MultiplexArgumentTypeDescriptor()) );
+
+    private Set<ArgumentTypeDescriptor> argumentTypeDescriptors = new LinkedHashSet<ArgumentTypeDescriptor>();
+
+    /**
+     * List of tags associated with the given instantiation of the command-line argument.
+     */
+    private final Map<Object,Tags> tags = new IdentityHashMap<Object,Tags>();
+
+    private PluginManager<ParsingEngineArgumentProvider> argumentProviderPluginManager =
+            new PluginManager<ParsingEngineArgumentProvider>(ParsingEngineArgumentProvider.class);
+
+    /**
+     * our log, which we want to capture anything from org.broadinstitute.gatk
+     */
+    protected static Logger logger = Logger.getLogger(ParsingEngine.class);
+
+    /**
+     * Creates a parsing engine for the given command-line program.
+     * @param clp Command-line program whose custom argument type descriptors take
+     *            precedence over the standard descriptors; may be null.
+     */
+    public ParsingEngine( CommandLineProgram clp ) {
+        // Reset global RodBinding naming state so each engine instance starts fresh.
+        RodBinding.resetNameCounter();
+        parsingMethods.add( ParsingMethod.FullNameParsingMethod );
+        parsingMethods.add( ParsingMethod.ShortNameParsingMethod );
+
+        // Order matters here!  Make sure the clp's new type descriptors go in before the original type descriptors.
+        if(clp != null)
+            argumentTypeDescriptors.addAll(clp.getArgumentTypeDescriptors());
+        argumentTypeDescriptors.addAll(STANDARD_ARGUMENT_TYPE_DESCRIPTORS);
+
+        // Register every discovered pluggable argument provider as an argument source.
+        List<Class<? extends ParsingEngineArgumentProvider>> providers = argumentProviderPluginManager.getPlugins();
+        for (Class<? extends ParsingEngineArgumentProvider> provider: providers) {
+            addArgumentSource(provider);
+        }
+    }
+
+    /**
+     * Add a main argument source.  Argument sources are expected to have
+     * any number of fields with an @Argument annotation attached.
+     * Equivalent to addArgumentSource(null, source); a null name marks the
+     * source as the main module.
+     * @param source     An argument source from which to extract command-line arguments.
+     */
+    public void addArgumentSource( Class source ) {
+        addArgumentSource(null, source);
+    }
+
+    /**
+     * @return Matches produced by the most recent call to parse(), or null if
+     *         parse() has not yet been invoked.
+     */
+    public ArgumentMatches getArgumentMatches() {
+        return argumentMatches;
+    }
+
+    /**
+     * Add an argument source.  Argument sources are expected to have
+     * any number of fields with an @Argument annotation attached.
+     * @param sourceName name for this argument source.  'Null' indicates that this source should be treated
+     *                   as the main module.
+     * @param sourceClass A class containing argument sources from which to extract command-line arguments.
+     */
+    public void addArgumentSource( String sourceName, Class sourceClass ) {
+        List<ArgumentDefinition> argumentsFromSource = new ArrayList<ArgumentDefinition>();
+        for( ArgumentSource argumentSource: extractArgumentSources(sourceClass) ) {
+            // A single source field can expand into several definitions.
+            List<ArgumentDefinition> argumentDefinitions = argumentSource.createArgumentDefinitions();
+            for(ArgumentDefinition argumentDefinition: argumentDefinitions) {
+                // Keep the back-reference so validation can later find the owning field.
+                argumentSourcesByDefinition.put(argumentDefinition,argumentSource);
+                argumentsFromSource.add( argumentDefinition );
+            }
+        }
+        argumentDefinitions.add( new ArgumentDefinitionGroup(sourceName, argumentsFromSource) );
+    }
+
+    /**
+     * Do a cursory search to see if an argument with the given name is present.
+     * Note: matches against full argument names only, not short names.
+     * Must be called after parse(); relies on the stored argumentMatches.
+     * @param argumentFullName full name of the argument.
+     * @return True if the argument is present.  False otherwise.
+     */
+    public boolean isArgumentPresent( String argumentFullName ) {
+        ArgumentDefinition definition =
+                argumentDefinitions.findArgumentDefinition(argumentFullName,ArgumentDefinitions.FullNameDefinitionMatcher);
+        return argumentMatches.hasMatch(definition);
+
+    }
+
+    /**
+     * Parse the given set of command-line arguments, returning
+     * an ArgumentMatches object describing the best fit of these
+     * command-line arguments to the arguments that are actually
+     * required.
+     * @param tokens Tokens passed on the command line.
+     * @return The parsed arguments by file.
+     */
+    public SortedMap<ArgumentMatchSource, ParsedArgs> parse( String[] tokens ) {
+        argumentMatches = new ArgumentMatches();
+        SortedMap<ArgumentMatchSource, ParsedArgs> parsedArgs = new TreeMap<ArgumentMatchSource, ParsedArgs>();
+
+        // First pass: match the raw command-line tokens.
+        List<String> cmdLineTokens = Arrays.asList(tokens);
+        parse(ArgumentMatchSource.COMMAND_LINE, cmdLineTokens, argumentMatches, parsedArgs);
+
+        List<ParsingEngineArgumentProvider> providers = argumentProviderPluginManager.createAllTypes();
+
+        for (ParsingEngineArgumentProvider provider: providers) {
+            // Load the arguments ONLY into the provider.
+            // Validation may optionally run on the rest of the arguments.
+            loadArgumentsIntoObject(provider);
+        }
+
+        // Second pass: let each provider contribute additional arguments into parsedArgs.
+        for (ParsingEngineArgumentProvider provider: providers) {
+            provider.parse(this, parsedArgs);
+        }
+
+        return parsedArgs;
+    }
+
+    /**
+     * Parses a list of raw string tokens from a single source, merging results into
+     * the supplied match collection and recording the raw args for the source.
+     * @param matchSource Origin of the tokens (command line or a file).
+     * @param tokens Raw tokens to parse, in order.
+     * @param argumentMatches Accumulator for matches; updated in place.
+     * @param parsedArgs Map of source to its raw arguments; this source's entry is set here.
+     */
+    public void parse(ArgumentMatchSource matchSource, List<String> tokens,
+                         ArgumentMatches argumentMatches, SortedMap<ArgumentMatchSource, ParsedArgs> parsedArgs) {
+        // Sentinel site (index -1) meaning "no argument seen yet".
+        ArgumentMatchSite lastArgumentMatchSite = new ArgumentMatchSite(matchSource, -1);
+
+        int i = 0;
+        for (String token: tokens) {
+            // If the token is of argument form, parse it into its own argument match.
+            // Otherwise, pair it with the most recently used argument discovered.
+            ArgumentMatchSite site = new ArgumentMatchSite(matchSource, i);
+            if( isArgumentForm(token) ) {
+                ArgumentMatch argumentMatch = parseArgument( token, site );
+                if( argumentMatch != null ) {
+                    argumentMatches.mergeInto( argumentMatch );
+                    lastArgumentMatchSite = site;
+                }
+            }
+            else {
+                if( argumentMatches.hasMatch(lastArgumentMatchSite) &&
+                        !argumentMatches.getMatch(lastArgumentMatchSite).hasValueAtSite(lastArgumentMatchSite))
+                    argumentMatches.getMatch(lastArgumentMatchSite).addValue( lastArgumentMatchSite, new ArgumentMatchStringValue(token) );
+                else
+                    // No argument to attach this value to; record as orphaned for later validation.
+                    argumentMatches.MissingArgument.addValue( site, new ArgumentMatchStringValue(token) );
+
+            }
+            i++;
+        }
+
+        parsedArgs.put(matchSource, new ParsedListArgs(tokens));
+    }
+
+    /**
+     * Parses pre-split (name, value) pairs from a single source, merging results into
+     * the supplied match collection.
+     * @param matchSource Origin of the pairs.
+     * @param tokens Pairs of argument name (full or short) and value.
+     * @param argumentMatches Accumulator for matches; updated in place.
+     * @param matchSourceArgs Parsed-args description recorded for this source.
+     * @param parsedArgs Map of source to parsed args; this source's entry is set here.
+     */
+    public void parsePairs(ArgumentMatchSource matchSource, List<Pair<String, ArgumentMatchValue>> tokens,
+                         ArgumentMatches argumentMatches, ParsedArgs matchSourceArgs,
+                         SortedMap<ArgumentMatchSource, ParsedArgs> parsedArgs) {
+        int i = 0;
+        for (Pair<String, ArgumentMatchValue> pair: tokens) {
+
+            ArgumentMatchSite site = new ArgumentMatchSite(matchSource, i);
+            // Try full names first, then short names.
+            List<DefinitionMatcher> matchers = Arrays.asList(ArgumentDefinitions.FullNameDefinitionMatcher, ArgumentDefinitions.ShortNameDefinitionMatcher);
+            ArgumentDefinition definition = null;
+            for (DefinitionMatcher matcher: matchers) {
+                definition = argumentDefinitions.findArgumentDefinition( pair.getFirst(), matcher );
+                if (definition != null)
+                    break;
+            }
+            // Unknown names are skipped silently; note the site index only advances
+            // on a successful match (presumably intentional -- TODO confirm).
+            if (definition == null)
+                continue;
+            ArgumentMatch argumentMatch = new ArgumentMatch(pair.getFirst(), definition, site, new Tags());
+            argumentMatches.mergeInto(argumentMatch);
+            argumentMatch.addValue(site, pair.getSecond());
+            i++;
+        }
+
+        parsedArgs.put(matchSource, matchSourceArgs);
+    }
+
+    /**
+     * Reads an argument list from the given file.  Only files with a '.list'
+     * extension are accepted.
+     * @param file File containing arguments.
+     * @return The arguments read from the file.
+     * @throws UserException.CouldNotReadInputFile if the file cannot be read or
+     *         does not have a .list extension.
+     */
+    protected List<String> getArguments(File file) {
+        try {
+            if (file.getAbsolutePath().endsWith(".list")) {
+                return getListArguments(file);
+            }
+        } catch (IOException e) {
+            throw new UserException.CouldNotReadInputFile(file, e);
+        }
+        throw new UserException.CouldNotReadInputFile(file, "file extension is not .list");
+    }
+
+    /**
+     * Reads and tokenizes every line of the given args list file.
+     * @param file The .list file to read.
+     * @return All tokens from the file, in file order.
+     * @throws IOException if the file cannot be read.
+     */
+    private List<String> getListArguments(File file) throws IOException {
+        ArrayList<String> argsList = new ArrayList<String>();
+        for (String line: FileUtils.readLines(file))
+            // Tokenize each line via Utils.escapeExpressions.
+            argsList.addAll(Arrays.asList(Utils.escapeExpressions(line)));
+        return argsList;
+    }
+
+    /**
+     * Categories of validation check performed by validate(); pass a set of these
+     * to validate(EnumSet) to selectively skip checks.
+     */
+    public enum ValidationType { MissingRequiredArgument,
+                                 InvalidArgument,
+                                 InvalidArgumentValue,
+                                 ValueMissingArgument,
+                                 TooManyValuesForArgument,
+                                 MutuallyExclusive }
+
+    /**
+     * Validates the list of command-line argument matches.
+     * Runs every validation check (skips none); throws on the first failing category.
+     */
+    public void validate() {
+        validate( EnumSet.noneOf(ValidationType.class) );
+    }
+
+    /**
+     * Validates the list of command-line argument matches.  On failure throws an exception with detailed info about the
+     * particular failures.  Takes an EnumSet indicating which validation checks to skip.
+     * Checks run in a fixed order; the first failing category throws and aborts the rest.
+     * @param skipValidationOf List of validation checks to skip.
+     */
+    public void validate( EnumSet<ValidationType> skipValidationOf ) {
+        // Find missing required arguments.
+        if( !skipValidationOf.contains(ValidationType.MissingRequiredArgument) ) {
+            Collection<ArgumentDefinition> requiredArguments =
+                    argumentDefinitions.findArgumentDefinitions( true, ArgumentDefinitions.RequiredDefinitionMatcher );
+            Collection<ArgumentDefinition> missingArguments = new ArrayList<ArgumentDefinition>();
+            for( ArgumentDefinition requiredArgument: requiredArguments ) {
+                if( !argumentMatches.hasMatch(requiredArgument) )
+                    missingArguments.add( requiredArgument );
+            }
+
+            if( missingArguments.size() > 0 )
+                throw new MissingArgumentException( missingArguments );
+        }
+
+        // Find invalid arguments.  Invalid arguments will have a null argument definition.
+        if( !skipValidationOf.contains(ValidationType.InvalidArgument) ) {
+            ArgumentMatches invalidArguments = argumentMatches.findUnmatched();
+            if( invalidArguments.size() > 0 )
+                throw new InvalidArgumentException( invalidArguments );
+        }
+
+        // Find invalid argument values -- invalid arguments are either completely missing or fail the specified 'validation' regular expression.
+        if( !skipValidationOf.contains(ValidationType.InvalidArgumentValue) ) {
+            Collection<ArgumentDefinition> verifiableArguments = 
+                    argumentDefinitions.findArgumentDefinitions( null, ArgumentDefinitions.VerifiableDefinitionMatcher );
+            Collection<Pair<ArgumentDefinition,String>> invalidValues = new ArrayList<Pair<ArgumentDefinition,String>>();
+            for( ArgumentDefinition verifiableArgument: verifiableArguments ) {
+                ArgumentMatches verifiableMatches = argumentMatches.findMatches( verifiableArgument );
+                // Check to see whether an argument value was specified.  Argument values must be provided
+                // when the argument name is specified and the argument is not a flag type.
+                for(ArgumentMatch verifiableMatch: verifiableMatches) {
+                    ArgumentSource argumentSource = argumentSourcesByDefinition.get(verifiableArgument);
+                    if(verifiableMatch.values().size() == 0 && !verifiableArgument.isFlag && !argumentSource.createsTypeDefault())
+                        invalidValues.add(new Pair<ArgumentDefinition,String>(verifiableArgument,null));
+                }
+
+                // Ensure that the field contents meet the validation criteria specified by the regular expression.
+                for( ArgumentMatch verifiableMatch: verifiableMatches ) {
+                    for( ArgumentMatchValue value: verifiableMatch.values() ) {
+                        if( verifiableArgument.validation != null && !value.asString().matches(verifiableArgument.validation) )
+                            invalidValues.add( new Pair<ArgumentDefinition,String>(verifiableArgument, value.asString()) );
+                    }
+                }
+            }
+
+            if( invalidValues.size() > 0 )
+                throw new InvalidArgumentValueException( invalidValues );
+        }
+
+        // Find values without an associated mate.
+        if( !skipValidationOf.contains(ValidationType.ValueMissingArgument) ) {
+            if( argumentMatches.MissingArgument.values().size() > 0 )
+                throw new UnmatchedArgumentException( argumentMatches.MissingArgument );
+        }
+
+        // Find arguments with too many values.
+        if( !skipValidationOf.contains(ValidationType.TooManyValuesForArgument)) {
+            Collection<ArgumentMatch> overvaluedArguments = new ArrayList<ArgumentMatch>();
+            for( ArgumentMatch argumentMatch: argumentMatches.findSuccessfulMatches() ) {
+                // Warning: assumes that definition is not null (asserted by checks above).
+                if( !argumentMatch.definition.isMultiValued && argumentMatch.values().size() > 1 )
+                    overvaluedArguments.add(argumentMatch);
+            }
+
+            if( !overvaluedArguments.isEmpty() )
+                throw new TooManyValuesForArgumentException(overvaluedArguments);
+        }
+
+        // Find sets of options that are supposed to be mutually exclusive.
+        if( !skipValidationOf.contains(ValidationType.MutuallyExclusive)) {
+            Collection<Pair<ArgumentMatch,ArgumentMatch>> invalidPairs = new ArrayList<Pair<ArgumentMatch,ArgumentMatch>>();
+            for( ArgumentMatch argumentMatch: argumentMatches.findSuccessfulMatches() ) {
+                if( argumentMatch.definition.exclusiveOf != null ) {
+                    // Quadratic scan over successful matches; argument counts are small in practice.
+                    for( ArgumentMatch conflictingMatch: argumentMatches.findSuccessfulMatches() ) {
+                        // Skip over the current element.
+                        if( argumentMatch == conflictingMatch )
+                            continue;
+                        if( argumentMatch.definition.exclusiveOf.equals(conflictingMatch.definition.fullName) ||
+                            argumentMatch.definition.exclusiveOf.equals(conflictingMatch.definition.shortName))
+                            invalidPairs.add( new Pair<ArgumentMatch,ArgumentMatch>(argumentMatch, conflictingMatch) );
+                    }
+                }
+            }
+
+            if( !invalidPairs.isEmpty() )
+                throw new ArgumentsAreMutuallyExclusiveException( invalidPairs );
+        }
+    }
+
+    /**
+     * Loads a set of matched command-line arguments into the given object.
+     * This overload enforces argument range constraints (min/max annotation values).
+     * @param object Object into which to add arguments.
+     */
+    public void loadArgumentsIntoObject( Object object ) {
+        loadArgumentsIntoObject(object, true);
+    }
+
+    /**
+     * Loads a set of matched command-line arguments into the given object.
+     * @param object Object into which to add arguments.
+     * @param enforceArgumentRanges If true, check that the argument value is within the range specified
+     *                              in the corresponding Argument annotation by min/max value attributes. This
+     *                              check is only performed for numeric types, and only when a min and/or
+     *                              max value is actually defined in the annotation. It is also only performed
+     *                              for values actually specified on the command line, and not for default values.
+     */
+    public void loadArgumentsIntoObject( Object object, boolean enforceArgumentRanges ) {
+        List<ArgumentSource> argumentSources = extractArgumentSources(object.getClass());
+
+        List<ArgumentSource> dependentArguments = new ArrayList<ArgumentSource>();
+
+        for( ArgumentSource argumentSource: argumentSources ) {
+            // A deprecated argument that was actually specified aborts with an error.
+            if(argumentSource.isDeprecated() && argumentMatches.findMatches(this,argumentSource).size() > 0)
+                notifyDeprecatedCommandLineArgument(argumentSource);
+
+            // If this argument source depends on other command-line arguments, skip it and make a note to process it later.
+            if(argumentSource.isDependent()) {
+                dependentArguments.add(argumentSource);
+                continue;
+            }
+            loadValueIntoObject(argumentSource, object, argumentMatches.findMatches(this,argumentSource), enforceArgumentRanges);
+        }
+
+        // Process dependent arguments last, after the arguments they depend on are loaded.
+        for(ArgumentSource dependentArgument: dependentArguments) {
+            MultiplexArgumentTypeDescriptor dependentDescriptor = dependentArgument.createDependentTypeDescriptor(this,object);
+            ArgumentSource dependentSource = dependentArgument.copyWithCustomTypeDescriptor(dependentDescriptor);
+            loadValueIntoObject(dependentSource,object,argumentMatches.findMatches(this,dependentSource), enforceArgumentRanges);
+        }
+    }
+
+    /**
+     * Associates the given tags with the given key, overwriting any previous tags.
+     * (Keys are compared by identity -- the backing map is an IdentityHashMap.)
+     * @param key The object to tag.
+     * @param tags List of tags, or empty list if no tags are present.
+     */
+    public void addTags(Object key, final Tags tags) {
+        this.tags.put(key,tags);        
+    }
+
+    /**
+     * Gets the tags associated with a given object.
+     * @param key Key for which to find a tag.
+     * @return Tags associated with this key, or a fresh empty Tags object if none.
+     */
+    public Tags getTags(Object key)  {
+        if(!tags.containsKey(key))
+            return new Tags();
+        return tags.get(key);
+    }
+
+    /**
+     * Add a RodBinding type argument to this parser.  Called during parsing to allow
+     * us to track all of the RodBindings discovered in the command line.
+     * @param rodBinding the rodbinding to add.  Must not be added twice
+     *                   (uniqueness is not enforced here; callers must guarantee it).
+     */
+    @Requires("rodBinding != null")
+    public void addRodBinding(final RodBinding rodBinding) {
+        rodBindings.add(rodBinding);
+    }
+
+    /**
+     * Notify the user that a deprecated command-line argument has been used.
+     * Never returns normally: throws UserException.DeprecatedArgument naming the
+     * argument, or ReviewedGATKException if the source yields no definitions.
+     * @param argumentSource Deprecated argument source specified by user.
+     */
+    private void notifyDeprecatedCommandLineArgument(ArgumentSource argumentSource) {
+        // Grab the first argument definition and report that one as the failure.  Theoretically, we should notify of all failures.
+        List<ArgumentDefinition> definitions = argumentSource.createArgumentDefinitions();
+        if(definitions.size() < 1)
+            throw new ReviewedGATKException("Internal error.  Argument source creates no definitions.");
+        ArgumentDefinition definition = definitions.get(0);
+        throw new UserException.DeprecatedArgument(definition.fullName,definition.doc);
+    }
+
+    /**
+     * Loads a single argument into the object and that objects children.
+     * @param source Argument source to load into the object.
+     * @param instance Object into which to inject the value.  The target might be in a container within the instance.
+     * @param argumentMatches Argument matches to load into the object.
+     * @param enforceArgumentRanges If true, check that the argument value is within the range specified
+     *                              in the corresponding Argument annotation by min/max value attributes. This
+     *                              check is only performed for numeric types, and only when a min and/or
+     *                              max value is actually defined in the annotation. It is also only performed
+     *                              for values actually specified on the command line, and not for default values.
+     */
+    private void loadValueIntoObject( ArgumentSource source, Object instance, ArgumentMatches argumentMatches, boolean enforceArgumentRanges ) {
+        // Nothing to load
+        if( argumentMatches.size() == 0 && ! source.createsTypeDefault() )
+            return;
+
+        // Target instance into which to inject the value.
+        Collection<Object> targets = findTargets( source, instance );
+
+        // Abort if no home is found for the object.
+        if( targets.size() == 0 )
+            throw new ReviewedGATKException("Internal command-line parser error: unable to find a home for argument matches " + argumentMatches);
+
+        for( Object target: targets ) {
+            Object value;
+            boolean usedTypeDefault = false;
+            if ( argumentMatches.size() != 0 ) {
+                value = source.parse(this,argumentMatches);
+            }
+            else {
+                value = source.createTypeDefault(this);
+                usedTypeDefault = true;
+            }
+
+            // Only check argument ranges if a check was requested AND we used a value from the command line rather
+            // than the type default
+            if ( enforceArgumentRanges && ! usedTypeDefault ) {
+                checkArgumentRange(source, value);
+            }
+
+            JVMUtils.setFieldValue(source.field,target,value);
+        }
+    }
+
+    /**
+     * Check the provided value against any range constraints specified in the Argument annotation
+     * for the corresponding field. Throw an exception if hard limits are violated, or emit a warning
+     * if soft limits are violated.
+     *
+     * Only checks numeric types (int, double, etc.)
+     * Only checks fields with an actual @Argument annotation
+     * Only checks manually-specified constraints (there are no default constraints).
+     *
+     * @param argumentSource The source field for the command-line argument
+     * @param argumentValue The value we're considering putting in that source field
+     */
+    private void checkArgumentRange( final ArgumentSource argumentSource, final Object argumentValue ) {
+        // Only validate numeric types
+        if ( ! (argumentValue instanceof Number) ) {
+            return;
+        }
+        final double argumentDoubleValue = ((Number)argumentValue).doubleValue();
+
+        // Only validate fields with an @Argument annotation
+        final Annotation argumentAnnotation = argumentSource.field.getAnnotation(Argument.class);
+        if ( argumentAnnotation == null ) {
+            return;
+        }
+
+        // Infinity values act as "no limit specified" sentinels below.
+        final double minValue = (Double)CommandLineUtils.getValue(argumentAnnotation, "minValue");
+        final double maxValue = (Double)CommandLineUtils.getValue(argumentAnnotation, "maxValue");
+        final double minRecommendedValue = (Double)CommandLineUtils.getValue(argumentAnnotation, "minRecommendedValue");
+        final double maxRecommendedValue = (Double)CommandLineUtils.getValue(argumentAnnotation, "maxRecommendedValue");
+        final String argumentName = (String)CommandLineUtils.getValue(argumentAnnotation, "fullName");
+
+        // Check hard limits first, if specified
+        if ( minValue != Double.NEGATIVE_INFINITY && argumentDoubleValue < minValue ) {
+            throw new ArgumentValueOutOfRangeException(argumentName, argumentDoubleValue, minValue, "minimum");
+        }
+
+        if ( maxValue != Double.POSITIVE_INFINITY && argumentDoubleValue > maxValue ) {
+            throw new ArgumentValueOutOfRangeException(argumentName, argumentDoubleValue, maxValue, "maximum");
+        }
+
+        // Then check soft limits, if specified
+        if ( minRecommendedValue != Double.NEGATIVE_INFINITY && argumentDoubleValue < minRecommendedValue ) {
+            logger.warn(String.format("WARNING: argument --%s has value %.2f, but minimum recommended value is %.2f",
+                        argumentName, argumentDoubleValue, minRecommendedValue));
+        }
+
+        if ( maxRecommendedValue != Double.POSITIVE_INFINITY && argumentDoubleValue > maxRecommendedValue ) {
+            logger.warn(String.format("WARNING: argument --%s has value %.2f, but maximum recommended value is %.2f",
+                        argumentName, argumentDoubleValue, maxRecommendedValue));
+        }
+    }
+
+    public Collection<RodBinding> getRodBindings() {
+        return Collections.unmodifiableCollection(rodBindings);
+    }
+
+    /**
+     * Gets a collection of the container instances of the given type stored within the given target.
+     * @param source Argument source.
+     * @param instance Container.
+     * @return A collection of containers matching the given argument source.
+     */
+    private Collection<Object> findTargets(ArgumentSource source, Object instance) {
+        LinkedHashSet<Object> targets = new LinkedHashSet<Object>();
+        for( Class clazz = instance.getClass(); clazz != null; clazz = clazz.getSuperclass() ) {
+            for( Field field: clazz.getDeclaredFields() ) {
+                if( field.equals(source.field) ) {
+                    targets.add(instance);
+                } else if( field.isAnnotationPresent(ArgumentCollection.class) ) {
+                    targets.addAll(findTargets(source, JVMUtils.getFieldValue(field, instance)));
+                }
+            }
+        }
+        return targets;
+    }
+
+    /**
+     * Prints out the help associated with these command-line argument definitions.
+     * @param applicationDetails Details about the specific GATK-based application being run.
+     */
+    public void printHelp( ApplicationDetails applicationDetails ) {
+        new HelpFormatter().printHelp(applicationDetails,argumentDefinitions);
+    }
+
+    /**
+     * Extract all the argument sources from a given object.
+     * @param sourceClass class to act as sources for other arguments.
+     * @return A list of sources associated with this object and its aggregated objects.
+     */
+    public List<ArgumentSource> extractArgumentSources(Class sourceClass) {
+        return extractArgumentSources(sourceClass, new Field[0]);
+    }
+
+    /**
+     * Fetch the best command-line argument descriptor for the given class.
+     * @param type Class for which to specify a descriptor.
+     * @return descriptor for the given type.
+     */
+    public ArgumentTypeDescriptor selectBestTypeDescriptor(Class type) {
+        return ArgumentTypeDescriptor.selectBest(argumentTypeDescriptors,type);
+    }
+
+    private List<ArgumentSource> extractArgumentSources(Class sourceClass, Field[] parentFields) {
+        // now simply call into the truly general routine extract argument bindings but with a null
+        // object so bindings aren't computed
+        Map<ArgumentSource, Object> bindings = extractArgumentBindings(null, sourceClass, parentFields);
+        return new ArrayList<ArgumentSource>(bindings.keySet());
+    }
+
+    public Map<ArgumentSource, Object> extractArgumentBindings(Object obj) {
+        if ( obj == null ) throw new IllegalArgumentException("Incoming object cannot be null");
+        return extractArgumentBindings(obj, obj.getClass(), new Field[0]);
+    }
+
+    /**
+     * Extract all the argument sources from a given object, along with their bindings if obj != null .
+     * @param obj the object corresponding to the sourceClass
+     * @param sourceClass class to act as sources for other arguments.
+     * @param parentFields Parent Fields
+     * @return A map of sources associated with this object and its aggregated objects and bindings to their bindings values
+     */
+    private Map<ArgumentSource, Object> extractArgumentBindings(Object obj, Class sourceClass, Field[] parentFields) {
+        Map<ArgumentSource, Object> bindings = new LinkedHashMap<ArgumentSource, Object>();
+
+        while( sourceClass != null ) {
+            Field[] fields = sourceClass.getDeclaredFields();
+            for( Field field: fields ) {
+                if( ArgumentTypeDescriptor.isArgumentAnnotationPresent(field) ) {
+                    Object val = obj != null ? JVMUtils.getFieldValue(field, obj) : null;
+                    bindings.put( new ArgumentSource(parentFields, field, selectBestTypeDescriptor(field.getType())), val );
+                }
+                if( field.isAnnotationPresent(ArgumentCollection.class) ) {
+                    Object val = obj != null ? JVMUtils.getFieldValue(field, obj) : null;
+                    Field[] newParentFields = Arrays.copyOf(parentFields, parentFields.length + 1);
+                    newParentFields[parentFields.length] = field;
+                    bindings.putAll( extractArgumentBindings(val, field.getType(), newParentFields) );
+                }
+            }
+
+            sourceClass = sourceClass.getSuperclass();
+        }
+
+        return bindings;
+    }
+
+    /**
+     * Determines whether a token looks like the name of an argument.
+     * @param token Token to inspect.  Can be surrounded by whitespace.
+     * @return True if the token matches any recognized argument form (short or full name).
+     */
+    private boolean isArgumentForm( String token ) {
+        for( ParsingMethod parsingMethod: parsingMethods ) {
+            if( parsingMethod.matches(token) )
+                return true;
+        }
+
+        return false;
+    }
+
+    /**
+     * Parse a command-line token into an ArgumentMatch.
+     * @param token The token to parse.  The token should pass the isArgumentForm test.
+     * @param position The position of the token in question.
+     * @return ArgumentMatch associated with this token, or null if no match exists.
+     */    
+    private ArgumentMatch parseArgument( String token, ArgumentMatchSite position ) {
+        if( !isArgumentForm(token) )
+            throw new IllegalArgumentException( "Token is not recognizable as an argument: " + token );
+
+        for( ParsingMethod parsingMethod: parsingMethods ) {
+            if( parsingMethod.matches( token ) )
+                return parsingMethod.match( argumentDefinitions, token, position );
+        }
+
+        // No parse results found.
+        return null;
+    }
+}
+
+/**
+ * An exception indicating that some required arguments are missing.
+ */
+class MissingArgumentException extends ArgumentException {
+    public MissingArgumentException( Collection<ArgumentDefinition> missingArguments ) {
+        super( formatArguments(missingArguments) );
+    }
+
+    private static String formatArguments( Collection<ArgumentDefinition> missingArguments ) {
+        StringBuilder sb = new StringBuilder();
+        for( ArgumentDefinition missingArgument: missingArguments ) {
+            if( missingArgument.shortName != null )
+                sb.append( String.format("%nArgument with name '--%s' (-%s) is missing.", missingArgument.fullName, missingArgument.shortName) );
+            else
+                sb.append( String.format("%nArgument with name '--%s' is missing.", missingArgument.fullName) );
+        }
+        return sb.toString();
+    }
+}
+
+/**
+ * An exception for undefined arguments.
+ */
+class InvalidArgumentException extends ArgumentException {
+    public InvalidArgumentException( ArgumentMatches invalidArguments ) {
+        super( formatArguments(invalidArguments) );
+    }
+
+    private static String formatArguments( ArgumentMatches invalidArguments ) {
+        StringBuilder sb = new StringBuilder();
+        for( ArgumentMatch invalidArgument: invalidArguments )
+            sb.append( String.format("%nArgument with name '%s' isn't defined.", invalidArgument.label) );
+        return sb.toString();
+    }
+}
+
+/**
+ * An exception for values whose format is invalid.
+ */
+class InvalidArgumentValueException extends ArgumentException {
+    public InvalidArgumentValueException( Collection<Pair<ArgumentDefinition,String>> invalidArgumentValues ) {
+        super( formatArguments(invalidArgumentValues) );
+    }
+
+    private static String formatArguments( Collection<Pair<ArgumentDefinition,String>> invalidArgumentValues ) {
+        StringBuilder sb = new StringBuilder();
+        for( Pair<ArgumentDefinition,String> invalidValue: invalidArgumentValues ) {
+            if(invalidValue.getSecond() == null)
+                sb.append( String.format("%nArgument '--%s' requires a value but none was provided",
+                                         invalidValue.first.fullName) );
+            else
+                sb.append( String.format("%nArgument '--%s' has value of incorrect format: %s (should match %s)",
+                        invalidValue.first.fullName,
+                        invalidValue.second,
+                        invalidValue.first.validation) );
+        }
+        return sb.toString();
+    }
+}
+
+class ArgumentValueOutOfRangeException extends ArgumentException {
+    public ArgumentValueOutOfRangeException( final String argumentName, final double argumentActualValue,
+                                             final double argumentBoundaryValue, final String argumentBoundaryType ) {
+        super(String.format("Argument --%s has value %.2f, but %s allowed value is %.2f",
+                            argumentName, argumentActualValue, argumentBoundaryType, argumentBoundaryValue));
+    }
+}
+
+/**
+ * An exception for values that can't be mated with any argument.
+ */
+class UnmatchedArgumentException extends ArgumentException {
+    public UnmatchedArgumentException( ArgumentMatch invalidValues ) {
+        super( formatArguments(invalidValues) );
+    }
+
+    private static String formatArguments( ArgumentMatch invalidValues ) {
+        StringBuilder sb = new StringBuilder();
+        for( ArgumentMatchSite site: invalidValues.sites.keySet() )
+            for( ArgumentMatchValue value: invalidValues.sites.get(site) ) {
+                switch (site.getSource().getType()) {
+                    case CommandLine:
+                        sb.append( String.format("%nInvalid argument value '%s' at position %d.",
+                                value.asString(), site.getIndex()) );
+                        break;
+                    case Provider:
+                        sb.append( String.format("%nInvalid argument value '%s' in %s at position %d.",
+                                value.asString(), site.getSource().getDescription(), site.getIndex()) );
+                        break;
+                    default:
+                        throw new RuntimeException( String.format("Unexpected argument match source type: %s",
+                                site.getSource().getType()));
+                }
+                if(value.asString() != null && Utils.dupString(' ',value.asString().length()).equals(value.asString()))
+                    sb.append("  Please make sure any line continuation backslashes on your command line are not followed by whitespace.");
+            }
+        return sb.toString();
+    }
+}
+
+/**
+ * An exception indicating that too many values have been provided for the given argument.
+ */
+class TooManyValuesForArgumentException extends ArgumentException {
+    public TooManyValuesForArgumentException( Collection<ArgumentMatch> arguments ) {
+        super( formatArguments(arguments) );
+    }
+
+    private static String formatArguments( Collection<ArgumentMatch> arguments ) {
+        StringBuilder sb = new StringBuilder();
+        for( ArgumentMatch argument: arguments )
+            sb.append( String.format("%nArgument '%s' has too many values: %s.", argument.label, Arrays.deepToString(argument.values().toArray())) );
+        return sb.toString();
+    }
+}
+
+/**
+ * An exception indicating that mutually exclusive options have been passed in the same command line.
+ */
+class ArgumentsAreMutuallyExclusiveException extends ArgumentException {
+    public ArgumentsAreMutuallyExclusiveException( Collection<Pair<ArgumentMatch,ArgumentMatch>> arguments ) {
+        super( formatArguments(arguments) );
+    }
+
+    private static String formatArguments( Collection<Pair<ArgumentMatch,ArgumentMatch>> arguments ) {
+        StringBuilder sb = new StringBuilder();
+        for( Pair<ArgumentMatch,ArgumentMatch> argument: arguments )
+            sb.append( String.format("%nArguments '%s' and '%s' are mutually exclusive.", argument.first.definition.fullName, argument.second.definition.fullName ) );
+        return sb.toString();
+    }
+
+}
+
+
+/**
+ * An exception for when an argument doesn't match any of the enumerated options for that variable type
+ */
+class UnknownEnumeratedValueException extends ArgumentException {
+    public UnknownEnumeratedValueException(ArgumentDefinition definition, String argumentPassed) {
+        super( formatArguments(definition,argumentPassed) );
+    }
+
+    private static String formatArguments(ArgumentDefinition definition, String argumentPassed) {
+        return String.format("Invalid value %s specified for argument %s; valid options are (%s).", argumentPassed, definition.fullName, Utils.join(",",definition.validOptions));
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/commandline/ParsingEngineArgumentFiles.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/commandline/ParsingEngineArgumentFiles.java
new file mode 100644
index 0000000..7d87882
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/commandline/ParsingEngineArgumentFiles.java
@@ -0,0 +1,55 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.commandline;
+
+import java.io.File;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.SortedMap;
+
+/**
+ * Container class to store the list of argument files.
+ * The files will be parsed after the command line arguments.
+ */
+public class ParsingEngineArgumentFiles extends ParsingEngineArgumentProvider {
+    @Argument(fullName = "arg_file", shortName = "args", doc = "Reads arguments from the specified file", required = false)
+    public List<File> files = new ArrayList<File>();
+
+    @Override
+    public void parse(ParsingEngine parsingEngine, SortedMap<ArgumentMatchSource, ParsedArgs> parsedArgs) {
+        ArgumentMatches argumentMatches = parsingEngine.getArgumentMatches();
+        for (File file: this.files) {
+            List<String> fileTokens = parsingEngine.getArguments(file);
+            parsingEngine.parse(new ArgumentMatchFileSource(file), fileTokens, argumentMatches, parsedArgs);
+        }
+    }
+}
+
+class ArgumentMatchFileSource extends ArgumentMatchSource {
+    ArgumentMatchFileSource(File file) {
+        super("file " + file.getAbsolutePath());
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/commandline/ParsingEngineArgumentProvider.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/commandline/ParsingEngineArgumentProvider.java
new file mode 100644
index 0000000..b5d5add
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/commandline/ParsingEngineArgumentProvider.java
@@ -0,0 +1,37 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.commandline;
+
+import java.util.List;
+import java.util.SortedMap;
+
+/**
+ * A class that can parse arguments for the engine
+ */
+public abstract class ParsingEngineArgumentProvider {
+    public abstract void parse(ParsingEngine parsingEngine, SortedMap<ArgumentMatchSource, ParsedArgs> parsedArgs);
+}
+
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/commandline/ParsingMethod.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/commandline/ParsingMethod.java
new file mode 100644
index 0000000..5a4c99c
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/commandline/ParsingMethod.java
@@ -0,0 +1,127 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.commandline;
+
+import org.broadinstitute.gatk.utils.Utils;
+
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+/**
+ * Holds a pattern, along with how to get to the argument definitions that could match that pattern.
+ */
+public abstract class ParsingMethod {
+    private final Pattern pattern;
+    private final DefinitionMatcher definitionMatcher;
+
+    /**
+     * Create a new parsing method with the given identifying / validating pattern and definition matcher.
+     * @param pattern The pattern
+     * @param definitionMatcher The definition matcher.
+     */
+    private ParsingMethod( Pattern pattern, DefinitionMatcher definitionMatcher ) {
+        this.pattern = pattern;
+        this.definitionMatcher = definitionMatcher;
+    }
+
+    /**
+     * Can the given token be parsed by this parsing method?
+     * @param token Token to validate.
+     * @return True if the given token matches.
+     */
+    public boolean matches( String token ) {
+        Matcher matcher = pattern.matcher(token);
+        return matcher.matches();        
+    }
+
+    /**
+     * Find the best match for a given token at a given position from among the provided
+     * argument definitions.
+     * @param definitions List of argument definitions.
+     * @param token The token from the command line to match.  Should be validated using
+     *              ParsingMethod's matches() tester.
+     * @param position Position at which this command-line argument occurs.  Will be used
+     *                 for validation later.
+     * @return An argument match.  Definition field will be populated if a match was found or
+     *         empty if no appropriate definition could be found. 
+     */
+    public ArgumentMatch match( ArgumentDefinitions definitions, String token, ArgumentMatchSite position ) {
+        // If the argument is valid, parse out the argument.
+        Matcher matcher = pattern.matcher(token);
+
+        // Didn't match?  Must be bad input.
+        if( !matcher.matches() )
+            throw new IllegalArgumentException( String.format("Unable to parse token %s with pattern %s", token, pattern.pattern()) );
+
+        String argument = matcher.group(1).trim();
+
+        Tags tags = parseTags(argument, matcher.group(2));
+
+        // Find the most appropriate argument definition for the given argument.
+        ArgumentDefinition argumentDefinition = definitions.findArgumentDefinition( argument, definitionMatcher );
+
+        // Try to find a matching argument.  If found, label that as the match.  If not found, add the argument
+        // with a null definition.
+        return new ArgumentMatch(argument,argumentDefinition,position,tags);
+    }
+
+    public static Tags parseTags(String argument, String tagString) {
+        Tags tags = new Tags();
+        if (tagString != null) {
+            for(String tag: Utils.split(tagString, ",")) {
+                // Check for presence of an '=' sign, indicating a key-value pair in the tag line.
+                int equalDelimiterPos = tag.indexOf('=');
+                if(equalDelimiterPos >= 0) {
+                    // Sanity check; ensure that there aren't multiple '=' in this key-value pair.
+                    if(tag.indexOf('=',equalDelimiterPos+1) >= 0)
+                        throw new ArgumentException(String.format("Tag %s passed to argument %s is malformed.  Please ensure that " +
+                                "key-value tags are of the form <key>=<value>, and neither key " +
+                                "nor value contain the '=' character", tag, argument));
+                    tags.addKeyValueTag(tag.substring(0,equalDelimiterPos),tag.substring(equalDelimiterPos+1));
+                }
+                else
+                    tags.addPositionalTag(tag);
+
+            }
+        }
+        return tags;
+    }
+
+    /**
+     * A command-line argument always starts with an alphabetical character or underscore followed by any word character.
+     */
+    private static final String ARGUMENT_TEXT = "[A-Za-z_][\\w\\-\\.]*";
+
+    /**
+     * Tags, on the other hand, can start with any word character.
+     */
+    private static final String TAG_TEXT = "[\\w\\-\\.\\=]*";
+
+    public static final ParsingMethod FullNameParsingMethod = new ParsingMethod(Pattern.compile(String.format("\\s*--(%1$s)(?:\\:(%2$s(?:,%2$s)*))?\\s*",ARGUMENT_TEXT,TAG_TEXT)),
+                                                                          ArgumentDefinitions.FullNameDefinitionMatcher) {};
+    public static final ParsingMethod ShortNameParsingMethod = new ParsingMethod(Pattern.compile(String.format("\\s*-(%1$s)(?:\\:(%2$s(?:,%2$s)*))?\\s*",ARGUMENT_TEXT,TAG_TEXT)),
+                                                                           ArgumentDefinitions.ShortNameDefinitionMatcher) {};
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/commandline/RodBinding.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/commandline/RodBinding.java
new file mode 100644
index 0000000..3221d21
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/commandline/RodBinding.java
@@ -0,0 +1,197 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.commandline;
+
+import com.google.java.contract.Ensures;
+import com.google.java.contract.Requires;
+import htsjdk.tribble.Feature;
+
+import java.util.*;
+
+/**
+ * A RodBinding represents a walker argument that gets bound to a ROD track.
+ *
+ * The RodBinding<T> is a formal GATK argument that bridges between a walker and
+ * the RefMetaDataTracker to obtain data about this rod track at runtime.  The RodBinding
+ * is explicitly typed with type of the Tribble.Feature expected to be produced by this
+ * argument.  The GATK Engine takes care of initializing the binding and connecting it
+ * to the RMD system.
+ *
+ * It is recommended that optional RodBindings be initialized to the value returned
+ * by the static method makeUnbound().
+ *
+ * Note that this class is immutable.
+ */
+public final class RodBinding<T extends Feature> {
+    protected final static String UNBOUND_VARIABLE_NAME = "";
+    protected final static String UNBOUND_SOURCE = "UNBOUND";
+    protected final static String UNBOUND_TRIBBLE_TYPE = "";
+
+    /**
+     * Create an unbound RodBinding of type.  This is the correct programming
+     * style for an optional RodBinding<T>
+     *
+     *     At Input()
+     *     RodBinding<T> x = RodBinding.makeUnbound(T.class)
+     *
+     * The unbound binding is guaranteed to never match any binding.  It uniquely
+     * returns false to isBound().
+     *
+     * @param type the Class type produced by this unbound object
+     * @param <T> any class extending Tribble Feature
+     * @return the UNBOUND RodBinding producing objects of type T
+     */
+    @Requires("type != null")
+    protected final static <T extends Feature> RodBinding<T> makeUnbound(Class<T> type) {
+        return new RodBinding<T>(type);
+    }
+
+    /** The name of this binding.  Often the name of the field itself, but can be overridden on cmdline */
+    final private String name;
+    /** where the data for this ROD is coming from.  A file or special value if coming from stdin */
+    final private String source;
+    /** the string name of the tribble type, such as vcf, bed, etc. */
+    final private String tribbleType;
+    /** The command line tags associated with this RodBinding */
+    final private Tags tags;
+    /** The Java class expected for this RodBinding.  Must correspond to the type emitted by Tribble */
+    final private Class<T> type;
+    /** True for all RodBindings except the special UNBOUND binding, which is the default for optional arguments */
+    final private boolean bound;
+
+    /**
+     * The name counter.  This is how we create unique names for collections of RodBindings
+     * on the command line.  If you provide the GATK with -X file1 and -X file2 to a
+     * RodBinding argument as List<RodBinding<T>> then each binding will receive automatically
+     * the name of X and X2.
+     */
+    final private static Map<String, Integer> nameCounter = new HashMap<String, Integer>();
+
+    /** for UnitTests */
+    final public static void resetNameCounter() {
+        nameCounter.clear();
+    }
+
+    @Requires("rawName != null")
+    @Ensures("result != null")
+    final private static synchronized String countedVariableName(final String rawName) {
+        Integer count = nameCounter.get(rawName);
+        if ( count == null ) {
+            nameCounter.put(rawName, 1);
+            return rawName;
+        } else {
+            nameCounter.put(rawName, count + 1);
+            return rawName + (count + 1);
+        }
+    }
+
+    @Requires({"type != null", "rawName != null", "source != null", "tribbleType != null", "tags != null"})
+    public RodBinding(Class<T> type, final String rawName, final String source, final String tribbleType, final Tags tags) {
+        this.type = type;
+        this.name = countedVariableName(rawName);
+        this.source = source;
+        this.tribbleType = tribbleType;
+        this.tags = tags;
+        this.bound = true;
+    }
+
+    /**
+     * For testing purposes only.  Creates a RodBinding sufficient for looking up associations to rawName
+     * @param type
+     * @param rawName
+     */
+    public RodBinding(Class<T> type, final String rawName) {
+        this(type, rawName, "missing", type.getSimpleName(), new Tags());
+    }
+
+    /**
+     * Make an unbound RodBinding<T>.  Only available for creating the globally unique UNBOUND object
+     * @param type class this unbound RodBinding creates
+     */
+    @Requires({"type != null"})
+    private RodBinding(Class<T> type) {
+        this.type = type;
+        this.name = UNBOUND_VARIABLE_NAME;  // special value can never be found in RefMetaDataTracker
+        this.source = UNBOUND_SOURCE;
+        this.tribbleType = UNBOUND_TRIBBLE_TYPE;
+        this.tags = new Tags();
+        this.bound = false;
+    }
+
+
+   /**
+     * @return True for all RodBindings except the special UNBOUND binding, which is the default for optional arguments
+     */
+    final public boolean isBound() {
+        return bound;
+    }
+
+    /**
+     * @return The name of this binding.  Often the name of the field itself, but can be overridden on cmdline
+     */
+    @Ensures({"result != null"})
+    final public String getName() {
+        return name;
+    }
+
+    /**
+     * @return The Java class expected for this RodBinding; must correspond to the type emitted by Tribble.
+     */
+    @Ensures({"result != null"})
+    final public Class<T> getType() {
+        return type;
+    }
+
+    /**
+     * @return where the data for this ROD is coming from.  A file or special value if coming from stdin
+     */
+    @Ensures({"result != null"})
+    final public String getSource() {
+        return source;
+    }
+
+    /**
+     * @return The command line tags associated with this RodBinding.  Will include the tags used to
+     * determine the name and type of this RodBinding
+     */
+    @Ensures({"result != null"})
+    final public Tags getTags() {
+        return tags;
+    }
+
+    /**
+     * @return the string name of the tribble type, such as vcf, bed, etc.
+     */
+    @Ensures({"result != null"})
+    final public String getTribbleType() {
+        return tribbleType;
+    }
+
+    @Override
+    public String toString() {
+        return String.format("(RodBinding name=%s source=%s)", getName(), getSource());
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/commandline/RodBindingCollection.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/commandline/RodBindingCollection.java
new file mode 100644
index 0000000..8f90f7d
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/commandline/RodBindingCollection.java
@@ -0,0 +1,89 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.commandline;
+
+import com.google.java.contract.Ensures;
+import htsjdk.tribble.Feature;
+import org.broadinstitute.gatk.utils.exceptions.UserException;
+
+import java.lang.reflect.Constructor;
+import java.lang.reflect.InvocationTargetException;
+import java.util.*;
+
+/**
+ * A RodBindingCollection represents a collection of RodBindings.
+ *
+ * The RodBindingCollection<T> is a formal GATK argument that is used to specify a file of RodBindings.
+ *
+ */
+public final class RodBindingCollection<T extends Feature> {
+
+    /** The Java class expected for this RodBinding.  Must correspond to the type emitted by Tribble */
+    final private Class<T> type;
+
+    private Collection<RodBinding<T>> rodBindings;
+
+    public RodBindingCollection(final Class<T> type, final Collection<RodBinding<T>> rodBindings) {
+        this.type = type;
+        this.rodBindings = Collections.unmodifiableCollection(rodBindings);
+    }
+
+    /**
+     * @return the collection of RodBindings
+     */
+    final public Collection<RodBinding<T>> getRodBindings() {
+        return rodBindings;
+    }
+
+    /**
+     * @return the string name of the tribble type, such as vcf, bed, etc.
+     */
+    @Ensures({"result != null"})
+    final public Class<T> getType() {
+        return type;
+    }
+
+    @Override
+    public String toString() {
+        return String.format("(RodBindingCollection %s)", getRodBindings());
+    }
+
+    /**
+     * Utility method to help construct a RodBindingCollection of the given Feature type
+     *
+     * @param type         the Feature type
+     * @param rodBindings  the rod bindings to put into the collection
+     * @return a new RodBindingCollection object
+     */
+    public static Object createRodBindingCollectionOfType(final Class<? extends Feature> type, final Collection<RodBinding> rodBindings) {
+        try {
+            final Constructor ctor = RodBindingCollection.class.getConstructor(Class.class, Collection.class);
+            return ctor.newInstance(type, rodBindings);
+        } catch (final Exception e) {
+            throw new IllegalStateException("Failed to create a RodBindingCollection for type " + type);
+        }
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/commandline/Tags.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/commandline/Tags.java
new file mode 100644
index 0000000..8ea28c8
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/commandline/Tags.java
@@ -0,0 +1,112 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.commandline;
+
+import java.util.*;
+
+/**
+ * Models the tags that can appear after command-line arguments
+ * in the GATK.
+ */
+public class Tags {
+    /**
+     * Storage for the ordered, unkeyed, positional tags.
+     */
+    private final List<String> positionalTags = new ArrayList<String>();
+
+    /**
+     * Storage for key-value tags of the form <key>=<value>
+     */
+    private Map<String,String> keyValueTags = new HashMap<String,String>();
+
+    /**
+     * Tests to see whether two tag sets are equal.
+     * @param other Other object to test for equality.
+     * @return True if objects are the same.  False if objects differ.
+     */
+    @Override
+    public boolean equals(Object other) {
+        if(other == null)
+            return false;
+
+        if(!(other instanceof Tags))
+            return false;
+
+        Tags otherTags = (Tags)other;
+        return this.positionalTags.equals(otherTags.positionalTags) && this.keyValueTags.equals(otherTags.keyValueTags);
+    }
+
+    /**
+     * Returns whether any tags are specified on the command-line for this operation.
+     * @return True if the tags are empty; false otherwise.
+     */
+    public boolean isEmpty() {
+        return positionalTags.isEmpty() && keyValueTags.isEmpty();
+    }
+
+    /**
+     * Retrieves the list of all positional tags associated with this argument.
+     * @return A list of positional tags.
+     */
+    public List<String> getPositionalTags() {
+        return Collections.unmodifiableList(positionalTags);
+    }
+
+    /**
+     * Gets the value associated with a given <key>=<value> argument tag.
+     * @param key The key for which to retrieve the value.
+     * @return The value paired with the given key, or null if no such element exists.
+     */
+    public String getValue(final String key) {
+        return keyValueTags.get(key);
+    }
+
+    /**
+     * Returns true if tags contains given key
+     * @param key The key for which to check existence.
+     * @return true if tags contains given key
+     */
+    public boolean containsKey(final String key) {
+        return keyValueTags.containsKey(key);
+    }
+
+    /**
+     * Adds positional tag(s) to the tag object.
+     * @param tags The tag strings to add.
+     */
+    protected void addPositionalTag(final String... tags) {
+        positionalTags.addAll(Arrays.asList(tags));
+    }
+
+    /**
+     * Adds a <key>-<value> tag to this tag library.
+     * @param key key tag to add.
+     * @param value value to associate with this key.
+     */
+    protected void addKeyValueTag(final String key, final String value) {
+        keyValueTags.put(key,value);
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/commandline/package-info.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/commandline/package-info.java
new file mode 100644
index 0000000..3dca424
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/commandline/package-info.java
@@ -0,0 +1,26 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.commandline;
\ No newline at end of file
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/contexts/AlignmentContext.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/contexts/AlignmentContext.java
new file mode 100644
index 0000000..6add93b
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/contexts/AlignmentContext.java
@@ -0,0 +1,154 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.contexts;
+
+import org.broadinstitute.gatk.utils.GenomeLoc;
+import org.broadinstitute.gatk.utils.HasGenomeLocation;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+import org.broadinstitute.gatk.utils.pileup.ReadBackedPileup;
+import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
+
+import java.util.List;
+
+/**
+ * Useful class for forwarding on locusContext data from this iterator
+ * 
+ * Created by IntelliJ IDEA.
+ * User: mdepristo
+ * Date: Feb 22, 2009
+ * Time: 3:01:34 PM
+ * To change this template use File | Settings | File Templates.
+ */
+public class AlignmentContext implements HasGenomeLocation {
+    protected GenomeLoc loc = null;
+    protected ReadBackedPileup basePileup = null;
+    protected boolean hasPileupBeenDownsampled;
+
+    /**
+     * The number of bases we've skipped over in the reference since the last map invocation.
+     * Only filled in by RodTraversals right now.  By default, nothing is being skipped, so skippedBases == 0.
+     */
+    private long skippedBases = 0;
+
+    public AlignmentContext(GenomeLoc loc, ReadBackedPileup basePileup) {
+        this(loc, basePileup, 0, false);
+    }
+
+    public AlignmentContext(GenomeLoc loc, ReadBackedPileup basePileup, boolean hasPileupBeenDownsampled) {
+        this(loc, basePileup, 0, hasPileupBeenDownsampled);
+    }
+
+    public AlignmentContext(GenomeLoc loc, ReadBackedPileup basePileup, long skippedBases) {
+        this(loc, basePileup, skippedBases, false);
+    }
+
+    public AlignmentContext(GenomeLoc loc, ReadBackedPileup basePileup, long skippedBases,boolean hasPileupBeenDownsampled ) {
+        if ( loc == null ) throw new ReviewedGATKException("BUG: GenomeLoc in Alignment context is null");
+        if ( basePileup == null ) throw new ReviewedGATKException("BUG: ReadBackedPileup in Alignment context is null");
+        if ( skippedBases < 0 ) throw new ReviewedGATKException("BUG: skippedBases is -1 in Alignment context");
+
+        this.loc = loc;
+        this.basePileup = basePileup;
+        this.skippedBases = skippedBases;
+        this.hasPileupBeenDownsampled = hasPileupBeenDownsampled;
+    }
+
+    /** Returns base pileup over the current genomic location. Deprectated. Use getBasePileup() to make your intentions
+     * clear.
+     * @return
+     */
+    @Deprecated
+    public ReadBackedPileup getPileup() { return basePileup; }
+
+    /** Returns base pileup over the current genomic location. May return null if this context keeps only
+     * extended event (indel) pileup.
+     * @return
+     */
+    public ReadBackedPileup getBasePileup() {
+        return basePileup;
+    }
+
+    /**
+     * Returns true if any reads have been filtered out of the pileup due to excess DoC.
+     * @return True if reads have been filtered out.  False otherwise.
+     */
+    public boolean hasPileupBeenDownsampled() { return hasPileupBeenDownsampled; }
+
+    /**
+     * get all of the reads within this context
+     * 
+     * @return
+     */
+    @Deprecated
+    //todo: unsafe and tailored for current usage only; both pileups can be null or worse, bot can be not null in theory
+    public List<GATKSAMRecord> getReads() { return ( basePileup.getReads() ); }
+
+    /**
+     * Are there any reads associated with this locus?
+     *
+     * @return
+     */
+    public boolean hasReads() {
+        return basePileup != null && basePileup.getNumberOfElements() > 0 ;
+    }
+
+    /**
+     * How many reads cover this locus?
+     * @return
+     */
+    public int size() {
+        return basePileup.getNumberOfElements();
+    }
+
+    /**
+     * get a list of the equivalent positions within in the reads at Pos
+     *
+     * @return
+     */
+    @Deprecated
+    public List<Integer> getOffsets() {
+        return basePileup.getOffsets();
+    }
+
+    public String getContig() { return getLocation().getContig(); }
+    public long getPosition() { return getLocation().getStart(); }
+    public GenomeLoc getLocation() { return loc; }
+
+    public void downsampleToCoverage(int coverage) {
+        basePileup = basePileup.getDownsampledPileup(coverage);
+        hasPileupBeenDownsampled = true;
+    }
+
+    /**
+     * Returns the number of bases we've skipped over in the reference since the last map invocation.
+     * Only filled in by RodTraversals right now.  A value of 0 indicates that no bases were skipped.
+     *
+     * @return the number of skipped bases
+     */
+    public long getSkippedBases() {
+        return skippedBases;
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/contexts/AlignmentContextUtils.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/contexts/AlignmentContextUtils.java
new file mode 100644
index 0000000..92cfc74
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/contexts/AlignmentContextUtils.java
@@ -0,0 +1,150 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.contexts;
+
+import htsjdk.samtools.SAMReadGroupRecord;
+import org.broadinstitute.gatk.utils.GenomeLoc;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+import org.broadinstitute.gatk.utils.exceptions.UserException;
+import org.broadinstitute.gatk.utils.pileup.*;
+
+import java.util.*;
+
+/**
+ * Useful utilities for storing different AlignmentContexts
+ * User: ebanks
+ */
+public class AlignmentContextUtils {
+
+    // Definitions:
+    //   COMPLETE = full alignment context
+    //   FORWARD  = reads on forward strand
+    //   REVERSE  = reads on forward strand
+    //
+    public enum ReadOrientation { COMPLETE, FORWARD, REVERSE }
+
+    private AlignmentContextUtils() {
+        // cannot be instantiated
+    }
+
+    /**
+     * Returns a potentially derived subcontext containing only forward, reverse, or in fact all reads
+     * in alignment context context.
+     *
+     * @param context
+     * @param type
+     * @return
+     */
+    public static AlignmentContext stratify(AlignmentContext context, ReadOrientation type) {
+        switch(type) {
+            case COMPLETE:
+                return context;
+            case FORWARD:
+                return new AlignmentContext(context.getLocation(),context.getPileup().getPositiveStrandPileup());
+            case REVERSE:
+                return new AlignmentContext(context.getLocation(),context.getPileup().getNegativeStrandPileup());
+            default:
+                throw new ReviewedGATKException("Unable to get alignment context for type = " + type);
+        }
+    }
+
+    public static Map<String, AlignmentContext> splitContextBySampleName(AlignmentContext context) {
+        return splitContextBySampleName(context, null);
+    }
+
+    /**
+     * Splits the given AlignmentContext into a StratifiedAlignmentContext per sample, but referencd by sample name instead
+     * of sample object.
+     *
+     * @param context                the original pileup
+     *
+     * @return a Map of sample name to StratifiedAlignmentContext
+     *
+     **/
+    public static Map<String, AlignmentContext> splitContextBySampleName(AlignmentContext context, String assumedSingleSample) {
+        GenomeLoc loc = context.getLocation();
+        HashMap<String, AlignmentContext> contexts = new HashMap<String, AlignmentContext>();
+
+        for(String sample: context.getPileup().getSamples()) {
+            ReadBackedPileup pileupBySample = context.getPileup().getPileupForSample(sample);
+
+            // Don't add empty pileups to the split context.
+            if(pileupBySample.getNumberOfElements() == 0)
+                continue;
+
+            if(sample != null)
+                contexts.put(sample, new AlignmentContext(loc, pileupBySample));
+            else {
+                if(assumedSingleSample == null) {
+                    throw new UserException.ReadMissingReadGroup(pileupBySample.iterator().next().getRead());
+                }
+                contexts.put(assumedSingleSample,new AlignmentContext(loc, pileupBySample));
+            }
+        }
+
+        return contexts;
+    }
+
+    /**
+     * Splits the AlignmentContext into one context per read group
+     *
+     * @param context the original pileup
+     * @return a Map of ReadGroup to AlignmentContext, or an empty map if context has no base pileup
+     *
+     **/
+    public static Map<SAMReadGroupRecord, AlignmentContext> splitContextByReadGroup(AlignmentContext context, Collection<SAMReadGroupRecord> readGroups) {
+        HashMap<SAMReadGroupRecord, AlignmentContext> contexts = new HashMap<SAMReadGroupRecord, AlignmentContext>();
+
+        for (SAMReadGroupRecord rg : readGroups) {
+            ReadBackedPileup rgPileup = context.getBasePileup().getPileupForReadGroup(rg.getReadGroupId());
+            if ( rgPileup != null ) // there we some reads for RG
+                contexts.put(rg, new AlignmentContext(context.getLocation(), rgPileup));
+        }
+
+        return contexts;
+    }
+
+    public static Map<String, AlignmentContext> splitContextBySampleName(ReadBackedPileup pileup) {
+        return splitContextBySampleName(new AlignmentContext(pileup.getLocation(), pileup));
+    }
+
+
+    public static AlignmentContext joinContexts(Collection<AlignmentContext> contexts) {
+        // validation
+        GenomeLoc loc = contexts.iterator().next().getLocation();
+        for(AlignmentContext context: contexts) {
+            if(!loc.equals(context.getLocation()))
+                throw new ReviewedGATKException("Illegal attempt to join contexts from different genomic locations");
+        }
+
+        List<PileupElement> pe = new ArrayList<PileupElement>();
+        for(AlignmentContext context: contexts) {
+            for(PileupElement pileupElement: context.basePileup)
+                pe.add(pileupElement);
+        }
+        return new AlignmentContext(loc, new ReadBackedPileupImpl(loc,pe));
+    }
+}
\ No newline at end of file
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/contexts/ReferenceContext.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/contexts/ReferenceContext.java
new file mode 100644
index 0000000..fca56dc
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/contexts/ReferenceContext.java
@@ -0,0 +1,217 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.contexts;
+
import com.google.java.contract.Ensures;
import com.google.java.contract.Requires;
import org.broadinstitute.gatk.utils.BaseUtils;
import org.broadinstitute.gatk.utils.GenomeLoc;
import org.broadinstitute.gatk.utils.GenomeLocParser;

import java.util.Arrays;
+
+/**
+ * The section of the reference that overlaps with the given
+ * read / locus. 
+ *
+ * @author hanna
+ * @version 0.1
+ */
+public class ReferenceContext {
+    /**
+     * Facilitates creation of new GenomeLocs.
+     */
+    final private GenomeLocParser genomeLocParser;
+
+    /**
+     * The locus.
+     */
+    final private GenomeLoc locus;
+
+    /**
+     * The window of reference information around the current locus.
+     */
+    final private GenomeLoc window;
+
+    /**
+     * The bases in the window around the current locus.  If null, then bases haven't been fetched yet.
+     * Bases are always upper cased
+     */
+    private byte[] basesCache = null;
+
+    /**
+     * Lazy loader to fetch reference bases
+     */
+    final private ReferenceContextRefProvider basesProvider;
+
+    /**
+     * Interface to create byte[] contexts for lazy loading of the reference
+     */
+    public static interface ReferenceContextRefProvider {
+        /**
+         * You must provide a routine that gets the byte[] bases that would have been passed into the
+         * ReferenceContext.  The RC will handling caching.  The value of this interface and routine is
+         * that it is only called when the bytes are actually requested by the walker, not up front.  So
+         * if the walker doesn't need the refBases for whatever reason, there's no overhead to
+         * provide them.
+         *
+         * @return
+         */
+        @Ensures({"result != null"})
+        public byte[] getBases();
+    }
+
+    private static class ForwardingProvider implements ReferenceContextRefProvider {
+        byte[] bases;
+
+        public ForwardingProvider( byte base ) {
+            this(new byte[] { base });
+        }
+
+        public ForwardingProvider( byte[] bases ) {
+            this.bases = bases;
+        }
+
+        public byte[] getBases() { return bases; }
+    }
+
+    /**
+     * Contructor for a simple, windowless reference context.
+     * @param locus locus of interest.
+     * @param base reference base at that locus.
+     */
+    @Requires({
+            "genomeLocParser != null",
+            "locus != null",
+            "locus.size() > 0"})
+    public ReferenceContext( GenomeLocParser genomeLocParser, GenomeLoc locus, byte base ) {
+        this( genomeLocParser, locus, locus, new ForwardingProvider(base) );
+    }
+
+    @Requires({
+            "genomeLocParser != null",
+            "locus != null",
+            "locus.size() > 0",
+            "window != null",
+            "window.size() > 0",
+            "bases != null && bases.length > 0"})
+    public ReferenceContext( GenomeLocParser genomeLocParser, GenomeLoc locus, GenomeLoc window, byte[] bases ) {
+        this( genomeLocParser, locus, window, new ForwardingProvider(bases) );
+    }
+
+    @Requires({
+            "genomeLocParser != null",
+            "locus != null",
+            "locus.size() > 0",
+            "window != null",
+            "window.size() > 0",
+            "basesProvider != null"})
+    public ReferenceContext( GenomeLocParser genomeLocParser, GenomeLoc locus, GenomeLoc window, ReferenceContextRefProvider basesProvider ) {
+        this.genomeLocParser = genomeLocParser;
+        this.locus = locus;
+        this.window = window;
+        this.basesProvider = basesProvider;
+    }
+
+    /**
+     * Utility function to load bases from the provider to the cache, if necessary
+     */
+    @Ensures({
+            "basesCache != null",
+            "old(basesCache) == null || old(basesCache) == basesCache"})
+    private void fetchBasesFromProvider() {
+        if ( basesCache == null ) {
+            basesCache = basesProvider.getBases();
+
+            // must be an assertion that only runs when the bases are fetch to run in a reasonable amount of time
+            assert BaseUtils.isUpperCase(basesCache);
+        }
+    }
+
+    /**
+     * @return The genome loc parser associated with this reference context
+     */
+    @Ensures("result != null")
+    public GenomeLocParser getGenomeLocParser() {
+        return genomeLocParser;
+    }
+
+    /**
+     * The locus currently being examined.
+     * @return The current locus.
+     */
+    @Ensures("result != null")
+    public GenomeLoc getLocus() {
+        return locus;
+    }
+
+    @Ensures("result != null")
+    public GenomeLoc getWindow() {
+        return window;
+    }
+
+    /**
+     * Get the base at the given locus.
+     * @return The base at the given locus from the reference.
+     */
+    public byte getBase() {
+        return getBases()[(locus.getStart() - window.getStart())];
+    }
+
+    /**
+     * All the bases in the window currently being examined.
+     * @return All bases available.  If the window is of size [0,0], the array will
+     *         contain only the base at the given locus.
+     */
+    @Ensures({"result != null", "result.length > 0"})
+    public byte[] getBases() {
+        fetchBasesFromProvider();
+        return basesCache;
+    }
+
+    /**
+     * All the bases in the window from the current base forward to the end of the window.
+     */
+    @Ensures({"result != null", "result.length > 0"})
+    public byte[] getForwardBases() {
+        final byte[] bases = getBases();
+        final int mid = locus.getStart() - window.getStart();
+        // todo -- warning of performance problem, especially if this is called over and over
+        return new String(bases).substring(mid).getBytes();
+    }
+
+    @Deprecated
+    public char getBaseAsChar() {
+        return (char)getBase();
+    }
+
+    /**
+     * Get the base at the given locus.
+     * @return The base at the given locus from the reference.
+     */
+    @Deprecated()
+    public int getBaseIndex() {
+        return BaseUtils.simpleBaseToBaseIndex(getBase());
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/diffengine/BAMDiffableReader.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/diffengine/BAMDiffableReader.java
new file mode 100644
index 0000000..d06176e
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/diffengine/BAMDiffableReader.java
@@ -0,0 +1,119 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.diffengine;
+
+import htsjdk.samtools.SAMFileReader;
+import htsjdk.samtools.SAMRecord;
+import htsjdk.samtools.SAMRecordIterator;
+import htsjdk.samtools.ValidationStringency;
+import htsjdk.samtools.util.BlockCompressedInputStream;
+
+import java.io.*;
+import java.util.Arrays;
+
+
+/**
+ * Created by IntelliJ IDEA.
+ * User: depristo
+ * Date: 7/4/11
+ * Time: 1:09 PM
+ *
+ * Class implementing diffnode reader for VCF
+ */
+public class BAMDiffableReader implements DiffableReader {
+    @Override
+    public String getName() { return "BAM"; }
+
+    @Override
+    public DiffElement readFromFile(File file, int maxElementsToRead) {
+        final SAMFileReader reader = new SAMFileReader(file, null); // null because we don't want it to look for the index
+        reader.setValidationStringency(ValidationStringency.SILENT);
+
+        DiffNode root = DiffNode.rooted(file.getName());
+        SAMRecordIterator iterator = reader.iterator();
+
+        int count = 0;
+        while ( iterator.hasNext() ) {
+            final SAMRecord record = iterator.next();
+
+            // name is the read name + first of pair
+            String name = record.getReadName().replace('.', '_');
+            if ( record.getReadPairedFlag() ) {
+                name += record.getFirstOfPairFlag() ? "_1" : "_2";
+            }
+
+            DiffNode readRoot = DiffNode.empty(name, root);
+
+            // add fields
+            readRoot.add("NAME", record.getReadName());
+            readRoot.add("FLAGS", record.getFlags());
+            readRoot.add("RNAME", record.getReferenceName());
+            readRoot.add("POS", record.getAlignmentStart());
+            readRoot.add("MAPQ", record.getMappingQuality());
+            readRoot.add("CIGAR", record.getCigarString());
+            readRoot.add("RNEXT", record.getMateReferenceName());
+            readRoot.add("PNEXT", record.getMateAlignmentStart());
+            readRoot.add("TLEN", record.getInferredInsertSize());
+            readRoot.add("SEQ", record.getReadString());
+            readRoot.add("QUAL", record.getBaseQualityString());
+
+            for ( SAMRecord.SAMTagAndValue xt : record.getAttributes() ) {
+                readRoot.add(xt.tag, xt.value);
+            }
+
+            // add record to root
+            if ( ! root.hasElement(name) )
+                // protect ourselves from malformed files
+                root.add(readRoot);
+            count += readRoot.size();
+            if ( count > maxElementsToRead && maxElementsToRead != -1)
+                break;
+        }
+
+        reader.close();
+
+        return root.getBinding();
+    }
+
+    @Override
+    public boolean canRead(File file) {
+        final byte[] BAM_MAGIC = "BAM\1".getBytes();
+        final byte[] buffer = new byte[BAM_MAGIC.length];
+        try {
+            InputStream fstream = new BufferedInputStream(new FileInputStream(file));
+            if ( !BlockCompressedInputStream.isValidFile(fstream) )
+                return false;
+            final BlockCompressedInputStream BCIS = new BlockCompressedInputStream(fstream);
+            BCIS.read(buffer, 0, BAM_MAGIC.length);
+            BCIS.close();
+            return Arrays.equals(buffer, BAM_MAGIC);
+        } catch ( IOException e ) {
+            return false;
+        } catch ( htsjdk.samtools.FileTruncatedException e ) {
+            return false;
+        }
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/diffengine/DiffElement.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/diffengine/DiffElement.java
new file mode 100644
index 0000000..f9167ff
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/diffengine/DiffElement.java
@@ -0,0 +1,125 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.diffengine;
+
+import com.google.java.contract.Ensures;
+import com.google.java.contract.Invariant;
+import com.google.java.contract.Requires;
+import org.broadinstitute.gatk.utils.Utils;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+
+/**
+ * Created by IntelliJ IDEA.
+ * User: depristo
+ * Date: 7/4/11
+ * Time: 12:55 PM
+ *
+ * An interface that must be implemented to allow us to calculate differences
+ * between structured objects
+ */
+ at Invariant({
+        "name != null",
+        "value != null",
+        "parent != null || name.equals(\"ROOT\")",
+        "value == null || value.getBinding() == this"})
+public class DiffElement {
+    public final static DiffElement ROOT = new DiffElement();
+
+    final private String name;
+    final private DiffElement parent;
+    final private DiffValue value;
+
+    /**
+     * For ROOT only
+     */
+    private DiffElement() {
+        this.name = "ROOT";
+        this.parent = null;
+        this.value = new DiffValue(this, "ROOT");
+    }
+
+    @Requires({"name != null", "parent != null", "value != null"})
+    public DiffElement(String name, DiffElement parent, DiffValue value) {
+        if ( name.equals("ROOT") ) throw new IllegalArgumentException("Cannot use reserved name ROOT");
+        this.name = name;
+        this.parent = parent;
+        this.value = value;
+        this.value.setBinding(this);
+    }
+
+    @Ensures({"result != null"})
+    public String getName() {
+        return name;
+    }
+
+    public DiffElement getParent() {
+        return parent;
+    }
+
+    @Ensures({"result != null"})
+    public DiffValue getValue() {
+        return value;
+    }
+
+    public boolean isRoot() { return this == ROOT; }
+
+    @Ensures({"result != null"})
+    @Override
+    public String toString() {
+        return getName() + "=" + getValue().toString();
+    }
+
+    public String toString(int offset) {
+        return (offset > 0 ? Utils.dupString(' ', offset) : 0) + getName() + "=" + getValue().toString(offset);
+    }
+
+    @Ensures({"result != null"})
+    public final String fullyQualifiedName() {
+        if ( isRoot() )
+            return "";
+        else if ( parent.isRoot() )
+            return name;
+        else
+            return parent.fullyQualifiedName() + "." + name;
+    }
+
+    @Ensures({"result != null"})
+    public String toOneLineString() {
+        return getName() + "=" + getValue().toOneLineString();
+    }
+
+    @Ensures({"result != null"})
+    public DiffNode getValueAsNode() {
+        if ( getValue().isCompound() )
+            return (DiffNode)getValue();
+        else
+            throw new ReviewedGATKException("Illegal request conversion of a DiffValue into a DiffNode: " + this);
+    }
+
+    public int size() {
+        return 1 + getValue().size();
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/diffengine/DiffEngine.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/diffengine/DiffEngine.java
new file mode 100644
index 0000000..5699345
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/diffengine/DiffEngine.java
@@ -0,0 +1,437 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.diffengine;
+
+import org.apache.log4j.Logger;
+import org.broadinstitute.gatk.utils.report.GATKReport;
+import org.broadinstitute.gatk.utils.report.GATKReportTable;
+import org.broadinstitute.gatk.utils.Utils;
+import org.broadinstitute.gatk.utils.classloader.PluginManager;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+import org.broadinstitute.gatk.utils.exceptions.UserException;
+
+import java.io.File;
+import java.io.PrintStream;
+import java.util.*;
+
+/**
+ * Created by IntelliJ IDEA.
+ * User: depristo
+ * Date: 7/4/11
+ * Time: 12:51 PM
+ * A generic engine for comparing tree-structured objects
+ *
+ */
+public class DiffEngine {
+    final protected static Logger logger = Logger.getLogger(DiffEngine.class);
+
+    private final Map<String, DiffableReader> readers = new HashMap<String, DiffableReader>();
+
+    public DiffEngine() {
+        loadDiffableReaders();
+    }
+
+    // --------------------------------------------------------------------------------
+    //
+    // difference calculation
+    //
+    // --------------------------------------------------------------------------------
+
+    public List<Difference> diff(DiffElement master, DiffElement test) {
+        DiffValue masterValue = master.getValue();
+        DiffValue testValue = test.getValue();
+
+        if ( masterValue.isCompound() && masterValue.isCompound() ) {
+            return diff(master.getValueAsNode(), test.getValueAsNode());
+        } else if ( masterValue.isAtomic() && testValue.isAtomic() ) {
+            return diff(masterValue, testValue);
+        } else {
+            // structural difference in types.  one is node, other is leaf
+            return Arrays.asList(new Difference(master, test));
+        }
+    }
+
+    public List<Difference> diff(DiffNode master, DiffNode test) {
+        Set<String> allNames = new HashSet<String>(master.getElementNames());
+        allNames.addAll(test.getElementNames());
+        List<Difference> diffs = new ArrayList<Difference>();
+
+        for ( String name : allNames ) {
+            DiffElement masterElt = master.getElement(name);
+            DiffElement testElt = test.getElement(name);
+            if ( masterElt == null && testElt == null ) {
+                throw new ReviewedGATKException("BUG: unexpectedly got two null elements for field: " + name);
+            } else if ( masterElt == null || testElt == null ) { // if either is null, we are missing a value
+                // todo -- should one of these be a special MISSING item?
+                diffs.add(new Difference(masterElt, testElt));
+            } else {
+                diffs.addAll(diff(masterElt, testElt));
+            }
+        }
+
+        return diffs;
+    }
+
+    public List<Difference> diff(DiffValue master, DiffValue test) {
+        if ( master.getValue().equals(test.getValue()) ) {
+            return Collections.emptyList();
+        } else {
+            return Arrays.asList(new Difference(master.getBinding(), test.getBinding()));
+        }
+    }
+
+    // --------------------------------------------------------------------------------
+    //
+    // Summarizing differences
+    //
+    // --------------------------------------------------------------------------------
+
+    /**
+     * Emits a summary of the diffs to out.  Suppose you have the following three differences:
+     *
+     *   A.X.Z:1!=2
+     *   A.Y.Z:3!=4
+     *   B.X.Z:5!=6
+     *
+     * The above is the itemized list of the differences.  The summary looks for common differences
+     * in the name hierarchy, counts those shared elements, and emits the differences that occur
+     * in order of decreasing counts.
+     *
+     * So, in the above example, what are the shared elements?
+     *
+     * A.X.Z and B.X.Z share X.Z, so there's a *.X.Z with count 2
+     * A.X.Z, A.Y.Z, and B.X.Z all share *.*.Z, with count 3
+     * Each of A.X.Z, A.Y.Z, and B.X.Z are individually unique, with count 1
+     *
+     * So we would emit the following summary:
+     *
+     * *.*.Z: 3
+     * *.X.Z: 2
+     * A.X.Z: 1 [specific difference: 1!=2]
+     * A.Y.Z: 1 [specific difference: 3!=4]
+     * B.X.Z: 1 [specific difference: 5!=6]
+     *
+     * The algorithm to accomplish this calculation is relatively simple. Start with all of the
+     * concrete differences.  For each pair of differences A1.A2....AN and B1.B2....BN:
+     *
+     * find the longest common subsequence Si.Si+1...SN where Ai = Bi = Si
+     * If i == 0, then there's no shared substructure
+     * If i > 0, then generate the summarized value X = *.*...Si.Si+1...SN
+     * if X is a known summary, increment it's count, otherwise set its count to 1
+     *
+     * Not that only pairs of the same length are considered as potentially equivalent
+     *
+     * @param params determines how we display the items
+     * @param diffs the list of differences to summarize
+     */
+    public void reportSummarizedDifferences(List<Difference> diffs, SummaryReportParams params ) {
+        printSummaryReport(summarizedDifferencesOfPaths(diffs, params.doPairwise, params.maxRawDiffsToSummarize), params );
+    }
+
+    final protected static String[] diffNameToPath(String diffName) {
+        return diffName.split("\\.");
+    }
+
+    protected List<Difference> summarizedDifferencesOfPathsFromString(List<String> singletonDiffs) {
+        List<Difference> diffs = new ArrayList<Difference>();
+
+        for ( String diff : singletonDiffs ) {
+            diffs.add(new Difference(diff));
+        }
+
+        return summarizedDifferencesOfPaths(diffs, true, -1);
+    }
+
+    /**
+     * Computes a minimum set of potential differences between all singleton differences
+     * in singletonDiffs.  Employs an expensive pairwise O(n^2) algorithm.
+     *
+     * @param singletonDiffs
+     * @param maxRawDiffsToSummarize
+     * @return
+     */
+    private Map<String, Difference> initialPairwiseSummaries(final List<? extends Difference> singletonDiffs,
+                                                             final int maxRawDiffsToSummarize) {
+        Map<String, Difference> summaries = new HashMap<String, Difference>();
+
+        // create the initial set of differences
+        for ( int i = 0; i < singletonDiffs.size(); i++ ) {
+            for ( int j = 0; j <= i; j++ ) {
+                Difference diffPath1 = singletonDiffs.get(i);
+                Difference diffPath2 = singletonDiffs.get(j);
+                if ( diffPath1.length() == diffPath2.length() ) {
+                    int lcp = longestCommonPostfix(diffPath1.getParts(), diffPath2.getParts());
+                    String path = diffPath2.getPath();
+                    if ( lcp != 0 && lcp != diffPath1.length() )
+                        path = summarizedPath(diffPath2.getParts(), lcp);
+                    Difference sumDiff = new Difference(path, diffPath2.getMaster(), diffPath2.getTest());
+                    sumDiff.setCount(0);
+                    addSummaryIfMissing(summaries, sumDiff);
+
+                    if ( maxRawDiffsToSummarize != -1 && summaries.size() > maxRawDiffsToSummarize)
+                        return summaries;
+                }
+            }
+        }
+
+        return summaries;
+    }
+
+    /**
+     * Computes the possible leaf differences among the singleton diffs.
+     *
+     * The leaf differences are all of the form *.*...*.X where all internal
+     * differences are wildcards and the only summarized difference considered
+     * interesting to compute is
+     *
+     * @param singletonDiffs
+     * @param maxRawDiffsToSummarize
+     * @return
+     */
+    private Map<String, Difference> initialLeafSummaries(final List<? extends Difference> singletonDiffs,
+                                                         final int maxRawDiffsToSummarize) {
+        Map<String, Difference> summaries = new HashMap<String, Difference>();
+
+        // create the initial set of differences
+        for ( final Difference d : singletonDiffs ) {
+            final String path = summarizedPath(d.getParts(), 1);
+            Difference sumDiff = new Difference(path, d.getMaster(), d.getTest());
+            sumDiff.setCount(0);
+            addSummaryIfMissing(summaries, sumDiff);
+
+            if ( maxRawDiffsToSummarize != -1 && summaries.size() > maxRawDiffsToSummarize)
+                return summaries;
+        }
+
+        return summaries;
+    }
+
+    protected List<Difference> summarizedDifferencesOfPaths(final List<? extends Difference> singletonDiffs,
+                                                            final boolean doPairwise,
+                                                            final int maxRawDiffsToSummarize) {
+        final Map<String, Difference> summaries = doPairwise
+                ? initialPairwiseSummaries(singletonDiffs, maxRawDiffsToSummarize)
+                : initialLeafSummaries(singletonDiffs, maxRawDiffsToSummarize);
+
+        // count differences
+        for ( Difference diffPath : singletonDiffs ) {
+            for ( Difference sumDiff : summaries.values() ) {
+                if ( sumDiff.matches(diffPath.getParts()) )
+                    sumDiff.incCount();
+            }
+        }
+
+        List<Difference> sortedSummaries = new ArrayList<Difference>(summaries.values());
+        Collections.sort(sortedSummaries);
+        return sortedSummaries;
+    }
+
+    protected void addSummaryIfMissing(Map<String, Difference> summaries, Difference diff) {
+        if ( ! summaries.containsKey(diff.getPath()) ) {
+            summaries.put(diff.getPath(), diff);
+        }
+    }
+
+    protected void printSummaryReport(List<Difference> sortedSummaries, SummaryReportParams params ) {
+        List<Difference> toShow = new ArrayList<Difference>();
+        int count = 0, count1 = 0;
+        for ( Difference diff : sortedSummaries ) {
+            if ( diff.getCount() < params.minSumDiffToShow )
+                // in order, so break as soon as the count is too low
+                break;
+
+            if ( params.maxItemsToDisplay != 0 && count++ > params.maxItemsToDisplay )
+                break;
+
+            if ( diff.getCount() == 1 ) {
+                count1++;
+                if ( params.maxCountOneItems != 0 && count1 > params.maxCountOneItems )
+                    break;
+            }
+
+            toShow.add(diff);
+        }
+
+        // if we want it in descending order, reverse the list
+        if ( ! params.descending ) {
+            Collections.reverse(toShow);
+        }
+
+        // now that we have a specific list of values we want to show, display them
+        GATKReport report = new GATKReport();
+        final String tableName = "differences";
+        report.addTable(tableName, "Summarized differences between the master and test files. See http://www.broadinstitute.org/gatk/guide/article?id=1299 for more information", 3);
+        final GATKReportTable table = report.getTable(tableName);
+        table.addColumn("Difference");
+        table.addColumn("NumberOfOccurrences");
+        table.addColumn("ExampleDifference");
+        for ( final Difference diff : toShow ) {
+            final String key = diff.getPath();
+            table.addRowID(key, true);
+            table.set(key, "NumberOfOccurrences", diff.getCount());
+            table.set(key, "ExampleDifference", diff.valueDiffString());
+        }
+        GATKReport output = new GATKReport(table);
+        output.print(params.out);
+    }
+
+    protected static int longestCommonPostfix(String[] diffPath1, String[] diffPath2) {
+        int i = 0;
+        for ( ; i < diffPath1.length; i++ ) {
+            int j = diffPath1.length - i - 1;
+            if ( ! diffPath1[j].equals(diffPath2[j]) )
+                break;
+        }
+        return i;
+    }
+
+    /**
+     * parts is [A B C D]
+     * commonPostfixLength: how many parts are shared at the end, suppose its 2
+     * We want to create a string *.*.C.D
+     *
+     * @param parts the separated path values [above without .]
+     * @param commonPostfixLength
+     * @return
+     */
+    protected static String summarizedPath(String[] parts, int commonPostfixLength) {
+        int stop = parts.length - commonPostfixLength;
+        if ( stop > 0 ) parts = parts.clone();
+        for ( int i = 0; i < stop; i++ ) {
+            parts[i] = "*";
+        }
+        return Utils.join(".", parts);
+    }
+
+    // --------------------------------------------------------------------------------
+    //
+    // plugin manager
+    //
+    // --------------------------------------------------------------------------------
+
+    public void loadDiffableReaders() {
+        List<Class<? extends DiffableReader>> drClasses = new PluginManager<DiffableReader>( DiffableReader.class ).getPlugins();
+
+        logger.info("Loading diffable modules:");
+        for (Class<? extends DiffableReader> drClass : drClasses ) {
+            logger.info("\t" + drClass.getSimpleName());
+
+            try {
+                DiffableReader dr = drClass.newInstance();
+                readers.put(dr.getName(), dr);
+            } catch (InstantiationException e) {
+                throw new ReviewedGATKException("Unable to instantiate module '" + drClass.getSimpleName() + "'");
+            } catch (IllegalAccessException e) {
+                throw new ReviewedGATKException("Illegal access error when trying to instantiate '" + drClass.getSimpleName() + "'");
+            }
+        }
+    }
+
+    protected Map<String, DiffableReader> getReaders() {
+        return readers;
+    }
+
+    protected DiffableReader getReader(String name) {
+        return readers.get(name);
+    }
+
+    /**
+     * Returns a reader appropriate for this file, or null if no such reader exists
+     * @param file
+     * @return
+     */
+    public DiffableReader findReaderForFile(File file) {
+        for ( DiffableReader reader : readers.values() )
+            if (reader.canRead(file) )
+                return reader;
+
+        return null;
+    }
+
+    /**
+     * Returns true if reader appropriate for this file, or false if no such reader exists
+     * @param file
+     * @return
+     */
+    public boolean canRead(File file) {
+        return findReaderForFile(file) != null;
+    }
+
+
+    public DiffElement createDiffableFromFile(File file) {
+        return createDiffableFromFile(file, -1);
+    }
+
+    public DiffElement createDiffableFromFile(File file, int maxElementsToRead) {
+        DiffableReader reader = findReaderForFile(file);
+        if ( reader == null )
+            throw new UserException("Unsupported file type: " + file);
+        else
+            return reader.readFromFile(file, maxElementsToRead);
+    }
+
+    public static boolean simpleDiffFiles(File masterFile, File testFile, int maxElementsToRead, DiffEngine.SummaryReportParams params) {
+        DiffEngine diffEngine = new DiffEngine();
+
+        if ( diffEngine.canRead(masterFile) && diffEngine.canRead(testFile) ) {
+            DiffElement master = diffEngine.createDiffableFromFile(masterFile, maxElementsToRead);
+            DiffElement test = diffEngine.createDiffableFromFile(testFile, maxElementsToRead);
+            List<Difference> diffs = diffEngine.diff(master, test);
+            diffEngine.reportSummarizedDifferences(diffs, params);
+            return true;
+        } else {
+            return false;
+        }
+    }
+
+    public static class SummaryReportParams {
+        final PrintStream out;
+        final int maxItemsToDisplay;
+        final int maxCountOneItems;
+        final int minSumDiffToShow;
+        final int maxRawDiffsToSummarize;
+        final boolean doPairwise;
+        boolean descending = true;
+
+        public SummaryReportParams(PrintStream out,
+                                   int maxItemsToDisplay,
+                                   int maxCountOneItems,
+                                   int minSumDiffToShow,
+                                   int maxRawDiffsToSummarize,
+                                   final boolean doPairwise) {
+            this.out = out;
+            this.maxItemsToDisplay = maxItemsToDisplay;
+            this.maxCountOneItems = maxCountOneItems;
+            this.minSumDiffToShow = minSumDiffToShow;
+            this.maxRawDiffsToSummarize = maxRawDiffsToSummarize;
+            this.doPairwise = doPairwise;
+        }
+
+        public void setDescending(boolean descending) {
+            this.descending = descending;
+        }
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/diffengine/DiffNode.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/diffengine/DiffNode.java
new file mode 100644
index 0000000..f0e8476
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/diffengine/DiffNode.java
@@ -0,0 +1,249 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.diffengine;
+
+import com.google.java.contract.Requires;
+import org.broadinstitute.gatk.utils.Utils;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+
+import java.util.*;
+
+/**
+ * Created by IntelliJ IDEA.
+ * User: depristo
+ * Date: 7/4/11
+ * Time: 12:55 PM
+ *
+ * An interface that must be implemented to allow us to calculate differences
+ * between structured objects
+ */
+public class DiffNode extends DiffValue {
    // The superclass stores the node's payload as an opaque Object; for a DiffNode
    // it is always the name -> element map, so this unchecked cast is safe by
    // construction (only the constructors below ever set the value).
    private Map<String, DiffElement> getElementMap() {
        return (Map<String, DiffElement>)super.getValue();
    }
    // Fresh, mutable, empty element map for a new node.
    private static Map<String, DiffElement> emptyElements() { return new HashMap<String, DiffElement>(); }
+
    // Unbound node: the DiffElement binding is attached later via setBinding().
    private DiffNode(Map<String, DiffElement> elements) {
        super(elements);
    }
+
    // Node created already bound to its enclosing DiffElement.
    private DiffNode(DiffElement binding, Map<String, DiffElement> elements) {
        super(binding, elements);
    }
+
+    // ---------------------------------------------------------------------------
+    //
+    // constructors
+    //
+    // ---------------------------------------------------------------------------
+
    /**
     * Creates a new empty node named {@code name} attached directly to the shared ROOT element.
     */
    public static DiffNode rooted(String name) {
        return empty(name, DiffElement.ROOT);
    }
+
+    public static DiffNode empty(String name, DiffElement parent) {
+        DiffNode df = new DiffNode(emptyElements());
+        DiffElement elt = new DiffElement(name, parent, df);
+        df.setBinding(elt);
+        return df;
+    }
+
    /**
     * Creates a new empty node named {@code name} under the element that binds {@code parent}.
     */
    public static DiffNode empty(String name, DiffValue parent) {
        return empty(name, parent.getBinding());
    }
+
+    // ---------------------------------------------------------------------------
+    //
+    // accessors
+    //
+    // ---------------------------------------------------------------------------
+
    // A node is a compound value, never atomic.
    @Override
    public boolean isAtomic() { return false; }
+
    /**
     * The names of this node's child elements; a live view of the internal map's key set.
     */
    public Collection<String> getElementNames() {
        return getElementMap().keySet();
    }
+
    /**
     * This node's child elements; a live view of the internal map's values.
     */
    public Collection<DiffElement> getElements() {
        return getElementMap().values();
    }
+
+    private Collection<DiffElement> getElements(boolean atomicOnly) {
+        List<DiffElement> elts = new ArrayList<DiffElement>();
+        for ( DiffElement elt : getElements() )
+            if ( (atomicOnly && elt.getValue().isAtomic()) || (! atomicOnly && elt.getValue().isCompound()))
+                elts.add(elt);
+        return elts;
+    }
+
    /** @return the child elements whose values are atomic */
    public Collection<DiffElement> getAtomicElements() {
        return getElements(true);
    }
+
    /** @return the child elements whose values are compound (sub-nodes) */
    public Collection<DiffElement> getCompoundElements() {
        return getElements(false);
    }
+
    /**
     * Returns the element bound to name, or null if no such binding exists
     * @param name the child element name to look up
     * @return the bound element, or null when name is unbound in this node
     */
    public DiffElement getElement(String name) {
        return getElementMap().get(name);
    }
+
    /**
     * Returns true if name is bound in this node
     * @param name the child element name to test
     * @return whether a child with this name exists
     */
    public boolean hasElement(String name) {
        return getElement(name) != null;
    }
+
+    // ---------------------------------------------------------------------------
+    //
+    // add
+    //
+    // ---------------------------------------------------------------------------
+
+    /**
+     * Binds {@code elt} into this node by its name.
+     *
+     * @param elt element to add; its name must not already be bound here
+     * @throws IllegalArgumentException if an element with the same name is already bound
+     */
+    @Requires("elt != null")
+    public void add(DiffElement elt) {
+        if ( getElementMap().containsKey(elt.getName()) )
+            throw new IllegalArgumentException("Attempting to rebind already existing binding: " + elt + " node=" + this);
+        getElementMap().put(elt.getName(), elt);
+    }
+
+    /** Adds the binding of {@code elt}; the value must already be bound to an element. */
+    @Requires("elt != null")
+    public void add(DiffValue elt) {
+        add(elt.getBinding());
+    }
+
+    /** Adds every element of {@code elts}, in iteration order. */
+    @Requires("elts != null")
+    public void add(Collection<DiffElement> elts) {
+        for ( DiffElement e : elts )
+            add(e);
+    }
+
+    /** Wraps {@code value} in a new atomic DiffValue and binds it here under {@code name}. */
+    public void add(String name, Object value) {
+        add(new DiffElement(name, this.getBinding(), new DiffValue(value)));
+    }
+
+    /**
+     * Returns the total number of leaf values reachable from this node.
+     *
+     * @return the sum of the sizes of all contained elements
+     */
+    public int size() {
+        int total = 0;
+        for ( final DiffElement elt : getElements() ) {
+            total += elt.size();
+        }
+        return total;
+    }
+
+    // ---------------------------------------------------------------------------
+    //
+    // toString
+    //
+    // ---------------------------------------------------------------------------
+
+    /** Renders this node as a multi-line tree with no leading indentation. */
+    @Override
+    public String toString() {
+        return toString(0);
+    }
+
+    /**
+     * Renders this node as a multi-line tree.  The closing parenthesis is indented
+     * by {@code offset} spaces; atomic children render two spaces deeper and
+     * compound children four spaces deeper.
+     */
+    @Override
+    public String toString(int offset) {
+        final String closingIndent = offset > 0 ? Utils.dupString(' ', offset) : "";
+        final StringBuilder sb = new StringBuilder();
+
+        sb.append("(").append("\n");
+        for ( final DiffElement atomic : getAtomicElements() ) {
+            sb.append(atomic.toString(offset + 2)).append('\n');
+        }
+        for ( final DiffElement compound : getCompoundElements() ) {
+            sb.append(compound.toString(offset + 4)).append('\n');
+        }
+        sb.append(closingIndent).append(")").append("\n");
+
+        return sb.toString();
+    }
+
+    /** Renders this node and all of its elements on a single line: {@code (a b c)}. */
+    @Override
+    public String toOneLineString() {
+        final List<String> rendered = new ArrayList<String>();
+        for ( final DiffElement elt : getElements() )
+            rendered.add(elt.toOneLineString());
+        return "(" + Utils.join(" ", rendered) + ")";
+    }
+
+    // --------------------------------------------------------------------------------
+    //
+    // fromString and toOneLineString
+    //
+    // --------------------------------------------------------------------------------
+
+    /** Parses {@code tree} (e.g. {@code X=(A=A B=B)}) into a DiffElement rooted at {@code DiffElement.ROOT}. */
+    public static DiffElement fromString(String tree) {
+        return fromString(tree, DiffElement.ROOT);
+    }
+
+    /**
+     * Parses a one-line tree description such as {@code X=(A=A B=B C=(D=D))} into
+     * a DiffElement bound under {@code parent}.
+     *
+     * Doesn't support full tree structure parsing: subtrees are split on single
+     * spaces only, so values containing spaces are not supported.
+     *
+     * @param tree   the {@code name=value} text to parse
+     * @param parent the element the parsed element will be bound to
+     * @return the parsed element
+     */
+    private static DiffElement fromString(String tree, DiffElement parent) {
+        // X=(A=A B=B C=(D=D))
+        String[] parts = tree.split("=", 2);
+        if ( parts.length != 2 )
+            throw new ReviewedGATKException("Unexpected tree structure: " + tree);
+        String name = parts[0];
+        String value = parts[1];
+
+        if ( value.length() == 0 )
+            throw new ReviewedGATKException("Illegal tree structure: " + value + " at " + tree);
+
+        if ( value.charAt(0) == '(' ) {
+            // compound value: strip the surrounding parens and recursively parse
+            // each space-separated child into the new node
+            if ( ! value.endsWith(")") )
+                throw new ReviewedGATKException("Illegal tree structure.  Missing ): " + value + " at " + tree);
+            String subtree = value.substring(1, value.length()-1);
+            DiffNode rec = DiffNode.empty(name, parent);
+            String[] subParts = subtree.split(" ");
+            for ( String subPart : subParts ) {
+                rec.add(fromString(subPart, rec.getBinding()));
+            }
+            return rec.getBinding();
+        } else {
+            // atomic value: bind it directly under parent
+            return new DiffValue(name, parent, value).getBinding();
+        }
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/diffengine/DiffValue.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/diffengine/DiffValue.java
new file mode 100644
index 0000000..24f2a40
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/diffengine/DiffValue.java
@@ -0,0 +1,90 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.diffengine;
+
+/**
+ * Created by IntelliJ IDEA.
+ * User: depristo
+ * Date: 7/4/11
+ * Time: 12:55 PM
+ *
+ * A leaf value in the diff engine's tree of structured objects.  A DiffValue
+ * holds an arbitrary atomic value plus the DiffElement binding that names it
+ * and places it in the tree.  DiffNode subclasses this to represent compound
+ * (non-atomic) values.
+ */
+public class DiffValue {
+    // the name/parent binding of this value in the tree; may be assigned after construction
+    private DiffElement binding = null;
+    // the underlying atomic value; rendering assumes it is non-null
+    private final Object value;
+
+    public DiffValue(Object value) {
+        this.value = value;
+    }
+
+    public DiffValue(DiffElement binding, Object value) {
+        this.binding = binding;
+        this.value = value;
+    }
+
+    public DiffValue(DiffValue parent, Object value) {
+        this(parent.getBinding(), value);
+    }
+
+    /** Creates a value and simultaneously binds it under {@code parent} with the given name. */
+    public DiffValue(String name, DiffElement parent, Object value) {
+        this.binding = new DiffElement(name, parent, this);
+        this.value = value;
+    }
+
+    public DiffValue(String name, DiffValue parent, Object value) {
+        this(name, parent.getBinding(), value);
+    }
+
+    public DiffElement getBinding() {
+        return binding;
+    }
+
+    protected void setBinding(DiffElement binding) {
+        this.binding = binding;
+    }
+
+    public Object getValue() {
+        return value;
+    }
+
+    @Override
+    public String toString() {
+        return getValue().toString();
+    }
+
+    /** Offset is ignored for atomic values; they render the same at any depth. */
+    public String toString(int offset) {
+        return toString();
+    }
+
+    public String toOneLineString() {
+        return getValue().toString();
+    }
+
+    /** Atomic leaf: true here; DiffNode overrides this to return false. */
+    public boolean isAtomic() { return true; }
+    /** Compound is defined as the exact negation of atomic. */
+    public boolean isCompound() { return ! isAtomic(); }
+    /** An atomic value counts as a single leaf. */
+    public int size() { return 1; }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/diffengine/DiffableReader.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/diffengine/DiffableReader.java
new file mode 100644
index 0000000..aef5e8c
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/diffengine/DiffableReader.java
@@ -0,0 +1,66 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.diffengine;
+
+import com.google.java.contract.Ensures;
+import com.google.java.contract.Requires;
+
+import java.io.File;
+
+/**
+ * Created by IntelliJ IDEA.
+ * User: depristo
+ * Date: 7/4/11
+ * Time: 1:09 PM
+ *
+ * Interface for readers creating diffable objects from a file
+ */
+public interface DiffableReader {
+    /**
+     * Return the name of this DiffableReader type.  For example, the VCF reader returns 'VCF' and the
+     * bam reader 'BAM'
+     */
+    @Ensures("result != null")
+    public String getName();
+
+    /**
+     * Read up to maxElementsToRead DiffElements from file, and return them.
+     */
+    // NOTE(review): implementations in this package return null when the file cannot be
+    // parsed, which conflicts with @Ensures("result != null") -- confirm intended contract
+    @Ensures("result != null")
+    @Requires("file != null")
+    public DiffElement readFromFile(File file, int maxElementsToRead);
+
+    /**
+     * Return true if the file can be read into DiffElement objects with this reader. This should
+     * be uniquely true/false for all readers, as the system will use the first reader that can read the
+     * file.  This routine should never throw an exception.  The VCF reader, for example, looks at the
+     * first line of the file for the ##format=VCF4.1 header, and the BAM reader for the BAM_MAGIC value
+     * @param file the candidate file; never null
+     * @return true if this reader can parse the file
+     */
+    @Requires("file != null")
+    public boolean canRead(File file);
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/diffengine/Difference.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/diffengine/Difference.java
new file mode 100644
index 0000000..9f67fd4
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/diffengine/Difference.java
@@ -0,0 +1,137 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.diffengine;
+
+public class Difference implements Comparable<Difference> {
+    final String path; // X.Y.Z
+    final String[] parts;
+    int count = 1;
+    DiffElement master = null , test = null;
+
+    public Difference(String path) {
+        this.path = path;
+        this.parts = DiffEngine.diffNameToPath(path);
+    }
+
+    public Difference(DiffElement master, DiffElement test) {
+        this(createPath(master, test), master, test);
+    }
+
+    public Difference(String path, DiffElement master, DiffElement test) {
+        this(path);
+        this.master = master;
+        this.test = test;
+    }
+
+    public String[] getParts() {
+        return parts;
+    }
+
+    public void incCount() { count++; }
+
+    public int getCount() {
+        return count;
+    }
+
+    public void setCount(int count) {
+        this.count = count;
+    }
+
+    /**
+     * The fully qualified path object A.B.C etc
+     * @return
+     */
+    public String getPath() {
+        return path;
+    }
+
+    /**
+     * @return the length of the parts of this summary
+     */
+    public int length() {
+        return this.parts.length;
+    }
+
+    /**
+     * Returns true if the string parts matches this summary.  Matches are
+     * must be equal() everywhere where this summary isn't *.
+     * @param otherParts
+     * @return
+     */
+    public boolean matches(String[] otherParts) {
+        if ( otherParts.length != length() )
+            return false;
+
+        // TODO optimization: can start at right most non-star element
+        for ( int i = 0; i < length(); i++ ) {
+            String part = parts[i];
+            if ( ! part.equals("*") && ! part.equals(otherParts[i]) )
+                return false;
+        }
+
+        return true;
+    }
+
+    @Override
+    public String toString() {
+        return String.format("%s:%d:%s", getPath(), getCount(), valueDiffString());
+    }
+
+    @Override
+    public int compareTo(Difference other) {
+        // sort first highest to lowest count, then by lowest to highest path
+        int countCmp = Integer.valueOf(count).compareTo(other.count);
+        return countCmp != 0 ? -1 * countCmp : path.compareTo(other.path);
+    }
+
+    public String valueDiffString() {
+        if ( hasSpecificDifference() ) {
+            return String.format("%s!=%s", getOneLineString(master), getOneLineString(test));
+        } else {
+            return "N/A";
+        }
+    }
+
+    private static String createPath(DiffElement master, DiffElement test) {
+        return (master == null ? test : master).fullyQualifiedName();
+    }
+
+    private static String getOneLineString(DiffElement elt) {
+        return elt == null ? "MISSING" : elt.getValue().toOneLineString();
+    }
+
+    public boolean hasSpecificDifference() {
+        return master != null || test != null;
+    }
+
+    public DiffElement getMaster() {
+        return master;
+    }
+
+    public DiffElement getTest() {
+        return test;
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/diffengine/GATKReportDiffableReader.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/diffengine/GATKReportDiffableReader.java
new file mode 100644
index 0000000..948fa21
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/diffengine/GATKReportDiffableReader.java
@@ -0,0 +1,104 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.diffengine;
+
+import org.broadinstitute.gatk.utils.report.GATKReport;
+import org.broadinstitute.gatk.utils.report.GATKReportColumn;
+import org.broadinstitute.gatk.utils.report.GATKReportTable;
+
+import java.io.File;
+import java.io.FileReader;
+import java.io.IOException;
+
+
+/**
+ * DiffableReader implementation for GATKReport files: converts each table of a
+ * report into a compound DiffNode (one child node per column, one leaf per cell).
+ */
+
+// TODO Version check to be added at the report level
+
+public class GATKReportDiffableReader implements DiffableReader {
+    @Override
+    public String getName() {
+        return "GATKReport";
+    }
+
+    @Override
+    public DiffElement readFromFile(File file, int maxElementsToRead) {
+        DiffNode root = DiffNode.rooted(file.getName());
+        try {
+            // one line reads the whole thing into memory
+            GATKReport report = new GATKReport(file);
+
+            for (GATKReportTable table : report.getTables()) {
+                root.add(tableToNode(table, root));
+            }
+
+            return root.getBinding();
+        } catch (Exception e) {
+            // best-effort contract: any parse failure means "not diffable"
+            return null;
+        }
+    }
+
+    /** Converts one report table into a DiffNode rooted under {@code root}. */
+    private DiffNode tableToNode(GATKReportTable table, DiffNode root) {
+        DiffNode tableRoot = DiffNode.empty(table.getTableName(), root);
+
+        tableRoot.add("Description", table.getTableDescription());
+        tableRoot.add("NumberOfRows", table.getNumRows());
+
+        for ( GATKReportColumn column : table.getColumnInfo() ) {
+            DiffNode columnRoot = DiffNode.empty(column.getColumnName(), tableRoot);
+
+            columnRoot.add("Width", column.getColumnFormat().getWidth());
+            // NOTE: as the values are trimmed during parsing left/right alignment is not currently preserved
+            columnRoot.add("Displayable", true);
+
+            for ( int i = 0; i < table.getNumRows(); i++ ) {
+                String name = column.getColumnName() + (i+1);
+                columnRoot.add(name, table.get(i, column.getColumnName()).toString());
+            }
+
+            tableRoot.add(columnRoot);
+        }
+
+        return tableRoot;
+    }
+
+    @Override
+    public boolean canRead(File file) {
+        final String HEADER = GATKReport.GATKREPORT_HEADER_PREFIX;
+        final char[] buff = new char[HEADER.length()];
+        // try-with-resources guarantees the reader is closed even if read() throws,
+        // which the previous hand-rolled close missed
+        try (FileReader reader = new FileReader(file)) {
+            final int nRead = reader.read(buff, 0, HEADER.length());
+            // require a full header's worth of characters before comparing
+            return nRead == HEADER.length() && new String(buff).startsWith(HEADER);
+        } catch (IOException e) {
+            return false;
+        }
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/diffengine/VCFDiffableReader.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/diffengine/VCFDiffableReader.java
new file mode 100644
index 0000000..d5d305e
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/diffengine/VCFDiffableReader.java
@@ -0,0 +1,145 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.diffengine;
+
+import org.apache.log4j.Logger;
+import htsjdk.tribble.AbstractFeatureReader;
+import htsjdk.tribble.FeatureReader;
+import org.broadinstitute.gatk.utils.Utils;
+import htsjdk.variant.vcf.*;
+import htsjdk.variant.variantcontext.Genotype;
+import htsjdk.variant.variantcontext.VariantContext;
+
+import java.io.*;
+import java.util.Iterator;
+import java.util.Map;
+
+
+/**
+ * Created by IntelliJ IDEA.
+ * User: depristo
+ * Date: 7/4/11
+ * Time: 1:09 PM
+ *
+ * DiffableReader implementation for VCF files: one compound node per variant
+ * record, with leaves for the fixed fields, INFO attributes, and per-sample
+ * genotype fields.
+ */
+public class VCFDiffableReader implements DiffableReader {
+    private static Logger logger = Logger.getLogger(VCFDiffableReader.class);
+
+    @Override
+    public String getName() { return "VCF"; }
+
+    @Override
+    public DiffElement readFromFile(File file, int maxElementsToRead) {
+        DiffNode root = DiffNode.rooted(file.getName());
+        FeatureReader<VariantContext> reader = null;
+        try {
+            // read the version line from the file; close the reader even if readLine throws
+            final BufferedReader br = new BufferedReader(new FileReader(file));
+            try {
+                root.add("VERSION", br.readLine());
+            } finally {
+                br.close();
+            }
+
+            final VCFCodec vcfCodec = new VCFCodec();
+            vcfCodec.disableOnTheFlyModifications(); // must be read as state is stored in reader itself
+
+            reader = AbstractFeatureReader.getFeatureReader(file.getAbsolutePath(), vcfCodec, false);
+            VCFHeader header = (VCFHeader)reader.getHeader();
+            for ( VCFHeaderLine headerLine : header.getMetaDataInInputOrder() ) {
+                String key = headerLine.getKey();
+                if ( headerLine instanceof VCFIDHeaderLine)
+                    key += "_" + ((VCFIDHeaderLine) headerLine).getID();
+                if ( root.hasElement(key) )
+                    logger.warn("Skipping duplicate header line: file=" + file + " line=" + headerLine.toString());
+                else
+                    root.add(key, headerLine.toString());
+            }
+
+            int count = 0, nRecordsAtPos = 1;
+            String prevName = "";
+            Iterator<VariantContext> it = reader.iterator();
+            while ( it.hasNext() ) {
+                VariantContext vc = it.next();
+                // disambiguate multiple records at the same position with a _N suffix
+                String name = vc.getChr() + ":" + vc.getStart();
+                if ( name.equals(prevName) ) {
+                    name += "_" + ++nRecordsAtPos;
+                } else {
+                    prevName = name;
+                    nRecordsAtPos = 1; // reset so the next duplicate run restarts at _2
+                }
+                DiffNode vcRoot = DiffNode.empty(name, root);
+
+                // add fields
+                vcRoot.add("CHROM", vc.getChr());
+                vcRoot.add("POS", vc.getStart());
+                vcRoot.add("ID", vc.getID());
+                vcRoot.add("REF", vc.getReference());
+                vcRoot.add("ALT", vc.getAlternateAlleles());
+                vcRoot.add("QUAL", vc.hasLog10PError() ? vc.getLog10PError() * -10 : VCFConstants.MISSING_VALUE_v4);
+                vcRoot.add("FILTER", ! vc.filtersWereApplied() // needs null to differentiate between PASS and .
+                        ? VCFConstants.MISSING_VALUE_v4
+                        : ( vc.getFilters().isEmpty() ? VCFConstants.PASSES_FILTERS_v4 : vc.getFilters()) );
+
+                // add info fields, skipping internal (underscore-prefixed) attributes
+                for (Map.Entry<String, Object> attribute : vc.getAttributes().entrySet()) {
+                    if ( ! attribute.getKey().startsWith("_") )
+                        vcRoot.add(attribute.getKey(), attribute.getValue());
+                }
+
+                for (Genotype g : vc.getGenotypes() ) {
+                    DiffNode gRoot = DiffNode.empty(g.getSampleName(), vcRoot);
+                    gRoot.add("GT", g.getGenotypeString());
+                    if ( g.hasGQ() ) gRoot.add("GQ", g.getGQ() );
+                    if ( g.hasDP() ) gRoot.add("DP", g.getDP() );
+                    if ( g.hasAD() ) gRoot.add("AD", Utils.join(",", g.getAD()));
+                    if ( g.hasPL() ) gRoot.add("PL", Utils.join(",", g.getPL()));
+                    if ( g.getFilters() != null ) gRoot.add("FT", g.getFilters());
+
+                    for (Map.Entry<String, Object> attribute : g.getExtendedAttributes().entrySet()) {
+                        if ( ! attribute.getKey().startsWith("_") )
+                            gRoot.add(attribute.getKey(), attribute.getValue());
+                    }
+
+                    vcRoot.add(gRoot);
+                }
+
+                root.add(vcRoot);
+                count += vcRoot.size();
+                if ( count > maxElementsToRead && maxElementsToRead != -1)
+                    break;
+            }
+        } catch ( IOException e ) {
+            // best-effort contract: unreadable file means "not diffable"
+            return null;
+        } finally {
+            // previously the FeatureReader leaked on every exception path
+            closeQuietly(reader);
+        }
+
+        return root.getBinding();
+    }
+
+    /** Closes {@code reader} if non-null, logging rather than propagating close() failures. */
+    private static void closeQuietly(final FeatureReader<VariantContext> reader) {
+        if ( reader != null ) {
+            try {
+                reader.close();
+            } catch ( IOException e ) {
+                logger.warn("Error closing VCF feature reader: " + e.getMessage());
+            }
+        }
+    }
+
+    @Override
+    public boolean canRead(File file) {
+        return AbstractVCFCodec.canDecodeFile(file.getPath(), VCFCodec.VCF4_MAGIC_HEADER);
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/downsampling/AlleleBiasedDownsamplingUtils.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/downsampling/AlleleBiasedDownsamplingUtils.java
new file mode 100644
index 0000000..37810ba
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/downsampling/AlleleBiasedDownsamplingUtils.java
@@ -0,0 +1,369 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.downsampling;
+
+import org.apache.log4j.Logger;
+import org.broadinstitute.gatk.utils.BaseUtils;
+import org.broadinstitute.gatk.utils.MathUtils;
+import org.broadinstitute.gatk.utils.collections.DefaultHashMap;
+import org.broadinstitute.gatk.utils.exceptions.GATKException;
+import org.broadinstitute.gatk.utils.exceptions.UserException;
+import org.broadinstitute.gatk.utils.pileup.PileupElement;
+import org.broadinstitute.gatk.utils.pileup.ReadBackedPileup;
+import org.broadinstitute.gatk.utils.pileup.ReadBackedPileupImpl;
+import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
+import org.broadinstitute.gatk.utils.text.XReadLines;
+import htsjdk.variant.variantcontext.Allele;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.*;
+
+/**
+ * Utility methods for allele-biased ("smart") downsampling of pileups and per-allele read
+ * lists, e.g. to compensate for per-sample contamination fractions (see
+ * {@link #loadContaminationFile}).  Reads are preferentially removed so that the surviving
+ * allele counts resemble a clean het or hom configuration (see scoreAlleleCounts).
+ */
+public class AlleleBiasedDownsamplingUtils {
+
+    // define this class so that we can use Java generics below
+    private final static class PileupElementList extends ArrayList<PileupElement> {}
+
+    /**
+     * Computes an allele biased version of the given pileup
+     *
+     * @param pileup                    the original pileup
+     * @param downsamplingFraction      the fraction of total reads to remove per allele
+     * @return allele biased pileup
+     */
+    public static ReadBackedPileup createAlleleBiasedBasePileup(final ReadBackedPileup pileup, final double downsamplingFraction) {
+        // special case removal of all or no reads
+        if ( downsamplingFraction <= 0.0 )
+            return pileup;
+        if ( downsamplingFraction >= 1.0 )
+            return new ReadBackedPileupImpl(pileup.getLocation(), new ArrayList<PileupElement>());
+
+        // one bucket per base (A/C/G/T)
+        final PileupElementList[] alleleStratifiedElements = new PileupElementList[4];
+        for ( int i = 0; i < 4; i++ )
+            alleleStratifiedElements[i] = new PileupElementList();
+
+        // start by stratifying the reads by the alleles they represent at this position
+        // (non-ACGT bases return -1 from simpleBaseToBaseIndex and are ignored)
+        for ( final PileupElement pe : pileup ) {
+            final int baseIndex = BaseUtils.simpleBaseToBaseIndex(pe.getBase());
+            if ( baseIndex != -1 )
+                alleleStratifiedElements[baseIndex].add(pe);
+        }
+
+        // make a listing of allele counts and calculate the total count
+        final int[] alleleCounts = calculateAlleleCounts(alleleStratifiedElements);
+        final int totalAlleleCount = (int)MathUtils.sum(alleleCounts);
+
+        // do smart down-sampling
+        final int numReadsToRemove = (int)(totalAlleleCount * downsamplingFraction); // floor
+        final int[] targetAlleleCounts = runSmartDownsampling(alleleCounts, numReadsToRemove);
+
+        // NOTE(review): set membership relies on PileupElement equals()/hashCode(); assumed
+        // identity-like semantics here — confirm against PileupElement's implementation.
+        final HashSet<PileupElement> readsToRemove = new HashSet<PileupElement>(numReadsToRemove);
+        for ( int i = 0; i < 4; i++ ) {
+            final PileupElementList alleleList = alleleStratifiedElements[i];
+            // if we don't need to remove any reads, then don't
+            if ( alleleCounts[i] > targetAlleleCounts[i] )
+                readsToRemove.addAll(downsampleElements(alleleList, alleleCounts[i], alleleCounts[i] - targetAlleleCounts[i]));
+        }
+
+        // we need to keep the reads sorted because the FragmentUtils code will expect them in coordinate order and will fail otherwise
+        final List<PileupElement> readsToKeep = new ArrayList<PileupElement>(totalAlleleCount - numReadsToRemove);
+        for ( final PileupElement pe : pileup ) {
+            if ( !readsToRemove.contains(pe) ) {
+                readsToKeep.add(pe);
+            }
+        }
+
+        return new ReadBackedPileupImpl(pileup.getLocation(), new ArrayList<PileupElement>(readsToKeep));
+    }
+
+    /**
+     * Calculates actual allele counts for each allele (which can be different than the list size when reduced reads are present)
+     *
+     * @param alleleStratifiedElements       pileup elements stratified by allele
+     * @return non-null int array representing allele counts
+     */
+    private static int[] calculateAlleleCounts(final PileupElementList[] alleleStratifiedElements) {
+        final int[] alleleCounts = new int[alleleStratifiedElements.length];
+        for ( int i = 0; i < alleleStratifiedElements.length; i++ ) {
+            alleleCounts[i] = alleleStratifiedElements[i].size();
+        }
+        return alleleCounts;
+    }
+
+    /**
+     * Scores an allele-count configuration; LOWER is better.  A clean het (two equal
+     * counts and nothing else) or a clean hom (non-max counts all zero) scores 0.
+     *
+     * @param alleleCounts per-allele counts
+     * @return the score, or 0 when fewer than 2 alleles are present
+     */
+    private static int scoreAlleleCounts(final int[] alleleCounts) {
+        if ( alleleCounts.length < 2 )
+            return 0;
+
+        // sort the counts (in ascending order)
+        final int[] alleleCountsCopy = alleleCounts.clone();
+        Arrays.sort(alleleCountsCopy);
+
+        final int maxCount = alleleCountsCopy[alleleCounts.length - 1];
+        final int nextBestCount = alleleCountsCopy[alleleCounts.length - 2];
+
+        int remainderCount = 0;
+        for ( int i = 0; i < alleleCounts.length - 2; i++ )
+            remainderCount += alleleCountsCopy[i];
+
+        // try to get the best score:
+        //    - in the het case the counts should be equal with nothing else
+        //    - in the hom case the non-max should be zero
+        // NOTE(review): nextBestCount and remainderCount are both non-negative here, so the
+        // Math.abs below looks redundant — confirm before simplifying.
+        return Math.min(maxCount - nextBestCount + remainderCount, Math.abs(nextBestCount + remainderCount));
+    }
+
+    /**
+     * Computes an allele biased version of the allele counts for a given pileup
+     *
+     * @param alleleCounts              the allele counts for the original pileup
+     * @param numReadsToRemove          number of total reads to remove per allele
+     * @return non-null array of new counts needed per allele
+     */
+    protected static int[] runSmartDownsampling(final int[] alleleCounts, final int numReadsToRemove) {
+        final int numAlleles = alleleCounts.length;
+
+        // NOTE(review): despite the name, maxScore tracks the MINIMUM (i.e. best) score seen so far
+        int maxScore = scoreAlleleCounts(alleleCounts);
+        int[] alleleCountsOfMax = alleleCounts;
+
+        final int numReadsToRemovePerAllele = numReadsToRemove / 2;
+
+        // try removing all reads from a single allele (i == j) or splitting the removal
+        // evenly across each pair of alleles (i != j), keeping the best-scoring candidate
+        for ( int i = 0; i < numAlleles; i++ ) {
+            for ( int j = i; j < numAlleles; j++ ) {
+                final int[] newCounts = alleleCounts.clone();
+
+                // split these cases so we don't lose on the floor (since we divided by 2)
+                if ( i == j ) {
+                    newCounts[i] = Math.max(0, newCounts[i] - numReadsToRemove);
+                } else {
+                    newCounts[i] = Math.max(0, newCounts[i] - numReadsToRemovePerAllele);
+                    newCounts[j] = Math.max(0, newCounts[j] - numReadsToRemovePerAllele);
+                }
+
+                final int score = scoreAlleleCounts(newCounts);
+
+                // keep the candidate with the lowest (best) score
+                if ( score < maxScore ) {
+                    maxScore = score;
+                    alleleCountsOfMax = newCounts;
+                }
+            }
+        }
+
+        return alleleCountsOfMax;
+    }
+
+    /**
+     * Performs allele biased down-sampling on a pileup and computes the list of elements to remove
+     *
+     * @param elements                  original list of pileup elements
+     * @param originalElementCount      original count of elements (taking reduced reads into account)
+     * @param numElementsToRemove       the number of records to remove
+     * @return the list of pileup elements TO REMOVE
+     */
+    protected static List<PileupElement> downsampleElements(final List<PileupElement> elements, final int originalElementCount, final int numElementsToRemove) {
+        // are there no elements to remove?
+        if ( numElementsToRemove == 0 )
+            return Collections.<PileupElement>emptyList();
+
+        final ArrayList<PileupElement> elementsToRemove = new ArrayList<PileupElement>(numElementsToRemove);
+
+        // should we remove all of the elements?
+        if ( numElementsToRemove >= originalElementCount ) {
+            elementsToRemove.addAll(elements);
+            return elementsToRemove;
+        }
+
+        // create a bitset describing which elements to remove
+        final BitSet itemsToRemove = new BitSet(originalElementCount);
+        for ( final Integer selectedIndex : MathUtils.sampleIndicesWithoutReplacement(originalElementCount, numElementsToRemove) ) {
+            itemsToRemove.set(selectedIndex);
+        }
+
+        // walk the elements in order, collecting those whose index was randomly selected
+        int currentBitSetIndex = 0;
+        for ( final PileupElement element : elements ) {
+            if ( itemsToRemove.get(currentBitSetIndex++) ) {
+                elementsToRemove.add(element);
+            }
+        }
+
+        return elementsToRemove;
+    }
+
+    /**
+     * Computes reads to remove based on an allele biased down-sampling
+     *
+     * @param alleleReadMap             original list of records per allele
+     * @param downsamplingFraction      the fraction of total reads to remove per allele
+     * @return list of reads TO REMOVE from allele biased down-sampling
+     */
+    public static <A extends Allele> List<GATKSAMRecord> selectAlleleBiasedReads(final Map<A, List<GATKSAMRecord>> alleleReadMap, final double downsamplingFraction) {
+        // NOTE(review): totalReads includes reads in the NO_CALL bin, yet NO_CALL is excluded
+        // from the per-allele counts below — confirm this asymmetry is intended.
+        int totalReads = 0;
+        for ( final List<GATKSAMRecord> reads : alleleReadMap.values() )
+            totalReads += reads.size();
+
+        int numReadsToRemove = (int)(totalReads * downsamplingFraction);
+
+        // make a listing of allele counts
+        final List<Allele> alleles = new ArrayList<Allele>(alleleReadMap.keySet());
+        alleles.remove(Allele.NO_CALL);    // ignore the no-call bin
+        final int numAlleles = alleles.size();
+
+        final int[] alleleCounts = new int[numAlleles];
+        for ( int i = 0; i < numAlleles; i++ )
+            alleleCounts[i] = alleleReadMap.get(alleles.get(i)).size();
+
+        // do smart down-sampling
+        final int[] targetAlleleCounts = runSmartDownsampling(alleleCounts, numReadsToRemove);
+
+        final List<GATKSAMRecord> readsToRemove = new ArrayList<GATKSAMRecord>(numReadsToRemove);
+        for ( int i = 0; i < numAlleles; i++ ) {
+            // only alleles whose count exceeds their target contribute reads to remove
+            if ( alleleCounts[i] > targetAlleleCounts[i] ) {
+                readsToRemove.addAll(downsampleElements(alleleReadMap.get(alleles.get(i)), alleleCounts[i] - targetAlleleCounts[i]));
+            }
+        }
+
+        return readsToRemove;
+    }
+
+    /**
+     * Performs allele biased down-sampling on a pileup and computes the list of elements to remove
+     *
+     * @param reads                     original list of records
+     * @param numElementsToRemove       the number of records to remove
+     * @return the list of pileup elements TO REMOVE
+     */
+    protected static List<GATKSAMRecord> downsampleElements(final List<GATKSAMRecord> reads, final int numElementsToRemove) {
+        // are there no elements to remove?
+        if ( numElementsToRemove == 0 )
+            return Collections.<GATKSAMRecord>emptyList();
+
+        final ArrayList<GATKSAMRecord> elementsToRemove = new ArrayList<GATKSAMRecord>(numElementsToRemove);
+        final int originalElementCount = reads.size();
+
+        // should we remove all of the elements?
+        if ( numElementsToRemove >= originalElementCount ) {
+            elementsToRemove.addAll(reads);
+            return elementsToRemove;
+        }
+
+        // create a bitset describing which elements to remove
+        final BitSet itemsToRemove = new BitSet(originalElementCount);
+        for ( final Integer selectedIndex : MathUtils.sampleIndicesWithoutReplacement(originalElementCount, numElementsToRemove) ) {
+            itemsToRemove.set(selectedIndex);
+        }
+
+        // walk the reads in order, collecting those whose index was randomly selected
+        int currentBitSetIndex = 0;
+        for ( final GATKSAMRecord read : reads ) {
+            if ( itemsToRemove.get(currentBitSetIndex++) )
+                elementsToRemove.add(read);
+        }
+
+        return elementsToRemove;
+    }
+
+    /**
+     * Create sample-contamination maps from file
+     *
+     * @param ContaminationFractionFile    Filename containing two columns: SampleID and Contamination
+     * @param defaultContaminationFraction contamination fraction the returned map reports for samples not listed in the file
+     * @param AvailableSampleIDs           Set of Samples of interest (no reason to include every sample in file) or null to turn off checking
+     * @param logger                       for logging output
+     * @return sample-contamination Map
+     * @throws GATKException on I/O errors; malformed file content raises UserException.MalformedFile
+     */
+
+    public static DefaultHashMap<String, Double> loadContaminationFile(File ContaminationFractionFile, final Double defaultContaminationFraction, final Set<String> AvailableSampleIDs, Logger logger) throws GATKException {
+        DefaultHashMap<String, Double> sampleContamination = new DefaultHashMap<String, Double>(defaultContaminationFraction);
+        // starts empty: the freshly-constructed map has no explicit keys yet
+        Set<String> nonSamplesInContaminationFile = new HashSet<String>(sampleContamination.keySet());
+        try {
+
+            XReadLines reader = new XReadLines(ContaminationFractionFile, true);
+            for (String line : reader) {
+
+                // skip blank lines
+                if (line.length() == 0) {
+                    continue;
+                }
+
+                StringTokenizer st = new StringTokenizer(line,"\t");
+
+                String fields[] = new String[2];
+                try {
+                    fields[0] = st.nextToken();
+                    fields[1] = st.nextToken();
+                } catch(NoSuchElementException e){
+                    throw new UserException.MalformedFile("Contamination file must have exactly two, tab-delimited columns. Offending line:\n" + line);
+                }
+                if(st.hasMoreTokens()) {
+                    throw new UserException.MalformedFile("Contamination file must have exactly two, tab-delimited columns. Offending line:\n" + line);
+                }
+
+                if (fields[0].length() == 0 || fields[1].length() == 0) {
+                    throw new UserException.MalformedFile("Contamination file can not have empty strings in either column. Offending line:\n" + line);
+                }
+
+                // NOTE(review): this only detects duplicates among entries actually added to the
+                // map below; duplicated entries filtered out as non-samples are not caught.
+                if (sampleContamination.containsKey(fields[0])) {
+                    throw new UserException.MalformedFile("Contamination file contains duplicate entries for input name " + fields[0]);
+                }
+
+                try {
+                    final Double contamination = Double.valueOf(fields[1]);
+                    if (contamination < 0 || contamination > 1){
+                        throw new UserException.MalformedFile("Contamination file contains unacceptable contamination value (must be 0<=x<=1): " + line);
+                    }
+                    if (AvailableSampleIDs==null || AvailableSampleIDs.contains(fields[0])) {// only add samples if they are in the sampleSet (or if it is null)
+                        sampleContamination.put(fields[0], contamination);
+                    }
+                    else {
+                        nonSamplesInContaminationFile.add(fields[0]);
+                    }
+                } catch (NumberFormatException e) {
+                    throw new UserException.MalformedFile("Contamination file contains unparsable double in the second field. Offending line: " + line);
+                }
+            }
+
+
+            //output to the user info lines telling which samples are in the Contamination File
+            if (sampleContamination.size() > 0) {
+                logger.info(String.format("The following samples were found in the Contamination file and will be processed at the contamination level therein: %s", sampleContamination.keySet().toString()));
+
+                //output to the user info lines telling which samples are NOT in the Contamination File
+                if(AvailableSampleIDs!=null){
+                    Set<String> samplesNotInContaminationFile = new HashSet<String>(AvailableSampleIDs);
+                    samplesNotInContaminationFile.removeAll(sampleContamination.keySet());
+                    if (samplesNotInContaminationFile.size() > 0)
+                        logger.info(String.format("The following samples were NOT found in the Contamination file and will be processed at the default contamination level: %s", samplesNotInContaminationFile.toString()));
+                }
+            }
+
+            //output to the user Samples that do not have lines in the Contamination File
+            if (nonSamplesInContaminationFile.size() > 0) {
+                logger.info(String.format("The following entries were found in the Contamination file but were not SAMPLEIDs. They will be ignored: %s", nonSamplesInContaminationFile.toString()));
+            }
+
+            return sampleContamination;
+
+        } catch (IOException e) {
+            throw new GATKException("I/O Error while reading sample-contamination file " + ContaminationFractionFile.getName() + ": " + e.getMessage());
+        }
+
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/downsampling/DownsampleType.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/downsampling/DownsampleType.java
new file mode 100644
index 0000000..41b59cc
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/downsampling/DownsampleType.java
@@ -0,0 +1,39 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.downsampling;
+
+/**
+ * Type of downsampling method to invoke.
+ *
+ * @author hanna
+ * @version 0.1
+ */
+
+public enum DownsampleType {
+    NONE,       // no downsampling
+    ALL_READS,  // per name: downsample over all reads pooled together — confirm with engine usage
+    BY_SAMPLE   // per name: downsample each sample's reads independently — confirm with engine usage
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/downsampling/Downsampler.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/downsampling/Downsampler.java
new file mode 100644
index 0000000..9104094
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/downsampling/Downsampler.java
@@ -0,0 +1,161 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.downsampling;
+
+import java.util.Collection;
+import java.util.List;
+
+/**
+ * The basic downsampler API, with no reads-specific operations.
+ *
+ * Downsamplers that extend this class rather than the ReadsDownsampler class can handle
+ * any kind of item, however they cannot be wrapped within a DownsamplingReadsIterator or a
+ * PerSampleDownsamplingReadsIterator.
+ *
+ * @author David Roazen
+ */
+public abstract class Downsampler<T> {
+
+    /**
+     * Number of items discarded by this downsampler since the last call to resetStats()
+     */
+    protected int numDiscardedItems = 0;
+
+    /**
+     * Submit one item to the downsampler for consideration. Some downsamplers will be able to determine
+     * immediately whether the item survives the downsampling process, while others will need to see
+     * more items before making that determination.
+     *
+     * @param item the individual item to submit to the downsampler for consideration
+     */
+    public abstract void submit( final T item );
+
+    /**
+     * Submit a collection of items to the downsampler for consideration. Should be equivalent to calling
+     * submit() on each individual item in the collection.
+     *
+     * @param items the collection of items to submit to the downsampler for consideration
+     * @throws IllegalArgumentException if items is null
+     */
+    public void submit( final Collection<T> items ) {
+        if ( items == null ) {
+            throw new IllegalArgumentException("submitted items must not be null");
+        }
+
+        for ( final T item : items ) {
+            submit(item);
+        }
+    }
+
+    /**
+     * Are there items that have survived the downsampling process waiting to be retrieved?
+     *
+     * @return true if this downsampler has > 0 finalized items, otherwise false
+     */
+    public abstract boolean hasFinalizedItems();
+
+    /**
+     * Return (and *remove*) all items that have survived downsampling and are waiting to be retrieved.
+     *
+     * @return a list of all finalized items this downsampler contains, or an empty list if there are none
+     */
+    public abstract List<T> consumeFinalizedItems();
+
+    /**
+     * Are there items stored in this downsampler that it doesn't yet know whether they will
+     * ultimately survive the downsampling process?
+     *
+     * @return true if this downsampler has > 0 pending items, otherwise false
+     */
+    public abstract boolean hasPendingItems();
+
+    /**
+     * Peek at the first finalized item stored in this downsampler (or null if there are no finalized items)
+     *
+     * @return the first finalized item in this downsampler (the item is not removed from the downsampler by this call),
+     *         or null if there are none
+     */
+    public abstract T peekFinalized();
+
+    /**
+     * Peek at the first pending item stored in this downsampler (or null if there are no pending items)
+     *
+     * @return the first pending item stored in this downsampler (the item is not removed from the downsampler by this call),
+     *         or null if there are none
+     */
+    public abstract T peekPending();
+
+    /**
+     * Get the current number of items in this downsampler
+     *
+     * This should be the best estimate of the total number of elements that will come out of the downsampler
+     * were consumeFinalizedItems() to be called immediately after this call.  In other words it should
+     * be number of finalized items + estimate of number of pending items that will ultimately be included as well.
+     *
+     * @return a non-negative integer (zero when the downsampler holds no items)
+     */
+    public abstract int size();
+
+    /**
+     * Returns the number of items discarded (so far) during the downsampling process
+     *
+     * @return the number of items that have been submitted to this downsampler and discarded in the process of
+     *         downsampling
+     */
+    public int getNumberOfDiscardedItems() {
+        return numDiscardedItems;
+    }
+
+    /**
+     * Used to tell the downsampler that no more items will be submitted to it, and that it should
+     * finalize any pending items.
+     */
+    public abstract void signalEndOfInput();
+
+    /**
+     * Empty the downsampler of all finalized/pending items
+     */
+    public abstract void clearItems();
+
+    /**
+     * Reset stats in the downsampler such as the number of discarded items *without* clearing the downsampler of items
+     */
+    public void resetStats() {
+        numDiscardedItems = 0;
+    }
+
+    /**
+     * Indicates whether an item should be excluded from elimination during downsampling.
+     * This default implementation excludes nothing (always returns false); individual
+     * downsamplers may override to protect certain items (e.g. reduced reads) from
+     * removal. Downsamplers should check the return value of this method before
+     * discarding an item.
+     *
+     * @param item The item to test
+     * @return true if the item should not be subject to elimination during downsampling, otherwise false
+     */
+    protected boolean doNotDiscardItem( final Object item ) {
+        return false;
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/downsampling/DownsamplingMethod.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/downsampling/DownsamplingMethod.java
new file mode 100644
index 0000000..569eb70
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/downsampling/DownsamplingMethod.java
@@ -0,0 +1,121 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.downsampling;
+
+import org.broadinstitute.gatk.utils.exceptions.UserException;
+
+/**
+ * Describes the method for downsampling reads at a given locus.
+ */
+
+public class DownsamplingMethod {
+    /**
+     * Type of downsampling to perform.
+     */
+    public final DownsampleType type;
+
+    /**
+     * Actual downsampling target is specified as an integer number of reads.
+     */
+    public final Integer toCoverage;
+
+    /**
+     * Actual downsampling target is specified as a fraction of total available reads.
+     */
+    public final Double toFraction;
+
+    /**
+     * Expresses no downsampling applied at all.
+     */
+    public static final DownsamplingMethod NONE = new DownsamplingMethod(DownsampleType.NONE, null, null);
+
+    /**
+     * Default type to use if no type is specified
+     */
+    public static final DownsampleType DEFAULT_DOWNSAMPLING_TYPE = DownsampleType.BY_SAMPLE;
+
+    /**
+     * Don't allow dcov values below this threshold for locus-based traversals (ie., Locus
+     * and ActiveRegion walkers), as they can result in problematic downsampling artifacts
+     */
+    public static final int MINIMUM_SAFE_COVERAGE_TARGET_FOR_LOCUS_BASED_TRAVERSALS = 200;
+
+
+    /**
+     * Constructs and validates a downsampling specification.
+     *
+     * @param type        downsampling type; null selects DEFAULT_DOWNSAMPLING_TYPE (BY_SAMPLE)
+     * @param toCoverage  target coverage in reads, or null; silently discarded when type is NONE
+     * @param toFraction  target fraction of total reads, or null; silently discarded when type is NONE
+     */
+    public DownsamplingMethod( DownsampleType type, Integer toCoverage, Double toFraction ) {
+        this.type = type != null ? type : DEFAULT_DOWNSAMPLING_TYPE;
+
+        // NONE nulls out any targets the caller passed in
+        if ( type == DownsampleType.NONE ) {
+            this.toCoverage = null;
+            this.toFraction = null;
+        }
+        else {
+            this.toCoverage = toCoverage;
+            this.toFraction = toFraction;
+        }
+
+        validate();
+    }
+
+    // Sanity-checks the type/toCoverage/toFraction combination; throws UserException on bad input.
+    private void validate() {
+        // Can't leave toFraction and toCoverage null unless type is NONE
+        if ( type != DownsampleType.NONE && toFraction == null && toCoverage == null )
+            throw new UserException("Must specify either toFraction or toCoverage when downsampling.");
+
+        // Fraction and coverage cannot both be specified.
+        if ( toFraction != null && toCoverage != null )
+            throw new UserException("Downsampling coverage and fraction are both specified. Please choose only one.");
+
+        // toCoverage must be > 0 when specified
+        if ( toCoverage != null && toCoverage <= 0 ) {
+            throw new UserException("toCoverage must be > 0 when downsampling to coverage");
+        }
+
+        // toFraction must be >= 0.0 and <= 1.0 when specified
+        if ( toFraction != null && (toFraction < 0.0 || toFraction > 1.0) ) {
+            throw new UserException("toFraction must be >= 0.0 and <= 1.0 when downsampling to a fraction of reads");
+        }
+    }
+
+    // Human-readable summary of the settings.
+    // NOTE(review): missing @Override annotation (adding it would be a code change; left as-is).
+    public String toString() {
+        StringBuilder builder = new StringBuilder("Downsampling Settings: ");
+
+        if ( type == DownsampleType.NONE ) {
+            builder.append("No downsampling");
+        }
+        else {
+            builder.append(String.format("Method: %s, ", type));
+
+            if ( toCoverage != null ) {
+                builder.append(String.format("Target Coverage: %d", toCoverage));
+            }
+            else {
+                builder.append(String.format("Target Fraction: %.2f", toFraction));
+            }
+        }
+
+        return builder.toString();
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/downsampling/DownsamplingReadsIterator.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/downsampling/DownsamplingReadsIterator.java
new file mode 100644
index 0000000..0435e7c
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/downsampling/DownsamplingReadsIterator.java
@@ -0,0 +1,116 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.downsampling;
+
+import htsjdk.samtools.SAMRecord;
+import org.broadinstitute.gatk.utils.iterators.GATKSAMIterator;
+
+import java.util.Collection;
+import java.util.Iterator;
+import java.util.NoSuchElementException;
+
+
+/**
+ * GATKSAMIterator wrapper around our generic reads downsampler interface. Converts the push-style
+ * downsampler interface to a pull model.
+ *
+ * @author David Roazen
+ */
+public class DownsamplingReadsIterator implements GATKSAMIterator {
+
+    private GATKSAMIterator nestedSAMIterator;
+    private ReadsDownsampler<SAMRecord> downsampler;
+    private Collection<SAMRecord> downsampledReadsCache;
+    private SAMRecord nextRead = null;
+    private Iterator<SAMRecord> downsampledReadsCacheIterator = null;
+
+    /**
+     * @param iter wrapped iterator from which this iterator will pull reads
+     * @param downsampler downsampler through which the reads will be fed
+     */
+    public DownsamplingReadsIterator( GATKSAMIterator iter, ReadsDownsampler<SAMRecord> downsampler ) {
+        nestedSAMIterator = iter;
+        this.downsampler = downsampler;
+
+        advanceToNextRead();
+    }
+
+    public boolean hasNext() {
+        return nextRead != null;
+    }
+
+    public SAMRecord next() {
+        if ( nextRead == null ) {
+            throw new NoSuchElementException("next() called when there are no more items");
+        }
+
+        SAMRecord toReturn = nextRead;
+        advanceToNextRead();
+
+        return toReturn;
+    }
+
+    private void advanceToNextRead() {
+        if ( ! readyToReleaseReads() && ! fillDownsampledReadsCache() ) {
+            nextRead = null;
+        }
+        else {
+            nextRead = downsampledReadsCacheIterator.next();
+        }
+    }
+
+    private boolean readyToReleaseReads() {
+        return downsampledReadsCacheIterator != null && downsampledReadsCacheIterator.hasNext();
+    }
+
+    private boolean fillDownsampledReadsCache() {
+        while ( nestedSAMIterator.hasNext() && ! downsampler.hasFinalizedItems() ) {
+            downsampler.submit(nestedSAMIterator.next());
+        }
+
+        if ( ! nestedSAMIterator.hasNext() ) {
+            downsampler.signalEndOfInput();
+        }
+
+        // use returned collection directly rather than make a copy, for speed
+        downsampledReadsCache = downsampler.consumeFinalizedItems();
+        downsampledReadsCacheIterator = downsampledReadsCache.iterator();
+
+        return downsampledReadsCacheIterator.hasNext();
+    }
+
+    public void remove() {
+        throw new UnsupportedOperationException("Can not remove records from a SAM file via an iterator!");
+    }
+
+    public void close() {
+        nestedSAMIterator.close();
+    }
+
+    public Iterator<SAMRecord> iterator() {
+        return this;
+    }
+}
\ No newline at end of file
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/downsampling/DownsamplingUtils.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/downsampling/DownsamplingUtils.java
new file mode 100644
index 0000000..d1a895b
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/downsampling/DownsamplingUtils.java
@@ -0,0 +1,107 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.downsampling;
+
+import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
+import org.broadinstitute.gatk.utils.sam.ReadUtils;
+
+import java.util.*;
+
/**
 * Utilities for using the downsamplers for common tasks
 *
 * User: depristo
 * Date: 3/6/13
 * Time: 4:26 PM
 */
public class DownsamplingUtils {
    // Static utility class: private constructor prevents instantiation
    private DownsamplingUtils() { }

    /**
     * Level the coverage of the reads in each sample to no more than downsampleTo reads, not reducing
     * coverage at any read start to less than minReadsPerAlignmentStart
     *
     * This algorithm can be used to handle the situation where you have lots of coverage in some interval, and
     * want to reduce the coverage of the big peak down without removing the many reads at the edge of this
     * interval that are in fact good
     *
     * This algorithm separately operates on the reads for each sample independently.
     *
     * @param reads a sorted list of reads
     * @param downsampleTo the targeted number of reads we want from reads per sample
     * @param minReadsPerAlignmentStart don't reduce the number of reads starting at a specific alignment start
     *                                  to below this.  That is, if this value is 2, we'll never reduce the number
     *                                  of reads starting at a specific start site to less than 2
     * @return a sorted list of reads
     * @throws IllegalArgumentException if reads is null
     */
    public static List<GATKSAMRecord> levelCoverageByPosition(final List<GATKSAMRecord> reads, final int downsampleTo, final int minReadsPerAlignmentStart) {
        if ( reads == null ) throw new IllegalArgumentException("reads must not be null");

        final List<GATKSAMRecord> downsampled = new ArrayList<GATKSAMRecord>(reads.size());

        // Level each sample independently: the per-start stacks of one sample form the groups
        // fed to a fresh LevelingDownsampler for that sample
        final Map<String, Map<Integer, List<GATKSAMRecord>>> readsBySampleByStart = partitionReadsBySampleAndStart(reads);
        for ( final Map<Integer, List<GATKSAMRecord>> readsByPosMap : readsBySampleByStart.values() ) {
            final LevelingDownsampler<List<GATKSAMRecord>, GATKSAMRecord> downsampler = new LevelingDownsampler<List<GATKSAMRecord>, GATKSAMRecord>(downsampleTo, minReadsPerAlignmentStart);
            downsampler.submit(readsByPosMap.values());
            downsampler.signalEndOfInput();
            for ( final List<GATKSAMRecord> downsampledReads : downsampler.consumeFinalizedItems())
                downsampled.addAll(downsampledReads);
        }

        // Samples were processed independently, so the merged result must be re-sorted by coordinate
        return ReadUtils.sortReadsByCoordinate(downsampled);
    }

    /**
     * Build the data structure mapping for each sample -> (position -> reads at position)
     *
     * Note that the map position -> reads isn't ordered in any meaningful way
     *
     * @param reads a list of sorted reads
     * @return a map containing the list of reads at each start location, for each sample independently
     */
    private static Map<String, Map<Integer, List<GATKSAMRecord>>> partitionReadsBySampleAndStart(final List<GATKSAMRecord> reads) {
        final Map<String, Map<Integer, List<GATKSAMRecord>>> readsBySampleByStart = new LinkedHashMap<String, Map<Integer, List<GATKSAMRecord>>>();

        for ( final GATKSAMRecord read : reads ) {
            Map<Integer, List<GATKSAMRecord>> readsByStart = readsBySampleByStart.get(read.getReadGroup().getSample());

            if ( readsByStart == null ) {
                readsByStart = new LinkedHashMap<Integer, List<GATKSAMRecord>>();
                readsBySampleByStart.put(read.getReadGroup().getSample(), readsByStart);
            }

            List<GATKSAMRecord> readsAtStart = readsByStart.get(read.getAlignmentStart());
            if ( readsAtStart == null ) {
                // LinkedList here because LevelingDownsampler removes items most efficiently from linked lists
                readsAtStart = new LinkedList<GATKSAMRecord>();
                readsByStart.put(read.getAlignmentStart(), readsAtStart);
            }

            readsAtStart.add(read);
        }

        return readsBySampleByStart;
    }
}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/downsampling/FractionalDownsampler.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/downsampling/FractionalDownsampler.java
new file mode 100644
index 0000000..d480f23
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/downsampling/FractionalDownsampler.java
@@ -0,0 +1,129 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.downsampling;
+
+import htsjdk.samtools.SAMRecord;
+import org.broadinstitute.gatk.utils.Utils;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * Fractional Downsampler: selects a specified fraction of the reads for inclusion.
+ *
+ * Since the selection is done randomly, the actual fraction of reads retained may be slightly
+ * more or less than the requested fraction, depending on the total number of reads submitted.
+ *
+ * @author David Roazen
+ */
+public class FractionalDownsampler<T extends SAMRecord> extends ReadsDownsampler<T> {
+
+    private ArrayList<T> selectedReads;
+
+    private final int cutoffForInclusion;
+
+    private static final int RANDOM_POOL_SIZE = 10000;
+
+    /**
+     * Construct a FractionalDownsampler
+     *
+     * @param fraction Fraction of reads to preserve, between 0.0 (inclusive) and 1.0 (inclusive).
+     *                 Actual number of reads preserved may differ randomly.
+     */
+    public FractionalDownsampler( final double fraction ) {
+        if ( fraction < 0.0 || fraction > 1.0 ) {
+            throw new ReviewedGATKException("Fraction of reads to include must be between 0.0 and 1.0, inclusive");
+        }
+
+        cutoffForInclusion = (int)(fraction * RANDOM_POOL_SIZE);
+        clearItems();
+        resetStats();
+    }
+
+    @Override
+    public void submit( final T newRead ) {
+        if ( Utils.getRandomGenerator().nextInt(10000) < cutoffForInclusion || doNotDiscardItem(newRead) ) {
+            selectedReads.add(newRead);
+        }
+        else {
+            numDiscardedItems++;
+        }
+    }
+
+    @Override
+    public boolean hasFinalizedItems() {
+        return selectedReads.size() > 0;
+    }
+
+    @Override
+    public List<T> consumeFinalizedItems() {
+        // pass by reference rather than make a copy, for speed
+        List<T> downsampledItems = selectedReads;
+        clearItems();
+        return downsampledItems;
+    }
+
+    @Override
+    public boolean hasPendingItems() {
+        return false;
+    }
+
+    @Override
+    public T peekFinalized() {
+        return selectedReads.isEmpty() ? null : selectedReads.get(0);
+    }
+
+    @Override
+    public T peekPending() {
+        return null;
+    }
+
+    @Override
+    public int size() {
+        return selectedReads.size();
+    }
+
+    @Override
+    public void signalEndOfInput() {
+        // NO-OP
+    }
+
+    @Override
+    public void clearItems() {
+        selectedReads = new ArrayList<T>();
+    }
+
+    @Override
+    public boolean requiresCoordinateSortOrder() {
+        return false;
+    }
+
+    @Override
+    public void signalNoMoreReadsBefore( final T read ) {
+        // NO-OP
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/downsampling/FractionalDownsamplerFactory.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/downsampling/FractionalDownsamplerFactory.java
new file mode 100644
index 0000000..74037ea
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/downsampling/FractionalDownsamplerFactory.java
@@ -0,0 +1,46 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.downsampling;
+
+import htsjdk.samtools.SAMRecord;
+
+/**
+ * Factory for creating FractionalDownsamplers on demand
+ *
+ * @author David Roazen
+ */
+public class FractionalDownsamplerFactory<T extends SAMRecord> implements ReadsDownsamplerFactory<T> {
+
+    private double fraction;
+
+    public FractionalDownsamplerFactory( double fraction ) {
+        this.fraction = fraction;
+    }
+
+    public ReadsDownsampler<T> newInstance() {
+        return new FractionalDownsampler<T>(fraction);
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/downsampling/LevelingDownsampler.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/downsampling/LevelingDownsampler.java
new file mode 100644
index 0000000..89fbea0
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/downsampling/LevelingDownsampler.java
@@ -0,0 +1,242 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.downsampling;
+
+import org.broadinstitute.gatk.utils.MathUtils;
+
+import java.util.*;
+
+/**
+ * Leveling Downsampler: Given a set of Lists of arbitrary items and a target size, removes items from
+ * the Lists in an even fashion until the total size of all Lists is <= the target size. Leveling
+ * does not occur until all Lists have been submitted and signalEndOfInput() is called.
+ *
+ * The Lists should be LinkedLists for maximum efficiency during item removal, however other
+ * kinds of Lists are also accepted (albeit at a slight performance penalty).
+ *
+ * Since this downsampler extends the Downsampler interface rather than the ReadsDownsampler interface,
+ * the Lists need not contain reads. However this downsampler may not be wrapped within one of the
+ * DownsamplingReadsIterators
+ *
+ * @param <T> the List type representing the stacks to be leveled
+ * @param <E> the type of the elements of each List
+ *
+ * @author David Roazen
+ */
+public class LevelingDownsampler<T extends List<E>, E> extends Downsampler<T> {
+    private final int minElementsPerStack;
+
+    private final int targetSize;
+
+    private List<T> groups;
+
+    private boolean groupsAreFinalized;
+
+    /**
+     * Construct a LevelingDownsampler
+     *
+     * Uses the default minElementsPerStack of 1
+     *
+     * @param targetSize the sum of the sizes of all individual Lists this downsampler is fed may not exceed
+     *                   this value -- if it does, items are removed from Lists evenly until the total size
+     *                   is <= this value
+     */
+    public LevelingDownsampler( final int targetSize ) {
+        this(targetSize, 1);
+    }
+
+    /**
+     * Construct a LevelingDownsampler
+     *
+     * @param targetSize the sum of the sizes of all individual Lists this downsampler is fed may not exceed
+     *                   this value -- if it does, items are removed from Lists evenly until the total size
+     *                   is <= this value
+     * @param minElementsPerStack no stack will be reduced below this size during downsampling.  That is,
+     *                            if a stack has only 3 elements and minElementsPerStack is 3, no matter what
+     *                            we'll not reduce this stack below 3.
+     */
+    public LevelingDownsampler( final int targetSize, final int minElementsPerStack ) {
+        if ( targetSize < 0 ) throw new IllegalArgumentException("targetSize must be >= 0 but got " + targetSize);
+        if ( minElementsPerStack < 0 ) throw new IllegalArgumentException("minElementsPerStack must be >= 0 but got " + minElementsPerStack);
+
+        this.targetSize = targetSize;
+        this.minElementsPerStack = minElementsPerStack;
+        clearItems();
+        resetStats();
+    }
+
+    @Override
+    public void submit( final T item ) {
+        groups.add(item);
+    }
+
+    @Override
+    public void submit( final Collection<T> items ){
+        groups.addAll(items);
+    }
+
+    @Override
+    public boolean hasFinalizedItems() {
+        return groupsAreFinalized && groups.size() > 0;
+    }
+
+    @Override
+    public List<T> consumeFinalizedItems() {
+        if ( ! hasFinalizedItems() ) {
+            return new ArrayList<T>();
+        }
+
+        // pass by reference rather than make a copy, for speed
+        final List<T> toReturn = groups;
+        clearItems();
+        return toReturn;
+    }
+
+    @Override
+    public boolean hasPendingItems() {
+        return ! groupsAreFinalized && groups.size() > 0;
+    }
+
+    @Override
+    public T peekFinalized() {
+        return hasFinalizedItems() ? groups.get(0) : null;
+    }
+
+    @Override
+    public T peekPending() {
+        return hasPendingItems() ? groups.get(0) : null;
+    }
+
+    @Override
+    public int size() {
+        int s = 0;
+        for ( final List<E> l : groups ) {
+            s += l.size();
+        }
+        return s;
+    }
+
+    @Override
+    public void signalEndOfInput() {
+        levelGroups();
+        groupsAreFinalized = true;
+    }
+
+    @Override
+    public void clearItems() {
+        groups = new ArrayList<T>();
+        groupsAreFinalized = false;
+    }
+
+    private void levelGroups() {
+        final int[] groupSizes = new int[groups.size()];
+        int totalSize = 0;
+        int currentGroupIndex = 0;
+
+        for ( final T group : groups ) {
+            groupSizes[currentGroupIndex] = group.size();
+            totalSize += groupSizes[currentGroupIndex];
+            currentGroupIndex++;
+        }
+
+        if ( totalSize <= targetSize ) {
+            return;    // no need to eliminate any items
+        }
+
+        // We will try to remove exactly this many items, however we will refuse to allow any
+        // one group to fall below size 1, and so might end up removing fewer items than this
+        int numItemsToRemove = totalSize - targetSize;
+
+        currentGroupIndex = 0;
+        int numConsecutiveUmodifiableGroups = 0;
+
+        // Continue until we've either removed all the items we wanted to, or we can't
+        // remove any more items without violating the constraint that all groups must
+        // be left with at least one item
+        while ( numItemsToRemove > 0 && numConsecutiveUmodifiableGroups < groupSizes.length ) {
+            if ( groupSizes[currentGroupIndex] > minElementsPerStack ) {
+                groupSizes[currentGroupIndex]--;
+                numItemsToRemove--;
+                numConsecutiveUmodifiableGroups = 0;
+            }
+            else {
+                numConsecutiveUmodifiableGroups++;
+            }
+
+            currentGroupIndex = (currentGroupIndex + 1) % groupSizes.length;
+        }
+
+        // Now we actually go through and reduce each group to its new count as specified in groupSizes
+        currentGroupIndex = 0;
+        for ( final T group : groups ) {
+            downsampleOneGroup(group, groupSizes[currentGroupIndex]);
+            currentGroupIndex++;
+        }
+    }
+
+    private void downsampleOneGroup( final T group, final int numItemsToKeep ) {
+        if ( numItemsToKeep >= group.size() ) {
+            return;
+        }
+
+        final BitSet itemsToKeep = new BitSet(group.size());
+        for ( Integer selectedIndex : MathUtils.sampleIndicesWithoutReplacement(group.size(), numItemsToKeep) ) {
+            itemsToKeep.set(selectedIndex);
+        }
+
+        int currentIndex = 0;
+
+        // If our group is a linked list, we can remove the desired items in a single O(n) pass with an iterator
+        if ( group instanceof LinkedList ) {
+            final Iterator<E> iter = group.iterator();
+            while ( iter.hasNext() ) {
+                final E item = iter.next();
+
+                if ( ! itemsToKeep.get(currentIndex) && ! doNotDiscardItem(item) ) {
+                    iter.remove();
+                    numDiscardedItems++;
+                }
+
+                currentIndex++;
+            }
+        }
+        // If it's not a linked list, it's more efficient to copy the desired items into a new list and back rather
+        // than suffer O(n^2) of item shifting
+        else {
+            final List<E> keptItems = new ArrayList<E>(group.size());
+
+            for ( final E item : group ) {
+                if ( itemsToKeep.get(currentIndex) || doNotDiscardItem(item) ) {
+                    keptItems.add(item);
+                }
+                currentIndex++;
+            }
+            numDiscardedItems += group.size() - keptItems.size();
+            group.clear();
+            group.addAll(keptItems);
+        }
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/downsampling/PassThroughDownsampler.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/downsampling/PassThroughDownsampler.java
new file mode 100644
index 0000000..e9161b7
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/downsampling/PassThroughDownsampler.java
@@ -0,0 +1,111 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.downsampling;
+
+import htsjdk.samtools.SAMRecord;
+
+import java.util.LinkedList;
+import java.util.List;
+
+/**
+ * Pass-Through Downsampler: Implementation of the ReadsDownsampler interface that does no
+ * downsampling whatsoever, and instead simply "passes-through" all the reads it's given.
+ * Useful for situations where you want to disable downsampling, but still need to use
+ * the downsampler interface.
+ *
+ * @author David Roazen
+ */
+public class PassThroughDownsampler<T extends SAMRecord> extends ReadsDownsampler<T> {
+
+    private LinkedList<T> selectedReads;
+
+    public PassThroughDownsampler() {
+        clearItems();
+    }
+
+    @Override
+    public void submit( T newRead ) {
+        // All reads pass-through, no reads get downsampled
+        selectedReads.add(newRead);
+    }
+
+    @Override
+    public boolean hasFinalizedItems() {
+        return ! selectedReads.isEmpty();
+    }
+
+    /**
+     * Note that this list is a linked list and so doesn't support fast random access
+     * @return
+     */
+    @Override
+    public List<T> consumeFinalizedItems() {
+        // pass by reference rather than make a copy, for speed
+        final List<T> downsampledItems = selectedReads;
+        clearItems();
+        return downsampledItems;
+    }
+
+    @Override
+    public boolean hasPendingItems() {
+        return false;
+    }
+
+    @Override
+    public T peekFinalized() {
+        return selectedReads.isEmpty() ? null : selectedReads.getFirst();
+    }
+
+    @Override
+    public T peekPending() {
+        return null;
+    }
+
+    @Override
+    public int size() {
+        return selectedReads.size();
+    }
+
+    @Override
+    public void signalEndOfInput() {
+        // NO-OP
+    }
+
+    @Override
+    public void clearItems() {
+        selectedReads = new LinkedList<T>();
+    }
+
+    @Override
+    public boolean requiresCoordinateSortOrder() {
+        return false;
+    }
+
+    @Override
+    public void signalNoMoreReadsBefore( T read ) {
+        // NO-OP
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/downsampling/PerSampleDownsamplingReadsIterator.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/downsampling/PerSampleDownsamplingReadsIterator.java
new file mode 100644
index 0000000..fc7eb77
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/downsampling/PerSampleDownsamplingReadsIterator.java
@@ -0,0 +1,207 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.downsampling;
+
+import htsjdk.samtools.SAMRecord;
+import htsjdk.samtools.SAMRecordComparator;
+import htsjdk.samtools.SAMRecordCoordinateComparator;
+import org.broadinstitute.gatk.utils.iterators.GATKSAMIterator;
+
+import java.util.*;
+
+
/**
 * GATKSAMIterator wrapper around our generic reads downsampler interface
 * that downsamples reads for each sample independently, and then re-assembles
 * the reads back into a single merged stream.
 *
 * Reads are routed to one downsampler per sample (keyed on the read group's
 * sample name; reads with no read group share the null key). Finalized reads
 * from all samples are merged back into coordinate order via a priority queue,
 * and a read is released only once no downsampler still holds a pending read
 * at an earlier position.
 *
 * @author David Roazen
 */
public class PerSampleDownsamplingReadsIterator implements GATKSAMIterator {

    // Underlying (pre-downsampling) read stream
    private GATKSAMIterator nestedSAMIterator;
    // Used to create one downsampler per sample, all sharing the same settings
    private ReadsDownsamplerFactory<SAMRecord> downsamplerFactory;
    // Sample name -> downsampler for that sample's reads (a null key is allowed)
    private Map<String, ReadsDownsampler<SAMRecord>> perSampleDownsamplers;
    // Finalized reads from all samples, merged back into coordinate order
    private PriorityQueue<SAMRecord> orderedDownsampledReadsCache;
    // The read next() will return, or null once the stream is exhausted
    private SAMRecord nextRead = null;
    // Coordinate order used both by the cache and for pending-read comparisons
    private SAMRecordComparator readComparator = new SAMRecordCoordinateComparator();
    // Earliest (by coordinate) read still pending in any downsampler, or null if none
    private SAMRecord earliestPendingRead = null;
    // The downsampler holding earliestPendingRead, tracked so updates are usually cheap
    private ReadsDownsampler<SAMRecord> earliestPendingDownsampler = null;

    // Initial size of our cache of finalized reads
    private static final int DOWNSAMPLED_READS_INITIAL_CACHE_SIZE = 4096;

    // The number of positional changes that can occur in the read stream before all downsamplers
    // should be informed of the current position (guards against samples with relatively sparse reads
    // getting stuck in a pending state):
    private static final int DOWNSAMPLER_POSITIONAL_UPDATE_INTERVAL = 3;   // TODO: experiment with this value

    /**
     * @param iter wrapped iterator from which this iterator will pull reads
     * @param downsamplerFactory factory used to create new downsamplers as needed
     */
    public PerSampleDownsamplingReadsIterator( GATKSAMIterator iter, ReadsDownsamplerFactory<SAMRecord> downsamplerFactory ) {
        nestedSAMIterator = iter;
        this.downsamplerFactory = downsamplerFactory;
        perSampleDownsamplers = new HashMap<String, ReadsDownsampler<SAMRecord>>();
        orderedDownsampledReadsCache = new PriorityQueue<SAMRecord>(DOWNSAMPLED_READS_INITIAL_CACHE_SIZE, readComparator);

        // Eagerly load the first read so hasNext()/next() are immediately usable
        advanceToNextRead();
    }

    /**
     * @return true if another downsampled read is available
     */
    public boolean hasNext() {
        return nextRead != null;
    }

    /**
     * @return the next downsampled read, in coordinate order
     * @throws NoSuchElementException if the stream is exhausted
     */
    public SAMRecord next() {
        if ( nextRead == null ) {
            throw new NoSuchElementException("next() called when there are no more items");
        }

        SAMRecord toReturn = nextRead;
        advanceToNextRead();

        return toReturn;
    }

    // Sets nextRead to the next releasable read, pulling more reads from the
    // wrapped iterator if necessary, or to null when fully exhausted.
    private void advanceToNextRead() {
        if ( ! readyToReleaseReads() && ! fillDownsampledReadsCache() ) {
            nextRead = null;
        }
        else {
            nextRead = orderedDownsampledReadsCache.poll();
        }
    }

    // A read may be released only when no downsampler holds a pending read located
    // earlier than the head of the cache -- otherwise global sort order could be violated.
    private boolean readyToReleaseReads() {
        if ( orderedDownsampledReadsCache.isEmpty() ) {
            return false;
        }

        return earliestPendingRead == null ||
               readComparator.compare(orderedDownsampledReadsCache.peek(), earliestPendingRead) <= 0;
    }

    // Pulls reads from the wrapped iterator and feeds them to the per-sample downsamplers
    // until a read can be released (or input is exhausted). Returns readyToReleaseReads().
    private boolean fillDownsampledReadsCache() {
        SAMRecord prevRead = null;
        int numPositionalChanges = 0;

        // Continue submitting reads to the per-sample downsamplers until the read at the top of the priority queue
        // can be released without violating global sort order
        while ( nestedSAMIterator.hasNext() && ! readyToReleaseReads() ) {
            SAMRecord read = nestedSAMIterator.next();
            String sampleName = read.getReadGroup() != null ? read.getReadGroup().getSample() : null;

            // Lazily create a downsampler the first time each sample is seen
            ReadsDownsampler<SAMRecord> thisSampleDownsampler = perSampleDownsamplers.get(sampleName);
            if ( thisSampleDownsampler == null ) {
                thisSampleDownsampler = downsamplerFactory.newInstance();
                perSampleDownsamplers.put(sampleName, thisSampleDownsampler);
            }

            thisSampleDownsampler.submit(read);
            processFinalizedAndPendingItems(thisSampleDownsampler);

            if ( prevRead != null && prevRead.getAlignmentStart() != read.getAlignmentStart() ) {
                numPositionalChanges++;
            }

            // Periodically inform all downsamplers of the current position in the read stream. This is
            // to prevent downsamplers for samples with sparser reads than others from getting stuck too
            // long in a pending state.
            if ( numPositionalChanges > 0 && numPositionalChanges % DOWNSAMPLER_POSITIONAL_UPDATE_INTERVAL == 0 ) {
                for ( ReadsDownsampler<SAMRecord> perSampleDownsampler : perSampleDownsamplers.values() ) {
                    perSampleDownsampler.signalNoMoreReadsBefore(read);
                    processFinalizedAndPendingItems(perSampleDownsampler);
                }
            }

            prevRead = read;
        }

        // Input exhausted: flush every downsampler and drop all pending-read tracking,
        // since nothing can remain pending after signalEndOfInput()
        if ( ! nestedSAMIterator.hasNext() ) {
            for ( ReadsDownsampler<SAMRecord> perSampleDownsampler : perSampleDownsamplers.values() ) {
                perSampleDownsampler.signalEndOfInput();
                if ( perSampleDownsampler.hasFinalizedItems() ) {
                    orderedDownsampledReadsCache.addAll(perSampleDownsampler.consumeFinalizedItems());
                }
            }
            earliestPendingRead = null;
            earliestPendingDownsampler = null;
        }

        return readyToReleaseReads();
    }

    // Keeps (earliestPendingRead, earliestPendingDownsampler) consistent after
    // currentDownsampler's state may have changed.
    private void updateEarliestPendingRead( ReadsDownsampler<SAMRecord> currentDownsampler ) {
        // If there is no recorded earliest pending read and this downsampler has pending items,
        // then this downsampler's first pending item becomes the new earliest pending read:
        if ( earliestPendingRead == null && currentDownsampler.hasPendingItems() ) {
            earliestPendingRead = currentDownsampler.peekPending();
            earliestPendingDownsampler = currentDownsampler;
        }
        // In all other cases, we only need to update the earliest pending read when the downsampler
        // associated with it experiences a change in its pending reads, since by assuming a sorted
        // read stream we're assured that each downsampler's earliest pending read will only increase
        // in genomic position over time.
        //
        // TODO: An occasional O(samples) linear search seems like a better option than keeping the downsamplers
        // TODO: sorted by earliest pending read, which would cost at least O(total_reads * (samples + log(samples))),
        // TODO: but need to verify this empirically.
        else if ( currentDownsampler == earliestPendingDownsampler &&
                  (! currentDownsampler.hasPendingItems() || readComparator.compare(currentDownsampler.peekPending(), earliestPendingRead) != 0) ) {

            // Linear re-scan across all downsamplers for the new minimum pending read
            earliestPendingRead = null;
            earliestPendingDownsampler = null;
            for ( ReadsDownsampler<SAMRecord> perSampleDownsampler : perSampleDownsamplers.values() ) {
                if ( perSampleDownsampler.hasPendingItems() &&
                     (earliestPendingRead == null || readComparator.compare(perSampleDownsampler.peekPending(), earliestPendingRead) < 0) ) {

                    earliestPendingRead = perSampleDownsampler.peekPending();
                    earliestPendingDownsampler = perSampleDownsampler;
                }
            }
        }
    }

    // Moves any newly-finalized reads from currentDownsampler into the merge cache,
    // then refreshes the earliest-pending-read bookkeeping.
    private void processFinalizedAndPendingItems( ReadsDownsampler<SAMRecord> currentDownsampler ) {
        if ( currentDownsampler.hasFinalizedItems() ) {
            orderedDownsampledReadsCache.addAll(currentDownsampler.consumeFinalizedItems());
        }
        updateEarliestPendingRead(currentDownsampler);
    }

    /**
     * @throws UnsupportedOperationException always -- removal is not supported
     */
    public void remove() {
        throw new UnsupportedOperationException("Can not remove records from a SAM file via an iterator!");
    }

    /**
     * Closes the wrapped iterator; any reads still buffered here are discarded.
     */
    public void close() {
        nestedSAMIterator.close();
    }

    public Iterator<SAMRecord> iterator() {
        return this;
    }
}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/downsampling/ReadsDownsampler.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/downsampling/ReadsDownsampler.java
new file mode 100644
index 0000000..455269b
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/downsampling/ReadsDownsampler.java
@@ -0,0 +1,56 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.downsampling;
+
+import htsjdk.samtools.SAMRecord;
+
/**
 * An extension of the basic downsampler API with reads-specific operations
 *
 * @author David Roazen
 */
public abstract class ReadsDownsampler<T extends SAMRecord> extends Downsampler<T> {

    /**
     * Does this downsampler require that reads be fed to it in coordinate order?
     *
     * Position-aware implementations (eg., those that downsample per alignment start)
     * need sorted input; pure stream samplers do not.
     *
     * @return true if reads must be submitted to this downsampler in coordinate order, otherwise false
     */
    public abstract boolean requiresCoordinateSortOrder();

    /**
     * Tell this downsampler that no more reads located before the provided read (according to
     * the sort order of the read stream) will be fed to it.
     *
     * Allows position-aware downsamplers to finalize pending reads earlier than they would
     * otherwise be able to, particularly when doing per-sample downsampling and reads for
     * certain samples are sparser than average.
     *
     * Implementations that never hold reads in a pending state may treat this as a no-op.
     *
     * @param read the downsampler will assume that no reads located before this read will ever
     *             be submitted to it in the future
     */
    public abstract void signalNoMoreReadsBefore( final T read );
}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/downsampling/ReadsDownsamplerFactory.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/downsampling/ReadsDownsamplerFactory.java
new file mode 100644
index 0000000..ea7e6b0
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/downsampling/ReadsDownsamplerFactory.java
@@ -0,0 +1,38 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.downsampling;
+
+import htsjdk.samtools.SAMRecord;
+
+/**
+ * A ReadsDownsamplerFactory can be used to create an arbitrary number of instances of a particular
+ * downsampler, all sharing the same construction parameters.
+ *
+ * @author David Roazen
+ */
+public interface ReadsDownsamplerFactory<T extends SAMRecord> {
+    public ReadsDownsampler<T> newInstance();
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/downsampling/ReservoirDownsampler.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/downsampling/ReservoirDownsampler.java
new file mode 100644
index 0000000..b7bf2be
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/downsampling/ReservoirDownsampler.java
@@ -0,0 +1,219 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.downsampling;
+
+import htsjdk.samtools.SAMRecord;
+import org.broadinstitute.gatk.utils.Utils;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+
+import java.util.*;
+
/**
 * Reservoir Downsampler: Selects n reads out of a stream whose size is not known in advance, with
 * every read in the stream having an equal chance of being selected for inclusion.
 *
 * An implementation of "Algorithm R" from the paper "Random Sampling with a Reservoir" (Jeffrey Scott Vitter, 1985)
 *
 * @author David Roazen
 */
public class ReservoirDownsampler<T extends SAMRecord> extends ReadsDownsampler<T> {

    /**
     * size of our reservoir -- ie., the maximum number of reads from the stream that will be retained
     * (not including any undiscardable items)
     */
    private final int targetSampleSize;

    /**
     * if true, this downsampler will be optimized for the case
     * where most of the time we won't fill up anything like the
     * targetSampleSize elements.  If this is false, we will allocate
     * internal buffers to targetSampleSize initially, which minimizes
     * the cost of allocation if we often use targetSampleSize or more
     * elements.
     */
    private final boolean expectFewOverflows;

    /**
     * At times this can be a linked list or an array list, depending on how we're accessing the
     * data and whether or not we're expecting few overflows
     */
    private List<T> reservoir;

    /**
     * Certain items (eg., reduced reads) cannot be discarded at all during downsampling. We store
     * these items separately so as not to impact the fair selection of items for inclusion in the
     * reservoir. These items are returned (and cleared) along with any items in the reservoir in
     * calls to consumeFinalizedItems().
     */
    private List<T> undiscardableItems;

    /**
     * Are we currently using a linked list for the reservoir?
     */
    private boolean isLinkedList;

    /**
     * Count of the number of reads seen that were actually eligible for discarding. Used by the reservoir downsampling
     * algorithm to ensure that all discardable reads have an equal chance of making it into the reservoir.
     */
    private int totalDiscardableReadsSeen;


    /**
     * Construct a ReservoirDownsampler
     *
     * @param targetSampleSize Size of the reservoir used by this downsampler. Number of items retained
     *                         after downsampling will be min(totalDiscardableReads, targetSampleSize) + any
     *                         undiscardable reads (eg., reduced reads).
     *
     * @param expectFewOverflows if true, this downsampler will be optimized for the case
     *                           where most of the time we won't fill up anything like the
     *                           targetSampleSize elements.  If this is false, we will allocate
     *                           internal buffers to targetSampleSize initially, which minimizes
     *                           the cost of allocation if we often use targetSampleSize or more
     *                           elements.
     *
     * @throws ReviewedGATKException if targetSampleSize <= 0
     */
    public ReservoirDownsampler ( final int targetSampleSize, final boolean expectFewOverflows ) {
        if ( targetSampleSize <= 0 ) {
            throw new ReviewedGATKException("Cannot do reservoir downsampling with a sample size <= 0");
        }

        this.targetSampleSize = targetSampleSize;
        this.expectFewOverflows = expectFewOverflows;
        clearItems();
        // resetStats() is inherited from the base Downsampler class
        resetStats();
    }

    /**
     * Construct a ReservoirDownsampler
     *
     * @param targetSampleSize Size of the reservoir used by this downsampler. Number of items retained
     *                         after downsampling will be min(totalReads, targetSampleSize)
     */
    public ReservoirDownsampler ( final int targetSampleSize ) {
        this(targetSampleSize, false);
    }

    /**
     * Submit one read. Undiscardable reads bypass the reservoir entirely so they can
     * neither crowd out nor be crowded out by reads competing for reservoir slots.
     *
     * @param newRead read to submit
     */
    @Override
    public void submit ( final T newRead ) {
        // doNotDiscardItem() is inherited from the base Downsampler class
        if ( doNotDiscardItem(newRead) ) {
            undiscardableItems.add(newRead);
            return;
        }

        // Only count reads that are actually eligible for discarding for the purposes of the reservoir downsampling algorithm
        totalDiscardableReadsSeen++;

        if ( totalDiscardableReadsSeen <= targetSampleSize ) {
            // Reservoir not yet full: always keep the read
            reservoir.add(newRead);
        }
        else {
            // Overflow requires random access for set(), so convert a linked-list
            // reservoir (the expectFewOverflows optimization) to an ArrayList first
            if ( isLinkedList ) {
                reservoir = new ArrayList<T>(reservoir);
                isLinkedList = false;
            }

            // Algorithm R: keep the new read with probability targetSampleSize / totalDiscardableReadsSeen,
            // replacing a uniformly-chosen existing slot. Exactly one read (the new one or the
            // replaced one) is discarded either way, hence the unconditional increment below.
            final int randomSlot = Utils.getRandomGenerator().nextInt(totalDiscardableReadsSeen);
            if ( randomSlot < targetSampleSize ) {
                reservoir.set(randomSlot, newRead);
            }
            numDiscardedItems++;
        }
    }

    @Override
    public boolean hasFinalizedItems() {
        return ! reservoir.isEmpty() || ! undiscardableItems.isEmpty();
    }

    /**
     * Returns the reservoir contents plus all undiscardable items, then resets
     * the internal buffers.
     *
     * @return the retained reads (reservoir first, then undiscardable items), or an
     *         empty list if nothing is held
     */
    @Override
    public List<T> consumeFinalizedItems() {
        if ( ! hasFinalizedItems() ) {
            // if there's nothing here, don't bother allocating a new list
            return Collections.emptyList();
        } else {
            // pass reservoir by reference rather than make a copy, for speed
            final List<T> downsampledItems = reservoir;
            downsampledItems.addAll(undiscardableItems);
            clearItems();
            return downsampledItems;
        }
    }

    /**
     * This downsampler never holds reads in a pending state.
     */
    @Override
    public boolean hasPendingItems() {
        return false;
    }

    @Override
    public T peekFinalized() {
        // Prefer the reservoir's first item; fall back to the first undiscardable item
        return ! reservoir.isEmpty() ? reservoir.get(0) : (! undiscardableItems.isEmpty() ? undiscardableItems.get(0) : null);
    }

    @Override
    public T peekPending() {
        return null;
    }

    @Override
    public int size() {
        return reservoir.size() + undiscardableItems.size();
    }

    @Override
    public void signalEndOfInput() {
        // NO-OP: all retained reads are already finalized
    }

    /**
     * Clear the data structures used to hold information
     */
    @Override
    public void clearItems() {
        // if we aren't expecting many overflows, allocate a linked list not an arraylist
        reservoir = expectFewOverflows ? new LinkedList<T>() : new ArrayList<T>(targetSampleSize);

        // there's no possibility of overflow with the undiscardable items, so we always use a linked list for them
        undiscardableItems = new LinkedList<>();

        // it's a linked list if we allocate one
        isLinkedList = expectFewOverflows;

        // an internal stat used by the downsampling process, so not cleared by resetStats() below
        totalDiscardableReadsSeen = 0;
    }

    @Override
    public boolean requiresCoordinateSortOrder() {
        return false;
    }

    @Override
    public void signalNoMoreReadsBefore( T read ) {
        // NO-OP: selection is position-independent, so positional hints are irrelevant
    }
}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/downsampling/ReservoirDownsamplerFactory.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/downsampling/ReservoirDownsamplerFactory.java
new file mode 100644
index 0000000..50e89c2
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/downsampling/ReservoirDownsamplerFactory.java
@@ -0,0 +1,46 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.downsampling;
+
+import htsjdk.samtools.SAMRecord;
+
+/**
+ * Factory for creating ReservoirDownsamplers on demand
+ *
+ * @author David Roazen
+ */
+public class ReservoirDownsamplerFactory<T extends SAMRecord> implements ReadsDownsamplerFactory<T> {
+
+    private int targetSampleSize;
+
+    public ReservoirDownsamplerFactory( int targetSampleSize ) {
+        this.targetSampleSize = targetSampleSize;
+    }
+
+    public ReadsDownsampler<T> newInstance() {
+        return new ReservoirDownsampler<T>(targetSampleSize);
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/downsampling/SimplePositionalDownsampler.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/downsampling/SimplePositionalDownsampler.java
new file mode 100644
index 0000000..8a3da8a
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/downsampling/SimplePositionalDownsampler.java
@@ -0,0 +1,171 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.downsampling;
+
+import htsjdk.samtools.SAMRecord;
+
+import java.util.*;
+
/**
 * Simple Positional Downsampler: Downsample each stack of reads at each alignment start to a size <= a target coverage
 * using a Reservoir downsampler. Stores only O(target coverage) reads in memory at any given time.
 *
 * Requires a coordinate-sorted read stream: reads sharing the current alignment start
 * accumulate in an internal reservoir, which is flushed to the finalized list as soon
 * as the stream moves past that position. Unmapped reads at the end of the stream are
 * never downsampled.
 *
 * @author David Roazen
 */
public class SimplePositionalDownsampler<T extends SAMRecord> extends ReadsDownsampler<T> {

    // Maximum number of reads retained at any single alignment start position
    private final int targetCoverage;

    // Downsamples the reads sharing the current alignment start; flushed into
    // finalizedReads whenever the stream advances past that position
    private final ReservoirDownsampler<T> reservoir;

    // Contig index of the position currently being downsampled (valid only when positionEstablished)
    private int currentContigIndex;

    // Alignment start of the position currently being downsampled (valid only when positionEstablished)
    private int currentAlignmentStart;

    // False until the first submitted read establishes a current position
    private boolean positionEstablished;

    // True once the unmapped reads at the end of the coordinate-sorted stream are reached
    private boolean unmappedReadsReached;

    // Reads whose position has been passed, and which can therefore be released to the caller
    private ArrayList<T> finalizedReads;


    /**
     * Construct a SimplePositionalDownsampler
     *
     * @param targetCoverage Maximum number of reads that may share any given alignment start position
     */
    public SimplePositionalDownsampler( final int targetCoverage ) {
        this.targetCoverage = targetCoverage;
        reservoir = new ReservoirDownsampler<T>(targetCoverage);
        finalizedReads = new ArrayList<T>();
        clearItems();
        // resetStats() is inherited from the base Downsampler class
        resetStats();
    }

    /**
     * Submit one read. Must be called in coordinate order.
     *
     * @param newRead read to submit
     */
    @Override
    public void submit( final T newRead ) {
        // May flush the reservoir and/or flip unmappedReadsReached before we route the read
        updatePositionalState(newRead);

        if ( unmappedReadsReached ) {    // don't downsample the unmapped reads at the end of the stream
            finalizedReads.add(newRead);
        }
        else {
            // Track how many reads the reservoir discards during this submit so we can
            // fold that count into this downsampler's own discard statistics
            final int reservoirPreviouslyDiscardedItems = reservoir.getNumberOfDiscardedItems();
            // our reservoir downsampler will call doNotDiscardItem() for us to exclude items from elimination as appropriate
            reservoir.submit(newRead);
            numDiscardedItems += reservoir.getNumberOfDiscardedItems() - reservoirPreviouslyDiscardedItems;
        }
    }

    @Override
    public boolean hasFinalizedItems() {
        return finalizedReads.size() > 0;
    }

    /**
     * @return all reads finalized so far, transferring ownership to the caller
     */
    @Override
    public List<T> consumeFinalizedItems() {
        // pass by reference rather than make a copy, for speed
        final List<T> toReturn = finalizedReads;
        finalizedReads = new ArrayList<T>();
        return toReturn;
    }

    @Override
    public boolean hasPendingItems() {
        // Reads still in the reservoir are "finalized" from the reservoir's point of view,
        // but pending from ours until the stream moves past the current position
        return reservoir.hasFinalizedItems();
    }

    @Override
    public T peekFinalized() {
        return finalizedReads.isEmpty() ? null : finalizedReads.get(0);
    }

    @Override
    public T peekPending() {
        return reservoir.peekFinalized();
    }

    @Override
    public int size() {
        return finalizedReads.size() + reservoir.size();
    }

    @Override
    public void signalEndOfInput() {
        // No more reads can arrive at the current position, so release the reservoir's contents
        finalizeReservoir();
    }

    @Override
    public void clearItems() {
        reservoir.clearItems();
        reservoir.resetStats();
        finalizedReads.clear();
        positionEstablished = false;
        unmappedReadsReached = false;
    }

    @Override
    public boolean requiresCoordinateSortOrder() {
        // Positional downsampling is only meaningful on a coordinate-sorted stream
        return true;
    }

    @Override
    public void signalNoMoreReadsBefore( final T read ) {
        // Treat the hint exactly like seeing the read itself: if it's past the current
        // position, the reservoir can be flushed early
        updatePositionalState(read);
    }

    // If newRead lies past the current position, flush the reservoir, adopt newRead's
    // position as current, and detect the transition into the trailing unmapped reads.
    private void updatePositionalState( final T newRead ) {
        if ( readIsPastCurrentPosition(newRead) ) {
            if ( reservoir.hasFinalizedItems() ) {
                finalizeReservoir();
            }

            setCurrentPosition(newRead);

            if ( newRead.getReadUnmappedFlag() ) {
                unmappedReadsReached = true;
            }
        }
    }

    // Record read's contig/alignment start as the position currently being downsampled.
    private void setCurrentPosition( final T read ) {
        currentContigIndex = read.getReferenceIndex();
        currentAlignmentStart = read.getAlignmentStart();
        positionEstablished = true;
    }

    // True when read lies beyond the current position in coordinate order. The first
    // unmapped read encountered also counts as "past", triggering the final flush.
    private boolean readIsPastCurrentPosition( final T read ) {
        return ! positionEstablished ||
               read.getReferenceIndex() > currentContigIndex ||
               read.getAlignmentStart() > currentAlignmentStart ||
               (read.getReadUnmappedFlag() && ! unmappedReadsReached);
    }

    // Move the reservoir's retained reads into the finalized list and reset its stats
    // (its discards have already been folded into ours in submit()).
    private void finalizeReservoir() {
        finalizedReads.addAll(reservoir.consumeFinalizedItems());
        reservoir.resetStats();
    }
}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/downsampling/SimplePositionalDownsamplerFactory.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/downsampling/SimplePositionalDownsamplerFactory.java
new file mode 100644
index 0000000..bac785a
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/downsampling/SimplePositionalDownsamplerFactory.java
@@ -0,0 +1,46 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.downsampling;
+
+import htsjdk.samtools.SAMRecord;
+
+/**
+ * Factory for creating SimplePositionalDownsamplers on demand
+ *
+ * @author David Roazen
+ */
+public class SimplePositionalDownsamplerFactory<T extends SAMRecord> implements ReadsDownsamplerFactory<T> {
+
+    private int targetCoverage;
+
+    public SimplePositionalDownsamplerFactory( int targetCoverage ) {
+        this.targetCoverage = targetCoverage;
+    }
+
+    public ReadsDownsampler<T> newInstance() {
+        return new SimplePositionalDownsampler<T>(targetCoverage);
+    }
+}
\ No newline at end of file
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/duplicates/DupUtils.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/duplicates/DupUtils.java
new file mode 100644
index 0000000..9ce7fc4
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/duplicates/DupUtils.java
@@ -0,0 +1,142 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.duplicates;
+
+import org.broadinstitute.gatk.utils.BaseUtils;
+import org.broadinstitute.gatk.utils.GenomeLoc;
+import org.broadinstitute.gatk.utils.GenomeLocParser;
+import org.broadinstitute.gatk.utils.QualityUtils;
+import org.broadinstitute.gatk.utils.collections.Pair;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+import org.broadinstitute.gatk.utils.pileup.PileupElement;
+import org.broadinstitute.gatk.utils.pileup.ReadBackedPileup;
+import org.broadinstitute.gatk.utils.pileup.ReadBackedPileupImpl;
+import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
+
+import java.util.Arrays;
+import java.util.List;
+
+public class DupUtils {
+    private static GATKSAMRecord tmpCopyRead(GATKSAMRecord read) {
+        return (GATKSAMRecord)read.clone();
+    }
+
+    public static GATKSAMRecord combineDuplicates(GenomeLocParser genomeLocParser,List<GATKSAMRecord> duplicates, int maxQScore) {
+        if ( duplicates.size() == 0 )
+            return null;
+
+        // make the combined read by copying the first read and setting the
+        // bases and quals to new arrays
+        GATKSAMRecord comb = tmpCopyRead(duplicates.get(0));
+        //GATKSAMRecord comb = tmpCopyRead(duplicates.get(0));
+        comb.setDuplicateReadFlag(false);
+        int readLen = comb.getReadBases().length;
+        byte[] bases = new byte[readLen];
+        byte[] quals = new byte[readLen];
+
+        for ( int i = 0; i < readLen; i++ ) {
+            //System.out.printf("I is %d%n", i);
+            //for ( GATKSAMRecord read : duplicates ) {
+            //    System.out.printf("dup base %c %d%n", (char)read.getReadBases()[i], read.getBaseQualities()[i]);
+            //}
+            Pair<Byte, Byte> baseAndQual = combineBaseProbs(genomeLocParser,duplicates, i, maxQScore);
+            bases[i] = baseAndQual.getFirst();
+            quals[i] = baseAndQual.getSecond();            
+        }
+
+
+        comb.setBaseQualities(quals);
+        comb.setReadBases(bases);
+
+        return comb;
+    }
+
+    private static Pair<Byte, Byte> baseProbs2BaseAndQual(double[] probs, int maxQScore) {
+        byte bestBase = 0;
+        double bestProb = Double.NEGATIVE_INFINITY;
+        double sumProbs = 0;
+
+        for ( int i = 0; i < 4; i++ ) {
+            sumProbs += Math.pow(10, probs[i]);
+            //System.out.printf("Bestprob is %f > %f%n", bestProb, probs[i]);
+            if ( probs[i] > bestProb ) {
+                bestBase = BaseUtils.baseIndexToSimpleBase(i);
+                bestProb = probs[i];
+            }
+        }
+
+        Arrays.sort(probs);
+        double normalizedP = Math.pow(10, bestProb) / sumProbs;
+        byte qual = QualityUtils.trueProbToQual(normalizedP, maxQScore);
+//        if ( false ) {
+//            System.out.printf("Best base is %s %.8f%n", bestBase, bestProb);
+//            System.out.printf("2nd  base is %.8f%n", probs[1]);
+//            System.out.printf("normalized P %.8f%n", normalizedP);
+//            System.out.printf("normalized Q %.8f%n", 1 - normalizedP);
+//            System.out.printf("max Q        %2d%n", maxQScore);
+//            System.out.printf("eps          %.8f%n", eps);
+//            System.out.printf("encoded    Q %2d%n", qual);
+//        }
+
+        return new Pair<Byte, Byte>(bestBase, qual);
+    }
+
+    private static void print4BaseQuals(String header, double[] probs) {
+        System.out.printf("%s log10(P(b)) is ", header);
+        for ( int i = 0; i < 4; i++ ) {
+            System.out.printf("%c=%+.8f ", (char)BaseUtils.baseIndexToSimpleBase(i), probs[i]);
+        }
+        System.out.printf("%n");
+    }
+
+    private static Pair<Byte, Byte> combineBaseProbs(GenomeLocParser genomeLocParser,List<GATKSAMRecord> duplicates, int readOffset, int maxQScore) {
+        GenomeLoc loc = genomeLocParser.createGenomeLoc(duplicates.get(0));
+        ReadBackedPileup pileup = new ReadBackedPileupImpl(loc, duplicates, readOffset);
+
+        final boolean debug = false;
+
+        // calculate base probs
+        double[] qualSums = {0.0, 0.0, 0.0, 0.0};
+        if ( debug ) print4BaseQuals("start", qualSums);
+
+        for (PileupElement e : pileup ) {
+            int baseIndex = e.getBaseIndex();
+            byte qual = e.getQual();
+            double pqual = QualityUtils.qualToProb(qual);
+            for ( int j = 0; j < 4; j++) {
+                qualSums[j] += Math.log10(j == baseIndex ?  pqual : (1 - pqual)/3);
+            }
+
+            if ( debug ) print4BaseQuals(String.format("%c Q%2d", e.getBase(), qual), qualSums);
+        }
+        if ( debug ) print4BaseQuals("final", qualSums);
+
+        Pair<Byte, Byte> combined = baseProbs2BaseAndQual(qualSums, maxQScore);
+        if ( debug ) System.out.printf("%s => %c Q%s%n", pileup.getPileupString('N'), (char)(byte)combined.getFirst(), combined.getSecond());
+
+        return combined;
+    }
+}
\ No newline at end of file
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/duplicates/DuplicateComp.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/duplicates/DuplicateComp.java
new file mode 100644
index 0000000..7aef373
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/duplicates/DuplicateComp.java
@@ -0,0 +1,66 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.duplicates;
+
/**
 * Holds the outcome of comparing the base calls of two duplicate reads at one
 * position: the larger and smaller of the two quality scores, and whether the
 * two base calls disagreed.
 */
public class DuplicateComp {

    // the larger of the two quality scores
    private int qLarger;
    // the smaller of the two quality scores
    private int qSmaller;
    // true when the two duplicate base calls mismatched
    private boolean mismatchP;

    /**
     * @param qLarger   larger quality score of the pair
     * @param qSmaller  smaller quality score of the pair
     * @param misMatchP whether the two base calls disagreed
     */
    public DuplicateComp(int qLarger, int qSmaller, boolean misMatchP) {
        this.qLarger = qLarger;
        this.qSmaller = qSmaller;
        this.mismatchP = misMatchP;
    }

    public int getQLarger() {
        return qLarger;
    }

    public void setQLarger(int qLarger) {
        this.qLarger = qLarger;
    }

    public int getQSmaller() {
        return qSmaller;
    }

    public void setQSmaller(int qSmaller) {
        this.qSmaller = qSmaller;
    }

    public boolean isMismatchP() {
        return mismatchP;
    }

    public void setMismatchP(boolean mismatchP) {
        this.mismatchP = mismatchP;
    }

    @Override
    public String toString() {
        return String.format("%d %d %b", qLarger, qSmaller, mismatchP);
    }
}
\ No newline at end of file
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/exceptions/DynamicClassResolutionException.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/exceptions/DynamicClassResolutionException.java
new file mode 100644
index 0000000..e39fd55
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/exceptions/DynamicClassResolutionException.java
@@ -0,0 +1,54 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.exceptions;
+
+import java.lang.reflect.InvocationTargetException;
+
+/**
+ * Class for handling common failures of dynamic class resolution
+ */
+public class DynamicClassResolutionException extends UserException {
+    public DynamicClassResolutionException(Class c, Exception ex) {
+        super(String.format("Could not create module %s because %s caused by exception %s",
+                c.getSimpleName(), moreInfo(ex), ex.getMessage()));
+    }
+
+    private static String moreInfo(Exception ex) {
+        try {
+            throw ex;
+        } catch (InstantiationException e) {
+            return "BUG: cannot instantiate class: must be concrete class";
+        } catch (NoSuchMethodException e) {
+            return "BUG: Cannot find expected constructor for class";
+        } catch (IllegalAccessException e) {
+            return "Cannot instantiate class (Illegal Access)";
+        } catch (InvocationTargetException e) {
+            return "Cannot instantiate class (Invocation failure)";
+        } catch ( Exception e ) {
+            return String.format("an exception of type %s occurred",e.getClass().getSimpleName());
+        }
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/exceptions/GATKException.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/exceptions/GATKException.java
index 0eb0941..3584b79 100644
--- a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/exceptions/GATKException.java
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/exceptions/GATKException.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/exceptions/ReviewedGATKException.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/exceptions/ReviewedGATKException.java
index 56dfc69..d7595d0 100644
--- a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/exceptions/ReviewedGATKException.java
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/exceptions/ReviewedGATKException.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/exceptions/UserException.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/exceptions/UserException.java
new file mode 100644
index 0000000..ceb1d0f
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/exceptions/UserException.java
@@ -0,0 +1,490 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.exceptions;
+
+import htsjdk.samtools.CigarOperator;
+import htsjdk.samtools.SAMFileHeader;
+import htsjdk.samtools.SAMRecord;
+import htsjdk.samtools.SAMSequenceDictionary;
+import org.broadinstitute.gatk.utils.GenomeLoc;
+import org.broadinstitute.gatk.utils.help.DocumentedGATKFeature;
+import org.broadinstitute.gatk.utils.help.HelpConstants;
+import org.broadinstitute.gatk.utils.sam.ReadUtils;
+import org.broadinstitute.gatk.utils.variant.GATKVCFIndexType;
+import htsjdk.variant.variantcontext.VariantContext;
+
+import java.io.File;
+
+/**
+ * Represents the common user errors detected by GATK
+ *
+ * Root class for all GATK user errors, as well as the container for errors themselves
+ */
+@DocumentedGATKFeature(
+        groupName = HelpConstants.DOCS_CAT_USRERR,
+        summary = "Errors caused by incorrect user behavior, such as bad files, bad arguments, etc." )
+public class UserException extends ReviewedGATKException {
    /**
     * The URL where people can get help messages.  Printed when an error occurs
     */
    public static final String PHONE_HOME_DOCS_URL = "http://gatkforums.broadinstitute.org/discussion/1250/what-is-phone-home-and-how-does-it-affect-me#latest";

    public UserException(String msg) { super(msg); }
    public UserException(String msg, Throwable e) { super(msg, e); }
    private UserException(Throwable e) { super("", e); } // cannot be called, private access

    /**
     * Returns the throwable's message, falling back to its class name when the
     * message is null, so error text always identifies a cause.
     */
    protected static String getMessage(Throwable t) {
        String message = t.getMessage();
        return message != null ? message : t.getClass().getName();
    }
+
    /** Indicates that the command line itself was invalid. */
    public static class CommandLineException extends UserException {
        public CommandLineException(String message) {
            super(String.format("Invalid command line: %s", message));
        }
    }

    /** Indicates that a read filter specification could not be parsed. */
    public static class MalformedReadFilterException extends CommandLineException {
        public MalformedReadFilterException(String message) {
            super(String.format("Malformed read filter: %s",message));
        }
    }

    /** Indicates that two mutually exclusive read filters were both enabled. */
    public static class IncompatibleReadFiltersException extends CommandLineException {
        public IncompatibleReadFiltersException(final String filter1, final String filter2) {
            super(String.format("Two read filters are enabled that are incompatible and cannot be used simultaneously: %s and %s", filter1, filter2));
        }
    }

    /** Indicates that an argument supplied to a walker was invalid. */
    public static class MalformedWalkerArgumentsException extends CommandLineException {
        public MalformedWalkerArgumentsException(String message) {
            super(String.format("Malformed walker argument: %s",message));
        }
    }
+
    /** Indicates a read containing a CIGAR operator this tool cannot process. */
    public static class UnsupportedCigarOperatorException extends UserException {
        public UnsupportedCigarOperatorException(final CigarOperator co, final SAMRecord read, final String message) {
            super(String.format(
                "Unsupported CIGAR operator %s in read %s at %s:%d. %s",
                co,
                read.getReadName(),
                read.getReferenceName(),
                read.getAlignmentStart(),
                message));
        }
    }


    /** Indicates a genome location that could not be parsed or is invalid. */
    public static class MalformedGenomeLoc extends UserException {
        public MalformedGenomeLoc(String message, GenomeLoc loc) {
            super(String.format("Badly formed genome location: %s: %s", message, loc));
        }

        public MalformedGenomeLoc(String message) {
            super(String.format("Badly formed genome location: %s", message));
        }
    }
+
    /** Indicates generally invalid input supplied by the user. */
    public static class BadInput extends UserException {
        public BadInput(String message) {
            super(String.format("Bad input: %s", message));
        }
    }

    // todo -- fix up exception cause passing
    /** Indicates that a required command-line argument was not provided. */
    public static class MissingArgument extends CommandLineException {
        public MissingArgument(String arg, String message) {
            super(String.format("Argument %s was missing: %s", arg, message));
        }
    }

    /** Indicates that a command-line argument had an invalid value. */
    public static class BadArgumentValue extends CommandLineException {
        public BadArgumentValue(String arg, String message) {
            super(String.format("Argument %s has a bad value: %s", arg, message));
        }
    }

    /** Indicates a variant input file of an unrecognized (Tribble) type. */
    public static class UnknownTribbleType extends CommandLineException {
        public UnknownTribbleType(String type, String message) {
            super(String.format("Unknown variant input file type %s: %s", type, message));
        }
    }
+
+
    /** Indicates a problem using the JVM temporary directory. */
    public static class BadTmpDir extends UserException {
        public BadTmpDir(String message) {
            super(String.format("An error occurred while working with the tmp directory %s. You can specify -Djava.io.tmpdir=X on the command line (before the -jar argument) where X is a directory path, to use a more appropriate temporary directory. The exact error was %s", System.getProperties().get("java.io.tmpdir"), message));
        }
    }

    /** Indicates that the process exhausted its open file handle limit. */
    public static class TooManyOpenFiles extends UserException {
        public TooManyOpenFiles() {
            super(String.format("An error occurred because there were too many files open concurrently; your system's open file handle limit is probably too small.  See the unix ulimit command to adjust this limit or ask your system administrator for help."));
        }
    }

    /** Indicates a missing temporary file during multi-threaded execution. */
    public static class LocalParallelizationProblem extends UserException {
        public LocalParallelizationProblem(final File file) {
            super(String.format("An error occurred because temporary file %s could not be found while running the GATK with more than one thread. Possible causes for this problem include: your system's open file handle limit is too small, your output or temp directories do not have sufficient space, or your system experienced a temporary instability. Your system administrator can help you resolve these problems.", file.getAbsolutePath()));
        }
    }

    /** Indicates that the JVM heap was too small for the run. */
    public static class NotEnoughMemory extends UserException {
        public NotEnoughMemory() {
            super(String.format("An error occurred because you did not provide enough memory to run this program. You can use the -Xmx argument (before the -jar argument) to adjust the maximum heap size provided to Java. Note that this is a JVM argument, not a GATK argument."));
        }
    }
+
+    public static class ErrorWritingBamFile extends UserException {
+        public ErrorWritingBamFile(String message) {
+            super(String.format("An error occurred when trying to write the BAM file.  Usually this happens when there is not enough space in the directory to which the data is being written (generally the temp directory) or when your system's open file handle limit is too small.  Your system administrator can help you resolve these issues. If you know what temporary directory to use, you can specify it by adding -Djava.io.tmpdir=X to the command line (before the -jar argument), where X  [...]
+        }
+    }
+
    /** Indicates that the output device ran out of space. */
    public static class NoSpaceOnDevice extends UserException {
        public NoSpaceOnDevice() {
            super("Writing failed because there is no space left on the disk or hard drive. Please make some space or specify a different location for writing output files.");
        }
    }
+
    /** Indicates that an input file could not be read, with overloads covering the common failure contexts. */
    public static class CouldNotReadInputFile extends UserException {
        public CouldNotReadInputFile(String message, Exception e) {
            super(String.format("Could not read file because %s caused by %s", message, getMessage(e)));
        }

        public CouldNotReadInputFile(File file) {
            super(String.format("Could not read file %s", file.getAbsolutePath()));
        }

        public CouldNotReadInputFile(File file, String message) {
            super(String.format("Could not read file %s because %s", file.getAbsolutePath(), message));
        }

        public CouldNotReadInputFile(String file, String message) {
            super(String.format("Could not read file %s because %s", file, message));
        }

        public CouldNotReadInputFile(File file, String message, Exception e) {
            super(String.format("Could not read file %s because %s with exception %s", file.getAbsolutePath(), message, getMessage(e)));
        }

        public CouldNotReadInputFile(File file, Exception e) {
            this(file, getMessage(e));
        }

        public CouldNotReadInputFile(String message) {
            super(message);
        }
    }
+
+
    /** Indicates that an output file could not be written. */
    public static class CouldNotCreateOutputFile extends UserException {
        public CouldNotCreateOutputFile(File file, String message, Exception e) {
            super(String.format("Could not write file %s because %s with exception %s", file.getAbsolutePath(), message, getMessage(e)));
        }

        public CouldNotCreateOutputFile(File file, String message) {
            super(String.format("Could not write file %s because %s", file.getAbsolutePath(), message));
        }

        public CouldNotCreateOutputFile(String filename, String message, Exception e) {
            super(String.format("Could not write file %s because %s with exception %s", filename, message, getMessage(e)));
        }

        public CouldNotCreateOutputFile(File file, Exception e) {
            super(String.format("Could not write file %s because exception %s", file.getAbsolutePath(), getMessage(e)));
        }

        public CouldNotCreateOutputFile(String message, Exception e) {
            super(message, e);
        }
    }
+
+    public static class MissortedBAM extends UserException {
+        public MissortedBAM(SAMFileHeader.SortOrder order, File file, SAMFileHeader header) {
+            super(String.format("Missorted input SAM/BAM/CRAM files: %s must be sorted in %s order but order was: %s. Please see " + HelpConstants.forumPost("discussion/1317/collected-faqs-about-input-files-for-sequence-read-data-bam-cram") + "for more information.", file, order, header.getSortOrder()));
+        }
+
+        public MissortedBAM(SAMFileHeader.SortOrder order, String message) {
+            super(String.format("Missorted input SAM/BAM/CRAM files: files are not sorted in %s order. Please see " + HelpConstants.forumPost("discussion/1317/collected-faqs-about-input-files-for-sequence-read-data-bam-cram") + "for more information. Error details: %s", order, message));
+        }
+
+        public MissortedBAM(SAMFileHeader.SortOrder order, SAMRecord read, String message) {
+            super(String.format("Missorted input SAM/BAM/CRAM file %s: file sorted in %s order but %s is required. Please see " + HelpConstants.forumPost("discussion/1317/collected-faqs-about-input-files-for-sequence-read-data-bam-cram") + "for more information. Error details: %s",
+                    read.getFileSource().getReader(), read.getHeader().getSortOrder(), order, message));
+        }
+
+        public MissortedBAM(String message) {
+            super(String.format("Missorted input SAM/BAM/CRAM files. Please see " + HelpConstants.forumPost("discussion/1317/collected-faqs-about-input-files-for-sequence-read-data-bam-cram") + "for more information. Error details: %s", message));
+        }
+    }
+
+    public static class MalformedBAM extends UserException {
+        public MalformedBAM(SAMRecord read, String message) {
+            this(read.getFileSource() != null ? read.getFileSource().getReader().toString() : "(none)", message);
+        }
+
+        public MalformedBAM(File file, String message) {
+            this(file.toString(), message);
+        }
+
+        public MalformedBAM(String source, String message) {
+            super(String.format("SAM/BAM/CRAM file %s is malformed. Please see " + HelpConstants.forumPost("discussion/1317/collected-faqs-about-input-files-for-sequence-read-data-bam-cram") + "for more information. Error details: %s", source, message));
+        }
+    }
+
    /** Indicates quality scores that appear to use the wrong encoding (e.g. Illumina 1.3+ vs Sanger). */
    public static class MisencodedBAM extends UserException {
        public MisencodedBAM(SAMRecord read, String message) {
            this(read.getFileSource() != null ? read.getFileSource().getReader().toString() : "(none)", message);
        }

        public MisencodedBAM(String source, String message) {
            super(String.format("SAM/BAM/CRAM file %s appears to be using the wrong encoding for quality scores: %s. Please see https://www.broadinstitute.org/gatk/guide?id=6470 for more details and options related to this error.", source, message));
        }
    }
+
    /** Indicates a structurally invalid VCF file, optionally locating the offending line. */
    public static class MalformedVCF extends UserException {
        public MalformedVCF(String message, String line) {
            super(String.format("The provided VCF file is malformed at line %s: %s", line, message));
        }

        public MalformedVCF(String message) {
            super(String.format("The provided VCF file is malformed: %s", message));
        }

        public MalformedVCF(String message, int lineNo) {
            super(String.format("The provided VCF file is malformed at approximately line number %d: %s", lineNo, message));
        }
    }
+
    /** Indicates a structurally invalid BCF2 file. */
    public static class MalformedBCF2 extends UserException {
        public MalformedBCF2( String message ) {
            super(String.format("Malformed BCF2 file: %s", message));
        }
    }

    /** Indicates a VCF file whose header is invalid. */
    public static class MalformedVCFHeader extends UserException {
        public MalformedVCFHeader(String message) {
            super(String.format("The provided VCF file has a malformed header: %s", message));
        }
    }
+
+    public static class ReadMissingReadGroup extends MalformedBAM {
+        public ReadMissingReadGroup(final SAMRecord read) {
+            super(read, String.format("Read %s is missing the read group (RG) tag, which is required by the GATK. Please see " + HelpConstants.forumPost("discussion/59/companion-utilities-replacereadgroups to fix this problem"), read.getReadName()));
+        }
+    }
+
+    public static class ReadHasUndefinedReadGroup extends MalformedBAM {
+        public ReadHasUndefinedReadGroup(final SAMRecord read, final String rgID) {
+            super(read, String.format("Read %s uses a read group (%s) that is not defined in the BAM header, which is not valid.  Please see " + HelpConstants.forumPost("discussion/59/companion-utilities-replacereadgroups to fix this problem"), read.getReadName(), rgID));
+        }
+    }
+
+    public static class VariantContextMissingRequiredField extends UserException {
+        public VariantContextMissingRequiredField(String field, VariantContext vc) {
+            super(String.format("Variant at %s:%d is is missing the required field %s.", vc.getChr(), vc.getStart(), field));
+        }
+    }
+
+    public static class MissortedFile extends UserException {
+        public MissortedFile(File file, String message, Exception e) {
+            super(String.format("Missorted input file: %s is must be sorted in coordinate order. Please see " + HelpConstants.forumPost("discussion/1317/collected-faqs-about-input-files-for-sequence-read-data-bam-cram") + "for more information. Error details: %s and got error %s", file, message, getMessage(e)));
+        }
+    }
+
    /** Indicates a file that failed strict validation. */
    public static class FailsStrictValidation extends UserException {
        public FailsStrictValidation(File f, String message) {
            super(String.format("File %s fails strict validation: %s", f.getAbsolutePath(), message));
        }
    }
+
    /** Indicates a malformed file of any kind, with overloads for the available context. */
    public static class MalformedFile extends UserException {
        public MalformedFile(String message) {
            super(String.format("Unknown file is malformed: %s", message));
        }

        public MalformedFile(String message, Exception e) {
            super(String.format("Unknown file is malformed: %s caused by %s", message, getMessage(e)));
        }

        public MalformedFile(File f, String message) {
            super(String.format("File %s is malformed: %s", f.getAbsolutePath(), message));
        }

        public MalformedFile(File f, String message, Exception e) {
            super(String.format("File %s is malformed: %s caused by %s", f.getAbsolutePath(), message, getMessage(e)));
        }

        public MalformedFile(String name, String message) {
            super(String.format("File associated with name %s is malformed: %s", name, message));
        }

        public MalformedFile(String name, String message, Exception e) {
            super(String.format("File associated with name %s is malformed: %s caused by %s", name, message, getMessage(e)));
        }
     }
+
+    public static class CannotExecuteRScript extends UserException {
+        public CannotExecuteRScript(String message) {
+            super(String.format("Unable to execute RScript command: " + message));
+        }
+        public CannotExecuteRScript(String message, Exception e) {
+            super(String.format("Unable to execute RScript command: " + message), e);
+        }
+    }
+
+    public static class DeprecatedArgument extends CommandLineException {
+        public DeprecatedArgument(String param, String doc) {
+            super(String.format("The parameter %s is deprecated.  %s",param,doc));
+        }
+    }
+
+
+    public static class IncompatibleSequenceDictionaries extends UserException {
+        public IncompatibleSequenceDictionaries(String message, String name1, SAMSequenceDictionary dict1, String name2, SAMSequenceDictionary dict2) {
+            super(String.format("Input files %s and %s have incompatible contigs. Please see " + HelpConstants.forumPost("discussion/63/input-files-have-incompatible-contigs") + "for more information. Error details: %s.\n  %s contigs = %s\n  %s contigs = %s",
+                    name1, name2, message, name1, ReadUtils.prettyPrintSequenceRecords(dict1), name2, ReadUtils.prettyPrintSequenceRecords(dict2)));
+        }
+    }
+
+    public static class LexicographicallySortedSequenceDictionary extends UserException {
+        public LexicographicallySortedSequenceDictionary(String name, SAMSequenceDictionary dict) {
+            super(String.format("Lexicographically sorted human genome sequence detected in %s. Please see " + HelpConstants.forumPost("discussion/58/companion-utilities-reordersam") + "for more information. Error details: %s contigs = %s",
+                    name, name, ReadUtils.prettyPrintSequenceRecords(dict)));
+        }
+    }
+
+    public static class DeprecatedWalker extends UserException {
+        public DeprecatedWalker(String walkerName, String version) {
+            super(String.format("Walker %s is no longer available in the GATK; it has been deprecated since version %s", walkerName, version));
+        }
+    }
+
+    public static class DeprecatedAnnotation extends UserException {
+        public DeprecatedAnnotation(String annotationName, String version) {
+            super(String.format("Annotation %s is no longer available in the GATK; it has been deprecated since version %s", annotationName, version));
+        }
+    }
+
+    public static class CannotExecuteQScript extends UserException {
+        public CannotExecuteQScript(String message) {
+            super(String.format("Unable to execute QScript: " + message));
+        }
+        public CannotExecuteQScript(String message, Exception e) {
+            super(String.format("Unable to execute QScript: " + message), e);
+        }
+    }
+
+    public static class CannotHandleGzippedRef extends UserException {
+        public CannotHandleGzippedRef() {
+            super("The GATK cannot process compressed (.gz) reference sequences. Please unzip the file and try again.  Sorry for the inconvenience.");
+        }
+    }
+
+    public static class MissingReferenceFaiFile extends UserException {
+        public MissingReferenceFaiFile( final File indexFile, final File fastaFile ) {
+            super(String.format("Fasta index file %s for reference %s does not exist. Please see %s for help creating it.",
+                                indexFile.getAbsolutePath(), fastaFile.getAbsolutePath(),
+                                HelpConstants.forumPost("discussion/1601/how-can-i-prepare-a-fasta-file-to-use-as-reference")));
+        }
+    }
+
+    public static class MissingReferenceDictFile extends UserException {
+        public MissingReferenceDictFile( final File dictFile, final File fastaFile ) {
+            super(String.format("Fasta dict file %s for reference %s does not exist. Please see %s for help creating it.",
+                                dictFile.getAbsolutePath(), fastaFile.getAbsolutePath(),
+                                HelpConstants.forumPost("discussion/1601/how-can-i-prepare-a-fasta-file-to-use-as-reference")));
+        }
+    }
+
+    public static class UnreadableKeyException extends UserException {
+        public UnreadableKeyException ( File f, Exception e ) {
+            super(String.format("Key file %s cannot be read (possibly the key file is corrupt?). Error was: %s. " +
+                                "Please see %s for help.",
+                                f.getAbsolutePath(), getMessage(e), PHONE_HOME_DOCS_URL));
+        }
+
+        public UnreadableKeyException ( String message, Exception e ) {
+            this(String.format("%s. Error was: %s", message, getMessage(e)));
+        }
+
+        public UnreadableKeyException ( String message ) {
+            super(String.format("Key file cannot be read (possibly the key file is corrupt?): %s. " +
+                                "Please see %s for help.",
+                                message, PHONE_HOME_DOCS_URL));
+        }
+    }
+
+    public static class KeySignatureVerificationException extends UserException {
+        public KeySignatureVerificationException ( File f ) {
+            super(String.format("The signature in key file %s failed cryptographic verification. " +
+                                "If this key was valid in the past, it's likely been revoked. " +
+                                "Please see %s for help.",
+                                f.getAbsolutePath(), PHONE_HOME_DOCS_URL));
+        }
+    }
+
+    public static class GVCFIndexException extends UserException {
+        public GVCFIndexException (GATKVCFIndexType indexType, int indexParameter) {
+            super(String.format("GVCF output requires a specific indexing strategy.  Please re-run including the arguments " +
+                    "-variant_index_type %s -variant_index_parameter %d.",
+                    indexType, indexParameter));
+        }
+    }
+
+    /**
+     * A special exception that happens only in the case where
+     * the filesystem, by design or configuration, is completely unable
+     * to handle locking.  This exception will specifically NOT be thrown
+     * in the case where the filesystem handles locking but is unable to
+     * acquire a lock due to concurrency.
+     */
+    public static class FileSystemInabilityToLockException extends UserException {
+        public FileSystemInabilityToLockException( String message ) {
+            super(message);
+        }
+
+        public FileSystemInabilityToLockException( String message, Exception innerException ) {
+            super(message,innerException);
+        }
+    }
+
+    public static class IncompatibleRecalibrationTableParameters extends UserException {
+        public IncompatibleRecalibrationTableParameters(String s) {
+            super(s);
+        }
+    }
+
+    /**
+     * A trivial specialization of  UserException to mark that a hardware feature is not supported
+     */
+    public static class HardwareFeatureException extends UserException {
+        public HardwareFeatureException(String message) {
+            super(message);
+        }
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/fasta/ArtificialFastaUtils.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/fasta/ArtificialFastaUtils.java
new file mode 100644
index 0000000..cbf8c42
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/fasta/ArtificialFastaUtils.java
@@ -0,0 +1,154 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.fasta;
+
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+
+import java.io.FileNotFoundException;
+import java.io.FileOutputStream;
+import java.io.PrintStream;
+import java.util.List;
+
+
+/**
+ * @author aaron
+ *         <p/>
+ *         Class ArtificialFastaUtils
+ *         <p/>
+ *         artificial fasta utility class, for generating fake fastas.
+ */
+public class ArtificialFastaUtils {
+    public enum BASE_PATTERN {
+        RANDOM, ALL_A, ALL_T, ALL_C, ALL_G;
+    }
+
+    // what bases we support
+    public enum BASES {
+        A, T, C, G;
+    }
+
+    // create an artificial fasta file
+    public static void createArtificialFasta(String fileName,
+                                             List<String> contigNames,
+                                             List<Integer> contigSizes,
+                                             BASE_PATTERN pattern) {
+        PrintStream s;
+        try {
+            s = new PrintStream(new FileOutputStream(fileName));
+        } catch (FileNotFoundException e) {
+            throw new ReviewedGATKException("Filename " + fileName + " passed to the ArtificialFastaUtils generated a FileNotFound exception", e);
+        }
+        generateFakeFasta(contigNames, contigSizes, pattern, s);
+    }
+
+    // create an artificial fasta file
+    public static void createArtificialFasta(PrintStream stream,
+                                             List<String> contigNames,
+                                             List<Integer> contigSizes,
+                                             BASE_PATTERN pattern) {
+
+        generateFakeFasta(contigNames, contigSizes, pattern, stream);
+    }
+
+    /**
+     * create a fake fasta file
+     *
+     * @param contigNames the pile of contig names
+     * @param contigSizes the pile of contig sizes
+     * @param pattern     the pattern to use for the base distrobution
+     * @param s           the print stream to write to
+     */
+    private static void generateFakeFasta(List<String> contigNames, List<Integer> contigSizes, BASE_PATTERN pattern, PrintStream s) {
+        if (contigNames.size() != contigSizes.size()) {
+            throw new ReviewedGATKException("ArtificialContig name and size arrays are not equal sizes");
+        }
+        for (int x = 0; x < contigNames.size(); x++) {
+            ArtificialContig tig = new ArtificialContig(contigNames.get(x), contigSizes.get(x), pattern);
+            tig.write(s);
+        }
+        s.close();
+    }
+
+}
+
+
+/** the fake contig class, a fasta is made up of these */
+class ArtificialContig {
+    public static final int COLUMN_WIDTH = 80;
+
+    final protected String mName;
+    final protected int mSize;
+    final protected ArtificialFastaUtils.BASE_PATTERN mPattern;
+
+    public ArtificialContig(String name, int size, ArtificialFastaUtils.BASE_PATTERN pat) {
+        this.mName = name;
+        this.mSize = size;
+        this.mPattern = pat;
+    }
+
+    /**
+     * write out the contig to a stream
+     *
+     * @param stream
+     */
+    public void write(PrintStream stream) {
+        stream.println(">" + mName);
+        int count = 0;
+        while (count < mSize) {
+            for (int x = 0; x < COLUMN_WIDTH; x++) {
+                stream.print(generateAppropriateBase());
+                count++;
+                if (count >= mSize) {
+                    break;
+                }
+            }
+            stream.println();
+        }
+    }
+
+    /**
+     * generate the appropriate base, given the BASE_PATTERN
+     *
+     * @return a base, as a string
+     */
+    public String generateAppropriateBase() {
+        switch (mPattern) {
+            case RANDOM:
+                return (ArtificialFastaUtils.BASES.values()[(int) Math.round(Math.random() * 4)]).toString();
+            case ALL_A:
+                return "A";
+            case ALL_T:
+                return "T";
+            case ALL_C:
+                return "C";
+            case ALL_G:
+                return "G";
+            default:
+                throw new ReviewedGATKException("Unknown base pattern");
+        }
+    }
+
+}
\ No newline at end of file
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/fasta/CachingIndexedFastaSequenceFile.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/fasta/CachingIndexedFastaSequenceFile.java
new file mode 100644
index 0000000..a456008
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/fasta/CachingIndexedFastaSequenceFile.java
@@ -0,0 +1,370 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.fasta;
+
+import org.broadinstitute.gatk.utils.exceptions.UserException;
+import htsjdk.samtools.SAMException;
+import htsjdk.samtools.reference.FastaSequenceIndex;
+import htsjdk.samtools.reference.IndexedFastaSequenceFile;
+import htsjdk.samtools.reference.ReferenceSequence;
+import htsjdk.samtools.SAMSequenceRecord;
+import htsjdk.samtools.util.StringUtil;
+import org.apache.log4j.Priority;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+import org.broadinstitute.gatk.utils.BaseUtils;
+
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.util.Arrays;
+
+/**
+ * A caching version of the IndexedFastaSequenceFile that avoids going to disk as often as the raw indexer.
+ *
+ * Thread-safe!  Uses a thread-local cache.
+ *
+ * Automatically upper-cases the bases coming in, unless the flag preserveCase is explicitly set.
+ * Automatically converts IUPAC bases to Ns, unless the flag preserveIUPAC is explicitly set.
+ */
+public class CachingIndexedFastaSequenceFile extends IndexedFastaSequenceFile {
+    protected static final org.apache.log4j.Logger logger = org.apache.log4j.Logger.getLogger(CachingIndexedFastaSequenceFile.class);
+
+    /** do we want to print debugging information about cache efficiency? */
+    private static final boolean PRINT_EFFICIENCY = false;
+
+    /** If we are printing efficiency info, what frequency should we do it at? */
+    private static final int PRINT_FREQUENCY = 10000;
+
+    /** The default cache size in bp */
+    public static final long DEFAULT_CACHE_SIZE = 1000000;
+
+    /** The cache size of this CachingIndexedFastaSequenceFile */
+    private final long cacheSize;
+
+    /** When we have a cache miss at position X, we load sequence from X - cacheMissBackup */
+    private final long cacheMissBackup;
+
+    /**
+     * If true, we will preserve the case of the original base in the genome
+     */
+    private final boolean preserveCase;
+
+    /**
+     * If true, we will preserve the IUPAC bases in the genome
+     */
+    private final boolean preserveIUPAC;
+
+    // information about checking efficiency
+    long cacheHits = 0;
+    long cacheMisses = 0;
+
+    /** Represents a specific cached sequence, with a specific start and stop, as well as the bases */
+    private static class Cache {
+        long start = -1, stop = -1;
+        ReferenceSequence seq = null;
+    }
+
+    /**
+     * Thread local cache to allow multi-threaded use of this class
+     */
+    private ThreadLocal<Cache> cache;
+    {
+        cache = new ThreadLocal<Cache> () {
+            @Override protected Cache initialValue() {
+                return new Cache();
+            }
+        };
+    }
+
+    /**
+     * Same as general constructor but allows one to override the default cacheSize
+     *
+     * @param fasta the file we will read our FASTA sequence from.
+     * @param index the index of the fasta file, used for efficient random access
+     * @param cacheSize the size in bp of the cache we will use for this reader
+     * @param preserveCase If true, we will keep the case of the underlying bases in the FASTA, otherwise everything is converted to upper case
+     * @param preserveIUPAC If true, we will keep the IUPAC bases in the FASTA, otherwise they are converted to Ns
+     */
+    public CachingIndexedFastaSequenceFile(final File fasta, final FastaSequenceIndex index, final long cacheSize, final boolean preserveCase, final boolean preserveIUPAC) {
+        super(fasta, index);
+        if ( cacheSize < 0 ) throw new IllegalArgumentException("cacheSize must be > 0");
+        this.cacheSize = cacheSize;
+        this.cacheMissBackup = Math.max(cacheSize / 1000, 1);
+        this.preserveCase = preserveCase;
+        this.preserveIUPAC = preserveIUPAC;
+    }
+
+    /**
+     * Open the given indexed fasta sequence file.  Throw an exception if the file cannot be opened.
+     *
+     * Looks for a index file for fasta on disk
+     * Uses provided cacheSize instead of the default
+     *
+     * @param fasta The file to open.
+     * @param cacheSize the size of the cache to use in this CachingIndexedFastaReader, must be >= 0
+     * @param preserveCase If true, we will keep the case of the underlying bases in the FASTA, otherwise everything is converted to upper case
+     * @param preserveIUPAC If true, we will keep the IUPAC bases in the FASTA, otherwise they are converted to Ns
+     */
+    public CachingIndexedFastaSequenceFile(final File fasta, final long cacheSize, final boolean preserveCase, final boolean preserveIUPAC) throws FileNotFoundException {
+        super(fasta);
+        if ( cacheSize < 0 ) throw new IllegalArgumentException("cacheSize must be > 0");
+        this.cacheSize = cacheSize;
+        this.cacheMissBackup = Math.max(cacheSize / 1000, 1);
+        this.preserveCase = preserveCase;
+        this.preserveIUPAC = preserveIUPAC;
+    }
+
+    /**
+     * Same as general constructor but allows one to override the default cacheSize
+     *
+     * By default, this CachingIndexedFastaReader converts all incoming bases to upper case
+     *
+     * @param fasta the file we will read our FASTA sequence from.
+     * @param index the index of the fasta file, used for efficient random access
+     * @param cacheSize the size in bp of the cache we will use for this reader
+     */
+    public CachingIndexedFastaSequenceFile(final File fasta, final FastaSequenceIndex index, final long cacheSize) {
+        this(fasta, index, cacheSize, false, false);
+    }
+
+    /**
+     * Open the given indexed fasta sequence file.  Throw an exception if the file cannot be opened.
+     *
+     * Looks for a index file for fasta on disk.
+     * This CachingIndexedFastaReader will convert all FASTA bases to upper cases under the hood
+     *
+     * @param fasta The file to open.
+     */
+    public CachingIndexedFastaSequenceFile(final File fasta) throws FileNotFoundException {
+        this(fasta, false);
+    }
+
+    /**
+     * Open the given indexed fasta sequence file.  Throw an exception if the file cannot be opened.
+     *
+     * Looks for a index file for fasta on disk
+     *
+     * @param fasta The file to open.
+     * @param preserveCase If true, we will keep the case of the underlying bases in the FASTA, otherwise everything is converted to upper case
+     */
+    public CachingIndexedFastaSequenceFile(final File fasta, final boolean preserveCase) throws FileNotFoundException {
+        this(fasta, DEFAULT_CACHE_SIZE, preserveCase, false);
+    }
+
+    /**
+     * Open the given indexed fasta sequence file.  Throw an exception if the file cannot be opened.
+     *
+     * Looks for a index file for fasta on disk
+     *
+     * @param fasta The file to open.
+     * @param preserveCase If true, we will keep the case of the underlying bases in the FASTA, otherwise everything is converted to upper case
+     * @param preserveIUPAC If true, we will keep the IUPAC bases in the FASTA, otherwise they are converted to Ns
+     */
+    public CachingIndexedFastaSequenceFile(final File fasta, final boolean preserveCase, final boolean preserveIUPAC) throws FileNotFoundException {
+        this(fasta, DEFAULT_CACHE_SIZE, preserveCase, preserveIUPAC);
+    }
+
+    /**
+     * Create reference data source from fasta file, after performing several preliminary checks on the file.
+     * This static utility was refactored from the constructor of ReferenceDataSource.
+     * Possibly may be better as an overloaded constructor.
+     * @param fastaFile Fasta file to be used as reference
+     * @return A new instance of a CachingIndexedFastaSequenceFile.
+     */
+    public static CachingIndexedFastaSequenceFile checkAndCreate(final File fastaFile) {
+        // does the fasta file exist? check that first...
+        if (!fastaFile.exists())
+            throw new UserException("The fasta file you specified (" + fastaFile.getAbsolutePath() + ") does not exist.");
+
+        final boolean isGzipped = fastaFile.getAbsolutePath().endsWith(".gz");
+        if ( isGzipped ) {
+            throw new UserException.CannotHandleGzippedRef();
+        }
+
+        final File indexFile = new File(fastaFile.getAbsolutePath() + ".fai");
+
+        // determine the name for the dict file
+        final String fastaExt = fastaFile.getAbsolutePath().endsWith("fa") ? "\\.fa$" : "\\.fasta$";
+        final File dictFile = new File(fastaFile.getAbsolutePath().replaceAll(fastaExt, ".dict"));
+
+        // It's an error if either the fai or dict file does not exist. The user is now responsible
+        // for creating these files.
+        if (!indexFile.exists()) {
+            throw new UserException.MissingReferenceFaiFile(indexFile, fastaFile);
+        }
+        if (!dictFile.exists()) {
+            throw new UserException.MissingReferenceDictFile(dictFile, fastaFile);
+        }
+
+        // Read reference data by creating an IndexedFastaSequenceFile.
+        try {
+            return new CachingIndexedFastaSequenceFile(fastaFile);
+        }
+        catch (IllegalArgumentException e) {
+            throw new UserException.CouldNotReadInputFile(fastaFile, "Could not read reference sequence.  The FASTA must have either a .fasta or .fa extension", e);
+        }
+        catch (Exception e) {
+            throw new UserException.CouldNotReadInputFile(fastaFile, e);
+        }
+    }
+
+    /**
+     * Open the given indexed fasta sequence file.  Throw an exception if the file cannot be opened.
+     *
+     * Looks for a index file for fasta on disk
+     * Uses provided cacheSize instead of the default
+     *
+     * @param fasta The file to open.
+     * @param cacheSize the size of the cache to use in this CachingIndexedFastaReader, must be >= 0
+     */
+    public CachingIndexedFastaSequenceFile(final File fasta, final long cacheSize ) throws FileNotFoundException {
+        this(fasta, cacheSize, false, false);
+    }
+
+    /**
+     * Print the efficiency (hits / queries) to logger with priority
+     */
+    public void printEfficiency(final Priority priority) {
+        logger.log(priority, String.format("### CachingIndexedFastaReader: hits=%d misses=%d efficiency %.6f%%", cacheHits, cacheMisses, calcEfficiency()));
+    }
+
+    /**
+     * Returns the efficiency (% of hits of all queries) of this object
+     * @return
+     */
+    public double calcEfficiency() {
+        return 100.0 * cacheHits / (cacheMisses + cacheHits * 1.0);
+    }
+
+    /**
+     * @return the number of cache hits that have occurred
+     */
+    public long getCacheHits() {
+        return cacheHits;
+    }
+
+    /**
+     * @return the number of cache misses that have occurred
+     */
+    public long getCacheMisses() {
+        return cacheMisses;
+    }
+
+    /**
+     * @return the size of the cache we are using
+     */
+    public long getCacheSize() {
+        return cacheSize;
+    }
+
+    /**
+     * Is this CachingIndexedFastaReader keeping the original case of bases in the fasta, or is
+     * everything being made upper case?
+     *
+     * @return true if the bases coming from this reader are in the original case in the fasta, false if they are all upper cased
+     */
+    public boolean isPreservingCase() {
+        return preserveCase;
+    }
+
+    /**
+     * Is uppercasing bases?
+     *
+     * @return true if bases coming from this CachingIndexedFastaSequenceFile are all upper cased, false if this reader are in the original case in the fasta
+     */
+    public boolean isUppercasingBases() {
+        return ! isPreservingCase();
+    }
+
+    /**
+     * Is this CachingIndexedFastaReader keeping the IUPAC bases in the fasta, or is it turning them into Ns?
+     *
+     * @return true if the IUPAC bases coming from this reader are not modified
+     */
+    public boolean isPreservingIUPAC() {
+        return preserveIUPAC;
+    }
+
+    /**
+     * Gets the subsequence of the contig in the range [start,stop]
+     *
+     * Uses the sequence cache if possible, or updates the cache to handle the request.  If the range
+     * is larger than the cache itself, just loads the sequence directly, not changing the cache at all
+     *
+     * @param contig Contig whose subsequence to retrieve.
+     * @param start inclusive, 1-based start of region.
+     * @param stop inclusive, 1-based stop of region.
+     * @return The partial reference sequence associated with this range.  If preserveCase is false, then
+     *         all of the bases in the ReferenceSequence returned by this method will be upper cased.
+     */
+    @Override
+    public ReferenceSequence getSubsequenceAt( final String contig, long start, final long stop ) {
+        final ReferenceSequence result;
+        final Cache myCache = cache.get();
+
+        if ( (stop - start) >= cacheSize ) {
+            cacheMisses++;
+            result = super.getSubsequenceAt(contig, start, stop);
+            if ( ! preserveCase ) StringUtil.toUpperCase(result.getBases());
+            if ( ! preserveIUPAC ) BaseUtils.convertIUPACtoN(result.getBases(), true, start < 1);
+        } else {
+            // todo -- potential optimization is to check if contig.name == contig, as this in general will be true
+            SAMSequenceRecord contigInfo = super.getSequenceDictionary().getSequence(contig);
+
+            if (stop > contigInfo.getSequenceLength())
+                throw new SAMException("Query asks for data past end of contig");
+
+            if ( start < myCache.start || stop > myCache.stop || myCache.seq == null || myCache.seq.getContigIndex() != contigInfo.getSequenceIndex() ) {
+                cacheMisses++;
+                myCache.start = Math.max(start - cacheMissBackup, 0);
+                myCache.stop  = Math.min(start + cacheSize + cacheMissBackup, contigInfo.getSequenceLength());
+                myCache.seq   = super.getSubsequenceAt(contig, myCache.start, myCache.stop);
+
+                // convert all of the bases in the sequence to upper case if we aren't preserving cases
+                if ( ! preserveCase ) StringUtil.toUpperCase(myCache.seq.getBases());
+                if ( ! preserveIUPAC ) BaseUtils.convertIUPACtoN(myCache.seq.getBases(), true, myCache.start == 0);
+            } else {
+                cacheHits++;
+            }
+
+            // at this point we determine where in the cache we want to extract the requested subsequence
+            final int cacheOffsetStart = (int)(start - myCache.start);
+            final int cacheOffsetStop = (int)(stop - start + cacheOffsetStart + 1);
+
+            try {
+                result = new ReferenceSequence(myCache.seq.getName(), myCache.seq.getContigIndex(), Arrays.copyOfRange(myCache.seq.getBases(), cacheOffsetStart, cacheOffsetStop));
+            } catch ( ArrayIndexOutOfBoundsException e ) {
+                throw new ReviewedGATKException(String.format("BUG: bad array indexing.  Cache start %d and end %d, request start %d end %d, offset start %d and end %d, base size %d",
+                        myCache.start, myCache.stop, start, stop, cacheOffsetStart, cacheOffsetStop, myCache.seq.getBases().length), e);
+            }
+        }
+
+        // for debugging -- print out our efficiency if requested
+        if ( PRINT_EFFICIENCY && (getCacheHits() + getCacheMisses()) % PRINT_FREQUENCY == 0 )
+            printEfficiency(Priority.INFO);
+
+        return result;
+    }
+}
\ No newline at end of file
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/fasta/package-info.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/fasta/package-info.java
new file mode 100644
index 0000000..d0d7a6a
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/fasta/package-info.java
@@ -0,0 +1,26 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.fasta;
\ No newline at end of file
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/file/FSLockWithShared.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/file/FSLockWithShared.java
new file mode 100644
index 0000000..813d697
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/file/FSLockWithShared.java
@@ -0,0 +1,293 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.file;
+
+import org.apache.log4j.Logger;
+import org.broadinstitute.gatk.utils.exceptions.UserException;
+
+import java.io.File;
+import java.io.IOException;
+import java.io.RandomAccessFile;
+import java.nio.channels.*;
+import java.util.concurrent.*;
+
+/**
+ * a quick implementation of a file based lock, using the Java NIO classes
+ */
+public class FSLockWithShared {
+    // connect to the logger
+    private final static Logger logger = Logger.getLogger(FSLockWithShared.class);
+
+    // the file we're attempting to lock
+    private final File file;
+
+    // the file lock
+    private FileLock lock = null;
+
+    // the file channel we open
+    private FileChannel channel = null;
+
+    // Timeout (in milliseconds) before we give up during non-blocking lock-acquisition calls.
+    // Necessary because these "non-blocking" calls can hang if there's a problem with the
+    // OS file locking support.
+    private int lockAcquisitionTimeout;
+
+    // Default value for lockAcquisitionTimeout when none is explicitly provided
+    public static final int DEFAULT_LOCK_ACQUISITION_TIMEOUT_IN_MILLISECONDS = 30 * 1000;
+
+    // Amount of time to wait when trying to shut down the lock-acquisition thread before giving up
+    public static final int THREAD_TERMINATION_TIMEOUT_IN_MILLISECONDS = 30 * 1000;
+
+    /**
+     * Create a lock associated with the specified File. Use the default lock
+     * acquisition timeout of 30 seconds.
+     *
+     * @param file file to lock
+     */
+    public FSLockWithShared( final File file ) {
+        this.file = file;
+        lockAcquisitionTimeout = DEFAULT_LOCK_ACQUISITION_TIMEOUT_IN_MILLISECONDS;
+    }
+
+    /**
+     * Create a lock associated with the specified File, and set a custom lock
+     * acquisition timeout.
+     *
+     * @param file file to lock
+     * @param lockAcquisitionTimeout maximum number of milliseconds to wait during non-blocking
+     *                               lock acquisition calls before concluding that there's a
+     *                               problem with the OS file locking support and throwing an error.
+     */
+    public FSLockWithShared( final File file, final int lockAcquisitionTimeout ) {
+        this.file = file;
+        this.lockAcquisitionTimeout = lockAcquisitionTimeout;
+    }
+
+    /**
+     * Get a shared (read) lock on a file. Does not block, and returns immediately
+     * under normal conditions with the result of the lock acquisition attempt. Will
+     * throw an exception if there's a problem with the OS file locking support.
+     *
+     * @return boolean true if we obtained a lock, false if we failed to obtain one
+     */
+    public boolean sharedLock() {
+        return acquireLockWithTimeout(true);
+    }
+
+    /**
+     * Get an exclusive (read-write) lock on a file. Does not block, and returns immediately
+     * under normal conditions with the result of the lock acquisition attempt. Will
+     * throw an exception if there's a problem with the OS file locking support.
+     *
+     * @return boolean true if we obtained a lock, false if we failed to obtain one
+     */
+    public boolean exclusiveLock() {
+        return acquireLockWithTimeout(false);
+    }
+
+    /**
+     * Attempt to acquire a lock of the specified type on the file in a background thread.
+     * Uses non-blocking lock-acquisition calls that should return immediately, but may
+     * get stuck if there's a problem with the OS file locking support. If the call gets
+     * stuck and the timeout elapses, throws a UserException, since it's not safe to
+     * proceed with a stuck lock acquisition thread (and there's no way to reliably
+     * interrupt it once the underlying system call hangs).
+     *
+     * @param acquireSharedLock if true, request a shared lock rather than an exclusive lock
+     * @return true if a lock was acquired, false if we failed
+     */
+    private boolean acquireLockWithTimeout( final boolean acquireSharedLock ) {
+        // Use daemon threads so that hopelessly stuck lock acquisition threads won't prevent the JVM from exiting
+        final ExecutorService executor = Executors.newSingleThreadExecutor(new ThreadFactory() {
+                                                                               public Thread newThread( Runnable r ) {
+                                                                                   Thread lockAcquisitionThread = new Thread(r);
+                                                                                   lockAcquisitionThread.setDaemon(true);
+                                                                                   return lockAcquisitionThread;
+                                                                               }
+                                                                           });
+        final FutureTask<Boolean> lockAcquisitionTask = new FutureTask<Boolean>(new LockAcquisitionTask(acquireSharedLock));
+        boolean lockAcquired = false;
+
+        try {
+            executor.execute(lockAcquisitionTask);
+
+            // Wait at most lockAcquisitionTimeout milliseconds for the lock acquisition task to finish.
+            lockAcquired = lockAcquisitionTask.get(lockAcquisitionTimeout, TimeUnit.MILLISECONDS);
+        }
+        // Lock acquisition timeout elapsed. Since we're using NON-BLOCKING lock-acquisition calls,
+        // this implies that there's a problem with the OS locking daemon, or locks are not supported.
+        // Since it's not safe to proceed with a potentially stuck lock acquisition thread, we need to
+        // shut down the JVM in order to kill it.
+        catch ( TimeoutException e ) {
+            throw new UserException.FileSystemInabilityToLockException(
+                    String.format("Timeout of %d milliseconds was reached while trying to acquire a lock on file %s. " +
+                                  "Since the GATK uses non-blocking lock acquisition calls that are not supposed to wait, " +
+                                  "this implies a problem with the file locking support in your operating system.",
+                                  lockAcquisitionTimeout, file.getAbsolutePath()));
+        }
+        // Lock acquisition thread threw an exception. Need to unpack it via e.getCause()
+        catch ( ExecutionException e ) {
+            logger.warn(String.format("WARNING: Unable to lock file %s because exception %s occurred with error message %s",
+                                      file.getAbsolutePath(),
+                                      e.getCause() != null ? e.getCause().getClass().getSimpleName() : "unknown",
+                                      e.getCause() != null ? e.getCause().getMessage() : "none"));
+            lockAcquired = false;
+        }
+        // Interrupted while waiting for the lock acquisition thread -- not likely to happen
+        catch ( InterruptedException e ) {
+            logger.warn(String.format("WARNING: interrupted while attempting to acquire a lock for file %s", file.getAbsolutePath()));
+            lockAcquired = false;
+        }
+        catch ( Exception e ) {
+            logger.warn(String.format("WARNING: error while attempting to acquire a lock for file %s. Error message: %s",
+                                      file.getAbsolutePath(), e.getMessage()));
+            lockAcquired = false;
+        }
+
+        shutdownLockAcquisitionTask(executor);
+
+        // Upon failure to acquire a lock, we always call unlock() to close the FileChannel if it was opened
+        // and to deal with very hypothetical edge cases where a lock might actually have been acquired despite the
+        // lock acquisition thread returning false.
+        if ( ! lockAcquired ) {
+            unlock();
+        }
+
+        return lockAcquired;
+    }
+
+    /**
+     * Ensures that the lock acquisition task running in the provided executor has cleanly terminated.
+     * Throws a UserException if unable to shut it down within the period defined by the THREAD_TERMINATION_TIMEOUT.
+     *
+     * @param executor ExecutorService executing the lock-acquisition thread
+     */
+    private void shutdownLockAcquisitionTask( final ExecutorService executor ) {
+        boolean shutdownAttemptSucceeded;
+
+        try {
+            executor.shutdownNow();
+            shutdownAttemptSucceeded = executor.awaitTermination(THREAD_TERMINATION_TIMEOUT_IN_MILLISECONDS, TimeUnit.MILLISECONDS);
+        }
+        catch ( InterruptedException e ) {
+            shutdownAttemptSucceeded = false;
+        }
+
+        if ( ! shutdownAttemptSucceeded ) {
+            throw new UserException(String.format("Failed to terminate lock acquisition thread while trying to lock file %s. " +
+                                                  "Exiting because it's not safe to proceed with this run of the GATK.",
+                                                  file.getAbsolutePath()));
+        }
+    }
+
+    /**
+     * Background task that attempts to acquire a lock of the specified type, and returns a boolean
+     * indicating success/failure. Uses a non-blocking tryLock() call that should return immediately
+     * (but may get stuck if there's a problem with the OS locking daemon).
+     */
+    private class LockAcquisitionTask implements Callable<Boolean> {
+        private final boolean acquireSharedLock;
+
+        public LockAcquisitionTask( final boolean acquireSharedLock ) {
+            this.acquireSharedLock = acquireSharedLock;
+        }
+
+        public Boolean call() {
+            // Get a read-only or read-write file channel, depending on the type of lock
+            try {
+                channel = new RandomAccessFile(file, acquireSharedLock ? "r" : "rw").getChannel();
+            }
+            catch ( IOException e ) {
+                logger.warn(String.format("WARNING: Unable to lock file %s because we could not open a file channel", file.getAbsolutePath()));
+                return false;
+            }
+
+            boolean lockAcquired = false;
+
+            try {
+                // Non-blocking lock-acquisition call, should return right away. If it doesn't return immediately
+                // due to problems with the OS locking daemon, it will potentially be timed-out and interrupted.
+                lock = channel.tryLock(0, Long.MAX_VALUE, acquireSharedLock);
+                lockAcquired = lock != null;
+            }
+            catch ( AsynchronousCloseException e ) {
+                logger.warn(String.format("WARNING: Unable to lock file %s because the file channel was closed by another thread", file.getAbsolutePath()));
+                lockAcquired = false;
+            }
+            catch ( ClosedChannelException e ) {
+                logger.warn(String.format("WARNING: Unable to lock file %s because the file channel is closed.", file.getAbsolutePath()));
+                lockAcquired = false;
+            }
+            catch ( OverlappingFileLockException e ) {
+                logger.warn(String.format("WARNING: Unable to lock file %s because you already have a lock on this file.", file.getAbsolutePath()));
+                lockAcquired = false;
+            }
+            catch ( FileLockInterruptionException e ) {
+                logger.warn(String.format("WARNING: Interrupted while attempting to lock file %s", file.getAbsolutePath()));
+                lockAcquired = false;
+            }
+            catch ( IOException e ) {
+                logger.warn(String.format("WARNING: Unable to lock file %s because an IOException occurred with message: %s.", file.getAbsolutePath(), e.getMessage()));
+                lockAcquired = false;
+            }
+
+            return lockAcquired;
+        }
+    }
+
+    /**
+     * Unlock the file
+     *
+     * note: this allows unlocking a file that failed to lock (no required user checks on null locks).
+     */
+    public void unlock() {
+        releaseLock();
+        closeChannel();
+    }
+
+    private void releaseLock() {
+        try {
+            if ( lock != null )
+                lock.release();
+        }
+        catch ( ClosedChannelException e ) {
+            // if the channel was already closed we don't have to worry
+        }
+        catch ( IOException e ) {
+            throw new UserException(String.format("An error occurred while releasing the lock for file %s", file.getAbsolutePath()), e);
+        }
+    }
+
+    private void closeChannel() {
+        try {
+            if ( channel != null )
+                channel.close();
+        }
+        catch ( IOException e ) {
+            throw new UserException(String.format("An error occurred while closing channel for file %s", file.getAbsolutePath()), e);
+        }
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/fragments/FragmentCollection.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/fragments/FragmentCollection.java
new file mode 100644
index 0000000..f381133
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/fragments/FragmentCollection.java
@@ -0,0 +1,67 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.fragments;
+
+import java.util.Collection;
+import java.util.Collections;
+import java.util.List;
+
/**
 * Useful helper class to represent the results of the reads -> fragment calculation.
 *
 * Contains singletons -- objects whose underlying reads do not overlap their mate pair
 * Contains overlappingPairs -- objects whose underlying reads do overlap their mate pair
 *
 * User: ebanks, depristo
 * Date: Jan 10, 2011
 */
public class FragmentCollection<T> {
    // both fields are made final (they are assigned once and never reassigned) and are
    // guaranteed non-null: null constructor arguments are replaced by empty lists
    final Collection<T> singletons;
    final Collection<List<T>> overlappingPairs;

    /**
     * Create a fragment collection from the two partitions of the reads -> fragment calculation.
     *
     * @param singletons elements whose reads do not overlap their mates; may be null, treated as empty
     * @param overlappingPairs pairs of elements whose reads overlap; may be null, treated as empty
     */
    public FragmentCollection(final Collection<T> singletons, final Collection<List<T>> overlappingPairs) {
        this.singletons = singletons == null ? Collections.<T>emptyList() : singletons;
        this.overlappingPairs = overlappingPairs == null ? Collections.<List<T>>emptyList() : overlappingPairs;
    }

    /**
     * Gets the T elements not containing overlapping elements, in no particular order
     *
     * @return a non-null (possibly empty) collection of singleton elements
     */
    public Collection<T> getSingletonReads() {
        return singletons;
    }

    /**
     * Gets the T elements containing overlapping elements, in no particular order
     *
     * @return a non-null (possibly empty) collection of overlapping pairs
     */
    public Collection<List<T>> getOverlappingPairs() {
        return overlappingPairs;
    }
}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/fragments/FragmentUtils.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/fragments/FragmentUtils.java
new file mode 100644
index 0000000..cbcce0b
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/fragments/FragmentUtils.java
@@ -0,0 +1,377 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.fragments;
+
+import com.google.java.contract.Ensures;
+import com.google.java.contract.Requires;
+import htsjdk.samtools.util.QualityUtil;
+import htsjdk.samtools.Cigar;
+import htsjdk.samtools.CigarElement;
+import htsjdk.samtools.CigarOperator;
+import htsjdk.samtools.SAMRecord;
+import org.broadinstitute.gatk.utils.clipping.ReadClipper;
+import org.broadinstitute.gatk.utils.recalibration.EventType;
+import org.broadinstitute.gatk.utils.collections.Pair;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+import org.broadinstitute.gatk.utils.pileup.PileupElement;
+import org.broadinstitute.gatk.utils.pileup.ReadBackedPileup;
+import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
+import org.broadinstitute.gatk.utils.sam.ReadUtils;
+
+import java.util.*;
+
+/**
+ * An easy to access fragment-based pileup, which contains two separate pileups.  The first
+ * is a regular collection of PileupElements containing all of the reads in the original RBP
+ * that uniquely carry info about a fragment.  The second are TwoReadPileupElements that, as the
+ * name suggests, contain two reads that are sequenced from the same underlying fragment.
+ *
+ * Based on the original code by E. Banks
+ *
+ * Oct 21: note that the order of the oneReadPileup and twoReadPileups are not
+ * defined.  The algorithms that produce these lists are in fact producing
+ * lists of Pileup elements *NOT* sorted by alignment start position of the underlying
+ * reads.
+ *
+ * User: depristo
+ * Date: 3/26/11
+ * Time: 10:09 PM
+ */
+public final class FragmentUtils {
+
    // Assumed PCR error rate: a mismatch between overlapping mates is most likely a PCR artifact
    public final static double DEFAULT_PCR_ERROR_RATE = 1e-4;
    // DEFAULT_PCR_ERROR_RATE expressed as a phred-scaled quality score
    public final static int DEFAULT_PCR_ERROR_QUAL = QualityUtil.getPhredScoreFromErrorProbability(DEFAULT_PCR_ERROR_RATE);
    // cap applied to each of the two overlapping bases when they agree (splitting the PCR error between them)
    public final static int HALF_OF_DEFAULT_PCR_ERROR_QUAL = DEFAULT_PCR_ERROR_QUAL / 2;

    // quality threshold below which an overlapping base is considered unreliable
    protected final static byte MIN_QUAL_BAD_OVERLAP = 16;
    private FragmentUtils() {} // private constructor -- static utility class, never instantiated
+
    /**
     * A getter function that takes an Object of type T and returns its associated SAMRecord.
     *
     * Allows us to write a generic T -> Fragment algorithm that works with any object containing
     * a read (e.g. a raw read, or a pileup element wrapping one).
     *
     * @param <T> The type of the object that contains a GATKSAMRecord
     */
    public interface ReadGetter<T> {
        /**
         * Get the GATKSAMRecord associated with object
         *
         * @param object the thing that contains the read; must not be null
         * @return a non-null GATKSAMRecord read
         */
        public GATKSAMRecord get(T object);
    }
+
    /**
     * Identity getter for SAMRecords themselves: the object IS the read
     */
    private final static ReadGetter<GATKSAMRecord> SamRecordGetter = new ReadGetter<GATKSAMRecord>() {
        @Override public GATKSAMRecord get(final GATKSAMRecord object) { return object; }
    };

    /**
     * Gets the SAMRecord wrapped inside a PileupElement
     */
    private final static ReadGetter<PileupElement> PileupElementGetter = new ReadGetter<PileupElement>() {
        @Override public GATKSAMRecord get(final PileupElement object) { return object.getRead(); }
    };
+
+
    /**
     * Generic algorithm that takes an iterable over T objects, a getter routine to extract the reads in T,
     * and returns a FragmentCollection that contains the T objects whose underlying reads either overlap (or
     * not) with their mate pairs.
     *
     * The incoming objects must be ordered by the alignment start of their underlying reads; an
     * IllegalArgumentException is thrown as soon as an out-of-order read is seen.
     *
     * @param readContainingObjects An iterator of objects that contain GATKSAMRecords
     * @param nElements the number of elements to be provided by the iterator, which is usually known upfront and
     *                  greatly improves the efficiency of the fragment calculation
     * @param getter a helper function that takes an object of type T and returns its associated GATKSAMRecord
     * @param <T> the type of the read-containing objects
     * @return a fragment collection
     */
    @Requires({
            "readContainingObjects != null",
            "nElements >= 0",
            "getter != null"
    })
    @Ensures("result != null")
    private static <T> FragmentCollection<T> create(final Iterable<T> readContainingObjects, final int nElements, final ReadGetter<T> getter) {
        // all three collections are lazily initialized so nothing is allocated when they'd stay empty
        Collection<T> singletons = null;
        Collection<List<T>> overlapping = null;
        Map<String, T> nameMap = null;

        int lastStart = -1;

        // build an initial map, grabbing all of the multi-read fragments
        for ( final T p : readContainingObjects ) {
            final SAMRecord read = getter.get(p);

            // enforce the ordering precondition: reads must arrive sorted by alignment start
            if ( read.getAlignmentStart() < lastStart ) {
                throw new IllegalArgumentException(String.format(
                        "FragmentUtils.create assumes that the incoming objects are ordered by " +
                                "SAMRecord alignment start, but saw a read %s with alignment start " +
                                "%d before the previous start %d", read.getSAMString(), read.getAlignmentStart(), lastStart));
            }
            lastStart = read.getAlignmentStart();

            // mateStart == 0 means no mapped mate; a mate starting past our end can't overlap us
            final int mateStart = read.getMateAlignmentStart();
            if ( mateStart == 0 || mateStart > read.getAlignmentEnd() ) {
                // if we know that this read won't overlap its mate, or doesn't have one, jump out early
                if ( singletons == null ) singletons = new ArrayList<T>(nElements); // lazy init
                singletons.add(p);
            } else {
                // the read might overlap it's mate, or is the rightmost read of a pair
                final String readName = read.getReadName();
                final T pe1 = nameMap == null ? null : nameMap.get(readName);
                if ( pe1 != null ) {
                    // assumes we have at most 2 reads per fragment
                    if ( overlapping == null ) overlapping = new ArrayList<List<T>>(); // lazy init
                    overlapping.add(Arrays.asList(pe1, p));
                    nameMap.remove(readName);
                } else {
                    if ( nameMap == null ) nameMap = new HashMap<String, T>(nElements); // lazy init
                    nameMap.put(readName, p);
                }
            }
        }

        // add all of the reads that are potentially overlapping but whose mate never showed
        // up to the oneReadPile
        if ( nameMap != null && ! nameMap.isEmpty() ) {
            if ( singletons == null )
                singletons = nameMap.values(); // NOTE: a live view of the map, not a copy -- safe only because nameMap is not modified afterwards
            else
                singletons.addAll(nameMap.values());
        }

        return new FragmentCollection<T>(singletons, overlapping);
    }
+
+    /**
+     * Create a FragmentCollection containing PileupElements from the ReadBackedPileup rbp
+     * @param rbp a non-null read-backed pileup.  The elements in this ReadBackedPileup must be ordered
+     * @return a non-null FragmentCollection
+     */
+    @Ensures("result != null")
+    public static FragmentCollection<PileupElement> create(final ReadBackedPileup rbp) {
+        if ( rbp == null ) throw new IllegalArgumentException("Pileup cannot be null");
+        return create(rbp, rbp.getNumberOfElements(), PileupElementGetter);
+    }
+
+    /**
+     * Create a FragmentCollection containing GATKSAMRecords from a list of reads
+     *
+     * @param reads a non-null list of reads, ordered by their start location
+     * @return a non-null FragmentCollection
+     */
+    @Ensures("result != null")
+    public static FragmentCollection<GATKSAMRecord> create(final List<GATKSAMRecord> reads) {
+        if ( reads == null ) throw new IllegalArgumentException("Pileup cannot be null");
+        return create(reads, reads.size(), SamRecordGetter);
+    }
+
+    public static void adjustQualsOfOverlappingPairedFragments( final List<GATKSAMRecord> overlappingPair ) {
+        if( overlappingPair.size() != 2 ) { throw new ReviewedGATKException("Found overlapping pair with " + overlappingPair.size() + " reads, but expecting exactly 2."); }
+
+        final GATKSAMRecord firstRead = overlappingPair.get(0);
+        final GATKSAMRecord secondRead = overlappingPair.get(1);
+
+        if ( secondRead.getSoftStart() < firstRead.getSoftStart() ) {
+            adjustQualsOfOverlappingPairedFragments(secondRead, firstRead);
+        } else {
+            adjustQualsOfOverlappingPairedFragments(firstRead, secondRead);
+        }
+    }
+
    /**
     * Fix two overlapping reads from the same fragment by adjusting base qualities, if possible
     *
     * The two reads must be part of the same fragment (though this isn't checked beyond the read
     * name).  Looks at the bases and alignment, and tries its best to create adjusted base
     * qualities so that the two overlapping observations are not treated independently: bases that
     * agree are capped at half the assumed PCR error quality, bases that disagree are zeroed out.
     *
     * Assumes that clippedFirstRead starts before clippedSecondRead (according to their soft clipped starts).
     *
     * NOTE: this method mutates both reads' base qualities in place and returns nothing.  (The
     * previous javadoc claimed a merged read was returned; that text was copy-pasted from
     * mergeOverlappingPairedFragments and did not apply to this void method.)
     *
     * @param clippedFirstRead the left most read
     * @param clippedSecondRead the right most read
     */
    public static void adjustQualsOfOverlappingPairedFragments(final GATKSAMRecord clippedFirstRead, final GATKSAMRecord clippedSecondRead) {
        if ( clippedFirstRead == null ) throw new IllegalArgumentException("clippedFirstRead cannot be null");
        if ( clippedSecondRead == null ) throw new IllegalArgumentException("clippedSecondRead cannot be null");
        if ( ! clippedFirstRead.getReadName().equals(clippedSecondRead.getReadName()) ) throw new IllegalArgumentException("attempting to merge two reads with different names " + clippedFirstRead + " and " + clippedSecondRead);

        // don't adjust fragments that do not overlap
        if ( clippedFirstRead.getAlignmentEnd() < clippedSecondRead.getAlignmentStart() || !clippedFirstRead.getReferenceIndex().equals(clippedSecondRead.getReferenceIndex()) )
            return;

        // find the read coordinate in the first read corresponding to the second read's alignment start;
        // the boolean half of the pair presumably flags a fall-in-deletion/rounding case -- TODO confirm against ReadUtils
        final Pair<Integer, Boolean> pair = ReadUtils.getReadCoordinateForReferenceCoordinate(clippedFirstRead, clippedSecondRead.getAlignmentStart());
        final int firstReadStop = ( pair.getSecond() ? pair.getFirst() + 1 : pair.getFirst() );
        final int numOverlappingBases = Math.min(clippedFirstRead.getReadLength() - firstReadStop, clippedSecondRead.getReadLength());

        final byte[] firstReadBases = clippedFirstRead.getReadBases();
        final byte[] firstReadQuals = clippedFirstRead.getBaseQualities();
        final byte[] secondReadBases = clippedSecondRead.getReadBases();
        final byte[] secondReadQuals = clippedSecondRead.getBaseQualities();

        // walk the overlap base by base, pairing position firstReadStop+i of read 1 with position i of read 2
        for ( int i = 0; i < numOverlappingBases; i++ ) {
            final int firstReadIndex = firstReadStop + i;
            final byte firstReadBase = firstReadBases[firstReadIndex];
            final byte secondReadBase = secondReadBases[i];

            if ( firstReadBase == secondReadBase ) {
                // bases agree: cap both quals at half the PCR error qual so the pair isn't double-counted
                firstReadQuals[firstReadIndex] = (byte) Math.min(firstReadQuals[firstReadIndex], HALF_OF_DEFAULT_PCR_ERROR_QUAL);
                secondReadQuals[i] = (byte) Math.min(secondReadQuals[i], HALF_OF_DEFAULT_PCR_ERROR_QUAL);
            } else {
                // bases disagree: zero both quals so neither observation is trusted
                // TODO -- use the proper statistical treatment of the quals from DiploidSNPGenotypeLikelihoods.java
                firstReadQuals[firstReadIndex] = 0;
                secondReadQuals[i] = 0;
            }
        }

        clippedFirstRead.setBaseQualities(firstReadQuals);
        clippedSecondRead.setBaseQualities(secondReadQuals);
    }
+
+    /**
+     * Merges a pair of overlapping reads from the same fragment into one synthetic read, when possible.
+     *
+     * @param overlappingPair exactly two reads from the same fragment.
+     * @return a singleton list holding the merged read, or the original pair when no merge is possible.
+     */
+    public static List<GATKSAMRecord> mergeOverlappingPairedFragments( final List<GATKSAMRecord> overlappingPair ) {
+        if( overlappingPair.size() != 2 ) { throw new ReviewedGATKException("Found overlapping pair with " + overlappingPair.size() + " reads, but expecting exactly 2."); }
+
+        final GATKSAMRecord left = overlappingPair.get(0);
+        final GATKSAMRecord right = overlappingPair.get(1);
+
+        // The two-read overload assumes its first argument is the left-most read of a staggered
+        // overlap; when the pair as given does not satisfy that geometry, try the swapped order.
+        final boolean leftIsFirst = right.getSoftStart() <= left.getSoftEnd()
+                && right.getSoftStart() >= left.getSoftStart()
+                && right.getSoftEnd() >= left.getSoftEnd();
+        final GATKSAMRecord merged = leftIsFirst
+                ? mergeOverlappingPairedFragments(left, right)
+                : mergeOverlappingPairedFragments(right, left);
+
+        // No meaningful merge: hand back the original pair untouched.
+        return merged == null ? overlappingPair : Collections.singletonList(merged);
+    }
+
+    /**
+     * Merge two overlapping reads from the same fragment into a single super read, if possible
+     *
+     * firstRead and secondRead must be part of the same fragment (though this isn't checked).  Looks
+     * at the bases and alignment, and tries its best to create a meaningful synthetic single super read
+     * that represents the entire sequenced fragment.
+     *
+     * Assumes that firstRead starts before secondRead (according to their soft clipped starts)
+     *
+     * Merging is refused (null is returned) when either read's cigar contains an indel, when the
+     * second read does not extend past the end of the first (i.e. one read is contained in the
+     * other), or when the overlap contains a high-quality mismatch or a position where both reads
+     * are low quality (see MIN_QUAL_BAD_OVERLAP usage below).
+     *
+     * @param unclippedFirstRead the left most read
+     * @param unclippedSecondRead the right most read
+     *
+     * @return a strandless merged read of first and second, or null if the algorithm cannot create a meaningful one
+     */
+    public static GATKSAMRecord mergeOverlappingPairedFragments(final GATKSAMRecord unclippedFirstRead, final GATKSAMRecord unclippedSecondRead) {
+        if ( unclippedFirstRead == null ) throw new IllegalArgumentException("unclippedFirstRead cannot be null");
+        if ( unclippedSecondRead == null ) throw new IllegalArgumentException("unclippedSecondRead cannot be null");
+        if ( ! unclippedFirstRead.getReadName().equals(unclippedSecondRead.getReadName()) ) throw new IllegalArgumentException("attempting to merge two reads with different names " + unclippedFirstRead + " and " + unclippedSecondRead);
+
+        // The merged read is emitted with a single all-M cigar (see below), so refuse any pair
+        // whose alignments contain insertions or deletions.
+        if( unclippedFirstRead.getCigarString().contains("I") || unclippedFirstRead.getCigarString().contains("D") || unclippedSecondRead.getCigarString().contains("I") || unclippedSecondRead.getCigarString().contains("D") ) {
+            return null; // fragments contain indels so don't merge them
+        }
+
+        // Recover soft-clipped bases and strip adaptor sequence before examining the overlap.
+        final GATKSAMRecord firstRead = ReadClipper.hardClipAdaptorSequence(ReadClipper.revertSoftClippedBases(unclippedFirstRead));
+        final GATKSAMRecord secondRead = ReadClipper.hardClipAdaptorSequence(ReadClipper.revertSoftClippedBases(unclippedSecondRead));
+
+        // Require a staggered overlap: second starts within first and ends at or after first's end.
+        if( !(secondRead.getSoftStart() <= firstRead.getSoftEnd() && secondRead.getSoftStart() >= firstRead.getSoftStart() && secondRead.getSoftEnd() >= firstRead.getSoftEnd()) ) {
+            return null; // can't merge them, yet:  AAAAAAAAAAA-BBBBBBBBBBB-AAAAAAAAAAAAAA, B is contained entirely inside A
+        }
+
+        // Index within firstRead of the base aligned at secondRead's alignment start.
+        // NOTE(review): pair.getSecond() presumably flags that the reference coordinate fell within
+        // a deletion, hence the +1 step — confirm against ReadUtils.getReadCoordinateForReferenceCoordinate.
+        final Pair<Integer, Boolean> pair = ReadUtils.getReadCoordinateForReferenceCoordinate(firstRead, secondRead.getAlignmentStart());
+
+        final int firstReadStop = ( pair.getSecond() ? pair.getFirst() + 1 : pair.getFirst() );
+        // Merged layout: [0, firstReadStop) from first read only, then the overlap, then the
+        // remainder of the second read.
+        final int numBases = firstReadStop + secondRead.getReadLength();
+        final byte[] bases = new byte[numBases];
+        final byte[] quals = new byte[numBases];
+        final byte[] insertionQuals = new byte[numBases];
+        final byte[] deletionQuals = new byte[numBases];
+        final byte[] firstReadBases = firstRead.getReadBases();
+        final byte[] firstReadQuals = firstRead.getBaseQualities();
+        final byte[] secondReadBases = secondRead.getReadBases();
+        final byte[] secondReadQuals = secondRead.getBaseQualities();
+
+        // Prefix covered only by the first read: copy through unchanged.
+        for(int iii = 0; iii < firstReadStop; iii++) {
+            bases[iii] = firstReadBases[iii];
+            quals[iii] = firstReadQuals[iii];
+        }
+        // Overlap region: both reads cover these positions. Take the higher-quality base call,
+        // bailing out entirely on high-quality disagreements or doubly-low-quality positions.
+        // NOTE(review): a qual exactly equal to MIN_QUAL_BAD_OVERLAP passes neither guard below —
+        // confirm that boundary behavior is intended.
+        for(int iii = firstReadStop; iii < firstRead.getReadLength(); iii++) {
+            if( firstReadQuals[iii] > MIN_QUAL_BAD_OVERLAP && secondReadQuals[iii-firstReadStop] > MIN_QUAL_BAD_OVERLAP && firstReadBases[iii] != secondReadBases[iii-firstReadStop] ) {
+                return null; // high qual bases don't match exactly, probably indel in only one of the fragments, so don't merge them
+            }
+            if( firstReadQuals[iii] < MIN_QUAL_BAD_OVERLAP && secondReadQuals[iii-firstReadStop] < MIN_QUAL_BAD_OVERLAP ) {
+                return null; // both reads have low qual bases in the overlap region so don't merge them because don't know what is going on
+            }
+            bases[iii] = ( firstReadQuals[iii] > secondReadQuals[iii-firstReadStop] ? firstReadBases[iii] : secondReadBases[iii-firstReadStop] );
+            quals[iii] = ( firstReadQuals[iii] > secondReadQuals[iii-firstReadStop] ? firstReadQuals[iii] : secondReadQuals[iii-firstReadStop] );
+        }
+        // Suffix past the first read's end: covered only by the second read.
+        for(int iii = firstRead.getReadLength(); iii < numBases; iii++) {
+            bases[iii] = secondReadBases[iii-firstReadStop];
+            quals[iii] = secondReadQuals[iii-firstReadStop];
+        }
+
+        // Build the synthetic read: strandless, a single numBases-long M cigar, with alignment
+        // start, read group, reference, name and mapping quality taken from the first read.
+        final GATKSAMRecord returnRead = new GATKSAMRecord( firstRead.getHeader() );
+        returnRead.setIsStrandless(true);
+        returnRead.setAlignmentStart( firstRead.getAlignmentStart() );
+        returnRead.setReadBases( bases );
+        returnRead.setBaseQualities( quals );
+        returnRead.setReadGroup( firstRead.getReadGroup() );
+        returnRead.setReferenceName( firstRead.getReferenceName() );
+        returnRead.setReadName( firstRead.getReadName() );
+        final CigarElement c = new CigarElement(bases.length, CigarOperator.M);
+        final ArrayList<CigarElement> cList = new ArrayList<CigarElement>();
+        cList.add(c);
+        returnRead.setCigar( new Cigar( cList ));
+        returnRead.setMappingQuality( firstRead.getMappingQuality() );
+
+        // Merge BQSR base insertion/deletion qualities the same way, when either read has them.
+        if( firstRead.hasBaseIndelQualities() || secondRead.hasBaseIndelQualities() ) {
+            final byte[] firstReadInsertionQuals = firstRead.getBaseInsertionQualities();
+            final byte[] firstReadDeletionQuals = firstRead.getBaseDeletionQualities();
+            final byte[] secondReadInsertionQuals = secondRead.getBaseInsertionQualities();
+            final byte[] secondReadDeletionQuals = secondRead.getBaseDeletionQualities();
+            for(int iii = 0; iii < firstReadStop; iii++) {
+                insertionQuals[iii] = firstReadInsertionQuals[iii];
+                deletionQuals[iii] = firstReadDeletionQuals[iii];
+            }
+            for(int iii = firstReadStop; iii < firstRead.getReadLength(); iii++) {
+                insertionQuals[iii] = ( firstReadQuals[iii] > secondReadQuals[iii-firstReadStop] ? firstReadInsertionQuals[iii] : secondReadInsertionQuals[iii-firstReadStop] ); // Purposefully checking the highest *base* quality score
+                deletionQuals[iii] = ( firstReadQuals[iii] > secondReadQuals[iii-firstReadStop] ? firstReadDeletionQuals[iii] : secondReadDeletionQuals[iii-firstReadStop] ); // Purposefully checking the highest *base* quality score
+            }
+            for(int iii = firstRead.getReadLength(); iii < numBases; iii++) {
+                insertionQuals[iii] = secondReadInsertionQuals[iii-firstReadStop];
+                deletionQuals[iii] = secondReadDeletionQuals[iii-firstReadStop];
+            }
+            returnRead.setBaseQualities( insertionQuals, EventType.BASE_INSERTION );
+            returnRead.setBaseQualities( deletionQuals, EventType.BASE_DELETION );
+        }
+
+        return returnRead;
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/genotyper/AlleleList.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/genotyper/AlleleList.java
new file mode 100644
index 0000000..3d6c581
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/genotyper/AlleleList.java
@@ -0,0 +1,41 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.genotyper;
+
+import htsjdk.variant.variantcontext.Allele;
+
+/**
+ * Minimal interface for an ordered, indexed collection of alleles.
+ *
+ * @param <A> the allele type.
+ */
+public interface AlleleList<A extends Allele> {
+
+    /**
+     * Returns the number of alleles in this list.
+     *
+     * @return 0 or greater.
+     */
+    int alleleCount();
+
+    /**
+     * Returns the index of an allele within this list.
+     *
+     * @param allele the query allele.
+     *
+     * @return -1 if the allele is not present, otherwise its index in [0, {@link #alleleCount()}).
+     */
+    int alleleIndex(final A allele);
+
+    /**
+     * Returns the allele at a given index.
+     *
+     * @param index the query index, in [0, {@link #alleleCount()}).
+     *
+     * @return the allele at that position.
+     */
+    A alleleAt(final int index);
+
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/genotyper/AlleleListPermutation.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/genotyper/AlleleListPermutation.java
new file mode 100644
index 0000000..a9423fa
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/genotyper/AlleleListPermutation.java
@@ -0,0 +1,35 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.genotyper;
+
+import htsjdk.variant.variantcontext.Allele;
+import org.broadinstitute.gatk.utils.collections.Permutation;
+
+/**
+ * Marks allele list permutation implementation classes.
+ *
+ * <p>Combines the contracts of {@link Permutation} and {@link AlleleList} without adding
+ * new methods: a permutation between two allele-lists that can itself be queried as an
+ * allele-list.</p>
+ *
+ * @param <A> the allele type.
+ */
+public interface AlleleListPermutation<A extends Allele> extends Permutation<A>, AlleleList<A> {
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/genotyper/AlleleListUtils.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/genotyper/AlleleListUtils.java
new file mode 100644
index 0000000..249e270
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/genotyper/AlleleListUtils.java
@@ -0,0 +1,334 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.genotyper;
+
+import htsjdk.variant.variantcontext.Allele;
+
+import java.util.AbstractList;
+import java.util.List;
+
+/**
+ * Utils operations on {@link AlleleList} instances.
+ *
+ * @author Valentin Ruano-Rubio <valentin at broadinstitute.org>
+ */
+public class AlleleListUtils {
+
+    // Utility class; not meant to be instantiated.
+    private AlleleListUtils() {}
+
+    // Shared immutable empty list; safe to reuse across allele types because it holds no elements.
+    @SuppressWarnings("unchecked")
+    private static final AlleleList EMPTY_LIST = new AlleleList() {
+        @Override
+        public int alleleCount() {
+            return 0;
+        }
+
+        @Override
+        public int alleleIndex(final Allele allele) {
+            return -1; // no allele is ever contained in the empty list.
+        }
+
+        @Override
+        public Allele alleleAt(final int index) {
+            throw new IllegalArgumentException("allele index is out of range");
+        }
+    };
+
+    /**
+     * Checks whether two allele lists are in fact the same.
+     * @param first one list to compare.
+     * @param second another list to compare.
+     *
+     * @throws IllegalArgumentException if either list is {@code null} or contains {@code null} elements.
+     *
+     * @return {@code true} iff both lists are equal (same alleles in the same order).
+     */
+    public static <A extends Allele> boolean equals(final AlleleList<A> first, final AlleleList<A> second) {
+        if (first == null || second == null)
+            throw new IllegalArgumentException("no null list allowed");
+        final int alleleCount = first.alleleCount();
+        if (alleleCount != second.alleleCount())
+            return false;
+
+        for (int i = 0; i < alleleCount; i++) {
+            final A firstAllele = first.alleleAt(i);
+            if (firstAllele == null)
+                throw new IllegalArgumentException("no null alleles allowed in allele-lists: first list at " + i);
+            final A secondAllele = second.alleleAt(i);
+            if (secondAllele == null)
+                throw new IllegalArgumentException("no null alleles allowed in allele-lists: second list at " + i);
+            if (!firstAllele.equals(secondAllele))
+                return false;
+        }
+
+        return true;
+    }
+
+    /**
+     * Resolves the index of the reference allele in an allele-list.
+     *
+     * <p>
+     *     If there is no reference allele, it returns -1. If there is more than one reference allele,
+     *     it returns the first occurrence (lowest index).
+     * </p>
+     *
+     * @param list the search allele-list.
+     * @param <A> allele component type.
+     *
+     * @throws IllegalArgumentException if {@code list} is {@code null}.
+     *
+     * @return -1 if there is no reference allele, or a value in [0,{@code list.alleleCount()}).
+     */
+    public static <A extends Allele> int indexOfReference(final AlleleList<A> list) {
+        if (list == null)
+            throw new IllegalArgumentException("the input list cannot be null");
+        final int alleleCount = list.alleleCount();
+        for (int i = 0; i < alleleCount; i++)
+            if (list.alleleAt(i).isReference())
+                return i;
+        return -1;
+    }
+
+
+    /**
+     * Returns a {@link java.util.List} unmodifiable view of an allele-list.
+     * @param list the allele-list to wrap.
+     *
+     * @throws IllegalArgumentException if {@code list} is {@code null}.
+     *
+     * @return never {@code null}.
+     */
+    public static <A extends Allele> List<A> asList(final AlleleList<A> list) {
+        if (list == null)
+            throw new IllegalArgumentException("the list cannot be null");
+        return new AsList<>(list);
+    }
+
+    /**
+     * Returns an unmodifiable empty allele-list.
+     * @param <A> the allele class.
+     * @return never {@code null}.
+     */
+    @SuppressWarnings("unchecked")
+    public static <A extends Allele> AlleleList<A> emptyList() {
+        return EMPTY_LIST;
+    }
+
+    /**
+     * Simple unmodifiable {@link java.util.List} view of an allele-list.
+     */
+    private static class AsList<A extends Allele> extends AbstractList<A> {
+
+        private final AlleleList<A> list;
+
+        private AsList(final AlleleList<A> list) {
+            this.list = list;
+        }
+
+        @Override
+        public A get(int index) {
+            return list.alleleAt(index);
+        }
+
+        @Override
+        public int size() {
+            return list.alleleCount();
+        }
+    }
+
+
+    /**
+     * Returns a permutation between two allele lists.
+     * @param original the original allele list.
+     * @param target the target allele list.
+     * @param <A> the allele type.
+     *
+     * @throws IllegalArgumentException if {@code original} or {@code target} is {@code null}, or
+     * some element in {@code target} is not contained in {@code original}.
+     *
+     * @return never {@code null}
+     */
+    public static <A extends Allele> AlleleListPermutation<A> permutation(final AlleleList<A> original, final AlleleList<A> target) {
+        if (equals(original,target))
+            return new NonPermutation<>(original);
+        else
+            return new ActualPermutation<>(original,target);
+    }
+
+    /**
+     * Trivial identity permutation used when target and original lists are equal.
+     */
+    private static class NonPermutation<A extends Allele> implements AlleleListPermutation<A> {
+
+        private final AlleleList<A> list;
+
+        public NonPermutation(final AlleleList<A> original) {
+            list = original;
+        }
+
+        @Override
+        public boolean isPartial() {
+            return false;
+        }
+
+        @Override
+        public boolean isNonPermuted() {
+            return true;
+        }
+
+        @Override
+        public int toIndex(int fromIndex) {
+            return fromIndex;
+        }
+
+        @Override
+        public int fromIndex(int toIndex) {
+            return toIndex;
+        }
+
+        @Override
+        public int fromSize() {
+            return list.alleleCount();
+        }
+
+        @Override
+        public int toSize() {
+            return list.alleleCount();
+        }
+
+        @Override
+        public List<A> fromList() {
+            return asList(list);
+        }
+
+        @Override
+        public List<A> toList() {
+            return asList(list);
+        }
+
+        @Override
+        public int alleleCount() {
+            return list.alleleCount();
+        }
+
+        @Override
+        public int alleleIndex(final A allele) {
+            return list.alleleIndex(allele);
+        }
+
+        @Override
+        public A alleleAt(final int index) {
+            return list.alleleAt(index);
+        }
+    }
+
+    /**
+     * A possibly-partial, possibly-reordering permutation from an original allele list to a target list.
+     */
+    private static class ActualPermutation<A extends Allele> implements AlleleListPermutation<A> {
+
+        private final AlleleList<A> from;
+
+        private final AlleleList<A> to;
+
+        // fromIndex[i] is the index in the original list of the i-th target allele.
+        private final int[] fromIndex;
+
+        private final boolean nonPermuted;
+
+        private final boolean isPartial;
+
+        private ActualPermutation(final AlleleList<A> original, final AlleleList<A> target) {
+            this.from = original;
+            this.to = target;
+            final int toSize = target.alleleCount();
+            final int fromSize = original.alleleCount();
+            if (fromSize < toSize)
+                throw new IllegalArgumentException("target allele list is not a permutation of the original allele list");
+
+            fromIndex = new int[toSize];
+            boolean nonPermuted = fromSize == toSize;
+            this.isPartial = !nonPermuted;
+            for (int i = 0; i < toSize; i++) {
+                final int originalIndex = original.alleleIndex(target.alleleAt(i));
+                if (originalIndex < 0)
+                    throw new IllegalArgumentException("target allele list is not a permutation of the original allele list");
+                fromIndex[i] = originalIndex;
+                nonPermuted &= originalIndex == i;
+            }
+
+            this.nonPermuted = nonPermuted;
+        }
+
+        @Override
+        public boolean isPartial() {
+            return isPartial;
+        }
+
+        @Override
+        public boolean isNonPermuted() {
+            return nonPermuted;
+        }
+
+        @Override
+        public int toIndex(int fromIndex) {
+            return to.alleleIndex(from.alleleAt(fromIndex));
+        }
+
+        @Override
+        public int fromIndex(int toIndex) {
+            return fromIndex[toIndex];
+        }
+
+        @Override
+        public int fromSize() {
+            return from.alleleCount();
+        }
+
+        @Override
+        public int toSize() {
+            return to.alleleCount();
+        }
+
+        @Override
+        public List<A> fromList() {
+            return asList(from);
+        }
+
+        @Override
+        public List<A> toList() {
+            return asList(to);
+        }
+
+        @Override
+        public int alleleCount() {
+            return to.alleleCount();
+        }
+
+        @Override
+        public int alleleIndex(final A allele) {
+            return to.alleleIndex(allele);
+        }
+
+        @Override
+        public A alleleAt(final int index) {
+            return to.alleleAt(index);
+        }
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/genotyper/DiploidGenotype.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/genotyper/DiploidGenotype.java
new file mode 100644
index 0000000..0a57040
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/genotyper/DiploidGenotype.java
@@ -0,0 +1,125 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.genotyper;
+
+import org.broadinstitute.gatk.utils.BaseUtils;
+
+/**
+ * The ten unordered diploid genotypes over the bases {A, C, G, T}, in canonical order.
+ */
+public enum DiploidGenotype {
+    AA ('A', 'A'),
+    AC ('A', 'C'),
+    CC ('C', 'C'),
+    AG ('A', 'G'),
+    CG ('C', 'G'),
+    GG ('G', 'G'),
+    AT ('A', 'T'),
+    CT ('C', 'T'),
+    GT ('G', 'T'),
+    TT ('T', 'T');
+
+    /** The two bases of this genotype as ASCII byte values; final so enum constants stay immutable. */
+    public final byte base1, base2;
+
+    @Deprecated
+    private DiploidGenotype(char base1, char base2) {
+        this((byte)base1, (byte)base2);
+    }
+
+    private DiploidGenotype(byte base1, byte base2) {
+        this.base1 = base1;
+        this.base2 = base2;
+    }
+
+    /**
+     * @param r the reference base
+     * @return true if this genotype is homozygous and matches the reference base
+     */
+    public boolean isHomRef(byte r) {
+        return isHom() && r == base1;
+    }
+
+    /**
+     * @param r the reference base
+     * @return true if this genotype is homozygous for a non-reference base
+     */
+    public boolean isHomVar(byte r) {
+        return isHom() && r != base1;
+    }
+
+    /**
+     * @param r the reference base
+     * @return true if this genotype is heterozygous and exactly one of its bases matches the reference
+     */
+    public boolean isHetRef(byte r) {
+        if ( base1 == r )
+            return r != base2;
+        else
+            return base2 == r;
+    }
+
+    /** @return true if both bases of this genotype are the same */
+    public boolean isHom() {
+        return ! isHet();
+    }
+
+    /** @return true if the two bases of this genotype differ */
+    public boolean isHet() {
+        return base1 != base2;
+    }
+
+    /**
+     * create a diploid genotype, given a character to make into a hom genotype
+     * @param hom the character to turn into a hom genotype, i.e. if it is A, then returned will be AA
+     * @return the diploid genotype
+     * @throws IllegalArgumentException if {@code hom} is not one of A, C, G, T
+     */
+    public static DiploidGenotype createHomGenotype(byte hom) {
+        int index = BaseUtils.simpleBaseToBaseIndex(hom);
+        if ( index == -1 )
+            throw new IllegalArgumentException((char)hom + " is not a valid base character"); // cast so the message shows the character, not its numeric value
+        return conversionMatrix[index][index];
+    }
+
+    /**
+     * create a diploid genotype, given 2 chars which may not necessarily be ordered correctly
+     * @param base1 base1
+     * @param base2 base2
+     * @return the diploid genotype
+     * @throws IllegalArgumentException if either base is not one of A, C, G, T
+     */
+    public static DiploidGenotype createDiploidGenotype(byte base1, byte base2) {
+        int index1 = BaseUtils.simpleBaseToBaseIndex(base1);
+        if ( index1 == -1 )
+            throw new IllegalArgumentException((char)base1 + " is not a valid base character");
+        int index2 = BaseUtils.simpleBaseToBaseIndex(base2);
+        if ( index2 == -1 )
+            throw new IllegalArgumentException((char)base2 + " is not a valid base character");
+        return conversionMatrix[index1][index2];
+    }
+
+    /**
+     * create a diploid genotype, given 2 base indexes which may not necessarily be ordered correctly
+     * @param baseIndex1 base1
+     * @param baseIndex2 base2
+     * @return the diploid genotype
+     * @throws IllegalArgumentException if either index is outside [0, 3]
+     */
+    public static DiploidGenotype createDiploidGenotype(int baseIndex1, int baseIndex2) {
+        // Validate the full range (not just -1) so bad indexes raise a clear exception
+        // instead of an ArrayIndexOutOfBoundsException from the matrix lookup.
+        if ( baseIndex1 < 0 || baseIndex1 >= conversionMatrix.length )
+            throw new IllegalArgumentException(baseIndex1 + " does not represent a valid base character");
+        if ( baseIndex2 < 0 || baseIndex2 >= conversionMatrix.length )
+            throw new IllegalArgumentException(baseIndex2 + " does not represent a valid base character");
+        return conversionMatrix[baseIndex1][baseIndex2];
+    }
+
+    // Symmetric lookup table indexed by base index (A=0, C=1, G=2, T=3), so the
+    // two indexes may be supplied in either order.
+    private static final DiploidGenotype[][] conversionMatrix = {
+            { DiploidGenotype.AA, DiploidGenotype.AC, DiploidGenotype.AG, DiploidGenotype.AT },
+            { DiploidGenotype.AC, DiploidGenotype.CC, DiploidGenotype.CG, DiploidGenotype.CT },
+            { DiploidGenotype.AG, DiploidGenotype.CG, DiploidGenotype.GG, DiploidGenotype.GT },
+            { DiploidGenotype.AT, DiploidGenotype.CT, DiploidGenotype.GT, DiploidGenotype.TT }
+    };
+}
\ No newline at end of file
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/genotyper/IndexedAlleleList.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/genotyper/IndexedAlleleList.java
new file mode 100644
index 0000000..8c3f048
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/genotyper/IndexedAlleleList.java
@@ -0,0 +1,95 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.genotyper;
+
+import htsjdk.variant.variantcontext.Allele;
+import org.broadinstitute.gatk.utils.collections.IndexedSet;
+
+import java.util.Collection;
+
+/**
+ * Allele list implementation backed by an {@link IndexedSet}.
+ *
+ * @author Valentin Ruano-Rubio <valentin at broadinstitute.org>
+ */
+public class IndexedAlleleList<A extends Allele> implements AlleleList<A> {
+
+    // Backing set: keeps first-occurrence order and supports index lookups.
+    private final IndexedSet<A> alleleSet;
+
+    /**
+     * Constructs a new empty allele-list
+     */
+    public IndexedAlleleList() {
+        alleleSet = new IndexedSet<>();
+    }
+
+    /**
+     * Constructs a new allele-list from an array of alleles.
+     *
+     * <p>
+     *     Repeats in the input array are ignored, keeping only the first occurrence. The
+     *     resulting list preserves the order in which alleles appear in the input.
+     * </p>
+     * @param alleles the original allele array
+     *
+     * @throws java.lang.IllegalArgumentException if {@code alleles} is {@code null} or contains {@code null}s.
+     */
+    public IndexedAlleleList(final A ... alleles) {
+        alleleSet = new IndexedSet<>(alleles);
+    }
+
+    /**
+     * Constructs a new allele-list from a collection of alleles.
+     *
+     * <p>
+     *     Repeats in the input collection are ignored, keeping only the first occurrence. The
+     *     resulting list preserves the natural traversal order of the input collection.
+     * </p>
+     * @param alleles the original allele collection
+     *
+     * @throws java.lang.IllegalArgumentException if {@code alleles} is {@code null} or contains {@code null}s.
+     */
+    public IndexedAlleleList(final Collection<A> alleles) {
+        alleleSet = new IndexedSet<>(alleles);
+    }
+
+    @Override
+    public int alleleCount() {
+        return alleleSet.size();
+    }
+
+    @Override
+    public int alleleIndex(final A allele) {
+        return alleleSet.indexOf(allele);
+    }
+
+    @Override
+    public A alleleAt(final int index) {
+        return alleleSet.get(index);
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/genotyper/IndexedSampleList.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/genotyper/IndexedSampleList.java
new file mode 100644
index 0000000..95f2559
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/genotyper/IndexedSampleList.java
@@ -0,0 +1,96 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.genotyper;
+
+import org.broadinstitute.gatk.utils.collections.IndexedSet;
+
+import java.util.Collection;
+
+/**
+ * Simple implementation of a sample-list backed by an indexed-set.
+ *
+ * @author Valentin Ruano-Rubio &lt;valentin at broadinstitute.org&gt;
+ */
+public class IndexedSampleList implements SampleList {
+
+    /** Backing store: keeps first-occurrence order and supports index lookups by name. */
+    private final IndexedSet<String> samples;
+
+    /**
+     * Constructs an empty sample-list.
+     */
+    public IndexedSampleList() {
+        this.samples = new IndexedSet<>(0);
+    }
+
+    /**
+     * Constructs a sample-list from a collection of samples.
+     *
+     * <p>
+     *     Duplicates in the input collection are dropped (only the first occurrence is kept) and
+     *     the resulting sample order follows the traversal order of the original collection.
+     * </p>
+     *
+     * @param samples input sample collection.
+     *
+     * @throws IllegalArgumentException if {@code samples} is {@code null} or it contains {@code null}s.
+     */
+    public IndexedSampleList(final Collection<String> samples) {
+        this.samples = new IndexedSet<>(samples);
+    }
+
+    /**
+     * Constructs a sample-list from an array of samples.
+     *
+     * <p>
+     *     Duplicates in the input array are dropped (only the first occurrence is kept) and
+     *     the resulting sample order follows the traversal order of the original array.
+     * </p>
+     *
+     * @param samples input sample array.
+     *
+     * @throws IllegalArgumentException if {@code samples} is {@code null} or it contains {@code null}s.
+     */
+    public IndexedSampleList(final String ... samples) {
+        this.samples = new IndexedSet<>(samples);
+    }
+
+    @Override
+    public int sampleCount() {
+        return samples.size();
+    }
+
+    @Override
+    public int sampleIndex(final String sample) {
+        return samples.indexOf(sample);
+    }
+
+    @Override
+    public String sampleAt(final int sampleIndex) {
+        return samples.get(sampleIndex);
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/genotyper/MostLikelyAllele.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/genotyper/MostLikelyAllele.java
new file mode 100644
index 0000000..f0a1d0b
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/genotyper/MostLikelyAllele.java
@@ -0,0 +1,134 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.genotyper;
+
+import org.broadinstitute.gatk.utils.MathUtils;
+import htsjdk.variant.variantcontext.Allele;
+
+/**
+ * Stores the most likely and second most likely alleles, along with a threshold
+ * for deciding whether a read is informative.
+ *
+ * If the difference between the most-likely allele and the next-most-likely allele is < INFORMATIVE_LIKELIHOOD_THRESHOLD
+ * then the most likely allele is set to "no call", and isInformative will return false.  This constant can be
+ * overridden simply by using one of the versions of these calls that accepts an informative threshold as an argument.
+ *
+ * For convenience, there are functions called getAlleleIfInformative that return either the most likely allele, or
+ * NO_CALL if two or more alleles have likelihoods within INFORMATIVE_LIKELIHOOD_THRESHOLD of one another.
+ *
+ * By default empty allele maps will return NO_CALL, and allele maps with a single entry will return the
+ * corresponding key
+ *
+ * User: depristo
+ * Date: 3/24/13
+ * Time: 1:39 PM
+ */
+public final class MostLikelyAllele {
+    /** Default minimum log10 likelihood gap between best and second-best allele for a read to be informative. */
+    public static final double INFORMATIVE_LIKELIHOOD_THRESHOLD = 0.2;
+
+    // NOTE(review): fields kept package-private (not private) to preserve any same-package direct
+    // access that may exist elsewhere; prefer the getters below in new code.
+    final Allele mostLikely;
+    final Allele secondLikely;
+    final double log10LikelihoodOfMostLikely;
+    final double log10LikelihoodOfSecondBest;
+
+    /**
+     * Create a new MostLikelyAllele
+     *
+     * If there's a meaningful most likely allele, allele should be a real allele.  If none can be determined,
+     * mostLikely should be a NO_CALL allele.
+     *
+     * @param mostLikely the most likely allele
+     * @param secondMostLikely the most likely allele after mostLikely
+     * @param log10LikelihoodOfMostLikely the log10 likelihood of the most likely allele
+     * @param log10LikelihoodOfSecondBest the log10 likelihood of the next most likely allele (should be NEGATIVE_INFINITY if none is available)
+     * @throws IllegalArgumentException if {@code mostLikely} is {@code null}, if either likelihood is neither
+     *         -Infinity nor a good log10 probability, or if the best likelihood is smaller than the second best
+     */
+    public MostLikelyAllele(final Allele mostLikely, final Allele secondMostLikely, final double log10LikelihoodOfMostLikely, final double log10LikelihoodOfSecondBest) {
+        if ( mostLikely == null ) throw new IllegalArgumentException("mostLikely allele cannot be null");
+        if ( log10LikelihoodOfMostLikely != Double.NEGATIVE_INFINITY && ! MathUtils.goodLog10Probability(log10LikelihoodOfMostLikely) )
+            throw new IllegalArgumentException("log10LikelihoodOfMostLikely must be either -Infinity or a good log10 prob but got " + log10LikelihoodOfMostLikely);
+        if ( log10LikelihoodOfSecondBest != Double.NEGATIVE_INFINITY && ! MathUtils.goodLog10Probability(log10LikelihoodOfSecondBest) )
+            throw new IllegalArgumentException("log10LikelihoodOfSecondBest must be either -Infinity or a good log10 prob but got " + log10LikelihoodOfSecondBest);
+        if ( log10LikelihoodOfMostLikely < log10LikelihoodOfSecondBest )
+            // message fixed: the enforced invariant is best >= second best (original text said "<=")
+            throw new IllegalArgumentException("log10LikelihoodOfMostLikely must be >= log10LikelihoodOfSecondBest but got " + log10LikelihoodOfMostLikely + " vs 2nd " + log10LikelihoodOfSecondBest);
+
+        this.mostLikely = mostLikely;
+        this.secondLikely = secondMostLikely;
+        this.log10LikelihoodOfMostLikely = log10LikelihoodOfMostLikely;
+        this.log10LikelihoodOfSecondBest = log10LikelihoodOfSecondBest;
+    }
+
+    /** @return the most likely allele (may be NO_CALL when none could be determined) */
+    public Allele getMostLikelyAllele() {
+        return mostLikely;
+    }
+
+    /** @return the second most likely allele (may be {@code null} when none was available) */
+    public Allele getSecondMostLikelyAllele() {
+        return secondLikely;
+    }
+
+    /** @return the log10 likelihood of the most likely allele */
+    public double getLog10LikelihoodOfMostLikely() {
+        return log10LikelihoodOfMostLikely;
+    }
+
+    /** @return the log10 likelihood of the second most likely allele (-Infinity if none was available) */
+    public double getLog10LikelihoodOfSecondBest() {
+        return log10LikelihoodOfSecondBest;
+    }
+
+    /**
+     * Equivalent to {@code isInformative(INFORMATIVE_LIKELIHOOD_THRESHOLD)}.
+     *
+     * @see #isInformative(double)
+     */
+    public boolean isInformative() {
+        return isInformative(INFORMATIVE_LIKELIHOOD_THRESHOLD);
+    }
+
+    /**
+     * Was this allele selected from an object that was specifically informative about the allele?
+     *
+     * The calculation that implements this is whether the likelihood of the most likely allele is larger
+     * than the second most likely by at least the log10ThresholdForInformative
+     *
+     * @param log10ThresholdForInformative minimum log10 likelihood gap for the best allele to count as informative
+     * @return true if so, false if not
+     */
+    public boolean isInformative(final double log10ThresholdForInformative) {
+        return getLog10LikelihoodOfMostLikely() - getLog10LikelihoodOfSecondBest() > log10ThresholdForInformative;
+    }
+
+    /**
+     * Equivalent to {@code getAlleleIfInformative(INFORMATIVE_LIKELIHOOD_THRESHOLD)}.
+     *
+     * @see #getAlleleIfInformative(double)
+     */
+    public Allele getAlleleIfInformative() {
+        return getAlleleIfInformative(INFORMATIVE_LIKELIHOOD_THRESHOLD);
+    }
+
+    /**
+     * Get the most likely allele if isInformative(log10ThresholdForInformative) is true, or NO_CALL otherwise
+     *
+     * @param log10ThresholdForInformative a log10 threshold to determine if the most likely allele was informative
+     * @return a non-null allele
+     */
+    public Allele getAlleleIfInformative(final double log10ThresholdForInformative) {
+        return isInformative(log10ThresholdForInformative) ? getMostLikelyAllele() : Allele.NO_CALL;
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/genotyper/PerReadAlleleLikelihoodMap.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/genotyper/PerReadAlleleLikelihoodMap.java
new file mode 100644
index 0000000..c52278e
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/genotyper/PerReadAlleleLikelihoodMap.java
@@ -0,0 +1,417 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.genotyper;
+
+
+import com.google.java.contract.Ensures;
+import org.broadinstitute.gatk.utils.downsampling.AlleleBiasedDownsamplingUtils;
+import org.broadinstitute.gatk.utils.GenomeLoc;
+import org.broadinstitute.gatk.utils.MathUtils;
+import org.broadinstitute.gatk.utils.haplotype.Haplotype;
+import org.broadinstitute.gatk.utils.pileup.PileupElement;
+import org.broadinstitute.gatk.utils.pileup.ReadBackedPileup;
+import org.broadinstitute.gatk.utils.sam.AlignmentUtils;
+import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
+import htsjdk.variant.variantcontext.Allele;
+
+import java.util.*;
+
+/**
+ * Wrapper class that holds a set of maps of the form (Read -> Map(Allele->Double)).
+ *
+ * For each read, this holds the underlying alleles represented by an aligned read, and the
+ * corresponding relative likelihoods (log10-scaled, so stored values are always <= 0).
+ */
+public class PerReadAlleleLikelihoodMap {
+    /** A map from each seen allele to its insertion index, so we can efficiently determine if an allele is already present */
+    private final Map<Allele,Integer> allelesSet = new HashMap<>();
+
+    /** A list of the unique alleles, as an ArrayList so we can call get(i) efficiently */
+    protected final List<Allele> alleles = new ArrayList<>();
+
+    /** Read -> (Allele -> log10 likelihood); LinkedHashMap gives a stable read iteration order */
+    protected final Map<GATKSAMRecord, Map<Allele, Double>> likelihoodReadMap = new LinkedHashMap<>();
+
+    public PerReadAlleleLikelihoodMap() { }
+
+    /**
+     * Add a new entry into the Read -> ( Allele -> Likelihood ) map of maps.
+     * @param read - the GATKSAMRecord that was evaluated
+     * @param a - the Allele against which the GATKSAMRecord was evaluated
+     * @param likelihood - the likelihood score resulting from the evaluation of "read" against "a"; must be <= 0
+     * @throws IllegalArgumentException if any argument is null or {@code likelihood} is positive
+     */
+    public void add(final GATKSAMRecord read, final Allele a, final Double likelihood) {
+        if ( read == null ) throw new IllegalArgumentException("Cannot add a null read to the allele likelihood map");
+        if ( a == null ) throw new IllegalArgumentException("Cannot add a null allele to the allele likelihood map");
+        if ( likelihood == null ) throw new IllegalArgumentException("Likelihood cannot be null");
+        if ( likelihood > 0.0 ) throw new IllegalArgumentException("Likelihood must be negative (L = log(p))");
+
+        // record the allele in both the index map and the ordered list the first time we see it
+        if (!allelesSet.containsKey(a)) {
+            allelesSet.put(a,alleles.size());
+            alleles.add(a);
+        }
+        Map<Allele,Double> likelihoodMap = likelihoodReadMap.get(read);
+        if (likelihoodMap == null){
+            // LinkedHashMap will ensure iterating through alleles will be in consistent order
+            likelihoodMap = new LinkedHashMap<>();
+            likelihoodReadMap.put(read,likelihoodMap);
+        }
+        likelihoodMap.put(a,likelihood);
+    }
+
+    /**
+     * Delegates to {@link AlleleBiasedDownsamplingUtils#createAlleleBiasedBasePileup}.
+     *
+     * @param pileup the input pileup
+     * @param downsamplingFraction fraction of reads to remove per allele
+     * @return the downsampled pileup
+     */
+    public ReadBackedPileup createPerAlleleDownsampledBasePileup(final ReadBackedPileup pileup, final double downsamplingFraction) {
+        return AlleleBiasedDownsamplingUtils.createAlleleBiasedBasePileup(pileup, downsamplingFraction);
+    }
+
+    /**
+     * For each allele "a" , identify those reads whose most likely allele is "a", and remove a "downsamplingFraction" proportion
+     * of those reads from the "likelihoodReadMap". This is used for e.g. sample contamination
+     * @param downsamplingFraction - the fraction of supporting reads to remove from each allele. If <=0 all reads kept, if >=1 all reads tossed.
+     */
+    public void performPerAlleleDownsampling(final double downsamplingFraction) {
+        // special case removal of all or no reads
+        if ( downsamplingFraction <= 0.0 )
+            return;
+        if ( downsamplingFraction >= 1.0 ) {
+            likelihoodReadMap.clear();
+            return;
+        }
+
+        // start by stratifying the reads by the alleles they represent at this position
+        final Map<Allele, List<GATKSAMRecord>> alleleReadMap = getAlleleStratifiedReadMap();
+
+        // compute the reads to remove and actually remove them
+        final List<GATKSAMRecord> readsToRemove = AlleleBiasedDownsamplingUtils.selectAlleleBiasedReads(alleleReadMap, downsamplingFraction);
+        for ( final GATKSAMRecord read : readsToRemove )
+            likelihoodReadMap.remove(read);
+    }
+
+    /**
+     * Convert the @likelihoodReadMap to a map of alleles to reads, where each read is mapped uniquely to the allele
+     * for which it has the greatest associated likelihood
+     * @return a map from each allele to a list of reads that 'support' the allele
+     */
+    protected Map<Allele,List<GATKSAMRecord>> getAlleleStratifiedReadMap() {
+        final Map<Allele, List<GATKSAMRecord>> alleleReadMap = new HashMap<>(alleles.size());
+        for ( final Allele allele : alleles )
+            alleleReadMap.put(allele, new ArrayList<GATKSAMRecord>());
+
+        // a read only supports its best allele, and only when that best call is informative
+        for ( final Map.Entry<GATKSAMRecord, Map<Allele, Double>> entry : likelihoodReadMap.entrySet() ) {
+            final MostLikelyAllele bestAllele = getMostLikelyAllele(entry.getValue());
+            if ( bestAllele.isInformative() )
+                alleleReadMap.get(bestAllele.getMostLikelyAllele()).add(entry.getKey());
+        }
+
+        return alleleReadMap;
+    }
+
+    /** @return the number of reads stored in this map */
+    @Ensures("result >=0")
+    public int size() {
+        return likelihoodReadMap.size();
+    }
+
+    /**
+     * Helper function to add the read underneath a pileup element to the map
+     * @param p                              Pileup element
+     * @param a                              Corresponding allele
+     * @param likelihood                     Allele likelihood
+     * @throws IllegalArgumentException if {@code p}, its underlying read, or {@code a} is null
+     */
+    public void add(final PileupElement p, final Allele a, final Double likelihood) {
+        if (p==null)
+            throw new IllegalArgumentException("Pileup element cannot be null");
+        if ( p.getRead()==null )
+           throw new IllegalArgumentException("Read underlying pileup element cannot be null");
+        if ( a == null )
+           throw new IllegalArgumentException("Allele for add() cannot be null");
+
+        add(p.getRead(), a, likelihood);
+    }
+
+    /**
+     * Does the current map contain the key associated with a particular SAM record in pileup?
+     * @param p                 Pileup element
+     * @return true if the map contains pileup element, else false
+     */
+    public boolean containsPileupElement(final PileupElement p) {
+        return likelihoodReadMap.containsKey(p.getRead());
+    }
+
+    /** @return true when no reads have been added to this map */
+    public boolean isEmpty() {
+        return likelihoodReadMap.isEmpty();
+    }
+
+    /** @return the live (mutable) backing read -> (allele -> likelihood) map */
+    public Map<GATKSAMRecord,Map<Allele,Double>> getLikelihoodReadMap() {
+        return likelihoodReadMap;
+    }
+
+    /** Removes all reads and alleles from this map. */
+    public void clear() {
+        allelesSet.clear();
+        alleles.clear();
+        likelihoodReadMap.clear();
+    }
+
+    /** @return the set of reads currently stored in this map (a live view of the backing map's key set) */
+    public Set<GATKSAMRecord> getStoredElements() {
+        return likelihoodReadMap.keySet();
+    }
+
+    /** @return the number of reads stored in this map (same as {@link #size()}) */
+    public int getNumberOfStoredElements() {
+        return likelihoodReadMap.size();
+    }
+
+    /**
+     * @param p the pileup element whose read we look up
+     * @return the allele -> likelihood map for the element's read, or {@code null} if the read is not stored here
+     */
+    public Map<Allele,Double> getLikelihoodsAssociatedWithPileupElement(final PileupElement p) {
+        if (!likelihoodReadMap.containsKey(p.getRead()))
+            return null;
+
+        return likelihoodReadMap.get(p.getRead());
+    }
+
+    /**
+     * Get the log10 likelihood associated with an individual read/allele
+     *
+     * @param read the read whose likelihood we want
+     * @param allele the allele whose likelihood we want
+     * @return the log10 likelihood that this read matches this allele, or 0.0 if no likelihood was stored
+     *         for this particular read/allele pair
+     */
+    public double getLikelihoodAssociatedWithReadAndAllele(final GATKSAMRecord read, final Allele allele){
+        if (!allelesSet.containsKey(allele) || !likelihoodReadMap.containsKey(read))
+            return 0.0;
+
+        // the read may not have an entry for this particular allele even though both are known to
+        // the map; guard against the auto-unboxing NPE the original direct return would have hit
+        final Double likelihood = likelihoodReadMap.get(read).get(allele);
+        return likelihood == null ? 0.0 : likelihood;
+    }
+
+    /**
+     * Get the most likely alleles estimated across all reads in this object
+     *
+     * Takes the most likely two alleles according to their diploid genotype likelihoods.  That is, for
+     * each allele i and j we compute p(D | i,j) where D is the read likelihoods.  We track the maximum
+     * i,j likelihood and return an object that contains the alleles i and j as well as the max likelihood.
+     *
+     * Note that the second most likely diploid genotype is not tracked so the resulting MostLikelyAllele
+     * doesn't have a meaningful get best likelihood.
+     *
+     * @return a MostLikelyAllele object, or null if this map is empty
+     */
+    public MostLikelyAllele getMostLikelyDiploidAlleles() {
+        if ( isEmpty() ) return null;
+
+        int hap1 = 0;
+        int hap2 = 0;
+        double maxElement = Double.NEGATIVE_INFINITY;
+        for( int iii = 0; iii < alleles.size(); iii++ ) {
+            final Allele iii_allele = alleles.get(iii);
+            for( int jjj = 0; jjj <= iii; jjj++ ) {
+                final Allele jjj_allele = alleles.get(jjj);
+
+                double haplotypeLikelihood = 0.0;
+                for( final Map.Entry<GATKSAMRecord, Map<Allele,Double>> entry : likelihoodReadMap.entrySet() ) {
+                    // Compute log10(10^x1/2 + 10^x2/2) = log10(10^x1+10^x2)-log10(2)
+                    final double likelihood_iii = entry.getValue().get(iii_allele);
+                    final double likelihood_jjj = entry.getValue().get(jjj_allele);
+                    haplotypeLikelihood += MathUtils.approximateLog10SumLog10(likelihood_iii, likelihood_jjj) + MathUtils.LOG_ONE_HALF;
+
+                    // fast exit.  If this diploid pair is already worse than the max, just stop and look at the next pair
+                    if ( haplotypeLikelihood < maxElement ) break;
+                }
+
+                // keep track of the max element and associated indices
+                if ( haplotypeLikelihood > maxElement ) {
+                    hap1 = iii;
+                    hap2 = jjj;
+                    maxElement = haplotypeLikelihood;
+                }
+            }
+        }
+
+        if ( maxElement == Double.NEGATIVE_INFINITY )
+            throw new IllegalStateException("max likelihood is " + maxElement + " indicating something has gone wrong");
+
+        return new MostLikelyAllele(alleles.get(hap1), alleles.get(hap2), maxElement, maxElement);
+    }
+
+    /**
+     * Given a map from alleles to likelihoods, find the allele with the largest likelihood.
+     *
+     * @param alleleMap - a map from alleles to likelihoods
+     * @return - a MostLikelyAllele object
+     */
+    @Ensures("result != null")
+    public static MostLikelyAllele getMostLikelyAllele( final Map<Allele,Double> alleleMap ) {
+        return getMostLikelyAllele(alleleMap, null);
+    }
+
+    /**
+     * Given a map from alleles to likelihoods, find the allele with the largest likelihood.
+     *
+     * @param alleleMap - a map from alleles to likelihoods
+     * @param onlyConsiderTheseAlleles if not null, we will only consider alleles in this set for being one of the best.
+     *                                 this is useful for the case where you've selected a subset of the alleles that
+     *                                 the reads have been computed for further analysis.  If null totally ignored
+     * @return - a MostLikelyAllele object
+     */
+    public static MostLikelyAllele getMostLikelyAllele( final Map<Allele,Double> alleleMap, final Set<Allele> onlyConsiderTheseAlleles ) {
+        if ( alleleMap == null ) throw new IllegalArgumentException("The allele to likelihood map cannot be null");
+        double maxLike = Double.NEGATIVE_INFINITY;
+        double prevMaxLike = Double.NEGATIVE_INFINITY;
+        Allele mostLikelyAllele = Allele.NO_CALL;
+        Allele secondMostLikely = null;
+
+        for (final Map.Entry<Allele,Double> el : alleleMap.entrySet()) {
+            if ( onlyConsiderTheseAlleles != null && ! onlyConsiderTheseAlleles.contains(el.getKey()) )
+                continue;
+
+            if (el.getValue() > maxLike) {
+                // new best: previous best becomes the runner-up
+                prevMaxLike = maxLike;
+                maxLike = el.getValue();
+                secondMostLikely = mostLikelyAllele;
+                mostLikelyAllele = el.getKey();
+            } else if( el.getValue() > prevMaxLike ) {
+                secondMostLikely = el.getKey();
+                prevMaxLike = el.getValue();
+            }
+        }
+
+        return new MostLikelyAllele(mostLikelyAllele, secondMostLikely, maxLike, prevMaxLike);
+    }
+
+    /**
+     * Debug method to dump contents of object into string for display
+     */
+    @Override
+    public String toString() {
+        final StringBuilder sb = new StringBuilder();
+
+        sb.append("Alleles in map:");
+        for (final Allele a:alleles) {
+            sb.append(a.getDisplayString()).append(",");
+        }
+        sb.append("\n");
+        for (final Map.Entry <GATKSAMRecord, Map<Allele, Double>> el : getLikelihoodReadMap().entrySet() ) {
+            for (final Map.Entry<Allele,Double> eli : el.getValue().entrySet()) {
+                sb.append("Read ").append(el.getKey().getReadName()).append(". Allele:").append(eli.getKey().getDisplayString()).append(" has likelihood=").append(Double.toString(eli.getValue())).append("\n");
+            }
+        }
+        return sb.toString();
+    }
+
+    /**
+     * Remove reads from this map that are poorly modelled w.r.t. their per allele likelihoods
+     *
+     * Goes through each read in this map, and if it is poorly modelled removes it from the map.
+     *
+     * @see #readIsPoorlyModelled(org.broadinstitute.gatk.utils.sam.GATKSAMRecord, java.util.Collection, double)
+     * for more information about the poorly modelled test.
+     *
+     * @param maxErrorRatePerBase see equivalent parameter in #readIsPoorlyModelled
+     * @return the list of reads removed from this map because they are poorly modelled
+     */
+    public List<GATKSAMRecord> filterPoorlyModelledReads(final double maxErrorRatePerBase) {
+        final List<GATKSAMRecord> removedReads = new LinkedList<>();
+        final Iterator<Map.Entry<GATKSAMRecord, Map<Allele, Double>>> it = likelihoodReadMap.entrySet().iterator();
+        while ( it.hasNext() ) {
+            final Map.Entry<GATKSAMRecord, Map<Allele, Double>> record = it.next();
+            if ( readIsPoorlyModelled(record.getKey(), record.getValue().values(), maxErrorRatePerBase) ) {
+                it.remove();
+                removedReads.add(record.getKey());
+            }
+        }
+
+        return removedReads;
+    }
+
+    /**
+     * Is this read poorly modelled by all of the alleles in this map?
+     *
+     * A read is poorly modeled when it's likelihood is below what would be expected for a read
+     * originating from one of the alleles given the maxErrorRatePerBase of the reads in general.
+     *
+     * This function makes a number of key assumptions.  First, that the likelihoods reflect the total likelihood
+     * of the read.  In other words, that the read would be fully explained by one of the alleles.  This means
+     * that the allele should be something like the full haplotype from which the read might originate.
+     *
+     * It further assumes that each tolerated error in the read costs 4 log10 likelihood units (the
+     * log10QualPerBase constant below).  So a read allowed N errors is still considered well modelled by an
+     * allele as long as some likelihood is >= N * -4.
+     *
+     * @param read the read we want to evaluate
+     * @param log10Likelihoods a list of the log10 likelihoods of the read against a set of haplotypes.
+     * @param maxErrorRatePerBase the maximum error rate we'd expect for this read per base, in real space.  So
+     *                            0.01 means a 1% error rate
+     * @return true if none of the log10 likelihoods imply that the read truly originated from one of the haplotypes
+     */
+    protected boolean readIsPoorlyModelled(final GATKSAMRecord read, final Collection<Double> log10Likelihoods, final double maxErrorRatePerBase) {
+        // NOTE(review): Math.min CAPS the error budget at 2 regardless of read length; a Math.max
+        // would instead enforce a floor of 2 errors.  Confirm the cap is intentional.
+        final double maxErrorsForRead = Math.min(2.0, Math.ceil(read.getReadLength() * maxErrorRatePerBase));
+        final double log10QualPerBase = -4.0;
+        final double log10MaxLikelihoodForTrueAllele = maxErrorsForRead * log10QualPerBase;
+
+        for ( final double log10Likelihood : log10Likelihoods )
+            if ( log10Likelihood >= log10MaxLikelihoodForTrueAllele )
+                return false;
+
+        return true;
+    }
+
+    /**
+     * Get an unmodifiable set of the unique alleles in this PerReadAlleleLikelihoodMap
+     * @return a non-null unmodifiable set
+     */
+    public Set<Allele> getAllelesSet() {
+        return Collections.unmodifiableSet(allelesSet.keySet());
+    }
+
+    /**
+     * Loop over all of the reads in this likelihood map and realign them to its most likely haplotype
+     * @param haplotypes            the collection of haplotypes
+     * @param paddedReferenceLoc    the active region
+     */
+    public void realignReadsToMostLikelyHaplotype(final Collection<Haplotype> haplotypes, final GenomeLoc paddedReferenceLoc) {
+
+        // we need to remap the Alleles back to the Haplotypes; inefficient but unfortunately this is a requirement currently
+        final Map<Allele, Haplotype> alleleToHaplotypeMap = new HashMap<>(haplotypes.size());
+        Haplotype refHaplotype = null;
+        for ( final Haplotype haplotype : haplotypes ) {
+            alleleToHaplotypeMap.put(Allele.create(haplotype.getBases()), haplotype);
+            if (refHaplotype == null && haplotype.isReference())
+                refHaplotype = haplotype;
+        }
+
+        // rebuild the map with each read replaced by its realigned version, preserving iteration order
+        final Map<GATKSAMRecord, Map<Allele, Double>> newLikelihoodReadMap = new LinkedHashMap<>(likelihoodReadMap.size());
+        for( final Map.Entry<GATKSAMRecord, Map<Allele, Double>> entry : likelihoodReadMap.entrySet() ) {
+            final MostLikelyAllele bestAllele = PerReadAlleleLikelihoodMap.getMostLikelyAllele(entry.getValue());
+            final GATKSAMRecord alignedToRef = AlignmentUtils.createReadAlignedToRef(entry.getKey(), alleleToHaplotypeMap.get(bestAllele.getMostLikelyAllele()), refHaplotype,  paddedReferenceLoc.getStart(), bestAllele.isInformative());
+            newLikelihoodReadMap.put(alignedToRef, entry.getValue());
+        }
+
+        likelihoodReadMap.clear();
+        likelihoodReadMap.putAll(newLikelihoodReadMap);
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/genotyper/ReadLikelihoods.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/genotyper/ReadLikelihoods.java
new file mode 100644
index 0000000..a4c8ca9
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/genotyper/ReadLikelihoods.java
@@ -0,0 +1,1586 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.genotyper;
+
+import htsjdk.variant.variantcontext.Allele;
+import it.unimi.dsi.fastutil.ints.IntArrayList;
+import it.unimi.dsi.fastutil.objects.Object2IntMap;
+import it.unimi.dsi.fastutil.objects.Object2IntOpenHashMap;
+import org.broadinstitute.gatk.utils.downsampling.AlleleBiasedDownsamplingUtils;
+import org.broadinstitute.gatk.utils.GenomeLoc;
+import org.broadinstitute.gatk.utils.Utils;
+import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
+import org.broadinstitute.gatk.utils.variant.GATKVCFConstants;
+
+import java.util.*;
+
+/**
+ * Read-likelihoods container implementation based on integer indexed arrays.
+ *
+ * @param <A> the type of the allele the likelihood makes reference to.
+ *
+ * @author Valentin Ruano-Rubio <valentin at broadinstitute.org>
+ */
+public class ReadLikelihoods<A extends Allele> implements SampleList, AlleleList<A>, Cloneable {
+
+    /**
+     * Reads by sample index. Each sub array contains reference to the reads of the ith sample.
+     */
+    private GATKSAMRecord[][] readsBySampleIndex;
+
+    /**
+     * Indexed per sample, allele and finally read (within sample).
+     * <p>
+     *     valuesBySampleIndex[s][a][r] == lnLk(R_r | A_a) where R_r comes from Sample s.
+     * </p>
+     */
+    private double[][][] valuesBySampleIndex;
+
+    /**
+     * Sample list
+     */
+    private final SampleList samples;
+
+    /**
+     * Allele list
+     */
+    private AlleleList<A> alleles;
+
+    /**
+     * Cached allele list.
+     */
+    private List<A> alleleList;
+
+    /**
+     * Cached sample list.
+     */
+    private List<String> sampleList;
+
+    /**
+     * Maps from each read to its index within the sample.
+     *
+     * <p>In order to save CPU time the indices contained in this array (not the array itself) is
+     * lazily initialized by invoking {@link #readIndexBySampleIndex(int)}.</p>
+     */
+    private final Object2IntMap<GATKSAMRecord>[] readIndexBySampleIndex;
+
+    /**
+     * Index of the reference allele if any, otherwise -1
+     */
+    private int referenceAlleleIndex = -1;
+
+    /**
+     * Caches the read-list per sample list returned by {@link #sampleReads}
+     */
+    private final List<GATKSAMRecord>[] readListBySampleIndex;
+
+    /**
+     * Sample matrices lazily initialized (the elements not the array) by invoking {@link #sampleMatrix(int)}.
+     */
+    private final Matrix<A>[] sampleMatrices;
+
+    /**
+     * Constructs a new read-likelihood collection.
+     *
+     * <p>
+     *     The initial likelihoods for all allele-read combinations are
+     *     0.
+     * </p>
+     *
+     * @param samples all supported samples in the collection.
+     * @param alleles all supported alleles in the collection.
+     * @param reads reads stratified per sample.
+     *
+     * @throws IllegalArgumentException if any of {@code alleles}, {@code samples}
+     * or {@code reads} is {@code null},
+     *  or if they contain null values.
+     */
+    @SuppressWarnings("unchecked")
+    public ReadLikelihoods(final SampleList samples, final AlleleList<A> alleles,
+                           final Map<String, List<GATKSAMRecord>> reads) {
+        if (alleles == null)
+            throw new IllegalArgumentException("allele list cannot be null");
+        if (samples == null)
+            throw new IllegalArgumentException("sample list cannot be null");
+        if (reads == null)
+            throw new IllegalArgumentException("read map cannot be null");
+
+        this.samples = samples;
+        this.alleles = alleles;
+
+        final int sampleCount = samples.sampleCount();
+        final int alleleCount = alleles.alleleCount();
+
+        readsBySampleIndex = new GATKSAMRecord[sampleCount][];
+        readListBySampleIndex = new List[sampleCount];
+        valuesBySampleIndex = new double[sampleCount][][];
+        // -1 when the allele list contains no reference allele
+        referenceAlleleIndex = findReferenceAllele(alleles);
+
+        // element maps are built lazily by readIndexBySampleIndex(int)
+        readIndexBySampleIndex = new Object2IntMap[sampleCount];
+
+        setupIndexes(reads, sampleCount, alleleCount);
+
+        // per-sample matrix views are created lazily by sampleMatrix(int)
+        sampleMatrices = (Matrix<A>[]) new Matrix[sampleCount];
+    }
+
+    // Add all the indices to alleles, sample and reads in the look-up maps.
+    // Delegates per-sample initialization to setupSampleData.
+    private void setupIndexes(final Map<String, List<GATKSAMRecord>> reads, final int sampleCount, final int alleleCount) {
+        for (int i = 0; i < sampleCount; i++)
+            setupSampleData(i, reads, alleleCount);
+    }
+
+    // Assumes that {@link #samples} has been initialized with the sample names.
+    // Copies the sample's reads into the internal array (empty array when the sample
+    // has no entry in the input map) and allocates its zero-filled likelihood matrix.
+    private void setupSampleData(final int sampleIndex, final Map<String, List<GATKSAMRecord>> readsBySample,
+                                 final int alleleCount) {
+        final String sample = samples.sampleAt(sampleIndex);
+
+        final List<GATKSAMRecord> reads = readsBySample.get(sample);
+        readsBySampleIndex[sampleIndex] = reads == null
+                ? new GATKSAMRecord[0]
+                : reads.toArray(new GATKSAMRecord[reads.size()]);
+        final int sampleReadCount = readsBySampleIndex[sampleIndex].length;
+
+        // likelihoods default to 0 for every allele-read pair
+        final double[][] sampleValues = new double[alleleCount][sampleReadCount];
+        valuesBySampleIndex[sampleIndex] = sampleValues;
+    }
+
+    /**
+     * Create an independent copy of this read-likelihoods collection.
+     *
+     * <p>Likelihood values and the per-sample read arrays are deep-copied; the read
+     * objects themselves are shared. The per-sample read-index maps are left empty
+     * ({@code null} entries) and rebuilt lazily by the copy when needed.</p>
+     */
+    public ReadLikelihoods<A> clone() {
+
+        final int sampleCount = samples.sampleCount();
+        final int alleleCount = alleles.alleleCount();
+
+        final double[][][] newLikelihoodValues = new double[sampleCount][alleleCount][];
+
+        @SuppressWarnings("unchecked")
+        final Object2IntMap<GATKSAMRecord>[] newReadIndexBySampleIndex = new Object2IntMap[sampleCount];
+        final GATKSAMRecord[][] newReadsBySampleIndex = new GATKSAMRecord[sampleCount][];
+
+        for (int s = 0; s < sampleCount; s++) {
+            newReadsBySampleIndex[s] = readsBySampleIndex[s].clone();
+            for (int a = 0; a < alleleCount; a++)
+                newLikelihoodValues[s][a] = valuesBySampleIndex[s][a].clone();
+        }
+
+        // Finally we create the new read-likelihood
+        return new ReadLikelihoods<>(alleles, samples,
+                newReadsBySampleIndex,
+                newReadIndexBySampleIndex, newLikelihoodValues);
+    }
+
+    // Internally used constructor.
+    // Adopts (does not copy) the given arrays/maps; callers must hand over ownership.
+    @SuppressWarnings("unchecked")
+    private ReadLikelihoods(final AlleleList alleles, final SampleList samples,
+                            final GATKSAMRecord[][] readsBySampleIndex, final Object2IntMap<GATKSAMRecord>[] readIndex,
+                            final double[][][] values) {
+        this.samples = samples;
+        this.alleles = alleles;
+        this.readsBySampleIndex = readsBySampleIndex;
+        this.valuesBySampleIndex = values;
+        this.readIndexBySampleIndex = readIndex;
+        final int sampleCount = samples.sampleCount();
+        this.readListBySampleIndex = new List[sampleCount];
+
+        referenceAlleleIndex = findReferenceAllele(alleles);
+        sampleMatrices = (Matrix<A>[]) new Matrix[sampleCount];
+    }
+
+    // Search for the reference allele, if not found the index is -1.
+    // Returns the index of the first allele flagged as reference.
+    private int findReferenceAllele(final AlleleList<A> alleles) {
+        final int alleleCount = alleles.alleleCount();
+        for (int i = 0; i < alleleCount; i++)
+            if (alleles.alleleAt(i).isReference())
+                return i;
+        return -1;
+    }
+
+    /**
+     * Returns the index of a sample within the likelihood collection.
+     *
+     * @param sample the query sample.
+     *
+     * @throws IllegalArgumentException if {@code sample} is {@code null}.
+     * @return -1 if the sample is not included, 0 or greater otherwise.
+     */
+    public int sampleIndex(final String sample) {
+        return samples.sampleIndex(sample);
+    }
+
+    /**
+     * Number of samples included in the likelihood collection.
+     * @return 0 or greater.
+     */
+    public int sampleCount() {
+        // delegates to the underlying sample list
+        return samples.sampleCount();
+    }
+
+    /**
+     * Returns sample name given its index.
+     *
+     * @param sampleIndex query index.
+     *
+     * @throws IllegalArgumentException if {@code sampleIndex} is negative.
+     *
+     * @return never {@code null}.
+     */
+    public String sampleAt(final int sampleIndex) {
+        // delegates to the underlying sample list
+        return samples.sampleAt(sampleIndex);
+    }
+
+    /**
+     * Returns the index of an allele within the likelihood collection.
+     *
+     * @param allele the query allele.
+     *
+     * @throws IllegalArgumentException if {@code allele} is {@code null}.
+     *
+     * @return -1 if the allele is not included, 0 or greater otherwise.
+     */
+    public int alleleIndex(final A allele) {
+        // delegates to the underlying allele list
+        return alleles.alleleIndex(allele);
+    }
+
+    /**
+     * Returns number of alleles in the collection.
+     * @return 0 or greater.
+     */
+    @SuppressWarnings("unused")
+    public int alleleCount() {
+        // delegates to the underlying allele list
+        return alleles.alleleCount();
+    }
+
+    /**
+     * Returns the allele given its index.
+     *
+     * @param alleleIndex the allele index.
+     *
+     * @throws IllegalArgumentException if {@code alleleIndex} is not a valid allele index.
+     *
+     * @return never {@code null}.
+     */
+    public A alleleAt(final int alleleIndex) {
+        // delegates to the underlying allele list
+        return alleles.alleleAt(alleleIndex);
+    }
+
+    /**
+     * Returns the reads that belong to a sample sorted by their index (within that sample).
+     *
+     * @param sampleIndex the requested sample.
+     * @return never {@code null} but perhaps a zero-length array if there are no reads in the sample. No element in
+     *   the array will be null.
+     */
+    public List<GATKSAMRecord> sampleReads(final int sampleIndex) {
+        checkSampleIndex(sampleIndex);
+        final List<GATKSAMRecord> extantList = readListBySampleIndex[sampleIndex];
+        // build and cache the unmodifiable view on first access
+        if (extantList == null)
+            return readListBySampleIndex[sampleIndex] = Collections.unmodifiableList(Arrays.asList(readsBySampleIndex[sampleIndex]));
+        else
+            return extantList;
+    }
+
+    /**
+     * Returns a read vs allele likelihood matrix corresponding to a sample.
+     *
+     * @param sampleIndex target sample.
+     *
+     * @throws IllegalArgumentException if {@code sampleIndex} is not a valid sample index.
+     *
+     * @return never {@code null}
+     */
+    public Matrix<A> sampleMatrix(final int sampleIndex) {
+        checkSampleIndex(sampleIndex);
+        final Matrix<A> extantResult = sampleMatrices[sampleIndex];
+        // create and cache the matrix view on first access
+        if (extantResult != null)
+            return extantResult;
+        else
+            return sampleMatrices[sampleIndex] = new SampleMatrix(sampleIndex);
+    }
+
+    /**
+     * Adjusts likelihoods so that for each read, the best allele likelihood is 0 and caps the minimum likelihood
+     * of any allele for each read based on the maximum alternative allele likelihood.
+     *
+     * @param bestToZero set the best likelihood to 0, others will be subtracted the same amount.
+     * @param maximumLikelihoodDifferenceCap maximum difference between the best alternative allele likelihood
+     *                                           and any other likelihood.
+     *
+     * @throws IllegalArgumentException if {@code maximumLikelihoodDifferenceCap} is not 0 or less.
+     */
+    public void normalizeLikelihoods(final boolean bestToZero, final double maximumLikelihoodDifferenceCap) {
+        if (maximumLikelihoodDifferenceCap >= 0.0 || Double.isNaN(maximumLikelihoodDifferenceCap))
+            throw new IllegalArgumentException("the minimum reference likelihood fall cannot be positive");
+
+        // an infinite cap with no rescaling requested makes this a no-op
+        if (maximumLikelihoodDifferenceCap == Double.NEGATIVE_INFINITY && !bestToZero)
+            return;
+
+        final int alleleCount = alleles.alleleCount();
+        if (alleleCount == 0) // trivial case there is no alleles.
+            return;
+        else if (alleleCount == 1 && !bestToZero)
+            return;
+
+        for (int s = 0; s < valuesBySampleIndex.length; s++) {
+            final double[][] sampleValues = valuesBySampleIndex[s];
+            final int readCount = readsBySampleIndex[s].length;
+            for (int r = 0; r < readCount; r++)
+                normalizeLikelihoodsPerRead(bestToZero, maximumLikelihoodDifferenceCap, sampleValues, s, r);
+        }
+    }
+
+    // Does the normalizeLikelihoods job for each read.
+    private void normalizeLikelihoodsPerRead(final boolean bestToZero, final double maximumBestAltLikelihoodDifference,
+                                             final double[][] sampleValues, final int sampleIndex, final int readIndex) {
+
+        // best non-reference allele for this read (canBeReference == false)
+        final BestAllele bestAlternativeAllele = searchBestAllele(sampleIndex,readIndex,false);
+
+        // no likelihood may fall below this floor (best alt likelihood + non-positive cap)
+        final double worstLikelihoodCap = bestAlternativeAllele.likelihood + maximumBestAltLikelihoodDifference;
+
+        final double referenceLikelihood = referenceAlleleIndex == -1 ? Double.NEGATIVE_INFINITY :
+                sampleValues[referenceAlleleIndex][readIndex];
+
+
+        // overall best likelihood across reference and alternatives
+        final double bestAbsoluteLikelihood = Math.max(bestAlternativeAllele.likelihood,referenceLikelihood);
+
+        final int alleleCount = alleles.alleleCount();
+        if (bestToZero) {
+            // degenerate case: every likelihood is -Inf, so flatten to 0
+            if (bestAbsoluteLikelihood == Double.NEGATIVE_INFINITY)
+                for (int a = 0; a < alleleCount; a++)
+                    sampleValues[a][readIndex] = 0;
+            else if (worstLikelihoodCap != Double.NEGATIVE_INFINITY)
+                // apply the floor, then shift so the best value becomes 0
+                for (int a = 0; a < alleleCount; a++)
+                    sampleValues[a][readIndex] = (sampleValues[a][readIndex] < worstLikelihoodCap ? worstLikelihoodCap : sampleValues[a][readIndex]) - bestAbsoluteLikelihood;
+            else
+                for (int a = 0; a < alleleCount; a++)
+                    sampleValues[a][readIndex] -= bestAbsoluteLikelihood;
+        } else  // else if (maximumReferenceLikelihoodFall != Double.NEGATIVE_INFINITY ) { //
+            // Guarantee to be the case by enclosing code.
+            for (int a = 0; a < alleleCount; a++)
+                if (sampleValues[a][readIndex] < worstLikelihoodCap)
+                    sampleValues[a][readIndex] = worstLikelihoodCap;
+    }
+
+    /**
+     * Returns the samples in this read-likelihood collection.
+     * <p>
+     *     Samples are sorted by their index in the collection.
+     * </p>
+     *
+     * <p>
+     *     The returned list is an unmodifiable view on the read-likelihoods sample list.
+     * </p>
+     *
+     * @return never {@code null}.
+     */
+    public List<String> samples() {
+        // build and cache the list view on first access
+        return sampleList == null ? sampleList = SampleListUtils.asList(samples) : sampleList;
+
+    }
+
+    /**
+     * Returns the alleles in this read-likelihood collection.
+     * <p>
+     *     Alleles are sorted by their index in the collection.
+     * </p>
+     *
+     * <p>
+     *     The returned list is unmodifiable. It will not be updated if the collection
+     *     allele list changes.
+     * </p>
+     *
+     * @return never {@code null}.
+     */
+    public List<A> alleles() {
+        // build and cache the list view on first access; invalidated by addMissingAlleles
+        return alleleList == null ? alleleList = AlleleListUtils.asList(alleles) : alleleList;
+    }
+
+
+    /**
+     * Search the best allele for a read.
+     *
+     * @param sampleIndex the sample index.
+     * @param readIndex  target read index.
+     * @param canBeReference whether the reference allele may be returned as the best allele.
+     *
+     * @return never {@code null}, but with {@link BestAllele#allele allele} == {@code null}
+     * if none could be found.
+     */
+    private BestAllele searchBestAllele(final int sampleIndex, final int readIndex, final boolean canBeReference) {
+        final int alleleCount = alleles.alleleCount();
+        // no eligible allele at all: empty list, or only the reference while it is excluded
+        if (alleleCount == 0 || (alleleCount == 1 && referenceAlleleIndex == 0 && !canBeReference))
+            return new BestAllele(sampleIndex,readIndex,-1,Double.NEGATIVE_INFINITY,Double.NEGATIVE_INFINITY);
+
+        final double[][] sampleValues = valuesBySampleIndex[sampleIndex];
+        // start from the first eligible allele (skip index 0 if it is the excluded reference)
+        int bestAlleleIndex = canBeReference || referenceAlleleIndex != 0 ? 0 : 1;
+
+        double bestLikelihood = sampleValues[bestAlleleIndex][readIndex];
+        double secondBestLikelihood = Double.NEGATIVE_INFINITY;
+        for (int a = bestAlleleIndex + 1; a < alleleCount; a++) {
+            if (!canBeReference && referenceAlleleIndex == a)
+                continue;
+            final double candidateLikelihood = sampleValues[a][readIndex];
+            if (candidateLikelihood > bestLikelihood) {
+                bestAlleleIndex = a;
+                secondBestLikelihood = bestLikelihood;
+                bestLikelihood = candidateLikelihood;
+            } else if (candidateLikelihood > secondBestLikelihood) {
+                secondBestLikelihood = candidateLikelihood;
+            }
+        }
+        return new BestAllele(sampleIndex,readIndex,bestAlleleIndex,bestLikelihood,secondBestLikelihood);
+    }
+
+    /**
+     * Replaces reads in the collection with their realigned counterparts, keeping
+     * the likelihood values untouched.
+     *
+     * <p>Reads absent from {@code readRealignments} are left as they are; any cached
+     * read-index map is updated so index look-ups keep working.</p>
+     *
+     * @param readRealignments map from an original read to its replacement.
+     */
+    public void changeReads(final Map<GATKSAMRecord, GATKSAMRecord> readRealignments) {
+        final int sampleCount = samples.sampleCount();
+        for (int s = 0; s < sampleCount; s++) {
+            final GATKSAMRecord[] sampleReads = readsBySampleIndex[s];
+            final Object2IntMap<GATKSAMRecord> readIndex = readIndexBySampleIndex[s];
+            final int sampleReadCount = sampleReads.length;
+            for (int r = 0; r < sampleReadCount; r++) {
+                final GATKSAMRecord read = sampleReads[r];
+                final GATKSAMRecord replacement = readRealignments.get(read);
+                if (replacement == null)
+                    continue;
+                sampleReads[r] = replacement;
+                // keep the lazily-built read-index map consistent, if it exists
+                if (readIndex != null) {
+                    readIndex.remove(read);
+                    readIndex.put(replacement, r);
+                }
+            }
+        }
+    }
+
+    /**
+     * Add alleles that are missing in the read-likelihoods collection giving all reads a default
+     * likelihood value.
+     * @param candidateAlleles the potentially missing alleles.
+     * @param defaultLikelihood the default read likelihood value for that allele.
+     *
+     * @throws IllegalArgumentException if {@code candidateAlleles} is {@code null} or there is more than
+     * one missing allele that is a reference or there is one but the collection already has
+     * a reference allele.
+     */
+    public void addMissingAlleles(final Collection<A> candidateAlleles, final double defaultLikelihood) {
+        if (candidateAlleles == null)
+            throw new IllegalArgumentException("the candidateAlleles list cannot be null");
+        if (candidateAlleles.isEmpty())
+            return;
+        // keep only the candidates not already present
+        final List<A> allelesToAdd = new ArrayList<>(candidateAlleles.size());
+        for (final A allele : candidateAlleles)
+            if (alleles.alleleIndex(allele) == -1)
+                allelesToAdd.add(allele);
+
+        if (allelesToAdd.isEmpty())
+            return;
+
+        final int oldAlleleCount = alleles.alleleCount();
+        final int newAlleleCount = alleles.alleleCount() + allelesToAdd.size();
+
+        // invalidate the cached allele list view; it is rebuilt on next alleles() call
+        alleleList = null;
+        int referenceIndex = this.referenceAlleleIndex;
+        @SuppressWarnings("unchecked")
+        final A[] newAlleles = (A[]) new Allele[newAlleleCount];
+        for (int a = 0; a < oldAlleleCount; a++)
+            newAlleles[a] = this.alleleAt(a);
+        int newIndex = oldAlleleCount;
+        for (final A allele : allelesToAdd) {
+            if (allele.isReference()) {
+                if (referenceIndex != -1)
+                    throw new IllegalArgumentException("there cannot be more than one reference allele");
+                referenceIndex = newIndex;
+            }
+            newAlleles[newIndex++] = allele;
+        }
+
+        alleles = new IndexedAlleleList<>(newAlleles);
+
+        if (referenceIndex != -1)
+            referenceAlleleIndex = referenceIndex;
+
+        // grow each sample's likelihood matrix; new rows get the default likelihood
+        final int sampleCount = samples.sampleCount();
+        for (int s = 0; s < sampleCount; s++) {
+            final int sampleReadCount = readsBySampleIndex[s].length;
+            final double[][] newValuesBySampleIndex = Arrays.copyOf(valuesBySampleIndex[s],newAlleleCount);
+            for (int a = oldAlleleCount; a < newAlleleCount; a++) {
+                newValuesBySampleIndex[a] = new double[sampleReadCount];
+                if (defaultLikelihood != 0.0)
+                    Arrays.fill(newValuesBySampleIndex[a],defaultLikelihood);
+            }
+            valuesBySampleIndex[s] = newValuesBySampleIndex;
+        }
+    }
+
+    /**
+     * Likelihood matrix between a set of alleles and reads.
+     * @param <A> the allele-type.
+     */
+    public interface Matrix<A extends Allele> extends AlleleList<A> {
+
+        /**
+         * List of reads in the matrix sorted by their index therein.
+         * @return never {@code null}.
+         */
+        public List<GATKSAMRecord> reads();
+
+        /**
+         * List of alleles in the matrix sorted by their index in the collection.
+         * @return never {@code null}.
+         */
+        public List<A> alleles();
+
+        /**
+         * Set the likelihood of a read given an allele through their indices.
+         *
+         * @param alleleIndex the target allele index.
+         * @param readIndex the target read index.
+         * @param value new likelihood value for the target read give the target allele.
+         *
+         * @throws IllegalArgumentException if {@code alleleIndex} or {@code readIndex}
+         *  are not valid allele and read indices respectively.
+         */
+        public void set(final int alleleIndex, final int readIndex, final double value);
+
+        /**
+         * Returns the likelihood of a read given a haplotype.
+         *
+         * @param alleleIndex the index of the given haplotype.
+         * @param readIndex the index of the target read.
+         *
+         * @throws IllegalArgumentException if {@code alleleIndex} or {@code readIndex} is not a
+         * valid allele or read index respectively.
+         *
+         * @return the requested likelihood, whatever value was provided using {@link #set(int,int,double) set}
+         *    or 0.0 if none was set.
+         */
+        public double get(final int alleleIndex, final int readIndex);
+
+        /**
+         * Queries the index of an allele in the matrix.
+         *
+         * @param allele the target allele.
+         *
+         * @throws IllegalArgumentException if {@code allele} is {@code null}.
+         * @return -1 if such allele does not exist, otherwise its index which 0 or greater.
+         */
+        @SuppressWarnings("unused")
+        public int alleleIndex(final A allele);
+
+        /**
+         * Queries the index of a read in the matrix.
+         *
+         * @param read the target read.
+         *
+         * @throws IllegalArgumentException if {@code read} is {@code null}.
+         *
+         * @return -1 if there is not such a read in the matrix, otherwise its index
+         *    which is 0 or greater.
+         */
+        @SuppressWarnings("unused")
+        public int readIndex(final GATKSAMRecord read);
+
+        /**
+         * Number of allele in the matrix.
+         * @return never negative.
+         */
+        public int alleleCount();
+
+        /**
+         * Number of reads in the matrix.
+         * @return never negative.
+         */
+        public int readCount();
+
+        /**
+         * Returns the allele given its index.
+         *
+         * @param alleleIndex the target allele index.
+         *
+         * @throws IllegalArgumentException if {@code alleleIndex} is not a valid allele index.
+         * @return never {@code null}.
+         */
+        public A alleleAt(final int alleleIndex);
+
+        /**
+         * Returns the read given its index.
+         *
+         * @param readIndex the target read index.
+         *
+         * @throws IllegalArgumentException if {@code readIndex} is not a valid read index.
+         * @return never {@code null}.
+         */
+        public GATKSAMRecord readAt(final int readIndex);
+
+
+        /**
+         * Copies the likelihood of all the reads for a given allele into an array from a particular offset.
+         * @param alleleIndex the targeted allele
+         * @param dest the destination array.
+         * @param offset the copy offset within the destination allele
+         */
+        public void copyAlleleLikelihoods(final int alleleIndex, final double[] dest, final int offset);
+    }
+
+    /**
+     * Perform marginalization from an allele set to another (smaller one) taking the maximum value
+     * for each read in the original allele subset.
+     *
+     * @param newToOldAlleleMap map where the keys are the new alleles and the value list the original
+     *                          alleles that correspond to the new one.
+     * @return never {@code null}. The result will have the requested set of new alleles (keys in {@code newToOldAlleleMap}), and
+     * the same set of samples and reads as the original.
+     *
+     * @throws IllegalArgumentException is {@code newToOldAlleleMap} is {@code null} or contains {@code null} values,
+     *  or its values contain reference to non-existing alleles in this read-likelihood collection. Also no new allele
+     *  can have zero old alleles mapping nor two new alleles can make reference to the same old allele.
+     */
+    public <B extends Allele> ReadLikelihoods<B> marginalize(final Map<B, List<A>> newToOldAlleleMap) {
+
+        if (newToOldAlleleMap == null)
+            throw new IllegalArgumentException("the input allele mapping cannot be null");
+
+        @SuppressWarnings("unchecked")
+        final B[] newAlleles = newToOldAlleleMap.keySet().toArray((B[]) new Allele[newToOldAlleleMap.size()]);
+        final int oldAlleleCount = alleles.alleleCount();
+        final int newAlleleCount = newAlleles.length;
+
+        // we get the index correspondence between new old -> new allele, -1 entries mean that the old
+        // allele does not map to any new; supported but typically not the case.
+        final int[] oldToNewAlleleIndexMap = oldToNewAlleleIndexMap(newToOldAlleleMap, newAlleles, oldAlleleCount, newAlleleCount);
+
+        // We calculate the marginal likelihoods.
+
+        final double[][][] newLikelihoodValues = marginalLikelihoods(oldAlleleCount, newAlleleCount, oldToNewAlleleIndexMap, null);
+
+        final int sampleCount = samples.sampleCount();
+
+        // read-index maps are left null; the new collection rebuilds them lazily
+        @SuppressWarnings("unchecked")
+        final Object2IntMap<GATKSAMRecord>[] newReadIndexBySampleIndex = new Object2IntMap[sampleCount];
+        final GATKSAMRecord[][] newReadsBySampleIndex = new GATKSAMRecord[sampleCount][];
+
+        for (int s = 0; s < sampleCount; s++) {
+            newReadsBySampleIndex[s] = readsBySampleIndex[s].clone();
+        }
+
+        // Finally we create the new read-likelihood
+        return new ReadLikelihoods<>(new IndexedAlleleList(newAlleles), samples,
+                newReadsBySampleIndex,
+                newReadIndexBySampleIndex, newLikelihoodValues);
+    }
+
+
+    /**
+     * Perform marginalization from an allele set to another (smaller one) taking the maximum value
+     * for each read in the original allele subset.
+     *
+     * @param newToOldAlleleMap map where the keys are the new alleles and the value list the original
+     *                          alleles that correspond to the new one.
+     * @return never {@code null}. The result will have the requested set of new alleles (keys in {@code newToOldAlleleMap}), and
+     * the same set of samples and reads as the original.
+     *
+     * @param overlap if not {@code null}, only reads that overlap the location (with unclipping) will be present in
+     *                        the output read-collection.
+     *
+     * @throws IllegalArgumentException is {@code newToOldAlleleMap} is {@code null} or contains {@code null} values,
+     *  or its values contain reference to non-existing alleles in this read-likelihood collection. Also no new allele
+     *  can have zero old alleles mapping nor two new alleles can make reference to the same old allele.
+     */
+    public <B extends Allele> ReadLikelihoods<B> marginalize(final Map<B, List<A>> newToOldAlleleMap, final GenomeLoc overlap) {
+
+        if (overlap == null)
+            return marginalize(newToOldAlleleMap);
+
+        if (newToOldAlleleMap == null)
+            throw new IllegalArgumentException("the input allele mapping cannot be null");
+
+        @SuppressWarnings("unchecked")
+        final B[] newAlleles = newToOldAlleleMap.keySet().toArray((B[]) new Allele[newToOldAlleleMap.size()]);
+        final int oldAlleleCount = alleles.alleleCount();
+        final int newAlleleCount = newAlleles.length;
+
+        // we get the index correspondence between new old -> new allele, -1 entries mean that the old
+        // allele does not map to any new; supported but typically not the case.
+        final int[] oldToNewAlleleIndexMap = oldToNewAlleleIndexMap(newToOldAlleleMap, newAlleles, oldAlleleCount, newAlleleCount);
+
+        // indices (per sample) of the reads whose unclipped span overlaps the region
+        final int[][] readsToKeep = overlappingReadIndicesBySampleIndex(overlap);
+        // We calculate the marginal likelihoods.
+
+        final double[][][] newLikelihoodValues = marginalLikelihoods(oldAlleleCount, newAlleleCount, oldToNewAlleleIndexMap, readsToKeep);
+
+        final int sampleCount = samples.sampleCount();
+
+        // read-index maps are left null; the new collection rebuilds them lazily
+        @SuppressWarnings("unchecked")
+        final Object2IntMap<GATKSAMRecord>[] newReadIndexBySampleIndex = new Object2IntMap[sampleCount];
+        final GATKSAMRecord[][] newReadsBySampleIndex = new GATKSAMRecord[sampleCount][];
+
+        for (int s = 0; s < sampleCount; s++) {
+            final int[] sampleReadsToKeep = readsToKeep[s];
+            final GATKSAMRecord[] oldSampleReads = readsBySampleIndex[s];
+            final int oldSampleReadCount = oldSampleReads.length;
+            final int newSampleReadCount = sampleReadsToKeep.length;
+            // all reads kept: cheap array clone; otherwise copy only the selected reads
+            if (newSampleReadCount == oldSampleReadCount) {
+                newReadsBySampleIndex[s] = oldSampleReads.clone();
+            } else {
+                newReadsBySampleIndex[s] = new GATKSAMRecord[newSampleReadCount];
+                for (int i = 0; i < newSampleReadCount; i++)
+                    newReadsBySampleIndex[s][i] = oldSampleReads[sampleReadsToKeep[i]];
+            }
+        }
+
+        // Finally we create the new read-likelihood
+        return new ReadLikelihoods<>(new IndexedAlleleList(newAlleles), samples,
+                newReadsBySampleIndex,
+                newReadIndexBySampleIndex, newLikelihoodValues);
+    }
+
+    // Returns, per sample, the (sorted) indices of reads whose unclipped extent
+    // overlaps the given region; null if no region is given.
+    private int[][] overlappingReadIndicesBySampleIndex(final GenomeLoc overlap) {
+        if (overlap == null)
+            return null;
+        final int sampleCount = samples.sampleCount();
+        final int[][] result = new int[sampleCount][];
+        // the buffer is reused across samples to avoid per-sample allocations
+        final IntArrayList buffer = new IntArrayList(200);
+        final int referenceIndex = overlap.getContigIndex();
+        final int overlapStart = overlap.getStart();
+        final int overlapEnd = overlap.getStop();
+        for (int s = 0; s < sampleCount; s++) {
+            buffer.clear();
+            final GATKSAMRecord[] sampleReads = readsBySampleIndex[s];
+            final int sampleReadCount = sampleReads.length;
+            buffer.ensureCapacity(sampleReadCount);
+            for (int r = 0; r < sampleReadCount; r++)
+                if (unclippedReadOverlapsRegion(sampleReads[r], referenceIndex, overlapStart, overlapEnd))
+                    buffer.add(r);
+            result[s] = buffer.toIntArray();
+        }
+        return result;
+    }
+
+    /**
+     * Checks whether a read's unclipped extent overlaps a genomic region.
+     *
+     * @param read the read to test.
+     * @param region the target region.
+     * @return {@code true} iff the unclipped read overlaps {@code region}.
+     */
+    public static boolean unclippedReadOverlapsRegion(final GATKSAMRecord read, final GenomeLoc region) {
+        final int contigIndex = region.getContigIndex();
+        final int regionStart = region.getStart();
+        final int regionStop = region.getStop();
+        return unclippedReadOverlapsRegion(read, contigIndex, regionStart, regionStop);
+    }
+
+    // Overlap test against explicit contig-index/start/end coordinates, using the read's
+    // unclipped alignment boundaries.
+    private static boolean unclippedReadOverlapsRegion(final GATKSAMRecord sampleRead, final int referenceIndex, final int start, final int end) {
+        if (sampleRead.getReferenceIndex() != referenceIndex)
+            return false;
+
+        final int readStart = sampleRead.getUnclippedStart();
+        if (readStart > end)
+            return false;
+
+        // For mapped reads, guard against records where the unclipped end precedes the
+        // unclipped start by taking the larger of the two coordinates.
+        final int readEnd;
+        if (sampleRead.getReadUnmappedFlag())
+            readEnd = sampleRead.getUnclippedEnd();
+        else
+            readEnd = Math.max(sampleRead.getUnclippedEnd(), sampleRead.getUnclippedStart());
+        return readEnd >= start;
+    }
+
+    // Calculate the marginal likelihoods considering the old -> new allele index mapping.
+    // A read's marginal likelihood for a new allele is the maximum of its likelihoods across
+    // all old alleles that map onto that new allele (per oldToNewAlleleIndexMap).
+    // readsToKeep may be null (keep all reads); otherwise readsToKeep[s] lists the indices,
+    // within sample s's old read array, of the reads retained in the output.
+    private double[][][] marginalLikelihoods(final int oldAlleleCount, final int newAlleleCount, final int[] oldToNewAlleleIndexMap, final int[][] readsToKeep) {
+
+        final int sampleCount = samples.sampleCount();
+        final double[][][] result = new double[sampleCount][][];
+
+        for (int s = 0; s < sampleCount; s++) {
+            final int sampleReadCount = readsBySampleIndex[s].length;
+            final double[][] oldSampleValues = valuesBySampleIndex[s];
+            // A null subset means "keep every read of this sample" (also when the subset covers all reads).
+            final int[] sampleReadToKeep = readsToKeep == null || readsToKeep[s].length == sampleReadCount ? null : readsToKeep[s];
+            final int newSampleReadCount = sampleReadToKeep == null ? sampleReadCount : sampleReadToKeep.length;
+            final double[][] newSampleValues = result[s] = new double[newAlleleCount][newSampleReadCount];
+            // We initiate all likelihoods to -Inf.
+            for (int a = 0; a < newAlleleCount; a++)
+                Arrays.fill(newSampleValues[a], Double.NEGATIVE_INFINITY);
+            // For each old allele and read we update the new table keeping the maximum likelihood.
+            for (int r = 0; r < newSampleReadCount; r++) {
+                for (int a = 0; a < oldAlleleCount; a++) {
+                    // Translate the kept-read index back into the old read array when sub-setting.
+                    final int oldReadIndex = newSampleReadCount == sampleReadCount ? r : sampleReadToKeep[r];
+                    final int newAlleleIndex = oldToNewAlleleIndexMap[a];
+                    // -1 marks old alleles with no corresponding new allele; they are dropped.
+                    if (newAlleleIndex == -1)
+                        continue;
+                    final double likelihood = oldSampleValues[a][oldReadIndex];
+                    if (likelihood > newSampleValues[newAlleleIndex][r])
+                        newSampleValues[newAlleleIndex][r] = likelihood;
+                }
+            }
+        }
+        return result;
+    }
+
+    /**
+     * Given a collection of likelihood in the old map format, it creates the corresponding read-likelihoods collection.
+     *
+     * @param map the likelihoods to transform.
+     *
+     * @throws IllegalArgumentException if {@code map} is {@code null}.
+     *
+     * @return never {@code null}.
+     */
+    public static ReadLikelihoods<Allele> fromPerAlleleReadLikelihoodsMap(final Map<String,PerReadAlleleLikelihoodMap> map) {
+
+        // First we need to create the read-likelihood collection with all required alleles, samples and reads.
+        final SampleList sampleList = new IndexedSampleList(map.keySet());
+        // LinkedHashSet keeps a stable allele order across the per-sample union.
+        final Set<Allele> alleles = new LinkedHashSet<>(10);
+        final Map<String,List<GATKSAMRecord>> sampleToReads = new HashMap<>(sampleList.sampleCount());
+        for (final Map.Entry<String,PerReadAlleleLikelihoodMap> entry : map.entrySet()) {
+            final String sample = entry.getKey();
+            final PerReadAlleleLikelihoodMap sampleLikelihoods = entry.getValue();
+            alleles.addAll(sampleLikelihoods.getAllelesSet());
+            sampleToReads.put(sample,new ArrayList<>(sampleLikelihoods.getLikelihoodReadMap().keySet()));
+        }
+
+        final AlleleList<Allele> alleleList = new IndexedAlleleList<>(alleles);
+        final ReadLikelihoods<Allele> result = new ReadLikelihoods<>(sampleList,alleleList,sampleToReads);
+
+        // Now set the likelihoods.
+        // Only the (read, allele) pairs present in the input map are copied over; any other
+        // entries are left at whatever the freshly-built collection initializes them to.
+        for (final Map.Entry<String,PerReadAlleleLikelihoodMap> sampleEntry : map.entrySet()) {
+            final ReadLikelihoods.Matrix<Allele> sampleMatrix = result.sampleMatrix(result.sampleIndex(sampleEntry.getKey()));
+            for (final Map.Entry<GATKSAMRecord,Map<Allele,Double>> readEntry : sampleEntry.getValue().getLikelihoodReadMap().entrySet()) {
+                final GATKSAMRecord read = readEntry.getKey();
+                final int readIndex = sampleMatrix.readIndex(read);
+                for (final Map.Entry<Allele,Double> alleleEntry : readEntry.getValue().entrySet()) {
+                    final int alleleIndex = result.alleleIndex(alleleEntry.getKey());
+                    sampleMatrix.set(alleleIndex,readIndex,alleleEntry.getValue());
+                }
+            }
+        }
+        return result;
+    }
+
+    // Builds the old-allele-index -> new-allele-index translation array. Entries left as -1
+    // mark old alleles that no new allele refers to.
+    private <B extends Allele> int[] oldToNewAlleleIndexMap(final Map<B, List<A>> newToOldAlleleMap, final B[] newAlleles,
+                                                            final int oldAlleleCount, final int newAlleleCount) {
+
+        final int[] translation = new int[oldAlleleCount];
+        Arrays.fill(translation, -1);
+
+        for (int newIndex = 0; newIndex < newAlleleCount; newIndex++) {
+            final B newAllele = newAlleles[newIndex];
+            if (newAllele == null)
+                throw new IllegalArgumentException("input alleles cannot be null");
+            final List<A> mappedOldAlleles = newToOldAlleleMap.get(newAllele);
+            if (mappedOldAlleles == null)
+                throw new IllegalArgumentException("no new allele list can be null");
+            for (final A oldAllele : mappedOldAlleles) {
+                if (oldAllele == null)
+                    throw new IllegalArgumentException("old alleles cannot be null");
+                final int oldIndex = alleleIndex(oldAllele);
+                if (oldIndex == -1)
+                    throw new IllegalArgumentException("missing old allele " + oldAllele + " in likelihood collection ");
+                // Each old allele may be claimed by at most one new allele.
+                if (translation[oldIndex] != -1)
+                    throw new IllegalArgumentException("collision: two new alleles make reference to the same old allele");
+                translation[oldIndex] = newIndex;
+            }
+        }
+        return translation;
+    }
+
+    /**
+     * Remove those reads that do not overlap certain genomic location.
+     *
+     * <p>
+     *     This method modifies the current read-likelihoods collection.
+     * </p>
+     *
+     * @param location the target location.
+     *
+     * @throws IllegalArgumentException the location cannot be {@code null} nor unmapped.
+     */
+    @SuppressWarnings("unused")
+    public void filterToOnlyOverlappingUnclippedReads(final GenomeLoc location) {
+        if (location == null)
+            throw new IllegalArgumentException("the location cannot be null");
+        if (location.isUnmapped())
+            throw new IllegalArgumentException("the location cannot be unmapped");
+
+        final int sampleCount = samples.sampleCount();
+
+        final int locContig = location.getContigIndex();
+        final int locStart = location.getStart();
+        final int locEnd = location.getStop();
+
+        final int alleleCount = alleles.alleleCount();
+        // Reused across samples; cleared after each removal pass.
+        final IntArrayList removeIndices = new IntArrayList(10);
+        for (int s = 0; s < sampleCount; s++) {
+            final GATKSAMRecord[] sampleReads = readsBySampleIndex[s];
+            final int sampleReadCount = sampleReads.length;
+            for (int r = 0; r < sampleReadCount; r++)
+                if (!unclippedReadOverlapsRegion(sampleReads[r], locContig, locStart, locEnd))
+                    removeIndices.add(r);
+            removeSampleReads(s,removeIndices,alleleCount);
+            removeIndices.clear();
+        }
+    }
+
+    // Compare the read coordinates to the location of interest.
+    // NOTE(review): this private helper appears to have no callers within the visible code.
+    private boolean readOverlapsLocation(final String contig, final int locStart,
+                                         final int locEnd, final GATKSAMRecord read) {
+        if (read.getReadUnmappedFlag())
+            return false;
+        if (!read.getReferenceName().equals(contig))
+            return false;
+        // Normalize the alignment interval; based on GLP.createGenomeLoc(Read) the start can
+        // apparently exceed the stop, so order the endpoints defensively.
+        final int alnStart = Math.min(read.getAlignmentStart(), read.getAlignmentEnd());
+        final int alnStop = Math.max(read.getAlignmentStart(), read.getAlignmentEnd());
+        return alnStop >= locStart && alnStart <= locEnd;
+    }
+
+    /**
+     * Removes those read that the best possible likelihood given any allele is just too low.
+     *
+     * <p>
+     *     This is determined by a maximum error per read-base against the best likelihood possible.
+     * </p>
+     *
+     * @param maximumErrorPerBase the minimum acceptable error rate per read base, must be
+     *                            a positive number.
+     *
+     * @throws IllegalStateException is not supported for read-likelihood that do not contain alleles.
+     *
+     * @throws IllegalArgumentException if {@code maximumErrorPerBase} is negative.
+     */
+    public void filterPoorlyModeledReads(final double maximumErrorPerBase) {
+        if (alleles.alleleCount() == 0)
+            throw new IllegalStateException("unsupported for read-likelihood collections with no alleles");
+        if (Double.isNaN(maximumErrorPerBase) || maximumErrorPerBase <= 0.0)
+            throw new IllegalArgumentException("the maximum error per base must be a positive number");
+
+        final int alleleCount = alleles.alleleCount();
+        final int sampleCount = samples.sampleCount();
+        // Reused per sample; cleared after each removal pass.
+        final IntArrayList indicesToRemove = new IntArrayList(10);
+        for (int sample = 0; sample < sampleCount; sample++) {
+            final GATKSAMRecord[] sampleReads = readsBySampleIndex[sample];
+            for (int readIndex = 0; readIndex < sampleReads.length; readIndex++)
+                if (readIsPoorlyModelled(sample, readIndex, sampleReads[readIndex], maximumErrorPerBase))
+                    indicesToRemove.add(readIndex);
+            removeSampleReads(sample, indicesToRemove, alleleCount);
+            indicesToRemove.clear();
+        }
+    }
+
+    // Check whether the read is poorly modelled.
+    // A read is poorly modelled when no allele's likelihood reaches the minimum implied by
+    // tolerating up to maxErrorsForRead mismatching bases at a fixed log10 penalty each.
+    protected boolean readIsPoorlyModelled(final int sampleIndex, final int readIndex, final GATKSAMRecord read, final double maxErrorRatePerBase) {
+        // Tolerated error count, capped at 2 by the Math.min regardless of read length.
+        // NOTE(review): the cap means longer reads get no extra tolerated errors — confirm intended.
+        final double maxErrorsForRead = Math.min(2.0, Math.ceil(read.getReadLength() * maxErrorRatePerBase));
+        // Each tolerated error contributes a fixed -4 in log10 likelihood (i.e. 1e-4 per base).
+        final double log10QualPerBase = -4.0;
+        final double log10MaxLikelihoodForTrueAllele = maxErrorsForRead * log10QualPerBase;
+
+        final int alleleCount = alleles.alleleCount();
+        final double[][] sampleValues = valuesBySampleIndex[sampleIndex];
+        // A single sufficiently-likely allele is enough to keep the read.
+        for (int a = 0; a < alleleCount; a++)
+            if (sampleValues[a][readIndex] >= log10MaxLikelihoodForTrueAllele)
+                return false;
+        return true;
+    }
+
+
+    /**
+     * Add more reads to the collection.
+     *
+     * @param readsBySample reads to add.
+     * @param initialLikelihood the likelihood for the new entries.
+     *
+     * @throws IllegalArgumentException if {@code readsBySample} is {@code null} or {@code readsBySample} contains
+     *  {@code null} reads, or {@code readsBySample} contains read that are already present in the read-likelihood
+     *  collection.
+     */
+    public void addReads(final Map<String,List<GATKSAMRecord>> readsBySample, final double initialLikelihood) {
+
+        for (final Map.Entry<String,List<GATKSAMRecord>> entry : readsBySample.entrySet()) {
+
+            final String sample = entry.getKey();
+            final int sampleIndex = samples.sampleIndex(sample);
+            if (sampleIndex == -1)
+                throw new IllegalArgumentException("input sample " + sample +
+                        " is not part of the read-likelihoods collection");
+
+            final List<GATKSAMRecord> newSampleReads = entry.getValue();
+            if (newSampleReads == null || newSampleReads.size() == 0)
+                continue;
+
+            final int oldReadCount = readsBySampleIndex[sampleIndex].length;
+            final int updatedReadCount = oldReadCount + newSampleReads.size();
+
+            // First grow the per-sample read array, then the per-allele likelihood rows.
+            appendReads(newSampleReads, sampleIndex, oldReadCount, updatedReadCount);
+            extendsLikelihoodArrays(initialLikelihood, sampleIndex, oldReadCount, updatedReadCount);
+        }
+    }
+
+    // Extends the likelihood arrays-matrices.
+    // Grows every per-allele likelihood row of the sample to the new read count; the new slots
+    // are zero by array default and only filled explicitly when a non-zero initial likelihood
+    // is requested.
+    private void extendsLikelihoodArrays(final double initialLikelihood, final int sampleIndex,
+                                         final int sampleReadCount, final int newSampleReadCount) {
+        final double[][] sampleValues = valuesBySampleIndex[sampleIndex];
+        final int alleleCount = alleles.alleleCount();
+        for (int a = 0; a < alleleCount; a++)
+            sampleValues[a] = Arrays.copyOf(sampleValues[a], newSampleReadCount);
+        if (initialLikelihood != 0.0) // the default array new value.
+            for (int a = 0; a < alleleCount; a++)
+                Arrays.fill(sampleValues[a], sampleReadCount, newSampleReadCount, initialLikelihood);
+    }
+
+    // Append the new read reference into the structure per-sample.
+    // Each appended read is also registered in the sample's read-index lookup when that lookup
+    // exists (it may be null, in which case there is nothing to maintain).
+    // Note: duplicates are not rejected here; callers are expected not to add reads already
+    // present in the collection.
+    private void appendReads(final List<GATKSAMRecord> newSampleReads, final int sampleIndex,
+                             final int sampleReadCount, final int newSampleReadCount) {
+        final GATKSAMRecord[] sampleReads = readsBySampleIndex[sampleIndex] =
+                Arrays.copyOf(readsBySampleIndex[sampleIndex], newSampleReadCount);
+
+        int nextReadIndex = sampleReadCount;
+        final Object2IntMap<GATKSAMRecord> sampleReadIndex = readIndexBySampleIndex[sampleIndex];
+        for (final GATKSAMRecord newRead : newSampleReads) {
+            if (sampleReadIndex != null)
+                sampleReadIndex.put(newRead, nextReadIndex);
+            sampleReads[nextReadIndex++] = newRead;
+        }
+    }
+
+    /**
+     * Adds the non-reference allele to the read-likelihood collection setting each read likelihood to the second
+     * best found (or best one if only one allele has likelihood).
+     *
+     * <p>Nothing will happen if the read-likelihoods collection already includes the non-ref allele</p>
+     *
+     * <p>
+     *     <i>Implementation note: even when strictly speaking we do not need to demand the calling code to pass
+     *     the reference the non-ref allele, we still demand it in order to lead the
+     *     the calling code to use the right generic type for this likelihoods
+     *     collection {@link Allele}.</i>
+     * </p>
+     *
+     * @param nonRefAllele the non-ref allele.
+     *
+     * @throws IllegalArgumentException if {@code nonRefAllele} is anything but the designated <NON_REF>
+     * symbolic allele {@link org.broadinstitute.gatk.utils.variant.GATKVCFConstants#NON_REF_SYMBOLIC_ALLELE}.
+     */
+    public void addNonReferenceAllele(final A nonRefAllele) {
+
+        if (nonRefAllele == null)
+            throw new IllegalArgumentException("non-ref allele cannot be null");
+        if (!nonRefAllele.equals(GATKVCFConstants.NON_REF_SYMBOLIC_ALLELE))
+            throw new IllegalArgumentException("the non-ref allele is not valid");
+        // Already present?
+        if (alleles.alleleIndex(nonRefAllele) != -1)
+            return;
+
+        // Rebuild the allele list with NON_REF appended as the last allele.
+        final int oldAlleleCount = alleles.alleleCount();
+        final int newAlleleCount = oldAlleleCount + 1;
+        @SuppressWarnings("unchecked")
+        final A[] newAlleles = (A[]) new Allele[newAlleleCount];
+        for (int a = 0; a < oldAlleleCount; a++)
+            newAlleles[a] = alleles.alleleAt(a);
+        newAlleles[oldAlleleCount] = nonRefAllele;
+        alleles = new IndexedAlleleList<>(newAlleles);
+        alleleList = null; // remove the cached alleleList.
+
+        // Extend each sample's likelihood matrix with a row for the new allele.
+        final int sampleCount = samples.sampleCount();
+        for (int s = 0; s < sampleCount; s++)
+            addNonReferenceAlleleLikelihoodsPerSample(oldAlleleCount, newAlleleCount, s);
+    }
+
+    // Updates per-sample structures according to the addition of the NON_REF allele.
+    // The NON_REF likelihood of each read is set to the second best likelihood across the
+    // pre-existing alleles (or the best one when no finite runner-up exists).
+    private void addNonReferenceAlleleLikelihoodsPerSample(final int alleleCount, final int newAlleleCount, final int sampleIndex) {
+        // Grow the per-allele matrix by one row; the new row sits at index == old alleleCount.
+        final double[][] sampleValues = valuesBySampleIndex[sampleIndex] = Arrays.copyOf(valuesBySampleIndex[sampleIndex], newAlleleCount);
+        final int sampleReadCount = readsBySampleIndex[sampleIndex].length;
+
+        final double[] nonRefAlleleLikelihoods = sampleValues[alleleCount] = new double [sampleReadCount];
+        Arrays.fill(nonRefAlleleLikelihoods,Double.NEGATIVE_INFINITY);
+        for (int r = 0; r < sampleReadCount; r++) {
+            final BestAllele bestAllele = searchBestAllele(sampleIndex,r,true);
+            // confidence == best - secondBest (see BestAllele), so best - confidence recovers the
+            // second best; an infinite confidence means there is no finite runner-up, keep the best.
+            final double secondBestLikelihood = Double.isInfinite(bestAllele.confidence) ? bestAllele.likelihood
+                    : bestAllele.likelihood - bestAllele.confidence;
+            nonRefAlleleLikelihoods[r] = secondBestLikelihood;
+        }
+    }
+
+    /**
+     * Downsamples reads based on contamination fractions making sure that all alleles are affected proportionally.
+     *
+     * @param perSampleDownsamplingFraction contamination sample map where the sample name are the keys and the
+     *                                       fractions are the values.
+     *
+     * @throws IllegalArgumentException if {@code perSampleDownsamplingFraction} is {@code null}.
+     */
+    public void contaminationDownsampling(final Map<String, Double> perSampleDownsamplingFraction) {
+        // Enforce the documented contract explicitly instead of failing later with a NPE.
+        if (perSampleDownsamplingFraction == null)
+            throw new IllegalArgumentException("the per-sample downsampling fraction map cannot be null");
+
+        final int sampleCount = samples.sampleCount();
+        final IntArrayList readsToRemove = new IntArrayList(10); // blind estimate, can be improved?
+        final int alleleCount = alleles.alleleCount();
+        for (int s = 0; s < sampleCount; s++) {
+            final String sample = samples.sampleAt(s);
+            final Double fractionDouble = perSampleDownsamplingFraction.get(sample);
+            // Samples without an estimate, or with a NaN/non-positive fraction, are untouched.
+            if (fractionDouble == null)
+                continue;
+            final double fraction = fractionDouble;
+            if (Double.isNaN(fraction) || fraction <= 0.0)
+                continue;
+            if (fraction >= 1.0) {
+                // A fraction of 1 or more removes every read of the sample.
+                final int sampleReadCount = readsBySampleIndex[s].length;
+                readsToRemove.ensureCapacity(sampleReadCount);
+                for (int r = 0; r < sampleReadCount; r++)
+                    readsToRemove.add(r);
+                removeSampleReads(s,readsToRemove,alleleCount);
+                readsToRemove.clear();
+            }
+            else {
+                // Otherwise remove a subset stratified by each read's best allele so that all
+                // alleles are affected proportionally.
+                final Map<A,List<GATKSAMRecord>> readsByBestAllelesMap = readsByBestAlleleMap(s);
+                removeSampleReads(s,AlleleBiasedDownsamplingUtils.selectAlleleBiasedReads(readsByBestAllelesMap, fraction),alleleCount);
+            }
+        }
+    }
+
+    /**
+     * Given a collection of likelihood in the old map format, it creates the corresponding read-likelihoods collection.
+     *
+     * @param alleleList the target list of alleles.
+     * @param map the likelihoods to transform.
+     *
+     *
+     * @throws IllegalArgumentException if {@code map} is {@code null}, or {@code map} does not contain likelihoods for all read vs allele combinations.
+     *
+     * @return never {@code null}.
+     */
+    public static ReadLikelihoods<Allele> fromPerAlleleReadLikelihoodsMap(final AlleleList<Allele> alleleList, final Map<String,PerReadAlleleLikelihoodMap> map) {
+
+        //TODO add test code for this method.
+        // First we need to create the read-likelihood collection with all required alleles, samples and reads.
+        final SampleList sampleList = new IndexedSampleList(map.keySet());
+        final int alleleCount = alleleList.alleleCount();
+        final Map<String,List<GATKSAMRecord>> sampleToReads = new HashMap<>(sampleList.sampleCount());
+        for (final Map.Entry<String,PerReadAlleleLikelihoodMap> entry : map.entrySet()) {
+            final String sample = entry.getKey();
+            final PerReadAlleleLikelihoodMap sampleLikelihoods = entry.getValue();
+            sampleToReads.put(sample,new ArrayList<>(sampleLikelihoods.getLikelihoodReadMap().keySet()));
+        }
+
+        final ReadLikelihoods<Allele> result = new ReadLikelihoods<>(sampleList,alleleList,sampleToReads);
+
+        // Now set the likelihoods.
+        // Unlike the single-argument overload, every (read, allele) combination must have a
+        // likelihood in the input map; a missing entry is treated as an error.
+        for (final Map.Entry<String,PerReadAlleleLikelihoodMap> sampleEntry : map.entrySet()) {
+            final ReadLikelihoods.Matrix<Allele> sampleMatrix = result.sampleMatrix(result.sampleIndex(sampleEntry.getKey()));
+            for (final Map.Entry<GATKSAMRecord,Map<Allele,Double>> readEntry : sampleEntry.getValue().getLikelihoodReadMap().entrySet()) {
+                final GATKSAMRecord read = readEntry.getKey();
+                final int readIndex = sampleMatrix.readIndex(read);
+                final Map<Allele,Double> alleleToLikelihoodMap = readEntry.getValue();
+                for (int a = 0; a < alleleCount; a++) {
+                    final Allele allele = alleleList.alleleAt(a);
+                    final Double likelihood = alleleToLikelihoodMap.get(allele);
+                    if (likelihood == null)
+                        throw new IllegalArgumentException("there is no likelihood for allele " + allele + " and read " + read);
+                    sampleMatrix.set(a,readIndex,likelihood);
+                }
+            }
+        }
+        return result;
+    }
+
+    /**
+     * Returns the collection of best allele estimates for the reads based on the read-likelihoods.
+     *
+     * @throws IllegalStateException if there is no alleles.
+     *
+     * @return never {@code null}, one element per read in the read-likelihoods collection.
+     */
+    public Collection<BestAllele> bestAlleles() {
+        final List<BestAllele> best = new ArrayList<>(100); // blind estimate.
+        final int sampleCount = samples.sampleCount();
+        for (int sample = 0; sample < sampleCount; sample++) {
+            final int sampleReadCount = readsBySampleIndex[sample].length;
+            for (int read = 0; read < sampleReadCount; read++)
+                best.add(searchBestAllele(sample, read, true));
+        }
+        return best;
+    }
+
+    /**
+     * Returns reads stratified by their best allele.
+     * @param sampleIndex the target sample.
+     * @return never {@code null}, perhaps empty.
+     */
+    public Map<A,List<GATKSAMRecord>> readsByBestAlleleMap(final int sampleIndex) {
+        checkSampleIndex(sampleIndex);
+        final int sampleReadCount = readsBySampleIndex[sampleIndex].length;
+        final int alleleCount = alleles.alleleCount();
+        // Pre-populate one bucket per allele so alleles with no reads still map to empty lists.
+        final Map<A,List<GATKSAMRecord>> stratified = new HashMap<>(alleleCount);
+        for (int a = 0; a < alleleCount; a++)
+            stratified.put(alleles.alleleAt(a), new ArrayList<GATKSAMRecord>(sampleReadCount));
+        readsByBestAlleleMap(sampleIndex, stratified);
+        return stratified;
+    }
+
+    /**
+     * Returns reads stratified by their best allele.
+     * @return never {@code null}, perhaps empty.
+     */
+    @SuppressWarnings("unused")
+    public Map<A,List<GATKSAMRecord>> readsByBestAlleleMap() {
+        final int alleleCount = alleles.alleleCount();
+        final int totalReadCount = readCount();
+        // One bucket per allele, sized for the worst case where all reads share one best allele.
+        final Map<A,List<GATKSAMRecord>> stratified = new HashMap<>(alleleCount);
+        for (int a = 0; a < alleleCount; a++)
+            stratified.put(alleles.alleleAt(a), new ArrayList<GATKSAMRecord>(totalReadCount));
+        final int sampleCount = samples.sampleCount();
+        for (int s = 0; s < sampleCount; s++)
+            readsByBestAlleleMap(s, stratified);
+        return stratified;
+    }
+
+    // Adds each informative read of the given sample to the bucket of its best allele.
+    private void readsByBestAlleleMap(final int sampleIndex, final Map<A,List<GATKSAMRecord>> result) {
+        final int sampleReadCount = readsBySampleIndex[sampleIndex].length;
+        for (int r = 0; r < sampleReadCount; r++) {
+            final BestAllele best = searchBestAllele(sampleIndex, r, true);
+            // Reads whose best-vs-second-best margin is too small are left out entirely.
+            if (best.isInformative())
+                result.get(best.allele).add(best.read);
+        }
+    }
+
+    /**
+     * Returns the index of a read within a sample read-likelihood sub collection.
+     * @param sampleIndex the sample index.
+     * @param read the query read.
+     * @return -1 if there is no such read in that sample, 0 or greater otherwise.
+     */
+    @SuppressWarnings("unused")
+    public int readIndex(final int sampleIndex, final GATKSAMRecord read) {
+        final Object2IntMap<GATKSAMRecord> readIndex = readIndexBySampleIndex(sampleIndex);
+        if (readIndex.containsKey(read))
+            return readIndexBySampleIndex(sampleIndex).getInt(read);
+        else
+            return -1;
+    }
+
+    /**
+     * Returns the total number of reads in the read-likelihood collection.
+     *
+     * @return 0 or greater.
+     */
+    public int readCount() {
+        int sum = 0;
+        final int sampleCount = samples.sampleCount();
+        for (int i = 0; i < sampleCount; i++)
+            sum += readsBySampleIndex[i].length;
+        return sum;
+    }
+
+    /**
+     * Returns the number of reads that belong to a sample in the read-likelihood collection.
+     * @param sampleIndex the query sample index.
+     *
+     * @throws IllegalArgumentException if {@code sampleIndex} is not a valid sample index.
+     * @return 0 or greater.
+     */
+    public int sampleReadCount(final int sampleIndex) {
+        checkSampleIndex(sampleIndex); // throws IllegalArgumentException on an invalid index.
+        return readsBySampleIndex[sampleIndex].length;
+    }
+
+    /**
+     * Contains information about the best allele for a read search result.
+     */
+    public class BestAllele {
+        // Minimum best-vs-second-best likelihood difference for a read to count as informative.
+        public static final double INFORMATIVE_THRESHOLD = 0.2;
+
+        /**
+         * Null if there is no possible match (no allele?).
+         */
+        public final A allele;
+
+        /**
+         * The containing sample.
+         */
+        public final String sample;
+
+        /**
+         * The query read.
+         */
+        public final GATKSAMRecord read;
+
+        /**
+         * If allele != null, this indicates the likelihood of the read.
+         */
+        public final double likelihood;
+
+        /**
+         * Confidence that the read actually was generated under that likelihood.
+         * This is equal to the difference between this and the second best allele match.
+         */
+        public final double confidence;
+
+        // Captures the outcome of a best-allele search for one (sample, read) pair;
+        // a bestAlleleIndex of -1 denotes that no allele could be selected.
+        private BestAllele(final int sampleIndex, final int readIndex, final int bestAlleleIndex,
+                           final double likelihood, final double secondBestLikelihood) {
+            allele = bestAlleleIndex == -1 ? null : alleles.alleleAt(bestAlleleIndex);
+            this.likelihood = likelihood;
+            sample = samples.sampleAt(sampleIndex);
+            read = readsBySampleIndex[sampleIndex][readIndex];
+            // Equality short-circuit guards against NaN when both likelihoods are -Infinity.
+            confidence = likelihood == secondBestLikelihood ? 0 : likelihood - secondBestLikelihood;
+        }
+
+        // A read is informative when the best allele clearly beats the runner-up.
+        public boolean isInformative() {
+            return confidence > INFORMATIVE_THRESHOLD;
+        }
+    }
+
+    // Removes the reads at the given per-sample indices together with the corresponding
+    // likelihood columns for every allele.
+    private void removeSampleReads(final int sampleIndex, final IntArrayList indexToRemove, final int alleleCount) {
+        final int removeCount = indexToRemove.size();
+        if (removeCount == 0)
+            return;
+
+        final GATKSAMRecord[] oldSampleReads = readsBySampleIndex[sampleIndex];
+        final int sampleReadCount = oldSampleReads.length;
+
+        // Drop the removed reads from the read-index lookup (may not have been materialized yet).
+        final Object2IntMap<GATKSAMRecord> indexByRead = readIndexBySampleIndex[sampleIndex];
+        if (indexByRead != null)
+            for (int i = 0; i < removeCount; i++)
+                indexByRead.remove(oldSampleReads[indexToRemove.getInt(i)]);
+
+        // Mark the positions to delete; everything before the first deletion is left untouched.
+        final boolean[] removeIndex = new boolean[sampleReadCount];
+        final int firstDeleted = indexToRemove.getInt(0);
+        for (int i = 0; i < removeCount; i++)
+            removeIndex[indexToRemove.getInt(i)] = true;
+
+        final int newSampleReadCount = sampleReadCount - removeCount;
+
+        // Now we skim out the removed reads from the read array.
+        final GATKSAMRecord[] newSampleReads = new GATKSAMRecord[newSampleReadCount];
+        System.arraycopy(oldSampleReads,0,newSampleReads,0,firstDeleted);
+        Utils.skimArray(oldSampleReads,firstDeleted, newSampleReads, firstDeleted, removeIndex, firstDeleted);
+
+        // Keep the read-index lookup consistent: reads at or after the first deletion have
+        // shifted position (mirrors the Collection-based overload of this method).
+        if (indexByRead != null)
+            for (int r = firstDeleted; r < newSampleReadCount; r++)
+                indexByRead.put(newSampleReads[r], r);
+
+        // Then we skim out the likelihoods of the removed reads.
+        final double[][] oldSampleValues = valuesBySampleIndex[sampleIndex];
+        final double[][] newSampleValues = new double[alleleCount][newSampleReadCount];
+        for (int a = 0; a < alleleCount; a++) {
+            System.arraycopy(oldSampleValues[a],0,newSampleValues[a],0,firstDeleted);
+            Utils.skimArray(oldSampleValues[a], firstDeleted, newSampleValues[a], firstDeleted, removeIndex, firstDeleted);
+        }
+        valuesBySampleIndex[sampleIndex] = newSampleValues;
+        readsBySampleIndex[sampleIndex] = newSampleReads;
+        readListBySampleIndex[sampleIndex] = null; // reset the unmodifiable list.
+    }
+
+
    /**
     * Removes a collection of reads (and their likelihood values) from the given sample.
     * <p>
     *     Side effect: reads that are actually found in the sample are also removed from
     *     {@code readsToRemove} itself, so the passed collection must be modifiable and its
     *     iterator must support {@link Iterator#remove()}.
     * </p>
     *
     * @param sampleIndex index of the target sample.
     * @param readsToRemove candidate reads to remove; mutated in place (matched reads are taken out).
     * @param alleleCount number of alleles (rows) in this sample's likelihood matrix.
     */
    private void removeSampleReads(final int sampleIndex, final Collection<GATKSAMRecord> readsToRemove, final int alleleCount) {
        final GATKSAMRecord[] sampleReads = readsBySampleIndex[sampleIndex];
        final int sampleReadCount = sampleReads.length;

        final Object2IntMap<GATKSAMRecord> indexByRead = readIndexBySampleIndex(sampleIndex);
        // Count how many we are going to remove, which ones (indexes) and remove entry from the read-index map.
        final boolean[] removeIndex = new boolean[sampleReadCount];
        int removeCount = 0; // captures the number of deletions.
        int firstDeleted = sampleReadCount;    // captures the first position that was deleted.

        final Iterator<GATKSAMRecord> readsToRemoveIterator = readsToRemove.iterator();
        while (readsToRemoveIterator.hasNext()) {
            final GATKSAMRecord read = readsToRemoveIterator.next();
            if (indexByRead.containsKey(read)) {
                final int index = indexByRead.getInt(read);
                if (firstDeleted > index)
                    firstDeleted = index;
                removeCount++;
                removeIndex[index] = true;
                readsToRemoveIterator.remove();
                indexByRead.remove(read);
            }
        }

        // Nothing to remove we just finish here.
        if (removeCount == 0)
            return;

        final int newSampleReadCount = sampleReadCount - removeCount;

        // Now we skim out the removed reads from the read array.
        final GATKSAMRecord[] oldSampleReads = readsBySampleIndex[sampleIndex];
        final GATKSAMRecord[] newSampleReads = new GATKSAMRecord[newSampleReadCount];

        // Positions before the first deletion are unchanged; only the tail needs skimming.
        System.arraycopy(oldSampleReads,0,newSampleReads,0,firstDeleted);
        Utils.skimArray(oldSampleReads,firstDeleted, newSampleReads, firstDeleted, removeIndex, firstDeleted);

        // Update the indices for the extant reads from the first deletion onwards.
        for (int r = firstDeleted; r < newSampleReadCount; r++) {
            indexByRead.put(newSampleReads[r], r);
        }

        // Then we skim out the likelihoods of the removed reads.
        final double[][] oldSampleValues = valuesBySampleIndex[sampleIndex];
        final double[][] newSampleValues = new double[alleleCount][newSampleReadCount];
        for (int a = 0; a < alleleCount; a++) {
            System.arraycopy(oldSampleValues[a],0,newSampleValues[a],0,firstDeleted);
            Utils.skimArray(oldSampleValues[a], firstDeleted, newSampleValues[a], firstDeleted, removeIndex, firstDeleted);
        }
        valuesBySampleIndex[sampleIndex] = newSampleValues;
        readsBySampleIndex[sampleIndex] = newSampleReads;
        readListBySampleIndex[sampleIndex] = null; // reset the unmodifiable list.
    }
+
+    private Object2IntMap<GATKSAMRecord> readIndexBySampleIndex(final int sampleIndex) {
+        if (readIndexBySampleIndex[sampleIndex] == null) {
+            final GATKSAMRecord[] sampleReads = readsBySampleIndex[sampleIndex];
+            final int sampleReadCount = sampleReads.length;
+            readIndexBySampleIndex[sampleIndex] = new Object2IntOpenHashMap<>(sampleReadCount);
+            for (int r = 0; r < sampleReadCount; r++)
+                readIndexBySampleIndex[sampleIndex].put(sampleReads[r],r);
+        }
+        return readIndexBySampleIndex[sampleIndex];
+    }
+
+    /**
+     * Transform into a multi-sample HashMap backed {@link PerReadAlleleLikelihoodMap} type.
+     * @return never {@code null}.
+     *
+     * @deprecated
+     *
+     * This method should eventually disappear once we have removed PerReadAlleleLikelihoodMap class completelly.
+     */
+    @Deprecated
+    @SuppressWarnings("all")
+    public Map<String, PerReadAlleleLikelihoodMap> toPerReadAlleleLikelihoodMap() {
+        final int sampleCount = samples.sampleCount();
+        final Map<String, PerReadAlleleLikelihoodMap> result = new HashMap<>(sampleCount);
+        for (int s = 0; s < sampleCount; s++)
+            result.put(samples.sampleAt(s),toPerReadAlleleLikelihoodMap(s));
+        return result;
+    }
+
    /**
     * Transform into a single-sample HashMap backed {@link PerReadAlleleLikelihoodMap} type.
     *
     * @param sampleIndex index of the target sample; validated by {@link #checkSampleIndex}.
     * @return never {@code null}.
     *
     * @deprecated legacy representation; to disappear together with {@link PerReadAlleleLikelihoodMap}.
     */
    @Deprecated
    public PerReadAlleleLikelihoodMap toPerReadAlleleLikelihoodMap(final int sampleIndex) {
        checkSampleIndex(sampleIndex);
        final PerReadAlleleLikelihoodMap result = new PerReadAlleleLikelihoodMap();
        final int alleleCount = alleles.alleleCount();
        final GATKSAMRecord[] sampleReads = readsBySampleIndex[sampleIndex];
        final int sampleReadCount = sampleReads.length;
        // Copy every (allele, read) likelihood entry into the legacy map representation.
        for (int a = 0; a < alleleCount; a++) {
            final A allele = alleles.alleleAt(a);
            final double[] readLikelihoods = valuesBySampleIndex[sampleIndex][a];
            for (int r = 0; r < sampleReadCount; r++)
                result.add(sampleReads[r], allele, readLikelihoods[r]);
        }
        return result;
    }
+
+    /**
+     * Implements a likelihood matrix per sample given its index.
+     */
+    private class SampleMatrix implements Matrix<A> {
+
+        private final int sampleIndex;
+
+        private SampleMatrix(final int sampleIndex) {
+            this.sampleIndex = sampleIndex;
+        }
+
+        @Override
+        public List<GATKSAMRecord> reads() {
+            return sampleReads(sampleIndex);
+        }
+
+        @Override
+        public List<A> alleles() {
+            return ReadLikelihoods.this.alleles();
+        }
+
+        @Override
+        public void set(final int alleleIndex, final int readIndex, final double value) {
+            valuesBySampleIndex[sampleIndex][alleleIndex][readIndex] = value;
+        }
+
+        @Override
+        public double get(final int alleleIndex, final int readIndex) {
+            return valuesBySampleIndex[sampleIndex][alleleIndex][readIndex];
+        }
+
+        @Override
+        public int alleleIndex(final A allele) {
+            return ReadLikelihoods.this.alleleIndex(allele);
+        }
+
+        @Override
+        public int readIndex(final GATKSAMRecord read) {
+            return ReadLikelihoods.this.readIndex(sampleIndex, read);
+        }
+
+        @Override
+        public int alleleCount() {
+            return alleles.alleleCount();
+        }
+
+        @Override
+        public int readCount() {
+            return readsBySampleIndex[sampleIndex].length;
+        }
+
+        @Override
+        public A alleleAt(int alleleIndex) {
+            return ReadLikelihoods.this.alleleAt(alleleIndex);
+        }
+
+        @Override
+        public GATKSAMRecord readAt(final int readIndex) {
+            if (readIndex < 0)
+                throw new IllegalArgumentException("the read-index cannot be negative");
+            final GATKSAMRecord[] sampleReads = readsBySampleIndex[sampleIndex];
+            if (readIndex >= sampleReads.length)
+                throw new IllegalArgumentException("the read-index is beyond the read count of the sample");
+            return sampleReads[readIndex];
+        }
+
+        @Override
+        public void copyAlleleLikelihoods(final int alleleIndex, final double[] dest, final int offset) {
+            System.arraycopy(valuesBySampleIndex[sampleIndex][alleleIndex],0,dest,offset,readCount());
+        }
+    }
+
+    /**
+     * Checks whether the provide sample index is valid.
+     * <p>
+     *     If not, it throws an exception.
+     * </p>
+     * @param sampleIndex the target sample index.
+     *
+     * @throws IllegalArgumentException if {@code sampleIndex} is invalid, i.e. outside the range [0,{@link #sampleCount}).
+     */
+    private void checkSampleIndex(final int sampleIndex) {
+        if (sampleIndex < 0 || sampleIndex >= samples.sampleCount())
+            throw new IllegalArgumentException("invalid sample index: " + sampleIndex);
+    }
+}
\ No newline at end of file
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/genotyper/SampleList.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/genotyper/SampleList.java
new file mode 100644
index 0000000..a171edd
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/genotyper/SampleList.java
@@ -0,0 +1,42 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.genotyper;
+
/**
 * An indexed set of samples.
 *
 * <p>
 *     Implementing classes must guarantee that the sample list will remain <b>constant</b> through the life of the object.
 * </p>
 */
public interface SampleList {

    /**
     * Returns the number of samples in this list.
     *
     * @return 0 or greater.
     */
    int sampleCount();

    /**
     * Returns the index of a sample within this list.
     *
     * @param sample the sample to look up.
     * @return -1 if the sample is not present; otherwise its 0-based index.
     */
    int sampleIndex(final String sample);

    /**
     * Returns the sample at the given position.
     *
     * @param sampleIndex the 0-based target index.
     * @return the corresponding sample.
     */
    String sampleAt(final int sampleIndex);
}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/genotyper/SampleListUtils.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/genotyper/SampleListUtils.java
new file mode 100644
index 0000000..5a63643
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/genotyper/SampleListUtils.java
@@ -0,0 +1,224 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.genotyper;
+
+import java.util.*;
+
+/**
+ * Some utility operations on sample lists.
+ *
+ * @author Valentin Ruano-Rubio <valentin at broadinstitute.org>
+ */
+public class SampleListUtils {
+
+    private static final SampleList EMPTY_LIST = new SampleList() {
+
+        @Override
+        public int sampleCount() {
+            return 0;
+        }
+
+        @Override
+        public int sampleIndex(String sample) {
+            return -1;
+        }
+
+        @Override
+        public String sampleAt(final int sampleIndex) {
+            throw new IllegalArgumentException("index is out of valid range");
+        }
+    };
+
+    /**
+     * Empty list.
+     *
+     * @return never {@code null}
+     */
+    public static SampleList emptyList() {
+        return EMPTY_LIST;
+    }
+
+    /**
+     * Checks whether two sample lists are in fact the same.
+     * @param first one list to compare.
+     * @param second another list to compare.
+     *
+     * @throws IllegalArgumentException if if either list is {@code null}.
+     *
+     * @return {@code true} iff both list are equal.
+     */
+    public static boolean equals(final SampleList first, final SampleList second) {
+        if (first == null || second == null)
+            throw new IllegalArgumentException("no null list allowed");
+        final int sampleCount = first.sampleCount();
+        if (sampleCount != second.sampleCount())
+            return false;
+
+        for (int i = 0; i < sampleCount; i++) {
+            final String firstSample = first.sampleAt(i);
+            if (firstSample == null)
+                throw new IllegalStateException("no null samples allowed in sample-lists: first list at " + i);
+            final String secondSample = second.sampleAt(i);
+            if (secondSample == null)
+                throw new IllegalArgumentException("no null samples allowed in sample-list: second list at " + i);
+            if (!firstSample.equals(secondSample))
+                return false;
+        }
+        return true;
+    }
+
+    /**
+     * Returns a {@link List} unmodifiable view of a sample-list
+     * @param list the sample-list to wrap.
+     *
+     * @throws IllegalArgumentException if {@code list} is {@code null}.
+     *
+     * @return never {@code null}.
+     */
+    public static List<String> asList(final SampleList list) {
+        if (list == null)
+            throw new IllegalArgumentException("the list cannot be null");
+        return new AsList(list);
+    }
+
+    /**
+     * Returns a {@link Set} unmodifiable view of the sample-list
+     *
+     * @param list the sample-list to wrap.
+     *
+     * @throws IllegalArgumentException if {@code list} is {@code null}
+     */
+    public static Set<String> asSet(final SampleList list) {
+        if (list == null)
+            throw new IllegalArgumentException("the list cannot be null");
+        return new AsSet(list);
+    }
+
+    /**
+     * Creates a list with a single sample.
+     *
+     * @param sampleName the sample name.
+     * @return never {@code sampleName}
+     */
+    public static SampleList singletonList(final String sampleName) {
+        if (sampleName == null)
+            throw new IllegalArgumentException("the sample name cannot be null");
+        return new SampleList() {
+
+            @Override
+            public int sampleCount() {
+                return 1;
+            }
+
+            @Override
+            public int sampleIndex(final String sample) {
+                return sampleName.equals(sample) ? 0 : -1;
+            }
+
+            @Override
+            public String sampleAt(int sampleIndex) {
+                if (sampleIndex == 0)
+                    return sampleName;
+                throw new IllegalArgumentException("index is out of bounds");
+            }
+        };
+    }
+
+    /**
+     * Simple list view of a sample-list.
+     */
+    private static class AsList extends AbstractList<String> {
+
+        private final SampleList list;
+
+        private AsList(final SampleList list) {
+            this.list = list;
+
+        }
+
+        @Override
+        public String get(int index) {
+            return list.sampleAt(index);
+        }
+
+        @Override
+        public int size() {
+            return list.sampleCount();
+        }
+    }
+
+    /**
+     * Simple set view of a sample-list
+     */
+    private static class AsSet extends AbstractSet<String> {
+
+        private final SampleList list;
+
+        private AsSet(final SampleList list) {
+            this.list = list;
+
+        }
+
+        @Override
+        public Iterator<String> iterator() {
+            return new Iterator<String>() {
+                private int index = 0;
+
+                @Override
+                public boolean hasNext() {
+                    return index < list.sampleCount();
+                }
+
+                @Override
+                public String next() {
+                    if (index >= list.sampleCount())
+                        throw new NoSuchElementException("iterating beyond sample list end");
+                    return list.sampleAt(index++);
+                }
+
+                @Override
+                public void remove() {
+                    throw new UnsupportedOperationException("unsupported operation exception");
+                }
+            };
+        }
+
+        @Override
+        public int size() {
+            return list.sampleCount();
+        }
+
+        @Override
+        public boolean contains(final Object obj) {
+            if (obj == null)
+                return false;
+            else if (obj instanceof String)
+                return list.sampleIndex(((String)obj)) >= 0;
+            else
+                return false;
+        }
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/haplotype/EventMap.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/haplotype/EventMap.java
new file mode 100644
index 0000000..5a2735d
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/haplotype/EventMap.java
@@ -0,0 +1,423 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.haplotype;
+
+import com.google.java.contract.Ensures;
+import com.google.java.contract.Requires;
+import htsjdk.samtools.Cigar;
+import htsjdk.samtools.CigarElement;
+import org.apache.commons.lang.ArrayUtils;
+import org.apache.log4j.Logger;
+import org.broadinstitute.gatk.utils.BaseUtils;
+import org.broadinstitute.gatk.utils.GenomeLoc;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+import org.broadinstitute.gatk.utils.sam.AlignmentUtils;
+import htsjdk.variant.variantcontext.Allele;
+import htsjdk.variant.variantcontext.VariantContext;
+import htsjdk.variant.variantcontext.VariantContextBuilder;
+
+import java.util.*;
+
+/**
+ * Extract simple VariantContext events from a single haplotype
+ *
+ * User: depristo
+ * Date: 3/27/13
+ * Time: 8:35 AM
+ */
+public class EventMap extends TreeMap<Integer, VariantContext> {
    private final static Logger logger = Logger.getLogger(EventMap.class);
    // Minimum number of clumped events required before collapsing them into one block substitution.
    protected final static int MIN_NUMBER_OF_EVENTS_TO_COMBINE_INTO_BLOCK_SUBSTITUTION = 3;
    // NOTE(review): the two limits below are not referenced in this portion of the class -- confirm where they are used.
    private static final int MAX_EVENTS_PER_HAPLOTYPE = 3;
    private static final int MAX_INDELS_PER_HAPLOTYPE = 2;
    // Symbolic allele formerly emitted for insertions not fully resolved in the haplotype (see note in processCigarForInitialEvents).
    public final static Allele SYMBOLIC_UNASSEMBLED_EVENT_ALLELE = Allele.create("<UNASSEMBLED_EVENT>", false);

    // Inputs captured at construction time (all null when built via the testing constructor).
    private final Haplotype haplotype;
    private final byte[] ref;
    private final GenomeLoc refLoc;
    private final String sourceNameToAdd;
+
+    public EventMap(final Haplotype haplotype, final byte[] ref, final GenomeLoc refLoc, final String sourceNameToAdd) {
+        super();
+        this.haplotype = haplotype;
+        this.ref = ref;
+        this.refLoc = refLoc;
+        this.sourceNameToAdd = sourceNameToAdd;
+
+        processCigarForInitialEvents();
+    }
+
    /**
     * For testing.  Lets you set up an explicit configuration without having to process a haplotype and reference
     * @param stateForTesting the variant contexts to seed this map with; merged by start position via {@link #addVC}.
     */
    public EventMap(final Collection<VariantContext> stateForTesting) {
        // No haplotype/reference context in this mode; only the explicit events are stored.
        haplotype = null;
        ref = null;
        refLoc = null;
        sourceNameToAdd = null;
        for ( final VariantContext vc : stateForTesting )
            addVC(vc);
    }
+
+    protected void processCigarForInitialEvents() {
+        final Cigar cigar = haplotype.getCigar();
+        final byte[] alignment = haplotype.getBases();
+
+        int refPos = haplotype.getAlignmentStartHapwrtRef();
+        if( refPos < 0 ) {
+            return;
+        } // Protection against SW failures
+
+        final List<VariantContext> proposedEvents = new ArrayList<>();
+
+        int alignmentPos = 0;
+
+        for( int cigarIndex = 0; cigarIndex < cigar.numCigarElements(); cigarIndex++ ) {
+            final CigarElement ce = cigar.getCigarElement(cigarIndex);
+            final int elementLength = ce.getLength();
+            switch( ce.getOperator() ) {
+                case I:
+                {
+                    if( refPos > 0 ) { // protect against trying to create insertions/deletions at the beginning of a contig
+                        final List<Allele> insertionAlleles = new ArrayList<Allele>();
+                        final int insertionStart = refLoc.getStart() + refPos - 1;
+                        final byte refByte = ref[refPos-1];
+                        if( BaseUtils.isRegularBase(refByte) ) {
+                            insertionAlleles.add( Allele.create(refByte, true) );
+                        }
+                        if( cigarIndex == 0 || cigarIndex == cigar.getCigarElements().size() - 1 ) {
+                            // if the insertion isn't completely resolved in the haplotype, skip it
+                            // note this used to emit SYMBOLIC_UNASSEMBLED_EVENT_ALLELE but that seems dangerous
+                        } else {
+                            byte[] insertionBases = new byte[]{};
+                            insertionBases = ArrayUtils.add(insertionBases, ref[refPos - 1]); // add the padding base
+                            insertionBases = ArrayUtils.addAll(insertionBases, Arrays.copyOfRange(alignment, alignmentPos, alignmentPos + elementLength));
+                            if( BaseUtils.isAllRegularBases(insertionBases) ) {
+                                insertionAlleles.add( Allele.create(insertionBases, false) );
+                            }
+                        }
+                        if( insertionAlleles.size() == 2 ) { // found a proper ref and alt allele
+                            proposedEvents.add(new VariantContextBuilder(sourceNameToAdd, refLoc.getContig(), insertionStart, insertionStart, insertionAlleles).make());
+                        }
+                    }
+                    alignmentPos += elementLength;
+                    break;
+                }
+                case S:
+                {
+                    alignmentPos += elementLength;
+                    break;
+                }
+                case D:
+                {
+                    if( refPos > 0 ) { // protect against trying to create insertions/deletions at the beginning of a contig
+                        final byte[] deletionBases = Arrays.copyOfRange( ref, refPos - 1, refPos + elementLength );  // add padding base
+                        final List<Allele> deletionAlleles = new ArrayList<Allele>();
+                        final int deletionStart = refLoc.getStart() + refPos - 1;
+                        final byte refByte = ref[refPos-1];
+                        if( BaseUtils.isRegularBase(refByte) && BaseUtils.isAllRegularBases(deletionBases) ) {
+                            deletionAlleles.add( Allele.create(deletionBases, true) );
+                            deletionAlleles.add( Allele.create(refByte, false) );
+                            proposedEvents.add(new VariantContextBuilder(sourceNameToAdd, refLoc.getContig(), deletionStart, deletionStart + elementLength, deletionAlleles).make());
+                        }
+                    }
+                    refPos += elementLength;
+                    break;
+                }
+                case M:
+                case EQ:
+                case X:
+                {
+                    for( int iii = 0; iii < elementLength; iii++ ) {
+                        final byte refByte = ref[refPos];
+                        final byte altByte = alignment[alignmentPos];
+                        if( refByte != altByte ) { // SNP!
+                            if( BaseUtils.isRegularBase(refByte) && BaseUtils.isRegularBase(altByte) ) {
+                                final List<Allele> snpAlleles = new ArrayList<Allele>();
+                                snpAlleles.add( Allele.create( refByte, true ) );
+                                snpAlleles.add( Allele.create( altByte, false ) );
+                                proposedEvents.add(new VariantContextBuilder(sourceNameToAdd, refLoc.getContig(), refLoc.getStart() + refPos, refLoc.getStart() + refPos, snpAlleles).make());
+                            }
+                        }
+                        refPos++;
+                        alignmentPos++;
+                    }
+                    break;
+                }
+                case N:
+                case H:
+                case P:
+                default:
+                    throw new ReviewedGATKException( "Unsupported cigar operator created during SW alignment: " + ce.getOperator() );
+            }
+        }
+
+        for ( final VariantContext proposedEvent : proposedEvents )
+            addVC(proposedEvent, true);
+    }
+
    /**
     * Add VariantContext vc to this map, merging events with the same start sites if necessary
     * @param vc the variant context to add
     * @throws IllegalArgumentException if {@code vc} is {@code null} (checked by the two-argument overload).
     */
    public void addVC(final VariantContext vc) {
        addVC(vc, true);
    }
+
+    /**
+     * Add VariantContext vc to this map
+     * @param vc the variant context to add
+     * @param merge should we attempt to merge it with an already existing element, or should we throw an error in that case?
+     */
+    public void addVC(final VariantContext vc, final boolean merge) {
+        if ( vc == null ) throw new IllegalArgumentException("vc cannot be null");
+
+        if ( containsKey(vc.getStart()) ) {
+            if ( merge ) {
+                final VariantContext prev = get(vc.getStart());
+                put(vc.getStart(), makeBlock(prev, vc));
+            } else {
+                throw new IllegalStateException("Will not merge previously bound variant contexts as merge is false at " + vc);
+            }
+        } else
+            put(vc.getStart(), vc);
+    }
+
    /**
     * Create a block substitution out of two variant contexts that start at the same position
     *
     * vc1 can be SNP, and vc2 can then be either a insertion or deletion.
     * If vc1 is an indel, then vc2 must be the opposite type (vc1 deletion => vc2 must be an insertion)
     *
     * @param vc1 the first variant context we want to merge
     * @param vc2 the second
     * @return a block substitution that represents the composite substitution implied by vc1 and vc2
     * @throws IllegalArgumentException if the two contexts cannot be merged: different starts,
     *         {@code vc1} not biallelic, or an incompatible SNP/indel combination.
     */
    protected VariantContext makeBlock(final VariantContext vc1, final VariantContext vc2) {
        if ( vc1.getStart() != vc2.getStart() )  throw new IllegalArgumentException("vc1 and 2 must have the same start but got " + vc1 + " and " + vc2);
        if ( ! vc1.isBiallelic() ) throw new IllegalArgumentException("vc1 must be biallelic");
        if ( ! vc1.isSNP() ) {
            if ( ! ((vc1.isSimpleDeletion() && vc2.isSimpleInsertion()) || (vc1.isSimpleInsertion() && vc2.isSimpleDeletion())))
                throw new IllegalArgumentException("Can only merge single insertion with deletion (or vice versa) but got " + vc1 + " merging with " + vc2);
        } else if ( vc2.isSNP() ) {
            throw new IllegalArgumentException("vc1 is " + vc1 + " but vc2 is a SNP, which implies there's been some terrible bug in the cigar " + vc2);
        }

        final Allele ref, alt;
        final VariantContextBuilder b = new VariantContextBuilder(vc1);
        if ( vc1.isSNP() ) {
            // we have to repair the first base, so SNP case is special cased
            if ( vc1.getReference().equals(vc2.getReference()) ) {
                // we've got an insertion, so we just update the alt to have the prev alt
                ref = vc1.getReference();
                alt = Allele.create(vc1.getAlternateAllele(0).getDisplayString() + vc2.getAlternateAllele(0).getDisplayString().substring(1), false);
            } else {
                // we're dealing with a deletion, so we patch the ref
                ref = vc2.getReference();
                alt = vc1.getAlternateAllele(0);
                b.stop(vc2.getEnd());
            }
        } else {
            // indel + indel: keep the deletion's reference span and the insertion's alternate allele.
            final VariantContext insertion = vc1.isSimpleInsertion() ? vc1 : vc2;
            final VariantContext deletion  = vc1.isSimpleInsertion() ? vc2 : vc1;
            ref = deletion.getReference();
            alt = insertion.getAlternateAllele(0);
            b.stop(deletion.getEnd());
        }

        return b.alleles(Arrays.asList(ref, alt)).make();
    }
+
    // TODO -- warning this is an O(N^3) algorithm because I'm just lazy.  If it's valuable we need to reengineer it
    /**
     * Repeatedly scans the events and replaces any clump of nearby events with a single block
     * substitution, restarting the scan after every successful replacement because the
     * replacement mutates the map's keys underneath the iterator.
     */
    @Requires("getNumberOfEvents() > 0")
    protected void replaceClumpedEventsWithBlockSubstitutions() {
        if ( getNumberOfEvents() >= MIN_NUMBER_OF_EVENTS_TO_COMBINE_INTO_BLOCK_SUBSTITUTION) {
            int lastStart = -1;   // skips starts already considered on earlier passes.
            for ( boolean foundOne = true; foundOne; ) {
                foundOne = false;
                for ( final VariantContext vc : getVariantContexts() ) {
                    if ( vc.getStart() > lastStart ) {
                        lastStart = vc.getStart();
                        final List<VariantContext> neighborhood = getNeighborhood(vc, 10);
                        if ( updateToBlockSubstitutionIfBetter(neighborhood) ) {
                            foundOne = true;   // map was modified; break out and rescan from the top.
                            break;
                        }
                    }
                }
            }
        }
    }
+
    /**
     * Replaces a clump of neighboring events with one equivalent block substitution, when the
     * clump reaches {@link #MIN_NUMBER_OF_EVENTS_TO_COMBINE_INTO_BLOCK_SUBSTITUTION} elements.
     *
     * @param neighbors events, in increasing start order, considered for merging.
     * @return {@code true} iff the map was modified (neighbors removed, block added).
     * @throws IllegalArgumentException if a neighbor to remove is not actually present in the map.
     */
    protected boolean updateToBlockSubstitutionIfBetter(final List<VariantContext> neighbors) {
        if (neighbors.size() < MIN_NUMBER_OF_EVENTS_TO_COMBINE_INTO_BLOCK_SUBSTITUTION)
            return false;
        // TODO -- need more tests to decide if this is really so good

        final VariantContext first = neighbors.get(0);
        final int refStartOffset = first.getStart() - refLoc.getStart();
        final int refEndOffset = neighbors.get(neighbors.size() - 1).getEnd() - refLoc.getStart();

        // Reference and haplotype bases spanning the whole neighborhood become the block's two alleles.
        final byte[] refBases = Arrays.copyOfRange(ref, refStartOffset, refEndOffset + 1);
        final byte[] hapBases = AlignmentUtils.getBasesCoveringRefInterval(refStartOffset, refEndOffset, haplotype.getBases(), haplotype.getAlignmentStartHapwrtRef(), haplotype.getCigar());

        final VariantContextBuilder builder = new VariantContextBuilder(first);
        builder.stop(first.getStart() + refBases.length - 1);
        builder.alleles(Arrays.asList(Allele.create(refBases, true), Allele.create(hapBases)));
        final VariantContext block = builder.make();

        // remove all merged events
        for ( final VariantContext merged : neighbors ) {
            if ( remove(merged.getStart()) == null )
                throw new IllegalArgumentException("Expected to remove variant context from the event map but remove said there wasn't any element there: " + merged);
        }

        // note must be after we remove the previous events as the treeset only allows one key per start
        logger.info("Transforming into block substitution at " + block);
        addVC(block, false);

        return true;
    }
+
+    /**
+     * Get all of the variant contexts starting at leftMost that are within maxBPBetweenEvents of each other
+     *
+     * @param leftMost the left most (smallest position) variant context that will start the neighborhood
+     * @param maxBPBetweenEvents the maximum distance in BP between the end of one event and the start of the next
+     *                           to be included in the resulting list
+     * @return a list that contains at least one element (leftMost)
+     */
+    @Requires({"leftMost != null", "maxBPBetweenEvents >= 0"})
+    @Ensures({"result != null", "! result.isEmpty()"})
+    protected List<VariantContext> getNeighborhood(final VariantContext leftMost, final int maxBPBetweenEvents) {
+        final List<VariantContext> neighbors = new LinkedList<VariantContext>();
+
+        VariantContext left = leftMost;
+        for ( final VariantContext vc : getVariantContexts() ) {
+            // skip everything strictly before leftMost itself
+            if ( vc.getStart() < leftMost.getStart() )
+                continue;
+
+            // leftMost always passes this test against itself (start - end <= 0), which guarantees
+            // the non-empty result promised above.  NOTE(review): the comparison is strictly '<',
+            // so events exactly maxBPBetweenEvents apart are NOT grouped -- confirm this is intended.
+            if ( vc.getStart() - left.getEnd() < maxBPBetweenEvents ) {
+                // this vc is within max distance to the end of the left event, so accumulate it
+                neighbors.add(vc);
+                left = vc;
+            }
+        }
+
+        return neighbors;
+    }
+
+    /**
+     * Get the starting positions of events in this event map
+     * @return the set of start positions (the keys of this map)
+     */
+    public Set<Integer> getStartPositions() {
+        return keySet();
+    }
+
+    /**
+     * Get the variant contexts in order of start position in this event map
+     * @return the variant contexts stored in this map, one per start position
+     */
+    public Collection<VariantContext> getVariantContexts() {
+        return values();
+    }
+
+    /**
+     * How many events do we have?
+     * @return the number of events (entries) in this map
+     */
+    public int getNumberOfEvents() {
+        return size();
+    }
+
+    @Override
+    public String toString() {
+        // rendered as EventMap{chr:start-end alleles,chr:start-end alleles,...}
+        final StringBuilder b = new StringBuilder("EventMap{");
+        for ( final VariantContext vc : getVariantContexts() )
+            b.append(String.format("%s:%d-%d %s,", vc.getChr(), vc.getStart(), vc.getEnd(), vc.getAlleles()));
+        b.append("}");
+        return b.toString();
+    }
+
+    /**
+     * Build event maps for each haplotype, returning the sorted set of all of the starting positions of all
+     * events across all haplotypes
+     *
+     * As a side effect, each haplotype's event map is created and attached to it via setEventMap().
+     *
+     * @param haplotypes a list of haplotypes
+     * @param ref the reference bases
+     * @param refLoc the span of the reference bases
+     * @param debug if true, we'll emit debugging information during this operation
+     * @return a sorted set of start positions of all events among all haplotypes
+     */
+    public static TreeSet<Integer> buildEventMapsForHaplotypes( final List<Haplotype> haplotypes,
+                                                                final byte[] ref,
+                                                                final GenomeLoc refLoc,
+                                                                final boolean debug) {
+        // Using the cigar from each called haplotype figure out what events need to be written out in a VCF file
+        final TreeSet<Integer> startPosKeySet = new TreeSet<Integer>();
+        int hapNumber = 0;
+
+        if( debug ) logger.info("=== Best Haplotypes ===");
+        for( final Haplotype h : haplotypes ) {
+            // Walk along the alignment and turn any difference from the reference into an event
+            h.setEventMap( new EventMap( h, ref, refLoc, "HC" + hapNumber++ ) );
+            startPosKeySet.addAll(h.getEventMap().getStartPositions());
+
+            if( debug ) {
+                logger.info(h.toString());
+                logger.info("> Cigar = " + h.getCigar());
+                logger.info(">> Events = " + h.getEventMap());
+            }
+        }
+
+        return startPosKeySet;
+    }
+
+    /** Orders variant contexts by start position only; contexts with equal starts compare as equal. */
+    private static class VariantContextComparator implements Comparator<VariantContext> {
+        @Override
+        public int compare(VariantContext vc1, VariantContext vc2) {
+            // NOTE(review): int subtraction can overflow in general; acceptable here only because
+            // genomic start positions are non-negative.  Integer.compare(...) would be safer.
+            return vc1.getStart() - vc2.getStart();
+        }
+    }
+
+    /**
+     * Get all of the VariantContexts in the event maps for all haplotypes, sorted by their start position
+     *
+     * Note that because the TreeSet uses a comparator that compares start positions only, at most one
+     * event per start position is retained across all haplotypes.
+     *
+     * @param haplotypes the set of haplotypes to grab the VCs from
+     * @return a sorted set of variant contexts
+     */
+    public static TreeSet<VariantContext> getAllVariantContexts( final List<Haplotype> haplotypes ) {
+        // Using the cigar from each called haplotype figure out what events need to be written out in a VCF file
+        final TreeSet<VariantContext> vcs = new TreeSet<VariantContext>(new VariantContextComparator());
+
+        for( final Haplotype h : haplotypes ) {
+            vcs.addAll(h.getEventMap().getVariantContexts());
+        }
+
+        return vcs;
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/haplotype/Haplotype.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/haplotype/Haplotype.java
new file mode 100644
index 0000000..5b0ec5a
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/haplotype/Haplotype.java
@@ -0,0 +1,343 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.haplotype;
+
+import com.google.java.contract.Requires;
+import htsjdk.samtools.Cigar;
+import htsjdk.samtools.CigarElement;
+import htsjdk.samtools.CigarOperator;
+import org.apache.commons.lang.ArrayUtils;
+import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
+import org.broadinstitute.gatk.utils.GenomeLoc;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+import org.broadinstitute.gatk.utils.sam.AlignmentUtils;
+import org.broadinstitute.gatk.utils.sam.ReadUtils;
+import htsjdk.variant.variantcontext.Allele;
+
+import java.util.Arrays;
+import java.util.Comparator;
+import java.util.LinkedHashMap;
+import java.util.List;
+
+/**
+ * A candidate haplotype sequence: an Allele whose bases may additionally be annotated with a
+ * genomic span, a cigar aligning it to the reference, the offset of that alignment start with
+ * respect to the reference, an EventMap of its differences from the reference, and a support score.
+ *
+ * Note that equals() and hashCode() consider only the bases; the ref state, location, cigar,
+ * event map and score are all ignored for equality.
+ */
+public class Haplotype extends Allele {
+
+
+    // span of this haplotype on the reference; null until set
+    private GenomeLoc genomeLocation = null;
+    // the events (differences vs. the reference) carried by this haplotype; null until setEventMap()
+    private EventMap eventMap = null;
+    // cigar aligning this haplotype's bases to the reference; kept consolidated by setCigar()
+    private Cigar cigar;
+    // offset of the haplotype's alignment start with respect to the reference
+    private int alignmentStartHapwrtRef;
+    // an estimate of the support for this haplotype; NaN until setScore() is called
+    private double score = Double.NaN;
+
+    /**
+     * Main constructor
+     *
+     * @param bases a non-null array of bases
+     * @param isRef is this the reference haplotype?
+     */
+    public Haplotype( final byte[] bases, final boolean isRef ) {
+        super(bases.clone(), isRef);
+    }
+
+    /**
+     * Create a new non-ref haplotype
+     *
+     * @param bases a non-null array of bases
+     */
+    public Haplotype( final byte[] bases ) {
+        this(bases, false);
+    }
+
+    /**
+     * Create a new haplotype with bases
+     *
+     * Requires bases.length == cigar.getReadLength()
+     *
+     * @param bases a non-null array of bases
+     * @param isRef is this the reference haplotype?
+     * @param alignmentStartHapwrtRef offset of this haplotype w.r.t. the reference
+     * @param cigar the cigar that maps this haplotype to the reference sequence
+     */
+    public Haplotype( final byte[] bases, final boolean isRef, final int alignmentStartHapwrtRef, final Cigar cigar) {
+        this(bases, isRef);
+        this.alignmentStartHapwrtRef = alignmentStartHapwrtRef;
+        setCigar(cigar);
+    }
+
+    /**
+     * Copy constructor.  Note the ref state of the provided allele is ignored!
+     *
+     * @param allele allele to copy
+     */
+    public Haplotype( final Allele allele ) {
+        super(allele, true);
+    }
+
+    /**
+     * Create a new non-ref haplotype with the given bases and genomic span
+     *
+     * @param bases a non-null array of bases
+     * @param loc the genomic span to associate with this haplotype
+     */
+    public Haplotype( final byte[] bases, final GenomeLoc loc ) {
+        this(bases, false);
+        this.genomeLocation = loc;
+    }
+
+    /**
+     * Create a new Haplotype derived from this one that exactly spans the provided location
+     *
+     * Note that this haplotype must contain a GenomeLoc for this operation to be successful.  If no
+     * GenomeLoc is contained then an IllegalStateException is thrown.
+     *
+     * Also loc must be fully contained within this Haplotype's genomeLoc.  If not an IllegalArgumentException is
+     * thrown.
+     *
+     * @param loc a location completely contained within this Haplotype's location
+     * @return a new Haplotype with only the bases spanning the provided location, or null if for some reason
+     *         the trimmed haplotype would be malformed (e.g. its cigar would start or end with an indel)
+     */
+    public Haplotype trim(final GenomeLoc loc) {
+        if ( loc == null ) throw new IllegalArgumentException("Loc cannot be null");
+        if ( genomeLocation == null ) throw new IllegalStateException("Cannot trim a Haplotype without containing GenomeLoc");
+        if ( ! genomeLocation.containsP(loc) ) throw new IllegalArgumentException("Can only trim a Haplotype to a containing span.  My loc is " + genomeLocation + " but wanted trim to " + loc);
+        if ( getCigar() == null ) throw new IllegalArgumentException("Cannot trim haplotype without a cigar " + this);
+
+        // offsets of the requested span relative to this haplotype's own span
+        final int newStart = loc.getStart() - this.genomeLocation.getStart();
+        final int newStop = newStart + loc.size() - 1;
+        final byte[] newBases = AlignmentUtils.getBasesCoveringRefInterval(newStart, newStop, getBases(), 0, getCigar());
+        final Cigar newCigar = AlignmentUtils.trimCigarByReference(getCigar(), newStart, newStop);
+
+        if ( newBases == null || AlignmentUtils.startsOrEndsWithInsertionOrDeletion(newCigar) )
+            // we cannot meaningfully chop down the haplotype, so return null
+            return null;
+
+        final Haplotype ret = new Haplotype(newBases, isReference());
+        ret.setCigar(newCigar);
+        ret.setGenomeLocation(loc);
+        ret.setAlignmentStartHapwrtRef(newStart + getAlignmentStartHapwrtRef());
+        return ret;
+    }
+
+    // equality (and hashCode) depend only on the bases, not on the ref state or any annotations
+    @Override
+    public boolean equals( Object h ) {
+        return h instanceof Haplotype && Arrays.equals(getBases(), ((Haplotype) h).getBases());
+    }
+
+    @Override
+    public int hashCode() {
+        return Arrays.hashCode(getBases());
+    }
+
+    /**
+     * Get the map of events (differences vs. the reference) for this haplotype
+     * @return a potentially null EventMap (null until setEventMap is called)
+     */
+    public EventMap getEventMap() {
+        return eventMap;
+    }
+
+    public void setEventMap( final EventMap eventMap ) {
+        this.eventMap = eventMap;
+    }
+
+    @Override
+    public String toString() {
+        return getDisplayString();
+    }
+
+    /**
+     * Get the span of this haplotype (may be null)
+     * @return a potentially null genome loc
+     */
+    public GenomeLoc getGenomeLocation() {
+        return genomeLocation;
+    }
+
+    public void setGenomeLocation(GenomeLoc genomeLocation) {
+        this.genomeLocation = genomeLocation;
+    }
+
+    // NOTE(review): dereferences genomeLocation directly -- throws NullPointerException if it was never set
+    public long getStartPosition() {
+        return genomeLocation.getStart();
+    }
+
+    // NOTE(review): dereferences genomeLocation directly -- throws NullPointerException if it was never set
+    public long getStopPosition() {
+        return genomeLocation.getStop();
+    }
+
+    public int getAlignmentStartHapwrtRef() {
+        return alignmentStartHapwrtRef;
+    }
+
+    public void setAlignmentStartHapwrtRef( final int alignmentStartHapwrtRef ) {
+        this.alignmentStartHapwrtRef = alignmentStartHapwrtRef;
+    }
+
+    /**
+     * Get the cigar for this haplotype.  Note that the cigar is guaranteed to be consolidated
+     * in that multiple adjacent equal operates will have been merged
+     * @return the cigar of this haplotype
+     */
+    public Cigar getCigar() {
+        return cigar;
+    }
+
+    /**
+     * Get the haplotype cigar extended by padSize M at the tail, consolidated into a clean cigar
+     *
+     * @param padSize how many additional Ms should be appended to the end of this cigar.  Must be >= 0
+     * @return a newly allocated Cigar that consolidate(getCigar + padSize + M)
+     */
+    public Cigar getConsolidatedPaddedCigar(final int padSize) {
+        if ( padSize < 0 ) throw new IllegalArgumentException("padSize must be >= 0 but got " + padSize);
+        final Cigar extendedHaplotypeCigar = new Cigar(getCigar().getCigarElements());
+        if ( padSize > 0 ) extendedHaplotypeCigar.add(new CigarElement(padSize, CigarOperator.M));
+        return AlignmentUtils.consolidateCigar(extendedHaplotypeCigar);
+    }
+
+    /**
+     * Set the cigar of this haplotype to cigar.
+     *
+     * Note that this function consolidates the cigar, so that 1M1M1I1M1M => 2M1I2M
+     *
+     * @param cigar a cigar whose readLength == length()
+     */
+    public void setCigar( final Cigar cigar ) {
+        this.cigar = AlignmentUtils.consolidateCigar(cigar);
+        if ( this.cigar.getReadLength() != length() )
+            throw new IllegalArgumentException("Read length " + length() + " not equal to the read length of the cigar " + cigar.getReadLength() + " " + this.cigar);
+    }
+
+    /**
+     * Create a new haplotype in which refAllele (located at refInsertLocation, in reference
+     * haplotype offset coordinates) has been replaced by altAllele.
+     *
+     * NOTE(review): the genomicInsertLocation parameter is never read in this method body.
+     *
+     * @param refAllele the reference allele being replaced
+     * @param altAllele the alternate allele to insert in its place
+     * @param refInsertLocation insertion point in ref haplotype offset coordinates (NOT genomic coordinates)
+     * @param genomicInsertLocation insertion point in genomic coordinates (currently unused)
+     * @return a new non-ref Haplotype carrying the alt allele, or null if the change falls inside a deletion
+     */
+    @Requires({"refInsertLocation >= 0"})
+    public Haplotype insertAllele( final Allele refAllele, final Allele altAllele, final int refInsertLocation, final int genomicInsertLocation ) {
+        // refInsertLocation is in ref haplotype offset coordinates NOT genomic coordinates
+        final int haplotypeInsertLocation = ReadUtils.getReadCoordinateForReferenceCoordinate(alignmentStartHapwrtRef, cigar, refInsertLocation, ReadUtils.ClippingTail.RIGHT_TAIL, true);
+        final byte[] myBases = this.getBases();
+        if( haplotypeInsertLocation == -1 || haplotypeInsertLocation + refAllele.length() >= myBases.length ) { // desired change falls inside deletion so don't bother creating a new haplotype
+            return null;
+        }
+
+        byte[] newHaplotypeBases = new byte[]{};
+        newHaplotypeBases = ArrayUtils.addAll(newHaplotypeBases, ArrayUtils.subarray(myBases, 0, haplotypeInsertLocation)); // bases before the variant
+        newHaplotypeBases = ArrayUtils.addAll(newHaplotypeBases, altAllele.getBases()); // the alt allele of the variant
+        newHaplotypeBases = ArrayUtils.addAll(newHaplotypeBases, ArrayUtils.subarray(myBases, haplotypeInsertLocation + refAllele.length(), myBases.length)); // bases after the variant
+        return new Haplotype(newHaplotypeBases);
+    }
+
+    /**
+     * Build one haplotype (of haplotypeSize bases) per allele in alleleList, each anchored at
+     * startPos with numPrefBases of reference prefix taken from ref.
+     *
+     * @return a map from each input allele to its haplotype, in the iteration order of alleleList
+     * @throws ReviewedGATKException if alleleList contains no reference allele
+     */
+    public static LinkedHashMap<Allele,Haplotype> makeHaplotypeListFromAlleles(final List<Allele> alleleList,
+                                                                               final int startPos,
+                                                                               final ReferenceContext ref,
+                                                                               final int haplotypeSize,
+                                                                               final int numPrefBases) {
+
+        LinkedHashMap<Allele,Haplotype> haplotypeMap = new LinkedHashMap<Allele,Haplotype>();
+
+        Allele refAllele = null;
+
+        // locate the (required) reference allele among the inputs
+        for (Allele a:alleleList) {
+            if (a.isReference()) {
+                refAllele = a;
+                break;
+            }
+        }
+
+        if (refAllele == null)
+            throw new ReviewedGATKException("BUG: no ref alleles in input to makeHaplotypeListfrom Alleles at loc: "+ startPos);
+
+        final byte[] refBases = ref.getBases();
+
+        final int startIdxInReference = 1 + startPos - numPrefBases - ref.getWindow().getStart();
+        final String basesBeforeVariant = new String(Arrays.copyOfRange(refBases, startIdxInReference, startIdxInReference + numPrefBases));
+
+        // protect against long events that overrun available reference context
+        final int startAfter = Math.min(startIdxInReference + numPrefBases + refAllele.getBases().length - 1, refBases.length);
+        final String basesAfterVariant = new String(Arrays.copyOfRange(refBases, startAfter, refBases.length));
+
+        // Create location for all haplotypes
+        final int startLoc = ref.getWindow().getStart() + startIdxInReference;
+        final int stopLoc = startLoc + haplotypeSize-1;
+
+        final GenomeLoc locus = ref.getGenomeLocParser().createGenomeLoc(ref.getLocus().getContig(),startLoc,stopLoc);
+
+        for (final Allele a : alleleList) {
+
+            final byte[] alleleBases = a.getBases();
+            // use string concatenation
+            String haplotypeString = basesBeforeVariant + new String(Arrays.copyOfRange(alleleBases, 1, alleleBases.length)) + basesAfterVariant;
+            haplotypeString = haplotypeString.substring(0,haplotypeSize);
+
+            haplotypeMap.put(a,new Haplotype(haplotypeString.getBytes(), locus));
+        }
+
+        return haplotypeMap;
+    }
+
+    // NOTE(review): this class is not referenced anywhere else in this file -- candidate for removal
+    private static class Event {
+        public Allele ref;
+        public Allele alt;
+        public int pos;
+
+        public Event( final Allele ref, final Allele alt, final int pos ) {
+            this.ref = ref;
+            this.alt = alt;
+            this.pos = pos;
+        }
+    }
+
+    /**
+     * Get the score (an estimate of the support) of this haplotype
+     * @return a double, where higher values are better
+     */
+    public double getScore() {
+        return score;
+    }
+
+    /**
+     * Set the score (an estimate of the support) of this haplotype.
+     *
+     * Note that if this is the reference haplotype it is always given Double.MAX_VALUE score
+     * (NOTE(review): that convention is the callers' responsibility; this setter does not enforce it)
+     *
+     * @param score a double, where higher values are better
+     */
+    public void setScore(double score) {
+        this.score = score;
+    }
+
+    /**
+     * Comparator used to sort haplotypes, alphanumerically.
+     *
+     * <p>
+     *     If one haplotype is a prefix of the other, the longer one comes first
+     *     (see the length tie-break below).
+     * </p>
+     */
+    public static final Comparator<Haplotype> ALPHANUMERICAL_COMPARATOR = new Comparator<Haplotype>() {
+
+        @Override
+        public int compare(final Haplotype o1, final Haplotype o2) {
+            if (o1 == o2)
+                return 0;
+            // compare base-by-base up to the shorter of the two lengths
+            final byte[] bases1 = o1.getBases();
+            final byte[] bases2 = o2.getBases();
+            final int iLimit = Math.min(bases1.length, bases2.length);
+            for (int i = 0; i < iLimit; i++) {
+                final int cmp = Byte.compare(bases1[i], bases2[i]);
+                if (cmp != 0) return cmp;
+            }
+            if (bases1.length == bases2.length) return 0;
+            return (bases1.length > bases2.length) ? -1 : 1; // is a bit better to get the longest haplotypes first.
+        }
+    };
+
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/haplotype/HaplotypeBaseComparator.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/haplotype/HaplotypeBaseComparator.java
new file mode 100644
index 0000000..d03f0b6
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/haplotype/HaplotypeBaseComparator.java
@@ -0,0 +1,42 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.haplotype;
+
+import java.util.Comparator;
+
+/**
+ * Compares two haplotypes in the lexicographic order of their bases
+ * (delegates to String.compareTo on the base strings).
+ *
+ * User: depristo
+ * Date: 3/29/13
+ * Time: 11:09 AM
+ */
+public class HaplotypeBaseComparator implements Comparator<Haplotype> {
+    @Override
+    public int compare( final Haplotype hap1, final Haplotype hap2 ) {
+        return hap1.getBaseString().compareTo(hap2.getBaseString());
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/haplotype/HaplotypeScoreComparator.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/haplotype/HaplotypeScoreComparator.java
new file mode 100644
index 0000000..f294076
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/haplotype/HaplotypeScoreComparator.java
@@ -0,0 +1,39 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.haplotype;
+
+import java.util.Comparator;
+
+/**
+ * A comparator that sorts haplotypes in decreasing order of score, so that the best supported
+ * haplotypes are at the top
+ */
+public class HaplotypeScoreComparator implements Comparator<Haplotype> {
+    @Override
+    public int compare(Haplotype o1, Haplotype o2) {
+        // negate the natural ascending ordering so that higher scores sort first.
+        // NOTE(review): Double.compare(o2.getScore(), o1.getScore()) would express this more directly.
+        return -1 * Double.valueOf(o1.getScore()).compareTo(o2.getScore());
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/haplotype/HaplotypeSizeAndBaseComparator.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/haplotype/HaplotypeSizeAndBaseComparator.java
new file mode 100644
index 0000000..241ac72
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/haplotype/HaplotypeSizeAndBaseComparator.java
@@ -0,0 +1,47 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.haplotype;
+
+import java.util.Comparator;
+
+/**
+ * Compares two haplotypes first by their lengths (shorter first) and then by lexicographic
+ * order of their bases.
+ *
+ * User: btaylor
+ * Date: 8/1/13
+ * Time: 11:09 AM
+ */
+public class HaplotypeSizeAndBaseComparator implements Comparator<Haplotype> {
+    @Override
+    public int compare( final Haplotype hap1, final Haplotype hap2 ) {
+        // primary key: number of bases, ascending
+        if (hap1.getBases().length < hap2.getBases().length)
+            return -1;
+        else if (hap1.getBases().length > hap2.getBases().length)
+            return 1;
+        else
+            // tie-break: lexicographic order of the base strings
+            return hap1.getBaseString().compareTo(hap2.getBaseString());
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/help/ApplicationDetails.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/help/ApplicationDetails.java
new file mode 100644
index 0000000..56b39eb
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/help/ApplicationDetails.java
@@ -0,0 +1,95 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.help;
+
+import org.broadinstitute.gatk.utils.commandline.CommandLineProgram;
+import org.broadinstitute.gatk.utils.classloader.JVMUtils;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+
+import java.io.IOException;
+import java.util.Collections;
+import java.util.List;
+
+/**
+ * Contains additional details that the program can
+ * supply about itself.
+ *
+ * @author hanna
+ * @version 0.1
+ */
+
+public class ApplicationDetails {
+    /**
+     * Key information about the application (name, who to contact for support, etc.).
+     */
+    final List<String> applicationHeader;
+
+    /**
+     * Stores additional attribution for a given walker.
+     */
+    final List<String> attribution;
+
+    /**
+     * Details covering exactly how to run this executable.
+     */
+    final String runningInstructions;
+
+    /**
+     * Additional help particular to this command-line application.
+     */
+    final String additionalHelp;
+
+    /**
+     * @param applicationHeader lines describing the application (see createDefaultHeader)
+     * @param attribution additional attribution lines for a given walker
+     * @param runningInstructions how to invoke this executable (see createDefaultRunningInstructions)
+     * @param additionalHelp extra help text for this command-line application
+     */
+    public ApplicationDetails( List<String> applicationHeader, List<String> attribution, String runningInstructions, String additionalHelp ) {
+        this.applicationHeader = applicationHeader;
+        this.attribution = attribution;
+        this.runningInstructions = runningInstructions;
+        this.additionalHelp = additionalHelp;
+    }
+
+    /** Builds the default one-line header: "Program Name: " followed by the application's class name. */
+    public static List<String> createDefaultHeader(Class<? extends CommandLineProgram> application) {
+        return Collections.singletonList("Program Name: " + application.getName());
+    }
+
+    /**
+     * Construct a sensible default command line for invoking this application.
+     *
+     * @param application the command-line program class
+     * @return "-jar jarname" when running from a jar, otherwise the application's class name
+     * @throws ReviewedGATKException if the application's code location cannot be determined
+     */
+    public static String createDefaultRunningInstructions(Class<? extends CommandLineProgram> application) {
+        // Default implementation to find a command line that makes sense.
+        // If the user is running from a jar, return '-jar <jarname>'; otherwise
+        // return the full class name.
+        String runningInstructions = null;
+        try {
+            runningInstructions = JVMUtils.getLocationFor( application ).getName();
+        }
+        catch( IOException ex ) {
+            throw new ReviewedGATKException("Unable to determine running instructions", ex);
+        }
+
+        if( runningInstructions.endsWith(".jar") )
+            runningInstructions = String.format("-jar %s", runningInstructions);
+        else
+            runningInstructions = application.getName();
+
+        return runningInstructions;
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/help/DocletUtils.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/help/DocletUtils.java
new file mode 100644
index 0000000..ef15424
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/help/DocletUtils.java
@@ -0,0 +1,80 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.help;
+
+import com.sun.javadoc.FieldDoc;
+import com.sun.javadoc.PackageDoc;
+import com.sun.javadoc.ProgramElementDoc;
+import org.broadinstitute.gatk.utils.classloader.JVMUtils;
+
+import java.lang.reflect.Field;
+
+/**
+ * Methods in the class must ONLY be used by doclets, since the com.sun.javadoc.* classes are not
+ * available on all systems, and we don't want the GATK proper to depend on them.
+ */
+public class DocletUtils {
+
+    /**
+     * Is the class described by classDoc assignable to lhsClass (and, optionally, concrete)?
+     *
+     * @param classDoc the javadoc model of the class to test
+     * @param lhsClass the class it must be assignable to
+     * @param requireConcrete if true, additionally require the class to be concrete
+     * @return true if assignable (and concrete, when required); false on any error
+     */
+    protected static boolean assignableToClass(ProgramElementDoc classDoc, Class lhsClass, boolean requireConcrete) {
+        try {
+            Class type = getClassForDoc(classDoc);
+            return lhsClass.isAssignableFrom(type) && (!requireConcrete || JVMUtils.isConcrete(type));
+        } catch (Throwable t) {
+            // Ignore errors.
+            return false;
+        }
+    }
+
+    /**
+     * Load the Class described by the given javadoc model (using its binary name).
+     *
+     * @throws ClassNotFoundException if the class cannot be loaded
+     */
+    protected static Class getClassForDoc(ProgramElementDoc doc) throws ClassNotFoundException {
+        return Class.forName(getClassName(doc, true));
+    }
+
+    /**
+     * Find the reflective Field corresponding to the given javadoc field model.
+     * Any ClassNotFoundException is wrapped in a RuntimeException.
+     */
+    protected static Field getFieldForFieldDoc(FieldDoc fieldDoc) {
+        try {
+            Class clazz = getClassForDoc(fieldDoc.containingClass());
+            return JVMUtils.findField(clazz, fieldDoc.name());
+        } catch (ClassNotFoundException e) {
+            throw new RuntimeException(e);
+        }
+    }
+
+    /**
+     * Reconstitute the class name from the given class JavaDoc object.
+     *
+     * @param doc the Javadoc model for the given class.
+     * @param binaryName if true, nested-class dots are rewritten to '$' to form the binary name
+     * @return The (string) class name of the given class.
+     */
+    protected static String getClassName(ProgramElementDoc doc, boolean binaryName) {
+        PackageDoc containingPackage = doc.containingPackage();
+        String className = doc.name();
+        if (binaryName) {
+            className = className.replaceAll("\\.", "\\$");
+        }
+        return containingPackage.name().length() > 0 ?
+                String.format("%s.%s", containingPackage.name(), className) :
+                String.format("%s", className);
+    }
+}
\ No newline at end of file
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/help/DocumentedGATKFeature.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/help/DocumentedGATKFeature.java
new file mode 100644
index 0000000..f00fceb
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/help/DocumentedGATKFeature.java
@@ -0,0 +1,50 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.help;
+
+import java.lang.annotation.*;
+
/**
 * Marks a class as a documented GATK feature so the GATKDoclet will include it
 * in the generated GATKDocs.
 *
 * @author depristo
 */
@Documented
@Inherited
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.TYPE)
public @interface DocumentedGATKFeature {
    /** Should we actually document this feature, even though it's annotated? */
    boolean enable() default true;

    /** The overall group name (walkers, readfilters) this feature is associated with. */
    String groupName();

    /** A human-readable summary of the purpose of this group of features. */
    String summary() default "";

    /** Other documented classes to link from this feature's page (e.g. CommandLineGATK for walkers). */
    Class[] extraDocs() default {};

    /** The go-to developer for operation/documentation issues. */
    String gotoDev() default "NA";
}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/help/DocumentedGATKFeatureHandler.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/help/DocumentedGATKFeatureHandler.java
new file mode 100644
index 0000000..ce4ea5f
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/help/DocumentedGATKFeatureHandler.java
@@ -0,0 +1,99 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.help;
+
+import com.sun.javadoc.ClassDoc;
+import com.sun.javadoc.RootDoc;
+
+import java.io.*;
+import java.util.Set;
+
+/**
+ * Extend this class to provide a documentation handler for GATKdocs
+ */
+public abstract class DocumentedGATKFeatureHandler {
+    private GATKDoclet doclet;
+
+    /**
+     * @return the javadoc RootDoc of this javadoc run
+     */
+    protected RootDoc getRootDoc() {
+        return this.doclet.rootDoc;
+    }
+
+    /** Set the master doclet driving this handler */
+    public void setDoclet(GATKDoclet doclet) {
+        this.doclet = doclet;
+    }
+
+    /**
+     * @return the GATKDoclet driving this documentation run
+     */
+    public GATKDoclet getDoclet() {
+        return doclet;
+    }
+
+    /**
+     * Should return false iff this handler wants GATKDoclet to skip documenting
+     * this ClassDoc.
+     * @param doc that is being considered for inclusion in the docs
+     * @return true if the doclet should document ClassDoc doc
+     */
+    public boolean includeInDocs(ClassDoc doc) { return true; }
+
+    /**
+     * Return the flat filename (no paths) that the handler would like the Doclet to
+     * write out the documentation for ClassDoc doc and its associated Class clazz
+     * @param doc
+     * @param clazz
+     * @return
+     */
+    public String getDestinationFilename(ClassDoc doc, Class clazz) {
+        return GATKDocUtils.phpFilenameForClass(clazz, GATKDoclet.outputFileExtension);
+    }
+
+    /**
+     * Return the name of the FreeMarker template we will use to process ClassDoc doc.
+     *
+     * Note this is a flat filename relative to settings/helpTemplates in the GATK source tree
+     * @param doc
+     * @return
+     * @throws IOException
+     */
+    public abstract String getTemplateName(ClassDoc doc) throws IOException;
+
+    /**
+     * Actually generate the documentation map associated with toProcess
+     *
+     * Can use all to provide references and rootDoc for additional information, if necessary.
+     * Implementing methods should end with a call to setHandlerContext on toProcess, as in:
+     *
+     * toProcess.setHandlerContent(summary, rootMap);
+     *
+     * @param toProcess
+     */
+    public abstract void processOne(GATKDocWorkUnit toProcess);
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/help/DocumentedGATKFeatureObject.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/help/DocumentedGATKFeatureObject.java
new file mode 100644
index 0000000..1d0186c
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/help/DocumentedGATKFeatureObject.java
@@ -0,0 +1,61 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.help;
+
/**
 * Documentation unit.  Effectively a class version of the DocumentedGATKFeature
 * annotation.  Immutable data structure.
 *
 * @author depristo
 */
class DocumentedGATKFeatureObject {
    /** Which class are we documenting.  Specific to each class being documented */
    private final Class classToDoc;
    /** Are we enabled? */
    private final boolean enable;
    // Documentation group name, human-readable summary, and go-to developer.
    private final String groupName, summary, gotoDev;
    // Extra classes whose docs should be linked from this one; never exposed directly.
    private final Class[] extraDocs;

    /**
     * @param classToDoc class being documented
     * @param enable     should the class actually be documented?
     * @param groupName  documentation group (e.g. walkers, readfilters)
     * @param summary    human-readable summary of the group
     * @param extraDocs  extra classes to link to (null is treated as empty)
     * @param gotoDev    go-to developer for documentation issues
     */
    public DocumentedGATKFeatureObject(Class classToDoc, final boolean enable, final String groupName, final String summary, final Class[] extraDocs, final String gotoDev) {
        this.classToDoc = classToDoc;
        this.enable = enable;
        this.groupName = groupName;
        this.summary = summary;
        // Defensive copy: arrays are mutable, and this class advertises immutability.
        this.extraDocs = (extraDocs == null) ? new Class[0] : extraDocs.clone();
        this.gotoDev = gotoDev;
    }

    /** Convenience constructor for an enabled feature with no extra docs. */
    public DocumentedGATKFeatureObject(Class classToDoc, final String groupName, final String summary, final String gotoDev) {
        this(classToDoc, true, groupName, summary, new Class[]{}, gotoDev);
    }

    public Class getClassToDoc() { return classToDoc; }
    public boolean enable() { return enable; }
    public String groupName() { return groupName; }
    public String summary() { return summary; }
    /** Returns a copy, so callers cannot mutate this object's internal state. */
    public Class[] extraDocs() { return extraDocs.clone(); }
    public String gotoDev() { return gotoDev; }
}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/help/ForumAPIUtils.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/help/ForumAPIUtils.java
new file mode 100644
index 0000000..4a7853b
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/help/ForumAPIUtils.java
@@ -0,0 +1,173 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.help;
+
+import com.google.gson.Gson;
+import org.apache.commons.io.IOUtils;
+import org.apache.http.HttpResponse;
+import org.apache.http.client.ClientProtocolException;
+import org.apache.http.client.methods.HttpGet;
+import org.apache.http.client.methods.HttpPost;
+import org.apache.http.entity.StringEntity;
+import org.apache.http.impl.client.DefaultHttpClient;
+
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.net.MalformedURLException;
+import java.util.ArrayList;
+import java.util.List;
+
+public class ForumAPIUtils {
+    /**
+     * How we post to the forum
+     */
+    final private static String ACCESS_TOKEN = "access_token=";
+
+    public static List<String> getPostedTools(String forumKey) {
+        Gson gson = new Gson();
+        List<String> output = new ArrayList<String>();
+
+        String text = httpGet(HelpConstants.GATK_FORUM_API_URL + "categories.json?CategoryIdentifier=tool-bulletin&page=1-100000&" + ACCESS_TOKEN + forumKey);
+
+        APIQuery details = gson.fromJson(text, APIQuery.class);
+        ForumDiscussion[] discussions = details.Discussions;
+
+        for (ForumDiscussion post : discussions) {
+            output.add(post.Name);
+        }
+
+        /*
+        System.out.println(details.isJsonArray());
+        System.out.println(details.isJsonNull());
+        System.out.println(details.isJsonObject());
+        System.out.println(details.isJsonPrimitive());
+
+        JsonArray posted = details.getAsJsonPrimitive().get("Discussions").getAsJsonArray();
+
+        for ( JsonElement post : posted ) {
+            output.add( post.getAsJsonObject().get("Name").getAsString());
+        }
+        */
+        return output;
+    }
+
+
+    private static String httpGet(String urlStr) {
+        String output = "";
+
+        try {
+
+            DefaultHttpClient httpClient = new DefaultHttpClient();
+            HttpGet getRequest = new HttpGet(urlStr);
+            getRequest.addHeader("accept", "application/json");
+
+            HttpResponse response = httpClient.execute(getRequest);
+
+            if (response.getStatusLine().getStatusCode() != 200) {
+                throw new RuntimeException("Failed : HTTP error code : "
+                        + response.getStatusLine().getStatusCode());
+            }
+
+            output = IOUtils.toString(response.getEntity().getContent());
+
+            httpClient.getConnectionManager().shutdown();
+
+        } catch (ClientProtocolException e) {
+
+            e.printStackTrace();
+
+        } catch (IOException e) {
+
+            e.printStackTrace();
+        }
+        return output;
+    }
+
+    private static String httpPost(String data, String URL) {
+        try {
+
+            DefaultHttpClient httpClient = new DefaultHttpClient();
+            HttpPost postRequest = new HttpPost(URL);
+
+            StringEntity input = new StringEntity(data);
+            input.setContentType("application/json");
+            postRequest.setEntity(input);
+
+            HttpResponse response = httpClient.execute(postRequest);
+
+            if (response.getStatusLine().getStatusCode() != 200) {
+                throw new RuntimeException("Failed : HTTP error code : "
+                        + response.getStatusLine().getStatusCode());
+            }
+
+            BufferedReader br = new BufferedReader(
+                    new InputStreamReader((response.getEntity().getContent())));
+
+            String output = "";
+            String line;
+            System.out.println("Output from Server .... \n");
+            while ((line = br.readLine()) != null) {
+                output += (line + '\n');
+                System.out.println(line);
+            }
+
+            br.close();
+            httpClient.getConnectionManager().shutdown();
+            return output;
+
+        } catch (MalformedURLException e) {
+
+            e.printStackTrace();
+
+        } catch (IOException e) {
+
+            e.printStackTrace();
+
+        }
+        return null;
+    }
+
+    public static void postToForum(GATKDocWorkUnit tool, final String forumKey) {
+
+
+        ForumDiscussion post = new ForumDiscussion(tool);
+
+        Gson gson = new Gson();
+
+        String data = gson.toJson(post.getPostData());
+        httpPost(data, HelpConstants.GATK_FORUM_API_URL + "post/discussion.json?" + ACCESS_TOKEN + forumKey);
+
+
+    }
+
+    class APIQuery {
+        ForumDiscussion[] Discussions;
+
+        public APIQuery() {}
+    }
+
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/help/ForumDiscussion.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/help/ForumDiscussion.java
new file mode 100644
index 0000000..a1818dc
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/help/ForumDiscussion.java
@@ -0,0 +1,84 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.help;
+
+import java.util.HashMap;
+import java.util.Map;
+
+class ForumDiscussion {
+
+    final private static String POST_TEMPLATE = "<p>A new tool has been released!</p><p>Check out the documentation at <a href='%s'>%s</a>.</p>";
+
+    final int Announce;
+    final String Body;
+    final String Category;
+    final int Closed;
+    final String Format;
+    final String Name;
+    final int Sink;
+    final String Tags;
+    final String Type;
+
+    public ForumDiscussion(String name, String body, String format, String category,
+                           String tagsCSV, String type, int closed, int announce, int sink) {
+        this.Name = name;
+        this.Body = body;
+        this.Format = format;
+        this.Category = category;
+        this.Tags = tagsCSV;
+        this.Type = type;
+        this.Closed = closed;
+        this.Announce = announce;
+        this.Sink = sink;
+    }
+
+    public ForumDiscussion(GATKDocWorkUnit tool) {
+        this(tool.name,
+                String.format(POST_TEMPLATE, GATKDocUtils.URL_ROOT_FOR_RELEASE_GATKDOCS + tool.filename, tool.name),
+                "Html", "tool-bulletin", tool.name + "," + tool.group + ",gatkdocs", "Discussion", 0, -1, -1);
+    }
+
+    public Map<String, String> getPostData() {
+        Map<String, String> output = new HashMap<String, String>();
+
+        output.put("Name", Name);
+        output.put("Body", Body);
+        output.put("Format", Format);
+        output.put("Category", Category);
+        if (Tags != null)
+            output.put("Tags", Tags);
+        if (Type != null)
+            output.put("Type", Type);
+        if (Closed != -1)
+            output.put("Closed", Closed == 1 ? "1" : "0");
+        if (Announce != -1)
+            output.put("Announce", Announce == 1 ? "1" : "0");
+        if (Sink != -1)
+            output.put("Sink", Sink == 1 ? "1" : "0");
+
+        return output;
+    }
+}
\ No newline at end of file
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/help/GATKDocUtils.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/help/GATKDocUtils.java
new file mode 100644
index 0000000..75701d6
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/help/GATKDocUtils.java
@@ -0,0 +1,75 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.help;
+
+public class GATKDocUtils {
+    /**
+     * The URL root for RELEASED GATKDOC units
+     */
+    public final static String URL_ROOT_FOR_RELEASE_GATKDOCS = HelpConstants.GATK_DOCS_URL;
+    /**
+     * The URL root for STABLE GATKDOC units             //TODO: do sthing with this or remove -- URL goes nowhere
+     */
+    public final static String URL_ROOT_FOR_STABLE_GATKDOCS = "http://iwww.broadinstitute.org/gsa/gatkdocs/stable/";
+    /**
+     * The URL root for UNSTABLE GATKDOC units           //TODO: do sthing with this or remove -- URL goes nowhere
+     */
+    public final static String URL_ROOT_FOR_UNSTABLE_GATKDOCS = "http://iwww.broadinstitute.org/gsa/gatkdocs/unstable/";
+
+    /**
+     * Return the filename of the GATKDoc PHP that would be generated for Class.  This
+     * does not guarantee that the docs exist, or that docs would actually be generated
+     * for class (might not be annotated for documentation, for example).  But if
+     * this class is documented, GATKDocs will write the docs to a file named as returned
+     * by this function.
+     *
+     * @param c
+     * @return
+     */
+    public static String phpFilenameForClass(Class c) {
+        return phpFilenameForClass(c, "php");
+    }
+
+    public static String phpFilenameForClass(Class c, String extension) {
+        return c.getName().replace(".", "_") + "." + extension;
+    }
+
+    /**
+     * Returns a full URL http://etc/ linking to the documentation for class (assuming it
+     * exists).  Currently points to the RELEASE doc path only.     //TODO: do sthing with other paths or remove ?
+     *
+     * @param c
+     * @return
+     */
+    public static String helpLinksToGATKDocs(Class c) {
+        String classPath = phpFilenameForClass(c);
+        StringBuilder b = new StringBuilder();
+        b.append(URL_ROOT_FOR_RELEASE_GATKDOCS).append(classPath);
+        //b.append("stable   version: ").append(URL_ROOT_FOR_STABLE_GATKDOCS).append(classPath).append("\n");
+        //b.append("unstable version: ").append(URL_ROOT_FOR_UNSTABLE_GATKDOCS).append(classPath).append("\n");
+        return b.toString();
+    }
+}
\ No newline at end of file
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/help/GATKDocWorkUnit.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/help/GATKDocWorkUnit.java
new file mode 100644
index 0000000..89cdabb
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/help/GATKDocWorkUnit.java
@@ -0,0 +1,127 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.help;
+
+import com.sun.javadoc.ClassDoc;
+
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * Simple collection of all relevant information about something the GATKDoclet can document
+ * <p/>
+ * Created by IntelliJ IDEA.
+ * User: depristo
+ * Date: 7/24/11
+ * Time: 7:59 PM
+ */
+class GATKDocWorkUnit implements Comparable<GATKDocWorkUnit> {
+    /**
+     * The class that's being documented
+     */
+    final Class clazz;
+    /**
+     * The name of the thing we are documenting
+     */
+    final String name;
+    /**
+     * the filename where we will be writing the docs for this class
+     */
+    final String filename;
+    /**
+     * The name of the documentation group (e.g., walkers, read filters) class belongs to
+     */
+    final String group;
+    /**
+     * The documentation handler for this class
+     */
+    final DocumentedGATKFeatureHandler handler;
+    /**
+     * The javadoc documentation for clazz
+     */
+    final ClassDoc classDoc;
+    /**
+     * The annotation that lead to this Class being in GATKDoc
+     */
+    final DocumentedGATKFeatureObject annotation;
+    /**
+     * When was this walker built, and what's the absolute version number
+     */
+    final String buildTimestamp, absoluteVersion;
+
+    // set by the handler
+    String summary;
+    Map<String, Object> forTemplate; // this is where the actual doc content gets stored
+
+    public GATKDocWorkUnit(String name, String filename, String group, DocumentedGATKFeatureObject annotation,
+                           DocumentedGATKFeatureHandler handler, ClassDoc classDoc, Class clazz,
+                           String buildTimestamp, String absoluteVersion) {
+        this.annotation = annotation;
+        this.name = name;
+        this.filename = filename;
+        this.group = group;
+        this.handler = handler;
+        this.classDoc = classDoc;
+        this.clazz = clazz;
+        this.buildTimestamp = buildTimestamp;
+        this.absoluteVersion = absoluteVersion;
+    }
+
+    /**
+     * Called by the GATKDoclet to set handler provided context for this work unit
+     *
+     * @param summary
+     * @param forTemplate
+     */
+    public void setHandlerContent(String summary, Map<String, Object> forTemplate) {
+        this.summary = summary;
+        this.forTemplate = forTemplate;
+    }
+
+    /**
+     * Return a String -> String map suitable for FreeMarker to create an index to this WorkUnit
+     *
+     * @return
+     */
+    public Map<String, String> indexDataMap() {
+        Map<String, String> data = new HashMap<String, String>();
+        data.put("name", name);
+        data.put("summary", summary);
+        data.put("filename", filename);
+        data.put("group", group);
+        return data;
+    }
+
+    /**
+     * Sort in order of the name of this WorkUnit
+     *
+     * @param other
+     * @return
+     */
+    public int compareTo(GATKDocWorkUnit other) {
+        return this.name.compareTo(other.name);
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/help/GATKDoclet.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/help/GATKDoclet.java
new file mode 100644
index 0000000..015f2d3
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/help/GATKDoclet.java
@@ -0,0 +1,580 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.help;
+
+import com.google.gson.Gson;
+import com.google.gson.GsonBuilder;
+import com.sun.javadoc.ClassDoc;
+import com.sun.javadoc.RootDoc;
+import freemarker.template.Configuration;
+import freemarker.template.DefaultObjectWrapper;
+import freemarker.template.Template;
+import freemarker.template.TemplateException;
+import org.apache.commons.io.FileUtils;
+import org.apache.log4j.Level;
+import org.apache.log4j.Logger;
+import htsjdk.tribble.FeatureCodec;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+import org.broadinstitute.gatk.utils.exceptions.UserException;
+import org.broadinstitute.gatk.utils.text.XReadLines;
+
+import java.io.*;
+import java.util.*;
+
+/**
+ * Javadoc Doclet that combines javadoc, GATK ParsingEngine annotations, and FreeMarker
+ * templates to produce PHP formatted GATKDocs for classes.
+ * <p/>
+ * This document has the following workflow:
+ * <p/>
+ * 1 -- walk the javadoc hierarchy, looking for class that have the
+ * DocumentedGATKFeature annotation or are in the type hierarchy in the
+ * static list of things to document, and are to be documented
+ * 2 -- construct for each a GATKDocWorkUnit, resulting in the complete
+ * set of things to document
+ * 3 -- for each unit, actually generate a PHP page documenting it
+ * as well as links to related features via their units.  Writing
+ * of a specific class PHP is accomplished by a generate DocumentationHandler
+ * 4 -- write out an index of all units, organized by group
+ * 5 -- emit JSON version of GATKDocs using Google GSON (currently incomplete but workable)
+ * <p/>
+ * The documented classes are restricted to only those with @DocumentedGATKFeature
+ * annotation or are in the STATIC_DOCS class.
+ */
+public abstract class GATKDoclet {
+    final protected static Logger logger = Logger.getLogger(GATKDoclet.class);
+
+    /**
+     * Where we find the help FreeMarker templates
+     */
+    final protected static File SETTINGS_DIR = new File("settings/helpTemplates");
+
+    /**
+     * Where we write the GATKDoc PHP directory
+     */
+    final protected static File DESTINATION_DIR = new File("gatkdocs");
+
+    /** Default location of the forum API key used to post new tool docs to the support forum */
+    final private static String FORUM_KEY_PATH = "/local/gsa-engineering/gatkdocs_publisher/forum.key";
+
+    final private static String OUTPUT_FILE_EXTENSION = "php";
+
+    /** Controls the extension of the non-json output files, and also the HREFs to these files.  Default: php */
+    final private static String OUTPUT_FILE_EXTENSION_OPTION = "-output-file-extension";
+    // ----------------------------------------------------------------------
+    //
+    // Global variables that are set on the command line by javadoc
+    //
+    // ----------------------------------------------------------------------
+    protected static File settingsDir = SETTINGS_DIR;
+    protected static File destinationDir = DESTINATION_DIR;
+    protected static String forumKeyPath = FORUM_KEY_PATH;
+    protected static String buildTimestamp = null, absoluteVersion = null;
+    protected static boolean showHiddenFeatures = false;
+    protected static String outputFileExtension = OUTPUT_FILE_EXTENSION;
+
+    /** When true, only the classes in getTestOnlyKeepers() are documented (debugging aid) */
+    protected static boolean testOnly = false;
+
+    /**
+     * The javadoc root doc
+     */
+    RootDoc rootDoc;
+
+    /**
+     * The set of all things we are going to document
+     */
+    Set<GATKDocWorkUnit> myWorkUnits;
+
+    /**
+     * A static list of DocumentedGATKFeatureObjects.  Any class that is as or extends
+     * one of the DocumentedGATKFeatureObjects.clazz of this collection will also
+     * be documented, even if it doesn't have the @DocumentedGATKFeature annotation.  Useful
+     * when you want to document things that implement an interface (annotations on java
+     * interfaces aren't inherited) or whose base class isn't under your control (tribble
+     * codecs).
+     */
+    final static Collection<DocumentedGATKFeatureObject> STATIC_DOCS = new ArrayList<DocumentedGATKFeatureObject>();
+
+    static {
+        STATIC_DOCS.add(new DocumentedGATKFeatureObject(FeatureCodec.class,
+                HelpConstants.DOCS_CAT_RODCODECS,
+                "Tribble codecs for reading reference ordered data (ROD) files such as VCF or BED",
+                "NA"));
+    }
+
+    /**
+     * Entry point called once javadoc has parsed the sources: reads the doclet's
+     * command-line options, validates the template directory, and generates all docs.
+     *
+     * @param rootDoc The documentation root.
+     * @return Whether the JavaDoc run succeeded.
+     * @throws java.io.IOException if output can't be written.
+     */
+    protected boolean startProcessDocs(RootDoc rootDoc) throws IOException {
+        logger.setLevel(Level.INFO);
+
+        // load arguments passed to javadoc as doclet options
+        for (String[] options : rootDoc.options()) {
+            if (options[0].equals("-settings-dir"))
+                settingsDir = new File(options[1]);
+            if (options[0].equals("-destination-dir"))
+                destinationDir = new File(options[1]);
+            if (options[0].equals("-forum-key-path"))
+                forumKeyPath = options[1];
+            if (options[0].equals("-build-timestamp"))
+                buildTimestamp = options[1];
+            if (options[0].equals("-absolute-version"))
+                absoluteVersion = options[1];
+            if (options[0].equals("-include-hidden"))
+                showHiddenFeatures = true;
+            if (options[0].equals("-test"))
+                testOnly = true;
+            if (options[0].equals(OUTPUT_FILE_EXTENSION_OPTION)) {
+                outputFileExtension = options[1];
+            }
+        }
+
+        if (!settingsDir.exists())
+            throw new RuntimeException("-settings-dir " + settingsDir.getPath() + " does not exist");
+        else if (!settingsDir.isDirectory())
+            throw new RuntimeException("-settings-dir " + settingsDir.getPath() + " is not a directory");
+
+        // process the docs
+        processDocs(rootDoc);
+
+        return true;
+    }
+
+    /**
+     * Validate the given options against options supported by this doclet.
+     *
+     * @param option Option to validate.
+     * @return Number of tokens the option consumes (including the option itself); 0 if not supported.
+     */
+    public static int optionLength(String option) {
+        // NOTE(review): -include-hidden is handled as a no-argument flag in
+        // startProcessDocs() but returns 2 here, so javadoc expects it to be
+        // followed by an argument -- confirm against the build scripts before changing.
+        if (option.equals("-settings-dir") ||
+                option.equals("-destination-dir") ||
+                option.equals("-forum-key-path") ||
+                option.equals("-build-timestamp") ||
+                option.equals("-absolute-version") ||
+                option.equals("-include-hidden") ||
+                option.equals(OUTPUT_FILE_EXTENSION_OPTION)) {
+            return 2;
+        } else if (option.equals("-test"))
+            return 1;
+        else
+            return 0;
+    }
+
+    /**
+     * Are we supposed to include @Hidden annotations in our documented output?
+     *
+     * @return true if the -include-hidden option was supplied
+     */
+    public boolean showHiddenFeatures() {
+        return showHiddenFeatures;
+    }
+
+    /**
+     * Any class that's in this list will be included in the documentation
+     * when the -test argument is provided.  Useful for debugging.
+     * Subclasses, such as WalkerDoclet, may add additional classes for debugging.
+     */
+    protected List<Class<?>> getTestOnlyKeepers() {
+        return Collections.<Class<?>>singletonList(UserException.class);
+    }
+
+    /**
+     * Main worker: computes all work units, renders one page per unit, writes the
+     * index, and (when a forum key file exists) pushes new tools to the forum.
+     *
+     * @param rootDoc the javadoc root for this run
+     */
+    private void processDocs(RootDoc rootDoc) {
+        // setup the global access to the root
+        this.rootDoc = rootDoc;
+
+        try {
+            // print the Version number
+            FileUtils.writeByteArrayToFile(new File(destinationDir + "/current.version.txt"), getSimpleVersion(absoluteVersion).getBytes());
+
+            /* ------------------------------------------------------------------- */
+            /* You should do this ONLY ONCE in the whole application life-cycle:   */
+
+            Configuration cfg = new Configuration();
+            // Specify the data source where the template files come from.
+            cfg.setDirectoryForTemplateLoading(settingsDir);
+            // Specify how templates will see the data-model. This is an advanced topic...
+            cfg.setObjectWrapper(new DefaultObjectWrapper());
+
+            myWorkUnits = computeWorkUnits();
+
+            // build the per-unit index entries and the distinct group list;
+            // both are also handed to every per-unit template
+            List<Map<String, String>> groups = new ArrayList<Map<String, String>>();
+            Set<String> seenDocumentationFeatures = new HashSet<String>();
+            List<Map<String, String>> data = new ArrayList<Map<String, String>>();
+            for (GATKDocWorkUnit workUnit : myWorkUnits) {
+                data.add(workUnit.indexDataMap());
+                if (!seenDocumentationFeatures.contains(workUnit.annotation.groupName())) {
+                    groups.add(toMap(workUnit.annotation));
+                    seenDocumentationFeatures.add(workUnit.annotation.groupName());
+                }
+            }
+
+            for (GATKDocWorkUnit workUnit : myWorkUnits) {
+                processDocWorkUnit(cfg, workUnit, groups, data);
+            }
+
+            processIndex(cfg, new ArrayList<GATKDocWorkUnit>(myWorkUnits));
+
+            File forumKeyFile = new File(forumKeyPath);
+            if (forumKeyFile.exists()) {
+                String forumKey = null;
+                // Read in a one-line file so we can do a for loop
+                for (String line : new XReadLines(forumKeyFile))
+                    forumKey = line;
+                updateForum(myWorkUnits, forumKey);
+            }
+        } catch (IOException e) {
+            // FileNotFoundException is a subclass of IOException, so a single catch covers both
+            throw new RuntimeException(e);
+        }
+    }
+
+    /**
+     * Post documentation for any tool not already present on the support forum.
+     *
+     * @param docWorkUnits all documented units for this run
+     * @param forumKey     API key authorizing forum posts
+     */
+    private void updateForum(Set<GATKDocWorkUnit> docWorkUnits, String forumKey) {
+        //first get list of posts that need to be added
+        List<String> old = ForumAPIUtils.getPostedTools(forumKey);
+
+        for (String s : old)
+            System.out.println(s);
+
+        System.out.printf("Forum has %d items%n", old.size());
+        System.out.printf("Docs have %d items%n", docWorkUnits.size());
+
+        List<GATKDocWorkUnit> toAdd = new ArrayList<GATKDocWorkUnit>();
+        for (GATKDocWorkUnit tool : docWorkUnits) {
+            if (!old.contains(tool.name)) {
+                System.out.println("WILL POST: " + tool.name + " TO FORUM");
+                toAdd.add(tool);
+            }
+        }
+
+        //update using list
+        for (GATKDocWorkUnit tool : toAdd) {
+            ForumAPIUtils.postToForum(tool, forumKey);
+        }
+    }
+
+    /**
+     * Returns the set of all GATKDocWorkUnits that we are going to generate docs for.
+     *
+     * @return work units sorted by name (TreeSet uses GATKDocWorkUnit.compareTo)
+     */
+    private Set<GATKDocWorkUnit> computeWorkUnits() {
+        TreeSet<GATKDocWorkUnit> m = new TreeSet<GATKDocWorkUnit>();
+
+        for (ClassDoc doc : rootDoc.classes()) {
+            Class clazz = getClassForClassDoc(doc);
+
+            // don't add anything that's not DocumentationTest if we are in test mode
+            if (clazz != null && testOnly && !getTestOnlyKeepers().contains(clazz))
+                continue;
+
+            DocumentedGATKFeatureObject feature = getFeatureForClassDoc(doc);
+            DocumentedGATKFeatureHandler handler = createHandler(doc, feature);
+            if (handler != null && handler.includeInDocs(doc)) {
+                String filename = handler.getDestinationFilename(doc, clazz);
+                GATKDocWorkUnit unit = new GATKDocWorkUnit(doc.name(),
+                        filename, feature.groupName(), feature, handler, doc, clazz,
+                        buildTimestamp, absoluteVersion);
+                m.add(unit);
+            }
+        }
+
+        return m;
+    }
+
+    /**
+     * Create a handler capable of documenting the class doc according to feature.  Returns
+     * null if no appropriate handler is found or doc shouldn't be documented at all.
+     *
+     * @param doc     the class being considered for documentation
+     * @param feature the feature annotation data for doc, or null if undocumented
+     * @return a configured handler, or null when the feature is missing or disabled
+     */
+    private DocumentedGATKFeatureHandler createHandler(ClassDoc doc, DocumentedGATKFeatureObject feature) {
+        if (feature != null) {
+            if (feature.enable()) {
+                DocumentedGATKFeatureHandler handler = createDocumentedGATKFeatureHandler();
+                handler.setDoclet(this);
+                return handler;
+            } else {
+                logger.info("Skipping disabled Documentation for " + doc);
+            }
+        }
+
+        return null;
+    }
+
+    /** Create the handler used to document a single GATK feature; supplied by concrete doclets. */
+    protected abstract DocumentedGATKFeatureHandler createDocumentedGATKFeatureHandler();
+
+    /**
+     * Returns the instantiated DocumentedGATKFeatureObject that describes the GATKDoc
+     * structure we will apply to Doc.
+     *
+     * @param doc the class to look up
+     * @return null if this proves inappropriate or doc shouldn't be documented
+     */
+    private DocumentedGATKFeatureObject getFeatureForClassDoc(ClassDoc doc) {
+        Class<? extends Object> docClass = getClassForClassDoc(doc);
+
+        if (docClass == null)
+            return null; // not annotated so it shouldn't be documented
+
+        if (docClass.isAnnotationPresent(DocumentedGATKFeature.class)) {
+            DocumentedGATKFeature f = docClass.getAnnotation(DocumentedGATKFeature.class);
+            return new DocumentedGATKFeatureObject(docClass, f.enable(), f.groupName(), f.summary(), f.extraDocs(), f.gotoDev());
+        } else {
+            // fall back to the static docs list for classes (e.g. tribble codecs)
+            // that can't carry the annotation themselves
+            for (DocumentedGATKFeatureObject staticDocs : STATIC_DOCS) {
+                if (staticDocs.getClassToDoc().isAssignableFrom(docClass)) {
+                    return new DocumentedGATKFeatureObject(docClass, staticDocs.enable(), staticDocs.groupName(), staticDocs.summary(), staticDocs.extraDocs(), staticDocs.gotoDev());
+                }
+            }
+            return null;
+        }
+    }
+
+    /**
+     * Return the Java class described by the ClassDoc doc
+     *
+     * @param doc the javadoc class descriptor
+     * @return the loaded class, or null when it cannot be resolved at runtime
+     */
+    private Class<? extends Object> getClassForClassDoc(ClassDoc doc) {
+        try {
+            // todo -- what do I need the ? extends Object to pass the compiler?
+            return (Class<? extends Object>) DocletUtils.getClassForDoc(doc);
+        } catch (ClassNotFoundException e) {
+            // we got a classdoc for a class we can't find.  Maybe in a library or something
+            return null;
+        } catch (NoClassDefFoundError e) {
+            return null;
+        } catch (UnsatisfiedLinkError e) {
+            return null; // naughty BWA bindings
+        }
+    }
+
+    /**
+     * Create the php index listing all of the GATKDocs features
+     *
+     * @param cfg       FreeMarker configuration to load the index template from
+     * @param indexData all work units to list in the index
+     * @throws IOException if the index file can't be written
+     */
+    private void processIndex(Configuration cfg, List<GATKDocWorkUnit> indexData) throws IOException {
+        /* Get or create a template */
+        Template temp = cfg.getTemplate("generic.index.template.html");
+
+        /* Merge data-model with template */
+        Writer out = new OutputStreamWriter(new FileOutputStream(new File(destinationDir + "/index." + outputFileExtension)));
+        try {
+            temp.process(groupIndexData(indexData), out);
+            out.flush();
+        } catch (TemplateException e) {
+            throw new ReviewedGATKException("Failed to create GATK documentation", e);
+        } finally {
+            out.close(); // previously leaked: the writer was never closed
+        }
+    }
+
+    /**
+     * Helpful function to create the php index.  Given all of the already run GATKDocWorkUnits,
+     * create the high-level grouping data listing individual features by group.
+     *
+     * @param indexData all work units; sorted in place by name
+     * @return the FreeMarker root data model for the index template
+     */
+    private Map<String, Object> groupIndexData(List<GATKDocWorkUnit> indexData) {
+        //
+        // root -> data -> { summary -> y, filename -> z }, etc
+        //      -> groups -> group1, group2, etc.
+        Map<String, Object> root = new HashMap<String, Object>();
+
+        Collections.sort(indexData);
+
+        List<Map<String, String>> groups = new ArrayList<Map<String, String>>();
+        Set<String> seenDocumentationFeatures = new HashSet<String>();
+        List<Map<String, String>> data = new ArrayList<Map<String, String>>();
+        for (GATKDocWorkUnit workUnit : indexData) {
+            data.add(workUnit.indexDataMap());
+            if (!seenDocumentationFeatures.contains(workUnit.annotation.groupName())) {
+                groups.add(toMap(workUnit.annotation));
+                seenDocumentationFeatures.add(workUnit.annotation.groupName());
+            }
+        }
+
+        root.put("data", data);
+        root.put("groups", groups);
+        root.put("timestamp", buildTimestamp);
+        root.put("version", absoluteVersion);
+
+        return root;
+    }
+
+    /**
+     * Trivial helper routine that returns the map of name and summary given the annotation
+     * AND adds a super-category so that we can custom-order the categories in the index
+     *
+     * @param annotation the feature annotation data to convert
+     * @return map with id, name, summary, and supercat entries for the index template
+     */
+    private static final Map<String, String> toMap(DocumentedGATKFeatureObject annotation) {
+        Map<String, String> root = new HashMap<String, String>();
+        root.put("id", annotation.groupName().replaceAll("\\W", ""));
+        root.put("name", annotation.groupName());
+        root.put("summary", annotation.summary());
+
+        /**
+         * Add-on super-category definitions. The assignments depend on parsing the names
+         * defined in HelpConstants.java so be careful of changing anything.
+         * Also, the super-category value strings need to be the same as used in the
+         * Freemarker template. This is all fairly clunky but the best I could do without
+         * making major changes to the DocumentedGATKFeatureObject. Doesn't help that
+         * Freemarker makes any scripting horribly awkward.
+         */
+        final String supercatValue;
+        if (annotation.groupName().endsWith(" Tools")) supercatValue = "tools";
+        else if (annotation.groupName().endsWith(" Utilities")) supercatValue = "utilities";
+        else if (annotation.groupName().startsWith("Engine ")) supercatValue = "engine";
+        else if (annotation.groupName().endsWith(" (DevZone)")) supercatValue = "dev";
+        else supercatValue = "other";
+
+        root.put("supercat", supercatValue);
+
+        return root;
+    }
+
+    /**
+     * Helper function that finding the GATKDocWorkUnit associated with class from among all of the work units
+     *
+     * @param c the class we are looking for
+     * @return the GATKDocWorkUnit whose .clazz.equals(c), or null if none could be found
+     */
+    public final GATKDocWorkUnit findWorkUnitForClass(Class c) {
+        for (final GATKDocWorkUnit unit : this.myWorkUnits)
+            if (unit.clazz.equals(c))
+                return unit;
+        return null;
+    }
+
+    /**
+     * Return the ClassDoc associated with clazz
+     *
+     * @param clazz the class to look up in the javadoc root
+     * @return the ClassDoc, or null if javadoc didn't parse this class
+     */
+    public ClassDoc getClassDocForClass(Class clazz) {
+        return rootDoc.classNamed(clazz.getName());
+    }
+
+    /**
+     * High-level function that processes a single DocWorkUnit unit using its handler,
+     * then writes both the templated page and a JSON companion file.
+     *
+     * @param cfg    FreeMarker configuration to load the unit's template from
+     * @param unit   the work unit to render
+     * @param groups group index entries shared by all pages
+     * @param data   per-unit index entries shared by all pages
+     * @throws IOException if either output file can't be written
+     */
+    private void processDocWorkUnit(Configuration cfg, GATKDocWorkUnit unit, List<Map<String, String>> groups, List<Map<String, String>> data)
+            throws IOException {
+        unit.handler.processOne(unit);
+        unit.forTemplate.put("groups", groups);
+        unit.forTemplate.put("data", data);
+        // Get or create a template
+        Template temp = cfg.getTemplate(unit.handler.getTemplateName(unit.classDoc));
+
+        // Merge data-model with template
+        File outputPath = new File(destinationDir + "/" + unit.filename);
+        Writer out = new OutputStreamWriter(new FileOutputStream(outputPath));
+        try {
+            temp.process(unit.forTemplate, out);
+            out.flush();
+        } catch (TemplateException e) {
+            throw new ReviewedGATKException("Failed to create GATK documentation", e);
+        } finally {
+            out.close(); // previously leaked: the writer was never closed
+        }
+
+        // Create GSON-friendly object from unit.forTemplate
+        GSONWorkUnit gsonworkunit = new GSONWorkUnit();
+        gsonworkunit.populate(  unit.forTemplate.get("summary").toString(),
+                                unit.forTemplate.get("parallel"),
+                                unit.forTemplate.get("activeregion"),
+                                unit.forTemplate.get("partitiontype").toString(),
+                                unit.forTemplate.get("walkertype").toString(),
+                                unit.forTemplate.get("gson-arguments"),
+                                unit.forTemplate.get("refwindow"),
+                                unit.forTemplate.get("description").toString(),
+                                unit.forTemplate.get("name").toString(),
+                                unit.forTemplate.get("annotinfo").toString(),
+                                unit.forTemplate.get("readfilters"),
+                                unit.forTemplate.get("downsampling"),
+                                unit.forTemplate.get("group").toString(),
+                                unit.forTemplate.get("annotfield").toString(),
+                                unit.forTemplate.get("annotdescript")
+        );
+
+        // Prepare to write JSON entry to file
+        File outputPathForJSON = new File(destinationDir + "/" + unit.filename + ".json");
+
+        BufferedWriter outJSON = null;
+        try {
+            outJSON = new BufferedWriter(new FileWriter(outputPathForJSON));
+            // Convert object to JSON
+            Gson gson = new GsonBuilder()
+                .serializeSpecialFloatingPointValues()
+                .setPrettyPrinting()
+                .create();
+            String json = gson.toJson(gsonworkunit); // was run on unit.forTemplate
+            outJSON.write(json);
+        } catch (Exception e) {
+            throw new ReviewedGATKException("Failed to create JSON entry", e);
+        } finally {
+            if (outJSON != null)
+                outJSON.close(); // previously leaked when the write failed
+        }
+    }
+
+    /**
+     * Reduce a full version string to its first two '-'-separated fields,
+     * e.g. "3.5-0-g36282e4" becomes "3.5-0".
+     *
+     * @param absoluteVersion the full version string
+     * @return the shortened version string
+     */
+    private static String getSimpleVersion(String absoluteVersion) {
+        String[] parts = absoluteVersion.split("-");
+
+        // guard: a version with no '-' previously threw ArrayIndexOutOfBoundsException
+        if (parts.length < 2)
+            return parts[0];
+
+        return parts[0] + "-" + parts[1];
+    }
+
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/help/GSONArgument.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/help/GSONArgument.java
new file mode 100644
index 0000000..d081156
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/help/GSONArgument.java
@@ -0,0 +1,83 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.help;
+
+import java.util.List;
+import java.util.Map;
+
+/**
+ * GSON-friendly version of the argument bindings: a flat bag of String fields
+ * (plus the enum-option list) that Gson serializes directly into the per-tool
+ * JSON documentation files. Field names become the JSON keys.
+ */
+public class GSONArgument {
+
+    String summary;
+    String name;
+    String synonyms;
+    String type;
+    String required;
+    String fulltext;
+    String defaultValue;
+    String minValue;
+    String maxValue;
+    String minRecValue;
+    String maxRecValue;
+    String rodTypes;
+    String kind;
+    List<Map<String, Object>> options;
+
+    /**
+     * Copy all argument-binding values into this object in one call so it is
+     * ready for Gson serialization.
+     * NOTE(review): 14 positional parameters of mostly the same type are easy
+     * to transpose at the call site; a builder would be safer if this grows.
+     */
+    public void populate(   String summary,
+                            String name,
+                            String synonyms,
+                            String type,
+                            String required,
+                            String fulltext,
+                            String defaultValue,
+                            String minValue,
+                            String maxValue,
+                            String minRecValue,
+                            String maxRecValue,
+                            String rodTypes,
+                            String kind,
+                            List<Map<String, Object>> options
+    ) {
+        this.summary = summary;
+        this.name = name;
+        this.synonyms = synonyms;
+        this.type = type;
+        this.required = required;
+        this.fulltext = fulltext;
+        this.defaultValue = defaultValue;
+        this.minValue = minValue;
+        this.maxValue = maxValue;
+        this.minRecValue = minRecValue;
+        this.maxRecValue = maxRecValue;
+        this.rodTypes = rodTypes;
+        this.kind = kind;
+        this.options = options;
+    }
+
+
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/help/GSONWorkUnit.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/help/GSONWorkUnit.java
new file mode 100644
index 0000000..e9f2d4a
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/help/GSONWorkUnit.java
@@ -0,0 +1,86 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.help;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+
+/**
+ * GSON-friendly version of the GATKDocWorkUnit: a flat container whose fields
+ * Gson serializes directly into the per-tool JSON documentation file.
+ * Object-typed fields hold opaque values taken from the FreeMarker data model
+ * as-is; String-typed fields are stringified by the caller.
+ */
+public class GSONWorkUnit {
+
+    String summary;
+    Object parallel;
+    Object activeregion;
+    String partitiontype;
+    String walkertype;
+    Object arguments;
+    Object refwindow;
+    String description;
+    String name;
+    String annotinfo;
+    Object readfilters;
+    Object downsampling;
+    String group;
+    String annotfield;
+    Object annotdescript;
+
+    /**
+     * Copy all documentation values into this object in one call so it is
+     * ready for Gson serialization.
+     * NOTE(review): many positional parameters are easy to transpose at the
+     * call site; a builder would be safer if this grows.
+     */
+    public void populate(String summary,
+                         Object parallel,
+                         Object activeregion,
+                         String partitiontype,
+                         String walkertype,
+                         Object arguments,
+                         Object refwindow,
+                         String description,
+                         String name,
+                         String annotinfo,
+                         Object readfilters,
+                         Object downsampling,
+                         String group,
+                         String annotfield,
+                         Object annotdescript
+    ) {
+        this.summary = summary;
+        this.parallel = parallel;
+        this.activeregion = activeregion;
+        this.partitiontype = partitiontype;
+        this.walkertype = walkertype;
+        this.arguments = arguments;
+        this.refwindow = refwindow;
+        this.description = description;
+        this.name = name;
+        this.annotinfo = annotinfo;
+        this.readfilters = readfilters;
+        this.downsampling = downsampling;
+        this.group = group;
+        this.annotfield = annotfield;
+        this.annotdescript = annotdescript;
+    }
+
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/help/GenericDocumentationHandler.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/help/GenericDocumentationHandler.java
new file mode 100644
index 0000000..6dad680
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/help/GenericDocumentationHandler.java
@@ -0,0 +1,722 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.help;
+
+import com.google.java.contract.Ensures;
+import com.google.java.contract.Requires;
+import com.sun.javadoc.ClassDoc;
+import com.sun.javadoc.FieldDoc;
+import com.sun.javadoc.Tag;
+import org.apache.log4j.Logger;
+import htsjdk.tribble.Feature;
+import org.broadinstitute.gatk.utils.commandline.*;
+import org.broadinstitute.gatk.utils.refdata.tracks.FeatureManager;
+import org.broadinstitute.gatk.utils.Utils;
+import org.broadinstitute.gatk.utils.classloader.JVMUtils;
+import org.broadinstitute.gatk.utils.collections.Pair;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+import org.broadinstitute.gatk.utils.exceptions.GATKException;
+
+import java.io.IOException;
+import java.lang.reflect.*;
+import java.util.*;
+
+/**
+ * Documentation handler that converts a single GATKDocWorkUnit into the
+ * FreeMarker binding map rendered by the generic GATK doc template
+ * ("generic.template.html").  It gathers the class-level javadoc, every
+ * command-line argument discovered through the parsing engine, links to
+ * related documentation, and a GSON-friendly copy of the argument list.
+ * Subclasses supply the CommandLineProgram used for argument parsing and any
+ * extra per-class annotation harvesting.
+ */
+public abstract class GenericDocumentationHandler extends DocumentedGATKFeatureHandler {
+    private static Logger logger = Logger.getLogger(GenericDocumentationHandler.class);
+
+    /**
+     * The max. length of the longest of --fullName -shortName argument name
+     * before we prefer the shorter option.
+     *
+     * NOTE(review): no longer referenced anywhere in this class since
+     * displayNames() stopped doing length-based swapping -- candidate for removal.
+     */
+    private static final int MAX_DISPLAY_NAME = 30;
+
+    /**
+     * The Class we are documenting
+     */
+    private GATKDocWorkUnit toProcess;
+
+    /**
+     * A class is documented unless it is @Hidden (while hidden features are not
+     * being shown) or is not concrete.  Classes whose Class object cannot be
+     * loaded are silently excluded.
+     */
+    @Override
+    public boolean includeInDocs(ClassDoc doc) {
+        try {
+            Class type = DocletUtils.getClassForDoc(doc);
+            boolean hidden = !getDoclet().showHiddenFeatures() && type.isAnnotationPresent(Hidden.class);
+            return !hidden && JVMUtils.isConcrete(type);
+        } catch (ClassNotFoundException e) {
+            return false;
+        }
+    }
+
+
+    @Override
+    public String getTemplateName(ClassDoc doc) throws IOException {
+        return "generic.template.html";
+    }
+
+    /**
+     * Builds the full FreeMarker binding map for toProcessArg and stores it on
+     * the work unit via setHandlerContent(), using the "summary" binding as the
+     * unit's summary text.
+     */
+    @Override
+    public void processOne(GATKDocWorkUnit toProcessArg) {
+        this.toProcess = toProcessArg;
+
+        //System.out.printf("%s class %s%n", toProcess.group, toProcess.classDoc);
+        Map<String, Object> root = new HashMap<String, Object>();
+
+        addHighLevelBindings(root);
+        addArgumentBindings(root);
+        addRelatedBindings(root);
+        root.put("group", toProcess.group);
+
+        // Adding in retrieval of peripheral info (rf annotations etc)
+        getClazzAnnotations(toProcess.clazz, root);
+
+        toProcess.setHandlerContent((String) root.get("summary"), root);
+    }
+
+    /**
+     * Add high-level summary information about toProcess to root, such as its
+     * name, summary, description, version, etc.
+     *
+     * @param root the FreeMarker binding map to populate
+     */
+    protected void addHighLevelBindings(Map<String, Object> root) {
+        root.put("name", toProcess.classDoc.name());
+
+        // Extract overrides from the doc tags.
+        StringBuilder summaryBuilder = new StringBuilder();
+        for (Tag tag : toProcess.classDoc.firstSentenceTags())
+            summaryBuilder.append(tag.text());
+        root.put("summary", summaryBuilder.toString());
+        // The description is everything after the first sentence.
+        // NOTE(review): assumes the concatenated first-sentence text is a literal
+        // prefix of commentText(); inline tags in the first sentence could break
+        // that assumption -- confirm.
+        root.put("description", toProcess.classDoc.commentText().substring(summaryBuilder.toString().length()));
+        root.put("timestamp", toProcess.buildTimestamp);
+        root.put("version", toProcess.absoluteVersion);
+
+        // Expose every block tag (e.g. @author) verbatim as its own binding.
+        for (Tag tag : toProcess.classDoc.tags()) {
+            root.put(tag.name(), tag.text());
+        }
+
+        root.put("gotoDev", toProcess.annotation.gotoDev());
+    }
+
+    /**
+     * Add bindings describing related GATK capabilites to toProcess
+     *
+     * @param root the FreeMarker binding map to populate; gains an "extradocs"
+     *             list of {filename, name} maps, one per class named in the
+     *             feature annotation's extraDocs()
+     */
+    protected void addRelatedBindings(Map<String, Object> root) {
+        List<Map<String, Object>> extraDocsData = new ArrayList<Map<String, Object>>();
+
+        // add in all of the explicitly related items
+        for (final Class extraDocClass : toProcess.annotation.extraDocs()) {
+            final GATKDocWorkUnit otherUnit = getDoclet().findWorkUnitForClass(extraDocClass);
+            if (otherUnit == null)
+                throw new ReviewedGATKException("Requested extraDocs for class without any documentation: " + extraDocClass);
+            extraDocsData.add(
+                    new HashMap<String, Object>() {{
+                        put("filename", otherUnit.filename);
+                        put("name", otherUnit.name);
+                    }});
+        }
+        root.put("extradocs", extraDocsData);
+    }
+
+    /**
+     * Add information about all of the arguments available to toProcess to root
+     *
+     * @param root the FreeMarker binding map to populate; gains "arguments"
+     *             (kind -> list of per-argument maps, see createArgumentMap())
+     *             and "gson-arguments" (flat GSONArgument list for JSON output)
+     */
+    protected void addArgumentBindings(Map<String, Object> root) {
+        ParsingEngine parsingEngine = createParsingEngine();
+
+        Map<String, List<Map<String, Object>>> args = createArgumentMap();
+        root.put("arguments", args);
+        try {
+            // loop over all of the arguments according to the parsing engine
+            for (final ArgumentSource argumentSource : parsingEngine.extractArgumentSources(DocletUtils.getClassForDoc(toProcess.classDoc))) {
+                ArgumentDefinition argDef = argumentSource.createArgumentDefinitions().get(0);
+                FieldDoc fieldDoc = getFieldDoc(toProcess.classDoc, argumentSource.field.getName());
+                Map<String, Object> argBindings = docForArgument(fieldDoc, argumentSource, argDef);
+                if (!argumentSource.isHidden() || getDoclet().showHiddenFeatures()) {
+                    final String kind = docKindOfArg(argumentSource);
+                    argBindings.put("kind", kind);
+                    // Retrieve default value
+                    final Object value = argumentValue(toProcess.clazz, argumentSource);
+                    if (value != null) {
+                        argBindings.put("defaultValue", prettyPrintValueString(value));
+                    } else {
+                        argBindings.put("defaultValue", "NA");
+                    }
+                    // Retrieve min and max / hard and soft value thresholds for numeric args
+                    if (value instanceof Number) {
+                        if (argumentSource.field.isAnnotationPresent(Argument.class))   {
+                            argBindings.put("minValue", argumentSource.field.getAnnotation(Argument.class).minValue());
+                            argBindings.put("maxValue", argumentSource.field.getAnnotation(Argument.class).maxValue());
+                            if (argumentSource.field.getAnnotation(Argument.class).minRecommendedValue() != Double.NEGATIVE_INFINITY) {
+                                argBindings.put("minRecValue", argumentSource.field.getAnnotation(Argument.class).minRecommendedValue());
+                            } else {
+                                argBindings.put("minRecValue", "NA");
+                            }
+                            if (argumentSource.field.getAnnotation(Argument.class).maxRecommendedValue() != Double.POSITIVE_INFINITY) {
+                                argBindings.put("maxRecValue", argumentSource.field.getAnnotation(Argument.class).maxRecommendedValue());
+                            } else {
+                                argBindings.put("maxRecValue", "NA");
+                            }
+                        }
+                    } else {
+                        // NOTE(review): this branch also resets "defaultValue" to
+                        // "NA", clobbering the non-null, non-numeric default stored
+                        // above -- confirm this is intended.
+                        argBindings.put("minValue", "NA");
+                        argBindings.put("maxValue", "NA");
+                        argBindings.put("minRecValue", "NA");
+                        argBindings.put("maxRecValue", "NA");
+                        argBindings.put("defaultValue", "NA");
+                    }
+                    // Finalize argument bindings
+                    args.get(kind).add(argBindings);
+                    args.get("all").add(argBindings);
+                }
+            }
+
+            // sort the arguments
+            for (Map.Entry<String, List<Map<String, Object>>> entry : args.entrySet()) {
+                entry.setValue(sortArguments(entry.getValue()));
+            }
+            // make a GSON-friendly map of arguments -- uses some hacky casting
+            List<GSONArgument> allGSONArgs = new ArrayList<GSONArgument>();
+            for ( Map<String, Object> item : args.get("all")) {
+                GSONArgument itemGSONArg = new GSONArgument();
+
+                itemGSONArg.populate(item.get("summary").toString(),
+                        item.get("name").toString(),
+                        item.get("synonyms").toString(),
+                        item.get("type").toString(),
+                        item.get("required").toString(),
+                        item.get("fulltext").toString(),
+                        item.get("defaultValue").toString(),
+                        item.get("minValue").toString(),
+                        item.get("maxValue").toString(),
+                        item.get("minRecValue").toString(),
+                        item.get("maxRecValue").toString(),
+                        item.get("rodTypes").toString(),
+                        item.get("kind").toString(),
+                        (List<Map<String, Object>>)item.get("options")
+                );
+                allGSONArgs.add(itemGSONArg);
+            }
+            root.put("gson-arguments", allGSONArgs);
+
+        } catch (ClassNotFoundException e) {
+            throw new RuntimeException(e);
+        }
+    }
+
+    /**
+     * Return the argument kind (required, advanced, hidden, etc) of this argumentSource
+     *
+     * @param argumentSource the argument to classify
+     * @return one of the argument-map keys built by createArgumentMap(), e.g.
+     *         "required_in" or "advanced_flag".  By evaluation order, required
+     *         status takes precedence over advanced, which takes precedence
+     *         over hidden and deprecated.
+     */
+    @Requires("argumentSource != null")
+    @Ensures("result != null")
+    private String docKindOfArg(ArgumentSource argumentSource) {
+        if (argumentSource.isRequired()) {
+            if (argumentSource.isInput()) return "required_in";
+            else if (argumentSource.isOutput()) return "required_out";
+            else if (argumentSource.isFlag()) return "required_flag";
+            else return "required_param";
+            }
+        else if (argumentSource.isAdvanced()) {
+            if (argumentSource.isInput()) return "advanced_in";
+            else if (argumentSource.isOutput()) return "advanced_out";
+            else if (argumentSource.isFlag()) return "advanced_flag";
+            else return "advanced_param";
+        }
+        else if (argumentSource.isHidden()) return "hidden";
+        else if (argumentSource.isDeprecated()) return "deprecated";
+        else {
+            if (argumentSource.isInput()) return "optional_in";
+            else if (argumentSource.isOutput()) return "optional_out";
+            else if (argumentSource.isFlag()) return "optional_flag";
+            else return "optional_param";
+        }
+    }
+
+    /**
+     * Attempts to determine the value of argumentSource in an instantiated version of c
+     *
+     * NOTE(review): the parameter c is never read -- the instance is always made
+     * from toProcess.clazz below.  Either use c or drop the parameter.
+     *
+     * @param c nominally the class to instantiate (currently unused, see note)
+     * @param argumentSource the argument whose field value we want
+     * @return value of argumentSource, or null if this isn't possible
+     */
+    @Requires({"c != null", "argumentSource != null"})
+    private Object argumentValue(Class c, ArgumentSource argumentSource) {
+        // get the value of the field
+        // attempt to instantiate the class
+        final Object instance = makeInstanceIfPossible(toProcess.clazz);
+        if (instance != null) {
+            final Object value = getFieldValue(instance, argumentSource.field.getName());
+            if (value != null)
+                return value;
+
+            if (argumentSource.createsTypeDefault()) {
+                try { // handle the case where there's an implicit default
+                    return argumentSource.typeDefaultDocString();
+                } catch (ReviewedGATKException e) {
+                    ; // failed to create type default, don't worry about it
+                }
+            }
+        }
+
+        return null;
+    }
+
+    /**
+     * Create the argument map for holding class arguments
+     *
+     * @return a map pre-populated with one empty list per argument kind
+     *         (every key that docKindOfArg() can return, plus "all")
+     */
+    private Map<String, List<Map<String, Object>>> createArgumentMap() {
+        Map<String, List<Map<String, Object>>> args = new HashMap<String, List<Map<String, Object>>>();
+        args.put("all", new ArrayList<Map<String, Object>>());
+        args.put("required_in", new ArrayList<Map<String, Object>>());
+        args.put("required_out", new ArrayList<Map<String, Object>>());
+        args.put("required_param", new ArrayList<Map<String, Object>>());
+        args.put("required_flag", new ArrayList<Map<String, Object>>());
+        args.put("optional_in", new ArrayList<Map<String, Object>>());
+        args.put("optional_out", new ArrayList<Map<String, Object>>());
+        args.put("optional_param", new ArrayList<Map<String, Object>>());
+        args.put("optional_flag", new ArrayList<Map<String, Object>>());
+        args.put("advanced_in", new ArrayList<Map<String, Object>>());
+        args.put("advanced_out", new ArrayList<Map<String, Object>>());
+        args.put("advanced_param", new ArrayList<Map<String, Object>>());
+        args.put("advanced_flag", new ArrayList<Map<String, Object>>());
+        args.put("hidden", new ArrayList<Map<String, Object>>());
+        args.put("deprecated", new ArrayList<Map<String, Object>>());
+        return args;
+    }
+
+
+    /**
+     * Sorts the individual argument list in unsorted according to CompareArgumentsByName
+     *
+     * @param unsorted the list to sort; sorted in place
+     * @return the same list instance, now sorted
+     */
+    private List<Map<String, Object>> sortArguments(List<Map<String, Object>> unsorted) {
+        Collections.sort(unsorted, new CompareArgumentsByName());
+        return unsorted;
+    }
+
+    /**
+     * Sort arguments by case-insensitive comparison ignoring the -- and - prefixes.
+     * Throws if an argument's "name" binding does not start with at least one -.
+     */
+    private class CompareArgumentsByName implements Comparator<Map<String, Object>> {
+        public int compare(Map<String, Object> x, Map<String, Object> y) {
+            return elt(x).compareTo(elt(y));
+        }
+
+        // Lower-cased argument name with its -- or - prefix stripped.
+        private String elt(Map<String, Object> m) {
+            String v = m.get("name").toString().toLowerCase();
+            if (v.startsWith("--"))
+                return v.substring(2);
+            else if (v.startsWith("-"))
+                return v.substring(1);
+            else
+                throw new RuntimeException("Expect to see arguments beginning with at least one -, but found " + v);
+        }
+    }
+
+    /**
+     * Umbrella function that groups the collection of values for specific annotations applied to an
+     * instance of class c. Lists of collected values are added directly to the "toProcess" object.
+     * Requires being able to instantiate the class.
+     *
+     * @param classToProcess the class to instantiate and query for the annotation
+     * @param root the FreeMarker binding map to which collected annotations are stored
+     */
+    protected abstract void getClazzAnnotations(Class classToProcess, Map<String, Object> root);
+
+    /**
+     * Utility function that finds the value of fieldName in any fields of ArgumentCollection fields in
+     * instance of class c.
+     *
+     * @param instance  the object to query for the field value
+     * @param fieldName the name of the field we are looking for in instance
+     * @return The value assigned to field in the ArgumentCollection, otherwise null
+     */
+    private Object getFieldValue(Object instance, String fieldName) {
+        //
+        // subtle note.  If you have a field named X that is an ArgumentCollection that
+        // contains a field X as well, you need only consider fields in the argumentCollection, not
+        // matching the argument itself.
+        //
+        // @ArgumentCollection
+        // protected DbsnpArgumentCollection dbsnp = new DbsnpArgumentCollection();
+        //
+
+        for (Field field : JVMUtils.getAllFields(instance.getClass())) {
+            if (field.isAnnotationPresent(ArgumentCollection.class)) {
+                //System.out.printf("Searching for %s in argument collection field %s%n", fieldName, field);
+                // Recurse into the collection object rather than match its own name.
+                Object fieldValue = JVMUtils.getFieldValue(field, instance);
+                Object value = getFieldValue(fieldValue, fieldName);
+                if (value != null)
+                    return value;
+            } else if (field.getName().equals(fieldName)) {
+                return JVMUtils.getFieldValue(field, instance);
+            }
+        }
+
+        return null;
+    }
+
+    /**
+     * Pretty prints value
+     * <p/>
+     * Assumes value != null
+     *
+     * @param value the default value to render (never null)
+     * @return a display string: Arrays.toString for arrays, "none" for
+     *         RodBindings, quoted empty string for "", otherwise toString()
+     */
+    private Object prettyPrintValueString(Object value) {
+        if (value.getClass().isArray()) {
+            Class type = value.getClass().getComponentType();
+            if (boolean.class.isAssignableFrom(type))
+                return Arrays.toString((boolean[]) value);
+            if (byte.class.isAssignableFrom(type))
+                return Arrays.toString((byte[]) value);
+            if (char.class.isAssignableFrom(type))
+                return Arrays.toString((char[]) value);
+            if (double.class.isAssignableFrom(type))
+                return Arrays.toString((double[]) value);
+            if (float.class.isAssignableFrom(type))
+                return Arrays.toString((float[]) value);
+            if (int.class.isAssignableFrom(type))
+                return Arrays.toString((int[]) value);
+            if (long.class.isAssignableFrom(type))
+                return Arrays.toString((long[]) value);
+            if (short.class.isAssignableFrom(type))
+                return Arrays.toString((short[]) value);
+            // NOTE(review): the else-throw below is unreachable -- any
+            // non-primitive component type satisfies this Object check.
+            if (Object.class.isAssignableFrom(type))
+                return Arrays.toString((Object[]) value);
+            else
+                throw new RuntimeException("Unexpected array type in prettyPrintValue.  Value was " + value + " type is " + type);
+        } else if (RodBinding.class.isAssignableFrom(value.getClass())) {
+            // annoying special case to handle the UnBound() constructor
+            return "none";
+        } else if (value instanceof String) {
+            return value.equals("") ? "\"\"" : value;
+        } else {
+            return value.toString();
+        }
+    }
+
+    /**
+     * Attempt to instantiate class c, if possible.  Returns null if this proves impossible.
+     *
+     * All instantiation failures are deliberately swallowed: this is a
+     * best-effort probe used only to read default field values for the docs.
+     *
+     * @param c the class to instantiate via its no-arg constructor
+     * @return a new instance of c, or null if instantiation failed or was not attempted
+     */
+    protected Object makeInstanceIfPossible(Class c) {
+        Object instance = null;
+        try {
+            // don't try to make something where we will obviously fail
+            // NOTE(review): the single & before JVMUtils.isConcrete(c) is the
+            // non-short-circuiting boolean AND -- harmless here but presumably
+            // && was intended.
+            if (!c.isEnum() && !c.isAnnotation() && !c.isAnonymousClass() &&
+                    !c.isArray() && !c.isPrimitive() & JVMUtils.isConcrete(c)) {
+                instance = c.newInstance();
+                //System.out.printf("Created object of class %s => %s%n", c, instance);
+                return instance;
+            } else
+                return null;
+        } catch (IllegalAccessException e) {
+        } catch (InstantiationException e) {
+        } catch (ExceptionInInitializerError e) {
+        } catch (SecurityException e) {
+        }
+        // this last one is super dangerous, but some of these methods catch ClassNotFoundExceptions
+        // and rethrow then as RuntimeExceptions
+        catch (RuntimeException e) {
+        }
+
+        return instance;
+    }
+
+
+    /**
+     * Create an instance of the GATK parsing engine, for argument processing with GATKDoclet
+     *
+     * @return the parser of a freshly started CommandLineProgram
+     *         (started with an empty argv; the third start() argument
+     *         presumably suppresses actual execution -- confirm)
+     */
+    private ParsingEngine createParsingEngine() {
+        CommandLineProgram clp = createCommandLineProgram();
+        try {
+            CommandLineProgram.start(clp, new String[]{}, true);
+            return clp.parser;
+        } catch (Exception e) {
+            throw new RuntimeException(e);
+        }
+    }
+
+    /**
+     * @return a fresh CommandLineProgram whose parser can discover this feature's arguments
+     */
+    protected abstract CommandLineProgram createCommandLineProgram();
+
+    /**
+     * Gets the javadocs associated with field name in classDoc.  Throws a
+     * runtime exception if this proves impossible.
+     *
+     * @param classDoc the class documentation to search
+     * @param name the name of the field to find
+     * @return the FieldDoc for name (never null; throws instead)
+     */
+    private FieldDoc getFieldDoc(ClassDoc classDoc, String name) {
+        return getFieldDoc(classDoc, name, true);
+    }
+
+    /**
+     * Recursive helper routine to getFieldDoc()
+     *
+     * Searches classDoc's own fields, recurses into @ArgumentCollection field
+     * types, then walks up the superclass chain.
+     *
+     * NOTE(review): when a superclass exists, the call delegates with
+     * primary=false, so a complete miss then returns null instead of throwing
+     * even on the primary call -- confirm this is intended.
+     *
+     * @param classDoc the class documentation to search
+     * @param name the name of the field to find
+     * @param primary true only for the top-level call; a primary miss throws,
+     *                a non-primary miss returns null
+     * @return the FieldDoc for name, or null on a non-primary miss
+     */
+    private FieldDoc getFieldDoc(ClassDoc classDoc, String name, boolean primary) {
+        //System.out.printf("Looking for %s in %s%n", name, classDoc.name());
+        for (FieldDoc fieldDoc : classDoc.fields(false)) {
+            //System.out.printf("fieldDoc " + fieldDoc + " name " + fieldDoc.name());
+            if (fieldDoc.name().equals(name))
+                return fieldDoc;
+
+            Field field = DocletUtils.getFieldForFieldDoc(fieldDoc);
+            if (field == null)
+                throw new RuntimeException("Could not find the field corresponding to " + fieldDoc + ", presumably because the field is inaccessible");
+            if (field.isAnnotationPresent(ArgumentCollection.class)) {
+                ClassDoc typeDoc = getRootDoc().classNamed(fieldDoc.type().qualifiedTypeName());
+                if (typeDoc == null)
+                    throw new ReviewedGATKException("Tried to get javadocs for ArgumentCollection field " + fieldDoc + " but could't find the class in the RootDoc");
+                else {
+                    FieldDoc result = getFieldDoc(typeDoc, name, false);
+                    if (result != null)
+                        return result;
+                    // else keep searching
+                }
+            }
+        }
+
+        // if we didn't find it here, wander up to the superclass to find the field
+        if (classDoc.superclass() != null) {
+            return getFieldDoc(classDoc.superclass(), name, false);
+        }
+
+        if (primary)
+            throw new RuntimeException("No field found for expected field " + name);
+        else
+            return null;
+    }
+
+    /**
+     * Returns a Pair of (main, synonym) names for argument with fullName s1 and
+     * shortName s2.
+     *
+     * Previously we had it so the main name was selected to be the longest of the two, provided
+     * it didn't exceed MAX_DISPLAY_NAME, in which case the shorter was taken. But we now disable
+     * the length-based name rearrangement in order to maintain consistency in the GATKDocs table.
+     *
+     * This may cause messed up spacing in the CLI-help display but we don't care as much about that
+     * since more users use the online GATKDocs for looking up arguments.
+     *
+     * @param s1 the short argument name without -, or null if not provided
+     * @param s2 the long argument name without --, or null if not provided
+     * @return A pair of fully qualified names (with - or --) for the argument.  The first
+     *         element is the primary display name while the second (potentially null) is a
+     *         synonymous name.
+     */
+    Pair<String, String> displayNames(String s1, String s2) {
+        s1 = s1 == null ? null : "-" + s1;
+        s2 = s2 == null ? null : "--" + s2;
+
+        if (s1 == null) return new Pair<String, String>(s2, null);
+        if (s2 == null) return new Pair<String, String>(s1, null);
+
+        // Both present: long form is primary, short form is the synonym.
+        return new Pair<String, String>(s2, s1);
+    }
+
+    /**
+     * Returns a human readable string that describes the Type type of a GATK argument.
+     * <p/>
+     * This will include parameterized types, so that Set{T} shows up as Set(T) and not
+     * just Set in the docs.
+     *
+     * @param type the reflected generic type of the argument field
+     * @return a simple-name rendering, e.g. List[String] or int[]
+     */
+    protected String argumentTypeString(Type type) {
+        if (type instanceof ParameterizedType) {
+            ParameterizedType parameterizedType = (ParameterizedType) type;
+            List<String> subs = new ArrayList<String>();
+            for (Type actualType : parameterizedType.getActualTypeArguments())
+                subs.add(argumentTypeString(actualType));
+            return argumentTypeString(((ParameterizedType) type).getRawType()) + "[" + Utils.join(",", subs) + "]";
+        } else if (type instanceof GenericArrayType) {
+            return argumentTypeString(((GenericArrayType) type).getGenericComponentType()) + "[]";
+        } else if (type instanceof WildcardType) {
+            throw new RuntimeException("We don't support wildcards in arguments: " + type);
+        } else if (type instanceof Class<?>) {
+            return ((Class) type).getSimpleName();
+        } else {
+            throw new GATKException("Unknown type: " + type);
+        }
+    }
+
+    /**
+     * Helper routine that returns the Feature.class required by a RodBinding,
+     * either T for RodBinding{T} or List{RodBinding{T}}.  Returns null if
+     * the Type doesn't fit either model.
+     *
+     * Recurses through type arguments, so any nesting depth with a RodBinding
+     * inside is found.
+     *
+     * @param type the reflected generic type of the argument field
+     * @return the bound Feature subclass, or null if none is present
+     */
+    protected Class<? extends Feature> getFeatureTypeIfPossible(Type type) {
+        if (type instanceof ParameterizedType) {
+            ParameterizedType paramType = (ParameterizedType) type;
+            if (RodBinding.class.isAssignableFrom((Class<?>) paramType.getRawType())) {
+                return (Class<? extends Feature>) JVMUtils.getParameterizedTypeClass(type);
+            } else {
+                for (Type paramtype : paramType.getActualTypeArguments()) {
+                    Class<? extends Feature> x = getFeatureTypeIfPossible(paramtype);
+                    if (x != null)
+                        return x;
+                }
+            }
+        }
+
+        return null;
+    }
+
+    /**
+     * High-level entry point for creating a FreeMarker map describing the GATK argument
+     * source with definition def, with associated javadoc fieldDoc.
+     *
+     * @param fieldDoc the javadoc of the argument's field
+     * @param source the argument source being documented
+     * @param def the argument's first definition (names, required flag, doc, options)
+     * @return a non-null Map binding argument keys with their values
+     */
+    protected Map<String, Object> docForArgument(FieldDoc fieldDoc, ArgumentSource source, ArgumentDefinition def) {
+        Map<String, Object> root = new HashMap<String, Object>();
+        Pair<String, String> names = displayNames(def.shortName, def.fullName);
+
+        root.put("name", names.getFirst());
+
+        if (names.getSecond() != null) {
+            root.put("synonyms", names.getSecond());
+        } else {
+            root.put("synonyms", "NA");
+        }
+
+        root.put("required", def.required ? "yes" : "no");
+
+        // type of the field
+        root.put("type", argumentTypeString(source.field.getGenericType()));
+
+        Class<? extends Feature> featureClass = getFeatureTypeIfPossible(source.field.getGenericType());
+        if (featureClass != null) {
+            // deal with the allowable types
+            FeatureManager manager = new FeatureManager();
+            List<String> rodTypes = new ArrayList<String>();
+            for (FeatureManager.FeatureDescriptor descriptor : manager.getByFeature(featureClass)) {
+                // Each accepted codec is rendered as an HTML link to its own doc page.
+                rodTypes.add(String.format("<a href=%s>%s</a>",
+                        GATKDocUtils.phpFilenameForClass(descriptor.getCodecClass()),
+                        descriptor.getName()));
+            }
+
+            root.put("rodTypes", Utils.join(", ", rodTypes));
+        } else {
+            root.put("rodTypes", "NA");
+        }
+
+        // summary and fulltext
+        root.put("summary", def.doc != null ? def.doc : "");
+        root.put("fulltext", fieldDoc.commentText());
+
+        // What are our enum options?
+        if (def.validOptions != null) {
+            root.put("options", docForEnumArgument(source.field.getType()));
+        } else {
+            root.put("options", new ArrayList());
+        }
+        // general attributes
+        List<String> attributes = new ArrayList<String>();
+        if (def.required) attributes.add("required");
+        if (source.isDeprecated()) attributes.add("deprecated");
+        if (attributes.size() > 0) {
+            root.put("attributes", Utils.join(", ", attributes));
+        } else {
+            root.put("attributes", "NA");
+        }
+        return root;
+    }
+
+    /**
+     * Helper routine that provides a FreeMarker map for an enumClass, grabbing the
+     * values of the enum and their associated javadoc documentation.
+     *
+     * @param enumClass the enum type whose constants should be documented
+     * @return one {name, summary} map per enum constant that has a FieldDoc
+     */
+    @Requires("enumClass.isEnum()")
+    private List<Map<String, Object>> docForEnumArgument(final Class enumClass) {
+        final ClassDoc doc = this.getDoclet().getClassDocForClass(enumClass);
+        if ( doc == null )
+            throw new RuntimeException("Tried to get docs for enum " + enumClass + " but got null instead");
+
+        final Set<String> enumConstantFieldNames = enumConstantsNames(enumClass);
+
+        final List<Map<String, Object>> bindings = new ArrayList<Map<String, Object>>();
+        for (final FieldDoc fieldDoc : doc.fields(false)) {
+            // Only document the enum constants themselves, not other fields.
+            if (enumConstantFieldNames.contains(fieldDoc.name()) )
+                bindings.add(
+                        new HashMap<String, Object>() {{
+                            put("name", fieldDoc.name());
+                            put("summary", fieldDoc.commentText());
+                        }});
+        }
+
+        return bindings;
+    }
+
+    /**
+     * Returns the name of the fields that are enum constants according to reflection
+     *
+     * @param enumClass the enum type to inspect
+     * @return a non-null set of fields that are enum constants
+     */
+    private Set<String> enumConstantsNames(final Class enumClass) {
+        final Set<String> enumConstantFieldNames = new HashSet<String>();
+
+        for ( final Field field : enumClass.getFields() ) {
+            if ( field.isEnumConstant() )
+                enumConstantFieldNames.add(field.getName());
+        }
+
+        return enumConstantFieldNames;
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/help/HelpConstants.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/help/HelpConstants.java
new file mode 100644
index 0000000..707cc8f
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/help/HelpConstants.java
@@ -0,0 +1,82 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.help;
+
+/**
+ * Shared string constants for the GATK help system: documentation/forum URLs,
+ * parallelism argument names, tool category names used to build the doc index,
+ * and developer name codes used for tracking.
+ */
+public class HelpConstants {
+
+    public final static String BASE_GATK_URL = "http://www.broadinstitute.org/gatk";
+    public final static String GATK_DOCS_URL = BASE_GATK_URL + "/guide/tooldocs/";
+    public final static String GATK_FORUM_URL = "http://gatkforums.broadinstitute.org/";
+    public final static String GATK_FORUM_API_URL = "https://gatkforums.broadinstitute.org/api/v1/";
+
+    /**
+     * Arguments for parallelism options
+     */
+    public final static String ARG_TREEREDUCIBLE = "-nt";
+    public final static String ARG_NANOSCHEDULABLE = "-nct";
+
+    /**
+     * Definition of the group names / categories of tools.
+     * The names get parsed to make supercategories in the doc index,
+     * so be careful when making big changes -- see GATKDoclet.java toMap()
+     */
+    public final static String DOCS_CAT_DATA = "Sequence Data Processing Tools";
+    public final static String DOCS_CAT_QC = "Diagnostics and Quality Control Tools";
+    public final static String DOCS_CAT_ENGINE = "Engine Parameters (available to all tools)";
+    public final static String DOCS_CAT_RF = "Read Filters";
+    public final static String DOCS_CAT_REFUTILS = "Reference Utilities";
+    public final static String DOCS_CAT_RODCODECS = "ROD Codecs";
+    public final static String DOCS_CAT_USRERR = "User Exceptions (DevZone)";
+    public final static String DOCS_CAT_VALIDATION = "Validation Utilities";
+    public final static String DOCS_CAT_ANNOT = "Variant Annotations";
+    public final static String DOCS_CAT_VARDISC = "Variant Discovery Tools";
+    public final static String DOCS_CAT_VARMANIP = "Variant Evaluation and Manipulation Tools";
+    public final static String DOCS_CAT_TOY = "Toy Walkers (DevZone)";
+    public final static String DOCS_CAT_HELPUTILS = "Help Utilities";
+
+    /**
+     * Builds the full forum URL for the given post path.
+     * @param post post path, appended verbatim to GATK_FORUM_URL
+     * @return the full forum URL
+     */
+    public static String forumPost(String post) {
+    	return GATK_FORUM_URL + post;
+    }
+
+    /**
+     * Go-to developer name codes for tracking and display purposes. Only current team members should be in this list.
+     * When someone leaves, their charges should be redistributed. The actual string should be closest to the dev's
+     * abbreviated name or two/three-letter nickname as possible. The code can be something else if necessary to
+     * disambiguate from other variable.
+     */
+    public final static String MC = "MC"; // Mauricio Carneiro
+    public final static String EB = "EB"; // Eric Banks
+    public final static String RP = "RP"; // Ryan Poplin
+    public final static String GVDA = "GG"; // Geraldine Van der Auwera -- NOTE(review): constant name GVDA vs. code "GG"; intentional per the disambiguation rule above, but confirm
+    public final static String VRR = "VRR"; // Valentin Ruano-Rubio
+    public final static String ALM = "ALM"; // Ami Levy-Moonshine
+    public final static String BH = "BH"; // Bertrand Haas
+    public final static String JoT = "JT"; // Joel Thibault
+    public final static String DR = "DR"; // David Roazen
+    public final static String KS = "KS"; // Khalid Shakir
+
+
+}
\ No newline at end of file
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/help/HelpFormatter.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/help/HelpFormatter.java
new file mode 100644
index 0000000..5a55f27
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/help/HelpFormatter.java
@@ -0,0 +1,336 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.help;
+
+import org.apache.log4j.Logger;
+import org.broadinstitute.gatk.utils.commandline.*;
+import org.broadinstitute.gatk.utils.Utils;
+import org.broadinstitute.gatk.utils.text.TextFormattingUtils;
+
+import java.net.InetAddress;
+import java.text.DateFormat;
+import java.text.SimpleDateFormat;
+import java.util.*;
+/**
+ * Print out help for GATK command-line applications.
+ * Renders a usage synopsis plus a two-column (argument / description) detail
+ * listing, word-wrapped to TextFormattingUtils.DEFAULT_LINE_WIDTH.
+ */
+
+public class HelpFormatter {
+    /** our log, which we want to capture anything from org.broadinstitute.gatk */
+    private static Logger logger = Logger.getLogger(HelpFormatter.class);
+
+    /** Number of spaces separating the argument column from the documentation column. */
+    public static final int FIELD_SEPARATION_WIDTH = 3;
+
+    /**
+     * Prints the help, given a collection of argument definitions.
+     * @param applicationDetails Application details
+     * @param argumentDefinitions Argument definitions for which help should be printed.
+     */
+    public void printHelp( ApplicationDetails applicationDetails, ArgumentDefinitions argumentDefinitions ) {
+        List<ArgumentDefinitionGroup> argumentGroups = prepareArgumentGroups( argumentDefinitions );
+
+        List<String> header = applicationDetails.applicationHeader;
+        String barrier = createBarrier(header);
+
+        // Header and attribution sections, each delimited by a '-' barrier line.
+        System.out.printf("%s%n",barrier);
+        for(String headerLine: header)
+            System.out.printf("%s%n",headerLine);
+        System.out.printf("%s%n",barrier);
+        for(String attributionLine: applicationDetails.attribution)
+            System.out.printf("%s%n",attributionLine);
+        System.out.printf("%s%n",barrier);
+
+        String synopsis = getSynopsis(applicationDetails.runningInstructions,argumentGroups);
+        String additionalDetails = applicationDetails.additionalHelp != null ? applicationDetails.additionalHelp : "";
+        String detailedDescription = getDetailed(argumentGroups);
+
+        System.out.printf("%s%n%s%n%s%n",synopsis,detailedDescription,additionalDetails );
+    }
+
+    /**
+     * Gets the synopsis: the actual command to run.
+     * @param runningInstructions Instructions on how to run the application.
+     * @param argumentGroups Program arguments sorted in order of definition group displays.
+     * @return A synopsis line.
+     */
+    private String getSynopsis( String runningInstructions,
+                                List<ArgumentDefinitionGroup> argumentGroups ) {
+        // Build out the synopsis all as one long line.
+        StringBuilder lineBuilder = new StringBuilder();
+        Formatter lineFormatter = new Formatter( lineBuilder );
+
+        lineFormatter.format("java %s", runningInstructions);
+
+        for( ArgumentDefinitionGroup argumentGroup: argumentGroups ) {
+            for( ArgumentDefinition argumentDefinition: argumentGroup.argumentDefinitions ) {
+                if(argumentDefinition.isHidden)
+                    continue;
+                lineFormatter.format(" ");
+                // Optional arguments are wrapped in [...]; flags omit the <value> placeholder.
+                if( !argumentDefinition.required ) lineFormatter.format("[");
+                if( argumentDefinition.shortName != null )
+                    lineFormatter.format("-%s", argumentDefinition.shortName);
+                else
+                    lineFormatter.format("--%s", argumentDefinition.fullName);
+                if( !argumentDefinition.isFlag )
+                    lineFormatter.format(" <%s>", argumentDefinition.fullName);                
+                if( !argumentDefinition.required ) lineFormatter.format("]");
+            }
+        }
+
+        // Word wrap the synopsis.
+        List<String> wrappedSynopsis = TextFormattingUtils.wordWrap( lineBuilder.toString(), TextFormattingUtils.DEFAULT_LINE_WIDTH );
+
+        // Print "usage: " before the first wrapped line only; subsequent lines get
+        // an empty header padded to the same width so the text stays aligned.
+        String header = "usage: ";
+        int headerLength = header.length();
+
+        StringBuilder synopsisBuilder = new StringBuilder();
+        Formatter synopsisFormatter = new Formatter(synopsisBuilder);
+        for( String synopsisLine: wrappedSynopsis ) {
+            synopsisFormatter.format("%" + headerLength + "s%s%n", header, synopsisLine);
+            header = "";
+        }
+
+        return synopsisBuilder.toString();
+    }
+
+    /**
+     * Gets detailed output about each argument type.
+     * @param argumentGroups Collection of program arguments sorted according to how they should be shown.
+     * @return Detailed text about all arguments.
+     */
+    private String getDetailed( List<ArgumentDefinitionGroup> argumentGroups ) {
+        StringBuilder builder = new StringBuilder();
+
+        for( ArgumentDefinitionGroup argumentGroup: argumentGroups )
+            builder.append( getDetailForGroup( argumentGroup ) );
+
+        return builder.toString();
+    }
+
+    /**
+     * Gets a detailed description for a given argument group.
+     * @param argumentDefinitionGroup The group of argument definitions to render.
+     * @return A string giving detailed info about the contents of this group.
+     */
+    private String getDetailForGroup( ArgumentDefinitionGroup argumentDefinitionGroup ) {
+        if(argumentDefinitionGroup.allHidden())
+            return "";
+
+        StringBuilder builder = new StringBuilder();
+        Formatter formatter = new Formatter( builder );
+
+        if( argumentDefinitionGroup.groupName != null && argumentDefinitionGroup.argumentDefinitions.size() != 0 )
+            builder.append( String.format("%nArguments for %s:%n", argumentDefinitionGroup.groupName ) );
+
+        // Filter out hidden arguments before computing column widths.
+        List<ArgumentDefinition> argumentDefinitions = new ArrayList<ArgumentDefinition>();
+        for(ArgumentDefinition argumentDefinition: argumentDefinitionGroup.argumentDefinitions) {
+            if(!argumentDefinition.isHidden)
+                argumentDefinitions.add(argumentDefinition);
+        }
+
+        // Try to fit the entire argument definition across the screen, but impose an arbitrary cap of 3/4 *
+        // LINE_WIDTH in case the length of the arguments gets out of control.
+        int argWidth = Math.min( findLongestArgumentCallingInfo(argumentDefinitions), (TextFormattingUtils.DEFAULT_LINE_WIDTH*3)/4 - FIELD_SEPARATION_WIDTH );
+        int docWidth = TextFormattingUtils.DEFAULT_LINE_WIDTH - argWidth - FIELD_SEPARATION_WIDTH;
+
+        for( ArgumentDefinition argumentDefinition: argumentDefinitions ) {
+            Iterator<String> wordWrappedArgs = TextFormattingUtils.wordWrap( getArgumentCallingInfo(argumentDefinition), argWidth ).iterator();
+            Iterator<String> wordWrappedDoc  = TextFormattingUtils.wordWrap( getArgumentDoc(argumentDefinition), docWidth ).iterator();
+
+            // Emit the two word-wrapped columns side by side until both are exhausted.
+            while( wordWrappedArgs.hasNext() || wordWrappedDoc.hasNext() ) {
+                String arg = wordWrappedArgs.hasNext() ? wordWrappedArgs.next() : "";
+                String doc = wordWrappedDoc.hasNext() ? wordWrappedDoc.next() : "";
+
+                String formatString = "%-" + argWidth + "s%" + FIELD_SEPARATION_WIDTH + "s%s%n";
+                formatter.format( formatString, arg, "", doc );
+            }
+        }
+
+        return builder.toString();
+    }
+
+    /**
+     * Gets a string indicating how this argument should be passed to the application,
+     * e.g. " -sn,--sample_name <sample_name>".
+     * @param argumentDefinition Argument definition for which help should be printed.
+     * @return Calling information for this argument.
+     */
+    private String getArgumentCallingInfo( ArgumentDefinition argumentDefinition ) {
+        StringBuilder builder = new StringBuilder();
+        Formatter formatter = new Formatter( builder );
+
+        formatter.format(" ");
+        if( argumentDefinition.shortName != null )
+            formatter.format("-%s,", argumentDefinition.shortName);
+        formatter.format("--%s", argumentDefinition.fullName);
+        if( !argumentDefinition.isFlag )
+            formatter.format(" <%s>", argumentDefinition.fullName);
+
+        return builder.toString();
+    }
+
+    /**
+     * Gets a string of argument documentation: the doc text, plus the valid
+     * options (pipe-separated, in parentheses) when the argument restricts them.
+     * @param argumentDefinition Argument definition for which help should be printed.
+     * @return Brief description for this argument.
+     */
+    private String getArgumentDoc( ArgumentDefinition argumentDefinition ) {
+        StringBuilder builder = new StringBuilder();
+        builder.append(argumentDefinition.doc);
+        if( argumentDefinition.validOptions != null ) {
+            builder.append(" (");
+            builder.append(Utils.join("|",argumentDefinition.validOptions));
+            builder.append(")");
+        }
+        return builder.toString();
+    }
+
+    /**
+     * Crude implementation which finds the longest argument portion
+     * given a set of arguments.
+     * @param argumentDefinitions argument definitions to inspect.
+     * @return longest argument length.
+     */
+    private int findLongestArgumentCallingInfo( Collection<ArgumentDefinition> argumentDefinitions ) {
+        int longest = 0;
+        for( ArgumentDefinition argumentDefinition: argumentDefinitions ) {
+            String argumentText = getArgumentCallingInfo( argumentDefinition );
+            if( longest < argumentText.length() )
+                longest = argumentText.length();
+        }
+        return longest;
+    }
+
+    /**
+     * Extract the argument definition groups from the argument definitions and arrange them appropriately.
+     * For help, we want the arguments sorted as they are declared in the class.  However, required arguments
+     * should appear before optional arguments.
+     * @param argumentDefinitions Argument definitions from which to extract argument groups.
+     * @return A list of argument groups sorted in display order.
+     */
+    private List<ArgumentDefinitionGroup> prepareArgumentGroups( ArgumentDefinitions argumentDefinitions ) {
+        // Sort the list of argument definitions according to how they should be shown.
+        // Put the sorted results into a new cloned data structure.
+        // Returning 0 for equal-required pairs relies on Collections.sort being a
+        // stable sort, which preserves declaration order within each tier.
+        Comparator<ArgumentDefinition> definitionComparator = new Comparator<ArgumentDefinition>() {
+            public int compare( ArgumentDefinition lhs, ArgumentDefinition rhs ) {
+                if( lhs.required && rhs.required ) return 0;
+                if( lhs.required ) return -1;
+                if( rhs.required ) return 1;
+                return 0;
+            }
+        };
+
+        List<ArgumentDefinitionGroup> argumentGroups = new ArrayList<ArgumentDefinitionGroup>();
+        for( ArgumentDefinitionGroup argumentGroup: argumentDefinitions.getArgumentDefinitionGroups() ) {
+            List<ArgumentDefinition> sortedDefinitions = new ArrayList<ArgumentDefinition>( argumentGroup.argumentDefinitions );
+            Collections.sort( sortedDefinitions, definitionComparator );
+            argumentGroups.add( new ArgumentDefinitionGroup(argumentGroup.groupName,sortedDefinitions) );
+        }
+
+        // Sort the argument groups themselves with main arguments first, followed by plugins sorted in name order.
+        Comparator<ArgumentDefinitionGroup> groupComparator = new Comparator<ArgumentDefinitionGroup>() {
+            public int compare( ArgumentDefinitionGroup lhs, ArgumentDefinitionGroup rhs ) {
+                if( lhs.groupName == null && rhs.groupName == null ) return 0;
+                if( lhs.groupName == null ) return -1;
+                if( rhs.groupName == null ) return 1;
+                return lhs.groupName.compareTo(rhs.groupName);
+            }
+        };
+        Collections.sort( argumentGroups, groupComparator );
+
+
+        return argumentGroups;
+    }
+
+    /**
+     * generateHeaderInformation
+     *
+     * Generate a standard header for the logger
+     *
+     * @param applicationDetails details of the application to run.
+     * @param parsedArgs the arguments passed in
+     */
+    public static void generateHeaderInformation(ApplicationDetails applicationDetails, Map<ArgumentMatchSource, ParsedArgs> parsedArgs) {
+
+        DateFormat dateFormat = new SimpleDateFormat("yyyy/MM/dd HH:mm:ss");
+        java.util.Date date = new java.util.Date();
+
+        String barrier = createBarrier(applicationDetails.applicationHeader);
+
+        logger.info(barrier);
+        for (String headerLine : applicationDetails.applicationHeader)
+            logger.info(headerLine);
+        logger.debug("Current directory: " + System.getProperty("user.dir"));
+        // Log each argument source (command line, providers) with its parsed arguments.
+        for (Map.Entry<ArgumentMatchSource, ParsedArgs> entry: parsedArgs.entrySet()) {
+            ArgumentMatchSource matchSource = entry.getKey();
+            final String sourceName;
+            switch (matchSource.getType()) {
+                case CommandLine: sourceName = "Program"; break;
+                case Provider: sourceName = matchSource.getDescription(); break;
+                default: throw new RuntimeException("Unexpected argument match source type: " + matchSource.getType());
+            }
+
+            String output = sourceName + " Args: " + entry.getValue().getDescription();
+            logger.info(output);
+        }
+        logger.info(generateUserHelpData());
+        logger.info("Date/Time: " + dateFormat.format(date));
+        logger.info(barrier);
+
+        for(String attribution: applicationDetails.attribution)
+            logger.info(attribution);
+        logger.info(barrier);
+    }
+
+    /**
+     * Create the user-related help information.
+     * @return a non-null String with the relevant information, or the empty
+     *         string if the information cannot be determined.
+     */
+    private static String generateUserHelpData() {
+	// NOTE(review): this method is tab-indented, unlike the rest of the file.
+	try {
+	    return "Executing as " +
+		System.getProperty("user.name") + "@" + InetAddress.getLocalHost().getHostName() +
+		" on " + System.getProperty("os.name") + " " + System.getProperty("os.version") +
+		" " + System.getProperty("os.arch") + "; " + System.getProperty("java.vm.name") +
+		" " + System.getProperty("java.runtime.version") + ".";
+	} catch (Exception e) {
+	    // Deliberately best-effort: header info must never abort the run
+	    // (e.g. InetAddress.getLocalHost() may fail).
+	    return "";
+	}
+    }
+
+    /**
+     * Create a barrier to use to distinguish the header from the rest of the output.
+     * @param text A collection of lines to output as part of a header.
+     * @return A barrier consisting of the '-' character.
+     */
+    private static String createBarrier(List<String> text) {
+        int barrierWidth = 0;
+        for(String headerLine: text)
+            barrierWidth = Math.max(headerLine.length(),barrierWidth);
+        // Zero-pad a 0 to the desired width, then turn every '0' into a dash.
+        // NOTE(review): a width of 0 (empty text, or all-empty lines) yields the
+        // invalid format string "%00d" -- assumes at least one non-empty line; TODO confirm.
+        return String.format("%0" + barrierWidth + "d",0).replace('0','-');
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/help/ResourceBundleExtractorDoclet.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/help/ResourceBundleExtractorDoclet.java
new file mode 100644
index 0000000..83af1d9
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/help/ResourceBundleExtractorDoclet.java
@@ -0,0 +1,281 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.help;
+
+import com.sun.javadoc.*;
+import org.broadinstitute.gatk.utils.Utils;
+
+import java.io.*;
+import java.util.*;
+
+/**
+ * Extracts certain types of javadoc (specifically package and class descriptions) and makes them available
+ * to applications at runtime.
+ *
+ * Invoked by the javadoc tool via the static start() entry point; writes the
+ * extracted text as a java.util.Properties file.
+ *
+ * @author mhanna
+ * @version 0.1
+ */
+public class ResourceBundleExtractorDoclet {
+    // NOTE: Using log4j during javadoc generation requires
+    // a proper Log4J initialization (see CommandLineProgram),
+    // or a log4.properties file. This doclet has neither.
+    //private static Logger logger = Logger.getLogger(ResourceBundleExtractorDoclet.class);
+
+    /**
+     * Taglet for the particular version number.
+     */
+    public static final String VERSION_TAGLET_NAME = "version";
+    public static final String SUMMARY_TAGLET_NAME = "help.summary";
+    public static final String DESCRIPTION_TAGLET_NAME = "help.description";
+
+    private final RootDoc rootDoc;
+    private final Set<ClassDoc> classDocs;
+    private final Set<PackageDoc> packageDocs;
+    // Union of classDocs and packageDocs; used only for the up-to-date check.
+    private final Set<Doc> allDocs;
+
+    // Output file from the -out option; null means write to stdout.
+    protected File outFile = null;
+    // Values of the -build-timestamp and -absolute-version options; may stay null if not supplied.
+    protected String buildTimestamp = null, absoluteVersion = null;
+
+    /**
+     * Extracts the contents of certain types of javadoc and adds them to an XML file.
+     * @param rootDoc The documentation root.
+     * @return Whether the JavaDoc run succeeded.
+     * @throws IOException if output can't be written.
+     */
+    public static boolean start(RootDoc rootDoc) throws IOException {
+        ResourceBundleExtractorDoclet doclet = new ResourceBundleExtractorDoclet(rootDoc);
+        doclet.checkUndocumentedClasses();
+        // Skip regeneration when the existing output is newer than every source file.
+        if (doclet.isUpToDate()) {
+            rootDoc.printNotice("Docs up to date. Not regenerating.");
+            return true;
+        }
+        doclet.processDocs();
+        return true;
+    }
+
+    /**
+     * Validate the given options against options supported by this doclet.
+     * @param option Option to validate.
+     * @return Number of potential parameters; 0 if not supported.
+     */
+    @SuppressWarnings("unused") // Used by javadoc system
+    public static int optionLength(String option) {
+        // Each supported option takes exactly one value, so its length is 2 (name + value).
+        if(option.equals("-build-timestamp") || option.equals("-out") || option.equals("-absolute-version") ) {
+            return 2;
+        }
+        return 0;
+    }
+
+    /**
+     * Creates a new resource extractor doclet.
+     * @param  rootDoc           the documentation root.
+     */
+    private ResourceBundleExtractorDoclet(RootDoc rootDoc) {
+        this.rootDoc = rootDoc;
+        // NOTE(review): TreeSet relies on Doc's natural ordering -- confirm the
+        // javadoc Doc implementation used here is Comparable (the API declares it so).
+        this.classDocs = new TreeSet<>();
+        this.packageDocs = new TreeSet<>();
+        this.allDocs = new TreeSet<>();
+        for (final ClassDoc classDoc: rootDoc.classes()) {
+            this.classDocs.add(classDoc);
+            // Cache packages as we see them, since there's no direct way to iterate over packages.
+            this.packageDocs.add(classDoc.containingPackage());
+        }
+        this.allDocs.addAll(classDocs);
+        this.allDocs.addAll(packageDocs);
+        for(final String[] options: rootDoc.options()) {
+            if(options[0].equals("-out"))
+                this.outFile = new File(options[1]);
+            if(options[0].equals("-build-timestamp"))
+                this.buildTimestamp = options[1];
+            if (options[0].equals("-absolute-version"))
+                this.absoluteVersion = options[1];
+        }
+    }
+
+    /**
+     * Warns (via the javadoc reporter) about every documentable class whose
+     * required javadoc is missing or is an IDE-generated placeholder.
+     */
+    private void checkUndocumentedClasses() {
+        final Set<String> undocumentedClasses = new TreeSet<>();
+
+        for (final ClassDoc classDoc: classDocs) {
+            if(isRequiredJavadocMissing(classDoc) && shouldDocument(classDoc))
+                undocumentedClasses.add(classDoc.name());
+        }
+
+        if(undocumentedClasses.size() > 0) {
+            final String message = String.format("The following are currently undocumented: %s%s%s",
+                    Utils.TEXT_BLINK, Utils.join(" ", undocumentedClasses), Utils.TEXT_RESET);
+            for (final String line: Utils.warnUserLines(message)) {
+                rootDoc.printWarning(line);
+            }
+        }
+    }
+
+    /**
+     * @return true only if outFile exists and is newer than every source file
+     *         backing the docs; false when outFile is unset or missing.
+     */
+    private boolean isUpToDate() {
+        if (outFile == null)
+            return false;
+
+        // lastModified() returns 0L when the file does not exist.
+        final long outFileMillis = outFile.lastModified();
+
+        if (outFileMillis == 0L) {
+            return false;
+        }
+
+        for (final Doc doc: allDocs) {
+            final File docFile = doc.position() == null ? null : doc.position().file();
+            if (docFile != null && docFile.lastModified() > outFileMillis) {
+                rootDoc.printNotice("At least one item is out of date: " + docFile.getAbsolutePath());
+                return false;
+            }
+        }
+
+        return true;
+    }
+
+    /**
+     * Renders all class and package help text into a properties stream:
+     * outFile when the -out option was given, otherwise stdout.
+     * @throws IOException if the output can't be written.
+     */
+    protected void processDocs() throws IOException {
+        final PrintStream out;
+        if (outFile != null) {
+            out = new PrintStream(outFile);
+        } else {
+            out = System.out;
+        }
+        try {
+            // Maintains a collection of resources in memory as they're accumulated.
+            final Properties resourceText = new Properties();
+
+            loadExistingResourceFile(resourceText);
+
+            // NOTE(review): Properties.setProperty throws NullPointerException if
+            // -build-timestamp was not supplied (buildTimestamp == null) -- confirm
+            // the build always passes this option.
+            resourceText.setProperty("build.timestamp", buildTimestamp);
+
+            for (final ClassDoc currentClass : classDocs)
+                renderHelpText(resourceText, DocletUtils.getClassName(currentClass, false), currentClass);
+            for (final PackageDoc currentPackage : packageDocs)
+                renderHelpText(resourceText, currentPackage.name(), currentPackage);
+
+            resourceText.store(out, "Strings displayed by the GATK help system");
+        } finally {
+            // Only close streams we opened; never close System.out.
+            if (outFile != null) {
+                out.close();
+            }
+        }
+    }
+
+    /**
+     * Attempts to load the contents of the resource file named by resourceFileName into
+     * our in-memory resource collection resourceText. If the resource file doesn't exist,
+     * prints a notice to the user but does not throw an exception back to the calling method,
+     * since we'll just create a new resource file from scratch in that case.
+     * @param resourceText in-memory properties collection to load into.
+     * @throws IOException       if there is an I/O-related error other than FileNotFoundException
+     *                           while attempting to read the resource file.
+     */
+    private void loadExistingResourceFile(final Properties resourceText) throws IOException {
+        try {
+            // NOTE(review): if outFile is null (no -out option), new FileReader(null)
+            // throws NullPointerException, not FileNotFoundException, so this catch
+            // does not protect that path -- confirm -out is always supplied.
+            try (final BufferedReader resourceFile = new BufferedReader(new FileReader(outFile))) {
+                resourceText.load(resourceFile);
+            }
+        }
+        catch ( FileNotFoundException e ) {
+            rootDoc.printNotice("Resource file not found -- generating a new one from scratch.");
+        }
+    }
+
+    /**
+     * Determine whether a given class should be documented.
+     * A class is documented only if it is concrete, loadable, not a Throwable,
+     * and carries an enabled @DocumentedGATKFeature annotation.
+     * @param classDoc the type of the given class.
+     * @return True if the class should be documented.  False otherwise.
+     */
+    protected static boolean shouldDocument(ClassDoc classDoc) {
+        if (classDoc.isAbstract()) {
+            return false;
+        }
+        // TODO: Code duplication with GATKDoclet, including DocletUtils.getClassForDoc().
+        // TODO: Refactor common methods into DocletUtils, and possibly just use DocumentGATKFeatureObjects.
+        final Class<?> docClass;
+        try {
+            docClass = (Class<?>) DocletUtils.getClassForDoc(classDoc);
+        } catch (ClassNotFoundException e) {
+            return false;
+        } catch (NoClassDefFoundError e) {
+            return false;
+        } catch (UnsatisfiedLinkError e) {
+            return false; // naughty BWA bindings
+        }
+        if (Throwable.class.isAssignableFrom(docClass)) {
+            return false; // UserExceptions
+        }
+        final DocumentedGATKFeature f = docClass.getAnnotation(DocumentedGATKFeature.class);
+        return f != null && f.enable();
+    }
+
+    /**
+     * Is the javadoc for the given class missing?
+     * Empty comments and IntelliJ's auto-generated "Created by IntelliJ"
+     * placeholder both count as missing.
+     * @param classDoc Class for which to inspect the JavaDoc.
+     * @return True if the JavaDoc is missing.  False otherwise.
+     */
+    private static boolean isRequiredJavadocMissing(ClassDoc classDoc) {
+        return classDoc.commentText().length() == 0 || classDoc.commentText().contains("Created by IntelliJ");
+    }
+
+    /**
+     * Renders all the help text required for a given name.
+     * Writes up to three properties per element: version, summary (first
+     * javadoc sentence), and description (full comment text).
+     * @param resourceText resource text properties
+     * @param elementName element name to use as the key
+     * @param element Doc element to process.
+     */
+    private void renderHelpText(final Properties resourceText, final String elementName, final Doc element) {
+        StringBuilder summaryBuilder = new StringBuilder();
+        for(Tag tag: element.firstSentenceTags())
+             summaryBuilder.append(tag.text());
+        String summary = summaryBuilder.toString();
+        String description = element.commentText();
+
+        // this might seem unnecessary, but the GATK command line program uses this tag to determine the version when running
+        if(absoluteVersion != null)
+            resourceText.setProperty(String.format("%s.%s",elementName,VERSION_TAGLET_NAME),absoluteVersion);
+
+        // Write out an alternate element summary, if exists.
+        resourceText.setProperty(String.format("%s.%s",elementName,SUMMARY_TAGLET_NAME),formatText(summary));
+
+        // Write out an alternate description, if present.
+        resourceText.setProperty(String.format("%s.%s",elementName,DESCRIPTION_TAGLET_NAME),formatText(description));
+    }
+
+    /**
+     * Format text for consumption by the properties file.
+     * @param text Text to format.
+     * @return Formatted text; string trimmed, newlines removed.
+     */
+    private static String formatText(String text) {
+        // NOTE(review): the Scanner is never closed; harmless for a String source,
+        // but try-with-resources would silence resource-leak warnings.
+        Scanner scanner = new Scanner(text);
+        StringBuilder output = new StringBuilder();
+
+        // Collapse the (possibly multi-line) javadoc text into one space-separated line.
+        while(scanner.hasNextLine()) {
+            if(output.length() > 0)
+                output.append(' ');
+            output.append(scanner.nextLine().trim());
+        }
+
+        return output.toString();    
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/instrumentation/Sizeof.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/instrumentation/Sizeof.java
new file mode 100644
index 0000000..c61d895
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/instrumentation/Sizeof.java
@@ -0,0 +1,146 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.instrumentation;
+
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+
+import java.lang.instrument.Instrumentation;
+import java.lang.reflect.Array;
+import java.lang.reflect.Field;
+import java.lang.reflect.Modifier;
+import java.util.IdentityHashMap;
+
+/**
+ * A sizeof implementation for Java.  Relies on the Java instrumentation API, so
+ * it must be added as an agent to function properly.
+ *
+ * To run, add -javaagent:$STING_HOME/dist/StingUtils.jar as a command-line
+ * JVM argument.
+ *
+ * @author mhanna
+ * @version 0.1
+ */
+public class Sizeof {
+    /**
+     * Instrumentation object.  Registered by the JVM via the premain() method.
+     * Remains null unless the JVM was launched with this jar as a -javaagent,
+     * in which case all utility methods below throw.
+     */
+    private static Instrumentation instrumentation;
+
+    /**
+     * Called by the JVM before the agent is started.
+     * @param args Agent argument string from the -javaagent command line (unused here).
+     * @param inst Instrumentation object, used to perform instrumentation in the JVM.
+     */
+    public static void premain(String args, Instrumentation inst) {
+        instrumentation = inst;
+    }
+
+    /**
+     * Is this Sizeof operator enabled?  To enable, add the -javaagent directive listed in the class-level javadoc.
+     * @return True if sizeof() is enabled.  If false, any calls to utility methods of this class will throw an exception.
+     */
+    public static boolean isEnabled() {
+        return instrumentation != null;
+    }
+
+    /**
+     * Gets the size of the given object.  Retrieves the size for only this object; any reference fields in the object will only be
+     * counted as single pointers.
+     * @param o The object to sizeof().
+     * @return Gets the best possible approximation we can get of the size of the object in memory.  On Sun JVM, includes some object padding.
+     * @throws ReviewedGATKException if the instrumentation agent was not registered via -javaagent.
+     */
+    public static long getObjectSize(Object o) {
+        if(!isEnabled())
+            throw new ReviewedGATKException("Sizeof operator is currently disabled!  To enable, review the documentation in Sizeof.java");
+        return instrumentation.getObjectSize(o);
+    }
+
+    /**
+     * Gets the size of the given object, including the size of the objects to which this object refers.
+     * @param o The object to sizeof().
+     * @return Gets the best possible approximation we can get of the size of the object in memory, including all references within each object.
+     * @throws ReviewedGATKException if the instrumentation agent was not registered via -javaagent.
+     */
+    public static long getObjectGraphSize(Object o) {
+        if(!isEnabled())
+            throw new ReviewedGATKException("Sizeof operator is currently disabled!  To enable, review the documentation in Sizeof.java");
+        // IdentityHashMap compares keys by reference identity (==), so distinct-but-equal
+        // objects are each counted, while cycles and shared references are counted only once.
+        IdentityHashMap<Object,Object> objectsSeen = new IdentityHashMap<Object,Object>();
+        return getObjectGraphSize(o,objectsSeen);
+    }
+
+    /**
+     * The engine for walking the graph of all objects and their children.
+     * @param o The object to traverse.
+     * @param objectsSeen A set (keyed by identity) of all objects already seen.
+     * @return Gets the best possible approximation we can get of the size of the object in memory, including all references within each object.
+     */
+    private static long getObjectGraphSize(Object o,IdentityHashMap<Object,Object> objectsSeen) {
+        // Size of a null object itself (as opposed to the reference to the null object) is 0.
+        if(o == null)
+            return 0;
+        
+        // Don't allow repeated traversals of the same object.
+        if(objectsSeen.containsKey(o))
+            return 0;
+        objectsSeen.put(o,o);
+
+        // Get the size of the object itself, plus all contained primitives.
+        long totalSize = instrumentation.getObjectSize(o);
+
+        // Get the size of (non-primitive) array elements.
+        Class<?> classToInspect = o.getClass();
+        if(classToInspect.isArray()) {
+            if(!classToInspect.getComponentType().isPrimitive()) {
+                for(int i = 0; i < Array.getLength(o); i++)
+                    totalSize += getObjectGraphSize(Array.get(o,i),objectsSeen);
+            }
+        }
+
+        // Walk the descendants of each field of this class and its superclasses.  Be sure to avoid
+        // synthetic fields like this$0 -- these are back references to the parent of the object
+        // contained in the inner class.
+        // Potential BUG: Are there other types of synthetic fields we should be tracking?
+        while(classToInspect != null) {
+            for(Field field: classToInspect.getDeclaredFields()) {
+                if(field.getType().isPrimitive())
+                    continue;
+                if(Modifier.isStatic(field.getModifiers()))
+                    continue;
+                if(field.isSynthetic())
+                    continue;
+                // NOTE(review): setAccessible can throw on strongly-encapsulated modules in
+                // newer JDKs -- confirm the target JVM version before upgrading past Java 8.
+                field.setAccessible(true);
+                Object fieldValue;
+                try {
+                    fieldValue = field.get(o);
+                }
+                catch(IllegalAccessException ex) {
+                    throw new ReviewedGATKException("Unable to access field " + field.getName(),ex);
+                }
+                totalSize += getObjectGraphSize(fieldValue,objectsSeen);
+            }
+            classToInspect = classToInspect.getSuperclass();
+        }
+        return totalSize;
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/interval/IntervalMergingRule.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/interval/IntervalMergingRule.java
new file mode 100644
index 0000000..ba196a1
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/interval/IntervalMergingRule.java
@@ -0,0 +1,35 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.interval;
+
+
+/**
+ * An enum used to determine the merging rules for intervals passed to the GATK.
+ */
+public enum IntervalMergingRule {
+    /** Merge both overlapping intervals and abutting (directly adjacent) intervals. */
+    ALL, // we merge both overlapping intervals and abutting intervals
+    /** Merge intervals that are overlapping, but NOT ones that only abut each other. */
+    OVERLAPPING_ONLY // We merge intervals that are overlapping, but NOT ones that only abut each other
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/interval/IntervalSetRule.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/interval/IntervalSetRule.java
new file mode 100644
index 0000000..424485a
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/interval/IntervalSetRule.java
@@ -0,0 +1,36 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.interval;
+
+/**
+ * Set operators for combining lists of intervals passed to the GATK.
+ */
+public enum IntervalSetRule {
+    /** Take the union of all intervals */
+    UNION,
+    /** Take the intersection of intervals (the subset that overlaps all intervals specified) */
+    INTERSECTION;
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/interval/IntervalUtils.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/interval/IntervalUtils.java
new file mode 100644
index 0000000..e272eac
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/interval/IntervalUtils.java
@@ -0,0 +1,895 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.interval;
+
+import com.google.java.contract.Ensures;
+import com.google.java.contract.Requires;
+import htsjdk.samtools.reference.ReferenceSequenceFile;
+import htsjdk.samtools.util.Interval;
+import htsjdk.samtools.util.IntervalList;
+import htsjdk.samtools.SAMFileHeader;
+import org.apache.log4j.Logger;
+import htsjdk.tribble.Feature;
+import org.broadinstitute.gatk.utils.commandline.IntervalArgumentCollection;
+import org.broadinstitute.gatk.utils.commandline.IntervalBinding;
+import org.broadinstitute.gatk.utils.GenomeLoc;
+import org.broadinstitute.gatk.utils.GenomeLocParser;
+import org.broadinstitute.gatk.utils.GenomeLocSortedSet;
+import org.broadinstitute.gatk.utils.Utils;
+import org.broadinstitute.gatk.utils.collections.Pair;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+import org.broadinstitute.gatk.utils.exceptions.UserException;
+import org.broadinstitute.gatk.utils.fasta.CachingIndexedFastaSequenceFile;
+import org.broadinstitute.gatk.utils.text.XReadLines;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.*;
+
+/**
+ * Parse text representations of interval strings that
+ * can appear in GATK-based applications.
+ *
+ * @author mhanna
+ * @version 0.1
+ */
+public class IntervalUtils {
+    private static Logger logger = Logger.getLogger(IntervalUtils.class);
+
+    /**
+     * Turns a set of strings describing intervals into a parsed set of intervals.  Valid string elements can be files,
+     * intervals in samtools notation (chrA:B-C), or the special value 'unmapped'.  Note that the
+     * legacy semicolon-separated syntax is rejected by the single-argument overload this delegates to.
+     *
+     * @param parser Genome loc parser.
+     * @param argList A list of strings containing interval data; may be null.
+     * @return an unsorted, unmerged representation of the given intervals (empty when argList is null or empty).
+     */
+    public static List<GenomeLoc> parseIntervalArguments(GenomeLocParser parser, List<String> argList) {
+        List<GenomeLoc> rawIntervals = new ArrayList<GenomeLoc>();    // running list of raw GenomeLocs
+
+        if (argList != null) { // now that we can be in this function if only the ROD-to-Intervals was provided, we need to
+                               // ensure that the arg list isn't null before looping.
+            for (String argument : argList) {
+                rawIntervals.addAll(parseIntervalArguments(parser, argument));
+            }
+        }
+
+        return rawIntervals;
+    }
+
+    /**
+     * Parses a single interval argument into GenomeLocs.  The argument may be the literal
+     * 'unmapped', the name of an interval file, or an interval string parseable by the
+     * GenomeLocParser (e.g. samtools-style chrA:B-C).
+     *
+     * @param parser Genome loc parser used to validate and build the locs.
+     * @param arg a single interval specification.
+     * @return an unsorted, unmerged list of the parsed intervals.
+     * @throws UserException.BadArgumentValue if the legacy ";"-separated syntax is used.
+     * @throws UserException.MalformedFile if an interval file cannot be parsed in any supported format.
+     */
+    public static List<GenomeLoc> parseIntervalArguments(GenomeLocParser parser, String arg) {
+        List<GenomeLoc> rawIntervals = new ArrayList<GenomeLoc>();    // running list of raw GenomeLocs
+
+        if ( arg.indexOf(';') != -1 ) {
+            throw new UserException.BadArgumentValue("-L " + arg, "The legacy -L \"interval1;interval2\" syntax " +
+                                                     "is no longer supported. Please use one -L argument for each " +
+                                                     "interval or an interval file instead.");
+        }
+
+        // if any argument is 'unmapped', "parse" it to a null entry.  A null in this case means 'all the intervals with no alignment data'.
+        if (isUnmapped(arg))
+            rawIntervals.add(GenomeLoc.UNMAPPED);
+        // if it's a file, add items to raw interval list
+        else if (isIntervalFile(arg)) {
+            try {
+                rawIntervals.addAll(intervalFileToList(parser, arg));
+            }
+            catch ( UserException.MalformedGenomeLoc e ) {
+                // Rethrow as-is to preserve the precise per-loc error rather than wrapping it below.
+                throw e;
+            }
+            catch ( Exception e ) {
+                throw new UserException.MalformedFile(arg, "Interval file could not be parsed in any supported format.", e);
+            }
+        }
+        // otherwise treat as an interval -> parse and add to raw interval list
+        else {
+            rawIntervals.add(parser.parseGenomeLoc(arg));
+        }
+
+        return rawIntervals;
+    }
+
+    /**
+     * Read a file of genome locations to process. The file may be in Picard
+     * or GATK interval format; BED files are rejected here and must go through Tribble.
+     *
+     * @param glParser   GenomeLocParser
+     * @param file_name  interval file
+     * @return List<GenomeLoc> List of Genome Locs that have been parsed from file
+     * @throws ReviewedGATKException if the file is a BED file.
+     * @throws UserException.CouldNotReadInputFile if the file cannot be read or parsed.
+     */
+    public static List<GenomeLoc> intervalFileToList(final GenomeLocParser glParser, final String file_name) {
+        // try to open file
+        File inputFile = new File(file_name);
+        List<GenomeLoc> ret = new ArrayList<GenomeLoc>();
+
+        // case: BED file
+        if ( file_name.toUpperCase().endsWith(".BED") ) {
+            // this is now supported in Tribble
+            throw new ReviewedGATKException("BED files must be parsed through Tribble; parsing them as intervals through the GATK engine is no longer supported");
+        }
+        else {
+            /**
+             * IF not a BED file:
+             * first try to read it as a Picard interval file since that's well structured
+             * we'll fail quickly if it's not a valid file.
+             */
+            boolean isPicardInterval = false;
+            try {
+                // Note: Picard will skip over intervals with contigs not in the sequence dictionary
+                IntervalList il = IntervalList.fromFile(inputFile);
+                // Only set once fromFile succeeds: a failure before this point means the file
+                // wasn't Picard-formatted at all, so we fall through to GATK-format parsing.
+                isPicardInterval = true;
+
+                int nInvalidIntervals = 0;
+                for (Interval interval : il.getIntervals()) {
+                    if ( glParser.isValidGenomeLoc(interval.getSequence(), interval.getStart(), interval.getEnd(), true))
+                        ret.add(glParser.createGenomeLoc(interval.getSequence(), interval.getStart(), interval.getEnd(), true));
+                    else {
+                        nInvalidIntervals++;
+                    }
+                }
+                if ( nInvalidIntervals > 0 )
+                    logger.warn("Ignoring " + nInvalidIntervals + " invalid intervals from " + inputFile);
+            }
+
+            // if that didn't work, try parsing file as a GATK interval file
+            catch (Exception e) {
+                if ( isPicardInterval ) // definitely a picard file, but we failed to parse
+                    throw new UserException.CouldNotReadInputFile(inputFile, e);
+                else {
+                    try {
+                        XReadLines reader = new XReadLines(new File(file_name));
+                        for(String line: reader) {
+                            if ( line.trim().length() > 0 ) {
+                                ret.add(glParser.parseGenomeLoc(line));
+                            }
+                        }
+                        // NOTE(review): reader is only closed on the success path; if
+                        // parseGenomeLoc throws, the file handle leaks -- consider try/finally.
+                        reader.close();
+                    }
+                    catch (IOException e2) {
+                        throw new UserException.CouldNotReadInputFile(inputFile, e2);
+                    }
+                }
+            }
+        }
+
+        return ret;
+    }
+
+    /**
+     * Returns true if the interval string is the "unmapped" interval.
+     * The check is null-safe, whitespace-trimmed, and case-insensitive.
+     * @param interval Interval to check
+     * @return true if the interval string is the "unmapped" interval
+     */
+    public static boolean isUnmapped(String interval) {
+        return (interval != null && interval.trim().toLowerCase().equals("unmapped"));
+    }
+
+    /**
+     * merge two interval lists, using an interval set rule
+     * @param setOne a list of genomeLocs, in order (null/empty tolerated when setTwo is non-null)
+     * @param setTwo a list of genomeLocs, also in order (null/empty tolerated when setOne is non-null)
+     * @param rule the rule to use for merging, i.e. union, intersection, etc (null defaults to UNION)
+     * @return an unmodifiable list, correctly merged using the specified rule
+     * @throws UserException.BadInput if INTERSECTION was requested and produced no intervals
+     */
+    public static List<GenomeLoc> mergeListsBySetOperator(List<GenomeLoc> setOne, List<GenomeLoc> setTwo, IntervalSetRule rule) {
+        // shortcut, if either set is zero, return the other set
+        if (setOne == null || setOne.size() == 0 || setTwo == null || setTwo.size() == 0)
+            return Collections.unmodifiableList((setOne == null || setOne.size() == 0) ? setTwo : setOne);
+
+        // our master list, since we can't guarantee removal time in a generic list
+        LinkedList<GenomeLoc> retList = new LinkedList<GenomeLoc>();
+
+        // if we're set to UNION, just add them all
+        if (rule == null || rule == IntervalSetRule.UNION) {
+            retList.addAll(setOne);
+            retList.addAll(setTwo);
+            return Collections.unmodifiableList(retList);
+        }
+
+        // else we're INTERSECTION, create two indexes into the lists
+        int iOne = 0;
+        int iTwo = 0;
+
+        // merge the second into the first using the rule
+        while (iTwo < setTwo.size() && iOne < setOne.size())
+            // if the first list is ahead, drop items off the second until we overlap
+            if (setTwo.get(iTwo).isBefore(setOne.get(iOne)))
+                iTwo++;
+            // if the second is ahead, drop intervals off the first until we overlap
+            else if (setOne.get(iOne).isBefore(setTwo.get(iTwo)))
+                iOne++;
+            // we overlap, intersect the two intervals and add the result.  Then remove the interval that ends first.
+            else {
+                retList.add(setOne.get(iOne).intersect(setTwo.get(iTwo)));
+                if (setOne.get(iOne).getStop() < setTwo.get(iTwo).getStop()) iOne++;
+                else iTwo++;
+            }
+
+        //if we have an empty list, throw an exception.  If they specified intersection and there are no items, this is bad.
+        if (retList.size() == 0)
+                throw new UserException.BadInput("The INTERSECTION of your -L options produced no intervals.");
+
+        // we don't need to add the rest of remaining locations, since we know they don't overlap. return what we have
+        return Collections.unmodifiableList(retList);
+    }
+
+    /**
+     * Sorts and merges an interval list.  Multiple techniques are available for merging: ALL, which combines
+     * all overlapping and abutting intervals into an interval that spans the union of all covered bases, and
+     * OVERLAPPING_ONLY, which unions overlapping intervals but keeps abutting intervals separate.
+     *
+     * @param parser Genome loc parser for the intervals.
+     * @param intervals A collection of intervals to merge; the caller's list is not modified.
+     * @param mergingRule A descriptor for the type of merging to perform.
+     * @return A sorted, merged version of the intervals passed in.
+     */
+    public static GenomeLocSortedSet sortAndMergeIntervals(GenomeLocParser parser, List<GenomeLoc> intervals, IntervalMergingRule mergingRule) {
+        // Make a copy of the (potentially unmodifiable) list to be sorted
+        intervals = new ArrayList<GenomeLoc>(intervals);
+        // sort raw interval list
+        Collections.sort(intervals);
+        // now merge raw interval list
+        intervals = mergeIntervalLocations(intervals, mergingRule);
+
+        return GenomeLocSortedSet.createSetFromList(parser,intervals);
+    }
+
+    /**
+     * computes whether the test interval list is equivalent to master.  To be equivalent, test must
+     * contain GenomeLocs covering every base in master, exactly once.  Note that this algorithm
+     * assumes that master genomelocs are all discontiguous (i.e., we don't have locs like 1-3 and 4-6 but
+     * rather just 1-6).  In order to use this algorithm with contiguous genomelocs first merge them.  The algorithm
+     * doesn't assume that test has discontinuous genomelocs.
+     *
+     * Returns a null string if there are no differences, otherwise returns a string describing the difference
+     * (useful for UnitTests).  Assumes both lists are sorted
+     *
+     * @param masterArg sorted master genome locs
+     * @param testArg sorted test genome locs
+     * @return null string if there are no difference, otherwise a string describing the difference
+     */
+    public static String equateIntervals(List<GenomeLoc> masterArg, List<GenomeLoc> testArg) {
+        // Work on copies so the caller's lists are untouched; LinkedList gives O(1) push/pop.
+        LinkedList<GenomeLoc> master = new LinkedList<GenomeLoc>(masterArg);
+        LinkedList<GenomeLoc> test = new LinkedList<GenomeLoc>(testArg);
+
+        while ( ! master.isEmpty() ) { // there's still unchecked bases in master
+            final GenomeLoc masterHead = master.pop();
+            // NOTE(review): if test runs out while master still has bases, this pop() throws
+            // NoSuchElementException instead of returning a descriptive difference string --
+            // confirm callers guarantee test covers master.
+            final GenomeLoc testHead = test.pop();
+
+            if ( testHead.overlapsP(masterHead) ) {
+                // remove the parts of test that overlap master, and push the remaining
+                // parts onto master for further comparison.
+                for ( final GenomeLoc masterPart : Utils.reverse(masterHead.subtract(testHead)) ) {
+                    master.push(masterPart);
+                }
+            } else {
+                // testHead is incompatible with masterHead, so we must have extra bases in testHead
+                // that aren't in master
+                return "Incompatible locs detected masterHead=" + masterHead + ", testHead=" + testHead;
+            }
+        }
+
+        if ( test.isEmpty() ) // everything is equal
+            return null; // no differences
+        else
+            return "Remaining elements found in test: first=" + test.peek();
+    }
+
+
+    /**
+     * Check if a string argument was intended as a file.
+     * Accepted file extensions: .bed, .list, .picard, .interval_list, .intervals.
+     * @param str token to identify as a filename.
+     * @return true if the token looks like a filename, or false otherwise.
+     * @throws UserException.CouldNotReadInputFile if the token looks like an interval file but does not exist.
+     */
+    public static boolean isIntervalFile(String str) {
+        return isIntervalFile(str, true);
+    }
+
+    /**
+     * Check if a string argument was intended as a file.
+     * Accepted file extensions: .bed, .list, .picard, .interval_list, .intervals.
+     * @param str token to identify as a filename.
+     * @param checkExists if true throws an exception if the file doesn't exist.
+     * @return true if the token looks like a filename, or false otherwise.
+     * @throws UserException.CouldNotReadInputFile if checkExists is set and the file is missing,
+     *         or if an existing file lacks one of the supported extensions.
+     */
+    public static boolean isIntervalFile(String str, boolean checkExists) {
+        // should we define list of file extensions as a public array somewhere?
+        // is regex or endsWith better?
+        File file = new File(str);
+        // Extension matching is case-insensitive (everything is compared upper-cased).
+        if (str.toUpperCase().endsWith(".BED") || str.toUpperCase().endsWith(".LIST") ||
+                str.toUpperCase().endsWith(".PICARD") || str.toUpperCase().endsWith(".INTERVAL_LIST")
+                || str.toUpperCase().endsWith(".INTERVALS")) {
+            if (!checkExists)
+                return true;
+            else if (file.exists())
+                return true;
+            else
+                throw new UserException.CouldNotReadInputFile(file, "The interval file does not exist.");
+        }
+
+        // An existing file without a recognized extension is treated as a user error rather
+        // than being silently parsed as an interval string.
+        if(file.exists())
+            throw new UserException.CouldNotReadInputFile(file, String.format("The interval file %s does not have one of " +
+                    "the supported extensions (.bed, .list, .picard, .interval_list, or .intervals). " +
+                    "Please rename your file with the appropriate extension. If %s is NOT supposed to be a file, " +
+                    "please move or rename the file at location %s", str, str, file.getAbsolutePath()));
+
+        else return false;
+    }
+
+    /**
+     * Returns a map of contig names with their sizes.
+     * @param reference The reference for the intervals.
+     * @return A map of contig names to their sizes in bases, in sequence-dictionary order.
+     */
+    public static Map<String, Integer> getContigSizes(File reference) {
+        final ReferenceSequenceFile referenceSequenceFile = createReference(reference);
+        List<GenomeLoc> locs = GenomeLocSortedSet.createSetFromSequenceDictionary(referenceSequenceFile.getSequenceDictionary()).toList();
+        // LinkedHashMap preserves the sequence-dictionary ordering of the contigs.
+        Map<String, Integer> lengths = new LinkedHashMap<String, Integer>();
+        for (GenomeLoc loc: locs)
+            lengths.put(loc.getContig(), loc.size());
+        // NOTE(review): the ReferenceSequenceFile opened above is never closed here --
+        // confirm whether the underlying file handle should be released.
+        return lengths;
+    }
+
+    /**
+     * Splits an interval list into multiple files, keeping each contig's locs within one part.
+     * @param fileHeader The sam file header.
+     * @param locs The genome locs to split (must already be sorted).
+     * @param scatterParts The output interval lists to write to.
+     * @throws UserException.BadInput if the region is too short or has too few contigs to split.
+     */
+    public static void scatterContigIntervals(SAMFileHeader fileHeader, List<GenomeLoc> locs, List<File> scatterParts) {
+
+	// Contract: must divide locs up so that each of scatterParts gets a sublist such that:
+	// (a) all locs concerning a particular contig go to the same part
+	// (b) locs are not split or combined, and remain in the same order (so scatterParts[0] + ... + scatterParts[n] == locs)
+
+	// Locs are already sorted.
+
+	long totalBases = 0;
+	for(GenomeLoc loc : locs)
+	    totalBases += loc.size();
+
+	long idealBasesPerPart = totalBases / scatterParts.size();
+	if(idealBasesPerPart == 0)
+	    throw new UserException.BadInput(String.format("Genome region is too short (%d bases) to split into %d parts", totalBases, scatterParts.size()));
+
+	// Find the indices in locs where we switch from one contig to the next.
+	ArrayList<Integer> contigStartLocs = new ArrayList<Integer>();
+	String prevContig = null;
+
+	for(int i = 0; i < locs.size(); ++i) {
+
+	    GenomeLoc loc = locs.get(i);
+	    if(prevContig == null || !loc.getContig().equals(prevContig))
+		contigStartLocs.add(i);
+	    prevContig = loc.getContig();
+
+	}
+
+	if(contigStartLocs.size() < scatterParts.size())
+	    throw new UserException.BadInput(String.format("Input genome region has too few contigs (%d) to split into %d parts", contigStartLocs.size(), scatterParts.size()));
+
+	long thisPartBases = 0;
+	int partIdx = 0;
+	IntervalList outList = new IntervalList(fileHeader);
+
+	for(int i = 0; i < locs.size(); ++i) {
+
+	    GenomeLoc loc = locs.get(i);
+	    // NOTE(review): accumulates getStop()-getStart() here while totalBases above uses
+	    // loc.size() -- these differ by one per loc for inclusive intervals; confirm intended.
+	    thisPartBases += loc.getStop() - loc.getStart();
+
+	    // Stage this loc in the current output part (toInterval is a helper elsewhere in this class).
+	    outList.add(toInterval(loc, i));
+
+	    boolean partMustStop = false;
+
+	    if(partIdx < (scatterParts.size() - 1)) {
+
+		// If there are n contigs and n parts remaining then we must split here,
+		// otherwise we will run out of contigs.
+
+		int nextPart = partIdx + 1;
+		int nextPartMustStartBy = contigStartLocs.get(nextPart + (contigStartLocs.size() - scatterParts.size()));
+		if(i + 1 == nextPartMustStartBy)
+		    partMustStop = true;
+		
+	    }
+	    else if(i == locs.size() - 1) {
+
+		// We're done! Write the last scatter file.
+		partMustStop = true;
+
+	    }
+	    
+	    if(partMustStop || thisPartBases > idealBasesPerPart) {
+
+		// Ideally we would split here. However, we must make sure to do so
+		// on a contig boundary. Test always passes with partMustStop == true
+		// since that indicates we're at a contig boundary.
+
+		GenomeLoc nextLoc = null;
+		if((i + 1) < locs.size())
+		    nextLoc = locs.get(i+1);
+
+		if(nextLoc == null || !nextLoc.getContig().equals(loc.getContig())) {
+
+		    // Write out this part:
+		    outList.write(scatterParts.get(partIdx));
+
+		    // Reset. If this part ran long, leave the excess in thisPartBases
+		    // and the next will be a little shorter to compensate.
+		    outList = new IntervalList(fileHeader);
+		    thisPartBases -= idealBasesPerPart;
+		    ++partIdx;
+		    
+		}
+
+	    }
+
+	}
+
+    }
+
+    /**
+     * Splits an interval list into multiple sublists.
+     * @param locs The genome locs to split.
+     * @param splits The cumulative (exclusive) stop indices for each sublist, as returned by splitFixedIntervals.
+     * @return A list of lists of genome locs, split according to splits
+     */
+    public static List<List<GenomeLoc>> splitIntervalsToSubLists(List<GenomeLoc> locs, List<Integer> splits) {
+        int start = 0;
+        List<List<GenomeLoc>> sublists = new ArrayList<List<GenomeLoc>>(splits.size());
+        for (Integer stop: splits) {
+            // Each sublist covers the half-open index range [start, stop).
+            List<GenomeLoc> curList = new ArrayList<GenomeLoc>();
+            for (int i = start; i < stop; i++)
+                curList.add(locs.get(i));
+            start = stop;
+            sublists.add(curList);
+        }
+
+        return sublists;
+    }
+
+
+    /**
+     * Splits an interval list into multiple files.
+     * @param fileHeader The sam file header.
+     * @param splits Pre-divided genome locs returned by splitFixedIntervals.
+     * @param scatterParts The output interval lists to write to.
+     * @throws UserException.BadArgumentValue if splits and scatterParts differ in size.
+     */
+    public static void scatterFixedIntervals(SAMFileHeader fileHeader, List<List<GenomeLoc>> splits, List<File> scatterParts) {
+        if (splits.size() != scatterParts.size())
+            throw new UserException.BadArgumentValue("splits", String.format("Split points %d does not equal the number of scatter parts %d.", splits.size(), scatterParts.size()));
+
+        int fileIndex = 0;
+        // locIndex runs across ALL output files, so every interval gets a distinct index
+        // (presumably used by toInterval for naming -- see that helper elsewhere in this class).
+        int locIndex = 1;
+        for (final List<GenomeLoc> split : splits) {
+            IntervalList intervalList = new IntervalList(fileHeader);
+            for (final GenomeLoc loc : split)
+                intervalList.add(toInterval(loc, locIndex++));
+            intervalList.write(scatterParts.get(fileIndex++));
+        }
+    }
+
+    /**
+     * Splits the genome locs up by size into roughly equal parts.
+     * @param locs Genome locs to split.
+     * @param numParts Number of parts to split the locs into.
+     * @return The locs partitioned into numParts sublists, each a contiguous run of the input.
+     */
+    public static List<List<GenomeLoc>> splitFixedIntervals(List<GenomeLoc> locs, int numParts) {
+        if (locs.size() < numParts)
+            throw new UserException.BadArgumentValue("scatterParts", String.format("Cannot scatter %d locs into %d parts.", locs.size(), numParts));
+        // Total bases across all locs, used by the recursion to balance the halves.
+        final long locsSize = intervalSize(locs);
+        final List<Integer> splitPoints = new ArrayList<Integer>();
+        addFixedSplit(splitPoints, locs, locsSize, 0, locs.size(), numParts);
+        // The recursion emits split indices out of order; sort and close with the final stop index.
+        Collections.sort(splitPoints);
+        splitPoints.add(locs.size());
+        return splitIntervalsToSubLists(locs, splitPoints);
+    }
+
+    /**
+     * Splits genome locs into numParts lists of roughly equal base-pair size,
+     * cutting individual locs in two where necessary so each part is close to
+     * the ideal size. The final part absorbs any remainder left by rounding.
+     *
+     * @param locs the genome locs to split
+     * @param numParts the number of parts to produce
+     * @return numParts lists of genome locs covering the input in order
+     */
+    @Requires({"locs != null", "numParts > 0"})
+    @Ensures("result != null")
+    public static List<List<GenomeLoc>> splitLocusIntervals(List<GenomeLoc> locs, int numParts) {
+        // the ideal size of each split
+        final long bp = IntervalUtils.intervalSize(locs);
+        final long idealSplitSize = Math.max((long)Math.floor(bp / (1.0*numParts)), 1);
+
+        // algorithm:
+        // split = ()
+        // set size = 0
+        // pop the head H off locs.
+        // If size + size(H) < splitSize:
+        //      add H to split, continue
+        // If size + size(H) == splitSize:
+        //      done with split, put in splits, restart
+        // if size + size(H) > splitSize:
+        //      cut H into two pieces, first of which has splitSize - size bp
+        //      push both pieces onto locs, continue
+        // The last split is special -- when you have only one split left, it gets all of the remaining locs
+        // to deal with rounding issues
+        final List<List<GenomeLoc>> splits = new ArrayList<List<GenomeLoc>>(numParts);
+
+        LinkedList<GenomeLoc> locsLinkedList = new LinkedList<GenomeLoc>(locs);
+        while ( ! locsLinkedList.isEmpty() ) {
+            if ( splits.size() + 1 == numParts ) {
+                // the last one gets all of the remaining parts
+                splits.add(new ArrayList<GenomeLoc>(locsLinkedList));
+                locsLinkedList.clear();
+            } else {
+                final SplitLocusRecursive one = splitLocusIntervals1(locsLinkedList, idealSplitSize);
+                splits.add(one.split);
+                locsLinkedList = one.remaining;
+            }
+        }
+
+        return splits;
+    }
+
+    /**
+     * Pops locs off the head of {@code remaining} until their combined size reaches
+     * {@code idealSplitSize}, cutting the last loc in two when it would overshoot.
+     *
+     * @param remaining the queue of locs still to assign; consumed destructively
+     * @param idealSplitSize the target number of base pairs for this split
+     * @return the split that was built plus whatever locs are left over
+     */
+    @Requires({"remaining != null", "!remaining.isEmpty()", "idealSplitSize > 0"})
+    @Ensures({"result != null"})
+    static SplitLocusRecursive splitLocusIntervals1(LinkedList<GenomeLoc> remaining, long idealSplitSize) {
+        final List<GenomeLoc> split = new ArrayList<GenomeLoc>();
+        long size = 0;
+
+        while ( ! remaining.isEmpty() ) {
+            GenomeLoc head = remaining.pop();
+            final long newSize = size + head.size();
+
+            if ( newSize == idealSplitSize ) {
+                split.add(head);
+                break; // we are done
+            } else if ( newSize > idealSplitSize ) {
+                // Cut head so the first piece exactly fills out this split.
+                // NOTE(review): assumes GenomeLoc.split(p) divides at genomic position p,
+                // with the first piece covering [start, p) -- confirm against GenomeLoc.
+                final long remainingBp = idealSplitSize - size;
+                final long cutPoint = head.getStart() + remainingBp;
+                GenomeLoc[] parts = head.split((int)cutPoint);
+                remaining.push(parts[1]);
+                remaining.push(parts[0]);
+                // when we loop around, the new head has (idealSplitSize - size) bp,
+                // so the next newSize == size + (idealSplitSize - size) == idealSplitSize
+            } else {
+                split.add(head);
+                size = newSize;
+            }
+        }
+
+        return new SplitLocusRecursive(split, remaining);
+    }
+
+    /**
+     * Setup the intervals to be processed: resolves the include and exclude
+     * bindings, then subtracts the excludes from the includes when present.
+     */
+    public static GenomeLocSortedSet parseIntervalBindings(
+            final ReferenceSequenceFile referenceSequenceFile,
+            final List<IntervalBinding<Feature>> intervals,
+            final IntervalSetRule intervalSetRule, final IntervalMergingRule intervalMergingRule, final int intervalPadding,
+            final List<IntervalBinding<Feature>> excludeIntervals) {
+
+        final Pair<GenomeLocSortedSet, GenomeLocSortedSet> includeExcludePair = parseIntervalBindingsPair(
+                referenceSequenceFile, intervals, intervalSetRule, intervalMergingRule, intervalPadding, excludeIntervals);
+
+        final GenomeLocSortedSet includes = includeExcludePair.getFirst();
+        final GenomeLocSortedSet excludes = includeExcludePair.getSecond();
+
+        // A null exclude set means no -XL arguments were given; nothing to subtract.
+        return excludes == null ? includes : includes.subtractRegions(excludes);
+    }
+
+    /**
+     * Builds the final set of intervals to process from the interval command-line
+     * arguments, subtracting any exclude (-XL) intervals from the include (-L) set
+     * and logging the size of the reduction when excludes are present.
+     *
+     * @param referenceSequenceFile the reference used to parse and bound the intervals
+     * @param argCollection the interval-related command-line arguments
+     * @return the resolved intervals, or null when no interval arguments were given at all
+     */
+    public static GenomeLocSortedSet parseIntervalArguments(final ReferenceSequenceFile referenceSequenceFile, IntervalArgumentCollection argCollection) {
+        GenomeLocSortedSet intervals = null;
+
+        // return if no interval arguments at all
+        if ( argCollection.intervals == null && argCollection.excludeIntervals == null )
+            return intervals;
+
+        // Note that the use of '-L all' is no longer supported.
+
+        // if include argument isn't given, create new set of all possible intervals
+
+        final Pair<GenomeLocSortedSet, GenomeLocSortedSet> includeExcludePair = IntervalUtils.parseIntervalBindingsPair(
+                referenceSequenceFile,
+                argCollection.intervals,
+                argCollection.intervalSetRule, argCollection.intervalMerging, argCollection.intervalPadding,
+                argCollection.excludeIntervals);
+
+        final GenomeLocSortedSet includeSortedSet = includeExcludePair.getFirst();
+        final GenomeLocSortedSet excludeSortedSet = includeExcludePair.getSecond();
+
+        // if no exclude arguments, can return parseIntervalArguments directly
+        if ( excludeSortedSet == null )
+            intervals = includeSortedSet;
+
+            // otherwise there are exclude arguments => must merge include and exclude GenomeLocSortedSets
+        else {
+            intervals = includeSortedSet.subtractRegions(excludeSortedSet);
+
+            // logging messages only printed when exclude (-XL) arguments are given
+            final long toPruneSize = includeSortedSet.coveredSize();
+            final long toExcludeSize = excludeSortedSet.coveredSize();
+            final long intervalSize = intervals.coveredSize();
+            logger.info(String.format("Initial include intervals span %d loci; exclude intervals span %d loci", toPruneSize, toExcludeSize));
+            logger.info(String.format("Excluding %d loci from original intervals (%.2f%% reduction)",
+                    toPruneSize - intervalSize, (toPruneSize - intervalSize) / (0.01 * toPruneSize)));
+        }
+
+        logger.info(String.format("Processing %d bp from intervals", intervals.coveredSize()));
+        return intervals;
+    }
+
+    /**
+     * Resolves the include and exclude interval bindings into a pair of sorted sets.
+     * The second (exclude) element is null when no exclude bindings were supplied.
+     */
+    public static Pair<GenomeLocSortedSet, GenomeLocSortedSet> parseIntervalBindingsPair(
+            final ReferenceSequenceFile referenceSequenceFile,
+            final List<IntervalBinding<Feature>> intervals,
+            final IntervalSetRule intervalSetRule, final IntervalMergingRule intervalMergingRule, final int intervalPadding,
+            final List<IntervalBinding<Feature>> excludeIntervals) {
+        final GenomeLocParser genomeLocParser = new GenomeLocParser(referenceSequenceFile);
+
+        // No include bindings means "everything": cover the whole sequence dictionary.
+        final GenomeLocSortedSet includeSortedSet;
+        if (intervals == null || intervals.size() == 0) {
+            includeSortedSet = GenomeLocSortedSet.createSetFromSequenceDictionary(referenceSequenceFile.getSequenceDictionary());
+        } else {
+            includeSortedSet = loadIntervals(intervals, intervalSetRule, intervalMergingRule, intervalPadding, genomeLocParser);
+        }
+
+        // Exclude bindings are always UNION'ed together and never padded.
+        GenomeLocSortedSet excludeSortedSet = null;
+        if (excludeIntervals != null && excludeIntervals.size() > 0) {
+            excludeSortedSet = loadIntervals(excludeIntervals, IntervalSetRule.UNION, intervalMergingRule, 0, genomeLocParser);
+        }
+        return new Pair<GenomeLocSortedSet, GenomeLocSortedSet>(includeSortedSet, excludeSortedSet);
+    }
+
+    /**
+     * Loads the intervals from each binding, optionally padding them, and combines
+     * them under the given set rule before sorting and merging the result.
+     *
+     * @param intervalBindings the interval sources to load
+     * @param rule how successive sources are combined with the accumulated intervals
+     * @param intervalMergingRule how adjacent/overlapping intervals are merged at the end
+     * @param padding base pairs of flank added to each source's intervals when > 0
+     * @param genomeLocParser parser used to create genome locs
+     * @return the sorted, merged set of all loaded intervals
+     */
+    public static GenomeLocSortedSet loadIntervals(
+            final List<IntervalBinding<Feature>> intervalBindings,
+            final IntervalSetRule rule, final IntervalMergingRule intervalMergingRule, final int padding,
+            final GenomeLocParser genomeLocParser) {
+        List<GenomeLoc> allIntervals = new ArrayList<GenomeLoc>();
+        // Iterate with the parameterized type (matching the declared parameter) rather
+        // than the raw IntervalBinding, so the unchecked-conversion suppression that
+        // used to sit on the intervals local should no longer be needed.
+        for ( final IntervalBinding<Feature> intervalBinding : intervalBindings) {
+            List<GenomeLoc> intervals = intervalBinding.getIntervals(genomeLocParser);
+
+            if ( intervals.isEmpty() ) {
+                logger.warn("The interval file " + intervalBinding.getSource() + " contains no intervals that could be parsed.");
+            }
+
+            if ( padding > 0 ) {
+                intervals = getIntervalsWithFlanks(genomeLocParser, intervals, padding);
+            }
+
+            allIntervals = mergeListsBySetOperator(intervals, allIntervals, rule);
+        }
+
+        return sortAndMergeIntervals(genomeLocParser, allIntervals, intervalMergingRule);
+    }
+
+    /**
+     * Value pair returned by splitLocusIntervals1: the split that was just
+     * completed and the locs still left to assign to later splits.
+     */
+    private final static class SplitLocusRecursive {
+        // The completed split.
+        final List<GenomeLoc> split;
+        // The locs not yet consumed, to be fed into the next call.
+        final LinkedList<GenomeLoc> remaining;
+
+        @Requires({"split != null", "remaining != null"})
+        private SplitLocusRecursive(final List<GenomeLoc> split, final LinkedList<GenomeLoc> remaining) {
+            this.split = split;
+            this.remaining = remaining;
+        }
+    }
+
+    public static List<GenomeLoc> flattenSplitIntervals(List<List<GenomeLoc>> splits) {
+        final List<GenomeLoc> locs = new ArrayList<GenomeLoc>();
+        for ( final List<GenomeLoc> split : splits )
+            locs.addAll(split);
+        return locs;
+    }
+
+    /**
+     * Recursively chooses split indices dividing locs[startIndex, stopIndex) into
+     * numParts pieces of roughly equal base-pair size, appending each chosen index
+     * to splitPoints (unsorted; the caller sorts them afterwards).
+     *
+     * @param splitPoints accumulator for the chosen split indices
+     * @param locs the full list of locs being partitioned
+     * @param locsSize total base pairs in locs[startIndex, stopIndex)
+     * @param startIndex inclusive start of the range being divided
+     * @param stopIndex exclusive end of the range being divided
+     * @param numParts number of pieces to divide this range into
+     */
+    private static void addFixedSplit(List<Integer> splitPoints, List<GenomeLoc> locs, long locsSize, int startIndex, int stopIndex, int numParts) {
+        // A single part needs no split point.
+        if (numParts < 2)
+            return;
+        // Split the range into a "left half" of halfParts parts and a "right half"
+        // of (numParts - halfParts) parts, then recurse into each side.
+        int halfParts = (numParts + 1) / 2;
+        Pair<Integer, Long> splitPoint = getFixedSplit(locs, locsSize, startIndex, stopIndex, halfParts, numParts - halfParts);
+        int splitIndex = splitPoint.first;
+        long splitSize = splitPoint.second;
+        splitPoints.add(splitIndex);
+        addFixedSplit(splitPoints, locs, splitSize, startIndex, splitIndex, halfParts);
+        addFixedSplit(splitPoints, locs, locsSize - splitSize, splitIndex, stopIndex, numParts - halfParts);
+    }
+
+    private static Pair<Integer, Long> getFixedSplit(List<GenomeLoc> locs, long locsSize, int startIndex, int stopIndex, int minLocs, int maxLocs) {
+        int splitIndex = startIndex;
+        long splitSize = 0;
+        for (int i = 0; i < minLocs; i++) {
+            splitSize += locs.get(splitIndex).size();
+            splitIndex++;
+        }
+        long halfSize = locsSize / 2;
+        while (splitIndex < (stopIndex - maxLocs) && splitSize < halfSize) {
+            splitSize += locs.get(splitIndex).size();
+            splitIndex++;
+        }
+        return new Pair<Integer, Long>(splitIndex, splitSize);
+    }
+
+    /**
+     * Converts a GenomeLoc to a picard interval.
+     * @param loc The GenomeLoc.
+     * @param locIndex The loc index for use in the file.
+     * @return The picard interval.
+     */
+    private static htsjdk.samtools.util.Interval toInterval(GenomeLoc loc, int locIndex) {
+        return new htsjdk.samtools.util.Interval(loc.getContig(), loc.getStart(), loc.getStop(), false, "interval_" + locIndex);
+    }
+
+    /**
+     * merge a list of genome locs that may be overlapping, returning the list of unique genomic locations
+     *
+     * @param raw the unchecked genome loc list
+     * @param rule the merging rule we're using
+     *
+     * @return the list of merged locations
+     */
+    public static List<GenomeLoc> mergeIntervalLocations(final List<GenomeLoc> raw, IntervalMergingRule rule) {
+        if (raw.size() <= 1)
+            return Collections.unmodifiableList(raw);
+        else {
+            ArrayList<GenomeLoc> merged = new ArrayList<GenomeLoc>();
+            Iterator<GenomeLoc> it = raw.iterator();
+            GenomeLoc prev = it.next();
+            while (it.hasNext()) {
+                GenomeLoc curr = it.next();
+                if (prev.overlapsP(curr)) {
+                    prev = prev.merge(curr);
+                } else if (prev.contiguousP(curr) && (rule == null || rule == IntervalMergingRule.ALL)) {
+                    prev = prev.merge(curr);
+                } else {
+                    merged.add(prev);
+                    prev = curr;
+                }
+            }
+            merged.add(prev);
+            return Collections.unmodifiableList(merged);
+        }
+    }
+
+    /**
+     * Sums the sizes (in base pairs) of all the given genome locs.
+     * Overlapping locs are counted independently, not deduplicated.
+     * @param locs the locs to measure
+     * @return the total size across all locs
+     */
+    public static long intervalSize(final List<GenomeLoc> locs) {
+        long total = 0L;
+        for (int i = 0; i < locs.size(); i++)
+            total += locs.get(i).size();
+        return total;
+    }
+
+    /**
+     * Reads intervals from inputIntervals, computes the regions flanking them by
+     * basePairs on each side, and writes the flanks to flankingIntervals.
+     *
+     * @param reference the reference fasta used to build the genome loc parser
+     * @param inputIntervals file containing the original intervals
+     * @param flankingIntervals output file for the computed flanks
+     * @param basePairs number of base pairs on each side of each interval
+     */
+    public static void writeFlankingIntervals(File reference, File inputIntervals, File flankingIntervals, int basePairs) {
+        final ReferenceSequenceFile referenceSequenceFile = createReference(reference);
+        GenomeLocParser parser = new GenomeLocParser(referenceSequenceFile);
+        List<GenomeLoc> originalList = intervalFileToList(parser, inputIntervals.getAbsolutePath());
+
+        if (originalList.isEmpty())
+            throw new UserException.MalformedFile(inputIntervals, "File contains no intervals");
+
+        List<GenomeLoc> flankingList = getFlankingIntervals(parser, originalList, basePairs);
+
+        if (flankingList.isEmpty())
+            throw new UserException.MalformedFile(inputIntervals, "Unable to produce any flanks for the intervals");
+
+        // Write the flanks as a picard interval list, numbering intervals from 1.
+        SAMFileHeader samFileHeader = new SAMFileHeader();
+        samFileHeader.setSequenceDictionary(referenceSequenceFile.getSequenceDictionary());
+        IntervalList intervalList = new IntervalList(samFileHeader);
+        int i = 0;
+        for (GenomeLoc loc: flankingList)
+            intervalList.add(toInterval(loc, ++i));
+        intervalList.write(flankingIntervals);
+    }
+
+    /**
+     * Returns a list of intervals between the passed-in locs. Does not extend UNMAPPED locs.
+     * @param parser A genome loc parser for creating the new intervals
+     * @param locs Original genome locs
+     * @param basePairs Number of base pairs on each side of loc
+     * @return The list of intervals between the locs
+     */
+    public static List<GenomeLoc> getFlankingIntervals(final GenomeLocParser parser, final List<GenomeLoc> locs, final int basePairs) {
+        List<GenomeLoc> sorted = sortAndMergeIntervals(parser, locs, IntervalMergingRule.ALL).toList();
+
+        if (sorted.size() == 0)
+            return Collections.emptyList();
+
+        // Process each contig independently; UNMAPPED locs are dropped by splitByContig.
+        LinkedHashMap<String, List<GenomeLoc>> locsByContig = splitByContig(sorted);
+        List<GenomeLoc> expanded = new ArrayList<GenomeLoc>();
+        for (Map.Entry<String, List<GenomeLoc>> contig: locsByContig.entrySet()) {
+            List<GenomeLoc> contigLocs = contig.getValue();
+            int contigLocsSize = contigLocs.size();
+
+            GenomeLoc startLoc, stopLoc;
+
+            // Create loc at start of the list
+            startLoc = parser.createGenomeLocAtStart(contigLocs.get(0), basePairs);
+            if (startLoc != null)
+                expanded.add(startLoc);
+
+            // Create locs between each loc[i] and loc[i+1]
+            for (int i = 0; i < contigLocsSize - 1; i++) {
+                stopLoc = parser.createGenomeLocAtStop(contigLocs.get(i), basePairs);
+                startLoc = parser.createGenomeLocAtStart(contigLocs.get(i + 1), basePairs);
+                if (stopLoc.getStop() + 1 >= startLoc.getStart()) {
+                    // NOTE: This is different than GenomeLoc.merge()
+                    // merge() returns a loc which covers the entire range of stop and start,
+                    // possibly returning positions inside loc(i) or loc(i+1)
+                    // We want to make sure that the start of the stopLoc is used, and the stop of the startLoc
+                    GenomeLoc merged = parser.createGenomeLoc(
+                            stopLoc.getContig(), stopLoc.getStart(), startLoc.getStop());
+                    expanded.add(merged);
+                } else {
+                    expanded.add(stopLoc);
+                    expanded.add(startLoc);
+                }
+            }
+
+            // Create loc at the end of the list
+            stopLoc = parser.createGenomeLocAtStop(contigLocs.get(contigLocsSize - 1), basePairs);
+            if (stopLoc != null)
+                expanded.add(stopLoc);
+        }
+        return expanded;
+    }
+
+    /**
+     * Pads each loc by basePairs on both sides, then sorts and merges the padded
+     * intervals (padded locs that overlap or abut become a single interval).
+     * @param parser A genome loc parser for creating the new intervals
+     * @param locs Original genome locs
+     * @param basePairs Number of base pairs added on each side of each loc
+     * @return The sorted, merged list of padded intervals
+     */
+    public static List<GenomeLoc> getIntervalsWithFlanks(final GenomeLocParser parser, final List<GenomeLoc> locs, final int basePairs) {
+
+        if (locs.size() == 0)
+            return Collections.emptyList();
+
+        final List<GenomeLoc> expanded = new ArrayList<GenomeLoc>();
+        for ( final GenomeLoc loc : locs ) {
+            expanded.add(parser.createPaddedGenomeLoc(loc, basePairs));
+        }
+
+        return sortAndMergeIntervals(parser, expanded, IntervalMergingRule.ALL).toList();
+    }
+
+    /** Opens the given fasta as an indexed, caching reference sequence file. */
+    private static ReferenceSequenceFile createReference(final File fastaFile) {
+        return CachingIndexedFastaSequenceFile.checkAndCreate(fastaFile);
+    }
+
+    private static LinkedHashMap<String, List<GenomeLoc>> splitByContig(List<GenomeLoc> sorted) {
+        LinkedHashMap<String, List<GenomeLoc>> splits = new LinkedHashMap<String, List<GenomeLoc>>();
+        GenomeLoc last = null;
+        List<GenomeLoc> contigLocs = null;
+        for (GenomeLoc loc: sorted) {
+            if (GenomeLoc.isUnmapped(loc))
+                continue;
+            if (last == null || !last.onSameContig(loc)) {
+                contigLocs = new ArrayList<GenomeLoc>();
+                splits.put(loc.getContig(), contigLocs);
+            }
+            contigLocs.add(loc);
+            last = loc;
+        }
+        return splits;
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/io/FileExtension.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/io/FileExtension.java
new file mode 100644
index 0000000..22ec9fe
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/io/FileExtension.java
@@ -0,0 +1,37 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.io;
+
+import java.io.File;
+
+public interface FileExtension {
+    /**
+     * Returns a copy of this FileExtension pointing at a different path.
+     * @param path New path.
+     * @return New FileExtension
+     */
+    File withPath(String path);
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/io/HardThresholdingOutputStream.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/io/HardThresholdingOutputStream.java
new file mode 100644
index 0000000..1a38ea0
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/io/HardThresholdingOutputStream.java
@@ -0,0 +1,56 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.io;
+
+import org.apache.commons.io.output.ThresholdingOutputStream;
+
+import java.io.IOException;
+
+/**
+ * An output stream which stops at the threshold
+ * instead of potentially triggering early.
+ */
+public abstract class HardThresholdingOutputStream extends ThresholdingOutputStream {
+    protected HardThresholdingOutputStream(int threshold) {
+        super(threshold);
+    }
+
+    @Override
+    public void write(byte[] b) throws IOException {
+        // Delegate to the range-based overload so the threshold split applies.
+        this.write(b, 0, b.length);
+    }
+
+    @Override
+    public void write(byte[] b, int off, int len) throws IOException {
+        // Bytes we may still write before hitting the threshold. While the
+        // threshold is not yet exceeded, getByteCount() <= threshold, so the
+        // narrowing cast is safe here.
+        final int remaining = this.getThreshold() - (int)this.getByteCount();
+        final boolean wouldCross = !isThresholdExceeded() && len > remaining;
+        if (wouldCross) {
+            // Split exactly at the threshold: the first chunk fills up to it and
+            // the second starts past it, so the threshold event fires in between.
+            super.write(b, off, remaining);
+            super.write(b, off + remaining, len - remaining);
+        } else {
+            super.write(b, off, len);
+        }
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/io/IOUtils.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/io/IOUtils.java
new file mode 100644
index 0000000..98e1e62
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/io/IOUtils.java
@@ -0,0 +1,575 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.io;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.commons.io.FilenameUtils;
+import org.apache.commons.io.LineIterator;
+import org.apache.commons.lang.StringUtils;
+import org.apache.log4j.Logger;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+import org.broadinstitute.gatk.utils.exceptions.GATKException;
+import org.broadinstitute.gatk.utils.exceptions.UserException;
+
+import java.io.*;
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+import java.util.*;
+
+public class IOUtils {
+    private static Logger logger = Logger.getLogger(IOUtils.class);
+    private static final File DEV_DIR = new File("/dev");
+
+    /**
+     * Checks if the temp directory has been setup and throws an exception if the user hasn't set it correctly.
+     *
+     * @param tempDir Temporary directory.
+     */
+    public static void checkTempDir(File tempDir) {
+        if (isDefaultTempDir(tempDir))
+            throw new UserException.BadTmpDir("java.io.tmpdir must be explicitly set");
+        // Create the directory (and any missing parents) if it does not exist yet.
+        if (!tempDir.exists() && !tempDir.mkdirs())
+            throw new UserException.BadTmpDir("Could not create directory: " + tempDir.getAbsolutePath());
+    }
+
+    /**
+     * Returns true if the directory is a default temporary directory.
+     * @param tempDir the directory to check.
+     * @return true if the directory is a default temporary directory.
+     */
+    public static boolean isDefaultTempDir(File tempDir) {
+        final String path = tempDir.getAbsolutePath();
+        // Keeps the user from leaving the temp directory as the default, and on Macs from having pluses
+        // in the path which can cause problems with the Google Reflections library.
+        // see also: http://benjchristensen.com/2009/09/22/mac-osx-10-6-java-java-io-tmpdir/
+        if (path.startsWith("/var/folders/"))
+            return true;
+        return path.equals("/tmp") || path.equals("/tmp/");
+    }
+
+    /**
+     * Creates a temp directory with the prefix and optional suffix,
+     * under the JVM default temporary-file directory.
+     *
+     * @param prefix       Prefix for the directory name.
+     * @param suffix       Optional suffix for the directory name.
+     * @return The created temporary directory.
+     */
+    public static File tempDir(String prefix, String suffix) {
+        // Delegates with a null parent, meaning "use the default temp directory".
+        return tempDir(prefix, suffix, null);
+    }
+
+    /**
+     * Creates a temp directory with the prefix and optional suffix.
+     *
+     * @param prefix        Prefix for the directory name.
+     * @param suffix        Optional suffix for the directory name.
+     * @param tempDirParent Parent directory for the temp directory.
+     * @return The created temporary directory.
+     */
+    public static File tempDir(String prefix, String suffix, File tempDirParent) {
+        try {
+            File parent = tempDirParent;
+            if (parent == null)
+                parent = FileUtils.getTempDirectory();
+            if (!parent.exists() && !parent.mkdirs())
+                throw new UserException.BadTmpDir("Could not create temp directory: " + parent);
+            // createTempFile reserves a unique name; the file is then replaced
+            // with a directory of the same name.
+            final File temp = File.createTempFile(prefix, suffix, parent);
+            if (!temp.delete())
+                throw new UserException.BadTmpDir("Could not delete sub file: " + temp.getAbsolutePath());
+            if (!temp.mkdir())
+                throw new UserException.BadTmpDir("Could not create sub directory: " + temp.getAbsolutePath());
+            return absolute(temp);
+        } catch (IOException e) {
+            throw new UserException.BadTmpDir(e.getMessage());
+        }
+    }
+
+    /**
+     * Writes content to a temp file and returns the path to the temporary file.
+     *
+     * @param content   to write.
+     * @param prefix    Prefix for the temp file.
+     * @param suffix    Suffix for the temp file.
+     * @return the path to the temp file.
+     */
+    public static File writeTempFile(String content, String prefix, String suffix) {
+        // Delegates with a null directory, meaning "use the default temp directory".
+        return writeTempFile(content, prefix, suffix, null);
+    }
+
+    /**
+     * Writes content to a temp file and returns the path to the temporary file.
+     *
+     * @param content   to write.
+     * @param prefix    Prefix for the temp file.
+     * @param suffix    Suffix for the temp file.
+     * @param directory Directory for the temp file.
+     * @return the path to the temp file.
+     */
+    public static File writeTempFile(String content, String prefix, String suffix, File directory) {
+        try {
+            // Reserve the file first, then fill it with the content.
+            final File created = File.createTempFile(prefix, suffix, directory);
+            final File tempFile = absolute(created);
+            FileUtils.writeStringToFile(tempFile, content);
+            return tempFile;
+        } catch (IOException e) {
+            throw new UserException.BadTmpDir(e.getMessage());
+        }
+    }
+
+    /**
+     * Waits for NFS to propagate a file creation, imposing a timeout.
+     *
+     * Based on Apache Commons IO FileUtils.waitFor()
+     *
+     * @param file    The file to wait for.
+     * @param seconds The maximum time in seconds to wait.
+     * @return true if the file exists by the time the wait ends.
+     */
+    public static boolean waitFor(File file, int seconds) {
+        // The collection overload returns the files still missing; empty means the file appeared.
+        return waitFor(Collections.singletonList(file), seconds).isEmpty();
+    }
+
+    /**
+     * Waits for NFS to propagate a file creation, imposing a timeout.
+     *
+     * Based on Apache Commons IO FileUtils.waitFor()
+     *
+     * @param files   The list of files to wait for.
+     * @param seconds The maximum time in seconds to wait.
+     * @return Files that still do not exist at the end of the wait, or an empty list if all files exist.
+     */
+    public static List<File> waitFor(Collection<File> files, int seconds) {
+        long timeout = 0;
+        long tick = 0;
+        List<File> missingFiles = new ArrayList<File>();
+        for (File file : files)
+            if (!file.exists())
+                missingFiles.add(file);
+
+        // Poll every 100ms; every 10 ticks counts one elapsed second toward the timeout.
+        while (!missingFiles.isEmpty() && timeout <= seconds) {
+            if (tick >= 10) {
+                tick = 0;
+                timeout++;
+            }
+            tick++;
+            try {
+                Thread.sleep(100);
+            } catch (InterruptedException e) {
+                // Restore the interrupt status and stop waiting early, rather than
+                // silently swallowing the interrupt as before.
+                Thread.currentThread().interrupt();
+                break;
+            }
+            List<File> newMissingFiles = new ArrayList<File>();
+            for (File file : missingFiles)
+                if (!file.exists())
+                    newMissingFiles.add(file);
+            missingFiles = newMissingFiles;
+        }
+        return missingFiles;
+    }
+
+    /**
+     * Returns the directory at the number of levels deep.
+     * For example 2 levels of /path/to/dir will return /path/to
+     *
+     * @param dir   Directory path.
+     * @param level how many levels deep from the root.
+     * @return The path to the parent directory that is level-levels deep.
+     */
+    public static File dirLevel(File dir, int level) {
+        List<File> directories = new ArrayList<File>();
+        File parentDir = absolute(dir);
+        while (parentDir != null) {
+            directories.add(0, parentDir);
+            parentDir = parentDir.getParentFile();
+        }
+        if (directories.size() <= level)
+            return directories.get(directories.size() - 1);
+        else
+            return directories.get(level);
+    }
+
+    /**
+     * Returns the sub path rooted at the parent.
+     *
+     * @param parent The parent directory.
+     * @param path   The sub path to append to the parent, if the path is not absolute.
+     * @return The absolute path to the file in the parent dir if the path was not absolute, otherwise the original path.
+     */
+    public static File absolute(File parent, String path) {
+        return absolute(parent, new File(path));
+    }
+
+    /**
+     * Returns the sub path rooted at the parent.
+     *
+     * @param parent The parent directory.
+     * @param file   The sub path to append to the parent, if the path is not absolute.
+     * @return The absolute path to the file in the parent dir if the path was not absolute, otherwise the original path.
+     */
+    public static File absolute(File parent, File file) {
+        String newPath;
+        if (file.isAbsolute())
+            newPath = absolutePath(file);
+        else
+            newPath = absolutePath(new File(parent, file.getPath()));
+        return replacePath(file, newPath);
+    }
+
+    /**
+     * A mix of getCanonicalFile and getAbsoluteFile that returns the
+     * absolute path to the file without deferencing symbolic links.
+     *
+     * @param file the file.
+     * @return the absolute path to the file.
+     */
+    public static File absolute(File file) {
+        return replacePath(file, absolutePath(file));
+    }
+
+    private static String absolutePath(File file) {
+        File fileAbs = file.getAbsoluteFile();
+        LinkedList<String> names = new LinkedList<String>();
+        while (fileAbs != null) {
+            String name = fileAbs.getName();
+            fileAbs = fileAbs.getParentFile();
+
+            if (".".equals(name)) {
+                /* skip */
+
+                /* TODO: What do we do for ".."?
+              } else if (name == "..") {
+
+                CentOS tcsh says use getCanonicalFile:
+                ~ $ mkdir -p test1/test2
+                ~ $ ln -s test1/test2 test3
+                ~ $ cd test3/..
+                ~/test1 $
+
+                Mac bash says keep going with getAbsoluteFile:
+                ~ $ mkdir -p test1/test2
+                ~ $ ln -s test1/test2 test3
+                ~ $ cd test3/..
+                ~ $
+
+                For now, leave it and let the shell figure it out.
+                */
+            } else {
+                names.add(0, name);
+            }
+        }
+
+        return ("/" + StringUtils.join(names, "/"));
+    }
+
+    private static File replacePath(File file, String path) {
+        if (file instanceof FileExtension)
+            return ((FileExtension)file).withPath(path);
+        if (!File.class.equals(file.getClass()))
+            throw new GATKException("Sub classes of java.io.File must also implement FileExtension");
+        return new File(path);
+    }
+
+    /**
+     * Returns the last lines of the file.
+     * NOTE: This is only safe to run on smaller files!
+     *
+     * @param file  File to read.
+     * @param count Maximum number of lines to return.
+     * @return The last count lines from file.
+     * @throws IOException When unable to read the file.
+     */
+    public static List<String> tail(File file, int count) throws IOException {
+        LinkedList<String> tailLines = new LinkedList<String>();
+        FileReader reader = new FileReader(file);
+        try {
+            LineIterator iterator = org.apache.commons.io.IOUtils.lineIterator(reader);
+            int lineCount = 0;
+            while (iterator.hasNext()) {
+                String line = iterator.nextLine();
+                lineCount++;
+                if (lineCount > count)
+                    tailLines.removeFirst();
+                tailLines.offer(line);
+            }
+        } finally {
+            org.apache.commons.io.IOUtils.closeQuietly(reader);
+        }
+        return tailLines;
+    }
+
+    /**
+     * Tries to delete a file. Emits a warning if the file
+     * is not a special file and was unable to be deleted.
+     *
+     * @param file File to delete.
+     * @return true if the file was deleted.
+     */
+    public static boolean tryDelete(File file) {
+        if (isSpecialFile(file)) {
+            logger.debug("Not trying to delete " + file);
+            return false;
+        }
+        boolean deleted = FileUtils.deleteQuietly(file);
+        if (deleted)
+            logger.debug("Deleted " + file);
+        else if (file.exists())
+            logger.warn("Unable to delete " + file);
+        return deleted;
+    }
+
+    /**
+     * Writes the an embedded resource to a temp file.
+     * File is not scheduled for deletion and must be cleaned up by the caller.
+     * @param resource Embedded resource.
+     * @return Path to the temp file with the contents of the resource.
+     */
+    public static File writeTempResource(Resource resource) {
+        File temp;
+        try {
+            temp = File.createTempFile(FilenameUtils.getBaseName(resource.getPath()) + ".", "." + FilenameUtils.getExtension(resource.getPath()));
+        } catch (IOException e) {
+            throw new UserException.BadTmpDir(e.getMessage());
+        }
+        writeResource(resource, temp);
+        return temp;
+    }
+
+    /**
+     * Writes the an embedded resource to a file.
+     * File is not scheduled for deletion and must be cleaned up by the caller.
+     * @param resource Embedded resource.
+     * @param file File path to write.
+     */
+    public static void writeResource(Resource resource, File file) {
+        String path = resource.getPath();
+        InputStream inputStream = resource.getResourceContentsAsStream();
+        OutputStream outputStream = null;
+        try {
+            outputStream = FileUtils.openOutputStream(file);
+            org.apache.commons.io.IOUtils.copy(inputStream, outputStream);
+        } catch (IOException e) {
+            throw new GATKException(String.format("Unable to copy resource '%s' to '%s'", path, file), e);
+        } finally {
+            org.apache.commons.io.IOUtils.closeQuietly(inputStream);
+            org.apache.commons.io.IOUtils.closeQuietly(outputStream);
+        }
+    }
+
+    /**
+     * Returns a file throwing a UserException if the file cannot be read.
+     * @param path File path
+     * @return LineIterator
+     */
+    public static LineIterator lineIterator(String path) {
+        return lineIterator(new File(path));
+    }
+
+    /**
+     * Returns a file throwing a UserException if the file cannot be read.
+     * @param file File
+     * @return LineIterator
+     */
+    public static LineIterator lineIterator(File file) {
+        try {
+            return FileUtils.lineIterator(file);
+        } catch (IOException e) {
+            throw new UserException.CouldNotReadInputFile(file, e);
+        }
+
+    }
+
+    /**
+     * Returns true if the file is a special file.
+     * @param file File path to check.
+     * @return true if the file is a special file.
+     */
+    public static boolean isSpecialFile(File file) {
+        return file != null && (file.getAbsolutePath().startsWith("/dev/") || file.equals(DEV_DIR));
+    }
+
+    /**
+     * Reads the entirety of the given file into a byte array. Uses a read buffer size of 4096 bytes.
+     *
+     * @param source File to read
+     * @return The contents of the file as a byte array
+     */
+    public static byte[] readFileIntoByteArray ( File source ) {
+        return readFileIntoByteArray(source, 4096);
+    }
+
+    /**
+     * Reads the entirety of the given file into a byte array using the requested read buffer size.
+     *
+     * @param source File to read
+     * @param readBufferSize Number of bytes to read in at one time
+     * @return The contents of the file as a byte array
+     */
+    public static byte[] readFileIntoByteArray ( File source, int readBufferSize ) {
+        if ( source == null ) {
+            throw new ReviewedGATKException("Source file was null");
+        }
+
+        byte[] fileContents;
+
+        try {
+            fileContents = readStreamIntoByteArray(new FileInputStream(source), readBufferSize);
+        }
+        catch ( FileNotFoundException e ) {
+            throw new UserException.CouldNotReadInputFile(source, e);
+        }
+
+        if ( fileContents.length != source.length() ) {
+            throw new UserException.CouldNotReadInputFile(String.format("Unable to completely read file %s: read only %d/%d bytes",
+                                                          source.getAbsolutePath(), fileContents.length, source.length()));
+        }
+
+        return fileContents;
+    }
+
+    /**
+     * Reads all data from the given stream into a byte array. Uses a read buffer size of 4096 bytes.
+     *
+     * @param in Stream to read data from
+     * @return The contents of the stream as a byte array
+     */
+    public static byte[] readStreamIntoByteArray ( InputStream in ) {
+        return readStreamIntoByteArray(in, 4096);
+    }
+
+    /**
+     * Reads all data from the given stream into a byte array using the requested read buffer size.
+     *
+     * @param in Stream to read data from
+     * @param readBufferSize Number of bytes to read in at one time
+     * @return The contents of the stream as a byte array
+     */
+    public static byte[] readStreamIntoByteArray ( InputStream in, int readBufferSize ) {
+        if ( in == null ) {
+            throw new ReviewedGATKException("Input stream was null");
+        }
+        else if ( readBufferSize <= 0 ) {
+            throw new ReviewedGATKException("Read buffer size must be > 0");
+        }
+
+        // Use a fixed-size buffer for each read, but a dynamically-growing buffer
+        // to hold the accumulated contents of the file/stream:
+        byte[] readBuffer = new byte[readBufferSize];
+        ByteArrayOutputStream fileBuffer = new ByteArrayOutputStream(readBufferSize * 4);
+
+        try {
+            try {
+                int currentBytesRead;
+
+                while ( (currentBytesRead = in.read(readBuffer, 0, readBuffer.length)) >= 0 ) {
+                    fileBuffer.write(readBuffer, 0, currentBytesRead);
+                }
+            }
+            finally {
+                in.close();
+            }
+        }
+        catch ( IOException e ) {
+            throw new UserException.CouldNotReadInputFile("I/O error reading from input stream", e);
+        }
+
+        return fileBuffer.toByteArray();
+    }
+
+    /**
+     * Writes the given array of bytes to a file
+     *
+     * @param bytes Data to write
+     * @param destination File to write the data to
+     */
+    public static void writeByteArrayToFile ( byte[] bytes, File destination ) {
+        if ( destination == null ) {
+            throw new ReviewedGATKException("Destination file was null");
+        }
+
+        try {
+            writeByteArrayToStream(bytes, new FileOutputStream(destination));
+        }
+        catch ( FileNotFoundException e ) {
+            throw new UserException.CouldNotCreateOutputFile(destination, e);
+        }
+    }
+
+    /**
+     * Writes the given array of bytes to a stream
+     *
+     * @param bytes Data to write
+     * @param out Stream to write the data to
+     */
+    public static void writeByteArrayToStream ( byte[] bytes, OutputStream out ) {
+        if ( bytes == null || out == null ) {
+            throw new ReviewedGATKException("Data to write or output stream was null");
+        }
+
+        try {
+            try {
+                out.write(bytes);
+            }
+            finally {
+                out.close();
+            }
+        }
+        catch ( IOException e ) {
+            throw new UserException.CouldNotCreateOutputFile("I/O error writing to output stream", e);
+        }
+    }
+
+    /**
+     * Determines the uncompressed size of a GZIP file. Uses the GZIP ISIZE field in the last
+     * 4 bytes of the file to get this information.
+     *
+     * @param gzipFile GZIP-format file whose uncompressed size to determine
+     * @return The uncompressed size (in bytes) of the GZIP file
+     */
+    public static int getGZIPFileUncompressedSize ( File gzipFile ) {
+        if ( gzipFile == null ) {
+            throw new ReviewedGATKException("GZIP file to examine was null");
+        }
+
+        try {
+            // The GZIP ISIZE field holds the uncompressed size of the compressed data.
+            // It occupies the last 4 bytes of any GZIP file:
+            RandomAccessFile in = new RandomAccessFile(gzipFile, "r");
+            in.seek(gzipFile.length() - 4);
+            byte[] sizeBytes = new byte[4];
+            in.read(sizeBytes, 0, 4);
+
+            ByteBuffer byteBuf = ByteBuffer.wrap(sizeBytes);
+            byteBuf.order(ByteOrder.LITTLE_ENDIAN);   // The GZIP spec mandates little-endian byte order
+            int uncompressedSize = byteBuf.getInt();
+
+            // If the size read in is negative, we've overflowed our signed integer:
+            if ( uncompressedSize < 0 ) {
+                throw new UserException.CouldNotReadInputFile(String.format("Cannot accurately determine the uncompressed size of file %s " +
+                                                               "because it's either larger than %d bytes or the GZIP ISIZE field is corrupt",
+                                                               gzipFile.getAbsolutePath(), Integer.MAX_VALUE));
+            }
+
+            return uncompressedSize;
+        }
+        catch ( IOException e ) {
+            throw new UserException.CouldNotReadInputFile(gzipFile, e);
+        }
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/io/ReferenceBacked.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/io/ReferenceBacked.java
new file mode 100644
index 0000000..bb5e80b
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/io/ReferenceBacked.java
@@ -0,0 +1,33 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.io;
+
+import java.io.File;
+
+public interface ReferenceBacked {
+    public File getReferenceFile();
+    public void setReferenceFile(final File reference);
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/io/Resource.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/io/Resource.java
new file mode 100644
index 0000000..9b8682a
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/io/Resource.java
@@ -0,0 +1,142 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.io;
+
+import java.io.File;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.SequenceInputStream;
+import java.net.URL;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.Enumeration;
+import java.util.List;
+
/**
 * Identifies an embedded resource by path, optionally relative to a class.
 */
public class Resource {
    private final String path;
    private final Class<?> relativeClass;
    private final ClassLoader relativeClassLoader;

    /**
     * Create a resource with a path and a relative class.
     * @param path Relative or absolute path to the resource.
     * @param relativeClass Relative class to use as a class loader and for a relative package.
     *
     * If the relative class is null then the system classloader will be used and the path must be absolute.
     */
    public Resource(String path, Class<?> relativeClass) {
        this.path = path;
        this.relativeClass = relativeClass;
        // Some classes (e.g. those loaded by the bootstrap loader) report a null
        // class loader; fall back to the system class loader in that case.
        final ClassLoader loader = (relativeClass == null) ? null : relativeClass.getClassLoader();
        this.relativeClassLoader = (loader == null) ? ClassLoader.getSystemClassLoader() : loader;
    }

    /** @return the class this resource is resolved relative to, possibly null. */
    public Class<?> getRelativeClass() {
        return relativeClass;
    }

    /** @return the class loader used to locate this resource, never null. */
    public ClassLoader getRelativeClassLoader() {
        return relativeClassLoader;
    }

    /** @return the raw path this resource was created with. */
    public String getPath() {
        return path;
    }

    /**
     * @return the path prefixed with the relative class's package directory,
     *         or the raw path when there is no relative class or the path is absolute.
     */
    public String getFullPath() {
        if (relativeClass == null || new File(path).isAbsolute())
            return path;
        final String packageDir = relativeClass.getPackage().getName().replace('.', File.separatorChar);
        return packageDir + File.separator + path;
    }

    /**
     * Get the contents of this resource as an InputStream.
     * @throws IllegalArgumentException if the resource cannot be read
     * @return an input stream that will read the contents of this resource
     */
    public InputStream getResourceContentsAsStream() {
        if (getRelativeClass() == null) {
            final InputStream systemStream = ClassLoader.getSystemResourceAsStream(path);
            if (systemStream == null)
                throw new IllegalArgumentException("Resource not found: " + path);
            return systemStream;
        }
        final InputStream classStream = getRelativeClass().getResourceAsStream(path);
        if (classStream == null)
            throw new IllegalArgumentException("Resource not found relative to " + getRelativeClass() + ": " + path);
        return classStream;
    }

    /**
     * Get the contents of every classpath match for this resource, one stream per match.
     * @throws IllegalArgumentException if no matching resource can be read
     * @return input streams that will read the contents of these resources
     */
    public List<InputStream> getAllResourcesContentsAsStreams() {
        final List<InputStream> streams = new ArrayList<InputStream>();
        try {
            final Enumeration<URL> matches = getRelativeClassLoader().getResources(path);
            while (matches.hasMoreElements()) {
                final URL location = matches.nextElement();
                try {
                    streams.add(location.openStream());
                } catch (IOException ignored) {
                    /* skip exceptions, just like ClassLoader.getSystemResourceAsStream() */
                }
            }
        } catch (IOException ignoredAlso) {
            /* skip exceptions, just like ClassLoader.getSystemResourceAsStream() */
        }
        if (streams.isEmpty()) {
            throw new IllegalArgumentException("Resource not found: " + path);
        }
        return streams;
    }

    /**
     * Get the concatenated contents of every classpath match for this resource.
     * @throws IllegalArgumentException if no matching resource can be read
     * @return an input stream that will read the contents of these resources in sequence
     */
    public InputStream getAllResourcesContentsAsStream() {
        final List<InputStream> streams = getAllResourcesContentsAsStreams();
        return new SequenceInputStream(Collections.enumeration(streams));
    }
}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/iterators/GATKSAMIterator.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/iterators/GATKSAMIterator.java
new file mode 100644
index 0000000..61d8b91
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/iterators/GATKSAMIterator.java
@@ -0,0 +1,56 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.iterators;
+
+import htsjdk.samtools.SAMRecord;
+import htsjdk.samtools.util.CloseableIterator;
+/**
+ *
+ * User: aaron
+ * Date: May 6, 2009
+ * Time: 5:30:41 PM
+ *
+ * The Broad Institute
+ * SOFTWARE COPYRIGHT NOTICE AGREEMENT 
+ * This software and its documentation are copyright 2009 by the
+ * Broad Institute/Massachusetts Institute of Technology. All rights are reserved.
+ *
+ * This software is supplied without any warranty or guaranteed support whatsoever. Neither
+ * the Broad Institute nor MIT can be responsible for its use, misuse, or functionality.
+ *
+ */
+
/**
 * Standard interface for all iterators in the GATK package that iterate over
 * {@link SAMRecord}s.
 *
 * Combines {@link CloseableIterator} (so underlying readers can be released
 * via close()) with {@link Iterable} (so implementations can drive for-each
 * loops directly). Adds no methods of its own.
 *
 * @author aaron
 * @version 1.0
 */
public interface GATKSAMIterator extends CloseableIterator<SAMRecord>, Iterable<SAMRecord> {
}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/iterators/GATKSAMIteratorAdapter.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/iterators/GATKSAMIteratorAdapter.java
new file mode 100644
index 0000000..82aa866
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/iterators/GATKSAMIteratorAdapter.java
@@ -0,0 +1,136 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.iterators;
+
+import htsjdk.samtools.SAMRecord;
+import htsjdk.samtools.util.CloseableIterator;
+
+import java.util.Iterator;
+
+/**
+ *
+ * User: aaron
+ * Date: May 13, 2009
+ * Time: 6:33:15 PM
+ *
+ * The Broad Institute
+ * SOFTWARE COPYRIGHT NOTICE AGREEMENT 
+ * This software and its documentation are copyright 2009 by the
+ * Broad Institute/Massachusetts Institute of Technology. All rights are reserved.
+ *
+ * This software is supplied without any warranty or guaranteed support whatsoever. Neither
+ * the Broad Institute nor MIT can be responsible for its use, misuse, or functionality.
+ *
+ */
+
+
+/**
+ * @author aaron
+ * @version 1.0
+ * @date May 13, 2009
+ * <p/>
+ * Class GATKSAMIteratorAdapter
+ * <p/>
+ * This class adapts other SAMRecord iterators to the GATKSAMIterator
+ */
+public class GATKSAMIteratorAdapter {
+
+    public static GATKSAMIterator adapt(Iterator<SAMRecord> iter) {
+        return new PrivateStringSAMIterator(iter);
+    }
+
+    public static GATKSAMIterator adapt(CloseableIterator<SAMRecord> iter) {
+        return new PrivateStringSAMCloseableIterator(iter);
+    }
+
+}
+
+
+/**
+ * this class wraps iterators<SAMRecord> in a GATKSAMIterator, which means just adding the
+ * methods that implement the iterable<> interface and the close() method from CloseableIterator
+ */
+class PrivateStringSAMIterator implements GATKSAMIterator {
+    private Iterator<SAMRecord> iter = null;
+
+    PrivateStringSAMIterator(Iterator<SAMRecord> iter) {
+        this.iter = iter;
+    }
+
+    public void close() {
+        // do nothing, we can't close the iterator anyway.
+    }
+
+    public boolean hasNext() {
+        return iter.hasNext();
+    }
+
+    public SAMRecord next() {
+        return iter.next();
+    }
+
+    public void remove() {
+        throw new UnsupportedOperationException("GATKSAMIterator's don't allow remove()ing");
+    }
+
+    public Iterator<SAMRecord> iterator() {
+        return iter;
+    }
+}
+
+
+/**
+ * this class wraps closeable iterators<SAMRecord> in a GATKSAMIterator, which means adding the
+ * methods that implement the iterable<> interface.
+ */
+class PrivateStringSAMCloseableIterator implements GATKSAMIterator {
+    private CloseableIterator<SAMRecord> iter = null;
+
+    PrivateStringSAMCloseableIterator(CloseableIterator<SAMRecord> iter) {
+        this.iter = iter;
+    }
+
+    public void close() {
+        iter.close();
+    }
+
+    public boolean hasNext() {
+        return iter.hasNext();
+    }
+
+    public SAMRecord next() {
+        return iter.next();
+    }
+
+    public void remove() {
+        throw new UnsupportedOperationException("GATKSAMIterator's don't allow remove()ing");
+    }
+
+    public Iterator<SAMRecord> iterator() {
+        return iter;
+    }
+}
+
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/iterators/PushbackIterator.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/iterators/PushbackIterator.java
new file mode 100644
index 0000000..836a149
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/iterators/PushbackIterator.java
@@ -0,0 +1,82 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.iterators;
+
+import java.util.Iterator;
+
/**
 * An iterator decorator supporting a single element of lookahead: one element
 * may be pushed back and will be returned by the next call to next().
 *
 * NOTE(review): a pushed-back null is indistinguishable from "nothing pushed
 * back", so this wrapper assumes the underlying iterator never yields null.
 */
public class PushbackIterator<T> implements Iterator<T>, Iterable<T> {
    Iterator<T> underlyingIterator;
    T pushedElement = null;

    public PushbackIterator(final Iterator<T> underlyingIterator) {
        this.underlyingIterator = underlyingIterator;
    }

    public boolean hasNext() {
        // A buffered element counts as a next element even if the source is exhausted.
        if (pushedElement != null)
            return true;
        return underlyingIterator.hasNext();
    }

    public Iterator<T> iterator() {
        return this;
    }

    /**
     * Retrieves, but does not remove, the head of this iterator.
     * @return T the next element in the iterator
     */
    public T element() {
        final T head = next();
        pushback(head);
        return head;
    }

    /**
     * @return the next element in the iteration, consuming the pushed-back
     *         element first if one is buffered.
     */
    public T next() {
        final T buffered = pushedElement;
        if (buffered == null)
            return underlyingIterator.next();
        pushedElement = null;
        return buffered;
    }

    /**
     * Buffers an element to be returned by the next call to next().
     * Only one element of lookahead is supported at a time.
     */
    public void pushback(T elt) {
        assert(pushedElement == null);

        pushedElement = elt;
    }

    public void remove() {
        throw new UnsupportedOperationException();
    }

    /** @return the iterator being decorated. */
    public Iterator<T> getUnderlyingIterator() {
        return underlyingIterator;
    }
}
\ No newline at end of file
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/jna/clibrary/JNAUtils.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/jna/clibrary/JNAUtils.java
new file mode 100644
index 0000000..cebf5e4
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/jna/clibrary/JNAUtils.java
@@ -0,0 +1,59 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.jna.clibrary;
+
+import com.sun.jna.Platform;
+
+/**
+ * Collection of functions that are in the standard CLibrary but are associated with different headers on different platforms.
+ */
+public class JNAUtils {
+    /**
+     * Defined in different places on different systems, this is currently 256 on mac and 64 everywhere else.
+     */
+    public static final int MAXHOSTNAMELEN;
+
+    /**
+     * Maximum path length.
+     */
+    public static final int MAXPATHLEN = 1024;
+
+    static {
+      int maxhostnamelen = 64;
+      if (Platform.isMac())
+         maxhostnamelen = 256;
+      MAXHOSTNAMELEN = maxhostnamelen;
+    }
+
+    /**
+     * Converts a non-zero int to true, otherwise false.
+     * @param val int to check.
+     * @return true if val is non-zero.
+     */
+    public static boolean toBoolean(int val) {
+        return val != 0;
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/jna/clibrary/LibC.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/jna/clibrary/LibC.java
new file mode 100644
index 0000000..91baead
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/jna/clibrary/LibC.java
@@ -0,0 +1,200 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.jna.clibrary;
+
+import com.sun.jna.LastErrorException;
+import com.sun.jna.Native;
+import com.sun.jna.NativeLong;
+import com.sun.jna.Structure;
+import com.sun.jna.ptr.NativeLongByReference;
+
+/**
+ * Sparse port of the Standard C Library libc -lc.
+ */
@SuppressWarnings("unused")
public class LibC {

    static {
        // JNA direct mapping: binds the native methods declared below to symbols in "c".
        Native.register("c");
    }

    // errno constants. NOTE(review): these are the Linux/x86 <errno.h> values;
    // several differ on other platforms (e.g. EAGAIN is 35 on macOS) — confirm
    // before comparing against errno values obtained on non-Linux systems.

    /** Operation not permitted */
    public static final int EPERM = 1;

    /** No such file or directory */
    public static final int ENOENT = 2;

    /** No such process */
    public static final int ESRCH = 3;

    /** Interrupted system call */
    public static final int EINTR = 4;

    /** I/O error */
    public static final int EIO = 5;

    /** No such device or address */
    public static final int ENXIO = 6;

    /** Argument list too long */
    public static final int E2BIG = 7;

    /** Exec format error */
    public static final int ENOEXEC = 8;

    /** Bad file number */
    public static final int EBADF = 9;

    /** No child processes */
    public static final int ECHILD = 10;

    /** Try again */
    public static final int EAGAIN = 11;

    /** Out of memory */
    public static final int ENOMEM = 12;

    /** Permission denied */
    public static final int EACCES = 13;

    /** Bad address */
    public static final int EFAULT = 14;

    /** Block device required */
    public static final int ENOTBLK = 15;

    /** Device or resource busy */
    public static final int EBUSY = 16;

    /** File exists */
    public static final int EEXIST = 17;

    /** Cross-device link */
    public static final int EXDEV = 18;

    /** No such device */
    public static final int ENODEV = 19;

    /** Not a directory */
    public static final int ENOTDIR = 20;

    /** Is a directory */
    public static final int EISDIR = 21;

    /** Invalid argument */
    public static final int EINVAL = 22;

    /** File table overflow */
    public static final int ENFILE = 23;

    /** Too many open files */
    public static final int EMFILE = 24;

    /** Not a typewriter */
    public static final int ENOTTY = 25;

    /** Text file busy */
    public static final int ETXTBSY = 26;

    /** File too large */
    public static final int EFBIG = 27;

    /** No space left on device */
    public static final int ENOSPC = 28;

    /** Illegal seek */
    public static final int ESPIPE = 29;

    /** Read-only file system */
    public static final int EROFS = 30;

    /** Too many links */
    public static final int EMLINK = 31;

    /** Broken pipe */
    public static final int EPIPE = 32;

    /** Math argument out of domain of func */
    public static final int EDOM = 33;

    /** Math result not representable */
    public static final int ERANGE = 34;

    /**
     * Inserts or resets the environment variable name in the current environment list.  If the variable name does not exist
     * in the list, it is inserted with the given value.  If the variable does exist, the argument overwrite is tested; if overwrite is zero, the
     * variable is not reset, otherwise it is reset to the given value.
     * @param name the environment variable name
     * @param value the given value
     * @param overwrite if overwrite is zero, the variable is not reset, otherwise it is reset to the given value
     * @return the value 0 if successful; otherwise the value -1 is returned and the global variable errno is set to indicate the error.
     * @throws LastErrorException [ENOMEM] The function failed because it was unable to allocate memory for the environment.
     */
    public static native int setenv(String name, String value, int overwrite) throws LastErrorException;

    /**
     * Obtains the current value of the environment variable, name.
     * @param name the environment variable name
     * @return the value of the environment variable as a NUL-terminated string.  If the variable name is not in the current environment, NULL is returned.
     */
    public static native String getenv(String name);

    /**
     * The unsetenv() function deletes all instances of the variable name pointed to by name from the list.  Note that only the variable name
     * (e.g., "NAME") should be given; "NAME=value" will not work.
     * @param name the environment variable name
     * @return the value 0 if successful; otherwise the value -1 is returned and the global variable errno is set to indicate the error.
     * @throws LastErrorException The function failed.
     */
    public static native int unsetenv(String name) throws LastErrorException;

    /**
     * JNA mapping of the C struct timeval.
     * The field declaration order below must match the native struct layout —
     * JNA maps fields in declaration order here. NOTE(review): newer JNA versions
     * require overriding getFieldOrder(); confirm against the JNA version in use.
     */
    public static class timeval extends Structure {
        public static class ByReference extends timeval implements Structure.ByReference {
        }

        public static class ByValue extends timeval implements Structure.ByValue {
        }

        public NativeLong tv_sec;   // seconds
        public NativeLong tv_usec;  // microseconds
    }

    /**
     * The time() function returns the value of time in seconds since 0 hours, 0 minutes, 0 seconds, January 1, 1970, Coordinated Universal Time, without including leap seconds.  If an error occurs, time() returns the value (time_t)-1.
     * The return value is also stored in *t, provided that t is non-null.
     * @param t if non-null, also receives the returned time value
     * @return the value of time in seconds
     */
    public static native NativeLong time(NativeLongByReference t);

    /**
     * Returns the difference between two calendar times, (time1 - time0), expressed in seconds.
     * @param time1 Time 1
     * @param time0 Time 0
     * @return the difference between two calendar times, (time1 - time0), expressed in seconds.
     */
    public static native double difftime(NativeLong time1, NativeLong time0);
}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/jna/drmaa/v1_0/JnaJobInfo.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/jna/drmaa/v1_0/JnaJobInfo.java
new file mode 100644
index 0000000..ea5fd19
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/jna/drmaa/v1_0/JnaJobInfo.java
@@ -0,0 +1,101 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.jna.drmaa.v1_0;
+
+import org.ggf.drmaa.DrmaaException;
+import org.ggf.drmaa.JobInfo;
+
+import java.util.Map;
+
+/**
+ * JNA mapping from Java to C DRMAA binding.
+ */
+public class JnaJobInfo implements JobInfo {
+
+    private final String jobId;
+    private final Map<String, String> rusage;
+    private final boolean hasExited;
+    private final int exitStatus;
+    private final boolean hasSignaled;
+    private final String terminatingSignal;
+    private final boolean hasCoreDump;
+    private final boolean wasAborted;
+            
+    public JnaJobInfo(String jobId, Map<String, String> rusage, boolean hasExited, int exitStatus, boolean hasSignaled, String terminatingSignal, boolean hasCoreDump, boolean wasAborted) {
+        this.jobId = jobId;
+        this.rusage = rusage;
+        this.hasExited = hasExited;
+        this.exitStatus = exitStatus;
+        this.hasSignaled = hasSignaled;
+        this.terminatingSignal = terminatingSignal;
+        this.hasCoreDump = hasCoreDump;
+        this.wasAborted = wasAborted;
+    }
+
+    @Override
+    public String getJobId() throws DrmaaException {
+        return this.jobId;
+    }
+
+    @Override
+    public Map getResourceUsage() throws DrmaaException {
+        return rusage;
+    }
+
+    @Override
+    public boolean hasExited() throws DrmaaException {
+        return hasExited;
+    }
+
+    @Override
+    public int getExitStatus() throws DrmaaException {
+        if (!hasExited)
+            throw new IllegalStateException("job has not exited");
+        return exitStatus;
+    }
+
+    @Override
+    public boolean hasSignaled() throws DrmaaException {
+        return hasSignaled;
+    }
+
+    @Override
+    public String getTerminatingSignal() throws DrmaaException {
+        if (!hasSignaled)
+            throw new IllegalStateException("job has not signaled");
+        return terminatingSignal;
+    }
+
+    @Override
+    public boolean hasCoreDump() throws DrmaaException {
+        return hasCoreDump;
+    }
+
+    @Override
+    public boolean wasAborted() throws DrmaaException {
+        return wasAborted;
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/jna/drmaa/v1_0/JnaJobTemplate.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/jna/drmaa/v1_0/JnaJobTemplate.java
new file mode 100644
index 0000000..1afd96c
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/jna/drmaa/v1_0/JnaJobTemplate.java
@@ -0,0 +1,316 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.jna.drmaa.v1_0;
+
+import com.sun.jna.Pointer;
+import org.ggf.drmaa.*;
+
+import java.util.*;
+
+/**
+ * JNA mapping from Java to C DRMAA binding.
+ */
/**
 * JNA mapping from Java to C DRMAA binding.
 *
 * Thin wrapper around a native DRMAA job-template pointer: every getter and
 * setter delegates to the static attribute helpers on JnaSession, translating
 * between Java values and the string encodings the DRMAA C API expects.
 * The native pointer's lifetime is managed by the owning session (via
 * deleteJobTemplate); this class does not free it itself.
 */
public class JnaJobTemplate implements JobTemplate {
    private final JnaSession session;  // session that allocated (and frees) the template
    private final Pointer jt;          // native DRMAA job-template pointer

    public JnaJobTemplate(JnaSession session, Pointer jt) {
        this.session = session;
        this.jt = jt;
    }

    /** Returns the raw native job-template pointer, for use by JnaSession calls. */
    public Pointer getPointer() {
        return jt;
    }

    @Override
    public void setRemoteCommand(String s) throws DrmaaException {
        JnaSession.setAttribute(jt, LibDrmaa.DRMAA_REMOTE_COMMAND, s);
    }

    @Override
    public String getRemoteCommand() throws DrmaaException {
        return JnaSession.getAttribute(jt, LibDrmaa.DRMAA_REMOTE_COMMAND);
    }

    // Raw List/Map/Set signatures below are mandated by the org.ggf.drmaa.JobTemplate interface.
    @SuppressWarnings("unchecked")
    @Override
    public void setArgs(List list) throws DrmaaException {
        JnaSession.setVectorAttribute(jt, LibDrmaa.DRMAA_V_ARGV, list);
    }

    @Override
    public List getArgs() throws DrmaaException {
        return JnaSession.getVectorAttribute(jt, LibDrmaa.DRMAA_V_ARGV);
    }

    @Override
    public void setJobSubmissionState(int state) throws DrmaaException {
        // DRMAA encodes the submission state as one of two well-known strings.
        String stateString;
        if (state == JobTemplate.HOLD_STATE)
            stateString = LibDrmaa.DRMAA_SUBMISSION_STATE_HOLD;
        else if (state == JobTemplate.ACTIVE_STATE)
            stateString = LibDrmaa.DRMAA_SUBMISSION_STATE_ACTIVE;
        else
            throw new InvalidAttributeValueException("jobSubmissionState attribute is invalid");
        JnaSession.setAttribute(jt, LibDrmaa.DRMAA_JS_STATE, stateString);
    }

    @Override
    public int getJobSubmissionState() throws DrmaaException {
        int state;
        String stateString = JnaSession.getAttribute(jt, LibDrmaa.DRMAA_JS_STATE);
        if (LibDrmaa.DRMAA_SUBMISSION_STATE_HOLD.equals(stateString))
            state = JobTemplate.HOLD_STATE;
        else if (LibDrmaa.DRMAA_SUBMISSION_STATE_ACTIVE.equals(stateString))
            state = JobTemplate.ACTIVE_STATE;
        else
            throw new InvalidAttributeValueException("jobSubmissionState attribute is invalid");
        return state;
    }

    @SuppressWarnings("unchecked")
    @Override
    public void setJobEnvironment(Map env) throws DrmaaException {
        // DRMAA stores the environment as a vector of "NAME=value" strings.
        JnaSession.setVectorAttribute(jt, LibDrmaa.DRMAA_V_ENV, JnaSession.mapToCollection(env));
    }

    @SuppressWarnings("unchecked")
    @Override
    public Map getJobEnvironment() throws DrmaaException {
        return JnaSession.collectionToMap(JnaSession.getVectorAttribute(jt, LibDrmaa.DRMAA_V_ENV));
    }

    @Override
    public void setWorkingDirectory(String s) throws DrmaaException {
        JnaSession.setAttribute(jt, LibDrmaa.DRMAA_WD, s);
    }

    @Override
    public String getWorkingDirectory() throws DrmaaException {
        return JnaSession.getAttribute(jt, LibDrmaa.DRMAA_WD);
    }

    @Override
    public void setJobCategory(String s) throws DrmaaException {
        JnaSession.setAttribute(jt, LibDrmaa.DRMAA_JOB_CATEGORY, s);
    }

    @Override
    public String getJobCategory() throws DrmaaException {
        return JnaSession.getAttribute(jt, LibDrmaa.DRMAA_JOB_CATEGORY);
    }

    @Override
    public void setNativeSpecification(String s) throws DrmaaException {
        JnaSession.setAttribute(jt, LibDrmaa.DRMAA_NATIVE_SPECIFICATION, s);
    }

    @Override
    public String getNativeSpecification() throws DrmaaException {
        return JnaSession.getAttribute(jt, LibDrmaa.DRMAA_NATIVE_SPECIFICATION);
    }

    @SuppressWarnings("unchecked")
    @Override
    public void setEmail(Set set) throws DrmaaException {
        JnaSession.setVectorAttribute(jt, LibDrmaa.DRMAA_V_EMAIL, set);
    }

    @SuppressWarnings("unchecked")
    @Override
    public Set getEmail() throws DrmaaException {
        // Copy into a LinkedHashSet to present the vector as a Set while keeping order.
        return new LinkedHashSet<String>(JnaSession.getVectorAttribute(jt, LibDrmaa.DRMAA_V_EMAIL));
    }

    @Override
    public void setBlockEmail(boolean b) throws DrmaaException {
        // DRMAA encodes this boolean attribute as the string "1" or "0".
        JnaSession.setAttribute(jt, LibDrmaa.DRMAA_BLOCK_EMAIL, b ? "1" : "0");
    }

    @Override
    public boolean getBlockEmail() throws DrmaaException {
        return "1".equals(JnaSession.getAttribute(jt, LibDrmaa.DRMAA_BLOCK_EMAIL));
    }

    @Override
    public void setStartTime(PartialTimestamp partialTimestamp) throws DrmaaException {
        JnaSession.setPartialTime(jt, LibDrmaa.DRMAA_START_TIME, partialTimestamp);
    }

    @Override
    public PartialTimestamp getStartTime() throws DrmaaException {
        return JnaSession.getPartialTime(jt, LibDrmaa.DRMAA_START_TIME);
    }

    @Override
    public void setJobName(String s) throws DrmaaException {
        JnaSession.setAttribute(jt, LibDrmaa.DRMAA_JOB_NAME, s);
    }

    @Override
    public String getJobName() throws DrmaaException {
        return JnaSession.getAttribute(jt, LibDrmaa.DRMAA_JOB_NAME);
    }

    @Override
    public void setInputPath(String s) throws DrmaaException {
        JnaSession.setAttribute(jt, LibDrmaa.DRMAA_INPUT_PATH, s);
    }

    @Override
    public String getInputPath() throws DrmaaException {
        return JnaSession.getAttribute(jt, LibDrmaa.DRMAA_INPUT_PATH);
    }

    @Override
    public void setOutputPath(String s) throws DrmaaException {
        JnaSession.setAttribute(jt, LibDrmaa.DRMAA_OUTPUT_PATH, s);
    }

    @Override
    public String getOutputPath() throws DrmaaException {
        return JnaSession.getAttribute(jt, LibDrmaa.DRMAA_OUTPUT_PATH);
    }

    @Override
    public void setErrorPath(String s) throws DrmaaException {
        JnaSession.setAttribute(jt, LibDrmaa.DRMAA_ERROR_PATH, s);
    }

    @Override
    public String getErrorPath() throws DrmaaException {
        return JnaSession.getAttribute(jt, LibDrmaa.DRMAA_ERROR_PATH);
    }

    @Override
    public void setJoinFiles(boolean b) throws DrmaaException {
        // DRMAA encodes this boolean attribute as the string "y" or "n".
        JnaSession.setAttribute(jt, LibDrmaa.DRMAA_JOIN_FILES, b ? "y" : "n");
    }

    @Override
    public boolean getJoinFiles() throws DrmaaException {
        return "y".equals(JnaSession.getAttribute(jt, LibDrmaa.DRMAA_JOIN_FILES));
    }

    @Override
    public void setTransferFiles(FileTransferMode fileTransferMode) throws DrmaaException {
        // DRMAA encodes the transfer mode as a string containing any of the
        // characters 'i' (input), 'o' (output), 'e' (error).
        StringBuilder buf = new StringBuilder();

        if (fileTransferMode.getInputStream())
            buf.append('i');

        if (fileTransferMode.getOutputStream())
            buf.append('o');

        if (fileTransferMode.getErrorStream())
            buf.append('e');

        JnaSession.setAttribute(jt, LibDrmaa.DRMAA_TRANSFER_FILES, buf.toString());
    }

    @Override
    public FileTransferMode getTransferFiles() throws DrmaaException {
        String mode = JnaSession.getAttribute(jt, LibDrmaa.DRMAA_TRANSFER_FILES);

        // Attribute never set: no mode to report.
        if (mode == null)
            return null;

        FileTransferMode fileTransferMode = new FileTransferMode();
        fileTransferMode.setInputStream(mode.indexOf('i') >= 0);
        fileTransferMode.setOutputStream(mode.indexOf('o') >= 0);
        fileTransferMode.setErrorStream(mode.indexOf('e') >= 0);
        return fileTransferMode;
    }

    @Override
    public void setDeadlineTime(PartialTimestamp partialTimestamp) throws DrmaaException {
        JnaSession.setPartialTime(jt, LibDrmaa.DRMAA_DEADLINE_TIME, partialTimestamp);
    }

    @Override
    public PartialTimestamp getDeadlineTime() throws DrmaaException {
        return JnaSession.getPartialTime(jt, LibDrmaa.DRMAA_DEADLINE_TIME);
    }

    // The four limit pairs below convert between a long (seconds) and DRMAA's
    // limit string form via JnaSession.formatLimit/parseLimit.
    @Override
    public void setHardWallclockTimeLimit(long l) throws DrmaaException {
        JnaSession.setAttribute(jt, LibDrmaa.DRMAA_WCT_HLIMIT, JnaSession.formatLimit(l));
    }

    @Override
    public long getHardWallclockTimeLimit() throws DrmaaException {
        return JnaSession.parseLimit(JnaSession.getAttribute(jt, LibDrmaa.DRMAA_WCT_HLIMIT));
    }

    @Override
    public void setSoftWallclockTimeLimit(long l) throws DrmaaException {
        JnaSession.setAttribute(jt, LibDrmaa.DRMAA_WCT_SLIMIT, JnaSession.formatLimit(l));
    }

    @Override
    public long getSoftWallclockTimeLimit() throws DrmaaException {
        return JnaSession.parseLimit(JnaSession.getAttribute(jt, LibDrmaa.DRMAA_WCT_SLIMIT));
    }

    @Override
    public void setHardRunDurationLimit(long l) throws DrmaaException {
        JnaSession.setAttribute(jt, LibDrmaa.DRMAA_DURATION_HLIMIT, JnaSession.formatLimit(l));
    }

    @Override
    public long getHardRunDurationLimit() throws DrmaaException {
        return JnaSession.parseLimit(JnaSession.getAttribute(jt, LibDrmaa.DRMAA_DURATION_HLIMIT));
    }

    @Override
    public void setSoftRunDurationLimit(long l) throws DrmaaException {
        JnaSession.setAttribute(jt, LibDrmaa.DRMAA_DURATION_SLIMIT, JnaSession.formatLimit(l));
    }

    @Override
    public long getSoftRunDurationLimit() throws DrmaaException {
        return JnaSession.parseLimit(JnaSession.getAttribute(jt, LibDrmaa.DRMAA_DURATION_SLIMIT));
    }

    @Override
    public Set getAttributeNames() throws DrmaaException {
        return JnaSession.getAttrNames();
    }

    /** Two templates are equal iff they wrap the same native pointer and belong to the same session. */
    @Override
    public boolean equals(Object obj) {
        if (!(obj instanceof JnaJobTemplate))
            return false;
        JnaJobTemplate other = (JnaJobTemplate) obj;
        return this.jt.equals(other.jt) && this.session.equals(other.session);
    }

    // Consistent with equals: equal templates share jt, hence equal hash codes.
    @Override
    public int hashCode() {
        return jt.hashCode();
    }
}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/jna/drmaa/v1_0/JnaSession.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/jna/drmaa/v1_0/JnaSession.java
new file mode 100644
index 0000000..0c0745c
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/jna/drmaa/v1_0/JnaSession.java
@@ -0,0 +1,461 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.jna.drmaa.v1_0;
+
+import com.sun.jna.Memory;
+import com.sun.jna.NativeLong;
+import com.sun.jna.Pointer;
+import com.sun.jna.StringArray;
+import com.sun.jna.ptr.IntByReference;
+import com.sun.jna.ptr.PointerByReference;
+import org.ggf.drmaa.*;
+
+import java.text.ParseException;
+import java.util.*;
+
+/**
+ * JNA mapping from Java to C DRMAA binding.
+ * See: Java and C Binding Documents on http://drmaa.org
+ */
+public class JnaSession implements Session {
+    private static final PartialTimestampFormat PARTIAL_TIMESTAMP_FORMAT = new PartialTimestampFormat();
+    private static final ThreadLocal<Memory> threadError = new ThreadLocal<Memory>() {
+        @Override
+        protected Memory initialValue() {
+            return new Memory(LibDrmaa.DRMAA_ERROR_STRING_BUFFER);
+        }
+    };
+
+    @Override
+    public void init(String contact) throws DrmaaException {
+        checkError(LibDrmaa.drmaa_init(contact, getError(), LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN));
+    }
+
+    @Override
+    public void exit() throws DrmaaException {
+        checkError(LibDrmaa.drmaa_exit(getError(), LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN));
+    }
+
+    @Override
+    public JobTemplate createJobTemplate() throws DrmaaException {
+        PointerByReference jtRef = new PointerByReference();
+        checkError(LibDrmaa.drmaa_allocate_job_template(jtRef, getError(), LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN));
+        return new JnaJobTemplate(this, jtRef.getValue());
+    }
+
+    @Override
+    public void deleteJobTemplate(JobTemplate jobTemplate) throws DrmaaException {
+        JnaJobTemplate jnaJobTemplate = (JnaJobTemplate) jobTemplate;
+        checkError(LibDrmaa.drmaa_delete_job_template(jnaJobTemplate.getPointer(), getError(), LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN));
+    }
+
+    @Override
+    public String runJob(JobTemplate jobTemplate) throws DrmaaException {
+        Memory jobId = new Memory(LibDrmaa.DRMAA_JOBNAME_BUFFER);
+        JnaJobTemplate jnaJobTemplate = (JnaJobTemplate) jobTemplate;
+        checkError(LibDrmaa.drmaa_run_job(jobId, LibDrmaa.DRMAA_JOBNAME_BUFFER_LEN, jnaJobTemplate.getPointer(), getError(), LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN));
+        return jobId.getString(0);
+    }
+
+    @Override
+    public List runBulkJobs(JobTemplate jobTemplate, int start, int end, int incr) throws DrmaaException {
+        PointerByReference jobIds = new PointerByReference();
+        JnaJobTemplate jnaJobTemplate = (JnaJobTemplate) jobTemplate;
+        checkError(LibDrmaa.drmaa_run_bulk_jobs(jobIds, jnaJobTemplate.getPointer(), start, end, incr, getError(), LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN));
+        try {
+            return getJobIds(jobIds);
+        } finally {
+            releaseJobIds(jobIds);
+        }
+    }
+
+    @Override
+    public void control(String jobId, int action) throws DrmaaException {
+        checkError(LibDrmaa.drmaa_control(jobId, action, getError(), LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN));
+    }
+
+    @SuppressWarnings("unchecked")
+    @Override
+    public void synchronize(List list, long timeout, boolean dispose) throws DrmaaException {
+        StringArray jobIds = new StringArray((String[]) list.toArray(new String[list.size()]));
+        checkError(LibDrmaa.drmaa_synchronize(jobIds, new NativeLong(timeout), dispose ? 1 : 0, getError(), LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN));
+    }
+
+    @Override
+    public JobInfo wait(String jobId, long timeout) throws DrmaaException {
+        Memory jobIdOut = new Memory(LibDrmaa.DRMAA_JOBNAME_BUFFER);
+        IntByReference stat = new IntByReference();
+        PointerByReference rusage = new PointerByReference();
+        IntByReference exited = new IntByReference();
+        IntByReference exitStatus = new IntByReference();
+        IntByReference signaled = new IntByReference();
+        Memory signal = new Memory(LibDrmaa.DRMAA_SIGNAL_BUFFER);
+        IntByReference coreDumped = new IntByReference();
+        IntByReference aborted = new IntByReference();
+
+        int errnum;
+
+        errnum = LibDrmaa.drmaa_wait(jobId, jobIdOut, LibDrmaa.DRMAA_JOBNAME_BUFFER_LEN, stat, new NativeLong(timeout), rusage, getError(), LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN);
+
+        Map<String, String> rusageMap;
+        if (errnum == LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_NO_RUSAGE) {
+            rusageMap = null;
+        } else {
+            try {
+                rusageMap = collectionToMap(getAttrValues(rusage));
+            } finally {
+                releaseAttrValues(rusage);
+            }
+        }
+
+        checkError(LibDrmaa.drmaa_wifexited(exited, stat.getValue(), getError(), LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN));
+
+        if (exited.getValue() != 0) {
+            checkError(LibDrmaa.drmaa_wexitstatus(exitStatus, stat.getValue(), getError(), LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN));
+        }
+
+        checkError(LibDrmaa.drmaa_wifsignaled(signaled, stat.getValue(), getError(), LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN));
+
+        if (signaled.getValue() != 0) {
+            checkError(LibDrmaa.drmaa_wtermsig(signal, LibDrmaa.DRMAA_SIGNAL_BUFFER_LEN, stat.getValue(), getError(), LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN));
+            checkError(LibDrmaa.drmaa_wcoredump(coreDumped, stat.getValue(), getError(), LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN));
+        }
+
+        checkError(LibDrmaa.drmaa_wifaborted(aborted, stat.getValue(), getError(), LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN));
+
+        return new JnaJobInfo(jobIdOut.getString(0), rusageMap, exited.getValue() != 0, exitStatus.getValue(),
+                signaled.getValue() != 0, signal.getString(0), coreDumped.getValue() != 0, aborted.getValue() != 0);
+    }
+
+    /**
+     * Queries the DRM system for the current status of a job via drmaa_job_ps().
+     *
+     * @param jobId id of the job to query
+     * @return the native status code (one of the LibDrmaa.DRMAA_PS values)
+     * @throws DrmaaException if the native call reports an error
+     */
+    @Override
+    public int getJobProgramStatus(String jobId) throws DrmaaException {
+        IntByReference remotePs = new IntByReference();
+        checkError(LibDrmaa.drmaa_job_ps(jobId, remotePs, getError(), LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN));
+        return remotePs.getValue();
+    }
+
+    /**
+     * Returns the DRMAA contact string via drmaa_get_contact(), reading the result
+     * out of a native buffer of DRMAA_CONTACT_BUFFER bytes.  Any DrmaaException is
+     * rewrapped as a RuntimeException because the overridden method does not
+     * declare it (see inline comment).
+     */
+    @Override
+    public String getContact() {
+        Memory contact = new Memory(LibDrmaa.DRMAA_CONTACT_BUFFER);
+        try {
+            checkError(LibDrmaa.drmaa_get_contact(contact, LibDrmaa.DRMAA_CONTACT_BUFFER_LEN, getError(), LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN));
+        } catch (DrmaaException e) {
+            // DRMAA spec says this method should throw DrmaaException.
+            // Why doesn't interface implement this?
+            throw new RuntimeException(e);
+        }
+        return contact.getString(0);
+    }
+
+    /**
+     * Returns the DRMAA specification version supported by the native library,
+     * via drmaa_version().  Any DrmaaException is rewrapped as a RuntimeException
+     * because the overridden method does not declare it (see inline comment).
+     */
+    @Override
+    public Version getVersion() {
+        IntByReference major = new IntByReference();
+        IntByReference minor = new IntByReference();
+        try {
+            checkError(LibDrmaa.drmaa_version(major, minor, getError(), LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN));
+        } catch (DrmaaException e) {
+            // DRMAA spec says this method should throw DrmaaException.
+            // Why doesn't interface implement this?
+            throw new RuntimeException(e);
+        }
+        return new Version(major.getValue(), minor.getValue());
+    }
+
+    /**
+     * Returns the DRM system identifier string via drmaa_get_DRM_system().
+     * Any DrmaaException is rewrapped as a RuntimeException because the
+     * overridden method does not declare it (see inline comment).
+     */
+    @Override
+    public String getDrmSystem() {
+        Memory drmSystem = new Memory(LibDrmaa.DRMAA_DRM_SYSTEM_BUFFER);
+        try {
+            checkError(LibDrmaa.drmaa_get_DRM_system(drmSystem, LibDrmaa.DRMAA_DRM_SYSTEM_BUFFER_LEN, getError(), LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN));
+        } catch (DrmaaException e) {
+            // DRMAA spec says this method should throw DrmaaException.
+            // Why doesn't interface implement this?
+            throw new RuntimeException(e);
+        }
+        return drmSystem.getString(0);
+    }
+
+    /**
+     * Returns the DRMAA implementation identifier string via
+     * drmaa_get_DRMAA_implementation().  Any DrmaaException is rewrapped as a
+     * RuntimeException because the overridden method does not declare it
+     * (see inline comment).
+     */
+    @Override
+    public String getDrmaaImplementation() {
+        Memory drmaaImplementation = new Memory(LibDrmaa.DRMAA_DRMAA_IMPLEMENTATION_BUFFER);
+        try {
+            checkError(LibDrmaa.drmaa_get_DRMAA_implementation(drmaaImplementation, LibDrmaa.DRMAA_DRMAA_IMPLEMENTATION_BUFFER_LEN, getError(), LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN));
+        } catch (DrmaaException e) {
+            // DRMAA spec says this method should throw DrmaaException.
+            // Why doesn't interface implement this?
+            throw new RuntimeException(e);
+        }
+        return drmaaImplementation.getString(0);
+    }
+
+    public static void setAttribute(Pointer jt, String name, String value) throws DrmaaException {
+        if (getAttrNames().contains(name)) {
+            checkError(LibDrmaa.drmaa_set_attribute(jt, name, value, getError(), LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN));
+        }
+        else {
+            throw new InvalidAttributeValueException("Attribute " + name + " is not supported by this implementation of DRMAA");
+        }
+    }
+
+    public static String getAttribute(Pointer jt, String name) throws DrmaaException {
+        if (getAttrNames().contains(name)) {
+            Memory attrBuffer = new Memory(LibDrmaa.DRMAA_ATTR_BUFFER);
+            checkError(LibDrmaa.drmaa_get_attribute(jt, name, attrBuffer, LibDrmaa.DRMAA_ATTR_BUFFER_LEN, getError(), LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN));
+            return attrBuffer.getString(0);
+        }
+        else {
+            throw new InvalidAttributeValueException("Attribute " + name + " is not supported by this implementation of DRMAA");
+        }
+    }
+
+    public static void setVectorAttribute(Pointer jt, String name, Collection<String> values) throws DrmaaException {
+        StringArray valuesArray = new StringArray(values.toArray(new String[values.size()]));
+        checkError(LibDrmaa.drmaa_set_vector_attribute(jt, name, valuesArray, getError(), LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN));
+    }
+
+    /**
+     * Reads a vector job-template attribute as a list of strings.
+     * The native value vector is always released, even if draining it fails.
+     */
+    public static List<String> getVectorAttribute(Pointer jt, String name) throws DrmaaException {
+        PointerByReference values = new PointerByReference();
+        checkError(LibDrmaa.drmaa_get_vector_attribute(jt, name, values, getError(), LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN));
+        try {
+            return getAttrValues(values);
+        } finally {
+            releaseAttrValues(values);
+        }
+    }
+
+    /**
+     * Formats the given PartialTimestamp with PARTIAL_TIMESTAMP_FORMAT (declared
+     * elsewhere in this class) and stores it as a scalar job-template attribute.
+     */
+    public static void setPartialTime(Pointer jt, String name, PartialTimestamp partialTimestamp) throws DrmaaException {
+        setAttribute(jt, name, PARTIAL_TIMESTAMP_FORMAT.format(partialTimestamp));
+    }
+
+    /**
+     * Reads a job-template attribute and parses it as a PartialTimestamp.
+     *
+     * @return the parsed timestamp, or null if the attribute is unset
+     * @throws DrmaaException if the attribute cannot be read
+     * @throws InternalException if the stored value does not match PARTIAL_TIMESTAMP_FORMAT
+     */
+    public static PartialTimestamp getPartialTime(Pointer jt, String name) throws DrmaaException {
+        String time = getAttribute(jt, name);
+        if (time == null)
+            return null;
+        try {
+            return PARTIAL_TIMESTAMP_FORMAT.parse(time);
+        } catch (ParseException e) {
+            // NOTE(review): the ParseException cause is dropped here — confirm whether
+            // InternalException offers a cause-preserving constructor before changing.
+            throw new InternalException(name + " property is unparsable");
+        }
+    }
+
+    /**
+     * Returns the scalar attribute names supported by this DRMAA implementation,
+     * in the order the native library reports them.  The native name vector is
+     * always released, even if draining it fails.
+     */
+    public static Set<String> getAttrNames() throws DrmaaException {
+        PointerByReference values = new PointerByReference();
+        checkError(LibDrmaa.drmaa_get_attribute_names(values, getError(), LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN));
+        try {
+            return new LinkedHashSet<String>(getAttrNames(values));
+        } finally {
+            releaseAttrNames(values);
+        }
+    }
+
+    /**
+     * Converts a map into a collection of "key=value" strings, preserving the
+     * map's iteration order.
+     */
+    public static Collection<String> mapToCollection(Map<String, String> map) {
+        Collection<String> result = new LinkedHashSet<String>();
+        for (String key : map.keySet()) {
+            result.add(key + "=" + map.get(key));
+        }
+        return result;
+    }
+
+    /**
+     * Parses a collection of "key=value" strings into a map, preserving encounter
+     * order.  Null entries and entries with no '=' are silently skipped; the value
+     * is everything after the first '='.
+     */
+    public static Map<String, String> collectionToMap(Collection<String> list) {
+        Map<String, String> result = new LinkedHashMap<String, String>();
+        for (String entry : list) {
+            if (entry == null)
+                continue;
+            int sep = entry.indexOf('=');
+            if (sep >= 0)
+                result.put(entry.substring(0, sep), entry.substring(sep + 1));
+        }
+        return result;
+    }
+
+    /**
+     * Formats a duration in seconds as an H:MM:SS limit string,
+     * e.g. 3661 becomes "1:01:01".
+     */
+    public static String formatLimit(long secs) {
+        return String.format("%d:%02d:%02d", secs / 3600, (secs / 60) % 60, secs % 60);
+    }
+
+    /**
+     * Parses a colon-separated [[H:]M:]S limit string back into seconds; a null
+     * limit parses as 0.  Inverse of formatLimit for canonical input.
+     */
+    public static long parseLimit(String limit) {
+        if (limit == null)
+            return 0;
+        long total = 0;
+        for (String field : limit.split(":")) {
+            total = total * 60 + Long.parseLong(field);
+        }
+        return total;
+    }
+
+    /**
+     * Drains a native drmaa_attr_names_t vector into a Java list.  Does not
+     * release the native vector; callers pair this with releaseAttrNames().
+     */
+    private static List<String> getAttrNames(PointerByReference names) throws DrmaaException {
+        List<String> namesList = new ArrayList<String>();
+        IntByReference size = new IntByReference();
+        int errnum;
+
+        errnum = LibDrmaa.drmaa_get_num_attr_names(names.getValue(), size);
+        checkError(errnum, "unable to get attribute names");
+        int num = size.getValue();
+
+        // One scratch buffer reused for every element.
+        Memory value = new Memory(LibDrmaa.DRMAA_ATTR_BUFFER);
+        for (int i = 1; i <= num; i++) {
+            errnum = LibDrmaa.drmaa_get_next_attr_name(names.getValue(), value, LibDrmaa.DRMAA_ATTR_BUFFER_LEN);
+            checkError(errnum, "unable to get attribute name " + i);
+            // checkError treats DRMAA_ERRNO_NO_MORE_ELEMENTS as non-fatal, so stop
+            // early here if the vector runs out before 'num' elements.
+            if (errnum == LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_NO_MORE_ELEMENTS)
+                break;
+            namesList.add(value.getString(0));
+        }
+
+        return namesList;
+    }
+
+    /**
+     * Drains a native drmaa_attr_values_t vector into a Java list.  Does not
+     * release the native vector; callers pair this with releaseAttrValues().
+     */
+    private static List<String> getAttrValues(PointerByReference values) throws DrmaaException {
+        List<String> valuesList = new ArrayList<String>();
+        IntByReference size = new IntByReference();
+        int errnum;
+
+        errnum = LibDrmaa.drmaa_get_num_attr_values(values.getValue(), size);
+        checkError(errnum, "unable to get attribute values");
+        int num = size.getValue();
+
+        // One scratch buffer reused for every element.
+        Memory value = new Memory(LibDrmaa.DRMAA_ATTR_BUFFER);
+        for (int i = 1; i <= num; i++) {
+            errnum = LibDrmaa.drmaa_get_next_attr_value(values.getValue(), value, LibDrmaa.DRMAA_ATTR_BUFFER_LEN);
+            checkError(errnum, "unable to get attribute value " + i);
+            // checkError treats DRMAA_ERRNO_NO_MORE_ELEMENTS as non-fatal, so stop
+            // early here if the vector runs out before 'num' elements.
+            if (errnum == LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_NO_MORE_ELEMENTS)
+                break;
+            valuesList.add(value.getString(0));
+        }
+
+        return valuesList;
+    }
+
+    /**
+     * Drains a native drmaa_job_ids_t vector into a Java list.  Does not release
+     * the native vector; callers pair this with releaseJobIds().
+     */
+    private static List<String> getJobIds(PointerByReference jobIds) throws DrmaaException {
+        List<String> jobIdsList = new ArrayList<String>();
+        IntByReference size = new IntByReference();
+        int errnum;
+
+        errnum = LibDrmaa.drmaa_get_num_job_ids(jobIds.getValue(), size);
+        checkError(errnum, "unable to get jobIds");
+        int num = size.getValue();
+
+        // One scratch buffer reused for every element.
+        Memory value = new Memory(LibDrmaa.DRMAA_JOBNAME_BUFFER);
+        for (int i = 1; i <= num; i++) {
+            errnum = LibDrmaa.drmaa_get_next_job_id(jobIds.getValue(), value, LibDrmaa.DRMAA_JOBNAME_BUFFER_LEN);
+            checkError(errnum, "unable to get jobId " + i);
+            // checkError treats DRMAA_ERRNO_NO_MORE_ELEMENTS as non-fatal, so stop
+            // early here if the vector runs out before 'num' elements.
+            if (errnum == LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_NO_MORE_ELEMENTS)
+                break;
+            jobIdsList.add(value.getString(0));
+        }
+
+        return jobIdsList;
+    }
+
+    /** Frees the native attribute-names vector obtained from the DRMAA library. */
+    private static void releaseAttrNames(PointerByReference names) throws DrmaaException {
+        LibDrmaa.drmaa_release_attr_names(names.getValue());
+    }
+
+    /** Frees the native attribute-values vector obtained from the DRMAA library. */
+    private static void releaseAttrValues(PointerByReference values) throws DrmaaException {
+        LibDrmaa.drmaa_release_attr_values(values.getValue());
+    }
+
+    /** Frees the native job-ids vector obtained from the DRMAA library. */
+    private static void releaseJobIds(PointerByReference jobIds) throws DrmaaException {
+        LibDrmaa.drmaa_release_job_ids(jobIds.getValue());
+    }
+
+    /**
+     * Returns this thread's native error-message buffer.  threadError is declared
+     * elsewhere in this class; presumably a ThreadLocal<Memory> so concurrent
+     * native calls don't clobber each other's diagnostics — TODO confirm.
+     */
+    private static Memory getError() {
+        return threadError.get();
+    }
+
+    /**
+     * Maps a DRMAA errno to an exception, using the message currently stored in
+     * this thread's native error buffer.  Success is a no-op.
+     */
+    private static void checkError(int errnum) throws DrmaaException {
+        if (errnum != LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_SUCCESS)
+            checkError(errnum, getError().getString(0));
+    }
+
+    /**
+     * Translates a DRMAA errno into the corresponding org.ggf.drmaa exception,
+     * mirroring the error taxonomy of the DRMAA 1.0 C binding.  SUCCESS,
+     * NO_RUSAGE and NO_MORE_ELEMENTS are deliberately not treated as errors —
+     * callers inspect those codes themselves (see the vector-drain helpers).
+     *
+     * @param errnum native DRMAA error code
+     * @param error  human-readable message to embed in the thrown exception
+     * @throws DrmaaException the mapped exception for any error code
+     */
+    private static void checkError(int errnum, String error) throws DrmaaException {
+        switch (errnum) {
+            case LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_SUCCESS:
+                break;
+            case LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_INTERNAL_ERROR:
+                throw new InternalException(error);
+            case LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_DRM_COMMUNICATION_FAILURE:
+                throw new DrmCommunicationException(error);
+            case LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_AUTH_FAILURE:
+                throw new AuthorizationException(error);
+            case LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_INVALID_ARGUMENT:
+                throw new IllegalArgumentException(error);
+            case LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_NO_ACTIVE_SESSION:
+                throw new NoActiveSessionException(error);
+            case LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_NO_MEMORY:
+                throw new OutOfMemoryError(error);
+
+                /* -------------- init and exit specific --------------- */
+            case LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_INVALID_CONTACT_STRING:
+                throw new InvalidContactStringException(error);
+            case LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_DEFAULT_CONTACT_STRING_ERROR:
+                throw new DefaultContactStringException(error);
+            case LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_NO_DEFAULT_CONTACT_STRING_SELECTED:
+                throw new NoDefaultContactStringException(error);
+            case LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_DRMS_INIT_FAILED:
+                throw new DrmsInitException(error);
+            case LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_ALREADY_ACTIVE_SESSION:
+                throw new AlreadyActiveSessionException(error);
+            case LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_DRMS_EXIT_ERROR:
+                throw new DrmsExitException(error);
+
+                /* ---------------- job attributes specific -------------- */
+            case LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_INVALID_ATTRIBUTE_FORMAT:
+                throw new InvalidAttributeFormatException(error);
+            case LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_INVALID_ATTRIBUTE_VALUE:
+                throw new InvalidAttributeValueException(error);
+            case LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_CONFLICTING_ATTRIBUTE_VALUES:
+                throw new ConflictingAttributeValuesException(error);
+
+                /* --------------------- job submission specific -------------- */
+            case LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_TRY_LATER:
+                throw new TryLaterException(error);
+            case LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_DENIED_BY_DRM:
+                throw new DeniedByDrmException(error);
+
+                /* ------------------------------- job control specific ---------------- */
+            case LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_INVALID_JOB:
+                throw new InvalidJobException(error);
+            case LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_RESUME_INCONSISTENT_STATE:
+                throw new ResumeInconsistentStateException(error);
+            case LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_SUSPEND_INCONSISTENT_STATE:
+                throw new SuspendInconsistentStateException(error);
+            case LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_HOLD_INCONSISTENT_STATE:
+                throw new HoldInconsistentStateException(error);
+            case LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_RELEASE_INCONSISTENT_STATE:
+                throw new ReleaseInconsistentStateException(error);
+            case LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_EXIT_TIMEOUT:
+                throw new ExitTimeoutException(error);
+            // NO_RUSAGE and NO_MORE_ELEMENTS are informational codes, not failures.
+            case LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_NO_RUSAGE:
+                break;
+            case LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_NO_MORE_ELEMENTS:
+                break;
+            default:
+                throw new IllegalArgumentException(String.format("Unknown error code %d: %s", errnum, error));
+        }
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/jna/drmaa/v1_0/JnaSessionFactory.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/jna/drmaa/v1_0/JnaSessionFactory.java
new file mode 100644
index 0000000..d97be2b
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/jna/drmaa/v1_0/JnaSessionFactory.java
@@ -0,0 +1,40 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.jna.drmaa.v1_0;
+
+import org.ggf.drmaa.Session;
+import org.ggf.drmaa.SessionFactory;
+
+/**
+ * JNA mapping from Java to C DRMAA binding.
+ */
+@SuppressWarnings("unused")
+public class JnaSessionFactory extends SessionFactory {
+    /** Returns a new JNA-backed DRMAA session. */
+    @Override
+    public Session getSession() {
+        return new JnaSession();
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/jna/drmaa/v1_0/LibDrmaa.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/jna/drmaa/v1_0/LibDrmaa.java
new file mode 100644
index 0000000..73ff9cb
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/jna/drmaa/v1_0/LibDrmaa.java
@@ -0,0 +1,723 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.jna.drmaa.v1_0;
+
+import com.sun.jna.*;
+import com.sun.jna.ptr.IntByReference;
+import com.sun.jna.ptr.PointerByReference;
+
+@SuppressWarnings("unused")
+public class LibDrmaa {
+    // Bind this class's native method declarations to the system "drmaa"
+    // shared library when the class is loaded.
+    static {
+        Native.register("drmaa");
+    }
+
+/* see www.drmaa.org for more details on the DRMAA specification */
+/****** DRMAA/-DRMAA_Interface *************************************************
+*  NAME
+*     DRMAA_Interface -- DRMAA interface
+*
+*  FUNCTION
+*     The enlisted functions specify the C/C++ binding of the DRMAA interface
+*     specification.
+*
+*  SEE ALSO
+*     DRMAA/drmaa_get_next_attr_name()
+*     DRMAA/drmaa_get_next_attr_value()
+*     DRMAA/drmaa_get_next_job_id()
+*     DRMAA/drmaa_release_attr_names()
+*     DRMAA/drmaa_release_attr_values()
+*     DRMAA/drmaa_release_job_ids()
+*     DRMAA/drmaa_init()
+*     DRMAA/drmaa_exit()
+*     DRMAA/drmaa_allocate_job_template()
+*     DRMAA/drmaa_delete_job_template()
+*     DRMAA/drmaa_set_attribute()
+*     DRMAA/drmaa_get_attribute()
+*     DRMAA/drmaa_set_vector_attribute()
+*     DRMAA/drmaa_get_vector_attribute()
+*     DRMAA/drmaa_get_attribute_names()
+*     DRMAA/drmaa_get_vector_attribute_names()
+*     DRMAA/drmaa_run_job()
+*     DRMAA/drmaa_run_bulk_jobs()
+*     DRMAA/drmaa_control()
+*     DRMAA/drmaa_synchronize()
+*     DRMAA/drmaa_wait()
+*     DRMAA/drmaa_wifexited()
+*     DRMAA/drmaa_wexitstatus()
+*     DRMAA/drmaa_wifsignaled()
+*     DRMAA/drmaa_wtermsig()
+*     DRMAA/drmaa_wcoredump()
+*     DRMAA/drmaa_wifaborted()
+*     DRMAA/drmaa_job_ps()
+*     DRMAA/drmaa_strerror()
+*     DRMAA/drmaa_get_contact()
+*     DRMAA/drmaa_version()
+*     DRMAA/drmaa_get_DRM_system()
+*******************************************************************************/
+
+/* ------------------- Constants ------------------- */
+/*
+ * some not yet agreed buffer length constants
+ * these are recommended minimum values
+ */
+
+/* drmaa_get_attribute() */
+public static final long DRMAA_ATTR_BUFFER = 1024;
+public static final NativeLong DRMAA_ATTR_BUFFER_LEN = new NativeLong(DRMAA_ATTR_BUFFER - 1);
+
+/* drmaa_get_contact() */
+public static final long DRMAA_CONTACT_BUFFER = 1024;
+public static final NativeLong DRMAA_CONTACT_BUFFER_LEN = new NativeLong(DRMAA_CONTACT_BUFFER - 1);
+
+/* drmaa_get_DRM_system() */
+public static final long DRMAA_DRM_SYSTEM_BUFFER = 1024;
+public static final NativeLong DRMAA_DRM_SYSTEM_BUFFER_LEN = new NativeLong(DRMAA_DRM_SYSTEM_BUFFER - 1);
+
+/* drmaa_get_DRMAA_implementation() */
+public static final long DRMAA_DRMAA_IMPLEMENTATION_BUFFER = 1024;
+public static final NativeLong DRMAA_DRMAA_IMPLEMENTATION_BUFFER_LEN = new NativeLong(DRMAA_DRMAA_IMPLEMENTATION_BUFFER - 1);
+
+/*
+ * Agreed buffer length constants
+ * these are recommended minimum values
+ * (each *_LEN is the buffer size minus one, leaving room for the NUL terminator)
+ */
+public static final long DRMAA_ERROR_STRING_BUFFER = 1024;
+public static final long DRMAA_JOBNAME_BUFFER = 1024;
+public static final long DRMAA_SIGNAL_BUFFER = 32;
+
+public static final NativeLong DRMAA_ERROR_STRING_BUFFER_LEN = new NativeLong(DRMAA_ERROR_STRING_BUFFER - 1);
+public static final NativeLong DRMAA_JOBNAME_BUFFER_LEN = new NativeLong(DRMAA_JOBNAME_BUFFER - 1);
+public static final NativeLong DRMAA_SIGNAL_BUFFER_LEN = new NativeLong(DRMAA_SIGNAL_BUFFER - 1);
+
+/*
+ * Agreed constants
+ */
+public static final NativeLong DRMAA_TIMEOUT_WAIT_FOREVER = new NativeLong(-1);
+public static final NativeLong DRMAA_TIMEOUT_NO_WAIT = new NativeLong(0);
+
+public static final String DRMAA_JOB_IDS_SESSION_ANY = "DRMAA_JOB_IDS_SESSION_ANY";
+public static final String DRMAA_JOB_IDS_SESSION_ALL = "DRMAA_JOB_IDS_SESSION_ALL";
+
+public static final String DRMAA_SUBMISSION_STATE_ACTIVE = "drmaa_active";
+public static final String DRMAA_SUBMISSION_STATE_HOLD = "drmaa_hold";
+
+/*
+ * Agreed placeholder names
+ */
+public static final String DRMAA_PLACEHOLDER_INCR = "$drmaa_incr_ph$";
+public static final String DRMAA_PLACEHOLDER_HD = "$drmaa_hd_ph$";
+public static final String DRMAA_PLACEHOLDER_WD = "$drmaa_wd_ph$";
+
+/*
+ * Agreed names of job template attributes
+ */
+public static final String DRMAA_REMOTE_COMMAND = "drmaa_remote_command";
+public static final String DRMAA_JS_STATE = "drmaa_js_state";
+public static final String DRMAA_WD = "drmaa_wd";
+public static final String DRMAA_JOB_CATEGORY = "drmaa_job_category";
+public static final String DRMAA_NATIVE_SPECIFICATION = "drmaa_native_specification";
+public static final String DRMAA_BLOCK_EMAIL = "drmaa_block_email";
+public static final String DRMAA_START_TIME = "drmaa_start_time";
+public static final String DRMAA_JOB_NAME = "drmaa_job_name";
+public static final String DRMAA_INPUT_PATH = "drmaa_input_path";
+public static final String DRMAA_OUTPUT_PATH = "drmaa_output_path";
+public static final String DRMAA_ERROR_PATH = "drmaa_error_path";
+public static final String DRMAA_JOIN_FILES = "drmaa_join_files";
+public static final String DRMAA_TRANSFER_FILES = "drmaa_transfer_files";
+public static final String DRMAA_DEADLINE_TIME = "drmaa_deadline_time";
+public static final String DRMAA_WCT_HLIMIT = "drmaa_wct_hlimit";
+public static final String DRMAA_WCT_SLIMIT = "drmaa_wct_slimit";
+public static final String DRMAA_DURATION_HLIMIT = "drmaa_duration_hlimit";
+public static final String DRMAA_DURATION_SLIMIT = "drmaa_duration_slimit";
+
+/* names of job template vector attributes */
+public static final String DRMAA_V_ARGV = "drmaa_v_argv";
+public static final String DRMAA_V_ENV = "drmaa_v_env";
+public static final String DRMAA_V_EMAIL = "drmaa_v_email";
+
+/*
+ * DRMAA errno values
+ *
+ * do not touch these values are agreed !!!
+ */
+/** Error codes defined by the DRMAA 1.0 C binding; values must not be changed. */
+public static interface DRMAA_ERRNO {
+   /* -------------- these are relevant to all sections ---------------- */
+   public static final int DRMAA_ERRNO_SUCCESS = 0; /* Routine returned normally with success. */
+   public static final int DRMAA_ERRNO_INTERNAL_ERROR = 1; /* Unexpected or internal DRMAA error like memory allocation, system call failure, etc. */
+   public static final int DRMAA_ERRNO_DRM_COMMUNICATION_FAILURE = 2; /* Could not contact DRM system for this request. */
+   public static final int DRMAA_ERRNO_AUTH_FAILURE = 3; /* The specified request is not processed successfully due to authorization failure. */
+   public static final int DRMAA_ERRNO_INVALID_ARGUMENT = 4; /* The input value for an argument is invalid. */
+   public static final int DRMAA_ERRNO_NO_ACTIVE_SESSION = 5; /* Exit routine failed because there is no active session */
+   public static final int DRMAA_ERRNO_NO_MEMORY = 6; /* failed allocating memory */
+
+   /* -------------- init and exit specific --------------- */
+   public static final int DRMAA_ERRNO_INVALID_CONTACT_STRING = 7; /* Initialization failed due to invalid contact string. */
+   public static final int DRMAA_ERRNO_DEFAULT_CONTACT_STRING_ERROR = 8; /* DRMAA could not use the default contact string to connect to DRM system. */
+   public static final int DRMAA_ERRNO_NO_DEFAULT_CONTACT_STRING_SELECTED = 9; /* No default contact string was provided or selected. DRMAA requires that the default contact string is selected when there is more than one default contact string due to multiple DRMAA implementation contained in the binary module. */
+   public static final int DRMAA_ERRNO_DRMS_INIT_FAILED = 10; /* Initialization failed due to failure to init DRM system. */
+   public static final int DRMAA_ERRNO_ALREADY_ACTIVE_SESSION = 11; /* Initialization failed due to existing DRMAA session. */
+   public static final int DRMAA_ERRNO_DRMS_EXIT_ERROR = 12; /* DRM system disengagement failed. */
+
+   /* ---------------- job attributes specific -------------- */
+   public static final int DRMAA_ERRNO_INVALID_ATTRIBUTE_FORMAT = 13; /* The format for the job attribute value is invalid. */
+   public static final int DRMAA_ERRNO_INVALID_ATTRIBUTE_VALUE = 14; /* The value for the job attribute is invalid. */
+   public static final int DRMAA_ERRNO_CONFLICTING_ATTRIBUTE_VALUES = 15; /* The value of this attribute is conflicting with a previously set attributes. */
+
+   /* --------------------- job submission specific -------------- */
+   public static final int DRMAA_ERRNO_TRY_LATER = 16; /* Could not pass job now to DRM system. A retry may succeed however (saturation). */
+   public static final int DRMAA_ERRNO_DENIED_BY_DRM = 17; /* The DRM system rejected the job. The job will never be accepted due to DRM configuration or job template settings. */
+
+   /* ------------------------------- job control specific ---------------- */
+   public static final int DRMAA_ERRNO_INVALID_JOB = 18; /* The job specified by the 'jobid' does not exist. */
+   public static final int DRMAA_ERRNO_RESUME_INCONSISTENT_STATE = 19; /* The job has not been suspended. The RESUME request will not be processed. */
+   public static final int DRMAA_ERRNO_SUSPEND_INCONSISTENT_STATE = 20; /* The job has not been running, and it cannot be suspended. */
+   public static final int DRMAA_ERRNO_HOLD_INCONSISTENT_STATE = 21; /* The job cannot be moved to a HOLD state. */
+   public static final int DRMAA_ERRNO_RELEASE_INCONSISTENT_STATE = 22; /* The job is not in a HOLD state. */
+   public static final int DRMAA_ERRNO_EXIT_TIMEOUT = 23; /* We have encountered a time-out condition for drmaa_synchronize or drmaa_wait. */
+   public static final int DRMAA_ERRNO_NO_RUSAGE = 24; /* This error code is returned by drmaa_wait() when a job has finished but no rusage and stat data could be provided. */
+   public static final int DRMAA_ERRNO_NO_MORE_ELEMENTS = 25; /* There are no more elements in the opaque string vector. */
+
+   public static final int DRMAA_NO_ERRNO = 26;
+}
+
+/*
+ * Agreed DRMAA job states as returned by drmaa_job_ps()
+ */
+/** Job states returned by drmaa_job_ps(), per the DRMAA 1.0 C binding. */
+public static interface DRMAA_PS {
+ public static final int DRMAA_PS_UNDETERMINED = 0x00; /* process status cannot be determined */
+ public static final int DRMAA_PS_QUEUED_ACTIVE = 0x10; /* job is queued and active */
+ public static final int DRMAA_PS_SYSTEM_ON_HOLD = 0x11; /* job is queued and in system hold */
+ public static final int DRMAA_PS_USER_ON_HOLD = 0x12; /* job is queued and in user hold */
+ public static final int DRMAA_PS_USER_SYSTEM_ON_HOLD = 0x13; /* job is queued and in user and system hold */
+ public static final int DRMAA_PS_RUNNING = 0x20; /* job is running */
+ public static final int DRMAA_PS_SYSTEM_SUSPENDED = 0x21; /* job is system suspended */
+ public static final int DRMAA_PS_USER_SUSPENDED = 0x22; /* job is user suspended */
+ public static final int DRMAA_PS_USER_SYSTEM_SUSPENDED = 0x23; /* job is user and system suspended */
+ public static final int DRMAA_PS_DONE = 0x30; /* job finished normally */
+ public static final int DRMAA_PS_FAILED = 0x40;  /* job finished, but failed */
+}
+
+/*
+ * Agreed DRMAA actions for drmaa_control()
+ */
+/** Control actions accepted by drmaa_control(), per the DRMAA 1.0 C binding. */
+public static interface DRMAA_CONTROL {
+ public static final int DRMAA_CONTROL_SUSPEND = 0;
+ public static final int DRMAA_CONTROL_RESUME = 1;
+ public static final int DRMAA_CONTROL_HOLD = 2;
+ public static final int DRMAA_CONTROL_RELEASE = 3;
+ public static final int DRMAA_CONTROL_TERMINATE = 4;
+}
+
+/* ------------------- Data types ------------------- */
+/*
+ * Agreed opaque DRMAA job template
+ * struct drmaa_job_template_s is in japiP.h
+ */
+//typedef struct drmaa_job_template_s drmaa_job_template_t;
+
+/* ---------- C/C++ language binding specific interfaces -------- */
+
+//typedef struct drmaa_attr_names_s drmaa_attr_names_t;
+//typedef struct drmaa_attr_values_s drmaa_attr_values_t;
+//typedef struct drmaa_job_ids_s  drmaa_job_ids_t;
+
+/*
+ * get next string attribute from iterator
+ *
+ * returns DRMAA_ERRNO_SUCCESS or DRMAA_ERRNO_INVALID_ATTRIBUTE_VALUE
+ * if no such exists
+ */
+
+public static native int drmaa_get_next_attr_name(/* drmaa_attr_names_t* */ Pointer values, Pointer value,
+                             NativeLong value_len);
+public static native int drmaa_get_next_attr_value(/* drmaa_attr_names_t* */ Pointer values, Pointer value,
+                              NativeLong value_len);
+public static native int drmaa_get_next_job_id(/* drmaa_job_ids_t* */ Pointer values, Pointer value,
+                          NativeLong value_len);
+
+/*
+ * get element count of opaque string vector
+ *
+ * Gives the number of elements in the opaque string vector.  Useful for
+ * copying the contents into an array.
+ */
+public static native int drmaa_get_num_attr_names(/* drmaa_attr_names_t* */ Pointer values, IntByReference size);
+public static native int drmaa_get_num_attr_values(/* drmaa_attr_values_t* */ Pointer values, IntByReference size);
+public static native int drmaa_get_num_job_ids(/* drmaa_job_ids_t* */ Pointer values, IntByReference size);
+
+/*
+ * release opaque string vector
+ *
+ * Opaque string vectors can be used without any constraint
+ * until the release function has been called.
+ */
+public static native void drmaa_release_attr_names(/* drmaa_attr_names_t* */ Pointer values);
+public static native void drmaa_release_attr_values(/* drmaa_attr_values_t* */ Pointer values);
+public static native void drmaa_release_job_ids(/* drmaa_job_ids_t* */ Pointer values);
+
+/* ------------------- init/exit routines ------------------- */
+/*
+ * Initialize DRMAA API library and create a new DRMAA Session. 'Contact'
+ * is an implementation dependent string which MAY be used to specify
+ * which DRM system to use. This routine MUST be called before any
+ * other DRMAA calls, except for drmaa_version().
+ * If 'contact' is NULL, the default DRM system SHALL be used provided there is
+ * only one DRMAA implementation in the provided binary module.  When these is
+ * more than one DRMAA implementation in the binary module, drmaa_init() SHALL
+ * return the DRMAA_ERRNO_NO_DEFAULT_CONTACT_STRING_SELECTED error. drmaa_init()
+ * SHOULD be called by only one of the threads. The main thread is RECOMMENDED.
+ * A call by another thread SHALL return DRMAA_ERRNO_ALREADY_ACTIVE_SESSION.
+ * When 'contact' is a a semi-colon separated list of name=value strings, the
+ * strings will be parsed and interpreted.  The current list of accepted names
+ * is:
+ *    session -- the id of the session to which to reconnect
+#if 0
+ *    sge_root -- the SGE_ROOT to use
+ *    sge_cell -- the SGE_CELL to use
+#endif
+ *
+ * drmaa_init() SHALL return DRMAA_ERRNO_SUCCESS on success, otherwise:
+ *    DRMAA_ERRNO_INVALID_CONTACT_STRING,
+ *    DRMAA_ERRNO_NO_MEMORY,
+ *    DRMAA_ERRNO_ALREADY_ACTIVE_SESSION,
+ *    DRMAA_ERRNO_NO_DEFAULT_CONTACT_STRING_SELECTED, or
+ *    DRMAA_ERRNO_DEFAULT_CONTACT_STRING_ERROR.
+ */
+public static native int drmaa_init(String contact, Pointer error_diagnosis, NativeLong error_diag_len);
+
+
+/*
+ * Disengage from DRMAA library and allow the DRMAA library to perform
+ * any necessary internal clean up.
+ * This routine SHALL end the current DRMAA Session, but SHALL NOT affect any
+ * jobs (e.g., queued and running jobs SHALL remain queued and running).
+ * drmaa_exit() SHOULD be called by only one of the threads. Other thread calls
+ * to drmaa_exit() MAY fail since there is no active session.
+ *
+ * drmaa_exit() SHALL return DRMAA_ERRNO_SUCCESS on success, otherwise:
+ *    DRMAA_ERRNO_DRMS_EXIT_ERROR or
+ *    DRMAA_ERRNO_NO_ACTIVE_SESSION.
+ */
+public static native int drmaa_exit(Pointer error_diagnosis, NativeLong error_diag_len);
+
+/* ------------------- job template routines ------------------- */
+
+/*
+ * Allocate a new job template.
+ *
+ * drmaa_allocate_job_template() SHALL return DRMAA_ERRNO_SUCCESS on success,
+ * otherwise:
+ *    DRMAA_ERRNO_DRM_COMMUNICATION_FAILURE,
+ *    DRMAA_ERRNO_INTERNAL_ERROR or
+ *    DRMAA_ERRNO_NO_MEMORY.
+ */
+public static native int drmaa_allocate_job_template(/* drmaa_job_template_t** */ PointerByReference jt, Pointer error_diagnosis, NativeLong error_diag_len);
+
+/*
+ * Deallocate a job template. This routine has no effect on jobs.
+ *
+ * drmaa_delete_job_template() SHALL return DRMAA_ERRNO_SUCCESS on success,
+ * otherwise:
+ *    DRMAA_ERRNO_DRM_COMMUNICATION_FAILURE or
+ *    DRMAA_ERRNO_INTERNAL_ERROR.
+ */
+public static native int drmaa_delete_job_template(/* drmaa_job_template_t* */ Pointer jt, Pointer error_diagnosis,
+                              NativeLong error_diag_len);
+
+
+/*
+ * Adds ('name', 'value') pair to list of attributes in job template 'jt'.
+ * Only non-vector attributes SHALL be passed.
+ *
+ * drmaa_set_attribute() SHALL return DRMAA_ERRNO_SUCCESS on success, otherwise:
+ *    DRMAA_ERRNO_INVALID_ATTRIBUTE_FORMAT,
+ *    DRMAA_ERRNO_INVALID_ARGUMENT,
+ *    DRMAA_ERRNO_NO_MEMORY,
+ *    DRMAA_ERRNO_INVALID_ATTRIBUTE_VALUE or
+ *    DRMAA_ERRNO_CONFLICTING_ATTRIBUTE_VALUES.
+ */
+public static native int drmaa_set_attribute(/* drmaa_job_template_t* */ Pointer jt, String name,
+                        String value, Pointer error_diagnosis,
+                        NativeLong error_diag_len);
+
+
+/*
+ * If 'name' is an existing non-vector attribute name in the job
+ * template 'jt', then the value of 'name' SHALL be returned; otherwise,
+ * NULL is returned.
+ *
+ * drmaa_get_attribute() SHALL return DRMAA_ERRNO_SUCCESS on success, otherwise:
+ *    DRMAA_ERRNO_INVALID_ATTRIBUTE_VALUE.
+ */
+public static native int drmaa_get_attribute(/* drmaa_job_template_t* */ Pointer jt, String name, Pointer value,
+                        NativeLong value_len, Pointer error_diagnosis,
+                        NativeLong error_diag_len);
+
+/* Adds ('name', 'values') pair to list of vector attributes in job template
+ * 'jt'. Only vector attributes SHALL be passed.
+ * A 'value' string vector containing n elements must be n+1 elements long, with
+ * the nth value, i.e. value[n], being set to NULL as a delimiter.
+ *
+ * drmaa_set_vector_attribute() SHALL return DRMAA_ERRNO_SUCCESS on success,
+ * otherwise:
+ *    DRMAA_ERRNO_INVALID_ATTRIBUTE_FORMAT,
+ *    DRMAA_ERRNO_INVALID_ARGUMENT,
+ *    DRMAA_ERRNO_NO_MEMORY,
+ *    DRMAA_ERRNO_CONFLICTING_ATTRIBUTE_VALUES.
+ */
+public static native int drmaa_set_vector_attribute(/* drmaa_job_template_t* */ Pointer jt, String name,
+                               Pointer value, Pointer error_diagnosis,
+                               NativeLong error_diag_len);
+
+
+/*
+ * If 'name' is an existing vector attribute name in the job template 'jt',
+ * then the values of 'name' are returned; otherwise, NULL is returned.
+ *
+ * drmaa_get_vector_attribute() SHALL return DRMAA_ERRNO_SUCCESS on success,
+ * otherwise:
+ *    DRMAA_ERRNO_INVALID_ATTRIBUTE_VALUE.
+ */
+public static native int drmaa_get_vector_attribute(/* drmaa_job_template_t* */ Pointer jt, String name,
+                               /* drmaa_attr_values_t ** */ PointerByReference values,
+                               Pointer error_diagnosis, NativeLong error_diag_len);
+
+
+/*
+ * SHALL return the set of supported attribute names whose associated
+ * value type is String. This set SHALL include supported DRMAA reserved
+ * attribute names and native attribute names.
+ *
+ * drmaa_get_attribute_names() SHALL return DRMAA_ERRNO_SUCCESS on success,
+ * otherwise:
+ *    DRMAA_ERRNO_NO_MEMORY.
+ */
+public static native int drmaa_get_attribute_names(/* drmaa_attr_names_t ** */ PointerByReference values,
+                              Pointer error_diagnosis, NativeLong error_diag_len);
+
+/*
+ * SHALL return the set of supported attribute names whose associated
+ * value type is String Vector.  This set SHALL include supported DRMAA reserved
+ * attribute names and native attribute names.
+ *
+ * drmaa_get_vector_attribute_names() SHALL return DRMAA_ERRNO_SUCCESS on
+ * success, otherwise:
+ *    DRMAA_ERRNO_NO_MEMORY.
+ */
+public static native int drmaa_get_vector_attribute_names(/* drmaa_attr_names_t ** */ PointerByReference values,
+                                     Pointer error_diagnosis,
+                                     NativeLong error_diag_len);
+
+/* ------------------- job submission routines ------------------- */
+
+/*
+ * Submit a job with attributes defined in the job template 'jt'.
+ * The job identifier 'job_id' is a printable, NULL terminated string,
+ * identical to that returned by the underlying DRM system.
+ *
+ * drmaa_run_job() SHALL return DRMAA_ERRNO_SUCCESS on success, otherwise:
+ *    DRMAA_ERRNO_TRY_LATER,
+ *    DRMAA_ERRNO_DENIED_BY_DRM,
+ *    DRMAA_ERRNO_NO_MEMORY,
+ *    DRMAA_ERRNO_DRM_COMMUNICATION_FAILURE or
+ *    DRMAA_ERRNO_AUTH_FAILURE.
+ */
+public static native int drmaa_run_job(Pointer job_id, NativeLong job_id_len,
+                  /* drmaa_job_template_t * */ Pointer jt, Pointer error_diagnosis,
+                  NativeLong error_diag_len);
+
+/*
+ * Submit a set of parametric jobs, dependent on the implied loop index, each
+ * with attributes defined in the job template 'jt'.
+ * The job identifiers 'job_ids' SHALL all be printable,
+ * NULL terminated strings, identical to those returned by the underlying
+ * DRM system. Nonnegative loop bounds SHALL NOT use file names
+ * that start with minus sign like command line options.
+ * DRMAA defines a special index placeholder, drmaa_incr_ph, (which has the
+ * value "$incr_pl$") that is used to construct parametric job templates.
+ * For example:
+ * //C++ string syntax used
+ * drmaa_set_attribute(pjt, "stderr", drmaa_incr_ph + ".err" );
+ *
+ * drmaa_run_bulk_jobs() SHALL return DRMAA_ERRNO_SUCCESS on success, otherwise:
+ *    DRMAA_ERRNO_TRY_LATER,
+ *    DRMAA_ERRNO_DENIED_BY_DRM,
+ *    DRMAA_ERRNO_NO_MEMORY,
+ *    DRMAA_ERRNO_DRM_COMMUNICATION_FAILURE or
+ *    DRMAA_ERRNO_AUTH_FAILURE.
+ */
+public static native int drmaa_run_bulk_jobs(/* drmaa_job_ids_t ** */ PointerByReference jobids,
+                        /* drmaa_job_template_t * */ Pointer jt, int start, int end,
+                        int incr, Pointer error_diagnosis, NativeLong error_diag_len);
+
+/* ------------------- job control routines ------------------- */
+
+/*
+ * Start, stop, restart, or kill the job identified by 'job_id'.
+ * If 'job_id' is DRMAA_JOB_IDS_SESSION_ALL then this routine
+ * acts on all jobs *submitted* during this DRMAA session.
+ * The legal values for 'action' and their meanings SHALL be:
+ * DRMAA_CONTROL_SUSPEND:     stop the job,
+ * DRMAA_CONTROL_RESUME:      (re)start the job,
+ * DRMAA_CONTROL_HOLD:        put the job on-hold,
+ * DRMAA_CONTROL_RELEASE:     release the hold on the job, and
+ * DRMAA_CONTROL_TERMINATE:   kill the job.
+ *
+ * This routine SHALL return once the action has been acknowledged by
+ * the DRM system, but does not necessarily wait until the action
+ * has been completed.
+ *
+ * drmaa_control() SHALL return DRMAA_ERRNO_SUCCESS on success, otherwise:
+ *    DRMAA_ERRNO_DRM_COMMUNICATION_FAILURE,
+ *    DRMAA_ERRNO_AUTH_FAILURE,
+ *    DRMAA_ERRNO_NO_MEMORY,
+ *    DRMAA_ERRNO_RESUME_INCONSISTENT_STATE,
+ *    DRMAA_ERRNO_SUSPEND_INCONSISTENT_STATE,
+ *    DRMAA_ERRNO_HOLD_INCONSISTENT_STATE,
+ *    DRMAA_ERRNO_RELEASE_INCONSISTENT_STATE or
+ *    DRMAA_ERRNO_INVALID_JOB.
+ */
+public static native int drmaa_control(String jobid, int action, Pointer error_diagnosis,
+                  NativeLong error_diag_len);
+
+
+/*
+ * Wait until all jobs specified by 'job_ids' have finished
+ * execution. If 'job_ids' is DRMAA_JOB_IDS_SESSION_ALL then this routine
+ * waits for all jobs *submitted* during this DRMAA session. The timeout value
+ * is used to specify the number of seconds to wait for the job to fail or finish
+ * before returning if a result is not immediately available.  The value
+ * DRMAA_TIMEOUT_WAIT_FOREVER can be used to specify that routine should wait
+ * indefinitely for a result. The value DRMAA_TIMEOUT_NO_WAIT can be used to
+ * specify that the routine should return immediately if no result is available.
+ * If the call exits before timeout, all the jobs have
+ * been waited on or there was an interrupt.
+ * If the invocation exits on timeout, the return code is
+ * DRMAA_ERRNO_EXIT_TIMEOUT. The caller SHOULD check system time before and
+ * after this call in order to check how much time has passed.
+ *
+ * The dispose parameter specifies how to treat reaping information:
+ * True=1      "fake reap", i.e. dispose of the rusage data
+ * False=0     do not reap
+ *
+ * A 'job_ids' string vector containing n elements must be n+1 elements long,
+ * with the nth value, i.e. job_ids[n], being set to NULL as a delimiter.
+ *
+ * drmaa_synchronize() SHALL return DRMAA_ERRNO_SUCCESS on success, otherwise:
+ *    DRMAA_ERRNO_DRM_COMMUNICATION_FAILURE,
+ *    DRMAA_ERRNO_AUTH_FAILURE,
+ *    DRMAA_ERRNO_NO_MEMORY,
+ *    DRMAA_ERRNO_EXIT_TIMEOUT or
+ *    DRMAA_ERRNO_INVALID_JOB.
+ */
+public static native int drmaa_synchronize(Pointer job_ids, NativeLong timeout, int dispose,
+                      Pointer error_diagnosis, NativeLong error_diag_len);
+
+
+/*
+ * This routine SHALL wait for a job with job_id to fail or finish execution. If
+ * the special string, DRMAA_JOB_IDS_SESSION_ANY is provided as the job_id,
+ * this routine SHALL wait for any job from the session. This routine is modeled
+ * on the wait3 POSIX routine. The timeout value is used to specify the number
+ * of seconds to wait for the job to fail or finish before returning if a result is
+ * not immediately available.  The value DRMAA_TIMEOUT_WAIT_FOREVER can be
+ * used to specify that routine should wait indefinitely for a result. The value
+ * DRMAA_TIMEOUT_NO_WAIT may be used to specify that the routine should return
+ * immediately if no result is available.
+ * If the call exits before timeout, the job has been waited on
+ * successfully or there was an interrupt.
+ * If the invocation exits on timeout, the return code is
+ * DRMAA_ERRNO_EXIT_TIMEOUT. The caller SHOULD check system time before and
+ * after this call in order to check how much time has passed.
+ * The routine reaps jobs on a successful call, so any subsequent calls
+ * to drmaa_wait SHOULD fail returning an error DRMAA_ERRNO_INVALID_JOB meaning
+ * that the job has been already reaped. This error is the same as if the job
+ * was unknown. Failing due to an elapsed timeout has an effect that it is
+ * possible to issue drmaa_wait multiple times for the same job_id.  When
+ * successful, the rusage information SHALL be provided as an array of strings,
+ * where each string complies with the format <name>=<value>. The string portion
+ * <value> contains the amount of resources consumed by the job and is opaque.
+ * The 'stat' drmaa_wait parameter is used in the drmaa_w* functions for
+ * providing more detailed information about job termination if available. An
+ * analogous set of macros is defined in POSIX for analyzing the wait3(2) OUT
+ * parameter 'stat'.
+ *
+ * drmaa_wait() SHALL return DRMAA_ERRNO_SUCCESS on success, otherwise:
+ *    DRMAA_ERRNO_DRM_COMMUNICATION_FAILURE,
+ *    DRMAA_ERRNO_AUTH_FAILURE,
+ *    DRMAA_ERRNO_NO_RUSAGE,
+ *    DRMAA_ERRNO_NO_MEMORY,
+ *    DRMAA_ERRNO_EXIT_TIMEOUT or
+ *    DRMAA_ERRNO_INVALID_JOB.
+ */
+public static native int drmaa_wait(String job_id, Pointer job_id_out, NativeLong job_id_out_len,
+               IntByReference stat, NativeLong timeout, /* drmaa_attr_values_t ** */ PointerByReference rusage,
+               Pointer error_diagnosis, NativeLong error_diag_len);
+
+/*
+ * Evaluates into 'exited' a non-zero value if stat was returned for a
+ * job that terminated normally. A zero value can also indicate that
+ * although the job has terminated normally an exit status is not available
+ * or that it is not known whether the job terminated normally. In both
+ * cases drmaa_wexitstatus() SHALL NOT provide exit status information.
+ * A non-zero 'exited' value indicates more detailed diagnosis can be provided
+ * by means of drmaa_wifsignaled(), drmaa_wtermsig() and drmaa_wcoredump().
+ */
+public static native int drmaa_wifexited(IntByReference exited, int stat, Pointer error_diagnosis,
+                    NativeLong error_diag_len);
+
+/*
+ * If the OUT parameter 'exited' of drmaa_wifexited() is non-zero,
+ * this function evaluates into 'exit_code' the exit code that the
+ * job passed to _exit() (see exit(2)) or exit(3C), or the value that
+ * the child process returned from main.
+ */
+public static native int drmaa_wexitstatus(IntByReference exit_status, int stat, Pointer error_diagnosis,
+                      NativeLong error_diag_len);
+
+/*
+ * Evaluates into 'signaled' a non-zero value if status was returned
+ * for a job that terminated due to the receipt of a signal. A zero value
+ * can also indicate that although the job has terminated due to the receipt
+ * of a signal the signal is not available or that it is not known whether
+ * the job terminated due to the receipt of a signal. In both cases
+ * drmaa_wtermsig() SHALL NOT provide signal information.
+ */
+public static native int drmaa_wifsignaled(IntByReference signaled, int stat, Pointer error_diagnosis,
+                      NativeLong error_diag_len);
+
+/*
+ * If the OUT parameter 'signaled' of drmaa_wifsignaled(stat) is
+ * non-zero, this function evaluates into signal a string representation of the
+ * signal that caused the termination of the job. For signals declared by POSIX,
+ * the symbolic names SHALL be returned (e.g., SIGABRT, SIGALRM).
+ * For signals not declared by POSIX, any other string MAY be returned.
+ */
+public static native int drmaa_wtermsig(Pointer signal, NativeLong signal_len, int stat,
+                   Pointer error_diagnosis, NativeLong error_diag_len);
+
+/*
+ * If the OUT parameter 'signaled' of drmaa_wifsignaled(stat) is
+ * non-zero, this function evaluates into 'core_dumped' a non-zero value
+ * if a core image of the terminated job was created.
+ */
+public static native int drmaa_wcoredump(IntByReference core_dumped, int stat, Pointer error_diagnosis,
+                    NativeLong error_diag_len);
+
+/*
+ * Evaluates into 'aborted' a non-zero value if 'stat'
+ * was returned for a job that ended before entering the running state.
+ */
+public static native int drmaa_wifaborted(IntByReference aborted, int stat, Pointer error_diagnosis,
+                     NativeLong error_diag_len);
+
+
+
+/*
+ * Get the program status of the job identified by 'job_id'.
+ * The possible values returned in 'remote_ps' and their meanings SHALL be:
+ *
+ * DRMAA_PS_UNDETERMINED          = 0x00: process status cannot be determined
+ * DRMAA_PS_QUEUED_ACTIVE         = 0x10: job is queued and active
+ * DRMAA_PS_SYSTEM_ON_HOLD        = 0x11: job is queued and in system hold
+ * DRMAA_PS_USER_ON_HOLD          = 0x12: job is queued and in user hold
+ * DRMAA_PS_USER_SYSTEM_ON_HOLD   = 0x13: job is queued and in user and system
+ *                                        hold
+ * DRMAA_PS_RUNNING               = 0x20: job is running
+ * DRMAA_PS_SYSTEM_SUSPENDED      = 0x21: job is system suspended
+ * DRMAA_PS_USER_SUSPENDED        = 0x22: job is user suspended
+ * DRMAA_PS_USER_SYSTEM_SUSPENDED = 0x23: job is user and system suspended
+ * DRMAA_PS_DONE                  = 0x30: job finished normally
+ * DRMAA_PS_FAILED                = 0x40: job finished, but failed
+ *
+ * DRMAA SHOULD always get the status of job_id from DRM system, unless the
+ * previous status has been DRMAA_PS_FAILED or DRMAA_PS_DONE and the status has
+ * been successfully cached. Terminated jobs get DRMAA_PS_FAILED status.
+ *
+ * drmaa_job_ps() SHALL return DRMAA_ERRNO_SUCCESS on success, otherwise:
+ *    DRMAA_ERRNO_DRM_COMMUNICATION_FAILURE,
+ *    DRMAA_ERRNO_AUTH_FAILURE,
+ *    DRMAA_ERRNO_NO_MEMORY or
+ *    DRMAA_ERRNO_INVALID_JOB.
+ */
+public static native int drmaa_job_ps(String job_id, IntByReference remote_ps, Pointer error_diagnosis,
+                 NativeLong error_diag_len);
+
+/* ------------------- auxiliary routines ------------------- */
+
+/*
+ * SHALL return the error message text associated with the errno number. The
+ * routine SHALL return null string if called with invalid ERRNO number.
+ */
+public static native String drmaa_strerror(int drmaa_errno);
+
+/*
+ * If called before drmaa_init(), it SHALL return a comma delimited default
+ * DRMAA implementation contacts string, one per each DRM system provided
+ * implementation. If called after drmaa_init(), it SHALL return the selected
+ * contact string. The output string is Implementation dependent.
+ * drmaa_get_contact() SHALL return DRMAA_ERRNO_SUCCESS on success, otherwise:
+ *    DRMAA_ERRNO_INTERNAL_ERROR.
+ */
+public static native int drmaa_get_contact(Pointer contact, NativeLong contact_len,
+         Pointer error_diagnosis, NativeLong error_diag_len);
+
+/*
+ * OUT major - major version number (non-negative integer)
+ * OUT minor - minor version number (non-negative integer)
+ * SHALL return the major and minor version numbers of the DRMAA library;
+ * for DRMAA 1.0, 'major' is 1 and 'minor' is 0.
+ */
+public static native int drmaa_version(IntByReference major, IntByReference minor,
+         Pointer error_diagnosis, NativeLong error_diag_len);
+
+
+/*
+ * If called before drmaa_init(), it SHALL return a comma delimited DRM systems
+ * string, one per each DRM system provided implementation. If called after
+ * drmaa_init(), it SHALL return the selected DRM system. The output string is
+ * implementation dependent.
+ *
+ * drmaa_get_DRM_system() SHALL return DRMAA_ERRNO_SUCCESS on success,
+ * otherwise:
+ *    DRMAA_ERRNO_INTERNAL_ERROR.
+ */
+public static native int drmaa_get_DRM_system(Pointer drm_system, NativeLong drm_system_len,
+         Pointer error_diagnosis, NativeLong error_diag_len);
+
+
+/*
+ * If called before drmaa_init(), it SHALL return a comma delimited DRMAA
+ * implementations string, one per each DRM system provided implementation. If
+ * called after drmaa_init(), it SHALL return the selected DRMAA implementation.
+ * The output (string) is implementation dependent. drmaa_get_DRM_implementation
+ * routine SHALL return DRMAA_ERRNO_SUCCESS on success, otherwise:
+ *    DRMAA_ERRNO_INTERNAL_ERROR.
+ */
+public static native int drmaa_get_DRMAA_implementation(Pointer drmaa_impl, NativeLong drmaa_impl_len,
+         Pointer error_diagnosis, NativeLong error_diag_len);
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/jna/lsf/v7_0_6/LibBat.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/jna/lsf/v7_0_6/LibBat.java
new file mode 100644
index 0000000..299b331
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/jna/lsf/v7_0_6/LibBat.java
@@ -0,0 +1,20014 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.jna.lsf.v7_0_6;
+
+import com.sun.jna.*;
+import com.sun.jna.ptr.*;
+import org.broadinstitute.gatk.utils.jna.clibrary.JNAUtils;
+import org.broadinstitute.gatk.utils.jna.clibrary.LibC;
+
+/*
+  NOTE: This library uses Pointer for some Struct.ByReference members going
+  against the JNA recommendations at http://jna.java.net/#structure_use
+  Instead struct arrays are Pointers and each structure contains a
+  constructor that can accept the Pointer iff the size of the array is
+  known to be greater than zero.
+
+  This was especially problematic in jobInfoEnt->items->resName. When
+  jobInfo->reserveCnt was zero jobInfoItems->items was not necessarily null.
+
+  LSF will often reuse memory for structure arrays but will set the
+  array size / count (reserveCnt above) to zero when the array should
+  not be accessed. When LSF has reused memory and points to a non-null
+  structure pointer (items) the inner structure may contain further
+  garbage pointers (especially items->resName).
+
+  When JNA sees a non-null Structure.ByReference it will autoRead() the
+  member. When autoRead() eventually gets to the items->resName trying
+  to run strlen on the bad memory address causes a SIGSEGV.
+
+  By using a Pointer instead of the Structure.ByReference JNA will not
+  automatically autoRead(), and the API user will have to pass the
+  pointer to the Structure on their own.
+*/
+
+/**
+ * JNA wrappers for LSF's lsbatch.h and -lbat
+ *
+ * $Id: lsbatch.h,v 2.1043 2009/08/06 16:50:49 bxia Exp $
+ * -----------------------------------------------------------------
+ *
+ *  Lsbatch Distributed Batch Utility --
+ *
+ *  Header file for all lsbatch components: applications, lsblib,
+ *                                          mbatchd and sbatchd
+ *
+ * ------------------------------------------------------------------
+ */
+@SuppressWarnings("unused")
+public class LibBat {
+
+    /*
+     * One-time native bootstrap for the LSF batch library. Order matters:
+     * environment and search paths must be configured BEFORE Native.register
+     * binds this class's native methods to libbat.
+     */
+    static {
+        // via Platform LSF Configuration Reference, by default quiet the BSUB output.
+        if ("Y".equals(System.getProperty("BSUB_QUIET", "Y")))
+            LibC.setenv("BSUB_QUIET", "Y", 1);
+        // Let JNA find liblsf/libbat in the cluster's configured library directory.
+        String lsfLibDir = System.getenv("LSF_LIBDIR");
+        if (lsfLibDir != null) {
+            NativeLibrary.addSearchPath("lsf", lsfLibDir);
+            NativeLibrary.addSearchPath("bat", lsfLibDir);
+        }
+        /*
+        LSF 7.0.6 on the mac is missing the unsatisfied exported symbol for environ which was removed on MacOS X 10.5+.
+        nm $LSF_LIBDIR/liblsf.dylib | grep environ
+        See "man environ" for more info, along with http://lists.apple.com/archives/java-dev/2007/Dec/msg00096.html
+        For now, we export environ ourselves using libenvironhack.dylib available in c/libenvironhack.
+        */
+        if (Platform.isMac())
+            NativeLibrary.getInstance("environhack");
+        NativeLibrary liblsf = NativeLibrary.getInstance("lsf");
+        // Bind every 'public static native' method declared in this class to libbat.
+        Native.register("bat");
+        // HACK: Running into a weird error:
+        //   java.lang.UnsatisfiedLinkError: Unable to load library 'bat': <$LSF_LIBDIR>/libbat.so: undefined symbol: xdr_resourceInfoReq
+        // This function is very clearly unsatisfied by running 'nm $LSF_LIBDIR/libbat.so | grep xdr_resourceInfoReq' but is
+        // found in liblsf.so when running 'nm $LSF_LIBDIR/liblsf.so | grep xdr_resourceInfoReq'. For now holding on to a reference
+        // to the LSF lib just in case this is a problem with the NativeLibrary's internal WeakReferences and the library being unloaded?
+        liblsf.getFunction("xdr_resourceInfoReq").getName();
+    }
+
+    // Via support@platform.com:
+    //    For equivalent api of bsub -a "xxx aaa qqq", option -a is not in struct submit, we
+    //    have to use setOption_ to set it. setOption_ can be used in user program by including
+    //    cmd.h or opensource.h of LSF opensource. You can refer to cmd.sub.c in opensource.
+    //
+    //    Here is a demonstration on the api for bsub -a
+    //    =========================================================================
+    //    /*define external setOption_ function*/
+    //    extern int setOption_(int argc, char **argv, char *template,
+    //    struct submit *req, int mask, int mask2, char **errMsg);
+    //
+    //    int setEsub(char *esub, struct submit *req) {
+    //    int x;
+    //    char *template, *arg[3];
+    //    /*set esub with the following strings and set array length*/
+    //    arg[0] = "blah";
+    //    arg[1] = "-a";
+    //    arg[2] = test;
+    //    /* -a "test", You can add additional esubs in here.  Just make sure they're space delimited.  ie. "test mpich lammpi" */
+    //    x=3;
+    //    /*set template*/
+    //    template = "a:";
+    //    /*run setOption_()*/
+    //    if (setOption_(x, arg, template, req, ~0, ~0, ~0, NULL) == -1) {
+    //    return(-1);
+    //    }
+    //    else {
+    //    return(0);
+    //    }
+    //    }
+    //    =========================================================================
+
+    /**
+     * Used for setting esub and other options not in struct submit.
+     * Via support@platform.com
+     *
+     * @param argc number of args
+     * @param argv arguments including a first argument that will not be used
+     * @param template a colon delimited list of arguments in getopt format
+     * @param jobSubReq the lsf submit
+     * @param mask unknown
+     * @param mask2 unknown
+     * @param mask3 unknown
+     * @param errMsg unknown
+     * @return -1 if the option setting failed
+     */
+    public static native int setOption_(int argc, Pointer argv, String template, submit jobSubReq, int mask, int mask2, int mask3, Pointer errMsg);
+
+    /** Max job name length as defined by 'man bsub'. */
+    public static final int MAX_JOB_NAME_LEN = 4094;
+
+/* if only everyone had <paths.h> */
+    public static final String _PATH_NULL = "/dev/null";
+
+    //public static int SKIP_SPACES (int word)  { while (word[0] == ' ' )  word++; }
+
+    //public static void FREEUP_ARRAY(int num, Pointer vector) { FREE_STRING_VECTOR_ENTRIES(num, vector);  FREEUP(vector); }
+
+
+/* event log version:
+*  each new major release requires to add a new line
+ */
+    public static final float LSB_EVENT_VERSION3_0 = 3.0f;
+    public static final float LSB_EVENT_VERSION3_1 = 3.1f;
+    public static final float LSB_EVENT_VERSION3_2 = 3.2f;
+    public static final float LSB_EVENT_VERSION4_0 = 4.0f;
+    public static final float LSB_EVENT_VERSION4_1 = 4.1f;
+    public static final float LSB_EVENT_VERSION4_2 = 4.2f;
+    public static final float LSB_EVENT_VERSION5_0 = 5.0f;
+    public static final float LSB_EVENT_VERSION5_1 = 5.1f;
+    public static final float LSB_EVENT_VERSION6_0 = 6.0f;
+    public static final float LSB_EVENT_VERSION6_1 = 6.1f;
+    public static final float LSB_EVENT_VERSION6_2 = 6.2f;
+    public static final float LSB_EVENT_VERSION7_0 = 7.0f;
+    public static final float LSB_EVENT_VERSION7_0_1 = 7.01f;
+    public static final float LSB_EVENT_VERSION7_0_2 = 7.02f;
+    public static final float LSB_EVENT_VERSION7_0_3 = 7.03f;
+    public static final float LSB_EVENT_VERSION7_0_4 = 7.04f;
+    public static final float LSB_EVENT_VERSION7_0_5 = 7.05f;
+    public static final float LSB_EVENT_VERSION7_0_6 = 7.06f;
+
+/* current event version number of the mbatchd */
+    public static final String THIS_VERSION = "7.06";
+
+    public static final int MAX_VERSION_LEN = 12;
+
+/* num of users per host partition */
+    public static final int MAX_HPART_USERS = 100;
+
+/* max byte limit, OS independent */
+    public static final int MAX_CHARLEN = 20;
+
+/* the max length of name */
+    public static final int MAX_LSB_NAME_LEN = 60;
+
+/*the max length of user group*/
+    public static final int MAX_LSB_UG_NAME_LEN = 512;
+
+/*Maximum levels that a user group hierarchy can have*/
+    public static final int MAX_LSB_UG_HIERDEPTH = 64;
+
+/* the max length of command */
+    public static final int MAX_CMD_DESC_LEN = 512;
+
+/* for the local cluster */
+    public static final int MAX_CALENDARS = 256;
+
+/* max num of user equivalent entries */
+    public static final int MAX_USER_EQUIVALENT = 128;
+
+/* max num of user mapping entries */
+    public static final int MAX_USER_MAPPING = 128;
+
+/* max external msg's description length */
+    public static final int MAXDESCLEN = 20 * 512;
+
+/* num of user or host group */
+    public static final int MAX_GROUPS = 1024;
+
+/*
+*  RFC #725
+ */
+
+/* max len. of a filename */
+    public static final int MAXFULLFILENAMELEN = 4096;
+    public static final int MAXFULLPATHNAMELEN = 2 * MAXFULLFILENAMELEN;
+    public static final int FILENAMEPADDING = 128;
+
+    public static final String DEFAULT_MSG_DESC = "no description";
+
+    public static final int MSGSIZE = 4096;
+
+/* RFC #725
+*  extend the MSG size to 4*max filename len
+ */
+    public static final int MAXFULLMSGSIZE = 4 * MAXFULLFILENAMELEN;
+
+/* host status (hStatus) bits */
+    /**
+     *  \addtogroup host_status host_status
+     *  The status of the host. It is the bitwise inclusive OR of some of the following:
+     */
+
+    /**
+     * < Ready to accept and run jobs
+     */
+    public static final int HOST_STAT_OK = 0x0;
+
+/* Load is not good enough */
+    public static final int HOST_STAT_BUSY = 0x01;
+    /**
+     * < The host load is greater than a scheduling threshold. In this status, no new job will be scheduled to run on this host.
+     */
+
+/* Run windows are closed */
+    public static final int HOST_STAT_WIND = 0x02;
+    /**
+     * < The host dispatch window is closed. In this status, no new job will be accepted.
+     */
+
+/* Disabled by admin */
+    public static final int HOST_STAT_DISABLED = 0x04;
+    /**
+     * < The host has been disabled by the LSF administrator and will not accept jobs. In this status, no new job will be scheduled to  run on this host.
+     */
+
+/* Lim locked by admin */
+    public static final int HOST_STAT_LOCKED = 0x08;
+    /**< The host is locked by an exclusive task. In this status, no new job will be scheduled to run on this host.*/
+
+    /**
+     * < Greater than job limit
+     */
+    public static final int HOST_STAT_FULL = 0x10;
+    /**< The host has reached its job limit. In this status, no new job will be scheduled to run on this host.*/
+
+    /**
+     * < The sbatchd on this host is unreachable.
+     */
+    public static final int HOST_STAT_UNREACH = 0x20;
+
+    /**
+     * < The LIM and sbatchd on this host are unavailable.
+     */
+    public static final int HOST_STAT_UNAVAIL = 0x40;
+
+    /**
+     * < The host does not have an LSF license.
+     */
+    public static final int HOST_STAT_UNLICENSED = 0x80;
+
+    /**
+     * < The host is running an sbatchd but not a LIM.
+     */
+    public static final int HOST_STAT_NO_LIM = 0x100;
+
+    /**
+     * < Running exclusive job
+     */
+    public static final int HOST_STAT_EXCLUSIVE = 0x200;
+
+    /**
+     * < Lim locked by master LIM
+     */
+    public static final int HOST_STAT_LOCKED_MASTER = 0x400;
+
+    /**
+     * < Close a remote lease host. This flag is  used together with HOST_STAT_DISABLED.
+     */
+    public static final int HOST_STAT_REMOTE_DISABLED = 0x800;
+
+    /**
+     * < Close a remote lease host because the lease is renewing or terminating.
+     */
+    public static final int HOST_STAT_LEASE_INACTIVE = 0x1000;
+
+/* if LSF_HPC_EXTENSIONS="LSB_HCLOSE_BY_RES" is set in lsf.conf
+*  the host will be closed if RES is unavailable.
+ */
+
+    /**
+     * < Host is disabled by RES
+     */
+    public static final int HOST_STAT_DISABLED_RES = 0x4000;
+
+/* Kite#29531 a bit set in hData->hStatus
+*  to show whether the host is closed by
+*  admin or closed because RMS is not available.
+ */
+
+    /**
+     * < Host is disabled by RMS
+     */
+    public static final int HOST_STAT_DISABLED_RMS = 0x8000;
+
+/* lsf70 project scheduling, a removed host from mbatchd move into
+*  a new status HOST_STAT_LOCKED_EGO
+ */
+
+    /**
+     * < The host is disabled by EGO
+     */
+    public static final int HOST_STAT_LOCKED_EGO = 0x10000;
+
+    /**
+     * < The host is closed by the LSF administrator. (The original comment here described HOST_STAT_OK and was misplaced.)
+     */
+    public static final int HOST_CLOSED_BY_ADMIN = 0x20000;
+
+    /**
+     * < Running cu exclusive job
+     */
+    public static final int HOST_STAT_CU_EXCLUSIVE = 0x40000;
+
+/* host is ok */
+
+    /** Returns true when no status bit is set, i.e. the host is ready to accept and run jobs. */
+    public static boolean LSB_HOST_OK(int status) {
+        return status == HOST_STAT_OK;
+    }
+
+/* host is busy */
+
+    /** Returns true when the busy bit is set in the host status word. */
+    public static boolean LSB_HOST_BUSY(int status) {
+        return (status & HOST_STAT_BUSY) == HOST_STAT_BUSY;
+    }
+
+/* host is closed */
+
+    /** Returns true when any "closed" condition is present in the host status word. */
+    public static boolean LSB_HOST_CLOSED(int status) {
+        final int closedBits = HOST_STAT_WIND
+                | HOST_STAT_DISABLED
+                | HOST_STAT_LOCKED
+                | HOST_STAT_LOCKED_MASTER
+                | HOST_STAT_FULL
+                | HOST_STAT_CU_EXCLUSIVE
+                | HOST_STAT_EXCLUSIVE
+                | HOST_STAT_LEASE_INACTIVE
+                | HOST_STAT_NO_LIM;
+        return (status & closedBits) != 0;
+    }
+
+/* host is full */
+
+    /** Returns true when the host has reached its job limit. */
+    public static boolean LSB_HOST_FULL(int status) {
+        return (status & HOST_STAT_FULL) == HOST_STAT_FULL;
+    }
+
+/* host is unlicensed */
+
+    /** Returns true when the unlicensed bit is set in the host status word. */
+    public static boolean LSB_HOST_UNLICENSED(int status) {
+        return (status & HOST_STAT_UNLICENSED) == HOST_STAT_UNLICENSED;
+    }
+
+/* host is unreach */
+
+    /** Returns true when the sbatchd on the host is unreachable. */
+    public static boolean LSB_HOST_UNREACH(int status) {
+        return (status & HOST_STAT_UNREACH) == HOST_STAT_UNREACH;
+    }
+
+/* host is unavail */
+
+    /** Returns true when the LIM and sbatchd on the host are unavailable. */
+    public static boolean LSB_HOST_UNAVAIL(int status) {
+        return (status & HOST_STAT_UNAVAIL) == HOST_STAT_UNAVAIL;
+    }
+
+
+    /* host busy reason bits */
+    /**
+     *  \addtogroup host_load_BusyReason host_load_BusyReason
+     *  If hStatus is HOST_STAT_BUSY, these indicate the host loadSched or loadStop
+     *  busy reason. If none of the thresholds have been exceeded, the value is
+     *  HOST_BUSY_NOT. Otherwise the value is the bitwise inclusive OR of some of the
+     *  following:
+     */
+
+    /**
+     * < Host not busy
+     */
+    public static final int HOST_BUSY_NOT = 0x000;
+
+    /**
+     * < The 15 second average CPU run queue length is too high.
+     */
+    public static final int HOST_BUSY_R15S = 0x001;
+
+    /**
+     * < The 1 minute average CPU run queue length is too high.
+     */
+    public static final int HOST_BUSY_R1M = 0x002;
+
+    /**
+     * < The 15 minute average CPU run queue length is too high.
+     */
+    public static final int HOST_BUSY_R15M = 0x004;
+
+    /**
+     * < The CPU utilization is too high.
+     */
+    public static final int HOST_BUSY_UT = 0x008;
+
+    /**
+     * < The paging rate is too high.
+     */
+    public static final int HOST_BUSY_PG = 0x010;
+
+    /**
+     * < The I/O rate is too high.
+     */
+    public static final int HOST_BUSY_IO = 0x020;
+
+    /**
+     * < There are too many login sessions.
+     */
+    public static final int HOST_BUSY_LS = 0x040;
+
+    /**
+     * < Host has not been idle long enough.
+     */
+    public static final int HOST_BUSY_IT = 0x080;
+
+    /**
+     * < There is not enough free space in the file  system containing /tmp.
+     */
+    public static final int HOST_BUSY_TMP = 0x100;
+
+    /**
+     * < There is not enough free swap space.
+     */
+    public static final int HOST_BUSY_SWP = 0x200;
+
+    /**
+     * < There is not enough free memory.
+     */
+    public static final int HOST_BUSY_MEM = 0x400;
+
+/* host is busy */
+
+    public static boolean LSB_ISBUSYON(int[] status, int index) {
+        return (((status[(index) / LibLsf.INTEGER_BITS]) & (1 << (index) % LibLsf.INTEGER_BITS)) != 0);
+    }
+
+
+/* queue status (qStatus) bits */
+    /**
+     *  \addtogroup queue_status queue_status
+     *  queue status (qStatus) bits
+     */
+
+    /**
+     * < The queue is open to accept newly submitted jobs.
+     */
+    public static final int QUEUE_STAT_OPEN = 0x01;
+
+    /**
+     * < The queue is actively dispatching jobs. The queue can be inactivated and  reactivated by the LSF administrator using  \ref lsb_queuecontrol. The queue will also be inactivated when its run or dispatch window  is closed. In this case it cannot be reactivated manually; it will be reactivated by the LSF system when its run and dispatch windows reopen.
+     */
+    public static final int QUEUE_STAT_ACTIVE = 0x02;
+
+    /**
+     * < The queue run and dispatch windows are open. The initial state of a queue at LSF boot time is open and either active or inactive, depending on its run and dispatch windows.
+     */
+    public static final int QUEUE_STAT_RUN = 0x04;
+
+    /**
+     * < Remote queue rejecting jobs.
+     */
+    public static final int QUEUE_STAT_NOPERM = 0x08;
+
+    /**
+     * < Remote queue status is disconnected.
+     */
+    public static final int QUEUE_STAT_DISC = 0x10;
+
+    /**
+     * < Queue run windows are closed.
+     */
+    public static final int QUEUE_STAT_RUNWIN_CLOSE = 0x20;
+
+/* queue attribute (QAttrib) bits */
+    /**
+     *  \addtogroup queue_attribute queue_attribute
+     *  queue attribute (QAttrib) bits.
+     */
+
+    /**
+     * < This queue accepts jobs which request exclusive execution.
+     */
+    public static final int Q_ATTRIB_EXCLUSIVE = 0x01;
+
+    /**
+     * < This queue is a default LSF queue.
+     */
+    public static final int Q_ATTRIB_DEFAULT = 0x02;
+
+    /**
+     * < This queue uses the FAIRSHARE scheduling policy. The user shares  are given in userShares.
+     */
+    public static final int Q_ATTRIB_FAIRSHARE = 0x04;
+
+    /**
+     * < This queue uses the PREEMPTIVE scheduling policy.
+     */
+    public static final int Q_ATTRIB_PREEMPTIVE = 0x08;
+
+    /**
+     * < This is an NQS forward queue. The target NQS queues are given in nqsQueues. For NQS forward queues, the hostList, procJobLimit, windows, mig and windowsD fields are meaningless.
+     */
+    public static final int Q_ATTRIB_NQS = 0x10;
+
+    /**
+     * < This queue can receive jobs from other clusters
+     */
+    public static final int Q_ATTRIB_RECEIVE = 0x20;
+
+    /**
+     * < This queue uses a preemptable scheduling policy.
+     */
+    public static final int Q_ATTRIB_PREEMPTABLE = 0x40;
+
+    /**
+     * < This queue uses a backfilling policy.
+     */
+    public static final int Q_ATTRIB_BACKFILL = 0x80;
+
+    /**
+     * < This queue uses a host preference policy.
+     */
+    public static final int Q_ATTRIB_HOST_PREFER = 0x100;
+
+    /**
+     * < This queue can't preempt any other queue.
+     */
+    public static final int Q_ATTRIB_NONPREEMPTIVE = 0x200;
+
+    /**
+     * < This queue can't be preempted from any queue.
+     */
+    public static final int Q_ATTRIB_NONPREEMPTABLE = 0x400;
+
+    /**
+     * < This queue does not accept batch interactive jobs.
+     */
+    public static final int Q_ATTRIB_NO_INTERACTIVE = 0x800;
+
+    /**
+     * < This queue only accepts batch interactive jobs.
+     */
+    public static final int Q_ATTRIB_ONLY_INTERACTIVE = 0x1000;
+
+    /**
+     * < No host type related resource name specified in resource requirement.
+     */
+    public static final int Q_ATTRIB_NO_HOST_TYPE = 0x2000;
+
+    /**
+     * < This queue disables deadline constrained resource scheduling.
+     */
+    public static final int Q_ATTRIB_IGNORE_DEADLINE = 0x4000;
+
+    /**
+     * < Jobs may run as chkpntable.
+     */
+    public static final int Q_ATTRIB_CHKPNT = 0x8000;
+
+    /**
+     * < Jobs may run as rerunnable.
+     */
+    public static final int Q_ATTRIB_RERUNNABLE = 0x10000;
+
+    /**
+     * < Excluding remote jobs when local jobs are present in the queue.
+     */
+    public static final int Q_ATTRIB_EXCL_RMTJOB = 0x20000;
+
+    /**
+     * < Turn on a multicluster fast scheduling policy.
+     */
+    public static final int Q_ATTRIB_MC_FAST_SCHEDULE = 0x40000;
+
+    /**
+     * < Push interactive jobs in front of other jobs in queue.
+     */
+    public static final int Q_ATTRIB_ENQUE_INTERACTIVE_AHEAD = 0x80000;
+
+/* Only one of the following four flags could be TRUE. By default, the queue
+*  is a local queue only(none of them is set.)
+*      0x100000 - 0xf00000 is used for MC attribute
+ */
+
+
+    /**
+     * < Flags used by MultiCluster.
+     */
+    public static final int Q_MC_FLAG = 0xf00000;
+
+    /**
+     * < Lease and local.
+     */
+    public static final int Q_ATTRIB_LEASE_LOCAL = 0x100000;
+
+    /**
+     * < Lease only; no local.
+     */
+    public static final int Q_ATTRIB_LEASE_ONLY = 0x200000;
+
+    /**
+     * < Remote batch and local.
+     */
+    public static final int Q_ATTRIB_RMT_BATCH_LOCAL = 0x300000;
+
+    /**
+     * < Remote batch only.
+     */
+    public static final int Q_ATTRIB_RMT_BATCH_ONLY = 0x400000;
+
+
+    /**
+     * < Memory reservation.
+     */
+    public static final int Q_ATTRIB_RESOURCE_RESERVE = 0x1000000;
+
+    /**
+     * < Cross-queue fairshare.
+     */
+    public static final int Q_ATTRIB_FS_DISPATCH_ORDER_QUEUE = 0x2000000;
+
+    /**
+     * < Batch queue/partition
+     */
+    public static final int Q_ATTRIB_BATCH = 0x4000000;
+
+    /**
+     * < Online partition
+     */
+    public static final int Q_ATTRIB_ONLINE = 0x8000000;
+
+    /**
+     * < Interruptible backfill
+     */
+    public static final int Q_ATTRIB_INTERRUPTIBLE_BACKFILL = 0x10000000;
+
+    /**
+     * < Absolute Priority scheduling (APS) value.
+     */
+    public static final int Q_ATTRIB_APS = 0x20000000;
+
+    /**
+     * < No queue with RESOURCE_RESERVE or SLOT_RESERVE has higher priority than this queue.
+     */
+    public static final int Q_ATTRIB_NO_HIGHER_RESERVE = 0x40000000;
+
+    /**
+     * < No host valid
+     */
+    public static final int Q_ATTRIB_NO_HOST_VALID = 0x80000000;
+
+
+/* macros to check queue near real time attributes */
+
+    /** C-macro analogue: non-zero when the queue is an online partition. */
+    public static int IS_ONLINE_QUEUE(queueInfoEnt Q) {
+        return Q.qAttrib & Q_ATTRIB_ONLINE;
+    }
+
+    /** C-macro analogue: non-zero when the queue is a batch queue/partition. */
+    public static int IS_BATCH_QUEUE(queueInfoEnt Q) {
+        return Q.qAttrib & Q_ATTRIB_BATCH;
+    }
+
+/* macros to check queue remote attributes */
+
+    public static boolean IS_LEASE_LOCAL_QUEUE(queueInfoEnt Q) {
+        return ((Q.qAttrib & Q_MC_FLAG) == Q_ATTRIB_LEASE_LOCAL);
+    }
+
+    public static boolean IS_LEASE_ONLY_QUEUE(queueInfoEnt Q) {
+        return ((Q.qAttrib & Q_MC_FLAG) == Q_ATTRIB_LEASE_ONLY);
+    }
+
+    public static boolean IS_RMT_BATCH_LOCAL_QUEUE(queueInfoEnt Q) {
+        return ((Q.qAttrib & Q_MC_FLAG) == Q_ATTRIB_RMT_BATCH_LOCAL);
+    }
+
+    public static boolean IS_RMT_BATCH_ONLY_QUEUE(queueInfoEnt Q) {
+        return ((Q.qAttrib & Q_MC_FLAG) == Q_ATTRIB_RMT_BATCH_ONLY);
+    }
+
+    /** True when the queue is a lease queue (lease-and-local or lease-only). */
+    public static boolean IS_LEASE_QUEUE(queueInfoEnt Q) {
+        if (IS_LEASE_LOCAL_QUEUE(Q)) {
+            return true;
+        }
+        return IS_LEASE_ONLY_QUEUE(Q);
+    }
+
+    /** True when the queue is a remote-batch queue (with or without local dispatch). */
+    public static boolean IS_RMT_BATCH_QUEUE(queueInfoEnt Q) {
+        if (IS_RMT_BATCH_LOCAL_QUEUE(Q)) {
+            return true;
+        }
+        return IS_RMT_BATCH_ONLY_QUEUE(Q);
+    }
+
+    /** True when the queue participates in MultiCluster (lease or remote batch). */
+    public static boolean IS_MC_QUEUE(queueInfoEnt Q) {
+        if (IS_LEASE_QUEUE(Q)) {
+            return true;
+        }
+        return IS_RMT_BATCH_QUEUE(Q);
+    }
+
+    /** Marks the queue lease-and-local; returns the updated attribute word. */
+    public static int SET_LEASE_LOCAL_QUEUE(queueInfoEnt Q) {
+        Q.qAttrib |= Q_ATTRIB_LEASE_LOCAL;
+        return Q.qAttrib;
+    }
+
+    /** Marks the queue lease-only; returns the updated attribute word. */
+    public static int SET_LEASE_ONLY_QUEUE(queueInfoEnt Q) {
+        Q.qAttrib |= Q_ATTRIB_LEASE_ONLY;
+        return Q.qAttrib;
+    }
+
+    /** Marks the queue remote-batch-and-local; returns the updated attribute word. */
+    public static int SET_RMT_BATCH_LOCAL_QUEUE(queueInfoEnt Q) {
+        Q.qAttrib |= Q_ATTRIB_RMT_BATCH_LOCAL;
+        return Q.qAttrib;
+    }
+
+    /** Marks the queue remote-batch-only; returns the updated attribute word. */
+    public static int SET_RMT_BATCH_ONLY_QUEUE(queueInfoEnt Q) {
+        Q.qAttrib |= Q_ATTRIB_RMT_BATCH_ONLY;
+        return Q.qAttrib;
+    }
+
+    /** Clears all MultiCluster bits; returns the updated attribute word. */
+    public static int CLR_MC_QUEUE_FLAG(queueInfoEnt Q) {
+        Q.qAttrib &= ~Q_MC_FLAG;
+        return Q.qAttrib;
+    }
+
+
+/* the bits 0x10000000 to 0x80000000 is reserved for internal use (daemons.h) */
+
+/* exit code for mbatchd */
+    public static final int MASTER_NULL = 200;
+    public static final int MASTER_RESIGN = 201;
+    public static final int MASTER_RECONFIG = 202;
+    public static final int MASTER_FATAL = 203;
+    public static final int MASTER_MEM = 204;
+    public static final int MASTER_CONF = 205;
+    public static final int MASTER_EVENT = 206;
+    public static final int MASTER_DISABLE = 207;
+
+/* sub type of mbatchd die */
+    public static final int MBD_USER_CMD = 1;
+    public static final int MBD_NON_USER_CMD = 2;
+
+    /**
+     *  \addtogroup job_states job_states
+     *  define job states
+     */
+
+    /**
+     * < State null
+     */
+    public static final int JOB_STAT_NULL = 0x00;
+
+    /**
+     * < The job is pending, i.e., it  has not been dispatched yet.
+     */
+    public static final int JOB_STAT_PEND = 0x01;
+
+    /**
+     * < The pending job was suspended by its owner or the LSF system administrator.
+     */
+    public static final int JOB_STAT_PSUSP = 0x02;
+
+    /**
+     * < The job is running.
+     */
+    public static final int JOB_STAT_RUN = 0x04;
+
+    /**
+     * < The running job was suspended  by the system because an execution  host was overloaded or the queue run  window closed. (see \ref lsb_queueinfo,  \ref lsb_hostinfo, and lsb.queues.)
+     */
+    public static final int JOB_STAT_SSUSP = 0x08;
+
+    /**
+     * < The running job was suspended by its owner or the LSF system administrator.
+     */
+    public static final int JOB_STAT_USUSP = 0x10;
+
+    /**
+     * < The job has terminated with a non-zero status - it may have been aborted due  to an error in its execution, or  killed by its owner or by the  LSF system administrator.
+     */
+    public static final int JOB_STAT_EXIT = 0x20;
+
+    /**
+     * < The job has terminated with status 0.
+     */
+    public static final int JOB_STAT_DONE = 0x40;
+
+    /**
+     * < Post job process done successfully
+     */
+    public static final int JOB_STAT_PDONE = (0x80);
+
+    /**
+     * < Post job process has error
+     */
+    public static final int JOB_STAT_PERR = (0x100);
+
+    /**
+     * < Chunk job waiting its turn to exec
+     */
+    public static final int JOB_STAT_WAIT = (0x200);
+
+    /**
+     * < The slave batch daemon (sbatchd) on  the host on which the job is processed  has lost contact with the master batch  daemon (mbatchd).
+     */
+    public static final int JOB_STAT_UNKWN = 0x10000;
+
+    /**
+     *  \addtogroup event_types event_types
+     *  define statements used by \ref lsb_geteventrec. Events logged in lsb.events file
+     */
+
+    /**
+     * < Submit a new job
+     */
+    public static final int EVENT_JOB_NEW = 1;
+
+    /**
+     * < mbatchd is trying to start a job
+     */
+    public static final int EVENT_JOB_START = 2;
+
+    /**
+     * < Job's status change event
+     */
+    public static final int EVENT_JOB_STATUS = 3;
+
+    /**
+     * < Job switched to another queue
+     */
+    public static final int EVENT_JOB_SWITCH = 4;
+
+    /**
+     * < Move a pending job's position within a queue
+     */
+    public static final int EVENT_JOB_MOVE = 5;
+
+    /**
+     * < Queue's status changed by Platform LSF  administrator (bhc operation)
+     */
+    public static final int EVENT_QUEUE_CTRL = 6;
+
+    /**
+     * < Host status changed by Platform LSF  administrator (bhc operation)
+     */
+    public static final int EVENT_HOST_CTRL = 7;
+
+    /**
+     * < Log parameters before mbatchd died
+     */
+    public static final int EVENT_MBD_DIE = 8;
+
+    /**
+     * < Action that was not taken because the  mbatchd was unable to contact the sbatchd on the job's execution host
+     */
+    public static final int EVENT_MBD_UNFULFILL = 9;
+
+    /**
+     * < Job finished (Logged in lsb.acct)
+     */
+    public static final int EVENT_JOB_FINISH = 10;
+
+    /**
+     * < The complete list of load indices, including external load indices
+     */
+    public static final int EVENT_LOAD_INDEX = 11;
+
+    /**
+     * < Job checkpointed.
+     */
+    public static final int EVENT_CHKPNT = 12;
+
+    /**
+     * < Job migrated
+     */
+    public static final int EVENT_MIG = 13;
+
+    /**
+     * < The pre-execution command started
+     */
+    public static final int EVENT_PRE_EXEC_START = 14;
+
+    /**
+     * < New mbatchd start event
+     */
+    public static final int EVENT_MBD_START = 15;
+
+    /**
+     * < The job has been routed to NQS
+     */
+    public static final int EVENT_JOB_ROUTE = 16;
+
+    /**
+     * < Job modification request
+     */
+    public static final int EVENT_JOB_MODIFY = 17;
+
+    /**
+     * < Signal/delete a job
+     */
+    public static final int EVENT_JOB_SIGNAL = 18;
+
+    /**
+     * < Add new calendar to the system
+     */
+    public static final int EVENT_CAL_NEW = 19;
+
+    /**
+     * < Calendar modified
+     */
+    public static final int EVENT_CAL_MODIFY = 20;
+
+    /**
+     * < Delete a calendar in the system
+     */
+    public static final int EVENT_CAL_DELETE = 21;
+
+    /**
+     * < Job forwarded to another cluster
+     */
+    public static final int EVENT_JOB_FORWARD = 22;
+
+    /**
+     * < Job from a remote cluster dispatched
+     */
+    public static final int EVENT_JOB_ACCEPT = 23;
+
+    /**
+     * < Job status successfully sent to  submission cluster
+     */
+    public static final int EVENT_STATUS_ACK = 24;
+
+    /**
+     * < Job started successfully on the  execution host
+     */
+    public static final int EVENT_JOB_EXECUTE = 25;
+
+    /**
+     * < Send a message to a job
+     */
+    public static final int EVENT_JOB_MSG = 26;
+
+    /**
+     * < The message has been delivered
+     */
+    public static final int EVENT_JOB_MSG_ACK = 27;
+
+    /**
+     * < Job is requeued
+     */
+    public static final int EVENT_JOB_REQUEUE = 28;
+
+    /**
+     * < Submission mbatchd logs this after sending  an occupy request to execution mbatchd
+     */
+    public static final int EVENT_JOB_OCCUPY_REQ = 29;
+
+    /**
+     * < Submission mbatchd logs this event after  all execution mbatchds have vacated the occupied hosts for the job
+     */
+    public static final int EVENT_JOB_VACATED = 30;
+
+    /**
+     * < A signal action on a job has been  initiated or finished
+     */
+    public static final int EVENT_JOB_SIGACT = 32;
+
+    /**
+     * < sbatchd's new job status
+     */
+    public static final int EVENT_SBD_JOB_STATUS = 34;
+
+    /**
+     * < sbatchd accepts job start
+     */
+    public static final int EVENT_JOB_START_ACCEPT = 35;
+
+    /**
+     * < Undelete a calendar in the system
+     */
+    public static final int EVENT_CAL_UNDELETE = 36;
+
+    /**
+     * < Job is cleaned out of the core
+     */
+    public static final int EVENT_JOB_CLEAN = 37;
+
+    /**
+     * < Job exception was detected
+     */
+    public static final int EVENT_JOB_EXCEPTION = 38;
+
+    /**
+     * < Adding a new job group
+     */
+    public static final int EVENT_JGRP_ADD = 39;
+
+    /**
+     * < Modifying a job group
+     */
+    public static final int EVENT_JGRP_MOD = 40;
+
+    /**
+     * < Controlling a job group
+     */
+    public static final int EVENT_JGRP_CTRL = 41;
+
+    /**
+     * < Forcing a job to start on specified  hosts (brun operation)
+     */
+    public static final int EVENT_JOB_FORCE = 42;
+
+    /**
+     * < Switching the event file lsb.events
+     */
+    public static final int EVENT_LOG_SWITCH = 43;
+
+    /**
+     * < Job modification request
+     */
+    public static final int EVENT_JOB_MODIFY2 = 44;
+
+    /**
+     * < Log job group status
+     */
+    public static final int EVENT_JGRP_STATUS = 45;
+
+    /**
+     * < Job attributes have been set
+     */
+    public static final int EVENT_JOB_ATTR_SET = 46;
+
+    /**
+     * < Send an external message to a job
+     */
+    public static final int EVENT_JOB_EXT_MSG = 47;
+
+    /**
+     * < Update data status of a message for a job
+     */
+    public static final int EVENT_JOB_ATTA_DATA = 48;
+
+    /**
+     * < Insert one job to a chunk
+     */
+    public static final int EVENT_JOB_CHUNK = 49;
+
+    /**
+     * < Save unreported sbatchd status
+     */
+    public static final int EVENT_SBD_UNREPORTED_STATUS = 50;
+
+    /**
+     * < Reservation finished
+     */
+    public static final int EVENT_ADRSV_FINISH = 51;
+
+    /**
+     * < Dynamic host group control
+     */
+    public static final int EVENT_HGHOST_CTRL = 52;
+
+    /**
+     * < Saved current CPU allocation on service partition
+     */
+    public static final int EVENT_CPUPROFILE_STATUS = 53;
+
+    /**
+     * < Write out data logging file
+     */
+    public static final int EVENT_DATA_LOGGING = 54;
+
+    /**
+     * < Write job rusage in lsb.stream
+     */
+    public static final int EVENT_JOB_RUN_RUSAGE = 55;
+
+    /**
+     * < Stream closed and new stream opened.
+     */
+    public static final int EVENT_END_OF_STREAM = 56;
+
+    /**
+     * < SLA goal is re-evaluated
+     */
+    public static final int EVENT_SLA_RECOMPUTE = 57;
+
+    /**
+     * < Write performance metrics to lsb.stream
+     */
+    public static final int EVENT_METRIC_LOG = 58;
+
+    /**
+     * < Write task finish log to ssched.acct
+     */
+    public static final int EVENT_TASK_FINISH = 59;
+
+    /**
+     * < Resize allocation is made
+     */
+    public static final int EVENT_JOB_RESIZE_NOTIFY_START = 60;
+
+    /**
+     * < Resize notification action initialized
+     */
+    public static final int EVENT_JOB_RESIZE_NOTIFY_ACCEPT = 61;
+
+    /**
+     * < Resize notification action completed
+     */
+    public static final int EVENT_JOB_RESIZE_NOTIFY_DONE = 62;
+
+    /**
+     * < Job resize release request is received
+     */
+    public static final int EVENT_JOB_RESIZE_RELEASE = 63;
+
+    /**
+     * < Job resize cancel request is received
+     */
+    public static final int EVENT_JOB_RESIZE_CANCEL = 64;
+
+    /**
+     * < Job resize event for lsb.acct
+     */
+    public static final int EVENT_JOB_RESIZE = 65;
+
+    /**
+     * < Saved array element's resource consumption  for LSF simulator
+     */
+    public static final int EVENT_JOB_ARRAY_ELEMENT = 66;
+
+    /**
+     * < Saved LSF simulator status
+     */
+    public static final int EVENT_MBD_SIM_STATUS = 67;
+
+/* event kind
+ */
+
+    /**
+     * < it is a job related event
+     */
+    public static final int EVENT_JOB_RELATED = 1;
+
+    /**
+     * < it is a non job related event
+     */
+    public static final int EVENT_NON_JOB_RELATED = 0;
+
+    /*
+   *  EXCLUSIVE PENDING REASONS
+   *  a job must stay pending as long as ONE of the exclusive reasons exists
+    */
+
+/* Job Related Reasons (001 - 300)
+ */
+    /**
+     * \addtogroup pending_reasons pending_reasons
+     * \brief          Each entry in the table contains one of the following pending reasons
+     */
+
+    /**
+     * < Virtual code; not a reason
+     */
+    public static final int PEND_JOB_REASON = 0;
+
+    /**
+     * < A new job is waiting to be scheduled
+     */
+    public static final int PEND_JOB_NEW = 1;
+
+    /**
+     * < The job is held until its specified start time
+     */
+    public static final int PEND_JOB_START_TIME = 2;
+
+    /**
+     * < The job is waiting for its dependency condition(s) to be satisfied
+     */
+    public static final int PEND_JOB_DEPEND = 3;
+
+    /**
+     * < The dependency condition is invalid or never satisfied
+     */
+    public static final int PEND_JOB_DEP_INVALID = 4;
+
+    /**
+     * < The migrating job is waiting to be rescheduled
+     */
+    public static final int PEND_JOB_MIG = 5;
+
+    /**
+     * < The job's pre-exec command exited with non-zero status
+     */
+    public static final int PEND_JOB_PRE_EXEC = 6;
+
+    /**
+     * < Unable to access jobfile
+     */
+    public static final int PEND_JOB_NO_FILE = 7;
+
+    /**
+     * < Unable to set job's environment variables
+     */
+    public static final int PEND_JOB_ENV = 8;
+
+    /**
+     * < Unable to determine the job's home or working directories
+     */
+    public static final int PEND_JOB_PATHS = 9;
+
+    /**
+     * < Unable to open the job's input and output files
+     */
+    public static final int PEND_JOB_OPEN_FILES = 10;
+
+    /**
+     * < Job execution initialization failed
+     */
+    public static final int PEND_JOB_EXEC_INIT = 11;
+
+    /**
+     * < Unable to copy restarting job's checkpoint files
+     */
+    public static final int PEND_JOB_RESTART_FILE = 12;
+
+    /**
+     * < Scheduling of the job is delayed
+     */
+    public static final int PEND_JOB_DELAY_SCHED = 13;
+
+    /**
+     * < Waiting for the re-scheduling of the job after switching queues
+     */
+    public static final int PEND_JOB_SWITCH = 14;
+
+    /**
+     * < An event is rejected by eeventd due to a syntax error
+     */
+    public static final int PEND_JOB_DEP_REJECT = 15;
+
+    /**
+     * < A JobScheduler feature is not enabled
+     */
+    public static final int PEND_JOB_JS_DISABLED = 16;
+
+    /**
+     * < Failed to get user password
+     */
+    public static final int PEND_JOB_NO_PASSWD = 17;
+
+    /**
+     * < The job is pending due to logon failure
+     */
+    public static final int PEND_JOB_LOGON_FAIL = 18;
+
+    /**
+     * < The job is waiting to be re-scheduled after its parameters have been changed
+     */
+    public static final int PEND_JOB_MODIFY = 19;
+
+    /**
+     * < The job time event is invalid
+     */
+    public static final int PEND_JOB_TIME_INVALID = 20;
+
+    /**
+     * < The job time event has expired
+     */
+    public static final int PEND_TIME_EXPIRED = 21;
+
+    /**
+     * < The job has been requeued
+     */
+    public static final int PEND_JOB_REQUEUED = 23;
+
+    /**
+     * < Waiting for the next time event
+     */
+    public static final int PEND_WAIT_NEXT = 24;
+
+    /**
+     * < The parent group is held
+     */
+    public static final int PEND_JGRP_HOLD = 25;
+
+    /**
+     * < The parent group is inactive
+     */
+    public static final int PEND_JGRP_INACT = 26;
+
+    /**
+     * < The group is waiting for scheduling
+     */
+    public static final int PEND_JGRP_WAIT = 27;
+
+    /**
+     * < The remote cluster(s) are unreachable
+     */
+    public static final int PEND_JOB_RCLUS_UNREACH = 28;
+
+    /**
+     * < SNDJOBS_TO queue rejected by remote  clusters
+     */
+    public static final int PEND_JOB_QUE_REJECT = 29;
+
+    /**
+     * < Waiting for new remote scheduling  session
+     */
+    public static final int PEND_JOB_RSCHED_START = 30;
+
+    /**
+     * < Waiting for allocation reply from remote clusters
+     */
+    public static final int PEND_JOB_RSCHED_ALLOC = 31;
+
+    /**
+     * < The job is forwarded to a remote cluster
+     */
+    public static final int PEND_JOB_FORWARDED = 32;
+
+    /**
+     * < The job running remotely is in a zombie state
+     */
+    public static final int PEND_JOB_RMT_ZOMBIE = 33;
+
+    /**
+     * < Job's enforced user group share account not selected
+     */
+    public static final int PEND_JOB_ENFUGRP = 34;
+
+    /**
+     * < The system is unable to schedule the job
+     */
+    public static final int PEND_SYS_UNABLE = 35;
+
+    /**
+     * < The parent group has just been released
+     */
+    public static final int PEND_JGRP_RELEASE = 36;
+
+    /**
+     * < The job has run since group active
+     */
+    public static final int PEND_HAS_RUN = 37;
+
+    /**
+     * < The job has reached its running element limit
+     */
+    public static final int PEND_JOB_ARRAY_JLIMIT = 38;
+
+    /**
+     * < Checkpoint directory is invalid
+     */
+    public static final int PEND_CHKPNT_DIR = 39;
+
+    /**
+     * < The first job in the chunk failed  (all other jobs in the chunk are set to PEND)
+     */
+    public static final int PEND_CHUNK_FAIL = 40;
+
+    /**
+     * < Optimum number of running jobs for SLA has been reached
+     */
+    public static final int PEND_JOB_SLA_MET = 41;
+
+    /**
+     * < Specified application profile does not exist
+     */
+    public static final int PEND_JOB_APP_NOEXIST = 42;
+
+    /**
+     * < Job no longer satisfies application  PROCLIMIT configuration
+     */
+    public static final int PEND_APP_PROCLIMIT = 43;
+
+    /**
+     * < No hosts for the job from EGO
+     */
+    public static final int PEND_EGO_NO_HOSTS = 44;
+
+    /**
+     * < The specified job group has reached its job limit
+     */
+    public static final int PEND_JGRP_JLIMIT = 45;
+
+    /**
+     * < Job pre-exec retry limit
+     */
+    public static final int PEND_PREEXEC_LIMIT = 46;
+
+    /**
+     * < Job re-queue limit
+     */
+    public static final int PEND_REQUEUE_LIMIT = 47;
+
+    /**
+     * < Job has bad res req
+     */
+    public static final int PEND_BAD_RESREQ = 48;
+
+    /**
+     * < Job's reservation is inactive
+     */
+    public static final int PEND_RSV_INACTIVE = 49;
+
+    /**
+     * < Job was in PSUSP with bad res req, after successful bmod  waiting for the user to bresume
+     */
+    public static final int PEND_WAITING_RESUME = 50;
+
+    /**
+     * < Job slot request cannot satisfy compound  resource requirement
+     */
+    public static final int PEND_SLOT_COMPOUND = 51;
+
+/*
+*  Queue and System Related Reasons (301 - 599)
+ */
+
+    /**
+     * < The queue is inactivated by the administrator
+     */
+    public static final int PEND_QUE_INACT = 301;
+
+    /**
+     * < The queue is inactivated by its time windows
+     */
+    public static final int PEND_QUE_WINDOW = 302;
+
+    /**
+     * < The queue has reached its job slot limit
+     */
+    public static final int PEND_QUE_JOB_LIMIT = 303;
+
+    /**
+     * < The user has reached the per-user job slot limit of the queue
+     */
+    public static final int PEND_QUE_USR_JLIMIT = 304;
+
+    /**
+     * < Not enough per-user job slots of the queue for the parallel job
+     */
+    public static final int PEND_QUE_USR_PJLIMIT = 305;
+
+    /**
+     * < The queue's pre-exec command exited with non-zero status
+     */
+    public static final int PEND_QUE_PRE_FAIL = 306;
+
+    /**
+     * < The job was not accepted by the NQS host,  Attempt again later
+     */
+    public static final int PEND_NQS_RETRY = 307;
+
+    /**
+     * < Unable to send the job to an NQS host
+     */
+    public static final int PEND_NQS_REASONS = 308;
+
+    /**
+     * < Unable to contact NQS host
+     */
+    public static final int PEND_NQS_FUN_OFF = 309;
+
+    /**
+     * < The system is not ready for scheduling after reconfiguration
+     */
+    public static final int PEND_SYS_NOT_READY = 310;
+
+    /**
+     * < The requeued job is waiting for rescheduling
+     */
+    public static final int PEND_SBD_JOB_REQUEUE = 311;
+
+    /**
+     * < Not enough hosts to meet the job's spanning requirement
+     */
+    public static final int PEND_JOB_SPREAD_TASK = 312;
+
+    /**
+     * < Not enough hosts to meet the queue's spanning requirement
+     */
+    public static final int PEND_QUE_SPREAD_TASK = 313;
+
+    /**
+     * < The queue has not enough job slots for the parallel job
+     */
+    public static final int PEND_QUE_PJOB_LIMIT = 314;
+
+    /**
+     * < The job will not finish before queue's run window is closed
+     */
+    public static final int PEND_QUE_WINDOW_WILL_CLOSE = 315;
+
+    /**
+     * < Job no longer satisfies queue  PROCLIMIT configuration
+     */
+    public static final int PEND_QUE_PROCLIMIT = 316;
+
+    /**
+     * < Job requeued due to plug-in failure
+     */
+    public static final int PEND_SBD_PLUGIN = 317;
+
+    /**
+     * < Waiting for lease signing
+     */
+    public static final int PEND_WAIT_SIGN_LEASE = 318;
+
+/* waiting for scheduling for SLOT_SHARE */
+    public static final int PEND_WAIT_SLOT_SHARE = 319;
+
+/*
+*  User Related Reasons (601 - 800)
+ */
+
+    /**
+     * < The job slot limit is reached
+     */
+    public static final int PEND_USER_JOB_LIMIT = 601;
+
+    /**
+     * < A user group has reached its job slot limit
+     */
+    public static final int PEND_UGRP_JOB_LIMIT = 602;
+
+    /**
+     * < The job slot limit for the parallel job is reached
+     */
+    public static final int PEND_USER_PJOB_LIMIT = 603;
+
+    /**
+     * < A user group has reached its job slot limit for the parallel job
+     */
+    public static final int PEND_UGRP_PJOB_LIMIT = 604;
+
+    /**
+     * < Waiting for scheduling after resumed by user
+     */
+    public static final int PEND_USER_RESUME = 605;
+
+    /**
+     * < The job was suspended by the user while pending
+     */
+    public static final int PEND_USER_STOP = 607;
+
+    /**
+     * < Unable to determine user account for execution
+     */
+    public static final int PEND_NO_MAPPING = 608;
+
+    /**
+     * < The user has no permission to run the job on remote host/cluster
+     */
+    public static final int PEND_RMT_PERMISSION = 609;
+
+    /**
+     * < The job was suspended by LSF admin or root while pending
+     */
+    public static final int PEND_ADMIN_STOP = 610;
+
+    /**
+     * < The requested label is not valid
+     */
+    public static final int PEND_MLS_INVALID = 611;
+
+    /**
+     * < The requested label is above user allowed range
+     */
+    public static final int PEND_MLS_CLEARANCE = 612;
+
+    /**
+     * < The requested label rejected by /etc/rhost.conf
+     */
+    public static final int PEND_MLS_RHOST = 613;
+
+    /**
+     * < The requested label does not dominate current label
+     */
+    public static final int PEND_MLS_DOMINATE = 614;
+
+    /**
+     * < The requested label problem
+     */
+    public static final int PEND_MLS_FATAL = 615;
+
+    /**
+     * < LSF internally bstopped a pending job
+     */
+    public static final int PEND_INTERNAL_STOP = 616;
+
+/*
+*  NON-EXCLUSIVE PENDING REASONS
+*  A job may still start even though non-exclusive reasons exist.
+ */
+
+/*
+*  Host(sbatchd)-Job Related Reasons (1001 - 1300)
+ */
+
+    /**
+     * < The job's resource requirements not satisfied
+     */
+    public static final int PEND_HOST_RES_REQ = 1001;
+
+    /**
+     * < The job's requirement for exclusive execution not satisfied
+     */
+    public static final int PEND_HOST_NONEXCLUSIVE = 1002;
+
+    /**
+     * < Higher or equal priority jobs already suspended by system
+     */
+    public static final int PEND_HOST_JOB_SSUSP = 1003;
+
+    /**
+     * < The job failed to compete with other jobs on host partition
+     */
+    public static final int PEND_HOST_PART_PRIO = 1004;
+
+    /**
+     * < Unable to get the PID of the restarting job
+     */
+    public static final int PEND_SBD_GETPID = 1005;
+
+    /**
+     * < Unable to lock the host for exclusively executing the job
+     */
+    public static final int PEND_SBD_LOCK = 1006;
+
+    /**
+     * < Cleaning up zombie job
+     */
+    public static final int PEND_SBD_ZOMBIE = 1007;
+
+    /**
+     * < Can't run jobs submitted by root.  The job is rejected by the sbatchd
+     */
+    public static final int PEND_SBD_ROOT = 1008;
+
+    /**
+     * < Job can't finish on the host before queue's run window is closed
+     */
+    public static final int PEND_HOST_WIN_WILL_CLOSE = 1009;
+
+    /**
+     * < Job can't finish on the host before job's termination deadline
+     */
+    public static final int PEND_HOST_MISS_DEADLINE = 1010;
+
+    /**
+     * < The specified first execution host is  not eligible for this job at this time
+     */
+    public static final int PEND_FIRST_HOST_INELIGIBLE = 1011;
+
+    /**
+     * < Exclusive job reserves slots on host
+     */
+    public static final int PEND_HOST_EXCLUSIVE_RESERVE = 1012;
+
+    /**
+     * < Resized shadow job  or non-first resReq of a compound resReq job try to reuse the first execution host
+     */
+    public static final int PEND_FIRST_HOST_REUSE = 1013;
+/*
+*  Host Related Reasons (1301 - 1600)
+ */
+
+    /**
+     * < The host is closed by the LSF administrator
+     */
+    public static final int PEND_HOST_DISABLED = 1301;
+
+    /**
+     * < The host is locked by the LSF administrator
+     */
+    public static final int PEND_HOST_LOCKED = 1302;
+
+    /**
+     * < Not enough job slots for the parallel job
+     */
+    public static final int PEND_HOST_LESS_SLOTS = 1303;
+
+    /**
+     * < Dispatch windows are closed
+     */
+    public static final int PEND_HOST_WINDOW = 1304;
+
+    /**
+     * < The job slot limit reached
+     */
+    public static final int PEND_HOST_JOB_LIMIT = 1305;
+
+    /**
+     * < The queue's per-CPU job slot limit is reached
+     */
+    public static final int PEND_QUE_PROC_JLIMIT = 1306;
+
+    /**
+     * < The queue's per-host job slot limit is reached
+     */
+    public static final int PEND_QUE_HOST_JLIMIT = 1307;
+
+    /**
+     * < The user's per-CPU job slot limit is reached
+     */
+    public static final int PEND_USER_PROC_JLIMIT = 1308;
+
+    /**
+     * < The host's per-user job slot limit is reached
+     */
+    public static final int PEND_HOST_USR_JLIMIT = 1309;
+
+    /**
+     * < Not a member of the queue
+     */
+    public static final int PEND_HOST_QUE_MEMB = 1310;
+
+    /**
+     * < Not a user-specified host
+     */
+    public static final int PEND_HOST_USR_SPEC = 1311;
+
+    /**
+     * < The user has no access to the host partition
+     */
+    public static final int PEND_HOST_PART_USER = 1312;
+
+    /**
+     * < There is no such user account
+     */
+    public static final int PEND_HOST_NO_USER = 1313;
+
+    /**
+     * < Just started a job recently
+     */
+    public static final int PEND_HOST_ACCPT_ONE = 1314;
+
+    /**
+     * < Load info unavailable
+     */
+    public static final int PEND_LOAD_UNAVAIL = 1315;
+
+    /**
+     * < The LIM is unreachable by the sbatchd
+     */
+    public static final int PEND_HOST_NO_LIM = 1316;
+
+    /**
+     * < The host does not have a valid LSF software license
+     */
+    public static final int PEND_HOST_UNLICENSED = 1317;
+
+    /**
+     * < The queue's resource requirements are not satisfied
+     */
+    public static final int PEND_HOST_QUE_RESREQ = 1318;
+
+    /**
+     * < The submission host type is not the same
+     */
+    public static final int PEND_HOST_SCHED_TYPE = 1319;
+
+    /**
+     * < There are not enough processors to meet the job's spanning requirement.  The job level locality is unsatisfied.
+     */
+    public static final int PEND_JOB_NO_SPAN = 1320;
+
+    /**
+     * < There are not enough processors to meet the queue's spanning requirement.  The queue level locality is unsatisfied.
+     */
+    public static final int PEND_QUE_NO_SPAN = 1321;
+
+    /**
+     * < An exclusive job is running
+     */
+    public static final int PEND_HOST_EXCLUSIVE = 1322;
+
+    /**
+     * < Job Scheduler is disabled on the host.  It is not licensed to accept repetitive jobs.
+     */
+    public static final int PEND_HOST_JS_DISABLED = 1323;
+
+    /**
+     * < The user group's per-CPU job slot limit is reached
+     */
+    public static final int PEND_UGRP_PROC_JLIMIT = 1324;
+
+    /**
+     * < Incorrect host, group or cluster name
+     */
+    public static final int PEND_BAD_HOST = 1325;
+
+    /**
+     * < Host is not used by the queue
+     */
+    public static final int PEND_QUEUE_HOST = 1326;
+
+    /**
+     * < Host is locked by master LIM
+     */
+    public static final int PEND_HOST_LOCKED_MASTER = 1327;
+
+    /**
+     * < Not enough reserved job slots at this time for specified reservation ID
+     */
+    public static final int PEND_HOST_LESS_RSVSLOTS = 1328;
+
+    /**
+     * < Not enough slots or resources for whole duration of the job
+     */
+    public static final int PEND_HOST_LESS_DURATION = 1329;
+
+    /**
+     * < Specified reservation has expired or has been deleted
+     */
+    public static final int PEND_HOST_NO_RSVID = 1330;
+
+    /**
+     * < The host is closed due to lease is inactive
+     */
+    public static final int PEND_HOST_LEASE_INACTIVE = 1331;
+
+    /**
+     * < Not enough job slot(s) while advance reservation is active
+     */
+    public static final int PEND_HOST_ADRSV_ACTIVE = 1332;
+
+    /**
+     * < This queue is not configured to send jobs to the cluster specified in the advance
+     */
+    public static final int PEND_QUE_RSVID_NOMATCH = 1333;
+
+    /**
+     * < Individual host based reasons
+     */
+    public static final int PEND_HOST_GENERAL = 1334;
+
+    /**
+     * < Host does not belong to the specified  advance reservation
+     */
+    public static final int PEND_HOST_RSV = 1335;
+
+    /**
+     * < Host does not belong to a compute unit  of the required type
+     */
+    public static final int PEND_HOST_NOT_CU = 1336;
+
+    /**
+     * < A compute unit containing the host is  used exclusively
+     */
+    public static final int PEND_HOST_CU_EXCL = 1337;
+
+    /**
+     * < CU-level excl. job cannot start since CU  is occupied
+     */
+    public static final int PEND_HOST_CU_OCCUPIED = 1338;
+
+    /**
+     * < Insufficiently many usable slots on the  host's compute unit
+     */
+    public static final int PEND_HOST_USABLE_CU = 1339;
+
+    /**
+     * < No first execution compute unit satisfies CU 'usablepercu' requirement.
+     */
+    public static final int PEND_JOB_FIRST_CU = 1340;
+
+    /**
+     * < A CU containing the host is reserved  exclusively
+     */
+    public static final int PEND_HOST_CU_EXCL_RSV = 1341;
+
+    /**
+     * < Maxcus cannot be satisfied
+     */
+    public static final int PEND_JOB_CU_MAXCUS = 1342;
+
+    /**
+     * < Balance cannot be satisfied
+     */
+    public static final int PEND_JOB_CU_BALANCE = 1343;
+
+    /**
+     * < Cu not supported on toplib integration hosts
+     */
+    public static final int PEND_CU_TOPLIB_HOST = 1344;
+
+/*
+*  sbatchd Related Reasons (1601 - 1900)
+ */
+
+    /**
+     * < Cannot reach sbatchd
+     */
+    public static final int PEND_SBD_UNREACH = 1601;
+
+    /**
+     * < Number of jobs exceed quota
+     */
+    public static final int PEND_SBD_JOB_QUOTA = 1602;
+
+    /**
+     * < The job failed in talking to the server to start the job
+     */
+    public static final int PEND_JOB_START_FAIL = 1603;
+
+    /**
+     * < Failed in receiving the reply from the server when starting the job
+     */
+    public static final int PEND_JOB_START_UNKNWN = 1604;
+
+    /**
+     * < Unable to allocate memory to run job.  There is no memory on the sbatchd.
+     */
+    public static final int PEND_SBD_NO_MEM = 1605;
+
+    /**
+     * < Unable to fork process to run the job.  There are no more processes on the sbatchd.
+     */
+    public static final int PEND_SBD_NO_PROCESS = 1606;
+
+    /**
+     * < Unable to communicate with the job process
+     */
+    public static final int PEND_SBD_SOCKETPAIR = 1607;
+
+    /**
+     * < The slave batch server failed to accept the job
+     */
+    public static final int PEND_SBD_JOB_ACCEPT = 1608;
+
+    /**
+     * < Lease job remote dispatch failed
+     */
+    public static final int PEND_LEASE_JOB_REMOTE_DISPATCH = 1609;
+
+    /**
+     * < Failed to restart job from last checkpoint
+     */
+    public static final int PEND_JOB_RESTART_FAIL = 1610;
+/*
+*  Load Related Reasons (2001 - 2300)
+ */
+
+    /**
+     * < The load threshold is reached
+     */
+    public static final int PEND_HOST_LOAD = 2001;
+
+/*
+*  Queue Resource Reservation Related Reasons (2301 - 2600)
+ */
+
+    /**
+     * < The queue's requirements for resource  reservation are not satisfied.
+     */
+    public static final int PEND_HOST_QUE_RUSAGE = 2301;
+
+/*
+*  Jobs Resource Reservation Related Reasons (2601 - 2900)
+ */
+
+    /**
+     * < The job's requirements for resource  reservation are not satisfied.
+     */
+    public static final int PEND_HOST_JOB_RUSAGE = 2601;
+
+/*
+*  Remote Forwarding Related Reasons (2901 - 3200)
+ */
+
+    /**
+     * < Remote job not recognized by remote cluster, waiting for rescheduling
+     */
+    public static final int PEND_RMT_JOB_FORGOTTEN = 2901;
+
+    /**
+     * < Remote import limit reached, waiting  for rescheduling
+     */
+    public static final int PEND_RMT_IMPT_JOBBKLG = 2902;
+
+    /**
+     * < Remote schedule time reached,  waiting for rescheduling
+     */
+    public static final int PEND_RMT_MAX_RSCHED_TIME = 2903;
+
+    /**
+     * < Remote pre-exec retry limit reached, waiting for rescheduling
+     */
+    public static final int PEND_RMT_MAX_PREEXEC_RETRY = 2904;
+
+    /**
+     * < Remote queue is closed
+     */
+    public static final int PEND_RMT_QUEUE_CLOSED = 2905;
+
+    /**
+     * < Remote queue is inactive
+     */
+    public static final int PEND_RMT_QUEUE_INACTIVE = 2906;
+
+    /**
+     * < Remote queue is congested
+     */
+    public static final int PEND_RMT_QUEUE_CONGESTED = 2907;
+
+    /**
+     * < Remote queue is disconnected
+     */
+    public static final int PEND_RMT_QUEUE_DISCONNECT = 2908;
+
+    /**
+     * < Remote queue is not configured to accept jobs from this cluster
+     */
+    public static final int PEND_RMT_QUEUE_NOPERMISSION = 2909;
+
+    /**
+     * < Job's termination time exceeds the job creation time on remote cluster
+     */
+    public static final int PEND_RMT_BAD_TIME = 2910;
+
+    /**
+     * < Permission denied on the execution cluster
+     */
+    public static final int PEND_RMT_PERMISSIONS = 2911;
+
+    /**
+     * < Job's required on number of processors cannot be satisfied on the remote cluster
+     */
+    public static final int PEND_RMT_PROC_NUM = 2912;
+
+    /**
+     * < User is not defined in the fairshare policy of the remote queue
+     */
+    public static final int PEND_RMT_QUEUE_USE = 2913;
+
+    /**
+     * < Remote queue is a non-interactive queue
+     */
+    public static final int PEND_RMT_NO_INTERACTIVE = 2914;
+
+    /**
+     * < Remote queue is an interactive-only queue
+     */
+    public static final int PEND_RMT_ONLY_INTERACTIVE = 2915;
+
+    /**
+     * < Job's required maximum number of  processors is less then the minimum number
+     */
+    public static final int PEND_RMT_PROC_LESS = 2916;
+
+    /**
+     * < Job's required resource limit exceeds that of the remote queue
+     */
+    public static final int PEND_RMT_OVER_LIMIT = 2917;
+
+    /**
+     * < Job's resource requirements do not match with those of the remote queue
+     */
+    public static final int PEND_RMT_BAD_RESREQ = 2918;
+
+    /**
+     * < Job failed to be created on the remote cluster
+     */
+    public static final int PEND_RMT_CREATE_JOB = 2919;
+
+    /**
+     * < Job is requeued for rerun on the execution cluster
+     */
+    public static final int PEND_RMT_RERUN = 2920;
+
+    /**
+     * < Job is requeued on the execution cluster due to exit value
+     */
+    public static final int PEND_RMT_EXIT_REQUEUE = 2921;
+
+    /**
+     * < Job was killed and requeued on the execution cluster
+     */
+    public static final int PEND_RMT_REQUEUE = 2922;
+
+    /**
+     * < Job was forwarded to remote cluster
+     */
+    public static final int PEND_RMT_JOB_FORWARDING = 2923;
+
+    /**
+     * < Remote import queue defined for the job in lsb.queues is either not ready or not valid
+     */
+    public static final int PEND_RMT_QUEUE_INVALID = 2924;
+
+    /**
+     * < Remote queue is a non-exclusive queue
+     */
+    public static final int PEND_RMT_QUEUE_NO_EXCLUSIVE = 2925;
+
+    /**
+     * < Job was rejected; submitter does not belong to the specified User Group in the remote cluster or the user group does not exist in the remote cluster
+     */
+    public static final int PEND_RMT_UGROUP_MEMBER = 2926;
+
+    /**
+     * < Remote queue is rerunnable: can not accept interactive jobs
+     */
+    public static final int PEND_RMT_INTERACTIVE_RERUN = 2927;
+
+    /**
+     * < Remote cluster failed in talking to server to start the job
+     */
+    public static final int PEND_RMT_JOB_START_FAIL = 2928;
+
+    /**
+     * < Job was rejected; submitter does not belong to the specified User Group in the remote cluster or the user group does not exist in the remote cluster
+     */
+    public static final int PEND_RMT_FORWARD_FAIL_UGROUP_MEMBER = 2930;
+
+    /**
+     * < Specified remote reservation has expired or has been deleted
+     */
+    public static final int PEND_RMT_HOST_NO_RSVID = 2931;
+
+    /**
+     * < Application profile could not be found in the remote cluster.
+     */
+    public static final int PEND_RMT_APP_NULL = 2932;
+
+    /**
+     * < Job's required RUNLIMIT exceeds  RUNTIME*  JOB_RUNLIMIT_RATIO of the remote cluster.
+     */
+    public static final int PEND_RMT_BAD_RUNLIMIT = 2933;
+
+    /**
+     * < Job's required RUNTIME exceeds the hard runtime limit in the remote queue.
+     */
+    public static final int PEND_RMT_OVER_QUEUE_LIMIT = 2934;
+
+    /**
+     * < Job will be pend when no slots available among remote queues.
+     */
+    public static final int PEND_RMT_WHEN_NO_SLOTS = 2935;
+/* SUSPENDING REASONS */
+
+/*
+*  General Resource Limits Related Reasons ( 3201 - 4800)
+ */
+
+    /**
+     * < Resource limit defined on user  or user group has been reached.
+     */
+    public static final int PEND_GENERAL_LIMIT_USER = 3201;
+
+    /**
+     * < Resource (%s) limit defined on queue has been reached.
+     */
+    public static final int PEND_GENERAL_LIMIT_QUEUE = 3501;
+
+    /**
+     * < Resource limit defined on project has been reached.
+     */
+    public static final int PEND_GENERAL_LIMIT_PROJECT = 3801;
+
+    /**
+     * < Resource (%s) limit defined cluster wide has been reached.
+     */
+    public static final int PEND_GENERAL_LIMIT_CLUSTER = 4101;
+
+    /**
+     * < Resource (%s) limit defined on host and/or host group has  been reached.
+     */
+    public static final int PEND_GENERAL_LIMIT_HOST = 4401;
+
+    /**
+     * < JOBS limit defined for the user or user group has been reached.
+     */
+    public static final int PEND_GENERAL_LIMIT_JOBS_USER = 4701;
+
+    /**
+     * < JOBS limit defined for the queue has been reached.
+     */
+    public static final int PEND_GENERAL_LIMIT_JOBS_QUEUE = 4702;
+
+    /**
+     * < JOBS limit defined for the project has been reached.
+     */
+    public static final int PEND_GENERAL_LIMIT_JOBS_PROJECT = 4703;
+
+    /**
+     * < JOBS limit defined cluster-wide has been reached.
+     */
+    public static final int PEND_GENERAL_LIMIT_JOBS_CLUSTER = 4704;
+
+    /**
+     * < JOBS limit defined on host or host group has been reached.
+     */
+    public static final int PEND_GENERAL_LIMIT_JOBS_HOST = 4705;
+
+/* LSF2 Presto RLA-related reasons    (4900 - 4989) */
+
+    /**
+     * < RMS scheduler plugin  internal error.
+     */
+    public static final int PEND_RMS_PLUGIN_INTERNAL = 4900;
+
+    /**
+     * < RLA communication failure.
+     */
+    public static final int PEND_RMS_PLUGIN_RLA_COMM = 4901;
+
+    /**
+     * < RMS is not available.
+     */
+    public static final int PEND_RMS_NOT_AVAILABLE = 4902;
+
+    /**
+     * < Cannot satisfy the topology  requirement.
+     */
+    public static final int PEND_RMS_FAIL_TOPOLOGY = 4903;
+
+    /**
+     * < Cannot allocate an RMS resource.
+     */
+    public static final int PEND_RMS_FAIL_ALLOC = 4904;
+
+    /**
+     * < RMS job with special topology requirements cannot be preemptive or backfill job.
+     */
+    public static final int PEND_RMS_SPECIAL_NO_PREEMPT_BACKFILL = 4905;
+
+    /**
+     * < RMS job with special topology requirements cannot reserve slots.
+     */
+    public static final int PEND_RMS_SPECIAL_NO_RESERVE = 4906;
+
+    /**
+     * < RLA internal error.
+     */
+    public static final int PEND_RMS_RLA_INTERNAL = 4907;
+
+    /**
+     * < Not enough slots for job.  Job with RMS topology requirements cannot reserve slots, be preemptive, or be a backfill job.
+     */
+    public static final int PEND_RMS_NO_SLOTS_SPECIAL = 4908;
+
+    /**
+     * < User account does not exist on the execution host.
+     */
+    public static final int PEND_RMS_RLA_NO_SUCH_USER = 4909;
+
+    /**
+     * < Unknown host and/or partition unavailable.
+     */
+    public static final int PEND_RMS_RLA_NO_SUCH_HOST = 4910;
+
+    /**
+     * < Cannot schedule chunk jobs to RMS hosts.
+     */
+    public static final int PEND_RMS_CHUNKJOB = 4911;
+
+    /**
+     * < RLA protocol mismatch.
+     */
+    public static final int PEND_RLA_PROTOMISMATCH = 4912;
+
+    /**
+     * < Contradictory topology requirements specified.
+     */
+    public static final int PEND_RMS_BAD_TOPOLOGY = 4913;
+
+    /**
+     * < Not enough slots to satisfy mandatory contiguous requirement.
+     */
+    public static final int PEND_RMS_RESREQ_MCONT = 4914;
+
+    /**
+     * < Not enough slots to satisfy RMS ptile requirement.
+     */
+    public static final int PEND_RMS_RESREQ_PTILE = 4915;
+
+    /**
+     * < Not enough slots to satisfy RMS nodes requirement.
+     */
+    public static final int PEND_RMS_RESREQ_NODES = 4916;
+
+    /**
+     * < Cannot satisfy RMS base node requirement.
+     */
+    public static final int PEND_RMS_RESREQ_BASE = 4917;
+
+    /**
+     * < Cannot satisfy RMS rails requirement.
+     */
+    public static final int PEND_RMS_RESREQ_RAILS = 4918;
+
+    /**
+     * < Cannot satisfy RMS railmask requirement.
+     */
+    public static final int PEND_RMS_RESREQ_RAILMASK = 4919;
+
+
+/*
+*  Maui Integration Related Reasons ( 5000 - 5100)
+ */
+
+    /**
+     * < Unable to communicate with external Maui scheduler.
+     */
+    public static final int PEND_MAUI_UNREACH = 5000;
+
+    /**
+     * < Job is pending at external Maui scheduler.
+     */
+    public static final int PEND_MAUI_FORWARD = 5001;
+
+    /**
+     * < External Maui scheduler sets detail reason.
+     */
+    public static final int PEND_MAUI_REASON = 5030;
+
+/*
+*  SGI CPUSET Integration Related Reasons ( 5200 - 5299)
+ */
+
+    /**
+     * < CPUSET attach failed.  Job requeued
+     */
+    public static final int PEND_CPUSET_ATTACH = 5200;
+
+    /**
+     * < Not a cpuset host
+     */
+    public static final int PEND_CPUSET_NOT_CPUSETHOST = 5201;
+
+    /**
+     * < Topd initialization failed
+     */
+    public static final int PEND_CPUSET_TOPD_INIT = 5202;
+
+    /**
+     * < Topd communication timeout
+     */
+    public static final int PEND_CPUSET_TOPD_TIME_OUT = 5203;
+
+    /**
+     * < Cannot satisfy the cpuset  allocation requirement
+     */
+    public static final int PEND_CPUSET_TOPD_FAIL_ALLOC = 5204;
+
+    /**
+     * < Bad cpuset allocation request
+     */
+    public static final int PEND_CPUSET_TOPD_BAD_REQUEST = 5205;
+
+    /**
+     * < Topd internal error
+     */
+    public static final int PEND_CPUSET_TOPD_INTERNAL = 5206;
+
+    /**
+     * < Cpuset system API failure
+     */
+    public static final int PEND_CPUSET_TOPD_SYSAPI_ERR = 5207;
+
+    /**
+     * < Specified static cpuset does  not exist on the host
+     */
+    public static final int PEND_CPUSET_TOPD_NOSUCH_NAME = 5208;
+
+    /**
+     * < Cpuset is already allocated   for this job
+     */
+    public static final int PEND_CPUSET_TOPD_JOB_EXIST = 5209;
+
+    /**
+     * < Topd malloc failure
+     */
+    public static final int PEND_CPUSET_TOPD_NO_MEMORY = 5210;
+
+    /**
+     * < User account does not exist   on the cpuset host
+     */
+    public static final int PEND_CPUSET_TOPD_INVALID_USER = 5211;
+
+    /**
+     * < User does not have permission   to run job within cpuset
+     */
+    public static final int PEND_CPUSET_TOPD_PERM_DENY = 5212;
+
+    /**
+     * < Topd is not available
+     */
+    public static final int PEND_CPUSET_TOPD_UNREACH = 5213;
+
+    /**
+     * < Topd communication failure
+     */
+    public static final int PEND_CPUSET_TOPD_COMM_ERR = 5214;
+
+
+    /**
+     * < CPUSET scheduler plugin internal error
+     */
+    public static final int PEND_CPUSET_PLUGIN_INTERNAL = 5215;
+
+    /**
+     * < Cannot schedule chunk jobs to cpuset hosts
+     */
+    public static final int PEND_CPUSET_CHUNKJOB = 5216;
+
+    /**
+     * < Can't satisfy CPU_LIST   requirement
+     */
+    public static final int PEND_CPUSET_CPULIST = 5217;
+
+    /**
+     * < Cannot satisfy CPUSET MAX_RADIUS requirement
+     */
+    public static final int PEND_CPUSET_MAXRADIUS = 5218;
+
+/* Bproc integration related reasons (5300 - 5320)
+ */
+
+    /**
+     * < Node allocation failed
+     */
+    public static final int PEND_NODE_ALLOC_FAIL = 5300;
+
+/* Eagle pending reasons  (5400 - 5449) */
+
+    /**
+     * < RMS resource is not available
+     */
+    public static final int PEND_RMSRID_UNAVAIL = 5400;
+
+
+    /**
+     * < Not enough free cpus to satisfy job requirements
+     */
+    public static final int PEND_NO_FREE_CPUS = 5450;
+
+    /**
+     * < Topology unknown or recently changed
+     */
+    public static final int PEND_TOPOLOGY_UNKNOWN = 5451;
+
+    /**
+     * < Contradictory topology requirement specified
+     */
+    public static final int PEND_BAD_TOPOLOGY = 5452;
+
+    /**
+     * < RLA communications failure
+     */
+    public static final int PEND_RLA_COMM = 5453;
+
+    /**
+     * < User account does not exist on execution host
+     */
+    public static final int PEND_RLA_NO_SUCH_USER = 5454;
+
+    /**
+     * < RLA internal error
+     */
+    public static final int PEND_RLA_INTERNAL = 5455;
+
+    /**
+     * < Unknown host and/or partition unavailable
+     */
+    public static final int PEND_RLA_NO_SUCH_HOST = 5456;
+
+    /**
+     * < Too few slots for specified topology requirement
+     */
+    public static final int PEND_RESREQ_TOOFEWSLOTS = 5457;
+
+/* PSET pending reasons (5500 - 5549) */
+
+    /**
+     * < PSET scheduler plugin internal error
+     */
+    public static final int PEND_PSET_PLUGIN_INTERNAL = 5500;
+
+    /**
+     * < Cannot satisfy PSET ptile requirement
+     */
+    public static final int PEND_PSET_RESREQ_PTILE = 5501;
+
+    /**
+     * < Cannot satisfy PSET cells requirement
+     */
+    public static final int PEND_PSET_RESREQ_CELLS = 5502;
+
+    /**
+     * < Cannot schedule chunk jobs to PSET hosts
+     */
+    public static final int PEND_PSET_CHUNKJOB = 5503;
+
+    /**
+     * < Host does not support processor set functionality
+     */
+    public static final int PEND_PSET_NOTSUPPORT = 5504;
+
+    /**
+     * < PSET bind failed. Job requeued
+     */
+    public static final int PEND_PSET_BIND_FAIL = 5505;
+
+    /**
+     * < Cannot satisfy PSET CELL_LIST  requirement
+     */
+    public static final int PEND_PSET_RESREQ_CELLLIST = 5506;
+
+
+/* SLURM pending reasons (5550 - 5599) */
+
+    /**
+     * < SLURM scheduler plugin internal error
+     */
+    public static final int PEND_SLURM_PLUGIN_INTERNAL = 5550;
+
+    /**
+     * < Not enough resource to satisfy SLURM nodes requirement
+     */
+    public static final int PEND_SLURM_RESREQ_NODES = 5551;
+
+    /**
+     * < Not enough resource to satisfy SLURM node attributes requirement.
+     */
+    public static final int PEND_SLURM_RESREQ_NODE_ATTR = 5552;
+
+    /**
+     * < Not enough resource to satisfy SLURM exclude requirement.
+     */
+    public static final int PEND_SLURM_RESREQ_EXCLUDE = 5553;
+
+    /**
+     * < Not enough resource to satisfy SLURM nodelist requirement.
+     */
+    public static final int PEND_SLURM_RESREQ_NODELIST = 5554;
+
+    /**
+     * < Not enough resource to satisfy SLURM contiguous requirement.
+     */
+    public static final int PEND_SLURM_RESREQ_CONTIGUOUS = 5555;
+
+    /**
+     * < SLURM allocation is not available. Job requeued.
+     */
+    public static final int PEND_SLURM_ALLOC_UNAVAIL = 5556;
+
+    /**
+     * < Invalid grammar in SLURM constraints option, job will never run.
+     */
+    public static final int PEND_SLURM_RESREQ_BAD_CONSTRAINT = 5557;
+
+/* Cray X1 pending reasons (5600 - 5649) */
+
+    /**
+     * < Not enough SSPs for job
+     */
+    public static final int PEND_CRAYX1_SSP = 5600;
+
+    /**
+     * < Not enough MSPs for job
+     */
+    public static final int PEND_CRAYX1_MSP = 5601;
+
+    /**
+     * < Unable to pass limit information to psched.
+     */
+    public static final int PEND_CRAYX1_PASS_LIMIT = 5602;
+
+/* Cray XT3 pending reasons (5650 - 5699) */
+
+    /**
+     * < Unable to create or assign a  partition by CPA
+     */
+    public static final int PEND_CRAYXT3_ASSIGN_FAIL = 5650;
+
+/* BlueGene pending reasons (5700 - 5749) */
+
+    /**
+     * < BG/L: Scheduler plug-in internal error.
+     */
+    public static final int PEND_BLUEGENE_PLUGIN_INTERNAL = 5700;
+
+    /**
+     * < BG/L: Allocation is not available. Job requeued.
+     */
+    public static final int PEND_BLUEGENE_ALLOC_UNAVAIL = 5701;
+
+    /**
+     * < BG/L: No free base partitions available for a full block allocation.
+     */
+    public static final int PEND_BLUEGENE_NOFREEMIDPLANES = 5702;
+
+    /**
+     * < BG/L: No free quarters available for a small block allocation.
+     */
+    public static final int PEND_BLUEGENE_NOFREEQUARTERS = 5703;
+
+    /**
+     * < BG/L: No free node cards available for a small block allocation.
+     */
+    public static final int PEND_BLUEGENE_NOFREENODECARDS = 5704;
+
+/* resize enhancement related pending reasons */
+
+    /**
+     * < First execution host unavailable
+     */
+    public static final int PEND_RESIZE_FIRSTHOSTUNAVAIL = 5705;
+
+    /**
+     * < Master is not in the RUN state
+     */
+    public static final int PEND_RESIZE_MASTERSUSP = 5706;
+
+    /**
+     * < Host is not same as for master
+     */
+    public static final int PEND_RESIZE_MASTER_SAME = 5707;
+
+    /**
+     * < Host already used by master
+     */
+    public static final int PEND_RESIZE_SPAN_PTILE = 5708;
+
+    /**
+     * < The job can only use first host
+     */
+    public static final int PEND_RESIZE_SPAN_HOSTS = 5709;
+
+    /**
+     * < The job cannot get slots on remote hosts
+     */
+    public static final int PEND_RESIZE_LEASE_HOST = 5710;
+
+/* compound resreq related pending reasons (5800 - ??) */
+
+    /**
+     * < The job cannot get slots on  pre-7Update5 remote hosts
+     */
+    public static final int PEND_COMPOUND_RESREQ_OLD_LEASE_HOST = 5800;
+
+    /**
+     * < Hosts using LSF HPC system  integrations do not support compound resource requirements.
+     */
+    public static final int PEND_COMPOUND_RESREQ_TOPLIB_HOST = 5801;
+/* multi-phase resreq related pending reasons (5900 - ??) */
+
+    /**
+     * < The job cannot get slots on  pre-7Update6 remote hosts
+     */
+    public static final int PEND_MULTIPHASE_RESREQ_OLD_LEASE_HOST = 5900;
+
+/* EGO-Enabled SLA pending reasons (5750 - 5799) */
+
+    /**
+     * < Host does not have enough slots for this SLA job.
+     */
+    public static final int PEND_PS_PLUGIN_INTERNAL = 5750;
+
+    /**
+     * < EGO SLA: Failed to synchronize resource with MBD.
+     */
+    public static final int PEND_PS_MBD_SYNC = 5751;
+
+
+/* PLATFORM reserves pending reason number from 1 - 20000.
+*  External plugin is suggested to use platform's reserved pending reason
+*  number. However, they can use pending reason number between 20001 - 25000
+*  as customer specific pending reasons. In this case, bjobs -p will only show
+*  the reason number without detailed message
+ */
+
+    /**
+     * < Customized pending reason number between min and max.
+     */
+    public static final int PEND_CUSTOMER_MIN = 20001;
+
+    /**
+     * < Customized pending reason number between min and max.
+     */
+    public static final int PEND_CUSTOMER_MAX = 25000;
+
+
+    /**
+     * < The maximum number of reasons
+     */
+    public static final int PEND_MAX_REASONS = 25001;
+
+    /**
+     * \addtogroup suspending_reasons  suspending_reasons
+     * suspending_reasons is part of pending_reasons
+     */
+/* SUSPENDING REASONS */
+
+/* User related reasons */
+
+    /**
+     * < Virtual code. Not a reason
+     */
+    public static final int SUSP_USER_REASON = 0x00000000;
+
+    /**
+     * < The job is waiting to be re-scheduled after being resumed by the user.
+     */
+    public static final int SUSP_USER_RESUME = 0x00000001;
+
+    /**
+     * < The user suspended the job.
+     */
+    public static final int SUSP_USER_STOP = 0x00000002;
+
+/* Queue and system related reasons */
+
+    /**
+     * < Virtual code. Not a reason
+     */
+    public static final int SUSP_QUEUE_REASON = 0x00000004;
+
+    /**
+     * < The run window of the queue is closed.
+     */
+    public static final int SUSP_QUEUE_WINDOW = 0x00000008;
+
+    /**
+     * < Suspended after preemption. The system needs to re-allocate CPU utilization by job priority.
+     */
+    public static final int SUSP_RESCHED_PREEMPT = 0x00000010;
+
+    /**
+     * < The LSF administrator has locked the execution host.
+     */
+    public static final int SUSP_HOST_LOCK = 0x00000020;
+
+    /**
+     * < A load index exceeds its threshold. The subreasons field indicates which indices.
+     */
+    public static final int SUSP_LOAD_REASON = 0x00000040;
+
+    /**
+     * < The job was preempted by mbatchd because of a higher priority job.
+     */
+    public static final int SUSP_MBD_PREEMPT = 0x00000080;
+
+    /**
+     * < Preempted by sbatchd. The job limit of the host/user has been reached.
+     */
+    public static final int SUSP_SBD_PREEMPT = 0x00000100;
+
+    /**
+     * < The suspend conditions of the queue,  as specified by the STOP_COND parameter in lsb.queues, are true.
+     */
+    public static final int SUSP_QUE_STOP_COND = 0x00000200;
+
+    /**
+     * < The resume conditions of the queue, as specified by the RESUME_COND parameter in lsb.queues, are false.
+     */
+    public static final int SUSP_QUE_RESUME_COND = 0x00000400;
+
+    /**
+     * < The job was suspended due to the paging rate and the host is not idle yet.
+     */
+    public static final int SUSP_PG_IT = 0x00000800;
+
+    /**
+     * < Resets the previous reason.
+     */
+    public static final int SUSP_REASON_RESET = 0x00001000;
+
+    /**
+     * < Load information on the execution hosts is unavailable.
+     */
+    public static final int SUSP_LOAD_UNAVAIL = 0x00002000;
+
+    /**
+     * < The job was suspended by root or the LSF administrator.
+     */
+    public static final int SUSP_ADMIN_STOP = 0x00004000;
+
+    /**
+     * < The job's requirements for resource reservation are not satisfied.
+     */
+    public static final int SUSP_RES_RESERVE = 0x00008000;
+
+    /**
+     * < The job is locked by the mbatchd.
+     */
+    public static final int SUSP_MBD_LOCK = 0x00010000;
+
+    /**
+     * < The job is terminated due to a resource limit (see suspending_subreasons).
+     */
+    public static final int SUSP_RES_LIMIT = 0x00020000;
+
+    /**
+     * < The job is suspended while the sbatchd is restarting.
+     */
+    public static final int SUSP_SBD_STARTUP = 0x00040000;
+
+    /**
+     * < The execution host is locked by the master LIM.
+     */
+    public static final int SUSP_HOST_LOCK_MASTER = 0x00080000;
+
+    /**
+     * < An advance reservation using the  host is active
+     */
+    public static final int SUSP_HOST_RSVACTIVE = 0x00100000;
+
+    /**
+     * < There is a detailed reason in the subreason field
+     */
+    public static final int SUSP_DETAILED_SUBREASON = 0x00200000;
+    /* GLB suspending reason */
+
+    /**
+     * < The job is preempted by glb
+     */
+    public static final int SUSP_GLB_LICENSE_PREEMPT = 0x00400000;
+
+    /* Cray X1 suspend reasons */
+
+    /**
+     * < Job not placed by Cray X1  psched
+     */
+    public static final int SUSP_CRAYX1_POSTED = 0x00800000;
+
+    /**
+     * < Job suspended when its advance  reservation expired
+     */
+    public static final int SUSP_ADVRSV_EXPIRED = 0x01000000;
+
+    /**
+     * \addtogroup suspending_subreasons  suspending_subreasons
+     * suspending_subreasons has the following options:
+     */
+
+    /**
+     * < Sub reason of SUSP_RES_LIMIT: RUNLIMIT is reached.
+     */
+    public static final int SUB_REASON_RUNLIMIT = 0x00000001;
+
+    /**
+     * < Sub reason of SUSP_RES_LIMIT: DEADLINE is reached.
+     */
+    public static final int SUB_REASON_DEADLINE = 0x00000002;
+
+    /**
+     * < Sub reason of SUSP_RES_LIMIT: PROCESSLIMIT is reached.
+     */
+    public static final int SUB_REASON_PROCESSLIMIT = 0x00000004;
+
+    /**
+     * < Sub reason of SUSP_RES_LIMIT: CPULIMIT is reached.
+     */
+    public static final int SUB_REASON_CPULIMIT = 0x00000008;
+
+    /**
+     * < Sub reason of SUSP_RES_LIMIT: MEMLIMIT is reached.
+     */
+    public static final int SUB_REASON_MEMLIMIT = 0x00000010;
+
+    /**
+     * < Sub reason of SUSP_RES_LIMIT: THREADLIMIT is reached.
+     */
+    public static final int SUB_REASON_THREADLIMIT = 0x00000020;
+
+    /**
+     * < Sub reason of SUSP_RES_LIMIT: SWAPLIMIT is reached.
+     */
+    public static final int SUB_REASON_SWAPLIMIT = 0x00000040;
+
+    /**
+     * < Account ID does not match those allowed by the gate
+     */
+    public static final int SUB_REASON_CRAYX1_ACCOUNTID = 0x00000001;
+
+    /**
+     * < Attribute does not match  those allowed by the gate
+     */
+    public static final int SUB_REASON_CRAYX1_ATTRIBUTE = 0x00000002;
+
+    /**
+     * < Blocked by one or more gates
+     */
+    public static final int SUB_REASON_CRAYX1_BLOCKED = 0x00000004;
+
+    /**
+     * < Application is in the process of being restarted  and it is under the control  of CPR
+     */
+    public static final int SUB_REASON_CRAYX1_RESTART = 0x00000008;
+
+    /**
+     * < Depth does not match those  allowed by the gate
+     */
+    public static final int SUB_REASON_CRAYX1_DEPTH = 0x00000010;
+
+    /**
+     * < GID does not match those  allowed by the gate
+     */
+    public static final int SUB_REASON_CRAYX1_GID = 0x00000020;
+
+    /**
+     * < No GASID is available
+     */
+    public static final int SUB_REASON_CRAYX1_GASID = 0x00000040;
+
+    /**
+     * < Hard label does not match  those allowed by the gate
+     */
+    public static final int SUB_REASON_CRAYX1_HARDLABEL = 0x00000080;
+
+    /**
+     * < Limit exceeded in regions   or domains
+     */
+    public static final int SUB_REASON_CRAYX1_LIMIT = 0x00000100;
+
+    /**
+     * < Memory size does not match  those allowed by the gate
+     */
+    public static final int SUB_REASON_CRAYX1_MEMORY = 0x00000200;
+
+    /**
+     * < Soft label does not match   those allowed by the gate
+     */
+    public static final int SUB_REASON_CRAYX1_SOFTLABEL = 0x00000400;
+
+    /**
+     * < Size gate (width times  depth larger than gate  allows)
+     */
+    public static final int SUB_REASON_CRAYX1_SIZE = 0x00000800;
+
+    /**
+     * < Time limit does not match those allowed by the gate
+     */
+    public static final int SUB_REASON_CRAYX1_TIME = 0x00001000;
+
+    /**
+     * < UID does not match those  allowed by the gate
+     */
+    public static final int SUB_REASON_CRAYX1_UID = 0x00002000;
+
+    /**
+     * < Width does not match those allowed by the gate
+     */
+    public static final int SUB_REASON_CRAYX1_WIDTH = 0x00004000;
+/*
+*  EXITING REASONS: currently only to indicate exited due to
+*  1) rerunnable job being restart from last chkpnt;
+*  2) being killed while execution host is unavailable
+ */
+
+    /** Job finished normally */
+    public static final int EXIT_NORMAL = 0x00000000;
+
+    /** Rerunnable job to be restarted */
+    public static final int EXIT_RESTART = 0x00000001;
+
+    /** Job killed while host unavailable */
+    public static final int EXIT_ZOMBIE = 0x00000002;
+
+    /** Job is finished and put into pend list */
+    public static final int FINISH_PEND = 0x00000004;
+
+    /** The job is killed while the execution host is unreachable */
+    public static final int EXIT_KILL_ZOMBIE = 0x00000008;
+
+    /** The job in ZOMBIE is removed */
+    public static final int EXIT_ZOMBIE_JOB = 0x00000010;
+
+    /** Rerun a job without creating a ZOMBIE job */
+    public static final int EXIT_RERUN = 0x00000020;
+
+    /** Remote job has no mapping user name here */
+    public static final int EXIT_NO_MAPPING = 0x00000040;
+
+    /** Remote job has no permission running here */
+    public static final int EXIT_REMOTE_PERMISSION = 0x00000080;
+
+    /** Remote job cannot run locally because of environment problem */
+    public static final int EXIT_INIT_ENVIRON = 0x00000100;
+
+    /** Remote job failed in pre_exec command */
+    public static final int EXIT_PRE_EXEC = 0x00000200;
+
+    /** The job is killed and will be later requeued */
+    public static final int EXIT_REQUEUE = 0x00000400;
+
+    /** Job could not be killed but was removed from system */
+    public static final int EXIT_REMOVE = 0x00000800;
+
+    /** Requeue by exit value */
+    public static final int EXIT_VALUE_REQUEUE = 0x00001000;
+
+    /** Cancel request received from remote cluster. */
+    public static final int EXIT_CANCEL = 0x00002000;
+
+    /** MED killed job on web server */
+    public static final int EXIT_MED_KILLED = 0x00004000;
+
+    /** Remote lease job exit on execution, side, return to pend on submission */
+    public static final int EXIT_REMOTE_LEASE_JOB = 0x00008000;
+
+    /** Exit when cwd does not exist*/
+    public static final int EXIT_CWD_NOTEXIST = 0x00010000;
+
+
+    /** Mode indicating running in batch, js, or batch-js mode */
+    public static final int LSB_MODE_BATCH = 0x1;
+    public static final int LSB_MODE_JS = 0x2;
+    public static final int LSB_MODE_BATCH_RD = 0x4;
+
+    public static final int RLIMIT_CPU = 0;
+    public static final int RLIMIT_FSIZE = 1;
+    public static final int RLIMIT_DATA = 2;
+    public static final int RLIMIT_STACK = 3;
+    public static final int RLIMIT_CORE = 4;
+    public static final int RLIMIT_RSS = 5;
+    public static final int RLIM_INFINITY = 0x7fffffff;
+
+/*
+*  Error codes for lsblib calls
+*  Each error code has its corresponding error message defined in lsb.err.c
+*  The code number is just the position number of its message.
+*  Adding a new code here must add its message there in the corresponding
+*  position.  Changing any code number here must change the position there.
+ */
+/* Error codes related to job */
+
+    /** No error at all */
+    public static final int LSBE_NO_ERROR = 0;
+
+    /** No matching job found */
+    public static final int LSBE_NO_JOB = 1;
+
+    /** Job not started yet */
+    public static final int LSBE_NOT_STARTED = 2;
+
+    /** Job already started */
+    public static final int LSBE_JOB_STARTED = 3;
+
+    /** Job already finished */
+    public static final int LSBE_JOB_FINISH = 4;
+
+    /** Ask sbatchd to stop the wrong job */
+    public static final int LSBE_STOP_JOB = 5;
+
+    /** Depend_cond syntax error */
+    public static final int LSBE_DEPEND_SYNTAX = 6;
+
+    /** Queue doesn't accept EXCLUSIVE job */
+    public static final int LSBE_EXCLUSIVE = 7;
+
+    /** Root is not allowed to submit jobs */
+    public static final int LSBE_ROOT = 8;
+
+    /** Job is already being migrated */
+    public static final int LSBE_MIGRATION = 9;
+
+    /** Job is not chkpntable */
+    public static final int LSBE_J_UNCHKPNTABLE = 10;
+
+    /** Job has no output so far */
+    public static final int LSBE_NO_OUTPUT = 11;
+
+    /** No jobId can be used now */
+    public static final int LSBE_NO_JOBID = 12;
+
+    /** Queue only accepts bsub -I job */
+    public static final int LSBE_ONLY_INTERACTIVE = 13;
+
+    /** Queue doesn't accept bsub -I job */
+    public static final int LSBE_NO_INTERACTIVE = 14;
+
+/** Error codes related to user, queue and host */
+
+    /** No user defined in lsb.users file */
+    public static final int LSBE_NO_USER = 15;
+
+    /** Bad user name */
+    public static final int LSBE_BAD_USER = 16;
+
+    /** User permission denied */
+    public static final int LSBE_PERMISSION = 17;
+
+    /** No such queue in the system */
+    public static final int LSBE_BAD_QUEUE = 18;
+
+    /** Queue name should be given */
+    public static final int LSBE_QUEUE_NAME = 19;
+
+    /** Queue has been closed */
+    public static final int LSBE_QUEUE_CLOSED = 20;
+
+    /** Queue windows are closed */
+    public static final int LSBE_QUEUE_WINDOW = 21;
+
+    /** User cannot use the queue */
+    public static final int LSBE_QUEUE_USE = 22;
+
+    /** Bad host name or host group name */
+    public static final int LSBE_BAD_HOST = 23;
+
+    /** Too many processors requested */
+    public static final int LSBE_PROC_NUM = 24;
+
+    /** No host partition in the system */
+    public static final int LSBE_NO_HPART = 25;
+
+    /** Bad host partition name */
+    public static final int LSBE_BAD_HPART = 26;
+
+    /** No group defined in the system */
+    public static final int LSBE_NO_GROUP = 27;
+
+    /** Bad host/user group name */
+    public static final int LSBE_BAD_GROUP = 28;
+
+    /** Host is not used by the queue */
+    public static final int LSBE_QUEUE_HOST = 29;
+
+    /** User reach UJOB_LIMIT of the queue */
+    public static final int LSBE_UJOB_LIMIT = 30;
+
+    /** No host available for migration */
+    public static final int LSBE_NO_HOST = 31;
+
+
+    /** chklog is corrupted */
+    public static final int LSBE_BAD_CHKLOG = 32;
+
+    /** User reach PJOB_LIMIT of the queue */
+    public static final int LSBE_PJOB_LIMIT = 33;
+
+    /** request from non LSF host rejected*/
+    public static final int LSBE_NOLSF_HOST = 34;
+
+/** Error codes related to input arguments of lsblib call */
+
+    /** Bad argument for lsblib call */
+    public static final int LSBE_BAD_ARG = 35;
+
+    /** Bad time spec for lsblib call */
+    public static final int LSBE_BAD_TIME = 36;
+
+    /** Start time is later than end time */
+    public static final int LSBE_START_TIME = 37;
+
+    /** Bad CPU limit specification */
+    public static final int LSBE_BAD_LIMIT = 38;
+
+    /** Over hard limit of queue */
+    public static final int LSBE_OVER_LIMIT = 39;
+
+    /** Empty job (command) */
+    public static final int LSBE_BAD_CMD = 40;
+
+    /** Bad signal value; not supported */
+    public static final int LSBE_BAD_SIGNAL = 41;
+
+    /** Bad job name */
+    public static final int LSBE_BAD_JOB = 42;
+
+    /** Queue reach QJOB_LIMIT of the queue */
+    public static final int LSBE_QJOB_LIMIT = 43;
+
+    /** Expired job terminate time*/
+    public static final int LSBE_BAD_TERM = 44;
+/** 44 is reserved for future use */
+
+/** Error codes related to lsb.events file */
+
+    /** Unknown event in event log file */
+    public static final int LSBE_UNKNOWN_EVENT = 45;
+
+    /** bad event format in event log file */
+    public static final int LSBE_EVENT_FORMAT = 46;
+
+    /** End of file */
+    public static final int LSBE_EOF = 47;
+/** 48-49 are reserved for future use */
+
+/** Error codes related to system failure */
+
+    /** mbatchd internal error */
+    public static final int LSBE_MBATCHD = 50;
+
+    /** sbatchd internal error */
+    public static final int LSBE_SBATCHD = 51;
+
+    /** lsbatch lib internal error */
+    public static final int LSBE_LSBLIB = 52;
+
+    /** LSLIB call fails */
+    public static final int LSBE_LSLIB = 53;
+
+    /** System call fails */
+    public static final int LSBE_SYS_CALL = 54;
+
+    /** Cannot alloc memory */
+    public static final int LSBE_NO_MEM = 55;
+
+    /** Lsbatch service not registered */
+    public static final int LSBE_SERVICE = 56;
+
+    /** LSB_SHAREDIR not defined */
+    public static final int LSBE_NO_ENV = 57;
+
+    /** chkpnt system call fail */
+    public static final int LSBE_CHKPNT_CALL = 58;
+
+    /** mbatchd cannot fork */
+    public static final int LSBE_NO_FORK = 59;
+
+/** Error codes related to communication between mbatchd/lsblib/sbatchd */
+
+    /** LSBATCH protocol error */
+    public static final int LSBE_PROTOCOL = 60;
+
+    /** XDR en/decode error */
+    public static final int LSBE_XDR = 61;
+
+    /** No appropriate port can be bound */
+    public static final int LSBE_PORT = 62;
+
+    /** Timeout in contacting mbatchd */
+    public static final int LSBE_TIME_OUT = 63;
+
+    /** Timeout on connect() call */
+    public static final int LSBE_CONN_TIMEOUT = 64;
+
+    /** Connection refused by server */
+    public static final int LSBE_CONN_REFUSED = 65;
+
+    /** server connection already exists */
+    public static final int LSBE_CONN_EXIST = 66;
+
+    /** server is not connected */
+    public static final int LSBE_CONN_NONEXIST = 67;
+
+    /** sbatchd cannot be reached */
+    public static final int LSBE_SBD_UNREACH = 68;
+
+    /** Operation cannot be performed right now, op. will be retried. */
+    public static final int LSBE_OP_RETRY = 69;
+
+    /** User does not have enough job slots */
+    public static final int LSBE_USER_JLIMIT = 70;
+/** 71 is reserved for future use */
+
+/** Error codes related to NQS */
+
+    /** Bad specification for a NQS job */
+    public static final int LSBE_NQS_BAD_PAR = 72;
+
+
+    /** Client host has no license */
+    public static final int LSBE_NO_LICENSE = 73;
+
+/** Error codes related to calendar */
+
+    /** Bad calendar name */
+    public static final int LSBE_BAD_CALENDAR = 74;
+
+    /** No calendar found */
+    public static final int LSBE_NOMATCH_CALENDAR = 75;
+
+    /** No calendar in system */
+    public static final int LSBE_NO_CALENDAR = 76;
+
+    /** Bad calendar time events */
+    public static final int LSBE_BAD_TIMEEVENT = 77;
+
+    /** Calendar exist already */
+    public static final int LSBE_CAL_EXIST = 78;
+
+    /** Calendar function is not enabled*/
+    public static final int LSBE_CAL_DISABLED = 79;
+
+/** Error codes related to modify job's parameters */
+
+    /** the job's params cannot be changed */
+    public static final int LSBE_JOB_MODIFY = 80;
+    /** the changed once parameters are not used */
+    public static final int LSBE_JOB_MODIFY_ONCE = 81;
+
+
+    /** the job is not a repetitive job */
+    public static final int LSBE_J_UNREPETITIVE = 82;
+
+    /** bad cluster name */
+    public static final int LSBE_BAD_CLUSTER = 83;
+
+/** Error codes related jobs driven by calendar */
+
+    /** Job can not be killed in pending */
+    public static final int LSBE_PEND_CAL_JOB = 84;
+    /** This Running turn is being terminated */
+    public static final int LSBE_RUN_CAL_JOB = 85;
+
+
+    /** Modified parameters are being used */
+    public static final int LSBE_JOB_MODIFY_USED = 86;
+
+    /** Can not get user's token */
+    public static final int LSBE_AFS_TOKENS = 87;
+
+/** Error codes related to event */
+
+    /** Bad event name */
+    public static final int LSBE_BAD_EVENT = 88;
+
+    /** No event found */
+    public static final int LSBE_NOMATCH_EVENT = 89;
+
+    /** No event in system */
+    public static final int LSBE_NO_EVENT = 90;
+
+/** Error codes related to user, queue and host */
+
+    /** User reach HJOB_LIMIT of the queue */
+    public static final int LSBE_HJOB_LIMIT = 91;
+
+/** Error codes related to bmsg */
+
+    /** Message delivered */
+    public static final int LSBE_MSG_DELIVERED = 92;
+    /** MBD could not find the message that SBD mentions about */
+    public static final int LSBE_NO_JOBMSG = 93;
+
+    /** x */
+    public static final int LSBE_MSG_RETRY = 94;
+
+/** Error codes related to resource requirement */
+
+    /** Bad resource requirement */
+    public static final int LSBE_BAD_RESREQ = 95;
+
+
+    /** Not enough hosts */
+    public static final int LSBE_NO_ENOUGH_HOST = 96;
+
+/** Error codes related to configuration lsblib call */
+
+    /** Fatal error in reading conf files */
+    public static final int LSBE_CONF_FATAL = 97;
+
+    /** Warning error in reading conf files */
+    public static final int LSBE_CONF_WARNING = 98;
+
+
+    /** CONF used calendar cannot be modified */
+    public static final int LSBE_CAL_MODIFY = 99;
+
+    /** Job created calendar cannot be modified */
+    public static final int LSBE_JOB_CAL_MODIFY = 100;
+    /** FAIRSHARE queue or HPART defined */
+    public static final int LSBE_HP_FAIRSHARE_DEF = 101;
+
+    /** No resource specified */
+    public static final int LSBE_NO_RESOURCE = 102;
+
+    /** Bad resource name */
+    public static final int LSBE_BAD_RESOURCE = 103;
+    /** Calendar not allowed for interactive job */
+    public static final int LSBE_INTERACTIVE_CAL = 104;
+    /** Interactive job cannot be rerunnable */
+    public static final int LSBE_INTERACTIVE_RERUN = 105;
+
+    /** PTY and infile specified */
+    public static final int LSBE_PTY_INFILE = 106;
+
+    /** JobScheduler is disabled */
+    public static final int LSBE_JS_DISABLED = 107;
+
+    /** Submission host and its host type can not be found any more */
+    public static final int LSBE_BAD_SUBMISSION_HOST = 108;
+    /** Lock the job so that it cannot be resumed by sbatchd */
+    public static final int LSBE_LOCK_JOB = 109;
+
+    /** user not in the user group */
+    public static final int LSBE_UGROUP_MEMBER = 110;
+    /** Operation not supported for a Multicluster job */
+    public static final int LSBE_UNSUPPORTED_MC = 111;
+    /** Operation permission denied for a Multicluster job */
+    public static final int LSBE_PERMISSION_MC = 112;
+
+    /** System Calendar exist already */
+    public static final int LSBE_SYSCAL_EXIST = 113;
+
+    /** exceed q's resource reservation */
+    public static final int LSBE_OVER_RUSAGE = 114;
+
+    /** bad host spec of run/cpu limits */
+    public static final int LSBE_BAD_HOST_SPEC = 115;
+
+    /** calendar syntax error */
+    public static final int LSBE_SYNTAX_CALENDAR = 116;
+
+    /** delete a used calendar */
+    public static final int LSBE_CAL_USED = 117;
+
+    /** cyclic calendar dependency */
+    public static final int LSBE_CAL_CYC = 118;
+
+    /** bad user group name */
+    public static final int LSBE_BAD_UGROUP = 119;
+
+    /** esub aborted request */
+    public static final int LSBE_ESUB_ABORT = 120;
+
+    /** Bad exception handler syntax */
+    public static final int LSBE_EXCEPT_SYNTAX = 121;
+    /** Bad exception condition specification */
+    public static final int LSBE_EXCEPT_COND = 122;
+    /** Bad or invalid action specification */
+    public static final int LSBE_EXCEPT_ACTION = 123;
+
+    /** job dependence, not deleted immed */
+    public static final int LSBE_JOB_DEP = 124;
+/** error codes for job group */
+
+    /** the job group exists */
+    public static final int LSBE_JGRP_EXIST = 125;
+
+    /** the job group doesn't exist */
+    public static final int LSBE_JGRP_NULL = 126;
+
+    /** the group contains jobs */
+    public static final int LSBE_JGRP_HASJOB = 127;
+
+    /** the unknown group control signal */
+    public static final int LSBE_JGRP_CTRL_UNKWN = 128;
+
+    /** Bad Job Group name */
+    public static final int LSBE_JGRP_BAD = 129;
+
+    /** Job Array */
+    public static final int LSBE_JOB_ARRAY = 130;
+
+    /** Suspended job not supported */
+    public static final int LSBE_JOB_SUSP = 131;
+
+    /** Forwarded job not supported */
+    public static final int LSBE_JOB_FORW = 132;
+
+    /** parent group is held */
+    public static final int LSBE_JGRP_HOLD = 133;
+
+    /** bad index */
+    public static final int LSBE_BAD_IDX = 134;
+
+    /** index too big */
+    public static final int LSBE_BIG_IDX = 135;
+
+    /** Job array does not exist */
+    public static final int LSBE_ARRAY_NULL = 136;
+
+    /** Void calendar */
+    public static final int LSBE_CAL_VOID = 137;
+
+    /** the job exists */
+    public static final int LSBE_JOB_EXIST = 138;
+
+    /** Job Element fail */
+    public static final int LSBE_JOB_ELEMENT = 139;
+
+    /** Bad jobId */
+    public static final int LSBE_BAD_JOBID = 140;
+
+    /** cannot change job name */
+    public static final int LSBE_MOD_JOB_NAME = 141;
+
+/** error codes for frame job */
+
+    /** Bad frame expression */
+    public static final int LSBE_BAD_FRAME = 142;
+
+    /** Frame index too long */
+    public static final int LSBE_FRAME_BIG_IDX = 143;
+
+    /** Frame index syntax error */
+    public static final int LSBE_FRAME_BAD_IDX = 144;
+
+
+    /** child process died */
+    public static final int LSBE_PREMATURE = 145;
+
+/** error code for user not in project group */
+
+    /** Invoker is not in project group */
+    public static final int LSBE_BAD_PROJECT_GROUP = 146;
+
+/** error code for user group / host group */
+
+    /** No host group defined in the system */
+    public static final int LSBE_NO_HOST_GROUP = 147;
+
+    /** No user group defined in the system */
+    public static final int LSBE_NO_USER_GROUP = 148;
+
+    /** Bad jobid index file format */
+    public static final int LSBE_INDEX_FORMAT = 149;
+
+/** error codes for IO_SPOOL facility */
+
+    /** source file does not exist */
+    public static final int LSBE_SP_SRC_NOT_SEEN = 150;
+
+    /** Number of failed spool hosts reached max */
+    public static final int LSBE_SP_FAILED_HOSTS_LIM = 151;
+
+    /** spool copy failed for this host*/
+    public static final int LSBE_SP_COPY_FAILED = 152;
+
+    /** fork failed */
+    public static final int LSBE_SP_FORK_FAILED = 153;
+
+    /** status of child is not available */
+    public static final int LSBE_SP_CHILD_DIES = 154;
+
+    /** child terminated with failure */
+    public static final int LSBE_SP_CHILD_FAILED = 155;
+
+    /** Unable to find a host for spooling */
+    public static final int LSBE_SP_FIND_HOST_FAILED = 156;
+
+    /** Cannot get $JOB_SPOOLDIR for this host */
+    public static final int LSBE_SP_SPOOLDIR_FAILED = 157;
+
+    /** Cannot delete spool file for this host */
+    public static final int LSBE_SP_DELETE_FAILED = 158;
+
+
+    /** Bad user priority */
+    public static final int LSBE_BAD_USER_PRIORITY = 159;
+
+    /** Job priority control undefined */
+    public static final int LSBE_NO_JOB_PRIORITY = 160;
+
+    /** Job has been killed & requeued */
+    public static final int LSBE_JOB_REQUEUED = 161;
+
+    /** Remote job cannot kill-requeued */
+    public static final int LSBE_JOB_REQUEUE_REMOTE = 162;
+
+    /** Cannot submit job array to a NQS queue */
+    public static final int LSBE_NQS_NO_ARRJOB = 163;
+
+/** error codes for EXT_JOB_STATUS */
+
+    /** No message available */
+    public static final int LSBE_BAD_EXT_MSGID = 164;
+
+    /** Not a regular file */
+    public static final int LSBE_NO_IFREG = 165;
+
+    /** MBD fail to create files in the directory*/
+    public static final int LSBE_BAD_ATTA_DIR = 166;
+
+    /** Fail to transfer data */
+    public static final int LSBE_COPY_DATA = 167;
+
+    /** Exceeded the data transfer limit for a message */
+    public static final int LSBE_JOB_ATTA_LIMIT = 168;
+
+    /** cannot resize a chunk job, cannot bswitch a run/wait job */
+    public static final int LSBE_CHUNK_JOB = 169;
+
+/** Error code used in communications with dlogd */
+
+
+    /** dlogd is already connected */
+    public static final int LSBE_DLOGD_ISCONN = 170;
+
+/** Error code for LANL3_1ST_HOST */
+
+    /** Multiple first execution host */
+    public static final int LSBE_MULTI_FIRST_HOST = 171;
+
+    /** Host group as first execution host */
+    public static final int LSBE_HG_FIRST_HOST = 172;
+
+    /** Host partition as first execution host */
+    public static final int LSBE_HP_FIRST_HOST = 173;
+
+    /** "others" as first execution host */
+    public static final int LSBE_OTHERS_FIRST_HOST = 174;
+
+/** error code for multi-cluster: remote only queue */
+
+    /** cannot specify exec host */
+    public static final int LSBE_MC_HOST = 175;
+
+    /** cannot specify repetitive job */
+    public static final int LSBE_MC_REPETITIVE = 176;
+
+    /** cannot be a chkpnt job */
+    public static final int LSBE_MC_CHKPNT = 177;
+
+    /** cannot specify exception */
+    public static final int LSBE_MC_EXCEPTION = 178;
+
+    /** cannot specify time event */
+    public static final int LSBE_MC_TIMEEVENT = 179;
+
+    /** Too few processors requested */
+    public static final int LSBE_PROC_LESS = 180;
+    /** bmod pending options and running options together towards running job */
+    public static final int LSBE_MOD_MIX_OPTS = 181;
+
+    /** cannot bmod remote running job */
+    public static final int LSBE_MOD_REMOTE = 182;
+    /** cannot bmod cpulimit without LSB_JOB_CPULIMIT defined */
+    public static final int LSBE_MOD_CPULIMIT = 183;
+    /** cannot bmod memlimit without LSB_JOB_MEMLIMIT defined */
+    public static final int LSBE_MOD_MEMLIMIT = 184;
+
+    /** cannot bmod err file name */
+    public static final int LSBE_MOD_ERRFILE = 185;
+
+    /** host is locked by master LIM*/
+    public static final int LSBE_LOCKED_MASTER = 186;
+    /** warning time period is invalid */
+    public static final int LSBE_WARNING_INVALID_TIME_PERIOD = 187;
+    /** either warning time period or warning action is not specified */
+    public static final int LSBE_WARNING_MISSING = 188;
+    /** The job arrays involved in  one to one dependency do not  have the same size. */
+    public static final int LSBE_DEP_ARRAY_SIZE = 189;
+
+    /** Not enough processors to be reserved (lsb_addreservation()) */
+    public static final int LSBE_FEWER_PROCS = 190;
+
+    /** Bad reservation ID */
+    public static final int LSBE_BAD_RSVID = 191;
+
+    /** No more reservation IDs can be used now */
+    public static final int LSBE_NO_RSVID = 192;
+
+    /** No hosts are exported */
+    public static final int LSBE_NO_EXPORT_HOST = 193;
+
+    /** Trying to control remote hosts*/
+    public static final int LSBE_REMOTE_HOST_CONTROL = 194;
+
+/*Can't open a remote host closed by the remote cluster admin */
+    public static final int LSBE_REMOTE_CLOSED = 195;
+
+    /** User suspended job */
+    public static final int LSBE_USER_SUSPENDED = 196;
+
+    /** Admin suspended job */
+    public static final int LSBE_ADMIN_SUSPENDED = 197;
+
+    /** Not a local host name in  bhost -e command */
+    public static final int LSBE_NOT_LOCAL_HOST = 198;
+
+    /** The host's lease is not active. */
+    public static final int LSBE_LEASE_INACTIVE = 199;
+
+    /** The advance reserved host is not on queue. */
+    public static final int LSBE_QUEUE_ADRSV = 200;
+
+    /** The specified host(s) is not exported. */
+    public static final int LSBE_HOST_NOT_EXPORTED = 201;
+
+    /** The user-specified host is not in the advance reservation */
+    public static final int LSBE_HOST_ADRSV = 202;
+
+    /** The remote cluster is not connected */
+    public static final int LSBE_MC_CONN_NONEXIST = 203;
+
+    /** The general resource limit broken */
+    public static final int LSBE_RL_BREAK = 204;
+
+/** ---- The following RMS errors are obsoleted in Eagle */
+
+    /** cannot submit a job with special topology requirement to a preemptive queue*/
+    public static final int LSBE_LSF2TP_PREEMPT = 205;
+
+    /** cannot submit a job with special topology requirement to a queue with slot reservation*/
+    public static final int LSBE_LSF2TP_RESERVE = 206;
+    /** cannot submit a job with special topology requirement to a queue with backfill */
+    public static final int LSBE_LSF2TP_BACKFILL = 207;
+    /** ---- The above RMS errors are obsoleted in Eagle */
+
+    /** nonexistent policy name */
+    public static final int LSBE_RSV_POLICY_NAME_BAD = 208;
+
+    /** All normal user has no privilege */
+    public static final int LSBE_RSV_POLICY_PERMISSION_DENIED = 209;
+
+    /** user has no privilege */
+    public static final int LSBE_RSV_POLICY_USER = 210;
+
+    /** user has no privilege to create reservation on host */
+    public static final int LSBE_RSV_POLICY_HOST = 211;
+
+    /** time window is not allowed by policy */
+    public static final int LSBE_RSV_POLICY_TIMEWINDOW = 212;
+
+    /** the feature is disabled */
+    public static final int LSBE_RSV_POLICY_DISABLED = 213;
+    /** the general limit related errors */
+
+    /** There are no general limit defined */
+    public static final int LSBE_LIM_NO_GENERAL_LIMIT = 214;
+
+    /** There are no resource usage */
+    public static final int LSBE_LIM_NO_RSRC_USAGE = 215;
+
+    /** Convert data error */
+    public static final int LSBE_LIM_CONVERT_ERROR = 216;
+
+    /** There are no qualified host found in cluster*/
+    public static final int LSBE_RSV_NO_HOST = 217;
+
+    /** Cannot modify job group on element of job array */
+    public static final int LSBE_MOD_JGRP_ARRAY = 218;
+
+    /** Cannot combine modify job group or service class option with others */
+    public static final int LSBE_MOD_MIX = 219;
+
+    /** the service class doesn't exist */
+    public static final int LSBE_SLA_NULL = 220;
+
+    /** Modify job group for job in service class is not supported*/
+    public static final int LSBE_MOD_JGRP_SLA = 221;
+
+    /** User or user group is not a member of the specified service class */
+    public static final int LSBE_SLA_MEMBER = 222;
+
+    /** There is no exceptional host found */
+    public static final int LSBE_NO_EXCEPTIONAL_HOST = 223;
+
+    /** warning action (signal) is invalid */
+    public static final int LSBE_WARNING_INVALID_ACTION = 224;
+
+
+    /** Extsched option syntax error */
+    public static final int LSBE_EXTSCHED_SYNTAX = 225;
+
+    /** SLA doesn't work with remote only queues */
+    public static final int LSBE_SLA_RMT_ONLY_QUEUE = 226;
+
+    /** Cannot modify service class on element of job array */
+    public static final int LSBE_MOD_SLA_ARRAY = 227;
+
+    /** Modify service class for job in job group is not supported*/
+    public static final int LSBE_MOD_SLA_JGRP = 228;
+
+    /** Max. Pending job error */
+    public static final int LSBE_MAX_PEND = 229;
+
+    /** System concurrent query exceeded */
+    public static final int LSBE_CONCURRENT = 230;
+
+    /** Requested feature not enabled */
+    public static final int LSBE_FEATURE_NULL = 231;
+
+
+    /** Host is already member of group */
+    public static final int LSBE_DYNGRP_MEMBER = 232;
+
+    /** Host is not a dynamic host */
+    public static final int LSBE_BAD_DYN_HOST = 233;
+
+    /** Host was not added with badmin hghostadd */
+    public static final int LSBE_NO_GRP_MEMBER = 234;
+
+    /** Cannot create job info file */
+    public static final int LSBE_JOB_INFO_FILE = 235;
+
+    /** Cannot modify rusage to a new || (or) expression after the job is dispatched */
+    public static final int LSBE_MOD_OR_RUSAGE = 236;
+
+    /** Bad host group name */
+    public static final int LSBE_BAD_GROUP_NAME = 237;
+
+    /** Bad host name */
+    public static final int LSBE_BAD_HOST_NAME = 238;
+
+    /** Bsub is not permitted on DT cluster */
+    public static final int LSBE_DT_BSUB = 239;
+
+
+    /** The parent symphony job/group was  gone when submitting jobs*/
+    public static final int LSBE_PARENT_SYM_JOB = 240;
+
+    /** The partition has no cpu allocated */
+    public static final int LSBE_PARTITION_NO_CPU = 241;
+
+    /** batch partition does not accept online jobs: obsolete */
+    public static final int LSBE_PARTITION_BATCH = 242;
+
+    /** online partition does not accept batch jobs */
+    public static final int LSBE_PARTITION_ONLINE = 243;
+
+    /** no batch licenses */
+    public static final int LSBE_NOLICENSE_BATCH = 244;
+
+    /** no online licenses */
+    public static final int LSBE_NOLICENSE_ONLINE = 245;
+
+    /** signal is not supported for service job */
+    public static final int LSBE_SIGNAL_SRVJOB = 246;
+
+    /** the begin time is not later than current time. */
+    public static final int LSBE_BEGIN_TIME_INVALID = 247;
+
+    /** the end time is not later than current time. */
+    public static final int LSBE_END_TIME_INVALID = 248;
+
+    /** Bad regular expression */
+    public static final int LSBE_BAD_REG_EXPR = 249;
+
+
+    /** Host group has regular expression */
+    public static final int LSBE_GRP_REG_EXPR = 250;
+
+    /** Host group have no member */
+    public static final int LSBE_GRP_HAVE_NO_MEMB = 251;
+
+    /** the application doesn't exist */
+    public static final int LSBE_APP_NULL = 252;
+
+    /** job's proclimit rejected by App */
+    public static final int LSBE_PROC_JOB_APP = 253;
+
+    /** app's proclimit rejected by Queue */
+    public static final int LSBE_PROC_APP_QUE = 254;
+
+    /** application name is too long */
+    public static final int LSBE_BAD_APPNAME = 255;
+
+    /** Over hard limit of queue */
+    public static final int LSBE_APP_OVER_LIMIT = 256;
+
+    /** Cannot remove default application */
+    public static final int LSBE_REMOVE_DEF_APP = 257;
+
+    /** Host is disabled by EGO */
+    public static final int LSBE_EGO_DISABLED = 258;
+
+    /** Host is a remote host. Remote hosts cannot be added to a local host group. */
+    public static final int LSBE_REMOTE_HOST = 259;
+
+    /** SLA is exclusive, only accept exclusive job. */
+    public static final int LSBE_SLA_EXCLUSIVE = 260;
+
+    /** SLA is non-exclusive, only accept non-exclusive job */
+    public static final int LSBE_SLA_NONEXCLUSIVE = 261;
+
+    /** The feature has already been started */
+    public static final int LSBE_PERFMON_STARTED = 262;
+
+    /** The feature has already been stopped */
+    public static final int LSBE_PERFMON_STOPED = 263;
+
+    /** Current sampling period is already set to %%s,seconds. Ignored*/
+    public static final int LSBE_PERFMON_PERIOD_SET = 264;
+
+    /** Default spool dir is disabled */
+    public static final int LSBE_DEFAULT_SPOOL_DIR_DISABLED = 265;
+
+    /** job belongs to an APS queue and cannot be moved */
+    public static final int LSBE_APS_QUEUE_JOB = 266;
+
+    /** job is not in an absolute priority enabled queue */
+    public static final int LSBE_BAD_APS_JOB = 267;
+
+    /** Wrong aps admin value */
+    public static final int LSBE_BAD_APS_VAL = 268;
+
+    /** Trying to delete a non-existent APS string */
+    public static final int LSBE_APS_STRING_UNDEF = 269;
+
+    /** A job cannot be assigned an SLA and an APS queue with factor FS */
+    public static final int LSBE_SLA_JOB_APS_QUEUE = 270;
+
+    /** bmod -aps | -apsn option cannot be mixed with other option */
+    public static final int LSBE_MOD_MIX_APS = 271;
+
+    /** specified ADMIN factor/system APS value out of range */
+    public static final int LSBE_APS_RANGE = 272;
+
+    /** specified ADMIN factor/system APS value is zero */
+    public static final int LSBE_APS_ZERO = 273;
+
+
+    /** res port is unknown */
+    public static final int LSBE_DJOB_RES_PORT_UNKNOWN = 274;
+
+    /** timeout on res communication */
+    public static final int LSBE_DJOB_RES_TIMEOUT = 275;
+
+    /** I/O error on remote stream */
+    public static final int LSBE_DJOB_RES_IOERR = 276;
+
+    /** res internal failure */
+    public static final int LSBE_DJOB_RES_INTERNAL_FAILURE = 277;
+
+
+    /** can not run outside LSF */
+    public static final int LSBE_DJOB_CAN_NOT_RUN = 278;
+
+    /** distributed job's validation failed due to incorrect job ID or index */
+    public static final int LSBE_DJOB_VALIDATION_BAD_JOBID = 279;
+
+    /** distributed job's validation failed due to incorrect host selection */
+    public static final int LSBE_DJOB_VALIDATION_BAD_HOST = 280;
+
+    /** distributed job's validation failed due to incorrect user */
+    public static final int LSBE_DJOB_VALIDATION_BAD_USER = 281;
+
+    /** failed while executing tasks */
+    public static final int LSBE_DJOB_EXECUTE_TASK = 282;
+
+    /** failed while waiting for tasks to finish*/
+    public static final int LSBE_DJOB_WAIT_TASK = 283;
+
+
+    /** HPC License not exist */
+    public static final int LSBE_APS_HPC = 284;
+
+    /** Integrity check of bsub command failed */
+    public static final int LSBE_DIGEST_CHECK_BSUB = 285;
+
+    /** Distributed Application Framework disabled */
+    public static final int LSBE_DJOB_DISABLED = 286;
+
+/** Error codes related to runtime estimation and cwd */
+
+    /** Bad runtime specification */
+    public static final int LSBE_BAD_RUNTIME = 287;
+
+    /** RUNLIMIT: Cannot exceed RUNTIME*JOB_RUNLIMIT_RATIO */
+    public static final int LSBE_BAD_RUNLIMIT = 288;
+
+    /** RUNTIME: Cannot exceed the hard runtime limit in the queue */
+    public static final int LSBE_OVER_QUEUE_LIMIT = 289;
+
+    /** RUNLIMIT: Is not set by command line */
+    public static final int LSBE_SET_BY_RATIO = 290;
+
+    /** current working directory name too long */
+    public static final int LSBE_BAD_CWD = 291;
+
+
+    /** Job group limit is greater than its parent group */
+    public static final int LSBE_JGRP_LIMIT_GRTR_THAN_PARENT = 292;
+
+    /** Job group limit is less than its children groups */
+    public static final int LSBE_JGRP_LIMIT_LESS_THAN_CHILDREN = 293;
+
+    /** Job Array end index should be specified explicitly */
+    public static final int LSBE_NO_ARRAY_END_INDEX = 294;
+
+    /** cannot bmod runtime without LSB_MOD_ALL_JOBS=y defined */
+    public static final int LSBE_MOD_RUNTIME = 295;
+
+    /** EP3 */
+    public static final int LSBE_BAD_SUCCESS_EXIT_VALUES = 296;
+    public static final int LSBE_DUP_SUCCESS_EXIT_VALUES = 297;
+    public static final int LSBE_NO_SUCCESS_EXIT_VALUES = 298;
+
+    public static final int LSBE_JOB_REQUEUE_BADARG = 299;
+    public static final int LSBE_JOB_REQUEUE_DUPLICATED = 300;
+
+    /** "all" with number */
+    public static final int LSBE_JOB_REQUEUE_INVALID_DIGIT = 301;
+
+    /** ~digit without "all" */
+    public static final int LSBE_JOB_REQUEUE_INVALID_TILDE = 302;
+    public static final int LSBE_JOB_REQUEUE_NOVALID = 303;
+
+
+    /** No matching job group found */
+    public static final int LSBE_NO_JGRP = 304;
+    public static final int LSBE_NOT_CONSUMABLE = 305;
+
+/** AR pre/post */
+
+    /** Cannot parse an Advance Reservation -exec string */
+    public static final int LSBE_RSV_BAD_EXEC = 306;
+
+    /** Unknown AR event type */
+    public static final int LSBE_RSV_EVENTTYPE = 307;
+
+    /** pre/post cannot have a positive offset */
+    public static final int LSBE_RSV_SHIFT = 308;
+
+    /** pre-AR command cannot have offset < 0 in user-created AR */
+    public static final int LSBE_RSV_USHIFT = 309;
+
+    /** only one pre- and one post- cmd permitted per AR */
+    public static final int LSBE_RSV_NUMEVENTS = 310;
+
+/*Error codes related to AR Modification*/
+
+    /** ID does not correspond to a known AR. */
+    public static final int LSBE_ADRSV_ID_VALID = 311;
+
+    /** disable non-recurrent AR. */
+    public static final int LSBE_ADRSV_DISABLE_NONRECUR = 312;
+
+    /** modification is rejected because AR is activated. */
+    public static final int LSBE_ADRSV_MOD_ACTINSTANCE = 313;
+
+    /** modification is rejected because host slots is not available. */
+    public static final int LSBE_ADRSV_HOST_NOTAVAIL = 314;
+
+    /** the  time of the AR cannot be modified since resource is not available. */
+    public static final int LSBE_ADRSV_TIME_MOD_FAIL = 315;
+
+    /** resource requirement (-R) must be followed by a slot requirement (-n) */
+    public static final int LSBE_ADRSV_R_AND_N = 316;
+
+/*modification is rejected because trying to empty the AR. */
+    public static final int LSBE_ADRSV_EMPTY = 317;
+
+/*modification is rejected because switching AR type. */
+    public static final int LSBE_ADRSV_SWITCHTYPE = 318;
+
+/*modification is rejected because specifying -n for system AR. */
+    public static final int LSBE_ADRSV_SYS_N = 319;
+
+    /** disable string is not valid. */
+    public static final int LSBE_ADRSV_DISABLE = 320;
+
+    /** Unique AR ID required */
+    public static final int LSBE_ADRSV_ID_UNIQUE = 321;
+
+    /** Bad reservation name */
+    public static final int LSBE_BAD_RSVNAME = 322;
+
+    /** Cannot change the start time of an active reservation. */
+    public static final int LSBE_ADVRSV_ACTIVESTART = 323;
+
+    /** AR ID is referenced by a job */
+    public static final int LSBE_ADRSV_ID_USED = 324;
+
+    /** the disable period has already been disabled */
+    public static final int LSBE_ADRSV_PREVDISABLED = 325;
+
+    /** an active period of a recurring reservation cannot be disabled */
+    public static final int LSBE_ADRSV_DISABLECURR = 326;
+
+    /** modification is rejected because specified hosts or host groups do not belong to the reservation */
+    public static final int LSBE_ADRSV_NOT_RSV_HOST = 327;
+
+/*new parser */
+
+/*checking resreq return ok */
+    public static final int LSBE_RESREQ_OK = 328;
+
+/*checking resreq return error */
+    public static final int LSBE_RESREQ_ERR = 329;
+
+
+    /** modification is rejected because reservation has running jobs on the specified hosts or host groups */
+    public static final int LSBE_ADRSV_HOST_USED = 330;
+
+
+    /** The checkpoint directory is too long */
+    public static final int LSBE_BAD_CHKPNTDIR = 331;
+
+    /** trying to modify in a remote cluster */
+    public static final int LSBE_ADRSV_MOD_REMOTE = 332;
+    public static final int LSBE_JOB_REQUEUE_BADEXCLUDE = 333;
+
+    /** trying to disable for a date in the past */
+    public static final int LSBE_ADRSV_DISABLE_DATE = 334;
+
+    /** cannot mix the -Un option with others for started jobs */
+    public static final int LSBE_ADRSV_DETACH_MIX = 335;
+
+    /** cannot detach a started job when the reservation is active */
+    public static final int LSBE_ADRSV_DETACH_ACTIVE = 336;
+
+    /** invalid time expression: must specify day for both start and end time */
+    public static final int LSBE_MISSING_START_END_TIME = 337;
+
+    /** Queue level limitation */
+    public static final int LSBE_JOB_RUSAGE_EXCEED_LIMIT = 338;
+
+    /** Queue level limitation */
+    public static final int LSBE_APP_RUSAGE_EXCEED_LIMIT = 339;
+
+    /** Hosts and host groups specified by -m are not used by the queue */
+    public static final int LSBE_CANDIDATE_HOST_EMPTY = 340;
+
+    /** An int must follow an open bracket */
+    public static final int LSBE_HS_BAD_AFTER_BRACKT = 341;
+
+    /** An end index must follow a dash */
+    public static final int LSBE_HS_NO_END_INDEX = 342;
+
+    /** Integers must come before and after the comma */
+    public static final int LSBE_HS_BAD_COMMA = 343;
+
+    /** Incorrect condensed host specification */
+    public static final int LSBE_HS_BAD_FORMAT = 344;
+
+    /** The start index must be less than end index */
+    public static final int LSBE_HS_BAD_ORDER = 345;
+
+    /** The end index must be less than 10 digits */
+    public static final int LSBE_HS_BAD_MANY_DIGITS = 346;
+
+    /** Number of digits in the start index must be less than that of end index */
+    public static final int LSBE_HS_BAD_NUM_DIGITS = 347;
+
+    /** The end index cannot start with zero (0) */
+    public static final int LSBE_HS_BAD_END_INDEX = 348;
+
+    /** Index must be an integer or a range */
+    public static final int LSBE_HS_BAD_INDEX = 349;
+
+/** host group admin*/
+
+    /** When a Host Group Admin (badmin hclose or hopen) closes or opens a host,  the usage of the -C "message" option must be compulsory, as is the logging  of the name of the person performing the action. */
+    public static final int LSBE_COMMENTS = 350;
+
+
+    /** First hosts specified by -m are not used by the queue */
+    public static final int LSBE_FIRST_HOSTS_NOT_IN_QUEUE = 351;
+
+
+    /** The job is not started */
+    public static final int LSBE_JOB_NOTSTART = 352;
+
+    /** Accumulated runtime of the job is not available */
+    public static final int LSBE_RUNTIME_INVAL = 353;
+
+    /** SSH feature can only be used for interactive job */
+    public static final int LSBE_SSH_NOT_INTERACTIVE = 354;
+
+    /** Run time specification is less than the accumulated run time */
+    public static final int LSBE_LESS_RUNTIME = 355;
+
+    /** Resize job notification command */
+    public static final int LSBE_RESIZE_NOTIFY_CMD_LEN = 356;
+
+    /** Job is not resizable */
+    public static final int LSBE_JOB_RESIZABLE = 357;
+
+    /** Bad bresize release host spec */
+    public static final int LSBE_RESIZE_RELEASE_HOSTSPEC = 358;
+
+    /** no resize notify matches in mbatchd*/
+    public static final int LSBE_NO_RESIZE_NOTIFY = 359;
+
+    /** Can't release first exec host */
+    public static final int LSBE_RESIZE_RELEASE_FRISTHOST = 360;
+
+    /** resize event in progress */
+    public static final int LSBE_RESIZE_EVENT_INPROGRESS = 361;
+
+    /** too few or too many slots */
+    public static final int LSBE_RESIZE_BAD_SLOTS = 362;
+
+    /** No active resize request */
+    public static final int LSBE_RESIZE_NO_ACTIVE_REQUEST = 363;
+
+    /** specified host not part of the job's allocation*/
+    public static final int LSBE_HOST_NOT_IN_ALLOC = 364;
+
+    /** nothing released */
+    public static final int LSBE_RESIZE_RELEASE_NOOP = 365;
+
+    /** Can't resize a brun job */
+    public static final int LSBE_RESIZE_URGENT_JOB = 366;
+    public static final int LSBE_RESIZE_EGO_SLA_COEXIST = 367;
+
+    /** hpc jobs can't be resized */
+    public static final int LSBE_HOST_NOT_SUPPORT_RESIZE = 368;
+
+    /** Application doesn't allow resizable */
+    public static final int LSBE_APP_RESIZABLE = 369;
+
+    /** can't operate on lost & found hosts*/
+    public static final int LSBE_RESIZE_LOST_AND_FOUND = 370;
+
+    /** can't resize while the first host is lost & found*/
+    public static final int LSBE_RESIZE_FIRSTHOST_LOST_AND_FOUND = 371;
+
+    /** bad host name (for resize) */
+    public static final int LSBE_RESIZE_BAD_HOST = 372;
+
+    /** proper app is required by an auto-resizable job */
+    public static final int LSBE_AUTORESIZE_APP = 373;
+
+    /** cannot resize job because there is a pending resize request */
+    public static final int LSBE_RESIZE_PENDING_REQUEST = 374;
+
+    /** number of hosts specified by -m exceeding configuration */
+    public static final int LSBE_ASKED_HOSTS_NUMBER = 375;
+
+    /** All hosts reserved by advanced reservation are invalid in intersected hosts */
+    public static final int LSBE_AR_HOST_EMPTY = 376;
+
+    /** First hosts specified by -m are not used by advanced reservation */
+    public static final int LSBE_AR_FIRST_HOST_EMPTY = 377;
+
+    /** Internal jobbroker error */
+    public static final int LSBE_JB = 378;
+
+    /** Internal jobbroker database library error */
+    public static final int LSBE_JB_DBLIB = 379;
+
+    /** Jobbroker cannot reach database */
+    public static final int LSBE_JB_DB_UNREACH = 380;
+
+    /** Jobbroker cannot reach mbatchd */
+    public static final int LSBE_JB_MBD_UNREACH = 381;
+
+    /** BES server returned an error */
+    public static final int LSBE_JB_BES = 382;
+
+    /** Unsupported BES operation */
+    public static final int LSBE_JB_BES_UNSUPPORTED_OP = 383;
+
+    /** invalid LS project name*/
+    public static final int LSBE_LS_PROJECT_NAME = 384;
+
+    /** the end time is not later than start  time. */
+    public static final int LSBE_END_TIME_INVALID_COMPARE_START = 385;
+
+    /** one host cannot be defined in more than one host partition.*/
+    public static final int LSBE_HP_REDUNDANT_HOST = 386;
+
+    /** The application level compound resreq causes slots requirements conflict */
+    public static final int LSBE_COMPOUND_APP_SLOTS = 387;
+
+    /** The queue level compound resreq causes slots requirements conflict */
+    public static final int LSBE_COMPOUND_QUEUE_SLOTS = 388;
+
+    /** Resizable job cannot work with compound resreq */
+    public static final int LSBE_COMPOUND_RESIZE = 389;
+/** compute unit support */
+
+    /** Compute units cannot have overlapping hosts */
+    public static final int LSBE_CU_OVERLAPPING_HOST = 390;
+
+    /** The compute unit cannot contain other compute units */
+    public static final int LSBE_CU_BAD_HOST = 391;
+
+    /** The compute unit cannot contain host or host group as a member */
+    public static final int LSBE_CU_HOST_NOT_ALLOWED = 392;
+
+    /** Only lowest level compute units are allowed to add hosts as a member */
+    public static final int LSBE_CU_NOT_LOWEST_LEVEL = 393;
+
+    /** You cannot modify a compute unit resource requirement when a job is already running */
+    public static final int LSBE_CU_MOD_RESREQ = 394;
+
+    /** A compute unit resource requirement cannot be specified for auto resizable jobs */
+    public static final int LSBE_CU_AUTORESIZE = 395;
+
+    /** No COMPUTE_UNIT_TYPES are specified in lsb.params */
+    public static final int LSBE_NO_COMPUTE_UNIT_TYPES = 396;
+
+    /** No compute unit defined in the system */
+    public static final int LSBE_NO_COMPUTE_UNIT = 397;
+
+    /** No such compute unit defined in the system */
+    public static final int LSBE_BAD_COMPUTE_UNIT = 398;
+
+    /** The queue is not configured to accept exclusive compute unit jobs */
+    public static final int LSBE_CU_EXCLUSIVE = 399;
+
+    /** The queue is not configured to accept higher level of exclusive compute unit jobs */
+    public static final int LSBE_CU_EXCLUSIVE_LEVEL = 400;
+
+    /** Job cannot be switched due to the exclusive compute unit requirement */
+    public static final int LSBE_CU_SWITCH = 401;
+
+    /** Job level compound resreq causes slots requirements conflict */
+    public static final int LSBE_COMPOUND_JOB_SLOTS = 402;
+
+    /** "||" used in rusage[] of queue resource requirement. It's conflict with job level compound resource requirement */
+    public static final int LSBE_COMPOUND_QUEUE_RUSAGE_OR = 403;
+
+    /** balance and usablecuslots cannot both be used in a compute unit resource requirement */
+    public static final int LSBE_CU_BALANCE_USABLECUSLOTS = 404;
+
+    /** TS jobs cannot use compound resource requirement (application level) */
+    public static final int LSBE_COMPOUND_TSJOB_APP = 405;
+
+    /** TS jobs cannot use compound resource requirement (queue level) */
+    public static final int LSBE_COMPOUND_TSJOB_QUEUE = 406;
+    /** Job dependency conditions using a job name or job name wild-card exceed limitation set by MAX_JOB_NAME_DEP in lsb.params */
+    public static final int LSBE_EXCEED_MAX_JOB_NAME_DEP = 407;
+
+    /** "is waiting for the remote cluster to synchronize." */
+    public static final int LSBE_WAIT_FOR_MC_SYNC = 408;
+
+    /** Job cannot exceed queue level RESRSV_LIMIT limitation */
+    public static final int LSBE_RUSAGE_EXCEED_RESRSV_LIMIT = 409;
+
+    /** job description too long */
+    public static final int LSBE_JOB_DESCRIPTION_LEN = 410;
+
+    /** Cannot use simulation options */
+    public static final int LSBE_NOT_IN_SIMMODE = 411;
+
+    /** Value of runtime simulation is incorrect */
+    public static final int LSBE_SIM_OPT_RUNTIME = 412;
+
+    /** Value of cputime simulation is incorrect */
+    public static final int LSBE_SIM_OPT_CPUTIME = 413;
+
+    /** Incorrect maxmem simulation opt */
+    public static final int LSBE_SIM_OPT_MAXMEM = 414;
+
+    /** Incorrect job exitstatus simulation opt */
+    public static final int LSBE_SIM_OPT_EXITSTATUS = 415;
+
+    /** Incorrect job simulation option syntax */
+    public static final int LSBE_SIM_OPT_SYNTAX = 416;
+
+    /** Number of the above error codes */
+    public static final int LSBE_NUM_ERR = 417;
+
+    /**
+     * *****************************************************
+     */
+
+/* op codes for hand shake protocol between client/server */
+    public static final int PREPARE_FOR_OP = 1024;
+    public static final int READY_FOR_OP = 1023;
+
+/*
+*  Data structures for lsblib interface
+ */
+
+
+    /**
+     * \addtogroup lsb_submit_options lsb_submit_options
+     * define statements used by lsb_submit.
+     */
+
+/* lsb_submit() options */
+    /**
+     * < Flag to indicate jobName parameter has data. Equivalent to bsub -J command line option existence.
+     */
+    public static final int SUB_JOB_NAME = 0x01;
+    /**
+     * < Flag to indicate queue parameter has data. Equivalent to bsub -q command line option existence.
+     */
+    public static final int SUB_QUEUE = 0x02;
+    /**
+     * < Flag to indicate numAskedHosts parameter has data. Equivalent to bsub -m command line option existence.
+     */
+    public static final int SUB_HOST = 0x04;
+    /**
+     * < Flag to indicate inFile parameter has data. Equivalent to bsub -i command line option existence.
+     */
+    public static final int SUB_IN_FILE = 0x08;
+    /**
+     * < Flag to indicate outFile parameter has data. Equivalent to bsub -o command line option existence.
+     */
+    public static final int SUB_OUT_FILE = 0x10;
+    /**
+     * < Flag to indicate errFile parameter has data. Equivalent to bsub -e command line option existence.
+     */
+    public static final int SUB_ERR_FILE = 0x20;
+    /**
+     * < Flag to indicate execution of a job on a host by itself requested. Equivalent to bsub -x command line option existence.
+     */
+    public static final int SUB_EXCLUSIVE = 0x40;
+    /**
+     * < Flag to indicate whether to send mail to the user when the job finishes. Equivalent to bsub -N command line option existence.
+     */
+    public static final int SUB_NOTIFY_END = 0x80;
+    /**
+     * < Flag to indicate whether to send mail to the user when the job is dispatched. Equivalent to bsub -B command line option existence.
+     */
+    public static final int SUB_NOTIFY_BEGIN = 0x100;
+    /**
+     * < Flag to indicate userGroup name parameter has data. Equivalent to bsub -G command line option existence.
+     */
+    public static final int SUB_USER_GROUP = 0x200;
+    /**
+     * < Flag to indicate chkpntPeriod parameter has data. Equivalent to bsub -k command line option existence.
+     */
+    public static final int SUB_CHKPNT_PERIOD = 0x400;
+    /**
+     * < Flag to indicate chkpntDir parameter has data. Equivalent to bsub -k command line option existence.
+     */
+    public static final int SUB_CHKPNT_DIR = 0x800;
+    /**
+     * < Indicates the job is checkpointable. Equivalent to bsub -k command line option.
+     */
+    public static final int SUB_CHKPNTABLE = SUB_CHKPNT_DIR;
+    /**
+     * < Flag to indicate whether to force the job to restart even if non-restartable conditions exist. These conditions are operating system specific. Equivalent to brestart() -f command line option existence.
+     */
+    public static final int SUB_RESTART_FORCE = 0x1000;
+    /**
+     * < Flag to indicate restart of a
+     * checkpointed job. Only jobs that have been successfully checkpointed
+     * can be restarted. Jobs are re-submitted and assigned a new job ID.
+     * By default, jobs are restarted with the same output file, file
+     * transfer specifications, job name, window signal value, checkpoint
+     * directory and period, and rerun options as the original job. To
+     * restart a job on another host, both hosts must be binary compatible,
+     * run the same OS version, have access to the executable, have access
+     * to all open files (LSF must locate them with an absolute path name),
+     * and have access to the checkpoint directory. Equivalent to bsub -k
+     * command line option existence.
+     */
+    public static final int SUB_RESTART = 0x2000;
+    /**
+     * < Indicates the job is re-runnable.
+     * If the execution host of the job is considered down, the batch
+     * system will re-queue this job in the same job queue, and re-run
+     * it from the beginning when a suitable host is found. Everything
+     * will be as if it were submitted as a new job, and a new job ID will
+     * be assigned. The user who submitted the failed job will receive a
+     * mail notice of the job failure, requeueing of the job, and the
+     * new job ID.
+     * <p/>
+     * For a job that was checkpointed before the execution host went down,
+     * the job will be restarted from the last checkpoint. Equivalent to
+     * bsub -r command line option existence.
+     */
+    public static final int SUB_RERUNNABLE = 0x4000;
+    /**
+     * < Flag to indicate sigValue parameter
+     * has data. Sends a signal as the queue window closes.
+     */
+    public static final int SUB_WINDOW_SIG = 0x8000;
+    /**
+     * < Flag to indicate hostSpec parameter
+     * has data.
+     */
+    public static final int SUB_HOST_SPEC = 0x10000;
+    /**
+     * < Flag to indicate dependCond parameter
+     * has data. Equivalent to bsub -w command line option existence.
+     */
+    public static final int SUB_DEPEND_COND = 0x20000;
+    /**
+     * < Flag to indicate resReq parameter
+     * has data. Equivalent to bsub -R command line option existence.
+     */
+    public static final int SUB_RES_REQ = 0x40000;
+    /**
+     * < Flag to indicate nxf parameter and structure xf have data.
+     */
+    public static final int SUB_OTHER_FILES = 0x80000;
+    /**
+     * < Flag to indicate preExecCmd
+     * parameter has data. Equivalent to bsub -E command line option
+     * existence.
+     */
+    public static final int SUB_PRE_EXEC = 0x100000;
+    /**
+     * < Equivalent to bsub -L command line option existence.
+     */
+    public static final int SUB_LOGIN_SHELL = 0x200000;
+    /**
+     * < Flag to indicate mailUser parameter has data.
+     */
+    public static final int SUB_MAIL_USER = 0x400000;
+    /**
+     * < Flag to indicate newCommand parameter has data. Equivalent to bmod bsub_options existence.
+     */
+    public static final int SUB_MODIFY = 0x800000;
+    /**
+     * < Flag to indicate modify option once.
+     */
+    public static final int SUB_MODIFY_ONCE = 0x1000000;
+    /**
+     * < Flag to indicate ProjectName
+     * parameter has data . Equivalent to bsub -P command line option
+     * existence.
+     */
+    public static final int SUB_PROJECT_NAME = 0x2000000;
+    /**
+     * < Indicates that the job is submitted
+     * as a batch interactive job. When this flag is given, \ref lsb_submit
+     * does not return unless an error occurs during the submission process.
+     * When the job is started, the user can interact with the job's
+     * standard input and output via the terminal. See the -I option
+     * in bsub for the description of a batch interactive job. Unless
+     * the SUB_PTY flag is specified, the job will run without a
+     * pseudo-terminal. Equivalent to bsub -I command line option.
+     */
+    public static final int SUB_INTERACTIVE = 0x4000000;
+    /**
+     * < Requests pseudo-terminal support
+     * for a job submitted with the SUB_INTERACTIVE flag. This flag is
+     * ignored if SUB_INTERACTIVE is not specified. A pseudo-terminal
+     * is required to run some applications (such as: vi). Equivalent to
+     * bsub -Ip command line option.
+     */
+    public static final int SUB_PTY = 0x8000000;
+    /**< Requests pseudo-terminal shell
+     *  mode support for a job submitted with the SUB_INTERACTIVE and
+     *  SUB_PTY flags. This flag is ignored if SUB_INTERACTIVE and SUB_PTY
+     *  are not specified. This flag should be specified for submitting
+     *  interactive shells, or applications which redefine the ctrl-C and
+     *  ctrl-Z keys (such as: jove). Equivalent to bsub -Is
+     *  command line option. */
+    public static final int SUB_PTY_SHELL = 0x10000000;
+
+    /**
+     * < Exception handler for job.
+     */
+    public static final int SUB_EXCEPT = 0x20000000;
+
+    /**
+     * < Specifies time_event.
+     */
+    public static final int SUB_TIME_EVENT = 0x40000000;
+/* the last bit 0x80000000 is reserved for internal use */
+
+    /**
+     * \addtogroup lsb_submit_options2 lsb_submit_options2
+     * define statements used by \ref lsb_submit.
+     */
+
+    /**< Hold the job after it is submitted. The job will be in PSUSP status. Equivalent to bsub -H command line option. */
+    public static final int SUB2_HOLD = 0x01;
+
+    /**
+     * < New cmd for bmod.
+     */
+    public static final int SUB2_MODIFY_CMD = 0x02;
+
+    /**//* Removed access to SUB2_BSUB_BLOCK since it exits the process (including the JVM) with the exit code of the submitted job. -kshakir December 14, 2010
+     * < Submit a job in a synchronous
+     * mode so that submission does not return until the job terminates.
+     * Note once this flag is set, the \ref lsb_submit will never return if
+     * the job is accepted by LSF. Programs that wish to know the status
+     * of the submission need to fork, with the child process invoking the
+     * API call in the blocking mode and the parent process waiting on the
+     * child process (see wait() for details).
+     */
+    //public static final int SUB2_BSUB_BLOCK = 0x04;
+
+    /**
+     * < Submit from NT.
+     */
+    public static final int SUB2_HOST_NT = 0x08;
+
+    /**
+     * < Submit from UNIX.
+     */
+    public static final int SUB2_HOST_UX = 0x10;
+
+    /**
+     * < Submit to a chkpntable (i.e. checkpointable) queue.
+     */
+    public static final int SUB2_QUEUE_CHKPNT = 0x20;
+
+    /**
+     * < Submit to a rerunnable queue.
+     */
+    public static final int SUB2_QUEUE_RERUNNABLE = 0x40;
+
+    /**
+     * < Spool job command.
+     */
+    public static final int SUB2_IN_FILE_SPOOL = 0x80;
+
+    /**
+     * < Inputs the specified file with spooling
+     */
+    public static final int SUB2_JOB_CMD_SPOOL = 0x100;
+
+    /**
+     * < Submits job with priority.
+     */
+    public static final int SUB2_JOB_PRIORITY = 0x200;
+
+    /**
+     * < Job submitted without -n, use queue's default proclimit
+     */
+    public static final int SUB2_USE_DEF_PROCLIMIT = 0x400;
+
+    /**
+     * < bmod -c/-M/-W/-o/-e
+     */
+    public static final int SUB2_MODIFY_RUN_JOB = 0x800;
+
+    /**
+     * < bmod options only to pending jobs
+     */
+    public static final int SUB2_MODIFY_PEND_JOB = 0x1000;
+
+    /**
+     * < Job action warning time. Equivalent to bsub or bmod -wt.
+     */
+    public static final int SUB2_WARNING_TIME_PERIOD = 0x2000;
+
+    /**
+     * < Job action to be taken before a job control action occurs. Equivalent to bsub or bmod -wa.
+     */
+    public static final int SUB2_WARNING_ACTION = 0x4000;
+
+    /**
+     * < Use an advance reservation created with the brsvadd command. Equivalent to bsub -U.
+     */
+    public static final int SUB2_USE_RSV = 0x8000;
+
+    /**
+     * < Windows Terminal Services job
+     */
+    public static final int SUB2_TSJOB = 0x10000;
+
+/* SUB2_LSF2TP is obsolete in Eagle. We keep it here for backward
+*  compatibility */
+
+    /**
+     * < Parameter is deprecated
+     */
+    public static final int SUB2_LSF2TP = 0x20000;
+
+    /**
+     * < Submit into a job group
+     */
+    public static final int SUB2_JOB_GROUP = 0x40000;
+
+    /**
+     * < Submit into a service class
+     */
+    public static final int SUB2_SLA = 0x80000;
+
+    /**
+     * < Submit with -extsched options
+     */
+    public static final int SUB2_EXTSCHED = 0x100000;
+
+    /**
+     * < License Scheduler project
+     */
+    public static final int SUB2_LICENSE_PROJECT = 0x200000;
+
+    /**
+     * < Overwrite the standard output of the job. Equivalent to bsub -oo.
+     */
+    public static final int SUB2_OVERWRITE_OUT_FILE = 0x400000;
+
+    /**
+     * < Overwrites the standard error output of the job. Equivalent to bsub -eo.
+     */
+    public static final int SUB2_OVERWRITE_ERR_FILE = 0x800000;
+
+/* Following are for symphony submission definition.
+*  Note that SYM_GRP is an LSF job, which represents a symphony group.
+ */
+
+    /**
+     * < (symphony) session job
+     */
+    public static final int SUB2_SSM_JOB = 0x1000000;
+
+    /**
+     * < (symphony) symphony job
+     */
+    public static final int SUB2_SYM_JOB = 0x2000000;
+
+    /**
+     * < (symphony) service(LSF) job
+     */
+    public static final int SUB2_SRV_JOB = 0x4000000;
+
+    /**
+     * < (symphony) "group" job
+     */
+    public static final int SUB2_SYM_GRP = 0x8000000;
+
+    /**
+     * < (symphony) symphony job has child symphony job
+     */
+    public static final int SUB2_SYM_JOB_PARENT = 0x10000000;
+
+    /**
+     * < (symphony) symphony job has real time feature
+     */
+    public static final int SUB2_SYM_JOB_REALTIME = 0x20000000;
+
+    /**
+     * < (symphony) symphony job has dummy feature to hold all persistent service jobs.
+     */
+    public static final int SUB2_SYM_JOB_PERSIST_SRV = 0x40000000;
+
+    /**
+     * < Persistent session job
+     */
+    public static final int SUB2_SSM_JOB_PERSIST = 0x80000000;
+
+    /**
+     *  \addtogroup lsb_submit_options3 lsb_submit_options3
+     *  define statements used by \ref lsb_submit.
+     */
+
+    /**
+     * < Application profile name. Equivalent to bsub -app.
+     */
+    public static final int SUB3_APP = 0x01;
+
+    /**
+     * < Job rerunnable because of application profile
+     */
+    public static final int SUB3_APP_RERUNNABLE = 0x02;
+
+    /**
+     * < Job modified with absolute priority. Equivalent to bmod -aps.
+     */
+    public static final int SUB3_ABSOLUTE_PRIORITY = 0x04;
+
+    /**
+     * < Submit into a default job group. Equivalent to bsub -g.
+     */
+    public static final int SUB3_DEFAULT_JOBGROUP = 0x08;
+
+    /**
+     * < Run the specified post-execution command on the execution host after the job finishes. Equivalent to bsub -Ep.
+     */
+    public static final int SUB3_POST_EXEC = 0x10;
+    /**
+     * < Pass user shell limits to execution host. Equivalent to bsub -ul.
+     */
+    public static final int SUB3_USER_SHELL_LIMITS = 0x20;
+    /**
+     * < Current working directory specified on the command line with bsub -cwd
+     */
+    public static final int SUB3_CWD = 0x40;
+    /**< Runtime estimate. Equivalent to bsub -We. Use in conjunction with SUB3_RUNTIME_ESTIMATION_ACC and SUB3_RUNTIME_ESTIMATION_PERC. */
+    public static final int SUB3_RUNTIME_ESTIMATION = 0x80;
+
+    /**
+     * < Job is not rerunnable. Equivalent to bsub -rn.
+     */
+    public static final int SUB3_NOT_RERUNNABLE = 0x100;
+
+    /**
+     * < Job level requeue exit values.
+     */
+    public static final int SUB3_JOB_REQUEUE = 0x200;
+    /**
+     * < Initial checkpoint period. Equivalent to bsub -k initial_checkpoint_period.
+     */
+    public static final int SUB3_INIT_CHKPNT_PERIOD = 0x400;
+    /**< Job migration threshold. Equivalent to bsub -mig migration_threshold. */
+    public static final int SUB3_MIG_THRESHOLD = 0x800;
+
+    /**
+     * < Checkpoint dir was set by application profile
+     */
+    public static final int SUB3_APP_CHKPNT_DIR = 0x1000;
+    /**
+     * < Value of BSUB_CHK_RESREQ environment variable, used for select section resource requirement string syntax checking with bsub -R. bsub only checks the resreq syntax.
+     */
+    public static final int SUB3_BSUB_CHK_RESREQ = 0x2000;
+    /**
+     * < Runtime estimate that is the accumulated run time plus the runtime estimate. Equivalent to bmod -We+. Use in conjunction with SUB3_RUNTIME_ESTIMATION.
+     */
+    public static final int SUB3_RUNTIME_ESTIMATION_ACC = 0x4000;
+    /**
+     * < Runtime estimate in percentage of completion. Equivalent to bmod -Wep. Two digits after the decimal point are supported. The highest eight bits of runtimeEstimation in the submit structure are used for the integer; the remaining bits are used for the fraction. Use in conjunction with SUB3_RUNTIME_ESTIMATION.
+     */
+    public static final int SUB3_RUNTIME_ESTIMATION_PERC = 0x8000;
+
+    /**
+     * < Protects the sessions of interactive jobs with SSH encryption. Equivalent to bsub -IS|-ISp|-ISs.
+     */
+    public static final int SUB3_INTERACTIVE_SSH = 0x10000;
+    /**< Protect the sessions of interactive x-window job with SSH encryption. Equivalent to bsub -IX.*/
+    public static final int SUB3_XJOB_SSH = 0x20000;
+
+    /**
+     * < If set the submitted job is auto-resizable
+     */
+    public static final int SUB3_AUTO_RESIZE = 0x40000;
+
+    /**
+     * < If set, the resize notify cmd specified
+     */
+    public static final int SUB3_RESIZE_NOTIFY_CMD = 0x80000;
+
+
+    /**
+     * < Job broker bulk submit
+     */
+    public static final int SUB3_BULK_SUBMIT = 0x100000;
+
+    /**
+     * < tty mode for interactive job
+     */
+    public static final int SUB3_INTERACTIVE_TTY = 0x200000;
+
+    /**
+     * < Job submitted from floating client
+     */
+    public static final int SUB3_FLOATING_CLIENT = 0x400000;
+
+    /**
+     * < ssh X11 forwarding (bsub -XF)
+     */
+    public static final int SUB3_XFJOB = 0x800000;
+
+    /**
+     * < ssh X11 forwarding (bsub -XF) without bsub -I...
+     */
+    public static final int SUB3_XFJOB_EXCLUSIVE = 0x1000000;
+
+    /**
+     * < Job description.
+     */
+    public static final int SUB3_JOB_DESCRIPTION = 0x2000000;
+
+    /**
+     * < Job submitted from floating client (NOTE(review): this text is duplicated from SUB3_FLOATING_CLIENT and looks like a copy-paste slip; the flag name suggests an LSF-simulator job — verify against lsbatch.h)
+     */
+    public static final int SUB3_SIMULATION = 0x4000000;
+
+/* Check whether a job is symphony job. These macros should be used by all
+*  components, including ("submit" actually):
+*    - mbatchd: jData->submitReq
+*    - sbatchd: jobCard->jobSpecs
+*    - API: lsb_submit() and lsb_readjobinfo()
+ */
+
+    public static boolean IS_SSM_JOB(int option) {
+        return JNAUtils.toBoolean((option) & SUB2_SSM_JOB);
+    }
+
+    public static boolean IS_SSM_JOB_PERSIST(int option) {
+        return JNAUtils.toBoolean((option) & SUB2_SSM_JOB_PERSIST);
+    }
+
+    public static boolean IS_SYM_JOB(int option) {
+        return JNAUtils.toBoolean((option) & SUB2_SYM_JOB);
+    }
+
+    public static boolean IS_SYM_JOB_PARENT(int option) {
+        return JNAUtils.toBoolean((option) & SUB2_SYM_JOB_PARENT);
+    }
+
+    public static boolean IS_SYM_JOB_REALTIME(int option) {
+        return JNAUtils.toBoolean((option) & SUB2_SYM_JOB_REALTIME);
+    }
+
+    public static boolean IS_SYM_JOB_PERSIST_SRV(int option) {
+        return JNAUtils.toBoolean((option) & SUB2_SYM_JOB_PERSIST_SRV);
+    }
+
+    public static boolean IS_SRV_JOB(int option) {
+        return JNAUtils.toBoolean((option) & SUB2_SRV_JOB);
+    }
+
+    public static boolean IS_SYM_GRP(int option) {
+        return JNAUtils.toBoolean((option) & SUB2_SYM_GRP);
+    }
+
+    public static boolean IS_SYM_JOB_OR_SYM_GRP (int option)  { return (IS_SYM_JOB(option) || IS_SYM_GRP(option)); }
+/* symphony job for which resource usage should be collected */
+    public static boolean IS_REAL_SYM_JOB (int option)  { return (IS_SYM_JOB(option) && !IS_SYM_JOB_PERSIST_SRV(option)); }
+
+    public static boolean IS_WLM_JOB (int option)  { return (IS_SSM_JOB(option) || IS_SYM_JOB(option) || IS_SRV_JOB(option) || IS_SYM_GRP(option)); }
+    public static boolean IS_BATCH_JOB (int option)  { return (!IS_WLM_JOB(option)); }
+/* job for which resource usage should be collected */
+    public static boolean IS_JOB_FOR_ACCT (int option)  { return (IS_REAL_SYM_JOB(option) || IS_BATCH_JOB(option)); }
+
+    public static boolean IS_JOB_FOR_SYM (int option)  { return (IS_SYM_JOB(option) || IS_SRV_JOB(option) || IS_SYM_GRP(option)); }
+
+/* Don't send IS_SYM_JOB/IS_SYM_GRP jobs to scheduler;
+*  neither publish events nor brun the job allowed.
+ */
+    // NOTE: Don't know what this jp struct is.
+    //public static boolean IS_SYM_JOB_OR_GRP (int jp)   { return (   (jp) != null && (jp)->shared != null && (  IS_SYM_JOB((jp)->shared->jobBill.options2) ||IS_SYM_GRP((jp)->shared->jobBill.options2))); }
+
+/* name of the lost and find queue and host */
+    public static final String LOST_AND_FOUND = "lost_and_found";
+
+    public static final int DELETE_NUMBER = -2;
+    public static final int DEL_NUMPRO = LibLsf.INFINIT_INT;
+    public static final int DEFAULT_NUMPRO = LibLsf.INFINIT_INT - 1;
+    /**
+     *  \addtogroup calendar_command  calendar_command
+     *  options  for user calendar commands
+     */
+
+    /**
+     * < Add calendar
+     */
+    public static final int CALADD = 1;
+
+    /**
+     * < Modify calendar
+     */
+    public static final int CALMOD = 2;
+
+    /**
+     * < Delete calendar
+     */
+    public static final int CALDEL = 3;
+
+    /**
+     * < Undelete calendar
+     */
+    public static final int CALUNDEL = 4;
+
+    /**
+     * < Calendar occurrences
+     */
+    public static final int CALOCCS = 5;
+
+/* for user event commands */
+    public static final int EVEADD = 1;
+    public static final int EVEMOD = 2;
+    public static final int EVEDEL = 3;
+
+    public static final int PLUGIN_REQUEUE = 126;
+    public static final int PLUGIN_EXIT = 125;
+
+    /**
+     * \brief  xFile
+     */
+    public static class xFile extends Structure {
+        public static class ByReference extends xFile implements Structure.ByReference {}
+        public static class ByValue extends xFile implements Structure.ByValue {}
+        public xFile() {}
+        public xFile(Pointer p) { super(p); read(); }
+
+
+        /**
+         * < Pathname at submission host
+         */
+        public String subFn;
+
+        /**
+         * < Pathname at execution host
+         */
+        public String execFn;
+        /**
+         *  \addtogroup defs_lsb_XF_OP defs_lsb_XF_OP
+         *  options  xFile operation
+         */
+
+        /**
+         * < Transfer files from submit peer to  execution peer
+         */
+        public static final int XF_OP_SUB2EXEC = 0x1;
+
+        /**
+         * < Transfer files from execution peer to  submit peer
+         */
+        public static final int XF_OP_EXEC2SUB = 0x2;
+
+        /**
+         * < Transfer files from submit peer to  execution peer with appending mode
+         */
+        public static final int XF_OP_SUB2EXEC_APPEND = 0x4;
+
+        /**
+         * < Transfer files from execution peer to  submit peer with appending mode
+         */
+        public static final int XF_OP_EXEC2SUB_APPEND = 0x8;
+        public static final int XF_OP_URL_SOURCE = 0x10;
+
+        /**
+         * < Defined in \ref defs_lsb_XF_OP
+         */
+        public int options;
+    }
+
+
+
+    /* For NQS */
+    public static final int NQS_ROUTE = 0x1;
+    public static final int NQS_SIG = 0x2;
+    public static final int NQS_SERVER = 0x4;
+
+
+    public static final int MAXNFA = 1024;
+    public static final int MAXTAG = 10;
+
+    public static final int OKP = 1;
+    public static final int NOP = 0;
+
+    public static final int CHR = 1;
+    public static final int ANY = 2;
+    public static final int CCL = 3;
+    public static final int BOL = 4;
+    public static final int EOL = 5;
+    public static final int BOT = 6;
+    public static final int EOT = 7;
+    public static final int BOW = 8;
+    public static final int EOW = 9;
+    public static final int REF = 10;
+    public static final int CLO = 11;
+
+    public static final int END = 0;
+
+    /**
+     *  The following defines are not meant to be changeable.
+     *  They are for readability only.
+     */
+
+    public static final int MAXCHR = 128;
+    public static final int CHRBIT = 8;
+    public static final int BITBLK = MAXCHR / CHRBIT;
+    public static final int BLKIND = 0xAA; // NOTE(review): the original rx regex header defines BLKIND as octal 0170 (= 0x78); 0xAA (= decimal 170) looks like an octal-read-as-decimal conversion slip — verify before relying on bitset indexing
+    public static final int BITIND = 0x7;
+
+    public static final int ASCIIB = 0x7F;
+
+    /**
+     *  byte classification table for word boundary operators BOW
+     *  and EOW. the reason for not using ctype macros is that we can
+     *  let the user add into our own table. see re_modw. This table
+     *  is not in the bitset form, since we may wish to extend it in the
+     *  future for other byte classifications.
+     *
+     *   TRUE for 0-9 A-Z a-z _
+     */
+
+    public static final byte[] chrtyp = {
+            0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+            0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+            0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+            0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+            0, 0, 0, 0, 0, 0, 0, 0, 1, 1,
+            1, 1, 1, 1, 1, 1, 1, 1, 0, 0,
+            0, 0, 0, 0, 0, 1, 1, 1, 1, 1,
+            1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+            1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+            1, 0, 0, 0, 0, 1, 0, 1, 1, 1,
+            1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+            1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+            1, 1, 1, 0, 0, 0, 0, 0
+    };
+
+    public static int inascii(int x) {
+        return (0x7F & (x));
+    }
+
+    public static int iswordc(int x) {
+        return chrtyp[inascii(x)];
+    }
+
+/*
+*  skip values for CLO XXX to skip past the closure
+ */
+
+
+/* [CLO] ANY END ... */
+    public static final int ANYSKIP = 2;
+
+/* [CLO] CHR chr END ... */
+    public static final int CHRSKIP = 3;
+
+/* [CLO] CCL 16bytes END ... */
+    public static final int CCLSKIP = 18;
+
+/*  In LSF7.0.6, we introduce submit_ext structure to support
+*   extended fields for future added submit options.
+*   Each new option should have a unique key defined here.
+*   The new defined key should be bigger than 1000.
+*   Keys below 1000 are reserved for internal use.
+ */
+
+/* submit_ext test */
+    public static final int JDATA_EXT_TEST = 1001;
+
+/* LSF simulator: simReq */
+    public static final int JDATA_EXT_SIMREQ = 1002;
+
+/* structure for lsb_submit() call */
+
+    /**
+     * \extend submit data structure
+     */
+    public static class submit_ext extends Structure {
+        public static class ByReference extends submit_ext implements Structure.ByReference {}
+        public static class ByValue extends submit_ext implements Structure.ByValue {}
+        public submit_ext() {}
+        public submit_ext(Pointer p) { super(p); read(); }
+
+
+        /**
+         * < number of key value pairs.
+         */
+        public int num;
+
+        /**
+         * < Array of keys of the extended fields.
+         */
+        public Pointer keys;
+
+        /**
+         * < Array of values of the extended fields
+         */
+        public Pointer values;
+    }
+
+
+
+
+    /**
+     * \brief  submit request structure.
+     */
+    public static class submit extends Structure {
+        public static class ByReference extends submit implements Structure.ByReference {}
+        public static class ByValue extends submit implements Structure.ByValue {}
+        public submit() {}
+        public submit(Pointer p) { super(p); read(); }
+
+
+        /**
+         * <  <lsf/lsbatch.h> defines the flags in \ref lsb_submit_options constructed from bits. These flags correspond to some of the options of the bsub command line. Use the bitwise OR to set more than one flag.
+         */
+        public int options;
+
+
+        /**
+         * < Extended bitwise inclusive OR of some of the flags in \ref lsb_submit_options2.
+         */
+        public int options2;
+
+
+        /**
+         * < The job name. If jobName is null, command is used as the job name.
+         */
+        public String jobName;
+
+        /**
+         * < Submit the job to this queue. If queue is null, submit the job to a system default queue.
+         */
+        public String queue;
+
+        /**
+         * < The number of invoker specified candidate hosts for running the job. If numAskedHosts is 0, all qualified hosts will be considered.
+         */
+        public int numAskedHosts;
+
+        /**
+         * < The array of names of invoker specified candidate hosts.  The number of hosts is given by numAskedHosts.
+         */
+        public Pointer askedHosts;
+
+        /**
+         * < The resource requirements of the job. If resReq is null, the batch system will try to obtain resource requirements for command from the remote task lists (see \ref ls_task ). If the task does not appear in the remote task lists, then the default resource requirement is to run on host() of the same type.
+         */
+        public String resReq;
+
+        /**
+         * < Limits on the consumption of system resources by all processes belonging to this job. See getrlimit() for details. If an element of the array is -1, there is no limit for that resource. For the constants used to index the array, see \ref lsb_queueinfo .
+         */
+        public int[] rLimits = new int[LibLsf.LSF_RLIM_NLIMITS];
+
+        /**
+         * < Specify the host model to use for scaling rLimits[LSF_RLIMIT_CPU] and rLimits[LSF_RLIMIT_RUN]. (See \ref lsb_queueinfo). If hostSpec is null, the local host is assumed.
+         */
+        public String hostSpec;
+
+        /**
+         * <  The initial number of processors needed by a (parallel) job. The default is 1.
+         */
+        public int numProcessors;
+
+        /**
+         * < The job dependency condition.
+         */
+        public String dependCond;
+
+        /**
+         * <  Time event string
+         */
+        public String timeEvent;
+
+        /**
+         * <  Dispatch the job on or after beginTime, where beginTime is the number of seconds since 00:00:00 GMT, Jan. 1, 1970 (See time(), ctime()). If beginTime is 0, start the job as soon as possible.
+         */
+        public NativeLong beginTime;
+
+        /**
+         * <  The job termination deadline. If the job is still running at termTime, it will be sent a USR2 signal. If the job does not terminate within 10 minutes after being sent this signal, it will be ended. termTime has the same representation as beginTime. If termTime is 0, allow the job to run until it reaches a resource limit.
+         */
+        public NativeLong termTime;
+
+        /**
+         * < Applies to jobs submitted to a queue that has a run window (See \ref lsb_queueinfo). Send signal sigValue to the job 10 minutes before the run window is going to close. This allows the job to clean up or checkpoint itself, if desired. If the job does not terminate 10 minutes after being sent this signal, it will be suspended.
+         */
+        public int sigValue;
+
+        /**
+         * < The path name of the job's standard input file. If inFile is null, use /dev/null as the default.
+         */
+        public String inFile;
+
+        /**
+         * < The path name of the job's standard output file. If outFile is null, the job's output will be mailed to the submitter
+         */
+        public String outFile;
+
+        /**
+         * < The path name of the job's standard error output file. If errFile is null, the standard error output will be merged with the standard output of the job.
+         */
+        public String errFile;
+
+        /**
+         * < When submitting a job, the command line of the job.   When modifying a job, a mandatory parameter that  should be set to jobId in string format.
+         */
+        public String command;
+
+        /**
+         * < New command line for bmod.
+         */
+        public String newCommand;
+
+        /**
+         * < The job is checkpointable with a period of chkpntPeriod seconds. The value 0 disables periodic checkpointing.
+         */
+        public NativeLong chkpntPeriod;
+
+        /**
+         * < The directory where the chk directory for this job checkpoint files will be created. When a job is checkpointed, its checkpoint files are placed in chkpntDir/chk. chkpntDir can be a relative or absolute path name.
+         */
+        public String chkpntDir;
+
+        /**
+         * < The number of files to transfer.
+         */
+        public int nxf;
+
+        /**
+         * < The array of file transfer specifications. (The xFile structure is defined in <lsf/lsbatch.h>.)
+         */
+        public Pointer /* xFile.ByReference */ xf;
+
+        /**
+         * < The job pre-execution command.
+         */
+        public String preExecCmd;
+
+        /**
+         * < The user that results are mailed to.
+         */
+        public String mailUser;
+
+        /**
+         * < Delete options in options field.
+         */
+        public int delOptions;
+
+        /**
+         * < Extended delete options in options2 field.
+         */
+        public int delOptions2;
+
+        /**
+         * < The name of the project the job will be charged to.
+         */
+        public String projectName;
+
+        /**
+         * < Maximum number of processors required to run the job.
+         */
+        public int maxNumProcessors;
+
+        /**
+         * < Specified login shell used to initialize the execution environment for the job (see the -L option of bsub).
+         */
+        public String loginShell;
+
+        /**
+         * < The name of the LSF user group (see lsb.users) to which the job will belong. (see the -G option of bsub)
+         */
+        public String userGroup;
+
+        /**
+         * < Passes the exception handlers to mbatchd during a job. (see the -X option of bsub). Specifies exception handlers that tell the system how to respond to an exceptional condition for a job. An action is performed when any one of the following exceptions is detected: - \b missched - A job has not been scheduled within the time event specified in the -T option. - \b overrun - A job did not finish in its maximum time (maxtime). - \b underrun - A job finished before it reaches its [...]
+         */
+        public String exceptList;
+
+
+        /**
+         * < User priority for fairshare scheduling.
+         */
+        public int userPriority;
+
+        /**
+         * < Reservation ID for advance reservation.
+         */
+        public String rsvId;
+
+        /**
+         * < Job group under which the job runs.
+         */
+        public String jobGroup;
+
+        /**
+         * < SLA under which the job runs.
+         */
+        public String sla;
+
+        /**
+         * < External scheduler options.
+         */
+        public String extsched;
+
+        /**
+         * < Warning time period in seconds, -1 if unspecified.
+         */
+        public int warningTimePeriod;
+
+        /**
+         * < Warning action, SIGNAL | CHKPNT | command, null if unspecified.
+         */
+        public String warningAction;
+
+        /**
+         * < License Scheduler project name.
+         */
+        public String licenseProject;
+
+        /**
+         * < Extended bitwise inclusive OR of options flags in \ref lsb_submit_options3.
+         */
+        public int options3;
+
+        /**
+         * < Extended delete options in options3 field.
+         */
+        public int delOptions3;
+
+        /**
+         * < Application profile under which the job runs.
+         */
+        public String app;
+
+        /**
+         * < -1 if no -jsdl and -jsdl_strict options. - 0 -jsdl_strict option - 1 -jsdl option
+         */
+        public int jsdlFlag;
+
+        /**
+         * < JSDL filename
+         */
+        public String jsdlDoc;
+
+        /**
+         * < ARM correlator
+         */
+        public Pointer correlator;
+
+        /**
+         * <  Absolute priority scheduling string set by administrators to denote static system APS value or ADMIN factor APS value. This field is ignored by \ref lsb_submit.
+         */
+        public String apsString;
+
+        /**
+         * < Post-execution commands specified by -Ep option of bsub and bmod.
+         */
+        public String postExecCmd;
+
+        /**
+         * < Current working directory specified by -cwd option of bsub and bmod.
+         */
+        public String cwd;
+
+        /**
+         * < Runtime estimate specified by -We option of bsub and bmod.
+         */
+        public int runtimeEstimation;
+
+        /**
+         * < Job-level requeue exit values specified by -Q option of bsub and bmod.
+         */
+        public String requeueEValues;
+
+        /**
+         * < Initial checkpoint period specified by -k option of bsub and bmod.
+         */
+        public int initChkpntPeriod;
+
+        /**
+         * < Job migration threshold specified by -mig option of bsub and bmod.
+         */
+        public int migThreshold;
+
+        /**
+         * < Job resize notification command to be invoked on the first execution host when a resize request has been satisfied.
+         */
+        public String notifyCmd;
+
+        /**
+         * < Job description.
+         */
+        public String jobDescription;
+/* #if defined(LSF_SIMULATOR)
+
+/**< simulation related options */
+        /*public String simReq;*/
+        /* #endif */
+
+        /**
+         * < For new options in future
+         */
+        public submit_ext.ByReference submitExt;
+    }
+
+
+
+
+    /**
+     * \brief submit reply.
+     *
+     * Detail returned for a job submission request; the bad* fields identify
+     * which part of a rejected request was at fault.
+     *
+     * NOTE: JNA derives the native struct layout from field declaration
+     * order — do not reorder or rename the public fields.
+     */
+    public static class submitReply extends Structure {
+        public static class ByReference extends submitReply implements Structure.ByReference {}
+        public static class ByValue extends submitReply implements Structure.ByValue {}
+        public submitReply() {}
+        public submitReply(Pointer p) { super(p); read(); }
+
+
+        /** The queue the job was submitted to. */
+        public String queue;
+
+        /** DependCond contained badJobId but badJobId does not exist in the system. */
+        public long badJobId;
+
+        /**
+         * DependCond contained badJobName but badJobName does not exist in the
+         * system. If the environment variable BSUB_CHK_RESREQ is set, lsberrno
+         * is either LSBE_RESREQ_OK or LSBE_RESREQ_ERR depending on the result
+         * of resource-requirement string checking, and this field carries the
+         * detailed error message.
+         */
+        public String badJobName;
+
+        /**
+         * If lsberrno is LSBE_BAD_HOST, (**askedHosts)[badReqIndx] is not a
+         * host known to the system. If lsberrno is LSBE_QUEUE_HOST,
+         * (**askedHosts)[badReqIndx] is not a host used by the specified
+         * queue. If lsberrno is LSBE_OVER_LIMIT, (*rLimits)[badReqIndx]
+         * exceeds the queue's limit for the resource.
+         */
+        public int badReqIndx;
+    }
+
+
+
+    /**
+     * \brief submit migration request.
+     */
+    public static class submig extends Structure {
+        public static class ByReference extends submig implements Structure.ByReference {}
+        public static class ByValue extends submig implements Structure.ByValue {}
+        public submig() {}
+        public submig(Pointer p) { super(p); read(); }
+
+
+        /** The job ID of the job to be migrated. */
+        public long jobId;
+
+        /** Migration options; please refer to \ref lsb_submit_options. */
+        public int options;
+
+        /** The number of hosts supplied as candidates for migration. */
+        public int numAskedHosts;
+
+        /** An array of pointers to the names of candidate hosts for migration. */
+        public Pointer askedHosts;
+    }
+
+
+
+/**
+ * Request structure for the lsb_addjgrp() call (add a job group).
+ * Fields are undocumented upstream; meanings below are inferred from the
+ * field names — confirm against callers before relying on them.
+ */
+    public static class jgrpAdd extends Structure {
+        public static class ByReference extends jgrpAdd implements Structure.ByReference {}
+        public static class ByValue extends jgrpAdd implements Structure.ByValue {}
+        public jgrpAdd() {}
+        public jgrpAdd(Pointer p) { super(p); read(); }
+
+        public String groupSpec;   // presumably the job group path/specification
+        public String timeEvent;   // presumably a time event expression
+        public String depCond;     // presumably a dependency condition
+        public String sla;         // presumably a service class (SLA) name
+        public int maxJLimit;      // presumably the group's maximum job limit
+    }
+
+
+
+/**
+ * Request structure for the lsb_modjgrp() call (modify a job group).
+ */
+    public static class jgrpMod extends Structure {
+        public static class ByReference extends jgrpMod implements Structure.ByReference {}
+        public static class ByValue extends jgrpMod implements Structure.ByValue {}
+        public jgrpMod() {}
+        public jgrpMod(Pointer p) { super(p); read(); }
+
+        public String destSpec;    // presumably the target group specification — confirm against caller
+        public jgrpAdd jgrp;       // new attribute values, reusing the jgrpAdd layout
+    }
+
+
+
+/**
+ * Reply structure for the lsb_addjgrp() and lsb_modjgrp() calls.
+ */
+    public static class jgrpReply extends Structure {
+        public static class ByReference extends jgrpReply implements Structure.ByReference {}
+        public static class ByValue extends jgrpReply implements Structure.ByValue {}
+        public jgrpReply() {}
+        public jgrpReply(Pointer p) { super(p); read(); }
+
+        public String badJgrpName;   // presumably the name of the offending job group on failure
+        public int num;              // presumably the number of entries in delJgrpList
+        public Pointer delJgrpList;  // presumably a list of deleted job groups
+    }
+
+
+
+    /**
+     * \brief Signal a group of jobs.
+     */
+    public static class signalBulkJobs extends Structure {
+        public static class ByReference extends signalBulkJobs implements Structure.ByReference {}
+        public static class ByValue extends signalBulkJobs implements Structure.ByValue {}
+        public signalBulkJobs() {}
+        public signalBulkJobs(Pointer p) { super(p); read(); }
+
+
+        /** Signal type. */
+        public int signal;
+
+        /** Number of jobs. */
+        public int njobs;
+
+        /** Job-ID list. */
+        public Pointer jobs;
+
+        /** Flags. */
+        public int flags;
+    }
+
+
+
+/**
+ * Request structure for the lsb_ctrljgrp() call (control a job group).
+ */
+    public static class jgrpCtrl extends Structure {
+        public static class ByReference extends jgrpCtrl implements Structure.ByReference {}
+        public static class ByValue extends jgrpCtrl implements Structure.ByValue {}
+        public jgrpCtrl() {}
+        public jgrpCtrl(Pointer p) { super(p); read(); }
+
+        public String groupSpec;   // job group specification
+        public String userSpec;    // user specification
+        public int options;        // control options
+
+        /** Control operation: one of JGRP_RELEASE, JGRP_HOLD, JGRP_DEL. */
+        public int ctrlOp;
+    }
+
+
+
+
+/** Indicates no change in the checkpoint period for lsb_chkpntjob(). */
+    public static final int LSB_CHKPERIOD_NOCHNG = -1;
+
+    /*
+     * \addtogroup chkpnt_job_option  chkpnt_job_option
+     * Checkpoint job options.
+     */
+
+    /** Kill process if successfully checkpointed. */
+    public static final int LSB_CHKPNT_KILL = 0x1;
+
+    /** Force checkpoint even if non-checkpointable conditions exist. */
+    public static final int LSB_CHKPNT_FORCE = 0x2;
+
+    /**
+     * Copy all regular files in use by the checkpointed process to the
+     * checkpoint directory.
+     */
+    public static final int LSB_CHKPNT_COPY = 0x3;
+
+    /** Checkpoint for the purpose of migration. */
+    public static final int LSB_CHKPNT_MIG = 0x4;
+
+    /** Stop process if successfully checkpointed. */
+    public static final int LSB_CHKPNT_STOP = 0x8;
+
+    /*
+     * \addtogroup kill_requeue  kill_requeue
+     * Kill-and-requeue job options.
+     */
+
+    /** Kill then re-queue a job. */
+    public static final int LSB_KILL_REQUEUE = 0x10;
+
+/* options for lsb_openjobinfo() */
+    /*
+     * \addtogroup defs_lsb_openjobinfo  defs_lsb_openjobinfo
+     * Information options about jobs.
+     */
+
+    /** Reserved user name meaning "all users". */
+    public static final String ALL_USERS = "all";
+
+    /*
+     * \defgroup defs_lsb_openjobinfo_a defs_lsb_openjobinfo_a
+     * defs_lsb_openjobinfo_a is part of defs_lsb_openjobinfo
+     */
+
+    /**
+     * Information about all jobs, including unfinished jobs (pending, running
+     * or suspended) and recently finished jobs. LSF remembers jobs finished
+     * within the preceding period, set by the parameter CLEAN_PERIOD in the
+     * lsb.params file; the default is 3600 seconds (1 hour). The command line
+     * equivalent is "bjobs -a".
+     */
+    public static final int ALL_JOB = 0x0001;
+
+    /** Information about recently finished jobs. */
+    public static final int DONE_JOB = 0x0002;
+
+    /** Information about pending jobs. */
+    public static final int PEND_JOB = 0x0004;
+
+    /** Information about suspended jobs. */
+    public static final int SUSP_JOB = 0x0008;
+
+    /** Information about all unfinished jobs. */
+    public static final int CUR_JOB = 0x0010;
+
+    /** Information about the last submitted job. */
+    public static final int LAST_JOB = 0x0020;
+
+    /** Information about all running jobs. */
+    public static final int RUN_JOB = 0x0040;
+
+    /** Information about job ID only. */
+    public static final int JOBID_ONLY = 0x0080;
+
+    /** Internal use only. */
+    public static final int HOST_NAME = 0x0100;
+
+    /** Exclude pending jobs. */
+    public static final int NO_PEND_REASONS = 0x0200;
+
+    /** Return group info structures. */
+    public static final int JGRP_INFO = 0x0400;
+
+    /** Recursively search the job group tree. */
+    public static final int JGRP_RECURSIVE = 0x0800;
+
+    /** Return job array info structures. */
+    public static final int JGRP_ARRAY_INFO = 0x1000;
+
+    /** All jobs in the core. */
+    public static final int JOBID_ONLY_ALL = 0x02000;
+
+    /** All zombie jobs. */
+    public static final int ZOMBIE_JOB = 0x04000;
+
+    /** Display remote jobs by their submission job ID. */
+    public static final int TRANSPARENT_MC = 0x08000;
+
+    /** Exceptional jobs. */
+    public static final int EXCEPT_JOB = 0x10000;
+
+    /** Display for murex jobs. */
+    public static final int MUREX_JOB = 0x20000;
+
+
+    /** To Symphony UA. */
+    public static final int TO_SYM_UA = 0x40000;
+
+    /** Only show top-level Symphony jobs. */
+    public static final int SYM_TOP_LEVEL_ONLY = 0x80000;
+
+    /** For internal use only. */
+    public static final int JGRP_NAME = 0x100000;
+
+    /** Condensed host group. */
+    public static final int COND_HOSTNAME = 0x200000;
+
+    /** Called from command; for internal use only. */
+    public static final int FROM_BJOBSCMD = 0x400000;
+
+    /** "-l" in command parameter; for internal use only. */
+    public static final int WITH_LOPTION = 0x800000;
+
+    /** Jobs submitted to an APS queue. */
+    public static final int APS_JOB = 0x1000000;
+
+    /** Information about user group (RFC#1531: -G option support). */
+    public static final int UGRP_INFO = 0x2000000;
+
+    /**
+     * -WL: estimated time remaining based on the runtime estimate or
+     * runlimit.
+     */
+    public static final int TIME_LEFT = 0x4000000;
+
+    /**
+     * -WF: estimated finish time based on the runtime estimate or runlimit.
+     */
+    public static final int FINISH_TIME = 0x8000000;
+
+    /**
+     * -WP: estimated completion percentage based on the runtime estimate or
+     * runlimit. If options is 0, default to CUR_JOB.
+     */
+    public static final int COM_PERCENTAGE = 0x10000000;
+
+    /** -ss option. */
+    public static final int SSCHED_JOB = 0x20000000;
+
+    /** -G option. */
+    public static final int KILL_JGRP_RECURSIVE = 0x40000000;
+
+    /*
+     * \addtogroup group_nodetypes group_nodetypes
+     * Group node types.
+     */
+
+    /** Job node. */
+    public static final int JGRP_NODE_JOB = 1;
+
+    /** Group node. */
+    public static final int JGRP_NODE_GROUP = 2;
+
+    /** Array node. */
+    public static final int JGRP_NODE_ARRAY = 3;
+
+    /** SLA node. */
+    public static final int JGRP_NODE_SLA = 4;
+
+/* jobId macros: a full 64-bit LSF job ID packs the array element index into
+ * the upper 32 bits and the base job ID into the lower 32 bits. */
+    public static final long LSB_MAX_ARRAY_JOBID = 0x0FFFFFFFFL;
+    public static final long LSB_MAX_ARRAY_IDX = 0x07FFFFFFFL;
+    public static final int LSB_MAX_SEDJOB_RUNID = (0x0F);
+
+    /**
+     * Packs a base job ID and an array index into a single 64-bit job ID.
+     * The base ID is masked to its low 32 bits before being combined: since
+     * LSB_MAX_ARRAY_JOBID is 0xFFFFFFFF, a base ID with the sign bit set is
+     * legal, and without the mask the int-to-long widening would sign-extend
+     * and clobber the array-index half of the result.
+     */
+    public static long LSB_JOBID (int array_jobId, int array_idx)    { return (((long)array_idx << 32) | (array_jobId & LSB_MAX_ARRAY_JOBID)); }
+
+    /** Extracts the array element index (upper 32 bits); a jobId of -1 yields 0. */
+    public static int LSB_ARRAY_IDX (long jobId)   { return (((jobId) == -1) ? (0) : (int)((jobId >> 32)  & LSB_MAX_ARRAY_IDX)); }
+
+    /** Extracts the base job ID (lower 32 bits); a jobId of -1 is passed through as -1. */
+    public static int LSB_ARRAY_JOBID (long jobId)  { return (((jobId) == -1) ? (-1) : (int)(jobId)); }
+
+/* Status of a job group */
+
+    public static final int JGRP_INACTIVE = 0;
+    public static final int JGRP_ACTIVE = 1;
+    public static final int JGRP_UNDEFINED = -1;
+
+    /*
+     * \addtogroup jobgroup_controltypes jobgroup_controltypes
+     * Job group control types (values for jgrpCtrl.ctrlOp).
+     */
+
+    /** bgrelease. */
+    public static final int JGRP_RELEASE = 1;
+
+    /** bghold. */
+    public static final int JGRP_HOLD = 2;
+
+    /** bgdel. */
+    public static final int JGRP_DEL = 3;
+
+    /*
+     * \addtogroup jobgroup_counterIndex jobgroup_counterIndex
+     * The following can be used to index into a 'counters' array.
+     */
+
+    /** Total jobs in the array. */
+    public static final int JGRP_COUNT_NJOBS = 0;
+
+    /** Number of pending jobs in the array. */
+    public static final int JGRP_COUNT_PEND = 1;
+
+    /** Number of held jobs in the array. */
+    public static final int JGRP_COUNT_NPSUSP = 2;
+
+    /** Number of running jobs in the array. */
+    public static final int JGRP_COUNT_NRUN = 3;
+
+    /** Number of jobs suspended by the system in the array. */
+    public static final int JGRP_COUNT_NSSUSP = 4;
+
+    /** Number of jobs suspended by the user in the array. */
+    public static final int JGRP_COUNT_NUSUSP = 5;
+
+    /** Number of exited jobs in the array. */
+    public static final int JGRP_COUNT_NEXIT = 6;
+
+    /** Number of successfully completed jobs. */
+    public static final int JGRP_COUNT_NDONE = 7;
+
+    /** Total slots in the array. */
+    public static final int JGRP_COUNT_NJOBS_SLOTS = 8;
+
+    /** Number of pending slots in the array. */
+    public static final int JGRP_COUNT_PEND_SLOTS = 9;
+
+    /** Number of running slots in the array. */
+    public static final int JGRP_COUNT_RUN_SLOTS = 10;
+
+    /** Number of slots suspended by the system in the array. */
+    public static final int JGRP_COUNT_SSUSP_SLOTS = 11;
+
+    /** Number of slots suspended by the user in the array. */
+    public static final int JGRP_COUNT_USUSP_SLOTS = 12;
+
+    /** Number of reserved slots in the array. */
+    public static final int JGRP_COUNT_RESV_SLOTS = 13;
+
+/* job group modification types */
+    public static final int JGRP_MOD_LIMIT = 0x1;
+
+/* The number of job-level counters of a job group:
+ * {njobs, npend, npsusp, nrun, nssusp, nususp, nexit, ndone} */
+    public static final int NUM_JGRP_JOB_COUNTERS = 8;
+
+/* The number of all counters of a job group,
+ * including both job-level and slot-level counters. */
+    public static final int NUM_JGRP_COUNTERS = 14;
+
+/* job group is created explicitly */
+    public static final int JGRP_CREATE_EXP = 0x01;
+
+/* job group is created implicitly */
+    public static final int JGRP_CREATE_IMP = 0x02;
+/**
+ * The LSF job group.
+ */
+    public static class jgrp extends Structure {
+        public static class ByReference extends jgrp implements Structure.ByReference {}
+        public static class ByValue extends jgrp implements Structure.ByValue {}
+        public jgrp() {}
+        public jgrp(Pointer p) { super(p); read(); }
+
+        public String name;       // group name
+        public String path;       // group path
+        public String user;       // owning user
+        public String sla;        // service class (SLA) name
+        // Per-group counters; index with the JGRP_COUNT_* constants.
+        public int[] counters = new int[NUM_JGRP_COUNTERS];
+        public int maxJLimit;     // maximum job limit for the group
+    }
+
+
+
+/**
+ * Structure for the lsb_setjobattr() call (set job attributes).
+ */
+    public static class jobAttrInfoEnt extends Structure {
+        public static class ByReference extends jobAttrInfoEnt implements Structure.ByReference {}
+        public static class ByValue extends jobAttrInfoEnt implements Structure.ByValue {}
+        public jobAttrInfoEnt() {}
+        public jobAttrInfoEnt(Pointer p) { super(p); read(); }
+
+
+        /** ID of the job. */
+        public long jobId;
+
+        /** Port number of the job. */
+        public short port;
+
+        /** First executing host of the job (fixed-size native char array). */
+        public byte[] hostname = new byte[LibLsf.MAXHOSTNAMELEN];
+    }
+
+
+
+    /**
+     * \brief job attribute setting log.
+     */
+    public static class jobAttrSetLog extends Structure {
+        public static class ByReference extends jobAttrSetLog implements Structure.ByReference {}
+        public static class ByValue extends jobAttrSetLog implements Structure.ByValue {}
+        public jobAttrSetLog() {}
+        public jobAttrSetLog(Pointer p) { super(p); read(); }
+
+
+        /** The unique ID for the job. (Declared int here, unlike the 64-bit jobId used elsewhere — matches the native log record.) */
+        public int jobId;
+
+        /** Job array index; must be 0 in JOB_NEW. */
+        public int idx;
+
+        /** The user who requested the action. */
+        public int uid;
+
+        /** Job attributes (port number). */
+        public int port;
+
+        /** Name of the host. */
+        public String hostname;
+    }
+
+
+
+    /**
+     * \brief job information head.
+     */
+    public static class jobInfoHead extends Structure {
+        public static class ByReference extends jobInfoHead implements Structure.ByReference {}
+        public static class ByValue extends jobInfoHead implements Structure.ByValue {}
+        public jobInfoHead() {}
+        public jobInfoHead(Pointer p) { super(p); read(); }
+
+
+        /** The number of jobs in the connection. */
+        public int numJobs;
+
+        /** An array of job identification numbers in the connection. */
+        public NativeLongByReference jobIds;
+
+        /** The number of hosts in the connection. */
+        public int numHosts;
+
+        /** An array of host names in the connection. */
+        public Pointer hostNames;
+
+        /** The number of clusters in the connection. */
+        public int numClusters;
+
+        /** An array of cluster names in the connection. */
+        public Pointer clusterNames;
+
+        /** The number of remote hosts in the connection. */
+        public IntByReference numRemoteHosts;
+
+        /** An array of remote host names in the connection. */
+        public PointerByReference remoteHosts;
+    }
+
+
+
+    /**
+     * \brief job information head extent.
+     */
+    public static class jobInfoHeadExt extends Structure {
+        public static class ByReference extends jobInfoHeadExt implements Structure.ByReference {}
+        public static class ByValue extends jobInfoHeadExt implements Structure.ByValue {}
+        public jobInfoHeadExt() {}
+        public jobInfoHeadExt(Pointer p) { super(p); read(); }
+
+
+        /** Job information header. */
+        public jobInfoHead.ByReference jobInfoHead;
+
+        /** Group information returned. */
+        public Pointer groupInfo;
+    }
+
+
+
+    /**
+     * \brief structure reserveItem — one reserved resource.
+     */
+    public static class reserveItem extends Structure {
+        public static class ByReference extends reserveItem implements Structure.ByReference {}
+        public static class ByValue extends reserveItem implements Structure.ByValue {}
+        public reserveItem() {}
+        public reserveItem(Pointer p) { super(p); read(); }
+
+
+        /** Name of the resource to reserve. */
+        public String resName;
+
+        /** The number of hosts reserving this resource. */
+        public int nHost;
+
+        /** Amount of reservation made on each host. Some hosts may reserve 0. */
+        public FloatByReference value;
+
+        /** Flag of shared or host-based resource. */
+        public int shared;
+    }
+
+
+
+    /**
+     * \brief job information entry, as returned by the job query API.
+     *
+     * NOTE: JNA derives the native struct layout from field declaration
+     * order — do not reorder or rename the public fields.
+     */
+    public static class jobInfoEnt extends Structure {
+        public static class ByReference extends jobInfoEnt implements Structure.ByReference {}
+        public static class ByValue extends jobInfoEnt implements Structure.ByValue {}
+        public jobInfoEnt() {}
+        public jobInfoEnt(Pointer p) { super(p); read(); }
+
+
+        /** The job ID that the LSF system assigned to the job. */
+        public long jobId;
+
+        /** The name of the user who submitted the job. */
+        public String user;
+
+        /** The current status of the job. Possible values are shown in job_states. */
+        public int status;
+
+        /** Pending or suspending reasons of the job. */
+        public IntByReference reasonTb;
+
+        /** Length of reasonTb[]. */
+        public int numReasons;
+
+        /** The reason a job is pending or suspended. */
+        public int reasons;
+
+        /**
+         * Sub-reason a job is pending or suspended. If status is
+         * JOB_STAT_PEND, reasons/subreasons are explained by
+         * \ref lsb_pendreason; if JOB_STAT_PSUSP, by \ref lsb_suspreason.
+         * When reasons is PEND_HOST_LOAD or SUSP_LOAD_REASON, subreasons
+         * indicates the load indices that are out of bounds.
+         */
+        public int subreasons;
+
+        /** The job process ID. */
+        public int jobPid;
+
+        /** The time the job was submitted, in seconds since 00:00:00 GMT, Jan. 1, 1970. */
+        public NativeLong submitTime;
+
+        /** Time when job slots were reserved. */
+        public NativeLong reserveTime;
+
+        /** The time that the job started running, if it has been dispatched. */
+        public NativeLong startTime;
+
+        /** The job's predicted start time. */
+        public NativeLong predictedStartTime;
+
+        /** The termination time of the job, if it has completed. */
+        public NativeLong endTime;
+
+        /** Last time event. */
+        public NativeLong lastEvent;
+
+        /** Next time event. */
+        public NativeLong nextEvent;
+
+        /** Duration time (minutes). */
+        public int duration;
+
+        /** CPU time consumed by the job. */
+        public float cpuTime;
+
+        /** The file creation mask when the job was submitted. */
+        public int umask;
+
+        /** The current working directory when the job was submitted. */
+        public String cwd;
+
+        /** Home directory on the submission host. */
+        public String subHomeDir;
+
+        /** The name of the host from which the job was submitted. */
+        public String fromHost;
+
+        /** The array of names of hosts on which the job executes. */
+        public Pointer exHosts;
+
+        /** The number of hosts on which the job executes. */
+        public int numExHosts;
+
+        /** The CPU factor for normalizing CPU and wall clock time limits. */
+        public float cpuFactor;
+
+        /** The number of load indices in the loadSched and loadStop arrays. */
+        public int nIdx;
+
+        /**
+         * Thresholds for resuming a suspended job: only if the current
+         * values of all specified load indices of a host are within their
+         * corresponding thresholds may the job be resumed on that host.
+         * For the entries in loadSched, see \ref lsb_hostinfo.
+         */
+        public FloatByReference loadSched;
+
+        /**
+         * Thresholds for job suspension: if any current load index value of
+         * the host crosses its threshold, the job will be suspended. For the
+         * entries in loadStop, see \ref lsb_hostinfo.
+         */
+        public FloatByReference loadStop;
+
+        /** Structure for the \ref lsb_submit call (original submission request). */
+        public submit submit;
+
+        /** Job exit status. */
+        public int exitStatus;
+
+        /** Mapped UNIX user ID on the execution host. */
+        public int execUid;
+
+        /** Home directory for the job on the execution host. */
+        public String execHome;
+
+        /** Current working directory for the job on the execution host. */
+        public String execCwd;
+
+        /** Mapped user name on the execution host. */
+        public String execUsername;
+
+        /** Time of the last job resource usage update. */
+        public NativeLong jRusageUpdateTime;
+
+        /** Contains resource usage information for the job. */
+        public LibLsf.jRusage runRusage;
+
+        /** Job type: N_JOB, N_GROUP, N_HEAD. */
+        public int jType;
+
+        /** The parent job group of a job or job group. */
+        public String parentGroup;
+
+        /** If jType is JGRP_NODE_GROUP, the job group name; otherwise, the job name. */
+        public String jName;
+
+        /**
+         * Counter array, only used for job arrays. Index with the values
+         * shown in \ref jobgroup_counterIndex.
+         */
+        public int[] counter = new int[NUM_JGRP_COUNTERS];
+
+        /** Service port of the job. */
+        public short port;
+
+        /** Job dynamic priority. */
+        public int jobPriority;
+
+        /** The number of external messages in the job. */
+        public int numExternalMsg;
+
+        /** The information required to define an external message reply. */
+        public Pointer externalMsg;
+
+        /**
+         * MultiCluster cluster ID. If clusterId >= 0, the job is a pending
+         * remote job and \ref lsb_readjobinfo checks for
+         * host_name@cluster_name; if the host name is needed it should be
+         * found in jInfoH->remoteHosts. If the remote host name is not
+         * available, the constant string remoteHost is used.
+         */
+        public int clusterId;
+
+        /** Detail reason field. */
+        public String detailReason;
+
+        /**
+         * Idle factor for job exception handling. If the job idle factor is
+         * less than the specified threshold, LSF invokes
+         * LSF_SERVERDIR/eadmin to trigger the action for a job idle
+         * exception.
+         */
+        public float idleFactor;
+
+        /** Job exception handling mask (see the J_EXCEPT_* bits). */
+        public int exceptMask;
+
+
+        /**
+         * Placement information of LSF HPC jobs. Arbitrary information of a
+         * job stored as a string, currently used by rms_rid and rms_alloc.
+         */
+        public String additionalInfo;
+
+        /** Job termination reason. See lsbatch.h. */
+        public int exitInfo;
+
+        /** Job warning time period in seconds; -1 if unspecified. */
+        public int warningTimePeriod;
+
+        /** Warning action: SIGNAL | CHKPNT | command; null if unspecified. */
+        public String warningAction;
+
+        /** SAAP charged for the job. */
+        public String chargedSAAP;
+
+        /** The rusage satisfied at job runtime. */
+        public String execRusage;
+
+        /** The time when the advance reservation expired or was deleted. */
+        public NativeLong rsvInActive;
+
+        /** The number of licenses reported from License Scheduler. */
+        public int numLicense;
+
+        /** License Scheduler license names. */
+        public Pointer licenseNames;
+
+        /** Absolute priority scheduling (APS) priority value. */
+        public float aps;
+
+        /** APS value set by administrators to denote the static system APS value. */
+        public float adminAps;
+
+        /** The real runtime on the execution host. */
+        public int runTime;
+
+        /** How many kinds of resource are reserved by this job. */
+        public int reserveCnt;
+
+        /** Detailed reservation information for each kind of resource (array of reserveItem). */
+        public Pointer /* reserveItem.ByReference */ items;
+
+        /** APS value set by administrators to denote the ADMIN factor APS value. */
+        public float adminFactorVal;
+
+        /** Pending resize min; 0 if no resize pending. */
+        public int resizeMin;
+
+        /** Pending resize max; 0 if no resize pending. */
+        public int resizeMax;
+
+        /** Time when the pending resize request was issued. */
+        public NativeLong resizeReqTime;
+
+        /** Number of hosts when the job starts. */
+        public int jStartNumExHosts;
+
+        /** Host list when the job starts. */
+        public Pointer jStartExHosts;
+
+        /** Last time the job allocation changed. */
+        public NativeLong lastResizeTime;
+    }
+
+
+/* The bit set for jobInfoEnt.exceptMask (job exception handling). */
+    public static final int J_EXCEPT_OVERRUN = 0x02;
+    public static final int J_EXCEPT_UNDERUN = 0x04;
+    public static final int J_EXCEPT_IDLE = 0x80;
+    public static final int J_EXCEPT_RUNTIME_EST_EXCEEDED = 0x100;
+
+/* Exception names shown by "bjobs -l" and "bacct -l". */
+    public static final String OVERRUN = "overrun";
+    public static final String UNDERRUN = "underrun";
+    public static final String IDLE = "idle";
+    public static final String SPACE = "  ";
+    public static final String RUNTIME_EST_EXCEEDED = "runtime_est_exceeded";
+
+/* LSF 7.0 moved the jobInfoReq structure definition from daemonout.h to
+ * lsbatch.h. This structure works with the newer API \ref lsb_openjobinfo_req.
+ */
+
+    /**
+     * \brief job information request.
+     */
+    public static class jobInfoReq extends Structure {
+        public static class ByReference extends jobInfoReq implements Structure.ByReference {}
+        public static class ByValue extends jobInfoReq implements Structure.ByValue {}
+        public jobInfoReq() {}
+        public jobInfoReq(Pointer p) { super(p); read(); }
+
+
+        /** Options defined in \ref defs_lsb_openjobinfo. */
+        public int options;
+
+        /** Name of the user whose jobs are to be checked. */
+        public String userName;
+
+        /** Job ID; 0 means all jobs. */
+        public long jobId;
+
+        /** Job name. */
+        public String jobName;
+
+        /** Queue name. */
+        public String queue;
+
+        /** Check jobs running on this host. */
+        public String host;
+
+        /** Job application. */
+        public String app;
+
+        /** Job description. */
+        public String jobDescription;
+
+        /** For new options in the future. */
+        public submit_ext.ByReference submitExt;
+    }
+
+
+
+    /**
+     * \brief user information entry — per-user (or per-user-group) job slot
+     * limits and current usage.
+     */
+    public static class userInfoEnt extends Structure {
+        public static class ByReference extends userInfoEnt implements Structure.ByReference {}
+        public static class ByValue extends userInfoEnt implements Structure.ByValue {}
+        public userInfoEnt() {}
+        public userInfoEnt(Pointer p) { super(p); read(); }
+
+
+        /** Name of the user or user group. */
+        public String user;
+
+        /**
+         * The maximum number of job slots the user or user group can use on
+         * each processor. The job slots can be used by started jobs or
+         * reserved for PEND jobs.
+         */
+        public float procJobLimit;
+
+        /**
+         * The maximum number of job slots that the user or user group can
+         * use simultaneously in the local LSF cluster. The job slots can be
+         * used by started jobs or reserved for PEND jobs.
+         */
+        public int maxJobs;
+
+        /**
+         * The current number of job slots used by running and suspended jobs
+         * belonging to the user or user group.
+         */
+        public int numStartJobs;
+
+        /**
+         * The total number of job slots in the LSF cluster for the jobs
+         * submitted by the user or user group.
+         */
+        public int numJobs;
+
+        /** The number of job slots the user or user group has for pending jobs. */
+        public int numPEND;
+
+        /** The number of job slots the user or user group has for running jobs. */
+        public int numRUN;
+
+        /**
+         * The number of job slots for the jobs belonging to the user or user
+         * group that have been suspended by the system.
+         */
+        public int numSSUSP;
+
+        /**
+         * The number of job slots for the jobs belonging to the user or user
+         * group that have been suspended by the user or the LSF system
+         * administrator.
+         */
+        public int numUSUSP;
+
+        /**
+         * The number of job slots reserved for the pending jobs belonging to
+         * the user or user group.
+         */
+        public int numRESERVE;
+
+        /** The maximum number of pending jobs allowed. */
+        public int maxPendJobs;
+    }
+
+
+
+/* UserEquivalent info */
+
+    public static class userEquivalentInfoEnt extends Structure {
+        public static class ByReference extends userEquivalentInfoEnt implements Structure.ByReference {}
+        public static class ByValue extends userEquivalentInfoEnt implements Structure.ByValue {}
+        public userEquivalentInfoEnt() {}
+        public userEquivalentInfoEnt(Pointer p) { super(p); read(); }
+
+        public String equivalentUsers;
+    }
+
+
+
+/* UserMapping info */
+
+    public static class userMappingInfoEnt extends Structure {
+        public static class ByReference extends userMappingInfoEnt implements Structure.ByReference {}
+        public static class ByValue extends userMappingInfoEnt implements Structure.ByValue {}
+        public userMappingInfoEnt() {}
+        public userMappingInfoEnt(Pointer p) { super(p); read(); }
+
+
+/* Users in the local cluster */
+        public String localUsers;
+
+/* Users in remote clusters */
+        public String remoteUsers;
+
+/* "export" or "import" */
+        public String direction;
+    }
+
+
+
+
+/* APS structures used for mapping between factors */
+
+    /**
+     * \brief  APS structures used for mapping between factors
+     */
+    public static class apsFactorMap extends Structure {
+        public static class ByReference extends apsFactorMap implements Structure.ByReference {}
+        public static class ByValue extends apsFactorMap implements Structure.ByValue {}
+        public apsFactorMap() {}
+        public apsFactorMap(Pointer p) { super(p); read(); }
+
+
+        /**
+         * < Name of factor.
+         */
+        public String factorName;
+
+        /**
+         * < SubFactor names.
+         */
+        public String subFactorNames;
+    }
+
+
+
+    /**
+     * \brief  APS structure used for mapping factor short names to long names
+     */
+    public static class apsLongNameMap extends Structure {
+        public static class ByReference extends apsLongNameMap implements Structure.ByReference {}
+        public static class ByValue extends apsLongNameMap implements Structure.ByValue {}
+        public apsLongNameMap() {}
+        public apsLongNameMap(Pointer p) { super(p); read(); }
+
+
+        /**
+         * < Short name
+         */
+        public String shortName;
+
+        /**
+         * < Long name
+         */
+        public String longName;
+    }
+
+
+
+
+/* options for lsb_queueinfo() , some values should not
+*  conflict with the option values for lsb_usergrpinfo() and lsb_hostinfo_ex()
+*  since they share the same xdr_infoReq()
+*/
+
+/* for compatibility for 2.0 */
+    public static final int ALL_QUEUE = 0x01;
+
+/* for compatibility for 2.0 */
+    public static final int DFT_QUEUE = 0x02;
+    public static final int CHECK_HOST = 0x80;
+    public static final int CHECK_USER = 0x100;
+    public static final int SORT_HOST = 0x200;
+
+/* not bqueues -l or -r */
+    public static final int QUEUE_SHORT_FORMAT = 0x400;
+/* expand hostname into official hostname in lsb_queueinfo */
+    public static final int EXPAND_HOSTNAME = 0x800;
+
+/* only retrieve batch partitions */
+    public static final int RETRIEVE_BATCH = 0x1000;
+
+/* Signal number in each version LSB_SIG_NUM must be equal to
+*  signal number in the latest version.
+ */
+    public static final int LSB_SIG_NUM_40 = 25;
+    public static final int LSB_SIG_NUM_41 = 26;
+
+/* Solutions #38347 */
+    public static final int LSB_SIG_NUM_51 = 30;
+    public static final int LSB_SIG_NUM_60 = 30;
+    public static final int LSB_SIG_NUM = 30;
+
+/* Dynamic CPU provision
+*  to indicate whether a SP can lend or borrow hosts
+ */
+    public static final int DCP_LEND_HOSTS = 0x0001;
+    public static final int DCP_BORROW_HOSTS = 0x0002;
+
+/* status to indicate the current situation of Dynamic CPU provision
+*  DCP_UNDER_ALLOC_AND_STARVING means a partition is under allocation
+*  of dynamic cpu and its pending jobs are starving for more cpus.
+ */
+    public static final int DCP_ALLOC_CPU_OK = 0x0;
+    public static final int DCP_UNDER_ALLOC_CPU = 0x0001;
+    public static final int DCP_JOB_WAIT_FOR_CPU = 0x0002;
+    public static final int DCP_ALLOC_CPU_BUSY = 0x0004;
+
+/* Structure for lsb_queueinfo() call */
+/* !!! IMPORTANT !!!
+*  If you change queueInfoEnt, you have to change Intlib/ade.lsbatch.h too!
+ */
+
+    /**
+     * \brief  queueInfoEnt: queue information entry.
+     */
+    public static class queueInfoEnt extends Structure {
+        public static class ByReference extends queueInfoEnt implements Structure.ByReference {}
+        public static class ByValue extends queueInfoEnt implements Structure.ByValue {}
+        public queueInfoEnt() {}
+        public queueInfoEnt(Pointer p) { super(p); read(); }
+
+
+        /**
+         * < The name of the queue.
+         */
+        public String queue;
+
+        /**
+         * < Describes the typical use of the queue.
+         */
+        public String description;
+
+        /**
+         * < Defines the priority of the queue. This determines the order in which the job queues are searched at job dispatch time: queues with higher priority values are searched first. (This is contrary to UNIX process priority ordering.)
+         */
+        public int priority;
+
+        /**
+         * < Defines the nice value at which jobs in this queue will be run.
+         */
+        public short nice;
+
+        /**
+         * < A blank-separated list of names of users allowed to submit jobs to this queue.
+         */
+        public String userList;
+
+        /**
+         * < A blank-separated list of names of hosts to which jobs in this queue may be dispatched.
+         */
+        public String hostList;
+
+        /**
+         * < Original HOSTS string in case "-" is used.
+         */
+        public String hostStr;
+
+        /**
+         * < The number of load indices in the loadSched and loadStop arrays.
+         */
+        public int nIdx;
+
+        /**
+         * < The queue and host loadSched and loadStop arrays control batch job dispatch, suspension, and resumption. The values in the loadSched array specify thresholds for the corresponding load indices. Only if the current values of all specified load indices of a host are within (below or above, depending on the meaning of the load index) the corresponding thresholds of this queue, will jobs in this queue be dispatched to the host. The same conditions are used to resume jobs dispatc [...]
+         */
+        public FloatByReference loadSched;
+
+        /**
+         * < The values in the loadStop array specify the thresholds for job suspension. If any of the current load index values of a host goes beyond a queue's threshold, jobs from the queue will be suspended. For an explanation of the fields in the loadSched and loadStop arrays, see \ref lsb_hostinfo.
+         */
+        public FloatByReference loadStop;
+
+        /**
+         * < Per-user limit on the number of jobs that can be dispatched from this queue and executed concurrently.
+         */
+        public int userJobLimit;
+
+        /**
+         * < Per-processor limit on the number of jobs that can be dispatched from this queue and executed concurrently.
+         */
+        public float procJobLimit;
+
+        /**
+         * < A blank-separated list of time windows describing the run window of the queue. When a queue's run window is closed, no job from this queue will be dispatched. When the run window closes, any running jobs from this queue will be suspended until the run window reopens, when they will be resumed. The default is no restriction, or always open (i.e., 24 hours a day, seven days a week). A time window has the format begin_time-end_time. Time is specified in the format [day:]hour[:m [...]
+         */
+        public String windows;
+
+        /**
+         * < The per-process UNIX hard resource limits for all jobs submitted to this queue (see getrlimit() and lsb.queues). The default values for the resource limits are unlimited, indicated by -1. The constants used to index the rLimits array and the corresponding resource limits are listed below. <br> LSF_RLIMIT_CPU (CPULIMIT) <br> LSF_RLIMIT_FSIZE (FILELIMIT) <br> LSF_RLIMIT_DATA (DATALIMIT) <br> LSF_RLIMIT_STACK    (STACKLIMIT) <br> LSF_RLIMIT_CORE     (CORELIMIT) <br> LSF_RLIMIT_ [...]
+         */
+        public int[] rLimits = new int[LibLsf.LSF_RLIM_NLIMITS];
+
+        /**
+         * < A host name or host model name. If the queue CPULIMIT or RUNLIMIT gives a host specification, hostSpec will be that specification. Otherwise, if defaultHostSpec (see below) is not null, hostSpec will be defaultHostSpec. Otherwise, if DEFAULT_HOST_SPEC is defined in the lsb.params file, (see lsb.params), hostSpec will be this value. Otherwise, hostSpec will be the name of the host with the largest CPU factor in the cluster.
+         */
+        public String hostSpec;
+
+        /**
+         * < The attributes of the queue.
+         */
+        public int qAttrib;
+
+        /**
+         * < The status of the queue.
+         */
+        public int qStatus;
+
+        /**
+         * < The maximum number of jobs dispatched by the queue and not yet finished.
+         */
+        public int maxJobs;
+
+        /**
+         * < Number of jobs in the queue, including pending, running, and suspended jobs.
+         */
+        public int numJobs;
+
+        /**
+         * < Number of pending jobs in the queue.
+         */
+        public int numPEND;
+
+        /**
+         * < Number of running jobs in the queue.
+         */
+        public int numRUN;
+
+        /**
+         * < Number of system suspended jobs in the queue.
+         */
+        public int numSSUSP;
+
+        /**
+         * < Number of user suspended jobs in the queue.
+         */
+        public int numUSUSP;
+
+        /**
+         * < The queue migration threshold in minutes.
+         */
+        public int mig;
+
+        /**
+         * < The number of seconds that a new job waits, before being scheduled. A value of zero (0) means the job is scheduled without any delay.
+         */
+        public int schedDelay;
+
+        /**
+         * < The number of seconds for a host to wait after dispatching a job to a host, before accepting a second job to dispatch to the same host.
+         */
+        public int acceptIntvl;
+
+        /**
+         * < A blank-separated list of time windows describing the dispatch window of the queue. When a queue's dispatch window is closed, no job from this queue will be dispatched.The default is no restriction, or always open (i.e., 24 hours a day, seven days a week). For the time window format, see windows (above).
+         */
+        public String windowsD;
+
+        /**
+         * < A blank-separated list of queue specifiers. Each queue specifier is of the form queue\@host where host is an NQS host name and queue is the name of a queue on that host.
+         */
+        public String nqsQueues;
+
+        /**
+         * < A blank-separated list of user shares. Each share is of the form [user, share] where user is a user name, a user group name, the reserved word default or the reserved word others, and share is the number of shares the user gets.
+         */
+        public String userShares;
+
+        /**
+         * < The value of DEFAULT_HOST_SPEC in the Queue section for this queue in the lsb.queues file.
+         */
+        public String defaultHostSpec;
+
+        /**
+         * < An LSF resource limit used to limit the number of job slots (processors) a (parallel) job in the queue will use. A job submitted to this queue must specify a number of processors not greater than this limit.
+         */
+        public int procLimit;
+
+        /**
+         * < A list of administrators of the queue. The users whose names are here are allowed to operate on the jobs in the queue and on the queue itself.
+         */
+        public String admins;
+
+        /**
+         * < Queue's pre-exec command. The command is executed before the real batch job is run on the execution host (or on the first host selected for a parallel batch job).
+         */
+        public String preCmd;
+
+        /**
+         * < Queue's post-exec command. The command is run when a job terminates.
+         */
+        public String postCmd;
+
+        /**
+         * < Jobs that exit with these values are automatically requeued.
+         */
+        public String requeueEValues;
+
+        /**
+         * < The maximum number of job slots a host can process from this queue, including job slots of dispatched jobs which have not finished yet and reserved slots for some PEND jobs. This limit controls the number of jobs sent to each host, regardless of a uniprocessor host or multiprocessor host. Default value for this limit is infinity.
+         */
+        public int hostJobLimit;
+
+        /**
+         * < Resource requirement string used to determine eligible hosts for a job.
+         */
+        public String resReq;
+
+        /**
+         * < Number of reserved job slots for pending jobs.
+         */
+        public int numRESERVE;
+
+        /**
+         * < The time used to hold the reserved job slots for a PEND job in this queue.
+         */
+        public int slotHoldTime;
+
+        /**
+         * < Remote MultiCluster send-jobs queues to forward jobs to.
+         */
+        public String sndJobsTo;
+
+        /**
+         * < Remote MultiCluster receive-jobs queues that can forward to this queue.
+         */
+        public String rcvJobsFrom;
+
+        /**
+         * < Resume threshold conditions for a suspended job in this queue.
+         */
+        public String resumeCond;
+
+        /**
+         * < Stop threshold conditions for a running job in this queue.
+         */
+        public String stopCond;
+
+        /**
+         * < Job starter command for a running job in this queue
+         */
+        public String jobStarter;
+
+        /**
+         * < Command configured for the SUSPEND action.
+         */
+        public String suspendActCmd;
+
+        /**
+         * < Command configured for the RESUME action.
+         */
+        public String resumeActCmd;
+
+        /**
+         * < Command configured for the TERMINATE action.
+         */
+        public String terminateActCmd;
+
+        /**
+         * < Configurable signal mapping
+         */
+        public int[] sigMap = new int[LSB_SIG_NUM];
+
+        /**
+         * < Preemptive scheduling and preemption policy specified for the queue.
+         */
+        public String preemption;
+
+        /**
+         * < Time period for a remote cluster to schedule a job. MultiCluster job forwarding model only. Determines how long a MultiCluster job stays pending in the execution cluster before returning to the submission cluster. The remote timeout limit in seconds is: \li MAX_RSCHED_TIME * MBD_SLEEP_TIME = timeout
+         */
+        public int maxRschedTime;
+
+
+        /**
+         * < Number of share accounts in the queue.
+         */
+        public int numOfSAccts;
+
+        /**
+         * < (Only used for queues with fairshare policy) a share account vector capturing the fairshare information of the users using the queue. The storage for the array of queueInfoEnt structures will be reused by the next call.
+         */
+        public Pointer /* shareAcctInfoEnt.ByReference */ shareAccts;
+
+        /**
+         * < The directory where the checkpoint files are created.
+         */
+        public String chkpntDir;
+
+        /**
+         * < The checkpoint period in minutes.
+         */
+        public int chkpntPeriod;
+
+        /**
+         * < MultiCluster job forwarding model only. Specifies the MultiCluster pending job limit for a receive-jobs queue. This represents the maximum number of MultiCluster import jobs that can be pending in the queue; once the limit has been reached, the queue stops accepting jobs from remote clusters.
+         */
+        public int imptJobBklg;
+
+        /**
+         * < The default (soft) resource limits for all jobs submitted to this queue (see getrlimit() and lsb.queues).
+         */
+        public int[] defLimits = new int[LibLsf.LSF_RLIM_NLIMITS];
+
+        /**
+         * < The maximum number of jobs allowed to be dispatched together in one job chunk. Must be a positive integer greater than 1.
+         */
+        public int chunkJobSize;
+
+        /**
+         * < The minimum number of job slots (processors) that a job in the queue will use.
+         */
+        public int minProcLimit;
+
+        /**
+         * < The default (soft) limit on the number of job slots (processors) that a job in the queue will use.
+         */
+        public int defProcLimit;
+
+        /**
+         * < The list of queues for cross-queue fairshare.
+         */
+        public String fairshareQueues;
+
+        /**
+         * < Default external scheduling for the queue.
+         */
+        public String defExtSched;
+
+        /**
+         * < Mandatory external scheduling options for the queue.
+         */
+        public String mandExtSched;
+
+        /**
+         * < Share of job slots for queue-based fairshare. Represents the percentage of running jobs (job slots) in use from the queue. SLOT_SHARE must be greater than zero (0) and less than or equal to 100. The sum of SLOT_SHARE for all queues in the pool does not need to be 100%. It can be more or less, depending on your needs.
+         */
+        public int slotShare;
+
+        /**
+         * < Name of the pool of job slots the queue belongs to for queue-based fairshare. A queue can only belong to one pool. All queues in the pool must share the same set of hosts. Specify any ASCII string up to 60 chars long. You can use letters, digits, underscores (_) or dashes (-). You cannot use blank spaces.
+         */
+        public String slotPool;
+
+        /**
+         * < Specifies a threshold for job underrun exception handling. If a job exits before the specified number of minutes, LSF invokes LSF_SERVERDIR/eadmin to trigger the action for a job underrun exception.
+         */
+        public int underRCond;
+
+        /**
+         * < Specifies a threshold for job overrun exception handling. If a job runs longer than the specified run time, LSF invokes LSF_SERVERDIR/eadmin to trigger the action for a job overrun exception.
+         */
+        public int overRCond;
+
+        /**
+         * < Specifies a threshold for idle job exception handling. The value should be a number between 0.0 and 1.0 representing CPU time/runtime. If the job idle factor is less than the specified threshold, LSF invokes LSF_SERVERDIR/eadmin to trigger the action for a job idle exception.
+         */
+        public float idleCond;
+
+        /**
+         * < The number of underrun jobs in the queue.
+         */
+        public int underRJobs;
+
+        /**
+         * < The number of overrun jobs in the queue.
+         */
+        public int overRJobs;
+
+        /**
+         * < The number of idle jobs in the queue.
+         */
+        public int idleJobs;
+
+        /**
+         * < Specifies the amount of time before a job control action occurs that a job warning action is to be taken. For example, 2 minutes before the job reaches run time limit or termination deadline, or the queue's run window is closed, an URG signal is sent to the job. Job action warning time is not normalized. A job action warning time must be specified with a job warning action in order for job warning to take effect.
+         */
+        public int warningTimePeriod;
+
+        /**
+         * < Specifies the job action to be taken before a job control action occurs. For example, 2 minutes before the job reaches run time limit or termination deadline, or the queue's run window is closed, an URG signal is sent to the job. A job warning action must be specified with a job action warning time in order for job warning to take effect. If specified, LSF sends the warning action to the job before the actual control action is taken. This allows the job time to save its resu [...]
+         */
+        public String warningAction;
+
+        /**
+         * < AdminAction - queue control message
+         */
+        public String qCtrlMsg;
+
+        /**
+         * < Accept resource request.
+         */
+        public String acResReq;
+
+        /**
+         * < Limit of running session scheduler jobs.
+         */
+        public int symJobLimit;
+
+        /**
+         * < cpu_req for service partition of session scheduler
+         */
+        public String cpuReq;
+
+        /**
+         * < Indicate whether it would be willing to donate/borrow.
+         */
+        public int proAttr;
+
+        /**
+         * < The maximum number of hosts to lend.
+         */
+        public int lendLimit;
+
+        /**
+         * < The grace period to lend/return idle hosts.
+         */
+        public int hostReallocInterval;
+
+        /**
+         * < Number of CPUs required by CPU provision.
+         */
+        public int numCPURequired;
+
+        /**
+         * < Number of CPUs actually allocated.
+         */
+        public int numCPUAllocated;
+
+        /**
+         * < Number of CPUs borrowed.
+         */
+        public int numCPUBorrowed;
+
+        /**
+         * < Number of CPUs lent.
+         */
+        public int numCPULent;
+        /* the number of reserved cpu(numCPUReserved) = numCPUAllocated - numCPUBorrowed + numCPULent */
+
+
+        /* the following fields are for real-time app(ex. murex) of symphony */
+
+        /**
+         * < Scheduling granularity. in milliseconds.
+         */
+        public int schGranularity;
+
+        /**
+         * < The grace period for stopping session scheduler tasks.
+         */
+        public int symTaskGracePeriod;
+
+        /**
+         * < Minimum number of SSMs.
+         */
+        public int minOfSsm;
+
+        /**
+         * < Maximum number of SSMs.
+         */
+        public int maxOfSsm;
+
+        /**
+         * < Number of allocated slots.
+         */
+        public int numOfAllocSlots;
+
+        /**
+         * < Service preemption policy.
+         */
+        public String servicePreemption;
+
+
+        /**
+         * < Dynamic cpu provision status.
+         */
+        public int provisionStatus;
+
+        /**
+         * < The minimum time for preemption and backfill, in seconds.
+         */
+        public int minTimeSlice;
+
+        /**
+         * < List of queues defined in a queue group for absolute priority scheduling (APS) across multiple queues.
+         */
+        public String queueGroup;
+
+        /**
+         * < The number of calculation factors for absolute priority scheduling (APS).
+         */
+        public int numApsFactors;
+
+        /**
+         * < List of calculation factors for absolute priority scheduling (APS)
+         */
+        public Pointer /* apsFactorInfo.ByReference */ apsFactorInfoList;
+
+        /**
+         * < The mapping of factors to subfactors for absolute priority scheduling (APS).
+         */
+        public Pointer /* apsFactorMap.ByReference */ apsFactorMaps;
+
+        /**
+         * < The mapping of factors to their long names for absolute priority scheduling (APS).
+         */
+        public Pointer /* apsLongNameMap.ByReference */ apsLongNames;
+
+        /**
+         * < Maximum number of job preempted times.
+         */
+        public int maxJobPreempt;
+
+        /**
+         * < Maximum number of pre-exec retry times.
+         */
+        public int maxPreExecRetry;
+
+        /**
+         * < Maximum number of pre-exec retry times for local cluster
+         */
+        public int localMaxPreExecRetry;
+
+        /**
+         * < Maximum number of job re-queue times.
+         */
+        public int maxJobRequeue;
+
+        /**
+         * < Use Linux-PAM
+         */
+        public int usePam;
+        /* compute unit exclusive */
+
+        /**
+         * < Compute unit type
+         */
+        public int cu_type_exclusive;
+
+        /**
+         * < A string specified in EXCLUSIVE=CU[\<string>]
+         */
+        public String cu_str_exclusive;
+
+        /**
+         * < Resource reservation limit
+         */
+        public String resRsvLimit;
+
+    }
+
+
+
+    /**
+     *  \addtogroup signal_action signal_action
+     *  define status for signal action
+     */
+
+    /**
+     * <  No action
+     */
+    public static final int ACT_NO = 0;
+
+    /**
+     * <  Start
+     */
+    public static final int ACT_START = 1;
+
+    /**
+     * <  Preempt
+     */
+    public static final int ACT_PREEMPT = 2;
+
+    /**
+     * <  Done
+     */
+    public static final int ACT_DONE = 3;
+
+    /**
+     * <  Fail
+     */
+    public static final int ACT_FAIL = 4;
+
+    /**
+     * \brief  host information entry.
+     */
+    public static class hostInfoEnt extends Structure {
+        public static class ByReference extends hostInfoEnt implements Structure.ByReference {}
+        public static class ByValue extends hostInfoEnt implements Structure.ByValue {}
+        public hostInfoEnt() {}
+        public hostInfoEnt(Pointer p) { super(p); read(); }
+
+
+        /**
+         * < The name of the host.
+         */
+        public String host;
+
+        /**
+         * < The status of the host. It is the bitwise inclusive OR of status values; see \ref host_status.
+         */
+        public int hStatus;
+
+        /**
+         * < Indicate host loadSched busy reason
+         */
+        public IntByReference busySched;
+
+        /**
+         * < Indicate host loadStop  busy reason.
+         */
+        public IntByReference busyStop;
+
+        /**
+         * < The host CPU factor used to scale CPU load values to account for differences in CPU speeds. The faster the CPU, the larger the CPU factor.
+         */
+        public float cpuFactor;
+
+        /**
+         * < The number of load indices in the load, loadSched and loadStop arrays.
+         */
+        public int nIdx;
+
+        /**
+         * < Load information array on a host. This array gives the load information that is used for  scheduling batch jobs. This load information  is the effective load information from \ref ls_loadofhosts (see \ref ls_loadofhosts) plus the load reserved for running jobs (see lsb.queues for details on resource reservation). The load array is indexed the same as loadSched and loadStop  (see loadSched and loadStop below).
+         */
+        public FloatByReference load;
+
+        /**
+         * < Stop scheduling new jobs if over
+         */
+        public FloatByReference loadSched;
+
+        /**
+         * < Stop jobs if over this load. The loadSched and loadStop arrays control batch job scheduling, suspension, and resumption. The values in the loadSched array specify the scheduling thresholds for the corresponding load indices. Only if the current values of all specified load indices of this host are within (below or above, depending on the meaning of the load index) the corresponding thresholds of this host, will jobs be scheduled to run on this host. Similarly, the values in  [...]
+         */
+        public FloatByReference loadStop;
+
+        /**
+         * < ASCII description of run windows. One or more time windows in a week during which batch jobs may be dispatched to run on this host. The default is no restriction, or always open (i.e., 24 hours a day seven days a week). These windows are similar to the dispatch windows of batch job queues. See \ref lsb_queueinfo.
+         */
+        public String windows;
+
+        /**
+         * < The maximum number of job slots any user is allowed to use on this host.
+         */
+        public int userJobLimit;
+
+        /**
+         * < The maximum number of job slots that the host can process concurrently.
+         */
+        public int maxJobs;
+
+        /**
+         * < The number of job slots running or suspended on the host.
+         */
+        public int numJobs;
+
+        /**
+         * < The number of job slots running on the host.
+         */
+        public int numRUN;
+
+        /**
+         * < The number of job slots suspended by the batch daemon on the host.
+         */
+        public int numSSUSP;
+
+        /**
+         * < The number of job slots suspended by the job submitter or the LSF system administrator.
+         */
+        public int numUSUSP;
+
+        /**
+         * < The migration threshold in minutes after which a suspended job will be considered for migration.
+         */
+        public int mig;
+
+
+        /**
+         * < The host attributes; the bitwise inclusive OR of some of \ref host_attributes
+         */
+        public int attr;
+        /**
+         *  \addtogroup host_attributes host_attributes
+         *  The host attributes
+         */
+
+        /**
+         * < This host can checkpoint jobs
+         */
+        public static final int H_ATTR_CHKPNTABLE = 0x1;
+
+        /**
+         * < This host provides kernel support for checkpoint copy.
+         */
+        public static final int H_ATTR_CHKPNT_COPY = 0x2;
+
+        /**
+         * < The effective load of the host.
+         */
+        public FloatByReference realLoad;
+
+        /**
+         * < The number of job slots reserved by LSF for the PEND jobs.
+         */
+        public int numRESERVE;
+
+        /**
+         * < If attr has an H_ATTR_CHKPNT_COPY attribute, chkSig is set to the signal which triggers  checkpoint and copy operation. Otherwise,  chkSig is set to the signal which triggers  checkpoint operation on the host
+         */
+        public int chkSig;
+
+
+        /**
+         * < Num of resource used by the consumer
+         */
+        public float cnsmrUsage;
+
+        /**
+         * < Num of resource used by the provider
+         */
+        public float prvdrUsage;
+
+        /**
+         * < Num of resource available for the consumer to use
+         */
+        public float cnsmrAvail;
+
+        /**
+         * < Num of resource available for the provider to use
+         */
+        public float prvdrAvail;
+
+        /**
+         * < Num maximum of resource available in total
+         */
+        public float maxAvail;
+
+        /**
+         * < The job exit rate threshold on the host
+         */
+        public float maxExitRate;
+
+        /**
+         * < Number of job exit rate on the host
+         */
+        public float numExitRate;
+
+        /**
+         * < AdminAction - host control message
+         */
+        public String hCtrlMsg;
+
+    }
+
+
+
+    /**
+     * \brief  Host information condition entry.
+     * Summarizes the per-status host counts for a host group.
+     * NOTE: JNA maps fields to the native struct in declaration order;
+     * do not reorder or remove fields.
+     */
+    public static class condHostInfoEnt extends Structure {
+        public static class ByReference extends condHostInfoEnt implements Structure.ByReference {}
+        public static class ByValue extends condHostInfoEnt implements Structure.ByValue {}
+        public condHostInfoEnt() {}
+        public condHostInfoEnt(Pointer p) { super(p); read(); }
+
+
+        /**
+         * < Host name
+         */
+        public String name;
+
+
+        /**
+         * < How many hosts are in the ok status
+         */
+        public int howManyOk;
+
+        /**
+         * < How many hosts are in the busy status
+         */
+        public int howManyBusy;
+
+        /**
+         * < How many hosts are in the closed status
+         */
+        public int howManyClosed;
+
+        /**
+         * < How many hosts are in the full status
+         */
+        public int howManyFull;
+
+        /**
+         * < How many hosts are in the unreach status
+         */
+        public int howManyUnreach;
+
+        /**
+         * < How many hosts are in the unavail status
+         */
+        public int howManyUnavail;
+
+
+        /**
+         * < The status of each host in the host group.
+         * Native pointer to an array of hostInfoEnt; the element count is
+         * presumably the sum of the howMany* counters -- TODO confirm
+         * against the libbat documentation.
+         */
+        public Pointer /* hostInfoEnt.ByReference */ hostInfo;
+
+    }
+
+
+
+    /**
+     * \brief One fairshare-adjustment key/value pair.  The key is one of the
+     * FAIR_ADJUST_* index names; pairs are carried in shareAdjustPair.
+     * NOTE: JNA maps fields to the native struct in declaration order;
+     * do not reorder or remove fields.
+     */
+    public static class adjustParam extends Structure {
+        public static class ByReference extends adjustParam implements Structure.ByReference {}
+        public static class ByValue extends adjustParam implements Structure.ByValue {}
+        public adjustParam() {}
+        public adjustParam(Pointer p) { super(p); read(); }
+
+
+        /**
+         * < Key name of the share adjustment
+         */
+        public String key;
+
+        /**
+         * < Value of the key
+         */
+        public float value;
+    }
+
+
+
+
+/* Indices naming the fairshare-adjustment key/value pairs (adjustParam.key)
+   passed to the fairshare adjustment plugin. */
+
+/* cpu time factor */
+    public static final int FAIR_ADJUST_CPU_TIME_FACTOR = 0;
+
+/* run time factor */
+    public static final int FAIR_ADJUST_RUN_TIME_FACTOR = 1;
+
+/* run job factor */
+    public static final int FAIR_ADJUST_RUN_JOB_FACTOR = 2;
+
+/* committed run time factor */
+    public static final int FAIR_ADJUST_COMMITTED_RUN_TIME_FACTOR = 3;
+
+/* enable hist run time */
+    public static final int FAIR_ADJUST_ENABLE_HIST_RUN_TIME = 4;
+
+/* cpu time of finished jobs with decay */
+    public static final int FAIR_ADJUST_HIST_CPU_TIME = 5;
+
+/* cpu time of finished jobs within the decay window (presumably the
+   newly-accrued portion, as opposed to FAIR_ADJUST_HIST_CPU_TIME --
+   TODO confirm against the LSF documentation) */
+    public static final int FAIR_ADJUST_NEW_USED_CPU_TIME = 6;
+
+/* total time that job spend in RUN state */
+    public static final int FAIR_ADJUST_RUN_TIME = 7;
+
+/* historical run time of finished jobs */
+    public static final int FAIR_ADJUST_HIST_RUN_TIME = 8;
+
+/* committed run time of started jobs */
+    public static final int FAIR_ADJUST_COMMITTED_RUN_TIME = 9;
+
+/* number of job slots used by started jobs */
+    public static final int FAIR_ADJUST_NUM_START_JOBS = 10;
+
+/* number of reserved slots used by pending jobs */
+    public static final int FAIR_ADJUST_NUM_RESERVE_JOBS = 11;
+
+/* total amount of memory used by started jobs */
+    public static final int FAIR_ADJUST_MEM_USED = 12;
+
+/* average memory allocated per slot */
+    public static final int FAIR_ADJUST_MEM_ALLOCATED = 13;
+
+/* total number of fairshare adjustment key value pairs */
+    public static final int FAIR_ADJUST_KVPS_SUM = 14;
+
+    //public String[] FairAdjustPairArrayName = new String[FAIR_ADJUST_KVPS_SUM];
+
+    /**
+     * \brief One share account's data passed to the fairshare adjustment
+     * plugin: who holds the share, which policy provides it, and the
+     * adjustParam key/value pairs.
+     * NOTE: JNA maps the instance fields to the native struct in
+     * declaration order; do not reorder or remove them.  The static
+     * SHAREACCTTYPE* constants are not part of the native layout.
+     */
+    public static class shareAdjustPair extends Structure {
+        public static class ByReference extends shareAdjustPair implements Structure.ByReference {}
+        public static class ByValue extends shareAdjustPair implements Structure.ByValue {}
+        public shareAdjustPair() {}
+        public shareAdjustPair(Pointer p) { super(p); read(); }
+
+
+        /**
+         * < Queue share account.
+         * Declared final for consistency with SHAREACCTTYPEHP and
+         * SHAREACCTTYPESLA; it is a constant flag, not a struct field.
+         */
+        public static final int SHAREACCTTYPEQUEUE = 0x01;
+
+        /**
+         * < Host partition share account
+         */
+        public static final int SHAREACCTTYPEHP = 0x02;
+
+        /**
+         * < SLA share account
+         */
+        public static final int SHAREACCTTYPESLA = 0x04;
+
+        /**
+         * < Type of share account (one of the SHAREACCTTYPE* flags)
+         */
+        public int shareAcctType;
+
+        /**
+         * < Name of the share holder that uses the share
+         */
+        public String holderName;
+
+        /**
+         * < Name of the provider policy (name of queue, host partition or SLA)
+         */
+        public String providerName;
+
+        /**
+         * < Number of share adjustment key/value pairs
+         */
+        public int numPair;
+
+        /**
+         * < Share adjustment key/value pairs (native pointer to an array of
+         * adjustParam of length numPair)
+         */
+        public Pointer /* adjustParam.ByReference */ adjustParam;
+    }
+
+
+
+    // NOTE: Not in libbat
+    //public static native float fairshare_adjustment(shareAdjustPair shareAdjustPair1);
+
+/* For lsb_hostpartinfo() call */
+
+    /**
+     * \brief   gets user information about host partitions.
+     * NOTE(review): per the text found (misplaced) in the upstream priority
+     * field documentation, the storage for the array of hostPartInfoEnt
+     * structures returned by \ref lsb_hostpartinfo is reused by the next
+     * call -- callers should copy what they need.
+     * NOTE: JNA maps fields to the native struct in declaration order;
+     * do not reorder or remove fields.
+     */
+    public static class hostPartUserInfo extends Structure {
+        public static class ByReference extends hostPartUserInfo implements Structure.ByReference {}
+        public static class ByValue extends hostPartUserInfo implements Structure.ByValue {}
+        public hostPartUserInfo() {}
+        public hostPartUserInfo(Pointer p) { super(p); read(); }
+
+
+        /**
+         * < The user name or user group name.  See \ref lsb_userinfo  and \ref lsb_usergrpinfo
+         */
+        public String user;
+
+        /**
+         * < The number of shares assigned to the user or user group, as configured in the file lsb.hosts. (See lsb.hosts.)
+         */
+        public int shares;
+
+        /**
+         * < The priority of the user or user group to use the host partition. Bigger values represent higher priorities. Jobs belonging to the user or user group with the highest priority are considered first for dispatch when resources in the host partition are being contended for. In general, a user or user group with more shares, fewer numStartJobs and less histCpuTime has higher priority.
+         */
+        public float priority;
+
+        /**
+         * < The number of job slots belonging to the user or user group that are running or suspended in the host partition.
+         */
+        public int numStartJobs;
+
+        /**
+         * < The normalized CPU time accumulated in the host partition during the recent period by finished jobs belonging to the user or user group. The period may be configured in the file lsb.params (see lsb.params), with a default value of five (5) hours.
+         */
+        public float histCpuTime;
+
+        /**
+         * < The number of job slots that are reserved for the PEND jobs belonging to the user or user group in the host partition.
+         */
+        public int numReserveJobs;
+
+        /**
+         * < The time unfinished jobs spend  in RUN state
+         */
+        public int runTime;
+
+        /**
+         * < The fairshare adjustment value from the fairshare plugin  (libfairshareadjust.ByReference ). The adjustment is enabled and weighted by setting the value of FAIRSHARE_ADJUSTMENT_FACTOR in lsb.params.
+         */
+        public float shareAdjustment;
+    }
+
+
+
+/* For lsb_hostpartinfo() call */
+
+    /**
+     * \brief  gets information entry about host partitions.
+     * NOTE: JNA maps fields to the native struct in declaration order;
+     * do not reorder or remove fields.
+     */
+    public static class hostPartInfoEnt extends Structure {
+        public static class ByReference extends hostPartInfoEnt implements Structure.ByReference {}
+        public static class ByValue extends hostPartInfoEnt implements Structure.ByValue {}
+        public hostPartInfoEnt() {}
+        public hostPartInfoEnt(Pointer p) { super(p); read(); }
+
+
+        /**
+         * < The name of the host partition (fixed-size native char array of
+         * MAX_LSB_NAME_LEN bytes, hence byte[] rather than String)
+         */
+        public byte[] hostPart = new byte[MAX_LSB_NAME_LEN];
+
+        /**
+         * < A blank-separated list of names of hosts and host groups which are members of the host partition. The name of a host group has a '/' appended. see \ref lsb_hostgrpinfo.
+         */
+        public String hostList;
+
+        /**
+         * < The number of users in this host partition. i.e., the number of hostPartUserInfo structures.
+         */
+        public int numUsers;
+
+        /**
+         * < An array of hostPartUserInfo structures which hold information on users in this host partition (native pointer, numUsers elements).
+         */
+        public Pointer /* hostPartUserInfo.ByReference */ users;
+    }
+
+
+
+/* Library representation of the share account */
+
+    /**
+     * \brief Library representation of the share account.
+     * NOTE: JNA maps fields to the native struct in declaration order;
+     * do not reorder or remove fields.
+     */
+    public static class shareAcctInfoEnt extends Structure {
+        public static class ByReference extends shareAcctInfoEnt implements Structure.ByReference {}
+        public static class ByValue extends shareAcctInfoEnt implements Structure.ByValue {}
+        public shareAcctInfoEnt() {}
+        public shareAcctInfoEnt(Pointer p) { super(p); read(); }
+
+
+        /**
+         * < The user name or user group name. (See \ref lsb_userinfo and \ref lsb_usergrpinfo.)
+         */
+        public String shareAcctPath;
+
+        /**
+         * < The number of shares assigned to  the user or user group, as configured in the file lsb.queues.
+         */
+        public int shares;
+
+        /**
+         * < The priority of the user or user group in the fairshare queue. Larger values represent higher priorities. Job belonging to the user or user group with the highest priority are considered first for dispatch in the fairshare queue. In general, a user or user group with more shares, fewer numStartJobs and less histCpuTime has higher priority.
+         */
+        public float priority;
+
+        /**
+         * < The number of job slots (belonging to the user or user group) that are running or suspended in the fairshare queue.
+         */
+        public int numStartJobs;
+
+        /**
+         * < The normalized CPU time accumulated in the fairshare queue by jobs belonging to the user or user group, over the time period configured in the file lsb.params. The default time period is 5 hours.
+         */
+        public float histCpuTime;
+
+        /**
+         * < The number of job slots that are reserved for the PEND jobs belonging to the user or user group in the host partition.
+         */
+        public int numReserveJobs;
+
+        /**
+         * < The time unfinished jobs spend in the RUN state.
+         */
+        public int runTime;
+
+        /**
+         * < The fairshare adjustment value from the fairshare plugin  (libfairshareadjust.SOEXT). The adjustment is enabled and weighted  by setting the value of FAIRSHARE_ADJUSTMENT_FACTOR in lsb.params.
+         */
+        public float shareAdjustment;
+    }
+
+
+
+/* boundaries and default value used by mbatchd for the maxJobId */
+    public static final int DEF_MAX_JOBID = 999999;
+    /* lowest configurable maximum job id (equals DEF_MAX_JOBID) */
+    public static final int MAX_JOBID_LOW = 999999;
+    public static final int MAX_JOBID_HIGH = (LibLsf.INFINIT_INT - 1);
+
+
+/* default preemption wait time (seconds) */
+    public static final int DEF_PREEMPTION_WAIT_TIME = 300;
+
+/* default number of hosts specified by -m */
+    public static final int DEF_MAX_ASKED_HOSTS = 512;
+
+/* For lsb_parameterinfo() call */
+
+    /**
+     * \brief The parameterInfo structure contains the following fields:
+     * mirrors the native struct returned by lsb_parameterinfo(), carrying
+     * the cluster-wide configuration from lsb.params.
+     * NOTE: JNA maps fields to the native struct in declaration order;
+     * do not reorder or remove fields.
+     */
+    public static class parameterInfo extends Structure {
+        public static class ByReference extends parameterInfo implements Structure.ByReference {}
+        public static class ByValue extends parameterInfo implements Structure.ByValue {}
+        public parameterInfo() {}
+        public parameterInfo(Pointer p) { super(p); read(); }
+
+
+        /**
+         * < DEFAULT_QUEUE: A blank-separated list of queue names for automatic queue selection.
+         */
+        public String defaultQueues;
+
+        /**
+         * < DEFAULT_HOST_SPEC: The host name or host model name used as the system default for scaling CPULIMIT and RUNLIMIT.
+         */
+        public String defaultHostSpec;
+
+        /**
+         * < MBD_SLEEP_TIME: The interval in seconds at which the mbatchd dispatches jobs.
+         */
+        public int mbatchdInterval;
+
+        /**
+         * < SBD_SLEEP_TIME: The interval in seconds at which the sbatchd suspends or resumes jobs.
+         */
+        public int sbatchdInterval;
+
+        /**
+         * < JOB_ACCEPT_INTERVAL: The interval at which  a host accepts two successive jobs. (In units of SBD_SLEEP_TIME.)
+         */
+        public int jobAcceptInterval;
+
+        /**
+         * < MAX_RETRY: The maximum number of retries for dispatching a job.
+         */
+        public int maxDispRetries;
+
+        /**
+         * < MAX_SBD_FAIL: The maximum number of retries for reaching an sbatchd.
+         */
+        public int maxSbdRetries;
+
+        /**
+         * < PREEM_PERIOD: The interval in seconds for preempting jobs running on the same host.
+         */
+        public int preemptPeriod;
+
+        /**
+         * < CLEAN_PERIOD: The interval in seconds during which finished jobs are kept in core.
+         */
+        public int cleanPeriod;
+
+        /**
+         * < MAX_JOB_NUM: The maximum number of finished jobs that are logged in the current event file.
+         */
+        public int maxNumJobs;
+
+        /**
+         * < HIST_HOURS: The number of hours of resource consumption history used for fair share scheduling and scheduling within a host partition.
+         */
+        public float historyHours;
+
+        /**
+         * < PG_SUSP_IT: The interval a host must be idle before resuming a job suspended for excessive paging.
+         */
+        public int pgSuspendIt;
+
+        /**
+         * < The default project assigned to jobs.
+         */
+        public String defaultProject;
+
+        /**
+         * < Job submission retry interval
+         */
+        public int retryIntvl;
+
+        /**
+         * < For Cray NQS compatibility only. Used by LSF to get the NQS queue information
+         */
+        public int nqsQueuesFlags;
+
+        /**
+         * < nqsRequestsFlags
+         */
+        public int nqsRequestsFlags;
+
+        /**
+         * < The maximum number of times to attempt the preexecution command of a job from a remote cluster ( MultiCluster only)
+         */
+        public int maxPreExecRetry;
+
+        /**
+         * < Maximum number of pre-exec retry times for local cluster
+         */
+        public int localMaxPreExecRetry;
+
+        /**
+         * < Event watching Interval in seconds
+         */
+        public int eventWatchTime;
+
+        /**
+         * < Run time weighting factor for fairshare scheduling
+         */
+        public float runTimeFactor;
+
+        /**
+         * < Used for calculation of the fairshare scheduling formula
+         */
+        public float waitTimeFactor;
+
+        /**
+         * < Job slots weighting factor for fairshare scheduling
+         */
+        public float runJobFactor;
+
+        /**
+         * < Default check interval
+         */
+        public int eEventCheckIntvl;
+
+        /**
+         * < sbatchd report every sbd_sleep_time
+         */
+        public int rusageUpdateRate;
+
+        /**
+         * < sbatchd updates jobs jRusage in mbatchd if more than 10% changes
+         */
+        public int rusageUpdatePercent;
+
+        /**
+         * < Time period to check for reconfig
+         */
+        public int condCheckTime;
+
+        /**
+         * < The maximum number of connections between master and slave batch daemons
+         */
+        public int maxSbdConnections;
+
+        /**
+         * < The interval for rescheduling jobs
+         */
+        public int rschedInterval;
+
+        /**
+         * < Max time mbatchd stays in scheduling routine, after which take a breather
+         */
+        public int maxSchedStay;
+
+        /**
+         * < During which load remains fresh
+         */
+        public int freshPeriod;
+
+        /**
+         * < The preemption behavior, GROUP_MAX, GROUP_JLP, USER_JLP, HOST_JLU,MINI_JOB, LEAST_RUN_TIME
+         */
+        public int preemptFor;
+
+        /**
+         * < Flags whether users can resume their jobs when suspended by the LSF administrator
+         */
+        public int adminSuspend;
+
+        /**
+         * < Flags to enable/disable normal user to create advance reservation
+         */
+        public int userReservation;
+
+        /**
+         * < CPU time weighting factor for fairshare scheduling
+         */
+        public float cpuTimeFactor;
+
+        /**
+         * < The starting month for a fiscal year
+         */
+        public int fyStart;
+
+        /**
+         * < The maximum number of jobs in a job array
+         */
+        public int maxJobArraySize;
+
+        /**
+         * < Replay period for exceptions, in seconds
+         */
+        public NativeLong exceptReplayPeriod;
+
+        /**
+         * < The interval to terminate a job
+         */
+        public int jobTerminateInterval;
+
+        /**
+         * < User level account mapping for remote jobs is disabled
+         */
+        public int disableUAcctMap;
+
+        /**
+         * < If set to TRUE, Project name for a job will be considered when doing fairshare scheduling, i.e., as if user has submitted jobs using -G
+         */
+        public int enforceFSProj;
+
+        /**
+         * < Enforces the check to see if the invoker of bsub is in the specified group when the -P option is used
+         */
+        public int enforceProjCheck;
+
+        /**
+         * < Run time for a job
+         */
+        public int jobRunTimes;
+
+        /**
+         * < Event table Job default interval
+         */
+        public int dbDefaultIntval;
+
+        /**
+         * < Event table Job Host Count
+         */
+        public int dbHjobCountIntval;
+
+        /**
+         * < Event table Job Queue Count
+         */
+        public int dbQjobCountIntval;
+
+        /**
+         * < Event table Job User Count
+         */
+        public int dbUjobCountIntval;
+
+        /**
+         * < Event table Job Resource Interval
+         */
+        public int dbJobResUsageIntval;
+
+        /**
+         * < Event table Resource Load Interval
+         */
+        public int dbLoadIntval;
+
+        /**
+         * < Event table Job Info
+         */
+        public int dbJobInfoIntval;
+
+        /**
+         * < Used with job dependency scheduling
+         */
+        public int jobDepLastSub;
+
+        /**
+         * < Used with job dependency scheduling,  deprecated
+         */
+        public int maxJobNameDep;
+
+        /**
+         * < Select resources to be logged
+         */
+        public String dbSelectLoad;
+
+        /**
+         * < Job synchronizes its group status
+         */
+        public int jobSynJgrp;
+
+        /**
+         * < The batch jobs' temporary output directory
+         */
+        public String pjobSpoolDir;
+
+
+        /**
+         * < Maximal job priority defined for all users
+         */
+        public int maxUserPriority;
+
+        /**
+         * < Job priority is increased by the system dynamically based on waiting time
+         */
+        public int jobPriorityValue;
+
+        /**
+         * < Waiting time to increase Job priority by the system dynamically
+         */
+        public int jobPriorityTime;
+
+        /**
+         * < Enable internal statistical adjustment
+         */
+        public int enableAutoAdjust;
+
+        /**
+         * < Start to autoadjust when the user has  this number of pending jobs
+         */
+        public int autoAdjustAtNumPend;
+
+        /**
+         * < If this number of jobs has been visited skip the user
+         */
+        public float autoAdjustAtPercent;
+
+        /**
+         * <  Static shared resource update interval for the cluster actor
+         */
+        public int sharedResourceUpdFactor;
+
+        /**
+         * < Schedule job based on raw load info
+         */
+        public int scheRawLoad;
+
+        /**
+         * <  The batch jobs' external storage for attached data
+         */
+        public String jobAttaDir;
+
+        /**
+         * < Maximum message number for each job
+         */
+        public int maxJobMsgNum;
+
+        /**
+         * < Maximum attached data size to be transferred for each message
+         */
+        public int maxJobAttaSize;
+
+        /**
+         * < The life time of a child MBD to serve queries in the MT way
+         */
+        public int mbdRefreshTime;
+
+        /**
+         * < The interval of the execution cluster updating the job's resource usage
+         */
+        public int updJobRusageInterval;
+
+        /**
+         * < The account to which all windows workgroup users are to be mapped
+         */
+        public String sysMapAcct;
+
+        /**
+         * < Dispatch delay internal
+         */
+        public int preExecDelay;
+
+        /**
+         * < Update duplicate event interval
+         */
+        public int updEventUpdateInterval;
+
+        /**
+         * < Resources are reserved for parallel jobs on a per-slot basis
+         */
+        public int resourceReservePerSlot;
+
+        /**
+         * < Maximum job id --- read from the lsb.params
+         */
+        public int maxJobId;
+
+        /**
+         * < Define a list of preemptable resource  names
+         */
+        public String preemptResourceList;
+
+        /**
+         * < The preemption wait time
+         */
+        public int preemptionWaitTime;
+
+        /**
+         * < Maximum number of rollover lsb.acct files kept by mbatchd.
+         */
+        public int maxAcctArchiveNum;
+
+        /**
+         * < mbatchd Archive Interval
+         */
+        public int acctArchiveInDays;
+
+        /**
+         * < mbatchd Archive threshold
+         */
+        public int acctArchiveInSize;
+
+        /**
+         * < Committed run time weighting factor
+         */
+        public float committedRunTimeFactor;
+
+        /**
+         * < Enable the use of historical run time in the calculation of fairshare scheduling priority, Disable the use of historical run time in the calculation of fairshare scheduling priority
+         */
+        public int enableHistRunTime;
+
+/*#ifdef PS_SXNQS */
+/* Kept for reference: on PS_SXNQS native builds the struct has an extra
+   field here (NQS resource usage update interval); this binding targets
+   builds without it. */
+/**< NQS resource usage update interval */
+/*    public int   nqsUpdateInterval;*/
+/*#endif */
+
+        /**
+         * < Open lease reclaim time
+         */
+        public int mcbOlmReclaimTimeDelay;
+
+        /**
+         * < Enable chunk job dispatch for jobs with CPU limit or run limits
+         */
+        public int chunkJobDuration;
+
+        /**
+         * < The interval for scheduling jobs by scheduler daemon
+         */
+        public int sessionInterval;
+
+        /**
+         * < The number of jobs per user per queue whose pending reason is published at the PEND_REASON_UPDATE_INTERVAL interval
+         */
+        public int publishReasonJobNum;
+
+        /**
+         * < The interval for publishing job pending reason by scheduler daemon
+         */
+        public int publishReasonInterval;
+
+        /**
+         * < Interval(in seconds) of pending reason  publish for all jobs
+         */
+        public int publishReason4AllJobInterval;
+
+        /**
+         * < MC pending reason update interval (0 means no updates)
+         */
+        public int mcUpdPendingReasonInterval;
+
+        /**
+         * < MC pending reason update package size (0 means no limit)
+         */
+        public int mcUpdPendingReasonPkgSize;
+
+        /**
+         * < No preemption if the run time is greater  than the value defined in here
+         */
+        public int noPreemptRunTime;
+
+        /**
+         * < No preemption if the finish time is less than the value defined in here
+         */
+        public int noPreemptFinishTime;
+
+        /**
+         * < mbatchd Archive Time
+         */
+        public String acctArchiveAt;
+
+        /**
+         * < Absolute run limit for job
+         */
+        public int absoluteRunLimit;
+
+        /**
+         * < The job exit rate duration
+         */
+        public int lsbExitRateDuration;
+
+        /**
+         * <  The duration to trigger eadmin
+         */
+        public int lsbTriggerDuration;
+
+        /**
+         * < Maximum time for job information query commands (for example,with bjobs) to wait
+         */
+        public int maxJobinfoQueryPeriod;
+
+        /**
+         * < Job submission retrial interval for client
+         */
+        public int jobSubRetryInterval;
+
+        /**
+         * < System wide max pending jobs
+         */
+        public int pendingJobThreshold;
+
+
+        /**
+         * < Max number of concurrent query
+         */
+        public int maxConcurrentJobQuery;
+
+        /**
+         * < Min event switch time period
+         */
+        public int minSwitchPeriod;
+
+
+        /**
+         * < Condense pending reasons enabled
+         */
+        public int condensePendingReasons;
+
+        /**
+         * < Schedule Parallel jobs based on slots instead of CPUs
+         */
+        public int slotBasedParallelSched;
+
+        /**
+         * < Disable user job movement operations, like btop/bbot.
+         */
+        public int disableUserJobMovement;
+
+        /**
+         * < Detect and report idle jobs only after specified minutes.
+         */
+        public int detectIdleJobAfter;
+
+        /**
+         * < Use symbolic when specifying priority of symphony jobs
+         */
+        public int useSymbolPriority;
+
+        /**
+         * < Priority rounding for symphony jobs
+         */
+        public int JobPriorityRound;
+
+        /**
+         * < The mapping of the symbolic priority  for symphony jobs
+         */
+        public String priorityMapping;
+
+        /**
+         * < Maximum number of subdirectories under LSB_SHAREDIR/cluster/logdir/info
+         */
+        public int maxInfoDirs;
+
+        /**
+         * < The minimum period of a child MBD to serve queries in the MT way
+         */
+        public int minMbdRefreshTime;
+
+        /**
+         * < Stop asking license to LS not due to lack license
+         */
+        public int enableStopAskingLicenses2LS;
+
+        /**
+         * < Expire time for finished job which will not taken into account when calculating queue fairshare priority
+         */
+        public int expiredTime;
+
+        /**
+         * < MBD child query processes will only run on the following CPUs
+         */
+        public String mbdQueryCPUs;
+
+        /**
+         * < The default application profile assigned to jobs
+         */
+        public String defaultApp;
+
+        /**
+         * < Enable or disable data streaming
+         */
+        public int enableStream;
+
+        /**
+         * < File to which lsbatch data is streamed
+         */
+        public String streamFile;
+
+        /**
+         * < File size in MB to which lsbatch data is streamed
+         */
+        public int streamSize;
+
+        /**
+         * < Sync up host status with master LIM is enabled
+         */
+        public int syncUpHostStatusWithLIM;
+
+        /**
+         * < Project scheduling default SLA
+         */
+        public String defaultSLA;
+
+        /**
+         * < EGO Enabled SLA scheduling timer period
+         */
+        public int slaTimer;
+
+        /**
+         * < EGO Enabled SLA scheduling time to live
+         */
+        public int mbdEgoTtl;
+
+        /**
+         * < EGO Enabled SLA scheduling connection timeout
+         */
+        public int mbdEgoConnTimeout;
+
+        /**
+         * < EGO Enabled SLA scheduling read timeout
+         */
+        public int mbdEgoReadTimeout;
+
+        /**
+         * < EGO Enabled SLA scheduling use MXJ flag
+         */
+        public int mbdUseEgoMXJ;
+
+        /**
+         * < EGO Enabled SLA scheduling reclaim by queue
+         */
+        public int mbdEgoReclaimByQueue;
+
+        /**
+         * < EGO Enabled SLA scheduling default velocity
+         */
+        public int defaultSLAvelocity;
+
+        /**
+         * < Type of host exit rate exception handling types: EXIT_RATE_TYPE
+         */
+        public String exitRateTypes;
+
+        /**
+         * < Type of host exit rate exception handling types: GLOBAL_EXIT_RATE
+         */
+        public float globalJobExitRate;
+
+        /**
+         * < Type of host exit rate exception handling types ENABLE_EXIT_RATE_PER_SLOT
+         */
+        public int enableJobExitRatePerSlot;
+
+        /**
+         * < Performance metrics monitor is enabled  flag
+         */
+        public int enableMetric;
+
+        /**
+         * < Performance metrics monitor sample period flag
+         */
+        public int schMetricsSample;
+
+        /**
+         * < Used to bound: (1) factors, (2) weights, and (3) APS values
+         */
+        public float maxApsValue;
+
+        /**
+         * < Child mbatchd gets updated information about new jobs from the parent mbatchd
+         */
+        public int newjobRefresh;
+
+        /**
+         * < Job type to preempt, PREEMPT_JOBTYPE_BACKFILL, PREEMPT_JOBTYPE_EXCLUSIVE
+         */
+        public int preemptJobType;
+
+        /**
+         * < The default job group assigned to jobs
+         */
+        public String defaultJgrp;
+
+        /**
+         * < Max ratio between run limit and runtime estimation
+         */
+        public int jobRunlimitRatio;
+
+        /**
+         * < Enable the post-execution processing of the job to be included as part of the job flag
+         */
+        public int jobIncludePostproc;
+
+        /**
+         * < Timeout of post-execution processing
+         */
+        public int jobPostprocTimeout;
+
+        /**
+         * < The interval, in seconds, for updating the session scheduler status summary
+         */
+        public int sschedUpdateSummaryInterval;
+
+        /**
+         * < The number of completed tasks for updating the session scheduler status summary
+         */
+        public int sschedUpdateSummaryByTask;
+
+        /**
+         * < The maximum number of times a task can be requeued via requeue exit values
+         */
+        public int sschedRequeueLimit;
+
+        /**
+         * < The maximum number of times a task can be retried after a dispatch error
+         */
+        public int sschedRetryLimit;
+
+        /**
+         * < The maximum number of tasks that can be submitted in one session
+         */
+        public int sschedMaxTasks;
+
+        /**
+         * < The maximum run time of a single task
+         */
+        public int sschedMaxRuntime;
+
+        /**
+         * < The output directory for task accounting files
+         */
+        public String sschedAcctDir;
+
+        /**
+         * < If TRUE enable the job group automatic deletion functionality (default is FALSE).
+         */
+        public int jgrpAutoDel;
+
+        /**
+         * < Maximum number of job preempted times
+         */
+        public int maxJobPreempt;
+
+        /**
+         * < Maximum number of job re-queue times
+         */
+        public int maxJobRequeue;
+
+        /**
+         * < No preempt run time percent
+         */
+        public int noPreemptRunTimePercent;
+
+        /**
+         * < No preempt finish time percent
+         */
+        public int noPreemptFinishTimePercent;
+
+
+        /**
+         * < The reservation request being within JL/U.
+         */
+        public int slotReserveQueueLimit;
+
+        /**
+         * < Job accept limit percentage.
+         */
+        public int maxJobPercentagePerSession;
+
+        /**
+         * < The low priority job will use the slots freed by preempted jobs.
+         */
+        public int useSuspSlots;
+
+
+        /**
+         * < Maximum number of the backup stream.utc files
+         */
+        public int maxStreamFileNum;
+
+        /**
+         * < If enforced only admin can use bkill -r option
+         */
+        public int privilegedUserForceBkill;
+
+        /**
+         * < It controls the remote queue selection flow.
+         */
+        public int mcSchedulingEnhance;
+
+        /**
+         * < It controls update interval of the counters  and other original data in MC implementation
+         */
+        public int mcUpdateInterval;
+
+        /**
+         * < Jobs run on only on hosts belonging to the intersection of the queue the job was submitted to, advance reservation hosts, and any hosts specified by bsub -m at the time of submission.
+         */
+        public int intersectCandidateHosts;
+
+        /**
+         * < Enforces the limitations of a single specified user group.
+         */
+        public int enforceOneUGLimit;
+
+        /**
+         * < Enable or disable logging runtime estimation exceeded event
+         */
+        public int logRuntimeESTExceeded;
+
+        /**
+         * < Compute unit types.
+         */
+        public String computeUnitTypes;
+
+        /**
+         * < Fairshare adjustment weighting factor
+         */
+        public float fairAdjustFactor;
+
+        /**
+         * < abs runtime and cputime for LSF simulator
+         */
+        public int simAbsoluteTime;
+
+        /**
+         * < switch for job exception enhancement
+         */
+        public int extendJobException;
+    }
+
+    /* end of parameterInfo */
+
+
+/* Bit flags for the parameterInfo.preemptFor parameter */
+    public static final int GROUP_MAX = 0x0001;
+    public static final int GROUP_JLP = 0x0002;
+    public static final int USER_JLP = 0x0004;
+    public static final int HOST_JLU = 0x0008;
+
+/* minimum of job */
+    public static final int MINI_JOB = 0x0010;
+
+/* least run time */
+    public static final int LEAST_RUN_TIME = 0x0020;
+
+/* optimal mini job */
+    public static final int OPTIMAL_MINI_JOB = 0x0040;
+
+/* Bit flags for the parameterInfo.mcSchedulingEnhance parameter
+*  (MultiCluster remote-queue selection) */
+    public static final int RESOURCE_ONLY = 0x0001;
+    public static final int COUNT_PREEMPTABLE = 0x0002;
+    public static final int HIGH_QUEUE_PRIORITY = 0x0004;
+    public static final int PREEMPTABLE_QUEUE_PRIORITY = 0x0008;
+    public static final int PENDING_WHEN_NOSLOTS = 0x0010;
+
+/* Options for the bcaladd, bcalmod and bcaldel calendar commands */
+    public static final int CAL_FORCE = 0x0001;
+
+/* Bit flags for the preemptJobType parameter,
+*  used to enable backfill and exclusive
+*  preemption */
+    public static final int PREEMPT_JOBTYPE_EXCLUSIVE = 0x0001;
+    public static final int PREEMPT_JOBTYPE_BACKFILL = 0x0002;
+
+/* For lsb_calendarinfo() call */
+
+    /**
+     * \brief  Calendar information entry, as returned by the lsb_calendarinfo() call.
+     * JNA note: instance-field declaration order mirrors the native struct layout; do not reorder.
+     */
+    public static class calendarInfoEnt extends Structure {
+        public static class ByReference extends calendarInfoEnt implements Structure.ByReference {}
+        public static class ByValue extends calendarInfoEnt implements Structure.ByValue {}
+        public calendarInfoEnt() {}
+        public calendarInfoEnt(Pointer p) { super(p); read(); }
+
+
+        /**
+         * < The name of the calendar.
+         */
+        public String name;
+
+        /**
+         * < A description string associated with the calendar.
+         */
+        public String desc;
+
+        /**
+         * < Calendar expression.
+         */
+        public String calExpr;
+
+        /**
+         * < User name of the calendar owner.
+         */
+        public String userName;
+
+        /**
+         * < Calendar status.
+         */
+        public int status;
+
+        /**
+         * < Reserved for future use.
+         */
+        public int options;
+
+        /**
+         * < Last time event of the calendar.
+         */
+        public int lastDay;
+
+        /**
+         * < Next time event of the calendar.
+         */
+        public int nextDay;
+
+        /**
+         * < Creation time.
+         */
+        public NativeLong creatTime;
+
+        /**
+         * < Last modification time.
+         */
+        public NativeLong lastModifyTime;
+
+        /**
+         * < Type of calendar, etc.
+         */
+        public int flags;
+    }
+
+
+
+/* Calendar query option: select all calendars */
+    public static final int ALL_CALENDARS = 0x1;
+
+/* Event query option and event states */
+    public static final int EVE_HIST = 0x1;
+    public static final int EVENT_ACTIVE = 1;
+    public static final int EVENT_INACTIVE = 2;
+    public static final int EVENT_REJECT = 3;
+
+/* Event trigger types */
+    public static final int EVENT_TYPE_UNKNOWN = 0;
+    public static final int EVENT_TYPE_LATCHED = 1;
+    public static final int EVENT_TYPE_PULSEALL = 2;
+    public static final int EVENT_TYPE_PULSE = 3;
+    public static final int EVENT_TYPE_EXCLUSIVE = 4;
+
+/* define event types (eventInfoEnt.eType) */
+    public static final int EV_UNDEF = 0;
+    public static final int EV_FILE = 1;
+    public static final int EV_EXCEPT = 2;
+    public static final int EV_USER = 3;
+
+    /**
+     * \brief  Load information entry for a single host.
+     * JNA note: instance-field declaration order mirrors the native struct layout.
+     */
+    public static class loadInfoEnt extends Structure {
+        public static class ByReference extends loadInfoEnt implements Structure.ByReference {}
+        public static class ByValue extends loadInfoEnt implements Structure.ByValue {}
+        public loadInfoEnt() {}
+        public loadInfoEnt(Pointer p) { super(p); read(); }
+
+/* name of the host this entry describes */
+        public String hostName;
+
+/* host status */
+        public int status;
+
+/* native float* -- array of load index values; length not carried here,
+*  NOTE(review): determined by the caller's context -- confirm against the query API */
+        public FloatByReference load;
+    }
+
+
+
+    /**
+     * \brief  Pairing of a local queue with a remote queue (MultiCluster).
+     */
+    public static class queuePairEnt extends Structure {
+        public static class ByReference extends queuePairEnt implements Structure.ByReference {}
+        public static class ByValue extends queuePairEnt implements Structure.ByValue {}
+        public queuePairEnt() {}
+        public queuePairEnt(Pointer p) { super(p); read(); }
+
+/* local queue name */
+        public String local;
+
+/* remote queue name */
+        public String remote;
+
+/* NOTE(review): presumably a send/receive direction flag -- confirm against lsbatch.h */
+        public int send;
+
+/* pair status */
+        public int status;
+    }
+
+
+
+    /**
+     * \brief  Application entry of a remote-batch cluster (name plus description).
+     */
+    public static class rmbCluAppEnt extends Structure {
+        public static class ByReference extends rmbCluAppEnt implements Structure.ByReference {}
+        public static class ByValue extends rmbCluAppEnt implements Structure.ByValue {}
+        public rmbCluAppEnt() {}
+        public rmbCluAppEnt(Pointer p) { super(p); read(); }
+
+/* application name */
+        public String name;
+
+/* application description */
+        public String description;
+    }
+
+
+
+/* define 'cluster status' in the lease model
+*  for the bclusters command
+ */
+
+
+/* disconnected */
+    public static final int LEASE_CLU_STAT_DISC = 1;
+
+/* policy is exchanged but no lease is signed */
+    public static final int LEASE_CLU_STAT_CONN = 2;
+
+/* there are leases signed between the two clusters */
+    public static final int LEASE_CLU_STAT_OK = 3;
+
+/* number of distinct lease cluster states */
+    public static final int LEASE_CLU_STAT_NUMBER = 3;
+/* consumer cluster status in lease model */
+
+    /**
+     * \brief  Consumer-cluster status entry in the lease model.
+     */
+    public static class consumerCluEnt extends Structure {
+        public static class ByReference extends consumerCluEnt implements Structure.ByReference {}
+        public static class ByValue extends consumerCluEnt implements Structure.ByValue {}
+        public consumerCluEnt() {}
+        public consumerCluEnt(Pointer p) { super(p); read(); }
+
+
+/* consumer cluster name */
+        public String cluName;
+
+/* cluster status; see the LEASE_CLU_STAT_* 'cluster status' definitions */
+        public int status;
+    }
+
+
+/* provider cluster status in lease model */
+
+    /**
+     * \brief  Provider-cluster status entry in the lease model.
+     */
+    public static class providerCluEnt extends Structure {
+        public static class ByReference extends providerCluEnt implements Structure.ByReference {}
+        public static class ByValue extends providerCluEnt implements Structure.ByValue {}
+        public providerCluEnt() {}
+        public providerCluEnt(Pointer p) { super(p); read(); }
+
+
+/* provider cluster name */
+        public String cluName;
+
+/* cluster status; see the LEASE_CLU_STAT_* 'cluster status' definitions */
+        public int status;
+    }
+
+
+/* For the remote-batch model; its definition is the same as clusterInfoEnt. */
+
+    public static class rmbCluInfoEnt extends Structure {
+        public static class ByReference extends rmbCluInfoEnt implements Structure.ByReference {}
+        public static class ByValue extends rmbCluInfoEnt implements Structure.ByValue {}
+        public rmbCluInfoEnt() {}
+        public rmbCluInfoEnt(Pointer p) { super(p); read(); }
+
+/* cluster name */
+        public String cluster;
+
+/* number of entries in the queues array */
+        public int numPairs;
+        public Pointer /* queuePairEnt.ByReference */ queues;
+
+/* number of entries in the apps array */
+        public int numApps;
+        public Pointer /* rmbCluAppEnt.ByReference */ apps;
+    }
+
+
+
+/* for leasing model */
+
+    /**
+     * \brief  Cluster status information for the leasing model.
+     */
+    public static class leaseCluInfoEnt extends Structure {
+        public static class ByReference extends leaseCluInfoEnt implements Structure.ByReference {}
+        public static class ByValue extends leaseCluInfoEnt implements Structure.ByValue {}
+        public leaseCluInfoEnt() {}
+        public leaseCluInfoEnt(Pointer p) { super(p); read(); }
+
+
+/* 1 = import from all, if "allremote" is defined in the lease queue */
+        public int flags;
+
+/* the array size of the consumer cluster array */
+        public int numConsumer;
+
+/* the consumer cluster array */
+        public Pointer /* consumerCluEnt.ByReference */ consumerClus;
+
+/* the array size of the provider cluster array */
+        public int numProvider;
+
+/* the provider cluster array */
+        public Pointer /* providerCluEnt.ByReference */ providerClus;
+    }
+
+
+
+/* This is the old data structure; we
+*  leave it here to keep backward compatibility.
+*  Its definition is the same as structure rmbCluInfoEnt.
+*  It is used to transfer cluster status between mbatchd and the
+*  old (4.x) bclusters command and the old API lsb_clusterinfo().
+ */
+
+    public static class clusterInfoEnt extends Structure {
+        public static class ByReference extends clusterInfoEnt implements Structure.ByReference {}
+        public static class ByValue extends clusterInfoEnt implements Structure.ByValue {}
+        public clusterInfoEnt() {}
+        public clusterInfoEnt(Pointer p) { super(p); read(); }
+
+/* cluster name */
+        public String cluster;
+
+/* number of entries in the queues array */
+        public int numPairs;
+        public Pointer /* queuePairEnt.ByReference */ queues;
+
+/* number of entries in the apps array */
+        public int numApps;
+        public Pointer /* rmbCluAppEnt.ByReference */ apps;
+    }
+
+
+/* The new data structure used to transfer cluster status between mbatchd and the
+*  new (5.0) bclusters command and the new API lsb_clusterinfoEx().
+ */
+
+    public static class clusterInfoEntEx extends Structure {
+        public static class ByReference extends clusterInfoEntEx implements Structure.ByReference {}
+        public static class ByValue extends clusterInfoEntEx implements Structure.ByValue {}
+        public clusterInfoEntEx() {}
+        public clusterInfoEntEx(Pointer p) { super(p); read(); }
+
+
+/* cluster status related to remote batch (pointer in the native struct) */
+        public rmbCluInfoEnt.ByReference rmbCluInfo;
+
+/* cluster status related to resource lease (embedded by value) */
+        public leaseCluInfoEnt leaseCluInfo;
+    }
+
+
+
+    /**
+     * \brief  Event information entry; see the EVENT_* and EV_* constants above.
+     * JNA note: instance-field declaration order mirrors the native struct layout.
+     */
+    public static class eventInfoEnt extends Structure {
+        public static class ByReference extends eventInfoEnt implements Structure.ByReference {}
+        public static class ByValue extends eventInfoEnt implements Structure.ByValue {}
+        public eventInfoEnt() {}
+        public eventInfoEnt(Pointer p) { super(p); read(); }
+
+
+/* name of event */
+        public String name;
+
+/* one of EVENT_ACTIVE or EVENT_INACTIVE */
+        public int status;
+
+/* one of LATCHED, PULSE and EXCLUSIVE (EVENT_TYPE_*) */
+        public int type;
+
+/* one of FILE, ALARM, USER (EV_*) */
+        public int eType;
+
+/* user who created the event */
+        public String userName;
+
+/* event's attributes sent back from eeventd */
+        public String attributes;
+
+/* number of expressions dependent on the event */
+        public int numDependents;
+
+/* last time when eeventd sent back a message */
+        public NativeLong updateTime;
+
+/* last dispatched job dependent on the event */
+        public long lastDisJob;
+
+/* the time when the last job was dispatched */
+        public NativeLong lastDisTime;
+    }
+
+
+/* Event query option: select all events */
+    public static final int ALL_EVENTS = 0x01;
+
+    /**
+     *  \addtogroup groupinfo_define groupinfo_define
+     *  define options for \ref lsb_usergrpinfo and \ref lsb_hostgrpinfo calls
+     */
+
+    /**
+     * < User group
+     */
+    public static final int USER_GRP = 0x1;
+
+    /**
+     * < Host group
+     */
+    public static final int HOST_GRP = 0x2;
+
+    /**
+     * < Host partition group
+     */
+    public static final int HPART_HGRP = 0x4;
+    /**
+     *  \defgroup group_membership_option group_membership_option
+     *  \ingroup groupinfo_define
+     *  group membership options
+     */
+
+    /**
+     * < Expand the group membership recursively. That is, if a member of a group is itself a group, give the names of its members recursively, rather than its name, which is the default.
+     */
+    public static final int GRP_RECURSIVE = 0x8;
+
+    /**
+     * < Get membership of all groups.
+     */
+    public static final int GRP_ALL = 0x10;
+
+    /**
+     * < NQS queue group
+     */
+    public static final int NQSQ_GRP = 0x20;
+
+    /**
+     * < Group shares
+     */
+    public static final int GRP_SHARES = 0x40;
+
+    /**
+     * < Dynamic group
+     */
+    public static final int DYNAMIC_GRP = 0x800;
+
+    /**
+     * < Compute-unit group
+     */
+    public static final int GRP_CU = 0x1000;
+
+    /**
+     * \brief Structure for representing the shares assigned to a user group.
+     */
+    public static class userShares extends Structure {
+        public static class ByReference extends userShares implements Structure.ByReference {}
+        public static class ByValue extends userShares implements Structure.ByValue {}
+        public userShares() {}
+        public userShares(Pointer p) { super(p); read(); }
+
+
+        /**
+         * < A user name, or a keyword such as "default".
+         */
+        public String user;
+
+        /**
+         * < The number of shares assigned to the user.
+         */
+        public int shares;
+    }
+
+
+
+
+    /**
+     * \brief  Group information entry (user group, host group or compute unit).
+     * JNA note: the static GRP_* flags below are interleaved with the instance
+     * fields for documentation purposes only; JNA ignores static fields when
+     * mapping the native layout, so only the instance-field order matters.
+     */
+    public static class groupInfoEnt extends Structure {
+        public static class ByReference extends groupInfoEnt implements Structure.ByReference {}
+        public static class ByValue extends groupInfoEnt implements Structure.ByValue {}
+        public groupInfoEnt() {}
+        public groupInfoEnt(Pointer p) { super(p); read(); }
+
+
+        /**
+         * < Group name
+         */
+        public String group;
+
+        /**
+         * < ASCII list of member names
+         */
+        public String memberList;
+
+        /**
+         * < ASCII list of admin member names
+         */
+        public String adminMemberList;
+
+        /**
+         * < The number of users with shares
+         */
+        public int numUserShares;
+
+        /**
+         * < The user shares representation (array of userShares)
+         */
+        public Pointer /* userShares.ByReference */ userShares;
+
+        /**
+         *  \addtogroup group_define group_define
+         *   group define statements
+         */
+
+        /**
+         * < Group output is in regular (uncondensed) format.
+         */
+        public static final int GRP_NO_CONDENSE_OUTPUT = 0x01;
+
+        /**
+         * < Group output is in condensed format.
+         */
+        public static final int GRP_CONDENSE_OUTPUT = 0x02;
+
+        /**
+         * < Group has a regular expression
+         */
+        public static final int GRP_HAVE_REG_EXP = 0x04;
+
+        /**
+         * < Group is a service class.
+         */
+        public static final int GRP_SERVICE_CLASS = 0x08;
+
+        /**
+         * < Group is a compute unit.
+         */
+        public static final int GRP_IS_CU = 0x10;
+
+        /**
+         * < Options. see \ref group_define
+         */
+        public int options;
+
+        /**
+         * < Host membership pattern
+         */
+        public String pattern;
+
+        /**
+         * < Negation membership pattern
+         */
+        public String neg_pattern;
+
+        /**
+         * < Compute unit type
+         */
+        public int cu_type;
+    }
+
+
+
+    /**
+     * \brief  Run-job request, consumed by lsb_runjob.
+     * JNA note: the static RUNJOB_OPT_* flags are not part of the native layout;
+     * only the instance fields (jobId, numHosts, hostname, options, slots) are mapped.
+     */
+    public static class runJobRequest extends Structure {
+        public static class ByReference extends runJobRequest implements Structure.ByReference {}
+        public static class ByValue extends runJobRequest implements Structure.ByValue {}
+        public runJobRequest() {}
+        public runJobRequest(Pointer p) { super(p); read(); }
+
+
+        /**
+         * < Jobid of the requested job
+         */
+        public long jobId;
+
+        /**
+         * < The number of hosts
+         */
+        public int numHosts;
+
+        /**
+         * < Vector of hostnames (native char**)
+         */
+        public Pointer hostname;
+        /**
+         *  \addtogroup runjob_option runjob_option
+         *  Options used for lsb_runjob:
+         */
+
+
+        /**
+         * < Normal jobs
+         */
+        public static final int RUNJOB_OPT_NORMAL = 0x01;
+
+        /**
+         * < Nostop jobs
+         */
+        public static final int RUNJOB_OPT_NOSTOP = 0x02;
+
+        /**
+         * < Pending jobs only, no finished jobs
+         */
+        public static final int RUNJOB_OPT_PENDONLY = 0x04;
+
+        /**
+         * < Checkpoint job only, from the beginning
+         */
+        public static final int RUNJOB_OPT_FROM_BEGIN = 0x08;
+
+        /**
+         * < brun to use free CPUs only
+         */
+        public static final int RUNJOB_OPT_FREE = 0x10;
+
+        /**
+         * < brun ignoring rusage
+         */
+        public static final int RUNJOB_OPT_IGNORE_RUSAGE = 0x20;
+
+        /**
+         * < Run job request options, see \ref runjob_option
+         */
+        public int options;
+
+        /**
+         * < Vector of number of slots per host (native int*)
+         */
+        public IntByReference slots;
+    }
+
+
+
+    /**
+     *  \addtogroup external_msg_processing external_msg_processing
+     *  options for \ref lsb_readjobmsg call
+     */
+
+    /**
+     *  \defgroup external_msg_post external_msg_post
+     *  options specifying if the message has an attachment to be posted
+     */
+
+    /**
+     * < Post the external job message. There is no attached data file.
+     */
+    public static final int EXT_MSG_POST = 0x01;
+
+    /**
+     * < Post the external job message and data file posted to the job.
+     */
+    public static final int EXT_ATTA_POST = 0x02;
+
+    /**
+     * < Read the external job message. There is no attached data file.
+     */
+    public static final int EXT_MSG_READ = 0x04;
+
+    /**
+     * < Read the external job message and data file posted to the job. If there is no data file attached, the error message "The attached data of the message is not available" is displayed, and the external job message is displayed.
+     */
+    public static final int EXT_ATTA_READ = 0x08;
+
+    /**
+     * < Replay the external message
+     */
+    public static final int EXT_MSG_REPLAY = 0x10;
+
+    /**
+     * < Post the external job noevent message
+     */
+    public static final int EXT_MSG_POST_NOEVENT = 0x20;
+
+
+    /**
+     * \brief structure jobExternalMsgReq contains the information required to
+     * define an external message of a job.
+     */
+    public static class jobExternalMsgReq extends Structure {
+        public static class ByReference extends jobExternalMsgReq implements Structure.ByReference {}
+        public static class ByValue extends jobExternalMsgReq implements Structure.ByValue {}
+        public jobExternalMsgReq() {}
+        public jobExternalMsgReq(Pointer p) { super(p); read(); }
+
+
+        /**
+         * < Specifies if the message has an attachment to be read. <lsf/lsbatch.h> defines the following flags constructed from bits. These flags correspond to options.\n EXT_MSG_READ\n Read the external job message. There is no attached data file.\n EXT_ATTA_READ\n Read the external job message and data file posted to the job.\n If there is no data file attached, the error message "The attached data of the message is not available" is displayed, and the external job message is displayed.
+         */
+        public int options;
+
+        /**
+         * < The system generated job Id of the job.
+         */
+        public long jobId;
+
+        /**
+         * < The name of the job if jobId is undefined (<=0)
+         */
+        public String jobName;
+
+        /**
+         * < The message index. A job can have more than one message. Use msgIdx in an array to index messages.
+         */
+        public int msgIdx;
+
+        /**
+         * < Text description of the message
+         */
+        public String desc;
+
+        /**
+         * < The userId of the author of the message.
+         */
+        public int userId;
+
+        /**
+         * < The size of the data file. If no data file is attached, the size is 0.
+         */
+        public NativeLong dataSize;
+
+        /**
+         * < The time the author posted the message.
+         */
+        public NativeLong postTime;
+
+        /**
+         * < The author of the message.
+         */
+        public String userName;
+    }
+
+
+
+    /**
+     *  \addtogroup ext_data_status ext_data_status
+     *  status values for jobExternalMsgReply.dataStatus
+     */
+
+    /**
+     * < Transferring the message's data file.
+     */
+    public static final int EXT_DATA_UNKNOWN = 0;
+
+    /**
+     * < The message does not have an attached data file.
+     */
+    public static final int EXT_DATA_NOEXIST = 1;
+
+    /**
+     * < The message's data file is available.
+     */
+    public static final int EXT_DATA_AVAIL = 2;
+
+    /**
+     * < The message's data file is corrupt.
+     */
+    public static final int EXT_DATA_UNAVAIL = 3;
+
+    /**
+     * \brief structure jobExternalMsgReply contains the information required to
+     * define an external message reply.
+     */
+    public static class jobExternalMsgReply extends Structure {
+        public static class ByReference extends jobExternalMsgReply implements Structure.ByReference {}
+        public static class ByValue extends jobExternalMsgReply implements Structure.ByValue {}
+        public jobExternalMsgReply() {}
+        public jobExternalMsgReply(Pointer p) { super(p); read(); }
+
+
+        /**
+         * < The system generated job Id of the job associated with the message.
+         */
+        public long jobId;
+
+        /**
+         * < The message index. A job can have more than one message. Use msgIdx in an array to index messages.
+         */
+        public int msgIdx;
+
+        /**
+         * < The message you want to read.
+         */
+        public String desc;
+
+        /**
+         * < The user Id of the author of the message.
+         */
+        public int userId;
+
+        /**
+         * < The size of the data file attached. If no data file is attached, the size is 0.
+         */
+        public NativeLong dataSize;
+
+        /**
+         * < The time the message was posted.
+         */
+        public NativeLong postTime;
+
+        /**
+         * < The status of the attached data file. The status of the data file can be one of the following:\n EXT_DATA_UNKNOWN\n Transferring the message's data file.\n EXT_DATA_NOEXIST\n The message does not have an attached data file.\n EXT_DATA_AVAIL\n The message's data file is available. \n EXT_DATA_UNAVAIL\n The message's data file is corrupt.
+         */
+        public int dataStatus;
+
+        /**
+         * < The author of the message
+         */
+        public String userName;
+    }
+
+
+
+
+    /**
+     * Data structure representing the symphony job status update request.
+     */
+    public static class symJobInfo extends Structure {
+        public static class ByReference extends symJobInfo implements Structure.ByReference {}
+        public static class ByValue extends symJobInfo implements Structure.ByValue {}
+        public symJobInfo() {}
+        public symJobInfo(Pointer p) { super(p); read(); }
+
+
+/* the service partition that SSM works for */
+        public String partition;
+
+/* the priority of the symphony job */
+        public int priority;
+
+/* the full name that indicates the job relationship */
+        public String jobFullName;
+
+/* the auxiliary description to help updating command info */
+        public String auxCmdDesc;
+
+/* the auxiliary description to help updating job description info */
+        public String auxJobDesc;
+    }
+
+
+
+    /**
+     * Text status of a symphony job (part of the status update request).
+     */
+    public static class symJobStatus extends Structure {
+        public static class ByReference extends symJobStatus implements Structure.ByReference {}
+        public static class ByValue extends symJobStatus implements Structure.ByValue {}
+        public symJobStatus() {}
+        public symJobStatus(Pointer p) { super(p); read(); }
+
+
+/* text description of the symphony job status */
+        public String desc;
+    }
+
+
+
+    /**
+     * Text progress of a symphony job (part of the status update request).
+     */
+    public static class symJobProgress extends Structure {
+        public static class ByReference extends symJobProgress implements Structure.ByReference {}
+        public static class ByValue extends symJobProgress implements Structure.ByValue {}
+        public symJobProgress() {}
+        public symJobProgress(Pointer p) { super(p); read(); }
+
+
+/* text description of the symphony job progress */
+        public String desc;
+    }
+
+
+
+
+    /**
+     * Symphony job status update request sent to mbatchd.
+     * JNA note: the static SYM_JOB_UPDATE_* flags are not part of the native layout.
+     */
+    public static class symJobStatusUpdateReq extends Structure {
+        public static class ByReference extends symJobStatusUpdateReq implements Structure.ByReference {}
+        public static class ByValue extends symJobStatusUpdateReq implements Structure.ByValue {}
+        public symJobStatusUpdateReq() {}
+        public symJobStatusUpdateReq(Pointer p) { super(p); read(); }
+
+
+/* the job whose info is to be updated into MBD */
+        public long jobId;
+
+/* bit flags selecting which parts of the update to apply (bitOption) */
+        public static final int SYM_JOB_UPDATE_NONE = 0x0;
+        public static final int SYM_JOB_UPDATE_INFO = 0x1;
+        public static final int SYM_JOB_UPDATE_STATUS = 0x2;
+        public static final int SYM_JOB_UPDATE_PROGRESS = 0x4;
+
+/* the option to update the info; combination of SYM_JOB_UPDATE_* bits */
+        public int bitOption;
+        public symJobInfo info;
+        public int numOfJobStatus;
+        public Pointer /* symJobStatus.ByReference */ status;
+        public symJobProgress progress;
+    }
+
+
+
+    /**
+     * Array wrapper for a batch of symphony job status update requests.
+     */
+    public static class symJobStatusUpdateReqArray extends Structure {
+        public static class ByReference extends symJobStatusUpdateReqArray implements Structure.ByReference {}
+        public static class ByValue extends symJobStatusUpdateReqArray implements Structure.ByValue {}
+        public symJobStatusUpdateReqArray() {}
+        public symJobStatusUpdateReqArray(Pointer p) { super(p); read(); }
+
+/* number of entries in symJobReqs */
+        public int numOfJobReq;
+        public Pointer /* symJobStatusUpdateReq.ByReference */ symJobReqs;
+    }
+
+
+
+
+    /**
+     * Data structures representing the symphony job status update reply.
+     */
+
+    /**
+     * Acknowledgement for one part of a symphony job status update.
+     * JNA note: only the instance fields (ackCode, desc) are part of the native layout.
+     */
+    public static class symJobUpdateAck extends Structure {
+        public static class ByReference extends symJobUpdateAck implements Structure.ByReference {}
+        public static class ByValue extends symJobUpdateAck implements Structure.ByValue {}
+        public symJobUpdateAck() {}
+        public symJobUpdateAck(Pointer p) { super(p); read(); }
+
+        /* Acknowledgement codes: was a non-final "public static int" for
+         * SYM_UPDATE_ACK_OK, which left the constant publicly mutable and was
+         * inconsistent with SYM_UPDATE_ACK_ERR; both are now final. */
+        public static final int SYM_UPDATE_ACK_OK = 0;
+        public static final int SYM_UPDATE_ACK_ERR = 1;
+
+/* one of SYM_UPDATE_ACK_OK / SYM_UPDATE_ACK_ERR */
+        public int ackCode;
+
+/* text description of job info update acknowledgement */
+        public String desc;
+    }
+
+
+
+    /**
+     * Reply for a symphony job status update; carries one ack per update kind.
+     * JNA note: the static *_IDX constants index the acks array and are not
+     * part of the native layout.
+     */
+    public static class symJobStatusUpdateReply extends Structure {
+        public static class ByReference extends symJobStatusUpdateReply implements Structure.ByReference {}
+        public static class ByValue extends symJobStatusUpdateReply implements Structure.ByValue {}
+        public symJobStatusUpdateReply() {}
+        public symJobStatusUpdateReply(Pointer p) { super(p); read(); }
+
+
+/* the job whose info was updated into MBD */
+        public long jobId;
+
+/* indexes into the acks array, one slot per update kind */
+        public static final int SYM_UPDATE_INFO_IDX = 0;
+        public static final int SYM_UPDATE_STATUS_IDX = 1;
+        public static final int SYM_UPDATE_PROGRESS_IDX = 2;
+        public static final int NUM_SYM_UPDATE_ACK = 3;
+        public symJobUpdateAck[] acks = new symJobUpdateAck[NUM_SYM_UPDATE_ACK];
+    }
+
+
+
+    /**
+     * Array wrapper for a batch of symphony job status update replies.
+     */
+    public static class symJobStatusUpdateReplyArray extends Structure {
+        public static class ByReference extends symJobStatusUpdateReplyArray implements Structure.ByReference {}
+        public static class ByValue extends symJobStatusUpdateReplyArray implements Structure.ByValue {}
+        public symJobStatusUpdateReplyArray() {}
+        public symJobStatusUpdateReplyArray(Pointer p) { super(p); read(); }
+
+/* number of entries in symJobReplys */
+        public int numOfJobReply;
+        public Pointer /* symJobStatusUpdateReply.ByReference */ symJobReplys;
+    }
+
+
+
+
+/* Data structure representing the job array requeue operation.
+*  o jobId is the Lsbatch id of the job array to be requeued
+*  o status is the desired requeue status of the job; by default
+*    it is JOB_STAT_PEND, or user-specified JOB_STAT_PSUSP
+*  o options specifies the status of the array elements that have
+*    to be requeued.
+*
+*  The function that operates on the data is lsb_requeuejob()
+ */
+
+    /**
+     *  \addtogroup requeuejob_options requeuejob_options
+     *  define statements used by \ref lsb_requeuejob.
+     */
+
+    /**
+     * < Requeues jobs that have finished running. Jobs that have exited are not re-run. Equivalent to brequeue -d command line option.
+     */
+    public static final int REQUEUE_DONE = 0x1;
+
+    /**
+     * < Requeues jobs that have exited. Finished jobs are not re-run. Equivalent to brequeue -e command line option.
+     */
+    public static final int REQUEUE_EXIT = 0x2;
+
+    /**
+     * < Requeues running jobs and puts them in PEND state. Equivalent to brequeue -r command line option.
+     */
+    public static final int REQUEUE_RUN = 0x4;
+
+    /**
+     * \brief  Requeued job, consumed by lsb_requeuejob.
+     */
+    public static class jobrequeue extends Structure {
+        public static class ByReference extends jobrequeue implements Structure.ByReference {}
+        public static class ByValue extends jobrequeue implements Structure.ByValue {}
+        public jobrequeue() {}
+        public jobrequeue(Pointer p) { super(p); read(); }
+
+
+        /**
+         * < Specifies the jobid of a single job or an array of jobs.
+         */
+        public long jobId;
+
+        /**
+         * < Specifies the lsbatch status of the requeued job after it has been requeued. The job status can be JOB_STAT_PEND or JOB_STATE_PSUSP. The default status is JOB_STAT_PEND.
+         */
+        public int status;
+
+        /**
+         * < Specifies the array elements to be requeued. See \ref requeuejob_options
+         */
+        public int options;
+    }
+
+
+
+    /**
+     * One entry of a requeue exit-value list.
+     * JNA note: the static RQE_* constants are not part of the native layout.
+     */
+    public static class requeueEStruct extends Structure {
+        public static class ByReference extends requeueEStruct implements Structure.ByReference {}
+        public static class ByValue extends requeueEStruct implements Structure.ByValue {}
+        public requeueEStruct() {}
+        public requeueEStruct(Pointer p) { super(p); read(); }
+
+
+/* requeue type: normal, exclude, other, prefer_other, etc. */
+        public int type;
+
+/* requeue type: normal - as in 2.2 */
+        public static final int RQE_NORMAL = 0;
+
+/* requeue type: exclude */
+        public static final int RQE_EXCLUDE = 1;
+
+/* indicates the end of the list */
+        public static final int RQE_END = 255;
+
+/* requeue exit value */
+        public int value;
+
+/* requeue interval */
+        public int interval;
+    }
+
+
+
+    /**
+     * \brief  list of requeue exit values.
+     * NOTE: field declaration order must match the native struct layout (JNA maps fields in order).
+     */
+    public static class requeue extends Structure {
+        public static class ByReference extends requeue implements Structure.ByReference {}
+        public static class ByValue extends requeue implements Structure.ByValue {}
+        public requeue() {}
+        public requeue(Pointer p) { super(p); read(); }
+
+/* number of entries in reqValues */
+        public int numReqValues;
+
+/* native array of requeueEStruct entries (terminated by RQE_END -- TODO confirm against caller) */
+        public Pointer /* requeueEStruct.ByReference */ reqValues;
+    }
+
+
+
+/* The Service Level Agreement in LSF
+ */
+
+
+/* This is the library representation of the
+*  service class.
+ */
+
+    /**
+     * \brief  library representation of an LSF service class (SLA).
+     * NOTE: field declaration order must match the native struct layout (JNA maps fields in order).
+     */
+    public static class serviceClass extends Structure {
+        public static class ByReference extends serviceClass implements Structure.ByReference {}
+        public static class ByValue extends serviceClass implements Structure.ByValue {}
+        public serviceClass() {}
+        public serviceClass(Pointer p) { super(p); read(); }
+
+
+/* SLA name */
+        public String name;
+
+/* SLA priority */
+        public float priority;
+
+/* The number of goals */
+        public int ngoals;
+
+/* The array of goals */
+        public Pointer /* objective.ByReference */ goals;
+
+/* Users allowed to use the SLA */
+        public String userGroups;
+
+/* SLA description */
+        public String description;
+
+/* SLA control action */
+        public String controlAction;
+
+/* Finished jobs per CLEAN_PERIOD */
+        public float throughput;
+
+/* Job counters */
+        public int[] counters = new int[NUM_JGRP_COUNTERS + 1];
+
+/* project scheduling enabled sla */
+        public String consumer;
+
+/* SLA EGO control parameters */
+        public slaControl.ByReference ctrl;
+
+/* SLA EGO control parameters (extension) */
+        public slaControlExt.ByReference ctrlExt;
+    }
+
+
+
+/* This is the library representation of the
+*  Service Level Objective.
+ */
+
+    /* SLO goal-state flags (distinct bits; cf. objective.state below) */
+    public static final int GOAL_WINDOW_OPEN = 0x1;
+    public static final int GOAL_WINDOW_CLOSED = 0x2;
+    public static final int GOAL_ONTIME = 0x4;
+    public static final int GOAL_DELAYED = 0x8;
+    public static final int GOAL_DISABLED = 0x10;
+
+/* Enumerate all the possible performance goals
+*  for a service class.
+ */
+
+    /* Possible performance-goal types for a service class (int constants used as an enum). */
+    public static interface objectives {
+        public static int GOAL_DEADLINE = 0;
+        public static int GOAL_VELOCITY = 1;
+        public static int GOAL_THROUGHPUT = 2;
+    }
+
+
+
+/* The objective of a goal, also called SLO, is represented
+*  by this data structure.
+ */
+
+    /**
+     * \brief  a goal (Service Level Objective) of a service class.
+     * NOTE: field declaration order must match the native struct layout (JNA maps fields in order).
+     */
+    public static class objective extends Structure {
+        public static class ByReference extends objective implements Structure.ByReference {}
+        public static class ByValue extends objective implements Structure.ByValue {}
+        public objective() {}
+        public objective(Pointer p) { super(p); read(); }
+
+
+/* goal specs from lsb.serviceclasses */
+        public String spec;
+
+/* goal type */
+        public int type;
+
+/* the state of the goal OnTime || Delayed */
+        public int state;
+
+/* the configured value */
+        public int goal;
+
+/* the actual value */
+        public int actual;
+
+/* the optimum value */
+        public int optimum;
+
+/* the minimum value */
+        public int minimum;
+    }
+
+
+
+/* Control parameters for SLA management of hosts belonging
+*  to the EGO cluster. The control parameters are for each
+*  SLA that gets its hosts from EGO.
+ */
+
+    /**
+     * \brief  SLA control parameters for hosts obtained from the EGO cluster.
+     * NOTE: field declaration order must match the native struct layout (JNA maps fields in order).
+     */
+    public static class slaControl extends Structure {
+        public static class ByReference extends slaControl implements Structure.ByReference {}
+        public static class ByValue extends slaControl implements Structure.ByValue {}
+        public slaControl() {}
+        public slaControl(Pointer p) { super(p); read(); }
+
+
+/* sla name */
+        public String sla;
+
+/* EGO consumer the sla is mapped to */
+        public String consumer;
+
+/* timeout for returning hosts to EGO */
+        public int maxHostIdleTime;
+
+/* timeout left before EGO forcefully reclaims */
+        public int recallTimeout;
+
+/* number of hosts being recalled */
+        public int numHostRecalled;
+
+/* EGO resource requirement */
+        public String egoResReq;
+    }
+
+
+
+    /**
+     * \brief  extended SLA EGO control parameters (referenced by serviceClass.ctrlExt).
+     * NOTE: field declaration order must match the native struct layout (JNA maps fields in order).
+     */
+    public static class slaControlExt extends Structure {
+        public static class ByReference extends slaControlExt implements Structure.ByReference {}
+        public static class ByValue extends slaControlExt implements Structure.ByValue {}
+        public slaControlExt() {}
+        public slaControlExt(Pointer p) { super(p); read(); }
+
+
+/* whether exclusive allocation */
+        public int allocflags;
+
+/* tile parameter */
+        public int tile;
+    }
+
+
+
+/* Application Encapsulation in LSF
+*
+*  This is the library representation of the
+*  application.
+ */
+
+    /**
+     * \brief  library representation of an application profile.
+     * NOTE: field declaration order must match the native struct layout (JNA maps fields in order).
+     */
+    public static class appInfoEnt extends Structure {
+        public static class ByReference extends appInfoEnt implements Structure.ByReference {}
+        public static class ByValue extends appInfoEnt implements Structure.ByValue {}
+        public appInfoEnt() {}
+        public appInfoEnt(Pointer p) { super(p); read(); }
+
+
+/* app name */
+        public String name;
+
+/* app description */
+        public String description;
+
+/* num of total jobs */
+        public int numJobs;
+
+/* num of pending slots */
+        public int numPEND;
+
+/* num of running slots */
+        public int numRUN;
+
+/* num of suspend slots */
+        public int numSSUSP;
+
+/* num of ususp slots */
+        public int numUSUSP;
+
+/* reserved job slots */
+        public int numRESERVE;
+
+/* app attributes */
+        public int aAttrib;
+
+/* number of jobs in one chunk */
+        public int chunkJobSize;
+
+/* requeue exit values */
+        public String requeueEValues;
+
+/* success exit values */
+        public String successEValues;
+
+/* app pre execution */
+        public String preCmd;
+
+/* app post execution */
+        public String postCmd;
+
+/* Job starter command(s) */
+        public String jobStarter;
+
+/* suspend action command */
+        public String suspendActCmd;
+
+/* resume action command */
+        public String resumeActCmd;
+
+/* terminate action command */
+        public String terminateActCmd;
+
+/* memory limit level type */
+        public int memLimitType;
+
+/* LSF resource limits (soft) */
+        public int[] defLimits = new int[LibLsf.LSF_RLIM_NLIMITS];
+
+/* host spec from CPULIMIT or RUNLIMIT */
+        public String hostSpec;
+
+/* resource requirement string */
+        public String resReq;
+
+/* maximal processor limit */
+        public int maxProcLimit;
+
+/* default processor limit */
+        public int defProcLimit;
+
+/* minimal processor limit */
+        public int minProcLimit;
+
+/* estimated run time */
+        public int runTime;
+
+/* include postproc as part of job */
+        public int jobIncludePostProc;
+
+/* time window for postproc */
+        public int jobPostProcTimeOut;
+
+/* remote task gone action */
+        public String rTaskGoneAction;
+
+/* pathname of pjob env script */
+        public String djobEnvScript;
+
+/* DJOB rusage interval */
+        public int djobRuInterval;
+
+/* DJOB heartbeat interval */
+        public int djobHbInterval;
+
+/* DJOB communication fail action */
+        public String djobCommfailAction;
+
+/* disable Distributed Application Framework */
+        public int djobDisabled;
+
+/* grace period (in seconds) before terminating tasks when a job shrinks */
+        public int djobResizeGracePeriod;
+
+/* chkpnt directory */
+        public String chkpntDir;
+
+/* chkpnt method */
+        public String chkpntMethod;
+
+/* chkpnt period */
+        public int chkpntPeriod;
+
+/* initial chkpnt period */
+        public int initChkpntPeriod;
+
+/* migration threshold */
+        public int migThreshold;
+
+/* maximum number of job preempted times */
+        public int maxJobPreempt;
+
+/* maximum number of pre-exec retry times */
+        public int maxPreExecRetry;
+
+/* maximum number of pre-exec retry times for local cluster */
+        public int localMaxPreExecRetry;
+
+/* maximum number of job re-queue times */
+        public int maxJobRequeue;
+
+/* no preempt run time */
+        public int noPreemptRunTime;
+
+/* no preempt finish time */
+        public int noPreemptFinishTime;
+
+/* no preempt run time percent */
+        public int noPreemptRunTimePercent;
+
+/* no preempt finish time percent */
+        public int noPreemptFinishTimePercent;
+
+/* use Linux-PAM */
+        public int usePam;
+
+/* processor binding options */
+        public int bindingOption;
+
+/* persistent same hosts and same order */
+        public int persistHostOrder;
+
+/* job resize notification cmd */
+        public String resizeNotifyCmd;
+    }
+
+
+
+/* application attributes (bit mask stored in appInfoEnt.aAttrib)
+ */
+
+/* rerunnable application */
+    public static final int A_ATTRIB_RERUNNABLE = 0x01;
+
+/* non rerunnable application */
+    public static final int A_ATTRIB_NONRERUNNABLE = 0x02;
+
+/* default application */
+    public static final int A_ATTRIB_DEFAULT = 0x04;
+
+/* runtime is absolute */
+    public static final int A_ATTRIB_ABS_RUNLIMIT = 0x08;
+
+/* process binding application */
+    public static final int A_ATTRIB_JOBBINDING = 0x10;
+
+/* non process binding application */
+    public static final int A_ATTRIB_NONJOBBINDING = 0x20;
+
+/* checkpointable application */
+    public static final int A_ATTRIB_CHKPNT = 0x40;
+
+/* Job can be resized manually */
+    public static final int A_ATTRIB_RESIZABLE = 0x80;
+
+/* Job can be resized automatically */
+    public static final int A_ATTRIB_AUTO_RESIZABLE = 0x100;
+
+
+/* processor binding options (cf. appInfoEnt.bindingOption) */
+    public static final int BINDING_OPTION_BALANCE = 0x1;
+    public static final int BINDING_OPTION_PACK = 0x2;
+    public static final int BINDING_OPTION_ANY = 0x4;
+    public static final int BINDING_OPTION_USER = 0x8;
+    public static final int BINDING_OPTION_USER_CPU_LIST = 0x10;
+    public static final int BINDING_OPTION_NONE = 0x20;
+
+    /**
+     *  \addtogroup movejob_options movejob_options
+     *  options for \ref lsb_movejob call
+     */
+
+    /**
+     * <  Move the job to the top of its queue.
+     */
+    public static final int TO_TOP = 1;
+
+    /**
+     * <  Move the job to the bottom of its queue.
+     */
+    public static final int TO_BOTTOM = 2;
+
+    /**
+     *  \addtogroup queue_ctrl_option queue_ctrl_option
+     *  options for \ref lsb_queuecontrol call
+     */
+
+    /**
+     * < Open the queue to accept jobs.
+     */
+    public static final int QUEUE_OPEN = 1;
+
+    /**
+     * < Close the queue so it will not accept jobs.
+     */
+    public static final int QUEUE_CLOSED = 2;
+
+    /**
+     * < Activate the queue to dispatch jobs.
+     */
+    public static final int QUEUE_ACTIVATE = 3;
+
+    /**
+     * < Inactivate the queue so it will not dispatch jobs.
+     */
+    public static final int QUEUE_INACTIVATE = 4;
+
+    /**
+     * < Clean the queue
+     */
+    public static final int QUEUE_CLEAN = 5;
+
+    /**
+     * \brief The structure of queueCtrlReq
+     * NOTE: field declaration order must match the native struct layout (JNA maps fields in order).
+     */
+    public static class queueCtrlReq extends Structure {
+        public static class ByReference extends queueCtrlReq implements Structure.ByReference {}
+        public static class ByValue extends queueCtrlReq implements Structure.ByValue {}
+        public queueCtrlReq() {}
+        public queueCtrlReq(Pointer p) { super(p); read(); }
+
+
+        /**
+         * < The name of the queue to be controlled.
+         */
+        public String queue;
+
+        /**
+         * < Operations to be applied, for example, QUEUE_OPEN. You can refer to \ref queue_ctrl_option for more options.
+         */
+        public int opCode;
+
+        /**
+         * < The message attached by the admin
+         */
+        public String message;
+    }
+
+
+
+/* options for lsb_hostcontrol() call */
+    /**
+     *  \addtogroup host_ctrl_option host_ctrl_option
+     *  operations to be applied to a batch host via \ref lsb_hostcontrol
+     */
+
+    /**
+     * < Opens the host to accept jobs.
+     */
+    public static final int HOST_OPEN = 1;
+
+    /**
+     * < Closes the host so that no jobs can be dispatched to it.
+     */
+    public static final int HOST_CLOSE = 2;
+
+    /**
+     * < Restarts sbatchd on the host. sbatchd will receive a request from mbatchd and re-execute. This permits the sbatchd binary to be updated. This operation fails if no sbatchd is running on the specified host.
+     */
+    public static final int HOST_REBOOT = 3;
+
+    /**
+     * < The sbatchd on the host will exit.
+     */
+    public static final int HOST_SHUTDOWN = 4;
+
+    /**
+     * < Used for closing leased host on the submission cluster
+     */
+    public static final int HOST_CLOSE_REMOTE = 5;
+
+    /**
+     * \brief  Host control request.
+     * NOTE: field declaration order must match the native struct layout (JNA maps fields in order).
+     */
+    public static class hostCtrlReq extends Structure {
+        public static class ByReference extends hostCtrlReq implements Structure.ByReference {}
+        public static class ByValue extends hostCtrlReq implements Structure.ByValue {}
+        public hostCtrlReq() {}
+        public hostCtrlReq(Pointer p) { super(p); read(); }
+
+
+        /**
+         * < The host to be controlled. If host is null, the local host is assumed.
+         */
+        public String host;
+
+        /**
+         * < Operations to be applied in \ref host_ctrl_option.
+         */
+        public int opCode;
+
+        /**
+         * < Message attached by the administrator.
+         */
+        public String message;
+    }
+
+
+
+/* options for lsb_hgcontrol() call */
+    /* add hosts to the host group (per the constant name -- TODO confirm) */
+    public static final int HGHOST_ADD = 1;
+    /* remove hosts from the host group (per the constant name -- TODO confirm) */
+    public static final int HGHOST_DEL = 2;
+
+    /**
+     * \brief  host group control request for lsb_hgcontrol().
+     * NOTE: field declaration order must match the native struct layout (JNA maps fields in order).
+     */
+    public static class hgCtrlReq extends Structure {
+        public static class ByReference extends hgCtrlReq implements Structure.ByReference {}
+        public static class ByValue extends hgCtrlReq implements Structure.ByValue {}
+        public hgCtrlReq() {}
+        public hgCtrlReq(Pointer p) { super(p); read(); }
+
+        /* operation to apply: HGHOST_ADD or HGHOST_DEL */
+        public int opCode;
+        /* host group name */
+        public String grpname;
+        /* number of entries in hosts */
+        public int numhosts;
+        /* native array of host names -- presumably char** on the native side; TODO confirm */
+        public Pointer hosts;
+        /* message attached by the caller (cf. queueCtrlReq.message) */
+        public String message;
+    }
+
+
+
+    /**
+     * \brief  reply to a host group control request (lsb_hgcontrol()).
+     * NOTE: field declaration order must match the native struct layout (JNA maps fields in order).
+     */
+    public static class hgCtrlReply extends Structure {
+        public static class ByReference extends hgCtrlReply implements Structure.ByReference {}
+        public static class ByValue extends hgCtrlReply implements Structure.ByValue {}
+        public hgCtrlReply() {}
+        public hgCtrlReply(Pointer p) { super(p); read(); }
+
+        /* number of hosts for which the operation succeeded */
+        public int numsucc;
+        /* number of hosts for which the operation failed */
+        public int numfail;
+        /* native array of names of the hosts that succeeded -- TODO confirm element type */
+        public Pointer succHosts;
+        /* native array of names of the hosts that failed -- TODO confirm element type */
+        public Pointer failHosts;
+        /* per-host failure reason codes, parallel to failHosts -- TODO confirm */
+        public IntByReference failReasons;
+    }
+
+
+
+/* options for lsb_reconfig() call */
+    /**
+     *  \addtogroup mbd_operation mbd_operation
+     *   options for \ref lsb_reconfig call
+     */
+
+    /**
+     * < restart mbatchd
+     */
+    public static final int MBD_RESTART = 0;
+
+    /**
+     * < mbatchd re-reads the configuration files
+     */
+    public static final int MBD_RECONFIG = 1;
+
+    /**
+     * < mbatchd checks the validity of the configuration files
+     */
+    public static final int MBD_CKCONFIG = 2;
+
+    /**
+     * \brief  mbatchd control request.
+     * NOTE: field declaration order must match the native struct layout (JNA maps fields in order).
+     */
+    public static class mbdCtrlReq extends Structure {
+        public static class ByReference extends mbdCtrlReq implements Structure.ByReference {}
+        public static class ByValue extends mbdCtrlReq implements Structure.ByValue {}
+        public mbdCtrlReq() {}
+        public mbdCtrlReq(Pointer p) { super(p); read(); }
+
+
+        /**
+         * < Operation applied, defined in \ref mbd_operation
+         */
+        public int opCode;
+
+        /**
+         * < Not used so far
+         */
+        public String name;
+
+        /**
+         * < The message attached by the admin
+         */
+        public String message;
+    }
+
+
+
+/* opcodes to turn the performance monitor on or off, or set its sample period */
+    public static final int PERFMON_START = 1;
+    public static final int PERFMON_STOP = 2;
+    public static final int PERFMON_SET_PERIOD = 3;
+
+
+/* default sample period: 60 */
+    public static final int DEF_PERFMON_PERIOD = 60;
+
+
+    /**
+     * \brief  one performance-monitor metric record.
+     * NOTE: field declaration order must match the native struct layout (JNA maps fields in order).
+     */
+    public static class perfmonMetricsEnt extends Structure {
+        public static class ByReference extends perfmonMetricsEnt implements Structure.ByReference {}
+        public static class ByValue extends perfmonMetricsEnt implements Structure.ByValue {}
+        public perfmonMetricsEnt() {}
+        public perfmonMetricsEnt(Pointer p) { super(p); read(); }
+
+/* metric name */
+        public String name;
+
+/* last period counters */
+        public NativeLong current;
+
+/* max of (counter/interval)*sample period for one period */
+        public NativeLong max;
+
+/* min of (counter/interval)*sample period for one period */
+        public NativeLong min;
+
+/* avg of (total/interval)*sample period for one period */
+        public NativeLong avg;
+
+/* total counters since the performance monitor was turned on */
+        public String total;
+    }
+
+
+
+/* performance monitor info */
+
+    /**
+     * \brief  performance monitor state and metric records.
+     * NOTE: field declaration order must match the native struct layout (JNA maps fields in order).
+     */
+    public static class perfmonInfo extends Structure {
+        public static class ByReference extends perfmonInfo implements Structure.ByReference {}
+        public static class ByValue extends perfmonInfo implements Structure.ByValue {}
+        public perfmonInfo() {}
+        public perfmonInfo(Pointer p) { super(p); read(); }
+
+/* number of metrics */
+        public int num;
+
+/* native array of metric counter records */
+        public Pointer /* perfmonMetricsEnt.ByReference */ record;
+
+/* sample period */
+        public int period;
+
+/* time when the performance monitor was turned on */
+        public NativeLong start;
+
+/* time when the performance monitor was turned off */
+        public NativeLong end;
+    }
+
+
+
+/* options for lsb_reljgrp() call */
+    /* release only the parent job group (per the constant name -- TODO confirm) */
+    public static final int JGRP_RELEASE_PARENTONLY = 0x01;
+
+
+    /**
+     * \brief Records of logged events
+     * NOTE: field declaration order must match the native struct layout (JNA maps fields in order).
+     */
+    public static class logSwitchLog extends Structure {
+        public static class ByReference extends logSwitchLog implements Structure.ByReference {}
+        public static class ByValue extends logSwitchLog implements Structure.ByValue {}
+        public logSwitchLog() {}
+        public logSwitchLog(Pointer p) { super(p); read(); }
+
+
+        /**
+         * < The last jobId so far
+         */
+        public int lastJobId;
+/* The fields below exist only when the native library is built with LSF_SIMULATOR,
+   so they are kept commented out here. */
+/*#if defined(LSF_SIMULATOR)*/
+
+/**< last trace record time */
+/*    public NativeLong lastTraceTime;*/
+
+        /**< last trace record type */
+/*public int    lastTraceType;*/
+
+    /**< last trace record info */
+/*public String lastTraceInfo;*/
+        /*#endif*/
+    }
+
+
+
+    /**
+     * \brief Records of job CPU data logged event
+     * NOTE: field declaration order must match the native struct layout (JNA maps fields in order).
+     */
+    public static class dataLoggingLog extends Structure {
+        public static class ByReference extends dataLoggingLog implements Structure.ByReference {}
+        public static class ByValue extends dataLoggingLog implements Structure.ByValue {}
+        public dataLoggingLog() {}
+        public dataLoggingLog(Pointer p) { super(p); read(); }
+
+
+        /**
+         * < The time of last job cpu data logging
+         */
+        public NativeLong loggingTime;
+    }
+
+
+
+    /**
+     * \brief  new job group log.
+     * NOTE: field declaration order must match the native struct layout (JNA maps fields in order).
+     */
+    public static class jgrpNewLog extends Structure {
+        public static class ByReference extends jgrpNewLog implements Structure.ByReference {}
+        public static class ByValue extends jgrpNewLog implements Structure.ByValue {}
+        public jgrpNewLog() {}
+        public jgrpNewLog(Pointer p) { super(p); read(); }
+
+
+        /**
+         * < The user ID of the submitter
+         */
+        public int userId;
+
+        /**
+         * < The job submission time
+         */
+        public NativeLong submitTime;
+
+        /**
+         * < The name of the submitter
+         */
+        public byte[] userName = new byte[MAX_LSB_NAME_LEN];
+
+        /**
+         * < The job dependency condition
+         */
+        public String depCond;
+
+        /**
+         * < Time event string
+         */
+        public String timeEvent;
+
+        /**
+         * < Job group name
+         */
+        public String groupSpec;
+
+        /**
+         * < New job group name
+         */
+        public String destSpec;
+
+        /**
+         * < Delete options in options field
+         */
+        public int delOptions;
+
+        /**
+         * < Extended Delete options in options2 field
+         */
+        public int delOptions2;
+
+        /**
+         * < Platform type: such as Unix, Windows
+         */
+        public int fromPlatform;
+
+        /**
+         * < SLA service class name under which the job runs
+         */
+        public String sla;
+
+        /**
+         * < Max job group slots limit
+         */
+        public int maxJLimit;
+
+        /**
+         * < Job group creation method: implicit or explicit
+         */
+        public int options;
+    }
+
+
+
+    /**
+     * \brief  job group control log.
+     * NOTE: field declaration order must match the native struct layout (JNA maps fields in order).
+     */
+    public static class jgrpCtrlLog extends Structure {
+        public static class ByReference extends jgrpCtrlLog implements Structure.ByReference {}
+        public static class ByValue extends jgrpCtrlLog implements Structure.ByValue {}
+        public jgrpCtrlLog() {}
+        public jgrpCtrlLog(Pointer p) { super(p); read(); }
+
+
+        /**
+         * < The user ID of the submitter
+         */
+        public int userId;
+
+        /**
+         * < The name of the submitter
+         */
+        public byte[] userName = new byte[MAX_LSB_NAME_LEN];
+
+        /**
+         * < Job group name
+         */
+        public String groupSpec;
+
+        /**
+         * < Options
+         */
+        public int options;
+
+        /**
+         * < Job control JGRP_RELEASE, JGRP_HOLD, JGRP_DEL
+         */
+        public int ctrlOp;
+    }
+
+
+
+    /**
+     * \brief  job group status log.
+     * NOTE: field declaration order must match the native struct layout (JNA maps fields in order).
+     */
+    public static class jgrpStatusLog extends Structure {
+        public static class ByReference extends jgrpStatusLog implements Structure.ByReference {}
+        public static class ByValue extends jgrpStatusLog implements Structure.ByValue {}
+        public jgrpStatusLog() {}
+        public jgrpStatusLog(Pointer p) { super(p); read(); }
+
+
+        /**
+         * < The full group path name for the job group
+         */
+        public String groupSpec;
+
+        /**
+         * < Job group status
+         */
+        public int status;
+
+        /**
+         * < Prior status
+         */
+        public int oldStatus;
+    }
+
+
+
+    /**
+     * \brief jobNewLog logged in lsb.events when a job is submitted.
+     * NOTE: field declaration order must match the native struct layout (JNA maps fields in order).
+     */
+    public static class jobNewLog extends Structure {
+        public static class ByReference extends jobNewLog implements Structure.ByReference {}
+        public static class ByValue extends jobNewLog implements Structure.ByValue {}
+        public jobNewLog() {}
+        public jobNewLog(Pointer p) { super(p); read(); }
+
+
+        /**
+         * < The job ID that the LSF assigned to the job
+         */
+        public int jobId;
+
+        /**
+         * < The user ID of the submitter
+         */
+        public int userId;
+
+        /**
+         * < The name of the submitter
+         */
+        public byte[] userName = new byte[MAX_LSB_NAME_LEN];
+
+        /**
+         * < Job submission options. see \ref lsb_submit.
+         */
+        public int options;
+
+        /**
+         * < Job submission options. see \ref lsb_submit.
+         */
+        public int options2;
+
+        /**
+         * < The number of processors requested for execution
+         */
+        public int numProcessors;
+
+        /**
+         * < The job submission time
+         */
+        public NativeLong submitTime;
+
+        /**
+         * < The job should be started on or after this time
+         */
+        public NativeLong beginTime;
+
+        /**
+         * < If the job has not finished by this time, it will be killed
+         */
+        public NativeLong termTime;
+
+        /**
+         * < The signal value sent to the job 10 minutes before its run window closes
+         */
+        public int sigValue;
+
+        /**
+         * < The checkpointing period
+         */
+        public int chkpntPeriod;
+
+        /**
+         * < The process ID assigned to the job when it was restarted
+         */
+        public int restartPid;
+
+        /**
+         * < The user's resource limits
+         */
+        public int[] rLimits = new int[LibLsf.LSF_RLIM_NLIMITS];
+
+        /**
+         * < The model, host name or host type for scaling CPULIMIT and RUNLIMIT
+         */
+        public byte[] hostSpec = new byte[LibLsf.MAXHOSTNAMELEN];
+
+        /**
+         * < The CPU factor for the above model, host name or host type
+         */
+        public float hostFactor;
+
+        /**
+         * < The file creation mask for this job
+         */
+        public int umask;
+
+        /**
+         * < The name of the queue to which this job was submitted
+         */
+        public byte[] queue = new byte[MAX_LSB_NAME_LEN];
+
+        /**
+         * < The resource requirements of the job
+         */
+        public String resReq;
+
+        /**
+         * < The submission host name
+         */
+        public byte[] fromHost = new byte[LibLsf.MAXHOSTNAMELEN];
+
+        /**
+         * < The current working directory
+         */
+        public String cwd;
+
+        /**
+         * < The checkpoint directory
+         */
+        public String chkpntDir;
+
+        /**
+         * < The input file name
+         */
+        public String inFile;
+
+        /**
+         * < The output file name
+         */
+        public String outFile;
+
+        /**
+         * < The error output file name
+         */
+        public String errFile;
+
+        /**
+         * < Job spool input file
+         */
+        public String inFileSpool;
+
+        /**
+         * < Job spool command file
+         */
+        public String commandSpool;
+
+        /**
+         * < Job spool directory
+         */
+        public String jobSpoolDir;
+
+        /**
+         * < The home directory of the submitter
+         */
+        public String subHomeDir;
+
+        /**
+         * < The job file name
+         */
+        public String jobFile;
+
+        /**
+         * < The number of hosts considered for dispatching this job
+         */
+        public int numAskedHosts;
+
+        /**
+         * < The array of names of hosts considered for dispatching this job
+         */
+        public Pointer askedHosts;
+
+        /**
+         * < The job dependency condition
+         */
+        public String dependCond;
+
+        /**
+         * < Time event string
+         */
+        public String timeEvent;
+
+        /**
+         * < The job name
+         */
+        public String jobName;
+
+        /**
+         * < The job command
+         */
+        public String command;
+
+        /**
+         * < The number of files to transfer
+         */
+        public int nxf;
+
+        /**
+         * < The array of file transfer specifications. (The xFile structure is defined in <lsf/lsbatch.h>)
+         */
+        public Pointer /* xFile.ByReference */ xf;
+
+        /**
+         * < The command string to be pre_executed
+         */
+        public String preExecCmd;
+
+        /**
+         * < User option mail string
+         */
+        public String mailUser;
+
+        /**
+         * < The project name for this job, used for accounting purposes
+         */
+        public String projectName;
+
+        /**
+         * < Port to be used for interactive jobs
+         */
+        public int niosPort;
+
+        /**
+         * < Maximum number of processors
+         */
+        public int maxNumProcessors;
+
+        /**
+         * < Execution host type
+         */
+        public String schedHostType;
+
+        /**
+         * < Login shell specified by user
+         */
+        public String loginShell;
+
+        /**
+         * < The user group name for this job
+         */
+        public String userGroup;
+
+        /**
+         * < List of alarm conditions for job
+         */
+        public String exceptList;
+
+        /**
+         * < Array idx, must be 0 in JOB_NEW
+         */
+        public int idx;
+
+        /**
+         * < User priority
+         */
+        public int userPriority;
+
+        /**
+         * < Advance reservation ID
+         */
+        public String rsvId;
+
+        /**
+         * < The job group under which the job runs.
+         */
+        public String jobGroup;
+
+        /**
+         * < External scheduling options
+         */
+        public String extsched;
+
+        /**
+         * < Warning time period in seconds, -1 if unspecified
+         */
+        public int warningTimePeriod;
+
+        /**
+         * < Warning action, SIGNAL | CHKPNT | command, null if unspecified
+         */
+        public String warningAction;
+
+        /**
+         * < The service class under which the job runs.
+         */
+        public String sla;
+
+        /**
+         * < The absolute run limit of the job
+         */
+        public int SLArunLimit;
+
+        /**
+         * < License Project
+         */
+        public String licenseProject;
+
+        /**
+         * < Extended bitwise inclusive OR of options flags. See \ref lsb_submit.
+         */
+        public int options3;
+
+        /**
+         * < Application profile under which the job runs.
+         */
+        public String app;
+
+        /**
+         * < Post-execution commands.
+         */
+        public String postExecCmd;
+
+        /**
+         * < Runtime estimate specified.
+         */
+        public int runtimeEstimation;
+
+        /**
+         * < Job-level requeue exit values.
+         */
+        public String requeueEValues;
+
+        /**
+         * < Initial checkpoint period
+         */
+        public int initChkpntPeriod;
+
+        /**
+         * < Job migration threshold.
+         */
+        public int migThreshold;
+
+        /**
+         * < Resize notify command
+         */
+        public String notifyCmd;
+
+        /**
+         * < Job description.
+         */
+        public String jobDescription;
+
+        /**
+         * < For new options in future
+         */
+        public submit_ext.ByReference submitExt;
+
+/* The fields below exist only when the native library is built with LSF_SIMULATOR,
+   so they are kept commented out here. */
+/*#if defined(LSF_SIMULATOR)*/
+
+/**< maximum memory */
+        /*public int    maxmem;*/
+
+        /**< exit status */
+        /*public int    exitstatus;*/
+
+        /**< job run time */
+        /*public int    runtime;*/
+
+        /**< system cpu time */
+        /*public int    cputime;*/
+
+        /**< allocated slots */
+        /*public int    slots;*/
+
+        /**< cpu factor */
+        /*public float  cpufactor;*/
+
+        /*#endif*/
+    }
+
+
+
+/*
+#if defined(LSF_SIMULATOR)
+public static class jobArrayElementLog extends Structure {
+public static class ByReference extends jobArrayElementLog implements Structure.ByReference {}
+public static class ByValue extends jobArrayElementLog implements Structure.ByValue {}
+
+    public int jobId;
+*/
+/* Copy LSF simulator related fields from jobNewLog */
+/*
+    public int idx;
+        public int maxmem;
+        public int exitstatus;
+        public int runtime;
+        public int cputime;
+        public int slots;
+        public float cpufactor;
+    };
+    #endif
+    */
+
+    /**
+     * Logged in lsb.events when a job is modified (bmod); JNA mapping of the
+     * native {@code struct jobModLog} from {@code <lsf/lsbatch.h>}.
+     *
+     * NOTE(review): JNA derives the native memory layout from the field
+     * declaration order, so the fields below must stay in the native struct's
+     * order — do not reorder or remove them.
+     */
+    public static class jobModLog extends Structure {
+        public static class ByReference extends jobModLog implements Structure.ByReference {}
+        public static class ByValue extends jobModLog implements Structure.ByValue {}
+        public jobModLog() {}
+        public jobModLog(Pointer p) { super(p); read(); }
+
+        /**
+         * JobId or job name as a string.
+         * FIX: this is the first member of the native struct; it had been
+         * accidentally swallowed into the following comment during the
+         * C-to-Java conversion, leaving the Java layout one pointer short.
+         */
+        public String jobIdStr;
+
+        /** Job submission options (see \ref lsb_submit). */
+        public int options;
+
+        /** Extended job submission options (see \ref lsb_submit). */
+        public int options2;
+
+        /** Delete options in the options field. */
+        public int delOptions;
+
+        /** Extended delete options in the options2 field. */
+        public int delOptions2;
+
+        /** The user ID of the submitter. */
+        public int userId;
+
+        /** The name of the submitter. */
+        public String userName;
+
+        /** The job submission time. */
+        public int submitTime;
+
+        /** The file creation mask for this job. */
+        public int umask;
+
+        /** The number of processors requested for execution. */
+        public int numProcessors;
+
+        /** The job should be started on or after this time. */
+        public NativeLong beginTime;
+
+        /** If the job has not finished by this time, it will be killed. */
+        public NativeLong termTime;
+
+        /** The signal value sent to the job 10 minutes before its run window closes. */
+        public int sigValue;
+
+        /** The process ID assigned to the job when it was restarted. */
+        public int restartPid;
+
+        /** The job name. */
+        public String jobName;
+
+        /** The name of the queue to which this job was submitted. */
+        public String queue;
+
+        /** The number of hosts considered for dispatching this job. */
+        public int numAskedHosts;
+
+        /** List of asked hosts. */
+        public Pointer askedHosts;
+
+        /** The resource requirements of the job. */
+        public String resReq;
+
+        /** User's resource limits (soft). */
+        public int[] rLimits = new int[LibLsf.LSF_RLIM_NLIMITS];
+
+        /** The model, host name or host type for scaling CPULIMIT and RUNLIMIT. */
+        public String hostSpec;
+
+        /** The job dependency condition. */
+        public String dependCond;
+
+        /** Time event string. */
+        public String timeEvent;
+
+        /** The home directory of the submitter. */
+        public String subHomeDir;
+
+        /** The input file name. */
+        public String inFile;
+
+        /** The output file name. */
+        public String outFile;
+
+        /** The error output file name. */
+        public String errFile;
+
+        /** Command description — this is really a job description field. */
+        public String command;
+
+        /** Job spool input file. */
+        public String inFileSpool;
+
+        /** Job spool command file. */
+        public String commandSpool;
+
+        /** The checkpointing period. */
+        public int chkpntPeriod;
+
+        /** The checkpoint directory. */
+        public String chkpntDir;
+
+        /** The number of files to transfer. */
+        public int nxf;
+
+        /** The array of file transfer specifications (xFile is defined in <lsf/lsbatch.h>). */
+        public Pointer /* xFile.ByReference */ xf;
+
+        /** The job file name: if == '\\0', mbatchd makes up a name; otherwise mbatchd uses the given name. '\\0' for a regular job, non-nil means it is a restart job. */
+        public String jobFile;
+
+        /** The submission host name. */
+        public String fromHost;
+
+        /** The current working directory. */
+        public String cwd;
+
+        /** The pre-execution command. */
+        public String preExecCmd;
+
+        /** User option mail string. */
+        public String mailUser;
+
+        /** Project name for the job; used for accounting purposes. */
+        public String projectName;
+
+        /** NIOS callback port to be used for interactive jobs. */
+        public int niosPort;
+
+        /** Maximum number of processors. */
+        public int maxNumProcessors;
+
+        /** The login shell specified by user. */
+        public String loginShell;
+
+        /** Restart job's submission host type. */
+        public String schedHostType;
+
+        /** The user group name for this job. */
+        public String userGroup;
+
+        /** List of job exception conditions. */
+        public String exceptList;
+
+        /** User priority. */
+        public int userPriority;
+
+        /** Advance reservation ID. */
+        public String rsvId;
+
+        /** External scheduling options. */
+        public String extsched;
+
+        /** Job warning time period in seconds; -1 if unspecified. */
+        public int warningTimePeriod;
+
+        /** Job warning action: SIGNAL | CHKPNT | command; null if unspecified. */
+        public String warningAction;
+
+        /** The job group under which the job runs. */
+        public String jobGroup;
+
+        /** SLA service class name under which the job runs. */
+        public String sla;
+
+        /** LSF License Scheduler project name. */
+        public String licenseProject;
+
+        /** Extended bitwise inclusive OR of options flags (see \ref lsb_submit). */
+        public int options3;
+
+        /** Extended delete options in the options3 field. */
+        public int delOptions3;
+
+        /** Application profile under which the job runs. */
+        public String app;
+
+        /** Absolute priority scheduling string set by administrators to denote static system APS value or ADMIN factor APS value. */
+        public String apsString;
+
+        /** Post-execution commands. */
+        public String postExecCmd;
+
+        /** Runtime estimate. */
+        public int runtimeEstimation;
+
+        /** Job-level requeue exit values. */
+        public String requeueEValues;
+
+        /** Initial checkpoint period. */
+        public int initChkpntPeriod;
+
+        /** Job migration threshold. */
+        public int migThreshold;
+
+        /** Resize notify command. */
+        public String notifyCmd;
+
+        /** Job description. */
+        public String jobDescription;
+
+        /** For new options in future. */
+        public submit_ext.ByReference submitExt;
+    }
+
+
+
+    /**
+     * Logged in lsb.events when a job is started; JNA mapping of the native
+     * {@code struct jobStartLog}. Field order mirrors the native struct
+     * (JNA derives layout from declaration order — do not reorder).
+     */
+    public static class jobStartLog extends Structure {
+        public static class ByReference extends jobStartLog implements Structure.ByReference {}
+        public static class ByValue extends jobStartLog implements Structure.ByValue {}
+        public jobStartLog() {}
+        public jobStartLog(Pointer p) { super(p); read(); }
+
+        /** The unique ID for the job. */
+        public int jobId;
+
+        /** The status of the job (see \ref lsb_readjobinfo). */
+        public int jStatus;
+
+        /** The job process ID. */
+        public int jobPid;
+
+        /** The job process group ID. */
+        public int jobPGid;
+
+        /** The CPU factor of the first execution host. */
+        public float hostFactor;
+
+        /** The number of processors used for execution. */
+        public int numExHosts;
+
+        /** The array of execution host names. */
+        public Pointer execHosts;
+
+        /** Pre-execution command defined in the queue. */
+        public String queuePreCmd;
+
+        /** Post-execution command defined in the queue. */
+        public String queuePostCmd;
+
+        /** Job processing flags. */
+        public int jFlags;
+
+        /** The user group name for this job. */
+        public String userGroup;
+
+        /** Job array index; must be 0 in JOB_NEW. */
+        public int idx;
+
+        /** Placement information of LSF HPC jobs. */
+        public String additionalInfo;
+
+        /** How long a backfilled job can run; used for preemption backfill jobs. */
+        public int duration4PreemptBackfill;
+
+        /** Job flags 2. */
+        public int jFlags2;
+    }
+
+
+
+    /**
+     * Logged in lsb.events when a job start request is accepted; JNA mapping
+     * of the native {@code struct jobStartAcceptLog}.
+     */
+    public static class jobStartAcceptLog extends Structure {
+        public static class ByReference extends jobStartAcceptLog implements Structure.ByReference {}
+        public static class ByValue extends jobStartAcceptLog implements Structure.ByValue {}
+        public jobStartAcceptLog() {}
+        public jobStartAcceptLog(Pointer p) { super(p); read(); }
+
+        /** The unique ID for the job. */
+        public int jobId;
+
+        /** The job process ID. */
+        public int jobPid;
+
+        /** The job process group ID. */
+        public int jobPGid;
+
+        /** Job array index; must be 0 in JOB_NEW. */
+        public int idx;
+    }
+
+
+
+    /**
+     * Logged in lsb.events when a job is executed; JNA mapping of the native
+     * {@code struct jobExecuteLog}.
+     */
+    public static class jobExecuteLog extends Structure {
+        public static class ByReference extends jobExecuteLog implements Structure.ByReference {}
+        public static class ByValue extends jobExecuteLog implements Structure.ByValue {}
+        public jobExecuteLog() {}
+        public jobExecuteLog(Pointer p) { super(p); read(); }
+
+        /** The unique ID for the job. */
+        public int jobId;
+
+        /** User ID under which the job is running. */
+        public int execUid;
+
+        /** Home directory of the user denoted by execUid. */
+        public String execHome;
+
+        /** Current working directory where the job is running. */
+        public String execCwd;
+
+        /** The job process group ID. */
+        public int jobPGid;
+
+        /** User name under which the job is running. */
+        public String execUsername;
+
+        /** The job process ID. */
+        public int jobPid;
+
+        /** Job array index; must be 0 in JOB_NEW. */
+        public int idx;
+
+        /** Placement information of LSF HPC jobs. */
+        public String additionalInfo;
+
+        /** The run limit scaled by the exec host. */
+        public int SLAscaledRunLimit;
+
+        /** The position of the job. */
+        public int position;
+
+        /** The rusage satisfied at job runtime. */
+        public String execRusage;
+
+        /** The duration for preemptive backfill class in seconds. */
+        public int duration4PreemptBackfill;
+    }
+
+
+
+
+    /**
+     * Logged when a job's status is changed; JNA mapping of the native
+     * {@code struct jobStatusLog}.
+     */
+    public static class jobStatusLog extends Structure {
+        public static class ByReference extends jobStatusLog implements Structure.ByReference {}
+        public static class ByValue extends jobStatusLog implements Structure.ByValue {}
+        public jobStatusLog() {}
+        public jobStatusLog(Pointer p) { super(p); read(); }
+
+        /** The unique ID for the job. */
+        public int jobId;
+
+        /** The job status (see \ref lsb_readjobinfo). */
+        public int jStatus;
+
+        /** The reason the job is pending or suspended (see \ref lsb_pendreason and \ref lsb_suspreason). */
+        public int reason;
+
+        /** The load indices that have overloaded the host (see \ref lsb_pendreason and \ref lsb_suspreason). */
+        public int subreasons;
+
+        /** The CPU time consumed before this event occurred. */
+        public float cpuTime;
+
+        /** The job completion time. */
+        public NativeLong endTime;
+
+        /** Boolean indicating lsfRusage is logged. */
+        public int ru;
+
+        /**
+         * Resource usage statistics. The lsfRusage structure is defined in
+         * <lsf/lsf.h>. Note that the availability of certain fields depends on
+         * the platform on which the sbatchd runs; fields that do not make sense
+         * on the platform will be logged as -1.0.
+         */
+        public LibLsf.lsfRusage lsfRusage;
+
+        /** Job exit status flags. */
+        public int jFlags;
+
+        /** Job's exit status. */
+        public int exitStatus;
+
+        /** Job array index; must be 0 in JOB_NEW. */
+        public int idx;
+
+        /** Job termination reason, see <lsf/lsbatch.h>. */
+        public int exitInfo;
+    }
+
+
+
+
+    /**
+     * Logged when a job's status is changed (sbatchd-side); JNA mapping of the
+     * native {@code struct sbdJobStatusLog}.
+     */
+    public static class sbdJobStatusLog extends Structure {
+        public static class ByReference extends sbdJobStatusLog implements Structure.ByReference {}
+        public static class ByValue extends sbdJobStatusLog implements Structure.ByValue {}
+        public sbdJobStatusLog() {}
+        public sbdJobStatusLog(Pointer p) { super(p); read(); }
+
+        /** The unique ID for the job. */
+        public int jobId;
+
+        /** The status of the job (see \ref lsb_readjobinfo). */
+        public int jStatus;
+
+        /** The reason the job is pending or suspended (see \ref lsb_pendreason and \ref lsb_suspreason). */
+        public int reasons;
+
+        /** The load indices that have overloaded the host (see \ref lsb_pendreason and \ref lsb_suspreason). */
+        public int subreasons;
+
+        /** Action process ID. */
+        public int actPid;
+
+        /** Action value: SIG_CHKPNT | SIG_CHKPNT_COPY | SIG_WARNING. */
+        public int actValue;
+
+        /** Action period. */
+        public NativeLong actPeriod;
+
+        /** Action flag. */
+        public int actFlags;
+
+        /** Action logging status. */
+        public int actStatus;
+
+        /** Action reason: SUSP_MBD_LOCK | SUSP_USER_STOP | SUSP_USER_RESUME | SUSP_SBD_STARTUP. */
+        public int actReasons;
+
+        /** Sub reason: SUB_REASON_RUNLIMIT | SUB_REASON_DEADLINE | SUB_REASON_PROCESSLIMIT | SUB_REASON_MEMLIMIT | SUB_REASON_CPULIMIT. */
+        public int actSubReasons;
+
+        /** Job array index; must be 0 in JOB_NEW. */
+        public int idx;
+
+        /** The signal value. */
+        public int sigValue;
+
+        /** The termination reason of a job. */
+        public int exitInfo;
+    }
+
+
+
+    /**
+     * Job status that sbatchd could send to MBD; JNA mapping of the native
+     * {@code struct sbdUnreportedStatusLog}.
+     */
+    public static class sbdUnreportedStatusLog extends Structure {
+        public static class ByReference extends sbdUnreportedStatusLog implements Structure.ByReference {}
+        public static class ByValue extends sbdUnreportedStatusLog implements Structure.ByValue {}
+        public sbdUnreportedStatusLog() {}
+        public sbdUnreportedStatusLog(Pointer p) { super(p); read(); }
+
+        /** The unique ID for the job. */
+        public int jobId;
+
+        /** Action process ID. */
+        public int actPid;
+
+        /** The job process ID. */
+        public int jobPid;
+
+        /** The job process group ID. */
+        public int jobPGid;
+
+        /** New status of the job. */
+        public int newStatus;
+
+        /** Pending or suspending reason code. */
+        public int reason;
+
+        /** Pending or suspending subreason code. */
+        public int subreasons;
+
+        /** Resource usage information for the job (see jobFinishLog). */
+        public LibLsf.lsfRusage lsfRusage;
+
+        /** User ID under which the job is running. */
+        public int execUid;
+
+        /** Job exit status. */
+        public int exitStatus;
+
+        /** Current working directory where the job is running. */
+        public String execCwd;
+
+        /** Home directory of the user denoted by execUid. */
+        public String execHome;
+
+        /** User name under which the job is running. */
+        public String execUsername;
+
+        /** Message index. */
+        public int msgId;
+
+        /** Job's resource usage. */
+        public LibLsf.jRusage runRusage;
+
+        /** Signal value. */
+        public int sigValue;
+
+        /** Action logging status. */
+        public int actStatus;
+
+        /** Sequence status of the job. */
+        public int seq;
+
+        /** Job array index. */
+        public int idx;
+
+        /** The termination reason of a job. */
+        public int exitInfo;
+    }
+
+
+
+    /**
+     * Logged when a job is switched to another queue; JNA mapping of the
+     * native {@code struct jobSwitchLog}.
+     */
+    public static class jobSwitchLog extends Structure {
+        public static class ByReference extends jobSwitchLog implements Structure.ByReference {}
+        public static class ByValue extends jobSwitchLog implements Structure.ByValue {}
+        public jobSwitchLog() {}
+        public jobSwitchLog(Pointer p) { super(p); read(); }
+
+        /** The user ID of the submitter. */
+        public int userId;
+
+        /** The unique ID of the job. */
+        public int jobId;
+
+        /** The name of the queue the job has been switched to. */
+        public byte[] queue = new byte[MAX_LSB_NAME_LEN];
+
+        /** Job array index; must be 0 in JOB_NEW. */
+        public int idx;
+
+        /** The name of the submitter. */
+        public byte[] userName = new byte[MAX_LSB_NAME_LEN];
+    }
+
+
+
+    /**
+     * Logged when a job is moved to another position; JNA mapping of the
+     * native {@code struct jobMoveLog}.
+     */
+    public static class jobMoveLog extends Structure {
+        public static class ByReference extends jobMoveLog implements Structure.ByReference {}
+        public static class ByValue extends jobMoveLog implements Structure.ByValue {}
+        public jobMoveLog() {}
+        public jobMoveLog(Pointer p) { super(p); read(); }
+
+        /** The user ID of the submitter. */
+        public int userId;
+
+        /** The unique ID of the job. */
+        public int jobId;
+
+        /** The new position of the job. */
+        public int position;
+
+        /** The operation code for the move (see \ref lsb_movejob). */
+        public int base;
+
+        /** Job array index; must be 0 in JOB_NEW. */
+        public int idx;
+
+        /** The name of the submitter. */
+        public byte[] userName = new byte[MAX_LSB_NAME_LEN];
+    }
+
+
+
+    /**
+     * Checkpoint log record; JNA mapping of the native {@code struct chkpntLog}.
+     */
+    public static class chkpntLog extends Structure {
+        public static class ByReference extends chkpntLog implements Structure.ByReference {}
+        public static class ByValue extends chkpntLog implements Structure.ByValue {}
+        public chkpntLog() {}
+        public chkpntLog(Pointer p) { super(p); read(); }
+
+        /** The unique ID of the job. */
+        public int jobId;
+
+        /** The new checkpointing period. */
+        public NativeLong period;
+
+        /** The process ID of the checkpointing process (a child sbatchd). */
+        public int pid;
+
+        /** 0: checkpoint started; 1: checkpoint succeeded. */
+        public int ok;
+
+        /**
+         * One of the following: \n LSB_CHKPNT_KILL: kill process if checkpoint
+         * successful \n LSB_CHKPNT_FORCE: force checkpoint even if
+         * non-checkpointable conditions exist \n LSB_CHKPNT_MIG: checkpoint for
+         * the purpose of migration.
+         */
+        public int flags;
+
+        /** Job array index; must be 0 in JOB_NEW. */
+        public int idx;
+    }
+
+
+
+    /**
+     * Job requeue log record; JNA mapping of the native {@code struct jobRequeueLog}.
+     */
+    public static class jobRequeueLog extends Structure {
+        public static class ByReference extends jobRequeueLog implements Structure.ByReference {}
+        public static class ByValue extends jobRequeueLog implements Structure.ByValue {}
+        public jobRequeueLog() {}
+        public jobRequeueLog(Pointer p) { super(p); read(); }
+
+        /** The unique ID of the job. */
+        public int jobId;
+
+        /** Job array index; must be 0 in JOB_NEW. */
+        public int idx;
+    }
+
+
+
+    /**
+     * Job clean log record; JNA mapping of the native {@code struct jobCleanLog}.
+     */
+    public static class jobCleanLog extends Structure {
+        public static class ByReference extends jobCleanLog implements Structure.ByReference {}
+        public static class ByValue extends jobCleanLog implements Structure.ByValue {}
+        public jobCleanLog() {}
+        public jobCleanLog(Pointer p) { super(p); read(); }
+
+        /** The unique ID for the job. */
+        public int jobId;
+
+        /** Job array index; must be 0 in JOB_NEW. */
+        public int idx;
+    }
+
+
+
+    /**
+     * Job exception log record; JNA mapping of the native {@code struct jobExceptionLog}.
+     */
+    public static class jobExceptionLog extends Structure {
+        public static class ByReference extends jobExceptionLog implements Structure.ByReference {}
+        public static class ByValue extends jobExceptionLog implements Structure.ByValue {}
+        public jobExceptionLog() {}
+        public jobExceptionLog(Pointer p) { super(p); read(); }
+
+        /** The unique ID for the job. */
+        public int jobId;
+
+        /** Job exception handling mask. */
+        public int exceptMask;
+
+        /** Action Id (kill | alarm | rerun | setexcept). */
+        public int actMask;
+
+        /** Time event string. */
+        public NativeLong timeEvent;
+
+        /**
+         * Exception info: pending reason for missched or cantrun exception,
+         * the exit code of the job for the abend exception, otherwise 0.
+         */
+        public int exceptInfo;
+
+        /** Job array index; must be 0 in JOB_NEW. */
+        public int idx;
+    }
+
+
+
+    /**
+     * Signal action log record; JNA mapping of the native {@code struct sigactLog}.
+     */
+    public static class sigactLog extends Structure {
+        public static class ByReference extends sigactLog implements Structure.ByReference {}
+        public static class ByValue extends sigactLog implements Structure.ByValue {}
+        public sigactLog() {}
+        public sigactLog(Pointer p) { super(p); read(); }
+
+        /** The unique ID of the job. */
+        public int jobId;
+
+        /** Action period. */
+        public NativeLong period;
+
+        /** Action process ID. */
+        public int pid;
+
+        /** Job status. */
+        public int jStatus;
+
+        /** Pending reasons. */
+        public int reasons;
+
+        /** Action flag. */
+        public int flags;
+
+        /**
+         * Signal symbol from the set: DELETEJOB | KILL | KILLREQUEUE |
+         * REQUEUE_DONE | REQUEUE_EXIT | REQUEUE_PEND | REQUEUE_PSUSP_ADMIN |
+         * REQUEUE_PSUSP_USER | SIG_CHKPNT | SIG_CHKPNT_COPY.
+         */
+        public String signalSymbol;
+
+        /**
+         * Action logging status (ACT_NO | ACT_START | ACT_PREEMPT | ACT_DONE |
+         * ACT_FAIL). Shown in signal_action.
+         */
+        public int actStatus;
+
+        /** Job array index; must be 0 in JOB_NEW. */
+        public int idx;
+    }
+
+
+
+    /**
+     * Migration log record; JNA mapping of the native {@code struct migLog}.
+     */
+    public static class migLog extends Structure {
+        public static class ByReference extends migLog implements Structure.ByReference {}
+        public static class ByValue extends migLog implements Structure.ByValue {}
+        public migLog() {}
+        public migLog(Pointer p) { super(p); read(); }
+
+        /** The job to be migrated. */
+        public int jobId;
+
+        /** The number of candidate hosts for migration. */
+        public int numAskedHosts;
+
+        /** The array of candidate host names. */
+        public Pointer askedHosts;
+
+        /** The user ID of the submitter. */
+        public int userId;
+
+        /** Job array index; must be 0 in JOB_NEW. */
+        public int idx;
+
+        /** The user name of the submitter. */
+        public byte[] userName = new byte[MAX_LSB_NAME_LEN];
+    }
+
+
+
+    /**
+     * Signal log record; JNA mapping of the native {@code struct signalLog}.
+     */
+    public static class signalLog extends Structure {
+        public static class ByReference extends signalLog implements Structure.ByReference {}
+        public static class ByValue extends signalLog implements Structure.ByValue {}
+        public signalLog() {}
+        public signalLog(Pointer p) { super(p); read(); }
+
+        /** The user ID of the submitter. */
+        public int userId;
+
+        /** The unique ID of the job. */
+        public int jobId;
+
+        /**
+         * Signal symbol from the set: DELETEJOB | KILL | KILLREQUEUE |
+         * REQUEUE_DONE | REQUEUE_EXIT | REQUEUE_PEND | REQUEUE_PSUSP_ADMIN |
+         * REQUEUE_PSUSP_USER | SIG_CHKPNT | SIG_CHKPNT_COPY.
+         */
+        public String signalSymbol;
+
+        /** The number of running times. */
+        public int runCount;
+
+        /** Job array index; must be 0 in JOB_NEW. */
+        public int idx;
+
+        /** The name of the submitter. */
+        public byte[] userName = new byte[MAX_LSB_NAME_LEN];
+    }
+
+
+
+    /**
+     * Logged when the bqc command is invoked; JNA mapping of the native
+     * {@code struct queueCtrlLog}.
+     */
+    public static class queueCtrlLog extends Structure {
+        public static class ByReference extends queueCtrlLog implements Structure.ByReference {}
+        public static class ByValue extends queueCtrlLog implements Structure.ByValue {}
+        public queueCtrlLog() {}
+        public queueCtrlLog(Pointer p) { super(p); read(); }
+
+        /** The queue control operation (see \ref lsb_queuecontrol). */
+        public int opCode;
+
+        /** The name of the queue. */
+        public byte[] queue = new byte[MAX_LSB_NAME_LEN];
+
+        /** The user ID of the submitter. */
+        public int userId;
+
+        /** The name of the submitter. */
+        public byte[] userName = new byte[MAX_LSB_NAME_LEN];
+
+        /** Queue control message. */
+        public byte[] message = new byte[LibLsf.MAXLINELEN];
+    }
+
+
+
+    /**
+     * New debug log record; JNA mapping of the native {@code struct newDebugLog}.
+     */
+    public static class newDebugLog extends Structure {
+        public static class ByReference extends newDebugLog implements Structure.ByReference {}
+        public static class ByValue extends newDebugLog implements Structure.ByValue {}
+        public newDebugLog() {}
+        public newDebugLog(Pointer p) { super(p); read(); }
+
+        /** The debug control operation. */
+        public int opCode;
+
+        /** Debug level. */
+        public int level;
+
+        /** Class of log. */
+        public int _logclass;
+
+        /** Log enabled, disabled. */
+        public int turnOff;
+
+        /** Name of log file. */
+        public byte[] logFileName = new byte[LibLsf.MAXLSFNAMELEN];
+
+        /** The user ID of the submitter. */
+        public int userId;
+    }
+
+
+
+    /**
+     * Logs the host control information; JNA mapping of the native
+     * {@code struct hostCtrlLog}.
+     */
+    public static class hostCtrlLog extends Structure {
+        public static class ByReference extends hostCtrlLog implements Structure.ByReference {}
+        public static class ByValue extends hostCtrlLog implements Structure.ByValue {}
+        public hostCtrlLog() {}
+        public hostCtrlLog(Pointer p) { super(p); read(); }
+
+        /** The host control operation (see \ref lsb_hostcontrol). */
+        public int opCode;
+
+        /** The name of the host. */
+        public byte[] host = new byte[LibLsf.MAXHOSTNAMELEN];
+
+        /** The user ID of the submitter. */
+        public int userId;
+
+        /** The name of the submitter. */
+        public byte[] userName = new byte[MAX_LSB_NAME_LEN];
+
+        /** Host control message. */
+        public byte[] message = new byte[LibLsf.MAXLINELEN];
+    }
+
+
+
+    /**
+     * Logged when dynamic hosts are added to a group; JNA mapping of the
+     * native {@code struct hgCtrlLog}.
+     */
+    public static class hgCtrlLog extends Structure {
+        public static class ByReference extends hgCtrlLog implements Structure.ByReference {}
+        public static class ByValue extends hgCtrlLog implements Structure.ByValue {}
+        public hgCtrlLog() {}
+        public hgCtrlLog(Pointer p) { super(p); read(); }
+
+        /** The host control operation (see \ref lsb_hostcontrol). */
+        public int opCode;
+
+        /** The name of the host. */
+        public byte[] host = new byte[LibLsf.MAXHOSTNAMELEN];
+
+        /** The name of the host group. */
+        public byte[] grpname = new byte[LibLsf.MAXHOSTNAMELEN];
+
+        /** The user ID of the submitter. */
+        public int userId;
+
+        /** The name of the submitter. */
+        public byte[] userName = new byte[MAX_LSB_NAME_LEN];
+
+        /** Host group control message. */
+        public byte[] message = new byte[LibLsf.MAXLINELEN];
+    }
+
+
+
+
+    /** Simulator status flag: the simulator is ready to schedule jobs. */
+    public static final int SIMU_STATUS_READYSCHEDULE = 0x01;
+
+    /**
+     * \brief  mbatchd start log.
+     */
+    public static class mbdStartLog extends Structure {
+        public static class ByReference extends mbdStartLog implements Structure.ByReference {}
+        public static class ByValue extends mbdStartLog implements Structure.ByValue {}
+        public mbdStartLog() {}
+        public mbdStartLog(Pointer p) { super(p); read(); }
+
+
+        /**
+         * < The master host name
+         */
+        public byte[] master = new byte[LibLsf.MAXHOSTNAMELEN];
+
+        /**
+         * < The cluster name
+         */
+        public byte[] cluster = new byte[LibLsf.MAXLSFNAMELEN];
+
+        /**
+         * < The number of hosts in the cluster
+         */
+        public int numHosts;
+
+        /**
+         * < The number of queues in the cluster
+         */
+        public int numQueues;
+/*
+    public int    simDiffTime;
+    public int    pendJobsThreshold;
+    public int    simStatus;
+*/
+    }
+
+
+
+    /**
+     * mbatchd simulator status log. JNA mapping of the native
+     * {@code mbdSimStatusLog} struct.
+     */
+    public static class mbdSimStatusLog extends Structure {
+        public static class ByReference extends mbdSimStatusLog implements Structure.ByReference {}
+        public static class ByValue extends mbdSimStatusLog implements Structure.ByValue {}
+        public mbdSimStatusLog() {}
+        public mbdSimStatusLog(Pointer p) { super(p); read(); }
+
+        /** Simulator status (see {@code SIMU_STATUS_*} flags). */
+        public int simStatus;
+    }
+
+
+
+    /**
+     * \brief  mbatchd die log.
+     */
+    public static class mbdDieLog extends Structure {
+        public static class ByReference extends mbdDieLog implements Structure.ByReference {}
+        public static class ByValue extends mbdDieLog implements Structure.ByValue {}
+        public mbdDieLog() {}
+        public mbdDieLog(Pointer p) { super(p); read(); }
+
+
+        /**
+         * < The master host name
+         */
+        public byte[] master = new byte[LibLsf.MAXHOSTNAMELEN];
+
+        /**
+         * < The number of finished jobs that have been removed from the system and logged in the current event file
+         */
+        public int numRemoveJobs;
+
+        /**
+         * < The exit code from the master batch daemon
+         */
+        public int exitCode;
+
+        /**
+         * < mbatchd administrator control message
+         */
+        public byte[] message = new byte[LibLsf.MAXLINELEN];
+    }
+
+
+
+    /**
+     * \brief logged before mbatchd dies.
+     */
+    public static class unfulfillLog extends Structure {
+        public static class ByReference extends unfulfillLog implements Structure.ByReference {}
+        public static class ByValue extends unfulfillLog implements Structure.ByValue {}
+        public unfulfillLog() {}
+        public unfulfillLog(Pointer p) { super(p); read(); }
+
+
+        /**
+         * < The job ID.
+         */
+        public int jobId;
+
+        /**
+         * < The mbatchd has switched the job to a new queue but the sbatchd has not been informed of the switch
+         */
+        public int notSwitched;
+
+        /**
+         * < This signal was not sent to the job
+         */
+        public int sig;
+
+        /**
+         * < The job was not signaled to checkpoint itself
+         */
+        public int sig1;
+
+        /**
+         * < Checkpoint flags. see the chkpntLog structure below.
+         */
+        public int sig1Flags;
+
+        /**
+         * < The new checkpoint period for the job
+         */
+        public NativeLong chkPeriod;
+
+        /**
+         * < Flag for bmod running job's parameters
+         */
+        public int notModified;
+
+        /**
+         * < Job array index
+         */
+        public int idx;
+
+        /**
+         * < Option flags for pending job signals
+         */
+        public int miscOpts4PendSig;
+    }
+
+
+
+    /* Job termination reason codes, logged in the exitInfo field of jobFinishLog. */
+    public static final int TERM_UNKNOWN = 0;
+    public static final int TERM_PREEMPT = 1;
+    public static final int TERM_WINDOW = 2;
+    public static final int TERM_LOAD = 3;
+    public static final int TERM_OTHER = 4;
+    public static final int TERM_RUNLIMIT = 5;
+    public static final int TERM_DEADLINE = 6;
+    public static final int TERM_PROCESSLIMIT = 7;
+    public static final int TERM_FORCE_OWNER = 8;
+    public static final int TERM_FORCE_ADMIN = 9;
+    public static final int TERM_REQUEUE_OWNER = 10;
+    public static final int TERM_REQUEUE_ADMIN = 11;
+    public static final int TERM_CPULIMIT = 12;
+    public static final int TERM_CHKPNT = 13;
+    public static final int TERM_OWNER = 14;
+    public static final int TERM_ADMIN = 15;
+    public static final int TERM_MEMLIMIT = 16;
+    public static final int TERM_EXTERNAL_SIGNAL = 17;
+    public static final int TERM_RMS = 18;
+    public static final int TERM_ZOMBIE = 19;
+    public static final int TERM_SWAP = 20;
+    public static final int TERM_THREADLIMIT = 21;
+    public static final int TERM_SLURM = 22;
+    public static final int TERM_BUCKET_KILL = 23;
+    public static final int TERM_CTRL_PID = 24;
+    public static final int TERM_CWD_NOTEXIST = 25;
+
+    /**
+     * \brief logged in lsb.acct when a job finished.
+     */
+    public static class jobFinishLog extends Structure {
+        public static class ByReference extends jobFinishLog implements Structure.ByReference {}
+        public static class ByValue extends jobFinishLog implements Structure.ByValue {}
+        public jobFinishLog() {}
+        public jobFinishLog(Pointer p) { super(p); read(); }
+
+
+        /**
+         * < The unique ID for the job
+         */
+        public int jobId;
+
+        /**
+         * < The user ID of the submitter
+         */
+        public int userId;
+
+        /**
+         * < The user name of the submitter
+         */
+        public byte[] userName = new byte[MAX_LSB_NAME_LEN];
+
+        /**
+         * < Job submission options (see  \ref lsb_submit)
+         */
+        public int options;
+
+        /**
+         * < The number of processors requested for execution
+         */
+        public int numProcessors;
+
+        /**
+         * < The status of the job (See \ref lsb_readjobinfo)
+         */
+        public int jStatus;
+
+        /**
+         * < Job submission time
+         */
+        public NativeLong submitTime;
+
+        /**
+         * < The job started at or after this time
+         */
+        public NativeLong beginTime;
+
+        /**
+         * < If the job was not finished by this time, it was killed
+         */
+        public NativeLong termTime;
+
+        /**
+         * < Job dispatch time
+         */
+        public NativeLong startTime;
+
+        /**
+         * < The time the job finished
+         */
+        public NativeLong endTime;
+
+        /**
+         * < The name of the queue to which this job was submitted
+         */
+        public byte[] queue = new byte[MAX_LSB_NAME_LEN];
+
+        /**
+         * < Resource requirements
+         */
+        public String resReq;
+
+        /**
+         * < Submission host name
+         */
+        public byte[] fromHost = new byte[LibLsf.MAXHOSTNAMELEN];
+
+        /**
+         * < Current working directory
+         */
+        public String cwd;
+
+        /**
+         * < Input file name
+         */
+        public String inFile;
+
+        /**
+         * < Output file name
+         */
+        public String outFile;
+
+        /**
+         * < Error output file name
+         */
+        public String errFile;
+
+        /**
+         * < Job spool input file
+         */
+        public String inFileSpool;
+
+        /**
+         * < Job spool command file
+         */
+        public String commandSpool;
+
+        /**
+         * < Job file name
+         */
+        public String jobFile;
+
+        /**
+         * < The number of hosts considered for dispatching this job
+         */
+        public int numAskedHosts;
+
+        /**
+         * < The array of names of hosts considered for dispatching this job
+         */
+        public Pointer askedHosts;
+
+        /**
+         * < The CPU factor of the first execution host
+         */
+        public float hostFactor;
+
+        /**
+         * < The number of processors used for execution
+         */
+        public int numExHosts;
+
+        /**
+         * < The array of names of execution hosts
+         */
+        public Pointer execHosts;
+
+        /**
+         * < The total CPU time consumed by the job
+         */
+        public float cpuTime;
+
+        /**
+         * < Job name
+         */
+        public String jobName;
+
+        /**
+         * < Job command
+         */
+        public String command;
+
+        /**
+         * < Resource usage statistics.The lsfRusage structure is defined in <lsf/lsf.h>. Note that the availability of certain fields depends on the platform on which the sbatchd runs. The fields that do not make sense on this platform will be logged as -1.0.
+         */
+        public LibLsf.lsfRusage lsfRusage;
+
+        /**
+         * < The job dependency condition
+         */
+        public String dependCond;
+
+        /**
+         * < Time event string
+         */
+        public String timeEvent;
+
+        /**
+         * < The pre-execution command
+         */
+        public String preExecCmd;
+
+        /**
+         * < Name of the user to whom job related mail was sent
+         */
+        public String mailUser;
+
+        /**
+         * < The project name, used for accounting purposes.
+         */
+        public String projectName;
+
+        /**
+         * < Job's exit status
+         */
+        public int exitStatus;
+
+        /**
+         * < Maximum number of processors specified for the job
+         */
+        public int maxNumProcessors;
+
+        /**
+         * < Login shell specified by user
+         */
+        public String loginShell;
+
+        /**
+         * < Job array index
+         */
+        public int idx;
+
+        /**
+         * < Maximum memory used by job
+         */
+        public int maxRMem;
+
+        /**
+         * < Maximum swap used by job
+         */
+        public int maxRSwap;
+
+        /**
+         * < Advanced reservation ID
+         */
+        public String rsvId;
+
+        /**
+         * < Service class of the job
+         */
+        public String sla;
+
+        /**
+         * < Job exception handling mask
+         */
+        public int exceptMask;
+
+        /**
+         * < Placement information of LSF HPC jobs
+         */
+        public String additionalInfo;
+
+        /**
+         * < Job termination reason, see <lsf/lsbatch.h>
+         */
+        public int exitInfo;
+
+        /**
+         * < Job warning time period in seconds; -1 if unspecified
+         */
+        public int warningTimePeriod;
+
+        /**
+         * < Warning action, SIGNAL | CHKPNT | command, null if unspecified
+         */
+        public String warningAction;
+
+        /**
+         * < SAAP charged for job
+         */
+        public String chargedSAAP;
+
+        /**
+         * < LSF License Scheduler project name
+         */
+        public String licenseProject;
+
+        /**
+         * < Application profile under which the job runs.
+         */
+        public String app;
+
+        /**
+         * < Post-execution commands.
+         */
+        public String postExecCmd;
+
+        /**
+         * < Runtime estimate specified.
+         */
+        public int runtimeEstimation;
+
+        /**
+         * < Job group name
+         */
+        public String jgroup;
+
+        /**
+         * < Option2
+         */
+        public int options2;
+
+        /**
+         * < Job requeue exit values
+         */
+        public String requeueEValues;
+
+        /**
+         * < Resize notify command
+         */
+        public String notifyCmd;
+
+        /**
+         * < Last resize start time
+         */
+        public NativeLong lastResizeTime;
+
+        /**
+         * < Job description.
+         */
+        public String jobDescription;
+
+        /**
+         * < For new options in future
+         */
+        public submit_ext.ByReference submitExt;
+    }
+
+
+
+
+    /**
+     * \brief  load index log.
+     */
+
+    public static class loadIndexLog extends Structure {
+        public static class ByReference extends loadIndexLog implements Structure.ByReference {}
+        public static class ByValue extends loadIndexLog implements Structure.ByValue {}
+        public loadIndexLog() {}
+        public loadIndexLog(Pointer p) { super(p); read(); }
+
+
+        /**
+         * < The number of load indices
+         */
+        public int nIdx;
+
+        /**
+         * < The array of load index names
+         */
+        public Pointer name;
+    }
+
+
+
+    /**
+     * \brief  calendar log.
+     */
+    public static class calendarLog extends Structure {
+        public static class ByReference extends calendarLog implements Structure.ByReference {}
+        public static class ByValue extends calendarLog implements Structure.ByValue {}
+        public calendarLog() {}
+        public calendarLog(Pointer p) { super(p); read(); }
+
+
+        /**
+         * < Reserved for future use
+         */
+        public int options;
+
+        /**
+         * < The user ID of the submitter
+         */
+        public int userId;
+
+        /**
+         * < The name of the calendar
+         */
+        public String name;
+
+        /**
+         * < Description
+         */
+        public String desc;
+
+        /**
+         * < Calendar expression
+         */
+        public String calExpr;
+    }
+
+
+
+    /**
+     * \brief  job forward log.
+     */
+    public static class jobForwardLog extends Structure {
+        public static class ByReference extends jobForwardLog implements Structure.ByReference {}
+        public static class ByValue extends jobForwardLog implements Structure.ByValue {}
+        public jobForwardLog() {}
+        public jobForwardLog(Pointer p) { super(p); read(); }
+
+
+        /**
+         * < The unique ID of the job
+         */
+        public int jobId;
+
+        /**
+         * < The cluster name
+         */
+        public String cluster;
+
+        /**
+         * < Number of Reserved Hosts
+         */
+        public int numReserHosts;
+
+        /**
+         * < Reserved Host Names
+         */
+        public Pointer reserHosts;
+
+        /**
+         * < Job array index; must be 0 in JOB_NEW
+         */
+        public int idx;
+
+        /**
+         * < Remote job attributes from: \n JOB_FORWARD Remote batch job on submission side \n JOB_LEASE Lease job on submission side \n JOB_REMOTE_BATCH Remote batch job on execution side \n JOB_REMOTE_LEASE Lease job on execution side \n JOB_LEASE_RESYNC Lease job resync during restart \n JOB_REMOTE_RERUNNABLE Remote batch job rerunnable on execution cluster
+         */
+        public int jobRmtAttr;
+    }
+
+
+
+    /**
+     * \brief  job accept log.
+     */
+    public static class jobAcceptLog extends Structure {
+        public static class ByReference extends jobAcceptLog implements Structure.ByReference {}
+        public static class ByValue extends jobAcceptLog implements Structure.ByValue {}
+        public jobAcceptLog() {}
+        public jobAcceptLog(Pointer p) { super(p); read(); }
+
+
+        /**
+         * < The unique ID of the job
+         */
+        public int jobId;
+
+        /**
+         * < The unique ID of the remote job
+         */
+        public long remoteJid;
+
+        /**
+         * < The cluster name
+         */
+        public String cluster;
+
+        /**
+         * < Job array index; must be 0 in JOB_NEW
+         */
+        public int idx;
+
+        /**
+         * < Remote job attributes from: \n JOB_FORWARD Remote batch job on submission side \n JOB_LEASE Lease job on submission side \n JOB_REMOTE_BATCH Remote batch job on execution side \n JOB_REMOTE_LEASE Lease job on execution side \n JOB_LEASE_RESYNC Lease job resync during restart \n JOB_REMOTE_RERUNNABLE Remote batch job rerunnable on execution cluster
+         */
+        public int jobRmtAttr;
+    }
+
+
+
+    /**
+     * \brief  status Ack log.
+     */
+    public static class statusAckLog extends Structure {
+        public static class ByReference extends statusAckLog implements Structure.ByReference {}
+        public static class ByValue extends statusAckLog implements Structure.ByValue {}
+        public statusAckLog() {}
+        public statusAckLog(Pointer p) { super(p); read(); }
+
+
+        /**
+         * < The unique ID of the job
+         */
+        public int jobId;
+
+        /**
+         * < Line number of Status
+         */
+        public int statusNum;
+
+        /**
+         * < Job array index; must be 0 in JOB_NEW
+         */
+        public int idx;
+    }
+
+
+
+    /**
+     * \brief  job message log.
+     */
+    public static class jobMsgLog extends Structure {
+        public static class ByReference extends jobMsgLog implements Structure.ByReference {}
+        public static class ByValue extends jobMsgLog implements Structure.ByValue {}
+        public jobMsgLog() {}
+        public jobMsgLog(Pointer p) { super(p); read(); }
+
+
+        /**
+         * < The user ID of the submitter
+         */
+        public int usrId;
+
+        /**
+         * < The unique ID for the job
+         */
+        public int jobId;
+
+        /**
+         * < Message index
+         */
+        public int msgId;
+
+        /**
+         * < Message type
+         */
+        public int type;
+
+        /**
+         * < Message source
+         */
+        public String src;
+
+        /**
+         * < Message destination
+         */
+        public String dest;
+
+        /**
+         * < Message
+         */
+        public String msg;
+
+        /**
+         * < Job array index; must be 0 in JOB_NEW
+         */
+        public int idx;
+    }
+
+
+
+    /**
+     * \brief  job message ack log.
+     */
+    public static class jobMsgAckLog extends Structure {
+        public static class ByReference extends jobMsgAckLog implements Structure.ByReference {}
+        public static class ByValue extends jobMsgAckLog implements Structure.ByValue {}
+        public jobMsgAckLog() {}
+        public jobMsgAckLog(Pointer p) { super(p); read(); }
+
+
+        /**
+         * < The user ID of the submitter
+         */
+        public int usrId;
+
+        /**
+         * < The unique ID for the job
+         */
+        public int jobId;
+
+        /**
+         * < Message index
+         */
+        public int msgId;
+
+        /**
+         * < Message type
+         */
+        public int type;
+
+        /**
+         * < Message source
+         */
+        public String src;
+
+        /**
+         * < Message destination
+         */
+        public String dest;
+
+        /**
+         * < Message
+         */
+        public String msg;
+
+        /**
+         * < Job array index; must be 0 in JOB_NEW
+         */
+        public int idx;
+    }
+
+
+
+    /**
+     * \brief  job occupy request log. jobOccupyReqLog is for future use.
+     */
+    public static class jobOccupyReqLog extends Structure {
+        public static class ByReference extends jobOccupyReqLog implements Structure.ByReference {}
+        public static class ByValue extends jobOccupyReqLog implements Structure.ByValue {}
+        public jobOccupyReqLog() {}
+        public jobOccupyReqLog(Pointer p) { super(p); read(); }
+
+
+        /**
+         * < The user ID of the submitter
+         */
+        public int userId;
+
+        /**
+         * < The unique ID for the job
+         */
+        public int jobId;
+
+        /**
+         * < Number of Jobs Slots desired
+         */
+        public int numOccupyRequests;
+
+        /**
+         * < List of slots occupied
+         */
+        public Pointer occupyReqList;
+
+        /**
+         * < Job array index; must be 0 in JOB_NEW
+         */
+        public int idx;
+
+        /**
+         * < The name of the submitter
+         */
+        public byte[] userName = new byte[MAX_LSB_NAME_LEN];
+    }
+
+
+
+    /**
+     * \brief  job vacate log.jobVacatedLog is for future use.
+     */
+    public static class jobVacatedLog extends Structure {
+        public static class ByReference extends jobVacatedLog implements Structure.ByReference {}
+        public static class ByValue extends jobVacatedLog implements Structure.ByValue {}
+        public jobVacatedLog() {}
+        public jobVacatedLog(Pointer p) { super(p); read(); }
+
+
+        /**
+         * < The user ID of the submitter
+         */
+        public int userId;
+
+        /**
+         * < The unique ID for the job
+         */
+        public int jobId;
+
+        /**
+         * < Job array index; must be 0 in JOB_NEW
+         */
+        public int idx;
+
+        /**
+         * < The name of the submitter
+         */
+        public byte[] userName = new byte[MAX_LSB_NAME_LEN];
+    }
+
+
+
+    /**
+     * \brief  job force request log.
+     */
+    public static class jobForceRequestLog extends Structure {
+        public static class ByReference extends jobForceRequestLog implements Structure.ByReference {}
+        public static class ByValue extends jobForceRequestLog implements Structure.ByValue {}
+        public jobForceRequestLog() {}
+        public jobForceRequestLog(Pointer p) { super(p); read(); }
+
+
+        /**
+         * < The user ID of the submitter
+         */
+        public int userId;
+
+        /**
+         * < >1 for local/lease jobs; 0 for remote batch model
+         */
+        public int numExecHosts;
+
+        /**
+         * < The array of execution host names
+         */
+        public Pointer execHosts;
+
+        /**
+         * < The unique ID for the job
+         */
+        public int jobId;
+
+        /**
+         * < Job array index; must be 0 in JOB_NEW
+         */
+        public int idx;
+
+        /**
+         * < Job run options (RUNJOB_OPT_NOSTOP | JFLAG_URGENT_NOSTOP |JFLAG_URGENT)
+         */
+        public int options;
+
+        /**
+         * < The name of the submitter
+         */
+        public byte[] userName = new byte[MAX_LSB_NAME_LEN];
+
+        /**
+         * < The name of the queue to which this job was submitted
+         */
+        public String queue;
+    }
+
+
+
+    /**
+     * \brief  job chunck log.
+     */
+    public static class jobChunkLog extends Structure {
+        public static class ByReference extends jobChunkLog implements Structure.ByReference {}
+        public static class ByValue extends jobChunkLog implements Structure.ByValue {}
+        public jobChunkLog() {}
+        public jobChunkLog(Pointer p) { super(p); read(); }
+
+
+        /**
+         * < Size of array membJobId
+         */
+        public NativeLong membSize;
+
+        /**
+         * < Job ids of jobs in the chunk
+         */
+        public LongByReference membJobId;
+
+        /**
+         * < The number of processors used for execution
+         */
+        public NativeLong numExHosts;
+
+        /**
+         * < The array of names of execution hosts
+         */
+        public Pointer execHosts;
+    }
+
+
+
+    /**
+     * \brief  job external message log.
+     */
+    public static class jobExternalMsgLog extends Structure {
+        public static class ByReference extends jobExternalMsgLog implements Structure.ByReference {}
+        public static class ByValue extends jobExternalMsgLog implements Structure.ByValue {}
+        public jobExternalMsgLog() {}
+        public jobExternalMsgLog(Pointer p) { super(p); read(); }
+
+
+        /**
+         * < The unique ID for the job
+         */
+        public int jobId;
+
+        /**
+         * < Job array index; must be 0 in JOB_NEW
+         */
+        public int idx;
+
+        /**
+         * < The message index
+         */
+        public int msgIdx;
+
+        /**
+         * < Message description
+         */
+        public String desc;
+
+        /**
+         * < The user ID of the submitter
+         */
+        public int userId;
+
+        /**
+         * < Size of the message
+         */
+        public NativeLong dataSize;
+
+        /**
+         * < The time the author posted the message.
+         */
+        public NativeLong postTime;
+
+        /**
+         * < The status of the message
+         */
+        public int dataStatus;
+
+        /**
+         * < Name of attached data file. If no file is attached, use null.
+         */
+        public String fileName;
+
+        /**
+         * < The author of the message
+         */
+        public byte[] userName = new byte[MAX_LSB_NAME_LEN];
+    }
+
+
+
+    /**
+     * \brief  reservation request.
+     */
+    public static class rsvRes extends Structure {
+        public static class ByReference extends rsvRes implements Structure.ByReference {}
+        public static class ByValue extends rsvRes implements Structure.ByValue {}
+        public rsvRes() {}
+        public rsvRes(Pointer p) { super(p); read(); }
+
+
+        /**
+         * < Name of the resource (currently: host)
+         */
+        public String resName;
+
+        /**
+         * < Reserved counter (currently: cpu number)
+         */
+        public int count;
+
+        /**
+         * < Used of the reserved counter (not used)
+         */
+        public int usedAmt;
+    }
+
+
+
+    /**
+     * \brief for advanced reservation.
+     */
+    public static class rsvFinishLog extends Structure {
+        public static class ByReference extends rsvFinishLog implements Structure.ByReference {}
+        public static class ByValue extends rsvFinishLog implements Structure.ByValue {}
+        public rsvFinishLog() {}
+        public rsvFinishLog(Pointer p) { super(p); read(); }
+
+
+        /**
+         * < Time when the reservation is required
+         */
+        public NativeLong rsvReqTime;
+
+        /**
+         * < Same as the options field in the addRsvRequest(lsbatch.h)
+         */
+        public int options;
+
+        /**
+         * < The user who creat the reservation
+         */
+        public int uid;
+
+        /**
+         * < Reservation ID
+         */
+        public String rsvId;
+
+        /**
+         * < Client of the reservation
+         */
+        public String name;
+
+        /**
+         * < Number of resources reserved
+         */
+        public int numReses;
+
+        /**
+         * < Allocation vector
+         */
+        public Pointer /* rsvRes.ByReference */ alloc;
+
+        /**
+         * < Time window within which the reservation is active \n Two forms: int1-int2 or [day1]:hour1:0-[day2]:hour2:0
+         */
+        public String timeWindow;
+
+        /**
+         * < Duration in seconds. duration = to - from : when the reservation expired
+         */
+        public NativeLong duration;
+
+        /**
+         * < Creator of the reservation
+         */
+        public String creator;
+    }
+
+
+
+    /**
+     * \brief  CPU Profile Log
+     */
+    public static class cpuProfileLog extends Structure {
+        public static class ByReference extends cpuProfileLog implements Structure.ByReference {}
+        public static class ByValue extends cpuProfileLog implements Structure.ByValue {}
+        public cpuProfileLog() {}
+        public cpuProfileLog(Pointer p) { super(p); read(); }
+
+
+        /**
+         * < Queue name
+         */
+        public byte[] servicePartition = new byte[MAX_LSB_NAME_LEN];
+
+        /**
+         * < The number of CPU required
+         */
+        public int slotsRequired;
+
+        /**
+         * < The number of CPU actually allocated
+         */
+        public int slotsAllocated;
+
+        /**
+         * < The number of CPU borrowed
+         */
+        public int slotsBorrowed;
+
+        /**
+         * < The number of CPU lent
+         */
+        public int slotsLent;
+        /** note:  the number of CPU reserved = slotsAllocated - slotsBorrowed + slotsLent */
+    }
+
+
+
+    /**
+     * \brief  job resize start notify log.
+     */
+    public static class jobResizeNotifyStartLog extends Structure {
+        public static class ByReference extends jobResizeNotifyStartLog implements Structure.ByReference {}
+        public static class ByValue extends jobResizeNotifyStartLog implements Structure.ByValue {}
+        public jobResizeNotifyStartLog() {}
+        public jobResizeNotifyStartLog(Pointer p) { super(p); read(); }
+
+
+        /**
+         * <  JobId
+         */
+        public int jobId;
+
+        /**
+         * <  Index
+         */
+        public int idx;
+
+        /**
+         * <  Notify Id
+         */
+        public int notifyId;
+
+        /**
+         * <  Number of resized hosts.
+         */
+        public int numResizeHosts;
+
+        /**
+         * <  Resize Hosts
+         */
+        public Pointer resizeHosts;
+    }
+
+
+
+    /**
+     * \brief  job resize accept notify log.
+     */
+    public static class jobResizeNotifyAcceptLog extends Structure {
+        public static class ByReference extends jobResizeNotifyAcceptLog implements Structure.ByReference {}
+        public static class ByValue extends jobResizeNotifyAcceptLog implements Structure.ByValue {}
+        public jobResizeNotifyAcceptLog() {}
+        public jobResizeNotifyAcceptLog(Pointer p) { super(p); read(); }
+
+
+        /**
+         * <  JobId
+         */
+        public int jobId;
+
+        /**
+         * <  Index
+         */
+        public int idx;
+
+        /**
+         * <  Notify Id
+         */
+        public int notifyId;
+
+        /**
+         * <  Resize Notify command pid
+         */
+        public int resizeNotifyCmdPid;
+
+        /**
+         * <  Resize Notify command pgid
+         */
+        public int resizeNotifyCmdPGid;
+
+        /**
+         * <  Status
+         */
+        public int status;
+    }
+
+
+
+    /**
+     * \brief  job resize done notify log.
+     */
+    public static class jobResizeNotifyDoneLog extends Structure {
+        public static class ByReference extends jobResizeNotifyDoneLog implements Structure.ByReference {}
+        public static class ByValue extends jobResizeNotifyDoneLog implements Structure.ByValue {}
+        public jobResizeNotifyDoneLog() {}
+        public jobResizeNotifyDoneLog(Pointer p) { super(p); read(); }
+
+
+        /**
+         * <  JobId
+         */
+        public int jobId;
+
+        /**
+         * <  Index
+         */
+        public int idx;
+
+        /**
+         * <  Notify Id
+         */
+        public int notifyId;
+
+        /**
+         * <  Status
+         */
+        public int status;
+    }
+
+
+
+    /**
+     * \brief  job resize release log. Logged when allocated slots/hosts are
+     * released from a running job. JNA maps fields by declaration order — do not reorder.
+     */
+    public static class jobResizeReleaseLog extends Structure {
+        public static class ByReference extends jobResizeReleaseLog implements Structure.ByReference {}
+        public static class ByValue extends jobResizeReleaseLog implements Structure.ByValue {}
+        public jobResizeReleaseLog() {}
+        public jobResizeReleaseLog(Pointer p) { super(p); read(); }
+
+
+        /**
+         * <  JobId
+         */
+        public int jobId;
+
+        /**
+         * <  Job array index
+         */
+        public int idx;
+
+        /**
+         * <  Request Id
+         */
+        public int reqId;
+
+        /**
+         * <  Options
+         */
+        public int options;
+
+        /**
+         * <  User Id
+         */
+        public int userId;
+
+        /**
+         * <  User Name
+         */
+        public String userName;
+
+        /**
+         * <  Resize Notify command
+         */
+        public String resizeNotifyCmd;
+
+        /**
+         * <  Number of resized hosts
+         */
+        public int numResizeHosts;
+
+        /**
+         * <  Resized hosts (native array of strings; length numResizeHosts)
+         */
+        public Pointer resizeHosts;
+    }
+
+
+
+    /**
+     * \brief  job resize cancel log. Logged when a pending resize request is
+     * cancelled. JNA maps fields by declaration order — do not reorder.
+     */
+    public static class jobResizeCancelLog extends Structure {
+        public static class ByReference extends jobResizeCancelLog implements Structure.ByReference {}
+        public static class ByValue extends jobResizeCancelLog implements Structure.ByValue {}
+        public jobResizeCancelLog() {}
+        public jobResizeCancelLog(Pointer p) { super(p); read(); }
+
+
+        /**
+         * <  JobId
+         */
+        public int jobId;
+
+        /**
+         * <  Job array index
+         */
+        public int idx;
+
+        /**
+         * <  User Id
+         */
+        public int userId;
+
+        /**
+         * <  User name
+         */
+        public String userName;
+    }
+
+
+
+    /**
+     * \brief log the running rusage (resource usage) of a job in the
+     * lsb.stream file.
+     */
+    public static class jobRunRusageLog extends Structure {
+        public static class ByReference extends jobRunRusageLog implements Structure.ByReference {}
+        public static class ByValue extends jobRunRusageLog implements Structure.ByValue {}
+        public jobRunRusageLog() {}
+        public jobRunRusageLog(Pointer p) { super(p); read(); }
+
+
+        /**
+         * < The unique ID of the job
+         */
+        public int jobid;
+
+        /**
+         * < Job array index; must be 0 in JOB_NEW
+         */
+        public int idx;
+
+        /**
+         * < jrusage — resource usage snapshot (see LibLsf.jRusage)
+         */
+        public LibLsf.jRusage jrusage;
+    }
+
+
+
+    /**
+     * \brief  SLA (service-level agreement) event log.
+     */
+    public static class slaLog extends Structure {
+        public static class ByReference extends slaLog implements Structure.ByReference {}
+        public static class ByValue extends slaLog implements Structure.ByValue {}
+        public slaLog() {}
+        public slaLog(Pointer p) { super(p); read(); }
+
+
+        /**
+         * < Service class name
+         */
+        public String name;
+
+        /**
+         * < Consumer name associated with the service class
+         */
+        public String consumer;
+
+        /**
+         * < Objectives (goal type)
+         */
+        public int goaltype;
+
+        /**
+         * < The service class state (ontime, delayed)
+         */
+        public int state;
+
+        /**
+         * < Optimum number of job slots (or concurrently running jobs) needed for the  service class to meet its service-level goals
+         */
+        public int optimum;
+
+        /**
+         * < Job counters for the service class
+         */
+        public int[] counters = new int[NUM_JGRP_COUNTERS];
+    }
+
+
+
+    /**
+     * \brief  a wrap of structure perfmonLog for the performance metrics project.
+     */
+    public static class perfmonLogInfo extends Structure {
+        public static class ByReference extends perfmonLogInfo implements Structure.ByReference {}
+        public static class ByValue extends perfmonLogInfo implements Structure.ByValue {}
+        public perfmonLogInfo() {}
+        public perfmonLogInfo(Pointer p) { super(p); read(); }
+
+
+        /**
+         * <  Sample period
+         */
+        public int samplePeriod;
+
+        /**
+         * <  Metrics (pointer to native int array)
+         */
+        public IntByReference metrics;
+
+        /**
+         * <  Start time (native time_t)
+         */
+        public NativeLong startTime;
+
+        /**
+         * <  Log time (native time_t)
+         */
+        public NativeLong logTime;
+    }
+
+
+
+    /**
+     * \brief performance metrics log in lsb.stream.
+     * Note: several field names ("jobQuries", "dispatchedjobs", "jobcompleted")
+     * carry typos inherited from the native lsbatch.h API and must not be renamed.
+     */
+    public static class perfmonLog extends Structure {
+        public static class ByReference extends perfmonLog implements Structure.ByReference {}
+        public static class ByValue extends perfmonLog implements Structure.ByValue {}
+        public perfmonLog() {}
+        public perfmonLog(Pointer p) { super(p); read(); }
+
+
+        /**
+         * < Sample rate
+         */
+        public int samplePeriod;
+
+        /**
+         * < Number of queries (total)
+         */
+        public int totalQueries;
+
+        /**
+         * < Number of job queries
+         */
+        public int jobQuries;
+
+        /**
+         * < Number of queue queries
+         */
+        public int queueQuries;
+
+        /**
+         * < Number of host queries
+         */
+        public int hostQuries;
+
+        /**
+         * < Number of Submission Requests
+         */
+        public int submissionRequest;
+
+        /**
+         * < Number of Jobs Submitted
+         */
+        public int jobSubmitted;
+
+        /**
+         * < Number of Dispatched Jobs
+         */
+        public int dispatchedjobs;
+
+        /**
+         * < Number of Jobs Completed
+         */
+        public int jobcompleted;
+
+        /**
+         * < Number of MultiCluster Jobs Sent
+         */
+        public int jobMCSend;
+
+        /**
+         * < Number of MultiCluster Jobs Received
+         */
+        public int jobMCReceive;
+
+        /**
+         * < Start Time (native time_t)
+         */
+        public NativeLong startTime;
+    }
+
+
+
+    /**
+     * \brief task finish log. Task accounting record in ssched.acct.
+     */
+    public static class taskFinishLog extends Structure {
+        public static class ByReference extends taskFinishLog implements Structure.ByReference {}
+        public static class ByValue extends taskFinishLog implements Structure.ByValue {}
+        public taskFinishLog() {}
+        public taskFinishLog(Pointer p) { super(p); read(); }
+
+
+        /**
+         * <  Job finish event
+         */
+        public jobFinishLog jobFinishLog;
+
+        /**
+         * < Task ID
+         */
+        public int taskId;
+
+        /**
+         * < Task index
+         */
+        public int taskIdx;
+
+        /**
+         * < Name of task
+         */
+        public String taskName;
+
+        /**
+         * < Bit mask of task options: \n TASK_IN_FILE (0x01)-specify input file \n TASK_OUT_FILE (0x02)-specify output file \n TASK_ERR_FILE (0x04)-specify error file \n TASK_PRE_EXEC (0x08)-specify pre-exec command \n TASK_POST_EXEC (0x10)-specify post-exec command \n TASK_NAME (0x20)-specify task name
+         */
+        public int taskOptions;
+
+        /**
+         * < Task Exit Reason \n TASK_EXIT_NORMAL = 0- normal exit \n TASK_EXIT_INIT = 1-generic task initialization failure \n TASK_EXIT_PATH = 2-failed to initialize path \n TASK_EXIT_NO_FILE = 3-failed to create task file \n TASK_EXIT_PRE_EXEC = 4- task pre-exec failed \n TASK_EXIT_NO_PROCESS = 5-fork failed \n TASK_EXIT_XDR = 6-xdr communication error \n TASK_EXIT_NOMEM = 7- no memory \n TASK_EXIT_SYS = 8-system call failed \n TASK_EXIT_TSCHILD_EXEC = 9-failed to run sschild \n TASK_ [...]
+         */
+        public int taskExitReason;
+    }
+
+
+
+    /**
+     * \brief End of stream event. The stream is moved to lsb.stream.0 and
+     * a new lsb.stream is opened. Readers of lsb.stream, when they encounter
+     * the event EVENT_END_OF_STREAM, should close and reopen the
+     * lsb.stream file.
+     */
+    public static class eventEOSLog extends Structure {
+        public static class ByReference extends eventEOSLog implements Structure.ByReference {}
+        public static class ByValue extends eventEOSLog implements Structure.ByValue {}
+        public eventEOSLog() {}
+        public eventEOSLog(Pointer p) { super(p); read(); }
+
+
+        /**
+         * < Event end of stream
+         */
+        public int eos;
+    }
+
+
+
+    /**
+     * \brief job resize event: indicating a realized job allocation change.
+     */
+    public static class jobResizeLog extends Structure {
+        public static class ByReference extends jobResizeLog implements Structure.ByReference {}
+        public static class ByValue extends jobResizeLog implements Structure.ByValue {}
+        public jobResizeLog() {}
+        public jobResizeLog(Pointer p) { super(p); read(); }
+
+
+        /**
+         * <  JobId
+         */
+        public int jobId;
+
+        /**
+         * <  Job array index
+         */
+        public int idx;
+
+        /**
+         * <  Start time (native time_t)
+         */
+        public NativeLong startTime;
+
+        /**
+         * <  User Id
+         */
+        public int userId;
+
+        /**
+         * <  User name (fixed-size, NUL-terminated native char array)
+         */
+        public byte[] userName = new byte[MAX_LSB_NAME_LEN];
+
+        /**
+         * < 0 grow, 1 shrink
+         */
+        public int resizeType;
+
+        /**
+         * < The start time of last allocation
+         */
+        public NativeLong lastResizeStartTime;
+
+        /**
+         * < The finish time of last allocation
+         */
+        public NativeLong lastResizeFinishTime;
+
+        /**
+         * < Allocation before the resize (host count)
+         */
+        public int numExecHosts;
+
+        /**
+         * <  Execute hosts (native array of strings; length numExecHosts)
+         */
+        public Pointer execHosts;
+
+        /**
+         * < The delta of the allocation change (host count)
+         */
+        public int numResizeHosts;
+
+        /**
+         * <  Resize hosts (native array of strings; length numResizeHosts)
+         */
+        public Pointer resizeHosts;
+    }
+
+
+
+    /**
+     * \brief  Log event types. Union of all event record structures; the
+     * member that is valid for a given record is determined by the event
+     * type stored alongside it (see eventRec.type).
+     */
+    public static class eventLog extends Union {
+        /**
+         * <  Job new event
+         */
+        public jobNewLog jobNewLog;
+
+        /**
+         * <  Job start event
+         */
+        public jobStartLog jobStartLog;
+
+        /**
+         * <  Job status event
+         */
+        public jobStatusLog jobStatusLog;
+
+        /**
+         * <  sbatchd job status event
+         */
+        public sbdJobStatusLog sbdJobStatusLog;
+
+        /**
+         * <  Job switch event
+         */
+        public jobSwitchLog jobSwitchLog;
+
+        /**
+         * <  Job move event
+         */
+        public jobMoveLog jobMoveLog;
+
+        /**
+         * <  Queue control event
+         */
+        public queueCtrlLog queueCtrlLog;
+
+/* New debug event*/
+        public newDebugLog newDebugLog;
+
+        /**
+         * <  Host control event
+         */
+        public hostCtrlLog hostCtrlLog;
+
+        /**
+         * <  mbatchd start event
+         */
+        public mbdStartLog mbdStartLog;
+
+        /**
+         * <  mbatchd die event
+         */
+        public mbdDieLog mbdDieLog;
+
+        /**
+         * <  Unfulfill event
+         */
+        public unfulfillLog unfulfillLog;
+
+        /**
+         * <  Job finish event
+         */
+        public jobFinishLog jobFinishLog;
+
+        /**
+         * <  Load index event
+         */
+        public loadIndexLog loadIndexLog;
+
+        /**
+         * <  Migration initiated event
+         */
+        public migLog migLog;
+
+        /**
+         * <  Calendar event
+         */
+        public calendarLog calendarLog;
+
+        /**
+         * <  Job forward event
+         */
+        public jobForwardLog jobForwardLog;
+
+        /**
+         * <  Job accept event
+         */
+        public jobAcceptLog jobAcceptLog;
+
+        /**
+         * <  Job accepted from another cluster event
+         */
+        public statusAckLog statusAckLog;
+
+        /**
+         * <  Job signal event
+         */
+        public signalLog signalLog;
+
+        /**
+         * <  Job execution event
+         */
+        public jobExecuteLog jobExecuteLog;
+
+        /**
+         * <  Job message event
+         */
+        public jobMsgLog jobMsgLog;
+
+        /**
+         * <  Job message acknowledge event
+         */
+        public jobMsgAckLog jobMsgAckLog;
+
+        /**
+         * <  Job requeue event
+         */
+        public jobRequeueLog jobRequeueLog;
+
+        /**
+         * <  Checkpoint event
+         */
+        public chkpntLog chkpntLog;
+
+        /**
+         * <  Signal with action event
+         */
+        public sigactLog sigactLog;
+
+        /**
+         * <  Job occupy request event
+         */
+        public jobOccupyReqLog jobOccupyReqLog;
+
+        /**
+         * <  Job vacate event
+         */
+        public jobVacatedLog jobVacatedLog;
+
+        /**
+         * <  Job start accept event
+         */
+        public jobStartAcceptLog jobStartAcceptLog;
+
+        /**
+         * <  Job clean event
+         */
+        public jobCleanLog jobCleanLog;
+
+        /**
+         * <  Job exception event
+         */
+        public jobExceptionLog jobExceptionLog;
+
+        /**
+         * <  Job group new event
+         */
+        public jgrpNewLog jgrpNewLog;
+
+        /**
+         * <  Job group Ctrl event
+         */
+        public jgrpCtrlLog jgrpCtrlLog;
+
+        /**
+         * <  Job Force Request event
+         */
+        public jobForceRequestLog jobForceRequestLog;
+
+        /**
+         * <  Event switch event
+         */
+        public logSwitchLog logSwitchLog;
+
+        /**
+         * <  Job modify event
+         */
+        public jobModLog jobModLog;
+
+        /**
+         * <  Job group status event
+         */
+        public jgrpStatusLog jgrpStatusLog;
+
+        /**
+         * <  Job attribute setting event
+         */
+        public jobAttrSetLog jobAttrSetLog;
+
+        /**
+         * <  Job external message event
+         */
+        public jobExternalMsgLog jobExternalMsgLog;
+
+        /**
+         * <  Job chunk event
+         */
+        public jobChunkLog jobChunkLog;
+
+        /**
+         * < sbatchd unreported status event
+         */
+        public sbdUnreportedStatusLog sbdUnreportedStatusLog;
+
+        /**
+         * <  Reservation finish event
+         */
+        public rsvFinishLog rsvFinishLog;
+
+        /**
+         * <  Host group control Log
+         */
+        public hgCtrlLog hgCtrlLog;
+
+        /**
+         * <  cpu profile event
+         */
+        public cpuProfileLog cpuProfileLog;
+
+        /**
+         * <  Data logging event
+         */
+        public dataLoggingLog dataLoggingLog;
+
+        /**
+         * <  Job run rusage event
+         */
+        public jobRunRusageLog jobRunRusageLog;
+
+        /**
+         * <  Event EOS event
+         */
+        public eventEOSLog eventEOSLog;
+
+        /**
+         * <  SLA event
+         */
+        public slaLog slaLog;
+
+        /**
+         * <  Performance event
+         */
+        public perfmonLog perfmonLog;
+
+        /**
+         * <  Task finish event
+         */
+        public taskFinishLog taskFinishLog;
+
+        /**
+         * <  Job resize notify start event
+         */
+        public jobResizeNotifyStartLog jobResizeNotifyStartLog;
+
+        /**
+         * <  Job resize notify accept event
+         */
+        public jobResizeNotifyAcceptLog jobResizeNotifyAcceptLog;
+
+        /**
+         * <  Job resize notify done event
+         */
+        public jobResizeNotifyDoneLog jobResizeNotifyDoneLog;
+
+        /**
+         * <  Job resize release event
+         */
+        public jobResizeReleaseLog jobResizeReleaseLog;
+
+        /**
+         * <  Job resize cancel event
+         */
+        public jobResizeCancelLog jobResizeCancelLog;
+
+        /**
+         * <  Job resize event
+         */
+        public jobResizeLog jobResizeLog;
+
+/*#if defined(LSF_SIMULATOR)*/
+/**< Job array element event */
+        /*public jobArrayElementLog jobArrayElementLog;*/
+
+        /**< LSF simulator status event */
+        /*public mbdSimStatusLog   mbdSimStatusLog;*/
+        /*#endif*/
+    }
+
+
+
+
+    /**
+     * \brief  event records. One record read from an event/stream file:
+     * a version, a type tag, a timestamp, and the type-specific payload.
+     */
+    public static class eventRec extends Structure {
+        public static class ByReference extends eventRec implements Structure.ByReference {}
+        public static class ByValue extends eventRec implements Structure.ByValue {}
+        public eventRec() {}
+        public eventRec(Pointer p) { super(p); read(); }
+
+
+        /**
+         * < The mbatchd version number (fixed-size, NUL-terminated native char array)
+         */
+        public byte[] version = new byte[MAX_VERSION_LEN];
+
+        /**
+         * < Event type in \ref event_types
+         */
+        public int type;
+
+        /**
+         * < The time the event occurred (native time_t)
+         */
+        public NativeLong eventTime;
+
+        /**
+         * < The information for this type of event, contained in a structure  corresponding to type
+         */
+        public eventLog eventLog;
+    }
+
+
+
+    /** Identifies a set of event files by directory and time window. */
+    public static class eventLogFile extends Structure {
+        public static class ByReference extends eventLogFile implements Structure.ByReference {}
+        public static class ByValue extends eventLogFile implements Structure.ByValue {}
+        public eventLogFile() {}
+        public eventLogFile(Pointer p) { super(p); read(); }
+
+
+/* event file directory (fixed-size, NUL-terminated native char array) */
+        public byte[] eventDir = new byte[LibLsf.MAXFILENAMELEN];
+
+/* start and end event time (native time_t) */
+        public NativeLong beginTime, endTime;
+    }
+
+
+
+    /** Handle to an open event file while iterating over event logs. */
+    public static class eventLogHandle extends Structure {
+        public static class ByReference extends eventLogHandle implements Structure.ByReference {}
+        public static class ByValue extends eventLogHandle implements Structure.ByValue {}
+        public eventLogHandle() {}
+        public eventLogHandle(Pointer p) { super(p); read(); }
+
+
+/* open event file pointer (native FILE*) */
+        public Pointer fp;
+
+/* current open events file name (fixed-size, NUL-terminated native char array) */
+        public byte[] openEventFile = new byte[LibLsf.MAXFILENAMELEN];
+
+/* current open event file number */
+        public int curOpenFile;
+        public int lastOpenFile;                   /* last open event file number, 0
+                  means lsb.events */
+    }
+
+
+
+
+    /* Name and header tag of the jobId index file for lsb.events. */
+    public static final String LSF_JOBIDINDEX_FILENAME = "lsb.events.index";
+    public static final String LSF_JOBIDINDEX_FILETAG = "#LSF_JOBID_INDEX_FILE";
+
+/* structures used to handle jobId index file */
+
+    /** State for reading the jobId index file (lsb.events.index). */
+    public static class jobIdIndexS extends Structure {
+        public static class ByReference extends jobIdIndexS implements Structure.ByReference {}
+        public static class ByValue extends jobIdIndexS implements Structure.ByValue {}
+        public jobIdIndexS() {}
+        public jobIdIndexS(Pointer p) { super(p); read(); }
+
+
+/* the index file name (fixed-size, NUL-terminated native char array) */
+        public byte[] fileName = new byte[LibLsf.MAXFILENAMELEN];
+
+/* open index file pointer (native FILE*) */
+        public Pointer fp;
+
+/* version number for future use */
+        public float version;
+
+/* total number of rows(files) indices */
+        public int totalRows;
+
+/* last update time (native time_t) */
+        public NativeLong lastUpdate;
+
+/* current rows */
+        public int curRow;
+        /* the event file currently handled is */
+        /* (totalRows - curRow + 1) */
+
+/* time stamp of current row */
+        public NativeLong timeStamp;
+
+/* min jobId in that row */
+        public long minJobId;
+
+/* max jobId in that row */
+        public long maxJobId;
+
+/* total number of jobIds */
+        public int totalJobIds;
+
+/* jobId array of current row */
+        public IntByReference jobIds;
+    }
+
+
+
+/* structures used to hold one element of sorted int list */
+
+    /** One element of a doubly-linked sorted int list. */
+    public static class sortIntList extends Structure {
+        public static class ByReference extends sortIntList implements Structure.ByReference {}
+        public static class ByValue extends sortIntList implements Structure.ByValue {}
+        public sortIntList() {}
+        public sortIntList(Pointer p) { super(p); read(); }
+
+        /* element value */
+        public int value;
+
+/* points to next element */
+        public sortIntList.ByReference forw;
+
+/* points to prior element */
+        public sortIntList.ByReference back;
+    }
+
+
+
+    /**
+     * NQS (Network Queuing System) status request.
+     * Field semantics inferred from names — NOTE(review): verify against
+     * the native NQS interop definitions before relying on them.
+     */
+    public static class nqsStatusReq extends Structure {
+        public static class ByReference extends nqsStatusReq implements Structure.ByReference {}
+        public static class ByValue extends nqsStatusReq implements Structure.ByValue {}
+        public nqsStatusReq() {}
+        public nqsStatusReq(Pointer p) { super(p); read(); }
+
+        public long jobId;            /* job ID */
+        public int opCode;            /* operation code */
+        public int reportCode;        /* report code */
+        public String nqsQueue;       /* NQS queue name */
+        public int fromUid;           /* requesting user's uid */
+        public String fromUserName;   /* requesting user's name */
+        public String fromHostName;   /* requesting host */
+        public int idx;               /* job array index */
+    }
+
+
+
+    /**
+     * NQS (Network Queuing System) status reply.
+     * Field semantics inferred from names — NOTE(review): verify against
+     * the native NQS interop definitions before relying on them.
+     */
+    public static class nqsStatusReply extends Structure {
+        public static class ByReference extends nqsStatusReply implements Structure.ByReference {}
+        public static class ByValue extends nqsStatusReply implements Structure.ByValue {}
+        public nqsStatusReply() {}
+        public nqsStatusReply(Pointer p) { super(p); read(); }
+
+        public String orgHost;        /* originating host */
+        public String orgUser;        /* originating user */
+        public NativeLong startTime;  /* start time (native time_t) */
+        public String jobName;        /* job name */
+        public String nqsQueue;       /* NQS queue name */
+        public String lsbManager;     /* LSF batch manager name */
+        public int options;           /* option flags */
+        public String outFile;        /* stdout file */
+        public String errFile;        /* stderr file */
+    }
+
+
+
+/*
+*  SBD uses the following data structure to communicate with
+*  the resource manager.
+*
+ */
+    public static final int LSB_MAX_SD_LENGTH = 128;
+
+    /** Header of a message exchanged between SBD and the resource manager. */
+    public static class lsbMsgHdr extends Structure {
+        public static class ByReference extends lsbMsgHdr implements Structure.ByReference {}
+        public static class ByValue extends lsbMsgHdr implements Structure.ByValue {}
+        public lsbMsgHdr() {}
+        public lsbMsgHdr(Pointer p) { super(p); read(); }
+
+        public int usrId;      /* sender's user ID */
+        public long jobId;     /* job the message refers to */
+        public int msgId;      /* message ID */
+        public int type;       /* message type */
+        public String src;     /* message source */
+        public String dest;    /* message destination */
+    }
+
+
+
+    /** A message (header + payload) exchanged between SBD and the resource manager. */
+    public static class lsbMsg extends Structure {
+        public static class ByReference extends lsbMsg implements Structure.ByReference {}
+        public static class ByValue extends lsbMsg implements Structure.ByValue {}
+        public lsbMsg() {}
+        public lsbMsg(Pointer p) { super(p); read(); }
+
+        public lsbMsgHdr.ByReference header;  /* message header */
+        public String msg;                    /* message body */
+    }
+
+
+
+/* data structures related to API_CONF */
+
+    /* Configuration-handling option flags for API_CONF (bit mask).
+       Hex literals normalized to lowercase 0x for consistency; values unchanged. */
+    public static final int CONF_NO_CHECK = 0x00;
+    public static final int CONF_CHECK = 0x01;
+    public static final int CONF_EXPAND = 0x02;
+    public static final int CONF_RETURN_HOSTSPEC = 0x04;
+    public static final int CONF_NO_EXPAND = 0x08;
+    public static final int CONF_HAS_CU = 0x10;
+
+    /** Parameter configuration returned by the API_CONF interface. */
+    public static class paramConf extends Structure {
+        public static class ByReference extends paramConf implements Structure.ByReference {}
+        public static class ByValue extends paramConf implements Structure.ByValue {}
+        public paramConf() {}
+        public paramConf(Pointer p) { super(p); read(); }
+
+        /* configured parameters */
+        public parameterInfo.ByReference param;
+    }
+
+
+
+    /** User configuration returned by the API_CONF interface.
+     *  Each Pointer is a native array whose length is the preceding count field. */
+    public static class userConf extends Structure {
+        public static class ByReference extends userConf implements Structure.ByReference {}
+        public static class ByValue extends userConf implements Structure.ByValue {}
+        public userConf() {}
+        public userConf(Pointer p) { super(p); read(); }
+
+        public int numUgroups;
+        public Pointer /* groupInfoEnt.ByReference */ ugroups;
+        public int numUsers;
+        public Pointer /* userInfoEnt.ByReference */ users;
+        public int numUserEquivalent;
+        public Pointer /* userEquivalentInfoEnt.ByReference */ userEquivalent;
+        public int numUserMapping;
+        public Pointer /* userMappingInfoEnt.ByReference */ userMapping;
+    }
+
+
+
+    /** Host configuration returned by the API_CONF interface.
+     *  Each Pointer is a native array whose length is the preceding count field. */
+    public static class hostConf extends Structure {
+        public static class ByReference extends hostConf implements Structure.ByReference {}
+        public static class ByValue extends hostConf implements Structure.ByValue {}
+        public hostConf() {}
+        public hostConf(Pointer p) { super(p); read(); }
+
+        public int numHosts;
+        public Pointer /* hostInfoEnt.ByReference */ hosts;
+        public int numHparts;
+        public Pointer /* hostPartInfoEnt.ByReference */ hparts;
+        public int numHgroups;
+        public Pointer /* groupInfoEnt.ByReference */ hgroups;
+    }
+
+
+
+    /**
+     * \brief  lsb shared resource instance.
+     */
+    public static class lsbSharedResourceInstance extends Structure {
+        public static class ByReference extends lsbSharedResourceInstance implements Structure.ByReference {}
+        public static class ByValue extends lsbSharedResourceInstance implements Structure.ByValue {}
+        public lsbSharedResourceInstance() {}
+        public lsbSharedResourceInstance(Pointer p) { super(p); read(); }
+
+
+        /**
+         * < Value used by mbatchd
+         */
+        public String totalValue;
+
+        /**
+         * < Reserved value
+         */
+        public String rsvValue;
+
+        /**
+         * < Number of Hosts associated with the resource.
+         */
+        public int nHosts;
+
+        /**
+         * < Hosts list (native array of strings; length nHosts)
+         */
+        public Pointer hostList;
+    }
+
+
+
+    /**
+     * \brief lsb shared resource information.
+     */
+    public static class lsbSharedResourceInfo extends Structure {
+        public static class ByReference extends lsbSharedResourceInfo implements Structure.ByReference {}
+        public static class ByValue extends lsbSharedResourceInfo implements Structure.ByValue {}
+        public lsbSharedResourceInfo() {}
+        public lsbSharedResourceInfo(Pointer p) { super(p); read(); }
+
+
+        /**
+         * < Resource name
+         */
+        public String resourceName;
+
+        /**
+         * < Number of instances
+         */
+        public int nInstances;
+
+        /**
+         * < List of instances (native array; length nInstances)
+         */
+        public Pointer /* lsbSharedResourceInstance.ByReference */ instances;
+    }
+
+
+
+    /** Queue configuration returned by the API_CONF interface. */
+    public static class queueConf extends Structure {
+        public static class ByReference extends queueConf implements Structure.ByReference {}
+        public static class ByValue extends queueConf implements Structure.ByValue {}
+        public queueConf() {}
+        public queueConf(Pointer p) { super(p); read(); }
+
+        public int numQueues;
+        /* native array of queue entries; length numQueues */
+        public Pointer /* queueInfoEnt.ByReference */ queues;
+    }
+
+
+
+    /**
+     * \brief  frame element information.
+     */
+    public static class frameElementInfo extends Structure {
+        public static class ByReference extends frameElementInfo implements Structure.ByReference {}
+        public static class ByValue extends frameElementInfo implements Structure.ByValue {}
+        public frameElementInfo() {}
+        public frameElementInfo(Pointer p) { super(p); read(); }
+
+
+        /**
+         * <  The job index in the frame job array.
+         */
+        public int jobindex;
+
+        /**
+         * <  The job status.
+         */
+        public int jobState;
+
+        /**
+         * <  The start frame of this frame job.
+         */
+        public int start;
+
+        /**
+         * <  The end frame of this frame job.
+         */
+        public int end;
+
+        /**
+         * <  The step of this frame job.
+         */
+        public int step;
+
+        /**
+         * <  The chunk size of this frame job.
+         */
+        public int chunk;
+    }
+
+
+
+    /**
+     * \brief  frame job information.
+     */
+    public static class frameJobInfo extends Structure {
+        public static class ByReference extends frameJobInfo implements Structure.ByReference {}
+        public static class ByValue extends frameJobInfo implements Structure.ByValue {}
+        public frameJobInfo() {}
+        public frameJobInfo(Pointer p) { super(p); read(); }
+
+
+        /**
+         * < The job ID that the LSF system assigned to the frame job array.
+         */
+        public long jobGid;
+
+        /**
+         * < The max job number in one frame job array.
+         */
+        public int maxJob;
+
+        /**
+         * < The user submitted the frame job array. (fixed-size, NUL-terminated native char array)
+         */
+        public byte[] userName = new byte[MAX_LSB_NAME_LEN];
+
+        /**
+         * < Full job name of the frame job array. (fixed-size, NUL-terminated native char array)
+         */
+        public byte[] jobName = new byte[LibLsf.MAXLINELEN];
+
+        /**
+         * < The pointer to the frame job array element table.
+         */
+        public frameElementInfo.ByReference frameElementPtr;
+    }
+
+
+
+    /** NQS resource-usage report request. */
+    public static class nqsRusageReq extends Structure {
+        public static class ByReference extends nqsRusageReq implements Structure.ByReference {}
+        public static class ByValue extends nqsRusageReq implements Structure.ByValue {}
+        public nqsRusageReq() {}
+        public nqsRusageReq(Pointer p) { super(p); read(); }
+
+        public long jobId;     /* job ID */
+        public int mem;        /* memory usage — presumably in KB; verify against native docs */
+        public float cpuTime;  /* CPU time consumed */
+    }
+
+
+
+    /** NQS resource-usage report reply. */
+    public static class nqsRusageReply extends Structure {
+        public static class ByReference extends nqsRusageReply implements Structure.ByReference {}
+        public static class ByValue extends nqsRusageReply implements Structure.ByValue {}
+        public nqsRusageReply() {}
+        public nqsRusageReply(Pointer p) { super(p); read(); }
+
+        /* reply status code */
+        public int status;
+    }
+
+
+
+/* end of data structures related to API_CONF */
+
+/*
+*  Structure used for the Advance Reservation API
+*
+*  MBD allows the LSF administration to make advance reservation on
+*  behalf of a user, a group, or for system maintenance purposes.
+*  Clients can add a reservation, remove a reservation and show
+*  reservation statuses.  The following data structures are used to
+*  encapsulate these requests
+*
+*     addRsvRequest: to add a reservation
+*     rmRsvRequest:  to remove a reservation
+*     rsvInfoEnt:    to display reservation information
+*
+ */
+
+    /** Pre/post reservation-event info: time offset for the event. */
+    public static class _rsvEventInfo_prePost_t extends Structure {
+        public static class ByReference extends _rsvEventInfo_prePost_t implements Structure.ByReference {}
+        public static class ByValue extends _rsvEventInfo_prePost_t implements Structure.ByValue {}
+        public _rsvEventInfo_prePost_t() {}
+        public _rsvEventInfo_prePost_t(Pointer p) { super(p); read(); }
+
+        /* shift (offset) of the event relative to the reservation window —
+           presumably in minutes; verify against the native advance-reservation docs */
+        public int shift;
+    }
+
+
+
+    /* Reservation -exec event types and their string names. */
+    public static final int RSV_EXECEVENTTYPE_PRE = 1;
+    public static final int RSV_EXECEVENTTYPE_POST = 2;
+
+    public static final String RSV_EXECEVENTNAME_PRE = "pre";
+    public static final String RSV_EXECEVENTNAME_POST = "post";
+
+    /**
+     * \brief  reservation execution event.
+     */
+    public static class _rsvExecEvent_t extends Structure {
+        public static class ByReference extends _rsvExecEvent_t implements Structure.ByReference {}
+        public static class ByValue extends _rsvExecEvent_t implements Structure.ByValue {}
+        public _rsvExecEvent_t() {}
+        public _rsvExecEvent_t(Pointer p) { super(p); read(); }
+
+
+        /**
+         * < Event type (RSV_EXECEVENTTYPE_PRE or RSV_EXECEVENTTYPE_POST)
+         */
+        public int type;
+
+        /**
+         * < Boolean: is there additional info?
+         */
+        public int infoAttached;
+
+        /**
+         * < Info pertaining to event, such as offset
+         */
+        public Pointer info;
+    }
+
+
+
+    /**
+     * \brief  reservation execution command
+     */
+    public static class _rsvExecCmd_t extends Structure {
+        public static class ByReference extends _rsvExecCmd_t implements Structure.ByReference {}
+        public static class ByValue extends _rsvExecCmd_t implements Structure.ByValue {}
+        public _rsvExecCmd_t() {}
+        public _rsvExecCmd_t(Pointer p) { super(p); read(); }
+
+
+        /**
+         * < Full path to the command name
+         */
+        public String path;
+
+        /**
+         * < Size of events array
+         */
+        public int numEvents;
+
+        /**
+         * < Array of events that trigger -exec command
+         */
+        public Pointer /* _rsvExecEvent_t.ByReference */ events;
+    }
+
+
+
+    /**
+     *  \addtogroup reservation_option reservation_option
+     *    definitions of reservation options.
+     *    Bit flags combined into the options field of addRsvRequest / rsvInfoEnt.
+     */
+
+    /**
+     * <  User
+     */
+    public static final int RSV_OPTION_USER = 0x0001;
+
+    /**
+     * <  Group
+     */
+    public static final int RSV_OPTION_GROUP = 0x0002;
+
+    /**
+     * <  System
+     */
+    public static final int RSV_OPTION_SYSTEM = 0x0004;
+
+    /**
+     * <  Recur
+     */
+    public static final int RSV_OPTION_RECUR = 0x0008;
+
+    /**
+     * <  Resource requirement
+     */
+    public static final int RSV_OPTION_RESREQ = 0x0010;
+
+    /**
+     * <  Host
+     */
+    public static final int RSV_OPTION_HOST = 0x0020;
+
+    /**
+     * <  Open
+     */
+    public static final int RSV_OPTION_OPEN = 0x0040;
+
+    /**
+     * <  Delete
+     */
+    public static final int RSV_OPTION_DELETE = 0x0080;
+
+    /**
+     * <  Close
+     */
+    public static final int RSV_OPTION_CLOSED = 0x0100;
+
+    /**
+     * <  Execute
+     */
+    public static final int RSV_OPTION_EXEC = 0x0200;
+
+    /**
+     * <  Remote execute
+     */
+    public static final int RSV_OPTION_RMEXEC = 0x0400;
+
+    /**
+     * <  Next instance
+     */
+    public static final int RSV_OPTION_NEXTINSTANCE = 0x0800;
+
+    /**
+     * <  Disable
+     */
+    public static final int RSV_OPTION_DISABLE = 0x1000;
+
+    /**
+     * <  Add host
+     */
+    public static final int RSV_OPTION_ADDHOST = 0x2000;
+
+    /**
+     * <  Remove host (pairs with RSV_OPTION_ADDHOST; original comment said
+     * "Remote host" — verify against lsbatch.h)
+     */
+    public static final int RSV_OPTION_RMHOST = 0x4000;
+
+    /**
+     * <  Description
+     */
+    public static final int RSV_OPTION_DESCRIPTION = 0x8000;
+
+    /**
+     * <  Timewindow mode
+     */
+    public static final int RSV_OPTION_TWMOD = 0x10000;
+
+    /**
+     * <  Switch open/close
+     */
+    public static final int RSV_OPTION_SWITCHOPENCLOSE = 0x20000;
+
+    /**
+     * <  User mode
+     */
+    public static final int RSV_OPTION_USERMOD = 0x40000;
+
+    /**
+     * <  Reservation name
+     */
+    public static final int RSV_OPTION_RSVNAME = 0x80000;
+
+    /**
+     * <  Expired
+     */
+    public static final int RSV_OPTION_EXPIRED = 0x100000;
+
+    /**
+     * \brief add reservation request.
+     */
+    public static class addRsvRequest extends Structure {
+        public static class ByReference extends addRsvRequest implements Structure.ByReference {}
+        public static class ByValue extends addRsvRequest implements Structure.ByValue {}
+        public addRsvRequest() {}
+        public addRsvRequest(Pointer p) { super(p); read(); }
+
+
+        /**
+         * <Reservation options \ref reservation_option
+         */
+        public int options;
+
+        /**
+         * < User or group for which the reservation is made
+         */
+        public String name;
+
+        /**
+         * < Minimum number of processors required to run the job. See the -g option of brsvadd.
+         */
+        public int minNumProcs;
+
+        /**
+         * < Maximum number of processors required to run the job.
+         */
+        public int maxNumProcs;
+
+        /**< Range of number of processors */
+        //struct procRange;
+
+        /**
+         * < The number of invoker specified hosts for the reservation. If numAskedHosts is 0, all qualified hosts will be considered.
+         */
+        public int numAskedHosts;
+
+        /**
+         * < The array of names of invoker specified hosts for the reservation. The number of hosts is given by numAskedHosts. See the -m option of brsvadd.
+         */
+        public Pointer askedHosts;
+
+        /**
+         * < The resource requirements of the reservation. See the -R option of brsvadd.
+         */
+        public String resReq;
+
+        /**
+         * < Active time window for a recurring reservation. See the -t option of brsvadd.
+         */
+        public String timeWindow;
+
+        /**
+         * < Info for the -exec option.
+         */
+        public _rsvExecCmd_t.ByReference execCmd;
+
+        /**
+         * < Description for the reservation to be created. The description must be provided as a double quoted text string. The maximum length is 512 chars. Equivalent to the value of brsvadd -d.
+         */
+        public String desc;
+
+        /**
+         * < User-defined advance reservation name unique in an LSF cluster. The name is a string of letters, numeric chars, underscores, and dashes beginning with a letter. The maximum length of the name is 39 chars. Equivalent to the value of brsvadd -N.
+         */
+        public String rsvName;
+    }
+
+
+
+    /**
+     * \brief  remove reservation request.
+     */
+    public static class rmRsvRequest extends Structure {
+        public static class ByReference extends rmRsvRequest implements Structure.ByReference {}
+        public static class ByValue extends rmRsvRequest implements Structure.ByValue {}
+        public rmRsvRequest() {}
+        public rmRsvRequest(Pointer p) { super(p); read(); }
+
+
+        /**
+         * < Reservation ID of the reservation that you wish to remove.
+         */
+        public String rsvId;
+    }
+
+
+
+    /**
+     * \brief  modify reservation request
+     */
+    public static class modRsvRequest extends Structure {
+        public static class ByReference extends modRsvRequest implements Structure.ByReference {}
+        public static class ByValue extends modRsvRequest implements Structure.ByValue {}
+        public modRsvRequest() {}
+        public modRsvRequest(Pointer p) { super(p); read(); }
+
+
+        /**
+         * < Reservation ID of the reservation that you wish to modify.
+         */
+        public String rsvId;
+
+        /**
+         * < The reservation attributes to modify, carried in addRsvRequest form.
+         * NOTE(review): the upstream comment here described the -g user/group
+         * option only — verify the intended semantics against lsbatch.h.
+         */
+        public addRsvRequest fieldsFromAddReq;
+
+        /**
+         * < Disabled time duration
+         */
+        public String disabledDuration;
+    }
+
+
+
+    /**
+     * \brief  host reservation information entry.
+     */
+    public static class hostRsvInfoEnt extends Structure {
+        public static class ByReference extends hostRsvInfoEnt implements Structure.ByReference {}
+        public static class ByValue extends hostRsvInfoEnt implements Structure.ByValue {}
+        public hostRsvInfoEnt() {}
+        public hostRsvInfoEnt(Pointer p) { super(p); read(); }
+
+
+        /**
+         * < Host name.
+         */
+        public String host;
+
+        /**
+         * < Number of CPUs reserved on the host.
+         */
+        public int numCPUs;
+
+        /**
+         * < Number of job slots reserved on the host.
+         */
+        public int numSlots;
+
+        /**
+         * < Number of processors reserved on the host.
+         */
+        public int numRsvProcs;
+
+        /**
+         * < Count for used + suspended from reserved slots
+         */
+        public int numusedRsvProcs;
+
+        /**
+         * < Number of processors in use on the host.
+         */
+        public int numUsedProcs;
+    }
+
+
+
+    /**
+     * \brief  reservation information entry.
+     */
+    public static class rsvInfoEnt extends Structure {
+        public static class ByReference extends rsvInfoEnt implements Structure.ByReference {}
+        public static class ByValue extends rsvInfoEnt implements Structure.ByValue {}
+        public rsvInfoEnt() {}
+        public rsvInfoEnt(Pointer p) { super(p); read(); }
+
+
+        /**
+         * < Reservation options, see \ref reservation_option
+         */
+        public int options;
+
+        /**
+         * < Reservation ID returned from mbatchd. If the reservation fails, this is null. The memory for rsvid is allocated by the caller.
+         */
+        public String rsvId;
+
+        /**
+         * <  LSF user group name for the reservation. See the -g option of brsvadd.
+         */
+        public String name;
+
+        /**
+         * <  Number of hosts reserved
+         */
+        public int numRsvHosts;
+
+        /**
+         * <  Info about the reserved hosts
+         */
+        public Pointer /* hostRsvInfoEnt.ByReference */ rsvHosts;
+
+        /**
+         * < Active time window for a recurring reservation. See the -t option of  brsvadd.
+         */
+        public String timeWindow;
+
+        /**
+         * < Number of jobs running in the reservation.
+         */
+        public int numRsvJobs;
+
+        /**
+         * < Job IDs of jobs running in the reservation.
+         */
+        public LongByReference jobIds;
+
+        /**
+         * < Status of jobs running in the reservation.
+         */
+        public IntByReference jobStatus;
+
+        /**
+         * <  Description for the reservation to be created. The description must be provided as a double quoted text string. The maximum length is 512 chars. Equivalent to the value of brsvadd -d.
+         */
+        public String desc;
+
+        /**
+         * <  Null-terminated list of disabled durations
+         */
+        public Pointer disabledDurations;
+
+        /**
+         * <  The current state of the reservation - active or inactive.
+         */
+        public int state;
+
+        /**
+         * <  The time of the next instance of a recurring reservation.
+         */
+        public String nextInstance;
+
+        /**
+         * <  Creator of the reservation.
+         */
+        public String creator;
+    }
+
+
+
+/* backfill window related data structures and functions */
+
+    /**
+     * \brief Request for backfill-window slot information.
+     */
+    public static class slotInfoRequest extends Structure {
+        public static class ByReference extends slotInfoRequest implements Structure.ByReference {}
+        public static class ByValue extends slotInfoRequest implements Structure.ByValue {}
+        public slotInfoRequest() {}
+        public slotInfoRequest(Pointer p) { super(p); read(); }
+
+        /* options mask */
+
+/* Option -R */
+        // Made final: this is a flag constant, not per-instance state, and must
+        // not be reassigned. (Declared before the instance fields so it does not
+        // participate in the native field layout.)
+        public static final int SLOT_OPTION_RESREQ = 0X001;
+
+        public int options;
+
+/* Resource requirement string */
+        public String resReq;
+    }
+
+
+
+/*copy from SRInfo*/
+
+    /**
+     * \brief Slot-reservation info entry (copied from the native SRInfo struct).
+     */
+    public static class SRInfoEnt extends Structure {
+        public static class ByReference extends SRInfoEnt implements Structure.ByReference {}
+        public static class ByValue extends SRInfoEnt implements Structure.ByValue {}
+        public SRInfoEnt() {}
+        public SRInfoEnt(Pointer p) { super(p); read(); }
+
+
+/*number of reserved slots*/
+        public int numReserved;
+
+/* job's predicted start time */
+        public NativeLong predictedStartTime;
+    }
+
+
+
+    /**
+     * \brief Per-host slot-reservation information: host status, job limits,
+     * job counts per state, and the SRInfoEnt array for the host.
+     */
+    public static class hostSRInfoEnt extends Structure {
+        public static class ByReference extends hostSRInfoEnt implements Structure.ByReference {}
+        public static class ByValue extends hostSRInfoEnt implements Structure.ByValue {}
+        public hostSRInfoEnt() {}
+        public hostSRInfoEnt(Pointer p) { super(p); read(); }
+
+        public String host;
+        public int hStatus;
+        public int userJobLimit;
+        public int maxJobs;
+        public int numJobs;
+        public int numRUN;
+        public int numSSUSP;
+        public int numUSUSP;
+        public int numRESERVE;
+        // Size of the SRInfo array below.
+        public int numSR;
+        public Pointer /* SRInfoEnt.ByReference */ SRInfo;
+    }
+
+
+
+    /**
+     * \brief Reply carrying backfill-window slot information: per-host
+     * slot-reservation entries plus advance-reservation entries.
+     */
+    public static class slotInfoReply extends Structure {
+        public static class ByReference extends slotInfoReply implements Structure.ByReference {}
+        public static class ByValue extends slotInfoReply implements Structure.ByValue {}
+        public slotInfoReply() {}
+        public slotInfoReply(Pointer p) { super(p); read(); }
+
+
+/* to store the time of Master host */
+        public NativeLong masterTime;
+        // Size of the hostInfo array.
+        public int numHosts;
+        public Pointer /* hostSRInfoEnt.ByReference */ hostInfo;
+        // Size of the ARInfo (advance reservation) array.
+        public int numAR;
+        public Pointer /* rsvInfoEnt.ByReference */ ARInfo;
+    }
+
+
+
+
+/* the general limit related data structures and functions */
+
+
+    /* Resource types used in general resource-allocation limits
+     * (values of _limitResource.type). */
+    public static final int LSB_RSRC_LIMIT_TYPE_SLOTS = 0;
+    public static final int LSB_RSRC_LIMIT_TYPE_SLOT_PERPSR = 1;
+    public static final int LSB_RSRC_LIMIT_TYPE_MEM = 2;
+    public static final int LSB_RSRC_LIMIT_TYPE_MEM_PERCENT = 3;
+    public static final int LSB_RSRC_LIMIT_TYPE_SWP = 4;
+    public static final int LSB_RSRC_LIMIT_TYPE_SWP_PERCENT = 5;
+    public static final int LSB_RSRC_LIMIT_TYPE_TMP = 6;
+    public static final int LSB_RSRC_LIMIT_TYPE_TMP_PERCENT = 7;
+    public static final int LSB_RSRC_LIMIT_TYPE_JOBS = 8;
+
+/* all external resources */
+    public static final int LSB_RSRC_LIMIT_TYPE_EXT_RSRC = 9;
+
+    /**
+     * \addtogroup _consumertype _consumertype
+     * consumer types
+     * (values of _limitConsumer.type)
+     */
+    public static interface consumerType {
+        /**
+         * < Queues
+         */
+        public static final int LIMIT_QUEUES = 1;
+
+        /**
+         * < Per-queue
+         */
+        public static final int LIMIT_PER_QUEUE = 2;
+
+        /**
+         * < Users
+         */
+        public static final int LIMIT_USERS = 3;
+
+        /**
+         * < Per-users
+         */
+        public static final int LIMIT_PER_USER = 4;
+
+        /**
+         * < Hosts
+         */
+        public static final int LIMIT_HOSTS = 5;
+
+        /**
+         * < Per-host
+         */
+        public static final int LIMIT_PER_HOST = 6;
+
+        /**
+         * < Projects
+         */
+        public static final int LIMIT_PROJECTS = 7;
+
+        /**
+         * < Per-project
+         */
+        public static final int LIMIT_PER_PROJECT = 8;
+    }
+
+
+    /**< Type definitions */
+
+    /**
+     * \brief  limit consumer
+     */
+    /**
+     * \brief  limit consumer
+     */
+    public static class _limitConsumer extends Structure {
+        public static class ByReference extends _limitConsumer implements Structure.ByReference {}
+        public static class ByValue extends _limitConsumer implements Structure.ByValue {}
+        public _limitConsumer() {}
+        public _limitConsumer(Pointer p) { super(p); read(); }
+
+
+        /**
+         * < Consumer type ( _consumertype ):  -  Queues per-queue -  Users and per-user -  Hosts and per-host -  Projects and per-project
+         */
+        public int type;
+
+        /**
+         * < Consumer name
+         */
+        public String name;
+    }
+
+
+
+    /**
+     * \brief  limit resource.
+     */
+    public static class _limitResource extends Structure {
+        public static class ByReference extends _limitResource implements Structure.ByReference {}
+        public static class ByValue extends _limitResource implements Structure.ByValue {}
+        public _limitResource() {}
+        public _limitResource(Pointer p) { super(p); read(); }
+
+
+        /**
+         * < Resource name
+         */
+        public String name;
+
+        /**
+         * < Resource type (one of LSB_RSRC_LIMIT_TYPE_*)
+         */
+        public int type;
+
+        /**
+         * < Resource val
+         */
+        public float val;
+    }
+
+
+
+    /**
+     * \brief   limit information request
+     */
+    public static class _limitInfoReq extends Structure {
+        public static class ByReference extends _limitInfoReq implements Structure.ByReference {}
+        public static class ByValue extends _limitInfoReq implements Structure.ByValue {}
+        public _limitInfoReq() {}
+        public _limitInfoReq(Pointer p) { super(p); read(); }
+
+
+        /**
+         * < Limit policy name given by the user.
+         */
+        public String name;
+
+        /**
+         * < Number of consumers
+         */
+        public int consumerC;
+
+        /**
+         * < Consumer name, queue/host/user/project
+         */
+        public Pointer /* _limitConsumer.ByReference */ consumerV;
+    }
+
+
+
+    /**
+     * \brief  limit item.
+     */
+    public static class _limitItem extends Structure {
+        public static class ByReference extends _limitItem implements Structure.ByReference {}
+        public static class ByValue extends _limitItem implements Structure.ByValue {}
+        public _limitItem() {}
+        public _limitItem(Pointer p) { super(p); read(); }
+
+
+        /**
+         * < Number of consumers
+         */
+        public int consumerC;
+
+        /**
+         * < Consumers, such as queue, host, user or project
+         */
+        public Pointer /* _limitConsumer.ByReference */ consumerV;
+
+        /**
+         * < Number of resources
+         */
+        public int resourceC;
+
+        /**
+         * < Resources list
+         */
+        public Pointer /* _limitResource.ByReference */ resourceV;
+    }
+
+
+
+    /**
+     * \brief  limit information entry .
+     */
+    public static class _limitInfoEnt extends Structure {
+        public static class ByReference extends _limitInfoEnt implements Structure.ByReference {}
+        public static class ByValue extends _limitInfoEnt implements Structure.ByValue {}
+        public _limitInfoEnt() {}
+        public _limitInfoEnt(Pointer p) { super(p); read(); }
+
+
+        /**
+         * < Limit policy name given by the user
+         */
+        public String name;
+
+        /**
+         * < Limit configuration
+         */
+        public _limitItem confInfo;
+
+        /**
+         * < Size of limit dynamic usage info array
+         */
+        public int usageC;
+
+        /**
+         * < Limit dynamic usage info array
+         */
+        public Pointer /* _limitItem.ByReference */ usageInfo;
+
+    }
+
+
+
+/* action code for threshold based on type/model, is used for
+*  predefinedThresholdTypeModel().
+ */
+
+    /* Action codes for type/model-based thresholds; used with
+     * predefinedThresholdTypeModel(). */
+    public static final int ADD_THRESHOLD = 1;
+    public static final int GET_THRESHOLD = 2;
+    public static final int DEL_THRESHOLD = 3;
+
+/* Structure to hold thresholds defined based on host's type/model */
+
+    /**
+     * \brief Threshold defined for a host type or model.
+     */
+    public static class thresholdEntry extends Structure {
+        public static class ByReference extends thresholdEntry implements Structure.ByReference {}
+        public static class ByValue extends thresholdEntry implements Structure.ByValue {}
+        public thresholdEntry() {}
+        public thresholdEntry(Pointer p) { super(p); read(); }
+
+
+/* Name of type or model */
+        public String attr;
+
+/* Pointer to hostInfo */
+        public hostInfoEnt.ByReference hostEntryPtr;
+    }
+
+
+
+    /**
+     * \page lsb_limitInfo lsb_limitInfo
+     * \brief gets resource allocation limit configuration and dynamic usage
+     * information.
+     * <p/>
+     * Displays current usage of resource allocation limits configured in Limit
+     * sections in lsb.resources:
+     * \li    Configured limit policy name
+     * \li    Users
+     * \li    Queues
+     * \li    Hosts
+     * \li    Project names
+     * <p/>
+     * <b>\#include <lsf/lsbatch.h>
+     * <p/>
+     * int lsb_limitInfo( limitInfoReq.ByReference req,  limitInfoEnt.ByReference[] limitItemRef,
+     * IntByReference size, lsInfo.ByReference lsInfo)</b>
+     *
+     * @return int:-1
+     *         \n Function failed.
+     *         <p/>
+     *         \b Errors:
+     *         \par
+     *         On failure, lsberrno is set to indicate the error.
+     *         <p/>
+     *         <b>Equivalent line command:</b>
+     *         \par
+     *         blimits
+     *         <p/>
+     *         \b Files
+     *         \par
+     *         $LSB_CONFDIR/cluster_name/configdir/lsb.queues \n
+     *         $LSB_CONFDIR/cluster_name/configdir/lsb.users \n
+     *         $LSB_CONFDIR/cluster_name/configdir/lsb.hosts \n
+     *         $LSB_CONFDIR/cluster_name/configdir/lsb.resources
+     * @param req input, the user request for limit information
+     * @param limitItemRef output, the limit information array
+     * @param size output, the size of the limit information array
+     * @param lsInfo Please refer to the \ref lsInfo structure.
+     * <p/>
+     * <b>Data Structures:</b>
+     * \par
+     * _limitInfoReq
+     * \n _limitConsumer
+     * \n _limitInfoEnt
+     * \n _limitItem
+     * \n _limitResource
+     * <p/>
+     * <b>Define Statements:</b>
+     * \par
+     * \ref _consumertype
+     * @see \ref lsb_freeLimitInfoEnt
+     */
+    public static native int lsb_limitInfo(_limitInfoReq req, Pointer limitItemRef, IntByReference size, LibLsf.lsInfo lsInfo);
+
+    /**
+     * \page lsb_freeLimitInfoEnt lsb_freeLimitInfoEnt
+     * \brief Frees the memory allocated by \ref lsb_limitInfo.
+     * <p/>
+     * <b>\#include <lsf/lsbatch.h>
+     * <p/>
+     * void lsb_freeLimitInfoEnt(limitInfoEnt.ByReference  ent, int size)</b>
+     *
+     * @param size input, the size of the limit information array
+     *             <p/>
+     *             <b>Data Structures:</b>
+     *             \par
+     *             _limitInfoEnt
+     *             \n _limitItem
+     *             \n _limitConsumer
+     *             \n _limitResource
+     *             <p/>
+     *             <b>Define Statements:</b>
+     *             \par
+     *             \ref _consumertype
+     * return void
+     *         \n There's no return value.
+     *         <p/>
+     *         \b Errors:
+     *         \par
+     *         On failure, lsberrno is set to indicate the error.
+     *         <p/>
+     *         <b>Equivalent line commands:</b>
+     *         \par
+     *         blimits
+     *         <p/>
+     *         <b>Files:</b>
+     *         \par
+     *         $LSB_CONFDIR/cluster_name/configdir/lsb.queues \n
+     *         $LSB_CONFDIR/cluster_name/configdir/lsb.users \n
+     *         $LSB_CONFDIR/cluster_name/configdir/lsb.hosts \n
+     *         $LSB_CONFDIR/cluster_name/configdir/lsb.resources
+     * @param ent input, the array of limit information
+     * @see \ref lsb_limitInfo
+     */
+
+    public static native void lsb_freeLimitInfoEnt(_limitInfoEnt ent, int size);
+
+    /**
+     *  \addtogroup resizablejob_related resizablejob_related
+     *  Resizable job related definitions.
+     *  Bit flags combined into job_resize_release.options.
+     */
+
+    /**
+     * < Means release no slots
+     */
+    public static final int LSB_RESIZE_REL_NONE = 0x0;
+
+    /**
+     * < Means release all slots-In this case, nHosts, hosts and slots  indicate the slots that are not released
+     */
+    public static final int LSB_RESIZE_REL_ALL = 0x01;
+
+    /**
+     * < Means cancel any pending resize request
+     */
+    public static final int LSB_RESIZE_REL_CANCEL = 0x02;
+
+    /**
+     * < Means execute no resize notification command
+     */
+    public static final int LSB_RESIZE_REL_NO_NOTIFY = 0x04;
+
+    /**
+     * \brief  job resize release.
+     */
+    public static class job_resize_release extends Structure {
+        public static class ByReference extends job_resize_release implements Structure.ByReference {}
+        public static class ByValue extends job_resize_release implements Structure.ByValue {}
+        public job_resize_release() {}
+        public job_resize_release(Pointer p) { super(p); read(); }
+
+
+        /**
+         * < LSF job ID
+         */
+        public long jobId;
+
+        /**
+         * < Options is constructed from the bitwise inclusive OR of zero or more of the flags, as defined in \ref resizablejob_related .
+         */
+        public int options;
+
+        /**
+         * < Number of hosts in the hosts list, if no hosts are to be specified this should be zero
+         */
+        public int nHosts;
+
+        /**
+         * < Specified hosts list, nHosts number of elements
+         */
+        public Pointer hosts;
+
+        /**
+         * < Slots list, each element specifies the number of slots per corresponding host (0 implies all), nHosts number of elements
+         */
+        public IntByReference slots;
+
+        /**
+         * < Name and location of notification command
+         */
+        public String notifyCmd;
+    }
+
+
+
+    /**
+     * \brief Job resize (grow) request for a resizable job.
+     */
+    public static class job_resize_request extends Structure {
+        public static class ByReference extends job_resize_request implements Structure.ByReference {}
+        public static class ByValue extends job_resize_request implements Structure.ByValue {}
+        public job_resize_request() {}
+        public job_resize_request(Pointer p) { super(p); read(); }
+
+        public long jobId;
+        public int options;
+
+/* array size */
+        public int nHosts;
+
+/* array of hosts */
+        public Pointer hosts;
+
+/* notification command */
+        public String notifyCmd;
+    }
+
+
+
+/*
+*  End of resizable job related definitions
+ */
+
+/* Job Dependency Display */
+
+
+/* Job Dependency Display */
+/* for options */
+    /**
+     *  \addtogroup query_depend query_depend
+     *  Job Dependency Display for options
+     *  Bit flags combined into jobDepRequest.options.
+     */
+
+    /**
+     * <  Recursively
+     */
+    public static final int QUERY_DEPEND_RECURSIVELY = 0x1;
+
+    /**
+     * <  Detail
+     */
+    public static final int QUERY_DEPEND_DETAIL = 0x2;
+
+    /**
+     * <  Unsatisfied
+     */
+    public static final int QUERY_DEPEND_UNSATISFIED = 0x4;
+
+    /**
+     * <  Child
+     */
+    public static final int QUERY_DEPEND_CHILD = 0x8;
+
+    /**
+     * \brief  job dependent request.
+     */
+
+    public static class jobDepRequest extends Structure {
+        public static class ByReference extends jobDepRequest implements Structure.ByReference {}
+        public static class ByValue extends jobDepRequest implements Structure.ByValue {}
+        public jobDepRequest() {}
+        public jobDepRequest(Pointer p) { super(p); read(); }
+
+        /**
+         * < Job ID of the queried job or job array.
+         */
+        public long jobId;
+
+        /**
+         * < You can set the following bits into this field:\n QUERY_DEPEND_RECURSIVELY\n Query the dependency information recursively.\n QUERY_DEPEND_DETAIL\n Query the detailed dependency information.\n QUERY_DEPEND_UNSATISFIED\n Query the jobs that cause this job pend.\n QUERY_DEPEND_CHILD\n Query child jobs.
+         */
+        public int options;
+
+        /**
+         * < The level when you set QUERY_DEPEND_RECURSIVELY.
+         */
+        public int level;
+    }
+
+
+
+
+    /**
+     * \brief  queried jobs.
+     */
+    public static class queriedJobs extends Structure {
+        public static class ByReference extends queriedJobs implements Structure.ByReference {}
+        public static class ByValue extends queriedJobs implements Structure.ByValue {}
+        public queriedJobs() {}
+        public queriedJobs(Pointer p) { super(p); read(); }
+
+
+        /**
+         * < Job ID of the queried job or job array.
+         */
+        public long jobId;
+
+        /**
+         * < The whole dependency condition of the job.
+         */
+        public String dependcondition;
+
+        /**
+         * < Whether the condition is satisfied.
+         */
+        public int satisfied;
+    }
+
+
+
+/* for hasDependency */
+    /**
+     *  \addtogroup job_has_depend job_has_depend
+     *  options for hasDependency
+     *  (bit flags reported in dependJobs.hasDependency)
+     */
+
+    /**
+     * <  Job has dependency
+     */
+    public static final int JOB_HAS_DEPENDENCY = 0x1;
+
+    /**
+     * <  Job has individual  condition.
+     */
+    public static final int JOB_HAS_INDIVIDUAL_CONDITION = 0x2;
+
+    /**
+     * \brief  dependency jobs.
+     */
+
+    public static class dependJobs extends Structure {
+        public static class ByReference extends dependJobs implements Structure.ByReference {}
+        public static class ByValue extends dependJobs implements Structure.ByValue {}
+        public dependJobs() {}
+        public dependJobs(Pointer p) { super(p); read(); }
+
+        /**
+         * < Job ID. By default, it is the parent job of the queried job. Modify to child job by setting QUERY_DEPEND_CHILD in options of JobDepRequest.
+         */
+        public long jobId;
+
+        /**
+         * < The job name associated with the job ID.
+         */
+        public String jobname;
+
+        /**
+         * < The number of degrees of separation from the original job.
+         */
+        public int level;
+
+        /**
+         * < Job status of the job.
+         */
+        public int jobstatus;
+
+        /**
+         * < Whether the job ID has a dependency or not. When you set QUERY_DEPEND_RECURSIVELY in options of JobDepRequest, 0 indicates job ID does not have a dependency. Otherwise, one or more of the following bits displays:-  JOB_HAS_DEPENDENCY: Job has a dependency.-  JOB_HAS_INDIVIDUAL_CONDITION: Job has an individual dependency condition when it is an element of job array.
+         */
+        public int hasDependency;
+
+        /**
+         * < When you set "QUERY_DEPEND_DETAIL" into options, it is dependency condition of jobId. It is "" when you do not set "QUERY_DEPEND_DETAIL".
+         */
+        public String condition;
+
+        /**
+         * < Whether the condition is satisfied.
+         */
+        public int satisfied;
+
+        /**
+         * < Job ID. By default, it is the child job. Modify to parent job by setting QUERY_DEPEND_CHILD in options of JobDepRequest
+         */
+        public long depJobId;
+    }
+
+
+
+    /**
+     * \brief  job dependent information.
+     */
+
+    public static class jobDependInfo extends Structure {
+        public static class ByReference extends jobDependInfo implements Structure.ByReference {}
+        public static class ByValue extends jobDependInfo implements Structure.ByValue {}
+        public jobDependInfo() {}
+        public jobDependInfo(Pointer p) { super(p); read(); }
+
+
+        /**
+         * < You can set the following bits into this field:\n QUERY_DEPEND_RECURSIVELY\n Query the dependency information recursively.\n QUERY_DEPEND_DETAIL\n Query the detailed dependency information.\n QUERY_DEPEND_UNSATISFIED\n Query the jobs that cause this job pend.\n QUERY_DEPEND_CHILD\n Query child jobs.
+         */
+        public int options;
+
+        /**
+         * < The number of jobs you queried. By default, the value is 1. However, when you set QUERY_DEPEND_DETAIL in the options and you query a job array where some elements have a dependency condition that has changed, the value is the number of the changed element + 1.
+         */
+        public int numQueriedJobs;
+
+        /**
+         * < The jobs you queried.
+         */
+        public Pointer /* queriedJobs.ByReference */ queriedJobs;
+
+        /**
+         * < The number of levels returned.
+         */
+        public int level;
+
+        /**
+         * < The number of jobs returned.
+         */
+        public int numJobs;
+
+        /**
+         * < The returned dependency jobs.
+         */
+        public Pointer /* dependJobs.ByReference */ depJobs;
+    }
+
+
+
+
+/*
+*  Functional prototypes of the Advance Reservation API
+ */
+
+
+/* Macros */
+
+    /** True when the status bits mark a job as pending: JOB_STAT_PEND or JOB_STAT_PSUSP set. */
+    public static boolean IS_PEND(int s) {
+        final boolean pending = JNAUtils.toBoolean(s & JOB_STAT_PEND);
+        final boolean pendSuspended = JNAUtils.toBoolean(s & JOB_STAT_PSUSP);
+        return pending || pendSuspended;
+    }
+
+/* Do not test JOB_STAT_UNKWN in IS_START() */
+
+    /** True when the job has started: JOB_STAT_RUN, JOB_STAT_SSUSP or JOB_STAT_USUSP set. */
+    public static boolean IS_START(int s) {
+        final boolean running = JNAUtils.toBoolean(s & JOB_STAT_RUN);
+        final boolean sysSuspended = JNAUtils.toBoolean(s & JOB_STAT_SSUSP);
+        final boolean userSuspended = JNAUtils.toBoolean(s & JOB_STAT_USUSP);
+        return running || sysSuspended || userSuspended;
+    }
+
+    /** True when the job has finished: JOB_STAT_DONE or JOB_STAT_EXIT set. */
+    public static boolean IS_FINISH(int s) {
+        final boolean done = JNAUtils.toBoolean(s & JOB_STAT_DONE);
+        final boolean exited = JNAUtils.toBoolean(s & JOB_STAT_EXIT);
+        return done || exited;
+    }
+
+    /** True when the job is suspended in any way: JOB_STAT_PSUSP, JOB_STAT_SSUSP or JOB_STAT_USUSP set. */
+    public static boolean IS_SUSP(int s) {
+        final boolean pendSuspended = JNAUtils.toBoolean(s & JOB_STAT_PSUSP);
+        final boolean sysSuspended = JNAUtils.toBoolean(s & JOB_STAT_SSUSP);
+        final boolean userSuspended = JNAUtils.toBoolean(s & JOB_STAT_USUSP);
+        return pendSuspended || sysSuspended || userSuspended;
+    }
+
+/* Macro for checking post job process. (IO_SPOOL) */
+
+    /** True when every bit of JOB_STAT_PDONE is set, i.e. post-job processing completed. */
+    public static boolean IS_POST_DONE(int s) {
+        final int masked = s & JOB_STAT_PDONE;
+        return masked == JOB_STAT_PDONE;
+    }
+
+    public static boolean IS_POST_ERR(int s) {
+        return (((s) & JOB_STAT_PERR) == JOB_STAT_PERR);
+    }
+
+    /** True once post-job processing has finished, whether it succeeded (IS_POST_DONE) or failed (IS_POST_ERR). */
+    public static boolean IS_POST_FINISH(int s) {
+        if (IS_POST_DONE(s)) {
+            return true;
+        }
+        return IS_POST_ERR(s);
+    }
+
+/* On Windows, a DLL must export its symbols with __declspec(dllexport), but
+ * doing that breaks the static-library build. lsberrno is therefore exposed
+ * as a function instead of an exported variable.
+ */
+
+    /**
+     * Reads the current LSBLIB error number by dereferencing the pointer
+     * returned by the native {@code lsb_errno()} accessor.
+     */
+    public static int lsberrno() {
+        final IntByReference errnoRef = lsb_errno();
+        return errnoRef.getValue();
+    }
+
+
+
+
+/*
+*  Version of the mbatchd that was last contacted.
+*  -1 indicates the mbatchd has not been contacted.
+ */
+    //public int lsb_mbd_version;
+
+/*
+*  The data definition for host name list operations
+ */
+    // Name-list print formats. NOTE(review): presumably passed as the int
+    // argument of lsb_printNameList below -- confirm against lsbatch.h.
+    public static final int PRINT_SHORT_NAMELIST = 0x01;
+    public static final int PRINT_LONG_NAMELIST = 0x02;
+    public static final int PRINT_MCPU_HOSTS = 0x04;
+
+    /**
+     * A list of names with a per-name occurrence counter, used by the host
+     * name list operations (lsb_parseShortStr, lsb_parseLongStr,
+     * lsb_printNameList, lsb_compressStrList).
+     */
+    public static class nameList extends Structure {
+        public static class ByReference extends nameList implements Structure.ByReference {}
+        public static class ByValue extends nameList implements Structure.ByValue {}
+        public nameList() {}
+        public nameList(Pointer p) { super(p); read(); }
+
+
+/* number of names */
+        public int listSize;
+
+/* a group of names (native array of strings) */
+        public Pointer names;
+
+/* the occurrence count of the corresponding name */
+        public IntByReference counter;
+    }
+
+
+
+    /** Parses a short-form name string into a nameList (native). */
+    public static native nameList.ByReference lsb_parseShortStr(String string1, int int1);
+
+    /** Parses a long-form name string into a nameList (native). */
+    public static native nameList.ByReference lsb_parseLongStr(String string1);
+
+    /** Formats a nameList as a string; the int argument presumably selects one of the PRINT_* formats above -- confirm. */
+    public static native String lsb_printNameList(nameList namelist1, int int1);
+
+    /** Compresses a native array of strings into a nameList (native). */
+    public static native nameList.ByReference lsb_compressStrList(Pointer stringArray1, int int1);
+
+    /** Splits a name string; NOTE(review): exact split semantics are undocumented here -- confirm against lsbatch.h. */
+    public static native String lsb_splitName(String string1, IntByReference int1);
+
+    /** Returns a pointer to the native lsberrno variable; dereferenced by {@code lsberrno()}. */
+    public static native IntByReference lsb_errno();
+
+
+/* external routines related to API_CONF */
+
+    /** Reads batch parameter configuration into a paramConf (API_CONF routine). */
+    public static native paramConf.ByReference lsb_readparam(LibLsf.lsConf lsConf1);
+
+    /** Reads batch user configuration into a userConf (API_CONF routine). */
+    public static native userConf.ByReference lsb_readuser(LibLsf.lsConf lsConf1, int int1, LibLsf.clusterConf clusterConf1);
+
+    /** Extended variant of lsb_readuser that also takes the shared configuration. */
+    public static native userConf.ByReference lsb_readuser_ex(LibLsf.lsConf lsConf1, int int1, LibLsf.clusterConf clusterConf1, LibLsf.sharedConf sharedConf1);
+
+    /** Reads batch host configuration into a hostConf (API_CONF routine). */
+    public static native hostConf.ByReference lsb_readhost(LibLsf.lsConf lsConf1, LibLsf.lsInfo lsInfo1, int int1, LibLsf.clusterConf clusterConf1);
+
+    /** Reads batch queue configuration into a queueConf (API_CONF routine). */
+    public static native queueConf.ByReference lsb_readqueue(LibLsf.lsConf lsConf1, LibLsf.lsInfo lsInfo1, int int1, LibLsf.sharedConf sharedConf1, LibLsf.clusterConf clusterConf1);
+
+    /** Updates the in-memory cluster configuration (API_CONF routine). */
+    public static native void updateClusterConf(LibLsf.clusterConf clusterConf1);
+
+/* end of external routines related to API_CONF */
+
+    /**
+     * \page lsb_hostpartinfo lsb_hostpartinfo
+     * Returns information about host partitions.
+     * <p/>
+     * Java binding of the C call
+     * {@code hostPartInfoEnt *lsb_hostpartinfo(char ***hostParts, int *numHostParts)}.
+     *
+     * @param stringArray1  an array of host partition names (the C
+     *                      {@code hostParts} argument). To get information on
+     *                      all host partitions, set it to null.
+     * @param numHostHosts  the number of host partition names; when querying
+     *                      all partitions it is updated to the actual number
+     *                      of host partitions on return.
+     * @return the host partition information, or null if the function failed.
+     *         On failure lsberrno is set to indicate the error: if it is
+     *         LSBE_BAD_HPART, one of the given names is not a host partition
+     *         known to the LSF system; otherwise, if the returned count is
+     *         less than its original value, it is the actual number of host
+     *         partitions found.
+     *         <p/>
+     *         Equivalent line commands: none.
+     *         \n Files: $LSB_CONFDIR/cluster_name/configdir/lsb.hosts
+     * @see "lsb_usergrpinfo, lsb_hostgrpinfo"
+     */
+    public static native hostPartInfoEnt.ByReference lsb_hostpartinfo(Pointer stringArray1, IntByReference numHostHosts);
+
+    /**
+     * \page lsb_init lsb_init
+     * \brief Initializes the LSF batch library (LSBLIB) and gets the
+     * configuration environment.
+     * <p/>
+     * You must call lsb_init before any other LSBLIB library routine in your
+     * application.
+     *
+     * @param appName the name of your application. If appName holds the name
+     *                of your application, a logfile with the same name as your
+     *                application receives LSBLIB transaction information. If
+     *                appName is null, the logfile $LSF_LOGDIR/bcmd receives
+     *                LSBLIB transaction information.
+     * @return -1 if the function failed, in which case lsberrno is set to
+     *         indicate the error.
+     */
+    public static native int lsb_init(String appName);
+
+    /** NOTE(review): undocumented in this header; presumably a scheduler-side variant of lsb_init -- confirm against lsbatch.h. */
+    public static native int sch_lsb_init();
+
+    /**
+     * \page lsb_openjobinfo lsb_openjobinfo
+     * \brief Returns the number of jobs in the master batch daemon.
+     * <p/>
+     * Accesses information about pending, running and suspended jobs in the
+     * master batch daemon. Use lsb_openjobinfo to create a connection to the
+     * master batch daemon, then \ref lsb_readjobinfo to read job records, and
+     * \ref lsb_closejobinfo to close the connection. On success it returns
+     * the total number of records in the connection.
+     *
+     * @param jobId     select jobs with the given job ID; if 0, selection falls
+     *                  to another parameter. For one member of a job array use
+     *                  the form jobID[i], where jobID is the job array name and
+     *                  i is the index value.
+     * @param jobName   select jobs with the given job name; if null, selection
+     *                  falls to another parameter.
+     * @param userName  select jobs submitted by the named user or user group,
+     *                  or by all users if "all". If null, the calling user is
+     *                  assumed.
+     * @param queueName select jobs in the named queue; if null, jobs in all
+     *                  queues of the batch system are counted.
+     * @param hostName  select jobs on the named host, host group or cluster
+     *                  name; if null, jobs on all hosts are considered.
+     * @param options   flags from \ref defs_lsb_openjobinfo_a and
+     *                  \ref defs_lsb_openjobinfo, combined with bitwise OR.
+     * @return the number of records, or -1 if the function failed, in which
+     *         case lsberrno is set to indicate the error.
+     *         <p/>
+     *         Equivalent line command: bjobs.
+     *         \n Files: ${LSF_ENVDIR:-/etc}/lsf.conf
+     * @see "lsb_openjobinfo_a, lsb_openjobinfo_a_ext, lsb_openjobinfo_req, lsb_closejobinfo, lsb_readjobinfo, lsb_readframejob"
+     */
+    public static native int lsb_openjobinfo(long jobId, String jobName, String userName, String queueName, String hostName, int options);
+
+    /**
+     * \page lsb_openjobinfo_a lsb_openjobinfo_a
+     * \brief Provides the name and number of jobs and hosts in the master
+     * batch daemon.
+     * <p/>
+     * Provides more information on pending, running and suspended jobs than
+     * \ref lsb_openjobinfo. Use it to create a connection to the master batch
+     * daemon, then \ref lsb_readjobinfo to read job records, and
+     * \ref lsb_closejobinfo to close the connection. Jobs are selected by the
+     * value of exactly one of jobId, jobName, userName, queueName or
+     * hostName; the other parameters must be null or 0.
+     *
+     * @param jobId     select jobs with the given job ID; if 0, selection falls
+     *                  to another parameter. For one member of a job array use
+     *                  the form jobID[i].
+     * @param jobName   select jobs with the given job name, or null.
+     * @param userName  select jobs submitted by the named user or user group,
+     *                  or by all users if "all". If null, the calling user is
+     *                  assumed.
+     * @param queueName select jobs in the named queue; if null, jobs in all
+     *                  queues are considered.
+     * @param hostName  select jobs on the named host, host group or cluster
+     *                  name; if null, jobs on all hosts are considered.
+     * @param options   flags from \ref defs_lsb_openjobinfo_a, combined with
+     *                  bitwise OR.
+     * @return the job info header (jobInfoHead), or null if the function
+     *         failed, in which case lsberrno is set to indicate the error.
+     *         <p/>
+     *         Equivalent line command: bjobs.
+     *         \n Files: ${LSF_ENVDIR:-/etc}/lsf.conf,
+     *         $LSB_CONFDIR/cluster_name/configdir/lsb.params
+     * @see "lsb_openjobinfo, lsb_closejobinfo, lsb_readjobinfo, lsb_readframejob"
+     */
+    public static native jobInfoHead.ByReference lsb_openjobinfo_a(long jobId, String jobName, String userName, String queueName, String hostName, int options);
+
+    /**
+     * \page lsb_openjobinfo_a_ext lsb_openjobinfo_a_ext
+     * \brief Returns the name and number of jobs and hosts in the master
+     * batch daemon with additional host group information.
+     * <p/>
+     * Run from \ref lsb_openjobinfo_a using the same parameters and providing
+     * the same information, but with additional host group information. Jobs
+     * are selected by the value of exactly one of jobId, jobName, userName,
+     * queueName or hostName; the other parameters must be null or 0.
+     *
+     * @param jobId     select jobs with the given job ID; if 0, selection falls
+     *                  to another parameter. For one member of a job array use
+     *                  the form jobID[i].
+     * @param jobName   select jobs with the given job name, or null.
+     * @param userName  select jobs submitted by the named user or user group,
+     *                  or by all users if "all". If null, the calling user is
+     *                  assumed.
+     * @param queueName select jobs in the named queue; if null, jobs in all
+     *                  queues are considered.
+     * @param hostName  select jobs on the named host, host group or cluster
+     *                  name; if null, jobs on all hosts are considered.
+     * @param options   flags from \ref defs_lsb_openjobinfo_a, combined with
+     *                  bitwise OR.
+     * @return the extended job info header (jobInfoHeadExt), or null if the
+     *         function failed, in which case lsberrno is set to indicate the
+     *         error.
+     *         <p/>
+     *         Equivalent line command: bjobs.
+     *         \n Files: ${LSF_ENVDIR:-/etc}/lsf.conf,
+     *         $LSB_CONFDIR/cluster_name/configdir/lsb.params
+     * @see "lsb_openjobinfo, lsb_closejobinfo, lsb_readjobinfo, lsb_readframejob"
+     */
+    public static native jobInfoHeadExt.ByReference lsb_openjobinfo_a_ext(long jobId, String jobName, String userName, String queueName, String hostName, int options);
+
+    /**
+     * \page lsb_openjobinfo_req lsb_openjobinfo_req
+     * \brief Extensible job information API.
+     * <p/>
+     * Instead of submitting individual requests, this API defines all job
+     * info requests as objects, and can easily be enhanced to include
+     * additional requests.
+     *
+     * @param req the job information request (jobInfoReq). Option flags come
+     *            from \ref defs_lsb_openjobinfo_a and
+     *            \ref defs_lsb_openjobinfo.
+     * @return the extended job info header (\ref jobInfoHeadExt), or null if
+     *         the function failed, in which case lsberrno is set to indicate
+     *         the error.
+     *         <p/>
+     *         Equivalent line command: none.
+     *         \n Files: ${LSF_ENVDIR:-/etc}/lsf.conf
+     * @see "lsb_openjobinfo_a, lsb_openjobinfo_a_ext, lsb_closejobinfo, lsb_readjobinfo, lsb_readframejob"
+     */
+    public static native jobInfoHeadExt.ByReference lsb_openjobinfo_req(jobInfoReq req);
+
+    /* NOTE(review): the following three query/fetch calls are undocumented in
+     * this header; signatures mirror the native lsbatch.h API -- confirm
+     * semantics against the C header before use. */
+    public static native int lsb_queryjobinfo(int int1, NativeLongByReference long1, String string1);
+
+    public static native jobInfoEnt.ByReference lsb_fetchjobinfo(IntByReference int1, int int2, NativeLongByReference long1, String string1);
+
+    public static native jobInfoEnt.ByReference lsb_fetchjobinfo_ext(IntByReference int1, int int2, NativeLongByReference long1, String string1, jobInfoHeadExt jobInfoHeadExt);
+
+    /**
+     * \page lsb_readjobinfo lsb_readjobinfo
+     * \brief Returns the next job information record in mbatchd.
+     * <p/>
+     * Each call returns one record from mbatchd. The number of available
+     * records comes from \ref lsb_openjobinfo or \ref lsb_openjobinfo_a; call
+     * lsb_readjobinfo in a loop, using more to determine how many times to
+     * repeat the loop to retrieve all job information records.
+     *
+     * @param more number of job records in the master batch daemon.
+     * @return the next job record (jobInfoEnt), or null if the function
+     *         failed. If there are no more records, lsberrno is set to
+     *         LSBE_EOF.
+     *         <p/>
+     *         Equivalent line commands: none.
+     *         \n Files: $LSB_CONFDIR/cluster_name/configdir/lsb.queues
+     * @see "lsb_openjobinfo, lsb_openjobinfo_a, lsb_closejobinfo, lsb_hostinfo, lsb_pendreason, lsb_queueinfo, lsb_suspreason"
+     */
+    public static native jobInfoEnt.ByReference lsb_readjobinfo(IntByReference more);
+
+    /**
+     * \page lsb_submit lsb_submit
+     * Submits or restarts a job in the batch system according to the
+     * jobSubReq specification.
+     *
+     * @param jobSubReq   describes the requirements for job submission to the
+     *                    batch system. A job that does not meet these
+     *                    requirements is not submitted to the batch system and
+     *                    an error is returned.
+     * @param jobSubReply describes the results of the job submission.
+     * @return -1 if the function failed, in which case lsberrno is set to
+     *         indicate the error. If the environment variable BSUB_CHK_RESREQ
+     *         is set, lsberrno is either LSBE_RESREQ_OK or LSBE_RESREQ_ERR,
+     *         depending on the result of resource requirement string
+     *         checking; the badJobName field in the submitReply structure
+     *         contains the detailed error message.
+     *         <p/>
+     *         Equivalent line commands: bsub, brestart.
+     *         \n Files: ${LSF_ENVDIR:-/etc}/lsf.conf
+     *         \n Define statements: \ref lsb_submit_options,
+     *         \ref lsb_submit_options2, \ref lsb_submit_options3
+     * @see "lsb_modify, ls_info, lsb_queueinfo"
+     */
+    public static native long lsb_submit(submit jobSubReq, submitReply jobSubReply);
+
+    /**
+     * \page lsb_readjobinfo_cond lsb_readjobinfo_cond
+     * \brief Returns the next job information record for condensed host
+     * groups in mbatchd.
+     * <p/>
+     * Like \ref lsb_readjobinfo, each call returns one record from mbatchd;
+     * the number of available records comes from \ref lsb_openjobinfo or
+     * \ref lsb_openjobinfo_a, so call it in a loop driven by more. It differs
+     * from \ref lsb_readjobinfo in that, if jInfoHExt is not null, it
+     * substitutes hostGroup (if it is a condensed host group) for the job
+     * execution hosts.
+     *
+     * @param more      number of job records in the master batch daemon.
+     * @param jInfoHExt job information header info for the condensed host
+     *                  group.
+     * @return the next job record (jobInfoEnt), or null if the function
+     *         failed. If there are no more records, lsberrno is set to
+     *         LSBE_EOF.
+     *         <p/>
+     *         Equivalent line commands: none.
+     *         \n Files: $LSB_CONFDIR/cluster_name/configdir/lsb.queues
+     * @see "lsb_openjobinfo, lsb_openjobinfo_a, lsb_closejobinfo, lsb_hostinfo, lsb_pendreason, lsb_queueinfo, lsb_readjobinfo, lsb_suspreason"
+     */
+    public static native jobInfoEnt.ByReference lsb_readjobinfo_cond(IntByReference more, jobInfoHeadExt jInfoHExt);
+
+    /**
+     * \page lsb_readframejob lsb_readframejob
+     * \brief Returns all frame job information that matches the specified
+     * parameters and fills related information into the frame job
+     * information table.
+     * <p/>
+     * A wrapper around \ref lsb_openjobinfo, \ref lsb_readjobinfo and
+     * \ref lsb_closejobinfo. Memory allocated in frameJobInfoTbl must be
+     * freed by the user.
+     *
+     * @param jobId     select frame jobs with the given job ID; if 0, jobs
+     *                  satisfying the other specifications are selected. For
+     *                  one member of a job array use the form jobID[i].
+     * @param frameName select frame jobs with the given frame name.
+     * @param user      select frame jobs submitted by the named user or user
+     *                  group, or by all users if "all". If null, the calling
+     *                  user is assumed.
+     * @param queue     select frame jobs in the named queue; if null, jobs in
+     *                  all queues are considered.
+     * @param host      select frame jobs on the named host, host group or
+     *                  cluster name; if null, jobs on all hosts are considered.
+     * @param options   flags from \ref defs_lsb_openjobinfo_a, combined with
+     *                  bitwise OR.
+     * @param frameJobInfoTbl receives all frame job information (frameJobInfo
+     *                  / frameElementInfo structures).
+     * @return -1 if the function failed, in which case lsberrno is set to
+     *         indicate the error.
+     * @see "lsb_openjobinfo, lsb_readjobinfo, lsb_closejobinfo"
+     */
+
+    public static native int lsb_readframejob(long jobId, String frameName, String user, String queue, String host, int options, Pointer frameJobInfoTbl);
+
+    /**
+     * \page lsb_closejobinfo lsb_closejobinfo
+     * \brief Closes the job information connection with the master batch
+     * daemon.
+     * <p/>
+     * Call after opening a job information connection with
+     * \ref lsb_openjobinfo and reading job records with \ref lsb_readjobinfo.
+     * On failure, lsberrno is set to indicate the error.
+     * <p/>
+     * Equivalent line commands: none.
+     * \n Files: ${LSF_ENVDIR:-/etc}/lsf.conf
+     * @see "lsb_openjobinfo, lsb_openjobinfo_a, lsb_readjobinfo"
+     */
+
+    public static native void lsb_closejobinfo();
+
+    /**
+     * \page lsb_hostcontrol lsb_hostcontrol
+     * Opens or closes a host, or restarts or shuts down its slave batch
+     * daemon. Any program using this API must be setuid to root if LSF_AUTH
+     * is not defined in the lsf.conf file.
+     * <p/>
+     * To restart the master batch daemon (mbatchd) so it picks up updated
+     * batch LSF configuration files, use \ref lsb_reconfig instead.
+     *
+     * @param req the host control request (hostCtrlReq; options come from
+     *            \ref host_ctrl_option).
+     * @return -1 if the function failed, in which case lsberrno is set to
+     *         indicate the error.
+     *         <p/>
+     *         Equivalent line commands: none.
+     *         \n Files: ${LSF_ENVDIR:-/etc}/lsf.conf
+     * @see "lsb_reconfig"
+     */
+    public static native int lsb_hostcontrol(hostCtrlReq req);
+
+    /** NOTE(review): undocumented in this header; presumably the host-group analogue of lsb_hostcontrol -- confirm against lsbatch.h. */
+    public static native int lsb_hghostcontrol(hgCtrlReq hostCtrlReq1, hgCtrlReply reply);
+
+    /**
+     * \page lsb_queueinfo lsb_queueinfo
+     * \brief Returns information about batch queues.
+     * <p/>
+     * \ref lsb_queueinfo gets information about batch queues. See lsb.queues for more
+     * information about queue parameters.
+     * <p/>
+     * <b>\#include <lsf/lsbatch.h>
+     * <p/>
+     * queueInfoEnt.ByReference lsb_queueinfo(String[] queues,
+     * IntByReference numQueues, String hosts, String users,
+     * int options)</b>
+     *
+     * @param options Reserved for future use; supply 0.
+     *                <p/>
+     *                <b>Data Structures:</b>
+     *                \par
+     *                queueInfoEnt
+     *                \n shareAcctInfoEnt
+     *                \n apsFactorInfo
+     *                \n apsFactorMap
+     *                \n apsLongNameMap
+     *                <p/>
+     *                <b>Define Statements:</b>
+     *                \par
+     *                \ref queue_status
+     *                \n \ref queue_attribute
+     * @return null
+     *         \n Function Failed.
+     *         <p/>
+     *         \b Errors:
+     *         \par
+     *         If the function fails, lsberrno is set to indicate the error.
+     *         If lsberrno is LSBE_BAD_QUEUE, (*queues)[*numQueues] is not a queue known
+     *         to the LSF system. Otherwise, if.ByReference numQueues is less than its original value,
+     *         * numQueues is the actual number of queues found.
+     *         <p/>
+     *         <b>Equivalent line commands:</b>
+     *         \par
+     *         bqueues
+     *         <p/>
+     *         \b Files:
+     *         \par
+     *         $LSB_CONFDIR/cluster_name/configdir/lsb.queues
+     * @param queues An array of names of queues of interest.
+     * @param numQueues The number of queue names. To get information on all queues,
+     * set.ByReference numQueues to 0;* numQueues will be updated to the actual number of
+     * queues when this call returns.If.ByReference numQueues is 1 and queues is null,
+     * information on the system default queue is returned.
+     * @param hosts The host or cluster names. If hosts is not null, then only
+     * the queues that are enabled for the hosts are of interest.
+     * @param user The name of user. If user is not null, then only the queues
+     * that are enabled for the user are of interest.
+     * @see \ref lsb_hostinfo
+     * @see \ref lsb_userinfo
+     * @see \ref lsb_usergrpinfo
+     */
+    public static native queueInfoEnt.ByReference lsb_queueinfo(Pointer queues, IntByReference numQueues, String hosts, String user, int options);
+
+    /**
+     * \page lsb_reconfig lsb_reconfig
+     * \brief Dynamically reconfigures an LSF batch system.
+     * <p/>
+     * \ref lsb_reconfig dynamically reconfigures an LSF batch system to pick up new
+     * configuration parameters and changes to the job queue setup since system
+     * startup or the last reconfiguration (see lsb.queues).
+     * <p/>
+     * To restart a slave batch daemon, use \ref lsb_hostcontrol. This call is
+     * successfully invoked only by root or by the LSF administrator.
+     * <p/>
+     * Any program using this API must be setuid to root if LSF_AUTH is not
+     * defined in the lsf.conf file.
+     * <p/>
+     * <b>\#include <lsf/lsbatch.h>
+     * <p/>
+     * int lsb_reconfig (mbdCtrlReq.ByReference req)</b>
+     *
+     * @return int:-1 \n
+     *         The function failed.
+     *         <p/>
+     *         \b Errors:
+     *         \par
+     *         On failure, lsberrno is set to indicate the error.
+     *         <p/>
+     *         <b>Equivalent line commands:</b>
+     *         \par
+     *         badmin reconfig
+     *         <p/>
+     *         <b>Files:</b>
+     *         \par
+     *         ${LSF_ENVDIR:-/etc}/lsf.conf
+     * @param req mbatchd control request.
+     * <p/>
+     * <b>Data Structures:</b>
+     * \par
+     * mbdCtrlReq
+     * <p/>
+     * <b>Define Statements:</b>
+     * \par
+     * \ref mbd_operation
+     * @see \ref lsb_openjobinfo
+     */
+    public static native int lsb_reconfig(mbdCtrlReq req);
+
+    /**
+     * \page lsb_signaljob lsb_signaljob
+     * \brief Sends a signal to a job.
+     * <p/>
+     * Use \ref lsb_signaljob when migrating a job from one host to another. Use
+     * \ref lsb_signaljob to stop or kill a job on a host before using \ref lsb_mig to
+     * migrate the job. Next, use \ref lsb_signaljob to continue the stopped job at
+     * the specified host.
+     * <p/>
+     * Generally, use \ref lsb_signaljob to apply any UNIX signal to a job or process.
+     * <p/>
+     * Any program using this API must be setuid to root if LSF_AUTH is not defined
+     * in the lsf.conf file.
+     * <p/>
+     * <b>\#include <lsf/lsbatch.h>
+     * <p/>
+     * int lsb_signaljob (long jobId, int sigValue)</b>
+     *
+     * @param jobId    The job to be signaled. If a job in a job array is to be
+     *                 signaled, use the array form jobID[ i ] where jobID is the job array name,
+     *                 and i is the index value.
+     * @param sigValue SIGSTOP, SIGCONT, SIGKILL or some other UNIX signal.
+     *                 <p/>
+     *                 <b>Data Structures:</b>
+     *                 \par
+     *                 none
+     *                 <p/>
+     *                 <b>Define Statements:</b>
+     *                 \par
+     *                 none
+     * @return int:-1 \n
+     *         The function failed.
+     *         <p/>
+     *         \b Errors:
+     *         \par
+     *         If the function fails, lsberrno is set to indicate the error.
+     *         <p/>
+     *         <b>Equivalent line commands:</b>
+     *         \par
+     *         bkill \n
+     *         bstop \n
+     *         bresume
+     *         <p/>
+     *         <b>Files:</b>
+     *         \par
+     *         ${LSF_ENVDIR:-/etc}/lsf.conf
+     * @see \ref lsb_chkpntjob
+     * @see \ref lsb_forcekilljob
+     * @see \ref lsb_mig
+     */
+
+    public static native int lsb_signaljob(long jobId, int sigValue);
+
+    /**
+     * \page lsb_killbulkjobs lsb_killbulkjobs
+     * \brief Kills bulk jobs as soon as possible.
+     * <p/>
+     * Use \ref lsb_killbulkjobs to kill bulk jobs on a local host immediately, or
+     * to kill other jobs as soon as possible. If mbatchd rejects the request, it
+     * issues null as the reservation ID.
+     * <p/>
+     * <b>\#include <lsf/lsbatch.h>
+     * <p/>
+     * int lsb_killbulkjobs(signalBulkJobs.ByReference s)</b>
+     *
+     * @return int:-1 \n
+     *         The bulk jobs were not killed.
+     *         <p/>
+     *         \b Error:
+     *         \par
+     *         On failure, lsberrno is set to indicate the error.
+     *         <p/>
+     *         <b>Equivalent line command:</b>
+     *         \par
+     *         bkill -b
+     *         <p/>
+     *         <b>Files:</b>
+     *         \par
+     *         none
+     * @param s The signal to a group of jobs.
+     * <p/>
+     * <b>Data Structures:</b>
+     * \par
+     * signalBulkJobs
+     * <p/>
+     * <b>Define Statements:</b>
+     * \par
+     * none
+     * see none
+     */
+
+    public static native int lsb_killbulkjobs(signalBulkJobs s);
+
+    /**
+     * Sends a message to a job. NOTE(review): undocumented in this binding;
+     * presumably wraps lsb_msgjob(jobId, msg) from lsbatch.h — confirm against
+     * the LSF C API.
+     *
+     * @param long1 presumably the job ID the message is addressed to.
+     * @param s     presumably the message text.
+     */
+    public static native int lsb_msgjob(long long1, String s);
+
+    /**
+     * \page lsb_chkpntjob lsb_chkpntjob
+     * \brief Checkpoints a job.
+     * <p/>
+     * Checkpoints a job.
+     * <p/>
+     * <b>\#include <lsf/lsbatch.h>
+     * <p/>
+     * int lsb_chkpntjob(long jobId, NativeLong period, int options)</b>
+     *
+     * @param jobId   The job to be checkpointed.
+     * @param period  The checkpoint period in seconds. The value 0
+     *                disables periodic checkpointing.
+     * @param options The bitwise inclusive OR of some of the following:
+     *                \n LSB_CHKPNT_KILL
+     *                Checkpoint and kill the job as an atomic action.
+     *                \n LSB_CHKPNT_FORCE
+     *                Checkpoint the job even if non-checkpointable conditions exist.
+     *                <p/>
+     *                <b>Data Structures:</b>
+     *                \par
+     *                none
+     *                <p/>
+     *                <b>Define Statements:</b>
+     *                \par
+     *                \ref chkpnt_job_option
+     * @return int:-1 \n
+     *         The function failed.
+     *         <p/>
+     *         \note Any program using this API must be setuid to root if LSF_AUTH
+     *         is not defined in the lsf.conf file.
+     *         <p/>
+     *         \b Errors:
+     *         \par
+     *         On failure, lsberrno is set to indicate the error.
+     *         <p/>
+     *         <b>Equivalent line commands:</b>
+     *         \par
+     *         bchkpnt
+     *         <p/>
+     *         <b>Files:</b>
+     *         \par
+     *         ${LSF_ENVDIR:-/etc}/lsf.conf
+     * see none
+     */
+    public static native int lsb_chkpntjob(long jobId, NativeLong period, int options);
+
+    /**
+     * \page lsb_deletejob lsb_deletejob
+     * \brief Kills a job in a queue
+     * <p/>
+     * Use \ref lsb_deletejob to send a signal to kill a running, user-suspended,
+     * or system-suspended job. The job can be requeued or deleted from the batch
+     * system.If the job is requeued, it retains its submit time but it is dispatched
+     * according to its requeue time. When the job is requeued, it is assigned the
+     * PEND status and re-run.If the job is deleted from the batch system, it is
+     * no longer available to be requeued.
+     * <p/>
+     * <b>\#include <lsf/lsbatch.h>
+     * <p/>
+     * int lsb_deletejob (long jobId, int times, int options)</b>
+     *
+     * @param jobId   The job to be killed. If an element of a job array is to be
+     *                killed, use the array form jobID[i] where jobID is the job array name,
+     *                and i is the index value.
+     * @param times   Original job submit time.
+     * @param options If the preprocessor macro LSB_KILL_REQUEUE in lsbatch.h is
+     *                compared with options and found true, then requeue the job using the same job ID.
+     *                If the preprocessor macro LSB_KILL_REQUEUE in lsbatch.h is compared with
+     *                options and found false, then the job is deleted from the batch system.
+     *                <p/>
+     *                <b>Data Structures:</b>
+     *                \par
+     *                none
+     *                <p/>
+     *                <b>Define Statements:</b>
+     *                \par
+     *                \ref kill_requeue
+     * @return int:-1 \n
+     *         The function failed.
+     *         <p/>
+     *         \note Any program using this API must be setuid to root if LSF_AUTH is not defined in the
+     *         \n lsf.conf file.
+     *         <p/>
+     *         \b Errors:
+     *         \par
+     *         On failure, lsberrno is set to indicate the error.
+     *         <p/>
+     *         <b>Equivalent line commands:</b>
+     *         \par
+     *         bkill
+     *         \n brequeue -J
+     *         <p/>
+     *         <b>Files:</b>
+     *         \par
+     *         ${LSF_ENVDIR:-/etc}/lsf.conf
+     * @see \ref lsb_signaljob
+     * @see \ref lsb_chkpntjob
+     */
+    public static native int lsb_deletejob(long jobId, int times, int options);
+
+    /**
+     * \page lsb_forcekilljob lsb_forcekilljob
+     * \brief This function is used to send special force kill signal.
+     * <p/>
+     * <b>\#include <lsf/lsbatch.h>
+     * <p/>
+     * int lsb_forcekilljob(long jobId)</b>
+     *
+     * @param jobId which job is to be killed.
+     *              <p/>
+     *              <b>Data Structures:</b>
+     *              \par
+     *              none
+     *              <p/>
+     *              <b>Define Statements:</b>
+     *              \par
+     *              none
+     * @return int:-1
+     *         \n Function failed.
+     *         <p/>
+     *         \b Errors:
+     *         \par
+     *         On failure, lsberrno is set to indicate the error.
+     *         <p/>
+     *         <b>Equivalent line commands:</b>
+     *         \par
+     *         none
+     *         <p/>
+     *         <b>Files:</b>
+     *         \par
+     *         none
+     * @see \ref lsb_signaljob
+     */
+    public static native int lsb_forcekilljob(long jobId);
+
+    /**
+     * \page lsb_submitframe lsb_submitframe
+     * \brief Submits a frame job to the batch system.
+     * <p/>
+     * \ref lsb_submitframe submits a frame job to the batch system according to the
+     * jobSubReq specification and frameExp.
+     * <p/>
+     * Any program using this API must be setuid to root if LSF_AUTH is not defined
+     * in the lsf.conf file.
+     * <p/>
+     * <b>\#include <lsf/lsbatch.h>
+     * <p/>
+     * int lsb_submitframe (submit.ByReference jobSubReq, String frameExp,
+     * submitReply.ByReference jobSubReply)</b>
+     *
+     * @return int:-1 \n
+     *         Function failed.
+     *         <p/>
+     *         \b Errors:
+     *         \par
+     *         If the function fails, lsberrno is set to indicate the error and jobSubReply gives
+     *         additional information about the error.
+     *         <p/>
+     *         <b>Equivalent line commands:</b>
+     *         \par
+     *         none
+     *         <p/>
+     *         <b>Files:</b>
+     *         \par
+     *         ${LSF_ENVDIR:-/etc}/lsf.conf
+     * @param jobSubReq Describes the requirements for job submission to the
+     * batch system. A job that does not meet these requirements is not submitted
+     * to the batch system and an error is returned. \n
+     * See \ref lsb_submit for descriptions of the submit structure fields.
+     * @param frameExp The syntax of frameExp is: \n
+     * <b>frame_name[indexlist]</b> \n
+     * frame_name is any name consisting of alphanumerics, periods, forward slashes,
+     * dashes or underscores. indexlist is a list of one or more frame indexes,
+     * separated by commas. These indexes can each be either a single integer or
+     * a range, specified in the following format: \n
+     * <b>start-end[xstep[:chunk]]</b> \n
+     * start, end, step, and chunk are integers, but chunk must be positive.
+     * If step and
+     * chunk are omitted, the default value is 1.\n
+     * An example of a valid expression for frameExp is:\n
+     * <b>Frame_job_1[5,10-15,20-30x2:3]</b>
+     * @param jobSubReply Describes the results of the job submission to the
+     * batch system. \n
+     * See \ref lsb_submit for descriptions of the submitReply structure
+     * fields.
+     * <p/>
+     * <b>Data Structures:</b>
+     * \par
+     * submit
+     * \n submitReply
+     * <p/>
+     * <b>Define Statements:</b>
+     * \par
+     * \ref lsb_submit_options
+     * \n \ref lsb_submit_options2
+     * \n \ref lsb_submit_options3
+     * see none
+     */
+    public static native int lsb_submitframe(submit jobSubReq, String frameExp, submitReply jobSubReply);
+
+    /**
+     * \page lsb_requeuejob lsb_requeuejob
+     * \brief Requeues job arrays, jobs in job arrays, and individual jobs.
+     * <p/>
+     * Use \ref lsb_requeuejob to requeue job arrays, jobs in job arrays, and individual
+     * jobs that are running, pending, done, or exited. In a job array, you can
+     * requeue all the jobs or requeue individual jobs of the array.
+     * <p/>
+     * \ref lsb_requeuejob requeues jobs as if the jobs were in an array. A job not in an
+     * array is considered to be a job array composed of one job.
+     * <p/>
+     * Jobs in a job array can be requeued independently of each other regardless of
+     * any job's status (running, pending, exited, done). A requeued job is requeued
+     * to the same queue it was originally submitted from or switched to. The job
+     * submission time does not change so a requeued job is placed at the top of the
+     * queue. Use \ref lsb_movejob to place a job at the bottom or any other position
+     * in a queue.
+     * <p/>
+     * If a clean period is reached before \ref lsb_requeuejob is called, the cleaned
+     * jobs cannot be requeued. Set the variable CLEAN_PERIOD in your lsb.params file
+     * to determine the amount of time that job records are kept in MBD core memory
+     * after jobs have finished or terminated.
+     * <p/>
+     * To requeue a job assign values to the data members of the jobrequeue data
+     * structure, process command line options in case the user has specified a
+     * different job, and call \ref lsb_requeuejob to requeue the job array.
+     * <p/>
+     * Assign values to the jobID, status, and options data members of the jobrequeue
+     * data structure. Assign the job identification number to jobID. Assign
+     * JOB_STAT_PEND or JOB_STAT_PSUSP to status. Assign REQUEUE_DONE, REQUEUE_EXIT,
+     * and or REQUEUE_RUN to requeue running jobs.
+     * <p/>
+     * <b>\#include <lsf/lsbatch.h>
+     * <p/>
+     * int lsb_requeuejob(jobrequeue.ByReference  reqPtr)</b>
+     *
+     * @return int:-1 \n
+     *         The function failed.
+     *         <p/>
+     *         \b Errors:
+     *         \par
+     *         On failure, lsberrno is set to indicate the error.
+     *         <p/>
+     *         <b>Equivalent line commands:</b>
+     *         \par
+     *         brequeue -d
+     *         \n brequeue -e
+     *         \n brequeue -a
+     *         \n brequeue -r
+     *         \n brequeue -H
+     *         <p/>
+     *         <b>Files:</b>
+     *         \par
+     *         $LSB_CONFDIR/cluster_name/configdir/lsb.params
+     *         \n $LSB_SHAREDIR
+     * @param reqPtr This structure contains the information required to requeue a job.
+     * <p/>
+     * <b>Data Structures:</b>
+     * \par
+     * jobrequeue
+     * <p/>
+     * <b>Define Statements:</b>
+     * \par
+     * \ref requeuejob_options
+     * @see \ref lsb_movejob
+     * @see \ref lsb_pendreason
+     */
+    public static native int lsb_requeuejob(jobrequeue reqPtr);
+
+    /**
+     * \page lsb_sysmsg lsb_sysmsg
+     * \brief Returns a pointer to static data.
+     * <p/>
+     * \ref lsb_sysmsg returns a pointer to static data which stores the batch error
+     * message corresponding to lsberrno. The global variable lsberrno maintained
+     * by LSBLIB holds the error number from the most recent LSBLIB call that caused
+     * an error. If lsberrno == LSBE_SYS_CALL, then the system error message defined
+     * by errno is also returned. If lsberrno == LSBE_LSLIB, then the error message
+     * returned by \ref ls_sysmsg is returned.
+     * <p/>
+     * <b>\#include <lsf/lsbatch.h>
+     * <p/>
+     * String lsb_sysmsg ()</b>
+     *
+     * (no parameters) \n
+     *             <p/>
+     *             <b>Data Structures:</b>
+     *             \par
+     *             none
+     *             <p/>
+     *             <b>Define Statements:</b>
+     *             \par
+     *             none
+     * @return null
+     *         \n Function failed.
+     *         <p/>
+     *         \b Errors:
+     *         \par
+     *         If the function fails, lsberrno is set to indicate the error.
+     *         <p/>
+     *         <b>Equivalent line commands:</b>
+     *         \par
+     *         none
+     *         <p/>
+     *         <b>Files:</b>
+     *         \par
+     *         none
+     * @see \ref ls_perror
+     * @see \ref ls_sysmsg
+     */
+    public static native String lsb_sysmsg();
+
+    /**
+     * \page lsb_perror lsb_perror
+     * \brief Prints a batch LSF error message on stderr.
+     * <p/>
+     * \ref lsb_perror prints a batch LSF error message on stderr. The usrMsg is
+     * printed out first, followed by a ":" and the batch error message corresponding
+     * to lsberrno.
+     * <p/>
+     * \ref lsb_perror - Print LSBATCH error message on stderr. In addition
+     * to the error message defined by lsberrno, user supplied message usrMsg1
+     * is printed out first and a ':' is added to separate.ByReference  usrMsg1 and LSBATCH
+     * error message.
+     * <p/>
+     * <b>\#include <lsf/lsbatch.h>
+     * <p/>
+     * void lsb_perror (String usrMsg)</b>
+     *
+     * return void \n
+     *         Prints out the user supplied error message.
+     *         <p/>
+     *         \b Errors:
+     *         \par
+     *         If the function fails, lsberrno is set to indicate the error.
+     *         <p/>
+     *         <b>Equivalent line command:</b>
+     *         \par
+     *         none
+     *         <p/>
+     *         \b Files:
+     *         \par
+     *         none
+     * @param usrMsg A user supplied error message.
+     * <p/>
+     * <b>Data Structures:</b>
+     * \par
+     * none
+     * <p/>
+     * <b>Define Statements:</b>
+     * \par
+     * none
+     * see none
+     */
+    public static native void lsb_perror(String usrMsg);
+
+    /**
+     * NOTE(review): undocumented in this binding; presumably reports an LSF error
+     * on behalf of a command (cf. \ref lsb_perror) — confirm against lsbatch.h.
+     *
+     * @param string1 first message string (presumably the command name).
+     * @param string2 second message string.
+     * @param int1    presumably an error number or exit code.
+     */
+    public static native void lsb_errorByCmd(String string1, String string2, int int1);
+
+    /**
+     * NOTE(review): undocumented in this binding; presumably returns the LSBATCH
+     * error message corresponding to lsberrno, prefixed by the supplied string,
+     * analogous to \ref lsb_perror but returning the text instead of printing it —
+     * confirm against lsbatch.h.
+     *
+     * @param string1 a user supplied message to prepend (presumably).
+     */
+    public static native String lsb_sperror(String string1);
+
+    /**
+     * \page lsb_peekjob lsb_peekjob
+     * \brief Returns the base name of the file related to the job ID
+     * <p/>
+     * \ref lsb_peekjob retrieves the name of a job file.
+     * <p/>
+     * Only the submitter can peek at job output.
+     * <p/>
+     * The storage for the file name will be reused by the next call.
+     * <p/>
+     * Any program using this API must be setuid to root if LSF_AUTH
+     * is not defined in the lsf.conf file.
+     * <p/>
+     * <b>\#include <lsf/lsbatch.h>
+     * <p/>
+     * String lsb_peekjob (long jobId)</b>
+     *
+     * @param jobId The job ID that the LSF system assigned to the job. If a job
+     *              in a job array is to be returned, use the array form jobID[i] where jobID
+     *              is the job array name, and i is the index value.
+     *              <p/>
+     *              <b>Data Structures:</b>
+     *              \par
+     *              none
+     *              <p/>
+     *              <b>Define Statements:</b>
+     *              \par
+     *              none
+     * @return null
+     *         \n Function failed.
+     *         <p/>
+     *         \b Errors:
+     *         \par
+     *         If the function fails, lsberrno is set to indicate the error.
+     *         <p/>
+     *         <b>Equivalent line command:</b>
+     *         \par
+     *         bpeek
+     *         <p/>
+     *         \b Files:
+     *         \par
+     *         ${LSF_ENVDIR:-/etc}/lsf.conf
+     * see none
+     */
+    public static native String lsb_peekjob(long jobId);
+
+    /**
+     * \page lsb_mig lsb_mig
+     * \brief Migrates a job from one host to another.
+     * <p/>
+     * \ref lsb_mig migrates a job from one host to another. Any program using
+     * this API must be setuid to root if LSF_AUTH is not defined
+     * in the lsf.conf file.
+     * <p/>
+     * <b>\#include <lsf/lsbatch.h>
+     * <p/>
+     * int lsb_mig(submig.ByReference mig, IntByReference badHostIdx)</b>
+     *
+     * @return int:-1 \n
+     *         Function failed.
+     *         <p/>
+     *         \b Errors:
+     *         \par
+     *         If the function fails, lsberrno is set to indicate the error and badHostIdx indicates
+     *         which askedHost is not acceptable.
+     *         <p/>
+     *         <b>Equivalent line command:</b>
+     *         \par
+     *         none
+     *         <p/>
+     *         \b Files:
+     *         \par
+     *         ${LSF_ENVDIR:-/etc}/lsf.conf
+     * @param mig The job to be migrated.
+     * @param badHostIdx If the call fails, (**askedHosts)[*badHostIdx] is not a
+     * host known to the LSF system.
+     * <p/>
+     * <b>Data Structures:</b>
+     * \par
+     * submig
+     * <p/>
+     * <b>Define Statements:</b>
+     * \par
+     * none
+     * @see \ref lsb_submit
+     */
+    public static native int lsb_mig(submig mig, IntByReference badHostIdx);
+
+    /**
+     * NOTE(review): undocumented in this binding; presumably returns information
+     * about clusters, analogous to the C lsb_clusterinfo — confirm against lsbatch.h.
+     *
+     * @param int1 presumably receives the number of clusters returned.
+     * @param stringArray1 presumably an array of cluster names of interest.
+     * @param int2 presumably the number of cluster names supplied.
+     */
+    public static native clusterInfoEnt.ByReference lsb_clusterinfo(IntByReference int1, Pointer stringArray1, int int2);
+
+    /**
+     * NOTE(review): undocumented in this binding; presumably the extended variant
+     * of \ref lsb_clusterinfo returning clusterInfoEntEx records — confirm against
+     * lsbatch.h.
+     *
+     * @param int1 presumably receives the number of clusters returned.
+     * @param stringArray1 presumably an array of cluster names of interest.
+     * @param int2 presumably the number of cluster names supplied.
+     */
+    public static native clusterInfoEntEx.ByReference lsb_clusterinfoEx(IntByReference int1, Pointer stringArray1, int int2);
+
+    /**
+     * \page lsb_hostinfo lsb_hostinfo
+     * Returns information about job server hosts.
+     * <p/>
+     * <b>\#include <lsf/lsbatch.h>
+     * <p/>
+     * hostInfoEnt.ByReference lsb_hostinfo(String[] hosts, IntByReference numHosts)</b>
+     *
+     * @return hostInfoEnt.ByReference :null
+     *         \n Function failed.
+     *         <p/>
+     *         <b>Errors:</b>
+     *         \par
+     *         If the function fails, lsberrno is set to indicate the error. If lsberrno is
+     *         LSBE_BAD_HOST, (*hosts)[*numHosts] is not a host known to the batch system.
+     *         Otherwise, if.ByReference numHosts is less than its original value,* numHosts is the actual
+     *         number of hosts found.
+     *         <p/>
+     *         <b>Equivalent line commands:</b>
+     *         \par
+     *         bhosts
+     *         <p/>
+     *         <b>Files:</b>
+     *         \par
+     *         $LSB_CONFDIR/cluster_name/configdir/lsb.hosts
+     * @param hosts
+     * An array of host or cluster names.
+     * @param numHosts
+     * The number of host names.
+     * To get information on all hosts, set.ByReference numHosts to 0;* numHosts will be set to the
+     * actual number of hostInfoEnt structures when this call returns.
+     * If.ByReference numHosts is 1 and hosts is null, information on the local host is returned.
+     * <p/>
+     * <b>Data Structures:</b>
+     * \par
+     * hostInfoEnt
+     * <p/>
+     * <b>Define Statements:</b>
+     * \par
+     * \ref host_status
+     * \n \ref host_load_BusyReason
+     * \n \ref host_attributes
+     * @see \ref lsb_hostinfo_ex
+     * @see \ref ls_info
+     * @see \ref ls_loadofhosts
+     * @see \ref lsb_queueinfo
+     * @see \ref lsb_userinfo
+     */
+    public static native hostInfoEnt.ByReference lsb_hostinfo(Pointer hosts, IntByReference numHosts);
+
+    /**
+     * \page lsb_hostinfo_ex lsb_hostinfo_ex
+     * Returns information about job server hosts that satisfy specified resource
+     * requirements. \ref lsb_hostinfo_ex returns information about job server hosts
+     * that satisfy the specified resource requirements.
+     * <p/>
+     * <b>\#include <lsf/lsbatch.h>
+     * <p/>
+     * hostInfoEnt.ByReference lsb_hostinfo_ex(String[] hosts,
+     * IntByReference numHosts, String resReq, int options)</b>
+     * <p/>
+     * NOTE(review): this binding declares the parameters as
+     * (Pointer resReq, IntByReference numHosts, String string1, int options);
+     * judging from the documented prototype above, the Pointer likely carries the
+     * array of host or cluster names and the String the resource requirements —
+     * the names appear swapped relative to the underlying C API. Confirm against
+     * lsbatch.h before relying on the parameter names.
+     *
+     * @param options Options is reserved for the future use.
+     *                <p/>
+     *                <b>Data Structures:</b>
+     *                \par
+     *                hostInfoEnt
+     *                <p/>
+     *                <b>Define Statements:</b>
+     *                \par
+     *                \ref host_status
+     *                \n \ref host_load_BusyReason
+     *                \n \ref host_attributes
+     * @return null
+     *         \n Function failed.
+     *         <p/>
+     *         <b>Errors:</b>
+     *         \par
+     *         If the function fails, lsberrno is set to indicate the error. If lsberrno is
+     *         LSBE_BAD_HOST, (*hosts)[*numHosts] is not a host known to the batch system.
+     *         Otherwise, if.ByReference numHosts is less than its original value,* numHosts is the actual
+     *         number of hosts found.
+     *         <p/>
+     *         <b>Equivalent line commands:</b>
+     *         \par
+     *         none
+     *         <p/>
+     *         <b>Files:</b>
+     *         \par
+     *         $LSB_CONFDIR/cluster_name/configdir/lsb.hosts
+     * @param numHosts The number of host names.
+     * To get information on all hosts, set.ByReference numHosts to 0;* numHosts will be set
+     * to the actual number of hostInfoEnt structures when this call returns.
+     * If.ByReference numHosts is 1 and hosts is null, information on the local host is returned.
+     * @param resReq Resource requirements.
+     * If this option is specified, then only host information for those hosts
+     * that satisfy the resource requirements is returned. Returned hosts are
+     * sorted according to the load on the resource() given in resReq, or by
+     * default according to CPU and paging load.
+     * @see \ref ls_info
+     * @see \ref ls_loadofhosts
+     * @see \ref lsb_hostinfo
+     * @see \ref lsb_queueinfo
+     * @see \ref lsb_userinfo
+     * @param string1 likely the resource-requirement string — see NOTE(review) above.
+     */
+
+    public static native hostInfoEnt.ByReference lsb_hostinfo_ex(Pointer resReq, IntByReference numHosts, String string1, int options);
+
+    /**
+     * \page lsb_hostinfo_cond lsb_hostinfo_cond
+     * Returns condensed information about job server hosts.
+     * <p/>
+     * \ref lsb_hostinfo_cond returns condensed information about job server hosts.
+     * While \ref lsb_hostinfo returns specific information about individual hosts,
+     * \ref lsb_hostinfo_cond returns the number of jobs in each state within the
+     * entire host group. The condHostInfoEnt structure contains counters that
+     * indicate how many hosts are in the ok, busy, closed, full, unreach, and
+     * unavail states and an array of hostInfoEnt structures that indicate the
+     * status of each host in the host
+     * group.
+     * <p/>
+     * <b>\#include <lsf/lsbatch.h>
+     * <p/>
+     * condHostInfoEnt.ByReference  lsb_hostinfo_cond
+     * (String[] hosts, IntByReference numHosts,
+     * String resReq, int options)</b>
+     *
+     * @param options Any options called with the function.
+     *                <p/>
+     *                <b>Data Structures</b>
+     *                \par
+     *                condHostInfoEnt
+     *                \n hostInfoEnt
+     *                <p/>
+     *                <b>Define Statements:</b>
+     *                \par
+     *                none
+     * @return null
+     *         \n Function failed.
+     *         <p/>
+     *         <b>Errors:</b>
+     *         \par
+     *         If the function fails, lsberrno is set to indicate the error.
+     *         <p/>
+     *         <b>Equivalent line commands:</b>
+     *         \par
+     *         none
+     *         <p/>
+     *         <b>Files:</b>
+     *         \par
+     *         none
+     * @param hosts An array of host names belonging to the host group.
+     * @param numHosts The number of host names in the host group.
+     * To get information on all hosts in the host group, set.ByReference numHosts to 0;
+     * * numHosts will be set to the actual number of hostInfoEnt structures in
+     * the host group when this call returns.
+     * @param resReq Any resource requirements called with the function.
+     * @see \ref lsb_hostinfo
+     */
+    public static native condHostInfoEnt.ByReference lsb_hostinfo_cond(Pointer hosts, IntByReference numHosts, String resReq, int options);
+
+    /**
+     * \page lsb_movejob lsb_movejob
+     * \brief Changes the position of a pending job in a queue.
+     * <p/>
+     * Use \ref lsb_movejob to move a pending job to a new position that you specify
+     * in a queue. Position the job in a queue by first specifying the job ID.
+     * Next, count, beginning at 1, from either the top or the bottom of the queue,
+     * to the position you want to place the job.
+     * <p/>
+     * To position a job at the top of a queue, choose the top of a queue parameter
+     * and a postion of 1.To position a job at the bottom of a queue, choose the
+     * bottom of the queue parameter and a position of 1.
+     * <p/>
+     * By default, LSF dispatches
+     * jobs in a queue in order of their arrival (such as first-come-first-served),
+     * subject to the availability of suitable server hosts.
+     * <p/>
+     * <b>\#include <lsf/lsbatch.h>
+     * <p/>
+     * int lsb_movejob (long jobId, IntByReference position, int opCode)</b>
+     *
+     * @param jobId  The job ID that the LSF system assigns to the job. If a job
+     *               in a job array is to be moved, use the array form jobID[ i ] where jobID is
+     *               the job array name, and i is the index value.
+     * @param opCode The top or bottom position of a queue.
+     *               \n \b TO_TOP
+     *               \n The top position of a queue.
+     *               \n \b TO_BOTTOM
+     *               \n The bottom position of a queue.
+     *               \n If an opCode is not specified for the top or bottom position, the
+     *               function fails.
+     *               <p/>
+     *               <b>Data Structures:</b>
+     *               \par
+     *               none
+     *               <p/>
+     *               <b>Define Statements:</b>
+     *               \par
+     *               \ref movejob_options
+     * @return int:-1 \n
+     *         The function failed.
+     *         <p/>
+     *         \b Errors:
+     *         \par
+     *         If the function fails, lsberrno is set to indicate the error.
+     *         <p/>
+     *         <b>Equivalent line command:</b>
+     *         \par
+     *         btop
+     *         \n bbot
+     *         \n bjobs -q
+     *         <p/>
+     *         \b Files:
+     *         \par
+     *         ${LSF_ENVDIR:-/etc}/lsf.conf
+     * @param position The new position of the job in a queue. position must be
+     * a value of 1 or more.
+     * @see \ref lsb_pendreason
+     */
+
+    public static native int lsb_movejob(long jobId, IntByReference position, int opCode); // fixed swapped param names: C API is lsb_movejob(LS_LONG_INT jobId, int *position, int opCode)
+
+    /**
+     * \page lsb_switchjob lsb_switchjob
+     * \brief Switches an unfinished job to another queue.
+     * <p/>
+     * \ref lsb_switchjob switches an unfinished job to another queue. Effectively,
+     * the job is removed from its current queue and re-queued in the new queue.
+     * <p/>
+     * The switch operation can be performed only when the job is acceptable to
+     * the new queue. If the switch operation is unsuccessful, the job will stay
+     * where it is. A user can only switch his/her own unfinished jobs, but root
+     * and the LSF administrator can switch any unfinished job.
+     * <p/>
+     * Any program using this API must be setuid to root if LSF_AUTH is not defined
+     * in the lsf.conf file.
+     * <p/>
+     * <b>\#include <lsf/lsbatch.h>
+     * <p/>
+     * int lsb_switchjob (long jobId, String queue)</b>
+     *
+     * @param jobId The job to be switched. If an element of a job array is to
+     *              be switched, use the array form jobID[i] where jobID is the job array name,
+     *              and i is the index value.
+     * @return int:-1 \n
+     *         Function failed.
+     *         <p/>
+     *         \b Errors:
+     *         \par
+     *         If the function fails, lsberrno is set to indicate the error.
+     *         <p/>
+     *         <b>Equivalent line commands:</b>
+     *         \par
+     *         bswitch
+     *         <p/>
+     *         <b>Files:</b>
+     *         \par
+     *         ${LSF_ENVDIR:-/etc}/lsf.conf
+     * @param queue The new queue for the job.
+     * <p/>
+     * <b>Data Structures:</b>
+     * \par
+     * none
+     * <p/>
+     * <b>Define Statements:</b>
+     * \par
+     * none
+     * see none
+     */
+    public static native int lsb_switchjob(long jobId, String queue);
+
+    /**
+     * \page lsb_queuecontrol lsb_queuecontrol
+     * \brief Changes the status of a queue.
+     * <p/>
+     * \ref lsb_queuecontrol changes the status of a queue.
+     * <p/>
+     * Any program using this API must be setuid to root if LSF_AUTH is not defined
+     * in the lsf.conf file.
+     * <p/>
+     * If a queue is inactivated by its dispatch window (see lsb.queues), then it
+     * cannot be re-activated by this call.
+     * <p/>
+     * <b>\#include <lsf/lsbatch.h>
+     * <p/>
+     * int lsb_queuecontrol (queueCtrlReq.ByReference req)</b>
+     *
+     * @return int:-1 \n
+     *         Function failed.
+     *         <p/>
+     *         \b Errors:
+     *         \par
+     *         If the function fails, lsberrno is set to indicate the error.
+     *         <p/>
+     *         <b>Equivalent line commands:</b>
+     *         \par
+     *         none
+     *         <p/>
+     *         \b Files:
+     *         \par
+     *         ${LSF_ENVDIR:-/etc}/lsf.conf
+     * @param req queue control request.
+     * <p/>
+     * <b>Data Structures:</b>
+     * \par
+     * queueCtrlReq
+     * <p/>
+     * <b>Define Statements:</b>
+     * \par
+     * \ref queue_ctrl_option
+     * @see \ref lsb_queueinfo
+     */
+    public static native int lsb_queuecontrol(queueCtrlReq req);
+
+    /**
+     * \page lsb_userinfo lsb_userinfo
+     * \brief Returns the maximum number of job slots that a user can use
+     * simultaneously on any host and in the whole local LSF cluster.
+     * <p/>
+     * \ref lsb_userinfo gets the maximum number of job slots that a user can use
+     * simultaneously on any host and in the whole local LSF cluster, as well as
+     * the current number of job slots used by running and suspended jobs or
+     * reserved for pending jobs. The maximum numbers of job slots are defined
+     * in the LSF configuration file lsb.users (see lsb.users). The reserved
+     * user name default, defined in the lsb.users configuration file, matches
+     * users not listed in the lsb.users file who have no jobs started in the
+     * system.
+     * <p/>
+     * The returned array will be overwritten by the next call.
+     * <p/>
+     * <b>\#include <lsf/lsbatch.h>
+     * <p/>
+     * userInfoEnt.ByReference lsb_userinfo(String[] users, IntByReference numUsers)</b>
+     *
+     * @return null \n
+     *         Function failed.
+     *         <p/>
+     *         \b Errors:
+     *         \par
+     *         If the function fails, lsberrno is set to indicate the error. If lsberrno is
+     *         LSBE_BAD_USER, (*users)[*numUsers] is not a user known to the LSF system.
+     *         Otherwise, if *numUsers is less than its original value, *numUsers is the actual
+     *         number of users found.
+     *         <p/>
+     *         <b>Equivalent line commands:</b>
+     *         \par
+     *         busers
+     *         <p/>
+     *         <b>Files:</b>
+     *         \par
+     *         $LSB_CONFDIR/cluster_name/configdir/lsb.users
+     * @param users An array of user names.
+     * @param numUsers The number of user names.
+     * To get information about all users, set *numUsers = 0; *numUsers will
+     * be updated to the actual number of users when this call returns. To get
+     * information on the invoker, set users = null, *numUsers = 1.
+     * <p/>
+     * <b>Data Structures:</b>
+     * \par
+     * userInfoEnt
+     * <p/>
+     * <b>Define Statements:</b>
+     * \par
+     * none
+     * @see \ref lsb_hostinfo
+     * @see \ref lsb_queueinfo
+     */
+    public static native userInfoEnt.ByReference lsb_userinfo(Pointer users, IntByReference numUsers);
+
+    /**
+     * \page lsb_hostgrpinfo lsb_hostgrpinfo
+     * \brief Returns LSF host group membership.
+     * <p/>
+     * \ref lsb_hostgrpinfo gets LSF host group membership.
+     * <p/>
+     * LSF host group is defined in the configuration file lsb.hosts.
+     * <p/>
+     * The storage for the array of groupInfoEnt structures will be reused by
+     * the next call.
+     * <p/>
+     * <b>\#include <lsf/lsbatch.h>
+     * <p/>
+     * groupInfoEnt.ByReference lsb_hostgrpinfo (String[] groups,IntByReference numGroups,
+     * int options)</b>
+     *
+     * @param options The bitwise inclusive OR of some of the following flags:
+     *                \n GRP_RECURSIVE
+     *                \n Expand the group membership recursively. That is, if a member of a
+     *                group is itself a group, give the names of its members recursively, rather
+     *                than its name, which is the default.
+     *                \n GRP_ALL
+     *                \n Get membership of all groups.
+     *                <p/>
+     *                <b>Data Structures:</b>
+     *                \par
+     *                groupInfoEnt
+     *                \n userShares
+     *                <p/>
+     *                <b>Define Statements:</b>
+     *                \par
+     *                \ref group_membership_option
+     *                \n \ref group_define
+     * @return null \n
+     *         Function failed.
+     *         <p/>
+     *         <b>Errors:</b>
+     *         \par
+     *         On failure, returns null and sets lsberrno to indicate the error. If there
+     *         are invalid groups specified, the function returns the groups up to the
+     *         invalid ones and then sets lsberrno to LSBE_BAD_GROUP, which means that
+     *         the specified (*groups)[*numGroups] is not a group known to the LSF system.
+     *         If the first group specified is invalid, the function returns null.
+     *         <p/>
+     *         <b>Equivalent line commands:</b>
+     *         \par
+     *         none
+     *         <p/>
+     *         <b>Files:</b>
+     *         \par
+     *         $LSB_CONFDIR/cluster_name/configdir/lsb.hosts \n
+     *         $LSB_CONFDIR/cluster_name/configdir/lsb.users
+     * @param groups An array of group names.
+     * @param numGroups The number of group names.* numGroups will be updated
+     * to the actual number of groups when this call returns.
+     * @see \ref lsb_usergrpinfo
+     */
+    public static native groupInfoEnt.ByReference lsb_hostgrpinfo(Pointer groups, IntByReference numGroups, int options);
+
+    /**
+     * \page lsb_usergrpinfo lsb_usergrpinfo
+     * \brief Returns LSF user group membership.
+     * <p/>
+     * \ref lsb_usergrpinfo gets LSF user group membership.
+     * LSF user group is defined in the configuration file lsb.users.
+     * The storage for the array of groupInfoEnt structures will be reused by
+     * the next call.
+     * <p/>
+     * <b>\#include <lsf/lsbatch.h>
+     * <p/>
+     * groupInfoEnt.ByReference lsb_usergrpinfo (String[] groups,
+     * IntByReference numGroups, int options)</b>
+     *
+     * @param options The bitwise inclusive OR of some of flags in \ref group_membership_option
+     *                <p/>
+     *                <b>Data Structures:</b>
+     *                \par
+     *                groupInfoEnt
+     *                <p/>
+     *                <b>Define Statements:</b>
+     *                \par
+     *                \ref group_membership_option
+     *                \n \ref group_define
+     * @return null \n
+     *         Function failed.
+     *         <p/>
+     *         \b Errors:
+     *         \par
+     *         On failure, returns null and sets lsberrno to indicate the error. If there
+     *         are invalid groups specified, the function returns the groups up to the
+     *         invalid ones. It then sets lsberrno to LSBE_BAD_GROUP, meaning that the specified
+     *         (*groups)[*numGroups] is not a group known to the LSF system. If the first
+     *         group is invalid, the function returns null.
+     *         <p/>
+     *         <b>Equivalent line commands:</b>
+     *         \par
+     *         none
+     *         <p/>
+     *         <b>Files:</b>
+     *         \par
+     *         $LSB_CONFDIR/cluster_name/configdir/lsb.hosts
+     *         \n $LSB_CONFDIR/cluster_name/configdir/lsb.users
+     * @param groups An array of group names.
+     * @param numGroups The number of group names.* numGroups will be updated
+     * to the actual number of groups when this call returns.
+     * @see \ref lsb_hostgrpinfo
+     */
+    public static native groupInfoEnt.ByReference lsb_usergrpinfo(Pointer groups, IntByReference numGroups, int options);
+
+    /**
+     * \page lsb_parameterinfo lsb_parameterinfo
+     * \brief Returns information about the LSF cluster.
+     * <p/>
+     * \ref lsb_parameterinfo gets information about the LSF cluster.
+     * <p/>
+     * The static storage for the parameterInfo structure is reused on the next call.
+     * <p/>
+     * <b>\#include <lsf/lsbatch.h>
+     * <p/>
+     * parameterInfo.ByReference lsb_parameterinfo(String[] names,
+     * IntByReference numUsers, int options)</b>
+     *
+     * @param options Reserved but not used; supply 0.
+     *                <p/>
+     *                <b>Data Structures:</b>
+     *                \par
+     *                \ref parameterInfo
+     *                <p/>
+     *                <b>Define Statements:</b>
+     *                \par
+     *                none
+     * @return null \n
+     *         Function failed.
+     *         <p/>
+     *         \b Errors:
+     *         \par
+     *         If the function fails, lsberrno is set to indicate the error.
+     *         <p/>
+     *         <b>Equivalent line command:</b>
+     *         \par
+     *         none
+     *         <p/>
+     *         \b Files:
+     *         \par
+     *         $LSB_CONFDIR/cluster_name/configdir/lsb.params
+     * @param names Reserved but not used; supply null.
+     * @param numUsers Reserved but not used; supply null.
+     * see none
+     */
+    public static native parameterInfo.ByReference lsb_parameterinfo(Pointer names, IntByReference numUsers, int options);
+
+    /**
+     * \page lsb_modify lsb_modify
+     * \brief  Modifies a submitted job's parameters.
+     * <p/>
+     * lsb_modify() allows for the modification of a submitted job's parameters.
+     * <p/>
+     * <b>\#include <lsf/lsbatch.h>
+     * <p/>
+     * long lsb_modify (submit.ByReference jobsubReq,
+     * submitReply.ByReference jobSubReply,
+     * long jobId)</b>
+     *
+     * @param jobId The job to be modified. If an element of a job array is to
+     *              be modified, use the array form jobID[i] where jobID is the job array name,
+     *              and i is the index value.
+     *              <p/>
+     *              <b>Data Structures:</b>
+     *              \par
+     *              \ref submit
+     *              \n \ref submitReply
+     *              <p/>
+     *              <b>Define Statements:</b>
+     *              \par
+     *              none
+     * @return long:-1 \n
+     *         Function failed.
+     *         <p/>
+     *         \b Errors:
+     *         \par
+     *         If the function fails, lsberrno is set to indicate the error.
+     *         <p/>
+     *         <b>Equivalent line command :</b>
+     *         \par
+     *         bmod
+     *         <p/>
+     *         \b Files:
+     *         \par
+     *         ${LSF_ENVDIR:-/etc}/lsf.conf
+     * @param jobSubReq Describes the requirements for job modification to the
+     * batch system. A job that does not meet these requirements is not submitted
+     * to the batch system and an error is returned.
+     * @param jobSubReply Describes the results of the job modification to the
+     * batch system.
+     * @see \ref lsb_submit
+     * @see \ref ls_info
+     * @see \ref ls_rtask
+     * @see \ref lsb_queueinfo
+     */
+    public static native long lsb_modify(submit jobSubReq, submitReply jobSubReply, long jobId);
+
+    public static native FloatByReference getCpuFactor(String string1, int int1); // NOTE(review): undocumented binding — presumably looks up a host's CPU factor by host/model name; confirm against lsf.h
+
+    /**
+     * \page lsb_suspreason lsb_suspreason
+     * \brief Explains why a job was suspended.
+     * <p/>
+     * Using the SBD, \ref lsb_suspreason explains why system-suspended and
+     * user-suspended jobs were suspended.
+     * <p/>
+     * <b>\#include <lsf/lsbatch.h>
+     * <p/>
+     * String lsb_suspreason(int reasons, int subreasons,
+     * loadIndexLog.ByReference ld)</b>
+     *
+     * @param reasons    Reasons a job suspends.
+     * @param subreasons If reasons is SUSP_LOAD_REASON, subreasons indicates
+     *                   the load indices that are out of bounds. The integer values for the load
+     *                   indices are found in lsf.h. If reasons is SUSP_RES_LIMIT, subreasons
+     *                   indicates the job's requirements for resource reservation are not satisfied.
+     *                   The integer values for the job's requirements for resource reservation are
+     *                   found in lsbatch.h.
+     *                   \n Subreasons a job suspends if reasons is SUSP_LOAD_REASON:
+     *                   - \b  R15S
+     *                   \n 15 second CPU run queue length
+     *                   - \b  R1M
+     *                   \n 1 minute CPU run queue length
+     *                   - \b  R15M
+     *                   \n 15 minute CPU run queue length
+     *                   - \b  UT
+     *                   \n 1 minute CPU utilization
+     *                   - \b  PG
+     *                   \n Paging rate
+     *                   - \b  IO
+     *                   \n Disk IO rate
+     *                   - \b LS
+     *                   \n Number of log in sessions
+     *                   - \b IT
+     *                   \n Idle time
+     *                   - \b TMP
+     *                   \n Available temporary space
+     *                   - \b SWP
+     *                   \n Available swap space
+     *                   - \b MEM
+     *                   \n Available memory
+     *                   - \b USR1
+     *                   \n USR1 is used to describe unavailable or out of bounds user defined load
+     *                   information of an external dynamic load index on execution hosts.
+     *                   - \b USR2
+     *                   \n USR2 is used to describe unavailable or out of bounds user defined load
+     *                   information of an external dynamic load index on execution hosts.
+     * @return null \n
+     *         The function failed. The reason code is bad.
+     *         <p/>
+     *         \b Errors:
+     *         \par
+     *         No error handling
+     *         <p/>
+     *         <b>Equivalent line commands:</b>
+     *         \par
+     *         bjobs -s
+     *         <p/>
+     *         <b>Environment Variable:</b>
+     *         \par
+     *         LSB_SUSP_REASONS
+     *         <p/>
+     *         <b>Files:</b>
+     *         \par
+     *         $LSB_CONFDIR/cluster_name/configdir/lsb.queues \n
+     *         $LSB_SHAREDIR/cluster_name/logdir/lsb.events
+     * @param ld When reasons is SUSP_LOAD_REASON, ld is used to determine the
+     * name of any external load indices. ld uses the most recent load index log
+     * in the lsb.events file.
+     * <p/>
+     * <b>Data Structures:</b>
+     * \par
+     * loadIndexLog
+     * <p/>
+     * <b>Define Statements:</b>
+     * \par
+     * \ref suspending_reasons \n
+     * \ref suspending_subreasons
+     * @see \ref lsb_pendreason
+     */
+    public static native String lsb_suspreason(int reasons, int subreasons, loadIndexLog ld);
+
+    /**
+     * \page lsb_pendreason  lsb_pendreason
+     * \brief Explains why a job is pending.
+     * <p/>
+     * Use \ref lsb_pendreason to determine why a job is pending. Each pending reason is
+     * associated with one or more hosts.
+     * <p/>
+     * <b>\#include <lsf/lsbatch.h>
+     * <p/>
+     * String lsb_pendreason (int numReasons, IntByReference rsTb,
+     * jobInfoHead.ByReference jInfoH,
+     * loadIndexLog.ByReference ld, int clusterId)</b>
+     *
+     * @param numReasons The number of reasons in the rsTb reason table.
+     * @param clusterId  MultiCluster cluster ID. If clusterId is greater than or
+     *                   equal to 0, the job is a pending remote job, and \ref lsb_pendreason checks for
+     *                   host_name\@cluster_name. If host name is needed, it should be found in
+     *                   jInfoH->remoteHosts. If the remote host name is not available, the constant
+     *                   string remoteHost is used.
+     *                   <p/>
+     *                   <b>Data Structures:</b>
+     *                   \par
+     *                   \ref jobInfoHead
+     *                   \n \ref loadIndexLog
+     *                   <p/>
+     *                   <b>Define Statements:</b>
+     *                   \par
+     *                   \ref pending_reasons
+     *                   \n \ref suspending_reasons
+     *                   \n \ref suspending_subreasons
+     * @return null \n
+     *         The function fails. The reason code is bad.
+     *         <p/>
+     *         \b Errors:
+     *         \par
+     *         If no PEND reason is found, the function fails and lsberrno is set to indicate the error.
+     *         <p/>
+     *         <b>Equivalent line command:</b>
+     *         \par
+     *         bjobs -p
+     *         <p/>
+     *         \b Files:
+     *         \par
+     *         ${LSF_ENVDIR:-/etc}/lsf.conf
+     * @param rsTb The reason table. Each entry in the table contains one of \ref pending_reasons
+     * @param jInfoH jInfoH contains job information.
+     * @param ld From \ref lsb_suspreason, when reasons is SUSP_LOAD_REASON, ld is used to
+     * determine the name of any external load indices. ld uses the most recent load
+     * index log in the lsb.events file.
+     * @see \ref lsb_geteventrec
+     */
+    public static native String lsb_pendreason(int numReasons, IntByReference rsTb, jobInfoHead jInfoH, loadIndexLog ld, int clusterId);
+
+    /**
+     * \page lsb_calendarinfo lsb_calendarinfo
+     * \brief Gets information about calendars defined in the batch system.
+     * <p/>
+     * \ref lsb_calendarinfo gets information about calendars defined in the batch system.
+     * <p/>
+     * On success, this routine returns a pointer to an array of calendarInfoEnt
+     * structures which stores the information about the returned calendars and
+     * numCalendars gives number of calendars returned. On failure null is returned
+     * and lsberrno is set to indicate the error.
+     * <p/>
+     * <b>\#include <lsf/lsbatch.h>
+     * <p/>
+     * calendarInfoEnt.ByReference lsb_calendarinfo(String[] calendars,
+     * IntByReference numCalendars, String user)</b>
+     *
+     * @return null
+     *         \n Function failed.
+     *         <p/>
+     *         \b Errors:
+     *         \par
+     *         If the function fails, lsberrno is set to indicate the error.
+     *         <p/>
+     *         <b>Equivalent line commands:</b>
+     *         \par
+     *         none
+     *         <p/>
+     *         <b>Files:</b>
+     *         \par
+     *         none
+     * @param calendars calendars is a pointer to an array of calendar names.
+     * @param numCalendars numCalendars gives the number of calendar names. If
+     * * numCalendars is 0, then information about all calendars is returned.
+     * By default, only the invokers calendars are considered.
+     * @param user Setting the user parameter will cause the given user's calendars
+     * to be considered. Use the reserved user name all to get calendars of all users.
+     * <p/>
+     * <b>Data Structures:</b>
+     * \par
+     * calendarInfoEnt
+     * <p/>
+     * <b>Define Statements:</b>
+     * \par
+     * none
+     * @see \ref lsb_calendarop
+     */
+    public static native calendarInfoEnt.ByReference lsb_calendarinfo(Pointer calendars, IntByReference numCalendars, String user);
+
+    public static native int lsb_calExprOccs(String string1, int int1, int int2, String string2, PointerByReference int3); // NOTE(review): undocumented binding — appears to evaluate a calendar time expression for occurrences; confirm param meanings against lsbatch.h
+
+    /**
+     * \page lsb_calendarop lsb_calendarop
+     * \brief Adds, modifies or deletes a calendar.
+     * <p/>
+     * \ref lsb_calendarop is used to add, modify or delete a calendar. The oper
+     * parameter is one of CALADD, CALMOD, or CALDEL. When the operation CALADD
+     * is specified, the first element of the names array is used as the name of
+     * the calendar to add. The desc and calExpr parameters should point to the
+     * description string and the time expression list, respectively. See bcaladd()
+     * for a description of time expressions.
+     * <p/>
+     * CALMOD permits the modification of the
+     * description or time expression list associated with an existing calendar. The
+     * first name in the names array indicates the calendar to be modified. The desc
+     * and calExpr parameters can be set to the updated value or to null to
+     * indicate that the existing value should be maintained.
+     * <p/>
+     * If the operation is
+     * CALDEL then the names parameter points to an array of calendar names to be
+     * deleted. numNames gives the number of names in the array. options is
+     * reserved for the future use.
+     * <p/>
+     * <b>\#include <lsf/lsbatch.h>
+     * <p/>
+     * IntByReference lsb_calendarop(int oper, int numNames, String[] names, byte
+     * * desc, String calExpr, int options, String[] badStr)</b>
+     *
+     * @param oper     One of CALADD, CALMOD, or CALDEL. Depending on which one is
+     *                 chosen, adds, modifies, or deletes a calendar.Defined in \ref calendar_command.
+     * @param numNames The number of names in the array.
+     * @param options  Currently unused.
+     * @return int:-1
+     *         \n Function failed.
+     *         <p/>
+     *         \b Errors:
+     *         \par
+     *         If the function fails, lsberrno is set to indicate the error. If error
+     *         is related to bad calendar name or time expression, the routine returns
+     *         the name or expression in badStr.
+     *         <p/>
+     *         <b>Equivalent line commands:</b>
+     *         \par
+     *         none
+     *         <p/>
+     *         <b>Files:</b>
+     *         \par
+     *         none
+     * @param names Depending on oper, it defines the name of the calendar is going
+     * to be added, modified or deleted.
+     * @param desc The calendar's description list.
+     * @param calExpr A calendar expression.
+     * @param badStr Return from mbatchd indicating bad name or event time of calendar.
+     * <p/>
+     * <b>Data Structures:</b>
+     * \par
+     * none
+     * <p/>
+     * <b>Define Statements:</b>
+     * \par
+     * \ref calendar_command
+     * @see \ref lsb_calendarinfo
+     */
+    public static native int lsb_calendarop(int oper, int numNames, Pointer names, String desc, String calExpr, int options, String badStr);
+
+    /**
+     * \page lsb_puteventrec lsb_puteventrec
+     * \brief Puts information of an eventRec structure pointed to by logPtr
+     * into a log file.
+     * <p/>
+     * \ref lsb_puteventrec puts information of an eventRec structure pointed to by
+     * logPtr into a log file. log_fp is a pointer pointing to the log file name
+     * that could be either event a log file or job log file.
+     * <p/>
+     * See \ref lsb_geteventrec for detailed information about parameters.
+     * <p/>
+     * <b>\#include <lsf/lsbatch.h>
+     * <p/>
+     * int lsb_puteventrec(Pointer log_fp, eventRec.ByReference logPtr)</b>
+     *
+     * @return int:-1 \n
+     *         Function failed.
+     *         <p/>
+     *         \b Errors:
+     *         \par
+     *         If the function fails, lsberrno is set to indicate the error.
+     *         <p/>
+     *         <b>Equivalent line commands:</b>
+     *         \par
+     *         none
+     *         <p/>
+     *         <b>Files:</b>
+     *         \par
+     *         $LSB_SHAREDIR/cluster_name/logdir/lsb.events
+     * @param logPtr The eventRec structure pointed to by logPtr into a log file.
+     * @param log_fp A pointer pointing to the log file name that could be either
+     * event a log file or job log file.
+     * <p/>
+     * <b>Data Structures:</b>
+     * \par
+     * eventRec
+     * \n eventLog
+     * \n xFile
+     * \n jobAttrSetLog
+     * \n logSwitchLog
+     * \n dataLoggingLog
+     * \n jgrpNewLog
+     * \n jgrpCtrlLog
+     * \n jgrpStatusLog
+     * \n jobNewLog
+     * \n jobModLog
+     * \n jobStartLog
+     * \n jobStartAcceptLog
+     * \n jobExecuteLog
+     * \n jobStatusLog
+     * \n sbdJobStatusLog
+     * \n sbdUnreportedStatusLog
+     * \n jobSwitchLog
+     * \n jobMoveLog
+     * \n chkpntLog
+     * \n jobRequeueLog
+     * \n jobCleanLog
+     * \n jobExceptionLog
+     * \n sigactLog
+     * \n migLog
+     * \n signalLog
+     * \n queueCtrlLog
+     * \n hostCtrlLog
+     * \n hgCtrlLog
+     * \n mbdStartLog
+     * \n mbdDieLog
+     * \n unfulfillLog
+     * \n jobFinishLog
+     * \n loadIndexLog
+     * \n calendarLog
+     * \n jobForwardLog
+     * \n jobAcceptLog
+     * \n statusAckLog
+     * \n jobMsgLog
+     * \n jobMsgAckLog
+     * \n jobOccupyReqLog
+     * \n jobVacatedLog
+     * \n jobForceRequestLog
+     * \n jobChunkLog
+     * \n jobExternalMsgLog
+     * \n rsvRes
+     * \n rsvFinishLog
+     * \n cpuProfileLog
+     * \n jobRunRusageLog
+     * \n slaLog
+     * \n perfmonLogInfo
+     * \n perfmonLog
+     * \n taskFinishLog
+     * \n eventEOSLog
+     * \n jobResizeNotifyStartLog
+     * \n jobResizeNotifyAcceptLog
+     * \n jobResizeNotifyDoneLog
+     * \n jobResizeReleaseLog
+     * \n jobResizeCancelLog
+     * \n jobResizeLog
+     * \n jRusage
+     * <p/>
+     * <b>Define Statements:</b>
+     * \par
+     * \ref event_types
+     * \n \ref defs_lsb_XF_OP
+     * \n \ref jobgroup_controltypes
+     * \n \ref signal_action
+     * @see \ref lsb_geteventrec
+     */
+    public static native int lsb_puteventrec(Pointer log_fp, eventRec logPtr); // fixed swapped param names: C API is lsb_puteventrec(FILE *log_fp, struct eventRec *logPtr) — first arg is the log stream, second the event record
+
+    public static native int lsb_puteventrecRaw(Pointer pointer1, eventRec eventRec1, String string1); // NOTE(review): undocumented variant of lsb_puteventrec — presumably writes the record with a raw/preformatted string; confirm
+
+    /**
+     * \page lsb_geteventrec lsb_geteventrec
+     * \brief Get an event record from a log file
+     * <p/>
+     * \ref lsb_geteventrec returns an eventRec from a log file.
+     * <p/>
+     * The storage for the eventRec structure returned by \ref lsb_geteventrec will be
+     * reused by the next call.
+     * <p/>
+     * <b>\#include <lsf/lsbatch.h>
+     * <p/>
+     * eventRec.ByReference lsb_geteventrec(Pointer  log_fp,IntByReference  lineNum)</b>
+     *
+     * @return null \n
+     *         Function failed.If there are no more records, returns null and sets
+     *         lsberrno to LSBE_EOF.
+     *         <p/>
+     *         \b Errors:
+     *         \par
+     *         On failure, lsberrno is set to indicate the error.
+     *         <p/>
+     *         <b>Equivalent line commands:</b>
+     *         \par
+     *         none
+     *         <p/>
+     *         <b>Files:</b>
+     *         \par
+     *         $LSB_SHAREDIR/cluster_name/logdir/lsb.acct
+     *         \n $LSB_SHAREDIR/cluster_name/logdir/lsb.events
+     *         \n $LSB_SHAREDIR/cluster_name/logdir/lsb.rsv.ids
+     *         \n $LSB_SHAREDIR/cluster_name/logdir/lsb.rsv.state
+     * @param log_fp Either an event log file or a job log file.
+     * @param lineNum The number of the event record.
+     * <p/>
+     * <b>Data Structures:</b>
+     * \par
+     * eventRec
+     * \n eventLog
+     * \n xFile
+     * \n jobAttrSetLog
+     * \n logSwitchLog
+     * \n dataLoggingLog
+     * \n jgrpNewLog
+     * \n jgrpCtrlLog
+     * \n jgrpStatusLog
+     * \n jobNewLog
+     * \n jobModLog
+     * \n jobStartLog
+     * \n jobStartAcceptLog
+     * \n jobExecuteLog
+     * \n jobStatusLog
+     * \n sbdJobStatusLog
+     * \n sbdUnreportedStatusLog
+     * \n jobSwitchLog
+     * \n jobMoveLog
+     * \n chkpntLog
+     * \n jobRequeueLog
+     * \n jobCleanLog
+     * \n jobExceptionLog
+     * \n sigactLog
+     * \n migLog
+     * \n signalLog
+     * \n queueCtrlLog
+     * \n hostCtrlLog
+     * \n hgCtrlLog
+     * \n mbdStartLog
+     * \n mbdDieLog
+     * \n unfulfillLog
+     * \n jobFinishLog
+     * \n loadIndexLog
+     * \n calendarLog
+     * \n jobForwardLog
+     * \n jobAcceptLog
+     * \n statusAckLog
+     * \n jobMsgLog
+     * \n jobMsgAckLog
+     * \n jobOccupyReqLog
+     * \n jobVacatedLog
+     * \n jobForceRequestLog
+     * \n jobChunkLog
+     * \n jobExternalMsgLog
+     * \n rsvRes
+     * \n rsvFinishLog
+     * \n cpuProfileLog
+     * \n jobRunRusageLog
+     * \n slaLog
+     * \n perfmonLogInfo
+     * \n perfmonLog
+     * \n taskFinishLog
+     * \n eventEOSLog
+     * \n jobResizeNotifyStartLog
+     * \n jobResizeNotifyAcceptLog
+     * \n jobResizeNotifyDoneLog
+     * \n jobResizeReleaseLog
+     * \n jobResizeCancelLog
+     * \n jobResizeLog
+     * \n jRusage
+     * <p/>
+     * <b>Define Statements:</b>
+     * \par
+     * \ref event_types
+     * \n \ref defs_lsb_XF_OP
+     * \n \ref jobgroup_controltypes
+     * \n \ref signal_action
+     * #see \ref lsb_hostcontrol
+     * #see \ref lsb_movejob
+     * #see \ref lsb_pendreason
+     * #see \ref lsb_puteventrec
+     * #see \ref lsb_queuecontrol
+     * #see \ref lsb_readjobinfo
+     * #see \ref lsb_submit
+     * #see \ref lsb_suspreason
+     */
+    // Reads the next event record from an open log file (see Javadoc above); the
+    // returned storage is reused by the next call, so copy it if it must persist.
+    public static native eventRec.ByReference lsb_geteventrec(Pointer log_fp, IntByReference lineNum);
+
+    // Undocumented variants of lsb_geteventrec; parameter roles mirror the base call.
+    // NOTE(review): exact semantics (decryption, extended record formats) not shown here —
+    // confirm against the native lsbatch.h before relying on them.
+    public static native eventRec.ByReference lsb_geteventrec_decrypt(Pointer pointer1, IntByReference int1);
+
+    public static native eventRec.ByReference lsb_geteventrecord(Pointer pointer1, IntByReference int1);
+
+    public static native eventRec.ByReference lsb_geteventrecordEx(Pointer pointer1, IntByReference int1, Pointer stringArray1);
+
+    // Parses a job-new event from its string form; returns null on failure (presumed).
+    public static native eventRec.ByReference lsb_getnewjob_from_string(String string1);
+
+    public static native eventInfoEnt.ByReference lsb_eventinfo(Pointer stringArray1, IntByReference int1, String string1);
+
+    /**
+     * \page lsb_sharedresourceinfo lsb_sharedresourceinfo
+     * \brief Returns the requested shared resource information in dynamic values.
+     * <p/>
+     * \ref lsb_sharedresourceinfo returns the requested shared resource information in
+     * dynamic values. The result of this call is a chained data structure as
+     * defined in <lsf/lsbatch.h>, which contains requested information.
+     * <p/>
+     * <b>\#include <lsf/lsbatch.h>
+     * <p/>
+     * LSB_SHARED_RESOURCE_INFO_T.ByReference lsb_sharedresourceinfo(
+     * String[] resources,
+     * IntByReference numResources,
+     * String hostName, int options)</b>
+     *
+     * @param options options is reserved for future use. Currently, it should be set to 0.
+     *                <p/>
+     *                <b>Data Structures:</b>
+     *                \par
+     *                lsbSharedResourceInfo
+     *                \n lsbSharedResourceInstance
+     *                <p/>
+     *                <b>Define Statements:</b>
+     *                \par
+     *                none
+     * @return null \n
+     *         Function failed.
+     *         <p/>
+     *         \b Errors:
+     *         \par
+     *         If the function fails, lsberrno is set to indicate the error.
+     *         <p/>
+     *         <b>Equivalent line commands:</b>
+     *         \par
+     *         none
+     *         <p/>
+     *         <b>Files:</b>
+     *         \par
+     *         $LSF_CONFDIR/lsf.shared
+     *         \n $LSF_CONFDIR/lsf.cluster.cluster_name
+     * @param resources resources is a null-terminated string array storing the
+     * requested resource names. Setting resources to null returns all
+     * shared resources.
+     * @param numResources numResources is an input/output parameter. On input
+     * it indicates how many resources are requested. Value 0 means requesting
+     * all shared resources. On return it contains qualified resource number.
+     * @param hostName hostName is a string containing a host name. Only shared resource
+     * available on the specified host will be returned. If hostName is a null,
+     * shared resource available on all hosts will be returned.
+     * #see \ref ls_sharedresourceinfo
+     */
+    // Returns requested shared-resource information (see Javadoc above). The Pointer
+    // points at a chained LSB_SHARED_RESOURCE_INFO_T structure; null means failure.
+    public static native Pointer lsb_sharedresourceinfo(Pointer resources, IntByReference numResources, String hostName, int options);
+
+    /**
+     * \page lsb_geteventrecbyline lsb_geteventrecbyline
+     * Parse an event line and put the result in an event record structure.
+     * The \ref lsb_geteventrecbyline function parses an event line and puts the result
+     * in an event record structure.
+     * <p/>
+     * If the line to be parsed is a comment line, \ref lsb_geteventrecbyline sets errno to
+     * bad event format and logs an error.
+     * <p/>
+     * <b>\#include <lsf/lsbatch.h>
+     * <p/>
+     * int lsb_geteventrecbyline(String line, eventRec.ByReference logRec)</b>
+     *
+     * @return int:-1
+     *         \n Function failed and lserrno was set.
+     *         <p/>
+     *         \b Errors:
+     *         \par
+     *         none
+     *         <p/>
+     *         <b>Equivalent line commands:</b>
+     *         \par
+     *         none
+     *         <p/>
+     *         <b>Files:</b>
+     *         \par
+     *         none
+     * @param line
+     * Buffer containing a line of event text string
+     * @param logRec
+     * Pointer to an eventRec structure
+     * <p/>
+     * <b>Data Structures:</b>
+     * \par
+     * eventRec
+     * \n eventLog
+     * \n xFile
+     * \n jobAttrSetLog
+     * \n logSwitchLog
+     * \n dataLoggingLog
+     * \n jgrpNewLog
+     * \n jgrpCtrlLog
+     * \n jgrpStatusLog
+     * \n jobNewLog
+     * \n jobModLog
+     * \n jobStartLog
+     * \n jobStartAcceptLog
+     * \n jobExecuteLog
+     * \n jobStatusLog
+     * \n sbdJobStatusLog
+     * \n sbdUnreportedStatusLog
+     * \n jobSwitchLog
+     * \n jobMoveLog
+     * \n chkpntLog
+     * \n jobRequeueLog
+     * \n jobCleanLog
+     * \n jobExceptionLog
+     * \n sigactLog
+     * \n migLog
+     * \n signalLog
+     * \n queueCtrlLog
+     * \n hostCtrlLog
+     * \n hgCtrlLog
+     * \n mbdStartLog
+     * \n mbdDieLog
+     * \n unfulfillLog
+     * \n jobFinishLog
+     * \n loadIndexLog
+     * \n calendarLog
+     * \n jobForwardLog
+     * \n jobAcceptLog
+     * \n statusAckLog
+     * \n jobMsgLog
+     * \n jobMsgAckLog
+     * \n jobOccupyReqLog
+     * \n jobVacatedLog
+     * \n jobForceRequestLog
+     * \n jobChunkLog
+     * \n jobExternalMsgLog
+     * \n rsvRes
+     * \n rsvFinishLog
+     * \n cpuProfileLog
+     * \n jobRunRusageLog
+     * \n slaLog
+     * \n perfmonLogInfo
+     * \n perfmonLog
+     * \n taskFinishLog
+     * \n eventEOSLog
+     * \n jobResizeNotifyStartLog
+     * \n jobResizeNotifyAcceptLog
+     * \n jobResizeNotifyDoneLog
+     * \n jobResizeReleaseLog
+     * \n jobResizeCancelLog
+     * \n jobResizeLog
+     * \n jRusage
+     * <p/>
+     * <b>Define Statements:</b>
+     * \par
+     * none
+     * <p/>
+     * <b>Pre-Conditions:</b>
+     * \par
+     * The event record structure must have been initialized outside the
+     * \ref lsb_geteventrecbyline function.
+     * see none
+     */
+
+    // Parses one event-log text line into the caller-initialized logRec (see Javadoc above).
+    public static native int lsb_geteventrecbyline(String line, eventRec logRec);
+/* Retain lsb_connect for now */
+
+    // NOTE(review): the parameter 'a' is deliberately ignored; this wrapper simply
+    // delegates to lsb_rcvconnect() and is retained for source compatibility with
+    // existing callers (per the comment above).
+    public static int lsb_connect(int a) {
+        return lsb_rcvconnect();
+    }
+
+    public static native int lsb_rcvconnect();
+
+    // Send/receive a batch message with the given header; the int argument is
+    // presumably a length or options flag — confirm against lsbatch.h.
+    public static native int lsb_sndmsg(lsbMsgHdr lsbMsgHdr1, String string1, int int1);
+
+    public static native int lsb_rcvmsg(lsbMsgHdr lsbMsgHdr1, Pointer stringArray1, int int1);
+
+    /**
+     * \page  lsb_runjob lsb_runjob
+     * Starts a batch job immediately on a set of specified hosts.
+     * \ref lsb_runjob starts a batch job immediately on a set of specified hosts.
+     * The job must have been submitted and is in PEND or FINISHED status. Only
+     * the LSF administrator or the owner of the job can start the job. If the
+     * options is set to RUNJOB_OPT_NOSTOP, then the job will not be suspended by
+     * the queue's RUNWINDOW,loadStop and STOP_COND and the hosts' RUNWINDOW and
+     * loadStop conditions. By default, these conditions apply to the job as do
+     * to other normal jobs.
+     * <p/>
+     * Any program using this API must be setuid to root
+     * if LSF_AUTH is not defined in the lsf.conf file.
+     * <p/>
+     * <b>\#include <lsf/lsbatch.h>
+     * <p/>
+     * int lsb_runjob(runJobRequest.ByReference runJobRequest)</b>
+     *
+     * @param runJobRequest The job-starting request.
+     *                      <p/>
+     *                      <b>Data Structures:</b>
+     *                      \par
+     *                      runJobRequest
+     *                      <p/>
+     *                      <b>Define Statements:</b>
+     *                      \par
+     *                      \ref runjob_option
+     * @return int:-1 \n
+     *         Function failed.
+     *         <p/>
+     *         \b Errors:
+     *         \par
+     *         If the function fails, lsberrno is set to indicate the error.
+     *         <p/>
+     *         <b>Equivalent line commands:</b>
+     *         \par
+     *         brun
+     *         <p/>
+     *         \b Files:
+     *         \par
+     *         ${LSF_ENVDIR:-/etc}/lsf.conf
+     * see none
+     */
+    // Starts a pending/finished batch job immediately on the hosts named in the
+    // request (see Javadoc above); returns -1 on failure with lsberrno set.
+    public static native int lsb_runjob(runJobRequest runJobRequest);
+
+/* API for job group */
+
+    // Job-group management calls. The trailing Pointer arguments presumably receive
+    // a jgrpReply describing the outcome — TODO confirm against the native header.
+    public static native int lsb_addjgrp(jgrpAdd jgrpAdd1, Pointer jgrpReply1);
+
+    public static native int lsb_modjgrp(jgrpMod jgrpMod1, Pointer jgrpReply1);
+
+    public static native int lsb_holdjgrp(String string1, int int1, Pointer jgrpReply1);
+
+    public static native int lsb_reljgrp(String string1, int int1, Pointer jgrpReply1);
+
+    public static native int lsb_deljgrp(String string1, int int1, Pointer jgrpReply1);
+
+    public static native int lsb_deljgrp_ext(jgrpCtrl jgrpCtrl1, Pointer jgrpReply1);
+
+    // Lists job groups / service classes; the IntByReference receives the entry count (presumed).
+    public static native jgrp.ByReference lsb_listjgrp(IntByReference int1);
+
+    public static native serviceClass.ByReference lsb_serviceClassInfo(IntByReference int1);
+
+/* API for Application Encapsulation */
+
+    public static native appInfoEnt.ByReference lsb_appInfo(IntByReference int1);
+
+    public static native void lsb_freeAppInfoEnts(int int1, appInfoEnt appInfoEnt1);
+
+/* routine to convert the job id to string */
+
+    public static native String lsb_jobid2str(long long1);
+
+    // Reentrant variant: formats into the caller-supplied byte buffer.
+    public static native String lsb_jobid2str_r(long long1, byte[] byte1);
+
+    public static native String lsb_jobidinstr(long long1);
+/* routine to compose and decompose 64bit jobId */
+
+    // Packs (jobId, index) into a 64-bit id / unpacks a 64-bit id into its parts.
+    public static native void jobId32To64(LongByReference long1, int int1, int int2);
+
+    public static native void jobId64To32(long long1, IntByReference int1, IntByReference int2);
+/* API for job attribute operations */
+
+    public static native int lsb_setjobattr(int int1, jobAttrInfoEnt jobAttrInfoEnt1);
+
+/* API for remote task execution */
+
+    public static native long lsb_rexecv(int int1, Pointer stringArray1, Pointer stringArray2, IntByReference int2, int int3);
+
+
+    // Native callback type used by lsb_catch: invoked with an opaque pointer payload.
+    public static interface lsb_catchCallback extends Callback {
+        int invoke(Pointer pointer);
+    }
+
+    // Registers a handler for / raises a named batch exception (presumed pairing —
+    // confirm against lsbatch.h).
+    public static native int lsb_catch(String string1, lsb_catchCallback callback);
+
+    public static native void lsb_throw(String string1, Pointer pointer1);
+
+/* API for job external message */
+
+    /**
+     *  \page lsb_postjobmsg lsb_postjobmsg
+     *  \brief Sends messages and data posted to a job.
+     *
+     *  Use \ref lsb_postjobmsg to post a message and data to a job, open a TCP
+     *  connection, and transfer attached message and data from the mbatchd. Use
+     *  \ref lsb_readjobmsg to display messages and copy data files posted by
+     *  \ref lsb_postjobmsg.
+     *
+     *  While you can post multiple messages and attached data files to a job,
+     *  you must call \ref lsb_postjobmsg for each message and attached data file
+     *  you want to post. By default, \ref lsb_postjobmsg posts a message to position
+     *  0 of the message index (msgId) (see PARAMETERS) of the specified job.
+     *  To post additional messages to a job, call \ref lsb_postjobmsg and increment
+     *  the message index.
+     *
+     *  \ref lsb_readjobmsg reads posted job messages by their
+     *  position in the message index.
+     *
+     *  If a data file is attached to a message and the flag EXT_ATTA_POST is set,
+     *  use the JOB_ATTA_DIR parameter in lsb.params(5) to specify the directory
+     *  where attachment data files are saved. The directory must have at least 1MB
+     *  of free space. The mbatchd checks for available space in the job attachment
+     *  directory before transferring the file.
+     *
+     *  Use the MAX_JOB_ATTA_SIZE parameter in lsb.params(5) to set a maximum size
+     *  for job message attachments.
+     *
+     *  Users can only send messages and data from their own jobs. Root and LSF
+     *  administrators can also send messages of jobs submitted by other users, but
+     *  they cannot attach data files to jobs owned by other users.
+     *
+     *  You can post messages and data to a job until it is cleaned from the system.
+     *  You cannot send messages and data to finished or exited jobs.
+     *
+     *  <b>\#include <lsf/lsbatch.h> \n
+     *     \#include <time.h>
+     *
+     *  int lsb_postjobmsg(jobExternalMsgReq.ByReference jobExternalMsg,
+     *                    String filename)</b>
+     *
+     *  @param jobExternalMsg This structure contains the information required to
+     *  define an external message of a job.
+     *  @param filename Name of attached data file. If no file is attached, use null.
+     *
+     *  <b>Data Structures:</b>
+     *  \par
+     *  \ref jobExternalMsgReq
+     *
+     *  <b>Define Statements:</b>
+     *  \par
+     *  \ref external_msg_post
+     *
+     *  @return int:value \n
+     *  The successful function returns a socket number.
+     * return int:0 \n
+     *  The EXT_ATTA_POST bit of options is not set or there is no attached data.
+     *  return int:-1 \n
+     *  The function failed.
+     *
+     *  \b Errors:
+     *  \par
+     *  If the function fails, lserrno is set to indicate the error.
+     *
+     *  <b>Equivalent line command:</b>
+     *  \par
+     *  bpost
+     *
+     *  \b Files:
+     *  \par
+     *  $LSB_CONFDIR/cluster_name/configdir/lsb.params
+     *  \n $JOB_ATTA_DIR
+     *  \n $LSB_SHAREDIR/info
+     *
+     * #see \ref lsb_readjobmsg
+     *
+     */
+
+    // Posts a message (and optional attached data file) to a job (see Javadoc above);
+    // pass null for filename when no file is attached.
+    public static native int lsb_postjobmsg(jobExternalMsgReq jobExternalMsg, String filename);
+    /**
+     *  \page lsb_readjobmsg lsb_readjobmsg
+     *  \brief Reads messages and data posted to a job.
+     *
+     *  Use \ref lsb_readjobmsg to open a TCP connection, receive attached messages and
+     *  data from the mbatchd, and display the messages posted by \ref lsb_postjobmsg.
+     *
+     *  By default, \ref lsb_readjobmsg displays the message "no description" or the
+     *  message at index position 0 of the specified job. To read other messages,
+     *  choose another index position. The index is populated by \ref lsb_postjobmsg.
+     *
+     *  If a data file is attached to a message and the flag EXT_ATTA_READ is set,
+     *  \ref lsb_readjobmsg gets the message and copies its data file to the default
+     *  directory JOB_ATTA_DIR, overwriting the specified file if it already exists.
+     *  If there is no file attached, the system reports an error.
+     *
+     *  Users can only read messages and data from their own jobs. Root and LSF
+     *  administrators can also read messages of jobs submitted by other users,
+     *  but they cannot read data files attached to jobs owned by other users.
+     *
+     *  You can read messages and data from a job until it is cleaned from the
+     *  system. You cannot read messages and data from done or exited jobs.
+     *
+     *  <b>\#include <lsf/lsbatch.h> \n
+     *  \#include <time.h> \n
+     *  int lsb_readjobmsg(jobExternalMsgReq.ByReference jobExternalMsg,
+     *          jobExternalMsgReply.ByReference jobExternalMsgReply)</b>
+     *
+     *  @param jobExternalMsg the information required to define an external
+     *  message of a job.
+     *  @param jobExternalMsgReply the information required to define an
+     *  external message reply.
+     *
+     *  <b>Data Structures:</b>
+     *  \par
+     *  jobExternalMsgReq
+     *  \n jobExternalMsgReply
+     *
+     *  <b>Define Statements:</b>
+     *  \par
+     *  \ref external_msg_processing
+     *  \n \ref ext_data_status
+     *
+     *  @return int:value \n
+     *  The successful function returns a socket number.
+     *  return int:0 \n
+     *  The EXT_ATTA_READ bit of options is not set or there is no
+     *  attached data.
+     *  return int:-1 \n
+     *  The function failed.
+     *
+     *  \b Errors:
+     *  \par
+     *  If the function fails, lserrno is set to indicate the error.
+     *
+     *  <b>Equivalent line commands:</b>
+     *  \par
+     *  bread
+     *
+     *  <b>Files:</b>
+     *  \par
+     *  $LSB_CONFDIR/cluster_name/configdir/lsb.params
+     *  \n $JOB_ATTA_DIR
+     *  \n $LSB_SHAREDIR/info
+     * #see \ref lsb_postjobmsg
+     */
+
+    // Reads messages/data previously posted to a job via lsb_postjobmsg (see Javadoc above).
+    public static native int lsb_readjobmsg(jobExternalMsgReq jobExternalMsg, jobExternalMsgReply jobExternalMsgReply);
+
+/* API for symphony job information update in bulk mode */
+
+    public static native int lsb_bulkJobInfoUpdate(symJobStatusUpdateReqArray symJobStatusUpdateReqArray1, symJobStatusUpdateReplyArray symJobStatusUpdateReplyArray1);
+
+/* API for advance reservation */
+
+    /**
+     * \page lsb_addreservation lsb_addreservation
+     * \brief Makes an advance reservation.
+     * <p/>
+     * Use \ref lsb_addreservation to send a reservation request to mbatchd. If
+     * mbatchd grants the reservation, it issues the reservation ID. If mbatchd
+     * rejects the request, it issues null as the reservation ID.
+     * <p/>
+     * <b>\#include <lsf/lsbatch.h>
+     * <p/>
+     * int lsb_addreservation (addRsvRequest.ByReference request, String rsvId)</b>
+     *
+     * @return int:-1 \n
+     *         The reservation failed.
+     *         <p/>
+     *         \b Errors:
+     *         \par
+     *         On failure, lsberrno is set to indicate the error.
+     *         <p/>
+     *         <b>Equivalent line commands:</b>
+     *         \par
+     *         brsvadd
+     *         <p/>
+     *         <b>Files:</b>
+     *         \par
+     *         none
+     * @param request The reservation request
+     * @param rsvId Reservation ID returned from mbatchd. If the reservation
+     * fails, this is null. The
+     * memory for rsvid is allocated by the caller.
+     * <p/>
+     * <b>Data Structures:</b>
+     * \par
+     * addRsvRequest
+     * \n _rsvExecCmd_t
+     * \n _rsvExecEvent_t
+     * <p/>
+     * <b>Define Statements:</b>
+     * \par
+     * \ref reservation_option
+     * #see \ref lsb_removereservation
+     * #see \ref lsb_modreservation
+     * #see \ref lsb_reservationinfo
+     */
+    // Requests an advance reservation from mbatchd; rsvId is a caller-allocated
+    // buffer that receives the reservation ID (see Javadoc above).
+    public static native int lsb_addreservation(addRsvRequest request, String rsvId);
+
+    /**
+     * \page lsb_removereservation lsb_removereservation
+     * \brief Removes a reservation.
+     * <p/>
+     * Use \ref lsb_removereservation to remove a reservation. mbatchd removes the
+     * reservation with the specified reservation ID.
+     * <p/>
+     * <b>\#include <lsf/lsbatch.h>
+     * <p/>
+     * int lsb_removereservation(String rsvId)</b>
+     *
+     * @return int:-1 \n
+     *         The reservation removal failed.
+     *         <p/>
+     *         \b Errors:
+     *         \par
+     *         On failure, lsberrno is set to indicate the error.
+     *         <p/>
+     *         <b>Equivalent line commands:</b>
+     *         \par
+     *         brsvdel
+     *         <p/>
+     *         <b>Files:</b>
+     *         \par
+     *         none
+     * @param rsvId Reservation ID of the reservation that you wish to remove.
+     * <p/>
+     * <b>Data Structures:</b>
+     * \par
+     * none
+     * <p/>
+     * <b>Define Statements:</b>
+     * \par
+     * none
+     * #see \ref lsb_addreservation
+     * #see \ref lsb_modreservation
+     * #see \ref lsb_reservationinfo
+     */
+    // Removes the advance reservation with the given ID (see Javadoc above).
+    public static native int lsb_removereservation(String rsvId);
+
+    /**
+     * \page lsb_reservationinfo lsb_reservationinfo
+     * \brief Retrieve reservation information to display active advance reservations.
+     * <p/>
+     * Use \ref lsb_reservationinfo to retrieve reservation information from mbatchd.
+     * This function allocates memory that the caller should free.
+     * <p/>
+     * If the \ref lsb_reservationinfo function succeeds, it returns the reservation
+     * records pertaining to a particular reservation ID (rsvId) as an array of
+     * rsvInfoEnt structs.
+     * <p/>
+     * If rsvId is null, all reservation information will be returned. If a
+     * particular rsvId  is specified:
+     * \li If found, the reservation record pertaining to a particular rsvId is
+     * returned
+     * \li If not found, the number of reservation records is set to zero and
+     * the lsberrno is set appropriately
+     * <p/>
+     * <b>\#include <lsf/lsbatch.h>
+     * <p/>
+     * rsvInfoEnt.ByReference lsb_reservationinfo(String rsvId, IntByReference numEnts,
+     * int options)</b>
+     *
+     * @param options The parameter options is currently ignored.
+     *                <p/>
+     *                <b>Data Structures:</b>
+     *                \par
+     *                rsvInfoEnt
+     *                \n hostRsvInfoEnt
+     *                <p/>
+     *                <b>Define Statements:</b>
+     *                \par
+     *                none
+     * @return null
+     *         \n Function failed.
+     *         <p/>
+     *         \b Errors:
+     *         \par
+     *         On failure, lsberrno is set to indicate the error.
+     *         <p/>
+     *         <b>Equivalent line commands:</b>
+     *         \par
+     *         brsvs
+     *         <p/>
+     *         <b>Files:</b>
+     *         \par
+     *         none
+     * @param rsvId Reservation ID of the requested reservation.
+     * @param numEnts Number of reservation entries that mbatchd returns.
+     * #see \ref lsb_addreservation
+     * #see \ref lsb_modreservation
+     * #see \ref lsb_removereservation
+     */
+
+    // Retrieves reservation records (all reservations when rsvId is null); the
+    // returned memory is allocated by the library and should be freed by the caller
+    // (see Javadoc above).
+    public static native rsvInfoEnt.ByReference lsb_reservationinfo(String rsvId, IntByReference numEnts, int options);
+
+    // Helpers for reservation pre/post execution commands. NOTE(review): exact
+    // ownership/free semantics are not documented in this binding — confirm against lsbatch.h.
+    public static native int lsb_freeRsvExecCmd(Pointer _rsvExecCmd_tArray1);
+
+    public static native _rsvExecCmd_t.ByReference lsb_dupRsvExecCmd(_rsvExecCmd_t _rsvExecCmd_t1);
+
+    public static native int lsb_parseRsvExecOption(String string1, Pointer _rsvExecCmd_tArray1);
+
+    /**
+     * \page lsb_modreservation lsb_modreservation
+     * \brief Modifies an advance reservation.
+     * <p/>
+     * Use \ref lsb_modreservation to modify an advance reservation. mbatchd receives
+     * the modification request and modifies the reservation with the specified
+     * reservation ID.
+     * <p/>
+     * <b>\#include <lsf/lsbatch.h>
+     * <p/>
+     * int lsb_modreservation(modRsvRequest.ByReference request)</b>
+     *
+     * @return int:-1 \n
+     *         The reservation modification failed.
+     *         <p/>
+     *         \b Errors:
+     *         \par
+     *         On failure, lsberrno is set to indicate the error.
+     *         <p/>
+     *         <b>Equivalent line command:</b>
+     *         \par
+     *         brsvmod
+     *         <p/>
+     *         \b Files:
+     *         \par
+     *         none
+     * @param request modify reservation request.
+     * <p/>
+     * <b>Data Structures:</b>
+     * \par
+     * modRsvRequest
+     * \n addRsvRequest
+     * \n _rsvExecCmd_t
+     * <p/>
+     * <b>Define Statements:</b>
+     * \par
+     * none
+     * #see \ref lsb_addreservation
+     * #see \ref lsb_removereservation
+     * #see \ref lsb_reservationinfo
+     */
+
+    // Modifies an existing advance reservation identified inside the request (see Javadoc above).
+    public static native int lsb_modreservation(modRsvRequest request);
+
+/* routines for sorted integer list */
+    /*
+    sortIntList.ByReference  initSortIntList(int);
+    int insertSortIntList(sortIntList.ByReference , int);
+    sortIntList.ByReference  getNextSortIntList(sortIntList.ByReference , sortIntList.ByReference , IntByReference );
+    void freeSortIntList(sortIntList.ByReference );
+    int getMinSortIntList(sortIntList.ByReference , IntByReference );
+    int getMaxSortIntList(sortIntList.ByReference , IntByReference );
+    int getTotalSortIntList(sortIntList.ByReference );
+
+    int updateJobIdIndexFile (String string1, String string1, int);
+    */
+
+/* Structures and routine for obtaining subset of info about jobs
+*  This is being used by Maui integration.
+ */
+
+    // Request for a subset of job info, keyed by queue (Maui integration; see comment
+    // above). JNA Structure: field order below defines the native memory layout — do not reorder.
+    public static class jobExtschInfoReq extends Structure {
+        public static class ByReference extends jobExtschInfoReq implements Structure.ByReference {}
+        public static class ByValue extends jobExtschInfoReq implements Structure.ByValue {}
+        public jobExtschInfoReq() {}
+        public jobExtschInfoReq(Pointer p) { super(p); read(); }
+
+        // Number of entries in 'queues' (presumably queue names — confirm against native header).
+        public int qCnt;
+        public Pointer queues;
+    }
+
+
+
+    // Per-job status/rusage snapshot returned by getjobinfo4queues.
+    // JNA Structure: field order defines the native layout — do not reorder.
+    public static class jobExtschInfo extends Structure {
+        public static class ByReference extends jobExtschInfo implements Structure.ByReference {}
+        public static class ByValue extends jobExtschInfo implements Structure.ByValue {}
+        public jobExtschInfo() {}
+        public jobExtschInfo(Pointer p) { super(p); read(); }
+
+        public long jobId;
+        public int status;
+        // Native time_t of the last jRusage update (presumed from the name — confirm units).
+        public NativeLong jRusageUpdateTime;
+        public LibLsf.jRusage runRusage;
+    }
+
+
+
+    // Reply holding jobCnt entries; 'jobs' is a native array of jobExtschInfo pointers.
+    // JNA Structure: field order defines the native layout — do not reorder.
+    public static class jobExtschInfoReply extends Structure {
+        public static class ByReference extends jobExtschInfoReply implements Structure.ByReference {}
+        public static class ByValue extends jobExtschInfoReply implements Structure.ByValue {}
+        public jobExtschInfoReply() {}
+        public jobExtschInfoReply(Pointer p) { super(p); read(); }
+
+        public int jobCnt;
+        public PointerByReference jobs;
+    }
+
+
+
+    // Fetches job subset info for the queues in the request; pair with the free_*
+    // routines below to release native memory held by the request/reply.
+    public static native int getjobinfo4queues(jobExtschInfoReq jobExtschInfoReq1, jobExtschInfoReply jobExtschInfoReply1);
+
+    public static native void free_jobExtschInfoReply(jobExtschInfoReply jobExtschInfoReply1);
+
+    public static native void free_jobExtschInfoReq(jobExtschInfoReq jobExtschInfoReq1);
+
+/* For RFC 725 */
+
+    public static native String longer_strcpy(String dest, String src);
+
+/* Structures and API for job diagnostics.  These are applicable only if
+*  CONDENSE_PENDING_REASONS is enabled in lsb.params.
+ */
+
+    // Request for job diagnostics (only meaningful with CONDENSE_PENDING_REASONS
+    // enabled in lsb.params — see comment above).
+    // JNA Structure: field order defines the native layout — do not reorder.
+    public static class diagnoseJobReq extends Structure {
+        public static class ByReference extends diagnoseJobReq implements Structure.ByReference {}
+        public static class ByValue extends diagnoseJobReq implements Structure.ByValue {}
+        public diagnoseJobReq() {}
+        public diagnoseJobReq(Pointer p) { super(p); read(); }
+
+        // Number of job IDs pointed to by 'jobId' (presumably a native array of 64-bit IDs).
+        public int jobCnt;
+        public LongByReference jobId;
+    }
+
+
+
+    public static native int lsb_diagnosejob(diagnoseJobReq diagnoseJobReq1);
+
+    // Simulator status bit flags (see simStatusReply.simStatus below).
+    public static final int SIM_STATUS_RUN = 0x01;
+    public static final int SIM_STATUS_SUSPEND = 0x02;
+
+/* simulator status reply
+ */
+
+    // Simulator status reply (see comment above). simStatus carries the SIM_STATUS_*
+    // bit flags. JNA Structure: field order defines the native layout — do not reorder.
+    public static class simStatusReply extends Structure {
+        public static class ByReference extends simStatusReply implements Structure.ByReference {}
+        public static class ByValue extends simStatusReply implements Structure.ByValue {}
+        public simStatusReply() {}
+        public simStatusReply(Pointer p) { super(p); read(); }
+
+        public int simStatus;
+        // Native time_t of the current simulated time (presumed — confirm units).
+        public NativeLong curTime;
+    }
+
+
+
+    // Queries simulator status; release the returned structure with free_simStatusReply.
+    public static native simStatusReply.ByReference lsb_simstatus();
+
+    public static native void free_simStatusReply(simStatusReply simStatusReply1);
+
+    // Bit flags combined in host-query options (bhosts and related calls).
+/* batch command options flag for lease */
+    public static final int LSB_HOST_OPTION_EXPORT = 0x1;
+/* bhosts -x option */
+    public static final int LSB_HOST_OPTION_EXCEPT = 0x2;
+/* retrieve hosts that belong to batch partition */
+    public static final int LSB_HOST_OPTION_BATCH = 0x4;
+
+
+/* Display condensed host output */
+    public static final int LSB_HOST_OPTION_CONDENSED = 0x08;
+
+/* error codes, structures and routines for syntax check of RMS external scheduler options */
+
+/*  non-rms option shown up in RMS[] */
+    public static final int RMS_NON_RMS_OPTIONS_ERR = (-1);
+
+/*  nodes and ptile co-exist */
+    public static final int RMS_NODE_PTILE_ERR = (-2);
+
+/*  rails and railmask co-exist */
+    public static final int RMS_RAIL_RAILMASK_ERR = (-3);
+
+/*  nodes is out of range 1..LSB_RMS_MAXNUMNODES */
+    public static final int RMS_NODES_OUT_BOUND_ERR = (-4);
+
+/*  ptile is out of range 1..LSB_RMS_MAXPTILE */
+    public static final int RMS_PTILE_OUT_BOUND_ERR = (-5);
+
+/*  rails is out of range 1..LSB_RMS_MAXNUMRAILS */
+    public static final int RMS_RAIL_OUT_BOUND_ERR = (-6);
+
+/*  railmask syntax error */
+    public static final int RMS_RAILMASK_OUT_BOUND_ERR = (-7);
+
+/*  nodes syntax error */
+    public static final int RMS_NODES_SYNTAX_ERR = (-8);
+
+/*  ptile syntax error */
+    public static final int RMS_PTILE_SYNTAX_ERR = (-9);
+
+/*  rails syntax error */
+    public static final int RMS_RAIL_SYNTAX_ERR = (-10);
+
+/*  railmask syntax error */
+    public static final int RMS_RAILMASK_SYNTAX_ERR = (-11);
+
+/*  base syntax error */
+    public static final int RMS_BASE_SYNTAX_ERR = (-12);
+
+/*  base string too NativeLong*/
+    public static final int RMS_BASE_TOO_LONG = (-13);
+
+/*  >=1 allocation types are specified */
+    public static final int RMS_TOO_MANY_ALLOCTYPE_ERR = (-14);
+
+/*  =1 allocation types are specified */
+    public static final int RMS_NO_LSF_EXTSCHED_Y_ERR = (-15);
+
+/*  error reading env from lsf.conf inside syntax check */
+    public static final int RMS_READ_ENV_ERR = (-20);
+
+/*  memory allocation problems inside syntax check function */
+    public static final int RMS_MEM_ALLOC_ERR = (-21);
+
+/*  [] mis-matched in RMS[] */
+    public static final int RMS_BRACKETS_MISMATCH_ERR = (-22);
+
+    // Constant interfaces standing in for the native rms* enums. NOTE(review): the
+    // constant-interface pattern is a known Java anti-pattern, but it is kept here
+    // because it mirrors the generated binding's public API.
+    public static interface rmsAllocType_t {
+          public static final int RMS_ALLOC_TYPE_UNKNOWN = 0;
+          public static final int RMS_ALLOC_TYPE_SLOAD = 1;
+          public static final int RMS_ALLOC_TYPE_SNODE = 2;
+          public static final int RMS_ALLOC_TYPE_MCONT = 3;
+    }
+
+
+
+    public static interface rmsTopology_t {
+          public static final int RMS_TOPOLOGY_UNKNOWN = 0;
+          public static final int RMS_TOPOLOGY_PTILE = 1;
+          public static final int RMS_TOPOLOGY_NODES = 2;
+    }
+
+
+
+    public static interface rmsFlags_t {
+          public static final int RMS_FLAGS_UNKNOWN = 0;
+          public static final int RMS_FLAGS_RAILS = 1;
+          public static final int RMS_FLAGS_RAILMASK = 2;
+    }
+
+
+
+    // Parsed RMS external-scheduler options, filled in by parseRmsOptions.
+    // JNA Structure: field order defines the native layout — do not reorder.
+    public static class rmsextschedoption extends Structure {
+        public static class ByReference extends rmsextschedoption implements Structure.ByReference {}
+        public static class ByValue extends rmsextschedoption implements Structure.ByValue {}
+        public rmsextschedoption() {}
+        public rmsextschedoption(Pointer p) { super(p); read(); }
+
+        // Values drawn from the rmsAllocType_t / rmsTopology_t / rmsFlags_t constants above.
+        public /*rmsAllocType_t*/ int alloc_type;
+        public /*rmsTopology_t*/ int topology;
+        public int topology_value;
+        public int set_base;
+        public byte[] base = new byte[LibLsf.MAXHOSTNAMELEN];
+        public /*rmsFlags_t*/ int flags;
+        public int flags_value;
+    }
+
+
+
+    // Syntax-checks an RMS[...] external-scheduler option string and fills 'rmsextschedoption1';
+    // returns one of the negative RMS_*_ERR codes above on error (presumed — confirm against header).
+    public static native int parseRmsOptions(String string1, rmsextschedoption rmsextschedoption1, LibLsf.config_param config_param1);
+
+/* Stream interface.
+*  By default the stream lsb.stream is located in a subdirectory
+*  stream of the cluster working directory i.e.:
+*  work/<clustername>/logdir/stream and the size of
+*  lsb.stream is 1024MB
+ */
+    public static final int MBD_DEF_STREAM_SIZE = (1024 * 1024 * 1024);
+
+/* default maximum number of backup stream.utc file */
+    public static final int DEF_MAX_STREAM_FILE_NUMBER = 10;
+
+    /**
+     * \brief  Stream interface.
+     */
+    public static class lsbStream extends Structure {
+        public static class ByReference extends lsbStream implements Structure.ByReference {}
+        public static class ByValue extends lsbStream implements Structure.ByValue {}
+        public lsbStream() {}
+        public lsbStream(Pointer p) { super(p); read(); }
+
+        public static interface trsFunc extends Callback {
+            int invoke(String string1);
+        }
+
+        /**
+         * < Pointer to full path to the stream file
+         */
+        public String streamFile;
+
+        /**
+         * < Max size of the stream file
+         */
+        public int maxStreamSize;
+
+        /**
+         * < Max number of backup stream files
+         */
+        public int maxStreamFileNum;
+
+        /**
+         * < Set to 1 to enable trace of the stream
+         */
+        public int trace;
+
+        /**
+         * < Pointer to a function that the library invokes, passing a trace buffer.
+         */
+        public trsFunc trs;
+    }
+
+
+
+     /**//*
+     * \page lsb_openstream  lsb_openstream
+     * \brief Open and create an lsb_stream file.
+     * <p/>
+     * \ref lsb_openstream opens the streamFile .
+     * <p/>
+     * This API function is inside liblsbstream.so.
+     * <p/>
+     * <b>\#include <lsf/lsbatch.h>
+     * <p/>
+     * int lsb_openstream(lsbStream.ByReference params)</b>
+     *
+     * @return int:-1 or null \n
+     *         The function failed.
+     *         <p/>
+     *         \b Errors:
+     *         \par
+     *         If the function fails, lsberrno is set to indicate the error.
+     *         <p/>
+     *         <b>Equivalent line command:</b>
+     *         \par
+     *         none
+     *         <p/>
+     *         <b>Files:</b>
+     *         \par
+     *         $LSB_CONFDIR/cluster_name/configdir/lsb.params
+     * @param params Parameters.
+     * <p/>
+     * <b>Data Structures:</b>
+     * \par
+     * \ref lsbStream
+     * <p/>
+     * <b>Define Statements:</b>
+     * \par
+     * none
+     * #see \ref lsb_closestream
+     * #see \ref lsb_readstreamline
+     * #see \ref lsb_writestream
+     * #see \ref lsb_readstream
+     * #see \ref lsb_streamversion
+     */
+    // NOTE: Not in libbat
+    //public static native int lsb_openstream(lsbStream params);
+
+     /**//*
+     * \page lsb_closestream lsb_closestream
+     * \brief Close an lsb_stream file.
+     * <p/>
+     * \ref lsb_closestream closes the streamFile.
+     * <p/>
+     * This API function is inside liblsbstream.so.
+     * <p/>
+     * <b>\#include <lsf/lsbatch.h>
+     * <p/>
+     * int lsb_closestream(String config)</b>
+     *
+     * @return int:-1 \n
+     *         The function failed.
+     *         <p/>
+     *         \b Errors:
+     *         \par
+     *         If the function fails, lsberrno is set to indicate the error.
+     *         <p/>
+     *         <b>Equivalent line commands:</b>
+     *         \par
+     *         none
+     *         <p/>
+     *         <b>Files:</b>
+     *         \par
+     *         $LSB_CONFDIR/cluster_name/configdir/lsb.params
+     * @param config Pointer to the handle of the stream file.
+     * <p/>
+     * <b>Data Structures:</b>
+     * \par
+     * none
+     * <p/>
+     * <b>Define Statements:</b>
+     * \par
+     * none
+     * #see \ref lsb_openstream
+     * #see \ref lsb_readstreamline
+     * #see \ref lsb_writestream
+     * #see \ref lsb_readstream
+     * #see \ref lsb_streamversion
+     */
+    // NOTE: Not in libbat
+    //public static native int lsb_closestream(String config);
+
+     /**//*
+     * \page lsb_streamversion lsb_streamversion
+     * \brief Version of the current event type supported by mbatchd.
+     * <p/>
+     * \ref lsb_streamversion returns the event version number of mbatchd, which is the
+     * version of the events to be written to the stream file. This API function
+     * is inside liblsbstream.so.
+     * <p/>
+     * <b>\#include <lsf/lsbatch.h>
+     * <p/>
+     * String  lsb_streamversion()</b>
+     *
+     * param void \n
+     *             <p/>
+     *             <b>Data Structures:</b>
+     *             \par
+     *             none
+     *             <p/>
+     *             <b>Define Statements:</b>
+     *             \par
+     *             none
+     * @return null
+     *         \n Function failed.
+     *         <p/>
+     *         \b Errors:
+     *         \par
+     *         If the function fails, lsberrno is set to indicate the error.
+     *         <p/>
+     *         <b>Equivalent line commands:</b>
+     *         \par
+     *         none
+     *         <p/>
+     *         <b>Files:</b>
+     *         \par
+     *         $LSB_CONFDIR/cluster_name/configdir/lsb.params
+     * #see \ref lsb_closestream
+     * #see \ref lsb_geteventrec
+     * #see \ref lsb_openstream
+     * #see \ref lsb_puteventrec
+     * #see \ref lsb_readstreamline
+     * #see \ref lsb_writestream
+     * #see \ref lsb_readstream
+     */
+    // NOTE: Not in libbat
+    //public static native String lsb_streamversion();
+
+     /**//*
+     * \page lsb_writestream lsb_writestream
+     * \brief Writes a current version eventRec structure into the lsb_stream file.
+     * <p/>
+     * \ref lsb_writestream writes an eventrRec to the open streamFile.
+     * This API function is inside liblsbstream.so.
+     * <p/>
+     * <b>\#include <lsf/lsbatch.h>
+     * <p/>
+     * int lsb_writestream(eventRec.ByReference logPtr)</b>
+     *
+     * @return int:-1 \n
+     *         The function failed.
+     *         <p/>
+     *         \b Errors:
+     *         \par
+     *         If the function fails, lsberrno is set to indicate the error.
+     *         <p/>
+     *         <b>Equivalent line commands:</b>
+     *         \par
+     *         none
+     *         <p/>
+     *         <b>Files:</b>
+     *         \par
+     *         $LSB_CONFDIR/cluster_name/configdir/lsb.params
+     * @param logPtr Pointer to the eventRec structure.
+     * \n see \ref lsb_geteventrec for details on the eventRec structure.
+     * <p/>
+     * <b>Data Structures:</b>
+     * \par
+     * eventRec
+     * \n eventLog
+     * \n xFile
+     * \n jobAttrSetLog
+     * \n logSwitchLog
+     * \n dataLoggingLog
+     * \n jgrpNewLog
+     * \n jgrpCtrlLog
+     * \n jgrpStatusLog
+     * \n jobNewLog
+     * \n jobModLog
+     * \n jobStartLog
+     * \n jobStartAcceptLog
+     * \n jobExecuteLog
+     * \n jobStatusLog
+     * \n sbdJobStatusLog
+     * \n sbdUnreportedStatusLog
+     * \n jobSwitchLog
+     * \n jobMoveLog
+     * \n chkpntLog
+     * \n jobRequeueLog
+     * \n jobCleanLog
+     * \n jobExceptionLog
+     * \n sigactLog
+     * \n migLog
+     * \n signalLog
+     * \n queueCtrlLog
+     * \n hostCtrlLog
+     * \n hgCtrlLog
+     * \n mbdStartLog
+     * \n mbdDieLog
+     * \n unfulfillLog
+     * \n jobFinishLog
+     * \n loadIndexLog
+     * \n calendarLog
+     * \n jobForwardLog
+     * \n jobAcceptLog
+     * \n statusAckLog
+     * \n jobMsgLog
+     * \n jobMsgAckLog
+     * \n jobOccupyReqLog
+     * \n jobVacatedLog
+     * \n jobForceRequestLog
+     * \n jobChunkLog
+     * \n jobExternalMsgLog
+     * \n rsvRes
+     * \n rsvFinishLog
+     * \n cpuProfileLog
+     * \n jobRunRusageLog
+     * \n slaLog
+     * \n perfmonLogInfo
+     * \n perfmonLog
+     * \n taskFinishLog
+     * \n eventEOSLog
+     * \n jobResizeNotifyStartLog
+     * \n jobResizeNotifyAcceptLog
+     * \n jobResizeNotifyDoneLog
+     * \n jobResizeReleaseLog
+     * \n jobResizeCancelLog
+     * \n jobResizeLog
+     * \n jRusage
+     * <p/>
+     * <b>Define Statements:</b>
+     * \par
+     * \ref event_types
+     * \n \ref defs_lsb_XF_OP
+     * \n \ref jobgroup_controltypes
+     * \n \ref signal_action
+     * #see \ref lsb_closestream
+     * #see \ref lsb_geteventrec
+     * #see \ref lsb_openstream
+     * #see \ref lsb_puteventrec
+     * #see \ref lsb_readstreamline
+     * #see \ref lsb_streamversion
+     * #see \ref lsb_readstream
+     */
+    // NOTE: Not in libbat
+    //public static native int lsb_writestream(eventRec logPtr);
+
+     /**//*
+     * \page lsb_readstream lsb_readstream
+     * \brief Reads a current version eventRec structure from the lsb_stream file.
+     * <p/>
+     * \ref lsb_readstream reads an eventrRec from the open streamFile.
+     * <p/>
+     * <b>\#include <lsf/lsbatch.h>
+     * <p/>
+     * eventRec lsb_readstream(IntByReference nline)</b>
+     *
+     * @return int:-1 \n
+     *         The function failed.
+     *         <p/>
+     *         \b Errors:
+     *         \par
+     *         On failure, lsberrno is set to indicate the error.
+     *         <p/>
+     *         <b>Equivalent line commands:</b>
+     *         \par
+     *         none
+     *         <p/>
+     *         <b>Files:</b>
+     *         \par
+     *         $LSB_CONFDIR/cluster_name/configdir/lsb.params
+     * @param nline Line number in the stream file to be read.
+     * <p/>
+     * <b>Data Structures:</b>
+     * \par
+     * eventRec
+     * <p/>
+     * <b>Define Statements:</b>
+     * \par
+     * none
+     * #see \ref lsb_closestream
+     * #see \ref lsb_geteventrec
+     * #see \ref lsb_openstream
+     * #see \ref lsb_puteventrec
+     * #see \ref lsb_readstreamline
+     * #see \ref lsb_streamversion
+     * #see \ref lsb_writestream
+     */
+    // NOTE: Not in libbat
+    //public static native eventRec.ByReference lsb_readstream(IntByReference nline);
+
+     /**//*
+     * \page lsb_readstreamline lsb_readstreamline
+     * \brief Reads a current version eventRec structure from the lsb_stream file.
+     * <p/>
+     * \ref lsb_readstreamline reads an eventrRec from the open streamFile
+     * <p/>
+     * <b>\#include <lsf/lsbatch.h>
+     * <p/>
+     * eventRec.ByReference lsb_readstreamline(String line)</b>
+     *
+     * @return null \n
+     *         The function failed.
+     *         <p/>
+     *         \b Errors:
+     *         \par
+     *         On failure, lsberrno is set to indicate the error.
+     *         <p/>
+     *         <b>Equivalent line commands:</b>
+     *         \par
+     *         none
+     *         <p/>
+     *         <b>Files:</b>
+     *         \par
+     *         $LSB_CONFDIR/cluster_name/configdir/lsb.params
+     * @param line Line number in the stream file to be read.
+     * See \ref lsb_puteventrec and \ref lsb_geteventrec for details on the eventRec structure.
+     * Additionally, there are three additional event types supported.
+     * <p/>
+     * <b>Data Structures:</b>
+     * \par
+     * eventRec
+     * <p/>
+     * <b>Define Statements:</b>
+     * \par
+     * none
+     * #see \ref lsb_closestream
+     * #see \ref lsb_geteventrec
+     * #see \ref lsb_openstream
+     * #see \ref lsb_puteventrec
+     * #see \ref lsb_readstream
+     * #see \ref lsb_streamversion
+     * #see \ref lsb_writestream
+     */
+    // NOTE: Not in libbat
+    //public static native eventRec.ByReference lsb_readstreamline(String line);
+
+    public static final int NUM_EXITRATE_TYPES = 4;
+
+/* options for exit rate type */
+
+
+/* all exited jobs */
+    public static final int JOB_EXIT = 0x01;
+
+/* jobs failed to start due to initialization problem on execution host*/
+    public static final int JOB_INIT = 0x02;
+
+/* jobs failed to start due to HPC specific initialization problem on execution host*/
+    public static final int HPC_INIT = 0x04;
+
+/* jobs exited not related to reasons set by LSF */
+    public static final int JOB_EXIT_NONLSF = 0x08;
+
+    /**
+     * \brief  APS factor information
+     */
+    public static class apsFactorInfo extends Structure {
+        public static class ByReference extends apsFactorInfo implements Structure.ByReference {}
+        public static class ByValue extends apsFactorInfo implements Structure.ByValue {}
+        public apsFactorInfo() {}
+        public apsFactorInfo(Pointer p) { super(p); read(); }
+
+
+        /**
+         * <  Name
+         */
+        public String name;
+
+        /**
+         * <  Weight
+         */
+        public float weight;
+
+        /**
+         * <  Limit
+         */
+        public float limit;
+
+        /**
+         * <  Grace period
+         */
+        public int gracePeriod;
+    }
+
+
+
+/* options for job group delete */
+
+/* delete the specified user's all empty job groups*/
+    public static final int JGRP_DEL_USER_GROUPS = 0x01;
+
+/* delete one job group's all empty children groups including itself*/
+    public static final int JGRP_DEL_CHILD_GROUPS = 0x02;
+
+/* delete all empty job groups */
+    public static final int JGRP_DEL_ALL = 0x04;
+
+    /**
+     * ------------------------------------------------------------------------
+     * lsb_getallocFromHostfile
+     * <p/>
+     * Read the specified hostfile and return the host list. If path is null
+     * then read the hostfile specified by LSB_DJOB_HOSTFILE. The hostfile
+     * is assumed to be in simple format of one host per line. A host
+     * can be repeated.
+     * <p/>
+     * This function will allocate the memory for hostlist.
+     * It is the responsibility of the caller to free the lists when no longer
+     * needed. On success hostlist will be a list of strings.
+     * Before freeing hostlist the individual
+     * elements must be freed.
+     * <p/>
+     * Parameters:
+     * @param hostlist  [OUT]
+     * @param path      [IN]    path to hostfile, if null check LSB_DJOB_HOSTFILE
+     * <p/>
+     * @return Value:
+     * >0    success, length of hostlist not including the null last element
+     * -1    failure, lsberrno is set
+     * -------------------------------------------------------------------------
+     */
+    public static native int lsb_getallocFromHostfile(Pointer hostlist, String path);
+
+
+    /**
+     *  \addtogroup defs_lsb_launch defs_lsb_launch
+     *  lsb_launch() Valid options are:
+     */
+
+    /**
+     * < Disable standard input and redirect input from the special  device /dev/null. This is equivalent to blaunch -n.
+     */
+    public static final int LSF_DJOB_DISABLE_STDIN = 0x01;
+
+    /**
+     * < Replace existing environment variable values with envp.
+     */
+    public static final int LSF_DJOB_REPLACE_ENV = 0x02;
+
+    /**
+     * < Non-blocking mode; the parallel job does not wait once all tasks start.  This forces \ref lsb_launch not to wait for its tasks to finish.
+     */
+    public static final int LSF_DJOB_NOWAIT = 0x04;
+
+    /**
+     * < Display standard error messages with a corresponding host name where the message was generated. Cannot be specified with LSF_DJOB_NOWAIT.
+     */
+    public static final int LSF_DJOB_STDERR_WITH_HOSTNAME = 0x08;
+
+    /**
+     * < Display standard output messages with a corresponding host name  where the message was generated. Cannot be specified with LSF_DJOB_NOWAIT.
+     */
+    public static final int LSF_DJOB_STDOUT_WITH_HOSTNAME = 0x10;
+
+    /**
+     * < Use user's login shell to  launch tasks
+     */
+    public static final int LSF_DJOB_USE_LOGIN_SHELL = 0x20;
+
+    /**
+     * < Use /bin/sh to launch tasks
+     */
+    public static final int LSF_DJOB_USE_BOURNE_SHELL = 0x40;
+
+    /**
+     * < Separate stderr from stdout
+     */
+    public static final int LSF_DJOB_STDERR = 0x80;
+
+/*
+* -------------------------------------------------------------------------
+*  lsb_launch (where, argv, options, envp)
+*
+*  DESCRIPTION:
+*
+*    The specified command (i.e., argv) will be launched on the remote
+*    nodes in parallel
+*
+*  ARGUMENTS:
+*    where [IN]:
+*        A null terminated list of hosts.
+*        If this parameter is null then the environment variable
+*        LSB_MCPU_HOSTS will be used.
+*        A task will be launched for each slot.
+*    options [IN]:
+*        options value obtained by ORing
+*    Envp [IN]:
+*        A Null terminated list of environment variables (in 'variable=value'
+*        format).
+*        The environment to set for each task.
+*        If this parameter is null then the same environment used to start
+*        the first task will be used.
+*        If non-null, it is appended to the environment used for the
+*        first task.
+*        If LSF_DJOB_REPLACE_ENV is specified, Envp entries will overwrite
+*        existing values except those LSF needs.
+*
+*  RETURN:
+*    < 0 on failure
+*    > 0 upon success (i.e., number of tasks issued)
+*
+ */
+
+    /**
+     * \page lsb_launch lsb_launch
+     * \brief  Launch commands on remote hosts in parallel.
+     * <p/>
+     * \ref lsb_launch is a synchronous API call to allow source level integration with
+     * vendor MPI implementations. This API will launch the specified command (argv)
+     * on the remote nodes in parallel.
+     * \n LSF must be installed before integrating your MPI implementation with
+     * \ref lsb_launch. The \ref lsb_launch API requires the full set of liblsf.so,
+     * libbat.so (or liblsf.a, libbat.a).
+     * <p/>
+     * <b>\#include <lsf/lsbatch.h>
+     * <p/>
+     * int lsb_launch (String[] where, String[] argv, int userOptions, String[] envp)</b>
+     *
+     * @param userOptions [IN] Options to modify the behavior of \ref lsb_launch
+     *                    Multiple option values can be specified. For example option values can be
+     *                    separated by OR (|):
+     *                    \n \ref lsb_launch (where, argv, LSF_DJOB_REPLACE_ENV | LSF_DJOB_DISABLE_STDIN, envp);
+     * @return < 0 \n
+     *         Function failed.
+     *         <p/>
+     *         \b Errors:
+     *         \par
+     *         If the function fails, lsberrno is set to indicate the error.
+     *         <p/>
+     *         <b>Equivalent line command:</b>
+     *         \par
+     *         blaunch
+     *         <p/>
+     *         <b>Files:</b>
+     *         \par
+     *         none
+     * @param where [IN] A null-terminated list of hosts. A task will be launched
+     * for each slot. If this parameter is null then the environment variable
+     * LSB_MCPU_HOSTS will be used.
+     * @param argv [IN] The command to be executed
+     * @param envp [IN] A null-terminated list of environment variables specifying
+     * the environment to set for each task. If envp is null, \ref lsb_launch uses the
+     * same environment used to start the first task on the first execution host.
+     * If non-null, envp values are appended to the environment used for the first
+     * task. If the LSF_DJOB_REPLACE_ENV option is specified, envp entries will
+     * overwrite all existing environment values except those needed by LSF.
+     * <p/>
+     * <b>Data Structures:</b>
+     * \par
+     * none
+     * <p/>
+     * <b>Define Statements:</b>
+     * \par
+     * \ref defs_lsb_launch
+     * see none
+     */
+    public static native int lsb_launch(Pointer where, Pointer argv, int userOptions, Pointer envp);
+
+/*
+* -------------------------------------------------------------------------
+*  lsb_getalloc
+*
+*  This function will allocate the memory for hostlist.
+*
+*  It is the responsibility of the caller to free the lists when no longer
+*  needed. On success hostlist will be a list of strings.
+*  Before freeing hostlist the individual
+*  elements must be freed.
+*
+*  Parameters:
+*     hostlist     [OUT]     null terminated list of hosts
+*
+*  Returns:
+*    >0    success, length of hostlist not including the null last element
+*    -1    failure, lsberrno is set
+*
+* -------------------------------------------------------------------------
+ */
+
+    /**
+     * \page lsb_getalloc lsb_getalloc
+     * \brief Allocates memory for a host list to be used for launching parallel
+     * tasks through blaunch and the \ref lsb_launch API.
+     * <p/>
+     * It is the responsibility of the caller to free the host list when it is
+     * no longer needed. On success, the host list will be a list of strings.
+     * Before freeing host list, the individual elements must be freed.
+     * <p/>
+     * An application using the \ref lsb_getalloc API is assumed to be part of an
+     * LSF job, and that LSB_MCPU_HOSTS is set in the environment.
+     * <p/>
+     * <b>\#include <lsf/lsbatch.h>
+     * <p/>
+     * int lsb_getalloc(String[][] hostlist)</b>
+     *
+     * @return < 0 \n
+     *         Function failed.
+     *         <p/>
+     *         \b Errors:
+     *         \par
+     *         If the function fails, lsberrno is set to indicate the error.
+     *         <p/>
+     *         <b>Equivalent line commands:</b>
+     *         \par
+     *         none
+     *         <p/>
+     *         <b>Files:</b>
+     *         \par
+     *         none
+     * @param hostlist [OUT] A null-terminated list of host names
+     * <p/>
+     * <b>Data Structures:</b>
+     * \par
+     * none
+     * <p/>
+     * <b>Define Statements:</b>
+     * \par
+     * none
+     * see none
+     */
+    public static native int lsb_getalloc(Pointer hostlist);
+
+    /**
+     * \page lsb_resize_cancel lsb_resize_cancel
+     * \brief Cancels a pending job resize allocation request.
+     * <p/>
+     * Use \ref lsb_resize_cancel to cancel a pending allocation request for a
+     * resizable job. A running job can only have one pending request at any
+     * particular time. If one request is still pending, additional requests
+     * are rejected with a proper error code.
+     * <p/>
+     * <b>\#include <lsf/lsbatch.h>
+     * <p/>
+     * int lsb_resize_cancel(long jobId);</b>
+     *
+     * @param jobId LSF job ID
+     *              <p/>
+     *              <b>Data Structures:</b>
+     *              \par
+     *              none
+     *              <p/>
+     *              <b>Define Statements:</b>
+     *              \par
+     *              none
+     * @return int:-1 \n
+     *         On failure, returns -1.
+     *         <p/>
+     *         \b Errors:
+     *         \par
+     *         lsberrno is set to indicate the error.
+     *         <p/>
+     *         <b>Equivalent line commands:</b>
+     *         \par
+     *         bresize cancel job_ID
+     *         <p/>
+     *         <b>Files:</b>
+     *         \par
+     *         none
+     * #see \ref lsb_resize_release
+     */
+
+    public static native int lsb_resize_cancel(long jobId);
+
+    /**
+     * \page lsb_resize_release lsb_resize_release
+     * \brief Releases part of the allocation of a running resizable job.
+     * <p/>
+     * Use \ref lsb_resize_release to release part of a running job allocation.
+     * A running job can only have one pending request at any particular time.
+     * If one request is still pending, additional requests are rejected with
+     * a proper error code.
+     * <p/>
+     * If a notification command is defined through job submission, application
+     * profile,or the \ref lsb_resize_release API, the notification command is invoked
+     * on the first execution host of the job allocation once allocation resize
+     * requests have been satisfied.
+     * <p/>
+     * <b>\#include <lsf/lsbatch.h>
+     * <p/>
+     * int lsb_resize_release(job_resize_release.ByReference req);</b>
+     *
+     * @return int:-1 \n
+     *         On failure, returns -1.
+     *         <p/>
+     *         \b Errors:
+     *         \par
+     *         lsberrno is set to indicate the error.
+     *         <p/>
+     *         <b>Equivalent line commands:</b>
+     *         \par
+     *         bresize release [-c] [-rnc resize_notification_cmd | -rncn] released_host_specification job_ID
+     *         <p/>
+     *         <b>Files:</b>
+     *         \par
+     *         none
+     * @param req job resize release request.
+     * <p/>
+     * <b>Data Structures:</b>
+     * \par
+     * job_resize_release
+     * <p/>
+     * <b>Define Statements:</b>
+     * \par
+     * \ref resizablejob_related
+     * #see \ref lsb_resize_cancel
+     */
+    public static native int lsb_resize_release(job_resize_release req);
+
+    public static native int lsb_resize_request(job_resize_request job_resize_request1);
+
+    /**
+     * \page  lsb_getjobdepinfo lsb_getjobdepinfo
+     * Returns the job dependency information.
+     * <p/>
+     * \ref lsb_getjobdepinfo returns information about jobs (including job arrays) when
+     * a job has one or more dependencies on it.
+     * <p/>
+     * <b>\#include <lsf/lsbatch.h>
+     * <p/>
+     * jobDependInfo.ByReference
+     * lsb_getjobdepinfo(jobDepRequest.ByReference jobdepReq)</b>
+     *
+     * @return null
+     *         \n Function failed.
+     *         <p/>
+     *         \b Errors:
+     *         \par
+     *         none
+     *         <p/>
+     *         <b>Equivalent line commands:</b>
+     *         \par
+     *         none
+     *         <p/>
+     *         <b>Files:</b>
+     *         \par
+     *         none
+     * @param jobdepReq Job dependent Request.
+     * <p/>
+     * <b>Data Structures:</b>
+     * \par
+     * dependJobs
+     * \n queriedJobs
+     * \n jobDependInfo
+     * \n jobDepRequest
+     * <p/>
+     * <b>Define Statements:</b>
+     * \par
+     * \ref job_has_depend
+     * \n \ref query_depend
+     */
+    public static native jobDependInfo.ByReference lsb_getjobdepinfo(jobDepRequest jobdepReq);
+
+
+    /**
+     *  \page lsb_jsdl2submit lsb_jsdl2submit
+     *  \brief  Accepts a JSDL job submission file as input and converts the file
+     *   for use with LSF.
+     *
+     *  \ref lsb_jsdl2submit converts parameters specified in the JSDL file and merges
+     *  them with the other command line and job script options. The merged submit
+     *  request is then sent to mbatchd for processing.
+     *
+     *  Code must link to LSF_LIBDIR/libbat.jsdl.lib
+     *
+     *  <b>\#include <lsf/lsbatch.h>
+     *
+     *  int lsb_jsdl2submit(submit.ByReference req, String template);</b>
+     *
+     *  @param req Reads the specified JSDL options and maps them to the
+     *  submitReq structure. Code must specify either jsdl or jsdl_strict.
+     *  @param template The default template, which contains all of the bsub
+     *  submission options.
+     *
+     *  <b>Data Structures:</b>
+     *  \par
+     *  submit
+     *
+     *  <b>Define Statements:</b>
+     *  \par
+     *  none
+     *
+     *  @return int:0 \n
+     *  Function completed successfully.
+     *  @return int:-1 \n
+     *  Function failed.
+     *
+     *  <b>Errors:</b>
+     *  \par
+     *  On failure, sets lsberrno to indicate the error.
+     *
+     *  <b>Equivalent line command:</b>
+     *  \par
+     *   bsub with options
+     *
+     *  <b>Files:</b>
+     *  \par
+     *  $LSF_LIBDIR/jsdl.xsd
+     *  \n $LSF_LIBDIR/jsdl-posix.xsd
+     *  \n $LSF_LIBDIR/jsdl-lsf.xsd
+     *
+     *  @see \ref lsb_submit
+     *  @see \ref lsb_modify
+     */
+
+    /**
+     *  \page lsblib lsblib
+     *  \brief Application Programming Interface (API) library functions for batch jobs
+     *
+     *  LSBLIB functions allow application programs to get information about the hosts,
+     *  queues, users, jobs and configuration of the batch system. Application programs
+     *  can also submit jobs and control hosts, queues and jobs. Finally, application
+     *  programs can read batch log files and write batch error messages.
+     *
+     *  \note
+     *  \par
+     *  All LSBLIB APIs require that the batch header file <lsf/lsbatch.h> be included.
+     *  \par
+     *  Many LSBLIB APIs return a pointer to an array or structure. These data structures
+     *  are in static storage or on the heap. The next time the API is called, the storage
+     *  is overwritten or freed.
+     *  \par
+     *  Any program using LSBLIB APIs that change the state of the batch system (that
+     *  is, except for APIs that just get information about the system) must be setuid
+     *  to root if LSF_AUTH is not defined in the lsf.conf file.
+     *  \par
+     *  On systems which have both System V and BSD programming interfaces, LSBLIB
+     *  typically requires the BSD programming interface. On System V-based versions of
+     *  UNIX, for example SGI IRIX, it is normally necessary to link applications using
+     *  LSBLIB with the BSD compatibility library.
+     *  \par
+     *  On AFS systems, the following needs to be added to the end of your linkage
+     *  specifications when linking with LSBLIB (assuming your AFS library path is
+     *  /usr/afsws):
+     *  \par
+     *  For HP-UX and Solaris,
+     *  \par
+     *  -lc -L/usr/afsws/lib -L/usr/afsws/lib/afs -lsys -lrx -llwp /usr/afsws/lib/afs/util.a
+     *  \par
+     *  For other platforms,
+     *  \par
+     *  -lc -L/usr/afsws/lib -L/usr/afsws/lib/afs -lsys -lrx -llwp
+     *
+     *  \b Files:
+     *  \par
+     *  ${LSF_ENVDIR:-/etc}/lsf.conf
+     *  \n $LSF_CONFDIR/lsf.shared
+     *  \n $LSF_CONFDIR/lsf.cluster.cluster_name
+     *  \n $LSF_CONFDIR/lsf.task
+     *  \n $LSF_CONFDIR/lsf.task.cluster_name
+     *  \n $LSB_CONFDIR/cluster_name/configdir/lsb.hosts
+     *  \n $LSB_CONFDIR/cluster_name/configdir/lsb.params
+     *  \n $LSB_CONFDIR/cluster_name/configdir/lsb.queues
+     *  \n $LSB_CONFDIR/cluster_name/configdir/lsb.users
+     *
+     *  @see lsblibapis
+     */
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/jna/lsf/v7_0_6/LibLsf.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/jna/lsf/v7_0_6/LibLsf.java
new file mode 100644
index 0000000..9c8e563
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/jna/lsf/v7_0_6/LibLsf.java
@@ -0,0 +1,1780 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.jna.lsf.v7_0_6;
+
+import com.sun.jna.*;
+import com.sun.jna.ptr.FloatByReference;
+import com.sun.jna.ptr.IntByReference;
+import com.sun.jna.ptr.PointerByReference;
+import org.broadinstitute.gatk.utils.jna.clibrary.JNAUtils;
+import org.broadinstitute.gatk.utils.jna.clibrary.LibC.timeval;
+
+/*
+  NOTE: This library uses Pointer for some Struct.ByReference members going
+  against the JNA recommendations at http://jna.java.net/#structure_use
+  Instead struct arrays are Pointers and each structure contains a
+  constructor that can accept the Pointer iff the size of the array is
+  known to be greater than zero.
+
+  This was especially problematic in jobInfoEnt->items->resName. When
+  jobInfoEnt->reserveCnt was zero jobInfoEnt->items was not necessarily null.
+
+  LSF will often reuse memory for structure arrays but will set the
+  array size / count (reserveCnt above) to zero when the array should
+  not be accessed. When LSF has reused memory and points to a non-null
+  structure pointer (items) the inner structure may contain further
+  garbage pointers (especially items->resName).
+
+  When JNA sees a non-null Structure.ByReference it will autoRead() the
+  member. When autoRead() eventually gets to the items->resName trying
+  to run strlen on the bad memory address causes a SIGSEGV.
+
+  By using a Pointer instead of the Structure.ByReference JNA will not
+  automatically autoRead(), and the API user will have to pass the
+  pointer to the Structure on their own.
+*/
+
+/**
+ * JNA wrappers for LSF's lsf.h and -llsf
+ *
+ * $Id: base.h,v 1.25.6.12.2.5.2.11.2.15 2009/08/17 07:25:05 qlnie Exp $
+ ****************************************************************************
+ *
+ * Load Sharing Facility
+ *
+ * Header file for all components of load sharing facility.
+ *
+ ****************************************************************************/
+@SuppressWarnings("unused")
+public class LibLsf {
+
+    static {
+        /*
+        LSF 7.0.6 on the mac is missing the unsatisfied exported symbol for environ which was removed on MacOS X 10.5+.
+        nm $LSF_LIBDIR/liblsf.dylib | grep environ
+        See "man environ" for more info, along with http://lists.apple.com/archives/java-dev/2007/Dec/msg00096.html
+        For now, we export environ ourselves using libenvironhack.dylib available in c/libenvironhack.
+        */
+        if (Platform.isMac())
+            NativeLibrary.getInstance("environhack");
+        String lsfLibDir = System.getenv("LSF_LIBDIR");
+        if (lsfLibDir != null) {
+            NativeLibrary.addSearchPath("lsf", lsfLibDir);
+        }
+        Native.register("lsf");
+    }
+
+    public static final String PASSWD_FILE_LS = "passwd.lsfuser";
+    public static final int PASSWORD_LEN = 64;
+    public static final int MAXHOSTNAMELEN = JNAUtils.MAXHOSTNAMELEN;
+    public static final int MAXPATHLEN = JNAUtils.MAXPATHLEN;
+
+
+    public static final int LOG_EMERG = 0;
+    public static final int LOG_ALERT = 1;
+    public static final int LOG_CRIT = 2;
+    public static final int LOG_ERR = 3;
+    public static final int LOG_WARNING = 4;
+    public static final int LOG_NOTICE = 5;
+    public static final int LOG_INFO = 6;
+    public static final int LOG_DEBUG = 7;
+
+    public static final int INVALID_SOCKET = -1;
+
+    public static boolean SOCK_INVALID(int c) {
+        return ((c) == INVALID_SOCKET);
+    }
+
+    public static class rlimit extends Structure {
+        public static class ByReference extends rlimit implements Structure.ByReference {}
+        public static class ByValue extends rlimit implements Structure.ByValue {}
+        public rlimit() {}
+        public rlimit(Pointer p) { super(p); read(); }
+
+        public NativeLong rlim_cur;
+        public NativeLong rlim_max;
+    }
+
+
+
+    public static class rusage extends Structure {
+        public static class ByReference extends rusage implements Structure.ByReference {}
+        public static class ByValue extends rusage implements Structure.ByValue {}
+        public rusage() {}
+        public rusage(Pointer p) { super(p); read(); }
+
+        public timeval ru_utime;
+        public timeval ru_stime;
+
+
+        public NativeLong ru_maxrss;
+        //public static final int ru_first = ru_ixrss;
+        public NativeLong ru_ixrss;
+        public NativeLong ru_idrss;
+        public NativeLong ru_isrss;
+        public NativeLong ru_minflt;
+        public NativeLong ru_majflt;
+        public NativeLong ru_nswap;
+        public NativeLong ru_inblock;
+        public NativeLong ru_oublock;
+        public NativeLong ru_msgsnd;
+        public NativeLong ru_msgrcv;
+        public NativeLong ru_nsignals;
+        public NativeLong ru_nvcsw;
+        public NativeLong ru_nivcsw;
+        //public static final int ru_last = ru_nivcsw;
+        // Listed in lsf.h but not present in structure.
+        //public NativeLong ru_ioch;
+    }
+
+
+
+
+    public static final String _VERSION_STR_ = "Platform LSF 7.0";
+    public static final String _WORKGROUP_STR_ = "";
+    public static final String _MINOR_STR_ = "";
+    public static final String _BUILD_STR_ = "";
+    public static final String _NOTE_STR_ = "";
+    public static final String _HOTFIX_STR_ = "";
+    public static final String _OS_STR_ = "";
+
+    public static final String _DATE_STR_ = "";
+    public static final String _BUILD_INFO_ = _MINOR_STR_ + "" + _BUILD_STR_ + "" + _WORKGROUP_STR_ + ", " + _DATE_STR_ + "\nCopyright 1992-2009 Platform Computing Corporation\n\n" + _OS_STR_ + _NOTE_STR_ + _HOTFIX_STR_;
+    public static final String _LS_VERSION_ = (_VERSION_STR_ + "" + _BUILD_INFO_);
+
+    //public static int XDR_SETPOS (int xdrs, int pos)  { (*(xdrs)->x_ops->x_setpostn)(xdrs, 0); return (*(xdrs)->x_ops->x_setpostn)(xdrs, pos); }
+    //public static int xdr_setpos (int xdrs, int pos)  { (*(xdrs)->x_ops->x_setpostn)(xdrs, 0); return (*(xdrs)->x_ops->x_setpostn)(xdrs, pos); }
+
+
+    public static final int LSF_XDR_VERSION2_0 = 1;
+    public static final int LSF_XDR_VERSION2_1 = 2;
+    public static final int LSF_XDR_VERSION2_2 = 3;
+    public static final int LSF_XDR_VERSION3_0 = 4;
+    public static final int LSF_XDR_VERSION3_1 = 5;
+    public static final int LSF_XDR_VERSION3_2 = 6;
+    public static final int LSF_XDR_VERSION3_2_2 = 7;
+    public static final int LSF_XDR_VERSION4_0 = 8;
+    public static final int LSF_XDR_VERSION4_1 = 9;
+    public static final int LSF_XDR_VERSION4_2 = 10;
+    public static final int LSF_XDR_VERSION5_0 = 11;
+    public static final int LSF_XDR_VERSION5_1 = 12;
+    public static final int LSF_XDR_VERSION6_0 = 13;
+    public static final int LSF_XDR_VERSION6_1 = 14;
+    public static final int LSF_XDR_VERSION6_2 = 15;
+    public static final int EGO_XDR_VERSION_1_1 = 16;
+    public static final int LSF_XDR_VERSION7_0 = 17;
+    public static final int EGO_XDR_VERSION_1_2 = LSF_XDR_VERSION7_0;
+    public static final int LSF_XDR_VERSION7_0_EP1 = 18;
+    public static final int LSF_XDR_VERSION7_0_EP2 = 19;
+    public static final int LSF_XDR_VERSION7_0_EP3 = 20;
+    public static final int LSF_XDR_VERSION7_0_EP4 = 21;
+    public static final int LSF_XDR_VERSION7_0_EP5 = 22;
+    public static final int LSF_XDR_VERSION7_0_EP6 = 23;
+    public static final int EGO_XDR_VERSION_1_2_2 = LSF_XDR_VERSION7_0_EP1;
+    public static final int EGO_XDR_VERSION_1_2_3 = LSF_XDR_VERSION7_0_EP2;
+
+    public static final int EGO_XDR_VERSION = LSF_XDR_VERSION7_0_EP2;
+
+    //public String LOG_VERSION;
+
+    public static final int LSF_DEFAULT_SOCKS = 15;
+    public static final int MAXLINELEN = 512;
+    public static final int MAXLSFNAMELEN = 40;
+    public static final int MAXLSFNAMELEN_70_EP1 = 128;
+
+    public static final int MAXSRES = 32;
+    public static final int MAXRESDESLEN = 256;
+    public static final int NBUILTINDEX = 11;
+    public static final int MAXTYPES = 128;
+    public static final int MAXMODELS = 1024 + 2;
+    public static final int MAXMODELS_70 = 128;
+    public static final int MAXTYPES_31 = 25;
+    public static final int MAXMODELS_31 = 30;
+    public static final int MAXFILENAMELEN = 256;
+    public static final int MAXEVARS = 30;
+
+    public static final int GENMALLOCPACE = 1024;
+
+
+    public static final int FIRST_RES_SOCK = 20;
+
+
+    public static final int R15S = 0;
+    public static final int R1M = 1;
+    public static final int R15M = 2;
+    public static final int UT = 3;
+    public static final int PG = 4;
+    public static final int IO = 5;
+    public static final int LS = 6;
+    public static final int IT = 7;
+    public static final int TMP = 8;
+    public static final int SWP = 9;
+    public static final int MEM = 10;
+    public static final int USR1 = 11;
+    public static final int USR2 = 12;
+
+
+    public static final float INFINIT_LOAD = (float) (0x7fffffff);
+    public static final float INFINIT_FLOAT = (float) (0x7fffffff);
+
+    public static final int INFINIT_INT = 0x7fffffff;
+    public static final long INFINIT_LONG_INT = 0x7fffffffffffffffL;
+    public static final short INFINIT_SHORT = 0x7fff;
+
+    public static final int DEFAULT_RLIMIT = -1;
+
+    public static final int LSF_RLIMIT_CPU = 0;
+    public static final int LSF_RLIMIT_FSIZE = 1;
+    public static final int LSF_RLIMIT_DATA = 2;
+    public static final int LSF_RLIMIT_STACK = 3;
+    public static final int LSF_RLIMIT_CORE = 4;
+    public static final int LSF_RLIMIT_RSS = 5;
+    public static final int LSF_RLIMIT_NOFILE = 6;
+    public static final int LSF_RLIMIT_OPEN_MAX = 7;
+    public static final int LSF_RLIMIT_VMEM = 8;
+    public static final int LSF_RLIMIT_SWAP = LSF_RLIMIT_VMEM;
+    public static final int LSF_RLIMIT_RUN = 9;
+    public static final int LSF_RLIMIT_PROCESS = 10;
+    public static final int LSF_RLIMIT_THREAD = 11;
+    public static final int LSF_RLIM_NLIMITS = 12;
+
+    public static final int LSF_RLIM_NLIMITS5_1 = 11;
+
+    //public static int seteuid (int x) { return setresuid(-1,x,-1); }
+    //public static int setegid (int x) { return setresgid(-1,x,-1); }
+
+    public static final int LSF_NULL_MODE = 0;
+    public static final int LSF_LOCAL_MODE = 1;
+    public static final int LSF_REMOTE_MODE = 2;
+
+
+    public static final int RF_MAXHOSTS = 5;
+
+
+    public static final int RF_CMD_MAXHOSTS = 0;
+
+
+    public static final int RF_CMD_RXFLAGS = 2;
+
+
+    public static final int STATUS_TIMEOUT = 125;
+    public static final int STATUS_IOERR = 124;
+    public static final int STATUS_EXCESS = 123;
+    public static final int STATUS_REX_NOMEM = 122;
+    public static final int STATUS_REX_FATAL = 121;
+    public static final int STATUS_REX_CWD = 120;
+    public static final int STATUS_REX_PTY = 119;
+    public static final int STATUS_REX_SP = 118;
+    public static final int STATUS_REX_FORK = 117;
+    public static final int STATUS_REX_AFS = 116;
+    public static final int STATUS_REX_UNKNOWN = 115;
+    public static final int STATUS_REX_NOVCL = 114;
+    public static final int STATUS_REX_NOSYM = 113;
+    public static final int STATUS_REX_VCL_INIT = 112;
+    public static final int STATUS_REX_VCL_SPAWN = 111;
+    public static final int STATUS_REX_EXEC = 110;
+    public static final int STATUS_REX_MLS_INVAL = 109;
+    public static final int STATUS_REX_MLS_CLEAR = 108;
+    public static final int STATUS_REX_MLS_RHOST = 107;
+    public static final int STATUS_REX_MLS_DOMIN = 106;
+    public static final int STATUS_DENIED = 105;
+
+
+    public static boolean REX_FATAL_ERROR(int s) {
+        return (((s) == STATUS_REX_NOVCL) || ((s) == STATUS_REX_NOSYM) || ((s) == STATUS_REX_NOMEM) || ((s) == STATUS_REX_FATAL) || ((s) == STATUS_REX_CWD) || ((s) == STATUS_REX_PTY) || ((s) == STATUS_REX_VCL_INIT) || ((s) == STATUS_REX_VCL_SPAWN) || ((s) == STATUS_REX_MLS_INVAL) || ((s) == STATUS_REX_MLS_CLEAR) || ((s) == STATUS_REX_MLS_RHOST) || ((s) == STATUS_REX_MLS_DOMIN));
+    }
+
+
+    public static final int REXF_USEPTY = 0x00000001;
+    public static final int REXF_CLNTDIR = 0x00000002;
+    public static final int REXF_TASKPORT = 0x00000004;
+    public static final int REXF_SHMODE = 0x00000008;
+    public static final int REXF_TASKINFO = 0x00000010;
+    public static final int REXF_REQVCL = 0x00000020;
+    public static final int REXF_SYNCNIOS = 0x00000040;
+    public static final int REXF_TTYASYNC = 0x00000080;
+    public static final int REXF_STDERR = 0x00000100;
+
+
+    public static final int EXACT = 0x01;
+    public static final int OK_ONLY = 0x02;
+    public static final int NORMALIZE = 0x04;
+    public static final int LOCALITY = 0x08;
+    public static final int IGNORE_RES = 0x10;
+    public static final int LOCAL_ONLY = 0x20;
+    public static final int DFT_FROMTYPE = 0x40;
+    public static final int ALL_CLUSTERS = 0x80;
+    public static final int EFFECTIVE = 0x100;
+
+
+    public static final int RECV_FROM_CLUSTERS = 0x200;
+    public static final int NEED_MY_CLUSTER_NAME = 0x400;
+
+
+    public static final int SEND_TO_CLUSTERS = 0x400;
+
+
+    public static final int NO_SORT = 0x800;
+
+
+    public static final int EXCLUSIVE_RESOURCE = 0x1000;
+
+    public static final int DT_CLUSTER_LOAD = 0x2000;
+
+
+    public static final int FROM_MASTER = 0x01;
+
+
+    public static final int KEEPUID = 0x01;
+
+
+    public static final int RES_CMD_REBOOT = 1;
+
+    public static final int RES_CMD_SHUTDOWN = 2;
+
+    public static final int RES_CMD_LOGON = 3;
+
+    public static final int RES_CMD_LOGOFF = 4;
+
+
+    public static final int LIM_CMD_REBOOT = 1;
+    public static final int LIM_CMD_SHUTDOWN = 2;
+    public static final int LIM_CMD_REMOVEHOST = 3;
+    public static final int LIM_CMD_ACTIVATE = 4;
+    public static final int LIM_CMD_DEACTIVATE = 5;
+    public static final int LIM_CMD_ELIM_ENV = 6;
+
+
+    public static class connectEnt extends Structure {
+        public static class ByReference extends connectEnt implements Structure.ByReference {}
+        public static class ByValue extends connectEnt implements Structure.ByValue {}
+        public connectEnt() {}
+        public connectEnt(Pointer p) { super(p); read(); }
+
+        public String hostname;
+        public int[] csock = new int[2];
+    }
+
+
+
+    public static final int INTEGER_BITS = 32;
+
+    public static int GET_INTNUM(int i) {
+        return ((i) / INTEGER_BITS + 1);
+    }
+
+
+    public static final int LIM_UNAVAIL = 0x00010000;
+    public static final int LIM_LOCKEDU = 0x00020000;
+    public static final int LIM_LOCKEDW = 0x00040000;
+    public static final int LIM_BUSY = 0x00080000;
+    public static final int LIM_RESDOWN = 0x00100000;
+    public static final int LIM_UNLICENSED = 0x00200000;
+    public static final int LIM_SBDDOWN = 0x00400000;
+    public static final int LIM_LOCKEDM = 0x00800000;
+
+    public static final int LIM_OK_MASK = 0x00bf0000;
+    public static final int LIM_PEMDOWN = 0x01000000;
+    public static final int LIM_LOCKEDU_RMS = 0x80000000;
+
+
+    public static boolean LS_ISUNAVAIL(int[] status) {
+        return (((status) != null) && (((status[0]) & LIM_UNAVAIL) != 0));
+    }
+
+
+    public static boolean LS_ISBUSYON(int[] status, int index) {
+        return (((status) != null) && (((status[1 + (index) / INTEGER_BITS]) & (1 << (index) % INTEGER_BITS)) != 0));
+    }
+
+    public static boolean LS_ISBUSY(int[] status) {
+        return (((status) != null) && (((status[0]) & LIM_BUSY) != 0));
+    }
+
+
+    public static boolean LS_ISRMSLOCK(int[] status) {
+        return (((status) != null) && (((status[0]) & LIM_LOCKEDU_RMS) != 0));
+    }
+
+
+    public static boolean LS_ISLOCKEDU(int[] status) {
+        return (((status) != null) && (((status[0]) & LIM_LOCKEDU) != 0));
+    }
+
+
+    public static boolean LS_ISLOCKEDW(int[] status) {
+        return (((status) != null) && (((status[0]) & LIM_LOCKEDW) != 0));
+    }
+
+
+    public static boolean LS_ISLOCKEDM(int[] status) {
+        return (((status) != null) && (((status[0]) & LIM_LOCKEDM) != 0));
+    }
+
+
+    public static boolean LS_ISLOCKED(int[] status) {
+        return (((status) != null) && (((status[0]) & (LIM_LOCKEDU | LIM_LOCKEDW | LIM_LOCKEDM)) != 0));
+    }
+
+
+    public static boolean LS_ISRESDOWN(int[] status) {
+        return (((status) != null) && (((status[0]) & LIM_RESDOWN) != 0));
+    }
+
+
+    public static boolean LS_ISSBDDOWN(int[] status) {
+        return (((status) != null) && (((status[0]) & LIM_SBDDOWN) != 0));
+    }
+
+    public static boolean LS_ISPEMDOWN(int[] status) {
+        return (((status[0]) & LIM_PEMDOWN) != 0);
+    }
+
+
+    public static boolean LS_ISUNLICENSED(int[] status) {
+        return (((status) != null) && (((status[0]) & LIM_UNLICENSED) != 0));
+    }
+
+
+    public static boolean LS_ISOK(int[] status) {
+        return (((status) != null) && ((status[0] & LIM_OK_MASK) == 0));
+    }
+
+
+    public static boolean LS_ISOKNRES(int[] status) {
+        return (((status) != null) && (((status[0] & ~(LIM_LOCKEDU_RMS)) & ~(LIM_RESDOWN | LIM_SBDDOWN | LIM_PEMDOWN)) == 0));
+    }
+
+
+    public static class placeInfo extends Structure {
+        public static class ByReference extends placeInfo implements Structure.ByReference {}
+        public static class ByValue extends placeInfo implements Structure.ByValue {}
+        public placeInfo() {}
+        public placeInfo(Pointer p) { super(p); read(); }
+
+        public byte[] hostName = new byte[MAXHOSTNAMELEN];
+        public int numtask;
+    }
+
+
+
+
+    public static class hostLoad extends Structure {
+        public static class ByReference extends hostLoad implements Structure.ByReference {}
+        public static class ByValue extends hostLoad implements Structure.ByValue {}
+        public hostLoad() {}
+        public hostLoad(Pointer p) { super(p); read(); }
+
+        public byte[] hostName = new byte[MAXHOSTNAMELEN];
+        public IntByReference status;
+        public FloatByReference li;
+    }
+
+
+
+
+    public static interface valueType {
+          public static final int LS_BOOLEAN = 0;
+          public static final int LS_NUMERIC = 1;
+          public static final int LS_STRING = 2;
+          public static final int LS_EXTERNAL = 3;
+    }
+
+
+
+    public static interface orderType {
+          public static final int INCR = 0;
+          public static final int DECR = 1;
+          public static final int NA = 2;
+    }
+
+
+
+
+    public static final int RESF_BUILTIN = 0x01;
+    public static final int RESF_DYNAMIC = 0x02;
+    public static final int RESF_GLOBAL = 0x04;
+    public static final int RESF_SHARED = 0x08;
+    public static final int RESF_LIC = 0x10;
+    public static final int RESF_EXTERNAL = 0x20;
+    public static final int RESF_RELEASE = 0x40;
+    public static final int RESF_DEFINED_IN_RESOURCEMAP = 0x80;
+
+    public static final int RESF_NON_CONSUMABLE = 0x100;
+    public static final int RESF_REDEFINABLE = 0x200;
+    public static final int RESF_ESRES = 0x400;
+
+
+    public static class resItem extends Structure {
+        public static class ByReference extends resItem implements Structure.ByReference {}
+        public static class ByValue extends resItem implements Structure.ByValue {}
+        public resItem() {}
+        public resItem(Pointer p) { super(p); read(); }
+
+        public byte[] name = new byte[MAXLSFNAMELEN];
+        public byte[] des = new byte[MAXRESDESLEN];
+        public /*valueType*/ int valueType;
+        public /*orderType*/ int orderType;
+        public int flags;
+        public int interval;
+    }
+
+
+
+
+    public static class lsInfo extends Structure {
+        public static class ByReference extends lsInfo implements Structure.ByReference {}
+        public static class ByValue extends lsInfo implements Structure.ByValue {}
+        public lsInfo() {}
+        public lsInfo(Pointer p) { super(p); read(); }
+
+        // The current version of JNA's Structure.getNativeAlignment passes a "null" to
+        // Native.getNativeSize() when accessing the contents of a 2D array.
+        // Although the method is marked as protected, there are also multiple "TO DO"
+        // comments so when we upgrade don't want to have specialized code floating around.
+
+        public int nRes;
+        public Pointer /* resItem.ByReference */ resTable;
+        public int nTypes;
+        public byte[] hostTypes = new byte[MAXTYPES * MAXLSFNAMELEN];
+        public int nModels;
+        public byte[] hostModels = new byte[MAXMODELS * MAXLSFNAMELEN];
+        public byte[] hostArchs = new byte[MAXMODELS * MAXLSFNAMELEN_70_EP1];
+        public int[] modelRefs = new int[MAXMODELS];
+        public float[] cpuFactor = new float[MAXMODELS];
+        public int numIndx;
+        public int numUsrIndx;
+    }
+
+
+
+
+    public static final int CLUST_STAT_OK = 0x01;
+    public static final int CLUST_STAT_UNAVAIL = 0x02;
+    public static final int CLUST_STAT_RECV_FROM = 0x04;
+    public static final int CLUST_STAT_SEND_TO = 0x08;
+
+
+    public static boolean IS_DEFAULT_AUTH(byte[] auth) {
+        return (auth == null || auth[0] == '\0');
+    }
+
+
+    public static class clusterInfo extends Structure {
+        public static class ByReference extends clusterInfo implements Structure.ByReference {}
+        public static class ByValue extends clusterInfo implements Structure.ByValue {}
+        public clusterInfo() {}
+        public clusterInfo(Pointer p) { super(p); read(); }
+
+        public byte[] clusterName = new byte[MAXLSFNAMELEN];
+        public int status;
+        public byte[] masterName = new byte[MAXHOSTNAMELEN];
+        public byte[] managerName = new byte[MAXLSFNAMELEN];
+        public int managerId;
+        public int numServers;
+        public int numClients;
+        public int nRes;
+        public Pointer resources;
+        public int nTypes;
+        public Pointer hostTypes;
+        public int nModels;
+        public Pointer hostModels;
+        public int nAdmins;
+        public IntByReference adminIds;
+        public Pointer admins;
+        public int analyzerLicFlag;
+        public int jsLicFlag;
+        public byte[] afterHoursWindow = new byte[MAXLINELEN];
+        public byte[] preferAuthName = new byte[MAXLSFNAMELEN];
+        public byte[] inUseAuthName = new byte[MAXLSFNAMELEN];
+    }
+
+
+    public static class hostInfo extends Structure {
+        public static class ByReference extends hostInfo implements Structure.ByReference {}
+        public static class ByValue extends hostInfo implements Structure.ByValue {}
+        public hostInfo() {}
+        public hostInfo(Pointer p) { super(p); read(); }
+
+        public byte[] hostName = new byte[MAXHOSTNAMELEN];
+        public String hostType;
+        public String hostModel;
+        public float cpuFactor;
+        public int maxCpus;
+        public int maxMem;
+        public int maxSwap;
+        public int maxTmp;
+        public int nDisks;
+        public int nRes;
+        public Pointer resources;
+        public int nDRes;
+        public Pointer DResources;
+        public String windows;
+        public int numIndx;
+        public FloatByReference busyThreshold;
+        public byte isServer;
+        public byte licensed;
+        public int rexPriority;
+        public int licFeaturesNeeded;
+
+
+        public static final int LSF_BASE_LIC = 0;
+        public static final int LSF_BATCH_LIC_OBSOLETE = 1;
+        public static final int LSF_JS_SCHEDULER_LIC = 2;
+        public static final int LSF_JS_LIC = 3;
+        public static final int LSF_CLIENT_LIC = 4;
+        public static final int LSF_MC_LIC = 5;
+        public static final int LSF_ANALYZER_SERVER_LIC = 6;
+        public static final int LSF_MAKE_LIC = 7;
+
+        public static final int LSF_PARALLEL_LIC = 8;
+        public static final int LSF_FLOAT_CLIENT_LIC = 9;
+        public static final int LSF_FTA_LIC = 10;
+        public static final int LSF_AFTER_HOURS_LIC = 11;
+        public static final int LSF_RESOURCE_PREEMPT_LIC = 12;
+        public static final int LSF_BACCT_LIC = 13;
+        public static final int LSF_SCHED_FAIRSHARE_LIC = 14;
+        public static final int LSF_SCHED_RESERVE_LIC = 15;
+        public static final int LSF_SCHED_PREEMPTION_LIC = 16;
+        public static final int LSF_SCHED_PARALLEL_LIC = 17;
+        public static final int LSF_SCHED_ADVRSV_LIC = 18;
+        public static final int LSF_API_CLIENT_LIC = 19;
+
+        public static final int CLUSTERWARE_MANAGER_LIC = 20;
+        public static final int LSF_MANAGER_LIC = 21;
+        public static final int LSF_PCC_HPC_LIC = 22;
+        public static final int sCLUSTERWARE_LIC = 23;
+        public static final int OTTAWA_MANAGER_LIC = 24;
+
+        public static final int SYMPHONY_MANAGER_ONLINE_LIC = 25;
+        public static final int SYMPHONY_MANAGER_BATCH_LIC = 26;
+        public static final int SYMPHONY_SCHED_JOB_PRIORITY_LIC = 27;
+        public static final int LSF_DUALCORE_X86_LIC = 28;
+        public static final int LSF_TSCHED_LIC = 29;
+        public static final int LSF_WORKGROUP_LIC = 30;
+        public static final int LSF_NUM_LIC_TYPE = 31;
+        public static final int LSF_WG_NUM_LIC_TYPE = 2;
+        public static final int LSF_NO_NEED_LIC = 32;
+
+        public int licClass;
+        public int cores;
+        public static final int INET6_ADDRSTRLEN = 46;
+        public byte[] hostAddr = new byte[INET6_ADDRSTRLEN];
+        public int pprocs;
+
+        public int cores_per_proc;
+        public int threads_per_core;
+    }
+
+    public static boolean HAS_BATCH_LICENSES(int featureEnabled) {
+        return (JNAUtils.toBoolean(featureEnabled & (1 << hostInfo.CLUSTERWARE_MANAGER_LIC)) || JNAUtils.toBoolean(featureEnabled & (1 << hostInfo.LSF_MANAGER_LIC)) || JNAUtils.toBoolean(featureEnabled & (1 << hostInfo.LSF_WORKGROUP_LIC)) || JNAUtils.toBoolean(featureEnabled & (1 << hostInfo.SYMPHONY_MANAGER_ONLINE_LIC)) || JNAUtils.toBoolean(featureEnabled & (1 << hostInfo.SYMPHONY_MANAGER_BATCH_LIC)));
+    }
+
+    public static boolean HAS_SYMPHONY_LICENSES(int featureEnabled) {
+        return (JNAUtils.toBoolean(featureEnabled & (1 << hostInfo.SYMPHONY_MANAGER_ONLINE_LIC)) || JNAUtils.toBoolean(featureEnabled & (1 << hostInfo.SYMPHONY_MANAGER_BATCH_LIC)));
+    }
+
+
+    public static class config_param extends Structure {
+        public static class ByReference extends config_param implements Structure.ByReference {}
+        public static class ByValue extends config_param implements Structure.ByValue {}
+        public config_param() {}
+        public config_param(Pointer p) { super(p); read(); }
+
+        public String paramName;
+        public String paramValue;
+    }
+
+
+
+    public static class lsfRusage extends Structure {
+        public static class ByReference extends lsfRusage implements Structure.ByReference {}
+        public static class ByValue extends lsfRusage implements Structure.ByValue {}
+        public lsfRusage() {}
+        public lsfRusage(Pointer p) { super(p); read(); }
+
+        public double ru_utime;
+        public double ru_stime;
+        public double ru_maxrss;
+        public double ru_ixrss;
+        public double ru_ismrss;
+        public double ru_idrss;
+        public double ru_isrss;
+        public double ru_minflt;
+        public double ru_majflt;
+        public double ru_nswap;
+        public double ru_inblock;
+        public double ru_oublock;
+        public double ru_ioch;
+        public double ru_msgsnd;
+        public double ru_msgrcv;
+        public double ru_nsignals;
+        public double ru_nvcsw;
+        public double ru_nivcsw;
+        public double ru_exutime;
+    }
+
+
+
+
+    public static class lsfAcctRec extends Structure {
+        public static class ByReference extends lsfAcctRec implements Structure.ByReference {}
+        public static class ByValue extends lsfAcctRec implements Structure.ByValue {}
+        public lsfAcctRec() {}
+        public lsfAcctRec(Pointer p) { super(p); read(); }
+
+        public int pid;
+        public String username;
+        public int exitStatus;
+        public NativeLong dispTime;
+        public NativeLong termTime;
+        public String fromHost;
+        public String execHost;
+        public String cwd;
+        public String cmdln;
+        public lsfRusage lsfRu;
+    }
+
+
+
+
+    public static class confNode extends Structure {
+        public static class ByReference extends confNode implements Structure.ByReference {}
+        public static class ByValue extends confNode implements Structure.ByValue {}
+        public confNode() {}
+        public confNode(Pointer p) { super(p); read(); }
+
+        public confNode.ByReference leftPtr;
+        public confNode.ByReference rightPtr;
+        public confNode.ByReference fwPtr;
+        public String cond;
+        public int beginLineNum;
+        public int numLines;
+        public Pointer lines;
+        public byte tag;
+    }
+
+
+
+    public static class pStack extends Structure {
+        public static class ByReference extends pStack implements Structure.ByReference {}
+        public static class ByValue extends pStack implements Structure.ByValue {}
+        public pStack() {}
+        public pStack(Pointer p) { super(p); read(); }
+
+        public int top;
+        public int size;
+        public PointerByReference nodes;
+    }
+
+
+
+    public static class confHandle extends Structure {
+        public static class ByReference extends confHandle implements Structure.ByReference {}
+        public static class ByValue extends confHandle implements Structure.ByValue {}
+        public confHandle() {}
+        public confHandle(Pointer p) { super(p); read(); }
+
+        public confNode.ByReference rootNode;
+        public String fname;
+        public confNode.ByReference curNode;
+        public int lineCount;
+        public pStack.ByReference ptrStack;
+    }
+
+
+
+    public static class lsConf extends Structure {
+        public static class ByReference extends lsConf implements Structure.ByReference {}
+        public static class ByValue extends lsConf implements Structure.ByValue {}
+        public lsConf() {}
+        public lsConf(Pointer p) { super(p); read(); }
+
+        public confHandle.ByReference confhandle;
+        public int numConds;
+        public Pointer conds;
+        public IntByReference values;
+    }
+
+
+
+    /**
+     * JNA mapping of the LSF {@code sharedConf} struct, as returned by
+     * {@code ls_readshared()}: cluster-shared configuration data.
+     * NOTE(review): field order mirrors the C struct layout -- do not reorder.
+     */
+    public static class sharedConf extends Structure {
+        public static class ByReference extends sharedConf implements Structure.ByReference {}
+        public static class ByValue extends sharedConf implements Structure.ByValue {}
+        public sharedConf() {}
+        public sharedConf(Pointer p) { super(p); read(); }
+
+        public lsInfo.ByReference lsinfo;  // general LSF information block
+        public int numCls;                 // number of clusters
+        public Pointer clusterNames;       // native char** of cluster names -- TODO confirm
+        public Pointer servers;            // native char** of server names -- TODO confirm
+    }
+
+
+
+
+    /**
+     * JNA mapping of the LSF {@code lsSharedResourceInstance} struct: one
+     * instance of a shared resource and the hosts it is associated with.
+     * NOTE(review): field order mirrors the C struct layout -- do not reorder.
+     */
+    public static class lsSharedResourceInstance extends Structure {
+        public static class ByReference extends lsSharedResourceInstance implements Structure.ByReference {}
+        public static class ByValue extends lsSharedResourceInstance implements Structure.ByValue {}
+        public lsSharedResourceInstance() {}
+        public lsSharedResourceInstance(Pointer p) { super(p); read(); }
+
+        public String value;     // current value of this instance
+        public int nHosts;       // number of entries in hostList
+        public Pointer hostList; // native char** of host names -- TODO confirm
+
+    }
+
+
+
+
+    /**
+     * JNA mapping of the LSF {@code lsSharedResourceInfo} struct, as returned
+     * by {@code ls_sharedresourceinfo()}: a named shared resource with its
+     * instances.
+     * NOTE(review): field order mirrors the C struct layout -- do not reorder.
+     */
+    public static class lsSharedResourceInfo extends Structure {
+        public static class ByReference extends lsSharedResourceInfo implements Structure.ByReference {}
+        public static class ByValue extends lsSharedResourceInfo implements Structure.ByValue {}
+        public lsSharedResourceInfo() {}
+        public lsSharedResourceInfo(Pointer p) { super(p); read(); }
+
+        public String resourceName;  // name of the shared resource
+        public int nInstances;       // number of entries in instances
+        public Pointer /* lsSharedResourceInstance.ByReference */ instances;
+    }
+
+
+
+    /**
+     * JNA mapping of the LSF {@code clusterConf} struct, as returned by
+     * {@code ls_readcluster()} / {@code ls_readcluster_ex()}: the parsed
+     * per-cluster configuration.
+     * NOTE(review): field order mirrors the C struct layout -- do not reorder.
+     */
+    public static class clusterConf extends Structure {
+        public static class ByReference extends clusterConf implements Structure.ByReference {}
+        public static class ByValue extends clusterConf implements Structure.ByValue {}
+        public clusterConf() {}
+        public clusterConf(Pointer p) { super(p); read(); }
+
+        public clusterInfo.ByReference clinfo;  // summary info for the cluster
+        public int numHosts;                    // number of entries in hosts
+        public Pointer /* hostInfo.ByReference */ hosts;
+        public int defaultFeatures;             // meaning unconfirmed -- TODO check lsf.h
+        public int numShareRes;                 // number of entries in shareRes
+        public Pointer /* lsSharedResourceInfo.ByReference */ shareRes;
+    }
+
+
+
+
+    /**
+     * JNA mapping of the LSF {@code pidInfo} struct: identifiers for one
+     * process belonging to a job.
+     * NOTE(review): field order mirrors the C struct layout -- do not reorder.
+     */
+    public static class pidInfo extends Structure {
+        public static class ByReference extends pidInfo implements Structure.ByReference {}
+        public static class ByValue extends pidInfo implements Structure.ByValue {}
+        public pidInfo() {}
+        public pidInfo(Pointer p) { super(p); read(); }
+
+        public int pid;    // process id
+        public int ppid;   // parent process id
+        public int pgid;   // process group id
+        public int jobid;  // owning LSF job id -- TODO confirm against lsf.h
+    }
+
+
+
+
+    /**
+     * JNA mapping of the LSF {@code jRusage} struct: a resource-usage snapshot
+     * for a job, including its process and process-group lists.
+     * NOTE(review): field order mirrors the C struct layout -- do not reorder.
+     */
+    public static class jRusage extends Structure {
+        public static class ByReference extends jRusage implements Structure.ByReference {}
+        public static class ByValue extends jRusage implements Structure.ByValue {}
+        public jRusage() {}
+        public jRusage(Pointer p) { super(p); read(); }
+
+        public int mem;    // memory usage -- units unconfirmed (likely KB); TODO check lsf.h
+        public int swap;   // swap usage -- units unconfirmed; TODO check lsf.h
+        public int utime;  // user CPU time -- units unconfirmed; TODO check lsf.h
+        public int stime;  // system CPU time -- units unconfirmed; TODO check lsf.h
+        public int npids;  // number of entries in pidInfo
+        public Pointer /* pidInfo.ByReference */ pidInfo;
+
+        public int npgids;             // number of entries in pgid
+        public IntByReference pgid;    // native int* of process group ids
+        public int nthreads;           // thread count
+    }
+
+
+
+
+    // Array-sizing constants for the license-usage structs below:
+    // lsfLicUsage.substitution is NUM_SUBS * LEN_SUBS bytes, and
+    // lsfLicUsage.hostInfo has NUM_CLASS_TYPE entries.
+    public static final int NUM_SUBS = 2;
+    public static final int LEN_SUBS = 64;
+    public static final int NUM_CLASS_TYPE = 3;
+
+    /**
+     * JNA mapping of the LSF {@code licUsage} struct: total and in-use counts
+     * per license type (array length {@code hostInfo.LSF_NUM_LIC_TYPE}).
+     * NOTE(review): field order mirrors the C struct layout -- do not reorder.
+     */
+    public static class licUsage extends Structure {
+        public static class ByReference extends licUsage implements Structure.ByReference {}
+        public static class ByValue extends licUsage implements Structure.ByValue {}
+        public licUsage() {}
+        public licUsage(Pointer p) { super(p); read(); }
+
+        public int licDisplayMask;    // bitmask of license types to display -- TODO confirm
+        public int usingDemoLicense;  // non-zero when a demo license is in use -- TODO confirm
+        public float[] total = new float[hostInfo.LSF_NUM_LIC_TYPE];
+        public float[] inUse = new float[hostInfo.LSF_NUM_LIC_TYPE];
+    }
+
+
+
+    /**
+     * JNA mapping of the LSF {@code hostClassInfo} struct: host/CPU/core
+     * counts for one host class.
+     * NOTE(review): field order mirrors the C struct layout -- do not reorder.
+     */
+    public static class hostClassInfo extends Structure {
+        public static class ByReference extends hostClassInfo implements Structure.ByReference {}
+        public static class ByValue extends hostClassInfo implements Structure.ByValue {}
+        public hostClassInfo() {}
+        public hostClassInfo(Pointer p) { super(p); read(); }
+
+        public int numHosts;
+        public int numCpus;
+        public int numCores;
+    }
+
+
+
+    /**
+     * JNA mapping of the LSF {@code lsfLicUsage} struct: license usage plus
+     * per-class host info for the cluster. The 2D C arrays are flattened to
+     * 1D Java arrays (see the JNA note below).
+     * NOTE(review): field order mirrors the C struct layout -- do not reorder.
+     */
+    public static class lsfLicUsage extends Structure {
+        public static class ByReference extends lsfLicUsage implements Structure.ByReference {}
+        public static class ByValue extends lsfLicUsage implements Structure.ByValue {}
+        public lsfLicUsage() {}
+        public lsfLicUsage(Pointer p) { super(p); read(); }
+
+        public licUsage licUsage;
+        public hostClassInfo[] hostInfo = new hostClassInfo[NUM_CLASS_TYPE];
+        // The current version of JNA's Structure.getNativeAlignment passes a "null" to
+        // Native.getNativeSize() when accessing the contents of a 2D array.
+        // Although the method is marked as protected, there are also multiple "TO DO"
+        // comments so when we upgrade don't want to have specialized code floating around.
+        public byte[] substitution = new byte[NUM_SUBS * LEN_SUBS];  // flattened char[NUM_SUBS][LEN_SUBS]
+        public byte[] cluster = new byte[MAXFILENAMELEN];            // cluster name buffer
+    }
+
+
+    /**
+     * JNA mapping of the LSF {@code param_entry} struct: one configuration
+     * parameter, carrying its key plus (optionally) a current and a default
+     * value, with presence indicated by bits in {@code flags}.
+     * NOTE(review): field order mirrors the C struct layout -- do not reorder.
+     */
+    public static class param_entry extends Structure {
+        public static class ByReference extends param_entry implements Structure.ByReference {}
+        public static class ByValue extends param_entry implements Structure.ByValue {}
+        public param_entry() {}
+        public param_entry(Pointer p) { super(p); read(); }
+
+        // Flag bits for 'flags'. HAS_PARAM_VALUE was missing 'final' (unlike
+        // HAS_PARAM_DEFAULT), leaving a mutable public "constant"; fixed here.
+        public static final int HAS_PARAM_VALUE = 0x001;
+        public static final int HAS_PARAM_DEFAULT = 0x002;
+
+        public int flags;             // bitwise OR of HAS_PARAM_* -- TODO confirm against lsf.h
+        public String key;            // parameter name
+        public String value;          // current value -- presumably valid only when HAS_PARAM_VALUE is set
+        public String default_value;  // default value -- presumably valid only when HAS_PARAM_DEFAULT is set
+    }
+
+
+
+    /**
+     * JNA mapping of the LSF {@code params_key_value_pair} struct: an array of
+     * {@code param_entry} records plus the daemon timestamp they came from.
+     * NOTE(review): field order mirrors the C struct layout -- do not reorder.
+     */
+    public static class params_key_value_pair extends Structure {
+        public static class ByReference extends params_key_value_pair implements Structure.ByReference {}
+        public static class ByValue extends params_key_value_pair implements Structure.ByValue {}
+        public params_key_value_pair() {}
+        public params_key_value_pair(Pointer p) { super(p); read(); }
+
+        public int num_params;     // number of entries in param
+        public String daemon_time; // daemon timestamp string -- format unconfirmed; TODO check lsf.h
+        public Pointer /* param_entry.ByReference */ param;
+    }
+
+
+
+
+    // LSF base-library error codes (the C 'lserrno' values from lsf.h).
+    // The numeric values are part of the wire/API contract -- do not renumber.
+    public static final int LSE_NO_ERR = 0;
+    public static final int LSE_BAD_XDR = 1;
+    public static final int LSE_MSG_SYS = 2;
+    public static final int LSE_BAD_ARGS = 3;
+    public static final int LSE_MASTR_UNKNW = 4;
+    public static final int LSE_LIM_DOWN = 5;
+    public static final int LSE_PROTOC_LIM = 6;
+    public static final int LSE_SOCK_SYS = 7;
+    public static final int LSE_ACCEPT_SYS = 8;
+    public static final int LSE_BAD_TASKF = 9;
+    public static final int LSE_NO_HOST = 10;
+    public static final int LSE_NO_ELHOST = 11;
+    public static final int LSE_TIME_OUT = 12;
+    public static final int LSE_NIOS_DOWN = 13;
+    public static final int LSE_LIM_DENIED = 14;
+    public static final int LSE_LIM_IGNORE = 15;
+    public static final int LSE_LIM_BADHOST = 16;
+    public static final int LSE_LIM_ALOCKED = 17;
+    public static final int LSE_LIM_NLOCKED = 18;
+    public static final int LSE_LIM_BADMOD = 19;
+    public static final int LSE_SIG_SYS = 20;
+    // LSE_BAD_EXP / LSE_UNKWN_RESNAME / LSE_UNKWN_RESVALUE are grouped by
+    // LSE_ISBAD_RESREQ() below as "bad resource requirement" errors.
+    public static final int LSE_BAD_EXP = 21;
+    public static final int LSE_NORCHILD = 22;
+    public static final int LSE_MALLOC = 23;
+    public static final int LSE_LSFCONF = 24;
+    public static final int LSE_BAD_ENV = 25;
+    public static final int LSE_LIM_NREG = 26;
+    public static final int LSE_RES_NREG = 27;
+    public static final int LSE_RES_NOMORECONN = 28;
+    public static final int LSE_BADUSER = 29;
+    public static final int LSE_RES_ROOTSECURE = 30;
+    public static final int LSE_RES_DENIED = 31;
+    public static final int LSE_BAD_OPCODE = 32;
+    public static final int LSE_PROTOC_RES = 33;
+    public static final int LSE_RES_CALLBACK = 34;
+    public static final int LSE_RES_NOMEM = 35;
+    public static final int LSE_RES_FATAL = 36;
+    public static final int LSE_RES_PTY = 37;
+    public static final int LSE_RES_SOCK = 38;
+    public static final int LSE_RES_FORK = 39;
+    public static final int LSE_NOMORE_SOCK = 40;
+    public static final int LSE_WDIR = 41;
+    public static final int LSE_LOSTCON = 42;
+    public static final int LSE_RES_INVCHILD = 43;
+    public static final int LSE_RES_KILL = 44;
+    public static final int LSE_PTYMODE = 45;
+    public static final int LSE_BAD_HOST = 46;
+    public static final int LSE_PROTOC_NIOS = 47;
+    public static final int LSE_WAIT_SYS = 48;
+    public static final int LSE_SETPARAM = 49;
+    public static final int LSE_RPIDLISTLEN = 50;
+    public static final int LSE_BAD_CLUSTER = 51;
+    public static final int LSE_RES_VERSION = 52;
+    public static final int LSE_EXECV_SYS = 53;
+    public static final int LSE_RES_DIR = 54;
+    public static final int LSE_RES_DIRW = 55;
+    public static final int LSE_BAD_SERVID = 56;
+    public static final int LSE_NLSF_HOST = 57;
+    public static final int LSE_UNKWN_RESNAME = 58;
+    public static final int LSE_UNKWN_RESVALUE = 59;
+    public static final int LSE_TASKEXIST = 60;
+    public static final int LSE_BAD_TID = 61;
+    public static final int LSE_TOOMANYTASK = 62;
+    public static final int LSE_LIMIT_SYS = 63;
+    public static final int LSE_BAD_NAMELIST = 64;
+    public static final int LSE_NO_LICENSE = 65;
+    public static final int LSE_LIM_NOMEM = 66;
+    public static final int LSE_NIO_INIT = 67;
+    public static final int LSE_CONF_SYNTAX = 68;
+    public static final int LSE_FILE_SYS = 69;
+    public static final int LSE_CONN_SYS = 70;
+    public static final int LSE_SELECT_SYS = 71;
+    public static final int LSE_EOF = 72;
+    public static final int LSE_ACCT_FORMAT = 73;
+    public static final int LSE_BAD_TIME = 74;
+    public static final int LSE_FORK = 75;
+    public static final int LSE_PIPE = 76;
+    public static final int LSE_ESUB = 77;
+    public static final int LSE_DCE_EXEC = 78;
+    public static final int LSE_EAUTH = 79;
+    public static final int LSE_NO_FILE = 80;
+    public static final int LSE_NO_CHAN = 81;
+    public static final int LSE_BAD_CHAN = 82;
+    public static final int LSE_INTERNAL = 83;
+    public static final int LSE_PROTOCOL = 84;
+    public static final int LSE_THRD_SYS = 85;
+    public static final int LSE_MISC_SYS = 86;
+    public static final int LSE_LOGON_FAIL = 87;
+    public static final int LSE_RES_RUSAGE = 88;
+    public static final int LSE_NO_RESOURCE = 89;
+    public static final int LSE_BAD_RESOURCE = 90;
+    public static final int LSE_RES_PARENT = 91;
+    public static final int LSE_NO_PASSWD = 92;
+    public static final int LSE_SUDOERS_CONF = 93;
+    public static final int LSE_SUDOERS_ROOT = 94;
+    public static final int LSE_I18N_SETLC = 95;
+    public static final int LSE_I18N_CATOPEN = 96;
+    public static final int LSE_I18N_NOMEM = 97;
+    public static final int LSE_NO_MEM = 98;
+    public static final int LSE_REGISTRY_SYS = 99;
+    public static final int LSE_FILE_CLOSE = 100;
+    public static final int LSE_LIMCONF_NOTREADY = 101;
+    public static final int LSE_MASTER_LIM_DOWN = 102;
+    public static final int LSE_MLS_INVALID = 103;
+    public static final int LSE_MLS_CLEARANCE = 104;
+    public static final int LSE_MLS_RHOST = 105;
+    public static final int LSE_MLS_DOMINATE = 106;
+    public static final int LSE_NO_CAL = 107;
+    public static final int LSE_NO_NETWORK = 108;
+    public static final int LSE_GETCONF_FAILED = 109;
+    public static final int LSE_TSSINIT = 110;
+    public static final int LSE_DYNM_DENIED = 111;
+    public static final int LSE_LIC_OVERUSE = 112;
+    public static final int LSE_EGOCONF = 113;
+    public static final int LSE_BAD_EGO_ENV = 114;
+    public static final int LSE_EGO_CONF_SYNTAX = 115;
+    public static final int LSE_EGO_GETCONF_FAILED = 116;
+    public static final int LSE_NS_LOOKUP = 117;
+    public static final int LSE_BAD_PASSWD = 118;
+
+    public static final int LSE_UNKWN_USER = 119;
+    public static final int LSE_NOT_WINHOST = 120;
+    public static final int LSE_NOT_MASTERCAND = 121;
+    public static final int LSE_HOST_UNAUTH = 122;
+    public static final int LSE_UNRESOLVALBE_HOST = 123;  // NOTE(review): typo for "UNRESOLVABLE" in the original C header; kept for compatibility
+    public static final int LSE_RESOURCE_NOT_CONSUMABLE = 124;
+    public static final int LSE_SHUTDOWN = 125;
+    public static final int LSE_BAD_SYNTAX = 126;
+    // Number of defined error codes (one past the last code).
+    public static final int LSE_NERR = 127;
+
+
+    public static boolean LSE_ISBAD_RESREQ(int s) {
+        return (((s) == LSE_BAD_EXP) || ((s) == LSE_UNKWN_RESNAME) || ((s) == LSE_UNKWN_RESVALUE));
+    }
+
+    public static boolean LSE_SYSCALL(int s) {
+        return (((s) == LSE_SELECT_SYS) || ((s) == LSE_CONN_SYS) || ((s) == LSE_FILE_SYS) || ((s) == LSE_MSG_SYS) || ((s) == LSE_SOCK_SYS) || ((s) == LSE_ACCEPT_SYS) || ((s) == LSE_SIG_SYS) || ((s) == LSE_WAIT_SYS) || ((s) == LSE_EXECV_SYS) || ((s) == LSE_LIMIT_SYS) || ((s) == LSE_PIPE) || ((s) == LSE_ESUB) || ((s) == LSE_REGISTRY_SYS) || ((s) == LSE_MISC_SYS));
+    }
+
+
+    /*
+    public static void TIMEVAL (int level, int func, int val)  {
+        if (timinglevel > level) {
+            timeval before, after;
+            timezone tz;
+            gettimeofday(&before, &tz);
+            func;
+            gettimeofday(&after, &tz);
+            val = (int)((after.tv_sec - before.tv_sec)*1000 +  (after.tv_usec-before.tv_usec)/1000);
+        } else {
+            func;
+            val = 0;
+        }
+    }
+    */
+
+    /**
+     * JNA mapping of the LSF {@code ls_timeval} struct: real/user/system time
+     * values used by the (here commented-out) timing macros.
+     * NOTE(review): field order mirrors the C struct layout -- do not reorder.
+     */
+    public static class ls_timeval extends Structure {
+        public static class ByReference extends ls_timeval implements Structure.ByReference {}
+        public static class ByValue extends ls_timeval implements Structure.ByValue {}
+        public ls_timeval() {}
+        public ls_timeval(Pointer p) { super(p); read(); }
+
+        public float rtime;  // real (wall-clock) time -- units presumably ms per the macro comments; TODO confirm
+        public float utime;  // user CPU time
+        public float stime;  // system CPU time
+    }
+
+
+
+    /*
+    public static void LS_TIMEVAL_ZERO(ls_timeval tv) {                            tv.rtime = 0.0;          tv.utime = 0.0;          tv.stime = 0.0;      }
+
+    public static int LS_TIMEVAL_INC (ls_timeval tv, int newtv) {                                  tv.rtime += newtv.rtime;       tv.utime += newtv.utime;       tv.stime += newtv.stime;      }
+
+    public static void LOG_TIME_MSG(int level, String name, ls_timeval tv, int count, String msg) { if (timinglevel > level) {  ls_syslog(LOG_INFO, "L%d %s rtime %.2f ms, utime %.2f ms, stime %.2f ms, count %d %s",  level, name, tv.rtime, tv.utime, tv.stime, count, msg);  } }; }
+
+    public static void TIMEIT (int level, String func, String name) {
+        if  (timinglevel > level && clockticks > 0) {
+            timeval _before, _after;
+            timezone _tz;
+            tms _buf, _buf2;
+            gettimeofday(&_before, &_tz);
+            times(&_buf);
+            func;
+            gettimeofday(&_after, &_tz);
+            times(&_buf2);
+            ls_syslog(LOG_INFO,"L%d %s rtime %.2f ms, utime %.2f ms, stime %.2f ms",  level,  name,  (_after.tv_sec - _before.tv_sec)*1000.0 +  (_after.tv_usec - _before.tv_usec)/1000.0,  1000.0*((_buf2.tms_utime - _buf.tms_utime)/clockticks),  1000.0*((_buf2.tms_stime - _buf.tms_stime)/clockticks));
+        } else {
+            func;
+        }
+    }
+
+    public static int TIMEVAL2 (int level, String func, ls_timeval tv) {
+        if (timinglevel > level && clockticks > 0) {
+            timeval _before, _after;
+            timezone _tz;
+            tms _buf, _buf2;
+            gettimeofday(&_before, &_tz);
+            times(&_buf);
+            func;
+            gettimeofday(&_after, &_tz);
+            times(&_buf2);
+            tv.rtime = (_after.tv_sec - _before.tv_sec)*1000.0 +  (_after.tv_usec - _before.tv_usec)/1000.0;
+            tv.utime = 1000.0*((_buf2.tms_utime - _buf.tms_utime)/clockticks);
+            tv.stime = 1000.0*((_buf2.tms_stime - _buf.tms_stime)/clockticks);
+        } else {
+            func;
+            tv.rtime = 0.0;
+            tv.utime = 0.0;
+            tv.stime = 0.0;
+        }
+    }
+
+    public static int TIMEIT_START_BLOCK (int level) {
+        tms _buf, _buf2;
+        timeval _before, _after;
+        timezone _tz;
+        if  (timinglevel > level) {
+            gettimeofday(&_before, &_tz);
+            times(&_buf);
+        }
+    }
+
+    public static int TIMEIT_END_BLOCK (int level, String name)  {
+        if  (timinglevel > level) {
+            float rt, ut, st;
+            gettimeofday(&_after, &_tz);
+            times(&_buf2);
+            rt = (_after.tv_sec - _before.tv_sec)*1000.0 +  (_after.tv_usec - _before.tv_usec)/1000.0;
+            ut = 1000.0*((_buf2.tms_utime - _buf.tms_utime)/clockticks);
+            st = 1000.0*((_buf2.tms_stime - _buf.tms_stime)/clockticks);
+            ls_syslog(LOG_INFO,"L%d %s rtime %.2f ms, utime %.2f ms, stime %.2f ms",  level, name, rt, ut, st);
+        }
+    }
+    */
+
+    // Log-class bit flags (the C 'logclass' mask from lsf.h).
+    // Note the deliberate aliases sharing one bit value: LC_LICENCE/LC_LICENSE,
+    // LC_PIM/LC_MEMORY, LC_LOADINDX/LC_RESOURCE, LC_XDRVERSION/LC_FLEX.
+    public static final int LC_SCHED = 0x00000001;
+    public static final int LC_EXEC = 0x00000002;
+    public static final int LC_TRACE = 0x00000004;
+    public static final int LC_COMM = 0x00000008;
+    public static final int LC_XDR = 0x00000010;
+    public static final int LC_CHKPNT = 0x00000020;
+    public static final int LC_LICENCE = 0x00000040;
+    public static final int LC_LICENSE = 0x00000040;
+    public static final int LC_FILE = 0x00000080;
+    public static final int LC_AFS = 0x00000100;
+    public static final int LC_AUTH = 0x00000200;
+    public static final int LC_HANG = 0x00000400;
+    public static final int LC_MULTI = 0x00000800;
+    public static final int LC_SIGNAL = 0x00001000;
+    public static final int LC_DCE = 0x00002000;
+    public static final int LC_PIM = 0x00004000;
+    public static final int LC_MEMORY = 0x00004000;
+    public static final int LC_SYS = 0x00008000;
+    public static final int LC_JLIMIT = 0x00010000;
+    public static final int LC_FAIR = 0x00020000;
+    public static final int LC_PREEMPT = 0x00040000;
+    public static final int LC_PEND = 0x00080000;
+    public static final int LC_EEVENTD = 0x00100000;
+    public static final int LC_LOADINDX = 0x00200000;
+    public static final int LC_RESOURCE = 0x00200000;
+
+    public static final int LC_JGRP = 0x00400000;
+    public static final int LC_JARRAY = 0x00800000;
+    public static final int LC_MPI = 0x01000000;
+    public static final int LC_ELIM = 0x02000000;
+    public static final int LC_M_LOG = 0x04000000;
+    public static final int LC_PERFM = 0x08000000;
+    public static final int LC_DLOG = 0x10000000;
+    public static final int LC_HPC = 0x20000000;
+    public static final int LC_LICSCHED = 0x40000000;
+
+    // 0x80000000 is Integer.MIN_VALUE in Java's signed int -- intentional: it is the sign bit used as a flag.
+    public static final int LC_XDRVERSION = 0x80000000;
+    public static final int LC_FLEX = 0x80000000;
+
+    public static final int LC_ADVRSV = LC_DLOG;   // alias: advance reservation logging shares LC_DLOG's bit
+    public static final int LC_RESREQ = LC_M_LOG;  // alias: resreq logging shares LC_M_LOG's bit
+
+
+    // Extra syslog-style debug verbosity levels, offset from LOG_DEBUG
+    // (LOG_DEBUG is defined elsewhere in this file).
+    public static final int LOG_DEBUG1 = LOG_DEBUG + 1;
+    public static final int LOG_DEBUG2 = LOG_DEBUG + 2;
+    public static final int LOG_DEBUG3 = LOG_DEBUG + 3;
+
+
+    // Event type codes for ls_postevent()/ls_postmultievent() (see declarations below).
+    public static final int LSF_EVENT_LIM_DOWN = 1;
+    public static final int LSF_EVENT_RES_DOWN = 2;
+    public static final int LSF_EVENT_SBD_DOWN = 3;
+    public static final int LSF_EVENT_HOST_UNLIC = 4;
+    public static final int LSF_EVENT_MASTER_ELECT = 5;
+    public static final int LSF_EVENT_MASTER_RESIGN = 6;
+    public static final int LSF_EVENT_MBD_UP = 7;
+    public static final int LSF_EVENT_MBD_DOWN = 8;
+    public static final int LSF_EVENT_MBD_RECONFIG = 9;
+    public static final int LSF_EVENT_WORKDIR_FULL = 10;
+    public static final int LSF_EVENT_HOST_OPENED = 11;
+    public static final int LSF_EVENT_HOST_CLOSED = 12;
+    public static final int LSF_EVENT_QUEUE_OPENED = 13;
+    public static final int LSF_EVENT_QUEUE_CLOSED = 14;
+    public static final int LSF_EVENT_SCH_DOWN = 15;
+    public static final int LSF_EVENT_LIC_OVERUSE = 16;
+
+    // NIOS requeue exit code -- TODO confirm semantics against lsf.h
+    public static final int LSF_NIOS_REQUEUE = 127;
+
+
+    /*
+    public int lserrno;
+    public int masterLimDown;
+    public int ls_nerr;
+    public String[] ls_errmsg;
+    public int logclass;
+    public int timinglevel;
+    public int clockticks;
+
+
+    public int lsf_lim_version;
+    */
+
+
+    // --- Native bindings to liblsf: configuration, task placement, load
+    // --- queries, remote/local task lists, and cluster queries. Each maps the
+    // --- LSF C function of the same name; declarations marked "Not in liblsf"
+    // --- are intentionally omitted because the symbol is absent from the library.
+    public static native int ls_readconfenv(config_param config_param1, String string);
+
+
+    public static native Pointer ls_placereq(String resreq, IntByReference numhosts, int options, String fromhost);
+
+
+    public static native Pointer ls_placeofhosts(String resreq, IntByReference numhosts, int options, String fromhost, Pointer hostlist, int listsize);
+
+    // NOTE: Not in liblsf
+    //public static native Pointer ls_placeoftype(String resreq, IntByReference numhosts, int options, String fromhost, String hosttype);
+
+
+    public static native hostLoad.ByReference ls_load(String resreq, IntByReference numhosts, int options, String fromhost);
+
+
+    public static native hostLoad.ByReference ls_loadofhosts(String resreq, IntByReference numhosts, int options, String fromhost, Pointer hostlist, int listsize);
+
+    // NOTE: Not in liblsf
+    //public static native hostLoad.ByReference ls_loadoftype(String resreq, IntByReference numhosts, int options, String fromhost, String hosttype);
+
+
+    public static native hostLoad.ByReference ls_loadinfo(String resreq, IntByReference numhosts, int options, String fromhost, Pointer hostlist, int listsize, Pointer indxnamelist);
+
+
+    public static native int ls_loadadj(String resreq, placeInfo hostlist, int listsize);
+
+
+    public static native int ls_eligible(String task, String resreqstr, byte mode);
+
+
+    public static native String ls_resreq(String task);
+
+
+    // Remote/local task list maintenance.
+    public static native int ls_insertrtask(String task);
+
+
+    public static native int ls_insertltask(String task);
+
+
+    public static native int ls_deletertask(String task);
+
+
+    public static native int ls_deleteltask(String task);
+
+
+    public static native int ls_listrtask(Pointer taskList, int sortflag);
+
+
+    public static native int ls_listltask(Pointer taskList, int sortflag);
+
+
+    public static native Pointer ls_findmyconnections();
+
+
+    public static native int ls_isconnected(String hostName);
+
+    // NOTE: Not in liblsf
+    //public static native int ls_lostconnection();
+
+
+    // Cluster/master queries.
+    public static native String ls_getclustername();
+
+
+    public static native clusterInfo.ByReference ls_clusterinfo(String string1, IntByReference int1, Pointer stringArray1, int int2, int int3);
+
+
+    public static native lsSharedResourceInfo.ByReference ls_sharedresourceinfo(Pointer stringArray1, IntByReference int1, String string1, int int2);
+
+
+    public static native String ls_getmastername();
+
+
+    // --- Native bindings to liblsf: host information, host locking, LIM
+    // --- control, configuration reading, logging, and rusage conversion.
+    public static native String ls_getmyhostname();
+
+
+    public static native String ls_getmyhostname2();
+
+
+    public static native hostInfo.ByReference ls_gethostinfo(String string1, IntByReference int1, Pointer stringArray1, int int2, int int3);
+
+    public static native String ls_getISVmode();
+
+    public static native int ls_isshutdown();
+
+    public static native int ls_isPartialLicensingEnabled();
+
+    /* NOTE: ls_getLicenseUsage() is not supported by LSF v8.x
+    *  Wei Xing, ICR
+    */
+//    public static native lsfLicUsage.ByReference ls_getLicenseUsage();
+
+    public static native lsInfo.ByReference ls_info();
+
+    public static native Pointer ls_indexnames(lsInfo lsInfo1);
+
+    public static native int ls_isclustername(String string);
+
+
+    public static native String ls_gethosttype(String hostname);
+
+
+    public static native FloatByReference ls_getmodelfactor(String modelname);
+
+
+    public static native FloatByReference ls_gethostfactor(String hostname);
+
+
+    public static native String ls_gethostmodel(String hostname);
+
+    // NOTE: Not in liblsf
+    //public static native IntByReference ls_gethostrespriority(String hostname);
+
+
+    public static native int ls_lockhost(NativeLong duration);
+
+
+    public static native int ls_unlockhost();
+
+
+    public static native int ls_limcontrol(String hostname, int opCode);
+
+    public static native void ls_remtty(int ind, int enableIntSus);
+
+    public static native void ls_loctty(int ind);
+
+
+    // Error reporting for the last failed liblsf call.
+    public static native String ls_sysmsg();
+
+
+    public static native void ls_perror(String usrMsg);
+
+
+    // Configuration file reading (results freed via ls_freeconf where applicable).
+    public static native lsConf.ByReference ls_getconf(String string);
+
+    public static native void ls_freeconf(lsConf lsConf1);
+
+    public static native sharedConf.ByReference ls_readshared(String string1);
+
+    public static native clusterConf.ByReference ls_readcluster(String string1, lsInfo lsInfo1);
+
+    public static native clusterConf.ByReference ls_readcluster_ex(String string1, lsInfo lsInfo1, int int1);
+
+
+    public static native int _ls_initdebug(String appName);
+
+    // Logging: fmt is a C printf-style format; args is a raw native va-style pointer.
+    public static native void ls_syslog(int level, String fmt, Pointer args);
+
+    public static native void ls_errlog(Pointer fp, String fmt, Pointer args);
+
+    // NOTE: va_list is too compiler specific.  Skipping this function.
+    //public static native void  ls_verrlog (Pointer fp, String fmt, va_list ap);
+
+    public static native int ls_fdbusy(int fd);
+
+
+    public static native String ls_getmnthost(String fn);
+
+    public static native int ls_servavail(int int1, int int2);
+
+    public static native int ls_getpriority(IntByReference priority);
+
+    public static native int ls_setpriority(int newPriority);
+
+    // Conversion between UNIX rusage and LSF's lsfRusage representation.
+    public static native void ls_ruunix2lsf(rusage rusage, lsfRusage lsfRusage);
+
+    public static native void ls_rulsf2unix(lsfRusage lsfRusage, rusage rusage);
+
+    public static native void cleanLsfRusage(lsfRusage lsfRusage1);
+
+    public static native void cleanRusage(rusage rusage1);
+
+
+    // NOTE: Not in liblsf
+    //public static native int getBEtime(String string1, byte byte1, NativeLongByReference long1);
+
+
+    // Event posting (event codes: LSF_EVENT_* constants above).
+    public static native int ls_postevent(int int1, String string1, Pointer stringArray1, int int2);
+
+    public static native int ls_postmultievent(int int1, String string1, Pointer stringArray1, int int2, int int3);
+
+    /**
+     * JNA mapping of the LSF {@code extResInfo} struct: the definition of an
+     * external (ELIM-reported) resource.
+     * NOTE(review): field order mirrors the C struct layout -- do not reorder.
+     */
+    public static class extResInfo extends Structure {
+        public static class ByReference extends extResInfo implements Structure.ByReference {}
+        public static class ByValue extends extResInfo implements Structure.ByValue {}
+        public extResInfo() {}
+        public extResInfo(Pointer p) { super(p); read(); }
+
+        public String name;        // resource name
+        public String type;        // resource type -- encoding unconfirmed; TODO check lsf.h
+        public String interval;    // update interval -- stored as string per the C struct
+        public String increasing;  // whether larger values mean more loaded -- TODO confirm
+        public String des;         // description
+    }
+
+
+
+
+    // NOTE: Not in liblsf
+    //public static native int lim_vcl_get_eres_version();
+
+    // NOTE: Not in liblsf
+    //public static native extResInfo.ByReference lim_vcl_get_eres_def(String string1);
+
+    // NOTE: Not in liblsf
+    //public static native String lim_vcl_get_eres_loc(String string1);
+
+    // NOTE: Not in liblsf
+    //public static native String lim_vcl_get_eres_val(String string1);
+
+
+    public static int isspace(byte c) {
+        return ((c == 0x20 || c == 0x09 || c == 0x0a || c == 0x0b || c == 0x0c || c == 0x0d) ? 8 : 0);
+    }
+
+    // Product/version identification constants for LSF 7.0 update 6
+    // (LSF_XDR_VERSION7_0_EP6, _MINOR_STR_ and _DATE_STR_ are defined
+    // elsewhere in this file).
+    public static final int LSF_VERSION = LSF_XDR_VERSION7_0_EP6;
+    public static final String LSF_CURRENT_VERSION = "7.06";
+
+
+    public static final String LSF_PRODUCT_COPYRIGHT_STR = "Copyright 1992-2009 Platform Computing Corp.";
+
+
+    public static final String LSF_NAME_STR = "Platform LSF";
+    public static final String LSF_IDENTIFIER_STR = "";
+    public static final String LSF_PRODUCT_NAME_STR = LSF_NAME_STR + LSF_IDENTIFIER_STR;
+
+
+    public static final String LSF_PRODUCT_COMMENT_STR = "";
+
+
+    public static final String LSF_PRODUCT_BUILD_STR = "";
+
+
+    public static final String LSF_PRODUCT_BUILD_DATE_STR = "";
+
+
+    // Major.minor.maintenance version, both as ints and as strings.
+    public static final int LSF_PRODUCT_MAJOR_VERSION = 7;
+    public static final int LSF_PRODUCT_MINOR_VERSION = 0;
+    public static final int LSF_PRODUCT_MAINTAIN_VERSION = 6;
+
+    public static final String LSF_PRODUCT_MAJOR_VERSION_STR = "7";
+    public static final String LSF_PRODUCT_MINOR_VERSION_STR = "0";
+    public static final String LSF_PRODUCT_MAINTAIN_VERSION_STR = "6";
+
+    public static final String LSF_PRODUCT_VERSION_STR = LSF_PRODUCT_MAJOR_VERSION_STR + "." + LSF_PRODUCT_MINOR_VERSION_STR + "." + LSF_PRODUCT_MAINTAIN_VERSION_STR;
+    public static final String LSF_FILE_VERSION_STR = LSF_PRODUCT_MAJOR_VERSION_STR + "." + LSF_PRODUCT_MINOR_VERSION_STR + "." + LSF_PRODUCT_MAINTAIN_VERSION_STR;
+
+
+    public static final String _VERSION_STR_LSID_ = "Platform LSF HPC 7";
+    public static final String _LSID_VERSION_ = (_VERSION_STR_LSID_ + " Update " + _MINOR_STR_ + ", " + _DATE_STR_ + "\nCopyright 1992-2009 Platform Computing Corporation\n");
+
+
+    /* Removing since the ls_nio functions which use fd_set, etc. are not in liblsf.
+
+    public static final int NIO_STDIN_ON = 0x01;
+    public static final int NIO_STDIN_OFF = 0x02;
+    public static final int NIO_TAGSTDOUT_ON = 0x03;
+    public static final int NIO_TAGSTDOUT_OFF = 0x04;
+
+    public static final int NIO_TASK_STDINON = 0x01;
+    public static final int NIO_TASK_STDINOFF = 0x02;
+    public static final int NIO_TASK_ALL = 0x03;
+    public static final int NIO_TASK_CONNECTED = 0x04;
+
+    public static interface nioType {
+          public static final int NIO_STATUS = 0;
+          public static final int NIO_STDOUT = 1;
+          public static final int NIO_EOF = 2;
+          public static final int NIO_IOERR = 3;
+          public static final int NIO_REQUEUE = 4;
+          public static final int NIO_STDERR = 5;
+    }
+
+
+
+    public static class nioEvent extends Structure {
+        public static class ByReference extends nioEvent implements Structure.ByReference {}
+        public static class ByValue extends nioEvent implements Structure.ByValue {}
+        public nioEvent() {}
+        public nioEvent(Pointer p) { super(p); read(); }
+
+        public int tid;
+        public *//*nioType*//* int type;
+        public int status;
+    }
+
+
+
+    public static class nioInfo extends Structure {
+        public static class ByReference extends nioInfo implements Structure.ByReference {}
+        public static class ByValue extends nioInfo implements Structure.ByValue {}
+        public nioInfo() {}
+        public nioInfo(Pointer p) { super(p); read(); }
+
+        public int num;
+        public Pointer / * nioEvent.ByReference * / ioTask;
+    }
+
+
+    public static final int FD_SETSIZE = 64;
+
+    public static class fd_set extends Structure {
+        public static class ByReference extends fd_set implements Structure.ByReference {}
+        public static class ByValue extends fd_set implements Structure.ByValue {}
+        public fd_set() {}
+        public fd_set(Pointer p) { super(p); read(); }
+
+        public int count;
+        public int[] fd = new int[FD_SETSIZE];
+    }
+    */
+
+    public static native int ls_initdebug(String appName);
+
+    // The ls_nio* family below is commented out because the symbols are not
+    // exported by liblsf (and several take fd_set/timeval, which do not map
+    // cleanly through JNA).
+    // NOTE: Not in liblsf
+    //public static native int ls_nioinit(int sock);
+
+    // NOTE: Not in liblsf
+    //public static native int ls_nioselect(int int1, fd_set fd_set1, fd_set fd_set2, fd_set fd_set3, Pointer nioInfoArray1, timeval timeval1);
+
+    // NOTE: Not in liblsf
+    //public static native int ls_nioctl(int int1, int int2);
+
+    // NOTE: Not in liblsf
+    //public static native int ls_nionewtask(int int1, int int2);
+
+    // NOTE: Not in liblsf
+    //public static native int ls_nioremovetask(int int1);
+
+    // NOTE: Not in liblsf
+    //public static native int ls_niowrite(String string1, int int1);
+
+    // NOTE: Not in liblsf
+    //public static native int ls_nioclose();
+
+    // NOTE: Not in liblsf
+    //public static native int ls_nioread(int int1, String string1, int int2);
+
+    // NOTE: Not in liblsf
+    //public static native int ls_niotasks(int int1, IntByReference int2, int int3);
+
+    // NOTE: Not in liblsf
+    //public static native int ls_niostatus(int int1, IntByReference int2, rusage rusage1);
+
+    // NOTE: Not in liblsf
+    //public static native int ls_niokill(int int1);
+
+    // NOTE: Not in liblsf
+    //public static native int ls_niosetdebug(int int2);
+
+    // NOTE: Not in liblsf
+    //public static native int ls_niodump(int int1, int int2, int int3, String string1);
+
+
+    // NOTE(review): non-static instance field in a binding class otherwise made
+    // of statics -- mirrors the C global 'lsf_res_version'; confirm it is read anywhere.
+    public int lsf_res_version;
+
+
+    // Initializes the remote execution (RES) subsystem.
+    public static native int ls_initrex(int a, int b);
+
    /**
     * Convenience wrapper kept under the historical LSF name {@code ls_init}.
     *
     * Simply delegates to {@link #ls_initrex(int, int)} with both arguments
     * passed through unchanged; the return value is whatever the native
     * ls_initrex call returns. (Presumably initializes the LSF remote
     * execution library — semantics come from liblsf; confirm against the
     * LSF API documentation.)
     *
     * @param a first argument, forwarded to ls_initrex unchanged
     * @param b second argument, forwarded to ls_initrex unchanged
     * @return the result of the native ls_initrex call
     */
    public static int ls_init(int a, int b) {
        return ls_initrex(a, b);
    }
+
+
+    public static native int ls_donerex();
+
+    public static native int ls_niossync(int int1);
+
+
+    public static native int ls_setstdin(int on, IntByReference rpidlist, int len);
+
+
+    public static native int ls_getstdin(int on, IntByReference rpidlist, int maxlen);
+
+    public static native int ls_setstdout(int on, String format);
+
+
+    public static native int ls_stdinmode(int onoff);
+
+
+    public static native int ls_stoprex();
+
+
+    public static native int ls_chdir(String string1, String string2);
+
+
+    public static native int ls_connect(String string1);
+
+
+    public static native int ls_rkill(int int1, int int2);
+
+
+    public static native int ls_rsetenv(String host, Pointer env);
+
+    public static native int ls_rsetenv_async(String host, Pointer env);
+
+
+    public static native int ls_rescontrol(String host, int opcode, int options);
+
+
+    public static native lsfAcctRec.ByReference ls_getacctrec(Pointer pointer1, IntByReference int1);
+
+    public static native int ls_putacctrec(Pointer pointer1, lsfAcctRec lsfAcctRec1);
+
+
+    // NOTE: No idea what resLogRecord is.
+    //public static native resLogRecord.ByReference ls_readrexlog (Pointer );
+
+
+    public static native int ls_rexecv(String string1, Pointer string2, int int1);
+
+
+    public static native int ls_rexecve(String string1, Pointer stringArray1, int int1, Pointer stringArray2);
+
+    public static native int ls_rexecv2(String string1, Pointer stringArray1, int int1);
+
+    public static native int ls_startserver(String string1, Pointer stringArray1, int int1);
+
+
+    public static native int ls_rtask(String string1, Pointer stringArray1, int int1);
+
+
+    public static native int ls_rtaske(String string1, Pointer stringArray1, int int1, Pointer stringArray2);
+
+    public static native int ls_rtask2(String string1, Pointer stringArray1, int int1, Pointer stringArray2);
+
+
+    public static native int ls_rwait(IntByReference int1, int int2, rusage rusage1);
+
+
+    public static native int ls_rwaittid(int int1, IntByReference int2, int int3, rusage rusage1);
+
+
+    public static native int ls_conntaskport(int tid);
+
+
+    public static native int ls_ropen(String host, String fn, int flags, int mode);
+
+
+    public static native int ls_rclose(int rfd);
+
+
+    public static native int ls_rwrite(int rfd, String buf, int len);
+
+
+    public static native int ls_rread(int rfd, String buf, int len);
+
+
+    public static native NativeLong ls_rlseek(int rfd, NativeLong offset, int whence);
+
+
+    public static native int ls_runlink(String host, String fn);
+
+    public static native int ls_rfstat(int rfd, Pointer buf);
+
+    public static native int ls_rstat(String host, String fn, Pointer buf);
+
+
+    public static native String ls_rgetmnthost(String host, String fn);
+
+
+    public static native int ls_rfcontrol(int command, int arg);
+
+
+    public static native int ls_rfterminate(String host);
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/locusiterator/AlignmentStateMachine.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/locusiterator/AlignmentStateMachine.java
new file mode 100644
index 0000000..15b0e01
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/locusiterator/AlignmentStateMachine.java
@@ -0,0 +1,372 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.locusiterator;
+
+import com.google.java.contract.Ensures;
+import com.google.java.contract.Invariant;
+import com.google.java.contract.Requires;
+import htsjdk.samtools.Cigar;
+import htsjdk.samtools.CigarElement;
+import htsjdk.samtools.CigarOperator;
+import org.broadinstitute.gatk.utils.GenomeLoc;
+import org.broadinstitute.gatk.utils.GenomeLocParser;
+import org.broadinstitute.gatk.utils.exceptions.UserException;
+import org.broadinstitute.gatk.utils.pileup.PileupElement;
+import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
+
/**
 * Steps a single read along its alignment to the genome
 *
 * The logical model for generating extended events is as follows: the "record state"
 * implements the traversal along the reference; thus stepForwardOnGenome() returns
 * on every and only on actual reference bases. This can be a (mis)match or a deletion
 * (in the latter case, we still return on every individual reference base the deletion spans).
 *
 * User: depristo
 * Date: 1/5/13
 * Time: 1:08 PM
 */
@Invariant({
        "nCigarElements >= 0",
        "cigar != null",
        "read != null",
        "currentCigarElementOffset >= -1",
        "currentCigarElementOffset <= nCigarElements"
})
public class AlignmentStateMachine {

    public static final String MAKE_PILEUP_EDGE_ERROR = "Cannot make a pileup element from an edge alignment state";
    /**
     * Our read
     */
    private final GATKSAMRecord read;
    // The read's cigar, cached at construction; never null per the constructor's @Requires
    private final Cigar cigar;
    // Number of elements in cigar, cached so the hot loop in stepForwardOnGenome avoids repeated calls
    private final int nCigarElements;
    // Index of the current cigar element within the read's cigar; -1 means left edge (before the alignment)
    private int currentCigarElementOffset = -1;

    /**
     * how far are we offset from the start of the read bases?
     */
    private int readOffset;

    /**
     * how far are we offset from the alignment start on the genome?
     */
    private int genomeOffset;

    /**
     * Our cigar element
     */
    private CigarElement currentElement;

    /**
     * how far are we into our cigarElement?
     */
    private int offsetIntoCurrentCigarElement;

    @Requires({"read != null", "read.getAlignmentStart() != -1", "read.getCigar() != null"})
    public AlignmentStateMachine(final GATKSAMRecord read) {
        this.read = read;
        this.cigar = read.getCigar();
        this.nCigarElements = cigar.numCigarElements();
        initializeAsLeftEdge();
    }

    /**
     * Initialize the state variables to put this machine one bp before the
     * start of the alignment, so that a call to stepForwardOnGenome() will advance
     * us to the first proper location
     */
    @Ensures("isLeftEdge()")
    private void initializeAsLeftEdge() {
        // all three offsets are -1 on the left edge; there is no current cigar element yet
        readOffset = offsetIntoCurrentCigarElement = genomeOffset = -1;
        currentElement = null;
    }

    /**
     * Get the read we are aligning to the genome
     * @return a non-null GATKSAMRecord
     */
    @Ensures("result != null")
    public GATKSAMRecord getRead() {
        return read;
    }

    /**
     * Get the reference index of the underlying read
     *
     * @return the reference index of the read
     */
    @Ensures("result == getRead().getReferenceIndex()")
    public int getReferenceIndex() {
        return getRead().getReferenceIndex();
    }

    /**
     * Is this the left edge state?  I.e., one that is before or after the current read?
     * @return true if this state is an edge state, false otherwise
     */
    public boolean isLeftEdge() {
        return readOffset == -1;
    }

    /**
     * Are we on the right edge?  I.e., is the current state off the right of the alignment?
     * @return true if off the right edge, false if otherwise
     */
    public boolean isRightEdge() {
        // stepForwardOnGenome sets readOffset to the read length when it runs off the end
        return readOffset == read.getReadLength();
    }

    /**
     * What is our current offset in the read's bases that aligns us with the reference genome?
     *
     * @return the current read offset position.  If an edge will be == -1
     */
    @Ensures("result >= -1")
    public int getReadOffset() {
        return readOffset;
    }

    /**
     * What is the current offset w.r.t. the alignment state that aligns us to the readOffset?
     *
     * @return the current offset from the alignment start on the genome.  If this state is
     * at the left edge the result will be -1;
     */
    @Ensures("result >= -1")
    public int getGenomeOffset() {
        return genomeOffset;
    }

    /**
     * Get the position (1-based as standard) of the current alignment on the genome w.r.t. the read's alignment start
     * @return the position on the genome of the current state in absolute coordinates
     */
    @Ensures("result > 0")
    public int getGenomePosition() {
        return read.getAlignmentStart() + getGenomeOffset();
    }

    /**
     * Gets #getGenomePosition but as a 1 bp GenomeLoc
     * @param genomeLocParser the parser to use to create the genome loc
     * @return a non-null genome location with start position of getGenomePosition
     */
    @Requires("genomeLocParser != null")
    @Ensures("result != null")
    public GenomeLoc getLocation(final GenomeLocParser genomeLocParser) {
        // TODO -- may return wonky results if on an edge (could be 0 or could be beyond genome location)
        return genomeLocParser.createGenomeLoc(read.getReferenceName(), getGenomePosition());
    }

    /**
     * Get the cigar element we're currently aligning with.
     *
     * For example, if the cigar string is 2M2D2M and we're in the second step of the
     * first 2M, then this function returns the element 2M.  After calling stepForwardOnGenome
     * this function would return 2D.
     *
     * @return the cigar element, or null if we're the left edge
     */
    @Ensures("result != null || isLeftEdge() || isRightEdge()")
    public CigarElement getCurrentCigarElement() {
        return currentElement;
    }

    /**
     * Get the offset of the current cigar element among all cigar elements in the read
     *
     * Suppose our read's cigar is 1M2D3M, and we're at the first 1M.  This would
     * return 0.  Stepping forward puts us in the 2D, so our offset is 1.  Another
     * step forward would result in a 1 again (we're in the second position of the 2D).
     * Finally, one more step forward brings us to 2 (for the 3M element)
     *
     * @return the offset of the current cigar element in the reads's cigar.  Will return -1 for
     * when the state is on the left edge, and be == the number of cigar elements in the
     * read when we're past the last position on the genome
     */
    @Ensures({"result >= -1", "result <= nCigarElements"})
    public int getCurrentCigarElementOffset() {
        return currentCigarElementOffset;
    }

    /**
     * Get the offset of the current state into the current cigar element
     *
     * That is, suppose we have a read with cigar 2M3D4M, and we're right at
     * the second M position.  offsetIntoCurrentCigarElement would be 1, as
     * it's two elements into the 2M cigar.  Now stepping forward we'd be
     * in cigar element 3D, and our offsetIntoCurrentCigarElement would be 0.
     *
     * @return the offset (from 0) of the current state in the current cigar element.
     *  Will be 0 on the right edge, and -1 on the left.
     */
    @Ensures({"result >= 0 || (result == -1 && isLeftEdge())", "!isRightEdge() || result == 0"})
    public int getOffsetIntoCurrentCigarElement() {
        return offsetIntoCurrentCigarElement;
    }

    /**
     * Convenience accessor of the CigarOperator of the current cigar element
     *
     * Robust to the case where we're on the edge, and currentElement is null, in which
     * case this function returns null as well
     *
     * @return null if this is an edge state
     */
    @Ensures("result != null || isLeftEdge() || isRightEdge()")
    public CigarOperator getCigarOperator() {
        return currentElement == null ? null : currentElement.getOperator();
    }

    @Override
    public String toString() {
        return String.format("%s ro=%d go=%d cec=%d %s", read.getReadName(), readOffset, genomeOffset, offsetIntoCurrentCigarElement, currentElement);
    }

    // -----------------------------------------------------------------------------------------------
    //
    // Code for setting up prev / next states
    //
    // -----------------------------------------------------------------------------------------------

    /**
     * Step the state machine forward one unit
     *
     * Takes the current state of this machine, and advances the state until the next on-genome
     * cigar element (M, X, =, D) is encountered, at which point this function returns with the
     * cigar operator of the current element.
     *
     * Assumes that the AlignmentStateMachine is in the left edge state at the start, so that
     * stepForwardOnGenome() can be called to move the machine to the first alignment position.  That
     * is, the normal use of this code is:
     *
     * AlignmentStateMachine machine = new AlignmentStateMachine(read)
     * machine.stepForwardOnGenome()
     * // now the machine is at the first position on the genome
     *
     * When stepForwardOnGenome() advances off the right edge of the read, the state machine is
     * left in a state such that isRightEdge() returns true and returns null, indicating the
     * the machine cannot advance further.  The machine may explode, though this is not contracted,
     * if stepForwardOnGenome() is called after a previous call returned null.
     *
     * @return the operator of the cigar element that machine stopped at, null if we advanced off the end of the read
     */
    @Ensures("result != null || isRightEdge()")
    public CigarOperator stepForwardOnGenome() {
        // loop until we either find a cigar element step that moves us one base on the genome, or we run
        // out of cigar elements
        while ( true ) {
            // we enter this method with readOffset = index of the last processed base on the read
            // (-1 if we did not process a single base yet); this can be last matching base,
            // or last base of an insertion
            if (currentElement == null || (offsetIntoCurrentCigarElement + 1) >= currentElement.getLength()) {
                // exhausted the current element (or on the left edge) -- advance to the next cigar element
                currentCigarElementOffset++;
                if (currentCigarElementOffset < nCigarElements) {
                    currentElement = cigar.getCigarElement(currentCigarElementOffset);
                    offsetIntoCurrentCigarElement = -1;
                    // next line: guards against cigar elements of length 0; when new cigar element is retrieved,
                    // we reenter in order to re-check offsetIntoCurrentCigarElement against currentElement's length
                    continue;
                } else {
                    if (currentElement != null && currentElement.getOperator() == CigarOperator.D)
                        throw new UserException.MalformedBAM(read, "read ends with deletion. Cigar: " + read.getCigarString() + ". Although the SAM spec technically permits such reads, this is often indicative of malformed files. If you are sure you want to use this file, re-run your analysis with the extra option: -rf BadCigar");

                    // we're done, so set the offset of the cigar to 0 for cleanliness, as well as the current element
                    offsetIntoCurrentCigarElement = 0;
                    readOffset = read.getReadLength();
                    currentElement = null;

                    // Reads that contain indels model the genomeOffset as the following base in the reference.  Because
                    // we fall into this else block only when indels end the read, increment genomeOffset  such that the
                    // current offset of this read is the next ref base after the end of the indel.  This position will
                    // model a point on the reference somewhere after the end of the read.
                    genomeOffset++; // extended events need that. Logically, it's legal to advance the genomic offset here:

                    // we do step forward on the ref, and by returning null we also indicate that we are past the read end.
                    return null;
                }
            }

            offsetIntoCurrentCigarElement++;
            boolean done = false;
            switch (currentElement.getOperator()) {
                case H: // ignore hard clips
                case P: // ignore pads
                    offsetIntoCurrentCigarElement = currentElement.getLength();
                    break;
                case I: // insertion w.r.t. the reference
                case S: // soft clip
                    offsetIntoCurrentCigarElement = currentElement.getLength();
                    readOffset += currentElement.getLength();
                    break;
                case D: // deletion w.r.t. the reference
                    if (readOffset < 0)             // we don't want reads starting with deletion, this is a malformed cigar string
                        throw new UserException.MalformedBAM(read, "read starts with deletion. Cigar: " + read.getCigarString() + ". Although the SAM spec technically permits such reads, this is often indicative of malformed files. If you are sure you want to use this file, re-run your analysis with the extra option: -rf BadCigar");
                    // should be the same as N case
                    genomeOffset++;
                    done = true;
                    break;
                case N: // reference skip (looks and gets processed just like a "deletion", just different logical meaning)
                    genomeOffset++;
                    done = true;
                    break;
                case M:
                case EQ:
                case X:
                    readOffset++;
                    genomeOffset++;
                    done = true;
                    break;
                default:
                    throw new IllegalStateException("Case statement didn't deal with cigar op: " + currentElement.getOperator());
            }

            if ( done )
                return currentElement.getOperator();
        }
    }

    /**
     * Create a new PileupElement based on the current state of this element
     *
     * Must not be a left or right edge
     *
     * @return a pileup element
     */
    @Ensures("result != null")
    public final PileupElement makePileupElement() {
        if ( isLeftEdge() || isRightEdge() )
            throw new IllegalStateException(MAKE_PILEUP_EDGE_ERROR);
        return new PileupElement(read,
                getReadOffset(),
                getCurrentCigarElement(),
                getCurrentCigarElementOffset(),
                getOffsetIntoCurrentCigarElement());
    }
}
+
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/locusiterator/LIBSDownsamplingInfo.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/locusiterator/LIBSDownsamplingInfo.java
new file mode 100644
index 0000000..efbf33d
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/locusiterator/LIBSDownsamplingInfo.java
@@ -0,0 +1,51 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.locusiterator;
+
/**
 * Immutable holder for the two downsampling settings LIBS needs:
 * whether downsampling should be performed at all and, if so, the
 * target coverage to downsample to.
 *
 * User: depristo
 * Date: 1/5/13
 * Time: 1:26 PM
 */
class LIBSDownsamplingInfo {
    // should LIBS downsample at all?
    private final boolean performDownsampling;
    // target coverage to downsample to when enabled
    private final int toCoverage;

    /**
     * Create a new downsampling configuration.
     *
     * @param performDownsampling true if downsampling should be performed
     * @param toCoverage the coverage to downsample to
     */
    public LIBSDownsamplingInfo(final boolean performDownsampling, final int toCoverage) {
        this.performDownsampling = performDownsampling;
        this.toCoverage = toCoverage;
    }

    /** @return true if downsampling should be performed */
    public boolean isPerformDownsampling() {
        return performDownsampling;
    }

    /** @return the coverage to downsample to */
    public int getToCoverage() {
        return toCoverage;
    }
}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/locusiterator/LIBSPerformance.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/locusiterator/LIBSPerformance.java
new file mode 100644
index 0000000..aaa518d
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/locusiterator/LIBSPerformance.java
@@ -0,0 +1,191 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.locusiterator;
+
+import htsjdk.samtools.reference.IndexedFastaSequenceFile;
+import htsjdk.samtools.SAMFileReader;
+import htsjdk.samtools.SAMReadGroupRecord;
+import htsjdk.samtools.SAMRecordIterator;
+import org.apache.log4j.Logger;
+import org.broadinstitute.gatk.utils.commandline.Argument;
+import org.broadinstitute.gatk.utils.commandline.CommandLineProgram;
+import org.broadinstitute.gatk.utils.commandline.Input;
+import org.broadinstitute.gatk.utils.contexts.AlignmentContext;
+import org.broadinstitute.gatk.utils.sam.GATKSAMRecordIterator;
+import org.broadinstitute.gatk.utils.*;
+import org.broadinstitute.gatk.utils.fasta.CachingIndexedFastaSequenceFile;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.*;
+
+/**
+ * Caliper microbenchmark of fragment pileup
+ */
+public class LIBSPerformance extends CommandLineProgram {
+    private static Logger logger = Logger.getLogger(LIBSPerformance.class);
+
+    @Input(fullName = "input_file", shortName = "I", doc = "SAM or BAM file(s)", required = true)
+    public File samFile = null;
+
+    @Input(fullName = "reference_sequence", shortName = "R", doc = "Reference sequence file", required = true)
+    public File referenceFile = null;
+
+    @Argument(fullName = "L", shortName = "L", doc = "Query location", required = false)
+    public String location = null;
+
+    @Argument(fullName = "dt", shortName = "dt", doc = "Enable downsampling", required = false)
+    public boolean downsample = false;
+
+    @Override
+    public int execute() throws IOException {
+        final IndexedFastaSequenceFile reference = new CachingIndexedFastaSequenceFile(referenceFile);
+        final GenomeLocParser genomeLocParser = new GenomeLocParser(reference);
+
+        final SAMFileReader reader = new SAMFileReader(samFile);
+
+        SAMRecordIterator rawIterator;
+        if ( location == null )
+            rawIterator = reader.iterator();
+        else {
+            final GenomeLoc loc = genomeLocParser.parseGenomeLoc(location);
+            rawIterator = reader.query(loc.getContig(), loc.getStart(), loc.getStop(), false);
+        }
+
+        final GATKSAMRecordIterator iterator = new GATKSAMRecordIterator(rawIterator);
+
+        final Set<String> samples = new HashSet<String>();
+        for ( final SAMReadGroupRecord rg : reader.getFileHeader().getReadGroups() )
+            samples.add(rg.getSample());
+
+        final LIBSDownsamplingInfo ds = new LIBSDownsamplingInfo(downsample, 250);
+
+        final LocusIteratorByState libs =
+                new LocusIteratorByState(
+                        iterator,
+                        ds,
+                        true,
+                        genomeLocParser,
+                        samples,
+                        false);
+
+        final SimpleTimer timer = new SimpleTimer().start();
+        int bp = 0;
+        double lastElapsed = 0;
+        while ( libs.hasNext() ) {
+            AlignmentContext context = libs.next();
+            bp++;
+            if ( timer.getElapsedTime() - lastElapsed > 10 ) {
+                logger.info(bp + " iterations at " + context.getLocation());
+                lastElapsed = timer.getElapsedTime();
+            }
+        }
+        logger.info(String.format("runtime in seconds: %.2f", timer.getElapsedTime()));
+
+        return 0;
+    }
+
+//    private void syntheticTests() {
+//        final int readLength = 101;
+//        final int nReads = 10000;
+//        final int locus = 1;
+//
+//        SAMFileHeader header = ArtificialSAMUtils.createArtificialSamHeader(1, 1, 1000);
+//        final GenomeLocParser genomeLocParser = new GenomeLocParser(header.getSequenceDictionary());
+//
+//        int nIterations = 0;
+//        for ( final String cigar : Arrays.asList("101M", "50M10I40M", "50M10D40M") ) {
+//            GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(header, "read", 0, locus, readLength);
+//            read.setReadBases(Utils.dupBytes((byte) 'A', readLength));
+//            final byte[] quals = new byte[readLength];
+//            for ( int i = 0; i < readLength; i++ )
+//                quals[i] = (byte)(i % QualityUtils.MAX_SAM_QUAL_SCORE);
+//            read.setBaseQualities(quals);
+//            read.setCigarString(cigar);
+//
+//            for ( int j = 0; j < nReads; j++ ) {
+//                for ( int i = 0; i < rep; i++ ) {
+//                    switch ( op ) {
+//                        case NEW_STATE:
+//                        {
+//                            final AlignmentStateMachine alignmentStateMachine = new AlignmentStateMachine(read);
+//                            while ( alignmentStateMachine.stepForwardOnGenome() != null ) {
+//                                nIterations++;
+//                            }
+//                        }
+//                        break;
+////                        case OLD_STATE:
+////                        {
+////                            final SAMRecordAlignmentState alignmentStateMachine = new SAMRecordAlignmentState(read);
+////                            while ( alignmentStateMachine.stepForwardOnGenome() != null ) {
+////                                alignmentStateMachine.getRead();
+////                                nIterations++;
+////                            }
+////                        }
+////                        break;
+//                        case NEW_LIBS:
+//                        {
+//                            final List<GATKSAMRecord> reads = Collections.nCopies(30, read);
+//                            final org.broadinstitute.gatk.utils.locusiterator.LocusIteratorByState libs =
+//                                    new org.broadinstitute.gatk.utils.locusiterator.LocusIteratorByState(
+//                                            new LocusIteratorByStateBaseTest.FakeCloseableIterator<GATKSAMRecord>(reads.iterator()),
+//                                            LocusIteratorByStateBaseTest.createTestReadProperties(),
+//                                            genomeLocParser,
+//                                            LocusIteratorByState.sampleListForSAMWithoutReadGroups());
+//
+//                            while ( libs.hasNext() ) {
+//                                AlignmentContext context = libs.next();
+//                            }
+//                        }
+//                    }
+//                }
+//            }
+//        }
+//
+//        System.out.printf("iterations %d%n", nIterations);
+//    }
+
+    /**
+     * Required main method implementation.
+     * @param argv Command-line argument text.
+     * @throws Exception on error.
+     */
+    public static void main(String[] argv) throws Exception {
+        int returnCode = 0;
+        try {
+            LIBSPerformance instance = new LIBSPerformance();
+            start(instance, argv);
+            returnCode = 0;
+        } catch(Exception ex) {
+            returnCode = 1;
+            ex.printStackTrace();
+            throw ex;
+        } finally {
+            System.exit(returnCode);
+        }
+    }
+
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/locusiterator/LocusIterator.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/locusiterator/LocusIterator.java
new file mode 100644
index 0000000..0d28935
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/locusiterator/LocusIterator.java
@@ -0,0 +1,62 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.locusiterator;
+
+import htsjdk.samtools.util.CloseableIterator;
+import org.broadinstitute.gatk.utils.contexts.AlignmentContext;
+
+import java.util.Iterator;
+
+/**
+ * Iterator that traverses a SAM File, accumulating information on a per-locus basis.
+ *
+ * Base type for iterators that emit one AlignmentContext per covered locus.  Implements
+ * both Iterable and CloseableIterator so subclasses can be used directly in for-each loops.
+ */
+public abstract class LocusIterator implements Iterable<AlignmentContext>, CloseableIterator<AlignmentContext> {
+    /**
+     * @return this object itself, allowing the iterator to be used in a for-each loop
+     */
+    public Iterator<AlignmentContext> iterator() {
+        return this;
+    }
+
+    /**
+     * Deliberate no-op: this base class holds no closeable resources.  Subclasses
+     * that wrap an underlying reader or iterator should override this method.
+     */
+    public void close() {
+    }
+
+    public abstract boolean hasNext();
+    public abstract AlignmentContext next();
+
+    /**
+     * Get, if possible, the underlying LocusIteratorByState from this LocusIterator.
+     *
+     * @throws UnsupportedOperationException if we don't support this operation
+     *
+     * @return a non-null locus iterator by state
+     */
+    public LocusIteratorByState getLIBS() {
+        throw new UnsupportedOperationException("This locus iterator does not support getting the underlying LocusIteratorByState");
+    }
+
+    /**
+     * Removal is unsupported: the iteration is read-only over a SAM stream.
+     * @throws UnsupportedOperationException always
+     */
+    public void remove() {
+        throw new UnsupportedOperationException("Can not remove records from a SAM file via an iterator!");
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/locusiterator/LocusIteratorByState.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/locusiterator/LocusIteratorByState.java
new file mode 100644
index 0000000..c5bf32d
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/locusiterator/LocusIteratorByState.java
@@ -0,0 +1,457 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.locusiterator;
+
+import com.google.java.contract.Ensures;
+import com.google.java.contract.Requires;
+import htsjdk.samtools.CigarOperator;
+import htsjdk.samtools.SAMFileReader;
+import htsjdk.samtools.SAMRecord;
+import htsjdk.samtools.util.CloseableIterator;
+import org.apache.log4j.Logger;
+import org.broadinstitute.gatk.utils.contexts.AlignmentContext;
+import org.broadinstitute.gatk.utils.downsampling.DownsampleType;
+import org.broadinstitute.gatk.utils.sam.GATKSAMRecordIterator;
+import org.broadinstitute.gatk.utils.GenomeLoc;
+import org.broadinstitute.gatk.utils.GenomeLocParser;
+import org.broadinstitute.gatk.utils.downsampling.DownsamplingMethod;
+import org.broadinstitute.gatk.utils.pileup.PileupElement;
+import org.broadinstitute.gatk.utils.pileup.ReadBackedPileupImpl;
+import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
+import org.broadinstitute.gatk.utils.sam.ReadUtils;
+
+import java.util.*;
+
+/**
+ * Iterator that traverses a SAM File, accumulating information on a per-locus basis
+ *
+ * Produces AlignmentContext objects, that contain ReadBackedPileups of PileupElements.  This
+ * class has its core job of converting an iterator of ordered SAMRecords into those
+ * RBPs.
+ *
+ * There are a few constraints required and ensured by LIBS:
+ *
+ * -- Requires the Iterator<GATKSAMRecord> to return reads in coordinate sorted order, consistent with the ordering
+ * defined by the SAM file format.  Note that for performance reasons this constraint isn't actually enforced.
+ * The behavior of LIBS is undefined in the case where the reads are badly ordered.
+ * -- The reads in the ReadBackedPileup are themselves in the order of appearance of the reads from the iterator.
+ * That is, the pileup is ordered in a way consistent with the SAM coordinate ordering
+ * -- Only aligned reads with at least one on-genomic cigar operator are passed on in the pileups.  That is,
+ * unmapped reads or reads that are all insertions (10I) or soft clipped (10S) are not passed on.
+ * -- LIBS can perform per-sample downsampling of a variety of kinds.
+ * -- Because of downsampling there's no guarantee that:
+ *   -- A read that could be aligned to a position will actually occur in the pileup (downsampled away)
+ *   -- A read that appears in a previous pileup that could align to a future position will actually occur
+ *      in that pileup.  That is, a read might show up at position i but be downsampled away in the pileup at j
+ * -- LIBS can optionally capture all of the reads that come off the iterator, before any leveling downsampling
+ * occurs, if requested.  This allows users of LIBS to see both a ReadBackedPileup view of the data as well as
+ * a stream of unique, sorted reads
+ */
+public final class LocusIteratorByState extends LocusIterator {
+    /** Indicates that we shouldn't do any downsampling */
+    public final static LIBSDownsamplingInfo NO_DOWNSAMPLING = new LIBSDownsamplingInfo(false, -1);
+
+    /**
+     * our log, which we want to capture anything from this class
+     */
+    private final static Logger logger = Logger.getLogger(LocusIteratorByState.class);
+
+    // -----------------------------------------------------------------------------------------------------------------
+    //
+    // member fields
+    //
+    // -----------------------------------------------------------------------------------------------------------------
+
+    /**
+     * Used to create new GenomeLocs as needed
+     */
+    private final GenomeLocParser genomeLocParser;
+
+    /**
+     * A complete list of all samples that may come out of the reads.  Must be
+     * comprehensive.
+     */
+    private final ArrayList<String> samples;
+
+    /**
+     * The system that maps incoming reads from the iterator to their pileup states
+     */
+    private final ReadStateManager readStates;
+
+    /**
+     * Should we include reads in the pileup which are aligned with a deletion operator to the reference?
+     */
+    private final boolean includeReadsWithDeletionAtLoci;
+
+    /**
+     * The next alignment context.  A non-null value means that a
+     * context is waiting from hasNext() for sending off to the next next() call.  A null
+     * value means that either hasNext() has not been called at all or that
+     * the underlying iterator is exhausted
+     */
+    private AlignmentContext nextAlignmentContext;
+
+    // -----------------------------------------------------------------------------------------------------------------
+    //
+    // constructors and other basic operations
+    //
+    // -----------------------------------------------------------------------------------------------------------------
+
+    /**
+     * Create a new LocusIteratorByState
+     *
+     * @param samIterator the iterator of reads to process into pileups.  Reads must be ordered
+     *                    according to standard coordinate-sorted BAM conventions
+     * @param downsamplingMethod information about how to downsample the reads
+     * @param includeReadsWithDeletionAtLoci Include reads with deletion at loci
+     * @param keepUniqueReadListInLIBS Keep unique read list in LIBS
+     * @param genomeLocParser used to create genome locs
+     * @param samples a complete list of samples present in the read groups for the reads coming from samIterator.
+     *                This is generally just the set of read group sample fields in the SAMFileHeader.  This
+     *                list of samples may contain a null element, and all reads without read groups will
+     *                be mapped to this null sample
+     */
+    public LocusIteratorByState(final Iterator<GATKSAMRecord> samIterator,
+                                final DownsamplingMethod downsamplingMethod,
+                                final boolean includeReadsWithDeletionAtLoci,
+                                final boolean keepUniqueReadListInLIBS,
+                                final GenomeLocParser genomeLocParser,
+                                final Collection<String> samples) {
+        this(samIterator,
+                toDownsamplingInfo(downsamplingMethod),
+                includeReadsWithDeletionAtLoci,
+                genomeLocParser,
+                samples,
+                keepUniqueReadListInLIBS);
+    }
+
+    /**
+     * Create a new LocusIteratorByState based on a SAMFileReader using reads in an iterator it
+     *
+     * Simple constructor that uses the samples in the reader, doesn't do any downsampling,
+     * and makes a new GenomeLocParser using the reader.  This constructor will be slow(ish)
+     * if you continually invoke this constructor, but it's easy to make.
+     *
+     * @param reader a non-null reader
+     * @param it an iterator from reader that has the reads we want to use to create ReadBackPileups
+     */
+    public LocusIteratorByState(final SAMFileReader reader, final CloseableIterator<SAMRecord> it) {
+        this(new GATKSAMRecordIterator(it),
+                new LIBSDownsamplingInfo(false, 0),
+                true,
+                new GenomeLocParser(reader.getFileHeader().getSequenceDictionary()),
+                ReadUtils.getSAMFileSamples(reader.getFileHeader()),
+                false);
+    }
+
+    /**
+     * Create a new LocusIteratorByState
+     *
+     * @param samIterator the iterator of reads to process into pileups.  Reads must be ordered
+     *                    according to standard coordinate-sorted BAM conventions
+     * @param downsamplingInfo meta-information about how to downsample the reads
+     * @param includeReadsWithDeletionAtLoci if true, reads whose cigar places a deletion (D) over a locus
+     *                                       are included in that locus's pileup
+     * @param genomeLocParser used to create genome locs
+     * @param samples a complete list of samples present in the read groups for the reads coming from samIterator.
+     *                This is generally just the set of read group sample fields in the SAMFileHeader.  This
+     *                list of samples may contain a null element, and all reads without read groups will
+     *                be mapped to this null sample
+     * @param maintainUniqueReadsList if true, we will keep the unique reads from off the samIterator and make them
+     *                                available via the transferReadsFromAllPreviousPileups interface
+     */
+    public LocusIteratorByState(final Iterator<GATKSAMRecord> samIterator,
+                                final LIBSDownsamplingInfo downsamplingInfo,
+                                final boolean includeReadsWithDeletionAtLoci,
+                                final GenomeLocParser genomeLocParser,
+                                final Collection<String> samples,
+                                final boolean maintainUniqueReadsList) {
+        if ( samIterator == null ) throw new IllegalArgumentException("samIterator cannot be null");
+        if ( downsamplingInfo == null ) throw new IllegalArgumentException("downsamplingInfo cannot be null");
+        if ( genomeLocParser == null ) throw new IllegalArgumentException("genomeLocParser cannot be null");
+        if ( samples == null ) throw new IllegalArgumentException("Samples cannot be null");
+
+        // currently the GATK expects this LocusIteratorByState to accept empty sample lists, when
+        // there's no read data.  So we need to throw this error only when samIterator.hasNext() is true
+        if (samples.isEmpty() && samIterator.hasNext()) {
+            throw new IllegalArgumentException("samples list must not be empty");
+        }
+
+        this.genomeLocParser = genomeLocParser;
+        this.includeReadsWithDeletionAtLoci = includeReadsWithDeletionAtLoci;
+        this.samples = new ArrayList<String>(samples);
+        this.readStates = new ReadStateManager(samIterator, this.samples, downsamplingInfo, maintainUniqueReadsList);
+    }
+
+    @Override
+    public Iterator<AlignmentContext> iterator() {
+        return this;
+    }
+
+    /**
+     * Get the current location (i.e., the bp of the center of the pileup) of the pileup, or null if not anywhere yet
+     *
+     * Assumes that read states is updated to reflect the current pileup position, but not advanced to the
+     * next location.
+     *
+     * @return the location of the current pileup, or null if we're after all reads
+     */
+    private GenomeLoc getLocation() {
+        return readStates.isEmpty() ? null : readStates.getFirst().getLocation(genomeLocParser);
+    }
+
+    // -----------------------------------------------------------------------------------------------------------------
+    //
+    // next() routine and associated collection operations
+    //
+    // -----------------------------------------------------------------------------------------------------------------
+
+    /**
+     * Is there another pileup available?
+     * @return true if another AlignmentContext is available from the underlying reads, false otherwise
+     */
+    @Override
+    public boolean hasNext() {
+        lazyLoadNextAlignmentContext();
+        return nextAlignmentContext != null;
+    }
+
+    /**
+     * Get the next AlignmentContext available from the reads.
+     *
+     * @return a non-null AlignmentContext of the pileup at the next genomic position covered by
+     * at least one read.
+     */
+    @Override
+    public AlignmentContext next() {
+        // lazyLoadNextAlignmentContext is idempotent (no-ops when a context is already pending),
+        // so the extra call inside hasNext() below is harmless
+        lazyLoadNextAlignmentContext();
+        if (!hasNext())
+            throw new NoSuchElementException("LocusIteratorByState: out of elements.");
+        AlignmentContext currentAlignmentContext = nextAlignmentContext;
+        nextAlignmentContext = null;
+        return currentAlignmentContext;
+    }
+
+    /**
+     * Move this LIBS until we are over position
+     *
+     * Will return null if cannot reach position (because we run out of data in the locus)
+     *
+     * @param position the start position of the AlignmentContext we want back
+     * @param stopAtFirstNonEmptySiteAfterPosition if true, we will stop as soon as we find a context with data with
+     *                                             position >= position, otherwise we will return a null value
+     *                                             and consume the data for the next position.  This means that without
+     *                                             specifying this value the LIBS will be in an indeterminate state
+     *                                             after calling this function, and should be reconstructed from scratch
+     *                                             for subsequent use
+     * @return a AlignmentContext at position, or null if this isn't possible
+     */
+    public AlignmentContext advanceToLocus(final int position, final boolean stopAtFirstNonEmptySiteAfterPosition) {
+        while ( hasNext() ) {
+            final AlignmentContext context = next();
+
+            // NOTE(review): next() throws NoSuchElementException rather than returning null,
+            // so this check is defensive only
+            if ( context == null )
+                // we ran out of data
+                return null;
+
+            if ( context.getPosition() == position )
+                return context;
+
+            if ( context.getPosition() > position)
+                return stopAtFirstNonEmptySiteAfterPosition ? context : null;
+        }
+
+        return null;
+    }
+
+    /**
+     * Creates the next alignment context from the given state.  Note that this is implemented as a
+     * lazy load method. nextAlignmentContext MUST BE null in order for this method to advance to the
+     * next entry.
+     */
+    private void lazyLoadNextAlignmentContext() {
+        while (nextAlignmentContext == null && readStates.hasNext()) {
+            readStates.collectPendingReads();
+
+            final GenomeLoc location = getLocation();
+            final Map<String, ReadBackedPileupImpl> fullPileup = new HashMap<String, ReadBackedPileupImpl>();
+
+            // build one per-sample pileup for the current location
+            for (final Map.Entry<String, PerSampleReadStateManager> sampleStatePair : readStates ) {
+                final String sample = sampleStatePair.getKey();
+                final PerSampleReadStateManager readState = sampleStatePair.getValue();
+                final Iterator<AlignmentStateMachine> iterator = readState.iterator();
+                final List<PileupElement> pile = new ArrayList<PileupElement>(readState.size());
+
+                while (iterator.hasNext()) {
+                    // state object with the read/offset information
+                    final AlignmentStateMachine state = iterator.next();
+                    final GATKSAMRecord read = state.getRead();
+                    final CigarOperator op = state.getCigarOperator();
+
+                    if (op == CigarOperator.N) // N's are never added to any pileup
+                        continue;
+
+                    if (!dontIncludeReadInPileup(read, location.getStart())) {
+                        if ( ! includeReadsWithDeletionAtLoci && op == CigarOperator.D ) {
+                            continue;
+                        }
+
+                        pile.add(state.makePileupElement());
+                    }
+                }
+
+                if (! pile.isEmpty() ) // if this pileup added at least one base, add it to the full pileup
+                    fullPileup.put(sample, new ReadBackedPileupImpl(location, pile));
+            }
+
+            readStates.updateReadStates(); // critical - must be called after we get the current state offsets and location
+            if (!fullPileup.isEmpty()) // if we got reads with non-D/N over the current position, we are done
+                nextAlignmentContext = new AlignmentContext(location, new ReadBackedPileupImpl(location, fullPileup), false);
+        }
+    }
+
+    // -----------------------------------------------------------------------------------------------------------------
+    //
+    // getting the list of reads
+    //
+    // -----------------------------------------------------------------------------------------------------------------
+
+    /**
+     * Transfer current list of all unique reads that have ever been used in any pileup, clearing old list
+     *
+     * This list is guaranteed to only contain unique reads, even across calls to the this function.  It is
+     * literally the unique set of reads ever seen.
+     *
+     * The list occurs in the same order as they are encountered in the underlying iterator.
+     *
+     * Takes the maintained list of submitted reads, and transfers it to the caller of this
+     * function.  The old list is set to a new, cleanly allocated list so the caller officially
+     * owns the list returned by this call.  This is the only way to clear the tracking
+     * of submitted reads, if enabled.
+     *
+     * The purpose of this function is allow users of LIBS to keep track of all of the reads pulled off the
+     * underlying GATKSAMRecord iterator and that appeared at any point in the list of SAMRecordAlignmentState for
+     * any reads.  This function is intended to allow users to efficiently reconstruct the unique set of reads
+     * used across all pileups.  This is necessary for LIBS to handle because attempting to do
+     * so from the pileups coming out of LIBS is extremely expensive.
+     *
+     * This functionality is only available if LIBS was created with the argument to track the reads
+     *
+     * @throws UnsupportedOperationException if called when keepingSubmittedReads is false
+     *
+     * @return the current list
+     */
+    @Ensures("result != null")
+    public List<GATKSAMRecord> transferReadsFromAllPreviousPileups() {
+        return readStates.transferSubmittedReads();
+    }
+
+    /**
+     * Get the underlying list of tracked reads.  For testing only
+     * @return a non-null list
+     */
+    @Ensures("result != null")
+    protected List<GATKSAMRecord> getReadsFromAllPreviousPileups() {
+        return readStates.getSubmittedReads();
+    }
+
+    // -----------------------------------------------------------------------------------------------------------------
+    //
+    // utility functions
+    //
+    // -----------------------------------------------------------------------------------------------------------------
+
+    /**
+     * Should this read be excluded from the pileup?
+     *
+     * Generic place to put per-base filters appropriate to LocusIteratorByState
+     *
+     * @param rec the read to potentially exclude
+     * @param pos the genomic position of the current alignment
+     * @return true if the read should be excluded from the pileup, false otherwise
+     */
+    @Requires({"rec != null", "pos > 0"})
+    private boolean dontIncludeReadInPileup(final GATKSAMRecord rec, final long pos) {
+        return ReadUtils.isBaseInsideAdaptor(rec, pos);
+    }
+
+    /**
+     * Create a LIBSDownsamplingInfo object from the requested info in DownsamplingMethod
+     *
+     * LIBS will invoke the Reservoir and Leveling downsamplers on the read stream if we're
+     * downsampling to coverage by sample. SAMDataSource will have refrained from applying
+     * any downsamplers to the read stream in this case, in the expectation that LIBS will
+     * manage the downsampling. The reason for this is twofold: performance (don't have to
+     * split/re-assemble the read stream in SAMDataSource), and to enable partial downsampling
+     * of reads (eg., using half of a read, and throwing the rest away).
+     *
+     * @param downsamplingMethod downsampling information about what should be done to the reads
+     * @return a LIBS specific info holder about downsampling only
+     */
+    @Requires("downsamplingMethod != null")
+    @Ensures("result != null")
+    private static LIBSDownsamplingInfo toDownsamplingInfo(final DownsamplingMethod downsamplingMethod) {
+        // NOTE(review): the @Requires contract demands a non-null argument, yet the code below
+        // also tolerates null — confirm which is intended
+        final boolean performDownsampling = downsamplingMethod != null &&
+                downsamplingMethod.type == DownsampleType.BY_SAMPLE &&
+                downsamplingMethod.toCoverage != null;
+        final int coverage = performDownsampling ? downsamplingMethod.toCoverage : 0;
+
+        return new LIBSDownsamplingInfo(performDownsampling, coverage);
+    }
+
+    /**
+     * Create a pileup element for read at offset
+     *
+     * offset must correspond to a valid read offset given the read's cigar, or an IllegalStateException will be thrown
+     *
+     * @param read a read
+     * @param offset the offset into the bases we'd like to use in the pileup
+     * @return a valid PileupElement with read and at offset
+     */
+    @Ensures("result != null")
+    public static PileupElement createPileupForReadAndOffset(final GATKSAMRecord read, final int offset) {
+        if ( read == null ) throw new IllegalArgumentException("read cannot be null");
+        if ( offset < 0 || offset >= read.getReadLength() ) throw new IllegalArgumentException("Invalid offset " + offset + " outside of bounds 0 and " + read.getReadLength());
+
+        final AlignmentStateMachine stateMachine = new AlignmentStateMachine(read);
+
+        // walk the state machine across the genome until the read offset matches
+        while ( stateMachine.stepForwardOnGenome() != null ) {
+            if ( stateMachine.getReadOffset() == offset )
+                return stateMachine.makePileupElement();
+        }
+
+        throw new IllegalStateException("Tried to create a pileup for read " + read + " with offset " + offset +
+                " but we never saw such an offset in the alignment state machine");
+    }
+
+    /**
+     * For testing only.  Assumes that the incoming SAMRecords have no read groups, so creates a dummy sample list
+     * for the system.
+     */
+    public static List<String> sampleListForSAMWithoutReadGroups() {
+        List<String> samples = new ArrayList<String>();
+        samples.add(null);
+        return samples;
+    }
+}
\ No newline at end of file
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/locusiterator/PerSampleReadStateManager.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/locusiterator/PerSampleReadStateManager.java
new file mode 100644
index 0000000..af9953e
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/locusiterator/PerSampleReadStateManager.java
@@ -0,0 +1,261 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.locusiterator;
+
+import com.google.java.contract.Ensures;
+import com.google.java.contract.Invariant;
+import com.google.java.contract.Requires;
+import htsjdk.samtools.CigarOperator;
+import org.apache.log4j.Logger;
+import org.broadinstitute.gatk.utils.downsampling.Downsampler;
+import org.broadinstitute.gatk.utils.downsampling.LevelingDownsampler;
+
+import java.util.Iterator;
+import java.util.LinkedList;
+import java.util.List;
+
+/**
+ * ReadStateManager for a single sample
+ *
+ * User: depristo
+ * Date: 1/13/13
+ * Time: 12:28 PM
+ */
+@Invariant({
+        "readStartsAreWellOrdered()",
+        "! isDownsampling() || downsamplingTarget > 0",
+        "nSites >= 0",
+        "nSitesNeedingDownsampling >= 0",
+        "nSitesNeedingDownsampling <= nSites"
+})
+final class PerSampleReadStateManager implements Iterable<AlignmentStateMachine> {
+    // Logger keyed to this class.  (It was previously keyed to ReadStateManager.class —
+    // an apparent copy-paste slip that mislabeled this class's log output.)
+    private final static Logger logger = Logger.getLogger(PerSampleReadStateManager.class);
+    // Compile-time switch enabling collection of downsampling statistics.
+    private final static boolean CAPTURE_DOWNSAMPLING_STATS = false;
+
+    /**
+     * A list (potentially empty) of alignment state machines.
+     *
+     * The state machines must be ordered by the alignment start of their underlying reads, with the
+     * lowest alignment starts on the left, and the largest on the right
+     */
+    private LinkedList<AlignmentStateMachine> readStatesByAlignmentStart = new LinkedList<AlignmentStateMachine>();
+
+    // Downsampler that levels per-start-site coverage; null when downsampling is disabled.
+    private final Downsampler<LinkedList<AlignmentStateMachine>> levelingDownsampler;
+    // Target coverage when downsampling is enabled; -1 when disabled.
+    private final int downsamplingTarget;
+
+    /**
+     * The number of sites where downsampling has been invoked
+     */
+    private int nSitesNeedingDownsampling = 0;
+
+    /**
+     * The number of sites we've visited
+     */
+    private int nSites = 0;
+
+    /**
+     * Create a new PerSampleReadStateManager with downsampling parameters as requested by LIBSDownsamplingInfo
+     *
+     * @param downsamplingInfo the downsampling params we want to use (note: the parameter was
+     *                         previously named identically to its type, shadowing the class name)
+     */
+    public PerSampleReadStateManager(final LIBSDownsamplingInfo downsamplingInfo) {
+        // -1 marks "no downsampling"; the class invariant only constrains the target when a downsampler exists
+        this.downsamplingTarget = downsamplingInfo.isPerformDownsampling() ? downsamplingInfo.getToCoverage() : -1;
+        this.levelingDownsampler = downsamplingInfo.isPerformDownsampling()
+                ? new LevelingDownsampler<LinkedList<AlignmentStateMachine>, AlignmentStateMachine>(downsamplingInfo.getToCoverage())
+                : null;
+    }
+
+    /**
+     * Partition the flat readStatesByAlignmentStart list into sub-lists of state machines
+     * sharing the same genome offset, preserving the overall ordering by alignment start.
+     *
+     * For example, if the flat list has alignment starts [10, 10, 11, 12, 12, 13] then
+     * the resulting grouping will be [[10, 10], [11], [12, 12], [13]].
+     *
+     * @return a non-null list of lists
+     */
+    @Ensures("result != null")
+    private List<LinkedList<AlignmentStateMachine>> groupByAlignmentStart() {
+        final LinkedList<LinkedList<AlignmentStateMachine>> groups = new LinkedList<LinkedList<AlignmentStateMachine>>();
+
+        AlignmentStateMachine groupHead = null;
+        for ( final AlignmentStateMachine current : readStatesByAlignmentStart ) {
+            // a change in genome offset relative to the current group's first element starts a new group
+            final boolean startsNewGroup = groupHead == null || current.getGenomeOffset() != groupHead.getGenomeOffset();
+            if ( startsNewGroup ) {
+                groups.add(new LinkedList<AlignmentStateMachine>());
+                groupHead = current;
+            }
+            groups.getLast().add(current);
+        }
+
+        return groups;
+    }
+
+    /**
+     * Concatenate the grouped lists of alignment state machines back into a single
+     * flat list, keeping the original ordering.
+     *
+     * @param grouped the grouped state machines, as produced by groupByAlignmentStart()
+     * @return a non-null flat list containing every state machine from every group
+     */
+    @Ensures("result != null")
+    private LinkedList<AlignmentStateMachine> flattenByAlignmentStart(final List<LinkedList<AlignmentStateMachine>> grouped) {
+        final LinkedList<AlignmentStateMachine> result = new LinkedList<AlignmentStateMachine>();
+        for ( final Iterator<LinkedList<AlignmentStateMachine>> it = grouped.iterator(); it.hasNext(); ) {
+            result.addAll(it.next());
+        }
+        return result;
+    }
+
+    /**
+     * Test that the read states are ordered by their alignment starts (non-decreasing)
+     * @return true if well ordered, false otherwise
+     */
+    private boolean readStartsAreWellOrdered() {
+        int lastStart = -1;
+        for ( final AlignmentStateMachine machine : readStatesByAlignmentStart ) {
+            if ( lastStart > machine.getRead().getAlignmentStart() )
+                return false;
+            lastStart = machine.getRead().getAlignmentStart();
+        }
+        return true;
+    }
+
+    /**
+     * Add states to this manager; assumes it can keep the states linked list without making a copy
+     * @param states the new states to add to this manager
+     * @return The change in the number of states, after including states and potentially downsampling.  Note
+     * that this return result might be negative, if downsampling is enabled, as the downsampler might
+     * drop more states than have just been added
+     */
+    @Requires("states != null")
+    public int addStatesAtNextAlignmentStart(final LinkedList<AlignmentStateMachine> states) {
+        if ( states.isEmpty() ) {
+            return 0;
+        }
+
+        readStatesByAlignmentStart.addAll(states);
+        int nStatesAdded = states.size();
+
+        if ( isDownsampling() && readStatesByAlignmentStart.size() > downsamplingTarget ) {
+            // only go into the downsampling branch if we are downsampling and the coverage > the target
+            captureDownsamplingStats();
+            levelingDownsampler.submit(groupByAlignmentStart());
+            levelingDownsampler.signalEndOfInput();
+
+            nStatesAdded -= levelingDownsampler.getNumberOfDiscardedItems();
+
+            // use returned List directly rather than make a copy, for efficiency's sake
+            readStatesByAlignmentStart = flattenByAlignmentStart(levelingDownsampler.consumeFinalizedItems());
+            levelingDownsampler.resetStats();
+        }
+
+        return nStatesAdded;
+    }
+
+    /**
+     * Is downsampling enabled for this manager?
+     * @return true if a leveling downsampler is present (i.e., we are downsampling), false otherwise
+     */
+    private boolean isDownsampling() {
+        return levelingDownsampler != null;
+    }
+
+    /**
+     * Get the leftmost alignment state machine, or null if the read state list is empty
+     * @return a potentially null AlignmentStateMachine
+     */
+    public AlignmentStateMachine getFirst() {
+        return isEmpty() ? null : readStatesByAlignmentStart.getFirst();
+    }
+
+    /**
+     * Capture statistics about the behavior of the downsampling (site counters plus a periodic
+     * log line), but only if CAPTURE_DOWNSAMPLING_STATS is true
+     */
+    @Requires("isDownsampling()")
+    private void captureDownsamplingStats() {
+        if ( CAPTURE_DOWNSAMPLING_STATS ) {
+            nSites++;
+            final int loc = getFirst().getGenomePosition();
+            String message = "Pass through";
+            final boolean downsampling = size() > downsamplingTarget;
+            if ( downsampling ) {
+                nSitesNeedingDownsampling++;
+                message = "Downsampling";
+            }
+
+            // log every downsampled site, plus a periodic heartbeat every 10000 sites
+            if ( downsampling || nSites % 10000 == 0 )
+                logger.info(String.format("%20s at %s: coverage=%d, max=%d, fraction of downsampled sites=%.2e",
+                        message, loc, size(), downsamplingTarget, (1.0 * nSitesNeedingDownsampling / nSites)));
+        }
+    }
+
+    /**
+     * Are there no alignments for this sample in this manager?
+     * @return true if there are no alignments, false if there is at least one
+     */
+    public boolean isEmpty() {
+        return readStatesByAlignmentStart.isEmpty();
+    }
+
+    /**
+     * Get the number of read states currently in this manager
+     * @return the number of read states, >= 0
+     */
+    @Ensures("result >= 0")
+    public int size() {
+        return readStatesByAlignmentStart.size();
+    }
+
+    /**
+     * Advances all read states forward by one element, removing states that are
+     * no longer aligned to the current position.
+     * @return the number of states removed after advancing
+     */
+    public int updateReadStates() {
+        int nRemoved = 0;
+        final Iterator<AlignmentStateMachine> it = iterator();
+        while (it.hasNext()) {
+            final AlignmentStateMachine state = it.next();
+            final CigarOperator op = state.stepForwardOnGenome();
+            if (op == null) {
+                // we discard the read only when we are past its end AND the indel at the end of the read (if any) was
+                // already processed. Keeping the read state that returned null upon stepForwardOnGenome() is safe
+                // as the next call to stepForwardOnGenome() will return null again AND will clear hadIndel() flag.
+                it.remove();                                                // we've stepped off the end of the object
+                nRemoved++;
+            }
+        }
+
+        return nRemoved;
+    }
+
+    /**
+     * Iterate over the AlignmentStateMachines in this manager in alignment start order.
+     * @return a valid, non-null iterator
+     */
+    @Ensures("result != null")
+    public Iterator<AlignmentStateMachine> iterator() {
+        return readStatesByAlignmentStart.iterator();
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/locusiterator/ReadStateManager.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/locusiterator/ReadStateManager.java
new file mode 100644
index 0000000..8491289
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/locusiterator/ReadStateManager.java
@@ -0,0 +1,289 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.locusiterator;
+
+import com.google.java.contract.Ensures;
+import com.google.java.contract.Requires;
+import htsjdk.samtools.util.PeekableIterator;
+import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
+
+import java.util.*;
+
+/**
+ * Manages and updates mapping from sample -> List of SAMRecordAlignmentState
+ *
+ * Optionally can keep track of all of the reads pulled off the iterator and
+ * that appeared at any point in the list of SAMRecordAlignmentState for any reads.
+ * This functionality is only possible at this stage, as this object does the popping of
+ * reads off the underlying source iterator, and presents only a pileup-like interface
+ * of samples -> SAMRecordAlignmentStates.  Reconstructing the unique set of reads
+ * used across all pileups is extremely expensive from that data structure.
+ *
+ * User: depristo
+ * Date: 1/5/13
+ * Time: 2:02 PM
+ */
+final class ReadStateManager implements Iterable<Map.Entry<String, PerSampleReadStateManager>> {
+    private final List<String> samples;
+    private final PeekableIterator<GATKSAMRecord> iterator;
+    private final SamplePartitioner<GATKSAMRecord> samplePartitioner;
+
+    /**
+     * A mapping from sample name -> the per sample read state manager that manages that sample's read states
+     *
+     * IT IS CRITICAL THAT THIS BE A LINKED HASH MAP, SO THAT THE ITERATION OF THE MAP OCCURS IN THE SAME
+     * ORDER AS THE ORIGINAL SAMPLES
+     */
+    private final Map<String, PerSampleReadStateManager> readStatesBySample = new LinkedHashMap<String, PerSampleReadStateManager>();
+
+    private LinkedList<GATKSAMRecord> submittedReads;
+    private final boolean keepSubmittedReads;
+
+    private int totalReadStates = 0;
+
+    public ReadStateManager(final Iterator<GATKSAMRecord> source,
+                            final List<String> samples,
+                            final LIBSDownsamplingInfo LIBSDownsamplingInfo,
+                            final boolean keepSubmittedReads) {
+        this.samples = samples;
+        this.iterator = new PeekableIterator<GATKSAMRecord>(source);
+
+        this.keepSubmittedReads = keepSubmittedReads;
+        this.submittedReads = new LinkedList<GATKSAMRecord>();
+
+        for (final String sample : samples) {
+            // because this is a linked hash map the order of iteration will be in sample order
+            readStatesBySample.put(sample, new PerSampleReadStateManager(LIBSDownsamplingInfo));
+        }
+
+        samplePartitioner = new SamplePartitioner<GATKSAMRecord>(LIBSDownsamplingInfo, samples);
+    }
+
+    /**
+     * Returns an iterator over all the sample -> per-sample read state manager pairs in this read state manager.
+     *
+     * The order of iteration is the same as the order of the samples provided upon construction to this
+     * ReadStateManager.
+     *
+     * @return Iterator over sample + per sample read state manager pairs for this read state manager.
+     */
+    @Override
+    public Iterator<Map.Entry<String, PerSampleReadStateManager>> iterator() {
+        return readStatesBySample.entrySet().iterator();
+    }
+
+    public boolean isEmpty() {
+        return totalReadStates == 0;
+    }
+
+    /**
+     * Retrieves the total number of reads in the manager across all samples.
+     *
+     * @return Total number of reads over all samples.
+     */
+    public int size() {
+        return totalReadStates;
+    }
+
+    /**
+     * Retrieves the total number of reads in the manager in the given sample.
+     *
+     * @param sample The sample.
+     * @return Total number of reads in the given sample.
+     */
+    public int size(final String sample) {
+        return readStatesBySample.get(sample).size();
+    }
+
+    public AlignmentStateMachine getFirst() {
+        for ( final PerSampleReadStateManager manager : readStatesBySample.values() ) {
+            if ( ! manager.isEmpty() )
+                return manager.getFirst();
+        }
+        return null;
+    }
+
+    public boolean hasNext() {
+        return totalReadStates > 0 || iterator.hasNext();
+    }
+
+    /**
+     * Advances all of the read states by one bp.  After this call the read states are reflective
+     * of the next pileup.
+     */
+    public void updateReadStates() {
+        for (final PerSampleReadStateManager perSampleReadStateManager : readStatesBySample.values() ) {
+            totalReadStates -= perSampleReadStateManager.updateReadStates();
+        }
+    }
+
+    /**
+     * Does read start at the same position as described by currentContigIndex and currentAlignmentStart?
+     *
+     * @param read the read we want to test
+     * @param currentContigIndex the contig index (from the read's getReferenceIndex) of the reads in this state manager
+     * @param currentAlignmentStart the alignment start of the left-most position on the
+     *                           genome of the reads in this read state manager
+     * @return true if read has contig index and start equal to the current ones
+     */
+    private boolean readStartsAtCurrentPosition(final GATKSAMRecord read, final int currentContigIndex, final int currentAlignmentStart) {
+        return read.getAlignmentStart() == currentAlignmentStart && read.getReferenceIndex() == currentContigIndex;
+    }
+
+    /**
+     * Pull all of the reads off the iterator that overlap the left-most position among all
+     * reads in this ReadStateManager
+     */
+    public void collectPendingReads() {
+        if (!iterator.hasNext())
+            return;
+
+        // determine the left-most boundary that determines which reads to keep in this new pileup
+        final int firstContigIndex;
+        final int firstAlignmentStart;
+        if ( isEmpty() ) {
+            // there are no reads here, so our next state is the next read in the stream
+            firstContigIndex = iterator.peek().getReferenceIndex();
+            firstAlignmentStart = iterator.peek().getAlignmentStart();
+        } else {
+            // there's a read in the system, so it's our targeted first read
+            final AlignmentStateMachine firstState = getFirst();
+            firstContigIndex = firstState.getReferenceIndex();
+            // note this isn't the alignment start of the read, but rather the alignment start position
+            firstAlignmentStart = firstState.getGenomePosition();
+        }
+
+        while ( iterator.hasNext() && readStartsAtCurrentPosition(iterator.peek(), firstContigIndex, firstAlignmentStart) ) {
+            submitRead(iterator.next());
+        }
+
+        samplePartitioner.doneSubmittingReads();
+
+        for (final String sample : samples) {
+            final Collection<GATKSAMRecord> newReads = samplePartitioner.getReadsForSample(sample);
+
+            // if we're keeping reads, take the (potentially downsampled) list of new reads for this sample
+            // and add to the list of reads.  Note this may reorder the list of reads somewhat (it groups them
+            // by sample, but it cannot change their absolute position on the genome as they all must
+            // start at the current location)
+            if ( keepSubmittedReads )
+                submittedReads.addAll(newReads);
+
+            final PerSampleReadStateManager statesBySample = readStatesBySample.get(sample);
+            addReadsToSample(statesBySample, newReads);
+        }
+
+        samplePartitioner.reset();
+    }
+
+    /**
+     * Add a read to the sample partitioner, potentially adding it to all submitted reads, if appropriate
+     * @param read a non-null read
+     */
+    @Requires("read != null")
+    protected void submitRead(final GATKSAMRecord read) {
+        samplePartitioner.submitRead(read);
+    }
+
+    /**
+     * Transfer current list of submitted reads, clearing old list
+     *
+     * Takes the maintained list of submitted reads, and transfers it to the caller of this
+     * function.  The old list is set to a new, cleanly allocated list so the caller officially
+     * owns the list returned by this call.  This is the only way to clear the tracking
+     * of submitted reads, if enabled.
+     *
+     * How to use this function:
+     *
+     * while ( doing some work unit, such as creating pileup at some locus ):
+     *   interact with ReadStateManager in some way to make work unit
+     *   readsUsedInPileup = transferSubmittedReads()
+     *
+     * @throws UnsupportedOperationException if called when keepSubmittedReads is false
+     *
+     * @return the current list of submitted reads
+     */
+    @Ensures({
+            "result != null",
+            "result != submittedReads" // result and previous submitted reads are not == objects
+    })
+    public List<GATKSAMRecord> transferSubmittedReads() {
+        if ( ! keepSubmittedReads ) throw new UnsupportedOperationException("cannot transferSubmittedReads if you aren't keeping them");
+
+        final List<GATKSAMRecord> prevSubmittedReads = submittedReads;
+        this.submittedReads = new LinkedList<GATKSAMRecord>();
+
+        return prevSubmittedReads;
+    }
+
+    /**
+     * Are we keeping submitted reads, or not?
+     * @return true if we are keeping them, false otherwise
+     */
+    public boolean isKeepingSubmittedReads() {
+        return keepSubmittedReads;
+    }
+
+    /**
+     * Obtain a pointer to the list of submitted reads.
+     *
+     * This is not a copy of the list; it is shared with this ReadStateManager.  It should
+     * not be modified.  Updates to this ReadStateManager may change the contents of the
+     * list entirely.
+     *
+     * For testing purposes only.
+     *
+     * Will always be empty if we are not keeping submitted reads (keepSubmittedReads is false)
+     *
+     * @return a non-null list of reads that have been submitted to this ReadStateManager
+     */
+    @Ensures({"result != null","keepSubmittedReads || result.isEmpty()"})
+    protected List<GATKSAMRecord> getSubmittedReads() {
+        return submittedReads;
+    }
+
+    /**
+     * Add reads with the given sample name to the given hanger entry.
+     *
+     * @param readStates The list of read states to add this collection of reads to.
+     * @param reads      Reads to add.  Selected reads will be pulled from this source.
+     */
+    private void addReadsToSample(final PerSampleReadStateManager readStates, final Collection<GATKSAMRecord> reads) {
+        if (reads.isEmpty())
+            return;
+
+        final LinkedList<AlignmentStateMachine> newReadStates = new LinkedList<AlignmentStateMachine>();
+
+        for (final GATKSAMRecord read : reads) {
+            final AlignmentStateMachine state = new AlignmentStateMachine(read);
+            if ( state.stepForwardOnGenome() != null ) // todo -- should be an assertion not a skip
+                // explicitly filter out reads that are all insertions / soft clips
+                newReadStates.add(state);
+        }
+
+        totalReadStates += readStates.addStatesAtNextAlignmentStart(newReadStates);
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/locusiterator/SamplePartitioner.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/locusiterator/SamplePartitioner.java
new file mode 100644
index 0000000..9ff3998
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/locusiterator/SamplePartitioner.java
@@ -0,0 +1,172 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.locusiterator;
+
+import com.google.java.contract.Ensures;
+import com.google.java.contract.Requires;
+import htsjdk.samtools.SAMRecord;
+import org.broadinstitute.gatk.utils.downsampling.Downsampler;
+import org.broadinstitute.gatk.utils.downsampling.PassThroughDownsampler;
+import org.broadinstitute.gatk.utils.downsampling.ReservoirDownsampler;
+
+import java.util.*;
+
+/**
+ * Divides reads by sample and (if requested) does a preliminary downsampling pass
+ * with a ReservoirDownsampler.
+ *
+ * Note: stores reads by sample ID string, not by sample object
+ */
+class SamplePartitioner<T extends SAMRecord> {
+    /**
+     * Map from sample name (as a string) to a downsampler of reads for that sample
+     */
+    final private Map<String, Downsampler<T>> readsBySample;
+
+    /**
+     * Are we in a state where we're done submitting reads and have semi-finalized the
+     * underlying per sample downsampler?
+     */
+    boolean doneSubmittingReads = false;
+
+    /**
+     * Create a new SamplePartitioner capable of splitting reads up into buckets of reads for
+     * each sample in samples, and perform a preliminary downsampling of these reads
+     * (separately for each sample) if downsampling is requested in LIBSDownsamplingInfo
+     *
+     * Note that samples must be comprehensive, in that all reads ever submitted to this
+     * partitioner must come from one of the samples provided here.  If not, submitRead
+     * will throw an exception.  Duplicates in the list of samples will be ignored
+     *
+     * @param LIBSDownsamplingInfo do we want to downsample, and if so to what coverage?
+     * @param samples the complete list of samples we're going to partition reads into. Can be
+     *                empty, but in that case this code cannot function properly if you
+     *                attempt to add data to it.
+     */
+    @Ensures({
+            "readsBySample != null",
+            "readsBySample.size() == new HashSet(samples).size()"
+    })
+    public SamplePartitioner(final LIBSDownsamplingInfo LIBSDownsamplingInfo, final List<String> samples) {
+        if ( LIBSDownsamplingInfo == null ) throw new IllegalArgumentException("LIBSDownsamplingInfo cannot be null");
+        if ( samples == null ) throw new IllegalArgumentException("samples must be a non-null list");
+
+        readsBySample = new LinkedHashMap<String, Downsampler<T>>(samples.size());
+        for ( final String sample : samples ) {
+            readsBySample.put(sample, createDownsampler(LIBSDownsamplingInfo));
+        }
+    }
+
+    /**
+     * Create a new, ready to use downsampler based on the parameters in LIBSDownsamplingInfo
+     * @param LIBSDownsamplingInfo the parameters to use in creating the downsampler
+     * @return a downsampler appropriate for LIBSDownsamplingInfo.  If no downsampling is requested,
+     *   uses the PassThroughDownsampler, which does nothing at all.
+     */
+    @Requires("LIBSDownsamplingInfo != null")
+    @Ensures("result != null")
+    private Downsampler<T> createDownsampler(final LIBSDownsamplingInfo LIBSDownsamplingInfo) {
+        return LIBSDownsamplingInfo.isPerformDownsampling()
+                ? new ReservoirDownsampler<T>(LIBSDownsamplingInfo.getToCoverage(), true)
+                : new PassThroughDownsampler<T>();
+    }
+
+    /**
+     * Offer this read to the partitioner, putting it into the bucket of reads for the sample
+     * of read (obtained via the read's read group).
+     *
+     * If the read group is missing, uses the special "null" read group
+     *
+     * @throws IllegalStateException if the sample of read wasn't present in the original
+     *   set of samples provided to this SamplePartitioner at construction
+     *
+     * @param read the read to add to the sample's list of reads
+     */
+    @Requires("read != null")
+    @Ensures("doneSubmittingReads == false")
+    public void submitRead(final T read) {
+        final String sampleName = read.getReadGroup() != null ? read.getReadGroup().getSample() : null;
+        final Downsampler<T> downsampler = readsBySample.get(sampleName);
+        if ( downsampler == null )
+            throw new IllegalStateException("Offered read with sample name " + sampleName + " to SamplePartitioner " +
+                    "but this sample wasn't provided as one of possible samples at construction");
+
+        downsampler.submit(read);
+        doneSubmittingReads = false;
+    }
+
+    /**
+     * Tell this partitioner that all reads in this cycle have been submitted, so that we
+     * can finalize whatever downsampling is required by each sample.
+     *
+     * Note that we *must* call this function before getReadsForSample, or else that
+     * function will exception out.
+     */
+    @Ensures("doneSubmittingReads == true")
+    public void doneSubmittingReads() {
+        for ( final Downsampler<T> downsampler : readsBySample.values() ) {
+            downsampler.signalEndOfInput();
+        }
+        doneSubmittingReads = true;
+    }
+
+    /**
+     * Get the final collection of reads for this sample for this cycle
+     *
+     * The cycle is defined as all of the reads that occur between
+     * the first call to submitRead until doneSubmittingReads is called.  At that
+     * point additional downsampling may occur (depending on construction arguments)
+     * and that set of reads is returned here.
+     *
+     * Note that this function can only be called once per cycle, as the underlying
+     * collection of reads is cleared.
+     *
+     * @param sampleName the sample we want reads for, must be present in the original samples
+     * @return a non-null collection of reads for sample in this cycle
+     */
+    @Ensures("result != null")
+    public Collection<T> getReadsForSample(final String sampleName) {
+        if ( ! doneSubmittingReads ) throw new IllegalStateException("getReadsForSample called before doneSubmittingReads was called");
+
+        final Downsampler<T> downsampler = readsBySample.get(sampleName);
+        if ( downsampler == null ) throw new NoSuchElementException("Sample name not found");
+
+        return downsampler.consumeFinalizedItems();
+    }
+
+    /**
+     * Resets this SamplePartitioner, indicating that we're starting a new
+     * cycle of adding reads to each underlying downsampler.
+     */
+    @Ensures("doneSubmittingReads == false")
+    public void reset() {
+        for ( final Downsampler<T> downsampler : readsBySample.values() ) {
+            downsampler.clearItems();
+            downsampler.resetStats();
+        }
+        doneSubmittingReads = false;
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/nanoScheduler/EOFMarkedValue.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/nanoScheduler/EOFMarkedValue.java
new file mode 100644
index 0000000..b0c9d8f
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/nanoScheduler/EOFMarkedValue.java
@@ -0,0 +1,105 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.nanoScheduler;
+
+/**
+ * Wrapper to hold data that distinguishes a special EOF marker from a real object
+ *
+ * The only way to tell in a consumer thread that a blocking queue has no more data ever
+ * coming down the pipe is to pass in a "poison" or EOF object.  This class provides
+ * a generic capacity for that...
+ *
+ * The use case looks like this:
+ *
+ * BlockingQueue q
+ * producer:
+ *   while ( x has items )
+ *      q.put(new EOFMarkedValue(x))
+ *   q.put(new EOFMarkedValue())
+ *
+ * Consumer:
+ *   while ( true )
+ *       value = q.take()
+ *       if ( value.isEOFMarker() )
+ *          break
+ *       else
+ *          do something useful with value
+ *
+ *
+ * User: depristo
+ * Date: 9/6/12
+ * Time: 3:08 PM
+ */
+//@Invariant("! isEOFMarker() || value == null")
+class EOFMarkedValue<T> {
+    /**
+     * True if this is the EOF marker object
+     */
+    final private boolean isLast;
+
+    /**
+     * Our value, if we aren't the EOF marker
+     */
+    final private T value;
+
+    /**
+     * Create a new EOFMarkedValue containing a real value, where last is false
+     * @param value the value to hold (may itself be null)
+     */
+    EOFMarkedValue(final T value) {
+        isLast = false;
+        this.value = value;
+    }
+
+    /**
+     * Create a new EOFMarkedValue that is the last item
+     */
+    EOFMarkedValue() {
+        isLast = true;
+        this.value = null;
+    }
+
+    /**
+     * Is this the EOF marker?
+     *
+     * @return true if so, else false
+     */
+    public boolean isEOFMarker() {
+        return isLast;
+    }
+
+    /**
+     * Get the value held by this EOFMarkedValue
+     *
+     * @return the value
+     * @throws IllegalStateException if this is the last item
+     */
+    public T getValue() {
+        if ( isEOFMarker() )
+            throw new IllegalStateException("Cannot get value for last object");
+        return value;
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/nanoScheduler/InputProducer.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/nanoScheduler/InputProducer.java
new file mode 100644
index 0000000..5676c75
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/nanoScheduler/InputProducer.java
@@ -0,0 +1,217 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.nanoScheduler;
+
+import org.apache.log4j.Logger;
+
+import java.util.Iterator;
+import java.util.concurrent.CountDownLatch;
+
+/**
+ * Helper class that allows multiple threads to read input values from
+ * an iterator, and track the number of items read from that iterator.
+ */
+class InputProducer<InputType> {
+    private final static Logger logger = Logger.getLogger(InputProducer.class);
+
+    /**
+     * The iterator we are using to get data from
+     */
+    final Iterator<InputType> inputReader;
+
+    /**
+     * Have we read the last value from inputReader?
+     *
+     * Must be tracked in a field of our own (rather than calling inputReader.hasNext()
+     * on demand), as inputReader.hasNext() can actually end up doing a lot
+     * of work, and the method getNumInputValues() is supposed to be called not in the
+     * thread executing the reading of values but in the thread enqueuing results
+     */
+    boolean readLastValue = false;
+
+    /**
+     * Once we've readLastValue, lastValue contains a continually
+     * updating InputValue where EOF is true.  It's not necessarily
+     * a single value, as each read updates lastValue with the
+     * next EOF marker
+     */
+    private InputValue lastValue = null;
+
+    /** Number of values successfully read from inputReader so far */
+    int nRead = 0;
+
+    /** ID of the last real (non-EOF) value returned by next(); -1 before any value is read */
+    int inputID = -1;
+
+    /**
+     * Create an InputProducer reading values from inputReader
+     *
+     * @param inputReader a non-null iterator supplying the input values
+     */
+    public InputProducer(final Iterator<InputType> inputReader) {
+        if ( inputReader == null ) throw new IllegalArgumentException("inputReader cannot be null");
+        this.inputReader = inputReader;
+    }
+
+    /**
+     * Returns the number of elements in the input stream, AFTER we've read all of the values.
+     * If we haven't read them all yet, returns -1
+     *
+     * @return the total number of elements in input stream, or -1 if some are still to be read
+     */
+    public synchronized int getNumInputValues() {
+        return allInputsHaveBeenRead() ? nRead : -1;
+    }
+
+    /**
+     * Returns true if all of the elements have been read from the input stream
+     *
+     * @return true if all of the elements have been read from the input stream
+     */
+    public synchronized boolean allInputsHaveBeenRead() {
+        return readLastValue;
+    }
+
+    /**
+     * Read the next item from the input stream, if possible
+     *
+     * If the inputReader has values, returns them, otherwise return null.
+     *
+     * This method is synchronized, as it manipulates local state accessed across multiple threads.
+     *
+     * @return the next input stream value, or null if the stream contains no more elements
+     */
+    private synchronized InputType readNextItem() {
+        if ( ! inputReader.hasNext() ) {
+            // we are done, mark ourselves as such and return null
+            readLastValue = true;
+            return null;
+        } else {
+            // get the next value, and return it
+            final InputType input = inputReader.next();
+            if ( input == null )
+                throw new IllegalStateException("inputReader.next() returned a null value, breaking our contract");
+            nRead++;
+            return input;
+        }
+    }
+
+    /**
+     * Are there currently more values in the iterator?
+     *
+     * Note the word currently.  It's possible that some already submitted
+     * job will read a value from this InputProvider, so in some sense
+     * there are no more values and in the future there'll be no next
+     * value.  That said, once this returns false it means that all
+     * of the possible values have been read
+     *
+     * @return true if a future call to next might return a non-EOF value, false if
+     *         the underlying iterator is definitely empty
+     */
+    public synchronized boolean hasNext() {
+        return ! allInputsHaveBeenRead();
+    }
+
+    /**
+     * Get the next InputValue from this producer.  The next value is
+     * either (1) the next value from the iterator, in which case the
+     * the return value is an InputValue containing that value, or (2)
+     * an InputValue with the EOF marker, indicating that the underlying
+     * iterator has been exhausted.
+     *
+     * This function never fails -- it can be called endlessly and
+     * while the underlying iterator has values it returns them, and then
+     * it returns a succession of EOF marking input values.
+     *
+     * @return an InputValue containing the next value in the underlying
+     *         iterator, or one with EOF marker, if the iterator is exhausted
+     */
+    public synchronized InputValue next() {
+        if ( readLastValue ) {
+            // we read the last value, so our value is the next
+            // EOF marker based on the last value.  Make sure to
+            // update the last value so the markers keep incrementing
+            // their job ids
+            lastValue = lastValue.nextEOF();
+            return lastValue;
+        } else {
+            final InputType value = readNextItem();
+
+            if ( value == null ) {
+                if ( ! readLastValue )
+                    throw new IllegalStateException("value == null but readLastValue is false!");
+
+                // add the EOF object so our consumer knows we are done in all inputs
+                // note that we do not increase inputID here, so that variable indicates the ID
+                // of the last real value read from the queue
+                lastValue = new InputValue(inputID + 1);
+                return lastValue;
+            } else {
+                // add the actual value to the outputQueue
+                return new InputValue(++inputID, value);
+            }
+        }
+    }
+
+    /**
+     * Helper class that contains a read value suitable for EOF marking in a BlockingQueue
+     *
+     * This class also contains an ID, an integer incrementing from 0 to N, for N total
+     * values in the input stream.  This ID indicates which element in the element stream this
+     * InputValue corresponds to.  Necessary for tracking and ordering results by input position.
+     *
+     * Note that EOF markers have IDs > N, and ID values >> N can occur if many EOF markers
+     * are enqueued in the outputQueue.
+     */
+    class InputValue extends EOFMarkedValue<InputType> {
+        final int id;
+
+        /** Create an InputValue holding a real datum with the given id (must be >= 0) */
+        private InputValue(final int id, InputType datum) {
+            super(datum);
+            if ( id < 0 ) throw new IllegalArgumentException("id must be >= 0");
+            this.id = id;
+        }
+
+        /** Create an EOF-marker InputValue with the given id (must be >= 0) */
+        private InputValue(final int id) {
+            super();
+            if ( id < 0 ) throw new IllegalArgumentException("id must be >= 0");
+            this.id = id;
+        }
+
+        /**
+         * Returns the ID of this input marker
+         * @return id >= 0
+         */
+        public int getId() {
+            return id;
+        }
+
+        /**
+         * Create another EOF marker with ID + 1 to this one.
+         *
+         * Useful in the case where we need to enqueue another EOF marker for future jobs and we
+         * want them to have a meaningful ID, one greater than the last one.
+         *
+         * @return a new EOF-marker InputValue whose ID is getId() + 1
+         * @throws IllegalArgumentException if this InputValue is not itself an EOF marker
+         */
+        //@Ensures({"result.isEOFMarker()", "result.getId() == getId() + 1"})
+        public InputValue nextEOF() {
+            if ( ! isEOFMarker() )
+                throw new IllegalArgumentException("Cannot request next EOF marker for non-EOF marker InputValue");
+            return new InputValue(getId() + 1);
+        }
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/nanoScheduler/MapResult.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/nanoScheduler/MapResult.java
new file mode 100644
index 0000000..ab737f2
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/nanoScheduler/MapResult.java
@@ -0,0 +1,75 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.nanoScheduler;
+
+/**
+ * Holds the results of a map job suitable for producer/consumer threading
+ * via a BlockingQueue
+ *
+ * MapResults order themselves by job ID, so results can be reduced in the
+ * order the original map jobs were submitted.
+ */
+class MapResult<MapType> extends EOFMarkedValue<MapType> implements Comparable<MapResult<MapType>> {
+    final int jobID;
+
+    /**
+     * Create a new MapResult with value datum and job ID jobID
+     *
+     * @param datum the value produced by the map job
+     * @param jobID the id of the map job (for correctness testing), must be >= 0
+     */
+    MapResult(final MapType datum, final int jobID) {
+        super(datum);
+        // validate before storing, so a bad jobID never escapes the constructor
+        if ( jobID < 0 ) throw new IllegalArgumentException("JobID must be >= 0");
+        this.jobID = jobID;
+    }
+
+    /**
+     * Create a new EOF-marker MapResult with job ID jobID
+     *
+     * @param jobID the id of the map job, must be >= 0
+     */
+    MapResult(final int jobID) {
+        super();
+        if ( jobID < 0 ) throw new IllegalArgumentException("JobID must be >= 0");
+        this.jobID = jobID;
+    }
+
+    /**
+     * @return the job ID of the map job that produced this MapResult
+     */
+    public int getJobID() {
+        return jobID;
+    }
+
+    /**
+     * Compare these MapResults in order of JobID.
+     *
+     * @param o the MapResult to compare against
+     * @return a negative integer, zero, or a positive integer as this jobID is
+     *         less than, equal to, or greater than o's jobID
+     */
+    @Override
+    public int compareTo(final MapResult<MapType> o) {
+        // Integer.compare avoids the boxing of Integer.valueOf(jobID).compareTo(...)
+        return Integer.compare(jobID, o.getJobID());
+    }
+
+    @Override
+    public String toString() {
+        return "[MapResult id=" + jobID + "]";
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/nanoScheduler/MapResultsQueue.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/nanoScheduler/MapResultsQueue.java
new file mode 100644
index 0000000..3d66823
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/nanoScheduler/MapResultsQueue.java
@@ -0,0 +1,116 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.nanoScheduler;
+
+import org.broadinstitute.gatk.utils.collections.ExpandingArrayList;
+
+/**
+ * An ordered holding area for MapResults awaiting in-order reduction.
+ *
+ * User: depristo
+ * Date: 12/19/12
+ * Time: 3:53 PM
+ *
+ * This class makes some critical assumptions.  First is that the jobID of the first
+ * job is 0.  If this isn't true the MapResultsQueue will certainly fail.
+ */
+public class MapResultsQueue<MapType> {
+    //private final static boolean DEBUG = false;
+    //private final static Logger logger = Logger.getLogger(MapResultsQueue.class);
+
+    /**
+     * Although naturally stored as priority blocking queue, this is actually quite expensive
+     * due to the O(n log n) sorting calculation.  Since we know that the job ids start
+     * at 0 and increment by 1 in each successive job, we store an array instead.  The
+     * array is indexed by jobID, and contains the MapResult for that job id.  Because elements
+     * can be added to the queue in any order, we need to use an expanding array list to
+     * store the elements.
+     */
+    final ExpandingArrayList<MapResult<MapType>> queue = new ExpandingArrayList<MapResult<MapType>>(10000);
+
+    /**
+     * The jobID of the last job we've seen
+     */
+    int prevJobID = -1; // no jobs observed
+
+    /**
+     * Put mapResult into this MapResultsQueue, associated with its jobID
+     * @param mapResult a non-null map result
+     */
+    public synchronized void put(final MapResult<MapType> mapResult) {
+        if ( mapResult == null ) throw new IllegalArgumentException("mapResult cannot be null");
+
+        // make sure that nothing is at the job id for map
+        assert queue.size() < mapResult.getJobID() || queue.get(mapResult.getJobID()) == null;
+
+        queue.set(mapResult.getJobID(), mapResult);
+    }
+
+    /**
+     * Should we reduce the next value in the mapResultQueue?
+     *
+     * @return true if we should reduce
+     */
+    public synchronized boolean nextValueIsAvailable() {
+        // NOTE(review): assumes ExpandingArrayList.get() returns null (rather than throwing)
+        // when nextJobID() is past the end of the list -- confirm against ExpandingArrayList
+        final MapResult<MapType> nextMapResult = queue.get(nextJobID());
+
+        if ( nextMapResult == null ) {
+            // natural case -- the next job hasn't had a value added yet
+            return false;
+        } else if ( nextMapResult.getJobID() != nextJobID() ) {
+            // sanity check -- the job id at next isn't the one we expect
+            throw new IllegalStateException("Next job ID " + nextMapResult.getJobID() + " is not == previous job id " + prevJobID + " + 1");
+        } else {
+            // there's a value at the next job id, so return true
+            return true;
+        }
+    }
+
+    /**
+     * Get the next job ID we'd expect to see, given our previous job id
+     * @return the next job id we'd fetch to reduce
+     */
+    private int nextJobID() {
+        return prevJobID + 1;
+    }
+
+    /**
+     * Remove and return the next MapResult, in increasing jobID order.
+     *
+     * Can only be called when nextValueIsAvailable is true
+     *
+     * @return the MapResult whose jobID is one greater than that of the previous take()
+     * @throws InterruptedException declared but never actually thrown by this implementation
+     */
+    // TODO -- does this have to be synchronized? -- I think the answer is no
+    public synchronized MapResult<MapType> take() throws InterruptedException {
+        final MapResult<MapType> result = queue.get(nextJobID());
+
+        // make sure the value we've fetched has the right id
+        assert result.getJobID() == nextJobID();
+
+        prevJobID = result.getJobID();
+        // clear the slot so the taken MapResult can be garbage collected
+        queue.set(prevJobID, null);
+
+        return result;
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/nanoScheduler/NSMapFunction.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/nanoScheduler/NSMapFunction.java
new file mode 100644
index 0000000..31ad7e0
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/nanoScheduler/NSMapFunction.java
@@ -0,0 +1,44 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.nanoScheduler;
+
+/**
+ * A function that maps from InputType -> ResultType
+ *
+ * For use with the NanoScheduler
+ *
+ * User: depristo
+ * Date: 8/24/12
+ * Time: 9:49 AM
+ */
+public interface NSMapFunction<InputType, ResultType> {
+    /**
+     * Return function on input, returning a value of ResultType
+     *
+     * @param input a single element of the input stream to transform
+     * @return the result of applying this map function to input
+     */
+    public ResultType apply(final InputType input);
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/nanoScheduler/NSProgressFunction.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/nanoScheduler/NSProgressFunction.java
new file mode 100644
index 0000000..9b1c406
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/nanoScheduler/NSProgressFunction.java
@@ -0,0 +1,37 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.nanoScheduler;
+
+/**
+ * Callback interface used by the NanoScheduler to report progress during execution.
+ *
+ * User: depristo
+ * Date: 9/4/12
+ * Time: 2:10 PM
+ */
+public interface NSProgressFunction<InputType> {
+    /**
+     * Called by the scheduler to report progress
+     *
+     * @param lastMapInput the last input element handed to map (see NanoScheduler.setProgressFunction)
+     */
+    public void progress(final InputType lastMapInput);
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/nanoScheduler/NSReduceFunction.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/nanoScheduler/NSReduceFunction.java
new file mode 100644
index 0000000..7f1027f
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/nanoScheduler/NSReduceFunction.java
@@ -0,0 +1,43 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.nanoScheduler;
+
+/**
+ * A function that combines a value of MapType with an existing ReduceType value into a new ReduceType
+ *
+ * User: depristo
+ * Date: 8/24/12
+ * Time: 9:49 AM
+ */
+public interface NSReduceFunction<MapType, ReduceType> {
+    /**
+     * Combine one with sum into a new ReduceType
+     *
+     * @param one the result of a map call on an input element
+     * @param sum the cumulative reduce result over all previous map calls
+     * @return the new cumulative reduce result incorporating one
+     */
+    public ReduceType apply(MapType one, ReduceType sum);
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/nanoScheduler/NanoScheduler.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/nanoScheduler/NanoScheduler.java
new file mode 100644
index 0000000..50139dd
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/nanoScheduler/NanoScheduler.java
@@ -0,0 +1,494 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.nanoScheduler;
+
+import com.google.java.contract.Ensures;
+import com.google.java.contract.Requires;
+import org.apache.log4j.Logger;
+import org.broadinstitute.gatk.utils.MultiThreadedErrorTracker;
+import org.broadinstitute.gatk.utils.threading.NamedThreadFactory;
+
+import java.util.Iterator;
+import java.util.List;
+import java.util.concurrent.*;
+
+/**
+ * Framework for very fine grained MapReduce parallelism
+ *
+ * The overall framework works like this
+ *
+ * nano <- new NanoScheduler(bufferSize, numberOfMapElementsToProcessTogether, nThreads)
+ * for ( List[Input] outerData : outerDataLoop )
+ *   result = nano.execute(outerData.iterator(), map, reduce)
+ *
+ * bufferSize determines how many elements from the input stream are read in one go by the
+ * nanoscheduler.  The scheduler may hold up to bufferSize in memory at one time, as well
+ * as up to bufferSize map results as well.
+ *
+ * numberOfMapElementsToProcessTogether determines how many input elements are processed
+ * together each thread cycle.  For example, if this value is 10, then the input data
+ * is grouped together in units of 10 elements each, and map called on each in turn.  The more
+ * heavy-weight the map function is, in terms of CPU costs, the more it makes sense to
+ * have this number be small.  The lighter the CPU cost per element, though, the more this
+ * parameter introduces overhead due to need to context switch among threads to process
+ * each input element.  A value of -1 lets the nanoscheduler guess at a reasonable trade-off value.
+ *
+ * nThreads is a bit obvious yes?  Note though that the nanoscheduler assumes that it gets 1 thread
+ * from its client during the execute call, as this call blocks until all work is done.  The caller
+ * thread is put to work by execute to help with the processing of the data.  So in reality the
+ * nanoScheduler only spawns nThreads - 1 additional workers (if this is > 1).
+ *
+ * User: depristo
+ * Date: 8/24/12
+ * Time: 9:47 AM
+ */
+public class NanoScheduler<InputType, MapType, ReduceType> {
+    private final static Logger logger = Logger.getLogger(NanoScheduler.class);
+    private final static boolean ALLOW_SINGLE_THREAD_FASTPATH = true;
+    protected final static int UPDATE_PROGRESS_FREQ = 100;
+
+    /**
+     * Currently not used, but kept because it's conceptual reasonable to have a buffer
+     */
+    final int bufferSize;
+
+    /**
+     * The number of threads we're using to execute the map jobs in this nano scheduler
+     */
+    final int nThreads;
+
+    final ExecutorService masterExecutor;
+    final ExecutorService mapExecutor;
+    final MultiThreadedErrorTracker errorTracker = new MultiThreadedErrorTracker();
+
+    boolean shutdown = false;
+    boolean debug = false;
+    private NSProgressFunction<InputType> progressFunction = null;
+
+    /**
+     * Create a new nanoscheduler with the desired characteristics requested by the argument
+     *
+     * The input buffer size defaults to 100 * nThreads.
+     *
+     * @param nThreads the number of threads to use to get work done, in addition to the
+     *                 thread calling execute
+     */
+    public NanoScheduler(final int nThreads) {
+        this(nThreads*100, nThreads);
+    }
+
+    /**
+     * Create a new nanoscheduler with an explicit buffer size and thread count
+     *
+     * When nThreads == 1 no executor services are created; execute takes a
+     * single-threaded path on the calling thread.
+     *
+     * @param bufferSize the number of input elements that may be buffered, must be >= 1
+     * @param nThreads the number of threads to use to get work done, must be >= 1
+     */
+    protected NanoScheduler(final int bufferSize, final int nThreads) {
+        if ( bufferSize < 1 ) throw new IllegalArgumentException("bufferSize must be >= 1, got " + bufferSize);
+        if ( nThreads < 1 ) throw new IllegalArgumentException("nThreads must be >= 1, got " + nThreads);
+
+        this.bufferSize = bufferSize;
+        this.nThreads = nThreads;
+
+        if ( nThreads == 1 ) {
+            this.mapExecutor = this.masterExecutor = null;
+        } else {
+            this.masterExecutor = Executors.newSingleThreadExecutor(new NamedThreadFactory("NS-master-thread-%d"));
+            this.mapExecutor = Executors.newFixedThreadPool(nThreads, new NamedThreadFactory("NS-map-thread-%d"));
+        }
+    }
+
+    /**
+     * The number of parallel map threads in use with this NanoScheduler
+     * @return the thread count, always > 0
+     */
+    @Ensures("result > 0")
+    public int getnThreads() {
+        return nThreads;
+    }
+
+    /**
+     * The input buffer size used by this NanoScheduler
+     * @return the buffer size, always > 0
+     */
+    @Ensures("result > 0")
+    public int getBufferSize() {
+        return this.bufferSize;
+    }
+
+    /**
+     * Tells this nanoScheduler to shutdown immediately, releasing all its resources.
+     *
+     * After this call, execute cannot be invoked without throwing an error.
+     * Note that calling shutdown a second time (with nThreads > 1) will throw an
+     * IllegalStateException from shutdownExecutor, as the executors are already down.
+     */
+    public void shutdown() {
+        if ( nThreads > 1 ) {
+            shutdownExecutor("mapExecutor", mapExecutor);
+            shutdownExecutor("masterExecutor", masterExecutor);
+        }
+
+        shutdown = true;
+    }
+
+    /**
+     * Helper function to cleanly shutdown an execution service, checking that the execution
+     * state is clean when it's done.
+     *
+     * @param name a string name for error messages for the executorService we are shutting down
+     * @param executorService the executorService to shut down
+     * @throws IllegalStateException if the executor is already shut down, or if tasks
+     *                               remain queued after shutdownNow()
+     */
+    @Requires({"name != null", "executorService != null"})
+    @Ensures("executorService.isShutdown()")
+    private void shutdownExecutor(final String name, final ExecutorService executorService) {
+        if ( executorService.isShutdown() || executorService.isTerminated() )
+            throw new IllegalStateException("Executor service " + name + " is already shut down!");
+
+        // shutdownNow returns the tasks that never commenced; there should be none
+        final List<Runnable> remaining = executorService.shutdownNow();
+        if ( ! remaining.isEmpty() )
+            throw new IllegalStateException(remaining.size() + " remaining tasks found in an executor " + name + ", unexpected behavior!");
+    }
+
+    /**
+     * @return true if this nanoScheduler is shutdown, or false if it's still open for business
+     */
+    public boolean isShutdown() {
+        return shutdown;
+    }
+
+    /**
+     * @return are we displaying verbose debugging information about the scheduling?
+     * @see #setDebug(boolean)
+     */
+    public boolean isDebug() {
+        return debug;
+    }
+
+    /**
+     * Helper function to display a String.formatted message if we are doing verbose debugging
+     *
+     * Messages are emitted at WARN level, prefixed with the calling thread's id.
+     *
+     * @param format the format argument suitable for String.format
+     * @param args the arguments for String.format
+     */
+    @Requires("format != null")
+    protected void debugPrint(final String format, Object ... args) {
+        if ( isDebug() )
+            logger.warn("Thread " + Thread.currentThread().getId() + ":" + String.format(format, args));
+    }
+
+    /**
+     * Turn on/off verbose debugging
+     *
+     * @param debug true if we want verbose debugging output, false otherwise
+     */
+    public void setDebug(boolean debug) {
+        this.debug = debug;
+    }
+
+    /**
+     * Set the progress callback function to progressFunction
+     *
+     * The progress callback is invoked after every bufferSize elements have been processed by map/reduce
+     *
+     * @param progressFunction a progress function to call, or null if you don't want any progress callback
+     */
+    public void setProgressFunction(final NSProgressFunction<InputType> progressFunction) {
+        this.progressFunction = progressFunction;
+    }
+
+    /**
+     * Execute a map/reduce job with this nanoScheduler
+     *
+     * Data comes from inputReader.  Will be read until hasNext() == false.
+     * map is called on each element provided by inputReader.  No order of operations is guaranteed.
+     * reduce is called in order of the input data provided by inputReader on the result of map() applied
+     * to each element.
+     *
+     * Note that the caller thread is put to work with this function call.  The call doesn't return
+     * until all elements have been processed.
+     *
+     * It is safe to call this function repeatedly on a single nanoScheduler, at least until the
+     * shutdown method is called.
+     *
+     * Note that this function goes through a single threaded fast path if the number of threads
+     * is 1.
+     *
+     * @param inputReader an iterator providing us with the input data to nanoSchedule map/reduce over
+     * @param map the map function from input type -> map type, will be applied in parallel to each input
+     * @param initialValue the starting value for the reduce
+     * @param reduce the reduce function from map type + reduce type -> reduce type to be applied in order to map results
+     * @return the last reduce value
+     * @throws IllegalStateException if this scheduler has already been shut down
+     * @throws IllegalArgumentException if inputReader, map, or reduce is null
+     */
+    public ReduceType execute(final Iterator<InputType> inputReader,
+                              final NSMapFunction<InputType, MapType> map,
+                              final ReduceType initialValue,
+                              final NSReduceFunction<MapType, ReduceType> reduce) {
+        if ( isShutdown() ) throw new IllegalStateException("execute called on already shutdown NanoScheduler");
+        if ( inputReader == null ) throw new IllegalArgumentException("inputReader cannot be null");
+        if ( map == null ) throw new IllegalArgumentException("map function cannot be null");
+        if ( reduce == null ) throw new IllegalArgumentException("reduce function cannot be null");
+
+        ReduceType result;
+        if ( ALLOW_SINGLE_THREAD_FASTPATH && getnThreads() == 1 ) {
+            result = executeSingleThreaded(inputReader, map, initialValue, reduce);
+        } else {
+            result = executeMultiThreaded(inputReader, map, initialValue, reduce);
+        }
+
+        return result;
+    }
+
+    /**
+     * Reference map/reduce implementation used when only one thread is available.
+     *
+     * Applies map to each input element in order and folds each map result into the
+     * running reduce value, starting from initialValue.
+     *
+     * @return the reduce result of this map/reduce job
+     */
+    @Requires({"inputReader != null", "map != null", "reduce != null"})
+    private ReduceType executeSingleThreaded(final Iterator<InputType> inputReader,
+                                             final NSMapFunction<InputType, MapType> map,
+                                             final ReduceType initialValue,
+                                             final NSReduceFunction<MapType, ReduceType> reduce) {
+        ReduceType sum = initialValue;
+        int count = 0;
+
+        while ( inputReader.hasNext() ) {
+            final InputType input = inputReader.next();
+
+            // apply the map function to this input element
+            final MapType mapValue = map.apply(input);
+
+            // maybe notify the progress callback (throttled)
+            updateProgress(count++, input);
+
+            // fold the map result into the running reduce sum
+            sum = reduce.apply(mapValue, sum);
+        }
+
+        return sum;
+    }
+
+    /**
+     * Possibly invoke the progress callback for this input element.
+     *
+     * The callback fires only when counter is a multiple of UPDATE_PROGRESS_FREQ,
+     * so progress reporting doesn't cost meaningful cpu time.
+     *
+     * @param counter increasing counter used to throttle the updates
+     * @param input the input element we're currently at
+     */
+    private void updateProgress(final int counter, final InputType input) {
+        if ( progressFunction == null ) {
+            return;
+        }
+        if ( counter % UPDATE_PROGRESS_FREQ == 0 ) {
+            progressFunction.progress(input);
+        }
+    }
+
+    /**
+     * Efficient parallel version of Map/Reduce
+     *
+     * Submits a MasterJob to the master executor, then polls its Future with a bounded
+     * timeout while re-checking the shared error tracker, so failures in the input, map,
+     * or reduce threads are rethrown here instead of deadlocking the caller.
+     *
+     * @return the reduce result of this map/reduce job
+     */
+    @Requires({"inputReader != null", "map != null", "reduce != null"})
+    private ReduceType executeMultiThreaded(final Iterator<InputType> inputReader,
+                                            final NSMapFunction<InputType, MapType> map,
+                                            final ReduceType initialValue,
+                                            final NSReduceFunction<MapType, ReduceType> reduce) {
+        debugPrint("Executing nanoScheduler");
+
+        // start up the master job
+        final MasterJob masterJob = new MasterJob(inputReader, map, initialValue, reduce);
+        final Future<ReduceType> reduceResult = masterExecutor.submit(masterJob);
+
+        while ( true ) {
+            // check that no errors occurred while we were waiting
+            handleErrors();
+//            checkForDeadlocks();
+
+            try {
+                // bounded wait so we periodically fall out and re-check for worker errors
+                final ReduceType result = reduceResult.get(100, TimeUnit.MILLISECONDS);
+
+                // in case an error occurred in the reduce
+                handleErrors();
+
+                // return our final reduce result
+                return result;
+            } catch (final TimeoutException ex ) {
+                // a normal case -- we just aren't done yet
+            } catch (final InterruptedException ex) {
+                errorTracker.notifyOfError(ex);
+                // will handle error in the next round of the loop
+            } catch (final ExecutionException ex) {
+                errorTracker.notifyOfError(ex);
+                // will handle error in the next round of the loop
+            }
+        }
+    }
+
+//    private void checkForDeadlocks() {
+//        if ( deadLockCheckCounter++ % 100 == 0 ) {
+//            logger.info("Checking for deadlocks...");
+//            final ThreadMXBean bean = ManagementFactory.getThreadMXBean();
+//            final long[] threadIds = bean.findDeadlockedThreads(); // Returns null if no threads are deadlocked.
+//
+//            if (threadIds != null) {
+//                final ThreadInfo[] infos = bean.getThreadInfo(threadIds);
+//
+//                logger.error("!!! Deadlock detected !!!!");
+//                for (final ThreadInfo info : infos) {
+//                    logger.error("Thread " + info);
+//                    for ( final StackTraceElement elt : info.getStackTrace() ) {
+//                        logger.error("\t" + elt.toString());
+//                    }
+//                }
+//            }
+//        }
+//    }
+
+    /**
+     * If any worker has recorded an error, shut down both executors and rethrow it.
+     *
+     * A no-op when no error has been reported to the error tracker.
+     */
+    private void handleErrors() {
+        if ( ! errorTracker.hasAnErrorOccurred() ) {
+            return;
+        }
+
+        // stop all outstanding work before propagating the failure to the caller
+        masterExecutor.shutdownNow();
+        mapExecutor.shutdownNow();
+        errorTracker.throwErrorIfPending();
+    }
+
+    /**
+     * MasterJob has the task to enqueue Map jobs and wait for the final reduce
+     *
+     * It must be run in a separate thread in order to properly handle errors that may occur
+     * in the input, map, or reduce jobs without deadlocking.
+     *
+     * The result of this callable is the final reduce value for the input / map / reduce jobs
+     */
+    private class MasterJob implements Callable<ReduceType> {
+        final Iterator<InputType> inputReader;
+        final NSMapFunction<InputType, MapType> map;
+        final ReduceType initialValue;
+        final NSReduceFunction<MapType, ReduceType> reduce;
+
+        /**
+         * Capture the input iterator and map/reduce functions for this run.
+         */
+        private MasterJob(Iterator<InputType> inputReader, NSMapFunction<InputType, MapType> map, ReduceType initialValue, NSReduceFunction<MapType, ReduceType> reduce) {
+            this.inputReader = inputReader;
+            this.map = map;
+            this.initialValue = initialValue;
+            this.reduce = reduce;
+        }
+
+        @Override
+        public ReduceType call() {
+            // Create the input producer and start it running
+            final InputProducer<InputType> inputProducer = new InputProducer<InputType>(inputReader);
+
+            // create the MapResultsQueue to store results of map jobs.
+            final MapResultsQueue<MapType> mapResultQueue = new MapResultsQueue<MapType>();
+
+            // create the reducer we'll use for this nano scheduling run
+            final Reducer<MapType, ReduceType> reducer = new Reducer<MapType, ReduceType>(reduce, errorTracker, initialValue);
+
+            // one latch count per worker; each ReadMapReduceJob counts down when it exits
+            final CountDownLatch runningMapJobs = new CountDownLatch(nThreads);
+
+            try {
+                // create and submit the info needed by the read/map/reduce threads to do their work
+                for ( int i = 0; i < nThreads; i++ ) {
+                    mapExecutor.submit(new ReadMapReduceJob(inputProducer, mapResultQueue, runningMapJobs, map, reducer));
+                }
+
+                // wait for all of the input and map threads to finish
+                return waitForCompletion(mapResultQueue, runningMapJobs, reducer);
+            } catch (Throwable ex) {
+                // report the failure to the outer thread; the value returned here is then ignored
+                errorTracker.notifyOfError(ex);
+                return initialValue;
+            }
+        }
+
+        /**
+         * Wait until the input thread and all map threads have completed running, and return the final reduce result
+         */
+        private ReduceType waitForCompletion(final MapResultsQueue<MapType> mapResultsQueue,
+                                             final CountDownLatch runningMapJobs,
+                                             final Reducer<MapType, ReduceType> reducer) throws InterruptedException {
+            // wait for all the map threads to finish by waiting on the runningMapJobs latch
+            runningMapJobs.await();
+
+            // do a final reduce here.  This is critically important because the InputMapReduce jobs
+            // no longer block on reducing, so it's possible for all the threads to end with a few
+            // reduce jobs on the queue still to do.  This call ensures that we reduce everything
+            reducer.reduceAsMuchAsPossible(mapResultsQueue, true);
+
+            // wait until we have a final reduce result
+            final ReduceType finalSum = reducer.getReduceResult();
+
+            // everything is finally shutdown, return the final reduce value
+            return finalSum;
+        }
+    }
+
+    /**
+     * Worker job that repeatedly pulls inputs, maps them, and enqueues the results.
+     *
+     * Each job loops until the input producer reports EOF: it applies map to each input,
+     * puts the MapResult on the shared queue, and opportunistically reduces queued results
+     * when no other thread is already reducing.  Any exception is forwarded to the shared
+     * error tracker, and the runningMapJobs latch is always counted down on exit so the
+     * MasterJob can detect completion.
+     */
+    private class ReadMapReduceJob implements Runnable {
+        final InputProducer<InputType> inputProducer;
+        final MapResultsQueue<MapType> mapResultQueue;
+        final NSMapFunction<InputType, MapType> map;
+        final Reducer<MapType, ReduceType> reducer;
+        final CountDownLatch runningMapJobs;
+
+        private ReadMapReduceJob(final InputProducer<InputType> inputProducer,
+                                 final MapResultsQueue<MapType> mapResultQueue,
+                                 final CountDownLatch runningMapJobs,
+                                 final NSMapFunction<InputType, MapType> map,
+                                 final Reducer<MapType, ReduceType> reducer) {
+            this.inputProducer = inputProducer;
+            this.mapResultQueue = mapResultQueue;
+            this.runningMapJobs = runningMapJobs;
+            this.map = map;
+            this.reducer = reducer;
+        }
+
+        @Override
+        public void run() {
+            try {
+                boolean done = false;
+                while ( ! done ) {
+                    // get the next item from the input producer
+                    final InputProducer<InputType>.InputValue inputWrapper = inputProducer.next();
+
+                    if ( inputWrapper.isEOFMarker() ) {
+                        // no work to do.  We may see EOF without having processed anything
+                        // because we don't necessarily know how much input there is when
+                        // we queue up our jobs
+                        done = true;
+                    } else {
+                        final InputType input = inputWrapper.getValue();
+
+                        // actually execute the map
+                        final MapType mapValue = map.apply(input);
+
+                        // enqueue the result into the mapResultQueue
+                        mapResultQueue.put(new MapResult<MapType>(mapValue, inputWrapper.getId()));
+
+                        // reduce as much as possible, without blocking, if another thread is already doing reduces
+                        reducer.reduceAsMuchAsPossible(mapResultQueue, false);
+
+                        updateProgress(inputWrapper.getId(), input);
+                    }
+                }
+            } catch (Throwable ex) {
+                errorTracker.notifyOfError(ex);
+            } finally {
+                // this map job is finished: count down the latch the MasterJob awaits on
+                runningMapJobs.countDown();
+            }
+        }
+    }
+}
\ No newline at end of file
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/nanoScheduler/Reducer.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/nanoScheduler/Reducer.java
new file mode 100644
index 0000000..cb3263c
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/nanoScheduler/Reducer.java
@@ -0,0 +1,169 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.nanoScheduler;
+
+import com.google.java.contract.Ensures;
+import org.apache.log4j.Logger;
+import org.broadinstitute.gatk.utils.MultiThreadedErrorTracker;
+
+import java.util.concurrent.locks.Lock;
+import java.util.concurrent.locks.ReentrantLock;
+
+/**
+ * Reducer supporting multi-threaded reduce of the map/reduce.
+ *
+ * reduceAsMuchAsPossible is the key function.  Multiple threads can call into this, providing
+ * the map results queue, and this class accumulates the result of calling reduce
+ * on the maps objects.  reduceAsMuchAsPossible isn't directly synchronized, but manages multi-threading
+ * directly with a lock.  Threads can request either to block on the reduce call until it can be
+ * executed, or immediately exit if the lock isn't available.  That allows multi-threaded users
+ * to avoid piling up waiting to reduce while one thread is reducing.  They can instead immediately
+ * leave to go do something else productive
+ *
+ * @author depristo
+ * @since 2012
+ */
+class Reducer<MapType, ReduceType> {
+    private final static Logger logger = Logger.getLogger(Reducer.class);
+
+    /**
+     * The reduce function to execute
+     */
+    private final NSReduceFunction<MapType, ReduceType> reduce;
+
+    /**
+     * Used to communicate errors to the outer master thread
+     */
+    private final MultiThreadedErrorTracker errorTracker;
+
+    /**
+     * Lock used to protect the call reduceAsMuchAsPossible from race conditions
+     */
+    private final Lock reduceLock = new ReentrantLock();
+
+    /**
+     * The sum of the reduce function applied to all MapResults.  After this Reducer
+     * is done sum contains the final reduce result.
+     *
+     * Written only while holding reduceLock (see reduceAsMuchAsPossible).
+     */
+    ReduceType sum;
+
+    /**
+     * Create a new Reducer that will apply the reduce function with initialSum value
+     * to values provided via reduceAsMuchAsPossible
+     *
+     * @param reduce the reduce function to apply
+     * @param errorTracker used to forward exceptions raised during reduction to the master thread
+     * @param initialSum the initial reduce sum
+     */
+    public Reducer(final NSReduceFunction<MapType, ReduceType> reduce,
+                   final MultiThreadedErrorTracker errorTracker,
+                   final ReduceType initialSum) {
+        if ( errorTracker == null ) throw new IllegalArgumentException("Error tracker cannot be null");
+        if ( reduce == null ) throw new IllegalArgumentException("Reduce function cannot be null");
+
+        this.errorTracker = errorTracker;
+        this.reduce = reduce;
+        this.sum = initialSum;
+    }
+
+    /**
+     * Reduce as much data as possible in mapResultQueue, returning the number of reduce calls completed
+     *
+     * As much as possible is defined as all of the MapResults in the queue that are in jobID order,
+     * continuing from the last job we reduced previously, up to either the queue being empty or the
+     * next MapResult not having JobID == prevJobID + 1.
+     *
+     * Exceptions thrown by the reduce function are forwarded to the error tracker, not rethrown.
+     *
+     * @param mapResultQueue a queue of MapResults in jobID order
+     * @param waitForLock if true, block until the reduce lock is available; if false, return
+     *                    immediately (doing no work) when another thread is already reducing
+     * @return the number of reduces run, >= 0
+     */
+    @Ensures("result >= 0")
+    public int reduceAsMuchAsPossible(final MapResultsQueue<MapType> mapResultQueue, final boolean waitForLock) {
+        if ( mapResultQueue == null ) throw new IllegalArgumentException("mapResultQueue cannot be null");
+        int nReducesNow = 0;
+
+        final boolean haveLock = acquireReduceLock(waitForLock);
+        try {
+            if ( haveLock ) {
+                while ( mapResultQueue.nextValueIsAvailable() ) {
+                    final MapResult<MapType> result = mapResultQueue.take();
+
+                    if ( ! result.isEOFMarker() ) {
+                        nReducesNow++;
+
+                        // apply reduce, keeping track of sum
+                        sum = reduce.apply(result.getValue(), sum);
+                    }
+                }
+            }
+        } catch (Exception ex) {
+            // forward the failure to the master thread instead of letting it escape this worker
+            errorTracker.notifyOfError(ex);
+        } finally {
+            if ( haveLock ) // if we acquired the lock, unlock it
+                releaseReduceLock();
+        }
+
+        return nReducesNow;
+    }
+
+    /**
+     * Acquire the reduce lock, either returning immediately if not possible or blocking until the lock is available
+     *
+     * @param blockUntilAvailable if true, we will block until the lock is available, otherwise we return immediately
+     *                            without acquiring the lock
+     * @return true if the lock has been acquired, false otherwise
+     */
+    protected boolean acquireReduceLock(final boolean blockUntilAvailable) {
+        if ( blockUntilAvailable ) {
+            reduceLock.lock();
+            return true;
+        } else {
+            return reduceLock.tryLock();
+        }
+    }
+
+    /**
+     * Free the reduce lock.
+     *
+     * Assumes that the invoking thread actually previously acquired the lock (it's a problem if not).
+     */
+    protected void releaseReduceLock() {
+        reduceLock.unlock();
+    }
+
+    /**
+     * Get the current reduce result resulting from applying reduce(...) to all MapResult elements.
+     *
+     * Note that this method cannot know if future reduce calls are coming in.  So it simply gets
+     * the current reduce result.  It is up to the caller to know whether the returned value is
+     * a partial result, or the full final value
+     *
+     * @return the total reduce result across all jobs
+     */
+    public ReduceType getReduceResult() {
+        return sum;
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/package-info.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/package-info.java
index 8a42dff..25907eb 100644
--- a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/package-info.java
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/package-info.java
@@ -1,5 +1,5 @@
 /*
-* Copyright (c) 2012 The Broad Institute
+* Copyright 2012-2015 Broad Institute, Inc.
 * 
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/pairhmm/BatchPairHMM.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/pairhmm/BatchPairHMM.java
new file mode 100644
index 0000000..52b51c0
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/pairhmm/BatchPairHMM.java
@@ -0,0 +1,41 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.pairhmm;
+
+import org.broadinstitute.gatk.utils.haplotype.Haplotype;
+
+import java.util.List;
+
+/**
+ * Interface for PairHMM implementations that support batched evaluation.
+ *
+ * Callers enqueue work via batchAdd (one read's data against a list of haplotypes)
+ * and later retrieve the computed values via batchGetResult.
+ */
+public interface BatchPairHMM {
+    /**
+     * Queue up a read (bases plus per-base quality arrays) against a list of haplotypes.
+     *
+     * @param haplotypes   the haplotypes to evaluate the read against
+     * @param readBases    the bases of the read
+     * @param readQuals    the base quality scores of the read
+     * @param insertionGOP the insertion gap open penalties of the read
+     * @param deletionGOP  the deletion gap open penalties of the read
+     * @param overallGCP   the overall gap continuation penalties of the read
+     */
+    public void batchAdd(final List<Haplotype> haplotypes,
+                         final byte[] readBases,
+                         final byte[] readQuals,
+                         final byte[] insertionGOP,
+                         final byte[] deletionGOP,
+                         final byte[] overallGCP);
+
+    /**
+     * Retrieve results for previously batchAdd'ed work.
+     *
+     * @return an array of results, presumably one likelihood per queued read/haplotype
+     *         pair -- confirm the exact ordering against the implementation
+     */
+    public double[] batchGetResult();
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/pairhmm/Log10PairHMM.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/pairhmm/Log10PairHMM.java
new file mode 100644
index 0000000..4ee17c8
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/pairhmm/Log10PairHMM.java
@@ -0,0 +1,220 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.pairhmm;
+
+import com.google.java.contract.Ensures;
+import com.google.java.contract.Requires;
+import org.broadinstitute.gatk.utils.MathUtils;
+import org.broadinstitute.gatk.utils.QualityUtils;
+
+import java.util.Arrays;
+
+import static java.lang.Math.log10;
+import static org.broadinstitute.gatk.utils.pairhmm.PairHMMModel.*;
+
+/**
+ * Util class for performing the pair HMM for local alignment. Figure 4.3 in Durbin 1998 book.
+ *
+ * User: rpoplin, carneiro
+ * Date: 3/1/12
+ */
+public class Log10PairHMM extends N2MemoryPairHMM {
+    /**
+     * Should we use exact log10 calculation (true), or an approximation (false)?
+     */
+    private final boolean doExactLog10;
+
+
+    // we divide e by 3 because the observed base could have come from any of the non-observed alleles
+    protected final static double log10_3 = log10(3.0);
+
+    /**
+     * Create an uninitialized PairHMM
+     *
+     * @param doExactLog10 should the log10 calculations be exact (slow) or approximate (faster)
+     */
+    public Log10PairHMM(final boolean doExactLog10) {
+        this.doExactLog10 = doExactLog10;
+    }
+
+    /**
+     * Is this HMM using exact log10 calculations?
+     * @return true if exact, false if approximate
+     */
+    public boolean isDoingExactLog10Calculations() {
+        return doExactLog10;
+    }
+
+    /**
+     * {@inheritDoc}
+     *
+     * Additionally fills the match/insertion/deletion matrices with -Infinity (log10 of probability 0).
+     */
+    @Override
+    public void initialize(final int readMaxLength, final int haplotypeMaxLength ) {
+        super.initialize(readMaxLength, haplotypeMaxLength);
+
+        for( int iii=0; iii < paddedMaxReadLength; iii++ ) {
+            Arrays.fill(matchMatrix[iii], Double.NEGATIVE_INFINITY);
+            Arrays.fill(insertionMatrix[iii], Double.NEGATIVE_INFINITY);
+            Arrays.fill(deletionMatrix[iii], Double.NEGATIVE_INFINITY);
+        }
+    }
+
+    /**
+     * {@inheritDoc}
+     */
+    @Override
+    public double subComputeReadLikelihoodGivenHaplotypeLog10( final byte[] haplotypeBases,
+                                                               final byte[] readBases,
+                                                               final byte[] readQuals,
+                                                               final byte[] insertionGOP,
+                                                               final byte[] deletionGOP,
+                                                               final byte[] overallGCP,
+                                                               final int hapStartIndex,
+                                                               final boolean recacheReadValues,
+                                                               final int nextHapStartIndex) {
+
+
+        // transition probabilities depend only on the read, so recompute them only when needed
+        if ( ! constantsAreInitialized || recacheReadValues )
+            initializeProbabilities(insertionGOP, deletionGOP, overallGCP);
+        initializePriors(haplotypeBases, readBases, readQuals, hapStartIndex);
+        if (previousHaplotypeBases == null || previousHaplotypeBases.length != haplotypeBases.length) {
+            // set the initial value (free deletions in the beginning) for the first row in the deletion matrix
+            initializeMatrixValues(haplotypeBases);
+        }
+
+        for (int i = 1; i < paddedReadLength; i++) {
+            // +1 here is because hapStartIndex is 0-based, but our matrices are 1 based
+            for (int j = hapStartIndex+1; j < paddedHaplotypeLength; j++) {
+                updateCell(i, j, prior[i][j], transition[i]);
+            }
+        }
+
+        // final probability is the log10 sum of the last element in the Match and Insertion state arrays
+        // this way we ignore all paths that ended in deletions! (huge)
+        // but we have to sum all the paths ending in the M and I matrices, because they're no longer extended.
+        return finalLikelihoodCalculation();
+    }
+
+    /**
+     * Seed row 0 of the deletion matrix with log10(1/haplotypeLength), giving
+     * free deletions at the start of the alignment.
+     */
+    protected void initializeMatrixValues(final byte[] haplotypeBases) {
+        final double initialValue = Math.log10(1.0 / haplotypeBases.length);
+        for( int j = 0; j < paddedHaplotypeLength; j++ ) {
+            deletionMatrix[0][j] = initialValue;
+        }
+    }
+
+    /**
+     * Sum (in log10 space) the match and insertion values along the last row of the matrices.
+     */
+    protected double finalLikelihoodCalculation() {
+        final int endI = paddedReadLength - 1;
+        double finalSumProbabilities = myLog10SumLog10(new double[]{matchMatrix[endI][1], insertionMatrix[endI][1]});
+        for (int j = 2; j < paddedHaplotypeLength; j++)
+            finalSumProbabilities = myLog10SumLog10(new double[]{finalSumProbabilities, matchMatrix[endI][j], insertionMatrix[endI][j]});
+        return finalSumProbabilities;
+    }
+
+
+    /**
+     * Initializes the matrix that holds all the constants related to the editing
+     * distance between the read and the haplotype.
+     *
+     * @param haplotypeBases the bases of the haplotype
+     * @param readBases      the bases of the read
+     * @param readQuals      the base quality scores of the read
+     * @param startIndex     where to start updating the distanceMatrix (in case this read is similar to the previous read)
+     */
+    public void initializePriors(final byte[] haplotypeBases, final byte[] readBases, final byte[] readQuals, final int startIndex) {
+
+        // initialize the pBaseReadLog10 matrix for all combinations of read x haplotype bases
+        // Abusing the fact that java initializes arrays with 0.0, so no need to fill in rows and columns below 2.
+
+        for (int i = 0; i < readBases.length; i++) {
+            final byte x = readBases[i];
+            final byte qual = readQuals[i];
+            for (int j = startIndex; j < haplotypeBases.length; j++) {
+                final byte y = haplotypeBases[j];
+                // matching bases (or an N on either side) get the log10 prob of a correct call;
+                // mismatches get the log10 error prob, optionally corrected by -log10(3)
+                prior[i+1][j+1] = ( x == y || x == (byte) 'N' || y == (byte) 'N' ?
+                        QualityUtils.qualToProbLog10(qual) : (QualityUtils.qualToErrorProbLog10(qual) - (doNotUseTristateCorrection ? 0.0 : log10_3)) );
+            }
+        }
+    }
+
+    /**
+     * Initializes the matrix that holds all the constants related to quality scores.
+     *
+     * @param insertionGOP   insertion quality scores of the read
+     * @param deletionGOP    deletion quality scores of the read
+     * @param overallGCP     overall gap continuation penalty
+     */
+    @Requires({
+            "insertionGOP != null",
+            "deletionGOP != null",
+            "overallGCP != null"
+    })
+    @Ensures("constantsAreInitialized")
+    protected void initializeProbabilities(final byte[] insertionGOP, final byte[] deletionGOP, final byte[] overallGCP) {
+        PairHMMModel.qualToTransProbsLog10(transition,insertionGOP,deletionGOP,overallGCP);
+        // note that we initialized the constants
+        constantsAreInitialized = true;
+    }
+
+
+    /**
+     * Compute the log10SumLog10 of the values
+     *
+     * NOTE NOTE NOTE
+     *
+     * Log10PairHMM depends critically on this function tolerating values that are all -Infinity
+     * and the sum returning -Infinity.  Not good.  Needs to be fixed.
+     *
+     * NOTE NOTE NOTE
+     *
+     * @param values an array of log10 probabilities that need to be summed
+     * @return the log10 of the sum of the probabilities
+     */
+    @Requires("values != null")
+    protected double myLog10SumLog10(final double[] values) {
+        return doExactLog10 ? MathUtils.log10sumLog10(values) : MathUtils.approximateLog10SumLog10(values);
+    }
+
+    /**
+     * Updates a cell in the HMM matrix
+     *
+     * The read and haplotype indices are offset by one because the state arrays have an extra column to hold the
+     * initial conditions
+     *
+     * @param indI             row index in the matrices to update
+     * @param indJ             column index in the matrices to update
+     * @param prior            the likelihood editing distance matrix for the read x haplotype
+     * @param transition       an array with the six transition probabilities relevant to this location
+     */
+    protected void updateCell( final int indI, final int indJ, final double prior, final double[] transition) {
+
+        matchMatrix[indI][indJ] = prior +
+                myLog10SumLog10(new double[]{matchMatrix[indI - 1][indJ - 1] + transition[matchToMatch],
+                                         insertionMatrix[indI - 1][indJ - 1] + transition[indelToMatch],
+                                          deletionMatrix[indI - 1][indJ - 1] + transition[indelToMatch]});
+        insertionMatrix[indI][indJ] = myLog10SumLog10(new double[] {matchMatrix[indI - 1][indJ] + transition[matchToInsertion], insertionMatrix[indI - 1][indJ] + transition[insertionToInsertion]});
+        deletionMatrix[indI][indJ]  = myLog10SumLog10(new double[] {matchMatrix[indI][indJ - 1] + transition[matchToDeletion],  deletionMatrix[indI][indJ - 1] + transition[deletionToDeletion]});
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/pairhmm/N2MemoryPairHMM.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/pairhmm/N2MemoryPairHMM.java
new file mode 100644
index 0000000..3871711
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/pairhmm/N2MemoryPairHMM.java
@@ -0,0 +1,98 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.pairhmm;
+
+import com.google.java.contract.Requires;
+
+/**
+ * Superclass for PairHMMs that want to use a full read x haplotype matrix for their match, insertion, and deletion matrices
+ *
+ * User: rpoplin
+ * Date: 10/16/12
+ */
+abstract class N2MemoryPairHMM extends PairHMM {
+    protected double[][] transition = null; // The transition probabilities cache
+    protected double[][] prior = null;      // The prior probabilities cache
+    protected double[][] matchMatrix = null;
+    protected double[][] insertionMatrix = null;
+    protected double[][] deletionMatrix = null;
+
+    // only used for debugging purposes
+    protected boolean doNotUseTristateCorrection = false;
+
+    public void doNotUseTristateCorrection() {
+        doNotUseTristateCorrection = true;
+    }
+
+    /**
+     * Initialize this PairHMM, making it suitable to run against a read and haplotype with given lengths
+     *
+     * Note: Do not worry about padding, just provide the true max length of the read and haplotype. The HMM will take care of the padding.
+     *
+     * @param readMaxLength the max length of reads we want to use with this PairHMM
+     * @param haplotypeMaxLength the max length of haplotypes we want to use with this PairHMM
+     */
+    @Override
+    public void initialize( final int readMaxLength, final int haplotypeMaxLength ) {
+        super.initialize(readMaxLength, haplotypeMaxLength);
+
+        matchMatrix = new double[paddedMaxReadLength][paddedMaxHaplotypeLength];
+        insertionMatrix = new double[paddedMaxReadLength][paddedMaxHaplotypeLength];
+        deletionMatrix = new double[paddedMaxReadLength][paddedMaxHaplotypeLength];
+
+        transition = PairHMMModel.createTransitionMatrix(maxReadLength);
+        prior = new double[paddedMaxReadLength][paddedMaxHaplotypeLength];
+    }
+
+    /**
+     * Print out the core hmm matrices for debugging
+     */
+    protected void dumpMatrices() {
+        dumpMatrix("matchMetricArray", matchMatrix);
+        dumpMatrix("insertionMatrix", insertionMatrix);
+        dumpMatrix("deletionMatrix", deletionMatrix);
+    }
+
+    /**
+     * Print out in a human readable form the matrix for debugging
+     * @param name the name of this matrix
+     * @param matrix the matrix of values
+     */
+    @Requires({"name != null", "matrix != null"})
+    private void dumpMatrix(final String name, final double[][] matrix) {
+        System.out.printf("%s%n", name);
+        for ( int i = 0; i < matrix.length; i++) {
+            System.out.printf("\t%s[%d]", name, i);
+            for ( int j = 0; j < matrix[i].length; j++ ) {
+                if ( Double.isInfinite(matrix[i][j]) )
+                    System.out.printf(" %15s", String.format("%f", matrix[i][j]));
+                else
+                    System.out.printf(" % 15.5e", matrix[i][j]);
+            }
+            System.out.println();
+        }
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/pairhmm/PairHMM.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/pairhmm/PairHMM.java
new file mode 100644
index 0000000..9f57779
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/pairhmm/PairHMM.java
@@ -0,0 +1,390 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.pairhmm;
+
+import com.google.java.contract.Requires;
+import htsjdk.variant.variantcontext.Allele;
+import org.apache.log4j.Logger;
+import org.broadinstitute.gatk.utils.MathUtils;
+import org.broadinstitute.gatk.utils.genotyper.ReadLikelihoods;
+import org.broadinstitute.gatk.utils.haplotype.Haplotype;
+import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
+
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.List;
+import java.util.Map;
+/**
+ * Util class for performing the pair HMM for local alignment. Figure 4.3 in Durbin 1998 book.
+ *
+ * User: rpoplin
+ * Date: 10/16/12
+ */
+public abstract class PairHMM {
+    protected final static Logger logger = Logger.getLogger(PairHMM.class);
+
+    protected boolean constantsAreInitialized = false;
+
+    protected byte[] previousHaplotypeBases;
+    protected int hapStartIndex;
+
+    public static final byte BASE_QUALITY_SCORE_THRESHOLD = (byte) 18; // Base quals less than this value are squashed down to min possible qual
+
+    public enum HMM_IMPLEMENTATION {
+        /* Very slow implementation which uses very accurate log10 sum functions. Only meant to be used as a reference test implementation */
+        EXACT,
+        /* PairHMM as implemented for the UnifiedGenotyper. Uses log10 sum functions accurate to only 1E-4 */
+        ORIGINAL,
+        /* Optimized version of the PairHMM which caches per-read computations and operations in real space to avoid costly sums of log10'ed likelihoods */
+        LOGLESS_CACHING,
+        /* Optimized AVX implementation of LOGLESS_CACHING called through JNI */
+        VECTOR_LOGLESS_CACHING,
+        /* Debugging for vector implementation of LOGLESS_CACHING */
+        DEBUG_VECTOR_LOGLESS_CACHING,
+        /* Logless caching PairHMM that stores computations in 1D arrays instead of matrices, and which proceeds diagonally over the (read x haplotype) intersection matrix */
+        ARRAY_LOGLESS
+    }
+
+    /* Instruction sets for computing VectorLoglessHMM */
+    public enum HMM_SUB_IMPLEMENTATION {
+        /* standard un-vectorized instructions */
+        UNVECTORIZED(0x0L, false),
+        /* Streaming SIMD Extensions (SSE), version 4.1 */
+        SSE41(0x1L, true),
+        /* Streaming SIMD Extensions (SSE), version 4.2 */
+        SSE42(0x2L, true),
+        /* Advanced Vector Extensions (AVX) */
+        AVX(0x4L, true),
+        /* For testing only, set bit beyond hardware capabilities */
+        TEST_BEYOND_CAPABILITIES(0x400L, true),
+        /* Enable all implementations */
+        ENABLE_ALL(0xFFFFFFFFFFFFFFFFl, false);
+
+        /* Masks for machine capabilities */
+        private final long mask;
+        /* Is a specific hardware instruction set requested? */
+        private final boolean isSpecificHardwareRequest;
+        HMM_SUB_IMPLEMENTATION(long mask, boolean isSpecificHardwareRequest) {
+            this.mask = mask;
+            this.isSpecificHardwareRequest = isSpecificHardwareRequest;
+        }
+        long getMask() { return mask; }
+        boolean getIsSpecificHardwareRequest() { return isSpecificHardwareRequest; }
+    }
+
+    protected int maxHaplotypeLength, maxReadLength;
+    protected int paddedMaxReadLength, paddedMaxHaplotypeLength;
+    protected int paddedReadLength, paddedHaplotypeLength;
+    protected boolean initialized = false;
+
+    // only used for debugging purposes
+    protected boolean doNotUseTristateCorrection = false;
+    protected void doNotUseTristateCorrection() { doNotUseTristateCorrection = true; }
+
+    //debug array
+    protected double[] mLikelihoodArray;
+
+    //profiling information
+    protected static Boolean doProfiling = true;
+    protected static long pairHMMComputeTime = 0;
+    protected long threadLocalPairHMMComputeTimeDiff = 0;
+    protected long startTime = 0;
+
+    /**
+     * Initialize this PairHMM, making it suitable to run against a read and haplotype with given lengths
+     *
+     * Note: Do not worry about padding, just provide the true max length of the read and haplotype. The HMM will take care of the padding.
+     *
+     * @param readMaxLength the max length of reads we want to use with this PairHMM
+     * @param haplotypeMaxLength the max length of haplotypes we want to use with this PairHMM
+     * @throws IllegalArgumentException if readMaxLength or haplotypeMaxLength is less than or equal to zero
+     */
+    public void initialize( final int readMaxLength, final int haplotypeMaxLength ) throws IllegalArgumentException {
+        if ( readMaxLength <= 0 ) throw new IllegalArgumentException("READ_MAX_LENGTH must be > 0 but got " + readMaxLength);
+        if ( haplotypeMaxLength <= 0 ) throw new IllegalArgumentException("HAPLOTYPE_MAX_LENGTH must be > 0 but got " + haplotypeMaxLength);
+
+        maxHaplotypeLength = haplotypeMaxLength;
+        maxReadLength = readMaxLength;
+
+        // M, X, and Y arrays are of size read and haplotype + 1 because of an extra column for initial conditions and + 1 to consider the final base in a non-global alignment
+        paddedMaxReadLength = readMaxLength + 1;
+        paddedMaxHaplotypeLength = haplotypeMaxLength + 1;
+
+        previousHaplotypeBases = null;
+
+        constantsAreInitialized = false;
+        initialized = true;
+    }
+
+    /**
+     * Called at the end of PairHMM for a region - mostly used by the JNI implementations
+     */
+    public void finalizeRegion()
+    {
+        ;
+    }
+
+    /**
+     * Initialize this PairHMM, making it suitable to run against a read and haplotype with given lengths
+     * This function is used by the JNI implementations to transfer all data once to the native code
+     * @param haplotypes the list of haplotypes
+     * @param perSampleReadList map from sample name to list of reads
+     * @param readMaxLength the max length of reads we want to use with this PairHMM
+     * @param haplotypeMaxLength the max length of haplotypes we want to use with this PairHMM
+     */
+    public void initialize( final List<Haplotype> haplotypes, final Map<String, List<GATKSAMRecord>> perSampleReadList, final int readMaxLength, final int haplotypeMaxLength ) {
+        initialize(readMaxLength, haplotypeMaxLength);
+    }
+
+    private int findMaxReadLength(final GATKSAMRecord ... reads) {
+        int max = 0;
+        for (final GATKSAMRecord read : reads) {
+            final int readLength = read.getReadLength();
+            if (max < readLength)
+                max = readLength;
+        }
+        return max;
+    }
+
+    private int findMaxAlleleLength(final List<? extends Allele> alleles) {
+        int max = 0;
+        for (final Allele allele : alleles) {
+            final int alleleLength = allele.length();
+            if (max < alleleLength)
+                max = alleleLength;
+        }
+        return max;
+    }
+
+    protected int findMaxReadLength(final List<GATKSAMRecord> reads) {
+        int listMaxReadLength = 0;
+        for(GATKSAMRecord read : reads){
+            final int readLength = read.getReadLength();
+            if( readLength > listMaxReadLength ) { listMaxReadLength = readLength; }
+        }
+        return listMaxReadLength;
+    }
+
+    protected int findMaxHaplotypeLength(final Collection<Haplotype> haplotypes) {
+        int listMaxHaplotypeLength = 0;
+        for( final Haplotype h : haplotypes) {
+            final int haplotypeLength = h.getBases().length;
+            if( haplotypeLength > listMaxHaplotypeLength ) { listMaxHaplotypeLength = haplotypeLength; }
+        }
+        return listMaxHaplotypeLength;
+    }
+
+    /**
+     *  Given a list of reads and haplotypes, for every read compute the total probability of said read arising from
+     *  each haplotype given base substitution, insertion, and deletion probabilities.
+     *
+     * @param processedReads reads to analyze instead of the ones present in the destination read-likelihoods.
+     * @param likelihoods where to store the likelihoods where position [a][r] is reserved for the likelihood of {@code reads[r]}
+     *             conditional to {@code alleles[a]}.
+     * @param gcp penalty for gap continuations base array map for processed reads.
+     *
+     * Results are written into {@code likelihoods} and cached in {@code mLikelihoodArray}.
+     */
+    public void computeLikelihoods(final ReadLikelihoods.Matrix<Haplotype> likelihoods,
+                                   final List<GATKSAMRecord> processedReads,
+                                   final Map<GATKSAMRecord,byte[]> gcp) {
+        if (processedReads.isEmpty())
+            return;
+        if(doProfiling)
+            startTime = System.nanoTime();
+        // (re)initialize the pairHMM only if necessary
+        final int readMaxLength = findMaxReadLength(processedReads);
+        final int haplotypeMaxLength = findMaxAlleleLength(likelihoods.alleles());
+        if (!initialized || readMaxLength > maxReadLength || haplotypeMaxLength > maxHaplotypeLength)
+            initialize(readMaxLength, haplotypeMaxLength);
+
+        final int readCount = processedReads.size();
+        final List<Haplotype> alleles = likelihoods.alleles();
+        final int alleleCount = alleles.size();
+        mLikelihoodArray = new double[readCount * alleleCount];
+        int idx = 0;
+        int readIndex = 0;
+        for(final GATKSAMRecord read : processedReads){
+            final byte[] readBases = read.getReadBases();
+            final byte[] readQuals = read.getBaseQualities();
+            final byte[] readInsQuals = read.getBaseInsertionQualities();
+            final byte[] readDelQuals = read.getBaseDeletionQualities();
+            final byte[] overallGCP = gcp.get(read);
+
+            // peek at the next haplotype in the list (necessary to get nextHaplotypeBases, which is required for caching in the array implementation)
+            final boolean isFirstHaplotype = true;
+            for (int a = 0; a < alleleCount; a++) {
+                final Allele allele = alleles.get(a);
+                final byte[] alleleBases = allele.getBases();
+                final byte[] nextAlleleBases = a == alleles.size() - 1 ? null : alleles.get(a + 1).getBases();
+                final double lk = computeReadLikelihoodGivenHaplotypeLog10(alleleBases,
+                        readBases, readQuals, readInsQuals, readDelQuals, overallGCP, isFirstHaplotype, nextAlleleBases);
+                likelihoods.set(a, readIndex, lk);
+                mLikelihoodArray[idx++] = lk;
+            }
+            readIndex++;
+        }
+        if(doProfiling) {
+            threadLocalPairHMMComputeTimeDiff = (System.nanoTime() - startTime);
+            //synchronized(doProfiling)
+            {
+                pairHMMComputeTime += threadLocalPairHMMComputeTimeDiff;
+            }
+        }
+    }
+
+    /**
+     * Compute the total probability of read arising from haplotypeBases given base substitution, insertion, and deletion
+     * probabilities.
+     *
+     * Note on using hapStartIndex.  This allows you to compute the exact true likelihood of a full haplotypes
+     * given a read, assuming that the previous calculation read over a full haplotype, recaching the read values,
+     * starting only at the place where the new haplotype bases and the previous haplotype bases differ.  This
+     * index is 0-based, and can be computed with findFirstPositionWhereHaplotypesDiffer given the two haplotypes.
+     * Note that this assumes that the read and all associated quals values are the same.
+     *
+     * @param haplotypeBases the full sequence (in standard SAM encoding) of the haplotype, must be >= read bases in length
+     * @param readBases the bases (in standard encoding) of the read, must be <= haplotype bases in length
+     * @param readQuals the phred-scaled per base substitution quality scores of read.  Must be the same length as readBases
+     * @param insertionGOP the phred-scaled per base insertion quality scores of read.  Must be the same length as readBases
+     * @param deletionGOP the phred-scaled per base deletion quality scores of read.  Must be the same length as readBases
+     * @param overallGCP the phred-scaled gap continuation penalties scores of read.  Must be the same length as readBases
+     * @param recacheReadValues if false, we don't recalculate any cached results, assuming that readBases and its associated
+     *                          parameters are the same, and only the haplotype bases are changing underneath us
+     * @throws IllegalStateException  if did not call initialize() beforehand
+     * @throws IllegalArgumentException haplotypeBases is null or greater than maxHaplotypeLength
+     * @throws IllegalArgumentException readBases is null or greater than maxReadLength
+     * @throws IllegalArgumentException readBases, readQuals, insertionGOP, deletionGOP and overallGCP are not the same size
+     * @return the log10 probability of read coming from the haplotype under the provided error model
+     */
+    protected final double computeReadLikelihoodGivenHaplotypeLog10( final byte[] haplotypeBases,
+                                                                  final byte[] readBases,
+                                                                  final byte[] readQuals,
+                                                                  final byte[] insertionGOP,
+                                                                  final byte[] deletionGOP,
+                                                                  final byte[] overallGCP,
+                                                                  final boolean recacheReadValues,
+                                                                  final byte[] nextHaploytpeBases) throws IllegalStateException, IllegalArgumentException {
+
+        if ( ! initialized ) throw new IllegalStateException("Must call initialize before calling computeReadLikelihoodGivenHaplotypeLog10");
+        if ( haplotypeBases == null ) throw new IllegalArgumentException("haplotypeBases cannot be null");
+        if ( haplotypeBases.length > maxHaplotypeLength ) throw new IllegalArgumentException("Haplotype bases is too long, got " + haplotypeBases.length + " but max is " + maxHaplotypeLength);
+        if ( readBases == null ) throw new IllegalArgumentException("readBases cannot be null");
+        if ( readBases.length > maxReadLength ) throw new IllegalArgumentException("readBases is too long, got " + readBases.length + " but max is " + maxReadLength);
+        if ( readQuals.length != readBases.length ) throw new IllegalArgumentException("Read bases and read quals aren't the same size: " + readBases.length + " vs " + readQuals.length);
+        if ( insertionGOP.length != readBases.length ) throw new IllegalArgumentException("Read bases and read insertion quals aren't the same size: " + readBases.length + " vs " + insertionGOP.length);
+        if ( deletionGOP.length != readBases.length ) throw new IllegalArgumentException("Read bases and read deletion quals aren't the same size: " + readBases.length + " vs " + deletionGOP.length);
+        if ( overallGCP.length != readBases.length ) throw new IllegalArgumentException("Read bases and overall GCP aren't the same size: " + readBases.length + " vs " + overallGCP.length);
+
+        paddedReadLength = readBases.length + 1;
+        paddedHaplotypeLength = haplotypeBases.length + 1;
+
+        hapStartIndex =  (recacheReadValues) ? 0 : hapStartIndex;
+
+        // Pre-compute the difference between the current haplotype and the next one to be run
+        // Looking ahead is necessary for the ArrayLoglessPairHMM implementation
+        final int nextHapStartIndex =  (nextHaploytpeBases == null || haplotypeBases.length != nextHaploytpeBases.length) ? 0 : findFirstPositionWhereHaplotypesDiffer(haplotypeBases, nextHaploytpeBases);
+
+        double result = subComputeReadLikelihoodGivenHaplotypeLog10(haplotypeBases, readBases, readQuals, insertionGOP, deletionGOP, overallGCP, hapStartIndex, recacheReadValues, nextHapStartIndex);
+
+        if ( result > 0.0)
+            throw new IllegalStateException("PairHMM Log Probability cannot be greater than 0: " + String.format("haplotype: %s, read: %s, result: %f, PairHMM: %s", new String(haplotypeBases), new String(readBases), result, this.getClass().getSimpleName()));
+        else if (!MathUtils.goodLog10Probability(result))
+            throw new IllegalStateException("Invalid Log Probability: " + result);
+
+        // Warning: Careful if using the PairHMM in parallel! (this update has to be taken care of).
+        // Warning: This assumes no downstream modification of the haplotype bases (saves us from copying the array). It is okay for the haplotype caller and the Unified Genotyper.
+        previousHaplotypeBases = haplotypeBases;
+
+        // For the next iteration, the hapStartIndex for the next haplotype becomes the index for the current haplotype
+        // The array implementation has to look ahead to the next haplotype to store caching info. It cannot do this if nextHapStart is before hapStart
+        hapStartIndex = (nextHapStartIndex < hapStartIndex) ? 0: nextHapStartIndex;
+
+        return result;
+    }
+
+    /**
+     * To be overloaded by subclasses to actually do calculation for #computeReadLikelihoodGivenHaplotypeLog10
+     */
+    @Requires({"readBases.length == readQuals.length", "readBases.length == insertionGOP.length", "readBases.length == deletionGOP.length",
+            "readBases.length == overallGCP.length", "matchMatrix!=null", "insertionMatrix!=null", "deletionMatrix!=null"})
+    protected abstract double subComputeReadLikelihoodGivenHaplotypeLog10( final byte[] haplotypeBases,
+                                                                           final byte[] readBases,
+                                                                           final byte[] readQuals,
+                                                                           final byte[] insertionGOP,
+                                                                           final byte[] deletionGOP,
+                                                                           final byte[] overallGCP,
+                                                                           final int hapStartIndex,
+                                                                           final boolean recacheReadValues,
+                                                                           final int nextHapStartIndex);
+
+    /**
+     * Compute the first position at which two haplotypes differ
+     *
+     * If the haplotypes are exact copies of each other, returns the min length of the two haplotypes.
+     *
+     * @param haplotype1 the first haplotype
+     * @param haplotype2 the second haplotype
+     * @throws IllegalArgumentException if haplotype1 or haplotype2 are null or zero length
+     * @return the index of the first position in haplotype1 and haplotype2 where the byte isn't the same
+     */
+    public static int findFirstPositionWhereHaplotypesDiffer(final byte[] haplotype1, final byte[] haplotype2) throws IllegalArgumentException {
+        if ( haplotype1 == null || haplotype1.length == 0 ) throw new IllegalArgumentException("Haplotype1 is bad " + Arrays.toString(haplotype1));
+        if ( haplotype2 == null || haplotype2.length == 0 ) throw new IllegalArgumentException("Haplotype2 is bad " + Arrays.toString(haplotype2));
+
+        for( int iii = 0; iii < haplotype1.length && iii < haplotype2.length; iii++ ) {
+            if( haplotype1[iii] != haplotype2[iii] ) {
+                return iii;
+            }
+        }
+
+        return Math.min(haplotype1.length, haplotype2.length);
+    }
+
+    /**
+     * Use number of threads to set doProfiling flag - doProfiling iff numThreads == 1
+     * This function should be called only during initialization phase - single thread phase of HC
+     */
+    public static void setNumberOfThreads(final int numThreads)
+    {
+        doProfiling = (numThreads == 1);
+        if(numThreads > 1)
+            logger.info("Performance profiling for PairHMM is disabled because HaplotypeCaller is being run with multiple threads (-nct>1) option\nProfiling is enabled only when running in single thread mode\n");
+    }
+
+    /**
+     * Return the results of the computeLikelihoods function
+     */
+    public double[] getLikelihoodArray() { return mLikelihoodArray; }
+    /**
+     * Called at the end of the program to close files, print profiling information, etc.
+     */
+    public void close()
+    {
+        if(doProfiling)
+            logger.info("Total compute time in PairHMM computeLikelihoods() : "+(pairHMMComputeTime*1e-9));
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/pairhmm/PairHMMModel.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/pairhmm/PairHMMModel.java
new file mode 100644
index 0000000..6644c20
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/pairhmm/PairHMMModel.java
@@ -0,0 +1,435 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.pairhmm;
+
+import org.broadinstitute.gatk.utils.MathUtils;
+import org.broadinstitute.gatk.utils.QualityUtils;
+
+/**
+ * Helper class that implement calculations required to implement the PairHMM Finite State Automation (FSA) model.
+ *
+ * @author Valentin Ruano-Rubio <valentin at broadinstitute.org>
+ */
+public class PairHMMModel {
+
+
+    /**
+     * Prevents instantiation of this class
+     */
+    private PairHMMModel() {
+
+    }
+
+    /**
+     * Length of the standard transition probability array.
+     */
+    public static final int TRANS_PROB_ARRAY_LENGTH = 6;
+
+    /**
+     * Position in the transition probability array for the Match-to-Match transition.
+     */
+    public static final int matchToMatch = 0;
+
+    /**
+     * Position in the transition probability array for the Indel-to-Match transition.
+     */
+    public static final int indelToMatch = 1;
+
+    /**
+     * Position in the transition probability array for the Match-to-Insertion transition.
+     */
+    public static final int matchToInsertion = 2;
+
+    /**
+     * Position in the transition probability array for the Insertion-to-Insertion transition.
+     */
+    public static final int insertionToInsertion = 3;
+
+    /**
+     * Position in the transition probability array for the Match-to-Deletion transition.
+     */
+    public static final int matchToDeletion = 4;
+
+    /**
+     * Position in the transition probability array for the Deletion-to-Deletion transition.
+     */
+    public static final int deletionToDeletion = 5;
+
+    /**
+     * Convenient ln10 constant.
+     */
+    private static double LN10 = Math.log(10);
+
+    /**
+     * Convenient (ln10)^-1 constant.
+     */
+    private static double INV_LN10 = 1.0 / LN10;
+
+    /**
+     * Holds pre-calculated the matchToMath probability values in linear scale.
+     *
+     * <p/>
+     * This is a triangular matrix stored in a unidimentional array like so:
+     * <p/>
+     * (0,0), (0,1), (1,1), (0,2), (1,2), (2,2), (0,3) ... ({@link QualityUtils#MAX_QUAL},{@link QualityUtils#MAX_QUAL})
+     */
+    private static double[] matchToMatchProb = new double[((QualityUtils.MAX_QUAL + 1) * (QualityUtils.MAX_QUAL + 2)) >> 1];
+
+    /**
+     * Holds pre-calculated the matchToMath probability values in log10 scale.
+     *
+     * <p/>
+     * This is a triangular matrix stored in a unidimentional array like so:
+     * <p/>
+     * (0,0), (0,1), (1,1), (0,2), (1,2), (2,2), (0,3) ... ({@link QualityUtils#MAX_QUAL},{@link QualityUtils#MAX_QUAL})
+     */
+    private static double[] matchToMatchLog10 = new double[((QualityUtils.MAX_QUAL + 1) * (QualityUtils.MAX_QUAL + 2)) >> 1];
+
+    /**
+     * Initialize matchToMatch cache tables {@link #matchToMatch} and {@link #matchToMatchLog10}
+     */
+    static {
+        for (int i = 0, offset = 0; i <= QualityUtils.MAX_QUAL; offset += ++i)
+            for (int j = 0; j <= i; j++) {
+                final double log10Sum = MathUtils.approximateLog10SumLog10(-0.1 * i,-0.1 * j);
+                matchToMatchLog10[offset + j] =
+                        Math.log1p( - Math.min(1,Math.pow(10,log10Sum))) * INV_LN10;
+                matchToMatchProb[offset + j] = Math.pow(10,matchToMatchLog10[offset + j]);
+            }
+    }
+
+    /**
+     * Fills a transition probability array given the different quality scores affecting a read site
+     *
+     * @param insQual the insertion quality score as a byte.
+     * @param delQual the deletion quality score as a byte.
+     * @param gcp the gap-continuation-penalty score as a byte.
+     *
+     * @throws NullPointerException if {@code dest} is {@code null}.
+     * @throws ArrayIndexOutOfBoundsException if {@code dest} is not large enough.
+     * @throws IllegalArgumentException if {@code insQual}, {@code delQual} or {@code gcp} is less than negative.
+     */
+    public static void qualToTransProbs(final double[] dest, final byte insQual, final byte delQual, final byte gcp) {
+        if (insQual < 0) throw new IllegalArgumentException("insert quality cannot less than 0: " + insQual);
+        if (delQual < 0) throw new IllegalArgumentException("deletion quality cannot be less than 0: " + delQual);
+        if (gcp < 0) throw new IllegalArgumentException("gcp cannot be less than 0: " + gcp);
+        dest[matchToMatch] = matchToMatchProb(insQual, delQual);
+        dest[matchToInsertion] = QualityUtils.qualToErrorProb(insQual);
+        dest[matchToDeletion] = QualityUtils.qualToErrorProb(delQual);
+        dest[indelToMatch] = QualityUtils.qualToProb(gcp);
+        dest[insertionToInsertion] = dest[deletionToDeletion] = QualityUtils.qualToErrorProb(gcp);
+    }
+
+    /**
+     * Returns a transition probability array given the different quality scores affecting a read site.
+     *
+     * @param insQual the insertion quality score as a byte.
+     * @param delQual the deletion quality score as a byte.
+     * @param gcp the gap-continuation-penalty score as a byte.
+     *
+     * @throws NullPointerException if {@code dest} is {@code null}.
+     * @throws ArrayIndexOutOfBoundsException if {@code dest} is not large enough.
+     * @throws IllegalArgumentException if {@code insQual}, {@code delQual} or {@code gcp} is less than negative.
+     *
+     * @return never {@code null}. An array of length {@link #TRANS_PROB_ARRAY_LENGTH}.
+     */
+    @SuppressWarnings("unused")
+    public static double[] qualToTransProbs(final byte insQual, final byte delQual, final byte gcp) {
+        final double[] dest = new double[TRANS_PROB_ARRAY_LENGTH];
+        qualToTransProbs(dest,insQual,delQual,gcp);
+        return dest;
+    }
+
+    /**
+     * Fills ax matrix with the transition probabilities for a number of bases.
+     *
+     * <p/>
+     * The first dimension of the matrix correspond to the different bases where the first one is stored in position 1.
+     * Thus the position 0 is left empty and the length of the resulting matrix is actually {@code insQual.length + 1}.
+     * <p/>
+     * Each entry is the transition probability array for that base with a length of {@link #TRANS_PROB_ARRAY_LENGTH}.
+     *
+     * @param dest the matrix to update
+     * @param insQuals insertion qualities.
+     * @param delQuals deletion qualities.
+     * @param gcps gap-continuation penalty qualities.
+     *
+     * @throws NullPointerException if any of the input arrays, matrices is {@code null} or any entry in {@code dest} is {@code null}.
+     * @throws IllegalArgumentException if {@code IllegalArgumentException}
+     *  if the input array don't have the same length.
+     * @throws ArrayIndexOutOfBoundsException if {@code dest} or any of its elements is not large enough to contain the
+     *  transition  matrix.
+     */
+    @SuppressWarnings("unused")
+    public static void qualToTransProbs(final double[][] dest, final byte[] insQuals, final byte[] delQuals, final byte[] gcps) {
+        final int readLength = insQuals.length;
+        if (delQuals.length != readLength) throw new IllegalArgumentException("deletion quality array length does not match insert quality array length: " + readLength + " != " + delQuals.length);
+        if (gcps.length != readLength) throw new IllegalArgumentException("deletion quality array length does not match insert quality array length: " + readLength + " != " + gcps.length);
+
+        if (dest.length < readLength + 1) throw new IllegalArgumentException("destination length is not enough for the read length: " + dest.length + " < " + readLength + " + 1");
+
+        for (int i = 0; i < readLength; i++)
+            qualToTransProbs(dest[i + 1], insQuals[i], delQuals[i], gcps[i]);
+    }
+
+    /**
+     * Returns a matrix with the transition probabilities for a number of bases.
+     *
+     * <p/>
+     * The first dimension of the matrix correspond to the different bases where the first one is stored in position 1.
+     * Thus the position 0 is left empty and the length of the resulting matrix is actually {@code insQual.length + 1}.
+     * <p/>
+     * Each entry is the transition probability array for that base with a length of {@link #TRANS_PROB_ARRAY_LENGTH}.
+     *
+     * @param insQuals insertion qualities.
+     * @param delQuals deletion qualities.
+     * @param gcps gap-continuation penalty qualities.
+     *
+     * @throws NullPointerException if any of the input arrays is {@code null}.
+     * @throws IllegalArgumentException if {@code IllegalArgumentException}
+     *  if the input array don't have the same length.
+     *
+     * @return never {@code null}, an matrix of the dimensions explained above.
+     */
+    @SuppressWarnings("unused")
+    public static double[][] qualToTransProbs(final byte[] insQuals, final byte[] delQuals, final byte[] gcps) {
+        final double[][] dest = createTransitionMatrix(insQuals.length);
+        qualToTransProbs(dest,insQuals,delQuals,gcps);
+        return dest;
+    }
+
+    /**
+     * Fills a transition log10 probability array given the different quality scores affecting a read site.
+     *
+     * @param insQual the insertion quality score as a byte.
+     * @param delQual the deletion quality score as a byte.
+     * @param gcp the gap-continuation-penalty score as a byte.
+     *
+     * @throws NullPointerException if {@code dest} is {@code null}.
+     * @throws ArrayIndexOutOfBoundsException if {@code dest} is not large enough.
+     * @throws IllegalArgumentException if {@code insQual}, {@code delQual} or {@code gcp} is less than negative.
+     */
+    public static void qualToTransProbsLog10(final double[] dest, final byte insQual, final byte delQual, final byte gcp) {
+        if (insQual < 0) throw new IllegalArgumentException("insert quality cannot less than 0: " + insQual);
+        if (delQual < 0) throw new IllegalArgumentException("deletion quality cannot be less than 0: " + delQual);
+        if (gcp < 0) throw new IllegalArgumentException("gcp cannot be less than 0: " + gcp);
+        dest[matchToMatch] = matchToMatchProbLog10(insQual, delQual);
+        dest[matchToInsertion] = QualityUtils.qualToErrorProbLog10(insQual);
+        dest[matchToDeletion] = QualityUtils.qualToErrorProbLog10(delQual);
+        dest[indelToMatch] = QualityUtils.qualToProbLog10(gcp);
+        dest[insertionToInsertion] = dest[deletionToDeletion] = QualityUtils.qualToErrorProbLog10(gcp);
+    }
+
+    /**
+     * Returns a transition log10 probability array given the different quality scores affecting a read site.
+     *
+     * @param insQual the insertion quality score as a byte.
+     * @param delQual the deletion quality score as a byte.
+     * @param gcp the gap-continuation-penalty score as a byte.
+     *
+     * @throws NullPointerException if {@code dest} is {@code null}.
+     * @throws ArrayIndexOutOfBoundsException if {@code dest} is not large enough.
+     * @throws IllegalArgumentException if {@code insQual}, {@code delQual} or {@code gcp} is less than negative.
+     *
+     * @return never {@code null}. An array of length {@link #TRANS_PROB_ARRAY_LENGTH}.
+     */
+    @SuppressWarnings("unused")
+    public static double[] qualToTransProbsLog10(final byte insQual, final byte delQual, final byte gcp) {
+        final double[] dest = new double[TRANS_PROB_ARRAY_LENGTH];
+        qualToTransProbsLog10(dest,insQual,delQual,gcp);
+        return dest;
+    }
+
+    /**
+     * Fills a matrix with the log10 transition probabilities for a number of bases.
+     *
+     * <p/>
+     * The first dimension of the matrix correspond to the different bases where the first one is stored in position 1.
+     * Thus the position 0 is left empty and the length of the resulting matrix is actually {@code insQual.length + 1}.
+     * <p/>
+     * Each entry is the transition probability array for that base with a length of {@link #TRANS_PROB_ARRAY_LENGTH}.
+     *
+     * @param insQuals insertion qualities.
+     * @param delQuals deletion qualities.
+     * @param gcps gap-continuation penalty qualities.
+     *
+     * @throws NullPointerException if any of the input arrays, matrices is {@code null} or any entry in {@code dest} is {@code null}.
+     * @throws IllegalArgumentException if {@code IllegalArgumentException}
+     *  if the input array don't have the same length.
+     * @throws ArrayIndexOutOfBoundsException if {@code dest} or any of its elements is not large enough to contain the
+     *  transition  matrix.
+     */
+    @SuppressWarnings("unused")
+    public static void qualToTransProbsLog10(final double[][] dest, final byte[] insQuals, final byte[] delQuals, final byte[] gcps) {
+        final int readLength = insQuals.length;
+        if (delQuals.length != readLength) throw new IllegalArgumentException("deletion quality array length does not match insert quality array length: " + readLength + " != " + delQuals.length);
+        if (gcps.length != readLength) throw new IllegalArgumentException("deletion quality array length does not match insert quality array length: " + readLength + " != " + gcps.length);
+        if (dest.length < readLength + 1) throw new IllegalArgumentException("destination length is not enough for the read length: " + dest.length + " < " + readLength + " + 1");
+
+        for (int i = 0; i < readLength; i++)
+            qualToTransProbsLog10(dest[i+1],insQuals[i],delQuals[i],gcps[i]);
+    }
+
+    /**
+     * Returns a matrix with the log10 transition probabilities for a number of bases.
+     *
+     * <p/>
+     * The first dimension of the matrix correspond to the different bases where the first one is stored in position 1.
+     * Thus the position 0 is left empty and the length of the resulting matrix is actually {@code insQual.length + 1}.
+     * <p/>
+     * Each entry is the transition probability array for that base with a length of {@link #TRANS_PROB_ARRAY_LENGTH}.
+     *
+     * @param insQuals insertion qualities.
+     * @param delQuals deletion qualities.
+     * @param gcps gap-continuation penalty qualities.
+     *
+     * @throws NullPointerException if any of the input arrays is {@code null}.
+     * @throws IllegalArgumentException if {@code IllegalArgumentException}
+     *  if the input array don't have the same length.
+     *
+     * @return never {@code null}, an matrix of the dimensions explained above.
+     */
+    @SuppressWarnings("unused")
+    public static double[][] qualToTransProbsLog10(final byte[] insQuals, final byte[] delQuals, final byte[] gcps) {
+        final double[][] dest = createTransitionMatrix(insQuals.length);
+        qualToTransProbsLog10(dest,insQuals,delQuals,gcps);
+        return dest;
+    }
+
+    /**
+     * Creates a transition probability matrix large enough to work with sequences of a particular length.
+     *
+     * @param maxReadLength the maximum read length for the transition matrix.
+     *
+     * @return never {@code null}. A matrix of {@code maxReadLength + 1} by {@link #TRANS_PROB_ARRAY_LENGTH} positions.
+     */
+    public static double[][] createTransitionMatrix(final int maxReadLength) {
+        return new double[maxReadLength + 1][TRANS_PROB_ARRAY_LENGTH];
+    }
+
+    /**
+     * Returns the probability that neither of two event takes place.
+     * <p/>
+     *
+     * We assume that both event never occur together and that delQual is the conditional probability
+     * (qual. encoded) of the second event, given the first event didn't took place. So that the
+     * probability of no event is: <br/>
+     *
+     * We assume that both event never occur together so that the probability of no event is: <br/>
+     *
+     * <code>1 - ProbErr(insQual) - ProbErr(delQual)</code> <br/>
+     *
+     * @param insQual PhRED scaled quality/probability of the first event.
+     * @param delQual PhRED scaled quality/probability of the second event.
+     *
+     * @return a value between 0 and 1.
+     */
+    public static double matchToMatchProb(final byte insQual, final byte delQual) {
+        return matchToMatchProb((insQual & 0xFF), (delQual & 0xFF));
+    }
+
+    /**
+     * Returns the probability (log 10 scaled) that neither of two event, insertion and deletion, takes place.
+     * <p/>
+     *
+     * We assume that both event never occur together so that the probability of no event is: <br/>
+     *
+     * <code>1 - ProbErr(insQual) - ProbErr(delQual)</code> <br/>
+     *
+     * @param insQual PhRED scaled quality/probability of an insertion.
+     * @param delQual PhRED scaled quality/probability of a deletion.
+     *
+     * @return a value between 0 and -Inf.
+     */
+    public static double matchToMatchProbLog10(final byte insQual, final byte delQual) {
+        return matchToMatchProbLog10((insQual & 0xFF), (delQual & 0xFF));
+    }
+
+    /**
+     * Returns the probability that neither of two events, insertion and deletion, takes place.
+     * <p/>
+     *
+     * We assume that both event never occur together and that delQual is the conditional probability
+     * (qual. encoded) of the second event, given the first event didn't took place. So that the
+     * probability of no event is: <br/>
+     *
+     * We assume that both event never occur together so that the probability of no event is: <br/>
+     *
+     * <code>1 - ProbErr(insQual) - ProbErr(delQual)</code> <br/>
+     *
+     * @param insQual PhRED scaled quality/probability of an insertion.
+     * @param delQual PhRED scaled quality/probability of a deletion.
+     * @return a value between 0 and 1.
+     */
+    public static double matchToMatchProb(final int insQual, final int delQual) {
+        final int minQual;
+        final int maxQual;
+        if (insQual <= delQual) {
+            minQual = insQual;
+            maxQual = delQual;
+        } else {
+            minQual = delQual;
+            maxQual = insQual;
+        }
+
+        if (minQual < 0) throw new IllegalArgumentException("quality cannot be negative: " + minQual + " and " + maxQual);
+
+        return (QualityUtils.MAX_QUAL < maxQual) ?  1.0 - Math.pow(10, MathUtils.approximateLog10SumLog10(-0.1 * minQual, -0.1 * maxQual)) :
+                matchToMatchProb[((maxQual * (maxQual + 1)) >> 1) + minQual];
+    }
+
+    /**
+     * Returns the probability (log 10 scaled) that neither of two event takes place.
+     * <p/>
+     *
+     * We assume that both event never occur together and that delQual is the conditional probability (qual. encoded)
+     * of the second event, given the first event didn't took place. So that the probability of no event is: <br/>
+     *
+     * We assume that both event never occur together so that the probability of no event is: <br/>
+     *
+     * <code>1 - ProbErr(insQual) - ProbErr(delQual)</code> <br/>
+     *
+     * @param insQual PhRED scaled quality/probability of an insertion.
+     * @param delQual PhRED scaled quality/probability of a deletion.
+     *
+     * @return a value between 0 and -Inf.
+     */
+    public static double matchToMatchProbLog10(final int insQual, final int delQual) {
+        final int minQual;
+        final int maxQual;
+        if (insQual <= delQual) {
+            minQual = insQual;
+            maxQual = delQual;
+        } else {
+            minQual = delQual;
+            maxQual = insQual;
+        }
+        return (QualityUtils.MAX_QUAL < maxQual) ? Math.log1p (
+                - Math.min(1,Math.pow(10,
+                        MathUtils.approximateLog10SumLog10(-.1 * minQual, -.1 * maxQual)))) * INV_LN10 :
+                matchToMatchLog10[((maxQual * (maxQual + 1)) >> 1) + minQual];
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/pairhmm/PairHMMReadyHaplotypes.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/pairhmm/PairHMMReadyHaplotypes.java
new file mode 100644
index 0000000..a6fcb2b
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/pairhmm/PairHMMReadyHaplotypes.java
@@ -0,0 +1,182 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.pairhmm;
+
+import java.util.*;
+
+/**
+ * Collection of haplotypes sorted in a convenient way to be run efficiently by the PairHMM:
+ * byte-lexicographic order puts haplotypes with long shared prefixes next to each other, and
+ * the iterator exposes how much prefix each haplotype shares with the previous one.
+ *
+ * TODO not yet in use but likely to be as part of making graph-based likelihood run faster.
+ * TODO this could be extended to the classical PairHMM implementation, simplifying the PairHMM API.
+ */
+public class PairHMMReadyHaplotypes implements Iterable<PairHMMReadyHaplotypes.Entry> {
+
+
+    /**
+     * A haplotype's bases paired with the likelihood eventually assigned to it
+     * ({@code Double.NaN} until {@link #setLikelihood} is called).
+     */
+    public class Entry {
+
+        private final byte[] bases;
+
+        // NaN marks "not yet computed".
+        private double likelihood = Double.NaN;
+
+        protected Entry(final byte[] bases) {
+            this.bases = bases;
+        }
+
+        public byte[] getBases() {
+            return bases;
+        }
+
+        public void setLikelihood(final double lk) {
+            likelihood = lk;
+        }
+
+        public double getLikelihood() {
+            return likelihood;
+        }
+
+    }
+
+    // Pairwise common-prefix lengths, filled lazily by the comparator below as a side effect of
+    // comparisons and reused by Iterator.startIndex().
+    private Map<Entry,Map<Entry,Integer>> commonPrefixLength;
+
+    // Haplotypes kept sorted byte-lexicographically so that neighbours share long prefixes.
+    private SortedSet<Entry> entries;
+
+    // Expected number of haplotypes; used to pre-size the per-entry prefix-length maps.
+    // NOTE(review): never assigned in the constructor, so it stays 0 — confirm intended.
+    private int capacity;
+
+    // Byte-wise lexicographic comparator that memoizes the common-prefix length of each compared
+    // pair (in both directions) into commonPrefixLength.
+    private final Comparator<Entry> comparator = new Comparator<Entry>() {
+        @Override
+        public int compare(final Entry o1, final Entry o2) {
+            final byte[] b1 = o1.bases;
+            final byte[] b2 = o2.bases;
+            // Fetch (or create) the cache rows for both entries.
+            Map<Entry,Integer> b1map = commonPrefixLength.get(o1);
+            if (b1map == null)
+                commonPrefixLength.put(o1, b1map = new HashMap<>(capacity));
+            Map<Entry,Integer> b2map = commonPrefixLength.get(o2);
+            if (b2map == null)
+                commonPrefixLength.put(o2, b2map = new HashMap<>(capacity));
+            // NOTE(review): equivalent to just b1map.get(o2); the ternary null check is redundant.
+            final Integer previousI = b1map.get(o2) == null ? null : b1map.get(o2);
+            int i;
+            int result;
+            final int iLimit = Math.min(b1.length,b2.length);
+            if (previousI == null) {
+                // First comparison of this pair: scan to the first mismatch and cache the
+                // common-prefix length in both directions.
+                for (i = 0; i < iLimit; i++)
+                    if (b1[i] != b2[i])
+                        break;
+                b1map.put(o2,i);
+                b2map.put(o1,i);
+            } else
+                i = previousI;
+
+            // i is the common-prefix length: order by the first differing byte, falling back to
+            // array length when one haplotype is a prefix of the other.
+            if (i < iLimit)
+                result = Byte.compare(b1[i],b2[i]);
+            else if (b1.length == b2.length)
+                result = 0;
+            else
+                result = b1.length < b2.length ? -1 : 1;
+            return result;
+        }
+    };
+
+    /**
+     * Creates an empty collection expected to hold roughly {@code capacity} haplotypes.
+     */
+    public PairHMMReadyHaplotypes(final int capacity) {
+        commonPrefixLength = new HashMap<>(capacity);
+        entries = new TreeSet<>(comparator);
+    }
+
+    /**
+     * Adds a haplotype given its bases; its likelihood starts unset (NaN).
+     */
+    public void add(final byte[] bases) {
+        final Entry entry = new Entry(bases);
+        entries.add(entry);
+    }
+
+    public int size() {
+        return entries.size();
+    }
+
+    @Override
+    public Iterator iterator() {
+        return new Iterator();
+    }
+
+    /**
+     * Iterates entries in sorted order, exposing for the current haplotype the length of the
+     * prefix it shares with the previously returned one (see {@link #startIndex()}), so callers
+     * can skip recomputation over the shared prefix.
+     */
+    public class Iterator implements java.util.Iterator<Entry> {
+
+        private java.util.Iterator<Entry> actualIterator;
+        private Entry previousEntry;
+        private Entry currentEntry;
+        // Cached result of startIndex(); -1 means "not yet computed for the current entry".
+        private int startIndex;
+        private int cmp;
+
+        private Iterator() {
+            actualIterator = entries.iterator();
+        }
+
+        public boolean hasNext() {
+            return actualIterator.hasNext();
+        }
+
+        public Entry next() {
+            previousEntry = currentEntry;
+            final Entry result = currentEntry = actualIterator.next();
+            startIndex = -1;
+            return result;
+        }
+
+        @Override
+        public void remove() {
+            throw new UnsupportedOperationException();
+        }
+
+        /** Bases of the current entry; fails if next() has not been called yet. */
+        public byte[] bases() {
+            if (currentEntry == null)
+                throw new NoSuchElementException();
+            return currentEntry.bases;
+        }
+
+        /**
+         * Length of the prefix the current haplotype shares with the previous one
+         * (0 for the first haplotype). Cached until the next call to next().
+         */
+        public int startIndex() {
+            if (startIndex >= 0)
+                return startIndex;
+            else if (previousEntry == null)
+                return startIndex = 0;
+            else {
+                // The comparator will make sure the common-prefix-length is updated.
+                // The result is stored in a field so that we avoid dead-code elimination
+                // (perhaps a bit paranoid, but it does not hurt to prevent it).
+                cmp = comparator.compare(previousEntry,currentEntry);
+                return startIndex = commonPrefixLength.get(previousEntry).get(currentEntry);
+            }
+        }
+
+        @Override
+        public String toString() {
+            return super.toString() + " cmp = " + cmp;
+        }
+
+        public void setLikelihood(final double likelihood) {
+            if (currentEntry == null)
+                throw new NoSuchElementException();
+            currentEntry.setLikelihood(likelihood);
+        }
+    }
+
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/pileup/MergingPileupElementIterator.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/pileup/MergingPileupElementIterator.java
new file mode 100644
index 0000000..f078d76
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/pileup/MergingPileupElementIterator.java
@@ -0,0 +1,76 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.pileup;
+
+import htsjdk.samtools.util.PeekableIterator;
+
+import java.util.Comparator;
+import java.util.Iterator;
+import java.util.PriorityQueue;
+
+/**
+ * Merges multiple pileups broken down by sample.
+ *
+ * @author mhanna
+ * @version 0.1
+ */
+class MergingPileupElementIterator<PE extends PileupElement> implements Iterator<PE> {
+    private final PriorityQueue<PeekableIterator<PE>> perSampleIterators;
+
+    public MergingPileupElementIterator(PerSamplePileupElementTracker<PE> tracker) {
+        perSampleIterators = new PriorityQueue<PeekableIterator<PE>>(Math.max(1,tracker.getSamples().size()),new PileupElementIteratorComparator());
+        for(final String sample: tracker.getSamples()) {
+            PileupElementTracker<PE> trackerPerSample = tracker.getElements(sample);
+            if(trackerPerSample.size() != 0)
+                perSampleIterators.add(new PeekableIterator<PE>(trackerPerSample.iterator()));
+        }
+    }
+
+    public boolean hasNext() {
+        return !perSampleIterators.isEmpty();
+    }
+
+    public PE next() {
+        PeekableIterator<PE> currentIterator = perSampleIterators.remove();
+        PE current = currentIterator.next();
+        if(currentIterator.hasNext())
+            perSampleIterators.add(currentIterator);
+        return current;
+    }
+
+    public void remove() {
+        throw new UnsupportedOperationException("Cannot remove from a merging iterator.");
+    }
+
+    /**
+     * Compares two peekable iterators consisting of pileup elements.
+     */
+    private class PileupElementIteratorComparator implements Comparator<PeekableIterator<PE>> {
+        public int compare(PeekableIterator<PE> lhs, PeekableIterator<PE> rhs) {
+            return rhs.peek().getOffset() - lhs.peek().getOffset();
+        }
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/pileup/PileupElement.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/pileup/PileupElement.java
new file mode 100644
index 0000000..c906698
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/pileup/PileupElement.java
@@ -0,0 +1,539 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.pileup;
+
+import com.google.java.contract.Ensures;
+import com.google.java.contract.Requires;
+import htsjdk.samtools.CigarElement;
+import htsjdk.samtools.CigarOperator;
+import org.broadinstitute.gatk.utils.BaseUtils;
+import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
+
+import java.util.Arrays;
+import java.util.EnumSet;
+import java.util.LinkedList;
+import java.util.List;
+
+/**
+ * Created by IntelliJ IDEA.
+ * User: depristo
+ * Date: Apr 14, 2009
+ * Time: 8:54:05 AM
+ */
+public class PileupElement implements Comparable<PileupElement> {
+    private final static LinkedList<CigarElement> EMPTY_LINKED_LIST = new LinkedList<>();
+
+    private final static EnumSet<CigarOperator> ON_GENOME_OPERATORS =
+            EnumSet.of(CigarOperator.M, CigarOperator.EQ, CigarOperator.X, CigarOperator.D);
+
+    public static final byte DELETION_BASE = BaseUtils.Base.D.base;
+    public static final byte DELETION_QUAL = (byte) 16;
+    public static final byte A_FOLLOWED_BY_INSERTION_BASE = (byte) 87;
+    public static final byte C_FOLLOWED_BY_INSERTION_BASE = (byte) 88;
+    public static final byte T_FOLLOWED_BY_INSERTION_BASE = (byte) 89;
+    public static final byte G_FOLLOWED_BY_INSERTION_BASE = (byte) 90;
+
+    protected final GATKSAMRecord read;         // the read this base belongs to
+    protected final int offset;                 // the offset in the bases array for this base
+
+    private final CigarElement currentCigarElement;
+    private final int currentCigarOffset;
+    private final int offsetInCurrentCigar;
+
+    /**
+     * Create a new pileup element
+     *
+     * @param read a non-null read to pileup
+     * @param baseOffset the offset into the read's base / qual vector aligned to this position on the genome. If the
+     *                   current cigar element is a deletion, offset should be the offset of the last M/=/X position.
+     * @param currentElement a non-null CigarElement that indicates the cigar element aligning the read to the genome
+     * @param currentCigarOffset the offset of currentElement in read.getCigar().getElement(currentCigarOffset) == currentElement)
+     * @param offsetInCurrentCigar how far into the currentElement are we in our alignment to the genome?
+     */
+    public PileupElement(final GATKSAMRecord read, final int baseOffset,
+                         final CigarElement currentElement, final int currentCigarOffset,
+                         final int offsetInCurrentCigar) {
+        assert currentElement != null;
+
+        this.read = read;
+        this.offset = baseOffset;
+        this.currentCigarElement = currentElement;
+        this.currentCigarOffset = currentCigarOffset;
+        this.offsetInCurrentCigar = offsetInCurrentCigar;
+
+        // for performance regions these are assertions
+        assert this.read != null;
+        assert this.offset >= 0 && this.offset < this.read.getReadLength();
+        assert this.currentCigarOffset >= 0;
+        assert this.currentCigarOffset < read.getCigarLength();
+        assert this.offsetInCurrentCigar >= 0;
+        assert this.offsetInCurrentCigar < currentElement.getLength();
+    }
+
+    /**
+     * Create a new PileupElement that's a copy of toCopy
+     * @param toCopy the element we want to copy
+     */
+    public PileupElement(final PileupElement toCopy) {
+        this(toCopy.read, toCopy.offset, toCopy.currentCigarElement, toCopy.currentCigarOffset, toCopy.offsetInCurrentCigar);
+    }
+
+    /**
+     * Is this element a deletion w.r.t. the reference genome?
+     *
+     * @return true if this is a deletion, false otherwise
+     */
+    public boolean isDeletion() {
+        return currentCigarElement.getOperator() == CigarOperator.D;
+    }
+
+    /**
+     * Is the current element immediately before a deletion, but itself not a deletion?
+     *
+     * Suppose we are aligning a read with cigar 3M2D1M.  This function is true
+     * if we are in the last cigar position of the 3M, but not if we are in the 2D itself.
+     *
+     * @return true if the next alignment position is a deletion w.r.t. the reference genome
+     */
+    public boolean isBeforeDeletionStart() {
+        return ! isDeletion() && atEndOfCurrentCigar() && hasOperator(getNextOnGenomeCigarElement(), CigarOperator.D);
+    }
+
+    /**
+     * Is the current element immediately after a deletion, but itself not a deletion?
+     *
+     * Suppose we are aligning a read with cigar 1M2D3M.  This function is true
+     * if we are in the first cigar position of the 3M, but not if we are in the 2D itself or
+     * in any but the first position of the 3M.
+     *
+     * @return true if the previous alignment position is a deletion w.r.t. the reference genome
+     */
+    public boolean isAfterDeletionEnd() {
+        return ! isDeletion() && atStartOfCurrentCigar() && hasOperator(getPreviousOnGenomeCigarElement(), CigarOperator.D);
+    }
+
+    /**
+     * Get the read for this pileup element
+     * @return a non-null GATKSAMRecord
+     */
+    @Ensures("result != null")
+    public GATKSAMRecord getRead() {
+        return read;
+    }
+
+    /**
+     * Get the offset of the this element into the read that aligns that read's base to this genomic position.
+     *
+     * If the current element is a deletion then offset is the offset of the last base containing offset.
+     *
+     * @return a valid offset into the read's bases
+     */
+    @Ensures({"result >= 0", "result <= read.getReadLength()"})
+    public int getOffset() {
+        return offset;
+    }
+
+    /**
+     * Get the base aligned to the genome at this location
+     *
+     * If the current element is a deletion returns DELETION_BASE
+     *
+     * @return a base encoded as a byte
+     */
+    @Ensures("result != DELETION_BASE || (isDeletion() && result == DELETION_BASE)")
+    public byte getBase() {
+        return isDeletion() ? DELETION_BASE : read.getReadBases()[offset];
+    }
+
+    @Deprecated
+    public int getBaseIndex() {
+        return BaseUtils.simpleBaseToBaseIndex(getBase());
+    }
+
+    /**
+     * Get the base quality score of the base at this aligned position on the genome
+     * @return a phred-scaled quality score as a byte
+     */
+    public byte getQual() {
+        return isDeletion() ? DELETION_QUAL : read.getBaseQualities()[offset];
+    }
+
+    /**
+     * Get the Base Insertion quality at this pileup position
+     * @return a phred-scaled quality score as a byte
+     */
+    public byte getBaseInsertionQual() {
+        return isDeletion() ? DELETION_QUAL : read.getBaseInsertionQualities()[offset];
+    }
+
+    /**
+     * Get the Base Deletion quality at this pileup position
+     * @return a phred-scaled quality score as a byte
+     */
+    public byte getBaseDeletionQual() {
+        return isDeletion() ? DELETION_QUAL : read.getBaseDeletionQualities()[offset];
+    }
+
+    /**
+     * Get the length of an immediately following insertion or deletion event, or 0 if no such event exists
+     *
+     * Only returns a positive value when this pileup element is immediately before an indel.  Being
+     * immediately before a deletion means that this pileup element isn't an deletion, and that the
+     * next genomic alignment for this read is a deletion.  For the insertion case, this means
+     * that an insertion cigar occurs immediately after this element, between this one and the
+     * next genomic position.
+     *
+     * Note this function may be expensive, so multiple uses should be cached by the caller
+     *
+     * @return length of the event (number of inserted or deleted bases), or 0
+     */
+    @Ensures("result >= 0")
+    public int getLengthOfImmediatelyFollowingIndel() {
+        final CigarElement element = getNextIndelCigarElement();
+        return element == null ? 0 : element.getLength();
+    }
+
+    /**
+     * Helpful function to get the immediately following cigar element, for an insertion or deletion
+     *
+     * if this state precedes a deletion (i.e., next position on genome) or insertion (immediately between
+     * this and the next position) returns the CigarElement corresponding to this event.  Otherwise returns
+     * null.
+     *
+     * @return a CigarElement, or null if the next alignment state ins't an insertion or deletion.
+     */
+    private CigarElement getNextIndelCigarElement() {
+        if ( isBeforeDeletionStart() ) {
+            final CigarElement element = getNextOnGenomeCigarElement();
+            if ( element == null || element.getOperator() != CigarOperator.D )
+                throw new IllegalStateException("Immediately before deletion but the next cigar element isn't a deletion " + element);
+            return element;
+        } else if ( isBeforeInsertion() ) {
+            final CigarElement element = getBetweenNextPosition().get(0);
+            if ( element.getOperator() != CigarOperator.I )
+                throw new IllegalStateException("Immediately before insertion but the next cigar element isn't an insertion " + element);
+            return element;
+        } else {
+            return null;
+        }
+    }
+
+    /**
+     * Get the bases for an insertion that immediately follows this alignment state, or null if none exists
+     *
+     * @see #getLengthOfImmediatelyFollowingIndel() for details on the meaning of immediately.
+     *
+     * If the immediately following state isn't an insertion, returns null
+     *
+     * @return actual sequence of inserted bases, or a null if the event is a deletion or if there is no event in the associated read.
+     */
+    @Ensures("result == null || result.length() == getLengthOfImmediatelyFollowingIndel()")
+    public String getBasesOfImmediatelyFollowingInsertion() {
+        final CigarElement element = getNextIndelCigarElement();
+        if ( element != null && element.getOperator() == CigarOperator.I ) {
+            final int getFrom = offset + 1;
+            final byte[] bases = Arrays.copyOfRange(read.getReadBases(), getFrom, getFrom + element.getLength());
+            return new String(bases);
+        } else
+            return null;
+    }
+
+    /**
+     * Get the mapping quality of the read of this element
+     * @return the mapping quality of the underlying SAM record
+     */
+    public int getMappingQual() {
+        return read.getMappingQuality();
+    }
+
+    @Ensures("result != null")
+    public String toString() {
+        return String.format("%s @ %d = %c Q%d", getRead().getReadName(), getOffset(), (char) getBase(), getQual());
+    }
+
+    @Override
+    public int compareTo(final PileupElement pileupElement) {
+        if (offset < pileupElement.offset)
+            return -1;
+        else if (offset > pileupElement.offset)
+            return 1;
+        else if (read.getAlignmentStart() < pileupElement.read.getAlignmentStart())
+            return -1;
+        else if (read.getAlignmentStart() > pileupElement.read.getAlignmentStart())
+            return 1;
+        else
+            return 0;
+    }
+
+    // --------------------------------------------------------------------------
+    //
+    // Reduced read accessors
+    //
+    // --------------------------------------------------------------------------
+
+    /**
+     * Get the cigar element aligning this element to the genome
+     * @return a non-null CigarElement
+     */
+    @Ensures("result != null")
+    public CigarElement getCurrentCigarElement() {
+        return currentCigarElement;
+    }
+
+    /**
+     * Get the offset of this cigar element in the Cigar of the current read (0-based)
+     *
+     * Suppose the cigar is 1M2D3I4D.  If we are in the 1M state this function returns
+     * 0.  If we are in 2D, the result is 1.  If we are in the 4D, the result is 3.
+     *
+     * @return an offset into the read.getCigar() that brings us to the current cigar element
+     */
+    public int getCurrentCigarOffset() {
+        return currentCigarOffset;
+    }
+
+    /**
+     * Get the offset into the *current* cigar element for this alignment position
+     *
+     * We can be anywhere from offset 0 (first position) to length - 1 of the current
+     * cigar element aligning us to this genomic position.
+     *
+     * @return a valid offset into the current cigar element
+     */
+    @Ensures({"result >= 0", "result < getCurrentCigarElement().getLength()"})
+    public int getOffsetInCurrentCigar() {
+        return offsetInCurrentCigar;
+    }
+
+    /**
+     * Get the cigar elements that occur before the current position but after the previous position on the genome
+     *
+     * For example, if we are in the 3M state of 1M2I3M state then 2I occurs before this position.
+     *
+     * Note that this function does not care where we are in the current cigar element.  In the previous
+     * example this list of elements contains the 2I state regardless of where you are in the 3M.
+     *
+     * Note this returns the list of all elements that occur between this and the prev site, so for
+     * example we might have 5S10I2M and this function would return [5S, 10I].
+     *
+     * @return a non-null list of CigarElements
+     */
+    @Ensures("result != null")
+    public LinkedList<CigarElement> getBetweenPrevPosition() {
+        return atStartOfCurrentCigar() ? getBetween(Direction.PREV) : EMPTY_LINKED_LIST;
+    }
+
+    /**
+     * Get the cigar elements that occur after the current position but before the next position on the genome
+     *
+     * @see #getBetweenPrevPosition() for more details
+     *
+     * @return a non-null list of CigarElements
+     */
+    @Ensures("result != null")
+    public LinkedList<CigarElement> getBetweenNextPosition() {
+        return atEndOfCurrentCigar() ? getBetween(Direction.NEXT) : EMPTY_LINKED_LIST;
+    }
+
+    /** for some helper functions */
+    private enum Direction { PREV, NEXT }
+
+    /**
+     * Helper function to get cigar elements between this and either the prev or next genomic position
+     *
+     * @param direction PREVIOUS if we want before, NEXT if we want after
+     * @return a non-null list of cigar elements between this and the neighboring position in direction
+     */
+    @Ensures("result != null")
+    private LinkedList<CigarElement> getBetween(final Direction direction) {
+        final int increment = direction == Direction.NEXT ? 1 : -1;
+        LinkedList<CigarElement> elements = null;
+        final int nCigarElements = read.getCigarLength();
+        for ( int i = currentCigarOffset + increment; i >= 0 && i < nCigarElements; i += increment) {
+            final CigarElement elt = read.getCigar().getCigarElement(i);
+            if ( ON_GENOME_OPERATORS.contains(elt.getOperator()) )
+                break;
+            else {
+                // optimization: don't allocate list if not necessary
+                if ( elements == null )
+                    elements = new LinkedList<CigarElement>();
+
+                if ( increment > 0 )
+                    // to keep the list in the right order, if we are incrementing positively add to the end
+                    elements.add(elt);
+                else
+                    // counting down => add to front
+                    elements.addFirst(elt);
+            }
+        }
+
+        // optimization: elements is null because nothing got added, just return the empty list
+        return elements == null ? EMPTY_LINKED_LIST : elements;
+    }
+
+    /**
+     * Get the cigar element of the previous genomic aligned position
+     *
+     * For example, we might have 1M2I3M, and be sitting at the someone in the 3M.  This
+     * function would return 1M, as the 2I isn't on the genome.  Note this function skips
+     * all of the positions that would occur in the current element.  So the result
+     * is always 1M regardless of whether we're in the first, second, or third position of the 3M
+     * cigar.
+     *
+     * @return a CigarElement, or null (indicating that no previous element exists)
+     */
+    @Ensures("result == null || ON_GENOME_OPERATORS.contains(result.getOperator())")
+    public CigarElement getPreviousOnGenomeCigarElement() {
+        return getNeighboringOnGenomeCigarElement(Direction.PREV);
+    }
+
+    /**
+     * Get the cigar element of the next genomic aligned position
+     *
+     * @see #getPreviousOnGenomeCigarElement() for more details
+     *
+     * @return a CigarElement, or null (indicating that no next element exists)
+     */
+    @Ensures("result == null || ON_GENOME_OPERATORS.contains(result.getOperator())")
+    public CigarElement getNextOnGenomeCigarElement() {
+        return getNeighboringOnGenomeCigarElement(Direction.NEXT);
+    }
+
+    /**
+     * Helper function to get the cigar element of the next or previous genomic position
+     * @param direction the direction to look in
+     * @return a CigarElement, or null if no such element exists
+     */
+    @Ensures("result == null || ON_GENOME_OPERATORS.contains(result.getOperator())")
+    private CigarElement getNeighboringOnGenomeCigarElement(final Direction direction) {
+        final int increment = direction == Direction.NEXT ? 1 : -1;
+        final int nCigarElements = read.getCigarLength();
+
+        for ( int i = currentCigarOffset + increment; i >= 0 && i < nCigarElements; i += increment) {
+            final CigarElement elt = read.getCigar().getCigarElement(i);
+            if ( ON_GENOME_OPERATORS.contains(elt.getOperator()) )
+                return elt;
+        }
+
+        // getting here means that you didn't find anything
+        return null;
+    }
+
+    /**
+     * Does the cigar element (which may be null) have operation toMatch?
+     *
+     * @param maybeCigarElement a CigarElement that might be null
+     * @param toMatch a CigarOperator we want to match against the one in maybeCigarElement
+     * @return true if maybeCigarElement isn't null and has operator toMatch
+     */
+    @Requires("toMatch != null")
+    private boolean hasOperator(final CigarElement maybeCigarElement, final CigarOperator toMatch) {
+        return maybeCigarElement != null && maybeCigarElement.getOperator() == toMatch;
+    }
+
+    /**
+     * Does an insertion occur immediately before the current position on the genome?
+     *
+     * @return true if yes, false if no
+     */
+    public boolean isAfterInsertion() { return isAfter(getBetweenPrevPosition(), CigarOperator.I); }
+
+    /**
+     * Does an insertion occur immediately after the current position on the genome?
+     *
+     * @return true if yes, false if no
+     */
+    public boolean isBeforeInsertion() { return isBefore(getBetweenNextPosition(), CigarOperator.I); }
+
+    /**
+     * Does a soft-clipping event occur immediately before the current position on the genome?
+     *
+     * @return true if yes, false if no
+     */
+    public boolean isAfterSoftClip() { return isAfter(getBetweenPrevPosition(), CigarOperator.S); }
+
+    /**
+     * Does a soft-clipping event occur immediately after the current position on the genome?
+     *
+     * @return true if yes, false if no
+     */
+    public boolean isBeforeSoftClip() { return isBefore(getBetweenNextPosition(), CigarOperator.S); }
+
+    /**
+     * Does a soft-clipping event occur immediately before or after the current position on the genome?
+     *
+     * @return true if yes, false if no
+     */
+    public boolean isNextToSoftClip() { return isAfterSoftClip() || isBeforeSoftClip(); }
+
+    /**
+     * Is the current position at the end of the current cigar?
+     *
+     * For example, if we are in element 3M, this function returns true if we are at offsetInCurrentCigar
+     * of 2, but not 0 or 1.
+     *
+     * @return true if we're at the end of the current cigar
+     */
+    public boolean atEndOfCurrentCigar() {
+        return offsetInCurrentCigar == currentCigarElement.getLength() - 1;
+    }
+
+    /**
+     * Is the current position at the start of the current cigar?
+     *
+     * For example, if we are in element 3M, this function returns true if we are at offsetInCurrentCigar
+     * of 0, but not 1 or 2.
+     *
+     * @return true if we're at the start of the current cigar
+     */
+    public boolean atStartOfCurrentCigar() {
+        return offsetInCurrentCigar == 0;
+    }
+
+    /**
+     * Is op the last element in the list of elements?
+     *
+     * @param elements the elements to examine
+     * @param op the op we want the last element's op to equal
+     * @return true if op == last(elements).op
+     */
+    @Requires({"elements != null", "op != null"})
+    private boolean isAfter(final LinkedList<CigarElement> elements, final CigarOperator op) {
+        return ! elements.isEmpty() && elements.peekLast().getOperator() == op;
+    }
+
+    /**
+     * Is op the first element in the list of elements?
+     *
+     * @param elements the elements to examine
+     * @param op the op we want the last element's op to equal
+     * @return true if op == first(elements).op
+     */
+    @Requires({"elements != null", "op != null"})
+    private boolean isBefore(final List<CigarElement> elements, final CigarOperator op) {
+        return ! elements.isEmpty() && elements.get(0).getOperator() == op;
+    }
+}
\ No newline at end of file
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/pileup/PileupElementFilter.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/pileup/PileupElementFilter.java
new file mode 100644
index 0000000..93f39ab
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/pileup/PileupElementFilter.java
@@ -0,0 +1,36 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.pileup;
+
+/**
+ * A filtering interface for pileup elements.
+ *
+ * @author mhanna
+ * @version 0.1
+ */
+public interface PileupElementFilter {
+    public boolean allow(final PileupElement pileupElement);
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/pileup/PileupElementTracker.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/pileup/PileupElementTracker.java
new file mode 100644
index 0000000..b2a7e84
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/pileup/PileupElementTracker.java
@@ -0,0 +1,154 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.pileup;
+
+import org.apache.commons.collections.iterators.IteratorChain;
+
+import java.util.*;
+
/**
 * Base class for the containers that hold the pileup elements at a locus,
 * either as one flat collection or broken down by sample.
 *
 * @author mhanna
 * @version 0.1
 */
abstract class PileupElementTracker<PE extends PileupElement> implements Iterable<PE> {
    // Total number of pileup elements held by this tracker.
    public abstract int size();

    /**
     * Iterate through the PEs here, but in any order, which may improve performance
     * if you don't care about the underlying order the reads are coming to you in.
     * @return an iteratable over all pileup elements in this tracker
     */
    public abstract Iterable<PE> unorderedIterable();

    /**
     * Same as @see #unorderedIterable but the actual iterator itself
     * @return an unordered iterator over all pileup elements in this tracker
     */
    public Iterator<PE> unorderedIterator() { return unorderedIterable().iterator(); }

    // Shallow copy: the tracker structure is duplicated, element objects are shared.
    public abstract PileupElementTracker<PE> copy();
}
+
+class UnifiedPileupElementTracker<PE extends PileupElement> extends PileupElementTracker<PE> {
+    private final List<PE> pileup;
+
+    @Override
+    public UnifiedPileupElementTracker<PE> copy() {
+        UnifiedPileupElementTracker<PE> result = new UnifiedPileupElementTracker<PE>();
+        for(PE element : pileup)
+            result.add(element);
+        return result;
+    }
+
+    public UnifiedPileupElementTracker() { pileup = new LinkedList<PE>(); }
+    public UnifiedPileupElementTracker(List<PE> pileup) { this.pileup = pileup; }
+
+    public void add(PE element) {
+        pileup.add(element);
+    }
+
+    public PE get(int index) {
+        return pileup.get(index);
+    }
+
+    public int size() {
+        return pileup.size();
+    }
+
+    public Iterator<PE> iterator() { return pileup.iterator(); }
+    public Iterable<PE> unorderedIterable() { return this; }
+}
+
+class PerSamplePileupElementTracker<PE extends PileupElement> extends PileupElementTracker<PE> {
+    private final Map<String,PileupElementTracker<PE>> pileup;
+    private int size = 0;
+
+    public PerSamplePileupElementTracker() {
+        pileup = new HashMap<String,PileupElementTracker<PE>>();
+    }
+
+    public PerSamplePileupElementTracker<PE> copy() {
+        PerSamplePileupElementTracker<PE> result = new PerSamplePileupElementTracker<PE>();
+        for (Map.Entry<String, PileupElementTracker<PE>> entry : pileup.entrySet())
+            result.addElements(entry.getKey(), entry.getValue());
+
+        return result;
+    }
+
+    /**
+     * Gets a list of all the samples stored in this pileup.
+     * @return List of samples in this pileup.
+     */
+    public Collection<String> getSamples() {
+        return pileup.keySet();
+    }
+
+    public PileupElementTracker<PE> getElements(final String sample) {
+        return pileup.get(sample);
+    }
+
+    public PileupElementTracker<PE> getElements(final Collection<String> selectSampleNames) {
+        PerSamplePileupElementTracker<PE> result = new PerSamplePileupElementTracker<PE>();
+        for (final String sample :  selectSampleNames) {
+            result.addElements(sample, pileup.get(sample));
+        }
+        return result;
+    }
+
+    public void addElements(final String sample, PileupElementTracker<PE> elements) {
+        pileup.put(sample,elements);
+        size += elements.size();
+    }
+
+    public Iterator<PE> iterator() { return new MergingPileupElementIterator<PE>(this); }
+
+    public int size() {
+        return size;
+    }
+
+
+    public Iterable<PE> unorderedIterable() {
+        return new Iterable<PE>() {
+            @Override
+            public Iterator<PE> iterator() {
+                return new Iterator<PE>() {
+                    final private IteratorChain chain = new IteratorChain();
+
+                    { // initialize the chain with the unordered iterators of the per sample pileups
+                        for ( PileupElementTracker<PE> pet : pileup.values() ) {
+                            chain.addIterator(pet.unorderedIterator());
+                        }
+                    }
+                    @Override public boolean hasNext() { return chain.hasNext(); }
+                    @Override public PE next() { return (PE)chain.next(); }
+                    @Override public void remove() { throw new UnsupportedOperationException("Cannot remove"); }
+                };
+            }
+        };
+    }
+}
\ No newline at end of file
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/pileup/ReadBackedPileup.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/pileup/ReadBackedPileup.java
new file mode 100644
index 0000000..ad21089
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/pileup/ReadBackedPileup.java
@@ -0,0 +1,295 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.pileup;
+
+import org.broadinstitute.gatk.utils.GenomeLoc;
+import org.broadinstitute.gatk.utils.HasGenomeLocation;
+import org.broadinstitute.gatk.utils.fragments.FragmentCollection;
+import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
+
+import java.util.Collection;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+
/**
 * A data retrieval interface for accessing parts of a pileup: filtered views,
 * per-sample/per-read-group subsets, and summary counts at a single locus.
 *
 * @author mhanna
 * @version 0.1
 */
public interface ReadBackedPileup extends Iterable<PileupElement>, HasGenomeLocation {
    /**
     * Returns a new ReadBackedPileup that is free of deletion spanning reads in this pileup.  Note that this
     * does not copy the data, so both ReadBackedPileups should not be changed.  Doesn't make an unnecessary copy
     * of the pileup (just returns this) if there are no deletions in the pileup.
     *
     * @return a pileup without deletion elements; may be this same pileup if none were present
     */
    public ReadBackedPileup getPileupWithoutDeletions();

    /**
     * Returns a new ReadBackedPileup where only one read from an overlapping read
     * pair is retained.  If the two reads in question disagree to their basecall,
     * neither read is retained.  If they agree on the base, the read with the higher
     * quality observation is retained
     *
     * @return the newly filtered pileup
     */
    public ReadBackedPileup getOverlappingFragmentFilteredPileup();

    /**
     * Returns a new ReadBackedPileup where only one read from an overlapping read
     * pair is retained.  If discardDiscordant and the two reads in question disagree to their basecall,
     * neither read is retained.  Otherwise, the read with the higher
     * quality (base or mapping, depending on baseQualNotMapQual) observation is retained
     *
     * @param discardDiscordant  if true, drop both reads of a pair that disagree on the base
     * @param baseQualNotMapQual if true compare base qualities, otherwise mapping qualities
     * @return the newly filtered pileup
     */
    public ReadBackedPileup getOverlappingFragmentFilteredPileup(boolean discardDiscordant, boolean baseQualNotMapQual);

    /**
     * Returns a new ReadBackedPileup that is free of mapping quality zero reads in this pileup.  Note that this
     * does not copy the data, so both ReadBackedPileups should not be changed.  Doesn't make an unnecessary copy
     * of the pileup (just returns this) if there are no MQ0 reads in the pileup.
     *
     * @return a pileup without MQ0 elements; may be this same pileup if none were present
     */
    public ReadBackedPileup getPileupWithoutMappingQualityZeroReads();

    /**
     * Gets the pileup consisting of only reads on the positive strand.
     * @return A read-backed pileup consisting only of reads on the positive strand.
     */
    public ReadBackedPileup getPositiveStrandPileup();

    /**
     * Gets the pileup consisting of only reads on the negative strand.
     * @return A read-backed pileup consisting only of reads on the negative strand.
     */
    public ReadBackedPileup getNegativeStrandPileup();

    /**
     * Gets a pileup consisting of all those elements passed by a given filter.
     * @param filter Filter to use when testing for elements.
     * @return a pileup without the given filtered elements.
     */
    public ReadBackedPileup getFilteredPileup(PileupElementFilter filter);

    /** Returns subset of this pileup that contains only bases with quality >= minBaseQ, coming from
     * reads with mapping qualities >= minMapQ. This method allocates and returns a new instance of ReadBackedPileup.
     * @param minBaseQ minimum base quality to retain (inclusive)
     * @param minMapQ  minimum mapping quality to retain (inclusive)
     * @return the newly allocated, filtered pileup
     */
    public ReadBackedPileup getBaseAndMappingFilteredPileup( int minBaseQ, int minMapQ );

    /** Returns subset of this pileup that contains only bases with quality >= minBaseQ.
     * This method allocates and returns a new instance of ReadBackedPileup.
     * @param minBaseQ minimum base quality to retain (inclusive)
     * @return the newly allocated, filtered pileup
     */
    public ReadBackedPileup getBaseFilteredPileup( int minBaseQ );

    /** Returns subset of this pileup that contains only bases coming from reads with mapping quality >= minMapQ.
     * This method allocates and returns a new instance of ReadBackedPileup.
     * @param minMapQ minimum mapping quality to retain (inclusive)
     * @return the newly allocated, filtered pileup
     */
    public ReadBackedPileup getMappingFilteredPileup( int minMapQ );

    /**
     * Returns a pileup randomly downsampled to the desiredCoverage.
     *
     * @param desiredCoverage target number of elements after downsampling
     * @return the downsampled pileup
     */
    public ReadBackedPileup getDownsampledPileup(int desiredCoverage);

    /**
     * Gets a collection of all the read groups represented in this pileup.
     * @return A collection of all the read group ids represented in this pileup.
     */
    public Collection<String> getReadGroups();

    /**
     * Gets all the reads associated with a given read group.
     * @param readGroupId Identifier for the read group.
     * @return A pileup containing only the reads in the given read group.
     */
    public ReadBackedPileup getPileupForReadGroup(String readGroupId);

    /**
     * Gets all the reads associated with a given read groups.
     * @param rgSet Set of identifiers for the read group.
     * @return A pileup containing only the reads in the given read groups.
     */
    public ReadBackedPileup getPileupForReadGroups(final HashSet<String> rgSet);
    
    /**
     * Gets all reads in a given lane id. (Lane ID is the read group
     * id stripped of the last .XX sample identifier added by the GATK).
     * @param laneID The read group ID without the sample identifier added by the GATK.
     * @return A pileup containing the reads from all samples in the given lane.
     */
    public ReadBackedPileup getPileupForLane(String laneID);

    /**
     * Gets a collection of *names* of all the samples stored in this pileup.
     * @return Collection of names
     */
    public Collection<String> getSamples();


    /**
     * Gets the particular subset of this pileup for all the given sample names.
     * @param sampleNames Names of the samples to include.
     * @return A subset of this pileup containing only reads from the given samples.
     */
    public ReadBackedPileup getPileupForSamples(Collection<String> sampleNames);

    /**
     * Gets the particular subset of this pileup for each given sample name.
     *
     * Same as calling getPileupForSample for all samples, but in O(n) instead of O(n^2).
     *
     * @param sampleNames Names of the samples to include.
     * @return A map from each sample name to its sub-pileup.
     */
    public Map<String, ReadBackedPileup> getPileupsForSamples(Collection<String> sampleNames);


    /**
     * Gets the particular subset of this pileup with the given sample name.
     * @param sampleName Name of the sample to use.
     * @return A subset of this pileup containing only reads with the given sample.
     */
    public ReadBackedPileup getPileupForSample(String sampleName);
    
    /**
     * Simple useful routine to count the number of deletion bases in this pileup
     *
     * @return number of deletion elements in this pileup
     */
    public int getNumberOfDeletions();

    /**
     * Simple useful routine to count the number of deletion bases in at the next position this pileup
     *
     * @return number of elements whose read has a deletion immediately after this position
     */
    public int getNumberOfDeletionsAfterThisElement();

    /**
     * Simple useful routine to count the number of insertions right after this pileup
     *
     * @return number of elements whose read has an insertion immediately after this position
     */
    public int getNumberOfInsertionsAfterThisElement();

    /** @return number of elements whose read has mapping quality zero */
    public int getNumberOfMappingQualityZeroReads();

    /**
     * @return the number of physical elements in this pileup (a reduced read is counted just once)
     */
    public int getNumberOfElements();

    /**
     * @return the number of abstract elements in this pileup (reduced reads are expanded to count all reads that they represent)
     */
    public int depthOfCoverage();

    /**
     * @return true if there are 0 elements in the pileup, false otherwise
     */
    public boolean isEmpty();

    /**
     * @return the location of this pileup
     */
    public GenomeLoc getLocation();

    /**
     * Get counts of A, C, G, T in order, which returns a int[4] vector with counts according
     * to BaseUtils.simpleBaseToBaseIndex for each base.
     *
     * @return int[4] of A, C, G, T counts
     */
    public int[] getBaseCounts();

    /** @return a string rendering of this pileup given the reference base */
    public String getPileupString(Character ref);

    /**
     * Returns a list of the reads in this pileup. Note this call costs O(n) and allocates fresh lists each time
     * @return freshly allocated list of the reads in this pileup
     */
    public List<GATKSAMRecord> getReads();

    /**
     * Returns a list of the offsets in this pileup. Note this call costs O(n) and allocates fresh lists each time
     * @return freshly allocated list of per-element read offsets
     */
    public List<Integer> getOffsets();

    /**
     * Returns an array of the bases in this pileup. Note this call costs O(n) and allocates fresh array each time
     * @return freshly allocated array of the piled bases
     */
    public byte[] getBases();

    /**
    * Returns an array of the quals in this pileup. Note this call costs O(n) and allocates fresh array each time
    * @return freshly allocated array of base qualities
    */
    public byte[] getQuals();

    /**
     * Get an array of the mapping qualities
     * @return freshly allocated array of per-element mapping qualities
     */
    public int[] getMappingQuals();

    /**
     * Returns a new ReadBackedPileup that is sorted by start coordinate of the reads.
     *
     * @return a pileup whose elements are ordered by read start coordinate
     */
    public ReadBackedPileup getStartSortedPileup();

    /**
     * Converts this pileup into a FragmentCollection (see FragmentUtils for documentation)
     * @return the fragment-based view of this pileup
     */
    public FragmentCollection<PileupElement> toFragments();

    /**
     * Creates a full copy (not shallow) of the ReadBacked Pileup
     *
     * @return an independent copy of this pileup
     */
    public ReadBackedPileup copy();

}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/pileup/ReadBackedPileupImpl.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/pileup/ReadBackedPileupImpl.java
new file mode 100644
index 0000000..5dd3c6a
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/pileup/ReadBackedPileupImpl.java
@@ -0,0 +1,1040 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.pileup;
+
+import org.broadinstitute.gatk.utils.GenomeLoc;
+import org.broadinstitute.gatk.utils.Utils;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+import org.broadinstitute.gatk.utils.fragments.FragmentCollection;
+import org.broadinstitute.gatk.utils.fragments.FragmentUtils;
+import org.broadinstitute.gatk.utils.locusiterator.LocusIteratorByState;
+import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
+import org.broadinstitute.gatk.utils.BaseUtils;
+
+import java.util.*;
+
+public class ReadBackedPileupImpl implements ReadBackedPileup {
    protected final GenomeLoc loc;                                            // locus this pileup covers
    protected final PileupElementTracker<PileupElement> pileupElementTracker; // backing storage for the elements

    // sentinel meaning "cached value not yet computed"
    private final static int UNINITIALIZED_CACHED_INT_VALUE = -1;

    /**
     * Different from the number of elements due to reduced reads
     */
    private int depthOfCoverage = UNINITIALIZED_CACHED_INT_VALUE;
    private int nDeletions = UNINITIALIZED_CACHED_INT_VALUE;            // cached value of the number of deletions
    private int nMQ0Reads = UNINITIALIZED_CACHED_INT_VALUE;             // cached value of the number of MQ0 reads
+
    /**
     * Create a new version of a read backed pileup at loc, using the reads and their corresponding
     * offsets.  This pileup will contain a list, in order of the reads, of the piled bases at
     * reads[i] for all i in offsets.  Does not make a copy of the data, so it's not safe to
     * go changing the reads.
     *
     * @param loc     The genome loc to associate reads with
     * @param reads   reads contributing to this pileup
     * @param offsets per-read offset of the piled base, parallel to reads
     */
    public ReadBackedPileupImpl(GenomeLoc loc, List<GATKSAMRecord> reads, List<Integer> offsets) {
        this.loc = loc;
        this.pileupElementTracker = readsOffsets2Pileup(reads, offsets);
    }
+
+
    /**
     * Create a new version of a read backed pileup at loc without any aligned reads
     * (an empty pileup backed by an empty unified tracker).
     */
    public ReadBackedPileupImpl(GenomeLoc loc) {
        this(loc, new UnifiedPileupElementTracker<PileupElement>());
    }
+
    /**
     * Create a new version of a read backed pileup at loc from a prebuilt element list.
     * This lower level constructor assumes pileup is well-formed and merely keeps a
     * pointer to pileup.  Don't go changing the data in pileup.
     *
     * @param loc    location of the pileup; must not be null
     * @param pileup the elements; must not be null (stored by reference)
     */
    public ReadBackedPileupImpl(GenomeLoc loc, List<PileupElement> pileup) {
        if (loc == null) throw new ReviewedGATKException("Illegal null genomeloc in ReadBackedPileup");
        if (pileup == null) throw new ReviewedGATKException("Illegal null pileup in ReadBackedPileup");

        this.loc = loc;
        this.pileupElementTracker = new UnifiedPileupElementTracker<PileupElement>(pileup);
    }
+
    /**
     * Optimization of above constructor where all of the cached data is provided.
     *
     * @deprecated the size/nDeletions/nMQ0Reads arguments are ignored; the caches
     *             are now computed lazily.  Use {@code ReadBackedPileupImpl(GenomeLoc, List)} instead.
     */
    @Deprecated
    public ReadBackedPileupImpl(GenomeLoc loc, List<PileupElement> pileup, int size, int nDeletions, int nMQ0Reads) {
        this(loc, pileup);
    }
+
    /** Internal constructor wrapping an existing tracker without copying it. */
    protected ReadBackedPileupImpl(GenomeLoc loc, PileupElementTracker<PileupElement> tracker) {
        this.loc = loc;
        this.pileupElementTracker = tracker;
    }
+
+    public ReadBackedPileupImpl(GenomeLoc loc, Map<String, ReadBackedPileupImpl> pileupsBySample) {
+        this.loc = loc;
+        PerSamplePileupElementTracker<PileupElement> tracker = new PerSamplePileupElementTracker<PileupElement>();
+        for (Map.Entry<String, ReadBackedPileupImpl> pileupEntry : pileupsBySample.entrySet()) {
+            tracker.addElements(pileupEntry.getKey(), pileupEntry.getValue().pileupElementTracker);
+        }
+        this.pileupElementTracker = tracker;
+    }
+
    /** Create a pileup at loc from reads that all share the same single offset. */
    public ReadBackedPileupImpl(GenomeLoc loc, List<GATKSAMRecord> reads, int offset) {
        this.loc = loc;
        this.pileupElementTracker = readsOffsets2Pileup(reads, offset);
    }
+
+    /**
+     * Helper routine for converting reads and offset lists to a PileupElement list.
+     *
+     * @param reads
+     * @param offsets
+     * @return
+     */
+    private PileupElementTracker<PileupElement> readsOffsets2Pileup(List<GATKSAMRecord> reads, List<Integer> offsets) {
+        if (reads == null) throw new ReviewedGATKException("Illegal null read list in UnifiedReadBackedPileup");
+        if (offsets == null) throw new ReviewedGATKException("Illegal null offsets list in UnifiedReadBackedPileup");
+        if (reads.size() != offsets.size())
+            throw new ReviewedGATKException("Reads and offset lists have different sizes!");
+
+        UnifiedPileupElementTracker<PileupElement> pileup = new UnifiedPileupElementTracker<PileupElement>();
+        for (int i = 0; i < reads.size(); i++) {
+            GATKSAMRecord read = reads.get(i);
+            int offset = offsets.get(i);
+            pileup.add(createNewPileupElement(read, offset)); // only used to create fake pileups for testing so ancillary information is not important
+        }
+
+        return pileup;
+    }
+
+    /**
+     * Helper routine for converting reads and a single offset to a PileupElement list.
+     *
+     * @param reads
+     * @param offset
+     * @return
+     */
+    private PileupElementTracker<PileupElement> readsOffsets2Pileup(List<GATKSAMRecord> reads, int offset) {
+        if (reads == null) throw new ReviewedGATKException("Illegal null read list in UnifiedReadBackedPileup");
+        if (offset < 0) throw new ReviewedGATKException("Illegal offset < 0 UnifiedReadBackedPileup");
+
+        UnifiedPileupElementTracker<PileupElement> pileup = new UnifiedPileupElementTracker<PileupElement>();
+        for (GATKSAMRecord read : reads) {
+            pileup.add(createNewPileupElement(read, offset)); // only used to create fake pileups for testing so ancillary information is not important
+        }
+
+        return pileup;
+    }
+
    /** Factory hook so subclasses can substitute their own pileup implementation. */
    protected ReadBackedPileupImpl createNewPileup(GenomeLoc loc, PileupElementTracker<PileupElement> tracker) {
        return new ReadBackedPileupImpl(loc, tracker);
    }
+
    /** Factory hook creating a single pileup element for a read/offset pair. */
    protected PileupElement createNewPileupElement(GATKSAMRecord read, int offset) {
        return LocusIteratorByState.createPileupForReadAndOffset(read, offset);
    }    
+    
+    // --------------------------------------------------------
+    //
+    // Special 'constructors'
+    //
+    // --------------------------------------------------------
+
    /**
     * Returns a new ReadBackedPileup that is free of deletion spanning reads in this pileup.  Note that this
     * does not copy the data, so both ReadBackedPileups should not be changed.  Doesn't make an unnecessary copy
     * of the pileup (just returns this) if there are no deletions in the pileup.
     *
     * @return this pileup if it has no deletions, otherwise a new pileup without them
     */
    @Override
    public ReadBackedPileupImpl getPileupWithoutDeletions() {
        if (getNumberOfDeletions() > 0) {
            if (pileupElementTracker instanceof PerSamplePileupElementTracker) {
                // per-sample storage: filter each sample's sub-pileup recursively and reassemble
                PerSamplePileupElementTracker<PileupElement> tracker = (PerSamplePileupElementTracker<PileupElement>) pileupElementTracker;
                PerSamplePileupElementTracker<PileupElement> filteredTracker = new PerSamplePileupElementTracker<PileupElement>();

                for (final String sample : tracker.getSamples()) {
                    PileupElementTracker<PileupElement> perSampleElements = tracker.getElements(sample);
                    ReadBackedPileupImpl pileup = createNewPileup(loc, perSampleElements).getPileupWithoutDeletions();
                    filteredTracker.addElements(sample, pileup.pileupElementTracker);
                }
                return createNewPileup(loc, filteredTracker);

            } else {
                // flat storage: keep only non-deletion elements
                UnifiedPileupElementTracker<PileupElement> tracker = (UnifiedPileupElementTracker<PileupElement>) pileupElementTracker;
                UnifiedPileupElementTracker<PileupElement> filteredTracker = new UnifiedPileupElementTracker<PileupElement>();

                for (PileupElement p : tracker) {
                    if (!p.isDeletion()) {
                        filteredTracker.add(p);
                    }
                }
                return createNewPileup(loc, filteredTracker);
            }
        } else {
            return this;  // no deletions: avoid an unnecessary copy
        }
    }
+
    /**
     * Returns a new ReadBackedPileup where only one read from an overlapping read
     * pair is retained.  If the two reads in question disagree to their basecall,
     * neither read is retained.  If they agree on the base, the read with the higher
     * base quality observation is retained
     *
     * @return the newly filtered pileup
     */
    @Override
    public ReadBackedPileup getOverlappingFragmentFilteredPileup() {
        // defaults: discard discordant pairs, compare by base quality
        return getOverlappingFragmentFilteredPileup(true, true);
    }
+
    /**
     * Returns a new ReadBackedPileup where only one read from an overlapping read
     * pair is retained.  If discardDiscordant and the two reads in question disagree to their basecall,
     * neither read is retained.  Otherwise, the read with the higher
     * quality (base or mapping, depending on baseQualNotMapQual) observation is retained
     *
     * Overlap is detected by read name: two elements from reads with the same name are
     * treated as mates of one fragment.
     *
     * @param discardDiscordant  if true, drop both mates when their bases disagree
     * @param baseQualNotMapQual if true compare base qualities, otherwise mapping qualities
     * @return the newly filtered pileup
     */
    @Override
    public ReadBackedPileupImpl getOverlappingFragmentFilteredPileup(boolean discardDiscordant, boolean baseQualNotMapQual) {
        if (pileupElementTracker instanceof PerSamplePileupElementTracker) {
            // per-sample storage: filter each sample's sub-pileup recursively and reassemble
            PerSamplePileupElementTracker<PileupElement> tracker = (PerSamplePileupElementTracker<PileupElement>) pileupElementTracker;
            PerSamplePileupElementTracker<PileupElement> filteredTracker = new PerSamplePileupElementTracker<PileupElement>();

            for (final String sample : tracker.getSamples()) {
                PileupElementTracker<PileupElement> perSampleElements = tracker.getElements(sample);
                ReadBackedPileupImpl pileup = createNewPileup(loc, perSampleElements).getOverlappingFragmentFilteredPileup(discardDiscordant, baseQualNotMapQual);
                filteredTracker.addElements(sample, pileup.pileupElementTracker);
            }
            return createNewPileup(loc, filteredTracker);
        } else {
            // map read name -> surviving element; resolves overlapping mates as we scan
            Map<String, PileupElement> filteredPileup = new HashMap<String, PileupElement>();

            for (PileupElement p : pileupElementTracker) {
                String readName = p.getRead().getReadName();

                // if we've never seen this read before, life is good
                if (!filteredPileup.containsKey(readName)) {
                    filteredPileup.put(readName, p);
                } else {
                    PileupElement existing = filteredPileup.get(readName);

                    // if the reads disagree at this position, throw them both out.  Otherwise
                    // keep the element with the higher quality score
                    if (discardDiscordant && existing.getBase() != p.getBase()) {
                        filteredPileup.remove(readName);
                    } else {
                        if (baseQualNotMapQual) {
                            if (existing.getQual() < p.getQual())
                                filteredPileup.put(readName, p);
                        }
                        else {
                            if (existing.getMappingQual() < p.getMappingQual())
                                filteredPileup.put(readName, p);
                        }
                    }
                }
            }

            // rebuild a flat tracker from the surviving elements
            UnifiedPileupElementTracker<PileupElement> filteredTracker = new UnifiedPileupElementTracker<PileupElement>();
            for (PileupElement filteredElement : filteredPileup.values())
                filteredTracker.add(filteredElement);

            return createNewPileup(loc, filteredTracker);
        }
    }
+
+
    /**
     * Returns a new ReadBackedPileup that is free of mapping quality zero reads in this pileup.  Note that this
     * does not copy the data, so both ReadBackedPileups should not be changed.  Doesn't make an unnecessary copy
     * of the pileup (just returns this) if there are no MQ0 reads in the pileup.
     *
     * @return this pileup if it has no MQ0 reads, otherwise a new pileup without them
     */
    @Override
    public ReadBackedPileupImpl getPileupWithoutMappingQualityZeroReads() {
        if (getNumberOfMappingQualityZeroReads() > 0) {
            if (pileupElementTracker instanceof PerSamplePileupElementTracker) {
                // per-sample storage: filter each sample's sub-pileup recursively and reassemble
                PerSamplePileupElementTracker<PileupElement> tracker = (PerSamplePileupElementTracker<PileupElement>) pileupElementTracker;
                PerSamplePileupElementTracker<PileupElement> filteredTracker = new PerSamplePileupElementTracker<PileupElement>();

                for (final String sample : tracker.getSamples()) {
                    PileupElementTracker<PileupElement> perSampleElements = tracker.getElements(sample);
                    ReadBackedPileupImpl pileup = createNewPileup(loc, perSampleElements).getPileupWithoutMappingQualityZeroReads();
                    filteredTracker.addElements(sample, pileup.pileupElementTracker);
                }
                return createNewPileup(loc, filteredTracker);

            } else {
                // flat storage: keep only elements whose read has MQ > 0
                UnifiedPileupElementTracker<PileupElement> tracker = (UnifiedPileupElementTracker<PileupElement>) pileupElementTracker;
                UnifiedPileupElementTracker<PileupElement> filteredTracker = new UnifiedPileupElementTracker<PileupElement>();

                for (PileupElement p : tracker) {
                    if (p.getRead().getMappingQuality() > 0) {
                        filteredTracker.add(p);
                    }
                }
                return createNewPileup(loc, filteredTracker);
            }
        } else {
            return this;  // no MQ0 reads: avoid an unnecessary copy
        }
    }
+
+    public ReadBackedPileupImpl getPositiveStrandPileup() {
+        if (pileupElementTracker instanceof PerSamplePileupElementTracker) {
+            PerSamplePileupElementTracker<PileupElement> tracker = (PerSamplePileupElementTracker<PileupElement>) pileupElementTracker;
+            PerSamplePileupElementTracker<PileupElement> filteredTracker = new PerSamplePileupElementTracker<PileupElement>();
+
+            for (final String sample : tracker.getSamples()) {
+                PileupElementTracker<PileupElement> perSampleElements = tracker.getElements(sample);
+                ReadBackedPileupImpl pileup = createNewPileup(loc, perSampleElements).getPositiveStrandPileup();
+                filteredTracker.addElements(sample, pileup.pileupElementTracker);
+            }
+            return createNewPileup(loc, filteredTracker);
+        } else {
+            UnifiedPileupElementTracker<PileupElement> tracker = (UnifiedPileupElementTracker<PileupElement>) pileupElementTracker;
+            UnifiedPileupElementTracker<PileupElement> filteredTracker = new UnifiedPileupElementTracker<PileupElement>();
+
+            for (PileupElement p : tracker) {
+                if (!p.getRead().getReadNegativeStrandFlag()) {
+                    filteredTracker.add(p);
+                }
+            }
+            return createNewPileup(loc, filteredTracker);
+        }
+    }
+
+    /**
+     * Gets the pileup consisting of only reads on the negative strand.
+     *
+     * @return A read-backed pileup consisting only of reads on the negative strand.
+     */
+    public ReadBackedPileupImpl getNegativeStrandPileup() {
+        if (pileupElementTracker instanceof PerSamplePileupElementTracker) {
+            PerSamplePileupElementTracker<PileupElement> tracker = (PerSamplePileupElementTracker<PileupElement>) pileupElementTracker;
+            PerSamplePileupElementTracker<PileupElement> filteredTracker = new PerSamplePileupElementTracker<PileupElement>();
+
+            for (final String sample : tracker.getSamples()) {
+                PileupElementTracker<PileupElement> perSampleElements = tracker.getElements(sample);
+                ReadBackedPileupImpl pileup = createNewPileup(loc, perSampleElements).getNegativeStrandPileup();
+                filteredTracker.addElements(sample, pileup.pileupElementTracker);
+            }
+            return createNewPileup(loc, filteredTracker);
+        } else {
+            UnifiedPileupElementTracker<PileupElement> tracker = (UnifiedPileupElementTracker<PileupElement>) pileupElementTracker;
+            UnifiedPileupElementTracker<PileupElement> filteredTracker = new UnifiedPileupElementTracker<PileupElement>();
+
+            for (PileupElement p : tracker) {
+                if (p.getRead().getReadNegativeStrandFlag()) {
+                    filteredTracker.add(p);
+                }
+            }
+            return createNewPileup(loc, filteredTracker);
+        }
+    }
+
+    /**
+     * Gets a pileup consisting of all those elements passed by a given filter.
+     *
+     * @param filter Filter to use when testing for elements.
+     * @return a pileup without the given filtered elements.
+     */
+    public ReadBackedPileupImpl getFilteredPileup(PileupElementFilter filter) {
+        if (pileupElementTracker instanceof PerSamplePileupElementTracker) {
+            PerSamplePileupElementTracker<PileupElement> tracker = (PerSamplePileupElementTracker<PileupElement>) pileupElementTracker;
+            PerSamplePileupElementTracker<PileupElement> filteredTracker = new PerSamplePileupElementTracker<PileupElement>();
+
+            for (final String sample : tracker.getSamples()) {
+                PileupElementTracker<PileupElement> perSampleElements = tracker.getElements(sample);
+                ReadBackedPileupImpl pileup = createNewPileup(loc, perSampleElements).getFilteredPileup(filter);
+                filteredTracker.addElements(sample, pileup.pileupElementTracker);
+            }
+
+            return createNewPileup(loc, filteredTracker);
+        } else {
+            UnifiedPileupElementTracker<PileupElement> filteredTracker = new UnifiedPileupElementTracker<PileupElement>();
+
+            for (PileupElement p : pileupElementTracker) {
+                if (filter.allow(p))
+                    filteredTracker.add(p);
+            }
+
+            return createNewPileup(loc, filteredTracker);
+        }
+    }
+
    /**
     * Returns subset of this pileup that contains only bases with quality >= minBaseQ, coming from
     * reads with mapping qualities >= minMapQ. This method allocates and returns a new instance of ReadBackedPileup.
     *
     * @param minBaseQ minimum base quality to keep (inclusive); deletions bypass this check
     * @param minMapQ  minimum mapping quality to keep (inclusive)
     * @return the newly allocated, filtered pileup
     */
    @Override
    public ReadBackedPileupImpl getBaseAndMappingFilteredPileup(int minBaseQ, int minMapQ) {
        if (pileupElementTracker instanceof PerSamplePileupElementTracker) {
            // per-sample storage: filter each sample's sub-pileup recursively and reassemble
            PerSamplePileupElementTracker<PileupElement> tracker = (PerSamplePileupElementTracker<PileupElement>) pileupElementTracker;
            PerSamplePileupElementTracker<PileupElement> filteredTracker = new PerSamplePileupElementTracker<PileupElement>();

            for (final String sample : tracker.getSamples()) {
                PileupElementTracker<PileupElement> perSampleElements = tracker.getElements(sample);
                ReadBackedPileupImpl pileup = createNewPileup(loc, perSampleElements).getBaseAndMappingFilteredPileup(minBaseQ, minMapQ);
                filteredTracker.addElements(sample, pileup.pileupElementTracker);
            }

            return createNewPileup(loc, filteredTracker);
        } else {
            UnifiedPileupElementTracker<PileupElement> filteredTracker = new UnifiedPileupElementTracker<PileupElement>();

            for (PileupElement p : pileupElementTracker) {
                // deletion elements have no base quality, so only the mapping-quality test applies to them
                if (p.getRead().getMappingQuality() >= minMapQ && (p.isDeletion() || p.getQual() >= minBaseQ)) {
                    filteredTracker.add(p);
                }
            }

            return createNewPileup(loc, filteredTracker);
        }
    }
+
+    /**
+     * Returns the subset of this pileup whose bases have quality >= minBaseQ.
+     * This method allocates and returns a new instance of ReadBackedPileup.
+     *
+     * @param minBaseQ minimum base quality (inclusive)
+     * @return a new pileup containing only the sufficiently high-quality bases
+     */
+    @Override
+    public ReadBackedPileup getBaseFilteredPileup(int minBaseQ) {
+        // delegate with a mapping-quality threshold of -1, which no read can fail
+        final int anyMappingQuality = -1;
+        return getBaseAndMappingFilteredPileup(minBaseQ, anyMappingQuality);
+    }
+
+    /**
+     * Returns the subset of this pileup coming from reads with mapping quality >= minMapQ.
+     * This method allocates and returns a new instance of ReadBackedPileup.
+     *
+     * @param minMapQ minimum mapping quality (inclusive)
+     * @return a new pileup containing only elements from sufficiently well-mapped reads
+     */
+    @Override
+    public ReadBackedPileup getMappingFilteredPileup(int minMapQ) {
+        // delegate with a base-quality threshold of -1, which no base can fail
+        final int anyBaseQuality = -1;
+        return getBaseAndMappingFilteredPileup(anyBaseQuality, minMapQ);
+    }
+
+    /**
+     * Gets the set of read-group IDs represented in this pileup.
+     * Iterates every element, so this costs O(n) per call and allocates a fresh set.
+     *
+     * @return the distinct read-group IDs of the reads backing this pileup
+     */
+    @Override
+    public Collection<String> getReadGroups() {
+        Set<String> readGroups = new HashSet<String>();
+        for (PileupElement pileupElement : this)
+            // NOTE(review): unlike getPileupForReadGroup()/getSamples(), this does not guard
+            // against a null read group; a read with no RG record would NPE here -- confirm
+            // every read in a pileup is guaranteed to carry a read group.
+            readGroups.add(pileupElement.getRead().getReadGroup().getReadGroupId());
+        return readGroups;
+    }
+
+    /**
+     * Gets the pileup for a given read group.  Horrendously inefficient at this point.
+     *
+     * When targetReadGroupId is null, the elements whose reads have no read group (or no
+     * read-group ID) are selected instead.
+     *
+     * @param targetReadGroupId Identifier for the read group, or null to select reads without one.
+     * @return A read-backed pileup containing only the reads in the given read group, or null if none match.
+     */
+    @Override
+    public ReadBackedPileupImpl getPileupForReadGroup(String targetReadGroupId) {
+        if (pileupElementTracker instanceof PerSamplePileupElementTracker) {
+            // per-sample storage: recurse into each sample's sub-pileup, keeping the partitioning
+            PerSamplePileupElementTracker<PileupElement> tracker = (PerSamplePileupElementTracker<PileupElement>) pileupElementTracker;
+            PerSamplePileupElementTracker<PileupElement> filteredTracker = new PerSamplePileupElementTracker<PileupElement>();
+
+            for (final String sample : tracker.getSamples()) {
+                PileupElementTracker<PileupElement> perSampleElements = tracker.getElements(sample);
+                ReadBackedPileupImpl pileup = createNewPileup(loc, perSampleElements).getPileupForReadGroup(targetReadGroupId);
+                if (pileup != null)
+                    filteredTracker.addElements(sample, pileup.pileupElementTracker);
+            }
+            return filteredTracker.size() > 0 ? createNewPileup(loc, filteredTracker) : null;
+        } else {
+            UnifiedPileupElementTracker<PileupElement> filteredTracker = new UnifiedPileupElementTracker<PileupElement>();
+            for (PileupElement p : pileupElementTracker) {
+                GATKSAMRecord read = p.getRead();
+                if (targetReadGroupId != null) {
+                    if (read.getReadGroup() != null && targetReadGroupId.equals(read.getReadGroup().getReadGroupId()))
+                        filteredTracker.add(p);
+                } else {
+                    // a null target selects exactly the reads lacking read-group information
+                    if (read.getReadGroup() == null || read.getReadGroup().getReadGroupId() == null)
+                        filteredTracker.add(p);
+                }
+            }
+            // null (not an empty pileup) signals that nothing matched
+            return filteredTracker.size() > 0 ? createNewPileup(loc, filteredTracker) : null;
+        }
+    }
+
+    /**
+     * Gets the pileup for a set of read groups.  Horrendously inefficient at this point.
+     *
+     * When rgSet is null or empty, the elements whose reads have no read group (or no
+     * read-group ID) are selected instead.
+     *
+     * @param rgSet Set of identifiers for the read groups, or null/empty to select reads without one.
+     * @return A read-backed pileup containing only the reads in the given read groups, or null if none match.
+     */
+    @Override
+    public ReadBackedPileupImpl getPileupForReadGroups(final HashSet<String> rgSet) {
+        if (pileupElementTracker instanceof PerSamplePileupElementTracker) {
+            // per-sample storage: recurse into each sample's sub-pileup, keeping the partitioning
+            PerSamplePileupElementTracker<PileupElement> tracker = (PerSamplePileupElementTracker<PileupElement>) pileupElementTracker;
+            PerSamplePileupElementTracker<PileupElement> filteredTracker = new PerSamplePileupElementTracker<PileupElement>();
+
+            for (final String sample : tracker.getSamples()) {
+                PileupElementTracker<PileupElement> perSampleElements = tracker.getElements(sample);
+                ReadBackedPileupImpl pileup = createNewPileup(loc, perSampleElements).getPileupForReadGroups(rgSet);
+                if (pileup != null)
+                    filteredTracker.addElements(sample, pileup.pileupElementTracker);
+            }
+            return filteredTracker.size() > 0 ? createNewPileup(loc, filteredTracker) : null;
+        } else {
+            UnifiedPileupElementTracker<PileupElement> filteredTracker = new UnifiedPileupElementTracker<PileupElement>();
+            for (PileupElement p : pileupElementTracker) {
+                GATKSAMRecord read = p.getRead();
+                if (rgSet != null && !rgSet.isEmpty()) {
+                    if (read.getReadGroup() != null && rgSet.contains(read.getReadGroup().getReadGroupId()))
+                        filteredTracker.add(p);
+                } else {
+                    // a null/empty set selects exactly the reads lacking read-group information
+                    if (read.getReadGroup() == null || read.getReadGroup().getReadGroupId() == null)
+                        filteredTracker.add(p);
+                }
+            }
+            // null (not an empty pileup) signals that nothing matched
+            return filteredTracker.size() > 0 ? createNewPileup(loc, filteredTracker) : null;
+        }
+    }
+
+    /**
+     * Gets the pileup for a given lane. A read matches when its read-group ID equals laneID,
+     * or starts with laneID + "." (same lane, different sample suffix). When laneID is null,
+     * elements whose reads have no read group (or no read-group ID) are selected instead.
+     *
+     * @param laneID identifier of the lane, or null to select reads without lane information
+     * @return a new pileup with the matching elements, or null if nothing matched
+     */
+    @Override
+    public ReadBackedPileupImpl getPileupForLane(String laneID) {
+        if (pileupElementTracker instanceof PerSamplePileupElementTracker) {
+            // per-sample storage: recurse into each sample's sub-pileup, keeping the partitioning
+            PerSamplePileupElementTracker<PileupElement> tracker = (PerSamplePileupElementTracker<PileupElement>) pileupElementTracker;
+            PerSamplePileupElementTracker<PileupElement> filteredTracker = new PerSamplePileupElementTracker<PileupElement>();
+
+            for (final String sample : tracker.getSamples()) {
+                PileupElementTracker<PileupElement> perSampleElements = tracker.getElements(sample);
+                ReadBackedPileupImpl pileup = createNewPileup(loc, perSampleElements).getPileupForLane(laneID);
+                if (pileup != null)
+                    filteredTracker.addElements(sample, pileup.pileupElementTracker);
+            }
+            return filteredTracker.size() > 0 ? createNewPileup(loc, filteredTracker) : null;
+        } else {
+            UnifiedPileupElementTracker<PileupElement> filteredTracker = new UnifiedPileupElementTracker<PileupElement>();
+            for (PileupElement p : pileupElementTracker) {
+                final GATKSAMRecord read = p.getRead();
+                // BUGFIX: the original condition parsed as (rg != null && startsWith) || equals, so
+                // the equals() arm ran even when the read group was null and threw an NPE. The null
+                // check now guards both arms, and a null read-group ID is simply treated as no match.
+                final String readGroupId = read.getReadGroup() == null ? null : read.getReadGroup().getReadGroupId();
+                if (laneID != null) {
+                    if (readGroupId != null &&
+                            (readGroupId.startsWith(laneID + ".") ||   // lane is the same, but sample identifier is different
+                             readGroupId.equals(laneID)))              // in case there is no sample identifier, they have to be exactly the same
+                        filteredTracker.add(p);
+                } else {
+                    // a null laneID selects exactly the reads lacking lane information
+                    if (readGroupId == null)
+                        filteredTracker.add(p);
+                }
+            }
+            return filteredTracker.size() > 0 ? createNewPileup(loc, filteredTracker) : null;
+        }
+    }
+
+    /**
+     * Gets the set of sample names represented in this pileup. Elements whose reads carry no
+     * read group contribute a null sample name.
+     *
+     * @return the distinct sample names of this pileup's reads (may contain null)
+     */
+    public Collection<String> getSamples() {
+        if (pileupElementTracker instanceof PerSamplePileupElementTracker) {
+            // per-sample storage already knows its samples; hand back a defensive copy
+            final PerSamplePileupElementTracker<PileupElement> bySample = (PerSamplePileupElementTracker<PileupElement>) pileupElementTracker;
+            return new HashSet<String>(bySample.getSamples());
+        }
+
+        final Set<String> names = new HashSet<String>();
+        for (final PileupElement element : this) {
+            final GATKSAMRecord read = element.getRead();
+            // reads without a read group are recorded under the null sample name
+            names.add(read.getReadGroup() == null ? null : read.getReadGroup().getSample());
+        }
+        return names;
+    }
+
+    /**
+     * Returns a pileup randomly downsampled to at most desiredCoverage elements.
+     *
+     * Positions to keep are drawn uniformly without replacement from [0, getNumberOfElements()).
+     * If the pileup already fits within desiredCoverage, this same pileup is returned unchanged.
+     *
+     * TODO: delete this once the experimental downsampler stabilizes
+     *
+     * @param desiredCoverage maximum number of elements in the returned pileup
+     * @return this pileup if small enough, otherwise a freshly allocated downsampled pileup
+     */
+    @Override
+    public ReadBackedPileup getDownsampledPileup(int desiredCoverage) {
+        if (getNumberOfElements() <= desiredCoverage)
+            return this;
+
+        // randomly choose desiredCoverage distinct element indices; the TreeSet keeps them sorted
+        final TreeSet<Integer> positions = new TreeSet<Integer>();
+        for (int i = 0; i < desiredCoverage; /* no update */) {
+            if (positions.add(Utils.getRandomGenerator().nextInt(getNumberOfElements())))
+                i++;
+        }
+
+        if (pileupElementTracker instanceof PerSamplePileupElementTracker) {
+            PerSamplePileupElementTracker<PileupElement> tracker = (PerSamplePileupElementTracker<PileupElement>) pileupElementTracker;
+            PerSamplePileupElementTracker<PileupElement> filteredTracker = new PerSamplePileupElementTracker<PileupElement>();
+
+            // BUGFIX: the running element index must span the whole pileup, not restart at 0 for
+            // every sample; restarting compared the globally sampled positions against per-sample
+            // local offsets, so the result could contain far more than desiredCoverage elements.
+            int current = 0;
+            for (final String sample : tracker.getSamples()) {
+                PileupElementTracker<PileupElement> perSampleElements = tracker.getElements(sample);
+
+                UnifiedPileupElementTracker<PileupElement> filteredPileup = new UnifiedPileupElementTracker<PileupElement>();
+                for (PileupElement p : perSampleElements) {
+                    if (positions.contains(current))
+                        filteredPileup.add(p);
+                    current++;
+                }
+                filteredTracker.addElements(sample, filteredPileup);
+            }
+
+            return createNewPileup(loc, filteredTracker);
+        } else {
+            UnifiedPileupElementTracker<PileupElement> tracker = (UnifiedPileupElementTracker<PileupElement>) pileupElementTracker;
+            UnifiedPileupElementTracker<PileupElement> filteredTracker = new UnifiedPileupElementTracker<PileupElement>();
+
+            // positions is sorted, so the kept elements preserve their original order;
+            // a typed for-each replaces the original raw Iterator + cast
+            for (final int nextReadToKeep : positions)
+                filteredTracker.add(tracker.get(nextReadToKeep));
+
+            return createNewPileup(loc, filteredTracker);
+        }
+    }
+
+    /**
+     * Gets the sub-pileup restricted to the given collection of sample names.
+     *
+     * When sampleNames is null, the elements whose reads have no read group (or no sample name)
+     * are selected instead. An empty collection means "no samples" and matches nothing.
+     *
+     * @param sampleNames the samples to keep, or null to select reads without sample information
+     * @return a new pileup for those samples, or null if nothing matched
+     */
+    @Override
+    public ReadBackedPileup getPileupForSamples(Collection<String> sampleNames) {
+        if (pileupElementTracker instanceof PerSamplePileupElementTracker) {
+            PerSamplePileupElementTracker<PileupElement> tracker = (PerSamplePileupElementTracker<PileupElement>) pileupElementTracker;
+            PileupElementTracker<PileupElement> filteredElements = tracker.getElements(sampleNames);
+            return filteredElements != null ? createNewPileup(loc, filteredElements) : null;
+        } else {
+            // BUGFIX: new HashSet<String>(null) throws NPE, which made the null-sampleNames branch
+            // below unreachable; only build the lookup set when a collection was actually given.
+            HashSet<String> hashSampleNames = sampleNames == null ? null : new HashSet<String>(sampleNames);            // to speed up the "contains" access in the for loop
+            UnifiedPileupElementTracker<PileupElement> filteredTracker = new UnifiedPileupElementTracker<PileupElement>();
+            for (PileupElement p : pileupElementTracker) {
+                GATKSAMRecord read = p.getRead();
+                if (sampleNames != null) {
+                    if (read.getReadGroup() != null && hashSampleNames.contains(read.getReadGroup().getSample()))
+                        filteredTracker.add(p);
+                } else {
+                    // a null collection selects exactly the reads lacking sample information
+                    if (read.getReadGroup() == null || read.getReadGroup().getSample() == null)
+                        filteredTracker.add(p);
+                }
+            }
+            return filteredTracker.size() > 0 ? createNewPileup(loc, filteredTracker) : null;
+        }
+    }
+
+    /**
+     * Splits this pileup by sample, returning one sub-pileup per requested sample name.
+     *
+     * With per-sample storage, samples that have no elements are omitted from the map; with a
+     * unified tracker, every requested sample gets an entry (possibly an empty pileup). Elements
+     * whose reads carry no read group, or whose sample was not requested, are ignored.
+     *
+     * @param sampleNames the samples to extract
+     * @return a map from sample name to that sample's pileup
+     */
+    @Override
+    public Map<String, ReadBackedPileup> getPileupsForSamples(Collection<String> sampleNames) {
+        Map<String, ReadBackedPileup> result = new HashMap<String, ReadBackedPileup>();
+        if (pileupElementTracker instanceof PerSamplePileupElementTracker) {
+            PerSamplePileupElementTracker<PileupElement> tracker = (PerSamplePileupElementTracker<PileupElement>) pileupElementTracker;
+            for (String sample : sampleNames) {
+                PileupElementTracker<PileupElement> filteredElements = tracker.getElements(sample);
+                if (filteredElements != null)
+                    result.put(sample, createNewPileup(loc, filteredElements));
+            }
+        } else {
+            Map<String, UnifiedPileupElementTracker<PileupElement>> trackerMap = new HashMap<String, UnifiedPileupElementTracker<PileupElement>>();
+
+            for (String sample : sampleNames) {                                                                         // initialize pileups for each sample
+                UnifiedPileupElementTracker<PileupElement> filteredTracker = new UnifiedPileupElementTracker<PileupElement>();
+                trackerMap.put(sample, filteredTracker);
+            }
+            for (PileupElement p : pileupElementTracker) {                                                                         // go through all pileup elements only once and add them to the respective sample's pileup
+                GATKSAMRecord read = p.getRead();
+                if (read.getReadGroup() != null) {
+                    String sample = read.getReadGroup().getSample();
+                    UnifiedPileupElementTracker<PileupElement> tracker = trackerMap.get(sample);
+                    if (tracker != null)                                                                                // we only add the pileup the requested samples. Completely ignore the rest
+                        tracker.add(p);
+                }
+            }
+            for (Map.Entry<String, UnifiedPileupElementTracker<PileupElement>> entry : trackerMap.entrySet())                      // create the ReadBackedPileup for each sample
+                result.put(entry.getKey(), createNewPileup(loc, entry.getValue()));
+        }
+        return result;
+    }
+
+
+    /**
+     * Gets the sub-pileup for a single sample.
+     *
+     * When sampleName is null, the elements whose reads carry no read group (or no sample name)
+     * are selected instead.
+     *
+     * @param sampleName the sample to select, or null for reads without sample information
+     * @return a new pileup for that sample, or null if nothing matched
+     */
+    @Override
+    public ReadBackedPileup getPileupForSample(String sampleName) {
+        if (pileupElementTracker instanceof PerSamplePileupElementTracker) {
+            // per-sample storage can answer directly without scanning the elements
+            final PerSamplePileupElementTracker<PileupElement> bySample = (PerSamplePileupElementTracker<PileupElement>) pileupElementTracker;
+            final PileupElementTracker<PileupElement> selected = bySample.getElements(sampleName);
+            return selected == null ? null : createNewPileup(loc, selected);
+        }
+
+        final UnifiedPileupElementTracker<PileupElement> kept = new UnifiedPileupElementTracker<PileupElement>();
+        for (final PileupElement element : pileupElementTracker) {
+            final GATKSAMRecord read = element.getRead();
+            final String elementSample = read.getReadGroup() == null ? null : read.getReadGroup().getSample();
+            // a null sampleName matches exactly the elements without sample information
+            final boolean matches = sampleName == null ? elementSample == null : sampleName.equals(elementSample);
+            if (matches)
+                kept.add(element);
+        }
+        return kept.size() == 0 ? null : createNewPileup(loc, kept);
+    }
+
+    // --------------------------------------------------------
+    //
+    // iterators
+    //
+    // --------------------------------------------------------
+
+    /**
+     * The best way to access PileupElements when only the bases and quals in the pileup matter.
+     * <p/>
+     * for (PileupElement p : this) { doSomething(p); }
+     * <p/>
+     * Provides efficient, read-only iteration of the data.
+     *
+     * @return a read-only iterator over this pileup's elements
+     */
+    @Override
+    public Iterator<PileupElement> iterator() {
+        final Iterator<PileupElement> delegate = pileupElementTracker.iterator();
+        // expose a read-only view: everything forwards to the tracker's iterator except remove()
+        return new Iterator<PileupElement>() {
+            public boolean hasNext() {
+                return delegate.hasNext();
+            }
+
+            public PileupElement next() {
+                return delegate.next();
+            }
+
+            public void remove() {
+                throw new UnsupportedOperationException("Cannot remove from a pileup element iterator");
+            }
+        };
+    }
+
+    /**
+     * The best way to access PileupElements when you not only care about the bases and quals in the pileup
+     * but also need access to the index of the pileup element in the pile.
+     *
+     * for (ExtendedPileupElement p : this) { doSomething(p); }
+     *
+     * Provides efficient iteration of the data.
+     *
+     * @return
+     */
+
+    /**
+     * Simple useful routine to count the number of deletion bases in this pileup
+     *
+     * @return
+     */
+    @Override
+    public int getNumberOfDeletions() {
+        if ( nDeletions == UNINITIALIZED_CACHED_INT_VALUE ) {
+            nDeletions = 0;
+            for (PileupElement p : pileupElementTracker.unorderedIterable() ) {
+                if (p.isDeletion()) {
+                    nDeletions++;
+                }
+            }
+        }
+        return nDeletions;
+    }
+
+    @Override
+    public int getNumberOfMappingQualityZeroReads() {
+        if ( nMQ0Reads == UNINITIALIZED_CACHED_INT_VALUE ) {
+            nMQ0Reads = 0;
+
+            for (PileupElement p : pileupElementTracker.unorderedIterable()) {
+                if (p.getRead().getMappingQuality() == 0) {
+                    nMQ0Reads++;
+                }
+            }
+        }
+
+        return nMQ0Reads;
+    }
+
+    /**
+     * @return the number of physical elements in this pileup (the size of the underlying element tracker)
+     */
+    @Override
+    public int getNumberOfElements() {
+        return pileupElementTracker.size();
+    }
+
+    /**
+     * The value is computed on first call and cached afterwards.
+     *
+     * NOTE(review): this currently returns exactly pileupElementTracker.size(), the same value as
+     * getNumberOfElements(); presumably "abstract" vs "physical" elements differ in a subclass or
+     * future implementation -- confirm.
+     *
+     * @return the number of abstract elements in this pileup
+     */
+    @Override
+    public int depthOfCoverage() {
+        if (depthOfCoverage == UNINITIALIZED_CACHED_INT_VALUE) {
+            depthOfCoverage = pileupElementTracker.size();
+        }
+        return depthOfCoverage;
+    }
+
+    /**
+     * @return true if there are 0 elements in the pileup, false otherwise
+     */
+    @Override
+    public boolean isEmpty() {
+        return getNumberOfElements() == 0;
+    }
+
+
+    /**
+     * @return the genomic location this pileup covers
+     */
+    @Override
+    public GenomeLoc getLocation() {
+        return loc;
+    }
+
+    /**
+     * Get counts of A, C, G, T in order, which returns a int[4] vector with counts according
+     * to BaseUtils.simpleBaseToBaseIndex for each base.
+     *
+     * Deletion elements and bases without a valid index (simpleBaseToBaseIndex returns -1)
+     * are not counted.
+     *
+     * @return a freshly allocated int[4] of base counts
+     */
+    @Override
+    public int[] getBaseCounts() {
+        int[] counts = new int[4];
+
+        // TODO -- can be optimized with .unorderedIterable()
+        if (pileupElementTracker instanceof PerSamplePileupElementTracker) {
+            PerSamplePileupElementTracker<PileupElement> tracker = (PerSamplePileupElementTracker<PileupElement>) pileupElementTracker;
+            // sum the per-sample count vectors element-wise
+            for (final String sample : tracker.getSamples()) {
+                int[] countsBySample = createNewPileup(loc, tracker.getElements(sample)).getBaseCounts();
+                for (int i = 0; i < counts.length; i++)
+                    counts[i] += countsBySample[i];
+            }
+        } else {
+            for (PileupElement pile : this) {
+                // skip deletion sites
+                if (!pile.isDeletion()) {
+                    int index = BaseUtils.simpleBaseToBaseIndex((char) pile.getBase());
+                    if (index != -1)
+                        counts[index]++;
+                }
+            }
+        }
+
+        return counts;
+    }
+
+    /**
+     * Formats this pileup as one pileup-format line: chromosome name, coordinate, reference
+     * base, read bases, and base qualities.
+     *
+     * @param ref the reference base at this location
+     * @return the formatted pileup line
+     */
+    @Override
+    public String getPileupString(Character ref) {
+        final GenomeLoc position = getLocation();
+        return String.format("%s %s %c %s %s",
+                position.getContig(), position.getStart(),  // chromosome name and coordinate
+                ref,                                        // reference base
+                new String(getBases()),                     // observed bases
+                getQualsString());                          // base qualities, 33 + qual encoded
+    }
+
+    // --------------------------------------------------------
+    //
+    // Convenience functions that may be slow
+    //
+    // --------------------------------------------------------
+
+    /**
+     * Returns a list of the reads in this pileup. Note this call costs O(n) and allocates a
+     * fresh list each time.
+     *
+     * @return a new list containing each element's read, in iteration order
+     */
+    @Override
+    public List<GATKSAMRecord> getReads() {
+        final List<GATKSAMRecord> reads = new ArrayList<GATKSAMRecord>(getNumberOfElements());
+        for (final PileupElement element : this)
+            reads.add(element.getRead());
+        return reads;
+    }
+
+    /**
+     * Counts the elements that sit immediately before the start of a deletion.
+     *
+     * @return the number of elements for which isBeforeDeletionStart() is true
+     */
+    @Override
+    public int getNumberOfDeletionsAfterThisElement() {
+        int total = 0;
+        for (final PileupElement element : pileupElementTracker.unorderedIterable()) {
+            if (element.isBeforeDeletionStart())
+                total++;
+        }
+        return total;
+    }
+
+    /**
+     * Counts the elements that sit immediately before an insertion.
+     *
+     * @return the number of elements for which isBeforeInsertion() is true
+     */
+    @Override
+    public int getNumberOfInsertionsAfterThisElement() {
+        int total = 0;
+        for (final PileupElement element : pileupElementTracker.unorderedIterable()) {
+            if (element.isBeforeInsertion())
+                total++;
+        }
+        return total;
+    }
+    /**
+     * Returns a list of the offsets in this pileup. Note this call costs O(n) and allocates a
+     * fresh list each time.
+     *
+     * @return a new list of each element's offset, in the tracker's unordered iteration order
+     */
+    @Override
+    public List<Integer> getOffsets() {
+        final List<Integer> offsets = new ArrayList<Integer>(getNumberOfElements());
+        for (final PileupElement element : pileupElementTracker.unorderedIterable())
+            offsets.add(element.getOffset());
+        return offsets;
+    }
+
+    /**
+     * Returns an array of the bases in this pileup. Note this call costs O(n) and allocates a
+     * fresh array each time.
+     *
+     * @return a new array with one base per element, in iteration order
+     */
+    @Override
+    public byte[] getBases() {
+        final byte[] bases = new byte[getNumberOfElements()];
+        int i = 0;
+        for (final PileupElement element : pileupElementTracker)
+            bases[i++] = element.getBase();
+        return bases;
+    }
+
+    /**
+     * Returns an array of the quals in this pileup. Note this call costs O(n) and allocates a
+     * fresh array each time.
+     *
+     * @return a new array with one base quality per element, in iteration order
+     */
+    @Override
+    public byte[] getQuals() {
+        final byte[] quals = new byte[getNumberOfElements()];
+        int i = 0;
+        for (final PileupElement element : pileupElementTracker)
+            quals[i++] = element.getQual();
+        return quals;
+    }
+
+    /**
+     * Gets an array of the mapping qualities of the reads backing this pileup.
+     * Costs O(n) and allocates a fresh array each time.
+     *
+     * @return a new array with one mapping quality per element, in iteration order
+     */
+    @Override
+    public int[] getMappingQuals() {
+        final int[] mapQuals = new int[getNumberOfElements()];
+        int i = 0;
+        for (final PileupElement element : pileupElementTracker)
+            mapQuals[i++] = element.getRead().getMappingQuality();
+        return mapQuals;
+    }
+
+    static String quals2String(byte[] quals) {
+        StringBuilder qualStr = new StringBuilder();
+        for (int qual : quals) {
+            qual = Math.min(qual, 63);              // todo: fixme, this isn't a good idea
+            char qualChar = (char) (33 + qual);     // todo: warning, this is illegal for qual > 63
+            qualStr.append(qualChar);
+        }
+
+        return qualStr.toString();
+    }
+
+    /**
+     * Encodes this pileup's base qualities as a display string via quals2String.
+     *
+     * @return the encoded base-quality string
+     */
+    private String getQualsString() {
+        return quals2String(getQuals());
+    }
+
+    /**
+     * Returns a new ReadBackedPileup that is sorted by start coordinate of the reads
+     * (ties broken by read name).
+     *
+     * NOTE(review): the TreeSet discards elements that compare equal -- two elements whose reads
+     * share both alignment start and read name keep only one entry. Confirm this deduplication
+     * is intended.
+     *
+     * @return a new pileup whose elements are ordered by read alignment start
+     */
+    @Override
+    public ReadBackedPileup getStartSortedPileup() {
+
+        final TreeSet<PileupElement> sortedElements = new TreeSet<PileupElement>(new Comparator<PileupElement>() {
+            @Override
+            public int compare(PileupElement element1, PileupElement element2) {
+                final int difference = element1.getRead().getAlignmentStart() - element2.getRead().getAlignmentStart();
+                return difference != 0 ? difference : element1.getRead().getReadName().compareTo(element2.getRead().getReadName());
+            }
+        });
+
+        // pour every element, from either storage layout, into the sorted set
+        if (pileupElementTracker instanceof PerSamplePileupElementTracker) {
+            PerSamplePileupElementTracker<PileupElement> tracker = (PerSamplePileupElementTracker<PileupElement>) pileupElementTracker;
+
+            for (final String sample : tracker.getSamples()) {
+                PileupElementTracker<PileupElement> perSampleElements = tracker.getElements(sample);
+                for (PileupElement pile : perSampleElements)
+                    sortedElements.add(pile);
+            }
+        }
+        else {
+            UnifiedPileupElementTracker<PileupElement> tracker = (UnifiedPileupElementTracker<PileupElement>) pileupElementTracker;
+            for (PileupElement pile : tracker)
+                sortedElements.add(pile);
+        }
+
+        // the result is always backed by a unified tracker, in sorted order
+        UnifiedPileupElementTracker<PileupElement> sortedTracker = new UnifiedPileupElementTracker<PileupElement>();
+        for (PileupElement pile : sortedElements)
+            sortedTracker.add(pile);
+
+        return createNewPileup(loc, sortedTracker);
+    }
+
+    /**
+     * Groups this pileup's elements by fragment via FragmentUtils.create.
+     *
+     * @return the fragment collection built from this pileup
+     */
+    @Override
+    public FragmentCollection<PileupElement> toFragments() {
+        return FragmentUtils.create(this);
+    }
+
+    /**
+     * @return a new ReadBackedPileupImpl at the same location, backed by a copy of this
+     *         pileup's element tracker
+     */
+    @Override
+    public ReadBackedPileup copy() {
+        return new ReadBackedPileupImpl(loc, pileupElementTracker.copy());
+    }
+}
diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/pileup2/Notes b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/pileup2/Notes
similarity index 100%
rename from public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/pileup2/Notes
rename to public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/pileup2/Notes
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/progressmeter/ProgressMeter.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/progressmeter/ProgressMeter.java
new file mode 100644
index 0000000..07d5075
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/progressmeter/ProgressMeter.java
@@ -0,0 +1,465 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.progressmeter;
+
+import com.google.java.contract.Ensures;
+import com.google.java.contract.Invariant;
+import com.google.java.contract.Requires;
+import org.apache.log4j.Logger;
+import org.broadinstitute.gatk.utils.*;
+import org.broadinstitute.gatk.utils.exceptions.UserException;
+
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.FileOutputStream;
+import java.io.PrintStream;
+import java.util.Arrays;
+import java.util.List;
+
+/**
+ * A meter measuring progress on a calculation through a set of genomic regions that can
+ * print a few key metrics to a logger and optionally to a file
+ *
+ * The key information for assessing progress is a set of genome locs describing the total
+ * set of regions we will process.  Whenever (at reasonable intervals) the processing unit
+ * can called notifyOfProgress and this logger may, depending on the metering delay, print
+ * a log message with the following metrics:
+ *
+ *      -- Number of processed X (X = processing units)
+ *      -- Runtime per.1M X
+ *      -- Percent of regions to be processed completed
+ *      -- The estimated total runtime based on previous performance
+ *      -- The estimated time remaining for the entire process
+ *
+ * The optional file log an expanded set of metrics in tabular format
+ * suitable for subsequent analysis in R.
+ *
+ * This class is -- and MUST BE -- thread-safe for use in the GATK.  Multiple independent
+ * threads executing processors will be calling notifyOfProgress() simultaneously and this
+ * class does (and MUST) properly sort out the timings of logs without interlacing outputs
+ * because of these threads.
+ *
+ * Consequently, the fundamental model for when to print the logs is time based.  We basically
+ * print a meter message every X seconds, minutes, hours, whatever is appropriate based on the
+ * estimated remaining runtime.
+ *
+ * @author depristo
+ * @since 2010 maybe, but written in 09/12 for clarity
+ */
+ at Invariant({
+        "targetSizeInBP >= 0",
+        "progressPrintFrequency > 0"
+})
+public class ProgressMeter {
+    protected static final Logger logger = Logger.getLogger(ProgressMeter.class);
+
+    // --------------------------------------------------------------------------------
+    // static constants controlling overall system behavior
+    // --------------------------------------------------------------------------------
+
+    /**
+     * Min. milliseconds after we start up the meter before we will print our first meter message
+     */
+    private final static long MIN_ELAPSED_TIME_BEFORE_FIRST_PROGRESS = 30 * 1000;
+
+    /**
+     * How often should we print performance logging information, when we are sending this
+     * information to a file?  Not dynamically updated as the logger meter is.
+     */
+    private final static long PERFORMANCE_LOG_PRINT_FREQUENCY = 10 * 1000;
+
+    private final static double TWO_HOURS_IN_SECONDS    =  2.0 * 60.0 * 60.0;
+    private final static double TWELVE_HOURS_IN_SECONDS = 12.0 * 60.0 * 60.0;
+
+    // --------------------------------------------------------------------------------
+    // Variables we updating during running
+    // --------------------------------------------------------------------------------
+
+    /**
+     * When was the last time we printed progress log?  In milleseconds
+     */
+    private long lastProgressPrintTime = -1;
+
+    /**
+     * How frequently should we be printing our meter messages?  Dynamically updated
+     * depending on how long we think the run has left.
+     */
+    private long progressPrintFrequency = 10 * 1000; // default value
+
+    /**
+     * When was the last time we printed to the performance log?  In millseconds
+     */
+    private long lastPerformanceLogPrintTime = -1;
+
+    // --------------------------------------------------------------------------------
+    // final variables fixed at object creation time
+    // --------------------------------------------------------------------------------
+
+    /**
+     * The set of genome locs describing the total region we are processing with
+     * this GATK run.  Used to determine how close we are to completing the run
+     */
+    private final GenomeLocSortedSet regionsBeingProcessed;
+
+    /**
+     * Size, in bp, of the area we are processing, derived from regionsBeingProcessed.
+     * Updated once in the system in initial for performance reasons
+     */
+    private final long targetSizeInBP;
+
+    /**
+     * A string describing the type of units being processes, so we can say things like
+     * "we are running at X processingUnitName per second"
+     */
+    private final String processingUnitName;
+
+    /**
+     * The space allocated to #processingUnitName in the output
+     */
+    private final int processingUnitWidth;
+
+    /**
+     * The format string used for progress lines
+     */
+    private final String progressFormatString;
+
+    /**
+     * A potentially null file where we print a supplementary, R readable performance log
+     * file.
+     */
+    private final PrintStream performanceLog;
+
+    /** We use the SimpleTimer to time our run */
+    private final SimpleTimer timer = new SimpleTimer();
+
+    private GenomeLoc maxGenomeLoc = null;
+    private Position position = new Position(PositionStatus.STARTING);
+    private long nTotalRecordsProcessed = 0;
+
+    /**
+     * The elapsed time in nanosecond, updated by the daemon thread, so that
+     * we don't pay any system call overhead to determine the the elapsed time.
+     */
+    private long elapsedTimeInNanosecondUpdatedByDaemon = 0;
+
+    final ProgressMeterDaemon progressMeterDaemon;
+
+    /**
+     * Create a new ProgressMeter
+     *
+     * Note that progress meter isn't started until the client calls start()
+     *
+     * @param performanceLogFile an optional performance log file where a table of performance logs will be written
+     * @param processingUnitName the name of the unit type being processed, suitable for saying X seconds per processingUnitName
+     * @param processingIntervals the intervals being processed
+     */
+    public ProgressMeter(final File performanceLogFile,
+                         final String processingUnitName,
+                         final GenomeLocSortedSet processingIntervals) {
+        this(performanceLogFile, processingUnitName, processingIntervals, ProgressMeterDaemon.DEFAULT_POLL_FREQUENCY_MILLISECONDS);
+    }
+
+    protected ProgressMeter(final File performanceLogFile,
+                            final String processingUnitName,
+                            final GenomeLocSortedSet processingIntervals,
+                            final long pollingFrequency) {
+        if ( processingUnitName == null ) throw new IllegalArgumentException("processingUnitName cannot be null");
+        if ( processingIntervals == null ) throw new IllegalArgumentException("Target intervals cannot be null");
+
+        this.processingUnitName = processingUnitName;
+        this.regionsBeingProcessed = processingIntervals;
+        this.processingUnitWidth = Math.max(processingUnitName.length(), "processed".length());
+        this.progressFormatString = String.format("%%15s   %%%1$ds   %%7s   %%%1$ds      %%5.1f%%%%   %%7s   %%9s", processingUnitWidth);
+
+        // setup the performance logger output, if requested
+        if ( performanceLogFile != null ) {
+            try {
+                this.performanceLog = new PrintStream(new FileOutputStream(performanceLogFile));
+                final List<String> pLogHeader = Arrays.asList("elapsed.time", "units.processed", "processing.speed",
+                        "bp.processed", "bp.speed", "genome.fraction.complete", "est.total.runtime", "est.time.remaining");
+                performanceLog.println(Utils.join("\t", pLogHeader));
+            } catch (FileNotFoundException e) {
+                throw new UserException.CouldNotCreateOutputFile(performanceLogFile, e);
+            }
+        } else {
+            performanceLog = null;
+        }
+
+        // cached for performance reasons
+        targetSizeInBP = processingIntervals.coveredSize();
+
+        // start up the timer
+        progressMeterDaemon = new ProgressMeterDaemon(this, pollingFrequency);
+    }
+
+    public ProgressMeterDaemon getProgressMeterDaemon() {
+        return progressMeterDaemon;
+    }
+
+    /**
+     * Start up the progress meter, printing initialization message and starting up the
+     * daemon thread for periodic printing.
+     */
+    @Requires("progressMeterDaemon != null")
+    public synchronized void start() {
+        timer.start();
+        lastProgressPrintTime = timer.currentTime();
+        final String formatString = String.format("%%15s | %%%1$ds | %%7s | %%%1$ds | %%9s | %%7s | %%9s", processingUnitWidth);
+
+        logger.info("[INITIALIZATION COMPLETE; STARTING PROCESSING]");
+        logger.info(String.format(formatString, "", "processed", "time", "per 1M", "", "total", "remaining"));
+        logger.info(String.format(formatString, "Location", processingUnitName, "elapsed", processingUnitName,
+                "completed", "runtime", "runtime"));
+
+        progressMeterDaemon.start();
+    }
+
+    /**
+     * @return the current runtime in nanoseconds
+     */
+    @Ensures("result >= 0")
+    public long getRuntimeInNanoseconds() {
+        return timer.getElapsedTimeNano();
+    }
+
+    /**
+     * This function is just like getRuntimeInNanoseconds but it doesn't actually query the
+     * system timer to determine the value, but rather uses a local variable in this meter
+     * that is updated by the daemon thread.  This means that the result is ridiculously imprecise
+     * for a nanosecond value (as it's only updated each pollingFrequency of the daemon) but
+     * it is free for clients to access, which can be critical when one wants to do tests like:
+     *
+     * for some work unit:
+     *   do unit if getRuntimeInNanosecondsUpdatedPeriodically < X
+     *
+     * and have this operation eventually timeout but don't want to pay the system call time to
+     * ensure that the loop exits as soon as the elapsed time exceeds X
+     *
+     * @return the current runtime in nanoseconds
+     */
+    @Ensures("result >= 0")
+    public long getRuntimeInNanosecondsUpdatedPeriodically() {
+        return elapsedTimeInNanosecondUpdatedByDaemon;
+    }
+
+    /**
+     * Update the period runtime variable to the current runtime in nanoseconds.  Should only
+     * be called by the daemon thread
+     */
+    protected void updateElapsedTimeInNanoseconds() {
+        elapsedTimeInNanosecondUpdatedByDaemon = getRuntimeInNanoseconds();
+    }
+
+
+
+    /**
+     * Utility routine that prints out process information (including timing) every N records or
+     * every M seconds, for N and M set in global variables.
+     *
+     * Synchronized to ensure that even with multiple threads calling notifyOfProgress we still
+     * get one clean stream of meter logs.
+     *
+     * Note this thread doesn't actually print progress, unless must print is true, but just registers
+     * the progress itself.  A separate printing daemon periodically polls the meter to print out
+     * progress
+     *
+     * @param loc Current location, can be null if you are at the end of the processing unit.  Must
+     *            have size == 1 (cannot be multiple bases in size).
+     * @param nTotalRecordsProcessed the total number of records we've processed
+     */
+    public synchronized void notifyOfProgress(final GenomeLoc loc, final long nTotalRecordsProcessed) {
+        if ( nTotalRecordsProcessed < 0 ) throw new IllegalArgumentException("nTotalRecordsProcessed must be >= 0");
+        if ( loc.size() != 1 ) throw new IllegalArgumentException("GenomeLoc must have size == 1 but got " + loc);
+
+        // weird comparison to ensure that loc == null (in unmapped reads) is keep before maxGenomeLoc == null (on startup)
+        this.maxGenomeLoc = loc == null ? loc : (maxGenomeLoc == null ? loc : loc.max(maxGenomeLoc));
+        this.nTotalRecordsProcessed = Math.max(this.nTotalRecordsProcessed, nTotalRecordsProcessed);
+
+        // a pretty name for our position
+        this.position = maxGenomeLoc == null ? new Position(PositionStatus.IN_UNMAPPED_READS) : new Position(maxGenomeLoc);
+    }
+
+    /**
+     * Describes the status of this position marker, such as starting up, done, in the unmapped reads,
+     * or somewhere on the genome
+     */
+    private enum PositionStatus {
+        STARTING("Starting"),
+        DONE("done"),
+        IN_UNMAPPED_READS("unmapped reads"),
+        ON_GENOME(null);
+
+        public final String message;
+
+        private PositionStatus(String message) {
+            this.message = message;
+        }
+    }
+
+    /**
+     * A pair of position status and the genome loc, if necessary.  Used to get a
+     * status update message as needed, without the computational cost of formatting
+     * the genome loc string every time a progress notification happens (which is almost
+     * always not printed)
+     */
+    private class Position {
+        final PositionStatus type;
+        final GenomeLoc maybeLoc;
+
+        /**
+         * Create a position object of any type != ON_GENOME
+         * @param type
+         */
+        @Requires({"type != null", "type != PositionStatus.ON_GENOME"})
+        private Position(PositionStatus type) {
+            this.type = type;
+            this.maybeLoc = null;
+        }
+
+        /**
+         * Create a position object of type ON_GENOME at genomeloc loc
+         * @param loc
+         */
+        @Requires("loc != null")
+        private Position(GenomeLoc loc) {
+            this.type = PositionStatus.ON_GENOME;
+            this.maybeLoc = loc;
+        }
+
+        /**
+         * @return a human-readable representation of this position
+         */
+        private String getMessage() {
+            if ( type == PositionStatus.ON_GENOME )
+                return maxGenomeLoc.getContig() + ":" + maxGenomeLoc.getStart();
+            else
+                return type.message;
+        }
+    }
+
+    /**
+     * Actually try to print out progress
+     *
+     * This function may print out if the progress print is due, but if not enough time has elapsed
+     * since the last print we will not print out information.
+     *
+     * @param mustPrint if true, progress will be printed regardless of the last time we printed progress
+     */
+    protected synchronized void printProgress(final boolean mustPrint) {
+        final long curTime = timer.currentTime();
+        final boolean printProgress = mustPrint || maxElapsedIntervalForPrinting(curTime, lastProgressPrintTime, progressPrintFrequency);
+        final boolean printLog = performanceLog != null && maxElapsedIntervalForPrinting(curTime, lastPerformanceLogPrintTime, PERFORMANCE_LOG_PRINT_FREQUENCY);
+
+        if ( printProgress || printLog ) {
+            final ProgressMeterData progressData = takeProgressSnapshot(maxGenomeLoc, nTotalRecordsProcessed);
+
+            final AutoFormattingTime elapsed = new AutoFormattingTime(progressData.getElapsedSeconds(), 5, 1);
+            final AutoFormattingTime bpRate = new AutoFormattingTime(progressData.secondsPerMillionBP());
+            final AutoFormattingTime unitRate = new AutoFormattingTime(progressData.secondsPerMillionElements());
+            final double fractionGenomeTargetCompleted = progressData.calculateFractionGenomeTargetCompleted(targetSizeInBP);
+            final AutoFormattingTime estTotalRuntime = new AutoFormattingTime(elapsed.getTimeInSeconds() / fractionGenomeTargetCompleted, 5, 1);
+            final AutoFormattingTime timeToCompletion = new AutoFormattingTime(estTotalRuntime.getTimeInSeconds() - elapsed.getTimeInSeconds());
+
+            if ( printProgress ) {
+                lastProgressPrintTime = curTime;
+                updateLoggerPrintFrequency(estTotalRuntime.getTimeInSeconds());
+
+                logger.info(String.format(progressFormatString,
+                        position.getMessage(), progressData.getUnitsProcessed()*1.0, elapsed, unitRate,
+                        100*fractionGenomeTargetCompleted, estTotalRuntime, timeToCompletion));
+
+            }
+
+            if ( printLog ) {
+                lastPerformanceLogPrintTime = curTime;
+                performanceLog.printf("%.2f\t%d\t%.2e\t%d\t%.2e\t%.2e\t%.2f\t%.2f%n",
+                        elapsed.getTimeInSeconds(), progressData.getUnitsProcessed(), unitRate.getTimeInSeconds(),
+                        progressData.getBpProcessed(), bpRate.getTimeInSeconds(),
+                        fractionGenomeTargetCompleted, estTotalRuntime.getTimeInSeconds(),
+                        timeToCompletion.getTimeInSeconds());
+            }
+        }
+    }
+
+    /**
+     * Determine, based on remaining runtime, how often to print the meter
+     *
+     * @param totalRuntimeSeconds kinda obvious, no?
+     */
+    private void updateLoggerPrintFrequency(final double totalRuntimeSeconds) {
+        // dynamically change the update rate so that short running jobs receive frequent updates while longer jobs receive fewer updates
+        if ( totalRuntimeSeconds > TWELVE_HOURS_IN_SECONDS )
+            progressPrintFrequency = 60 * 1000; // in milliseconds
+        else if ( totalRuntimeSeconds > TWO_HOURS_IN_SECONDS )
+            progressPrintFrequency = 30 * 1000; // in milliseconds
+        else
+            progressPrintFrequency = 10 * 1000; // in milliseconds
+    }
+
+    /**
+     * Creates a new ProgressData object recording a snapshot of our progress at this instant
+     *
+     * @param loc our current position.  If null, assumes we are done traversing
+     * @param nTotalRecordsProcessed the total number of records we've processed
+     * @return
+     */
+    private ProgressMeterData takeProgressSnapshot(final GenomeLoc loc, final long nTotalRecordsProcessed) {
+        // null -> end of processing
+        final long bpProcessed = loc == null ? targetSizeInBP : regionsBeingProcessed.sizeBeforeLoc(loc);
+        return new ProgressMeterData(timer.getElapsedTime(), nTotalRecordsProcessed, bpProcessed);
+    }
+
+    /**
+     * Should be called when processing is done
+     */
+    public void notifyDone(final long nTotalRecordsProcessed) {
+        // print out the progress meter
+        this.nTotalRecordsProcessed = nTotalRecordsProcessed;
+        this.position = new Position(PositionStatus.DONE);
+        printProgress(true);
+
+        logger.info(String.format("Total runtime %.2f secs, %.2f min, %.2f hours",
+                timer.getElapsedTime(), timer.getElapsedTime() / 60, timer.getElapsedTime() / 3600));
+
+        if ( performanceLog != null )
+            performanceLog.close();
+
+        // shutdown our daemon thread
+        progressMeterDaemon.done();
+    }
+
+    /**
+     * @param curTime (current runtime, in millisecs)
+     * @param lastPrintTime the last time we printed, in machine milliseconds
+     * @param printFreq maximum permitted difference between last print and current times
+     *
+     * @return true if the maximum interval (in millisecs) has passed since the last printing
+     */
+    private boolean maxElapsedIntervalForPrinting(final long curTime, long lastPrintTime, long printFreq) {
+        final long elapsed = curTime - lastPrintTime;
+        return elapsed > printFreq && elapsed > MIN_ELAPSED_TIME_BEFORE_FIRST_PROGRESS;
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/progressmeter/ProgressMeterDaemon.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/progressmeter/ProgressMeterDaemon.java
new file mode 100644
index 0000000..850b9f1
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/progressmeter/ProgressMeterDaemon.java
@@ -0,0 +1,111 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.progressmeter;
+
+/**
+ * Daemon thread that periodically prints the progress of the progress meter
+ *
+ * User: depristo
+ * Date: 12/4/12
+ * Time: 9:16 PM
+ */
+public final class ProgressMeterDaemon extends Thread {
+    public final static long DEFAULT_POLL_FREQUENCY_MILLISECONDS = 10 * 1000;
+
+    /**
+     * How frequently should we poll and print progress?
+     */
+    private final long pollFrequencyMilliseconds;
+
+    /**
+     * How long are we waiting between print progress calls are issued?
+     * @return the time in milliseconds between progress meter calls
+     */
+    private long getPollFrequencyMilliseconds() {
+        return pollFrequencyMilliseconds;
+    }
+
+    /**
+     * Are we to continue periodically printing status, or should we shut down?
+     */
+    boolean done = false;
+
+    /**
+     * The meter we will call print on
+     */
+    final ProgressMeter meter;
+
+    /**
+     * Create a new ProgressMeterDaemon printing progress for meter
+     * @param meter the progress meter to print progress of
+     */
+    public ProgressMeterDaemon(final ProgressMeter meter, final long pollFrequencyMilliseconds) {
+        if ( meter == null ) throw new IllegalArgumentException("meter cannot be null");
+        if ( pollFrequencyMilliseconds <= 0 ) throw new IllegalArgumentException("pollFrequencyMilliseconds must be greater than 0 but got " + pollFrequencyMilliseconds);
+
+        this.meter = meter;
+        this.pollFrequencyMilliseconds = pollFrequencyMilliseconds;
+        setDaemon(true);
+        setName("ProgressMeterDaemon");
+    }
+
+    public ProgressMeterDaemon(final ProgressMeter meter) {
+        this(meter, DEFAULT_POLL_FREQUENCY_MILLISECONDS);
+    }
+
+    /**
+     * Tells this daemon thread to shutdown at the next opportunity, as the progress
+     * metering is complete.
+     */
+    public final void done() {
+        this.done = true;
+    }
+
+    /**
+     * Is this daemon thread done?
+     * @return true if done, false otherwise
+     */
+    public boolean isDone() {
+        return done;
+    }
+
+    /**
+     * Start up the ProgressMeterDaemon, polling every tens of seconds to print, if
+     * necessary, the provided progress meter.  Never exits until the JVM is complete,
+     * or done() is called, as the thread is a daemon thread
+     */
+    public void run() {
+        while (! done) {
+            meter.printProgress(false);
+            meter.updateElapsedTimeInNanoseconds();
+            try {
+                Thread.sleep(getPollFrequencyMilliseconds());
+            } catch (InterruptedException e) {
+                throw new RuntimeException(e);
+            }
+        }
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/progressmeter/ProgressMeterData.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/progressmeter/ProgressMeterData.java
new file mode 100644
index 0000000..0b8e984
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/progressmeter/ProgressMeterData.java
@@ -0,0 +1,79 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.progressmeter;
+
+import com.google.java.contract.Ensures;
+import com.google.java.contract.Requires;
+
/**
 * A snapshot of traversal performance taken at a single instant, suitable
 * for storage and later analysis.
 *
 * Contract (previously expressed via cofoja annotations, documented here
 * instead so the class has no runtime-agent dependency): all three values
 * are expected to be non-negative, and every accessor/derived metric
 * returns a non-negative result for such inputs.
 */
class ProgressMeterData {
    /** Wall-clock seconds elapsed at the moment of the snapshot. */
    private final double elapsedSeconds;
    /** Number of traversal units processed so far. */
    private final long unitsProcessed;
    /** Number of base pairs processed so far. */
    private final long bpProcessed;

    /**
     * @param elapsedSeconds  elapsed wall-clock seconds, expected >= 0
     * @param unitsProcessed  traversal units processed, expected >= 0
     * @param bpProcessed     base pairs processed, expected >= 0
     */
    public ProgressMeterData(double elapsedSeconds, long unitsProcessed, long bpProcessed) {
        this.elapsedSeconds = elapsedSeconds;
        this.unitsProcessed = unitsProcessed;
        this.bpProcessed = bpProcessed;
    }

    public double getElapsedSeconds() {
        return elapsedSeconds;
    }

    public long getUnitsProcessed() {
        return unitsProcessed;
    }

    public long getBpProcessed() {
        return bpProcessed;
    }

    /** How long in seconds to process 1M traversal units? */
    public double secondsPerMillionElements() {
        // guard against division by zero when nothing has been processed yet
        final long denominator = Math.max(unitsProcessed, 1);
        return (elapsedSeconds * 1000000.0) / denominator;
    }

    /** How long in seconds to process 1M bp on the genome? */
    public double secondsPerMillionBP() {
        final long denominator = Math.max(bpProcessed, 1);
        return (elapsedSeconds * 1000000.0) / denominator;
    }

    /** What fraction of the target intervals have we covered? */
    public double calculateFractionGenomeTargetCompleted(final long targetSize) {
        final long denominator = Math.max(targetSize, 1);
        return (1.0 * bpProcessed) / denominator;
    }
}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/recalibration/EventType.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/recalibration/EventType.java
new file mode 100644
index 0000000..5a74f72
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/recalibration/EventType.java
@@ -0,0 +1,72 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.recalibration;
+
/**
 * The types of base-calling error events tracked by base quality score recalibration.
 */
public enum EventType {
    BASE_SUBSTITUTION("M", "Base Substitution"),
    BASE_INSERTION("I", "Base Insertion"),
    BASE_DELETION("D", "Base Deletion");

    /** Short, single-character representation (e.g. used in report tables). */
    private final String representation;
    /** Human-readable name returned by prettyPrint(). */
    private final String longRepresentation;

    /**
     * values() clones the constant array on every call; cache one copy since the
     * eventFrom() lookups may be invoked in tight loops.  Never exposed to callers,
     * so the shared array cannot be mutated from outside.
     */
    private static final EventType[] CACHED_VALUES = EventType.values();

    private EventType(String representation, String longRepresentation) {
        this.representation = representation;
        this.longRepresentation = longRepresentation;
    }

    /**
     * Get the EventType corresponding to its ordinal index
     * @param index an ordinal index in [0, values().length)
     * @return the event type corresponding to ordinal index
     */
    public static EventType eventFrom(int index) {
        return CACHED_VALUES[index];
    }

    /**
     * Get the EventType with short string representation
     * @throws IllegalArgumentException if representation doesn't correspond to one of EventType
     * @param representation short string representation of the event
     * @return an EventType
     */
    public static EventType eventFrom(String representation) {
        for ( final EventType eventType : CACHED_VALUES )
            if ( eventType.representation.equals(representation) )
                return eventType;

        throw new IllegalArgumentException(String.format("Event %s does not exist.", representation));
    }

    @Override
    public String toString() {
        return representation;
    }

    /** @return the long, human-readable name of this event type */
    public String prettyPrint() {
        return longRepresentation;
    }
}
\ No newline at end of file
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/refdata/RODRecordListImpl.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/refdata/RODRecordListImpl.java
new file mode 100644
index 0000000..7b82ae2
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/refdata/RODRecordListImpl.java
@@ -0,0 +1,129 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.refdata;
+
+import org.broadinstitute.gatk.utils.refdata.utils.GATKFeature;
+import org.broadinstitute.gatk.utils.refdata.utils.RODRecordList;
+import org.broadinstitute.gatk.utils.GenomeLoc;
+import org.broadinstitute.gatk.utils.HasGenomeLocation;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+
+import java.util.*;
+
+/**
+ * Default {@link RODRecordList} implementation: an ordered list of {@link GATKFeature}s that
+ * all belong to a single named track and (optionally) fall within a single reference location.
+ * Backed by an ArrayList; name/location validation is performed on insertion where noted below.
+ *
+ * Original author: asivache (Sep 10, 2009).
+ */
+public class RODRecordListImpl extends AbstractList<GATKFeature> implements Comparable<RODRecordList>, Cloneable, RODRecordList, HasGenomeLocation {
+    private List<GATKFeature> records;      // the backing list of features; may contain nulls (see add below)
+    private GenomeLoc location = null;      // interval all records must overlap, or null if unconstrained
+    private String name = null;             // track name; records with a different name are rejected on add
+
+    // Creates an empty record list for the named track, with no location constraint.
+    public RODRecordListImpl(String name) {
+        records = new ArrayList<GATKFeature>();
+        this.name = name;
+    }
+
+    /**
+     * Fully qualified constructor: instantiates a new GATKFeatureRecordList object with specified GATKFeature track name, location on the
+     * reference, and list of associated GATKFeatures. This is a knee-deep COPY constructor: passed name, loc, and data element
+     * objects will be referenced from the created GATKFeatureRecordList (so that changing them from outside will affect data
+     * in this object), however, the data elements will be copied into a newly
+     * allocated list, so that the 'data' collection argument can be modified afterwards without affecting the state
+     * of this record list. WARNING: this constructor is (semi-)validating: passed name and location
+     * are allowed to be nulls (although it maybe unsafe, use caution), but if they are not nulls, then passed non-null GATKFeature data
+     * elements must have same track name, and their locations must overlap with the passed 'location' argument. Null
+     * data elements or null 'data' collection argument are allowed as well.
+     * @param name the name of the track
+     * @param data the collection of features at this location
+     * @param loc the location
+     */
+    public RODRecordListImpl(String name, Collection<GATKFeature> data, GenomeLoc loc) {
+        this.records = new ArrayList<GATKFeature>(data==null?0:data.size());
+        this.name = name;
+        this.location = loc;
+        if ( data == null || data.size() == 0 ) return; // empty dataset, nothing to do
+        for ( GATKFeature r : data ) {
+            records.add(r);
+            if ( r == null ) continue;  // null elements are accepted and stored un-validated
+            if ( ! this.name.equals(r.getName() ) ) {
+                throw new ReviewedGATKException("Attempt to add GATKFeature with non-matching name "+r.getName()+" to the track "+name);
+            }
+            if ( location != null && ! location.overlapsP(r.getLocation()) ) {
+                    throw new ReviewedGATKException("Attempt to add GATKFeature that lies outside of specified interval "+location+"; offending GATKFeature:\n"+r.toString());
+            }
+        }
+    }
+
+
+    public GenomeLoc getLocation() { return location; }
+    public String getName() { return name; }
+    public Iterator<GATKFeature> iterator() { return records.iterator() ; }
+    public void clear() { records.clear(); }
+    public boolean isEmpty() { return records.isEmpty(); }
+
+    // Collection-style add: name-validating (allowNameMismatch=false); always returns true.
+    public boolean add(GATKFeature record) { add(record, false); return true;}
+
+    @Override
+    public GATKFeature get(int i) {
+        return records.get(i);
+    }
+
+    /**
+     * Adds a record to this list.  Note: a null record is accepted and stored; only
+     * non-null records are checked against the track name.  No location/overlap
+     * validation is performed here (unlike the fully-qualified constructor).
+     * @param record the feature to add (may be null)
+     * @param allowNameMismatch if true, skip the track-name check
+     */
+    public void add(GATKFeature record, boolean allowNameMismatch) {
+        if ( record != null ) {
+            if ( ! allowNameMismatch && ! name.equals(record.getName() ) )
+                throw new ReviewedGATKException("Attempt to add GATKFeature with non-matching name "+record.getName()+" to the track "+name);
+        }
+        records.add(record);
+    }
+
+    // Bulk add with name validation for each element.
+    public void add(RODRecordList records ) { add( records, false ); }
+
+    // Bulk add; each element goes through add(GATKFeature, boolean) above.
+    public void add(RODRecordList records, boolean allowNameMismatch) {
+        for ( GATKFeature record : records )
+            add(record, allowNameMismatch);
+    }    
+
+    public int size() { return records.size() ; }
+
+    /**
+     * Compares this object with the specified object for order.  Returns a
+     * negative integer, zero, or a positive integer as this object is less
+     * than, equal to, or greater than the specified object.
+     *
+     * NOTE: ordering is by location only, so this ordering is not consistent
+     * with equals() (AbstractList.equals compares element contents).
+     *
+     * @param that the object to be compared.
+     * @return a negative integer, zero, or a positive integer as this object
+     *         is less than, equal to, or greater than the specified object.
+     * @throws ClassCastException if the specified object's type prevents it
+     *                            from being compared to this object.
+     */
+    public int compareTo(RODRecordList that) {
+        return getLocation().compareTo(that.getLocation());
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/refdata/RefMetaDataTracker.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/refdata/RefMetaDataTracker.java
new file mode 100644
index 0000000..6800916
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/refdata/RefMetaDataTracker.java
@@ -0,0 +1,497 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.refdata;
+
+import com.google.java.contract.Ensures;
+import com.google.java.contract.Requires;
+import org.apache.log4j.Logger;
+import htsjdk.tribble.Feature;
+import org.broadinstitute.gatk.utils.commandline.RodBinding;
+import org.broadinstitute.gatk.utils.refdata.utils.GATKFeature;
+import org.broadinstitute.gatk.utils.refdata.utils.RODRecordList;
+import org.broadinstitute.gatk.utils.GenomeLoc;
+import org.broadinstitute.gatk.utils.exceptions.UserException;
+
+import java.util.*;
+
+/**
+ * This class represents the Reference Metadata available at a particular site in the genome.  It can be
+ * used to conveniently lookup the RMDs at this site, as well just getting a list of all of the RMDs
+ *
+ * The standard interaction model is:
+ *
+ * Traversal system arrives at a site, which has a bunch of RMDs covering it
+ * Traversal passes creates a tracker and passes it to the walker
+ * walker calls get(rodBinding) to obtain the RMDs values at this site for the track
+ * associated with rodBinding.
+ *
+ * Note that this is an immutable class.  Once created the underlying data structures
+ * cannot be modified
+ *
+ * User: mdepristo
+ * Date: Apr 3, 2009
+ * Time: 3:05:23 PM
+ */
+public class RefMetaDataTracker {
+    // TODO: this should be a list, not a bindings, actually
+    // Shared sentinel returned when a track has no data at this site; never null so callers
+    // can iterate without a null check.  Callers must not mutate it.
+    private final static RODRecordList EMPTY_ROD_RECORD_LIST = new RODRecordListImpl("EMPTY");
+
+    // Canonical (lower-cased) track name -> non-empty record list overlapping this site.
+    // Either Collections.emptyMap() or an unmodifiable map, which is what makes this class immutable.
+    final Map<String, RODRecordList> bindings;
+    final protected static Logger logger = Logger.getLogger(RefMetaDataTracker.class);
+    // Shared tracker with no bindings; safe to reuse because instances are immutable.
+    public final static RefMetaDataTracker EMPTY_TRACKER = new RefMetaDataTracker();
+
+    // ------------------------------------------------------------------------------------------
+    //
+    //
+    // Special ENGINE interaction functions
+    //
+    //
+    // ------------------------------------------------------------------------------------------
+
+    /**
+     * Create a tracker with no bindings
+     */
+    public RefMetaDataTracker() {
+        bindings = Collections.emptyMap();
+    }
+
+    /**
+     * Create a tracker over the given per-track record lists; null and empty lists are dropped.
+     * @param allBindings the record lists overlapping the current site, one per track
+     */
+    public RefMetaDataTracker(final Collection<RODRecordList> allBindings) {
+        // set up the bindings
+        if ( allBindings.isEmpty() )
+            bindings = Collections.emptyMap();
+        else {
+            final Map<String, RODRecordList> tmap = new HashMap<String, RODRecordList>(allBindings.size());
+            for ( RODRecordList rod : allBindings ) {
+                if ( rod != null && ! rod.isEmpty() )
+                    tmap.put(canonicalName(rod.getName()), rod);
+            }
+
+            // ensure that no one modifies the bindings itself
+            bindings = Collections.unmodifiableMap(tmap);
+        }
+    }
+
+    // ------------------------------------------------------------------------------------------
+    //
+    //
+    // Generic accessors
+    //
+    //
+    // ------------------------------------------------------------------------------------------
+
+    /**
+     * Gets all of the Tribble features spanning this locus, returning them as a list of specific
+     * type T extending Feature.  This function looks across all tracks to find the Features, so
+     * if you have two tracks A and B each containing 1 Feature, then getValues will return
+     * a list containing both features.
+     *
+     * Note that this function assumes that all of the bound features are instances of or
+     * subclasses of T.  A ClassCastException will occur if this isn't the case.  If you want
+     * to get all Features without any danger of such an exception use the root Tribble
+     * interface Feature.
+     *
+     * @param type The type of the underlying objects bound here
+     * @param <T> as above
+     * @return A freshly allocated list of all of the bindings, or an empty list if none are bound.
+     */
+    @Requires({"type != null"})
+    @Ensures("result != null")
+    public <T extends Feature> List<T> getValues(final Class<T> type) {
+        return addValues(bindings.keySet(), type, new ArrayList<T>(), null, false, false);
+    }
+
+    /**
+     * Provides the same functionality as @link #getValues(Class<T>) but will only include
+     * Features that start at the provided GenomeLoc, onlyAtThisLoc.
+     *
+     * @param type The type of the underlying objects bound here
+     * @param onlyAtThisLoc only Features starting at this site are considered
+     * @param <T> as above
+     * @return A freshly allocated list of all of the bindings, or an empty list if none are bound.
+     */
+    @Requires({"type != null", "onlyAtThisLoc != null"})
+    @Ensures("result != null")
+    public <T extends Feature> List<T> getValues(final Class<T> type, final GenomeLoc onlyAtThisLoc) {
+        return addValues(bindings.keySet(), type, new ArrayList<T>(), onlyAtThisLoc, true, false);
+    }
+
+    /**
+     * Uses the same logic as @link #getValues(Class) but arbitrary select one of the resulting
+     * elements of the list to return.  That is, if there would be two elements in the result of
+     * @link #getValues(Class), one of these two is selected, and which one it will be isn't
+     * specified.  Consequently, this method is only really safe if (1) you absolutely know
+     * that only one binding will meet the constraints of @link #getValues(Class) or (2)
+     * you truly don't care which of the multiple bindings available you are going to examine.
+     *
+     * If there are no bindings here, getFirstValue() return null
+     *
+     * @param type The type of the underlying objects bound here
+     * @param <T> as above
+     * @return A random single element the RODs bound here, or null if none are bound.
+     */
+    @Requires({"type != null"})
+    public <T extends Feature> T getFirstValue(final Class<T> type) {
+        return safeGetFirst(getValues(type));
+    }
+
+    /**
+     * Uses the same logic as @link #getValue(Class,GenomeLoc) to determine the list
+     * of eligible Features and @link #getFirstValue(Class) to select a single
+     * element from the interval list.
+     *
+     * @param type The type of the underlying objects bound here
+     * @param <T> as above
+     * @param onlyAtThisLoc only Features starting at this site are considered
+     * @return A random single element the RODs bound here starting at onlyAtThisLoc, or null if none are bound.
+     */
+    @Requires({"type != null", "onlyAtThisLoc != null"})
+    public <T extends Feature> T getFirstValue(final Class<T> type, final GenomeLoc onlyAtThisLoc) {
+        return safeGetFirst(getValues(type, onlyAtThisLoc));
+    }
+
+    /**
+     * Same logic as @link #getFirstValue(RodBinding, GenomeLoc) but prioritizes records from prioritizeThisLoc if available
+     *
+     * @param rodBindings Only Features coming from the tracks associated with one of rodBindings are fetched
+     * @param <T> The Tribble Feature type of the rodBinding, and consequently the type of the resulting list of Features
+     * @param prioritizeThisLoc records starting at this site are preferred; otherwise any record from the binding is taken
+     * @return A freshly allocated list holding at most one value per binding, or an empty list if none are bound.
+     */
+    @Requires({"rodBindings != null", "prioritizeThisLoc != null"})
+    @Ensures("result != null")
+    public <T extends Feature> List<T> getPrioritizedValue(final Collection<RodBinding<T>> rodBindings, final GenomeLoc prioritizeThisLoc) {
+        final List<T> results = new ArrayList<>();
+
+        for ( final RodBinding<T> rodBinding : rodBindings ) {
+
+            // if there's a value at the prioritized location, take it
+            T value = getFirstValue(rodBinding, prioritizeThisLoc);
+
+            // otherwise, grab any one
+            if ( value == null )
+                value = getFirstValue(rodBinding);
+
+            // add if not null
+            if ( value != null )
+                results.add(value);
+        }
+
+        return results;
+    }
+
+    /**
+     * Gets all of the Tribble features bound to RodBinding spanning this locus, returning them as
+     * a list of specific type T extending Feature.
+     *
+     * Note that this function assumes that all of the bound features are instances of or
+     * subclasses of T.  A ClassCastException will occur if this isn't the case.
+     *
+     * @param rodBinding Only Features coming from the track associated with this rodBinding are fetched
+     * @param <T> The Tribble Feature type of the rodBinding, and consequently the type of the resulting list of Features
+     * @return A freshly allocated list of all of the bindings, or an empty list if none are bound.
+     */
+    @Requires({"rodBinding != null"})
+    @Ensures("result != null")
+    public <T extends Feature> List<T> getValues(final RodBinding<T> rodBinding) {
+        return addValues(rodBinding.getName(), rodBinding.getType(), new ArrayList<T>(1), getTrackDataByName(rodBinding), null, false, false);
+    }
+
+    /**
+     * Gets all of the Tribble features bound to any RodBinding in rodBindings,
+     * spanning this locus, returning them as a list of specific type T extending Feature.
+     *
+     * Note that this function assumes that all of the bound features are instances of or
+     * subclasses of T.  A ClassCastException will occur if this isn't the case.
+     *
+     * @param rodBindings Only Features coming from the tracks associated with one of rodBindings are fetched
+     * @param <T> The Tribble Feature type of the rodBinding, and consequently the type of the resulting list of Features
+     * @return A freshly allocated list of all of the bindings, or an empty list if none are bound.
+     */
+    @Requires({"rodBindings != null"})
+    @Ensures("result != null")
+    public <T extends Feature> List<T> getValues(final Collection<RodBinding<T>> rodBindings) {
+        List<T> results = new ArrayList<T>(1);
+        for ( RodBinding<T> rodBinding : rodBindings )
+            results.addAll(getValues(rodBinding));
+        return results;
+    }
+
+    /**
+     * The same logic as @link #getValues(RodBinding) but enforces that each Feature start at onlyAtThisLoc
+     *
+     * @param rodBinding Only Features coming from the track associated with this rodBinding are fetched
+     * @param <T> The Tribble Feature type of the rodBinding, and consequently the type of the resulting list of Features
+     * @param onlyAtThisLoc only Features starting at this site are considered
+     * @return A freshly allocated list of all of the bindings, or an empty list if none are bound.
+     */
+    @Requires({"rodBinding != null", "onlyAtThisLoc != null"})
+    @Ensures("result != null")
+    public <T extends Feature> List<T> getValues(final RodBinding<T> rodBinding, final GenomeLoc onlyAtThisLoc) {
+        return addValues(rodBinding.getName(), rodBinding.getType(), new ArrayList<T>(1), getTrackDataByName(rodBinding), onlyAtThisLoc, true, false);
+    }
+
+    /**
+     * The same logic as @link #getValues(List) but enforces that each Feature start at onlyAtThisLoc
+     *
+     * @param rodBindings Only Features coming from the tracks associated with one of rodBindings are fetched
+     * @param <T> The Tribble Feature type of the rodBinding, and consequently the type of the resulting list of Features
+     * @param onlyAtThisLoc only Features starting at this site are considered
+     * @return A freshly allocated list of all of the bindings, or an empty list if none are bound.
+     */
+    @Requires({"rodBindings != null", "onlyAtThisLoc != null"})
+    @Ensures("result != null")
+    public <T extends Feature> List<T> getValues(final Collection<RodBinding<T>> rodBindings, final GenomeLoc onlyAtThisLoc) {
+        List<T> results = new ArrayList<T>(1);
+        for ( RodBinding<T> rodBinding : rodBindings )
+            results.addAll(getValues(rodBinding, onlyAtThisLoc));
+        return results;
+    }
+
+    /**
+     * Uses the same logic as @getValues(RodBinding) to determine the list
+     * of eligible Features and select a single element from the resulting set
+     * of eligible features.
+     *
+     * @param rodBinding Only Features coming from the track associated with this rodBinding are fetched
+     * @param <T> as above
+     * @return A random single element the eligible Features found, or null if none are bound.
+     */
+    @Requires({"rodBinding != null"})
+    public <T extends Feature> T getFirstValue(final RodBinding<T> rodBinding) {
+        return safeGetFirst(addValues(rodBinding.getName(), rodBinding.getType(), null, getTrackDataByName(rodBinding), null, false, true));
+    }
+
+    /**
+     * Uses the same logic as @getValues(RodBinding, GenomeLoc) to determine the list
+     * of eligible Features and select a single element from the resulting set
+     * of eligible features.
+     *
+     * @param rodBinding Only Features coming from the track associated with this rodBinding are fetched
+     * @param <T> as above
+     * @param onlyAtThisLoc only Features starting at this site are considered
+     * @return A random single element the eligible Features found, or null if none are bound.
+     */
+    @Requires({"rodBinding != null", "onlyAtThisLoc != null"})
+    public <T extends Feature> T getFirstValue(final RodBinding<T> rodBinding, final GenomeLoc onlyAtThisLoc) {
+        return safeGetFirst(addValues(rodBinding.getName(), rodBinding.getType(), null, getTrackDataByName(rodBinding), onlyAtThisLoc, true, true));
+    }
+
+    /**
+     * Uses the same logic as @getValues(List) to determine the list
+     * of eligible Features and select a single element from the resulting set
+     * of eligible features.
+     *
+     * @param rodBindings Only Features coming from the tracks associated with these rodBindings are fetched
+     * @param <T> as above
+     * @return A random single element the eligible Features found, or null if none are bound.
+     */
+    @Requires({"rodBindings != null"})
+    public <T extends Feature> T getFirstValue(final Collection<RodBinding<T>> rodBindings) {
+        for ( RodBinding<T> rodBinding : rodBindings ) {
+            T val = getFirstValue(rodBinding);
+            if ( val != null )
+                return val;
+        }
+        return null;
+    }
+
+    /**
+     * Uses the same logic as @getValues(RodBinding,GenomeLoc) to determine the list
+     * of eligible Features and select a single element from the resulting set
+     * of eligible features.
+     *
+     * @param rodBindings Only Features coming from the tracks associated with these rodBindings are fetched
+     * @param <T> as above
+     * @param onlyAtThisLoc only Features starting at this site are considered
+     * @return A random single element the eligible Features found, or null if none are bound.
+     */
+    @Requires({"rodBindings != null", "onlyAtThisLoc != null"})
+    public <T extends Feature> T getFirstValue(final Collection<RodBinding<T>> rodBindings, final GenomeLoc onlyAtThisLoc) {
+        for ( RodBinding<T> rodBinding : rodBindings ) {
+            T val = getFirstValue(rodBinding, onlyAtThisLoc);
+            if ( val != null )
+                return val;
+        }
+        return null;
+    }
+
+    /**
+     * Is there a binding at this site to a ROD/track with the specified name?
+     *
+     * @param rodBinding the rod binding we want to know about
+     * @return true if any Features are bound in this tracker to rodBinding
+     */
+    @Requires({"rodBinding != null"})
+    public boolean hasValues(final RodBinding rodBinding) {
+        return bindings.containsKey(canonicalName(rodBinding.getName()));
+    }
+
+    /**
+     * Get all of the RMD tracks at the current site. Each track is returned as a single compound
+     * object (RODRecordList) that may contain multiple RMD records associated with the current site.
+     *
+     * @return List of all tracks
+     */
+    public List<RODRecordList> getBoundRodTracks() {
+        return new ArrayList<RODRecordList>(bindings.values());
+    }
+
+    /**
+     * The number of tracks with at least one value bound here
+     * @return the number of tracks with at least one bound Feature
+     */
+    public int getNTracksWithBoundFeatures() {
+        return bindings.size();
+    }
+
+    // ------------------------------------------------------------------------------------------
+    // Protected accessors using strings for unit testing
+    // ------------------------------------------------------------------------------------------
+
+    protected boolean hasValues(final String name) {
+        return bindings.containsKey(canonicalName(name));
+    }
+
+    protected <T extends Feature> List<T> getValues(final Class<T> type, final String name) {
+        return addValues(name, type, new ArrayList<T>(), getTrackDataByName(name), null, false, false);
+    }
+
+    protected <T extends Feature> List<T> getValues(final Class<T> type, final String name, final GenomeLoc onlyAtThisLoc) {
+        return addValues(name, type, new ArrayList<T>(), getTrackDataByName(name), onlyAtThisLoc, true, false);
+    }
+
+    protected <T extends Feature> T getFirstValue(final Class<T> type, final String name) {
+        return safeGetFirst(getValues(type, name));
+    }
+
+    protected <T extends Feature> T getFirstValue(final Class<T> type, final String name, final GenomeLoc onlyAtThisLoc) {
+        return safeGetFirst(getValues(type, name, onlyAtThisLoc));
+    }
+
+    // ------------------------------------------------------------------------------------------
+    //
+    //
+    // Private utility functions
+    //
+    //
+    // ------------------------------------------------------------------------------------------
+
+    /**
+     * Helper function for getFirst() operations that takes a list of <T> and
+     * returns the first element, or null if no such element exists.
+     *
+     * @param l the list to read from; never null (may be empty)
+     * @param <T> the Feature type held by the list
+     * @return the first element of l, or null if l is empty
+     */
+    @Requires({"l != null"})
+    private <T extends Feature> T safeGetFirst(final List<T> l) {
+        return l.isEmpty() ? null : l.get(0);
+    }
+
+    // Accumulates matching values across all named tracks, delegating the per-track work to the
+    // single-name overload below; when takeFirstOnly is set, stops at the first track that yields a value.
+    private <T extends Feature> List<T> addValues(final Collection<String> names,
+                                                  final Class<T> type,
+                                                  List<T> values,
+                                                  final GenomeLoc curLocation,
+                                                  final boolean requireStartHere,
+                                                  final boolean takeFirstOnly ) {
+        for ( String name : names ) {
+            RODRecordList rodList = getTrackDataByName(name); // require that the name is an exact match
+            values = addValues(name, type, values, rodList, curLocation, requireStartHere, takeFirstOnly );
+            if ( takeFirstOnly && ! values.isEmpty() )
+                break;
+        }
+
+        return values;
+    }
+
+
+
+    // Core filter/convert loop: walks rodList, keeps records that start at curLocation (when
+    // requireStartHere is set; curLocation may be null otherwise) and casts their underlying
+    // objects to T.  'values' may be passed as null, in which case a list is allocated lazily;
+    // the return value is always non-null.
+    private <T extends Feature> List<T> addValues(final String name,
+                                                  final Class<T> type,
+                                                  List<T> values,
+                                                  final RODRecordList rodList,
+                                                  final GenomeLoc curLocation,
+                                                  final boolean requireStartHere,
+                                                  final boolean takeFirstOnly ) {
+        for ( GATKFeature rec : rodList ) {
+            if ( ! requireStartHere || rec.getLocation().getStart() == curLocation.getStart() ) {  // ok, we are going to keep this thing
+                Object obj = rec.getUnderlyingObject();
+                if (!(type.isAssignableFrom(obj.getClass())))
+                    throw new UserException.CommandLineException("Unable to cast track named " + name + " to type of " + type.toString()
+                            + " it's of type " + obj.getClass());
+
+                T objT = (T)obj;  // unchecked cast, but guarded by the isAssignableFrom check above
+                if ( takeFirstOnly ) {
+                    if ( values == null )
+                        values = Arrays.asList(objT);  // fixed-size list is fine: we break out immediately below
+                    else
+                        values.add(objT);
+
+                    break;
+                } else {
+                    if ( values == null )
+                        values = new ArrayList<T>();
+                    values.add(objT);
+                }
+            }
+        }
+
+        return values == null ? Collections.<T>emptyList() : values;
+    }
+
+    /**
+     * Finds the reference metadata track named 'name' and returns all ROD records from that track
+     * associated with the current site as a RODRecordList object.  If no data track with the
+     * specified name is bound here, returns the shared (empty) sentinel record list rather than
+     * null, so callers can iterate over the result without a null check.  Callers must not
+     * mutate the returned list.
+     * @param name                track name (matched case-insensitively via canonicalName)
+     * @return track data for the given rod, or an empty record list if the track has no data here
+     */
+    private RODRecordList getTrackDataByName(final String name) {
+        final String luName = canonicalName(name);
+        RODRecordList l = bindings.get(luName);
+        return l == null ? EMPTY_ROD_RECORD_LIST : l;
+    }
+
+    // Convenience overload: looks up track data by a binding's name.
+    private RODRecordList getTrackDataByName(final RodBinding binding) {
+        return getTrackDataByName(binding.getName());
+    }
+
+    /**
+     * Returns the canonical name of the rod name (lowercases it)
+     * @param name the name of the rod
+     * @return canonical name of the rod
+     */
+    private String canonicalName(final String name) {
+        // todo -- remove me after switch to RodBinding syntax
+        return name.toLowerCase();
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/refdata/ReferenceDependentFeatureCodec.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/refdata/ReferenceDependentFeatureCodec.java
new file mode 100644
index 0000000..72f61b2
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/refdata/ReferenceDependentFeatureCodec.java
@@ -0,0 +1,42 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.refdata;
+
+import org.broadinstitute.gatk.utils.GenomeLocParser;
+
+/**
+ * An interface marking that a given Tribble feature/codec is actually dependent on context within the
+ * reference, rather than having a dependency only on the contig, start, and stop of the given feature.
+ * A HACK.  Tribble should contain all the information it needs to decode the unqualified position of
+ * a feature.
+ */
+public interface ReferenceDependentFeatureCodec {
+    /**
+     * Sets the appropriate GenomeLocParser, providing additional context when decoding larger and more variable features.
+     * @param genomeLocParser The parser to supply. 
+     */
+    public void setGenomeLocParser(GenomeLocParser genomeLocParser);
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/refdata/ReferenceOrderedDatum.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/refdata/ReferenceOrderedDatum.java
new file mode 100644
index 0000000..c54f9c5
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/refdata/ReferenceOrderedDatum.java
@@ -0,0 +1,66 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.refdata;
+
+import org.broadinstitute.gatk.utils.GenomeLoc;
+import org.broadinstitute.gatk.utils.HasGenomeLocation;
+
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+
+/**
+ * A single reference-ordered datum (ROD): one record from a position-sorted
+ * data track.  The ROD system calls initialize() once per source file to build
+ * a header object, then parseLine() for each input line, pre-split on the
+ * delimiterRegex() pattern.
+ * (Originally created in IntelliJ by mdepristo, Feb 27, 2009.)
+ */
+public interface ReferenceOrderedDatum extends Comparable<ReferenceOrderedDatum>, HasGenomeLocation {
+    public String getName();         // name of this datum's track
+    public boolean parseLine(final Object header, final String[] parts) throws IOException;  // 'header' is the object returned by initialize()
+    public String toString();
+    public String toSimpleString();  // compact rendering -- NOTE(review): exact contract vs toString() not visible here
+    public String repl();            // presumably reproduces the original source-file line; verify implementations
+
+    /**
+     * Used by the ROD system to determine how to split input lines
+     * @return Regex string delimiter separating fields
+     */
+    public String delimiterRegex();
+
+    public GenomeLoc getLocation();  // genomic position of this record (satisfies HasGenomeLocation)
+    public int compareTo( ReferenceOrderedDatum that );  // explicit restatement of Comparable.compareTo
+
+    /**
+     * Backdoor hook to read header, meta-data, etc. associated with the file.  Will be
+     * called by the ROD system before streaming starts
+     *
+     * @param source source data file on disk from which this rod stream will be pulled
+     * @return a header object that will be passed to parseLine command
+     */
+    public Object initialize(final File source) throws FileNotFoundException;
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/refdata/SeekableRODIterator.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/refdata/SeekableRODIterator.java
new file mode 100644
index 0000000..0e0bfda
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/refdata/SeekableRODIterator.java
@@ -0,0 +1,412 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.refdata;
+
+import htsjdk.samtools.SAMSequenceDictionary;
+import htsjdk.samtools.util.CloseableIterator;
+import org.broadinstitute.gatk.utils.iterators.PushbackIterator;
+import org.broadinstitute.gatk.utils.refdata.utils.GATKFeature;
+import org.broadinstitute.gatk.utils.refdata.utils.LocationAwareSeekableRODIterator;
+import org.broadinstitute.gatk.utils.refdata.utils.RODRecordList;
+import org.broadinstitute.gatk.utils.GenomeLoc;
+import org.broadinstitute.gatk.utils.GenomeLocParser;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+import org.broadinstitute.gatk.utils.exceptions.UserException;
+
+import java.util.Iterator;
+import java.util.LinkedList;
+import java.util.List;
+
+/**
+ * Wrapper class for iterators over ROD objects. It is assumed that the underlying iterator can only
+ * perform standard next() operation, which advances it to the next ROD in the stream (i.e. reads the data file
+ * line by line). This iterator 1) shifts the focus from record-based traversal to position-based traversal,
+ * and 2) adds querying seekForward() method.
+ *
+ * Namely, this iterator's next() method advances not to the next ROD in the underlying stream, but to the next
+ * genomic position covered by (at least one) ROD, and returns all RODs overlapping with that position as a RODRecordList
+ * collection-like object. Similarly, when seekForward(interval) is called, this iterator skips all the RODs from the
+ * underlying stream, until it reaches specified genomic interval, and returns the list of all RODs overlapping with that interval.
+ *
+ * NOTE: this iterator has a STATE: next() operation is not allowed after a seekForward() to a non-point (extended) interval
+ * of length > 1. Such a call would leave the iterator in an inconsistent state. seekForward() can always be called after
+ * either seekForward() or next() (as long as usual ordering criteria are satisfied: the query interval location can neither
+ * start before the current position, nor end before the previous query end). seekForward to an interval of length 1
+ * reenables next() operation.
+ *
+ * Created by IntelliJ IDEA.
+ * User: asivache
+ * Date: Sep 10, 2009
+ * Time: 6:20:46 PM
+ * To change this template use File | Settings | File Templates.
+ */
+public class SeekableRODIterator implements LocationAwareSeekableRODIterator {
+    /**
+     * Header for the datasource backing this iterator.
+     */
+    private final Object header;
+
+    /**
+     * The parser, used to construct new genome locs.
+     */
+    private final GenomeLocParser parser;
+
+    private final SAMSequenceDictionary sequenceDictionary;
+
+    private PushbackIterator<GATKFeature> it;
+    List<GATKFeature> records = null;  // here we will keep a pile of records overlapping with current position; when we iterate
+                               // and step out of record's scope, we purge it from the list
+    String name = null; // name of the ROD track wrapped by this iterator. Will be pulled from underlying iterator.
+
+    int curr_position = 0; // where the iterator is currently positioned on the genome
+    int max_position = 0;  // the rightmost stop position of currently loaded records
+    String curr_contig = null;   // what contig the iterator is currently on
+    boolean next_is_allowed = true; // see discussion below. next() is illegal after seek-forward queries of length > 1
+
+    // the stop position of the last query. We can query only in forward direction ("seek forward");
+    // it is not only the start position of every successive query that can not be before the start
+    // of the previous one (curr_start), but it is also illegal for a query interval to *end* before
+    // the end of previous query, otherwise we can end up in an inconsistent state
+    int curr_query_end = -1;
+
+    // EXAMPLE of inconsistency curr_query_end guards against:
+    //              record 1      record 2
+    //             ----------     -----------
+    // -------------------------------------------------- REF
+    //         ------------------------- query 1 (interval 1)
+    //               ----------  query 2 (interval 2)
+    //                     --------------- query 3
+    //
+    // If we query first for interval 1, both record 1 and record 2 will be loaded.
+    // Query for interval 2, on the other hand, should return only record 1, but after
+    // query 1 was performed, record 2 is already loaded from the file. If, on the other hand,
+    // we try to un-load it from memory, we won't be able to read it again. Hence query 2 is not
+    // allowed after query 1. Note also, that curr_query_end is not equivalent to max_position:
+    // the latter only tracks where currently loaded records end (and hence helps to re-load records);
+    // after query 1 is performed, max_position will be the end of record 2, but query 3 is still
+    // perfectly legal after query 1.
+    //
+    // IMPORTANT NOTE: it follows from the above discussion and example that next() is illegal after ANY
+    // seek-forward query EXCEPT those that are performed with length-1 intervals (queryInterval.start=queryInterval.stop).
+    // Indeed, in the example above, after, e.g., query 1 is performed, the iterator is "located" at the start
+    // of interval 1, but record1 and record 2 are already loaded. On the other hand, a subsequent call to next() would
+    // need to shift iterator's position by 1 base and return only record 1.
+    //
+    // This implementation tracks the query history and makes next() illegal after a seekforward query of length > 1,
+    // but re-enables next() again after a length-1 query.
+
+    public SeekableRODIterator(Object header,SAMSequenceDictionary rodDictionary,SAMSequenceDictionary referenceDictionary,GenomeLocParser parser,CloseableIterator<GATKFeature> it) {
+        this.header = header;
+        this.parser = parser;
+        this.sequenceDictionary = rodDictionary;
+        this.it = new PushbackIterator<GATKFeature>(it);
+        records = new LinkedList<GATKFeature>();
+        // the following is a trick: we would like the iterator to know the actual name assigned to
+        // the ROD implementing object we are working with. But the only way to do that is to
+        // get an instance of that ROD and query it for its name. Now, the only generic way we have at this point to instantiate
+        // the ROD is to make the underlying stream iterator to do it for us. So we are reading (or rather peeking into)
+        // the first line of the track data file just to get the ROD object created.
+        GATKFeature r = null;
+        if (this.it.hasNext()) r = this.it.element();
+        name = (r==null?null:r.getName());
+
+        curr_contig = referenceDictionary.getSequence(0).getSequenceName();
+    }
+
+    /**
+     * Gets the header associated with the backing input stream.
+     * @return the ROD header.
+     */
+    @Override
+    public Object getHeader() {
+        return header;
+    }
+
+    /**
+     * Gets the sequence dictionary associated with the backing input stream.
+     * @return sequence dictionary from the ROD header.
+     */
+    @Override
+    public SAMSequenceDictionary getSequenceDictionary() {
+        return sequenceDictionary;
+    }
+
+
+    /**
+     * Returns true if the data we iterate over has records associated with (any, not necessarily adjacent)
+     * genomic position farther along the reference.
+     * @return true if at least one record at or beyond the current position remains (loaded or unread)
+     */
+    public boolean hasNext() {
+
+        // if we did not walk to the very end of the interval(s) covered by currently loaded
+        // annotations (records), then we definitely have data for next genomic location
+        if ( curr_position < max_position ) return true;
+
+        // we are past currently loaded stuff; we have next if there are more lines to load:
+        return it.hasNext();
+    }
+
+    // Returns point location (i.e. genome loc of length 1) on the reference, to which this iterator will advance
+    // upon next call to next().
+    public GenomeLoc peekNextLocation() {
+        if ( curr_position + 1 <= max_position ) return parser.createGenomeLoc(curr_contig,curr_position+1);
+
+        // sorry, next reference position is not covered by the RODs we are currently holding. In this case,
+        // the location we will jump to upon next call to next() is the start of the next ROD record that we did
+        // not read yet:
+        if ( it.hasNext() ) {
+            GATKFeature r = it.element(); // peek, do not load!
+            return parser.createGenomeLoc(r.getLocation().getContig(),r.getLocation().getStart());
+        }
+        return null; // underlying iterator has no more records, there is no next location!
+    }
+
+    /** Advances iterator to the next genomic position that has ROD record(s) associated with it,
+     * and returns all the records overlapping with that position as a RODList. The location of the whole
+     * RODList object will be set to the smallest interval subsuming genomic intervals of all returned records.
+     * Note that next() is disabled (will throw an exception) after seekForward() operation with query length > 1.
+     * @return list of all RODs overlapping with the next "covered" genomic position
+     */
+     public RODRecordList next() {
+         if ( ! next_is_allowed )
+             throw new ReviewedGATKException("Illegal use of iterator: Can not advance iterator with next() after seek-forward query of length > 1");
+
+         curr_position++;
+ //        curr_query_end = -1;
+
+         if ( curr_position <= max_position ) {
+
+             // we still have bases covered by at least one currently loaded record;
+             // we have to purge only subset of records, on which we moved past the end
+             purgeOutOfScopeRecords();
+         } else {
+             // ooops, we are past the end of all loaded records - kill them all at once,
+             // load next record and reinitialize by fastforwarding current position to the start of next record
+             records.clear();
+             GATKFeature r = it.next(); // if hasNext() previously returned true, we are guaranteed that this call to it.next() is safe
+             records.add( r );
+             curr_contig = r.getLocation().getContig();
+             curr_position = r.getLocation().getStart();
+             max_position = r.getLocation().getStop();
+         }
+
+         // current position is set, and at this point 'records' only keeps those annotations, on which we did not reach the end yet
+         // (we might have reloaded records completely if it was necessary); but we are not guaranteed yet that we
+         // hold ALL the records overlapping with the current position. Time to check if we just walked into the interval(s)
+         // covered by new records, so we need to load them too:
+
+         while ( it.hasNext() ) {
+             GATKFeature r = it.element();
+             if ( r == null ) {
+                 it.next();
+                 continue;
+             }
+
+             GenomeLoc currentContig = parser.createOverEntireContig(curr_contig);
+             GenomeLoc thatContig = r.getLocation();
+
+             if ( currentContig.isPast(thatContig) )
+                 throw new UserException("LocationAwareSeekableRODIterator: contig " +r.getLocation().getContig() +
+                         " occurs out of order in track " + r.getName() );
+             if ( currentContig.isBefore(thatContig) ) break; // next record is on a higher contig, we do not need it yet...
+
+             if ( r.getLocation().getStart() < curr_position )
+                 throw new UserException("LocationAwareSeekableRODIterator: track "+r.getName() +
+                         " is out of coordinate order on contig "+r.getLocation() + " compared to " + curr_contig + ":" + curr_position);
+
+             if ( r.getLocation().getStart() > curr_position ) break; // next record starts after the current position; we do not need it yet
+
+             r = it.next(); // we got here only if we do need next record, time to load it for real
+
+             int stop = r.getLocation().getStop();
+             if ( stop < curr_position ) throw new ReviewedGATKException("DEBUG: encountered contig that should have been loaded earlier"); // this should never happen
+             if ( stop > max_position ) max_position = stop; // max_position keeps the rightmost stop position across all loaded records
+             records.add(r);
+         }
+
+         // 'records' and current position are fully updated. Last, we need to set the location of the whole track
+         // (collection of ROD records) to the genomic site we are currently looking at, and return the list
+
+         return new RODRecordListImpl(name,records, parser.createGenomeLoc(curr_contig,curr_position));
+     }
+
+    /**
+     * Removes from the underlying collection the last element returned by the
+     * iterator (optional operation).  This method can be called only once per
+     * call to <tt>next</tt>.  The behavior of an iterator is unspecified if
+     * the underlying collection is modified while the iteration is in
+     * progress in any way other than by calling this method.
+     *
+     * @throws UnsupportedOperationException if the <tt>remove</tt>
+     *                                       operation is not supported by this Iterator.
+     * @throws IllegalStateException         if the <tt>next</tt> method has not
+     *                                       yet been called, or the <tt>remove</tt> method has already
+     *                                       been called after the last call to the <tt>next</tt>
+     *                                       method.
+     */
+    public void remove() {
+        throw new UnsupportedOperationException("LocationAwareSeekableRODIterator does not implement remove() operation");
+    }
+
+
+    /**
+     * Returns the current "position" (not location!! ;) ) of this iterator. This method is used by the sharding
+     * system when it searches for available iterators in the pool that can be reused to resume traversal.
+     * When iterator is advanced using next(), current position
+     * is the same as 'location'. However, after a seekForward() query with extended interval, returned position
+     * will be set to the last position of the query interval, to disable (illegal) attempts to roll the iterator
+     * back and re-start traversal from current location.
+     * @return Current ending position of the iterator, or null if no position exists.
+     */
+    public GenomeLoc position() {
+        if ( curr_contig == null ) return null;
+        if ( curr_query_end > curr_position )  {
+            // do not attempt to reuse this iterator if the position we need it for lies before the end of last query performed
+            return parser.createGenomeLoc(curr_contig,curr_query_end,curr_query_end);
+        }
+        else {
+            return parser.createGenomeLoc(curr_contig,curr_position);
+        }
+    }
+
+    /**
+     * Seeks forward through the file until the specified interval is reached.
+     * The location object <code>interval</code> can be either a single point or an extended interval. All
+     * ROD records overlapping with the whole interval will be returned, or null if no such records exist.
+     *
+     * Query interval must start at or after the iterator's current location, or exception will be thrown.
+     *
+     * Query interval must end at or after the stop position of the previous query, if any, or an exception will
+     * be thrown: subsequent queries that end before the stop of previous ones are illegal.
+     *
+     * If seekForward() is performed to an extended (length > 1 i.e. start != stop) interval, next() operation becomes
+     * illegal (the iterator changes state). Only seekForward() calls are allowed thereafter, until a seekForward() call
+     * to a length-1 interval is performed, which re-enables next(). seekForward() queries with length-1 intervals can
+     * always be safely intermixed with next() (as long as ordering is respected and query intervals are at or after the
+     * current position).
+     *
+     * Note that in contrast to
+     * next() (which always advances current position of the iterator on the reference), this method scrolls
+     * forward ONLY if the specified interval is ahead of the current location of
+     * the iterator. However, if called again with the same 'interval' argument as before, seekForward will NOT
+     * advance, but will simply return the same ROD list as before.
+     *
+     *
+     * @param interval genomic interval (single point or extended) to fast-forward to.
+     * @return ROD object at (or overlapping with) the specified position, or null if no such ROD exists.
+     */
+    public RODRecordList seekForward(GenomeLoc interval) {
+
+        if ( interval.isBefore(parser.createOverEntireContig(curr_contig)) &&
+             !(interval.getStart() == 0 && interval.getStop() == 0 && interval.getContig().equals(curr_contig)) ) // This criteria is syntactic sugar for 'seek to right before curr_contig'
+            throw new ReviewedGATKException("Out of order query: query contig "+interval.getContig()+" is located before "+
+                                     "the iterator's current contig");
+        if ( interval.getContig().equals(curr_contig) ) {
+            if ( interval.getStart() < curr_position )
+                throw new ReviewedGATKException("Out of order query: query position "+interval +" is located before "+
+                        "the iterator's current position "+curr_contig + ":" + curr_position);
+            if ( interval.getStop() < curr_query_end )
+                throw new ReviewedGATKException("Unsupported querying sequence: current query interval " +
+                        interval+" ends before the end of previous query interval ("+curr_query_end+")");
+        }
+
+        curr_position = interval.getStart();
+        curr_query_end = interval.getStop();
+
+        next_is_allowed = ( curr_position == curr_query_end ); // we can call next() later only if interval length is 1
+
+        if (  interval.getContig().equals(curr_contig) &&  curr_position <= max_position ) {
+            // some of the intervals we are currently keeping do overlap with the query interval
+
+            purgeOutOfScopeRecords();
+        } else {
+            // clean up and get ready for fast-forwarding towards the requested position
+            records.clear();
+            max_position = -1;
+            curr_contig = interval.getContig();
+        }
+
+        // curr_contig and curr_position are set to where we asked to scroll to
+
+        while ( it.hasNext() ) {
+            GATKFeature r = it.next();
+            if ( r == null ) continue;
+
+            GenomeLoc currentContig = parser.createOverEntireContig(curr_contig);
+            GenomeLoc thatContig = r.getLocation();
+
+            if ( currentContig.isPast(thatContig) ) continue; // did not reach requested contig yet
+            if ( currentContig.isBefore(thatContig) ) {
+                it.pushback(r); // next record is on the higher contig, we do not need it yet...
+                break;
+            }
+
+            // we get here if we are on the requested contig:
+
+            if ( r.getLocation().getStop() < curr_position ) continue; // did not reach the requested interval yet
+
+            if ( r.getLocation().getStart() > curr_query_end ) {
+                // past the query interval
+                it.pushback(r);
+                break;
+            }
+
+            // we get here only if interval of the record r overlaps with query interval, so the record should be loaded
+            if ( r.getLocation().getStop() > max_position ) max_position = r.getLocation().getStop();
+            records.add(r);
+        }
+
+        if ( records.size() > 0 ) {
+            return new RODRecordListImpl(name,records,interval);
+        } else {
+            return null;
+        }
+
+    }
+
+    /**
+     * Removes records that end before the curr_position from the list of currently kept records. This is a
+     * convenience (private) shortcut that does not perform extensive checking. In particular, it assumes that
+     * curr_position <= max_position, as well as that we are still on the same contig.
+     */
+    private void purgeOutOfScopeRecords() {
+        Iterator<GATKFeature> i = records.iterator();
+        while ( i.hasNext() ) {
+            GATKFeature r = i.next();
+            if ( r.getLocation().getStop() < curr_position ) {
+                i.remove(); // we moved past the end of interval the record r is associated with, purge the record forever
+            }
+        }
+
+    }
+
+    @Override
+    public void close() {
+        if (this.it != null) ((CloseableIterator)this.it.getUnderlyingIterator()).close();
+    }
+
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/refdata/VariantContextAdaptors.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/refdata/VariantContextAdaptors.java
new file mode 100644
index 0000000..8b45178
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/refdata/VariantContextAdaptors.java
@@ -0,0 +1,265 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.refdata;
+
+import htsjdk.samtools.util.SequenceUtil;
+import htsjdk.tribble.Feature;
+import htsjdk.tribble.annotation.Strand;
+import htsjdk.tribble.gelitext.GeliTextFeature;
+import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
+import org.broadinstitute.gatk.utils.GenomeLoc;
+import org.broadinstitute.gatk.utils.classloader.PluginManager;
+import org.broadinstitute.gatk.utils.codecs.hapmap.RawHapMapFeature;
+import org.broadinstitute.gatk.utils.variant.GATKVariantContextUtils;
+import htsjdk.variant.variantcontext.*;
+
+import java.util.*;
+
+/**
+ * A terrible but temporary approach to converting objects to VariantContexts.  If you want to add a converter,
+ * you need to create an adaptor object here and register a converter from your class to this object.  When Tribble arrives,
+ * we'll use a better approach.
+ *
+ * To add a new converter:
+ *
+ *   create a subclass of VCAdaptor, overloading the convert operator
+ *   add it to the static map from input type -> converter where the input type is the object.class you want to convert
+ *
+ * That's it 
+ *
+ * @author depristo at broadinstitute.org
+ */
+public class VariantContextAdaptors {
+    // --------------------------------------------------------------------------------------------------------------
+    //
+    // Generic support routines.  Do not modify
+    //
+    // --------------------------------------------------------------------------------------------------------------
+
+    private static Map<Class<? extends Feature>,VCAdaptor> adaptors = new HashMap<Class<? extends Feature>,VCAdaptor>();  // registry: adaptable feature class -> converter
+
+    static {  // discover every VCAdaptor implementation on the classpath via the plugin system and register it
+        PluginManager<VCAdaptor> vcAdaptorManager = new PluginManager<VCAdaptor>(VCAdaptor.class);
+        List<VCAdaptor> adaptorInstances = vcAdaptorManager.createAllTypes();
+        for(VCAdaptor adaptor: adaptorInstances)
+            adaptors.put(adaptor.getAdaptableFeatureType(),adaptor);
+    }
+
+    public static boolean canBeConvertedToVariantContext(Object variantContainingObject) {
+        return adaptors.containsKey(variantContainingObject.getClass());  // exact runtime-class lookup; a registered supertype does not match
+    }
+
+    /** Adaptor contract: converts one specific Tribble feature type into a VariantContext. */
+    public interface VCAdaptor {
+        /**
+         * Gets the type of feature that this adaptor can 'adapt' into a VariantContext.
+         * @return Type of adaptable feature.  Must be a Tribble feature class.
+         */
+        Class<? extends Feature> getAdaptableFeatureType();
+        VariantContext convert(String name, Object input, ReferenceContext ref);  // 'name' is the ROD track name; 'input' must be of the adaptable type
+    }
+
+    public static VariantContext toVariantContext(String name, Object variantContainingObject, ReferenceContext ref) {
+        if ( ! adaptors.containsKey(variantContainingObject.getClass()) )
+            return null;  // no converter registered for this runtime class
+        else {
+            return adaptors.get(variantContainingObject.getClass()).convert(name, variantContainingObject, ref);
+        }
+    }
+
+    // --------------------------------------------------------------------------------------------------------------
+    //
+    // From here below you can add adaptor classes for new rods (or other types) to convert to VC
+    //
+    // --------------------------------------------------------------------------------------------------------------
+    private static class VariantContextAdaptor implements VCAdaptor {
+        /**
+         * 'Null' adaptor; adapts variant contexts to variant contexts.
+         * @return VariantContext.
+         */
+        @Override
+        public Class<? extends Feature> getAdaptableFeatureType() { return VariantContext.class; }
+
+        // already a VC, just cast and return it
+        @Override
+        public VariantContext convert(String name, Object input, ReferenceContext ref) {
+            return (VariantContext)input;
+        }
+    }
+
+
+    // --------------------------------------------------------------------------------------------------------------
+    //
+    // GELI to VariantContext
+    //
+    // --------------------------------------------------------------------------------------------------------------
+
+    private static class GeliTextAdaptor implements VCAdaptor {
+        /**
+         * Converts Geli text records to VariantContext.
+         * @return GeliTextFeature.
+         */
+        @Override
+        public Class<? extends Feature> getAdaptableFeatureType() { return GeliTextFeature.class; }
+
+        /**
+         * convert to a Variant Context, given:
+         * @param name  the name of the ROD
+         * @param input the Rod object, in this case a RodGeliText
+         * @param ref   the reference context
+         * @return a VariantContext object, or null if the record cannot be represented
+         */
+        @Override
+        public VariantContext convert(String name, Object input, ReferenceContext ref) {
+            GeliTextFeature geli = (GeliTextFeature)input;
+            if ( ! Allele.acceptableAlleleBases(String.valueOf(geli.getRefBase())) )
+                return null;
+            Allele refAllele = Allele.create(String.valueOf(geli.getRefBase()), true);
+
+            // emit only variant sites: het genotypes, or hom genotypes that differ from the reference base
+            if ( geli.getGenotype().isHet() || !geli.getGenotype().containsBase(geli.getRefBase())) {
+                // collect the site's alt alleles and the sample's genotype alleles
+                List<Allele> alleles = new ArrayList<Allele>();
+                List<Allele> genotypeAlleles = new ArrayList<Allele>();
+                // add all of the alt alleles
+                for ( char alt : geli.getGenotype().toString().toCharArray() ) {  // NOTE(review): assumes one base character per called allele -- confirm against GeliTextFeature
+                    if ( ! Allele.acceptableAlleleBases(String.valueOf(alt)) ) {
+                        return null;
+                    }
+                    Allele allele = Allele.create(String.valueOf(alt), false);
+                    if (!alleles.contains(allele) && !refAllele.basesMatch(allele.getBases())) alleles.add(allele);
+
+                    // add the allele, first checking if it's reference or not
+                    if (!refAllele.basesMatch(allele.getBases())) genotypeAlleles.add(allele);
+                    else genotypeAlleles.add(refAllele);
+                }
+
+                Map<String, Object> attributes = new HashMap<String, Object>();  // no extra INFO-style attributes are attached
+                Collection<Genotype> genotypes = new ArrayList<Genotype>();
+                Genotype call = GenotypeBuilder.create(name, genotypeAlleles);
+
+                // add the call to the genotype list, and then use this list to create a VariantContext
+                genotypes.add(call);
+                alleles.add(refAllele);
+                GenomeLoc loc = ref.getGenomeLocParser().createGenomeLoc(geli.getChr(),geli.getStart());
+                return new VariantContextBuilder(name, loc.getContig(), loc.getStart(), loc.getStop(), alleles).genotypes(genotypes).log10PError(-1 * geli.getLODBestToReference()).attributes(attributes).make();
+            } else
+                return null; // can't handle anything else
+        }
+    }
+
+    // --------------------------------------------------------------------------------------------------------------
+    //
+    // HapMap to VariantContext
+    //
+    // --------------------------------------------------------------------------------------------------------------
+
+    private static class HapMapAdaptor implements VCAdaptor {
+        /**
+         * Converts HapMap records to VariantContext.
+         * @return RawHapMapFeature.class, the feature type this adaptor accepts
+         */
+        @Override
+        public Class<? extends Feature> getAdaptableFeatureType() { return RawHapMapFeature.class; }
+
+        /**
+         * convert to a Variant Context, given:
+         * @param name  the name of the ROD
+         * @param input the Rod object, in this case a RawHapMapFeature
+         * @param ref   the reference context (required; used to derive the reference SNP allele)
+         * @return a VariantContext object, or null if insufficient reference context was provided
+         */
+        @Override
+        public VariantContext convert(String name, Object input, ReferenceContext ref) {
+            if ( ref == null )
+                throw new UnsupportedOperationException("Conversion from HapMap to VariantContext requires a reference context");
+
+            RawHapMapFeature hapmap = (RawHapMapFeature)input;
+
+            // offset of the record start within the provided reference window
+            int index = hapmap.getStart() - ref.getWindow().getStart();
+            if ( index < 0 )
+                return null; // we weren't given enough reference context to create the VariantContext
+
+            HashSet<Allele> alleles = new HashSet<Allele>();
+            Allele refSNPAllele = Allele.create(ref.getBase(), true);
+            int deletionLength = -1; // stays -1 (no end adjustment) unless a reference deletion allele is found
+
+            Map<String, Allele> alleleMap = hapmap.getActualAlleles();
+            // use the actual alleles, if available
+            if ( alleleMap != null ) {
+                alleles.addAll(alleleMap.values());
+                Allele deletionAllele = alleleMap.get(RawHapMapFeature.INSERTION);  // yes, use insertion here (since we want the reference bases)
+                if ( deletionAllele != null && deletionAllele.isReference() )
+                    deletionLength = deletionAllele.length();
+            } else {
+                // add the reference allele for SNPs
+                alleles.add(refSNPAllele);
+            }
+
+            // make a mapping from sample to genotype
+            String[] samples = hapmap.getSampleIDs();
+            String[] genotypeStrings = hapmap.getGenotypes();
+
+            GenotypesContext genotypes = GenotypesContext.create(samples.length);
+            for ( int i = 0; i < samples.length; i++ ) {
+                // ignore bad genotypes
+                if ( genotypeStrings[i].contains("N") )
+                    continue;
+
+                // assumes each genotype string is two allele characters; a2 takes the remainder — TODO confirm
+                String a1 = genotypeStrings[i].substring(0,1);
+                String a2 = genotypeStrings[i].substring(1);
+                ArrayList<Allele> myAlleles = new ArrayList<Allele>(2);
+
+                // use the mapping to actual alleles, if available
+                if ( alleleMap != null ) {
+                    // NOTE(review): alleleMap.get(...) may return null if the genotype character
+                    // isn't a key in the map — presumably upstream guarantees it is; verify
+                    myAlleles.add(alleleMap.get(a1));
+                    myAlleles.add(alleleMap.get(a2));
+                } else {
+                    // ignore indels (which we can't handle without knowing the alleles)
+                    if ( genotypeStrings[i].contains("I") || genotypeStrings[i].contains("D") )
+                        continue;
+
+                    // mark each allele as reference iff its bases match the reference SNP allele
+                    Allele allele1 = Allele.create(a1, refSNPAllele.basesMatch(a1));
+                    Allele allele2 = Allele.create(a2, refSNPAllele.basesMatch(a2));
+
+                    myAlleles.add(allele1);
+                    myAlleles.add(allele2);
+                    alleles.add(allele1);
+                    alleles.add(allele2);
+                }
+
+                Genotype g = GenotypeBuilder.create(samples[i], myAlleles);
+                genotypes.add(g);
+            }
+
+            // deletions span extra reference bases, so extend the end coordinate accordingly
+            long end = hapmap.getEnd();
+            if ( deletionLength > 0 )
+                end += (deletionLength - 1);
+            VariantContext vc = new VariantContextBuilder(name, hapmap.getChr(), hapmap.getStart(), end, alleles).id(hapmap.getName()).genotypes(genotypes).make();
+            return vc;
+       }
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/refdata/package-info.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/refdata/package-info.java
new file mode 100644
index 0000000..ea24c2b
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/refdata/package-info.java
@@ -0,0 +1,26 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.refdata;
\ No newline at end of file
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/refdata/tracks/FeatureManager.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/refdata/tracks/FeatureManager.java
new file mode 100644
index 0000000..a450fce
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/refdata/tracks/FeatureManager.java
@@ -0,0 +1,280 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.refdata.tracks;
+
+import com.google.java.contract.Ensures;
+import com.google.java.contract.Requires;
+import htsjdk.tribble.Feature;
+import htsjdk.tribble.FeatureCodec;
+import htsjdk.tribble.NameAwareCodec;
+import org.broadinstitute.gatk.utils.refdata.ReferenceDependentFeatureCodec;
+import org.broadinstitute.gatk.utils.refdata.utils.RMDTriplet;
+import org.broadinstitute.gatk.utils.GenomeLocParser;
+import org.broadinstitute.gatk.utils.classloader.PluginManager;
+import htsjdk.variant.vcf.AbstractVCFCodec;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+import org.broadinstitute.gatk.utils.help.GATKDocUtils;
+
+import java.io.File;
+import java.util.*;
+
+
+/**
+ * Class for managing Tribble Feature readers available to the GATK.  The features
+ * are dynamically determined via a PluginManager.  This class provides convenient
+ * getter methods for obtaining FeatureDescriptor objects that collect all of the
+ * useful information about the Tribble Codec, Feature, and name in one place.
+ *
+ * @author depristo
+ */
+public class FeatureManager  {
+    /**
+     * Value class tying together a codec's name, its instance, and the Feature type it
+     * produces.  Ordered by name (see compareTo) so TreeSet-backed collections of
+     * descriptors iterate alphabetically.
+     */
+    public static class FeatureDescriptor implements Comparable<FeatureDescriptor> {
+        final String name;
+        final FeatureCodec codec;
+
+        public FeatureDescriptor(final String name, final FeatureCodec codec) {
+            this.name = name;
+            this.codec = codec;
+        }
+
+        /** @return the (upper-cased) name of this track type, e.g. "VCF" */
+        public String getName() {
+            return name;
+        }
+        /** @return the simple (unqualified) class name of the Feature type this codec produces */
+        public String getSimpleFeatureName() { return getFeatureClass().getSimpleName(); }
+        /** @return the codec instance backing this descriptor */
+        public FeatureCodec getCodec() {
+            return codec;
+        }
+        /** @return the concrete class of the codec */
+        public Class getCodecClass() { return codec.getClass(); }
+        /** @return the class of Feature records this codec emits */
+        public Class getFeatureClass() { return codec.getFeatureType(); }
+
+        @Override
+        public String toString() {
+            return String.format("FeatureDescriptor name=%s codec=%s feature=%s",
+                    getName(), getCodecClass().getName(), getFeatureClass().getName());
+        }
+
+        @Override
+        public int compareTo(FeatureDescriptor o) {
+            return getName().compareTo(o.getName());
+        }
+    }
+
+    // discovers FeatureCodec implementations on the classpath, and instantiates them on demand
+    private final PluginManager<FeatureCodec> pluginManager;
+    // TreeSet keeps descriptors sorted by name via FeatureDescriptor.compareTo
+    private final Collection<FeatureDescriptor> featureDescriptors = new TreeSet<FeatureDescriptor>();
+    // when true, VCF codecs created by this manager disable on-the-fly modifications (see createCodec)
+    private final boolean lenientVCFProcessing;
+
+    /**
+     * Construct a FeatureManager without a master VCF header
+     */
+    public FeatureManager() {
+        this(false);
+    }
+
+    /**
+     * Construct a FeatureManager, discovering all available FeatureCodec plugins.
+     *
+     * @param lenientVCFProcessing if true, VCF codecs created by this manager will have
+     *                             on-the-fly modifications disabled
+     */
+    public FeatureManager(final boolean lenientVCFProcessing) {
+        this.lenientVCFProcessing = lenientVCFProcessing;
+        pluginManager = new PluginManager<FeatureCodec>(FeatureCodec.class, "Codecs", "Codec");
+
+        // instantiate one codec per discovered plugin; names are normalized to upper case
+        for (final String rawName: pluginManager.getPluginsByName().keySet()) {
+            FeatureCodec codec = pluginManager.createByName(rawName);
+            String name = rawName.toUpperCase();
+            FeatureDescriptor featureDescriptor = new FeatureDescriptor(name, codec);
+            featureDescriptors.add(featureDescriptor);
+        }
+    }
+
+    /**
+     * Return the FeatureDescriptor whose getCodecClass().equals(codecClass).
+     *
+     * @param codecClass the codec class to look up
+     * @return A FeatureDescriptor or null if none is found
+     */
+    @Requires("codecClass != null")
+    public FeatureDescriptor getByCodec(Class codecClass) {
+        for ( FeatureDescriptor descriptor : featureDescriptors )
+            if ( descriptor.getCodecClass().equals(codecClass) )
+                return descriptor;
+        return null;
+    }
+
+    /**
+     * Returns a collection of FeatureDescriptors that emit records of type featureClass
+     * (i.e. whose feature class is featureClass or a subtype of it).
+     *
+     * @param featureClass the Feature type to match
+     * @return A (possibly empty) collection of matching FeatureDescriptors, sorted by name
+     */
+    @Requires("featureClass != null")
+    public <T extends Feature> Collection<FeatureDescriptor> getByFeature(Class<T> featureClass) {
+        Set<FeatureDescriptor> consistentDescriptors = new TreeSet<FeatureDescriptor>();
+
+        // explicit runtime check in addition to the @Requires contract annotation
+        if (featureClass == null)
+            throw new IllegalArgumentException("trackRecordType value is null, please pass in an actual class object");
+
+        for ( FeatureDescriptor descriptor : featureDescriptors ) {
+            if ( featureClass.isAssignableFrom(descriptor.getFeatureClass()))
+                consistentDescriptors.add(descriptor);
+        }
+        return consistentDescriptors;
+    }
+
+    /**
+     * Return the FeatureDescriptor whose name matches (case-insensitively) the given name
+     *
+     * @param name the track type name, e.g. "vcf"
+     * @return A FeatureDescriptor or null if none is found
+     */
+    @Requires("name != null")
+    public FeatureDescriptor getByName(String name) {
+        for ( FeatureDescriptor descriptor : featureDescriptors )
+            if ( descriptor.getName().equalsIgnoreCase(name) )
+                return descriptor;
+        return null;
+    }
+
+    /**
+     * Returns the FeatureDescriptor that can read the contents of File file, if one can be determined
+     *
+     * @param file the file to probe with each codec's canDecode()
+     * @return A FeatureDescriptor or null if none is found
+     * @throws ReviewedGATKException if more than one codec claims it can decode the file
+     */
+    @Requires({"file != null", "file.isFile()", "file.canRead()"})
+    public FeatureDescriptor getByFiletype(File file) {
+        List<FeatureDescriptor> canParse = new ArrayList<FeatureDescriptor>();
+        for ( FeatureDescriptor descriptor : featureDescriptors )
+            if ( descriptor.getCodec().canDecode(file.getPath()) ) {
+                canParse.add(descriptor);
+            }
+
+        if ( canParse.size() == 0 )
+            return null;
+        else if ( canParse.size() > 1 )
+            throw new ReviewedGATKException("BUG: multiple feature descriptors can read file " + file + ": " + canParse);
+        else
+            return canParse.get(0);
+    }
+
+    /**
+     * Returns the FeatureDescriptor associated with the type described by triplet, or null if none is found
+     * @param triplet the ROD triplet whose type name is looked up
+     * @return the matching FeatureDescriptor, or null
+     */
+    @Requires("triplet != null")
+    public FeatureDescriptor getByTriplet(RMDTriplet triplet) {
+        return getByName(triplet.getType());
+    }
+
+    /**
+     * @return all of the FeatureDescriptors available to the GATK.  Never null
+     */
+    @Ensures("result != null")
+    public Collection<FeatureDescriptor> getFeatureDescriptors() {
+        return Collections.unmodifiableCollection(featureDescriptors);
+    }
+
+
+    /**
+     * Returns a formatted table of the available tribble track names (vcf,dbsnp,etc) that we can load
+     * @return a human-readable, multi-line table of name / feature type / documentation link
+     */
+    @Ensures("result != null")
+    public String userFriendlyListOfAvailableFeatures() {
+        return userFriendlyListOfAvailableFeatures(Feature.class);
+    }
+
+    /**
+     * Returns a formatted table of the available tribble track names (vcf,dbsnp,etc) that we can load
+     * restricted to only Codecs producing Features consistent with the requiredFeatureType
+     * @return a human-readable, multi-line table of name / feature type / documentation link
+     */
+    @Ensures("result != null")
+    public String userFriendlyListOfAvailableFeatures(Class<? extends Feature> requiredFeatureType) {
+        final String nameHeader="Name", featureHeader = "FeatureType", docHeader="Documentation";
+
+        // first pass: compute column widths over the descriptors that will actually be printed
+        int maxNameLen = nameHeader.length(), maxFeatureNameLen = featureHeader.length();
+        for ( final FeatureDescriptor descriptor : featureDescriptors ) {
+            if ( requiredFeatureType.isAssignableFrom(descriptor.getFeatureClass()) ) {
+                maxNameLen = Math.max(maxNameLen, descriptor.getName().length());
+                maxFeatureNameLen = Math.max(maxFeatureNameLen, descriptor.getSimpleFeatureName().length());
+            }
+        }
+
+        // second pass: emit header row, then one row per matching descriptor
+        StringBuilder docs = new StringBuilder();
+        String format = "%" + maxNameLen + "s   %" + maxFeatureNameLen + "s   %s%n";
+        docs.append(String.format(format, nameHeader, featureHeader, docHeader));
+        for ( final FeatureDescriptor descriptor : featureDescriptors ) {
+            if ( requiredFeatureType.isAssignableFrom(descriptor.getFeatureClass()) ) {
+                final String DocURL = GATKDocUtils.helpLinksToGATKDocs(descriptor.getCodecClass());
+                final String oneDoc;
+                // NOTE(review): "_sting_" in the doc URL appears to identify codecs documented
+                // within GATK (legacy Sting package marker) — confirm against GATKDocUtils
+                if ( DocURL.contains("_sting_") ) {
+                    oneDoc = String.format(format,
+                            descriptor.getName(),
+                            descriptor.getSimpleFeatureName(),
+                            DocURL);
+                } else {
+                    oneDoc = String.format(format,
+                            descriptor.getName(),
+                            descriptor.getSimpleFeatureName(),
+                            "(this is an external codec and is not documented within GATK)");
+                }
+
+                docs.append(oneDoc);
+            }
+        }
+
+        return docs.toString();
+    }
+
+    /**
+     * Create a new FeatureCodec of the type described in descriptor, assigning it the
+     * name (if possible) and providing it the genomeLocParser (where necessary)
+     *
+     * @param descriptor FeatureDescriptor of the Tribble FeatureCodec we want to create
+     * @param name the name to assign this codec
+     * @param genomeLocParser GenomeLocParser for ReferenceDependentFeatureCodecs
+     * @param remappedSampleName replacement sample name for single-sample vcfs, or null if we're not performing
+     *                           sample name remapping
+     * @return the feature codec itself
+     */
+    @Requires({"descriptor != null", "name != null", "genomeLocParser != null"})
+    @Ensures("result != null")
+    public FeatureCodec createCodec(final FeatureDescriptor descriptor, final String name, final GenomeLocParser genomeLocParser,
+                                    final String remappedSampleName) {
+        // a fresh instance per call; the descriptor's own codec is only used for type information
+        FeatureCodec codex = pluginManager.createByType(descriptor.getCodecClass());
+        if ( codex instanceof NameAwareCodec )
+            ((NameAwareCodec)codex).setName(name);
+        if ( codex instanceof ReferenceDependentFeatureCodec )
+            ((ReferenceDependentFeatureCodec)codex).setGenomeLocParser(genomeLocParser);
+        if ( codex instanceof AbstractVCFCodec ) {
+            if ( lenientVCFProcessing ) {
+                ((AbstractVCFCodec)codex).disableOnTheFlyModifications();
+            }
+            if ( remappedSampleName != null ) {
+                ((AbstractVCFCodec)codex).setRemappedSampleName(remappedSampleName);
+            }
+        }
+
+        return codex;
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/refdata/tracks/IndexDictionaryUtils.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/refdata/tracks/IndexDictionaryUtils.java
new file mode 100644
index 0000000..30bd8ec
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/refdata/tracks/IndexDictionaryUtils.java
@@ -0,0 +1,114 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.refdata.tracks;
+
+import htsjdk.samtools.SAMSequenceDictionary;
+import htsjdk.samtools.SAMSequenceRecord;
+import org.apache.log4j.Logger;
+import htsjdk.tribble.index.Index;
+import htsjdk.tribble.index.MutableIndex;
+import org.broadinstitute.gatk.utils.ValidationExclusion;
+import org.broadinstitute.gatk.utils.SequenceDictionaryUtils;
+
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.TreeSet;
+
+/**
+ * Utilities for working with Sequence Dictionaries embedded in tribble indices
+ *
+ * @author Your Name
+ * @since Date created
+ */
+public class IndexDictionaryUtils {
+    private final static Logger logger = Logger.getLogger(IndexDictionaryUtils.class);
+
+    // the key prefix we use for marking sequence dictionary entries in the Tribble index property list
+    public static final String SequenceDictionaryPropertyPredicate = "DICT:";
+
+    /**
+     * get the sequence dictionary from the index's property list, if available
+     *
+     * Entries are recognized by the "DICT:" key prefix; the rest of the key is the
+     * sequence name and the value is its length.
+     *
+     * @param index the index file to use
+     * @return a SAMSequenceDictionary built from the index properties (empty if none are present)
+     */
+    public static SAMSequenceDictionary getSequenceDictionaryFromProperties(Index index) {
+        SAMSequenceDictionary dict = new SAMSequenceDictionary();
+        for (Map.Entry<String,String> entry : index.getProperties().entrySet()) {
+            if (entry.getKey().startsWith(SequenceDictionaryPropertyPredicate)) {
+                // strip the "DICT:" prefix to recover the sequence name
+                final String sequenceName = entry.getKey().substring(SequenceDictionaryPropertyPredicate.length());
+                dict.addSequence(new SAMSequenceRecord(sequenceName, Integer.valueOf(entry.getValue())));
+            }
+        }
+        return dict;
+    }
+
+    /**
+     * create the sequence dictionary from the contig list; a backup approach.
+     * Sequence lengths are unknown here, so each record gets a placeholder length of 0.
+     *
+     * @param index the index file to use
+     * @param dict the sequence dictionary to add contigs to
+     * @return the filled-in sequence dictionary
+     */
+    static SAMSequenceDictionary createSequenceDictionaryFromContigList(final Index index, final SAMSequenceDictionary dict) {
+        final List<String> seqNames = index.getSequenceNames();
+        if (seqNames == null) {
+            return dict;
+        }
+        for (final String name : seqNames) {
+            // length 0 = unknown; only the contig names are available from the index
+            dict.addSequence(new SAMSequenceRecord(name, 0));
+        }
+        return dict;
+    }
+
+    /**
+     *  Sets the sequence dictionary of the given index.  THE INDEX MUST BE MUTABLE (i.e. not Tabix).
+     *
+     * @param index the (mutable) index file to use
+     * @param dict  the dictionary to use
+     */
+    public static void setIndexSequenceDictionary(Index index, SAMSequenceDictionary dict) {
+        for ( SAMSequenceRecord seq : dict.getSequences() ) {
+            // store each sequence as a "DICT:<name>" -> "<length>" property
+            final String contig = SequenceDictionaryPropertyPredicate + seq.getSequenceName();
+            final String length = String.valueOf(seq.getSequenceLength());
+            ((MutableIndex)index).addProperty(contig, length);
+        }
+    }
+
+    /**
+     * Validate the track's sequence dictionary against the reference's dictionary, warning
+     * (and skipping validation) when the track has no dictionary at all.
+     *
+     * @param trackName name of the track, used in log/error messages
+     * @param trackDict the track's sequence dictionary (may be null or empty)
+     * @param referenceDict the reference sequence dictionary to validate against
+     * @param validationExclusionType exclusions that can relax the validation
+     */
+    public static void validateTrackSequenceDictionary(final String trackName,
+                                                       final SAMSequenceDictionary trackDict,
+                                                       final SAMSequenceDictionary referenceDict,
+                                                       final ValidationExclusion.TYPE validationExclusionType ) {
+        // if the sequence dictionary is empty (as well as null which means it doesn't have a dictionary), skip validation
+        if (trackDict == null || trackDict.isEmpty()) {
+            logger.warn("Track " + trackName + " doesn't have a sequence dictionary built in, skipping dictionary validation");
+        } else {
+            // (a local Set of track sequence names used to be built here but was never read; dead code removed)
+            SequenceDictionaryUtils.validateDictionaries(logger, validationExclusionType, trackName, trackDict, "reference", referenceDict, false, null);
+        }
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/refdata/tracks/RMDTrack.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/refdata/tracks/RMDTrack.java
new file mode 100644
index 0000000..76f2046
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/refdata/tracks/RMDTrack.java
@@ -0,0 +1,147 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.refdata.tracks;
+
+import htsjdk.samtools.SAMSequenceDictionary;
+import htsjdk.samtools.util.CloseableIterator;
+import org.apache.log4j.Logger;
+import htsjdk.tribble.AbstractFeatureReader;
+import htsjdk.tribble.CloseableTribbleIterator;
+import htsjdk.tribble.Feature;
+import htsjdk.tribble.FeatureCodec;
+import org.broadinstitute.gatk.utils.refdata.utils.FeatureToGATKFeatureIterator;
+import org.broadinstitute.gatk.utils.refdata.utils.GATKFeature;
+import org.broadinstitute.gatk.utils.GenomeLoc;
+import org.broadinstitute.gatk.utils.GenomeLocParser;
+import org.broadinstitute.gatk.utils.exceptions.UserException;
+
+import java.io.File;
+import java.io.IOException;
+
+
+/**
+ * @author aaron
+ *         <p/>
+ *         Class RMDTrack
+ *         <p/>
+ *         the basics of what a reference metadata track must contain.
+ */
+public class RMDTrack {
+    // FIX: was Logger.getLogger(RMDTrackBuilder.class), which misattributed this class's
+    // log output to RMDTrackBuilder; use this class as the logger category instead.
+    private final static Logger logger = Logger.getLogger(RMDTrack.class);
+
+    // the basics of a track:
+    private final Class type;           // our type
+    private final String name;          // the name
+    private final File file;            // the associated file we create the reader from
+
+    // our feature reader - allows queries; set to null once close() has been called
+    private AbstractFeatureReader reader;
+
+    // our sequence dictionary, which can be null
+    private final SAMSequenceDictionary dictionary;
+
+    /**
+     * Parser to use when creating/parsing GenomeLocs.
+     */
+    private final GenomeLocParser genomeLocParser;
+
+    // our codec type
+    private final FeatureCodec codec;
+
+    /** @return the type of track, used for track lookup */
+    public Class getType() {
+        return type;
+    }
+
+    /** @return the name of this specific track */
+    public String getName() {
+        return name;
+    }
+
+    /** @return the file backing this track */
+    public File getFile() {
+        return file;
+    }
+
+    /**
+     * Create a track
+     *
+     * @param type the type of track, used for track lookup
+     * @param name the name of this specific track
+     * @param file the associated file, for reference or recreating the reader
+     * @param reader the feature reader to use as the underlying data source
+     * @param dict the sam sequence dictionary
+     * @param genomeLocParser parser used to create GenomeLocs for returned features
+     * @param codec the feature codec we use to decode this type
+     */
+    public RMDTrack(final Class type, final String name, final File file, final AbstractFeatureReader reader, final SAMSequenceDictionary dict, final GenomeLocParser genomeLocParser, final FeatureCodec codec) {
+        this.type = type;
+        this.name = name;
+        this.file = file;
+        this.reader = reader;
+        this.dictionary = dict;
+        this.genomeLocParser = genomeLocParser;
+        this.codec = codec;
+    }
+
+    /**
+     * @return how to get an iterator of the underlying data.  This is all a track has to support,
+     *         but other more advanced tracks support the query interface
+     */
+    public CloseableIterator<GATKFeature> getIterator() {
+        try {
+            return new FeatureToGATKFeatureIterator(genomeLocParser,reader.iterator(),this.getName());
+        } catch (IOException e) {
+            throw new UserException.CouldNotReadInputFile(getFile(), "Unable to read from file", e);
+        }
+    }
+
+    /**
+     * Query the track for features overlapping the given interval.
+     *
+     * @param interval the genomic interval to query
+     * @return an iterator over the GATKFeatures in the interval
+     * @throws IOException if the underlying reader fails
+     */
+    public CloseableIterator<GATKFeature> query(GenomeLoc interval) throws IOException {
+        CloseableTribbleIterator<Feature> iter = reader.query(interval.getContig(),interval.getStart(),interval.getStop());
+        return new FeatureToGATKFeatureIterator(genomeLocParser, iter, this.getName());
+    }
+
+    /**
+     * Close the underlying reader; the track cannot be used afterwards.
+     */
+    public void close() {
+        try {
+            reader.close();
+        } catch (IOException e) {
+            // NOTE(review): MalformedFile is an odd exception type for a close failure,
+            // but callers may catch it, so the type is preserved.
+            throw new UserException.MalformedFile("Unable to close reader " + reader.toString(),e);
+        }
+        reader = null;
+    }
+
+    /**
+     * get the sequence dictionary from the track, if available
+     * @return a SAMSequenceDictionary if available, null if unavailable
+     */
+    public SAMSequenceDictionary getSequenceDictionary() {
+        return dictionary;
+    }
+
+    /** @return the header object provided by the underlying reader */
+    public Object getHeader() {
+        return reader.getHeader();
+    }
+
+    /** @return the feature codec we use to decode this type */
+    public FeatureCodec getCodec() {
+        return codec;
+    }
+}
\ No newline at end of file
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/refdata/tracks/RMDTrackBuilder.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/refdata/tracks/RMDTrackBuilder.java
new file mode 100644
index 0000000..0b84c05
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/refdata/tracks/RMDTrackBuilder.java
@@ -0,0 +1,469 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.refdata.tracks;
+
+import htsjdk.samtools.SAMSequenceDictionary;
+import htsjdk.samtools.SAMSequenceRecord;
+import htsjdk.variant.vcf.VCFContigHeaderLine;
+import htsjdk.variant.vcf.VCFHeader;
+import org.apache.log4j.Logger;
+import htsjdk.tribble.AbstractFeatureReader;
+import htsjdk.tribble.FeatureCodec;
+import htsjdk.tribble.Tribble;
+import htsjdk.tribble.TribbleException;
+import htsjdk.tribble.index.Index;
+import htsjdk.tribble.index.IndexFactory;
+import htsjdk.tribble.util.LittleEndianOutputStream;
+import org.broadinstitute.gatk.utils.SequenceDictionaryUtils;
+import org.broadinstitute.gatk.utils.commandline.ArgumentTypeDescriptor;
+import org.broadinstitute.gatk.utils.commandline.Tags;
+import org.broadinstitute.gatk.utils.ValidationExclusion;
+import org.broadinstitute.gatk.utils.refdata.utils.RMDTriplet;
+import org.broadinstitute.gatk.utils.refdata.utils.RMDTriplet.RMDStorageType;
+import org.broadinstitute.gatk.utils.GenomeLocParser;
+import org.broadinstitute.gatk.utils.collections.Pair;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+import org.broadinstitute.gatk.utils.exceptions.UserException;
+import org.broadinstitute.gatk.utils.file.FSLockWithShared;
+import org.broadinstitute.gatk.utils.instrumentation.Sizeof;
+
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+
+
+/**
+ *
+ * @author aaron
+ *                                           `
+ * Class RMDTrackBuilder
+ *
+ * This class keeps track of the available codecs, and knows how to put together a track of
+ * that gets iterators from the FeatureReader using Tribble.
+ *
+ */
+public class RMDTrackBuilder { // extends PluginManager<FeatureCodec> {
+    /**
+     * our log, which we use to capture anything from this class
+     */
+    private final static Logger logger = Logger.getLogger(RMDTrackBuilder.class);
+
+    // private sequence dictionary we use to set our tracks with
+    private final SAMSequenceDictionary dict;
+
+    /**
+     * Private genome loc parser to use when building out new locs.
+     */
+    private final GenomeLocParser genomeLocParser;
+
+    /**
+     * Validation exclusions, for validating the sequence dictionary.
+     */
+    private ValidationExclusion.TYPE validationExclusionType;
+
+    private final FeatureManager featureManager;
+
+    // If true, do not attempt to create index files if they don't exist or are outdated, and don't
+    // make any file lock acquisition calls on the index files.
+    private final boolean disableAutoIndexCreation;
+
+    // Map of file name -> new sample name used when performing on-the-fly sample renaming
+    private final Map<String, String> sampleRenameMap;
+
+    /**
+     * Construct an RMDTrackerBuilder, allowing the user to define tracks to build after-the-fact.  This is generally
+     * used when walkers want to directly manage the ROD system for whatever reason.  Before using this constructor,
+     * please talk through your approach with the SE team.
+     * @param dict Sequence dictionary to use.
+     * @param genomeLocParser Location parser to use.
+     * @param validationExclusionType Types of validations to exclude, for sequence dictionary verification.
+     * @param disableAutoIndexCreation Do not auto-create index files, and do not use file locking when accessing index files.
+     *                                 UNSAFE in general (because it causes us not to lock index files before reading them) --
+     *                                 suitable only for test suite use.
+     * @param sampleRenameMap Map of file name -> new sample name used when performing on-the-fly sample renaming
+     */
+    public RMDTrackBuilder(final SAMSequenceDictionary dict,
+                           final GenomeLocParser genomeLocParser,
+                           final ValidationExclusion.TYPE validationExclusionType,
+                           final boolean disableAutoIndexCreation,
+                           final Map<String, String> sampleRenameMap) {
+        this.dict = dict;
+        this.validationExclusionType = validationExclusionType;
+        this.genomeLocParser = genomeLocParser;
+        this.featureManager = new FeatureManager(ValidationExclusion.lenientVCFProcessing(validationExclusionType));
+        this.disableAutoIndexCreation = disableAutoIndexCreation;
+        this.sampleRenameMap = sampleRenameMap;
+    }
+
+    /**
+     * Return the feature manager this RMDTrackBuilder is using the create tribble tracks
+     *
+     * @return
+     */
+    public FeatureManager getFeatureManager() {
+        return featureManager;
+    }
+
+    /**
+     * create a RMDTrack of the specified type
+     *
+     * @param fileDescriptor a description of the type of track to build.
+     *
+     * @return an instance of the track
+     */
+    public RMDTrack createInstanceOfTrack(final RMDTriplet fileDescriptor) {
+        String name = fileDescriptor.getName();
+        File inputFile = new File(fileDescriptor.getFile());
+
+        FeatureManager.FeatureDescriptor descriptor = getFeatureManager().getByTriplet(fileDescriptor);
+        if (descriptor == null)
+            throw new UserException.BadArgumentValue("-B",fileDescriptor.getType());
+
+        // return a feature reader track
+        Pair<AbstractFeatureReader, SAMSequenceDictionary> pair;
+        if (ArgumentTypeDescriptor.isCompressed(inputFile.toString()))
+            pair = createTabixIndexedFeatureSource(descriptor, name, inputFile);
+        else
+            pair = getFeatureSource(descriptor, name, inputFile, fileDescriptor.getStorageType());
+        if (pair == null) throw new UserException.CouldNotReadInputFile(inputFile, "Unable to make the feature reader for input file");
+
+        validateVariantAgainstSequenceDictionary(name, descriptor.getName(), pair.first, pair.second);
+
+        return new RMDTrack(descriptor.getCodecClass(), name, inputFile, pair.first, pair.second, genomeLocParser, createCodec(descriptor, name, inputFile));
+    }
+
+    /**
+     * Validate the VCF dictionary against the sequence dictionary.
+     *
+     * @param name      the name of this specific track
+     * @param descriptorName  the name of the feature
+     * @param reader    the feature reader to use as the underlying data source
+     * @param dict      the sam sequence dictionary
+     */
+    private void validateVariantAgainstSequenceDictionary(final String name, final String descriptorName, final AbstractFeatureReader reader, final SAMSequenceDictionary dict ) throws UserException {
+        // only process if the variant is a VCF
+        if ( name.equals("variant") && descriptorName.equals("VCF") ){
+            if ( reader != null && dict != null && reader.getHeader() != null ){
+                final List<VCFContigHeaderLine> contigs = ((VCFHeader) reader.getHeader()).getContigLines();
+                if (contigs != null) {
+                    // make the VCF dictionary from the contig header fields
+                    final List<SAMSequenceRecord> vcfContigRecords = new ArrayList<SAMSequenceRecord>();
+                    for (final VCFContigHeaderLine contig : contigs)
+                        vcfContigRecords.add(contig.getSAMSequenceRecord());
+
+                    // have VCF contig fields so can make a dictionary and compare it to the sequence dictionary
+                    if (!vcfContigRecords.isEmpty()) {
+                        final SAMSequenceDictionary vcfDictionary = new SAMSequenceDictionary(vcfContigRecords);
+                        final SAMSequenceDictionary sequenceDictionary = new SAMSequenceDictionary(dict.getSequences());
+
+                        SequenceDictionaryUtils.validateDictionaries(logger, validationExclusionType, name, vcfDictionary, "sequence", sequenceDictionary, false, null);
+                    }
+                }
+            }
+        }
+    }
+
+    /**
+     * Convenience method simplifying track creation.  Assume unnamed track based on a file rather than a stream.
+     * @param codecClass Type of Tribble codec class to build.
+     * @param inputFile Input file type to use.
+     * @return An RMDTrack, suitable for accessing reference metadata.
+     */
+    public RMDTrack createInstanceOfTrack(Class codecClass, File inputFile) {
+        final FeatureManager.FeatureDescriptor descriptor = getFeatureManager().getByCodec(codecClass);
+
+        if (descriptor == null)
+            throw new ReviewedGATKException("Unable to find type name for codec class " + codecClass.getName());
+
+        return createInstanceOfTrack(new RMDTriplet("anonymous",descriptor.getName(),inputFile.getAbsolutePath(),RMDStorageType.FILE,new Tags()));
+    }
+
+    /**
+     * create a feature reader, without assuming there exists an index.  This code assumes the feature
+     * reader of the appropriate type will figure out what the right index type is, and determine if it
+     * exists.
+     *
+     * @param descriptor the FeatureDescriptor describing the FeatureCodec we want to create
+     * @param name the name of the track
+     * @param inputFile the file to load
+     * @return a feature reader implementation
+     */
+    private Pair<AbstractFeatureReader, SAMSequenceDictionary> createTabixIndexedFeatureSource(FeatureManager.FeatureDescriptor descriptor, String name, File inputFile) {
+        // we might not know the index type, try loading with the default reader constructor
+        logger.debug("Attempting to load " + inputFile + " as a tabix indexed file without validating it");
+        try {
+            // getFeatureReader will detect that it's Tabix
+            return new Pair<>(AbstractFeatureReader.getFeatureReader(inputFile.getAbsolutePath(), createCodec(descriptor, name, inputFile)), null);
+        } catch (TribbleException e) {
+            throw new UserException(e.getMessage(), e);
+        }
+    }
+
+    /**
+     * add a name to the codec, if it takes one
+     * @param descriptor the class to create a codec for
+     * @param name the name to assign this codec
+     * @param inputFile input file that we will be decoding
+     * @return the feature codec itself
+     */
+    private FeatureCodec createCodec(final FeatureManager.FeatureDescriptor descriptor, final String name, final File inputFile) {
+        // The remappedSampleName will be null if either no on-the-fly sample renaming was requested,
+        // or the user's sample rename map file didn't contain an entry for this file:
+        final String remappedSampleName = sampleRenameMap != null ? sampleRenameMap.get(inputFile.getAbsolutePath()) : null;
+
+        return featureManager.createCodec(descriptor, name, genomeLocParser, remappedSampleName);
+    }
+
+    /**
+     * create a feature source object given:
+     * @param descriptor the FeatureDescriptor describing the FeatureCodec we want to create
+     * @param name the name of the codec
+     * @param inputFile the tribble file to parse
+     * @param storageType How the RMD is streamed into the input file.
+     * @return the input file as a FeatureReader
+     */
+    private Pair<AbstractFeatureReader, SAMSequenceDictionary> getFeatureSource(FeatureManager.FeatureDescriptor descriptor,
+                                                                        String name,
+                                                                        File inputFile,
+                                                                        RMDStorageType storageType) {
+        // Feature source and sequence dictionary to use as the ultimate reference
+        AbstractFeatureReader featureSource = null;
+        SAMSequenceDictionary sequenceDictionary = null;
+
+        // Detect whether or not this source should be indexed.
+        boolean canBeIndexed = (storageType == RMDStorageType.FILE);
+
+        if(canBeIndexed) {
+            try {
+                Index index = loadIndex(inputFile, createCodec(descriptor, name, inputFile));
+                try { logger.info(String.format("  Index for %s has size in bytes %d", inputFile, Sizeof.getObjectGraphSize(index))); }
+                catch (ReviewedGATKException e) { }
+
+                sequenceDictionary = IndexDictionaryUtils.getSequenceDictionaryFromProperties(index);
+
+                // if we don't have a dictionary in the Tribble file, and we've set a dictionary for this builder, set it in the file if they match
+                if (sequenceDictionary.isEmpty() && dict != null) {
+                    validateAndUpdateIndexSequenceDictionary(inputFile, index, dict);
+
+                    if ( ! disableAutoIndexCreation ) {
+                        File indexFile = Tribble.indexFile(inputFile);
+                        try { // re-write the index
+                            writeIndexToDisk(index,indexFile,new FSLockWithShared(indexFile));
+                        } catch (IOException e) {
+                            logger.warn("Unable to update index with the sequence dictionary for file " + indexFile + "; this will not affect your run of the GATK");
+                        }
+                    }
+
+                    sequenceDictionary = IndexDictionaryUtils.getSequenceDictionaryFromProperties(index);
+                }
+
+                featureSource = AbstractFeatureReader.getFeatureReader(inputFile.getAbsolutePath(), createCodec(descriptor, name, inputFile), index);
+            }
+            catch (TribbleException e) {
+                throw new UserException(e.getMessage());
+            }
+            catch (IOException e) {
+                throw new UserException("I/O error loading or writing tribble index file for " + inputFile.getAbsolutePath(), e);
+            }
+        }
+        else {
+            featureSource = AbstractFeatureReader.getFeatureReader(inputFile.getAbsolutePath(), createCodec(descriptor, name, inputFile), false);
+        }
+
+        return new Pair<AbstractFeatureReader,SAMSequenceDictionary>(featureSource,sequenceDictionary);
+    }
+
+    /**
+     * create an index for the input file
+     * @param inputFile the input file
+     * @param codec the codec to use
+     * @return a linear index for the specified type
+     * @throws IOException if we cannot write the index file
+     */
+    public synchronized Index loadIndex( final File inputFile, final FeatureCodec codec) throws IOException {
+        final File indexFile = Tribble.indexFile(inputFile);
+        final FSLockWithShared lock = new FSLockWithShared(indexFile);
+        Index idx = null;
+
+        // If the index file exists and is readable, attempt to load it from disk. We'll get null back
+        // if a problem was discovered with the index file when it was inspected, and we'll get an
+        // in-memory index back in the case where the index file could not be locked.
+        if (indexFile.canRead()) {
+            idx = disableAutoIndexCreation ? loadFromDisk(inputFile, indexFile)  // load without locking if we're in disableAutoIndexCreation mode
+                                           : attemptToLockAndLoadIndexFromDisk(inputFile, codec, indexFile, lock);
+        }
+
+        // If we have an index, it means we either loaded it from disk without issue or we created an in-memory
+        // index due to not being able to acquire a lock.
+        if (idx != null) return idx;
+
+        // We couldn't read the file, or we discovered a problem with the index file, so continue on to making a new index
+        idx = createIndexInMemory(inputFile, codec);
+        if ( ! disableAutoIndexCreation ) {
+            writeIndexToDisk(idx, indexFile, lock);
+        }
+        return idx;
+    }
+
+    /**
+     * Attempt to acquire a shared lock and then load the index from disk. Returns an in-memory index if
+     * a lock could not be obtained. Returns null if a problem was discovered with the index file when it
+     * was examined (eg., it was out-of-date).
+     *
+     * @param inputFile the input file
+     * @param codec the codec to read from
+     * @param indexFile the index file itself
+     * @param lock the lock file
+     * @return an index, or null if we couldn't load one
+     * @throws IOException if we fail for FS issues
+     */
+    protected Index attemptToLockAndLoadIndexFromDisk( final File inputFile, final FeatureCodec codec, final File indexFile, final FSLockWithShared lock ) throws IOException {
+        boolean locked = false;
+        Index idx = null;
+
+        try {
+            locked = lock.sharedLock();
+
+            if ( ! locked ) { // can't lock file
+                logger.info(String.format("Could not acquire a shared lock on index file %s, falling back to using an in-memory index for this GATK run.",
+                                          indexFile.getAbsolutePath()));
+                idx = createIndexInMemory(inputFile, codec);
+            }
+            else {
+                idx = loadFromDisk(inputFile, indexFile);
+            }
+        } finally {
+            if (locked) lock.unlock();
+        }
+        return idx;
+    }
+
+    /**
+     * load the index from disk, checking for out of date indexes and old versions (both of which are deleted)
+     * @param inputFile the input file
+     * @param indexFile the input file, plus the index extension
+     * @return an Index, or null if we're unable to load
+     */
+    protected Index loadFromDisk( final File inputFile, final File indexFile ) {
+        logger.debug("Loading Tribble index from disk for file " + inputFile);
+        Index index = IndexFactory.loadIndex(indexFile.getAbsolutePath());
+
+        // check if the file is up-to date (filestamp and version check)
+        if (index.isCurrentVersion() && indexFile.lastModified() >= inputFile.lastModified())
+            return index;
+        else if (indexFile.lastModified() < inputFile.lastModified())
+            logger.warn("Index file " + indexFile + " is out of date (index older than input file), " +
+                        (disableAutoIndexCreation ? "falling back to an in-memory index" : "deleting and updating the index file"));
+        else // we've loaded an old version of the index, we want to remove it <-- currently not used, but may re-enable
+            logger.warn("Index file " + indexFile + " is out of date (old version), " +
+                        (disableAutoIndexCreation ? "falling back to an in-memory index" : "deleting and updating the index file"));
+
+        if ( ! disableAutoIndexCreation ) {
+            boolean deleted = indexFile.delete();
+            if (!deleted) logger.warn("Index file " + indexFile + " is out of date, but could not be removed; it will not be trusted (we'll try to rebuild an in-memory copy)");
+        }
+
+        return null;
+    }
+
+
+    /**
+     * attempt to write the index to disk
+     * @param index the index to write to disk
+     * @param indexFile the index file location
+     * @param lock the locking object
+     * @throws IOException when unable to create the new index
+     */
+    private void writeIndexToDisk( final Index index, final File indexFile, final FSLockWithShared lock ) throws IOException {
+        if ( disableAutoIndexCreation ) {
+            return;
+        }
+
+        boolean locked = false;
+
+        try {
+            locked = lock.exclusiveLock();
+
+            if (locked) {
+                logger.info("Writing Tribble index to disk for file " + indexFile);
+                LittleEndianOutputStream stream = new LittleEndianOutputStream(new FileOutputStream(indexFile));
+                index.write(stream);
+                stream.close();
+            }
+            else // we can't write it to disk, just store it in memory, tell them this
+                logger.warn("Unable to write to " + indexFile + " for the index file, creating index in memory only");
+
+            try { logger.info(String.format("  Index for %s has size in bytes %d", indexFile, Sizeof.getObjectGraphSize(index))); }
+            catch ( ReviewedGATKException e) { }
+        }
+        finally {
+            if (locked) lock.unlock();
+        }
+
+    }
+
+    /**
+     * create the index in memory, given the input file and feature codec
+     * @param inputFile the input file
+     * @param codec the codec
+     * @return a LinearIndex, given the file location
+     * @throws IOException when unable to create the index in memory
+     */
+    protected Index createIndexInMemory(File inputFile, FeatureCodec codec) {
+        // this can take a while, let them know what we're doing
+        logger.debug("Creating Tribble index in memory for file " + inputFile);
+        Index idx = IndexFactory.createDynamicIndex(inputFile, codec, IndexFactory.IndexBalanceApproach.FOR_SEEK_TIME);
+        validateAndUpdateIndexSequenceDictionary(inputFile, idx, dict);
+        return idx;
+    }
+
+    /**
+     * set the sequence dictionary of the track.  This function checks that the contig listing of the underlying file is compatible.
+     * (that each contig in the index is in the sequence dictionary).
+     * @param inputFile for proper error message formatting.
+     * @param dict the sequence dictionary
+     * @param index the index file
+     */
+    public void validateAndUpdateIndexSequenceDictionary(final File inputFile, final Index index, final SAMSequenceDictionary dict) {
+        if (dict == null) throw new ReviewedGATKException("BUG: dict cannot be null");
+
+        // check that every contig in the RMD contig list is at least in the sequence dictionary we're being asked to set
+        final SAMSequenceDictionary currentDict = IndexDictionaryUtils.createSequenceDictionaryFromContigList(index, new SAMSequenceDictionary());
+        validateTrackSequenceDictionary(inputFile.getAbsolutePath(), currentDict, dict);
+
+        // actually update the dictionary in the index
+        IndexDictionaryUtils.setIndexSequenceDictionary(index, dict);
+    }
+
+    public void validateTrackSequenceDictionary(final String trackName,
+                                                final SAMSequenceDictionary trackDict,
+                                                final SAMSequenceDictionary referenceDict ) {
+        IndexDictionaryUtils.validateTrackSequenceDictionary(trackName, trackDict, referenceDict, validationExclusionType);
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/refdata/utils/FeatureToGATKFeatureIterator.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/refdata/utils/FeatureToGATKFeatureIterator.java
new file mode 100644
index 0000000..743ee95
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/refdata/utils/FeatureToGATKFeatureIterator.java
@@ -0,0 +1,74 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.refdata.utils;
+
+import htsjdk.samtools.util.CloseableIterator;
+import htsjdk.tribble.CloseableTribbleIterator;
+import htsjdk.tribble.Feature;
+import org.broadinstitute.gatk.utils.GenomeLocParser;
+
+
+/**
+ * Class FeatureToGATKFeatureIterator
+ *
+ * Adapts a Tribble feature iterator into an iterator of GATKFeatures (which carry
+ * GenomeLocs), tagging each emitted feature with the owning track's name.
+ *
+ * @author aaron
+ */
+public class FeatureToGATKFeatureIterator implements CloseableIterator<GATKFeature> {
+    private final GenomeLocParser genomeLocParser;
+    private final CloseableTribbleIterator<Feature> iterator;
+    private final String name;
+
+    public FeatureToGATKFeatureIterator(GenomeLocParser genomeLocParser,CloseableTribbleIterator<Feature> iter, String name) {
+        this.iterator = iter;
+        this.genomeLocParser = genomeLocParser;
+        this.name = name;
+    }
+
+    /** @return true while the wrapped Tribble iterator has more features */
+    @Override
+    public boolean hasNext() {
+        return iterator.hasNext();
+    }
+
+    /** @return the next Tribble feature, wrapped as a GATKFeature tagged with our track name */
+    @Override
+    public GATKFeature next() {
+        final Feature feature = iterator.next();
+        return new GATKFeature.TribbleGATKFeature(genomeLocParser, feature, name);
+    }
+
+    /** Removal is never supported by this adapter. */
+    @Override
+    public void remove() {
+        throw new UnsupportedOperationException("Why does Iterator have this method? We always throw an exception here");
+    }
+
+    /** Close the wrapped Tribble iterator. */
+    @Override
+    public void close() {
+        // The private adapted iterator may not be passed on by the method constructing this object,
+        // leaving only this adapter to close the wrapped iterator.
+        iterator.close();
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/refdata/utils/FlashBackIterator.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/refdata/utils/FlashBackIterator.java
new file mode 100644
index 0000000..4f50460
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/refdata/utils/FlashBackIterator.java
@@ -0,0 +1,221 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.refdata.utils;
+
+import htsjdk.samtools.SAMSequenceDictionary;
+import org.broadinstitute.gatk.utils.GenomeLoc;
+import org.broadinstitute.gatk.utils.HasGenomeLocation;
+
+import java.util.Comparator;
+import java.util.LinkedList;
+
+
+/**
+ * 
+ * @author aaron 
+ * 
+ * Class FlashBackIterator
+ *
+ * better than acid washed jeans...more like a Delorean that flies through time
+ *
+ * This iterator buffers a certain amount of ROD data to 'flash back' to.  This
+ * is needed for using ROD's in read traversals, because between shards we sometimes
+ * (actually often) need to go back to before the current iterators location and
+ * get RODs that overlap the current read.
+ */
+public class FlashBackIterator implements LocationAwareSeekableRODIterator {
+    private LocationAwareSeekableRODIterator iterator;
+    private LinkedList<ComparableList> pastQueue = new LinkedList<ComparableList>();
+    private LinkedList<ComparableList> aheadQueue = new LinkedList<ComparableList>();
+    private int MAX_QUEUE = 200;
+
+    /**
+     * create a flashback iterator
+     * @param iterator given a LocationAwareSeekableRODIterator
+     */
+    public FlashBackIterator(LocationAwareSeekableRODIterator iterator) {
+        this.iterator = iterator;
+    }
+
+    /**
+     * Gets the header associated with the backing input stream.
+     * @return the ROD header.
+     */
+    @Override
+    public Object getHeader() {
+        return iterator.getHeader();
+    }
+
+    /**
+     * Gets the sequence dictionary associated with the backing input stream.
+     * @return sequence dictionary from the ROD header.
+     */
+    @Override
+    public SAMSequenceDictionary getSequenceDictionary() {
+        return iterator.getSequenceDictionary();
+    }
+
+
+    /**
+     * peek at the next location
+     * @return
+     */
+    @Override
+    public GenomeLoc peekNextLocation() {
+        return (aheadQueue.size() > 0) ? aheadQueue.getFirst().getLocation() : iterator.peekNextLocation();
+    }
+
+    /**
+     * get the position of this iterator
+     * @return
+     */
+    @Override
+    public GenomeLoc position() {
+        return (aheadQueue.size() > 0) ? aheadQueue.getFirst().getLocation() : iterator.position();
+    }
+
+    /**
+     * seek forward on the iterator
+     * @param interval the interval to seek to
+     * @return a RODRecordList at that location, null otherwise
+     */
+    @Override
+    public RODRecordList seekForward(GenomeLoc interval) {
+
+        RODRecordList lt = iterator.seekForward(interval);
+        createPastRecord(lt);
+        return lt;
+    }
+
+    /**
+     * do we have a next record
+     * @return true if we have another record
+     */
+    @Override
+    public boolean hasNext() {
+        return (aheadQueue.size() > 0 ||  iterator.hasNext());
+    }
+
+    /**
+     * get the next record
+     * @return a RODRecordList
+     */
+    @Override
+    public RODRecordList next() {
+        return getNext();
+    }
+
+    /**
+     * we don't support remove
+     */
+    @Override
+    public void remove() {
+        throw new UnsupportedOperationException("We don't support remove");
+    }
+
+    /**
+     * get the next record, either from the queue or from the iterator
+     * @return a RODRecordList
+     */
+    private RODRecordList getNext() {
+        if (aheadQueue.size() > 0) {
+            RODRecordList ret = aheadQueue.getFirst().getList();
+            aheadQueue.removeFirst();
+            return ret;
+        } else {
+            RODRecordList ret = iterator.next();
+            createPastRecord(ret);
+            return ret;
+        }
+    }
+
+    private void createPastRecord(RODRecordList ret) {
+        ComparableList rec = new ComparableList(ret);
+        if (rec.getLocation() != null) pastQueue.addLast(new ComparableList(ret));
+        if (pastQueue.size() > this.MAX_QUEUE) pastQueue.removeFirst();
+    }
+
+    /**
+     * can we flash back to the specified location?
+     *
+     * @param location the location to try and flash back to
+     *
+     * @return true if we can, false otherwise
+     */
+    public boolean canFlashBackTo(GenomeLoc location) {
+        GenomeLoc farthestBack = (pastQueue.size() > 0) ? pastQueue.getFirst().getLocation() : iterator.peekNextLocation();
+        return (!farthestBack.isPast(location));
+    }
+
+    /**
+     * flashback! Throws an unsupported operation exception
+     *
+     * @param location where to flash back to
+     */
+    public void flashBackTo(GenomeLoc location) {
+        if (!canFlashBackTo(location)) throw new UnsupportedOperationException("we can't flash back to " + location);
+        if (pastQueue.size()==0) return; // the iterator can do it alone
+        while (pastQueue.size() > 0 && !pastQueue.getLast().getLocation().isBefore(location)) {
+            aheadQueue.addFirst(pastQueue.getLast());
+            pastQueue.removeLast();
+        }
+    }
+
+    public void close() {
+        this.aheadQueue.clear();
+        this.pastQueue.clear();
+    }
+}
+
+/**
+ * a list that buffers the location for this rod
+ *
+ * Immutable pairing of a RODRecordList with its (possibly null) location, so
+ * FlashBackIterator can keep position-aware history queues without re-deriving
+ * the location each time.
+ */
+class ComparableList implements Comparator<ComparableList>, HasGenomeLocation {
+    private final RODRecordList list;
+    private final GenomeLoc location;
+
+    public ComparableList(RODRecordList list) {
+        this.list = list;
+        // a null or empty record list has no meaningful position
+        this.location = (list != null && list.size() != 0) ? list.getLocation() : null;
+    }
+
+    /**
+     * Order by location; entries without a location sort after those with one.
+     */
+    @Override
+    public int compare(ComparableList list1, ComparableList list2) {
+        if (list1.location == null && list2.location == null)
+            return 0;
+        if (list1.location == null) return 1;
+        if (list2.location == null) return -1;
+        return (list1.location.compareTo(list2.location));
+    }
+
+    /** @return the buffered location, or null if the wrapped list was null/empty */
+    public GenomeLoc getLocation() {
+        return location;
+    }
+
+    /** @return the wrapped record list (may be null) */
+    public RODRecordList getList() {
+        return list;
+    }
+}
\ No newline at end of file
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/refdata/utils/GATKFeature.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/refdata/utils/GATKFeature.java
new file mode 100644
index 0000000..4f947d1
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/refdata/utils/GATKFeature.java
@@ -0,0 +1,114 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.refdata.utils;
+
+import htsjdk.tribble.Feature;
+import org.broadinstitute.gatk.utils.GenomeLoc;
+import org.broadinstitute.gatk.utils.GenomeLocParser;
+import org.broadinstitute.gatk.utils.HasGenomeLocation;
+
+
+/**
+ * 
+ * @author aaron 
+ * 
+ * Class GATKFeature
+ *
+ * This wraps a Tribble feature or a RODatum so that both present the same interface: a genome loc for position and a
+ * way of retrieving the track name.
+ */
+public abstract class GATKFeature implements Feature, HasGenomeLocation {
+
+    public GATKFeature(String name) {
+        this.name = name;
+    }
+
+    // the track name this feature belongs to; package-private, mutable via setName()
+    String name;
+
+    protected void setName(String name) {
+        this.name = name;
+    }
+
+    /** @return the name of the track this feature was loaded from */
+    public String getName() {
+        return name;
+    }
+
+    /** @return the position of this feature as a GenomeLoc */
+    public abstract GenomeLoc getLocation();
+
+    // TODO: this should be a Feature
+    /** @return the wrapped object backing this feature (currently untyped) */
+    public abstract Object getUnderlyingObject();
+
+    /**
+     * wrapping a Tribble feature in a GATK friendly interface
+     */
+    public static class TribbleGATKFeature extends GATKFeature {
+        private final GenomeLocParser genomeLocParser;   // used to build the GenomeLoc lazily
+        private final Feature feature;                   // the wrapped Tribble feature
+        private GenomeLoc position = null;               // lazily computed, cached location
+        
+        public TribbleGATKFeature(GenomeLocParser genomeLocParser,Feature f, String name) {
+            super(name);
+            this.genomeLocParser = genomeLocParser;
+            feature = f;
+        }
+        /** Lazily creates (and caches) the GenomeLoc spanning the wrapped feature. */
+        public GenomeLoc getLocation() {
+            if (position == null) position = genomeLocParser.createGenomeLoc(feature.getChr(), feature.getStart(), feature.getEnd());
+            return position;
+        }
+
+        /** Return the features reference sequence name, e.g chromosome or contig */
+        @Override
+        public String getChr() {
+            return getContig();
+        }
+
+        /** Return the features reference sequence name, e.g chromosome or contig */
+        @Override
+        public String getContig() {
+            return feature.getContig();
+        }
+
+        /** Return the start position in 1-based coordinates (first base is 1) */
+        @Override
+        public int getStart() {
+            return feature.getStart();
+        }
+
+        /**
+         * Return the end position following 1-based fully closed conventions.  The length of a feature is
+         * end - start + 1;
+         */
+        @Override
+        public int getEnd() {
+            return feature.getEnd();
+        }
+
+        // TODO: this should be a Feature, actually
+        /** @return the wrapped Tribble feature */
+        public Object getUnderlyingObject() {
+            return feature;
+        }
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/refdata/utils/LocationAwareSeekableRODIterator.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/refdata/utils/LocationAwareSeekableRODIterator.java
new file mode 100644
index 0000000..0bee072
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/refdata/utils/LocationAwareSeekableRODIterator.java
@@ -0,0 +1,49 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.refdata.utils;
+
+import htsjdk.samtools.SAMSequenceDictionary;
+import htsjdk.samtools.util.CloseableIterator;
+import org.broadinstitute.gatk.utils.GenomeLoc;
+
+/**
+ * @author aaron
+ *         <p/>
+ *         Interface LocationAwareSeekableRODIterator
+ *         <p/>
+ *         combine iteration with a position aware interface
+ *
+ * NOTE(review): per-method semantics below are inferred from the method names —
+ * confirm against the implementations.
+ */
+public interface LocationAwareSeekableRODIterator extends CloseableIterator<RODRecordList> {
+    /** @return the header object associated with the underlying data source */
+    public Object getHeader();
+
+    /** @return the sequence dictionary describing the contigs this iterator traverses */
+    public SAMSequenceDictionary getSequenceDictionary();
+
+    /** @return the location of the next record, presumably without advancing the iterator */
+    public GenomeLoc peekNextLocation();
+
+    /** @return the current position of the iterator */
+    public GenomeLoc position();
+
+    /** Seeks ahead to the given interval and returns the records there. */
+    public RODRecordList seekForward(GenomeLoc interval);    
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/refdata/utils/RMDTriplet.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/refdata/utils/RMDTriplet.java
new file mode 100644
index 0000000..dc35f7e
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/refdata/utils/RMDTriplet.java
@@ -0,0 +1,92 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.refdata.utils;
+
+
+import org.broadinstitute.gatk.utils.commandline.Tags;
+
+/**
+ * a helper class to manage our triplets of data for the -B command line option (name, type, file)
+ * TODO: The presence of four datapoints here suggests that this class' name isn't sufficient to describe its function.  Rename.
+ */
+public class RMDTriplet {
+    /** How the metadata is stored: a FILE can be indexed; a STREAM cannot. */
+    public enum RMDStorageType { FILE, STREAM };
+
+    private final String name;                // track name used for lookups
+    private final String type;                // file type; tells the GATK how to parse it
+    private final String file;                // path the data is loaded from
+    private final RMDStorageType storageType; // FILE (indexable) or STREAM (not indexable)
+    private final Tags tags;                  // key=value tags associated with this track
+
+    public RMDTriplet(final String name, final String type, final String file, final RMDStorageType storageType, final Tags tags) {
+        this.name = name;
+        this.type = type;
+        this.file = file;
+        this.storageType = storageType;
+        this.tags = tags;
+    }
+
+    /**
+     * Gets the name of this track.  RefMetaDataTrackers can use this identifier to retrieve data of a certain type.
+     * @return Name associated with this track.
+     */
+    public String getName() {
+        return name;
+    }
+
+    /**
+     * Gets the type of this track.  Informs the GATK how to parse this file type.
+     * @return Type associated with this track.
+     */
+    public String getType() {
+        return type;
+    }
+
+    /**
+     * Gets the filename representing this track.  Data is loaded from this file.
+     * @return Filename of the RMD.
+     */
+    public String getFile() {
+        return file;
+    }
+
+    /**
+     * The type of storage being used for this metadata track.  Right now, can be either a
+     * file type (can be indexed) or a stream type (can't be indexed).
+     * @return Storage type for this RMD 'triplet'.
+     */
+    public RMDStorageType getStorageType() {
+        return storageType;
+    }
+
+    /**
+     * Gets the key=value tags associated with this track
+     * @return Tags associated with this track.
+     */
+    public Tags getTags() {
+        return tags;
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/refdata/utils/RODRecordList.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/refdata/utils/RODRecordList.java
new file mode 100644
index 0000000..4bba78d
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/refdata/utils/RODRecordList.java
@@ -0,0 +1,45 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.refdata.utils;
+
+import org.broadinstitute.gatk.utils.GenomeLoc;
+import org.broadinstitute.gatk.utils.HasGenomeLocation;
+
+import java.util.List;
+
+
+/**
+ * @author aaron
+ *         <p/>
+ *         Class RODRecordList
+ *         <p/>
+ *         make the RODRecord list an interface, so we can stub in other implementations
+ *         during testing.
+ */
+public interface RODRecordList extends List<GATKFeature>, Comparable<RODRecordList>, HasGenomeLocation {
+    /** @return the genomic location associated with this list of records */
+    public GenomeLoc getLocation();
+    /** @return the name of the track these records came from */
+    public String getName();
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/report/GATKReport.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/report/GATKReport.java
new file mode 100644
index 0000000..70a7c6a
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/report/GATKReport.java
@@ -0,0 +1,376 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.report;
+
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+import org.broadinstitute.gatk.utils.exceptions.UserException;
+
+import java.io.*;
+import java.util.Collection;
+import java.util.List;
+import java.util.Map;
+import java.util.TreeMap;
+
+/**
+ * Container class for GATK report tables
+ *
+ * A GATKReport is a collection of {@link GATKReportTable}s keyed (and sorted) by table
+ * name, which can be loaded from and printed in the "#:GATKReport." text format.
+ */
+public class GATKReport {
+    public static final String GATKREPORT_HEADER_PREFIX = "#:GATKReport.";
+    public static final GATKReportVersion LATEST_REPORT_VERSION = GATKReportVersion.V1_1;
+    private static final String SEPARATOR = ":";
+    private GATKReportVersion version = LATEST_REPORT_VERSION;  // overwritten from the header when a report is loaded
+
+    // tables keyed by name; TreeMap keeps them sorted and supports firstEntry() in addRow()
+    private final TreeMap<String, GATKReportTable> tables = new TreeMap<String, GATKReportTable>();
+
+    /**
+     * Create a new, empty GATKReport.
+     */
+    public GATKReport() {
+    }
+
+    /**
+     * Create a new GATKReport with the contents of a GATKReport on disk.
+     *
+     * @param filename the path to the file to load
+     */
+    public GATKReport(String filename) {
+        this(new File(filename));
+    }
+
+    /**
+     * Create a new GATKReport with the contents of a GATKReport on disk.
+     *
+     * @param file the file to load
+     */
+    public GATKReport(File file) {
+        loadReport(file);
+    }
+
+    /**
+     * Create a new GATK report from GATK report tables
+     * @param tables Any number of tables that you want to add to the report
+     */
+    public GATKReport(GATKReportTable... tables) {
+        for( GATKReportTable table: tables)
+            addTable(table);
+    }
+
+    /**
+     * Load a GATKReport file from disk
+     *
+     * NOTE(review): the reader is never closed, leaking a file handle per load;
+     * consider try-with-resources.
+     *
+     * @param file the file to load
+     */
+    private void loadReport(File file) {
+        BufferedReader reader;
+        String reportHeader;
+        try {
+            reader = new BufferedReader(new FileReader(file));
+            reportHeader = reader.readLine();
+        } catch (FileNotFoundException e) {
+            throw new UserException.CouldNotReadInputFile(file, "it does not exist");
+        } catch (IOException e) { 
+            throw new UserException.CouldNotReadInputFile(file, e);
+        }   
+
+
+        // Read the first line for the version and number of tables.
+        version = GATKReportVersion.fromHeader(reportHeader);
+        if (version.equals(GATKReportVersion.V0_1) ||
+                version.equals(GATKReportVersion.V0_2))
+            throw new UserException("The GATK no longer supports reading legacy GATK Reports. Please use v1.0 or newer.");
+
+        // header format is "#:GATKReport.<version>:<nTables>" — the third ':'-separated field is the table count
+        int nTables = Integer.parseInt(reportHeader.split(":")[2]);
+
+        // Read each table according to the number of tables declared in the header
+        for (int i = 0; i < nTables; i++) {
+            addTable(new GATKReportTable(reader, version));
+        }
+    }
+
+    /**
+     * Add a new, empty table to the report
+     *
+     * @param tableName        the name of the table
+     * @param tableDescription the description of the table
+     * @param numColumns       the number of columns in this table
+     */
+    public void addTable(final String tableName, final String tableDescription, final int numColumns) {
+        addTable(tableName, tableDescription, numColumns, GATKReportTable.TableSortingWay.DO_NOT_SORT);
+    }
+
+    /**
+     * Add a new, empty table to the report
+     *
+     * @param tableName        the name of the table
+     * @param tableDescription the description of the table
+     * @param numColumns       the number of columns in this table
+     * @param sortingWay       way to sort table
+     */
+    public void addTable(final String tableName, final String tableDescription, final int numColumns, final GATKReportTable.TableSortingWay sortingWay) {
+        GATKReportTable table = new GATKReportTable(tableName, tableDescription, numColumns, sortingWay);
+        tables.put(tableName, table);
+    }
+
+    /**
+     * Adds a table, empty or populated, to the report
+     *
+     * Note: a table with the same name as an existing one silently replaces it.
+     *
+     * @param table the table to add
+     */
+    public void addTable(GATKReportTable table) {
+        tables.put(table.getTableName(), table);
+    }
+
+    /** Adds every table in the given list to this report. */
+    public void addTables(List<GATKReportTable> gatkReportTableV2s) {
+        for ( GATKReportTable table : gatkReportTableV2s )
+            addTable(table);
+    }
+
+    /**
+     * Return true if table with a given name exists
+     *
+     * @param tableName the name of the table
+     * @return true if the table exists, false otherwise
+     */
+    public boolean hasTable(String tableName) {
+        return tables.containsKey(tableName);
+    }
+
+    /**
+     * Return a table with a given name
+     *
+     * @param tableName the name of the table
+     * @return the table object
+     * @throws ReviewedGATKException if no table with that name exists
+     */
+    public GATKReportTable getTable(String tableName) {
+        GATKReportTable table = tables.get(tableName);
+        if (table == null)
+            throw new ReviewedGATKException("Table is not in GATKReport: " + tableName);
+        return table;
+    }
+
+    /**
+     * Print all tables contained within this container to a PrintStream
+     *
+     * @param out the PrintStream to which the tables should be written
+     */
+    public void print(PrintStream out) {
+        // header line: "#:GATKReport.<version>:<nTables>"
+        out.println(GATKREPORT_HEADER_PREFIX + getVersion().toString() + SEPARATOR + getTables().size());
+        for (GATKReportTable table : tables.values())
+            table.write(out);
+    }
+
+    /** @return all tables in this report, in name-sorted order */
+    public Collection<GATKReportTable> getTables() {
+        return tables.values();
+    }
+
+    /**
+     * This is the main function is charge of gathering the reports. It checks that the reports are compatible and then
+     * calls the table gathering functions.
+     *
+     * @param input another GATKReport of the same format
+     * @throws ReviewedGATKException if the two reports do not share the same format
+     */
+    public void concat(GATKReport input) {
+
+        if ( !isSameFormat(input) ) {
+            throw new ReviewedGATKException("Failed to combine GATKReport, format doesn't match!");
+        }
+
+        for ( Map.Entry<String, GATKReportTable> table : tables.entrySet() ) {
+            table.getValue().concat(input.getTable(table.getKey()));
+        }
+    }
+
+    /** @return the report format version (set from the header when loaded from disk) */
+    public GATKReportVersion getVersion() {
+        return version;
+    }
+
+    /**
+     * Returns whether or not the two reports have the same format, from columns, to tables, to reports, and everything
+     * in between. This does not check if the data inside is the same. This is the check to see if the two reports are
+     * gatherable or reduceable.
+     *
+     * @param report another GATK report
+     * @return true if the the reports are gatherable
+     */
+    public boolean isSameFormat(GATKReport report) {
+        if (!version.equals(report.version)) {
+            return false;
+        }
+        if (!tables.keySet().equals(report.tables.keySet())) {
+            return false;
+        }
+        for (String tableName : tables.keySet()) {
+            if (!getTable(tableName).isSameFormat(report.getTable(tableName)))
+                return false;
+        }
+        return true;
+    }
+
+    /**
+     * Checks that the reports are exactly the same.
+     *
+     * NOTE(review): this OVERLOADS equals with a GATKReport parameter rather than
+     * overriding Object.equals(Object), and no matching hashCode is defined.
+     *
+     * @param report another GATK report
+     * @return true if all field in the reports, tables, and columns are equal.
+     */
+    public boolean equals(GATKReport report) {
+        if (!version.equals(report.version)) {
+            return false;
+        }
+        if (!tables.keySet().equals(report.tables.keySet())) {
+            return false;
+        }
+        for (String tableName : tables.keySet()) {
+            if (!getTable(tableName).equals(report.getTable(tableName)))
+                return false;
+        }
+        return true;
+    }
+
+    /**
+     * The constructor for a simplified GATK Report. Simplified GATK report are designed for reports that do not need
+     * the advanced functionality of a full GATK Report.
+     * <p/>
+     * A simple GATK Report consists of:
+     * <p/>
+     * - A single table
+     * - No primary key ( it is hidden )
+     * <p/>
+     * Optional:
+     * - Only untyped columns. As long as the data is an Object, it will be accepted.
+     * - Default column values being empty strings.
+     * <p/>
+     * Limitations:
+     * <p/>
+     * - A simple GATK report cannot contain multiple tables.
+     * - It cannot contain typed columns, which prevents arithmetic gathering.
+     *
+     * @param tableName The name of your simple GATK report table
+     * @param columns   The names of the columns in your table
+     * @return a simplified GATK report
+     */
+    public static GATKReport newSimpleReport(final String tableName, final String... columns) {
+        return newSimpleReportWithDescription(tableName, "A simplified GATK table report", columns);
+    }
+
+    /**
+     * @see #newSimpleReport(String, String...) but with a customized description
+     * @param tableName the name of the single table in the report
+     * @param desc      the table description to use instead of the default
+     * @param columns   the names of the (untyped) columns in the table
+     * @return a simplified GATK report containing one table with the given columns
+     */
+    public static GATKReport newSimpleReportWithDescription(final String tableName, final String desc, final String... columns) {
+        GATKReportTable table = new GATKReportTable(tableName, desc, columns.length);
+
+        for (String column : columns) {
+            table.addColumn(column, "");
+        }
+
+        GATKReport output = new GATKReport();
+        output.addTable(table);
+
+        return output;
+    }
+
+    /**
+     * The constructor for a simplified GATK Report. Simplified GATK report are designed for reports that do not need
+     * the advanced functionality of a full GATK Report.
+     * <p/>
+     * A simple GATK Report consists of:
+     * <p/>
+     * - A single table
+     * - No primary key ( it is hidden )
+     * <p/>
+     * Optional:
+     * - Only untyped columns. As long as the data is an Object, it will be accepted.
+     * - Default column values being empty strings.
+     * <p/>
+     * Limitations:
+     * <p/>
+     * - A simple GATK report cannot contain multiple tables.
+     * - It cannot contain typed columns, which prevents arithmetic gathering.
+     *
+     * @param tableName The name of your simple GATK report table
+     * @param columns   The names of the columns in your table
+     * @return a simplified GATK report
+     */
+    public static GATKReport newSimpleReport(final String tableName, final List<String> columns) {
+        GATKReportTable table = new GATKReportTable(tableName, "A simplified GATK table report", columns.size());
+
+        for (String column : columns) {
+            table.addColumn(column, "");
+        }
+
+        GATKReport output = new GATKReport();
+        output.addTable(table);
+
+        return output;
+    }
+
+    /**
+     * This method provides an efficient way to populate a simplified GATK report. This method will only work on reports
+     * that qualify as simplified GATK reports. See the newSimpleReport() constructor for more information.
+     *
+     * @param values     the row of data to be added to the table.
+     *               Note: the number of arguments must match the columns in the table.
+     * @throws ReviewedGATKException if the report has more than one table or the argument count mismatches
+     */
+    public void addRow(final Object... values) {
+        // Must be a simple report
+        if ( tables.size() != 1 )
+            throw new ReviewedGATKException("Cannot write a row to a complex GATK Report");
+
+        GATKReportTable table = tables.firstEntry().getValue();
+        if ( table.getNumColumns() != values.length )
+            throw new ReviewedGATKException("The number of arguments in writeRow (" + values.length + ") must match the number of columns in the table (" + table.getNumColumns() + ")" );
+
+        // append the values as a new row at the current end of the table
+        final int rowIndex = table.getNumRows();
+        for ( int i = 0; i < values.length; i++ )
+            table.set(rowIndex, i, values[i]);
+    }
+
+    /**
+     * This method provides an efficient way to populate a simplified GATK report. This method will only work on reports
+     * that qualify as simplified GATK reports. See the newSimpleReport() constructor for more information.
+     *
+     * @param values     the row of data to be added to the table.
+     *               Note: the number of arguments must match the columns in the table.
+     * @throws ReviewedGATKException if the report has more than one table or the value count mismatches
+     */
+    public void addRowList(final List<Object> values) {
+        if ( tables.size() != 1 )
+            throw new ReviewedGATKException("Cannot write a row to a complex GATK Report");
+
+        GATKReportTable table = tables.firstEntry().getValue();
+        if ( table.getNumColumns() != values.size() )
+            throw new ReviewedGATKException("The number of arguments in writeRow() must match the number of columns in the table");
+
+        // append the values as a new row at the current end of the table
+        final int rowIndex = table.getNumRows();
+        int idx = 0;
+        for ( Object value : values ) {
+            table.set(rowIndex,idx,value);
+            idx++;
+        }
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/report/GATKReportColumn.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/report/GATKReportColumn.java
new file mode 100644
index 0000000..0dbeb3b
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/report/GATKReportColumn.java
@@ -0,0 +1,147 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.report;
+
+import org.apache.commons.lang.math.NumberUtils;
+
+import java.util.Arrays;
+import java.util.Collection;
+
+/**
+ * column information within a GATK report table
+ *
+ * Tracks the column's name, printf-style format string, inferred data type, and the
+ * display width/alignment accumulated from the values formatted through it.
+ */
+public class GATKReportColumn {
+    final private String columnName;
+    final private String format;                 // printf-style format applied to values ("%s" when unknown)
+    final private GATKReportDataType dataType;   // inferred from the format string, or Unknown
+
+    private GATKReportColumnFormat columnFormat; // lazily built from maxWidth + alignment, then cached
+    private GATKReportColumnFormat.Alignment alignment = GATKReportColumnFormat.Alignment.RIGHT;  // default alignment is to the right unless values added ask for a left alignment
+    private int maxWidth = 0;                    // widest rendered value seen so far (starts at the header width)
+
+    /**
+     * Construct the column object, specifying the column name, default value, whether or not the column should be
+     * displayed, and the format string. This cannot be null.
+     *
+     * @param columnName   the name of the column
+     * @param format       format string; the empty string means "untyped", formatted with "%s"
+     */
+    public GATKReportColumn(final String columnName, final String format) {
+        this.columnName = columnName;
+        this.maxWidth = columnName.length();
+        if ( format.equals("") ) {
+            this.format = "%s";
+            this.dataType = GATKReportDataType.Unknown;
+        }
+        else {
+            this.format = format;
+            this.dataType = GATKReportDataType.fromFormatString(format);
+        }
+    }
+
+    /**
+     * Get the display width for this column.  This allows the entire column to be displayed with the appropriate, fixed
+     * width.  The result is computed once and cached.
+     *
+     * @return the format string for this column
+     */
+    public GATKReportColumnFormat getColumnFormat() {
+        if (columnFormat != null)
+            return columnFormat;
+
+        columnFormat = new GATKReportColumnFormat(maxWidth, alignment);
+        return columnFormat;
+    }
+
+    // non-numeric strings that should nonetheless stay right-aligned
+    private static final Collection<String> RIGHT_ALIGN_STRINGS = Arrays.asList(
+            "null",
+            "NA",
+            String.valueOf(Double.POSITIVE_INFINITY),
+            String.valueOf(Double.NEGATIVE_INFINITY),
+            String.valueOf(Double.NaN));
+
+    /**
+     * Check if the value can be right aligned. Does not trim the values before checking if numeric since it assumes
+     * the spaces mean that the value is already padded.
+     *
+     * @param value to check
+     * @return true if the value is a right alignable
+     */
+    protected static boolean isRightAlign(final String value) {
+        return value == null || RIGHT_ALIGN_STRINGS.contains(value) || NumberUtils.isNumber(value.trim());
+    }
+
+    /**
+     * Returns a string version of the values.
+     *
+     * Untyped floating-point values are rendered with 8 decimal places; everything else
+     * goes through this column's format string.
+     *
+     * @param obj The object to convert to a string
+     * @return The string representation of the column
+     */
+    private String formatValue(final Object obj) {
+        String value;
+        if (obj == null) {
+            value = "null";
+        }
+        else if ( dataType.equals(GATKReportDataType.Unknown) && (obj instanceof Double || obj instanceof Float) ) {
+            value = String.format("%.8f", obj);
+        }
+        else
+            value = String.format(format, obj);
+
+        return value;
+    }
+
+    /** @return the data type inferred from this column's format string */
+    public GATKReportDataType getDataType() {
+        return dataType;
+    }
+
+    /** @return the name of this column */
+    public String getColumnName() {
+        return columnName;
+    }
+
+    /** @return the format string, or "%s" when the column's type is unknown */
+    public String getFormat() {
+        return dataType.equals(GATKReportDataType.Unknown) ? "%s" : format;
+    }
+
+    /** Folds a newly added value into the column's width and alignment bookkeeping. */
+    public void updateFormatting(final Object value) {
+        if (value != null) {
+            final String formatted = formatValue(value);
+            if ( formatted.length() > 0 ) {
+                updateMaxWidth(formatted);
+                updateFormat(formatted);
+            }
+        }
+    }
+
+    /** Grows the tracked max width to cover the given rendered value. */
+    private void updateMaxWidth(final String formatted) {
+        maxWidth = Math.max(formatted.length(), maxWidth);
+    }
+
+    /** Once any value is not right-alignable, the whole column flips to LEFT for good. */
+    private void updateFormat(final String formatted) {
+        if (alignment == GATKReportColumnFormat.Alignment.RIGHT)
+            alignment = isRightAlign(formatted) ? GATKReportColumnFormat.Alignment.RIGHT : GATKReportColumnFormat.Alignment.LEFT;
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/report/GATKReportColumnFormat.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/report/GATKReportColumnFormat.java
new file mode 100644
index 0000000..6249f4f
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/report/GATKReportColumnFormat.java
@@ -0,0 +1,63 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.report;
+
+/**
+ * Immutable value object describing how a report column is rendered: its character width
+ * and whether cell values are left- or right-aligned within that width.
+ */
+public class GATKReportColumnFormat {
+    public static enum Alignment { LEFT, RIGHT }
+    private final int width;
+    private final Alignment alignment;
+
+    public GATKReportColumnFormat(int width, Alignment alignment) {
+        this.width = width;
+        this.alignment = alignment;
+    }
+
+    public int getWidth() {
+        return width;
+    }
+
+    public Alignment getAlignment() {
+        return alignment;
+    }
+
+    /** @return a printf format that left-aligns the column name within the column width. */
+    public String getNameFormat() {
+        return "%-" + width + "s";
+    }
+
+    /** @return a printf format that pads values to the column width according to the alignment. */
+    public String getValueFormat() {
+        switch (alignment) {
+            case LEFT:
+                return "%-" + width + "s";
+            case RIGHT:
+                return "%" + width + "s";
+            default:
+                throw new UnsupportedOperationException("Unknown alignment: " + alignment);
+        }
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/report/GATKReportDataType.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/report/GATKReportDataType.java
new file mode 100644
index 0000000..a6e640f
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/report/GATKReportDataType.java
@@ -0,0 +1,236 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.report;
+
+import java.util.EnumSet;
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * The gatherable data types acceptable in a GATK report column.
+ *
+ * Each constant carries a pattern ({@code dataTypeString}) that is both its
+ * {@link #toString()} rendering and the regex used to recognize printf-style
+ * format strings for that type (e.g. "%d" maps to Integer).
+ */
+public enum GATKReportDataType {
+    /**
+     * The null type should not be used.
+     */
+    Null("Null"),
+
+    /**
+     * The default value when a format string is not present
+     */
+    Unknown("Unknown"),
+
+    /**
+     * Used for boolean values. Will display as true or false in the table.
+     */
+    Boolean("%[Bb]"),
+
+    /**
+     * Used for char values. Will display as a char so use printable values!
+     */
+    Character("%[Cc]"),
+
+    /**
+     * Used for float and double values. Will output a decimal with format %.8f unless otherwise specified.
+     */
+    Decimal("%.*[EeFf]"),
+
+    /**
+     * Used for int, byte, short, and long values. Will display the full number by default.
+     */
+    Integer("%[Dd]"),
+
+    /**
+     * Used for string values. Displays the string itself.
+     */
+    String("%[Ss]");
+
+    // Pattern serving as both the toString() text and the format-string regex for this type.
+    private final String dataTypeString;
+
+    private GATKReportDataType(String dataTypeString) {
+        this.dataTypeString = dataTypeString;
+    }
+
+    // Reverse lookup from a type's pattern string to the enum constant (built once at class load).
+    private static final Map<String, GATKReportDataType> lookup = new HashMap<String, GATKReportDataType>();
+
+    static {
+        for (GATKReportDataType s : EnumSet.allOf(GATKReportDataType.class))
+            lookup.put(s.dataTypeString, s);
+    }
+
+
+    @Override
+    public String toString() {
+        return this.dataTypeString;
+    }
+
+    /**
+     * Returns a GATK report data type from the Object specified. It looks through the list of acceptable classes and
+     * returns the appropriate data type.
+     *
+     * @param object the object to derive the data type from
+     * @return the appropriate data type; Unknown when the object's class is not recognized
+     */
+    public static GATKReportDataType fromObject(Object object) {
+        GATKReportDataType value;
+        if (object instanceof Boolean) {
+            value = GATKReportDataType.Boolean;
+
+        } else if (object instanceof Character) {
+            value = GATKReportDataType.Character;
+
+        } else if (object instanceof Float ||
+                object instanceof Double) {
+            value = GATKReportDataType.Decimal;
+
+        } else if (object instanceof Integer ||
+                object instanceof Long ||
+                object instanceof Short ||
+                object instanceof Byte ) {
+            value = GATKReportDataType.Integer;
+
+        } else if (object instanceof String) {
+            value = GATKReportDataType.String;
+
+        } else {
+            // Unrecognized classes are tolerated (Unknown) rather than rejected.
+            value = GATKReportDataType.Unknown;
+            //throw new UserException("GATKReport could not convert the data object into a GATKReportDataType. Acceptable data objects are found in the documentation.");
+        }
+        return value;
+    }
+
+    /**
+     * Returns a GATK report data type from the format string specified. It uses regex matching from the enumerated
+     * Strings.
+     *
+     * @param format the format string to derive the data type from
+     * @return the appropriate data type, or Unknown when the format matches no type
+     */
+    public static GATKReportDataType fromFormatString(String format) {
+        if (format.equals(""))
+            return Unknown;
+        for (GATKReportDataType type : lookup.values()) {
+            if (format.matches(type.toString()) )
+                return type;
+        }
+        return Unknown;
+    }
+
+    /**
+     * Returns the default value of the data type. It returns an object that matches the class of the data type.
+     *
+     * @return an object that matches the data type (null for Null/Unknown)
+     */
+    public Object getDefaultValue() {
+        switch (this) {
+            case Decimal:
+                return 0.0D;
+            case Boolean:
+                return false;
+            case Character:
+                return '0';
+            case Integer:
+                return 0L;
+            case String:
+                return "";
+            default:
+                return null;
+        }
+    }
+
+    /**
+     * Checks if the two objects are equal using the appropriate test from the data types.
+     *
+     * NOTE(review): a and b must be non-null -- the Decimal/Boolean/Integer branch calls
+     * a.toString() directly and the fallback calls a.equals(b).
+     *
+     * @param a an object
+     * @param b another object to check if equal
+     * @return true - the objects are equal, false - the objects are not equal
+     */
+    public boolean isEqual(Object a, Object b) {
+        switch (this) {
+            case Null:
+                return true;
+            case Decimal:
+            case Boolean:
+            case Integer:
+                // Compare textual renderings so e.g. Long 1 and Integer 1 compare equal.
+                return a.toString().equals(b.toString());
+            case Character:
+            case String:
+            default:
+                return a.equals(b);
+        }
+    }
+
+    /**
+     * Converts an input String to the appropriate type using the data type. Used for parsing/loading a GATK report
+     * from file.
+     *
+     * NOTE(review): non-idiomatic capitalized method name (kept for compatibility). Returns
+     * null when obj is not a String; Decimal/Integer parsing propagates NumberFormatException
+     * for malformed input.
+     *
+     * @param obj The input string
+     * @return an object that matches the data type, or null if obj is not a String
+     */
+    Object Parse(Object obj) {
+        if (obj instanceof String) {
+            String str = obj.toString();
+            switch (this) {
+                case Decimal:
+                    return Double.parseDouble(str);
+                case Boolean:
+                    return java.lang.Boolean.parseBoolean(str);
+                case Integer:
+                    return Long.parseLong(str);
+                case String:
+                    return str;
+                case Character:
+                    return str.toCharArray()[0];
+                default:
+                    return str;
+            }
+        } else
+            return null;
+    }
+
+    /**
+     * Returns a format string version of the value according to the data type.
+     *
+     * @return The printf string representation of the object according to data type.
+     */
+    public String getDefaultFormatString() {
+        switch (this) {
+            case Decimal:
+                return "%.8f";
+            case Boolean:
+                return "%b";
+            case Integer:
+                return "%d";
+            case String:
+                return "%s";
+            case Character:
+                return "%c";
+            case Null:
+            default:
+                return "%s";
+        }
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/report/GATKReportGatherer.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/report/GATKReportGatherer.java
new file mode 100644
index 0000000..f7f5196
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/report/GATKReportGatherer.java
@@ -0,0 +1,62 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.report;
+
+import org.broadinstitute.gatk.utils.commandline.Gatherer;
+import org.broadinstitute.gatk.utils.exceptions.UserException;
+
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.PrintStream;
+import java.util.List;
+
+/**
+ * Gatherer that merges per-shard GATKReport files into a single combined report file.
+ */
+public class GATKReportGatherer extends Gatherer {
+    /**
+     * Concatenates the input GATKReports, in order, and prints the merged result to the output file.
+     *
+     * @param inputs the per-shard report files to combine (an empty list yields an empty report)
+     * @param output the destination file for the merged report
+     */
+    @Override
+    public void gather(List<File> inputs, File output) {
+        //Combines inputs GATKReport to one output
+
+        PrintStream o;
+        try {
+            o = new PrintStream(output);
+        } catch (FileNotFoundException e) {
+            throw new UserException(String.format("File %s to be output by GATKReportGatherer function was not found", output));
+        }
+
+        // The first report becomes the base; each subsequent report is concatenated onto it.
+        GATKReport current = new GATKReport();
+        boolean isFirst = true;
+        for (File input : inputs) {
+            if (isFirst) {
+                current = new GATKReport(input);
+                isFirst = false;
+            } else {
+                current.concat(new GATKReport(input));
+            }
+        }
+
+        current.print(o);
+        o.close();
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/report/GATKReportTable.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/report/GATKReportTable.java
new file mode 100644
index 0000000..e40c3f3
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/report/GATKReportTable.java
@@ -0,0 +1,779 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.report;
+
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+import org.broadinstitute.gatk.utils.text.TextFormattingUtils;
+
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.io.PrintStream;
+import java.util.*;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+public class GATKReportTable {
+    /**
+     * REGEX that matches any table with an invalid name
+     */
+    public static final String INVALID_TABLE_NAME_REGEX = "[^a-zA-Z0-9_\\-\\.]";
+    private static final String GATKTABLE_HEADER_PREFIX = "#:GATKTable";
+    private static final String SEPARATOR = ":";
+    private static final String ENDLINE = ":;";
+
+    private final String tableName;
+    private final String tableDescription;
+
+    private final TableSortingWay sortingWay;
+
+    private List<Object[]> underlyingData;
+    private final List<GATKReportColumn> columnInfo;
+    private final Map<Object, Integer> columnNameToIndex;
+    private final HashMap<Object, Integer> rowIdToIndex;
+
+    private static final String COULD_NOT_READ_HEADER = "Could not read the header of this file -- ";
+    private static final String COULD_NOT_READ_COLUMN_NAMES = "Could not read the column names of this file -- ";
+    private static final String COULD_NOT_READ_DATA_LINE = "Could not read a data line of this table -- ";
+    private static final String COULD_NOT_READ_EMPTY_LINE = "Could not read the last empty line of this table -- ";
+    private static final String OLD_GATK_TABLE_VERSION = "We no longer support older versions of the GATK Tables";
+
+    private static final int INITITAL_ARRAY_SIZE = 10000;
+    private static final String NUMBER_CONVERSION_EXCEPTION = "String is a number but is not a long or a double: ";
+
+    // Token positions (after splitting on ":") within the "#:GATKTable:..." data header line.
+    protected enum TableDataHeaderFields {
+        COLS(2),
+        ROWS(3),
+        FORMAT_START(4);
+
+        private final int index;
+        TableDataHeaderFields(int index) { this.index = index; }
+        public int index() { return index; }
+    }
+
+    /** How rows are ordered when the table is printed. */
+    public enum TableSortingWay {
+        SORT_BY_ROW,
+        SORT_BY_COLUMN,
+        DO_NOT_SORT
+    }
+
+    // Token positions (after splitting on ":") within the table name/description header line.
+    protected enum TableNameHeaderFields {
+        NAME(2),
+        DESCRIPTION(3);
+
+        private final int index;
+        TableNameHeaderFields(int index) { this.index = index; }
+        public int index() { return index; }
+    }
+
+    /**
+     * Construct a new GATK report table from the reader
+     * Note that the row ID mappings are just the index -> index
+     *
+     * Only version V1_1 is supported; any other version is rejected.
+     *
+     * @param reader        the reader
+     * @param version       the GATK report version
+     */
+    public GATKReportTable(BufferedReader reader, GATKReportVersion version) {
+
+        switch ( version ) {
+            case V1_1:
+                // read in the header lines
+                // NOTE(review): a truncated file surfaces here as a NullPointerException
+                // (readLine() returning null is not checked) rather than a clean parse error.
+                final String[] tableData, tableNameData;
+                try {
+                    tableData = reader.readLine().split(SEPARATOR);
+                    tableNameData = reader.readLine().split(SEPARATOR);
+                } catch (IOException e) {
+                    throw new ReviewedGATKException(COULD_NOT_READ_HEADER + e.getMessage());
+                }
+
+                // parse the header fields
+                tableName = tableNameData[TableNameHeaderFields.NAME.index()];
+                tableDescription = (tableNameData.length <= TableNameHeaderFields.DESCRIPTION.index()) ? "" : tableNameData[TableNameHeaderFields.DESCRIPTION.index()];                                           // table may have no description! (and that's okay)
+
+                // when reading from a file, we do not re-sort the rows
+                sortingWay = TableSortingWay.DO_NOT_SORT;
+
+                // initialize the data
+                final int nColumns = Integer.parseInt(tableData[TableDataHeaderFields.COLS.index()]);
+                final int nRows = Integer.parseInt(tableData[TableDataHeaderFields.ROWS.index()]);
+                underlyingData = new ArrayList<Object[]>(nRows);
+                columnInfo = new ArrayList<GATKReportColumn>(nColumns);
+                columnNameToIndex = new HashMap<Object, Integer>(nColumns);
+
+                // when reading from a file, the row ID mapping is just the index
+                rowIdToIndex = new HashMap<Object, Integer>();
+                for ( int i = 0; i < nRows; i++ )
+                    rowIdToIndex.put(i, i);
+
+                // read the column names
+                final String columnLine;
+                try {
+                    columnLine = reader.readLine();
+                } catch (IOException e) {
+                    throw new ReviewedGATKException(COULD_NOT_READ_COLUMN_NAMES);
+                }
+
+                // Columns are fixed-width; their start offsets come from the header line.
+                final List<Integer> columnStarts = TextFormattingUtils.getWordStarts(columnLine);
+                final String[] columnNames = TextFormattingUtils.splitFixedWidth(columnLine, columnStarts);
+
+                // Put in columns using the format string from the header
+                for ( int i = 0; i < nColumns; i++ ) {
+                    final String format = tableData[TableDataHeaderFields.FORMAT_START.index() + i];
+                    addColumn(columnNames[i], format);
+                }
+
+                // fill in the table
+                try {
+                    for ( int i = 0; i < nRows; i++ ) {
+                        // read a data line
+                        final String dataLine = reader.readLine();
+                        final List<String> lineSplits = Arrays.asList(TextFormattingUtils.splitFixedWidth(dataLine, columnStarts));
+
+                        underlyingData.add(new Object[nColumns]);
+                        for ( int columnIndex = 0; columnIndex < nColumns; columnIndex++ ) {
+
+                            // Convert the raw string token to the column's declared type before storing.
+                            final GATKReportDataType type = columnInfo.get(columnIndex).getDataType();
+                            final String columnName = columnNames[columnIndex];
+                            set(i, columnName, type.Parse(lineSplits.get(columnIndex)));
+
+                        }
+                    }
+                } catch (IOException e) {
+                    throw new ReviewedGATKException(COULD_NOT_READ_DATA_LINE + e.getMessage());
+                }
+
+                // consume the trailing blank line that terminates each table
+                try {
+                    reader.readLine();
+                } catch (IOException e) {
+                    throw new ReviewedGATKException(COULD_NOT_READ_EMPTY_LINE + e.getMessage());
+                }
+            break;
+
+            default:
+                throw new ReviewedGATKException(OLD_GATK_TABLE_VERSION);
+        }
+    }
+
+    /**
+     * Construct a new GATK report table with the specified name and description.
+     * Rows are sorted by row ID ({@link TableSortingWay#SORT_BY_ROW}) by default.
+     *
+     * @param tableName        the name of the table
+     * @param tableDescription the description of the table
+     * @param numColumns       the number of columns in this table
+     */
+    public GATKReportTable(final String tableName, final String tableDescription, final int numColumns) {
+        this(tableName, tableDescription, numColumns, TableSortingWay.SORT_BY_ROW);
+    }
+
+    /**
+     * Construct a new GATK report table with the specified name and description and whether to sort rows by the row ID.
+     *
+     * @param tableName          the name of the table (alphanumeric, '_', '-', '.' only)
+     * @param tableDescription   the description of the table (must not contain newlines)
+     * @param numColumns         the number of columns in this table
+     * @param sortingWay         in what way to sort rows (instead of the order in which they were added)
+     * @throws ReviewedGATKException if the name or description is invalid
+     */
+    public GATKReportTable(final String tableName, final String tableDescription, final int numColumns, final TableSortingWay sortingWay) {
+        if ( !isValidName(tableName) ) {
+            throw new ReviewedGATKException("Attempted to set a GATKReportTable name of '" + tableName + "'.  GATKReportTable names must be purely alphanumeric - no spaces or special characters are allowed.");
+        }
+
+        if ( !isValidDescription(tableDescription) ) {
+            throw new ReviewedGATKException("Attempted to set a GATKReportTable description of '" + tableDescription + "'.  GATKReportTable descriptions must not contain newlines.");
+        }
+
+        this.tableName = tableName;
+        this.tableDescription = tableDescription;
+        this.sortingWay = sortingWay;
+
+        // Pre-size the row storage generously; it still grows on demand via expandTo().
+        underlyingData = new ArrayList<Object[]>(INITITAL_ARRAY_SIZE);
+        columnInfo = new ArrayList<GATKReportColumn>(numColumns);
+        columnNameToIndex = new HashMap<Object, Integer>(numColumns);
+        rowIdToIndex = new HashMap<Object, Integer>();
+    }
+
+    /**
+     * Create a new GATKReportTable with the same name, description, sorting, and column structure
+     * as another table. Row data is never copied.
+     *
+     * @param tableToCopy the table whose structure is duplicated
+     * @param copyData    must be false; copying row data is not supported
+     * @throws IllegalArgumentException if copyData is true
+     */
+    public GATKReportTable(final GATKReportTable tableToCopy, final boolean copyData) {
+        this(tableToCopy.getTableName(), tableToCopy.getTableDescription(), tableToCopy.getNumColumns(), tableToCopy.sortingWay);
+        for ( final GATKReportColumn column : tableToCopy.getColumnInfo() )
+            addColumn(column.getColumnName(), column.getFormat());
+        if ( copyData )
+            throw new IllegalArgumentException("sorry, copying data in GATKReportTable isn't supported");
+    }
+
+    /**
+     * Verifies that a table or column name has only alphanumeric characters - no spaces or special characters allowed
+     * (the characters '_', '-' and '.' are also permitted by INVALID_TABLE_NAME_REGEX).
+     *
+     * @param name the name of the table or column
+     * @return true if the name is valid, false if otherwise
+     */
+    private boolean isValidName(String name) {
+        Pattern p = Pattern.compile(INVALID_TABLE_NAME_REGEX);
+        Matcher m = p.matcher(name);
+
+        return !m.find();
+    }
+
+    /**
+     * Verifies that a table description contains no newline (CR/LF) characters, which would
+     * break the one-line-per-header file format.
+     *
+     * @param description the description of the table
+     * @return true if the description is valid, false if otherwise
+     */
+    private boolean isValidDescription(String description) {
+        Pattern p = Pattern.compile("\\r|\\n");
+        Matcher m = p.matcher(description);
+
+        return !m.find();
+    }
+
+    /**
+     * Add a mapping from ID to the index of a new row added to the table.
+     * The first column is not populated with the ID.
+     *
+     * @param ID                    the unique ID
+     */
+    public void addRowID(final String ID) {
+        addRowID(ID, false);
+    }
+
+    /**
+     * Add a mapping from ID to the index of a new row added to the table.
+     * The new row is appended at index underlyingData.size().
+     *
+     * @param ID                    the unique ID
+     * @param populateFirstColumn   should we automatically populate the first column with the row's ID?
+     */
+    public void addRowID(final String ID, final boolean populateFirstColumn) {
+        addRowIDMapping(ID, underlyingData.size(), populateFirstColumn);
+    }
+
+    /**
+     * Add a mapping from ID to row index, without populating the first column.
+     *
+     * @param ID                    the unique ID
+     * @param index                 the index associated with the ID
+     */
+    public void addRowIDMapping(final String ID, final int index) {
+        addRowIDMapping(ID, index, false);
+    }
+
+    /**
+     * Add a mapping from ID to row index, growing the table if the index is beyond the
+     * current row count. Intermediate rows created by the growth get no ID mapping.
+     *
+     * @param ID                    the unique ID
+     * @param index                 the index associated with the ID
+     * @param populateFirstColumn   should we automatically populate the first column with the row's ID?
+     */
+    public void addRowIDMapping(final Object ID, final int index, final boolean populateFirstColumn) {
+        expandTo(index, false);
+        rowIdToIndex.put(ID, index);
+
+        if ( populateFirstColumn )
+            set(index, 0, ID);
+    }
+
+    /**
+     * Remove a mapping from ID to row index. Only the mapping is removed; the row's
+     * data remains in the table.
+     *
+     * @param ID   the row ID
+     */
+    public void removeRowIDMapping(final Object ID) {
+        rowIdToIndex.remove(ID);
+    }
+
+    /**
+     * Add a column to the report with no format string (its data type will be Unknown
+     * until inferred from values).
+     *
+     * @param columnName   the name of the column
+     */
+    public void addColumn(String columnName) {
+        addColumn(columnName, "");
+    }
+
+    /**
+     * Add a column to the report and the format string used to display the data.
+     * The column is appended after any existing columns.
+     *
+     * @param columnName   the name of the column
+     * @param format       the format string used to display data
+     */
+    public void addColumn(String columnName, String format) {
+        columnNameToIndex.put(columnName, columnInfo.size());
+        columnInfo.add(new GATKReportColumn(columnName, format));
+    }
+
+    /**
+     * Check that the requested cell coordinates are in range.
+     * Note only the column upper bound is checked here; row growth is handled by expandTo().
+     *
+     * @param rowIndex    the row index
+     * @param colIndex    the column index
+     */
+    private void verifyEntry(final int rowIndex, final int colIndex) {
+        if ( rowIndex < 0 || colIndex < 0 || colIndex >= getNumColumns() )
+            throw new ReviewedGATKException("attempted to access a cell that does not exist in table '" + tableName + "'");
+    }
+
+    /**
+     * expand the underlying table if needed to include the given row index
+     *
+     * @param rowIndex        the row index
+     * @param updateRowIdMap  should we update the row ID map (identity mapping index -> index)?
+     */
+    private void expandTo(final int rowIndex, final boolean updateRowIdMap) {
+        int currentSize = underlyingData.size();
+        if ( rowIndex >= currentSize ) {
+            final int numNewRows = rowIndex - currentSize + 1;
+            for ( int i = 0; i < numNewRows; i++ ) {
+                if ( updateRowIdMap )
+                    rowIdToIndex.put(currentSize, currentSize);
+                // New rows start with all-null cells, one slot per column.
+                underlyingData.add(new Object[getNumColumns()]);
+                currentSize++;
+            }
+        }
+    }
+
+    /**
+     * Set the value for a given position in the table.
+     * If the row ID doesn't exist, it will create a new row in the table with the given ID.
+     *
+     * NOTE(review): an unrecognized columnName makes columnNameToIndex.get() return null,
+     * which fails with a NullPointerException on unboxing rather than a descriptive error.
+     *
+     * @param rowID        the row ID
+     * @param columnName   the name of the column
+     * @param value        the value to set
+     */
+    public void set(final Object rowID, final String columnName, final Object value) {
+        if ( !rowIdToIndex.containsKey(rowID) ) {
+            rowIdToIndex.put(rowID, underlyingData.size());
+            expandTo(underlyingData.size(), false);
+        }
+        set(rowIdToIndex.get(rowID), columnNameToIndex.get(columnName), value);
+    }
+
+    /**
+     * Set the value for a given position in the table.
+     * If the row index doesn't exist, it will create new rows in the table accordingly.
+     *
+     * @param rowIndex     the row index
+     * @param colIndex     the column index
+     * @param value        the value to set
+     * @throws ReviewedGATKException if the value's inferred type conflicts with the column's type
+     */
+    public void set(final int rowIndex, final int colIndex, Object value) {
+        expandTo(rowIndex, true);
+        verifyEntry(rowIndex, colIndex);
+        GATKReportColumn column = columnInfo.get(colIndex);
+
+        // We do not accept internal null values
+        if (value == null)
+            value = "null";
+        else
+            value = fixType(value, column);
+
+        // Unknown columns accept anything; otherwise the value's type must match the column's.
+        if ( column.getDataType().equals(GATKReportDataType.fromObject(value)) || column.getDataType().equals(GATKReportDataType.Unknown) ) {
+            underlyingData.get(rowIndex)[colIndex] = value;
+            column.updateFormatting(value);
+        } else {
+            throw new ReviewedGATKException(String.format("Tried to add an object of type: %s to a column of type: %s", GATKReportDataType.fromObject(value).name(), column.getDataType().name()));
+        }
+    }
+
+    /**
+     * Returns true if the table contains a row mapping with the given ID
+     *
+     * @param rowID        the row ID
+     * @return whether a row with this ID has been mapped
+     */
+    public boolean containsRowID(final Object rowID) {
+        return rowIdToIndex.containsKey(rowID);
+    }
+
+    /**
+     * Returns the row mapping IDs.
+     * Note: this is a live key-set view of the internal map, not a defensive copy.
+     *
+     * @return the set of mapped row IDs
+     */
+    public Collection<Object> getRowIDs() {
+        return rowIdToIndex.keySet();
+    }
+
+    /**
+     * Increment the value for a given position in the table.
+     * Throws an exception if the value in the cell is not an integer.
+     * If the row ID is new, a fresh row is created and the cell is set to 1.
+     *
+     * NOTE(review): a mapped row whose cell was never written holds null, which fails the
+     * instanceof Integer check and triggers the exception below.
+     *
+     * @param rowID        the row ID
+     * @param columnName   the name of the column
+     */
+    public void increment(final Object rowID, final String columnName) {
+        int prevValue;
+        if ( !rowIdToIndex.containsKey(rowID) ) {
+            rowIdToIndex.put(rowID, underlyingData.size());
+            underlyingData.add(new Object[getNumColumns()]);
+            prevValue = 0;
+        } else {
+            Object obj = get(rowID, columnName);
+            if ( !(obj instanceof Integer) )
+                throw new ReviewedGATKException("Attempting to increment a value in a cell that is not an integer");
+            prevValue = (Integer)obj;
+        }
+
+        set(rowIdToIndex.get(rowID), columnNameToIndex.get(columnName), prevValue + 1);
+    }
+
+    /**
+     * Returns the index of the first row matching the column values.
+     * Ex: "CountVariants", "dbsnp", "eval", "called", "all", "novel", "all"
+     *
+     * Performs a linear scan comparing the leading columns of each row against the given
+     * values. NOTE(review): a null element in columnValues causes a NullPointerException
+     * (columnValues[colIndex].equals is called directly).
+     *
+     * @param columnValues column values.
+     * @return The index of the first row matching the column values or -1 if no such row exists.
+     */
+    public int findRowByData(final Object... columnValues) {
+        if ( columnValues == null || columnValues.length == 0 || columnValues.length > getNumColumns() )
+            return -1;
+
+        for ( int rowIndex = 0; rowIndex < underlyingData.size(); rowIndex++ ) {
+
+            final Object[] row = underlyingData.get(rowIndex);
+
+            boolean matches = true;
+            for ( int colIndex = 0; colIndex < columnValues.length; colIndex++ ) {
+                if ( !columnValues[colIndex].equals(row[colIndex]) ) {
+                    matches = false;
+                    break;
+                }
+            }
+
+            if ( matches )
+                return rowIndex;
+        }
+
+        return -1;
+    }
+
+    /**
+     * Coerces a String cell value to the column's declared data type where possible.
+     *
+     * Integer columns are parsed as Long, Decimal columns as Double, and a
+     * single-character String in a Character column becomes a Character.  If the
+     * value is not a String, the column is a String column, or parsing fails,
+     * the original value is returned unchanged.
+     *
+     * todo -- Types have to be more flexible. For example, %d should accept Integers, Shorts and Bytes.
+     *
+     * @param value  the raw cell value
+     * @param column the column whose declared type drives the conversion
+     * @return the converted value, or the original value when no conversion applies
+     */
+    private Object fixType(final Object value, final GATKReportColumn column) {
+        Object newValue = null;
+        if ( value instanceof String && !column.getDataType().equals(GATKReportDataType.String) ) {
+            if ( column.getDataType().equals(GATKReportDataType.Integer) ) {
+                try {
+                    newValue = Long.parseLong((String) value);
+                } catch (NumberFormatException e) {
+                    // not a parseable integer: keep the original String value
+                }
+            } else if ( column.getDataType().equals(GATKReportDataType.Decimal) ) {
+                try {
+                    newValue = Double.parseDouble((String) value);
+                } catch (NumberFormatException e) {
+                    // not a parseable decimal: keep the original String value
+                }
+            } else if ( column.getDataType().equals(GATKReportDataType.Character) && ((String) value).length() == 1 ) {
+                newValue = ((String) value).charAt(0);
+            }
+        }
+
+        return  (newValue != null) ? newValue : value;
+    }
+
+    /**
+     * Get a value from the given position in the table
+     *
+     * NOTE(review): an unknown rowID or columnName makes the map lookups return
+     * null, which then fails with a NullPointerException on unboxing rather
+     * than a descriptive error — confirm callers always pass known keys.
+     *
+     * @param rowID       the row ID
+     * @param columnName  the name of the column
+     * @return the value stored at the specified position in the table
+     */
+    public Object get(final Object rowID, final String columnName) {
+        return get(rowIdToIndex.get(rowID), columnNameToIndex.get(columnName));
+    }
+
+    /**
+     * Get a value from the given position in the table
+     *
+     * @param rowIndex    the index of the row
+     * @param columnName  the name of the column
+     * @return the value stored at the specified position in the table
+     */
+    public Object get(final int rowIndex, final String columnName) {
+        return get(rowIndex, columnNameToIndex.get(columnName));
+    }
+
+    /**
+     * Get a value from the given position in the table
+     *
+     * @param rowIndex    the index of the row
+     * @param columnIndex the index of the column
+     * @return the value stored at the specified position in the table
+     */
+    public Object get(int rowIndex, int columnIndex) {
+        verifyEntry(rowIndex, columnIndex);   // bounds check (defined elsewhere in this class)
+        return underlyingData.get(rowIndex)[columnIndex];
+    }
+
+    /**
+     * Write the table to the PrintStream, formatted nicely to be human-readable, AWK-able, and R-friendly.
+     *
+     * Output: a header line with column counts and formats, a line with the
+     * table name and description, the column-name row, then one line per data
+     * row in the order dictated by sortingWay.
+     *
+     * NOTE(review): in SORT_BY_COLUMN mode this sorts underlyingData in place,
+     * so writing the table permanently reorders its rows.
+     *
+     * @param out the PrintStream to which the table should be written
+     */
+     void write(final PrintStream out) {
+
+         /*
+          * Table header:
+          * #:GATKTable:nColumns:nRows:(DataType for each column):;
+          * #:GATKTable:TableName:Description :;
+          * key   colA  colB
+          * row1  xxxx  xxxxx
+         */
+
+         // write the table definition
+         out.printf(GATKTABLE_HEADER_PREFIX + ":%d:%d", getNumColumns(), getNumRows());
+
+         // write the formats for all the columns
+         for ( final GATKReportColumn column : columnInfo )
+             out.print(SEPARATOR + column.getFormat());
+         out.println(ENDLINE);
+
+         // write the table name & description
+         out.printf(GATKTABLE_HEADER_PREFIX + ":%s:%s\n", tableName, tableDescription);
+
+         // write the column names
+         boolean needsPadding = false;
+         for ( final GATKReportColumn column : columnInfo ) {
+             if ( needsPadding )
+                 out.printf("  ");
+             needsPadding = true;
+
+             out.printf(column.getColumnFormat().getNameFormat(), column.getColumnName());
+         }
+         out.println();
+
+         // write the table body
+         switch (sortingWay) {
+             case SORT_BY_COLUMN:
+                 // sorts underlyingData IN PLACE, column by column, left to right
+                 Collections.sort(underlyingData, new Comparator<Object[]>() {
+                     //INVARIANT the two arrays are of the same length and corresponding elements are of the same type
+                     @Override
+                     public int compare(Object[] objectArr1, Object[] objectArr2) {
+                         final int EQUAL = 0;
+
+                         int result = EQUAL;
+
+                         int l = objectArr1.length;
+                         for (int x = 0; x < l; x++) {
+                             if (objectArr1[x] instanceof Integer) {
+                                 result = ((Integer)objectArr1[x]).compareTo((Integer)objectArr2[x]);
+                             } else if (objectArr1[x] instanceof Double) {
+                                 result = ((Double)objectArr1[x]).compareTo((Double)objectArr2[x]);
+                             } else { // default uses String comparison
+                                 result = objectArr1[x].toString().compareTo(objectArr2[x].toString());
+                             }
+                             if( result  != EQUAL) {
+                                 return result;
+                             }
+                         }
+                         return result;
+                     }
+                 });
+                 for ( final Object[] row : underlyingData )
+                     writeRow(out, row);
+                 break;
+             case SORT_BY_ROW:
+                 // make sure that there are exactly the correct number of ID mappings
+                 if ( rowIdToIndex.size() != underlyingData.size() )
+                     throw new ReviewedGATKException("There isn't a 1-to-1 mapping from row ID to index; this can happen when rows are not created consistently");
+
+                 // TreeMap sorts by row ID; mixed, mutually-incomparable ID types fail here
+                 final TreeMap<Object, Integer> sortedMap;
+                 try {
+                     sortedMap = new TreeMap<Object, Integer>(rowIdToIndex);
+                 } catch (ClassCastException e) {
+                     throw new ReviewedGATKException("Unable to sort the rows based on the row IDs because the ID Objects are of different types");
+                 }
+                 for ( final Map.Entry<Object, Integer> rowKey : sortedMap.entrySet() )
+                     writeRow(out, underlyingData.get(rowKey.getValue()));
+                 break;
+             case DO_NOT_SORT:
+                 for ( final Object[] row : underlyingData )
+                     writeRow(out, row);
+         }
+         out.println();
+     }
+
+    /**
+     * Writes one row of cell values to the stream, separating columns with two
+     * spaces.  Null cells render as "null"; Double/Float values in columns of
+     * Unknown type render with 8 decimal places; every other cell uses the
+     * column's declared format.
+     *
+     * @param out destination stream
+     * @param row the cell values of a single row
+     */
+    private void writeRow(final PrintStream out, final Object[] row) {
+        for ( int col = 0; col < row.length; col++ ) {
+            if ( col > 0 )
+                out.printf("  ");
+
+            final GATKReportColumn columnMeta = columnInfo.get(col);
+            final Object cell = row[col];
+
+            final String rendered;
+            if ( cell == null )
+                rendered = "null";
+            else if ( columnMeta.getDataType().equals(GATKReportDataType.Unknown) && (cell instanceof Double || cell instanceof Float) )
+                rendered = String.format("%.8f", cell);
+            else
+                rendered = String.format(columnMeta.getFormat(), cell);
+
+            out.printf(columnMeta.getColumnFormat().getValueFormat(), rendered);
+        }
+
+        out.println();
+    }
+
+    /** @return the number of data rows currently in the table */
+    public int getNumRows() {
+        return underlyingData.size();
+    }
+
+    /** @return the number of columns defined for the table */
+    public int getNumColumns() {
+        return columnInfo.size();
+    }
+
+    /**
+     * @return the column metadata, in column order.
+     * Note: this is the internal list itself, not a copy — callers must not modify it.
+     */
+    public List<GATKReportColumn> getColumnInfo() {
+        return columnInfo;
+    }
+
+    /** @return the table's name */
+    public String getTableName() {
+        return tableName;
+    }
+
+    /** @return the table's free-text description */
+    public String getTableDescription() {
+        return tableDescription;
+    }
+
+    /**
+     * Concatenates the rows from the given table onto the end of this one.
+     * Both tables must have the same format (see isSameFormat).
+     *
+     * @param table another GATK table
+     * @throws ReviewedGATKException if the table formats differ
+     */
+    public void concat(final GATKReportTable table) {
+        if ( !isSameFormat(table) )
+            throw new ReviewedGATKException("Error trying to concatenate tables with different formats");
+
+        // Capture the row count BEFORE appending: incoming row indices must be
+        // offset by the number of rows already present.  (The previous code
+        // computed this after addAll(), offsetting the IDs past the end of the
+        // list and corrupting the row-ID map.)
+        final int originalNumRows = getNumRows();
+
+        // add the data
+        underlyingData.addAll(table.underlyingData);
+
+        // update the row index map
+        for ( Map.Entry<Object, Integer> entry : table.rowIdToIndex.entrySet() )
+            rowIdToIndex.put(entry.getKey(), entry.getValue() + originalNumRows);
+    }
+
+    /**
+     * Returns whether or not the two tables have the same format including columns and everything in between. This does
+     * not check if the data inside is the same. This is the check to see if the two tables are gatherable or
+     * reduceable.
+     *
+     * @param table another GATK table
+     * @return true if the the tables are gatherable
+     */
+    public boolean isSameFormat(final GATKReportTable table) {
+        final boolean sameShape = tableName.equals(table.tableName)
+                && tableDescription.equals(table.tableDescription)
+                && columnInfo.size() == table.columnInfo.size();
+        if ( !sameShape )
+            return false;
+
+        // column-by-column: formats and names must match pairwise
+        for ( int i = 0; i < columnInfo.size(); i++ ) {
+            final GATKReportColumn mine = columnInfo.get(i);
+            final GATKReportColumn theirs = table.columnInfo.get(i);
+            if ( !mine.getFormat().equals(theirs.getFormat()) || !mine.getColumnName().equals(theirs.getColumnName()) )
+                return false;
+        }
+
+        return true;
+    }
+
+    /**
+     * Checks that the tables are exactly the same: same format (isSameFormat)
+     * and the same string-rendered cell values, compared row by row in each
+     * table's sorted-row order.
+     *
+     * NOTE(review): this OVERLOADS rather than overrides Object.equals(Object)
+     * and no matching hashCode() is visible here — do not rely on it through
+     * Object-typed references or hash-based collections.
+     *
+     * @param table another GATK report table
+     * @return true if all fields in the tables and columns are equal.
+     */
+    public boolean equals(final GATKReportTable table) {
+        if ( !isSameFormat(table) || underlyingData.size() != table.underlyingData.size() )
+            return false;
+
+        final List<Object[]> mine = getOrderedRows();
+        final List<Object[]> theirs = table.getOrderedRows();
+
+        for ( int row = 0; row < underlyingData.size(); row++ ) {
+            final Object[] myRow = mine.get(row);
+            final Object[] otherRow = theirs.get(row);
+            for ( int col = 0; col < myRow.length; col++ ) {
+                // compare via toString() to tolerate differing numeric boxing (e.g. Long vs. Integer)
+                if ( !myRow[col].toString().equals(otherRow[col].toString()) )
+                    return false;
+            }
+        }
+
+        return true;
+    }
+
+    /**
+     * Returns the table rows in the display order dictated by sortingWay.
+     *
+     * SORT_BY_COLUMN: sorts underlyingData IN PLACE (a side effect on the
+     * table), comparing column by column — Integers and Doubles natively,
+     * everything else via toString() — then returns the internal list.
+     * SORT_BY_ROW: returns a new list ordered by row ID; if the IDs are not
+     * mutually comparable (ClassCastException from TreeMap) the unsorted
+     * internal list is returned instead.
+     * Otherwise the internal list is returned as-is.
+     */
+    private List<Object[]> getOrderedRows() {
+
+        switch (sortingWay) {
+            case SORT_BY_COLUMN:
+                Collections.sort(underlyingData, new Comparator<Object[]>() {
+                    //INVARIANT the two arrays are of the same length and corresponding elements are of the same type
+                    @Override
+                    public int compare(Object[] objectArr1, Object[] objectArr2) {
+                        final int EQUAL = 0;
+                        int result = EQUAL;
+                        int l = objectArr1.length;
+                            for (int x = 0; x < l; x++) {
+                                if (objectArr1[x] instanceof Integer) {
+                                    result = ((Integer)objectArr1[x]).compareTo((Integer)objectArr2[x]);
+                                } else if (objectArr1[x] instanceof Double) {
+                                    result = ((Double)objectArr1[x]).compareTo((Double)objectArr2[x]);
+                                } else  { // default uses String comparison
+                                    result = objectArr1[x].toString().compareTo(objectArr2[x].toString());
+                                }
+                                if( result != EQUAL) {
+                                    return result;
+                                }
+                            }
+                            return result;
+                    }
+                });
+                return underlyingData;
+            case SORT_BY_ROW:
+                final TreeMap<Object, Integer> sortedMap;
+                try {
+                    sortedMap = new TreeMap<Object, Integer>(rowIdToIndex);
+                } catch (ClassCastException e) {
+                    // row IDs of mixed types cannot be sorted; fall back to insertion order
+                    return underlyingData;
+                }
+
+                final List<Object[]> orderedData = new ArrayList<Object[]>(underlyingData.size());
+                for ( final int rowKey : sortedMap.values() )
+                    orderedData.add(underlyingData.get(rowKey));
+
+                return orderedData;
+            default:
+                return underlyingData;
+        }
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/report/GATKReportVersion.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/report/GATKReportVersion.java
new file mode 100644
index 0000000..cf985d5
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/report/GATKReportVersion.java
@@ -0,0 +1,100 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.report;
+
+import org.broadinstitute.gatk.utils.exceptions.UserException;
+
+/**
+ * The known on-disk versions of the GATK report format, each paired with the
+ * version token that appears in a report file's header line.
+ */
+public enum GATKReportVersion {
+    /**
+     * Differences between other versions:
+     * - Does not allow spaces in cells.
+     * - Mostly fixed width but has a bug where the string width of floating point
+     * values was not measured correctly leading to columns that aren't aligned
+     */
+    V0_1("v0.1"),
+
+    /**
+     * Differences between other versions:
+     * - Spaces allowed in cells, for example in sample names with spaces in them ex: "C507/FG-CR 6".
+     * - Fixed width fixed for floating point values
+     */
+    V0_2("v0.2"),
+
+    /**
+     * Differences between v0.x:
+     * - Added table and report headers
+     * - Headers changed format, include the number of tables, rows, and metadata for gathering
+     * - IS GATHERABLE
+     */
+    V1_0("v1.0"),
+
+    /**
+     * Differences between v1.0:
+     * - column numbers in header reflect the actual count of columns
+     * - primary keys are never displayed
+     */
+    V1_1("v1.1");
+
+    // the exact version token as written in report headers, e.g. "v1.1"
+    private final String versionString;
+
+    private GATKReportVersion(String versionString) {
+        this.versionString = versionString;
+    }
+
+    @Override
+    public String toString() {
+        return versionString;
+    }
+
+    // NOTE(review): this OVERLOADS Object.equals rather than overriding it; for
+    // enums, reference equality (==) is already the canonical comparison, and
+    // calling this with a null argument throws NullPointerException.
+    public boolean equals(GATKReportVersion that) {
+        return (versionString.equals(that.versionString));
+    }
+
+    /**
+     * Returns the GATK Report Version from the file header.
+     *
+     * Note the asymmetry: v0.x headers are matched with a "##:" prefix and a
+     * trailing space, while v1.x headers use "#:" with no trailing space.
+     *
+     * @param header Header from the file starting with ##:GATKReport.v[version]
+     * @return The version as an enum.
+     * @throws UserException.BadInput if the header is null or names an unknown version
+     */
+    public static GATKReportVersion fromHeader(String header) {
+        if ( header == null )
+            throw new UserException.BadInput("The GATK report has no version specified in the header");
+
+        if (header.startsWith("##:GATKReport.v0.1 "))
+            return GATKReportVersion.V0_1;
+
+        if (header.startsWith("##:GATKReport.v0.2 "))
+            return GATKReportVersion.V0_2;
+
+        if (header.startsWith("#:GATKReport.v1.0"))
+            return GATKReportVersion.V1_0;
+
+        if (header.startsWith("#:GATKReport.v1.1"))
+            return GATKReportVersion.V1_1;
+
+        throw new UserException.BadInput("The GATK report has an unknown/unsupported version in the header: " + header);
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/runtime/CapturedStreamOutput.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/runtime/CapturedStreamOutput.java
new file mode 100644
index 0000000..ee6dfbd
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/runtime/CapturedStreamOutput.java
@@ -0,0 +1,134 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.runtime;
+
+import org.apache.commons.io.IOUtils;
+import org.apache.commons.io.output.NullOutputStream;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+import org.broadinstitute.gatk.utils.exceptions.UserException;
+import org.broadinstitute.gatk.utils.io.HardThresholdingOutputStream;
+
+import java.io.*;
+import java.util.EnumMap;
+
+/**
+ * Stream output captured from a stream.
+ *
+ * Drains a process stream to exhaustion (readAndClose) and fans each chunk out
+ * to the destinations configured in OutputStreamSettings: an in-memory buffer,
+ * a file, and/or a standard stream.
+ */
+public class CapturedStreamOutput extends StreamOutput {
+    // the process stdout/stderr stream being drained
+    private final InputStream processStream;
+    // destination streams keyed by location; populated once in the constructor
+    private final EnumMap<StreamLocation, OutputStream> outputStreams = new EnumMap<StreamLocation, OutputStream>(StreamLocation.class);
+
+    /**
+     * The byte stream that captures content.  Always allocated by the
+     * constructor (despite earlier "or null" wording); it simply goes unused
+     * when Buffer is not among the configured stream locations.
+     */
+    private final ByteArrayOutputStream bufferStream;
+
+    /**
+     * True if the buffer is truncated.
+     */
+    private boolean bufferTruncated = false;
+
+    /**
+     * @param settings       Settings that define what to capture.
+     * @param processStream  Stream to capture output.
+     * @param standardStream Stream to write debug output.
+     */
+    public CapturedStreamOutput(OutputStreamSettings settings, InputStream processStream, PrintStream standardStream) {
+        this.processStream = processStream;
+        int bufferSize = settings.getBufferSize();
+        // a negative buffer size means "unlimited": use a growable default-sized buffer
+        this.bufferStream = (bufferSize < 0) ? new ByteArrayOutputStream() : new ByteArrayOutputStream(bufferSize);
+
+        for (StreamLocation location : settings.getStreamLocations()) {
+            OutputStream outputStream;
+            switch (location) {
+                case Buffer:
+                    if (bufferSize < 0) {
+                        outputStream = this.bufferStream;
+                    } else {
+                        // bounded capture: once bufferSize bytes have been written,
+                        // mark the buffer truncated and silently discard the rest
+                        outputStream = new HardThresholdingOutputStream(bufferSize) {
+                            @Override
+                            protected OutputStream getStream() throws IOException {
+                                return bufferTruncated ? NullOutputStream.NULL_OUTPUT_STREAM : bufferStream;
+                            }
+
+                            @Override
+                            protected void thresholdReached() throws IOException {
+                                bufferTruncated = true;
+                            }
+                        };
+                    }
+                    break;
+                case File:
+                    try {
+                        outputStream = new FileOutputStream(settings.getOutputFile(), settings.isAppendFile());
+                    } catch (IOException e) {
+                        throw new UserException.BadInput(e.getMessage());
+                    }
+                    break;
+                case Standard:
+                    outputStream = standardStream;
+                    break;
+                default:
+                    throw new ReviewedGATKException("Unexpected stream location: " + location);
+            }
+            this.outputStreams.put(location, outputStream);
+        }
+    }
+
+    @Override
+    public byte[] getBufferBytes() {
+        return bufferStream.toByteArray();
+    }
+
+    @Override
+    public boolean isBufferTruncated() {
+        return bufferTruncated;
+    }
+
+    /**
+     * Drain the input stream to keep the process from backing up until it's empty.
+     * File streams will be closed automatically when this method returns.
+     *
+     * NOTE(review): if flush() throws for one destination, destinations later in
+     * the iteration are neither flushed nor closed — confirm this is acceptable.
+     *
+     * @throws java.io.IOException When unable to read or write.
+     */
+    public void readAndClose() throws IOException {
+        try {
+            byte[] buf = new byte[4096];
+            int readCount;
+            // read() returns -1 at end-of-stream; every chunk goes to all destinations
+            while ((readCount = processStream.read(buf)) >= 0)
+                for (OutputStream outputStream : this.outputStreams.values()) {
+                    outputStream.write(buf, 0, readCount);
+                }
+        } finally {
+            for (StreamLocation location : this.outputStreams.keySet()) {
+                OutputStream outputStream = this.outputStreams.get(location);
+                outputStream.flush();
+                // the standard stream belongs to the caller; only close file/buffer streams
+                if (location != StreamLocation.Standard)
+                    IOUtils.closeQuietly(outputStream);
+            }
+        }
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/runtime/InputStreamSettings.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/runtime/InputStreamSettings.java
new file mode 100644
index 0000000..2fdc5be
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/runtime/InputStreamSettings.java
@@ -0,0 +1,116 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.runtime;
+
+import java.io.File;
+import java.util.Collections;
+import java.util.EnumSet;
+import java.util.Set;
+
+/**
+ * Settings that define text to write to the process stdin.
+ *
+ * stdin content may come from an in-memory byte buffer, a file, and/or the
+ * caller's standard input; each setter registers the corresponding
+ * StreamLocation and each clearer removes it.
+ */
+public class InputStreamSettings {
+    // which stdin sources are enabled; kept in sync by the setters/clearers below
+    private final EnumSet<StreamLocation> streamLocations = EnumSet.noneOf(StreamLocation.class);
+    private byte[] inputBuffer;
+    private File inputFile;
+
+    public InputStreamSettings() {
+    }
+
+    /**
+     * @param inputBuffer String to write to stdin.
+     */
+    public InputStreamSettings(String inputBuffer) {
+        setInputBuffer(inputBuffer);
+    }
+
+    /**
+     * @param inputFile File to write to stdin.
+     */
+    public InputStreamSettings(File inputFile) {
+        setInputFile(inputFile);
+    }
+
+    /**
+     * @param inputBuffer Bytes to write to stdin.
+     * @param inputFile   File to write to stdin.
+     */
+    public InputStreamSettings(byte[] inputBuffer, File inputFile) {
+        setInputBuffer(inputBuffer);
+        setInputFile(inputFile);
+    }
+
+    /** @return an unmodifiable view of the enabled stdin sources. */
+    public Set<StreamLocation> getStreamLocations() {
+        return Collections.unmodifiableSet(streamLocations);
+    }
+
+    public byte[] getInputBuffer() {
+        return inputBuffer;
+    }
+
+    /** Enables buffer input using the platform-default encoding of the given String. */
+    public void setInputBuffer(String inputBuffer) {
+        if (inputBuffer == null)
+            throw new IllegalArgumentException("inputBuffer cannot be null");
+        this.streamLocations.add(StreamLocation.Buffer);
+        this.inputBuffer = inputBuffer.getBytes();
+    }
+
+    /** Enables buffer input with the given bytes. */
+    public void setInputBuffer(byte[] inputBuffer) {
+        if (inputBuffer == null)
+            throw new IllegalArgumentException("inputBuffer cannot be null");
+        this.streamLocations.add(StreamLocation.Buffer);
+        this.inputBuffer = inputBuffer;
+    }
+
+    /** Disables buffer input and releases the bytes. */
+    public void clearInputBuffer() {
+        this.streamLocations.remove(StreamLocation.Buffer);
+        this.inputBuffer = null;
+    }
+
+    public File getInputFile() {
+        return inputFile;
+    }
+
+    /** Enables file input from the given file. */
+    public void setInputFile(File inputFile) {
+        if (inputFile == null)
+            throw new IllegalArgumentException("inputFile cannot be null");
+        this.streamLocations.add(StreamLocation.File);
+        this.inputFile = inputFile;
+    }
+
+    /** Disables file input. */
+    public void clearInputFile() {
+        this.streamLocations.remove(StreamLocation.File);
+        this.inputFile = null;
+    }
+
+    /** Toggles forwarding of the caller's standard input to the process. */
+    public void setInputStandard(boolean inputStandard) {
+        if (inputStandard)
+            this.streamLocations.add(StreamLocation.Standard);
+        else
+            this.streamLocations.remove(StreamLocation.Standard);
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/runtime/OutputStreamSettings.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/runtime/OutputStreamSettings.java
new file mode 100644
index 0000000..25375ed
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/runtime/OutputStreamSettings.java
@@ -0,0 +1,127 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.runtime;
+
+import java.io.File;
+import java.util.Collections;
+import java.util.EnumSet;
+import java.util.Set;
+
+/**
+ * Settings that define text to capture from a process stream.
+ *
+ * Capture destinations — an in-memory buffer, a file, and/or a standard
+ * stream — are tracked in a StreamLocation set that each setter registers
+ * with and each clearer removes from.
+ */
+public class OutputStreamSettings {
+    // enabled capture destinations; kept in sync by the setters/clearers below
+    private final EnumSet<StreamLocation> streamLocations = EnumSet.noneOf(StreamLocation.class);
+    private int bufferSize;
+    private File outputFile;
+    private boolean appendFile;
+
+    /** Captures nothing until a destination is configured. */
+    public OutputStreamSettings() {
+    }
+
+    /**
+     * @param bufferSize The number of bytes to capture, or -1 for unlimited.
+     */
+    public OutputStreamSettings(int bufferSize) {
+        setBufferSize(bufferSize);
+    }
+
+    /**
+     * @param outputFile The file to write output to.
+     */
+    public OutputStreamSettings(File outputFile) {
+        this(outputFile, false);
+    }
+
+    /**
+     * @param outputFile The file to write output to.
+     * @param append     true if the output file should be appended to.
+     */
+    public OutputStreamSettings(File outputFile, boolean append) {
+        setOutputFile(outputFile, append);
+    }
+
+    /**
+     * @param bufferSize The number of bytes to capture, or -1 for unlimited.
+     * @param outputFile The file to write output to.
+     * @param appendFile true if the output file should be appended to.
+     */
+    public OutputStreamSettings(int bufferSize, File outputFile, boolean appendFile) {
+        setBufferSize(bufferSize);
+        setOutputFile(outputFile, appendFile);
+    }
+
+    /** @return an unmodifiable view of the enabled capture destinations. */
+    public Set<StreamLocation> getStreamLocations() {
+        return Collections.unmodifiableSet(streamLocations);
+    }
+
+    public int getBufferSize() {
+        return bufferSize;
+    }
+
+    /** Enables in-memory capture of up to bufferSize bytes (-1 for unlimited). */
+    public void setBufferSize(int bufferSize) {
+        streamLocations.add(StreamLocation.Buffer);
+        this.bufferSize = bufferSize;
+    }
+
+    /** Disables in-memory capture and resets the size to 0. */
+    public void clearBufferSize() {
+        streamLocations.remove(StreamLocation.Buffer);
+        this.bufferSize = 0;
+    }
+
+    public File getOutputFile() {
+        return outputFile;
+    }
+
+    public boolean isAppendFile() {
+        return appendFile;
+    }
+
+    /**
+     * Overwrites the outputFile with the process output.
+     *
+     * @param outputFile File to overwrite.
+     */
+    public void setOutputFile(File outputFile) {
+        setOutputFile(outputFile, false);
+    }
+
+    /** Directs process output to outputFile, appending when append is true. */
+    public void setOutputFile(File outputFile, boolean append) {
+        if (outputFile == null)
+            throw new IllegalArgumentException("outputFile cannot be null");
+        streamLocations.add(StreamLocation.File);
+        this.outputFile = outputFile;
+        this.appendFile = append;
+    }
+
+    /** Stops writing process output to a file. */
+    public void clearOutputFile() {
+        streamLocations.remove(StreamLocation.File);
+        this.outputFile = null;
+        this.appendFile = false;
+    }
+
+    /** Toggles echoing of process output to the standard stream. */
+    public void printStandard(boolean print) {
+        if (print)
+            streamLocations.add(StreamLocation.Standard);
+        else
+            streamLocations.remove(StreamLocation.Standard);
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/runtime/ProcessController.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/runtime/ProcessController.java
new file mode 100644
index 0000000..682c258
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/runtime/ProcessController.java
@@ -0,0 +1,387 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.runtime;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.commons.io.IOUtils;
+import org.apache.commons.lang.StringUtils;
+import org.apache.log4j.Logger;
+import org.broadinstitute.gatk.utils.Utils;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+import org.broadinstitute.gatk.utils.exceptions.UserException;
+
+import java.io.ByteArrayInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.util.*;
+
+/**
+ * Facade to Runtime.exec() and java.lang.Process.  Handles
+ * running a process to completion and returns stdout and stderr
+ * as strings.  Creates separate threads for reading stdout and stderr,
+ * then reuses those threads for each process; the most efficient use is
+ * to create one of these and use it repeatedly.  Instances are not
+ * thread-safe, however.
+ *
+ * TODO: java.io sometimes zombies the background threads, locking up on read().
+ * Supposedly NIO has better ways of interrupting a blocked stream but will
+ * require a little bit of refactoring.
+ *
+ * @author Michael Koehrsen
+ * @author Khalid Shakir
+ */
+public class ProcessController {
+    private static Logger logger = Logger.getLogger(ProcessController.class);
+
+    private static enum ProcessStream {Stdout, Stderr}
+
+    // Tracks running processes.
+    private static final Set<ProcessController> running = Collections.synchronizedSet(new HashSet<ProcessController>());
+
+    // Tracks this running process.
+    private Process process;
+
+    // Threads that capture stdout and stderr
+    private final OutputCapture stdoutCapture;
+    private final OutputCapture stderrCapture;
+
+    // When a caller destroyes a controller a new thread local version will be created
+    private boolean destroyed = false;
+
+    // Communication channels with output capture threads
+
+    // Holds the stdout and stderr sent to the background capture threads
+    private final Map<ProcessStream, CapturedStreamOutput> toCapture =
+            new EnumMap<ProcessStream, CapturedStreamOutput>(ProcessStream.class);
+
+    // Holds the results of the capture from the background capture threads.
+    // May be the content via toCapture or an StreamOutput.EMPTY if the capture was interrupted.
+    private final Map<ProcessStream, StreamOutput> fromCapture =
+            new EnumMap<ProcessStream, StreamOutput>(ProcessStream.class);
+
+    // Useful for debugging if background threads have shut down correctly
+    private static int nextControllerId = 0;
+    private final int controllerId;
+
+    public ProcessController() {
+        // Start the background threads for this controller.
+        synchronized (running) {
+            controllerId = nextControllerId++;
+        }
+        stdoutCapture = new OutputCapture(ProcessStream.Stdout, controllerId);
+        stderrCapture = new OutputCapture(ProcessStream.Stderr, controllerId);
+        stdoutCapture.start();
+        stderrCapture.start();
+    }
+
+    /**
+     * Returns a thread local ProcessController.
+     * Should NOT be closed when finished so it can be reused by the thread.
+     *
+     * @return a thread local ProcessController.
+     */
+    public static ProcessController getThreadLocal() {
+        // If the local controller was destroyed get a fresh instance.
+        if (threadProcessController.get().destroyed)
+            threadProcessController.remove();
+        return threadProcessController.get();
+    }
+
+    /**
+     * Thread local process controller container.
+     */
+    private static final ThreadLocal<ProcessController> threadProcessController =
+            new ThreadLocal<ProcessController>() {
+                @Override
+                protected ProcessController initialValue() {
+                    return new ProcessController();
+                }
+            };
+
+    /**
+     * Similar to Runtime.exec() but drains the output and error streams.
+     *
+     * @param command Command to run.
+     * @return The result code.
+     */
+    public static int exec(String[] command) {
+        ProcessController controller = ProcessController.getThreadLocal();
+        return controller.exec(new ProcessSettings(command)).getExitValue();
+    }
+
+    /**
+     * Executes a command line program with the settings and waits for it to return,
+     * processing the output on a background thread.
+     *
+     * @param settings Settings to be run.
+     * @return The output of the command.
+     */
+    public ProcessOutput exec(ProcessSettings settings) {
+        if (destroyed)
+            throw new IllegalStateException("This controller was destroyed");
+
+        ProcessBuilder builder = new ProcessBuilder(settings.getCommand());
+        builder.directory(settings.getDirectory());
+
+        Map<String, String> settingsEnvironment = settings.getEnvironment();
+        if (settingsEnvironment != null) {
+            Map<String, String> builderEnvironment = builder.environment();
+            builderEnvironment.clear();
+            builderEnvironment.putAll(settingsEnvironment);
+        }
+
+        builder.redirectErrorStream(settings.isRedirectErrorStream());
+
+        StreamOutput stdout = null;
+        StreamOutput stderr = null;
+
+        // Start the process running.
+
+        try {
+            synchronized (toCapture) {
+                process = builder.start();
+            }
+            running.add(this);
+        } catch (IOException e) {
+            String message = String.format("Unable to start command: %s\nReason: %s",
+                    StringUtils.join(builder.command(), " "),
+                    e.getMessage());
+            throw new ReviewedGATKException(message);
+        }
+
+        int exitCode;
+
+        try {
+            // Notify the background threads to start capturing.
+            synchronized (toCapture) {
+                toCapture.put(ProcessStream.Stdout,
+                        new CapturedStreamOutput(settings.getStdoutSettings(), process.getInputStream(), System.out));
+                toCapture.put(ProcessStream.Stderr,
+                        new CapturedStreamOutput(settings.getStderrSettings(), process.getErrorStream(), System.err));
+                toCapture.notifyAll();
+            }
+
+            // Write stdin content
+            InputStreamSettings stdinSettings = settings.getStdinSettings();
+            Set<StreamLocation> streamLocations = stdinSettings.getStreamLocations();
+            if (!streamLocations.isEmpty()) {
+                try {
+                    OutputStream stdinStream = process.getOutputStream();
+                    for (StreamLocation location : streamLocations) {
+                        InputStream inputStream;
+                        switch (location) {
+                            case Buffer:
+                                inputStream = new ByteArrayInputStream(stdinSettings.getInputBuffer());
+                                break;
+                            case File:
+                                try {
+                                    inputStream = FileUtils.openInputStream(stdinSettings.getInputFile());
+                                } catch (IOException e) {
+                                    throw new UserException.BadInput(e.getMessage());
+                                }
+                                break;
+                            case Standard:
+                                inputStream = System.in;
+                                break;
+                            default:
+                                throw new ReviewedGATKException("Unexpected stream location: " + location);
+                        }
+                        try {
+                            IOUtils.copy(inputStream, stdinStream);
+                        } finally {
+                            if (location != StreamLocation.Standard)
+                                IOUtils.closeQuietly(inputStream);
+                        }
+                    }
+                    stdinStream.flush();
+                } catch (IOException e) {
+                    throw new ReviewedGATKException("Error writing to stdin on command: " + StringUtils.join(builder.command(), " "), e);
+                }
+            }
+
+            // Wait for the process to complete.
+            try {
+                process.getOutputStream().close();
+                process.waitFor();
+            } catch (IOException e) {
+                throw new ReviewedGATKException("Unable to close stdin on command: " + StringUtils.join(builder.command(), " "), e);
+            } catch (InterruptedException e) {
+                throw new ReviewedGATKException("Process interrupted", e);
+            } finally {
+                while (!destroyed && stdout == null || stderr == null) {
+                    synchronized (fromCapture) {
+                        if (fromCapture.containsKey(ProcessStream.Stdout))
+                            stdout = fromCapture.remove(ProcessStream.Stdout);
+                        if (fromCapture.containsKey(ProcessStream.Stderr))
+                            stderr = fromCapture.remove(ProcessStream.Stderr);
+                        try {
+                            if (stdout == null || stderr == null)
+                                fromCapture.wait();
+                        } catch (InterruptedException e) {
+                            // Log the error, ignore the interrupt and wait patiently
+                            // for the OutputCaptures to (via finally) return their
+                            // stdout and stderr.
+                            logger.error(e);
+                        }
+                    }
+                }
+
+                if (destroyed) {
+                    if (stdout == null)
+                        stdout = StreamOutput.EMPTY;
+                    if (stderr == null)
+                        stderr = StreamOutput.EMPTY;
+                }
+            }
+        } finally {
+            synchronized (toCapture) {
+                exitCode = process.exitValue();
+                process = null;
+            }
+            running.remove(this);
+        }
+
+        return new ProcessOutput(exitCode, stdout, stderr);
+    }
+
+    /**
+     * Executes a command line program with the settings and waits for it to return,
+     * processing the output on a background thread.
+     *
+     * Throws an IOException if the ProcessOutput exit code is nonzero
+     *
+     * @param settings Settings to be run.
+     */
+    public ProcessOutput execAndCheck(ProcessSettings settings) throws IOException {
+        ProcessOutput po = exec(settings);
+        if (po.getExitValue() != 0) {
+            String message = String.format("Process exited with %d\nCommand Line: %s",
+                    po.getExitValue(),
+                    Utils.join(" ", settings.getCommand()));
+            throw new IOException(message);
+        }
+        return po;
+    }
+
+    /**
+     * @return The set of still running processes.
+     */
+    public static Set<ProcessController> getRunning() {
+        synchronized (running) {
+            return new HashSet<ProcessController>(running);
+        }
+    }
+
+    /**
+     * Stops the process from running and tries to ensure process is cleaned up properly.
+     * NOTE: sub-processes started by process may be zombied with their parents set to pid 1.
+     * NOTE: capture threads may block on read.
+     * TODO: Try to use NIO to interrupt streams.
+     */
+    public void tryDestroy() {
+        destroyed = true;
+        synchronized (toCapture) {
+            if (process != null) {
+                process.destroy();
+                IOUtils.closeQuietly(process.getInputStream());
+                IOUtils.closeQuietly(process.getErrorStream());
+            }
+            stdoutCapture.interrupt();
+            stderrCapture.interrupt();
+            toCapture.notifyAll();
+        }
+    }
+
+    @Override
+    protected void finalize() throws Throwable {
+        try {
+            tryDestroy();
+        } catch (Exception e) {
+            logger.error(e);
+        }
+        super.finalize();
+    }
+
+    private class OutputCapture extends Thread {
+        private final int controllerId;
+        private final ProcessStream key;
+
+        /**
+         * Reads in the output of a stream on a background thread to keep the output pipe from backing up and freezing the called process.
+         *
+         * @param key The stdout or stderr key for this output capture.
+         * @param controllerId Unique id of the controller.
+         */
+        public OutputCapture(ProcessStream key, int controllerId) {
+            super(String.format("OutputCapture-%d-%s-%s-%d", controllerId, key.name().toLowerCase(),
+                    Thread.currentThread().getName(), Thread.currentThread().getId()));
+            this.controllerId = controllerId;
+            this.key = key;
+            setDaemon(true);
+        }
+
+        /**
+         * Runs the capture.
+         */
+        @Override
+        public void run() {
+            while (!destroyed) {
+                StreamOutput processStream = StreamOutput.EMPTY;
+                try {
+                    // Wait for a new input stream to be passed from this process controller.
+                    CapturedStreamOutput capturedProcessStream = null;
+                    while (!destroyed && capturedProcessStream == null) {
+                        synchronized (toCapture) {
+                            if (toCapture.containsKey(key)) {
+                                capturedProcessStream = toCapture.remove(key);
+                            } else {
+                                toCapture.wait();
+                            }
+                        }
+                    }
+
+                    if (!destroyed) {
+                        // Read in the input stream
+                        processStream = capturedProcessStream;
+                        capturedProcessStream.readAndClose();
+                    }
+                } catch (InterruptedException e) {
+                    logger.info("OutputCapture interrupted, exiting");
+                    break;
+                } catch (IOException e) {
+                    logger.error("Error reading process output", e);
+                } finally {
+                    // Send the string back to the process controller.
+                    synchronized (fromCapture) {
+                        fromCapture.put(key, processStream);
+                        fromCapture.notify();
+                    }
+                }
+            }
+        }
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/runtime/ProcessOutput.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/runtime/ProcessOutput.java
new file mode 100644
index 0000000..71d90eb
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/runtime/ProcessOutput.java
@@ -0,0 +1,57 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.runtime;
+
+/**
+ * Immutable holder for the result of a finished process: the exit value plus the
+ * captured stdout and stderr streams.
+ */
+public class ProcessOutput {
+    private final int exitValue;
+    private final StreamOutput stdout;
+    private final StreamOutput stderr;
+
+    /**
+     * The output of a process.
+     *
+     * @param exitValue The exit value.
+     * @param stdout    The capture of stdout as defined by the stdout OutputStreamSettings.
+     * @param stderr    The capture of stderr as defined by the stderr OutputStreamSettings.
+     */
+    public ProcessOutput(int exitValue, StreamOutput stdout, StreamOutput stderr) {
+        this.exitValue = exitValue;
+        this.stdout = stdout;
+        this.stderr = stderr;
+    }
+
+    /**
+     * @return the process exit value.
+     */
+    public int getExitValue() {
+        return exitValue;
+    }
+
+    /**
+     * @return the captured stdout content.
+     */
+    public StreamOutput getStdout() {
+        return stdout;
+    }
+
+    /**
+     * @return the captured stderr content.
+     */
+    public StreamOutput getStderr() {
+        return stderr;
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/runtime/ProcessSettings.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/runtime/ProcessSettings.java
new file mode 100644
index 0000000..52d2f06
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/runtime/ProcessSettings.java
@@ -0,0 +1,140 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.runtime;
+
+
+import java.io.File;
+import java.util.Map;
+
+/**
+ * Mutable settings describing how to launch a process: the command line, its
+ * environment and working directory, and how stdin/stdout/stderr are handled.
+ */
+public class ProcessSettings {
+    private String[] command;
+    private Map<String, String> environment;
+    private File directory;
+    private boolean redirectErrorStream;
+    private InputStreamSettings stdinSettings;
+    private OutputStreamSettings stdoutSettings;
+    private OutputStreamSettings stderrSettings;
+
+    /**
+     * @param command Command line to run.
+     */
+    public ProcessSettings(String[] command) {
+        this(command, false, null, null, null, null, null);
+    }
+
+    /**
+     * @param command             Command line to run.
+     * @param redirectErrorStream true if stderr should be sent to stdout.
+     * @param directory           The directory to run the command in, or null to run in the current directory.
+     * @param environment         Environment settings to override System.getEnv, or null to use System.getEnv.
+     * @param stdinSettings       Settings for writing to the process stdin.
+     * @param stdoutSettings      Settings for capturing the process stdout.
+     * @param stderrSettings      Setting for capturing the process stderr.
+     */
+    public ProcessSettings(String[] command, boolean redirectErrorStream, File directory, Map<String, String> environment,
+                           InputStreamSettings stdinSettings, OutputStreamSettings stdoutSettings, OutputStreamSettings stderrSettings) {
+        this.command = checkCommand(command);
+        this.redirectErrorStream = redirectErrorStream;
+        this.directory = directory;
+        this.environment = environment;
+        this.stdinSettings = checkSettings(stdinSettings);
+        this.stdoutSettings = checkSettings(stdoutSettings);
+        this.stderrSettings = checkSettings(stderrSettings);
+    }
+
+    /**
+     * @return the command line to run.
+     *         NOTE(review): this exposes the internal array; callers could mutate it
+     *         in place -- confirm whether a defensive copy is wanted here.
+     */
+    public String[] getCommand() {
+        return command;
+    }
+
+    public void setCommand(String[] command) {
+        this.command = checkCommand(command);
+    }
+
+    /**
+     * @return true if stderr should be merged into stdout.
+     */
+    public boolean isRedirectErrorStream() {
+        return redirectErrorStream;
+    }
+
+    public void setRedirectErrorStream(boolean redirectErrorStream) {
+        this.redirectErrorStream = redirectErrorStream;
+    }
+
+    /**
+     * @return the working directory for the process, or null for the current directory.
+     */
+    public File getDirectory() {
+        return directory;
+    }
+
+    public void setDirectory(File directory) {
+        this.directory = directory;
+    }
+
+    /**
+     * @return the environment override, or null to inherit System.getenv.
+     */
+    public Map<String, String> getEnvironment() {
+        return environment;
+    }
+
+    public void setEnvironment(Map<String, String> environment) {
+        this.environment = environment;
+    }
+
+    public InputStreamSettings getStdinSettings() {
+        return stdinSettings;
+    }
+
+    public void setStdinSettings(InputStreamSettings stdinSettings) {
+        this.stdinSettings = checkSettings(stdinSettings);
+    }
+
+    public OutputStreamSettings getStdoutSettings() {
+        return stdoutSettings;
+    }
+
+    public void setStdoutSettings(OutputStreamSettings stdoutSettings) {
+        this.stdoutSettings = checkSettings(stdoutSettings);
+    }
+
+    public OutputStreamSettings getStderrSettings() {
+        return stderrSettings;
+    }
+
+    public void setStderrSettings(OutputStreamSettings stderrSettings) {
+        this.stderrSettings = checkSettings(stderrSettings);
+    }
+
+    /**
+     * Validates that the command line is non-null and contains no null entries.
+     *
+     * @param command candidate command line.
+     * @return the same array, if valid.
+     * @throws IllegalArgumentException if the array or any element is null.
+     */
+    protected String[] checkCommand(String[] command) {
+        if (command == null)
+            throw new IllegalArgumentException("Command is not allowed to be null");
+        for (String s: command)
+            if (s == null)
+                throw new IllegalArgumentException("Command is not allowed to contain nulls");
+        return command;
+    }
+
+    // Null-safe normalizers: a null settings argument becomes a fresh default instance.
+    protected InputStreamSettings checkSettings(InputStreamSettings settings) {
+        return settings == null ? new InputStreamSettings() : settings;
+    }
+
+    protected OutputStreamSettings checkSettings(OutputStreamSettings settings) {
+        return settings == null ? new OutputStreamSettings() : settings;
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/runtime/RuntimeUtils.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/runtime/RuntimeUtils.java
new file mode 100644
index 0000000..241ebd2
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/runtime/RuntimeUtils.java
@@ -0,0 +1,77 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.runtime;
+
+import org.apache.commons.lang.StringUtils;
+
+import java.io.File;
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * Small helpers for inspecting the runtime environment: the executable search
+ * path and the JVM classpath.
+ */
+public class RuntimeUtils {
+    /** Entries of the PATH (or, failing that, "path") environment variable, split on the platform separator. */
+    public static final String[] PATHS;
+
+    static {
+        String envPath = System.getenv("PATH");
+        if (envPath == null)
+            envPath = System.getenv("path");
+        PATHS = (envPath == null) ? new String[0] : StringUtils.split(envPath, File.pathSeparatorChar);
+    }
+
+    /**
+     * Returns the path to an executable or null if it doesn't exist.
+     *
+     * @param executable Relative path
+     * @return The absolute file path of the first PATH entry containing it, or null.
+     */
+    public static File which(String executable) {
+        File found = null;
+        for (final String dir : PATHS) {
+            final File candidate = new File(dir, executable);
+            if (candidate.exists()) {
+                found = candidate.getAbsoluteFile();
+                break;
+            }
+        }
+        return found;
+    }
+
+    /**
+     * Return the current classpath as a list of absolute paths.
+     *
+     * @return absolute paths of every classpath entry that exists on disk.
+     */
+    public static List<String> getAbsoluteClassPaths() {
+        final List<String> absoluteClassPaths = new ArrayList<>();
+        for (final String entry : System.getProperty("java.class.path").split(File.pathSeparator)) {
+            final File entryFile = new File(entry);
+            if (entryFile.exists())
+                absoluteClassPaths.add(entryFile.getAbsolutePath());
+        }
+        return absoluteClassPaths;
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/runtime/StreamLocation.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/runtime/StreamLocation.java
new file mode 100644
index 0000000..bd42989
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/runtime/StreamLocation.java
@@ -0,0 +1,33 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.runtime;
+
+/**
+ * Where to read/write a stream.
+ */
+public enum StreamLocation {
+    Buffer,   // an in-memory byte buffer
+    File,     // a file on disk
+    Standard  // the JVM's standard stream (System.in for stdin, System.out/err for output)
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/runtime/StreamOutput.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/runtime/StreamOutput.java
new file mode 100644
index 0000000..e4a8b57
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/runtime/StreamOutput.java
@@ -0,0 +1,69 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.runtime;
+
+/**
+ * The content of stdout or stderr.
+ */
+public abstract class StreamOutput {
+    /**
+     * Empty stream output when no output is captured due to an error.
+     */
+    public static final StreamOutput EMPTY = new StreamOutput() {
+        @Override
+        public byte[] getBufferBytes() {
+            return new byte[0];
+        }
+
+        @Override
+        public boolean isBufferTruncated() {
+            return false;
+        }
+    };
+
+    /**
+     * Returns the content as a string.
+     * NOTE: decodes with the platform default charset (plain new String(byte[])).
+     *
+     * @return The content as a string.
+     */
+    public String getBufferString() {
+        return new String(getBufferBytes());
+    }
+
+    /**
+     * Returns the content as raw bytes.
+     *
+     * @return The content as a byte array.
+     */
+    public abstract byte[] getBufferBytes();
+
+    /**
+     * Returns true if the buffer was truncated.
+     *
+     * @return true if the buffer was truncated.
+     */
+    public abstract boolean isBufferTruncated();
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/sam/AlignmentStartComparator.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/sam/AlignmentStartComparator.java
new file mode 100644
index 0000000..8aac52d
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/sam/AlignmentStartComparator.java
@@ -0,0 +1,50 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.sam;
+
+import htsjdk.samtools.SAMRecord;
+
+import java.util.Comparator;
+
+/**
+ * Compares two SAMRecords only on the basis of alignment start.  Note that
+ * comparisons are performed ONLY on the basis of alignment start; any
+ * two SAM records with the same alignment start will be considered equal.
+ *
+ * Unmapped alignments will all be considered equal.
+ *
+ * @author mhanna
+ * @version 0.1
+ */
+public class AlignmentStartComparator implements Comparator<SAMRecord> {
+    @Override
+    public int compare(final SAMRecord lhs, final SAMRecord rhs) {
+        // Order first by contig (reference index), then by alignment start.
+        if(!lhs.getReferenceIndex().equals(rhs.getReferenceIndex()))
+            return Integer.compare(lhs.getReferenceIndex(), rhs.getReferenceIndex());
+
+        // Alignment starts are >= 0, so Integer.compare is overflow-safe here too.
+        return Integer.compare(lhs.getAlignmentStart(), rhs.getAlignmentStart());
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/sam/AlignmentStartWithNoTiesComparator.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/sam/AlignmentStartWithNoTiesComparator.java
new file mode 100644
index 0000000..b64bbac
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/sam/AlignmentStartWithNoTiesComparator.java
@@ -0,0 +1,73 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.sam;
+
+import com.google.java.contract.Ensures;
+import com.google.java.contract.Requires;
+import htsjdk.samtools.SAMRecord;
+
+import java.util.Comparator;
+
+/**
+ * Orders SAMRecords by contig (reference index) and then by alignment start,
+ * but never declares two distinct records equal: pairs of unmapped reads and
+ * reads tied on alignment start are broken one-sidedly by returning 1.  Only
+ * identical references (r1 == r2) compare as 0.
+ *
+ * NOTE(review): because ties are broken one-sidedly, compare(a, b) and
+ * compare(b, a) may both return 1, which intentionally violates the
+ * {@link Comparator} antisymmetry contract.  Do not use this comparator
+ * with sorts that verify that contract.
+ */
+public class AlignmentStartWithNoTiesComparator implements Comparator<SAMRecord> {
+    @Requires("c1 >= 0 && c2 >= 0")
+    @Ensures("result == 0 || result == 1 || result == -1")
+    private int compareContigs(final int c1, final int c2) {
+        // Plain signum comparison of the two contig indices.
+        if (c1 == c2)
+            return 0;
+        else if (c1 > c2)
+            return 1;
+        return -1;
+    }
+
+    @Requires("r1 != null && r2 != null")
+    @Ensures("result == 0 || result == 1 || result == -1")
+    @Override
+    public int compare(final SAMRecord r1, final SAMRecord r2) {
+        int result;
+
+        if (r1 == r2)
+            result = 0;     // identical reference: the only true tie
+
+        else if (r1.getReadUnmappedFlag())
+            result = 1;     // unmapped reads sort after mapped reads
+        else if (r2.getReadUnmappedFlag())
+            result = -1;
+        else {
+            final int cmpContig = compareContigs(r1.getReferenceIndex(), r2.getReferenceIndex());
+
+            if (cmpContig != 0)
+                result = cmpContig;
+
+            else {
+                // Same contig: order by start; equal starts are broken arbitrarily (never 0).
+                if (r1.getAlignmentStart() < r2.getAlignmentStart())
+                    result = -1;
+                else
+                    result = 1;
+            }
+        }
+
+        return result;
+    }
+}
\ No newline at end of file
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/sam/AlignmentUtils.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/sam/AlignmentUtils.java
new file mode 100644
index 0000000..04e3ccb
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/sam/AlignmentUtils.java
@@ -0,0 +1,1339 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.sam;
+
+import com.google.java.contract.Ensures;
+import com.google.java.contract.Requires;
+import htsjdk.samtools.Cigar;
+import htsjdk.samtools.CigarElement;
+import htsjdk.samtools.CigarOperator;
+import htsjdk.samtools.SAMRecord;
+import org.broadinstitute.gatk.utils.BaseUtils;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+import org.broadinstitute.gatk.utils.haplotype.Haplotype;
+import org.broadinstitute.gatk.utils.pileup.PileupElement;
+import org.broadinstitute.gatk.utils.recalibration.EventType;
+import org.broadinstitute.gatk.utils.smithwaterman.SWPairwiseAlignment;
+
+import java.util.*;
+
+
+public final class AlignmentUtils {
+    // CIGAR operators that align read bases to reference bases (per the SAM spec: M, =, X).
+    private final static EnumSet<CigarOperator> ALIGNED_TO_GENOME_OPERATORS = EnumSet.of(CigarOperator.M, CigarOperator.EQ, CigarOperator.X);
+    // As above, plus soft clips (S), which consume read bases but not reference bases.
+    private final static EnumSet<CigarOperator> ALIGNED_TO_GENOME_PLUS_SOFTCLIPS = EnumSet.of(CigarOperator.M, CigarOperator.EQ, CigarOperator.X, CigarOperator.S);
+    // SAM tag used by createReadAlignedToRef to mark informative reads with their haplotype's hash code.
+    public final static String HAPLOTYPE_TAG = "HC";
+
+    // Utility class: all members are static, so it cannot be instantiated.
+    private AlignmentUtils() { }
+
+    /**
+     * Does cigar start or end with an insertion or deletion operation?
+     *
+     * @param cigar a non-null cigar to test
+     * @return true if the first or last operator of cigar is an I or a D; false for an empty cigar
+     * @throws IllegalArgumentException if {@code cigar} is {@code null}
+     */
+    public static boolean startsOrEndsWithInsertionOrDeletion(final Cigar cigar) {
+        if ( cigar == null ) throw new IllegalArgumentException("Cigar cannot be null");
+
+        if ( cigar.isEmpty() )
+            return false;
+
+        final CigarOperator first = cigar.getCigarElement(0).getOperator();
+        final CigarOperator last = cigar.getCigarElement(cigar.numCigarElements()-1).getOperator();
+        return first == CigarOperator.D || first == CigarOperator.I || last == CigarOperator.D || last == CigarOperator.I;
+    }
+
+    /**
+     * Aligns the read to the haplotype, and then projects this alignment of read -> hap onto the reference
+     * via the alignment of haplotype (via its getCigar) method.
+     *
+     * @param originalRead the read we want to write aligned to the reference genome
+     * @param haplotype the haplotype that the read should be aligned to, before aligning to the reference
+     * @param refHaplotype the reference haplotype; its bases are used when left-aligning indels in the final cigar
+     * @param referenceStart the start of the reference that haplotype is aligned to.  Provides global coordinate frame.
+     * @param isInformative true if the read is differentially informative for one of the haplotypes
+     *
+     * @throws IllegalArgumentException if {@code originalRead} is {@code null} or {@code haplotype} is {@code null} or it
+     *   does not have a Cigar or the {@code referenceStart} is invalid (less than 1).
+     *
+     * @return a GATKSAMRecord aligned to reference. Never {@code null}.
+     */
+    public static GATKSAMRecord createReadAlignedToRef(final GATKSAMRecord originalRead,
+                                                       final Haplotype haplotype,
+                                                       final Haplotype refHaplotype,
+                                                       final int referenceStart,
+                                                       final boolean isInformative) {
+        if ( originalRead == null ) throw new IllegalArgumentException("originalRead cannot be null");
+        if ( haplotype == null ) throw new IllegalArgumentException("haplotype cannot be null");
+        if ( refHaplotype == null ) throw new IllegalArgumentException("ref haplotype cannot be null");
+        if ( haplotype.getCigar() == null ) throw new IllegalArgumentException("Haplotype cigar not set " + haplotype);
+        if ( referenceStart < 1 ) throw new IllegalArgumentException("reference start must be >= 1 but got " + referenceStart);
+
+        // compute the smith-waterman alignment of read -> haplotype
+        final SWPairwiseAlignment swPairwiseAlignment = new SWPairwiseAlignment(haplotype.getBases(), originalRead.getReadBases(), CigarUtils.NEW_SW_PARAMETERS);
+        if ( swPairwiseAlignment.getAlignmentStart2wrt1() == -1 )
+            // sw can fail (reasons not clear) so if it happens just don't realign the read
+            return originalRead;
+        final Cigar swCigar = consolidateCigar(swPairwiseAlignment.getCigar());
+
+        // since we're modifying the read we need to clone it
+        final GATKSAMRecord read = (GATKSAMRecord)originalRead.clone();
+
+        // only informative reads are given the haplotype tag to enhance visualization
+        if ( isInformative )
+            read.setAttribute(HAPLOTYPE_TAG, haplotype.hashCode());
+
+        // compute here the read starts w.r.t. the reference from the SW result and the hap -> ref cigar
+        final Cigar extendedHaplotypeCigar = haplotype.getConsolidatedPaddedCigar(1000);
+        final int readStartOnHaplotype = calcFirstBaseMatchingReferenceInCigar(extendedHaplotypeCigar, swPairwiseAlignment.getAlignmentStart2wrt1());
+        final int readStartOnReference = referenceStart + haplotype.getAlignmentStartHapwrtRef() + readStartOnHaplotype;
+        read.setAlignmentStart(readStartOnReference);
+        read.resetSoftStartAndEnd();
+
+        // compute the read -> ref alignment by mapping read -> hap -> ref from the
+        // SW of read -> hap mapped through the given by hap -> ref
+        final Cigar haplotypeToRef = trimCigarByBases(extendedHaplotypeCigar, swPairwiseAlignment.getAlignmentStart2wrt1(), extendedHaplotypeCigar.getReadLength() - 1);
+        final Cigar readToRefCigarRaw = applyCigarToCigar(swCigar, haplotypeToRef);
+        final Cigar readToRefCigarClean = cleanUpCigar(readToRefCigarRaw);
+        final Cigar readToRefCigar = leftAlignIndel(readToRefCigarClean, refHaplotype.getBases(),
+                originalRead.getReadBases(), swPairwiseAlignment.getAlignmentStart2wrt1(), 0, true);
+
+        read.setCigar(readToRefCigar);
+
+        // sanity check: the projected cigar must still account for every read base
+        if ( readToRefCigar.getReadLength() != read.getReadLength() )
+            throw new IllegalStateException("Cigar " + readToRefCigar + " with read length " + readToRefCigar.getReadLength()
+                    + " != read length " + read.getReadLength() + " for read " + read.format() + "\nhapToRef " + haplotypeToRef + " length " + haplotypeToRef.getReadLength() + "/" + haplotypeToRef.getReferenceLength()
+                    + "\nreadToHap " + swCigar + " length " + swCigar.getReadLength() + "/" + swCigar.getReferenceLength());
+
+        return read;
+    }
+
+
+
+    /**
+     * Get the byte[] from bases that cover the reference interval refStart -> refEnd given the
+     * alignment of bases to the reference (basesToRefCigar) and the start offset of the bases on the reference
+     *
+     * refStart and refEnd are 0 based offsets that we want to obtain.  In the client code, if the reference
+     * bases start at position X and you want Y -> Z, refStart should be Y - X and refEnd should be Z - X.
+     *
+     * If refStart or refEnd would start or end the new bases within a deletion, this function will return null
+     *
+     * @param bases the bases, aligned to the reference via basesToRefCigar
+     * @param refStart 0-based inclusive start offset on the reference of the interval to extract
+     * @param refEnd 0-based inclusive stop offset on the reference of the interval to extract
+     * @param basesStartOnRef where does the bases array start w.r.t. the reference start?  For example, bases[0] of
+     *                        could be at refStart == 0 if basesStartOnRef == 0, but it could just as easily be at
+     *                        10 (meaning bases doesn't fully span the reference), which would be indicated by basesStartOnRef == 10.
+     *                        It's not trivial to eliminate this parameter because it's tied up with the cigar
+     * @param basesToRefCigar the cigar that maps the bases to the reference genome
+     * @return a byte[] containing the bases covering this interval, or null if we would start or end within a deletion
+     */
+    public static byte[] getBasesCoveringRefInterval(final int refStart, final int refEnd, final byte[] bases, final int basesStartOnRef, final Cigar basesToRefCigar) {
+        if ( refStart < 0 || refEnd < refStart ) throw new IllegalArgumentException("Bad start " + refStart + " and/or stop " + refEnd);
+        if ( basesStartOnRef < 0 ) throw new IllegalArgumentException("BasesStartOnRef must be >= 0 but got " + basesStartOnRef);
+        if ( bases == null ) throw new IllegalArgumentException("Bases cannot be null");
+        if ( basesToRefCigar == null ) throw new IllegalArgumentException("basesToRefCigar cannot be null");
+        if ( bases.length != basesToRefCigar.getReadLength() ) throw new IllegalArgumentException("Mismatch in length between reference bases " + bases.length + " and cigar length " + basesToRefCigar);
+
+        int refPos = basesStartOnRef;   // current position on the reference
+        int basesPos = 0;               // current position in the bases array
+        int basesStart = -1;            // index in bases where refStart was found
+        int basesStop = -1;             // index in bases where refEnd was found
+        boolean done = false;
+
+        // Walk the cigar, tracking both coordinates, until refEnd is located.
+        for ( int iii = 0; ! done && iii < basesToRefCigar.numCigarElements(); iii++ ) {
+            final CigarElement ce = basesToRefCigar.getCigarElement(iii);
+            switch ( ce.getOperator() ) {
+                case I:
+                    // Insertions consume bases but no reference positions.
+                    basesPos += ce.getLength();
+                    break;
+                case M: case X: case EQ:
+                    // Aligned stretch: both coordinates advance in lockstep.
+                    for ( int i = 0; i < ce.getLength(); i++ ) {
+                        if ( refPos == refStart )
+                            basesStart = basesPos;
+                        if ( refPos == refEnd ) {
+                            basesStop = basesPos;
+                            done = true;
+                            break;
+                        }
+                        refPos++;
+                        basesPos++;
+                    }
+                    break;
+                case D:
+                    for ( int i = 0; i < ce.getLength(); i++ ) {
+                        if ( refPos == refEnd || refPos == refStart ) {
+                            // if we ever reach a ref position that is either a start or an end, we fail
+                            return null;
+                        }
+                        refPos++;
+                    }
+                    break;
+                default:
+                    throw new IllegalStateException("Unsupported operator " + ce);
+            }
+        }
+
+        if ( basesStart == -1 || basesStop == -1 )
+            throw new IllegalStateException("Never found start " + basesStart + " or stop " + basesStop + " given cigar " + basesToRefCigar);
+
+        return Arrays.copyOfRange(bases, basesStart, basesStop + 1);
+    }
+
+    /**
+     * Get the number of bases at which refSeq and readSeq differ, given their alignment
+     *
+     * Every inserted or deleted base counts as one difference.
+     *
+     * @param cigar the alignment of readSeq to refSeq
+     * @param refSeq the bases of the reference sequence
+     * @param readSeq the bases of the read sequence
+     * @return the number of bases that differ between refSeq and readSeq
+     */
+    public static int calcNumDifferentBases(final Cigar cigar, final byte[] refSeq, final byte[] readSeq) {
+        int refIndex = 0, readIdx = 0, delta = 0;
+
+        for (final CigarElement ce : cigar.getCigarElements()) {
+            final int elementLength = ce.getLength();
+            switch (ce.getOperator()) {
+                case X:case EQ:case M:
+                    // Aligned stretch: compare base-by-base.
+                    for (int j = 0; j < elementLength; j++, refIndex++, readIdx++)
+                        delta += refSeq[refIndex] != readSeq[readIdx] ? 1 : 0;
+                    break;
+                case I:
+                    // Each inserted base is a difference...
+                    delta += elementLength;
+                    // ...then falls through intentionally: I and S both consume read bases.
+                case S:
+                    readIdx += elementLength;
+                    break;
+                case D:
+                    // Each deleted base is a difference...
+                    delta += elementLength;
+                    // ...then falls through intentionally: D and N both consume reference bases.
+                case N:
+                    refIndex += elementLength;
+                    break;
+                case H:
+                case P:
+                    // Hard clips and padding consume neither read nor reference bases.
+                    break;
+                default:
+                    throw new ReviewedGATKException("The " + ce.getOperator() + " cigar element is not currently supported");
+            }
+        }
+
+        return delta;
+    }
+
+    /** Accumulator of mismatch statistics returned by {@link #getMismatchCount}. */
+    public static class MismatchCount {
+        // Number of read bases that mismatch the reference.
+        public int numMismatches = 0;
+        // Sum of the base qualities at the mismatching positions.
+        public long mismatchQualities = 0;
+    }
+
+    /**
+     * Sum of the base qualities at positions where the read mismatches the reference.
+     *
+     * @param r the read to check against the reference
+     * @param refSeq the bases of the reference sequence
+     * @param refIndex the 0-based index in refSeq of the read's first aligned base
+     * @return the summed qualities of all mismatching bases
+     */
+    public static long mismatchingQualities(GATKSAMRecord r, byte[] refSeq, int refIndex) {
+        return getMismatchCount(r, refSeq, refIndex).mismatchQualities;
+    }
+
+    /**
+     * Count mismatches over the entire read.
+     *
+     * @see #getMismatchCount(GATKSAMRecord, byte[], int, int, int) with startOnRead == 0 and nReadBases == read.getReadLength()
+     */
+    public static MismatchCount getMismatchCount(GATKSAMRecord r, byte[] refSeq, int refIndex) {
+        return getMismatchCount(r, refSeq, refIndex, 0, r.getReadLength());
+    }
+
+    // todo -- this code and mismatchesInRefWindow should be combined and optimized into a single
+    // todo -- high performance implementation.  We can do a lot better than this right now
+
+    /**
+     * Count how many bases mismatch the reference.  Indels are not considered mismatching.
+     *
+     * @param r                   the sam record to check against
+     * @param refSeq              the byte array representing the reference sequence
+     * @param refIndex            the index in the reference byte array of the read's first base (the reference index
+     *                            is matching the alignment start, there may be tons of soft-clipped bases before/after
+     *                            that so it's wrong to compare with getReadLength() here.).  Note that refIndex is
+     *                            zero based, not 1 based
+     * @param startOnRead         the index in the read's bases from which we start counting
+     * @param nReadBases          the number of bases after (but including) startOnRead that we check
+     * @return non-null object representing the mismatch count
+     */
+    @Ensures("result != null")
+    public static MismatchCount getMismatchCount(GATKSAMRecord r, byte[] refSeq, int refIndex, int startOnRead, int nReadBases) {
+        if ( r == null ) throw new IllegalArgumentException("attempting to calculate the mismatch count from a read that is null");
+        if ( refSeq == null ) throw new IllegalArgumentException("attempting to calculate the mismatch count with a reference sequence that is null");
+        if ( refIndex < 0 ) throw new IllegalArgumentException("attempting to calculate the mismatch count with a reference index that is negative");
+        if ( startOnRead < 0 ) throw new IllegalArgumentException("attempting to calculate the mismatch count with a read start that is negative");
+        if ( nReadBases < 0 ) throw new IllegalArgumentException("attempting to calculate the mismatch count for a negative number of read bases");
+        if ( refSeq.length - refIndex < (r.getAlignmentEnd() - r.getAlignmentStart()) )
+            throw new IllegalArgumentException("attempting to calculate the mismatch count against a reference string that is smaller than the read");
+
+        MismatchCount mc = new MismatchCount();
+
+        int readIdx = 0;
+        final int endOnRead = startOnRead + nReadBases - 1; // index of the last base on read we want to count (note we are including soft-clipped bases with this math)
+        final byte[] readSeq = r.getReadBases();
+        final Cigar c = r.getCigar();
+        final byte[] readQuals = r.getBaseQualities();
+        for (final CigarElement ce : c.getCigarElements()) {
+
+            // Stop once we have walked past the requested read window.
+            if (readIdx > endOnRead)
+                break;
+
+            final int elementLength = ce.getLength();
+            switch (ce.getOperator()) {
+                case X:
+                    // Explicit mismatch element: every base counts.
+                    // NOTE(review): unlike the M branch below, this fast path does not
+                    // honor the startOnRead/endOnRead window -- confirm callers never
+                    // pass sub-read windows for X/EQ cigars.
+                    mc.numMismatches += elementLength;
+                    for (int j = 0; j < elementLength; j++)
+                        mc.mismatchQualities += readQuals[readIdx+j];
+                    // intentional fall-through: X and EQ both advance ref and read indices
+                case EQ:
+                    refIndex += elementLength;
+                    readIdx += elementLength;
+                break;
+                case M:
+                    for (int j = 0; j < elementLength; j++, refIndex++, readIdx++) {
+                        if (refIndex >= refSeq.length)
+                            continue;                      // TODO : It should never happen, we should throw exception here
+                        if (readIdx < startOnRead) continue;
+                        if (readIdx > endOnRead) break;
+                        byte refChr = refSeq[refIndex];
+                        byte readChr = readSeq[readIdx];
+                        // Note: we need to count X/N's as mismatches because that's what SAM requires
+                        //if ( BaseUtils.simpleBaseToBaseIndex(readChr) == -1 ||
+                        //     BaseUtils.simpleBaseToBaseIndex(refChr)  == -1 )
+                        //    continue; // do not count Ns/Xs/etc ?
+                        if (readChr != refChr) {
+                            mc.numMismatches++;
+                            mc.mismatchQualities += readQuals[readIdx];
+                        }
+                    }
+                    break;
+                case I:
+                case S:
+                    // Insertions and soft clips consume read bases only.
+                    readIdx += elementLength;
+                    break;
+                case D:
+                case N:
+                    // Deletions and skips consume reference bases only.
+                    refIndex += elementLength;
+                    break;
+                case H:
+                case P:
+                    break;
+                default:
+                    throw new ReviewedGATKException("The " + ce.getOperator() + " cigar element is not currently supported");
+            }
+
+        }
+        return mc;
+    }
+
+    /**
+     * Returns number of alignment blocks (continuous stretches of aligned bases) in the specified alignment.
+     * This method follows closely the SAMRecord::getAlignmentBlocks() implemented in samtools library, but
+     * it only counts blocks without actually allocating and filling the list of blocks themselves. Hence, this method is
+     * a much more efficient alternative to r.getAlignmentBlocks.size() in the situations when this number is all that is needed.
+     * Formally, this method simply returns the number of M/EQ/X elements in the cigar.
+     *
+     * @param r alignment
+     * @return number of continuous alignment blocks (i.e. 'M' elements of the cigar; all indel and clipping elements are ignored).
+     */
+    @Ensures("result >= 0")
+    public static int getNumAlignmentBlocks(final SAMRecord r) {
+        if ( r == null ) throw new IllegalArgumentException("read cannot be null");
+
+        final Cigar cigar = r.getCigar();
+        if (cigar == null) return 0;
+
+        // Each M/EQ/X element is one continuous aligned block.
+        int blockCount = 0;
+        for (final CigarElement element : cigar.getCigarElements()) {
+            if (ALIGNED_TO_GENOME_OPERATORS.contains(element.getOperator())) {
+                blockCount++;
+            }
+        }
+
+        return blockCount;
+    }
+
+
+    /**
+     * Get the number of bases aligned to the genome, including soft clips
+     *
+     * If read is not mapped (i.e., doesn't have a cigar) returns 0
+     *
+     * @param r a non-null GATKSAMRecord
+     * @return the number of bases aligned to the genome in R, including soft clipped bases
+     * @throws IllegalArgumentException if {@code r} is {@code null}
+     */
+    public static int getNumAlignedBasesCountingSoftClips(final GATKSAMRecord r) {
+        // Explicit null check for consistency with getNumHardClippedBases et al.
+        // (previously a null read produced a NullPointerException here).
+        if ( r == null ) throw new IllegalArgumentException("Read cannot be null");
+
+        int n = 0;
+        final Cigar cigar = r.getCigar();
+        if (cigar == null) return 0;
+
+        // Sum the lengths of all M/EQ/X/S elements.
+        for (final CigarElement e : cigar.getCigarElements())
+            if (ALIGNED_TO_GENOME_PLUS_SOFTCLIPS.contains(e.getOperator()))
+                n += e.getLength();
+
+        return n;
+    }
+
+    /**
+     * Count the number of bases hard clipped from read
+     *
+     * If read's cigar is null, return 0
+     *
+     * @param r a non-null read
+     * @return a positive integer
+     */
+    @Ensures("result >= 0")
+    public static int getNumHardClippedBases(final SAMRecord r) {
+        if ( r == null ) throw new IllegalArgumentException("Read cannot be null");
+
+        final Cigar cigar = r.getCigar();
+        if (cigar == null) return 0;
+
+        // Sum the lengths of every hard-clip (H) element.
+        int hardClippedCount = 0;
+        for (final CigarElement element : cigar.getCigarElements()) {
+            if (element.getOperator() == CigarOperator.H) {
+                hardClippedCount += element.getLength();
+            }
+        }
+
+        return hardClippedCount;
+    }
+
+    /**
+     * Calculate the number of bases that are soft clipped in read with quality score greater than threshold
+     *
+     * Handles the case where the cigar is null (i.e., the read is unmapped), returning 0
+     *
+     * @param read a non-null GATKSAMRecord.
+     * @param qualThreshold consider bases with quals > this value as high quality.  Must be >= 0
+     * @return positive integer
+     */
+    @Ensures("result >= 0")
+    public static int calcNumHighQualitySoftClips( final GATKSAMRecord read, final byte qualThreshold ) {
+        if ( read == null ) throw new IllegalArgumentException("Read cannot be null");
+        if ( qualThreshold < 0 ) throw new IllegalArgumentException("Expected qualThreshold to be a positive byte but saw " + qualThreshold);
+
+        final Cigar cigar = read.getCigar();
+        if ( cigar == null ) // the read is unmapped
+            return 0;
+
+        final byte[] quals = read.getBaseQualities( EventType.BASE_SUBSTITUTION );
+
+        int highQualSoftClipCount = 0;
+        int readOffset = 0; // index into quals; advanced by every operator that consumes read bases
+        for ( final CigarElement element : cigar.getCigarElements() ) {
+            final int length = element.getLength();
+
+            switch( element.getOperator() ) {
+                case S:
+                    // Soft-clipped stretch: count bases whose quality exceeds the threshold.
+                    for ( int i = 0; i < length; i++ ) {
+                        if ( quals[readOffset++] > qualThreshold ) {
+                            highQualSoftClipCount++;
+                        }
+                    }
+                    break;
+                case M: case I: case EQ: case X:
+                    // Consume read bases without counting: not soft clips.
+                    readOffset += length;
+                    break;
+                case H: case P: case D: case N:
+                    // No read bases consumed.
+                    break;
+                default:
+                    throw new IllegalStateException("Unsupported cigar operator: " + element.getOperator());
+            }
+        }
+
+        return highQualSoftClipCount;
+    }
+
+    /**
+     * Convenience overload that pulls the offset and deletion state from the pileup element.
+     *
+     * @see #calcAlignmentByteArrayOffset(Cigar, int, boolean, int, int)
+     */
+    public static int calcAlignmentByteArrayOffset(final Cigar cigar, final PileupElement pileupElement, final int alignmentStart, final int refLocus) {
+        return calcAlignmentByteArrayOffset( cigar, pileupElement.getOffset(), pileupElement.isDeletion(), alignmentStart, refLocus );
+    }
+
+    /**
+     * Calculate the index into the read's bases of the beginning of the encompassing cigar element for a given cigar and offset
+     *
+     * @param cigar            the read's CIGAR -- cannot be null
+     * @param offset           the offset to use for the calculation or -1 if in the middle of a deletion
+     * @param isDeletion       are we in the middle of a deletion?
+     * @param alignmentStart   the alignment start of the read
+     * @param refLocus         the reference position of the offset
+     * @return a non-negative int index
+     * @throws IllegalArgumentException if cigar is null, offset < -1, alignmentStart or refLocus is negative,
+     *                                  or offset is beyond the read length
+     */
+    @Ensures("result >= 0")
+    public static int calcAlignmentByteArrayOffset(final Cigar cigar, final int offset, final boolean isDeletion, final int alignmentStart, final int refLocus) {
+        if ( cigar == null ) throw new IllegalArgumentException("attempting to find the alignment position from a CIGAR that is null");
+        if ( offset < -1 ) throw new IllegalArgumentException("attempting to find the alignment position with an offset that is negative (and not -1)");
+        if ( alignmentStart < 0 ) throw new IllegalArgumentException("attempting to find the alignment position from an alignment start that is negative");
+        if ( refLocus < 0 ) throw new IllegalArgumentException("attempting to find the alignment position from a reference position that is negative");
+        if ( offset >= cigar.getReadLength() ) throw new IllegalArgumentException("attempting to find the alignment position of an offset than is larger than the read length");
+
+        int pileupOffset = offset;
+
+        // Reassign the offset if we are in the middle of a deletion because of the modified representation of the read bases
+        if (isDeletion) {
+            pileupOffset = refLocus - alignmentStart;
+            // Leading soft clips shift the read-space coordinate relative to the reference-space one.
+            final CigarElement ce = cigar.getCigarElement(0);
+            if (ce.getOperator() == CigarOperator.S) {
+                pileupOffset += ce.getLength();
+            }
+        }
+
+        int pos = 0;          // position within the read (pileup coordinate space)
+        int alignmentPos = 0; // position within the aligned-bases array being computed
+
+        for (int iii = 0; iii < cigar.numCigarElements(); iii++) {
+            final CigarElement ce = cigar.getCigarElement(iii);
+            final int elementLength = ce.getLength();
+
+            switch (ce.getOperator()) {
+                case I:
+                case S: // TODO -- I don't think that soft clips should be treated the same as inserted bases here. Investigation needed.
+                    // Inserted/soft-clipped bases advance the read position but not the alignment position.
+                    pos += elementLength;
+                    if (pos >= pileupOffset) {
+                        return alignmentPos;
+                    }
+                    break;
+                case D:
+                    if (!isDeletion) {
+                        // Not targeting a deletion: skip over the deleted stretch in alignment space.
+                        alignmentPos += elementLength;
+                    } else {
+                        if (pos + elementLength - 1 >= pileupOffset) {
+                            return alignmentPos + (pileupOffset - pos);
+                        } else {
+                            pos += elementLength;
+                            alignmentPos += elementLength;
+                        }
+                    }
+                    break;
+                case M:
+                case EQ:
+                case X:
+                    // Aligned stretch: both coordinates advance together.
+                    if (pos + elementLength - 1 >= pileupOffset) {
+                        return alignmentPos + (pileupOffset - pos);
+                    } else {
+                        pos += elementLength;
+                        alignmentPos += elementLength;
+                    }
+                    break;
+                case H:
+                case P:
+                case N:
+                    break;
+                default:
+                    throw new ReviewedGATKException("Unsupported cigar operator: " + ce.getOperator());
+            }
+        }
+
+        return alignmentPos;
+    }
+
+    /**
+     * Generate an array of bases for just those that are aligned to the reference (i.e. no clips or insertions)
+     *
+     * @param cigar            the read's CIGAR -- cannot be null
+     * @param read             the read's base array
+     * @return a non-null array of bases (bytes)
+     */
+    @Ensures("result != null")
+    public static byte[] readToAlignmentByteArray(final Cigar cigar, final byte[] read) {
+        if ( cigar == null ) throw new IllegalArgumentException("attempting to generate an alignment from a CIGAR that is null");
+        if ( read == null ) throw new IllegalArgumentException("attempting to generate an alignment from a read sequence that is null");
+
+        final int alignmentLength = cigar.getReferenceLength();
+        final byte[] alignment = new byte[alignmentLength];
+        int alignPos = 0;
+        int readPos = 0;
+        for (int iii = 0; iii < cigar.numCigarElements(); iii++) {
+
+            final CigarElement ce = cigar.getCigarElement(iii);
+            final int elementLength = ce.getLength();
+
+            switch (ce.getOperator()) {
+                case I:
+                    if (alignPos > 0) {
+                        final int prevPos = alignPos - 1;
+                        if (alignment[prevPos] == BaseUtils.Base.A.base) {
+                            alignment[prevPos] = PileupElement.A_FOLLOWED_BY_INSERTION_BASE;
+                        } else if (alignment[prevPos] == BaseUtils.Base.C.base) {
+                            alignment[prevPos] = PileupElement.C_FOLLOWED_BY_INSERTION_BASE;
+                        } else if (alignment[prevPos] == BaseUtils.Base.T.base) {
+                            alignment[prevPos] = PileupElement.T_FOLLOWED_BY_INSERTION_BASE;
+                        } else if (alignment[prevPos] == BaseUtils.Base.G.base) {
+                            alignment[prevPos] = PileupElement.G_FOLLOWED_BY_INSERTION_BASE;
+                        }
+                    }
+                case S:
+                    readPos += elementLength;
+                    break;
+                case D:
+                case N:
+                    for (int jjj = 0; jjj < elementLength; jjj++) {
+                        alignment[alignPos++] = PileupElement.DELETION_BASE;
+                    }
+                    break;
+                case M:
+                case EQ:
+                case X:
+                    for (int jjj = 0; jjj < elementLength; jjj++) {
+                        alignment[alignPos++] = read[readPos++];
+                    }
+                    break;
+                case H:
+                case P:
+                    break;
+                default:
+                    throw new ReviewedGATKException("Unsupported cigar operator: " + ce.getOperator());
+            }
+        }
+        return alignment;
+    }
+
+    /**
+     * Returns true if the read does not belong to a contig, i.e. it's location is GenomeLoc.UNMAPPED.
+     * NOTE: A read can have a mapped GenomeLoc and still have an unmapped flag!
+     *
+     * @param r record
+     * @return true if read is unmapped to a genome loc
+     */
+    public static boolean isReadGenomeLocUnmapped(final SAMRecord r) {
+        return SAMRecord.NO_ALIGNMENT_REFERENCE_NAME.equals(r.getReferenceName());
+    }
+
+    /**
+     * Due to (unfortunate) multiple ways to indicate that read is unmapped allowed by SAM format
+     * specification, one may need this convenience shortcut. Checks both 'read unmapped' flag and
+     * alignment reference index/start.
+     *
+     * Our life would be so much easier if all sam files followed the specs. In reality,
+     * sam files (including those generated by maq or bwa) miss headers altogether. When
+     * reading such a SAM file, reference name is set, but since there is no sequence dictionary,
+     * null is always returned for referenceIndex. Let's be paranoid here, and make sure that
+     * we do not call the read "unmapped" when it has only reference name set with ref. index missing
+     * or vice versa.
+     *
+     * @param r a non-null record
+     * @return true if read is unmapped
+     */
+    public static boolean isReadUnmapped(final SAMRecord r) {
+        if ( r == null )
+            throw new IllegalArgumentException("Read cannot be null");
+
+        return r.getReadUnmappedFlag() ||
+               !((r.getReferenceIndex() != null && r.getReferenceIndex() != SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX ||
+                  r.getReferenceName() != null && !r.getReferenceName().equals(SAMRecord.NO_ALIGNMENT_REFERENCE_NAME)) &&
+                 r.getAlignmentStart() != SAMRecord.NO_ALIGNMENT_START);
+
+    }
+
+    /**
+     * Need a well-formed, consolidated Cigar string so that the left aligning code works properly.
+     * For example, 1M1M1M1D2M1M --> 3M1D3M
+     * If the given cigar is empty then the returned cigar will also be empty
+     *
+     * Note that this routine collapses cigar elements of size 0, so 2M0M => 2M
+     *
+     * @param c the cigar to consolidate
+     * @return  a non-null cigar with consecutive matching operators merged into single operators.
+     */
+    @Ensures({"result != null"})
+    public static Cigar consolidateCigar( final Cigar c ) {
+        if ( c == null ) { throw new IllegalArgumentException("Cigar cannot be null"); }
+
+        // fast check to determine if there's anything worth doing before we create new Cigar and actually do some work
+        if ( ! needsConsolidation(c) )
+            return c;
+
+        final Cigar returnCigar = new Cigar();
+        int sumLength = 0;
+        CigarElement lastElement = null;
+
+        for( final CigarElement cur : c.getCigarElements() ) {
+            if ( cur.getLength() == 0 )
+                continue; // don't add elements of 0 length
+
+            if ( lastElement != null && lastElement.getOperator() != cur.getOperator() ) {
+                returnCigar.add(new CigarElement(sumLength, lastElement.getOperator()));
+                sumLength = 0;
+            }
+
+            sumLength += cur.getLength();
+            lastElement = cur;
+        }
+
+        if ( sumLength > 0 ) {
+            returnCigar.add(new CigarElement(sumLength, lastElement.getOperator()));
+        }
+
+        return returnCigar;
+    }
+
+    /**
+     * Does the cigar C need to be consolidated?
+     *
+     * @param c a non-null cigar
+     * @return true if so
+     */
+    private static boolean needsConsolidation(final Cigar c) {
+        if ( c.numCigarElements() <= 1 )
+            return false; // fast path for empty or single cigar
+
+        CigarOperator lastOp = null;
+        for( final CigarElement cur : c.getCigarElements() ) {
+            if ( cur.getLength() == 0 || lastOp == cur.getOperator() )
+                return true;
+            lastOp = cur.getOperator();
+        }
+
+        return false;
+    }
+
+    /**
+     * Takes the alignment of the read sequence <code>readSeq</code> to the reference sequence <code>refSeq</code>
+     * starting at 0-based position <code>refIndex</code> on the <code>refSeq</code> and specified by its <code>cigar</code>.
+     * The last argument <code>readIndex</code> specifies 0-based position on the read where the alignment described by the
+     * <code>cigar</code> starts. Usually cigars specify alignments of the whole read to the ref, so that readIndex is normally 0.
+     * Use non-zero readIndex only when the alignment cigar represents alignment of a part of the read. The refIndex in this case
+     * should be the position where the alignment of that part of the read starts at. In other words, both refIndex and readIndex are
+     * always the positions where the cigar starts on the ref and on the read, respectively.
+     * <p/>
+     * If the alignment has one or more indels, this method attempts to move them left across a stretch of repetitive bases.
+     * For instance, if the original cigar specifies that (any) one AT is deleted from a repeat sequence TATATATA, the output
+     * cigar will always mark the leftmost AT as deleted. If there is no indel in the original cigar or if the indel position
+     * is determined unambiguously (i.e. inserted/deleted sequence is not repeated), the original cigar is returned.
+     *
+     * Note that currently we do not actually support the case where there is more than one indel in the alignment.  We will throw
+     * an exception if there is -- unless the
+     *
+     * @param cigar     structure of the original alignment
+     * @param refSeq    reference sequence the read is aligned to
+     * @param readSeq   read sequence
+     * @param refIndex  0-based alignment start position on ref
+     * @param readIndex 0-based alignment start position on read
+     * @param doNotThrowExceptionForMultipleIndels  if true we will not throw an exception if we encounter multiple indels in the alignment will instead will return the original cigar
+     * @return a non-null cigar, in which the indels are guaranteed to be placed at the leftmost possible position across a repeat (if any)
+     */
+    @Ensures("result != null")
+    public static Cigar leftAlignIndel(Cigar cigar, final byte[] refSeq, final byte[] readSeq, final int refIndex, final int readIndex, final boolean doNotThrowExceptionForMultipleIndels) {
+        ensureLeftAlignmentHasGoodArguments(cigar, refSeq, readSeq, refIndex, readIndex);
+
+        final int numIndels = countIndelElements(cigar);
+        if ( numIndels == 0 )
+            return cigar;
+        if ( numIndels == 1 )
+            return leftAlignSingleIndel(cigar, refSeq, readSeq, refIndex, readIndex, true);
+
+        // if we got here then there is more than 1 indel in the alignment
+        if ( doNotThrowExceptionForMultipleIndels )
+            return cigar;
+
+        throw new UnsupportedOperationException("attempting to left align a CIGAR that has more than 1 indel in its alignment but this functionality has not been implemented yet");
+    }
+
+    private static void ensureLeftAlignmentHasGoodArguments(final Cigar cigar, final byte[] refSeq, final byte[] readSeq, final int refIndex, final int readIndex) {
+        if ( cigar == null ) throw new IllegalArgumentException("attempting to left align a CIGAR that is null");
+        if ( refSeq == null ) throw new IllegalArgumentException("attempting to left align a reference sequence that is null");
+        if ( readSeq == null ) throw new IllegalArgumentException("attempting to left align a read sequence that is null");
+        if ( refIndex < 0 ) throw new IllegalArgumentException("attempting to left align with a reference index less than 0");
+        if ( readIndex < 0 ) throw new IllegalArgumentException("attempting to left align with a read index less than 0");
+    }
+
+    /**
+     * Counts the number of I/D operators
+     *
+     * @param cigar   cigar to check -- cannot be null
+     * @return  non-negative count of indel operators
+     */
+    @Requires("cigar != null")
+    @Ensures("result >= 0")
+    private static int countIndelElements(final Cigar cigar) {
+        int indelCount = 0;
+        for ( CigarElement ce : cigar.getCigarElements() ) {
+            if ( ce.getOperator() == CigarOperator.D || ce.getOperator() == CigarOperator.I )
+                indelCount++;
+        }
+        return indelCount;
+    }
+
    /**
     * See the documentation for AlignmentUtils.leftAlignIndel() for more details.
     *
     * This flavor of the left alignment works if and only if the alignment has one - and only one - indel.
     * An exception is thrown if there are no indels or more than 1 indel in the alignment.
     *
     * @param cigar     structure of the original alignment -- cannot be null
     * @param refSeq    reference sequence the read is aligned to
     * @param readSeq   read sequence
     * @param refIndex  0-based alignment start position on ref
     * @param readIndex 0-based alignment start position on read
     * @param cleanupCigar if true, we'll cleanup the resulting cigar element, removing 0 length elements and deletions from the first cigar position
     * @return a non-null cigar, in which the single indel is guaranteed to be placed at the leftmost possible position across a repeat (if any)
     */
    @Ensures("result != null")
    public static Cigar leftAlignSingleIndel(Cigar cigar, final byte[] refSeq, final byte[] readSeq, final int refIndex, final int readIndex, final boolean cleanupCigar) {
        ensureLeftAlignmentHasGoodArguments(cigar, refSeq, readSeq, refIndex, readIndex);

        // locate the single indel element; reject cigars with zero or multiple indels
        int indexOfIndel = -1;
        for (int i = 0; i < cigar.numCigarElements(); i++) {
            CigarElement ce = cigar.getCigarElement(i);
            if (ce.getOperator() == CigarOperator.D || ce.getOperator() == CigarOperator.I) {
                // if there is more than 1 indel, exception out
                if (indexOfIndel != -1)
                    throw new IllegalArgumentException("attempting to left align a CIGAR that has more than 1 indel in its alignment");
                indexOfIndel = i;
            }
        }

        // if there is no indel, exception out
        if ( indexOfIndel == -1 )
            throw new IllegalArgumentException("attempting to left align a CIGAR that has no indels in its alignment");
        // if the alignment starts with an insertion (so that there is no place on the read to move that insertion further left), we are done
        if ( indexOfIndel == 0 )
            return cigar;

        final int indelLength = cigar.getCigarElement(indexOfIndel).getLength();

        // the "alt string" is the sequence implied by applying this cigar's indel to the reference;
        // a shifted cigar is accepted iff it implies the same alt string (i.e. is equivalent)
        byte[] altString = createIndelString(cigar, indexOfIndel, refSeq, readSeq, refIndex, readIndex);
        if (altString == null)
            return cigar;

        Cigar newCigar = cigar;
        for (int i = 0; i < indelLength; i++) {
            newCigar = moveCigarLeft(newCigar, indexOfIndel);
            byte[] newAltString = createIndelString(newCigar, indexOfIndel, refSeq, readSeq, refIndex, readIndex);

            // check to make sure we haven't run off the end of the read
            // (moveCigarLeft leaves a zero-length element behind when it does)
            boolean reachedEndOfRead = cigarHasZeroSizeElement(newCigar);

            if (Arrays.equals(altString, newAltString)) {
                // the shifted cigar is equivalent: accept it and restart the scan
                // (i = -1 so the next iteration begins at i = 0, allowing up to
                // indelLength further one-base shifts from the new position)
                cigar = newCigar;
                i = -1;
                if (reachedEndOfRead)
                    cigar = cleanupCigar ? cleanUpCigar(cigar) : cigar;
            }

            if (reachedEndOfRead)
                break;
        }

        return cigar;
    }
+
+    /**
+     * Does one of the elements in cigar have a 0 length?
+     *
+     * @param c a non-null cigar
+     * @return true if any element has 0 size
+     */
+    @Requires("c != null")
+    protected static boolean cigarHasZeroSizeElement(final Cigar c) {
+        for (final CigarElement ce : c.getCigarElements()) {
+            if (ce.getLength() == 0)
+                return true;
+        }
+        return false;
+    }
+
+    /**
+     * Clean up the incoming cigar
+     *
+     * Removes elements with zero size
+     * Clips away beginning deletion operators
+     *
+     * @param c the cigar string we want to clean up
+     * @return a newly allocated, cleaned up Cigar
+     */
+    @Requires("c != null")
+    @Ensures("result != null")
+    public static Cigar cleanUpCigar(final Cigar c) {
+        final List<CigarElement> elements = new ArrayList<CigarElement>(c.numCigarElements() - 1);
+
+        for (final CigarElement ce : c.getCigarElements()) {
+            if (ce.getLength() != 0 && (! elements.isEmpty() || ce.getOperator() != CigarOperator.D)) {
+                elements.add(ce);
+            }
+        }
+
+        return new Cigar(elements);
+    }
+
+    /**
+     * Removing a trailing deletion from the incoming cigar if present
+     *
+     * @param c the cigar we want to update
+     * @return a non-null Cigar
+     */
+    @Requires("c != null")
+    @Ensures("result != null")
+    public static Cigar removeTrailingDeletions(final Cigar c) {
+
+        final List<CigarElement> elements = c.getCigarElements();
+        if ( elements.get(elements.size() - 1).getOperator() != CigarOperator.D )
+            return c;
+
+        return new Cigar(elements.subList(0, elements.size() - 1));
+    }
+
    /**
     * Move the indel in a given cigar string one base to the left
     *
     * Shrinks the element immediately preceding the indel by one base and grows the element
     * immediately following it by one base (appending a fresh 1M when the indel is the last element).
     *
     * NOTE(review): although @Requires permits indexOfIndel == 0, this method reads
     * cigar.getCigarElement(indexOfIndel - 1), so callers must guarantee indexOfIndel >= 1
     * (leftAlignSingleIndel returns early when the indel is the first element) -- confirm before
     * adding new callers.
     *
     * @param cigar          original cigar
     * @param indexOfIndel   the index of the indel cigar element
     * @return non-null cigar with indel moved one base to the left
     */
    @Requires("cigar != null && indexOfIndel >= 0 && indexOfIndel < cigar.numCigarElements()")
    @Ensures("result != null")
    private static Cigar moveCigarLeft(Cigar cigar, int indexOfIndel) {
        // get the first few elements
        ArrayList<CigarElement> elements = new ArrayList<CigarElement>(cigar.numCigarElements());
        for (int i = 0; i < indexOfIndel - 1; i++)
            elements.add(cigar.getCigarElement(i));

        // get the indel element and move it left one base
        // (clamped at 0: a zero-length element left behind here is how callers detect
        // that the indel has been shifted all the way to the start -- see cigarHasZeroSizeElement)
        CigarElement ce = cigar.getCigarElement(indexOfIndel - 1);
        elements.add(new CigarElement(Math.max(ce.getLength() - 1, 0), ce.getOperator()));
        elements.add(cigar.getCigarElement(indexOfIndel));
        if (indexOfIndel + 1 < cigar.numCigarElements()) {
            // grow the element following the indel by the base we removed above
            ce = cigar.getCigarElement(indexOfIndel + 1);
            elements.add(new CigarElement(ce.getLength() + 1, ce.getOperator()));
        } else {
            // indel was the last element: append a new 1M to absorb the shifted base
            elements.add(new CigarElement(1, CigarOperator.M));
        }

        // get the last few elements
        for (int i = indexOfIndel + 2; i < cigar.numCigarElements(); i++)
            elements.add(cigar.getCigarElement(i));
        return new Cigar(elements);
    }
+
+    /**
+     * Create the string (really a byte array) representation of an indel-containing cigar against the reference.
+     *
+     * @param cigar             the indel-containing cigar
+     * @param indexOfIndel      the index of the indel cigar element
+     * @param refSeq            the reference sequence
+     * @param readSeq           the read sequence for the cigar
+     * @param refIndex          the starting reference index into refSeq
+     * @param readIndex         the starting read index into readSeq
+     * @return non-null byte array which is the indel representation against the reference
+     */
+    @Requires("cigar != null && indexOfIndel >= 0 && indexOfIndel < cigar.numCigarElements() && refSeq != null && readSeq != null && refIndex >= 0 && readIndex >= 0")
+    @Ensures("result != null")
+    private static byte[] createIndelString(final Cigar cigar, final int indexOfIndel, final byte[] refSeq, final byte[] readSeq, int refIndex, int readIndex) {
+        CigarElement indel = cigar.getCigarElement(indexOfIndel);
+        int indelLength = indel.getLength();
+
+        int totalRefBases = 0;
+        for (int i = 0; i < indexOfIndel; i++) {
+            CigarElement ce = cigar.getCigarElement(i);
+            int length = ce.getLength();
+
+            switch (ce.getOperator()) {
+                case M:
+                case EQ:
+                case X:
+                    readIndex += length;
+                    refIndex += length;
+                    totalRefBases += length;
+                    break;
+                case S:
+                    readIndex += length;
+                    break;
+                case N:
+                    refIndex += length;
+                    totalRefBases += length;
+                    break;
+                default:
+                    break;
+            }
+        }
+
+        // sometimes, when there are very large known indels, we won't have enough reference sequence to cover them
+        if (totalRefBases + indelLength > refSeq.length)
+            indelLength -= (totalRefBases + indelLength - refSeq.length);
+
+        // the indel-based reference string
+        byte[] alt = new byte[refSeq.length + (indelLength * (indel.getOperator() == CigarOperator.D ? -1 : 1))];
+
+        // add the bases before the indel, making sure it's not aligned off the end of the reference
+        if (refIndex > alt.length || refIndex > refSeq.length)
+            return null;
+        System.arraycopy(refSeq, 0, alt, 0, refIndex);
+        int currentPos = refIndex;
+
+        // take care of the indel
+        if (indel.getOperator() == CigarOperator.D) {
+            refIndex += indelLength;
+        } else {
+            System.arraycopy(readSeq, readIndex, alt, currentPos, indelLength);
+            currentPos += indelLength;
+        }
+
+        // add the bases after the indel, making sure it's not aligned off the end of the reference
+        if (refSeq.length - refIndex > alt.length - currentPos)
+            return null;
+        System.arraycopy(refSeq, refIndex, alt, currentPos, refSeq.length - refIndex);
+
+        return alt;
+    }
+
+
+    /**
+     * Trim cigar down to one that starts at start reference on the left and extends to end on the reference
+     *
+     * @param cigar a non-null Cigar to trim down
+     * @param start Where should we start keeping bases on the reference?  The first position is 0
+     * @param end Where should we stop keeping bases on the reference?  The maximum value is cigar.getReferenceLength()
+     * @return a new Cigar with reference length == start - end + 1
+     */
+    public static Cigar trimCigarByReference(final Cigar cigar, final int start, final int end) {
+        if ( start < 0 ) throw new IllegalArgumentException("Start must be >= 0 but got " + start);
+        if ( end < start ) throw new IllegalArgumentException("End " + end + " is < start start " + start);
+        if ( end > cigar.getReferenceLength() ) throw new IllegalArgumentException("End is beyond the cigar's reference length " + end + " for cigar " + cigar );
+
+        final Cigar result = trimCigar(cigar, start, end, true);
+
+        if ( result.getReferenceLength() != end - start + 1)
+            throw new IllegalStateException("trimCigarByReference failure: start " + start + " end " + end + " for " + cigar + " resulted in cigar with wrong size " + result);
+        return result;
+    }
+
+    /**
+     * Trim cigar down to one that starts at start base in the cigar and extends to (inclusive) end base
+     *
+     * @param cigar a non-null Cigar to trim down
+     * @param start Where should we start keeping bases in the cigar?  The first position is 0
+     * @param end Where should we stop keeping bases in the cigar?  The maximum value is cigar.getReadLength()
+     * @return a new Cigar containing == start - end + 1 reads
+     */
+    public static Cigar trimCigarByBases(final Cigar cigar, final int start, final int end) {
+        if ( start < 0 ) throw new IllegalArgumentException("Start must be >= 0 but got " + start);
+        if ( end < start ) throw new IllegalArgumentException("End " + end + " is < start = " + start);
+        if ( end > cigar.getReadLength() ) throw new IllegalArgumentException("End is beyond the cigar's read length " + end + " for cigar " + cigar );
+
+        final Cigar result = trimCigar(cigar, start, end, false);
+
+        final int expectedSize = end - start + 1;
+        if ( result.getReadLength() != expectedSize)
+            throw new IllegalStateException("trimCigarByBases failure: start " + start + " end " + end + " for " + cigar + " resulted in cigar with wrong size " + result + " with size " + result.getReadLength() + " expected " + expectedSize + " for input cigar " + cigar);
+        return result;
+    }
+

    /**
     * Workhorse for trimCigarByBases and trimCigarByReference
     *
     * Walks the cigar, keeping only the portion of each element that falls inside [start, end],
     * where positions are counted on the reference (byReference == true) or on the read.
     *
     * @param cigar a non-null Cigar to trim down
     * @param start Where should we start keeping bases in the cigar?  The first position is 0
     * @param end Where should we stop keeping bases in the cigar?  The maximum value is cigar.getReadLength()
     * @param byReference should start and end be intrepreted as position in the reference or the read to trim to/from?
     * @return a non-null cigar
     */
    @Requires({"cigar != null", "start >= 0", "start <= end"})
    @Ensures("result != null")
    private static Cigar trimCigar(final Cigar cigar, final int start, final int end, final boolean byReference) {
        final List<CigarElement> newElements = new LinkedList<CigarElement>();

        int pos = 0; // current position in reference (byReference) or read coordinates
        for ( final CigarElement elt : cigar.getCigarElements() ) {
            // stop once past end; in read coordinates a D consumes no read bases, so keep
            // scanning through deletions that sit exactly at the window boundary
            if ( pos > end && (byReference || elt.getOperator() != CigarOperator.D) ) break;

            switch ( elt.getOperator() ) {
                case D:
                    if ( ! byReference ) {
                        // deletions consume no read bases: keep them whole once inside the window
                        if ( pos >= start )
                            newElements.add(elt);
                        break;
                    }
                    // otherwise fall through to the next case
                case EQ: case M: case X:
                    // these consume the active coordinate: slice to the [start, end] window
                    pos = addCigarElements(newElements, pos, start, end, elt);
                    break;
                case S: case I:
                    if ( byReference ) {
                        // clips/insertions consume no reference bases: keep them whole once inside the window
                        if ( pos >= start )
                            newElements.add(elt);
                    } else {
                        pos = addCigarElements(newElements, pos, start, end, elt);
                    }
                    break;
                default:
                    throw new IllegalStateException("Cannot handle " + elt);
            }
        }

        // merge the potentially fragmented elements (e.g. 1M1M) produced by slicing
        return AlignmentUtils.consolidateCigar(new Cigar(newElements));
    }
+
+    /**
+     * Helper function for trimCigar that adds cigar elements (of total length X) of elt.op to dest for
+     * X bases that fall between start and end, where the last position of the base is pos.
+     *
+     * The primary use of this function is to create a new cigar element list that contains only
+     * elements that occur between start and end bases in an initial cigar.
+     *
+     * Note that this function may return multiple cigar elements (1M1M etc) that are best consolidated
+     * after the fact into a single simpler representation.
+     *
+     * @param dest we will append our cigar elements to this list
+     * @param pos the position (0 indexed) where elt started
+     * @param start only include bases that occur >= this position
+     * @param end only include bases that occur <= this position
+     * @param elt the element we are slicing down
+     * @return the position after we've traversed all elt.length bases of elt
+     */
+   protected static int addCigarElements(final List<CigarElement> dest, int pos, final int start, final int end, final CigarElement elt) {
+        final int length = Math.min(pos + elt.getLength() - 1, end) - Math.max(pos, start) + 1;
+        if ( length > 0 )
+            dest.add(new CigarElement(length, elt.getOperator()));
+        return pos + elt.getLength();
+    }
+
+    /**
+     * Get the offset (base 0) of the first reference aligned base in Cigar that occurs after readStartByBaseOfCigar base of the cigar
+     *
+     * The main purpose of this routine is to find a good start position for a read given it's cigar.  The real
+     * challenge is that the starting base might be inside an insertion, in which case the read actually starts
+     * at the next M/EQ/X operator.
+     *
+     * @param cigar a non-null cigar
+     * @param readStartByBaseOfCigar finds the first base after this (0 indexed) that aligns to the reference genome (M, EQ, X)
+     * @throws IllegalStateException if no such base can be found
+     * @return an offset into cigar
+     */
+    public static int calcFirstBaseMatchingReferenceInCigar(final Cigar cigar, int readStartByBaseOfCigar) {
+        if ( cigar == null ) throw new IllegalArgumentException("cigar cannot be null");
+        if ( readStartByBaseOfCigar >= cigar.getReadLength() ) throw new IllegalArgumentException("readStartByBaseOfCigar " + readStartByBaseOfCigar + " must be <= readLength " + cigar.getReadLength());
+
+        int hapOffset = 0, refOffset = 0;
+        for ( final CigarElement ce : cigar.getCigarElements() ) {
+            for ( int i = 0; i < ce.getLength(); i++ ) {
+                switch ( ce.getOperator() ) {
+                    case M:case EQ:case X:
+                        if ( hapOffset >= readStartByBaseOfCigar )
+                            return refOffset;
+                        hapOffset++;
+                        refOffset++;
+                        break;
+                    case I: case S:
+                        hapOffset++;
+                        break;
+                    case D:
+                        refOffset++;
+                        break;
+                    default:
+                        throw new IllegalStateException("calcFirstBaseMatchingReferenceInCigar does not support cigar " + ce.getOperator() + " in cigar " + cigar);
+                }
+            }
+        }
+
+        throw new IllegalStateException("Never found appropriate matching state for cigar " + cigar + " given start of " + readStartByBaseOfCigar);
+    }
+
+    /**
+     * Generate a new Cigar that maps the operations of the first cigar through those in a second
+     *
+     * For example, if first is 5M and the second is 2M1I2M then the result is 2M1I2M.
+     * However, if first is 1M2D3M and second is 2M1I3M this results in a cigar X
+     *
+     * ref   : AC-GTA
+     * hap   : ACxGTA  - 2M1I3M
+     * read  : A--GTA  - 1M2D3M
+     * result: A--GTA => 1M1D3M
+     *
+     * ref   : ACxG-TA
+     * hap   : AC-G-TA  - 2M1D3M
+     * read  : AC-GxTA  - 3M1I2M
+     * result: AC-GxTA => 2M1D1M1I2M
+     *
+     * ref   : ACGTA
+     * hap   : ACGTA  - 5M
+     * read  : A-GTA  - 1M1I3M
+     * result: A-GTA => 1M1I3M
+     *
+     * ref   : ACGTAC
+     * hap   : AC---C  - 2M3D1M
+     * read  : AC---C  - 3M
+     * result: AG---C => 2M3D
+     *
+     * The constraint here is that both cigars should imply that the result have the same number of
+     * reference bases (i.e.g, cigar.getReferenceLength() are equals).
+     *
+     * @param firstToSecond the cigar mapping hap1 -> hap2
+     * @param secondToThird the cigar mapping hap2 -> hap3
+     * @return A cigar mapping hap1 -> hap3
+     */
+    public static Cigar applyCigarToCigar(final Cigar firstToSecond, final Cigar secondToThird) {
+        // We walk both cigars one base at a time.  For each pair of current operators we look
+        // up the CigarPairTransform that says (a) which operator the composed cigar emits for
+        // this step (possibly none) and (b) how many bases of each input cigar the step consumes.
+        final List<CigarElement> newElements = new LinkedList<CigarElement>();
+        final int nElements12 = firstToSecond.getCigarElements().size();
+        final int nElements23 = secondToThird.getCigarElements().size();
+
+        int cigar12I = 0, cigar23I = 0; // index of the current element within each cigar
+        int elt12I = 0, elt23I = 0;     // number of bases already consumed within the current element
+
+        while ( cigar12I < nElements12 && cigar23I < nElements23 ) {
+            final CigarElement elt12 = firstToSecond.getCigarElement(cigar12I);
+            final CigarElement elt23 = secondToThird.getCigarElement(cigar23I);
+
+            final CigarPairTransform transform = getTransformer(elt12.getOperator(), elt23.getOperator());
+
+            // a null op13 means this operator pairing cancels out (e.g. D vs I) and emits nothing
+            if ( transform.op13 != null )
+                newElements.add(new CigarElement(1, transform.op13));
+
+            elt12I += transform.advance12;
+            elt23I += transform.advance23;
+
+            // if we have exhausted our current element, advance to the next one
+            if ( elt12I == elt12.getLength() ) { cigar12I++; elt12I = 0; }
+            if ( elt23I == elt23.getLength() ) { cigar23I++; elt23I = 0; }
+        }
+
+        // collapse the run of single-base elements into conventional run-length form
+        return AlignmentUtils.consolidateCigar(new Cigar(newElements));
+    }
+
+    private static CigarPairTransform getTransformer(final CigarOperator op12, final CigarOperator op23) {
+        for ( final CigarPairTransform transform : cigarPairTransformers) {
+            if ( transform.op12.contains(op12) && transform.op23.contains(op23) )
+                return transform;
+        }
+
+        throw new IllegalStateException("No transformer for operators " + op12 + " and " + op23);
+    }
+
+    /**
+     * Captures how a single alignment operation projects through another in a three-way
+     * alignment.
+     *
+     * Given bases1 aligned to bases2 (operator op12) and bases2 aligned to bases3
+     * (operator op23):
+     *
+     * bases3 : xxx A zzz
+     * bases2 : xxx B zzz
+     * bases1 : xxx C zzz
+     *
+     * an instance records the operator op13 implied for the direct bases1 -> bases3
+     * alignment, together with how many bases of each input cigar the step consumes
+     * (advance12 / advance23).
+     *
+     * When op12 and op23 are both M, all three sequences agree at this column, so op13
+     * is M and both cigars advance by one base.  Other pairings are less symmetric: if
+     * op12 = I and op23 = M, then bases1 carries a base that is absent from bases2 (and
+     * hence from bases3), so op13 must be an insertion; we consume the inserted base of
+     * cigar 1 (advance12 = 1), but the base matching op23 has not been reached yet, so
+     * advance23 = 0.
+     */
+    private static class CigarPairTransform {
+        private final EnumSet<CigarOperator> op12, op23;
+        private final CigarOperator op13;
+        private final int advance12, advance23;
+
+        private CigarPairTransform(CigarOperator op12, CigarOperator op23, CigarOperator op13, int advance12, int advance23) {
+            this.op12 = getCigarSet(op12);
+            this.op23 = getCigarSet(op23);
+            this.op13 = op13;
+            this.advance12 = advance12;
+            this.advance23 = advance23;
+        }
+
+        /** Expand a canonical operator into the full set of operators it stands for. */
+        private static EnumSet<CigarOperator> getCigarSet(final CigarOperator masterOp) {
+            if ( masterOp == CigarOperator.M )
+                return EnumSet.of(CigarOperator.M, CigarOperator.EQ, CigarOperator.X);
+            if ( masterOp == CigarOperator.I )
+                return EnumSet.of(CigarOperator.I, CigarOperator.S);
+            if ( masterOp == CigarOperator.D )
+                return EnumSet.of(CigarOperator.D);
+            throw new IllegalStateException("Unexpected state " + masterOp);
+        }
+
+        @Override
+        public String toString() {
+            final StringBuilder buf = new StringBuilder("CigarPairTransform{");
+            buf.append("op12=").append(op12);
+            buf.append(", op23=").append(op23);
+            buf.append(", op13=").append(op13);
+            buf.append(", advance12=").append(advance12);
+            buf.append(", advance23=").append(advance23);
+            buf.append('}');
+            return buf.toString();
+        }
+    }
+
+
+    /**
+     * The fixed table of single-base transformations used by applyCigarToCigar.
+     * Each entry maps an (op12, op23) operator pair to the implied op13 (null means
+     * emit nothing) plus the per-step advance through each input cigar.  In the
+     * diagrams below, the caret line separates sequence 3 (above) from the op pair
+     * being projected (sequences 2 and 1, below).
+     */
+    private final static List<CigarPairTransform> cigarPairTransformers = Arrays.asList(
+            //
+            // op12 is a match
+            //
+            // 3: xxx B yyy
+            // ^^^^^^^^^^^^
+            // 2: xxx M yyy
+            // 1: xxx M yyy
+            new CigarPairTransform(CigarOperator.M, CigarOperator.M, CigarOperator.M, 1, 1),
+            // 3: xxx I yyy
+            // ^^^^^^^^^^^^
+            // 2: xxx I yyy
+            // 1: xxx M yyy
+            new CigarPairTransform(CigarOperator.M, CigarOperator.I, CigarOperator.I, 1, 1),
+            // 3: xxx D yyy
+            // ^^^^^^^^^^^^
+            // 2: xxx D yyy
+            // 1: xxx M yyy
+            new CigarPairTransform(CigarOperator.M, CigarOperator.D, CigarOperator.D, 0, 1),
+
+            //
+            // op12 is a deletion
+            //
+            // 3: xxx D M yyy
+            // ^^^^^^^^^^^^
+            // 2: xxx M yyy
+            // 1: xxx D yyy
+            new CigarPairTransform(CigarOperator.D, CigarOperator.M, CigarOperator.D, 1, 1),
+            // 3: xxx D1 D2 yyy
+            // ^^^^^^^^^^^^
+            // 2: xxx D2 yyy
+            // 1: xxx D1 yyy
+            new CigarPairTransform(CigarOperator.D, CigarOperator.D, CigarOperator.D, 1, 0),
+            // 3: xxx X yyy => no-op, we skip emitting anything here
+            // ^^^^^^^^^^^^
+            // 2: xxx I yyy
+            // 1: xxx D yyy
+            new CigarPairTransform(CigarOperator.D, CigarOperator.I, null, 1, 1),
+
+            //
+            // op12 is a insertion
+            //
+            // 3: xxx I M yyy
+            // ^^^^^^^^^^^^
+            // 2: xxx M yyy
+            // 1: xxx I yyy
+            new CigarPairTransform(CigarOperator.I, CigarOperator.M, CigarOperator.I, 1, 0),
+            // 3: xxx I D yyy
+            // ^^^^^^^^^^^^
+            // 2: xxx D yyy
+            // 1: xxx I yyy
+            new CigarPairTransform(CigarOperator.I, CigarOperator.D, CigarOperator.I, 1, 0),
+            // 3: xxx I1 I2 yyy
+            // ^^^^^^^^^^^^
+            // 2: xxx I2 yyy
+            // 1: xxx I1 yyy
+            new CigarPairTransform(CigarOperator.I, CigarOperator.I, CigarOperator.I, 1, 0)
+            );
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/sam/ArtificialBAMBuilder.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/sam/ArtificialBAMBuilder.java
new file mode 100644
index 0000000..371e0f3
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/sam/ArtificialBAMBuilder.java
@@ -0,0 +1,242 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.sam;
+
+import htsjdk.samtools.reference.IndexedFastaSequenceFile;
+import htsjdk.samtools.*;
+import org.broadinstitute.gatk.utils.GenomeLocParser;
+import org.broadinstitute.gatk.utils.NGSPlatform;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.*;
+
+/**
+ * Easy to use creator of artificial BAM files for testing
+ *
+ * Allows us to make a stream of reads or an indexed BAM file with reads having the following properties
+ *
+ * - coming from n samples
+ * - of fixed read length and aligned to the genome with M operator
+ * - having N reads per alignment start
+ * - skipping N bases between each alignment start
+ * - starting at a given alignment start
+ *
+ * User: depristo
+ * Date: 1/15/13
+ * Time: 9:22 AM
+ */
+public class ArtificialBAMBuilder {
+    public final static int BAM_SHARD_SIZE = 16384;
+
+    // reference backing the genome location parser; null when only a dictionary was supplied
+    private final IndexedFastaSequenceFile reference;
+    private final GenomeLocParser parser;
+
+    final int nReadsPerLocus;
+    final int nLoci;
+
+    int skipNLoci = 0;      // number of loci to skip between consecutive alignment starts
+    int alignmentStart = 1; // alignment start of the first batch of generated reads
+    int readLength = 10;    // length of every generated read
+    private final ArrayList<String> samples = new ArrayList<String>();
+    // cache of the generated reads; null means the cache is stale and makeReads() must rebuild it
+    private List<GATKSAMRecord> createdReads = null;
+
+    // extra caller-supplied reads merged (coordinate-sorted) into the generated reads
+    private LinkedList<GATKSAMRecord> additionalReads = new LinkedList<GATKSAMRecord>();
+
+    final SAMFileWriterFactory factory = new SAMFileWriterFactory();
+    {
+        // every BAM produced by this builder gets an index written alongside it
+        factory.setCreateIndex(true);
+    }
+
+    SAMFileHeader header;
+
+    /**
+     * Create a builder producing nReadsPerLocus reads at each of nLoci loci, deriving
+     * the genome location parser from the given reference.
+     */
+    public ArtificialBAMBuilder(final IndexedFastaSequenceFile reference, int nReadsPerLocus, int nLoci) {
+        this.nReadsPerLocus = nReadsPerLocus;
+        this.nLoci = nLoci;
+
+        this.reference = reference;
+        this.parser = new GenomeLocParser(reference);
+        createAndSetHeader(1);
+    }
+
+    /** Convenience constructor: uses an artificial single-contig (1Mb) sequence dictionary. */
+    public ArtificialBAMBuilder(int nReadsPerLocus, int nLoci) {
+        this(ArtificialSAMUtils.createArtificialSamHeader(1, 1, 1000000).getSequenceDictionary(), nReadsPerLocus, nLoci);
+    }
+
+    /** Create a builder backed only by a sequence dictionary (no reference sequence available). */
+    public ArtificialBAMBuilder(final SAMSequenceDictionary dict, int nReadsPerLocus, int nLoci) {
+        this.nReadsPerLocus = nReadsPerLocus;
+        this.nLoci = nLoci;
+        this.reference = null;
+        this.parser = new GenomeLocParser(dict);
+        createAndSetHeader(1);
+    }
+
+    /** @return the reference backing this builder, or null if none was provided */
+    public IndexedFastaSequenceFile getReference() {
+        return reference;
+    }
+
+    public GenomeLocParser getGenomeLocParser() {
+        return parser;
+    }
+
+    /**
+     * Rebuild the SAM header with nSamples read groups ("rg0".."rgN-1" carrying samples
+     * "sample0".."sampleN-1"), replacing any previous header and sample list and
+     * invalidating the read cache.
+     *
+     * @return this builder, for call chaining
+     */
+    public ArtificialBAMBuilder createAndSetHeader(final int nSamples) {
+        createdReads = null;
+        this.header = new SAMFileHeader();
+        header.setSortOrder(SAMFileHeader.SortOrder.coordinate);
+        header.setSequenceDictionary(parser.getContigs());
+        samples.clear();
+
+        for ( int i = 0; i < nSamples; i++ ) {
+            final GATKSAMReadGroupRecord rg = new GATKSAMReadGroupRecord("rg" + i);
+            final String sample = "sample" + i;
+            samples.add(sample);
+            rg.setSample(sample);
+            rg.setPlatform(NGSPlatform.ILLUMINA.getDefaultPlatform());
+            header.addReadGroup(rg);
+        }
+
+        return this;
+    }
+
+    /** Add a single caller-supplied read to the builder output; invalidates the read cache. */
+    public void addReads(final GATKSAMRecord readToAdd) {
+        createdReads = null;
+        additionalReads.add(readToAdd);
+    }
+
+    /** Add a collection of caller-supplied reads to the builder output; invalidates the read cache. */
+    public void addReads(final Collection<GATKSAMRecord> readsToAdd) {
+        createdReads = null;
+        additionalReads.addAll(readsToAdd);
+    }
+
+    /** @return the live (mutable) list of sample names created by createAndSetHeader */
+    public List<String> getSamples() {
+        return samples;
+    }
+
+    /**
+     * Create a read stream based on the parameters.  The cigar string for each
+     * read will be *M, where * is the length of the read.
+     *
+     * Useful for testing things like LocusIteratorByState
+     *
+     * @return an ordered list of reads
+     */
+    public List<GATKSAMRecord> makeReads() {
+        if ( createdReads == null ) {
+            final String baseName = "read";
+            // wrap the header's read groups so every read can carry a GATK read group
+            final LinkedList<GATKSAMReadGroupRecord> readGroups = new LinkedList<GATKSAMReadGroupRecord>();
+            for ( final SAMReadGroupRecord rg : header.getReadGroups())
+                readGroups.add(new GATKSAMReadGroupRecord(rg));
+
+            List<GATKSAMRecord> reads = new ArrayList<GATKSAMRecord>(nReadsPerLocus*nLoci);
+            for ( int locusI = 0; locusI < nLoci; locusI++) {
+                // each successive locus is offset by skipNLoci + 1 from the previous one
+                final int locus = locusI * (skipNLoci + 1);
+                for ( int readI = 0; readI < nReadsPerLocus; readI++ ) {
+                    for ( final GATKSAMReadGroupRecord rg : readGroups ) {
+                        final String readName = String.format("%s.%d.%d.%s", baseName, locus, readI, rg.getId());
+                        final GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(header, readName, 0, alignmentStart + locus, readLength);
+                        read.setReadGroup(rg);
+                        reads.add(read);
+                    }
+                }
+            }
+
+            if ( ! additionalReads.isEmpty() ) {
+                // merge in the caller-supplied reads and restore coordinate order
+                reads.addAll(additionalReads);
+                Collections.sort(reads, new SAMRecordCoordinateComparator());
+            }
+
+            createdReads = new ArrayList<GATKSAMRecord>(reads);
+        }
+
+        return createdReads;
+    }
+
+    /**
+     * Make an indexed BAM file contains the reads in the builder, marking it for deleteOnExit()
+     * @return the BAM file
+     */
+    public File makeTemporarilyBAMFile() {
+        try {
+            final File file = File.createTempFile("tempBAM", ".bam");
+            file.deleteOnExit();
+
+            // Register the bam index file for deletion on exit as well:
+            // (both ".bai" naming conventions are covered, as either may be produced)
+            new File(file.getAbsolutePath().replace(".bam", ".bai")).deleteOnExit();
+            new File(file.getAbsolutePath() + ".bai").deleteOnExit();
+
+            return makeBAMFile(file);
+        } catch ( IOException e ) {
+            throw new RuntimeException(e);
+        }
+    }
+
+    /**
+     * Write the reads from this builder to output, creating an index as well
+     * @param output the output BAM file we want to use
+     * @return output, for convenience
+     */
+    public File makeBAMFile(final File output) {
+        // presorted = true; the final argument 0 is presumably the compression level
+        // (fastest / uncompressed) -- confirm against the SAMFileWriterFactory API
+        final SAMFileWriter writer = factory.makeBAMWriter(header, true, output, 0);
+        for ( final GATKSAMRecord read : makeReads() )
+            writer.addAlignment(read);
+        writer.close();
+        return output;
+    }
+
+    // note: all setters below invalidate the read cache so makeReads() regenerates
+    public int getnReadsPerLocus() { return nReadsPerLocus; }
+    public int getnLoci() { return nLoci; }
+    public int getSkipNLoci() { return skipNLoci; }
+    public ArtificialBAMBuilder setSkipNLoci(int skipNLoci) { this.skipNLoci = skipNLoci; createdReads = null; return this; }
+    public int getAlignmentStart() { return alignmentStart; }
+    public ArtificialBAMBuilder setAlignmentStart(int alignmentStart) { this.alignmentStart = alignmentStart; createdReads = null; return this; }
+    public int getReadLength() { return readLength; }
+    public ArtificialBAMBuilder setReadLength(int readLength) { this.readLength = readLength; createdReads = null; return this; }
+    public SAMFileHeader getHeader() { return header; }
+    public ArtificialBAMBuilder setHeader(SAMFileHeader header) { this.header = header; createdReads = null; return this; }
+
+    /**
+     * NOTE(review): this is an upper bound, not the exact last aligned base -- the final
+     * locus starts at alignmentStart + (nLoci-1)*(skipNLoci+1), so this overestimates by
+     * up to (skipNLoci + 1).  Confirm callers only require a bound.
+     */
+    public int getAlignmentEnd() {
+        return alignmentStart + nLoci * (skipNLoci + 1) + readLength;
+    }
+
+
+    public int getNSamples() { return samples.size(); }
+
+    /** @return the total number of reads makeReads() will generate (excluding additionalReads) */
+    public int expectedNumberOfReads() {
+        return nLoci * nReadsPerLocus * header.getReadGroups().size();
+    }
+
+    @Override
+    public String toString() {
+        return "ArtificialBAMBuilder{" +
+                "samples=" + samples +
+                ", readLength=" + readLength +
+                ", alignmentStart=" + alignmentStart +
+                ", skipNLoci=" + skipNLoci +
+                ", nLoci=" + nLoci +
+                ", nReadsPerLocus=" + nReadsPerLocus +
+                '}';
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/sam/ArtificialGATKSAMFileWriter.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/sam/ArtificialGATKSAMFileWriter.java
new file mode 100644
index 0000000..d430ab7
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/sam/ArtificialGATKSAMFileWriter.java
@@ -0,0 +1,129 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.sam;
+
+import htsjdk.samtools.SAMFileHeader;
+import htsjdk.samtools.SAMRecord;
+import htsjdk.samtools.util.ProgressLoggerInterface;
+
+import java.util.ArrayList;
+import java.util.List;
+
+
+/*
+ * Copyright (c) 2009 The Broad Institute
+ *
+ * Permission is hereby granted, free of charge, to any person
+ * obtaining a copy of this software and associated documentation
+ * files (the "Software"), to deal in the Software without
+ * restriction, including without limitation the rights to use,
+ * copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the
+ * Software is furnished to do so, subject to the following
+ * conditions:
+ *
+ * The above copyright notice and this permission notice shall be
+ * included in all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+ * OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+ * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+ * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+ * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+ * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+ * OTHER DEALINGS IN THE SOFTWARE.
+ */
+
+/**
+ * A stand-in SAM writer for tests: instead of writing anywhere, it simply
+ * collects every alignment handed to it so the output reads can be
+ * retrieved once writing is done.
+ *
+ * @author aaron
+ */
+public class ArtificialGATKSAMFileWriter implements GATKSAMFileWriter {
+
+    /** flipped to true once close() has been called */
+    private boolean closed = false;
+
+    /** every SAMRecord handed to addAlignment, in arrival order */
+    List<SAMRecord> records = new ArrayList<SAMRecord>();
+
+    public void addAlignment( SAMRecord alignment ) {
+        records.add(alignment);
+    }
+
+    /** @return the header of the first collected record, or null when nothing has been added yet */
+    public SAMFileHeader getFileHeader() {
+        return records.isEmpty() ? null : records.get(0).getHeader();
+    }
+
+    /** closing a fake writer just flips the flag */
+    public void close() {
+        closed = true;
+    }
+
+    /**
+     * has close() been called on this writer?
+     *
+     * @return true if we're closed
+     */
+    public boolean isClosed() {
+        return closed;
+    }
+
+    /**
+     * @return the live list of records collected so far
+     */
+    public List<SAMRecord> getRecords() {
+        return records;
+    }
+
+    @Override
+    public void writeHeader(SAMFileHeader header) {
+        // intentionally a no-op: the fake writer keeps no file to write a header to
+    }
+
+    @Override
+    public void setPresorted(boolean presorted) {
+        // intentionally a no-op for the fake writer
+    }
+
+    @Override
+    public void setMaxRecordsInRam(int maxRecordsInRam) {
+        // intentionally a no-op: everything is held in memory anyway
+    }
+
+    /**
+     * @throws java.lang.UnsupportedOperationException No progress logging in this implementation.
+     */
+    @Override
+    public void setProgressLogger(final ProgressLoggerInterface logger) {
+        throw new UnsupportedOperationException("Progress logging not supported");
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/sam/ArtificialMultiSampleReadStream.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/sam/ArtificialMultiSampleReadStream.java
new file mode 100644
index 0000000..3d7c3c7
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/sam/ArtificialMultiSampleReadStream.java
@@ -0,0 +1,87 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.sam;
+
+import htsjdk.samtools.MergingSamRecordIterator;
+import htsjdk.samtools.SamFileHeaderMerger;
+import htsjdk.samtools.SAMFileHeader;
+import htsjdk.samtools.SamReader;
+import htsjdk.samtools.SAMRecord;
+import org.broadinstitute.gatk.utils.iterators.GATKSAMIterator;
+import org.broadinstitute.gatk.utils.iterators.GATKSAMIteratorAdapter;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+
+import java.util.*;
+
+/**
+ * Simple wrapper class that multiplexes multiple ArtificialSingleSampleReadStreams into a single stream of reads
+ *
+ * @author David Roazen
+ */
+public class ArtificialMultiSampleReadStream implements Iterable<SAMRecord> {
+
+    // the per-sample streams being multiplexed; fixed at construction time
+    private final Collection<ArtificialSingleSampleReadStream> perSampleArtificialReadStreams;
+    private MergingSamRecordIterator mergingIterator;
+
+    /**
+     * @param perSampleArtificialReadStreams the single-sample streams to merge; must be non-null and non-empty
+     */
+    public ArtificialMultiSampleReadStream( Collection<ArtificialSingleSampleReadStream> perSampleArtificialReadStreams ) {
+        if ( perSampleArtificialReadStreams == null || perSampleArtificialReadStreams.isEmpty() ) {
+            throw new ReviewedGATKException("Can't create an ArtificialMultiSampleReadStream out of 0 ArtificialSingleSampleReadStreams");
+        }
+
+        this.perSampleArtificialReadStreams = perSampleArtificialReadStreams;
+    }
+
+    /**
+     * Get an iterator over the merged, coordinate-sorted reads.
+     *
+     * Reads are not created until first requested; note that EVERY call re-runs
+     * initialize(), producing a fresh set of reads and a fresh iterator.
+     */
+    @Override
+    public Iterator<SAMRecord> iterator() {
+        // defer read creation until the reads are actually needed
+        initialize();
+
+        return mergingIterator;
+    }
+
+    /**
+     * The same merged stream, wrapped as a GATKSAMIterator.  As with iterator(),
+     * each call rebuilds the underlying reads and iterator from scratch.
+     */
+    public GATKSAMIterator getGATKSAMIterator() {
+        // defer read creation until the reads are actually needed
+        initialize();
+
+        return GATKSAMIteratorAdapter.adapt(mergingIterator);
+    }
+
+    // Build one in-memory reader per input stream, then merge them into a single
+    // coordinate-sorted iterator stored in mergingIterator.
+    private void initialize() {
+        Collection<SamReader> perSampleSAMReaders = new ArrayList<>(perSampleArtificialReadStreams.size());
+        Collection<SAMFileHeader> headers = new ArrayList<>(perSampleArtificialReadStreams.size());
+
+        for ( ArtificialSingleSampleReadStream readStream : perSampleArtificialReadStreams ) {
+            Collection<SAMRecord> thisStreamReads = readStream.makeReads();
+
+            ArtificialSAMFileReader reader = new ArtificialSAMFileReader(readStream.getHeader(),
+                                                               thisStreamReads.toArray(new SAMRecord[thisStreamReads.size()]));
+            perSampleSAMReaders.add(reader);
+            headers.add(reader.getFileHeader());
+        }
+
+        SamFileHeaderMerger headerMerger = new SamFileHeaderMerger(SAMFileHeader.SortOrder.coordinate, headers, true);
+        mergingIterator = new MergingSamRecordIterator(headerMerger, perSampleSAMReaders, true);
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/sam/ArtificialPatternedSAMIterator.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/sam/ArtificialPatternedSAMIterator.java
new file mode 100644
index 0000000..99b3174
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/sam/ArtificialPatternedSAMIterator.java
@@ -0,0 +1,172 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.sam;
+
+import htsjdk.samtools.SAMFileHeader;
+import htsjdk.samtools.SAMRecord;
+
+
+/*
+ * Copyright (c) 2009 The Broad Institute
+ *
+ * Permission is hereby granted, free of charge, to any person
+ * obtaining a copy of this software and associated documentation
+ * files (the "Software"), to deal in the Software without
+ * restriction, including without limitation the rights to use,
+ * copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the
+ * Software is furnished to do so, subject to the following
+ * conditions:
+ *
+ * The above copyright notice and this permission notice shall be
+ * included in all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+ * OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+ * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+ * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+ * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+ * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+ * OTHER DEALINGS IN THE SOFTWARE.
+ */
+
+/**
+ * @author aaron
+ *
+ *  Class ArtificialPatternedSAMIterator
+ *
+ * This class allows you to pattern the artificial SAM iterator, asking for reads
+ * in order or out of order.
+ */
+public class ArtificialPatternedSAMIterator extends ArtificialSAMIterator {
+
+    /** the pattern we're implementing */
+    public enum PATTERN {
+        RANDOM_READS, IN_ORDER_READS;
+    }
+
+    // our pattern
+    // NOTE(review): mPattern is stored but never read after construction in this class
+    private final PATTERN mPattern;
+
+    /**
+     * this is pretty heavy (and it could be extremely heavy, given the amount of reads they request), but it
+     * allows us to give them each read once, regardless of the order specified
+     */
+    private final int[] reads;
+    private final int readCount;
+
+    /**
+     * create the fake iterator, given the mapping of chromosomes and read counts.  If pattern
+     * is specified to be random, it will generate reads that are randomly placed on the current chromosome
+     *
+     * @param startingChr the starting chromosome
+     * @param endingChr   the ending chromosome
+     * @param readCount   the number of reads in each chromosome
+     * @param unmappedReadCount the number of unmapped reads to emit after the mapped ones
+     * @param header      the associated header
+     * @param pattern     the pattern to implement
+     */
+    public ArtificialPatternedSAMIterator( int startingChr, int endingChr, int readCount, int unmappedReadCount, SAMFileHeader header, PATTERN pattern ) {
+        super(startingChr, endingChr, readCount, unmappedReadCount, header);
+        mPattern = pattern;
+        this.readCount = readCount;
+        reads = new int[readCount];
+
+        // reads[] holds the 1-based positions handed out, initially in ascending order
+        for (int x = 0; x < readCount; x++) {
+            reads[x] = x+1;
+        }
+        if (pattern == PATTERN.RANDOM_READS) {
+            // scramble a bunch of the reads
+            // NOTE: uses Math.random(), so the shuffle is not reproducible between runs
+            for (int y = 0; y < readCount; y++) {
+                int ranOne = (int) Math.round(Math.random() * ( readCount - 1 ));
+                int ranTwo = (int) Math.round(Math.random() * ( readCount - 1 ));
+                int temp = reads[ranOne];
+                reads[ranOne] = reads[ranTwo];
+                reads[ranTwo] = temp;
+            }
+            /**
+             *  up to this point there's no guarantee that the random() calls have put the reads out of order (though it's
+             *  extremely extremely unlikely they've failed).  Let's make sure they're at least out of order:
+             */
+            if (this.reads[0] < this.reads[reads.length - 1]) {
+                int temp = reads[0];
+                reads[0] = reads[reads.length - 1];
+                reads[reads.length - 1] = temp;
+            }
+
+        }
+
+    }
+
+    /**
+     * override the default ArtificialSAMIterator createNextRead method, which creates the next read
+     *
+     * @return true if a read was created (stored in this.next), false once the iteration is exhausted
+     */
+    protected boolean createNextRead() {
+        if (currentRead > rCount) {
+            // finished the current chromosome; move to the next one and restart the read counter
+            currentChromo++;
+            currentRead = 1;
+        }
+        // check for end condition, have we finished the chromosome listing, and have no unmapped reads
+        if (currentChromo >= eChromosomeCount) {
+            if (unmappedRemaining < 1) {
+                this.next = null;
+                return false;
+            } else {
+                // emit unmapped reads (no reference index, no alignment start) until exhausted
+                ++totalReadCount;
+                this.next = ArtificialSAMUtils.createArtificialRead(this.header,
+                        String.valueOf(totalReadCount),
+                        SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX,
+                        SAMRecord.NO_ALIGNMENT_START,
+                        50);
+                --unmappedRemaining;
+                return true;
+            }
+        }
+        ++totalReadCount;
+        this.next = getNextRecord(currentRead);
+
+        ++currentRead;
+        return true;
+    }
+
+
+    /**
+     * get the next read, given its index in the chromosome
+     *
+     * @param read the read index in the chromosome
+     *
+     * @return a SAMRecord (all artificial reads here are 50bp long)
+     */
+    private SAMRecord getNextRecord( int read ) {
+        // NOTE(review): indexes past the end are clamped to the final entry, which re-emits
+        // the last read -- confirm this duplication is intended
+        if (read > this.readCount) {
+            return ArtificialSAMUtils.createArtificialRead(this.header, String.valueOf(reads[readCount - 1]), currentChromo, reads[readCount - 1], 50);
+        }
+        return ArtificialSAMUtils.createArtificialRead(this.header, String.valueOf(reads[read-1]), currentChromo, reads[read-1], 50);
+    }
+
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/sam/ArtificialSAMFileReader.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/sam/ArtificialSAMFileReader.java
new file mode 100644
index 0000000..74b9531
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/sam/ArtificialSAMFileReader.java
@@ -0,0 +1,155 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.sam;
+
+import htsjdk.samtools.*;
+import org.broadinstitute.gatk.utils.GenomeLoc;
+import org.broadinstitute.gatk.utils.GenomeLocParser;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+
+import java.io.ByteArrayInputStream;
+import java.io.InputStream;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Iterator;
+import java.util.List;
+/**
+ * User: hanna
+ * Date: Jun 11, 2009
+ * Time: 9:35:31 AM
+ * BROAD INSTITUTE SOFTWARE COPYRIGHT NOTICE AND AGREEMENT
+ * Software and documentation are copyright 2005 by the Broad Institute.
+ * All rights are reserved.
+ *
+ * Users acknowledge that this software is supplied without any warranty or support.
+ * The Broad Institute is not responsible for its use, misuse, or
+ * functionality.
+ */
+
+/**
+ * Pass specified reads into the given walker.
+ *
+ * A fake SAMFileReader backed by an in-memory list of reads instead of an
+ * actual file; used to feed hand-built reads into code that expects a reader.
+ */
+
+public class ArtificialSAMFileReader extends SAMFileReader {
+    /**
+     * The parser, for GenomeLocs.
+     */
+    private final GenomeLocParser genomeLocParser;
+
+    /**
+     * Backing data store of reads.
+     */
+    private final List<SAMRecord> reads;
+
+    // Optional header override; when null, the superclass header is returned.
+    private SAMFileHeader customHeader = null;
+
+    /**
+     * Construct an artificial SAM file reader.
+     * @param sequenceDictionary sequence dictionary used to initialize our GenomeLocParser
+     * @param reads Reads to use as backing data source.
+     */
+    public ArtificialSAMFileReader(SAMSequenceDictionary sequenceDictionary,SAMRecord... reads) {
+        super( createEmptyInputStream(),true );
+        this.genomeLocParser = new GenomeLocParser(sequenceDictionary);
+        this.reads = Arrays.asList(reads);
+    }
+
+    /**
+     * Construct an artificial SAM file reader with the given SAM file header
+     *
+     * @param customHeader Header that should be returned by calls to getFileHeader() on this reader
+     * @param reads Reads to use as backing data source.
+     */
+    public ArtificialSAMFileReader( SAMFileHeader customHeader, SAMRecord... reads ) {
+        super(createEmptyInputStream(),true);
+
+        this.customHeader = customHeader;
+        this.genomeLocParser = new GenomeLocParser(customHeader.getSequenceDictionary());
+        this.reads = Arrays.asList(reads);
+    }
+
+
+    @Override
+    public SAMFileHeader getFileHeader() {
+        // Prefer the caller-supplied header, when one was provided at construction.
+        if ( customHeader != null ) {
+            return customHeader;
+        }
+
+        return super.getFileHeader();
+    }
+
+    /**
+     * {@inheritDoc}
+     */
+    @Override
+    public SAMRecordIterator query(final String sequence, final int start, final int end, final boolean contained) {
+        GenomeLoc region = genomeLocParser.createGenomeLoc(sequence, start, end);
+        List<SAMRecord> coveredSubset = new ArrayList<SAMRecord>();
+
+        // Select backing reads per the query semantics: contained=true keeps reads
+        // wholly inside the region; otherwise any overlap with the region counts.
+        for( SAMRecord read: reads ) {
+            GenomeLoc readPosition = genomeLocParser.createGenomeLoc(read);
+            if( contained && region.containsP(readPosition) ) coveredSubset.add(read);
+            else if( !contained && readPosition.overlapsP(region) ) coveredSubset.add(read);
+        }
+
+        final Iterator<SAMRecord> iterator = coveredSubset.iterator();
+        return new SAMRecordIterator() {
+            public boolean hasNext() { return iterator.hasNext(); }
+            public SAMRecord next() { return iterator.next(); }
+            public void close() {}
+            public void remove() { iterator.remove(); }
+            // Sort order is never actually verified on this fake iterator.
+            public SAMRecordIterator assertSorted(SAMFileHeader.SortOrder sortOrder) { return this; }
+        };
+    }
+
+    @Override
+    public SAMRecordIterator iterator() {
+        // Iterate over every backing read, in the order they were supplied.
+        return new SAMRecordIterator() {
+            private final Iterator<SAMRecord> iterator = reads.iterator();
+            public boolean hasNext() { return iterator.hasNext(); }
+            public SAMRecord next() { return iterator.next(); }
+            public void close() {}
+            public void remove() { iterator.remove(); }
+            public SAMRecordIterator assertSorted(SAMFileHeader.SortOrder sortOrder) { return this; }
+        };
+    }
+
+    /**
+     * Builds an empty input stream for faking out the sam file reader.
+     * Derive it from a string so that, in the future, it might be possible
+     * to fake the text of a sam file from samtools output, etc.
+     * @return Stream that returns no characters.
+     */
+    private static InputStream createEmptyInputStream() {
+        try {
+            byte[] byteArray = "".getBytes("ISO-8859-1");
+            return new ByteArrayInputStream(byteArray);
+        }
+        catch( Exception ex ) {
+            throw new ReviewedGATKException("Unable to build empty input stream",ex);
+        }
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/sam/ArtificialSAMIterator.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/sam/ArtificialSAMIterator.java
new file mode 100644
index 0000000..aa03523
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/sam/ArtificialSAMIterator.java
@@ -0,0 +1,212 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.sam;
+
+import htsjdk.samtools.SAMFileHeader;
+import htsjdk.samtools.SAMRecord;
+import org.broadinstitute.gatk.utils.iterators.GATKSAMIterator;
+
+import java.util.Iterator;
+
+
+/*
+ * Copyright (c) 2009 The Broad Institute
+ *
+ * Permission is hereby granted, free of charge, to any person
+ * obtaining a copy of this software and associated documentation
+ * files (the "Software"), to deal in the Software without
+ * restriction, including without limitation the rights to use,
+ * copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the
+ * Software is furnished to do so, subject to the following
+ * conditions:
+ *
+ * The above copyright notice and this permission notice shall be
+ * included in all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+ * OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+ * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+ * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+ * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+ * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+ * OTHER DEALINGS IN THE SOFTWARE.
+ */
+
+/**
+ * this fake iterator allows us to look at how specific piles of reads are handled.
+ * Reads are synthesized lazily: one read per position 1..readCount on each
+ * chromosome in the configured range, optionally followed by trailing unmapped reads.
+ */
+public class ArtificialSAMIterator implements GATKSAMIterator {
+
+
+    // zero-based index of the chromosome currently being generated
+    protected int currentChromo = 0;
+    // one-based position/index of the next read within the current chromosome
+    protected int currentRead = 1;
+    // running count of reads produced; also used to name each generated read
+    protected int totalReadCount = 0;
+    // unmapped reads still owed once the mapped chromosomes are exhausted
+    protected int unmappedRemaining = 0;
+    protected boolean done = false;
+    // the next record, staged one step ahead of next()
+    protected SAMRecord next = null;
+    protected SAMFileHeader header = null;
+
+    // the passed in parameters
+    protected final int sChr;
+    protected final int eChromosomeCount;
+    protected final int rCount;
+    protected final int unmappedReadCount;
+
+    // let us know to make a read, we need this to help out the fake sam query iterator
+    private boolean initialized = false;
+
+    /**
+     * Is this iterator currently open or closed?  Closed iterators can be reused.
+     */
+    protected boolean open = false;
+
+    /**
+     * create the fake iterator, given the mapping of chromosomes and read counts
+     * (this variant produces no trailing unmapped reads)
+     *
+     * @param startingChr the starting chromosome
+     * @param endingChr   the ending chromosome (inclusive)
+     * @param readCount   the number of reads in each chromosome
+     * @param header      the associated header
+     */
+    ArtificialSAMIterator( int startingChr, int endingChr, int readCount, SAMFileHeader header ) {
+        sChr = startingChr;
+        eChromosomeCount = (endingChr - startingChr) + 1;
+        rCount = readCount;
+        this.header = header;
+        unmappedReadCount = 0;
+        reset();
+    }
+
+    /** rewind all generation state so iteration can start over from the beginning */
+    protected void reset() {
+        this.currentChromo = 0;
+        this.currentRead = 1;
+        this.totalReadCount = 0;
+        this.done = false;
+        this.next = null;
+        this.initialized = false;
+        this.unmappedRemaining = unmappedReadCount;
+    }
+
+    /**
+     * create the fake iterator, given the mapping of chromosomes and read counts
+     *
+     * @param startingChr       the starting chromosome
+     * @param endingChr         the ending chromosome (inclusive)
+     * @param readCount         the number of reads in each chromosome
+     * @param unmappedReadCount the number of trailing unmapped reads to emit
+     * @param header            the associated header
+     */
+    ArtificialSAMIterator( int startingChr, int endingChr, int readCount, int unmappedReadCount, SAMFileHeader header ) {
+        sChr = startingChr;
+        eChromosomeCount = (endingChr - startingChr) + 1;
+        rCount = readCount;
+        this.header = header;
+        this.currentChromo = 0;
+        this.unmappedReadCount = unmappedReadCount;
+        reset();
+    }
+
+    public void close() {
+        open = false;
+    }
+
+    public boolean hasNext() {
+        open = true;
+
+        // lazily stage the first record the first time we're asked
+        if (!initialized){
+            initialized = true;
+            createNextRead();
+        }
+        if (this.next != null) {
+            return true;
+        }
+        return false;
+    }
+
+    /**
+     * synthesize the next record into this.next
+     *
+     * @return true when a record was produced, false when the iteration is exhausted
+     */
+    protected boolean createNextRead() {
+        // roll over to the next chromosome once this one's quota is filled
+        if (currentRead > rCount) {
+            currentChromo++;
+            currentRead = 1;
+        }
+        // check for end condition, have we finished the chromosome listing, and have no unmapped reads
+        if (currentChromo >= eChromosomeCount) {
+            if (unmappedRemaining < 1) {
+                this.next = null;
+                return false;
+            } else {
+                // produce one of the trailing unmapped reads
+                ++totalReadCount;
+                this.next = ArtificialSAMUtils.createArtificialRead(this.header,
+                        String.valueOf(totalReadCount),
+                        SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX,
+                        SAMRecord.NO_ALIGNMENT_START,
+                        50);
+                --unmappedRemaining;
+                return true;
+            }
+        }
+        ++totalReadCount;
+        // mapped read: aligned at position currentRead on chromosome currentChromo, 50 bases long
+        this.next = ArtificialSAMUtils.createArtificialRead(this.header, String.valueOf(totalReadCount), currentChromo, currentRead, 50);
+        ++currentRead;
+        return true;
+    }
+
+
+    public SAMRecord next() {
+        open = true;
+
+        // hand out the staged record and immediately stage the following one
+        SAMRecord ret = next;
+        createNextRead();
+        return ret;
+    }
+
+    public void remove() {
+        throw new UnsupportedOperationException("You've tried to remove on a GATKSAMIterator (unsupported), not to mention that this is a fake iterator.");
+    }
+
+    /**
+     * return this iterator, for the iterable interface
+     * @return this same object
+     */
+    public Iterator<SAMRecord> iterator() {
+        return this;
+    }
+
+    /**
+     * some instrumentation methods
+     * @return how many reads have been generated so far
+     */
+    public int readsTaken() {
+        return totalReadCount;
+    }
+
+    /**
+     * peek at the next sam record
+     *
+     * @return the staged record, or null once iteration is exhausted
+     */
+    public SAMRecord peek() {
+        return this.next;
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/sam/ArtificialSAMQueryIterator.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/sam/ArtificialSAMQueryIterator.java
new file mode 100644
index 0000000..e4f8b79
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/sam/ArtificialSAMQueryIterator.java
@@ -0,0 +1,259 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.sam;
+
+import htsjdk.samtools.SAMFileHeader;
+import htsjdk.samtools.SAMRecord;
+import htsjdk.samtools.SAMSequenceRecord;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+
+import java.util.List;
+
+
+/*
+ * Copyright (c) 2009 The Broad Institute
+ *
+ * Permission is hereby granted, free of charge, to any person
+ * obtaining a copy of this software and associated documentation
+ * files (the "Software"), to deal in the Software without
+ * restriction, including without limitation the rights to use,
+ * copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the
+ * Software is furnished to do so, subject to the following
+ * conditions:
+ *
+ * The above copyright notice and this permission notice shall be
+ * included in all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+ * OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+ * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+ * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+ * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+ * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+ * OTHER DEALINGS IN THE SOFTWARE.
+ */
+
+/**
+ * @author aaron
+ *
+ * allows query calls to the artificial sam iterator, which allows you
+ * to test out classes that use specific intervals.  The reads returned will
+ * all lie in order in the specified interval.
+ */
+public class ArtificialSAMQueryIterator extends ArtificialSAMIterator {
+
+    // bounds and contig of the most recent query
+    protected int finalPos = 0;
+    protected int startPos = 0;
+    protected int contigIndex = -1;
+    // true for overlap queries, false for containment queries
+    protected boolean overlapping = false;
+    protected int startingChr = 0;
+    // set once a query has positioned this iterator
+    protected boolean seeked = false;
+
+    /**
+     * create the fake iterator, given the mapping of chromosomes and read counts
+     *
+     * @param startingChr the starting chromosome
+     * @param endingChr   the ending chromosome
+     * @param readCount   the number of reads in each chromosome
+     * @param header      the associated header
+     */
+    ArtificialSAMQueryIterator( int startingChr, int endingChr, int readCount, int unmappedReadCount, SAMFileHeader header ) {
+        super(startingChr, endingChr, readCount, unmappedReadCount, header);
+        this.startingChr = startingChr;
+    }
+
+    @Override
+    protected void reset() {
+        this.startPos = 0;
+        this.finalPos = 0;
+        this.contigIndex = -1;
+        // Doesn't make sense to reset the overlapping flag, because we rely on its state later on.
+        // TODO: Make this a bit more direct.
+        //overlapping = false;
+        this.startingChr = 0;
+        this.seeked = false;
+        super.reset();
+    }
+
+    /**
+     * query contained - get reads contained by the specified interval
+     *
+     * @param contig the contig index string
+     * @param start  the start position
+     * @param stop   the stop position
+     */
+    public void queryContained( String contig, int start, int stop ) {
+        this.overlapping = false;
+        initialize(contig, start, stop);
+    }
+
+    /**
+     * query overlapping - get reads overlapping the specified interval
+     *
+     * @param contig the contig index string
+     * @param start  the start position
+     * @param stop   the stop position
+     */
+    public void queryOverlapping( String contig, int start, int stop ) {
+        this.overlapping = true;
+        initialize(contig, start, stop);
+    }
+
+    /** dispatch to a contained or overlapping query based on the flag */
+    public void query( String contig, int start, int stop, boolean contained ) {
+        if (contained)
+            queryContained(contig, start, stop);
+        else
+            queryOverlapping(contig, start, stop);
+    }
+
+    public void queryUnmappedReads() {
+        initializeUnmapped();
+    }
+
+    /**
+     * initialize the iterator to an unmapped read position
+     */
+    public void initializeUnmapped() {
+        // throw away data from the previous invocation, if one exists.
+        ensureUntouched();
+        reset();
+
+        // skip past every mapped read (non-negative reference index)
+        while (super.hasNext() && this.peek().getReferenceIndex() >= 0) {
+            super.next();
+        }
+        // sanity check that we have an actual matching read next
+        SAMRecord rec = this.peek();
+        if (rec == null) {
+            throw new ReviewedGATKException("The next read doesn't match");
+        }
+        // set the seeked variable to true
+        seeked = true;
+    }
+
+
+
+
+    /**
+     * initialize the query iterator
+     *
+     * @param contig the contig
+     * @param start  the start position
+     * @param stop   the stop position
+     */
+    private void initialize( String contig, int start, int stop ) {
+        // throw away data from the previous invocation, if one exists.
+        ensureUntouched();
+        reset();
+
+        finalPos = stop;
+        startPos = start;
+        // a negative stop means "to the end of the contig"
+        if (finalPos < 0) {
+            finalPos = Integer.MAX_VALUE;
+        }
+        // sanity check that we have the contig
+        contigIndex = -1;
+        List<SAMSequenceRecord> list = header.getSequenceDictionary().getSequences();
+        for (SAMSequenceRecord rec : list) {
+            if (rec.getSequenceName().equals(contig)) {
+                contigIndex = rec.getSequenceIndex();
+            }
+        }
+        if (contigIndex < 0) { throw new IllegalArgumentException("ArtificialContig" + contig + " doesn't exist"); }
+        // fast-forward to the first read on the target contig...
+        while (super.hasNext() && this.peek().getReferenceIndex() < contigIndex) {
+            super.next();
+        }
+        if (!super.hasNext()) {
+            throw new ReviewedGATKException("Unable to find the target chromosome");
+        }
+        // ...and then to the first read at or beyond the start position
+        while (super.hasNext() && this.peek().getAlignmentStart() < start) {
+            super.next();
+        }
+        // sanity check that we have an actual matching read next
+        SAMRecord rec = this.peek();
+        if (!matches(rec)) {
+            throw new ReviewedGATKException("The next read doesn't match");
+        }
+        // set the seeked variable to true
+        seeked = true;
+    }
+
+    /**
+     * given a read and the query type, check if it matches our regions
+     *
+     * @param rec the read
+     *
+     * @return true if it belongs in our region
+     */
+    public boolean matches( SAMRecord rec ) {
+        if (rec.getReferenceIndex() != this.contigIndex) {
+            return false;
+        }
+        // if we have an unmapped read, matching the contig is good enough for us
+        // NOTE(review): this branch is only reachable when contigIndex is itself
+        // negative, since the check above already rejected mismatched indices
+        if (rec.getReferenceIndex() < 0) {
+            return true;
+        }
+
+        if (!overlapping) {
+            // containment query: the read must lie entirely within [startPos, finalPos]
+            if (( rec.getAlignmentStart() >= startPos && rec.getAlignmentEnd() <= finalPos )) {
+                return true;
+            }
+        } else {
+            // overlap query: either endpoint falling within the range is enough
+            if (( rec.getAlignmentStart() <= finalPos && rec.getAlignmentStart() >= startPos ) ||
+                    ( rec.getAlignmentEnd() <= finalPos && rec.getAlignmentEnd() >= startPos )) {
+                return true;
+            }
+        }
+        return false;
+    }
+
+
+    /**
+     * override the hasNext, to incorporate our limiting factor
+     *
+     * @return true while the upcoming read still matches the active query
+     */
+    public boolean hasNext() {
+        boolean res = super.hasNext();
+        if (!seeked) {
+            return res;
+        }
+        if (res && matches(this.next)) {
+            return true;
+        }
+        return false;
+    }
+
+    /** make sure we haven't been used as an iterator yet; this is to mirror the MergingSamIterator2 action. */
+    public void ensureUntouched() {
+        if (open) {
+            throw new UnsupportedOperationException("We've already been used as an iterator; you can't query after that");
+        }
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/sam/ArtificialSAMUtils.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/sam/ArtificialSAMUtils.java
new file mode 100644
index 0000000..2d192d3
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/sam/ArtificialSAMUtils.java
@@ -0,0 +1,484 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.sam;
+
+import htsjdk.samtools.*;
+import org.broadinstitute.gatk.utils.iterators.GATKSAMIterator;
+import org.broadinstitute.gatk.utils.GenomeLoc;
+import org.broadinstitute.gatk.utils.Utils;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+import org.broadinstitute.gatk.utils.locusiterator.LocusIteratorByState;
+import org.broadinstitute.gatk.utils.pileup.PileupElement;
+import org.broadinstitute.gatk.utils.pileup.ReadBackedPileup;
+import org.broadinstitute.gatk.utils.pileup.ReadBackedPileupImpl;
+
+import java.io.File;
+import java.util.*;
+
+/**
+ * @author aaron
+ * @version 1.0
+ */
+public class ArtificialSAMUtils {
+    public static final int DEFAULT_READ_LENGTH = 50;
+
+    /**
+     * create an artificial BAM file
+     *
+     * @param filename            the filename to write to
+     * @param numberOfChromosomes the number of chromosomes
+     * @param startingChromosome  where to start counting
+     * @param chromosomeSize      how large each chromosome is
+     * @param readsPerChomosome   how many reads to make in each chromosome.  They'll be aligned from position 1 to x (which is the number of reads)
+     */
+    public static void createArtificialBamFile(String filename, int numberOfChromosomes, int startingChromosome, int chromosomeSize, int readsPerChomosome) {
+        SAMFileHeader header = createArtificialSamHeader(numberOfChromosomes, startingChromosome, chromosomeSize);
+        File outFile = new File(filename);
+
+        SAMFileWriter out = new SAMFileWriterFactory().makeBAMWriter(header, true, outFile);
+
+        for (int x = startingChromosome; x < startingChromosome + numberOfChromosomes; x++) {
+            // FIX: inclusive bound so exactly readsPerChomosome reads are written per
+            // chromosome, matching the javadoc and createArtificialSamFile (the old
+            // '<' bound silently dropped the last read of every chromosome).
+            for (int readNumber = 1; readNumber <= readsPerChomosome; readNumber++) {
+                out.addAlignment(createArtificialRead(header, "Read_" + readNumber, x - startingChromosome, readNumber, DEFAULT_READ_LENGTH));
+            }
+        }
+
+        out.close();
+    }
+
+    /**
+     * Write an artificial SAM (text) file of patterned reads.
+     *
+     * @param filename            the filename to write to
+     * @param numberOfChromosomes the number of chromosomes
+     * @param startingChromosome  where to start counting
+     * @param chromosomeSize      how large each chromosome is
+     * @param readsPerChomosome   how many reads to make in each chromosome.  They'll be aligned from position 1 to x (which is the number of reads)
+     */
+    public static void createArtificialSamFile(String filename, int numberOfChromosomes, int startingChromosome, int chromosomeSize, int readsPerChomosome) {
+        final SAMFileHeader header = createArtificialSamHeader(numberOfChromosomes, startingChromosome, chromosomeSize);
+        final SAMFileWriter writer = new SAMFileWriterFactory().makeSAMWriter(header, false, new File(filename));
+
+        for (int chr = startingChromosome; chr < startingChromosome + numberOfChromosomes; chr++) {
+            final int refIndex = chr - startingChromosome;
+            // one 100-base read per position, named after its position
+            for (int pos = 1; pos <= readsPerChomosome; pos++) {
+                writer.addAlignment(createArtificialRead(header, "Read_" + pos, refIndex, pos, 100));
+            }
+        }
+
+        writer.close();
+    }
+
+    /**
+     * Creates an artificial sam header, matching the parameters, chromosomes which will be labeled chr1, chr2, etc
+     *
+     * @param numberOfChromosomes the number of chromosomes to create
+     * @param startingChromosome  the starting number for the chromosome (most likely set to 1)
+     * @param chromosomeSize      the length of each chromosome
+     * @return the newly built header, marked coordinate-sorted
+     */
+    public static SAMFileHeader createArtificialSamHeader(int numberOfChromosomes, int startingChromosome, int chromosomeSize) {
+        // fabricate a sequence record named "chr<N>" of the requested size for each chromosome
+        final SAMSequenceDictionary dictionary = new SAMSequenceDictionary();
+        for (int chr = startingChromosome; chr < startingChromosome + numberOfChromosomes; chr++) {
+            final SAMSequenceRecord sequence = new SAMSequenceRecord("chr" + (chr), chromosomeSize /* size */);
+            sequence.setSequenceLength(chromosomeSize);
+            dictionary.addSequence(sequence);
+        }
+        final SAMFileHeader header = new SAMFileHeader();
+        header.setSortOrder(htsjdk.samtools.SAMFileHeader.SortOrder.coordinate);
+        header.setSequenceDictionary(dictionary);
+        return header;
+    }
+
+    /**
+     * Creates an artificial sam header based on the sequence dictionary dict
+     *
+     * @param dict the sequence dictionary to install in the header
+     * @return a new sam header
+     */
+    public static SAMFileHeader createArtificialSamHeader(final SAMSequenceDictionary dict) {
+        final SAMFileHeader artificialHeader = new SAMFileHeader();
+        // coordinate order, as created by the other header factories in this class
+        artificialHeader.setSortOrder(htsjdk.samtools.SAMFileHeader.SortOrder.coordinate);
+        artificialHeader.setSequenceDictionary(dict);
+        return artificialHeader;
+    }
+
+    /**
+     * Creates an artificial sam header with standard test parameters
+     * (a single chromosome, numbered from 1, of length 1,000,000 bases).
+     *
+     * @return the sam header
+     */
+    public static SAMFileHeader createArtificialSamHeader() {
+        return createArtificialSamHeader(1, 1, 1000000);
+    }
+
+    /**
+     * setup a default read group for a SAMFileHeader
+     *
+     * @param header      the header to set
+     * @param readGroupID the read group ID tag
+     * @param sampleName  the sample name
+     * @return the adjusted SAMFileHeader
+     */
+    public static SAMFileHeader createDefaultReadGroup(SAMFileHeader header, String readGroupID, String sampleName) {
+        final SAMReadGroupRecord group = new SAMReadGroupRecord(readGroupID);
+        group.setSample(sampleName);
+        // install a single-element, mutable read-group list on the header
+        final List<SAMReadGroupRecord> groups = new ArrayList<SAMReadGroupRecord>();
+        groups.add(group);
+        header.setReadGroups(groups);
+        return header;
+    }
+
+    /**
+     * setup read groups for the specified read groups and sample names
+     *
+     * @param header       the header to set
+     * @param readGroupIDs the read group ID tags
+     * @param sampleNames  the sample names
+     * @return the adjusted SAMFileHeader
+     */
+    public static SAMFileHeader createEnumeratedReadGroups(SAMFileHeader header, List<String> readGroupIDs, List<String> sampleNames) {
+        if (readGroupIDs.size() != sampleNames.size()) {
+            throw new ReviewedGATKException("read group count and sample name count must be the same");
+        }
+
+        final List<SAMReadGroupRecord> groups = new ArrayList<SAMReadGroupRecord>();
+
+        // pair each read-group id with the sample name at the same index
+        for (int i = 0; i < readGroupIDs.size(); i++) {
+            final SAMReadGroupRecord group = new SAMReadGroupRecord(readGroupIDs.get(i));
+            group.setSample(sampleNames.get(i));
+            groups.add(group);
+        }
+        header.setReadGroups(groups);
+        return header;
+    }
+
+
+    /**
+     * Create an artificial read based on the parameters.  The cigar string will be *M, where * is the length of the read
+     *
+     * @param header         the SAM header to associate the read with
+     * @param name           the name of the read
+     * @param refIndex       the reference index, i.e. what chromosome to associate it with
+     * @param alignmentStart where to start the alignment
+     * @param length         the length of the read
+     * @return the artificial read
+     * @throws ReviewedGATKException if exactly one of refIndex/alignmentStart is the
+     *         "no alignment" sentinel (they must be supplied together or not at all)
+     */
+    public static GATKSAMRecord createArtificialRead(SAMFileHeader header, String name, int refIndex, int alignmentStart, int length) {
+        // The unmapped sentinels must be used in tandem: an unmapped read has
+        // neither a reference index nor an alignment start.
+        if ((refIndex == SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX && alignmentStart != SAMRecord.NO_ALIGNMENT_START) ||
+                (refIndex != SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX && alignmentStart == SAMRecord.NO_ALIGNMENT_START))
+            throw new ReviewedGATKException("Invalid alignment start for artificial read, start = " + alignmentStart);
+        GATKSAMRecord record = new GATKSAMRecord(header);
+        record.setReadName(name);
+        record.setReferenceIndex(refIndex);
+        record.setAlignmentStart(alignmentStart);
+        // a single fully-matched cigar element: "<length>M"
+        List<CigarElement> elements = new ArrayList<CigarElement>();
+        elements.add(new CigarElement(length, CigarOperator.characterToEnum('M')));
+        record.setCigar(new Cigar(elements));
+        record.setProperPairFlag(false);
+
+        // our reads and quals are all 'A's by default
+        byte[] c = new byte[length];
+        byte[] q = new byte[length];
+        for (int x = 0; x < length; x++)
+            c[x] = q[x] = 'A';
+        record.setReadBases(c);
+        record.setBaseQualities(q);
+
+        if (refIndex == SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX) {
+            record.setReadUnmappedFlag(true);
+        }
+
+        return record;
+    }
+
+    /**
+     * Create an artificial read based on the parameters.  The cigar string will be *M, where * is the length of the read
+     *
+     * @param header         the SAM header to associate the read with
+     * @param name           the name of the read
+     * @param refIndex       the reference index, i.e. what chromosome to associate it with
+     * @param alignmentStart where to start the alignment
+     * @param bases          the sequence of the read
+     * @param qual           the qualities of the read
+     * @return the artificial read
+     * @throws ReviewedGATKException if bases and qual differ in length
+     */
+    public static GATKSAMRecord createArtificialRead(SAMFileHeader header, String name, int refIndex, int alignmentStart, byte[] bases, byte[] qual) {
+        if (bases.length != qual.length) {
+            throw new ReviewedGATKException("Passed in read string is a different length than the quality array");
+        }
+        GATKSAMRecord rec = createArtificialRead(header, name, refIndex, alignmentStart, bases.length);
+        rec.setReadBases(bases);
+        rec.setBaseQualities(qual);
+        // all reads created through this overload share a placeholder read group
+        rec.setReadGroup(new GATKSAMReadGroupRecord("x"));
+        // use the named constant rather than the magic value -1, for consistency with the other overloads
+        if (refIndex == SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX) {
+            rec.setReadUnmappedFlag(true);
+        }
+
+        return rec;
+    }
+
+    /**
+     * Create an artificial read based on the parameters
+     *
+     * @param header         the SAM header to associate the read with
+     * @param name           the name of the read
+     * @param refIndex       the reference index, i.e. what chromosome to associate it with
+     * @param alignmentStart where to start the alignment
+     * @param bases          the sequence of the read
+     * @param qual           the qualities of the read
+     * @param cigar          the cigar string of the read
+     * @return the artificial read
+     */
+    public static GATKSAMRecord createArtificialRead(SAMFileHeader header, String name, int refIndex, int alignmentStart, byte[] bases, byte[] qual, String cigar) {
+        // delegate to the byte[]-based overload, then replace the default *M cigar
+        final GATKSAMRecord read = createArtificialRead(header, name, refIndex, alignmentStart, bases, qual);
+        read.setCigarString(cigar);
+        return read;
+    }
+
+    /**
+     * Create an artificial read with the following default parameters :
+     * header:
+     * numberOfChromosomes = 1
+     * startingChromosome = 1
+     * chromosomeSize = 1000000
+     * read:
+     * name = "default_read"
+     * refIndex = 0
+     * alignmentStart = 10000
+     *
+     * @param bases the sequence of the read
+     * @param qual  the qualities of the read
+     * @param cigar the cigar string of the read
+     * @return the artificial read
+     */
+    public static GATKSAMRecord createArtificialRead(byte[] bases, byte[] qual, String cigar) {
+        SAMFileHeader header = ArtificialSAMUtils.createArtificialSamHeader();
+        return ArtificialSAMUtils.createArtificialRead(header, "default_read", 0, 10000, bases, qual, cigar);
+    }
+
+    /**
+     * Create an artificial read whose length is taken from the given cigar.
+     *
+     * The read is placed on the default artificial header at refIndex 0,
+     * alignment start 10000, with read name "default_read".
+     *
+     * @param cigar the cigar describing the read; its read length determines the number of bases
+     * @return the artificial read
+     */
+    public static GATKSAMRecord createArtificialRead(Cigar cigar) {
+        int length = cigar.getReadLength();
+        byte [] base = {'A'};
+        byte [] qual = {30};
+        // presumably tiles the single-element arrays out to `length` ('A' bases, Q30 quals) -- confirm in Utils
+        byte [] bases = Utils.arrayFromArrayWithLength(base, length);
+        byte [] quals = Utils.arrayFromArrayWithLength(qual, length);
+        SAMFileHeader header = ArtificialSAMUtils.createArtificialSamHeader();
+        return ArtificialSAMUtils.createArtificialRead(header, "default_read", 0, 10000, bases, quals, cigar.toString());
+    }
+
+    
+    /**
+     * Create a mated, proper pair of artificial reads on contig 0 of the given header.
+     *
+     * @param header         the SAM header to associate the reads with
+     * @param name           the read name shared by both mates
+     * @param readLen        the length of each read
+     * @param leftStart      alignment start of the left read
+     * @param rightStart     alignment start of the right read
+     * @param leftIsFirst    true if the left read is first of pair
+     * @param leftIsNegative true if the left read is on the negative strand
+     * @return the two mated reads, left then right
+     */
+    public final static List<GATKSAMRecord> createPair(SAMFileHeader header, String name, int readLen, int leftStart, int rightStart, boolean leftIsFirst, boolean leftIsNegative) {
+        final GATKSAMRecord leftRead = ArtificialSAMUtils.createArtificialRead(header, name, 0, leftStart, readLen);
+        final GATKSAMRecord rightRead = ArtificialSAMUtils.createArtificialRead(header, name, 0, rightStart, readLen);
+
+        // mark both reads as paired and properly paired
+        leftRead.setReadPairedFlag(true);
+        leftRead.setProperPairFlag(true);
+        rightRead.setReadPairedFlag(true);
+        rightRead.setProperPairFlag(true);
+
+        // exactly one of the mates is first of pair
+        leftRead.setFirstOfPairFlag(leftIsFirst);
+        rightRead.setFirstOfPairFlag(!leftIsFirst);
+
+        // mates lie on opposite strands
+        leftRead.setReadNegativeStrandFlag(leftIsNegative);
+        leftRead.setMateNegativeStrandFlag(!leftIsNegative);
+        rightRead.setReadNegativeStrandFlag(!leftIsNegative);
+        rightRead.setMateNegativeStrandFlag(leftIsNegative);
+
+        // cross-link the mate coordinates, both on contig 0
+        leftRead.setMateAlignmentStart(rightRead.getAlignmentStart());
+        leftRead.setMateReferenceIndex(0);
+        rightRead.setMateAlignmentStart(leftRead.getAlignmentStart());
+        rightRead.setMateReferenceIndex(0);
+
+        // insert size is signed: positive on the left mate, negative on the right
+        final int fragmentSize = rightStart + readLen - leftStart;
+        leftRead.setInferredInsertSize(fragmentSize);
+        rightRead.setInferredInsertSize(-fragmentSize);
+
+        return Arrays.asList(leftRead, rightRead);
+    }
+
+    /**
+     * Create a collection of identical artificial reads based on the parameters.  The cigar string for each
+     * read will be *M, where * is the length of the read.
+     *
+     * Useful for testing things like positional downsampling where you care only about the position and
+     * number of reads, and not the other attributes.
+     *
+     * @param stackSize      number of identical reads to create
+     * @param header         the SAM header to associate each read with
+     * @param name           name associated with each read
+     * @param refIndex       the reference index, i.e. what chromosome to associate them with
+     * @param alignmentStart where to start each alignment
+     * @param length         the length of each read
+     *
+     * @return a collection of stackSize reads all sharing the above properties
+     */
+    public static Collection<GATKSAMRecord> createStackOfIdenticalArtificialReads( int stackSize, SAMFileHeader header, String name, int refIndex, int alignmentStart, int length ) {
+        final Collection<GATKSAMRecord> stack = new ArrayList<GATKSAMRecord>(stackSize);
+        for ( int count = 0; count < stackSize; count++ ) {
+            stack.add(createArtificialRead(header, name, refIndex, alignmentStart, length));
+        }
+        return stack;
+    }
+
+    /**
+     * create an iterator containing the specified read piles
+     *
+     * @param startingChr the chromosome (reference ID) to start from
+     * @param endingChr   the id to end with
+     * @param readCount   the number of reads per chromosome
+     * @return GATKSAMIterator representing the specified amount of fake data
+     */
+    public static GATKSAMIterator mappedReadIterator(int startingChr, int endingChr, int readCount) {
+        final int contigCount = (endingChr - startingChr) + 1;
+        final SAMFileHeader header = createArtificialSamHeader(contigCount, startingChr, readCount + DEFAULT_READ_LENGTH);
+        return new ArtificialSAMQueryIterator(startingChr, endingChr, readCount, 0, header);
+    }
+
+    /**
+     * create an iterator containing the specified read piles
+     *
+     * @param startingChr       the chromosome (reference ID) to start from
+     * @param endingChr         the id to end with
+     * @param readCount         the number of reads per chromosome
+     * @param unmappedReadCount the count of unmapped reads to place at the end of the iterator, like in a sorted bam file
+     * @return GATKSAMIterator representing the specified amount of fake data
+     */
+    public static GATKSAMIterator mappedAndUnmappedReadIterator(int startingChr, int endingChr, int readCount, int unmappedReadCount) {
+        final int contigCount = (endingChr - startingChr) + 1;
+        final SAMFileHeader header = createArtificialSamHeader(contigCount, startingChr, readCount + DEFAULT_READ_LENGTH);
+        return new ArtificialSAMQueryIterator(startingChr, endingChr, readCount, unmappedReadCount, header);
+    }
+
+    /**
+     * create an ArtificialSAMQueryIterator containing the specified read piles
+     *
+     * @param startingChr the chromosome (reference ID) to start from
+     * @param endingChr   the id to end with
+     * @param readCount   the number of reads per chromosome
+     * @return ArtificialSAMQueryIterator representing the specified amount of fake data
+     */
+    public static ArtificialSAMQueryIterator queryReadIterator(int startingChr, int endingChr, int readCount) {
+        final int contigCount = (endingChr - startingChr) + 1;
+        final SAMFileHeader header = createArtificialSamHeader(contigCount, startingChr, readCount + DEFAULT_READ_LENGTH);
+        return new ArtificialSAMQueryIterator(startingChr, endingChr, readCount, 0, header);
+    }
+
+    /**
+     * create an ArtificialSAMQueryIterator containing the specified read piles
+     *
+     * @param startingChr       the chromosome (reference ID) to start from
+     * @param endingChr         the id to end with
+     * @param readCount         the number of reads per chromosome
+     * @param unmappedReadCount the count of unmapped reads to place at the end of the iterator, like in a sorted bam file
+     * @return GATKSAMIterator representing the specified amount of fake data
+     */
+    public static GATKSAMIterator queryReadIterator(int startingChr, int endingChr, int readCount, int unmappedReadCount) {
+        final int contigCount = (endingChr - startingChr) + 1;
+        final SAMFileHeader header = createArtificialSamHeader(contigCount, startingChr, readCount + DEFAULT_READ_LENGTH);
+        return new ArtificialSAMQueryIterator(startingChr, endingChr, readCount, unmappedReadCount, header);
+    }
+
+    /**
+     * Create an iterator containing the specified reads
+     *
+     * @param reads the reads
+     * @return iterator for the reads
+     */
+    public static GATKSAMIterator createReadIterator(SAMRecord... reads) {
+        // delegate to the List-based overload
+        return createReadIterator(Arrays.asList(reads));
+    }
+
+    /**
+     * Create an iterator containing the specified reads
+     *
+     * NOTE: the returned object wraps a single underlying iterator over the list,
+     * so it is effectively single-use -- iterator() always returns the same
+     * partially-consumed iterator instance.
+     *
+     * @param reads the reads
+     * @return iterator for the reads
+     */
+    public static GATKSAMIterator createReadIterator(List<SAMRecord> reads) {
+        final Iterator<SAMRecord> iter = reads.iterator();
+        return new GATKSAMIterator() {
+            @Override public void close() {} // nothing to release
+            @Override public Iterator<SAMRecord> iterator() { return iter; } // same iterator on every call
+            @Override public boolean hasNext() { return iter.hasNext(); }
+            @Override public SAMRecord next() { return iter.next(); }
+            @Override public void remove() { iter.remove(); }
+        };
+    }
+
+    /**
+     * Draw a uniform random integer in the closed interval [start, stop].
+     *
+     * @param ran   source of randomness
+     * @param start smallest value that may be returned (inclusive)
+     * @param stop  largest value that may be returned (inclusive)
+     * @return a random int in [start, stop]
+     */
+    private final static int ranIntInclusive(Random ran, int start, int stop) {
+        // Random.nextInt(bound) excludes bound, so widen the range by 1 to make stop
+        // inclusive as the method name promises (previously stop could never be returned)
+        return ran.nextInt(stop - start + 1) + start;
+    }
+
+    /**
+     * Creates a read backed pileup containing up to pileupSize reads at refID 0 from header at loc with
+     * reads created that have readLen bases.  Pairs are sampled from a gaussian distribution with mean insert
+     * size of insertSize and variation of insertSize / 10.  The first read will be in the pileup, and the second
+     * may be, depending on where this sampled insertSize puts it.
+     *
+     * NOTE: uses an unseeded Random, so results differ from call to call.
+     *
+     * @param header     the SAM header to associate the reads with
+     * @param loc        the locus to pile up over; its start position is the pileup position
+     * @param readLen    length of every generated read
+     * @param insertSize mean insert size of each generated pair
+     * @param pileupSize upper bound on the pileup size (pileupSize / 2 pairs are attempted)
+     * @return the pileup over loc
+     */
+    public static ReadBackedPileup createReadBackedPileup(final SAMFileHeader header, final GenomeLoc loc, final int readLen, final int insertSize, final int pileupSize) {
+        final Random ran = new Random();
+        final boolean leftIsFirst = true;
+        final boolean leftIsNegative = false;
+        final int insertSizeVariation = insertSize / 10;
+        final int pos = loc.getStart();
+
+        final List<PileupElement> pileupElements = new ArrayList<PileupElement>();
+        for (int i = 0; i < pileupSize / 2; i++) {
+            final String readName = "read" + i;
+            // left read starts somewhere in [1, pos); fragment size ~ N(insertSize, insertSizeVariation)
+            final int leftStart = ranIntInclusive(ran, 1, pos);
+            final int fragmentSize = (int) (ran.nextGaussian() * insertSizeVariation + insertSize);
+            final int rightStart = leftStart + fragmentSize - readLen;
+
+            if (rightStart <= 0) continue;  // skip pairs whose right mate would fall before the contig start
+
+            List<GATKSAMRecord> pair = createPair(header, readName, readLen, leftStart, rightStart, leftIsFirst, leftIsNegative);
+            final GATKSAMRecord left = pair.get(0);
+            final GATKSAMRecord right = pair.get(1);
+
+            // the left read is always added (assumes it spans pos, i.e. readLen >= pos - leftStart -- TODO confirm)
+            pileupElements.add(LocusIteratorByState.createPileupForReadAndOffset(left, pos - leftStart));
+
+            // the right read is added only when it overlaps pos
+            if (pos >= right.getAlignmentStart() && pos <= right.getAlignmentEnd()) {
+                pileupElements.add(LocusIteratorByState.createPileupForReadAndOffset(right, pos - rightStart));
+            }
+        }
+
+        Collections.sort(pileupElements);
+        return new ReadBackedPileupImpl(loc, pileupElements);
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/sam/ArtificialSingleSampleReadStream.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/sam/ArtificialSingleSampleReadStream.java
new file mode 100644
index 0000000..d3eb73d
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/sam/ArtificialSingleSampleReadStream.java
@@ -0,0 +1,213 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.sam;
+
+import htsjdk.samtools.SAMFileHeader;
+import htsjdk.samtools.SAMRecord;
+import org.broadinstitute.gatk.utils.iterators.GATKSAMIterator;
+import org.broadinstitute.gatk.utils.iterators.GATKSAMIteratorAdapter;
+import org.broadinstitute.gatk.utils.MathUtils;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Iterator;
+
+/**
+ * An artificial stream of reads from a single read group/sample with configurable characteristics
+ * such as:
+ *
+ * -the number of contigs that the reads should be distributed across
+ * -number of "stacks" of reads sharing the same alignment start position per contig
+ * -the min/max number of reads in each stack (exact values chosen randomly from this range)
+ * -the min/max distance between stack start positions (exact values chosen randomly from this range)
+ * -the min/max length of each read (exact values chosen randomly from this range)
+ * -the number of unmapped reads
+ *
+ * The cigar string for all reads will be *M, where * is the length of the read.
+ *
+ * @author David Roazen
+ */
+public class ArtificialSingleSampleReadStream implements Iterable<SAMRecord> {
+    private SAMFileHeader header;
+    private String readGroupID;            // read group every generated read is tagged with (via the RG attribute)
+    private int numContigs;                // number of contigs reads are distributed across
+    private int numStacksPerContig;        // stacks (groups of reads sharing an alignment start) per contig
+    private int minReadsPerStack;          // reads per stack drawn from [minReadsPerStack, maxReadsPerStack]
+    private int maxReadsPerStack;
+    private int minDistanceBetweenStacks;  // gap between stack starts drawn from [min, max]
+    private int maxDistanceBetweenStacks;
+    private int minReadLength;             // read length drawn from [minReadLength, maxReadLength]
+    private int maxReadLength;
+    private int numUnmappedReads;          // unmapped reads appended after all mapped reads
+
+    private static final String READ_GROUP_TAG = "RG";
+
+    /**
+     * Create a stream with the given shape parameters (see the class comment for their
+     * meaning).  The parameters are validated immediately.
+     *
+     * @throws ReviewedGATKException if any parameter is invalid
+     */
+    public ArtificialSingleSampleReadStream( SAMFileHeader header,
+                                             String readGroupID,
+                                             int numContigs,
+                                             int numStacksPerContig,
+                                             int minReadsPerStack,
+                                             int maxReadsPerStack,
+                                             int minDistanceBetweenStacks,
+                                             int maxDistanceBetweenStacks,
+                                             int minReadLength,
+                                             int maxReadLength,
+                                             int numUnmappedReads ) {
+        this.header = header;
+        this.readGroupID = readGroupID;
+        this.numContigs = numContigs;
+        this.numStacksPerContig = numStacksPerContig;
+        this.minReadsPerStack = minReadsPerStack;
+        this.maxReadsPerStack = maxReadsPerStack;
+        this.minDistanceBetweenStacks = minDistanceBetweenStacks;
+        this.maxDistanceBetweenStacks = maxDistanceBetweenStacks;
+        this.minReadLength = minReadLength;
+        this.maxReadLength = maxReadLength;
+        this.numUnmappedReads = numUnmappedReads;
+
+        validateStreamParameters();
+    }
+
+    // Sanity-check the stream configuration; every failure is a ReviewedGATKException.
+    private void validateStreamParameters() {
+        if ( header == null || readGroupID == null ) {
+            throw new ReviewedGATKException("null SAMFileHeader or read group ID") ;
+        }
+
+        // the read group must already exist in the header; this class only tags reads with it
+        if ( header.getReadGroup(readGroupID) == null ) {
+            throw new ReviewedGATKException(String.format("Read group %s not found in SAMFileHeader", readGroupID));
+        }
+
+        if ( numContigs < 0 || numStacksPerContig < 0 || minReadsPerStack < 0 || maxReadsPerStack < 0 ||
+             minDistanceBetweenStacks < 0 || maxDistanceBetweenStacks < 0 || minReadLength < 0 || maxReadLength < 0 ||
+             numUnmappedReads < 0 ) {
+            throw new ReviewedGATKException("Read stream parameters must be >= 0");
+        }
+
+        if ( (numContigs == 0 && numStacksPerContig != 0) || (numContigs != 0 && numStacksPerContig == 0) ) {
+            throw new ReviewedGATKException("numContigs and numStacksPerContig must either both be > 0, or both be 0");
+        }
+
+        // each min/max pair must describe a non-empty range
+        if ( minReadsPerStack > maxReadsPerStack ) {
+            throw new ReviewedGATKException("minReadsPerStack > maxReadsPerStack");
+        }
+
+        if ( minDistanceBetweenStacks > maxDistanceBetweenStacks ) {
+            throw new ReviewedGATKException("minDistanceBetweenStacks > maxDistanceBetweenStacks");
+        }
+
+        if ( minReadLength > maxReadLength ) {
+            throw new ReviewedGATKException("minReadLength > maxReadLength");
+        }
+    }
+
+    /**
+     * Generate the reads and return an iterator over them.  Each call re-generates the
+     * reads, so repeated calls may yield different random draws.
+     */
+    public Iterator<SAMRecord> iterator() {
+        return makeReads().iterator();
+    }
+
+    /** Same as iterator(), adapted to the GATKSAMIterator interface. */
+    public GATKSAMIterator getGATKSAMIterator() {
+        return GATKSAMIteratorAdapter.adapt(iterator());
+    }
+
+    /**
+     * Generate all reads in the stream: numStacksPerContig stacks on each of numContigs
+     * contigs, followed by numUnmappedReads unmapped reads.
+     *
+     * @return the generated reads, in contig/position order with unmapped reads last
+     */
+    public Collection<SAMRecord> makeReads() {
+        Collection<SAMRecord> reads = new ArrayList<SAMRecord>(numContigs * numStacksPerContig * maxReadsPerStack);
+
+        for ( int contig = 0; contig < numContigs; contig++ ) {
+            int alignmentStart = 1;
+
+            for ( int stack = 0; stack < numStacksPerContig; stack++ ) {
+                reads.addAll(makeReadStack(contig, alignmentStart, MathUtils.randomIntegerInRange(minReadsPerStack, maxReadsPerStack)));
+                // advance to the next stack's start by a random gap in [min, max]
+                alignmentStart += MathUtils.randomIntegerInRange(minDistanceBetweenStacks, maxDistanceBetweenStacks);
+            }
+        }
+
+        // unmapped reads go last, as in a coordinate-sorted BAM
+        if ( numUnmappedReads > 0 ) {
+            reads.addAll(makeReadStack(SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX, SAMRecord.NO_ALIGNMENT_START, numUnmappedReads));
+        }
+
+        return reads;
+    }
+
+    /**
+     * Create stackSize reads sharing the same contig and alignment start, each with an
+     * independently-drawn random length, tagged with this stream's read group.
+     * All reads share the name "foo".
+     */
+    private Collection<SAMRecord> makeReadStack( int contig, int alignmentStart, int stackSize ) {
+        Collection<SAMRecord> readStack = new ArrayList<SAMRecord>(stackSize);
+
+        for ( int i = 0; i < stackSize; i++ ) {
+            SAMRecord read = ArtificialSAMUtils.createArtificialRead(header,
+                                                                     "foo",
+                                                                     contig,
+                                                                     alignmentStart,
+                                                                     MathUtils.randomIntegerInRange(minReadLength, maxReadLength));
+            read.setAttribute(READ_GROUP_TAG, readGroupID);
+            readStack.add(read);
+        }
+
+        return readStack;
+    }
+
+    // --- simple accessors for the stream's nominal properties ---
+
+    public SAMFileHeader getHeader() {
+        return header;
+    }
+
+    public String getReadGroupID() {
+        return readGroupID;
+    }
+
+    public int getNumContigs() {
+        return numContigs;
+    }
+
+    public int getNumStacksPerContig() {
+        return numStacksPerContig;
+    }
+
+    public int getMinReadsPerStack() {
+        return minReadsPerStack;
+    }
+
+    public int getMaxReadsPerStack() {
+        return maxReadsPerStack;
+    }
+
+    public int getMinDistanceBetweenStacks() {
+        return minDistanceBetweenStacks;
+    }
+
+    public int getMaxDistanceBetweenStacks() {
+        return maxDistanceBetweenStacks;
+    }
+
+    public int getMinReadLength() {
+        return minReadLength;
+    }
+
+    public int getMaxReadLength() {
+        return maxReadLength;
+    }
+
+    public int getNumUnmappedReads() {
+        return numUnmappedReads;
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/sam/ArtificialSingleSampleReadStreamAnalyzer.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/sam/ArtificialSingleSampleReadStreamAnalyzer.java
new file mode 100644
index 0000000..3c7b2af
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/sam/ArtificialSingleSampleReadStreamAnalyzer.java
@@ -0,0 +1,282 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.sam;
+
+import htsjdk.samtools.SAMRecord;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * A class for analyzing and validating the read stream produced by an ArtificialSingleSampleReadStream.
+ *
+ * Collects various statistics about the stream of reads it's fed, and validates the stream
+ * by checking whether the collected statistics match the nominal properties of the stream.
+ *
+ * Subclasses are expected to override the validate() method in order to check whether an artificial
+ * read stream has been *transformed* in some way (eg., by downsampling or some other process), rather
+ * than merely checking whether the stream matches its original properties.
+ *
+ * Usage is simple:
+ *
+ * ArtificialSingleSampleReadStreamAnalyzer analyzer = new ArtificialSingleSampleReadStreamAnalyzer(originalStream);
+ * analyzer.analyze(originalOrTransformedStream);
+ * analyzer.validate();  // override this method if you want to check whether the stream has been transformed
+ *                       // in a certain way relative to the original stream
+ *
+ * @author David Roazen
+ */
+public class ArtificialSingleSampleReadStreamAnalyzer {
+    protected ArtificialSingleSampleReadStream originalStream;
+    protected SAMRecord lastRead;                // previous read seen; used to detect contig/stack boundaries
+    protected int totalReads;
+    protected boolean allSamplesMatch;           // true while every read's RG attribute matched the original stream's
+    protected int numContigs;
+    protected List<Integer> stacksPerContig;     // completed stack counts, one entry per finished contig
+    protected Integer minReadsPerStack;          // null until at least one stack has completed
+    protected Integer maxReadsPerStack;
+    protected Integer minDistanceBetweenStacks;  // null unless some contig had more than one stack
+    protected Integer maxDistanceBetweenStacks;
+    protected Integer minReadLength;             // null until the first read is seen
+    protected Integer maxReadLength;
+    protected int numUnmappedReads;
+
+    protected int currentContigNumStacks;        // stacks seen so far on the contig currently being scanned
+    protected int currentStackNumReads;          // reads seen so far in the stack currently being scanned
+
+    /**
+     * Construct a new read stream analyzer, providing an ArtificialSingleSampleReadStream that will
+     * serve as the basis for comparison after the analysis is complete.
+     *
+     * @param originalStream the original ArtificialSingleSampleReadStream upon which the stream
+     *                       that will be fed to the analyzer is based
+     */
+    public ArtificialSingleSampleReadStreamAnalyzer( ArtificialSingleSampleReadStream originalStream ) {
+        this.originalStream = originalStream;
+        reset();
+    }
+
+    /**
+     * Reset all read stream statistics collected by this analyzer to prepare for a fresh run
+     */
+    public void reset() {
+        lastRead = null;
+        totalReads = 0;
+        allSamplesMatch = true;
+        numContigs = 0;
+        stacksPerContig = new ArrayList<Integer>();
+        minReadsPerStack = null;
+        maxReadsPerStack = null;
+        minDistanceBetweenStacks = null;
+        maxDistanceBetweenStacks = null;
+        minReadLength = null;
+        maxReadLength = null;
+        numUnmappedReads = 0;
+        currentContigNumStacks = 0;
+        currentStackNumReads = 0;
+    }
+
+    /**
+     * Collect statistics on the stream of reads passed in.  Feeds each read through
+     * update() and then closes out the trailing stack/contig via finalizeStats().
+     *
+     * @param stream the stream of reads to analyze
+     */
+    public void analyze( Iterable<SAMRecord> stream ) {
+        for ( SAMRecord read : stream ) {
+            update(read);
+        }
+        finalizeStats();
+    }
+
+    /**
+     * Validate the stream by checking whether our collected statistics match the properties of the
+     * original stream. Throws a ReviewedGATKException if the stream is invalid.
+     *
+     * Override this method if you want to check whether the stream has been transformed in some
+     * way relative to the original stream.
+     */
+    public void validate() {
+        // special case: a stream configured to be completely empty
+        if ( (originalStream.getNumContigs() == 0 || originalStream.getNumStacksPerContig() == 0) && originalStream.getNumUnmappedReads() == 0 ) {
+            if ( totalReads != 0 ) {
+                throw new ReviewedGATKException("got reads from the stream, but the stream was configured to have 0 reads");
+            }
+            return;  // no further validation needed for the 0-reads case
+        }
+        else if ( totalReads == 0 ) {
+            throw new ReviewedGATKException("got no reads from the stream, but the stream was configured to have > 0 reads");
+        }
+
+        if ( ! allSamplesMatch ) {
+            throw new ReviewedGATKException("some reads had the wrong sample");
+        }
+
+        if ( numContigs != originalStream.getNumContigs() ) {
+            throw new ReviewedGATKException("number of contigs not correct");
+        }
+
+        if ( stacksPerContig.size() != originalStream.getNumContigs() ) {
+            throw new ReviewedGATKException(String.format("bug in analyzer code: calculated sizes for %d contigs even though there were only %d contigs",
+                                                           stacksPerContig.size(), originalStream.getNumContigs()));
+        }
+
+        for ( int contigStackCount : stacksPerContig ) {
+            if ( contigStackCount != originalStream.getNumStacksPerContig() ) {
+                throw new ReviewedGATKException("contig had incorrect number of stacks");
+            }
+        }
+
+        // observed per-stack read counts must fall within the configured [min, max] range
+        if ( originalStream.getNumStacksPerContig() > 0 ) {
+            if ( minReadsPerStack < originalStream.getMinReadsPerStack() ) {
+                throw new ReviewedGATKException("stack had fewer than the minimum number of reads");
+            }
+            if ( maxReadsPerStack > originalStream.getMaxReadsPerStack() ) {
+                throw new ReviewedGATKException("stack had more than the maximum number of reads");
+            }
+        }
+        else if ( minReadsPerStack != null || maxReadsPerStack != null ) {
+            throw new ReviewedGATKException("bug in analyzer code: reads per stack was calculated even though 0 stacks per contig was specified");
+        }
+
+        // observed gaps between stacks must fall within the configured [min, max] range
+        if ( originalStream.getNumStacksPerContig() > 1 ) {
+            if ( minDistanceBetweenStacks < originalStream.getMinDistanceBetweenStacks() ) {
+                throw new ReviewedGATKException("stacks were separated by less than the minimum distance");
+            }
+            if ( maxDistanceBetweenStacks > originalStream.getMaxDistanceBetweenStacks() ) {
+                throw new ReviewedGATKException("stacks were separated by more than the maximum distance");
+            }
+        }
+        else if ( minDistanceBetweenStacks != null || maxDistanceBetweenStacks != null ) {
+            throw new ReviewedGATKException("bug in analyzer code: distance between stacks was calculated even though numStacksPerContig was <= 1");
+        }
+
+        if ( minReadLength < originalStream.getMinReadLength() ) {
+            throw new ReviewedGATKException("read was shorter than the minimum allowed length");
+        }
+        if ( maxReadLength > originalStream.getMaxReadLength() ) {
+            throw new ReviewedGATKException("read was longer than the maximum allowed length");
+        }
+
+        if ( numUnmappedReads != originalStream.getNumUnmappedReads() ) {
+            throw new ReviewedGATKException(String.format("wrong number of unmapped reads: requested %d but saw %d",
+                                                           originalStream.getNumUnmappedReads(), numUnmappedReads));
+        }
+
+        if ( (originalStream.getNumContigs() == 0 || originalStream.getNumStacksPerContig() == 0) &&
+             numUnmappedReads != totalReads ) {
+            throw new ReviewedGATKException("stream should have consisted only of unmapped reads, but saw some mapped reads");
+        }
+    }
+
+    /**
+     * Incorporate one read into the running statistics.  Reads must be fed in the stream's
+     * natural order (mapped reads grouped by contig and alignment start, unmapped reads
+     * last) for the boundary detection below to work.
+     *
+     * @param read the next read in the stream
+     */
+    public void update( SAMRecord read ) {
+        if ( read.getReadUnmappedFlag() ) {
+            numUnmappedReads++;
+
+            // first unmapped read after some mapped reads: close out the final contig.
+            // processContigChange() also counts a "new" contig that doesn't exist,
+            // so undo that increment here.
+            if ( numUnmappedReads == 1 && lastRead != null ) {
+                processContigChange();
+                numContigs--;
+            }
+        }
+        else if ( lastRead == null ) {
+            // the very first mapped read opens the first contig and first stack
+            numContigs = 1;
+            currentContigNumStacks = 1;
+            currentStackNumReads = 1;
+        }
+        else if ( ! read.getReferenceIndex().equals(lastRead.getReferenceIndex()) ) {
+            processContigChange();
+        }
+        else if ( read.getAlignmentStart() != lastRead.getAlignmentStart() ) {
+            processStackChangeWithinContig(read);
+        }
+        else {
+            // same contig, same start: the read belongs to the current stack
+            currentStackNumReads++;
+        }
+
+        updateReadLength(read.getReadLength());
+        allSamplesMatch = allSamplesMatch && readHasCorrectSample(read);
+        totalReads++;
+
+        lastRead = read;
+    }
+
+
+    // Close out the contig that just ended and open a new one with one stack of one read.
+    private void processContigChange() {
+        numContigs++;
+
+        stacksPerContig.add(currentContigNumStacks);
+        currentContigNumStacks = 1;
+
+        updateReadsPerStack(currentStackNumReads);
+        currentStackNumReads = 1;
+    }
+
+    // Close out the stack that just ended on the current contig and open a new one.
+    private void processStackChangeWithinContig( SAMRecord read ) {
+        currentContigNumStacks++;
+
+        updateReadsPerStack(currentStackNumReads);
+        currentStackNumReads = 1;
+
+        updateDistanceBetweenStacks(read.getAlignmentStart() - lastRead.getAlignmentStart());
+    }
+
+    // Fold a completed stack's read count into the min/max statistics.
+    private void updateReadsPerStack( int stackReadCount ) {
+        if ( minReadsPerStack == null || stackReadCount < minReadsPerStack ) {
+            minReadsPerStack = stackReadCount;
+        }
+        if ( maxReadsPerStack == null || stackReadCount > maxReadsPerStack ) {
+            maxReadsPerStack = stackReadCount;
+        }
+    }
+
+    // Fold an observed gap between consecutive stack starts into the min/max statistics.
+    private void updateDistanceBetweenStacks( int stackDistance ) {
+        if ( minDistanceBetweenStacks == null || stackDistance < minDistanceBetweenStacks ) {
+            minDistanceBetweenStacks = stackDistance;
+        }
+        if ( maxDistanceBetweenStacks == null || stackDistance > maxDistanceBetweenStacks ) {
+            maxDistanceBetweenStacks = stackDistance;
+        }
+    }
+
+    // Fold an observed read length into the min/max statistics.
+    private void updateReadLength( int readLength ) {
+        if ( minReadLength == null || readLength < minReadLength ) {
+            minReadLength = readLength;
+        }
+        if ( maxReadLength == null || readLength > maxReadLength ) {
+            maxReadLength = readLength;
+        }
+    }
+
+    // NOTE(review): compares read group IDs (the RG attribute), not sample names --
+    // assumes one sample per read group in these artificial streams.
+    private boolean readHasCorrectSample( SAMRecord read ) {
+        return originalStream.getReadGroupID().equals(read.getAttribute("RG"));
+    }
+
+    /**
+     * Close out the final stack/contig after the last mapped read.  Must be called once
+     * after the final update(); analyze() does this automatically.
+     */
+    public void finalizeStats() {
+        if ( lastRead != null && ! lastRead.getReadUnmappedFlag() ) {
+            stacksPerContig.add(currentContigNumStacks);
+            updateReadsPerStack(currentStackNumReads);
+        }
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/sam/CigarUtils.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/sam/CigarUtils.java
new file mode 100644
index 0000000..3017b56
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/sam/CigarUtils.java
@@ -0,0 +1,273 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.sam;
+
+import com.google.java.contract.Ensures;
+import htsjdk.samtools.Cigar;
+import htsjdk.samtools.CigarElement;
+import htsjdk.samtools.CigarOperator;
+import htsjdk.samtools.TextCigarCodec;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+import org.broadinstitute.gatk.utils.smithwaterman.Parameters;
+import org.broadinstitute.gatk.utils.smithwaterman.SWPairwiseAlignment;
+import org.broadinstitute.gatk.utils.smithwaterman.SmithWaterman;
+
+import java.util.Arrays;
+import java.util.Stack;
+
/**
 * Static utility methods for manipulating, validating, and computing {@link Cigar}
 * objects: merging adjacent elements, inverting element order, validity checks,
 * reference-base counting, and Smith-Waterman-based cigar generation of an
 * alternate sequence against a reference (with sequential indel left-alignment).
 */
public class CigarUtils {

    /**
     * Combines equal adjacent elements of a Cigar object
     *
     * Note: adjacent I/D (or D/I) pairs are also merged into a single element,
     * and the merged element keeps the operator of the FIRST element of the run.
     *
     * @param rawCigar the cigar object
     * @return a combined cigar object
     */
    public static Cigar combineAdjacentCigarElements(Cigar rawCigar) {
        Cigar combinedCigar = new Cigar();
        CigarElement lastElement = null;
        int lastElementLength = 0;
        for (CigarElement cigarElement : rawCigar.getCigarElements()) {
            // merge when operators match, or when an insertion directly abuts a deletion
            if (lastElement != null &&
                    ((lastElement.getOperator() == cigarElement.getOperator()) ||
                            (lastElement.getOperator() == CigarOperator.I && cigarElement.getOperator() == CigarOperator.D) ||
                            (lastElement.getOperator() == CigarOperator.D && cigarElement.getOperator() == CigarOperator.I)))
                lastElementLength += cigarElement.getLength();
            else
            {
                if (lastElement != null)
                    combinedCigar.add(new CigarElement(lastElementLength, lastElement.getOperator()));

                lastElement = cigarElement;
                lastElementLength = cigarElement.getLength();
            }
        }
        // flush the final pending run
        if (lastElement != null)
            combinedCigar.add(new CigarElement(lastElementLength, lastElement.getOperator()));

        return combinedCigar;
    }

    /**
     * Reverses the order of the elements of a cigar (e.g. 2M1I3M becomes 3M1I2M).
     *
     * @param cigar the cigar to invert
     * @return a new cigar with the same elements in reverse order
     */
    public static Cigar invertCigar (Cigar cigar) {
        Stack<CigarElement> cigarStack = new Stack<CigarElement>();
        for (CigarElement cigarElement : cigar.getCigarElements())
            cigarStack.push(cigarElement);

        Cigar invertedCigar = new Cigar();
        while (!cigarStack.isEmpty())
            invertedCigar.add(cigarStack.pop());

        return invertedCigar;
    }

    /**
     * Checks whether or not the read has any cigar element that is not H or S
     *
     * @param read the read
     * @return true if it has any M, I or D, false otherwise
     */
    public static boolean readHasNonClippedBases(GATKSAMRecord read) {
        for (CigarElement cigarElement : read.getCigar().getCigarElements())
            if (cigarElement.getOperator() != CigarOperator.SOFT_CLIP && cigarElement.getOperator() != CigarOperator.HARD_CLIP)
                return true;
        return false;
    }

    /**
     * Parses a cigar from its SAM text representation (e.g. "10M2I8M").
     *
     * @param cigarString the textual cigar
     * @return the decoded Cigar object
     */
    public static Cigar cigarFromString(String cigarString) {
        return TextCigarCodec.decode(cigarString);
    }

    /**
    * A valid cigar object obeys the following rules:
    *  - No Hard/Soft clips in the middle of the read
    *  - No deletions in the beginning / end of the read
    *  - No repeated adjacent element (e.g. 1M2M -> this should be 3M)
    *  - No consecutive I/D elements
    **/
    public static boolean isCigarValid(Cigar cigar) {
        if (cigar.isValid(null, -1) == null) {                                                                          // This should take care of most invalid Cigar Strings (picard's "exhaustive" implementation)

            Stack<CigarElement> cigarElementStack = new Stack<CigarElement>();                                          // Stack to invert cigar string to find ending operator
            CigarOperator startingOp = null;
            CigarOperator endingOp = null;

            // check if it doesn't start with deletions
            boolean readHasStarted = false;                                                                             // search the list of elements for the starting operator
            for (CigarElement cigarElement : cigar.getCigarElements()) {
                if (!readHasStarted) {
                    if (cigarElement.getOperator() != CigarOperator.SOFT_CLIP && cigarElement.getOperator() != CigarOperator.HARD_CLIP) {
                        readHasStarted = true;
                        startingOp = cigarElement.getOperator();
                    }
                }
                cigarElementStack.push(cigarElement);
            }

            // walk the stack (i.e. the cigar from the end) to find the last non-clip operator
            while (!cigarElementStack.empty()) {
                CigarElement cigarElement = cigarElementStack.pop();
                if (cigarElement.getOperator() != CigarOperator.SOFT_CLIP && cigarElement.getOperator() != CigarOperator.HARD_CLIP) {
                    endingOp = cigarElement.getOperator();
                    break;
                }
            }

            if (startingOp != CigarOperator.DELETION && endingOp != CigarOperator.DELETION && startingOp != CigarOperator.SKIPPED_REGION && endingOp != CigarOperator.SKIPPED_REGION)
                return true;                                                                                          // we don't accept reads starting or ending in deletions (add any other constraint here)
        }

        return false;
    }

    /**
     * Sums the lengths of the cigar elements in [cigarStartIndex, cigarEndIndex).
     *
     * NOTE(review): soft (S) and hard (H) clips are counted here alongside M/D/N,
     * even though they do not consume reference bases in the SAM spec -- presumably
     * deliberate for clipping-offset arithmetic; confirm against callers.
     *
     * @param read            the read whose cigar is inspected
     * @param cigarStartIndex first cigar element index to include (0-based, inclusive)
     * @param cigarEndIndex   last cigar element index (exclusive)
     * @return the summed element lengths (insertions contribute 0)
     * @throws ReviewedGATKException on any operator other than M/S/D/N/H/I
     */
    public static final int countRefBasesBasedOnCigar(final GATKSAMRecord read, final int cigarStartIndex, final int cigarEndIndex){
        int result = 0;
        for(int i = cigarStartIndex; i<cigarEndIndex;i++){
            final CigarElement cigarElement = read.getCigar().getCigarElement(i);
            switch (cigarElement.getOperator()) {
                case M:
                case S:
                case D:
                case N:
                case H:
                    result += cigarElement.getLength();
                    break;
                case I:
                    break;
                default:
                    throw new ReviewedGATKException("Unsupported cigar operator: " + cigarElement.getOperator());
            }
        }
        return result;
    }

    // used in the bubble state machine to apply Smith-Waterman to the bubble sequence
    // these values were chosen via optimization against the NA12878 knowledge base
    public static final Parameters NEW_SW_PARAMETERS = new Parameters(200, -150, -260, -11);

    // N-padding added to both sequences before Smith-Waterman so the alignment is
    // anchored at position 0 (see isSWFailure)
    private final static String SW_PAD = "NNNNNNNNNN";

    /**
     * Calculate the cigar elements for this path against the reference sequence
     *
     * @param refSeq the reference sequence that all of the bases in this path should align to
     * @param altSeq the alternate (path) sequence to align against refSeq
     * @return a Cigar mapping this path to refSeq, or null if no reasonable alignment could be found
     */
    public static Cigar calculateCigar(final byte[] refSeq, final byte[] altSeq) {
        if ( altSeq.length == 0 ) {
            // horrible edge case from the unit tests, where this path has no bases
            return new Cigar(Arrays.asList(new CigarElement(refSeq.length, CigarOperator.D)));
        }

        final Cigar nonStandard;

        final String paddedRef = SW_PAD + new String(refSeq) + SW_PAD;
        final String paddedPath = SW_PAD + new String(altSeq) + SW_PAD;
        final SmithWaterman alignment = new SWPairwiseAlignment( paddedRef.getBytes(), paddedPath.getBytes(), NEW_SW_PARAMETERS);

        if ( isSWFailure(alignment) ) {
            return null;
        }


        // cut off the padding bases
        final int baseStart = SW_PAD.length();
        final int baseEnd = paddedPath.length() - SW_PAD.length() - 1; // -1 because it's inclusive
        nonStandard = AlignmentUtils.trimCigarByBases(alignment.getCigar(), baseStart, baseEnd);

        // pad out with a trailing deletion if the alignment does not span the full reference
        if ( nonStandard.getReferenceLength() != refSeq.length ) {
            nonStandard.add(new CigarElement(refSeq.length - nonStandard.getReferenceLength(), CigarOperator.D));
        }

        // finally, return the cigar with all indels left aligned
        return leftAlignCigarSequentially(nonStandard, refSeq, altSeq, 0, 0);
    }

    /**
     * Make sure that the SW didn't fail in some terrible way, and throw exception if it did
     */
    private static boolean isSWFailure(final SmithWaterman alignment) {
        // check that the alignment starts at the first base, which it should given the padding
        if ( alignment.getAlignmentStart2wrt1() > 0 ) {
            return true;
//          throw new IllegalStateException("SW failure ref " + paddedRef + " vs. " + paddedPath + " should always start at 0, but got " + alignment.getAlignmentStart2wrt1() + " with cigar " + alignment.getCigar());
        }

        // check that we aren't getting any S operators (which would be very bad downstream)
        for ( final CigarElement ce : alignment.getCigar().getCigarElements() ) {
            if ( ce.getOperator() == CigarOperator.S )
                return true;
            // soft clips at the end of the alignment are really insertions
//                throw new IllegalStateException("SW failure ref " + paddedRef + " vs. " + paddedPath + " should never contain S operators but got cigar " + alignment.getCigar());
        }

        return false;
    }

    /**
     * Left align the given cigar sequentially. This is needed because AlignmentUtils doesn't accept cigars with more than one indel in them.
     * This is a target of future work to incorporate and generalize into AlignmentUtils for use by others.
     * @param cigar     the cigar to left align
     * @param refSeq    the reference byte array
     * @param readSeq   the read byte array
     * @param refIndex  0-based alignment start position on ref
     * @param readIndex 0-based alignment start position on read
     * @return          the left-aligned cigar
     */
    @Ensures({"cigar != null", "refSeq != null", "readSeq != null", "refIndex >= 0", "readIndex >= 0"})
    public static Cigar leftAlignCigarSequentially(final Cigar cigar, final byte[] refSeq, final byte[] readSeq, int refIndex, int readIndex) {
        final Cigar cigarToReturn = new Cigar();
        Cigar cigarToAlign = new Cigar();
        for (int i = 0; i < cigar.numCigarElements(); i++) {
            final CigarElement ce = cigar.getCigarElement(i);
            // accumulate elements until an indel is hit, then left-align that one-indel chunk
            if (ce.getOperator() == CigarOperator.D || ce.getOperator() == CigarOperator.I) {
                cigarToAlign.add(ce);
                final Cigar leftAligned = AlignmentUtils.leftAlignSingleIndel(cigarToAlign, refSeq, readSeq, refIndex, readIndex, false);
                for ( final CigarElement toAdd : leftAligned.getCigarElements() ) { cigarToReturn.add(toAdd); }
                refIndex += cigarToAlign.getReferenceLength();
                readIndex += cigarToAlign.getReadLength();
                cigarToAlign = new Cigar();
            } else {
                cigarToAlign.add(ce);
            }
        }
        // append any trailing indel-free chunk unchanged
        if( !cigarToAlign.isEmpty() ) {
            for( final CigarElement toAdd : cigarToAlign.getCigarElements() ) {
                cigarToReturn.add(toAdd);
            }
        }

        // sanity check: left-alignment must not change the reference span
        final Cigar result = AlignmentUtils.consolidateCigar(cigarToReturn);
        if( result.getReferenceLength() != cigar.getReferenceLength() )
            throw new IllegalStateException("leftAlignCigarSequentially failed to produce a valid CIGAR.  Reference lengths differ.  Initial cigar " + cigar + " left aligned into " + result);
        return result;
    }
}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/sam/GATKSAMFileWriter.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/sam/GATKSAMFileWriter.java
new file mode 100644
index 0000000..e04787f
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/sam/GATKSAMFileWriter.java
@@ -0,0 +1,56 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.sam;
+
+import htsjdk.samtools.SAMFileHeader;
+import htsjdk.samtools.SAMFileWriter;
+
+/**
+ * A writer that will allow unsorted BAM files to be written
+ * and sorted on-the-fly.
+ *
+ * @author mhanna
+ * @version 0.1
+ */
+public interface GATKSAMFileWriter extends SAMFileWriter {
+    /**
+     * Writes the given custom header to SAM file output.
+     * @param header The header to write.
+     */
+    public void writeHeader(SAMFileHeader header);
+
+    /**
+     * Set Whether the BAM file to create is actually presorted.
+     * @param presorted True if the BAM file is presorted.  False otherwise.
+     */    
+    public void setPresorted(boolean presorted);
+
+    /**
+     * Set how many records in RAM the BAM file stores when sorting on-the-fly.
+     * @param maxRecordsInRam Max number of records in RAM.
+     */
+    public void setMaxRecordsInRam(int maxRecordsInRam);
+}
\ No newline at end of file
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/sam/GATKSAMReadGroupRecord.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/sam/GATKSAMReadGroupRecord.java
new file mode 100644
index 0000000..6d85d7d
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/sam/GATKSAMReadGroupRecord.java
@@ -0,0 +1,116 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.sam;
+
+import htsjdk.samtools.SAMReadGroupRecord;
+import org.broadinstitute.gatk.utils.NGSPlatform;
+
+/**
+ * @author ebanks
+ * GATKSAMReadGroupRecord
+ *
+ * this class extends the samtools SAMReadGroupRecord class and caches important
+ * (and oft-accessed) data that's not already cached by the SAMReadGroupRecord class
+ *
+ */
+public class GATKSAMReadGroupRecord extends SAMReadGroupRecord {
+    // the SAMReadGroupRecord data we're caching
+    private String mSample = null;
+    private String mPlatform = null;
+    private NGSPlatform mNGSPlatform = null;
+
+    // because some values can be null, we don't want to duplicate effort
+    private boolean retrievedSample = false;
+    private boolean retrievedPlatform = false;
+    private boolean retrievedNGSPlatform = false;
+
+    public GATKSAMReadGroupRecord(final String id) {
+        super(id);
+    }
+
+    public GATKSAMReadGroupRecord(SAMReadGroupRecord record) {
+        super(record.getReadGroupId(), record);
+    }
+
+    /**
+     * Get the NGSPlatform enum telling us the platform of this read group
+     *
+     * This function call is caching, so subsequent calls to it are free, while
+     * the first time it's called there's a bit of work to resolve the enum
+     *
+     * @return an NGSPlatform enum value
+     */
+    public NGSPlatform getNGSPlatform() {
+        if ( ! retrievedNGSPlatform ) {
+            mNGSPlatform = NGSPlatform.fromReadGroupPL(getPlatform());
+            retrievedNGSPlatform = true;
+        }
+
+        return mNGSPlatform;
+    }
+
+    @Override
+    public String toString() {
+        return "GATKSAMReadGroupRecord @RG:" + getReadGroupId();
+    }
+
+    ///////////////////////////////////////////////////////////////////////////////
+    // *** The following methods are overloaded to cache the appropriate data ***//
+    ///////////////////////////////////////////////////////////////////////////////
+
+    @Override
+    public String getSample() {
+        if ( !retrievedSample ) {
+            mSample = super.getSample();
+            retrievedSample = true;
+        }
+        return mSample;
+    }
+
+    @Override
+    public void setSample(String s) {
+        super.setSample(s);
+        mSample = s;
+        retrievedSample = true;
+    }
+
+    @Override
+    public String getPlatform() {
+        if ( !retrievedPlatform ) {
+            mPlatform = super.getPlatform();
+            retrievedPlatform = true;
+        }
+        return mPlatform;
+    }
+
+    @Override
+    public void setPlatform(String s) {
+        super.setPlatform(s);
+        mPlatform = s;
+        retrievedPlatform = true;
+        retrievedNGSPlatform = false; // recalculate the NGSPlatform
+    }
+}
\ No newline at end of file
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/sam/GATKSAMRecord.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/sam/GATKSAMRecord.java
new file mode 100644
index 0000000..8a9f044
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/sam/GATKSAMRecord.java
@@ -0,0 +1,623 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.sam;
+
+import com.google.java.contract.Ensures;
+import htsjdk.samtools.*;
+import org.broadinstitute.gatk.utils.NGSPlatform;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+import org.broadinstitute.gatk.utils.recalibration.EventType;
+
+import java.util.*;
+
+/**
+ * @author ebanks, depristo
+ * GATKSAMRecord
+ *
+ * this class extends the samtools BAMRecord class (and SAMRecord) and caches important
+ * (and oft-accessed) data that's not already cached by the SAMRecord class
+ *
+ * IMPORTANT NOTE: Because ReadGroups are not set through the SAMRecord,
+ *   if they are ever modified externally then one must also invoke the
+ *   setReadGroup() method here to ensure that the cache is kept up-to-date.
+ *
+ * WARNING -- GATKSAMRecords cache several values (that are expensive to compute)
+ * that depending on the inferred insert size and alignment starts and stops of this read and its mate.
+ * Changing these values in any way will invalidate the cached value. However, we do not monitor those setter
+ * functions, so modifying a GATKSAMRecord in any way may result in stale cached values.
+ */
+public class GATKSAMRecord extends SAMRecord implements Cloneable {
+    // Base Quality Score Recalibrator specific attribute tags
+    public static final String BQSR_BASE_INSERTION_QUALITIES = "BI";                // base qualities for insertions
+    public static final String BQSR_BASE_DELETION_QUALITIES = "BD";                 // base qualities for deletions
+
+    /**
+     * The default quality score for an insertion or deletion, if
+     * none are provided for this read.
+     */
+    public static final byte DEFAULT_INSERTION_DELETION_QUAL = (byte)45;
+
+    // the SAMRecord data we're caching
+    private String mReadString = null;
+    private GATKSAMReadGroupRecord mReadGroup = null;
+    private final static int UNINITIALIZED = -1;
+    private int softStart = UNINITIALIZED;
+    private int softEnd = UNINITIALIZED;
+    private Integer adapterBoundary = null;
+
+    private boolean isStrandlessRead = false;
+
+    // because some values can be null, we don't want to duplicate effort
+    private boolean retrievedReadGroup = false;
+
+    // These temporary attributes were added here to make life easier for
+    // certain algorithms by providing a way to label or attach arbitrary data to
+    // individual GATKSAMRecords.
+    // These attributes exist in memory only, and are never written to disk.
+    private Map<Object, Object> temporaryAttributes;
+
    /**
     * HACK TO CREATE GATKSAMRECORD WITH ONLY A HEADER FOR TESTING PURPOSES ONLY
     *
     * Builds an otherwise-empty SAMRecord around the header and delegates to
     * the SAMRecord-copying constructor.
     *
     * @param header the SAM file header for the new record
     */
    public GATKSAMRecord(final SAMFileHeader header) {
        this(new SAMRecord(header));
    }
+
+    /**
+     * HACK TO CREATE GATKSAMRECORD BASED ONLY A SAMRECORD FOR TESTING PURPOSES ONLY
+     * @param read
+     */
+    public GATKSAMRecord(final SAMRecord read) {
+        super(read.getHeader());
+        super.setReferenceIndex(read.getReferenceIndex());
+        super.setAlignmentStart(read.getAlignmentStart());
+        super.setReadName(read.getReadName());
+        super.setMappingQuality(read.getMappingQuality());
+        // indexing bin done below
+        super.setCigar(read.getCigar());
+        super.setFlags(read.getFlags());
+        super.setMateReferenceIndex(read.getMateReferenceIndex());
+        super.setMateAlignmentStart(read.getMateAlignmentStart());
+        super.setInferredInsertSize(read.getInferredInsertSize());
+        SAMReadGroupRecord samRG = read.getReadGroup();
+        SAMBinaryTagAndValue samAttr = GATKBin.getReadBinaryAttributes(read);
+        if (samAttr == null) {
+            clearAttributes();
+        } else {
+            setAttributes(samAttr);
+        }
+        if (samRG != null) {
+            GATKSAMReadGroupRecord rg = new GATKSAMReadGroupRecord(samRG);
+            setReadGroup(rg);
+        }
+
+        super.setFileSource(read.getFileSource());
+        super.setReadName(read.getReadName());
+        super.setCigarString(read.getCigarString());
+        super.setReadBases(read.getReadBases());
+        super.setBaseQualities(read.getBaseQualities());
+        // From SAMRecord constructor: Do this after the above because setCigarString will clear it.
+        GATKBin.setReadIndexingBin(this, GATKBin.getReadIndexingBin(read));
+    }
+
+    public static GATKSAMRecord createRandomRead(int length) {
+        List<CigarElement> cigarElements = new LinkedList<>();
+        cigarElements.add(new CigarElement(length, CigarOperator.M));
+        Cigar cigar = new Cigar(cigarElements);
+        return ArtificialSAMUtils.createArtificialRead(cigar);
+    }
+
+    ///////////////////////////////////////////////////////////////////////////////
+    // *** support for reads without meaningful strand information            ***//
+    ///////////////////////////////////////////////////////////////////////////////
+
    /**
     * Does this read have a meaningful strandedness value?
     *
     * Some advanced types of reads, such as reads coming from merged fragments,
     * don't have meaningful strandedness values, as they are composites of multiple
     * other reads.  Strandless reads need to be handled specially by code that cares about
     * stranded information, such as FS.
     *
     * @return true if this read doesn't have meaningful strand information
     */
    public boolean isStrandless() {
        return isStrandlessRead;
    }
+
    /**
     * Set the strandless state of this read to isStrandless.
     *
     * Affects getReadNegativeStrandFlag()/setReadNegativeStrandFlag() behavior.
     *
     * @param isStrandless true if this read doesn't have a meaningful strandedness value
     */
    public void setIsStrandless(final boolean isStrandless) {
        this.isStrandlessRead = isStrandless;
    }
+
+    @Override
+    public boolean getReadNegativeStrandFlag() {
+        return ! isStrandless() && super.getReadNegativeStrandFlag();
+    }
+
+    @Override
+    public void setReadNegativeStrandFlag(final boolean flag) {
+        if ( isStrandless() )
+            throw new IllegalStateException("Cannot set the strand of a strandless read");
+        super.setReadNegativeStrandFlag(flag);
+    }
+
+
+    ///////////////////////////////////////////////////////////////////////////////
+    // *** The following methods are overloaded to cache the appropriate data ***//
+    ///////////////////////////////////////////////////////////////////////////////
+
+    @Override
+    public String getReadString() {
+        if ( mReadString == null )
+            mReadString = super.getReadString();
+        return mReadString;
+    }
+
    /**
     * Sets the read bases and keeps the local String cache in sync.
     * The super call runs first so its validation (if any) happens before
     * the cache is updated.
     */
    @Override
    public void setReadString(String s) {
        super.setReadString(s);
        mReadString = s;
    }
+
+    /**
+     * Get the GATKSAMReadGroupRecord of this read
+     * @return a non-null GATKSAMReadGroupRecord
+     */
+    @Override
+    public GATKSAMReadGroupRecord getReadGroup() {
+        if ( ! retrievedReadGroup ) {
+            final SAMReadGroupRecord rg = super.getReadGroup();
+
+            // three cases: rg may be null (no rg, rg may already be a GATKSAMReadGroupRecord, or it may be
+            // a regular SAMReadGroupRecord in which case we have to make it a GATKSAMReadGroupRecord
+            if ( rg == null )
+                mReadGroup = null;
+            else if ( rg instanceof GATKSAMReadGroupRecord )
+                mReadGroup = (GATKSAMReadGroupRecord)rg;
+            else
+                mReadGroup = new GATKSAMReadGroupRecord(rg);
+
+            retrievedReadGroup = true;
+        }
+        return mReadGroup;
+    }
+
+    public void setReadGroup( final GATKSAMReadGroupRecord readGroup ) {
+        mReadGroup = readGroup;
+        retrievedReadGroup = true;
+        setAttribute("RG", mReadGroup.getId()); // todo -- this should be standardized, but we don't have access to SAMTagUtils!
+    }
+
+
    /**
     * Delegates to SAMRecord's hashCode; GATK-specific cached state is
     * deliberately excluded, mirroring equals(Object).
     */
    @Override
    public int hashCode() {
        return super.hashCode();
    }
+
+    @Override
+    public boolean equals(Object o) {
+        if (this == o) return true;
+
+        if (!(o instanceof GATKSAMRecord)) return false;
+
+        // note that we do not consider the GATKSAMRecord internal state at all
+        return super.equals(o);
+    }
+
+    /**
+     * Setters and Accessors for base insertion and base deletion quality scores
+     */
+    public void setBaseQualities( final byte[] quals, final EventType errorModel ) {
+        switch( errorModel ) {
+            case BASE_SUBSTITUTION:
+                setBaseQualities(quals);
+                break;
+            case BASE_INSERTION:
+                setAttribute( GATKSAMRecord.BQSR_BASE_INSERTION_QUALITIES, quals == null ? null : SAMUtils.phredToFastq(quals) );
+                break;
+            case BASE_DELETION:
+                setAttribute( GATKSAMRecord.BQSR_BASE_DELETION_QUALITIES, quals == null ? null : SAMUtils.phredToFastq(quals) );
+                break;
+            default:
+                throw new ReviewedGATKException("Unrecognized Base Recalibration type: " + errorModel );
+        }
+    }
+
+    public byte[] getBaseQualities( final EventType errorModel ) {
+        switch( errorModel ) {
+            case BASE_SUBSTITUTION:
+                return getBaseQualities();
+            case BASE_INSERTION:
+                return getBaseInsertionQualities();
+            case BASE_DELETION:
+                return getBaseDeletionQualities();
+            default:
+                throw new ReviewedGATKException("Unrecognized Base Recalibration type: " + errorModel );
+        }
+    }
+
+    /**
+     * @return whether or not this read has base insertion or deletion qualities (one of the two is sufficient to return true)
+     */
+    public boolean hasBaseIndelQualities() {
+        return getAttribute( BQSR_BASE_INSERTION_QUALITIES ) != null || getAttribute( BQSR_BASE_DELETION_QUALITIES ) != null;
+    }
+
    /**
     * @return the base insertion quality (decoded from the BI attribute) or null if the read doesn't have one
     */
    public byte[] getExistingBaseInsertionQualities() {
        return SAMUtils.fastqToPhred( getStringAttribute(BQSR_BASE_INSERTION_QUALITIES));
    }
+
    /**
     * @return the base deletion quality (decoded from the BD attribute) or null if the read doesn't have one
     */
    public byte[] getExistingBaseDeletionQualities() {
        return SAMUtils.fastqToPhred( getStringAttribute(BQSR_BASE_DELETION_QUALITIES));
    }
+
+    /**
+     * Default utility to query the base insertion quality of a read. If the read doesn't have one, it creates an array of default qualities (currently Q45)
+     * and assigns it to the read.
+     *
+     * @return the base insertion quality array
+     */
+    public byte[] getBaseInsertionQualities() {
+        byte [] quals = getExistingBaseInsertionQualities();
+        if( quals == null ) {
+            quals = new byte[getBaseQualities().length];
+            Arrays.fill(quals, DEFAULT_INSERTION_DELETION_QUAL); // Some day in the future when base insertion and base deletion quals exist the samtools API will
+                                           // be updated and the original quals will be pulled here, but for now we assume the original quality is a flat Q45
+        }
+        return quals;
+    }
+
+    /**
+     * Default utility to query the base deletion quality of a read. If the read doesn't have one, it creates an array of default qualities (currently Q45)
+     * and returns it (the default array is not stored back on the read).
+     *
+     * @return the base deletion quality array
+     */
+    public byte[] getBaseDeletionQualities() {
+        byte[] quals = getExistingBaseDeletionQualities();
+        if( quals == null ) {
+            quals = new byte[getBaseQualities().length];
+            Arrays.fill(quals, DEFAULT_INSERTION_DELETION_QUAL);  // Some day in the future when base insertion and base deletion quals exist the samtools API will
+                                            // be updated and the original quals will be pulled here, but for now we assume the original quality is a flat Q45
+        }
+        return quals;
+    }
+
+    /**
+     * Efficient caching accessor that returns the GATK NGSPlatform of this read
+     * @return the NGSPlatform of this read's read group
+     */
+    public NGSPlatform getNGSPlatform() {
+        return getReadGroup().getNGSPlatform();
+    }
+
+    ///////////////////////////////////////////////////////////////////////////////
+    // *** GATKSAMRecord specific methods                                     ***//
+    ///////////////////////////////////////////////////////////////////////////////
+
+    /**
+     * Checks whether an attribute has been set for the given key.
+     *
+     * Temporary attributes provide a way to label or attach arbitrary data to
+     * individual GATKSAMRecords. These attributes exist in memory only,
+     * and are never written to disk.
+     *
+     * @param key key
+     * @return True if an attribute has been set for this key.
+     */
+    public boolean containsTemporaryAttribute(Object key) {
+        return temporaryAttributes != null && temporaryAttributes.containsKey(key);
+    }
+
+    /**
+     * Sets the key to the given value, replacing any previous value. The previous
+     * value is returned.
+     *
+     * Temporary attributes provide a way to label or attach arbitrary data to
+     * individual GATKSAMRecords. These attributes exist in memory only,
+     * and are never written to disk.
+     *
+     * @param key    key
+     * @param value  value
+     * @return attribute
+     */
+    public Object setTemporaryAttribute(Object key, Object value) {
+        if(temporaryAttributes == null) {
+            temporaryAttributes = new HashMap<>();
+        }
+        return temporaryAttributes.put(key, value);
+    }
+
+    /**
+     * Looks up the value associated with the given key.
+     *
+     * Temporary attributes provide a way to label or attach arbitrary data to
+     * individual GATKSAMRecords. These attributes exist in memory only,
+     * and are never written to disk.
+     *
+     * @param key key
+     * @return The value, or null.
+     */
+    public Object getTemporaryAttribute(Object key) {
+        if(temporaryAttributes != null) {
+            return temporaryAttributes.get(key);
+        }
+        return null;
+    }
+
+    /**
+     * Checks whether the read has any bases.
+     *
+     * Empty reads can be dangerous as they may have no cigar strings, no read names and
+     * other missing attributes.
+     *
+     * @return true if the read has no bases
+     */
+    public boolean isEmpty() {
+        return super.getReadBases() == null || super.getReadLength() == 0;
+    }
+
+    /**
+     * Clears all attributes except ReadGroup of the read.
+     */
+    public GATKSAMRecord simplify () {
+        GATKSAMReadGroupRecord rg = getReadGroup(); // save the read group information
+        byte[] insQuals = (this.getAttribute(BQSR_BASE_INSERTION_QUALITIES) == null) ? null : getBaseInsertionQualities();
+        byte[] delQuals = (this.getAttribute(BQSR_BASE_DELETION_QUALITIES)  == null) ? null : getBaseDeletionQualities();
+        this.clearAttributes(); // clear all attributes from the read
+        this.setReadGroup(rg); // restore read group
+        if (insQuals != null)
+           this.setBaseQualities(insQuals, EventType.BASE_INSERTION); // restore base insertion if we had any
+        if (delQuals != null)
+            this.setBaseQualities(delQuals, EventType.BASE_DELETION); // restore base deletion if we had any
+        return this;
+    }
+
+    /**
+     * Calculates the reference coordinate for the beginning of the read taking into account soft clips but not hard clips.
+     *
+     * Note: getUnclippedStart() adds soft and hard clips, this function only adds soft clips.
+     *
+     * @return the unclipped start of the read taking soft clips (but not hard clips) into account
+     */
+    public int getSoftStart() {
+        if ( softStart == UNINITIALIZED ) {
+            softStart = getAlignmentStart();
+            for (final CigarElement cig : getCigar().getCigarElements()) {
+                final CigarOperator op = cig.getOperator();
+
+                if (op == CigarOperator.SOFT_CLIP)
+                    softStart -= cig.getLength();
+                else if (op != CigarOperator.HARD_CLIP)
+                    break;
+            }
+        }
+        return softStart;
+    }
+
+    /**
+     * Calculates the reference coordinate for the end of the read taking into account soft clips but not hard clips.
+     *
+     * Note: getUnclippedEnd() adds soft and hard clips, this function only adds soft clips.
+     *
+     * @return the unclipped end of the read taking soft clips (but not hard clips) into account
+     */
+    public int getSoftEnd() {
+        if ( softEnd == UNINITIALIZED ) {
+            boolean foundAlignedBase = false;
+            softEnd = getAlignmentEnd();
+            final List<CigarElement> cigs = getCigar().getCigarElements();
+            for (int i = cigs.size() - 1; i >= 0; --i) {
+                final CigarElement cig = cigs.get(i);
+                final CigarOperator op = cig.getOperator();
+
+                if (op == CigarOperator.SOFT_CLIP) // assumes the soft clip that we found is at the end of the aligned read
+                    softEnd += cig.getLength();
+                else if (op != CigarOperator.HARD_CLIP) {
+                    foundAlignedBase = true;
+                    break;
+                }
+            }
+            if( !foundAlignedBase ) { // for example 64H14S, the soft end is actually the same as the alignment end
+                softEnd = getAlignmentEnd();
+            }
+        }
+
+        return softEnd;
+    }
+
+    /**
+     * If the read is hard clipped, the soft start and end will change. You can set manually or just reset the cache
+     * so that the next call to getSoftStart/End will recalculate it lazily.
+     */
+    public void resetSoftStartAndEnd() {
+        softStart = -1;
+        softEnd = -1;
+    }
+
+    /**
+     * If the read is hard clipped, the soft start and end will change. You can set manually or just reset the cache
+     * so that the next call to getSoftStart/End will recalculate it lazily.
+     */
+    public void resetSoftStartAndEnd(int softStart, int softEnd) {
+        this.softStart = softStart;
+        this.softEnd = softEnd;
+    }
+
+    /**
+     * Determines the original alignment start of a previously clipped read.
+     * 
+     * This is useful for reads that have been trimmed to a variant region and lost the information of its original alignment start
+     * 
+     * @return the alignment start of a read before it was clipped
+     */
+    public int getOriginalAlignmentStart() {
+        return getUnclippedStart();
+    }
+
+    /**
+     * Determines the original alignment end of a previously clipped read.
+     *
+     * This is useful for reads that have been trimmed to a variant region and lost the information of its original alignment end
+     * 
+     * @return the alignment end of a read before it was clipped
+     */
+    public int getOriginalAlignmentEnd() {
+        return getUnclippedEnd();
+    }
+
+    /**
+     * Creates an empty GATKSAMRecord with the read's header, read group and mate
+     * information, but empty (not-null) fields:
+     *  - Cigar String
+     *  - Read Bases
+     *  - Base Qualities
+     *
+     * Use this method if you want to create a new empty GATKSAMRecord based on
+     * another GATKSAMRecord
+     *
+     * @param read a read to copy the header from
+     * @return a read with no bases but safe for the GATK
+     */
+    public static GATKSAMRecord emptyRead(GATKSAMRecord read) {
+        final GATKSAMRecord emptyRead = new GATKSAMRecord(read.getHeader());
+        emptyRead.setReferenceIndex(read.getReferenceIndex());
+        emptyRead.setAlignmentStart(0);
+        emptyRead.setMappingQuality(0);
+        // setting read indexing bin last
+        emptyRead.setFlags(read.getFlags());
+        emptyRead.setMateReferenceIndex(read.getMateReferenceIndex());
+        emptyRead.setMateAlignmentStart(read.getMateAlignmentStart());
+        emptyRead.setInferredInsertSize(read.getInferredInsertSize());
+
+        emptyRead.setCigarString("");
+        emptyRead.setReadBases(new byte[0]);
+        emptyRead.setBaseQualities(new byte[0]);
+
+        SAMReadGroupRecord samRG = read.getReadGroup();
+        emptyRead.clearAttributes();
+        if (samRG != null) {
+            GATKSAMReadGroupRecord rg = new GATKSAMReadGroupRecord(samRG);
+            emptyRead.setReadGroup(rg);
+        }
+
+        GATKBin.setReadIndexingBin(emptyRead, 0);
+
+        return emptyRead;
+    }
+
+    /**
+     * Creates a new GATKSAMRecord with the source read's header, read group and mate
+     * information, but with the following fields set to user-supplied values:
+     *  - Read Bases
+     *  - Base Qualities
+     *  - Base Insertion Qualities
+     *  - Base Deletion Qualities
+     *
+     *  Cigar string is empty (not-null)
+     *
+     * Use this method if you want to create a new GATKSAMRecord based on
+     * another GATKSAMRecord, but with modified bases and qualities
+     *
+     * @param read a read to copy the header from
+     * @param readBases an array containing the new bases you wish use in place of the originals
+     * @param baseQualities an array containing the new base qualities you wish use in place of the originals
+     * @param baseInsertionQualities an array containing the new base insertion qualities
+     * @param baseDeletionQualities an array containing the new base deletion qualities
+     * @return a read with modified bases and qualities, safe for the GATK
+     */
+    public static GATKSAMRecord createQualityModifiedRead(final GATKSAMRecord read,
+                                                          final byte[] readBases,
+                                                          final byte[] baseQualities,
+                                                          final byte[] baseInsertionQualities,
+                                                          final byte[] baseDeletionQualities) {
+        if ( baseQualities.length != readBases.length || baseInsertionQualities.length != readBases.length || baseDeletionQualities.length != readBases.length )
+            throw new IllegalArgumentException("Read bases and read quality arrays aren't the same size: Bases:" + readBases.length
+                                                + " vs Base Q's:" + baseQualities.length
+                                                + " vs Insert Q's:" + baseInsertionQualities.length
+                                                + " vs Delete Q's:" + baseDeletionQualities.length);
+
+        final GATKSAMRecord processedRead = GATKSAMRecord.emptyRead(read);
+        processedRead.setReadBases(readBases);
+        processedRead.setBaseQualities(baseQualities, EventType.BASE_SUBSTITUTION);
+        processedRead.setBaseQualities(baseInsertionQualities, EventType.BASE_INSERTION);
+        processedRead.setBaseQualities(baseDeletionQualities, EventType.BASE_DELETION);
+
+        return processedRead;
+    }
+
+    /**
+     * Shallow copy of everything, except for the attribute list and the temporary attributes. 
+     * A new list of the attributes is created for both, but the attributes themselves are copied by reference.  
+     * This should be safe because callers should never modify a mutable value returned by any of the get() methods anyway.
+     * 
+     * @return a shallow copy of the GATKSAMRecord
+     */
+    @Override
+    public Object clone() {
+        try {
+            final GATKSAMRecord clone = (GATKSAMRecord) super.clone();
+            if (temporaryAttributes != null) {
+                clone.temporaryAttributes = new HashMap<>();
+                for (Object attribute : temporaryAttributes.keySet())
+                    clone.setTemporaryAttribute(attribute, temporaryAttributes.get(attribute));
+            }
+            return clone;
+        } catch (final CloneNotSupportedException e) {
+            throw new RuntimeException( e );
+        }
+    }
+
+    /**
+     * A caching version of ReadUtils.getAdaptorBoundary()
+     *
+     * see #ReadUtils.getAdaptorBoundary(SAMRecord) for more information about the meaning of this function
+     *
+     * WARNING -- this function caches a value depending on the inferred insert size and alignment starts
+     * and stops of this read and its mate.  Changing these values in any way will invalidate the cached value.
+     * However, we do not monitor those setter functions, so modifying a GATKSAMRecord in any way may
+     * result in stale cached values.
+     *
+     * @return the result of calling ReadUtils.getAdaptorBoundary on this read
+     */
+    @Ensures("result == ReadUtils.getAdaptorBoundary(this)")
+    public int getAdaptorBoundary() {
+        if ( adapterBoundary == null )
+            adapterBoundary = ReadUtils.getAdaptorBoundary(this);
+        return adapterBoundary;
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/sam/GATKSAMRecordIterator.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/sam/GATKSAMRecordIterator.java
new file mode 100644
index 0000000..1560dce
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/sam/GATKSAMRecordIterator.java
@@ -0,0 +1,64 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.sam;
+
+import htsjdk.samtools.SAMRecord;
+import htsjdk.samtools.util.CloseableIterator;
+import org.broadinstitute.gatk.utils.iterators.GATKSAMIterator;
+
+import java.util.Iterator;
+
+/**
+ * Temporary hack to convert SAMRecords to GATKSAMRecords
+ *
+ * User: depristo
+ * Date: 1/11/13
+ * Time: 1:19 PM
+ */
+public class GATKSAMRecordIterator implements CloseableIterator<GATKSAMRecord>, Iterable<GATKSAMRecord> {
+    final CloseableIterator<? extends SAMRecord> it;
+
+    public GATKSAMRecordIterator(final CloseableIterator<? extends SAMRecord> it) {
+        this.it = it;
+    }
+
+    public GATKSAMRecordIterator(final GATKSAMIterator it) {
+        this.it = it;
+    }
+
+    @Override public boolean hasNext() { return it.hasNext(); }
+    @Override public GATKSAMRecord next() {
+        SAMRecord next = it.next();
+        if (next instanceof GATKSAMRecord) {
+            return (GATKSAMRecord)next;
+        } else {
+            return new GATKSAMRecord(next);
+        }
+    }
+    @Override public void remove() { it.remove(); }
+    @Override public void close() { it.close(); }
+    @Override public Iterator<GATKSAMRecord> iterator() { return this; }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/sam/ReadUnclippedStartWithNoTiesComparator.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/sam/ReadUnclippedStartWithNoTiesComparator.java
new file mode 100644
index 0000000..d57238e
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/sam/ReadUnclippedStartWithNoTiesComparator.java
@@ -0,0 +1,73 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.sam;
+
+import com.google.java.contract.Ensures;
+import com.google.java.contract.Requires;
+import htsjdk.samtools.SAMRecord;
+
+import java.util.Comparator;
+
+public class ReadUnclippedStartWithNoTiesComparator implements Comparator<SAMRecord> {
+    @Requires("c1 >= 0 && c2 >= 0")
+    @Ensures("result == 0 || result == 1 || result == -1")
+    private int compareContigs(int c1, int c2) {
+        if (c1 == c2)
+            return 0;
+        else if (c1 > c2)
+            return 1;
+        return -1;
+    }
+
+    @Requires("r1 != null && r2 != null")
+    @Ensures("result == 0 || result == 1 || result == -1")
+    public int compare(SAMRecord r1, SAMRecord r2) {
+        int result;
+
+        if (r1 == r2)
+            result = 0;
+
+        else if (r1.getReadUnmappedFlag())
+            result = 1;
+        else if (r2.getReadUnmappedFlag())
+            result = -1;
+        else {
+            final int cmpContig = compareContigs(r1.getReferenceIndex(), r2.getReferenceIndex());
+
+            if (cmpContig != 0)
+                result = cmpContig;
+
+            else {
+                if (r1.getUnclippedStart() < r2.getUnclippedStart())
+                    result = -1;
+                else
+                    result = 1;
+            }
+        }
+
+        return result;
+    }
+}
\ No newline at end of file
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/sam/ReadUtils.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/sam/ReadUtils.java
new file mode 100644
index 0000000..b3d945e
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/sam/ReadUtils.java
@@ -0,0 +1,957 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.sam;
+
+import com.google.java.contract.Ensures;
+import com.google.java.contract.Requires;
+import htsjdk.samtools.*;
+import org.apache.log4j.Logger;
+import org.broadinstitute.gatk.utils.*;
+import org.broadinstitute.gatk.utils.collections.Pair;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+
+import java.io.File;
+import java.util.*;
+
+/**
+ * A miscellaneous collection of utilities for working with SAM files, headers, etc.
+ * Static methods only, please.
+ *
+ * @author mhanna
+ * @version 0.1
+ */
+public class ReadUtils {
+    private final static Logger logger = Logger.getLogger(ReadUtils.class);
+    
+    private static final String OFFSET_OUT_OF_BOUNDS_EXCEPTION = "Offset cannot be greater than read length %d : %d";
+    private static final String OFFSET_NOT_ZERO_EXCEPTION = "We ran past the end of the read and never found the offset, something went wrong!";
+    
+    private ReadUtils() {
+    }
+
+    private static final int DEFAULT_ADAPTOR_SIZE = 100;
+    public static final int CLIPPING_GOAL_NOT_REACHED = -1;
+
+    /**
+     * Pull out the samples from a SAMFileHeader;
+     * note that we use a TreeSet so that they are sorted
+     *
+     * @param header  the sam file header
+     * @return list of strings representing the sample names
+     */
+    public static Set<String> getSAMFileSamples(final SAMFileHeader header) {
+        // get all of the unique sample names
+        final Set<String> samples = new TreeSet<String>();
+        List<SAMReadGroupRecord> readGroups = header.getReadGroups();
+        for ( SAMReadGroupRecord readGroup : readGroups )
+            samples.add(readGroup.getSample());
+        return samples;
+    }
+
+    /**
+     * A marker to tell which end of the read has been clipped
+     */
+    public enum ClippingTail {
+        LEFT_TAIL,
+        RIGHT_TAIL
+    }
+
+    /**
+     * A HashMap of the SAM spec read flag names
+     *
+     * Note: This is not being used right now, but can be useful in the future
+     */
+    private static final Map<Integer, String> readFlagNames = new HashMap<Integer, String>();
+
+    static {
+        readFlagNames.put(0x1, "Paired");
+        readFlagNames.put(0x2, "Proper");
+        readFlagNames.put(0x4, "Unmapped");
+        readFlagNames.put(0x8, "MateUnmapped");
+        readFlagNames.put(0x10, "Forward");
+        //readFlagNames.put(0x20, "MateForward");
+        readFlagNames.put(0x40, "FirstOfPair");
+        readFlagNames.put(0x80, "SecondOfPair");
+        readFlagNames.put(0x100, "NotPrimary");
+        readFlagNames.put(0x200, "NON-PF");
+        readFlagNames.put(0x400, "Duplicate");
+    }
+
+    /**
+     * This enum represents all the different ways in which a read can overlap an interval.
+     *
+     * NO_OVERLAP_CONTIG:
+     * read and interval are in different contigs.
+     *
+     * NO_OVERLAP_LEFT:
+     * the read does not overlap the interval.
+     *
+     *                        |----------------| (interval)
+     *   <---------------->                      (read)
+     *
+     * NO_OVERLAP_RIGHT:
+     * the read does not overlap the interval.
+     *
+     *   |----------------|                      (interval)
+     *                        <----------------> (read)
+     *
+     * OVERLAP_LEFT:
+     * the read starts before the beginning of the interval but ends inside of it
+     *
+     *          |----------------| (interval)
+     *   <---------------->        (read)
+     *
+     * OVERLAP_RIGHT:
+     * the read starts inside the interval but ends outside of it
+     *
+     *   |----------------|     (interval)
+     *       <----------------> (read)
+     *
+     * OVERLAP_LEFT_AND_RIGHT:
+     * the read starts before the interval and ends after the interval
+     *
+     *      |-----------|     (interval)
+     *  <-------------------> (read)
+     *
+     * OVERLAP_CONTAINED:
+     * the read starts and ends inside the interval
+     *
+     *  |----------------|     (interval)
+     *     <-------->          (read)
+     */
+    public enum ReadAndIntervalOverlap {NO_OVERLAP_CONTIG, NO_OVERLAP_LEFT, NO_OVERLAP_RIGHT, NO_OVERLAP_HARDCLIPPED_LEFT, NO_OVERLAP_HARDCLIPPED_RIGHT, OVERLAP_LEFT, OVERLAP_RIGHT, OVERLAP_LEFT_AND_RIGHT, OVERLAP_CONTAINED}
+
+    /**
+     * is this base inside the adaptor of the read?
+     *
+     * There are two cases to treat here:
+     *
+     * 1) Read is in the negative strand => Adaptor boundary is on the left tail
+     * 2) Read is in the positive strand => Adaptor boundary is on the right tail
+     *
+     * Note: We return false for all reads that are UNMAPPED or have a weird big insert size (probably due to mismapping or a bigger event)
+     *
+     * @param read the read to test
+     * @param basePos base position in REFERENCE coordinates (not read coordinates)
+     * @return whether or not the base is in the adaptor
+     */
+    public static boolean isBaseInsideAdaptor(final GATKSAMRecord read, long basePos) {
+        final int adaptorBoundary = read.getAdaptorBoundary();
+        if (adaptorBoundary == CANNOT_COMPUTE_ADAPTOR_BOUNDARY || read.getInferredInsertSize() > DEFAULT_ADAPTOR_SIZE)
+            return false;
+
+        return read.getReadNegativeStrandFlag() ? basePos <= adaptorBoundary : basePos >= adaptorBoundary;
+    }
+
+    /**
+     * Finds the adaptor boundary around the read and returns the first base inside the adaptor that is closest to
+     * the read boundary. If the read is in the positive strand, this is the first base after the end of the
+     * fragment (Picard calls it 'insert'), if the read is in the negative strand, this is the first base before the
+     * beginning of the fragment.
+     *
+     * There are two cases we need to treat here:
+     *
+     * 1) Our read is in the reverse strand :
+     *
+     *     <----------------------| *
+     *   |--------------------->
+     *
+     *   in these cases, the adaptor boundary is at the mate start (minus one)
+     *
+     * 2) Our read is in the forward strand :
+     *
+     *   |---------------------->   *
+     *     <----------------------|
+     *
+     *   in these cases the adaptor boundary is at the start of the read plus the inferred insert size (plus one)
+     *
+     * @param read the read being tested for the adaptor boundary
+     * @return the reference coordinate for the adaptor boundary (effectively the first base IN the adaptor, closest to the read).
+     * CANNOT_COMPUTE_ADAPTOR_BOUNDARY if the read is unmapped or the mate is mapped to another contig.
+     */
+    public static int getAdaptorBoundary(final SAMRecord read) {
+        if ( ! hasWellDefinedFragmentSize(read) ) {
+            return CANNOT_COMPUTE_ADAPTOR_BOUNDARY;
+        } else if ( read.getReadNegativeStrandFlag() ) {
+            return read.getMateAlignmentStart() - 1;           // case 1 (see header)
+        } else {
+            final int insertSize = Math.abs(read.getInferredInsertSize());    // the inferred insert size can be negative if the mate is mapped before the read (so we take the absolute value)
+            return read.getAlignmentStart() + insertSize + 1;  // case 2 (see header)
+        }
+    }
+
+    public static int CANNOT_COMPUTE_ADAPTOR_BOUNDARY = Integer.MIN_VALUE;
+
+    /**
+     * Can the adaptor sequence of read be reliably removed from the read based on the alignment of
+     * read and its mate?
+     *
+     * @param read the read to check
+     * @return true if it can, false otherwise
+     */
+    public static boolean hasWellDefinedFragmentSize(final SAMRecord read) {
+        if ( read.getInferredInsertSize() == 0 )
+            // no adaptors in reads with mates in another chromosome or unmapped pairs
+            return false;
+        if ( ! read.getReadPairedFlag() )
+            // only reads that are paired can be adaptor trimmed
+            return false;
+        if ( read.getReadUnmappedFlag() || read.getMateUnmappedFlag() )
+            // only reads when both reads are mapped can be trimmed
+            return false;
+//        if ( ! read.getProperPairFlag() )
+//            // note this flag isn't always set properly in BAMs, which will stop us from eliminating some proper pairs
+//            // reads that aren't part of a proper pair (i.e., have strange alignments) can't be trimmed
+//            return false;
+        if ( read.getReadNegativeStrandFlag() == read.getMateNegativeStrandFlag() )
+            // sanity check on getProperPairFlag to ensure that read1 and read2 aren't on the same strand
+            return false;
+
+        if ( read.getReadNegativeStrandFlag() ) {
+            // we're on the negative strand, so our read runs right to left
+            return read.getAlignmentEnd() > read.getMateAlignmentStart();
+        } else {
+            // we're on the positive strand, so our mate should be to our right (his start + insert size should be past our start)
+            return read.getAlignmentStart() <= read.getMateAlignmentStart() + read.getInferredInsertSize();
+        }
+    }
+
+    /**
+     * is the read a 454 read?
+     *
+     * @param read the read to test
+     * @return checks the read group tag PL for the default 454 tag
+     */
+    public static boolean is454Read(GATKSAMRecord read) {
+        return NGSPlatform.fromRead(read) == NGSPlatform.LS454;
+    }
+
+    /**
+     * is the read an IonTorrent read?
+     *
+     * @param read the read to test
+     * @return checks the read group tag PL for the default ion tag
+     */
+    public static boolean isIonRead(GATKSAMRecord read) {
+        return NGSPlatform.fromRead(read) == NGSPlatform.ION_TORRENT;
+    }
+
+    /**
+     * is the read a SOLiD read?
+     *
+     * @param read the read to test
+     * @return checks the read group tag PL for the default SOLiD tag
+     */
+    public static boolean isSOLiDRead(GATKSAMRecord read) {
+        return NGSPlatform.fromRead(read) == NGSPlatform.SOLID;
+    }
+
+    /**
+     * is the read a SLX read?
+     *
+     * @param read the read to test
+     * @return checks the read group tag PL for the default SLX tag
+     */
+    public static boolean isIlluminaRead(GATKSAMRecord read) {
+        return NGSPlatform.fromRead(read) == NGSPlatform.ILLUMINA;
+    }
+
+    /**
+     * checks if the read has a platform tag in the readgroup equal to 'name'.
+     * Assumes that 'name' is upper-cased.
+     *
+     * @param read the read to test
+     * @param name the upper-cased platform name to test
+     * @return whether or not name == PL tag in the read group of read
+     */
+    public static boolean isPlatformRead(GATKSAMRecord read, String name) {
+
+        SAMReadGroupRecord readGroup = read.getReadGroup();
+        if (readGroup != null) {
+            Object readPlatformAttr = readGroup.getAttribute("PL");
+            if (readPlatformAttr != null)
+                return readPlatformAttr.toString().toUpperCase().contains(name);
+        }
+        return false;
+    }
+
+
+    /**
+     * Returns the collections of reads sorted in coordinate order, according to the order defined
+     * in the reads themselves
+     *
+     * @param reads
+     * @return
+     */
+    public final static List<GATKSAMRecord> sortReadsByCoordinate(List<GATKSAMRecord> reads) {
+        final SAMRecordComparator comparer = new SAMRecordCoordinateComparator();
+        Collections.sort(reads, comparer);
+        return reads;
+    }
+
+    /**
+     * If a read starts in INSERTION, returns the first element length.
+     *
+     * Warning: If the read has Hard or Soft clips before the insertion this function will return 0.
+     *
+     * @param read
+     * @return the length of the first insertion, or 0 if there is none (see warning).
+     */
+    public final static int getFirstInsertionOffset(SAMRecord read) {
+        CigarElement e = read.getCigar().getCigarElement(0);
+        if ( e.getOperator() == CigarOperator.I )
+            return e.getLength();
+        else
+            return 0;
+    }
+
+    /**
+     * If a read ends in INSERTION, returns the last element length.
+     *
+     * Warning: If the read has Hard or Soft clips after the insertion this function will return 0.
+     *
+     * @param read
+     * @return the length of the last insertion, or 0 if there is none (see warning).
+     */
+    public final static int getLastInsertionOffset(SAMRecord read) {
+        CigarElement e = read.getCigar().getCigarElement(read.getCigarLength() - 1);
+        if ( e.getOperator() == CigarOperator.I )
+            return e.getLength();
+        else
+            return 0;
+    }
+
    /**
     * Determines what is the position of the read in relation to the interval.
     * Note: This function uses the UNCLIPPED ENDS of the reads for the comparison.
     *
     * The checks are ORDER-DEPENDENT: contig mismatch first, then no-overlap by
     * unclipped coordinates, then no-overlap by soft (clip-inclusive) coordinates,
     * and only then the actual overlap classification.
     *
     * @param read the read
     * @param interval the interval
     * @return the overlap type as described by ReadAndIntervalOverlap enum (see above)
     */
    public static ReadAndIntervalOverlap getReadAndIntervalOverlapType(GATKSAMRecord read, GenomeLoc interval) {

        // soft = alignment extended through soft clips; u = fully unclipped extents
        int sStart = read.getSoftStart();
        int sStop = read.getSoftEnd();
        int uStart = read.getUnclippedStart();
        int uStop = read.getUnclippedEnd();

        // different contig: no positional comparison is meaningful
        if ( !read.getReferenceName().equals(interval.getContig()) )
            return ReadAndIntervalOverlap.NO_OVERLAP_CONTIG;

        // entirely before/after the interval even counting all clipped bases
        else if ( uStop < interval.getStart() )
            return ReadAndIntervalOverlap.NO_OVERLAP_LEFT;

        else if ( uStart > interval.getStop() )
            return ReadAndIntervalOverlap.NO_OVERLAP_RIGHT;

        // only the hard-clipped portion reaches the interval
        else if ( sStop < interval.getStart() )
            return ReadAndIntervalOverlap.NO_OVERLAP_HARDCLIPPED_LEFT;

        else if ( sStart > interval.getStop() )
            return ReadAndIntervalOverlap.NO_OVERLAP_HARDCLIPPED_RIGHT;

        // read (incl. soft clips) fully inside the interval
        else if ( (sStart >= interval.getStart()) &&
                  (sStop <= interval.getStop()) )
            return ReadAndIntervalOverlap.OVERLAP_CONTAINED;

        // read spans the whole interval
        else if ( (sStart < interval.getStart()) &&
                  (sStop > interval.getStop()) )
            return ReadAndIntervalOverlap.OVERLAP_LEFT_AND_RIGHT;

        // read hangs off the left edge
        else if ( (sStart < interval.getStart()) )
            return ReadAndIntervalOverlap.OVERLAP_LEFT;

        // remaining case: read hangs off the right edge
        else
            return ReadAndIntervalOverlap.OVERLAP_RIGHT;
    }
+
    /**
     * Pre-processes the results of getReadCoordinateForReferenceCoordinate(GATKSAMRecord, int) to take care of
     * two corner cases:
     * 
     * 1. If clipping the right tail (end of the read), getReadCoordinateForReferenceCoordinate may fall inside
     * a deletion; in that case return the base after the deletion. If clipping the left tail (beginning of the
     * read) it doesn't matter, because it already returns the previous base by default.
     * 
     * 2. If clipping the left tail (beginning of the read) and the read starts with an insertion, and you're
     * requesting the first read-based coordinate, getReadCoordinateForReferenceCoordinate will skip the leading
     * insertion (because it has the same reference coordinate as the following base).
     *
     * @param read     the read
     * @param refCoord the reference coordinate to map into read space
     * @param tail     which end of the read is being clipped (decides corner-case handling)
     * @return the read coordinate corresponding to the requested reference coordinate for clipping.
     */
    @Requires({"refCoord >= read.getUnclippedStart()", "refCoord <= read.getUnclippedEnd() || (read.getUnclippedEnd() < read.getUnclippedStart())"})
    @Ensures({"result >= 0", "result < read.getReadLength()"})
    public static int getReadCoordinateForReferenceCoordinate(GATKSAMRecord read, int refCoord, ClippingTail tail) {
        // delegates to the cigar-based implementation, anchored at the soft (clip-inclusive) start
        return getReadCoordinateForReferenceCoordinate(read.getSoftStart(), read.getCigar(), refCoord, tail, false);
    }
+
    /**
     * Like getReadCoordinateForReferenceCoordinate(GATKSAMRecord, int, ClippingTail), but first clamps
     * the requested reference coordinate to the read's soft start, so coordinates falling before the
     * read are mapped to its first (soft-clip-inclusive) position instead of failing.
     *
     * @param read     the read
     * @param refCoord the requested reference coordinate (may precede the read's soft start)
     * @param tail     which end of the read is being clipped
     * @return the read coordinate corresponding to the (clamped) reference coordinate
     */
    public static int getReadCoordinateForReferenceCoordinateUpToEndOfRead(GATKSAMRecord read, int refCoord, ClippingTail tail) {
        final int leftmostSafeVariantPosition = Math.max(read.getSoftStart(), refCoord);
        return getReadCoordinateForReferenceCoordinate(read.getSoftStart(), read.getCigar(), leftmostSafeVariantPosition, tail, false);
    }
+
+    public static int getReadCoordinateForReferenceCoordinate(final int alignmentStart, final Cigar cigar, final int refCoord, final ClippingTail tail, final boolean allowGoalNotReached) {
+        Pair<Integer, Boolean> result = getReadCoordinateForReferenceCoordinate(alignmentStart, cigar, refCoord, allowGoalNotReached);
+        int readCoord = result.getFirst();
+
+        // Corner case one: clipping the right tail and falls on deletion, move to the next
+        // read coordinate. It is not a problem for the left tail because the default answer
+        // from getReadCoordinateForReferenceCoordinate is to give the previous read coordinate.
+        if (result.getSecond() && tail == ClippingTail.RIGHT_TAIL)
+            readCoord++;
+
+        // clipping the left tail and first base is insertion, go to the next read coordinate
+        // with the same reference coordinate. Advance to the next cigar element, or to the
+        // end of the read if there is no next element.
+        final CigarElement firstElementIsInsertion = readStartsWithInsertion(cigar);
+        if (readCoord == 0 && tail == ClippingTail.LEFT_TAIL && firstElementIsInsertion != null)
+            readCoord = Math.min(firstElementIsInsertion.getLength(), cigar.getReadLength() - 1);
+
+        return readCoord;
+    }
+
    /**
     * Returns the read coordinate corresponding to the requested reference coordinate.
     *
     * WARNING: if the requested reference coordinate happens to fall inside or just before a deletion (or skipped region) in the read, this function
     * will return the last read base before the deletion (or skipped region). This function returns a
     * Pair(int readCoord, boolean fallsInsideOrJustBeforeDeletionOrSkippedRegion) so you can choose which readCoordinate to use when faced with
     * a deletion (or skipped region).
     *
     * SUGGESTION: Use getReadCoordinateForReferenceCoordinate(GATKSAMRecord, int, ClippingTail) instead to get a
     * pre-processed result according to normal clipping needs. Or you can use this function and tailor the
     * behavior to your needs.
     *
     * @param read     the read
     * @param refCoord the requested reference coordinate (must lie within the read's soft start/end per the contract annotations)
     * @return the read coordinate corresponding to the requested reference coordinate. (see warning!)
     */
    @Requires({"refCoord >= read.getSoftStart()", "refCoord <= read.getSoftEnd()"})
    @Ensures({"result.getFirst() >= 0", "result.getFirst() < read.getReadLength()"})
    //TODO since we do not have contracts any more, should we check for the requirements in the method code?
    public static Pair<Integer, Boolean> getReadCoordinateForReferenceCoordinate(GATKSAMRecord read, int refCoord) {
        // delegates to the cigar-based implementation, anchored at the soft (clip-inclusive) start
        return getReadCoordinateForReferenceCoordinate(read.getSoftStart(), read.getCigar(), refCoord, false);
    }
+
    /**
     * Core implementation: walks the cigar from alignmentStart, consuming reference bases
     * (soft clips count as reference-consuming here, since alignmentStart is the soft start)
     * until the requested reference coordinate is reached, while accumulating the number of
     * read bases traversed.
     *
     * @param alignmentStart      the read's soft (clip-inclusive) alignment start
     * @param cigar               the read's cigar
     * @param refCoord            the requested reference coordinate
     * @param allowGoalNotReached if true, return CLIPPING_GOAL_NOT_REACHED instead of throwing
     *                            when the coordinate cannot be reached within the read
     * @return pair of (read coordinate, whether it fell inside or just before a deletion/skipped region)
     */
    public static Pair<Integer, Boolean> getReadCoordinateForReferenceCoordinate(final int alignmentStart, final Cigar cigar, final int refCoord, final boolean allowGoalNotReached) {
        int readBases = 0;
        int refBases = 0;
        boolean fallsInsideDeletionOrSkippedRegion = false;
        boolean endJustBeforeDeletionOrSkippedRegion = false;
        boolean fallsInsideOrJustBeforeDeletionOrSkippedRegion = false;

        final int goal = refCoord - alignmentStart;  // The goal is to move this many reference bases
        if (goal < 0) {
            if (allowGoalNotReached) {
                return new Pair<Integer, Boolean>(CLIPPING_GOAL_NOT_REACHED, false);
            } else {
                throw new ReviewedGATKException("Somehow the requested coordinate is not covered by the read. Too many deletions?");
            }
        }
        boolean goalReached = refBases == goal;

        Iterator<CigarElement> cigarElementIterator = cigar.getCigarElements().iterator();
        while (!goalReached && cigarElementIterator.hasNext()) {
            final CigarElement cigarElement = cigarElementIterator.next();
            int shift = 0;

            // soft clips are treated as reference-consuming because the walk is anchored at the soft start
            if (cigarElement.getOperator().consumesReferenceBases() || cigarElement.getOperator() == CigarOperator.SOFT_CLIP) {
                if (refBases + cigarElement.getLength() < goal)
                    shift = cigarElement.getLength();
                else
                    shift = goal - refBases;

                refBases += shift;
            }
            goalReached = refBases == goal;

            if (!goalReached && cigarElement.getOperator().consumesReadBases())
                readBases += cigarElement.getLength();

            if (goalReached) {
                // Is this base's reference position within this cigar element? Or did we use it all?
                final boolean endsWithinCigar = shift < cigarElement.getLength();

                // If it isn't, we need to check the next one. There should *ALWAYS* be a next one
                // since we checked if the goal coordinate is within the read length, so this is just a sanity check.
                if (!endsWithinCigar && !cigarElementIterator.hasNext()) {
                    if (allowGoalNotReached) {
                        return new Pair<Integer, Boolean>(CLIPPING_GOAL_NOT_REACHED, false);
                    } else {
                        throw new ReviewedGATKException(String.format("Reference coordinate corresponds to a non-existent base in the read. This should never happen -- check read with alignment start: %s  and cigar: %s", alignmentStart, cigar));
                    }
                }

                CigarElement nextCigarElement = null;

                // if we end inside the current cigar element, we just have to check if it is a deletion (or skipped region)
                if (endsWithinCigar)
                    fallsInsideDeletionOrSkippedRegion = (cigarElement.getOperator() == CigarOperator.DELETION || cigarElement.getOperator() == CigarOperator.SKIPPED_REGION) ;

                // if we end outside the current cigar element, we need to check if the next element is an insertion, deletion or skipped region.
                else {
                    nextCigarElement = cigarElementIterator.next();

                    // if it's an insertion, we need to clip the whole insertion before looking at the next element
                    if (nextCigarElement.getOperator() == CigarOperator.INSERTION) {
                        readBases += nextCigarElement.getLength();
                        if (!cigarElementIterator.hasNext()) {
                            if (allowGoalNotReached) {
                                return new Pair<Integer, Boolean>(CLIPPING_GOAL_NOT_REACHED, false);
                            } else {
                                throw new ReviewedGATKException(String.format("Reference coordinate corresponds to a non-existent base in the read. This should never happen -- check read with alignment start: %s  and cigar: %s", alignmentStart, cigar));
                            }
                        }

                        nextCigarElement = cigarElementIterator.next();
                    }

                    // if it's a deletion (or skipped region), we will pass the information on to be handled downstream.
                    endJustBeforeDeletionOrSkippedRegion = (nextCigarElement.getOperator() == CigarOperator.DELETION || nextCigarElement.getOperator() == CigarOperator.SKIPPED_REGION);
                }

                fallsInsideOrJustBeforeDeletionOrSkippedRegion = endJustBeforeDeletionOrSkippedRegion || fallsInsideDeletionOrSkippedRegion;

                // If we reached our goal outside a deletion (or skipped region), add the shift
                if (!fallsInsideOrJustBeforeDeletionOrSkippedRegion && cigarElement.getOperator().consumesReadBases())
                    readBases += shift;

                // If we reached our goal just before a deletion (or skipped region) we need
                // to add the shift of the current cigar element but go back to it's last element to return the last
                // base before the deletion (or skipped region) (see warning in function contracts)
                else if (endJustBeforeDeletionOrSkippedRegion && cigarElement.getOperator().consumesReadBases())
                    readBases += shift - 1;

                // If we reached our goal inside a deletion (or skipped region), or just between a deletion and a skipped region,
                // then we must backtrack to the last base before the deletion (or skipped region)
                else if (fallsInsideDeletionOrSkippedRegion ||
                        (endJustBeforeDeletionOrSkippedRegion && nextCigarElement.getOperator().equals(CigarOperator.N)) ||
                        (endJustBeforeDeletionOrSkippedRegion && nextCigarElement.getOperator().equals(CigarOperator.D)))
                    readBases--;
            }
        }

        if (!goalReached) {
            if (allowGoalNotReached) {
                return new Pair<Integer, Boolean>(CLIPPING_GOAL_NOT_REACHED, false);
            } else {
                throw new ReviewedGATKException("Somehow the requested coordinate is not covered by the read. Alignment " + alignmentStart + " | " + cigar);
            }
        }

        return new Pair<Integer, Boolean>(readBases, fallsInsideOrJustBeforeDeletionOrSkippedRegion);
    }
+
+    /**
+     * Compares two SAMRecords only the basis on alignment start.  Note that
+     * comparisons are performed ONLY on the basis of alignment start; any
+     * two SAM records with the same alignment start will be considered equal.
+     *
+     * Unmapped alignments will all be considered equal.
+     */
+
+    @Requires({"read1 != null", "read2 != null"})
+    public static int compareSAMRecords(GATKSAMRecord read1, GATKSAMRecord read2) {
+        AlignmentStartComparator comp = new AlignmentStartComparator();
+        return comp.compare(read1, read2);
+    }
+
+    /**
+     * Is a base inside a read?
+     *
+     * @param read                the read to evaluate
+     * @param referenceCoordinate the reference coordinate of the base to test
+     * @return true if it is inside the read, false otherwise.
+     */
+    public static boolean isInsideRead(final GATKSAMRecord read, final int referenceCoordinate) {
+        return referenceCoordinate >= read.getAlignmentStart() && referenceCoordinate <= read.getAlignmentEnd();
+    }
+
+    /**
+     * Is this read all insertion?
+     *
+     * @param read
+     * @return whether or not the only element in the cigar string is an Insertion
+     */
+    public static boolean readIsEntirelyInsertion(GATKSAMRecord read) {
+        for (CigarElement cigarElement : read.getCigar().getCigarElements()) {
+            if (cigarElement.getOperator() != CigarOperator.INSERTION)
+                return false;
+        }
+        return true;
+    }
+
    /**
     * @see #readStartsWithInsertion(htsjdk.samtools.Cigar, boolean) with ignoreSoftClipOps set to true
     */
    public static CigarElement readStartsWithInsertion(final Cigar cigarForRead) {
        return readStartsWithInsertion(cigarForRead, true);
    }
+
+    /**
+     * Checks if a read starts with an insertion.
+     *
+     * @param cigarForRead    the CIGAR to evaluate
+     * @param ignoreSoftClipOps   should we ignore S operators when evaluating whether an I operator is at the beginning?  Note that H operators are always ignored.
+     * @return the element if it's a leading insertion or null otherwise
+     */
+    public static CigarElement readStartsWithInsertion(final Cigar cigarForRead, final boolean ignoreSoftClipOps) {
+        for ( final CigarElement cigarElement : cigarForRead.getCigarElements() ) {
+            if ( cigarElement.getOperator() == CigarOperator.INSERTION )
+                return cigarElement;
+
+            else if ( cigarElement.getOperator() != CigarOperator.HARD_CLIP && ( !ignoreSoftClipOps || cigarElement.getOperator() != CigarOperator.SOFT_CLIP) )
+                break;
+        }
+        return null;
+    }
+
+    /**
+     * Returns the coverage distribution of a list of reads within the desired region.
+     *
+     * See getCoverageDistributionOfRead for information on how the coverage is calculated.
+     *
+     * @param list          the list of reads covering the region
+     * @param startLocation the first reference coordinate of the region (inclusive)
+     * @param stopLocation  the last reference coordinate of the region (inclusive)
+     * @return an array with the coverage of each position from startLocation to stopLocation
+     */
+    public static int [] getCoverageDistributionOfReads(List<GATKSAMRecord> list, int startLocation, int stopLocation) {
+        int [] totalCoverage = new int[stopLocation - startLocation + 1];
+
+        for (GATKSAMRecord read : list) {
+            int [] readCoverage = getCoverageDistributionOfRead(read, startLocation, stopLocation);
+            totalCoverage = MathUtils.addArrays(totalCoverage, readCoverage);
+        }
+
+        return totalCoverage;
+    }
+
    /**
     * Returns the coverage distribution of a single read within the desired region.
     *
     * Note: This function counts DELETIONS (and skipped regions, soft clips) as coverage, since
     * the main purpose is to downsample reads for variant regions, and deletions count as variants.
     * The walk starts at the soft (clip-inclusive) start of the read.
     *
     * @param read          the read to get the coverage distribution of
     * @param startLocation the first reference coordinate of the region (inclusive)
     * @param stopLocation  the last reference coordinate of the region (inclusive)
     * @return an array with the coverage of each position from startLocation to stopLocation
     */
    public static int [] getCoverageDistributionOfRead(GATKSAMRecord read, int startLocation, int stopLocation) {
        int [] coverage = new int[stopLocation - startLocation + 1];
        int refLocation = read.getSoftStart();
        for (CigarElement cigarElement : read.getCigar().getCigarElements()) {
            switch (cigarElement.getOperator()) {
                // reference-advancing operators: each base adds coverage inside the window
                case S:
                case M:
                case EQ:
                case N:
                case X:
                case D:
                    for (int i = 0; i < cigarElement.getLength(); i++) {
                        if (refLocation >= startLocation && refLocation <= stopLocation) {
                            coverage[refLocation - startLocation]++;
                        }
                        refLocation++;
                    }
                    break;

                // operators that consume no reference bases: contribute no coverage
                case P:
                case I:
                case H:
                    break;
            }

            // past the window: no later element can contribute, stop early
            if (refLocation > stopLocation)
                break;
        }
        return coverage;
    }
+
+    /**
+     * Makes association maps for the reads and loci coverage as described below :
+     *
+     *  - First: locusToReadMap -- a HashMap that describes for each locus, which reads contribute to its coverage.
+     *    Note: Locus is in reference coordinates.
+     *    Example: Locus => {read1, read2, ..., readN}
+     *
+     *  - Second: readToLocusMap -- a HashMap that describes for each read what loci it contributes to the coverage.
+     *    Note: Locus is a boolean array, indexed from 0 (= startLocation) to N (= stopLocation), with value==true meaning it contributes to the coverage.
+     *    Example: Read => {true, true, false, ... false}
+     *
+     * @param readList      the list of reads to generate the association mappings
+     * @param startLocation the first reference coordinate of the region (inclusive)
+     * @param stopLocation  the last reference coordinate of the region (inclusive)
+     * @return the two hashmaps described above
+     */
+    public static Pair<HashMap<Integer, HashSet<GATKSAMRecord>> , HashMap<GATKSAMRecord, Boolean[]>> getBothReadToLociMappings (List<GATKSAMRecord> readList, int startLocation, int stopLocation) {
+        int arraySize = stopLocation - startLocation + 1;
+
+        HashMap<Integer, HashSet<GATKSAMRecord>> locusToReadMap = new HashMap<Integer, HashSet<GATKSAMRecord>>(2*(stopLocation - startLocation + 1), 0.5f);
+        HashMap<GATKSAMRecord, Boolean[]> readToLocusMap = new HashMap<GATKSAMRecord, Boolean[]>(2*readList.size(), 0.5f);
+
+        for (int i = startLocation; i <= stopLocation; i++)
+            locusToReadMap.put(i, new HashSet<GATKSAMRecord>()); // Initialize the locusToRead map with empty lists
+
+        for (GATKSAMRecord read : readList) {
+            readToLocusMap.put(read, new Boolean[arraySize]);       // Initialize the readToLocus map with empty arrays
+
+            int [] readCoverage = getCoverageDistributionOfRead(read, startLocation, stopLocation);
+
+            for (int i = 0; i < readCoverage.length; i++) {
+                int refLocation = i + startLocation;
+                if (readCoverage[i] > 0) {
+                    // Update the hash for this locus
+                    HashSet<GATKSAMRecord> readSet = locusToReadMap.get(refLocation);
+                    readSet.add(read);
+
+                    // Add this locus to the read hash
+                    readToLocusMap.get(read)[refLocation - startLocation] = true;
+                }
+                else
+                    // Update the boolean array with a 'no coverage' from this read to this locus
+                    readToLocusMap.get(read)[refLocation-startLocation] = false;
+            }
+        }
+        return new Pair<HashMap<Integer, HashSet<GATKSAMRecord>>, HashMap<GATKSAMRecord, Boolean[]>>(locusToReadMap, readToLocusMap);
+    }
+
+    /**
+     * Create random read qualities
+     *
+     * @param length the length of the read
+     * @return an array with randomized base qualities between 0 and 50
+     */
+    public static byte[] createRandomReadQuals(int length) {
+        Random random = Utils.getRandomGenerator();
+        byte[] quals = new byte[length];
+        for (int i = 0; i < length; i++)
+            quals[i] = (byte) random.nextInt(50);
+        return quals;
+    }
+
+    /**
+     * Create random read qualities
+     *
+     * @param length  the length of the read
+     * @param allowNs whether or not to allow N's in the read
+     * @return an array with randomized bases (A-N) with equal probability
+     */
+    public static byte[] createRandomReadBases(int length, boolean allowNs) {
+        Random random = Utils.getRandomGenerator();
+        int numberOfBases = allowNs ? 5 : 4;
+        byte[] bases = new byte[length];
+        for (int i = 0; i < length; i++) {
+            switch (random.nextInt(numberOfBases)) {
+                case 0:
+                    bases[i] = 'A';
+                    break;
+                case 1:
+                    bases[i] = 'C';
+                    break;
+                case 2:
+                    bases[i] = 'G';
+                    break;
+                case 3:
+                    bases[i] = 'T';
+                    break;
+                case 4:
+                    bases[i] = 'N';
+                    break;
+                default:
+                    throw new ReviewedGATKException("Something went wrong, this is just impossible");
+            }
+        }
+        return bases;
+    }
+
    /**
     * Creates an artificial read of the given length with random bases (N's allowed) and qualities.
     *
     * @param length the length of the read
     * @return a randomized artificial read
     */
    public static GATKSAMRecord createRandomRead(int length) {
        return createRandomRead(length, true);
    }
+
+    public static GATKSAMRecord createRandomRead(int length, boolean allowNs) {
+        byte[] quals = ReadUtils.createRandomReadQuals(length);
+        byte[] bbases = ReadUtils.createRandomReadBases(length, allowNs);
+        return ArtificialSAMUtils.createArtificialRead(bbases, quals, bbases.length + "M");
+    }
+
+
+    public static String prettyPrintSequenceRecords ( SAMSequenceDictionary sequenceDictionary ) {
+        String[] sequenceRecordNames = new String[sequenceDictionary.size()];
+        int sequenceRecordIndex = 0;
+        for (SAMSequenceRecord sequenceRecord : sequenceDictionary.getSequences())
+            sequenceRecordNames[sequenceRecordIndex++] = sequenceRecord.getSequenceName();
+        return Arrays.deepToString(sequenceRecordNames);
+    }
+
    /**
     * Calculates the reference coordinate for a read coordinate
     *
     * @param read   the read
     * @param offset the base in the read (coordinate in the read)
     * @return the reference coordinate correspondent to this base
     */
    public static long getReferenceCoordinateForReadCoordinate(GATKSAMRecord read, int offset) {
        if (offset > read.getReadLength()) 
            throw new ReviewedGATKException(String.format(OFFSET_OUT_OF_BOUNDS_EXCEPTION, offset, read.getReadLength()));

        long location = read.getAlignmentStart();
        Iterator<CigarElement> cigarElementIterator = read.getCigar().getCigarElements().iterator();
        while (offset > 0 && cigarElementIterator.hasNext()) {
            CigarElement cigarElement = cigarElementIterator.next();
            long move = 0;
            // NOTE(review): only reference-consuming operators decrement 'offset' here. Operators
            // that consume read bases but no reference (I, S) leave 'offset' untouched, and
            // reference-only operators (D, N) are capped by 'offset' — so for cigars containing
            // indels/clips the mapping looks off; confirm the intended semantics before relying
            // on this for non-trivial cigars.
            if (cigarElement.getOperator().consumesReferenceBases())  
                move = (long) Math.min(cigarElement.getLength(), offset);
            location += move;
            offset -= move;
        }
        // cigar exhausted before the requested offset was consumed
        if (offset > 0 && !cigarElementIterator.hasNext()) 
            throw new ReviewedGATKException(OFFSET_NOT_ZERO_EXCEPTION);

        return location;
    }
+
+    /**
+     * Creates a map with each event in the read (cigar operator) and the read coordinate where it happened.
+     *
+     * Example:
+     *  D -> 2, 34, 75
+     *  I -> 55
+     *  S -> 0, 101
+     *  H -> 101
+     *
+     * @param read the read
+     * @return a map with the properties described above. See example
+     */
+    public static Map<CigarOperator, ArrayList<Integer>> getCigarOperatorForAllBases (GATKSAMRecord read) {
+        Map<CigarOperator, ArrayList<Integer>> events = new HashMap<CigarOperator, ArrayList<Integer>>();
+
+        int position = 0;
+        for (CigarElement cigarElement : read.getCigar().getCigarElements()) {
+            CigarOperator op = cigarElement.getOperator();
+            if (op.consumesReadBases()) {
+                ArrayList<Integer> list = events.get(op);
+                if (list == null) {
+                    list = new ArrayList<Integer>();
+                    events.put(op, list);
+                }
+                for (int i = position; i < cigarElement.getLength(); i++)
+                    list.add(position++);
+            }
+            else {
+                ArrayList<Integer> list = events.get(op);
+                if (list == null) {
+                    list = new ArrayList<Integer>();
+                    events.put(op, list);
+                }
+                list.add(position);
+            }
+        }
+        return events;
+    }
+
+    /**
+     * Given a read, outputs the read bases in a string format
+     *
+     * @param read the read
+     * @return a string representation of the read bases
+     */
+    public static String convertReadBasesToString(GATKSAMRecord read) {
+        String bases = "";
+        for (byte b : read.getReadBases()) {
+            bases += (char) b;
+        }
+        return bases.toUpperCase();
+    }
+
+    /**
+     * Given a read, outputs the base qualities in a string format
+     *
+     * @param quals the read qualities
+     * @return a string representation of the base qualities
+     */
+    public static String convertReadQualToString(byte[] quals) {
+        String result = "";
+        for (byte b : quals) {
+            result += (char) (33 + b);
+        }
+        return result;
+    }
+
    /**
     * Given a read, outputs its base qualities in FASTQ/SAM string format (Phred+33).
     *
     * @param read the read
     * @return a string representation of the base qualities
     */
    public static String convertReadQualToString(GATKSAMRecord read) {
        return convertReadQualToString(read.getBaseQualities());
    }
+
+    /**
+     * Returns the reverse complement of the read bases
+     *
+     * @param bases the read bases
+     * @return the reverse complement of the read bases
+     */
+    public static String getBasesReverseComplement(byte[] bases) {
+        String reverse = "";
+        for (int i = bases.length-1; i >=0; i--) {
+            reverse += (char) BaseUtils.getComplement(bases[i]);
+        }
+        return reverse;
+    }
+
    /**
     * Returns the reverse complement of the read's bases.
     *
     * @param read the read
     * @return the reverse complement of the read bases
     */
    public static String getBasesReverseComplement(GATKSAMRecord read) {
        return getBasesReverseComplement(read.getReadBases());
    }
+
+    /**
+     * Calculate the maximum read length from the given list of reads.
+     * @param reads list of reads
+     * @return      non-negative integer
+     */
+    @Ensures({"result >= 0"})
+    public static int getMaxReadLength( final List<GATKSAMRecord> reads ) {
+        if( reads == null ) { throw new IllegalArgumentException("Attempting to check a null list of reads."); }
+
+        int maxReadLength = 0;
+        for( final GATKSAMRecord read : reads ) {
+            maxReadLength = Math.max(maxReadLength, read.getReadLength());
+        }
+        return maxReadLength;
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/sam/SAMReaderBuilder.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/sam/SAMReaderBuilder.java
new file mode 100644
index 0000000..6b72d0c
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/sam/SAMReaderBuilder.java
@@ -0,0 +1,102 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.sam;
+
+import htsjdk.samtools.SamReader;
+import htsjdk.samtools.SamReaderFactory;
+import htsjdk.samtools.ValidationStringency;
+import org.broadinstitute.gatk.utils.io.ReferenceBacked;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+
+import java.io.File;
+
+/**
+ * Allows the user to steadily accumulate information about what
+ * components go into a SAM file reader, ultimately using this
+ * information to create a SAM reader on demand.
+ *
+ * @author mhanna
+ * @version 0.1
+ */
+public class SAMReaderBuilder implements ReferenceBacked {
+    /**
+     * From which file should input be read?
+     */
+    private File samFile = null;
+
+    /**
+     * The reference file for the samFile.
+     */
+    private File referenceFile = null;
+
+    /**
+     * What validation stringency should be applied when reading this file?
+     */
+    private ValidationStringency validationStringency = null;
+
+    /**
+     * Sets the handle of the sam file from which data should be read.
+     * @param samFile The SAM file from which data should flow.
+     */
+    public void setSAMFile( File samFile ) {
+        this.samFile = samFile;
+    }
+
+    @Override
+    public File getReferenceFile() {
+        return referenceFile;
+    }
+
+    @Override
+    public void setReferenceFile(final File referenceFile) {
+        this.referenceFile = referenceFile;
+    }
+
+    /**
+     * Sets the validation stringency to apply when reading this sam file.
+     * @param validationStringency Stringency to apply.  Must not be null.
+     */
+    public void setValidationStringency( ValidationStringency validationStringency ) {
+        this.validationStringency = validationStringency;
+    }
+
+    /**
+     * Create the SAM reader, given the constituent parts accrued.
+     * @return Newly minted SAM file reader.
+     * @throws ReviewedGATKException if the sam file or validation stringency was never supplied.
+     */
+    public SamReader build() {
+        // Fail fast with accurate messages; the originals were copy-pasted from a writer builder.
+        if( samFile == null )
+            throw new ReviewedGATKException( "Filename for input sam file must be supplied.");
+        if( validationStringency == null )
+            throw new ReviewedGATKException( "Validation stringency for input sam file must be supplied.");
+
+        return SamReaderFactory
+                .makeDefault()
+                .referenceSequence(this.getReferenceFile())
+                .validationStringency(validationStringency)
+                .open(samFile);
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/sam/SAMReaderID.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/sam/SAMReaderID.java
new file mode 100644
index 0000000..1338f7c
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/sam/SAMReaderID.java
@@ -0,0 +1,134 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.sam;
+
+import org.broadinstitute.gatk.utils.commandline.Tags;
+
+import java.io.File;
+
+/**
+ * Uniquely identifies a SAM file reader.
+ *
+ * @author mhanna
+ * @version 0.1
+ */
+public class SAMReaderID implements Comparable {
+    /**
+     * The SAM file at the heart of this reader.  SAMReaderID
+     * currently supports only file-based readers.
+     */
+    private final File samFile;
+
+    /**
+     * A list of tags associated with this BAM file.
+     */
+    private final Tags tags;
+
+    /**
+     * Creates an identifier for a SAM file reader.
+     * @param samFile The source file for SAM data.
+     * @param tags tags to use when creating a reader ID.
+     */
+    public SAMReaderID(File samFile, Tags tags) {
+        this.samFile = samFile;
+        this.tags = tags;
+    }
+
+    /**
+     * Creates an identifier for a SAM file reader.
+     * @param samFileName The source filename for SAM data.
+     * @param tags tags to use when creating a reader ID.
+     */
+    public SAMReaderID(String samFileName, Tags tags) {
+        this(new File(samFileName),tags);
+    }
+
+    /**
+     * Gets the absolute pathname of this SAM file
+     * @return  The absolute pathname of this reader's SAM file,
+     *          or null if this reader has no associated SAM file
+     */
+    public String getSamFilePath() {
+        if ( samFile == null ) {
+            return null;
+        }
+
+        return samFile.getAbsolutePath();
+    }
+
+    /**
+     * Gets the SAM file at the heart of this reader.  SAMReaderID
+     * currently supports only file-based readers.
+     * @return the SAM file at the heart of this reader.
+     */
+    public File getSamFile() {
+        return samFile;
+    }
+
+    /**
+     * Gets the tags associated with the given BAM file.
+     * @return A collection of the tags associated with this file.
+     */
+    public Tags getTags() {
+        return tags;
+    }
+
+    /**
+     * Compare two IDs to see whether they're equal.
+     * @param other The other identifier.
+     * @return True iff the two readers point to the same file.
+     */
+    @Override
+    public boolean equals(Object other) {
+        if(other == null) return false;
+        if(!(other instanceof SAMReaderID)) return false;
+
+        SAMReaderID otherID = (SAMReaderID)other;
+        // Null-safe comparison: getSamFilePath() documents that it returns null when no
+        // file is associated, which previously produced a NullPointerException here.
+        final String thisPath = this.getSamFilePath();
+        final String otherPath = otherID.getSamFilePath();
+        return thisPath == null ? otherPath == null : thisPath.equals(otherPath);
+    }
+
+    /**
+     * Generate a hash code for this object.
+     * @return A hash code, based solely on the file name at this point.
+     */
+    @Override
+    public int hashCode() {
+        // Null-safe, keeping hashCode() consistent with equals() for file-less IDs.
+        return samFile == null ? 0 : samFile.getAbsolutePath().hashCode();
+    }
+
+    /**
+     * Best string representation for a SAM file reader is the path of the source file.
+     */
+    @Override
+    public String toString() {
+        return getSamFilePath();
+    }
+
+    @Override
+    public int compareTo(Object other) {
+        // NOTE(review): assumes both IDs carry a non-null samFile -- TODO confirm callers
+        // never order file-less IDs.
+        return this.samFile.getAbsolutePath().compareTo(((SAMReaderID)other).samFile.getAbsolutePath());
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/sam/SimplifyingSAMFileWriter.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/sam/SimplifyingSAMFileWriter.java
new file mode 100644
index 0000000..c431528
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/sam/SimplifyingSAMFileWriter.java
@@ -0,0 +1,86 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.sam;
+
+import htsjdk.samtools.SAMFileHeader;
+import htsjdk.samtools.SAMFileWriter;
+import htsjdk.samtools.SAMRecord;
+import htsjdk.samtools.util.ProgressLoggerInterface;
+
+/**
+ * A {@link SAMFileWriter} decorator that simplifies reads before forwarding them to a
+ * destination writer: reads that are unmapped, vendor-failed, duplicate, or secondary
+ * are dropped, and all attributes except the read group (RG) are stripped from the
+ * reads that are kept.
+ */
+public class SimplifyingSAMFileWriter implements SAMFileWriter {
+    /** The writer that ultimately receives the simplified reads. */
+    final SAMFileWriter dest;
+
+    /**
+     * Creates a simplifying writer that forwards to the given destination.
+     * @param finalDestination the writer that receives the simplified reads
+     */
+    public SimplifyingSAMFileWriter(final SAMFileWriter finalDestination) {
+        this.dest = finalDestination;
+    }
+
+    /**
+     * Writes the read to the destination if it passes the exclusion filter,
+     * simplifying it (in place) first.
+     * @param read the read to (potentially) write
+     */
+    @Override
+    public void addAlignment( SAMRecord read ) {
+        if ( keepRead(read) ) {
+            dest.addAlignment(simplifyRead(read));
+        }
+    }
+
+    /**
+     * Retrieves the header to use when creating the new SAM file.
+     * @return header to use when creating the new SAM file.
+     */
+    @Override
+    public SAMFileHeader getFileHeader() {
+        return dest.getFileHeader();
+    }
+
+    /**
+     * {@inheritDoc}
+     */
+    @Override
+    public void close() {
+        dest.close();
+    }
+
+    /**
+     * Should this read be kept (i.e., not excluded)?
+     * @param read the read to test
+     * @return true iff the read passes all exclusion criteria
+     */
+    public static final boolean keepRead(SAMRecord read) {
+        return ! excludeRead(read);
+    }
+
+    /**
+     * Should this read be dropped?  True for unmapped, vendor-failed, duplicate,
+     * and non-primary reads.
+     * @param read the read to test
+     * @return true iff the read should be excluded
+     */
+    public static final boolean excludeRead(SAMRecord read) {
+        return read.getReadUnmappedFlag() || read.getReadFailsVendorQualityCheckFlag() || read.getDuplicateReadFlag() || read.getNotPrimaryAlignmentFlag();
+    }
+
+    /**
+     * Strips all attributes from the read except the read group (RG).
+     * @param read the read to simplify (modified in place)
+     * @return the same read instance, simplified
+     */
+    public static final SAMRecord simplifyRead(SAMRecord read) {
+        // the only attribute we keep is the RG
+        final Object rg = read.getAttribute("RG");
+        read.clearAttributes();
+        read.setAttribute("RG", rg);
+        return read;
+    }
+
+    @Override
+    public void setProgressLogger(final ProgressLoggerInterface logger) {
+        dest.setProgressLogger(logger);
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/sam/package-info.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/sam/package-info.java
new file mode 100644
index 0000000..c41b5bf
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/sam/package-info.java
@@ -0,0 +1,26 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.sam;
\ No newline at end of file
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/smithwaterman/GlobalEdgeGreedySWPairwiseAlignment.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/smithwaterman/GlobalEdgeGreedySWPairwiseAlignment.java
new file mode 100644
index 0000000..66b8970
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/smithwaterman/GlobalEdgeGreedySWPairwiseAlignment.java
@@ -0,0 +1,208 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.smithwaterman;
+
+import htsjdk.samtools.Cigar;
+import htsjdk.samtools.CigarElement;
+import org.broadinstitute.gatk.utils.Utils;
+import org.broadinstitute.gatk.utils.sam.AlignmentUtils;
+
+import java.util.*;
+
+/**
+ * Pairwise discrete Smith-Waterman alignment with an edge greedy implementation
+ *
+ * Greedily matches identical bases from both ends of the two sequences first, and runs
+ * the full Smith-Waterman dynamic program only on the middle (non-matching) portions.
+ *
+ * ************************************************************************
+ * ****                    IMPORTANT NOTE:                             ****
+ * ****  This class assumes that all bytes come from UPPERCASED chars! ****
+ * ************************************************************************
+ *
+ * User: ebanks
+ */
+public final class GlobalEdgeGreedySWPairwiseAlignment extends SWPairwiseAlignment {
+
+    // When true, the alignment matrices are printed to stdout; debugging aid only.
+    private final static boolean DEBUG_MODE = false;
+
+    /**
+     * Create a new greedy SW pairwise aligner
+     *
+     * @param reference the reference sequence we want to align
+     * @param alternate the alternate sequence we want to align
+     * @param parameters the SW parameters to use
+     */
+    public GlobalEdgeGreedySWPairwiseAlignment(final byte[] reference, final byte[] alternate, final Parameters parameters) {
+        super(reference, alternate, parameters);
+    }
+
+    /**
+     * Create a new SW pairwise aligner
+     *
+     * After creating the object the two sequences are aligned with an internal call to align(seq1, seq2)
+     *
+     * @param reference the reference sequence we want to align
+     * @param alternate the alternate sequence we want to align
+     * @param namedParameters the named parameter set to get our parameters from
+     */
+    public GlobalEdgeGreedySWPairwiseAlignment(final byte[] reference, final byte[] alternate, final SWParameterSet namedParameters) {
+        this(reference, alternate, namedParameters.parameters);
+    }
+
+    /**
+     * @see #GlobalEdgeGreedySWPairwiseAlignment(byte[], byte[], SWParameterSet) with original default parameters
+     */
+    public GlobalEdgeGreedySWPairwiseAlignment(byte[] reference, byte[] alternate) {
+        this(reference, alternate, SWParameterSet.ORIGINAL_DEFAULT);
+    }
+
+    /**
+     * Aligns the alternate sequence to the reference sequence
+     *
+     * @param reference  ref sequence
+     * @param alternate  alt sequence
+     */
+    @Override
+    protected void align(final byte[] reference, final byte[] alternate) {
+        if ( reference == null || reference.length == 0 )
+            throw new IllegalArgumentException("Non-null, non-empty reference sequences are required for the Smith-Waterman calculation");
+        if ( alternate == null || alternate.length == 0 )
+            throw new IllegalArgumentException("Non-null, non-empty alternate sequences are required for the Smith-Waterman calculation");
+
+        // Greedily match identical bases from the front of both sequences.
+        final int forwardEdgeMatch = Utils.longestCommonPrefix(reference, alternate, Integer.MAX_VALUE);
+
+        // edge case: one sequence is a strict prefix of the other
+        if ( forwardEdgeMatch == reference.length || forwardEdgeMatch == alternate.length ) {
+            alignmentResult = new SWPairwiseAlignmentResult(makeCigarForStrictPrefixAndSuffix(reference, alternate, forwardEdgeMatch, 0), 0);
+            return;
+        }
+
+        // Greedily match identical bases from the back of both sequences.
+        int reverseEdgeMatch = Utils.longestCommonSuffix(reference, alternate, Integer.MAX_VALUE);
+
+        // edge case: one sequence is a strict suffix of the other
+        if ( reverseEdgeMatch == reference.length || reverseEdgeMatch == alternate.length ) {
+            alignmentResult = new SWPairwiseAlignmentResult(makeCigarForStrictPrefixAndSuffix(reference, alternate, 0, reverseEdgeMatch), 0);
+            return;
+        }
+
+        // Only the middle (non-matching) portions need the full SW alignment.
+        final int sizeOfRefToAlign = reference.length - forwardEdgeMatch - reverseEdgeMatch;
+        final int sizeOfAltToAlign = alternate.length - forwardEdgeMatch - reverseEdgeMatch;
+
+        // edge case: one sequence is a strict subset of the other accounting for both prefix and suffix
+        final int minSizeToAlign = Math.min(sizeOfRefToAlign, sizeOfAltToAlign);
+        if ( minSizeToAlign < 0 )
+            // prefix and suffix matches overlap; shrink the suffix match to compensate
+            reverseEdgeMatch += minSizeToAlign;
+        if ( sizeOfRefToAlign <= 0 || sizeOfAltToAlign <= 0 ) {
+            alignmentResult = new SWPairwiseAlignmentResult(makeCigarForStrictPrefixAndSuffix(reference, alternate, forwardEdgeMatch, reverseEdgeMatch), 0);
+            return;
+        }
+
+        final byte[] refToAlign = Utils.trimArray(reference, forwardEdgeMatch, reverseEdgeMatch);
+        final byte[] altToAlign = Utils.trimArray(alternate, forwardEdgeMatch, reverseEdgeMatch);
+
+        final int[][] sw = new int[(sizeOfRefToAlign+1)][(sizeOfAltToAlign+1)];
+        if ( keepScoringMatrix ) SW = sw;
+        final int[][] btrack = new int[(sizeOfRefToAlign+1)][(sizeOfAltToAlign+1)];
+
+        calculateMatrix(refToAlign, altToAlign, sw, btrack, OVERHANG_STRATEGY.INDEL);
+
+        if ( DEBUG_MODE ) {
+            System.out.println(new String(refToAlign) + " vs. " + new String(altToAlign));
+            debugMatrix(sw);
+            System.out.println("----");
+            debugMatrix(btrack);
+            System.out.println();
+        }
+
+        alignmentResult = calculateCigar(forwardEdgeMatch, reverseEdgeMatch, sw, btrack);
+    }
+
+
+    /**
+     * Prints the given matrix to stdout, one row per line (debugging aid).
+     */
+    private void debugMatrix(final int[][] matrix) {
+        for ( int i = 0; i < matrix.length; i++ ) {
+            int [] cur = matrix[i];
+            for ( int j = 0; j < cur.length; j++ )
+                System.out.print(cur[j] + " ");
+            System.out.println();
+        }
+    }
+
+    /**
+     * Creates a CIGAR for the case where the prefix/suffix match combination encompasses an entire sequence
+     *
+     * @param reference            the reference sequence
+     * @param alternate            the alternate sequence
+     * @param matchingPrefix       the prefix match size
+     * @param matchingSuffix       the suffix match size
+     * @return non-null CIGAR
+     */
+    private Cigar makeCigarForStrictPrefixAndSuffix(final byte[] reference, final byte[] alternate, final int matchingPrefix, final int matchingSuffix) {
+
+        final List<CigarElement> result = new ArrayList<CigarElement>();
+
+        // edge case: no D or I element
+        if ( reference.length == alternate.length ) {
+            result.add(makeElement(State.MATCH, matchingPrefix + matchingSuffix));
+        } else {
+            // add the first M element
+            if ( matchingPrefix > 0 )
+                result.add(makeElement(State.MATCH, matchingPrefix));
+
+            // add the D or I element
+            if ( alternate.length > reference.length )
+                result.add(makeElement(State.INSERTION, alternate.length - reference.length));
+            else // if ( reference.length > alternate.length )
+                result.add(makeElement(State.DELETION, reference.length - alternate.length));
+
+            // add the last M element
+            if ( matchingSuffix > 0 )
+                result.add(makeElement(State.MATCH, matchingSuffix));
+        }
+
+        return new Cigar(result);
+    }
+
+    /**
+     * Calculates the CIGAR for the alignment from the back track matrix
+     *
+     * @param matchingPrefix       the prefix match size
+     * @param matchingSuffix       the suffix match size
+     * @param sw                   the Smith-Waterman matrix to use
+     * @param btrack               the back track matrix to use
+     * @return non-null SWPairwiseAlignmentResult object
+     */
+    protected SWPairwiseAlignmentResult calculateCigar(final int matchingPrefix, final int matchingSuffix,
+                                                       final int[][] sw, final int[][] btrack) {
+
+        // Run the standard SW backtrack over the middle portion, then re-attach the greedy edge matches.
+        final SWPairwiseAlignmentResult SW_result = calculateCigar(sw, btrack, OVERHANG_STRATEGY.INDEL);
+
+        final LinkedList<CigarElement> lce = new LinkedList<CigarElement>(SW_result.cigar.getCigarElements());
+        if ( matchingPrefix > 0 )
+            lce.addFirst(makeElement(State.MATCH, matchingPrefix));
+        if ( matchingSuffix > 0 )
+            lce.addLast(makeElement(State.MATCH, matchingSuffix));
+
+        return new SWPairwiseAlignmentResult(AlignmentUtils.consolidateCigar(new Cigar(lce)), 0);
+    }
+}
\ No newline at end of file
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/smithwaterman/Parameters.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/smithwaterman/Parameters.java
new file mode 100644
index 0000000..f423a1a
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/smithwaterman/Parameters.java
@@ -0,0 +1,62 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.smithwaterman;
+
+/**
+ * Holds the core Smith-Waterman alignment parameters:
+ *
+ * match value, and mismatch, gap open and gap extension penalties
+ *
+ * User: depristo
+ * Date: 4/11/13
+ * Time: 12:03 PM
+ */
+public final class Parameters {
+    // match score (typically positive)
+    public final int w_match;
+    // mismatch penalty (non-positive by construction)
+    public final int w_mismatch;
+    // gap open penalty (non-positive by construction)
+    public final int w_open;
+    // gap extension penalty (non-positive by construction)
+    public final int w_extend;
+
+    /**
+     * Create a new set of SW parameters
+     * @param w_match the match score
+     * @param w_mismatch the mismatch penalty
+     * @param w_open the gap open penalty
+     * @param w_extend the gap extension penalty
+     */
+    public Parameters(final int w_match, final int w_mismatch, final int w_open, final int w_extend) {
+        // Penalties are modeled as non-positive values; reject positive inputs early.
+        if ( w_mismatch > 0 ) throw new IllegalArgumentException("w_mismatch must be <= 0 but got " + w_mismatch);
+        if ( w_open> 0 ) throw new IllegalArgumentException("w_open must be <= 0 but got " + w_open);
+        if ( w_extend> 0 ) throw new IllegalArgumentException("w_extend must be <= 0 but got " + w_extend);
+
+        this.w_match = w_match;
+        this.w_mismatch = w_mismatch;
+        this.w_open = w_open;
+        this.w_extend = w_extend;
+    }
+
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/smithwaterman/SWPairwiseAlignment.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/smithwaterman/SWPairwiseAlignment.java
new file mode 100644
index 0000000..b750b35
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/smithwaterman/SWPairwiseAlignment.java
@@ -0,0 +1,599 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.smithwaterman;
+
+import htsjdk.samtools.Cigar;
+import htsjdk.samtools.CigarElement;
+import htsjdk.samtools.CigarOperator;
+import org.broadinstitute.gatk.utils.exceptions.GATKException;
+import org.broadinstitute.gatk.utils.sam.AlignmentUtils;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;
+
+/**
+ * Pairwise discrete smith-waterman alignment
+ *
+ * ************************************************************************
+ * ****                    IMPORTANT NOTE:                             ****
+ * ****  This class assumes that all bytes come from UPPERCASED chars! ****
+ * ************************************************************************
+ *
+ * User: asivache
+ * Date: Mar 23, 2009
+ * Time: 1:54:54 PM
+ */
+public class SWPairwiseAlignment implements SmithWaterman {
+
+    // Result of the most recent align() call; read by getCigar()/getAlignmentStart2wrt1().
+    protected SWPairwiseAlignmentResult alignmentResult;
+
+    // Scoring parameters (match/mismatch/gap-open/gap-extend) used by this aligner.
+    protected final Parameters parameters;
+
+    /**
+     * The state of a trace step through the matrix
+     */
+    protected enum State {
+        MATCH,
+        INSERTION,
+        DELETION,
+        CLIP
+    }
+
+    /**
+     * What strategy should we use when the best path does not start/end at the corners of the matrix?
+     */
+    public enum OVERHANG_STRATEGY {
+        /*
+         * Add softclips for the overhangs
+         */
+        SOFTCLIP,
+
+        /*
+         * Treat the overhangs as proper insertions/deletions
+         */
+        INDEL,
+
+        /*
+         * Treat the overhangs as proper insertions/deletions for leading (but not trailing) overhangs.
+         * This is useful e.g. when we want to merge dangling tails in an assembly graph: because we don't
+         * expect the dangling tail to reach the end of the reference path we are okay ignoring trailing
+         * deletions - but leading indels are still very much relevant.
+         */
+        LEADING_INDEL,
+
+        /*
+         * Just ignore the overhangs
+         */
+        IGNORE
+    }
+
+    // NOTE(review): static flag shared by all instances; purpose not evident from this file -- TODO confirm.
+    protected static boolean cutoff = false;
+
+    // Overhang strategy used by align(); defaults to soft-clipping the overhangs.
+    protected OVERHANG_STRATEGY overhang_strategy = OVERHANG_STRATEGY.SOFTCLIP;
+
+    /**
+     * The SW scoring matrix, stored for debugging purposes if keepScoringMatrix is true
+     */
+    protected int[][] SW = null;
+
+    /**
+     * Only for testing purposes in the SWPairwiseAlignmentMain function
+     * set to true to keep SW scoring matrix after align call
+     */
+    protected static boolean keepScoringMatrix = false;
+
+    /**
+     * Create a new SW pairwise aligner.
+     *
+     * @deprecated in favor of constructors using the Parameter or ParameterSet class
+     */
+    @Deprecated
+    public SWPairwiseAlignment(byte[] seq1, byte[] seq2, int match, int mismatch, int open, int extend ) {
+        this(seq1, seq2, new Parameters(match, mismatch, open, extend));
+    }
+
+    /**
+     * Create a new SW pairwise aligner
+     *
+     * After creating the object the two sequences are aligned with an internal call to align(seq1, seq2)
+     *
+     * @param seq1 the first sequence we want to align
+     * @param seq2 the second sequence we want to align
+     * @param parameters the SW parameters to use
+     */
+    public SWPairwiseAlignment(byte[] seq1, byte[] seq2, Parameters parameters) {
+        this(parameters);
+        align(seq1,seq2);
+    }
+
+    /**
+     * Create a new SW pairwise aligner
+     *
+     * After creating the object the two sequences are aligned with an internal call to align(seq1, seq2)
+     *
+     * @param seq1 the first sequence we want to align
+     * @param seq2 the second sequence we want to align
+     * @param parameters the SW parameters to use
+     * @param strategy   the overhang strategy to use
+     */
+    public SWPairwiseAlignment(final byte[] seq1, final byte[] seq2, final SWParameterSet parameters, final OVERHANG_STRATEGY strategy) {
+        this(parameters.parameters);
+        overhang_strategy = strategy;
+        align(seq1, seq2);
+    }
+
+    /**
+     * Create a new SW pairwise aligner, without actually doing any alignment yet
+     *
+     * @param parameters the SW parameters to use
+     */
+    protected SWPairwiseAlignment(final Parameters parameters) {
+        this.parameters = parameters;
+    }
+
+    /**
+     * Create a new SW pairwise aligner
+     *
+     * After creating the object the two sequences are aligned with an internal call to align(seq1, seq2)
+     *
+     * @param seq1 the first sequence we want to align
+     * @param seq2 the second sequence we want to align
+     * @param namedParameters the named parameter set to get our parameters from
+     * @throws IllegalArgumentException if either sequence is null or empty (enforced by align)
+     */
+    public SWPairwiseAlignment(byte[] seq1, byte[] seq2, SWParameterSet namedParameters) {
+        this(seq1, seq2, namedParameters.parameters);
+    }
+
+    /**
+     * Create a new SW pairwise aligner using the ORIGINAL_DEFAULT parameter set,
+     * aligning the two sequences immediately.
+     *
+     * @param seq1 the first sequence we want to align
+     * @param seq2 the second sequence we want to align
+     */
+    public SWPairwiseAlignment(byte[] seq1, byte[] seq2) {
+        this(seq1,seq2,SWParameterSet.ORIGINAL_DEFAULT);
+    }
+
+    /**
+     * @return the cigar of the most recent alignment.
+     * NOTE(review): alignmentResult is only assigned by align(); all public constructors call it,
+     * but subclasses using the protected constructor must align first or this will NPE.
+     */
+    @Override
+    public Cigar getCigar() { return alignmentResult.cigar ; }
+
+    /**
+     * @return the offset of the start of seq2 with respect to seq1 from the most recent alignment.
+     * NOTE(review): like getCigar(), this requires that align() has already run.
+     */
+    @Override
+    public int getAlignmentStart2wrt1() { return alignmentResult.alignment_offset; }
+
+    /**
+     * Aligns the alternate sequence to the reference sequence
+     *
+     * Allocates the (ref+1) x (alt+1) scoring and backtrack matrices, fills them via
+     * calculateMatrix(), and stores the resulting cigar/offset in alignmentResult.
+     *
+     * @param reference  ref sequence
+     * @param alternate  alt sequence
+     * @throws IllegalArgumentException if either sequence is null or empty
+     */
+    protected void align(final byte[] reference, final byte[] alternate) {
+        if ( reference == null || reference.length == 0 || alternate == null || alternate.length == 0 )
+            throw new IllegalArgumentException("Non-null, non-empty sequences are required for the Smith-Waterman calculation");
+
+        final int n = reference.length+1;
+        final int m = alternate.length+1;
+        int[][] sw = new int[n][m];
+        if ( keepScoringMatrix ) SW = sw;   // expose the scoring matrix for debugging/printing
+        int[][] btrack=new int[n][m];
+
+        calculateMatrix(reference, alternate, sw, btrack);
+        alignmentResult = calculateCigar(sw, btrack, overhang_strategy); // back-track to produce the cigar and alignment offset
+    }
+
+    /**
+     * Calculates the SW matrices for the given sequences
+     *
+     * Convenience overload that uses this aligner's configured overhang strategy.
+     *
+     * @param reference  ref sequence
+     * @param alternate  alt sequence
+     * @param sw         the Smith-Waterman matrix to populate
+     * @param btrack     the back track matrix to populate
+     */
+    protected void calculateMatrix(final byte[] reference, final byte[] alternate, final int[][] sw, final int[][] btrack) {
+        calculateMatrix(reference, alternate, sw, btrack, overhang_strategy);
+    }
+
+    /**
+     * Calculates the SW matrices for the given sequences
+     *
+     * Fills sw (scores) and btrack (backtrack info) by dynamic programming.
+     * btrack encoding: 0 = diagonal step (match/mismatch), positive k = vertical gap of
+     * length k (deletion w.r.t. the alternate), negative k = horizontal gap of length k
+     * (insertion w.r.t. the alternate).
+     *
+     * @param reference  ref sequence
+     * @param alternate  alt sequence
+     * @param sw         the Smith-Waterman matrix to populate
+     * @param btrack     the back track matrix to populate
+     * @param overhang_strategy    the strategy to use for dealing with overhangs
+     * @throws IllegalArgumentException if either sequence is empty
+     */
+    protected void calculateMatrix(final byte[] reference, final byte[] alternate, final int[][] sw, final int[][] btrack, final OVERHANG_STRATEGY overhang_strategy) {
+        if ( reference.length == 0 || alternate.length == 0 )
+            throw new IllegalArgumentException("Non-null, non-empty sequences are required for the Smith-Waterman calculation");
+
+        final int ncol = sw[0].length;//alternate.length+1; formerly m
+        final int nrow = sw.length;// reference.length+1; formerly n
+
+        final int MATRIX_MIN_CUTOFF;   // never let matrix elements drop below this cutoff
+        if ( cutoff ) MATRIX_MIN_CUTOFF = 0;
+        else MATRIX_MIN_CUTOFF = (int) -1e8;
+
+        // MIN_VALUE/2 rather than MIN_VALUE so the += w_extend updates below cannot underflow
+        int lowInitValue=Integer.MIN_VALUE/2;
+        final int[] best_gap_v = new int[ncol+1];
+        Arrays.fill(best_gap_v, lowInitValue);
+        final int[] gap_size_v = new int[ncol+1];
+        final int[] best_gap_h = new int[nrow+1];
+        Arrays.fill(best_gap_h,lowInitValue);
+        final int[] gap_size_h = new int[nrow+1];
+
+        // we need to initialize the SW matrix with gap penalties if we want to keep track of indels at the edges of alignments
+        if ( overhang_strategy == OVERHANG_STRATEGY.INDEL || overhang_strategy == OVERHANG_STRATEGY.LEADING_INDEL ) {
+            // initialize the first row
+            int[] topRow=sw[0];
+            topRow[1]=parameters.w_open;
+            int currentValue = parameters.w_open;
+            for ( int i = 2; i < topRow.length; i++ ) {
+                currentValue += parameters.w_extend;
+                topRow[i]=currentValue;
+            }
+            // initialize the first column
+            sw[1][0]=parameters.w_open;
+            currentValue = parameters.w_open;
+            for ( int i = 2; i < sw.length; i++ ) {
+                currentValue += parameters.w_extend;
+                sw[i][0]=currentValue;
+            }
+        }
+        // build smith-waterman matrix and keep backtrack info:
+        int[] curRow=sw[0];
+        for ( int i = 1; i <sw.length ; i++ ) {
+            final byte a_base = reference[i-1]; // letter in a at the current pos
+            final int[] lastRow=curRow;
+            curRow=sw[i];
+            final int[] curBackTrackRow=btrack[i];
+            for ( int j = 1; j < curRow.length; j++) {
+                final byte b_base = alternate[j-1]; // letter in b at the current pos
+                // in other words, step_diag = sw[i-1][j-1] + wd(a_base,b_base);
+                final int step_diag = lastRow[j-1] + wd(a_base,b_base);
+
+                // optimized "traversal" of all the matrix cells above the current one (i.e. traversing
+                // all 'step down' events that would end in the current cell. The optimized code
+                // does exactly the same thing as the commented out loop below. IMPORTANT:
+                // the optimization works ONLY for linear w(k)=wopen+(k-1)*wextend!!!!
+
+                // if a gap (length 1) was just opened above, this is the cost of arriving to the current cell:
+                int prev_gap = lastRow[j]+parameters.w_open;
+                best_gap_v[j] += parameters.w_extend; // for the gaps that were already opened earlier, extending them by 1 costs w_extend
+                 if (  prev_gap > best_gap_v[j]  ) {
+                    // opening a gap just before the current cell results in better score than extending by one
+                    // the best previously opened gap. This will hold for ALL cells below: since any gap
+                    // once opened always costs w_extend to extend by another base, we will always get a better score
+                    // by arriving to any cell below from the gap we just opened (prev_gap) rather than from the previous best gap
+                    best_gap_v[j] = prev_gap;
+                    gap_size_v[j] = 1; // remember that the best step-down gap from above has length 1 (we just opened it)
+                } else {
+                    // previous best gap is still the best, even after extension by another base, so we just record that extension:
+                    gap_size_v[j]++;
+                }
+
+                final int step_down = best_gap_v[j] ;
+                final int kd = gap_size_v[j];
+
+                // optimized "traversal" of all the matrix cells to the left of the current one (i.e. traversing
+                // all 'step right' events that would end in the current cell. The optimized code
+                // does exactly the same thing as the commented out loop below. IMPORTANT:
+                // the optimization works ONLY for linear w(k)=wopen+(k-1)*wextend!!!!
+
+                prev_gap =curRow[j-1]  + parameters.w_open; // what would it cost us to open length 1 gap just to the left from current cell
+                best_gap_h[i] += parameters.w_extend; // previous best gap would cost us that much if extended by another base
+                if ( prev_gap > best_gap_h[i] ) {
+                    // newly opened gap is better (score-wise) than any previous gap with the same row index i; since
+                    // gap penalty is linear with k, this new gap location is going to remain better than any previous ones
+                    best_gap_h[i] = prev_gap;
+                    gap_size_h[i] = 1;
+                } else {
+                    gap_size_h[i]++;
+                }
+
+                final int step_right = best_gap_h[i];
+                final int ki = gap_size_h[i];
+
+                //priority here will be step diagonal, step right, step down
+                final boolean diagHighestOrEqual = (step_diag >= step_down)
+                                                && (step_diag >= step_right);
+
+                if ( diagHighestOrEqual ) {
+                    curRow[j]=Math.max(MATRIX_MIN_CUTOFF,step_diag);
+                    curBackTrackRow[j]=0;
+                }
+                else if(step_right>=step_down) { //moving right is the highest
+                    curRow[j]=Math.max(MATRIX_MIN_CUTOFF,step_right);
+                    curBackTrackRow[j]=-ki; // negative = horizontal
+                }
+                else  {
+                    curRow[j]=Math.max(MATRIX_MIN_CUTOFF,step_down);
+                    curBackTrackRow[j]= kd; // positive=vertical
+                }
+            }
+        }
+    }
+
+    /**
+     * Immutable value holder for the result of calculating the CIGAR from the back track matrix:
+     * the cigar itself plus the offset of the start of seq2 with respect to seq1.
+     */
+    protected final class SWPairwiseAlignmentResult {
+        public final Cigar cigar;
+        public final int alignment_offset;
+        public SWPairwiseAlignmentResult(final Cigar cigar, final int alignment_offset) {
+            this.cigar = cigar;
+            this.alignment_offset = alignment_offset;
+        }
+    }
+
+    /**
+     * Calculates the CIGAR for the alignment from the back track matrix
+     *
+     * Picks a starting cell according to the overhang strategy, then walks the backtrack
+     * matrix toward the origin, emitting cigar elements in reverse and finally reversing
+     * and consolidating them.
+     *
+     * @param sw                   the Smith-Waterman matrix to use
+     * @param btrack               the back track matrix to use
+     * @param overhang_strategy    the strategy to use for dealing with overhangs
+     * @return non-null SWPairwiseAlignmentResult object
+     */
+    protected SWPairwiseAlignmentResult calculateCigar(final int[][] sw, final int[][] btrack, final OVERHANG_STRATEGY overhang_strategy) {
+        // p holds the position we start backtracking from; we will be assembling a cigar in the backwards order
+        int p1 = 0, p2 = 0;
+
+        int refLength = sw.length-1;
+        int altLength = sw[0].length-1;
+
+        int maxscore = Integer.MIN_VALUE; // sw scores are allowed to be negative
+        int segment_length = 0; // length of the segment (continuous matches, insertions or deletions)
+
+        // if we want to consider overhangs as legitimate operators, then just start from the corner of the matrix
+        if ( overhang_strategy == OVERHANG_STRATEGY.INDEL ) {
+            p1 = refLength;
+            p2 = altLength;
+        } else {
+            // look for the largest score on the rightmost column. we use >= combined with the traversal direction
+            // to ensure that if two scores are equal, the one closer to diagonal gets picked
+            //Note: this is not technically smith-waterman, as by only looking for max values on the right we are
+            //excluding high scoring local alignments
+            p2=altLength;
+
+            for(int i=1;i<sw.length;i++)  {
+               final int curScore = sw[i][altLength];
+               if (curScore >= maxscore ) {
+                    p1 = i;
+                    maxscore = curScore;
+               }
+            }
+            // now look for a larger score on the bottom-most row
+            if ( overhang_strategy != OVERHANG_STRATEGY.LEADING_INDEL ) {
+                final int[] bottomRow=sw[refLength];
+                for ( int j = 1 ; j < bottomRow.length; j++) {
+                    int curScore=bottomRow[j];
+                    // data_offset is the offset of [n][j]
+                    if ( curScore > maxscore ||
+                            (curScore == maxscore && Math.abs(refLength-j) < Math.abs(p1 - p2) ) ) {
+                        p1 = refLength;
+                        p2 = j ;
+                        maxscore = curScore;
+                        segment_length = altLength - j ; // end of sequence 2 is overhanging; we will just record it as 'M' segment
+                    }
+                }
+            }
+        }
+        final List<CigarElement> lce = new ArrayList<CigarElement>(5);
+        if ( segment_length > 0 && overhang_strategy == OVERHANG_STRATEGY.SOFTCLIP ) {
+            lce.add(makeElement(State.CLIP, segment_length));
+            segment_length = 0;
+        }
+
+        // we will be placing all insertions and deletions into sequence b, so the states are named w/regard
+        // to that sequence
+
+        State state = State.MATCH;
+        do {
+            // btrack encoding: 0 => diagonal step, >0 => vertical gap (deletion) of that length,
+            // <0 => horizontal gap (insertion) of abs(btr) length
+            int btr = btrack[p1][p2];
+            State new_state;
+            int step_length = 1;
+            if ( btr > 0 ) {
+                new_state = State.DELETION;
+                step_length = btr;
+            } else if ( btr < 0 ) {
+                new_state = State.INSERTION;
+                step_length = (-btr);
+            } else new_state = State.MATCH; // and step_length =1, already set above
+
+            // move to next best location in the sw matrix:
+            switch( new_state ) {
+                case MATCH:  p1--; p2--; break; // move back along the diag in the sw matrix
+                case INSERTION: p2 -= step_length; break; // move left
+                case DELETION:  p1 -= step_length; break; // move up
+            }
+
+            // now let's see if the state actually changed:
+            if ( new_state == state ) segment_length+=step_length;
+            else {
+                // state changed, lets emit previous segment, whatever it was (Insertion Deletion, or (Mis)Match).
+                lce.add(makeElement(state, segment_length));
+                segment_length = step_length;
+                state = new_state;
+            }
+        // next condition is equivalent to  while ( sw[p1][p2] != 0 ) (with modified p1 and/or p2:
+        } while ( p1 > 0 && p2 > 0 );
+
+        // post-process the last segment we are still keeping;
+        // NOTE: if reads "overhangs" the ref on the left (i.e. if p2>0) we are counting
+        // those extra bases sticking out of the ref into the first cigar element if DO_SOFTCLIP is false;
+        // otherwise they will be softclipped. For instance,
+        // if read length is 5 and alignment starts at offset -2 (i.e. read starts before the ref, and only
+        // last 3 bases of the read overlap with/align to the ref), the cigar will be still 5M if
+        // DO_SOFTCLIP is false or 2S3M if DO_SOFTCLIP is true.
+        // The consumers need to check for the alignment offset and deal with it properly.
+        final int alignment_offset;
+        if ( overhang_strategy == OVERHANG_STRATEGY.SOFTCLIP ) {
+            lce.add(makeElement(state, segment_length));
+            if ( p2 > 0 ) lce.add(makeElement(State.CLIP, p2));
+            alignment_offset = p1;
+        } else if ( overhang_strategy == OVERHANG_STRATEGY.IGNORE ) {
+            lce.add(makeElement(state, segment_length + p2));
+            alignment_offset = p1 - p2;
+        } else {  // overhang_strategy == OVERHANG_STRATEGY.INDEL || overhang_strategy == OVERHANG_STRATEGY.LEADING_INDEL
+
+            // take care of the actual alignment
+            lce.add(makeElement(state, segment_length));
+
+            // take care of overhangs at the beginning of the alignment
+            if ( p1 > 0 )
+                lce.add(makeElement(State.DELETION, p1));
+            else if ( p2 > 0 )
+                lce.add(makeElement(State.INSERTION, p2));
+
+            alignment_offset = 0;
+        }
+
+        // elements were emitted while walking backwards, so reverse before building the cigar
+        Collections.reverse(lce);
+        return new SWPairwiseAlignmentResult(AlignmentUtils.consolidateCigar(new Cigar(lce)), alignment_offset);
+    }
+
+    /**
+     * Builds a CigarElement of the given length for the given backtrack state.
+     * NOTE(review): if a new State constant were ever added without extending this switch,
+     * op would remain null here — confirm CigarElement's handling of a null operator.
+     *
+     * @param state   the alignment state to translate into a cigar operator
+     * @param length  the length of the element
+     * @return a new CigarElement
+     */
+    protected CigarElement makeElement(final State state, final int length) {
+        CigarOperator op = null;
+        switch (state) {
+            case MATCH: op = CigarOperator.M; break;
+            case INSERTION: op = CigarOperator.I; break;
+            case DELETION: op = CigarOperator.D; break;
+            case CLIP: op = CigarOperator.S; break;
+        }
+        return new CigarElement(length, op);
+    }
+
+
+    // substitution weight: w_match if the two bases are equal, w_mismatch otherwise
+    private int wd(final byte x, final byte y) {
+        return (x == y ? parameters.w_match : parameters.w_mismatch);
+    }
+
+    /** Prints the most recent alignment to stdout using the default line width of 100 columns. */
+    public void printAlignment(byte[] ref, byte[] read) {
+        printAlignment(ref,read,100);
+    }
+    
+    /**
+     * Pretty-prints the most recent alignment to stdout as three stacked rows
+     * (match/mismatch markers, read, reference), wrapped at the given width.
+     * Must be called after align() has run (all public constructors run it).
+     *
+     * @param ref    the reference sequence that was aligned
+     * @param read   the read sequence that was aligned
+     * @param width  number of columns per printed row
+     */
+    public void printAlignment(byte[] ref, byte[] read, int width) {
+        StringBuilder bread = new StringBuilder();
+        StringBuilder bref = new StringBuilder();
+        StringBuilder match = new StringBuilder();
+
+        int i = 0;
+        int j = 0;
+
+        final int offset = getAlignmentStart2wrt1();
+
+        Cigar cigar = getCigar();
+
+        if ( overhang_strategy != OVERHANG_STRATEGY.SOFTCLIP ) {
+
+            // we need to go through all the hassle below only if we do not do softclipping;
+            // otherwise offset is never negative
+            if ( offset < 0 ) {
+                for (  ; j < (-offset) ; j++ ) {
+                    bread.append((char)read[j]);
+                    bref.append(' ');
+                    match.append(' ');
+                }
+                // at negative offsets, our cigar's first element carries overhanging bases
+                // that we have just printed above. Tweak the first element to
+                // exclude those bases. Here we create a new list of cigar elements, so the original
+                // list/original cigar are unchanged (they are unmodifiable anyway!)
+
+                List<CigarElement> tweaked = new ArrayList<CigarElement>();
+                tweaked.addAll(cigar.getCigarElements());
+                tweaked.set(0,new CigarElement(cigar.getCigarElement(0).getLength()+offset,
+                        cigar.getCigarElement(0).getOperator()));
+                cigar = new Cigar(tweaked);
+            }
+        }
+
+        if ( offset > 0 ) { // note: the way this implementation works, cigar will ever start from S *only* if read starts before the ref, i.e. offset = 0
+            for (  ; i < getAlignmentStart2wrt1() ; i++ ) {
+                bref.append((char)ref[i]);
+                bread.append(' ');
+                match.append(' ');
+            }
+        }
+        
+        // walk the cigar, emitting one printed column per consumed base
+        for ( CigarElement e : cigar.getCigarElements() ) {
+            switch (e.getOperator()) {
+                case M :
+                    for ( int z = 0 ; z < e.getLength() ; z++, i++, j++  ) {
+                        bref.append((i<ref.length)?(char)ref[i]:' ');
+                        bread.append((j < read.length)?(char)read[j]:' ');
+                        match.append( ( i<ref.length && j < read.length ) ? (ref[i] == read[j] ? '.':'*' ) : ' ' );
+                    }
+                    break;
+                case I :
+                    for ( int z = 0 ; z < e.getLength(); z++, j++ ) {
+                        bref.append('-');
+                        bread.append((char)read[j]);
+                        match.append('I');
+                    }
+                    break;
+                case S :
+                    for ( int z = 0 ; z < e.getLength(); z++, j++ ) {
+                        bref.append(' ');
+                        bread.append((char)read[j]);
+                        match.append('S');
+                    }
+                    break;
+                case D:
+                    for ( int z = 0 ; z < e.getLength(); z++ , i++ ) {
+                        bref.append((char)ref[i]);
+                        bread.append('-');
+                        match.append('D');
+                    }
+                    break;
+                default:
+                    throw new GATKException("Unexpected Cigar element:" + e.getOperator());
+            }
+        }
+        // flush any trailing unaligned bases of either sequence
+        for ( ; i < ref.length; i++ ) bref.append((char)ref[i]);
+        for ( ; j < read.length; j++ ) bread.append((char)read[j]);
+
+        int pos = 0 ;
+        int maxlength = Math.max(match.length(),Math.max(bread.length(),bref.length()));
+        while ( pos < maxlength ) {
+            print_cautiously(match,pos,width);
+            print_cautiously(bread,pos,width);
+            print_cautiously(bref,pos,width);
+            System.out.println();
+            pos += width;
+        }
+    }
+
+    /** StringBuilder's substring throws when either end of the interval is out of range
+     * rather than trimming or returning an empty string. This utility prints the
+     * substring as-is when the interval is within range, trims it when it overruns the
+     * end, and prints a bare newline when start is past the end.
+     * @param s      the buffer to print a window of
+     * @param start  first index to print (may be past the end of s)
+     * @param width  maximum number of characters to print
+     */
+    private static void print_cautiously(StringBuilder s, int start, int width) {
+        if ( start >= s.length() ) {
+            System.out.println();
+            return;
+        }
+        int end = Math.min(start+width,s.length());
+        System.out.println(s.substring(start,end));
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/smithwaterman/SWPairwiseAlignmentMain.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/smithwaterman/SWPairwiseAlignmentMain.java
new file mode 100644
index 0000000..40e45d9
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/smithwaterman/SWPairwiseAlignmentMain.java
@@ -0,0 +1,221 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.smithwaterman;
+
+import org.broadinstitute.gatk.utils.Utils;
+import org.broadinstitute.gatk.utils.collections.Pair;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Simple command-line program to run an SW alignment and (optionally) dump the scoring matrix.
+ *
+ * Arguments are of the form KEY=VALUE; two SEQ arguments are required, and
+ * MATCH/MISMATCH/OPEN/EXTEND/REVERSE/PRINT_MATRIX/CUTOFF are optional.
+ *
+ * // TODO -- should be replaced with Caliper before using again
+ *
+ * User: depristo
+ * Date: 2/28/13
+ * Time: 4:54 PM
+ */
+public class SWPairwiseAlignmentMain {
+    //    BELOW: main() method for testing; old implementations of the core methods are commented out below;
+//           uncomment everything through the end of the file if benchmarking of new vs old implementations is needed.
+
+    public static void main(String[] argv) {
+//        String ref="CACGAGCATATGTGTACATGAATTTGTATTGCACATGTGTTTAATGCGAACACGTGTCATGTGTATGTGTTCACATGCATGTGTGTCT";
+//        String read =   "GCATATGTTTACATGAATTTGTATTGCACATGTGTTTAATGCGAACACGTGTCATGTGTGTGTTCACATGCATGTG";
+
+        String ref = null;
+        String read = null;
+
+        Map<String,List<String>> args = processArgs(argv);
+
+        // exactly two SEQ arguments (ref then read) are required
+        List<String> l = args.get("SEQ");
+        args.remove("SEQ");
+        if ( l == null ) {
+            System.err.println("SEQ argument is missing. Two input sequences must be provided");
+            System.exit(1);
+        }
+        if ( l.size() != 2 ) {
+            System.err.println("Two input sequences (SEQ arguments) must be provided. Found "+l.size()+" instead");
+            System.exit(1);
+        }
+
+        ref = l.get(0);
+        read = l.get(1);
+
+        // optional scoring overrides; defaults are applied below
+        Double m = extractSingleDoubleArg("MATCH",args);
+        Double mm = extractSingleDoubleArg("MISMATCH",args);
+        Double open = extractSingleDoubleArg("OPEN",args);
+        Double ext = extractSingleDoubleArg("EXTEND",args);
+
+        Boolean reverse = extractSingleBooleanArg("REVERSE",args);
+        if ( reverse != null && reverse ) {
+            ref = Utils.reverse(ref);
+            read = Utils.reverse(read);
+        }
+
+        Boolean print_mat = extractSingleBooleanArg("PRINT_MATRIX",args);
+        Boolean cut = extractSingleBooleanArg("CUTOFF",args);
+        if ( cut != null ) SWPairwiseAlignment.cutoff = cut;
+
+        // anything still left in the map is an unrecognized argument
+        if ( args.size() != 0 ) {
+            System.err.println("Unknown argument on the command line: "+args.keySet().iterator().next());
+            System.exit(1);
+        }
+
+        final int w_match, w_mismatch, w_open, w_extend;
+
+        w_match = (m == null ? 30 : m.intValue());
+        w_mismatch = (mm == null ? -10 : mm.intValue());
+        w_open = (open == null ? -10 : open.intValue());
+        w_extend = (ext == null ? -2 : ext.intValue());
+
+
+        // keep the scoring matrix around so PRINT_MATRIX can dump it after alignment
+        SWPairwiseAlignment.keepScoringMatrix = true;
+        SWPairwiseAlignment a = new SWPairwiseAlignment(ref.getBytes(),read.getBytes(),w_match,w_mismatch,w_open,w_extend);
+
+        System.out.println("start="+a.getAlignmentStart2wrt1()+", cigar="+a.getCigar()+
+                " length1="+ref.length()+" length2="+read.length());
+
+
+        System.out.println();
+        a.printAlignment(ref.getBytes(),read.getBytes());
+
+        System.out.println();
+        if ( print_mat != null && print_mat ) {
+            print(a.SW,ref.getBytes(),read.getBytes());
+        }
+    }
+
+    /**
+     * Dumps the scoring matrix s to stdout with sequence b as the column header and
+     * sequence a labeling the rows.
+     *
+     * BUGFIX: the matrix holds ints, so cells must be formatted with %d; the previous
+     * " %5.1f" format applied to an int throws IllegalFormatConversionException at runtime.
+     */
+    private static void print(final int[][] s, final byte[] a, final byte[] b) {
+        int n = a.length+1;
+        int m = b.length+1;
+        System.out.print("         ");
+        for ( int j = 1 ; j < m ; j++) System.out.printf(" %5c",(char)b[j-1]) ;
+        System.out.println();
+
+        for ( int i = 0 ; i < n ; i++) {
+            if ( i > 0 ) System.out.print((char)a[i-1]);
+            else System.out.print(' ');
+            System.out.print("  ");
+            for ( int j = 0; j < m ; j++ ) {
+                System.out.printf(" %5d",s[i][j]);
+            }
+            System.out.println();
+        }
+    }
+
+
+    /**
+     * If argv[i] starts with prefix, returns the attached value (either the remainder of
+     * argv[i] or, when empty, the next element) together with the index of the next
+     * unconsumed element; otherwise returns (null, i) unchanged.
+     */
+    static Pair<String,Integer> getArg(String prefix, String[] argv, int i) {
+        String arg = null;
+        if ( argv[i].startsWith(prefix) ) {
+            arg = argv[i].substring(prefix.length());
+            if( arg.length() == 0 ) {
+                i++;
+                if ( i < argv.length ) arg = argv[i];
+                else {
+                    System.err.println("No value found after " + prefix + " argument tag");
+                    System.exit(1);
+                }
+            }
+            i++;
+        }
+        return new Pair<String,Integer>(arg,i);
+    }
+
+    /**
+     * Parses KEY=VALUE command-line tokens into a multimap; a value may also follow its
+     * "KEY=" token as the next element. Exits with an error message on malformed input.
+     */
+    static Map<String,List<String>> processArgs(String[] argv) {
+        Map<String,List<String>> args = new HashMap<String,List<String>>();
+
+        for ( int i = 0; i < argv.length ; i++ ) {
+            String arg = argv[i];
+            int pos = arg.indexOf('=');
+            if ( pos < 0 ) {
+                System.err.println("Argument "+arg+" is not of the form <ARG>=<VAL>");
+                System.exit(1);
+            }
+            String val = arg.substring(pos+1);
+            if ( val.length() == 0 ) {
+                // there was a space between '=' and the value
+                i++;
+                if ( i < argv.length ) val = argv[i];
+                else {
+                    System.err.println("No value found after " + arg + " argument tag");
+                    System.exit(1);
+                }
+            }
+            arg = arg.substring(0,pos);
+
+            List<String> l = args.get(arg);
+            if ( l == null ) {
+                l = new ArrayList<String>();
+                args.put(arg,l);
+            }
+            l.add(val);
+        }
+        return args;
+    }
+
+    /**
+     * Removes argname from args and returns its single value parsed as a Double,
+     * or null if the argument was absent. Exits on duplicates or unparseable values.
+     */
+    static Double extractSingleDoubleArg(String argname, Map<String,List<String>> args) {
+        List<String> l = args.get(argname);
+        args.remove(argname);
+        if ( l == null ) return null;
+
+        if ( l.size() > 1 ) {
+            System.err.println("Only one "+argname+" argument is allowed");
+            System.exit(1);
+        }
+        double d=0;
+        try {
+            d = Double.parseDouble(l.get(0));
+        } catch ( NumberFormatException e) {
+            System.err.println("Can not parse value provided for "+argname+" argument ("+l.get(0)+")");
+            System.exit(1);
+        }
+        System.out.println("Argument "+argname+" set to "+d);
+        return Double.valueOf(d);
+    }
+
+
+    /**
+     * Removes argname from args and returns its single value parsed as a Boolean
+     * (only the literals "true"/"false" are accepted), or null if absent. Exits on error.
+     */
+    static Boolean extractSingleBooleanArg(String argname, Map<String,List<String>> args) {
+        List<String> l = args.get(argname);
+        args.remove(argname);
+        if ( l == null ) return null;
+
+        if ( l.size() > 1 ) {
+            System.err.println("Only one "+argname+" argument is allowed");
+            System.exit(1);
+        }
+        if ( l.get(0).equals("true") ) return Boolean.valueOf(true);
+        if ( l.get(0).equals("false") ) return Boolean.valueOf(false);
+        System.err.println("Can not parse value provided for "+argname+" argument ("+l.get(0)+"); true/false are allowed");
+        System.exit(1);
+        return Boolean.valueOf(false); // This value isn't used because it is preceded by System.exit(1)
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/smithwaterman/SWParameterSet.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/smithwaterman/SWParameterSet.java
new file mode 100644
index 0000000..a515273
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/smithwaterman/SWParameterSet.java
@@ -0,0 +1,51 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.smithwaterman;
+
+/**
+ * Handy named collection of common Smith-waterman parameter sets
+ *
+ * User: depristo
+ * Date: 4/11/13
+ * Time: 12:02 PM
+ */
+public enum SWParameterSet {
+    // match=1, mismatch = -1/3, gap=-(1+k/3)
+    // NOTE(review): the comment above appears to describe the ratios these integer
+    // weights were scaled from — confirm against the Parameters(match, mismatch, open, extend) definition
+    ORIGINAL_DEFAULT(new Parameters(3,-1,-4,-3)),
+
+    /**
+     * A standard set of values for NGS alignments
+     */
+    STANDARD_NGS(new Parameters(25, -50, -110, -6));
+
+    // the concrete weights for this named set; read directly by SWPairwiseAlignment
+    protected Parameters parameters;
+
+    SWParameterSet(final Parameters parameters) {
+        if ( parameters == null ) throw new IllegalArgumentException("parameters cannot be null");
+
+        this.parameters = parameters;
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/smithwaterman/SmithWaterman.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/smithwaterman/SmithWaterman.java
new file mode 100644
index 0000000..a913c5a
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/smithwaterman/SmithWaterman.java
@@ -0,0 +1,57 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.smithwaterman;
+
+import htsjdk.samtools.Cigar;
+
+/**
+ * Generic interface for Smith-Waterman calculations.
+ *
+ * This interface allows clients to use a generic SmithWaterman variable, without propagating the specific
+ * implementation of SmithWaterman throughout their code:
+ *
+ * SmithWaterman sw = new SpecificSmithWatermanImplementation(ref, read, params)
+ * sw.getCigar()
+ * sw.getAlignmentStart2wrt1()
+ *
+ * User: depristo
+ * Date: 4/26/13
+ * Time: 8:24 AM
+ */
+public interface SmithWaterman {
+
+    /**
+     * Get the cigar describing the alignment computed by this SmithWaterman instance.
+     * @return a non-null cigar
+     */
+    public Cigar getCigar();
+
+    /**
+     * Get the starting position of the second sequence (the read) with respect to the
+     * first sequence (the reference).
+     * @return an integer >= 0
+     */
+    public int getAlignmentStart2wrt1();
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/text/ListFileUtils.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/text/ListFileUtils.java
new file mode 100644
index 0000000..91db56b
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/text/ListFileUtils.java
@@ -0,0 +1,344 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.text;
+
+import org.broadinstitute.gatk.utils.commandline.ParsingEngine;
+import org.broadinstitute.gatk.utils.commandline.RodBinding;
+import org.broadinstitute.gatk.utils.commandline.Tags;
+import org.broadinstitute.gatk.utils.sam.SAMReaderID;
+import org.broadinstitute.gatk.utils.refdata.tracks.FeatureManager;
+import org.broadinstitute.gatk.utils.refdata.utils.RMDTriplet;
+import org.broadinstitute.gatk.utils.exceptions.UserException;
+
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.util.*;
+import java.util.regex.Pattern;
+
+/**
+ * A collection of convenience methods for working with list files.
+ *
+ * A ".list" file is a text file naming one entry per line; blank lines and lines
+ * beginning with {@link #LIST_FILE_COMMENT_START} are ignored when unpacking.
+ */
+public class ListFileUtils {
+    /**
+     * Lines starting with this String in .list files are considered comments.
+     */
+    public static final String LIST_FILE_COMMENT_START = "#";        
+
+    /**
+     * Unpack the bam files to be processed, given a list of files.  That list of files can
+     * itself contain entries which are lists of other files to be read (note: you cannot have lists
+     * of lists of lists). Lines in .list files containing only whitespace or which begin with
+     * LIST_FILE_COMMENT_START are ignored.
+     *
+     * @param samFiles The sam files, in string format.
+     * @param parser Parser used to recover the command-line tags attached to each file name
+     * @return a flattened list of the bam files provided
+     * @throws UserException.CouldNotReadInputFile if a referenced .list file cannot be found
+     * @throws UserException.CommandLineException if an entry has an unsupported extension
+     */
+    public static List<SAMReaderID> unpackBAMFileList(final List<String> samFiles, final ParsingEngine parser) {
+        List<SAMReaderID> unpackedReads = new ArrayList<SAMReaderID>();
+        for( String inputFileName: samFiles ) {
+            // Capture tags before expandFileName() rewrites "-" to "/dev/stdin"
+            Tags inputFileNameTags = parser.getTags(inputFileName);
+            inputFileName = expandFileName(inputFileName);
+            if (inputFileName.toLowerCase().endsWith(".list") ) {
+                try {
+                    // Each non-comment, non-blank line of the .list file is one reader entry.
+                    // NOTE(review): this re-queries tags with the *expanded* file name rather than
+                    // reusing inputFileNameTags captured above — looks like it should be
+                    // inputFileNameTags; confirm against ParsingEngine.getTags semantics.
+                    for ( String fileName : new XReadLines(new File(inputFileName), true, LIST_FILE_COMMENT_START) ) {
+                        unpackedReads.add(new SAMReaderID(fileName,parser.getTags(inputFileName)));
+                    }
+                }
+                catch( FileNotFoundException ex ) {
+                    throw new UserException.CouldNotReadInputFile(new File(inputFileName), "Unable to find file while unpacking reads", ex);
+                }
+            }
+            else if(inputFileName.toLowerCase().endsWith(".bam") || inputFileName.toLowerCase().endsWith(".cram")) {
+                unpackedReads.add(new SAMReaderID(inputFileName,inputFileNameTags));
+            }
+            else if(inputFileName.endsWith("stdin")) {
+                // "-" was expanded to "/dev/stdin" above; accept streaming input as-is.
+                unpackedReads.add(new SAMReaderID(inputFileName,inputFileNameTags));
+            }
+            else {
+                throw new UserException.CommandLineException(String.format("The GATK reads argument (-I, --input_file) supports only BAM/CRAM files with the .bam/.cram extension and lists of BAM/CRAM files " +
+                        "with the .list extension, but the file %s has neither extension.  Please ensure that your BAM/CRAM file or list " +
+                        "of BAM/CRAM files is in the correct format, update the extension, and try again.",inputFileName));
+            }
+        }
+        return unpackedReads;
+    }
+
+    /**
+     * Convert command-line argument representation of ROD bindings to something more easily understandable by the engine.
+     * @param RODBindings the raw "-B:&lt;name&gt;,&lt;type&gt; &lt;filename&gt;" binding strings from the command line
+     * @param parser Parser used to recover the tags attached to each binding
+     * @return a list of expanded, bound RODs.
+     * @throws UserException if a binding does not carry exactly the two expected positional tags
+     */
+    @Deprecated
+    @SuppressWarnings("unused") // TODO: Who is still using this? External walkers?
+    public static Collection<RMDTriplet> unpackRODBindingsOldStyle(final Collection<String> RODBindings, final ParsingEngine parser) {
+        // todo -- this is a strange home for this code.  Move into ROD system
+        Collection<RMDTriplet> rodBindings = new ArrayList<RMDTriplet>();
+
+        for (String fileName: RODBindings) {
+            // Capture tags before expandFileName() rewrites "-" to "/dev/stdin"
+            final Tags tags = parser.getTags(fileName);
+            fileName = expandFileName(fileName);
+
+            List<String> positionalTags = tags.getPositionalTags();
+            if(positionalTags.size() != 2)
+                throw new UserException("Invalid syntax for -B (reference-ordered data) input flag.  " +
+                        "Please use the following syntax when providing reference-ordered " +
+                        "data: -B:<name>,<type> <filename>.");
+            // Assume that if tags are present, those tags are name and type.
+            // Name is always first, followed by type.
+            String name = positionalTags.get(0);
+            String type = positionalTags.get(1);
+
+            // Storage type: explicit "storage" tag wins, then stdin is a STREAM, else a FILE.
+            RMDTriplet.RMDStorageType storageType;
+            if(tags.getValue("storage") != null)
+                storageType = Enum.valueOf(RMDTriplet.RMDStorageType.class,tags.getValue("storage"));
+            else if(fileName.toLowerCase().endsWith("stdin"))
+                storageType = RMDTriplet.RMDStorageType.STREAM;
+            else
+                storageType = RMDTriplet.RMDStorageType.FILE;
+
+            rodBindings.add(new RMDTriplet(name,type,fileName,storageType,tags));
+        }
+
+        return rodBindings;
+    }
+
+    /**
+     * Convert command-line argument representation of ROD bindings to something more easily understandable by the engine.
+     * @param RODBindings the typed RodBinding objects produced by command-line parsing
+     * @param parser Parser (unused; kept for signature compatibility)
+     * @return a list of expanded, bound RODs.
+     * @throws UserException.UnknownTribbleType if a binding names a Tribble type that does not exist
+     * @throws UserException.BadArgumentValue if the file's Feature type is incompatible with the binding's type
+     */
+    @SuppressWarnings("unchecked")
+    public static Collection<RMDTriplet> unpackRODBindings(final Collection<RodBinding> RODBindings, @SuppressWarnings("unused") final ParsingEngine parser) {
+        // todo -- this is a strange home for this code.  Move into ROD system
+        Collection<RMDTriplet> rodBindings = new ArrayList<RMDTriplet>();
+        FeatureManager builderForValidation = new FeatureManager();
+
+        for (RodBinding rodBinding: RODBindings) {
+            String argValue = rodBinding.getSource();
+            String fileName = expandFileName(argValue);
+            String name = rodBinding.getName();
+            String type = rodBinding.getTribbleType();
+
+            // Storage type: explicit "storage" tag wins, then stdin is a STREAM, else a FILE.
+            RMDTriplet.RMDStorageType storageType;
+            if(rodBinding.getTags().getValue("storage") != null)
+                storageType = Enum.valueOf(RMDTriplet.RMDStorageType.class,rodBinding.getTags().getValue("storage"));
+            else if(fileName.toLowerCase().endsWith("stdin"))
+                storageType = RMDTriplet.RMDStorageType.STREAM;
+            else
+                storageType = RMDTriplet.RMDStorageType.FILE;
+
+            RMDTriplet triplet = new RMDTriplet(name,type,fileName,storageType,rodBinding.getTags());
+
+            // validate triplet type
+            FeatureManager.FeatureDescriptor descriptor = builderForValidation.getByTriplet(triplet);
+            if ( descriptor == null )
+                throw new UserException.UnknownTribbleType(rodBinding.getTribbleType(),
+                        String.format("Field %s had provided type %s but there's no such Tribble type.  The compatible types are: %n%s",
+                                rodBinding.getName(), rodBinding.getTribbleType(), builderForValidation.userFriendlyListOfAvailableFeatures(rodBinding.getType())));
+            if ( ! rodBinding.getType().isAssignableFrom(descriptor.getFeatureClass()) )
+                throw new UserException.BadArgumentValue(rodBinding.getName(),
+                        String.format("Field %s expects Features of type %s, but the input file produces Features of type %s. The compatible types are: %n%s",
+                                rodBinding.getName(), rodBinding.getType().getSimpleName(), descriptor.getSimpleFeatureName(),
+                                builderForValidation.userFriendlyListOfAvailableFeatures(rodBinding.getType())));
+
+
+            rodBindings.add(triplet);
+        }
+
+        return rodBindings;
+    }
+
+    /**
+     * Expand any special characters that appear in the filename.  Right now, '-' is expanded to
+     * '/dev/stdin' only, but in the future, special characters like '~' and '*' that are passed
+     * directly to the command line in some circumstances could be expanded as well.  Be careful
+     * when adding UNIX-isms.
+     * @param argument the text appearing on the command-line.
+     * @return An expanded string suitable for opening by Java/UNIX file handling utilities.
+     */
+    private static String expandFileName(String argument) {
+        if(argument.trim().equals("-"))
+            return "/dev/stdin";
+        return argument;
+    }
+
+    /**
+     * Returns a new set of values, containing a final set of values expanded from values
+     * <p/>
+     * Each element E of values can either be a literal string or a file ending in .list.
+     * For each E ending in .list we try to read a file named E from disk, and if possible
+     * all lines from that file are expanded into unique values.
+     *
+     * @param values Original values; must not be null
+     * @return entries from values or the files listed in values (insertion-ordered, duplicates removed)
+     * @throws UserException.CouldNotReadInputFile if a referenced .list file exists but cannot be read
+     */
+    public static Set<String> unpackSet(Collection<String> values) {
+        if (values == null)
+            throw new NullPointerException("values cannot be null");
+        // LinkedHashSet: dedupe while preserving the order entries were encountered.
+        Set<String> unpackedValues = new LinkedHashSet<String>();
+        // Let's first go through the list and see if we were given any files.
+        // We'll add every entry in the file to our set, and treat the entries as
+        // if they had been specified on the command line.
+        for (String value : values) {
+            File file = new File(value);
+            if (value.toLowerCase().endsWith(".list") && file.exists()) {
+                try {
+                    unpackedValues.addAll(new XReadLines(file, true, LIST_FILE_COMMENT_START).readLines());
+                } catch (IOException e) {
+                    throw new UserException.CouldNotReadInputFile(file, e);
+                }
+            } else {
+                // Not a readable .list file: treat the entry as a literal value.
+                unpackedValues.add(value);
+            }
+        }
+        return unpackedValues;
+    }
+
+    /**
+     * Returns a new set of values including only values listed by filters
+     * <p/>
+     * Each element E of values can either be a literal string or a file.  For each E,
+     * we try to read a file named E from disk, and if possible all lines from that file are expanded
+     * into unique names.
+     * <p/>
+     * Filters may also be a file of filters.
+     *
+     * @param values     Values or files with values
+     * @param filters    Filters or files with filters
+     * @param exactMatch If true match filters exactly, otherwise use as both exact and regular expressions
+     * @return entries from values or the files listed in values, filtered by filters
+     */
+    public static Set<String> includeMatching(Collection<String> values, Collection<String> filters, boolean exactMatch) {
+        return includeMatching(values, IDENTITY_STRING_CONVERTER, filters, exactMatch);
+    }
+
+    /**
+     * Converts a type T to a String representation.
+     *
+     * @param <T> Type to convert to a String.
+     */
+    public static interface StringConverter<T> {
+        String convert(T value);
+    }
+
+    /**
+     * Returns a new set of values including only values matching filters
+     * <p/>
+     * Filters may also be a file of filters.
+     * <p/>
+     * The converter should convert T to a unique String for each value in the set.
+     *
+     * @param values     Values or files with values; must not be null
+     * @param converter  Converts values to strings; must not be null
+     * @param filters    Filters or files with filters; must not be null
+     * @param exactMatch If true match filters exactly, otherwise use as both exact and regular expressions
+     * @return entries from values including only values matching filters
+     */
+    public static <T> Set<T> includeMatching(Collection<T> values, StringConverter<T> converter, Collection<String> filters, boolean exactMatch) {
+        if (values == null)
+            throw new NullPointerException("values cannot be null");
+        if (converter == null)
+            throw new NullPointerException("converter cannot be null");
+        if (filters == null)
+            throw new NullPointerException("filters cannot be null");
+
+        Set<String> unpackedFilters = unpackSet(filters);
+        Set<T> filteredValues = new LinkedHashSet<T>();
+        // patterns stays null when exactMatch is true; it is only dereferenced in the
+        // !exactMatch branch below, so no NPE is possible.
+        Collection<Pattern> patterns = null;
+        if (!exactMatch)
+            patterns = compilePatterns(unpackedFilters);
+        for (T value : values) {
+            String converted = converter.convert(value);
+            if (unpackedFilters.contains(converted)) {
+                filteredValues.add(value);
+            } else if (!exactMatch) {
+                // Regex match anywhere in the converted string (find(), not matches()).
+                for (Pattern pattern : patterns)
+                    if (pattern.matcher(converted).find())
+                        filteredValues.add(value);
+            }
+        }
+        return filteredValues;
+    }
+    
+    /**
+     * Returns a new set of values excluding any values matching filters.
+     * <p/>
+     * Filters may also be a file of filters.
+     * <p/>
+     * The converter should convert T to a unique String for each value in the set.
+     *
+     * @param values     Values or files with values; must not be null
+     * @param converter  Converts values to strings; must not be null
+     * @param filters    Filters or files with filters; must not be null
+     * @param exactMatch If true match filters exactly, otherwise use as both exact and regular expressions
+     * @return entries from values excluding any values matching filters
+     */
+    public static <T> Set<T> excludeMatching(Collection<T> values, StringConverter<T> converter, Collection<String> filters, boolean exactMatch) {
+        if (values == null)
+            throw new NullPointerException("values cannot be null");
+        if (converter == null)
+            throw new NullPointerException("converter cannot be null");
+        if (filters == null)
+            throw new NullPointerException("filters cannot be null");
+
+        Set<String> unpackedFilters = unpackSet(filters);
+        // Start from the full set and remove anything that matches a filter.
+        Set<T> filteredValues = new LinkedHashSet<T>();
+        filteredValues.addAll(values);
+        // patterns stays null when exactMatch is true; only used in the !exactMatch branch.
+        Collection<Pattern> patterns = null;
+        if (!exactMatch)
+            patterns = compilePatterns(unpackedFilters);
+        for (T value : values) {
+            String converted = converter.convert(value);
+            if (unpackedFilters.contains(converted)) {
+                filteredValues.remove(value);
+            } else if (!exactMatch) {
+                // Regex match anywhere in the converted string (find(), not matches()).
+                for (Pattern pattern : patterns)
+                    if (pattern.matcher(converted).find())
+                        filteredValues.remove(value);
+            }
+        }
+        return filteredValues;
+    }
+
+    // Compiles each filter string into a java.util.regex Pattern.
+    private static Collection<Pattern> compilePatterns(Collection<String> filters) {
+        Collection<Pattern> patterns = new ArrayList<Pattern>();
+        for (String filter: filters) {
+            patterns.add(Pattern.compile(filter));
+        }
+        return patterns;
+    }
+
+    // Identity converter used by the String overload of includeMatching.
+    protected static final StringConverter<String> IDENTITY_STRING_CONVERTER = new StringConverter<String>() {
+        @Override
+        public String convert(String value) {
+            return value;
+        }
+    };
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/text/TextFormattingUtils.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/text/TextFormattingUtils.java
new file mode 100644
index 0000000..044a657
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/text/TextFormattingUtils.java
@@ -0,0 +1,182 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.text;
+
+import org.apache.log4j.Logger;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+import org.broadinstitute.gatk.utils.io.Resource;
+
+import java.io.IOException;
+import java.io.StringReader;
+import java.util.*;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+/**
+ * Common utilities for dealing with text formatting.
+ *
+ * @author mhanna
+ * @version 0.1
+ */
+public class TextFormattingUtils {
+    /**
+     * our log, which we want to capture anything from this class
+     */
+    // NOTE(review): currently only referenced by the commented-out warning in
+    // loadResourceBundle(); kept for when that warning is re-enabled.
+    private static Logger logger = Logger.getLogger(TextFormattingUtils.class);
+
+    /**
+     * The contents of the GATK bundle.  If no such resource exists, warn the user and create an empty bundle.
+     */
+    public static final ResourceBundle GATK_RESOURCE_BUNDLE = loadResourceBundle("GATKText", null);
+
+    /**
+     * The default line width, for GATK output written to the screen.
+     */
+    public static final int DEFAULT_LINE_WIDTH = 120;
+
+    /**
+     * Simple implementation of word-wrap for a line of text.  Idea and
+     * regexp shamelessly stolen from http://joust.kano.net/weblog/archives/000060.html.
+     * Regexp can probably be simplified for our application.
+     * @param text Text to wrap.
+     * @param width Maximum line width.
+     * @return A list of word-wrapped lines.
+     */
+    public static List<String> wordWrap( String text, int width ) {
+        // Match up to width-1 chars followed by a word boundary (or end of input).
+        Pattern wrapper = Pattern.compile( String.format(".{0,%d}(?:\\S(?:[\\s|]|$)|$)", width-1) );
+        Matcher matcher = wrapper.matcher( text );
+
+        List<String> wrapped = new ArrayList<String>();
+        while( matcher.find() ) {
+            // Regular expression is supersensitive to whitespace.
+            // Assert that content is present before adding the line.
+            // Note: trim() is used only for the emptiness test; the untrimmed
+            // match is what gets added, preserving the original spacing.
+            String line = matcher.group().trim();
+            if( line.length() > 0 )
+                wrapped.add( matcher.group() );
+        }
+        return wrapped;
+    }
+
+    /**
+     * Compares two strings independently of case sensitivity.
+     */
+    public static class CaseInsensitiveComparator implements Comparator<String> {
+        /**
+         * Compares the order of lhs to rhs, not taking case into account.
+         * @param lhs First object to compare.
+         * @param rhs Second object to compare.
+         * @return 0 if objects are identical; -1 if lhs is before rhs, 1 if rhs is before lhs.  Nulls are treated as after everything else.
+         */
+        public int compare(String lhs, String rhs) {
+            if(lhs == null && rhs == null) return 0;
+            if(lhs == null) return 1;
+            if(rhs == null) return -1;
+            return lhs.toLowerCase().compareTo(rhs.toLowerCase());
+        }
+    }
+
+    /**
+     * Load the contents of a resource bundle with the given name.  If no such resource exists, warn the user
+     * and create an empty bundle.
+     * @param bundleName The name of the bundle to load.
+     * @param relativeClass The relative class or null to load a bundle from the root.
+     * @return The best resource bundle that can be found matching the given name.
+     * @throws ReviewedGATKException only if even the empty fallback bundle cannot be built
+     */
+    public static ResourceBundle loadResourceBundle(String bundleName, Class<?> relativeClass) {
+        // Translate the bundle name to a ".properties" resource path for the root locale.
+        final ResourceBundle.Control c = ResourceBundle.Control.getControl(ResourceBundle.Control.FORMAT_DEFAULT);
+        final String resourceName = c.toResourceName(c.toBundleName(bundleName, Locale.ROOT), "properties");
+        final Resource resource = new Resource(resourceName, relativeClass);
+        ResourceBundle bundle;
+        try {
+            bundle = new PropertyResourceBundle(resource.getAllResourcesContentsAsStream());
+        }
+        catch(Exception ex) {
+            //logger.warn("Unable to load help text.  Help output will be sparse.");
+            // Generate an empty resource bundle.
+            try {
+                bundle = new PropertyResourceBundle(new StringReader(""));
+            }
+            catch(IOException ioe) {
+                throw new ReviewedGATKException("No resource bundle found, and unable to create an empty placeholder.",ioe);
+            }
+        }
+        return bundle;
+    }
+
+
+    /**
+     * Returns the word starting positions within line, excluding the first position 0.
+     * The returned list is compatible with splitFixedWidth.
+     * @param line Text to parse; must not be null.
+     * @return the word starting positions within line, excluding the first position 0.
+     */
+    public static List<Integer> getWordStarts(String line) {
+        if (line == null)
+            throw new ReviewedGATKException("line is null");
+        List<Integer> starts = new ArrayList<Integer>();
+        int stop = line.length();
+        // A word starts wherever a non-whitespace char follows a whitespace char.
+        for (int i = 1; i < stop; i++)
+            if (Character.isWhitespace(line.charAt(i-1)))
+                if(!Character.isWhitespace(line.charAt(i)))
+                    starts.add(i);
+        return starts;
+    }
+
+    /**
+     * Parses a fixed width line of text.
+     * @param line Text to parse; must not be null.
+     * @param columnStarts the column starting positions within line, excluding the first position 0; must not be null.
+     * @return The parsed string array with each entry trimmed; always columnStarts.size() + 1 entries.
+     */
+    public static String[] splitFixedWidth(String line, List<Integer> columnStarts) {
+        if (line == null)
+            throw new ReviewedGATKException("line is null");
+        if (columnStarts == null)
+            throw new ReviewedGATKException("columnStarts is null");
+        int startCount = columnStarts.size();
+        String[] row = new String[startCount + 1];
+        if (startCount == 0) {
+            // No column boundaries: the whole line is one trimmed field.
+            row[0] = line.trim();
+        } else {
+            row[0] = line.substring(0, columnStarts.get(0)).trim();
+            for (int i = 1; i < startCount; i++)
+                row[i] = line.substring(columnStarts.get(i - 1), columnStarts.get(i)).trim();
+            // Last field runs from the final boundary to the end of the line.
+            row[startCount] = line.substring(columnStarts.get(startCount - 1)).trim();
+        }
+        return row;
+    }
+
+    /**
+     * Parses a line of text by whitespace.
+     * @param line Text to parse; must not be null.
+     * @return The parsed string array.
+     */
+    public static String[] splitWhiteSpace(String line) {
+        if (line == null)
+            throw new ReviewedGATKException("line is null");
+        return line.trim().split("\\s+");
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/text/XReadLines.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/text/XReadLines.java
new file mode 100644
index 0000000..267ab5f
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/text/XReadLines.java
@@ -0,0 +1,208 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.text;
+
+import java.io.*;
+import java.util.Iterator;
+import java.util.LinkedList;
+import java.util.List;
+
+/**
+ * Support for Python-like xreadlines() function as a class.  This is an iterator and iterable over
+ * Strings, each corresponding a line in the file (minus newline).  Enables the very simple accessing
+ * of lines in a file as:
+ *
+ * XReadLines reader = new XReadLines(new File(file_name));
+ * List<String> lines = reader.readLines();
+ * reader.close();
+ *
+ * or
+ *
+ * for ( String line : new XReadLines(new File(file_name)) ) {
+ *   doSomeWork(line);
+ * }
+ *
+ * For the love of god, please use this system for reading lines in a file.
+ */
+public class XReadLines implements Iterator<String>, Iterable<String> {
+    private final BufferedReader in;      // The stream we're reading from
+    private String nextLine = null;       // Return value of next call to next()
+    private final boolean trimWhitespace; // If true, lines are trimmed and blank lines skipped
+    private final String commentPrefix;   // Lines starting with this prefix are skipped; null = no comments
+
+    /**
+     * Reads lines from filename, trimming whitespace, with no comment prefix.
+     * NOTE(review): FileReader decodes with the platform default charset — confirm
+     * that's acceptable for the list files this is used on.
+     * @param filename file to read
+     * @throws FileNotFoundException when the file is not found
+     */
+    public XReadLines(final File filename) throws FileNotFoundException {
+        this(new FileReader(filename), true, null);
+    }
+
+    /**
+     * Reads lines from filename with no comment prefix.
+     * @param filename file to read
+     * @param trimWhitespace trim whitespace
+     * @throws FileNotFoundException when the file is not found
+     */
+    public XReadLines(final File filename, final boolean trimWhitespace) throws FileNotFoundException {
+        this(new FileReader(filename), trimWhitespace, null);
+    }
+
+    /**
+     * Creates a new XReadLines object to read lines from filename
+     *
+     * @param filename file name
+     * @param trimWhitespace trim whitespace
+     * @param commentPrefix prefix for comments or null if no prefix is set
+     * @throws FileNotFoundException when the file is not found
+     */
+    public XReadLines(final File filename, final boolean trimWhitespace, final String commentPrefix) throws FileNotFoundException {
+        this(new FileReader(filename), trimWhitespace, commentPrefix);
+    }
+
+    public XReadLines(final InputStream inputStream) throws FileNotFoundException {
+        this(new InputStreamReader(inputStream), true, null);
+    }
+
+    public XReadLines(final InputStream inputStream, final boolean trimWhitespace) {
+        this(new InputStreamReader(inputStream), trimWhitespace, null);
+    }
+
+    /**
+     * Creates a new XReadLines object to read lines from an input stream
+     *
+     * @param inputStream input stream
+     * @param trimWhitespace trim whitespace
+     * @param commentPrefix prefix for comments or null if no prefix is set
+     */
+    public XReadLines(final InputStream inputStream, final boolean trimWhitespace, final String commentPrefix) {
+        this(new InputStreamReader(inputStream), trimWhitespace, commentPrefix);
+    }
+
+
+    /**
+     * Creates a new XReadLines object to read lines from a reader
+     *
+     * @param reader reader
+     */
+    public XReadLines(final Reader reader) {
+        this(reader, true, null);
+    }
+
+    /**
+     * Creates a new XReadLines object to read lines from a reader
+     *
+     * @param reader reader
+     * @param trimWhitespace trim whitespace
+     */
+    public XReadLines(final Reader reader, final boolean trimWhitespace) {
+        this(reader, trimWhitespace, null);
+    }
+
+    /**
+     * Creates a new XReadLines object to read lines from a reader.
+     * All other constructors delegate here.
+     *
+     * @param reader the underlying reader; wrapped in a BufferedReader unless it already is one
+     * @param trimWhitespace trim whitespace
+     * @param commentPrefix prefix for comments or null if no prefix is set
+     * @throws IllegalArgumentException if reading the first line fails (wraps the IOException)
+     */
+    public XReadLines(final Reader reader, final boolean trimWhitespace, final String commentPrefix) {
+        this.in = (reader instanceof BufferedReader) ? (BufferedReader)reader : new BufferedReader(reader);
+        this.trimWhitespace = trimWhitespace;
+        this.commentPrefix = commentPrefix;
+        try {
+            // Prime the one-line lookahead so hasNext() can answer immediately.
+            this.nextLine = readNextLine();
+        } catch(IOException e) {
+            throw new IllegalArgumentException(e);
+        }
+    }
+
+    /**
+     * Reads all of the lines in the file, and returns them as a list of strings
+     *
+     * @return all of the lines in the file.
+     */
+    public List<String> readLines() {
+        List<String> lines = new LinkedList<String>();
+        for ( String line : this ) {
+            lines.add(line);
+        }
+        return lines;
+    }
+
+    /**
+     * I'm an iterator too...
+     * Note: returns this object itself, so the instance can be iterated only once.
+     * @return an iterator
+     */
+    public Iterator<String> iterator() {
+        return this;
+    }
+
+    public boolean hasNext() {
+        return this.nextLine != null;
+    }
+
+    /**
+     * Actually reads the next line from the stream, not accessible publicly.
+     * Applies the trim-whitespace, skip-blank, and skip-comment policies.
+     * @return the next line or null at end of stream
+     * @throws IOException if an error occurs
+     */
+    private String readNextLine() throws IOException {
+        String nextLine;
+        while ((nextLine = this.in.readLine()) != null) {
+            if (this.trimWhitespace) {
+                nextLine = nextLine.trim();
+                if (nextLine.length() == 0)
+                    continue;   // skip blank lines when trimming
+            }
+            if (this.commentPrefix != null)
+                if (nextLine.startsWith(this.commentPrefix))
+                    continue;   // skip comment lines
+            break;
+        }
+        return nextLine;
+    }
+
+    /**
+     * Returns the next line (optionally minus whitespace).
+     * The underlying reader is closed automatically when the last line is consumed;
+     * any IOException is rethrown wrapped in an IllegalArgumentException.
+     * @return the next line
+     */
+    public String next() {
+        try {
+            String result = this.nextLine;
+            this.nextLine = readNextLine();
+
+            // If we haven't reached EOF yet
+            if (this.nextLine == null) {
+                in.close();             // And close on EOF
+            }
+
+            // Return the line we read last time through.
+            return result;
+        } catch(IOException e) {
+            throw new IllegalArgumentException(e);
+        }
+    }
+
+    // The file is read-only; we don't allow lines to be removed.
+    public void remove() {
+        throw new UnsupportedOperationException();
+    }
+
+    public void close() throws IOException {
+        this.in.close();
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/threading/EfficiencyMonitoringThreadFactory.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/threading/EfficiencyMonitoringThreadFactory.java
new file mode 100644
index 0000000..97d280a
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/threading/EfficiencyMonitoringThreadFactory.java
@@ -0,0 +1,160 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.threading;
+
+import com.google.java.contract.Ensures;
+import com.google.java.contract.Invariant;
+import com.google.java.contract.Requires;
+import org.apache.log4j.Logger;
+import org.apache.log4j.Priority;
+import org.broadinstitute.gatk.utils.AutoFormattingTime;
+
+import java.lang.management.ManagementFactory;
+import java.lang.management.ThreadInfo;
+import java.lang.management.ThreadMXBean;
+import java.util.ArrayList;
+import java.util.EnumMap;
+import java.util.List;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.ThreadFactory;
+import java.util.concurrent.TimeUnit;
+
+/**
+ * Creates threads that automatically monitor their efficiency via the parent ThreadEfficiencyMonitor
+ *
+ * User: depristo
+ * Date: 8/14/12
+ * Time: 8:47 AM
+ */
+ at Invariant({
+        "activeThreads.size() <= nThreadsToCreate",
+        "countDownLatch.getCount() <= nThreadsToCreate",
+        "nThreadsCreated <= nThreadsToCreate"
+})
+public class EfficiencyMonitoringThreadFactory extends ThreadEfficiencyMonitor implements ThreadFactory  {
+    final int nThreadsToCreate;
+    final List<Thread> activeThreads;
+
+    int nThreadsCreated = 0;
+
+    /**
+     * Counts down the number of active activeThreads whose runtime info hasn't been incorporated into
+     * times.  Counts down from nThreadsToCreate to 0, at which point any code waiting
+     * on the final times is freed to run.
+     */
+    final CountDownLatch countDownLatch;
+
+    /**
+     * Create a new factory generating threads whose runtime and contention
+     * behavior is tracked in this factory.
+     *
+     * @param nThreadsToCreate the number of threads we will create in the factory before it's considered complete
+     */
+    public EfficiencyMonitoringThreadFactory(final int nThreadsToCreate) {
+        super();
+        if ( nThreadsToCreate <= 0 ) throw new IllegalArgumentException("nThreadsToCreate <= 0: " + nThreadsToCreate);
+
+        this.nThreadsToCreate = nThreadsToCreate;
+        activeThreads = new ArrayList<Thread>(nThreadsToCreate);
+        countDownLatch = new CountDownLatch(nThreadsToCreate);
+    }
+
+    /**
+     * How many threads have been created by this factory so far?
+     * @return
+     */
+    @Ensures("result >= 0")
+    public int getNThreadsCreated() {
+        return nThreadsCreated;
+    }
+
+    /**
+     * Only useful for testing, so that we can wait for all of the threads in the factory to complete running
+     *
+     * @throws InterruptedException
+     */
+    protected void waitForAllThreadsToComplete() throws InterruptedException {
+        countDownLatch.await();
+    }
+
+    @Ensures({
+            "activeThreads.size() <= old(activeThreads.size())",
+            "! activeThreads.contains(thread)",
+            "countDownLatch.getCount() <= old(countDownLatch.getCount())"
+    })
+    @Override
+    public synchronized void threadIsDone(final Thread thread) {
+        nThreadsAnalyzed++;
+
+        if ( DEBUG ) logger.warn("  Countdown " + countDownLatch.getCount() + " in thread " + Thread.currentThread().getName());
+
+        super.threadIsDone(thread);
+
+        // remove the thread from the list of active activeThreads, if it's in there, and decrement the countdown latch
+        if ( activeThreads.remove(thread) ) {
+            // one less thread is live for those blocking on all activeThreads to be complete
+            countDownLatch.countDown();
+            if ( DEBUG ) logger.warn("  -> Countdown " + countDownLatch.getCount() + " in thread " + Thread.currentThread().getName());
+        }
+    }
+
+    /**
+     * Create a new thread from this factory
+     *
+     * @param runnable
+     * @return
+     */
+    @Override
+    @Ensures({
+            "activeThreads.size() > old(activeThreads.size())",
+            "activeThreads.contains(result)",
+            "nThreadsCreated == old(nThreadsCreated) + 1"
+    })
+    public synchronized Thread newThread(final Runnable runnable) {
+        if ( activeThreads.size() >= nThreadsToCreate)
+            throw new IllegalStateException("Attempting to create more activeThreads than allowed by constructor argument nThreadsToCreate " + nThreadsToCreate);
+
+        nThreadsCreated++;
+        final Thread myThread = new TrackingThread(runnable);
+        activeThreads.add(myThread);
+        return myThread;
+    }
+
+    /**
+     * A wrapper around Thread that tracks the runtime of the thread and calls threadIsDone() when complete
+     */
+    private class TrackingThread extends Thread {
+        private TrackingThread(Runnable runnable) {
+            super(runnable);
+        }
+
+        @Override
+        public void run() {
+            super.run();
+            threadIsDone(this);
+        }
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/threading/NamedThreadFactory.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/threading/NamedThreadFactory.java
new file mode 100644
index 0000000..ceb2380
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/threading/NamedThreadFactory.java
@@ -0,0 +1,51 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.threading;
+
+import java.util.concurrent.ThreadFactory;
+
/**
 * Thread factory that produces threads whose names follow a given format pattern.
 *
 * The pattern must contain a single integer conversion (e.g. "gatk-worker-%d")
 * which is filled with a globally increasing id.
 *
 * User: depristo
 * Date: 9/5/12
 * Time: 9:22 PM
 *
 */
public class NamedThreadFactory implements ThreadFactory {
    // Globally increasing id shared by all factories.  BUGFIX: the original
    // used an unsynchronized `static int id` with `id++` in newThread(),
    // which is a data race when executors call the factory from multiple
    // threads and could hand out duplicate names.
    static final java.util.concurrent.atomic.AtomicInteger id = new java.util.concurrent.atomic.AtomicInteger();
    final String format;

    public NamedThreadFactory(String format) {
        this.format = format;
        String.format(format, id.get()); // fail fast if the pattern is malformed
    }

    @Override
    public Thread newThread(Runnable r) {
        // getAndIncrement is atomic, so each created thread gets a unique id
        return new Thread(r, String.format(format, id.getAndIncrement()));
    }
}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/threading/ThreadEfficiencyMonitor.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/threading/ThreadEfficiencyMonitor.java
new file mode 100644
index 0000000..8d9bddd
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/threading/ThreadEfficiencyMonitor.java
@@ -0,0 +1,232 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.threading;
+
+import com.google.java.contract.Ensures;
+import com.google.java.contract.Invariant;
+import com.google.java.contract.Requires;
+import org.apache.log4j.Logger;
+import org.apache.log4j.Priority;
+import org.broadinstitute.gatk.utils.AutoFormattingTime;
+
+import java.lang.management.ManagementFactory;
+import java.lang.management.ThreadInfo;
+import java.lang.management.ThreadMXBean;
+import java.util.EnumMap;
+import java.util.concurrent.TimeUnit;
+
+/**
+ * Uses an MXBean to monitor thread efficiency
+ *
+ * Once the monitor is created, calls to threadIsDone() can be used to add information
+ * about the efficiency of the provided thread to this monitor.
+ *
+ * Provides simple print() for displaying efficiency information to a logger
+ *
+ * User: depristo
+ * Date: 8/22/12
+ * Time: 10:48 AM
+ */
+ at Invariant({"nThreadsAnalyzed >= 0"})
+public class ThreadEfficiencyMonitor {
+    protected static final boolean DEBUG = false;
+    protected static Logger logger = Logger.getLogger(EfficiencyMonitoringThreadFactory.class);
+    final EnumMap<State, Long> times = new EnumMap<State, Long>(State.class);
+
+    /**
+     * The number of threads we've included in our efficiency monitoring
+     */
+    int nThreadsAnalyzed = 0;
+
+    /**
+     * The bean used to get the thread info about blocked and waiting times
+     */
+    final ThreadMXBean bean;
+
+    public ThreadEfficiencyMonitor() {
+        bean = ManagementFactory.getThreadMXBean();
+
+        // get the bean, and start tracking
+        if ( bean.isThreadContentionMonitoringSupported() )
+            bean.setThreadContentionMonitoringEnabled(true);
+        else
+            logger.warn("Thread contention monitoring not supported, we cannot track GATK multi-threaded efficiency");
+        //bean.setThreadCpuTimeEnabled(true);
+
+        if ( bean.isThreadCpuTimeSupported() )
+            bean.setThreadCpuTimeEnabled(true);
+        else
+            logger.warn("Thread CPU monitoring not supported, we cannot track GATK multi-threaded efficiency");
+
+        // initialize times to 0
+        for ( final State state : State.values() )
+            times.put(state, 0l);
+    }
+
+    private static long nanoToMilli(final long timeInNano) {
+        return TimeUnit.NANOSECONDS.toMillis(timeInNano);
+    }
+
+    /**
+     * Get the time spent in state across all threads created by this factory
+     *
+     * @param state to get information about
+     * @return the time in milliseconds
+     */
+    @Ensures({"result >= 0"})
+    public synchronized long getStateTime(final State state) {
+        return times.get(state);
+    }
+
+    /**
+     * Get the total time spent in all states across all threads created by this factory
+     *
+     * @return the time in milliseconds
+     */
+    @Ensures({"result >= 0"})
+    public synchronized long getTotalTime() {
+        long total = 0;
+        for ( final long time : times.values() )
+            total += time;
+        return total;
+    }
+
+    /**
+     * Get the fraction of time spent in state across all threads created by this factory
+     *
+     * @return the percentage (0.0-100.0) of time spent in state over all state times of all threads
+     */
+    @Ensures({"result >= 0.0", "result <= 100.0"})
+    public synchronized double getStatePercent(final State state) {
+        return (100.0 * getStateTime(state)) / Math.max(getTotalTime(), 1);
+    }
+
+    public int getnThreadsAnalyzed() {
+        return nThreadsAnalyzed;
+    }
+
+    @Override
+    public synchronized String toString() {
+        final StringBuilder b = new StringBuilder();
+
+        b.append("total ").append(getTotalTime()).append(" ");
+        for ( final State state : State.values() ) {
+            b.append(state).append(" ").append(getStateTime(state)).append(" ");
+        }
+
+        return b.toString();
+    }
+
+    /**
+     * Print usage information about threads from this factory to logger
+     * with the INFO priority
+     *
+     * @param logger
+     */
+    public synchronized void printUsageInformation(final Logger logger) {
+        printUsageInformation(logger, Priority.INFO);
+    }
+
+    /**
+     * Print usage information about threads from this factory to logger
+     * with the provided priority
+     *
+     * @param logger
+     */
+    public synchronized void printUsageInformation(final Logger logger, final Priority priority) {
+        logger.debug("Number of threads monitored: " + getnThreadsAnalyzed());
+        logger.debug("Total runtime " + new AutoFormattingTime(TimeUnit.MILLISECONDS.toNanos(getTotalTime())));
+        for ( final State state : State.values() ) {
+            logger.debug(String.format("\tPercent of time spent %s is %.2f", state.getUserFriendlyName(), getStatePercent(state)));
+        }
+        logger.log(priority, String.format("CPU      efficiency : %6.2f%% of time spent %s", getStatePercent(State.USER_CPU), State.USER_CPU.getUserFriendlyName()));
+        logger.log(priority, String.format("Walker inefficiency : %6.2f%% of time spent %s", getStatePercent(State.BLOCKING), State.BLOCKING.getUserFriendlyName()));
+        logger.log(priority, String.format("I/O    inefficiency : %6.2f%% of time spent %s", getStatePercent(State.WAITING_FOR_IO), State.WAITING_FOR_IO.getUserFriendlyName()));
+        logger.log(priority, String.format("Thread inefficiency : %6.2f%% of time spent %s", getStatePercent(State.WAITING), State.WAITING.getUserFriendlyName()));
+    }
+
+    /**
+     * Update the information about completed thread that ran for runtime in milliseconds
+     *
+     * This method updates all of the key timing and tracking information in the factory so that
+     * thread can be retired.  After this call the factory shouldn't have a pointer to the thread any longer
+     *
+     * @param thread the thread whose information we are updating
+     */
+    @Ensures({
+            "getTotalTime() >= old(getTotalTime())"
+    })
+    public synchronized void threadIsDone(final Thread thread) {
+        nThreadsAnalyzed++;
+
+        if ( DEBUG ) logger.warn("UpdateThreadInfo called");
+
+        final long threadID = thread.getId();
+        final ThreadInfo info = bean.getThreadInfo(thread.getId());
+        final long totalTimeNano = bean.getThreadCpuTime(threadID);
+        final long userTimeNano = bean.getThreadUserTime(threadID);
+        final long systemTimeNano = totalTimeNano - userTimeNano;
+        final long userTimeInMilliseconds = nanoToMilli(userTimeNano);
+        final long systemTimeInMilliseconds = nanoToMilli(systemTimeNano);
+
+        if ( info != null ) {
+            if ( DEBUG ) logger.warn("Updating thread with user runtime " + userTimeInMilliseconds + " and system runtime " + systemTimeInMilliseconds + " of which blocked " + info.getBlockedTime() + " and waiting " + info.getWaitedTime());
+            incTimes(State.BLOCKING, info.getBlockedTime());
+            incTimes(State.WAITING, info.getWaitedTime());
+            incTimes(State.USER_CPU, userTimeInMilliseconds);
+            incTimes(State.WAITING_FOR_IO, systemTimeInMilliseconds);
+        }
+    }
+
+    /**
+     * Helper function that increments the times counter by by for state
+     *
+     * @param state
+     * @param by
+     */
+    @Requires({"state != null", "by >= 0"})
+    @Ensures("getTotalTime() == old(getTotalTime()) + by")
+    private synchronized void incTimes(final State state, final long by) {
+        times.put(state, times.get(state) + by);
+    }
+
+    public enum State {
+        BLOCKING("blocking on synchronized data structures"),
+        WAITING("waiting on some other thread"),
+        USER_CPU("doing productive CPU work"),
+        WAITING_FOR_IO("waiting for I/O");
+
+        private final String userFriendlyName;
+
+        private State(String userFriendlyName) {
+            this.userFriendlyName = userFriendlyName;
+        }
+
+        public String getUserFriendlyName() {
+            return userFriendlyName;
+        }
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/threading/ThreadLocalArray.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/threading/ThreadLocalArray.java
new file mode 100644
index 0000000..61d4c0d
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/threading/ThreadLocalArray.java
@@ -0,0 +1,65 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.threading;
+
+import java.lang.reflect.Array;
+
/**
 * ThreadLocal implementation for arrays
 *
 * Example usage:
 *
 * private ThreadLocal<byte[]> threadLocalByteArray = new ThreadLocalArray<byte[]>(length, byte.class);
 * ....
 * byte[] byteArray = threadLocalByteArray.get();
 *
 * @param <T> the type of the array itself (eg., int[], double[], etc.)
 *
 * @author David Roazen
 */
public class ThreadLocalArray<T> extends ThreadLocal<T> {
    // Both fields are set once in the constructor and never change, so mark
    // them final; Class<?> replaces the raw Class type used originally.
    private final int arraySize;
    private final Class<?> arrayElementType;

    /**
     * Create a new ThreadLocalArray
     *
     * @param arraySize desired length of the array
     * @param arrayElementType type of the elements within the array (eg., Byte.class, Integer.class, etc.)
     */
    public ThreadLocalArray( int arraySize, Class<?> arrayElementType ) {
        super();

        this.arraySize = arraySize;
        this.arrayElementType = arrayElementType;
    }

    @Override
    @SuppressWarnings("unchecked")
    protected T initialValue() {
        // Array.newInstance returns Object; the cast is safe as long as T
        // matches the element type the caller supplied
        return (T)Array.newInstance(arrayElementType, arraySize);
    }
}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/threading/ThreadPoolMonitor.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/threading/ThreadPoolMonitor.java
new file mode 100644
index 0000000..9af1b36
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/threading/ThreadPoolMonitor.java
@@ -0,0 +1,77 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.threading;
+
+import org.apache.log4j.Logger;
+/**
+ * User: hanna
+ * Date: Apr 29, 2009
+ * Time: 4:27:58 PM
+ * BROAD INSTITUTE SOFTWARE COPYRIGHT NOTICE AND AGREEMENT
+ * Software and documentation are copyright 2005 by the Broad Institute.
+ * All rights are reserved.
+ *
+ * Users acknowledge that this software is supplied without any warranty or support.
+ * The Broad Institute is not responsible for its use, misuse, or
+ * functionality.
+ */
+
+/**
+ * Waits for a signal to come through that the thread pool has run
+ * a given task and therefore has a free slot.
+ *
+ * Make sure, that, when using, the submit and the run are both
+ * protected by the same synchronized(monitor) lock.  See the test
+ * case for an example.
+ */
+public class ThreadPoolMonitor implements Runnable {
+    /**
+     * Logger for reporting interruptions, etc.
+     */
+    private static Logger logger = Logger.getLogger(ThreadPoolMonitor.class);
+
+    /**
+     * Watch the monitor
+     */
+    public synchronized void watch() {
+        try {
+            wait();
+        }
+        catch( InterruptedException ex ) {
+            logger.error("ThreadPoolMonitor interrupted:" + ex.getStackTrace());
+            throw new RuntimeException("ThreadPoolMonitor interrupted", ex);
+        }
+    }
+
+    /**
+     * Instruct the monitor that the thread pool has run for the class.
+     * Only the thread pool should execute this method.
+     */
+    public synchronized void run() {
+        notify();
+    }
+}
+
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/threading/package-info.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/threading/package-info.java
new file mode 100644
index 0000000..6865d11
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/threading/package-info.java
@@ -0,0 +1,26 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.threading;
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/variant/ChromosomeCountConstants.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/variant/ChromosomeCountConstants.java
new file mode 100644
index 0000000..8b1aa1d
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/variant/ChromosomeCountConstants.java
@@ -0,0 +1,44 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.variant;
+
+import htsjdk.variant.vcf.VCFConstants;
+import htsjdk.variant.vcf.VCFInfoHeaderLine;
+import htsjdk.variant.vcf.VCFStandardHeaderLines;
+
+
+/**
+ * Keys and descriptions for the common chromosome count annotations
+ */
+public class ChromosomeCountConstants {
+
+    public static final String[] keyNames = { VCFConstants.ALLELE_NUMBER_KEY, VCFConstants.ALLELE_COUNT_KEY, VCFConstants.ALLELE_FREQUENCY_KEY };
+
+    public static final VCFInfoHeaderLine[] descriptions = {
+            VCFStandardHeaderLines.getInfoLine(VCFConstants.ALLELE_FREQUENCY_KEY),
+            VCFStandardHeaderLines.getInfoLine(VCFConstants.ALLELE_COUNT_KEY),
+            VCFStandardHeaderLines.getInfoLine(VCFConstants.ALLELE_NUMBER_KEY) };
+}
\ No newline at end of file
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/variant/GATKVCFConstants.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/variant/GATKVCFConstants.java
new file mode 100644
index 0000000..ff04c29
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/variant/GATKVCFConstants.java
@@ -0,0 +1,175 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.variant;
+
+import htsjdk.variant.variantcontext.Allele;
+
+/**
+ * This class contains any constants (primarily FORMAT/INFO keys) in VCF files used by the GATK.
+ * Note that VCF-standard constants are in VCFConstants, in htsjdk.  Keys in header lines should
+ * have matching entries in GATKVCFHeaderLines
+ */
+public final class GATKVCFConstants {
+
+    //INFO keys
+    public static final String RAW_RMS_MAPPING_QUALITY_KEY =        "RAW_MQ";
+    public static final String AS_RMS_MAPPING_QUALITY_KEY =         "AS_MQ";
+    public static final String AS_RAW_RMS_MAPPING_QUALITY_KEY =     "AS_RAW_MQ";
+    public static final String ALLELE_BALANCE_HET_KEY =             "ABHet";
+    public static final String ALLELE_BALANCE_HOM_KEY =             "ABHom";
+    public static final String ORIGINAL_AC_KEY =                    "AC_Orig"; //SelectVariants
+    public static final String BEAGLE_AC_COMP_KEY =                 "ACH"; //BeagleOutputToVCF
+    public static final String ORIGINAL_AF_KEY =                    "AF_Orig"; //SelectVariants
+    public static final String BEAGLE_AF_COMP_KEY =                 "AFH"; //BeagleOutputToVCF
+    public static final String ORIGINAL_AN_KEY =                    "AN_Orig"; //SelectVariants
+    public static final String BEAGLE_AN_COMP_KEY =                 "ANH"; //BeagleOutputToVCF
+    public static final String BASE_COUNTS_KEY =                    "BaseCounts";
+    public static final String BASE_QUAL_RANK_SUM_KEY =             "BaseQRankSum";
+    public static final String AS_BASE_QUAL_RANK_SUM_KEY =          "AS_BaseQRankSum";
+    public static final String AS_RAW_BASE_QUAL_RANK_SUM_KEY =      "AS_RAW_BaseQRankSum";
+    public static final String GENOTYPE_AND_VALIDATE_STATUS_KEY =   "callStatus";
+    public static final String CLIPPING_RANK_SUM_KEY =              "ClippingRankSum";
+    public static final String CULPRIT_KEY =                        "culprit";
+    public static final String SPANNING_DELETIONS_KEY =             "Dels";
+    public static final String ORIGINAL_DP_KEY =                    "DP_Orig"; //SelectVariants
+    public static final String DOWNSAMPLED_KEY =                    "DS";
+    public static final String EVENT_COUNT_IN_HAPLOTYPE_KEY =       "ECNT"; //M2
+    public static final String EVENT_DISTANCE_MAX_KEY =             "MAX_ED"; //M2
+    public static final String EVENT_DISTANCE_MIN_KEY =             "MIN_ED"; //M2
+    public static final String FISHER_STRAND_KEY =                  "FS";
+    public static final String AS_FISHER_STRAND_KEY =               "AS_FS";
+    public static final String FRACTION_INFORMATIVE_READS_KEY =     "FractionInformativeReads";
+    public static final String AS_SB_TABLE_KEY =                    "AS_SB_TABLE";
+    public static final String GC_CONTENT_KEY =                     "GC";
+    public static final String GQ_MEAN_KEY =                        "GQ_MEAN";
+    public static final String GQ_STDEV_KEY =                       "GQ_STDDEV";
+    public static final String HAPLOTYPE_COUNT_KEY =                "HCNT"; //M2
+    public static final String HAPLOTYPE_SCORE_KEY =                "HaplotypeScore";
+    public static final String HI_CONF_DENOVO_KEY =                 "hiConfDeNovo";
+    public static final String HOMOPOLYMER_RUN_KEY =                "HRun";
+    public static final String HARDY_WEINBERG_KEY =                 "HW";
+    public static final String AVG_INTERVAL_DP_KEY =                "IDP"; //DiagnoseTargets
+    public static final String INTERVAL_GC_CONTENT_KEY =            "IGC";
+    public static final String INBREEDING_COEFFICIENT_KEY =         "InbreedingCoeff";
+    public static final String AS_INBREEDING_COEFFICIENT_KEY =      "AS_InbreedingCoeff";
+    public static final String EXCESS_HET_KEY =                     "ExcessHet";
+    public static final String AS_HETEROZYGOSITY_KEY =              "AS_InbreedingCoeff";
+    public static final String LIKELIHOOD_RANK_SUM_KEY =            "LikelihoodRankSum";
+    public static final String LO_CONF_DENOVO_KEY =                 "loConfDeNovo";
+    public static final String LOW_MQ_KEY =                         "LowMQ";
+    public static final String MLE_ALLELE_COUNT_KEY =               "MLEAC";
+    public static final String MLE_ALLELE_FREQUENCY_KEY =           "MLEAF";
+    public static final String MLE_PER_SAMPLE_ALLELE_COUNT_KEY =    "MLPSAC";
+    public static final String MLE_PER_SAMPLE_ALLELE_FRACTION_KEY = "MLPSAF";
+    public static final String MAP_QUAL_RANK_SUM_KEY =              "MQRankSum";
+    public static final String RAW_MAP_QUAL_RANK_SUM_KEY =          "RAW_MQRankSum";
+    public static final String AS_MAP_QUAL_RANK_SUM_KEY =           "AS_MQRankSum";
+    public static final String AS_RAW_MAP_QUAL_RANK_SUM_KEY =       "AS_RAW_MQRankSum";
+    public static final String MENDEL_VIOLATION_LR_KEY =            "MVLR";
+    public static final String NOCALL_CHROM_KEY =                   "NCC";
+    public static final String NUMBER_OF_DISCOVERED_ALLELES_KEY =   "NDA";
+    public static final String NEGATIVE_LABEL_KEY =                 "NEGATIVE_TRAIN_SITE";
+    public static final String NUM_GENOTYPES_CHANGED_KEY =          "NumGenotypesChanged"; //BeagleOutputToVCF
+    public static final String NON_DIPLOID_RATIO_KEY =              "OND";
+    public static final String ORIGINAL_ALT_ALLELE_INFO_KEY =       "OriginalAltAllele"; //BeagleOutputToVCF
+    public static final String ORIGINAL_CONTIG_KEY =                "OriginalChr"; //LiftoverVariants
+    public static final String ORIGINAL_START_KEY =                 "OriginalStart"; //LiftoverVariants
+    public static final String N_BASE_COUNT_KEY =                   "PercentNBase";
+    public static final String NORMAL_LOD_KEY =                     "NLOD"; //M2
+    public static final String RBP_INCONSISTENT_KEY =               "PhasingInconsistent"; //ReadBackedPhasing
+    public static final String GENOTYPE_PRIOR_KEY =                 "PG";
+    public static final String PANEL_OF_NORMALS_COUNT_KEY =         "PON"; //M2
+    public static final String POSITIVE_LABEL_KEY =                 "POSITIVE_TRAIN_SITE";
+    public static final String QUAL_BY_DEPTH_KEY =                  "QD";
+    public static final String AS_QUAL_BY_DEPTH_KEY =               "AS_QD";
+    public static final String AS_QUAL_KEY =                        "AS_QUAL";
+    public static final String BEAGLE_R2_KEY =                      "R2"; //BeagleOutputToVCF
+    public static final String AS_READ_POS_RANK_SUM_KEY =           "AS_ReadPosRankSum";
+    public static final String AS_RAW_READ_POS_RANK_SUM_KEY =       "AS_RAW_ReadPosRankSum";
+    public static final String READ_POS_RANK_SUM_KEY =              "ReadPosRankSum";
+    public static final String REFSAMPLE_DEPTH_KEY =                "REFDEPTH";
+    public static final String REPEATS_PER_ALLELE_KEY =             "RPA";
+    public static final String REPEAT_UNIT_KEY =                    "RU";
+    public static final String SAMPLE_LIST_KEY =                    "Samples";
+    public static final String STRAND_ODDS_RATIO_KEY =              "SOR";
+    public static final String AS_STRAND_ODDS_RATIO_KEY =           "AS_SOR";
+    public static final String STR_PRESENT_KEY =                    "STR";
+    public static final String TRANSMISSION_DISEQUILIBRIUM_KEY =    "TDT";
+    public static final String TUMOR_LOD_KEY =                      "TLOD"; //M2
+    public static final String VARIANT_TYPE_KEY =                   "VariantType";
+    public static final String VQS_LOD_KEY =                        "VQSLOD";
+    public static final String OXOG_ALT_F1R2_KEY =                  "ALT_F1R2";
+    public static final String OXOG_ALT_F2R1_KEY =                  "ALT_F2R1";
+    public static final String OXOG_REF_F1R2_KEY =                  "REF_F1R2";
+    public static final String OXOG_REF_F2R1_KEY =                  "REF_F2R1";
+    public static final String OXOG_FRACTION_KEY =                  "FOXOG";
+
+    //FORMAT keys
+    public static final String ALLELE_BALANCE_KEY =                 "AB";
+    public static final String ALLELE_FRACTION_KEY =                "AF"; //M2
+    public static final String BASE_COUNTS_BY_SAMPLE_KEY =          "BCS";
+    public static final String PL_FOR_ALL_SNP_ALLELES_KEY =         "APL";
+    public static final String RBP_HAPLOTYPE_KEY =                  "HP"; //ReadBackedPhasing
+    public static final String AVG_INTERVAL_DP_BY_SAMPLE_KEY =      "IDP"; //DiagnoseTargets
+    public static final String JOINT_LIKELIHOOD_TAG_NAME =          "JL"; //FamilyLikelihoodsUtils
+    public static final String JOINT_POSTERIOR_TAG_NAME =           "JP"; //FamilyLikelihoodsUtils
+    public static final String LOW_COVERAGE_LOCI =                  "LL"; //DiagnoseTargets
+    public final static String MIN_DP_FORMAT_KEY =                  "MIN_DP";
+    public static final String MAPPING_QUALITY_ZERO_BY_SAMPLE_KEY = "MQ0";
+    public static final String ORIGINAL_GENOTYPE_KEY =              "OG"; //BeagleOutputToVCF
+    public static final String HAPLOTYPE_CALLER_PHASING_GT_KEY =    "PGT";
+    public static final String HAPLOTYPE_CALLER_PHASING_ID_KEY =    "PID";
+    public static final String PHRED_SCALED_POSTERIORS_KEY =        "PP"; //FamilyLikelihoodsUtils / PosteriorLikelihoodsUtils
+    public static final String QUALITY_SCORE_SUM_KEY =              "QSS"; //M2
+    public static final String REFERENCE_GENOTYPE_QUALITY =         "RGQ";
+    public static final String STRAND_COUNT_BY_SAMPLE_KEY =         "SAC";
+    public static final String STRAND_BIAS_BY_SAMPLE_KEY =          "SB";
+    public final static String TRANSMISSION_PROBABILITY_KEY =       "TP"; //PhaseByTransmission
+    public static final String ZERO_COVERAGE_LOCI =                 "ZL"; //DiagnoseTargets
+
+    //FILTERS
+    /* Note that many filters used throughout GATK (most notably in VariantRecalibration) are dynamic,
+       their names (or descriptions) depend on some threshold.  Those filters are not included here
+     */
+    public static final String ALT_ALLELE_IN_NORMAL_FILTER_NAME =             "alt_allele_in_normal"; //M2
+    public static final String BEAGLE_MONO_FILTER_NAME =                      "BGL_SET_TO_MONOMORPHIC";
+    public static final String CLUSTERED_EVENTS_FILTER_NAME =                 "clustered_events"; //M2
+    public static final String GERMLINE_RISK_FILTER_NAME =                    "germline_risk"; //M2
+    public static final String HOMOLOGOUS_MAPPING_EVENT_FILTER_NAME =         "homologous_mapping_event"; //M2
+    public static final String LOW_QUAL_FILTER_NAME =                         "LowQual";
+    public static final String MULTI_EVENT_ALT_ALLELE_IN_NORMAL_FILTER_NAME = "multi_event_alt_allele_in_normal"; //M2
+    public static final String PON_FILTER_NAME =                              "panel_of_normals"; //M2
+    public static final String STR_CONTRACTION_FILTER_NAME =                  "str_contraction"; //M2
+    public static final String TUMOR_LOD_FILTER_NAME =                        "t_lod_fstar"; //M2
+    public static final String TRIALLELIC_SITE_FILTER_NAME =                  "triallelic_site"; //M2
+
+    // Symbolic alleles
+    public final static String SYMBOLIC_ALLELE_DEFINITION_HEADER_TAG = "ALT";
+    public final static String NON_REF_SYMBOLIC_ALLELE_NAME = "NON_REF";
+    public final static Allele NON_REF_SYMBOLIC_ALLELE = Allele.create("<"+NON_REF_SYMBOLIC_ALLELE_NAME+">", false); // represents any possible non-ref allele at this site
+    public final static String SPANNING_DELETION_SYMBOLIC_ALLELE_NAME_DEPRECATED = "*:DEL";
+    public final static Allele SPANNING_DELETION_SYMBOLIC_ALLELE_DEPRECATED = Allele.create("<"+SPANNING_DELETION_SYMBOLIC_ALLELE_NAME_DEPRECATED+">", false); // represents any possible spanning deletion allele at this site
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/variant/GATKVCFHeaderLines.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/variant/GATKVCFHeaderLines.java
new file mode 100644
index 0000000..f0fa072
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/variant/GATKVCFHeaderLines.java
@@ -0,0 +1,200 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.variant;
+
+import htsjdk.variant.vcf.*;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import static org.broadinstitute.gatk.utils.variant.GATKVCFConstants.*;
+
+/**
+ * This class contains the VCFHeaderLine definitions for the annotation keys in GATKVCFConstants.
+ * VCF-standard header lines are in VCFStandardHeaderLines, in htsjdk
+ */
+public class GATKVCFHeaderLines {
+
+    public static VCFInfoHeaderLine getInfoLine(final String id) { return infoLines.get(id); }
+    public static VCFFormatHeaderLine getFormatLine(final String id) { return formatLines.get(id); }
+    public static VCFFilterHeaderLine getFilterLine(final String id) { return filterLines.get(id); }
+
+    private static Map<String, VCFInfoHeaderLine> infoLines = new HashMap<>(60);
+    private static Map<String, VCFFormatHeaderLine> formatLines = new HashMap<>(25);
+    private static Map<String, VCFFilterHeaderLine> filterLines = new HashMap<>(2);
+
+    private static void addFormatLine(final VCFFormatHeaderLine line) {
+        formatLines.put(line.getID(), line);
+    }
+
+    private static void addInfoLine(final VCFInfoHeaderLine line) {
+        infoLines.put(line.getID(), line);
+    }
+
+    private static void addFilterLine(final VCFFilterHeaderLine line) {
+        filterLines.put(line.getID(), line);
+    }
+
+    static {
+        addFilterLine(new VCFFilterHeaderLine(LOW_QUAL_FILTER_NAME, "Low quality"));
+        addFilterLine(new VCFFilterHeaderLine(BEAGLE_MONO_FILTER_NAME, "This site was set to monomorphic by Beagle"));
+
+        // M2-related filters
+        addFilterLine(new VCFFilterHeaderLine(GATKVCFConstants.ALT_ALLELE_IN_NORMAL_FILTER_NAME, "Evidence seen in the normal sample"));
+        addFilterLine(new VCFFilterHeaderLine(GATKVCFConstants.CLUSTERED_EVENTS_FILTER_NAME, "Clustered events observed in the tumor"));
+        addFilterLine(new VCFFilterHeaderLine(GATKVCFConstants.GERMLINE_RISK_FILTER_NAME, "Evidence indicates this site is germline, not somatic"));
+        addFilterLine(new VCFFilterHeaderLine(GATKVCFConstants.HOMOLOGOUS_MAPPING_EVENT_FILTER_NAME, "More than three events were observed in the tumor"));
+        addFilterLine(new VCFFilterHeaderLine(GATKVCFConstants.MULTI_EVENT_ALT_ALLELE_IN_NORMAL_FILTER_NAME, "Multiple events observed in tumor and normal"));
+        addFilterLine(new VCFFilterHeaderLine(GATKVCFConstants.PON_FILTER_NAME, "Seen in at least 2 samples in the panel of normals"));
+        addFilterLine(new VCFFilterHeaderLine(GATKVCFConstants.TUMOR_LOD_FILTER_NAME, "Tumor does not meet likelihood threshold"));
+        addFilterLine(new VCFFilterHeaderLine(GATKVCFConstants.STR_CONTRACTION_FILTER_NAME, "Site filtered due to contraction of short tandem repeat region"));
+        addFilterLine(new VCFFilterHeaderLine(GATKVCFConstants.TRIALLELIC_SITE_FILTER_NAME, "Site filtered because more than two alt alleles pass tumor LOD"));
+
+        addFormatLine(new VCFFormatHeaderLine(ALLELE_BALANCE_KEY, 1, VCFHeaderLineType.Float, "Allele balance for each het genotype"));
+        addFormatLine(new VCFFormatHeaderLine(BASE_COUNTS_BY_SAMPLE_KEY, 4, VCFHeaderLineType.Integer, "Counts of each base by sample"));
+        addFormatLine(new VCFFormatHeaderLine(MAPPING_QUALITY_ZERO_BY_SAMPLE_KEY, 1, VCFHeaderLineType.Integer, "Number of Mapping Quality Zero Reads per sample"));
+        addFormatLine(new VCFFormatHeaderLine(MLE_PER_SAMPLE_ALLELE_COUNT_KEY, VCFHeaderLineCount.A, VCFHeaderLineType.Integer, "Maximum likelihood expectation (MLE) for the alternate allele count, in the same order as listed, for each individual sample"));
+        addFormatLine(new VCFFormatHeaderLine(MLE_PER_SAMPLE_ALLELE_FRACTION_KEY, VCFHeaderLineCount.A, VCFHeaderLineType.Float, "Maximum likelihood expectation (MLE) for the alternate allele fraction, in the same order as listed, for each individual sample"));
+        addFormatLine(new VCFFormatHeaderLine(STRAND_COUNT_BY_SAMPLE_KEY, VCFHeaderLineCount.UNBOUNDED, VCFHeaderLineType.Integer, "Number of reads on the forward and reverse strand supporting each allele (including reference)"));
+        addFormatLine(new VCFFormatHeaderLine(STRAND_BIAS_BY_SAMPLE_KEY, 4, VCFHeaderLineType.Integer, "Per-sample component statistics which comprise the Fisher's Exact Test to detect strand bias."));
+        addFormatLine(new VCFFormatHeaderLine(MLE_PER_SAMPLE_ALLELE_COUNT_KEY, VCFHeaderLineCount.A, VCFHeaderLineType.Integer, "Maximum likelihood expectation (MLE) for the alternate allele count, in the same order as listed, for each individual sample"));
+        addFormatLine(new VCFFormatHeaderLine(MLE_PER_SAMPLE_ALLELE_FRACTION_KEY, VCFHeaderLineCount.A, VCFHeaderLineType.Float, "Maximum likelihood expectation (MLE) for the alternate allele fraction, in the same order as listed, for each individual sample"));
+        addFormatLine(new VCFFormatHeaderLine(PL_FOR_ALL_SNP_ALLELES_KEY, 10, VCFHeaderLineType.Integer, "Phred-scaled genotype likelihoods for all 4 possible bases regardless of whether there is statistical evidence for them. Ordering is always PL for AA AC CC GA GC GG TA TC TG TT."));
+        addFormatLine(new VCFFormatHeaderLine(HAPLOTYPE_CALLER_PHASING_ID_KEY, 1, VCFHeaderLineType.String, "Physical phasing ID information, where each unique ID within a given sample (but not across samples) connects records within a phasing group"));
+        addFormatLine(new VCFFormatHeaderLine(HAPLOTYPE_CALLER_PHASING_GT_KEY, 1, VCFHeaderLineType.String, "Physical phasing haplotype information, describing how the alternate alleles are phased in relation to one another"));
+
+        addFormatLine(new VCFFormatHeaderLine(MIN_DP_FORMAT_KEY, 1, VCFHeaderLineType.Integer, "Minimum DP observed within the GVCF block"));
+        addFormatLine(new VCFFormatHeaderLine(REFERENCE_GENOTYPE_QUALITY, 1, VCFHeaderLineType.Integer, "Unconditional reference genotype confidence, encoded as a phred quality -10*log10 p(genotype call is wrong)"));
+        addFormatLine(new VCFFormatHeaderLine(TRANSMISSION_PROBABILITY_KEY, 1, VCFHeaderLineType.Integer, "Phred score of the genotype combination and phase given that the genotypes are correct"));
+        addFormatLine(new VCFFormatHeaderLine(RBP_HAPLOTYPE_KEY, VCFHeaderLineCount.UNBOUNDED, VCFHeaderLineType.String, "Read-backed phasing haplotype identifiers"));
+        addFormatLine(new VCFFormatHeaderLine(AVG_INTERVAL_DP_BY_SAMPLE_KEY, 1, VCFHeaderLineType.Float, "Average sample depth across the interval. Sum of the sample specific depth in all loci divided by interval size."));
+        addFormatLine(new VCFFormatHeaderLine(LOW_COVERAGE_LOCI, 1, VCFHeaderLineType.Integer, "Number of loci for this sample, in this interval with low coverage (below the minimum coverage) but not zero."));
+        addFormatLine(new VCFFormatHeaderLine(ZERO_COVERAGE_LOCI, 1, VCFHeaderLineType.Integer, "Number of loci for this sample, in this interval with zero coverage."));
+        addFormatLine(new VCFFormatHeaderLine(PHRED_SCALED_POSTERIORS_KEY, VCFHeaderLineCount.G, VCFHeaderLineType.Integer, "Phred-scaled Posterior Genotype Probabilities"));
+        addFormatLine(new VCFFormatHeaderLine(JOINT_LIKELIHOOD_TAG_NAME, 1, VCFHeaderLineType.Integer, "Phred-scaled joint likelihood of the genotype combination (before applying family priors)"));
+        addFormatLine(new VCFFormatHeaderLine(JOINT_POSTERIOR_TAG_NAME, 1, VCFHeaderLineType.Integer, "Phred-scaled joint posterior probability of the genotype combination (after applying family priors)"));
+        addFormatLine(new VCFFormatHeaderLine(ORIGINAL_GENOTYPE_KEY, 1, VCFHeaderLineType.String, "Original Genotype input to Beagle"));
+
+        // M2-related info lines
+        addFormatLine(new VCFFormatHeaderLine(GATKVCFConstants.ALLELE_FRACTION_KEY, 1, VCFHeaderLineType.Float, "Allele fraction of the event in the tumor"));
+        addFormatLine(new VCFFormatHeaderLine(GATKVCFConstants.OXOG_ALT_F1R2_KEY, 1, VCFHeaderLineType.Integer, "Count of reads in F1R2 pair orientation supporting the alternate allele"));
+        addFormatLine(new VCFFormatHeaderLine(GATKVCFConstants.OXOG_ALT_F2R1_KEY, 1, VCFHeaderLineType.Integer, "Count of reads in F2R1 pair orientation supporting the alternate allele"));
+        addFormatLine(new VCFFormatHeaderLine(GATKVCFConstants.OXOG_REF_F1R2_KEY, 1, VCFHeaderLineType.Integer, "Count of reads in F1R2 pair orientation supporting the reference allele"));
+        addFormatLine(new VCFFormatHeaderLine(GATKVCFConstants.OXOG_REF_F2R1_KEY, 1, VCFHeaderLineType.Integer, "Count of reads in F2R1 pair orientation supporting the reference allele"));
+        addFormatLine(new VCFFormatHeaderLine(GATKVCFConstants.OXOG_FRACTION_KEY, 1, VCFHeaderLineType.Float, "Fraction of alt reads indicating OxoG error"));
+
+
+        addInfoLine(new VCFInfoHeaderLine(MLE_ALLELE_COUNT_KEY, VCFHeaderLineCount.A, VCFHeaderLineType.Integer, "Maximum likelihood expectation (MLE) for the allele counts (not necessarily the same as the AC), for each ALT allele, in the same order as listed"));
+        addInfoLine(new VCFInfoHeaderLine(MLE_ALLELE_FREQUENCY_KEY, VCFHeaderLineCount.A, VCFHeaderLineType.Float, "Maximum likelihood expectation (MLE) for the allele frequency (not necessarily the same as the AF), for each ALT allele, in the same order as listed"));
+        addInfoLine(new VCFInfoHeaderLine(DOWNSAMPLED_KEY, 0, VCFHeaderLineType.Flag, "Were any of the samples downsampled?"));
+        addInfoLine(new VCFInfoHeaderLine(ALLELE_BALANCE_HET_KEY, 1, VCFHeaderLineType.Float, "Allele Balance for heterozygous calls (ref/(ref+alt))"));
+        addInfoLine(new VCFInfoHeaderLine(ALLELE_BALANCE_HOM_KEY, 1, VCFHeaderLineType.Float, "Allele Balance for homozygous calls (A/(A+O)) where A is the allele (ref or alt) and O is anything other"));
+        addInfoLine(new VCFInfoHeaderLine(NON_DIPLOID_RATIO_KEY, 1, VCFHeaderLineType.Float, "Overall non-diploid ratio (alleles/(alleles+non-alleles))"));
+        addInfoLine(new VCFInfoHeaderLine(BASE_COUNTS_KEY, 4, VCFHeaderLineType.Integer, "Counts of each base"));
+        addInfoLine(new VCFInfoHeaderLine(LOW_MQ_KEY, 3, VCFHeaderLineType.Float, "3-tuple: <fraction of reads with MQ=0>,<fraction of reads with MQ<=10>,<total number of reads>"));
+        addInfoLine(new VCFInfoHeaderLine(N_BASE_COUNT_KEY, 1, VCFHeaderLineType.Float, "Percentage of N bases in the pileup"));
+        addInfoLine(new VCFInfoHeaderLine(BASE_QUAL_RANK_SUM_KEY, 1, VCFHeaderLineType.Float, "Z-score from Wilcoxon rank sum test of Alt Vs. Ref base qualities"));
+        addInfoLine(new VCFInfoHeaderLine(AS_BASE_QUAL_RANK_SUM_KEY, VCFHeaderLineCount.A, VCFHeaderLineType.Float, "allele specific Z-score from Wilcoxon rank sum test of each Alt Vs. Ref base qualities"));
+        addInfoLine(new VCFInfoHeaderLine(AS_RAW_BASE_QUAL_RANK_SUM_KEY, 1, VCFHeaderLineType.String, "raw data for allele specific rank sum test of base qualities"));
+        addInfoLine(new VCFInfoHeaderLine(CLIPPING_RANK_SUM_KEY, 1, VCFHeaderLineType.Float, "Z-score From Wilcoxon rank sum test of Alt vs. Ref number of hard clipped bases"));
+        addInfoLine(new VCFInfoHeaderLine(FISHER_STRAND_KEY, 1, VCFHeaderLineType.Float, "Phred-scaled p-value using Fisher's exact test to detect strand bias"));
+        addInfoLine(new VCFInfoHeaderLine(AS_FISHER_STRAND_KEY, VCFHeaderLineCount.A, VCFHeaderLineType.Float, "allele specific phred-scaled p-value using Fisher's exact test to detect strand bias of each alt allele"));
+        addInfoLine(new VCFInfoHeaderLine(AS_SB_TABLE_KEY, 1, VCFHeaderLineType.String, "Allele-specific forward/reverse read counts for strand bias tests"));
+        addInfoLine(new VCFInfoHeaderLine(GC_CONTENT_KEY, 1, VCFHeaderLineType.Float, "GC content around the variant (see docs for window size details)"));
+        addInfoLine(new VCFInfoHeaderLine(NOCALL_CHROM_KEY, 1, VCFHeaderLineType.Integer, "Number of no-called samples"));
+        addInfoLine(new VCFInfoHeaderLine(GQ_MEAN_KEY, 1, VCFHeaderLineType.Float, "Mean of all GQ values"));
+        addInfoLine(new VCFInfoHeaderLine(GQ_STDEV_KEY, 1, VCFHeaderLineType.Float, "Standard deviation of all GQ values"));
+        addInfoLine(new VCFInfoHeaderLine(HAPLOTYPE_SCORE_KEY, 1, VCFHeaderLineType.Float, "Consistency of the site with at most two segregating haplotypes"));
+        addInfoLine(new VCFInfoHeaderLine(HARDY_WEINBERG_KEY, 1, VCFHeaderLineType.Float, "Phred-scaled p-value for Hardy-Weinberg violation"));
+        addInfoLine(new VCFInfoHeaderLine(HOMOPOLYMER_RUN_KEY, 1, VCFHeaderLineType.Integer, "Largest Contiguous Homopolymer Run of Variant Allele In Either Direction"));
+        addInfoLine(new VCFInfoHeaderLine(INBREEDING_COEFFICIENT_KEY, 1, VCFHeaderLineType.Float, "Inbreeding coefficient as estimated from the genotype likelihoods per-sample when compared against the Hardy-Weinberg expectation"));
+        addInfoLine(new VCFInfoHeaderLine(AS_INBREEDING_COEFFICIENT_KEY, VCFHeaderLineCount.A, VCFHeaderLineType.Float, "Allele-specific inbreeding coefficient as estimated from the genotype likelihoods per-sample when compared against the Hardy-Weinberg expectation"));
+        addInfoLine(new VCFInfoHeaderLine(EXCESS_HET_KEY, 1, VCFHeaderLineType.Float, "Phred-scaled p-value for exact test of excess heterozygosity"));
+        addInfoLine(new VCFInfoHeaderLine(AS_HETEROZYGOSITY_KEY, VCFHeaderLineCount.A, VCFHeaderLineType.Float, "allele specific heterozygosity as estimated from the genotype likelihoods per-sample when compared against the Hardy-Weinberg expectation; relate to inbreeding coefficient"));
+        addInfoLine(new VCFInfoHeaderLine(LIKELIHOOD_RANK_SUM_KEY, 1, VCFHeaderLineType.Float, "Z-score from Wilcoxon rank sum test of Alt Vs. Ref haplotype likelihoods"));
+        addInfoLine(new VCFInfoHeaderLine(MAP_QUAL_RANK_SUM_KEY, 1, VCFHeaderLineType.Float, "Z-score From Wilcoxon rank sum test of Alt vs. Ref read mapping qualities"));
+        addInfoLine(new VCFInfoHeaderLine(AS_MAP_QUAL_RANK_SUM_KEY, VCFHeaderLineCount.A, VCFHeaderLineType.Float, "allele specific Z-score From Wilcoxon rank sum test of each Alt vs. Ref read mapping qualities"));
+        addInfoLine(new VCFInfoHeaderLine(RAW_RMS_MAPPING_QUALITY_KEY, 1, VCFHeaderLineType.Float, "Raw data for RMS Mapping Quality"));
+        addInfoLine(new VCFInfoHeaderLine(AS_RAW_RMS_MAPPING_QUALITY_KEY, VCFHeaderLineCount.A, VCFHeaderLineType.Float, "Allele-specfic raw data for RMS Mapping Quality"));
+        addInfoLine(new VCFInfoHeaderLine(AS_RMS_MAPPING_QUALITY_KEY, VCFHeaderLineCount.A, VCFHeaderLineType.Float, "Allele-specific RMS Mapping Quality"));
+        addInfoLine(new VCFInfoHeaderLine(RAW_MAP_QUAL_RANK_SUM_KEY, 1, VCFHeaderLineType.Float, "Raw data for Mapping Quality Rank Sum"));
+        addInfoLine(new VCFInfoHeaderLine(AS_RAW_MAP_QUAL_RANK_SUM_KEY, 1, VCFHeaderLineType.String, "Allele-specfic raw data for Mapping Quality Rank Sum"));
+        addInfoLine(new VCFInfoHeaderLine(AS_MAP_QUAL_RANK_SUM_KEY, VCFHeaderLineCount.A, VCFHeaderLineType.Float, "Allele-specific Mapping Quality Rank Sum"));
+        addInfoLine(new VCFInfoHeaderLine(FRACTION_INFORMATIVE_READS_KEY, 1, VCFHeaderLineType.Float, "The fraction of informative reads out of the total reads"));
+
+        addInfoLine(new VCFInfoHeaderLine(MENDEL_VIOLATION_LR_KEY, 1, VCFHeaderLineType.Float, "Mendelian violation likelihood ratio: L[MV] - L[No MV]"));
+        addInfoLine(new VCFInfoHeaderLine(HI_CONF_DENOVO_KEY, 1, VCFHeaderLineType.String, "High confidence possible de novo mutation (GQ >= 20 for all trio members)=[comma-delimited list of child samples]"));
+        addInfoLine(new VCFInfoHeaderLine(LO_CONF_DENOVO_KEY, 1, VCFHeaderLineType.String, "Low confidence possible de novo mutation (GQ >= 10 for child, GQ > 0 for parents)=[comma-delimited list of child samples]"));
+        addInfoLine(new VCFInfoHeaderLine(QUAL_BY_DEPTH_KEY, 1, VCFHeaderLineType.Float, "Variant Confidence/Quality by Depth"));
+        addInfoLine(new VCFInfoHeaderLine(AS_QUAL_BY_DEPTH_KEY, 1, VCFHeaderLineType.Float, "Allele-specific Variant Confidence/Quality by Depth"));
+        addInfoLine(new VCFInfoHeaderLine(AS_QUAL_KEY, 1, VCFHeaderLineType.Float, "Allele-specific Variant Qual Score"));
+        addInfoLine(new VCFInfoHeaderLine(READ_POS_RANK_SUM_KEY, 1, VCFHeaderLineType.Float, "Z-score from Wilcoxon rank sum test of Alt vs. Ref read position bias"));
+        addInfoLine(new VCFInfoHeaderLine(AS_READ_POS_RANK_SUM_KEY, VCFHeaderLineCount.A, VCFHeaderLineType.Float, "allele specific Z-score from Wilcoxon rank sum test of each Alt vs. Ref read position bias"));
+        addInfoLine(new VCFInfoHeaderLine(AS_RAW_READ_POS_RANK_SUM_KEY, 1, VCFHeaderLineType.String, "allele specific raw data for rank sum test of read position bias"));
+        addInfoLine(new VCFInfoHeaderLine(SAMPLE_LIST_KEY, VCFHeaderLineCount.UNBOUNDED, VCFHeaderLineType.String, "List of polymorphic samples"));
+        addInfoLine(new VCFInfoHeaderLine(SPANNING_DELETIONS_KEY, 1, VCFHeaderLineType.Float, "Fraction of Reads Containing Spanning Deletions"));
+        addInfoLine(new VCFInfoHeaderLine(STRAND_ODDS_RATIO_KEY, 1, VCFHeaderLineType.Float, "Symmetric Odds Ratio of 2x2 contingency table to detect strand bias"));
+        addInfoLine(new VCFInfoHeaderLine(AS_STRAND_ODDS_RATIO_KEY, VCFHeaderLineCount.A, VCFHeaderLineType.Float, "Allele specific strand Odds Ratio of 2x|Alts| contingency table to detect allele specific strand bias"));
+        addInfoLine(new VCFInfoHeaderLine(STR_PRESENT_KEY, 0, VCFHeaderLineType.Flag, "Variant is a short tandem repeat"));
+        addInfoLine(new VCFInfoHeaderLine(REPEAT_UNIT_KEY, 1, VCFHeaderLineType.String, "Tandem repeat unit (bases)"));
+        addInfoLine(new VCFInfoHeaderLine(REPEATS_PER_ALLELE_KEY, VCFHeaderLineCount.UNBOUNDED, VCFHeaderLineType.Integer, "Number of times tandem repeat unit is repeated, for each allele (including reference)"));
+        addInfoLine(new VCFInfoHeaderLine(TRANSMISSION_DISEQUILIBRIUM_KEY, VCFHeaderLineCount.A, VCFHeaderLineType.Float, "Test statistic from Wittkowski transmission disequilibrium test."));
+        addInfoLine(new VCFInfoHeaderLine(VARIANT_TYPE_KEY, 1, VCFHeaderLineType.String, "Variant type description"));
+        addInfoLine(new VCFInfoHeaderLine(NUMBER_OF_DISCOVERED_ALLELES_KEY, 1, VCFHeaderLineType.Integer, "Number of alternate alleles discovered (but not necessarily genotyped) at this site"));
+        addInfoLine(new VCFInfoHeaderLine(REFSAMPLE_DEPTH_KEY, 1, VCFHeaderLineType.Integer, "Total reference sample depth"));
+        addInfoLine(new VCFInfoHeaderLine(ORIGINAL_AC_KEY, VCFHeaderLineCount.A, VCFHeaderLineType.Integer, "Original AC"));
+        addInfoLine(new VCFInfoHeaderLine(ORIGINAL_AF_KEY, VCFHeaderLineCount.A, VCFHeaderLineType.Float, "Original AF"));
+        addInfoLine(new VCFInfoHeaderLine(ORIGINAL_AN_KEY, 1, VCFHeaderLineType.Integer, "Original AN"));
+        addInfoLine(new VCFInfoHeaderLine(ORIGINAL_DP_KEY, 1, VCFHeaderLineType.Integer, "Original DP"));
+        addInfoLine(new VCFInfoHeaderLine(ORIGINAL_CONTIG_KEY, 1, VCFHeaderLineType.String, "Original contig name for the record"));
+        addInfoLine(new VCFInfoHeaderLine(ORIGINAL_START_KEY, 1, VCFHeaderLineType.Integer, "Original start position for the record"));
+        addInfoLine(new VCFInfoHeaderLine(VQS_LOD_KEY, 1, VCFHeaderLineType.Float, "Log odds of being a true variant versus being false under the trained gaussian mixture model"));
+        addInfoLine(new VCFInfoHeaderLine(CULPRIT_KEY, 1, VCFHeaderLineType.String, "The annotation which was the worst performing in the Gaussian mixture model, likely the reason why the variant was filtered out"));
+        addInfoLine(new VCFInfoHeaderLine(POSITIVE_LABEL_KEY, 1, VCFHeaderLineType.Flag, "This variant was used to build the positive training set of good variants"));
+        addInfoLine(new VCFInfoHeaderLine(NEGATIVE_LABEL_KEY, 1, VCFHeaderLineType.Flag, "This variant was used to build the negative training set of bad variants"));
+        addInfoLine(new VCFInfoHeaderLine(RBP_INCONSISTENT_KEY, 0, VCFHeaderLineType.Flag, "Are the reads significantly haplotype-inconsistent?"));
+        addInfoLine(new VCFInfoHeaderLine(GENOTYPE_AND_VALIDATE_STATUS_KEY, 1, VCFHeaderLineType.String, "Value from the validation VCF"));
+        addInfoLine(new VCFInfoHeaderLine(AVG_INTERVAL_DP_KEY, 1, VCFHeaderLineType.Float, "Average depth across the interval. Sum of the depth in a loci divided by interval size."));
+        addInfoLine(new VCFInfoHeaderLine(INTERVAL_GC_CONTENT_KEY, 1, VCFHeaderLineType.Float, "GC Content of the interval"));
+        addInfoLine(new VCFInfoHeaderLine(GENOTYPE_PRIOR_KEY, VCFHeaderLineCount.G, VCFHeaderLineType.Integer, "Genotype Likelihood Prior"));
+        addInfoLine(new VCFInfoHeaderLine(BEAGLE_R2_KEY, 1, VCFHeaderLineType.Float, "r2 Value reported by Beagle on each site"));
+        addInfoLine(new VCFInfoHeaderLine(NUM_GENOTYPES_CHANGED_KEY, 1, VCFHeaderLineType.Integer, "The number of genotypes changed by Beagle"));
+        addInfoLine(new VCFInfoHeaderLine(ORIGINAL_ALT_ALLELE_INFO_KEY, 1, VCFHeaderLineType.String, "The original alt allele for a site set to monomorphic by Beagle"));
+        addInfoLine(new VCFInfoHeaderLine(BEAGLE_AC_COMP_KEY, 1, VCFHeaderLineType.Integer, "Allele Count from Comparison ROD at this site"));
+        addInfoLine(new VCFInfoHeaderLine(BEAGLE_AF_COMP_KEY, 1, VCFHeaderLineType.Integer, "Allele Frequency from Comparison ROD at this site"));
+        addInfoLine(new VCFInfoHeaderLine(BEAGLE_AN_COMP_KEY, 1, VCFHeaderLineType.Float, "Allele Number from Comparison ROD at this site"));
+
+        // M2-related info lines
+        addInfoLine(new VCFInfoHeaderLine(GATKVCFConstants.EVENT_COUNT_IN_HAPLOTYPE_KEY, 1, VCFHeaderLineType.String, "Number of events in this haplotype"));
+        addInfoLine(new VCFInfoHeaderLine(GATKVCFConstants.EVENT_DISTANCE_MAX_KEY, 1, VCFHeaderLineType.Integer, "Maximum distance between events in this active region"));
+        addInfoLine(new VCFInfoHeaderLine(GATKVCFConstants.EVENT_DISTANCE_MIN_KEY, 1, VCFHeaderLineType.Integer, "Minimum distance between events in this active region"));
+        addInfoLine(new VCFInfoHeaderLine(GATKVCFConstants.HAPLOTYPE_COUNT_KEY, 1, VCFHeaderLineType.String, "Number of haplotypes that support this variant"));
+        addInfoLine(new VCFInfoHeaderLine(GATKVCFConstants.NORMAL_LOD_KEY, 1, VCFHeaderLineType.String, "Normal LOD score"));
+        addInfoLine(new VCFInfoHeaderLine(GATKVCFConstants.PANEL_OF_NORMALS_COUNT_KEY, 1, VCFHeaderLineType.String, "Count from Panel of Normals"));
+        addInfoLine(new VCFInfoHeaderLine(GATKVCFConstants.TUMOR_LOD_KEY, 1, VCFHeaderLineType.String, "Tumor LOD score"));
+
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/variant/GATKVCFIndexType.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/variant/GATKVCFIndexType.java
new file mode 100644
index 0000000..0ca4a34
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/variant/GATKVCFIndexType.java
@@ -0,0 +1,39 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.variant;
+
+import org.broadinstitute.gatk.utils.commandline.EnumerationArgumentDefault;
+
/**
 * Choose the Tribble indexing strategy
 */
public enum GATKVCFIndexType {
    @EnumerationArgumentDefault  // marks this constant as the default for command-line parsing
    DYNAMIC_SEEK,       // use DynamicIndexCreator(IndexFactory.IndexBalanceApproach.FOR_SEEK_TIME)
    DYNAMIC_SIZE,       // use DynamicIndexCreator(IndexFactory.IndexBalanceApproach.FOR_SIZE)
    LINEAR,             // use LinearIndexCreator()
    INTERVAL            // use IntervalIndexCreator()
}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/variant/GATKVariantContextUtils.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/variant/GATKVariantContextUtils.java
new file mode 100644
index 0000000..520582a
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/variant/GATKVariantContextUtils.java
@@ -0,0 +1,2124 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.variant;
+
+import com.google.java.contract.Ensures;
+import com.google.java.contract.Requires;
+import htsjdk.tribble.TribbleException;
+import htsjdk.tribble.util.popgen.HardyWeinbergCalculation;
+import htsjdk.variant.variantcontext.*;
+import htsjdk.variant.vcf.VCFConstants;
+import org.apache.commons.lang.ArrayUtils;
+import org.apache.log4j.Logger;
+import org.broadinstitute.gatk.utils.*;
+import org.broadinstitute.gatk.utils.collections.Pair;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+
+import java.io.Serializable;
+import java.util.*;
+
+public class GATKVariantContextUtils {
+
    // Shared logger for all static utilities in this class.
    private static Logger logger = Logger.getLogger(GATKVariantContextUtils.class);

    // Ploidy assumed when a genotype provides none (delegates to HomoSapiensConstants).
    public static final int DEFAULT_PLOIDY = HomoSapiensConstants.DEFAULT_PLOIDY;

    public static final double SUM_GL_THRESH_NOCALL = -0.1; // if sum(gl) is bigger than this threshold, we treat GL's as non-informative and will force a no-call.

    /**
     * Diploid NO_CALL allele list...
     *
     * @deprecated you should use {@link #noCallAlleles(int)} instead. It indicates the presence of a hardcoded diploid assumption which is bad.
     */
    @Deprecated
    public final static List<Allele> NO_CALL_ALLELES = Arrays.asList(Allele.NO_CALL, Allele.NO_CALL);

    // Marker strings written by the VCF merging utilities; their exact semantics are
    // defined by the merge code that emits them (not all of it is visible in this file).
    public final static String MERGE_FILTER_PREFIX = "filterIn";
    public final static String MERGE_REF_IN_ALL = "ReferenceInAll";
    public final static String MERGE_FILTER_IN_ALL = "FilteredInAll";
    public final static String MERGE_INTERSECTION = "Intersection";
+
+    /**
+     * Checks whether a variant-context overlaps with a region.
+     *
+     * <p>
+     *     No event overlaps an unmapped region.
+     * </p>
+     *
+     * @param variantContext variant-context to test the overlap with.
+     * @param region region to test the overlap with.
+     *
+     * @throws IllegalArgumentException if either region or event is {@code null}.
+     *
+     * @return {@code true} if there is an overlap between the event described and the active region provided.
+     */
+    public static boolean overlapsRegion(final VariantContext variantContext, final GenomeLoc region) {
+        if (region == null) throw new IllegalArgumentException("the active region provided cannot be null");
+        if (variantContext == null) throw new IllegalArgumentException("the variant context provided cannot be null");
+        if (region.isUnmapped())
+            return false;
+        if (variantContext.getEnd() < region.getStart())
+            return false;
+        if (variantContext.getStart() > region.getStop())
+            return false;
+        if (!variantContext.getChr().equals(region.getContig()))
+            return false;
+        return true;
+    }
+
+    /**
+     * Returns a homozygous call allele list given the only allele and the ploidy.
+     *
+     * @param allele the only allele in the allele list.
+     * @param ploidy the ploidy of the resulting allele list.
+     *
+     * @throws IllegalArgumentException if {@code allele} is {@code null} or ploidy is negative.
+     *
+     * @return never {@code null}.
+     */
+    public static List<Allele> homozygousAlleleList(final Allele allele, final int ploidy) {
+        if (allele == null || ploidy < 0)
+            throw new IllegalArgumentException();
+
+        // Use a tailored inner class to implement the list:
+        return Collections.nCopies(ploidy,allele);
+    }
+
+    private static boolean hasPLIncompatibleAlleles(final Collection<Allele> alleleSet1, final Collection<Allele> alleleSet2) {
+        final Iterator<Allele> it1 = alleleSet1.iterator();
+        final Iterator<Allele> it2 = alleleSet2.iterator();
+
+        while ( it1.hasNext() && it2.hasNext() ) {
+            final Allele a1 = it1.next();
+            final Allele a2 = it2.next();
+            if ( ! a1.equals(a2) )
+                return true;
+        }
+
+        // by this point, at least one of the iterators is empty.  All of the elements
+        // we've compared are equal up until this point.  But it's possible that the
+        // sets aren't the same size, which is indicated by the test below.  If they
+        // are of the same size, though, the sets are compatible
+        return it1.hasNext() || it2.hasNext();
+    }
+
+    /**
+     * Determines the common reference allele
+     *
+     * @param VCs    the list of VariantContexts
+     * @param loc    if not null, ignore records that do not begin at this start location
+     * @return possibly null Allele
+     */
+    public static Allele determineReferenceAllele(final List<VariantContext> VCs, final GenomeLoc loc) {
+        Allele ref = null;
+
+        for ( final VariantContext vc : VCs ) {
+            if ( contextMatchesLoc(vc, loc) ) {
+                final Allele myRef = vc.getReference();
+                if ( ref == null || ref.length() < myRef.length() )
+                    ref = myRef;
+                else if ( ref.length() == myRef.length() && ! ref.equals(myRef) )
+                    throw new TribbleException(String.format("The provided variant file(s) have inconsistent references for the same position(s) at %s:%d, %s vs. %s", vc.getChr(), vc.getStart(), ref, myRef));
+            }
+        }
+
+        return ref;
+    }
+
+    /**
+     * Calculates the total ploidy of a variant context as the sum of all ploidies across genotypes.
+     * @param vc the target variant context.
+     * @param defaultPloidy the default ploidy to be assume when there is no ploidy information for a genotype.
+     * @return never {@code null}.
+     */
+    public static int totalPloidy(final VariantContext vc, final int defaultPloidy) {
+        if (vc == null)
+            throw new IllegalArgumentException("the vc provided cannot be null");
+        if (defaultPloidy < 0)
+            throw new IllegalArgumentException("the default ploidy must 0 or greater");
+        int result = 0;
+        for (final Genotype genotype : vc.getGenotypes()) {
+            final int declaredPloidy = genotype.getPloidy();
+            result += declaredPloidy <= 0 ? defaultPloidy : declaredPloidy;
+        }
+
+        return result;
+    }
+
    /**
     * Strategies for merging sample genotypes when combining variant records from several inputs.
     */
    public enum GenotypeMergeType {
        /**
         * Make all sample genotypes unique by file. Each sample shared across RODs gets named sample.ROD.
         */
        UNIQUIFY,
        /**
         * Take genotypes in priority order (see the priority argument).
         */
        PRIORITIZE,
        /**
         * Take the genotypes in any order.
         */
        UNSORTED,
        /**
         * Require that all samples/genotypes be unique between all inputs.
         */
        REQUIRE_UNIQUE
    }
+
    /**
     * Strategies for deciding how filtered records influence a merged record.
     */
    public enum FilteredRecordMergeType {
        /**
         * Union - leaves the record if any record is unfiltered.
         */
        KEEP_IF_ANY_UNFILTERED,
        /**
         * Requires all records present at site to be unfiltered. VCF files that don't contain the record don't influence this.
         */
        KEEP_IF_ALL_UNFILTERED,
        /**
         * If any record is present at this site (regardless of possibly being filtered), then all such records are kept and the filters are reset.
         */
        KEEP_UNCONDITIONAL
    }
+
    /**
     * Strategies for combining records with multiple allele types at the same start position.
     */
    public enum MultipleAllelesMergeType {
        /**
         * Combine only alleles of the same type (SNP, indel, etc.) into a single VCF record.
         */
        BY_TYPE,
        /**
         * Merge all allele types at the same start position into the same VCF record.
         */
        MIX_TYPES
    }
+
+    /**
+     * Refactored out of the AverageAltAlleleLength annotation class
+     * @param vc the variant context
+     * @return the average length of the alt allele (a double)
+     */
+    public static double getMeanAltAlleleLength(VariantContext vc) {
+        double averageLength = 1.0;
+        if ( ! vc.isSNP() && ! vc.isSymbolic() ) {
+            // adjust for the event length
+            int averageLengthNum = 0;
+            int averageLengthDenom = 0;
+            int refLength = vc.getReference().length();
+            for ( final Allele a : vc.getAlternateAlleles() ) {
+                int numAllele = vc.getCalledChrCount(a);
+                int alleleSize;
+                if ( a.length() == refLength ) {
+                    // SNP or MNP
+                    byte[] a_bases = a.getBases();
+                    byte[] ref_bases = vc.getReference().getBases();
+                    int n_mismatch = 0;
+                    for ( int idx = 0; idx < a_bases.length; idx++ ) {
+                        if ( a_bases[idx] != ref_bases[idx] )
+                            n_mismatch++;
+                    }
+                    alleleSize = n_mismatch;
+                }
+                else if ( a.isSymbolic() ) {
+                    alleleSize = 1;
+                } else {
+                    alleleSize = Math.abs(refLength-a.length());
+                }
+                averageLengthNum += alleleSize*numAllele;
+                averageLengthDenom += numAllele;
+            }
+            averageLength = ( (double) averageLengthNum )/averageLengthDenom;
+        }
+
+        return averageLength;
+    }
+
    /**
     * create a genome location, given a variant context
     * @param genomeLocParser parser
     * @param vc the variant context
     * @return the genomeLoc spanning vc.getStart()..vc.getEnd() on vc's contig
     */
    public static final GenomeLoc getLocation(GenomeLocParser genomeLocParser,VariantContext vc) {
        // NOTE(review): the trailing 'true' flag is forwarded verbatim to createGenomeLoc;
        // its meaning is defined by GenomeLocParser -- confirm against that API.
        return genomeLocParser.createGenomeLoc(vc.getChr(), vc.getStart(), vc.getEnd(), true);
    }
+
+    public static BaseUtils.BaseSubstitutionType getSNPSubstitutionType(VariantContext context) {
+        if (!context.isSNP() || !context.isBiallelic())
+            throw new IllegalStateException("Requested SNP substitution type for bialleic non-SNP " + context);
+        return BaseUtils.SNPSubstitutionType(context.getReference().getBases()[0], context.getAlternateAllele(0).getBases()[0]);
+    }
+
    /**
     * If this is a BiAllelic SNP, is it a transition?
     *
     * @throws IllegalStateException (from getSNPSubstitutionType) if the context is not a biallelic SNP
     */
    public static boolean isTransition(VariantContext context) {
        return getSNPSubstitutionType(context) == BaseUtils.BaseSubstitutionType.TRANSITION;
    }
+
    /**
     * If this is a BiAllelic SNP, is it a transversion?
     *
     * @throws IllegalStateException (from getSNPSubstitutionType) if the context is not a biallelic SNP
     */
    public static boolean isTransversion(VariantContext context) {
        return getSNPSubstitutionType(context) == BaseUtils.BaseSubstitutionType.TRANSVERSION;
    }
+
    // Compares only the FIRST base of each allele; assumes both are single-base SNP
    // alleles with at least one base -- TODO confirm all callers guarantee this.
    public static boolean isTransition(Allele ref, Allele alt) {
        return BaseUtils.SNPSubstitutionType(ref.getBases()[0], alt.getBases()[0]) == BaseUtils.BaseSubstitutionType.TRANSITION;
    }
+
    // Compares only the FIRST base of each allele; assumes both are single-base SNP
    // alleles with at least one base -- TODO confirm all callers guarantee this.
    public static boolean isTransversion(Allele ref, Allele alt) {
        return BaseUtils.SNPSubstitutionType(ref.getBases()[0], alt.getBases()[0]) == BaseUtils.BaseSubstitutionType.TRANSVERSION;
    }
+
+    /**
+     * Returns a context identical to this with the REF and ALT alleles reverse complemented.
+     *
+     * @param vc        variant context
+     * @return new vc
+     */
+    public static VariantContext reverseComplement(VariantContext vc) {
+        // create a mapping from original allele to reverse complemented allele
+        HashMap<Allele, Allele> alleleMap = new HashMap<>(vc.getAlleles().size());
+        for ( final Allele originalAllele : vc.getAlleles() ) {
+            Allele newAllele;
+            if ( originalAllele.isNoCall() )
+                newAllele = originalAllele;
+            else
+                newAllele = Allele.create(BaseUtils.simpleReverseComplement(originalAllele.getBases()), originalAllele.isReference());
+            alleleMap.put(originalAllele, newAllele);
+        }
+
+        // create new Genotype objects
+        GenotypesContext newGenotypes = GenotypesContext.create(vc.getNSamples());
+        for ( final Genotype genotype : vc.getGenotypes() ) {
+            List<Allele> newAlleles = new ArrayList<>();
+            for ( final Allele allele : genotype.getAlleles() ) {
+                Allele newAllele = alleleMap.get(allele);
+                if ( newAllele == null )
+                    newAllele = Allele.NO_CALL;
+                newAlleles.add(newAllele);
+            }
+            newGenotypes.add(new GenotypeBuilder(genotype).alleles(newAlleles).make());
+        }
+
+        return new VariantContextBuilder(vc).alleles(alleleMap.values()).genotypes(newGenotypes).make();
+    }
+
+    /**
+     * Returns true iff VC is an non-complex indel where every allele represents an expansion or
+     * contraction of a series of identical bases in the reference.
+     *
+     * For example, suppose the ref bases are CTCTCTGA, which includes a 3x repeat of CTCTCT
+     *
+     * If VC = -/CT, then this function returns true because the CT insertion matches exactly the
+     * upcoming reference.
+     * If VC = -/CTA then this function returns false because the CTA isn't a perfect match
+     *
+     * Now consider deletions:
+     *
+     * If VC = CT/- then again the same logic applies and this returns true
+     * The case of CTA/- makes no sense because it doesn't actually match the reference bases.
+     *
+     * The logic of this function is pretty simple.  Take all of the non-null alleles in VC.  For
+     * each insertion allele of n bases, check if that allele matches the next n reference bases.
+     * For each deletion allele of n bases, check if this matches the reference bases at n - 2 n,
+     * as it must necessarily match the first n bases.  If this test returns true for all
+     * alleles you are a tandem repeat, otherwise you are not.
+     *
+     * @param vc
+     * @param refBasesStartingAtVCWithPad not this is assumed to include the PADDED reference
+     * @return
+     */
+    @Requires({"vc != null", "refBasesStartingAtVCWithPad != null && refBasesStartingAtVCWithPad.length > 0"})
+    public static boolean isTandemRepeat(final VariantContext vc, final byte[] refBasesStartingAtVCWithPad) {
+        final String refBasesStartingAtVCWithoutPad = new String(refBasesStartingAtVCWithPad).substring(1);
+        if ( ! vc.isIndel() ) // only indels are tandem repeats
+            return false;
+
+        final Allele ref = vc.getReference();
+
+        for ( final Allele allele : vc.getAlternateAlleles() ) {
+            if ( ! isRepeatAllele(ref, allele, refBasesStartingAtVCWithoutPad) )
+                return false;
+        }
+
+        // we've passed all of the tests, so we are a repeat
+        return true;
+    }
+
+    /**
+     *
+     * @param vc
+     * @param refBasesStartingAtVCWithPad
+     * @return
+     */
+    @Requires({"vc != null", "refBasesStartingAtVCWithPad != null && refBasesStartingAtVCWithPad.length > 0"})
+    public static Pair<List<Integer>,byte[]> getNumTandemRepeatUnits(final VariantContext vc, final byte[] refBasesStartingAtVCWithPad) {
+        final boolean VERBOSE = false;
+        final String refBasesStartingAtVCWithoutPad = new String(refBasesStartingAtVCWithPad).substring(1);
+        if ( ! vc.isIndel() ) // only indels are tandem repeats
+            return null;
+
+        final Allele refAllele = vc.getReference();
+        final byte[] refAlleleBases = Arrays.copyOfRange(refAllele.getBases(), 1, refAllele.length());
+
+        byte[] repeatUnit = null;
+        final ArrayList<Integer> lengths = new ArrayList<>();
+
+        for ( final Allele allele : vc.getAlternateAlleles() ) {
+            Pair<int[],byte[]> result = getNumTandemRepeatUnits(refAlleleBases, Arrays.copyOfRange(allele.getBases(), 1, allele.length()), refBasesStartingAtVCWithoutPad.getBytes());
+
+            final int[] repetitionCount = result.first;
+            // repetition count = 0 means allele is not a tandem expansion of context
+            if (repetitionCount[0] == 0 || repetitionCount[1] == 0)
+                return null;
+
+            if (lengths.isEmpty()) {
+                lengths.add(repetitionCount[0]); // add ref allele length only once
+            }
+            lengths.add(repetitionCount[1]);  // add this alt allele's length
+
+            repeatUnit = result.second;
+            if (VERBOSE) {
+                System.out.println("RefContext:"+refBasesStartingAtVCWithoutPad);
+                System.out.println("Ref:"+refAllele.toString()+" Count:" + String.valueOf(repetitionCount[0]));
+                System.out.println("Allele:"+allele.toString()+" Count:" + String.valueOf(repetitionCount[1]));
+                System.out.println("RU:"+new String(repeatUnit));
+            }
+        }
+
+        return new Pair<List<Integer>, byte[]>(lengths,repeatUnit);
+    }
+
    /**
     * Computes how many times a repeat unit occurs in the ref and alt alleles.
     *
     * The repeat unit is derived from the longer of {@code refBases}/{@code altBases}; the
     * returned counts are the number of repetitions found beyond those already present in
     * the ref allele bases themselves, when each allele is followed by
     * {@code remainingRefContext}.
     *
     * @param refBases            the reference allele bases (padding base removed)
     * @param altBases            the alternate allele bases (padding base removed)
     * @param remainingRefContext the reference bases following the variant
     * @return a pair of ({ref count, alt count}, repeat unit bases); a count of 0 means the
     *         corresponding allele is not a tandem expansion of the context
     * @deprecated there is still no alternative for this method but eventually there needs to be one implemented in TandemRepeatFinder (protected for now).
     */
    @Deprecated
    public static Pair<int[],byte[]> getNumTandemRepeatUnits(final byte[] refBases, final byte[] altBases, final byte[] remainingRefContext) {
         /* we can't exactly apply same logic as in basesAreRepeated() to compute tandem unit and number of repeated units.
           Consider case where ref =ATATAT and we have an insertion of ATAT. Natural description is (AT)3 -> (AT)2.
         */

        byte[] longB;
        // find first repeat unit based on either ref or alt, whichever is longer
        if (altBases.length > refBases.length)
            longB = altBases;
        else
            longB = refBases;

        // see if non-null allele (either ref or alt, whichever is longer) can be decomposed into several identical tandem units
        // for example, -*,CACA needs to first be decomposed into (CA)2
        final int repeatUnitLength = findRepeatedSubstring(longB);
        final byte[] repeatUnit = Arrays.copyOf(longB, repeatUnitLength);

        final int[] repetitionCount = new int[2];
        // look for repetitions forward on the ref bases (i.e. starting at beginning of ref bases)
        int repetitionsInRef = findNumberOfRepetitions(repeatUnit, refBases, true);
        // report counts relative to the repetitions already present in the ref allele,
        // hence the subtraction of repetitionsInRef from each total below
        repetitionCount[0] = findNumberOfRepetitions(repeatUnit, ArrayUtils.addAll(refBases, remainingRefContext), true)-repetitionsInRef;
        repetitionCount[1] = findNumberOfRepetitions(repeatUnit, ArrayUtils.addAll(altBases, remainingRefContext), true)-repetitionsInRef;

        return new Pair<>(repetitionCount, repeatUnit);

    }
+
+    /**
+     * Find out if a string can be represented as a tandem number of substrings.
+     * For example ACTACT is a 2-tandem of ACT,
+     * but ACTACA is not.
+     *
+     * @param bases                 String to be tested
+     * @return                      Length of repeat unit, if string can be represented as tandem of substring (if it can't
+     *                              be represented as one, it will be just the length of the input string)
+     */
+    public static int findRepeatedSubstring(byte[] bases) {
+
+        int repLength;
+        for (repLength=1; repLength <=bases.length; repLength++) {
+            final byte[] candidateRepeatUnit = Arrays.copyOf(bases,repLength);
+            boolean allBasesMatch = true;
+            for (int start = repLength; start < bases.length; start += repLength ) {
+                // check that remaining of string is exactly equal to repeat unit
+                final byte[] basePiece = Arrays.copyOfRange(bases,start,start+candidateRepeatUnit.length);
+                if (!Arrays.equals(candidateRepeatUnit, basePiece)) {
+                    allBasesMatch = false;
+                    break;
+                }
+            }
+            if (allBasesMatch)
+                return repLength;
+        }
+
+        return repLength;
+    }
+
+    /**
+     * Helper routine that finds number of repetitions a string consists of.
+     * For example, for string ATAT and repeat unit AT, number of repetitions = 2
+     * @param repeatUnit             Substring
+     * @param testString             String to test
+     * @oaram lookForward            Look for repetitions forward (at beginning of string) or backward (at end of string)
+     * @return                       Number of repetitions (0 if testString is not a concatenation of n repeatUnit's
+     * @deprecated Move to use TandemRepeatFinder in protected (move to public if needed).
+     */
+    @Deprecated
+    public static int findNumberOfRepetitions(byte[] repeatUnit, byte[] testString, boolean lookForward) {
+
+        if (repeatUnit == null) throw new IllegalArgumentException("the repeat unit cannot be null");
+        if (testString == null) throw new IllegalArgumentException("the test string cannot be null");
+
+        int numRepeats = 0;
+        if (lookForward) {
+            // look forward on the test string
+            for (int start = 0; start < testString.length; start += repeatUnit.length) {
+                final int end = start + repeatUnit.length;
+                final byte[] unit = Arrays.copyOfRange(testString,start, end);
+                if (!Arrays.equals(unit,repeatUnit))
+                    break;
+                numRepeats++;
+            }
+            return numRepeats;
+        }
+
+        // look backward. For example, if repeatUnit = AT and testString = GATAT, number of repeat units is still 2
+        // look forward on the test string
+        for (int start = testString.length - repeatUnit.length; start >= 0; start -= repeatUnit.length) {
+            final int end = start + repeatUnit.length;
+            final byte[] unit = Arrays.copyOfRange(testString, start, end);
+            if(Arrays.equals(unit,repeatUnit))
+                numRepeats++;
+            else
+                break;
+        }
+        return numRepeats;
+    }
+
+    /**
+     * Helper function for isTandemRepeat that checks that allele matches somewhere on the reference
+     * @param ref
+     * @param alt
+     * @param refBasesStartingAtVCWithoutPad
+     * @return
+     */
+    protected static boolean isRepeatAllele(final Allele ref, final Allele alt, final String refBasesStartingAtVCWithoutPad) {
+        if ( ! Allele.oneIsPrefixOfOther(ref, alt) )
+            return false; // we require one allele be a prefix of another
+
+        if ( ref.length() > alt.length() ) { // we are a deletion
+            return basesAreRepeated(ref.getBaseString(), alt.getBaseString(), refBasesStartingAtVCWithoutPad, 2);
+        } else { // we are an insertion
+            return basesAreRepeated(alt.getBaseString(), ref.getBaseString(), refBasesStartingAtVCWithoutPad, 1);
+        }
+    }
+
+    protected static boolean basesAreRepeated(final String l, final String s, final String ref, final int minNumberOfMatches) {
+        final String potentialRepeat = l.substring(s.length()); // skip s bases
+
+        for ( int i = 0; i < minNumberOfMatches; i++) {
+            final int start = i * potentialRepeat.length();
+            final int end = (i+1) * potentialRepeat.length();
+            if ( ref.length() < end )
+                return false; // we ran out of bases to test
+            final String refSub = ref.substring(start, end);
+            if ( ! refSub.equals(potentialRepeat) )
+                return false; // repeat didn't match, fail
+        }
+
+        return true; // we passed all tests, we matched
+    }
+
    /**
     * Strategies for (re)assigning genotype calls (GT) after the alleles of a
     * VariantContext have been subsetted.
     */
    public enum GenotypeAssignmentMethod {
        /**
         * set all of the genotype GT values to NO_CALL
         */
        SET_TO_NO_CALL,

        /**
         * Use the subsetted PLs to greedily assign genotypes
         */
        USE_PLS_TO_ASSIGN,

        /**
         * Try to match the original GT calls, if at all possible
         *
         * Suppose I have 3 alleles: A/B/C and the following samples:
         *
         *       original_GT best_match to A/B best_match to A/C
         * S1 => A/A A/A A/A
         * S2 => A/B A/B A/A
         * S3 => B/B B/B A/A
         * S4 => B/C A/B A/C
         * S5 => C/C A/A C/C
         *
         * Basically, all alleles not in the subset map to ref.  It means that het-alt genotypes
         * when split into 2 bi-allelic variants will be het in each, which is good in some cases,
         * rather than the undetermined behavior when using the PLs to assign, which could result
         * in hom-var or hom-ref for each, depending on the exact PL values.
         */
        BEST_MATCH_TO_ORIGINAL,

        /**
         * do not even bother changing the GTs
         */
        DO_NOT_ASSIGN_GENOTYPES
    }
+
+    /**
+     * subset the Variant Context to the specific set of alleles passed in (pruning the PLs appropriately)
+     *
+     * @param vc                 variant context with genotype likelihoods
+     * @param allelesToUse       which alleles from the vc are okay to use; *** must be in the same relative order as those in the original VC ***
+     * @param assignGenotypes    assignment strategy for the (subsetted) PLs
+     * @return a new non-null GenotypesContext
+     */
+    public static GenotypesContext subsetDiploidAlleles(final VariantContext vc,
+                                                        final List<Allele> allelesToUse,
+                                                        final GenotypeAssignmentMethod assignGenotypes) {
+        if ( vc == null ) throw new IllegalArgumentException("the VariantContext cannot be null");
+        if ( allelesToUse == null ) throw new IllegalArgumentException("the alleles to use cannot be null");
+        if ( allelesToUse.get(0).isNonReference() ) throw new IllegalArgumentException("First allele must be the reference allele");
+        if ( allelesToUse.size() == 1 ) throw new IllegalArgumentException("Cannot subset to only 1 alt allele");
+
+        // optimization: if no input genotypes, just exit
+        if (vc.getGenotypes().isEmpty()) return GenotypesContext.create();
+
+        // find the likelihoods indexes to use from the used alternate alleles
+        final List<Integer> likelihoodIndexesToUse = determineDiploidLikelihoodIndexesToUse(vc, allelesToUse);
+
+        // find the strand allele count indexes to use from the used alternate alleles
+        final List<Integer> sacIndexesToUse = determineSACIndexesToUse(vc, allelesToUse);
+
+        // create the new genotypes
+        return createGenotypesWithSubsettedLikelihoods(vc.getGenotypes(), vc, allelesToUse, likelihoodIndexesToUse, sacIndexesToUse, assignGenotypes);
+    }
+
+    /**
+     * Find the likelihood indexes to use for a selected set of diploid alleles
+     *
+     * @param originalVC        the original VariantContext
+     * @param allelesToUse      the subset of alleles to use
+     * @return a list of PL indexes to use or null if none
+     */
+    private static List<Integer> determineDiploidLikelihoodIndexesToUse(final VariantContext originalVC, final List<Allele> allelesToUse) {
+
+        if ( originalVC == null) throw new IllegalArgumentException("the original VariantContext cannot be null");
+        if ( allelesToUse == null ) throw new IllegalArgumentException("the alleles to use cannot be null");
+
+        // the bitset representing the allele indexes we want to keep
+        final boolean[] alleleIndexesToUse = getAlleleIndexBitset(originalVC, allelesToUse);
+
+        // an optimization: if we are supposed to use all (or none in the case of a ref call) of the alleles,
+        // then we can keep the PLs as is; otherwise, we determine which ones to keep
+        if ( MathUtils.countOccurrences(true, alleleIndexesToUse) == alleleIndexesToUse.length )
+            return null;
+
+        return getDiploidLikelihoodIndexes(originalVC, alleleIndexesToUse);
+    }
+
+    /**
+     * Find the strand allele count indexes to use for a selected set of alleles
+     *
+     * @param originalVC   the original VariantContext
+     * @param allelesToUse the subset of alleles to use
+     * @return a list of SAC indexes to use or null if none
+     */
+    public static List<Integer> determineSACIndexesToUse(final VariantContext originalVC, final List<Allele> allelesToUse) {
+
+        if ( originalVC == null ) throw new IllegalArgumentException("the original VC cannot be null");
+        if ( allelesToUse == null ) throw new IllegalArgumentException("the alleles to use cannot be null");
+
+        // the bitset representing the allele indexes we want to keep
+        final boolean[] alleleIndexesToUse = getAlleleIndexBitset(originalVC, allelesToUse);
+
+        // an optimization: if we are supposed to use all (or none in the case of a ref call) of the alleles,
+        // then we can keep the SACs as is; otherwise, we determine which ones to keep
+        if (MathUtils.countOccurrences(true, alleleIndexesToUse) == alleleIndexesToUse.length)
+            return null;
+
+        return getSACIndexes(alleleIndexesToUse);
+    }
+
+    /**
+     * Get the actual likelihoods indexes to use given the corresponding diploid allele indexes
+     *
+     * @param originalVC           the original VariantContext
+     * @param alleleIndexesToUse   the bitset representing the alleles to use (@see #getAlleleIndexBitset)
+     * @return a non-null List
+     */
+    private static List<Integer> getDiploidLikelihoodIndexes(final VariantContext originalVC, final boolean[] alleleIndexesToUse) {
+
+        if (originalVC == null) throw new IllegalArgumentException("the original VC cannot be null");
+        if (alleleIndexesToUse == null) throw new IllegalArgumentException("the alleles to use cannot be null");
+
+        // All samples must be diploid
+        for ( final Genotype g : originalVC.getGenotypes() ){
+            if ( g.getPloidy() != DEFAULT_PLOIDY )
+                throw new ReviewedGATKException("All samples must be diploid");
+        }
+
+        final List<Integer> result = new ArrayList<>(30);
+
+        // numLikelihoods takes total # of alleles.
+        final int numLikelihoods = GenotypeLikelihoods.numLikelihoods(originalVC.getNAlleles(), DEFAULT_PLOIDY);
+
+        for ( int PLindex = 0; PLindex < numLikelihoods; PLindex++ ) {
+            final GenotypeLikelihoods.GenotypeLikelihoodsAllelePair alleles = GenotypeLikelihoods.getAllelePair(PLindex);
+            // consider this entry only if both of the alleles are good
+            if ( alleleIndexesToUse[alleles.alleleIndex1] && alleleIndexesToUse[alleles.alleleIndex2] )
+                result.add(PLindex);
+        }
+
+        return result;
+    }
+
+    /**
+     * Get the actual strand aleele counts indexes to use given the corresponding allele indexes
+     *
+     * @param alleleIndexesToUse    the bitset representing the alleles to use (@see #getAlleleIndexBitset)
+     * @return a non-null List
+     */
+    private static List<Integer> getSACIndexes(final boolean[] alleleIndexesToUse) {
+
+        if (alleleIndexesToUse == null) throw new IllegalArgumentException("the alleles to use cannot be null");
+        if (alleleIndexesToUse.length == 0) throw new IllegalArgumentException("cannot have no alleles to use");
+
+        final List<Integer> result = new ArrayList<>(2 * alleleIndexesToUse.length);
+
+        for (int SACindex = 0; SACindex < alleleIndexesToUse.length; SACindex++) {
+            if (alleleIndexesToUse[SACindex]) {
+                result.add(2 * SACindex);
+                result.add(2 * SACindex + 1);
+            }
+        }
+
+        return result;
+    }
+
+    /**
+     * Given an original VariantContext and a list of alleles from that VC to keep,
+     * returns a bitset representing which allele indexes should be kept
+     *
+     * @param originalVC   the original VC
+     * @param allelesToUse the list of alleles to keep
+     * @return non-null bitset
+     */
+    private static boolean[] getAlleleIndexBitset(final VariantContext originalVC, final List<Allele> allelesToUse) {
+
+        if (originalVC == null) throw new IllegalArgumentException("the original VC cannot be null");
+        if (allelesToUse == null) throw new IllegalArgumentException("the alleles to use cannot be null");
+
+        final int numOriginalAltAlleles = originalVC.getNAlleles() - 1;
+        final boolean[] alleleIndexesToKeep = new boolean[numOriginalAltAlleles + 1];
+
+        // the reference Allele is definitely still used
+        alleleIndexesToKeep[0] = true;
+        for (int i = 0; i < numOriginalAltAlleles; i++) {
+            if (allelesToUse.contains(originalVC.getAlternateAllele(i)))
+                alleleIndexesToKeep[i + 1] = true;
+        }
+
+        return alleleIndexesToKeep;
+    }
+
+    /**
+     * Make a new SAC array from the a subset of the genotype's original SAC
+     *
+     * @param g               the genotype
+     * @param sacIndexesToUse the indexes in the SAC to use given the allelesToUse (@see #determineSACIndexesToUse())
+     * @return subset of SACs from the original genotype, the original SACs if sacIndexesToUse is null
+     */
+    public static int[] makeNewSACs(final Genotype g, final List<Integer> sacIndexesToUse) {
+
+        if (g == null) throw new IllegalArgumentException("the genotype cannot be null");
+
+        final int[] oldSACs  = getSACs(g);
+
+        if (sacIndexesToUse == null) {
+            return oldSACs;
+        } else {
+            final int[] newSACs = new int[sacIndexesToUse.size()];
+            int newIndex = 0;
+            for (final int oldIndex : sacIndexesToUse) {
+                newSACs[newIndex++] = oldSACs[oldIndex];
+            }
+            return newSACs;
+        }
+    }
+
+
+    /**
+     * Get the genotype SACs
+     *
+     * @param g the genotype
+     * @return an arrays of SACs
+     * @throws ReviewedGATKException if the type of the SACs is unexpected
+     */
+    private static int[] getSACs(final Genotype g) {
+
+        if ( g == null ) throw new IllegalArgumentException("the Genotype cannot be null");
+        if ( !g.hasExtendedAttribute(GATKVCFConstants.STRAND_COUNT_BY_SAMPLE_KEY) )
+            throw new IllegalArgumentException("Genotype must have SAC");
+
+        if ( g.getExtendedAttributes().get(GATKVCFConstants.STRAND_COUNT_BY_SAMPLE_KEY).getClass().equals(String.class) ) {
+            final String SACsString = (String) g.getExtendedAttributes().get(GATKVCFConstants.STRAND_COUNT_BY_SAMPLE_KEY);
+            ArrayList<String> stringSACs = Utils.split(SACsString, ",");
+            final int[] intSACs = new int[stringSACs.size()];
+            int i = 0;
+            for (String sac : stringSACs)
+                intSACs[i++] = Integer.parseInt(sac);
+
+            return intSACs;
+        }
+        else if ( g.getExtendedAttributes().get(GATKVCFConstants.STRAND_COUNT_BY_SAMPLE_KEY).getClass().equals(int[].class) )
+            return (int[]) g.getExtendedAttributes().get(GATKVCFConstants.STRAND_COUNT_BY_SAMPLE_KEY);
+        else
+            throw new ReviewedGATKException("Unexpected SAC type");
+    }
+
+    /**
+     * Create the new GenotypesContext with the subsetted PLs, SACs and ADs
+     *
+     * @param originalGs               the original GenotypesContext
+     * @param originalVC               the original VariantContext
+     * @param allelesToUse             the actual alleles to use with the new Genotypes
+     * @param likelihoodIndexesToUse   the indexes in the PL to use given the allelesToUse (@see #determineDiploidLikelihoodIndexesToUse())
+     * @param sacIndexesToUse          the indexes in the SAC to use given the allelesToUse (@see #determineSACIndexesToUse())
+     * @param assignGenotypes          assignment strategy for the (subsetted) PLs
+     * @return a new non-null GenotypesContext
+     */
+    private static GenotypesContext createGenotypesWithSubsettedLikelihoods(final GenotypesContext originalGs,
+                                                                            final VariantContext originalVC,
+                                                                            final List<Allele> allelesToUse,
+                                                                            final List<Integer> likelihoodIndexesToUse,
+                                                                            final List<Integer> sacIndexesToUse,
+                                                                            final GenotypeAssignmentMethod assignGenotypes) {
+
+        if ( originalGs == null ) throw new IllegalArgumentException("the original GenotypesContext cannot be null");
+        if ( originalVC == null ) throw new IllegalArgumentException("the original VariantContext cannot be null");
+        if ( allelesToUse == null ) throw new IllegalArgumentException("the alleles to use cannot be null");
+
+        // the new genotypes to create
+        final GenotypesContext newGTs = GenotypesContext.create(originalGs.size());
+
+        // make sure we are seeing the expected number of likelihoods per sample
+        final int expectedNumLikelihoods = GenotypeLikelihoods.numLikelihoods(originalVC.getNAlleles(), 2);
+
+        // the samples
+        final List<String> sampleIndices = originalGs.getSampleNamesOrderedByName();
+
+        // create the new genotypes
+        for ( int k = 0; k < originalGs.size(); k++ ) {
+            final Genotype g = originalGs.get(sampleIndices.get(k));
+            final GenotypeBuilder gb = new GenotypeBuilder(g);
+
+            // create the new likelihoods array from the used alleles
+            double[] newLikelihoods;
+            if ( !g.hasLikelihoods() ) {
+                // we don't have any likelihoods, so we null out PLs and make G ./.
+                newLikelihoods = null;
+                gb.noPL();
+            } else {
+                final double[] originalLikelihoods = g.getLikelihoods().getAsVector();
+                if ( likelihoodIndexesToUse == null ) {
+                    newLikelihoods = originalLikelihoods;
+                } else if ( originalLikelihoods.length != expectedNumLikelihoods ) {
+                    logger.debug("Wrong number of likelihoods in sample " + g.getSampleName() + " at " + originalVC + " got " + g.getLikelihoodsString() + " but expected " + expectedNumLikelihoods);
+                    newLikelihoods = null;
+                } else {
+                    newLikelihoods = new double[likelihoodIndexesToUse.size()];
+                    int newIndex = 0;
+                    for ( final int oldIndex : likelihoodIndexesToUse )
+                        newLikelihoods[newIndex++] = originalLikelihoods[oldIndex];
+
+                    // might need to re-normalize
+                    newLikelihoods = MathUtils.normalizeFromLog10(newLikelihoods, false, true);
+                }
+
+                if ( newLikelihoods == null || likelihoodsAreUninformative(newLikelihoods) )
+                    gb.noPL();
+                else
+                    gb.PL(newLikelihoods);
+            }
+
+            // create the new strand allele counts array from the used alleles
+            if ( g.hasExtendedAttribute(GATKVCFConstants.STRAND_COUNT_BY_SAMPLE_KEY)){
+                int[] newSACs = makeNewSACs(g, sacIndexesToUse);
+                gb.attribute(GATKVCFConstants.STRAND_COUNT_BY_SAMPLE_KEY, newSACs);
+            }
+
+            updateGenotypeAfterSubsetting(g.getAlleles(), gb, assignGenotypes, newLikelihoods, allelesToUse);
+            newGTs.add(gb.make());
+        }
+
+        return fixADFromSubsettedAlleles(newGTs, originalVC, allelesToUse);
+    }
+
+    private static boolean likelihoodsAreUninformative(final double[] likelihoods) {
+        return MathUtils.sum(likelihoods) > SUM_GL_THRESH_NOCALL;
+    }
+
    /**
     * Add the genotype call (GT) field to GenotypeBuilder using the requested algorithm assignmentMethod
     *
     * @param originalGT the original genotype calls, cannot be null
     * @param gb the builder where we should put our newly called alleles, cannot be null
     * @param assignmentMethod the method to use to do the assignment, cannot be null
     * @param newLikelihoods a vector of log10 likelihoods to use if the method requires PLs; may be
     *                       null or uninformative, in which case PL-based assignment no-calls the sample
     * @param allelesToUse the alleles we are using for our subsetting; index 0 must be the reference allele
     */
    public static void updateGenotypeAfterSubsetting(final List<Allele> originalGT,
                                                     final GenotypeBuilder gb,
                                                     final GenotypeAssignmentMethod assignmentMethod,
                                                     final double[] newLikelihoods,
                                                     final List<Allele> allelesToUse) {
        switch ( assignmentMethod ) {
            case DO_NOT_ASSIGN_GENOTYPES:
                // leave the builder's GT exactly as it was
                break;
            case SET_TO_NO_CALL:
                gb.alleles(NO_CALL_ALLELES);
                gb.noGQ();
                break;
            case USE_PLS_TO_ASSIGN:
                if ( newLikelihoods == null || likelihoodsAreUninformative(newLikelihoods) ) {
                    // if there is no mass on the (new) likelihoods, then just no-call the sample
                    gb.alleles(NO_CALL_ALLELES);
                    gb.noGQ();
                } else {
                    // find the genotype with maximum likelihood and assign its allele pair
                    final int PLindex = MathUtils.maxElementIndex(newLikelihoods);
                    GenotypeLikelihoods.GenotypeLikelihoodsAllelePair alleles = GenotypeLikelihoods.getAllelePair(PLindex);
                    gb.alleles(Arrays.asList(allelesToUse.get(alleles.alleleIndex1), allelesToUse.get(alleles.alleleIndex2)));
                    // GQ is derived from the best vs. second-best likelihood gap
                    gb.log10PError(GenotypeLikelihoods.getGQLog10FromLikelihoods(PLindex, newLikelihoods));
                }
                break;
            case BEST_MATCH_TO_ORIGINAL:
                // keep each original allele when it survives the subsetting; otherwise map it to ref
                final List<Allele> best = new LinkedList<>();
                final Allele ref = allelesToUse.get(0); // WARNING -- should be checked in input argument
                for ( final Allele originalAllele : originalGT ) {
                    best.add(allelesToUse.contains(originalAllele) ? originalAllele : ref);
                }
                // the PLs/GQ no longer describe the remapped call, so drop them
                gb.noGQ();
                gb.noPL();
                gb.alleles(best);
                break;
        }
    }
+
+    /**
+     * Subset the samples in VC to reference only information with ref call alleles
+     *
+     * Preserves DP if present
+     *
+     * @param vc the variant context to subset down to
+     * @param ploidy ploidy to use if a genotype doesn't have any alleles
+     * @return a GenotypesContext
+     */
+    public static GenotypesContext subsetToRefOnly(final VariantContext vc, final int ploidy) {
+        if ( vc == null ) throw new IllegalArgumentException("vc cannot be null");
+        if ( ploidy < 1 ) throw new IllegalArgumentException("ploidy must be >= 1 but got " + ploidy);
+
+        // the genotypes with PLs
+        final GenotypesContext oldGTs = vc.getGenotypes();
+
+        // optimization: if no input genotypes, just exit
+        if (oldGTs.isEmpty()) return oldGTs;
+
+        // the new genotypes to create
+        final GenotypesContext newGTs = GenotypesContext.create(oldGTs.size());
+
+        final Allele ref = vc.getReference();
+        final List<Allele> diploidRefAlleles = Arrays.asList(ref, ref);
+
+        // create the new genotypes
+        for ( final Genotype g : vc.getGenotypes() ) {
+            final int gPloidy = g.getPloidy() == 0 ? ploidy : g.getPloidy();
+            final List<Allele> refAlleles = gPloidy == 2 ? diploidRefAlleles : Collections.nCopies(gPloidy, ref);
+            final GenotypeBuilder gb = new GenotypeBuilder(g.getSampleName(), refAlleles);
+            if ( g.hasDP() ) gb.DP(g.getDP());
+            if ( g.hasGQ() ) gb.GQ(g.getGQ());
+            newGTs.add(gb.make());
+        }
+
+        return newGTs;
+    }
+
+    /**
+     * Assign genotypes (GTs) to the samples in the Variant Context greedily based on the PLs
+     *
+     * @param vc            variant context with genotype likelihoods
+     * @return genotypes context
+     */
+    public static GenotypesContext assignDiploidGenotypes(final VariantContext vc) {
+        return subsetDiploidAlleles(vc, vc.getAlleles(), GenotypeAssignmentMethod.USE_PLS_TO_ASSIGN);
+    }
+
+    /**
+     * Split variant context into its biallelic components if there are more than 2 alleles
+     *
+     * For VC has A/B/C alleles, returns A/B and A/C contexts.
+     * Genotypes are all no-calls now (it's not possible to fix them easily)
+     * Alleles are right trimmed to satisfy VCF conventions
+     *
+     * If vc is biallelic or non-variant it is just returned
+     *
+     * Chromosome counts are updated (but they are by definition 0)
+     *
+     * @param vc a potentially multi-allelic variant context
+     * @return a list of bi-allelic (or monomorphic) variant context
+     */
+    public static List<VariantContext> splitVariantContextToBiallelics(final VariantContext vc) {
+        return splitVariantContextToBiallelics(vc, false, GenotypeAssignmentMethod.SET_TO_NO_CALL);
+    }
+
+    /**
+     * Split variant context into its biallelic components if there are more than 2 alleles
+     *
+     * For VC has A/B/C alleles, returns A/B and A/C contexts.
+     * Genotypes are all no-calls now (it's not possible to fix them easily)
+     * Alleles are right trimmed to satisfy VCF conventions
+     *
+     * If vc is biallelic or non-variant it is just returned
+     *
+     * Chromosome counts are updated (but they are by definition 0)
+     *
+     * @param vc a potentially multi-allelic variant context
+     * @param trimLeft if true, we will also left trim alleles, potentially moving the resulting vcs forward on the genome
+     * @return a list of bi-allelic (or monomorphic) variant context
+     */
+    public static List<VariantContext> splitVariantContextToBiallelics(final VariantContext vc, final boolean trimLeft, final GenotypeAssignmentMethod genotypeAssignmentMethod) {
+        if ( ! vc.isVariant() || vc.isBiallelic() )
+            // non variant or biallelics already satisfy the contract
+            return Collections.singletonList(vc);
+        else {
+            final List<VariantContext> biallelics = new LinkedList<>();
+
+            for ( final Allele alt : vc.getAlternateAlleles() ) {
+                VariantContextBuilder builder = new VariantContextBuilder(vc);
+                final List<Allele> alleles = Arrays.asList(vc.getReference(), alt);
+                builder.alleles(alleles);
+                builder.genotypes(subsetDiploidAlleles(vc, alleles, genotypeAssignmentMethod));
+                VariantContextUtils.calculateChromosomeCounts(builder, true);
+                final VariantContext trimmed = trimAlleles(builder.make(), trimLeft, true);
+                biallelics.add(trimmed);
+            }
+
+            return biallelics;
+        }
+    }
+
+    public static Genotype removePLsAndAD(final Genotype g) {
+        return ( g.hasLikelihoods() || g.hasAD() ) ? new GenotypeBuilder(g).noPL().noAD().make() : g;
+    }
+
+    //TODO consider refactor variant-context merging code so that we share as much as possible between
+    //TODO simpleMerge and referenceConfidenceMerge
+    //TODO likely using a separate helper class or hierarchy.
+    /**
+     * Merges VariantContexts into a single hybrid.  Takes genotypes for common samples in priority order, if provided.
+     * If uniquifySamples is true, the priority order is ignored and names are created by concatenating the VC name with
+     * the sample name
+     *
+     * @param unsortedVCs               collection of unsorted VCs
+     * @param priorityListOfVCs         priority list detailing the order in which we should grab the VCs
+     * @param filteredRecordMergeType   merge type for filtered records
+     * @param genotypeMergeOptions      merge option for genotypes
+     * @param annotateOrigin            should we annotate the set it came from?
+     * @param printMessages             should we print messages?
+     * @param setKey                    the key name of the set
+     * @param filteredAreUncalled       are filtered records uncalled?
+     * @param mergeInfoWithMaxAC        should we merge in info from the VC with maximum allele count?
+     * @return new VariantContext       representing the merge of unsortedVCs
+     */
+    public static VariantContext simpleMerge(final Collection<VariantContext> unsortedVCs,
+                                             final List<String> priorityListOfVCs,
+                                             final FilteredRecordMergeType filteredRecordMergeType,
+                                             final GenotypeMergeType genotypeMergeOptions,
+                                             final boolean annotateOrigin,
+                                             final boolean printMessages,
+                                             final String setKey,
+                                             final boolean filteredAreUncalled,
+                                             final boolean mergeInfoWithMaxAC ) {
+        int originalNumOfVCs = priorityListOfVCs == null ? 0 : priorityListOfVCs.size();
+        return simpleMerge(unsortedVCs, priorityListOfVCs, originalNumOfVCs, filteredRecordMergeType, genotypeMergeOptions, annotateOrigin, printMessages, setKey, filteredAreUncalled, mergeInfoWithMaxAC);
+    }
+
+    /**
+     * Merges VariantContexts into a single hybrid.  Takes genotypes for common samples in priority order, if provided.
+     * If uniquifySamples is true, the priority order is ignored and names are created by concatenating the VC name with
+     * the sample name.
+     * simpleMerge does not verify any more unique sample names EVEN if genotypeMergeOptions == GenotypeMergeType.REQUIRE_UNIQUE. One should use
+     * SampleUtils.verifyUniqueSamplesNames to check that before using simpleMerge.
+     *
+     * For more information on this method see: http://www.thedistractionnetwork.com/programmer-problem/
+     *
+     * @param unsortedVCs               collection of unsorted VCs
+     * @param priorityListOfVCs         priority list detailing the order in which we should grab the VCs
+     * @param filteredRecordMergeType   merge type for filtered records
+     * @param genotypeMergeOptions      merge option for genotypes
+     * @param annotateOrigin            should we annotate the set it came from?
+     * @param printMessages             should we print messages?
+     * @param setKey                    the key name of the set
+     * @param filteredAreUncalled       are filtered records uncalled?
+     * @param mergeInfoWithMaxAC        should we merge in info from the VC with maximum allele count?
+     * @return new VariantContext       representing the merge of unsortedVCs
+     */
+    public static VariantContext simpleMerge(final Collection<VariantContext> unsortedVCs,
+                                             final List<String> priorityListOfVCs,
+                                             final int originalNumOfVCs,
+                                             final FilteredRecordMergeType filteredRecordMergeType,
+                                             final GenotypeMergeType genotypeMergeOptions,
+                                             final boolean annotateOrigin,
+                                             final boolean printMessages,
+                                             final String setKey,
+                                             final boolean filteredAreUncalled,
+                                             final boolean mergeInfoWithMaxAC ) {
+        if ( unsortedVCs == null || unsortedVCs.isEmpty() )
+            return null;
+
+        if (priorityListOfVCs != null && originalNumOfVCs != priorityListOfVCs.size())
+            throw new IllegalArgumentException("the number of the original VariantContexts must be the same as the number of VariantContexts in the priority list");
+
+        if ( annotateOrigin && priorityListOfVCs == null && originalNumOfVCs == 0)
+            throw new IllegalArgumentException("Cannot merge calls and annotate their origins without a complete priority list of VariantContexts or the number of original VariantContexts");
+
+        final List<VariantContext> preFilteredVCs = sortVariantContextsByPriority(unsortedVCs, priorityListOfVCs, genotypeMergeOptions);
+        // Make sure all variant contexts are padded with reference base in case of indels if necessary
+        List<VariantContext> VCs = new ArrayList<>();
+
+        for (final VariantContext vc : preFilteredVCs) {
+            if ( ! filteredAreUncalled || vc.isNotFiltered() )
+                VCs.add(vc);
+        }
+
+        if ( VCs.isEmpty() ) // everything is filtered out and we're filteredAreUncalled
+            return null;
+
+        // establish the baseline info from the first VC
+        final VariantContext first = VCs.get(0);
+        final String name = first.getSource();
+        final Allele refAllele = determineReferenceAllele(VCs);
+
+        final LinkedHashSet<Allele> alleles = new LinkedHashSet<>();
+        final Set<String> filters = new HashSet<>();
+        final Map<String, Object> attributes = new LinkedHashMap<>();
+        final Set<String> inconsistentAttributes = new HashSet<>();
+        final Set<String> variantSources = new HashSet<>(); // contains the set of sources we found in our set of VCs that are variant
+        final Set<String> rsIDs = new LinkedHashSet<>(1); // most of the time there's one id
+
+        VariantContext longestVC = first;
+        int depth = 0;
+        int maxAC = -1;
+        final Map<String, Object> attributesWithMaxAC = new LinkedHashMap<>();
+        double log10PError = CommonInfo.NO_LOG10_PERROR;
+        boolean anyVCHadFiltersApplied = false;
+        VariantContext vcWithMaxAC = null;
+        GenotypesContext genotypes = GenotypesContext.create();
+
+        // counting the number of filtered and variant VCs
+        int nFiltered = 0;
+
+        boolean remapped = false;
+
+        // cycle through and add info from the other VCs, making sure the loc/reference matches
+        for ( final VariantContext vc : VCs ) {
+            if ( longestVC.getStart() != vc.getStart() )
+                throw new IllegalStateException("BUG: attempting to merge VariantContexts with different start sites: first="+ first.toString() + " second=" + vc.toString());
+
+            if ( VariantContextUtils.getSize(vc) > VariantContextUtils.getSize(longestVC) )
+                longestVC = vc; // get the longest location
+
+            nFiltered += vc.isFiltered() ? 1 : 0;
+            if ( vc.isVariant() ) variantSources.add(vc.getSource());
+
+            AlleleMapper alleleMapping = resolveIncompatibleAlleles(refAllele, vc, alleles);
+            remapped = remapped || alleleMapping.needsRemapping();
+
+            alleles.addAll(alleleMapping.values());
+
+            mergeGenotypes(genotypes, vc, alleleMapping, genotypeMergeOptions == GenotypeMergeType.UNIQUIFY);
+
+            // We always take the QUAL of the first VC with a non-MISSING qual for the combined value
+            if ( log10PError == CommonInfo.NO_LOG10_PERROR )
+                log10PError =  vc.getLog10PError();
+
+            filters.addAll(vc.getFilters());
+            anyVCHadFiltersApplied |= vc.filtersWereApplied();
+
+            //
+            // add attributes
+            //
+            // special case DP (add it up) and ID (just preserve it)
+            //
+            if (vc.hasAttribute(VCFConstants.DEPTH_KEY))
+                depth += vc.getAttributeAsInt(VCFConstants.DEPTH_KEY, 0);
+            if ( vc.hasID() ) rsIDs.add(vc.getID());
+            if (mergeInfoWithMaxAC && vc.hasAttribute(VCFConstants.ALLELE_COUNT_KEY)) {
+                String rawAlleleCounts = vc.getAttributeAsString(VCFConstants.ALLELE_COUNT_KEY, null);
+                // lets see if the string contains a "," separator
+                if (rawAlleleCounts.contains(VCFConstants.INFO_FIELD_ARRAY_SEPARATOR)) {
+                    final List<String> alleleCountArray = Arrays.asList(rawAlleleCounts.substring(1, rawAlleleCounts.length() - 1).split(VCFConstants.INFO_FIELD_ARRAY_SEPARATOR));
+                    for (final String alleleCount : alleleCountArray) {
+                        final int ac = Integer.valueOf(alleleCount.trim());
+                        if (ac > maxAC) {
+                            maxAC = ac;
+                            vcWithMaxAC = vc;
+                        }
+                    }
+                } else {
+                    final int ac = Integer.valueOf(rawAlleleCounts);
+                    if (ac > maxAC) {
+                        maxAC = ac;
+                        vcWithMaxAC = vc;
+                    }
+                }
+            }
+
+            for (final Map.Entry<String, Object> p : vc.getAttributes().entrySet()) {
+                final String key = p.getKey();
+                final Object value = p.getValue();
+                // only output annotations that have the same value in every input VC
+                // if we don't like the key already, don't go anywhere
+                if ( ! inconsistentAttributes.contains(key) ) {
+                    final boolean alreadyFound = attributes.containsKey(key);
+                    final Object boundValue = attributes.get(key);
+                    final boolean boundIsMissingValue = alreadyFound && boundValue.equals(VCFConstants.MISSING_VALUE_v4);
+
+                    if ( alreadyFound && ! boundValue.equals(value) && ! boundIsMissingValue ) {
+                        // we found the value but we're inconsistent, put it in the exclude list
+                        inconsistentAttributes.add(key);
+                        attributes.remove(key);
+                    } else if ( ! alreadyFound || boundIsMissingValue )  { // no value
+                        attributes.put(key, value);
+                    }
+                }
+            }
+        }
+
+        // if we have more alternate alleles in the merged VC than in one or more of the
+        // original VCs, we need to strip out the GL/PLs (because they are no longer accurate), as well as allele-dependent attributes like AC,AF, and AD
+        for ( final VariantContext vc : VCs ) {
+            if (vc.getAlleles().size() == 1)
+                continue;
+            if ( hasPLIncompatibleAlleles(alleles, vc.getAlleles())) {
+                if ( ! genotypes.isEmpty() ) {
+                    logger.debug(String.format("Stripping PLs at %s:%d-%d due to incompatible alleles merged=%s vs. single=%s",
+                            vc.getChr(), vc.getStart(), vc.getEnd(), alleles, vc.getAlleles()));
+                }
+                genotypes = stripPLsAndAD(genotypes);
+                // this will remove stale AC,AF attributed from vc
+                VariantContextUtils.calculateChromosomeCounts(vc, attributes, true);
+                break;
+            }
+        }
+
+        // take the VC with the maxAC and pull the attributes into a modifiable map
+        if ( mergeInfoWithMaxAC && vcWithMaxAC != null ) {
+            attributesWithMaxAC.putAll(vcWithMaxAC.getAttributes());
+        }
+
+        // if at least one record was unfiltered and we want a union, clear all of the filters
+        if ( (filteredRecordMergeType == FilteredRecordMergeType.KEEP_IF_ANY_UNFILTERED && nFiltered != VCs.size()) || filteredRecordMergeType == FilteredRecordMergeType.KEEP_UNCONDITIONAL )
+            filters.clear();
+
+
+        if ( annotateOrigin ) { // we care about where the call came from
+            String setValue;
+            if ( nFiltered == 0 && variantSources.size() == originalNumOfVCs ) // nothing was unfiltered
+                setValue = MERGE_INTERSECTION;
+            else if ( nFiltered == VCs.size() )     // everything was filtered out
+                setValue = MERGE_FILTER_IN_ALL;
+            else if ( variantSources.isEmpty() )    // everyone was reference
+                setValue = MERGE_REF_IN_ALL;
+            else {
+                final LinkedHashSet<String> s = new LinkedHashSet<>();
+                for ( final VariantContext vc : VCs )
+                    if ( vc.isVariant() )
+                        s.add( vc.isFiltered() ? MERGE_FILTER_PREFIX + vc.getSource() : vc.getSource() );
+                setValue = Utils.join("-", s);
+            }
+
+            if ( setKey != null ) {
+                attributes.put(setKey, setValue);
+                if( mergeInfoWithMaxAC && vcWithMaxAC != null ) {
+                    attributesWithMaxAC.put(setKey, setValue);
+                }
+            }
+        }
+
+        if ( depth > 0 )
+            attributes.put(VCFConstants.DEPTH_KEY, String.valueOf(depth));
+
+        final String ID = rsIDs.isEmpty() ? VCFConstants.EMPTY_ID_FIELD : Utils.join(",", rsIDs);
+
+        final VariantContextBuilder builder = new VariantContextBuilder().source(name).id(ID);
+        builder.loc(longestVC.getChr(), longestVC.getStart(), longestVC.getEnd());
+        builder.alleles(alleles);
+        builder.genotypes(genotypes);
+        builder.log10PError(log10PError);
+        if ( anyVCHadFiltersApplied ) {
+            builder.filters(filters.isEmpty() ? filters : new TreeSet<>(filters));
+        }
+        builder.attributes(new TreeMap<>(mergeInfoWithMaxAC ? attributesWithMaxAC : attributes));
+
+        // Trim the padded bases of all alleles if necessary
+        final VariantContext merged = builder.make();
+        if ( printMessages && remapped ) System.out.printf("Remapped => %s%n", merged);
+        return merged;
+    }
+
+    //TODO as part of a larger refactoring effort remapAlleles can be merged with createAlleleMapping.
+
+    public static GenotypesContext stripPLsAndAD(final GenotypesContext genotypes) {
+        final GenotypesContext newGs = GenotypesContext.create(genotypes.size());
+
+        for ( final Genotype g : genotypes ) {
+            newGs.add(removePLsAndAD(g));
+        }
+
+        return newGs;
+    }
+
+    /**
+     * Updates the PLs, SACs and AD of the Genotypes in the newly selected VariantContext to reflect the fact that some alleles
+     * from the original VariantContext are no longer present.
+     *
+     * @param selectedVC  the selected (new) VariantContext
+     * @param originalVC  the original VariantContext
+     * @return a new non-null GenotypesContext
+     */
+    public static GenotypesContext updatePLsSACsAD(final VariantContext selectedVC, final VariantContext originalVC) {
+        if ( selectedVC == null ) throw new IllegalArgumentException("the selected VariantContext cannot be null");
+        if ( originalVC == null ) throw new IllegalArgumentException("the original VariantContext cannot be null");
+
+        final int numNewAlleles = selectedVC.getAlleles().size();
+        final int numOriginalAlleles = originalVC.getAlleles().size();
+
+        // if we have more alternate alleles in the selected VC than in the original VC, then something is wrong
+        if ( numNewAlleles > numOriginalAlleles )
+            throw new IllegalArgumentException("Attempting to fix PLs, SACs and AD from what appears to be a *combined* VCF and not a selected one");
+
+        final GenotypesContext oldGs = selectedVC.getGenotypes();
+
+        // if we have the same number of alternate alleles in the selected VC as in the original VC, then we don't need to fix anything
+        if ( numNewAlleles == numOriginalAlleles )
+            return oldGs;
+
+        return fixDiploidGenotypesFromSubsettedAlleles(oldGs, originalVC, selectedVC.getAlleles());
+    }
+
+    /**
+     * Fix the PLs, SACs and ADs for the GenotypesContext of a VariantContext that has been subset
+     *
+     * @param originalGs       the original GenotypesContext
+     * @param originalVC       the original VariantContext
+     * @param allelesToUse     the new (sub)set of alleles to use
+     * @return a new non-null GenotypesContext
+     */
+    static private GenotypesContext fixDiploidGenotypesFromSubsettedAlleles(final GenotypesContext originalGs, final VariantContext originalVC, final List<Allele> allelesToUse) {
+
+        if ( originalGs == null ) throw new IllegalArgumentException("the selected GenotypesContext cannot be null");
+        if ( originalVC == null ) throw new IllegalArgumentException("the original VariantContext cannot be null");
+        if ( allelesToUse == null ) throw new IllegalArgumentException("the alleles to use cannot be null");
+
+        // find the likelihoods indexes to use from the used alternate alleles
+        final List<Integer> likelihoodIndexesToUse = determineDiploidLikelihoodIndexesToUse(originalVC, allelesToUse);
+
+        // find the strand allele count indexes to use from the used alternate alleles
+        final List<Integer> sacIndexesToUse = determineSACIndexesToUse(originalVC, allelesToUse);
+
+        // create the new genotypes
+        return createGenotypesWithSubsettedLikelihoods(originalGs, originalVC, allelesToUse, likelihoodIndexesToUse, sacIndexesToUse, GenotypeAssignmentMethod.DO_NOT_ASSIGN_GENOTYPES);
+    }
+
+    /**
+     * Fix the AD for the GenotypesContext of a VariantContext that has been subset
+     *
+     * @param originalGs       the original GenotypesContext
+     * @param originalVC       the original VariantContext
+     * @param allelesToUse     the new (sub)set of alleles to use
+     * @return a new non-null GenotypesContext
+     */
+    public static GenotypesContext fixADFromSubsettedAlleles(final GenotypesContext originalGs, final VariantContext originalVC, final List<Allele> allelesToUse) {
+        if (originalGs == null) throw new IllegalArgumentException("the original Gs cannot be null");
+        if (originalVC == null) throw new IllegalArgumentException("the original VC cannot be null");
+        if (allelesToUse == null) throw new IllegalArgumentException("the alleles to use list cannot be null");
+
+        // the bitset representing the allele indexes we want to keep
+        final boolean[] alleleIndexesToUse = getAlleleIndexBitset(originalVC, allelesToUse);
+
+        // the new genotypes to create
+        final GenotypesContext newGTs = GenotypesContext.create(originalGs.size());
+
+        // the samples
+        final List<String> sampleIndices = originalGs.getSampleNamesOrderedByName();
+
+        // create the new genotypes
+        for ( int k = 0; k < originalGs.size(); k++ ) {
+            final Genotype g = originalGs.get(sampleIndices.get(k));
+            newGTs.add(fixAD(g, alleleIndexesToUse, allelesToUse.size()));
+        }
+
+        return newGTs;
+    }
+
+    /**
+     * Fix the AD for the given Genotype
+     *
+     * @param genotype              the original Genotype
+     * @param alleleIndexesToUse    a bitset describing whether or not to keep a given index
+     * @param nAllelesToUse         how many alleles we are keeping
+     * @return a non-null Genotype
+     */
+    private static Genotype fixAD(final Genotype genotype, final boolean[] alleleIndexesToUse, final int nAllelesToUse) {
+        // if it ain't broke don't fix it
+        if ( !genotype.hasAD() )
+            return genotype;
+
+        final GenotypeBuilder builder = new GenotypeBuilder(genotype);
+
+        final int[] oldAD = genotype.getAD();
+        if ( oldAD.length != alleleIndexesToUse.length ) {
+            builder.noAD();
+        } else {
+            final int[] newAD = new int[nAllelesToUse];
+            int currentIndex = 0;
+            for ( int i = 0; i < oldAD.length; i++ ) {
+                if ( alleleIndexesToUse[i] )
+                    newAD[currentIndex++] = oldAD[i];
+            }
+            builder.AD(newAD);
+        }
+        return builder.make();
+    }
+
    /**
     * Determine the common reference allele for a set of variant contexts.
     * Convenience overload that delegates to the two-argument version with no
     * location restriction.
     *
     * @param VCs the variant contexts to inspect
     * @return the reference allele chosen by the two-argument overload
     */
    private static Allele determineReferenceAllele(final List<VariantContext> VCs) {
        return determineReferenceAllele(VCs, null);
    }
+
+    public static boolean contextMatchesLoc(final VariantContext vc, final GenomeLoc loc) {
+        return loc == null || loc.getStart() == vc.getStart();
+    }
+
+    static private AlleleMapper resolveIncompatibleAlleles(final Allele refAllele, final VariantContext vc, final LinkedHashSet<Allele> allAlleles) {
+        if ( refAllele.equals(vc.getReference()) )
+            return new AlleleMapper(vc);
+        else {
+            final Map<Allele, Allele> map = createAlleleMapping(refAllele, vc, allAlleles);
+            map.put(vc.getReference(), refAllele);
+            return new AlleleMapper(map);
+        }
+    }
+
+    //TODO as part of a larger refactoring effort {@link #createAlleleMapping} can be merged with {@link ReferenceConfidenceVariantContextMerger#remapAlleles}.
+    /**
+     * Create an allele mapping for the given context where its reference allele must (potentially) be extended to the given allele
+     *
+     * The refAllele is the longest reference allele seen at this start site.
+     * So imagine it is:
+     * refAllele: ACGTGA
+     * myRef:     ACGT
+     * myAlt:     A
+     *
+     * We need to remap all of the alleles in vc to include the extra GA so that
+     * myRef => refAllele and myAlt => AGA
+     *
+     * @param refAllele          the new (extended) reference allele
+     * @param oneVC              the Variant Context to extend
+     * @param currentAlleles     the list of alleles already created
+     * @return a non-null mapping of original alleles to new (extended) ones
+     */
+    protected static Map<Allele, Allele> createAlleleMapping(final Allele refAllele,
+                                                           final VariantContext oneVC,
+                                                           final Collection<Allele> currentAlleles) {
+        final Allele myRef = oneVC.getReference();
+        if ( refAllele.length() <= myRef.length() ) throw new IllegalStateException("BUG: myRef="+myRef+" is longer than refAllele="+refAllele);
+
+        final byte[] extraBases = Arrays.copyOfRange(refAllele.getBases(), myRef.length(), refAllele.length());
+
+        final Map<Allele, Allele> map = new HashMap<>();
+        for ( final Allele a : oneVC.getAlternateAlleles() ) {
+            if ( isUsableAlternateAllele(a) ) {
+                Allele extended = Allele.extend(a, extraBases);
+                for ( final Allele b : currentAlleles )
+                    if ( extended.equals(b) )
+                        extended = b;
+                map.put(a, extended);
+            }
+            // as long as it's not a reference allele then we want to add it as is (this covers e.g. symbolic and spanning deletion alleles)
+            else if ( !a.isReference() ) {
+                map.put(a, a);
+            }
+        }
+
+        return map;
+    }
+
+    static private boolean isUsableAlternateAllele(final Allele allele) {
+        return ! (allele.isReference() || allele.isSymbolic() || allele == Allele.SPAN_DEL );
+    }
+
+    public static List<VariantContext> sortVariantContextsByPriority(Collection<VariantContext> unsortedVCs, List<String> priorityListOfVCs, GenotypeMergeType mergeOption ) {
+        if ( mergeOption == GenotypeMergeType.PRIORITIZE && priorityListOfVCs == null )
+            throw new IllegalArgumentException("Cannot merge calls by priority with a null priority list");
+
+        if ( priorityListOfVCs == null || mergeOption == GenotypeMergeType.UNSORTED )
+            return new ArrayList<>(unsortedVCs);
+        else {
+            ArrayList<VariantContext> sorted = new ArrayList<>(unsortedVCs);
+            Collections.sort(sorted, new CompareByPriority(priorityListOfVCs));
+            return sorted;
+        }
+    }
+
+    private static void mergeGenotypes(GenotypesContext mergedGenotypes, VariantContext oneVC, AlleleMapper alleleMapping, boolean uniquifySamples) {
+        //TODO: should we add a check for cases when the genotypeMergeOption is REQUIRE_UNIQUE
+        for ( final Genotype g : oneVC.getGenotypes() ) {
+            final String name = mergedSampleName(oneVC.getSource(), g.getSampleName(), uniquifySamples);
+            if ( ! mergedGenotypes.containsSample(name) ) {
+                // only add if the name is new
+                Genotype newG = g;
+
+                if ( uniquifySamples || alleleMapping.needsRemapping() ) {
+                    final List<Allele> alleles = alleleMapping.needsRemapping() ? alleleMapping.remap(g.getAlleles()) : g.getAlleles();
+                    newG = new GenotypeBuilder(g).name(name).alleles(alleles).make();
+                }
+
+                mergedGenotypes.add(newG);
+            }
+        }
+    }
+
    /**
     * Cached NO_CALL immutable lists where the position ith contains the list with i elements.
     */
    // Index i holds an immutable list of i NO_CALL alleles (index 0 is the empty list).
    // Grown on demand by ensureNoCallListsCapacity(); read unsynchronized by noCallAlleles().
    private static List<Allele>[] NOCALL_LISTS = new List[] {
            Collections.emptyList(),
            Collections.singletonList(Allele.NO_CALL),
            Collections.nCopies(2,Allele.NO_CALL)
    };
+
    /**
     * Synchronized code to ensure that {@link #NOCALL_LISTS} has enough entries beyond the requested ploidy
     * @param capacity the requested ploidy.
     */
    private static synchronized void ensureNoCallListsCapacity(final int capacity) {
        final int currentCapacity = NOCALL_LISTS.length - 1;
        // another thread may have grown the array while we waited for the lock
        if (currentCapacity >= capacity)
            return;
        // grow at least geometrically (doubling) to amortize future capacity requests
        NOCALL_LISTS = Arrays.copyOf(NOCALL_LISTS,Math.max(capacity,currentCapacity << 1) + 1);
        // fill every newly created slot i with an immutable list of i NO_CALL alleles
        for (int i = currentCapacity + 1; i < NOCALL_LISTS.length; i++)
            NOCALL_LISTS[i] = Collections.nCopies(i,Allele.NO_CALL);
    }
+
+    /**
+     * Returns a {@link Allele#NO_CALL NO_CALL} allele list provided the ploidy.
+     *
+     * @param ploidy the required ploidy.
+     *
+     * @return never {@code null}, but an empty list if {@code ploidy} is equal or less than 0. The returned list
+     *   might or might not be mutable.
+     */
+    public static List<Allele> noCallAlleles(final int ploidy) {
+        if (NOCALL_LISTS.length <= ploidy)
+            ensureNoCallListsCapacity(ploidy);
+        return NOCALL_LISTS[ploidy];
+    }
+
+
+    /**
+     * This is just a safe wrapper around GenotypeLikelihoods.calculatePLindex()
+     *
+     * @param originalIndex1   the index of the first allele
+     * @param originalIndex2   the index of the second allele
+     * @return the PL index
+     */
+    protected static int calculatePLindexFromUnorderedIndexes(final int originalIndex1, final int originalIndex2) {
+        // we need to make sure they are ordered correctly
+        return ( originalIndex2 < originalIndex1 ) ? GenotypeLikelihoods.calculatePLindex(originalIndex2, originalIndex1) : GenotypeLikelihoods.calculatePLindex(originalIndex1, originalIndex2);
+    }
+
+    public static String mergedSampleName(String trackName, String sampleName, boolean uniquify ) {
+        return uniquify ? sampleName + "." + trackName : sampleName;
+    }
+
+    /**
+     * Trim the alleles in inputVC from the reverse direction
+     *
+     * @param inputVC a non-null input VC whose alleles might need a haircut
+     * @return a non-null VariantContext (may be == to inputVC) with alleles trimmed up
+     */
+    public static VariantContext reverseTrimAlleles( final VariantContext inputVC ) {
+        return trimAlleles(inputVC, false, true);
+    }
+
+    /**
+     * Trim the alleles in inputVC from the forward direction
+     *
+     * @param inputVC a non-null input VC whose alleles might need a haircut
+     * @return a non-null VariantContext (may be == to inputVC) with alleles trimmed up
+     */
+    public static VariantContext forwardTrimAlleles( final VariantContext inputVC ) {
+        return trimAlleles(inputVC, true, false);
+    }
+
+    /**
+     * Trim the alleles in inputVC forward and reverse, as requested
+     *
+     * @param inputVC a non-null input VC whose alleles might need a haircut
+     * @param trimForward should we trim up the alleles from the forward direction?
+     * @param trimReverse should we trim up the alleles from the reverse direction?
+     * @return a non-null VariantContext (may be == to inputVC) with trimmed up alleles
+     */
+    @Ensures("result != null")
+    public static VariantContext trimAlleles(final VariantContext inputVC, final boolean trimForward, final boolean trimReverse) {
+        if ( inputVC == null ) throw new IllegalArgumentException("inputVC cannot be null");
+
+        if ( inputVC.getNAlleles() <= 1 || inputVC.isSNP() )
+            return inputVC;
+
+        // see whether we need to trim common reference base from all alleles
+        final int revTrim = trimReverse ? computeReverseClipping(inputVC.getAlleles(), inputVC.getReference().getDisplayString().getBytes()) : 0;
+        final VariantContext revTrimVC = trimAlleles(inputVC, -1, revTrim);
+        final int fwdTrim = trimForward ? computeForwardClipping(revTrimVC.getAlleles()) : -1;
+        final VariantContext vc= trimAlleles(revTrimVC, fwdTrim, 0);
+        return vc;
+    }
+
    /**
     * Trim up alleles in inputVC, cutting out all bases up to fwdTrimEnd inclusive and
     * the last revTrim bases from the end
     *
     * @param inputVC a non-null input VC
     * @param fwdTrimEnd bases up to this index (can be -1) will be removed from the start of all alleles
     * @param revTrim the last revTrim bases of each allele will be clipped off as well
     * @return a non-null VariantContext (may be == to inputVC) with trimmed up alleles
     */
    @Requires({"inputVC != null"})
    @Ensures("result != null")
    protected static VariantContext trimAlleles(final VariantContext inputVC,
                                                final int fwdTrimEnd,
                                                final int revTrim) {
        if( fwdTrimEnd == -1 && revTrim == 0 ) // nothing to do, so just return inputVC unmodified
            return inputVC;

        final List<Allele> alleles = new LinkedList<>();
        final Map<Allele, Allele> originalToTrimmedAlleleMap = new HashMap<>();

        for (final Allele a : inputVC.getAlleles()) {
            if (a.isSymbolic()) {
                // symbolic alleles have no concrete bases to trim; keep them as-is
                alleles.add(a);
                originalToTrimmedAlleleMap.put(a, a);
            } else {
                // get bases for current allele and create a new one with trimmed bases
                final byte[] newBases = Arrays.copyOfRange(a.getBases(), fwdTrimEnd+1, a.length()-revTrim);
                final Allele trimmedAllele = Allele.create(newBases, a.isReference());
                alleles.add(trimmedAllele);
                originalToTrimmedAlleleMap.put(a, trimmedAllele);
            }
        }

        // now we can recreate new genotypes with trimmed alleles
        final AlleleMapper alleleMapper = new AlleleMapper(originalToTrimmedAlleleMap);
        final GenotypesContext genotypes = updateGenotypesWithMappedAlleles(inputVC.getGenotypes(), alleleMapper);

        // forward trimming shifts the record's start; the stop is recomputed from the
        // trimmed reference allele's length (alleles.get(0) is the reference)
        final int start = inputVC.getStart() + (fwdTrimEnd + 1);
        final VariantContextBuilder builder = new VariantContextBuilder(inputVC);
        builder.start(start);
        builder.stop(start + alleles.get(0).length() - 1);
        builder.alleles(alleles);
        builder.genotypes(genotypes);
        return builder.make();
    }
+
+    @Requires("originalGenotypes != null && alleleMapper != null")
+    protected static GenotypesContext updateGenotypesWithMappedAlleles(final GenotypesContext originalGenotypes, final AlleleMapper alleleMapper) {
+        final GenotypesContext updatedGenotypes = GenotypesContext.create(originalGenotypes.size());
+
+        for ( final Genotype genotype : originalGenotypes ) {
+            final List<Allele> updatedAlleles = alleleMapper.remap(genotype.getAlleles());
+            updatedGenotypes.add(new GenotypeBuilder(genotype).alleles(updatedAlleles).make());
+        }
+
+        return updatedGenotypes;
+    }
+
    /**
     * Compute how many bases can be clipped off the right end of every allele while
     * leaving each allele with at least one base.
     *
     * Symbolic alleles are skipped. Clipping stops as soon as any allele's trailing
     * base disagrees with the reference's trailing base at the same offset.
     *
     * @param unclippedAlleles the alleles to examine
     * @param ref              the reference bases the alleles are anchored against
     * @return the number of shared trailing bases that may safely be removed, or -1
     *         if clipping would consume the entire reference
     */
    public static int computeReverseClipping(final List<Allele> unclippedAlleles, final byte[] ref) {
        int clipping = 0;
        boolean stillClipping = true;

        while ( stillClipping ) {
            for ( final Allele a : unclippedAlleles ) {
                if ( a.isSymbolic() )
                    continue;

                // we need to ensure that we don't reverse clip out all of the bases from an allele because we then will have the wrong
                // position set for the VariantContext (although it's okay to forward clip it all out, because the position will be fine).
                if ( a.length() - clipping == 0 )
                    return clipping - 1;

                if ( a.length() - clipping <= 0 || a.length() == 0 ) {
                    stillClipping = false;
                }
                else if ( ref.length == clipping ) {
                    // we've consumed the entire reference; signal that no clipping is valid
                    return -1;
                }
                else if ( a.getBases()[a.length()-clipping-1] != ref[ref.length-clipping-1] ) {
                    // trailing base disagrees with the reference, so this round of clipping fails
                    stillClipping = false;
                }
            }
            if ( stillClipping )
                clipping++;
        }

        return clipping;
    }
+
+    /**
+     * Clip out any unnecessary bases off the front of the alleles
+     *
+     * The VCF spec represents alleles as block substitutions, replacing AC with A for a
+     * 1 bp deletion of the C.  However, it's possible that we'd end up with alleles that
+     * contain extra bases on the left, such as GAC/GA to represent the same 1 bp deletion.
+     * This routine finds an offset among all alleles that can be safely trimmed
+     * off the left of each allele and still represent the same block substitution.
+     *
+     * A/C => A/C
+     * AC/A => AC/A
+     * ACC/AC => CC/C
+     * AGT/CAT => AGT/CAT
+     * <DEL>/C => <DEL>/C
+     *
+     * @param unclippedAlleles a non-null list of alleles that we want to clip
+     * @return the offset into the alleles where we can safely clip, inclusive, or
+     *   -1 if no clipping is tolerated.  So, if the result is 0, then we can remove
+     *   the first base of every allele.  If the result is 1, we can remove the
+     *   second base.
+     */
+    public static int computeForwardClipping(final List<Allele> unclippedAlleles) {
+        // cannot clip unless there's at least 1 alt allele
+        if ( unclippedAlleles.size() <= 1 )
+            return -1;
+
+        // we cannot forward clip any set of alleles containing a symbolic allele
+        int minAlleleLength = Integer.MAX_VALUE;
+        for ( final Allele a : unclippedAlleles ) {
+            if ( a.isSymbolic() )
+                return -1;
+            minAlleleLength = Math.min(minAlleleLength, a.length());
+        }
+
+        final byte[] firstAlleleBases = unclippedAlleles.get(0).getBases();
+        int indexOflastSharedBase = -1;
+
+        // the -1 to the stop is that we can never clip off the right most base
+        for ( int i = 0; i < minAlleleLength - 1; i++) {
+            final byte base = firstAlleleBases[i];
+
+            for ( final Allele allele : unclippedAlleles ) {
+                if ( allele.getBases()[i] != base )
+                    return indexOflastSharedBase;
+            }
+
+            indexOflastSharedBase = i;
+        }
+
+        return indexOflastSharedBase;
+    }
+
+    public static double computeHardyWeinbergPvalue(VariantContext vc) {
+        if ( vc.getCalledChrCount() == 0 )
+            return 0.0;
+        return HardyWeinbergCalculation.hwCalculate(vc.getHomRefCount(), vc.getHetCount(), vc.getHomVarCount());
+    }
+
+    public static boolean requiresPaddingBase(final List<String> alleles) {
+
+        // see whether one of the alleles would be null if trimmed through
+
+        for ( final String allele : alleles ) {
+            if ( allele.isEmpty() )
+                return true;
+        }
+
+        int clipping = 0;
+        Character currentBase = null;
+
+        while ( true ) {
+            for ( final String allele : alleles ) {
+                if ( allele.length() - clipping == 0 )
+                    return true;
+
+                char myBase = allele.charAt(clipping);
+                if ( currentBase == null )
+                    currentBase = myBase;
+                else if ( currentBase != myBase )
+                    return false;
+            }
+
+            clipping++;
+            currentBase = null;
+        }
+    }
+
+    private final static Map<String, Object> subsetAttributes(final CommonInfo igc, final Collection<String> keysToPreserve) {
+        Map<String, Object> attributes = new HashMap<>(keysToPreserve.size());
+        for ( final String key : keysToPreserve  ) {
+            if ( igc.hasAttribute(key) )
+                attributes.put(key, igc.getAttribute(key));
+        }
+        return attributes;
+    }
+
    /**
     * @deprecated use variant context builder version instead
     * @param vc                  the variant context
     * @param keysToPreserve      the keys to preserve
     * @return a pruned version of the original variant context
     */
    @Deprecated
    public static VariantContext pruneVariantContext(final VariantContext vc, Collection<String> keysToPreserve ) {
        // convenience wrapper: delegates to the builder-based overload and materializes the result
        return pruneVariantContext(new VariantContextBuilder(vc), keysToPreserve).make();
    }
+
+    public static VariantContextBuilder pruneVariantContext(final VariantContextBuilder builder, Collection<String> keysToPreserve ) {
+        final VariantContext vc = builder.make();
+        if ( keysToPreserve == null ) keysToPreserve = Collections.emptyList();
+
+        // VC info
+        final Map<String, Object> attributes = subsetAttributes(vc.getCommonInfo(), keysToPreserve);
+
+        // Genotypes
+        final GenotypesContext genotypes = GenotypesContext.create(vc.getNSamples());
+        for ( final Genotype g : vc.getGenotypes() ) {
+            final GenotypeBuilder gb = new GenotypeBuilder(g);
+            // remove AD, DP, PL, and all extended attributes, keeping just GT and GQ
+            gb.noAD().noDP().noPL().noAttributes();
+            genotypes.add(gb.make());
+        }
+
+        return builder.genotypes(genotypes).attributes(attributes);
+    }
+
+    public static boolean allelesAreSubset(VariantContext vc1, VariantContext vc2) {
+        // if all alleles of vc1 are a contained in alleles of vc2, return true
+        if (!vc1.getReference().equals(vc2.getReference()))
+            return false;
+
+        for (final Allele a :vc1.getAlternateAlleles()) {
+            if (!vc2.getAlternateAlleles().contains(a))
+                return false;
+        }
+
+        return true;
+    }
+
    /**
     * Partitions the given variant contexts by their {@link VariantContext.Type}, merging
     * allele-subset records across types as it goes.
     *
     * A VC whose alleles are a subset of an already-bucketed VC of a different type is moved
     * into (or kept in) the bucket of the superset VC's type, so related records end up together.
     *
     * @param VCs the non-null collection of variant contexts to partition
     * @return a map from type to the (non-empty) list of VCs assigned to that type
     * @throws IllegalArgumentException if {@code VCs} is {@code null}
     */
    public static Map<VariantContext.Type, List<VariantContext>> separateVariantContextsByType( final Collection<VariantContext> VCs ) {
        if( VCs == null ) { throw new IllegalArgumentException("VCs cannot be null."); }

        final HashMap<VariantContext.Type, List<VariantContext>> mappedVCs = new HashMap<>();
        for ( final VariantContext vc : VCs ) {
            VariantContext.Type vcType = vc.getType();

            // look at previous variant contexts of different type. If:
            // a) otherVC has alleles which are subset of vc, remove otherVC from its list and add otherVC to vc's list
            // b) vc has alleles which are subset of otherVC. Then, add vc to otherVC's type list (rather, do nothing since vc will be added automatically to its list)
            // c) neither: do nothing, just add vc to its own list
            boolean addtoOwnList = true;
            for (final VariantContext.Type type : VariantContext.Type.values()) {
                if (type.equals(vcType))
                    continue;

                if (!mappedVCs.containsKey(type))
                    continue;

                List<VariantContext> vcList = mappedVCs.get(type);
                for (int k=0; k <  vcList.size(); k++) {
                    VariantContext otherVC = vcList.get(k);
                    if (allelesAreSubset(otherVC,vc)) {
                        // otherVC has a type different than vc and its alleles are a subset of vc: remove otherVC from its list and add it to vc's type list
                        vcList.remove(k);
                        // avoid having empty lists
                        if (vcList.isEmpty())
                            mappedVCs.remove(type);
                        if ( !mappedVCs.containsKey(vcType) )
                            mappedVCs.put(vcType, new ArrayList<VariantContext>());
                        mappedVCs.get(vcType).add(otherVC);
                        // NOTE: only the first subset VC found in this list is moved per outer iteration
                        break;
                    }
                    else if (allelesAreSubset(vc,otherVC)) {
                        // vc has a type different than otherVC and its alleles are a subset of VC: add vc to otherVC's type list and don't add to its own
                        mappedVCs.get(type).add(vc);
                        addtoOwnList = false;
                        break;
                    }
                }
            }
            // default case (c): vc was not absorbed by any other type's bucket
            if (addtoOwnList) {
                if ( !mappedVCs.containsKey(vcType) )
                    mappedVCs.put(vcType, new ArrayList<VariantContext>());
                mappedVCs.get(vcType).add(vc);
            }
        }

        return mappedVCs;
    }
+
+    public static VariantContext purgeUnallowedGenotypeAttributes(VariantContext vc, Set<String> allowedAttributes) {
+        if ( allowedAttributes == null )
+            return vc;
+
+        final GenotypesContext newGenotypes = GenotypesContext.create(vc.getNSamples());
+        for ( final Genotype genotype : vc.getGenotypes() ) {
+            final Map<String, Object> attrs = new HashMap<>();
+            for ( final Map.Entry<String, Object> attr : genotype.getExtendedAttributes().entrySet() ) {
+                if ( allowedAttributes.contains(attr.getKey()) )
+                    attrs.put(attr.getKey(), attr.getValue());
+            }
+            newGenotypes.add(new GenotypeBuilder(genotype).attributes(attrs).make());
+        }
+
+        return new VariantContextBuilder(vc).genotypes(newGenotypes).make();
+    }
+
+    protected static class AlleleMapper {
+        private VariantContext vc = null;
+        private Map<Allele, Allele> map = null;
+        public AlleleMapper(VariantContext vc)          { this.vc = vc; }
+        public AlleleMapper(Map<Allele, Allele> map)    { this.map = map; }
+        public boolean needsRemapping()                 { return this.map != null; }
+        public Collection<Allele> values()              { return map != null ? map.values() : vc.getAlleles(); }
+        public Allele remap(Allele a)                   { return map != null && map.containsKey(a) ? map.get(a) : a; }
+
+        public List<Allele> remap(List<Allele> as) {
+            List<Allele> newAs = new ArrayList<>();
+            for ( final Allele a : as ) {
+                //System.out.printf("  Remapping %s => %s%n", a, remap(a));
+                newAs.add(remap(a));
+            }
+            return newAs;
+        }
+
+        /**
+         * @return the list of unique values
+         */
+        public List<Allele> getUniqueMappedAlleles() {
+            if ( map == null )
+                return Collections.emptyList();
+            return new ArrayList<>(new HashSet<>(map.values()));
+        }
+    }
+
+    private static class CompareByPriority implements Comparator<VariantContext>, Serializable {
+        List<String> priorityListOfVCs;
+        public CompareByPriority(List<String> priorityListOfVCs) {
+            this.priorityListOfVCs = priorityListOfVCs;
+        }
+
+        private int getIndex(VariantContext vc) {
+            int i = priorityListOfVCs.indexOf(vc.getSource());
+            if ( i == -1 ) throw new IllegalArgumentException("Priority list " + priorityListOfVCs + " doesn't contain variant context " + vc.getSource());
+            return i;
+        }
+
+        public int compare(VariantContext vc1, VariantContext vc2) {
+            return Integer.valueOf(getIndex(vc1)).compareTo(getIndex(vc2));
+        }
+    }
+
+    /**
+     * For testing purposes only.  Create a site-only VariantContext at contig:start containing alleles
+     *
+     * @param name the name of the VC
+     * @param contig the contig for the VC
+     * @param start the start of the VC
+     * @param alleleStrings a non-null, non-empty list of strings for the alleles.  The first will be the ref allele, and others the
+     *                      alt.  Will compute the stop of the VC from the length of the reference allele
+     * @return a non-null VariantContext
+     */
+    public static VariantContext makeFromAlleles(final String name, final String contig, final int start, final List<String> alleleStrings) {
+        if ( alleleStrings == null || alleleStrings.isEmpty() )
+            throw new IllegalArgumentException("alleleStrings must be non-empty, non-null list");
+
+        final List<Allele> alleles = new LinkedList<>();
+        final int length = alleleStrings.get(0).length();
+
+        boolean first = true;
+        for ( final String alleleString : alleleStrings ) {
+            alleles.add(Allele.create(alleleString, first));
+            first = false;
+        }
+      return new VariantContextBuilder(name, contig, start, start+length-1, alleles).make();
+    }
+
+    /**
+     * Splits the alleles for the provided variant context into its primitive parts.
+     * Requires that the input VC be bi-allelic, so calling methods should first call splitVariantContextToBiallelics() if needed.
+     * Currently works only for MNPs.
+     *
+     * @param vc  the non-null VC to split
+     * @return a non-empty list of VCs split into primitive parts or the original VC otherwise
+     */
+    public static List<VariantContext> splitIntoPrimitiveAlleles(final VariantContext vc) {
+        if ( vc == null )
+            throw new IllegalArgumentException("Trying to break a null Variant Context into primitive parts");
+
+        if ( !vc.isBiallelic() )
+            throw new IllegalArgumentException("Trying to break a multi-allelic Variant Context into primitive parts");
+
+        // currently only works for MNPs
+        if ( !vc.isMNP() )
+            return Arrays.asList(vc);
+
+        final byte[] ref = vc.getReference().getBases();
+        final byte[] alt = vc.getAlternateAllele(0).getBases();
+
+        if ( ref.length != alt.length )
+            throw new IllegalStateException("ref and alt alleles for MNP have different lengths");
+
+        final List<VariantContext> result = new ArrayList<>(ref.length);
+
+        for ( int i = 0; i < ref.length; i++ ) {
+
+            // if the ref and alt bases are different at a given position, create a new SNP record (otherwise do nothing)
+            if ( ref[i] != alt[i] ) {
+
+                // create the ref and alt SNP alleles
+                final Allele newRefAllele = Allele.create(ref[i], true);
+                final Allele newAltAllele = Allele.create(alt[i], false);
+
+                // create a new VariantContext with the new SNP alleles
+                final VariantContextBuilder newVC = new VariantContextBuilder(vc).start(vc.getStart() + i).stop(vc.getStart() + i).alleles(Arrays.asList(newRefAllele, newAltAllele));
+
+                // create new genotypes with updated alleles
+                final Map<Allele, Allele> alleleMap = new HashMap<>();
+                alleleMap.put(vc.getReference(), newRefAllele);
+                alleleMap.put(vc.getAlternateAllele(0), newAltAllele);
+                final GenotypesContext newGenotypes = updateGenotypesWithMappedAlleles(vc.getGenotypes(), new AlleleMapper(alleleMap));
+
+                result.add(newVC.genotypes(newGenotypes).make());
+            }
+        }
+
+        if ( result.isEmpty() )
+            result.add(vc);
+
+        return result;
+    }
+
+    /**
+     * Are vc1 and 2 equal including their position and alleles?
+     * @param vc1 non-null VariantContext
+     * @param vc2 non-null VariantContext
+     * @return true if vc1 and vc2 are equal, false otherwise
+     */
+    public static boolean equalSites(final VariantContext vc1, final VariantContext vc2) {
+        if ( vc1 == null ) throw new IllegalArgumentException("vc1 cannot be null");
+        if ( vc2 == null ) throw new IllegalArgumentException("vc2 cannot be null");
+
+        if ( vc1.getStart() != vc2.getStart() ) return false;
+        if ( vc1.getEnd() != vc2.getEnd() ) return false;
+        if ( ! vc1.getChr().equals(vc2.getChr())) return false;
+        if ( ! vc1.getAlleles().equals(vc2.getAlleles()) ) return false;
+        return true;
+    }
+
+    /**
+     * Returns the absolute 0-based index of an allele.
+     *
+     * <p/>
+     * If the allele is equal to the reference, the result is 0, if it equal to the first alternative the result is 1
+     * and so forth.
+     * <p/>
+     * Therefore if you want the 0-based index within the alternative alleles you need to do the following:
+     *
+     * <p/>
+     * You can indicate whether the Java object reference comparator {@code ==} can be safelly used by setting {@code useEquals} to {@code false}.
+     *
+     * @param vc the target variant context.
+     * @param allele the target allele.
+     * @param ignoreRefState whether the reference states of the allele is important at all. Has no effect if {@code useEquals} is {@code false}.
+     * @param considerRefAllele whether the reference allele should be considered. You should set it to {@code false} if you are only interested in alternative alleles.
+     * @param useEquals whether equal method should be used in the search: {@link Allele#equals(Allele,boolean)}.
+     *
+     * @throws IllegalArgumentException if {@code allele} is {@code null}.
+     * @return {@code -1} if there is no such allele that satify those criteria, a value between 0 and {@link VariantContext#getNAlleles()} {@code -1} otherwise.
+     */
+    public static int indexOfAllele(final VariantContext vc, final Allele allele, final boolean ignoreRefState, final boolean considerRefAllele, final boolean useEquals) {
+        if (allele == null) throw new IllegalArgumentException();
+        return useEquals ? indexOfEqualAllele(vc,allele,ignoreRefState,considerRefAllele) : indexOfSameAllele(vc,allele,considerRefAllele);
+    }
+
+    /**
+     * Returns the relative 0-based index of an alternative allele.
+     * <p/>
+     * The the query allele is the same as the first alternative allele, the result is 0,
+     * if it is equal to the second 1 and so forth.
+     *
+     *
+     * <p/>
+     * Notice that the ref-status of the query {@code allele} is ignored.
+     *
+     * @param vc the target variant context.
+     * @param allele the query allele.
+     * @param useEquals  whether equal method should be used in the search: {@link Allele#equals(Allele,boolean)}.
+     *
+     * @throws IllegalArgumentException if {@code allele} is {@code null}.
+     *
+     * @return {@code -1} if there is no such allele that satify those criteria, a value between 0 and the number
+     *  of alternative alleles - 1.
+     */
+    public static int indexOfAltAllele(final VariantContext vc, final Allele allele, final boolean useEquals) {
+        final int absoluteIndex = indexOfAllele(vc,allele,true,false,useEquals);
+        return absoluteIndex == -1 ? -1 : absoluteIndex - 1;
+    }
+
+    // Impements index search using equals.
+    private static int indexOfEqualAllele(final VariantContext vc, final Allele allele, final boolean ignoreRefState,
+                                          final boolean considerRefAllele) {
+        int i = 0;
+        for (final Allele a : vc.getAlleles())
+            if (a.equals(allele,ignoreRefState))
+                return i == 0 ? (considerRefAllele ? 0 : -1) : i;
+            else
+                i++;
+        return -1;
+    }
+
+    // Implements index search using ==.
+    private static int indexOfSameAllele(final VariantContext vc, final Allele allele, final boolean considerRefAllele) {
+        int i = 0;
+
+        for (final Allele a : vc.getAlleles())
+            if (a == allele)
+                return i == 0 ? (considerRefAllele ? 0 : -1) : i;
+            else
+                i++;
+
+        return -1;
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/variant/HomoSapiensConstants.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/variant/HomoSapiensConstants.java
new file mode 100644
index 0000000..a8695fe
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/variant/HomoSapiensConstants.java
@@ -0,0 +1,51 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.variant;
+
/**
 * <i>Homo sapiens</i> genome constants.
 *
 * <p>NOTE: reference to these constants is an indication that your code is (human) species assumption dependant.</p>
 *
 * @author Valentin Ruano-Rubio &lt;valentin at broadinstitute.org&gt;
 */
public class HomoSapiensConstants {

    /**
     * Standard heterozygous rate for SNP variation.
     */
    public static final double SNP_HETEROZYGOSITY = 1e-3;

    /**
     * Standard heterozygous rate for INDEL variation.
     */
    public static final double INDEL_HETEROZYGOSITY = 1.0/8000;

    /**
     * Standard ploidy for autosomal chromosomes.
     */
    public static final int DEFAULT_PLOIDY = 2;

    // constants holder: prevent instantiation of this utility class
    private HomoSapiensConstants() {}
}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/variant/VCIterable.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/variant/VCIterable.java
new file mode 100644
index 0000000..85f2ba3
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/variant/VCIterable.java
@@ -0,0 +1,92 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.variant;
+
+import htsjdk.tribble.FeatureCodec;
+import htsjdk.tribble.FeatureCodecHeader;
+import htsjdk.variant.variantcontext.VariantContext;
+import htsjdk.variant.vcf.VCFHeader;
+import org.broadinstitute.gatk.utils.collections.Pair;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.util.Iterator;
+
+/*
+* NOTE: Refactored out of GATKVCFUtils
+*/
+public class VCIterable<SOURCE> implements Iterable<VariantContext>, Iterator<VariantContext> {
+    final SOURCE source;
+    final FeatureCodec<VariantContext, SOURCE> codec;
+    final VCFHeader header;
+
+    VCIterable(final SOURCE source, final FeatureCodec<VariantContext, SOURCE> codec, final VCFHeader header) {
+        this.source = source;
+        this.codec = codec;
+        this.header = header;
+    }
+
+    /**
+     * Utility class to read all of the VC records from a file
+     *
+     * @param file
+     * @param codec
+     * @return
+     * @throws java.io.IOException
+     */
+    public final static <SOURCE> Pair<VCFHeader, VCIterable<SOURCE>> readAllVCs( final File file, final FeatureCodec<VariantContext, SOURCE> codec) throws IOException {
+        // read in the features
+        SOURCE source = codec.makeSourceFromStream(new FileInputStream(file));
+        FeatureCodecHeader header = codec.readHeader(source);
+        final VCFHeader vcfHeader = (VCFHeader)header.getHeaderValue();
+        return new Pair<>(vcfHeader, new VCIterable<>(source, codec, vcfHeader));
+    }
+
+    @Override
+    public Iterator<VariantContext> iterator() {
+        return this;
+    }
+
+    @Override
+    public boolean hasNext() {
+        return ! codec.isDone(source);
+    }
+
+    @Override
+    public VariantContext next() {
+        try {
+            final VariantContext vc = codec.decode(source);
+            return vc == null ? null : vc.fullyDecode(header, false);
+        } catch ( IOException e ) {
+            throw new RuntimeException(e);
+        }
+    }
+
+    @Override
+    public void remove() {
+    }
+}
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/wiggle/WiggleHeader.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/wiggle/WiggleHeader.java
new file mode 100644
index 0000000..4368ffd
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/wiggle/WiggleHeader.java
@@ -0,0 +1,56 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.wiggle;
+
/**
 * A class for defining the header values for a wiggle graph file (see UCSC). The optional fields are:
 * name, description, visibility, color, altColor, priority, autoscale, alwaysZero, gridDefault,
 * maxHeightPixels,graphType,viewLimits,yLineMark,yLineOnOff,windowingFunction,smoothingWindow
 *
 * For now only support name, description
 *
 * @author chartl
 * @since Jul 21, 2010
 */
public class WiggleHeader {
    // defines the type of the track (for IGV or UCSC); wiggle_0 is the 'only' type of wiggle
    static final String type = "wiggle_0";
    // a label for the track
    private final String name;
    // a description of what the track is
    private final String description;

    public WiggleHeader(String name, String description) {
        this.name = name;
        this.description = description;
    }

    /** Renders the header as the single 'track' line expected at the top of a wiggle file. */
    @Override
    public String toString() {
        return String.format("track type=%s name=\"%s\" description=\"%s\"",type,name,description);
    }

}
+
diff --git a/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/wiggle/WiggleWriter.java b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/wiggle/WiggleWriter.java
new file mode 100644
index 0000000..c7071f1
--- /dev/null
+++ b/public/gatk-utils/src/main/java/org/broadinstitute/gatk/utils/wiggle/WiggleWriter.java
@@ -0,0 +1,117 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.wiggle;
+
+import org.broadinstitute.gatk.utils.GenomeLoc;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+import org.broadinstitute.gatk.utils.exceptions.UserException;
+
+import java.io.*;
+
/**
 * Manages the output of wiggle files. Due to the wiggle spec (each wiggle file must be one chromosome), this writer
 * will throw exceptions (or output multiple files?)
 *
 * todo -- currently no support for fixed step (special case of variable step)
 * todo -- currently no support for span, start, or step
 *
 * @author chartl
 * @since Jul 21, 2010
 */
public class WiggleWriter {

    // the two step declarations the wiggle format defines; repr is the literal keyword written to the file
    enum StepType {
        fixed("fixedStep"),variable("variableStep");

        String repr;

        StepType(String repr) {
            this.repr = repr;
        }

        public String toString() {
            return repr;
        }
    }

    private WiggleHeader wHeader = null;
    // the header that we need to write prior to the file; and on future files (if multiple outputs ??)
    private BufferedWriter wWriter = null;
    // the file to which we are writing
    private GenomeLoc firstLoc = null;
    // the first genome loc the writer saw; need to cache this to compare contigs to preserve spec
    private StepType type = StepType.variable;
    // the type of step for the wiggle file, todo -- allow this to change

    // path of the output file, kept only for error reporting; "unknown" when writing to a raw stream
    private String myFile = "unknown";

    /**
     * Opens outputFile for writing wiggle data.
     * @throws UserException.CouldNotCreateOutputFile if the file cannot be created
     */
    public WiggleWriter(File outputFile) {
        myFile = outputFile.getAbsolutePath();
        FileOutputStream outputStream;
        try {
            outputStream = new FileOutputStream(outputFile);
        } catch ( FileNotFoundException e ) {
            throw new UserException.CouldNotCreateOutputFile(outputFile, "Unable to create a wiggle file ", e);
        }

        wWriter = new BufferedWriter(new OutputStreamWriter(outputStream));
    }

    // NOTE(review): both constructors use the platform default charset — presumably ASCII-safe
    // wiggle content, but confirm if non-ASCII track names are possible
    public WiggleWriter(OutputStream out) {
       wWriter = new BufferedWriter(new OutputStreamWriter(out)); 
    }

    /** Writes the 'track' header line; call this once before any data lines. */
    public void writeHeader(WiggleHeader header) {
        wHeader = header;
        write(wWriter,header.toString());
    }

    /**
     * Writes one data point.  The first call also emits the step declaration line
     * ("variableStep chrom=...") for the contig of loc; subsequent calls must stay on
     * that same contig, per the wiggle spec.
     *
     * @throws ReviewedGATKException if loc is on a different contig than the first data point
     */
    public void writeData(GenomeLoc loc, Object dataPoint) {
        if ( this.firstLoc == null ) {
            // first data point: remember the contig and emit the step declaration before the value
            firstLoc = loc;
            write(wWriter,String.format("%n"));
            write(wWriter,String.format("%s\tchrom=%s",type.toString(),firstLoc.getContig()));
            write(wWriter,String.format("%n"));
            write(wWriter,String.format("%d\t%s",loc.getStart(),dataPoint.toString()));
        } else if ( loc.compareContigs(firstLoc) == 0 ) {
            // same contig as before: newline-separated "position<TAB>value" record
            write(wWriter,String.format("%n"));
            write(wWriter,String.format("%d\t%s",loc.getStart(),dataPoint.toString()));
        } else {
            // todo -- maybe allow this to open a new file for the new chromosome?
            throw new ReviewedGATKException("Attempting to write multiple contigs into wiggle file, first contig was "+firstLoc.getContig()+" most recent "+loc.getContig());
        }
    }

    // low-level write helper; converts IOExceptions into the GATK user-facing exception type
    private void write(BufferedWriter w, String s) {
        try {
            w.write(s);
            w.flush();
            // flush required so writing to output stream will work
        } catch (IOException e) {
            throw new UserException.CouldNotCreateOutputFile(myFile, String.format("Error writing the wiggle line %s", s), e);
        }
    }
}
diff --git a/public/gatk-utils/src/test/java/htsjdk/samtools/GATKBAMFileSpanUnitTest.java b/public/gatk-utils/src/test/java/htsjdk/samtools/GATKBAMFileSpanUnitTest.java
new file mode 100644
index 0000000..f0a1c9c
--- /dev/null
+++ b/public/gatk-utils/src/test/java/htsjdk/samtools/GATKBAMFileSpanUnitTest.java
@@ -0,0 +1,254 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package htsjdk.samtools;
+
+import org.testng.Assert;
+import org.testng.annotations.Test;
+
+/**
+ * Tests of functionality of union, intersection operators.
+ *
+ * Exercises the set algebra (union, intersection, minus) of GATKBAMFileSpan,
+ * an ordered list of GATKChunk file regions.
+ *
+ * NOTE(review): the chunk endpoints below look like BGZF virtual file offsets
+ * ((compressed block offset &lt;&lt; 16) | offset within the uncompressed block),
+ * which is why coordinates are built with 16-bit shifts -- confirm against
+ * GATKChunk's coordinate scheme.
+ */
+public class GATKBAMFileSpanUnitTest {
+    @Test
+    public void testUnionOfEmptyFileSpans() {
+        // Union of two empty spans must remain empty.
+        GATKBAMFileSpan empty1 = new GATKBAMFileSpan();
+        GATKBAMFileSpan empty2 = new GATKBAMFileSpan();
+        GATKBAMFileSpan union = empty1.union(empty2);
+        Assert.assertEquals(union.getGATKChunks().size(),0,"Elements inserted in union of two empty sets");
+    }
+
+    @Test
+    public void testUnionOfNonOverlappingFileSpans() {
+        // Disjoint, non-adjacent regions must stay as two separate chunks.
+        GATKBAMFileSpan regionOne = new GATKBAMFileSpan(new GATKChunk(0,65535));
+        GATKBAMFileSpan regionTwo = new GATKBAMFileSpan(new GATKChunk(1<<16,(1<<16)|65535));
+        GATKBAMFileSpan union = regionOne.union(regionTwo);
+        Assert.assertEquals(union.getGATKChunks().size(),2,"Discontiguous elements were merged");
+        Assert.assertEquals(union.getGATKChunks().get(0),regionOne.getGATKChunks().get(0),"Wrong chunk was first in list");
+        Assert.assertEquals(union.getGATKChunks().get(1),regionTwo.getGATKChunks().get(0),"Wrong chunk was second in list");
+    }
+
+    @Test
+    public void testUnionOfContiguousFileSpans() {
+        // Region 1 ends at position adjacent to Region 2 start:
+        // |---1----|---2----|
+
+        GATKBAMFileSpan regionOne = new GATKBAMFileSpan(new GATKChunk(0,1<<16));
+        GATKBAMFileSpan regionTwo = new GATKBAMFileSpan(new GATKChunk(1<<16,(1<<16)|65535));
+        GATKBAMFileSpan union = regionOne.union(regionTwo);
+        Assert.assertEquals(union.getGATKChunks().size(),1,"Elements to be merged were not.");
+        Assert.assertEquals(union.getGATKChunks().get(0),new GATKChunk(0,(1<<16)|65535));
+    }
+
+    @Test
+    public void testUnionOfFileSpansFirstRegionEndsWithinSecondRegion() {
+        // Region 1 ends within Region 2:
+        //        |---2----|
+        // |---1----|
+
+        GATKBAMFileSpan regionOne = new GATKBAMFileSpan(new GATKChunk(0,(1<<16)|32767));
+        GATKBAMFileSpan regionTwo = new GATKBAMFileSpan(new GATKChunk(1<<16,(1<<16)|65535));
+        GATKBAMFileSpan union = regionOne.union(regionTwo);
+        Assert.assertEquals(union.getGATKChunks().size(),1,"Elements to be merged were not.");
+        Assert.assertEquals(union.getGATKChunks().get(0),new GATKChunk(0,(1<<16)|65535));
+    }
+
+    @Test
+    public void testUnionOfFileSpansFirstRegionEndsAtSecondRegionEnd() {
+        // Region 1 ends at Region 2 end:
+        //        |---2----|
+        // |---1-----------|
+
+        GATKBAMFileSpan regionOne = new GATKBAMFileSpan(new GATKChunk(0,(1<<16)|65535));
+        GATKBAMFileSpan regionTwo = new GATKBAMFileSpan(new GATKChunk(1<<16,(1<<16)|65535));
+        GATKBAMFileSpan union = regionOne.union(regionTwo);
+        Assert.assertEquals(union.getGATKChunks().size(),1,"Elements to be merged were not.");
+        Assert.assertEquals(union.getGATKChunks().get(0),new GATKChunk(0,(1<<16)|65535));
+    }
+
+    @Test
+    public void testUnionOfFileSpansFirstRegionEndsAfterSecondRegionEnd() {
+        // Region 1 ends after Region 2 end:
+        //        |---2----|
+        // |---1---------------|
+
+        GATKBAMFileSpan regionOne = new GATKBAMFileSpan(new GATKChunk(0,(1<<16)|65535));
+        GATKBAMFileSpan regionTwo = new GATKBAMFileSpan(new GATKChunk(1<<16,(1<<16)|32767));
+        GATKBAMFileSpan union = regionOne.union(regionTwo);
+        Assert.assertEquals(union.getGATKChunks().size(),1,"Elements to be merged were not.");
+        Assert.assertEquals(union.getGATKChunks().get(0),new GATKChunk(0,(1<<16)|65535));
+    }
+
+    @Test
+    public void testUnionOfFileSpansFirstRegionStartsAtSecondRegionStart() {
+        // Region 1 starts at Region 2 start, but ends before Region 2:
+        // |---2--------|
+        // |---1----|
+
+        GATKBAMFileSpan regionOne = new GATKBAMFileSpan(new GATKChunk(1<<16,(1<<16)|32767));
+        GATKBAMFileSpan regionTwo = new GATKBAMFileSpan(new GATKChunk(1<<16,(1<<16)|65535));
+        GATKBAMFileSpan union = regionOne.union(regionTwo);
+        Assert.assertEquals(union.getGATKChunks().size(),1,"Elements to be merged were not.");
+        Assert.assertEquals(union.getGATKChunks().get(0),new GATKChunk(1<<16,(1<<16)|65535));
+    }
+
+    @Test
+    public void testUnionOfFileSpansFirstRegionEqualToSecondRegion() {
+        // Region 1 and Region 2 represent the same region:
+        // |---2----|
+        // |---1----|
+
+        GATKBAMFileSpan regionOne = new GATKBAMFileSpan(new GATKChunk(1<<16,(1<<16)|65535));
+        GATKBAMFileSpan regionTwo = new GATKBAMFileSpan(new GATKChunk(1<<16,(1<<16)|65535));
+        GATKBAMFileSpan union = regionOne.union(regionTwo);
+        Assert.assertEquals(union.getGATKChunks().size(),1,"Elements to be merged were not.");
+        Assert.assertEquals(union.getGATKChunks().get(0),new GATKChunk(1<<16,(1<<16)|65535));
+    }
+
+    @Test
+    public void testUnionOfStringOfFileSpans() {
+        // A chunk bridging the gap between two chunks collapses all three into one.
+        GATKBAMFileSpan regionOne = new GATKBAMFileSpan(new GATKChunk[] { new GATKChunk(0,1<<16), new GATKChunk(2<<16,3<<16) });
+        GATKBAMFileSpan regionTwo = new GATKBAMFileSpan(new GATKChunk(1<<16,2<<16));
+        GATKBAMFileSpan union = regionOne.union(regionTwo);
+        Assert.assertEquals(union.getGATKChunks().size(),1,"Elements to be merged were not.");
+        Assert.assertEquals(union.getGATKChunks().get(0),new GATKChunk(0,3<<16));
+    }
+
+    @Test
+    public void testUnionAllFileSpansAdded() {
+        // Chunks far from the merge region must survive the union untouched.
+        GATKBAMFileSpan regionOne = new GATKBAMFileSpan(new GATKChunk[] { new GATKChunk(0,1<<16), new GATKChunk(2<<16,3<<16), new GATKChunk(20<<16,21<<16) });
+        GATKBAMFileSpan regionTwo = new GATKBAMFileSpan(new GATKChunk(1<<16,2<<16));
+        GATKBAMFileSpan union = regionOne.union(regionTwo);
+        Assert.assertEquals(union.getGATKChunks().size(),2,"Elements to be merged were not.");
+        Assert.assertEquals(union.getGATKChunks().get(0),new GATKChunk(0,3<<16));
+        Assert.assertEquals(union.getGATKChunks().get(1),new GATKChunk(20<<16,21<<16));
+    }
+
+    @Test
+    public void testIntersectionOfEmptyFileSpans() {
+        // Intersection of two empty spans must remain empty.
+        GATKBAMFileSpan empty1 = new GATKBAMFileSpan();
+        GATKBAMFileSpan empty2 = new GATKBAMFileSpan();
+        GATKBAMFileSpan intersection = empty1.intersection(empty2);
+        Assert.assertEquals(intersection.getGATKChunks().size(),0,"Elements inserted in intersection of two empty sets");
+    }
+
+    @Test
+    public void testIntersectionOfNonOverlappingFileSpans() {
+        // Adjacent-but-disjoint regions share no coordinates.
+        GATKBAMFileSpan regionOne = new GATKBAMFileSpan(new GATKChunk(0,1<<16));
+        GATKBAMFileSpan regionTwo = new GATKBAMFileSpan(new GATKChunk(1<<16,2<<16));
+        GATKBAMFileSpan intersection = regionOne.intersection(regionTwo);
+        Assert.assertEquals(intersection.getGATKChunks().size(),0,"Elements inserted in intersection of two non-intersecting filespans");
+    }
+
+    @Test
+    public void testIntersectionOfSmallOverlapInFileSpans() {
+        // Overlap of a single virtual-offset unit at the boundary.
+        GATKBAMFileSpan regionOne = new GATKBAMFileSpan(new GATKChunk(0,1<<16));
+        GATKBAMFileSpan regionTwo = new GATKBAMFileSpan(new GATKChunk(65535,2<<16));
+        GATKBAMFileSpan intersection = regionOne.intersection(regionTwo);
+        Assert.assertEquals(intersection.getGATKChunks().size(),1,"No intersection found between two partially overlapping filespans");
+        Assert.assertEquals(intersection.getGATKChunks().get(0),new GATKChunk(65535,1<<16),"Determined intersection is incorrect.");
+    }
+
+    @Test
+    public void testIntersectionOfStrictSubset() {
+        // Intersecting with a superset yields the subset itself.
+        GATKBAMFileSpan regionOne = new GATKBAMFileSpan(new GATKChunk(0,1<<16));
+        GATKBAMFileSpan regionTwo = new GATKBAMFileSpan(new GATKChunk(0,2<<16));
+        GATKBAMFileSpan intersection = regionOne.intersection(regionTwo);
+        Assert.assertEquals(intersection.getGATKChunks().size(),1,"No intersection found between two partially overlapping filespans");
+        Assert.assertEquals(intersection.getGATKChunks().get(0),new GATKChunk(0<<16,1<<16),"Determined intersection is incorrect.");
+
+        // Make sure intersection is symmetric
+        intersection = regionTwo.intersection(regionOne);
+        Assert.assertEquals(intersection.getGATKChunks().size(),1,"No intersection found between two partially overlapping filespans");
+        Assert.assertEquals(intersection.getGATKChunks().get(0),new GATKChunk(0<<16,1<<16),"Determined intersection is incorrect.");
+    }
+
+    @Test
+    public void testIntersectionOfPartialOverlap() {
+        // Region 2 sits strictly inside Region 1 with unaligned boundaries.
+        GATKBAMFileSpan regionOne = new GATKBAMFileSpan(new GATKChunk(0,2<<16));
+        GATKBAMFileSpan regionTwo = new GATKBAMFileSpan(new GATKChunk(0<<16|32768,1<<16|32768));
+        GATKBAMFileSpan intersection = regionOne.intersection(regionTwo);
+        Assert.assertEquals(intersection.getGATKChunks().size(),1,"No intersection found between two partially overlapping filespans");
+        Assert.assertEquals(intersection.getGATKChunks().get(0),new GATKChunk(0<<16|32768,1<<16|32768),"Determined intersection is incorrect.");
+    }
+
+    @Test
+    public void testIntersectionOfChunkLists() {
+        // One wide region intersected with two narrow ones yields both narrow chunks.
+        GATKBAMFileSpan regionOne = new GATKBAMFileSpan(new GATKChunk(0,5<<16));
+        GATKBAMFileSpan regionTwo = new GATKBAMFileSpan(new GATKChunk[] { new GATKChunk(1<<16,2<<16), new GATKChunk(3<<16,4<<16) });
+        GATKBAMFileSpan intersection = regionOne.intersection(regionTwo);
+        Assert.assertEquals(intersection.getGATKChunks().size(),2,"Wrong number of intersections found.");
+        Assert.assertEquals(intersection.getGATKChunks().get(0),new GATKChunk(1<<16,2<<16),"Determined intersection is incorrect.");
+        Assert.assertEquals(intersection.getGATKChunks().get(1),new GATKChunk(3<<16,4<<16),"Determined intersection is incorrect.");
+
+        // Make sure intersection is symmetric
+        intersection = regionTwo.intersection(regionOne);
+        Assert.assertEquals(intersection.getGATKChunks().size(),2,"Wrong number of intersections found.");
+        Assert.assertEquals(intersection.getGATKChunks().get(0),new GATKChunk(1<<16,2<<16),"Determined intersection is incorrect.");
+        Assert.assertEquals(intersection.getGATKChunks().get(1),new GATKChunk(3<<16,4<<16),"Determined intersection is incorrect.");
+    }
+
+    @Test
+    public void testSubtractionOfEmptyChunkLists() {
+        // Subtracting empty from empty must remain empty.
+        GATKBAMFileSpan regionOne = new GATKBAMFileSpan();
+        GATKBAMFileSpan regionTwo = new GATKBAMFileSpan();
+        GATKBAMFileSpan subtraction = regionOne.minus(regionTwo);
+        Assert.assertEquals(subtraction.getGATKChunks().size(),0,"Elements inserted in subtraction of two empty sets");
+    }
+
+    @Test
+    public void testSingleIntervalSubtractedAway() {
+        // Subtracting a region from itself leaves nothing.
+        GATKBAMFileSpan regionOne = new GATKBAMFileSpan(new GATKChunk(0,1<<16));
+        GATKBAMFileSpan regionTwo = new GATKBAMFileSpan(new GATKChunk(0,1<<16));
+        GATKBAMFileSpan subtraction = regionOne.minus(regionTwo);
+        Assert.assertEquals(subtraction.getGATKChunks().size(),0,"Elements inserted in complete subtraction of region");
+    }
+
+    @Test
+    public void testMultipleIntervalsSubtractedAway() {
+        // Complete multi-chunk self-subtraction also leaves nothing.
+        GATKBAMFileSpan regionOne = new GATKBAMFileSpan(new GATKChunk[] { new GATKChunk(0,1<<16), new GATKChunk(2<<16,3<<16) });
+        GATKBAMFileSpan regionTwo = new GATKBAMFileSpan(new GATKChunk[] { new GATKChunk(0,1<<16), new GATKChunk(2<<16,3<<16) });
+        GATKBAMFileSpan subtraction = regionOne.minus(regionTwo);
+        Assert.assertEquals(subtraction.getGATKChunks().size(),0,"Elements inserted in complete subtraction of region");
+    }
+
+    @Test
+    public void testSubtractionOfStrictSubset() {
+        // Removing a prefix leaves only the trailing remainder.
+        GATKBAMFileSpan regionOne = new GATKBAMFileSpan(new GATKChunk(0,2<<16));
+        GATKBAMFileSpan regionTwo = new GATKBAMFileSpan(new GATKChunk(0,1<<16));
+        GATKBAMFileSpan subtraction = regionOne.minus(regionTwo);
+        Assert.assertEquals(subtraction.getGATKChunks().size(),1,"Incorrect size in strict subset subtraction of region");
+        Assert.assertEquals(subtraction.getGATKChunks().get(0),new GATKChunk(1<<16,2<<16),"Determined subtraction is incorrect.");
+    }
+
+    @Test
+    public void testSubtractionOfPartialOverlap() {
+        // Removing a region overhanging the end leaves only the leading remainder.
+        GATKBAMFileSpan regionOne = new GATKBAMFileSpan(new GATKChunk(0,2<<16));
+        GATKBAMFileSpan regionTwo = new GATKBAMFileSpan(new GATKChunk(1<<16,3<<16));
+        GATKBAMFileSpan subtraction = regionOne.minus(regionTwo);
+        Assert.assertEquals(subtraction.getGATKChunks().size(),1,"Incorrect size in partial subset subtraction of region");
+        Assert.assertEquals(subtraction.getGATKChunks().get(0),new GATKChunk(0<<16,1<<16),"Determined subtraction is incorrect.");
+    }
+}
diff --git a/public/gatk-utils/src/test/java/htsjdk/samtools/GATKChunkUnitTest.java b/public/gatk-utils/src/test/java/htsjdk/samtools/GATKChunkUnitTest.java
new file mode 100644
index 0000000..234bbcf
--- /dev/null
+++ b/public/gatk-utils/src/test/java/htsjdk/samtools/GATKChunkUnitTest.java
@@ -0,0 +1,71 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package htsjdk.samtools;
+
+import org.testng.Assert;
+import org.testng.annotations.Test;
+
+/**
+ * Exercises GATKChunk.size() reporting: empty chunks, spans confined to a
+ * single block, whole blocks, and spans crossing a block boundary.
+ */
+public class GATKChunkUnitTest {
+    private static final int FULL_BLOCK_COMPRESSED_SIZE = 25559;
+    private static final int FULL_BLOCK_UNCOMPRESSED_SIZE = 65536;
+    private static final int HALF_BLOCK_UNCOMPRESSED_SIZE = FULL_BLOCK_UNCOMPRESSED_SIZE/2;
+
+    @Test
+    public void testSizeOfEmptyChunk() {
+        final GATKChunk emptyChunk = new GATKChunk(0,0);
+        Assert.assertEquals(emptyChunk.size(),0,"Empty chunk's size is not equal to 0.");
+    }
+
+    @Test
+    public void testSizeOfChunkWithinSingleBlock() {
+        // A chunk confined to a single block reports the plain offset difference.
+        final GATKChunk nearlyFullBlock = new GATKChunk(0,FULL_BLOCK_UNCOMPRESSED_SIZE-1);
+        Assert.assertEquals(nearlyFullBlock.size(),FULL_BLOCK_UNCOMPRESSED_SIZE-1,"Chunk spanning limits of block is returning wrong size.");
+
+        final GATKChunk halfBlock = new GATKChunk(0,HALF_BLOCK_UNCOMPRESSED_SIZE);
+        Assert.assertEquals(halfBlock.size(),HALF_BLOCK_UNCOMPRESSED_SIZE,"Chunk spanning 1/2 block is returning the wrong size.");
+    }
+
+    @Test
+    public void testSizeOfSingleBlock() {
+        // End coordinate built as compressed-offset << 16 (a virtual file offset).
+        final GATKChunk wholeBlock = new GATKChunk(0,FULL_BLOCK_COMPRESSED_SIZE<<16);
+        Assert.assertEquals(wholeBlock.size(),FULL_BLOCK_UNCOMPRESSED_SIZE,"Chunk spanning complete block returns incorrect size.");
+    }
+
+    @Test
+    public void testSizeOfBlockAndAHalf() {
+        final GATKChunk blockAndAHalf = new GATKChunk(0,(FULL_BLOCK_COMPRESSED_SIZE<<16)+HALF_BLOCK_UNCOMPRESSED_SIZE);
+        Assert.assertEquals(blockAndAHalf.size(),FULL_BLOCK_UNCOMPRESSED_SIZE+HALF_BLOCK_UNCOMPRESSED_SIZE,"Chunk spanning 1.5 blocks returns incorrect size.");
+    }
+
+    @Test
+    public void testSizeOfHalfBlock() {
+        // Chunk starting midway through a block and running to the block's end.
+        final GATKChunk trailingHalfBlock = new GATKChunk(HALF_BLOCK_UNCOMPRESSED_SIZE,FULL_BLOCK_COMPRESSED_SIZE<<16);
+        Assert.assertEquals(trailingHalfBlock.size(),HALF_BLOCK_UNCOMPRESSED_SIZE,"Chunk spanning 0.5 blocks returns incorrect size.");
+    }
+}
diff --git a/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/AutoFormattingTimeUnitTest.java b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/AutoFormattingTimeUnitTest.java
new file mode 100644
index 0000000..fb95d67
--- /dev/null
+++ b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/AutoFormattingTimeUnitTest.java
@@ -0,0 +1,118 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils;
+
+import org.broadinstitute.gatk.utils.BaseTest;
+import org.broadinstitute.gatk.utils.AutoFormattingTime;
+import org.testng.Assert;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import java.util.concurrent.TimeUnit;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+/**
+ * Unit tests for AutoFormattingTime: automatic selection of the display
+ * time unit (s/m/h/d/w) and honoring of the requested width/precision.
+ */
+public class AutoFormattingTimeUnitTest extends BaseTest {
+    // Expected toString() shape: "<padding+integer>.<fraction> <unit letter>".
+    // Compiled once: Pattern is immutable and thread-safe, and the previous
+    // code recompiled it on every matchToString() call.
+    private static final Pattern TO_STRING_PATTERN = Pattern.compile("(\\s*\\d*)\\.(\\d*) \\w");
+
+    /** Times in nanoseconds paired with the unit letter toString() should end with. */
+    @DataProvider(name = "AutoFormattingTimeUnitSelection")
+    public Object[][] makeTimeData() {
+        List<Object[]> tests = new ArrayList<Object[]>();
+        tests.add(new Object[]{TimeUnit.SECONDS.toNanos(10), "s"});
+        tests.add(new Object[]{TimeUnit.MINUTES.toNanos(10), "m"});
+        tests.add(new Object[]{TimeUnit.HOURS.toNanos(10), "h"});
+        tests.add(new Object[]{TimeUnit.DAYS.toNanos(10), "d"});
+        tests.add(new Object[]{TimeUnit.DAYS.toNanos(1000), "w"});
+        return tests.toArray(new Object[][]{});
+    }
+
+    @Test(dataProvider = "AutoFormattingTimeUnitSelection")
+    public void testUnitSelection(final long nano, final String expectedUnit) throws InterruptedException {
+        final AutoFormattingTime time = new AutoFormattingTime(nano);
+        testBasic(time, nano, time.getWidth(), time.getPrecision());
+        Assert.assertTrue(time.toString().endsWith(expectedUnit), "TimeUnit " + time.toString() + " didn't contain expected time unit " + expectedUnit);
+    }
+
+    @Test(dataProvider = "AutoFormattingTimeUnitSelection")
+    public void testSecondsAsDouble(final long nano, final String expectedUnit) throws InterruptedException {
+        // Round-trip through the double-seconds constructor; compare against a
+        // nanosecond value reconstructed the same way to avoid rounding mismatch.
+        final double inSeconds = nano * 1e-9;
+        final long nanoFromSeconds = (long)(inSeconds * 1e9);
+        final AutoFormattingTime time = new AutoFormattingTime(inSeconds);
+        testBasic(time, nanoFromSeconds, time.getWidth(), time.getPrecision());
+    }
+
+    /** Width/precision combinations, including the -1 "unspecified" width. */
+    @DataProvider(name = "AutoFormattingTimeWidthAndPrecision")
+    public Object[][] makeTimeWidthAndPrecision() {
+        // NOTE(review): values like 100.123456 * 1e9 read as nanoseconds, yet
+        // they feed the "inSeconds" parameter below, where they are multiplied
+        // by 1e9 again (overflowing long range) -- confirm the intended scale.
+        List<Object[]> tests = new ArrayList<Object[]>();
+        for ( final int width : Arrays.asList(-1, 1, 2, 6, 20) ) {
+            for ( final int precision : Arrays.asList(1, 2) ) {
+                tests.add(new Object[]{100.123456 * 1e9, width, precision});
+                tests.add(new Object[]{0.123456 * 1e9, width, precision});
+            }
+        }
+        return tests.toArray(new Object[][]{});
+    }
+
+    @Test(dataProvider = "AutoFormattingTimeWidthAndPrecision")
+    public void testWidthAndPrecision(final double inSeconds, final int width, final int precision) throws InterruptedException {
+        final AutoFormattingTime time = new AutoFormattingTime(inSeconds, width, precision);
+        final long nanoFromSeconds = (long)(inSeconds * 1e9);
+        testBasic(time, nanoFromSeconds, width, precision);
+        final Matcher match = matchToString(time);
+        // Assert the match rather than ignoring the result: otherwise group()
+        // below fails with an opaque IllegalStateException on a format change.
+        Assert.assertTrue(match.matches(), "toString " + time.toString() + " doesn't match our expected format");
+        final String widthString = match.group(1);
+        final String precisionString = match.group(2);
+        if ( width != -1 ) {
+            // Total printed width = integer part + '.' + fractional digits.
+            final int actualWidth = widthString.length() + 1 + precisionString.length();
+            Assert.assertTrue(actualWidth >= width, "width string '" + widthString + "' not >= the expected width " + width);
+        }
+        Assert.assertEquals(precisionString.length(), precision, "precision string '" + precisionString + "' not the expected precision " + precision);
+    }
+
+    /** Matches the numeric portion of the time's toString() against the expected format. */
+    private static Matcher matchToString(final AutoFormattingTime time) {
+        return TO_STRING_PATTERN.matcher(time.toString());
+    }
+
+    /** Common assertions: nanosecond/second accessors agree and toString() is well formed. */
+    private static void testBasic(final AutoFormattingTime aft, final long nano, final int expectedWidth, final int expectedPrecision) {
+        Assert.assertEquals(aft.getTimeInNanoSeconds(), nano);
+        assertEqualsDoubleSmart(aft.getTimeInSeconds(), nano * 1e-9, 1e-3, "Time in seconds not within tolerance of nanoSeconds");
+        Assert.assertEquals(aft.getWidth(), expectedWidth);
+        Assert.assertEquals(aft.getPrecision(), expectedPrecision);
+        Assert.assertNotNull(aft.toString(), "TimeUnit toString returned null");
+        final Matcher match = matchToString(aft);
+        Assert.assertTrue(match.matches(), "toString " + aft.toString() + " doesn't match our expected format");
+    }
+}
diff --git a/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/BaseTest.java b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/BaseTest.java
new file mode 100644
index 0000000..e151541
--- /dev/null
+++ b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/BaseTest.java
@@ -0,0 +1,564 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils;
+
+import htsjdk.tribble.Tribble;
+import htsjdk.tribble.readers.LineIterator;
+import htsjdk.tribble.readers.PositionalBufferedStream;
+import htsjdk.tribble.util.TabixUtils;
+import htsjdk.variant.bcf2.BCF2Codec;
+import htsjdk.variant.variantcontext.Genotype;
+import htsjdk.variant.variantcontext.VariantContext;
+import htsjdk.variant.vcf.VCFCodec;
+import htsjdk.variant.vcf.VCFConstants;
+import htsjdk.variant.vcf.VCFHeader;
+import htsjdk.variant.vcf.VCFHeaderLine;
+import org.apache.log4j.AppenderSkeleton;
+import org.apache.log4j.Level;
+import org.apache.log4j.Logger;
+import org.apache.log4j.PatternLayout;
+import org.apache.log4j.spi.LoggingEvent;
+import org.broadinstitute.gatk.utils.variant.VCIterable;
+import org.broadinstitute.gatk.utils.collections.Pair;
+import org.broadinstitute.gatk.utils.commandline.CommandLineUtils;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+import org.broadinstitute.gatk.utils.io.IOUtils;
+import org.testng.Assert;
+import org.testng.Reporter;
+import org.testng.SkipException;
+
+import java.io.File;
+import java.io.IOException;
+import java.io.PrintWriter;
+import java.util.*;
+
+/**
+ *
+ * User: aaron
+ * Date: Apr 14, 2009
+ * Time: 10:24:30 AM
+ *
+ * The Broad Institute
+ * SOFTWARE COPYRIGHT NOTICE AGREEMENT 
+ * This software and its documentation are copyright 2009 by the
+ * Broad Institute/Massachusetts Institute of Technology. All rights are reserved.
+ *
+ * This software is supplied without any warranty or guaranteed support whatsoever. Neither
+ * the Broad Institute nor MIT can be responsible for its use, misuse, or functionality.
+ *
+ */
+
+
+/**
+ * @author aaron
+ * @version 1.0
+ * @date Apr 14, 2009
+ * <p/>
+ * Class BaseTest
+ * <p/>
+ * This is the base test class for all of our test cases.  All test cases should extend from this
+ * class; it sets up the logger, and resolves the location of directories that we rely on.
+ */
+@SuppressWarnings("unchecked")
+public abstract class BaseTest {
+    /** our log, which we want to capture anything from org.broadinstitute.sting */
+    public static final Logger logger = CommandLineUtils.getStingLogger();
+
+    // Directory roots, overridable via -Dgatkdir / -Dbasedir; default to the working directory.
+    private static final String CURRENT_DIRECTORY = System.getProperty("user.dir");
+    public static final String gatkDirectory = System.getProperty("gatkdir", CURRENT_DIRECTORY) + "/";
+    public static final String baseDirectory = System.getProperty("basedir", CURRENT_DIRECTORY) + "/";
+    public static final String testType = System.getProperty("testType"); // May be null
+    public static final String testTypeSubDirectory = testType == null ? "" : ("/" + testType); // May be empty
+
+    // Absolute paths on the Broad filesystem; these resolve only on-site (see REQUIRE_NETWORK_CONNECTION below).
+    public static final String hg18Reference = "/seq/references/Homo_sapiens_assembly18/v0/Homo_sapiens_assembly18.fasta";
+    public static final String hg19Reference = "/seq/references/Homo_sapiens_assembly19/v1/Homo_sapiens_assembly19.fasta";
+    public static final String b36KGReference = "/humgen/1kg/reference/human_b36_both.fasta";
+    public static final String b37KGReference = "/humgen/1kg/reference/human_g1k_v37.fasta";
+    public static final String b37KGReferenceWithDecoy = "/humgen/gsa-hpprojects/GATK/bundle/current/b37/human_g1k_v37_decoy.fasta";
+    public static final String hg19ReferenceWithChrPrefixInChromosomeNames = "/humgen/gsa-hpprojects/GATK/bundle/current/hg19/ucsc.hg19.fasta";
+    public static final String GATKDataLocation = "/humgen/gsa-hpprojects/GATK/data/";
+    public static final String validationDataLocation = GATKDataLocation + "Validation_Data/";
+    public static final String evaluationDataLocation = GATKDataLocation + "Evaluation_Data/";
+    public static final String comparisonDataLocation = GATKDataLocation + "Comparisons/";
+    public static final String annotationDataLocation = GATKDataLocation + "Annotations/";
+
+    // Well-known BAM/VCF fixtures used by integration tests.
+    public static final String b37GoodBAM = validationDataLocation + "/CEUTrio.HiSeq.b37.chr20.10_11mb.bam";
+    public static final String b37GoodNA12878BAM = validationDataLocation + "/NA12878.HiSeq.WGS.bwa.cleaned.recal.hg19.20.bam";
+    public static final String b37_NA12878_OMNI = validationDataLocation + "/NA12878.omni.vcf";
+
+    // dbSNP releases for the various reference builds.
+    public static final String dbsnpDataLocation = GATKDataLocation;
+    public static final String b36dbSNP129 = dbsnpDataLocation + "dbsnp_129_b36.vcf";
+    public static final String b37dbSNP129 = dbsnpDataLocation + "dbsnp_129_b37.vcf";
+    public static final String b37dbSNP132 = dbsnpDataLocation + "dbsnp_132_b37.vcf";
+    public static final String b37dbSNP138 = "/humgen/gsa-hpprojects/GATK/bundle/current/b37/dbsnp_138.b37.vcf";
+    public static final String hg18dbSNP132 = dbsnpDataLocation + "dbsnp_132.hg18.vcf";
+
+    public static final String hapmapDataLocation = comparisonDataLocation + "Validated/HapMap/3.3/";
+    public static final String b37hapmapGenotypes = hapmapDataLocation + "genotypes_r27_nr.b37_fwd.vcf";
+
+    public static final String intervalsLocation = "/seq/references/HybSelOligos/whole_exome_agilent_1.1_refseq_plus_3_boosters/";
+    public static final String hg19Intervals = intervalsLocation + "whole_exome_agilent_1.1_refseq_plus_3_boosters.Homo_sapiens_assembly19.targets.interval_list";
+    public static final String hg19Chr20Intervals = GATKDataLocation + "whole_exome_agilent_1.1_refseq_plus_3_boosters.Homo_sapiens_assembly19.targets.chr20.interval_list";
+
+    // Network scratch space; resolved once at class-load time in the static initializer below.
+    public static final boolean REQUIRE_NETWORK_CONNECTION = false;
+    private static final String networkTempDirRoot = "/broad/hptmp/";
+    private static final boolean networkTempDirRootExists = new File(networkTempDirRoot).exists();
+    private static final File networkTempDirFile;
+
+    // Test-resource directories, resolved relative to gatkDirectory.
+    private static final String privateTestDirRelative = "private/gatk-tools-private/src/test/resources/";
+    public static final String privateTestDir = new File(gatkDirectory, privateTestDirRelative).getAbsolutePath() + "/";
+    protected static final String privateTestDirRoot = privateTestDir.replace(privateTestDirRelative, "");
+
+    private static final String publicTestDirRelative = "public/gatk-utils/src/test/resources/";
+    public static final String publicTestDir = new File(gatkDirectory, publicTestDirRelative).getAbsolutePath() + "/";
+    protected static final String publicTestDirRoot = publicTestDir.replace(publicTestDirRelative, "");
+
+    public static final String keysDataLocation = validationDataLocation + "keys/";
+
+    public static final String exampleFASTA = publicTestDir + "exampleFASTA.fasta";
+
+    public final static String NA12878_PCRFREE = privateTestDir + "PCRFree.2x250.Illumina.20_10_11.bam";
+    public final static String NA12878_WEx = privateTestDir + "CEUTrio.HiSeq.WEx.b37_decoy.NA12878.20_10_11mb.bam";
+
+    // true only when -Dqueuetest.run=true was passed; gates long-running Queue tests.
+    public static final boolean queueTestRunModeIsSet = System.getProperty("queuetest.run", "").equals("true");
+
+    /** before the class starts up */
+    static {
+        // setup a basic log configuration
+        CommandLineUtils.configureConsoleLogging();
+
+        // setup our log layout
+        PatternLayout layout = new PatternLayout();
+        layout.setConversionPattern("TEST %C{1}.%M - %d{HH:mm:ss,SSS} - %m%n");
+
+        // now set the layout of all the loggers to our layout
+        CommandLineUtils.setLayout(logger, layout);
+
+        // Set the Root logger to only output warnings.
+        logger.setLevel(Level.WARN);
+
+        // Create a per-user scratch dir under the network root when it is reachable;
+        // otherwise leave networkTempDirFile null and let tryCreateNetworkTempFile() skip.
+        if (networkTempDirRootExists) {
+            networkTempDirFile = IOUtils.tempDir("temp.", ".dir", new File(networkTempDirRoot + System.getProperty("user.name")));
+            networkTempDirFile.deleteOnExit();
+        } else {
+            networkTempDirFile = null;
+        }
+
+
+        if ( REQUIRE_NETWORK_CONNECTION ) {
+            // find our file sources
+            if (!fileExist(hg18Reference) || !fileExist(hg19Reference) || !fileExist(b36KGReference)) {
+                logger.fatal("We can't locate the reference directories.  Aborting!");
+                throw new RuntimeException("BaseTest setup failed: unable to locate the reference directories");
+            }
+        }
+    }
+
+    /**
+     * Simple generic utility class to creating TestNG data providers:
+     *
+     * 1: inherit this class, as in
+     *
+     *      private class SummarizeDifferenceTest extends TestDataProvider {
+     *         public SummarizeDifferenceTest() {
+     *           super(SummarizeDifferenceTest.class);
+     *         }
+     *         ...
+     *      }
+     *
+     * Provide a reference to your class to the TestDataProvider constructor.
+     *
+     * 2: Create instances of your subclass.  Return from it the call to getTests, providing
+     * the class type of your test
+     *
+     * <code>
+     * {@literal @}DataProvider(name = "summaries")
+     * public Object[][] createSummaries() {
+     *   new SummarizeDifferenceTest().addDiff("A", "A").addSummary("A:2");
+     *   new SummarizeDifferenceTest().addDiff("A", "B").addSummary("A:1", "B:1");
+     *   return SummarizeDifferenceTest.getTests(SummarizeDifferenceTest.class);
+     * }
+     * </code>
+     *
+     * This class magically tracks created objects of this
+     */
+    public static class TestDataProvider {
+        /** All instances created so far, keyed by the test subclass that registered them. */
+        private static final Map<Class, List<Object>> tests = new HashMap<>();
+        protected String name;
+
+        /**
+         * Create a new TestDataProvider instance bound to the class variable C
+         */
+        public TestDataProvider(Class c, String name) {
+            // computeIfAbsent replaces the original containsKey/put dance with one atomic lookup.
+            tests.computeIfAbsent(c, key -> new ArrayList<>()).add(this);
+            this.name = name;
+        }
+
+        public TestDataProvider(Class c) {
+            this(c, "");
+        }
+
+        public void setName(final String name) {
+            this.name = name;
+        }
+
+        /**
+         * Return all of the data providers in the form expected by TestNG of type class C
+         * @param c the test class whose registered instances should be returned
+         * @return one single-element Object[] row per registered instance
+         */
+        public static Object[][] getTests(Class c) {
+            // NOTE(review): assumes at least one instance of c was constructed first;
+            // tests.get(c) is null otherwise (preserved from the original behavior).
+            final List<Object[]> rows = new ArrayList<>();
+            for ( final Object test : tests.get(c) ) rows.add(new Object[]{test});
+            return rows.toArray(new Object[][]{});
+        }
+
+        @Override
+        public String toString() {
+            return "TestDataProvider("+name+")";
+        }
+    }
+
+    /**
+     * test if the file exists
+     *
+     * @param file name as a string
+     * @return true if it exists
+     */
+    public static boolean fileExist(String file) {
+        // Simple existence probe for an absolute or relative path.
+        return new File(file).exists();
+    }
+    
+    /**
+     * this appender looks for a specific message in the log4j stream.
+     * It can be used to verify that a specific message was generated to the logging system.
+     */
+    public static class ValidationAppender extends AppenderSkeleton {
+
+        private boolean foundString = false;
+        // The exact message text we are watching the log stream for; fixed at construction.
+        private final String targetString;
+
+        public ValidationAppender(String target) {
+            targetString = target;
+        }
+
+        /** Latches foundString as soon as any event's message equals the target string. */
+        @Override
+        protected void append(LoggingEvent loggingEvent) {
+            if (loggingEvent.getMessage().equals(targetString))
+                foundString = true;
+        }
+
+        @Override
+        public void close() {
+            // do nothing
+        }
+
+        @Override
+        public boolean requiresLayout() {
+            return false;
+        }
+
+        /** @return true if the target message was seen since this appender was attached */
+        public boolean foundString() {
+            return foundString;
+        }
+    }
+
+    /**
+     * Creates a temp file that will be deleted on exit after tests are complete.
+     * @param name Prefix of the file.
+     * @param extension Extension to concat to the end of the file.
+     * @return A file in the temporary directory starting with name, ending with extension, which will be deleted after the program exits.
+     */
+    public static File createTempFile(final String name, final String extension) {
+        try {
+            final File file = File.createTempFile(name, extension);
+            file.deleteOnExit();
+
+            // Mark corresponding indices for deletion on exit as well just in case an index is created for the temp file:
+            new File(file.getAbsolutePath() + Tribble.STANDARD_INDEX_EXTENSION).deleteOnExit();
+            new File(file.getAbsolutePath() + TabixUtils.STANDARD_INDEX_EXTENSION).deleteOnExit();
+            new File(file.getAbsolutePath() + ".bai").deleteOnExit();
+
+            // BUG FIX: the original used replaceAll(extension + "$", ".bai"), which interprets the
+            // extension as a regex; metacharacters (e.g. the '.' in ".vcf") could then match
+            // unintended characters. Strip the literal suffix instead.
+            final String path = file.getAbsolutePath();
+            if (path.endsWith(extension)) {
+                new File(path.substring(0, path.length() - extension.length()) + ".bai").deleteOnExit();
+            }
+
+            return file;
+        } catch (IOException ex) {
+            throw new ReviewedGATKException("Cannot create temp file: " + ex.getMessage(), ex);
+        }
+    }
+
+    /**
+     * Creates a temp list file that will be deleted on exit after tests are complete.
+     * @param tempFilePrefix Prefix of the file.
+     * @param lines lines to write to the file.
+     * @return A list file in the temporary directory starting with tempFilePrefix, which will be deleted after the program exits.
+     */
+    public static File createTempListFile(final String tempFilePrefix, final String... lines) {
+        try {
+            final File tempListFile = createTempFile(tempFilePrefix, ".list");
+
+            // try-with-resources guarantees the writer is closed even if a write fails;
+            // the original leaked the file handle on exception.
+            try (final PrintWriter out = new PrintWriter(tempListFile)) {
+                for (final String line : lines) {
+                    out.println(line);
+                }
+            }
+
+            return tempListFile;
+        } catch (IOException ex) {
+            throw new ReviewedGATKException("Cannot create temp file: " + ex.getMessage(), ex);
+        }
+    }
+
+    /**
+     * Creates a temp file that will be deleted on exit after tests are complete.
+     * @param name Name of the file.
+     * @return A file in the network temporary directory with name, which will be deleted after the program exits.
+     * @throws SkipException when the network is not available.
+     */
+    public static File tryCreateNetworkTempFile(String name) {
+        // Only usable when the Broad network scratch root was visible at class-load time.
+        if (networkTempDirRootExists) {
+            final File networkFile = new File(networkTempDirFile, name);
+            networkFile.deleteOnExit();
+            return networkFile;
+        }
+        throw new SkipException("Network temporary directory does not exist: " + networkTempDirRoot);
+    }
+
+    /**
+     * Log this message so that it shows up inline during output as well as in html reports
+     *
+     * @param message
+     */
+    public static void log(final String message) {
+        // second arg true => also echo to stdout so the message appears inline, not just in the HTML report
+        Reporter.log(message, true);
+    }
+
+    private static final double DEFAULT_FLOAT_TOLERANCE = 1e-1;
+
+    public static final void assertEqualsDoubleSmart(final Object actual, final Double expected) {
+        // Narrow the Object to a primitive double, then delegate to the primitive overload.
+        Assert.assertTrue(actual instanceof Double, "Not a double");
+        assertEqualsDoubleSmart(((Double) actual).doubleValue(), expected.doubleValue());
+    }
+
+    public static final void assertEqualsDoubleSmart(final Object actual, final Double expected, final double tolerance) {
+        // Narrow the Object to a primitive double, then delegate with the caller's tolerance.
+        Assert.assertTrue(actual instanceof Double, "Not a double");
+        assertEqualsDoubleSmart(((Double) actual).doubleValue(), expected.doubleValue(), tolerance);
+    }
+
+    public static final void assertEqualsDoubleSmart(final double actual, final double expected) {
+        // Delegate with the shared default tolerance (1e-1).
+        assertEqualsDoubleSmart(actual, expected, DEFAULT_FLOAT_TOLERANCE);
+    }
+
+    public static final <T> void assertEqualsSet(final Set<T> actual, final Set<T> expected, final String info) {
+        // Copy both sides into plain HashSets and compare via Set.equals; routing through
+        // Assert.assertEquals on sets trips a TestNG bug for set comparisons.
+        final Set<T> lhs = new HashSet<T>(actual);
+        final Set<T> rhs = new HashSet<T>(expected);
+        Assert.assertTrue(lhs.equals(rhs), info);
+    }
+
+    public static void assertEqualsDoubleSmart(final double actual, final double expected, final double tolerance) {
+        // Delegate to the message-carrying overload with no extra message.
+        assertEqualsDoubleSmart(actual, expected, tolerance, null);
+    }
+
+    public static void assertEqualsDoubleSmart(final double actual, final double expected, final double tolerance, final String message) {
+        if ( Double.isNaN(expected) ) // NaN == NaN => false unfortunately
+            Assert.assertTrue(Double.isNaN(actual), "expected is nan, actual is not");
+        else if ( Double.isInfinite(expected) ) // infinity must be matched by infinity, not by a huge finite value
+            Assert.assertTrue(Double.isInfinite(actual), "expected is infinite, actual is not");
+        else {
+            // Accept agreement either in absolute terms (delta) or relative terms (ratio).
+            final double delta = Math.abs(actual - expected);
+            final double ratio = Math.abs(actual / expected - 1.0);
+            // BUG FIX: added the leading space before "message:" so the failure text no longer
+            // fuses the tolerance value and the caller's message into one token.
+            Assert.assertTrue(delta < tolerance || ratio < tolerance, "expected = " + expected + " actual = " + actual
+                    + " not within tolerance " + tolerance
+                    + (message == null ? "" : " message: " + message));
+        }
+    }
+
+    /** Asserts full equality of two VariantContexts: position, id, alleles, attributes, filters, qual, and per-sample genotypes. */
+    public static void assertVariantContextsAreEqual( final VariantContext actual, final VariantContext expected ) {
+        Assert.assertNotNull(actual, "VariantContext expected not null");
+        Assert.assertEquals(actual.getChr(), expected.getChr(), "chr");
+        Assert.assertEquals(actual.getStart(), expected.getStart(), "start");
+        Assert.assertEquals(actual.getEnd(), expected.getEnd(), "end");
+        Assert.assertEquals(actual.getID(), expected.getID(), "id");
+        Assert.assertEquals(actual.getAlleles(), expected.getAlleles(), "alleles for " + expected + " vs " + actual);
+
+        // Attributes use the tolerant per-key comparison; QUAL uses the loose double comparison.
+        assertAttributesEquals(actual.getAttributes(), expected.getAttributes());
+        Assert.assertEquals(actual.filtersWereApplied(), expected.filtersWereApplied(), "filtersWereApplied");
+        Assert.assertEquals(actual.isFiltered(), expected.isFiltered(), "isFiltered");
+        assertEqualsSet(actual.getFilters(), expected.getFilters(), "filters");
+        assertEqualsDoubleSmart(actual.getPhredScaledQual(), expected.getPhredScaledQual());
+
+        // Genotypes are compared per sample, keyed by the expected context's sample names.
+        Assert.assertEquals(actual.hasGenotypes(), expected.hasGenotypes(), "hasGenotypes");
+        if ( expected.hasGenotypes() ) {
+            assertEqualsSet(actual.getSampleNames(), expected.getSampleNames(), "sample names set");
+            Assert.assertEquals(actual.getSampleNamesOrderedByName(), expected.getSampleNamesOrderedByName(), "sample names");
+            final Set<String> samples = expected.getSampleNames();
+            for ( final String sample : samples ) {
+                assertGenotypesAreEqual(actual.getGenotype(sample), expected.getGenotype(sample));
+            }
+        }
+    }
+
+    /** Asserts two VariantContext streams are pairwise equal, skipping null entries in either stream. */
+    public static void assertVariantContextStreamsAreEqual(final Iterable<VariantContext> actual, final Iterable<VariantContext> expected) {
+        final Iterator<VariantContext> actualIT = actual.iterator();
+        final Iterator<VariantContext> expectedIT = expected.iterator();
+
+        while ( expectedIT.hasNext() ) {
+            final VariantContext expectedVC = expectedIT.next();
+            // null entries represent "no record" and are skipped on both sides.
+            if ( expectedVC == null )
+                continue;
+
+            // Advance actual past any null entries to the next real record.
+            VariantContext actualVC;
+            do {
+                Assert.assertTrue(actualIT.hasNext(), "Too few records found in actual");
+                actualVC = actualIT.next();
+            } while ( actualIT.hasNext() && actualVC == null );
+
+            if ( actualVC == null )
+                Assert.fail("Too few records in actual");
+
+            assertVariantContextsAreEqual(actualVC, expectedVC);
+        }
+        // Once expected is exhausted, actual must be fully consumed as well.
+        Assert.assertTrue(! actualIT.hasNext(), "Too many records found in actual");
+    }
+
+
+    /** Asserts two Genotypes agree on name, alleles, type, filters, inline attributes, likelihoods, and phasing. */
+    public static void assertGenotypesAreEqual(final Genotype actual, final Genotype expected) {
+        Assert.assertEquals(actual.getSampleName(), expected.getSampleName(), "Genotype names");
+        Assert.assertEquals(actual.getAlleles(), expected.getAlleles(), "Genotype alleles");
+        Assert.assertEquals(actual.getGenotypeString(), expected.getGenotypeString(), "Genotype string");
+        Assert.assertEquals(actual.getType(), expected.getType(), "Genotype type");
+
+        // filters are the same
+        Assert.assertEquals(actual.getFilters(), expected.getFilters(), "Genotype fields");
+        Assert.assertEquals(actual.isFiltered(), expected.isFiltered(), "Genotype isFiltered");
+
+        // inline attributes
+        Assert.assertEquals(actual.getDP(), expected.getDP(), "Genotype dp");
+        Assert.assertTrue(Arrays.equals(actual.getAD(), expected.getAD()));
+        Assert.assertEquals(actual.getGQ(), expected.getGQ(), "Genotype gq");
+        Assert.assertEquals(actual.hasPL(), expected.hasPL(), "Genotype hasPL");
+        Assert.assertEquals(actual.hasAD(), expected.hasAD(), "Genotype hasAD");
+        Assert.assertEquals(actual.hasGQ(), expected.hasGQ(), "Genotype hasGQ");
+        Assert.assertEquals(actual.hasDP(), expected.hasDP(), "Genotype hasDP");
+
+        // likelihoods: both the presence flags and the concrete values/strings must match
+        Assert.assertEquals(actual.hasLikelihoods(), expected.hasLikelihoods(), "Genotype haslikelihoods");
+        Assert.assertEquals(actual.getLikelihoodsString(), expected.getLikelihoodsString(), "Genotype getlikelihoodsString");
+        Assert.assertEquals(actual.getLikelihoods(), expected.getLikelihoods(), "Genotype getLikelihoods");
+        Assert.assertTrue(Arrays.equals(actual.getPL(), expected.getPL()));
+
+        Assert.assertEquals(actual.getPhredScaledQual(), expected.getPhredScaledQual(), "Genotype phredScaledQual");
+        assertAttributesEquals(actual.getExtendedAttributes(), expected.getExtendedAttributes());
+        Assert.assertEquals(actual.isPhased(), expected.isPhased(), "Genotype isPhased");
+        Assert.assertEquals(actual.getPloidy(), expected.getPloidy(), "Genotype getPloidy");
+    }
+
+    public static void assertVCFHeadersAreEqual(final VCFHeader actual, final VCFHeader expected) {
+        Assert.assertEquals(actual.getMetaDataInSortedOrder().size(), expected.getMetaDataInSortedOrder().size(), "No VCF header lines");
+
+        // for some reason set.equals() is returning false but all paired elements are .equals().  Perhaps compare to is busted?
+        //Assert.assertEquals(actual.getMetaDataInInputOrder(), expected.getMetaDataInInputOrder());
+        // So walk the two sorted collections in lockstep and compare element by element.
+        final Iterator<VCFHeaderLine> expectedLines = expected.getMetaDataInSortedOrder().iterator();
+        for ( final VCFHeaderLine actualLine : actual.getMetaDataInSortedOrder() ) {
+            Assert.assertEquals(actualLine, expectedLines.next(), "VCF header lines");
+        }
+    }
+
+    /** Decodes both files fully and asserts identical headers and identical variant streams. */
+    public static void assertVCFandBCFFilesAreTheSame(final File vcfFile, final File bcfFile) throws IOException {
+        final Pair<VCFHeader, VCIterable<LineIterator>> vcfData = VCIterable.readAllVCs(vcfFile, new VCFCodec());
+        final Pair<VCFHeader, VCIterable<PositionalBufferedStream>> bcfData = VCIterable.readAllVCs(bcfFile, new BCF2Codec());
+        assertVCFHeadersAreEqual(bcfData.getFirst(), vcfData.getFirst());
+        assertVariantContextStreamsAreEqual(bcfData.getSecond(), vcfData.getSecond());
+    }
+
+    private static void assertAttributeEquals(final String key, final Object actual, final Object expected) {
+        // Doubles get a loose comparison because attribute values are rounded to ~2 sig figs on write.
+        if ( expected instanceof Double ) {
+            assertEqualsDoubleSmart(actual, (Double) expected, 1e-2);
+        } else {
+            Assert.assertEquals(actual, expected, "Attribute " + key);
+        }
+    }
+
+    /**
+     * Asserts two attribute maps are equivalent: shared keys compare per-element (lists recurse),
+     * while a key present on only one side is tolerated only if its value counts as "missing".
+     */
+    private static void assertAttributesEquals(final Map<String, Object> actual, Map<String, Object> expected) {
+        // Working copy of expected's keys; shared keys are removed as they are matched below.
+        final Set<String> expectedKeys = new HashSet<String>(expected.keySet());
+
+        for ( final Map.Entry<String, Object> act : actual.entrySet() ) {
+            final Object actualValue = act.getValue();
+            if ( expected.containsKey(act.getKey()) && expected.get(act.getKey()) != null ) {
+                final Object expectedValue = expected.get(act.getKey());
+                if ( expectedValue instanceof List ) {
+                    final List<Object> expectedList = (List<Object>)expectedValue;
+                    // List-valued attributes compare element-by-element with the tolerant comparator.
+                    Assert.assertTrue(actualValue instanceof List, act.getKey() + " should be a list but isn't");
+                    final List<Object> actualList = (List<Object>)actualValue;
+                    Assert.assertEquals(actualList.size(), expectedList.size(), act.getKey() + " size");
+                    for ( int i = 0; i < expectedList.size(); i++ )
+                        assertAttributeEquals(act.getKey(), actualList.get(i), expectedList.get(i));
+                } else
+                    assertAttributeEquals(act.getKey(), actualValue, expectedValue);
+            } else {
+                // it's ok to have a binding in x -> null that's absent in y
+                Assert.assertNull(actualValue, act.getKey() + " present in one but not in the other");
+            }
+            expectedKeys.remove(act.getKey());
+        }
+
+        // now expectedKeys contains only the keys found in expected but not in actual,
+        // and they must all be null
+        for ( final String missingExpected : expectedKeys ) {
+            final Object value = expected.get(missingExpected);
+            Assert.assertTrue(isMissing(value), "Attribute " + missingExpected + " missing in one but not in other" );
+        }
+    }
+
+    private static final boolean isMissing(final Object value) {
+        if ( value == null ) return true;
+        else if ( value.equals(VCFConstants.MISSING_VALUE_v4) ) return true;
+        else if ( value instanceof List ) {
+            // handles the case where all elements are null or the list is empty
+            for ( final Object elt : (List)value)
+                if ( elt != null )
+                    return false;
+            return true;
+        } else
+            return false;
+    }
+
+    /**
+     * Checks whether two double array contain the same values or not.
+     * @param actual actual produced array.
+     * @param expected expected array.
+     * @param tolerance maximum difference between double value to be consider equivalent.
+     */
+    protected static void assertEqualsDoubleArray(final double[] actual, final double[] expected, final double tolerance) {
+        if (expected == null) {
+            // BUG FIX: must return here. The original fell through to the element loop and
+            // dereferenced actual.length — an NPE even when both arrays were correctly null.
+            Assert.assertNull(actual);
+            return;
+        }
+        Assert.assertNotNull(actual);
+        Assert.assertEquals(actual.length,expected.length,"array length");
+        for (int i = 0; i < actual.length; i++)
+            Assert.assertEquals(actual[i],expected[i],tolerance,"array position " + i);
+    }
+}
diff --git a/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/BaseUtilsUnitTest.java b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/BaseUtilsUnitTest.java
new file mode 100644
index 0000000..92a412f
--- /dev/null
+++ b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/BaseUtilsUnitTest.java
@@ -0,0 +1,177 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils;
+
+import org.testng.Assert;
+import org.testng.annotations.BeforeClass;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.Random;
+
+
+public class BaseUtilsUnitTest extends BaseTest {
+    @BeforeClass
+    public void init() { }
+
+    @Test
+    public void testMostFrequentBaseFraction() {
+        logger.warn("Executing testMostFrequentBaseFraction");
+
+        compareFrequentBaseFractionToExpected("AAAAA", 1.0);
+        compareFrequentBaseFractionToExpected("ACCG", 0.5);
+        compareFrequentBaseFractionToExpected("ACCCCTTTTG", 4.0/10.0);
+    }
+
+    private void compareFrequentBaseFractionToExpected(String sequence, double expected) {
+        double fraction = BaseUtils.mostFrequentBaseFraction(sequence.getBytes());
+        Assert.assertTrue(MathUtils.compareDoubles(fraction, expected) == 0);
+    }
+
+    @Test
+    public void testConvertIUPACtoN() {
+
+        checkBytesAreEqual(BaseUtils.convertIUPACtoN(new byte[]{'A', 'A', 'A'}, false, false), new byte[]{'A', 'A', 'A'});
+        checkBytesAreEqual(BaseUtils.convertIUPACtoN(new byte[]{'W', 'A', 'A'}, false, false), new byte[]{'N', 'A', 'A'});
+        checkBytesAreEqual(BaseUtils.convertIUPACtoN(new byte[]{'A', 'M', 'A'}, false, false), new byte[]{'A', 'N', 'A'});
+        checkBytesAreEqual(BaseUtils.convertIUPACtoN(new byte[]{'A', 'A', 'K'}, false, false), new byte[]{'A', 'A', 'N'});
+        checkBytesAreEqual(BaseUtils.convertIUPACtoN(new byte[]{'M', 'M', 'M'}, false, false), new byte[]{'N', 'N', 'N'});
+    }
+
+    private void checkBytesAreEqual(final byte[] b1, final byte[] b2) {
+        for ( int i = 0; i < b1.length; i++ )
+            Assert.assertEquals(b1[i], b2[i]);
+    }
+
+    @Test
+    public void testConvertBasesToIUPAC() {
+
+        for ( final BaseUtils.Base b : BaseUtils.Base.values() ) {
+            if ( BaseUtils.isRegularBase(b.base) )
+                Assert.assertEquals(BaseUtils.basesToIUPAC(b.base, b.base), b.base, "testing same base");
+        }
+
+        Assert.assertEquals(BaseUtils.basesToIUPAC((byte)'A', (byte)'X'), 'N', "testing non-standard base");
+        Assert.assertEquals(BaseUtils.basesToIUPAC((byte)'X', (byte)'A'), 'N', "testing non-standard base");
+        Assert.assertEquals(BaseUtils.basesToIUPAC((byte)'X', (byte)'X'), 'N', "testing non-standard base");
+
+        Assert.assertEquals(BaseUtils.basesToIUPAC((byte)'A', (byte)'T'), 'W', "testing A/T=W");
+        Assert.assertEquals(BaseUtils.basesToIUPAC((byte)'T', (byte)'A'), 'W', "testing T/A=W");
+        Assert.assertEquals(BaseUtils.basesToIUPAC((byte) 'G', (byte) 'T'), 'K', "testing G/T=K");
+        Assert.assertEquals(BaseUtils.basesToIUPAC((byte) 'T', (byte) 'G'), 'K', "testing T/G=K");
+    }
+
+    @Test
+    public void testTransitionTransversion() {
+        logger.warn("Executing testTransitionTransversion");
+
+        Assert.assertTrue( BaseUtils.SNPSubstitutionType( (byte)'A', (byte)'T' ) == BaseUtils.BaseSubstitutionType.TRANSVERSION );
+        Assert.assertTrue( BaseUtils.SNPSubstitutionType( (byte)'A', (byte)'C' ) == BaseUtils.BaseSubstitutionType.TRANSVERSION );
+        Assert.assertTrue( BaseUtils.SNPSubstitutionType( (byte)'A', (byte)'G' ) == BaseUtils.BaseSubstitutionType.TRANSITION );
+        Assert.assertTrue( BaseUtils.SNPSubstitutionType( (byte)'C', (byte)'A' ) == BaseUtils.BaseSubstitutionType.TRANSVERSION );
+        Assert.assertTrue( BaseUtils.SNPSubstitutionType( (byte)'C', (byte)'T' ) == BaseUtils.BaseSubstitutionType.TRANSITION );
+        Assert.assertTrue( BaseUtils.SNPSubstitutionType( (byte)'C', (byte)'G' ) == BaseUtils.BaseSubstitutionType.TRANSVERSION );
+        Assert.assertTrue( BaseUtils.SNPSubstitutionType( (byte)'T', (byte)'A' ) == BaseUtils.BaseSubstitutionType.TRANSVERSION );
+        Assert.assertTrue( BaseUtils.SNPSubstitutionType( (byte)'T', (byte)'C' ) == BaseUtils.BaseSubstitutionType.TRANSITION );
+        Assert.assertTrue( BaseUtils.SNPSubstitutionType( (byte)'T', (byte)'G' ) == BaseUtils.BaseSubstitutionType.TRANSVERSION );
+        Assert.assertTrue( BaseUtils.SNPSubstitutionType( (byte)'G', (byte)'A' ) == BaseUtils.BaseSubstitutionType.TRANSITION );
+        Assert.assertTrue( BaseUtils.SNPSubstitutionType( (byte)'G', (byte)'T' ) == BaseUtils.BaseSubstitutionType.TRANSVERSION );
+        Assert.assertTrue( BaseUtils.SNPSubstitutionType( (byte)'G', (byte)'C' ) == BaseUtils.BaseSubstitutionType.TRANSVERSION );
+
+        Assert.assertTrue( BaseUtils.SNPSubstitutionType( (byte)'a', (byte)'T' ) == BaseUtils.BaseSubstitutionType.TRANSVERSION );
+        Assert.assertTrue( BaseUtils.SNPSubstitutionType( (byte)'a', (byte)'C' ) == BaseUtils.BaseSubstitutionType.TRANSVERSION );
+        Assert.assertTrue( BaseUtils.SNPSubstitutionType( (byte)'A', (byte)'T' ) == BaseUtils.BaseSubstitutionType.TRANSVERSION );
+        Assert.assertTrue( BaseUtils.SNPSubstitutionType( (byte)'A', (byte)'C' ) == BaseUtils.BaseSubstitutionType.TRANSVERSION );
+        Assert.assertTrue( BaseUtils.SNPSubstitutionType( (byte)'A', (byte)'t' ) == BaseUtils.BaseSubstitutionType.TRANSVERSION );
+        Assert.assertTrue( BaseUtils.SNPSubstitutionType( (byte)'A', (byte)'c' ) == BaseUtils.BaseSubstitutionType.TRANSVERSION );
+        Assert.assertTrue( BaseUtils.SNPSubstitutionType( (byte)'a', (byte)'t' ) == BaseUtils.BaseSubstitutionType.TRANSVERSION );
+        Assert.assertTrue( BaseUtils.SNPSubstitutionType( (byte)'a', (byte)'c' ) == BaseUtils.BaseSubstitutionType.TRANSVERSION );
+    }
+
+    @Test
+    public void testReverseComplementString() {
+        logger.warn("Executing testReverseComplementString");
+
+        compareRCStringToExpected("ACGGT", "ACCGT");
+        compareRCStringToExpected("TCGTATATCTCGCTATATATATATAGCTCTAGTATA", "TATACTAGAGCTATATATATATAGCGAGATATACGA");
+        compareRCStringToExpected("AAAN", "NTTT");
+    }
+
+    private void compareRCStringToExpected(String fw, String rcExp) {
+        String rcObs = BaseUtils.simpleReverseComplement(fw);
+
+        Assert.assertTrue(rcObs.equals(rcExp));
+    }
+
+    @Test(dataProvider="baseComparatorData")
+    public void testBaseComparator(final Collection<byte[]> basesToSort) {
+        final ArrayList<byte[]> sorted = new ArrayList<>(basesToSort);
+        Collections.sort(sorted, BaseUtils.BASES_COMPARATOR);
+        for (int i = 0; i < sorted.size(); i++)   {
+            Assert.assertEquals(BaseUtils.BASES_COMPARATOR.compare(sorted.get(i),sorted.get(i)),0);
+            final String iString = new String(sorted.get(i));
+            for (int j = i; j < sorted.size(); j++) {
+                final String jString = new String(sorted.get(j));
+                if (iString.compareTo(jString) == 0)
+                    Assert.assertEquals(BaseUtils.BASES_COMPARATOR.compare(sorted.get(i),sorted.get(j)),0);
+                else
+                    Assert.assertTrue(BaseUtils.BASES_COMPARATOR.compare(sorted.get(i),sorted.get(j)) * iString.compareTo(jString) > 0);
+                Assert.assertTrue(BaseUtils.BASES_COMPARATOR.compare(sorted.get(i),sorted.get(j)) <= 0);
+            }
+        }
+    }
+
+    @DataProvider(name="baseComparatorData")
+    public Object[][] baseComparatorData() {
+        final int testCount = 10;
+        final int testSizeAverage = 10;
+        final int testSizeDeviation = 10;
+        final int haplotypeSizeAverage = 100;
+        final int haplotypeSizeDeviation = 100;
+
+        final Object[][] result = new Object[testCount][];
+
+        Utils.resetRandomGenerator();
+        final Random rnd = Utils.getRandomGenerator();
+
+        for (int i = 0; i < testCount; i++) {
+            final int size = (int) Math.max(0,rnd.nextDouble() * testSizeDeviation + testSizeAverage);
+            final ArrayList<byte[]> bases = new ArrayList<>(size);
+            for (int j = 0; j < size; j++) {
+                final int jSize = (int) Math.max(0,rnd.nextDouble() * haplotypeSizeDeviation + haplotypeSizeAverage);
+                final byte[] b = new byte[jSize];
+                for (int k = 0; k < jSize; k++)
+                    b[k] = BaseUtils.baseIndexToSimpleBase(rnd.nextInt(4));
+                bases.add(b);
+            }
+            result[i] = new Object[] { bases };
+        }
+        return result;
+    }
+}
diff --git a/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/BitSetUtilsUnitTest.java b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/BitSetUtilsUnitTest.java
new file mode 100644
index 0000000..0124a08
--- /dev/null
+++ b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/BitSetUtilsUnitTest.java
@@ -0,0 +1,84 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils;
+
+import org.testng.Assert;
+import org.testng.annotations.BeforeClass;
+import org.testng.annotations.Test;
+
+import java.util.Random;
+
+/**
+ * @author Mauricio Carneiro
+ * @since 3/5/12
+ */
+
+public class BitSetUtilsUnitTest {
+    private static int RANDOM_NUMBERS_TO_TRY = 87380;
+    private static Random random;
+
+    @BeforeClass
+    public void init() {
+        random = Utils.getRandomGenerator();
+    }
+
+    @Test(enabled = true)
+    public void testLongBitSet() {
+        long[] numbers = {0L, 1L, 428L, 65536L, 239847L, 4611686018427387903L, Long.MAX_VALUE, Long.MIN_VALUE, -1L, -2L, -7L, -128L, -65536L, -100000L};
+        for (long n : numbers)
+            Assert.assertEquals(BitSetUtils.longFrom(BitSetUtils.bitSetFrom(n)), n);
+
+        for (int i = 0; i < RANDOM_NUMBERS_TO_TRY; i++) {
+            long n = random.nextLong();
+            Assert.assertEquals(BitSetUtils.longFrom(BitSetUtils.bitSetFrom(n)), n);    // Because class Random uses a seed with only 48 bits, this algorithm will not return all possible long values.
+        }
+    }
+
+    @Test(enabled = true)
+    public void testShortBitSet() {
+        short[] numbers = {0, 1, 428, 25934, 23847, 16168, Short.MAX_VALUE, Short.MIN_VALUE, -1, -2, -7, -128, -12312, -31432};
+        for (long n : numbers)
+            Assert.assertEquals(BitSetUtils.shortFrom(BitSetUtils.bitSetFrom(n)), n);
+
+        for (int i = 0; i < RANDOM_NUMBERS_TO_TRY; i++) {
+            short n = (short) random.nextInt();
+            Assert.assertEquals(BitSetUtils.shortFrom(BitSetUtils.bitSetFrom(n)), n);
+        }
+    }
+
+    @Test(enabled = false)
+    public void testDNAAndBitSetConversion() {
+        String[] dna = {"AGGTGTTGT", "CCCCCCCCCCCCCC", "GGGGGGGGGGGGGG", "TTTTTTTTTTTTTT", "GTAGACCGATCTCAGCTAGT", "AACGTCAATGCAGTCAAGTCAGACGTGGGTT", "TTTTTTTTTTTTTTTTTTTTTTTTTTTTTT", "TTTTTTTTTTTTTTTTTTTTTTTTTTTTTTT"};
+
+        // Test all contexts of size 1-8.
+        //for (long n = 0; n < RANDOM_NUMBERS_TO_TRY; n++)
+        //    Assert.assertEquals(BitSetUtils.longFrom(BitSetUtils.bitSetFrom(ContextCovariate.contextFromKey(BitSetUtils.bitSetFrom(n)))), n);
+
+        // Test the special cases listed in the dna array
+        //for (String d : dna)
+        //    Assert.assertEquals(BitSetUtils.dnaFrom(BitSetUtils.bitSetFrom(d)), d);
+    }
+}
diff --git a/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/ExampleToCopyUnitTest.java b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/ExampleToCopyUnitTest.java
new file mode 100644
index 0000000..6f8a678
--- /dev/null
+++ b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/ExampleToCopyUnitTest.java
@@ -0,0 +1,239 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils;
+
+
+// the imports for unit testing.
+
+
+import htsjdk.samtools.reference.IndexedFastaSequenceFile;
+import htsjdk.samtools.SAMFileHeader;
+import htsjdk.samtools.SAMFileReader;
+import htsjdk.samtools.SAMRecord;
+import org.broadinstitute.gatk.utils.fasta.CachingIndexedFastaSequenceFile;
+import org.broadinstitute.gatk.utils.pileup.PileupElement;
+import org.broadinstitute.gatk.utils.pileup.ReadBackedPileup;
+import org.broadinstitute.gatk.utils.pileup.ReadBackedPileupImpl;
+import org.broadinstitute.gatk.utils.sam.ArtificialBAMBuilder;
+import org.broadinstitute.gatk.utils.sam.ArtificialSAMUtils;
+import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
+import htsjdk.variant.variantcontext.Allele;
+import htsjdk.variant.variantcontext.VariantContext;
+import htsjdk.variant.variantcontext.VariantContextBuilder;
+import org.testng.Assert;
+import org.testng.annotations.BeforeClass;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.util.*;
+
+public class ExampleToCopyUnitTest extends BaseTest {
+    // example genome loc parser for this test, can be deleted if you don't use the reference
+    private GenomeLocParser genomeLocParser;
+
+    // example fasta index file, can be deleted if you don't use the reference
+    private IndexedFastaSequenceFile seq;
+
+    @BeforeClass
+    public void setup() throws FileNotFoundException {
+        // sequence
+        seq = new CachingIndexedFastaSequenceFile(new File(b37KGReference));
+        genomeLocParser = new GenomeLocParser(seq);
+    }
+
+    /**
+     * Combinatorial unit test data provider example.
+     *
+     * Creates data for testMyData test function, containing two arguments, start and size at each value
+     *
+     * @return Object[][] for testng DataProvider
+     */
+    @DataProvider(name = "MyDataProvider")
+    public Object[][] makeMyDataProvider() {
+        List<Object[]> tests = new ArrayList<Object[]>();
+
+        // this functionality can be adapted to provide input data for whatever you might want in your data
+        for ( final int start : Arrays.asList(1, 10, 100) ) {
+            for ( final int size : Arrays.asList(1, 10, 100, 1000) ) {
+                tests.add(new Object[]{start, size});
+            }
+        }
+
+        return tests.toArray(new Object[][]{});
+    }
+
+    /**
+     * Example testng test using MyDataProvider
+     */
+    @Test(dataProvider = "MyDataProvider")
+    public void testMyData(final int start, final int size) {
+        // adaptor this code to do whatever testing you want given the arguments start and size
+        Assert.assertTrue(start >= 0);
+        Assert.assertTrue(size >= 0);
+    }
+
+    /**
+     * DataProvider example using a class-based data structure
+     */
+    private class MyDataProviderClass extends TestDataProvider {
+        private int start;
+        private int size;
+
+        private MyDataProviderClass(int start, int size) {
+            super(MyDataProviderClass.class);
+            this.start = start;
+            this.size = size;
+        }
+    }
+
+    @DataProvider(name = "MyClassBasedDataProvider")
+    public Object[][] makeMyDataProviderClass() {
+        // this functionality can be adapted to provide input data for whatever you might want in your data
+        for ( final int start : Arrays.asList(1, 10, 100) ) {
+            for ( final int size : Arrays.asList(1, 10, 100, 1000) ) {
+                new MyDataProviderClass(start, size);
+            }
+        }
+
+        return TestDataProvider.getTests(MyDataProviderClass.class);
+    }
+
+    /**
+     * Example testng test using MyClassBasedDataProvider
+     */
+    @Test(dataProvider = "MyClassBasedDataProvider")
+    public void testMyDataProviderClass(MyDataProviderClass testSpec) {
+        // adaptor this code to do whatever testing you want given the arguments start and size
+        Assert.assertTrue(testSpec.start >= 0);
+        Assert.assertTrue(testSpec.size >= 0);
+    }
+
+    /**
+     * A unit test that creates an artificial read for testing some code that uses reads
+     */
+    @Test()
+    public void testWithARead() {
+        final SAMFileHeader header = ArtificialSAMUtils.createArtificialSamHeader(seq.getSequenceDictionary());
+        final GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(header, "myRead", 0, 1, 10);
+        Assert.assertEquals(read.getReadLength(), 10);
+        // TODO -- add some tests here using read
+    }
+
+    /**
+     * A unit test that creates a GenomeLoc for testing
+     */
+    @Test()
+    public void testWithAGenomeLoc() {
+        final GenomeLoc loc = genomeLocParser.createGenomeLoc("1", 1, 10);
+        Assert.assertEquals(loc.size(), 10);
+        // TODO -- add some tests here using the loc
+    }
+
+    /**
+     * A unit test that creates an artificial read for testing some code that uses reads
+     *
+     * Note that effective creation of RBPs isn't so good.  If you need pileups of specific properties, you shoud
+     * look into building them yourself as in the example below
+     */
+    @Test()
+    public void testWithAPileup() {
+        final SAMFileHeader header = ArtificialSAMUtils.createArtificialSamHeader(seq.getSequenceDictionary());
+        final GenomeLoc myLocation = genomeLocParser.createGenomeLoc("1", 10);
+        final ReadBackedPileup pileup = ArtificialSAMUtils.createReadBackedPileup(header, myLocation, 10, 400, 10);
+        Assert.assertFalse(pileup.isEmpty());
+        // TODO -- add some tests here using pileup
+    }
+
+    /**
+     * A unit test that creates an artificial read for testing some code that uses reads
+     *
+     * Builds the pileup from scratch to have specific properties
+     */
+    @Test()
+    public void testBuildingAPileupWithSpecificProperties() {
+        final SAMFileHeader header = ArtificialSAMUtils.createArtificialSamHeader(seq.getSequenceDictionary());
+        final GenomeLoc myLocation = genomeLocParser.createGenomeLoc("1", 10);
+
+        final int pileupSize = 100;
+        final int readLength = 10;
+        final List<GATKSAMRecord> reads = new LinkedList<GATKSAMRecord>();
+        for ( int i = 0; i < pileupSize; i++ ) {
+            final GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(header, "myRead" + i, 0, 1, readLength);
+            final byte[] bases = Utils.dupBytes((byte)'A', readLength);
+            bases[0] = (byte)(i % 2 == 0 ? 'A' : 'C'); // every other base is a C
+
+            // set the read's bases and quals
+            read.setReadBases(bases);
+            read.setBaseQualities(Utils.dupBytes((byte)30, readLength));
+            reads.add(read);
+        }
+
+        // create a pileup with all reads having offset 0
+        final ReadBackedPileup pileup = new ReadBackedPileupImpl(myLocation, reads, 0);
+        // TODO -- add some tests here using pileup
+
+        // this code ensures that the pileup example is correct.  Can be deleted
+        Assert.assertEquals(pileup.getNumberOfElements(), pileupSize);
+        int nA = 0, nC = 0;
+        for ( final PileupElement p : pileup ) {
+            if ( p.getBase() == 'A' ) nA++;
+            if ( p.getBase() == 'C' ) nC++;
+        }
+        Assert.assertEquals(nA, pileupSize / 2);
+        Assert.assertEquals(nC, pileupSize / 2);
+
+    }
+
+    /**
+     * A unit test that creates an artificial read for testing some code that uses reads
+     */
+    @Test()
+    public void testWithBAMFile() {
+        // create a fake BAM file, and iterate through it
+        final ArtificialBAMBuilder bamBuilder = new ArtificialBAMBuilder(seq, 20, 10);
+        final File bam = bamBuilder.makeTemporarilyBAMFile();
+        final SAMFileReader reader = new SAMFileReader(bam);
+
+        final Iterator<SAMRecord> bamIt = reader.iterator();
+        while ( bamIt.hasNext() ) {
+            final SAMRecord read = bamIt.next(); // all reads are actually GATKSAMRecords
+            // TODO -- add some tests that use reads from a BAM
+        }
+    }
+
+    /**
+     * Test code that creates VariantContexts
+     */
+    @Test()
+    public void testWithVariantContext() throws Exception {
+        final List<Allele> alleles = Arrays.asList(Allele.create("A", true), Allele.create("C"));
+        final VariantContext vc = new VariantContextBuilder("test", "1", 10, 10, alleles).make();
+        Assert.assertTrue(vc.getAlleles().size() >= 0);
+        // TODO -- add some tests that use VariantContext
+    }
+}
\ No newline at end of file
diff --git a/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/GATKTextReporter.java b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/GATKTextReporter.java
new file mode 100644
index 0000000..69e66fb
--- /dev/null
+++ b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/GATKTextReporter.java
@@ -0,0 +1,41 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils;
+
+import org.testng.reporters.TextReporter;
+
+/**
+ * HACK: Create a variant of the TestNG TextReporter that can be run with no
+ *       arguments, and can therefore be added to the TestNG listener list.
+ *
+ * @author hanna
+ * @version 0.1
+ */
public class GATKTextReporter extends TextReporter {
    // No-arg constructor so TestNG can instantiate this reporter as a listener;
    // delegates to TextReporter with a fixed suite name and verbosity level 2.
    public GATKTextReporter() {
        super("GATK test suite",2);
    }
}
diff --git a/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/GenomeLocParserBenchmark.java b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/GenomeLocParserBenchmark.java
new file mode 100644
index 0000000..402e3ed
--- /dev/null
+++ b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/GenomeLocParserBenchmark.java
@@ -0,0 +1,81 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils;
+
+import com.google.caliper.Param;
+import com.google.caliper.SimpleBenchmark;
+import htsjdk.samtools.reference.IndexedFastaSequenceFile;
+import org.broadinstitute.gatk.utils.fasta.CachingIndexedFastaSequenceFile;
+
+import java.io.File;
+
/**
 * Caliper microbenchmark of genome loc parser.
 *
 * NOTE(review): the actual benchmark bodies are commented out below and reference
 * a GenomeLocParserOriginal class not visible here -- presumably kept for historical
 * comparison; confirm before re-enabling.
 */
public class GenomeLocParserBenchmark extends SimpleBenchmark {
    private IndexedFastaSequenceFile seq;
    // iteration count used only by the commented-out benchmark bodies below
    private final int ITERATIONS = 1000000;

    @Param({"NEW", "NONE"})
    GenomeLocParser.ValidationLevel validationLevel; // set automatically by framework

    @Param({"true", "false"})
    boolean useContigIndex; // set automatically by framework

    // NOTE(review): hard-coded developer-specific absolute path -- this benchmark
    // can only run on a machine with this exact reference file; confirm before use.
    @Override protected void setUp() throws Exception {
        seq = new CachingIndexedFastaSequenceFile(new File("/Users/depristo/Desktop/broadLocal/localData/human_g1k_v37.fasta"));
    }
//
//    public void timeSequentialCreationFromGenomeLoc(int rep) {
//        final GenomeLocParser genomeLocParser = new GenomeLocParser(seq.getSequenceDictionary(), validationLevel);
//        GenomeLoc last = genomeLocParser.createGenomeLoc("1", 1, 1);
//        for ( int i = 0; i < rep; i++ ) {
//            for ( int j = 1; j < ITERATIONS; j++ ) {
//                if ( useContigIndex )
//                    last = genomeLocParser.createGenomeLoc(last.getContig(), last.getContigIndex(), last.getStart() + 1);
//                else
//                    last = genomeLocParser.createGenomeLoc(last.getContig(), last.getStart() + 1);
//            }
//        }
//    }
//
//    public void timeSequentialCreationFromGenomeLocOriginal(int rep) {
//        final GenomeLocParserOriginal genomeLocParser = new GenomeLocParserOriginal(seq.getSequenceDictionary());
//        GenomeLoc last = genomeLocParser.createGenomeLoc("1", 1, 1);
//        for ( int i = 0; i < rep; i++ ) {
//            for ( int j = 1; j < ITERATIONS; j++ ) {
//                if ( useContigIndex )
//                    last = genomeLocParser.createGenomeLoc(last.getContig(), last.getContigIndex(), last.getStart() + 1);
//                else
//                    last = genomeLocParser.createGenomeLoc(last.getContig(), last.getStart() + 1);
//            }
//        }
//    }

    // Entry point for running the benchmark under the Caliper runner.
    public static void main(String[] args) {
        com.google.caliper.Runner.main(GenomeLocParserBenchmark.class, args);
    }
}
diff --git a/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/GenomeLocParserUnitTest.java b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/GenomeLocParserUnitTest.java
new file mode 100644
index 0000000..2f49bbd
--- /dev/null
+++ b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/GenomeLocParserUnitTest.java
@@ -0,0 +1,509 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils;
+
+
+import htsjdk.samtools.SAMFileHeader;
+import htsjdk.samtools.SAMSequenceDictionary;
+import htsjdk.samtools.SAMSequenceRecord;
+import htsjdk.tribble.Feature;
+import htsjdk.tribble.SimpleFeature;
+import org.broadinstitute.gatk.utils.BaseTest;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+import org.broadinstitute.gatk.utils.exceptions.UserException;
+import org.broadinstitute.gatk.utils.fasta.CachingIndexedFastaSequenceFile;
+import org.broadinstitute.gatk.utils.sam.ArtificialSAMUtils;
+import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
+import htsjdk.variant.variantcontext.Allele;
+import htsjdk.variant.variantcontext.VariantContext;
+import htsjdk.variant.variantcontext.VariantContextBuilder;
+import org.testng.Assert;
+import org.testng.annotations.BeforeClass;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.util.Arrays;
+import java.util.LinkedList;
+import java.util.List;
+
+import static org.testng.Assert.assertEquals;
+import static org.testng.Assert.assertTrue;
+
+/**
+ * @author aaron
+ *         <p/>
+ *         Class GenomeLocParserUnitTest
+ *         <p/>
+ *         Test out the functionality of the new genome loc parser
+ */
+public class GenomeLocParserUnitTest extends BaseTest {
+    private GenomeLocParser genomeLocParser;
+    private SAMFileHeader header;
+
+    @BeforeClass
+    public void init() {
+        header = ArtificialSAMUtils.createArtificialSamHeader(1, 1, 10);
+        genomeLocParser = new GenomeLocParser(header.getSequenceDictionary());
+    }
+
+    @Test(expectedExceptions=UserException.MalformedGenomeLoc.class)
+    public void testGetContigIndex() {
+        assertEquals(genomeLocParser.getContigIndex("blah"), -1); // should not be in the reference
+    }
+
+    @Test
+    public void testGetContigIndexValid() {
+        assertEquals(genomeLocParser.getContigIndex("chr1"), 0); // should be in the reference
+    }
+
+    @Test(expectedExceptions=UserException.class)
+    public void testGetContigInfoUnknownContig1() {
+        assertEquals(null, genomeLocParser.getContigInfo("blah")); // should *not* be in the reference
+    }
+
+    @Test(expectedExceptions=UserException.class)
+    public void testGetContigInfoUnknownContig2() {
+        assertEquals(null, genomeLocParser.getContigInfo(null)); // should *not* be in the reference
+    }
+
+    @Test()
+    public void testHasContigInfoUnknownContig1() {
+        assertEquals(false, genomeLocParser.contigIsInDictionary("blah")); // should *not* be in the reference
+    }
+
+    @Test()
+    public void testHasContigInfoUnknownContig2() {
+        assertEquals(false, genomeLocParser.contigIsInDictionary(null)); // should *not* be in the reference
+    }
+
+    @Test()
+    public void testHasContigInfoKnownContig() {
+        assertEquals(true, genomeLocParser.contigIsInDictionary("chr1")); // should be in the reference
+    }
+
+    @Test
+    public void testGetContigInfoKnownContig() {
+        assertEquals(0, "chr1".compareTo(genomeLocParser.getContigInfo("chr1").getSequenceName())); // should be in the reference
+    }
+
+    @Test(expectedExceptions=ReviewedGATKException.class)
+    public void testParseBadString() {
+        genomeLocParser.parseGenomeLoc("Bad:0-1");
+    }
+
+    @Test
+    public void testContigHasColon() {
+        SAMFileHeader header = new SAMFileHeader();
+        header.setSortOrder(htsjdk.samtools.SAMFileHeader.SortOrder.coordinate);
+        SAMSequenceDictionary dict = new SAMSequenceDictionary();
+        SAMSequenceRecord rec = new SAMSequenceRecord("c:h:r1", 10);
+        rec.setSequenceLength(10);
+        dict.addSequence(rec);
+        header.setSequenceDictionary(dict);
+
+        final GenomeLocParser myGenomeLocParser = new GenomeLocParser(header.getSequenceDictionary());
+        GenomeLoc loc = myGenomeLocParser.parseGenomeLoc("c:h:r1:4-5");
+        assertEquals(0, loc.getContigIndex());
+        assertEquals(loc.getStart(), 4);
+        assertEquals(loc.getStop(), 5);
+    }
+
+    @Test
+    public void testParseGoodString() {
+        GenomeLoc loc = genomeLocParser.parseGenomeLoc("chr1:1-10");
+        assertEquals(0, loc.getContigIndex());
+        assertEquals(loc.getStop(), 10);
+        assertEquals(loc.getStart(), 1);
+    }
+
+    @Test
+    public void testCreateGenomeLoc1() {
+        GenomeLoc loc = genomeLocParser.createGenomeLoc("chr1", 1, 100);
+        assertEquals(0, loc.getContigIndex());
+        assertEquals(loc.getStop(), 100);
+        assertEquals(loc.getStart(), 1);
+    }
+
+    @Test
+    public void testCreateGenomeLoc1point5() { // in honor of VAAL!
+        GenomeLoc loc = genomeLocParser.parseGenomeLoc("chr1:1");
+        assertEquals(0, loc.getContigIndex());
+        assertEquals(loc.getStop(), 1);
+        assertEquals(loc.getStart(), 1);
+    }
+
+    @Test
+    public void testCreateGenomeLoc2() {
+        GenomeLoc loc = genomeLocParser.createGenomeLoc("chr1", 1, 100);
+        assertEquals("chr1", loc.getContig());
+        assertEquals(loc.getStop(), 100);
+        assertEquals(loc.getStart(), 1);
+    }
+
+    @Test
+    public void testCreateGenomeLoc3() {
+        GenomeLoc loc = genomeLocParser.createGenomeLoc("chr1", 1);
+        assertEquals("chr1", loc.getContig());
+        assertEquals(loc.getStop(), 1);
+        assertEquals(loc.getStart(), 1);
+    }
+
+    @Test
+    public void testCreateGenomeLoc4() {
+        GenomeLoc loc = genomeLocParser.createGenomeLoc("chr1", 1);
+        assertEquals(0, loc.getContigIndex());
+        assertEquals(loc.getStop(), 1);
+        assertEquals(loc.getStart(), 1);
+    }
+
+    @Test
+    public void testCreateGenomeLoc5() {
+        GenomeLoc loc = genomeLocParser.createGenomeLoc("chr1", 1, 100);
+        GenomeLoc copy = genomeLocParser.createGenomeLoc(loc.getContig(),loc.getStart(),loc.getStop());
+        assertEquals(0, copy.getContigIndex());
+        assertEquals(copy.getStop(), 100);
+        assertEquals(copy.getStart(), 1);
+    }
+
+    @Test
+    public void testGenomeLocPlusSign() {
+        GenomeLoc loc = genomeLocParser.parseGenomeLoc("chr1:1+");
+        assertEquals(loc.getContigIndex(), 0);
+        assertEquals(loc.getStop(), 10); // the size
+        assertEquals(loc.getStart(), 1);
+    }
+
+    @Test
+    public void testGenomeLocParseOnlyChrome() {
+        GenomeLoc loc = genomeLocParser.parseGenomeLoc("chr1");
+        assertEquals(loc.getContigIndex(), 0);
+        assertEquals(loc.getStop(), 10); // the size
+        assertEquals(loc.getStart(), 1);
+    }
+
+    @Test(expectedExceptions=ReviewedGATKException.class)
+    public void testGenomeLocParseOnlyBadChrome() {
+        GenomeLoc loc = genomeLocParser.parseGenomeLoc("chr12");
+        assertEquals(loc.getContigIndex(), 0);
+        assertEquals(loc.getStop(), 10); // the size
+        assertEquals(loc.getStart(), 1);
+    }
+
+    @Test(expectedExceptions=ReviewedGATKException.class)
+    public void testGenomeLocBad() {
+        GenomeLoc loc = genomeLocParser.parseGenomeLoc("chr1:1-");
+        assertEquals(loc.getContigIndex(), 0);
+        assertEquals(loc.getStop(), 10); // the size
+        assertEquals(loc.getStart(), 1);
+    }
+
+    @Test(expectedExceptions=UserException.class)
+    public void testGenomeLocBad2() {
+        GenomeLoc loc = genomeLocParser.parseGenomeLoc("chr1:1-500-0");
+        assertEquals(loc.getContigIndex(), 0);
+        assertEquals(loc.getStop(), 10); // the size
+        assertEquals(loc.getStart(), 1);
+    }
+
+    @Test(expectedExceptions=UserException.class)
+    public void testGenomeLocBad3() {
+        GenomeLoc loc = genomeLocParser.parseGenomeLoc("chr1:1--0");
+        assertEquals(loc.getContigIndex(), 0);
+        assertEquals(loc.getStop(), 10); // the size
+        assertEquals(loc.getStart(), 1);
+    }
+
+    // test out the validating methods
+    @Test
+    public void testValidationOfGenomeLocs() {
+        assertTrue(genomeLocParser.isValidGenomeLoc("chr1",1,1));
+        assertTrue(!genomeLocParser.isValidGenomeLoc("chr2",1,1)); // shouldn't have an entry
+        assertTrue(!genomeLocParser.isValidGenomeLoc("chr1",1,11)); // past the end of the contig
+        assertTrue(!genomeLocParser.isValidGenomeLoc("chr1",-1,10)); // bad start
+        assertTrue(!genomeLocParser.isValidGenomeLoc("chr1",1,-2)); // bad stop
+        assertTrue( genomeLocParser.isValidGenomeLoc("chr1",-1,2, false)); // bad start accepted when strict validation is off
+        assertTrue(!genomeLocParser.isValidGenomeLoc("chr1",10,11)); // bad start, past end
+        assertTrue( genomeLocParser.isValidGenomeLoc("chr1",10,11, false)); // bad start, past end
+        assertTrue(!genomeLocParser.isValidGenomeLoc("chr1",2,1)); // stop < start
+    }
+
+    @Test(expectedExceptions = ReviewedGATKException.class)
+    public void testValidateGenomeLoc() {
+        // bad contig index
+        genomeLocParser.validateGenomeLoc("chr1", 1, 1, 2, false);
+    }
+
+    private static class FlankingGenomeLocTestData extends TestDataProvider {
+        final GenomeLocParser parser;
+        final int basePairs;
+        final GenomeLoc original, flankStart, flankStop;
+
+        private FlankingGenomeLocTestData(String name, GenomeLocParser parser, int basePairs, String original, String flankStart, String flankStop) {
+            super(FlankingGenomeLocTestData.class, name);
+            this.parser = parser;
+            this.basePairs = basePairs;
+            this.original = parse(parser, original);
+            this.flankStart = flankStart == null ? null : parse(parser, flankStart);
+            this.flankStop = flankStop == null ? null : parse(parser, flankStop);
+        }
+
+        private static GenomeLoc parse(GenomeLocParser parser, String str) {
+            return "unmapped".equals(str) ? GenomeLoc.UNMAPPED : parser.parseGenomeLoc(str);
+        }
+    }
+
+    @DataProvider(name = "flankingGenomeLocs")
+    public Object[][] getFlankingGenomeLocs() {
+        int contigLength = 10000;
+        SAMFileHeader header = ArtificialSAMUtils.createArtificialSamHeader(1, 1, contigLength);
+        GenomeLocParser parser = new GenomeLocParser(header.getSequenceDictionary());
+
+        new FlankingGenomeLocTestData("atStartBase1", parser, 1,
+                "chr1:1", null, "chr1:2");
+
+        new FlankingGenomeLocTestData("atStartBase50", parser, 50,
+                "chr1:1", null, "chr1:2-51");
+
+        new FlankingGenomeLocTestData("atStartRange50", parser, 50,
+                "chr1:1-10", null, "chr1:11-60");
+
+        new FlankingGenomeLocTestData("atEndBase1", parser, 1,
+                "chr1:" + contigLength, "chr1:" + (contigLength - 1), null);
+
+        new FlankingGenomeLocTestData("atEndBase50", parser, 50,
+                "chr1:" + contigLength, String.format("chr1:%d-%d", contigLength - 50, contigLength - 1), null);
+
+        new FlankingGenomeLocTestData("atEndRange50", parser, 50,
+                String.format("chr1:%d-%d", contigLength - 10, contigLength),
+                String.format("chr1:%d-%d", contigLength - 60, contigLength - 11),
+                null);
+
+        new FlankingGenomeLocTestData("nearStartBase1", parser, 1,
+                "chr1:2", "chr1:1", "chr1:3");
+
+        new FlankingGenomeLocTestData("nearStartRange50", parser, 50,
+                "chr1:21-30", "chr1:1-20", "chr1:31-80");
+
+        new FlankingGenomeLocTestData("nearEndBase1", parser, 1,
+                "chr1:" + (contigLength - 1), "chr1:" + (contigLength - 2), "chr1:" + contigLength);
+
+        new FlankingGenomeLocTestData("nearEndRange50", parser, 50,
+                String.format("chr1:%d-%d", contigLength - 30, contigLength - 21),
+                String.format("chr1:%d-%d", contigLength - 80, contigLength - 31),
+                String.format("chr1:%d-%d", contigLength - 20, contigLength));
+
+        new FlankingGenomeLocTestData("beyondStartBase1", parser, 1,
+                "chr1:3", "chr1:2", "chr1:4");
+
+        new FlankingGenomeLocTestData("beyondStartRange50", parser, 50,
+                "chr1:101-200", "chr1:51-100", "chr1:201-250");
+
+        new FlankingGenomeLocTestData("beyondEndBase1", parser, 1,
+                "chr1:" + (contigLength - 3),
+                "chr1:" + (contigLength - 4),
+                "chr1:" + (contigLength - 2));
+
+        new FlankingGenomeLocTestData("beyondEndRange50", parser, 50,
+                String.format("chr1:%d-%d", contigLength - 200, contigLength - 101),
+                String.format("chr1:%d-%d", contigLength - 250, contigLength - 201),
+                String.format("chr1:%d-%d", contigLength - 100, contigLength - 51));
+
+        new FlankingGenomeLocTestData("unmapped", parser, 50,
+                "unmapped", null, null);
+
+        new FlankingGenomeLocTestData("fullContig", parser, 50,
+                "chr1", null, null);
+
+        return FlankingGenomeLocTestData.getTests(FlankingGenomeLocTestData.class);
+    }
+
+    @Test(dataProvider = "flankingGenomeLocs")
+    public void testCreateGenomeLocAtStart(FlankingGenomeLocTestData data) {
+        GenomeLoc actual = data.parser.createGenomeLocAtStart(data.original, data.basePairs);
+        String description = String.format("%n      name: %s%n  original: %s%n    actual: %s%n  expected: %s%n",
+                data.toString(), data.original, actual, data.flankStart);
+        assertEquals(actual, data.flankStart, description);
+    }
+
+    @Test(dataProvider = "flankingGenomeLocs")
+    public void testCreateGenomeLocAtStop(FlankingGenomeLocTestData data) {
+        GenomeLoc actual = data.parser.createGenomeLocAtStop(data.original, data.basePairs);
+        String description = String.format("%n      name: %s%n  original: %s%n    actual: %s%n  expected: %s%n",
+                data.toString(), data.original, actual, data.flankStop);
+        assertEquals(actual, data.flankStop, description);
+    }
+
+    @DataProvider(name = "parseGenomeLoc")
+    public Object[][] makeParsingTest() {
+        final List<Object[]> tests = new LinkedList<Object[]>();
+
+        tests.add(new Object[]{ "chr1:10", "chr1", 10 });
+        tests.add(new Object[]{ "chr1:100", "chr1", 100 });
+        tests.add(new Object[]{ "chr1:1000", "chr1", 1000 });
+        tests.add(new Object[]{ "chr1:1,000", "chr1", 1000 });
+        tests.add(new Object[]{ "chr1:10000", "chr1", 10000 });
+        tests.add(new Object[]{ "chr1:10,000", "chr1", 10000 });
+        tests.add(new Object[]{ "chr1:100000", "chr1", 100000 });
+        tests.add(new Object[]{ "chr1:100,000", "chr1", 100000 });
+        tests.add(new Object[]{ "chr1:1000000", "chr1", 1000000 });
+        tests.add(new Object[]{ "chr1:1,000,000", "chr1", 1000000 });
+        tests.add(new Object[]{ "chr1:1000,000", "chr1", 1000000 });
+        tests.add(new Object[]{ "chr1:1,000000", "chr1", 1000000 });
+
+        return tests.toArray(new Object[][]{});
+    }
+
+    @Test( dataProvider = "parseGenomeLoc")
+    public void testParsingPositions(final String string, final String contig, final int start) {
+        SAMFileHeader header = ArtificialSAMUtils.createArtificialSamHeader(1, 1, 10000000);
+        GenomeLocParser genomeLocParser = new GenomeLocParser(header.getSequenceDictionary());
+        final GenomeLoc loc = genomeLocParser.parseGenomeLoc(string);
+        Assert.assertEquals(loc.getContig(), contig);
+        Assert.assertEquals(loc.getStart(), start);
+        Assert.assertEquals(loc.getStop(), start);
+    }
+
+    @Test( )
+    public void testCreationFromSAMRecord() {
+        final GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(header, "foo", 0, 1, 5);
+        final GenomeLoc loc = genomeLocParser.createGenomeLoc(read);
+        Assert.assertEquals(loc.getContig(), read.getReferenceName());
+        Assert.assertEquals(loc.getContigIndex(), (int)read.getReferenceIndex());
+        Assert.assertEquals(loc.getStart(), read.getAlignmentStart());
+        Assert.assertEquals(loc.getStop(), read.getAlignmentEnd());
+    }
+
+    @Test( )
+    public void testCreationFromSAMRecordUnmapped() {
+        final GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(header, "foo", 0, 1, 5);
+        read.setReadUnmappedFlag(true);
+        read.setReferenceIndex(-1);
+        final GenomeLoc loc = genomeLocParser.createGenomeLoc(read);
+        Assert.assertTrue(loc.isUnmapped());
+    }
+
+    @Test( )
+    public void testCreationFromSAMRecordUnmappedButOnGenome() {
+        final GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(header, "foo", 0, 1, 5);
+        read.setReadUnmappedFlag(true);
+        read.setCigarString("*");
+        final GenomeLoc loc = genomeLocParser.createGenomeLoc(read);
+        Assert.assertEquals(loc.getContig(), read.getReferenceName());
+        Assert.assertEquals(loc.getContigIndex(), (int)read.getReferenceIndex());
+        Assert.assertEquals(loc.getStart(), read.getAlignmentStart());
+        Assert.assertEquals(loc.getStop(), read.getAlignmentStart());
+    }
+
+    @Test
+    public void testCreationFromFeature() {
+        final Feature feature = new SimpleFeature("chr1", 1, 5);
+        final GenomeLoc loc = genomeLocParser.createGenomeLoc(feature);
+        Assert.assertEquals(loc.getContig(), feature.getChr());
+        Assert.assertEquals(loc.getStart(), feature.getStart());
+        Assert.assertEquals(loc.getStop(), feature.getEnd());
+    }
+
+    @Test
+    public void testCreationFromVariantContext() {
+        final VariantContext feature = new VariantContextBuilder("x", "chr1", 1, 5, Arrays.asList(Allele.create("AAAAA", true))).make();
+        final GenomeLoc loc = genomeLocParser.createGenomeLoc(feature);
+        Assert.assertEquals(loc.getContig(), feature.getChr());
+        Assert.assertEquals(loc.getStart(), feature.getStart());
+        Assert.assertEquals(loc.getStop(), feature.getEnd());
+    }
+
+    @Test
+    public void testcreateGenomeLocOnContig() throws FileNotFoundException {
+        final CachingIndexedFastaSequenceFile seq = new CachingIndexedFastaSequenceFile(new File(b37KGReference));
+        final SAMSequenceDictionary dict = seq.getSequenceDictionary();
+        final GenomeLocParser genomeLocParser = new GenomeLocParser(dict);
+
+        for ( final SAMSequenceRecord rec : dict.getSequences() ) {
+            final GenomeLoc loc = genomeLocParser.createOverEntireContig(rec.getSequenceName());
+            Assert.assertEquals(loc.getContig(), rec.getSequenceName());
+            Assert.assertEquals(loc.getStart(), 1);
+            Assert.assertEquals(loc.getStop(), rec.getSequenceLength());
+        }
+    }
+
+    @DataProvider(name = "GenomeLocOnContig")
+    public Object[][] makeGenomeLocOnContig() {
+        final List<Object[]> tests = new LinkedList<Object[]>();
+
+        final int contigLength = header.getSequence(0).getSequenceLength();
+        for ( int start = -10; start < contigLength + 10; start++ ) {
+            for ( final int len : Arrays.asList(1, 10, 20) ) {
+                tests.add(new Object[]{ "chr1", start, start + len });
+            }
+        }
+
+        return tests.toArray(new Object[][]{});
+    }
+
+    @Test( dataProvider = "GenomeLocOnContig")
+    public void testGenomeLocOnContig(final String contig, final int start, final int stop) {
+        final int contigLength = header.getSequence(0).getSequenceLength();
+        final GenomeLoc loc = genomeLocParser.createGenomeLocOnContig(contig, start, stop);
+
+        if ( stop < 1 || start > contigLength )
+            Assert.assertNull(loc, "GenomeLoc should be null if the start/stops are not meaningful");
+        else {
+            Assert.assertNotNull(loc);
+            Assert.assertEquals(loc.getContig(), contig);
+            Assert.assertEquals(loc.getStart(), Math.max(start, 1));
+            Assert.assertEquals(loc.getStop(), Math.min(stop, contigLength));
+        }
+    }
+
+    @DataProvider(name = "GenomeLocPadding")
+    public Object[][] makeGenomeLocPadding() {
+        final List<Object[]> tests = new LinkedList<Object[]>();
+
+        final int contigLength = header.getSequence(0).getSequenceLength();
+        for ( int pad = 0; pad < contigLength + 1; pad++) {
+            for ( int start = 1; start < contigLength; start++ ) {
+                for ( int stop = start; stop < contigLength; stop++ ) {
+                    tests.add(new Object[]{ genomeLocParser.createGenomeLoc("chr1", start, stop), pad});
+                }
+            }
+        }
+
+        return tests.toArray(new Object[][]{});
+    }
+
+    @Test( dataProvider = "GenomeLocPadding")
+    public void testGenomeLocPadding(final GenomeLoc input, final int pad) {
+        final int contigLength = header.getSequence(0).getSequenceLength();
+        final GenomeLoc padded = genomeLocParser.createPaddedGenomeLoc(input, pad);
+
+        Assert.assertNotNull(padded);
+        Assert.assertEquals(padded.getContig(), input.getContig());
+        Assert.assertEquals(padded.getStart(), Math.max(input.getStart() - pad, 1));
+        Assert.assertEquals(padded.getStop(), Math.min(input.getStop() + pad, contigLength));
+    }
+}
diff --git a/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/GenomeLocSortedSetUnitTest.java b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/GenomeLocSortedSetUnitTest.java
new file mode 100644
index 0000000..cc6315d
--- /dev/null
+++ b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/GenomeLocSortedSetUnitTest.java
@@ -0,0 +1,405 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils;
+
+import htsjdk.samtools.SAMFileHeader;
+import org.broadinstitute.gatk.utils.BaseTest;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+import org.broadinstitute.gatk.utils.fasta.CachingIndexedFastaSequenceFile;
+import org.broadinstitute.gatk.utils.sam.ArtificialSAMUtils;
+
+import static org.testng.Assert.assertEquals;
+import static org.testng.Assert.assertFalse;
+import static org.testng.Assert.assertTrue;
+
+import org.testng.Assert;
+import org.testng.annotations.BeforeClass;
+import org.testng.annotations.BeforeMethod;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+import java.io.File;
+import java.util.*;
+
+/**
+ *
+ * User: aaron
+ * Date: May 22, 2009
+ * Time: 2:14:07 PM
+ *
+ * The Broad Institute
+ * SOFTWARE COPYRIGHT NOTICE AGREEMENT 
+ * This software and its documentation are copyright 2009 by the
+ * Broad Institute/Massachusetts Institute of Technology. All rights are reserved.
+ *
+ * This software is supplied without any warranty or guaranteed support whatsoever. Neither
+ * the Broad Institute nor MIT can be responsible for its use, misuse, or functionality.
+ *
+ */
+
+
+/**
+ * @author aaron
+ * @version 1.0
+ * <p/>
+ * Class GenomeLocSetTest
+ * <p/>
+ * This tests the functions of the GenomeLocSet
+ */
+public class GenomeLocSortedSetUnitTest extends BaseTest {
+
+    private GenomeLocSortedSet mSortedSet = null;
+    private SAMFileHeader header = ArtificialSAMUtils.createArtificialSamHeader(NUMBER_OF_CHROMOSOMES, STARTING_CHROMOSOME, CHROMOSOME_SIZE);
+    private static final int NUMBER_OF_CHROMOSOMES = 5;
+    private static final int STARTING_CHROMOSOME = 1;
+    private static final int CHROMOSOME_SIZE = 1000;
+
+    private GenomeLocParser genomeLocParser;
+    private String contigOneName;
+
+    @BeforeClass
+    public void setup() {
+        genomeLocParser = new GenomeLocParser(header.getSequenceDictionary());
+        contigOneName = header.getSequenceDictionary().getSequence(1).getSequenceName();
+    }
+
+    @BeforeMethod
+    public void initializeSortedSet() {
+        mSortedSet = new GenomeLocSortedSet(genomeLocParser);        
+    }
+
+    @Test
+    public void testAdd() {
+        GenomeLoc g = genomeLocParser.createGenomeLoc(contigOneName, 0, 0);
+        assertTrue(mSortedSet.size() == 0);
+        mSortedSet.add(g);
+        assertTrue(mSortedSet.size() == 1);
+    }
+
+    @Test
+    public void testRemove() {
+        assertTrue(mSortedSet.size() == 0);
+        GenomeLoc g = genomeLocParser.createGenomeLoc(contigOneName, 0, 0);
+        mSortedSet.add(g);
+        assertTrue(mSortedSet.size() == 1);
+        mSortedSet.remove(g);
+        assertTrue(mSortedSet.size() == 0);
+    }
+
+    @Test
+    public void addRegion() {
+        assertTrue(mSortedSet.size() == 0);
+        GenomeLoc g = genomeLocParser.createGenomeLoc(contigOneName, 1, 50);
+        mSortedSet.add(g);
+        GenomeLoc f = genomeLocParser.createGenomeLoc(contigOneName, 30, 80);
+        mSortedSet.addRegion(f);
+        assertTrue(mSortedSet.size() == 1);
+    }
+
+    @Test
+    public void addRegionsOutOfOrder() {
+        final String contigTwoName = header.getSequenceDictionary().getSequence(2).getSequenceName();
+        assertTrue(mSortedSet.size() == 0);
+        GenomeLoc g = genomeLocParser.createGenomeLoc(contigTwoName, 1, 50);
+        mSortedSet.add(g);
+        GenomeLoc f = genomeLocParser.createGenomeLoc(contigOneName, 30, 80);
+        mSortedSet.addRegion(f);
+        assertTrue(mSortedSet.size() == 2);
+        assertTrue(mSortedSet.toList().get(0).getContig().equals(contigOneName));
+        assertTrue(mSortedSet.toList().get(1).getContig().equals(contigTwoName));
+    }
+
+    @Test(expectedExceptions = IllegalArgumentException.class)
+    public void addThrowsException() {
+        assertTrue(mSortedSet.size() == 0);
+        GenomeLoc g = genomeLocParser.createGenomeLoc(contigOneName, 1, 50);
+        mSortedSet.add(g);
+        GenomeLoc f = genomeLocParser.createGenomeLoc(contigOneName, 30, 80);
+        mSortedSet.add(f);
+    }
+
+    @Test(expectedExceptions=IllegalArgumentException.class)
+    public void testAddDuplicate() {
+        assertTrue(mSortedSet.size() == 0);
+        GenomeLoc g = genomeLocParser.createGenomeLoc(contigOneName, 0, 0);
+        mSortedSet.add(g);
+        assertTrue(mSortedSet.size() == 1);
+        mSortedSet.add(g);
+    }
+
+    @Test
+    public void mergingOverlappingBelow() {
+        GenomeLoc g = genomeLocParser.createGenomeLoc(contigOneName, 0, 50);
+        GenomeLoc e = genomeLocParser.createGenomeLoc(contigOneName, 49, 100);
+        assertTrue(mSortedSet.size() == 0);
+        mSortedSet.add(g);
+        assertTrue(mSortedSet.size() == 1);
+        mSortedSet.addRegion(e);
+        assertTrue(mSortedSet.size() == 1);
+        Iterator<GenomeLoc> iter = mSortedSet.iterator();
+        GenomeLoc loc = iter.next();
+        assertEquals(loc.getStart(), 0);
+        assertEquals(loc.getStop(), 100);
+        assertEquals(loc.getContigIndex(), 1);
+    }
+
+    @Test
+    public void overlap() {
+        for ( int i = 1; i < 6; i++ ) {
+            final int start = i * 10;
+            mSortedSet.add(genomeLocParser.createGenomeLoc(contigOneName, start, start + 1));
+        }
+
+        // test matches in and around interval
+        assertFalse(mSortedSet.overlaps(genomeLocParser.createGenomeLoc(contigOneName, 9, 9)));
+        assertTrue(mSortedSet.overlaps(genomeLocParser.createGenomeLoc(contigOneName, 10, 10)));
+        assertTrue(mSortedSet.overlaps(genomeLocParser.createGenomeLoc(contigOneName, 11, 11)));
+        assertFalse(mSortedSet.overlaps(genomeLocParser.createGenomeLoc(contigOneName, 12, 12)));
+
+        // test matches spanning intervals
+        assertTrue(mSortedSet.overlaps(genomeLocParser.createGenomeLoc(contigOneName, 14, 20)));
+        assertTrue(mSortedSet.overlaps(genomeLocParser.createGenomeLoc(contigOneName, 11, 15)));
+        assertTrue(mSortedSet.overlaps(genomeLocParser.createGenomeLoc(contigOneName, 30, 40)));
+        assertTrue(mSortedSet.overlaps(genomeLocParser.createGenomeLoc(contigOneName, 51, 53)));
+
+        // test miss
+        assertFalse(mSortedSet.overlaps(genomeLocParser.createGenomeLoc(contigOneName, 12, 19)));
+
+        // test exact match after miss
+        assertTrue(mSortedSet.overlaps(genomeLocParser.createGenomeLoc(contigOneName, 40, 41)));
+
+        // test matches at beginning of intervals
+        assertFalse(mSortedSet.overlaps(genomeLocParser.createGenomeLoc(contigOneName, 5, 6)));
+        assertTrue(mSortedSet.overlaps(genomeLocParser.createGenomeLoc(contigOneName, 0, 10)));
+
+        // test matches at end of intervals
+        assertFalse(mSortedSet.overlaps(genomeLocParser.createGenomeLoc(contigOneName, 52, 53)));
+        assertTrue(mSortedSet.overlaps(genomeLocParser.createGenomeLoc(contigOneName, 51, 53)));
+        assertFalse(mSortedSet.overlaps(genomeLocParser.createGenomeLoc(contigOneName, 52, 53)));
+    }
+
+    @Test
+    public void mergingOverlappingAbove() {
+        GenomeLoc e = genomeLocParser.createGenomeLoc(contigOneName, 0, 50);
+        GenomeLoc g = genomeLocParser.createGenomeLoc(contigOneName, 49, 100);
+        assertTrue(mSortedSet.size() == 0);
+        mSortedSet.add(g);
+        assertTrue(mSortedSet.size() == 1);
+        mSortedSet.addRegion(e);
+        assertTrue(mSortedSet.size() == 1);
+        Iterator<GenomeLoc> iter = mSortedSet.iterator();
+        GenomeLoc loc = iter.next();
+        assertEquals(loc.getStart(), 0);
+        assertEquals(loc.getStop(), 100);
+        assertEquals(loc.getContigIndex(), 1);
+    }
+
+    @Test
+    public void deleteAllByRegion() {
+        GenomeLoc e = genomeLocParser.createGenomeLoc(contigOneName, 1, 100);
+        mSortedSet.add(e);
+        for (int x = 1; x < 101; x++) {
+            GenomeLoc del = genomeLocParser.createGenomeLoc(contigOneName,x,x);
+            mSortedSet = mSortedSet.subtractRegions(new GenomeLocSortedSet(genomeLocParser,del));
+        }
+        assertTrue(mSortedSet.isEmpty());
+    }
+
+    @Test
+    public void deleteSomeByRegion() {
+        GenomeLoc e = genomeLocParser.createGenomeLoc(contigOneName, 1, 100);
+        mSortedSet.add(e);
+        for (int x = 1; x < 50; x++) {
+            GenomeLoc del = genomeLocParser.createGenomeLoc(contigOneName,x,x);
+            mSortedSet = mSortedSet.subtractRegions(new GenomeLocSortedSet(genomeLocParser,del));
+        }
+        assertTrue(!mSortedSet.isEmpty());
+        assertTrue(mSortedSet.size() == 1);
+        GenomeLoc loc = mSortedSet.iterator().next();        
+        assertTrue(loc.getStop() == 100);
+        assertTrue(loc.getStart() == 50);
+
+    }
+
+    @Test
+    public void deleteSuperRegion() {
+        GenomeLoc e = genomeLocParser.createGenomeLoc(contigOneName, 10, 20);
+        GenomeLoc g = genomeLocParser.createGenomeLoc(contigOneName, 70, 100);
+        mSortedSet.add(g);
+        mSortedSet.addRegion(e);
+        assertTrue(mSortedSet.size() == 2);
+        // now delete a region
+        GenomeLoc d = genomeLocParser.createGenomeLoc(contigOneName, 15, 75);
+        mSortedSet = mSortedSet.subtractRegions(new GenomeLocSortedSet(genomeLocParser,d));
+        Iterator<GenomeLoc> iter = mSortedSet.iterator();
+        GenomeLoc loc = iter.next();
+        assertTrue(loc.getStart() == 10);
+        assertTrue(loc.getStop() == 14);
+        assertTrue(loc.getContigIndex() == 1);
+
+        loc = iter.next();
+        assertTrue(loc.getStart() == 76);
+        assertTrue(loc.getStop() == 100);
+        assertTrue(loc.getContigIndex() == 1);
+    }
+
+    @Test
+    public void substractComplexExample() {
+        GenomeLoc e = genomeLocParser.createGenomeLoc(contigOneName, 1, 20);
+        mSortedSet.add(e);
+
+        GenomeLoc r1 = genomeLocParser.createGenomeLoc(contigOneName, 3, 5);
+        GenomeLoc r2 = genomeLocParser.createGenomeLoc(contigOneName, 10, 12);
+        GenomeLoc r3 = genomeLocParser.createGenomeLoc(contigOneName, 16, 18);
+        GenomeLocSortedSet toExclude = new GenomeLocSortedSet(genomeLocParser,Arrays.asList(r1, r2, r3));
+
+        GenomeLocSortedSet remaining = mSortedSet.subtractRegions(toExclude);
+//        logger.debug("Initial   " + mSortedSet);
+//        logger.debug("Exclude   " + toExclude);
+//        logger.debug("Remaining " + remaining);
+
+        assertEquals(mSortedSet.coveredSize(), 20);
+        assertEquals(toExclude.coveredSize(), 9);
+        assertEquals(remaining.coveredSize(), 11);
+
+        Iterator<GenomeLoc> it = remaining.iterator();
+        GenomeLoc p1 = it.next();
+        GenomeLoc p2 = it.next();
+        GenomeLoc p3 = it.next();
+        GenomeLoc p4 = it.next();
+
+        assertEquals(genomeLocParser.createGenomeLoc(contigOneName, 1, 2), p1);
+        assertEquals(genomeLocParser.createGenomeLoc(contigOneName, 6, 9), p2);
+        assertEquals(genomeLocParser.createGenomeLoc(contigOneName, 13, 15), p3);
+        assertEquals(genomeLocParser.createGenomeLoc(contigOneName, 19, 20), p4);
+    }
+
+    private void testSizeBeforeLocX(int pos, int size) {
+        GenomeLoc test = genomeLocParser.createGenomeLoc(contigOneName, pos, pos);
+        assertEquals(mSortedSet.sizeBeforeLoc(test), size, String.format("X pos=%d size=%d", pos, size));
+    }
+
+    @Test
+    public void testSizeBeforeLoc() {
+        GenomeLoc r1 = genomeLocParser.createGenomeLoc(contigOneName, 3, 5);
+        GenomeLoc r2 = genomeLocParser.createGenomeLoc(contigOneName, 10, 12);
+        GenomeLoc r3 = genomeLocParser.createGenomeLoc(contigOneName, 16, 18);
+        mSortedSet.addAll(Arrays.asList(r1,r2,r3));
+
+        testSizeBeforeLocX(2, 0);
+        testSizeBeforeLocX(3, 0);
+        testSizeBeforeLocX(4, 1);
+        testSizeBeforeLocX(5, 2);
+        testSizeBeforeLocX(6, 3);
+
+        testSizeBeforeLocX(10, 3);
+        testSizeBeforeLocX(11, 4);
+        testSizeBeforeLocX(12, 5);
+        testSizeBeforeLocX(13, 6);
+        testSizeBeforeLocX(15, 6);
+
+        testSizeBeforeLocX(16, 6);
+        testSizeBeforeLocX(17, 7);
+        testSizeBeforeLocX(18, 8);
+        testSizeBeforeLocX(19, 9);
+        testSizeBeforeLocX(50, 9);
+        testSizeBeforeLocX(50, (int)mSortedSet.coveredSize());
+    }
+
+
+    @Test
+    public void fromSequenceDictionary() {
+        mSortedSet = GenomeLocSortedSet.createSetFromSequenceDictionary(this.header.getSequenceDictionary());
+        // we should have sequence
+        assertTrue(mSortedSet.size() == GenomeLocSortedSetUnitTest.NUMBER_OF_CHROMOSOMES);
+        int seqNumber = 0;
+        for (GenomeLoc loc : mSortedSet) {
+            assertTrue(loc.getStart() == 1);
+            assertTrue(loc.getStop() == GenomeLocSortedSetUnitTest.CHROMOSOME_SIZE);
+            assertTrue(loc.getContigIndex() == seqNumber);
+            ++seqNumber;
+        }
+        assertTrue(seqNumber == GenomeLocSortedSetUnitTest.NUMBER_OF_CHROMOSOMES);
+    }
+
+    // -----------------------------------------------------------------------------------------------
+    //
+    // Test getOverlapping
+    //
+    // -----------------------------------------------------------------------------------------------
+
    /**
     * Builds test cases for getOverlapping(): a query region on contig "20"
     * (placed at the contig start, middle, and end) paired with sorted sets of
     * intervals that overlap it in various ways, optionally padded with
     * intervals on an earlier contig ("19") and a later contig ("21").
     */
    @DataProvider(name = "GetOverlapping")
    public Object[][] makeGetOverlappingTest() throws Exception {
        final GenomeLocParser genomeLocParser = new GenomeLocParser(new CachingIndexedFastaSequenceFile(new File(b37KGReference)));

        List<Object[]> tests = new ArrayList<Object[]>();

        // intervals on contigs before/after the query contig, to verify the
        // search ignores other contigs
        final GenomeLoc prev1 = genomeLocParser.createGenomeLoc("19", 1, 10);
        final GenomeLoc prev2 = genomeLocParser.createGenomeLoc("19", 20, 50);
        final GenomeLoc post1 = genomeLocParser.createGenomeLoc("21", 1, 10);
        final GenomeLoc post2 = genomeLocParser.createGenomeLoc("21", 20, 50);

        final int chr20Length = genomeLocParser.getContigs().getSequence("20").getSequenceLength();
        for ( final int regionStart : Arrays.asList(1, 10, chr20Length - 10, chr20Length) ) {
            for ( final int regionSize : Arrays.asList(1, 10, 100) ) {
                // candidate overlappers: identical, fully spanning, entering
                // from the left, strictly inside, and running off the right
                final GenomeLoc region = genomeLocParser.createGenomeLocOnContig("20", regionStart, regionStart + regionSize);
                final GenomeLoc spanning = genomeLocParser.createGenomeLocOnContig("20", regionStart - 10, region.getStop() + 10);
                final GenomeLoc before_into = genomeLocParser.createGenomeLocOnContig("20", regionStart - 10, regionStart + 1);
                final GenomeLoc middle = genomeLocParser.createGenomeLocOnContig("20", regionStart + 1, regionStart + 2);
                final GenomeLoc middle_past = genomeLocParser.createGenomeLocOnContig("20", region.getStop()-1, region.getStop()+10);

                // near the contig edges some of the above fall off the contig;
                // the null checks below drop those (presumably
                // createGenomeLocOnContig returns null off-contig — the guards
                // here imply it)
                final List<GenomeLoc> potentials = new LinkedList<GenomeLoc>();
                potentials.add(region);
                if ( spanning != null ) potentials.add(spanning);
                if ( before_into != null ) potentials.add(before_into);
                if ( middle != null ) potentials.add(middle);
                if ( middle_past != null ) potentials.add(middle_past);

                // every 1-, 2-, and 3-element combination of candidates, alone
                // and with the other-contig padding intervals mixed in
                for ( final int n : Arrays.asList(1, 2, 3) ) {
                    for ( final List<GenomeLoc> regions : Utils.makePermutations(potentials, n, false) ) {
                        tests.add(new Object[]{new GenomeLocSortedSet(genomeLocParser, regions), region});
                        tests.add(new Object[]{new GenomeLocSortedSet(genomeLocParser, Utils.append(regions, prev1)), region});
                        tests.add(new Object[]{new GenomeLocSortedSet(genomeLocParser, Utils.append(regions, prev1, prev2)), region});
                        tests.add(new Object[]{new GenomeLocSortedSet(genomeLocParser, Utils.append(regions, post1)), region});
                        tests.add(new Object[]{new GenomeLocSortedSet(genomeLocParser, Utils.append(regions, post1, post2)), region});
                        tests.add(new Object[]{new GenomeLocSortedSet(genomeLocParser, Utils.append(regions, prev1, post1)), region});
                        tests.add(new Object[]{new GenomeLocSortedSet(genomeLocParser, Utils.append(regions, prev1, prev2, post1, post2)), region});
                    }
                }
            }
        }

        return tests.toArray(new Object[][]{});
    }
+
+    @Test(dataProvider = "GetOverlapping")
+    public void testGetOverlapping(final GenomeLocSortedSet intervals, final GenomeLoc region) {
+        final List<GenomeLoc> expectedOverlapping = intervals.getOverlappingFullSearch(region);
+        final List<GenomeLoc> actualOverlapping = intervals.getOverlapping(region);
+        Assert.assertEquals(actualOverlapping, expectedOverlapping);
+        Assert.assertEquals(intervals.overlaps(region), ! expectedOverlapping.isEmpty(), "GenomeLocSortedSet.overlaps didn't return expected result");
+    }
+}
diff --git a/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/GenomeLocUnitTest.java b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/GenomeLocUnitTest.java
new file mode 100644
index 0000000..c603569
--- /dev/null
+++ b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/GenomeLocUnitTest.java
@@ -0,0 +1,386 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils;
+
+
+// the imports for unit testing.
+
+
+import htsjdk.samtools.reference.ReferenceSequenceFile;
+import htsjdk.samtools.SAMFileHeader;
+import org.broadinstitute.gatk.utils.BaseTest;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+import org.broadinstitute.gatk.utils.fasta.CachingIndexedFastaSequenceFile;
+import org.broadinstitute.gatk.utils.interval.IntervalMergingRule;
+import org.broadinstitute.gatk.utils.interval.IntervalUtils;
+import org.broadinstitute.gatk.utils.sam.ArtificialSAMUtils;
+import org.testng.Assert;
+import org.testng.annotations.BeforeClass;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.util.*;
+
/**
 * Basic unit tests for GenomeLoc: containment, comparison, sorting with the
 * UNMAPPED sentinel, overlap/intersection, merging, and inter-contig distance.
 */
public class GenomeLocUnitTest extends BaseTest {
    // hg18 reference, loaded once for the whole class in init()
    private static ReferenceSequenceFile seq;
    private GenomeLocParser genomeLocParser;

    @BeforeClass
    public void init() throws FileNotFoundException {
        // sequence
        seq = new CachingIndexedFastaSequenceFile(new File(hg18Reference));
        genomeLocParser = new GenomeLocParser(seq);
    }

    /**
     * isBetween() is true only for a loc lying inside the [left, right] span,
     * and false when the probe is one of the endpoints' neighbors.
     */
    @Test
    public void testIsBetween() {
        logger.warn("Executing testIsBetween");

        GenomeLoc locMiddle = genomeLocParser.createGenomeLoc("chr1", 3, 3);

        GenomeLoc locLeft = genomeLocParser.createGenomeLoc("chr1", 1, 1);
        GenomeLoc locRight = genomeLocParser.createGenomeLoc("chr1", 5, 5);

        Assert.assertTrue(locMiddle.isBetween(locLeft, locRight));
        Assert.assertFalse(locLeft.isBetween(locMiddle, locRight));
        Assert.assertFalse(locRight.isBetween(locLeft, locMiddle));

    }

    /**
     * Contig name and index round-trip through the parser, and the same
     * contig referenced by name or by dictionary lookup compares equal.
     */
    @Test
    public void testContigIndex() {
        logger.warn("Executing testContigIndex");
        // chr1 has index 1, not 0 — presumably the hg18 dictionary puts
        // another sequence (chrM?) at index 0; TODO confirm against reference
        GenomeLoc locOne = genomeLocParser.createGenomeLoc("chr1",1,1);
        Assert.assertEquals(1, locOne.getContigIndex());
        Assert.assertEquals("chr1", locOne.getContig());

        GenomeLoc locX = genomeLocParser.createGenomeLoc("chrX",1,1);
        Assert.assertEquals(23, locX.getContigIndex());
        Assert.assertEquals("chrX", locX.getContig());

        GenomeLoc locNumber = genomeLocParser.createGenomeLoc(seq.getSequenceDictionary().getSequence(1).getSequenceName(),1,1);
        Assert.assertEquals(1, locNumber.getContigIndex());
        Assert.assertEquals("chr1", locNumber.getContig());
        Assert.assertEquals(0, locOne.compareTo(locNumber));

    }

    /**
     * compareTo() orders first by contig, then by position; equal locs
     * compare as 0 and the ordering is antisymmetric.
     */
    @Test
    public void testCompareTo() {
        logger.warn("Executing testCompareTo");
        GenomeLoc twoOne = genomeLocParser.createGenomeLoc("chr2", 1);
        GenomeLoc twoFive = genomeLocParser.createGenomeLoc("chr2", 5);
        GenomeLoc twoOtherFive = genomeLocParser.createGenomeLoc("chr2", 5);
        Assert.assertEquals(twoFive.compareTo(twoOtherFive), 0);

        Assert.assertEquals(twoOne.compareTo(twoFive), -1);
        Assert.assertEquals(twoFive.compareTo(twoOne), 1);

        // contig ordering dominates position: chr1:5 sorts before chr2:1
        GenomeLoc oneOne = genomeLocParser.createGenomeLoc("chr1", 5);
        Assert.assertEquals(oneOne.compareTo(twoOne), -1);
        Assert.assertEquals(twoOne.compareTo(oneOne), 1);
    }


    /**
     * GenomeLoc.UNMAPPED must always sort to the END of a list, regardless
     * of where it starts out relative to the mapped locs.
     */
    @Test
    public void testUnmappedSort() {
        GenomeLoc chr1 = genomeLocParser.createGenomeLoc("chr1",1,10000000);
        GenomeLoc chr2 = genomeLocParser.createGenomeLoc("chr2",1,10000000);
        GenomeLoc unmapped = GenomeLoc.UNMAPPED;

        List<GenomeLoc> unmappedOnly = Arrays.asList(unmapped);
        Collections.sort(unmappedOnly);
        Assert.assertEquals(unmappedOnly.size(),1,"Wrong number of elements in unmapped-only list.");
        Assert.assertEquals(unmappedOnly.get(0),unmapped,"List sorted in wrong order");

        List<GenomeLoc> chr1Presorted = Arrays.asList(chr1,unmapped);
        Collections.sort(chr1Presorted);
        Assert.assertEquals(chr1Presorted.size(),2,"Wrong number of elements in chr1,unmapped list.");
        Assert.assertEquals(chr1Presorted,Arrays.asList(chr1,unmapped),"List sorted in wrong order");

        List<GenomeLoc> chr1Inverted = Arrays.asList(unmapped,chr1);
        Collections.sort(chr1Inverted);
        Assert.assertEquals(chr1Inverted.size(),2,"Wrong number of elements in chr1,unmapped list.");
        Assert.assertEquals(chr1Inverted,Arrays.asList(chr1,unmapped),"List sorted in wrong order");

        List<GenomeLoc> chr1and2Presorted = Arrays.asList(chr1,chr2,unmapped);
        Collections.sort(chr1and2Presorted);
        Assert.assertEquals(chr1and2Presorted.size(),3,"Wrong number of elements in chr1,chr2,unmapped list.");
        Assert.assertEquals(chr1and2Presorted,Arrays.asList(chr1,chr2,unmapped),"List sorted in wrong order");

        List<GenomeLoc> chr1and2UnmappedInFront = Arrays.asList(unmapped,chr1,chr2);
        Collections.sort(chr1and2UnmappedInFront);
        Assert.assertEquals(chr1and2UnmappedInFront.size(),3,"Wrong number of elements in unmapped,chr1,chr2 list.");
        Assert.assertEquals(chr1and2UnmappedInFront,Arrays.asList(chr1,chr2,unmapped),"List sorted in wrong order");

        List<GenomeLoc> chr1and2UnmappedSandwiched = Arrays.asList(chr1,unmapped,chr2);
        Collections.sort(chr1and2UnmappedSandwiched);
        Assert.assertEquals(chr1and2UnmappedSandwiched.size(),3,"Wrong number of elements in chr1,unmapped,chr2 list.");
        Assert.assertEquals(chr1and2UnmappedSandwiched,Arrays.asList(chr1,chr2,unmapped),"List sorted in wrong order");
    }

    /**
     * sortAndMergeIntervals() collapses duplicate UNMAPPED entries into one
     * and keeps UNMAPPED at the end after the mapped intervals.
     */
    @Test
    public void testUnmappedMerge() {
        GenomeLoc chr1 = genomeLocParser.createGenomeLoc("chr1",1,10000000);
        GenomeLoc unmapped = GenomeLoc.UNMAPPED;

        List<GenomeLoc> oneUnmappedOnly = Arrays.asList(unmapped);
        oneUnmappedOnly = IntervalUtils.sortAndMergeIntervals(genomeLocParser,oneUnmappedOnly, IntervalMergingRule.OVERLAPPING_ONLY).toList();
        Assert.assertEquals(oneUnmappedOnly.size(),1,"Wrong number of elements in list.");
        Assert.assertEquals(oneUnmappedOnly.get(0),unmapped,"List sorted in wrong order");

        List<GenomeLoc> twoUnmapped = Arrays.asList(unmapped,unmapped);
        twoUnmapped = IntervalUtils.sortAndMergeIntervals(genomeLocParser,twoUnmapped,IntervalMergingRule.OVERLAPPING_ONLY).toList();
        Assert.assertEquals(twoUnmapped.size(),1,"Wrong number of elements in list.");
        Assert.assertEquals(twoUnmapped.get(0),unmapped,"List sorted in wrong order");

        List<GenomeLoc> twoUnmappedAtEnd = Arrays.asList(chr1,unmapped,unmapped);
        twoUnmappedAtEnd = IntervalUtils.sortAndMergeIntervals(genomeLocParser,twoUnmappedAtEnd,IntervalMergingRule.OVERLAPPING_ONLY).toList();
        Assert.assertEquals(twoUnmappedAtEnd.size(),2,"Wrong number of elements in list.");
        Assert.assertEquals(twoUnmappedAtEnd,Arrays.asList(chr1,unmapped),"List sorted in wrong order");

        List<GenomeLoc> twoUnmappedMixed = Arrays.asList(unmapped,chr1,unmapped);
        twoUnmappedMixed = IntervalUtils.sortAndMergeIntervals(genomeLocParser,twoUnmappedMixed,IntervalMergingRule.OVERLAPPING_ONLY).toList();
        Assert.assertEquals(twoUnmappedMixed.size(),2,"Wrong number of elements in list.");
        Assert.assertEquals(twoUnmappedMixed,Arrays.asList(chr1,unmapped),"List sorted in wrong order");
    }

    // -------------------------------------------------------------------------------------
    //
    // testing overlap detection
    //
    // -------------------------------------------------------------------------------------

    /**
     * One overlap test case: two chr1 intervals plus the brute-force expected
     * overlap size and reciprocal overlap fraction computed base-by-base.
     */
    private class ReciprocalOverlapProvider extends TestDataProvider {
        GenomeLoc gl1, gl2;
        int overlapSize;
        double overlapFraction;

        private ReciprocalOverlapProvider(int start1, int stop1, int start2, int stop2) {
            super(ReciprocalOverlapProvider.class);
            gl1 = genomeLocParser.createGenomeLoc("chr1", start1, stop1);
            gl2 = genomeLocParser.createGenomeLoc("chr1", start2, stop2);

            // brute-force count of shared positions, the ground truth the
            // GenomeLoc implementation is checked against
            int shared = 0;
            for ( int i = start1; i <= stop1; i++ ) {
                if ( i >= start2 && i <= stop2 )
                    shared++;
            }

            this.overlapSize = shared;
            // reciprocal fraction = min of the overlap fraction of each interval
            this.overlapFraction = Math.min((1.0*shared)/gl1.size(), (1.0*shared)/gl2.size());
            super.setName(String.format("%d-%d / %d-%d overlap=%d / %.2f", start1, stop1, start2, stop2, overlapSize, overlapFraction));
        }
    }

    /**
     * Sweeps every interval within 1-10 against fixed partners (containing,
     * contained, partial, and disjoint), in both argument orders.
     */
    @DataProvider(name = "ReciprocalOverlapProvider")
    public Object[][] makeReciprocalOverlapProvider() {
        for ( int start1 = 1; start1 <= 10; start1++ ) {
            for ( int stop1 = start1; stop1 <= 10; stop1++ ) {
                new ReciprocalOverlapProvider(start1, stop1, 1, 10);
                new ReciprocalOverlapProvider(start1, stop1, 5, 10);
                new ReciprocalOverlapProvider(start1, stop1, 5, 7);
                new ReciprocalOverlapProvider(start1, stop1, 5, 15);
                new ReciprocalOverlapProvider(start1, stop1, 11, 20);

                new ReciprocalOverlapProvider(1, 10, start1, stop1);
                new ReciprocalOverlapProvider(5, 10, start1, stop1);
                new ReciprocalOverlapProvider(5, 7, start1, stop1);
                new ReciprocalOverlapProvider(5, 15, start1, stop1);
                new ReciprocalOverlapProvider(11, 20, start1, stop1);
            }
        }

        return ReciprocalOverlapProvider.getTests(ReciprocalOverlapProvider.class);
    }

    /**
     * overlapsP(), intersect().size(), and the reciprocal overlap fraction
     * must all match the brute-force values from the provider.
     */
    @Test(dataProvider = "ReciprocalOverlapProvider")
    public void testReciprocalOverlapProvider(ReciprocalOverlapProvider cfg) {
        if ( cfg.overlapSize == 0 ) {
            Assert.assertFalse(cfg.gl1.overlapsP(cfg.gl2));
        } else {
            Assert.assertTrue(cfg.gl1.overlapsP(cfg.gl2));
            Assert.assertEquals(cfg.gl1.intersect(cfg.gl2).size(), cfg.overlapSize);
            // "reciprocial" is the (misspelled) name of the production API method
            Assert.assertEquals(cfg.gl1.reciprocialOverlapFraction(cfg.gl2), cfg.overlapFraction);
        }
    }

    // -------------------------------------------------------------------------------------
    //
    // testing comparison, hashcode, and equals
    //
    // -------------------------------------------------------------------------------------

    /**
     * Pairs a fixed chr2 interval with nearby intervals on chr1/chr2/chr3 and
     * the expected compareTo() sign (contig first, then start, then stop).
     */
    @DataProvider(name = "GenomeLocComparisons")
    public Object[][] createGenomeLocComparisons() {
        List<Object[]> tests = new ArrayList<Object[]>();

        final int start = 10;
        for ( int stop = start; stop < start + 3; stop++ ) {
            final GenomeLoc g1 = genomeLocParser.createGenomeLoc("chr2", start, stop);
            for ( final String contig : Arrays.asList("chr1", "chr2", "chr3")) {
                for ( int start2 = start - 1; start2 <= stop + 1; start2++ ) {
                    for ( int stop2 = start2; stop2 < stop + 2; stop2++ ) {
                        final GenomeLoc g2 = genomeLocParser.createGenomeLoc(contig, start2, stop2);

                        // expected ordering of g1 (chr2) vs g2: contig wins,
                        // then start, then stop
                        ComparisonResult cmp = ComparisonResult.EQUALS;
                        if ( contig.equals("chr3") ) cmp = ComparisonResult.LESS_THAN;
                        else if ( contig.equals("chr1") ) cmp = ComparisonResult.GREATER_THAN;
                        else if ( start < start2 ) cmp = ComparisonResult.LESS_THAN;
                        else if ( start > start2 ) cmp = ComparisonResult.GREATER_THAN;
                        else if ( stop < stop2 ) cmp = ComparisonResult.LESS_THAN;
                        else if ( stop > stop2 ) cmp = ComparisonResult.GREATER_THAN;

                        tests.add(new Object[]{g1, g2, cmp});
                    }
                }
            }
        }

        return tests.toArray(new Object[][]{});
    }

    /** Expected sign of a compareTo() call, as a named constant. */
    private enum ComparisonResult {
        LESS_THAN(-1),
        EQUALS(0),
        GREATER_THAN(1);

        final int cmp;

        private ComparisonResult(int cmp) {
            this.cmp = cmp;
        }
    }

    /**
     * compareTo() matches the expected sign, equals() agrees with
     * compareTo() == 0, and equal locs share a hash code.
     */
    @Test(dataProvider = "GenomeLocComparisons")
    public void testGenomeLocComparisons(GenomeLoc g1, GenomeLoc g2, ComparisonResult expected) {
        Assert.assertEquals(g1.compareTo(g2), expected.cmp, "Comparing genome locs failed");
        Assert.assertEquals(g1.equals(g2), expected == ComparisonResult.EQUALS);
        if ( expected == ComparisonResult.EQUALS )
            Assert.assertEquals(g1.hashCode(), g2.hashCode(), "Equal genome locs don't have the same hash code");
    }

    // -------------------------------------------------------------------------------------
    //
    // testing merging functionality
    //
    // -------------------------------------------------------------------------------------

    // three contiguous intervals on contig index 0: 10-20, 21-30, 31-40
    private static final GenomeLoc loc1 = new GenomeLoc("1", 0, 10, 20);
    private static final GenomeLoc loc2 = new GenomeLoc("1", 0, 21, 30);
    private static final GenomeLoc loc3 = new GenomeLoc("1", 0, 31, 40);

    /** A merge test case: the list of locs to merge, in order. */
    private class MergeTest {
        public List<GenomeLoc> locs;

        private MergeTest(final List<GenomeLoc> locs) {
            this.locs = locs;
        }
    }

    @DataProvider(name = "SGLtest")
    public Object[][] createFindVariantRegionsData() {
        List<Object[]> tests = new ArrayList<Object[]>();

        tests.add(new Object[]{new MergeTest(Arrays.<GenomeLoc>asList(loc1))});
        tests.add(new Object[]{new MergeTest(Arrays.<GenomeLoc>asList(loc1, loc2))});
        tests.add(new Object[]{new MergeTest(Arrays.<GenomeLoc>asList(loc1, loc2, loc3))});

        return tests.toArray(new Object[][]{});
    }

    @Test(dataProvider = "SGLtest", enabled = true)
    public void testSimpleGenomeLoc(MergeTest test) {
        testMerge(test.locs);
    }

    /** Merging non-adjacent locs (10-20 and 31-40) must throw. */
    @Test(expectedExceptions = ReviewedGATKException.class)
    public void testNotContiguousLocs() {
        final List<GenomeLoc> locs = new ArrayList<GenomeLoc>(1);
        locs.add(loc1);
        locs.add(loc3);
        testMerge(locs);
    }

    /**
     * Pairwise merge and set-based merge must agree, and the result must
     * span from the first loc's start to the last loc's stop.
     */
    private void testMerge(final List<GenomeLoc> locs) {
        GenomeLoc result1 = locs.get(0);
        for ( int i = 1; i < locs.size(); i++ )
            result1 = GenomeLoc.merge(result1, locs.get(i));

        GenomeLoc result2 = GenomeLoc.merge(new TreeSet<GenomeLoc>(locs));
        Assert.assertEquals(result1, result2);
        Assert.assertEquals(result1.getStart(), locs.get(0).getStart());
        Assert.assertEquals(result1.getStop(), locs.get(locs.size() - 1).getStop());
    }

    // -------------------------------------------------------------------------------------
    //
    // testing distance functionality
    //
    // -------------------------------------------------------------------------------------

    /**
     * distanceAcrossContigs() on an artificial 10-contig header (1000bp each)
     * is symmetric and counts intervening contig lengths plus the distance to
     * each contig edge.
     */
    @Test(enabled=true)
    public void testDistanceAcrossContigs() {
        final int chrSize = 1000;
        SAMFileHeader header = ArtificialSAMUtils.createArtificialSamHeader(10, 0, chrSize);
        GenomeLocParser parser = new GenomeLocParser(header.getSequenceDictionary());
        GenomeLoc loc1 = parser.createGenomeLoc("chr3", 500);  // to check regular case
        GenomeLoc loc2 = parser.createGenomeLoc("chr7", 200);  // to check regular case
        GenomeLoc loc3 = parser.createGenomeLoc("chr0", 1);    // to check corner case
        GenomeLoc loc4 = parser.createGenomeLoc("chr9", 1000);// to check corner case
        GenomeLoc loc5 = parser.createGenomeLoc("chr7", 500);  // to make sure it does the right thing when in the same chromosome

        GenomeLoc loc6 = parser.createGenomeLoc("chr7", 200, 300);
        GenomeLoc loc7 = parser.createGenomeLoc("chr7", 500, 600);
        GenomeLoc loc8 = parser.createGenomeLoc("chr9", 500, 600);

        // Locus comparisons
        Assert.assertEquals(loc1.distanceAcrossContigs(loc2, header), 3*chrSize + chrSize-loc1.getStop() + loc2.getStart()); // simple case, smaller first
        Assert.assertEquals(loc2.distanceAcrossContigs(loc1, header), 3*chrSize + chrSize-loc1.getStop() + loc2.getStart()); // simple case, bigger first

        Assert.assertEquals(loc3.distanceAcrossContigs(loc4, header), 10*chrSize - 1); // corner case, smaller first
        Assert.assertEquals(loc4.distanceAcrossContigs(loc3, header), 10*chrSize - 1); // corner case, bigger first

        Assert.assertEquals(loc2.distanceAcrossContigs(loc5, header), 300); // same contig, smaller first
        Assert.assertEquals(loc5.distanceAcrossContigs(loc2, header), 300); // same contig, bigger first

        // Interval comparisons
        Assert.assertEquals(loc6.distanceAcrossContigs(loc7, header), 200); // same contig, smaller first
        Assert.assertEquals(loc7.distanceAcrossContigs(loc6, header), 200); // same contig, bigger first

        Assert.assertEquals(loc7.distanceAcrossContigs(loc8, header), chrSize + chrSize-loc7.stop + loc8.getStart()); // across contigs, smaller first
        Assert.assertEquals(loc8.distanceAcrossContigs(loc7, header), chrSize + chrSize-loc7.stop + loc8.getStart()); // across contigs, bigger first

    }

}
diff --git a/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/MD5DB.java b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/MD5DB.java
new file mode 100644
index 0000000..2f88c4e
--- /dev/null
+++ b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/MD5DB.java
@@ -0,0 +1,312 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.log4j.Logger;
+import org.broadinstitute.gatk.utils.diffengine.DiffEngine;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+
+import java.io.*;
+import java.util.Arrays;
+
+/**
+ * Created by IntelliJ IDEA.
+ * User: depristo
+ * Date: 7/18/11
+ * Time: 9:10 AM
+ *
+ * Utilities for manipulating the MD5 database of previous results
+ */
+public class MD5DB {
+    public static final Logger logger = Logger.getLogger(MD5DB.class);
+
+    /**
+     * Subdirectory under the ant build directory where we store integration test md5 results
+     */
+    private static final int MAX_RECORDS_TO_READ = 1000000;
+    private static final int MAX_RAW_DIFFS_TO_SUMMARIZE = -1;
+    public static final String LOCAL_MD5_DB_DIR = "integrationtests";
+    public static final String GLOBAL_MD5_DB_DIR = "/humgen/gsa-hpprojects/GATK/data/integrationtests";
+
+    // tracking and emitting a data file of origina and new md5s
+    private final File MD5MismatchesFile;
+    private final PrintStream md5MismatchStream;
+
+    public MD5DB() {
+        this(new File(MD5DB.LOCAL_MD5_DB_DIR + "/md5mismatches.txt"));
+    }
+
+    public MD5DB(final File MD5MismatchesFile) {
+        this.MD5MismatchesFile = MD5MismatchesFile;
+
+        ensureMd5DbDirectory();
+
+        logger.debug("Creating md5 mismatch db at " + MD5MismatchesFile);
+        try {
+            md5MismatchStream = new PrintStream(new FileOutputStream(MD5MismatchesFile));
+            md5MismatchStream.printf("%s\t%s\t%s%n", "expected", "observed", "test");
+        } catch ( FileNotFoundException e ) {
+            throw new ReviewedGATKException("Failed to open md5 mismatch file", e);
+        }
+
+    }
+
+    public void close() {
+        if ( md5MismatchStream != null ) {
+            logger.debug("Closeing md5 mismatch db at " + MD5MismatchesFile);
+            md5MismatchStream.close();
+        }
+    }
+
+    // ----------------------------------------------------------------------
+    //
+    // MD5 DB stuff
+    //
+    // ----------------------------------------------------------------------
+
+    /**
+     * Create the MD5 file directories if necessary
+     */
+    private void ensureMd5DbDirectory() {
+        File dir = new File(LOCAL_MD5_DB_DIR);
+        if ( ! dir.exists() ) {
+            System.out.printf("##### Creating MD5 db %s%n", LOCAL_MD5_DB_DIR);
+            if ( ! dir.mkdir() ) {
+                // Need to check AGAIN whether the dir exists, because we might be doing multi-process parallelism
+                // within the same working directory, and another GATK instance may have come along and created the
+                // directory between the calls to exists() and mkdir() above.
+                if ( ! dir.exists() ) {
+                    throw new ReviewedGATKException("Infrastructure failure: failed to create md5 directory " + LOCAL_MD5_DB_DIR);
+                }
+            }
+        }
+    }
+
+    /**
+     * Returns the path to an already existing file with the md5 contents, or valueIfNotFound
+     * if no such file exists in the db.
+     *
+     * @param md5
+     * @param valueIfNotFound
+     * @return
+     */
+    public String getMD5FilePath(final String md5, final String valueIfNotFound) {
+        // we prefer the global db to the local DB, so match it first
+        for ( String dir : Arrays.asList(GLOBAL_MD5_DB_DIR, LOCAL_MD5_DB_DIR)) {
+            File f = getFileForMD5(md5, dir);
+            if ( f.exists() && f.canRead() )
+                return f.getAbsolutePath();
+        }
+
+        return valueIfNotFound;
+    }
+
+    /**
+     * Utility function that given a file's md5 value and the path to the md5 db,
+     * returns the canonical name of the file. For example, if md5 is XXX and db is YYY,
+     * this will return YYY/XXX.integrationtest
+     *
+     * @param md5
+     * @param dbPath
+     * @return
+     */
+    private File getFileForMD5(final String md5, final String dbPath) {
+        final String basename = String.format("%s.integrationtest", md5);
+        return new File(dbPath + "/" + basename);
+    }
+
+    /**
+     * Copies the results file with md5 value to its canonical file name and db places
+     *
+     * @param md5
+     * @param resultsFile
+     */
+    private void updateMD5Db(final String md5, final File resultsFile) {
+        copyFileToDB(getFileForMD5(md5, LOCAL_MD5_DB_DIR), resultsFile);
+        copyFileToDB(getFileForMD5(md5, GLOBAL_MD5_DB_DIR), resultsFile);
+    }
+
+    /**
+     * Low-level utility routine that copies resultsFile to dbFile
+     * @param dbFile
+     * @param resultsFile
+     */
+    private void copyFileToDB(File dbFile, final File resultsFile) {
+        if ( ! dbFile.exists() ) {
+            // the file isn't already in the db, copy it over
+            System.out.printf("##### Updating MD5 file: %s%n", dbFile.getPath());
+            try {
+                FileUtils.copyFile(resultsFile, dbFile);
+            } catch ( IOException e ) {
+                System.out.printf("##### Skipping update, cannot write file %s%n", dbFile);
+            }
+        } else {
+            //System.out.printf("##### MD5 file is up to date: %s%n", dbFile.getPath());
+        }
+    }
+
+    /**
+     * Returns the byte[] of the entire contents of file, for md5 calculations
+     * @param file
+     * @return
+     * @throws IOException
+     */
+    private static byte[] getBytesFromFile(File file) throws IOException {
+        InputStream is = new FileInputStream(file);
+
+        // Get the size of the file
+        long length = file.length();
+
+        if (length > Integer.MAX_VALUE) {
+            // File is too large
+        }
+
+        // Create the byte array to hold the data
+        byte[] bytes = new byte[(int) length];
+
+        // Read in the bytes
+        int offset = 0;
+        int numRead = 0;
+        while (offset < bytes.length
+                && (numRead = is.read(bytes, offset, bytes.length - offset)) >= 0) {
+            offset += numRead;
+        }
+
+        // Ensure all the bytes have been read in
+        if (offset < bytes.length) {
+            throw new IOException("Could not completely read file " + file.getName());
+        }
+
+        // Close the input stream and return bytes
+        is.close();
+        return bytes;
+    }
+
+    public static class MD5Match {
+        public final String actualMD5, expectedMD5;
+        public final String failMessage;
+        public final String diffEngineOutput;
+        public final boolean failed;
+
+        public MD5Match(final String actualMD5, final String expectedMD5, final String failMessage, final String diffEngineOutput, final boolean failed) {
+            this.actualMD5 = actualMD5;
+            this.expectedMD5 = expectedMD5;
+            this.failMessage = failMessage;
+            this.diffEngineOutput = diffEngineOutput;
+            this.failed = failed;
+        }
+    }
+
+    /**
+     * Tests a file MD5 against an expected value, returning an MD5Match object containing a description of the
+     * match or mismatch. In case of a mismatch, outputs a description of the mismatch to various log files/streams.
+     *
+     * NOTE: This function WILL NOT throw an exception if the MD5s are different.
+     *
+     * @param testName Name of the test.
+     * @param testClassName Name of the class that contains the test.
+     * @param resultsFile File to MD5.
+     * @param expectedMD5 Expected MD5 value.
+     * @param parameterize If true or if expectedMD5 is an empty string, will print out the calculated MD5 instead of error text.
+     * @return an MD5Match object containing a description of the match/mismatch. Will have its "failed" field set
+     *         to true if there was a mismatch (unless we're using the "parameterize" argument)
+     */
+    public MD5Match testFileMD5(final String testName, final String testClassName, final File resultsFile, final String expectedMD5, final boolean parameterize) {
+        final String actualMD5 = calculateFileMD5(resultsFile);
+        String diffEngineOutput = "";
+        String failMessage = "";
+        boolean failed = false;
+
+        // copy md5 to integrationtests
+        updateMD5Db(actualMD5, resultsFile);
+
+        if (parameterize || expectedMD5.equals("")) {
+            BaseTest.log(String.format("PARAMETERIZATION: file %s has md5 = %s", resultsFile, actualMD5));
+        } else if ( ! expectedMD5.equals(actualMD5) ) {
+            failed = true;
+            failMessage = String.format("%s:%s has mismatching MD5s: expected=%s observed=%s", testClassName, testName, expectedMD5, actualMD5);
+            diffEngineOutput = logMD5MismatchAndGetDiffEngineOutput(testName, testClassName, expectedMD5, actualMD5);
+        }
+
+        return new MD5Match(actualMD5, expectedMD5, failMessage, diffEngineOutput, failed);
+    }
+
+    /**
+     * Calculates the MD5 for the specified file and returns it as a String
+     *
+     * @param file file whose MD5 to calculate
+     * @return file's MD5 in String form
+     * @throws RuntimeException if the file could not be read
+     */
+    public String calculateFileMD5( final File file ) {
+        try {
+            return Utils.calcMD5(getBytesFromFile(file));
+        }
+        catch ( Exception e ) {
+            throw new RuntimeException("Failed to read bytes from file: " + file + " for MD5 calculation", e);
+        }
+    }
+
+    /**
+     * Logs a description (including diff engine output) of the MD5 mismatch between the expectedMD5
+     * and actualMD5 to a combination of BaseTest.log(), the md5MismatchStream, and stdout, then returns
+     * the diff engine output.
+     *
+     * @param testName name of the test that generated the mismatch
+     * @param testClassName name of the class containing the test that generated the mismatch
+     * @param expectedMD5 the MD5 we were expecting from this test
+     * @param actualMD5 the MD5 we actually calculated from the test output
+     * @return the diff engine output produced while logging the description of the mismatch
+     */
+    private String logMD5MismatchAndGetDiffEngineOutput(final String testName, final String testClassName, final String expectedMD5, final String actualMD5) {
+        System.out.printf("##### Test %s:%s is going to fail #####%n", testClassName, testName);
+        String pathToExpectedMD5File = getMD5FilePath(expectedMD5, "[No DB file found]");
+        String pathToFileMD5File = getMD5FilePath(actualMD5, "[No DB file found]");
+        BaseTest.log(String.format("expected   %s", expectedMD5));
+        BaseTest.log(String.format("calculated %s", actualMD5));
+        BaseTest.log(String.format("diff %s %s", pathToExpectedMD5File, pathToFileMD5File));
+
+        md5MismatchStream.printf("%s\t%s\t%s%n", expectedMD5, actualMD5, testName);
+        md5MismatchStream.flush();
+
+        // inline differences
+        String diffEngineOutput = "";
+        final ByteArrayOutputStream baos = new ByteArrayOutputStream();
+        final PrintStream ps = new PrintStream(baos);
+        DiffEngine.SummaryReportParams params = new DiffEngine.SummaryReportParams(ps, 20, 10, 0, MAX_RAW_DIFFS_TO_SUMMARIZE, false);
+        boolean success = DiffEngine.simpleDiffFiles(new File(pathToExpectedMD5File), new File(pathToFileMD5File), MAX_RECORDS_TO_READ, params);
+        if ( success ) {
+            diffEngineOutput = baos.toString();
+            BaseTest.log(diffEngineOutput);
+            System.out.printf("Note that the above list is not comprehensive.  At most 20 lines of output, and 10 specific differences will be listed.  Please use -T DiffObjects -R " + BaseTest.publicTestDir + "exampleFASTA.fasta -m %s -t %s to explore the differences more freely%n",
+                    pathToExpectedMD5File, pathToFileMD5File);
+        }
+        ps.close();
+
+        return diffEngineOutput;
+    }
+}
diff --git a/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/MD5Mismatch.java b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/MD5Mismatch.java
new file mode 100644
index 0000000..a85debd
--- /dev/null
+++ b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/MD5Mismatch.java
@@ -0,0 +1,67 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils;
+
+import java.util.Collections;
+import java.util.List;
+
/**
 * Exception signaling one or more MD5 mismatches between expected and actual
 * integration-test results. Supports reporting several mismatches at once via
 * parallel lists of actual MD5s, expected MD5s, and diff engine outputs.
 */
public class MD5Mismatch extends Exception {
    final List<String> actuals, expecteds, diffEngineOutputs;

    /** Convenience constructor for a single mismatch. */
    public MD5Mismatch(final String actual, final String expected, final String diffEngineOutput) {
        this(Collections.singletonList(actual), Collections.singletonList(expected), Collections.singletonList(diffEngineOutput));
    }

    /**
     * Creates a mismatch exception covering several expected/actual pairs.
     * The three lists are parallel: element i of each describes mismatch i.
     */
    public MD5Mismatch(final List<String> actuals, final List<String> expecteds, final List<String> diffEngineOutputs) {
        super(formatMessage(actuals, expecteds, diffEngineOutputs));
        this.actuals = actuals;
        this.expecteds = expecteds;
        this.diffEngineOutputs = diffEngineOutputs;
    }

    @Override
    public String toString() {
        return formatMessage(actuals, expecteds, diffEngineOutputs);
    }

    /** Renders every recorded mismatch into one human-readable message. */
    private static String formatMessage(final List<String> actuals, final List<String> expecteds, final List<String> diffEngineOutputs) {
        final StringBuilder message = new StringBuilder("MD5 mismatch: ");
        final int count = actuals.size();
        for ( int i = 0; i < count; i++ ) {
            if ( i > 0 ) message.append("\t\t\n\n"); // separator between successive mismatches
            message.append("actual ").append(actuals.get(i))
                   .append(" expected ").append(expecteds.get(i))
                   .append("\nDiff Engine Output:\n")
                   .append(diffEngineOutputs.get(i));
        }
        return message.toString();
    }
}
diff --git a/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/MRUCachingSAMSequencingDictionaryUnitTest.java b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/MRUCachingSAMSequencingDictionaryUnitTest.java
new file mode 100644
index 0000000..ef6ef77
--- /dev/null
+++ b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/MRUCachingSAMSequencingDictionaryUnitTest.java
@@ -0,0 +1,97 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils;
+
+
+import htsjdk.samtools.reference.ReferenceSequenceFile;
+import htsjdk.samtools.SAMFileHeader;
+import htsjdk.samtools.SAMSequenceDictionary;
+import htsjdk.samtools.SAMSequenceRecord;
+import org.broadinstitute.gatk.utils.BaseTest;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+import org.broadinstitute.gatk.utils.exceptions.UserException;
+import org.broadinstitute.gatk.utils.fasta.CachingIndexedFastaSequenceFile;
+import org.broadinstitute.gatk.utils.sam.ArtificialSAMUtils;
+import org.testng.Assert;
+import org.testng.annotations.BeforeClass;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.util.LinkedList;
+import java.util.List;
+
+import static org.testng.Assert.assertEquals;
+import static org.testng.Assert.assertTrue;
+
+public class MRUCachingSAMSequencingDictionaryUnitTest extends BaseTest {
+    private static ReferenceSequenceFile seq;
+    private static SAMSequenceDictionary dict;
+
+    @BeforeClass
+    public void init() throws FileNotFoundException {
+        // sequence
+        seq = new CachingIndexedFastaSequenceFile(new File(b37KGReference));
+        dict = seq.getSequenceDictionary();
+    }
+
+    @Test
+    public void testBasic() {
+        final MRUCachingSAMSequenceDictionary caching = new MRUCachingSAMSequenceDictionary(dict);
+
+        Assert.assertEquals(caching.getDictionary(), dict, "Dictionary not the one I expected");
+
+        for ( final SAMSequenceRecord rec : dict.getSequences() ) {
+            Assert.assertFalse(caching.isCached(rec.getSequenceIndex()), "Expected index to not be cached");
+            Assert.assertFalse(caching.isCached(rec.getSequenceName()), "Expected contig to not be cached");
+
+            Assert.assertEquals(caching.getSequence(rec.getSequenceName()), rec, "Couldn't query for sequence");
+            Assert.assertEquals(caching.getSequence(rec.getSequenceIndex()), rec, "Couldn't query for sequence index");
+            Assert.assertEquals(caching.hasContig(rec.getSequenceName()), true, "hasContig query for sequence");
+            Assert.assertEquals(caching.hasContigIndex(rec.getSequenceIndex()), true, "hasContigIndex query for sequence");
+            Assert.assertEquals(caching.getSequenceIndex(rec.getSequenceName()), rec.getSequenceIndex(), "Couldn't query for sequence");
+
+            Assert.assertEquals(caching.hasContig(rec.getSequenceName() + "asdfadsfa"), false, "hasContig query for unknown sequence");
+            Assert.assertEquals(caching.hasContigIndex(dict.getSequences().size()), false, "hasContigIndex query for unknown index");
+
+            Assert.assertTrue(caching.isCached(rec.getSequenceIndex()), "Expected index to be cached");
+            Assert.assertTrue(caching.isCached(rec.getSequenceName()), "Expected contig to be cached");
+        }
+    }
+
+    @Test(expectedExceptions = ReviewedGATKException.class)
+    public void testBadGetSequence() {
+        final MRUCachingSAMSequenceDictionary caching = new MRUCachingSAMSequenceDictionary(dict);
+        caching.getSequence("notInDictionary");
+    }
+
+    @Test(expectedExceptions = ReviewedGATKException.class)
+    public void testBadGetSequenceIndex() {
+        final MRUCachingSAMSequenceDictionary caching = new MRUCachingSAMSequenceDictionary(dict);
+        caching.getSequence(dict.getSequences().size());
+    }
+}
\ No newline at end of file
diff --git a/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/MWUnitTest.java b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/MWUnitTest.java
new file mode 100644
index 0000000..fb00578
--- /dev/null
+++ b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/MWUnitTest.java
@@ -0,0 +1,131 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils;
+
+import org.broadinstitute.gatk.utils.BaseTest;
+import org.broadinstitute.gatk.utils.collections.Pair;
+
+import org.testng.annotations.BeforeClass;
+import org.testng.annotations.Test;
+import org.testng.Assert;
+
+/**
+ * Created by IntelliJ IDEA.
+ * User: Ghost
+ * Date: 3/5/11
+ * Time: 2:06 PM
+ * To change this template use File | Settings | File Templates.
+ */
+public class MWUnitTest extends BaseTest {
+    @BeforeClass
+    public void init() { }
+
+    @Test
+    private void testMWU() {
+        logger.warn("Testing MWU");
+        MannWhitneyU mwu = new MannWhitneyU();
+        mwu.add(0, MannWhitneyU.USet.SET1);
+        mwu.add(1,MannWhitneyU.USet.SET2);
+        mwu.add(2,MannWhitneyU.USet.SET2);
+        mwu.add(3,MannWhitneyU.USet.SET2);
+        mwu.add(4,MannWhitneyU.USet.SET2);
+        mwu.add(5,MannWhitneyU.USet.SET2);
+        mwu.add(6,MannWhitneyU.USet.SET1);
+        mwu.add(7,MannWhitneyU.USet.SET1);
+        mwu.add(8,MannWhitneyU.USet.SET1);
+        mwu.add(9,MannWhitneyU.USet.SET1);
+        mwu.add(10,MannWhitneyU.USet.SET1);
+        mwu.add(11,MannWhitneyU.USet.SET2);
+        Assert.assertEquals(MannWhitneyU.calculateOneSidedU(mwu.getObservations(), MannWhitneyU.USet.SET1),25L);
+        Assert.assertEquals(MannWhitneyU.calculateOneSidedU(mwu.getObservations(),MannWhitneyU.USet.SET2),11L);
+
+        MannWhitneyU mwu2 = new MannWhitneyU();
+        MannWhitneyU mwuNoDither = new MannWhitneyU(false);
+        for ( int dp : new int[]{2,4,5,6,8} ) {
+            mwu2.add(dp,MannWhitneyU.USet.SET1);
+            mwuNoDither.add(dp,MannWhitneyU.USet.SET1);
+        }
+
+        for ( int dp : new int[]{1,3,7,9,10,11,12,13} ) {
+            mwu2.add(dp,MannWhitneyU.USet.SET2);
+            mwuNoDither.add(dp,MannWhitneyU.USet.SET2);
+        }
+
+        MannWhitneyU.ExactMode pm = MannWhitneyU.ExactMode.POINT;
+        MannWhitneyU.ExactMode cm = MannWhitneyU.ExactMode.CUMULATIVE;
+
+        // tests using the hypothesis that set 2 dominates set 1 (U value = 10)
+        Assert.assertEquals(MannWhitneyU.calculateOneSidedU(mwu2.getObservations(),MannWhitneyU.USet.SET1),10L);
+        Assert.assertEquals(MannWhitneyU.calculateOneSidedU(mwu2.getObservations(),MannWhitneyU.USet.SET2),30L);
+        Assert.assertEquals(MannWhitneyU.calculateOneSidedU(mwuNoDither.getObservations(),MannWhitneyU.USet.SET1),10L);
+        Assert.assertEquals(MannWhitneyU.calculateOneSidedU(mwuNoDither.getObservations(),MannWhitneyU.USet.SET2),30L);
+
+        Pair<Integer,Integer> sizes = mwu2.getSetSizes();
+
+        Assert.assertEquals(MannWhitneyU.calculatePUniformApproximation(sizes.first,sizes.second,10L),0.4180519701814064,1e-14);
+        Assert.assertEquals(MannWhitneyU.calculatePRecursively(sizes.first,sizes.second,10L,false,pm).second,0.021756021756021756,1e-14);
+        Assert.assertEquals(MannWhitneyU.calculatePNormalApproximation(sizes.first,sizes.second,10L,false).second,0.06214143703127617,1e-14);
+        logger.warn("Testing two-sided");
+        Assert.assertEquals((double)mwu2.runTwoSidedTest().second,2*0.021756021756021756,1e-8);
+
+        // tests using the hypothesis that set 1 dominates set 2 (U value = 30) -- empirical should be identical, normall approx close, uniform way off
+        Assert.assertEquals(MannWhitneyU.calculatePNormalApproximation(sizes.second,sizes.first,30L,true).second,2.0*0.08216463976903321,1e-14);
+        Assert.assertEquals(MannWhitneyU.calculatePUniformApproximation(sizes.second,sizes.first,30L),0.0023473625009559074,1e-14);
+        Assert.assertEquals(MannWhitneyU.calculatePRecursively(sizes.second,sizes.first,30L,false,pm).second,0.021756021756021756,1e-14); // note -- exactly same value as above
+        Assert.assertEquals(MannWhitneyU.calculatePRecursively(sizes.second,sizes.first,29L,false,cm).second,1.0-0.08547008547008,1e-14); // r does a correction, subtracting 1 from U
+        Assert.assertEquals(MannWhitneyU.calculatePRecursively(sizes.second,sizes.first,11L,false,cm).second,0.08547008547008,1e-14); // r does a correction, subtracting 1 from U
+        Assert.assertEquals(MannWhitneyU.calculatePRecursively(sizes.second,sizes.first,11L,false,cm).first,-1.36918910442,1e-2); // apache inversion set to be good only to 1e-2
+        Assert.assertEquals(MannWhitneyU.calculatePRecursively(sizes.second,sizes.first,29L,false,cm).first,1.36918910442,1e-2); // apache inversion set to be good only to 1e-2
+        Assert.assertEquals(MannWhitneyU.calculatePRecursively(sizes.second,sizes.first,29L,false,pm).first,1.2558754796642067,1e-8); // PDF should be similar
+        Assert.assertEquals(MannWhitneyU.calculatePRecursively(sizes.second,sizes.first,11L,false,pm).first,-1.2558754796642067,1e-8); // PDF should be similar
+        Assert.assertEquals(MannWhitneyU.calculatePRecursively(4,5,10L,false,pm).second,0.0952381,1e-5);
+        Assert.assertEquals(MannWhitneyU.calculatePRecursively(4,5,10L,false,pm).first,0.0,1e-14);
+
+        logger.warn("Set 1");
+        Assert.assertEquals((double)mwu2.runOneSidedTest(MannWhitneyU.USet.SET1).second,0.021756021756021756,1e-8);
+        logger.warn("Set 2");
+        Assert.assertEquals((double)mwu2.runOneSidedTest(MannWhitneyU.USet.SET2).second,0.021756021756021756,1e-8);
+
+        MannWhitneyU mwu3 = new MannWhitneyU();
+        for ( int dp : new int[]{0,2,4} ) {
+            mwu3.add(dp,MannWhitneyU.USet.SET1);
+        }
+        for ( int dp : new int[]{1,5,6,7,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34} ) {
+            mwu3.add(dp,MannWhitneyU.USet.SET2);
+        }
+        long u = MannWhitneyU.calculateOneSidedU(mwu3.getObservations(),MannWhitneyU.USet.SET1);
+        //logger.warn(String.format("U is: %d",u));
+        Pair<Integer,Integer> nums = mwu3.getSetSizes();
+        //logger.warn(String.format("Corrected p is: %.4e",MannWhitneyU.calculatePRecursivelyDoNotCheckValuesEvenThoughItIsSlow(nums.first,nums.second,u)));
+        //logger.warn(String.format("Counted sequences: %d",MannWhitneyU.countSequences(nums.first, nums.second, u)));
+        //logger.warn(String.format("Possible sequences: %d", (long) Arithmetic.binomial(nums.first+nums.second,nums.first)));
+        //logger.warn(String.format("Ratio: %.4e",MannWhitneyU.countSequences(nums.first,nums.second,u)/Arithmetic.binomial(nums.first+nums.second,nums.first)));
+        Assert.assertEquals(MannWhitneyU.calculatePRecursivelyDoNotCheckValuesEvenThoughItIsSlow(nums.first, nums.second, u), 3.665689149560116E-4, 1e-14);
+        Assert.assertEquals(MannWhitneyU.calculatePNormalApproximation(nums.first,nums.second,u,false).second,0.0032240865760884696,1e-14);
+        Assert.assertEquals(MannWhitneyU.calculatePUniformApproximation(nums.first,nums.second,u),0.0026195003025784036,1e-14);
+
+    }
+}
diff --git a/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/MathUtilsUnitTest.java b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/MathUtilsUnitTest.java
new file mode 100644
index 0000000..2835582
--- /dev/null
+++ b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/MathUtilsUnitTest.java
@@ -0,0 +1,913 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils;
+
+import cern.jet.random.Normal;
+import org.apache.commons.lang.ArrayUtils;
+import org.broadinstitute.gatk.utils.BaseTest;
+import org.testng.Assert;
+import org.testng.annotations.BeforeClass;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+import java.util.*;
+
+/**
+ * Basic unit test for MathUtils
+ */
+public class MathUtilsUnitTest extends BaseTest {
+
+    // Intentionally empty: no shared fixtures are needed yet, but the hook is
+    // kept so per-class setup can be added without touching the test methods.
+    @BeforeClass
+    public void init() {
+    }
+
+    /**
+     * Tests that we get unique values for the valid (non-null-producing) input space for {@link MathUtils#fastGenerateUniqueHashFromThreeIntegers(int, int, int)}.
+     */
+    @Test
+    public void testGenerateUniqueHashFromThreePositiveIntegers() {
+        logger.warn("Executing testGenerateUniqueHashFromThreePositiveIntegers");
+
+        // Collect every hash produced; Set.add() returns false on a duplicate,
+        // so a single assertTrue per triple proves pairwise uniqueness.
+        final Set<Long> observedLongs = new HashSet<>();
+        // Exhaustively cover the low range [0, Byte.MAX_VALUE) in each coordinate.
+        for (short i = 0; i < Byte.MAX_VALUE; i++) {
+            for (short j = 0; j < Byte.MAX_VALUE; j++) {
+                for (short k = 0; k < Byte.MAX_VALUE; k++) {
+                    final Long aLong = MathUtils.fastGenerateUniqueHashFromThreeIntegers(i, j, k);
+                    //System.out.println(String.format("%s, %s, %s: %s", i, j, k, aLong));
+                    Assert.assertTrue(observedLongs.add(aLong));
+                }
+            }
+        }
+
+        // Sample the high range [Byte.MAX_VALUE, Short.MAX_VALUE] in strides of 128.
+        // Note: "i <= Short.MAX_VALUE" is always true for a short, so the "i > 0"
+        // clause is what actually terminates the loop — it fails once "i += 128"
+        // overflows past Short.MAX_VALUE and wraps negative.
+        for (short i = Byte.MAX_VALUE; i <= Short.MAX_VALUE && i > 0; i += 128) {
+            for (short j = Byte.MAX_VALUE; j <= Short.MAX_VALUE && j > 0; j += 128) {
+                for (short k = Byte.MAX_VALUE; k <= Short.MAX_VALUE && k > 0; k += 128) {
+                    final Long aLong = MathUtils.fastGenerateUniqueHashFromThreeIntegers(i, j, k);
+                    // System.out.println(String.format("%s, %s, %s: %s", i, j, k, aLong));
+                    Assert.assertTrue(observedLongs.add(aLong));
+                }
+            }
+        }
+    }
+
+    @Test(dataProvider = "log10OneMinusPow10Data")
+    public void testLog10OneMinusPow10(final double x, final double expected) {
+        final double actual = MathUtils.log10OneMinusPow10(x);
+        if (Double.isNaN(expected))
+            Assert.assertTrue(Double.isNaN(actual));
+        else
+            Assert.assertEquals(actual,expected,1E-9);
+    }
+
+    @Test(dataProvider = "log1mexpData")
+    public void testLog1mexp(final double x, final double expected) {
+        final double actual = MathUtils.log1mexp(x);
+        if (Double.isNaN(expected))
+            Assert.assertTrue(Double.isNaN(actual));
+        else
+            Assert.assertEquals(actual,expected,1E-9);
+    }
+
+    /**
+     * Supplies (input, expected) pairs for {@code testLog10OneMinusPow10}; the
+     * expected value is the direct (naive) evaluation of log10(1 - 10^x).
+     */
+    @DataProvider(name = "log10OneMinusPow10Data")
+    public Iterator<Object[]> log10OneMinusPow10Data() {
+        final double[] inputs = new double[] { Double.NaN, 10, 1, 0, -1, -3, -10, -30, -100, -300, -1000, -3000 };
+        final List<Object[]> cases = new ArrayList<>(inputs.length);
+        for (final double in : inputs) {
+            cases.add(new Object[] { in, Math.log10(1 - Math.pow(10, in)) });
+        }
+        // An ArrayList iterator also throws UnsupportedOperationException on
+        // remove() for this read-only use, matching the previous hand-rolled one.
+        return cases.iterator();
+    }
+
+    /**
+     * Supplies (input, expected) pairs for {@code testLog1mexp}; the expected
+     * value is the direct (naive) evaluation of ln(1 - e^x).
+     */
+    @DataProvider(name = "log1mexpData")
+    public Iterator<Object[]> log1mexpData() {
+        final double[] inputs = new double[] { Double.NaN, 10, 1, 0, -1, -3, -10, -30, -100, -300, -1000, -3000 };
+        final List<Object[]> cases = new ArrayList<>(inputs.length);
+        for (final double in : inputs) {
+            cases.add(new Object[] { in, Math.log(1 - Math.exp(in)) });
+        }
+        return cases.iterator();
+    }
+
+    /**
+     * Tests that we get the right values from the binomial distribution
+     */
+    @Test
+    public void testBinomialProbability() {
+        logger.warn("Executing testBinomialProbability");
+
+        Assert.assertEquals(MathUtils.binomialProbability(3, 2, 0.5), 0.375, 0.0001);
+        Assert.assertEquals(MathUtils.binomialProbability(100, 10, 0.5), 1.365543e-17, 1e-18);
+        Assert.assertEquals(MathUtils.binomialProbability(217, 73, 0.02), 4.521904e-67, 1e-68);
+        Assert.assertEquals(MathUtils.binomialProbability(300, 100, 0.02), 9.27097e-91, 1e-92);
+        Assert.assertEquals(MathUtils.binomialProbability(300, 150, 0.98), 6.462892e-168, 1e-169);
+        Assert.assertEquals(MathUtils.binomialProbability(300, 120, 0.98), 3.090054e-221, 1e-222);
+        Assert.assertEquals(MathUtils.binomialProbability(300, 112, 0.98), 2.34763e-236, 1e-237);
+    }
+
+    /**
+     * Tests that we get the right values from the binomial distribution
+     */
+    @Test
+    public void testCumulativeBinomialProbability() {
+        logger.warn("Executing testCumulativeBinomialProbability");
+
+        for (int j = 0; j < 2; j++) { // Test memoizing functionality, as well.
+            final int numTrials = 10;
+            // The cumulative probability over the single-point range [i, i] must
+            // equal the point probability of exactly i successes.
+            for ( int i = 0; i < numTrials; i++ )
+                Assert.assertEquals(MathUtils.binomialCumulativeProbability(numTrials, i, i), MathUtils.binomialProbability(numTrials, i), 1e-10, String.format("k=%d, n=%d", i, numTrials));
+
+            // Spot checks of the cumulative range [0, k] for n=10; the values
+            // match p=0.5 (e.g. 56/1024 = 0.0546875) — presumably the method's
+            // fixed success probability. Covering the full range must give 1.
+            Assert.assertEquals(MathUtils.binomialCumulativeProbability(10, 0, 2), 0.05468750, 1e-7);
+            Assert.assertEquals(MathUtils.binomialCumulativeProbability(10, 0, 5), 0.62304687, 1e-7);
+            Assert.assertEquals(MathUtils.binomialCumulativeProbability(10, 0, 10), 1.0, 1e-7);
+        }
+    }
+
+    /**
+     * Tests that we get the right values from the multinomial distribution
+     */
+    @Test
+    public void testMultinomialProbability() {
+        logger.warn("Executing testMultinomialProbability");
+
+        // Parallel tables: category counts, category probabilities, expected
+        // multinomial probability, and the tolerance for the comparison.
+        final int[][] countCases = {
+                { 2, 0, 1 },
+                { 10, 20, 30 },
+                { 38, 82, 50, 36 },
+                { 1, 600, 1 },
+        };
+        final double[][] probCases = {
+                { 0.33, 0.33, 0.34 },
+                { 0.25, 0.25, 0.50 },
+                { 0.25, 0.25, 0.25, 0.25 },
+                { 0.33, 0.33, 0.34 },
+        };
+        final double[] expected  = { 0.111078, 0.002870301, 1.88221e-09, 5.20988e-285 };
+        final double[] tolerance = { 1e-6,     1e-9,        1e-10,       1e-286       };
+        for (int i = 0; i < expected.length; i++) {
+            Assert.assertEquals(MathUtils.multinomialProbability(countCases[i], probCases[i]), expected[i], tolerance[i]);
+        }
+    }
+
+    /**
+     * Tests that the random index selection is working correctly
+     */
+    @Test
+    public void testRandomIndicesWithReplacement() {
+        logger.warn("Executing testRandomIndicesWithReplacement");
+
+        // Check that the size of the list returned is correct
+        Assert.assertTrue(MathUtils.sampleIndicesWithReplacement(5, 0).size() == 0);
+        Assert.assertTrue(MathUtils.sampleIndicesWithReplacement(5, 1).size() == 1);
+        Assert.assertTrue(MathUtils.sampleIndicesWithReplacement(5, 5).size() == 5);
+        Assert.assertTrue(MathUtils.sampleIndicesWithReplacement(5, 1000).size() == 1000);
+
+        // Check that the list contains only the k element range that as asked for - no more, no less
+        // NOTE(review): this is a probabilistic check — it relies on 10000 draws
+        // from 5 indices covering every index, which fails only with vanishing
+        // probability assuming the sampler is roughly uniform.
+        List<Integer> Five = new ArrayList<>();
+        Collections.addAll(Five, 0, 1, 2, 3, 4);
+        List<Integer> BigFive = MathUtils.sampleIndicesWithReplacement(5, 10000);
+        Assert.assertTrue(BigFive.containsAll(Five));   // all indices were drawn
+        Assert.assertTrue(Five.containsAll(BigFive));   // no index out of [0,5)
+    }
+
+    /**
+     * Tests that sliceListByIndices pulls exactly the elements addressed by an
+     * index list, no more and no less.
+     */
+    @Test
+    public void testSliceListByIndices() {
+        logger.warn("Executing testSliceListByIndices");
+
+        // Check that the list contains only the k element range that as asked for - no more, no less but now
+        // use the index list to pull elements from another list using sliceListByIndices
+        List<Integer> Five = new ArrayList<>();
+        Collections.addAll(Five, 0, 1, 2, 3, 4);
+        List<Character> FiveAlpha = new ArrayList<>();
+        Collections.addAll(FiveAlpha, 'a', 'b', 'c', 'd', 'e');
+        // NOTE(review): probabilistic — 10000 random indices into [0,5) almost
+        // surely hit every index, so the slice should cover all five characters.
+        List<Integer> BigFive = MathUtils.sampleIndicesWithReplacement(5, 10000);
+        List<Character> BigFiveAlpha = MathUtils.sliceListByIndices(BigFive, FiveAlpha);
+        Assert.assertTrue(BigFiveAlpha.containsAll(FiveAlpha)); // every source element appears
+        Assert.assertTrue(FiveAlpha.containsAll(BigFiveAlpha)); // nothing outside the source
+    }
+
+    /**
+     * Tests that we correctly compute mean and standard deviation from a stream of numbers
+     */
+    @Test
+    public void testRunningAverage() {
+        logger.warn("Executing testRunningAverage");
+
+        int[] numbers = {1, 2, 4, 5, 3, 128, 25678, -24};
+        MathUtils.RunningAverage r = new MathUtils.RunningAverage();
+
+        for (final double b : numbers)
+            r.add(b);
+
+        Assert.assertEquals((long) numbers.length, r.observationCount());
+        // BUGFIX: the original asserted (value - expected) < tolerance without
+        // taking the absolute value, which trivially passes whenever the computed
+        // value undershoots the expected one (e.g. a mean of 0 would have passed).
+        // Compare the absolute deviation instead. 3224.625 is the exact mean of
+        // the inputs (25797 / 8); the stddev constant is the sample stddev.
+        Assert.assertTrue(Math.abs(r.mean() - 3224.625) < 2e-10);
+        Assert.assertTrue(Math.abs(r.stddev() - 9072.6515881128) < 2e-10);
+    }
+
+    @Test
+    public void testLog10Gamma() {
+        logger.warn("Executing testLog10Gamma");
+
+        Assert.assertEquals(MathUtils.log10Gamma(4.0), 0.7781513, 1e-6);
+        Assert.assertEquals(MathUtils.log10Gamma(10), 5.559763, 1e-6);
+        Assert.assertEquals(MathUtils.log10Gamma(10654), 38280.53, 1e-2);
+    }
+
+    @Test
+    public void testLog10BinomialCoefficient() {
+        logger.warn("Executing testLog10BinomialCoefficient");
+        // note that we can test the binomial coefficient calculation indirectly via Newton's identity
+        // (1+z)^m = sum (m choose k)z^k
+        double[] z_vals = new double[]{0.999,0.9,0.8,0.5,0.2,0.01,0.0001};
+        int[] exponent = new int[]{5,15,25,50,100};
+        for ( double z : z_vals ) {
+            double logz = Math.log10(z);
+            for ( int exp : exponent ) {
+                // Left-hand side of the identity in log10 space: exp * log10(1+z).
+                double expected_log = exp*Math.log10(1+z);
+                double[] newtonArray_log = new double[1+exp];
+                // Each entry is log10( C(exp,k) * z^k ) = log10BinomialCoefficient + k*log10(z).
+                for ( int k = 0 ; k <= exp; k++ ) {
+                    newtonArray_log[k] = MathUtils.log10BinomialCoefficient(exp,k)+k*logz;
+                }
+                // Summing the right-hand-side terms in log space must reproduce the left-hand side.
+                Assert.assertEquals(MathUtils.log10sumLog10(newtonArray_log),expected_log,1e-6);
+            }
+        }
+
+        // Direct spot checks of log10(n choose k) against precomputed values.
+        Assert.assertEquals(MathUtils.log10BinomialCoefficient(4, 2), 0.7781513, 1e-6);
+        Assert.assertEquals(MathUtils.log10BinomialCoefficient(10, 3), 2.079181, 1e-6);
+        Assert.assertEquals(MathUtils.log10BinomialCoefficient(103928, 119), 400.2156, 1e-4);
+    }
+
+    @Test
+    public void testFactorial() {
+        logger.warn("Executing testFactorial");
+        // Table of n -> n! for values whose factorial still fits in an int.
+        final int[] inputs   = { 4, 10, 12 };
+        final int[] expected = { 24, 3628800, 479001600 };
+        for (int i = 0; i < inputs.length; i++) {
+            Assert.assertEquals((int) MathUtils.factorial(inputs[i]), expected[i]);
+        }
+    }
+
+    @Test
+    public void testLog10Factorial() {
+        logger.warn("Executing testLog10Factorial");
+        // Fixed-point spot checks against precomputed values.
+        Assert.assertEquals(MathUtils.log10Factorial(4), 1.380211, 1e-6);
+        Assert.assertEquals(MathUtils.log10Factorial(10), 6.559763, 1e-6);
+        Assert.assertEquals(MathUtils.log10Factorial(12), 8.680337, 1e-6);
+        Assert.assertEquals(MathUtils.log10Factorial(200), 374.8969, 1e-3);
+        Assert.assertEquals(MathUtils.log10Factorial(12342), 45138.26, 1e-1);
+        // Walk the recurrence log10((n+1)!) = log10(n!) + log10(n+1) upward from
+        // three seeds (log10(1!) = 0 exactly; the other two are the approximate
+        // constants asserted above) and compare log10Factorial at every step.
+        double log10factorial_small = 0;
+        double log10factorial_middle = 374.8969;
+        double log10factorial_large = 45138.26;
+        int small_start = 1;
+        int med_start = 200;
+        int large_start = 12342;
+        for ( int i = 1; i < 1000; i++ ) {
+            log10factorial_small += Math.log10(i+small_start);
+            log10factorial_middle += Math.log10(i+med_start);
+            log10factorial_large += Math.log10(i+large_start);
+            // Tolerances loosen for the medium/large walks because their seeds
+            // are only accurate to the digits given above.
+            Assert.assertEquals(MathUtils.log10Factorial(small_start+i),log10factorial_small,1e-6);
+            Assert.assertEquals(MathUtils.log10Factorial(med_start+i),log10factorial_middle,1e-3);
+            Assert.assertEquals(MathUtils.log10Factorial(large_start+i),log10factorial_large,1e-1);
+        }
+    }
+
+    @Test
+    public void testApproximateLog10SumLog10() {
+
+        final double requiredPrecision = 1E-4;
+
+        Assert.assertEquals(MathUtils.approximateLog10SumLog10(new double[] {0.0}), 0.0, requiredPrecision);
+        Assert.assertEquals(MathUtils.approximateLog10SumLog10(new double[] {-5.15}), -5.15, requiredPrecision);
+        Assert.assertEquals(MathUtils.approximateLog10SumLog10(new double[] {130.0}), 130.0, requiredPrecision);
+        Assert.assertEquals(MathUtils.approximateLog10SumLog10(new double[] {-0.145}), -0.145, requiredPrecision);
+
+        Assert.assertEquals(MathUtils.approximateLog10SumLog10(0.0, 0.0), Math.log10(Math.pow(10.0, 0.0) + Math.pow(10.0, 0.0)), requiredPrecision);
+        Assert.assertEquals(MathUtils.approximateLog10SumLog10(-1.0, 0.0), Math.log10(Math.pow(10.0, -1.0) + Math.pow(10.0, 0.0)), requiredPrecision);
+        Assert.assertEquals(MathUtils.approximateLog10SumLog10(0.0, -1.0), Math.log10(Math.pow(10.0, 0.0) + Math.pow(10.0, -1.0)), requiredPrecision);
+        Assert.assertEquals(MathUtils.approximateLog10SumLog10(-2.2, -3.5), Math.log10(Math.pow(10.0, -2.2) + Math.pow(10.0, -3.5)), requiredPrecision);
+        Assert.assertEquals(MathUtils.approximateLog10SumLog10(-1.0, -7.1), Math.log10(Math.pow(10.0, -1.0) + Math.pow(10.0, -7.1)), requiredPrecision);
+        Assert.assertEquals(MathUtils.approximateLog10SumLog10(5.0, 6.2), Math.log10(Math.pow(10.0, 5.0) + Math.pow(10.0, 6.2)), requiredPrecision);
+        Assert.assertEquals(MathUtils.approximateLog10SumLog10(38.1, 16.2), Math.log10(Math.pow(10.0, 38.1) + Math.pow(10.0, 16.2)), requiredPrecision);
+        Assert.assertEquals(MathUtils.approximateLog10SumLog10(-38.1, 6.2), Math.log10(Math.pow(10.0, -38.1) + Math.pow(10.0, 6.2)), requiredPrecision);
+        Assert.assertEquals(MathUtils.approximateLog10SumLog10(-19.1, -37.1), Math.log10(Math.pow(10.0, -19.1) + Math.pow(10.0, -37.1)), requiredPrecision);
+        Assert.assertEquals(MathUtils.approximateLog10SumLog10(-29.1, -27.6), Math.log10(Math.pow(10.0, -29.1) + Math.pow(10.0, -27.6)), requiredPrecision);
+        Assert.assertEquals(MathUtils.approximateLog10SumLog10(-0.12345, -0.23456), Math.log10(Math.pow(10.0, -0.12345) + Math.pow(10.0, -0.23456)), requiredPrecision);
+        Assert.assertEquals(MathUtils.approximateLog10SumLog10(-15.7654, -17.0101), Math.log10(Math.pow(10.0, -15.7654) + Math.pow(10.0, -17.0101)), requiredPrecision);
+        Assert.assertEquals(MathUtils.approximateLog10SumLog10(-0.12345, Double.NEGATIVE_INFINITY), -0.12345, requiredPrecision);
+        Assert.assertEquals(MathUtils.approximateLog10SumLog10(-15.7654, Double.NEGATIVE_INFINITY), -15.7654, requiredPrecision);
+
+        Assert.assertEquals(MathUtils.approximateLog10SumLog10(new double[] {0.0, 0.0}), Math.log10(Math.pow(10.0, 0.0) + Math.pow(10.0, 0.0)), requiredPrecision);
+        Assert.assertEquals(MathUtils.approximateLog10SumLog10(new double[] {-1.0, 0.0}), Math.log10(Math.pow(10.0, -1.0) + Math.pow(10.0, 0.0)), requiredPrecision);
+        Assert.assertEquals(MathUtils.approximateLog10SumLog10(new double[] {0.0, -1.0}), Math.log10(Math.pow(10.0, 0.0) + Math.pow(10.0, -1.0)), requiredPrecision);
+        Assert.assertEquals(MathUtils.approximateLog10SumLog10(new double[] {-2.2, -3.5}), Math.log10(Math.pow(10.0, -2.2) + Math.pow(10.0, -3.5)), requiredPrecision);
+        Assert.assertEquals(MathUtils.approximateLog10SumLog10(new double[] {-1.0, -7.1}), Math.log10(Math.pow(10.0, -1.0) + Math.pow(10.0, -7.1)), requiredPrecision);
+        Assert.assertEquals(MathUtils.approximateLog10SumLog10(new double[] {5.0, 6.2}), Math.log10(Math.pow(10.0, 5.0) + Math.pow(10.0, 6.2)), requiredPrecision);
+        Assert.assertEquals(MathUtils.approximateLog10SumLog10(new double[] {38.1, 16.2}), Math.log10(Math.pow(10.0, 38.1) + Math.pow(10.0, 16.2)), requiredPrecision);
+        Assert.assertEquals(MathUtils.approximateLog10SumLog10(new double[] {-38.1, 6.2}), Math.log10(Math.pow(10.0, -38.1) + Math.pow(10.0, 6.2)), requiredPrecision);
+        Assert.assertEquals(MathUtils.approximateLog10SumLog10(new double[] {-19.1, -37.1}), Math.log10(Math.pow(10.0, -19.1) + Math.pow(10.0, -37.1)), requiredPrecision);
+        Assert.assertEquals(MathUtils.approximateLog10SumLog10(new double[] {-29.1, -27.6}), Math.log10(Math.pow(10.0, -29.1) + Math.pow(10.0, -27.6)), requiredPrecision);
+        Assert.assertEquals(MathUtils.approximateLog10SumLog10(new double[] {-0.12345, -0.23456}), Math.log10(Math.pow(10.0, -0.12345) + Math.pow(10.0, -0.23456)), requiredPrecision);
+        Assert.assertEquals(MathUtils.approximateLog10SumLog10(new double[] {-15.7654, -17.0101}), Math.log10(Math.pow(10.0, -15.7654) + Math.pow(10.0, -17.0101)), requiredPrecision);
+
+        Assert.assertEquals(MathUtils.approximateLog10SumLog10(new double[] {0.0, 0.0, 0.0}), Math.log10(Math.pow(10.0, 0.0) + Math.pow(10.0, 0.0) + Math.pow(10.0, 0.0)), requiredPrecision);
+        Assert.assertEquals(MathUtils.approximateLog10SumLog10(new double[] {-1.0, 0.0, 0.0}), Math.log10(Math.pow(10.0, -1.0) + Math.pow(10.0, 0.0) + Math.pow(10.0, 0.0)), requiredPrecision);
+        Assert.assertEquals(MathUtils.approximateLog10SumLog10(new double[] {0.0, -1.0, -2.5}), Math.log10(Math.pow(10.0, 0.0) + Math.pow(10.0, -1.0) + Math.pow(10.0, -2.5)), requiredPrecision);
+        Assert.assertEquals(MathUtils.approximateLog10SumLog10(new double[] {-2.2, -3.5, -1.1}), Math.log10(Math.pow(10.0, -2.2) + Math.pow(10.0, -3.5) + Math.pow(10.0, -1.1)), requiredPrecision);
+        Assert.assertEquals(MathUtils.approximateLog10SumLog10(new double[] {-1.0, -7.1, 0.5}), Math.log10(Math.pow(10.0, -1.0) + Math.pow(10.0, -7.1) + Math.pow(10.0, 0.5)), requiredPrecision);
+        Assert.assertEquals(MathUtils.approximateLog10SumLog10(new double[] {5.0, 6.2, 1.3}), Math.log10(Math.pow(10.0, 5.0) + Math.pow(10.0, 6.2) + Math.pow(10.0, 1.3)), requiredPrecision);
+        Assert.assertEquals(MathUtils.approximateLog10SumLog10(new double[] {38.1, 16.2, 18.1}), Math.log10(Math.pow(10.0, 38.1) + Math.pow(10.0, 16.2) + Math.pow(10.0, 18.1)), requiredPrecision);
+        Assert.assertEquals(MathUtils.approximateLog10SumLog10(new double[] {-38.1, 6.2, 26.6}), Math.log10(Math.pow(10.0, -38.1) + Math.pow(10.0, 6.2) + Math.pow(10.0, 26.6)), requiredPrecision);
+        Assert.assertEquals(MathUtils.approximateLog10SumLog10(new double[] {-19.1, -37.1, -45.1}), Math.log10(Math.pow(10.0, -19.1) + Math.pow(10.0, -37.1) + Math.pow(10.0, -45.1)), requiredPrecision);
+        Assert.assertEquals(MathUtils.approximateLog10SumLog10(new double[] {-29.1, -27.6, -26.2}), Math.log10(Math.pow(10.0, -29.1) + Math.pow(10.0, -27.6) + Math.pow(10.0, -26.2)), requiredPrecision);
+        Assert.assertEquals(MathUtils.approximateLog10SumLog10(new double[] {-0.12345, -0.23456, -0.34567}), Math.log10(Math.pow(10.0, -0.12345) + Math.pow(10.0, -0.23456) + Math.pow(10.0, -0.34567)), requiredPrecision);
+        Assert.assertEquals(MathUtils.approximateLog10SumLog10(new double[] {-15.7654, -17.0101, -17.9341}), Math.log10(Math.pow(10.0, -15.7654) + Math.pow(10.0, -17.0101) + Math.pow(10.0, -17.9341)), requiredPrecision);
+
+        Assert.assertEquals(MathUtils.approximateLog10SumLog10(0.0, 0.0, 0.0), Math.log10(Math.pow(10.0, 0.0) + Math.pow(10.0, 0.0) + Math.pow(10.0, 0.0)), requiredPrecision);
+        Assert.assertEquals(MathUtils.approximateLog10SumLog10(-1.0, 0.0, 0.0), Math.log10(Math.pow(10.0, -1.0) + Math.pow(10.0, 0.0) + Math.pow(10.0, 0.0)), requiredPrecision);
+        Assert.assertEquals(MathUtils.approximateLog10SumLog10(0.0, -1.0, -2.5), Math.log10(Math.pow(10.0, 0.0) + Math.pow(10.0, -1.0) + Math.pow(10.0, -2.5)), requiredPrecision);
+        Assert.assertEquals(MathUtils.approximateLog10SumLog10(-2.2, -3.5, -1.1), Math.log10(Math.pow(10.0, -2.2) + Math.pow(10.0, -3.5) + Math.pow(10.0, -1.1)), requiredPrecision);
+        Assert.assertEquals(MathUtils.approximateLog10SumLog10(-1.0, -7.1, 0.5), Math.log10(Math.pow(10.0, -1.0) + Math.pow(10.0, -7.1) + Math.pow(10.0, 0.5)), requiredPrecision);
+        Assert.assertEquals(MathUtils.approximateLog10SumLog10(5.0, 6.2, 1.3), Math.log10(Math.pow(10.0, 5.0) + Math.pow(10.0, 6.2) + Math.pow(10.0, 1.3)), requiredPrecision);
+        Assert.assertEquals(MathUtils.approximateLog10SumLog10(38.1, 16.2, 18.1), Math.log10(Math.pow(10.0, 38.1) + Math.pow(10.0, 16.2) + Math.pow(10.0, 18.1)), requiredPrecision);
+        Assert.assertEquals(MathUtils.approximateLog10SumLog10(-38.1, 6.2, 26.6), Math.log10(Math.pow(10.0, -38.1) + Math.pow(10.0, 6.2) + Math.pow(10.0, 26.6)), requiredPrecision);
+        Assert.assertEquals(MathUtils.approximateLog10SumLog10(-19.1, -37.1, -45.1), Math.log10(Math.pow(10.0, -19.1) + Math.pow(10.0, -37.1) + Math.pow(10.0, -45.1)), requiredPrecision);
+        Assert.assertEquals(MathUtils.approximateLog10SumLog10(-29.1, -27.6, -26.2), Math.log10(Math.pow(10.0, -29.1) + Math.pow(10.0, -27.6) + Math.pow(10.0, -26.2)), requiredPrecision);
+        Assert.assertEquals(MathUtils.approximateLog10SumLog10(-0.12345, -0.23456, -0.34567), Math.log10(Math.pow(10.0, -0.12345) + Math.pow(10.0, -0.23456) + Math.pow(10.0, -0.34567)), requiredPrecision);
+        Assert.assertEquals(MathUtils.approximateLog10SumLog10(-15.7654, -17.0101, -17.9341), Math.log10(Math.pow(10.0, -15.7654) + Math.pow(10.0, -17.0101) + Math.pow(10.0, -17.9341)), requiredPrecision);
+
+        // magnitude of the sum doesn't matter, so we can combinatorially test this via partitions of unity
+        double[] mult_partitionFactor = new double[]{0.999,0.98,0.95,0.90,0.8,0.5,0.3,0.1,0.05,0.001};
+        int[] n_partitions = new int[] {2,4,8,16,32,64,128,256,512,1028};
+        for ( double alpha : mult_partitionFactor ) {
+            double log_alpha = Math.log10(alpha);
+            double log_oneMinusAlpha = Math.log10(1-alpha);
+            for ( int npart : n_partitions ) {
+                double[] multiplicative = new double[npart];
+                double[] equal = new double[npart];
+                double remaining_log = 0.0;  // realspace = 1
+                for ( int i = 0 ; i < npart-1; i++ ) {
+                    equal[i] = -Math.log10(npart);
+                    double piece = remaining_log + log_alpha; // take a*remaining, leaving remaining-a*remaining = (1-a)*remaining
+                    multiplicative[i] = piece;
+                    remaining_log = remaining_log + log_oneMinusAlpha;
+                }
+                equal[npart-1] = -Math.log10(npart);
+                multiplicative[npart-1] = remaining_log;
+                Assert.assertEquals(MathUtils.approximateLog10SumLog10(equal),0.0,requiredPrecision,String.format("Did not sum to one: k=%d equal partitions.",npart));
+                Assert.assertEquals(MathUtils.approximateLog10SumLog10(multiplicative),0.0,requiredPrecision, String.format("Did not sum to one: k=%d multiplicative partitions with alpha=%f",npart,alpha));
+            }
+        }
+    }
+
+    @Test
+    public void testLog10sumLog10() {
+        final double requiredPrecision = 1E-14;
+
+        final double log3 = 0.477121254719662;
+        Assert.assertEquals(MathUtils.log10sumLog10(new double[]{0.0, 0.0, 0.0}), log3, requiredPrecision);
+        Assert.assertEquals(MathUtils.log10sumLog10(new double[] {0.0, 0.0, 0.0}, 0), log3, requiredPrecision);
+        Assert.assertEquals(MathUtils.log10sumLog10(new double[]{0.0, 0.0, 0.0}, 0, 3), log3, requiredPrecision);
+
+        final double log2 = 0.301029995663981;
+        Assert.assertEquals(MathUtils.log10sumLog10(new double[] {0.0, 0.0, 0.0}, 0, 2), log2, requiredPrecision);
+        Assert.assertEquals(MathUtils.log10sumLog10(new double[] {0.0, 0.0, 0.0}, 0, 1), 0.0, requiredPrecision);
+
+        Assert.assertEquals(MathUtils.log10sumLog10(new double[] {0.0}), 0.0, requiredPrecision);
+        Assert.assertEquals(MathUtils.log10sumLog10(new double[] {-5.15}), -5.15, requiredPrecision);
+        Assert.assertEquals(MathUtils.log10sumLog10(new double[] {130.0}), 130.0, requiredPrecision);
+        Assert.assertEquals(MathUtils.log10sumLog10(new double[] {-0.145}), -0.145, requiredPrecision);
+
+        Assert.assertEquals(MathUtils.log10sumLog10(new double[] {0.0, 0.0}), Math.log10(Math.pow(10.0, 0.0) + Math.pow(10.0, 0.0)), requiredPrecision);
+        Assert.assertEquals(MathUtils.log10sumLog10(new double[] {-1.0, 0.0}), Math.log10(Math.pow(10.0, -1.0) + Math.pow(10.0, 0.0)), requiredPrecision);
+        Assert.assertEquals(MathUtils.log10sumLog10(new double[] {0.0, -1.0}), Math.log10(Math.pow(10.0, 0.0) + Math.pow(10.0, -1.0)), requiredPrecision);
+        Assert.assertEquals(MathUtils.log10sumLog10(new double[] {-2.2, -3.5}), Math.log10(Math.pow(10.0, -2.2) + Math.pow(10.0, -3.5)), requiredPrecision);
+        Assert.assertEquals(MathUtils.log10sumLog10(new double[] {-1.0, -7.1}), Math.log10(Math.pow(10.0, -1.0) + Math.pow(10.0, -7.1)), requiredPrecision);
+        Assert.assertEquals(MathUtils.log10sumLog10(new double[] {5.0, 6.2}), Math.log10(Math.pow(10.0, 5.0) + Math.pow(10.0, 6.2)), requiredPrecision);
+        Assert.assertEquals(MathUtils.log10sumLog10(new double[] {38.1, 16.2}), Math.log10(Math.pow(10.0, 38.1) + Math.pow(10.0, 16.2)), requiredPrecision);
+        Assert.assertEquals(MathUtils.log10sumLog10(new double[] {-38.1, 6.2}), Math.log10(Math.pow(10.0, -38.1) + Math.pow(10.0, 6.2)), requiredPrecision);
+        Assert.assertEquals(MathUtils.log10sumLog10(new double[] {-19.1, -37.1}), Math.log10(Math.pow(10.0, -19.1) + Math.pow(10.0, -37.1)), requiredPrecision);
+        Assert.assertEquals(MathUtils.log10sumLog10(new double[] {-29.1, -27.6}), Math.log10(Math.pow(10.0, -29.1) + Math.pow(10.0, -27.6)), requiredPrecision);
+        Assert.assertEquals(MathUtils.log10sumLog10(new double[] {-0.12345, -0.23456}), Math.log10(Math.pow(10.0, -0.12345) + Math.pow(10.0, -0.23456)), requiredPrecision);
+        Assert.assertEquals(MathUtils.log10sumLog10(new double[] {-15.7654, -17.0101}), Math.log10(Math.pow(10.0, -15.7654) + Math.pow(10.0, -17.0101)), requiredPrecision);
+
+        Assert.assertEquals(MathUtils.log10sumLog10(new double[] {0.0, 0.0, 0.0}), Math.log10(Math.pow(10.0, 0.0) + Math.pow(10.0, 0.0) + Math.pow(10.0, 0.0)), requiredPrecision);
+        Assert.assertEquals(MathUtils.log10sumLog10(new double[] {-1.0, 0.0, 0.0}), Math.log10(Math.pow(10.0, -1.0) + Math.pow(10.0, 0.0) + Math.pow(10.0, 0.0)), requiredPrecision);
+        Assert.assertEquals(MathUtils.log10sumLog10(new double[] {0.0, -1.0, -2.5}), Math.log10(Math.pow(10.0, 0.0) + Math.pow(10.0, -1.0) + Math.pow(10.0, -2.5)), requiredPrecision);
+        Assert.assertEquals(MathUtils.log10sumLog10(new double[] {-2.2, -3.5, -1.1}), Math.log10(Math.pow(10.0, -2.2) + Math.pow(10.0, -3.5) + Math.pow(10.0, -1.1)), requiredPrecision);
+        Assert.assertEquals(MathUtils.log10sumLog10(new double[] {-1.0, -7.1, 0.5}), Math.log10(Math.pow(10.0, -1.0) + Math.pow(10.0, -7.1) + Math.pow(10.0, 0.5)), requiredPrecision);
+        Assert.assertEquals(MathUtils.log10sumLog10(new double[] {5.0, 6.2, 1.3}), Math.log10(Math.pow(10.0, 5.0) + Math.pow(10.0, 6.2) + Math.pow(10.0, 1.3)), requiredPrecision);
+        Assert.assertEquals(MathUtils.log10sumLog10(new double[] {38.1, 16.2, 18.1}), Math.log10(Math.pow(10.0, 38.1) + Math.pow(10.0, 16.2) + Math.pow(10.0, 18.1)), requiredPrecision);
+        Assert.assertEquals(MathUtils.log10sumLog10(new double[] {-38.1, 6.2, 26.6}), Math.log10(Math.pow(10.0, -38.1) + Math.pow(10.0, 6.2) + Math.pow(10.0, 26.6)), requiredPrecision);
+        Assert.assertEquals(MathUtils.log10sumLog10(new double[] {-19.1, -37.1, -45.1}), Math.log10(Math.pow(10.0, -19.1) + Math.pow(10.0, -37.1) + Math.pow(10.0, -45.1)), requiredPrecision);
+        Assert.assertEquals(MathUtils.log10sumLog10(new double[] {-29.1, -27.6, -26.2}), Math.log10(Math.pow(10.0, -29.1) + Math.pow(10.0, -27.6) + Math.pow(10.0, -26.2)), requiredPrecision);
+        Assert.assertEquals(MathUtils.log10sumLog10(new double[] {-0.12345, -0.23456, -0.34567}), Math.log10(Math.pow(10.0, -0.12345) + Math.pow(10.0, -0.23456) + Math.pow(10.0, -0.34567)), requiredPrecision);
+        Assert.assertEquals(MathUtils.log10sumLog10(new double[] {-15.7654, -17.0101, -17.9341}), Math.log10(Math.pow(10.0, -15.7654) + Math.pow(10.0, -17.0101) + Math.pow(10.0, -17.9341)), requiredPrecision);
+
+        // magnitude of the sum doesn't matter, so we can combinatorially test this via partitions of unity
+        double[] mult_partitionFactor = new double[]{0.999,0.98,0.95,0.90,0.8,0.5,0.3,0.1,0.05,0.001};
+        int[] n_partitions = new int[] {2,4,8,16,32,64,128,256,512,1028};
+        for ( double alpha : mult_partitionFactor ) {
+            double log_alpha = Math.log10(alpha);
+            double log_oneMinusAlpha = Math.log10(1-alpha);
+            for ( int npart : n_partitions ) {
+                double[] multiplicative = new double[npart];
+                double[] equal = new double[npart];
+                double remaining_log = 0.0;  // realspace = 1
+                for ( int i = 0 ; i < npart-1; i++ ) {
+                    equal[i] = -Math.log10(npart);
+                    double piece = remaining_log + log_alpha; // take a*remaining, leaving remaining-a*remaining = (1-a)*remaining
+                    multiplicative[i] = piece;
+                    remaining_log = remaining_log + log_oneMinusAlpha;
+                }
+                equal[npart-1] = -Math.log10(npart);
+                multiplicative[npart-1] = remaining_log;
+                Assert.assertEquals(MathUtils.log10sumLog10(equal),0.0,requiredPrecision);
+                Assert.assertEquals(MathUtils.log10sumLog10(multiplicative),0.0,requiredPrecision,String.format("Did not sum to one: nPartitions=%d, alpha=%f",npart,alpha));
+            }
+        }
+    }
+
+    @Test
+    public void testLogDotProduct() {
+        // Operands and results are on the log10 scale: for the one-element case
+        // the result is simply -5.0 + 6.0 = 1.0 (log-space multiplication).
+        final double[] lhs3 = { -5.0, -3.0, 2.0 };
+        final double[] rhs3 = { 6.0, 7.0, 8.0 };
+        Assert.assertEquals(MathUtils.logDotProduct(lhs3, rhs3), 10.0, 1e-3);
+
+        final double[] lhs1 = { -5.0 };
+        final double[] rhs1 = { 6.0 };
+        Assert.assertEquals(MathUtils.logDotProduct(lhs1, rhs1), 1.0, 1e-3);
+    }
+
+    /**
+     * Verifies MathUtils.normalDistribution and normalDistributionLog10 against an
+     * independent Normal implementation (presumably cern/colt -- the import is outside
+     * this hunk) over a grid of (mu, sigma, x) triples.
+     */
+    @Test
+    public void testNormalDistribution() {
+        final double requiredPrecision = 1E-10;
+
+        // reference distribution; re-parameterized via setState() inside the loops
+        final Normal n = new Normal(0.0, 1.0, null);
+        for( final double mu : new double[]{-5.0, -3.2, -1.5, 0.0, 1.2, 3.0, 5.8977} ) {
+            for( final double sigma : new double[]{1.2, 3.0, 5.8977} ) {
+                for( final double x : new double[]{-5.0, -3.2, -1.5, 0.0, 1.2, 3.0, 5.8977} ) {
+                    n.setState(mu, sigma);
+                    Assert.assertEquals(n.pdf(x), MathUtils.normalDistribution(mu, sigma, x), requiredPrecision);
+                    Assert.assertEquals(Math.log10(n.pdf(x)), MathUtils.normalDistributionLog10(mu, sigma, x), requiredPrecision);
+                }
+            }
+        }
+    }
+
+    /**
+     * Provides (values, expectedMin) pairs for the arrayMin tests: singletons plus
+     * every permutation of two small fixed triples.
+     */
+    @DataProvider(name = "ArrayMinData")
+    public Object[][] makeArrayMinData() {
+        List<Object[]> tests = new ArrayList<>();
+
+        // this functionality can be adapted to provide input data for whatever you might want in your data
+        tests.add(new Object[]{Arrays.asList(10), 10});
+        tests.add(new Object[]{Arrays.asList(-10), -10});
+
+        // all orderings of {1,2,3}: min is 1 regardless of position
+        for ( final List<Integer> values : Utils.makePermutations(Arrays.asList(1,2,3), 3, false) ) {
+            tests.add(new Object[]{values, 1});
+        }
+
+        // all orderings of {1,2,-3}: exercises a negative minimum
+        for ( final List<Integer> values : Utils.makePermutations(Arrays.asList(1,2,-3), 3, false) ) {
+            tests.add(new Object[]{values, -3});
+        }
+
+
+        return tests.toArray(new Object[][]{});
+    }
+
+    /** Checks the List&lt;Integer&gt; overload of MathUtils.arrayMin. */
+    @Test(dataProvider = "ArrayMinData")
+    public void testArrayMinList(final List<Integer> values, final int expected) {
+        final int actual = MathUtils.arrayMin(values);
+        Assert.assertEquals(actual, expected, "Failed with " + values);
+    }
+
+    /** Checks the int[] overload of MathUtils.arrayMin on the same data. */
+    @Test(dataProvider = "ArrayMinData")
+    public void testArrayMinIntArray(final List<Integer> values, final int expected) {
+        final int[] asArray = ArrayUtils.toPrimitive(values.toArray(new Integer[values.size()]));
+        final int actual = MathUtils.arrayMin(asArray);
+        Assert.assertEquals(actual, expected, "Failed with " + values);
+    }
+
+    @Test(dataProvider = "ArrayMinData")
+    public void testArrayMinByteArray(final List<Integer> values, final int expected) {
+        final byte[] asArray = new byte[values.size()];
+        for ( int i = 0; i < values.size(); i++ ) asArray[i] = (byte)(values.get(i) & 0xFF);
+        final byte actual = MathUtils.arrayMin(asArray);
+        Assert.assertEquals(actual, (byte)(expected & 0xFF), "Failed with " + values);
+    }
+
+    /** Checks the double[] overload of MathUtils.arrayMin on the same data. */
+    @Test(dataProvider = "ArrayMinData")
+    public void testArrayMinDoubleArray(final List<Integer> values, final int expected) {
+        // widen each integer to a double, then verify the double overload
+        final double[] doubles = new double[values.size()];
+        for ( int j = 0; j < doubles.length; j++ ) {
+            doubles[j] = values.get(j).doubleValue();
+        }
+        final double observed = MathUtils.arrayMin(doubles);
+        Assert.assertEquals(observed, (double)expected, "Failed with " + values);
+    }
+
+    /**
+     * Provides (values, expectedMedian) pairs covering singletons, an even-sized
+     * list, and permutations of integer and double triples.
+     */
+    @DataProvider(name = "MedianData")
+    public Object[][] makeMedianData() {
+        final List<Object[]> tests = new ArrayList<>();
+
+        // this functionality can be adapted to provide input data for whatever you might want in your data
+        tests.add(new Object[]{Arrays.asList(10), 10});
+        // even-sized input: expected 10 implies MathUtils.median returns the upper of
+        // the two middle elements -- confirm against the implementation
+        tests.add(new Object[]{Arrays.asList(1, 10), 10});
+
+        // median of {1,2,-3} is 1 for every ordering
+        for ( final List<Integer> values : Utils.makePermutations(Arrays.asList(1,2,-3), 3, false) ) {
+            tests.add(new Object[]{values, 1});
+        }
+
+        // same shape with doubles to exercise the generic Comparable path
+        for ( final List<Double> values : Utils.makePermutations(Arrays.asList(1.1,2.1,-3.1), 3, false) ) {
+            tests.add(new Object[]{values, 1.1});
+        }
+
+        return tests.toArray(new Object[][]{});
+    }
+
+    /** Checks MathUtils.median against the expected values from the provider. */
+    @Test(dataProvider = "MedianData")
+    public void testMedian(final List<Comparable> values, final Comparable expected) {
+        final Comparable actual = MathUtils.median(values);
+        Assert.assertEquals(actual, expected, "Failed with " + values);
+    }
+
+
+
+    // man. All this to test dirichlet.
+
+    private double[] unwrap(List<Double> stuff) {
+        double[] unwrapped = new double[stuff.size()];
+        int idx = 0;
+        for ( Double d : stuff ) {
+            unwrapped[idx++] = d == null ? 0.0 : d;
+        }
+
+        return unwrapped;
+    }
+
+    /**
+     * The PartitionGenerator generates all of the partitions of a number n, e.g.
+     * 5 + 0
+     * 4 + 1
+     * 3 + 2
+     * 3 + 1 + 1
+     * 2 + 2 + 1
+     * 2 + 1 + 1 + 1
+     * 1 + 1 + 1 + 1 + 1
+     *
+     * This is used to help enumerate the state space over which the Dirichlet-Multinomial is defined,
+     * to ensure that the distribution function is properly implemented
+     *
+     * Implementation note: this is a hand-rolled state machine (states 0-3) that appears
+     * to be a translation of a generator-style partition algorithm -- the exact variant
+     * is not documented here; treat the state transitions as load-bearing and verify
+     * against testPartitioner before changing anything.
+     */
+    class PartitionGenerator implements Iterator<List<Integer>> {
+        // generate the partitions of an integer, each partition sorted numerically
+        int n;                 // the number being partitioned
+        List<Integer> a;       // working buffer; next() returns live sublists of this
+
+        int y;                 // remaining amount to distribute
+        int k;                 // current write position in the buffer
+        int state;             // which phase of the generator to resume in next()
+
+        int x;                 // current candidate part size
+        int l;                 // index of the slot after k (set in state 1)
+
+        public PartitionGenerator(int n) {
+            this.n = n;
+            this.y = n - 1;
+            this.k = 1;
+            this.a = new ArrayList<>();
+            // seed the buffer with 0..n-1; contents beyond the returned sublists are scratch
+            for ( int i = 0; i < n; i++ ) {
+                this.a.add(i);
+            }
+            this.state = 0;
+        }
+
+        public void remove()  { /* do nothing */ }
+
+        // exhausted exactly when the machine is back at state 0 with k == 0
+        public boolean hasNext() { return ! ( this.k == 0 && state == 0 ); }
+
+        // debugging aid: dumps the full internal state (used by testPartitioner's logger)
+        private String dataStr()  {
+            return String.format("a = [%s]  k = %d  y = %d  state = %d  x = %d  l = %d",
+                    Utils.join(",",a), k, y, state, x, l);
+        }
+
+        /**
+         * Returns the next partition as a live sublist of the internal buffer --
+         * callers must copy it before calling next() again.
+         */
+        public List<Integer> next() {
+            if ( this.state == 0 ) {
+                // pop the last part and start growing a new candidate from it
+                this.x = a.get(k-1)+1;
+                k -= 1;
+                this.state = 1;
+            }
+
+            if ( this.state == 1 ) {
+                // greedily lay down parts of size x while at least two still fit
+                while ( 2 * x <= y ) {
+                    this.a.set(k,x);
+                    this.y -= (int) x;  // cast is redundant (x is already int) but harmless
+                    this.k++;
+                }
+                this.l = 1+this.k;
+                this.state = 2;
+            }
+
+            if ( this.state == 2 ) {
+                if ( x <= y ) {
+                    // emit a two-part tail (x, y), then bump x / shrink y for the next call
+                    this.a.set(k,x);
+                    this.a.set(l,y);
+                    x += 1;
+                    y -= 1;
+                    return this.a.subList(0, this.k + 2);
+                } else {
+                    this.state =3;
+                }
+            }
+
+            if ( this.state == 3 ) {
+                // emit a single-part tail (x + y) and rewind to state 0
+                this.a.set(k,x+y);
+                this.y = x + y - 1;
+                this.state = 0;
+                return a.subList(0, k + 1);
+            }
+
+            throw new IllegalStateException("Cannot get here");
+        }
+
+        // NOTE: consumes the iterator -- the generator is spent after calling toString()
+        public String toString() {
+            final StringBuilder buf = new StringBuilder();
+            buf.append("{ ");
+            while ( hasNext() ) {
+                buf.append("[");
+                buf.append(Utils.join(",",next()));
+                buf.append("],");
+            }
+            buf.deleteCharAt(buf.lastIndexOf(","));
+            buf.append(" }");
+            return buf.toString();
+        }
+
+    }
+
+    /**
+     * NextCounts is the enumerator over the state space of the multinomial dirichlet.
+     *
+     * It filters the partition of the total sum to only those with a number of terms
+     * equal to the number of categories.
+     *
+     * It then generates all permutations of that partition.
+     *
+     * In so doing it enumerates over the full state space.
+     */
+    class NextCounts implements Iterator<int[]> {
+
+        private PartitionGenerator partitioner;  // source of the underlying partitions
+        private int numCategories;               // fixed length of every emitted count vector
+        private int[] next;                      // lookahead; null means exhausted
+
+        public NextCounts(int numCategories, int totalCounts) {
+            partitioner = new PartitionGenerator(totalCounts);
+            this.numCategories = numCategories;
+            next = nextFromPartitioner();
+        }
+
+        public void remove() { /* do nothing */ }
+
+        public boolean hasNext() { return next != null; }
+
+        /**
+         * Returns a defensive copy of the lookahead, then advances: first through
+         * the permutations of the current partition, then to the next partition.
+         */
+        public int[] next() {
+            int[] toReturn = clone(next);
+            next = nextPermutation();
+            if ( next == null ) {
+                next = nextFromPartitioner();
+            }
+
+            return toReturn;
+        }
+
+        private int[] clone(int[] arr) {
+            return Arrays.copyOf(arr, arr.length);
+        }
+
+        /**
+         * Pulls partitions until one fits into numCategories slots (zero-padding the
+         * rest), sorted ascending so nextPermutation starts from the first permutation.
+         * Returns null when the partitioner is exhausted.
+         */
+        private int[] nextFromPartitioner() {
+            if ( partitioner.hasNext() ) {
+                List<Integer> nxt = partitioner.next();
+                while ( partitioner.hasNext() && nxt.size() > numCategories ) {
+                    nxt = partitioner.next();
+                }
+
+                if ( nxt.size() > numCategories ) {
+                    return null;
+                } else {
+                    // zero-pad to numCategories (unset slots default to 0)
+                    int[] buf = new int[numCategories];
+                    for ( int idx = 0; idx < nxt.size(); idx++ ) {
+                        buf[idx] = nxt.get(idx);
+                    }
+                    Arrays.sort(buf);
+                    return buf;
+                }
+            }
+
+            return null;
+        }
+
+        // delegates to the static helper; mutates and returns `next`, or null when done
+        public int[] nextPermutation() {
+            return MathUtilsUnitTest.nextPermutation(next);
+        }
+
+    }
+
+    /**
+     * Advances {@code next} to its successor in ascending lexicographic order,
+     * mutating the array in place and returning it, or returns null when the input
+     * is already the final (non-increasing) permutation. This is the classic
+     * next-permutation step: find the pivot, swap with the rightmost larger
+     * element, reverse the tail.
+     */
+    public static int[] nextPermutation(int[] next) {
+        // the counts can swap among each other. The int[] is originally in ascending order;
+        // successive calls walk the permutations in ascending lexicographic order
+        // (the original comment said "descending", which contradicts testNextPermutation's
+        // expected sequence 1234, 1243, 1324, ...)
+
+        // locate the last occurrence where next[k] < next[k+1] -- the pivot
+        int gt = -1;
+        for ( int idx = 0; idx < next.length-1; idx++) {
+            if ( next[idx] < next[idx+1] ) {
+                gt = idx;
+            }
+        }
+
+        // no ascent anywhere: the array is fully non-increasing, i.e. the last permutation
+        if ( gt == -1 ) {
+            return null;
+        }
+
+        // rightmost element strictly greater than the pivot (everything right of the
+        // pivot is non-increasing, so this is also the smallest such element)
+        int largestLessThan = gt+1;
+        for ( int idx = 1 + largestLessThan; idx < next.length; idx++) {
+            if ( next[gt] < next[idx] ) {
+                largestLessThan = idx;
+            }
+        }
+
+        // swap pivot with its successor
+        int val = next[gt];
+        next[gt] = next[largestLessThan];
+        next[largestLessThan] = val;
+
+        // reverse the tail of the array
+        int[] newTail = new int[next.length-gt-1];
+        int ctr = 0;
+        for ( int idx = next.length-1; idx > gt; idx-- ) {
+            newTail[ctr++] = next[idx];
+        }
+
+        for ( int idx = 0; idx < newTail.length; idx++) {
+            next[gt+idx+1] = newTail[idx];
+        }
+
+        return next;
+    }
+
+
+    // before testing the dirichlet multinomial, we need to test the
+    // classes used to test the dirichlet multinomial
+
+    /**
+     * Verifies PartitionGenerator by counting the partitions of 1..20 against the
+     * known partition numbers p(n) (1, 2, 3, 5, 7, 11, ... -- OEIS A000041).
+     */
+    @Test
+    public void testPartitioner() {
+        int[] numsToTest = new int[]{1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20};
+        int[] expectedSizes = new int[]{1, 2, 3, 5, 7, 11, 15, 22, 30, 42, 56, 77, 101, 135, 176, 231, 297, 385, 490, 627};
+        for ( int testNum = 0; testNum < numsToTest.length; testNum++ ) {
+            PartitionGenerator gen = new PartitionGenerator(numsToTest[testNum]);
+            int size = 0;
+            while ( gen.hasNext() ) {
+                logger.debug(gen.dataStr());
+                size += 1;
+                gen.next();
+            }
+            // a fresh generator is built for the failure message because toString() consumes it
+            Assert.assertEquals(size,expectedSizes[testNum],
+                    String.format("Expected %d partitions, observed %s",expectedSizes[testNum],new PartitionGenerator(numsToTest[testNum]).toString()));
+        }
+    }
+
+    /**
+     * Walks nextPermutation() through every permutation of {1,2,3,4} and compares
+     * against the complete hard-coded list in ascending lexicographic order.
+     */
+    @Test
+    public void testNextPermutation() {
+        int[] arr = new int[]{1,2,3,4};
+        int[][] gens = new int[][] {
+                new int[]{1,2,3,4},
+                new int[]{1,2,4,3},
+                new int[]{1,3,2,4},
+                new int[]{1,3,4,2},
+                new int[]{1,4,2,3},
+                new int[]{1,4,3,2},
+                new int[]{2,1,3,4},
+                new int[]{2,1,4,3},
+                new int[]{2,3,1,4},
+                new int[]{2,3,4,1},
+                new int[]{2,4,1,3},
+                new int[]{2,4,3,1},
+                new int[]{3,1,2,4},
+                new int[]{3,1,4,2},
+                new int[]{3,2,1,4},
+                new int[]{3,2,4,1},
+                new int[]{3,4,1,2},
+                new int[]{3,4,2,1},
+                new int[]{4,1,2,3},
+                new int[]{4,1,3,2},
+                new int[]{4,2,1,3},
+                new int[]{4,2,3,1},
+                new int[]{4,3,1,2},
+                new int[]{4,3,2,1} };
+        for ( int gen = 0; gen < gens.length; gen ++ ) {
+            // BUGFIX: the loop bound was hard-coded to 3, silently skipping the final
+            // element of every 4-element permutation; compare every position instead.
+            for ( int idx = 0; idx < gens[gen].length; idx++ ) {
+                Assert.assertEquals(arr[idx],gens[gen][idx],
+                 String.format("Error at generation %d, expected %s, observed %s",gen,Arrays.toString(gens[gen]),Arrays.toString(arr)));
+            }
+            // advance in place; returns null after the last permutation, ending the loop
+            arr = nextPermutation(arr);
+        }
+    }
+
+    private double[] addEpsilon(double[] counts) {
+        double[] d = new double[counts.length];
+        for ( int i = 0; i < counts.length; i ++ ) {
+            d[i] = counts[i] + 1e-3;
+        }
+        return d;
+    }
+
+    /**
+     * Checks that MathUtils.dirichletMultinomial is a proper probability distribution:
+     * for each parameter vector and total count, the likelihoods over the full state
+     * space (all count vectors summing to the total) must be finite and sum to 1.
+     */
+    @Test
+    public void testDirichletMultinomial() {
+        // Dirichlet parameter vectors (pseudo-counts) of varying dimension and magnitude
+        List<double[]> testAlleles = Arrays.asList(
+                new double[]{80,240},
+                new double[]{1,10000},
+                new double[]{0,500},
+                new double[]{5140,20480},
+                new double[]{5000,800,200},
+                new double[]{6,3,1000},
+                new double[]{100,400,300,800},
+                new double[]{8000,100,20,80,2},
+                new double[]{90,20000,400,20,4,1280,720,1}
+        );
+
+        // sanity check: log10Gamma must stay finite near 0, where the epsilon-shifted parameters live
+        Assert.assertTrue(! Double.isInfinite(MathUtils.log10Gamma(1e-3)) && ! Double.isNaN(MathUtils.log10Gamma(1e-3)));
+
+        int[] numAlleleSampled = new int[]{2,5,10,20,25};
+        for ( double[] alleles : testAlleles ) {
+            for ( int count : numAlleleSampled ) {
+                // test that everything sums to one. Generate all multinomial draws
+                List<Double> likelihoods = new ArrayList<>(100000);
+                NextCounts generator = new NextCounts(alleles.length,count);
+                // BUGFIX: was Double.MIN_VALUE, the smallest POSITIVE double -- the
+                // (negative) log10 likelihoods could never exceed it, so the running
+                // maximum was never updated. NEGATIVE_INFINITY is the correct identity.
+                double maxLog = Double.NEGATIVE_INFINITY;
+                //List<String> countLog = new ArrayList<String>(200);
+                while ( generator.hasNext() ) {
+                    int[] thisCount = generator.next();
+                    //countLog.add(Arrays.toString(thisCount));
+                    Double likelihood = MathUtils.dirichletMultinomial(addEpsilon(alleles),thisCount);
+                    Assert.assertTrue(! Double.isNaN(likelihood) && ! Double.isInfinite(likelihood),
+                            String.format("Likelihood for counts %s and nAlleles %d was %s",
+                                    Arrays.toString(thisCount),alleles.length,Double.toString(likelihood)));
+                    if ( likelihood > maxLog )
+                        maxLog = likelihood;
+                    likelihoods.add(likelihood);
+                }
+                //System.out.printf("%d likelihoods and max is (probability) %e\n",likelihoods.size(),Math.pow(10,maxLog));
+                Assert.assertEquals(MathUtils.sumLog10(unwrap(likelihoods)),1.0,1e-7,
+                        String.format("Counts %d and alleles %d have nLikelihoods %d. \n Counts: %s",
+                                count,alleles.length,likelihoods.size(), "NODEBUG"/*,countLog*/));
+            }
+        }
+    }
+}
diff --git a/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/MedianUnitTest.java b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/MedianUnitTest.java
new file mode 100644
index 0000000..74dfdb9
--- /dev/null
+++ b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/MedianUnitTest.java
@@ -0,0 +1,115 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils;
+
+
+// the imports for unit testing.
+
+
+import org.broadinstitute.gatk.utils.BaseTest;
+import org.testng.Assert;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
+
+/**
+ * Unit tests for the capped Median accumulator. The provider data implies that a
+ * Median with cap c retains only the first c values added (later adds are rejected
+ * by add()) -- confirm against the Median implementation.
+ */
+public class MedianUnitTest extends BaseTest {
+
+    // --------------------------------------------------------------------------------
+    //
+    // Provider
+    //
+    // --------------------------------------------------------------------------------
+
+    private class MedianTestProvider extends TestDataProvider {
+        final List<Integer> values = new ArrayList<Integer>();
+        final int cap;              // maximum number of values the Median may hold
+        final Integer expected;     // expected median of the retained values
+
+        public MedianTestProvider(int expected, int cap, Integer ... values) {
+            super(MedianTestProvider.class);
+            this.expected = expected;
+            this.cap = cap;
+            this.values.addAll(Arrays.asList(values));
+            this.name = String.format("values=%s expected=%d cap=%d", this.values, expected, cap);
+        }
+    }
+
+    @DataProvider(name = "MedianTestProvider")
+    public Object[][] makeMedianTestProvider() {
+        // effectively uncapped: cap far exceeds the number of values
+        new MedianTestProvider(1, 1000, 0, 1, 2);
+        new MedianTestProvider(1, 1000, 1, 0, 1, 2);
+        new MedianTestProvider(1, 1000, 0, 1, 2, 3);
+        new MedianTestProvider(2, 1000, 0, 1, 2, 3, 4);
+        new MedianTestProvider(2, 1000, 4, 1, 2, 3, 0);
+        new MedianTestProvider(1, 1000, 1);
+        new MedianTestProvider(2, 1000, 2);
+        new MedianTestProvider(1, 1000, 1, 2);
+
+        // capped at 3: values beyond the first three should be ignored
+        new MedianTestProvider(1, 3, 1);
+        new MedianTestProvider(1, 3, 1, 2);
+        new MedianTestProvider(2, 3, 1, 2, 3);
+        new MedianTestProvider(2, 3, 1, 2, 3, 4);
+        new MedianTestProvider(2, 3, 1, 2, 3, 4, 5);
+
+        // capped at 3 with descending input: retained set is the first three added
+        new MedianTestProvider(1, 3, 1);
+        new MedianTestProvider(1, 3, 1, 2);
+        new MedianTestProvider(2, 3, 3, 2, 1);
+        new MedianTestProvider(3, 3, 4, 3, 2, 1);
+        new MedianTestProvider(4, 3, 5, 4, 3, 2, 1);
+
+        return MedianTestProvider.getTests(MedianTestProvider.class);
+    }
+
+    // NOTE: the name "testBasicLikelihoods" looks copy-pasted from another test class;
+    // it actually exercises add()/size()/isEmpty()/isFull()/getMedian()
+    @Test(dataProvider = "MedianTestProvider")
+    public void testBasicLikelihoods(MedianTestProvider cfg) {
+        final Median<Integer> median = new Median<Integer>(cfg.cap);
+
+        // add() returns whether the value was accepted (i.e. cap not yet reached)
+        int nAdded = 0;
+        for ( final int value : cfg.values )
+            if ( median.add(value) )
+                nAdded++;
+
+        Assert.assertEquals(nAdded, median.size());
+
+        Assert.assertEquals(cfg.values.isEmpty(), median.isEmpty());
+        Assert.assertEquals(cfg.values.size() >= cfg.cap, median.isFull());
+        Assert.assertEquals(median.getMedian(), cfg.expected, cfg.toString());
+    }
+
+    // getMedian() without a default must throw on an empty Median;
+    // getMedian(default) must return the default instead
+    @Test(expectedExceptions = IllegalStateException.class)
+    public void testEmptyMedian() {
+        final Median<Integer> median = new Median<Integer>();
+        Assert.assertTrue(median.isEmpty());
+        final Integer d = 100;
+        Assert.assertEquals(median.getMedian(d), d);
+        median.getMedian();
+    }
+
+}
\ No newline at end of file
diff --git a/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/NGSPlatformUnitTest.java b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/NGSPlatformUnitTest.java
new file mode 100644
index 0000000..998ef1d
--- /dev/null
+++ b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/NGSPlatformUnitTest.java
@@ -0,0 +1,167 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils;
+
+
+// the imports for unit testing.
+
+
+import htsjdk.samtools.reference.IndexedFastaSequenceFile;
+import htsjdk.samtools.SAMFileHeader;
+import htsjdk.samtools.SAMReadGroupRecord;
+import org.broadinstitute.gatk.utils.BaseTest;
+import org.broadinstitute.gatk.utils.fasta.CachingIndexedFastaSequenceFile;
+import org.broadinstitute.gatk.utils.sam.ArtificialSAMUtils;
+import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
+import org.testng.Assert;
+import org.testng.annotations.BeforeClass;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Unit tests for NGSPlatform: mapping of read-group PL strings (case-insensitive,
+ * substring-tolerant) to platform enum values, and platform lookup from reads.
+ */
+public class NGSPlatformUnitTest extends BaseTest {
+    // example genome loc parser for this test, can be deleted if you don't use the reference
+    private GenomeLocParser genomeLocParser;
+
+    // example fasta index file, can be deleted if you don't use the reference
+    private IndexedFastaSequenceFile seq;
+
+    @BeforeClass
+    public void setup() throws FileNotFoundException {
+        // sequence
+        seq = new CachingIndexedFastaSequenceFile(new File(b37KGReference));
+        genomeLocParser = new GenomeLocParser(seq);
+    }
+
+    // one case per platform: the platform paired with its first BAM PL name
+    @DataProvider(name = "TestPrimary")
+    public Object[][] makeTestPrimary() {
+        List<Object[]> tests = new ArrayList<Object[]>();
+
+        for ( final NGSPlatform pl : NGSPlatform.values() ) {
+            tests.add(new Object[]{pl, pl.BAM_PL_NAMES[0]});
+        }
+
+        return tests.toArray(new Object[][]{});
+    }
+
+    @Test(dataProvider = "TestPrimary")
+    public void testPrimary(final NGSPlatform pl, final String expectedPrimaryName) {
+        Assert.assertEquals(pl.getDefaultPlatform(), expectedPrimaryName, "Failed primary test for " + pl);
+    }
+
+    // make sure common names in BAMs are found
+    @DataProvider(name = "TestMappings")
+    public Object[][] makeTestMappings() {
+        List<Object[]> tests = new ArrayList<Object[]>();
+
+        final Map<String, NGSPlatform> expected = new HashMap<String, NGSPlatform>();
+        // VALID VALUES ACCORDING TO SAM SPEC: https://www.google.com/url?sa=t&rct=j&q=&esrc=s&source=web&cd=1&ved=0CC8QFjAA&url=http%3A%2F%2Fsamtools.sourceforge.net%2FSAM1.pdf&ei=Dm8WUbXAEsi10QHYqoDwDQ&usg=AFQjCNFkMtvEi6LeiKgpxQGtHTlqWKw2yw&bvm=bv.42080656,d.dmQ
+        expected.put("CAPILLARY", NGSPlatform.CAPILLARY);
+        expected.put("LS454", NGSPlatform.LS454);
+        expected.put("ILLUMINA", NGSPlatform.ILLUMINA);
+        expected.put("SOLID", NGSPlatform.SOLID);
+        expected.put("HELICOS", NGSPlatform.HELICOS);
+        expected.put("IONTORRENT", NGSPlatform.ION_TORRENT);
+        expected.put("PACBIO", NGSPlatform.PACBIO);
+        // other commonly seen values out in the wild
+        expected.put("SLX", NGSPlatform.ILLUMINA);
+        expected.put("SOLEXA", NGSPlatform.ILLUMINA);
+        expected.put("454", NGSPlatform.LS454);
+        expected.put("COMPLETE", NGSPlatform.COMPLETE_GENOMICS);
+        // unknown platforms should map to unknown
+        expected.put("MARKS_GENOMICS_TECH", NGSPlatform.UNKNOWN);
+        expected.put("RANDOM_PL_VALUE", NGSPlatform.UNKNOWN);
+        // critical -- a null platform maps to unknown
+        expected.put(null, NGSPlatform.UNKNOWN);
+
+        for ( final Map.Entry<String,NGSPlatform> one : expected.entrySet() ) {
+            tests.add(new Object[]{one.getKey(), one.getValue()});
+
+            if ( one.getKey() != null ) {
+                // make sure we're case insensitive
+                tests.add(new Object[]{one.getKey().toLowerCase(), one.getValue()});
+                tests.add(new Object[]{one.getKey().toUpperCase(), one.getValue()});
+
+                // make sure appending GENOMICS works (required for COMPLETE mapping)
+                tests.add(new Object[]{one.getKey() + " GENOMICS", one.getValue()});
+                // make sure that random junk appended to a known name still maps to it
+                tests.add(new Object[]{one.getKey() + " asdfa", one.getValue()});
+            }
+        }
+
+        return tests.toArray(new Object[][]{});
+    }
+
+    @Test(dataProvider = "TestMappings")
+    public void testMappings(final String plField, final NGSPlatform expected) {
+        Assert.assertEquals(NGSPlatform.fromReadGroupPL(plField), expected, "Failed primary test for " + plField + " mapping to " + expected);
+    }
+
+    // isKnown must agree with the mapping: true exactly when the PL string maps to a real platform
+    @Test(dataProvider = "TestMappings")
+    public void testKnown(final String plField, final NGSPlatform expected) {
+        Assert.assertEquals(NGSPlatform.isKnown(plField), expected != NGSPlatform.UNKNOWN, "Failed isKnown test for " + plField + " mapping to " + expected);
+    }
+
+    /**
+     * A unit test that creates an artificial read for testing some code that uses reads
+     */
+    @Test(dataProvider = "TestMappings")
+    public void testPLFromReadWithRG(final String plField, final NGSPlatform expected) {
+        final SAMFileHeader header = ArtificialSAMUtils.createArtificialSamHeader(seq.getSequenceDictionary());
+        final String rgID = "ID";
+        final SAMReadGroupRecord rg = new SAMReadGroupRecord(rgID);
+        if ( plField != null )
+            rg.setPlatform(plField);
+        header.addReadGroup(rg);
+        final GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(header, "myRead", 0, 1, 10);
+        read.setAttribute("RG", rgID);
+        Assert.assertEquals(NGSPlatform.fromRead(read), expected);
+    }
+
+    // a read group without a PL tag must resolve to UNKNOWN
+    @Test()
+    public void testPLFromReadWithRGButNoPL() {
+        final SAMFileHeader header = ArtificialSAMUtils.createArtificialSamHeader(seq.getSequenceDictionary());
+        final String rgID = "ID";
+        final SAMReadGroupRecord rg = new SAMReadGroupRecord(rgID);
+        header.addReadGroup(rg);
+        final GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(header, "myRead", 0, 1, 10);
+        read.setAttribute("RG", rgID);
+        Assert.assertEquals(NGSPlatform.fromRead(read), NGSPlatform.UNKNOWN);
+    }
+
+    // a read with no read group at all must also resolve to UNKNOWN
+    @Test
+    public void testReadWithoutRG() {
+        final SAMFileHeader header = ArtificialSAMUtils.createArtificialSamHeader(seq.getSequenceDictionary());
+        final GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(header, "myRead", 0, 1, 10);
+        Assert.assertEquals(NGSPlatform.fromRead(read), NGSPlatform.UNKNOWN);
+    }
+}
\ No newline at end of file
diff --git a/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/PathUtilsUnitTest.java b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/PathUtilsUnitTest.java
new file mode 100644
index 0000000..45bef58
--- /dev/null
+++ b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/PathUtilsUnitTest.java
@@ -0,0 +1,65 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils;
+
+import org.broadinstitute.gatk.utils.BaseTest;
+import org.testng.Assert;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+import org.testng.annotations.BeforeClass;
+import org.testng.annotations.Test;
+
+
+import java.io.File;
+
+/**
+ * Unit test for PathUtils.refreshVolume: refreshing an existing volume (the temp
+ * directory) must succeed, and a nonexistent path must fail with ReviewedGATKException.
+ */
+public class PathUtilsUnitTest extends BaseTest {
+    @BeforeClass
+    public void init() { }
+
+    /**
+     * Tests that we can successfully refresh a volume
+     */
+    @Test
+    public void testRefreshVolume() {
+        logger.warn("Executing testRefreshVolume");
+
+        Assert.assertTrue(successfullyRefreshedVolume(System.getProperty("java.io.tmpdir")));
+        Assert.assertFalse(successfullyRefreshedVolume("/a/made/up/file.txt"));
+    }
+
+    // returns true iff refreshVolume completes without throwing ReviewedGATKException
+    private boolean successfullyRefreshedVolume(String filename) {
+        boolean result = true;
+
+        try {
+            PathUtils.refreshVolume(new File(filename));
+        } catch (ReviewedGATKException e) {
+            result = false;
+        }
+
+        logger.warn(filename + " is accessible : " + result);
+
+        return result;
+    }
+}
diff --git a/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/QualityUtilsUnitTest.java b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/QualityUtilsUnitTest.java
new file mode 100644
index 0000000..993878b
--- /dev/null
+++ b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/QualityUtilsUnitTest.java
@@ -0,0 +1,189 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils;
+
+/**
+ * Created by IntelliJ IDEA.
+ * User: rpoplin
+ * Date: 3/21/12
+ */
+
+import org.broadinstitute.gatk.utils.BaseTest;
+import org.testng.Assert;
+import org.testng.annotations.BeforeClass;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * Basic unit test for QualityUtils class
+ */
+public class QualityUtilsUnitTest extends BaseTest {
+    // Absolute tolerance for floating-point comparisons throughout this test.
+    final private static double TOLERANCE = 1e-9;
+
+    @BeforeClass
+    public void init() {
+    }
+
+    /**
+     * Provides every byte-encodable qual score 0..254 paired with its expected
+     * Phred error rate 10^(-qual/10).
+     */
+    @DataProvider(name = "QualTest")
+    public Object[][] makeMyDataProvider() {
+        final List<Object[]> tests = new ArrayList<>();
+
+        for ( int qual = 0; qual < 255; qual++ ) {
+            tests.add(new Object[]{(byte)(qual & 0xFF), Math.pow(10.0, ((double)qual)/-10.0)});
+        }
+
+        return tests.toArray(new Object[][]{});
+    }
+
+    /**
+     * Round-trip test for qual/probability conversions: qual -> error/true
+     * probability (linear and log10 space) and probability -> qual, including
+     * the clamping behavior against MAX_SAM_QUAL_SCORE and caller-supplied
+     * maxima (both byte and int overloads).
+     */
+    @Test(dataProvider = "QualTest")
+    public void testMyData(final byte qual, final double errorRate) {
+        final double trueRate = 1 - errorRate;
+
+        final double actualErrorRate = QualityUtils.qualToErrorProb(qual);
+        Assert.assertEquals(actualErrorRate, errorRate, TOLERANCE);
+        final double actualTrueRate = QualityUtils.qualToProb(qual);
+        Assert.assertEquals(actualTrueRate, trueRate, TOLERANCE);
+
+        // log10 tests
+        final double actualLog10ErrorRate = QualityUtils.qualToErrorProbLog10(qual);
+        Assert.assertEquals(actualLog10ErrorRate, Math.log10(errorRate), TOLERANCE);
+        final double actualLog10TrueRate = QualityUtils.qualToProbLog10(qual);
+        Assert.assertEquals(actualLog10TrueRate, Math.log10(trueRate), TOLERANCE);
+
+        // test that we can convert our error rates to quals, accounting for boundaries
+        // (qual & 0xFF treats the byte as unsigned; result is clamped to [1, MAX_SAM_QUAL_SCORE])
+        final int expectedQual = Math.max(Math.min(qual & 0xFF, QualityUtils.MAX_SAM_QUAL_SCORE), 1);
+        final byte actualQual = QualityUtils.trueProbToQual(trueRate);
+        Assert.assertEquals(actualQual, expectedQual & 0xFF);
+        final byte actualQualFromErrorRate = QualityUtils.errorProbToQual(errorRate);
+        Assert.assertEquals(actualQualFromErrorRate, expectedQual & 0xFF);
+
+        // same conversions, but with an explicit per-call maximum qual
+        for ( int maxQual = 10; maxQual < QualityUtils.MAX_SAM_QUAL_SCORE; maxQual++ ) {
+            final byte maxAsByte = (byte)(maxQual & 0xFF);
+            final byte expectedQual2 = (byte)(Math.max(Math.min(qual & 0xFF, maxQual), 1) & 0xFF);
+            final byte actualQual2 = QualityUtils.trueProbToQual(trueRate, maxAsByte);
+            Assert.assertEquals(actualQual2, expectedQual2, "Failed with max " + maxQual);
+            final byte actualQualFromErrorRate2 = QualityUtils.errorProbToQual(errorRate, maxAsByte);
+            Assert.assertEquals(actualQualFromErrorRate2, expectedQual2, "Failed with max " + maxQual);
+
+            // test the integer routines
+            final byte actualQualInt2 = QualityUtils.trueProbToQual(trueRate, maxQual);
+            Assert.assertEquals(actualQualInt2, expectedQual2, "Failed with max " + maxQual);
+            final byte actualQualFromErrorRateInt2 = QualityUtils.errorProbToQual(errorRate, maxQual);
+            Assert.assertEquals(actualQualFromErrorRateInt2, expectedQual2, "Failed with max " + maxQual);
+        }
+    }
+
+    // A true probability at the very bottom of the double range must still map
+    // to the minimum legal qual of 1, not underflow or error.
+    @Test
+    public void testTrueProbWithMinDouble() {
+        final byte actual = QualityUtils.trueProbToQual(Double.MIN_VALUE);
+        Assert.assertEquals(actual, 1, "Failed to convert true prob of min double to 1 qual");
+    }
+
+    @Test
+    public void testTrueProbWithVerySmallValue() {
+        final byte actual = QualityUtils.trueProbToQual(1.7857786272673852E-19);
+        Assert.assertEquals(actual, 1, "Failed to convert true prob of very small value 1.7857786272673852E-19 to 1 qual");
+    }
+
+    /**
+     * Spot-checks the qual->probability conversions at Q20, Q30 and Q40
+     * against hand-computed values (presumably exercising QualityUtils'
+     * precomputed caches — TODO confirm against QualityUtils implementation).
+     */
+    @Test
+    public void testQualCaches() {
+        Assert.assertEquals(QualityUtils.qualToErrorProb((byte) 20), 0.01, 1e-6);
+        Assert.assertEquals(QualityUtils.qualToErrorProbLog10((byte) 20), -2.0, 1e-6);
+        Assert.assertEquals(QualityUtils.qualToProb((byte) 20), 0.99, 1e-6);
+        Assert.assertEquals(QualityUtils.qualToProbLog10((byte) 20), -0.0043648054, 1e-6);
+
+        Assert.assertEquals(QualityUtils.qualToErrorProb((byte) 30), 0.001, 1e-6);
+        Assert.assertEquals(QualityUtils.qualToErrorProbLog10((byte) 30), -3.0, 1e-6);
+        Assert.assertEquals(QualityUtils.qualToProb((byte) 30), 0.999, 1e-6);
+        Assert.assertEquals(QualityUtils.qualToProbLog10((byte) 30), -0.000434511774, 1e-6);
+
+        Assert.assertEquals(QualityUtils.qualToErrorProb((byte) 40), 0.0001, 1e-6);
+        Assert.assertEquals(QualityUtils.qualToErrorProbLog10((byte) 40), -4.0, 1e-6);
+        Assert.assertEquals(QualityUtils.qualToProb((byte) 40), 0.9999, 1e-6);
+        Assert.assertEquals(QualityUtils.qualToProbLog10((byte) 40), -4.34316198e-5, 1e-6);
+    }
+
+    // boundQual(qual) with no explicit max must clamp into [1, MAX_SAM_QUAL_SCORE].
+    @Test()
+    public void testBoundingDefault() {
+        for ( int qual = 0; qual < 1000; qual++ ) {
+            final byte expected = (byte)Math.max(Math.min(qual, QualityUtils.MAX_SAM_QUAL_SCORE), 1);
+            Assert.assertEquals(QualityUtils.boundQual(qual), expected);
+        }
+    }
+
+    // boundQual(qual, max) must clamp into [1, max]; compared as unsigned via & 0xFF.
+    @Test()
+    public void testBoundingWithMax() {
+        for ( int max = 10; max < 255; max += 50 ) {
+            for ( int qual = 0; qual < 1000; qual++ ) {
+                final int expected = Math.max(Math.min(qual, max), 1);
+                Assert.assertEquals(QualityUtils.boundQual(qual, (byte)(max & 0xFF)) & 0xFF, expected & 0xFF, "qual " + qual + " max " + max);
+            }
+        }
+    }
+
+    /**
+     * Pairs of (error rate, expected phred-scaled value). An error rate of 0.0
+     * is expected to map to the phred scaling of Double.MIN_VALUE rather than
+     * infinity; 1.0 maps to 0.
+     */
+    @DataProvider(name = "PhredScaleDoubleOps")
+    public Object[][] makePhredDoubleTest() {
+        final List<Object[]> tests = new ArrayList<>();
+
+        tests.add(new Object[]{0.0, -10 * Math.log10(Double.MIN_VALUE)});
+        tests.add(new Object[]{1.0, 0.0});
+        for ( int pow = 1; pow < 20; pow++ ) {
+            tests.add(new Object[]{Math.pow(10.0, -1.0 * pow), pow * 10});
+            tests.add(new Object[]{Math.pow(10.0, -1.5 * pow), pow * 15});
+        }
+
+        return tests.toArray(new Object[][]{});
+    }
+
+    // Exercises the double-valued qualToErrorProb overload over 3.0..255.0.
+    @Test()
+    public void testQualToErrorProbDouble() {
+        for ( double qual = 3.0; qual < 255.0; qual += 0.1 ) {
+            final double expected = Math.pow(10.0, qual / -10.0);
+            Assert.assertEquals(QualityUtils.qualToErrorProb(qual), expected, TOLERANCE, "failed qual->error prob for double qual " + qual);
+        }
+    }
+
+
+    /**
+     * Verifies phredScaleErrorRate exactly, and phredScaleCorrectRate on the
+     * complementary true rate. The true-rate path loses precision (1 - errorRate
+     * in double arithmetic), hence the looser tolerance for tiny error rates and
+     * the special-cased MIN_PHRED_SCALED_QUAL expectation when trueRate == 1.0.
+     */
+    @Test(dataProvider = "PhredScaleDoubleOps")
+    public void testPhredScaleDoubleOps(final double errorRate, final double expectedPhredScaled) {
+        final double actualError = QualityUtils.phredScaleErrorRate(errorRate);
+        Assert.assertEquals(actualError, expectedPhredScaled, TOLERANCE);
+        final double trueRate = 1 - errorRate;
+        final double actualTrue = QualityUtils.phredScaleCorrectRate(trueRate);
+        if ( trueRate == 1.0 ) {
+            Assert.assertEquals(actualTrue, QualityUtils.MIN_PHRED_SCALED_QUAL);
+        } else {
+            // 1 - errorRate is indistinguishable from 1.0 for very small error
+            // rates, so only a coarse agreement can be demanded there
+            final double tol = errorRate < 1e-10 ? 10.0 : 1e-3;
+            Assert.assertEquals(actualTrue, expectedPhredScaled, tol);
+        }
+    }
+}
\ No newline at end of file
diff --git a/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/R/RScriptExecutorUnitTest.java b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/R/RScriptExecutorUnitTest.java
new file mode 100644
index 0000000..98f9736
--- /dev/null
+++ b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/R/RScriptExecutorUnitTest.java
@@ -0,0 +1,110 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.R;
+
+import org.apache.commons.io.FileUtils;
+import org.broadinstitute.gatk.utils.BaseTest;
+import org.broadinstitute.gatk.utils.io.IOUtils;
+import org.testng.Assert;
+import org.testng.annotations.Test;
+
+import java.io.File;
+
+/**
+ * Basic unit test for RScriptExecutor in reduced reads
+ */
+public class RScriptExecutorUnitTest extends BaseTest {
+
+    // Trivial R script used to verify that Rscript can execute at all.
+    private static final String HELLO_WORLD_SCRIPT = "print('hello, world')";
+    // R script that fails unless the gsalib package has been attached.
+    private static final String GSALIB_LOADED_SCRIPT = "if (!'package:gsalib' %in% search()) stop('gsalib not loaded')";
+
+    // Guard test: all other tests depend on Rscript being on the PATH.
+    @Test
+    public void testRscriptExists() {
+        Assert.assertTrue(RScriptExecutor.RSCRIPT_EXISTS, "Rscript not found in environment ${PATH}");
+    }
+
+    // Running a real, existing script with exceptOnError=true should succeed.
+    @Test(dependsOnMethods = "testRscriptExists")
+    public void testExistingScript() {
+        File script = writeScript(HELLO_WORLD_SCRIPT);
+        try {
+            RScriptExecutor executor = new RScriptExecutor();
+            executor.addScript(script);
+            executor.setExceptOnError(true);
+            Assert.assertTrue(executor.exec(), "Exec failed");
+        } finally {
+            // always clean up the temp script, even on assertion failure
+            FileUtils.deleteQuietly(script);
+        }
+    }
+
+    // A missing script with exceptOnError=true must throw RScriptExecutorException.
+    @Test(dependsOnMethods = "testRscriptExists", expectedExceptions = RScriptExecutorException.class)
+    public void testNonExistantScriptException() {
+        RScriptExecutor executor = new RScriptExecutor();
+        executor.setExceptOnError(true);
+        executor.addScript(new File("does_not_exists.R"));
+        executor.exec();
+    }
+
+    // A missing script with exceptOnError=false must return false instead of throwing.
+    @Test(dependsOnMethods = "testRscriptExists")
+    public void testNonExistantScriptNoException() {
+        logger.warn("Testing that warning is printed an no exception thrown for missing script.");
+        RScriptExecutor executor = new RScriptExecutor();
+        executor.setExceptOnError(false);
+        executor.addScript(new File("does_not_exists.R"));
+        Assert.assertFalse(executor.exec(), "Exec should have returned false when the job failed");
+    }
+
+    // Adding RScriptLibrary.GSALIB should make the gsalib package available to the script.
+    @Test(dependsOnMethods = "testRscriptExists")
+    public void testLibrary() {
+        File script = writeScript(GSALIB_LOADED_SCRIPT);
+        try {
+            RScriptExecutor executor = new RScriptExecutor();
+            executor.addScript(script);
+            executor.addLibrary(RScriptLibrary.GSALIB);
+            executor.setExceptOnError(true);
+            Assert.assertTrue(executor.exec(), "Exec failed");
+        } finally {
+            FileUtils.deleteQuietly(script);
+        }
+    }
+
+    // Without addLibrary(GSALIB) the same script must fail, surfacing as an exception.
+    @Test(dependsOnMethods = "testRscriptExists", expectedExceptions = RScriptExecutorException.class)
+    public void testLibraryMissing() {
+        File script = writeScript(GSALIB_LOADED_SCRIPT);
+        try {
+            RScriptExecutor executor = new RScriptExecutor();
+            executor.addScript(script);
+            // GSALIB is not added nor imported in the script
+            executor.setExceptOnError(true);
+            executor.exec();
+        } finally {
+            FileUtils.deleteQuietly(script);
+        }
+    }
+
+    /**
+     * Writes {@code content} to a temporary .R file and returns it.
+     * Callers are responsible for deleting the file.
+     */
+    private File writeScript(String content) {
+        return IOUtils.writeTempFile(content, "myTestScript", ".R");
+    }
+}
diff --git a/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/R/RScriptLibraryUnitTest.java b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/R/RScriptLibraryUnitTest.java
new file mode 100644
index 0000000..ac3118e
--- /dev/null
+++ b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/R/RScriptLibraryUnitTest.java
@@ -0,0 +1,47 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.R;
+
+import org.apache.commons.io.FileUtils;
+import org.testng.Assert;
+import org.testng.annotations.Test;
+
+import java.io.File;
+
+// Unit tests for the RScriptLibrary enum's GSALIB entry.
+public class RScriptLibraryUnitTest {
+    // The GSALIB constant must expose the expected library name and bundled resource path.
+    @Test
+    public void testProperties() {
+        Assert.assertEquals(RScriptLibrary.GSALIB.getLibraryName(), "gsalib");
+        Assert.assertEquals(RScriptLibrary.GSALIB.getResourcePath(), "gsalib.tar.gz");
+    }
+
+    // writeTemp() must materialize the library resource as a real file on disk.
+    @Test
+    public void testWriteTemp() {
+        File file = RScriptLibrary.GSALIB.writeTemp();
+        Assert.assertTrue(file.exists(), "R library was not written to temp file: " + file);
+        // clean up the temp file; deleteQuietly never throws
+        FileUtils.deleteQuietly(file);
+    }
+}
diff --git a/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/R/RUtilsUnitTest.java b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/R/RUtilsUnitTest.java
new file mode 100644
index 0000000..8e96d62
--- /dev/null
+++ b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/R/RUtilsUnitTest.java
@@ -0,0 +1,65 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.R;
+
+import org.testng.Assert;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;
+
+// Unit tests for RUtils' Java-collection -> R-vector-literal formatting helpers.
+public class RUtilsUnitTest {
+    /**
+     * (input list, expected R expression) pairs for toStringList:
+     * null -> "NA", empty -> "c()", elements rendered single-quoted.
+     */
+    @DataProvider(name = "stringLists")
+    public Object[][] getStringLists() {
+        return new Object[][] {
+                new Object[] { null, "NA" },
+                new Object[] { Collections.EMPTY_LIST, "c()" },
+                new Object[] { Arrays.asList("1", "2", "3"), "c('1','2','3')" }
+        };
+    }
+
+    @Test(dataProvider = "stringLists")
+    public void testToStringList(List<? extends CharSequence> actual, String expected) {
+        Assert.assertEquals(RUtils.toStringList(actual), expected);
+    }
+
+    /**
+     * (input list, expected R expression) pairs for toNumberList:
+     * null -> "NA", empty -> "c()", integers unquoted, doubles with decimal point.
+     */
+    @DataProvider(name = "numberLists")
+    public Object[][] getNumberLists() {
+        return new Object[][] {
+                new Object[] { null, "NA" },
+                new Object[] { Collections.EMPTY_LIST, "c()" },
+                new Object[] { Arrays.asList(1, 2, 3), "c(1,2,3)" },
+                new Object[] { Arrays.asList(1D, 2D, 3D), "c(1.0,2.0,3.0)" }
+        };
+    }
+
+    @Test(dataProvider = "numberLists")
+    public void testToNumberList(List<? extends Number> actual, String expected) {
+        Assert.assertEquals(RUtils.toNumberList(actual), expected);
+    }
+}
diff --git a/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/SequenceDictionaryUtilsUnitTest.java b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/SequenceDictionaryUtilsUnitTest.java
new file mode 100644
index 0000000..bbbcb94
--- /dev/null
+++ b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/SequenceDictionaryUtilsUnitTest.java
@@ -0,0 +1,239 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils;
+
+import htsjdk.samtools.SAMSequenceDictionary;
+import htsjdk.samtools.SAMSequenceRecord;
+import org.apache.log4j.Logger;
+import org.broadinstitute.gatk.utils.exceptions.UserException;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+import org.testng.Assert;
+
+import static org.broadinstitute.gatk.utils.SequenceDictionaryUtils.*;
+import static org.broadinstitute.gatk.utils.SequenceDictionaryUtils.SequenceDictionaryCompatibility.*;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
+public class SequenceDictionaryUtilsUnitTest extends BaseTest {
+
+    private static Logger logger = Logger.getLogger(SequenceDictionaryUtilsUnitTest.class);
+
+
+    @DataProvider( name = "SequenceDictionaryDataProvider" )
+    public Object[][] generateSequenceDictionaryTestData() {
+        final SAMSequenceRecord CHRM_HG19 = new SAMSequenceRecord("chrM", 16571);
+        final SAMSequenceRecord CHR_NONSTANDARD1 = new SAMSequenceRecord("NonStandard1", 8675309);
+        final SAMSequenceRecord CHR_NONSTANDARD2 = new SAMSequenceRecord("NonStandard2", 8675308);
+
+        final Class NO_COMMON_CONTIGS_EXCEPTION = UserException.IncompatibleSequenceDictionaries.class;
+        final Class UNEQUAL_COMMON_CONTIGS_EXCEPTION = UserException.IncompatibleSequenceDictionaries.class;
+        final Class NON_CANONICAL_HUMAN_ORDER_EXCEPTION = UserException.LexicographicallySortedSequenceDictionary.class;
+        final Class OUT_OF_ORDER_EXCEPTION = UserException.IncompatibleSequenceDictionaries.class;
+        final Class DIFFERENT_INDICES_EXCEPTION = UserException.IncompatibleSequenceDictionaries.class;
+
+        final List<SAMSequenceRecord> hg19Sequences = Arrays.asList(CHRM_HG19, CHR1_HG19, CHR2_HG19, CHR10_HG19);
+        final GenomeLocParser hg19GenomeLocParser = new GenomeLocParser(new SAMSequenceDictionary(hg19Sequences));
+        final List<GenomeLoc> hg19AllContigsIntervals = Arrays.asList(hg19GenomeLocParser.createGenomeLoc("chrM", 0, 1),
+                                                                      hg19GenomeLocParser.createGenomeLoc("chr1", 0, 1),
+                                                                      hg19GenomeLocParser.createGenomeLoc("chr2", 0, 1),
+                                                                      hg19GenomeLocParser.createGenomeLoc("chr10", 0, 1));
+        final List<GenomeLoc> hg19PartialContigsIntervals = Arrays.asList(hg19GenomeLocParser.createGenomeLoc("chrM", 0, 1),
+                                                                          hg19GenomeLocParser.createGenomeLoc("chr1", 0, 1));
+        final GenomeLocSortedSet hg19AllContigsIntervalSet = new GenomeLocSortedSet(hg19GenomeLocParser, hg19AllContigsIntervals);
+        final GenomeLocSortedSet hg19PartialContigsIntervalSet = new GenomeLocSortedSet(hg19GenomeLocParser, hg19PartialContigsIntervals);
+
+        return new Object[][]  {
+            // Identical dictionaries:
+            { Arrays.asList(CHR1_HG19),                        Arrays.asList(CHR1_HG19),                        null, IDENTICAL, null, false, null },
+            { Arrays.asList(CHR1_HG19, CHR2_HG19, CHR10_HG19), Arrays.asList(CHR1_HG19, CHR2_HG19, CHR10_HG19), null, IDENTICAL, null, false, null },
+            { Arrays.asList(CHR1_B37),                         Arrays.asList(CHR1_B37),                         null, IDENTICAL, null, false, null },
+            { Arrays.asList(CHR1_B37, CHR2_B37, CHR10_B37),    Arrays.asList(CHR1_B37, CHR2_B37, CHR10_B37),    null, IDENTICAL, null, false, null },
+
+            // Dictionaries with a common subset:
+            { Arrays.asList(CHR1_HG19),                                          Arrays.asList(CHR1_HG19, CHR_NONSTANDARD1),                                   null, COMMON_SUBSET, null, false, null },
+            { Arrays.asList(CHR1_HG19, CHR_NONSTANDARD1),                        Arrays.asList(CHR1_HG19, CHR_NONSTANDARD2),                                   null, COMMON_SUBSET, null, false, null },
+            { Arrays.asList(CHR_NONSTANDARD1, CHR1_HG19),                        Arrays.asList(CHR_NONSTANDARD2, CHR1_HG19),                                   null, COMMON_SUBSET, null, false, null },
+            { Arrays.asList(CHR_NONSTANDARD1, CHR1_HG19),                        Arrays.asList(CHR_NONSTANDARD2, CHR1_HG19, CHRM_HG19),                        null, COMMON_SUBSET, null, false, null },
+            { Arrays.asList(CHR1_HG19, CHR2_HG19, CHR10_HG19, CHR_NONSTANDARD1), Arrays.asList(CHR1_HG19, CHR2_HG19, CHR10_HG19, CHR_NONSTANDARD2),            null, COMMON_SUBSET, null, false, null },
+            { Arrays.asList(CHR1_HG19, CHR2_HG19, CHR10_HG19, CHR_NONSTANDARD1), Arrays.asList(CHR1_HG19, CHR2_HG19, CHR10_HG19),                              null, COMMON_SUBSET, null, false, null },
+            { Arrays.asList(CHR1_HG19, CHR2_HG19, CHR10_HG19),                   Arrays.asList(CHR1_HG19, CHR2_HG19, CHR10_HG19, CHR_NONSTANDARD1),            null, COMMON_SUBSET, null, false, null },
+            { Arrays.asList(CHR_NONSTANDARD1, CHR1_HG19, CHR2_HG19, CHR10_HG19), Arrays.asList(CHR_NONSTANDARD2, CHR1_HG19, CHR2_HG19, CHR10_HG19),            null, COMMON_SUBSET, null, false, null },
+            { Arrays.asList(CHR_NONSTANDARD1, CHR1_HG19, CHR2_HG19, CHR10_HG19), Arrays.asList(CHR_NONSTANDARD2, CHR1_HG19, CHR2_HG19, CHR10_HG19, CHRM_HG19), null, COMMON_SUBSET, null, false, null },
+            { Arrays.asList(CHR1_B37, CHR2_B37, CHR10_B37, CHR_NONSTANDARD1),    Arrays.asList(CHR1_B37, CHR2_B37, CHR10_B37, CHR_NONSTANDARD2),               null, COMMON_SUBSET, null, false, null },
+            { Arrays.asList(CHR1_HG19, CHR2_HG19),                               Arrays.asList(CHR1_HG19, CHR2_HG19, CHR10_HG19),                              null, COMMON_SUBSET, null, false, null },
+
+            // Dictionaries with no common contigs:
+            { Arrays.asList(CHR1_HG19),                        Arrays.asList(CHR2_HG19),                     null, NO_COMMON_CONTIGS, NO_COMMON_CONTIGS_EXCEPTION, false, null },
+            { Arrays.asList(CHR1_HG19),                        Arrays.asList(CHR1_B37),                      null, NO_COMMON_CONTIGS, NO_COMMON_CONTIGS_EXCEPTION, false, null },
+            { Arrays.asList(CHR1_HG19, CHR2_HG19, CHR10_HG19), Arrays.asList(CHR1_B37, CHR2_B37, CHR10_B37), null, NO_COMMON_CONTIGS, NO_COMMON_CONTIGS_EXCEPTION, false, null },
+            { Arrays.asList(CHR1_HG19, CHR2_HG19),             Arrays.asList(CHR1_B37, CHR2_B37, CHR10_B37), null, NO_COMMON_CONTIGS, NO_COMMON_CONTIGS_EXCEPTION, false, null },
+
+            // Dictionaries with unequal common contigs:
+            { Arrays.asList(CHR1_HG19),                                          Arrays.asList(CHR1_HG18),                                          null, UNEQUAL_COMMON_CONTIGS, UNEQUAL_COMMON_CONTIGS_EXCEPTION, false, null },
+            { Arrays.asList(CHR1_B36),                                           Arrays.asList(CHR1_B37),                                           null, UNEQUAL_COMMON_CONTIGS, UNEQUAL_COMMON_CONTIGS_EXCEPTION, false, null },
+            { Arrays.asList(CHR1_HG19, CHR2_HG19, CHR10_HG19),                   Arrays.asList(CHR1_HG18, CHR2_HG18, CHR10_HG18),                   null, UNEQUAL_COMMON_CONTIGS, UNEQUAL_COMMON_CONTIGS_EXCEPTION, false, null },
+            { Arrays.asList(CHR1_B37, CHR2_B37, CHR10_B37),                      Arrays.asList(CHR1_B36, CHR2_B36, CHR10_B36),                      null, UNEQUAL_COMMON_CONTIGS, UNEQUAL_COMMON_CONTIGS_EXCEPTION, false, null },
+            { Arrays.asList(CHR1_HG19, CHR2_HG19, CHR10_HG19, CHR_NONSTANDARD1), Arrays.asList(CHR1_HG18, CHR2_HG18, CHR10_HG18, CHR_NONSTANDARD2), null, UNEQUAL_COMMON_CONTIGS, UNEQUAL_COMMON_CONTIGS_EXCEPTION, false, null },
+            { Arrays.asList(CHR_NONSTANDARD1, CHR1_HG19, CHR2_HG19, CHR10_HG19), Arrays.asList(CHR_NONSTANDARD2, CHR1_HG18, CHR2_HG18, CHR10_HG18), null, UNEQUAL_COMMON_CONTIGS, UNEQUAL_COMMON_CONTIGS_EXCEPTION, false, null },
+            { Arrays.asList(CHR1_HG19, CHR2_HG19),                               Arrays.asList(CHR1_HG18, CHR2_HG18, CHR10_HG18),                   null, UNEQUAL_COMMON_CONTIGS, UNEQUAL_COMMON_CONTIGS_EXCEPTION, false, null },
+
+            // One or both dictionaries in non-canonical human order:
+            { Arrays.asList(CHR1_HG19, CHR10_HG19, CHR2_HG19), Arrays.asList(CHR1_HG19, CHR10_HG19, CHR2_HG19), null, NON_CANONICAL_HUMAN_ORDER, NON_CANONICAL_HUMAN_ORDER_EXCEPTION, false, null },
+            { Arrays.asList(CHR1_HG18, CHR10_HG18, CHR2_HG18), Arrays.asList(CHR1_HG18, CHR10_HG18, CHR2_HG18), null, NON_CANONICAL_HUMAN_ORDER, NON_CANONICAL_HUMAN_ORDER_EXCEPTION, false, null },
+            { Arrays.asList(CHR1_HG19, CHR10_HG19, CHR2_HG19), Arrays.asList(CHR1_HG19, CHR2_HG19, CHR10_HG19), null, NON_CANONICAL_HUMAN_ORDER, NON_CANONICAL_HUMAN_ORDER_EXCEPTION, false, null },
+            { Arrays.asList(CHR1_HG19, CHR2_HG19, CHR10_HG19), Arrays.asList(CHR1_HG19, CHR10_HG19, CHR2_HG19), null, NON_CANONICAL_HUMAN_ORDER, NON_CANONICAL_HUMAN_ORDER_EXCEPTION, false, null },
+            { Arrays.asList(CHR1_B37, CHR10_B37, CHR2_B37),    Arrays.asList(CHR1_B37, CHR10_B37, CHR2_B37),    null, NON_CANONICAL_HUMAN_ORDER, NON_CANONICAL_HUMAN_ORDER_EXCEPTION, false, null },
+            { Arrays.asList(CHR1_B36, CHR10_B36, CHR2_B36),    Arrays.asList(CHR1_B36, CHR10_B36, CHR2_B36),    null, NON_CANONICAL_HUMAN_ORDER, NON_CANONICAL_HUMAN_ORDER_EXCEPTION, false, null },
+
+            // Dictionaries with a common subset, but different relative ordering within that subset:
+            { Arrays.asList(CHR1_HG19, CHR2_HG19),            Arrays.asList(CHR2_HG19, CHR1_HG19),                              null, OUT_OF_ORDER, OUT_OF_ORDER_EXCEPTION, false, null },
+            { Arrays.asList(CHRM_HG19, CHR1_HG19, CHR2_HG19), Arrays.asList(CHR2_HG19, CHR1_HG19, CHRM_HG19),                   null, OUT_OF_ORDER, OUT_OF_ORDER_EXCEPTION, false, null },
+            { Arrays.asList(CHRM_HG19, CHR1_HG19, CHR2_HG19), Arrays.asList(CHRM_HG19, CHR2_HG19, CHR1_HG19),                   null, OUT_OF_ORDER, OUT_OF_ORDER_EXCEPTION, false, null },
+            { Arrays.asList(CHRM_HG19, CHR1_HG19, CHR2_HG19), Arrays.asList(CHR2_HG19, CHRM_HG19, CHR1_HG19),                   null, OUT_OF_ORDER, OUT_OF_ORDER_EXCEPTION, false, null },
+            { Arrays.asList(CHR1_B37, CHR2_B37),              Arrays.asList(CHR2_B37, CHR1_B37),                                null, OUT_OF_ORDER, OUT_OF_ORDER_EXCEPTION, false, null },
+
+
+            // Dictionaries with a common subset in the same relative order, but with different indices.
+            // This will only throw an exception during validation if isReadsToReferenceComparison is true,
+            // and there are intervals overlapping the misindexed contigs:
+
+            // These have isReadsToReferenceComparison == true and overlapping intervals, so we expect an exception:
+            { Arrays.asList(CHRM_HG19, CHR1_HG19),                                                 Arrays.asList(CHR1_HG19),                                          null, DIFFERENT_INDICES, DIFFERENT_INDICES_EXCEPTION, true, hg19AllContigsIntervalSet },
+            { Arrays.asList(CHR1_HG19, CHR2_HG19),                                                 Arrays.asList(CHRM_HG19, CHR1_HG19, CHR2_HG19),                    null, DIFFERENT_INDICES, DIFFERENT_INDICES_EXCEPTION, true, hg19AllContigsIntervalSet },
+            { Arrays.asList(CHR1_HG19, CHR2_HG19),                                                 Arrays.asList(CHRM_HG19, CHR1_HG19, CHR2_HG19, CHR_NONSTANDARD1),  null, DIFFERENT_INDICES, DIFFERENT_INDICES_EXCEPTION, true, hg19AllContigsIntervalSet },
+            { Arrays.asList(CHR1_HG19, CHR2_HG19),                                                 Arrays.asList(CHRM_HG19, CHR_NONSTANDARD1, CHR1_HG19, CHR2_HG19),  null, DIFFERENT_INDICES, DIFFERENT_INDICES_EXCEPTION, true, hg19AllContigsIntervalSet },
+            { Arrays.asList(CHR1_HG19, CHR2_HG19, CHR_NONSTANDARD1, CHRM_HG19 ),                   Arrays.asList(CHR1_HG19, CHR2_HG19, CHRM_HG19),                    null, DIFFERENT_INDICES, DIFFERENT_INDICES_EXCEPTION, true, hg19AllContigsIntervalSet },
+            { Arrays.asList(CHR1_HG19, CHR2_HG19, CHR_NONSTANDARD1, CHRM_HG19, CHR_NONSTANDARD2 ), Arrays.asList(CHR1_HG19, CHR2_HG19, CHRM_HG19, CHR_NONSTANDARD2 ), null, DIFFERENT_INDICES, DIFFERENT_INDICES_EXCEPTION, true, hg19AllContigsIntervalSet },
+            { Arrays.asList(CHR1_HG19, CHR_NONSTANDARD1, CHR2_HG19, CHRM_HG19, CHR_NONSTANDARD2 ), Arrays.asList(CHR1_HG19, CHR2_HG19, CHRM_HG19, CHR_NONSTANDARD2 ), null, DIFFERENT_INDICES, DIFFERENT_INDICES_EXCEPTION, true, hg19AllContigsIntervalSet },
+
+            // These have isReadsToReferenceComparison == true but no overlapping intervals, so we don't expect an exception:
+            { Arrays.asList(CHR2_HG19, CHR10_HG19),                              Arrays.asList(CHR10_HG19),                       null, DIFFERENT_INDICES, null, true, hg19PartialContigsIntervalSet },
+            { Arrays.asList(CHR1_HG19, CHR_NONSTANDARD1, CHR2_HG19),             Arrays.asList(CHR1_HG19, CHR2_HG19),             null, DIFFERENT_INDICES, null, true, hg19PartialContigsIntervalSet },
+            { Arrays.asList(CHR1_HG19, CHR_NONSTANDARD1, CHR2_HG19, CHR10_HG19), Arrays.asList(CHR1_HG19, CHR2_HG19, CHR10_HG19), null, DIFFERENT_INDICES, null, true, hg19PartialContigsIntervalSet },
+
+            // These have isReadsToReferenceComparison == false, so we don't expect an exception:
+            { Arrays.asList(CHRM_HG19, CHR1_HG19),                              Arrays.asList(CHR1_HG19),                       null, DIFFERENT_INDICES, null, false, hg19AllContigsIntervalSet },
+            { Arrays.asList(CHR1_HG19, CHR_NONSTANDARD1, CHR2_HG19, CHRM_HG19), Arrays.asList(CHR1_HG19, CHR2_HG19, CHRM_HG19), null, DIFFERENT_INDICES, null, false, hg19AllContigsIntervalSet },
+
+
+            // Tests for validation exclusions. Note that errors resulting from NO_COMMON_CONTIGs cannot be suppressed
+            { Arrays.asList(CHR1_HG19),                        Arrays.asList(CHR2_HG19),                        ValidationExclusion.TYPE.ALLOW_SEQ_DICT_INCOMPATIBILITY, NO_COMMON_CONTIGS,         NO_COMMON_CONTIGS_EXCEPTION, false, null },
+            { Arrays.asList(CHR1_HG19),                        Arrays.asList(CHR2_HG19),                        ValidationExclusion.TYPE.ALL,                            NO_COMMON_CONTIGS,         NO_COMMON_CONTIGS_EXCEPTION, false, null },
+            { Arrays.asList(CHR1_HG19),                        Arrays.asList(CHR1_HG18),                        ValidationExclusion.TYPE.ALLOW_SEQ_DICT_INCOMPATIBILITY, UNEQUAL_COMMON_CONTIGS,    null, false, null },
+            { Arrays.asList(CHR1_HG19),                        Arrays.asList(CHR1_HG18),                        ValidationExclusion.TYPE.ALL,                            UNEQUAL_COMMON_CONTIGS,    null, false, null },
+            { Arrays.asList(CHR1_HG19, CHR10_HG19, CHR2_HG19), Arrays.asList(CHR1_HG19, CHR10_HG19, CHR2_HG19), ValidationExclusion.TYPE.ALLOW_SEQ_DICT_INCOMPATIBILITY, NON_CANONICAL_HUMAN_ORDER, null, false, null },
+            { Arrays.asList(CHR1_HG19, CHR10_HG19, CHR2_HG19), Arrays.asList(CHR1_HG19, CHR10_HG19, CHR2_HG19), ValidationExclusion.TYPE.ALL,                            NON_CANONICAL_HUMAN_ORDER, null, false, null },
+            { Arrays.asList(CHR1_HG19, CHR2_HG19),             Arrays.asList(CHR2_HG19, CHR1_HG19),             ValidationExclusion.TYPE.ALLOW_SEQ_DICT_INCOMPATIBILITY, OUT_OF_ORDER,              null, false, null },
+            { Arrays.asList(CHR1_HG19, CHR2_HG19),             Arrays.asList(CHR2_HG19, CHR1_HG19),             ValidationExclusion.TYPE.ALL,                            OUT_OF_ORDER,              null, false, null },
+            { Arrays.asList(CHRM_HG19, CHR1_HG19),             Arrays.asList(CHR1_HG19),                        ValidationExclusion.TYPE.ALLOW_SEQ_DICT_INCOMPATIBILITY, DIFFERENT_INDICES,         null, true, hg19AllContigsIntervalSet },
+            { Arrays.asList(CHRM_HG19, CHR1_HG19),             Arrays.asList(CHR1_HG19),                        ValidationExclusion.TYPE.ALL,                            DIFFERENT_INDICES,         null, true, hg19AllContigsIntervalSet }
+        };
+    }
+
+    /**
+     * Verifies that SequenceDictionaryUtils.validateDictionaries() throws exactly the
+     * expected exception type -- or no exception -- for each dictionary pair supplied
+     * by the shared SequenceDictionaryDataProvider.
+     *
+     * Note: dictionaryCompatibility is unused here; it drives the companion
+     * testSequenceDictionaryComparison test that shares this data provider.
+     */
+    @Test( dataProvider = "SequenceDictionaryDataProvider" )
+    public void testSequenceDictionaryValidation( final List<SAMSequenceRecord> firstDictionaryContigs,
+                                                  final List<SAMSequenceRecord> secondDictionaryContigs,
+                                                  final ValidationExclusion.TYPE validationExclusions,
+                                                  final SequenceDictionaryUtils.SequenceDictionaryCompatibility dictionaryCompatibility,
+                                                  final Class expectedExceptionUponValidation,
+                                                  final boolean isReadsToReferenceComparison,
+                                                  final GenomeLocSortedSet intervals ) {
+
+        // Dictionaries are built from cloned contig records so contig indices are
+        // assigned per-dictionary rather than shared across test cases.
+        final SAMSequenceDictionary firstDictionary = createSequenceDictionary(firstDictionaryContigs);
+        final SAMSequenceDictionary secondDictionary = createSequenceDictionary(secondDictionaryContigs);
+        final String testDescription = String.format("First dictionary: %s  Second dictionary: %s  Validation exclusions: %s",
+                                                     SequenceDictionaryUtils.getDictionaryAsString(firstDictionary),
+                                                     SequenceDictionaryUtils.getDictionaryAsString(secondDictionary),
+                                                     validationExclusions);
+
+        // Capture (rather than propagate) any exception so we can assert on its type below.
+        Exception exceptionThrown = null;
+        try {
+            SequenceDictionaryUtils.validateDictionaries(logger,
+                                                         validationExclusions,
+                                                         "firstDictionary",
+                                                         firstDictionary,
+                                                         "secondDictionary",
+                                                         secondDictionary,
+                                                         isReadsToReferenceComparison,
+                                                         intervals);
+        }
+        catch ( Exception e ) {
+            exceptionThrown = e;
+        }
+
+        if ( expectedExceptionUponValidation != null ) {
+            // isInstance() accepts subclasses of the expected exception type.
+            Assert.assertTrue(exceptionThrown != null && expectedExceptionUponValidation.isInstance(exceptionThrown),
+                              String.format("Expected exception %s but saw %s instead. %s",
+                                            expectedExceptionUponValidation.getSimpleName(),
+                                            exceptionThrown == null ? "no exception" : exceptionThrown.getClass().getSimpleName(),
+                                            testDescription));
+        }
+        else {
+            Assert.assertTrue(exceptionThrown == null,
+                              String.format("Expected no exception but saw exception %s instead. %s",
+                                            exceptionThrown != null ? exceptionThrown.getClass().getSimpleName() : "none",
+                                            testDescription));
+        }
+    }
+
+    /**
+     * Verifies that compareDictionaries() classifies each dictionary pair with exactly
+     * the SequenceDictionaryCompatibility value declared in the data provider.
+     *
+     * Note: validationExclusions, expectedExceptionUponValidation,
+     * isReadsToReferenceComparison and intervals are unused here; they drive the
+     * companion validation test that shares this data provider.
+     */
+    @Test( dataProvider = "SequenceDictionaryDataProvider" )
+    public void testSequenceDictionaryComparison( final List<SAMSequenceRecord> firstDictionaryContigs,
+                                                  final List<SAMSequenceRecord> secondDictionaryContigs,
+                                                  final ValidationExclusion.TYPE validationExclusions,
+                                                  final SequenceDictionaryUtils.SequenceDictionaryCompatibility dictionaryCompatibility,
+                                                  final Class expectedExceptionUponValidation,
+                                                  final boolean isReadsToReferenceComparison,
+                                                  final GenomeLocSortedSet intervals ) {
+
+        final SAMSequenceDictionary firstDictionary = createSequenceDictionary(firstDictionaryContigs);
+        final SAMSequenceDictionary secondDictionary = createSequenceDictionary(secondDictionaryContigs);
+        final String testDescription = String.format("First dictionary: %s  Second dictionary: %s",
+                                                     SequenceDictionaryUtils.getDictionaryAsString(firstDictionary),
+                                                     SequenceDictionaryUtils.getDictionaryAsString(secondDictionary));
+
+        final SequenceDictionaryUtils.SequenceDictionaryCompatibility reportedCompatibility =
+              SequenceDictionaryUtils.compareDictionaries(firstDictionary, secondDictionary);
+
+        // Enum values: reference identity comparison (==) is intentional here.
+        Assert.assertTrue(reportedCompatibility == dictionaryCompatibility,
+                          String.format("Dictionary comparison should have returned %s but instead returned %s. %s",
+                                        dictionaryCompatibility, reportedCompatibility, testDescription));
+    }
+
+    /**
+     * Builds a SAMSequenceDictionary from the given contig records, cloning each
+     * record first so the dictionary owns its own copies.
+     *
+     * @param contigs contig records to place in the new dictionary, in order
+     * @return a new dictionary over clones of the given contigs
+     */
+    private SAMSequenceDictionary createSequenceDictionary( final List<SAMSequenceRecord> contigs ) {
+        final List<SAMSequenceRecord> clonedContigs = new ArrayList<SAMSequenceRecord>(contigs.size());
+
+        // Clone the individual SAMSequenceRecords to avoid contig-index issues with shared objects
+        // across multiple dictionaries in tests
+        for ( SAMSequenceRecord contig : contigs ) {
+            clonedContigs.add(contig.clone());
+        }
+
+        return new SAMSequenceDictionary(clonedContigs);
+    }
+}
diff --git a/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/SimpleTimerUnitTest.java b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/SimpleTimerUnitTest.java
new file mode 100644
index 0000000..05203ad
--- /dev/null
+++ b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/SimpleTimerUnitTest.java
@@ -0,0 +1,179 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils;
+
+import org.broadinstitute.gatk.utils.BaseTest;
+import org.testng.Assert;
+import org.testng.annotations.Test;
+
+import java.lang.reflect.Field;
+
+import java.util.Arrays;
+import java.util.List;
+import java.util.concurrent.TimeUnit;
+
+/**
+ * Unit tests for SimpleTimer: second- and nanosecond-based elapsed-time accounting,
+ * start/stop/restart semantics, and behavior when the underlying clock jumps
+ * (simulated checkpoint/restart).
+ */
+public class SimpleTimerUnitTest extends BaseTest {
+    // Name given to every timer under test; also used to verify getName().
+    private final static String NAME = "unit.test.timer";
+
+    // Exercises the full start/stop/restart lifecycle and checks that the
+    // nanosecond and second views of the timer remain mutually consistent.
+    @Test
+    public void testSimpleTimer() {
+        SimpleTimer t = new SimpleTimer(NAME);
+        Assert.assertEquals(t.getName(), NAME, "Name is not the provided one");
+        Assert.assertFalse(t.isRunning(), "Initial state of the timer is running");
+        Assert.assertEquals(t.getElapsedTime(), 0.0, "New timer elapsed time should be 0");
+        Assert.assertEquals(t.getElapsedTimeNano(), 0l, "New timer elapsed time nano should be 0");
+
+        // While running, successive reads must be monotonically non-decreasing.
+        t.start();
+        Assert.assertTrue(t.isRunning(), "Started timer isn't running");
+        Assert.assertTrue(t.getElapsedTime() >= 0.0, "Elapsed time should be >= 0");
+        Assert.assertTrue(t.getElapsedTimeNano() >= 0.0, "Elapsed time nano should be >= 0");
+        long n1 = t.getElapsedTimeNano();
+        double t1 = t.getElapsedTime();
+        idleLoop(); // idle loop to wait a tiny bit of time
+        long n2 = t.getElapsedTimeNano();
+        double t2 = t.getElapsedTime();
+        Assert.assertTrue(t2 >= t1, "T2 >= T1 for a running time");
+        Assert.assertTrue(n2 >= n1, "T2 >= T1 nano for a running time");
+
+        // Once stopped, the elapsed time must be frozen: repeated reads are identical.
+        t.stop();
+        Assert.assertFalse(t.isRunning(), "Stopped timer still running");
+        long n3 = t.getElapsedTimeNano();
+        double t3 = t.getElapsedTime();
+        idleLoop(); // idle loop to wait a tiny bit of time
+        double t4 = t.getElapsedTime();
+        long n4 = t.getElapsedTimeNano();
+        Assert.assertTrue(t4 == t3, "Elapsed times for two calls of stop timer not the same");
+        Assert.assertTrue(n4 == n3, "Elapsed times for two calls of stop timer not the same");
+
+        // restart() resumes accumulation on top of the previously recorded elapsed time.
+        t.restart();
+        idleLoop(); // idle loop to wait a tiny bit of time
+        double t5 = t.getElapsedTime();
+        long n5 = t.getElapsedTimeNano();
+        Assert.assertTrue(t.isRunning(), "Restarted timer should be running");
+        idleLoop(); // idle loop to wait a tiny bit of time
+        double t6 = t.getElapsedTime();
+        long n6 = t.getElapsedTimeNano();
+        Assert.assertTrue(t5 >= t4, "Restarted timer elapsed time should be after elapsed time preceding the restart");
+        Assert.assertTrue(t6 >= t5, "Second elapsed time not after the first in restarted timer");
+        Assert.assertTrue(n5 >= n4, "Restarted timer elapsed time nano should be after elapsed time preceding the restart");
+        Assert.assertTrue(n6 >= n5, "Second elapsed time nano not after the first in restarted timer");
+
+        // The nano readings converted to seconds must agree with the direct second
+        // readings of the same instants, to within 0.1 s.
+        final List<Double> secondTimes = Arrays.asList(t1, t2, t3, t4, t5, t6);
+        final List<Long> nanoTimes     = Arrays.asList(n1, n2, n3, n4, n5, n6);
+        for ( int i = 0; i < nanoTimes.size(); i++ )
+            Assert.assertEquals(
+                    SimpleTimer.nanoToSecondsAsDouble(nanoTimes.get(i)),
+                    secondTimes.get(i), 1e-1, "Nanosecond and second timer disagree");
+    }
+
+    // Two currentTimeNano() readings around a ~100-iteration sum must differ, but by
+    // less than one millisecond -- i.e. the clock has sub-millisecond resolution.
+    @Test
+    public void testNanoResolution() {
+        SimpleTimer t = new SimpleTimer(NAME);
+
+        // test the nanosecond resolution
+        long n7 = t.currentTimeNano();
+        int sum = 0;
+        for ( int i = 0; i < 100; i++) sum += i;
+        long n8 = t.currentTimeNano();
+        final long delta = n8 - n7;
+        final long oneMilliInNano = TimeUnit.MILLISECONDS.toNanos(1);
+        logger.warn("nanoTime before nano operation " + n7);
+        logger.warn("nanoTime after nano operation of summing 100 ints " + n8 + ", sum = " + sum + " time delta " + delta + " vs. 1 millsecond in nano " + oneMilliInNano);
+        Assert.assertTrue(n8 > n7, "SimpleTimer doesn't appear to have nanoSecond resolution: n8 " + n8 + " <= n7 " + n7);
+        Assert.assertTrue(delta < oneMilliInNano,
+                "SimpleTimer doesn't appear to have nanoSecond resolution: time delta is " + delta + " vs 1 millisecond in nano " + oneMilliInNano);
+    }
+
+    // A trivially fast operation must report a positive elapsed time that is still
+    // small (< 10 ms).  NOTE(review): the upper bounds can be exceeded on a heavily
+    // loaded machine -- this test is inherently timing-sensitive.
+    @Test
+    public void testMeaningfulTimes() {
+        SimpleTimer t = new SimpleTimer(NAME);
+
+        t.start();
+        for ( int i = 0; i < 100; i++ ) ;
+        long nano = t.getElapsedTimeNano();
+        double secs = t.getElapsedTime();
+
+        Assert.assertTrue(secs > 0, "Seconds timer doesn't appear to count properly: elapsed time is " + secs);
+        Assert.assertTrue(secs < 0.01, "Fast operation said to take longer than 10 milliseconds: elapsed time in seconds " + secs);
+
+        Assert.assertTrue(nano > 0, "Nanosecond timer doesn't appear to count properly: elapsed time is " + nano);
+        final long maxTimeInMicro = 10000;
+        final long maxTimeInNano = TimeUnit.MICROSECONDS.toNanos(maxTimeInMicro);
+        Assert.assertTrue(nano < maxTimeInNano, "Fast operation said to take longer than " + maxTimeInMicro + " microseconds: elapsed time in nano " + nano + " micro " + TimeUnit.NANOSECONDS.toMicros(nano));
+    }
+
+    // Simulates clock jumps across checkpoint/restart by rewriting SimpleTimer's
+    // private "nanoTimeOffset" field via reflection, and checks that backwards jumps
+    // never produce negative or inflated elapsed times.
+    @Test
+    public void testCheckpointRestart() throws Exception {
+        SimpleTimer t = new SimpleTimer(NAME);
+        
+        final Field offsetField = t.getClass().getDeclaredField("nanoTimeOffset");
+        offsetField.setAccessible(true);
+        long offset = ((Long) offsetField.get(t)).longValue();
+
+        t.start();
+        idleLoop();
+        // Make it as if clock has jumped into the past
+        offsetField.set(t, offset + TimeUnit.SECONDS.toNanos(10));
+        t.stop();
+        offset = ((Long) offsetField.get(t)).longValue();
+        Assert.assertEquals(t.getElapsedTime(), 0.0, "Time over restart is not zero.");
+
+        t.start();
+        idleLoop();
+        t.stop();
+        offset = ((Long) offsetField.get(t)).longValue();
+        double elapsed = t.getElapsedTime();
+        Assert.assertTrue(elapsed >= 0.0, "Elapsed time is zero.");
+        t.restart();
+        // Make the clock jump again by just a little
+        offsetField.set(t, offset + TimeUnit.SECONDS.toNanos(1));
+        idleLoop();
+        t.stop();
+        offset = ((Long) offsetField.get(t)).longValue();
+        Assert.assertTrue(t.getElapsedTime() > elapsed, "Small clock drift causing reset.");
+        elapsed = t.getElapsedTime();
+        // Now a bigger jump, into the future this time.
+        t.restart();
+        // Make the clock jump again by a lot
+        offsetField.set(t, offset - TimeUnit.SECONDS.toNanos(10));
+        t.stop();
+        Assert.assertEquals(t.getElapsedTime(), elapsed, "Time added over checkpoint/restart.");
+
+        // Test without stopping
+        t.start();
+        offset = ((Long) offsetField.get(t)).longValue();
+        // Make it as if clock has jumped into the past
+        offsetField.set(t, offset + TimeUnit.SECONDS.toNanos(10));       
+        Assert.assertEquals(t.getElapsedTime(), 0.0, "Elapsed time after C/R is not zero.");
+        idleLoop();
+        Assert.assertTrue(t.getElapsedTime() > 0.0, "Elapsed time zero after re-sync.");
+
+    }
+
+    // Burns a small amount of wall time between timer reads.
+    // NOTE(review): an empty counted loop may be eliminated by the JIT, in which case
+    // this waits for effectively zero time -- the tests above only rely on >= ordering,
+    // but confirm this still behaves as intended on current JVMs.
+    private static void idleLoop() {
+        for ( int i = 0; i < 100000; i++ ) ; // idle loop to wait a tiny bit of time
+    }
+}
\ No newline at end of file
diff --git a/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/TestNGTestTransformer.java b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/TestNGTestTransformer.java
new file mode 100644
index 0000000..e4772b3
--- /dev/null
+++ b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/TestNGTestTransformer.java
@@ -0,0 +1,62 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils;
+
+import org.apache.log4j.Logger;
+import org.testng.IAnnotationTransformer;
+import org.testng.annotations.ITestAnnotation;
+
+import java.lang.reflect.Constructor;
+import java.lang.reflect.Method;
+
+/**
+ * Provide default @Test values for GATK testng tests.
+ *
+ * Currently only sets the maximum runtime to 40 minutes, if it's not been specified.
+ *
+ * See http://beust.com/weblog/2006/10/18/annotation-transformers-in-java/
+ *
+ * @author depristo
+ * @since 10/31/12
+ * @version 0.1
+ */
+public class TestNGTestTransformer implements IAnnotationTransformer {
+    // Default per-test timeout applied when a @Test does not specify one.
+    public static final long DEFAULT_TIMEOUT = 1000 * 60 * 40; // 40 minutes max per test
+
+    final static Logger logger = Logger.getLogger(TestNGTestTransformer.class);
+
+    /**
+     * Applies the default timeout to any @Test annotation that does not declare one.
+     *
+     * @param annotation      the @Test annotation being transformed (mutated in place)
+     * @param testClass       class the annotation is attached to, if any (may be null)
+     * @param testConstructor constructor the annotation is attached to, if any (may be null)
+     * @param testMethod      method the annotation is attached to, if any (may be null)
+     */
+    @Override
+    public void transform(ITestAnnotation annotation,
+                          Class testClass,
+                          Constructor testConstructor,
+                          Method testMethod)
+    {
+        // getTimeOut() == 0 means no timeout was specified on the annotation.
+        if ( annotation.getTimeOut() == 0 ) {
+            logger.warn("test " + (testMethod == null ? "<null>" : testMethod.toString()) + " has no specified timeout, adding default timeout " + DEFAULT_TIMEOUT / 1000 / 60 + " minutes");
+            annotation.setTimeOut(DEFAULT_TIMEOUT);
+        }
+    }
+}
+
diff --git a/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/UtilsUnitTest.java b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/UtilsUnitTest.java
new file mode 100644
index 0000000..22e5348
--- /dev/null
+++ b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/UtilsUnitTest.java
@@ -0,0 +1,362 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils;
+
+import org.apache.commons.io.FileUtils;
+import org.broadinstitute.gatk.utils.io.IOUtils;
+import org.testng.Assert;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+import java.io.File;
+import java.util.*;
+
+/**
+ * Testing framework for general purpose utilities class.
+ *
+ * @author hanna
+ * @version 0.1
+ */
+
+public class UtilsUnitTest extends BaseTest {
+    // Utils.append(left, elems...) should return left ++ elems; checked for left
+    // lists of 0-3 elements and 0-2 appended elements.
+    @Test
+    public void testAppend() {
+        for ( int leftSize : Arrays.asList(0, 1, 2, 3) ) {
+            for ( final int rightSize : Arrays.asList(0, 1, 2) ) {
+                // left = [0..leftSize), total = [0..leftSize+rightSize); appending the
+                // next rightSize consecutive ints to left must reproduce total.
+                final List<Integer> left = new LinkedList<Integer>();
+                for ( int i = 0; i < leftSize; i++ ) left.add(i);
+                final List<Integer> total = new LinkedList<Integer>();
+                for ( int i = 0; i < leftSize + rightSize; i++ ) total.add(i);
+
+                if ( rightSize == 0 )
+                    Assert.assertEquals(Utils.append(left), total);
+                if ( rightSize == 1 )
+                    Assert.assertEquals(Utils.append(left, leftSize), total);
+                if ( rightSize == 2 )
+                    Assert.assertEquals(Utils.append(left, leftSize, leftSize + 1), total);
+            }
+        }
+
+    }
+
+    // dupString with count 0 must yield the empty string.
+    @Test
+    public void testDupStringNoChars() {
+        String duped = Utils.dupString('a',0);
+        Assert.assertEquals(duped.length(), 0, "dupString did not produce zero-length string");
+    }
+
+    // dupString with count 1 must yield a one-character string of the given char.
+    @Test
+    public void testDupStringOneChar() {
+        String duped = Utils.dupString('b',1);
+        Assert.assertEquals(duped.length(), 1, "dupString did not produce single character string");
+        Assert.assertEquals(duped.charAt(0), 'b', "dupString character was incorrect");
+    }
+
+    // Exhaustive truth table for Utils.xor over two booleans.
+    @Test
+    public void testXor() {
+        Assert.assertEquals(Utils.xor(false, false), false, "xor F F failed");
+        Assert.assertEquals(Utils.xor(false, true), true, "xor F T failed");
+        Assert.assertEquals(Utils.xor(true, false), true, "xor T F failed");
+        Assert.assertEquals(Utils.xor(true, true), false, "xor T T failed");
+    }
+
+    // dupString with count > 1 must repeat the character exactly count times.
+    @Test
+    public void testDupStringMultiChar() {
+        String duped = Utils.dupString('c',5);
+        Assert.assertEquals(duped.length(), 5, "dupString did not produce five character string");
+        Assert.assertEquals(duped,"ccccc","dupString string was incorrect");
+    }
+
+    /**
+     * joinMap must render entries in insertion order (LinkedHashMap) as
+     * key&lt;keyValueSep&gt;value pairs joined by the entry separator.
+     */
+    @Test
+    public void testJoinMap() {
+        Map<String,Integer> map = new LinkedHashMap<String,Integer>();
+        map.put("one",1);
+        map.put("two",2);
+        String joined = Utils.joinMap("-",";",map);
+        // assertEquals(actual, expected) reports both strings on failure, unlike the
+        // previous assertTrue(expected.equals(actual)) which only reported "expected true".
+        Assert.assertEquals(joined, "one-1;two-2");
+    }
+
+    /**
+     * Same as testJoinMap but with six entries, verifying insertion order is preserved
+     * across a larger map.
+     */
+    @Test
+    public void testJoinMapLargerSet() {
+        Map<String,Integer> map = new LinkedHashMap<String,Integer>();
+        map.put("one",1);
+        map.put("two",2);
+        map.put("three",1);
+        map.put("four",2);
+        map.put("five",1);
+        map.put("six",2);
+        String joined = Utils.joinMap("-",";",map);
+        // assertEquals(actual, expected) reports both strings on failure, unlike the
+        // previous assertTrue(expected.equals(actual)) which only reported "expected true".
+        Assert.assertEquals(joined, "one-1;two-2;three-1;four-2;five-1;six-2");
+    }
+
+    // Utils.concat over byte arrays must equal string concatenation of the inputs,
+    // for 0 through 4 arguments.
+    @Test
+    public void testConcat() {
+        final String s1 = "A";
+        final String s2 = "CC";
+        final String s3 = "TTT";
+        final String s4 = "GGGG";
+        Assert.assertEquals(new String(Utils.concat()), "");
+        Assert.assertEquals(new String(Utils.concat(s1.getBytes())), s1);
+        Assert.assertEquals(new String(Utils.concat(s1.getBytes(), s2.getBytes())), s1 + s2);
+        Assert.assertEquals(new String(Utils.concat(s1.getBytes(), s2.getBytes(), s3.getBytes())), s1 + s2 + s3);
+        Assert.assertEquals(new String(Utils.concat(s1.getBytes(), s2.getBytes(), s3.getBytes(), s4.getBytes())), s1 + s2 + s3 + s4);
+    }
+
+    // escapeExpressions must split on whitespace while keeping single-quoted groups
+    // as one token, regardless of leading/trailing/repeated whitespace.
+    @Test
+    public void testEscapeExpressions() {
+        String[] expected, actual;
+
+        // No quoting: whitespace (any amount, anywhere) is a plain separator.
+        expected = new String[] {"one", "two", "three"};
+        actual = Utils.escapeExpressions("one two three");
+        Assert.assertEquals(actual, expected);
+        actual = Utils.escapeExpressions(" one two three");
+        Assert.assertEquals(actual, expected);
+        actual = Utils.escapeExpressions("one two three ");
+        Assert.assertEquals(actual, expected);
+        actual = Utils.escapeExpressions(" one two three ");
+        Assert.assertEquals(actual, expected);
+        actual = Utils.escapeExpressions("  one  two  three  ");
+        Assert.assertEquals(actual, expected);
+
+        // Quoted group in the middle of the expression.
+        expected = new String[] {"one", "two", "three four", "five", "six"};
+        actual = Utils.escapeExpressions("one two 'three four' five six");
+        Assert.assertEquals(actual, expected);
+        actual = Utils.escapeExpressions(" one two 'three four' five six");
+        Assert.assertEquals(actual, expected);
+        actual = Utils.escapeExpressions("one two 'three four' five six ");
+        Assert.assertEquals(actual, expected);
+        actual = Utils.escapeExpressions(" one two 'three four' five six ");
+        Assert.assertEquals(actual, expected);
+        actual = Utils.escapeExpressions("  one  two  'three four'  five  six  ");
+        Assert.assertEquals(actual, expected);
+
+        // Quoted group at the start of the expression.
+        expected = new String[] {"one two", "three", "four"};
+        actual = Utils.escapeExpressions("'one two' three four");
+        Assert.assertEquals(actual, expected);
+        actual = Utils.escapeExpressions(" 'one two' three four");
+        Assert.assertEquals(actual, expected);
+        actual = Utils.escapeExpressions("'one two' three four ");
+        Assert.assertEquals(actual, expected);
+        actual = Utils.escapeExpressions(" 'one two' three four ");
+        Assert.assertEquals(actual, expected);
+        actual = Utils.escapeExpressions("  'one two'  three  four  ");
+        Assert.assertEquals(actual, expected);
+
+        // Quoted group at the end of the expression.
+        expected = new String[] {"one", "two", "three four"};
+        actual = Utils.escapeExpressions("one two 'three four'");
+        Assert.assertEquals(actual, expected);
+        actual = Utils.escapeExpressions(" one two 'three four'");
+        Assert.assertEquals(actual, expected);
+        actual = Utils.escapeExpressions("one two 'three four' ");
+        Assert.assertEquals(actual, expected);
+        actual = Utils.escapeExpressions(" one two 'three four' ");
+        Assert.assertEquals(actual, expected);
+        actual = Utils.escapeExpressions("  one  two  'three four'  ");
+        Assert.assertEquals(actual, expected);
+    }
+
+    // Utils.asList(int...) must reject null and otherwise return a live view over the
+    // backing array: writes to the array are visible through the list.
+    @Test(dataProvider = "asIntegerListData")
+    public void testAsIntegerList(final int[] values) {
+        if (values == null) {
+            try {
+                Utils.asList((int[]) null);
+                Assert.fail("Should have thrown an exception");
+            } catch (final IllegalArgumentException ex) {
+                // good.
+            }
+        } else {
+            final Random rdn = Utils.getRandomGenerator();
+            final int[] valuesClone = values.clone();
+            final List<Integer> list = Utils.asList(valuesClone);
+            Assert.assertNotNull(list);
+            Assert.assertEquals(list.size(),values.length);
+            // Initial contents must match the source array element-for-element.
+            for (int i = 0; i < values.length; i++)
+                Assert.assertEquals((int) list.get(i),values[i]);
+            // Mutate the backing array at random positions...
+            for (int i = 0; i < values.length; i++)
+                valuesClone[rdn.nextInt(values.length)] = rdn.nextInt(1000);
+            // ...and verify the list reflects those writes (i.e. it is a view, not a copy).
+            for (int i = 0; i < values.length; i++)
+                Assert.assertEquals((int) list.get(i),valuesClone[i]);
+        }
+    }
+
+    // Utils.asList(double...) must reject null and otherwise return a live view over
+    // the backing array: writes to the array are visible through the list.
+    @Test(dataProvider = "asDoubleListData")
+    public void testAsDoubleList(final double[] values) {
+        if (values == null) {
+            try {
+                // Fixed copy-paste bug: this previously called the int[] overload via
+                // "(int[]) null", so the double[] overload's null handling was never tested.
+                Utils.asList((double[]) null);
+                Assert.fail("Should have thrown an exception");
+            } catch (final IllegalArgumentException ex) {
+                // good.
+            }
+        } else {
+            final Random rdn = Utils.getRandomGenerator();
+            final double[] valuesClone = values.clone();
+            final List<Double> list = Utils.asList(valuesClone);
+            Assert.assertNotNull(list);
+            Assert.assertEquals(list.size(),values.length);
+            // Initial contents must match the source array element-for-element.
+            for (int i = 0; i < values.length; i++)
+                Assert.assertEquals((double) list.get(i),values[i]);
+            // Mutate the backing array at random positions...
+            for (int i = 0; i < values.length; i++)
+                valuesClone[rdn.nextInt(values.length)] = rdn.nextDouble() * 1000;
+            // ...and verify the list reflects those writes (i.e. it is a view, not a copy).
+            for (int i = 0; i < values.length; i++)
+                Assert.assertEquals((double) list.get(i),valuesClone[i]);
+        }
+    }
+
+    // calcMD5 over the example FASTA must match its known MD5, whether fed the raw
+    // bytes or the file contents as a String.
+    @Test
+    public void testCalcMD5() throws Exception {
+        final File source = new File(publicTestDir + "exampleFASTA.fasta");
+        final String sourceMD5 = "36880691cf9e4178216f7b52e8d85fbe";
+
+        final byte[] sourceBytes = IOUtils.readFileIntoByteArray(source);
+        Assert.assertEquals(Utils.calcMD5(sourceBytes), sourceMD5);
+
+        final String sourceString = FileUtils.readFileToString(source);
+        Assert.assertEquals(Utils.calcMD5(sourceString), sourceMD5);
+    }
+
+    /**
+     * Exhaustively checks Utils.longestCommonPrefix/longestCommonSuffix over
+     * strings sharing a prefix (resp. suffix) of known length with differing
+     * tails (resp. heads), capped by a maximum match length.
+     */
+    @Test
+    public void testLongestCommonOps() {
+        for ( int prefixLen = 0; prefixLen < 20; prefixLen++ ) {
+            for ( int extraSeq1Len = 0; extraSeq1Len < 10; extraSeq1Len++ ) {
+                for ( int extraSeq2Len = 0; extraSeq2Len < 10; extraSeq2Len++ ) {
+                    for ( int max = 0; max < 50; max++ ) {
+                        final String prefix = Utils.dupString("A", prefixLen);
+                        // The common run can never exceed the cap.
+                        final int expected = Math.min(prefixLen, max);
+
+                        {
+                            // Differing tails ("C" vs "G") guarantee the match stops at the prefix.
+                            // Fixed: seq2 previously reused extraSeq1Len, leaving extraSeq2Len untested.
+                            final String seq1 = prefix + Utils.dupString("C", extraSeq1Len);
+                            final String seq2 = prefix + Utils.dupString("G", extraSeq2Len);
+                            Assert.assertEquals(Utils.longestCommonPrefix(seq1.getBytes(), seq2.getBytes(), max), expected, "LongestCommonPrefix failed: seq1 " + seq1 + " seq2 " + seq2 + " max " + max);
+                        }
+
+                        {
+                            // Mirror case: differing heads, shared suffix.
+                            final String seq1 = Utils.dupString("C", extraSeq1Len) + prefix;
+                            final String seq2 = Utils.dupString("G", extraSeq2Len) + prefix;
+                            Assert.assertEquals(Utils.longestCommonSuffix(seq1.getBytes(), seq2.getBytes(), max), expected, "longestCommonSuffix failed: seq1 " + seq1 + " seq2 " + seq2 + " max " + max);
+                        }
+                    }
+                }
+            }
+        }
+    }
+
+    /**
+     * Produces every (string, frontTrim, backTrim) combination over "AAAA"
+     * where the two trims together do not exceed the string length.
+     */
+    @DataProvider(name = "trim")
+    public Object[][] createTrimTestData() {
+        final String s = "AAAA";
+        final List<Object[]> cases = new ArrayList<Object[]>();
+        for ( int leading = 0; leading < s.length(); leading++ ) {
+            for ( int trailing = 0; trailing < s.length(); trailing++ ) {
+                final boolean feasible = leading + trailing <= s.length();
+                if ( feasible ) {
+                    cases.add(new Object[]{s, leading, trailing});
+                }
+            }
+        }
+        return cases.toArray(new Object[][]{});
+    }
+
+    /** Trimming front+back bases must shrink the array by exactly that much. */
+    @Test(dataProvider = "trim", enabled = true)
+    public void testTrim(final String s, final int frontTrim, final int backTrim) {
+        final int expectedLength = s.length() - frontTrim - backTrim;
+        final byte[] trimmed = Utils.trimArray(s.getBytes(), frontTrim, backTrim);
+        Assert.assertEquals(expectedLength, trimmed.length);
+    }
+
+    /**
+     * Checks Utils.equalRange against the expected verdict, plus the
+     * reflexive property: any range always equals itself in the same array.
+     */
+    @Test(dataProvider = "equalRangeData", enabled = true)
+    public void testEqualRange(final byte[] array1, final byte[] array2, final int offset1, final int offset2, final int length, final boolean expected) {
+        Assert.assertEquals(Utils.equalRange(array1,offset1,array2,offset2,length),expected);
+        // Reflexivity: a range compared against itself must always match.
+        Assert.assertTrue(Utils.equalRange(array1,offset1,array1,offset1,length));
+        Assert.assertTrue(Utils.equalRange(array2,offset2,array2,offset2,length));
+
+    }
+
+    /** Cases: empty arrays, matching sub-range, zero-length range, full-length mismatch. */
+    @DataProvider(name = "equalRangeData")
+    public Object[][] equalRangeData() {
+        final List<Object[]> cases = new ArrayList<Object[]>();
+        cases.add(new Object[]{ new byte[0], new byte[0], 0, 0, 0, true });
+        cases.add(new Object[]{ "ABCF".getBytes(), "BC".getBytes(), 1, 0, 2, true });
+        cases.add(new Object[]{ "ABCF".getBytes(), "".getBytes(), 1, 0, 0, true });
+        cases.add(new Object[]{ "ABCF".getBytes(), "ACBF".getBytes(), 0, 0, 4, false });
+        return cases.toArray(new Object[][]{});
+    }
+
+    /**
+     * Utils.skimArray must keep exactly the positions whose mask char is '1'
+     * and drop those marked '0', preserving order.
+     *
+     * Note the inverted naming: in the {@code remove} mask a '1' means KEEP
+     * (removeBoolean[i] == false) and a '0' means DROP.
+     */
+    @Test(dataProvider = "skimArrayData")
+    public void testSkimArray(final String original, final String remove) {
+        // Guard against malformed test data: the original code sized the mask
+        // array by remove.length() but indexed it up to original.length().
+        Assert.assertEquals(remove.length(), original.length());
+        final StringBuilder resultBuilder = new StringBuilder();
+        final boolean[] removeBoolean = new boolean[remove.length()];
+        for (int i = 0; i < original.length(); i++) {
+            if (remove.charAt(i) == '1') {
+                resultBuilder.append(original.charAt(i));
+                removeBoolean[i] = false;
+            } else {
+                removeBoolean[i] = true;
+            }
+        }
+        final String expected = resultBuilder.toString();
+        final byte[] resultBytes = Utils.skimArray(original.getBytes(),removeBoolean);
+        final String resultString = new String(resultBytes);
+        Assert.assertEquals(resultString,expected);
+    }
+
+    /** Masks over a fixed string: all-keep, single drops, halves, alternating, mixed. */
+    @DataProvider(name = "skimArrayData")
+    public Object[][] skimArrayData() {
+        final String text = "romeo+juliette";
+        final String[] masks = {
+                "11111111111111",
+                "11111011111111",
+                "00000011111111",
+                "11111100000000",
+                "11111011111111",
+                "01111010000001",
+                "01100110000110",
+                "10101010101010",
+                "01010101010101",
+                "01111010111001",
+        };
+        final Object[][] cases = new Object[masks.length][];
+        for (int i = 0; i < masks.length; i++)
+            cases[i] = new Object[]{ text, masks[i] };
+        return cases;
+    }
+
+
+    @DataProvider(name = "asIntegerListData")
+    public Object[][] asIntegerListData() {
+        return new Object[][] {
+                { null },
+                {new int[0]},
+                {new int[]{1, 2, 3, 4, 5}},
+                {new int[]{2}},
+                {new int[]{3,4}}
+        };
+    }
+
+    /** double[] inputs: null, empty, regular values, and non-finite specials. */
+    @DataProvider(name = "asDoubleListData")
+    public Object[][] asDoubleListData() {
+        final List<Object[]> cases = new ArrayList<Object[]>();
+        cases.add(new Object[]{ null });
+        cases.add(new Object[]{ new double[0] });
+        cases.add(new Object[]{ new double[]{1, 2, 3, 4, 5} });
+        cases.add(new Object[]{ new double[]{2} });
+        cases.add(new Object[]{ new double[]{3, 4} });
+        // NaN and infinities exercise the view's handling of non-finite doubles.
+        cases.add(new Object[]{ new double[]{Double.NaN, Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY} });
+        return cases.toArray(new Object[][]{});
+    }
+}
diff --git a/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/activeregion/ActiveRegionUnitTest.java b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/activeregion/ActiveRegionUnitTest.java
new file mode 100644
index 0000000..8fc6420
--- /dev/null
+++ b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/activeregion/ActiveRegionUnitTest.java
@@ -0,0 +1,395 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.activeregion;
+
+
+// the imports for unit testing.
+
+
+import htsjdk.samtools.reference.IndexedFastaSequenceFile;
+import htsjdk.samtools.SAMFileHeader;
+import org.broadinstitute.gatk.utils.BaseTest;
+import org.broadinstitute.gatk.utils.GenomeLoc;
+import org.broadinstitute.gatk.utils.GenomeLocParser;
+import org.broadinstitute.gatk.utils.GenomeLocSortedSet;
+import org.broadinstitute.gatk.utils.fasta.CachingIndexedFastaSequenceFile;
+import org.broadinstitute.gatk.utils.sam.ArtificialSAMUtils;
+import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
+import org.testng.Assert;
+import org.testng.annotations.BeforeClass;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.util.*;
+
+
+/**
+ * Unit tests for ActiveRegion: creation and basic accessors, reference
+ * sequence getters, read bookkeeping, rejection of mis-ordered reads,
+ * splitting against engine intervals, and trimming to a desired span.
+ */
+public class ActiveRegionUnitTest extends BaseTest {
+    private final static boolean DEBUG = false;
+    // Parser and reference sequence shared by all tests; built once in init().
+    private GenomeLocParser genomeLocParser;
+    private IndexedFastaSequenceFile seq;
+    // Contig "1" of the b37 reference and its length, used for boundary cases.
+    private String contig;
+    private int contigLength;
+
+    @BeforeClass
+    public void init() throws FileNotFoundException {
+        // sequence
+        seq = new CachingIndexedFastaSequenceFile(new File(b37KGReference));
+        genomeLocParser = new GenomeLocParser(seq);
+        contig = "1";
+        contigLength = genomeLocParser.getContigInfo(contig).getSequenceLength();
+    }
+
+    /**
+     * Cross product of region start/size/extension x active flag x whether
+     * supporting profile states are attached.
+     */
+    @DataProvider(name = "ActionRegionCreationTest")
+    public Object[][] makePollingData() {
+        List<Object[]> tests = new ArrayList<Object[]>();
+        for ( final int start : Arrays.asList(1, 10, 100, contigLength - 10, contigLength - 1) ) {
+            for ( final int size : Arrays.asList(1, 10, 100, 1000) ) {
+                for ( final int ext : Arrays.asList(0, 1, 10, 100) ) {
+                    for ( final boolean isActive : Arrays.asList(true, false) ) {
+                        for ( final boolean addStates : Arrays.asList(true, false) ) {
+                            List<ActivityProfileState> states = null;
+                            if ( addStates ) {
+                                states = new LinkedList<ActivityProfileState>();
+                                for ( int i = start; i < start + size; i++ ) {
+                                    // NOTE(review): "i + start" places states at 2*start .. 2*start+size-1,
+                                    // not across the region [start, start+size-1]; looks like it should be
+                                    // just "i". Confirm against ActiveRegion's use of supporting states.
+                                    states.add(new ActivityProfileState(genomeLocParser.createGenomeLoc(contig, i + start), isActive ? 1.0 : 0.0));
+                                }
+                            }
+                            final GenomeLoc loc = genomeLocParser.createGenomeLoc(contig, start, start + size - 1);
+                            tests.add(new Object[]{loc, states, isActive, ext});
+                        }
+                    }
+                }
+            }
+        }
+
+        return tests.toArray(new Object[][]{});
+    }
+
+    /**
+     * A fresh region must report its location, extension-clipped extended and
+     * read-span bounds, active flag, emptiness, and supporting states.
+     */
+    @Test(enabled = !DEBUG, dataProvider = "ActionRegionCreationTest")
+    public void testCreatingActiveRegions(final GenomeLoc loc, final List<ActivityProfileState> supportingStates, final boolean isActive, final int extension) {
+        final ActiveRegion region = new ActiveRegion(loc, supportingStates, isActive, genomeLocParser, extension);
+        Assert.assertEquals(region.getLocation(), loc);
+        // Extended/read-span locs are the raw loc padded by the extension,
+        // clipped to [1, contigLength].
+        Assert.assertEquals(region.getExtendedLoc().getStart(), Math.max(loc.getStart() - extension, 1));
+        Assert.assertEquals(region.getExtendedLoc().getStop(), Math.min(loc.getStop() + extension, contigLength));
+        Assert.assertEquals(region.getReadSpanLoc().getStart(), Math.max(loc.getStart() - extension, 1));
+        Assert.assertEquals(region.getReadSpanLoc().getStop(), Math.min(loc.getStop() + extension, contigLength));
+        Assert.assertEquals(region.isActive(), isActive);
+        Assert.assertEquals(region.getExtension(), extension);
+        Assert.assertEquals(region.getReads(), Collections.emptyList());
+        Assert.assertEquals(region.size(), 0);
+        Assert.assertEquals(region.getSupportingStates(), supportingStates == null ? Collections.emptyList() : supportingStates);
+        Assert.assertNotNull(region.toString());
+
+        // Reference getters must honor both the span and any extra padding.
+        assertGoodReferenceGetter(region.getActiveRegionReference(seq), region.getExtendedLoc(), 0);
+        assertGoodReferenceGetter(region.getActiveRegionReference(seq, 10), region.getExtendedLoc(), 10);
+        assertGoodReferenceGetter(region.getFullReference(seq), region.getReadSpanLoc(), 0);
+        assertGoodReferenceGetter(region.getFullReference(seq, 10), region.getReadSpanLoc(), 10);
+    }
+
+    // Compares fetched reference bases against a direct (padded, clipped)
+    // subsequence lookup on the backing fasta.
+    private void assertGoodReferenceGetter(final byte[] actualBytes, final GenomeLoc span, final int padding) {
+        final int expectedStart = Math.max(span.getStart() - padding, 1);
+        final int expectedStop = Math.min(span.getStop() + padding, contigLength);
+        final byte[] expectedBytes = seq.getSubsequenceAt(span.getContig(), expectedStart, expectedStop).getBases();
+        Assert.assertEquals(actualBytes, expectedBytes);
+    }
+
+    /**
+     * Regions paired with artificial reads that overlap them, over varying
+     * start offsets and read sizes (clipped to the contig bounds).
+     */
+    @DataProvider(name = "ActiveRegionReads")
+    public Object[][] makeActiveRegionReads() {
+        List<Object[]> tests = new ArrayList<Object[]>();
+        final SAMFileHeader header = ArtificialSAMUtils.createArtificialSamHeader(seq.getSequenceDictionary());
+        for ( final int start : Arrays.asList(1, 10, 100, contigLength - 10, contigLength - 1) ) {
+            for ( final int readStartOffset : Arrays.asList(-100, -10, 0, 10, 100) ) {
+                for ( final int readSize : Arrays.asList(10, 100, 1000) ) {
+                    final GenomeLoc loc = genomeLocParser.createGenomeLocOnContig(contig, start, start + 10);
+
+                    final int readStart = Math.max(start + readStartOffset, 1);
+                    final int readStop = Math.min(readStart + readSize, contigLength);
+                    final int readLength = readStop - readStart + 1;
+                    if ( readLength > 0 ) {
+                        GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(header, "read", 0, readStart, readLength);
+                        final GenomeLoc readLoc = genomeLocParser.createGenomeLoc(read);
+                        // Only reads overlapping the region are legal to add.
+                        if ( readLoc.overlapsP(loc) )
+                            tests.add(new Object[]{loc, read});
+                    }
+                }
+            }
+        }
+
+        return tests.toArray(new Object[][]{});
+    }
+
+    /**
+     * Exercises add/clearReads/addAll/removeAll bookkeeping: the read span
+     * must grow to cover added reads and shrink back when they are removed,
+     * while location/extension equality (equalExceptReads) is preserved.
+     */
+    @Test(enabled = !DEBUG, dataProvider = "ActiveRegionReads")
+    public void testActiveRegionReads(final GenomeLoc loc, final GATKSAMRecord read) throws Exception {
+        final GenomeLoc expectedSpan = loc.union(genomeLocParser.createGenomeLoc(read));
+
+        final ActiveRegion region = new ActiveRegion(loc, null, true, genomeLocParser, 0);
+        final ActiveRegion region2 = new ActiveRegion(loc, null, true, genomeLocParser, 0);
+        Assert.assertEquals(region.getReads(), Collections.emptyList());
+        Assert.assertEquals(region.size(), 0);
+        Assert.assertEquals(region.getExtendedLoc(), loc);
+        Assert.assertEquals(region.getReadSpanLoc(), loc);
+        Assert.assertTrue(region.equalExceptReads(region2));
+
+        // Adding one read widens the read span to cover it.
+        region.add(read);
+        Assert.assertEquals(region.getReads(), Collections.singletonList(read));
+        Assert.assertEquals(region.size(), 1);
+        Assert.assertEquals(region.getExtendedLoc(), loc);
+        Assert.assertEquals(region.getReadSpanLoc(), expectedSpan);
+        Assert.assertTrue(region.equalExceptReads(region2));
+
+        // Clearing restores the empty state and the original span.
+        region.clearReads();
+        Assert.assertEquals(region.getReads(), Collections.emptyList());
+        Assert.assertEquals(region.size(), 0);
+        Assert.assertEquals(region.getExtendedLoc(), loc);
+        Assert.assertEquals(region.getReadSpanLoc(), loc);
+        Assert.assertTrue(region.equalExceptReads(region2));
+
+        // addAll behaves like repeated add.
+        region.addAll(Collections.singleton(read));
+        Assert.assertEquals(region.getReads(), Collections.singletonList(read));
+        Assert.assertEquals(region.size(), 1);
+        Assert.assertEquals(region.getExtendedLoc(), loc);
+        Assert.assertEquals(region.getReadSpanLoc(), expectedSpan);
+        Assert.assertTrue(region.equalExceptReads(region2));
+
+        // Removing an empty set is a no-op.
+        region.removeAll(Collections.<GATKSAMRecord>emptySet());
+        Assert.assertEquals(region.getReads(), Collections.singletonList(read));
+        Assert.assertEquals(region.size(), 1);
+        Assert.assertEquals(region.getExtendedLoc(), loc);
+        Assert.assertEquals(region.getReadSpanLoc(), expectedSpan);
+        Assert.assertTrue(region.equalExceptReads(region2));
+
+        // Removing the read collapses the span back to the region loc.
+        region.removeAll(Collections.singleton(read));
+        Assert.assertEquals(region.getReads(), Collections.emptyList());
+        Assert.assertEquals(region.size(), 0);
+        Assert.assertEquals(region.getExtendedLoc(), loc);
+        Assert.assertEquals(region.getReadSpanLoc(), loc);
+        Assert.assertTrue(region.equalExceptReads(region2));
+
+        // A renamed clone lets us check targeted removal from a two-read region.
+        final GATKSAMRecord read2 = (GATKSAMRecord)read.clone();
+        read2.setReadName(read.getReadName() + ".clone");
+
+        for ( final GATKSAMRecord readToKeep : Arrays.asList(read, read2)) {
+            region.addAll(Arrays.asList(read, read2));
+            final GATKSAMRecord readToDiscard = readToKeep == read ? read2 : read;
+            region.removeAll(Collections.singleton(readToDiscard));
+            Assert.assertEquals(region.getReads(), Arrays.asList(readToKeep));
+            Assert.assertEquals(region.size(), 1);
+            Assert.assertEquals(region.getExtendedLoc(), loc);
+        }
+    }
+
+    // -----------------------------------------------------------------------------------------------
+    //
+    // Make sure bad inputs are properly detected
+    //
+    // -----------------------------------------------------------------------------------------------
+
+    /**
+     * Read pairs added out of coordinate order or on different contigs;
+     * the second add must throw.
+     */
+    @DataProvider(name = "BadReadsTest")
+    public Object[][] makeBadReadsTest() {
+        List<Object[]> tests = new ArrayList<Object[]>();
+        final SAMFileHeader header = ArtificialSAMUtils.createArtificialSamHeader(seq.getSequenceDictionary());
+        tests.add(new Object[]{
+                ArtificialSAMUtils.createArtificialRead(header, "read1", 0, 10, 10),
+                ArtificialSAMUtils.createArtificialRead(header, "read2", 0, 9, 10)});
+        tests.add(new Object[]{
+                ArtificialSAMUtils.createArtificialRead(header, "read1", 0, 10, 10),
+                ArtificialSAMUtils.createArtificialRead(header, "read2", 1, 9, 10)});
+        tests.add(new Object[]{
+                ArtificialSAMUtils.createArtificialRead(header, "read1", 1, 10, 10),
+                ArtificialSAMUtils.createArtificialRead(header, "read2", 0, 9, 10)});
+        return tests.toArray(new Object[][]{});
+    }
+
+    // The second add violates the ordering contract and must raise IAE.
+    @Test(enabled = !DEBUG, dataProvider = "BadReadsTest", expectedExceptions = IllegalArgumentException.class)
+    public void testBadReads(final GATKSAMRecord read1, final GATKSAMRecord read2) {
+        final GenomeLoc loc = genomeLocParser.createGenomeLoc(read1);
+        final ActiveRegion region = new ActiveRegion(loc, null, true, genomeLocParser, 0);
+        region.add(read1);
+        region.add(read2);
+    }
+
+    // -----------------------------------------------------------------------------------------------
+    //
+    // Make sure we can properly cut up an active region based on engine intervals
+    //
+    // -----------------------------------------------------------------------------------------------
+
+    /**
+     * (region, engine intervals, expected split regions): covering interval
+     * leaves the region intact, disjoint intervals drop it, partial overlaps
+     * clip to the intersection, multiple intervals produce multiple pieces.
+     */
+    @DataProvider(name = "SplitActiveRegion")
+    public Object[][] makeSplitActiveRegion() {
+        List<Object[]> tests = new ArrayList<Object[]>();
+
+        final GenomeLoc whole_span = genomeLocParser.createGenomeLoc("20", 1, 500);
+        final GenomeLoc gl_before = genomeLocParser.createGenomeLoc("20", 1, 9);
+        final GenomeLoc gl_after = genomeLocParser.createGenomeLoc("20", 250, 500);
+        final GenomeLoc gl_diff_contig = genomeLocParser.createGenomeLoc("19", 40, 50);
+
+        final int regionStart = 10;
+        final int regionStop = 100;
+        final GenomeLoc region = genomeLocParser.createGenomeLoc("20", regionStart, regionStop);
+
+        for ( final GenomeLoc noEffect : Arrays.asList(whole_span) )
+            tests.add(new Object[]{
+                    region,
+                    Arrays.asList(noEffect),
+                    Arrays.asList(region)});
+
+        for ( final GenomeLoc noOverlap : Arrays.asList(gl_before, gl_after, gl_diff_contig) )
+            tests.add(new Object[]{
+                    region,
+                    Arrays.asList(noOverlap),
+                    Arrays.asList()});
+
+        tests.add(new Object[]{region,
+                Arrays.asList(genomeLocParser.createGenomeLoc("20", 5, 50)),
+                Arrays.asList(genomeLocParser.createGenomeLoc("20", regionStart, 50))});
+
+        tests.add(new Object[]{region,
+                Arrays.asList(genomeLocParser.createGenomeLoc("20", 50, 200)),
+                Arrays.asList(genomeLocParser.createGenomeLoc("20", 50, regionStop))});
+
+        tests.add(new Object[]{region,
+                Arrays.asList(genomeLocParser.createGenomeLoc("20", 40, 50)),
+                Arrays.asList(genomeLocParser.createGenomeLoc("20", 40, 50))});
+
+        tests.add(new Object[]{region,
+                Arrays.asList(genomeLocParser.createGenomeLoc("20", 20, 30), genomeLocParser.createGenomeLoc("20", 40, 50)),
+                Arrays.asList(genomeLocParser.createGenomeLoc("20", 20, 30), genomeLocParser.createGenomeLoc("20", 40, 50))});
+
+        tests.add(new Object[]{region,
+                Arrays.asList(genomeLocParser.createGenomeLoc("20", 1, 30), genomeLocParser.createGenomeLoc("20", 40, 50)),
+                Arrays.asList(genomeLocParser.createGenomeLoc("20", regionStart, 30), genomeLocParser.createGenomeLoc("20", 40, 50))});
+
+        tests.add(new Object[]{region,
+                Arrays.asList(genomeLocParser.createGenomeLoc("20", 1, 30), genomeLocParser.createGenomeLoc("20", 70, 200)),
+                Arrays.asList(genomeLocParser.createGenomeLoc("20", regionStart, 30), genomeLocParser.createGenomeLoc("20", 70, regionStop))});
+
+        tests.add(new Object[]{region,
+                Arrays.asList(genomeLocParser.createGenomeLoc("20", 1, 30), genomeLocParser.createGenomeLoc("20", 40, 50), genomeLocParser.createGenomeLoc("20", 70, 200)),
+                Arrays.asList(genomeLocParser.createGenomeLoc("20", regionStart, 30), genomeLocParser.createGenomeLoc("20", 40, 50), genomeLocParser.createGenomeLoc("20", 70, regionStop))});
+
+        return tests.toArray(new Object[][]{});
+    }
+
+    /**
+     * Splitting must produce the expected sub-regions (in order) and carry
+     * over the active flag and extension; exercised both with and without
+     * per-position supporting states attached.
+     */
+    @Test(dataProvider = "SplitActiveRegion")
+    public void testSplitActiveRegion(final GenomeLoc regionLoc, final List<GenomeLoc> intervalLocs, final List<GenomeLoc> expectedRegionLocs) {
+        for ( final boolean addSubstates : Arrays.asList(true, false) ) {
+            final List<ActivityProfileState> states;
+            if ( addSubstates ) {
+                states = new LinkedList<ActivityProfileState>();
+                for ( int i = 0; i < regionLoc.size(); i++ )
+                    states.add(new ActivityProfileState(genomeLocParser.createGenomeLoc(regionLoc.getContig(), regionLoc.getStart() + i), 1.0));
+            } else {
+                states = null;
+            }
+
+            final ActiveRegion region = new ActiveRegion(regionLoc, states, true, genomeLocParser, 0);
+            final GenomeLocSortedSet intervals = new GenomeLocSortedSet(genomeLocParser,  intervalLocs);
+            final List<ActiveRegion> regions = region.splitAndTrimToIntervals(intervals);
+
+            Assert.assertEquals(regions.size(), expectedRegionLocs.size(), "Wrong number of split locations");
+            for ( int i = 0; i < expectedRegionLocs.size(); i++ ) {
+                final GenomeLoc expected = expectedRegionLocs.get(i);
+                final ActiveRegion actual = regions.get(i);
+                Assert.assertEquals(actual.getLocation(), expected, "Bad region after split");
+                Assert.assertEquals(actual.isActive(), region.isActive());
+                Assert.assertEquals(actual.getExtension(), region.getExtension());
+            }
+        }
+    }
+
+    // -----------------------------------------------------------------------------------------------
+    //
+    // Make sure we can properly trim an active region to a desired span
+    //
+    // -----------------------------------------------------------------------------------------------
+
+    /**
+     * (region, extension, desired span, expected trimmed region, expected
+     * extension): trimming clips to the intersection and computes how much
+     * extension is still needed to reach the desired span.
+     */
+    @DataProvider(name = "TrimActiveRegionData")
+    public Object[][] makeTrimActiveRegionData() {
+        List<Object[]> tests = new ArrayList<Object[]>();
+
+        // fully enclosed within active region
+        tests.add(new Object[]{
+                genomeLocParser.createGenomeLoc("20", 10, 20), 10,
+                genomeLocParser.createGenomeLoc("20", 15, 16),
+                genomeLocParser.createGenomeLoc("20", 15, 16), 0});
+
+        tests.add(new Object[]{
+                genomeLocParser.createGenomeLoc("20", 10, 20), 10,
+                genomeLocParser.createGenomeLoc("20", 10, 15),
+                genomeLocParser.createGenomeLoc("20", 10, 15), 0});
+
+        tests.add(new Object[]{
+                genomeLocParser.createGenomeLoc("20", 10, 20), 10,
+                genomeLocParser.createGenomeLoc("20", 15, 20),
+                genomeLocParser.createGenomeLoc("20", 15, 20), 0});
+
+        // needs extra padding on the right
+        tests.add(new Object[]{
+                genomeLocParser.createGenomeLoc("20", 10, 20), 10,
+                genomeLocParser.createGenomeLoc("20", 15, 25),
+                genomeLocParser.createGenomeLoc("20", 15, 20), 5});
+
+        // needs extra padding on the left
+        tests.add(new Object[]{
+                genomeLocParser.createGenomeLoc("20", 10, 20), 10,
+                genomeLocParser.createGenomeLoc("20", 5, 15),
+                genomeLocParser.createGenomeLoc("20", 10, 15), 5});
+
+        // needs extra padding on both
+        tests.add(new Object[]{
+                genomeLocParser.createGenomeLoc("20", 10, 20), 10,
+                genomeLocParser.createGenomeLoc("20", 7, 21),
+                genomeLocParser.createGenomeLoc("20", 10, 20), 3});
+        tests.add(new Object[]{
+                genomeLocParser.createGenomeLoc("20", 10, 20), 10,
+                genomeLocParser.createGenomeLoc("20", 9, 23),
+                genomeLocParser.createGenomeLoc("20", 10, 20), 3});
+
+        // desired span captures everything, so we're returning everything.  Tests that extension is set correctly
+        tests.add(new Object[]{
+                genomeLocParser.createGenomeLoc("20", 10, 20), 10,
+                genomeLocParser.createGenomeLoc("20", 1, 50),
+                genomeLocParser.createGenomeLoc("20", 10, 20), 10});
+
+        // At the start of the chromosome, potentially a bit weird
+        tests.add(new Object[]{
+                genomeLocParser.createGenomeLoc("20", 1, 10), 10,
+                genomeLocParser.createGenomeLoc("20", 1, 50),
+                genomeLocParser.createGenomeLoc("20", 1, 10), 10});
+
+        return tests.toArray(new Object[][]{});
+    }
+
+    // Trimmed region and its recomputed extension must match expectations.
+    @Test(dataProvider = "TrimActiveRegionData")
+    public void testTrimActiveRegion(final GenomeLoc regionLoc, final int extension, final GenomeLoc desiredSpan, final GenomeLoc expectedActiveRegion, final int expectedExtension) {
+        final ActiveRegion region = new ActiveRegion(regionLoc, Collections.<ActivityProfileState>emptyList(), true, genomeLocParser, extension);
+        final ActiveRegion trimmed = region.trim(desiredSpan);
+        Assert.assertEquals(trimmed.getLocation(), expectedActiveRegion, "Incorrect region");
+        Assert.assertEquals(trimmed.getExtension(), expectedExtension, "Incorrect region");
+    }
+}
\ No newline at end of file
diff --git a/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/activeregion/ActivityProfileStateUnitTest.java b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/activeregion/ActivityProfileStateUnitTest.java
new file mode 100644
index 0000000..dfc30ae
--- /dev/null
+++ b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/activeregion/ActivityProfileStateUnitTest.java
@@ -0,0 +1,92 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.activeregion;
+
+import htsjdk.samtools.SAMFileHeader;
+import org.broadinstitute.gatk.utils.GenomeLoc;
+import org.broadinstitute.gatk.utils.GenomeLocParser;
+import org.broadinstitute.gatk.utils.sam.ArtificialSAMUtils;
+import org.testng.Assert;
+import org.testng.annotations.BeforeClass;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+import java.io.FileNotFoundException;
+import java.util.Arrays;
+import java.util.LinkedList;
+import java.util.List;
+
+/**
+ * Unit tests for {@link ActivityProfileState}.
+ *
+ * User: depristo
+ * Date: 1/17/13
+ * Time: 2:30 PM
+ */
+public class ActivityProfileStateUnitTest {
+    // Parser built over a single artificial 100bp contig; set up once.
+    private GenomeLocParser genomeLocParser;
+
+    @BeforeClass
+    public void init() throws FileNotFoundException {
+        // One contig of 100 bases is plenty for single-position states.
+        final SAMFileHeader header = ArtificialSAMUtils.createArtificialSamHeader(1, 1, 100);
+        genomeLocParser = new GenomeLocParser(header.getSequenceDictionary());
+    }
+
+    /**
+     * Cross product of locations x probabilities x state types x result
+     * values, plus one (null state, null value) case per location/probability.
+     */
+    @DataProvider(name = "ActiveProfileResultProvider")
+    public Object[][] makeActiveProfileResultProvider() {
+        final List<Object[]> cases = new LinkedList<Object[]>();
+
+        final String chr = genomeLocParser.getContigs().getSequence(0).getSequenceName();
+        final List<GenomeLoc> positions = Arrays.asList(
+                genomeLocParser.createGenomeLoc(chr, 10, 10),
+                genomeLocParser.createGenomeLoc(chr, 100, 100));
+        for ( final GenomeLoc loc : positions ) {
+            for ( final double prob : Arrays.asList(0.0, 0.5, 1.0) ) {
+                for ( final ActivityProfileState.Type state : ActivityProfileState.Type.values() ) {
+                    for ( final Number value : Arrays.asList(1, 2, 4) ) {
+                        cases.add(new Object[]{ loc, prob, state, value});
+                    }
+                }
+                cases.add(new Object[]{ loc, prob, null, null});
+            }
+        }
+
+        return cases.toArray(new Object[][]{});
+    }
+
+    /**
+     * A state must echo back its location and probability, default its type
+     * to NONE (and its value to null) when built via the two-arg constructor,
+     * and otherwise carry the supplied type and value through.
+     */
+    @Test(dataProvider = "ActiveProfileResultProvider")
+    public void testActiveProfileResultProvider(GenomeLoc loc, final double prob, ActivityProfileState.Type maybeState, final Number maybeNumber) {
+        final boolean untyped = maybeState == null;
+        final ActivityProfileState apr = untyped
+                ? new ActivityProfileState(loc, prob)
+                : new ActivityProfileState(loc, prob, maybeState, maybeNumber);
+
+        Assert.assertEquals(apr.getLoc(), loc);
+        Assert.assertNotNull(apr.toString());
+        Assert.assertEquals(apr.isActiveProb, prob);
+        Assert.assertEquals(apr.resultState, untyped ? ActivityProfileState.Type.NONE : maybeState);
+        Assert.assertEquals(apr.resultValue, untyped ? null : maybeNumber);
+    }
+}
diff --git a/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/activeregion/ActivityProfileUnitTest.java b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/activeregion/ActivityProfileUnitTest.java
new file mode 100644
index 0000000..7ee1040
--- /dev/null
+++ b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/activeregion/ActivityProfileUnitTest.java
@@ -0,0 +1,491 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.activeregion;
+
+
+// the imports for unit testing.
+
+
+import htsjdk.samtools.reference.ReferenceSequenceFile;
+import org.broadinstitute.gatk.utils.BaseTest;
+import org.broadinstitute.gatk.utils.GenomeLoc;
+import org.broadinstitute.gatk.utils.GenomeLocParser;
+import org.broadinstitute.gatk.utils.MathUtils;
+import org.broadinstitute.gatk.utils.Utils;
+import org.broadinstitute.gatk.utils.fasta.CachingIndexedFastaSequenceFile;
+import org.testng.Assert;
+import org.testng.annotations.BeforeClass;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.util.*;
+
+
+public class ActivityProfileUnitTest extends BaseTest {
+    private final static boolean DEBUG = false;
+    private GenomeLocParser genomeLocParser;
+    private GenomeLoc startLoc;
+
+    private final static int MAX_PROB_PROPAGATION_DISTANCE = 50;
+    private final static double ACTIVE_PROB_THRESHOLD= 0.002;
+
+    @BeforeClass
+    public void init() throws FileNotFoundException {
+        // build a GenomeLocParser over the hg18 reference so tests can create real GenomeLocs;
+        // startLoc anchors all synthetic regions at chr1:1-100
+        ReferenceSequenceFile seq = new CachingIndexedFastaSequenceFile(new File(hg18Reference));
+        genomeLocParser = new GenomeLocParser(seq);
+        startLoc = genomeLocParser.createGenomeLoc("chr1", 1, 1, 100);
+    }
+
+    // --------------------------------------------------------------------------------
+    //
+    // Basic tests Provider
+    //
+    // --------------------------------------------------------------------------------
+
+    // Test-case holder: a list of per-site activity probabilities plus the active/inactive
+    // regions we expect an ActivityProfile (plain or band-pass) to derive from them.
+    private class BasicActivityProfileTestProvider extends TestDataProvider {
+        List<Double> probs;
+        List<ActiveRegion> expectedRegions;
+        int extension = 0;
+        GenomeLoc regionStart = startLoc;
+        final ProfileType type;
+
+        public BasicActivityProfileTestProvider(final ProfileType type, final List<Double> probs, boolean startActive, int ... startsAndStops) {
+            super(BasicActivityProfileTestProvider.class);
+            this.type = type;
+            this.probs = probs;
+            this.expectedRegions = toRegions(startActive, startsAndStops);
+            setName(getName());
+        }
+
+        private String getName() {
+            return String.format("type=%s probs=%s expectedRegions=%s", type, Utils.join(",", probs), Utils.join(",", expectedRegions));
+        }
+
+        // Instantiate the profile variant under test
+        public ActivityProfile makeProfile() {
+            switch ( type ) {
+                case Base: return new ActivityProfile(genomeLocParser, MAX_PROB_PROPAGATION_DISTANCE, ACTIVE_PROB_THRESHOLD);
+                case BandPass:
+                    // zero size => equivalent to ActivityProfile
+                    return new BandPassActivityProfile(genomeLocParser, null, MAX_PROB_PROPAGATION_DISTANCE, ACTIVE_PROB_THRESHOLD, 0, 0.01, false);
+                default: throw new IllegalStateException(type.toString());
+            }
+        }
+
+        // Turn offsets [s0, s1, s2, ...] into regions [s0,s1), [s1,s2), ... with
+        // alternating active/inactive status, starting from isActive
+        private List<ActiveRegion> toRegions(boolean isActive, int[] startsAndStops) {
+            List<ActiveRegion> l = new ArrayList<ActiveRegion>();
+            for ( int i = 0; i < startsAndStops.length - 1; i++) {
+                int start = regionStart.getStart() + startsAndStops[i];
+                int end = regionStart.getStart() + startsAndStops[i+1] - 1;
+                GenomeLoc activeLoc = genomeLocParser.createGenomeLoc(regionStart.getContig(), start, end);
+                ActiveRegion r = new ActiveRegion(activeLoc, Collections.<ActivityProfileState>emptyList(), isActive, genomeLocParser, extension);
+                l.add(r);
+                isActive = ! isActive;
+            }
+            return l;
+        }
+    }
+
+    // Which ActivityProfile implementation a test case targets
+    private enum ProfileType {
+        Base, BandPass
+    }
+
+    // Registers the basic alternating-activity scenarios for both profile types;
+    // TestDataProvider collects instances created here as test cases.
+    @DataProvider(name = "BasicActivityProfileTestProvider")
+    public Object[][] makeQualIntervalTestProvider() {
+        for ( final ProfileType type : ProfileType.values() ) {
+            new BasicActivityProfileTestProvider(type, Arrays.asList(1.0), true, 0, 1);
+            new BasicActivityProfileTestProvider(type, Arrays.asList(1.0, 0.0), true, 0, 1, 2);
+            new BasicActivityProfileTestProvider(type, Arrays.asList(0.0, 1.0), false, 0, 1, 2);
+            new BasicActivityProfileTestProvider(type, Arrays.asList(1.0, 0.0, 1.0), true, 0, 1, 2, 3);
+            new BasicActivityProfileTestProvider(type, Arrays.asList(1.0, 1.0, 1.0), true, 0, 3);
+        }
+
+        return BasicActivityProfileTestProvider.getTests(BasicActivityProfileTestProvider.class);
+    }
+
+    @Test(enabled = ! DEBUG, dataProvider = "BasicActivityProfileTestProvider")
+    public void testBasicActivityProfile(BasicActivityProfileTestProvider cfg) {
+        ActivityProfile profile = cfg.makeProfile();
+
+        // a freshly constructed profile holds no states
+        Assert.assertTrue(profile.isEmpty());
+
+        Assert.assertEquals(profile.parser, genomeLocParser);
+
+        // add one single-base state per probability and confirm the profile grows
+        for ( int i = 0; i < cfg.probs.size(); i++ ) {
+            double p = cfg.probs.get(i);
+            GenomeLoc loc = genomeLocParser.createGenomeLoc(cfg.regionStart.getContig(), cfg.regionStart.getStart() + i, cfg.regionStart.getStart() + i);
+            profile.add(new ActivityProfileState(loc, p));
+            Assert.assertFalse(profile.isEmpty(), "Profile shouldn't be empty after adding a state");
+        }
+        Assert.assertEquals(profile.regionStartLoc, genomeLocParser.createGenomeLoc(cfg.regionStart.getContig(), cfg.regionStart.getStart(), cfg.regionStart.getStart() ), "Start loc should be the start of the region");
+
+        Assert.assertEquals(profile.size(), cfg.probs.size(), "Should have exactly the number of states we expected to add");
+        assertProbsAreEqual(profile.stateList, cfg.probs);
+
+        // TODO -- re-enable tests
+        //assertRegionsAreEqual(profile.createActiveRegions(0, 100), cfg.expectedRegions);
+    }
+
+    /** Assert the two region lists have the same length and matching regions (reads ignored). */
+    private void assertRegionsAreEqual(List<ActiveRegion> actual, List<ActiveRegion> expected) {
+        Assert.assertEquals(actual.size(), expected.size());
+        int i = 0;
+        for ( final ActiveRegion actualRegion : actual ) {
+            Assert.assertTrue(actualRegion.equalExceptReads(expected.get(i++)));
+        }
+    }
+
+    /** Assert each state's active probability matches the expected value, element-wise. */
+    private void assertProbsAreEqual(List<ActivityProfileState> actual, List<Double> expected) {
+        Assert.assertEquals(actual.size(), expected.size());
+        int i = 0;
+        for ( final ActivityProfileState state : actual ) {
+            Assert.assertEquals(state.isActiveProb, expected.get(i++));
+        }
+    }
+
+    // -------------------------------------------------------------------------------------
+    //
+    // Hardcore tests for adding to the profile and constructing active regions
+    //
+    // -------------------------------------------------------------------------------------
+
+    private static class SizeToStringList<T> extends ArrayList<T> {
+        @Override public String toString() { return "List[" + size() + "]"; }
+    }
+
+    // Cross product of start position, region size, max region size, pop timing,
+    // forced conversion, starting activity, and number of alternating parts.
+    // The commented-out single-value loops below mirror the real ones for quick debugging.
+    @DataProvider(name = "RegionCreationTests")
+    public Object[][] makeRegionCreationTests() {
+        final List<Object[]> tests = new LinkedList<Object[]>();
+
+        final int contigLength = genomeLocParser.getContigs().getSequences().get(0).getSequenceLength();
+        for ( int start : Arrays.asList(1, 10, 100, contigLength - 100, contigLength - 10) ) {
+            for ( int regionSize : Arrays.asList(1, 10, 100, 1000, 10000) ) {
+                for ( int maxRegionSize : Arrays.asList(10, 50, 200) ) {
+                    for ( final boolean waitUntilEnd : Arrays.asList(false, true) ) {
+                        for ( final boolean forceConversion : Arrays.asList(false, true) ) {
+                            // what do I really want to test here?  I'd like to test a few cases:
+                            // -- region is all active (1.0)
+                            // -- region is all inactive (0.0)
+                            // -- cut the interval into 1, 2, 3, 4, 5 ... 10 regions, each with alternating activity values
+                            for ( final boolean startWithActive : Arrays.asList(true, false) ) {
+                                for ( int nParts : Arrays.asList(1, 2, 3, 4, 5, 7, 10, 11, 13) ) {
+
+//        for ( int start : Arrays.asList(1) ) {
+//            for ( int regionSize : Arrays.asList(100) ) {
+//                for ( int maxRegionSize : Arrays.asList(10) ) {
+//                    for ( final boolean waitUntilEnd : Arrays.asList(true) ) {
+//                        for ( final boolean forceConversion : Arrays.asList(false) ) {
+//                            for ( final boolean startWithActive : Arrays.asList(true) ) {
+//                                for ( int nParts : Arrays.asList(3) ) {
+                                    // clamp so the synthetic region never runs off the contig
+                                    regionSize = Math.min(regionSize, contigLength - start);
+                                    final List<Boolean> regions = makeRegions(regionSize, startWithActive, nParts);
+                                    tests.add(new Object[]{ start, regions, maxRegionSize, nParts, forceConversion, waitUntilEnd });
+                                }
+                            }
+                        }
+                    }
+                }
+            }
+        }
+
+        return tests.toArray(new Object[][]{});
+    }
+
+    /**
+     * Build a per-site activity mask of length totalRegionSize, split into roughly
+     * nParts segments whose active/inactive status alternates, beginning with startWithActive.
+     */
+    private List<Boolean> makeRegions(final int totalRegionSize,
+                                      final boolean startWithActive,
+                                      final int nParts) {
+        final List<Boolean> regions = new SizeToStringList<Boolean>();
+        final int segmentSize = Math.max(totalRegionSize / nParts, 1);
+
+        boolean active = startWithActive;
+        int sitesAdded = 0;
+        while ( sitesAdded < totalRegionSize ) {
+            for ( int j = 0; j < segmentSize && sitesAdded < totalRegionSize; j++, sitesAdded++ ) {
+                regions.add(active);
+            }
+            active = ! active;
+        }
+
+        return regions;
+    }
+
+
+    // Streams a synthetic activity mask into the profile and pops regions either
+    // incrementally (per site) or all at the end, verifying every popped region via
+    // assertGoodRegions and that all expected sites were eventually covered.
+    @Test(enabled = !DEBUG, dataProvider = "RegionCreationTests")
+    public void testRegionCreation(final int start, final List<Boolean> probs, int maxRegionSize, final int nParts, final boolean forceConversion, final boolean waitUntilEnd) {
+        final ActivityProfile profile = new ActivityProfile(genomeLocParser, MAX_PROB_PROPAGATION_DISTANCE, ACTIVE_PROB_THRESHOLD);
+        Assert.assertNotNull(profile.toString());
+
+        final String contig = genomeLocParser.getContigs().getSequences().get(0).getSequenceName();
+        final List<Boolean> seenSites = new ArrayList<Boolean>(Collections.nCopies(probs.size(), false));
+        ActiveRegion lastRegion = null;
+        for ( int i = 0; i < probs.size(); i++ ) {
+            final boolean isActive = probs.get(i);
+            final GenomeLoc loc = genomeLocParser.createGenomeLoc(contig, i + start);
+            final ActivityProfileState state = new ActivityProfileState(loc, isActive ? 1.0 : 0.0);
+            profile.add(state);
+            Assert.assertNotNull(profile.toString());
+
+            if ( ! waitUntilEnd ) {
+                // incremental popping: regions may become ready after each added site
+                final List<ActiveRegion> regions = profile.popReadyActiveRegions(0, 1, maxRegionSize, false);
+                lastRegion = assertGoodRegions(start, regions, maxRegionSize, lastRegion, probs, seenSites);
+            }
+        }
+
+        if ( waitUntilEnd || forceConversion ) {
+            final List<ActiveRegion> regions = profile.popReadyActiveRegions(0, 1, maxRegionSize, forceConversion);
+            lastRegion = assertGoodRegions(start, regions, maxRegionSize, lastRegion, probs, seenSites);
+        }
+
+        for ( int i = 0; i < probs.size(); i++ ) {
+            if ( forceConversion || (i + maxRegionSize + profile.getMaxProbPropagationDistance() < probs.size()))
+                // only require a site to be seen if we are forcing conversion or the site is more than maxRegionSize from the end
+                Assert.assertTrue(seenSites.get(i), "Missed site " + i);
+        }
+
+        Assert.assertNotNull(profile.toString());
+    }
+
+    /**
+     * Validate that popped regions are well-formed: non-empty, no larger than maxRegionSize,
+     * contiguous with the previously seen region, and covering only unseen sites whose
+     * activity matches probs. Marks covered sites in seenSites and returns the last region
+     * processed so contiguity can be checked across successive calls.
+     */
+    private ActiveRegion assertGoodRegions(final int start, final List<ActiveRegion> regions, final int maxRegionSize, ActiveRegion lastRegion, final List<Boolean> probs, final List<Boolean> seenSites) {
+        for ( final ActiveRegion region : regions ) {
+            Assert.assertTrue(region.getLocation().size() > 0, "Region " + region + " has a bad size");
+            // fixed message grammar: "big than" -> "bigger than"
+            Assert.assertTrue(region.getLocation().size() <= maxRegionSize, "Region " + region + " has a bad size: it's bigger than the max region size " + maxRegionSize);
+            if ( lastRegion != null ) {
+                // fixed missing space before the concatenated lastRegion in the failure message
+                Assert.assertTrue(region.getLocation().getStart() == lastRegion.getLocation().getStop() + 1, "Region " + region + " doesn't start immediately after previous region " + lastRegion);
+            }
+
+            // check that all active bases are actually active
+            final int regionOffset = region.getLocation().getStart() - start;
+            Assert.assertTrue(regionOffset >= 0 && regionOffset < probs.size(), "Region " + region + " has a bad offset w.r.t. start");
+            for ( int j = 0; j < region.getLocation().size(); j++ ) {
+                final int siteOffset = j + regionOffset;
+                Assert.assertEquals(region.isActive(), probs.get(siteOffset).booleanValue());
+                Assert.assertFalse(seenSites.get(siteOffset), "Site " + j + " in " + region + " was seen already");
+                seenSites.set(siteOffset, true);
+            }
+
+            lastRegion = region;
+        }
+
+        return lastRegion;
+    }
+
+    // -------------------------------------------------------------------------------------
+    //
+    // Hardcore tests for adding to the profile and constructing active regions
+    //
+    // -------------------------------------------------------------------------------------
+
+    // Cross product of start position, number of preceding inactive sites, and soft-clip size;
+    // the commented-out single-value loops mirror the real ones for quick debugging.
+    @DataProvider(name = "SoftClipsTest")
+    public Object[][] makeSoftClipsTest() {
+        final List<Object[]> tests = new LinkedList<Object[]>();
+
+        final int contigLength = genomeLocParser.getContigs().getSequences().get(0).getSequenceLength();
+        for ( int start : Arrays.asList(1, 10, 100, contigLength - 100, contigLength - 10, contigLength - 1) ) {
+            for ( int precedingSites: Arrays.asList(0, 1, 10) ) {
+                if ( precedingSites + start < contigLength ) {
+                    for ( int softClipSize : Arrays.asList(1, 2, 10, 100) ) {
+//        for ( int start : Arrays.asList(10) ) {
+//            for ( int precedingSites: Arrays.asList(10) ) {
+//                for ( int softClipSize : Arrays.asList(1) ) {
+                        tests.add(new Object[]{ start, precedingSites, softClipSize });
+                    }
+                }
+            }
+        }
+
+        return tests.toArray(new Object[][]{});
+    }
+
+    // Adds nPrecedingSites inactive states followed by one HIGH_QUALITY_SOFT_CLIPS state,
+    // then checks that activity probability is propagated only within the soft-clip range
+    // (capped by the profile's max propagation distance and the contig end).
+    @Test(enabled = ! DEBUG, dataProvider = "SoftClipsTest")
+    public void testSoftClips(final int start, int nPrecedingSites, final int softClipSize) {
+        final ActivityProfile profile = new ActivityProfile(genomeLocParser, MAX_PROB_PROPAGATION_DISTANCE, ACTIVE_PROB_THRESHOLD);
+
+        final int contigLength = genomeLocParser.getContigs().getSequences().get(0).getSequenceLength();
+        final String contig = genomeLocParser.getContigs().getSequences().get(0).getSequenceName();
+        for ( int i = 0; i < nPrecedingSites; i++ ) {
+            final GenomeLoc loc = genomeLocParser.createGenomeLoc(contig, i + start);
+            final ActivityProfileState state = new ActivityProfileState(loc, 0.0);
+            profile.add(state);
+        }
+
+        final GenomeLoc softClipLoc = genomeLocParser.createGenomeLoc(contig, nPrecedingSites + start);
+        profile.add(new ActivityProfileState(softClipLoc, 1.0, ActivityProfileState.Type.HIGH_QUALITY_SOFT_CLIPS, softClipSize));
+
+        // propagation is bounded by the profile's max propagation distance
+        final int actualNumOfSoftClips = Math.min(softClipSize, profile.getMaxProbPropagationDistance());
+        if ( nPrecedingSites == 0 ) {
+            // clamp to the contig end when the soft clip reaches past it
+            final int profileSize = Math.min(start + actualNumOfSoftClips, contigLength) - start + 1;
+            Assert.assertEquals(profile.size(), profileSize, "Wrong number of states in the profile");
+        }
+
+        for ( int i = 0; i < profile.size(); i++ ) {
+            final ActivityProfileState state = profile.getStateList().get(i);
+            final boolean withinSCRange = state.getLoc().distance(softClipLoc) <= actualNumOfSoftClips;
+            if ( withinSCRange ) {
+                Assert.assertTrue(state.isActiveProb > 0.0, "active prob should be changed within soft clip size");
+            } else {
+                Assert.assertEquals(state.isActiveProb, 0.0, "active prob shouldn't be changed outside of clip size");
+            }
+        }
+    }
+
+    // -------------------------------------------------------------------------------------
+    //
+    // Tests to ensure we cut large active regions in the right place
+    //
+    // -------------------------------------------------------------------------------------
+
+    /** Append one probability value to the list (single seam for building profiles). */
+    private void addProb(final List<Double> l, final double v) {
+        l.add(v);
+    }
+
+    // Builds {minRegionSize, maxRegionSize, expectedCutSize, probs} cases covering flat,
+    // point, increasing, decreasing, two-peak, and two-minima activity profiles.
+    // The commented-out single-value loops mirror the real ones for quick debugging.
+    @DataProvider(name = "ActiveRegionCutTests")
+    public Object[][] makeActiveRegionCutTests() {
+        final List<Object[]> tests = new LinkedList<Object[]>();
+
+//        for ( final int activeRegionSize : Arrays.asList(30) ) {
+//            for ( final int minRegionSize : Arrays.asList(5) ) {
+        for ( final int activeRegionSize : Arrays.asList(10, 12, 20, 30, 40) ) {
+            for ( final int minRegionSize : Arrays.asList(1, 5, 10) ) {
+                final int maxRegionSize = activeRegionSize * 2 / 3;
+                if ( minRegionSize >= maxRegionSize ) continue;
+                { // test flat activity profile
+                    final List<Double> probs = Collections.nCopies(activeRegionSize, 1.0);
+                    tests.add(new Object[]{minRegionSize, maxRegionSize, maxRegionSize, probs});
+                }
+
+                { // test point profile is properly handled
+                    for ( int end = 1; end < activeRegionSize; end++ ) {
+                        final List<Double> probs = Collections.nCopies(end, 1.0);
+                        tests.add(new Object[]{minRegionSize, maxRegionSize, Math.min(end, maxRegionSize), probs});
+                    }
+                }
+
+                { // test increasing activity profile
+                    final List<Double> probs = new ArrayList<Double>(activeRegionSize);
+                    for ( int i = 0; i < activeRegionSize; i++ ) {
+                        addProb(probs, (1.0*(i+1))/ activeRegionSize);
+                    }
+                    tests.add(new Object[]{minRegionSize, maxRegionSize, maxRegionSize, probs});
+                }
+
+                { // test decreasing activity profile
+                    final List<Double> probs = new ArrayList<Double>(activeRegionSize);
+                    for ( int i = 0; i < activeRegionSize; i++ ) {
+                        addProb(probs, 1 - (1.0*(i+1))/ activeRegionSize);
+                    }
+                    tests.add(new Object[]{minRegionSize, maxRegionSize, maxRegionSize, probs});
+                }
+
+                { // test two peaks
+//                    for ( final double rootSigma : Arrays.asList(2.0) ) {
+//                        int maxPeak1 = 9; {
+//                            int maxPeak2 = 16; {
+                    for ( final double rootSigma : Arrays.asList(1.0, 2.0, 3.0) ) {
+                        for ( int maxPeak1 = 0; maxPeak1 < activeRegionSize / 2; maxPeak1++ ) {
+                            for ( int maxPeak2 = activeRegionSize / 2 + 1; maxPeak2 < activeRegionSize; maxPeak2++ ) {
+                                final double[] gauss1 = makeGaussian(maxPeak1, activeRegionSize, rootSigma);
+                                final double[] gauss2 = makeGaussian(maxPeak2, activeRegionSize, rootSigma+1);
+                                final List<Double> probs = new ArrayList<Double>(activeRegionSize);
+                                for ( int i = 0; i < activeRegionSize; i++ ) {
+                                    addProb(probs, gauss1[i] + gauss2[i]);
+                                }
+                                final int cutSite = findCutSiteForTwoMaxPeaks(probs, minRegionSize);
+                                if ( cutSite != -1 && cutSite < maxRegionSize )
+                                    tests.add(new Object[]{minRegionSize, maxRegionSize, Math.max(cutSite, minRegionSize), probs});
+                            }
+                        }
+                    }
+                }
+
+                { // test that the lowest of two minima is taken
+                    // looks like a bunch of 1s, 0.5, some 1.0s, 0.75, some more 1s
+//                    int firstMin = 0; {
+//                    int secondMin = 4; {
+                    for ( int firstMin = 1; firstMin < activeRegionSize; firstMin++ ) {
+                        for ( int secondMin = firstMin + 1; secondMin < activeRegionSize; secondMin++ ) {
+                            final List<Double> probs = new ArrayList<Double>(Collections.nCopies(activeRegionSize, 1.0));
+                            probs.set(firstMin, 0.5);
+                            probs.set(secondMin, 0.75);
+                            final int expectedCut;
+                            if ( firstMin + 1 < minRegionSize ) {
+                                if ( firstMin == secondMin - 1 ) // edge case for non-min at minRegionSize
+                                    expectedCut = maxRegionSize;
+                                else
+                                    expectedCut = secondMin + 1 > maxRegionSize ? maxRegionSize : ( secondMin + 1 < minRegionSize ? maxRegionSize : secondMin + 1);
+                            } else if ( firstMin + 1 > maxRegionSize )
+                                expectedCut = maxRegionSize;
+                            else {
+                                expectedCut = firstMin + 1;
+                            }
+
+                            // NOTE: removed a dead no-effect statement here
+                            // (a bare "Math.min(firstMin + 1, maxRegionSize);" whose result was discarded)
+                            tests.add(new Object[]{minRegionSize, maxRegionSize, expectedCut, probs});
+                        }
+                    }
+                }
+            }
+        }
+
+        return tests.toArray(new Object[][]{});
+    }
+
+    /**
+     * Build an array of length range holding a Gaussian bump centered at mean with the
+     * given sigma, offset everywhere by the activity threshold so no site is exactly zero.
+     */
+    private double[] makeGaussian(final int mean, final int range, final double sigma) {
+        final double[] gauss = new double[range];
+        for ( int site = 0; site < range; site++ ) {
+            gauss[site] = MathUtils.normalDistribution(mean, sigma, site) + ACTIVE_PROB_THRESHOLD;
+        }
+        return gauss;
+    }
+
+    private int findCutSiteForTwoMaxPeaks(final List<Double> probs, final int minRegionSize) {
+        for ( int i = probs.size() - 2; i > minRegionSize; i-- ) {
+            double prev = probs.get(i - 1);
+            double next = probs.get(i + 1);
+            double cur = probs.get(i);
+            if ( cur < next && cur < prev )
+                return i + 1;
+        }
+
+        return -1;
+    }
+
+    // Streams the probability profile (padded with 0.0 past its end) into a fresh profile,
+    // pops the ready regions, and checks the first region was cut at the expected size.
+    @Test(dataProvider = "ActiveRegionCutTests")
+    public void testActiveRegionCutTests(final int minRegionSize, final int maxRegionSize, final int expectedRegionSize, final List<Double> probs) {
+        final ActivityProfile profile = new ActivityProfile(genomeLocParser, MAX_PROB_PROPAGATION_DISTANCE, ACTIVE_PROB_THRESHOLD);
+
+        final String contig = genomeLocParser.getContigs().getSequences().get(0).getSequenceName();
+        // add enough sites beyond maxRegionSize that the first region is guaranteed to be ready
+        for ( int i = 0; i <= maxRegionSize + profile.getMaxProbPropagationDistance(); i++ ) {
+            final GenomeLoc loc = genomeLocParser.createGenomeLoc(contig, i + 1);
+            final double prob = i < probs.size() ? probs.get(i) : 0.0;
+            final ActivityProfileState state = new ActivityProfileState(loc, prob);
+            profile.add(state);
+        }
+
+        final List<ActiveRegion> regions = profile.popReadyActiveRegions(0, minRegionSize, maxRegionSize, false);
+        // fixed message: the check is >= 1, so the old "Should only be one regions" text was wrong
+        Assert.assertTrue(regions.size() >= 1, "Should be at least one region for this test");
+        final ActiveRegion region = regions.get(0);
+        Assert.assertEquals(region.getLocation().getStart(), 1, "Region should start at 1");
+        Assert.assertEquals(region.getLocation().size(), expectedRegionSize, "Incorrect region size; cut must have been incorrect");
+    }
+}
\ No newline at end of file
diff --git a/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/activeregion/BandPassActivityProfileUnitTest.java b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/activeregion/BandPassActivityProfileUnitTest.java
new file mode 100644
index 0000000..fa75e71
--- /dev/null
+++ b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/activeregion/BandPassActivityProfileUnitTest.java
@@ -0,0 +1,339 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.activeregion;
+
+
+// the imports for unit testing.
+
+
+import htsjdk.samtools.reference.ReferenceSequenceFile;
+import org.apache.commons.lang.ArrayUtils;
+import htsjdk.tribble.readers.LineIterator;
+import org.broadinstitute.gatk.utils.variant.VCIterable;
+import org.broadinstitute.gatk.utils.BaseTest;
+import org.broadinstitute.gatk.utils.GenomeLoc;
+import org.broadinstitute.gatk.utils.GenomeLocParser;
+import org.broadinstitute.gatk.utils.MathUtils;
+import org.broadinstitute.gatk.utils.collections.Pair;
+import org.broadinstitute.gatk.utils.fasta.CachingIndexedFastaSequenceFile;
+import htsjdk.variant.variantcontext.VariantContext;
+import htsjdk.variant.vcf.VCFCodec;
+import htsjdk.variant.vcf.VCFHeader;
+import org.testng.Assert;
+import org.testng.annotations.BeforeClass;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.LinkedList;
+import java.util.List;
+
+
+public class BandPassActivityProfileUnitTest extends BaseTest {
+    private final static boolean DEBUG = false;
+    private GenomeLocParser genomeLocParser;
+
+    private final static int MAX_PROB_PROPAGATION_DISTANCE = 50;
+    private final static double ACTIVE_PROB_THRESHOLD= 0.002;
+
+    /**
+     * Opens the b37 reference fasta and builds the GenomeLocParser shared by all tests
+     * in this class.
+     *
+     * @throws FileNotFoundException if the reference fasta cannot be opened
+     */
+    @BeforeClass
+    public void init() throws FileNotFoundException {
+        // sequence
+        ReferenceSequenceFile seq = new CachingIndexedFastaSequenceFile(new File(b37KGReference));
+        genomeLocParser = new GenomeLocParser(seq);
+    }
+
+    /**
+     * Cartesian product of start positions, preceding-site activity, preceding-site
+     * counts, band-pass filter sizes, and sigmas used to exercise the band-pass profile.
+     */
+    @DataProvider(name = "BandPassBasicTest")
+    public Object[][] makeBandPassTest() {
+        final List<Object[]> tests = new LinkedList<Object[]>();
+
+        for ( int start : Arrays.asList(1, 10, 100, 1000) ) {
+            for ( boolean precedingIsActive : Arrays.asList(true, false) ) {
+                for ( int precedingSites: Arrays.asList(0, 1, 10, 100) ) {
+                    for ( int bandPassSize : Arrays.asList(0, 1, 10, 100) ) {
+                        for ( double sigma : Arrays.asList(1.0, 2.0, BandPassActivityProfile.DEFAULT_SIGMA) ) {
+// the commented-out loops below are a reduced parameter set kept for debugging
+//        for ( int start : Arrays.asList(10) ) {
+//            for ( boolean precedingIsActive : Arrays.asList(false) ) {
+//                for ( int precedingSites: Arrays.asList(0) ) {
+//                    for ( int bandPassSize : Arrays.asList(1) ) {
+                            tests.add(new Object[]{ start, precedingIsActive, precedingSites, bandPassSize, sigma });
+                        }
+                    }
+                }
+            }
+        }
+
+        return tests.toArray(new Object[][]{});
+    }
+
+    /**
+     * Adds nPrecedingSites states (probability 1.0 or 0.0 per precedingIsActive)
+     * followed by one 1.0 state, and checks the filter geometry.  When all of the
+     * band-pass mass falls on the genome, the filtered probabilities must still sum
+     * to the total raw probability that was added.
+     */
+    @Test(enabled = ! DEBUG, dataProvider = "BandPassBasicTest")
+    public void testBandPass(final int start, final boolean precedingIsActive, final int nPrecedingSites, final int bandPassSize, final double sigma) {
+        final BandPassActivityProfile profile = new BandPassActivityProfile(genomeLocParser, null, MAX_PROB_PROPAGATION_DISTANCE, ACTIVE_PROB_THRESHOLD, bandPassSize, sigma, false);
+
+        final int expectedBandSize = bandPassSize * 2 + 1;
+        Assert.assertEquals(profile.getFilteredSize(), bandPassSize, "Wrong filter size");
+        Assert.assertEquals(profile.getSigma(), sigma, "Wrong sigma");
+        Assert.assertEquals(profile.getBandSize(), expectedBandSize, "Wrong expected band size");
+
+        final String contig = genomeLocParser.getContigs().getSequences().get(0).getSequenceName();
+        final double precedingProb = precedingIsActive ? 1.0 : 0.0;
+        for ( int i = 0; i < nPrecedingSites; i++ ) {
+            final GenomeLoc loc = genomeLocParser.createGenomeLoc(contig, i + start);
+            final ActivityProfileState state = new ActivityProfileState(loc, precedingProb);
+            profile.add(state);
+        }
+
+        final GenomeLoc nextLoc = genomeLocParser.createGenomeLoc(contig, nPrecedingSites + start);
+        profile.add(new ActivityProfileState(nextLoc, 1.0));
+
+        // idiomatic form of the original "precedingIsActive == false"
+        if ( !precedingIsActive && nPrecedingSites >= bandPassSize && bandPassSize < start ) {
+            // we have enough space that all probs fall on the genome
+            final double[] probs = profile.getProbabilitiesAsArray();
+            Assert.assertEquals(MathUtils.sum(probs), 1.0 * (nPrecedingSites * precedingProb + 1), 1e-3, "Activity profile doesn't sum to number of non-zero prob states");
+        }
+    }
+
+    /**
+     * Applies the profile's Gaussian band-pass kernel to the raw activity array in a
+     * single convolution pass, for comparison against the profile's incremental filter.
+     */
+    private double[] bandPassInOnePass(final BandPassActivityProfile profile, final double[] activeProbArray) {
+        final double[] filtered = new double[activeProbArray.length];
+        final double[] fullKernel = profile.getKernel();
+        final int halfWidth = profile.getFilteredSize();
+
+        for ( int i = 0; i < activeProbArray.length; i++ ) {
+            // clip both the kernel and the probability window at the array boundaries
+            final int kernelFrom = Math.max(halfWidth - i, 0);
+            final int kernelTo = Math.min(fullKernel.length, halfWidth + activeProbArray.length - i);
+            final int probFrom = Math.max(0, i - halfWidth);
+            final int probTo = Math.min(activeProbArray.length, i + halfWidth + 1);
+            final double[] kernel = ArrayUtils.subarray(fullKernel, kernelFrom, kernelTo);
+            final double[] window = ArrayUtils.subarray(activeProbArray, probFrom, probTo);
+            filtered[i] = dotProduct(window, kernel);
+        }
+
+        return filtered;
+    }
+
+    /**
+     * Computes the dot product of two equal-length vectors, asserting the lengths agree.
+     */
+    public static double dotProduct(double[] v1, double[] v2) {
+        // fixed typo in the assertion message ("mach" -> "match")
+        Assert.assertEquals(v1.length,v2.length,"Array lengths do not match in dotProduct");
+        double result = 0.0;
+        for (int k = 0; k < v1.length; k++)
+            result += v1[k] * v2[k];
+
+        return result;
+    }
+
+    /**
+     * Pairs of (band-pass filter size, number of 1.0-probability sites to integrate)
+     * for testBandPassComposition, including the maximum supported filter size.
+     */
+    @DataProvider(name = "BandPassComposition")
+    public Object[][] makeBandPassComposition() {
+        final List<Object[]> tests = new LinkedList<Object[]>();
+
+        for ( int bandPassSize : Arrays.asList(0, 1, 10, 100, BandPassActivityProfile.MAX_FILTER_SIZE) ) {
+            for ( int integrationLength : Arrays.asList(1, 10, 100, 1000) ) {
+                tests.add(new Object[]{ bandPassSize, integrationLength });
+            }
+        }
+
+        return tests.toArray(new Object[][]{});
+    }
+
+    /**
+     * Feeds a zero-probability buffer followed by a run of 1.0-probability states into
+     * the profile, then checks the incrementally filtered state probabilities against a
+     * single-pass convolution (bandPassInOnePass) over the same raw array.
+     */
+    @Test( enabled = ! DEBUG, dataProvider = "BandPassComposition")
+    public void testBandPassComposition(final int bandPassSize, final int integrationLength) {
+        final int start = 1;
+        final BandPassActivityProfile profile = new BandPassActivityProfile(genomeLocParser, null, MAX_PROB_PROPAGATION_DISTANCE,
+                ACTIVE_PROB_THRESHOLD, bandPassSize, BandPassActivityProfile.DEFAULT_SIGMA);
+        // raw array is padded by bandPassSize on each side so every filtered value is defined
+        final double[] rawActiveProbs = new double[integrationLength + bandPassSize * 2];
+
+        // add a buffer so that we can get all of the band pass values
+        final String contig = genomeLocParser.getContigs().getSequences().get(0).getSequenceName();
+        int pos = start;
+        int rawProbsOffset = 0;
+        for ( int i = 0; i < bandPassSize; i++ ) {
+            final GenomeLoc loc = genomeLocParser.createGenomeLoc(contig, pos++);
+            final ActivityProfileState state = new ActivityProfileState(loc, 0.0);
+            profile.add(state);
+            // zero out both the leading and the symmetric trailing pad position
+            rawActiveProbs[rawProbsOffset++] = 0.0;
+            rawActiveProbs[rawActiveProbs.length - rawProbsOffset] = 0.0;
+        }
+
+        for ( int i = 0; i < integrationLength; i++ ) {
+            final GenomeLoc nextLoc = genomeLocParser.createGenomeLoc(contig, pos++);
+            profile.add(new ActivityProfileState(nextLoc, 1.0));
+            rawActiveProbs[rawProbsOffset++] = 1.0;
+
+            // after every addition, all pending state probabilities must stay in [0, 1]
+            // (small tolerance above 1.0 allows for accumulated floating-point error)
+            for ( int j = 0; j < profile.size(); j++ ) {
+                Assert.assertTrue(profile.getStateList().get(j).isActiveProb >= 0.0, "State probability < 0 at " + j);
+                Assert.assertTrue(profile.getStateList().get(j).isActiveProb <= 1.0 + 1e-3, "State probability > 1 at " + j);
+            }
+        }
+
+        final double[] expectedProbs = bandPassInOnePass(profile, rawActiveProbs);
+        for ( int j = 0; j < profile.size(); j++ ) {
+            // NOTE(review): exact double equality here -- presumably both code paths
+            // perform identical arithmetic; confirm this is not flaky across platforms.
+            Assert.assertEquals(profile.getStateList().get(j).isActiveProb, expectedProbs[j], "State probability not expected at " + j);
+        }
+    }
+
+    // ------------------------------------------------------------------------------------
+    //
+    // Code to test the creation of the kernels
+    //
+    // ------------------------------------------------------------------------------------
+
+    /**
+
+     kernel <- function(sd, pThres) {
+     raw = dnorm(-80:81, mean=0, sd=sd)
+     norm = raw / sum(raw)
+     bad = norm < pThres
+     paste(norm[! bad], collapse=", ")
+     }
+
+     print(kernel(0.01, 1e-5))
+     print(kernel(1, 1e-5))
+     print(kernel(5, 1e-5))
+     print(kernel(17, 1e-5))
+
+     * @return test cases pairing (sigma, maxSize) with the expected kernel values
+     */
+
+    /**
+     * Expected Gaussian kernels for (sigma, maxSize) pairs; expectations were generated
+     * with the R snippet documented above this provider.
+     */
+    @DataProvider(name = "KernelCreation")
+    public Object[][] makeKernelCreation() {
+        final List<Object[]> tests = new LinkedList<Object[]>();
+
+        tests.add(new Object[]{ 0.01, 1000, new double[]{1.0}});
+        // NOTE(review): this (1.0, 1000) entry is duplicated a few lines below -- probably
+        // only one copy is needed.
+        tests.add(new Object[]{ 1.0, 1000, new double[]{0.0001338302, 0.004431848, 0.053990966, 0.241970723, 0.398942278, 0.241970723, 0.053990966, 0.004431848, 0.0001338302}});
+        tests.add(new Object[]{ 1.0, 0, new double[]{1.0}});
+        tests.add(new Object[]{ 1.0, 1, new double[]{0.2740686, 0.4518628, 0.2740686}});
+        tests.add(new Object[]{ 1.0, 2, new double[]{0.05448868, 0.24420134, 0.40261995, 0.24420134, 0.05448868}});
+        tests.add(new Object[]{ 1.0, 1000, new double[]{0.0001338302, 0.004431848, 0.053990966, 0.241970723, 0.398942278, 0.241970723, 0.053990966, 0.004431848, 0.0001338302}});
+        // NOTE(review): the two entries below appear truncated ("[...]") in this copy of
+        // the patch -- restore the full expected kernels from the upstream repository.
+        tests.add(new Object[]{ 5.0, 1000, new double[]{1.1788613551308e-05, 2.67660451529771e-05, 5.83893851582921e-05, 0.000122380386022754, 0.000246443833694604, 0.000476817640292968, 0.000886369682387602, 0.00158309031659599, 0.00271659384673712, 0.00447890605896858, 0.00709491856924629, 0.0107981933026376, 0.0157900316601788, 0.0221841669358911, 0.029945493127149, 0.0388372109966426, 0.0483941449038287, 0.0579383105522965, 0.0666449205783599, 0.0736540280606647, 0.0782085387950912,  [...]
+        tests.add(new Object[]{17.0, 1000, new double[]{1.25162575710745e-05, 1.57001772728555e-05, 1.96260034693739e-05, 2.44487374842009e-05, 3.03513668801384e-05, 3.75489089511911e-05, 4.62928204154855e-05, 5.68757597480354e-05, 6.96366758708924e-05, 8.49661819944029e-05, 0.000103312156275406, 0.000125185491708561, 0.000151165896477646, 0.000181907623161359, 0.000218144981137171, 0.000260697461819069, 0.000310474281706066, 0.000368478124457557, 0.000435807841336874, 0.0005136598504885 [...]
+
+        return tests.toArray(new Object[][]{});
+    }
+
+    @Test( enabled = ! DEBUG, dataProvider = "KernelCreation")
+    public void testKernelCreation(final double sigma, final int maxSize, final double[] expectedKernel) {
+        final BandPassActivityProfile profile = new BandPassActivityProfile(genomeLocParser, null, MAX_PROB_PROPAGATION_DISTANCE, ACTIVE_PROB_THRESHOLD,
+                maxSize, sigma, true);
+
+        final double[] kernel = profile.getKernel();
+        Assert.assertEquals(kernel.length, expectedKernel.length);
+        for ( int i = 0; i < kernel.length; i++ )
+            Assert.assertEquals(kernel[i], expectedKernel[i], 1e-3, "Kernels not equal at " + i);
+    }
+
+    // ------------------------------------------------------------------------------------
+    //
+    // Large-scale test, reading in 1000G Phase I chr20 calls and making sure that
+    // the regions returned are the same if you run on the entire profile vs. doing it
+// incrementally
+    //
+    // ------------------------------------------------------------------------------------
+
+    /**
+     * VCF call sets (path, contig, start, stop) driving the large-scale
+     * incremental-vs-full profile comparison.
+     */
+    @DataProvider(name = "VCFProfile")
+    public Object[][] makeVCFProfile() {
+        final List<Object[]> tests = new LinkedList<Object[]>();
+
+        //tests.add(new Object[]{ privateTestDir + "ALL.chr20.phase1_release_v3.20101123.snps_indels_svs.sites.vcf", "20", 60470, 61000});
+        //tests.add(new Object[]{ privateTestDir + "ALL.chr20.phase1_release_v3.20101123.snps_indels_svs.sites.vcf", "20", 60470, 100000});
+        //tests.add(new Object[]{ privateTestDir + "ALL.chr20.phase1_release_v3.20101123.snps_indels_svs.sites.vcf", "20", 60470, 1000000});
+        // NOTE(review): the active entry below is identical to the last commented-out line
+        // above -- one of the two is presumably redundant.
+        tests.add(new Object[]{ privateTestDir + "ALL.chr20.phase1_release_v3.20101123.snps_indels_svs.sites.vcf", "20", 60470, 1000000});
+        tests.add(new Object[]{ privateTestDir + "NA12878.WGS.b37.chr20.firstMB.vcf", "20", 1, 1000000});
+
+        return tests.toArray(new Object[][]{});
+    }
+
+    /**
+     * Large-scale comparison: builds one profile incrementally (popping ready regions
+     * after every variant) and one in a single shot, from the same per-site activity
+     * derived from a VCF, and asserts both produce the same regions.
+     */
+    @Test( dataProvider = "VCFProfile")
+    public void testVCFProfile(final String path, final String contig, final int start, final int end) throws Exception {
+        final int extension = 50;
+        final int minRegionSize = 50;
+        final int maxRegionSize = 300;
+
+        final File file = new File(path);
+        final VCFCodec codec = new VCFCodec();
+        final Pair<VCFHeader, VCIterable<LineIterator>> reader = VCIterable.readAllVCs(file, codec);
+
+        final List<ActiveRegion> incRegions = new ArrayList<ActiveRegion>();
+        final BandPassActivityProfile incProfile = new BandPassActivityProfile(genomeLocParser, null, MAX_PROB_PROPAGATION_DISTANCE, ACTIVE_PROB_THRESHOLD);
+        final BandPassActivityProfile fullProfile = new BandPassActivityProfile(genomeLocParser, null, MAX_PROB_PROPAGATION_DISTANCE, ACTIVE_PROB_THRESHOLD);
+        int pos = start;
+        for ( final VariantContext vc : reader.getSecond() ) {
+            if ( vc == null ) continue;
+            // fill every position up to this variant with inactive (0.0) states
+            while ( pos < vc.getStart() ) {
+                final GenomeLoc loc = genomeLocParser.createGenomeLoc(contig, pos);
+                //logger.warn("Adding 0.0 at " + loc + " because vc.getStart is " + vc.getStart());
+                incProfile.add(new ActivityProfileState(loc, 0.0));
+                fullProfile.add(new ActivityProfileState(loc, 0.0));
+                pos++;
+            }
+            // NOTE(review): pos is only advanced for variants fully inside [start, end];
+            // a variant straddling the boundary leaves pos at vc.getStart() -- confirm
+            // this is the intended handling for edge-overlapping records.
+            if ( vc.getStart() >= start && vc.getEnd() <= end ) {
+                final GenomeLoc loc = genomeLocParser.createGenomeLoc(contig, pos);
+                //logger.warn("Adding 1.0 at " + loc);
+                // biallelic indels additionally carry the indel length as soft-clip info
+                ActivityProfileState.Type type = ActivityProfileState.Type.NONE;
+                Number value = null;
+                if ( vc.isBiallelic() && vc.isIndel() ) {
+                    type = ActivityProfileState.Type.HIGH_QUALITY_SOFT_CLIPS;
+                    value = Math.abs(vc.getIndelLengths().get(0));
+                }
+                final ActivityProfileState state = new ActivityProfileState(loc, 1.0, type, value);
+                incProfile.add(state);
+                fullProfile.add(state);
+                pos++;
+            }
+
+            // incremental path: pop whatever regions are ready after each variant
+            incRegions.addAll(incProfile.popReadyActiveRegions(extension, minRegionSize, maxRegionSize, false));
+
+            if ( vc.getStart() > end )
+                break;
+        }
+
+        // flush the remainder of the incremental profile (forceConversion = true)
+        incRegions.addAll(incProfile.popReadyActiveRegions(extension, minRegionSize, maxRegionSize, true));
+
+        final List<ActiveRegion> fullRegions = fullProfile.popReadyActiveRegions(extension, minRegionSize, maxRegionSize, true);
+        assertGoodRegions(fullRegions, start, end, maxRegionSize);
+        assertGoodRegions(incRegions, start, end, maxRegionSize);
+
+        Assert.assertEquals(incRegions.size(),  fullRegions.size(), "incremental and full region sizes aren't the same");
+        for ( int i = 0; i < fullRegions.size(); i++ ) {
+            final ActiveRegion incRegion = incRegions.get(i);
+            final ActiveRegion fullRegion = fullRegions.get(i);
+            Assert.assertTrue(incRegion.equalExceptReads(fullRegion), "Full and incremental regions are not equal: full = " + fullRegion + " inc = " + incRegion);
+        }
+    }
+
+    /**
+     * Asserts that the regions tile the genome contiguously from start, never exceed
+     * maxRegionSize, and that each supporting state's activity agrees with its region's
+     * active flag.
+     */
+    private void assertGoodRegions(final List<ActiveRegion> regions, final int start, final int end, final int maxRegionSize) {
+        int lastPosSeen = start - 1;
+        for ( final ActiveRegion region : regions ) {
+            Assert.assertEquals(region.getLocation().getStart(), lastPosSeen + 1, "discontinuous with previous region.  lastPosSeen " + lastPosSeen + " but region is " + region);
+            Assert.assertTrue(region.getLocation().size() <= maxRegionSize, "Region is too big: " + region);
+            lastPosSeen = region.getLocation().getStop();
+
+            for ( final ActivityProfileState state : region.getSupportingStates() ) {
+                Assert.assertEquals(state.isActiveProb > ACTIVE_PROB_THRESHOLD, region.isActive(),
+                        "Region is active=" + region.isActive() + " but contains a state " + state + " with prob "
+                                + state.isActiveProb + " not within expected values given threshold for activity of "
+                                + ACTIVE_PROB_THRESHOLD);
+            }
+        }
+    }
+}
diff --git a/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/baq/BAQUnitTest.java b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/baq/BAQUnitTest.java
new file mode 100644
index 0000000..1e9fd88
--- /dev/null
+++ b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/baq/BAQUnitTest.java
@@ -0,0 +1,257 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.baq;
+
+
+import org.broadinstitute.gatk.utils.exceptions.UserException;
+import org.testng.Assert;
+import org.testng.annotations.Test;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.BeforeMethod;
+import org.broadinstitute.gatk.utils.BaseTest;
+import org.broadinstitute.gatk.utils.sam.ArtificialSAMUtils;
+import org.broadinstitute.gatk.utils.Utils;
+
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.PrintStream;
+import java.util.List;
+import java.util.ArrayList;
+
+import htsjdk.samtools.reference.IndexedFastaSequenceFile;
+import htsjdk.samtools.*;
+
+/**
+ * Basic unit test for BAQ calculation
+ */
+public class BAQUnitTest extends BaseTest {
+    private SAMFileHeader header;
+    private final int startChr = 1;
+    private final int numChr = 2;
+    private final int chrSize = 1000;
+    IndexedFastaSequenceFile fasta = null;
+
+    /**
+     * Creates a fresh artificial SAM header and opens the hg18 reference before each
+     * test method.
+     */
+    @BeforeMethod
+    public void before() {
+        header = ArtificialSAMUtils.createArtificialSamHeader(numChr, startChr, chrSize);
+        File referenceFile = new File(hg18Reference);
+        try {
+            fasta = new IndexedFastaSequenceFile(referenceFile);
+        }
+        catch(FileNotFoundException ex) {
+            // surface a missing reference as a user-facing error rather than a raw IOException
+            throw new UserException.CouldNotReadInputFile(referenceFile,ex);
+        }
+    }
+
+    /**
+     * One BAQ test case: a read (bases + quals), an optional inline reference or cigar,
+     * and the expected BAQ-adjusted qualities.  Quality strings are FASTQ-encoded
+     * (character value minus 33).
+     */
+    private class BAQTest {
+        String readBases, refBases;
+        byte[] quals, expected;    // decoded Phred values, not ASCII characters
+        String cigar;              // null => a full-length match cigar is generated in createRead()
+        int refOffset;
+        int pos;                   // alignment start; -1 when unused
+
+        public BAQTest(String _refBases, String _readBases, String _quals, String _expected) {
+            // NOTE(review): this delegation passes (_readBases, _refBases) into the
+            // (_refBases, _readBases) slots of the full constructor, i.e. swapped relative
+            // to this constructor's declared parameter order -- confirm whether the
+            // parameter names here or the argument order below is wrong.
+            this(0, -1, null, _readBases, _refBases, _quals, _expected);
+        }
+
+        public BAQTest(int refOffset, String _refBases, String _readBases, String _quals, String _expected) {
+            this(refOffset, -1, null, _refBases, _readBases, _quals, _expected);
+        }
+
+        public BAQTest(long pos, String cigar, String _readBases, String _quals, String _expected) {
+            this(0, pos, cigar, null, _readBases, _quals, _expected);
+        }
+
+
+        public BAQTest(int _refOffset, long _pos, String _cigar, String _refBases, String _readBases, String _quals, String _expected) {
+            refOffset = _refOffset;
+            pos = (int)_pos;
+            cigar = _cigar;
+            readBases = _readBases;
+            refBases = _refBases;
+
+            // decode FASTQ-style quality strings (offset 33) into raw Phred bytes
+            quals = new byte[_quals.getBytes().length];
+            expected = new byte[_quals.getBytes().length];
+            for ( int i = 0; i < quals.length; i++) {
+                quals[i] = (byte)(_quals.getBytes()[i] - 33);
+                expected[i] = (byte)(_expected.getBytes()[i] - 33);
+            }
+        }
+
+        public String toString() { return readBases; }
+
+        /** Builds an artificial SAMRecord for this test case at the configured position. */
+        public SAMRecord createRead() {
+            SAMRecord read = ArtificialSAMUtils.createArtificialRead(header, "foo", 0, pos > 0 ? pos + (refOffset > 0 ? refOffset : 0): 1, readBases.getBytes(), quals);
+            //if ( cigar != null ) read.setAlignmentEnd(readBases.getBytes().length + pos);
+            read.setCigarString( cigar == null ? String.format("%dM", quals.length) : cigar);
+            return read;
+        }
+    }
+
+
+    /**
+     * BAQ test cases: (reference, read, input quals, expected BAQ quals) tuples, some
+     * with explicit cigars and reference offsets.
+     *
+     * NOTE(review): the " at " sequences inside the quality strings below look like "@"
+     * characters mangled by the mailing-list archive this patch came from -- verify the
+     * strings against the upstream repository before applying.
+     */
+    @DataProvider(name = "data")
+    public Object[][] createData1() {
+        List<BAQTest> params = new ArrayList<BAQTest>();
+
+        params.add(new BAQTest("GCTGCTCCTGGTACTGCTGGATGAGGGCCTCGATGAAGCTAAGCTTTTTCTCCTGCTCCTGCGTGATCCGCTGCAG",
+                               "GCTGCTCCTGGTACTGCTGGATGAGGGCCTCGATGAAGCTAAGCTTTTCCTCCTGCTCCTGCGTGATCCGCTGCAG",
+                               "?BACCBDDDFFBCFFHHFIHFEIFHIGHHGHBFEIFGIIGEGIIHGGGIHHIIHIIHIIHGICCIGEII at IGIHCG",
+                               "?BACCBDDDFFBCFFHHFIHFEIFHIGHHGHBFEIFGIIGEGII410..0HIIHIIHIIHGICCIGEII at IGIHCE"));
+
+        params.add(new BAQTest("GCTTTTTCTCCTCCTG",
+                               "GCTTTTCCTCCTCCTG",
+                               "IIHGGGIHHIIHHIIH",
+                               "EI410..0HIIHHIIE"));
+
+        // big and complex, also does a cap from 3 to 4!
+        params.add(new BAQTest(-3, 9999810l, "49M1I126M1I20M1I25M",
+                                "AAATTCAAGATTTCAAAGGCTCTTAACTGCTCAAGATAATTTTTTTTTTTTGAGACAGAGTCTTGCTGTGTTGCCCAGGCTGGAGTGCAGTGGCGTGATCTTGGCTCACTGCAAGCTCCGCCTCCCGGGTTCACGCCATTCTCCTGCCTCAGCCTCCCGAGTAGCTGGGACTACAGGCACCCACCACCACGCCTGGCCAATTTTTTTGTATTTTTAGTAGAGATAG",
+                                "TTCAAGATTTCAAAGGCTCTTAACTGCTCAAGATAATTTTTTTTTTTTGTAGACAGAGTCTTGCTGTGTTGCCCAGGCTGGAGTGCAGTGGCGTGATCTTGGCTCACTGCAAGCTCCGCCTCCCGGGTTCACGCCATTCTCCTGCCTCAGCCTCCCGAGTAGCTGGGACTACAGGCCACCCACCACCACGCCTGGCCTAATTTTTTTGTATTTTTAGTAGAGA",
+                                ">IHFECEBDBBCBCABABAADBD?AABBACEABABC?>?B>@A@@>A?B3BBC?CBDBAABBBBBAABAABBABDACCCBCDAACBCBABBB:ABDBACBBDCCCCABCDCCBCC@@;?<B at BC;CBBBAB=;A>ACBABBBABBCA@@<?>>AAA<CA at AABBABCC?BB8@<@C<>5;<A5=A;>=64>???B>=6497<<;;<;>2?>BA@??A6<<A59",
+                                ">EHFECEBDBBCBCABABAADBD?AABBACEABABC?>?B>@A@@>A?838BC?CBDBAABBBBBAABAABBABDACCCBCDAACBCBABBB:ABDBACBBDCCCCABCDCCBCC@@;?<B at BC;CBBBAB=;A>ACBABBBABBCA@@<?>>AAA<CA at AABBABCC?BB8@<@%<>5;<A5=A;>=64>???B;86497<<;;<;>2?>BA@??A6<<A59"));
+
+        // now changes
+        params.add(new BAQTest(-3, 9999966l, "36M",
+                                "CCGAGTAGCTGGGACTACAGGCACCCACCACCACGCCTGGCC",
+                                "AGTAGCTGGGACTACAGGCACCCACCACCACGCCTG",
+                                "A?>>@>AA?@@>A?>A@?>@>>?=>?'>?=>7=?A9",
+                                "A?>>@>AA?@@>A?>A@?>@>>?=>?'>?=>7=?A9"));
+
+        // raw base qualities are low -- but they shouldn't be capped
+        params.add(new BAQTest(-3, 9999993l, "4=13X2=3X1=4X2=4X1=2X",
+                                "CCACCACGCCTGGCCAATTTTTTTGTATTTTTAGTAGAGATA",
+                                "CCACGCTTGGCAAAGTTTTCCGTACGTTTAGCCGAG",
+                                "33'/(7+270&4),(&&-)$&,%7$',-/61(,6?8",
+                                "33'/(7+270&4),(&&-)$&,%7$',-/61(,6?8"));
+
+        // soft clipping
+        // todo soft clip testing just doesn't work right now!
+
+//        params.add(new BAQTest(29, 10000109l, "29S190M",
+//                                null, "GAAGGTTGAATCAAACCTTCGGTTCCAACGGATTACAGGTGTGAGCCACCGCGACCGGCCTGCTCAAGATAATTTTTAGGGCTAACTATGACATGAACCCCAAAATTCCTGTCCTCTAGATGGCAGAAACCAAGATAAAGTATCCCCACATGGCCACAAGGTTAAGCTCTTATGGACACAAAACAAGGCAGAGAAATGTCATTTGGCATTGGTTTCAGG",
+//                                "3737088:858278273772:3<=;:?;5=9@>@?>@=<>8?>@=>>?>4=5>?=5====A==@?A@=@6 at A><?B:A;:;>@A?>?AA>@?AA>A?>==?AAA@@A>=A<A>>A=?A>AA==@A?AA?>?AA?A@@C@:?A@<;::??AA==>@@?BB=<A?BA>>A>A?AB=???@?BBA@?BA==?A>A?BB=A:@?ABAB>>?ABB>8A at BAIGA",
+//                                "3737088:858278273772:3<=;:?;5=9@>@?>@=<>8?>@=>>?>4=5>?=5====A==@?A@=@6 at A><?B:A;:;>@A?>?AA>@?AA>A?>==?AAA@@A>=A<A>>A=?A>AA==@A?AA?>?AA?A@@C@:?A@<;::??AA==>@@?BB=<A?BA>>A>A?AB=???@?BBA@?BA==?A>A?BB=A:@?ABAB>>?ABB>8A at BAI>;"));
+
+//        params.add(new BAQTest(30, 10000373l, "30S69M1D2M",
+//                                null, "TGAAATCCTGCCTTATAGTTCCCCTAAACCCACGTTCTATCCCCAGATACTCCCCTCTTCATTACAGAACAACAAAGAAAGACAAATTCTTAGCATCAATG",
+//                                "###############################=89>B;6<;96*>.1799>++66=:=:8=<-.9>><;9<':-+;*+::=;8=;;.::<:;=/2=70<=?-",
+//                                "###############################=89>B;6<;96*>.1799>++66=:=:8=<-.9>><;9<':-+;*+::=;8=;;.::<:;=/2=7000%%"));
+
+
+//        params.add(new BAQTest(5, 10000109l, "5S5M",
+//                                "GAAGGTTGAA",
+//                                null,
+//                                "HHHHHHHHHH",
+//                                "HHHHHHHHHE"));
+
+//        params.add(new BAQTest(10009480l, "102M1I18M1I16M1I43M1I10M1D9M1I7M1I7M1I16M1I9M1I8M1I14M2I18M",
+//                                "AGAGATGGGGTTTCGCCATGTTGTCCAGGCTGGTCTTGAACTCCTGACCTCAAGTGATCTGCCCACCTCGGCCTCCCAAAGTGCTGGGATTACACGTGTGAAACCACCATGCCTGGTCTCTTAATTTTTCNGATTCTAATAAAATTACATTCTATTTGCTGAAAGNGTACTTTAGAGTTGAAAGAAAAAGAAAGGNGTGGAACTTCCCCTAGTAAACAAGGAAAAACNTCCATGTTATTTATTGGACCTTAAAAATAGTGAAACATCTTAAGAAAAAAAATCAATCCTA",
+//                                "@HI at BA<?C@?CA>7>=AA>9@==??C???@?>:?BB at BA>B?=A@@<=B?AB???@@@@@?=?A==B at 7<<?@>==>=<=>???>=@@A?<=B:5?413577/675;><;==@=<>>968;6;>????:#;=?>:3072077726/6;3719;9A=9;774771#30532676??=8::97<7144448/4425#65688821515986255/5601548355551#218>96/5/8<4/.2344/914/55553)1047;:30312:4:63556565631=:62610",
+//                                "@HI at BA<?C@?CA>7>=AA>9@==??C???@?>:?BB at BA>B?=A@@<=B?AB???@@@@@?=?A==B at 7<<?@>==>=<=>???>=@@A?<=B:5?413&!7/675;><;==@=<>>96!;6;>????:#;=?>:3!72077726/6;3719;9A=9;774771#30532676??=8::&!<7144448'$!25#65687421515986255/560!548355551#218>96!5/8<4/.2344/614(%!!53)1047;:30312:4:63556565631=:62610"));
+
+        // wrap each BAQTest in the Object[] shape TestNG expects
+        List<Object[]> params2 = new ArrayList<Object[]>();
+        for ( BAQTest x : params ) params2.add(new Object[]{x});
+        return params2.toArray(new Object[][]{});
+    }
+
+
+
+    /** Runs the BAQ calculation against the test's inline reference bases, when present. */
+    @Test(dataProvider = "data", enabled = true)
+    public void testBAQWithProvidedReference(BAQTest test) {
+        if ( test.refBases == null )
+            return; // no inline reference -- covered by the cigar/fasta variant instead
+        testBAQ(test, false);
+    }
+
+    /** Runs the BAQ calculation via cigar plus fasta reference lookup, when a cigar is set. */
+    @Test(dataProvider = "data", enabled = true)
+    public void testBAQWithCigarAndRefLookup(BAQTest test) {
+        if ( test.cigar == null )
+            return; // no cigar -- covered by the inline-reference variant instead
+        testBAQ(test, true);
+    }
+
+    /**
+     * Ensures calcEpsilon returns a non-negative value for every quality over the full
+     * Phred score range.
+     */
+    @Test(enabled = true)
+    public void testBAQQualRange() {
+        BAQ baq = new BAQ(1e-3, 0.1, 7, (byte)4, false);         // matches current samtools parameters
+        final byte ref = (byte)'A';
+        final byte alt = (byte)'A';
+
+        for ( int i = 0; i <= SAMUtils.MAX_PHRED_SCORE; i++ )
+            Assert.assertTrue(baq.calcEpsilon( ref, alt, (byte)i) >= 0.0, "Failed to get baq epsilon range");
+    }
+
+    /**
+     * A read hanging one base off the end of the contig cannot be BAQed; running BAQ in
+     * RECALCULATE/ADD_TAG mode must then clear any pre-existing BQ tag rather than
+     * leave a stale value behind.
+     */
+    @Test(enabled = true)
+    public void testBAQOverwritesExistingTagWithNull() {
+
+        // create a read with a single base off the end of the contig, which cannot be BAQed
+        final SAMRecord read = ArtificialSAMUtils.createArtificialRead(header, "foo", 0, fasta.getSequenceDictionary().getSequence("chr1").getSequenceLength() + 1, 1);
+        read.setReadBases(new byte[] {(byte) 'A'});
+        read.setBaseQualities(new byte[] {(byte) 20});
+        read.setCigarString("1M");
+        read.setAttribute("BQ", "A");
+
+        // try to BAQ and tell it to RECALCULATE AND ADD_TAG
+        BAQ baq = new BAQ(1e-3, 0.1, 7, (byte)4, false);
+        baq.baqRead(read, fasta, BAQ.CalculationMode.RECALCULATE, BAQ.QualityMode.ADD_TAG);
+
+        // did we remove the existing tag?  assertNull reports the unexpected value on
+        // failure, unlike assertTrue(x == null)
+        Assert.assertNull(read.getAttribute("BQ"));
+    }
+
+    /**
+     * Shared driver: runs the BAQ HMM on the test read -- either against the provided
+     * inline reference bases or via cigar + fasta lookup -- and compares the resulting
+     * BQ values against the expected qualities.
+     *
+     * NOTE(review): when lookupWithFasta is false this dereferences test.refBases, so
+     * callers must only pass tests that carry inline reference bases -- confirm.
+     */
+    public void testBAQ(BAQTest test, boolean lookupWithFasta) {
+        BAQ baqHMM = new BAQ(1e-3, 0.1, 7, (byte)4, false);         // matches current samtools parameters
+
+        SAMRecord read = test.createRead();
+        BAQ.BAQCalculationResult result;
+        if ( lookupWithFasta && test.cigar != null )
+            result = baqHMM.calcBAQFromHMM(read, fasta);
+        else
+            result = baqHMM.calcBAQFromHMM(read, test.refBases.getBytes(), test.refOffset);
+
+        // debug output: the read and its before/after qualities
+        System.out.println(Utils.dupString('-', 40));
+        System.out.println("reads   : " + new String(test.readBases));
+        printQuals(System.out, "in-quals:", test.quals, false);
+        printQuals(System.out, "bq-quals:", result.bq, false);
+        for (int i = 0; i < test.quals.length; i++) {
+            //result.bq[i] = baqHMM.capBaseByBAQ(result.rawQuals[i], result.bq[i], result.state[i], i);
+            Assert.assertTrue(result.bq[i] >= baqHMM.getMinBaseQual() || test.expected[i] < baqHMM.getMinBaseQual(), "BQ < min base quality");
+            Assert.assertEquals(result.bq[i], test.expected[i], "Did not see the expected BAQ value at " + i);
+        }
+
+    }
+
+    /**
+     * Prints a prefixed line of base qualities, either as comma-separated integers or as
+     * FASTQ-style characters (quality + 33).
+     */
+    public final static void printQuals( PrintStream out, String prefix, byte[] quals, boolean asInt ) {
+        out.print(prefix);
+        for ( int i = 0; i < quals.length; i++ ) {
+            if ( asInt ) {
+                out.printf("%2d", (int) quals[i]);
+                final boolean isLast = i + 1 == quals.length;
+                if ( !isLast )
+                    out.print(",");
+            } else {
+                out.print((char) (quals[i] + 33));
+            }
+        }
+        out.println();
+    }
+}
\ No newline at end of file
diff --git a/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/classloader/JVMUtilsUnitTest.java b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/classloader/JVMUtilsUnitTest.java
new file mode 100644
index 0000000..96ec5f6
--- /dev/null
+++ b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/classloader/JVMUtilsUnitTest.java
@@ -0,0 +1,75 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.classloader;
+
+import org.testng.Assert;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+public class JVMUtilsUnitTest {
+
+    // Test classes used by the tests for JVMUtils.getCallingClass():
+    // each dummy delegates one stack frame deeper, so the caller of class N
+    // on the stack is class N-1 (and the caller of DummyTestClass1 is this test class).
+    // Note: raw Class types replaced with Class<?> — raw types defeat the
+    // compiler's generics checking and are flagged by modern tooling.
+    private static class DummyTestClass1 {
+        public static Class<?> getCaller( final Class<?> callee ) {
+            return DummyTestClass2.getCaller(callee);
+        }
+    }
+
+    private static class DummyTestClass2 {
+        public static Class<?> getCaller( final Class<?> callee ) {
+            return DummyTestClass3.getCaller(callee);
+        }
+    }
+
+    private static class DummyTestClass3 {
+        public static Class<?> getCaller( final Class<?> callee ) {
+            return JVMUtils.getCallingClass(callee);
+        }
+    }
+
+    @DataProvider( name = "TestGetCallingClassDataProvider" )
+    public Object[][] getTestCallingClassTestData() {
+        // { callee on the stack, expected caller of that callee }
+        return new Object[][] {
+            { DummyTestClass1.class, JVMUtilsUnitTest.class },
+            { DummyTestClass2.class, DummyTestClass1.class },
+            { DummyTestClass3.class, DummyTestClass2.class }
+        };
+    }
+
+    /** JVMUtils.getCallingClass(callee) must report the class that invoked 'callee'. */
+    @Test( dataProvider = "TestGetCallingClassDataProvider" )
+    public void testGetCallingClass( final Class<?> callee, final Class<?> expectedCaller ) {
+        final Class<?> reportedCaller = DummyTestClass1.getCaller(callee);
+
+        Assert.assertEquals(reportedCaller, expectedCaller,
+                            String.format("Wrong calling class returned from DummyTestClass1.getCaller(%s)", callee.getSimpleName()));
+    }
+
+    /** A callee not present on the runtime stack must trigger IllegalArgumentException. */
+    @Test( expectedExceptions = IllegalArgumentException.class )
+    public void testGetCallingClassCalleeNotFound() {
+        // Trying to get the calling class of a class not on the runtime stack should produce an exception.
+        JVMUtils.getCallingClass(DummyTestClass1.class);
+    }
+}
diff --git a/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/clipping/ReadClipperTestUtils.java b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/clipping/ReadClipperTestUtils.java
new file mode 100644
index 0000000..06b70b7
--- /dev/null
+++ b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/clipping/ReadClipperTestUtils.java
@@ -0,0 +1,162 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.clipping;
+
+import htsjdk.samtools.Cigar;
+import htsjdk.samtools.CigarElement;
+import htsjdk.samtools.CigarOperator;
+import htsjdk.samtools.TextCigarCodec;
+import org.broadinstitute.gatk.utils.Utils;
+import org.broadinstitute.gatk.utils.sam.ArtificialSAMUtils;
+import org.broadinstitute.gatk.utils.sam.CigarUtils;
+import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
+import org.testng.Assert;
+
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Stack;
+
+public class ReadClipperTestUtils {
+    //Should contain all the utils needed for tests to mass produce
+    //reads, cigars, and other needed classes
+
+    final static byte [] BASES = {'A', 'C', 'T', 'G'};          // bases cycled when synthesizing read sequences
+    final static byte [] QUALS = {2, 15, 25, 30};               // qualities cycled when synthesizing read quals
+    final static String CIGAR = "4M";                           // default cigar used by makeRead()
+    final static CigarElement[] cigarElements = { new CigarElement(1, CigarOperator.HARD_CLIP),
+                                                  new CigarElement(1, CigarOperator.SOFT_CLIP),
+                                                  new CigarElement(1, CigarOperator.INSERTION),
+                                                  new CigarElement(1, CigarOperator.DELETION),
+                                                  new CigarElement(1, CigarOperator.MATCH_OR_MISMATCH)};
+
+    /**
+     * Make a read from the CIGAR
+     *
+     * @param cigar the CIGAR
+     * @param lengthChange change in read length relative the CIGAR length
+     * @return artificial read
+     */
+    public static GATKSAMRecord makeReadFromCigar(Cigar cigar, int lengthChange) {
+        int readLength = cigar.getReadLength();
+        // apply the length change only when it would not produce a negative read length
+        if ( readLength >= -lengthChange ) {
+            readLength += lengthChange;
+        }
+
+        return ArtificialSAMUtils.createArtificialRead(Utils.arrayFromArrayWithLength(BASES, readLength), Utils.arrayFromArrayWithLength(QUALS, readLength), cigar.toString());
+    }
+
+    /**
+     * Make a read from the CIGAR string
+     *
+     * @param cigarString string used to create a CIGAR
+     * @param lengthChange change in read length relative the CIGAR length
+     * @return artificial read
+     */
+    public static GATKSAMRecord makeReadFromCigar(String cigarString, int lengthChange) {
+        return makeReadFromCigar(CigarUtils.cigarFromString(cigarString), lengthChange);
+    }
+
+    /** Generates every valid cigar permutation, using the default element set, up to maximumLength elements. */
+    public static List<Cigar> generateCigarList(int maximumLength) {
+        return generateCigarList(maximumLength, cigarElements);
+    }
+
+    /**
+     * This function generates every valid permutation of cigar strings (with a given set of cigarElement) with a given length.
+     *
+     * A valid cigar object obeys the following rules:
+     *  - No Hard/Soft clips in the middle of the read
+     *  - No deletions in the beginning / end of the read
+     *  - No repeated adjacent element (e.g. 1M2M -> this should be 3M)
+     *  - No consecutive I/D elements
+     *
+     * @param maximumLength the maximum number of elements in the cigar
+     * @param cigarElements the set of elements to permute over
+     * @return a list with all valid Cigar objects
+     */
+    public static List<Cigar> generateCigarList(int maximumLength, CigarElement[] cigarElements) {
+        int numCigarElements = cigarElements.length;
+        LinkedList<Cigar> cigarList = new LinkedList<Cigar>();
+        byte [] cigarCombination = new byte[maximumLength];
+
+        Utils.fillArrayWithByte(cigarCombination, (byte) 0);               // we start off with all 0's in the combination array.
+        int currentIndex = 0;
+        // Odometer-style enumeration: each cigarCombination[i] indexes into cigarElements,
+        // and the array is advanced like a base-numCigarElements counter.
+        while (true) {
+            Cigar cigar = createCigarFromCombination(cigarCombination, cigarElements);    // create the cigar
+            cigar = CigarUtils.combineAdjacentCigarElements(cigar);                   // combine adjacent elements
+            if (CigarUtils.isCigarValid(cigar)) {                                     // check if it's valid
+                cigarList.add(cigar);                                      // add it
+            }
+
+            boolean currentIndexChanged = false;
+            while (currentIndex < maximumLength && cigarCombination[currentIndex] == numCigarElements - 1) {
+                currentIndex++;                                            // find the next index to increment
+                currentIndexChanged = true;                                // keep track of the fact that we have changed indices!
+            }
+
+            if (currentIndex == maximumLength)                             // if we hit the end of the array, we're done.
+                break;
+
+            cigarCombination[currentIndex]++;                              // otherwise advance the current index
+
+            if (currentIndexChanged) {                                     // if we have changed index, then...
+                for (int i = 0; i < currentIndex; i++)
+                    cigarCombination[i] = 0;                               // reset everything from 0->currentIndex
+                currentIndex = 0;                                          // go back to the first index
+            }
+        }
+
+        return cigarList;
+    }
+
+    /** Translates an array of element indices into a Cigar built from the corresponding elements. */
+    private static Cigar createCigarFromCombination(byte[] cigarCombination, CigarElement[] cigarElements) {
+        Cigar cigar = new Cigar();
+        for (byte i : cigarCombination) {
+            cigar.add(cigarElements[i]);
+        }
+        return cigar;
+    }
+
+    /** Makes the default 4-base artificial read with cigar "4M". */
+    public static GATKSAMRecord makeRead() {
+        return ArtificialSAMUtils.createArtificialRead(BASES, QUALS, CIGAR);
+    }
+
+    /**
+     * Asserts that the two reads have the same bases, qualities and cigar strings
+     *
+     * @param actual the calculated read
+     * @param expected the expected read
+     */
+    public static void assertEqualReads(GATKSAMRecord actual, GATKSAMRecord expected) {
+        // If they're both not empty, test their contents
+        if(!actual.isEmpty() && !expected.isEmpty()) {
+            Assert.assertEquals(actual.getReadBases(), expected.getReadBases());
+            Assert.assertEquals(actual.getBaseQualities(), expected.getBaseQualities());
+            Assert.assertEquals(actual.getCigarString(), expected.getCigarString());
+        }
+        // Otherwise test if they're both empty
+        else
+            Assert.assertEquals(actual.isEmpty(), expected.isEmpty());
+     }
+}
diff --git a/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/clipping/ReadClipperUnitTest.java b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/clipping/ReadClipperUnitTest.java
new file mode 100644
index 0000000..69abeb2
--- /dev/null
+++ b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/clipping/ReadClipperUnitTest.java
@@ -0,0 +1,421 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.clipping;
+
+import htsjdk.samtools.Cigar;
+import htsjdk.samtools.CigarElement;
+import htsjdk.samtools.CigarOperator;
+import org.broadinstitute.gatk.utils.BaseTest;
+import org.broadinstitute.gatk.utils.Utils;
+import org.broadinstitute.gatk.utils.sam.CigarUtils;
+import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
+import org.testng.Assert;
+import org.testng.annotations.BeforeClass;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.List;
+
+/**
+ * User: roger
+ * Date: 9/28/11
+ */
+public class ReadClipperUnitTest extends BaseTest {
+    private final static boolean DEBUG = false;
+
+    List<Cigar> cigarList;                                      // every valid cigar permutation of up to maximumCigarSize elements
+    int maximumCigarSize = 10;                                                                                           // 6 is the minimum necessary number to try all combinations of cigar types with guarantee of clipping an element with length = 2; 10 is used here for extra coverage
+
+    @BeforeClass
+    public void init() {
+        cigarList = ReadClipperTestUtils.generateCigarList(maximumCigarSize);
+    }
+
+    /**
+     * Clipping i reference positions from both ends must push the alignment start
+     * past alnStart + i and pull the alignment end back to at most alnEnd - i,
+     * without moving the unclipped limits.
+     */
+    @Test(enabled = !DEBUG)
+    public void testHardClipBothEndsByReferenceCoordinates() {
+        for (Cigar cigar : cigarList) {
+            GATKSAMRecord read = ReadClipperTestUtils.makeReadFromCigar(cigar, 0);
+            int alnStart = read.getAlignmentStart();
+            int alnEnd = read.getAlignmentEnd();
+            int readLength = alnEnd - alnStart;                 // fixed: was alnStart - alnEnd, which is never positive, so the loop below never executed
+            for (int i = 0; i < readLength / 2; i++) {
+                GATKSAMRecord clippedRead = ReadClipper.hardClipBothEndsByReferenceCoordinates(read, alnStart + i, alnEnd - i);
+                Assert.assertTrue(clippedRead.getAlignmentStart() >= alnStart + i, String.format("Clipped alignment start is less than original read (minus %d): %s -> %s", i, read.getCigarString(), clippedRead.getCigarString()));
+                Assert.assertTrue(clippedRead.getAlignmentEnd() <= alnEnd - i, String.format("Clipped alignment end is greater than original read (minus %d): %s -> %s", i, read.getCigarString(), clippedRead.getCigarString()));   // fixed: was alnEnd + i, a vacuous bound contradicting the message
+                assertUnclippedLimits(read, clippedRead);
+            }
+        }
+    }
+
+    /** Clipping by read coordinates from either end must shrink the read by at least the clipped span. */
+    @Test(enabled = !DEBUG)
+    public void testHardClipByReadCoordinates() {
+        for (Cigar cigar : cigarList) {
+            GATKSAMRecord read = ReadClipperTestUtils.makeReadFromCigar(cigar, 0);
+            int readLength = read.getReadLength();
+            for (int i = 0; i < readLength; i++) {
+                GATKSAMRecord clipLeft = ReadClipper.hardClipByReadCoordinates(read, 0, i);                             // clip bases [0, i]
+                Assert.assertTrue(clipLeft.getReadLength() <= readLength - i, String.format("Clipped read length is greater than original read length (minus %d): %s -> %s", i, read.getCigarString(), clipLeft.getCigarString()));
+                assertUnclippedLimits(read, clipLeft);
+
+                GATKSAMRecord clipRight = ReadClipper.hardClipByReadCoordinates(read, i, readLength - 1);               // clip bases [i, end]
+                Assert.assertTrue(clipRight.getReadLength() <= i, String.format("Clipped read length is greater than original read length (minus %d): %s -> %s", i, read.getCigarString(), clipRight.getCigarString()));
+                assertUnclippedLimits(read, clipRight);
+            }
+        }
+    }
+
+    /** Supplies {originalReadLength, nToClip} pairs: a fixed 50bp read with every clip count from 1 to length-2. */
+    @DataProvider(name = "ClippedReadLengthData")
+    public Object[][] makeClippedReadLengthData() {
+        final int originalReadLength = 50;
+        final List<Object[]> cases = new ArrayList<Object[]>();
+
+        int nToClip = 1;
+        while (nToClip < originalReadLength - 1) {
+            cases.add(new Object[]{originalReadLength, nToClip});
+            nToClip++;
+        }
+
+        return cases.toArray(new Object[][]{});
+    }
+
+    /** Clipping nToClip bases from the left of an all-M read must leave exactly originalReadLength - nToClip bases. */
+    @Test(dataProvider = "ClippedReadLengthData", enabled = !DEBUG)
+    public void testHardClipReadLengthIsRight(final int originalReadLength, final int nToClip) {
+        GATKSAMRecord read = ReadClipperTestUtils.makeReadFromCigar(originalReadLength + "M", 0);
+        read.getReadLength(); // provoke the caching of the read length
+        final int expectedReadLength = originalReadLength - nToClip;
+        GATKSAMRecord clipped = ReadClipper.hardClipByReadCoordinates(read, 0, nToClip - 1);
+        Assert.assertEquals(clipped.getReadLength(), expectedReadLength,
+                String.format("Clipped read length %d with cigar %s not equal to the expected read length %d after clipping %d bases from the left from a %d bp read with cigar %s",
+                        clipped.getReadLength(), clipped.getCigar(), expectedReadLength, nToClip, read.getReadLength(), read.getCigar()));
+    }
+
+    /** Clipping by a reference coordinate from either side must move the alignment limits past the clip point. */
+    @Test(enabled = !DEBUG)
+    public void testHardClipByReferenceCoordinates() {
+        for (Cigar cigar : cigarList) {
+            GATKSAMRecord read = ReadClipperTestUtils.makeReadFromCigar(cigar, 0);
+            int start = read.getSoftStart();
+            int stop = read.getSoftEnd();
+
+            for (int i = start; i <= stop; i++) {
+                GATKSAMRecord clipLeft = (new ReadClipper(read)).hardClipByReferenceCoordinates(-1, i);                 // -1 left bound: clip everything up to ref position i
+                if (!clipLeft.isEmpty()) {
+                    Assert.assertTrue(clipLeft.getAlignmentStart() >= Math.min(read.getAlignmentEnd(), i + 1), String.format("Clipped alignment start (%d) is less the expected (%d): %s -> %s", clipLeft.getAlignmentStart(), i + 1, read.getCigarString(), clipLeft.getCigarString()));
+                    assertUnclippedLimits(read, clipLeft);
+                }
+
+                GATKSAMRecord clipRight = (new ReadClipper(read)).hardClipByReferenceCoordinates(i, -1);                // -1 right bound: clip everything from ref position i onward
+                if (!clipRight.isEmpty() && clipRight.getAlignmentStart() <= clipRight.getAlignmentEnd()) {             // alnStart > alnEnd if the entire read is a soft clip now. We can't test those.
+                    Assert.assertTrue(clipRight.getAlignmentEnd() <= Math.max(read.getAlignmentStart(), i - 1), String.format("Clipped alignment end (%d) is greater than expected (%d): %s -> %s", clipRight.getAlignmentEnd(), i - 1, read.getCigarString(), clipRight.getCigarString()));
+                    assertUnclippedLimits(read, clipRight);
+                }
+            }
+        }
+    }
+
+    /** Left-tail clipping at every alignment position must push the alignment start past the clip point. */
+    @Test(enabled = !DEBUG)
+    public void testHardClipByReferenceCoordinatesLeftTail() {
+        for (Cigar cigar : cigarList) {
+            GATKSAMRecord read = ReadClipperTestUtils.makeReadFromCigar(cigar, 0);
+            int alnStart = read.getAlignmentStart();
+            int alnEnd = read.getAlignmentEnd();
+            if (read.getSoftStart() == alnStart) {                                                                      // we can't test left clipping if the read has hanging soft clips on the left side
+                for (int i = alnStart; i <= alnEnd; i++) {
+                    GATKSAMRecord clipLeft = ReadClipper.hardClipByReferenceCoordinatesLeftTail(read, i);
+
+                    if (!clipLeft.isEmpty()) {
+                        Assert.assertTrue(clipLeft.getAlignmentStart() >= i + 1, String.format("Clipped alignment start (%d) is less the expected (%d): %s -> %s", clipLeft.getAlignmentStart(), i + 1, read.getCigarString(), clipLeft.getCigarString()));
+                        assertUnclippedLimits(read, clipLeft);
+                    }
+                }
+            }
+        }
+    }
+
+    /** Right-tail clipping at every alignment position must pull the alignment end back before the clip point. */
+    @Test(enabled = !DEBUG)
+    public void testHardClipByReferenceCoordinatesRightTail() {
+        for (Cigar cigar : cigarList) {
+            GATKSAMRecord read = ReadClipperTestUtils.makeReadFromCigar(cigar, 0);
+            int alnStart = read.getAlignmentStart();
+            int alnEnd = read.getAlignmentEnd();
+            if (read.getSoftEnd() == alnEnd) {                                                                          // we can't test right clipping if the read has hanging soft clips on the right side
+                for (int i = alnStart; i <= alnEnd; i++) {
+                    GATKSAMRecord clipRight = ReadClipper.hardClipByReferenceCoordinatesRightTail(read, i);
+                    if (!clipRight.isEmpty() && clipRight.getAlignmentStart() <= clipRight.getAlignmentEnd()) {         // alnStart > alnEnd if the entire read is a soft clip now. We can't test those.
+                        Assert.assertTrue(clipRight.getAlignmentEnd() <= i - 1, String.format("Clipped alignment end (%d) is greater than expected (%d): %s -> %s", clipRight.getAlignmentEnd(), i - 1, read.getCigarString(), clipRight.getCigarString()));
+                        assertUnclippedLimits(read, clipRight);
+                    }
+                }
+            }
+        }
+    }
+
+    /** Hard clipping low-quality ends must remove every low-qual base, whether on the left, right, or both tails. */
+    @Test(enabled = !DEBUG)
+    public void testHardClipLowQualEnds() {
+        final byte LOW_QUAL = 2;
+        final byte HIGH_QUAL = 30;
+
+        /** create a read for every cigar permutation */
+        for (Cigar cigar : cigarList) {
+            GATKSAMRecord read = ReadClipperTestUtils.makeReadFromCigar(cigar, 0);
+            int readLength = read.getReadLength();
+            byte[] quals = new byte[readLength];
+
+            for (int nLowQualBases = 0; nLowQualBases < readLength; nLowQualBases++) {
+
+                /**  create a read with nLowQualBases in the left tail */
+                Utils.fillArrayWithByte(quals, HIGH_QUAL);
+                for (int addLeft = 0; addLeft < nLowQualBases; addLeft++)
+                    quals[addLeft] = LOW_QUAL;
+                read.setBaseQualities(quals);
+                GATKSAMRecord clipLeft = ReadClipper.hardClipLowQualEnds(read, LOW_QUAL);
+                checkClippedReadsForLowQualEnds(read, clipLeft, LOW_QUAL, nLowQualBases);
+
+                /** create a read with nLowQualBases in the right tail */
+                Utils.fillArrayWithByte(quals, HIGH_QUAL);
+                for (int addRight = 0; addRight < nLowQualBases; addRight++)
+                    quals[readLength - addRight - 1] = LOW_QUAL;
+                read.setBaseQualities(quals);
+                GATKSAMRecord clipRight = ReadClipper.hardClipLowQualEnds(read, LOW_QUAL);
+                checkClippedReadsForLowQualEnds(read, clipRight, LOW_QUAL, nLowQualBases);
+
+                /** create a read with nLowQualBases on both tails */
+                if (nLowQualBases <= readLength / 2) {                      // only when both tails fit without overlapping
+                    Utils.fillArrayWithByte(quals, HIGH_QUAL);
+                    for (int addBoth = 0; addBoth < nLowQualBases; addBoth++) {
+                        quals[addBoth] = LOW_QUAL;
+                        quals[readLength - addBoth - 1] = LOW_QUAL;
+                    }
+                    read.setBaseQualities(quals);
+                    GATKSAMRecord clipBoth = ReadClipper.hardClipLowQualEnds(read, LOW_QUAL);
+                    checkClippedReadsForLowQualEnds(read, clipBoth, LOW_QUAL, 2*nLowQualBases);
+                }
+            }
+        }
+    }
+
+    /** Hard clipping soft clips must only convert soft-clip elements into hard clips, leaving all other counts intact. */
+    @Test(enabled = !DEBUG)
+    public void testHardClipSoftClippedBases() {
+        for (Cigar cigar : cigarList) {
+            GATKSAMRecord read = ReadClipperTestUtils.makeReadFromCigar(cigar, 0);
+            GATKSAMRecord clippedRead = ReadClipper.hardClipSoftClippedBases(read);
+            CigarCounter original = new CigarCounter(read);
+            CigarCounter clipped = new CigarCounter(clippedRead);
+
+            assertUnclippedLimits(read, clippedRead);                                                                   // Make sure limits haven't changed
+            original.assertHardClippingSoftClips(clipped);                                                              // Make sure we have only clipped SOFT_CLIPS
+        }
+    }
+
+    /**
+     * Clipping leading insertions must remove exactly the leading (and, when other elements exist,
+     * trailing) insertion bases.
+     * NOTE(review): this test is disabled (enabled = false) — confirm whether hardClipLeadingInsertions
+     * is still supported before re-enabling.
+     */
+    @Test(enabled = false)
+    public void testHardClipLeadingInsertions() {
+        for (Cigar cigar : cigarList) {
+            if (startsWithInsertion(cigar)) {
+                GATKSAMRecord read = ReadClipperTestUtils.makeReadFromCigar(cigar, 0);
+                GATKSAMRecord clippedRead = ReadClipper.hardClipLeadingInsertions(read);
+
+                assertUnclippedLimits(read, clippedRead);        // Make sure limits haven't changed
+
+                int expectedLength = read.getReadLength() - leadingCigarElementLength(read.getCigar(), CigarOperator.INSERTION);
+                if (cigarHasElementsDifferentThanInsertionsAndHardClips(read.getCigar()))
+                    expectedLength -= leadingCigarElementLength(CigarUtils.invertCigar(read.getCigar()), CigarOperator.INSERTION);
+
+                if (!clippedRead.isEmpty()) {
+                    Assert.assertEquals(expectedLength, clippedRead.getReadLength(), String.format("%s -> %s", read.getCigarString(), clippedRead.getCigarString()));  // check that everything else is still there
+                    Assert.assertFalse(startsWithInsertion(clippedRead.getCigar()));                                                                                   // check that the insertions are gone
+                } else
+                    Assert.assertTrue(expectedLength == 0, String.format("expected length: %d", expectedLength));                                                      // check that the read was expected to be fully clipped
+            }
+        }
+    }
+
+    /** Reverting soft clips must extend the alignment by exactly the leading/trailing soft-clip lengths. */
+    @Test(enabled = !DEBUG)
+    public void testRevertSoftClippedBases() {
+        for (Cigar cigar : cigarList) {
+            final int leadingSoftClips = leadingCigarElementLength(cigar, CigarOperator.SOFT_CLIP);
+            final int tailSoftClips = leadingCigarElementLength(CigarUtils.invertCigar(cigar), CigarOperator.SOFT_CLIP);  // trailing clips read from the inverted cigar
+
+            final GATKSAMRecord read = ReadClipperTestUtils.makeReadFromCigar(cigar, 0);
+            final GATKSAMRecord unclipped = ReadClipper.revertSoftClippedBases(read);
+
+            assertUnclippedLimits(read, unclipped);                                                                     // Make sure limits haven't changed
+
+            if (leadingSoftClips > 0 || tailSoftClips > 0) {
+                final int expectedStart = read.getAlignmentStart() - leadingSoftClips;
+                final int expectedEnd = read.getAlignmentEnd() + tailSoftClips;
+
+                Assert.assertEquals(unclipped.getAlignmentStart(), expectedStart);
+                Assert.assertEquals(unclipped.getAlignmentEnd(), expectedEnd);
+            } else
+                Assert.assertEquals(read.getCigarString(), unclipped.getCigarString());                                 // no soft clips: reverting is a no-op
+        }
+    }
+
+    /**
+     * Reverting soft clips must keep both the original and reverted cigars valid.
+     * NOTE(review): despite the name, no threshold parameter is exercised here —
+     * the test only checks cigar validity after reverting; confirm the intent.
+     */
+    @Test(enabled = !DEBUG)
+    public void testRevertSoftClippedBasesWithThreshold() {
+        for (Cigar cigar : cigarList) {
+            final int leadingSoftClips = leadingCigarElementLength(cigar, CigarOperator.SOFT_CLIP);
+            final int tailSoftClips = leadingCigarElementLength(CigarUtils.invertCigar(cigar), CigarOperator.SOFT_CLIP);
+
+            final GATKSAMRecord read = ReadClipperTestUtils.makeReadFromCigar(cigar, 0);
+            final GATKSAMRecord unclipped = ReadClipper.revertSoftClippedBases(read);
+
+            assertUnclippedLimits(read, unclipped);                                                                     // Make sure limits haven't changed
+            Assert.assertNull(read.getCigar().isValid(null, -1));                                                       // isValid returns null when the cigar is well-formed
+            Assert.assertNull(unclipped.getCigar().isValid(null, -1));
+
+            if (!(leadingSoftClips > 0 || tailSoftClips > 0))
+                Assert.assertEquals(read.getCigarString(), unclipped.getCigarString());
+
+        }
+    }
+
+    /** Supplies {softStart, alignmentStart} pairs crossing soft starts at/before the contig start with two alignment starts. */
+    @DataProvider(name = "RevertSoftClipsBeforeContig")
+    public Object[][] makeRevertSoftClipsBeforeContig() {
+        final List<Object[]> cases = new ArrayList<>();
+
+        final int[] softStarts = {-10, -1, 0};
+        final int[] alignmentStarts = {1, 10};
+        for ( final int softStart : softStarts ) {
+            for ( final int alignmentStart : alignmentStarts ) {
+                cases.add(new Object[]{softStart, alignmentStart});
+            }
+        }
+
+        return cases.toArray(new Object[][]{});
+    }
+
+    /**
+     * Reverting soft clips that hang off the start of the contig must convert the
+     * out-of-contig portion to hard clips and pin the alignment start at position 1.
+     */
+    @Test(enabled = true, dataProvider = "RevertSoftClipsBeforeContig")
+    public void testRevertSoftClippedBasesBeforeStartOfContig(final int softStart, final int alignmentStart) {
+        final int nMatches = 10;
+        final int nSoft = -1 * (softStart - alignmentStart);                // soft-clip length needed so the soft start lands at 'softStart'
+        final String cigar = nSoft + "S" + nMatches + "M";
+        final GATKSAMRecord read = ReadClipperTestUtils.makeReadFromCigar(cigar, 0);
+        read.setAlignmentStart(alignmentStart);
+
+        Assert.assertEquals(read.getSoftStart(), softStart);
+        Assert.assertEquals(read.getAlignmentStart(), alignmentStart);
+        Assert.assertEquals(read.getCigarString(), cigar);
+
+        final GATKSAMRecord reverted = ReadClipper.revertSoftClippedBases(read);
+
+        final int expectedAlignmentStart = 1;
+        final String expectedCigar = (1 - softStart) + "H" + read.getAlignmentEnd() + "M";
+        Assert.assertEquals(reverted.getSoftStart(), expectedAlignmentStart);
+        Assert.assertEquals(reverted.getAlignmentStart(), expectedAlignmentStart);
+        Assert.assertEquals(reverted.getCigarString(), expectedCigar);
+    }
+
+    private void assertNoLowQualBases(GATKSAMRecord read, byte low_qual) {
+        if (!read.isEmpty()) {
+            byte[] quals = read.getBaseQualities();
+            for (int i = 0; i < quals.length; i++)
+                Assert.assertFalse(quals[i] <= low_qual, String.format("Found low qual (%d) base after hard clipping. Position: %d -- %s", low_qual, i, read.getCigarString()));
+        }
+    }
+
+    /** Shared checks for low-qual-end clipping: unclipped limits unchanged, and no low-qual bases remain. */
+    private void checkClippedReadsForLowQualEnds(GATKSAMRecord read, GATKSAMRecord clippedRead, byte lowQual, int nLowQualBases) {
+        assertUnclippedLimits(read, clippedRead);                                                                       // Make sure limits haven't changed
+        assertNoLowQualBases(clippedRead, lowQual);                                                                     // Make sure the low qualities are gone
+    }
+
+    /**
+     * Asserts that clipping doesn't change the getUnclippedStart / getUnclippedEnd
+     *
+     * @param original original read
+     * @param clipped clipped read
+     */
+    private void assertUnclippedLimits(GATKSAMRecord original, GATKSAMRecord clipped) {
+        // The check only applies while the clipped read still has at least one non-clipped base.
+        if (CigarUtils.readHasNonClippedBases(clipped)) {
+            Assert.assertEquals(original.getUnclippedStart(), clipped.getUnclippedStart());
+            Assert.assertEquals(original.getUnclippedEnd(), clipped.getUnclippedEnd());
+        }
+    }
+
+    private boolean startsWithInsertion(Cigar cigar) {
+        return leadingCigarElementLength(cigar, CigarOperator.INSERTION) > 0;
+    }
+
+    /**
+     * Returns the length of the first cigar element matching 'operator', looking past any
+     * leading hard clips; returns 0 when the first non-hard-clip element does not match.
+     */
+    private int leadingCigarElementLength(Cigar cigar, CigarOperator operator) {
+        for (CigarElement cigarElement : cigar.getCigarElements()) {
+            if (cigarElement.getOperator() == operator)
+                return cigarElement.getLength();
+            if (cigarElement.getOperator() != CigarOperator.HARD_CLIP)
+                break;                                                      // hit a real element that isn't the operator: no leading match
+        }
+        return 0;
+    }
+
+    private boolean cigarHasElementsDifferentThanInsertionsAndHardClips(Cigar cigar) {
+        for (CigarElement cigarElement : cigar.getCigarElements())
+            if (cigarElement.getOperator() != CigarOperator.INSERTION && cigarElement.getOperator() != CigarOperator.HARD_CLIP)
+                return true;
+        return false;
+    }
+
+    /** Tallies, per cigar operator, the total number of bases covered by that operator in a read's cigar. */
+    private class CigarCounter {
+        private HashMap<CigarOperator, Integer> counter;        // operator -> summed element length over the whole cigar
+
+        public Integer getCounterForOp(CigarOperator operator) {
+            return counter.get(operator);
+        }
+
+        public CigarCounter(GATKSAMRecord read) {
+            CigarOperator[] operators = CigarOperator.values();
+            counter = new HashMap<CigarOperator, Integer>(operators.length);
+
+            for (CigarOperator op : operators)
+                counter.put(op, 0);                             // seed every operator with 0 so lookups never return null
+
+            for (CigarElement cigarElement : read.getCigar().getCigarElements())
+                counter.put(cigarElement.getOperator(), counter.get(cigarElement.getOperator()) + cigarElement.getLength());
+        }
+
+        /**
+         * Asserts that 'clipped' differs from this counter only by soft clips having been
+         * converted to hard clips: hard+soft here must equal hard there, soft there must be 0,
+         * and every other operator count must be unchanged.
+         */
+        public boolean assertHardClippingSoftClips(CigarCounter clipped) {
+            for (CigarOperator op : counter.keySet()) {
+                if (op == CigarOperator.HARD_CLIP || op == CigarOperator.SOFT_CLIP) {
+                    int counterTotal = counter.get(CigarOperator.HARD_CLIP) + counter.get(CigarOperator.SOFT_CLIP);
+                    int clippedHard = clipped.getCounterForOp(CigarOperator.HARD_CLIP);
+                    int clippedSoft = clipped.getCounterForOp(CigarOperator.SOFT_CLIP);
+
+                    Assert.assertEquals(counterTotal, clippedHard);
+                    Assert.assertTrue(clippedSoft == 0);
+                } else
+                    Assert.assertEquals(counter.get(op), clipped.getCounterForOp(op));
+            }
+            return true;
+        }
+
+    }
+
+    /** Reverting a read whose only aligned base is soft clipped must restore the soft start as the alignment start. */
+    @Test(enabled = !DEBUG)
+    public void testRevertEntirelySoftclippedReads() {
+        GATKSAMRecord read = ReadClipperTestUtils.makeReadFromCigar("2H1S3H", 0);
+        GATKSAMRecord clippedRead = ReadClipper.revertSoftClippedBases(read);
+        Assert.assertEquals(clippedRead.getAlignmentStart(), read.getSoftStart());
+    }
+
+}
\ No newline at end of file
diff --git a/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/codecs/beagle/BeagleCodecUnitTest.java b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/codecs/beagle/BeagleCodecUnitTest.java
new file mode 100644
index 0000000..05632d6
--- /dev/null
+++ b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/codecs/beagle/BeagleCodecUnitTest.java
@@ -0,0 +1,42 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.codecs.beagle;
+
+import org.testng.Assert;
+import org.testng.annotations.Test;
+
+public class BeagleCodecUnitTest {
+
+    @Test
+    public void testCanDecode() {
+        final String EXTRA_CHAR = "1";
+        BeagleCodec codec = new BeagleCodec();
+        Assert.assertTrue(codec.canDecode("filename." + BeagleCodec.FILE_EXT));
+        Assert.assertTrue(codec.canDecode("filename" + EXTRA_CHAR + "." + BeagleCodec.FILE_EXT));
+        Assert.assertFalse(codec.canDecode("filename." + BeagleCodec.FILE_EXT + EXTRA_CHAR));
+        Assert.assertFalse(codec.canDecode("filename" + BeagleCodec.FILE_EXT));
+    }
+}
diff --git a/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/codecs/hapmap/HapMapUnitTest.java b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/codecs/hapmap/HapMapUnitTest.java
new file mode 100644
index 0000000..cf6bc20
--- /dev/null
+++ b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/codecs/hapmap/HapMapUnitTest.java
@@ -0,0 +1,174 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.codecs.hapmap;
+
+import htsjdk.tribble.annotation.Strand;
+import htsjdk.tribble.readers.LineIterator;
+import htsjdk.tribble.readers.LineIteratorImpl;
+import htsjdk.tribble.readers.LineReaderUtil;
+import htsjdk.tribble.readers.PositionalBufferedStream;
+import org.broadinstitute.gatk.utils.BaseTest;
+import org.testng.Assert;
+import org.testng.annotations.Test;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+
+/**
+ * Unit tests for the HapMap codec
+ */
+public class HapMapUnitTest extends BaseTest {
+    // our sample hapmap file
+    private final static File hapMapFile = new File(privateTestDir + "genotypes_chr1_ASW_phase3.3_first500.hapmap");
+    private final static String knownLine = "rs2185539 C/T chr1 556738 + ncbi_b36 bbs urn:lsid:bbs.hapmap.org:Protocol:Phase3.r3:1 urn:lsid:bbs.hapmap.org:Assay:Phase3.r3_r" +
+            "s2185539:1 urn:lsid:dcc.hapmap.org:Panel:US_African-30-trios:4 QC+ CC TC TT CT CC CC CC CC CC CC CC CC CC";
+    /**
+     * test reading the header off of the file.  We take in the file, read off the first line,
+     * close the reader, and then ask the HapMap decoder for the header with a new reader.  These should
+     * be equal (i.e. they return the same object).
+     */
+    @Test
+    public void testReadHeader() {
+        RawHapMapCodec codec = new RawHapMapCodec();
+        final LineIterator reader = getLineIterator();
+        try {
+            String header = reader.next();
+            Assert.assertTrue(header.equals(codec.readActualHeader(getLineIterator())));
+        } finally {
+            codec.close(reader);
+        }
+    }
+
+    @Test
+    public void testKnownRecordConversion() {
+        RawHapMapCodec codec = new RawHapMapCodec();
+        RawHapMapFeature feature = (RawHapMapFeature)codec.decode(knownLine);
+
+
+        // check that the alleles are right
+        Assert.assertEquals(feature.getAlleles().length,2);
+        Assert.assertTrue("C".equals(feature.getAlleles()[0]));
+        Assert.assertTrue("T".equals(feature.getAlleles()[1]));
+
+        // check the name
+        Assert.assertTrue("rs2185539".equals(feature.getName()));
+
+        // check the position
+        Assert.assertEquals(feature.getStart(),556738);
+        Assert.assertEquals(feature.getEnd(),556738);
+
+        // check the contig
+        Assert.assertTrue("chr1".equals(feature.getChr()));
+                
+        // check the assembly, center, protLSID, assayLSID, panelLSID, and qccode
+        Assert.assertTrue("ncbi_b36".equals(feature.getAssembly()));
+        Assert.assertTrue("bbs".equals(feature.getCenter()));
+        Assert.assertTrue("urn:lsid:bbs.hapmap.org:Protocol:Phase3.r3:1".equals(feature.getProtLSID()));
+        Assert.assertTrue("urn:lsid:bbs.hapmap.org:Assay:Phase3.r3_rs2185539:1".equals(feature.getAssayLSID()));
+        Assert.assertTrue("urn:lsid:dcc.hapmap.org:Panel:US_African-30-trios:4".equals(feature.getPanelLSID()));
+        Assert.assertTrue("QC+".equals(feature.getQCCode()));
+
+        // check the strand
+        Assert.assertEquals(feature.getStrand(),Strand.POSITIVE);
+
+        // check the genotypes
+        int x = 0;
+        for (; x < feature.getGenotypes().length; x++) {
+            switch (x) {
+                case 1: Assert.assertTrue("TC".equals(feature.getGenotypes()[x])); break;
+                case 2: Assert.assertTrue("TT".equals(feature.getGenotypes()[x])); break;
+                case 3: Assert.assertTrue("CT".equals(feature.getGenotypes()[x])); break;
+                default: Assert.assertTrue("CC".equals(feature.getGenotypes()[x])); break;
+            }
+        }
+        // assert that we found the correct number of records
+        Assert.assertEquals(x,13);
+    }
+
+    @Test
+    public void testReadCorrectNumberOfRecords() {
+        // setup the record for reading our 500 line file (499 records, 1 header line)
+        RawHapMapCodec codec = new RawHapMapCodec();
+        final LineIterator reader = getLineIterator();
+
+        int count = 0;
+        try {
+            codec.readHeader(reader);
+            while (reader.hasNext()) {
+                codec.decode(reader.next());
+                ++count;
+            }
+        } catch (IOException e) {
+            Assert.fail("IOException " + e.getMessage());
+        } finally {
+            codec.close(reader);
+        }
+        Assert.assertEquals(count,499);
+    }
+
+    @Test
+    public void testGetSampleNames() {
+        // setup the record for reading our 500 line file (499 records, 1 header line)
+        RawHapMapCodec codec = new RawHapMapCodec();
+        final LineIterator reader = getLineIterator();
+
+        String line;
+        try {
+            codec.readHeader(reader);
+            line = reader.next();
+            RawHapMapFeature feature = (RawHapMapFeature) codec.decode(line);
+            Assert.assertEquals(feature.getSampleIDs().length, 87);
+
+        } catch (IOException e) {
+            Assert.fail("IOException " + e.getMessage());
+        } finally {
+            codec.close(reader);
+        }
+    }
+
+    @Test
+    public void testCanDecode() {
+        final String EXTRA_CHAR = "1";
+        RawHapMapCodec codec = new RawHapMapCodec();
+        Assert.assertTrue(codec.canDecode("filename." + RawHapMapCodec.FILE_EXT));
+        Assert.assertTrue(codec.canDecode("filename" + EXTRA_CHAR + "." + RawHapMapCodec.FILE_EXT));
+        Assert.assertFalse(codec.canDecode("filename." + RawHapMapCodec.FILE_EXT + EXTRA_CHAR));
+        Assert.assertFalse(codec.canDecode("filename" + RawHapMapCodec.FILE_EXT));
+    }
+
+
+    public LineIterator getLineIterator() {
+        try {
+            return new LineIteratorImpl(LineReaderUtil.fromBufferedStream(new PositionalBufferedStream(new FileInputStream(hapMapFile))));
+        } catch (FileNotFoundException e) {
+            Assert.fail("Unable to open hapmap file : " + hapMapFile);
+        }
+        return null; // for intellij, it doesn't know that assert.fail is fatal
+    }
+
+}
diff --git a/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/codecs/refseq/RefSeqCodecUnitTest.java b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/codecs/refseq/RefSeqCodecUnitTest.java
new file mode 100644
index 0000000..1f1f6b9
--- /dev/null
+++ b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/codecs/refseq/RefSeqCodecUnitTest.java
@@ -0,0 +1,42 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.codecs.refseq;
+
+import org.testng.Assert;
+import org.testng.annotations.Test;
+
+public class RefSeqCodecUnitTest {
+
+    @Test
+    public void testCanDecode() {
+        final String EXTRA_CHAR = "1";
+        RefSeqCodec codec = new RefSeqCodec();
+        Assert.assertTrue(codec.canDecode("filename." + RefSeqCodec.FILE_EXT));
+        Assert.assertTrue(codec.canDecode("filename" + EXTRA_CHAR + "." + RefSeqCodec.FILE_EXT));
+        Assert.assertFalse(codec.canDecode("filename." + RefSeqCodec.FILE_EXT + EXTRA_CHAR));
+        Assert.assertFalse(codec.canDecode("filename" + RefSeqCodec.FILE_EXT));
+    }
+}
diff --git a/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/codecs/sampileup/SAMPileupCodecUnitTest.java b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/codecs/sampileup/SAMPileupCodecUnitTest.java
new file mode 100644
index 0000000..301d671
--- /dev/null
+++ b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/codecs/sampileup/SAMPileupCodecUnitTest.java
@@ -0,0 +1,42 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.codecs.sampileup;
+
+import org.testng.Assert;
+import org.testng.annotations.Test;
+
+public class SAMPileupCodecUnitTest {
+
+    @Test
+    public void testCanDecode() {
+        final String EXTRA_CHAR = "1";
+        SAMPileupCodec codec = new SAMPileupCodec();
+        Assert.assertTrue(codec.canDecode("filename." + SAMPileupCodec.FILE_EXT));
+        Assert.assertTrue(codec.canDecode("filename" + EXTRA_CHAR + "." + SAMPileupCodec.FILE_EXT));
+        Assert.assertFalse(codec.canDecode("filename." + SAMPileupCodec.FILE_EXT + EXTRA_CHAR));
+        Assert.assertFalse(codec.canDecode("filename" + SAMPileupCodec.FILE_EXT));
+    }
+}
diff --git a/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/codecs/samread/SAMReadCodecUnitTest.java b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/codecs/samread/SAMReadCodecUnitTest.java
new file mode 100644
index 0000000..f5c0f5f
--- /dev/null
+++ b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/codecs/samread/SAMReadCodecUnitTest.java
@@ -0,0 +1,42 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.codecs.samread;
+
+import org.testng.Assert;
+import org.testng.annotations.Test;
+
+public class SAMReadCodecUnitTest {
+
+    @Test
+    public void testCanDecode() {
+        final String EXTRA_CHAR = "1";
+        SAMReadCodec codec = new SAMReadCodec();
+        Assert.assertTrue(codec.canDecode("filename." + SAMReadCodec.FILE_EXT));
+        Assert.assertTrue(codec.canDecode("filename" + EXTRA_CHAR + "." + SAMReadCodec.FILE_EXT));
+        Assert.assertFalse(codec.canDecode("filename." + SAMReadCodec.FILE_EXT + EXTRA_CHAR));
+        Assert.assertFalse(codec.canDecode("filename" + SAMReadCodec.FILE_EXT));
+    }
+}
diff --git a/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/codecs/table/TableCodecUnitTest.java b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/codecs/table/TableCodecUnitTest.java
new file mode 100644
index 0000000..33ec130
--- /dev/null
+++ b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/codecs/table/TableCodecUnitTest.java
@@ -0,0 +1,42 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.codecs.table;
+
+import org.testng.Assert;
+import org.testng.annotations.Test;
+
+public class TableCodecUnitTest {
+
+    @Test
+    public void testCanDecode() {
+        final String EXTRA_CHAR = "1";
+        TableCodec codec = new TableCodec();
+        Assert.assertTrue(codec.canDecode("filename." + TableCodec.FILE_EXT));
+        Assert.assertTrue(codec.canDecode("filename" + EXTRA_CHAR + "." + TableCodec.FILE_EXT));
+        Assert.assertFalse(codec.canDecode("filename." + TableCodec.FILE_EXT + EXTRA_CHAR));
+        Assert.assertFalse(codec.canDecode("filename" + TableCodec.FILE_EXT));
+    }
+}
diff --git a/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/collections/DefaultHashMapUnitTest.java b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/collections/DefaultHashMapUnitTest.java
new file mode 100755
index 0000000..78865be
--- /dev/null
+++ b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/collections/DefaultHashMapUnitTest.java
@@ -0,0 +1,159 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.collections;
+
+
+// the imports for unit testing.
+
+import org.broadinstitute.gatk.utils.BaseTest;
+import org.testng.Assert;
+import org.testng.annotations.BeforeMethod;
+import org.testng.annotations.Test;
+
+
+/**
+ * Basic unit test for DefaultHashMap
+ */
+public class DefaultHashMapUnitTest extends BaseTest {
+    DefaultHashMap<String, Double> empty, hasOne, hasTen;
+    Double initialDefault = 10.0;
+
+    @BeforeMethod
+    public void before() {
+        empty = new DefaultHashMap<String, Double>(initialDefault);
+
+        hasOne = new DefaultHashMap<String, Double>(initialDefault);
+        hasOne.put("1", .1);
+
+        hasTen = new DefaultHashMap<String, Double>(initialDefault);
+        for (Integer i = 1; i <= 10; i++) {
+            hasTen.put(i.toString(), i.doubleValue() / 10);
+        }
+    }
+
+    @Test
+    public void testBasicSizes() {
+        logger.warn("Executing testBasicSizes");
+
+        Assert.assertEquals(0, empty.size());
+        Assert.assertEquals(1, hasOne.size());
+        Assert.assertEquals(10, hasTen.size());
+    }
+
+    @Test
+    public void testTenElements() {
+        logger.warn("Executing testTenElements");
+
+        for (Integer i = 1; i <= 10; i++) {
+            Assert.assertEquals(i.doubleValue() / 10, hasTen.get(i.toString()));
+        }
+        Assert.assertEquals(initialDefault, hasTen.get("0"));
+    }
+
+    @Test
+    public void testClear() {
+        logger.warn("Executing testClear");
+
+        empty.clear();
+        hasOne.clear();
+        hasTen.clear();
+
+        Assert.assertEquals(0, empty.size());
+        Assert.assertEquals(0, hasOne.size());
+        Assert.assertEquals(0, hasTen.size());
+    }
+
+
+    @Test
+    public void testSettingTenElements() {
+        logger.warn("Executing testSettingTenElements");
+
+        Assert.assertEquals(10, hasTen.size());
+        for (Integer i = 1; i <= 10; i++) {
+            hasTen.put(i.toString(), i.doubleValue());
+        }
+
+        Assert.assertEquals(10, hasTen.size());
+        for (Integer i = 1; i <= 10; i++) {
+            Assert.assertEquals(i.doubleValue(), hasTen.get(i.toString()));
+        }
+    }
+
+    @Test
+    public void testSettingDefault() {
+        logger.warn("Executing testSettingDefault");
+
+        Assert.assertEquals(initialDefault, empty.get("0"));
+        Assert.assertEquals(initialDefault, hasOne.get("0"));
+        Assert.assertEquals(initialDefault, hasTen.get("0"));
+
+        empty.setDefaultValue(2 * initialDefault);
+        hasOne.setDefaultValue(2 * initialDefault);
+        hasTen.setDefaultValue(2 * initialDefault);
+
+        Assert.assertEquals(2 * initialDefault, empty.get("0"));
+        Assert.assertEquals(2 * initialDefault, hasOne.get("0"));
+        Assert.assertEquals(2 * initialDefault, hasTen.get("0"));
+
+    }
+
+    @Test
+    public void testAdd() {
+        logger.warn("Executing testAdd");
+
+        Assert.assertEquals(0, empty.size());
+
+        Double x = 1.0;
+        empty.put(x.toString(), x / 10);
+        Assert.assertEquals(1, empty.size());
+        Assert.assertEquals(.1, empty.get(x.toString()));
+
+        x = 2.0;
+        empty.put(x.toString(), x / 10);
+        Assert.assertEquals(2, empty.size());
+        Assert.assertEquals(.2, empty.get(x.toString()));
+
+    }
+
+    @Test
+    public void testUnset() {
+        logger.warn("Executing testUnset1");
+
+        Assert.assertEquals(10, hasTen.size());
+        Assert.assertEquals(.9, hasTen.get("9"));
+
+        hasTen.remove("9");
+
+        Assert.assertEquals(9, hasTen.size());
+        Assert.assertEquals(initialDefault, hasTen.get("9"));
+
+        hasTen.remove("1");
+
+        Assert.assertEquals(8, hasTen.size());
+        Assert.assertEquals(initialDefault, hasTen.get("1"));
+
+    }
+}
diff --git a/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/collections/ExpandingArrayListUnitTest.java b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/collections/ExpandingArrayListUnitTest.java
new file mode 100644
index 0000000..78b19d6
--- /dev/null
+++ b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/collections/ExpandingArrayListUnitTest.java
@@ -0,0 +1,177 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.collections;
+
+
+// the imports for unit testing.
+
+
+import org.testng.Assert;
+import org.testng.annotations.Test;
+import org.testng.annotations.BeforeMethod;
+
+import org.broadinstitute.gatk.utils.BaseTest;
+
+import java.util.Arrays;
+
+/**
+ * Basic unit test for RecalData
+ */
+public class ExpandingArrayListUnitTest extends BaseTest {
+    ExpandingArrayList<Integer> empty, initCap10, hasOne, hasTen;
+
+    @BeforeMethod
+    public void before() {
+        empty = new ExpandingArrayList<Integer>();
+
+        initCap10 = new ExpandingArrayList<Integer>(10);
+
+        hasOne = new ExpandingArrayList<Integer>();
+        hasOne.add(1);
+
+        hasTen = new ExpandingArrayList<Integer>();
+        hasTen.addAll(Arrays.asList(1, 2, 3, 4, 5, 6, 7, 8, 9, 10));
+    }
+
+    @Test
+    public void testBasicSizes() {
+        logger.warn("Executing testBasicSizes");
+
+        Assert.assertEquals(0, empty.size());
+        Assert.assertEquals(0, initCap10.size());
+        Assert.assertEquals(1, hasOne.size());
+        Assert.assertEquals(10, hasTen.size());
+    }
+
+    @Test
+    public void testTenElements() {
+        logger.warn("Executing testTenElements");
+
+        for ( int i = 0; i < 10; i++ ) {
+            Assert.assertEquals(i+1, (int)hasTen.get(i));
+        }
+    }
+
+    @Test
+    public void testSettingTenElements() {
+        logger.warn("Executing testSettingTenElements");
+
+        for ( int i = 0; i < 10; i++ ) {
+            Assert.assertEquals(i+1, (int)hasTen.set(i, 2*i));
+        }
+
+        Assert.assertEquals(10, hasTen.size());
+        for ( int i = 0; i < 10; i++ ) {
+            Assert.assertEquals(2*i, (int)hasTen.get(i));
+        }
+    }
+
+    @Test
+    public void testAdd() {
+        logger.warn("Executing testAdd");
+
+        Assert.assertEquals(0, empty.size());
+        empty.add(1);
+        Assert.assertEquals(1, empty.size());
+        Assert.assertEquals(1, (int)empty.get(0));
+        empty.add(2);
+        Assert.assertEquals(2, empty.size());
+        Assert.assertEquals(2, (int)empty.get(1));
+    }
+
+    @Test
+    public void testSet1() {
+        logger.warn("Executing testSet1");
+
+        Assert.assertEquals(0, empty.size());
+        empty.set(0, 1);
+        Assert.assertEquals(1, empty.size());
+        Assert.assertEquals(1, (int)empty.get(0));
+
+        empty.set(1, 2);
+        Assert.assertEquals(2, empty.size());
+        Assert.assertEquals(2, (int)empty.get(1));
+
+        // doesn't expand
+        empty.set(0, 3);
+        Assert.assertEquals(2, empty.size());
+        Assert.assertEquals(3, (int)empty.get(0));
+    }
+
+    @Test
+    public void testSetExpanding() {
+        logger.warn("Executing testSetExpanding");
+
+        Assert.assertEquals(0, empty.size());
+        empty.set(3, 1);
+        Assert.assertEquals(4, empty.size());
+        Assert.assertEquals(empty.get(0), null);
+        Assert.assertEquals(empty.get(1), null);
+        Assert.assertEquals(empty.get(2), null);
+        Assert.assertEquals(1, (int)empty.get(3));
+    }
+
+    @Test
+    public void testSetExpandingReset() {
+        logger.warn("Executing testSetExpandingReset");
+
+        Assert.assertEquals(0, empty.size());
+        empty.set(3, 3);
+        empty.set(2, 2);
+        empty.set(1, 1);
+        empty.set(0, 0);
+        Assert.assertEquals(4, empty.size());
+        for ( int i = 0; i < 4; i++ )
+            Assert.assertEquals(i, (int)empty.get(i));
+    }
+
+    @Test
+    public void testSetExpandingBig() {
+        logger.warn("Executing testSetExpandingBig");
+
+        Assert.assertEquals(0, empty.size());
+        empty.set(1000, 1000);
+        Assert.assertEquals(1001, empty.size());
+        for ( int i = 0; i < 1000; i++ )
+            Assert.assertEquals(empty.get(i), null);
+        Assert.assertEquals(1000, (int)empty.get(1000));
+    }
+
+    @Test (expectedExceptions=IndexOutOfBoundsException.class )
+    public void testSetBadGetNegative() {
+        logger.warn("Executing testSetBadGetNegative");
+        empty.get(-1);
+    }
+
+    @Test
+    public void testSetBadGetPost() {
+        logger.warn("Executing testSetBadGetPost");
+        empty.set(1, 1);
+        Assert.assertEquals(empty.get(0), null);
+        Assert.assertEquals(1, (int)empty.get(1));
+        Assert.assertEquals(empty.get(2), null);
+    }
+}
diff --git a/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/commandline/ArgumentMatchSiteUnitTest.java b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/commandline/ArgumentMatchSiteUnitTest.java
new file mode 100644
index 0000000..32cec45
--- /dev/null
+++ b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/commandline/ArgumentMatchSiteUnitTest.java
@@ -0,0 +1,80 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.commandline;
+
+import org.testng.Assert;
+import org.testng.annotations.Test;
+
+import java.io.File;
+
+public class ArgumentMatchSiteUnitTest {
+    @Test
+    public void testCommandLine() {
+        ArgumentMatchSite site = new ArgumentMatchSite(ArgumentMatchSource.COMMAND_LINE, 1);
+        Assert.assertEquals(site.getSource(), ArgumentMatchSource.COMMAND_LINE);
+        Assert.assertEquals(site.getIndex(), 1);
+    }
+
+    @Test
+    public void testFile() {
+        ArgumentMatchSource source = new ArgumentMatchFileSource(new File("test"));
+        ArgumentMatchSite site = new ArgumentMatchSite(source, 1);
+        Assert.assertEquals(site.getSource(), source);
+        Assert.assertEquals(site.getIndex(), 1);
+    }
+
+    @Test
+    public void testEquals() {
+        ArgumentMatchSource cmdLine = ArgumentMatchSource.COMMAND_LINE;
+        ArgumentMatchSite site1 = new ArgumentMatchSite(cmdLine, 1);
+        ArgumentMatchSite site2 = new ArgumentMatchSite(cmdLine, 2);
+
+        Assert.assertFalse(site1.equals(null));
+
+        Assert.assertTrue(site1.equals(site1));
+        Assert.assertFalse(site1.equals(site2));
+
+        Assert.assertFalse(site2.equals(site1));
+        Assert.assertTrue(site2.equals(site2));
+    }
+
+    @Test
+    public void testCompareTo() {
+        ArgumentMatchSource cmdLine = ArgumentMatchSource.COMMAND_LINE;
+        ArgumentMatchSite site1 = new ArgumentMatchSite(cmdLine, 1);
+        ArgumentMatchSite site2 = new ArgumentMatchSite(cmdLine, 2);
+
+        Assert.assertTrue(site1.compareTo(site1) == 0);
+        Assert.assertTrue(site1.compareTo(site2) < 0);
+        Assert.assertTrue(site2.compareTo(site1) > 0);
+        Assert.assertTrue(site2.compareTo(site2) == 0);
+    }
+
+    @Test(expectedExceptions = NullPointerException.class)
+    public void testCompareToNull() {
+        new ArgumentMatchSite(ArgumentMatchSource.COMMAND_LINE, 0).compareTo(null);
+    }
+}
diff --git a/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/commandline/ArgumentMatchSourceUnitTest.java b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/commandline/ArgumentMatchSourceUnitTest.java
new file mode 100644
index 0000000..418bfe1
--- /dev/null
+++ b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/commandline/ArgumentMatchSourceUnitTest.java
@@ -0,0 +1,99 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.commandline;
+
+import org.broadinstitute.gatk.utils.BaseTest;
+import org.testng.Assert;
+import org.testng.annotations.Test;
+
+import java.io.File;
+
+public class ArgumentMatchSourceUnitTest extends BaseTest { // unit tests for ArgumentMatchSource type, description, equality, and ordering
+    @Test
+    public void testCommandLine() { // the COMMAND_LINE singleton has CommandLine type and no description
+        ArgumentMatchSource source = ArgumentMatchSource.COMMAND_LINE;
+        Assert.assertEquals(source.getType(), ArgumentMatchSourceType.CommandLine);
+        Assert.assertNull(source.getDescription());
+    }
+
+    @Test
+    public void testFile() { // a file-backed source is a Provider whose description embeds the absolute path
+        File f = new File("test");
+        ArgumentMatchSource source = new ArgumentMatchFileSource(f);
+        Assert.assertEquals(source.getType(), ArgumentMatchSourceType.Provider);
+        Assert.assertEquals(source.getDescription(), "file " + f.getAbsolutePath());
+    }
+
+    @Test(expectedExceptions = IllegalArgumentException.class)
+    public void testNullFile() { // constructing a source from a null description is rejected
+        new ArgumentMatchSource(null);
+    }
+
+    @Test
+    public void testEquals() { // equals(): null-safe, reflexive, and distinguishes command line from each file source
+        ArgumentMatchSource cmdLine = ArgumentMatchSource.COMMAND_LINE;
+        ArgumentMatchSource fileA = new ArgumentMatchFileSource(new File("a"));
+        ArgumentMatchSource fileB = new ArgumentMatchFileSource(new File("b"));
+
+        Assert.assertFalse(cmdLine.equals(null)); // equals contract: x.equals(null) is false
+
+        Assert.assertTrue(cmdLine.equals(cmdLine));
+        Assert.assertFalse(cmdLine.equals(fileA));
+        Assert.assertFalse(cmdLine.equals(fileB));
+
+        Assert.assertFalse(fileA.equals(cmdLine)); // symmetry checks across all three pairs
+        Assert.assertTrue(fileA.equals(fileA));
+        Assert.assertFalse(fileA.equals(fileB));
+
+        Assert.assertFalse(fileB.equals(cmdLine));
+        Assert.assertFalse(fileB.equals(fileA));
+        Assert.assertTrue(fileB.equals(fileB));
+    }
+
+    @Test
+    public void testCompareTo() { // ordering: command line sorts before files; files order by description
+        ArgumentMatchSource cmdLine = ArgumentMatchSource.COMMAND_LINE;
+        ArgumentMatchSource fileA = new ArgumentMatchFileSource(new File("a"));
+        ArgumentMatchSource fileB = new ArgumentMatchFileSource(new File("b"));
+
+        Assert.assertTrue(cmdLine.compareTo(cmdLine) == 0);
+        Assert.assertTrue(cmdLine.compareTo(fileA) < 0);
+        Assert.assertTrue(cmdLine.compareTo(fileB) < 0);
+
+        Assert.assertTrue(fileA.compareTo(cmdLine) > 0);
+        Assert.assertTrue(fileA.compareTo(fileA) == 0);
+        Assert.assertTrue(fileA.compareTo(fileB) < 0); // "a" before "b"
+
+        Assert.assertTrue(fileB.compareTo(cmdLine) > 0);
+        Assert.assertTrue(fileB.compareTo(fileA) > 0);
+        Assert.assertTrue(fileB.compareTo(fileB) == 0);
+    }
+
+    @Test(expectedExceptions = NullPointerException.class)
+    public void testCompareToNull() { // Comparable contract: compareTo(null) throws NPE
+        ArgumentMatchSource.COMMAND_LINE.compareTo(null);
+    }
+}
diff --git a/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/commandline/ParsingEngineUnitTest.java b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/commandline/ParsingEngineUnitTest.java
new file mode 100644
index 0000000..6b80ebd
--- /dev/null
+++ b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/commandline/ParsingEngineUnitTest.java
@@ -0,0 +1,1140 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.commandline;
+
+import org.apache.commons.io.FileUtils;
+import htsjdk.tribble.Feature;
+import org.broadinstitute.gatk.utils.exceptions.UserException;
+import htsjdk.variant.variantcontext.VariantContext;
+import org.testng.Assert;
+import org.broadinstitute.gatk.utils.BaseTest;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+import org.testng.annotations.BeforeMethod;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.List;
+import java.util.EnumSet;
+import java.util.Set;
+
+/**
+ * Test suite for the parsing engine.
+ */
+public class ParsingEngineUnitTest extends BaseTest {
+    /** we absolutely cannot have this file existing, or we'll fail the UnitTest */
+    private final static String NON_EXISTANT_FILENAME_VCF = "this_file_should_not_exist_on_disk_123456789.vcf";
+    private ParsingEngine parsingEngine;
+
+    @BeforeMethod
+    public void setUp() { // fresh engine per test; reset RodBinding's global name counter so tests are order-independent
+        parsingEngine = new ParsingEngine(null);
+        RodBinding.resetNameCounter();
+    }
+
+    private class InputFileArgProvider { // argument container: single required-by-default String arg, long/short names
+        @Argument(fullName="input_file",doc="input file",shortName="I")
+        public String inputFile;
+    }
+
+    @Test
+    public void shortNameArgumentTest() { // "-I value" binds the value via the short name
+        final String[] commandLine = new String[] {"-I","na12878.bam"};
+
+        parsingEngine.addArgumentSource( InputFileArgProvider.class );
+        parsingEngine.parse( commandLine );
+        parsingEngine.validate();
+
+        InputFileArgProvider argProvider = new InputFileArgProvider();
+        parsingEngine.loadArgumentsIntoObject( argProvider );
+
+        Assert.assertEquals(argProvider.inputFile,"na12878.bam","Argument is not correctly initialized");
+    }
+
+    @Test
+    public void multiCharShortNameArgumentTest() { // short names longer than one character ("-out") are accepted
+        final String[] commandLine = new String[] {"-out","out.txt"};
+
+        parsingEngine.addArgumentSource( MultiCharShortNameArgProvider.class );
+        parsingEngine.parse( commandLine );
+        parsingEngine.validate();
+
+        MultiCharShortNameArgProvider argProvider = new MultiCharShortNameArgProvider();
+        parsingEngine.loadArgumentsIntoObject( argProvider );
+
+        Assert.assertEquals(argProvider.outputFile,"out.txt","Argument is not correctly initialized");
+    }
+
+
+    private class MultiCharShortNameArgProvider { // argument container with only a multi-character short name
+        @Argument(shortName="out", doc="output file")
+        public String outputFile;
+    }
+
+    @Test
+    public void longNameArgumentTest() { // "--input_file value" binds the value via the full name
+        final String[] commandLine = new String[] {"--input_file", "na12878.bam"};
+
+        parsingEngine.addArgumentSource( InputFileArgProvider.class );
+        parsingEngine.parse( commandLine );
+        parsingEngine.validate();
+
+        InputFileArgProvider argProvider = new InputFileArgProvider();
+        parsingEngine.loadArgumentsIntoObject( argProvider );
+
+        Assert.assertEquals(argProvider.inputFile,"na12878.bam","Argument is not correctly initialized");
+    }
+
+    @Test
+    public void extraWhitespaceTest() { // leading/trailing whitespace around an argument token is tolerated
+        final String[] commandLine = new String[] {"  --input_file ", "na12878.bam"};
+
+        parsingEngine.addArgumentSource( InputFileArgProvider.class );
+        parsingEngine.parse( commandLine );
+        parsingEngine.validate();
+
+        InputFileArgProvider argProvider = new InputFileArgProvider();
+        parsingEngine.loadArgumentsIntoObject( argProvider );
+
+        Assert.assertEquals(argProvider.inputFile,"na12878.bam","Argument is not correctly initialized");
+    }
+
+    @Test
+    public void primitiveArgumentTest() { // a primitive int field is populated from its string value
+        final String[] commandLine = new String[] {"--foo", "5"};
+
+        parsingEngine.addArgumentSource( PrimitiveArgProvider.class );
+        parsingEngine.parse( commandLine );
+        parsingEngine.validate();
+
+        PrimitiveArgProvider argProvider = new PrimitiveArgProvider();
+        parsingEngine.loadArgumentsIntoObject( argProvider );
+
+        Assert.assertEquals(argProvider.foo, 5, "Argument is not correctly initialized");
+    }
+
+    @Test(expectedExceptions=InvalidArgumentValueException.class)
+    public void primitiveArgumentNoValueTest() { // a primitive arg with no value must fail with InvalidArgumentValueException
+        final String[] commandLine = new String[] {"--foo"};
+
+        parsingEngine.addArgumentSource( PrimitiveArgProvider.class );
+        parsingEngine.parse( commandLine );
+        parsingEngine.validate();
+
+        PrimitiveArgProvider argProvider = new PrimitiveArgProvider(); // NOTE(review): statements after the throwing call never run — presumably validate/load throws; kept for symmetry with primitiveArgumentTest
+        parsingEngine.loadArgumentsIntoObject( argProvider );
+
+        Assert.assertEquals(argProvider.foo, 5, "Argument is not correctly initialized");
+    }
+
+    private class PrimitiveArgProvider { // argument container with a primitive (non-boxed) int field
+        @Argument(doc="simple integer")
+        int foo;
+    }
+
+    @Test
+    public void flagTest() { // a boolean flag is set true by its bare presence, no value token
+        final String[] commandLine = new String[] {"--all_loci"};
+
+        parsingEngine.addArgumentSource( AllLociArgProvider.class );
+        parsingEngine.parse( commandLine );
+        parsingEngine.validate();
+
+        AllLociArgProvider argProvider = new AllLociArgProvider();
+        parsingEngine.loadArgumentsIntoObject( argProvider );
+
+        Assert.assertTrue(argProvider.allLoci,"Argument is not correctly initialized");
+    }
+
+    private class AllLociArgProvider { // argument container with a boolean flag defaulting to false
+        @Argument(fullName="all_loci",shortName="A", doc="all loci")
+        public boolean allLoci = false;
+    }
+
+    @Test
+    public void arrayTest() { // repeated occurrences (mixing short and long names) accumulate into an array, in order
+        final String[] commandLine = new String[] {"-I", "foo.txt", "--input_file", "bar.txt"};
+
+        parsingEngine.addArgumentSource( MultiValueArgProvider.class );
+        parsingEngine.parse( commandLine );
+        parsingEngine.validate();
+
+        MultiValueArgProvider argProvider = new MultiValueArgProvider();
+        parsingEngine.loadArgumentsIntoObject( argProvider );
+
+        Assert.assertEquals(argProvider.inputFile.length, 2, "Argument array is of incorrect length");
+        Assert.assertEquals(argProvider.inputFile[0],"foo.txt","1st filename is incorrect");
+        Assert.assertEquals(argProvider.inputFile[1],"bar.txt","2nd filename is incorrect");
+    }
+
+    private class MultiValueArgProvider { // argument container with an array-typed field (multi-valued arg)
+        @Argument(fullName="input_file",shortName="I", doc="input file")
+        public String[] inputFile;
+    }
+
+    @Test
+    public void enumTest() { // an enum arg is matched by constant name
+        final String[] commandLine = new String[] {  "--test_enum", "TWO" };
+
+        parsingEngine.addArgumentSource( EnumArgProvider.class );
+        parsingEngine.parse( commandLine );
+        parsingEngine.validate();
+
+        EnumArgProvider argProvider = new EnumArgProvider();
+        parsingEngine.loadArgumentsIntoObject( argProvider );
+
+        Assert.assertEquals(argProvider.testEnum, TestEnum.TWO, "Enum value is not correct");
+    }
+
+    @Test
+    public void enumMixedCaseTest() { // enum matching is case-insensitive ("oNe" -> ONE)
+        final String[] commandLine = new String[] {  "--test_enum", "oNe" };
+
+        parsingEngine.addArgumentSource( EnumArgProvider.class );
+        parsingEngine.parse( commandLine );
+        parsingEngine.validate();
+
+        EnumArgProvider argProvider = new EnumArgProvider();
+        parsingEngine.loadArgumentsIntoObject( argProvider );
+
+        Assert.assertEquals(argProvider.testEnum, TestEnum.ONE, "Enum value is not correct");
+    }
+
+    @Test
+    public void enumDefaultTest() { // with no arg supplied, the field's initializer (THREE) is preserved
+        final String[] commandLine = new String[] {};
+
+        parsingEngine.addArgumentSource( EnumArgProvider.class );
+        parsingEngine.parse( commandLine );
+        parsingEngine.validate();
+
+        EnumArgProvider argProvider = new EnumArgProvider();
+        parsingEngine.loadArgumentsIntoObject( argProvider );
+
+        Assert.assertEquals(argProvider.testEnum, TestEnum.THREE, "Enum value is not correct");
+    }
+
+    public enum TestEnum { ONE, TWO, THREE } // test fixture enum for the enum-argument tests above
+
+    private class EnumArgProvider { // optional enum-typed argument with a default of THREE
+        @Argument(fullName="test_enum",shortName="ti",doc="test enum",required=false)
+        public TestEnum testEnum = TestEnum.THREE;
+    }
+
+    @Test
+    public void typedCollectionTest() { // repeated values fill a List<Integer>, converted and in order
+        final String[] commandLine = new String[] { "-N","2","-N","4","-N","6","-N","8","-N","10" };
+
+        parsingEngine.addArgumentSource( IntegerListArgProvider.class );
+        parsingEngine.parse( commandLine );
+        parsingEngine.validate();
+
+        IntegerListArgProvider argProvider = new IntegerListArgProvider();
+        parsingEngine.loadArgumentsIntoObject( argProvider );
+
+        Assert.assertNotNull(argProvider.integers, "Argument array is null");
+        Assert.assertEquals(argProvider.integers.size(), 5, "Argument array is of incorrect length");
+        Assert.assertEquals(argProvider.integers.get(0).intValue(), 2, "1st integer is incorrect");
+        Assert.assertEquals(argProvider.integers.get(1).intValue(), 4, "2nd integer is incorrect");
+        Assert.assertEquals(argProvider.integers.get(2).intValue(), 6, "3rd integer is incorrect");
+        Assert.assertEquals(argProvider.integers.get(3).intValue(), 8, "4th integer is incorrect");
+        Assert.assertEquals(argProvider.integers.get(4).intValue(), 10, "5th integer is incorrect");
+    }
+
+    private class IntegerListArgProvider { // argument container with a parameterized List<Integer> field
+        @Argument(fullName="integer_list",shortName="N",doc="integer list")
+        public List<Integer> integers;
+    }
+
+    @Test
+    public void untypedCollectionTest() { // with a raw List (no element type) values stay as Strings, uncoerced
+        final String[] commandLine = new String[] { "-N","2","-N","4","-N","6","-N","8","-N","10" };
+
+        parsingEngine.addArgumentSource( UntypedListArgProvider.class );
+        parsingEngine.parse( commandLine );
+        parsingEngine.validate();
+
+        UntypedListArgProvider argProvider = new UntypedListArgProvider();
+        parsingEngine.loadArgumentsIntoObject( argProvider );
+
+        Assert.assertNotNull(argProvider.integers, "Argument array is null");
+        Assert.assertEquals(argProvider.integers.size(), 5, "Argument array is of incorrect length");
+        Assert.assertEquals(argProvider.integers.get(0), "2", "1st integer is incorrect");
+        Assert.assertEquals(argProvider.integers.get(1), "4", "2nd integer is incorrect");
+        Assert.assertEquals(argProvider.integers.get(2), "6", "3rd integer is incorrect");
+        Assert.assertEquals(argProvider.integers.get(3), "8", "4th integer is incorrect");
+        Assert.assertEquals(argProvider.integers.get(4), "10", "5th integer is incorrect");
+    }
+
+    private class UntypedListArgProvider { // raw List on purpose — exercises the engine's no-type-parameter path
+        @Argument(fullName="untyped_list",shortName="N", doc="untyped list")
+        public List integers;
+    }
+
+    @Test(expectedExceptions=MissingArgumentException.class)
+    public void requiredArgTest() { // omitting a required argument fails validation
+        final String[] commandLine = new String[0];
+
+        parsingEngine.addArgumentSource( RequiredArgProvider.class );
+        parsingEngine.parse( commandLine );
+        parsingEngine.validate();
+    }
+
+    private class RequiredArgProvider { // argument container with an explicitly required Integer arg
+        @Argument(required=true,doc="value")
+        public Integer value;
+    }
+
+    @Test
+    public void defaultValueTest() { // default kept when arg is absent; overridden when supplied
+        // First try getting the default.
+        String[] commandLine = new String[0];
+
+        parsingEngine.addArgumentSource( DefaultValueArgProvider.class );
+        parsingEngine.parse( commandLine );
+        parsingEngine.validate();
+
+        DefaultValueArgProvider argProvider = new DefaultValueArgProvider();
+        parsingEngine.loadArgumentsIntoObject( argProvider );
+
+        Assert.assertEquals(argProvider.value.intValue(), 42, "Default value is not correctly initialized");
+
+        // Then try to override it.
+        commandLine = new String[] { "--value", "27" };
+
+        parsingEngine.parse( commandLine ); // same engine re-parsed with the new command line
+        parsingEngine.validate();
+
+        parsingEngine.loadArgumentsIntoObject( argProvider );
+
+        Assert.assertEquals(argProvider.value.intValue(), 27, "Default value is not correctly initialized");
+    }
+
+    private class DefaultValueArgProvider { // optional argument with an in-field default of 42
+        @Argument(doc="value",required=false)
+        public Integer value = 42;
+    }
+
+    @Test
+    public void disableValidationOfRequiredArgTest() { // skipping MissingRequiredArgument validation lets an absent required arg pass
+        final String[] commandLine = new String[0];
+
+        parsingEngine.addArgumentSource( RequiredArgProvider.class );
+        parsingEngine.parse( commandLine );
+        parsingEngine.validate( EnumSet.of(ParsingEngine.ValidationType.MissingRequiredArgument) );
+
+        RequiredArgProvider argProvider = new RequiredArgProvider();
+        parsingEngine.loadArgumentsIntoObject( argProvider );
+
+        Assert.assertNull(argProvider.value, "Value should have remained unset"); // fixed message grammar ("have remain" -> "have remained")
+    }
+
+    @Test
+    public void unrequiredArgTest() { // optional arg left unspecified stays null
+        final String[] commandLine = new String[0];
+
+        parsingEngine.addArgumentSource( UnrequiredArgProvider.class );
+        parsingEngine.parse( commandLine );
+        parsingEngine.validate();
+
+        UnrequiredArgProvider argProvider = new UnrequiredArgProvider();
+        parsingEngine.loadArgumentsIntoObject( argProvider );
+
+        Assert.assertNull(argProvider.value, "Value was unrequired and unspecified; contents should be null");
+    }
+
+    private class UnrequiredArgProvider { // optional Integer argument with no default
+        @Argument(required=false,doc="unrequired value")
+        public Integer value;
+    }
+
+    @Test(expectedExceptions=InvalidArgumentException.class)
+    public void invalidArgTest() { // an argument name no source declares is rejected
+        final String[] commandLine = new String[] { "--foo" };
+
+        parsingEngine.addArgumentSource( UnrequiredArgProvider.class );
+        parsingEngine.parse( commandLine );
+        parsingEngine.validate();
+    }
+
+    @Test(expectedExceptions= ReviewedGATKException.class)
+    public void duplicateLongNameTest() { // two args sharing a fullName is a definition error, caught at registration
+        parsingEngine.addArgumentSource( DuplicateLongNameProvider.class );
+    }
+
+    private class DuplicateLongNameProvider { // deliberately invalid: both fields claim fullName "myarg"
+        @Argument(fullName="myarg",doc="my arg")
+        public Integer foo;
+
+        @Argument(fullName="myarg", doc="my arg")
+        public Integer bar;
+    }
+
+    @Test(expectedExceptions= ReviewedGATKException.class)
+    public void duplicateShortNameTest() { // two args sharing a shortName is likewise rejected at registration
+        parsingEngine.addArgumentSource( DuplicateShortNameProvider.class );
+    }
+
+
+    private class DuplicateShortNameProvider { // deliberately invalid: both fields claim shortName "myarg"
+        @Argument(shortName="myarg", doc="my arg")
+        public Integer foo;
+
+        @Argument(shortName="myarg", doc="my arg")
+        public Integer bar;
+    }
+
+    @Test(expectedExceptions=UnmatchedArgumentException.class)
+    public void missingArgumentNameTest() { // a bare positional token with no declared args cannot be matched
+        final String[] commandLine = new String[] {"foo.txt"};
+
+        parsingEngine.addArgumentSource( NoArgProvider.class );
+        parsingEngine.parse( commandLine );
+        parsingEngine.validate();
+    }
+
+    private class NoArgProvider { // intentionally empty: a source declaring no arguments at all
+
+    }
+
+    @Test(expectedExceptions=UnmatchedArgumentException.class)
+    public void extraValueTest() { // a second value after a single-valued arg has nothing to bind to
+        final String[] commandLine = new String[] {"-I", "foo.txt", "bar.txt"};
+
+        parsingEngine.addArgumentSource( InputFileArgProvider.class );
+        parsingEngine.parse( commandLine );
+        parsingEngine.validate();
+    }
+
+    @Test(expectedExceptions=MissingArgumentException.class)
+    public void multipleInvalidArgTest() { // unknown "-N" tokens leave the required --value missing
+        final String[] commandLine = new String[] {"-N1", "-N2", "-N3"};
+
+        parsingEngine.addArgumentSource( RequiredArgProvider.class );
+        parsingEngine.parse( commandLine );
+        parsingEngine.validate();
+    }
+
+    @Test(expectedExceptions=TooManyValuesForArgumentException.class)
+    public void invalidArgCountTest() { // a scalar arg given three times fails validation
+        final String[] commandLine = new String[] {"--value","1","--value","2","--value","3"};
+
+        parsingEngine.addArgumentSource( RequiredArgProvider.class );
+        parsingEngine.parse( commandLine );
+        parsingEngine.validate();
+    }
+
+    @Test
+    public void packageProtectedArgTest() { // package-private fields are still injectable
+        final String[] commandLine = new String[] {"--foo", "1"};
+
+        parsingEngine.addArgumentSource( PackageProtectedArgProvider.class );
+        parsingEngine.parse( commandLine );
+        parsingEngine.validate();
+
+        PackageProtectedArgProvider argProvider = new PackageProtectedArgProvider();
+        parsingEngine.loadArgumentsIntoObject(argProvider);
+
+        Assert.assertEquals(argProvider.foo.intValue(), 1, "Argument is not correctly initialized");
+    }
+
+    private class PackageProtectedArgProvider { // field deliberately package-private (no modifier)
+        @Argument(doc="foo")
+        Integer foo;
+    }
+
+    @Test
+    public void derivedArgTest() { // arguments declared on a superclass are visible through the subclass
+        final String[] commandLine = new String[] {"--bar", "5"};
+
+        parsingEngine.addArgumentSource( DerivedArgProvider.class );
+        parsingEngine.parse( commandLine );
+        parsingEngine.validate();
+
+        DerivedArgProvider argProvider = new DerivedArgProvider();
+        parsingEngine.loadArgumentsIntoObject(argProvider);
+
+        Assert.assertEquals(argProvider.bar.intValue(), 5, "Argument is not correctly initialized");
+    }
+
+    private class DerivedArgProvider extends BaseArgProvider { // empty subclass — inherits @Argument bar from its base
+    }
+
+    private class BaseArgProvider { // base class supplying the inherited argument for derivedArgTest
+        @Argument(doc="bar")
+        public Integer bar;
+    }
+
+    @Test
+    public void correctDefaultArgNameTest() { // a camelCase field with no explicit fullName gets the lowercased default name
+        parsingEngine.addArgumentSource( CamelCaseArgProvider.class );
+
+        DefinitionMatcher matcher = ArgumentDefinitions.FullNameDefinitionMatcher;
+        ArgumentDefinition definition = parsingEngine.argumentDefinitions.findArgumentDefinition("myarg", matcher);
+
+        Assert.assertNotNull(definition, "Invalid default argument name assigned");
+    }
+
+    @SuppressWarnings("unused")
+    private class CamelCaseArgProvider { // field name myArg — correctDefaultArgNameTest expects default name "myarg"
+        @Argument(doc="my arg")
+        Integer myArg;
+    }
+
+    @Test(expectedExceptions=UnmatchedArgumentException.class)
+    public void booleanWithParameterTest() { // a boolean flag takes no value; the trailing "true" is unmatched
+        final String[] commandLine = new String[] {"--mybool", "true"};
+
+        parsingEngine.addArgumentSource( BooleanArgProvider.class );
+        parsingEngine.parse( commandLine );
+        parsingEngine.validate();
+    }
+
+    @SuppressWarnings("unused")
+    private class BooleanArgProvider { // bare boolean flag for booleanWithParameterTest
+        @Argument(doc="my bool")
+        boolean myBool;
+    }
+
+    @Test
+    public void validParseForAnalysisTypeTest() { // --analysis_type binds its value; other validations skipped
+        final String[] commandLine = new String[] {"--analysis_type", "Pileup" };
+
+        parsingEngine.addArgumentSource( AnalysisTypeArgProvider.class );
+        parsingEngine.parse( commandLine );
+        parsingEngine.validate( EnumSet.of(ParsingEngine.ValidationType.MissingRequiredArgument) );
+
+        AnalysisTypeArgProvider argProvider = new AnalysisTypeArgProvider();
+        parsingEngine.loadArgumentsIntoObject( argProvider );
+
+        Assert.assertEquals(argProvider.Analysis_Name,"Pileup","Argument is not correctly initialized");
+    }
+
+    private class AnalysisTypeArgProvider { // mirrors the engine's -T/--analysis_type argument
+        @Argument(fullName="analysis_type", shortName="T", doc="Type of analysis to run")
+        public String Analysis_Name = null;
+    }
+
+    @Test(expectedExceptions=TooManyValuesForArgumentException.class)
+    public void invalidParseForAnalysisTypeTest() { // giving the same arg via long and short forms counts as two values
+        final String[] commandLine = new String[] {"--analysis_type", "Pileup", "-T", "CountReads" };
+
+        parsingEngine.addArgumentSource( AnalysisTypeArgProvider.class );
+        parsingEngine.parse( commandLine );
+        parsingEngine.validate( EnumSet.of(ParsingEngine.ValidationType.MissingRequiredArgument) );
+    }
+
+    @Test(expectedExceptions=ArgumentsAreMutuallyExclusiveException.class)
+    public void mutuallyExclusiveArgumentsTest() { // foo alone is fine; foo together with its exclusiveOf partner bar throws
+        // Passing only foo should work fine...
+        String[] commandLine = new String[] {"--foo","5"};
+
+        parsingEngine.addArgumentSource( MutuallyExclusiveArgProvider.class );
+        parsingEngine.parse( commandLine );
+        parsingEngine.validate();
+
+        MutuallyExclusiveArgProvider argProvider = new MutuallyExclusiveArgProvider();
+        parsingEngine.loadArgumentsIntoObject( argProvider );
+
+        Assert.assertEquals(argProvider.foo.intValue(), 5, "Argument is not correctly initialized");
+
+        // But when foo and bar come together, danger!
+        commandLine = new String[] {"--foo","5","--bar","6"};
+
+        parsingEngine.parse( commandLine );
+        parsingEngine.validate(); // expected to raise the mutual-exclusion exception here
+    }
+
+    @SuppressWarnings("unused")
+    private class MutuallyExclusiveArgProvider { // foo declares exclusiveOf="bar"; bar itself is optional
+        @Argument(doc="foo",exclusiveOf="bar")
+        Integer foo;
+
+        @Argument(doc="bar",required=false)
+        Integer bar;
+    }
+
+    @Test(expectedExceptions=InvalidArgumentValueException.class)
+    public void argumentValidationTest() { // a value matching the validation regex passes; "foo" against \d+ throws
+        // Passing only foo should work fine...
+        String[] commandLine = new String[] {"--value","521"};
+
+        parsingEngine.addArgumentSource( ValidatingArgProvider.class );
+        parsingEngine.parse( commandLine );
+        parsingEngine.validate();
+
+        ValidatingArgProvider argProvider = new ValidatingArgProvider();
+        parsingEngine.loadArgumentsIntoObject( argProvider );
+
+        Assert.assertEquals(argProvider.value.intValue(), 521, "Argument is not correctly initialized");
+
+        // Try some invalid arguments
+        commandLine = new String[] {"--value","foo"};
+        parsingEngine.parse( commandLine );
+        parsingEngine.validate(); // expected to raise InvalidArgumentValueException here
+    }
+
+    private class ValidatingArgProvider { // argument with a regex validation constraint (digits only)
+        @Argument(doc="value",validation="\\d+")
+        Integer value;
+    }
+
+    @Test
+    public void argumentCollectionTest() { // args declared inside an @ArgumentCollection member are injected into it
+        String[] commandLine = new String[] { "--value", "5" };
+
+        parsingEngine.addArgumentSource( ArgumentCollectionProvider.class );
+        parsingEngine.parse( commandLine );
+        parsingEngine.validate();
+
+        ArgumentCollectionProvider argProvider = new ArgumentCollectionProvider();
+        parsingEngine.loadArgumentsIntoObject(argProvider);
+
+        Assert.assertEquals(argProvider.rap.value.intValue(), 5, "Argument is not correctly initialized");
+    }
+
+    private class ArgumentCollectionProvider { // wraps RequiredArgProvider as a nested argument collection
+        @ArgumentCollection
+        RequiredArgProvider rap = new RequiredArgProvider();
+    }
+
+    @Test(expectedExceptions= ReviewedGATKException.class)
+    public void multipleArgumentCollectionTest() { // more than one @ArgumentCollection per source is rejected at registration
+        parsingEngine.addArgumentSource( MultipleArgumentCollectionProvider.class );
+    }
+
+    @SuppressWarnings("unused")
+    private class MultipleArgumentCollectionProvider { // deliberately invalid: two @ArgumentCollection members
+        @ArgumentCollection
+        RequiredArgProvider rap1 = new RequiredArgProvider();
+        @ArgumentCollection
+        RequiredArgProvider rap2 = new RequiredArgProvider();
+    }
+
+    // --------------------------------------------------------------------------------
+    //
+    // Tests of the RodBinding<T> system
+    //
+    // --------------------------------------------------------------------------------
+
+    private class SingleRodBindingArgProvider { // required @Input RodBinding used by the rod-binding tests below
+        @Input(fullName="binding", shortName="V", required=true)
+        public RodBinding<Feature> binding;
+    }
+
+    @Test
+    public void basicRodBindingArgumentTest() { // "-V:vcf file" populates name, source, type, bound flag, and the positional tag
+        final String[] commandLine = new String[] {"-V:vcf",NON_EXISTANT_FILENAME_VCF};
+
+        parsingEngine.addArgumentSource( SingleRodBindingArgProvider.class );
+        parsingEngine.parse( commandLine );
+        parsingEngine.validate();
+
+        SingleRodBindingArgProvider argProvider = new SingleRodBindingArgProvider();
+        parsingEngine.loadArgumentsIntoObject( argProvider );
+
+        Assert.assertEquals(argProvider.binding.getName(), "binding", "Name isn't set properly");
+        Assert.assertEquals(argProvider.binding.getSource(), NON_EXISTANT_FILENAME_VCF, "Source isn't set to its expected value");
+        Assert.assertEquals(argProvider.binding.getType(), Feature.class, "Type isn't set to its expected value");
+        Assert.assertEquals(argProvider.binding.isBound(), true, "Bound() isn't returning its expected value");
+        Assert.assertEquals(argProvider.binding.getTags().getPositionalTags().size(), 1, "Tags aren't correctly set"); // the ":vcf" suffix becomes one positional tag
+    }
+
+    private class ShortNameOnlyRodBindingArgProvider { // optional RodBinding declared with a short name only
+        @Input(shortName="short", required=false)
+        public RodBinding<Feature> binding; // = RodBinding.makeUnbound(Feature.class);
+    }
+
+    @Test
+    public void shortNameOnlyRodBindingArgumentTest() { // a binding declared with only a short name still derives its field name
+        final String[] commandLine = new String[] {"-short:vcf",NON_EXISTANT_FILENAME_VCF};
+
+        parsingEngine.addArgumentSource( ShortNameOnlyRodBindingArgProvider.class );
+        parsingEngine.parse( commandLine );
+        parsingEngine.validate();
+
+        ShortNameOnlyRodBindingArgProvider argProvider = new ShortNameOnlyRodBindingArgProvider();
+        parsingEngine.loadArgumentsIntoObject( argProvider );
+
+        Assert.assertEquals(argProvider.binding.getName(), "binding", "Name isn't set properly"); // name comes from the field, not the short name
+        Assert.assertEquals(argProvider.binding.getSource(), NON_EXISTANT_FILENAME_VCF, "Source isn't set to its expected value");
+        Assert.assertEquals(argProvider.binding.getType(), Feature.class, "Type isn't set to its expected value");
+        Assert.assertEquals(argProvider.binding.isBound(), true, "Bound() isn't returning its expected value");
+        Assert.assertEquals(argProvider.binding.getTags().getPositionalTags().size(), 1, "Tags aren't correctly set");
+    }
+
+    private class OptionalRodBindingArgProvider { // two optional bindings: one uninitialized, one explicitly null
+        @Input(fullName="binding", shortName="V", required=false)
+        public RodBinding<Feature> binding;
+
+        @Input(fullName="bindingNull", shortName="VN", required=false)
+        public RodBinding<VariantContext> bindingNull = null;
+    }
+
+    @Test
+    public void optionalRodBindingArgumentTest() { // unspecified optional bindings become unbound placeholders, never null
+        final String[] commandLine = new String[] {};
+
+        parsingEngine.addArgumentSource( OptionalRodBindingArgProvider.class );
+        parsingEngine.parse( commandLine );
+        parsingEngine.validate();
+
+        OptionalRodBindingArgProvider argProvider = new OptionalRodBindingArgProvider();
+        parsingEngine.loadArgumentsIntoObject( argProvider );
+
+        Assert.assertNotNull(argProvider.binding, "Default value not applied correctly to RodBinding"); // fixed message ("corrected" -> "correctly")
+        Assert.assertEquals(argProvider.binding.getName(), RodBinding.UNBOUND_VARIABLE_NAME, "Name isn't set properly");
+        Assert.assertEquals(argProvider.binding.getSource(), RodBinding.UNBOUND_SOURCE, "Source isn't set to its expected value");
+        Assert.assertEquals(argProvider.binding.getType(), Feature.class, "Type isn't set to its expected value");
+        Assert.assertEquals(argProvider.binding.isBound(), false, "Bound() isn't returning its expected value");
+        Assert.assertEquals(argProvider.binding.getTags().getPositionalTags().size(), 0, "Tags aren't correctly set");
+
+        Assert.assertNotNull(argProvider.bindingNull, "Default value not applied correctly to RodBinding"); // fixed message ("corrected" -> "correctly")
+        Assert.assertEquals(argProvider.bindingNull.getName(), RodBinding.UNBOUND_VARIABLE_NAME, "Name isn't set properly");
+        Assert.assertEquals(argProvider.bindingNull.getSource(), RodBinding.UNBOUND_SOURCE, "Source isn't set to its expected value");
+        Assert.assertEquals(argProvider.bindingNull.getType(), VariantContext.class, "Type isn't set to its expected value");
+        Assert.assertEquals(argProvider.bindingNull.isBound(), false, "Bound() isn't returning its expected value");
+        Assert.assertEquals(argProvider.bindingNull.getTags().getPositionalTags().size(), 0, "Tags aren't correctly set");
+    }
+
+    @Test(expectedExceptions = UserException.class)
+    public void rodBindingArgumentTestMissingType() {
+        final String[] commandLine = new String[] {"-V",NON_EXISTANT_FILENAME_VCF};
+
+        parsingEngine.addArgumentSource( SingleRodBindingArgProvider.class );
+        parsingEngine.parse( commandLine );
+        parsingEngine.validate();
+
+        SingleRodBindingArgProvider argProvider = new SingleRodBindingArgProvider();
+        parsingEngine.loadArgumentsIntoObject(argProvider);
+    }
+
+    @Test(expectedExceptions = UserException.class)
+    public void rodBindingArgumentTestTooManyTags() {
+        final String[] commandLine = new String[] {"-V:x,y,z",NON_EXISTANT_FILENAME_VCF};
+
+        parsingEngine.addArgumentSource( SingleRodBindingArgProvider.class );
+        parsingEngine.parse( commandLine );
+        parsingEngine.validate();
+
+        SingleRodBindingArgProvider argProvider = new SingleRodBindingArgProvider();
+        parsingEngine.loadArgumentsIntoObject(argProvider);
+    }
+
    /** Argument container with a single required VariantContext binding on "--binding"/"-V". */
    private class VariantContextRodBindingArgProvider {
        @Input(fullName = "binding", shortName="V")
        public RodBinding<VariantContext> binding;
    }
+
+    @Test
+    public void variantContextBindingArgumentTest() {
+        final String[] commandLine = new String[] {"-V:vcf",NON_EXISTANT_FILENAME_VCF};
+
+        parsingEngine.addArgumentSource( VariantContextRodBindingArgProvider.class );
+        parsingEngine.parse( commandLine );
+        parsingEngine.validate();
+
+        VariantContextRodBindingArgProvider argProvider = new VariantContextRodBindingArgProvider();
+        parsingEngine.loadArgumentsIntoObject( argProvider );
+
+        Assert.assertEquals(argProvider.binding.getName(), "binding", "Name isn't set properly");
+        Assert.assertEquals(argProvider.binding.getSource(), NON_EXISTANT_FILENAME_VCF, "Source isn't set to its expected value");
+        Assert.assertEquals(argProvider.binding.getType(), VariantContext.class, "Type isn't set to its expected value");
+        Assert.assertEquals(argProvider.binding.getTags().getPositionalTags().size(), 1, "Tags aren't correctly set");
+    }
+
    /** Argument container accepting repeated "-V" values into a list of Feature bindings. */
    private class ListRodBindingArgProvider {
        @Input(fullName = "binding", shortName="V", required=false)
        public List<RodBinding<Feature>> bindings;
    }
+
+    @Test
+    public void listRodBindingArgumentTest() {
+        final String[] commandLine = new String[] {"-V:vcf",NON_EXISTANT_FILENAME_VCF};
+
+        parsingEngine.addArgumentSource( ListRodBindingArgProvider.class );
+        parsingEngine.parse( commandLine );
+        parsingEngine.validate();
+
+        ListRodBindingArgProvider argProvider = new ListRodBindingArgProvider();
+        parsingEngine.loadArgumentsIntoObject( argProvider );
+
+        Assert.assertEquals(argProvider.bindings.size(), 1, "Unexpected number of bindings");
+        RodBinding<Feature> binding = argProvider.bindings.get(0);
+        Assert.assertEquals(binding.getName(), "binding", "Name isn't set properly");
+        Assert.assertEquals(binding.getSource(), NON_EXISTANT_FILENAME_VCF, "Source isn't set to its expected value");
+        Assert.assertEquals(binding.getType(), Feature.class, "Type isn't set to its expected value");
+        Assert.assertEquals(binding.getTags().getPositionalTags().size(), 1, "Tags aren't correctly set");
+    }
+
+    @Test
+    public void listRodBindingArgumentTest2Args() {
+        final String[] commandLine = new String[] {"-V:vcf",NON_EXISTANT_FILENAME_VCF, "-V:vcf", "bar.vcf"};
+
+        parsingEngine.addArgumentSource( ListRodBindingArgProvider.class );
+        parsingEngine.parse( commandLine );
+        parsingEngine.validate();
+
+        ListRodBindingArgProvider argProvider = new ListRodBindingArgProvider();
+        parsingEngine.loadArgumentsIntoObject( argProvider );
+
+        Assert.assertEquals(argProvider.bindings.size(), 2, "Unexpected number of bindings");
+
+        RodBinding<Feature> binding = argProvider.bindings.get(0);
+        Assert.assertEquals(binding.getName(), "binding", "Name isn't set properly");
+        Assert.assertEquals(binding.getSource(), NON_EXISTANT_FILENAME_VCF, "Source isn't set to its expected value");
+        Assert.assertEquals(binding.getType(), Feature.class, "Type isn't set to its expected value");
+        Assert.assertEquals(binding.getTags().getPositionalTags().size(), 1, "Tags aren't correctly set");
+
+        RodBinding<Feature> binding2 = argProvider.bindings.get(1);
+        Assert.assertEquals(binding2.getName(), "binding2", "Name isn't set properly");
+        Assert.assertEquals(binding2.getSource(), "bar.vcf", "Source isn't set to its expected value");
+        Assert.assertEquals(binding2.getType(), Feature.class, "Type isn't set to its expected value");
+        Assert.assertEquals(binding2.getTags().getPositionalTags().size(), 1, "Tags aren't correctly set");
+    }
+
+    @Test
+    public void listRodBindingArgumentTest0Args() {
+        final String[] commandLine = new String[] {};
+
+        parsingEngine.addArgumentSource( ListRodBindingArgProvider.class );
+        parsingEngine.parse( commandLine );
+        parsingEngine.validate();
+
+        ListRodBindingArgProvider argProvider = new ListRodBindingArgProvider();
+        parsingEngine.loadArgumentsIntoObject( argProvider );
+
+        Assert.assertNull(argProvider.bindings, "Bindings were not null");
+    }
+
+    @Test
+    public void listRodBindingArgumentTestExplicitlyNamed() {
+        final String[] commandLine = new String[] {"-V:foo,vcf",NON_EXISTANT_FILENAME_VCF, "-V:foo,vcf", "bar.vcf"};
+
+        parsingEngine.addArgumentSource( ListRodBindingArgProvider.class );
+        parsingEngine.parse( commandLine );
+        parsingEngine.validate();
+
+        ListRodBindingArgProvider argProvider = new ListRodBindingArgProvider();
+        parsingEngine.loadArgumentsIntoObject( argProvider );
+
+        Assert.assertEquals(argProvider.bindings.size(), 2, "Unexpected number of bindings");
+        Assert.assertEquals(argProvider.bindings.get(0).getName(), "foo", "Name isn't set properly");
+        Assert.assertEquals(argProvider.bindings.get(1).getName(), "foo2", "Name isn't set properly");
+    }
+
    // Real on-disk fixtures for the dynamic tribble-type-detection tests below.
    private final static String HISEQ_VCF = privateTestDir + "HiSeq.10000.vcf";
    // A non-VCF file; used to exercise the "unknown tribble type" failure path.
    private final static String TRANCHES_FILE = privateTestDir + "tranches.6.txt";
+
+    @Test
+    public void variantContextBindingTestDynamicTyping1() {
+        final String[] commandLine = new String[] {"-V", HISEQ_VCF};
+
+        parsingEngine.addArgumentSource( VariantContextRodBindingArgProvider.class );
+        parsingEngine.parse( commandLine );
+        parsingEngine.validate();
+
+        VariantContextRodBindingArgProvider argProvider = new VariantContextRodBindingArgProvider();
+        parsingEngine.loadArgumentsIntoObject( argProvider );
+
+        Assert.assertEquals(argProvider.binding.getName(), "binding", "Name isn't set properly");
+        Assert.assertEquals(argProvider.binding.getSource(), HISEQ_VCF, "Source isn't set to its expected value");
+        Assert.assertEquals(argProvider.binding.getType(), VariantContext.class, "Type isn't set to its expected value");
+        Assert.assertEquals(argProvider.binding.getTags().getPositionalTags().size(), 0, "Tags aren't correctly set");
+    }
+
+    @Test
+    public void variantContextBindingTestDynamicTypingNameAsSingleArgument() {
+        final String[] commandLine = new String[] {"-V:name", HISEQ_VCF};
+
+        parsingEngine.addArgumentSource( VariantContextRodBindingArgProvider.class );
+        parsingEngine.parse( commandLine );
+        parsingEngine.validate();
+
+        VariantContextRodBindingArgProvider argProvider = new VariantContextRodBindingArgProvider();
+        parsingEngine.loadArgumentsIntoObject( argProvider );
+
+        Assert.assertEquals(argProvider.binding.getName(), "name", "Name isn't set properly");
+        Assert.assertEquals(argProvider.binding.getSource(), HISEQ_VCF, "Source isn't set to its expected value");
+        Assert.assertEquals(argProvider.binding.getType(), VariantContext.class, "Type isn't set to its expected value");
+        Assert.assertEquals(argProvider.binding.getTags().getPositionalTags().size(), 1, "Tags aren't correctly set");
+    }
+
+    @Test()
+    public void variantContextBindingTestDynamicTypingTwoTagsPassing() {
+        final String[] commandLine = new String[] {"-V:name,vcf", HISEQ_VCF};
+
+        parsingEngine.addArgumentSource( VariantContextRodBindingArgProvider.class );
+        parsingEngine.parse( commandLine );
+        parsingEngine.validate();
+
+        VariantContextRodBindingArgProvider argProvider = new VariantContextRodBindingArgProvider();
+        parsingEngine.loadArgumentsIntoObject( argProvider );
+
+        Assert.assertEquals(argProvider.binding.getName(), "name", "Name isn't set properly");
+        Assert.assertEquals(argProvider.binding.getSource(), HISEQ_VCF, "Source isn't set to its expected value");
+        Assert.assertEquals(argProvider.binding.getType(), VariantContext.class, "Type isn't set to its expected value");
+        Assert.assertEquals(argProvider.binding.getTags().getPositionalTags().size(), 2, "Tags aren't correctly set");
+    }
+
+    @Test()
+    public void variantContextBindingTestDynamicTypingTwoTagsCausingTypeFailure() {
+        final String[] commandLine = new String[] {"-V:name,beagle", HISEQ_VCF};
+
+        parsingEngine.addArgumentSource( VariantContextRodBindingArgProvider.class );
+        parsingEngine.parse( commandLine );
+        parsingEngine.validate();
+
+        VariantContextRodBindingArgProvider argProvider = new VariantContextRodBindingArgProvider();
+        parsingEngine.loadArgumentsIntoObject(argProvider);
+
+        Assert.assertEquals(argProvider.binding.getName(), "name", "Name isn't set properly");
+        Assert.assertEquals(argProvider.binding.getSource(), HISEQ_VCF, "Source isn't set to its expected value");
+        Assert.assertEquals(argProvider.binding.getType(), VariantContext.class, "Type isn't set to its expected value");
+        Assert.assertEquals(argProvider.binding.getTribbleType(), "beagle", "Type isn't set to its expected value");
+        Assert.assertEquals(argProvider.binding.getTags().getPositionalTags().size(), 2, "Tags aren't correctly set");
+    }
+
+    @Test(expectedExceptions = UserException.class)
+    public void variantContextBindingTestDynamicTypingUnknownTribbleType() {
+        final String[] commandLine = new String[] {"-V", TRANCHES_FILE};
+
+        parsingEngine.addArgumentSource( VariantContextRodBindingArgProvider.class );
+        parsingEngine.parse( commandLine );
+        parsingEngine.validate();
+
+        VariantContextRodBindingArgProvider argProvider = new VariantContextRodBindingArgProvider();
+        parsingEngine.loadArgumentsIntoObject( argProvider );
+    }
+
+    @Test
+    public void argumentListTest() throws IOException {
+        File argsFile = BaseTest.createTempListFile("args.", "-I na12878.bam");
+        try {
+            final String[] commandLine = new String[] {"-args", argsFile.getPath()};
+            parsingEngine.addArgumentSource(InputFileArgProvider.class);
+            parsingEngine.parse(commandLine);
+            parsingEngine.validate();
+
+            InputFileArgProvider argProvider = new InputFileArgProvider();
+            parsingEngine.loadArgumentsIntoObject(argProvider);
+
+            Assert.assertEquals(argProvider.inputFile, "na12878.bam", "Argument is not correctly initialized");
+        } finally {
+            FileUtils.deleteQuietly(argsFile);
+        }
+    }
+
    /**
     * Argument container exercising every combination of hard (enforced) and soft
     * (recommended) numeric range bounds, across int, Integer, byte, and double.
     */
    @SuppressWarnings("unused")
    private class NumericRangeArgProvider {
        // --- hard bounds on int ---
        @Argument(fullName = "intWithHardMinAndMax", minValue = 5, maxValue = 10)
        public int intWithHardMinAndMax;

        @Argument(fullName = "intWithHardMin", minValue = 5)
        public int intWithHardMin;

        @Argument(fullName = "intWithHardMax", maxValue = 10)
        public int intWithHardMax;

        // --- soft (recommended-only) bounds; violations should not throw ---
        @Argument(fullName = "intWithSoftMinAndMax", minRecommendedValue = 5, maxRecommendedValue = 10)
        public int intWithSoftMinAndMax;

        @Argument(fullName = "intWithSoftMin", minRecommendedValue = 5)
        public int intWithSoftMin;

        @Argument(fullName = "intWithSoftMax", maxRecommendedValue = 10)
        public int intWithSoftMax;

        // --- both hard and soft bounds together ---
        @Argument(fullName = "intWithHardAndSoftMinAndMax", minValue = 5, minRecommendedValue = 7, maxValue = 10, maxRecommendedValue = 9)
        public int intWithHardAndSoftMinAndMax;

        @Argument(fullName = "intWithHardAndSoftMin", minValue = 5, minRecommendedValue = 7)
        public int intWithHardAndSoftMin;

        @Argument(fullName = "intWithHardAndSoftMax", maxValue = 10, maxRecommendedValue = 8)
        public int intWithHardAndSoftMax;

        // Field default (-1) lies outside its own hard range; only explicit values are checked.
        @Argument(fullName = "intWithHardMinAndMaxDefaultOutsideRange", minValue = 5, maxValue = 10)
        public int intWithHardMinAndMaxDefaultOutsideRange = -1;

        // --- boxed and narrow integral types ---
        @Argument(fullName = "integerWithHardMinAndMax", minValue = 5, maxValue = 10)
        public Integer integerWithHardMinAndMax;

        @Argument(fullName = "byteWithHardMinAndMax", minValue = 5, maxValue = 10)
        public byte byteWithHardMinAndMax;

        @Argument(fullName = "byteWithHardMin", minValue = 5)
        public byte byteWithHardMin;

        @Argument(fullName = "byteWithHardMax", maxValue = 10)
        public byte byteWithHardMax;

        // --- floating point ---
        @Argument(fullName = "doubleWithHardMinAndMax", minValue = 5.5, maxValue = 10.0)
        public double doubleWithHardMinAndMax;

        @Argument(fullName = "doubleWithHardMin", minValue = 5.5)
        public double doubleWithHardMin;

        @Argument(fullName = "doubleWithHardMax", maxValue = 10.0)
        public double doubleWithHardMax;
    }
+
+    @DataProvider(name = "NumericRangeConstraintViolationDataProvider")
+    public Object[][] numericRangeConstraintViolationDataProvider() {
+        return new Object[][] {
+                { new String[]{"--intWithHardMinAndMax", "11"} },
+                { new String[]{"--intWithHardMinAndMax", "4"} },
+                { new String[]{"--intWithHardMin", "4"} },
+                { new String[]{"--intWithHardMax", "11"} },
+                { new String[]{"--intWithHardAndSoftMinAndMax", "11"} },
+                { new String[]{"--intWithHardAndSoftMinAndMax", "4"} },
+                { new String[]{"--intWithHardAndSoftMin", "4"} },
+                { new String[]{"--intWithHardAndSoftMax", "11"} },
+                { new String[]{"--intWithHardMinAndMaxDefaultOutsideRange", "11"} },
+                { new String[]{"--intWithHardMinAndMaxDefaultOutsideRange", "4"} },
+                { new String[]{"--integerWithHardMinAndMax", "11"} },
+                { new String[]{"--integerWithHardMinAndMax", "4"} },
+                { new String[]{"--byteWithHardMinAndMax", "11"} },
+                { new String[]{"--byteWithHardMinAndMax", "4"} },
+                { new String[]{"--byteWithHardMin", "4"} },
+                { new String[]{"--byteWithHardMax", "11"} },
+                { new String[]{"--doubleWithHardMinAndMax", "5.4"} },
+                { new String[]{"--doubleWithHardMinAndMax", "10.1"} },
+                { new String[]{"--doubleWithHardMin", "5.4"} },
+                { new String[]{"--doubleWithHardMax", "10.1"} }
+        };
+    }
+
    /** Each out-of-range command line must be rejected with ArgumentValueOutOfRangeException. */
    @Test(dataProvider = "NumericRangeConstraintViolationDataProvider",
          expectedExceptions = ArgumentValueOutOfRangeException.class)
    public void testNumericRangeWithConstraintViolation( final String[] commandLine ) {
        runNumericArgumentRangeTest(commandLine);
    }
+
    /**
     * Command lines whose values are within hard bounds -- including values that
     * violate only soft (recommended) bounds, which must not throw.
     */
    @DataProvider(name = "NumericRangeWithoutConstraintViolationDataProvider")
    public Object[][] numericRangeWithoutConstraintViolationDataProvider() {
        return new Object[][] {
                { new String[]{"--intWithHardMinAndMax", "10"} },
                { new String[]{"--intWithHardMinAndMax", "5"} },
                { new String[]{"--intWithHardMinAndMax", "7"} },
                { new String[]{"--intWithHardMin", "11"} },
                { new String[]{"--intWithHardMax", "4"} },
                { new String[]{"--intWithSoftMinAndMax", "11"} },
                { new String[]{"--intWithSoftMinAndMax", "4"} },
                { new String[]{"--intWithSoftMin", "4"} },
                { new String[]{"--intWithSoftMax", "11"} },
                { new String[]{"--intWithHardAndSoftMinAndMax", "5"} },
                { new String[]{"--intWithHardAndSoftMinAndMax", "7"} },
                { new String[]{"--intWithHardAndSoftMinAndMax", "8"} },
                { new String[]{"--intWithHardAndSoftMinAndMax", "9"} },
                { new String[]{"--intWithHardAndSoftMinAndMax", "10"} },
                { new String[]{"--intWithHardAndSoftMin", "5"} },
                { new String[]{"--intWithHardAndSoftMin", "6"} },
                { new String[]{"--intWithHardAndSoftMin", "7"} },
                { new String[]{"--intWithHardAndSoftMax", "10"} },
                { new String[]{"--intWithHardAndSoftMax", "9"} },
                { new String[]{"--intWithHardAndSoftMax", "8"} },
                { new String[]{"--intWithHardMinAndMaxDefaultOutsideRange", "10"} },
                { new String[]{"--intWithHardMinAndMaxDefaultOutsideRange", "5"} },
                { new String[]{"--intWithHardMinAndMaxDefaultOutsideRange", "7"} },
                { new String[]{"--integerWithHardMinAndMax", "10"} },
                { new String[]{"--integerWithHardMinAndMax", "5"} },
                { new String[]{"--byteWithHardMinAndMax", "10"} },
                { new String[]{"--byteWithHardMinAndMax", "5"} },
                { new String[]{"--byteWithHardMinAndMax", "7"} },
                { new String[]{"--byteWithHardMin", "5"} },
                { new String[]{"--byteWithHardMax", "10"} },
                { new String[]{"--doubleWithHardMinAndMax", "5.5"} },
                { new String[]{"--doubleWithHardMinAndMax", "10.0"} },
                { new String[]{"--doubleWithHardMinAndMax", "7.5"} },
                { new String[]{"--doubleWithHardMin", "5.5"} },
                { new String[]{"--doubleWithHardMin", "15.5"} },
                { new String[]{"--doubleWithHardMax", "10.0"} },
                { new String[]{"--doubleWithHardMax", "7.5"} }
        };
    }
+
    /** In-range (or soft-bound-only) values must parse and load without any exception. */
    @Test(dataProvider = "NumericRangeWithoutConstraintViolationDataProvider")
    public void testNumericRangeWithoutConstraintViolation( final String[] commandLine ) {
        // These tests succeed if no exception is thrown, since no constraints have been violated
        runNumericArgumentRangeTest(commandLine);
    }
+
+    private void runNumericArgumentRangeTest( final String[] commandLine ) {
+        parsingEngine.addArgumentSource(NumericRangeArgProvider.class);
+        parsingEngine.parse(commandLine);
+
+        NumericRangeArgProvider argProvider = new NumericRangeArgProvider();
+        parsingEngine.loadArgumentsIntoObject(argProvider);
+    }
+
    /** Argument container covering a spread of supported argument types, all optional. */
    @SuppressWarnings("unused")
    private class VariedTypeArgProvider {
        @Argument(fullName = "intVal", required=false)
        private int anInt;

        @Argument(fullName = "stringVal", required=false)
        private String aString;

        @Argument(fullName = "enumVal", required=false)
        private TestEnum anEnum;

        @Argument(fullName = "fileVal", required=false)
        private File aFile;

        // Collection-typed argument: may receive multiple values.
        @Argument(fullName = "stringSet", required=false)
        private Set<String> someStrings;

        @Argument(fullName = "intervalVal", required=false)
        private IntervalBinding<Feature> anInterval;
    }
+
+    @DataProvider(name = "MissingArgumentValueDataProvider")
+    public Object[][] missingArgumentDataProvider() {
+        return new Object[][]{
+                { new String[]{"--intVal"} },
+                { new String[]{"--stringVal"} },
+                { new String[]{"--enumVal"} },
+                { new String[]{"--fileVal"} },
+                { new String[]{"--stringSet"} },
+                { new String[]{"--stringSet", "aha", "--stringSet"} },
+                { new String[]{"--intervalVal"} }
+        };
+    }
+
+    @Test(dataProvider = "MissingArgumentValueDataProvider",
+          expectedExceptions = {InvalidArgumentValueException.class, MissingArgumentValueException.class})
+    public void testMissingArguments( final String[] commandLine ) {
+        parsingEngine.addArgumentSource( VariedTypeArgProvider.class );
+        parsingEngine.parse( commandLine );
+        parsingEngine.validate();
+
+        VariedTypeArgProvider argProvider = new VariedTypeArgProvider();
+        parsingEngine.loadArgumentsIntoObject(argProvider);
+    }
+
+}
diff --git a/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/commandline/RodBindingCollectionUnitTest.java b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/commandline/RodBindingCollectionUnitTest.java
new file mode 100644
index 0000000..7fdd7b8
--- /dev/null
+++ b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/commandline/RodBindingCollectionUnitTest.java
@@ -0,0 +1,133 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.commandline;
+
+import org.broadinstitute.gatk.utils.BaseTest;
+import org.broadinstitute.gatk.utils.exceptions.UserException;
+import htsjdk.variant.variantcontext.VariantContext;
+import org.testng.Assert;
+import org.testng.annotations.BeforeMethod;
+import org.testng.annotations.Test;
+
+import java.io.File;
+import java.io.FileWriter;
+import java.io.IOException;
+import java.util.Collection;
+
+public class RodBindingCollectionUnitTest extends BaseTest {
+
+    private ParsingEngine parsingEngine;
+    private Tags mytags;
+
+    private static final String defaultTagString = "VCF";
+    private static final String testVCFFileName = privateTestDir + "empty.vcf";
+    private static final String testListFileName = createTempListFile("oneVCF", testVCFFileName).getAbsolutePath();
+
+    @BeforeMethod
+    public void setUp() {
+        parsingEngine = new ParsingEngine(null);
+        RodBinding.resetNameCounter();
+        mytags = new Tags();
+        mytags.addPositionalTag(defaultTagString);
+    }
+
+    private class RodBindingCollectionArgProvider {
+        @Argument(fullName="input",doc="input",shortName="V")
+        public RodBindingCollection<VariantContext> input;
+    }
+
+    @Test
+    public void testStandardVCF() {
+        final String[] commandLine = new String[] {"-V", testVCFFileName};
+
+        parsingEngine.addArgumentSource( RodBindingCollectionArgProvider.class );
+        parsingEngine.parse( commandLine );
+        parsingEngine.validate();
+
+        final RodBindingCollectionArgProvider argProvider = new RodBindingCollectionArgProvider();
+        parsingEngine.loadArgumentsIntoObject( argProvider );
+
+        Assert.assertEquals(argProvider.input.getRodBindings().iterator().next().getSource(), testVCFFileName, "Argument is not correctly initialized");
+    }
+
+    @Test
+    public void testList() {
+        final String[] commandLine = new String[] {"-V", testListFileName};
+
+        parsingEngine.addArgumentSource(RodBindingCollectionArgProvider.class);
+        parsingEngine.parse( commandLine );
+        parsingEngine.validate();
+
+        final RodBindingCollectionArgProvider argProvider = new RodBindingCollectionArgProvider();
+        parsingEngine.loadArgumentsIntoObject( argProvider );
+
+        Assert.assertEquals(argProvider.input.getRodBindings().iterator().next().getSource(), testVCFFileName, "Argument is not correctly initialized");
+    }
+
+    @Test
+    public void testDefaultTagsInFile() throws IOException {
+
+        final File testFile = createTempListFile("RodBindingCollectionUnitTest.defaultTags", testVCFFileName);
+
+        ArgumentTypeDescriptor.getRodBindingsCollection(testFile, parsingEngine, VariantContext.class, "foo", mytags, "input");
+
+        final Collection<RodBinding> bindings = parsingEngine.getRodBindings();
+        Assert.assertNotNull(bindings);
+        Assert.assertEquals(bindings.size(), 1);
+
+        final RodBinding binding = bindings.iterator().next();
+        Assert.assertEquals(parsingEngine.getTags(binding), mytags);
+    }
+
+    @Test(expectedExceptions = UserException.BadArgumentValue.class)
+    public void testDuplicateEntriesInFile() throws IOException {
+
+        final File testFile = createTempListFile("RodBindingCollectionUnitTest.variantListWithDuplicates", testVCFFileName, testVCFFileName);
+
+        ArgumentTypeDescriptor.getRodBindingsCollection(testFile, parsingEngine, VariantContext.class, "foo", mytags, "input");
+    }
+
+    @Test(expectedExceptions = UserException.BadArgumentValue.class)
+    public void testValidateEmptyFile() throws IOException {
+        final File testFile = createTempListFile("RodBindingCollectionUnitTest.emptyVCFList");
+
+        ArgumentTypeDescriptor.getRodBindingsCollection(testFile, parsingEngine, VariantContext.class, "foo", mytags, "input");
+    }
+
+    @Test
+    public void testOverrideTagsInFile() throws IOException {
+        final File testFile = createTempListFile("RodBindingCollectionUnitTest.overrideTags", "foo " + testVCFFileName);
+
+        ArgumentTypeDescriptor.getRodBindingsCollection(testFile, parsingEngine, VariantContext.class, "foo", mytags, "input");
+
+        final Collection<RodBinding> bindings = parsingEngine.getRodBindings();
+        Assert.assertNotNull(bindings);
+        Assert.assertEquals(bindings.size(), 1);
+
+        final RodBinding binding = bindings.iterator().next();
+        Assert.assertNotEquals(parsingEngine.getTags(binding), mytags);
+    }
+}
diff --git a/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/commandline/RodBindingUnitTest.java b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/commandline/RodBindingUnitTest.java
new file mode 100644
index 0000000..efed9c1
--- /dev/null
+++ b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/commandline/RodBindingUnitTest.java
@@ -0,0 +1,82 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.commandline;
+
+import org.broadinstitute.gatk.utils.BaseTest;
+import htsjdk.variant.variantcontext.VariantContext;
+import org.testng.Assert;
+import org.testng.annotations.Test;
+import org.testng.annotations.BeforeMethod;
+
/**
 * Unit tests for RodBinding: standard construction, unbound placeholder bindings,
 * and automatic name de-duplication when the same binding name is reused.
 * (The previous javadoc, "Test suite for the parsing engine", described the wrong class.)
 */
public class RodBindingUnitTest extends BaseTest {
    // Shared empty tag set passed to every binding under test.
    Tags mytags = new Tags();

    @BeforeMethod
    public void setUp() {
        // Reset RodBinding's static name counter so de-duplication suffixes are deterministic per test.
        RodBinding.resetNameCounter();
    }

    /** A fully-specified binding reports back exactly the values it was constructed with. */
    @Test
    public void testStandardRodBinding() {
        RodBinding<VariantContext> b = new RodBinding<VariantContext>(VariantContext.class, "b", "foo", "vcf", mytags);
        Assert.assertEquals(b.getName(), "b");
        Assert.assertEquals(b.getType(), VariantContext.class);
        Assert.assertEquals(b.getSource(), "foo");
        Assert.assertEquals(b.getTribbleType(), "vcf");
        Assert.assertEquals(b.isBound(), true);
    }

    /** makeUnbound() yields a placeholder carrying the UNBOUND_* sentinel values with isBound() == false. */
    @Test
    public void testUnboundRodBinding() {
        RodBinding<VariantContext> u = RodBinding.makeUnbound(VariantContext.class);
        Assert.assertEquals(u.getName(), RodBinding.UNBOUND_VARIABLE_NAME);
        Assert.assertEquals(u.getSource(), RodBinding.UNBOUND_SOURCE);
        Assert.assertEquals(u.getType(), VariantContext.class);
        Assert.assertEquals(u.getTribbleType(), RodBinding.UNBOUND_TRIBBLE_TYPE);
        Assert.assertEquals(u.isBound(), false);
    }

    /** Reusing a name gives the second binding a numeric suffix: "binding" then "binding2". */
    @Test
    public void testMultipleBindings() {
        String name = "binding";
        RodBinding<VariantContext> b1 = new RodBinding<VariantContext>(VariantContext.class, name, "foo", "vcf", mytags);
        Assert.assertEquals(b1.getName(), name);
        Assert.assertEquals(b1.getType(), VariantContext.class);
        Assert.assertEquals(b1.getSource(), "foo");
        Assert.assertEquals(b1.getTribbleType(), "vcf");
        Assert.assertEquals(b1.isBound(), true);

        RodBinding<VariantContext> b2 = new RodBinding<VariantContext>(VariantContext.class, name, "foo", "vcf", mytags);
        Assert.assertEquals(b2.getName(), name + "2");
        Assert.assertEquals(b2.getType(), VariantContext.class);
        Assert.assertEquals(b2.getSource(), "foo");
        Assert.assertEquals(b2.getTribbleType(), "vcf");
        Assert.assertEquals(b2.isBound(), true);
    }
}
diff --git a/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/downsampling/AlleleBiasedDownsamplingUtilsUnitTest.java b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/downsampling/AlleleBiasedDownsamplingUtilsUnitTest.java
new file mode 100644
index 0000000..b5bea32
--- /dev/null
+++ b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/downsampling/AlleleBiasedDownsamplingUtilsUnitTest.java
@@ -0,0 +1,219 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.downsampling;
+
+import htsjdk.samtools.CigarElement;
+import htsjdk.samtools.CigarOperator;
+import htsjdk.samtools.SAMFileHeader;
+import org.apache.log4j.Logger;
+import org.broadinstitute.gatk.utils.BaseTest;
+import org.broadinstitute.gatk.utils.exceptions.UserException;
+import org.broadinstitute.gatk.utils.pileup.PileupElement;
+import org.broadinstitute.gatk.utils.sam.ArtificialSAMUtils;
+import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
+import org.testng.Assert;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+import java.io.File;
+import java.util.*;
+
+
+/**
+ * Basic unit test for AlleleBiasedDownsamplingUtils
+ */
+public class AlleleBiasedDownsamplingUtilsUnitTest extends BaseTest {
+
+
+    @Test
+    public void testSmartDownsampling() {
+
+        final int[] idealHetAlleleCounts = new int[]{0, 50, 0, 50};
+        final int[] idealHomAlleleCounts = new int[]{0, 100, 0, 0};
+
+        // no contamination, no removal
+        testOneCase(0, 0, 0, 0, 0.1, 100, idealHetAlleleCounts, idealHetAlleleCounts);
+        testOneCase(0, 0, 0, 0, 0.1, 100, idealHomAlleleCounts, idealHomAlleleCounts);
+
+        // hom sample, het contaminant, different alleles
+        testOneCase(5, 0, 0, 0, 0.1, 100, idealHomAlleleCounts, idealHomAlleleCounts);
+        testOneCase(0, 0, 5, 0, 0.1, 100, idealHomAlleleCounts, idealHomAlleleCounts);
+        testOneCase(0, 0, 0, 5, 0.1, 100, idealHomAlleleCounts, idealHomAlleleCounts);
+
+        // hom sample, hom contaminant, different alleles
+        testOneCase(10, 0, 0, 0, 0.1, 100, idealHomAlleleCounts, idealHomAlleleCounts);
+        testOneCase(0, 0, 10, 0, 0.1, 100, idealHomAlleleCounts, idealHomAlleleCounts);
+        testOneCase(0, 0, 0, 10, 0.1, 100, idealHomAlleleCounts, idealHomAlleleCounts);
+
+        // het sample, het contaminant, different alleles
+        testOneCase(5, 0, 0, 0, 0.1, 100, idealHetAlleleCounts, idealHetAlleleCounts);
+        testOneCase(0, 0, 5, 0, 0.1, 100, idealHetAlleleCounts, idealHetAlleleCounts);
+
+        // het sample, hom contaminant, different alleles
+        testOneCase(10, 0, 0, 0, 0.1, 100, idealHetAlleleCounts, idealHetAlleleCounts);
+        testOneCase(0, 0, 10, 0, 0.1, 100, idealHetAlleleCounts, idealHetAlleleCounts);
+
+        // hom sample, het contaminant, overlapping alleles
+        final int[] enhancedHomAlleleCounts = new int[]{0, 105, 0, 0};
+        testOneCase(5, 5, 0, 0, 0.1, 100, idealHomAlleleCounts, enhancedHomAlleleCounts);
+        testOneCase(0, 5, 5, 0, 0.1, 100, idealHomAlleleCounts, enhancedHomAlleleCounts);
+        testOneCase(0, 5, 0, 5, 0.1, 100, idealHomAlleleCounts, enhancedHomAlleleCounts);
+
+        // hom sample, hom contaminant, overlapping alleles
+        testOneCase(0, 10, 0, 0, 0.1, 100, idealHomAlleleCounts, new int[]{0, 110, 0, 0});
+
+        // het sample, het contaminant, overlapping alleles
+        testOneCase(5, 5, 0, 0, 0.1, 100, idealHetAlleleCounts, idealHetAlleleCounts);
+        testOneCase(0, 5, 5, 0, 0.1, 100, idealHetAlleleCounts, idealHetAlleleCounts);
+        testOneCase(0, 5, 0, 5, 0.1, 100, idealHetAlleleCounts, new int[]{0, 55, 0, 55});
+        testOneCase(5, 0, 0, 5, 0.1, 100, idealHetAlleleCounts, idealHetAlleleCounts);
+        testOneCase(0, 0, 5, 5, 0.1, 100, idealHetAlleleCounts, idealHetAlleleCounts);
+
+        // het sample, hom contaminant, overlapping alleles
+        testOneCase(0, 10, 0, 0, 0.1, 100, idealHetAlleleCounts, idealHetAlleleCounts);
+        testOneCase(0, 0, 0, 10, 0.1, 100, idealHetAlleleCounts, idealHetAlleleCounts);
+    }
+
+    private static void testOneCase(final int addA, final int addC, final int addG, final int addT, final double contaminationFraction,
+                                    final int pileupSize, final int[] initialCounts, final int[] targetCounts) {
+
+        final int[] actualCounts = initialCounts.clone();
+        actualCounts[0] += addA;
+        actualCounts[1] += addC;
+        actualCounts[2] += addG;
+        actualCounts[3] += addT;
+
+        final int[] results = AlleleBiasedDownsamplingUtils.runSmartDownsampling(actualCounts, (int) (pileupSize * contaminationFraction));
+        Assert.assertTrue(countsAreEqual(results, targetCounts));
+    }
+
+    private static boolean countsAreEqual(final int[] counts1, final int[] counts2) {
+        for ( int i = 0; i < 4; i++ ) {
+            if ( counts1[i] != counts2[i] )
+                return false;
+        }
+        return true;
+    }
+
+    @DataProvider(name = "BiasedDownsamplingTest")
+    public Object[][] makeBiasedDownsamplingTest() {
+        final List<Object[]> tests = new LinkedList<Object[]>();
+
+        final SAMFileHeader header = ArtificialSAMUtils.createArtificialSamHeader(1, 1, 1000);
+
+        for ( final int originalCount : Arrays.asList(1, 2, 10, 1000) ) {
+            for ( final int toRemove : Arrays.asList(0, 1, 2, 10, 1000) ) {
+                if ( toRemove <= originalCount )
+                    tests.add(new Object[]{header, originalCount, toRemove});
+            }
+        }
+
+        return tests.toArray(new Object[][]{});
+    }
+
+    @Test(dataProvider = "BiasedDownsamplingTest")
+    public void testBiasedDownsampling(final SAMFileHeader header, final int originalCount, final int toRemove) {
+
+        final LinkedList<PileupElement> elements = new LinkedList<>();
+        for ( int i = 0; i < originalCount; i++ ) {
+            final GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(header, "read", 0, 1, 1);
+            elements.add(new PileupElement(read, 0, new CigarElement(1, CigarOperator.M), 0, 0));
+        }
+
+        final List<PileupElement> result = AlleleBiasedDownsamplingUtils.downsampleElements(elements, originalCount, toRemove);
+
+        Assert.assertEquals(result.size(), toRemove);
+    }
+
+    @Test
+    public void testLoadContaminationFileDetails(){
+        Logger logger=org.apache.log4j.Logger.getRootLogger();
+
+        final String ArtificalBAMLocation = privateTestDir + "ArtificallyContaminatedBams/";
+        final File ContamFile1=new File(ArtificalBAMLocation+"contamination.case.1.txt");
+
+        Map<String,Double> Contam1=new HashMap<String,Double>();
+        Set<String> Samples1=new HashSet<String>();
+
+        Contam1.put("NA11918",0.15);
+        Samples1.addAll(Contam1.keySet());
+        testLoadFile(ContamFile1,Samples1,Contam1,logger);
+
+        Contam1.put("NA12842",0.13);
+        Samples1.addAll(Contam1.keySet());
+        testLoadFile(ContamFile1,Samples1,Contam1,logger);
+
+        Samples1.add("DUMMY");
+        testLoadFile(ContamFile1,Samples1,Contam1,logger);
+   }
+
+    private static void testLoadFile(final File file, final Set<String> Samples, final Map<String,Double> map, Logger logger){
+        Map<String,Double> loadedMap = AlleleBiasedDownsamplingUtils.loadContaminationFile(file,0.0,Samples,logger);
+        Assert.assertTrue(loadedMap.equals(map));
+    }
+
+    @DataProvider(name = "goodContaminationFiles")
+    public Integer[][] goodContaminationFiles() {
+        return new Integer[][]{
+                {1, 2},
+                {2, 3},
+                {3, 2},
+                {4, 2},
+                {5, 3},
+                {6, 2},
+                {7, 2},
+                {8, 2}
+        };
+    }
+
+    @Test(dataProvider = "goodContaminationFiles")
+    public void testLoadContaminationFile(final Integer ArtificalBAMnumber, final Integer numberOfSamples) {
+        final String ArtificialBAM = String.format("ArtificallyContaminatedBams/contamination.case.%d.txt", ArtificalBAMnumber);
+        Logger logger = org.apache.log4j.Logger.getRootLogger();
+
+        File ContamFile = new File(privateTestDir, ArtificialBAM);
+        Assert.assertTrue(AlleleBiasedDownsamplingUtils.loadContaminationFile(ContamFile, 0.0, null, logger).size() == numberOfSamples);
+
+    }
+
+
+    @DataProvider(name = "badContaminationFiles")
+    public Integer[][] badContaminationFiles() {
+        return new Integer[][]{{1}, {2}, {3}, {4}, {5}};
+    }
+
+    @Test(dataProvider = "badContaminationFiles", expectedExceptions = UserException.MalformedFile.class)
+    public void testLoadBrokenContaminationFile(final int i) {
+        Logger logger = org.apache.log4j.Logger.getRootLogger();
+        final String ArtificalBAMLocation = privateTestDir + "ArtificallyContaminatedBams/";
+
+        File ContaminationFile = new File(ArtificalBAMLocation + String.format("contamination.case.broken.%d.txt", i));
+        AlleleBiasedDownsamplingUtils.loadContaminationFile(ContaminationFile, 0.0, null, logger);
+
+    }
+
+
+}
diff --git a/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/fasta/CachingIndexedFastaSequenceFileUnitTest.java b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/fasta/CachingIndexedFastaSequenceFileUnitTest.java
new file mode 100644
index 0000000..ef8565d
--- /dev/null
+++ b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/fasta/CachingIndexedFastaSequenceFileUnitTest.java
@@ -0,0 +1,264 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.fasta;
+
+
+// the imports for unit testing.
+
+
+import htsjdk.samtools.reference.IndexedFastaSequenceFile;
+import htsjdk.samtools.reference.ReferenceSequence;
+import htsjdk.samtools.SAMSequenceRecord;
+import org.apache.commons.lang.StringUtils;
+import org.apache.log4j.Priority;
+import org.broadinstitute.gatk.utils.BaseTest;
+import org.broadinstitute.gatk.utils.exceptions.UserException;
+import org.testng.Assert;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.List;
+import java.util.concurrent.Callable;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+
+/**
+ * Basic unit test for CachingIndexedFastaSequenceFile
+ */
+public class CachingIndexedFastaSequenceFileUnitTest extends BaseTest {
+    private File simpleFasta = new File(publicTestDir + "/exampleFASTA.fasta");
+    private static final int STEP_SIZE = 1;
+    private final static boolean DEBUG = false;
+
+    //private static final List<Integer> QUERY_SIZES = Arrays.asList(1);
+    private static final List<Integer> QUERY_SIZES = Arrays.asList(1, 10, 100);
+    private static final List<Integer> CACHE_SIZES = Arrays.asList(-1, 100, 1000);
+
+    @DataProvider(name = "fastas")
+    public Object[][] createData1() {
+        List<Object[]> params = new ArrayList<Object[]>();
+        for ( File fasta : Arrays.asList(simpleFasta) ) {
+            for ( int cacheSize : CACHE_SIZES ) {
+                for ( int querySize : QUERY_SIZES ) {
+                    params.add(new Object[]{fasta, cacheSize, querySize});
+                }
+            }
+        }
+
+        return params.toArray(new Object[][]{});
+    }
+
+    private static long getCacheSize(final long cacheSizeRequested) {
+        return cacheSizeRequested == -1 ? CachingIndexedFastaSequenceFile.DEFAULT_CACHE_SIZE : cacheSizeRequested;
+    }
+
+    @Test(dataProvider = "fastas", enabled = true && ! DEBUG)
+    public void testCachingIndexedFastaReaderSequential1(File fasta, int cacheSize, int querySize) throws FileNotFoundException {
+        final CachingIndexedFastaSequenceFile caching = new CachingIndexedFastaSequenceFile(fasta, getCacheSize(cacheSize), true, false);
+
+        SAMSequenceRecord contig = caching.getSequenceDictionary().getSequence(0);
+        logger.warn(String.format("Checking contig %s length %d with cache size %d and query size %d",
+                contig.getSequenceName(), contig.getSequenceLength(), cacheSize, querySize));
+        testSequential(caching, fasta, querySize);
+    }
+
+    private void testSequential(final CachingIndexedFastaSequenceFile caching, final File fasta, final int querySize) throws FileNotFoundException {
+        Assert.assertTrue(caching.isPreservingCase(), "testSequential only works for case preserving CachingIndexedFastaSequenceFile readers");
+
+        final IndexedFastaSequenceFile uncached = new IndexedFastaSequenceFile(fasta);
+
+        SAMSequenceRecord contig = uncached.getSequenceDictionary().getSequence(0);
+        for ( int i = 0; i < contig.getSequenceLength(); i += STEP_SIZE ) {
+            int start = i;
+            int stop = start + querySize;
+            if ( stop <= contig.getSequenceLength() ) {
+                ReferenceSequence cachedVal = caching.getSubsequenceAt(contig.getSequenceName(), start, stop);
+                ReferenceSequence uncachedVal = uncached.getSubsequenceAt(contig.getSequenceName(), start, stop);
+
+                Assert.assertEquals(cachedVal.getName(), uncachedVal.getName());
+                Assert.assertEquals(cachedVal.getContigIndex(), uncachedVal.getContigIndex());
+                Assert.assertEquals(cachedVal.getBases(), uncachedVal.getBases());
+            }
+        }
+
+        // asserts for efficiency.  We issue contig.length / STEP_SIZE queries, each spanning
+        // start -> start + querySize, against a cache of size X.  A query should hit the
+        // cache whenever its range falls entirely within the currently cached window, which
+        // should happen at least (X - querySize * 2) / STEP_SIZE
+        // times.
+        final int minExpectedHits = (int)Math.floor((Math.min(caching.getCacheSize(), contig.getSequenceLength()) - querySize * 2.0) / STEP_SIZE);
+        caching.printEfficiency(Priority.WARN);
+        Assert.assertTrue(caching.getCacheHits() >= minExpectedHits, "Expected at least " + minExpectedHits + " cache hits but only got " + caching.getCacheHits());
+
+    }
+
+    // Tests grabbing sequences around a middle cached value.
+    @Test(dataProvider = "fastas", enabled = true && ! DEBUG)
+    public void testCachingIndexedFastaReaderTwoStage(File fasta, int cacheSize, int querySize) throws FileNotFoundException {
+        final IndexedFastaSequenceFile uncached = new IndexedFastaSequenceFile(fasta);
+        final CachingIndexedFastaSequenceFile caching = new CachingIndexedFastaSequenceFile(fasta, getCacheSize(cacheSize), true, false);
+
+        SAMSequenceRecord contig = uncached.getSequenceDictionary().getSequence(0);
+
+        int middleStart = (contig.getSequenceLength() - querySize) / 2;
+        int middleStop = middleStart + querySize;
+
+        logger.warn(String.format("Checking contig %s length %d with cache size %d and query size %d with intermediate query",
+                contig.getSequenceName(), contig.getSequenceLength(), cacheSize, querySize));
+
+        for ( int i = 0; i < contig.getSequenceLength(); i += 10 ) {
+            int start = i;
+            int stop = start + querySize;
+            if ( stop <= contig.getSequenceLength() ) {
+                ReferenceSequence grabMiddle = caching.getSubsequenceAt(contig.getSequenceName(), middleStart, middleStop);
+                ReferenceSequence cachedVal = caching.getSubsequenceAt(contig.getSequenceName(), start, stop);
+                ReferenceSequence uncachedVal = uncached.getSubsequenceAt(contig.getSequenceName(), start, stop);
+
+                Assert.assertEquals(cachedVal.getName(), uncachedVal.getName());
+                Assert.assertEquals(cachedVal.getContigIndex(), uncachedVal.getContigIndex());
+                Assert.assertEquals(cachedVal.getBases(), uncachedVal.getBases());
+            }
+        }
+    }
+
+    @DataProvider(name = "ParallelFastaTest")
+    public Object[][] createParallelFastaTest() {
+        List<Object[]> params = new ArrayList<Object[]>();
+
+        for ( File fasta : Arrays.asList(simpleFasta) ) {
+            for ( int cacheSize : CACHE_SIZES ) {
+                for ( int querySize : QUERY_SIZES ) {
+                    for ( int nt : Arrays.asList(1, 2, 3, 4) ) {
+                        params.add(new Object[]{fasta, cacheSize, querySize, nt});
+                    }
+                }
+            }
+        }
+
+        return params.toArray(new Object[][]{});
+    }
+
+
+    @Test(dataProvider = "ParallelFastaTest", enabled = true && ! DEBUG, timeOut = 60000)
+    public void testCachingIndexedFastaReaderParallel(final File fasta, final int cacheSize, final int querySize, final int nt) throws FileNotFoundException, InterruptedException {
+        final CachingIndexedFastaSequenceFile caching = new CachingIndexedFastaSequenceFile(fasta, getCacheSize(cacheSize), true, false);
+
+        logger.warn(String.format("Parallel caching index fasta reader test cacheSize %d querySize %d nt %d", caching.getCacheSize(), querySize, nt));
+        for ( int iterations = 0; iterations < 1; iterations++ ) {
+            final ExecutorService executor = Executors.newFixedThreadPool(nt);
+            final Collection<Callable<Object>> tasks = new ArrayList<Callable<Object>>(nt);
+            for ( int i = 0; i < nt; i++ )
+                tasks.add(new Callable<Object>() {
+                    @Override
+                    public Object call() throws Exception {
+                        testSequential(caching, fasta, querySize);
+                        return null;
+                    }
+                });
+            executor.invokeAll(tasks);
+            executor.shutdownNow();
+        }
+    }
+
+    // make sure some bases are lower case and some are upper case
+    @Test(enabled = true)
+    public void testMixedCasesInExample() throws FileNotFoundException, InterruptedException {
+        final IndexedFastaSequenceFile original = new IndexedFastaSequenceFile(new File(exampleFASTA));
+        final CachingIndexedFastaSequenceFile casePreserving = new CachingIndexedFastaSequenceFile(new File(exampleFASTA), true);
+        final CachingIndexedFastaSequenceFile allUpper = new CachingIndexedFastaSequenceFile(new File(exampleFASTA));
+
+        int nMixedCase = 0;
+        for ( SAMSequenceRecord contig : original.getSequenceDictionary().getSequences() ) {
+            nMixedCase += testCases(original, casePreserving, allUpper, contig.getSequenceName(), -1, -1);
+
+            final int step = 100;
+            for ( int lastPos = step; lastPos < contig.getSequenceLength(); lastPos += step ) {
+                testCases(original, casePreserving, allUpper, contig.getSequenceName(), lastPos - step, lastPos);
+            }
+        }
+
+        Assert.assertTrue(nMixedCase > 0, "No mixed cases sequences found in file.  Unexpected test state");
+    }
+
+    private int testCases(final IndexedFastaSequenceFile original,
+                          final IndexedFastaSequenceFile casePreserving,
+                          final IndexedFastaSequenceFile allUpper,
+                          final String contig, final int start, final int stop ) {
+        final String orig = fetchBaseString(original, contig, start, stop);
+        final String keptCase = fetchBaseString(casePreserving, contig, start, stop);
+        final String upperCase = fetchBaseString(allUpper, contig, start, stop).toUpperCase();
+
+        final String origToUpper = orig.toUpperCase();
+        if ( ! orig.equals(origToUpper) ) {
+            Assert.assertEquals(keptCase, orig, "Case preserving operation not equal to the original case for contig " + contig);
+            Assert.assertEquals(upperCase, origToUpper, "All upper case reader not equal to the uppercase of original case for contig " + contig);
+            return 1;
+        } else {
+            return 0;
+        }
+    }
+
+    private String fetchBaseString(final IndexedFastaSequenceFile reader, final String contig, final int start, final int stop) {
+        if ( start == -1 )
+            return new String(reader.getSequence(contig).getBases());
+        else
+            return new String(reader.getSubsequenceAt(contig, start, stop).getBases());
+    }
+
+    @Test(enabled = true)
+    public void testIupacChanges() throws FileNotFoundException, InterruptedException {
+        final String testFasta = privateTestDir + "iupacFASTA.fasta";
+        final CachingIndexedFastaSequenceFile iupacPreserving = new CachingIndexedFastaSequenceFile(new File(testFasta), false, true);
+        final CachingIndexedFastaSequenceFile makeNs = new CachingIndexedFastaSequenceFile(new File(testFasta));
+
+        int preservingNs = 0;
+        int changingNs = 0;
+        for ( SAMSequenceRecord contig : iupacPreserving.getSequenceDictionary().getSequences() ) {
+            final String sPreserving = fetchBaseString(iupacPreserving, contig.getSequenceName(), 0, 15000);
+            preservingNs += StringUtils.countMatches(sPreserving, "N");
+
+            final String sChanging = fetchBaseString(makeNs, contig.getSequenceName(), 0, 15000);
+            changingNs += StringUtils.countMatches(sChanging, "N");
+        }
+
+        Assert.assertEquals(changingNs, preservingNs + 4);
+    }
+
+    @Test(enabled = true, expectedExceptions = {UserException.class})
+    public void testFailOnBadBase() throws FileNotFoundException, InterruptedException {
+        final String testFasta = privateTestDir + "problematicFASTA.fasta";
+        final CachingIndexedFastaSequenceFile fasta = new CachingIndexedFastaSequenceFile(new File(testFasta));
+
+        for ( SAMSequenceRecord contig : fasta.getSequenceDictionary().getSequences() ) {
+            fetchBaseString(fasta, contig.getSequenceName(), -1, -1);
+        }
+    }
+}
diff --git a/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/file/FSLockWithSharedUnitTest.java b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/file/FSLockWithSharedUnitTest.java
new file mode 100644
index 0000000..7dbf347
--- /dev/null
+++ b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/file/FSLockWithSharedUnitTest.java
@@ -0,0 +1,60 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.file;
+
+import org.broadinstitute.gatk.utils.BaseTest;
+import org.broadinstitute.gatk.utils.exceptions.UserException;
+import org.testng.annotations.Test;
+
+import java.io.File;
+
+public class FSLockWithSharedUnitTest extends BaseTest {
+
+    private static final int MAX_EXPECTED_LOCK_ACQUISITION_TIME = FSLockWithShared.DEFAULT_LOCK_ACQUISITION_TIMEOUT_IN_MILLISECONDS +
+                                                                  FSLockWithShared.THREAD_TERMINATION_TIMEOUT_IN_MILLISECONDS;
+
+    /**
+     * Test to ensure that we're never spending more than the maximum configured amount of time in lock acquisition calls.
+     */
+    @Test( timeOut = MAX_EXPECTED_LOCK_ACQUISITION_TIME + 10 * 1000 )
+    public void testLockAcquisitionTimeout() {
+        final File lockFile = createTempFile("FSLockWithSharedUnitTest", ".lock");
+        final FSLockWithShared lock = new FSLockWithShared(lockFile);
+        boolean lockAcquisitionSucceeded = false;
+
+        try {
+            lockAcquisitionSucceeded = lock.sharedLock();
+        }
+        catch ( UserException e ) {
+            logger.info("Caught UserException from lock acquisition call: lock acquisition must have timed out. Message: " + e.getMessage());
+        }
+        finally {
+            if ( lockAcquisitionSucceeded ) {
+                lock.unlock();
+            }
+        }
+    }
+}
diff --git a/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/fragments/FragmentUtilsBenchmark.java b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/fragments/FragmentUtilsBenchmark.java
new file mode 100644
index 0000000..decb828
--- /dev/null
+++ b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/fragments/FragmentUtilsBenchmark.java
@@ -0,0 +1,81 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.fragments;
+
+import com.google.caliper.Param;
+import com.google.caliper.SimpleBenchmark;
+import htsjdk.samtools.SAMFileHeader;
+import org.broadinstitute.gatk.utils.GenomeLoc;
+import org.broadinstitute.gatk.utils.GenomeLocParser;
+import org.broadinstitute.gatk.utils.pileup.ReadBackedPileup;
+import org.broadinstitute.gatk.utils.sam.ArtificialSAMUtils;
+
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * Caliper microbenchmark of fragment pileup
+ */
+public class FragmentUtilsBenchmark extends SimpleBenchmark {
+    List<ReadBackedPileup> pileups;
+
+    @Param({"0", "4", "30", "150", "1000"})
+    int pileupSize; // set automatically by framework
+
+    @Param({"200", "400"})
+    int insertSize; // set automatically by framework
+
+    @Override protected void setUp() {
+        final int nPileupsToGenerate = 100;
+        pileups = new ArrayList<ReadBackedPileup>(nPileupsToGenerate);
+        SAMFileHeader header = ArtificialSAMUtils.createArtificialSamHeader(1, 1, 1000);
+        GenomeLocParser genomeLocParser;
+        genomeLocParser = new GenomeLocParser(header.getSequenceDictionary());
+        GenomeLoc loc = genomeLocParser.createGenomeLoc("chr1", 50);
+        final int readLen = 100;
+
+        for ( int pileupN = 0; pileupN < nPileupsToGenerate; pileupN++ ) {
+            ReadBackedPileup rbp = ArtificialSAMUtils.createReadBackedPileup(header, loc, readLen, insertSize, pileupSize);
+            pileups.add(rbp);
+        }
+    }
+
+//    public void timeOriginal(int rep) {
+//        run(rep, FragmentUtils.FragmentMatchingAlgorithm.ORIGINAL);
+//    }
+
+    public void timeSkipNonOverlapping(int rep) {
+        int nFrags = 0;
+        for ( int i = 0; i < rep; i++ ) {
+            for ( ReadBackedPileup rbp : pileups )
+                nFrags += FragmentUtils.create(rbp).getOverlappingPairs().size();
+        }
+    }
+
+    public static void main(String[] args) {
+        com.google.caliper.Runner.main(FragmentUtilsBenchmark.class, args);
+    }
+}
diff --git a/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/fragments/FragmentUtilsUnitTest.java b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/fragments/FragmentUtilsUnitTest.java
new file mode 100644
index 0000000..1700baf
--- /dev/null
+++ b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/fragments/FragmentUtilsUnitTest.java
@@ -0,0 +1,390 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.fragments;
+
+import htsjdk.samtools.SAMFileHeader;
+import htsjdk.samtools.TextCigarCodec;
+import org.broadinstitute.gatk.utils.BaseTest;
+import org.broadinstitute.gatk.utils.Utils;
+import org.broadinstitute.gatk.utils.pileup.PileupElement;
+import org.broadinstitute.gatk.utils.pileup.ReadBackedPileup;
+import org.broadinstitute.gatk.utils.pileup.ReadBackedPileupImpl;
+import org.broadinstitute.gatk.utils.recalibration.EventType;
+import org.broadinstitute.gatk.utils.sam.ArtificialSAMUtils;
+import org.broadinstitute.gatk.utils.sam.GATKSAMReadGroupRecord;
+import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
+import org.testng.Assert;
+import org.testng.annotations.BeforeTest;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
+/**
+ * Unit tests for FragmentUtils: grouping pileups and read lists into fragment
+ * collections, and merging/adjusting overlapping paired-read fragments.
+ */
+public class FragmentUtilsUnitTest extends BaseTest {
+    // Shared artificial SAM header and read group, initialized in setup().
+    private static SAMFileHeader header;
+    private static GATKSAMReadGroupRecord rgForMerged;
+    // Flip to true to disable the bulk of the tests while debugging one case.
+    private final static boolean DEBUG = false;
+
+    // Test fixture: builds an artificial read pair and records, per reference
+    // position, the pileup plus the expected singleton/pair counts that
+    // FragmentUtils.create() should produce there.
+    private class FragmentUtilsTest extends TestDataProvider {
+        List<TestState> statesForPileup = new ArrayList<TestState>();
+        List<TestState> statesForReads = new ArrayList<TestState>();
+
+        private FragmentUtilsTest(String name, int readLen, int leftStart, int rightStart,
+                                  boolean leftIsFirst, boolean leftIsNegative) {
+            super(FragmentUtilsTest.class, String.format("%s-leftIsFirst:%b-leftIsNegative:%b", name, leftIsFirst, leftIsNegative));
+
+            List<GATKSAMRecord> pair = ArtificialSAMUtils.createPair(header, "readpair", readLen, leftStart, rightStart, leftIsFirst, leftIsNegative);
+            GATKSAMRecord left = pair.get(0);
+            GATKSAMRecord right = pair.get(1);
+
+            // Walk every position the pair could cover and record the expected
+            // fragment decomposition of the single-position pileup there.
+            for ( int pos = leftStart; pos < rightStart + readLen; pos++) {
+                boolean posCoveredByLeft = pos >= left.getAlignmentStart() && pos <= left.getAlignmentEnd();
+                boolean posCoveredByRight = pos >= right.getAlignmentStart() && pos <= right.getAlignmentEnd();
+
+                if ( posCoveredByLeft || posCoveredByRight ) {
+                    List<GATKSAMRecord> reads = new ArrayList<GATKSAMRecord>();
+                    List<Integer> offsets = new ArrayList<Integer>();
+
+                    if ( posCoveredByLeft ) {
+                        reads.add(left);
+                        offsets.add(pos - left.getAlignmentStart());
+                    }
+
+                    if ( posCoveredByRight ) {
+                        reads.add(right);
+                        offsets.add(pos - right.getAlignmentStart());
+                    }
+
+                    // Both mates at one position => one overlapping pair, else one singleton.
+                    boolean shouldBeFragment = posCoveredByLeft && posCoveredByRight;
+                    ReadBackedPileup pileup = new ReadBackedPileupImpl(null, reads, offsets);
+                    TestState testState = new TestState(shouldBeFragment ? 0 : 1, shouldBeFragment ? 1 : 0, pileup, null);
+                    statesForPileup.add(testState);
+                }
+
+                // NOTE(review): this adds an identical whole-pair state on every loop
+                // iteration, so statesForReads holds many duplicates of one state —
+                // harmless (the assertions are re-run) but presumably unintended.
+                TestState testState = left.getAlignmentEnd() >= right.getAlignmentStart() ? new TestState(0, 1, null, pair) : new TestState(2, 0, null, pair);
+                statesForReads.add(testState);
+            }
+        }
+    }
+
+    // Expected fragment counts for one input (either a pileup or a raw read list).
+    private class TestState {
+        int expectedSingletons, expectedPairs;
+        ReadBackedPileup pileup;
+        List<GATKSAMRecord> rawReads;
+
+        private TestState(final int expectedSingletons, final int expectedPairs, final ReadBackedPileup pileup, final List<GATKSAMRecord> rawReads) {
+            this.expectedSingletons = expectedSingletons;
+            this.expectedPairs = expectedPairs;
+            this.pileup = pileup;
+            this.rawReads = rawReads;
+        }
+    }
+
+    // Overlapping and non-overlapping pairs, in every first-of-pair/strand combination.
+    @DataProvider(name = "fragmentUtilsTest")
+    public Object[][] createTests() {
+        for ( boolean leftIsFirst : Arrays.asList(true, false) ) {
+            for ( boolean leftIsNegative : Arrays.asList(true, false) ) {
+                // Overlapping pair
+                // ---->        [first]
+                //   <---       [second]
+                new FragmentUtilsTest("overlapping-pair", 10, 1, 5, leftIsFirst, leftIsNegative);
+
+                // Non-overlapping pair
+                // ---->
+                //          <----
+                new FragmentUtilsTest("nonoverlapping-pair", 10, 1, 15, leftIsFirst, leftIsNegative);
+            }
+        }
+
+        return FragmentUtilsTest.getTests(FragmentUtilsTest.class);
+    }
+
+    // FragmentUtils.create(ReadBackedPileup) must yield the expected pair/singleton counts.
+    @Test(enabled = !DEBUG, dataProvider = "fragmentUtilsTest")
+    public void testAsPileup(FragmentUtilsTest test) {
+        for ( TestState testState : test.statesForPileup ) {
+            ReadBackedPileup rbp = testState.pileup;
+            FragmentCollection<PileupElement> fp = FragmentUtils.create(rbp);
+            Assert.assertEquals(fp.getOverlappingPairs().size(), testState.expectedPairs);
+            Assert.assertEquals(fp.getSingletonReads().size(), testState.expectedSingletons);
+        }
+    }
+
+    // Same expectations when creating from the pileup's read list instead of the pileup.
+    @Test(enabled = !DEBUG, dataProvider = "fragmentUtilsTest")
+    public void testAsListOfReadsFromPileup(FragmentUtilsTest test) {
+        for ( TestState testState : test.statesForPileup ) {
+            FragmentCollection<GATKSAMRecord> fp = FragmentUtils.create(testState.pileup.getReads());
+            Assert.assertEquals(fp.getOverlappingPairs().size(), testState.expectedPairs);
+            Assert.assertEquals(fp.getSingletonReads().size(), testState.expectedSingletons);
+        }
+    }
+
+    // Same expectations when creating directly from the raw read pair.
+    @Test(enabled = !DEBUG, dataProvider = "fragmentUtilsTest")
+    public void testAsListOfReads(FragmentUtilsTest test) {
+        for ( TestState testState : test.statesForReads ) {
+            FragmentCollection<GATKSAMRecord> fp = FragmentUtils.create(testState.rawReads);
+            Assert.assertEquals(fp.getOverlappingPairs().size(), testState.expectedPairs);
+            Assert.assertEquals(fp.getSingletonReads().size(), testState.expectedSingletons);
+        }
+    }
+
+    // Reads supplied out of coordinate order must be rejected with IllegalArgumentException.
+    @Test(enabled = !DEBUG, expectedExceptions = IllegalArgumentException.class)
+    public void testOutOfOrder() {
+        final List<GATKSAMRecord> pair = ArtificialSAMUtils.createPair(header, "readpair", 100, 1, 50, true, true);
+        final GATKSAMRecord left = pair.get(0);
+        final GATKSAMRecord right = pair.get(1);
+        final List<GATKSAMRecord> reads = Arrays.asList(right, left); // OUT OF ORDER!
+        final List<Integer> offsets = Arrays.asList(0, 50);
+        final ReadBackedPileup pileup = new ReadBackedPileupImpl(null, reads, offsets);
+        FragmentUtils.create(pileup); // should throw exception
+    }
+
+    // One-time initialization of the shared header and read group.
+    @BeforeTest
+    public void setup() {
+        header = ArtificialSAMUtils.createArtificialSamHeader(1,1,1000);
+        rgForMerged = new GATKSAMReadGroupRecord("RG1");
+    }
+
+    // Cases for mergeOverlappingPairedFragments: two reads sharing an overlap of
+    // every size from 1 to the full overlap string, plus the expected merged read.
+    @DataProvider(name = "MergeFragmentsTest")
+    public Object[][] createMergeFragmentsTest() throws Exception {
+        List<Object[]> tests = new ArrayList<Object[]>();
+
+        final String leftFlank = "CCC";
+        final String rightFlank = "AAA";
+        final String allOverlappingBases = "ACGTACGTGGAACCTTAG";
+        for ( int overlapSize = 1; overlapSize < allOverlappingBases.length(); overlapSize++ ) {
+            final String overlappingBases = allOverlappingBases.substring(0, overlapSize);
+            final byte[] overlappingBaseQuals = new byte[overlapSize];
+            for ( int i = 0; i < overlapSize; i++ ) overlappingBaseQuals[i] = (byte)(i + 30);
+            final GATKSAMRecord read1  = makeOverlappingRead(leftFlank, 20, overlappingBases, overlappingBaseQuals, "", 30, 1);
+            final GATKSAMRecord read2  = makeOverlappingRead("", 20, overlappingBases, overlappingBaseQuals, rightFlank, 30, leftFlank.length() + 1);
+            final GATKSAMRecord merged = makeOverlappingRead(leftFlank, 20, overlappingBases, overlappingBaseQuals, rightFlank, 30, 1);
+            tests.add(new Object[]{"equalQuals", read1, read2, merged});
+
+            // test that the merged read base quality is the higher of the two reads'
+            // qualities: lowering one side's quals must not change the expected merge
+            tests.add(new Object[]{"lowQualLeft", modifyBaseQualities(read1, leftFlank.length(), overlapSize), read2, merged});
+            tests.add(new Object[]{"lowQualRight", read1, modifyBaseQualities(read2, 0, overlapSize), merged});
+        }
+
+        return tests.toArray(new Object[][]{});
+    }
+
+    // Returns a clone of read whose base qualities are lowered by 1 at even
+    // offsets (and left unchanged at odd offsets) within [startOffset, startOffset+length).
+    private GATKSAMRecord modifyBaseQualities(final GATKSAMRecord read, final int startOffset, final int length) throws Exception {
+        final GATKSAMRecord readWithLowQuals = (GATKSAMRecord)read.clone();
+        final byte[] withLowQuals = Arrays.copyOf(read.getBaseQualities(), read.getBaseQualities().length);
+        for ( int i = startOffset; i < startOffset + length; i++ )
+            withLowQuals[i] = (byte)(read.getBaseQualities()[i] + (i % 2 == 0 ? -1 : 0));
+        readWithLowQuals.setBaseQualities(withLowQuals);
+        return readWithLowQuals;
+    }
+
+    // Builds an artificial read of leftFlank + overlapBases + rightFlank with
+    // per-region qualities, a fully-matching CIGAR, MAPQ 60, and the shared
+    // merged-read group; quals are set for every EventType.
+    private GATKSAMRecord makeOverlappingRead(final String leftFlank, final int leftQual, final String overlapBases,
+                                              final byte[] overlapQuals, final String rightFlank, final int rightQual,
+                                              final int alignmentStart) {
+        final String bases = leftFlank + overlapBases + rightFlank;
+        final int readLength = bases.length();
+        final GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(header, "myRead", 0, alignmentStart, readLength);
+        final byte[] leftQuals = Utils.dupBytes((byte) leftQual, leftFlank.length());
+        final byte[] rightQuals = Utils.dupBytes((byte) rightQual, rightFlank.length());
+        final byte[] quals = Utils.concat(leftQuals, overlapQuals, rightQuals);
+        read.setCigarString(readLength + "M");
+        read.setReadBases(bases.getBytes());
+        for ( final EventType type : EventType.values() )
+            read.setBaseQualities(quals, type);
+        read.setReadGroup(rgForMerged);
+        read.setMappingQuality(60);
+        return read;
+    }
+
+    // Merged read must match the expected bases, quals (all event types), CIGAR,
+    // read group, and MAPQ, and must be strandless; null expected => null result.
+    @Test(enabled = !DEBUG, dataProvider = "MergeFragmentsTest")
+    public void testMergingTwoReads(final String name, final GATKSAMRecord read1, final GATKSAMRecord read2, final GATKSAMRecord expectedMerged) {
+        final GATKSAMRecord actual = FragmentUtils.mergeOverlappingPairedFragments(read1, read2);
+
+        if ( expectedMerged == null ) {
+            Assert.assertNull(actual, "Expected reads not to merge, but got non-null result from merging");
+        } else {
+            Assert.assertTrue(actual.isStrandless(), "Merged reads should be strandless");
+            Assert.assertNotNull(actual, "Expected reads to merge, but got null result from merging");
+            // I really care about the bases, the quals, the CIGAR, and the read group tag
+            Assert.assertEquals(actual.getCigarString(), expectedMerged.getCigarString());
+            Assert.assertEquals(actual.getReadBases(), expectedMerged.getReadBases());
+            Assert.assertEquals(actual.getReadGroup(), expectedMerged.getReadGroup());
+            Assert.assertEquals(actual.getMappingQuality(), expectedMerged.getMappingQuality());
+            for ( final EventType type : EventType.values() )
+                Assert.assertEquals(actual.getBaseQualities(type), expectedMerged.getBaseQualities(type), "Failed base qualities for event type " + type);
+        }
+    }
+
+    // Soft-clipped adapter bases ("NNNN", 4S in the CIGAR) must be clipped away
+    // before merging, leaving only the common sequence in the merged read.
+    @Test(enabled = !DEBUG)
+    public void testHardClippingBeforeMerge() {
+        final String common = Utils.dupString("A", 10);
+        final byte[] commonQuals = Utils.dupBytes((byte)30, common.length());
+        final String adapter    = "NNNN";
+
+        final GATKSAMRecord read1 = makeOverlappingRead(adapter, 30, common, commonQuals, "", 30, 10);
+        final GATKSAMRecord read2 = makeOverlappingRead("", 30, common, commonQuals, adapter, 30, 10);
+        final GATKSAMRecord expectedMerged = makeOverlappingRead("", 30, common, commonQuals, "", 30, 10);
+        read1.setCigarString("4S" + common.length() + "M");
+        read1.setProperPairFlag(true);
+        read1.setReadPairedFlag(true);
+        read1.setFirstOfPairFlag(true);
+        read1.setReadNegativeStrandFlag(true);
+        read1.setMateNegativeStrandFlag(false);
+        read1.setMateAlignmentStart(read2.getAlignmentStart());
+        read2.setCigarString(common.length() + "M4S");
+        read2.setProperPairFlag(true);
+        read2.setReadPairedFlag(true);
+        read2.setFirstOfPairFlag(false);
+        read2.setReadNegativeStrandFlag(false);
+        read2.setMateNegativeStrandFlag(true);
+        read2.setMateAlignmentStart(read1.getAlignmentStart());
+
+        final int insertSize = common.length() - 1;
+        read1.setInferredInsertSize(-insertSize);
+        read2.setInferredInsertSize(insertSize);
+
+        final GATKSAMRecord actual = FragmentUtils.mergeOverlappingPairedFragments(read1, read2);
+        Assert.assertEquals(actual.getCigarString(), expectedMerged.getCigarString());
+        Assert.assertEquals(actual.getReadBases(), expectedMerged.getReadBases());
+        Assert.assertEquals(actual.getReadGroup(), expectedMerged.getReadGroup());
+        Assert.assertEquals(actual.getMappingQuality(), expectedMerged.getMappingQuality());
+        for ( final EventType type : EventType.values() )
+            Assert.assertEquals(actual.getBaseQualities(type), expectedMerged.getBaseQualities(type), "Failed base qualities for event type " + type);
+    }
+
+    // If clipping leaves the second read fully contained in the first, the
+    // merge must be refused (null result) rather than producing a bogus read.
+    @Test(enabled = true)
+    public void testHardClippingBeforeMergeResultingInCompletelyContainedSecondRead() {
+        final String adapter    = "NNNN";
+
+        final GATKSAMRecord read1 = makeOverlappingRead(adapter, 30, Utils.dupString("A", 10), Utils.dupBytes((byte)30, 10), "", 30, 10);
+        final GATKSAMRecord read2 = makeOverlappingRead("", 30, Utils.dupString("A", 7), Utils.dupBytes((byte)30, 7), adapter, 30, 10);
+        read1.setCigarString("4S10M");
+        read1.setProperPairFlag(true);
+        read1.setFirstOfPairFlag(true);
+        read1.setReadNegativeStrandFlag(true);
+        read1.setMateAlignmentStart(10);
+        read2.setCigarString("7M4S");
+        read2.setProperPairFlag(true);
+        read2.setFirstOfPairFlag(false);
+        read2.setReadNegativeStrandFlag(false);
+
+        final int insertSize = 7 - 1;
+        read1.setInferredInsertSize(insertSize);
+        read2.setInferredInsertSize(-insertSize);
+
+        final GATKSAMRecord actual = FragmentUtils.mergeOverlappingPairedFragments(read1, read2);
+        Assert.assertNull(actual);
+    }
+
+    // Soft-clip sizes hanging off either end of a tiny contig, for both reads.
+    @DataProvider(name = "MergeFragmentsOffContig")
+    public Object[][] makeMergeFragmentsOffContig() throws Exception {
+        List<Object[]> tests = new ArrayList<>();
+
+        for ( final int pre1 : Arrays.asList(0, 50)) {
+            for ( final int post1 : Arrays.asList(0, 50)) {
+                for ( final int pre2 : Arrays.asList(0, 50)) {
+                    for ( final int post2 : Arrays.asList(0, 50)) {
+                        tests.add(new Object[]{pre1, post1, pre2, post2});
+                    }
+                }
+            }
+        }
+
+        return tests.toArray(new Object[][]{});
+    }
+
+    // Smoke test: merging reads whose clips run off the contig must not throw.
+    // NOTE(review): the result is deliberately unasserted — this only checks
+    // that mergeOverlappingPairedFragments completes without an exception.
+    @Test(dataProvider = "MergeFragmentsOffContig")
+    public void testMergeFragmentsOffContig(final int pre1, final int post1, final int pre2, final int post2) {
+        final int contigSize = 10;
+        final SAMFileHeader header = ArtificialSAMUtils.createArtificialSamHeader(1, 0, contigSize);
+
+        final GATKSAMRecord read1 = createReadOffContig(header, false, pre1, post1);
+        final GATKSAMRecord read2 = createReadOffContig(header, true, pre2, post2);
+
+        final GATKSAMRecord merged = FragmentUtils.mergeOverlappingPairedFragments(read1, read2);
+    }
+
+    // Builds a proper-pair read that covers the whole contig in M, with pre/post
+    // soft-clipped bases extending past the contig boundaries.
+    private GATKSAMRecord createReadOffContig(final SAMFileHeader header, final boolean negStrand, final int pre, final int post) {
+        final int contigLen = header.getSequence(0).getSequenceLength();
+        final int readLen = pre + contigLen + post;
+        final GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(header, "read1", 0, 1, readLen);
+        read.setAlignmentStart(1);
+        read.setCigar(TextCigarCodec.decode(pre + "S" + contigLen + "M" + post + "S"));
+        read.setBaseQualities(Utils.dupBytes((byte) 30, readLen));
+        read.setReadBases(Utils.dupBytes((byte)'A', readLen));
+        read.setMappingQuality(60);
+        read.setMateAlignmentStart(1);
+        read.setProperPairFlag(true);
+        read.setReadPairedFlag(true);
+        read.setInferredInsertSize(30);
+        read.setReadNegativeStrandFlag(negStrand);
+        read.setMateNegativeStrandFlag(! negStrand);
+        read.setReadGroup(new GATKSAMReadGroupRecord("foo"));
+        return read;
+    }
+
+
+    // Quality expected outside / inside the overlap after adjustment.
+    private static final byte highQuality = 30;
+    private static final byte overlappingQuality = 20;
+
+    // Pairs of reads overlapping by 1..17 bases, all at highQuality, for
+    // adjustQualsOfOverlappingPairedFragments.
+    @DataProvider(name = "AdjustFragmentsTest")
+    public Object[][] createAdjustFragmentsTest() throws Exception {
+        List<Object[]> tests = new ArrayList<Object[]>();
+
+        final String leftFlank = "CCC";
+        final String rightFlank = "AAA";
+        final String allOverlappingBases = "ACGTACGTGGAACCTTAG";
+        for ( int overlapSize = 1; overlapSize < allOverlappingBases.length(); overlapSize++ ) {
+            final String overlappingBases = allOverlappingBases.substring(0, overlapSize);
+            final byte[] overlappingBaseQuals = new byte[overlapSize];
+            for ( int i = 0; i < overlapSize; i++ ) overlappingBaseQuals[i] = highQuality;
+            final GATKSAMRecord read1  = makeOverlappingRead(leftFlank, highQuality, overlappingBases, overlappingBaseQuals, "", highQuality, 1);
+            final GATKSAMRecord read2  = makeOverlappingRead("", highQuality, overlappingBases, overlappingBaseQuals, rightFlank, highQuality, leftFlank.length() + 1);
+            tests.add(new Object[]{read1, read2, overlapSize});
+        }
+
+        return tests.toArray(new Object[][]{});
+    }
+
+    // After adjustment, only the overlapping bases of each read carry
+    // overlappingQuality; all non-overlapping bases keep highQuality.
+    @Test(enabled = !DEBUG, dataProvider = "AdjustFragmentsTest")
+    public void testAdjustingTwoReads(final GATKSAMRecord read1, final GATKSAMRecord read2, final int overlapSize) {
+        FragmentUtils.adjustQualsOfOverlappingPairedFragments(read1, read2);
+
+        for ( int i = 0; i < read1.getReadLength() - overlapSize; i++ )
+            Assert.assertEquals(read1.getBaseQualities()[i], highQuality);
+        for ( int i = read1.getReadLength() - overlapSize; i < read1.getReadLength(); i++ )
+            Assert.assertEquals(read1.getBaseQualities()[i], overlappingQuality);
+
+        for ( int i = 0; i < overlapSize; i++ )
+            Assert.assertEquals(read2.getBaseQualities()[i], overlappingQuality);
+        for ( int i = overlapSize; i < read2.getReadLength(); i++ )
+            Assert.assertEquals(read2.getBaseQualities()[i], highQuality);
+    }
+}
diff --git a/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/haplotype/EventMapUnitTest.java b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/haplotype/EventMapUnitTest.java
new file mode 100644
index 0000000..2188dbc
--- /dev/null
+++ b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/haplotype/EventMapUnitTest.java
@@ -0,0 +1,203 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.haplotype;
+
+import htsjdk.samtools.TextCigarCodec;
+import org.broadinstitute.gatk.utils.BaseTest;
+import org.broadinstitute.gatk.utils.GenomeLoc;
+import org.broadinstitute.gatk.utils.UnvalidatingGenomeLoc;
+import org.broadinstitute.gatk.utils.Utils;
+import org.broadinstitute.gatk.utils.variant.GATKVariantContextUtils;
+import htsjdk.variant.variantcontext.VariantContext;
+import org.testng.Assert;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+import java.util.*;
+
+/**
+ * Unit tests for EventMap: neighborhood queries and replacement of clumped
+ * events with block substitutions.
+ */
+public class EventMapUnitTest extends BaseTest {
+    private final static String CHR = "20";
+    private final static String NAME = "foo";
+    
+    // Grid of configurations: optional isolated SNPs to the left / middle /
+    // right (-1 = absent) plus a clump of 3-4 adjacent events (SNP/ins/del
+    // permutations) starting at position 50, spaced 3bp apart.  Each case
+    // pairs an EventMap with the list of events expected to form the clump.
+    @DataProvider(name = "MyDataProvider")
+         public Object[][] makeMyDataProvider() {
+        List<Object[]> tests = new ArrayList<Object[]>();
+
+        final List<String> SNP_ALLELES = Arrays.asList("A", "C");
+        final List<String> INS_ALLELES = Arrays.asList("A", "ACGTGA");
+        final List<String> DEL_ALLELES = Arrays.asList("ACGTA", "C");
+        final List<List<String>> allAlleles = Arrays.asList(SNP_ALLELES, INS_ALLELES, DEL_ALLELES);
+        for ( final int leftNotClump : Arrays.asList(-1, 3) ) {
+            for ( final int middleNotClump : Arrays.asList(-1, 10, 500) ) {
+                for ( final int rightNotClump : Arrays.asList(-1, 1000) ) {
+                    for ( final int nClumped : Arrays.asList(3, 4) ) {
+                        for ( final List<List<String>> alleles : Utils.makePermutations(allAlleles, nClumped, true)) {
+                            final List<VariantContext> allVCS = new LinkedList<VariantContext>();
+
+                            if ( leftNotClump != -1 ) allVCS.add(GATKVariantContextUtils.makeFromAlleles(NAME, CHR, leftNotClump, SNP_ALLELES));
+                            if ( middleNotClump != -1 ) allVCS.add(GATKVariantContextUtils.makeFromAlleles(NAME, CHR, middleNotClump, SNP_ALLELES));
+                            if ( rightNotClump != -1 ) allVCS.add(GATKVariantContextUtils.makeFromAlleles(NAME, CHR, rightNotClump, SNP_ALLELES));
+
+                            int clumpStart = 50;
+                            final List<VariantContext> vcs = new LinkedList<VariantContext>();
+                            for ( final List<String> myAlleles : alleles ) {
+                                final VariantContext vc = GATKVariantContextUtils.makeFromAlleles(NAME, CHR, clumpStart, myAlleles);
+                                clumpStart = vc.getEnd() + 3;
+                                vcs.add(vc);
+                            }
+
+                            // First case: only the isolated SNPs, no clump expected.
+                            tests.add(new Object[]{new EventMap(new LinkedList<VariantContext>(allVCS)), Collections.emptyList()});
+                            allVCS.addAll(vcs);
+                            tests.add(new Object[]{new EventMap(allVCS), vcs});
+                        }
+                    }
+                }
+            }
+        }
+
+        return tests.toArray(new Object[][]{});
+    }
+
+    /**
+     * getNeighborhood(vc, 5) must return the whole clump when queried from the
+     * clump's leftmost event, and just the singleton for events outside it.
+     */
+    @Test(dataProvider = "MyDataProvider", enabled = true)
+    public void testGetNeighborhood(final EventMap eventMap, final List<VariantContext> expectedNeighbors) {
+        final VariantContext leftOfNeighors = expectedNeighbors.isEmpty() ? null : expectedNeighbors.get(0);
+
+        for ( final VariantContext vc : eventMap.getVariantContexts() ) {
+            final List<VariantContext> n = eventMap.getNeighborhood(vc, 5);
+            if ( leftOfNeighors == vc )
+                Assert.assertEquals(n, expectedNeighbors);
+            else if ( ! expectedNeighbors.contains(vc) )
+                Assert.assertEquals(n, Collections.singletonList(vc), "Should only contain the original vc but " + n);
+        }
+    }
+
+    // Reference/haplotype/CIGAR triples that should collapse to exactly one
+    // block-substitution event, plus that expected event.
+    @DataProvider(name = "BlockSubstitutionsData")
+    public Object[][] makeBlockSubstitutionsData() {
+        List<Object[]> tests = new ArrayList<Object[]>();
+
+        for ( int size = EventMap.MIN_NUMBER_OF_EVENTS_TO_COMBINE_INTO_BLOCK_SUBSTITUTION; size < 10; size++ ) {
+            final String ref = Utils.dupString("A", size);
+            final String alt = Utils.dupString("C", size);
+            tests.add(new Object[]{ref, alt, size + "M", GATKVariantContextUtils.makeFromAlleles(NAME, CHR, 1, Arrays.asList(ref, alt))});
+        }
+
+        tests.add(new Object[]{"AAAAAA", "GAGAGA", "6M", GATKVariantContextUtils.makeFromAlleles(NAME, CHR, 1, Arrays.asList("AAAAA", "GAGAG"))});
+        tests.add(new Object[]{"AAAAAA", "GAGAGG", "6M", GATKVariantContextUtils.makeFromAlleles(NAME, CHR, 1, Arrays.asList("AAAAAA", "GAGAGG"))});
+
+        // SNPs clumped with a 4bp deletion/insertion, with varying flank length.
+        for ( int len = 0; len < 10; len++ ) {
+            final String s = len == 0 ? "" : Utils.dupString("A", len);
+            tests.add(new Object[]{s + "AACCCCAA", s + "GAAG", len + 2 + "M4D2M", GATKVariantContextUtils.makeFromAlleles(NAME, CHR, 1 + len,   Arrays.asList("AACCCCAA", "GAAG"))});
+            tests.add(new Object[]{s + "AAAA", s + "GACCCCAG", len + 2 + "M4I2M", GATKVariantContextUtils.makeFromAlleles(NAME, CHR, 1 + len, Arrays.asList("AAAA", "GACCCCAG"))});
+
+            tests.add(new Object[]{"AACCCCAA" + s, "GAAG" + s, "2M4D" + (len + 2) + "M", GATKVariantContextUtils.makeFromAlleles(NAME, CHR, 1,   Arrays.asList("AACCCCAA", "GAAG"))});
+            tests.add(new Object[]{"AAAA" + s, "GACCCCAG" + s, "2M4I" + (len + 2) + "M", GATKVariantContextUtils.makeFromAlleles(NAME, CHR, 1, Arrays.asList("AAAA", "GACCCCAG"))});
+        }
+
+        return tests.toArray(new Object[][]{});
+    }
+
+    /**
+     * replaceClumpedEventsWithBlockSubstitutions() must reduce the haplotype's
+     * events to the single expected block-substitution site.
+     */
+    @Test(dataProvider = "BlockSubstitutionsData")
+    public void testBlockSubstitutionsData(final String refBases, final String haplotypeBases, final String cigar, final VariantContext expectedBlock) {
+        final Haplotype hap = new Haplotype(haplotypeBases.getBytes(), false, 0, TextCigarCodec.decode(cigar));
+        final GenomeLoc loc = new UnvalidatingGenomeLoc(CHR, 0, 1, refBases.length());
+        final EventMap ee = new EventMap(hap, refBases.getBytes(), loc, NAME);
+        ee.replaceClumpedEventsWithBlockSubstitutions();
+        Assert.assertEquals(ee.getNumberOfEvents(), 1);
+        final VariantContext actual = ee.getVariantContexts().iterator().next();
+        Assert.assertTrue(GATKVariantContextUtils.equalSites(actual, expectedBlock), "Failed with " + actual);
+    }
+
+    // Haplotypes with a SNP adjacent to an indel; each case lists the
+    // ref/alt allele pair(s) expected after clump replacement, in order.
+    @DataProvider(name = "AdjacentSNPIndelTest")
+    public Object[][] makeAdjacentSNPIndelTest() {
+        List<Object[]> tests = new ArrayList<Object[]>();
+
+        tests.add(new Object[]{"TT", "GCT", "1M1I1M", Arrays.asList(Arrays.asList("T", "GC"))});
+        tests.add(new Object[]{"GCT", "TT", "1M1D1M", Arrays.asList(Arrays.asList("GC", "T"))});
+        tests.add(new Object[]{"TT", "GCCT", "1M2I1M", Arrays.asList(Arrays.asList("T", "GCC"))});
+        tests.add(new Object[]{"GCCT", "TT", "1M2D1M", Arrays.asList(Arrays.asList("GCC", "T"))});
+        tests.add(new Object[]{"AAGCCT", "AATT", "3M2D1M", Arrays.asList(Arrays.asList("GCC", "T"))});
+        tests.add(new Object[]{"AAGCCT", "GATT", "3M2D1M", Arrays.asList(Arrays.asList("A", "G"), Arrays.asList("GCC", "T"))});
+        tests.add(new Object[]{"AAAAA", "AGACA", "5M", Arrays.asList(Arrays.asList("A", "G"), Arrays.asList("A", "C"))});
+
+        return tests.toArray(new Object[][]{});
+    }
+
+    /**
+     * Adjacent SNP+indel events must combine into the expected ref/alt allele
+     * pairs, in event order.
+     */
+    @Test(dataProvider = "AdjacentSNPIndelTest")
+    public void testAdjacentSNPIndelTest(final String refBases, final String haplotypeBases, final String cigar, final List<List<String>> expectedAlleles) {
+        final Haplotype hap = new Haplotype(haplotypeBases.getBytes(), false, 0, TextCigarCodec.decode(cigar));
+        final GenomeLoc loc = new UnvalidatingGenomeLoc(CHR, 0, 1, refBases.length());
+        final EventMap ee = new EventMap(hap, refBases.getBytes(), loc, NAME);
+        ee.replaceClumpedEventsWithBlockSubstitutions();
+        Assert.assertEquals(ee.getNumberOfEvents(), expectedAlleles.size());
+        final List<VariantContext> actuals = new ArrayList<VariantContext>(ee.getVariantContexts());
+        for ( int i = 0; i < ee.getNumberOfEvents(); i++ ) {
+            final VariantContext actual = actuals.get(i);
+            Assert.assertEquals(actual.getReference().getDisplayString(), expectedAlleles.get(i).get(0));
+            Assert.assertEquals(actual.getAlternateAllele(0).getDisplayString(), expectedAlleles.get(i).get(1));
+        }
+    }
+
+    // Pairs of co-located events plus the alleles expected from makeBlock().
+    @DataProvider(name = "MakeBlockData")
+    public Object[][] makeMakeBlockData() {
+        List<Object[]> tests = new ArrayList<Object[]>();
+
+        tests.add(new Object[]{Arrays.asList("A", "G"), Arrays.asList("AGT", "A"), Arrays.asList("AGT", "G")});
+        tests.add(new Object[]{Arrays.asList("A", "G"), Arrays.asList("A", "AGT"), Arrays.asList("A", "GGT")});
+
+        tests.add(new Object[]{Arrays.asList("AC", "A"), Arrays.asList("A", "AGT"), Arrays.asList("AC", "AGT")});
+        tests.add(new Object[]{Arrays.asList("ACGTA", "A"), Arrays.asList("A", "AG"), Arrays.asList("ACGTA", "AG")});
+        tests.add(new Object[]{Arrays.asList("AC", "A"), Arrays.asList("A", "AGCGT"), Arrays.asList("AC", "AGCGT")});
+        tests.add(new Object[]{Arrays.asList("A", "ACGTA"), Arrays.asList("AG", "A"), Arrays.asList("AG", "ACGTA")});
+        tests.add(new Object[]{Arrays.asList("A", "AC"), Arrays.asList("AGCGT", "A"), Arrays.asList("AGCGT", "AC")});
+
+        return tests.toArray(new Object[][]{});
+    }
+
+    /**
+     * makeBlock(vc1, vc2) must produce a variant with the expected start and
+     * alleles.  NOTE(review): this overloads testGetNeighborhood above by
+     * signature; presumably a copy-paste name — consider renaming to
+     * testMakeBlock for clarity.
+     */
+    @Test(dataProvider = "MakeBlockData", enabled = true)
+    public void testGetNeighborhood(final List<String> firstAlleles, final List<String> secondAlleles, final List<String> expectedAlleles) {
+        final VariantContext vc1 = GATKVariantContextUtils.makeFromAlleles("x", "20", 10, firstAlleles);
+        final VariantContext vc2 = GATKVariantContextUtils.makeFromAlleles("x", "20", 10, secondAlleles);
+        final VariantContext expected = GATKVariantContextUtils.makeFromAlleles("x", "20", 10, expectedAlleles);
+
+        final EventMap eventMap = new EventMap(Collections.<VariantContext>emptyList());
+        final VariantContext block = eventMap.makeBlock(vc1, vc2);
+
+        Assert.assertEquals(block.getStart(), expected.getStart());
+        Assert.assertEquals(block.getAlleles(), expected.getAlleles());
+    }
+}
diff --git a/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/haplotype/HaplotypeUnitTest.java b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/haplotype/HaplotypeUnitTest.java
new file mode 100644
index 0000000..42801a3
--- /dev/null
+++ b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/haplotype/HaplotypeUnitTest.java
@@ -0,0 +1,249 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.haplotype;
+
+
+import htsjdk.samtools.Cigar;
+import htsjdk.samtools.CigarElement;
+import htsjdk.samtools.CigarOperator;
+import htsjdk.samtools.TextCigarCodec;
+import org.broadinstitute.gatk.utils.BaseTest;
+import org.broadinstitute.gatk.utils.GenomeLoc;
+import org.broadinstitute.gatk.utils.UnvalidatingGenomeLoc;
+import org.broadinstitute.gatk.utils.haplotype.Haplotype;
+import htsjdk.variant.variantcontext.Allele;
+import htsjdk.variant.variantcontext.VariantContext;
+import htsjdk.variant.variantcontext.VariantContextBuilder;
+import org.testng.Assert;
+import org.testng.annotations.BeforeClass;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+import java.util.*;
+
+/**
+ * Basic unit test for Haplotype Class
+ */
+public class HaplotypeUnitTest extends BaseTest {
+    @Test
+    public void testSimpleInsertionAllele() {
+        final String bases = "ACTGGTCAACTGGTCAACTGGTCAACTGGTCA";
+
+        final ArrayList<CigarElement> h1CigarList = new ArrayList<CigarElement>();
+        h1CigarList.add(new CigarElement(bases.length(), CigarOperator.M));
+        final Cigar h1Cigar = new Cigar(h1CigarList);
+        String h1bases = "AACTTCTGGTCAACTGGTCAACTGGTCAACTGGTCA";
+        basicInsertTest("A", "AACTT", 0, h1Cigar, bases, h1bases);
+        h1bases = "ACTGGTCAACTTACTGGTCAACTGGTCAACTGGTCA";
+        basicInsertTest("A", "AACTT", 7, h1Cigar, bases, h1bases);
+        h1bases = "ACTGGTCAACTGGTCAAACTTCTGGTCAACTGGTCA";
+        basicInsertTest("A", "AACTT", 16, h1Cigar, bases, h1bases);
+    }
+
+    @Test
+    public void testSimpleDeletionAllele() {
+        final String bases = "ACTGGTCAACTGGTCAACTGGTCAACTGGTCA";
+
+        final ArrayList<CigarElement> h1CigarList = new ArrayList<CigarElement>();
+        h1CigarList.add(new CigarElement(bases.length(), CigarOperator.M));
+        final Cigar h1Cigar = new Cigar(h1CigarList);
+        String h1bases = "ATCAACTGGTCAACTGGTCAACTGGTCA";
+        basicInsertTest("ACTGG", "A", 0, h1Cigar, bases, h1bases);
+        h1bases = "ACTGGTCAGTCAACTGGTCAACTGGTCA";
+        basicInsertTest("AACTG", "A", 7, h1Cigar, bases, h1bases);
+        h1bases = "ACTGGTCAACTGGTCAATCAACTGGTCA";
+        basicInsertTest("ACTGG", "A", 16, h1Cigar, bases, h1bases);
+    }
+
+    @Test
+    public void testSimpleSNPAllele() {
+        final String bases = "ACTGGTCAACTGGTCAACTGGTCAACTGGTCA";
+
+        final ArrayList<CigarElement> h1CigarList = new ArrayList<CigarElement>();
+        h1CigarList.add(new CigarElement(bases.length(), CigarOperator.M));
+        final Cigar h1Cigar = new Cigar(h1CigarList);
+        String h1bases = "AGTGGTCAACTGGTCAACTGGTCAACTGGTCA";
+        basicInsertTest("C", "G", 1, h1Cigar, bases, h1bases);
+        h1bases = "ACTGGTCTACTGGTCAACTGGTCAACTGGTCA";
+        basicInsertTest("A", "T", 7, h1Cigar, bases, h1bases);
+        h1bases = "ACTGGTCAACTGGTCAAATGGTCAACTGGTCA";
+        basicInsertTest("C", "A", 17, h1Cigar, bases, h1bases);
+    }
+
+    @Test
+    public void testComplexInsertionAllele() {
+        final String bases = "ATCG" + "CCGGCCGGCC" + "ATCGATCG" + "AGGGGGA" + "AGGC";
+
+        final ArrayList<CigarElement> h1CigarList = new ArrayList<CigarElement>();
+        h1CigarList.add(new CigarElement(4, CigarOperator.M));
+        h1CigarList.add(new CigarElement(10, CigarOperator.I));
+        h1CigarList.add(new CigarElement(8, CigarOperator.M));
+        h1CigarList.add(new CigarElement(3, CigarOperator.D));
+        h1CigarList.add(new CigarElement(7 + 4, CigarOperator.M));
+        final Cigar h1Cigar = new Cigar(h1CigarList);
+        String h1bases = "AACTTTCG" + "CCGGCCGGCC" + "ATCGATCG" + "AGGGGGA" + "AGGC";
+        basicInsertTest("A", "AACTT", 0, h1Cigar, bases, h1bases);
+        h1bases = "ATCG" + "CCGGCCGGCC" + "ATCACTTGATCG" + "AGGGGGA" + "AGGC";
+        basicInsertTest("C", "CACTT", 6, h1Cigar, bases, h1bases);
+        h1bases = "ATCG" + "CCGGCCGGCC" + "ATCGATCG" + "AGACTTGGGGA" + "AGGC";
+        basicInsertTest("G", "GACTT", 16, h1Cigar, bases, h1bases);
+    }
+
+    @Test
+    public void testComplexDeletionAllele() {
+        final String bases = "ATCG" + "CCGGCCGGCC" + "ATCGATCG" + "AGGGGGA" + "AGGC";
+
+        final ArrayList<CigarElement> h1CigarList = new ArrayList<CigarElement>();
+        h1CigarList.add(new CigarElement(4, CigarOperator.M));
+        h1CigarList.add(new CigarElement(10, CigarOperator.I));
+        h1CigarList.add(new CigarElement(8, CigarOperator.M));
+        h1CigarList.add(new CigarElement(3, CigarOperator.D));
+        h1CigarList.add(new CigarElement(7 + 4, CigarOperator.M));
+        final Cigar h1Cigar = new Cigar(h1CigarList);
+        String h1bases = "A" + "CCGGCCGGCC" + "ATCGATCG" + "AGGGGGA" + "AGGC";
+        basicInsertTest("ATCG", "A", 0, h1Cigar, bases, h1bases);
+        h1bases = "ATCG" + "CCGGCCGGCC" + "ATAAAG" + "AGGGGGA" + "AGGC";
+        basicInsertTest("CGATC", "AAA", 6, h1Cigar, bases, h1bases);
+        h1bases = "ATCG" + "CCGGCCGGCC" + "ATCGATCG" + "AGA" + "AGGC";
+        basicInsertTest("GGGGG", "G", 16, h1Cigar, bases, h1bases);
+    }
+
+    @Test
+    public void testComplexSNPAllele() {
+        final String bases = "ATCG" + "CCGGCCGGCC" + "ATCGATCG" + "AGGGGGA" + "AGGC";
+
+        final ArrayList<CigarElement> h1CigarList = new ArrayList<CigarElement>();
+        h1CigarList.add(new CigarElement(4, CigarOperator.M));
+        h1CigarList.add(new CigarElement(10, CigarOperator.I));
+        h1CigarList.add(new CigarElement(8, CigarOperator.M));
+        h1CigarList.add(new CigarElement(3, CigarOperator.D));
+        h1CigarList.add(new CigarElement(7 + 4, CigarOperator.M));
+        final Cigar h1Cigar = new Cigar(h1CigarList);
+        String h1bases = "AGCG" + "CCGGCCGGCC" + "ATCGATCG" + "AGGGGGA" + "AGGC";
+        basicInsertTest("T", "G", 1, h1Cigar, bases, h1bases);
+        h1bases = "ATCG" + "CCGGCCGGCC" + "ATCTATCG" + "AGGGGGA" + "AGGC";
+        basicInsertTest("G", "T", 7, h1Cigar, bases, h1bases);
+        h1bases = "ATCG" + "CCGGCCGGCC" + "ATCGATCG" + "AGCGGGA" + "AGGC";
+        basicInsertTest("G", "C", 17, h1Cigar, bases, h1bases);
+    }
+
+    private void basicInsertTest(String ref, String alt, int loc, Cigar cigar, String hap, String newHap) {
+        final Haplotype h = new Haplotype(hap.getBytes());
+        final Allele h1refAllele = Allele.create(ref, true);
+        final Allele h1altAllele = Allele.create(alt, false);
+        final ArrayList<Allele> alleles = new ArrayList<Allele>();
+        alleles.add(h1refAllele);
+        alleles.add(h1altAllele);
+        final VariantContext vc = new VariantContextBuilder().alleles(alleles).loc("1", loc, loc + h1refAllele.getBases().length - 1).make();
+        h.setAlignmentStartHapwrtRef(0);
+        h.setCigar(cigar);
+        final Haplotype h1 = h.insertAllele(vc.getReference(), vc.getAlternateAllele(0), loc, vc.getStart());
+        final Haplotype h1expected = new Haplotype(newHap.getBytes());
+        Assert.assertEquals(h1, h1expected);
+    }
+
+    private Haplotype makeHCForCigar(final String bases, final String cigar) {
+        final Haplotype h = new Haplotype(bases.getBytes());
+        h.setCigar(TextCigarCodec.decode(cigar));
+        return h;
+    }
+
+    @Test
+    public void testConsolidateCigar() throws Exception {
+        Assert.assertEquals(makeHCForCigar("AGCT", "4M").getConsolidatedPaddedCigar(0).toString(), "4M");
+        Assert.assertEquals(makeHCForCigar("AGCT", "4M").getConsolidatedPaddedCigar(1).toString(), "5M");
+        Assert.assertEquals(makeHCForCigar("AGCT", "1M1I1I1M").getConsolidatedPaddedCigar(0).toString(), "1M2I1M");
+        Assert.assertEquals(makeHCForCigar("AGCT", "1M1I1I1M").getConsolidatedPaddedCigar(1).toString(), "1M2I2M");
+        Assert.assertEquals(makeHCForCigar("AGCT", "1M1I1I1M").getConsolidatedPaddedCigar(2).toString(), "1M2I3M");
+        Assert.assertEquals(makeHCForCigar("AGCT", "1M1I1I1I").getConsolidatedPaddedCigar(0).toString(), "1M3I");
+        Assert.assertEquals(makeHCForCigar("AGCT", "1M1I1I1I").getConsolidatedPaddedCigar(1).toString(), "1M3I1M");
+        Assert.assertEquals(makeHCForCigar("AGCT", "1M1I1I1I").getConsolidatedPaddedCigar(2).toString(), "1M3I2M");
+    }
+
+    @DataProvider(name = "TrimmingData")
+    public Object[][] makeTrimmingData() {
+        List<Object[]> tests = new ArrayList<Object[]>();
+
+        // this functionality can be adapted to provide input data for whatever you might want in your data
+        final GenomeLoc loc = new UnvalidatingGenomeLoc("20", 0, 10, 20);
+        final String fullBases = "ACGTAACCGGT";
+        for ( int trimStart = loc.getStart(); trimStart < loc.getStop(); trimStart++ ) {
+            for ( int trimStop = trimStart; trimStop <= loc.getStop(); trimStop++ ) {
+                final int start = trimStart - loc.getStart();
+                final int stop = start + (trimStop - trimStart) + 1;
+                final GenomeLoc trimmedLoc = new UnvalidatingGenomeLoc("20", 0, start + loc.getStart(), stop + loc.getStart() - 1);
+                final String expectedBases = fullBases.substring(start, stop);
+                final Haplotype full = new Haplotype(fullBases.getBytes(), loc);
+                final Haplotype trimmed = new Haplotype(expectedBases.getBytes(), trimmedLoc);
+
+                final int hapStart = 10;
+                full.setAlignmentStartHapwrtRef(hapStart);
+                full.setCigar(TextCigarCodec.decode(full.length() + "M"));
+
+                trimmed.setAlignmentStartHapwrtRef(hapStart + start);
+                trimmed.setCigar(TextCigarCodec.decode(trimmed.length() + "M"));
+
+                tests.add(new Object[]{full, trimmedLoc, trimmed});
+            }
+        }
+
+        final Haplotype full = new Haplotype("ACT".getBytes(), new UnvalidatingGenomeLoc("20", 0, 10, 14));
+        full.setAlignmentStartHapwrtRef(10);
+        full.setCigar(TextCigarCodec.decode("1M2D2M"));
+        tests.add(new Object[]{full, new UnvalidatingGenomeLoc("20", 0, 11, 12), null});
+        tests.add(new Object[]{full, new UnvalidatingGenomeLoc("20", 0, 10, 12), null});
+        tests.add(new Object[]{full, new UnvalidatingGenomeLoc("20", 0, 11, 13), null});
+
+        return tests.toArray(new Object[][]{});
+    }
+
+    @Test(dataProvider = "TrimmingData")
+    public void testTrim(final Haplotype full, final GenomeLoc trimTo, final Haplotype expected) {
+        final Haplotype actual = full.trim(trimTo);
+        if ( expected != null ) {
+            Assert.assertEquals(actual.getBases(), expected.getBases());
+            Assert.assertEquals(actual.getStartPosition(), trimTo.getStart());
+            Assert.assertEquals(actual.getStopPosition(), trimTo.getStop());
+            Assert.assertEquals(actual.getCigar(), expected.getCigar());
+            Assert.assertEquals(actual.getAlignmentStartHapwrtRef(), expected.getAlignmentStartHapwrtRef());
+        } else {
+            Assert.assertNull(actual);
+        }
+    }
+
+    @Test(expectedExceptions = IllegalArgumentException.class)
+    public void testBadTrimLoc() {
+        final GenomeLoc loc = new UnvalidatingGenomeLoc("20", 0, 10, 20);
+        final Haplotype hap = new Haplotype("ACGTAACCGGT".getBytes(), loc);
+        hap.trim(new UnvalidatingGenomeLoc("20", 0, 1, 20));
+    }
+
+    @Test(expectedExceptions = IllegalStateException.class)
+    public void testBadTrimNoLoc() {
+        final Haplotype hap = new Haplotype("ACGTAACCGGT".getBytes());
+        hap.trim(new UnvalidatingGenomeLoc("20", 0, 1, 20));
+    }
+}
diff --git a/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/interval/IntervalUtilsUnitTest.java b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/interval/IntervalUtilsUnitTest.java
new file mode 100644
index 0000000..0fbb0b2
--- /dev/null
+++ b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/interval/IntervalUtilsUnitTest.java
@@ -0,0 +1,1114 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.interval;
+
+import htsjdk.samtools.reference.IndexedFastaSequenceFile;
+import htsjdk.samtools.reference.ReferenceSequenceFile;
+import htsjdk.samtools.util.Interval;
+import htsjdk.samtools.util.IntervalList;
+import htsjdk.samtools.SAMFileHeader;
+import org.apache.commons.io.FileUtils;
+import htsjdk.tribble.Feature;
+import org.broadinstitute.gatk.utils.BaseTest;
+import org.broadinstitute.gatk.utils.commandline.IntervalBinding;
+import org.broadinstitute.gatk.utils.GenomeLoc;
+import org.broadinstitute.gatk.utils.GenomeLocParser;
+import org.broadinstitute.gatk.utils.GenomeLocSortedSet;
+import org.broadinstitute.gatk.utils.exceptions.UserException;
+import org.broadinstitute.gatk.utils.fasta.CachingIndexedFastaSequenceFile;
+import org.testng.Assert;
+import org.testng.annotations.BeforeClass;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.util.*;
+
+/**
+ * test out the interval utility methods
+ */
+public class IntervalUtilsUnitTest extends BaseTest {
+    // used to seed the genome loc parser with a sequence dictionary
+    private SAMFileHeader hg18Header;
+    private GenomeLocParser hg18GenomeLocParser;
+    private List<GenomeLoc> hg18ReferenceLocs;
+    private SAMFileHeader hg19Header;
+    private GenomeLocParser hg19GenomeLocParser;
+    private List<GenomeLoc> hg19ReferenceLocs;
+    private List<GenomeLoc> hg19exomeIntervals;
+
+    private List<GenomeLoc> getLocs(String... intervals) {
+        return getLocs(Arrays.asList(intervals));
+    }
+
+    private List<GenomeLoc> getLocs(List<String> intervals) {
+        if (intervals.size() == 0)
+            return hg18ReferenceLocs;
+        List<GenomeLoc> locs = new ArrayList<GenomeLoc>();
+        for (String interval: intervals)
+            locs.add(hg18GenomeLocParser.parseGenomeLoc(interval));
+        return Collections.unmodifiableList(locs);
+    }
+
+    @BeforeClass
+    public void init() {
+        File hg18Ref = new File(BaseTest.hg18Reference);
+        try {
+            final ReferenceSequenceFile seq = new CachingIndexedFastaSequenceFile(hg18Ref);
+            hg18Header = new SAMFileHeader();
+            hg18Header.setSequenceDictionary(seq.getSequenceDictionary());
+            hg18GenomeLocParser = new GenomeLocParser(seq);
+            hg18ReferenceLocs = Collections.unmodifiableList(GenomeLocSortedSet.createSetFromSequenceDictionary(seq.getSequenceDictionary()).toList()) ;
+        }
+        catch(FileNotFoundException ex) {
+            throw new UserException.CouldNotReadInputFile(hg18Ref,ex);
+        }
+
+        File hg19Ref = new File(BaseTest.hg19Reference);
+        try {
+            final ReferenceSequenceFile seq = new CachingIndexedFastaSequenceFile(hg19Ref);
+            hg19Header = new SAMFileHeader();
+            hg19Header.setSequenceDictionary(seq.getSequenceDictionary());
+            hg19GenomeLocParser = new GenomeLocParser(seq);
+            hg19ReferenceLocs = Collections.unmodifiableList(GenomeLocSortedSet.createSetFromSequenceDictionary(seq.getSequenceDictionary()).toList()) ;
+
+            hg19exomeIntervals = Collections.unmodifiableList(IntervalUtils.parseIntervalArguments(hg19GenomeLocParser, Arrays.asList(hg19Intervals)));
+        }
+        catch(FileNotFoundException ex) {
+            throw new UserException.CouldNotReadInputFile(hg19Ref,ex);
+        }
+    }
+
+    // -------------------------------------------------------------------------------------
+    //
+    // tests to ensure the quality of the interval cuts of the interval cutting functions
+    //
+    // -------------------------------------------------------------------------------------
+
+    private class IntervalSlicingTest extends TestDataProvider {
+        public int parts;
+        public double maxAllowableVariance;
+
+        private IntervalSlicingTest(final int parts, final double maxAllowableVariance) {
+            super(IntervalSlicingTest.class);
+            this.parts = parts;
+            this.maxAllowableVariance = maxAllowableVariance;
+        }
+
+        public String toString() {
+            return String.format("IntervalSlicingTest parts=%d maxVar=%.2f", parts, maxAllowableVariance);
+        }
+    }
+
+    @DataProvider(name = "intervalslicingdata")
+    public Object[][] createTrees() {
+        new IntervalSlicingTest(1, 0);
+        new IntervalSlicingTest(2, 1);
+        new IntervalSlicingTest(5, 1);
+        new IntervalSlicingTest(10, 1);
+        new IntervalSlicingTest(67, 1);
+        new IntervalSlicingTest(100, 1);
+        new IntervalSlicingTest(500, 1);
+        new IntervalSlicingTest(1000, 1);
+        return IntervalSlicingTest.getTests(IntervalSlicingTest.class);
+    }
+
+    @Test(enabled = true, dataProvider = "intervalslicingdata")
+    public void testFixedScatterIntervalsAlgorithm(IntervalSlicingTest test) {
+        List<List<GenomeLoc>> splits = IntervalUtils.splitFixedIntervals(hg19exomeIntervals, test.parts);
+
+        long totalSize = IntervalUtils.intervalSize(hg19exomeIntervals);
+        long idealSplitSize = totalSize / test.parts;
+
+        long sumOfSplitSizes = 0;
+        int counter = 0;
+        for ( final List<GenomeLoc> split : splits ) {
+            long splitSize = IntervalUtils.intervalSize(split);
+            double sigma = (splitSize - idealSplitSize) / (1.0 * idealSplitSize);
+            //logger.warn(String.format("Split %d size %d ideal %d sigma %.2f", counter, splitSize, idealSplitSize, sigma));
+            counter++;
+            sumOfSplitSizes += splitSize;
+            Assert.assertTrue(Math.abs(sigma) <= test.maxAllowableVariance, String.format("Interval %d (size %d ideal %d) has a variance %.2f outside of the tolerated range %.2f", counter, splitSize, idealSplitSize, sigma, test.maxAllowableVariance));
+        }
+
+        Assert.assertEquals(totalSize, sumOfSplitSizes, "Split intervals don't contain the exact number of bases in the origianl intervals");
+    }
+
+    // -------------------------------------------------------------------------------------
+    //
+    // splitLocusIntervals tests
+    //
+    // -------------------------------------------------------------------------------------
+
+    /** large scale tests for many intervals */
+    private class SplitLocusIntervalsTest extends TestDataProvider {
+        final List<GenomeLoc> originalIntervals;
+        final public int parts;
+
+        private SplitLocusIntervalsTest(final String name, List<GenomeLoc> originalIntervals, final int parts) {
+            super(SplitLocusIntervalsTest.class, name);
+            this.parts = parts;
+            this.originalIntervals = originalIntervals;
+        }
+
+        public String toString() {
+            return String.format("%s parts=%d", super.toString(), parts);
+        }
+    }
+
+    @DataProvider(name = "IntervalRepartitionTest")
+    public Object[][] createIntervalRepartitionTest() {
+        for ( int parts : Arrays.asList(1, 2, 3, 10, 13, 100, 151, 1000, 10000) ) {
+        //for ( int parts : Arrays.asList(10) ) {
+            new SplitLocusIntervalsTest("hg19RefLocs", hg19ReferenceLocs, parts);
+            new SplitLocusIntervalsTest("hg19ExomeLocs", hg19exomeIntervals, parts);
+        }
+
+        return SplitLocusIntervalsTest.getTests(SplitLocusIntervalsTest.class);
+    }
+
+    @Test(enabled = true, dataProvider = "IntervalRepartitionTest")
+    public void testIntervalRepartition(SplitLocusIntervalsTest test) {
+        List<List<GenomeLoc>> splitByLocus = IntervalUtils.splitLocusIntervals(test.originalIntervals, test.parts);
+        Assert.assertEquals(splitByLocus.size(), test.parts, "SplitLocusIntervals failed to generate correct number of intervals");
+        List<GenomeLoc> flat = IntervalUtils.flattenSplitIntervals(splitByLocus);
+
+        // test overall size
+        final long originalSize = IntervalUtils.intervalSize(test.originalIntervals);
+        final long flatSize = IntervalUtils.intervalSize(flat);
+        Assert.assertEquals(flatSize, originalSize, "SplitLocusIntervals locs cover an incorrect number of bases");
+
+        // test size of each split
+        final long ideal = (long)Math.floor(originalSize / (1.0 * test.parts));
+        final long maxSize = ideal + (originalSize % test.parts) * test.parts; // no more than N * rounding error in size
+        for ( final List<GenomeLoc> split : splitByLocus ) {
+            final long splitSize = IntervalUtils.intervalSize(split);
+            Assert.assertTrue(splitSize >= ideal && splitSize <= maxSize,
+                    String.format("SplitLocusIntervals interval (start=%s) has size %d outside of bounds ideal=%d, max=%d",
+                            split.get(0), splitSize, ideal, maxSize));
+        }
+
+        // test that every base in original is covered once by a base in split by locus intervals
+        String diff = IntervalUtils.equateIntervals(test.originalIntervals, flat);
+        Assert.assertNull(diff, diff);
+    }
+
+    /** small scale tests where the expected cuts are enumerated upfront for testing */
+    private class SplitLocusIntervalsSmallTest extends TestDataProvider {
+        final List<GenomeLoc> original;
+        final public int parts;
+        final public int expectedParts;
+        final List<GenomeLoc> expected;
+
+        private SplitLocusIntervalsSmallTest(final String name, List<GenomeLoc> originalIntervals, final int parts, List<GenomeLoc> expected) {
+            this(name, originalIntervals, parts,  expected, parts);
+        }
+
+        private SplitLocusIntervalsSmallTest(final String name, List<GenomeLoc> originalIntervals, final int parts, List<GenomeLoc> expected, int expectedParts) {
+            super(SplitLocusIntervalsSmallTest.class, name);
+            this.parts = parts;
+            this.expectedParts = expectedParts;
+            this.original = originalIntervals;
+            this.expected = expected;
+        }
+
+        public String toString() {
+            return String.format("%s parts=%d", super.toString(), parts);
+        }
+    }
+
+    @DataProvider(name = "SplitLocusIntervalsSmallTest")
+    public Object[][] createSplitLocusIntervalsSmallTest() {
+        GenomeLoc bp01_10 = hg19GenomeLocParser.createGenomeLoc("1", 1, 10);
+
+        GenomeLoc bp1_5 = hg19GenomeLocParser.createGenomeLoc("1", 1, 5);
+        GenomeLoc bp6_10 = hg19GenomeLocParser.createGenomeLoc("1", 6, 10);
+        new SplitLocusIntervalsSmallTest("cut into two", Arrays.asList(bp01_10), 2, Arrays.asList(bp1_5, bp6_10));
+
+        GenomeLoc bp20_30 = hg19GenomeLocParser.createGenomeLoc("1", 20, 30);
+        new SplitLocusIntervalsSmallTest("two in two", Arrays.asList(bp01_10, bp20_30), 2, Arrays.asList(bp01_10, bp20_30));
+
+        GenomeLoc bp1_7 = hg19GenomeLocParser.createGenomeLoc("1", 1, 7);
+        GenomeLoc bp8_10 = hg19GenomeLocParser.createGenomeLoc("1", 8, 10);
+        GenomeLoc bp20_23 = hg19GenomeLocParser.createGenomeLoc("1", 20, 23);
+        GenomeLoc bp24_30 = hg19GenomeLocParser.createGenomeLoc("1", 24, 30);
+        new SplitLocusIntervalsSmallTest("two in three", Arrays.asList(bp01_10, bp20_30), 3,
+                Arrays.asList(bp1_7, bp8_10, bp20_23, bp24_30));
+
+        GenomeLoc bp1_2 = hg19GenomeLocParser.createGenomeLoc("1", 1, 2);
+        GenomeLoc bp1_1 = hg19GenomeLocParser.createGenomeLoc("1", 1, 1);
+        GenomeLoc bp2_2 = hg19GenomeLocParser.createGenomeLoc("1", 2, 2);
+        new SplitLocusIntervalsSmallTest("too many pieces", Arrays.asList(bp1_2), 5, Arrays.asList(bp1_1, bp2_2), 2);
+
+        new SplitLocusIntervalsSmallTest("emptyList", Collections.<GenomeLoc>emptyList(), 5, Collections.<GenomeLoc>emptyList(), 0);
+
+        return SplitLocusIntervalsSmallTest.getTests(SplitLocusIntervalsSmallTest.class);
+    }
+
+    @Test(enabled = true, dataProvider = "SplitLocusIntervalsSmallTest")
+    public void splitLocusIntervalsSmallTest(SplitLocusIntervalsSmallTest test) {
+        List<List<GenomeLoc>> splitByLocus = IntervalUtils.splitLocusIntervals(test.original, test.parts);
+        Assert.assertEquals(splitByLocus.size(), test.expectedParts, "SplitLocusIntervals failed to generate correct number of intervals");
+        List<GenomeLoc> flat = IntervalUtils.flattenSplitIntervals(splitByLocus);
+
+        // test sizes
+        final long originalSize = IntervalUtils.intervalSize(test.original);
+        final long splitSize = IntervalUtils.intervalSize(flat);
+        Assert.assertEquals(splitSize, originalSize, "SplitLocusIntervals locs cover an incorrect number of bases");
+
+        Assert.assertEquals(flat, test.expected, "SplitLocusIntervals locs not expected intervals");
+    }
+
+    //
+    // Misc. tests
+    //
+
+    @Test(expectedExceptions=UserException.class)
+    public void testMergeListsBySetOperatorNoOverlap() {
+        // a couple of lists we'll use for the testing
+        List<GenomeLoc> listEveryTwoFromOne = new ArrayList<GenomeLoc>();
+        List<GenomeLoc> listEveryTwoFromTwo = new ArrayList<GenomeLoc>();
+
+        // create the two lists we'll use
+        for (int x = 1; x < 101; x++) {
+            if (x % 2 == 0)
+                listEveryTwoFromTwo.add(hg18GenomeLocParser.createGenomeLoc("chr1",x,x));
+            else
+                listEveryTwoFromOne.add(hg18GenomeLocParser.createGenomeLoc("chr1",x,x));
+        }
+
+        List<GenomeLoc> ret;
+        ret = IntervalUtils.mergeListsBySetOperator(listEveryTwoFromTwo, listEveryTwoFromOne, IntervalSetRule.UNION);
+        Assert.assertEquals(ret.size(), 100);
+        ret = IntervalUtils.mergeListsBySetOperator(listEveryTwoFromTwo, listEveryTwoFromOne, null);
+        Assert.assertEquals(ret.size(), 100);
+        ret = IntervalUtils.mergeListsBySetOperator(listEveryTwoFromTwo, listEveryTwoFromOne, IntervalSetRule.INTERSECTION);
+        Assert.assertEquals(ret.size(), 0);
+    }
+
+    @Test
+    public void testMergeListsBySetOperatorAllOverlap() {
+        // a couple of lists we'll use for the testing
+        List<GenomeLoc> allSites = new ArrayList<GenomeLoc>();
+        List<GenomeLoc> listEveryTwoFromTwo = new ArrayList<GenomeLoc>();
+
+        // create the two lists we'll use
+        for (int x = 1; x < 101; x++) {
+            if (x % 2 == 0)
+                listEveryTwoFromTwo.add(hg18GenomeLocParser.createGenomeLoc("chr1",x,x));
+            allSites.add(hg18GenomeLocParser.createGenomeLoc("chr1",x,x));
+        }
+
+        List<GenomeLoc> ret;
+        ret = IntervalUtils.mergeListsBySetOperator(listEveryTwoFromTwo, allSites, IntervalSetRule.UNION);
+        Assert.assertEquals(ret.size(), 150);
+        ret = IntervalUtils.mergeListsBySetOperator(listEveryTwoFromTwo, allSites, null);
+        Assert.assertEquals(ret.size(), 150);
+        ret = IntervalUtils.mergeListsBySetOperator(listEveryTwoFromTwo, allSites, IntervalSetRule.INTERSECTION);
+        Assert.assertEquals(ret.size(), 50);
+    }
+
+    @Test
+    public void testMergeListsBySetOperator() {
+        // a couple of lists we'll use for the testing
+        List<GenomeLoc> allSites = new ArrayList<GenomeLoc>();
+        List<GenomeLoc> listEveryTwoFromTwo = new ArrayList<GenomeLoc>();
+
+        // create the two lists we'll use
+        for (int x = 1; x < 101; x++) {
+            if (x % 5 == 0) {
+                listEveryTwoFromTwo.add(hg18GenomeLocParser.createGenomeLoc("chr1",x,x));
+                allSites.add(hg18GenomeLocParser.createGenomeLoc("chr1",x,x));
+            }
+        }
+
+        List<GenomeLoc> ret;
+        ret = IntervalUtils.mergeListsBySetOperator(listEveryTwoFromTwo, allSites, IntervalSetRule.UNION);
+        Assert.assertEquals(ret.size(), 40);
+        ret = IntervalUtils.mergeListsBySetOperator(listEveryTwoFromTwo, allSites, null);
+        Assert.assertEquals(ret.size(), 40);
+        ret = IntervalUtils.mergeListsBySetOperator(listEveryTwoFromTwo, allSites, IntervalSetRule.INTERSECTION);
+        Assert.assertEquals(ret.size(), 20);
+    }
+
+    @Test
+    public void testOverlappingIntervalsFromSameSourceWithIntersection() {
+        // a couple of lists we'll use for the testing
+        List<GenomeLoc> source1 = new ArrayList<GenomeLoc>();
+        List<GenomeLoc> source2 = new ArrayList<GenomeLoc>();
+
+        source1.add(hg18GenomeLocParser.createGenomeLoc("chr1", 10, 20));
+        source1.add(hg18GenomeLocParser.createGenomeLoc("chr1", 15, 25));
+
+        source2.add(hg18GenomeLocParser.createGenomeLoc("chr1", 16, 18));
+        source2.add(hg18GenomeLocParser.createGenomeLoc("chr1", 22, 24));
+
+        List<GenomeLoc> ret = IntervalUtils.mergeListsBySetOperator(source1, source2, IntervalSetRule.INTERSECTION);
+        Assert.assertEquals(ret.size(), 2);
+    }
+
+    @Test
+    public void testGetContigLengths() {
+        Map<String, Integer> lengths = IntervalUtils.getContigSizes(new File(BaseTest.hg18Reference));
+        Assert.assertEquals((long)lengths.get("chr1"), 247249719);
+        Assert.assertEquals((long)lengths.get("chr2"), 242951149);
+        Assert.assertEquals((long)lengths.get("chr3"), 199501827);
+        Assert.assertEquals((long)lengths.get("chr20"), 62435964);
+        Assert.assertEquals((long)lengths.get("chrX"), 154913754);
+    }
+
+    // Parsing whole-contig names and explicit start-stop ranges; the no-argument
+    // call is expected to yield 45 locs — presumably one per hg18 contig (the
+    // getLocs() helper is defined elsewhere in this class).
+    @Test
+    public void testParseIntervalArguments() {
+        Assert.assertEquals(getLocs().size(), 45);
+        Assert.assertEquals(getLocs("chr1", "chr2", "chr3").size(), 3);
+        Assert.assertEquals(getLocs("chr1:1-2", "chr1:4-5", "chr2:1-1", "chr3:2-2").size(), 4);
+    }
+
+    // isIntervalFile should accept an existing (even empty) interval list and
+    // recognize every supported extension when existence checking is disabled.
+    @Test
+    public void testIsIntervalFile() {
+        Assert.assertTrue(IntervalUtils.isIntervalFile(BaseTest.privateTestDir + "empty_intervals.list"));
+        Assert.assertTrue(IntervalUtils.isIntervalFile(BaseTest.privateTestDir + "empty_intervals.list", true));
+
+        List<String> extensions = Arrays.asList("bed", "interval_list", "intervals", "list", "picard");
+        for (String extension: extensions) {
+            // second argument false: do not require the file to exist on disk
+            Assert.assertTrue(IntervalUtils.isIntervalFile("test_intervals." + extension, false), "Tested interval file extension: " + extension);
+        }
+    }
+
+    // Probing a nonexistent interval file must raise CouldNotReadInputFile.
+    @Test(expectedExceptions = UserException.CouldNotReadInputFile.class)
+    public void testMissingIntervalFile() {
+        IntervalUtils.isIntervalFile(BaseTest.privateTestDir + "no_such_intervals.list");
+    }
+
+    // Splitting three whole-contig intervals over three files should put exactly
+    // one contig in each file, in order.
+    @Test
+    public void testFixedScatterIntervalsBasic() {
+        GenomeLoc chr1 = hg18GenomeLocParser.parseGenomeLoc("chr1");
+        GenomeLoc chr2 = hg18GenomeLocParser.parseGenomeLoc("chr2");
+        GenomeLoc chr3 = hg18GenomeLocParser.parseGenomeLoc("chr3");
+
+        List<File> files = testFiles("basic.", 3, ".intervals");
+
+        List<GenomeLoc> locs = getLocs("chr1", "chr2", "chr3");
+        List<List<GenomeLoc>> splits = IntervalUtils.splitFixedIntervals(locs, files.size());
+        IntervalUtils.scatterFixedIntervals(hg18Header, splits, files);
+
+        // round-trip: re-parse each scattered file
+        List<GenomeLoc> locs1 = IntervalUtils.parseIntervalArguments(hg18GenomeLocParser, Arrays.asList(files.get(0).toString()));
+        List<GenomeLoc> locs2 = IntervalUtils.parseIntervalArguments(hg18GenomeLocParser, Arrays.asList(files.get(1).toString()));
+        List<GenomeLoc> locs3 = IntervalUtils.parseIntervalArguments(hg18GenomeLocParser, Arrays.asList(files.get(2).toString()));
+
+        Assert.assertEquals(locs1.size(), 1);
+        Assert.assertEquals(locs2.size(), 1);
+        Assert.assertEquals(locs3.size(), 1);
+
+        Assert.assertEquals(locs1.get(0), chr1);
+        Assert.assertEquals(locs2.get(0), chr2);
+        Assert.assertEquals(locs3.get(0), chr3);
+    }
+
+    // Four contigs scattered over three files: the last file absorbs the overflow
+    // (chr3 and chr4 together).
+    @Test
+    public void testScatterFixedIntervalsLessFiles() {
+        GenomeLoc chr1 = hg18GenomeLocParser.parseGenomeLoc("chr1");
+        GenomeLoc chr2 = hg18GenomeLocParser.parseGenomeLoc("chr2");
+        GenomeLoc chr3 = hg18GenomeLocParser.parseGenomeLoc("chr3");
+        GenomeLoc chr4 = hg18GenomeLocParser.parseGenomeLoc("chr4");
+
+        List<File> files = testFiles("less.", 3, ".intervals");
+
+        List<GenomeLoc> locs = getLocs("chr1", "chr2", "chr3", "chr4");
+        List<List<GenomeLoc>> splits = IntervalUtils.splitFixedIntervals(locs, files.size());
+        IntervalUtils.scatterFixedIntervals(hg18Header, splits, files);
+
+        List<GenomeLoc> locs1 = IntervalUtils.parseIntervalArguments(hg18GenomeLocParser, Arrays.asList(files.get(0).toString()));
+        List<GenomeLoc> locs2 = IntervalUtils.parseIntervalArguments(hg18GenomeLocParser, Arrays.asList(files.get(1).toString()));
+        List<GenomeLoc> locs3 = IntervalUtils.parseIntervalArguments(hg18GenomeLocParser, Arrays.asList(files.get(2).toString()));
+
+        Assert.assertEquals(locs1.size(), 1);
+        Assert.assertEquals(locs2.size(), 1);
+        Assert.assertEquals(locs3.size(), 2);
+
+        Assert.assertEquals(locs1.get(0), chr1);
+        Assert.assertEquals(locs2.get(0), chr2);
+        Assert.assertEquals(locs3.get(0), chr3);
+        Assert.assertEquals(locs3.get(1), chr4);
+    }
+
+    // Requesting more splits (3) than there are intervals (2) must fail.
+    @Test(expectedExceptions=UserException.BadArgumentValue.class)
+    public void testSplitFixedIntervalsMoreFiles() {
+        List<File> files = testFiles("more.", 3, ".intervals");
+        List<GenomeLoc> locs = getLocs("chr1", "chr2");
+        IntervalUtils.splitFixedIntervals(locs, files.size());
+    }
+
+    // Scattering must reject a split count (2) that disagrees with the file count (3).
+    @Test(expectedExceptions=UserException.BadArgumentValue.class)
+    public void testScatterFixedIntervalsMoreFiles() {
+        List<File> files = testFiles("more.", 3, ".intervals");
+        List<GenomeLoc> locs = getLocs("chr1", "chr2");
+        List<List<GenomeLoc>> splits = IntervalUtils.splitFixedIntervals(locs, locs.size()); // locs.size() instead of files.size()
+        IntervalUtils.scatterFixedIntervals(hg18Header, splits, files);
+    }
+    // Two intervals on the FIRST contig (chr1) scattered over three files: the
+    // chr1 intervals are split across the first two files.
+    @Test
+    public void testScatterFixedIntervalsStart() {
+        List<String> intervals = Arrays.asList("chr1:1-2", "chr1:4-5", "chr2:1-1", "chr3:2-2");
+        GenomeLoc chr1a = hg18GenomeLocParser.parseGenomeLoc("chr1:1-2");
+        GenomeLoc chr1b = hg18GenomeLocParser.parseGenomeLoc("chr1:4-5");
+        GenomeLoc chr2 = hg18GenomeLocParser.parseGenomeLoc("chr2:1-1");
+        GenomeLoc chr3 = hg18GenomeLocParser.parseGenomeLoc("chr3:2-2");
+
+        List<File> files = testFiles("split.", 3, ".intervals");
+
+        List<GenomeLoc> locs = getLocs(intervals);
+        List<List<GenomeLoc>> splits = IntervalUtils.splitFixedIntervals(locs, files.size());
+        IntervalUtils.scatterFixedIntervals(hg18Header, splits, files);
+
+        List<GenomeLoc> locs1 = IntervalUtils.parseIntervalArguments(hg18GenomeLocParser, Arrays.asList(files.get(0).toString()));
+        List<GenomeLoc> locs2 = IntervalUtils.parseIntervalArguments(hg18GenomeLocParser, Arrays.asList(files.get(1).toString()));
+        List<GenomeLoc> locs3 = IntervalUtils.parseIntervalArguments(hg18GenomeLocParser, Arrays.asList(files.get(2).toString()));
+
+        Assert.assertEquals(locs1.size(), 1);
+        Assert.assertEquals(locs2.size(), 1);
+        Assert.assertEquals(locs3.size(), 2);
+
+        Assert.assertEquals(locs1.get(0), chr1a);
+        Assert.assertEquals(locs2.get(0), chr1b);
+        Assert.assertEquals(locs3.get(0), chr2);
+        Assert.assertEquals(locs3.get(1), chr3);
+    }
+
+    // Two intervals on the MIDDLE contig (chr2): the second chr2 interval spills
+    // into the last file together with chr3.
+    @Test
+    public void testScatterFixedIntervalsMiddle() {
+        List<String> intervals = Arrays.asList("chr1:1-1", "chr2:1-2", "chr2:4-5", "chr3:2-2");
+        GenomeLoc chr1 = hg18GenomeLocParser.parseGenomeLoc("chr1:1-1");
+        GenomeLoc chr2a = hg18GenomeLocParser.parseGenomeLoc("chr2:1-2");
+        GenomeLoc chr2b = hg18GenomeLocParser.parseGenomeLoc("chr2:4-5");
+        GenomeLoc chr3 = hg18GenomeLocParser.parseGenomeLoc("chr3:2-2");
+
+        List<File> files = testFiles("split.", 3, ".intervals");
+
+        List<GenomeLoc> locs = getLocs(intervals);
+        List<List<GenomeLoc>> splits = IntervalUtils.splitFixedIntervals(locs, files.size());
+        IntervalUtils.scatterFixedIntervals(hg18Header, splits, files);
+
+        List<GenomeLoc> locs1 = IntervalUtils.parseIntervalArguments(hg18GenomeLocParser, Arrays.asList(files.get(0).toString()));
+        List<GenomeLoc> locs2 = IntervalUtils.parseIntervalArguments(hg18GenomeLocParser, Arrays.asList(files.get(1).toString()));
+        List<GenomeLoc> locs3 = IntervalUtils.parseIntervalArguments(hg18GenomeLocParser, Arrays.asList(files.get(2).toString()));
+
+        Assert.assertEquals(locs1.size(), 1);
+        Assert.assertEquals(locs2.size(), 1);
+        Assert.assertEquals(locs3.size(), 2);
+
+        Assert.assertEquals(locs1.get(0), chr1);
+        Assert.assertEquals(locs2.get(0), chr2a);
+        Assert.assertEquals(locs3.get(0), chr2b);
+        Assert.assertEquals(locs3.get(1), chr3);
+    }
+
+    // Two intervals on the LAST contig (chr3): chr1 and chr2 pack into the first
+    // file, and the chr3 intervals are split across the remaining two.
+    @Test
+    public void testScatterFixedIntervalsEnd() {
+        List<String> intervals = Arrays.asList("chr1:1-1", "chr2:2-2", "chr3:1-2", "chr3:4-5");
+        GenomeLoc chr1 = hg18GenomeLocParser.parseGenomeLoc("chr1:1-1");
+        GenomeLoc chr2 = hg18GenomeLocParser.parseGenomeLoc("chr2:2-2");
+        GenomeLoc chr3a = hg18GenomeLocParser.parseGenomeLoc("chr3:1-2");
+        GenomeLoc chr3b = hg18GenomeLocParser.parseGenomeLoc("chr3:4-5");
+
+        List<File> files = testFiles("split.", 3, ".intervals");
+
+        List<GenomeLoc> locs = getLocs(intervals);
+        List<List<GenomeLoc>> splits = IntervalUtils.splitFixedIntervals(locs, files.size());
+        IntervalUtils.scatterFixedIntervals(hg18Header, splits, files);
+
+        List<GenomeLoc> locs1 = IntervalUtils.parseIntervalArguments(hg18GenomeLocParser, Arrays.asList(files.get(0).toString()));
+        List<GenomeLoc> locs2 = IntervalUtils.parseIntervalArguments(hg18GenomeLocParser, Arrays.asList(files.get(1).toString()));
+        List<GenomeLoc> locs3 = IntervalUtils.parseIntervalArguments(hg18GenomeLocParser, Arrays.asList(files.get(2).toString()));
+
+        Assert.assertEquals(locs1.size(), 2);
+        Assert.assertEquals(locs2.size(), 1);
+        Assert.assertEquals(locs3.size(), 1);
+
+        Assert.assertEquals(locs1.get(0), chr1);
+        Assert.assertEquals(locs1.get(1), chr2);
+        Assert.assertEquals(locs2.get(0), chr3a);
+        Assert.assertEquals(locs3.get(0), chr3b);
+    }
+
+    // Split the chr20 exome target list into 20 fixed scatter files and verify
+    // both the per-split interval counts and that every interval round-trips
+    // through the written files in the original order.
+    @Test
+    public void testScatterFixedIntervalsFile() {
+        List<File> files = testFiles("sg.", 20, ".intervals");
+        List<GenomeLoc> locs = IntervalUtils.parseIntervalArguments(hg18GenomeLocParser, Arrays.asList(BaseTest.GATKDataLocation + "whole_exome_agilent_designed_120.targets.hg18.chr20.interval_list"));
+        List<List<GenomeLoc>> splits = IntervalUtils.splitFixedIntervals(locs, files.size());
+
+        // expected number of intervals in each of the 20 splits
+        int[] counts = {
+                125, 138, 287, 291, 312, 105, 155, 324,
+                295, 298, 141, 121, 285, 302, 282, 88,
+                116, 274, 282, 248
+        };
+
+        for (int i = 0; i < splits.size(); i++) {
+            int splitCount = splits.get(i).size();
+            Assert.assertEquals(splitCount, counts[i], "Num intervals in split " + i);
+        }
+
+        IntervalUtils.scatterFixedIntervals(hg18Header, splits, files);
+
+        // re-parse each scattered file and confirm the locs round-trip in order
+        int locIndex = 0;
+        for (int i = 0; i < files.size(); i++) {
+            String file = files.get(i).toString();
+            List<GenomeLoc> parsedLocs = IntervalUtils.parseIntervalArguments(hg18GenomeLocParser, Arrays.asList(file));
+            Assert.assertEquals(parsedLocs.size(), counts[i], "Intervals in " + file);
+            for (GenomeLoc parsedLoc: parsedLocs)
+                Assert.assertEquals(parsedLoc, locs.get(locIndex), String.format("Genome loc %d from file %d", locIndex++, i));
+        }
+        Assert.assertEquals(locIndex, locs.size(), "Total number of GenomeLocs");
+    }
+
+    // One file per hg19 reference contig (85): each scattered file should contain
+    // exactly the corresponding whole-contig interval.
+    @Test
+    public void testScatterFixedIntervalsMax() {
+        List<File> files = testFiles("sg.", 85, ".intervals");
+        List<List<GenomeLoc>> splits = IntervalUtils.splitFixedIntervals(hg19ReferenceLocs, files.size());
+        IntervalUtils.scatterFixedIntervals(hg19Header, splits, files);
+
+        for (int i = 0; i < files.size(); i++) {
+            String file = files.get(i).toString();
+            List<GenomeLoc> parsedLocs = IntervalUtils.parseIntervalArguments(hg19GenomeLocParser, Arrays.asList(file));
+            Assert.assertEquals(parsedLocs.size(), 1, "parsedLocs[" + i + "].size()");
+            Assert.assertEquals(parsedLocs.get(0), hg19ReferenceLocs.get(i), "parsedLocs[" + i + "].get()");
+        }
+    }
+
+    // scatterContigIntervals preserves the INPUT order of the intervals, not the
+    // reference contig order: chr2 was given first, so it lands in the first file.
+    @Test
+    public void testScatterContigIntervalsOrder() {
+        List<String> intervals = Arrays.asList("chr2:1-1", "chr1:1-1", "chr3:2-2");
+        GenomeLoc chr1 = hg18GenomeLocParser.parseGenomeLoc("chr1:1-1");
+        GenomeLoc chr2 = hg18GenomeLocParser.parseGenomeLoc("chr2:1-1");
+        GenomeLoc chr3 = hg18GenomeLocParser.parseGenomeLoc("chr3:2-2");
+
+        List<File> files = testFiles("split.", 3, ".intervals");
+
+        IntervalUtils.scatterContigIntervals(hg18Header, getLocs(intervals), files);
+
+        List<GenomeLoc> locs1 = IntervalUtils.parseIntervalArguments(hg18GenomeLocParser, Arrays.asList(files.get(0).toString()));
+        List<GenomeLoc> locs2 = IntervalUtils.parseIntervalArguments(hg18GenomeLocParser, Arrays.asList(files.get(1).toString()));
+        List<GenomeLoc> locs3 = IntervalUtils.parseIntervalArguments(hg18GenomeLocParser, Arrays.asList(files.get(2).toString()));
+
+        Assert.assertEquals(locs1.size(), 1);
+        Assert.assertEquals(locs2.size(), 1);
+        Assert.assertEquals(locs3.size(), 1);
+
+        Assert.assertEquals(locs1.get(0), chr2);
+        Assert.assertEquals(locs2.get(0), chr1);
+        Assert.assertEquals(locs3.get(0), chr3);
+    }
+
+    // Three contigs into three files by contig grouping: one contig per file.
+    @Test
+    public void testScatterContigIntervalsBasic() {
+        GenomeLoc chr1 = hg18GenomeLocParser.parseGenomeLoc("chr1");
+        GenomeLoc chr2 = hg18GenomeLocParser.parseGenomeLoc("chr2");
+        GenomeLoc chr3 = hg18GenomeLocParser.parseGenomeLoc("chr3");
+
+        List<File> files = testFiles("contig_basic.", 3, ".intervals");
+
+        IntervalUtils.scatterContigIntervals(hg18Header, getLocs("chr1", "chr2", "chr3"), files);
+
+        List<GenomeLoc> locs1 = IntervalUtils.parseIntervalArguments(hg18GenomeLocParser, Arrays.asList(files.get(0).toString()));
+        List<GenomeLoc> locs2 = IntervalUtils.parseIntervalArguments(hg18GenomeLocParser, Arrays.asList(files.get(1).toString()));
+        List<GenomeLoc> locs3 = IntervalUtils.parseIntervalArguments(hg18GenomeLocParser, Arrays.asList(files.get(2).toString()));
+
+        Assert.assertEquals(locs1.size(), 1);
+        Assert.assertEquals(locs2.size(), 1);
+        Assert.assertEquals(locs3.size(), 1);
+
+        Assert.assertEquals(locs1.get(0), chr1);
+        Assert.assertEquals(locs2.get(0), chr2);
+        Assert.assertEquals(locs3.get(0), chr3);
+    }
+
+    // Four contigs into three files: contig scattering packs the overflow into
+    // the FIRST file (chr1+chr2), unlike fixed scattering which fills the last.
+    @Test
+    public void testScatterContigIntervalsLessFiles() {
+        GenomeLoc chr1 = hg18GenomeLocParser.parseGenomeLoc("chr1");
+        GenomeLoc chr2 = hg18GenomeLocParser.parseGenomeLoc("chr2");
+        GenomeLoc chr3 = hg18GenomeLocParser.parseGenomeLoc("chr3");
+        GenomeLoc chr4 = hg18GenomeLocParser.parseGenomeLoc("chr4");
+
+        List<File> files = testFiles("contig_less.", 3, ".intervals");
+
+        IntervalUtils.scatterContigIntervals(hg18Header, getLocs("chr1", "chr2", "chr3", "chr4"), files);
+
+        List<GenomeLoc> locs1 = IntervalUtils.parseIntervalArguments(hg18GenomeLocParser, Arrays.asList(files.get(0).toString()));
+        List<GenomeLoc> locs2 = IntervalUtils.parseIntervalArguments(hg18GenomeLocParser, Arrays.asList(files.get(1).toString()));
+        List<GenomeLoc> locs3 = IntervalUtils.parseIntervalArguments(hg18GenomeLocParser, Arrays.asList(files.get(2).toString()));
+
+        Assert.assertEquals(locs1.size(), 2);
+        Assert.assertEquals(locs2.size(), 1);
+        Assert.assertEquals(locs3.size(), 1);
+
+        Assert.assertEquals(locs1.get(0), chr1);
+        Assert.assertEquals(locs1.get(1), chr2);
+        Assert.assertEquals(locs2.get(0), chr3);
+        Assert.assertEquals(locs3.get(0), chr4);
+    }
+
+    // More output files (3) than contigs (2) is rejected as BadInput.
+    @Test(expectedExceptions=UserException.BadInput.class)
+    public void testScatterContigIntervalsMoreFiles() {
+        List<File> files = testFiles("contig_more.", 3, ".intervals");
+        IntervalUtils.scatterContigIntervals(hg18Header, getLocs("chr1", "chr2"), files);
+    }
+
+    // Two intervals on the first contig: contig grouping keeps both chr1
+    // intervals together in the first file.
+    @Test
+    public void testScatterContigIntervalsStart() {
+        List<String> intervals = Arrays.asList("chr1:1-2", "chr1:4-5", "chr2:1-1", "chr3:2-2");
+        GenomeLoc chr1a = hg18GenomeLocParser.parseGenomeLoc("chr1:1-2");
+        GenomeLoc chr1b = hg18GenomeLocParser.parseGenomeLoc("chr1:4-5");
+        GenomeLoc chr2 = hg18GenomeLocParser.parseGenomeLoc("chr2:1-1");
+        GenomeLoc chr3 = hg18GenomeLocParser.parseGenomeLoc("chr3:2-2");
+
+        List<File> files = testFiles("contig_split_start.", 3, ".intervals");
+
+        IntervalUtils.scatterContigIntervals(hg18Header, getLocs(intervals), files);
+
+        List<GenomeLoc> locs1 = IntervalUtils.parseIntervalArguments(hg18GenomeLocParser, Arrays.asList(files.get(0).toString()));
+        List<GenomeLoc> locs2 = IntervalUtils.parseIntervalArguments(hg18GenomeLocParser, Arrays.asList(files.get(1).toString()));
+        List<GenomeLoc> locs3 = IntervalUtils.parseIntervalArguments(hg18GenomeLocParser, Arrays.asList(files.get(2).toString()));
+
+        Assert.assertEquals(locs1.size(), 2);
+        Assert.assertEquals(locs2.size(), 1);
+        Assert.assertEquals(locs3.size(), 1);
+
+        Assert.assertEquals(locs1.get(0), chr1a);
+        Assert.assertEquals(locs1.get(1), chr1b);
+        Assert.assertEquals(locs2.get(0), chr2);
+        Assert.assertEquals(locs3.get(0), chr3);
+    }
+
+    // Two intervals on the middle contig: both chr2 intervals stay together in
+    // the second file.
+    @Test
+    public void testScatterContigIntervalsMiddle() {
+        List<String> intervals = Arrays.asList("chr1:1-1", "chr2:1-2", "chr2:4-5", "chr3:2-2");
+        GenomeLoc chr1 = hg18GenomeLocParser.parseGenomeLoc("chr1:1-1");
+        GenomeLoc chr2a = hg18GenomeLocParser.parseGenomeLoc("chr2:1-2");
+        GenomeLoc chr2b = hg18GenomeLocParser.parseGenomeLoc("chr2:4-5");
+        GenomeLoc chr3 = hg18GenomeLocParser.parseGenomeLoc("chr3:2-2");
+
+        List<File> files = testFiles("contig_split_middle.", 3, ".intervals");
+
+        IntervalUtils.scatterContigIntervals(hg18Header, getLocs(intervals), files);
+
+        List<GenomeLoc> locs1 = IntervalUtils.parseIntervalArguments(hg18GenomeLocParser, Arrays.asList(files.get(0).toString()));
+        List<GenomeLoc> locs2 = IntervalUtils.parseIntervalArguments(hg18GenomeLocParser, Arrays.asList(files.get(1).toString()));
+        List<GenomeLoc> locs3 = IntervalUtils.parseIntervalArguments(hg18GenomeLocParser, Arrays.asList(files.get(2).toString()));
+
+        Assert.assertEquals(locs1.size(), 1);
+        Assert.assertEquals(locs2.size(), 2);
+        Assert.assertEquals(locs3.size(), 1);
+
+        Assert.assertEquals(locs1.get(0), chr1);
+        Assert.assertEquals(locs2.get(0), chr2a);
+        Assert.assertEquals(locs2.get(1), chr2b);
+        Assert.assertEquals(locs3.get(0), chr3);
+    }
+
+    // Two intervals on the last contig: both chr3 intervals stay together in the
+    // last file.
+    @Test
+    public void testScatterContigIntervalsEnd() {
+        List<String> intervals = Arrays.asList("chr1:1-1", "chr2:2-2", "chr3:1-2", "chr3:4-5");
+        GenomeLoc chr1 = hg18GenomeLocParser.parseGenomeLoc("chr1:1-1");
+        GenomeLoc chr2 = hg18GenomeLocParser.parseGenomeLoc("chr2:2-2");
+        GenomeLoc chr3a = hg18GenomeLocParser.parseGenomeLoc("chr3:1-2");
+        GenomeLoc chr3b = hg18GenomeLocParser.parseGenomeLoc("chr3:4-5");
+
+        // fixed comma spacing ("3 ," -> "3, ") to match every sibling call
+        List<File> files = testFiles("contig_split_end.", 3, ".intervals");
+
+        IntervalUtils.scatterContigIntervals(hg18Header, getLocs(intervals), files);
+
+        List<GenomeLoc> locs1 = IntervalUtils.parseIntervalArguments(hg18GenomeLocParser, Arrays.asList(files.get(0).toString()));
+        List<GenomeLoc> locs2 = IntervalUtils.parseIntervalArguments(hg18GenomeLocParser, Arrays.asList(files.get(1).toString()));
+        List<GenomeLoc> locs3 = IntervalUtils.parseIntervalArguments(hg18GenomeLocParser, Arrays.asList(files.get(2).toString()));
+
+        Assert.assertEquals(locs1.size(), 1);
+        Assert.assertEquals(locs2.size(), 1);
+        Assert.assertEquals(locs3.size(), 2);
+
+        Assert.assertEquals(locs1.get(0), chr1);
+        Assert.assertEquals(locs2.get(0), chr2);
+        Assert.assertEquals(locs3.get(0), chr3a);
+        Assert.assertEquals(locs3.get(1), chr3b);
+    }
+
+    // One file per hg19 contig (85): contig scattering should round-trip each
+    // whole-contig interval into its own file.
+    @Test
+    public void testScatterContigIntervalsMax() {
+        List<File> files = testFiles("sg.", 85, ".intervals");
+        IntervalUtils.scatterContigIntervals(hg19Header, hg19ReferenceLocs, files);
+
+        for (int i = 0; i < files.size(); i++) {
+            String file = files.get(i).toString();
+            List<GenomeLoc> parsedLocs = IntervalUtils.parseIntervalArguments(hg19GenomeLocParser, Arrays.asList(file));
+            Assert.assertEquals(parsedLocs.size(), 1, "parsedLocs[" + i + "].size()");
+            Assert.assertEquals(parsedLocs.get(0), hg19ReferenceLocs.get(i), "parsedLocs[" + i + "].get()");
+        }
+    }
+
+    private List<File> testFiles(String prefix, int count, String suffix) {
+        ArrayList<File> files = new ArrayList<File>();
+        for (int i = 1; i <= count; i++) {
+            files.add(createTempFile(prefix + i, suffix));
+        }
+        return files;
+    }
+
+    // Provides one picard-style and one gatk-style interval file, each holding
+    // two adjacent intervals that should merge into one.
+    @DataProvider(name="unmergedIntervals")
+    public Object[][] getUnmergedIntervals() {
+        return new Object[][] {
+                new Object[] {"small_unmerged_picard_intervals.list"},
+                new Object[] {"small_unmerged_gatk_intervals.list"}
+        };
+    }
+
+    // mergeIntervalLocations with rule ALL collapses the two intervals; a null
+    // rule must behave identically to ALL.
+    @Test(dataProvider="unmergedIntervals")
+    public void testUnmergedIntervals(String unmergedIntervals) {
+        List<GenomeLoc> locs = IntervalUtils.parseIntervalArguments(hg18GenomeLocParser, Collections.singletonList(privateTestDir + unmergedIntervals));
+        Assert.assertEquals(locs.size(), 2);
+
+        List<GenomeLoc> merged;
+
+        merged = IntervalUtils.mergeIntervalLocations(locs, IntervalMergingRule.ALL);
+        Assert.assertEquals(merged.size(), 1);
+
+        // Test that null means the same as ALL
+        merged = IntervalUtils.mergeIntervalLocations(locs, null);
+        Assert.assertEquals(merged.size(), 1);
+    }
+
+    /*
+    Split into tests that can be written to files and tested by writeFlankingIntervals,
+    and lists that cannot but are still handled by getFlankingIntervals.
+    */
+    // Base fixture for flanking-interval tests: parses the original and expected
+    // interval strings up front so each case carries ready-made GenomeLocs.
+    private static abstract class FlankingIntervalsTestData extends TestDataProvider {
+        final public File referenceFile;
+        final public GenomeLocParser parser;
+        // number of flanking base pairs to add on each side
+        final int basePairs;
+        final List<GenomeLoc> original;
+        final List<GenomeLoc> expected;
+
+        protected FlankingIntervalsTestData(Class<?> clazz, String name, File referenceFile, GenomeLocParser parser,
+                                          int basePairs, List<String> original, List<String> expected) {
+            super(clazz, name);
+            this.referenceFile = referenceFile;
+            this.parser = parser;
+            this.basePairs = basePairs;
+            this.original = parse(parser, original);
+            this.expected = parse(parser, expected);
+        }
+
+        // Parse interval strings, mapping the literal "unmapped" to GenomeLoc.UNMAPPED.
+        private static List<GenomeLoc> parse(GenomeLocParser parser, List<String> locs) {
+            List<GenomeLoc> parsed = new ArrayList<GenomeLoc>();
+            for (String loc: locs)
+                parsed.add("unmapped".equals(loc) ? GenomeLoc.UNMAPPED : parser.parseGenomeLoc(loc));
+            return parsed;
+        }
+    }
+
+    // Cases whose original and flanking intervals can be written to interval files.
+    private static class FlankingIntervalsFile extends FlankingIntervalsTestData {
+        public FlankingIntervalsFile(String name, File referenceFile, GenomeLocParser parser,
+                                     int basePairs, List<String> original, List<String> expected) {
+            super(FlankingIntervalsFile.class, name, referenceFile, parser, basePairs, original, expected);
+        }
+    }
+
+    // Cases that are only valid as in-memory lists (empty, unmapped, whole-contig).
+    private static class FlankingIntervalsList extends FlankingIntervalsTestData {
+        public FlankingIntervalsList(String name, File referenceFile, GenomeLocParser parser,
+                                     int basePairs, List<String> original, List<String> expected) {
+            super(FlankingIntervalsList.class, name, referenceFile, parser, basePairs, original, expected);
+        }
+    }
+
+    /* Intervals where the original and the flanks can be written to files. */
+    @DataProvider(name = "flankingIntervalsFiles")
+    public Object[][] getFlankingIntervalsFiles() {
+        File hg19ReferenceFile = new File(BaseTest.hg19Reference);
+        int hg19Length1 = hg19GenomeLocParser.getContigInfo("1").getSequenceLength();
+
+        // --- flanks at the very start of the contig (left flank clipped away) ---
+        new FlankingIntervalsFile("atStartBase1", hg19ReferenceFile, hg19GenomeLocParser, 1,
+                Arrays.asList("1:1"),
+                Arrays.asList("1:2"));
+
+        new FlankingIntervalsFile("atStartBase50", hg19ReferenceFile, hg19GenomeLocParser, 50,
+                Arrays.asList("1:1"),
+                Arrays.asList("1:2-51"));
+
+        new FlankingIntervalsFile("atStartRange50", hg19ReferenceFile, hg19GenomeLocParser, 50,
+                Arrays.asList("1:1-10"),
+                Arrays.asList("1:11-60"));
+
+        // --- flanks at the very end of the contig (right flank clipped away) ---
+        new FlankingIntervalsFile("atEndBase1", hg19ReferenceFile, hg19GenomeLocParser, 1,
+                Arrays.asList("1:" + hg19Length1),
+                Arrays.asList("1:" + (hg19Length1 - 1)));
+
+        new FlankingIntervalsFile("atEndBase50", hg19ReferenceFile, hg19GenomeLocParser, 50,
+                Arrays.asList("1:" + hg19Length1),
+                Arrays.asList(String.format("1:%d-%d", hg19Length1 - 50, hg19Length1 - 1)));
+
+        new FlankingIntervalsFile("atEndRange50", hg19ReferenceFile, hg19GenomeLocParser, 50,
+                Arrays.asList(String.format("1:%d-%d", hg19Length1 - 10, hg19Length1)),
+                Arrays.asList(String.format("1:%d-%d", hg19Length1 - 60, hg19Length1 - 11)));
+
+        // --- near a contig edge: the short flank is truncated to fit ---
+        new FlankingIntervalsFile("nearStartBase1", hg19ReferenceFile, hg19GenomeLocParser, 1,
+                Arrays.asList("1:2"),
+                Arrays.asList("1:1", "1:3"));
+
+        new FlankingIntervalsFile("nearStartRange50", hg19ReferenceFile, hg19GenomeLocParser, 50,
+                Arrays.asList("1:21-30"),
+                Arrays.asList("1:1-20", "1:31-80"));
+
+        new FlankingIntervalsFile("nearEndBase1", hg19ReferenceFile, hg19GenomeLocParser, 1,
+                Arrays.asList("1:" + (hg19Length1 - 1)),
+                Arrays.asList("1:" + (hg19Length1 - 2), "1:" + hg19Length1));
+
+        new FlankingIntervalsFile("nearEndRange50", hg19ReferenceFile, hg19GenomeLocParser, 50,
+                Arrays.asList(String.format("1:%d-%d", hg19Length1 - 30, hg19Length1 - 21)),
+                Arrays.asList(
+                        String.format("1:%d-%d", hg19Length1 - 80, hg19Length1 - 31),
+                        String.format("1:%d-%d", hg19Length1 - 20, hg19Length1)));
+
+        // --- fully interior intervals: both flanks fit unmodified ---
+        new FlankingIntervalsFile("beyondStartBase1", hg19ReferenceFile, hg19GenomeLocParser, 1,
+                Arrays.asList("1:3"),
+                Arrays.asList("1:2", "1:4"));
+
+        new FlankingIntervalsFile("beyondStartRange50", hg19ReferenceFile, hg19GenomeLocParser, 50,
+                Arrays.asList("1:101-200"),
+                Arrays.asList("1:51-100", "1:201-250"));
+
+        new FlankingIntervalsFile("beyondEndBase1", hg19ReferenceFile, hg19GenomeLocParser, 1,
+                Arrays.asList("1:" + (hg19Length1 - 3)),
+                Arrays.asList("1:" + (hg19Length1 - 4), "1:" + (hg19Length1 - 2)));
+
+        new FlankingIntervalsFile("beyondEndRange50", hg19ReferenceFile, hg19GenomeLocParser, 50,
+                Arrays.asList(String.format("1:%d-%d", hg19Length1 - 200, hg19Length1 - 101)),
+                Arrays.asList(
+                        String.format("1:%d-%d", hg19Length1 - 250, hg19Length1 - 201),
+                        String.format("1:%d-%d", hg19Length1 - 100, hg19Length1 - 51)));
+
+        // --- two intervals: inner flanks must merge/shrink/vanish as they collide ---
+        new FlankingIntervalsFile("betweenFar50", hg19ReferenceFile, hg19GenomeLocParser, 50,
+                Arrays.asList("1:101-200", "1:401-500"),
+                Arrays.asList("1:51-100", "1:201-250", "1:351-400", "1:501-550"));
+
+        new FlankingIntervalsFile("betweenSpan50", hg19ReferenceFile, hg19GenomeLocParser, 50,
+                Arrays.asList("1:101-200", "1:301-400"),
+                Arrays.asList("1:51-100", "1:201-300", "1:401-450"));
+
+        new FlankingIntervalsFile("betweenOverlap50", hg19ReferenceFile, hg19GenomeLocParser, 50,
+                Arrays.asList("1:101-200", "1:271-400"),
+                Arrays.asList("1:51-100", "1:201-270", "1:401-450"));
+
+        new FlankingIntervalsFile("betweenShort50", hg19ReferenceFile, hg19GenomeLocParser, 50,
+                Arrays.asList("1:101-200", "1:221-400"),
+                Arrays.asList("1:51-100", "1:201-220", "1:401-450"));
+
+        new FlankingIntervalsFile("betweenNone50", hg19ReferenceFile, hg19GenomeLocParser, 50,
+                Arrays.asList("1:101-200", "1:121-400"),
+                Arrays.asList("1:51-100", "1:401-450"));
+
+        new FlankingIntervalsFile("twoContigs", hg19ReferenceFile, hg19GenomeLocParser, 50,
+                Arrays.asList("1:101-200", "2:301-400"),
+                Arrays.asList("1:51-100", "1:201-250", "2:251-300", "2:401-450"));
+
+        // Explicit testing a problematic agilent target pair
+        new FlankingIntervalsFile("badAgilent", hg19ReferenceFile, hg19GenomeLocParser, 50,
+                Arrays.asList("2:74756257-74756411", "2:74756487-74756628"),
+                // wrong!    ("2:74756206-74756256", "2:74756412-74756462", "2:74756436-74756486", "2:74756629-74756679")
+                Arrays.asList("2:74756207-74756256", "2:74756412-74756486", "2:74756629-74756678"));
+
+        return TestDataProvider.getTests(FlankingIntervalsFile.class);
+    }
+
+    /* Intervals where either the original and/or the flanks cannot be written to a file. */
+    @DataProvider(name = "flankingIntervalsLists")
+    public Object[][] getFlankingIntervalsLists() {
+        File hg19ReferenceFile = new File(BaseTest.hg19Reference);
+        List<String> empty = Collections.emptyList();
+
+        // no input intervals -> no flanks
+        new FlankingIntervalsList("empty", hg19ReferenceFile, hg19GenomeLocParser, 50,
+                empty,
+                empty);
+
+        // unmapped locations have no coordinates to flank
+        new FlankingIntervalsList("unmapped", hg19ReferenceFile, hg19GenomeLocParser, 50,
+                Arrays.asList("unmapped"),
+                empty);
+
+        // whole contigs leave no room for flanking on either side
+        new FlankingIntervalsList("fullContig", hg19ReferenceFile, hg19GenomeLocParser, 50,
+                Arrays.asList("1"),
+                empty);
+
+        new FlankingIntervalsList("fullContigs", hg19ReferenceFile, hg19GenomeLocParser, 50,
+                Arrays.asList("1", "2", "3"),
+                empty);
+
+        // unmapped entries are ignored when mixed with mapped intervals
+        new FlankingIntervalsList("betweenWithUnmapped", hg19ReferenceFile, hg19GenomeLocParser, 50,
+                Arrays.asList("1:101-200", "1:301-400", "unmapped"),
+                Arrays.asList("1:51-100", "1:201-300", "1:401-450"));
+
+        return TestDataProvider.getTests(FlankingIntervalsList.class);
+    }
+
+    // Write the original intervals to a file, run writeFlankingIntervals, and
+    // check the flanking file parses back to the expected locs.
+    @Test(dataProvider = "flankingIntervalsFiles")
+    public void testWriteFlankingIntervals(FlankingIntervalsTestData data) throws Exception {
+        File originalFile = createTempFile("original.", ".intervals");
+        File flankingFile = createTempFile("flanking.", ".intervals");
+        try {
+            List<String> lines = new ArrayList<String>();
+            for (GenomeLoc loc: data.original)
+                lines.add(loc.toString());
+            FileUtils.writeLines(originalFile, lines);
+
+            IntervalUtils.writeFlankingIntervals(data.referenceFile, originalFile, flankingFile, data.basePairs);
+
+            List<GenomeLoc> actual = IntervalUtils.intervalFileToList(data.parser, flankingFile.getAbsolutePath());
+
+            // multi-line failure message showing the full original/actual/expected lists
+            String description = String.format("%n      name: %s%n  original: %s%n    actual: %s%n  expected: %s%n",
+                    data.toString(), data.original, actual, data.expected);
+            Assert.assertEquals(actual, data.expected, description);
+        } finally {
+            FileUtils.deleteQuietly(originalFile);
+            FileUtils.deleteQuietly(flankingFile);
+        }
+    }
+
+    // The list-only cases cannot be written as flanking interval files and must
+    // raise a UserException.
+    @Test(dataProvider = "flankingIntervalsLists", expectedExceptions = UserException.class)
+    public void testWritingBadFlankingIntervals(FlankingIntervalsTestData data) throws Exception {
+        File originalFile = createTempFile("original.", ".intervals");
+        File flankingFile = createTempFile("flanking.", ".intervals");
+        try {
+            List<String> lines = new ArrayList<String>();
+            for (GenomeLoc loc: data.original)
+                lines.add(loc.toString());
+            FileUtils.writeLines(originalFile, lines);
+
+            // Should throw a user exception on bad input if either the original
+            // intervals are empty or if the flanking intervals are empty
+            IntervalUtils.writeFlankingIntervals(data.referenceFile, originalFile, flankingFile, data.basePairs);
+        } finally {
+            FileUtils.deleteQuietly(originalFile);
+            FileUtils.deleteQuietly(flankingFile);
+        }
+    }
+
+    /**
+     * In-memory counterpart of the file-based test above: getFlankingIntervals()
+     * on the provider's original list must yield the expected flanking intervals.
+     */
+    @Test(dataProvider = "flankingIntervalsLists")
+    public void testGetFlankingIntervals(FlankingIntervalsTestData data) {
+        List<GenomeLoc> actual = IntervalUtils.getFlankingIntervals(data.parser, data.original, data.basePairs);
+        String description = String.format("%n      name: %s%n  original: %s%n    actual: %s%n  expected: %s%n",
+                data.toString(), data.original, actual, data.expected);
+        Assert.assertEquals(actual, data.expected, description);
+    }
+
+    /**
+     * The legacy semicolon-separated interval syntax ("interval1;interval2") is no
+     * longer supported; parsing it must raise UserException.BadArgumentValue.
+     */
+    @Test(expectedExceptions=UserException.BadArgumentValue.class)
+    public void testExceptionUponLegacyIntervalSyntax() throws Exception {
+        final GenomeLocParser parser = new GenomeLocParser(new CachingIndexedFastaSequenceFile(new File(BaseTest.hg19Reference)));
+
+        // Attempting to use the legacy -L "interval1;interval2" syntax should produce an exception:
+        IntervalBinding<Feature> binding = new IntervalBinding<Feature>("1;2");
+        binding.getIntervals(parser);
+    }
+
+    /**
+     * Invalid-interval cases against the exampleFASTA reference:
+     * out-of-bounds coordinates, an unknown contig, and a negative start.
+     * Each row is {parser, contig, start, end}.
+     */
+    @DataProvider(name="invalidIntervalTestData")
+    public Object[][] invalidIntervalDataProvider() throws Exception {
+        File fastaFile = new File(publicTestDir + "exampleFASTA.fasta");
+        GenomeLocParser genomeLocParser = new GenomeLocParser(new IndexedFastaSequenceFile(fastaFile));
+
+        return new Object[][] {
+                new Object[] {genomeLocParser, "chr1", 10000000, 20000000},  // beyond contig end
+                new Object[] {genomeLocParser, "chr2", 1, 2},                // contig not in reference
+                new Object[] {genomeLocParser, "chr1", -1, 50}               // negative start
+        };
+    }
+
+    /*
+     * This test is disabled because its assumption that we will not throw an error
+     * upon parsing invalid Picard intervals is no longer true, as htsjdk has added
+     * extra protection against invalid intervals to IntervalList.add().
+     *
+     * We should reconsider our decision in IntervalUtils.intervalFileToList() to
+     * silently ignore invalid intervals when parsing Picard interval files, as it's
+     * inconsistent with the way we handle invalid intervals for GATK interval files
+     * (throw a UserException, covered by testInvalidGATKFileIntervalHandling() below),
+     * and update this test accordingly.
+     */
+    @Test(dataProvider="invalidIntervalTestData", enabled = false)
+    public void testInvalidPicardIntervalHandling(GenomeLocParser genomeLocParser,
+                                                  String contig, int intervalStart, int intervalEnd ) throws Exception {
+
+        // Build a Picard-style interval list containing one (invalid) interval,
+        // write it to a temp file, then attempt to load it through the GATK path.
+        SAMFileHeader picardFileHeader = new SAMFileHeader();
+        picardFileHeader.addSequence(genomeLocParser.getContigInfo("chr1"));
+        IntervalList picardIntervals = new IntervalList(picardFileHeader);
+        picardIntervals.add(new Interval(contig, intervalStart, intervalEnd, true, "dummyname"));
+
+        File picardIntervalFile = createTempFile("testInvalidPicardIntervalHandling", ".intervals");
+        picardIntervals.write(picardIntervalFile);
+
+        List<IntervalBinding<Feature>> intervalArgs = new ArrayList<IntervalBinding<Feature>>(1);
+        intervalArgs.add(new IntervalBinding<Feature>(picardIntervalFile.getAbsolutePath()));
+
+        IntervalUtils.loadIntervals(intervalArgs, IntervalSetRule.UNION, IntervalMergingRule.ALL, 0, genomeLocParser);
+    }
+
+    /**
+     * Loading a GATK-format interval file containing an invalid interval must throw
+     * a UserException (contrast with the silently-ignoring Picard path above).
+     */
+    @Test(expectedExceptions=UserException.class, dataProvider="invalidIntervalTestData")
+    public void testInvalidGATKFileIntervalHandling(GenomeLocParser genomeLocParser,
+                                                    String contig, int intervalStart, int intervalEnd ) throws Exception {
+
+        File gatkIntervalFile = createTempFile("testInvalidGATKFileIntervalHandling", ".intervals",
+                String.format("%s:%d-%d", contig, intervalStart, intervalEnd));
+
+        List<IntervalBinding<Feature>> intervalArgs = new ArrayList<IntervalBinding<Feature>>(1);
+        intervalArgs.add(new IntervalBinding<Feature>(gatkIntervalFile.getAbsolutePath()));
+
+        IntervalUtils.loadIntervals(intervalArgs, IntervalSetRule.UNION, IntervalMergingRule.ALL, 0, genomeLocParser);
+    }
+
+    /**
+     * Creates an auto-deleting temp file (via BaseTest) and writes the given lines
+     * to it, one per line. With no lines, an empty file is created.
+     */
+    private File createTempFile( String tempFilePrefix, String tempFileExtension, String... lines ) throws Exception {
+        File tempFile = BaseTest.createTempFile(tempFilePrefix, tempFileExtension);
+        FileUtils.writeLines(tempFile, Arrays.asList(lines));
+        return tempFile;
+    }
+
+    /**
+     * Cases for sortAndMergeIntervals: each row is {merging rule, unsorted input,
+     * expected sorted/merged output}. Exercises adjacent-interval merging under
+     * ALL vs. OVERLAPPING_ONLY, cross-contig non-merging, and whole-contig absorption.
+     */
+    @DataProvider(name = "sortAndMergeIntervals")
+    public Object[][] getSortAndMergeIntervals() {
+        return new Object[][] {
+                new Object[] { IntervalMergingRule.OVERLAPPING_ONLY, getLocs("chr1:1", "chr1:3", "chr1:2"), getLocs("chr1:1", "chr1:2", "chr1:3") },
+                new Object[] { IntervalMergingRule.ALL, getLocs("chr1:1", "chr1:3", "chr1:2"), getLocs("chr1:1-3") },
+                new Object[] { IntervalMergingRule.OVERLAPPING_ONLY, getLocs("chr1:1", "chr1:3", "chr2:2"), getLocs("chr1:1", "chr1:3", "chr2:2") },
+                new Object[] { IntervalMergingRule.ALL, getLocs("chr1:1", "chr1:3", "chr2:2"), getLocs("chr1:1", "chr1:3", "chr2:2") },
+                new Object[] { IntervalMergingRule.OVERLAPPING_ONLY, getLocs("chr1:1", "chr1"), getLocs("chr1") },
+                new Object[] { IntervalMergingRule.ALL, getLocs("chr1:1", "chr1"), getLocs("chr1") }
+        };
+    }
+
+    /** Verifies sortAndMergeIntervals() against the table above (hg18 parser). */
+    @Test(dataProvider = "sortAndMergeIntervals")
+    public void testSortAndMergeIntervals(IntervalMergingRule merge, List<GenomeLoc> unsorted, List<GenomeLoc> expected) {
+        List<GenomeLoc> sorted = IntervalUtils.sortAndMergeIntervals(hg18GenomeLocParser, unsorted, merge).toList();
+        Assert.assertEquals(sorted, expected);
+    }
+}
diff --git a/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/io/IOUtilsUnitTest.java b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/io/IOUtilsUnitTest.java
new file mode 100644
index 0000000..66f6190
--- /dev/null
+++ b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/io/IOUtilsUnitTest.java
@@ -0,0 +1,326 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.io;
+
+import org.apache.commons.io.FileUtils;
+import org.broadinstitute.gatk.utils.BaseTest;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Random;
+
+import org.broadinstitute.gatk.utils.Utils;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+import org.broadinstitute.gatk.utils.exceptions.UserException;
+import org.testng.Assert;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+/**
+ * Unit tests for {@link IOUtils}: temp-dir validation, path resolution/normalization,
+ * resource extraction, special-file detection, and byte-array file/stream I/O.
+ * Note the mixed 2-space/4-space indentation is preserved from the original file.
+ */
+public class IOUtilsUnitTest extends BaseTest {
+  // A subdirectory under /tmp is an acceptable temp dir...
+  @Test
+  public void testGoodTempDir() {
+    IOUtils.checkTempDir(new File("/tmp/queue"));
+  }
+
+  // ...but /tmp itself is rejected with BadTmpDir.
+  @Test(expectedExceptions=UserException.BadTmpDir.class)
+  public void testBadTempDir() {
+    IOUtils.checkTempDir(new File("/tmp"));
+  }
+
+  // An absolute sub-path ignores the base dir; a "." sub-path resolves to the base.
+  @Test
+  public void testAbsoluteSubDir() {
+    File subDir = IOUtils.absolute(new File("."), new File("/path/to/file"));
+    Assert.assertEquals(subDir, new File("/path/to/file"));
+
+    subDir = IOUtils.absolute(new File("/different/path"), new File("/path/to/file"));
+    Assert.assertEquals(subDir, new File("/path/to/file"));
+
+    subDir = IOUtils.absolute(new File("/different/path"), new File("."));
+    Assert.assertEquals(subDir, new File("/different/path"));
+  }
+
+  // A relative sub-path is resolved beneath the base dir.
+  @Test
+  public void testRelativeSubDir() throws IOException {
+    File subDir = IOUtils.absolute(new File("."), new File("path/to/file"));
+    Assert.assertEquals(subDir.getCanonicalFile(), new File("path/to/file").getCanonicalFile());
+
+    subDir = IOUtils.absolute(new File("/different/path"), new File("path/to/file"));
+    Assert.assertEquals(subDir, new File("/different/path/path/to/file"));
+  }
+
+  // "." and ".." components: note absolute() does NOT collapse ".." here,
+  // only "." segments (see testAbsolute below).
+  @Test
+  public void testDottedSubDir() throws IOException {
+    File subDir = IOUtils.absolute(new File("."), new File("path/../to/file"));
+    Assert.assertEquals(subDir.getCanonicalFile(), new File("path/../to/./file").getCanonicalFile());
+
+    subDir = IOUtils.absolute(new File("."), new File("/path/../to/file"));
+    Assert.assertEquals(subDir, new File("/path/../to/file"));
+
+    subDir = IOUtils.absolute(new File("/different/../path"), new File("path/to/file"));
+    Assert.assertEquals(subDir, new File("/different/../path/path/to/file"));
+
+    subDir = IOUtils.absolute(new File("/different/./path"), new File("/path/../to/file"));
+    Assert.assertEquals(subDir, new File("/path/../to/file"));
+  }
+
+  // tempDir() creates a real directory; tryDelete() removes it.
+  @Test
+  public void testTempDir() {
+    File tempDir = IOUtils.tempDir("Q-Unit-Test", "", new File("queueTempDirToDelete"));
+    Assert.assertTrue(tempDir.exists());
+    Assert.assertFalse(tempDir.isFile());
+    Assert.assertTrue(tempDir.isDirectory());
+    boolean deleted = IOUtils.tryDelete(tempDir);
+    Assert.assertTrue(deleted);
+    Assert.assertFalse(tempDir.exists());
+  }
+
+  // dirLevel(path, n) truncates to the first n components; n past the end
+  // returns the path unchanged.
+  @Test
+  public void testDirLevel() {
+    File dir = IOUtils.dirLevel(new File("/path/to/directory"), 1);
+    Assert.assertEquals(dir, new File("/path"));
+
+    dir = IOUtils.dirLevel(new File("/path/to/directory"), 2);
+    Assert.assertEquals(dir, new File("/path/to"));
+
+    dir = IOUtils.dirLevel(new File("/path/to/directory"), 3);
+    Assert.assertEquals(dir, new File("/path/to/directory"));
+
+    dir = IOUtils.dirLevel(new File("/path/to/directory"), 4);
+    Assert.assertEquals(dir, new File("/path/to/directory"));
+  }
+
+  // absolute() strips "." segments (but not ".." — see testDottedSubDir) and
+  // must not mangle names that merely contain dots ("directory.", ".directory").
+  @Test
+  public void testAbsolute() {
+    File dir = IOUtils.absolute(new File("/path/./to/./directory/."));
+    Assert.assertEquals(dir, new File("/path/to/directory"));
+
+    dir = IOUtils.absolute(new File("/"));
+    Assert.assertEquals(dir, new File("/"));
+
+    dir = IOUtils.absolute(new File("/."));
+    Assert.assertEquals(dir, new File("/"));
+
+    dir = IOUtils.absolute(new File("/././."));
+    Assert.assertEquals(dir, new File("/"));
+
+    dir = IOUtils.absolute(new File("/./directory/."));
+    Assert.assertEquals(dir, new File("/directory"));
+
+    dir = IOUtils.absolute(new File("/./directory/./"));
+    Assert.assertEquals(dir, new File("/directory"));
+
+    dir = IOUtils.absolute(new File("/./directory./"));
+    Assert.assertEquals(dir, new File("/directory."));
+
+    dir = IOUtils.absolute(new File("/./.directory/"));
+    Assert.assertEquals(dir, new File("/.directory"));
+  }
+
+  // tail() on the hg18 reference .fai index: the last 5 lines are known.
+  // NOTE(review): depends on the BaseTest.hg18Reference fixture being present.
+  @Test
+  public void testTail() throws IOException {
+    List<String> lines = Arrays.asList(
+            "chr18_random	4262	3154410390	50	51",
+            "chr19_random	301858	3154414752	50	51",
+            "chr21_random	1679693	3154722662	50	51",
+            "chr22_random	257318	3156435963	50	51",
+            "chrX_random	1719168	3156698441	50	51");
+    List<String> tail = IOUtils.tail(new File(BaseTest.hg18Reference + ".fai"), 5);
+    Assert.assertEquals(tail.size(), 5);
+    for (int i = 0; i < 5; i++)
+      Assert.assertEquals(tail.get(i), lines.get(i));
+  }
+
+    // writeResource() with a null relative class loads from the system classpath.
+    @Test
+    public void testWriteSystemFile() throws IOException {
+        File temp = createTempFile("temp.", ".properties");
+        try {
+            IOUtils.writeResource(new Resource("testProperties.properties", null), temp);
+        } finally {
+            FileUtils.deleteQuietly(temp);
+        }
+    }
+
+    // writeTempResource() names the temp file after the resource.
+    @Test
+    public void testWriteSystemTempFile() throws IOException {
+        File temp = IOUtils.writeTempResource(new Resource("testProperties.properties", null));
+        try {
+            Assert.assertTrue(temp.getName().startsWith("testProperties"), "File does not start with 'testProperties.': " + temp);
+            Assert.assertTrue(temp.getName().endsWith(".properties"), "File does not end with '.properties': " + temp);
+        } finally {
+            FileUtils.deleteQuietly(temp);
+        }
+    }
+
+    // A missing classpath resource is an IllegalArgumentException.
+    @Test(expectedExceptions = IllegalArgumentException.class)
+    public void testMissingSystemFile() throws IOException {
+        File temp = createTempFile("temp.", ".properties");
+        try {
+            IOUtils.writeResource(new Resource("MissingStingText.properties", null), temp);
+        } finally {
+            FileUtils.deleteQuietly(temp);
+        }
+    }
+
+    // With a relative class, a leading "/" makes the path classpath-absolute.
+    @Test
+    public void testWriteRelativeFile() throws IOException {
+        File temp = createTempFile("temp.", ".properties");
+        try {
+            IOUtils.writeResource(new Resource("/testProperties.properties", IOUtils.class), temp);
+        } finally {
+            FileUtils.deleteQuietly(temp);
+        }
+    }
+
+    @Test
+    public void testWriteRelativeTempFile() throws IOException {
+        File temp = IOUtils.writeTempResource(new Resource("/testProperties.properties", IOUtils.class));
+        try {
+            Assert.assertTrue(temp.getName().startsWith("testProperties"), "File does not start with 'testProperties.': " + temp);
+            Assert.assertTrue(temp.getName().endsWith(".properties"), "File does not end with '.properties': " + temp);
+        } finally {
+            FileUtils.deleteQuietly(temp);
+        }
+    }
+
+    @Test(expectedExceptions = IllegalArgumentException.class)
+    public void testMissingRelativeFile() throws IOException {
+        File temp = createTempFile("temp.", ".properties");
+        try {
+            // Looking for /org/broadinstitute/gatk/utils/file/GATKText.properties
+            IOUtils.writeResource(new Resource("GATKText.properties", IOUtils.class), temp);
+        } finally {
+            FileUtils.deleteQuietly(temp);
+        }
+    }
+
+    // Simple accessor checks on Resource.
+    @Test
+    public void testResourceProperties() {
+        Resource resource = new Resource("foo", Resource.class);
+        Assert.assertEquals(resource.getPath(), "foo");
+        Assert.assertEquals(resource.getRelativeClass(), Resource.class);
+    }
+
+    // isSpecialFile(): /dev and its children are special; null, ordinary paths,
+    // and lookalike prefixes ("/devfake") are not.
+    @Test
+    public void testIsSpecialFile() {
+        Assert.assertTrue(IOUtils.isSpecialFile(new File("/dev")));
+        Assert.assertTrue(IOUtils.isSpecialFile(new File("/dev/null")));
+        Assert.assertTrue(IOUtils.isSpecialFile(new File("/dev/full")));
+        Assert.assertTrue(IOUtils.isSpecialFile(new File("/dev/stdout")));
+        Assert.assertTrue(IOUtils.isSpecialFile(new File("/dev/stderr")));
+        Assert.assertFalse(IOUtils.isSpecialFile(null));
+        Assert.assertFalse(IOUtils.isSpecialFile(new File("/home/user/my.file")));
+        Assert.assertFalse(IOUtils.isSpecialFile(new File("/devfake/null")));
+    }
+
+    // File sizes straddling multiples of the 4096-byte read buffer, to exercise
+    // partial-buffer, exact-buffer, and multi-buffer reads.
+    @DataProvider( name = "ByteArrayIOTestData")
+    public Object[][] byteArrayIOTestDataProvider() {
+        return new Object[][] {
+            // file size, read buffer size
+            { 0,     4096 },
+            { 1,     4096 },
+            { 2000,  4096 },
+            { 4095,  4096 },
+            { 4096,  4096 },
+            { 4097,  4096 },
+            { 6000,  4096 },
+            { 8191,  4096 },
+            { 8192,  4096 },
+            { 8193,  4096 },
+            { 10000, 4096 }
+        };
+    }
+
+    // Round-trip: file write + read must reproduce the bytes exactly.
+    @Test( dataProvider = "ByteArrayIOTestData" )
+    public void testWriteThenReadFileIntoByteArray ( int fileSize, int readBufferSize ) throws Exception {
+        File tempFile = createTempFile(String.format("testWriteThenReadFileIntoByteArray_%d_%d", fileSize, readBufferSize), "tmp");
+
+        byte[] dataWritten = getDeterministicRandomData(fileSize);
+        IOUtils.writeByteArrayToFile(dataWritten, tempFile);
+        byte[] dataRead = IOUtils.readFileIntoByteArray(tempFile, readBufferSize);
+
+        Assert.assertEquals(dataRead.length, dataWritten.length);
+        Assert.assertTrue(Arrays.equals(dataRead, dataWritten));
+    }
+
+    // Round-trip via streams rather than File-based APIs.
+    @Test( dataProvider = "ByteArrayIOTestData" )
+    public void testWriteThenReadStreamIntoByteArray ( int fileSize, int readBufferSize ) throws Exception {
+        File tempFile = createTempFile(String.format("testWriteThenReadStreamIntoByteArray_%d_%d", fileSize, readBufferSize), "tmp");
+
+        byte[] dataWritten = getDeterministicRandomData(fileSize);
+        IOUtils.writeByteArrayToStream(dataWritten, new FileOutputStream(tempFile));
+        byte[] dataRead = IOUtils.readStreamIntoByteArray(new FileInputStream(tempFile), readBufferSize);
+
+        Assert.assertEquals(dataRead.length, dataWritten.length);
+        Assert.assertTrue(Arrays.equals(dataRead, dataWritten));
+    }
+
+    // Error-path tests: missing file, null stream, bad buffer size, uncreatable
+    // output path, null data, null output stream.
+    @Test( expectedExceptions = UserException.CouldNotReadInputFile.class )
+    public void testReadNonExistentFileIntoByteArray() {
+        File nonExistentFile = new File("djfhsdkjghdfk");
+        Assert.assertFalse(nonExistentFile.exists());
+
+        IOUtils.readFileIntoByteArray(nonExistentFile);
+    }
+
+    @Test( expectedExceptions = ReviewedGATKException.class )
+    public void testReadNullStreamIntoByteArray() {
+        IOUtils.readStreamIntoByteArray(null);
+    }
+
+    @Test( expectedExceptions = ReviewedGATKException.class )
+    public void testReadStreamIntoByteArrayInvalidBufferSize() throws Exception {
+        IOUtils.readStreamIntoByteArray(new FileInputStream(createTempFile("testReadStreamIntoByteArrayInvalidBufferSize", "tmp")),
+                                        -1);
+    }
+
+    @Test( expectedExceptions = UserException.CouldNotCreateOutputFile.class )
+    public void testWriteByteArrayToUncreatableFile() {
+        IOUtils.writeByteArrayToFile(new byte[]{0}, new File("/dev/foo/bar"));
+    }
+
+    @Test( expectedExceptions = ReviewedGATKException.class )
+    public void testWriteNullByteArrayToFile() {
+        IOUtils.writeByteArrayToFile(null, createTempFile("testWriteNullByteArrayToFile", "tmp"));
+    }
+
+    @Test( expectedExceptions = ReviewedGATKException.class )
+    public void testWriteByteArrayToNullStream() {
+        IOUtils.writeByteArrayToStream(new byte[]{0}, null);
+    }
+
+    /**
+     * Returns {@code size} bytes from the shared Utils random generator, reset to
+     * its fixed seed first so the data is identical on every call.
+     */
+    private byte[] getDeterministicRandomData ( int size ) {
+        Utils.resetRandomGenerator();
+        Random rand = Utils.getRandomGenerator();
+
+        byte[] randomData = new byte[size];
+        rand.nextBytes(randomData);
+
+        return randomData;
+    }
+}
diff --git a/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/jna/clibrary/LibCUnitTest.java b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/jna/clibrary/LibCUnitTest.java
new file mode 100644
index 0000000..7d62286
--- /dev/null
+++ b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/jna/clibrary/LibCUnitTest.java
@@ -0,0 +1,70 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.jna.clibrary;
+
+import com.sun.jna.NativeLong;
+import com.sun.jna.ptr.NativeLongByReference;
+import org.broadinstitute.gatk.utils.BaseTest;
+import org.testng.Assert;
+import org.testng.annotations.Test;
+
+/**
+ * Tests for the JNA bindings to libc: environment-variable manipulation
+ * (getenv/setenv/unsetenv) and time()/difftime().
+ */
+public class LibCUnitTest extends BaseTest {
+
+    // Set, read back, and unset an environment variable through libc.
+    // Assumes test_property is not already set in the test environment.
+    @Test
+    public void testEnvironment() {
+        String testProperty = "test_property";
+        String testValue = "value";
+        Assert.assertEquals(LibC.getenv(testProperty), null);
+        Assert.assertEquals(LibC.setenv(testProperty, testValue, 1), 0);
+        Assert.assertEquals(LibC.getenv(testProperty), testValue);
+        Assert.assertEquals(LibC.unsetenv(testProperty), 0);
+        Assert.assertEquals(LibC.getenv(testProperty), null);
+    }
+
+    // Sample time() twice, 5 seconds apart, and check difftime() reports >= 5s.
+    @Test
+    public void testDifftime() throws Exception {
+        // Pointer to hold the times
+        NativeLongByReference ref = new NativeLongByReference();
+
+        // time() returns -1 on error.
+        NativeLong err = new NativeLong(-1L);
+
+        LibC.time(ref);
+        NativeLong time0 = ref.getValue();
+        Assert.assertNotSame(time0, err, "Time 0 returned an error (-1).");
+
+        Thread.sleep(5000L);
+
+        LibC.time(ref);
+        NativeLong time1 = ref.getValue();
+        Assert.assertNotSame(time1, err, "Time 1 returned an error (-1).");
+
+        Assert.assertNotSame(time1, time0, "Time 1 returned same time as Time 0.");
+
+        double diff = LibC.difftime(time1, time0);
+        Assert.assertTrue(diff >= 5, "Time difference was not greater than 5 seconds: " + diff);
+    }
+}
diff --git a/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/jna/drmaa/v1_0/JnaSessionQueueTest.java b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/jna/drmaa/v1_0/JnaSessionQueueTest.java
new file mode 100644
index 0000000..e2dc57b
--- /dev/null
+++ b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/jna/drmaa/v1_0/JnaSessionQueueTest.java
@@ -0,0 +1,165 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.jna.drmaa.v1_0;
+
+import org.apache.commons.io.FileUtils;
+import org.broadinstitute.gatk.utils.BaseTest;
+import org.ggf.drmaa.*;
+import org.testng.Assert;
+import org.testng.SkipException;
+import org.testng.annotations.Test;
+
+import java.io.File;
+import java.util.*;
+
+/**
+ * Queue tests for the JNA-based DRMAA session wrapper: prints DRM system info,
+ * optionally submits an "echo" job to the cluster, and unit-tests the pure
+ * conversion helpers on JnaSession.
+ */
+public class JnaSessionQueueTest extends BaseTest {
+    // Captured by testDrmaa() so testSubmitEcho() can skip on LSF.
+    private String implementation = null;
+    private static final SessionFactory factory = new JnaSessionFactory();
+
+    // Smoke test: query DRMAA version/contact/system info and remember the
+    // implementation string for the dependent submit test.
+    @Test
+    public void testDrmaa() throws Exception {
+        Session session = factory.getSession();
+        Version version = session.getVersion();
+        System.out.println(String.format("DRMAA version: %d.%d", version.getMajor(), version.getMinor()));
+        System.out.println(String.format("DRMAA contact(s): %s", session.getContact()));
+        System.out.println(String.format("DRM system(s): %s", session.getDrmSystem()));
+        System.out.println(String.format("DRMAA implementation(s): %s", session.getDrmaaImplementation()));
+        this.implementation = session.getDrmaaImplementation();
+    }
+
+    // Submits a trivial shell job through DRMAA, polls until it leaves the
+    // queued/running states, and verifies a clean exit plus the output file.
+    // Skipped in dry-run mode and on LSF implementations.
+    @Test(dependsOnMethods = { "testDrmaa" })
+    public void testSubmitEcho() throws Exception {
+        if ( ! queueTestRunModeIsSet ) {
+            throw new SkipException("Skipping testSubmitEcho because we are in queue test dry run mode");
+        }
+
+        if (implementation.contains("LSF")) {
+            System.err.println("    ***********************************************************");
+            System.err.println("   *************************************************************");
+            System.err.println("   ****                                                     ****");
+            System.err.println("  ****  Skipping JnaSessionQueueTest.testSubmitEcho()        ****");
+            System.err.println("  ****      Are you using the dotkit .combined_LSF_SGE?      ****");
+            System.err.println("   ****                                                     ****");
+            System.err.println("   *************************************************************");
+            System.err.println("    ***********************************************************");
+            throw new SkipException("Skipping testSubmitEcho because correct DRMAA implementation not found");
+        }
+
+        File outFile = tryCreateNetworkTempFile("JnaSessionQueueTest.out");
+        Session session = factory.getSession();
+        session.init(null);
+        try {
+            JobTemplate template = session.createJobTemplate();
+            template.setRemoteCommand("sh");
+            // Leading ":" is DRMAA path syntax (no host part).
+            template.setOutputPath(":" + outFile.getAbsolutePath());
+            template.setJoinFiles(true);
+            template.setArgs(Arrays.asList("-c", "echo \"Hello world.\""));
+
+            String jobId = session.runJob(template);
+            System.out.println(String.format("Job id %s", jobId));
+            session.deleteJobTemplate(template);
+
+            System.out.println("Waiting for job to run: " + jobId);
+            int remotePs = Session.QUEUED_ACTIVE;
+
+            List<Integer> runningStatuses = Arrays.asList(Session.QUEUED_ACTIVE, Session.RUNNING);
+
+            // Poll every 30s until the job is no longer queued or running.
+            while (runningStatuses.contains(remotePs)) {
+                Thread.sleep(30 * 1000L);
+                remotePs = session.getJobProgramStatus(jobId);
+            }
+
+            Assert.assertEquals(remotePs, Session.DONE, "Job status is not DONE.");
+
+            JobInfo jobInfo = session.wait(jobId, Session.TIMEOUT_NO_WAIT);
+
+            Assert.assertTrue(jobInfo.hasExited(), String.format("Job did not exit cleanly: %s", jobId));
+            Assert.assertEquals(jobInfo.getExitStatus(), 0, String.format("Exit status for jobId %s is non-zero", jobId));
+            if (jobInfo.hasSignaled())
+                Assert.fail(String.format("JobId %s exited with signal %s and core dump flag %s", jobId, jobInfo.getTerminatingSignal(), jobInfo.hasCoreDump()));
+            Assert.assertFalse(jobInfo.wasAborted(), String.format("Job was aborted: %s", jobId));
+        } finally {
+            session.exit();
+        }
+
+        // Output may land on a network filesystem; allow up to 120s for it to appear.
+        Assert.assertTrue(FileUtils.waitFor(outFile, 120), "File not found: " + outFile.getAbsolutePath());
+        System.out.println("--- output ---");
+        System.out.println(FileUtils.readFileToString(outFile));
+        System.out.println("--- output ---");
+        Assert.assertTrue(outFile.delete(), "Unable to delete " + outFile.getAbsolutePath());
+        System.out.println("Validating that we reached the end of the test without exit.");
+    }
+
+    // "key=value" collection <-> ordered map conversions round-trip, including
+    // an empty value.
+    @Test
+    public void testCollectionConversions() {
+        Collection<String> list = Arrays.asList("a=1", "foo=bar", "empty=");
+        Map<String, String> map = new LinkedHashMap<String, String>();
+        map.put("a", "1");
+        map.put("foo", "bar");
+        map.put("empty", "");
+
+        Assert.assertEquals(JnaSession.collectionToMap(list), map);
+        Assert.assertEquals(JnaSession.mapToCollection(map), list);
+    }
+
+    // formatLimit(): seconds -> "H:MM:SS"; parseLimit(): "S", "M:S", or "H:M:S"
+    // strings -> seconds.
+    @Test
+    public void testLimitConversions() {
+        Assert.assertEquals(JnaSession.formatLimit(0), "0:00:00");
+        Assert.assertEquals(JnaSession.formatLimit(59), "0:00:59");
+        Assert.assertEquals(JnaSession.formatLimit(60), "0:01:00");
+        Assert.assertEquals(JnaSession.formatLimit(3540), "0:59:00");
+        Assert.assertEquals(JnaSession.formatLimit(3599), "0:59:59");
+        Assert.assertEquals(JnaSession.formatLimit(7200), "2:00:00");
+        Assert.assertEquals(JnaSession.formatLimit(7260), "2:01:00");
+        Assert.assertEquals(JnaSession.formatLimit(7261), "2:01:01");
+
+        Assert.assertEquals(JnaSession.parseLimit("0"), 0);
+        Assert.assertEquals(JnaSession.parseLimit("00"), 0);
+        Assert.assertEquals(JnaSession.parseLimit("0:00"), 0);
+        Assert.assertEquals(JnaSession.parseLimit("00:00"), 0);
+        Assert.assertEquals(JnaSession.parseLimit("0:00:00"), 0);
+
+        Assert.assertEquals(JnaSession.parseLimit("1"), 1);
+        Assert.assertEquals(JnaSession.parseLimit("01"), 1);
+        Assert.assertEquals(JnaSession.parseLimit("0:01"), 1);
+        Assert.assertEquals(JnaSession.parseLimit("00:01"), 1);
+        Assert.assertEquals(JnaSession.parseLimit("0:00:01"), 1);
+
+        Assert.assertEquals(JnaSession.parseLimit("10"), 10);
+        Assert.assertEquals(JnaSession.parseLimit("0:10"), 10);
+        Assert.assertEquals(JnaSession.parseLimit("00:10"), 10);
+        Assert.assertEquals(JnaSession.parseLimit("0:00:10"), 10);
+
+        Assert.assertEquals(JnaSession.parseLimit("1:0"), 60);
+        Assert.assertEquals(JnaSession.parseLimit("1:00"), 60);
+        Assert.assertEquals(JnaSession.parseLimit("01:00"), 60);
+        Assert.assertEquals(JnaSession.parseLimit("0:01:00"), 60);
+
+        Assert.assertEquals(JnaSession.parseLimit("1:00:00"), 3600);
+
+        Assert.assertEquals(JnaSession.parseLimit("1:02:03"), 3723);
+    }
+}
diff --git a/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/jna/drmaa/v1_0/LibDrmaaQueueTest.java b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/jna/drmaa/v1_0/LibDrmaaQueueTest.java
new file mode 100644
index 0000000..ecc0f9a
--- /dev/null
+++ b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/jna/drmaa/v1_0/LibDrmaaQueueTest.java
@@ -0,0 +1,257 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.jna.drmaa.v1_0;
+
+import com.sun.jna.Memory;
+import com.sun.jna.NativeLong;
+import com.sun.jna.Pointer;
+import com.sun.jna.StringArray;
+import com.sun.jna.ptr.IntByReference;
+import com.sun.jna.ptr.PointerByReference;
+import org.apache.commons.io.FileUtils;
+import org.broadinstitute.gatk.utils.BaseTest;
+import org.testng.Assert;
+import org.testng.SkipException;
+import org.testng.annotations.Test;
+
+import java.io.File;
+import java.util.Arrays;
+import java.util.List;
+
+/**
+ * Tests of the JNA bindings for the DRMAA C library.  Really unit tests, but they
+ * can only run on hosts where a native DRMAA implementation is installed and a DRM
+ * (e.g. Grid Engine) is reachable, so they are grouped with the queue tests.
+ */
+public class LibDrmaaQueueTest extends BaseTest {
+    // DRMAA implementation string reported by the native library.  Written by
+    // testDrmaa() and read by testSubmitEcho(); the dependsOnMethods annotation on
+    // testSubmitEcho guarantees the ordering.
+    private String implementation = null;
+
+    /**
+     * Queries and prints the DRMAA version, contact string, DRM system, and DRMAA
+     * implementation, failing if any library call returns a non-success errno.
+     * Also records the implementation string for use by testSubmitEcho().
+     */
+    @Test
+    public void testDrmaa() throws Exception {
+        // Native buffer the library writes human-readable error messages into.
+        Memory error = new Memory(LibDrmaa.DRMAA_ERROR_STRING_BUFFER);
+        int errnum;
+
+        IntByReference major = new IntByReference();
+        IntByReference minor = new IntByReference();
+        Memory contact = new Memory(LibDrmaa.DRMAA_CONTACT_BUFFER);
+        Memory drmSystem = new Memory(LibDrmaa.DRMAA_DRM_SYSTEM_BUFFER);
+        Memory drmaaImplementation = new Memory(LibDrmaa.DRMAA_DRMAA_IMPLEMENTATION_BUFFER);
+
+        errnum = LibDrmaa.drmaa_version(major, minor, error, LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN);
+
+        if (errnum != LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_SUCCESS)
+            Assert.fail(String.format("Could not get version from the DRMAA library: %s", error.getString(0)));
+
+        System.out.println(String.format("DRMAA version: %d.%d", major.getValue(), minor.getValue()));
+
+        errnum = LibDrmaa.drmaa_get_contact(contact, LibDrmaa.DRMAA_CONTACT_BUFFER_LEN, error, LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN);
+
+        if (errnum != LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_SUCCESS)
+            Assert.fail(String.format("Could not get contacts from the DRMAA library: %s", error.getString(0)));
+
+        System.out.println(String.format("DRMAA contact(s): %s", contact.getString(0)));
+
+        errnum = LibDrmaa.drmaa_get_DRM_system(drmSystem, LibDrmaa.DRMAA_DRM_SYSTEM_BUFFER_LEN, error, LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN);
+
+        if (errnum != LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_SUCCESS)
+            Assert.fail(String.format("Could not get DRM system from the DRMAA library: %s", error.getString(0)));
+
+        System.out.println(String.format("DRM system(s): %s", drmSystem.getString(0)));
+
+        errnum = LibDrmaa.drmaa_get_DRMAA_implementation(drmaaImplementation, LibDrmaa.DRMAA_DRMAA_IMPLEMENTATION_BUFFER_LEN, error, LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN);
+
+        if (errnum != LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_SUCCESS)
+            Assert.fail(String.format("Could not get DRMAA implementation from the DRMAA library: %s", error.getString(0)));
+
+        System.out.println(String.format("DRMAA implementation(s): %s", drmaaImplementation.getString(0)));
+
+        // Saved for testSubmitEcho(), which skips itself on an LSF-backed DRMAA.
+        this.implementation = drmaaImplementation.getString(0);
+    }
+
+    /**
+     * End-to-end job round trip through DRMAA: submit a shell job that echoes a
+     * message to a network-visible output file, poll until it leaves the
+     * queued/running states, then verify it exited cleanly (not signaled, not
+     * aborted, zero exit status) and produced the expected output file.
+     *
+     * Skipped in dry-run mode, and skipped when the DRMAA implementation is LSF
+     * (this test targets the non-LSF implementation; see the banner below).
+     */
+    @Test(dependsOnMethods = { "testDrmaa" })
+    public void testSubmitEcho() throws Exception {
+        if ( ! queueTestRunModeIsSet ) {
+            throw new SkipException("Skipping testSubmitEcho because we are in pipeline test dry run mode");
+        }
+
+        if (implementation.contains("LSF")) {
+            System.err.println("    *********************************************************");
+            System.err.println("   ***********************************************************");
+            System.err.println("   ****                                                   ****");
+            System.err.println("  ****  Skipping LibDrmaaQueueTest.testSubmitEcho()        ****");
+            System.err.println("  ****     Are you using the dotkit .combined_LSF_SGE?     ****");
+            System.err.println("   ****                                                   ****");
+            System.err.println("   ***********************************************************");
+            System.err.println("    *********************************************************");
+            throw new SkipException("Skipping testSubmitEcho because correct DRMAA implementation not found");
+        }
+
+        Memory error = new Memory(LibDrmaa.DRMAA_ERROR_STRING_BUFFER);
+        int errnum;
+
+    File outFile = tryCreateNetworkTempFile("LibDrmaaQueueTest.out");
+
+        // drmaa_init must be balanced by drmaa_exit; see the finally block below.
+        errnum = LibDrmaa.drmaa_init(null, error, LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN);
+
+        if (errnum != LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_SUCCESS)
+            Assert.fail(String.format("Could not initialize the DRMAA library: %s", error.getString(0)));
+
+        try {
+            PointerByReference jtRef = new PointerByReference();
+            Pointer jt;
+            Memory jobIdMem = new Memory(LibDrmaa.DRMAA_JOBNAME_BUFFER);
+            String jobId;
+            IntByReference remotePs = new IntByReference();
+            IntByReference stat = new IntByReference();
+            PointerByReference rusage = new PointerByReference();
+
+            errnum = LibDrmaa.drmaa_allocate_job_template(jtRef, error, LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN);
+
+            if (errnum != LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_SUCCESS)
+                Assert.fail(String.format("Could not create job template: %s", error.getString(0)));
+
+            jt = jtRef.getValue();
+
+            // Job runs "sh -c 'echo ...'"; stdout (joined with stderr) goes to outFile.
+            errnum = LibDrmaa.drmaa_set_attribute(jt, LibDrmaa.DRMAA_REMOTE_COMMAND, "sh", error, LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN);
+
+            if (errnum != LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_SUCCESS)
+                Assert.fail(String.format("Could not set attribute \"%s\": %s", LibDrmaa.DRMAA_REMOTE_COMMAND, error.getString(0)));
+
+            // The leading ":" is the DRMAA output-path syntax (no host part).
+            errnum = LibDrmaa.drmaa_set_attribute(jt, LibDrmaa.DRMAA_OUTPUT_PATH, ":" + outFile.getAbsolutePath(), error, LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN);
+
+            if (errnum != LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_SUCCESS)
+                Assert.fail(String.format("Could not set attribute \"%s\": %s", LibDrmaa.DRMAA_OUTPUT_PATH, error.getString(0)));
+
+            errnum = LibDrmaa.drmaa_set_attribute(jt, LibDrmaa.DRMAA_JOIN_FILES, "y", error, LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN);
+
+            if (errnum != LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_SUCCESS)
+                Assert.fail(String.format("Could not set attribute \"%s\": %s", LibDrmaa.DRMAA_JOIN_FILES, error.getString(0)));
+
+            StringArray args = new StringArray(new String[] { "-c", "echo \"Hello world.\"" });
+
+            errnum = LibDrmaa.drmaa_set_vector_attribute(jt, LibDrmaa.DRMAA_V_ARGV, args, error, LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN);
+
+            if (errnum != LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_SUCCESS)
+                Assert.fail(String.format("Could not set attribute \"%s\": %s", LibDrmaa.DRMAA_V_ARGV, error.getString(0)));
+
+            errnum = LibDrmaa.drmaa_run_job(jobIdMem, LibDrmaa.DRMAA_JOBNAME_BUFFER_LEN, jt, error, LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN);
+
+            if (errnum != LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_SUCCESS)
+                Assert.fail(String.format("Could not submit job: %s", error.getString(0)));
+
+            jobId = jobIdMem.getString(0);
+
+            System.out.println(String.format("Job id %s", jobId));
+
+            // Template is no longer needed once the job is submitted.
+            errnum = LibDrmaa.drmaa_delete_job_template(jt, error, LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN);
+
+            if (errnum != LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_SUCCESS)
+                Assert.fail(String.format("Could not delete job template: %s", error.getString(0)));
+
+            System.out.println("Waiting for job to run: " + jobId);
+            // Seed the status so the polling loop below runs at least once.
+            remotePs.setValue(LibDrmaa.DRMAA_PS.DRMAA_PS_QUEUED_ACTIVE);
+
+            List<Integer> runningStatuses = Arrays.asList(
+                    LibDrmaa.DRMAA_PS.DRMAA_PS_QUEUED_ACTIVE, LibDrmaa.DRMAA_PS.DRMAA_PS_RUNNING);
+
+            // Poll every 30 seconds until the job leaves the queued/running states.
+            while (runningStatuses.contains(remotePs.getValue())) {
+                Thread.sleep(30 * 1000L);
+
+                errnum = LibDrmaa.drmaa_job_ps(jobId, remotePs, error, LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN);
+
+                if (errnum != LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_SUCCESS)
+                    Assert.fail(String.format("Could not get status for jobId %s: %s", jobId, error.getString(0)));
+            }
+
+            Assert.assertEquals(remotePs.getValue(), LibDrmaa.DRMAA_PS.DRMAA_PS_DONE, "Job status is not DONE.");
+
+            // Reap the job to obtain its wait status.  NOTE(review): uses
+            // DRMAA_TIMEOUT_NO_WAIT on the assumption the job is already reapable
+            // because drmaa_job_ps just reported DONE — confirm against the spec.
+            errnum = LibDrmaa.drmaa_wait(jobId, Pointer.NULL, new NativeLong(0), stat, LibDrmaa.DRMAA_TIMEOUT_NO_WAIT,
+                    rusage, error, LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN);
+
+            if (errnum != LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_SUCCESS)
+                Assert.fail(String.format("Wait failed for jobId %s: %s", jobId, error.getString(0)));
+
+            IntByReference exited = new IntByReference();
+            IntByReference exitStatus = new IntByReference();
+            IntByReference signaled = new IntByReference();
+            Memory signal = new Memory(LibDrmaa.DRMAA_SIGNAL_BUFFER);
+            IntByReference coreDumped = new IntByReference();
+            IntByReference aborted = new IntByReference();
+
+            // Decode the wait status: exited cleanly with status 0, not signaled,
+            // not aborted — any other outcome fails the test.
+            errnum = LibDrmaa.drmaa_wifexited(exited, stat.getValue(), error, LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN);
+
+            if (errnum != LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_SUCCESS)
+                Assert.fail(String.format("Exit check failed for jobId %s: %s", jobId, error.getString(0)));
+
+            Assert.assertTrue(exited.getValue() != 0, String.format("Job did not exit cleanly: %s", jobId));
+
+            errnum = LibDrmaa.drmaa_wexitstatus(exitStatus, stat.getValue(), error, LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN);
+
+            if (errnum != LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_SUCCESS)
+                Assert.fail(String.format("Exit status failed for jobId %s: %s", jobId, error.getString(0)));
+
+            Assert.assertEquals(exitStatus.getValue(), 0, String.format("Exit status for jobId %s is non-zero", jobId));
+
+            errnum = LibDrmaa.drmaa_wifsignaled(signaled, stat.getValue(), error, LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN);
+
+            if (errnum != LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_SUCCESS)
+                Assert.fail(String.format("Signaled check failed for jobId %s: %s", jobId, error.getString(0)));
+
+            if (signaled.getValue() != 0) {
+                // Signaled jobs are always a failure; look up the signal name and
+                // core-dump flag first so the failure message is informative.
+                errnum = LibDrmaa.drmaa_wtermsig(signal, LibDrmaa.DRMAA_SIGNAL_BUFFER_LEN, stat.getValue(), error, LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN);
+
+                if (errnum != LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_SUCCESS)
+                    Assert.fail(String.format("Signal lookup failed for jobId %s: %s", jobId, error.getString(0)));
+
+                errnum = LibDrmaa.drmaa_wcoredump(coreDumped, stat.getValue(), error, LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN);
+
+                if (errnum != LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_SUCCESS)
+                    Assert.fail(String.format("Core dump check failed for jobId %s: %s", jobId, error.getString(0)));
+
+                Assert.fail(String.format("JobId %s exited with signal %s and core dump flag %d", jobId, signal.getString(0), coreDumped.getValue()));
+            }
+
+            errnum = LibDrmaa.drmaa_wifaborted(aborted, stat.getValue(), error, LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN);
+
+            if (errnum != LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_SUCCESS)
+                Assert.fail(String.format("Aborted check failed for jobId %s: %s", jobId, error.getString(0)));
+
+            Assert.assertTrue(aborted.getValue() == 0, String.format("Job was aborted: %s", jobId));
+
+        } finally {
+            // If a library call already failed (errnum != SUCCESS means Assert.fail
+            // threw above), shut down quietly so a drmaa_exit error does not mask
+            // the original failure; otherwise a failed drmaa_exit fails the test.
+            if (errnum != LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_SUCCESS) {
+                LibDrmaa.drmaa_exit(error, LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN);
+            } else {
+                errnum = LibDrmaa.drmaa_exit(error, LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN);
+
+                if (errnum != LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_SUCCESS)
+                    Assert.fail(String.format("Could not shut down the DRMAA library: %s", error.getString(0)));
+            }
+        }
+
+        // The output file lives on a network filesystem; allow up to 120s for it
+        // to become visible before reading and deleting it.
+        Assert.assertTrue(FileUtils.waitFor(outFile, 120), "File not found: " + outFile.getAbsolutePath());
+        System.out.println("--- output ---");
+        System.out.println(FileUtils.readFileToString(outFile));
+        System.out.println("--- output ---");
+        Assert.assertTrue(outFile.delete(), "Unable to delete " + outFile.getAbsolutePath());
+        System.out.println("Validating that we reached the end of the test without exit.");
+    }
+}
diff --git a/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/jna/lsf/v7_0_6/LibBatQueueTest.java b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/jna/lsf/v7_0_6/LibBatQueueTest.java
new file mode 100644
index 0000000..29b7ad0
--- /dev/null
+++ b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/jna/lsf/v7_0_6/LibBatQueueTest.java
@@ -0,0 +1,162 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.jna.lsf.v7_0_6;
+
+import com.sun.jna.*;
+import com.sun.jna.ptr.IntByReference;
+import org.apache.commons.io.FileUtils;
+import org.broadinstitute.gatk.utils.Utils;
+import org.testng.Assert;
+import org.testng.SkipException;
+import org.testng.annotations.BeforeClass;
+import org.testng.annotations.Test;
+import org.broadinstitute.gatk.utils.BaseTest;
+import org.broadinstitute.gatk.utils.jna.lsf.v7_0_6.LibBat.*;
+
+import java.io.File;
+
+/**
+ * Tests of the JNA bindings for the LSF batch library (lsb_* API).  Really unit
+ * tests, but they will only run on systems with LSF set up.
+ *
+ * NOTE(review): every test here (and the BeforeClass) is annotated enabled=false,
+ * so the whole class is currently disabled — presumably because LSF is not
+ * available in the standard test environment; confirm before re-enabling.
+ */
+public class LibBatQueueTest extends BaseTest {
+    // Initializes the LSF batch library; must succeed before any lsb_* call.
+    @BeforeClass(enabled=false)
+    public void initLibBat() {
+        Assert.assertFalse(LibBat.lsb_init("LibBatQueueTest") < 0, LibBat.lsb_sperror("lsb_init() failed"));
+    }
+
+    /** Sanity check: the LSF cluster name can be retrieved and is non-null. */
+    @Test(enabled=false)
+    public void testClusterName() {
+        String clusterName = LibLsf.ls_getclustername();
+        System.out.println("Cluster name: " + clusterName);
+        Assert.assertNotNull(clusterName);
+    }
+
+    /**
+     * Round-trips configuration parameters through ls_readconfenv, checking that
+     * known parameters come back populated, an unknown parameter comes back null,
+     * and the unused fourth slot stays empty.
+     */
+    @Test(enabled=false)
+    public void testReadConfEnv() {
+        // toArray(4) allocates a contiguous native array of 4 structs, as the C
+        // API expects; the last element is left zeroed as the array terminator.
+        LibLsf.config_param[] configParams = (LibLsf.config_param[]) new LibLsf.config_param().toArray(4);
+
+        configParams[0].paramName = "LSF_UNIT_FOR_LIMITS";
+        configParams[1].paramName = "LSF_CONFDIR";
+        configParams[2].paramName = "MADE_UP_PARAMETER";
+
+        // Flush the Java-side field writes into native memory before the call...
+        Structure.autoWrite(configParams);
+
+        if (LibLsf.ls_readconfenv(configParams[0], null) != 0) {
+            Assert.fail(LibLsf.ls_sysmsg());
+        }
+
+        // ...and read the native results back into the Java structs afterwards.
+        Structure.autoRead(configParams);
+
+        System.out.println("LSF_UNIT_FOR_LIMITS: " + configParams[0].paramValue);
+        Assert.assertNotNull(configParams[1].paramValue);
+        Assert.assertNull(configParams[2].paramValue);
+        Assert.assertNull(configParams[3].paramName);
+        Assert.assertNull(configParams[3].paramValue);
+    }
+
+    /** Reads queue info for the "hour" queue and checks it has a positive run limit. */
+    @Test(enabled=false)
+    public void testReadQueueLimits() {
+        String queue = "hour";
+        StringArray queues = new StringArray(new String[] {queue});
+        IntByReference numQueues = new IntByReference(1);
+        queueInfoEnt queueInfo = LibBat.lsb_queueinfo(queues, numQueues, null, null, 0);
+
+        Assert.assertEquals(numQueues.getValue(), 1);
+        Assert.assertNotNull(queueInfo);
+        Assert.assertEquals(queueInfo.queue, queue);
+
+        int runLimit = queueInfo.rLimits[LibLsf.LSF_RLIMIT_RUN];
+        Assert.assertTrue(runLimit > 0, "LSF run limit is not greater than zero: " + runLimit);
+    }
+
+    /**
+     * End-to-end job round trip through the LSF batch API: submit an echo job to
+     * the "hour" queue, poll until it leaves the pending/running states, then
+     * verify it finished DONE, wrote the expected output file, and was dispatched
+     * to the requested queue.  Skipped in queue-test dry-run mode.
+     */
+    @Test(enabled=false)
+    public void testSubmitEcho() throws Exception {
+        if ( ! queueTestRunModeIsSet ) {
+            throw new SkipException("Skipping testSubmitEcho because we are in queue test dry run mode");
+        }
+
+        String queue = "hour";
+        File outFile = tryCreateNetworkTempFile("LibBatQueueTest.out");
+
+        submit req = new submit();
+
+        // All resource limits default to "no limit" before setting options.
+        for (int i = 0; i < LibLsf.LSF_RLIM_NLIMITS; i++)
+            req.rLimits[i] = LibLsf.DEFAULT_RLIMIT;
+
+        // Each field set on the request must also have its flag set in options/options2.
+        req.projectName = "LibBatQueueTest";
+        req.options |= LibBat.SUB_PROJECT_NAME;
+
+        req.queue = queue;
+        req.options |= LibBat.SUB_QUEUE;
+
+        req.outFile = outFile.getPath();
+        req.options |= LibBat.SUB_OUT_FILE;
+
+        req.userPriority = 100;
+        req.options2 |= LibBat.SUB2_JOB_PRIORITY;
+
+        req.command = "echo \"Hello world.\"";
+
+        // Exercise setOption_ with a bsub-style "-a tv" argument (argv[0] unused).
+        String[] argv = {"", "-a", "tv"};
+        int setOptionResult = LibBat.setOption_(argv.length, new StringArray(argv), "a:", req, ~0, ~0, ~0, null);
+        Assert.assertTrue(setOptionResult != -1, "setOption_ returned -1");
+
+        submitReply reply = new submitReply();
+        long jobId = LibBat.lsb_submit(req, reply);
+
+        Assert.assertFalse(jobId < 0, LibBat.lsb_sperror("Error dispatching"));
+
+        System.out.println("Waiting for job to run: " + jobId);
+        // Seed the status so the polling loop runs at least once; poll every 30s.
+        int jobStatus = LibBat.JOB_STAT_PEND;
+        while (Utils.isFlagSet(jobStatus, LibBat.JOB_STAT_PEND) || Utils.isFlagSet(jobStatus, LibBat.JOB_STAT_RUN)) {
+            Thread.sleep(30 * 1000L);
+
+            // lsb_openjobinfo/lsb_readjobinfo/lsb_closejobinfo must be paired;
+            // the finally guarantees the close even if an assertion throws.
+            int numJobs = LibBat.lsb_openjobinfo(jobId, null, null, null, null, LibBat.ALL_JOB);
+            try {
+                Assert.assertEquals(numJobs, 1);
+    
+                IntByReference more = new IntByReference();
+
+                jobInfoEnt jobInfo = LibBat.lsb_readjobinfo(more);
+                Assert.assertNotNull(jobInfo, "Job info is null");
+                Assert.assertEquals(more.getValue(), 0, "More job info results than expected");
+
+                jobStatus = jobInfo.status;
+            } finally {
+                LibBat.lsb_closejobinfo();
+            }
+        }
+        Assert.assertTrue(Utils.isFlagSet(jobStatus, LibBat.JOB_STAT_DONE), String.format("Unexpected job status: 0x%02x", jobStatus));
+
+        // Output lives on a network filesystem; allow up to 120s for visibility.
+        Assert.assertTrue(FileUtils.waitFor(outFile, 120), "File not found: " + outFile.getAbsolutePath());
+        System.out.println("--- output ---");
+        System.out.println(FileUtils.readFileToString(outFile));
+        System.out.println("--- output ---");
+        Assert.assertTrue(outFile.delete(), "Unable to delete " + outFile.getAbsolutePath());
+        Assert.assertEquals(reply.queue, req.queue, "LSF reply queue does not match requested queue.");
+        System.out.println("Validating that we reached the end of the test without exit.");
+    }
+}
diff --git a/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/locusiterator/AlignmentStateMachineUnitTest.java b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/locusiterator/AlignmentStateMachineUnitTest.java
new file mode 100644
index 0000000..df20a7e
--- /dev/null
+++ b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/locusiterator/AlignmentStateMachineUnitTest.java
@@ -0,0 +1,116 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.locusiterator;
+
+import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
+import org.testng.Assert;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+import java.util.Arrays;
+
+/**
+ * Tests of AlignmentStateMachine, the core of the new (non-legacy)
+ * LocusIteratorByState.  Each generated read is stepped across the genome and
+ * the machine's offsets and cigar bookkeeping are cross-checked at every
+ * position against the independent LIBS_position reference implementation.
+ */
+public class AlignmentStateMachineUnitTest extends LocusIteratorByStateBaseTest {
+    /**
+     * Provides LIBSTest read configurations: all combinations of per-operator
+     * lengths {1, 2} and cigar sizes {1, 2, 3, 4} (see createLIBSTests in the
+     * base class).
+     */
+    @DataProvider(name = "AlignmentStateMachineTest")
+    public Object[][] makeAlignmentStateMachineTest() {
+        return createLIBSTests(
+                Arrays.asList(1, 2),
+                Arrays.asList(1, 2, 3, 4));
+    }
+
+    /**
+     * Checks the machine's pre-stepping ("left edge") invariants, then advances
+     * it base by base alongside the LIBS_position tester, asserting that read
+     * offsets, cigar element/offset bookkeeping, and genome positions agree at
+     * every step, and that the expected number of aligned bases was visited.
+     */
+    @Test(dataProvider = "AlignmentStateMachineTest")
+    public void testAlignmentStateMachineTest(LIBSTest params) {
+        final GATKSAMRecord read = params.makeRead();
+        final AlignmentStateMachine state = new AlignmentStateMachine(read);
+        // Independent reference implementation stepped in lockstep with state.
+        final LIBS_position tester = new LIBS_position(read);
+
+        // min is one because always visit something, even for 10I reads
+        final int expectedBpToVisit = read.getAlignmentEnd() - read.getAlignmentStart() + 1;
+
+        Assert.assertSame(state.getRead(), read);
+        Assert.assertNotNull(state.toString());
+
+        int bpVisited = 0;
+        int lastOffset = -1;
+
+        // Before the first step the machine sits on the left edge: no cigar
+        // element, all offsets -1, and makePileupElement must refuse to run.
+        // TODO -- more tests about test state machine state before first step?
+        Assert.assertTrue(state.isLeftEdge());
+        try {
+            state.makePileupElement();
+            Assert.fail("makePileupElement should've thrown an exception");
+        } catch (IllegalStateException e) {
+            Assert.assertTrue(e.getMessage().indexOf(state.MAKE_PILEUP_EDGE_ERROR) != -1);
+        }
+        Assert.assertNull(state.getCigarOperator());
+        Assert.assertNotNull(state.toString());
+        Assert.assertEquals(state.getReadOffset(), -1);
+        Assert.assertEquals(state.getGenomeOffset(), -1);
+        Assert.assertEquals(state.getCurrentCigarElementOffset(), -1);
+        Assert.assertEquals(state.getCurrentCigarElement(), null);
+
+        // Step both implementations forward one genome position at a time and
+        // compare their view of the read at every position.
+        while ( state.stepForwardOnGenome() != null ) {
+            Assert.assertNotNull(state.toString());
+
+            tester.stepForwardOnGenome();
+
+            Assert.assertTrue(state.getReadOffset() >= lastOffset, "Somehow read offsets are decreasing: lastOffset " + lastOffset + " current " + state.getReadOffset());
+            Assert.assertEquals(state.getReadOffset(), tester.getCurrentReadOffset(), "Read offsets are wrong at " + bpVisited);
+
+            Assert.assertFalse(state.isLeftEdge());
+
+            Assert.assertEquals(state.getCurrentCigarElement(), read.getCigar().getCigarElement(tester.currentOperatorIndex), "CigarElement index failure");
+            Assert.assertEquals(state.getOffsetIntoCurrentCigarElement(), tester.getCurrentPositionOnOperatorBase0(), "CigarElement index failure");
+
+            Assert.assertEquals(read.getCigar().getCigarElement(state.getCurrentCigarElementOffset()), state.getCurrentCigarElement(), "Current cigar element isn't what we'd get from the read itself");
+
+            Assert.assertTrue(state.getOffsetIntoCurrentCigarElement() >= 0, "Offset into current cigar too small");
+            Assert.assertTrue(state.getOffsetIntoCurrentCigarElement() < state.getCurrentCigarElement().getLength(), "Offset into current cigar too big");
+
+            Assert.assertEquals(state.getGenomeOffset(), tester.getCurrentGenomeOffsetBase0(), "Offset from alignment start is bad");
+            Assert.assertEquals(state.getGenomePosition(), tester.getCurrentGenomeOffsetBase0() + read.getAlignmentStart(), "GenomePosition start is bad");
+            Assert.assertEquals(state.getLocation(genomeLocParser).size(), 1, "GenomeLoc position should have size == 1");
+            Assert.assertEquals(state.getLocation(genomeLocParser).getStart(), state.getGenomePosition(), "GenomeLoc position is bad");
+
+            // most tests of this functionality are in LIBS
+            Assert.assertNotNull(state.makePileupElement());
+
+            lastOffset = state.getReadOffset();
+            bpVisited++;
+        }
+
+        // After exhausting the read the machine sits on the right edge: offsets
+        // point one past the end and there is no current cigar element.
+        Assert.assertEquals(bpVisited, expectedBpToVisit, "Didn't visit the expected number of bp");
+        Assert.assertEquals(state.getReadOffset(), read.getReadLength());
+        Assert.assertEquals(state.getCurrentCigarElementOffset(), read.getCigarLength());
+        Assert.assertEquals(state.getCurrentCigarElement(), null);
+        Assert.assertNotNull(state.toString());
+    }
+}
diff --git a/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/locusiterator/LIBS_position.java b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/locusiterator/LIBS_position.java
new file mode 100644
index 0000000..60bbe6a
--- /dev/null
+++ b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/locusiterator/LIBS_position.java
@@ -0,0 +1,155 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.locusiterator;
+
+import htsjdk.samtools.Cigar;
+import htsjdk.samtools.CigarElement;
+import htsjdk.samtools.CigarOperator;
+import htsjdk.samtools.SAMRecord;
+
+/**
+ * Simple reference implementation of per-genome-base read traversal, used by the
+ * locus-iterator tests to cross-check AlignmentStateMachine / LocusIteratorByState.
+ * Walks a read's cigar one aligned genome position at a time, maintaining the
+ * current read and genome offsets plus flags describing adjacent indel and
+ * soft-clip events.
+ */
+public final class LIBS_position {
+
+    SAMRecord read;
+
+    // Cigar traversal state.  Offsets below are maintained 1-based while
+    // stepping; the getCurrent*Base0 accessors convert to 0-based.
+    final int numOperators;
+    int currentOperatorIndex = 0;
+    int currentPositionOnOperator = 0;
+    int currentReadOffset = 0;
+    int currentGenomeOffset = 0;
+
+    // Flags describing the current position's relationship to neighbouring
+    // deletion, insertion, and soft-clip cigar events; recomputed on every step.
+    public boolean isBeforeDeletionStart = false;
+    public boolean isBeforeDeletedBase = false;
+    public boolean isAfterDeletionEnd = false;
+    public boolean isAfterDeletedBase = false;
+    public boolean isBeforeInsertion = false;
+    public boolean isAfterInsertion = false;
+    public boolean isNextToSoftClip = false;
+
+    // True once an alignment operator (M/EQ/X) has been consumed; used to treat
+    // a leading insertion (before any match) as "before insertion".
+    boolean sawMop = false;
+
+    public LIBS_position(final SAMRecord read) {
+        this.read = read;
+        numOperators = read.getCigar().numCigarElements();
+    }
+
+    /** Current read offset, 0-based (clamped at 0 before the first step). */
+    public int getCurrentReadOffset() {
+        return Math.max(0, currentReadOffset - 1);
+    }
+
+    /** Current position within the current cigar operator, 0-based. */
+    public int getCurrentPositionOnOperatorBase0() {
+        return currentPositionOnOperator - 1;
+    }
+
+    /** Current offset from the alignment start, 0-based. */
+    public int getCurrentGenomeOffsetBase0() {
+        return currentGenomeOffset - 1;
+    }
+
+    /**
+     * Steps forward on the genome.  Returns false when done reading the read, true otherwise.
+     */
+    public boolean stepForwardOnGenome() {
+        if ( currentOperatorIndex == numOperators )
+            return false;
+
+        // Advance to the next cigar element once the current one is exhausted.
+        CigarElement curElement = read.getCigar().getCigarElement(currentOperatorIndex);
+        if ( currentPositionOnOperator >= curElement.getLength() ) {
+            if ( ++currentOperatorIndex == numOperators )
+                return false;
+
+            curElement = read.getCigar().getCigarElement(currentOperatorIndex);
+            currentPositionOnOperator = 0;
+        }
+
+        switch ( curElement.getOperator() ) {
+            // Deliberate fall-through: I and S consume read bases (whole element
+            // at once) but no genome bases; H and P consume neither.  All four
+            // skip the element and recurse to land on a genome-consuming op.
+            case I: // insertion w.r.t. the reference
+//                if ( !sawMop )
+//                    break;
+            case S: // soft clip
+                currentReadOffset += curElement.getLength();
+            case H: // hard clip
+            case P: // padding
+                currentOperatorIndex++;
+                return stepForwardOnGenome();
+
+            case D: // deletion w.r.t. the reference
+            case N: // reference skip (looks and gets processed just like a "deletion", just different logical meaning)
+                currentPositionOnOperator++;
+                currentGenomeOffset++;
+                break;
+
+            case M:
+            case EQ:
+            case X:
+                // Alignment ops consume one read base and one genome base.
+                sawMop = true;
+                currentReadOffset++;
+                currentPositionOnOperator++;
+                currentGenomeOffset++;
+                break;
+            default:
+                throw new IllegalStateException("No support for cigar op: " + curElement.getOperator());
+        }
+
+        // Recompute the neighbouring-event flags for the position just reached.
+        final boolean isFirstOp = currentOperatorIndex == 0;
+        final boolean isLastOp = currentOperatorIndex == numOperators - 1;
+        final boolean isFirstBaseOfOp = currentPositionOnOperator == 1;
+        final boolean isLastBaseOfOp = currentPositionOnOperator == curElement.getLength();
+
+        isBeforeDeletionStart = isBeforeOp(read.getCigar(), currentOperatorIndex, CigarOperator.D, isLastOp, isLastBaseOfOp);
+        isBeforeDeletedBase = isBeforeDeletionStart || (!isLastBaseOfOp && curElement.getOperator() == CigarOperator.D);
+        isAfterDeletionEnd = isAfterOp(read.getCigar(), currentOperatorIndex, CigarOperator.D, isFirstOp, isFirstBaseOfOp);
+        isAfterDeletedBase  = isAfterDeletionEnd || (!isFirstBaseOfOp && curElement.getOperator() == CigarOperator.D);
+        isBeforeInsertion = isBeforeOp(read.getCigar(), currentOperatorIndex, CigarOperator.I, isLastOp, isLastBaseOfOp)
+                || (!sawMop && curElement.getOperator() == CigarOperator.I);
+        isAfterInsertion = isAfterOp(read.getCigar(), currentOperatorIndex, CigarOperator.I, isFirstOp, isFirstBaseOfOp);
+        isNextToSoftClip = isBeforeOp(read.getCigar(), currentOperatorIndex, CigarOperator.S, isLastOp, isLastBaseOfOp)
+                || isAfterOp(read.getCigar(), currentOperatorIndex, CigarOperator.S, isFirstOp, isFirstBaseOfOp);
+
+        return true;
+    }
+
+    /** True if on the last base of a non-final element whose successor is {@code op}. */
+    private static boolean isBeforeOp(final Cigar cigar,
+                                      final int currentOperatorIndex,
+                                      final CigarOperator op,
+                                      final boolean isLastOp,
+                                      final boolean isLastBaseOfOp) {
+        return  !isLastOp && isLastBaseOfOp && cigar.getCigarElement(currentOperatorIndex+1).getOperator() == op;
+    }
+
+    /** True if on the first base of a non-initial element whose predecessor is {@code op}. */
+    private static boolean isAfterOp(final Cigar cigar,
+                                     final int currentOperatorIndex,
+                                     final CigarOperator op,
+                                     final boolean isFirstOp,
+                                     final boolean isFirstBaseOfOp) {
+        return  !isFirstOp && isFirstBaseOfOp && cigar.getCigarElement(currentOperatorIndex-1).getOperator() == op;
+    }
+}
diff --git a/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/locusiterator/LocusIteratorBenchmark.java b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/locusiterator/LocusIteratorBenchmark.java
new file mode 100644
index 0000000..ea7165b
--- /dev/null
+++ b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/locusiterator/LocusIteratorBenchmark.java
@@ -0,0 +1,142 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.locusiterator;
+
+import com.google.caliper.Param;
+import com.google.caliper.SimpleBenchmark;
+import htsjdk.samtools.SAMFileHeader;
+import org.broadinstitute.gatk.utils.contexts.AlignmentContext;
+import org.broadinstitute.gatk.utils.GenomeLocParser;
+import org.broadinstitute.gatk.utils.QualityUtils;
+import org.broadinstitute.gatk.utils.Utils;
+import org.broadinstitute.gatk.utils.sam.ArtificialSAMUtils;
+import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
+
+import java.util.LinkedList;
+import java.util.List;
+
+/**
+ * Caliper microbenchmark of fragment pileup
+ */
+public class LocusIteratorBenchmark extends SimpleBenchmark {
+    protected SAMFileHeader header;
+    protected GenomeLocParser genomeLocParser;
+
+    List<GATKSAMRecord> reads = new LinkedList<GATKSAMRecord>();
+    final int readLength = 101;
+    final int nReads = 10000;
+    final int locus = 1;
+
+    @Param({"101M", "50M10I40M", "50M10D40M"})
+    String cigar; // set automatically by framework
+
+    @Override protected void setUp() {
+        header = ArtificialSAMUtils.createArtificialSamHeader(1, 1, 1000);
+        genomeLocParser = new GenomeLocParser(header.getSequenceDictionary());
+
+        for ( int j = 0; j < nReads; j++ ) {
+            GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(header, "read", 0, locus, readLength);
+            read.setReadBases(Utils.dupBytes((byte) 'A', readLength));
+            final byte[] quals = new byte[readLength];
+            for ( int i = 0; i < readLength; i++ )
+                quals[i] = (byte)(i % QualityUtils.MAX_SAM_QUAL_SCORE);
+            read.setBaseQualities(quals);
+            read.setCigarString(cigar);
+            reads.add(read);
+        }
+    }
+
+//    public void timeOriginalLIBS(int rep) {
+//        for ( int i = 0; i < rep; i++ ) {
+//            final org.broadinstitute.gatk.utils.locusiterator.old.LocusIteratorByState libs =
+//                    new org.broadinstitute.gatk.utils.locusiterator.old.LocusIteratorByState(
+//                            new LocusIteratorByStateBaseTest.FakeCloseableIterator<SAMRecord>(reads.iterator()),
+//                            LocusIteratorByStateBaseTest.createTestReadProperties(),
+//                            genomeLocParser,
+//                            LocusIteratorByState.sampleListForSAMWithoutReadGroups());
+//
+//            while ( libs.hasNext() ) {
+//                AlignmentContext context = libs.next();
+//            }
+//        }
+//    }
+//
+//    public void timeLegacyLIBS(int rep) {
+//        for ( int i = 0; i < rep; i++ ) {
+//            final org.broadinstitute.gatk.utils.locusiterator.legacy.LegacyLocusIteratorByState libs =
+//                    new org.broadinstitute.gatk.utils.locusiterator.legacy.LegacyLocusIteratorByState(
+//                            new LocusIteratorByStateBaseTest.FakeCloseableIterator<SAMRecord>(reads.iterator()),
+//                            LocusIteratorByStateBaseTest.createTestReadProperties(),
+//                            genomeLocParser,
+//                            LocusIteratorByState.sampleListForSAMWithoutReadGroups());
+//
+//            while ( libs.hasNext() ) {
+//                AlignmentContext context = libs.next();
+//            }
+//        }
+//    }
+
+    public void timeNewLIBS(int rep) {
+        for ( int i = 0; i < rep; i++ ) {
+            final org.broadinstitute.gatk.utils.locusiterator.LocusIteratorByState libs =
+                    new org.broadinstitute.gatk.utils.locusiterator.LocusIteratorByState(
+                            new LocusIteratorByStateBaseTest.FakeCloseableIterator<GATKSAMRecord>(reads.iterator()),
+                            null, true, false,
+                            genomeLocParser,
+                            LocusIteratorByState.sampleListForSAMWithoutReadGroups());
+
+            while ( libs.hasNext() ) {
+                AlignmentContext context = libs.next();
+            }
+        }
+    }
+
+//    public void timeOriginalLIBSStateMachine(int rep) {
+//        for ( int i = 0; i < rep; i++ ) {
+//            for ( final SAMRecord read : reads ) {
+//                final SAMRecordAlignmentState alignmentStateMachine = new SAMRecordAlignmentState(read);
+//                while ( alignmentStateMachine.stepForwardOnGenome() != null ) {
+//                    alignmentStateMachine.getGenomeOffset();
+//                }
+//            }
+//        }
+//    }
+
+    public void timeAlignmentStateMachine(int rep) {
+        for ( int i = 0; i < rep; i++ ) {
+            for ( final GATKSAMRecord read : reads ) {
+                final AlignmentStateMachine alignmentStateMachine = new AlignmentStateMachine(read);
+                while ( alignmentStateMachine.stepForwardOnGenome() != null ) {
+                    ;
+                }
+            }
+        }
+    }
+
+    public static void main(String[] args) {
+        com.google.caliper.Runner.main(LocusIteratorBenchmark.class, args);
+    }
+}
diff --git a/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/locusiterator/LocusIteratorByStateBaseTest.java b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/locusiterator/LocusIteratorByStateBaseTest.java
new file mode 100644
index 0000000..e390457
--- /dev/null
+++ b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/locusiterator/LocusIteratorByStateBaseTest.java
@@ -0,0 +1,232 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.locusiterator;
+
+import htsjdk.samtools.*;
+import htsjdk.samtools.util.CloseableIterator;
+import org.broadinstitute.gatk.utils.BaseTest;
+import org.broadinstitute.gatk.utils.downsampling.DownsamplingMethod;
+import org.broadinstitute.gatk.utils.GenomeLocParser;
+import org.broadinstitute.gatk.utils.QualityUtils;
+import org.broadinstitute.gatk.utils.Utils;
+import org.broadinstitute.gatk.utils.sam.ArtificialSAMUtils;
+import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
+import org.testng.annotations.BeforeClass;
+import org.testng.annotations.DataProvider;
+
+import java.util.*;
+
+/**
+ * testing of the new (non-legacy) version of LocusIteratorByState
+ */
+public class LocusIteratorByStateBaseTest extends BaseTest {
+    protected static SAMFileHeader header;
+    protected GenomeLocParser genomeLocParser;
+
+    @BeforeClass
+    public void beforeClass() {
+        header = ArtificialSAMUtils.createArtificialSamHeader(1, 1, 1000);
+        genomeLocParser = new GenomeLocParser(header.getSequenceDictionary());
+    }
+
+    protected LocusIteratorByState makeLTBS(List<GATKSAMRecord> reads) {
+        return makeLTBS(reads, null, false);
+    }
+
+    protected LocusIteratorByState makeLTBS(final List<GATKSAMRecord> reads,
+                                            final DownsamplingMethod downsamplingMethod,
+                                            final boolean keepUniqueReadList) {
+        return new LocusIteratorByState(new FakeCloseableIterator<GATKSAMRecord>(reads.iterator()),
+                downsamplingMethod, true, keepUniqueReadList,
+                genomeLocParser,
+                LocusIteratorByState.sampleListForSAMWithoutReadGroups());
+    }
+
+    public static class FakeCloseableIterator<T> implements CloseableIterator<T> {
+        Iterator<T> iterator;
+
+        public FakeCloseableIterator(Iterator<T> it) {
+            iterator = it;
+        }
+
+        @Override
+        public void close() {}
+
+        @Override
+        public boolean hasNext() {
+            return iterator.hasNext();
+        }
+
+        @Override
+        public T next() {
+            return iterator.next();
+        }
+
+        @Override
+        public void remove() {
+            throw new UnsupportedOperationException("Don't remove!");
+        }
+    }
+
+    protected static class LIBSTest {
+        public static final int locus = 44367788;
+        final String cigarString;
+        final int readLength;
+        final private List<CigarElement> elements;
+
+        public LIBSTest(final String cigarString) {
+            final Cigar cigar = TextCigarCodec.decode(cigarString);
+            this.cigarString = cigarString;
+            this.elements = cigar.getCigarElements();
+            this.readLength = cigar.getReadLength();
+        }
+
+        @Override
+        public String toString() {
+            return "LIBSTest{" +
+                    "cigar='" + cigarString + '\'' +
+                    ", readLength=" + readLength +
+                    '}';
+        }
+
+        public List<CigarElement> getElements() {
+            return elements;
+        }
+
+        public GATKSAMRecord makeRead() {
+            GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(header, "read", 0, locus, readLength);
+            read.setReadBases(Utils.dupBytes((byte) 'A', readLength));
+            final byte[] quals = new byte[readLength];
+            for ( int i = 0; i < readLength; i++ )
+                quals[i] = (byte)(i % QualityUtils.MAX_SAM_QUAL_SCORE);
+            read.setBaseQualities(quals);
+            read.setCigarString(cigarString);
+            return read;
+        }
+    }
+
+    private boolean isIndel(final CigarElement ce) {
+        return ce.getOperator() == CigarOperator.D || ce.getOperator() == CigarOperator.I;
+    }
+
+    private boolean startsWithDeletion(final List<CigarElement> elements) {
+        for ( final CigarElement element : elements ) {
+            switch ( element.getOperator() ) {
+                case M:
+                case I:
+                case EQ:
+                case X:
+                    return false;
+                case D:
+                    return true;
+                default:
+                    // keep looking
+            }
+        }
+
+        return false;
+    }
+
+    private LIBSTest makePermutationTest(final List<CigarElement> elements) {
+        CigarElement last = null;
+        boolean hasMatch = false;
+
+        // starts with D => bad
+        if ( startsWithDeletion(elements) )
+            return null;
+
+        // ends with D => bad
+        if ( elements.get(elements.size()-1).getOperator() == CigarOperator.D )
+            return null;
+
+        // make sure it's valid
+        String cigar = "";
+        int len = 0;
+        for ( final CigarElement ce : elements ) {
+            if ( ce.getOperator() == CigarOperator.N )
+                return null; // TODO -- don't support N
+
+            // abort on a bad cigar
+            if ( last != null ) {
+                if ( ce.getOperator() == last.getOperator() )
+                    return null;
+                if ( isIndel(ce) && isIndel(last) )
+                    return null;
+            }
+
+            cigar += ce.getLength() + ce.getOperator().toString();
+            len += ce.getLength();
+            last = ce;
+            hasMatch = hasMatch || ce.getOperator() == CigarOperator.M;
+        }
+
+        if ( ! hasMatch && elements.size() == 1 &&
+                ! (last.getOperator() == CigarOperator.I || last.getOperator() == CigarOperator.S))
+            return null;
+
+        return new LIBSTest(cigar);
+    }
+
+    @DataProvider(name = "LIBSTest")
+    public Object[][] createLIBSTests(final List<Integer> cigarLengths, final List<Integer> combinations) {
+        final List<Object[]> tests = new LinkedList<Object[]>();
+
+        final List<CigarOperator> allOps = Arrays.asList(CigarOperator.values());
+
+        final List<CigarElement> singleCigars = new LinkedList<CigarElement>();
+        for ( final int len : cigarLengths )
+            for ( final CigarOperator op : allOps )
+                singleCigars.add(new CigarElement(len, op));
+
+        for ( final int complexity : combinations ) {
+            for ( final List<CigarElement> elements : Utils.makePermutations(singleCigars, complexity, true) ) {
+                final LIBSTest test = makePermutationTest(elements);
+                if ( test != null ) tests.add(new Object[]{test});
+            }
+        }
+
+        return tests.toArray(new Object[][]{});
+    }
+
+    /**
+     * Work around inadequate tests that aren't worth fixing.
+     *
+     * Look at the CIGAR 2M2P2D2P2M.  Both M states border a deletion, separated by P (padding elements).  So
+     * the right answer for deletions here is true for isBeforeDeletion() and isAfterDeletion() for the first
+     * and second M.  But the LIBS_position doesn't say so.
+     *
+     * @param elements
+     * @return
+     */
+    protected static boolean hasNeighboringPaddedOps(final List<CigarElement> elements, final int elementI) {
+        return (elementI - 1 >= 0 && isPadding(elements.get(elementI-1))) ||
+                (elementI + 1 < elements.size() && isPadding(elements.get(elementI+1)));
+    }
+
+    private static boolean isPadding(final CigarElement elt) {
+        return elt.getOperator() == CigarOperator.P || elt.getOperator() == CigarOperator.H || elt.getOperator() == CigarOperator.S;
+    }
+}
diff --git a/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/locusiterator/LocusIteratorByStateUnitTest.java b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/locusiterator/LocusIteratorByStateUnitTest.java
new file mode 100644
index 0000000..78185d8
--- /dev/null
+++ b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/locusiterator/LocusIteratorByStateUnitTest.java
@@ -0,0 +1,743 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.locusiterator;
+
+import htsjdk.samtools.CigarOperator;
+import htsjdk.samtools.SAMFileHeader;
+import htsjdk.samtools.SAMReadGroupRecord;
+import org.broadinstitute.gatk.utils.contexts.AlignmentContext;
+import org.broadinstitute.gatk.utils.downsampling.DownsampleType;
+import org.broadinstitute.gatk.utils.downsampling.DownsamplingMethod;
+import org.broadinstitute.gatk.utils.NGSPlatform;
+import org.broadinstitute.gatk.utils.QualityUtils;
+import org.broadinstitute.gatk.utils.Utils;
+import org.broadinstitute.gatk.utils.pileup.PileupElement;
+import org.broadinstitute.gatk.utils.pileup.ReadBackedPileup;
+import org.broadinstitute.gatk.utils.sam.ArtificialBAMBuilder;
+import org.broadinstitute.gatk.utils.sam.ArtificialSAMUtils;
+import org.broadinstitute.gatk.utils.sam.GATKSAMReadGroupRecord;
+import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
+import org.testng.Assert;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+import java.util.*;
+
+/**
+ * testing of the new (non-legacy) version of LocusIteratorByState
+ */
+public class LocusIteratorByStateUnitTest extends LocusIteratorByStateBaseTest {
+    private static final boolean DEBUG = false;
+    protected LocusIteratorByState li;
+
+    @Test(enabled = !DEBUG)
+    public void testUnmappedAndAllIReadsPassThrough() {
+        final int readLength = 10;
+        GATKSAMRecord mapped1 = ArtificialSAMUtils.createArtificialRead(header,"mapped1",0,1,readLength);
+        GATKSAMRecord mapped2 = ArtificialSAMUtils.createArtificialRead(header,"mapped2",0,1,readLength);
+        GATKSAMRecord unmapped = ArtificialSAMUtils.createArtificialRead(header,"unmapped",0,1,readLength);
+        GATKSAMRecord allI = ArtificialSAMUtils.createArtificialRead(header,"allI",0,1,readLength);
+
+        unmapped.setReadUnmappedFlag(true);
+        unmapped.setCigarString("*");
+        allI.setCigarString(readLength + "I");
+
+        List<GATKSAMRecord> reads = Arrays.asList(mapped1, unmapped, allI, mapped2);
+
+        // create the iterator by state with the fake reads and fake records
+        li = makeLTBS(reads, DownsamplingMethod.NONE, true);
+
+        Assert.assertTrue(li.hasNext());
+        AlignmentContext context = li.next();
+        ReadBackedPileup pileup = context.getBasePileup();
+        Assert.assertEquals(pileup.depthOfCoverage(), 2, "Should see only 2 reads in pileup, even with unmapped and all I reads");
+
+        final List<GATKSAMRecord> rawReads = li.transferReadsFromAllPreviousPileups();
+        Assert.assertEquals(rawReads, reads, "Input and transferred read lists should be the same, and include the unmapped and all I reads");
+    }
+
+    @Test(enabled = true && ! DEBUG)
+    public void testXandEQOperators() {
+        final byte[] bases1 = new byte[] {'A','A','A','A','A','A','A','A','A','A'};
+        final byte[] bases2 = new byte[] {'A','A','A','C','A','A','A','A','A','C'};
+
+        GATKSAMRecord r1 = ArtificialSAMUtils.createArtificialRead(header,"r1",0,1,10);
+        r1.setReadBases(bases1);
+        r1.setBaseQualities(new byte[] {20,20,20,20,20,20,20,20,20,20});
+        r1.setCigarString("10M");
+
+        GATKSAMRecord r2 = ArtificialSAMUtils.createArtificialRead(header,"r2",0,1,10);
+        r2.setReadBases(bases2);
+        r2.setBaseQualities(new byte[] {20,20,20,20,20,20,20,20,20,20,20,20});
+        r2.setCigarString("3=1X5=1X");
+
+        GATKSAMRecord r3 = ArtificialSAMUtils.createArtificialRead(header,"r3",0,1,10);
+        r3.setReadBases(bases2);
+        r3.setBaseQualities(new byte[] {20,20,20,20,20,20,20,20,20,20,20,20});
+        r3.setCigarString("3=1X5M1X");
+
+        GATKSAMRecord r4  = ArtificialSAMUtils.createArtificialRead(header,"r4",0,1,10);
+        r4.setReadBases(bases2);
+        r4.setBaseQualities(new byte[] {20,20,20,20,20,20,20,20,20,20});
+        r4.setCigarString("10M");
+
+        List<GATKSAMRecord> reads = Arrays.asList(r1, r2, r3, r4);
+
+        // create the iterator by state with the fake reads and fake records
+        li = makeLTBS(reads);
+
+        while (li.hasNext()) {
+            AlignmentContext context = li.next();
+            ReadBackedPileup pileup = context.getBasePileup();
+            Assert.assertEquals(pileup.depthOfCoverage(), 4);
+        }
+    }
+
+    @Test(enabled = true && ! DEBUG)
+    public void testIndelsInRegularPileup() {
+        final byte[] bases = new byte[] {'A','A','A','A','A','A','A','A','A','A'};
+        final byte[] indelBases = new byte[] {'A','A','A','A','C','T','A','A','A','A','A','A'};
+
+        GATKSAMRecord before = ArtificialSAMUtils.createArtificialRead(header,"before",0,1,10);
+        before.setReadBases(bases);
+        before.setBaseQualities(new byte[] {20,20,20,20,20,20,20,20,20,20});
+        before.setCigarString("10M");
+
+        GATKSAMRecord during = ArtificialSAMUtils.createArtificialRead(header,"during",0,2,10);
+        during.setReadBases(indelBases);
+        during.setBaseQualities(new byte[] {20,20,20,20,20,20,20,20,20,20,20,20});
+        during.setCigarString("4M2I6M");
+
+        GATKSAMRecord after  = ArtificialSAMUtils.createArtificialRead(header,"after",0,3,10);
+        after.setReadBases(bases);
+        after.setBaseQualities(new byte[] {20,20,20,20,20,20,20,20,20,20});
+        after.setCigarString("10M");
+
+        List<GATKSAMRecord> reads = Arrays.asList(before, during, after);
+
+        // create the iterator by state with the fake reads and fake records
+        li = makeLTBS(reads);
+
+        boolean foundIndel = false;
+        while (li.hasNext()) {
+            AlignmentContext context = li.next();
+            ReadBackedPileup pileup = context.getBasePileup().getBaseFilteredPileup(10);
+            for (PileupElement p : pileup) {
+                if (p.isBeforeInsertion()) {
+                    foundIndel = true;
+                    Assert.assertEquals(p.getLengthOfImmediatelyFollowingIndel(), 2, "Wrong event length");
+                    Assert.assertEquals(p.getBasesOfImmediatelyFollowingInsertion(), "CT", "Inserted bases are incorrect");
+                    break;
+               }
+            }
+
+         }
+
+         Assert.assertTrue(foundIndel,"Indel in pileup not found");
+    }
+
+    @Test(enabled = false && ! DEBUG)
+    public void testWholeIndelReadInIsolation() {
+        final int firstLocus = 44367789;
+
+        GATKSAMRecord indelOnlyRead = ArtificialSAMUtils.createArtificialRead(header, "indelOnly", 0, firstLocus, 76);
+        indelOnlyRead.setReadBases(Utils.dupBytes((byte)'A',76));
+        indelOnlyRead.setBaseQualities(Utils.dupBytes((byte) '@', 76));
+        indelOnlyRead.setCigarString("76I");
+
+        List<GATKSAMRecord> reads = Arrays.asList(indelOnlyRead);
+
+        // create the iterator by state with the fake reads and fake records
+        li = makeLTBS(reads);
+
+        // Traditionally, reads that end with indels bleed into the pileup at the following locus.  Verify that the next pileup contains this read
+        // and considers it to be an indel-containing read.
+        Assert.assertTrue(li.hasNext(),"Should have found a whole-indel read in the normal base pileup without extended events enabled");
+        AlignmentContext alignmentContext = li.next();
+        Assert.assertEquals(alignmentContext.getLocation().getStart(), firstLocus, "Base pileup is at incorrect location.");
+        ReadBackedPileup basePileup = alignmentContext.getBasePileup();
+        Assert.assertEquals(basePileup.getReads().size(),1,"Pileup is of incorrect size");
+        Assert.assertSame(basePileup.getReads().get(0), indelOnlyRead, "Read in pileup is incorrect");
+    }
+
+    /**
+     * Test to make sure that reads supporting only an indel (example cigar string: 76I) do
+     * not negatively influence the ordering of the pileup.
+     */
+    @Test(enabled = true && ! DEBUG)
+    public void testWholeIndelRead() {
+        final int firstLocus = 44367788, secondLocus = firstLocus + 1;
+
+        GATKSAMRecord leadingRead = ArtificialSAMUtils.createArtificialRead(header,"leading",0,firstLocus,76);
+        leadingRead.setReadBases(Utils.dupBytes((byte)'A',76));
+        leadingRead.setBaseQualities(Utils.dupBytes((byte)'@',76));
+        leadingRead.setCigarString("1M75I");
+
+        GATKSAMRecord indelOnlyRead = ArtificialSAMUtils.createArtificialRead(header,"indelOnly",0,secondLocus,76);
+        indelOnlyRead.setReadBases(Utils.dupBytes((byte) 'A', 76));
+        indelOnlyRead.setBaseQualities(Utils.dupBytes((byte)'@',76));
+        indelOnlyRead.setCigarString("76I");
+
+        GATKSAMRecord fullMatchAfterIndel = ArtificialSAMUtils.createArtificialRead(header,"fullMatch",0,secondLocus,76);
+        fullMatchAfterIndel.setReadBases(Utils.dupBytes((byte)'A',76));
+        fullMatchAfterIndel.setBaseQualities(Utils.dupBytes((byte)'@',76));
+        fullMatchAfterIndel.setCigarString("75I1M");
+
+        List<GATKSAMRecord> reads = Arrays.asList(leadingRead, indelOnlyRead, fullMatchAfterIndel);
+
+        // create the iterator by state with the fake reads and fake records
+        li = makeLTBS(reads, null, false);
+        int currentLocus = firstLocus;
+        int numAlignmentContextsFound = 0;
+
+        while(li.hasNext()) {
+            AlignmentContext alignmentContext = li.next();
+            Assert.assertEquals(alignmentContext.getLocation().getStart(),currentLocus,"Current locus returned by alignment context is incorrect");
+
+            if(currentLocus == firstLocus) {
+                List<GATKSAMRecord> readsAtLocus = alignmentContext.getBasePileup().getReads();
+                Assert.assertEquals(readsAtLocus.size(),1,"Wrong number of reads at locus " + currentLocus);
+                Assert.assertSame(readsAtLocus.get(0),leadingRead,"leadingRead absent from pileup at locus " + currentLocus);
+            }
+            else if(currentLocus == secondLocus) {
+                List<GATKSAMRecord> readsAtLocus = alignmentContext.getBasePileup().getReads();
+                Assert.assertEquals(readsAtLocus.size(),1,"Wrong number of reads at locus " + currentLocus);
+                Assert.assertSame(readsAtLocus.get(0),fullMatchAfterIndel,"fullMatchAfterIndel absent from pileup at locus " + currentLocus);
+            }
+
+            currentLocus++;
+            numAlignmentContextsFound++;
+        }
+
+        Assert.assertEquals(numAlignmentContextsFound, 2, "Found incorrect number of alignment contexts");
+    }
+
+    /**
+     * Test to make sure that reads supporting only an indel (example cigar string: 76I) are represented properly
+     */
+    @Test(enabled = false && ! DEBUG)
+    public void testWholeIndelReadRepresentedTest() {
+        final int firstLocus = 44367788, secondLocus = firstLocus + 1;
+
+        GATKSAMRecord read1 = ArtificialSAMUtils.createArtificialRead(header,"read1",0,secondLocus,1);
+        read1.setReadBases(Utils.dupBytes((byte) 'A', 1));
+        read1.setBaseQualities(Utils.dupBytes((byte) '@', 1));
+        read1.setCigarString("1I");
+
+        List<GATKSAMRecord> reads = Arrays.asList(read1);
+
+        // create the iterator by state with the fake reads and fake records
+        li = makeLTBS(reads, null, false);
+
+        while(li.hasNext()) {
+            AlignmentContext alignmentContext = li.next();
+            ReadBackedPileup p = alignmentContext.getBasePileup();
+            Assert.assertTrue(p.getNumberOfElements() == 1);
+            // TODO -- fix tests
+//            PileupElement pe = p.iterator().next();
+//            Assert.assertTrue(pe.isBeforeInsertion());
+//            Assert.assertFalse(pe.isAfterInsertion());
+//            Assert.assertEquals(pe.getBasesOfImmediatelyFollowingInsertion(), "A");
+        }
+
+        GATKSAMRecord read2 = ArtificialSAMUtils.createArtificialRead(header,"read2",0,secondLocus,10);
+        read2.setReadBases(Utils.dupBytes((byte) 'A', 10));
+        read2.setBaseQualities(Utils.dupBytes((byte) '@', 10));
+        read2.setCigarString("10I");
+
+        reads = Arrays.asList(read2);
+
+        // create the iterator by state with the fake reads and fake records
+        li = makeLTBS(reads, null, false);
+
+        while(li.hasNext()) {
+            AlignmentContext alignmentContext = li.next();
+            ReadBackedPileup p = alignmentContext.getBasePileup();
+            Assert.assertTrue(p.getNumberOfElements() == 1);
+            // TODO -- fix tests
+//            PileupElement pe = p.iterator().next();
+//            Assert.assertTrue(pe.isBeforeInsertion());
+//            Assert.assertFalse(pe.isAfterInsertion());
+//            Assert.assertEquals(pe.getBasesOfImmediatelyFollowingInsertion(), "AAAAAAAAAA");
+        }
+    }
+
+
+    /////////////////////////////////////////////
+    // get event length and bases calculations //
+    /////////////////////////////////////////////
+
+    @DataProvider(name = "IndelLengthAndBasesTest")
+    public Object[][] makeIndelLengthAndBasesTest() {
+        final String EVENT_BASES = "ACGTACGTACGT";
+        final List<Object[]> tests = new LinkedList<Object[]>();
+
+        for ( int eventSize = 1; eventSize < 10; eventSize++ ) {
+            for ( final CigarOperator indel : Arrays.asList(CigarOperator.D, CigarOperator.I) ) {
+                final String cigar = String.format("2M%d%s1M", eventSize, indel.toString());
+                final String eventBases = indel == CigarOperator.D ? "" : EVENT_BASES.substring(0, eventSize);
+                final int readLength = 3 + eventBases.length();
+
+                GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(header, "read", 0, 1, readLength);
+                read.setReadBases(("TT" + eventBases + "A").getBytes());
+                final byte[] quals = new byte[readLength];
+                for ( int i = 0; i < readLength; i++ )
+                    quals[i] = (byte)(i % QualityUtils.MAX_SAM_QUAL_SCORE);
+                read.setBaseQualities(quals);
+                read.setCigarString(cigar);
+
+                tests.add(new Object[]{read, indel, eventSize, eventBases.equals("") ? null : eventBases});
+            }
+        }
+
+        return tests.toArray(new Object[][]{});
+    }
+
+    /**
+     * Checks PileupElement's view of an upcoming indel event: a pileup site not adjacent
+     * to the event must report no following indel, while the site immediately before the
+     * event must report the correct event type, length and (for insertions) bases.
+     */
+    @Test(enabled = true && ! DEBUG, dataProvider = "IndelLengthAndBasesTest")
+    public void testIndelLengthAndBasesTest(GATKSAMRecord read, final CigarOperator op, final int eventSize, final String eventBases) {
+        // create the iterator by state with the fake reads and fake records
+        li = makeLTBS(Arrays.asList((GATKSAMRecord)read), null, false);
+
+        Assert.assertTrue(li.hasNext());
+
+        // first site of the 2M block: not adjacent to the indel, so no event info expected
+        final PileupElement firstMatch = getFirstPileupElement(li.next());
+
+        Assert.assertEquals(firstMatch.getLengthOfImmediatelyFollowingIndel(), 0, "Length != 0 for site not adjacent to indel");
+        Assert.assertEquals(firstMatch.getBasesOfImmediatelyFollowingInsertion(), null, "Getbases of following event should be null at non-adjacent event");
+
+        Assert.assertTrue(li.hasNext());
+
+        // second site: the base immediately preceding the indel event
+        final PileupElement pe = getFirstPileupElement(li.next());
+
+        if ( op == CigarOperator.D )
+            Assert.assertTrue(pe.isBeforeDeletionStart());
+        else
+            Assert.assertTrue(pe.isBeforeInsertion());
+
+        Assert.assertEquals(pe.getLengthOfImmediatelyFollowingIndel(), eventSize, "Length of event failed");
+        Assert.assertEquals(pe.getBasesOfImmediatelyFollowingInsertion(), eventBases, "Getbases of following event failed");
+    }
+
+    /** Asserts that the context's base pileup holds exactly one element, and returns it. */
+    private PileupElement getFirstPileupElement(final AlignmentContext context) {
+        final ReadBackedPileup basePileup = context.getBasePileup();
+        Assert.assertEquals(basePileup.getNumberOfElements(), 1);
+        return basePileup.iterator().next();
+    }
+
+    ////////////////////////////////////////////
+    // comprehensive LIBS/PileupElement tests //
+    ////////////////////////////////////////////
+
+    /**
+     * Data provider for the comprehensive LIBS/PileupElement test.
+     *
+     * Delegates to createLIBSTests() with the full cross product of per-operator
+     * sizes. The commented-out variants are kept deliberately as debugging
+     * toggles for running a single hand-picked case.
+     */
+    @DataProvider(name = "MyLIBSTest")
+    public Object[][] makeLIBSTest() {
+        final List<Object[]> tests = new LinkedList<Object[]>();
+
+//        tests.add(new Object[]{new LIBSTest("2=2D2=2X", 1)});
+//        return tests.toArray(new Object[][]{});
+
+        return createLIBSTests(
+                Arrays.asList(1, 2),
+                Arrays.asList(1, 2, 3, 4));
+
+//        return createLIBSTests(
+//                Arrays.asList(2),
+//                Arrays.asList(3));
+    }
+
+    /**
+     * Comprehensive LIBS test: walks a single synthetic read through LocusIteratorByState
+     * and cross-checks every PileupElement against the independent LIBS_position tracker.
+     */
+    @Test(enabled = ! DEBUG, dataProvider = "MyLIBSTest")
+    public void testLIBS(LIBSTest params) {
+        // create the iterator by state with the fake reads and fake records
+        final GATKSAMRecord read = params.makeRead();
+        li = makeLTBS(Arrays.asList((GATKSAMRecord)read), null, false);
+        final LIBS_position tester = new LIBS_position(read);
+
+        int bpVisited = 0;
+        int lastOffset = 0;
+        while ( li.hasNext() ) {
+            bpVisited++;
+
+            AlignmentContext alignmentContext = li.next();
+            ReadBackedPileup p = alignmentContext.getBasePileup();
+            Assert.assertEquals(p.getNumberOfElements(), 1);
+            PileupElement pe = p.iterator().next();
+
+            Assert.assertEquals(p.getNumberOfDeletions(), pe.isDeletion() ? 1 : 0, "wrong number of deletions in the pileup");
+            Assert.assertEquals(p.getNumberOfMappingQualityZeroReads(), pe.getRead().getMappingQuality() == 0 ? 1 : 0, "wrong number of mapq zero reads in the pileup");
+
+            tester.stepForwardOnGenome();
+
+            // deletion-adjacency flags are not well defined next to padded operators, so skip those sites
+            if ( ! hasNeighboringPaddedOps(params.getElements(), pe.getCurrentCigarOffset()) ) {
+                Assert.assertEquals(pe.isBeforeDeletionStart(), tester.isBeforeDeletionStart, "before deletion start failure");
+                Assert.assertEquals(pe.isAfterDeletionEnd(), tester.isAfterDeletionEnd, "after deletion end failure");
+            }
+
+            Assert.assertEquals(pe.isBeforeInsertion(), tester.isBeforeInsertion, "before insertion failure");
+            Assert.assertEquals(pe.isAfterInsertion(), tester.isAfterInsertion, "after insertion failure");
+            Assert.assertEquals(pe.isNextToSoftClip(), tester.isNextToSoftClip, "next to soft clip failure");
+
+            // read offsets must be monotonically non-decreasing as we step along the genome
+            Assert.assertTrue(pe.getOffset() >= lastOffset, "Somehow read offsets are decreasing: lastOffset " + lastOffset + " current " + pe.getOffset());
+            Assert.assertEquals(pe.getOffset(), tester.getCurrentReadOffset(), "Read offsets are wrong at " + bpVisited);
+
+            Assert.assertEquals(pe.getCurrentCigarElement(), read.getCigar().getCigarElement(tester.currentOperatorIndex), "CigarElement index failure");
+            Assert.assertEquals(pe.getOffsetInCurrentCigar(), tester.getCurrentPositionOnOperatorBase0(), "CigarElement index failure");
+
+            Assert.assertEquals(read.getCigar().getCigarElement(pe.getCurrentCigarOffset()), pe.getCurrentCigarElement(), "Current cigar element isn't what we'd get from the read itself");
+
+            Assert.assertTrue(pe.getOffsetInCurrentCigar() >= 0, "Offset into current cigar too small");
+            Assert.assertTrue(pe.getOffsetInCurrentCigar() < pe.getCurrentCigarElement().getLength(), "Offset into current cigar too big");
+
+            Assert.assertEquals(pe.getOffset(), tester.getCurrentReadOffset(), "Read offset failure");
+            lastOffset = pe.getOffset();
+        }
+
+        // LIBS must visit exactly one pileup per reference bp spanned by the read
+        final int expectedBpToVisit = read.getAlignmentEnd() - read.getAlignmentStart() + 1;
+        Assert.assertEquals(bpVisited, expectedBpToVisit, "Didn't visit the expected number of bp");
+    }
+
+    // ------------------------------------------------------------
+    //
+    // Tests for keeping reads
+    //
+    // ------------------------------------------------------------
+
+    /**
+     * Data provider: full cross product of downsampling target (-1 = no downsampling),
+     * reads per locus, locus count, sample count, read-keeping mode and read-grab timing.
+     * The commented-out single-value loops are kept as debugging toggles.
+     */
+    @DataProvider(name = "LIBS_ComplexPileupTests")
+    public Object[][] makeLIBS_ComplexPileupTests() {
+        final List<Object[]> tests = new LinkedList<Object[]>();
+
+        for ( final int downsampleTo : Arrays.asList(-1, 1, 2, 5, 10, 30)) {
+            for ( final int nReadsPerLocus : Arrays.asList(1, 10, 60) ) {
+                for ( final int nLoci : Arrays.asList(1, 10, 25) ) {
+                    for ( final int nSamples : Arrays.asList(1, 2, 10) ) {
+                        for ( final boolean keepReads : Arrays.asList(true, false) ) {
+                            for ( final boolean grabReadsAfterEachCycle : Arrays.asList(true, false) ) {
+//        for ( final int downsampleTo : Arrays.asList(1)) {
+//            for ( final int nReadsPerLocus : Arrays.asList(1) ) {
+//                for ( final int nLoci : Arrays.asList(1) ) {
+//                    for ( final int nSamples : Arrays.asList(1) ) {
+//                        for ( final boolean keepReads : Arrays.asList(true) ) {
+//                            for ( final boolean grabReadsAfterEachCycle : Arrays.asList(true) ) {
+                                tests.add(new Object[]{nReadsPerLocus, nLoci, nSamples,
+                                        keepReads, grabReadsAfterEachCycle,
+                                        downsampleTo});
+                            }
+                        }
+                    }
+                }
+            }
+        }
+
+        return tests.toArray(new Object[][]{});
+    }
+
+    /**
+     * Exercises LIBS read-keeping across combinations of downsampling, multiple samples and
+     * loci, and the two read-transfer modes (grab after each cycle vs. once at the end),
+     * verifying pileup ordering, per-locus coverage bounds, and that kept reads exactly
+     * account for every read seen in any pileup.
+     */
+    @Test(enabled = true && ! DEBUG, dataProvider = "LIBS_ComplexPileupTests")
+    public void testLIBS_ComplexPileupTests(final int nReadsPerLocus,
+                                            final int nLoci,
+                                            final int nSamples,
+                                            final boolean keepReads,
+                                            final boolean grabReadsAfterEachCycle,
+                                            final int downsampleTo) {
+        //logger.warn(String.format("testLIBSKeepSubmittedReads %d %d %d %b %b %b", nReadsPerLocus, nLoci, nSamples, keepReads, grabReadsAfterEachCycle, downsample));
+        final int readLength = 10;
+
+        // downsampleTo == -1 signals "no downsampling" from the data provider
+        final boolean downsample = downsampleTo != -1;
+        final DownsamplingMethod downsampler = downsample
+                ? new DownsamplingMethod(DownsampleType.BY_SAMPLE, downsampleTo, null)
+                : new DownsamplingMethod(DownsampleType.NONE, null, null);
+
+        final ArtificialBAMBuilder bamBuilder = new ArtificialBAMBuilder(header.getSequenceDictionary(), nReadsPerLocus, nLoci);
+        bamBuilder.createAndSetHeader(nSamples).setReadLength(readLength).setAlignmentStart(1);
+
+        final List<GATKSAMRecord> reads = bamBuilder.makeReads();
+        li = new LocusIteratorByState(new FakeCloseableIterator<GATKSAMRecord>(reads.iterator()),
+                downsampler, true, keepReads,
+                genomeLocParser,
+                bamBuilder.getSamples());
+
+        final Set<GATKSAMRecord> seenSoFar = new HashSet<GATKSAMRecord>();
+        final Set<GATKSAMRecord> keptReads = new HashSet<GATKSAMRecord>();
+        int bpVisited = 0;
+        while ( li.hasNext() ) {
+            bpVisited++;
+            final AlignmentContext alignmentContext = li.next();
+            final ReadBackedPileup p = alignmentContext.getBasePileup();
+
+            AssertWellOrderedPileup(p);
+
+            if ( downsample ) {
+                // just not a safe test
+                //Assert.assertTrue(p.getNumberOfElements() <= maxDownsampledCoverage * nSamples, "Too many reads at locus after downsampling");
+            } else {
+                final int minPileupSize = nReadsPerLocus * nSamples;
+                Assert.assertTrue(p.getNumberOfElements() >= minPileupSize);
+            }
+
+            // the number of reads starting here
+            int nReadsStartingHere = 0;
+            for ( final GATKSAMRecord read : p.getReads() )
+                if ( read.getAlignmentStart() == alignmentContext.getPosition() )
+                    nReadsStartingHere++;
+
+            // we can have no more than maxDownsampledCoverage per sample
+            final int maxCoveragePerLocus = downsample ? downsampleTo : nReadsPerLocus;
+            Assert.assertTrue(nReadsStartingHere <= maxCoveragePerLocus * nSamples);
+
+            seenSoFar.addAll(p.getReads());
+            if ( keepReads && grabReadsAfterEachCycle ) {
+                final List<GATKSAMRecord> locusReads = li.transferReadsFromAllPreviousPileups();
+
+
+                if ( downsample ) {
+                    // with downsampling we might have some reads here that were downsampled away
+                    // in the pileup.  We want to ensure that no more than the max coverage per sample is added
+                    Assert.assertTrue(locusReads.size() >= nReadsStartingHere);
+                    Assert.assertTrue(locusReads.size() <= maxCoveragePerLocus * nSamples);
+                } else {
+                    Assert.assertEquals(locusReads.size(), nReadsStartingHere);
+                }
+                keptReads.addAll(locusReads);
+
+                // check that all reads we've seen so far are in our keptReads
+                for ( final GATKSAMRecord read : seenSoFar ) {
+                    Assert.assertTrue(keptReads.contains(read), "A read that appeared in a pileup wasn't found in the kept reads: " + read);
+                }
+            }
+
+            if ( ! keepReads )
+                Assert.assertTrue(li.getReadsFromAllPreviousPileups().isEmpty(), "Not keeping reads but the underlying list of reads isn't empty");
+        }
+
+        if ( keepReads && ! grabReadsAfterEachCycle )
+            keptReads.addAll(li.transferReadsFromAllPreviousPileups());
+
+        if ( ! downsample ) { // downsampling may drop loci
+            final int expectedBpToVisit = nLoci + readLength - 1;
+            Assert.assertEquals(bpVisited, expectedBpToVisit, "Didn't visit the expected number of bp");
+        }
+
+        if ( keepReads ) {
+            // check we have the right number of reads
+            final int totalReads = nLoci * nReadsPerLocus * nSamples;
+            if ( ! downsample ) { // downsampling may drop reads
+                Assert.assertEquals(keptReads.size(), totalReads, "LIBS didn't keep the right number of reads during the traversal");
+
+                // check that every input read was kept.  keptReads is a Set, so we can only
+                // verify membership here, not ordering.  (The previous version of this check
+                // compared reads.get(i) with itself and so could never fail.)
+                for ( int i = 0; i < reads.size(); i++ ) {
+                    final GATKSAMRecord inputRead = reads.get(i);
+                    Assert.assertTrue(keptReads.contains(inputRead), "Input read not found in the kept reads at position " + i);
+                }
+            } else {
+                Assert.assertTrue(keptReads.size() <= totalReads, "LIBS didn't keep the right number of reads during the traversal");
+            }
+
+            // check uniqueness
+            final Set<String> readNames = new HashSet<String>();
+            for ( final GATKSAMRecord read : keptReads ) {
+                Assert.assertFalse(readNames.contains(read.getReadName()), "Found duplicate reads in the kept reads");
+                readNames.add(read.getReadName());
+            }
+
+            // check that all reads we've seen are in our keptReads
+            for ( final GATKSAMRecord read : seenSoFar ) {
+                Assert.assertTrue(keptReads.contains(read), "A read that appeared in a pileup wasn't found in the kept reads: " + read);
+            }
+
+            if ( ! downsample ) {
+                // check that every read in the list of keep reads occurred at least once in one of the pileups
+                for ( final GATKSAMRecord keptRead : keptReads ) {
+                    Assert.assertTrue(seenSoFar.contains(keptRead), "There's a read " + keptRead + " in our keptReads list that never appeared in any pileup");
+                }
+            }
+        }
+    }
+
+    /**
+     * Checks that every element of the pileup is on the pileup's contig and that elements
+     * appear in non-decreasing alignment-start order.
+     */
+    private void AssertWellOrderedPileup(final ReadBackedPileup pileup) {
+        if ( ! pileup.isEmpty() ) {
+            int leftMostPos = -1;
+
+            for ( final PileupElement pe : pileup ) {
+                Assert.assertTrue(pileup.getLocation().getContig().equals(pe.getRead().getReferenceName()), "ReadBackedPileup contains an element " + pe + " that's on a different contig than the pileup itself");
+                Assert.assertTrue(pe.getRead().getAlignmentStart() >= leftMostPos,
+                        "ReadBackedPileup contains an element " + pe + " whose read's alignment start " + pe.getRead().getAlignmentStart()
+                                + " occurs before the leftmost position we've seen previously " + leftMostPos);
+                // track the running maximum; without this update every read was compared
+                // against the initial -1 and the ordering assertion could never fail
+                leftMostPos = pe.getRead().getAlignmentStart();
+            }
+        }
+    }
+
+    // ---------------------------------------------------------------------------
+    // make sure that downsampling isn't holding onto a bazillion reads
+    //
+    /**
+     * Data provider: (reads per locus, downsampling target, per-read payload size in bytes)
+     * combinations used to stress LIBS memory retention under downsampling.
+     */
+    @DataProvider(name = "LIBS_NotHoldingTooManyReads")
+    public Object[][] makeLIBS_NotHoldingTooManyReads() {
+        final List<Object[]> cases = new LinkedList<Object[]>();
+
+        for ( final int target : Arrays.asList(1, 10) ) {
+            for ( final int readsPerLocus : Arrays.asList(100, 1000, 10000, 100000) ) {
+                for ( final int payload : Arrays.asList(0, 1024, 1024*1024) ) {
+                    cases.add(new Object[]{readsPerLocus, target, payload});
+                }
+            }
+        }
+
+        return cases.toArray(new Object[][]{});
+    }
+
+    /**
+     * Memory-stress test: streams many artificial reads (optionally with a large payload
+     * attribute each) through LIBS with downsampling and checks pileup sizes stay bounded,
+     * so downsampling isn't silently retaining every read.
+     */
+    @Test(enabled = true && ! DEBUG, dataProvider = "LIBS_NotHoldingTooManyReads")
+//    @Test(enabled = true, dataProvider = "LIBS_NotHoldingTooManyReads", timeOut = 100000)
+    public void testLIBS_NotHoldingTooManyReads(final int nReadsPerLocus, final int downsampleTo, final int payloadInBytes) {
+        logger.warn(String.format("testLIBS_NotHoldingTooManyReads %d %d %d", nReadsPerLocus, downsampleTo, payloadInBytes));
+        final int readLength = 10;
+
+        final SAMFileHeader header = ArtificialSAMUtils.createArtificialSamHeader(1, 1, 100000);
+        final int nSamples = 1;
+        final List<String> samples = new ArrayList<String>(nSamples);
+        for ( int i = 0; i < nSamples; i++ ) {
+            final GATKSAMReadGroupRecord rg = new GATKSAMReadGroupRecord("rg" + i);
+            final String sample = "sample" + i;
+            samples.add(sample);
+            rg.setSample(sample);
+            rg.setPlatform(NGSPlatform.ILLUMINA.getDefaultPlatform());
+            header.addReadGroup(rg);
+        }
+
+        // NOTE(review): the data provider only supplies downsampleTo of 1 or 10, so
+        // downsample is always true here and the NONE branch is effectively dead
+        final boolean downsample = downsampleTo != -1;
+        final DownsamplingMethod downsampler = downsample
+                ? new DownsamplingMethod(DownsampleType.BY_SAMPLE, downsampleTo, null)
+                : new DownsamplingMethod(DownsampleType.NONE, null, null);
+
+        // final List<GATKSAMRecord> reads = ArtificialSAMUtils.createReadStream(nReadsPerLocus, nLoci, header, 1, readLength);
+
+        // reads are fabricated lazily so only LIBS itself can hold them in memory
+        final WeakReadTrackingIterator iterator = new WeakReadTrackingIterator(nReadsPerLocus, readLength, payloadInBytes, header);
+
+        li = new LocusIteratorByState(iterator,
+                downsampler, true, false,
+                genomeLocParser,
+                samples);
+
+        while ( li.hasNext() ) {
+            final AlignmentContext next = li.next();
+            Assert.assertTrue(next.getBasePileup().getNumberOfElements() <= downsampleTo, "Too many elements in pileup " + next);
+            // TODO -- assert that there are <= X reads in memory after GC for some X
+        }
+    }
+
+    /**
+     * Iterator that fabricates up to nReads artificial reads on demand, each optionally
+     * carrying a payload byte array to inflate per-read memory use.  Because reads are
+     * created lazily, only the consumer (LIBS) can keep them alive.
+     */
+    private static class WeakReadTrackingIterator implements Iterator<GATKSAMRecord> {
+        final int nReads, readLength, payloadInBytes;
+        int readI = 0; // number of reads handed out so far
+        final SAMFileHeader header;
+
+        private WeakReadTrackingIterator(int nReads, int readLength, final int payloadInBytes, final SAMFileHeader header) {
+            this.nReads = nReads;
+            this.readLength = readLength;
+            this.header = header;
+            this.payloadInBytes = payloadInBytes;
+        }
+
+        @Override public boolean hasNext() { return readI < nReads; }
+        @Override public void remove() { throw new UnsupportedOperationException("no remove"); }
+
+        @Override
+        public GATKSAMRecord next() {
+            // honor the Iterator contract instead of silently fabricating extra reads
+            if ( ! hasNext() )
+                throw new java.util.NoSuchElementException("WeakReadTrackingIterator exhausted after " + nReads + " reads");
+            readI++;
+            return makeRead();
+        }
+
+        // builds one read at position 1 in the first read group; names encode the read index
+        private GATKSAMRecord makeRead() {
+            final SAMReadGroupRecord rg = header.getReadGroups().get(0);
+            final String readName = String.format("%s.%d.%s", "read", readI, rg.getId());
+            final GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(header, readName, 0, 1, readLength);
+            read.setReadGroup(new GATKSAMReadGroupRecord(rg));
+            if ( payloadInBytes > 0 )
+                // add a payload byte array to push memory use per read even higher
+                read.setAttribute("PL", new byte[payloadInBytes]);
+            return read;
+        }
+    }
+
+    // ---------------------------------------------------------------------------
+    //
+    // make sure that adapter clipping is working properly in LIBS
+    //
+    // ---------------------------------------------------------------------------
+    /**
+     * Data provider for adapter-clipping tests.  Each case is a proper pair whose insert
+     * size implies nClips adapter bases on one side of the read; the expected arguments
+     * are (clips on left, read-containing pileups, clips on right, read).
+     */
+    @DataProvider(name = "AdapterClippingTest")
+    public Object[][] makeAdapterClippingTest() {
+        final List<Object[]> tests = new LinkedList<Object[]>();
+
+        final int start = 10;
+        for ( final int goodBases : Arrays.asList(10, 20, 30) ) {
+            for ( final int nClips : Arrays.asList(0, 1, 2, 10)) {
+                for ( final boolean onLeft : Arrays.asList(true, false) ) {
+                    final int readLength = nClips + goodBases;
+                    GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(header, "read1" , 0, start, readLength);
+                    read.setProperPairFlag(true);
+                    read.setReadPairedFlag(true);
+                    read.setReadUnmappedFlag(false);
+                    read.setMateUnmappedFlag(false);
+                    read.setReadBases(Utils.dupBytes((byte) 'A', readLength));
+                    read.setBaseQualities(Utils.dupBytes((byte) '@', readLength));
+                    read.setCigarString(readLength + "M");
+
+                    if ( onLeft ) {
+                        // negative-strand read whose mate starts nClips into it:
+                        // the first nClips bases are adapter
+                        read.setReadNegativeStrandFlag(true);
+                        read.setMateNegativeStrandFlag(false);
+                        read.setMateAlignmentStart(start + nClips);
+                        read.setInferredInsertSize(readLength);
+                        tests.add(new Object[]{nClips, goodBases, 0, read});
+                    } else {
+                        // positive-strand read extending past the fragment end:
+                        // the last nClips bases are adapter
+                        read.setReadNegativeStrandFlag(false);
+                        read.setMateNegativeStrandFlag(true);
+                        read.setMateAlignmentStart(start - 1);
+                        read.setInferredInsertSize(goodBases - 1);
+                        tests.add(new Object[]{0, goodBases, nClips, read});
+                    }
+                }
+            }
+        }
+
+        return tests.toArray(new Object[][]{});
+    }
+
+    /**
+     * Verifies that LIBS skips pileups over adapter bases: the first pileup starts after
+     * any left clips, positions advance by one, and exactly nReadContainingPileups are seen.
+     */
+    @Test(enabled = true, dataProvider = "AdapterClippingTest")
+    public void testAdapterClipping(final int nClipsOnLeft, final int nReadContainingPileups, final int nClipsOnRight, final GATKSAMRecord read) {
+
+        li = new LocusIteratorByState(new FakeCloseableIterator<>(Collections.singletonList(read).iterator()),
+                DownsamplingMethod.NONE, true, false,
+                genomeLocParser,
+                LocusIteratorByState.sampleListForSAMWithoutReadGroups());
+
+        int nPileupsSeen = 0;
+        for ( int expectedStart = read.getAlignmentStart() + nClipsOnLeft; li.hasNext(); expectedStart++ ) {
+            final AlignmentContext context = li.next();
+            Assert.assertEquals(context.getLocation().getStart(), expectedStart);
+            nPileupsSeen++;
+        }
+
+        Assert.assertEquals(nPileupsSeen, nReadContainingPileups, "Wrong number of pileups seen");
+    }
+}
diff --git a/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/locusiterator/PerSampleReadStateManagerUnitTest.java b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/locusiterator/PerSampleReadStateManagerUnitTest.java
new file mode 100644
index 0000000..2522b8c
--- /dev/null
+++ b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/locusiterator/PerSampleReadStateManagerUnitTest.java
@@ -0,0 +1,188 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.locusiterator;
+
+import htsjdk.samtools.SAMRecord;
+import org.broadinstitute.gatk.utils.MathUtils;
+import org.broadinstitute.gatk.utils.sam.ArtificialSAMUtils;
+import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
+import org.testng.Assert;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+import java.util.*;
+
+/**
+ * Unit tests for PerSampleReadStateManager, the per-sample read-state bookkeeping used by LocusIteratorByState
+ */
+public class PerSampleReadStateManagerUnitTest extends LocusIteratorByStateBaseTest {
+    // One test scenario: stacks of reads at successive alignment starts, plus an
+    // optional removal cadence applied while iterating the manager's states.
+    private class PerSampleReadStateManagerTest extends TestDataProvider {
+        // number of reads to create at each successive alignment start
+        private List<Integer> readCountsPerAlignmentStart;
+        // all created reads, in insertion order
+        private List<SAMRecord> reads;
+        // the per-alignment-start state machines fed into the manager
+        private List<ArrayList<AlignmentStateMachine>> recordStatesByAlignmentStart;
+        // remove every removalInterval-th state during iteration; 0 disables removal
+        private int removalInterval;
+
+        public PerSampleReadStateManagerTest( List<Integer> readCountsPerAlignmentStart, int removalInterval ) {
+            super(PerSampleReadStateManagerTest.class);
+
+            this.readCountsPerAlignmentStart = readCountsPerAlignmentStart;
+            this.removalInterval = removalInterval;
+
+            reads = new ArrayList<SAMRecord>();
+            recordStatesByAlignmentStart = new ArrayList<ArrayList<AlignmentStateMachine>>();
+
+            setName(String.format("%s: readCountsPerAlignmentStart: %s  removalInterval: %d",
+                    getClass().getSimpleName(), readCountsPerAlignmentStart, removalInterval));
+        }
+
+        // Runs the scenario: loads the manager, does a first iteration pass (removing
+        // states on the requested cadence), then a second pass verifying exactly the
+        // right states were removed.
+        public void run() {
+            PerSampleReadStateManager perSampleReadStateManager = new PerSampleReadStateManager(LocusIteratorByState.NO_DOWNSAMPLING);
+
+            makeReads();
+
+            for ( ArrayList<AlignmentStateMachine> stackRecordStates : recordStatesByAlignmentStart ) {
+                perSampleReadStateManager.addStatesAtNextAlignmentStart(new LinkedList<AlignmentStateMachine>(stackRecordStates));
+            }
+
+            // read state manager should have the right number of reads
+            // NOTE(review): TestNG's assertEquals convention is (actual, expected); the
+            // arguments here look swapped, which only affects the failure message
+            Assert.assertEquals(reads.size(), perSampleReadStateManager.size());
+
+            Iterator<SAMRecord> originalReadsIterator = reads.iterator();
+            Iterator<AlignmentStateMachine> recordStateIterator = perSampleReadStateManager.iterator();
+            int recordStateCount = 0;
+            int numReadStatesRemoved = 0;
+
+            // Do a first-pass validation of the record state iteration by making sure we get back everything we
+            // put in, in the same order, doing any requested removals of read states along the way
+            while ( recordStateIterator.hasNext() ) {
+                AlignmentStateMachine readState = recordStateIterator.next();
+                recordStateCount++;
+                SAMRecord readFromPerSampleReadStateManager = readState.getRead();
+
+                Assert.assertTrue(originalReadsIterator.hasNext());
+                SAMRecord originalRead = originalReadsIterator.next();
+
+                // The read we get back should be literally the same read in memory as we put in
+                Assert.assertTrue(originalRead == readFromPerSampleReadStateManager);
+
+                // If requested, remove a read state every removalInterval states
+                if ( removalInterval > 0 && recordStateCount % removalInterval == 0 ) {
+                    recordStateIterator.remove();
+                    numReadStatesRemoved++;
+                }
+            }
+
+            Assert.assertFalse(originalReadsIterator.hasNext());
+
+            // If we removed any read states, do a second pass through the read states to make sure the right
+            // states were removed
+            if ( numReadStatesRemoved > 0 ) {
+                Assert.assertEquals(perSampleReadStateManager.size(), reads.size() - numReadStatesRemoved);
+
+                originalReadsIterator = reads.iterator();
+                recordStateIterator = perSampleReadStateManager.iterator();
+                int readCount = 0;
+                int readStateCount = 0;
+
+                // Match record states with the reads that should remain after removal
+                while ( recordStateIterator.hasNext() ) {
+                    AlignmentStateMachine readState = recordStateIterator.next();
+                    readStateCount++;
+                    SAMRecord readFromPerSampleReadStateManager = readState.getRead();
+
+                    Assert.assertTrue(originalReadsIterator.hasNext());
+
+                    SAMRecord originalRead = originalReadsIterator.next();
+                    readCount++;
+
+                    // every removalInterval-th original read was removed in the first pass,
+                    // so skip over it when lining up originals with surviving states
+                    if ( readCount % removalInterval == 0 ) {
+                        originalRead = originalReadsIterator.next(); // advance to next read, since the previous one should have been discarded
+                        readCount++;
+                    }
+
+                    // The read we get back should be literally the same read in memory as we put in (after accounting for removals)
+                    Assert.assertTrue(originalRead == readFromPerSampleReadStateManager);
+                }
+
+                Assert.assertEquals(readStateCount, reads.size() - numReadStatesRemoved);
+            }
+
+            // Allow memory used by this test to be reclaimed
+            readCountsPerAlignmentStart = null;
+            reads = null;
+            recordStatesByAlignmentStart = null;
+        }
+
+        // Builds one stack of identical artificial reads per entry in readCountsPerAlignmentStart,
+        // at consecutive alignment starts, recording both the reads and their state machines.
+        private void makeReads() {
+            int alignmentStart = 1;
+
+            for ( int readsThisStack : readCountsPerAlignmentStart ) {
+                ArrayList<GATKSAMRecord> stackReads = new ArrayList<GATKSAMRecord>(ArtificialSAMUtils.createStackOfIdenticalArtificialReads(readsThisStack, header, "foo", 0, alignmentStart, MathUtils.randomIntegerInRange(50, 100)));
+                ArrayList<AlignmentStateMachine> stackRecordStates = new ArrayList<AlignmentStateMachine>();
+
+                for ( GATKSAMRecord read : stackReads ) {
+                    stackRecordStates.add(new AlignmentStateMachine(read));
+                }
+
+                reads.addAll(stackReads);
+                recordStatesByAlignmentStart.add(stackRecordStates);
+            }
+        }
+    }
+
+    // Cross product of read-stack shapes and removal cadences (0 = never remove).
+    @DataProvider(name = "PerSampleReadStateManagerTestDataProvider")
+    public Object[][] createPerSampleReadStateManagerTests() {
+        for ( List<Integer> thisTestReadStateCounts : Arrays.asList( Arrays.asList(1),
+                Arrays.asList(2),
+                Arrays.asList(10),
+                Arrays.asList(1, 1),
+                Arrays.asList(2, 2),
+                Arrays.asList(10, 10),
+                Arrays.asList(1, 10),
+                Arrays.asList(10, 1),
+                Arrays.asList(1, 1, 1),
+                Arrays.asList(2, 2, 2),
+                Arrays.asList(10, 10, 10),
+                Arrays.asList(1, 1, 1, 1, 1, 1),
+                Arrays.asList(10, 10, 10, 10, 10, 10),
+                Arrays.asList(1, 2, 10, 1, 2, 10)
+        ) ) {
+
+            for ( int removalInterval : Arrays.asList(0, 2, 3) ) {
+                new PerSampleReadStateManagerTest(thisTestReadStateCounts, removalInterval);
+            }
+        }
+
+        return PerSampleReadStateManagerTest.getTests(PerSampleReadStateManagerTest.class);
+    }
+
+    @Test(dataProvider = "PerSampleReadStateManagerTestDataProvider")
+    public void runPerSampleReadStateManagerTest( PerSampleReadStateManagerTest test ) {
+        logger.warn("Running test: " + test);
+
+        test.run();
+    }
+}
diff --git a/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/nanoScheduler/InputProducerUnitTest.java b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/nanoScheduler/InputProducerUnitTest.java
new file mode 100644
index 0000000..ff4280e
--- /dev/null
+++ b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/nanoScheduler/InputProducerUnitTest.java
@@ -0,0 +1,94 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.nanoScheduler;
+
+import org.broadinstitute.gatk.utils.BaseTest;
+import org.testng.Assert;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
+/**
+ * Unit tests for InputProducer.
+ *
+ * Checks that values come out strictly in order (0, 1, 2, ...), that the EOF
+ * marker is delivered after the last element, and that
+ * allInputsHaveBeenRead()/getNumInputValues() only report completion once all
+ * inputs have actually been consumed.
+ */
+public class InputProducerUnitTest extends BaseTest {
+    /** All combinations of input size x queue size for testInputProducer. */
+    @DataProvider(name = "InputProducerTest")
+    public Object[][] createInputProducerTest() {
+        List<Object[]> tests = new ArrayList<Object[]>();
+
+        for ( final int nElements : Arrays.asList(0, 1, 10, 100, 1000, 10000, 100000) ) {
+            for ( final int queueSize : Arrays.asList(1, 10, 100) ) {
+                tests.add(new Object[]{ nElements, queueSize });
+            }
+        }
+
+        return tests.toArray(new Object[][]{});
+    }
+
+    // NOTE(review): queueSize is supplied by the data provider but never used in
+    // the test body -- the InputProducer below is constructed without it.
+    // Confirm whether a queue-size-aware constructor was intended here.
+    @Test(enabled = true, dataProvider = "InputProducerTest", timeOut = NanoSchedulerUnitTest.NANO_SCHEDULE_MAX_RUNTIME)
+    public void testInputProducer(final int nElements, final int queueSize) throws InterruptedException {
+        final List<Integer> elements = new ArrayList<Integer>(nElements);
+        for ( int i = 0; i < nElements; i++ ) elements.add(i);
+
+        final InputProducer<Integer> ip = new InputProducer<Integer>(elements.iterator());
+
+        // Before any read: completion must not be reported and the count must be unknown (-1).
+        Assert.assertFalse(ip.allInputsHaveBeenRead(), "InputProvider said that all inputs have been read, but I haven't started reading yet");
+        Assert.assertEquals(ip.getNumInputValues(), -1, "InputProvider told me that the queue was done, but I haven't started reading yet");
+
+        int lastValue = -1;
+        int nRead = 0;
+        while ( ip.hasNext() ) {
+            final int nTotalElements = ip.getNumInputValues();
+
+            // The count may only become known once every element has been read.
+            if ( nRead < nElements )
+                Assert.assertEquals(nTotalElements, -1, "getNumInputValues should have returned -1 with not all elements read");
+            // note, cannot test else case because elements input could have emptied between calls
+
+            final InputProducer<Integer>.InputValue value = ip.next();
+            if ( value.isEOFMarker() ) {
+                // EOF must arrive only after every element was seen.
+                Assert.assertEquals(nRead, nElements, "Number of input values " + nRead + " not all that are expected " + nElements);
+                break;
+            } else {
+                // Values must be strictly increasing and consecutive from 0.
+                Assert.assertTrue(lastValue < value.getValue(), "Read values coming out of order!");
+                final int expected = lastValue + 1;
+                Assert.assertEquals((int)value.getValue(), expected, "Value observed " + value.getValue() + " not equal to the expected value " + expected);
+                nRead++;
+                lastValue = value.getValue();
+            }
+        }
+
+        // After EOF: the producer must report completion and the true element count.
+        Assert.assertTrue(ip.allInputsHaveBeenRead(), "InputProvider said that all inputs haven't been read, but I read them all");
+        Assert.assertEquals(ip.getNumInputValues(), nElements, "Wrong number of total elements getNumInputValues");
+    }
+}
diff --git a/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/nanoScheduler/MapResultUnitTest.java b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/nanoScheduler/MapResultUnitTest.java
new file mode 100644
index 0000000..2b62437
--- /dev/null
+++ b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/nanoScheduler/MapResultUnitTest.java
@@ -0,0 +1,65 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.nanoScheduler;
+
+import org.broadinstitute.gatk.utils.BaseTest;
+import org.testng.Assert;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
+/**
+ * Unit tests for MapResult.
+ *
+ * Verifies that MapResult.compareTo orders results by job ID, consistent with
+ * the natural ordering of Integer.
+ */
+public class MapResultUnitTest {
+    /** All pairs (id1, id2) in [0, 10) together with the expected comparison sign. */
+    @DataProvider(name = "CompareTester")
+    public Object[][] createCompareTester() {
+        List<Object[]> tests = new ArrayList<Object[]>();
+
+        for ( int id1 = 0; id1 < 10; id1++ ) {
+            for ( int id2 = 0; id2 < 10; id2++ ) {
+                tests.add(new Object[]{ id1, id2, Integer.compare(id1, id2)});
+            }
+        }
+
+        return tests.toArray(new Object[][]{});
+    }
+
+    /** compareTo on two MapResults must agree with comparing their job IDs directly. */
+    @Test(enabled = true, dataProvider = "CompareTester")
+    public void testMapResultCompareTo(final int id1, final int id2, final int comp ) {
+        final MapResult<Integer> mr1 = new MapResult<Integer>(id1, id1);
+        final MapResult<Integer> mr2 = new MapResult<Integer>(id2, id2);
+        Assert.assertEquals(mr1.compareTo(mr2), comp, "Compare MapResultsUnitTest failed");
+    }
+}
diff --git a/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/nanoScheduler/NanoSchedulerUnitTest.java b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/nanoScheduler/NanoSchedulerUnitTest.java
new file mode 100644
index 0000000..f45daa2
--- /dev/null
+++ b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/nanoScheduler/NanoSchedulerUnitTest.java
@@ -0,0 +1,343 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.nanoScheduler;
+
+import org.apache.log4j.BasicConfigurator;
+import org.broadinstitute.gatk.utils.BaseTest;
+import org.broadinstitute.gatk.utils.SimpleTimer;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+import org.testng.Assert;
+import org.testng.annotations.BeforeSuite;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Iterator;
+import java.util.List;
+
+/**
+ * Unit tests for the NanoScheduler map/reduce micro-scheduler.
+ *
+ * Exercises single- and multi-threaded execution, reuse of one scheduler across
+ * repeated execute() calls, shutdown semantics, progress callbacks, and that
+ * exceptions and Errors thrown by the input iterator propagate instead of
+ * deadlocking the scheduler.
+ */
+public class NanoSchedulerUnitTest extends BaseTest {
+    // DEBUG == true disables most tests (see the "enabled = true && ! DEBUG"
+    // expressions below); debug (lower case) is forwarded to NanoScheduler.setDebug().
+    private final static boolean DEBUG = false;
+    private final static boolean debug = false;
+    // Timeouts in milliseconds; generous so slow machines don't fail spuriously.
+    public static final int NANO_SCHEDULE_MAX_RUNTIME = 30000;
+    public static final int EXCEPTION_THROWING_TEST_TIMEOUT = 10000;
+
+    /** Trivial map function: doubles its input. */
+    private static class Map2x implements NSMapFunction<Integer, Integer> {
+        @Override public Integer apply(Integer input) { return input * 2; }
+    }
+
+    /** Sleeps up to 9 ms for every 7th input, simulating unevenly-sized work items. */
+    private static void maybeDelayMe(final int input) {
+        try {
+            if ( input % 7 == 0 ) {
+                final int milliToSleep = (input % 10);
+                Thread.sleep(milliToSleep);
+            }
+        } catch ( InterruptedException ex ) {
+            throw new RuntimeException(ex);
+        }
+    }
+
+    /** Map2x variant that adds artificial delays to stress the scheduler's pipelining. */
+    private static class Map2xWithDelays extends Map2x {
+        @Override public Integer apply(Integer input) {
+            maybeDelayMe(input);
+            return input * 2;
+        }
+    }
+
+    /** Summing reduce that also asserts reduce calls arrive in increasing input order. */
+    private static class ReduceSum implements NSReduceFunction<Integer, Integer> {
+        int prevOne = Integer.MIN_VALUE;
+
+        @Override public Integer apply(Integer one, Integer sum) {
+            // NOTE(review): prevOne is never updated after initialization, so this
+            // ordering assertion is effectively vacuous -- confirm whether
+            // "prevOne = one;" was intended before returning.
+            Assert.assertTrue(prevOne < one, "Reduce came in out of order.  Prev " + prevOne + " cur " + one);
+            return one + sum;
+        }
+    }
+
+    /** Progress function that simply counts how many times it was invoked. */
+    private static class ProgressCallback implements NSProgressFunction<Integer> {
+        int callBacks = 0;
+
+        @Override
+        public void progress(Integer lastMapInput) {
+            callBacks++;
+        }
+    }
+
+
+    /** Directly computes the sum of 2*i for i in [start, end) -- the expected result. */
+    private static int sum2x(final int start, final int end) {
+        int sum = 0;
+        for ( int i = start; i < end; i++ )
+            sum += 2 * i;
+        return sum;
+    }
+
+    /** One scheduler configuration under test: buffer size, thread count, input range, delays. */
+    private static class NanoSchedulerBasicTest extends TestDataProvider {
+        final int bufferSize, nThreads, start, end, expectedResult;
+        final boolean addDelays;
+
+        public NanoSchedulerBasicTest(final int bufferSize, final int nThreads, final int start, final int end, final boolean addDelays) {
+            super(NanoSchedulerBasicTest.class);
+            this.bufferSize = bufferSize;
+            this.nThreads = nThreads;
+            this.start = start;
+            this.end = end;
+            this.expectedResult = sum2x(start, end);
+            this.addDelays = addDelays;
+            setName(String.format("%s nt=%d buf=%d start=%d end=%d sum=%d delays=%b",
+                    getClass().getSimpleName(), nThreads, bufferSize, start, end, expectedResult, addDelays));
+        }
+
+        /** Fresh iterator over the input range [start, end). */
+        public Iterator<Integer> makeReader() {
+            final List<Integer> ints = new ArrayList<Integer>();
+            for ( int i = start; i < end; i++ )
+                ints.add(i);
+            return ints.iterator();
+        }
+
+        /** Lower bound on the number of progress callbacks the scheduler should make. */
+        public int nExpectedCallbacks() {
+            int nElements = Math.max(end - start, 0);
+            return nElements / bufferSize / NanoScheduler.UPDATE_PROGRESS_FREQ;
+        }
+
+        public Map2x makeMap() { return addDelays ? new Map2xWithDelays() : new Map2x(); }
+        public Integer initReduce() { return 0; }
+        public ReduceSum makeReduce() { return new ReduceSum(); }
+
+        /** Builds the scheduler; bufferSize == -1 means "use the scheduler's default buffer size". */
+        public NanoScheduler<Integer, Integer, Integer> makeScheduler() {
+            final NanoScheduler <Integer, Integer, Integer> nano;
+            if ( bufferSize == -1 )
+                nano = new NanoScheduler<Integer, Integer, Integer>(nThreads);
+            else
+                nano = new NanoScheduler<Integer, Integer, Integer>(bufferSize, nThreads);
+
+            nano.setDebug(debug);
+            return nano;
+        }
+    }
+
+    // Small canonical configurations reused by the shutdown and exception tests below.
+    static NanoSchedulerBasicTest exampleTest = null;
+    static NanoSchedulerBasicTest exampleTestWithDelays = null;
+
+    @BeforeSuite
+    public void setUp() throws Exception {
+        exampleTest = new NanoSchedulerBasicTest(10, 2, 1, 10, false);
+        exampleTestWithDelays = new NanoSchedulerBasicTest(10, 2, 1, 10, true);
+    }
+
+    /**
+     * Cartesian product of buffer sizes, thread counts and input sizes (inputs
+     * capped below 1000 elements).  Constructing a NanoSchedulerBasicTest
+     * registers it with the TestDataProvider machinery; getTests() then returns
+     * the accumulated list.
+     */
+    @DataProvider(name = "NanoSchedulerBasicTest")
+    public Object[][] createNanoSchedulerBasicTest() {
+        for ( final int bufferSize : Arrays.asList(-1, 1, 10, 100) ) {
+            for ( final int nt : Arrays.asList(1, 2, 4) ) {
+                for ( final int start : Arrays.asList(0) ) {
+                    for ( final int end : Arrays.asList(0, 1, 2, 11, 100, 10000, 100000) ) {
+                        for ( final boolean addDelays : Arrays.asList(true, false) ) {
+                            if ( end < 1000 )
+                                new NanoSchedulerBasicTest(bufferSize, nt, start, end, addDelays);
+                        }
+                    }
+                }
+            }
+        }
+
+        return NanoSchedulerBasicTest.getTests(NanoSchedulerBasicTest.class);
+    }
+
+    /** Runs the shared body only for the single-threaded configurations. */
+    @Test(enabled = true && ! DEBUG, dataProvider = "NanoSchedulerBasicTest", timeOut = NANO_SCHEDULE_MAX_RUNTIME)
+    public void testSingleThreadedNanoScheduler(final NanoSchedulerBasicTest test) throws InterruptedException {
+        logger.warn("Running " + test);
+        if ( test.nThreads == 1 )
+            testNanoScheduler(test);
+    }
+
+    /** Runs the shared body for every configuration, after single-threaded tests pass. */
+    @Test(enabled = true && ! DEBUG, dataProvider = "NanoSchedulerBasicTest", timeOut = NANO_SCHEDULE_MAX_RUNTIME, dependsOnMethods = "testSingleThreadedNanoScheduler")
+    public void testMultiThreadedNanoScheduler(final NanoSchedulerBasicTest test) throws InterruptedException {
+        logger.warn("Running " + test);
+        if ( test.nThreads >= 1 )
+            testNanoScheduler(test);
+    }
+
+    /** Shared body: one execute() call, checking the sum, callbacks, and configuration. */
+    private void testNanoScheduler(final NanoSchedulerBasicTest test) throws InterruptedException {
+        // NOTE(review): timer is started but never read.
+        final SimpleTimer timer = new SimpleTimer().start();
+        final NanoScheduler<Integer, Integer, Integer> nanoScheduler = test.makeScheduler();
+
+        final ProgressCallback callback = new ProgressCallback();
+        nanoScheduler.setProgressFunction(callback);
+
+        if ( test.bufferSize > -1 )
+            Assert.assertEquals(nanoScheduler.getBufferSize(), test.bufferSize, "bufferSize argument");
+        Assert.assertEquals(nanoScheduler.getnThreads(), test.nThreads, "nThreads argument");
+
+        final Integer sum = nanoScheduler.execute(test.makeReader(), test.makeMap(), test.initReduce(), test.makeReduce());
+        Assert.assertNotNull(sum);
+        Assert.assertEquals((int)sum, test.expectedResult, "NanoScheduler sum not the same as calculated directly");
+
+        Assert.assertTrue(callback.callBacks >= test.nExpectedCallbacks(), "Not enough callbacks detected.  Expected at least " + test.nExpectedCallbacks() + " but saw only " + callback.callBacks);
+        nanoScheduler.shutdown();
+    }
+
+    /** A single scheduler must be reusable across multiple execute() calls. */
+    @Test(enabled = true && ! DEBUG, dataProvider = "NanoSchedulerBasicTest", dependsOnMethods = "testMultiThreadedNanoScheduler", timeOut = 2 * NANO_SCHEDULE_MAX_RUNTIME)
+    public void testNanoSchedulerInLoop(final NanoSchedulerBasicTest test) throws InterruptedException {
+        if ( test.bufferSize > 1) {
+            logger.warn("Running " + test);
+
+            final NanoScheduler<Integer, Integer, Integer> nanoScheduler = test.makeScheduler();
+
+            // test reusing the scheduler
+            for ( int i = 0; i < 10; i++ ) {
+                final Integer sum = nanoScheduler.execute(test.makeReader(), test.makeMap(), test.initReduce(), test.makeReduce());
+                Assert.assertNotNull(sum);
+                Assert.assertEquals((int)sum, test.expectedResult, "NanoScheduler sum not the same as calculated directly");
+            }
+
+            nanoScheduler.shutdown();
+        }
+    }
+
+    /** shutdown() must flip isShutdown() from false to true. */
+    @Test(enabled = true && ! DEBUG, timeOut = NANO_SCHEDULE_MAX_RUNTIME)
+    public void testShutdown() throws InterruptedException {
+        final NanoScheduler<Integer, Integer, Integer> nanoScheduler = new NanoScheduler<Integer, Integer, Integer>(1, 2);
+        Assert.assertFalse(nanoScheduler.isShutdown(), "scheduler should be alive");
+        nanoScheduler.shutdown();
+        Assert.assertTrue(nanoScheduler.isShutdown(), "scheduler should be dead");
+    }
+
+    /** Calling execute() after shutdown() must fail fast with IllegalStateException. */
+    @Test(enabled = true && ! DEBUG, expectedExceptions = IllegalStateException.class, timeOut = NANO_SCHEDULE_MAX_RUNTIME)
+    public void testShutdownExecuteFailure() throws InterruptedException {
+        final NanoScheduler<Integer, Integer, Integer> nanoScheduler = new NanoScheduler<Integer, Integer, Integer>(1, 2);
+        nanoScheduler.shutdown();
+        nanoScheduler.execute(exampleTest.makeReader(), exampleTest.makeMap(), exampleTest.initReduce(), exampleTest.makeReduce());
+    }
+
+    /**
+     * Cases for the deadlock tests: a large input whose iterator throws after a
+     * varying number of elements (growing roughly geometrically up to maxN).
+     */
+    @DataProvider(name = "NanoSchedulerInputExceptionTest")
+    public Object[][] createNanoSchedulerInputExceptionTest() {
+        List<Object[]> tests = new ArrayList<Object[]>();
+
+
+        for ( final int bufSize : Arrays.asList(100) ) {
+            for ( final int nThreads : Arrays.asList(8) ) {
+                for ( final boolean addDelays : Arrays.asList(true, false) ) {
+                    final NanoSchedulerBasicTest test = new NanoSchedulerBasicTest(bufSize, nThreads, 1, 1000000, false);
+                    final int maxN = addDelays ? 1000 : 10000;
+                    for ( int nElementsBeforeError = 0; nElementsBeforeError < maxN; nElementsBeforeError += Math.max(nElementsBeforeError / 10, 1) ) {
+                        tests.add(new Object[]{nElementsBeforeError, test, addDelays});
+                    }
+                }
+            }
+        }
+
+        return tests.toArray(new Object[][]{});
+    }
+
+    /** A NullPointerException thrown by the input iterator must surface to the caller. */
+    @Test(enabled = true, expectedExceptions = NullPointerException.class, timeOut = EXCEPTION_THROWING_TEST_TIMEOUT)
+    public void testInputErrorIsThrown_NPE() throws InterruptedException {
+        executeTestErrorThrowingInput(10, new NullPointerException(), exampleTest, false);
+    }
+
+    /** A ReviewedGATKException thrown by the input iterator must surface to the caller. */
+    @Test(enabled = true, expectedExceptions = ReviewedGATKException.class, timeOut = EXCEPTION_THROWING_TEST_TIMEOUT)
+    public void testInputErrorIsThrown_RSE() throws InterruptedException {
+        executeTestErrorThrowingInput(10, new ReviewedGATKException("test"), exampleTest, false);
+    }
+
+    /** Input RuntimeExceptions must propagate (timeout here would indicate a deadlock). */
+    @Test(enabled = true, expectedExceptions = NullPointerException.class, dataProvider = "NanoSchedulerInputExceptionTest", timeOut = EXCEPTION_THROWING_TEST_TIMEOUT, invocationCount = 1)
+    public void testInputRuntimeExceptionDoesntDeadlock(final int nElementsBeforeError, final NanoSchedulerBasicTest test, final boolean addDelays ) throws InterruptedException {
+        executeTestErrorThrowingInput(nElementsBeforeError, new NullPointerException(), test, addDelays);
+    }
+
+    // NOTE(review): this passes a raw java.lang.Error but expects
+    // ReviewedGATKException -- presumably NanoScheduler wraps Errors in
+    // ReviewedGATKException before rethrowing; confirm against NanoScheduler.
+    @Test(enabled = true, expectedExceptions = ReviewedGATKException.class, dataProvider = "NanoSchedulerInputExceptionTest", timeOut = EXCEPTION_THROWING_TEST_TIMEOUT, invocationCount = 1)
+    public void testInputErrorDoesntDeadlock(final int nElementsBeforeError, final NanoSchedulerBasicTest test, final boolean addDelays ) throws InterruptedException {
+        executeTestErrorThrowingInput(nElementsBeforeError, new Error(), test, addDelays);
+    }
+
+    /** Shared driver: run execute() over an iterator that throws ex partway through. */
+    private void executeTestErrorThrowingInput(final int nElementsBeforeError, final Throwable ex, final NanoSchedulerBasicTest test, final boolean addDelays) {
+        logger.warn("executeTestErrorThrowingInput " + nElementsBeforeError + " ex=" + ex + " test=" + test + " addInputDelays=" + addDelays);
+        final NanoScheduler<Integer, Integer, Integer> nanoScheduler = test.makeScheduler();
+        nanoScheduler.execute(new ErrorThrowingIterator(nElementsBeforeError, ex, addDelays), test.makeMap(), test.initReduce(), test.makeReduce());
+    }
+
+    /** Iterator that yields 1, 2, 3, ... then throws ex once i exceeds nElementsBeforeError. */
+    private static class ErrorThrowingIterator implements Iterator<Integer> {
+        final int nElementsBeforeError;
+        final boolean addDelays;
+        int i = 0;
+        final Throwable ex;
+
+        private ErrorThrowingIterator(final int nElementsBeforeError, Throwable ex, boolean addDelays) {
+            this.nElementsBeforeError = nElementsBeforeError;
+            this.ex = ex;
+            this.addDelays = addDelays;
+        }
+
+        @Override public boolean hasNext() { return true; }
+        @Override public Integer next() {
+            if ( i++ > nElementsBeforeError ) {
+                // Rethrow unchecked throwables as-is; anything else is a test setup bug.
+                if ( ex instanceof Error )
+                    throw (Error)ex;
+                else if ( ex instanceof RuntimeException )
+                    throw (RuntimeException)ex;
+                else
+                    throw new RuntimeException("Bad exception " + ex);
+            } else if ( addDelays ) {
+                maybeDelayMe(i);
+                return i;
+            } else {
+                return i;
+            }
+        }
+        @Override public void remove() { throw new UnsupportedOperationException("x"); }
+    }
+
+    /** Manual driver: args[0] = thread count, args[1] = number of input elements. */
+    public static void main(String [ ] args) {
+        // Local 'logger' intentionally shadows BaseTest's logger for this static context.
+        org.apache.log4j.Logger logger = org.apache.log4j.Logger.getRootLogger();
+        BasicConfigurator.configure();
+        logger.setLevel(org.apache.log4j.Level.DEBUG);
+
+        final NanoSchedulerBasicTest test = new NanoSchedulerBasicTest(1000, Integer.valueOf(args[0]), 0, Integer.valueOf(args[1]), false);
+        final NanoScheduler<Integer, Integer, Integer> nanoScheduler =
+                new NanoScheduler<Integer, Integer, Integer>(test.bufferSize, test.nThreads);
+        nanoScheduler.setDebug(true);
+
+        final Integer sum = nanoScheduler.execute(test.makeReader(), test.makeMap(), test.initReduce(), test.makeReduce());
+        System.out.printf("Sum = %d, expected =%d%n", sum, test.expectedResult);
+        nanoScheduler.shutdown();
+    }
+}
diff --git a/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/nanoScheduler/ReducerUnitTest.java b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/nanoScheduler/ReducerUnitTest.java
new file mode 100644
index 0000000..03c92a0
--- /dev/null
+++ b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/nanoScheduler/ReducerUnitTest.java
@@ -0,0 +1,236 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.nanoScheduler;
+
+import org.broadinstitute.gatk.utils.BaseTest;
+import org.broadinstitute.gatk.utils.MultiThreadedErrorTracker;
+import org.broadinstitute.gatk.utils.Utils;
+import org.testng.Assert;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.List;
+import java.util.concurrent.*;
+
+/**
+ * Unit tests for Reducer.
+ *
+ * Verifies that map results are reduced in job-ID order, that
+ * reduceAsMuchAsPossible() respects the reduce lock (a non-blocking call does
+ * no work while another thread holds the lock; a blocking call waits for it),
+ * and that the final reduce result equals the sum of all submitted jobs.
+ */
+public class ReducerUnitTest extends BaseTest {
+    /**
+     * Test cases pairing a job list with a group size; jobs are fed to the
+     * reducer one group at a time.  groupSize == -1 means "one group holding
+     * every job".  Small job lists are additionally tested in every permutation
+     * to exercise out-of-order arrival.
+     */
+    @DataProvider(name = "ReducerThreadTest")
+    public Object[][] createReducerThreadTest() {
+        List<Object[]> tests = new ArrayList<Object[]>();
+
+        for ( final int groupSize : Arrays.asList(-1, 1, 5, 50, 500, 5000, 50000) ) {
+            for ( final int nElements : Arrays.asList(0, 1, 3, 5) ) {
+                if ( groupSize < nElements ) {
+                    for ( final List<MapResult<Integer>> jobs : Utils.makePermutations(makeJobs(nElements), nElements, false) ) {
+                        tests.add(new Object[]{ new ListOfJobs(jobs), groupSize });
+                    }
+                }
+            }
+
+            for ( final int nElements : Arrays.asList(10, 100, 1000, 10000, 100000, 1000000) ) {
+                if ( groupSize < nElements ) {
+                    tests.add(new Object[]{ new ListOfJobs(makeJobs(nElements)), groupSize });
+                }
+            }
+        }
+
+        return tests.toArray(new Object[][]{});
+    }
+
+    /** Job list with a compact toString so large cases get readable test names. */
+    private static class ListOfJobs extends ArrayList<MapResult<Integer>> {
+        private ListOfJobs(Collection<? extends MapResult<Integer>> c) {
+            super(c);
+        }
+
+        @Override
+        public String toString() {
+            if ( size() < 10 )
+                return super.toString();
+            else
+                return "JobList of " + size();
+        }
+    }
+
+    /** Creates jobs 0..nElements-1 where job i carries value i. */
+    private static List<MapResult<Integer>> makeJobs(final int nElements) {
+        List<MapResult<Integer>> jobs = new ArrayList<MapResult<Integer>>(nElements);
+        for ( int i = 0; i < nElements; i++ ) {
+            jobs.add(new MapResult<Integer>(i, i));
+        }
+        return jobs;
+    }
+
+    /** Sum of the values of all jobs: the expected final reduce result. */
+    private int expectedSum(final List<MapResult<Integer>> jobs) {
+        int sum = 0;
+        for ( final MapResult<Integer> job : jobs )
+            sum += job.getValue();
+        return sum;
+    }
+
+    @Test(enabled = true, dataProvider = "ReducerThreadTest", timeOut = NanoSchedulerUnitTest.NANO_SCHEDULE_MAX_RUNTIME)
+    public void testReducerThread(final List<MapResult<Integer>> allJobs, int groupSize) throws Exception {
+        if ( groupSize == -1 )
+            groupSize = allJobs.size();
+
+        final MapResultsQueue<Integer> mapResultsQueue = new MapResultsQueue<Integer>();
+
+        final List<List<MapResult<Integer>>> jobGroups = Utils.groupList(allJobs, groupSize);
+        final ReduceSumTest reduce = new ReduceSumTest();
+        final Reducer<Integer, Integer> reducer = new Reducer<Integer, Integer>(reduce, new MultiThreadedErrorTracker(), 0);
+
+        // Separate thread blocks on getReduceResult() and checks the final sum.
+        final TestWaitingForFinalReduce waitingThread = new TestWaitingForFinalReduce(reducer, expectedSum(allJobs));
+        final ExecutorService es = Executors.newSingleThreadExecutor();
+        es.submit(waitingThread);
+
+        int lastJobID = -1;
+        int nJobsSubmitted = 0;
+        int jobGroupCount = 0;
+        final int lastJobGroupCount = jobGroups.size() - 1;
+
+        for ( final List<MapResult<Integer>> jobs : jobGroups ) {
+            for ( final MapResult<Integer> job : jobs ) {
+                lastJobID = Math.max(lastJobID, job.getJobID());
+                mapResultsQueue.put(job);
+                nJobsSubmitted++;
+            }
+
+            // After the last group, enqueue the EOF marker so the reducer can finish.
+            if ( jobGroupCount == lastJobGroupCount ) {
+                mapResultsQueue.put(new MapResult<Integer>(lastJobID+1));
+                nJobsSubmitted++;
+            }
+
+            final int nReduced = reducer.reduceAsMuchAsPossible(mapResultsQueue, true);
+            Assert.assertTrue(nReduced <= nJobsSubmitted, "Somehow reduced more jobs than submitted");
+
+            jobGroupCount++;
+        }
+
+        Assert.assertEquals(reduce.nRead, allJobs.size(), "number of read values not all of the values in the reducer queue");
+        es.shutdown();
+        es.awaitTermination(1, TimeUnit.HOURS);
+    }
+
+    /**
+     * A non-blocking reduce attempt must do no work while another thread holds
+     * the reduce lock.  Declared public: TestNG only runs public test methods,
+     * so the previous private declaration meant this test was silently skipped.
+     */
+    @Test(timeOut = 1000, invocationCount = 100)
+    public void testNonBlockingReduce() throws Exception {
+        final Reducer<Integer, Integer> reducer = new Reducer<Integer, Integer>(new ReduceSumTest(), new MultiThreadedErrorTracker(), 0);
+        final MapResultsQueue<Integer> mapResultsQueue = new MapResultsQueue<Integer>();
+        mapResultsQueue.put(new MapResult<Integer>(0, 0));
+        mapResultsQueue.put(new MapResult<Integer>(1, 1));
+
+        final CountDownLatch latch = new CountDownLatch(1);
+        final ExecutorService es = Executors.newSingleThreadExecutor();
+
+        // Grab the reduce lock on another thread and signal once it's held.
+        es.submit(new Runnable() {
+            @Override
+            public void run() {
+                reducer.acquireReduceLock(true);
+                latch.countDown();
+            }
+        });
+
+        latch.await();
+        final int nReduced = reducer.reduceAsMuchAsPossible(mapResultsQueue, false);
+        Assert.assertEquals(nReduced, 0, "The reducer lock was already held but we did some work");
+        es.shutdown();
+        es.awaitTermination(1, TimeUnit.HOURS);
+    }
+
+    /**
+     * A blocking reduce must wait for the lock holder to release and then reduce
+     * everything queued.  Declared public so TestNG actually runs it (see above).
+     */
+    @Test(timeOut = 10000, invocationCount = 100)
+    public void testBlockingReduce() throws Exception {
+        final Reducer<Integer, Integer> reducer = new Reducer<Integer, Integer>(new ReduceSumTest(), new MultiThreadedErrorTracker(), 0);
+        final MapResultsQueue<Integer> mapResultsQueue = new MapResultsQueue<Integer>();
+        mapResultsQueue.put(new MapResult<Integer>(0, 0));
+        mapResultsQueue.put(new MapResult<Integer>(1, 1));
+
+        final CountDownLatch latch = new CountDownLatch(1);
+        final ExecutorService es = Executors.newSingleThreadExecutor();
+
+        // Hold the lock briefly on another thread, then release it.
+        es.submit(new Runnable() {
+            @Override
+            public void run() {
+                reducer.acquireReduceLock(true);
+                latch.countDown();
+                try {
+                    Thread.sleep(100);
+                } catch ( InterruptedException e ) {
+                    Thread.currentThread().interrupt(); // preserve interrupt status
+                } finally {
+                    reducer.releaseReduceLock();
+                }
+            }
+        });
+
+        latch.await();
+        final int nReduced = reducer.reduceAsMuchAsPossible(mapResultsQueue, true);
+        Assert.assertEquals(nReduced, 2, "The reducer should have blocked until the lock was freed and reduced 2 values");
+        es.shutdown();
+        es.awaitTermination(1, TimeUnit.HOURS);
+    }
+
+
+    /** Summing reduce that counts values seen and asserts they arrive as 0, 1, 2, ... */
+    public class ReduceSumTest implements NSReduceFunction<Integer, Integer> {
+        int nRead = 0;
+        int lastValue = -1;
+
+        @Override public Integer apply(Integer one, Integer sum) {
+            Assert.assertTrue(lastValue < one, "Reduce came in out of order.  Prev " + lastValue + " cur " + one);
+            final int expected = lastValue + 1;
+            Assert.assertEquals((int)one, expected, "Value observed " + one + " not equal to the expected value " + expected);
+            nRead++;
+            lastValue = expected;
+
+            return one + sum;
+        }
+    }
+
+    /** Blocks on getReduceResult() and asserts the final sum matches expectations. */
+    final static class TestWaitingForFinalReduce implements Runnable {
+        final Reducer<Integer, Integer> reducer;
+        final int expectedSum;
+
+        TestWaitingForFinalReduce(Reducer<Integer, Integer> reducer, final int expectedSum) {
+            this.reducer = reducer;
+            this.expectedSum = expectedSum;
+        }
+
+        @Override
+        public void run() {
+            final int observedSum = reducer.getReduceResult();
+            Assert.assertEquals(observedSum, expectedSum, "Reduce didn't sum to expected value");
+        }
+    }
+}
\ No newline at end of file
diff --git a/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/pileup/PileupElementUnitTest.java b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/pileup/PileupElementUnitTest.java
new file mode 100644
index 0000000..5c49874
--- /dev/null
+++ b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/pileup/PileupElementUnitTest.java
@@ -0,0 +1,189 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.pileup;
+
+import htsjdk.samtools.CigarElement;
+import htsjdk.samtools.CigarOperator;
+import org.broadinstitute.gatk.utils.Utils;
+import org.broadinstitute.gatk.utils.locusiterator.AlignmentStateMachine;
+import org.broadinstitute.gatk.utils.locusiterator.LIBS_position;
+import org.broadinstitute.gatk.utils.locusiterator.LocusIteratorByStateBaseTest;
+import org.broadinstitute.gatk.utils.sam.ArtificialSAMUtils;
+import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
+import org.testng.Assert;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+import java.util.Arrays;
+import java.util.LinkedList;
+import java.util.List;
+
/**
 * Unit tests for {@link PileupElement} creation and state reporting, driven by
 * the new (non-legacy) LocusIteratorByState machinery via AlignmentStateMachine.
 */
public class PileupElementUnitTest extends LocusIteratorByStateBaseTest {
    /**
     * Provides LIBSTest read configurations covering small combinations of
     * cigar element counts (1-2) and element sizes (1-4).
     */
    @DataProvider(name = "PileupElementTest")
    public Object[][] makePileupElementTest() {
//        return new Object[][]{{new LIBSTest("2X2D2P2X")}};
//        return createLIBSTests(
//                Arrays.asList(2),
//                Arrays.asList(2));
        return createLIBSTests(
                Arrays.asList(1, 2),
                Arrays.asList(1, 2, 3, 4));
    }

    /**
     * Steps an artificial read across the genome one position at a time and
     * checks that the PileupElement produced at each step agrees both with the
     * AlignmentStateMachine's own state and with the independently-computed
     * LIBS_position tester.
     */
    @Test(dataProvider = "PileupElementTest")
    public void testPileupElementTest(LIBSTest params) {
        final GATKSAMRecord read = params.makeRead();
        final AlignmentStateMachine state = new AlignmentStateMachine(read);
        final LIBS_position tester = new LIBS_position(read);

        while ( state.stepForwardOnGenome() != null ) {
            // Advance the reference tester in lock-step with the state machine.
            tester.stepForwardOnGenome();
            final PileupElement pe = state.makePileupElement();

            Assert.assertEquals(pe.getRead(), read);
            Assert.assertEquals(pe.getMappingQual(), read.getMappingQuality());
            Assert.assertEquals(pe.getOffset(), state.getReadOffset());

            Assert.assertEquals(pe.isDeletion(), state.getCigarOperator() == CigarOperator.D);
            Assert.assertEquals(pe.isAfterInsertion(), tester.isAfterInsertion);
            Assert.assertEquals(pe.isBeforeInsertion(), tester.isBeforeInsertion);
            Assert.assertEquals(pe.isNextToSoftClip(), tester.isNextToSoftClip);

            // Deletion start/end flags are only checked when no padded (P) ops
            // neighbor the current cigar element -- presumably the tester and the
            // pileup element disagree in those corner cases (TODO confirm).
            if ( ! hasNeighboringPaddedOps(params.getElements(), pe.getCurrentCigarOffset()) ) {
                Assert.assertEquals(pe.isAfterDeletionEnd(), tester.isAfterDeletionEnd);
                Assert.assertEquals(pe.isBeforeDeletionStart(), tester.isBeforeDeletionStart);
            }



            Assert.assertEquals(pe.atEndOfCurrentCigar(), state.getOffsetIntoCurrentCigarElement() == state.getCurrentCigarElement().getLength() - 1, "atEndOfCurrentCigar failed");
            Assert.assertEquals(pe.atStartOfCurrentCigar(), state.getOffsetIntoCurrentCigarElement() == 0, "atStartOfCurrentCigar failed");

            // Deletions carry no read base/qual, so the sentinel values are expected.
            Assert.assertEquals(pe.getBase(), pe.isDeletion() ? PileupElement.DELETION_BASE : read.getReadBases()[state.getReadOffset()]);
            Assert.assertEquals(pe.getQual(), pe.isDeletion() ? PileupElement.DELETION_QUAL : read.getBaseQualities()[state.getReadOffset()]);

            Assert.assertEquals(pe.getCurrentCigarElement(), state.getCurrentCigarElement());
            Assert.assertEquals(pe.getCurrentCigarOffset(), state.getCurrentCigarElementOffset());

            // tested in libs
            //pe.getLengthOfImmediatelyFollowingIndel();
            //pe.getBasesOfImmediatelyFollowingInsertion();

            // Don't test -- pe.getBaseIndex();
            // At the last base of a cigar element (and not at the read's last
            // element), the "between next" list must expose an upcoming insertion
            // and must be empty when the next element is a match.
            if ( pe.atEndOfCurrentCigar() && state.getCurrentCigarElementOffset() < read.getCigarLength() - 1 ) {
                final CigarElement nextElement = read.getCigar().getCigarElement(state.getCurrentCigarElementOffset() + 1);
                if ( nextElement.getOperator() == CigarOperator.I ) {
                    Assert.assertTrue(pe.getBetweenNextPosition().size() >= 1);
                    Assert.assertEquals(pe.getBetweenNextPosition().get(0), nextElement);
                }
                if ( nextElement.getOperator() == CigarOperator.M ) {
                    Assert.assertTrue(pe.getBetweenNextPosition().isEmpty());
                }
            } else {
                Assert.assertTrue(pe.getBetweenNextPosition().isEmpty());
            }

            // Symmetric check for the "between previous" list at the first base
            // of a cigar element.
            if ( pe.atStartOfCurrentCigar() && state.getCurrentCigarElementOffset() > 0 ) {
                final CigarElement prevElement = read.getCigar().getCigarElement(state.getCurrentCigarElementOffset() - 1);
                if ( prevElement.getOperator() == CigarOperator.I ) {
                    Assert.assertTrue(pe.getBetweenPrevPosition().size() >= 1);
                    Assert.assertEquals(pe.getBetweenPrevPosition().getLast(), prevElement);
                }
                if ( prevElement.getOperator() == CigarOperator.M ) {
                    Assert.assertTrue(pe.getBetweenPrevPosition().isEmpty());
                }
            } else {
                Assert.assertTrue(pe.getBetweenPrevPosition().isEmpty());
            }

            // TODO -- add meaningful tests
            // These are invoked only to confirm they don't throw.
            pe.getBaseInsertionQual();
            pe.getBaseDeletionQual();
        }
    }


    /**
     * Builds reads of the form M (I|P|S){1,3} (M|D) so that the cigar elements
     * lying between the two on-genome positions are known exactly.
     */
    @DataProvider(name = "PrevAndNextTest")
    public Object[][] makePrevAndNextTest() {
        final List<Object[]> tests = new LinkedList<Object[]>();

        final List<CigarOperator> operators = Arrays.asList(CigarOperator.I, CigarOperator.P, CigarOperator.S);

        for ( final CigarOperator firstOp : Arrays.asList(CigarOperator.M) ) {
            for ( final CigarOperator lastOp : Arrays.asList(CigarOperator.M, CigarOperator.D) ) {
                for ( final int nIntermediate : Arrays.asList(1, 2, 3) ) {
                    for ( final List<CigarOperator> combination : Utils.makePermutations(operators, nIntermediate, false) ) {
                        final int readLength = 2 + combination.size();
                        GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(header, "read", 0, 1, readLength);
                        read.setReadBases(Utils.dupBytes((byte) 'A', readLength));
                        read.setBaseQualities(Utils.dupBytes((byte) 30, readLength));

                        // Build a cigar string such as "1M1I1P1D" with one base per element.
                        String cigar = "1" + firstOp;
                        for ( final CigarOperator op : combination ) cigar += "1" + op;
                        cigar += "1" + lastOp;
                        read.setCigarString(cigar);

                        tests.add(new Object[]{read, firstOp, lastOp, combination});
                    }
                }
            }
        }

        return tests.toArray(new Object[][]{});
    }

    /**
     * Verifies that getBetweenNextPosition/getBetweenPrevPosition and the
     * previous/next on-genome cigar element accessors report the intermediate
     * (I/P/S) elements correctly from both flanking on-genome positions.
     */
    @Test(dataProvider = "PrevAndNextTest")
    public void testPrevAndNextTest(final GATKSAMRecord read, final CigarOperator firstOp, final CigarOperator lastOp, final List<CigarOperator> ops) {
        final AlignmentStateMachine state = new AlignmentStateMachine(read);

        // First on-genome position: all intermediate ops lie ahead of us.
        state.stepForwardOnGenome();
        final PileupElement pe = state.makePileupElement();
        Assert.assertEquals(pe.getBetweenNextPosition().size(), ops.size());
        Assert.assertEquals(pe.getBetweenPrevPosition().size(), 0);
        assertEqualsOperators(pe.getBetweenNextPosition(), ops);
        Assert.assertEquals(pe.getPreviousOnGenomeCigarElement(), null);
        Assert.assertNotNull(pe.getNextOnGenomeCigarElement());
        Assert.assertEquals(pe.getNextOnGenomeCigarElement().getOperator(), lastOp);

        // Second on-genome position: the same ops now lie behind us.
        state.stepForwardOnGenome();
        final PileupElement pe2 = state.makePileupElement();
        Assert.assertEquals(pe2.getBetweenPrevPosition().size(), ops.size());
        Assert.assertEquals(pe2.getBetweenNextPosition().size(), 0);
        assertEqualsOperators(pe2.getBetweenPrevPosition(), ops);
        Assert.assertNotNull(pe2.getPreviousOnGenomeCigarElement());
        Assert.assertEquals(pe2.getPreviousOnGenomeCigarElement().getOperator(), firstOp);
        Assert.assertEquals(pe2.getNextOnGenomeCigarElement(), null);
    }

    /** Asserts that each CigarElement's operator matches the expected operator list, in order. */
    private void assertEqualsOperators(final List<CigarElement> elements, final List<CigarOperator> ops) {
        for ( int i = 0; i < elements.size(); i++ ) {
            Assert.assertEquals(elements.get(i).getOperator(), ops.get(i), "elements doesn't have expected operator at position " + i);
        }
    }
}
diff --git a/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/pileup/ReadBackedPileupUnitTest.java b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/pileup/ReadBackedPileupUnitTest.java
new file mode 100644
index 0000000..70377e1
--- /dev/null
+++ b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/pileup/ReadBackedPileupUnitTest.java
@@ -0,0 +1,328 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.pileup;
+
+import htsjdk.samtools.CigarElement;
+import htsjdk.samtools.SAMFileHeader;
+import htsjdk.samtools.SAMReadGroupRecord;
+import org.broadinstitute.gatk.utils.GenomeLoc;
+import org.broadinstitute.gatk.utils.GenomeLocParser;
+import org.broadinstitute.gatk.utils.Utils;
+import org.broadinstitute.gatk.utils.sam.GATKSAMRecord;
+import org.testng.Assert;
+import org.broadinstitute.gatk.utils.sam.ArtificialSAMUtils;
+
+import org.testng.annotations.BeforeClass;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+import java.util.*;
+
+/**
+ * Test routines for read-backed pileup.
+ */
+public class ReadBackedPileupUnitTest {
+    protected static SAMFileHeader header;
+    protected GenomeLocParser genomeLocParser;
+    private GenomeLoc loc;
+
+    @BeforeClass
+    public void beforeClass() {
+        header = ArtificialSAMUtils.createArtificialSamHeader(1, 1, 1000);
+        genomeLocParser = new GenomeLocParser(header.getSequenceDictionary());
+        loc = genomeLocParser.createGenomeLoc("chr1", 1);
+    }
+
+    /**
+     * Ensure that basic read group splitting works.
+     */
+    @Test
+    public void testSplitByReadGroup() {
+        SAMReadGroupRecord readGroupOne = new SAMReadGroupRecord("rg1");
+        SAMReadGroupRecord readGroupTwo = new SAMReadGroupRecord("rg2");
+
+        SAMFileHeader header = ArtificialSAMUtils.createArtificialSamHeader(1,1,1000);
+        header.addReadGroup(readGroupOne);
+        header.addReadGroup(readGroupTwo);
+
+        GATKSAMRecord read1 = ArtificialSAMUtils.createArtificialRead(header,"read1",0,1,10);
+        read1.setAttribute("RG",readGroupOne.getId());
+        GATKSAMRecord read2 = ArtificialSAMUtils.createArtificialRead(header,"read2",0,1,10);
+        read2.setAttribute("RG",readGroupTwo.getId());
+        GATKSAMRecord read3 = ArtificialSAMUtils.createArtificialRead(header,"read3",0,1,10);
+        read3.setAttribute("RG",readGroupOne.getId());
+        GATKSAMRecord read4 = ArtificialSAMUtils.createArtificialRead(header,"read4",0,1,10);
+        read4.setAttribute("RG",readGroupTwo.getId());
+        GATKSAMRecord read5 = ArtificialSAMUtils.createArtificialRead(header,"read5",0,1,10);
+        read5.setAttribute("RG",readGroupTwo.getId());
+        GATKSAMRecord read6 = ArtificialSAMUtils.createArtificialRead(header,"read6",0,1,10);
+        read6.setAttribute("RG",readGroupOne.getId());
+        GATKSAMRecord read7 = ArtificialSAMUtils.createArtificialRead(header,"read7",0,1,10);
+        read7.setAttribute("RG",readGroupOne.getId());
+
+        ReadBackedPileup pileup = new ReadBackedPileupImpl(null, Arrays.asList(read1,read2,read3,read4,read5,read6,read7), Arrays.asList(1,1,1,1,1,1,1));
+
+        ReadBackedPileup rg1Pileup = pileup.getPileupForReadGroup("rg1");
+        List<GATKSAMRecord> rg1Reads = rg1Pileup.getReads();
+        Assert.assertEquals(rg1Reads.size(), 4, "Wrong number of reads in read group rg1");
+        Assert.assertEquals(rg1Reads.get(0), read1, "Read " + read1.getReadName() + " should be in rg1 but isn't");
+        Assert.assertEquals(rg1Reads.get(1), read3, "Read " + read3.getReadName() + " should be in rg1 but isn't");
+        Assert.assertEquals(rg1Reads.get(2), read6, "Read " + read6.getReadName() + " should be in rg1 but isn't");
+        Assert.assertEquals(rg1Reads.get(3), read7, "Read " + read7.getReadName() + " should be in rg1 but isn't");
+
+        ReadBackedPileup rg2Pileup = pileup.getPileupForReadGroup("rg2");
+        List<GATKSAMRecord> rg2Reads = rg2Pileup.getReads();        
+        Assert.assertEquals(rg2Reads.size(), 3, "Wrong number of reads in read group rg2");
+        Assert.assertEquals(rg2Reads.get(0), read2, "Read " + read2.getReadName() + " should be in rg2 but isn't");
+        Assert.assertEquals(rg2Reads.get(1), read4, "Read " + read4.getReadName() + " should be in rg2 but isn't");
+        Assert.assertEquals(rg2Reads.get(2), read5, "Read " + read5.getReadName() + " should be in rg2 but isn't");
+    }
+
+    /**
+     * Ensure that splitting read groups still works when dealing with null read groups.
+     */
+    @Test
+    public void testSplitByNullReadGroups() {
+        SAMFileHeader header = ArtificialSAMUtils.createArtificialSamHeader(1,1,1000);
+
+        GATKSAMRecord read1 = ArtificialSAMUtils.createArtificialRead(header,"read1",0,1,10);
+        GATKSAMRecord read2 = ArtificialSAMUtils.createArtificialRead(header,"read2",0,1,10);
+        GATKSAMRecord read3 = ArtificialSAMUtils.createArtificialRead(header,"read3",0,1,10);
+
+        ReadBackedPileup pileup = new ReadBackedPileupImpl(null,
+                                                           Arrays.asList(read1,read2,read3),
+                                                           Arrays.asList(1,1,1));
+
+        ReadBackedPileup nullRgPileup = pileup.getPileupForReadGroup(null);
+        List<GATKSAMRecord> nullRgReads = nullRgPileup.getReads();
+        Assert.assertEquals(nullRgPileup.getNumberOfElements(), 3, "Wrong number of reads in null read group");
+        Assert.assertEquals(nullRgReads.get(0), read1, "Read " + read1.getReadName() + " should be in null rg but isn't");
+        Assert.assertEquals(nullRgReads.get(1), read2, "Read " + read2.getReadName() + " should be in null rg but isn't");
+        Assert.assertEquals(nullRgReads.get(2), read3, "Read " + read3.getReadName() + " should be in null rg but isn't");
+
+        ReadBackedPileup rg1Pileup = pileup.getPileupForReadGroup("rg1");
+        Assert.assertNull(rg1Pileup, "Pileup for non-existent read group should return null");
+    }
+
+    /**
+     * Ensure that splitting read groups still works when dealing with a sample-split pileup.
+     */
+    @Test
+    public void testSplitBySample() {
+        SAMReadGroupRecord readGroupOne = new SAMReadGroupRecord("rg1");
+        readGroupOne.setSample("sample1");
+        SAMReadGroupRecord readGroupTwo = new SAMReadGroupRecord("rg2");
+        readGroupTwo.setSample("sample2");
+
+        SAMFileHeader header = ArtificialSAMUtils.createArtificialSamHeader(1,1,1000);
+        header.addReadGroup(readGroupOne);
+        header.addReadGroup(readGroupTwo);
+
+        GATKSAMRecord read1 = ArtificialSAMUtils.createArtificialRead(header,"read1",0,1,10);
+        read1.setAttribute("RG",readGroupOne.getId());
+        GATKSAMRecord read2 = ArtificialSAMUtils.createArtificialRead(header,"read2",0,1,10);
+        read2.setAttribute("RG",readGroupTwo.getId());
+        GATKSAMRecord read3 = ArtificialSAMUtils.createArtificialRead(header,"read3",0,1,10);
+        read3.setAttribute("RG",readGroupOne.getId());
+        GATKSAMRecord read4 = ArtificialSAMUtils.createArtificialRead(header,"read4",0,1,10);
+        read4.setAttribute("RG",readGroupTwo.getId());
+
+        ReadBackedPileupImpl sample1Pileup = new ReadBackedPileupImpl(null,
+                                                                      Arrays.asList(read1,read3),
+                                                                      Arrays.asList(1,1));
+        ReadBackedPileupImpl sample2Pileup = new ReadBackedPileupImpl(null,
+                                                                      Arrays.asList(read2,read4),
+                                                                      Arrays.asList(1,1));
+        Map<String,ReadBackedPileupImpl> sampleToPileupMap = new HashMap<String,ReadBackedPileupImpl>();
+        sampleToPileupMap.put(readGroupOne.getSample(),sample1Pileup);
+        sampleToPileupMap.put(readGroupTwo.getSample(),sample2Pileup);
+
+        ReadBackedPileup compositePileup = new ReadBackedPileupImpl(null,sampleToPileupMap);
+
+        ReadBackedPileup rg1Pileup = compositePileup.getPileupForReadGroup("rg1");
+        List<GATKSAMRecord> rg1Reads = rg1Pileup.getReads();
+
+        Assert.assertEquals(rg1Reads.size(), 2, "Wrong number of reads in read group rg1");
+        Assert.assertEquals(rg1Reads.get(0), read1, "Read " + read1.getReadName() + " should be in rg1 but isn't");
+        Assert.assertEquals(rg1Reads.get(1), read3, "Read " + read3.getReadName() + " should be in rg1 but isn't");
+
+        ReadBackedPileup rg2Pileup = compositePileup.getPileupForReadGroup("rg2");
+        List<GATKSAMRecord> rg2Reads = rg2Pileup.getReads();
+
+        Assert.assertEquals(rg1Reads.size(), 2, "Wrong number of reads in read group rg2");
+        Assert.assertEquals(rg2Reads.get(0), read2, "Read " + read2.getReadName() + " should be in rg2 but isn't");
+        Assert.assertEquals(rg2Reads.get(1), read4, "Read " + read4.getReadName() + " should be in rg2 but isn't");
+    }
+
+    @Test
+    public void testGetPileupForSample() {
+        String sample1 = "sample1";
+        String sample2 = "sample2";
+
+        SAMReadGroupRecord readGroupOne = new SAMReadGroupRecord("rg1");
+        readGroupOne.setSample(sample1);
+        SAMReadGroupRecord readGroupTwo = new SAMReadGroupRecord("rg2");
+        readGroupTwo.setSample(sample2);
+
+        SAMFileHeader header = ArtificialSAMUtils.createArtificialSamHeader(1,1,1000);
+        header.addReadGroup(readGroupOne);
+        header.addReadGroup(readGroupTwo);
+
+        GATKSAMRecord read1 = ArtificialSAMUtils.createArtificialRead(header,"read1",0,1,10);
+        read1.setAttribute("RG",readGroupOne.getId());
+        GATKSAMRecord read2 = ArtificialSAMUtils.createArtificialRead(header,"read2",0,1,10);
+        read2.setAttribute("RG",readGroupTwo.getId());
+
+        Map<String,ReadBackedPileupImpl> sampleToPileupMap = new HashMap<String,ReadBackedPileupImpl>();
+        sampleToPileupMap.put(sample1,new ReadBackedPileupImpl(null,Collections.singletonList(read1),0));
+        sampleToPileupMap.put(sample2,new ReadBackedPileupImpl(null,Collections.singletonList(read2),0));
+
+        ReadBackedPileup pileup = new ReadBackedPileupImpl(null,sampleToPileupMap);
+
+        ReadBackedPileup sample2Pileup = pileup.getPileupForSample(sample2);
+        Assert.assertEquals(sample2Pileup.getNumberOfElements(),1,"Sample 2 pileup has wrong number of elements");
+        Assert.assertEquals(sample2Pileup.getReads().get(0),read2,"Sample 2 pileup has incorrect read");
+
+        ReadBackedPileup missingSamplePileup = pileup.getPileupForSample("missing");
+        Assert.assertNull(missingSamplePileup,"Pileup for sample 'missing' should be null but isn't");
+
+        missingSamplePileup = pileup.getPileupForSample("not here");
+        Assert.assertNull(missingSamplePileup,"Pileup for sample 'not here' should be null but isn't");
+    }
+
+    private static int sampleI = 0;
+    private class RBPCountTest {
+        final String sample;
+        final int nReads, nMapq0, nDeletions;
+
+        private RBPCountTest(int nReads, int nMapq0, int nDeletions) {
+            this.sample = "sample" + sampleI++;
+            this.nReads = nReads;
+            this.nMapq0 = nMapq0;
+            this.nDeletions = nDeletions;
+        }
+
+        private List<PileupElement> makeReads( final int n, final int mapq, final String op ) {
+            final int readLength = 3;
+
+            final List<PileupElement> elts = new LinkedList<PileupElement>();
+            for ( int i = 0; i < n; i++ ) {
+                GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(header, "read", 0, 1, readLength);
+                read.setReadBases(Utils.dupBytes((byte) 'A', readLength));
+                read.setBaseQualities(Utils.dupBytes((byte) 30, readLength));
+                read.setCigarString("1M1" + op + "1M");
+                read.setMappingQuality(mapq);
+                final int baseOffset = op.equals("M") ? 1 : 0;
+                final CigarElement cigarElement = read.getCigar().getCigarElement(1);
+                elts.add(new PileupElement(read, baseOffset, cigarElement, 1, 0));
+            }
+
+            return elts;
+        }
+
+        private ReadBackedPileupImpl makePileup() {
+            final List<PileupElement> elts = new LinkedList<PileupElement>();
+
+            elts.addAll(makeReads(nMapq0, 0, "M"));
+            elts.addAll(makeReads(nDeletions, 30, "D"));
+            elts.addAll(makeReads(nReads - nMapq0 - nDeletions, 30, "M"));
+
+            return new ReadBackedPileupImpl(loc, elts);
+        }
+
+        @Override
+        public String toString() {
+            return "RBPCountTest{" +
+                    "sample='" + sample + '\'' +
+                    ", nReads=" + nReads +
+                    ", nMapq0=" + nMapq0 +
+                    ", nDeletions=" + nDeletions +
+                    '}';
+        }
+    }
+
+    @DataProvider(name = "RBPCountingTest")
+    public Object[][] makeRBPCountingTest() {
+        final List<Object[]> tests = new LinkedList<Object[]>();
+
+        for ( final int nMapq : Arrays.asList(0, 10, 20) ) {
+            for ( final int nDeletions : Arrays.asList(0, 10, 20) ) {
+                for ( final int nReg : Arrays.asList(0, 10, 20) ) {
+                    final int total = nMapq + nDeletions + nReg;
+                    if ( total > 0 )
+                        tests.add(new Object[]{new RBPCountTest(total, nMapq, nDeletions)});
+                }
+            }
+        }
+
+        return tests.toArray(new Object[][]{});
+    }
+
+    @Test(dataProvider = "RBPCountingTest")
+    public void testRBPCountingTestSinglePileup(RBPCountTest params) {
+        testRBPCounts(params.makePileup(), params);
+    }
+
+    @Test(dataProvider = "RBPCountingTest")
+    public void testRBPCountingTestMultiSample(RBPCountTest params) {
+        final RBPCountTest newSample = new RBPCountTest(2, 1, 1);
+        final Map<String, ReadBackedPileupImpl> pileupsBySample = new HashMap<String, ReadBackedPileupImpl>();
+        pileupsBySample.put(newSample.sample, newSample.makePileup());
+        pileupsBySample.put(params.sample, params.makePileup());
+        final ReadBackedPileup pileup = new ReadBackedPileupImpl(loc, pileupsBySample);
+        testRBPCounts(pileup, new RBPCountTest(params.nReads + 2, params.nMapq0 + 1, params.nDeletions + 1));
+    }
+
+    private void testRBPCounts(final ReadBackedPileup rbp, RBPCountTest expected) {
+        for ( int cycles = 0; cycles < 3; cycles++ ) {
+            // multiple cycles to make sure caching is working
+            Assert.assertEquals(rbp.getNumberOfElements(), expected.nReads);
+            Assert.assertEquals(rbp.depthOfCoverage(), expected.nReads);
+            Assert.assertEquals(rbp.getNumberOfDeletions(), expected.nDeletions);
+            Assert.assertEquals(rbp.getNumberOfMappingQualityZeroReads(), expected.nMapq0);
+        }
+    }
+
+    @Test
+    public void testRBPMappingQuals() {
+
+        // create a read with high MQ
+        final GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(header, "read", 0, 1, 10);
+        read.setReadBases(Utils.dupBytes((byte) 'A', 10));
+        read.setBaseQualities(Utils.dupBytes((byte) 30, 10));
+        read.setCigarString("10M");
+        read.setMappingQuality(200); // set a MQ higher than max signed byte
+
+        // now create the RBP
+        final List<PileupElement> elts = new LinkedList<>();
+        elts.add(new PileupElement(read, 0, read.getCigar().getCigarElement(0), 0, 0));
+        final Map<String, ReadBackedPileupImpl> pileupsBySample = new HashMap<>();
+        pileupsBySample.put("foo", new ReadBackedPileupImpl(loc, elts));
+        final ReadBackedPileup pileup = new ReadBackedPileupImpl(loc, pileupsBySample);
+
+        Assert.assertEquals(pileup.getMappingQuals()[0], 200);
+    }
+}
\ No newline at end of file
diff --git a/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/progressmeter/ProgressMeterDaemonUnitTest.java b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/progressmeter/ProgressMeterDaemonUnitTest.java
new file mode 100644
index 0000000..57ce9d9
--- /dev/null
+++ b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/progressmeter/ProgressMeterDaemonUnitTest.java
@@ -0,0 +1,121 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.progressmeter;
+
+import org.broadinstitute.gatk.utils.BaseTest;
+import org.broadinstitute.gatk.utils.GenomeLocParser;
+import org.broadinstitute.gatk.utils.GenomeLocSortedSet;
+import org.broadinstitute.gatk.utils.fasta.CachingIndexedFastaSequenceFile;
+import org.testng.Assert;
+import org.testng.annotations.BeforeClass;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.LinkedList;
+import java.util.List;
+
/**
 * Unit tests for the ProgressMeterDaemon, the background thread that
 * periodically triggers progress updates on a ProgressMeter.
 *
 * User: depristo
 * Date: 8/24/12
 * Time: 11:25 AM
 */
public class ProgressMeterDaemonUnitTest extends BaseTest {
    private GenomeLocParser genomeLocParser;

    @BeforeClass
    public void init() throws FileNotFoundException {
        // b37KGReference comes from BaseTest; gives the parser a real sequence dictionary.
        genomeLocParser = new GenomeLocParser(new CachingIndexedFastaSequenceFile(new File(b37KGReference)));
    }

    // capture and count calls to progress
    private class TestingProgressMeter extends ProgressMeter {
        // Timestamp (ms) of every printProgress call made by the daemon thread.
        final List<Long> progressCalls = new LinkedList<Long>();

        private TestingProgressMeter(final long poll) {
            super(null, "test", new GenomeLocSortedSet(genomeLocParser), poll);
            // Start the meter immediately so the daemon begins polling at construction.
            super.start();
        }

        @Override
        protected synchronized void printProgress(boolean mustPrint) {
            // Record the call instead of printing so tests can count invocations.
            progressCalls.add(System.currentTimeMillis());
        }
    }

    /** Poll periods (ms) crossed with tick counts: {poll, ticks} pairs. */
    @DataProvider(name = "PollingData")
    public Object[][] makePollingData() {
        List<Object[]> tests = new ArrayList<Object[]>();
        for ( final int ticks : Arrays.asList(1, 5, 10) ) {
            for ( final int poll : Arrays.asList(10, 100) ) {
                tests.add(new Object[]{poll, ticks});
            }
        }

        return tests.toArray(new Object[][]{});
    }

    /** Verifies that updateElapsedTimeInNanoseconds advances the periodically-updated runtime. */
    @Test
    public void testPeriodUpdateNano() {
        final ProgressMeter meter = new TestingProgressMeter(10);
        final long currentTime = meter.getRuntimeInNanoseconds();
        meter.updateElapsedTimeInNanoseconds();
        Assert.assertTrue( meter.getRuntimeInNanosecondsUpdatedPeriodically() > currentTime, "Updating the periodic runtime failed" );
    }

    /**
     * End-to-end daemon test: lets the daemon poll for ticks*poll ms, then
     * checks it made roughly the expected number of progress calls.
     * NOTE(review): disabled, and run with successPercentage=90 because the
     * assertions are timing-sensitive.
     */
    @Test(dataProvider = "PollingData", invocationCount = 10, successPercentage = 90, enabled = false)
    public void testProgressMeterDaemon(final long poll, final int ticks) throws InterruptedException {
        final TestingProgressMeter meter = new TestingProgressMeter(poll);
        final ProgressMeterDaemon daemon = meter.getProgressMeterDaemon();

        Assert.assertTrue(daemon.isDaemon());

        Assert.assertFalse(daemon.isDone());
        Thread.sleep(ticks * poll);
        Assert.assertFalse(daemon.isDone());

        daemon.done();
        Assert.assertTrue(daemon.isDone());

        // wait for the thread to actually finish
        daemon.join();

        Assert.assertTrue(meter.progressCalls.size() >= 1,
                "Expected at least one progress update call from daemon thread, but only got " + meter.progressCalls.size() + " with exact calls " + meter.progressCalls);

        // Allow 80% slack on the call count since thread scheduling is imprecise.
        final int tolerance = (int)Math.ceil(0.8 * meter.progressCalls.size());
        Assert.assertTrue(Math.abs(meter.progressCalls.size() - ticks) <= tolerance,
                "Expected " + ticks + " progress calls from daemon thread, but got " + meter.progressCalls.size() + " and a tolerance of only " + tolerance);

        Assert.assertTrue(meter.getRuntimeInNanosecondsUpdatedPeriodically() > 0, "Daemon should have updated our periodic runtime");
    }
}
diff --git a/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/progressmeter/ProgressMeterDataUnitTest.java b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/progressmeter/ProgressMeterDataUnitTest.java
new file mode 100644
index 0000000..2f11f25
--- /dev/null
+++ b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/progressmeter/ProgressMeterDataUnitTest.java
@@ -0,0 +1,86 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.progressmeter;
+
+import org.broadinstitute.gatk.utils.BaseTest;
+import org.broadinstitute.gatk.utils.AutoFormattingTime;
+import org.testng.Assert;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import java.util.concurrent.TimeUnit;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+/**
+ * UnitTests for the ProgressMeterData
+ *
+ * User: depristo
+ * Date: 8/24/12
+ * Time: 11:25 AM
+ * To change this template use File | Settings | File Templates.
+ */
+public class ProgressMeterDataUnitTest extends BaseTest {
+    @Test
+    public void testBasic() {
+        Assert.assertEquals(new ProgressMeterData(1.0, 2, 3).getElapsedSeconds(), 1.0);
+        Assert.assertEquals(new ProgressMeterData(1.0, 2, 3).getUnitsProcessed(), 2);
+        Assert.assertEquals(new ProgressMeterData(1.0, 2, 3).getBpProcessed(), 3);
+    }
+
+    @Test
+    public void testFraction() {
+        final double TOL = 1e-3;
+        Assert.assertEquals(new ProgressMeterData(1.0, 1, 1).calculateFractionGenomeTargetCompleted(10), 0.1, TOL);
+        Assert.assertEquals(new ProgressMeterData(1.0, 1, 2).calculateFractionGenomeTargetCompleted(10), 0.2, TOL);
+        Assert.assertEquals(new ProgressMeterData(1.0, 1, 1).calculateFractionGenomeTargetCompleted(100), 0.01, TOL);
+        Assert.assertEquals(new ProgressMeterData(1.0, 1, 2).calculateFractionGenomeTargetCompleted(100), 0.02, TOL);
+        Assert.assertEquals(new ProgressMeterData(1.0, 1, 1).calculateFractionGenomeTargetCompleted(0), 1.0, TOL);
+    }
+
+    @Test
+    public void testSecondsPerBP() {
+        final double TOL = 1e-3;
+        final long M = 1000000;
+        Assert.assertEquals(new ProgressMeterData(1.0, 1, M).secondsPerMillionBP(), 1.0, TOL);
+        Assert.assertEquals(new ProgressMeterData(1.0, 1, M/10).secondsPerMillionBP(), 10.0, TOL);
+        Assert.assertEquals(new ProgressMeterData(2.0, 1, M).secondsPerMillionBP(), 2.0, TOL);
+        Assert.assertEquals(new ProgressMeterData(1.0, 1, 0).secondsPerMillionBP(), 1e6, TOL);
+    }
+
+    @Test
+    public void testSecondsPerElement() {
+        final double TOL = 1e-3;
+        final long M = 1000000;
+        Assert.assertEquals(new ProgressMeterData(1.0, M, 1).secondsPerMillionElements(), 1.0, TOL);
+        Assert.assertEquals(new ProgressMeterData(1.0, M/10, 1).secondsPerMillionElements(), 10.0, TOL);
+        Assert.assertEquals(new ProgressMeterData(2.00, M, 1).secondsPerMillionElements(), 2.0, TOL);
+        Assert.assertEquals(new ProgressMeterData(1.0, 0, 1).secondsPerMillionElements(), 1e6, TOL);
+    }
+}
diff --git a/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/recalibration/EventTypeUnitTest.java b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/recalibration/EventTypeUnitTest.java
new file mode 100644
index 0000000..e514b67
--- /dev/null
+++ b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/recalibration/EventTypeUnitTest.java
@@ -0,0 +1,61 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.recalibration;
+
+import org.broadinstitute.gatk.utils.BaseTest;
+import org.testng.Assert;
+import org.testng.annotations.Test;
+
+import java.util.*;
+
+public final class EventTypeUnitTest extends BaseTest {
+    @Test
+    public void testEventTypes() {
+        for ( final EventType et : EventType.values() ) {
+            Assert.assertNotNull(et.toString());
+            Assert.assertNotNull(et.prettyPrint());
+            Assert.assertFalse("".equals(et.toString()));
+            Assert.assertFalse("".equals(et.prettyPrint()));
+            Assert.assertEquals(EventType.eventFrom(et.ordinal()), et);
+            Assert.assertEquals(EventType.eventFrom(et.toString()), et);
+        }
+    }
+
+    @Test
+    public void testEventTypesEnumItself() {
+        final Set<String> shortReps = new HashSet<String>();
+        for ( final EventType et : EventType.values() ) {
+            Assert.assertFalse(shortReps.contains(et.toString()), "Short representative for EventType has duplicates for " + et);
+            shortReps.add(et.toString());
+        }
+        Assert.assertEquals(shortReps.size(), EventType.values().length, "Short representatives for EventType aren't unique");
+    }
+
+    @Test(expectedExceptions = IllegalArgumentException.class)
+    public void testBadString() {
+        EventType.eventFrom("asdfhalsdjfalkjsdf");
+    }
+}
diff --git a/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/refdata/RefMetaDataTrackerUnitTest.java b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/refdata/RefMetaDataTrackerUnitTest.java
new file mode 100644
index 0000000..3dbfbfc
--- /dev/null
+++ b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/refdata/RefMetaDataTrackerUnitTest.java
@@ -0,0 +1,290 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.refdata;
+
+import htsjdk.samtools.SAMFileHeader;
+import org.apache.log4j.Logger;
+import htsjdk.tribble.Feature;
+import org.broadinstitute.gatk.utils.BaseTest;
+import org.broadinstitute.gatk.utils.commandline.RodBinding;
+import org.broadinstitute.gatk.utils.commandline.Tags;
+import org.broadinstitute.gatk.utils.contexts.ReferenceContext;
+import org.broadinstitute.gatk.utils.codecs.table.TableFeature;
+import org.broadinstitute.gatk.utils.refdata.utils.GATKFeature;
+import org.broadinstitute.gatk.utils.refdata.utils.RODRecordList;
+import org.broadinstitute.gatk.utils.GenomeLoc;
+import org.broadinstitute.gatk.utils.GenomeLocParser;
+import org.broadinstitute.gatk.utils.sam.ArtificialSAMUtils;
+import htsjdk.variant.variantcontext.Allele;
+import htsjdk.variant.variantcontext.VariantContext;
+import htsjdk.variant.variantcontext.VariantContextBuilder;
+import org.testng.Assert;
+import org.testng.annotations.*;
+import java.util.*;
+import java.util.List;
+
+public class RefMetaDataTrackerUnitTest {
+    final protected static Logger logger = Logger.getLogger(RefMetaDataTrackerUnitTest.class);
+    private static SAMFileHeader header;
+    private ReferenceContext context;
+    private GenomeLocParser genomeLocParser;
+    private GenomeLoc locus;
+    private final static int START_POS = 10;
+    Allele A,C,G,T;
+    VariantContext AC_SNP, AG_SNP, AT_SNP;
+    TableFeature span10_10, span1_20, span10_20;
+
+    @BeforeClass
+    public void beforeClass() {
+        header = ArtificialSAMUtils.createArtificialSamHeader(1, 1, 100);
+        genomeLocParser = new GenomeLocParser(header.getSequenceDictionary());
+        locus = genomeLocParser.createGenomeLoc("chr1", START_POS, START_POS);
+        context = new ReferenceContext(genomeLocParser, locus, (byte)'A');
+        A = Allele.create("A", true);
+        C = Allele.create("C");
+        G = Allele.create("G");
+        T = Allele.create("T");
+        AC_SNP = new VariantContextBuilder("x", "chr1", START_POS, START_POS, Arrays.asList(A, C)).make();
+        AG_SNP = new VariantContextBuilder("x", "chr1", START_POS, START_POS, Arrays.asList(A, G)).make();
+        AT_SNP = new VariantContextBuilder("x", "chr1", START_POS, START_POS, Arrays.asList(A, T)).make();
+        span10_10 = makeSpan(10, 10);
+        span1_20 = makeSpan(1, 20);
+        span10_20 = makeSpan(10, 20);
+    }
+
+    @BeforeMethod
+    public void reset() {
+        RodBinding.resetNameCounter();
+    }
+
+    private class MyTest extends BaseTest.TestDataProvider {
+        public RODRecordList AValues, BValues;
+
+        private MyTest(Class c, final List<? extends Feature> AValues, final List<? extends Feature> BValues) {
+            super(c);
+            this.AValues = AValues == null ? null : makeRODRecord("A", AValues);
+            this.BValues = BValues == null ? null : makeRODRecord("B", BValues);
+        }
+
+        private MyTest(final List<? extends Feature> AValues, final List<? extends Feature> BValues) {
+            super(MyTest.class);
+            this.AValues = AValues == null ? null : makeRODRecord("A", AValues);
+            this.BValues = BValues == null ? null : makeRODRecord("B", BValues);
+        }
+
+        @Override
+        public String toString() {
+            return String.format("A=%s, B=%s", AValues, BValues);
+        }
+
+        private final RODRecordList makeRODRecord(String name, List<? extends Feature> features) {
+            List<GATKFeature> x = new ArrayList<GATKFeature>();
+            for ( Feature f : features )
+                x.add(new GATKFeature.TribbleGATKFeature(genomeLocParser, f, name));
+            return new RODRecordListImpl(name, x, locus);
+        }
+
+        public List<GATKFeature> expected(String name) {
+            if ( name.equals("A+B") ) return allValues();
+            if ( name.equals("A") ) return expectedAValues();
+            if ( name.equals("B") ) return expectedBValues();
+            throw new RuntimeException("FAIL");
+        }
+
+        public List<GATKFeature> allValues() {
+            List<GATKFeature> x = new ArrayList<GATKFeature>();
+            x.addAll(expectedAValues());
+            x.addAll(expectedBValues());
+            return x;
+        }
+
+        public List<GATKFeature> expectedAValues() {
+            return AValues == null ? Collections.<GATKFeature>emptyList() : AValues;
+        }
+
+        public List<GATKFeature> expectedBValues() {
+            return BValues == null ? Collections.<GATKFeature>emptyList() : BValues;
+        }
+
+        public RefMetaDataTracker makeTracker() {
+            List<RODRecordList> x = new ArrayList<RODRecordList>();
+            if ( AValues != null ) x.add(AValues);
+            if ( BValues != null ) x.add(BValues);
+            return new RefMetaDataTracker(x);
+        }
+
+        public int nBoundTracks() {
+            int n = 0;
+            if ( AValues != null ) n++;
+            if ( BValues != null ) n++;
+            return n;
+        }
+    }
+
+    private final TableFeature makeSpan(int start, int stop) {
+        return new TableFeature(genomeLocParser.createGenomeLoc("chr1", start, stop),
+                Collections.<String>emptyList(), Collections.<String>emptyList());
+    }
+
+    @DataProvider(name = "tests")
+    public Object[][] createTests() {
+        new MyTest(null, null);
+        new MyTest(Arrays.asList(AC_SNP), null);
+        new MyTest(Arrays.asList(AC_SNP, AT_SNP), null);
+        new MyTest(Arrays.asList(AC_SNP), Arrays.asList(AG_SNP));
+        new MyTest(Arrays.asList(AC_SNP, AT_SNP), Arrays.asList(AG_SNP));
+        new MyTest(Arrays.asList(AC_SNP, AT_SNP), Arrays.asList(span10_10));
+        new MyTest(Arrays.asList(AC_SNP, AT_SNP), Arrays.asList(span10_10, span10_20));
+        new MyTest(Arrays.asList(AC_SNP, AT_SNP), Arrays.asList(span10_10, span10_20, span1_20));
+
+        // for requires starts
+        new MyTest(Arrays.asList(span1_20), null);
+        new MyTest(Arrays.asList(span10_10, span10_20), null);
+        new MyTest(Arrays.asList(span10_10, span10_20, span1_20), null);
+
+        return MyTest.getTests(MyTest.class);
+    }
+
+    @Test(enabled = true, dataProvider = "tests")
+    public void testRawBindings(MyTest test) {
+        logger.warn("Testing " + test + " for number of bound tracks");
+        RefMetaDataTracker tracker = test.makeTracker();
+        Assert.assertEquals(tracker.getNTracksWithBoundFeatures(), test.nBoundTracks());
+
+        testSimpleBindings("A", tracker, test.AValues);
+        testSimpleBindings("B", tracker, test.BValues);
+    }
+
+    private <T> void testSimpleBindings(String name, RefMetaDataTracker tracker, RODRecordList expected) {
+        List<Feature> asValues = tracker.getValues(Feature.class, name);
+
+        Assert.assertEquals(tracker.hasValues(name), expected != null);
+        Assert.assertEquals(asValues.size(), expected == null ? 0 : expected.size());
+
+        if ( expected != null ) {
+            for ( GATKFeature e : expected ) {
+                boolean foundValue = false;
+                for ( Feature f : asValues ) {
+                    if ( e.getUnderlyingObject() == f ) foundValue = true;
+                }
+                Assert.assertTrue(foundValue, "Never found expected value of " + e.getUnderlyingObject() + " bound to " + name + " in " + tracker);
+            }
+        }
+    }
+
+    @Test(enabled = true, dataProvider = "tests")
+    public void testGettersAsString(MyTest test) {
+        logger.warn("Testing " + test + " for get() methods");
+        RefMetaDataTracker tracker = test.makeTracker();
+
+        for ( String name : Arrays.asList("A+B", "A", "B") ) {
+            List<Feature> v1 = name.equals("A+B") ? tracker.getValues(Feature.class) : tracker.getValues(Feature.class, name);
+            testGetter(name, v1, test.expected(name), true, tracker);
+
+            List<Feature> v2 = name.equals("A+B") ? tracker.getValues(Feature.class, locus) : tracker.getValues(Feature.class, name, locus);
+            testGetter(name, v2, startingHere(test.expected(name)), true, tracker);
+
+            Feature v3 = name.equals("A+B") ? tracker.getFirstValue(Feature.class) : tracker.getFirstValue(Feature.class, name);
+            testGetter(name, Arrays.asList(v3), test.expected(name), false, tracker);
+
+            Feature v4 = name.equals("A+B") ? tracker.getFirstValue(Feature.class, locus) : tracker.getFirstValue(Feature.class, name, locus);
+            testGetter(name, Arrays.asList(v4), startingHere(test.expected(name)), false, tracker);
+        }
+    }
+
+    @Test(enabled = true, dataProvider = "tests")
+    public void testGettersAsRodBindings(MyTest test) {
+        logger.warn("Testing " + test + " for get() methods as RodBindings");
+        RefMetaDataTracker tracker = test.makeTracker();
+
+        for ( String nameAsString : Arrays.asList("A", "B") ) {
+            RodBinding<Feature> binding = new RodBinding<Feature>(Feature.class, nameAsString, "none", "vcf", new Tags());
+            List<Feature> v1 = tracker.getValues(binding);
+            testGetter(nameAsString, v1, test.expected(nameAsString), true, tracker);
+
+            List<Feature> v2 = tracker.getValues(binding, locus);
+            testGetter(nameAsString, v2, startingHere(test.expected(nameAsString)), true, tracker);
+
+            Feature v3 = tracker.getFirstValue(binding);
+            testGetter(nameAsString, Arrays.asList(v3), test.expected(nameAsString), false, tracker);
+
+            Feature v4 = tracker.getFirstValue(binding, locus);
+            testGetter(nameAsString, Arrays.asList(v4), startingHere(test.expected(nameAsString)), false, tracker);
+        }
+    }
+
+    @Test(enabled = true, dataProvider = "tests")
+    public void testGettersAsListOfRodBindings(MyTest test) {
+        logger.warn("Testing " + test + " for get() methods for List<RodBindings>");
+        RefMetaDataTracker tracker = test.makeTracker();
+
+        String nameAsString = "A+B";
+        RodBinding<Feature> A = new RodBinding<Feature>(Feature.class, "A", "none", "vcf", new Tags());
+        RodBinding<Feature> B = new RodBinding<Feature>(Feature.class, "B", "none", "vcf", new Tags());
+        List<RodBinding<Feature>> binding = Arrays.asList(A, B);
+
+        List<Feature> v1 = tracker.getValues(binding);
+        testGetter(nameAsString, v1, test.expected(nameAsString), true, tracker);
+
+        List<Feature> v2 = tracker.getValues(binding, locus);
+        testGetter(nameAsString, v2, startingHere(test.expected(nameAsString)), true, tracker);
+
+        Feature v3 = tracker.getFirstValue(binding);
+        testGetter(nameAsString, Arrays.asList(v3), test.expected(nameAsString), false, tracker);
+
+        Feature v4 = tracker.getFirstValue(binding, locus);
+        testGetter(nameAsString, Arrays.asList(v4), startingHere(test.expected(nameAsString)), false, tracker);
+    }
+
+    private List<GATKFeature> startingHere(List<GATKFeature> l) {
+        List<GATKFeature> x = new ArrayList<GATKFeature>();
+        for ( GATKFeature f : l ) if ( f.getStart() == locus.getStart() ) x.add(f);
+        return x;
+    }
+
+    private void testGetter(String name, List<Feature> got, List<GATKFeature> expected, boolean requireExact, RefMetaDataTracker tracker) {
+        if ( got.size() == 1 && got.get(0) == null )
+            got = Collections.emptyList();
+
+        if ( requireExact )
+            Assert.assertEquals(got.size(), expected.size());
+
+        boolean foundAny = false;
+        for ( GATKFeature e : expected ) {
+            boolean found1 = false;
+            for ( Feature got1 : got ) {
+                if ( e.getUnderlyingObject() == got1 )
+                    found1 = true;
+            }
+            if ( requireExact )
+                Assert.assertTrue(found1, "Never found expected GATKFeature " + e + " bound to " + name + " in " + tracker);
+            foundAny = found1 || foundAny;
+        }
+
+        if ( ! requireExact && ! expected.isEmpty() )
+            Assert.assertTrue(foundAny, "Never found any got values matching one of the expected values bound to " + name + " in " + tracker);
+    }
+}
diff --git a/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/refdata/tracks/FeatureManagerUnitTest.java b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/refdata/tracks/FeatureManagerUnitTest.java
new file mode 100644
index 0000000..0194d49
--- /dev/null
+++ b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/refdata/tracks/FeatureManagerUnitTest.java
@@ -0,0 +1,163 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.refdata.tracks;
+
+
+import htsjdk.samtools.reference.IndexedFastaSequenceFile;
+import htsjdk.tribble.Feature;
+import htsjdk.tribble.FeatureCodec;
+import org.broadinstitute.gatk.utils.BaseTest;
+import org.broadinstitute.gatk.utils.codecs.table.BedTableCodec;
+import org.broadinstitute.gatk.utils.codecs.table.TableFeature;
+import org.broadinstitute.gatk.utils.GenomeLocParser;
+import htsjdk.variant.vcf.VCF3Codec;
+import htsjdk.variant.vcf.VCFCodec;
+import org.broadinstitute.gatk.utils.exceptions.UserException;
+import org.broadinstitute.gatk.utils.fasta.CachingIndexedFastaSequenceFile;
+import htsjdk.variant.variantcontext.VariantContext;
+import org.testng.Assert;
+import org.testng.annotations.BeforeMethod;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+import java.io.*;
+import java.util.*;
+
+
+/**
+ * @author depristo
+ *
+ * UnitTests for RMD FeatureManager
+ */
+public class FeatureManagerUnitTest extends BaseTest {
+    private static final File RANDOM_FILE = new File(publicTestDir+ "exampleGATKReport.eval");
+    private static final File VCF3_FILE = new File(privateTestDir + "vcf3.vcf");
+    private static final File VCF4_FILE = new File(privateTestDir + "HiSeq.10000.vcf");
+    private static final File VCF4_FILE_GZ = new File(privateTestDir + "HiSeq.10000.vcf.gz");
+    private static final File VCF4_FILE_BGZIP = new File(privateTestDir + "HiSeq.10000.bgzip.vcf.gz");
+
+    private FeatureManager manager;
+    private GenomeLocParser genomeLocParser;
+
+    @BeforeMethod
+    public void setup() {
+        File referenceFile = new File(b36KGReference);
+        try {
+            IndexedFastaSequenceFile seq = new CachingIndexedFastaSequenceFile(referenceFile);
+            genomeLocParser = new GenomeLocParser(seq);
+            manager = new FeatureManager();
+        }
+        catch(FileNotFoundException ex) {
+            throw new UserException.CouldNotReadInputFile(referenceFile,ex);
+        }
+    }
+
+    @Test
+    public void testManagerCreation() {
+        Assert.assertTrue(manager.getFeatureDescriptors().size() > 0);
+    }
+
+    private class FMTest extends BaseTest.TestDataProvider {
+        public Class codec;
+        public Class<? extends Feature> feature;
+        public String name;
+        public File associatedFile;
+
+        private FMTest(final Class feature, final Class codec, final String name, final File file) {
+            super(FMTest.class);
+            this.codec = codec;
+            this.feature = feature;
+            this.name = name;
+            this.associatedFile = file;
+        }
+
+        public void assertExpected(FeatureManager.FeatureDescriptor featureDescriptor) {
+            Assert.assertEquals(featureDescriptor.getCodecClass(), codec);
+            Assert.assertEquals(featureDescriptor.getFeatureClass(), feature);
+            Assert.assertEquals(featureDescriptor.getName().toLowerCase(), name.toLowerCase());
+        }
+
+        public String toString() {
+            return String.format("FMTest name=%s codec=%s feature=%s file=%s",
+                    name, codec.getSimpleName(), feature.getSimpleName(), associatedFile);
+        }
+    }
+
+    @DataProvider(name = "tests")
+    public Object[][] createTests() {
+        new FMTest(VariantContext.class, VCF3Codec.class, "VCF3", VCF3_FILE);
+        new FMTest(VariantContext.class, VCFCodec.class, "VCF", VCF4_FILE);
+        new FMTest(VariantContext.class, VCFCodec.class, "VCF", VCF4_FILE_GZ);
+        new FMTest(VariantContext.class, VCFCodec.class, "VCF", VCF4_FILE_BGZIP);
+        new FMTest(TableFeature.class, BedTableCodec.class, "bedtable", null);
+        return FMTest.getTests(FMTest.class);
+    }
+
+    @Test(dataProvider = "tests")
+    public void testGetByFile(FMTest params) {
+        if ( params.associatedFile != null ) {
+            FeatureManager.FeatureDescriptor byFile = manager.getByFiletype(params.associatedFile);
+            Assert.assertNotNull(byFile, "Couldn't find any type associated with file " + params.associatedFile);
+            params.assertExpected(byFile);
+        }
+    }
+
+    @Test
+    public void testGetByFileNoMatch() {
+        FeatureManager.FeatureDescriptor byFile = manager.getByFiletype(RANDOM_FILE);
+        Assert.assertNull(byFile, "Found type " + byFile + " associated with RANDOM, non-Tribble file " + RANDOM_FILE);
+    }
+
+    @Test(dataProvider = "tests")
+    public void testGetters(FMTest params) {
+        params.assertExpected(manager.getByCodec(params.codec));
+        params.assertExpected(manager.getByName(params.name));
+        params.assertExpected(manager.getByName(params.name.toLowerCase()));
+        params.assertExpected(manager.getByName(params.name.toUpperCase()));
+
+        Collection<FeatureManager.FeatureDescriptor> descriptors = manager.getByFeature(params.feature);
+        Assert.assertTrue(descriptors.size() > 0, "Look up by FeatureClass failed");
+    }
+
+    @Test
+    public void testUserFriendlyList() {
+        Assert.assertTrue(manager.userFriendlyListOfAvailableFeatures().length() > 0, "Expected at least one codec to be listed");
+        Assert.assertTrue(manager.userFriendlyListOfAvailableFeatures().split(",").length > 0, "Expected at least two codecs, but only saw one");
+    }
+
+    @Test
+    public void testCodecCreation() {
+        FeatureManager.FeatureDescriptor descriptor = manager.getByName("vcf");
+        Assert.assertNotNull(descriptor, "Couldn't find VCF feature descriptor!");
+
+        FeatureCodec c = manager.createCodec(descriptor, "foo", genomeLocParser, null);
+        Assert.assertNotNull(c, "Couldn't create codec");
+        Assert.assertEquals(c.getClass(), descriptor.getCodecClass());
+        Assert.assertEquals(c.getFeatureType(), descriptor.getFeatureClass());
+    }
+
+}
+
diff --git a/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/refdata/tracks/RMDTrackBuilderUnitTest.java b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/refdata/tracks/RMDTrackBuilderUnitTest.java
new file mode 100644
index 0000000..7b25724
--- /dev/null
+++ b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/refdata/tracks/RMDTrackBuilderUnitTest.java
@@ -0,0 +1,190 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.refdata.tracks;
+
+
+import htsjdk.samtools.reference.IndexedFastaSequenceFile;
+import htsjdk.tribble.Tribble;
+import htsjdk.tribble.index.Index;
+import htsjdk.tribble.util.LittleEndianOutputStream;
+import htsjdk.variant.vcf.VCFCodec;
+import org.broadinstitute.gatk.utils.exceptions.UserException;
+import org.testng.Assert;
+import org.broadinstitute.gatk.utils.BaseTest;
+import org.broadinstitute.gatk.utils.GenomeLocParser;
+import org.broadinstitute.gatk.utils.fasta.CachingIndexedFastaSequenceFile;
+
+import org.testng.annotations.BeforeMethod;
+
+import org.testng.annotations.Test;
+
+import java.io.*;
+import java.nio.channels.FileChannel;
+
+
+/**
+ * @author aaron
+ *         <p/>
+ *         Class RMDTrackBuilderUnitTest
+ *         <p/>
+ *         Testing out the builder for tribble Tracks
+ */
+public class RMDTrackBuilderUnitTest extends BaseTest {
+    private RMDTrackBuilder builder;
+    private IndexedFastaSequenceFile seq;
+    private GenomeLocParser genomeLocParser;
+
+    @BeforeMethod
+    public void setup() {
+        final File referenceFile = new File(b37KGReference);
+        try {
+            seq = new CachingIndexedFastaSequenceFile(referenceFile);
+        }
+        catch (FileNotFoundException ex) {
+            throw new UserException.CouldNotReadInputFile(referenceFile, ex);
+        }
+        genomeLocParser = new GenomeLocParser(seq);
+
+        // We have to disable auto-index creation/locking in the RMDTrackBuilder for tests,
+        // as the lock acquisition calls were intermittently hanging on our farm. This unfortunately
+        // means that we can't include tests for the auto-index creation feature.
+        builder = new RMDTrackBuilder(seq.getSequenceDictionary(), genomeLocParser, null, true, null);
+    }
+
+    @Test
+    public void testBuilder() {
+        // The builder should always know about at least one registered feature descriptor.
+        Assert.assertTrue(builder.getFeatureManager().getFeatureDescriptors().size() > 0);
+    }
+
+    @Test
+    public void testDisableAutoIndexGeneration() throws IOException {
+        final File unindexedVCF = new File(privateTestDir + "unindexed.vcf");
+        final File unindexedVCFIndex = Tribble.indexFile(unindexedVCF);
+
+        final Index index = builder.loadIndex(unindexedVCF, new VCFCodec());
+
+        // With auto-index creation disabled, an index is still returned but never written to disk.
+        Assert.assertFalse(unindexedVCFIndex.exists());
+        Assert.assertNotNull(index);
+    }
+
+    @Test
+    public void testLoadOnDiskIndex() {
+        final File originalVCF = new File(privateTestDir + "vcf4.1.example.vcf");
+        final File tempVCFWithCorrectIndex = createTempVCFFileAndIndex(originalVCF, false);
+        final File tempVCFIndexFile = Tribble.indexFile(tempVCFWithCorrectIndex);
+
+        final Index index = builder.loadFromDisk(tempVCFWithCorrectIndex, tempVCFIndexFile);
+
+        Assert.assertNotNull(index);
+        Assert.assertTrue(tempVCFIndexFile.exists());
+
+        // The index loaded from disk should be equivalent to one freshly built in memory.
+        final Index inMemoryIndex = builder.createIndexInMemory(tempVCFWithCorrectIndex, new VCFCodec());
+        Assert.assertTrue(index.equalsIgnoreProperties(inMemoryIndex));
+    }
+
+    @Test
+    public void testLoadOnDiskOutdatedIndex() {
+        final File originalVCF = new File(privateTestDir + "vcf4.1.example.vcf");
+        final File tempVCFWithOutdatedIndex = createTempVCFFileAndIndex(originalVCF, true);
+        final File tempVCFIndexFile = Tribble.indexFile(tempVCFWithOutdatedIndex);
+
+        final Index index = builder.loadFromDisk(tempVCFWithOutdatedIndex, tempVCFIndexFile);
+
+        // loadFromDisk() should return null to indicate that the index is outdated and should not be used,
+        // but should not delete the index since our builder has disableAutoIndexCreation set to true
+        Assert.assertNull(index);
+        Assert.assertTrue(tempVCFIndexFile.exists());
+    }
+
+    /**
+     * Create a temporary vcf file and an associated index file, which may be set to be out-of-date
+     * relative to the vcf
+     *
+     * @param vcfFile the vcf file
+     * @param createOutOfDateIndex if true, ensure that the temporary vcf file is modified after the index
+     * @return a file pointing to the new tmp location, with accompanying index
+     */
+    private File createTempVCFFileAndIndex( final File vcfFile, final boolean createOutOfDateIndex ) {
+        try {
+            final File tmpFile = createTempFile("RMDTrackBuilderUnitTest", "");
+            final File tmpIndex = Tribble.indexFile(tmpFile);
+            tmpIndex.deleteOnExit();
+
+            copyFile(vcfFile, tmpFile);
+            final Index inMemoryIndex = builder.createIndexInMemory(tmpFile, new VCFCodec());
+            final LittleEndianOutputStream indexOutputStream = new LittleEndianOutputStream(new FileOutputStream(tmpIndex));
+
+            // If requested, modify the tribble file after the index. Otherwise, modify the index last.
+            // The 2-second sleep guarantees the two files end up with distinguishable modification times.
+            if ( createOutOfDateIndex ) {
+                inMemoryIndex.write(indexOutputStream);
+                indexOutputStream.close();
+                Thread.sleep(2000);
+                copyFile(vcfFile, tmpFile);
+            }
+            else {
+                copyFile(vcfFile, tmpFile);
+                Thread.sleep(2000);
+                inMemoryIndex.write(indexOutputStream);
+                indexOutputStream.close();
+            }
+
+            return tmpFile;
+        } catch (IOException e) {
+            Assert.fail("Unable to create temporary file");   // typo fix: message previously read "temperary"
+        } catch (InterruptedException e) {
+            // Restore the interrupt flag before failing, per standard InterruptedException handling.
+            Thread.currentThread().interrupt();
+            Assert.fail("Somehow our thread got interrupted");
+        }
+        return null;
+    }
+
+    /**
+     * copy a file, from http://www.exampledepot.com/egs/java.nio/File2File.html
+     *
+     * @param srFile the source file
+     * @param dtFile the destination file
+     */
+    private static void copyFile(File srFile, File dtFile) {
+        FileChannel srcChannel = null;
+        FileChannel dstChannel = null;
+        try {
+            // Create channels on the source and destination, then copy the contents across.
+            srcChannel = new FileInputStream(srFile).getChannel();
+            dstChannel = new FileOutputStream(dtFile).getChannel();
+            dstChannel.transferFrom(srcChannel, 0, srcChannel.size());
+        } catch (IOException e) {
+            e.printStackTrace();
+            Assert.fail("Unable to process copy " + e.getMessage());
+        } finally {
+            // Bug fix: the original leaked both channels if the copy failed part-way through.
+            closeQuietly(srcChannel);
+            closeQuietly(dstChannel);
+        }
+    }
+
+    /** Closes a channel if non-null, swallowing close errors (best-effort cleanup in a test helper). */
+    private static void closeQuietly(final FileChannel channel) {
+        if ( channel != null ) {
+            try {
+                channel.close();
+            } catch (IOException e) {
+                // ignored: nothing useful can be done about a failed close here
+            }
+        }
+    }
+}
+
diff --git a/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/refdata/utils/CheckableCloseableTribbleIterator.java b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/refdata/utils/CheckableCloseableTribbleIterator.java
new file mode 100644
index 0000000..191c689
--- /dev/null
+++ b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/refdata/utils/CheckableCloseableTribbleIterator.java
@@ -0,0 +1,90 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.refdata.utils;
+
+import htsjdk.tribble.CloseableTribbleIterator;
+import htsjdk.tribble.Feature;
+
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.List;
+
+/**
+ * Adapter to allow checking if the wrapped iterator was closed.
+ * Creating a CCTI also adds it to the list returned from getThreadIterators().
+ * @param <T> feature
+ */
+public class CheckableCloseableTribbleIterator<T extends Feature> implements CloseableTribbleIterator<T> {
+    // The wrapped iterator; all iteration calls are forwarded to it.
+    private final CloseableTribbleIterator<T> delegate;
+    private boolean closed = false;
+
+    // Per-thread registry of every wrapper created since the last clearThreadIterators() call.
+    private static ThreadLocal<List<CheckableCloseableTribbleIterator<? extends Feature>>> threadIterators =
+            new ThreadLocal<List<CheckableCloseableTribbleIterator<? extends Feature>>>() {
+                @Override
+                protected List<CheckableCloseableTribbleIterator<? extends Feature>> initialValue() {
+                    return new ArrayList<CheckableCloseableTribbleIterator<? extends Feature>>();
+                }
+            };
+
+    public CheckableCloseableTribbleIterator(CloseableTribbleIterator<T> iterator) {
+        this.delegate = iterator;
+        // Register this wrapper so tests can later inspect it via getThreadIterators().
+        threadIterators.get().add(this);
+    }
+
+    /**
+     * Returns the list of iterators created on this thread since the last time clearThreadIterators() was called.
+     * @return the list of iterators created on this thread since the last time clearThreadIterators() was called.
+     */
+    public static List<CheckableCloseableTribbleIterator<? extends Feature>> getThreadIterators() {
+        return threadIterators.get();
+    }
+
+    /** Clears the tracked list of iterators created on this thread. */
+    public static void clearThreadIterators() {
+        threadIterators.get().clear();
+    }
+
+    @Override
+    public void close() {
+        // Close the underlying iterator first, then record that close() was invoked.
+        delegate.close();
+        closed = true;
+    }
+
+    /**
+     * Returns true if this iterator was properly closed.
+     * @return true if this iterator was properly closed.
+     */
+    public boolean isClosed() {
+        return closed;
+    }
+
+    @Override
+    public Iterator<T> iterator() {
+        return this;
+    }
+
+    @Override
+    public boolean hasNext() {
+        return delegate.hasNext();
+    }
+
+    @Override
+    public T next() {
+        return delegate.next();
+    }
+
+    @Override
+    public void remove() {
+        delegate.remove();
+    }
+}
diff --git a/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/refdata/utils/FeatureToGATKFeatureIteratorUnitTest.java b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/refdata/utils/FeatureToGATKFeatureIteratorUnitTest.java
new file mode 100644
index 0000000..789ef18
--- /dev/null
+++ b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/refdata/utils/FeatureToGATKFeatureIteratorUnitTest.java
@@ -0,0 +1,61 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.refdata.utils;
+
+import htsjdk.samtools.reference.IndexedFastaSequenceFile;
+import htsjdk.tribble.Feature;
+import org.broadinstitute.gatk.utils.BaseTest;
+import org.broadinstitute.gatk.utils.GenomeLoc;
+import org.broadinstitute.gatk.utils.GenomeLocParser;
+import htsjdk.variant.vcf.VCFCodec;
+import org.broadinstitute.gatk.utils.fasta.CachingIndexedFastaSequenceFile;
+import org.testng.Assert;
+import org.testng.annotations.Test;
+
+import java.io.File;
+import java.io.IOException;
+
+public class FeatureToGATKFeatureIteratorUnitTest extends BaseTest {
+    /** Verifies that closing a FeatureToGATKFeatureIterator also closes the wrapped tribble iterator. */
+    @Test
+    @SuppressWarnings("unchecked")
+    public void testCloseFilePointers() throws IOException {
+        final String chr = "20";
+        final IndexedFastaSequenceFile seq = new CachingIndexedFastaSequenceFile(new File(BaseTest.hg19Reference));
+        final GenomeLocParser parser = new GenomeLocParser(seq);
+        final File vcfFile = new File(privateTestDir + "NA12878.hg19.example1.vcf");
+        final VCFCodec codec = new VCFCodec();
+        final TestFeatureReader reader = new TestFeatureReader(vcfFile.getAbsolutePath(), codec);
+        final CheckableCloseableTribbleIterator<Feature> tribbleIterator = reader.query(chr, 1, 100000);
+        final FeatureToGATKFeatureIterator gatkIterator = new FeatureToGATKFeatureIterator(parser, tribbleIterator, "test");
+
+        Assert.assertTrue(gatkIterator.hasNext(), "GATK feature iterator does not have a next value.");
+        final GenomeLoc gatkLocation = gatkIterator.next().getLocation();
+        Assert.assertEquals(gatkLocation.getContig(), chr, "Instead of chr 20 rod iterator was at location " + gatkLocation);
+
+        // The wrapped iterator must stay open until the GATK iterator is explicitly closed.
+        Assert.assertFalse(tribbleIterator.isClosed(), "Tribble iterator is closed but should be still open.");
+        gatkIterator.close();
+        Assert.assertTrue(tribbleIterator.isClosed(), "Tribble iterator is open but should be now closed.");
+        reader.close();
+    }
+}
diff --git a/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/refdata/utils/FlashBackIteratorUnitTest.java b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/refdata/utils/FlashBackIteratorUnitTest.java
new file mode 100644
index 0000000..5ca887e
--- /dev/null
+++ b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/refdata/utils/FlashBackIteratorUnitTest.java
@@ -0,0 +1,369 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.refdata.utils;
+
+import htsjdk.samtools.SAMFileHeader;
+import htsjdk.samtools.SAMSequenceDictionary;
+import org.testng.Assert;
+import org.broadinstitute.gatk.utils.BaseTest;
+import org.broadinstitute.gatk.utils.refdata.ReferenceOrderedDatum;
+import org.broadinstitute.gatk.utils.GenomeLoc;
+import org.broadinstitute.gatk.utils.GenomeLocParser;
+import org.broadinstitute.gatk.utils.sam.ArtificialSAMUtils;
+
+import org.testng.annotations.BeforeMethod;
+
+import org.testng.annotations.Test;
+
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.util.AbstractList;
+import java.util.ArrayList;
+import java.util.List;
+
+
+/**
+ * @author aaron
+ *         <p/>
+ *         Class FlashBackIteratorUnitTest
+ *         <p/>
+ *         just like a grateful dead show...this will be prone to flashbacks
+ */
+public class FlashBackIteratorUnitTest extends BaseTest {
+    private SAMFileHeader header = ArtificialSAMUtils.createArtificialSamHeader(NUMBER_OF_CHROMOSOMES, STARTING_CHROMOSOME, CHROMOSOME_SIZE);
+    private static final int NUMBER_OF_CHROMOSOMES = 5;
+    private static final int STARTING_CHROMOSOME = 1;
+    private static final int CHROMOSOME_SIZE = 1000;
+
+    private String firstContig;
+    private GenomeLocParser genomeLocParser;
+
+    @BeforeMethod
+    public void setup() {
+        genomeLocParser = new GenomeLocParser(header.getSequenceDictionary());
+        firstContig = header.getSequenceDictionary().getSequence(0).getSequenceName();
+    }
+
+    @Test
+    public void testBasicIteration() {
+        GenomeLoc loc = genomeLocParser.createGenomeLoc(firstContig, 0, 0);
+        FlashBackIterator iter = new FlashBackIterator(new FakeSeekableRODIterator(genomeLocParser,loc));
+        GenomeLoc lastLocation = null;
+        for (int x = 0; x < 10; x++) {
+            iter.next();
+            GenomeLoc cur = iter.position();
+            if (lastLocation != null) {
+                Assert.assertTrue(lastLocation.isBefore(cur));
+            }
+            lastLocation = cur;
+        }
+    }
+
+    @Test
+    public void testBasicIterationThenFlashBack() {
+        GenomeLoc loc = genomeLocParser.createGenomeLoc(firstContig, 0, 0);
+        FlashBackIterator iter = new FlashBackIterator(new FakeSeekableRODIterator(genomeLocParser,loc));
+        GenomeLoc lastLocation = null;
+        for (int x = 0; x < 10; x++) {
+            iter.next();
+            GenomeLoc cur = iter.position();
+            if (lastLocation != null) {
+                Assert.assertTrue(lastLocation.isBefore(cur));
+            }
+            lastLocation = cur;
+        }
+        iter.flashBackTo(genomeLocParser.createGenomeLoc(firstContig, 2));
+    }
+
+    @Test
+    public void testBasicIterationThenFlashBackThenIterate() {
+        GenomeLoc loc = genomeLocParser.createGenomeLoc(firstContig, 0, 0);
+        FlashBackIterator iter = new FlashBackIterator(new FakeSeekableRODIterator(genomeLocParser,loc));
+        GenomeLoc lastLocation = null;
+        for (int x = 0; x < 10; x++) {
+            iter.next();
+            GenomeLoc cur = iter.position();
+            if (lastLocation != null) {
+                Assert.assertTrue(lastLocation.isBefore(cur));
+            }
+            lastLocation = cur;
+        }
+        iter.flashBackTo(genomeLocParser.createGenomeLoc(firstContig, 1));
+        int count = 0;
+        while (iter.hasNext()) {
+            count++;
+            iter.next();
+        }
+        Assert.assertEquals(count, 10);
+    }
+
+
+    @Test
+    public void testFlashBackTruth() {
+        GenomeLoc loc = genomeLocParser.createGenomeLoc(firstContig, 0, 0);
+        LocationAwareSeekableRODIterator backIter = new FakeSeekableRODIterator(genomeLocParser,loc);
+        // remove the first three records
+        backIter.next();
+        backIter.next();
+        backIter.next();
+        FlashBackIterator iter = new FlashBackIterator(backIter);
+        GenomeLoc lastLocation = null;
+        for (int x = 0; x < 10; x++) {
+            iter.next();
+            GenomeLoc cur = iter.position();
+            if (lastLocation != null) {
+                Assert.assertTrue(lastLocation.isBefore(cur));
+            }
+            lastLocation = cur;
+        }
+        Assert.assertTrue(iter.canFlashBackTo(genomeLocParser.createGenomeLoc(firstContig, 5)));
+        Assert.assertTrue(iter.canFlashBackTo(genomeLocParser.createGenomeLoc(firstContig, 15)));
+        Assert.assertTrue(!iter.canFlashBackTo(genomeLocParser.createGenomeLoc(firstContig, 2)));
+        Assert.assertTrue(!iter.canFlashBackTo(genomeLocParser.createGenomeLoc(firstContig, 1)));
+    }
+
+    @Test
+    public void testBasicIterationThenFlashBackHalfWayThenIterate() {
+        GenomeLoc loc = genomeLocParser.createGenomeLoc(firstContig, 0, 0);
+        FlashBackIterator iter = new FlashBackIterator(new FakeSeekableRODIterator(genomeLocParser,loc));
+        GenomeLoc lastLocation = null;
+        for (int x = 0; x < 10; x++) {
+            iter.next();
+            GenomeLoc cur = iter.position();
+            if (lastLocation != null) {
+                Assert.assertTrue(lastLocation.isBefore(cur));
+            }
+            lastLocation = cur;
+        }
+        iter.flashBackTo(genomeLocParser.createGenomeLoc(firstContig, 5));
+        int count = 0;
+        while (iter.hasNext()) {
+            count++;
+            iter.next();
+        }
+        Assert.assertEquals(count, 6); // chr1:5, 6, 7, 8, 9, and 10
+    }
+}
+
+
+/** Fake iterator that emits ten synthetic records, each one base past the previous one. For testing only. */
+class FakeSeekableRODIterator implements LocationAwareSeekableRODIterator {
+    private GenomeLocParser genomeLocParser;
+
+    // current location
+    private GenomeLoc currentLocation;
+    private FakeRODatum lastEmitted;
+    private int remainingRecords = 10;
+
+    public FakeSeekableRODIterator(GenomeLocParser genomeLocParser, GenomeLoc startingLoc) {
+        this.genomeLocParser = genomeLocParser;
+        // Start one base past the supplied location.
+        this.currentLocation = genomeLocParser.createGenomeLoc(startingLoc.getContig(), startingLoc.getStart() + 1, startingLoc.getStop() + 1);
+    }
+
+    /**
+     * Gets the header associated with the backing input stream.
+     * @return always null for this fake.
+     */
+    @Override
+    public Object getHeader() {
+        return null;
+    }
+
+    /**
+     * Gets the sequence dictionary associated with the backing input stream.
+     * @return always null for this fake.
+     */
+    @Override
+    public SAMSequenceDictionary getSequenceDictionary() {
+        return null;
+    }
+
+
+    @Override
+    public GenomeLoc peekNextLocation() {
+        System.err.println("Peek Next -> " + currentLocation);
+        return currentLocation;
+    }
+
+    @Override
+    public GenomeLoc position() {
+        return currentLocation;
+    }
+
+    @Override
+    public RODRecordList seekForward(GenomeLoc interval) {
+        this.currentLocation = interval;
+        return next();
+    }
+
+    @Override
+    public boolean hasNext() {
+        return remainingRecords > 0;
+    }
+
+    @Override
+    public RODRecordList next() {
+        // Emit a record at the current location, then slide the window forward by one base.
+        final RODRecordList records = new FakeRODRecordList();
+        lastEmitted = new FakeRODatum("STUPIDNAME", currentLocation);
+        currentLocation = genomeLocParser.createGenomeLoc(currentLocation.getContig(), currentLocation.getStart() + 1, currentLocation.getStop() + 1);
+        records.add(lastEmitted);
+        remainingRecords--;
+        return records;
+    }
+
+    @Override
+    public void remove() {
+        throw new IllegalStateException("GRRR");
+    }
+
+    @Override
+    public void close() {
+        // nothing to do
+    }
+}
+
+
+/** for testing only */
+class FakeRODatum extends GATKFeature implements ReferenceOrderedDatum {
+
+    // Fixed genomic location assigned at construction; every accessor derives from it.
+    final GenomeLoc loc;
+
+    public FakeRODatum(String name, GenomeLoc location) {
+        super(name);
+        this.loc = location;
+    }
+
+    /** Always reports the fixed name "false", regardless of the name passed at construction. */
+    @Override
+    public String getName() {
+        return "false";
+    }
+
+    @Override
+    public boolean parseLine(Object header, String[] parts) throws IOException {
+        return false;
+    }
+
+    @Override
+    public String toSimpleString() {
+        return "";
+    }
+
+    @Override
+    public String repl() {
+        return "";
+    }
+
+    /**
+     * Used by the ROD system to determine how to split input lines
+     *
+     * @return Regex string delimiter separating fields
+     */
+    @Override
+    public String delimiterRegex() {
+        return "";
+    }
+
+    @Override
+    public GenomeLoc getLocation() {
+        return loc;
+    }
+
+    @Override
+    public Object getUnderlyingObject() {
+        return this;
+    }
+
+    /** Orders fake data purely by genomic location. */
+    @Override
+    public int compareTo(ReferenceOrderedDatum that) {
+        return loc.compareTo(that.getLocation());
+    }
+
+    /**
+     * Backdoor hook to read header, meta-data, etc. associated with the file.  Will be
+     * called by the ROD system before streaming starts
+     *
+     * @param source source data file on disk from which this rod stream will be pulled
+     *
+     * @return a header object that will be passed to parseLine command
+     */
+    @Override
+    public Object initialize(File source) throws FileNotFoundException {
+        return null;
+    }
+
+    @Override
+    public String getChr() {
+        return getContig();
+    }
+
+    @Override
+    public String getContig() {
+        return loc.getContig();
+    }
+
+    @Override
+    public int getStart() {
+        return (int) loc.getStart();
+    }
+
+    @Override
+    public int getEnd() {
+        return (int) loc.getStop();
+    }
+}
+
+/** Minimal RODRecordList backed by an ArrayList, for testing only. */
+class FakeRODRecordList extends AbstractList<GATKFeature> implements RODRecordList {
+    private final List<GATKFeature> list = new ArrayList<GATKFeature>();
+
+    // Consistency fix: this overrides AbstractList.add but was the only method missing @Override.
+    @Override
+    public boolean add(GATKFeature data) {
+        return list.add(data);
+    }
+
+    @Override
+    public GATKFeature get(int i) {
+        return list.get(i);
+    }
+
+    @Override
+    public int size() {
+        return list.size();
+    }
+
+    /** Location of the first record. NOTE(review): throws IndexOutOfBoundsException on an empty list. */
+    @Override
+    public GenomeLoc getLocation() {
+        return list.get(0).getLocation();
+    }
+
+    @Override
+    public String getName() {
+        return "test";
+    }
+
+    /** Orders record lists by the location of their first record. */
+    @Override
+    public int compareTo(RODRecordList rodRecordList) {
+        return this.list.get(0).getLocation().compareTo(rodRecordList.getLocation());
+    }
+}
diff --git a/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/refdata/utils/TestFeatureReader.java b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/refdata/utils/TestFeatureReader.java
new file mode 100644
index 0000000..7b9a0fe
--- /dev/null
+++ b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/refdata/utils/TestFeatureReader.java
@@ -0,0 +1,53 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.refdata.utils;
+
+import htsjdk.tribble.Feature;
+import htsjdk.tribble.FeatureCodec;
+import htsjdk.tribble.TribbleIndexedFeatureReader;
+
+import java.io.IOException;
+
+/**
+ * Feature reader with additional test utilities. The iterators can be checked to see if they are closed.
+ */
+public class TestFeatureReader extends TribbleIndexedFeatureReader<Feature, Object> {
+    public TestFeatureReader(String featurePath, FeatureCodec codec) throws IOException {
+        // NOTE(review): the boolean argument presumably requires an index to exist — confirm against htsjdk.
+        super(featurePath, codec, true);
+    }
+
+    // Wraps the whole-file iterator so tests can later check isClosed() on it.
+    @Override
+    @SuppressWarnings("unchecked")
+    public CheckableCloseableTribbleIterator<Feature> iterator() throws IOException {
+        return new CheckableCloseableTribbleIterator<Feature>(super.iterator());
+    }
+
+    // Wraps the interval-query iterator so tests can later check isClosed() on it.
+    @Override
+    @SuppressWarnings("unchecked")
+    public CheckableCloseableTribbleIterator<Feature> query(String chr, int start, int end) throws IOException {
+        return new CheckableCloseableTribbleIterator<Feature>(super.query(chr, start, end));
+    }
+}
diff --git a/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/refdata/utils/TestRMDTrackBuilder.java b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/refdata/utils/TestRMDTrackBuilder.java
new file mode 100644
index 0000000..436c4f4
--- /dev/null
+++ b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/refdata/utils/TestRMDTrackBuilder.java
@@ -0,0 +1,70 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.refdata.utils;
+
+import htsjdk.samtools.SAMSequenceDictionary;
+import htsjdk.tribble.FeatureCodec;
+import htsjdk.tribble.index.Index;
+import org.broadinstitute.gatk.utils.refdata.tracks.FeatureManager;
+import org.broadinstitute.gatk.utils.refdata.tracks.IndexDictionaryUtils;
+import org.broadinstitute.gatk.utils.refdata.tracks.RMDTrack;
+import org.broadinstitute.gatk.utils.refdata.tracks.RMDTrackBuilder;
+import org.broadinstitute.gatk.utils.GenomeLocParser;
+
+import java.io.File;
+import java.io.IOException;
+
+/**
+ * Extension of RMDTrackBuilder that creates TestFeatureReader's which in turn create CheckableCloseableTribbleIterator's.
+ */
+public class TestRMDTrackBuilder extends RMDTrackBuilder {
+    private GenomeLocParser genomeLocParser;
+
+    public TestRMDTrackBuilder(SAMSequenceDictionary dict, GenomeLocParser genomeLocParser) {
+        // disable auto-index creation/locking in the RMDTrackBuilder for tests
+        super(dict, genomeLocParser, null, true, null);
+        this.genomeLocParser = genomeLocParser;
+    }
+
+    /**
+     * Builds an RMDTrack whose reader is a TestFeatureReader, so tests can observe iterator closure.
+     *
+     * @param fileDescriptor the triplet describing the track to build
+     * @return the constructed track
+     */
+    @Override
+    public RMDTrack createInstanceOfTrack(RMDTriplet fileDescriptor) {
+        final String name = fileDescriptor.getName();
+        final File inputFile = new File(fileDescriptor.getFile());
+        final FeatureManager.FeatureDescriptor descriptor = getFeatureManager().getByTriplet(fileDescriptor);
+        final FeatureCodec codec = getFeatureManager().createCodec(descriptor, name, genomeLocParser, null);
+        final TestFeatureReader featureReader;
+        final Index index;
+        try {
+            // Create a feature reader that creates checkable tribble iterators.
+            index = loadIndex(inputFile, codec);
+            featureReader = new TestFeatureReader(inputFile.getAbsolutePath(), codec);
+        } catch (IOException e) {
+            // Improvement: include the offending file in the message instead of a bare RuntimeException.
+            throw new RuntimeException("Unable to create track for " + inputFile.getAbsolutePath(), e);
+        }
+        final SAMSequenceDictionary sequenceDictionary = IndexDictionaryUtils.getSequenceDictionaryFromProperties(index);
+        return new RMDTrack(descriptor.getCodecClass(), name, inputFile, featureReader, sequenceDictionary, genomeLocParser, codec);
+    }
+}
diff --git a/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/report/GATKReportUnitTest.java b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/report/GATKReportUnitTest.java
new file mode 100644
index 0000000..d678517
--- /dev/null
+++ b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/report/GATKReportUnitTest.java
@@ -0,0 +1,289 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.report;
+
+import org.broadinstitute.gatk.utils.BaseTest;
+import org.broadinstitute.gatk.utils.report.GATKReport;
+import org.broadinstitute.gatk.utils.report.GATKReportColumn;
+import org.broadinstitute.gatk.utils.report.GATKReportTable;
+import org.broadinstitute.gatk.utils.report.GATKReportVersion;
+import org.testng.Assert;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+import java.io.File;
+import java.io.IOException;
+import java.io.PrintStream;
+import java.util.Random;
+import java.io.FileInputStream;
+import java.io.DataInputStream;
+import java.io.BufferedReader;
+import java.io.InputStreamReader;
+import java.util.ArrayList;
+
+
+public class GATKReportUnitTest extends BaseTest {
+    @Test
+    public void testParse() throws Exception {
+        String reportPath = publicTestDir + "exampleGATKReportv2.tbl";
+        GATKReport report = new GATKReport(reportPath);
+        Assert.assertEquals(report.getVersion(), GATKReportVersion.V1_1);
+        Assert.assertEquals(report.getTables().size(), 5);
+
+        GATKReportTable countVariants = report.getTable("CountVariants");
+        Assert.assertEquals(countVariants.get(0, "nProcessedLoci"), "63025520");
+        Assert.assertEquals(countVariants.get(0, "nNoCalls"), "0");
+        Assert.assertEquals(countVariants.get(0, "heterozygosity"), 4.73e-06);
+
+        GATKReportTable validationReport = report.getTable("ValidationReport");
+        Assert.assertEquals(validationReport.get(2, "PPV"), Double.NaN);
+    }
+
+    @DataProvider(name = "rightAlignValues")
+    public Object[][] getRightAlignValues() {
+        return new Object[][]{
+                new Object[]{null, true},
+                new Object[]{"null", true},
+                new Object[]{"NA", true},
+                new Object[]{"0", true},
+                new Object[]{"0.0", true},
+                new Object[]{"-0", true},
+                new Object[]{"-0.0", true},
+                new Object[]{String.valueOf(Long.MAX_VALUE), true},
+                new Object[]{String.valueOf(Long.MIN_VALUE), true},
+                new Object[]{String.valueOf(Float.MIN_NORMAL), true},
+                new Object[]{String.valueOf(Double.MAX_VALUE), true},
+                new Object[]{String.valueOf(Double.MIN_VALUE), true},
+                new Object[]{String.valueOf(Double.POSITIVE_INFINITY), true},
+                new Object[]{String.valueOf(Double.NEGATIVE_INFINITY), true},
+                new Object[]{String.valueOf(Double.NaN), true},
+                new Object[]{"hello", false}
+        };
+    }
+
+    @Test(dataProvider = "rightAlignValues")
+    public void testIsRightAlign(String value, boolean expected) {
+        Assert.assertEquals(GATKReportColumn.isRightAlign(value), expected, "right align of '" + value + "'");
+    }
+
+    private GATKReportTable getTableWithRandomValues() {
+        Random number = new Random(123L);
+        final int VALUESRANGE = 10;
+
+        GATKReport report = GATKReport.newSimpleReport("TableName", "col1", "col2", "col3");
+        GATKReportTable table = new GATKReportTable("testSortingTable", "table with random values sorted by columns", 3, GATKReportTable.TableSortingWay.SORT_BY_COLUMN );
+
+        final int NUMROWS = 100;
+        for (int x = 0; x < NUMROWS; x++) {
+            report.addRow(number.nextInt(VALUESRANGE), number.nextInt(VALUESRANGE), number.nextInt(VALUESRANGE));
+        }
+        return table;
+    }
+
+    @Test(enabled = true)
+    public void testSortingByColumn() {
+        Assert.assertEquals(isSorted(getTableWithRandomValues()), true);
+    }
+
+    private boolean isSorted(GATKReportTable table) {
+        boolean result = true;
+        File testingSortingTableFile = new File("testSortingFile.txt");
+
+        try {
+            // Connect print stream to the output stream
+            PrintStream ps = new PrintStream(testingSortingTableFile);
+            table.write(ps);
+            ps.close();
+        }
+        catch (Exception e){
+            System.err.println ("Error: " + e.getMessage());
+        }
+
+        ArrayList<int[]> rows = new ArrayList<int[]>();
+        try {
+            // Open the file
+            FileInputStream fStream = new FileInputStream(testingSortingTableFile);
+            // Get the object of DataInputStream
+            DataInputStream in = new DataInputStream(fStream);
+            BufferedReader br = new BufferedReader(new InputStreamReader(in));
+            String strLine;
+            //Read File Line By Line
+            while ((strLine = br.readLine()) != null) {
+
+                String[] parts = strLine.split(" ");
+                int l = parts.length;
+                int[] row = new int[l];
+                for(int n = 0; n < l; n++) {
+                    row[n] = Integer.parseInt(parts[n]);
+                }
+                rows.add(row);
+            }
+            //Close the input stream
+            in.close();
+        } catch (Exception e){//Catch exception if any
+            System.err.println("Error: " + e.getMessage());
+        }
+        for (int x = 1; x < rows.size() && result; x++)    {
+            result = checkRowOrder(rows.get(x - 1), rows.get(x));
+        }
+        return result;
+    }
+
+    private boolean checkRowOrder(int[] row1, int[] row2) {
+        int l = row1.length;
+        final int EQUAL = 0;
+
+        int result = EQUAL;
+
+        for(int x = 0; x < l && ( result <= EQUAL); x++) {
+            result = ((Integer)row1[x]).compareTo(row2[x]);
+        }
+        if (result <= EQUAL) {
+            return true;
+        } else {
+            return false;
+        }
+    }
+
+    private GATKReportTable makeBasicTable() {
+        GATKReport report = GATKReport.newSimpleReport("TableName", "sample", "value");
+        GATKReportTable table = report.getTable("TableName");
+        report.addRow("foo.1", "hello");
+        report.addRow("foo.2", "world");
+        return table;
+    }
+
+    @Test
+    public void testDottedSampleName() {
+        GATKReportTable table = makeBasicTable();
+        Assert.assertEquals(table.get(0, "value"), "hello");
+        Assert.assertEquals(table.get(1, "value"), "world");
+    }
+
+    @Test
+    public void testSimpleGATKReport() {
+        // Create a new simple GATK report named "TableName" with columns: Roger, is, and Awesome
+        GATKReport report = GATKReport.newSimpleReport("TableName", "Roger", "is", "Awesome");
+
+        // Add data to simple GATK report
+        report.addRow(12, 23.45, true);
+        report.addRow("ans", '3', 24.5);
+        report.addRow("hi", "", 2.3);
+
+        // Print the report to console
+        //report.print(System.out);
+
+        try {
+            File file = createTempFile("GATKReportGatherer-UnitTest", ".tbl");
+            //System.out.format("The temporary file" + " has been created: %s%n", file);
+            PrintStream ps = new PrintStream(file);
+            report.print(ps);
+            //System.out.println("File succesfully outputed!");
+            GATKReport inputRead = new GATKReport(file);
+            //System.out.println("File succesfully read!");
+            //inputRead.print(System.out);
+            Assert.assertTrue(report.isSameFormat(inputRead));
+
+        } catch (IOException x) {
+            System.err.format("IOException: %s%n", x);
+        }
+
+    }
+
+    @Test
+    public void testGATKReportGatherer() {
+
+        GATKReport report1, report2, report3;
+        report1 = new GATKReport();
+        report1.addTable("TableName", "Description", 2);
+        report1.getTable("TableName").addColumn("colA", "%s");
+        report1.getTable("TableName").addColumn("colB", "%c");
+        report1.getTable("TableName").set(0, "colA", "NotNum");
+        report1.getTable("TableName").set(0, "colB", (char) 64);
+
+        report2 = new GATKReport();
+        report2.addTable("TableName", "Description", 2);
+        report2.getTable("TableName").addColumn("colA", "%s");
+        report2.getTable("TableName").addColumn("colB", "%c");
+        report2.getTable("TableName").set(0, "colA", "df3");
+        report2.getTable("TableName").set(0, "colB", 'A');
+
+        report3 = new GATKReport();
+        report3.addTable("TableName", "Description", 2);
+        report3.getTable("TableName").addColumn("colA", "%s");
+        report3.getTable("TableName").addColumn("colB", "%c");
+        report3.getTable("TableName").set(0, "colA", "df5f");
+        report3.getTable("TableName").set(0, "colB", 'c');
+
+        report1.concat(report2);
+        report1.concat(report3);
+
+        report1.addTable("Table2", "To contain some more data types", 3);
+        GATKReportTable table = report1.getTable("Table2");
+        table.addColumn("SomeInt", "%d");
+        table.addColumn("SomeFloat", "%.16E");
+        table.addColumn("TrueFalse", "%B");
+        table.addRowIDMapping("12df", 0);
+        table.addRowIDMapping("5f", 1);
+        table.addRowIDMapping("RZ", 2);
+        table.set("12df", "SomeInt", Byte.MAX_VALUE);
+        table.set("12df", "SomeFloat", 34.0);
+        table.set("12df", "TrueFalse", true);
+        table.set("5f", "SomeInt", Short.MAX_VALUE);
+        table.set("5f", "SomeFloat", Double.MAX_VALUE);
+        table.set("5f", "TrueFalse", false);
+        table.set("RZ", "SomeInt", Long.MAX_VALUE);
+        table.set("RZ", "SomeFloat", 535646345.657453464576);
+        table.set("RZ", "TrueFalse", true);
+
+        report1.addTable("Table3", "blah", 1, GATKReportTable.TableSortingWay.SORT_BY_ROW);
+        report1.getTable("Table3").addColumn("a");
+        report1.getTable("Table3").addRowIDMapping("q", 2);
+        report1.getTable("Table3").addRowIDMapping("5", 3);
+        report1.getTable("Table3").addRowIDMapping("573s", 0);
+        report1.getTable("Table3").addRowIDMapping("ZZZ", 1);
+        report1.getTable("Table3").set("q", "a", "34");
+        report1.getTable("Table3").set("5", "a", "c4g34");
+        report1.getTable("Table3").set("573s", "a", "fDlwueg");
+        report1.getTable("Table3").set("ZZZ", "a", "Dfs");
+
+        try {
+            File file = createTempFile("GATKReportGatherer-UnitTest", ".tbl");
+            //System.out.format("The temporary file" + " has been created: %s%n", file);
+            PrintStream ps = new PrintStream(file);
+            report1.print(ps);
+            //System.out.println("File succesfully outputed!");
+            GATKReport inputRead = new GATKReport(file);
+            //System.out.println("File succesfully read!");
+            //inputRead.print(System.out);
+            Assert.assertTrue(report1.isSameFormat(inputRead));
+            Assert.assertTrue(report1.equals(inputRead));
+
+        } catch (IOException x) {
+            System.err.format("IOException: %s%n", x);
+        }
+    }
+}
diff --git a/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/report/ReportMarshallerUnitTest.java b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/report/ReportMarshallerUnitTest.java
new file mode 100644
index 0000000..5d9478a
--- /dev/null
+++ b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/report/ReportMarshallerUnitTest.java
@@ -0,0 +1,64 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.report;
+
+import org.broadinstitute.gatk.utils.BaseTest;
+import org.testng.annotations.Test;
+
+
+/**
+ * @author aaron
+ *         <p/>
+ *         Class ReportMarshallerUnitTest
+ *         <p/>
+ *         test out the marshaller
+ */
+public class ReportMarshallerUnitTest extends BaseTest {
+    /**
+     * Placeholder: the original body was entirely commented-out code against a
+     * Freemarker-style API (Configuration/Template/ReportMarshaller) that is not
+     * exercised anywhere here; the dead code has been removed.
+     * TODO: write a real test against the current report-marshalling API.
+     */
+    @Test
+    public void testMarshalling() {
+    }
+}
diff --git a/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/runtime/ProcessControllerUnitTest.java b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/runtime/ProcessControllerUnitTest.java
new file mode 100644
index 0000000..eeaac02
--- /dev/null
+++ b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/runtime/ProcessControllerUnitTest.java
@@ -0,0 +1,518 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.runtime;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.commons.lang.StringUtils;
+import org.broadinstitute.gatk.utils.BaseTest;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+import org.broadinstitute.gatk.utils.exceptions.UserException;
+import org.broadinstitute.gatk.utils.io.IOUtils;
+import org.testng.Assert;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.Map;
+
+public class ProcessControllerUnitTest extends BaseTest {
+    private static final String NL = String.format("%n");
+
+    // Regression test: destroying a long-running job via the thread-local controller
+    // must make the background exec() return with a non-zero exit status. Repeated
+    // three times to confirm the thread-local controller survives the destroy.
+    @Test(timeOut = 60 * 1000)
+    public void testDestroyThreadLocal() throws InterruptedException {
+        for (int i = 0; i < 3; i++) {
+            final ProcessController controller = ProcessController.getThreadLocal();
+            // Job prints a line, then sleeps long enough (600s) for the main thread
+            // to destroy it before the final echo can run.
+            final ProcessSettings job = new ProcessSettings(
+                    new String[] {"sh", "-c", "echo Hello World && sleep 600 && echo Goodbye"});
+            // -1 buffer size presumably means "unbounded" — same convention used
+            // throughout this class; TODO confirm against StreamSettings.
+            job.getStdoutSettings().setBufferSize(-1);
+
+            Thread t = new Thread(new Runnable() {
+                @Override
+                public void run() {
+                    System.out.println("BACK: Starting on background thread");
+                    ProcessOutput result = controller.exec(job);
+                    // Assert in background thread doesn't make it to main thread but does print a trace.
+                    Assert.assertTrue(result.getExitValue() != 0, "Destroy-attempted job returned zero exit status");
+                    System.out.println("BACK: Background thread exiting");
+                }
+            });
+
+            System.out.println("MAIN: Starting background thread");
+            t.start();
+            System.out.println("MAIN: Sleeping main thread 3s");
+            Thread.sleep(3000);
+            System.out.println("MAIN: Destroying job");
+            controller.tryDestroy();
+            System.out.println("MAIN: Not waiting on background thread to exit");
+            // Using standard java.io this was blocking on linux.
+            // TODO: try again with NIO.
+            //t.join();
+            //System.out.println("MAIN: Background thread exited");
+        }
+    }
+
+    /**
+     * A single ProcessController must remain usable after failed commands:
+     * a command that cannot start, a command that exits non-zero, then a
+     * success, repeated three times to prove no state leaks between runs.
+     */
+    @Test
+    public void testReuseAfterError() {
+        ProcessController controller = new ProcessController();
+
+        ProcessSettings job;
+
+        for (int i = 0; i < 3; i++) {
+            // Test bad command: exec must throw since the binary does not exist.
+            // (The original silently passed if no exception was thrown.)
+            job = new ProcessSettings(new String[] {"no_such_command"});
+            try {
+                controller.exec(job);
+                Assert.fail("exec() of a non-existent command did not throw");
+            } catch (ReviewedGATKException e) {
+                /* Was supposed to throw an exception */
+            }
+
+            // Test exit != 0
+            job = new ProcessSettings(new String[] {"cat", "non_existent_file"});
+            int exitValue = controller.exec(job).getExitValue();
+            Assert.assertTrue(exitValue != 0, "'cat' non existent file returned 0");
+
+            // Test success
+            job = new ProcessSettings(new String[] {"echo", "Hello World"});
+            exitValue = controller.exec(job).getExitValue();
+            Assert.assertEquals(exitValue, 0, "Echo failed");
+        }
+    }
+
+    /**
+     * Launches "sh -c 'echo $MY_NEW_VAR'" with a customized environment and
+     * checks that the child process saw the injected variable on stdout.
+     */
+    @Test
+    public void testEnvironment() {
+        final String key = "MY_NEW_VAR";
+        final String value = "value is here";
+
+        // Start from the parent environment and add the one extra variable.
+        final Map<String, String> env = new HashMap<String, String>(System.getenv());
+        env.put(key, value);
+
+        final ProcessSettings job = new ProcessSettings(new String[] {"sh", "-c", "echo $"+key});
+        job.setEnvironment(env);
+        job.setRedirectErrorStream(true);
+        job.getStdoutSettings().setBufferSize(-1);
+
+        final ProcessOutput result = new ProcessController().exec(job);
+
+        Assert.assertEquals(result.getExitValue(), 0, "Echo environment variable failed");
+        Assert.assertEquals(result.getStdout().getBufferString(), value + NL, "Echo environment returned unexpected output");
+    }
+
+    // Verifies that setDirectory() controls the child's working directory by
+    // running "pwd" in a fresh temp dir. getCanonicalFile() is used so the path
+    // "pwd" prints matches the expected path even if the temp dir path contains
+    // symlinks.
+    @Test
+    public void testDirectory() throws IOException {
+        File dir = null;
+        try {
+            dir = IOUtils.tempDir("temp.", "").getCanonicalFile();
+
+            ProcessSettings job = new ProcessSettings(new String[] {"pwd"});
+            job.getStdoutSettings().setBufferSize(-1);
+            job.setRedirectErrorStream(true);
+            job.setDirectory(dir);
+
+            ProcessController controller = new ProcessController();
+            ProcessOutput result = controller.exec(job);
+            int exitValue = result.getExitValue();
+
+            Assert.assertEquals(exitValue, 0, "Getting working directory failed");
+
+            Assert.assertEquals(result.getStdout().getBufferString(), dir.getAbsolutePath() + NL,
+                    "Setting/getting working directory returned unexpected output");
+        } finally {
+            // Best-effort cleanup; deleteQuietly tolerates null if tempDir threw.
+            FileUtils.deleteQuietly(dir);
+        }
+    }
+
+    /**
+     * Feeds "cat" its stdin from an in-memory buffer and expects the identical
+     * text back on the captured stdout buffer.
+     */
+    @Test
+    public void testReadStdInBuffer() {
+        final String bufferText = "Hello from buffer";
+
+        final ProcessSettings job = new ProcessSettings(new String[] {"cat"});
+        job.getStdinSettings().setInputBuffer(bufferText);
+        job.setRedirectErrorStream(true);
+        job.getStdoutSettings().setBufferSize(-1);
+
+        final ProcessOutput output = new ProcessController().exec(job);
+
+        Assert.assertEquals(output.getStdout().getBufferString(), bufferText,
+                "Unexpected output from cat stdin buffer");
+    }
+
+    // Verifies that stdin redirected from a file is delivered to the child:
+    // "cat" must echo the file's exact contents into the captured stdout buffer.
+    @Test
+    public void testReadStdInFile() {
+        File input = null;
+        try {
+            String fileText = "Hello from file";
+            input = IOUtils.writeTempFile(fileText, "stdin.", ".txt");
+
+            ProcessSettings job = new ProcessSettings(new String[] {"cat"});
+            job.getStdoutSettings().setBufferSize(-1);
+            job.setRedirectErrorStream(true);
+            job.getStdinSettings().setInputFile(input);
+
+            ProcessController controller = new ProcessController();
+            ProcessOutput output = controller.exec(job);
+
+            Assert.assertEquals(output.getStdout().getBufferString(), fileText,
+                    "Unexpected output from cat stdin file");
+        } finally {
+            // Best-effort cleanup; tolerates null if writeTempFile threw.
+            FileUtils.deleteQuietly(input);
+        }
+    }
+
+    /**
+     * Visual check only: echoes one line to stdout and one (redirected) error
+     * line, both printed straight through to this JVM's System.out. Makes no
+     * assertions.
+     */
+    @Test
+    public void testWriteStdOut() {
+        final ProcessController controller = new ProcessController();
+
+        final ProcessSettings job = new ProcessSettings(new String[] {"echo", "Testing to stdout"});
+        // Not going to call the System.setOut() for now. Just running a basic visual test.
+        job.setRedirectErrorStream(true);
+        job.getStdoutSettings().printStandard(true);
+
+        System.out.println("testWriteStdOut: Writing two lines to std out...");
+        controller.exec(job);
+        job.setCommand(new String[]{"cat", "non_existent_file"});
+        controller.exec(job);
+        System.out.println("testWriteStdOut: ...two lines should have been printed to std out");
+    }
+
+    /**
+     * With redirectErrorStream(true), a failing command's stderr must be folded
+     * into stdout: the out file/buffer receive the error text and the err
+     * file/buffer stay empty.
+     */
+    @Test
+    public void testErrorToOut() throws IOException {
+        File outFile = null;
+        File errFile = null;
+        try {
+            outFile = BaseTest.createTempFile("temp", "");
+            errFile = BaseTest.createTempFile("temp", "");
+
+            ProcessSettings job = new ProcessSettings(new String[]{"cat", "non_existent_file"});
+            job.getStdoutSettings().setOutputFile(outFile);
+            job.getStdoutSettings().setBufferSize(-1);
+            job.getStderrSettings().setOutputFile(errFile);
+            job.getStderrSettings().setBufferSize(-1);
+            job.setRedirectErrorStream(true);
+
+            ProcessOutput result = new ProcessController().exec(job);
+            int exitValue = result.getExitValue();
+
+            Assert.assertTrue(exitValue != 0, "'cat' non existent file returned 0");
+
+            String fileString, bufferString;
+
+            // cat's complaint must have landed on stdout (file and buffer agree)...
+            fileString = FileUtils.readFileToString(outFile);
+            Assert.assertTrue(fileString.length() > 0, "Out file was length 0");
+
+            bufferString = result.getStdout().getBufferString();
+            Assert.assertTrue(bufferString.length() > 0, "Out buffer was length 0");
+
+            Assert.assertFalse(result.getStdout().isBufferTruncated(), "Out buffer was truncated");
+            Assert.assertEquals(bufferString.length(), fileString.length(), "Out buffer length did not match file length");
+
+            // ...and nothing on stderr.
+            fileString = FileUtils.readFileToString(errFile);
+            Assert.assertEquals(fileString, "", "Unexpected output to err file");
+
+            bufferString = result.getStderr().getBufferString();
+            // Message typo fixed: "Unexepected" -> "Unexpected".
+            Assert.assertEquals(bufferString, "", "Unexpected output to err buffer");
+        } finally {
+            FileUtils.deleteQuietly(outFile);
+            FileUtils.deleteQuietly(errFile);
+        }
+    }
+
+    /**
+     * With redirectErrorStream(false), a failing command's stderr must stay on
+     * stderr: the err file/buffer receive the error text and the out file/buffer
+     * stay empty.
+     */
+    @Test
+    public void testErrorToErr() throws IOException {
+        File outFile = null;
+        File errFile = null;
+        try {
+            outFile = BaseTest.createTempFile("temp", "");
+            errFile = BaseTest.createTempFile("temp", "");
+
+            ProcessSettings job = new ProcessSettings(new String[]{"cat", "non_existent_file"});
+            job.getStdoutSettings().setOutputFile(outFile);
+            job.getStdoutSettings().setBufferSize(-1);
+            job.getStderrSettings().setOutputFile(errFile);
+            job.getStderrSettings().setBufferSize(-1);
+            job.setRedirectErrorStream(false);
+
+            ProcessOutput result = new ProcessController().exec(job);
+            int exitValue = result.getExitValue();
+
+            Assert.assertTrue(exitValue != 0, "'cat' non existent file returned 0");
+
+            String fileString, bufferString;
+
+            // cat's complaint must have landed on stderr (file and buffer agree)...
+            fileString = FileUtils.readFileToString(errFile);
+            Assert.assertTrue(fileString.length() > 0, "Err file was length 0");
+
+            bufferString = result.getStderr().getBufferString();
+            Assert.assertTrue(bufferString.length() > 0, "Err buffer was length 0");
+
+            Assert.assertFalse(result.getStderr().isBufferTruncated(), "Err buffer was truncated");
+            Assert.assertEquals(bufferString.length(), fileString.length(), "Err buffer length did not match file length");
+
+            // ...and nothing on stdout.
+            fileString = FileUtils.readFileToString(outFile);
+            Assert.assertEquals(fileString, "", "Unexpected output to out file");
+
+            bufferString = result.getStdout().getBufferString();
+            // Message typo fixed: "Unexepected" -> "Unexpected".
+            Assert.assertEquals(bufferString, "", "Unexpected output to out buffer");
+        } finally {
+            FileUtils.deleteQuietly(outFile);
+            FileUtils.deleteQuietly(errFile);
+        }
+    }
+
+    private static final String TRUNCATE_TEXT = "Hello World";
+    // Expected raw stdout of "echo Hello World": the text plus platform newline.
+    // NOTE(review): getBytes() uses the platform default charset — assumed
+    // ASCII-safe for this fixture.
+    private static final byte[] TRUNCATE_OUTPUT_BYTES = (TRUNCATE_TEXT + NL).getBytes();
+
+    /**
+     * @return Test truncating content vs. not truncating (run at -1/+1 size)
+     */
+    @DataProvider(name = "truncateSizes")
+    public Object[][] getTruncateBufferSizes() {
+        int l = TRUNCATE_OUTPUT_BYTES.length;
+        return new Object[][]{
+                new Object[]{0, 0},
+                new Object[]{l, l},
+                new Object[]{l + 1, l},
+                new Object[]{l - 1, l - 1}
+        };
+    }
+
+    /**
+     * Runs "echo Hello World" with a bounded stdout buffer and checks both the
+     * captured bytes and the truncation flag for each configured buffer size.
+     */
+    @Test(dataProvider = "truncateSizes")
+    public void testTruncateBuffer(int truncateLen, int expectedLen) {
+        final byte[] expected = Arrays.copyOf(TRUNCATE_OUTPUT_BYTES, expectedLen);
+
+        final ProcessSettings job = new ProcessSettings(new String[] {"echo", TRUNCATE_TEXT});
+        job.getStdoutSettings().setBufferSize(truncateLen);
+
+        final ProcessOutput result = new ProcessController().exec(job);
+
+        final int exitValue = result.getExitValue();
+        Assert.assertEquals(exitValue, 0,
+                String.format("Echo returned %d: %s", exitValue, TRUNCATE_TEXT));
+
+        final byte[] bufferBytes = result.getStdout().getBufferBytes();
+        Assert.assertEquals(bufferBytes, expected,
+                String.format("Output buffer didn't match (%d vs %d)", expected.length, bufferBytes.length));
+
+        final boolean truncated = result.getStdout().isBufferTruncated();
+        Assert.assertEquals(truncated, TRUNCATE_OUTPUT_BYTES.length > truncateLen,
+                "Unexpected buffer truncation result");
+    }
+
+    // Shared fixture: a long argument list (built by getLongCommand(), defined
+    // elsewhere in this class) and its space-joined string form.
+    private static final String[] LONG_COMMAND = getLongCommand();
+    private static final String LONG_COMMAND_STRING = StringUtils.join(LONG_COMMAND, " ");
+    private static final String LONG_COMMAND_DESCRIPTION = "<long command>";
+
+    // Builds EchoCommand cases covering plain and quote-containing arguments.
+    // NOTE(review): EchoCommand instances appear to self-register on construction
+    // and are collected via TestDataProvider.getTests() — confirm against
+    // TestDataProvider.
+    @DataProvider(name = "echoCommands")
+    public Object[][] getEchoCommands() {
+
+        new EchoCommand(new String[]{"echo", "Hello", "World"}, "Hello World" + NL);
+        new EchoCommand(new String[]{"echo", "'Hello", "World"}, "'Hello World" + NL);
+        new EchoCommand(new String[]{"echo", "Hello", "World'"}, "Hello World'" + NL);
+        new EchoCommand(new String[]{"echo", "'Hello", "World'"}, "'Hello World'" + NL);
+
+        // One case with a very long argv; a short toString keeps test reports readable.
+        String[] longCommand = new String[LONG_COMMAND.length + 1];
+        longCommand[0] = "echo";
+        System.arraycopy(LONG_COMMAND, 0, longCommand, 1, LONG_COMMAND.length);
+        new EchoCommand(longCommand, LONG_COMMAND_STRING + NL) {
+            @Override
+            public String toString() {
+                return LONG_COMMAND_DESCRIPTION;
+            }
+        };
+
+        return TestDataProvider.getTests(EchoCommand.class);
+    }
+
+    // Runs each EchoCommand and, when an expected output is present, checks the
+    // output file, the in-memory buffer, and the truncation flag against it.
+    @Test(dataProvider = "echoCommands")
+    public void testEcho(EchoCommand script) throws IOException {
+        File outputFile = null;
+        try {
+            outputFile = BaseTest.createTempFile("temp", "");
+
+            ProcessSettings job = new ProcessSettings(script.command);
+            if (script.output != null) {
+                // Buffer sized exactly to the expected output so any truncation
+                // would be caught by the assertions below.
+                job.getStdoutSettings().setOutputFile(outputFile);
+                job.getStdoutSettings().setBufferSize(script.output.getBytes().length);
+            }
+
+            ProcessOutput result = new ProcessController().exec(job);
+            int exitValue = result.getExitValue();
+
+            Assert.assertEquals(exitValue, 0,
+                    String.format("Echo returned %d: %s", exitValue, script));
+
+            if (script.output != null) {
+
+                String fileString = FileUtils.readFileToString(outputFile);
+                Assert.assertEquals(fileString, script.output,
+                        String.format("Output file didn't match (%d vs %d): %s",
+                                fileString.length(), script.output.length(), script));
+
+                String bufferString = result.getStdout().getBufferString();
+                Assert.assertEquals(bufferString, script.output,
+                        String.format("Output content didn't match (%d vs %d): %s",
+                                bufferString.length(), script.output.length(), script));
+
+                Assert.assertFalse(result.getStdout().isBufferTruncated(),
+                        "Output content was truncated: " + script);
+            }
+        } finally {
+            FileUtils.deleteQuietly(outputFile);
+        }
+    }
+
+    /**
+     * exec() must throw a ReviewedGATKException when the executable cannot be
+     * started at all.
+     */
+    @Test(expectedExceptions = ReviewedGATKException.class)
+    public void testUnableToStart() {
+        final ProcessController controller = new ProcessController();
+        controller.exec(new ProcessSettings(new String[]{"no_such_command"}));
+    }
+
+    /**
+     * Provides shell-script contents paired with whether running them via
+     * "sh &lt;script&gt;" should succeed and, when deterministic, the expected stdout.
+     * Note: each ScriptCommand registers itself with TestDataProvider from its
+     * super() constructor, so the apparently discarded "new" expressions are
+     * intentional — getTests() collects them at the end.
+     */
+    @DataProvider(name = "scriptCommands")
+    public Object[][] getScriptCommands() {
+        new ScriptCommand(true, "echo Hello World", "Hello World" + NL);
+        // Unbalanced quotes: sh should fail with a syntax error (non-zero exit).
+        new ScriptCommand(false, "echo 'Hello World", null);
+        new ScriptCommand(false, "echo Hello World'", null);
+        new ScriptCommand(true, "echo 'Hello World'", "Hello World" + NL);
+        new ScriptCommand(true, "echo \"Hello World\"", "Hello World" + NL);
+        new ScriptCommand(false, "no_such_echo Hello World", null);
+        // Unescaped "#" starts a shell comment, so only a newline is printed.
+        new ScriptCommand(true, "echo #", NL);
+        new ScriptCommand(true, "echo \\#", "#" + NL);
+        new ScriptCommand(true, "echo \\\\#", "\\#" + NL);
+
+        new ScriptCommand(true, "echo " + LONG_COMMAND_STRING, LONG_COMMAND_STRING + NL) {
+            @Override
+            public String toString() {
+                // Avoid dumping the huge command line into test reports.
+                return LONG_COMMAND_DESCRIPTION;
+            }
+        };
+
+        return TestDataProvider.getTests(ScriptCommand.class);
+    }
+
+    /**
+     * Writes the script content to a temp file, runs it with "sh", and checks
+     * that success/failure matches the expectation; when an expected output is
+     * set, also verifies the stdout file and stdout buffer contents and that
+     * no truncation occurred.
+     */
+    @Test(dataProvider = "scriptCommands")
+    public void testScript(ScriptCommand script) throws IOException {
+        File scriptFile = null;
+        File outputFile = null;
+        try {
+            scriptFile = writeScript(script.content);
+            outputFile = BaseTest.createTempFile("temp", "");
+
+            ProcessSettings job = new ProcessSettings(new String[]{"sh", scriptFile.getAbsolutePath()});
+            if (script.output != null) {
+                job.getStdoutSettings().setOutputFile(outputFile);
+                job.getStdoutSettings().setBufferSize(script.output.getBytes().length);
+            }
+
+            ProcessOutput result = new ProcessController().exec(job);
+            int exitValue = result.getExitValue();
+
+            // Unlike testEcho, scripts are allowed to fail (bad quoting, missing binary).
+            Assert.assertEquals(exitValue == 0, script.succeed,
+                    String.format("Script returned %d: %s", exitValue, script));
+
+            if (script.output != null) {
+
+                String fileString = FileUtils.readFileToString(outputFile);
+                Assert.assertEquals(fileString, script.output,
+                        String.format("Output file didn't match (%d vs %d): %s",
+                                fileString.length(), script.output.length(), script));
+
+                String bufferString = result.getStdout().getBufferString();
+                Assert.assertEquals(bufferString, script.output,
+                        String.format("Output content didn't match (%d vs %d): %s",
+                                bufferString.length(), script.output.length(), script));
+
+                Assert.assertFalse(result.getStdout().isBufferTruncated(),
+                        "Output content was truncated: " + script);
+            }
+        } finally {
+            FileUtils.deleteQuietly(scriptFile);
+            FileUtils.deleteQuietly(outputFile);
+        }
+    }
+
+    private static String[] getLongCommand() {
+        // This command fails on some systems with a 4096 character limit when run via the old sh -c "echo ...",
+        // but works on the same systems when run via sh <script>
+        int cnt = 500;
+        String[] command = new String[cnt];
+        for (int i = 1; i <= cnt; i++) {
+            command[i - 1] = String.format("%03d______", i);
+        }
+        return command;
+    }
+
+    /**
+     * Writes the given text into a fresh temp file and returns the file.
+     * NOTE(review): on IOException only the message is preserved — the cause
+     * (stack trace) is dropped; confirm whether UserException.BadTmpDir has a
+     * (String, Throwable) constructor that could be used instead.
+     */
+    private static File writeScript(String contents) {
+        try {
+            File file = BaseTest.createTempFile("temp", "");
+            FileUtils.writeStringToFile(file, contents);
+            return file;
+        } catch (IOException e) {
+            throw new UserException.BadTmpDir(e.getMessage());
+        }
+    }
+
+    /**
+     * Test case: an argv-style command plus its expected stdout
+     * (null output means "don't check stdout").
+     */
+    private static class EchoCommand extends TestDataProvider {
+        public final String[] command;
+        public final String output;
+
+        public EchoCommand(String[] command, String output) {
+            // super() registers this instance with the TestDataProvider registry.
+            super(EchoCommand.class);
+            this.command = command;
+            this.output = output;
+        }
+
+        @Override
+        public String toString() {
+            return StringUtils.join(command, " ");
+        }
+    }
+
+    /**
+     * Test case: shell-script text, whether running it should succeed, and its
+     * expected stdout (null means "don't check stdout").
+     */
+    public static class ScriptCommand extends TestDataProvider {
+        public final boolean succeed;
+        public final String content;
+        public final String output;
+
+        public ScriptCommand(boolean succeed, String content, String output) {
+            // super() registers this instance with the TestDataProvider registry.
+            super(ScriptCommand.class);
+            this.succeed = succeed;
+            this.content = content;
+            this.output = output;
+        }
+
+        @Override
+        public String toString() {
+            return content;
+        }
+    }
+}
diff --git a/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/runtime/RuntimeUtilsUnitTest.java b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/runtime/RuntimeUtilsUnitTest.java
new file mode 100644
index 0000000..8f60ff0
--- /dev/null
+++ b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/runtime/RuntimeUtilsUnitTest.java
@@ -0,0 +1,42 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.runtime;
+
+import org.broadinstitute.gatk.utils.BaseTest;
+import org.testng.Assert;
+import org.testng.annotations.Test;
+
+/** Smoke tests for RuntimeUtils.which(): locating executables on the PATH. */
+public class RuntimeUtilsUnitTest extends BaseTest {
+    @Test
+    public void testWhichExists() {
+        // "ls" is assumed to be present on any POSIX test host — TODO confirm on Windows CI.
+        Assert.assertNotNull(RuntimeUtils.which("ls"), "Unable to locate ls");
+    }
+
+    @Test
+    public void testWhichNotExists() {
+        Assert.assertNull(RuntimeUtils.which("does_not_exist"), "Found nonexistent binary: does_not_exist");
+    }
+}
diff --git a/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/sam/AlignmentUtilsUnitTest.java b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/sam/AlignmentUtilsUnitTest.java
new file mode 100644
index 0000000..6dca140
--- /dev/null
+++ b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/sam/AlignmentUtilsUnitTest.java
@@ -0,0 +1,1045 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.sam;
+
+import htsjdk.samtools.*;
+import org.apache.commons.lang.ArrayUtils;
+import org.broadinstitute.gatk.utils.Utils;
+import org.broadinstitute.gatk.utils.pileup.PileupElement;
+import org.testng.Assert;
+import org.testng.annotations.BeforeClass;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+import java.util.*;
+
+public class AlignmentUtilsUnitTest {
+    private final static boolean DEBUG = false;
+    private SAMFileHeader header;
+
+    /** Basic aligned and mapped read. */
+    private SAMRecord readMapped;
+
+    /** Read with no contig specified in the read, -L UNMAPPED */
+    private SAMRecord readNoReference;
+
+    /** This read has a start position, but is flagged that it's not mapped. */
+    private SAMRecord readUnmappedFlag;
+
+    /** This read says it's aligned, but to a contig not in the header. */
+    private SAMRecord readUnknownContig;
+
+    /** This read says it's aligned, but actually has an unknown start. */
+    private SAMRecord readUnknownStart;
+
+    @BeforeClass
+    public void init() {
+        // Header with 3 contigs, 1 read group, contigs twice the default read length.
+        header = ArtificialSAMUtils.createArtificialSamHeader(3, 1, ArtificialSAMUtils.DEFAULT_READ_LENGTH * 2);
+
+        readMapped = createMappedRead("mapped", 1);
+
+        readNoReference = createUnmappedRead("unmappedNoReference");
+
+        // Has mapped coordinates but the unmapped flag is set.
+        readUnmappedFlag = createMappedRead("unmappedFlagged", 2);
+        readUnmappedFlag.setReadUnmappedFlag(true);
+
+        // Claims alignment to a contig that is not in the header.
+        readUnknownContig = createMappedRead("unknownContig", 3);
+        readUnknownContig.setReferenceName("unknownContig");
+
+        // Claims alignment but has no usable start coordinate.
+        readUnknownStart = createMappedRead("unknownStart", 1);
+        readUnknownStart.setAlignmentStart(SAMRecord.NO_ALIGNMENT_START);
+    }
+
+    /**
+     * Cases for the -L UNMAPPED notion of "unmapped": only a read with no
+     * reference name at all qualifies; flag-only or malformed alignments do not.
+     */
+    @DataProvider(name = "genomeLocUnmappedReadTests")
+    public Object[][] getGenomeLocUnmappedReadTests() {
+        final List<Object[]> cases = new ArrayList<Object[]>();
+        cases.add(new Object[] {readNoReference, true});
+        cases.add(new Object[] {readMapped, false});
+        cases.add(new Object[] {readUnmappedFlag, false});
+        cases.add(new Object[] {readUnknownContig, false});
+        cases.add(new Object[] {readUnknownStart, false});
+        return cases.toArray(new Object[][]{});
+    }
+    @Test(enabled = !DEBUG, dataProvider = "genomeLocUnmappedReadTests")
+    public void testIsReadGenomeLocUnmapped(SAMRecord read, boolean expected) {
+        // Verifies the -L UNMAPPED classification against the expected flag.
+        Assert.assertEquals(AlignmentUtils.isReadGenomeLocUnmapped(read), expected);
+    }
+
+    /**
+     * Cases for AlignmentUtils.isReadUnmapped(): a read is treated as truly
+     * unmapped when it has no reference, is flagged unmapped, or lacks an
+     * alignment start — but not merely for naming an unknown contig.
+     */
+    @DataProvider(name = "unmappedReadTests")
+    public Object[][] getUnmappedReadTests() {
+        return new Object[][] {
+                new Object[] {readNoReference, true},
+                new Object[] {readMapped, false},
+                new Object[] {readUnmappedFlag, true},
+                new Object[] {readUnknownContig, false},
+                new Object[] {readUnknownStart, true}
+        };
+    }
+    @Test(enabled = !DEBUG, dataProvider = "unmappedReadTests")
+    public void testIsReadUnmapped(SAMRecord read, boolean expected) {
+        Assert.assertEquals(AlignmentUtils.isReadUnmapped(read), expected);
+    }
+
+    /** Creates a read with no reference index and no alignment start. */
+    private SAMRecord createUnmappedRead(String name) {
+        return ArtificialSAMUtils.createArtificialRead(
+                header,
+                name,
+                SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX,
+                SAMRecord.NO_ALIGNMENT_START,
+                ArtificialSAMUtils.DEFAULT_READ_LENGTH);
+    }
+
+    /** Creates a read aligned to the first contig (index 0) at the given start. */
+    private SAMRecord createMappedRead(String name, int start) {
+        return ArtificialSAMUtils.createArtificialRead(
+                header,
+                name,
+                0,
+                start,
+                ArtificialSAMUtils.DEFAULT_READ_LENGTH);
+    }
+
+    /**
+     * Builds every cigar of 1-3 elements where each element is any operator
+     * with length 0 or 10 (repeats allowed) — the shared input for the
+     * cigar-walking data providers below.
+     */
+    private final List<List<CigarElement>> makeCigarElementCombinations() {
+        // this functionality can be adapted to provide input data for whatever you might want in your data
+        final List<CigarElement> cigarElements = new LinkedList<CigarElement>();
+        for ( final int size : Arrays.asList(0, 10) ) {
+            for ( final CigarOperator op : CigarOperator.values() ) {
+                cigarElements.add(new CigarElement(size, op));
+            }
+        }
+
+        final List<List<CigarElement>> combinations = new LinkedList<List<CigarElement>>();
+        for ( final int nElements : Arrays.asList(1, 2, 3) ) {
+            // allowRepeats=true: the same element may appear more than once.
+            combinations.addAll(Utils.makePermutations(cigarElements, nElements, true));
+        }
+
+        return combinations;
+    }
+
+
+    /**
+     * Cigar / reference / read triples with hand-counted base differences;
+     * inserted and deleted bases each count toward the difference total.
+     */
+    @DataProvider(name = "CalcNumDifferentBasesData")
+    public Object[][] makeCalcNumDifferentBasesData() {
+        List<Object[]> tests = new ArrayList<Object[]>();
+
+        tests.add(new Object[]{"5M", "ACGTA", "ACGTA", 0});
+        tests.add(new Object[]{"5M", "ACGTA", "ACGTT", 1});
+        tests.add(new Object[]{"5M", "ACGTA", "TCGTT", 2});
+        tests.add(new Object[]{"5M", "ACGTA", "TTGTT", 3});
+        tests.add(new Object[]{"5M", "ACGTA", "TTTTT", 4});
+        tests.add(new Object[]{"5M", "ACGTA", "TTTCT", 5});
+        tests.add(new Object[]{"2M3I3M", "ACGTA", "ACNNNGTA", 3});
+        tests.add(new Object[]{"2M3I3M", "ACGTA", "ACNNNGTT", 4});
+        tests.add(new Object[]{"2M3I3M", "ACGTA", "TCNNNGTT", 5});
+        tests.add(new Object[]{"2M2D1M", "ACGTA", "ACA", 2});
+        tests.add(new Object[]{"2M2D1M", "ACGTA", "ACT", 3});
+        tests.add(new Object[]{"2M2D1M", "ACGTA", "TCT", 4});
+        tests.add(new Object[]{"2M2D1M", "ACGTA", "TGT", 5});
+
+        return tests.toArray(new Object[][]{});
+    }
+
+    @Test(enabled = true, dataProvider = "CalcNumDifferentBasesData")
+    public void testCalcNumDifferentBases(final String cigarString, final String ref, final String read, final int expectedDifferences) {
+        final Cigar cigar = TextCigarCodec.decode(cigarString);
+        Assert.assertEquals(AlignmentUtils.calcNumDifferentBases(cigar, ref.getBytes(), read.getBytes()), expectedDifferences);
+    }
+
+    /** Expected count = total length of M/EQ/X/S elements (soft clips count as aligned here). */
+    @DataProvider(name = "NumAlignedBasesCountingSoftClips")
+    public Object[][] makeNumAlignedBasesCountingSoftClips() {
+        List<Object[]> tests = new ArrayList<Object[]>();
+
+        final EnumSet<CigarOperator> alignedToGenome = EnumSet.of(CigarOperator.M, CigarOperator.EQ, CigarOperator.X, CigarOperator.S);
+        for ( final List<CigarElement> elements : makeCigarElementCombinations() ) {
+            int n = 0;
+            for ( final CigarElement elt : elements ) n += alignedToGenome.contains(elt.getOperator()) ? elt.getLength() : 0;
+            tests.add(new Object[]{new Cigar(elements), n});
+        }
+
+        // A null cigar must be handled gracefully and count as 0.
+        tests.add(new Object[]{null, 0});
+
+        return tests.toArray(new Object[][]{});
+    }
+
+    @Test(enabled = !DEBUG, dataProvider = "NumAlignedBasesCountingSoftClips")
+    public void testNumAlignedBasesCountingSoftClips(final Cigar cigar, final int expected) {
+        final GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(header, "myRead", 0, 1, cigar == null ? 10 : cigar.getReadLength());
+        read.setCigar(cigar);
+        Assert.assertEquals(AlignmentUtils.getNumAlignedBasesCountingSoftClips(read), expected, "Cigar " + cigar + " failed NumAlignedBasesCountingSoftClips");
+    }
+
+    /** Pairs each combination cigar with whether it contains any zero-length element. */
+    @DataProvider(name = "CigarHasZeroElement")
+    public Object[][] makeCigarHasZeroElement() {
+        final List<Object[]> tests = new ArrayList<Object[]>();
+
+        for ( final List<CigarElement> elements : makeCigarElementCombinations() ) {
+            boolean containsZero = false;
+            for ( final CigarElement element : elements ) {
+                if ( element.getLength() == 0 ) {
+                    containsZero = true;
+                    break;
+                }
+            }
+            tests.add(new Object[]{new Cigar(elements), containsZero});
+        }
+
+        return tests.toArray(new Object[][]{});
+    }
+
+    @Test(enabled = !DEBUG, dataProvider = "CigarHasZeroElement")
+    public void testCigarHasZeroSize(final Cigar cigar, final boolean hasZero) {
+        // cigarHasZeroSizeElement must agree with the provider's hand computation.
+        Assert.assertEquals(AlignmentUtils.cigarHasZeroSizeElement(cigar), hasZero, "Cigar " + cigar.toString() + " failed cigarHasZeroSizeElement");
+    }
+
+    /** Expected count = total length of H (hard clip) elements; null cigar counts as 0. */
+    @DataProvider(name = "NumHardClipped")
+    public Object[][] makeNumHardClipped() {
+        List<Object[]> tests = new ArrayList<Object[]>();
+
+        for ( final List<CigarElement> elements : makeCigarElementCombinations() ) {
+            int n = 0;
+            for ( final CigarElement elt : elements ) n += elt.getOperator() == CigarOperator.H ? elt.getLength() : 0;
+            tests.add(new Object[]{new Cigar(elements), n});
+        }
+
+        tests.add(new Object[]{null, 0});
+
+        return tests.toArray(new Object[][]{});
+    }
+
+    @Test(enabled = !DEBUG, dataProvider = "NumHardClipped")
+    public void testNumHardClipped(final Cigar cigar, final int expected) {
+        final GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(header, "myRead", 0, 1, cigar == null ? 10 : cigar.getReadLength());
+        read.setCigar(cigar);
+        Assert.assertEquals(AlignmentUtils.getNumHardClippedBases(read), expected, "Cigar " + cigar + " failed num hard clips");
+    }
+
+    /** Expected count = number of M/X/EQ elements (alignment blocks), not their lengths. */
+    @DataProvider(name = "NumAlignedBlocks")
+    public Object[][] makeNumAlignedBlocks() {
+        List<Object[]> tests = new ArrayList<Object[]>();
+
+        for ( final List<CigarElement> elements : makeCigarElementCombinations() ) {
+            int n = 0;
+            for ( final CigarElement elt : elements ) {
+                switch ( elt.getOperator() ) {
+                    case M:case X:case EQ: n++; break;
+                    default: break;
+                }
+            }
+            tests.add(new Object[]{new Cigar(elements), n});
+        }
+
+        // A null cigar must be handled gracefully and count as 0.
+        tests.add(new Object[]{null, 0});
+
+        return tests.toArray(new Object[][]{});
+    }
+
+    @Test(enabled = !DEBUG, dataProvider = "NumAlignedBlocks")
+    public void testNumAlignedBlocks(final Cigar cigar, final int expected) {
+        final GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(header, "myRead", 0, 1, cigar == null ? 10 : cigar.getReadLength());
+        read.setCigar(cigar);
+        Assert.assertEquals(AlignmentUtils.getNumAlignmentBlocks(read), expected, "Cigar " + cigar + " failed NumAlignedBlocks");
+    }
+
+    /**
+     * Pairs of (unconsolidated cigar, expected consolidated cigar): adjacent
+     * same-operator elements merge and zero-length elements are dropped.
+     */
+    @DataProvider(name = "ConsolidateCigarData")
+    public Object[][] makeConsolidateCigarData() {
+        List<Object[]> tests = new ArrayList<Object[]>();
+
+        // this functionality can be adapted to provide input data for whatever you might want in your data
+        tests.add(new Object[]{"1M1M", "2M"});
+        tests.add(new Object[]{"2M", "2M"});
+        tests.add(new Object[]{"2M0M", "2M"});
+        tests.add(new Object[]{"0M2M", "2M"});
+        tests.add(new Object[]{"0M2M0M0I0M1M", "3M"});
+        tests.add(new Object[]{"2M0M1M", "3M"});
+        tests.add(new Object[]{"1M1M1M1D2M1M", "3M1D3M"});
+        tests.add(new Object[]{"6M6M6M", "18M"});
+
+        // Systematic cases: give each operator a distinct length 1..N ...
+        final List<CigarElement> elements = new LinkedList<CigarElement>();
+        int i = 1;
+        for ( final CigarOperator op : CigarOperator.values() ) {
+            elements.add(new CigarElement(i++, op));
+        }
+        for ( final List<CigarElement> ops : Utils.makePermutations(elements,  3, false) ) {
+            final String expected = new Cigar(ops).toString();
+            // ... then explode every element into length-1 pieces; consolidating
+            // the exploded cigar must reproduce the original exactly.
+            final List<CigarElement> cutElements = new LinkedList<CigarElement>();
+            for ( final CigarElement elt : ops ) {
+                for ( int j = 0; j < elt.getLength(); j++ ) {
+                    cutElements.add(new CigarElement(1, elt.getOperator()));
+                }
+            }
+
+            final String actual = new Cigar(cutElements).toString();
+            tests.add(new Object[]{actual, expected});
+        }
+
+        return tests.toArray(new Object[][]{});
+    }
+
+    @Test(enabled = !DEBUG, dataProvider = "ConsolidateCigarData")
+    public void testConsolidateCigarWithData(final String testCigarString, final String expectedCigarString) {
+        final Cigar testCigar = TextCigarCodec.decode(testCigarString);
+        final Cigar expectedCigar = TextCigarCodec.decode(expectedCigarString);
+        final Cigar actualCigar = AlignmentUtils.consolidateCigar(testCigar);
+        Assert.assertEquals(actualCigar, expectedCigar);
+    }
+
+    /**
+     * Builds reads with soft clips on either side of an M or D middle element;
+     * the expected answer is the number of soft-clipped bases whose quality is
+     * strictly above the threshold (computed here by direct counting).
+     */
+    @DataProvider(name = "SoftClipsDataProvider")
+    public Object[][] makeSoftClipsDataProvider() {
+        List<Object[]> tests = new ArrayList<Object[]>();
+
+        // this functionality can be adapted to provide input data for whatever you might want in your data
+        for ( final int lengthOfLeftClip : Arrays.asList(0, 1, 10) ) {
+            for ( final int lengthOfRightClip : Arrays.asList(0, 1, 10) ) {
+                for ( final int qualThres : Arrays.asList(10, 20, 30) ) {
+                    for ( final String middleOp : Arrays.asList("M", "D") ) {
+                        for ( final int matchSize : Arrays.asList(0, 1, 10) ) {
+                            final byte[] left = makeQualArray(lengthOfLeftClip, qualThres);
+                            final byte[] right = makeQualArray(lengthOfRightClip, qualThres);
+                            // Count clip quals strictly above threshold on both sides.
+                            int n = 0;
+                            for ( int i = 0; i < left.length; i++ ) n += left[i] > qualThres ? 1 : 0;
+                            for ( int i = 0; i < right.length; i++ ) n += right[i] > qualThres ? 1 : 0;
+                            tests.add(new Object[]{left, matchSize, middleOp, right, qualThres, n});
+                        }
+                    }
+                }
+            }
+        }
+
+        return tests.toArray(new Object[][]{});
+    }
+
+    /** Quals alternating one above / one below the threshold (never equal to it). */
+    private byte[] makeQualArray(final int length, final int qualThreshold) {
+        final byte[] array = new byte[length];
+        for ( int i = 0; i < array.length; i++ )
+            array[i] = (byte)(qualThreshold + ( i % 2 == 0 ? 1 : - 1 ));
+        return array;
+    }
+
+    @Test(enabled = !DEBUG, dataProvider = "SoftClipsDataProvider")
+    public void testSoftClipsData(final byte[] qualsOfSoftClipsOnLeft, final int middleSize, final String middleOp, final byte[] qualOfSoftClipsOnRight, final int qualThreshold, final int numExpected) {
+        // A deletion consumes no read bases, so it contributes 0 to read length.
+        final int readLength = (middleOp.equals("D") ? 0 : middleSize) + qualOfSoftClipsOnRight.length + qualsOfSoftClipsOnLeft.length;
+        final GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(header, "myRead", 0, 1, readLength);
+        final byte[] bases = Utils.dupBytes((byte) 'A', readLength);
+        final byte[] matchBytes = middleOp.equals("D") ? new byte[]{} : Utils.dupBytes((byte)30, middleSize);
+        final byte[] quals = ArrayUtils.addAll(ArrayUtils.addAll(qualsOfSoftClipsOnLeft, matchBytes), qualOfSoftClipsOnRight);
+
+        // set the read's bases and quals
+        read.setReadBases(bases);
+        read.setBaseQualities(quals);
+
+        // Assemble e.g. "10S5M10S", omitting any zero-length piece.
+        final StringBuilder cigar = new StringBuilder();
+        if (qualsOfSoftClipsOnLeft.length > 0 ) cigar.append(qualsOfSoftClipsOnLeft.length + "S");
+        if (middleSize > 0 ) cigar.append(middleSize + middleOp);
+        if (qualOfSoftClipsOnRight.length > 0 ) cigar.append(qualOfSoftClipsOnRight.length + "S");
+
+        read.setCigarString(cigar.toString());
+
+        final int actual = AlignmentUtils.calcNumHighQualitySoftClips(read, (byte) qualThreshold);
+        Assert.assertEquals(actual, numExpected, "Wrong number of soft clips detected for read " + read.getSAMString());
+    }
+
+    ////////////////////////////////////////////
+    // Test AlignmentUtils.getMismatchCount() //
+    ////////////////////////////////////////////
+
+    /**
+     * Builds reads with an optional single mismatch at various positions over
+     * soft-clip / first-M / indel cigar layouts, and computes from first
+     * principles whether getMismatchCount should report that mismatch within
+     * the [startOnRead, startOnRead + basesToRead) window.
+     */
+    @DataProvider(name = "MismatchCountDataProvider")
+    public Object[][] makeMismatchCountDataProvider() {
+        List<Object[]> tests = new ArrayList<Object[]>();
+
+        final int readLength = 20;
+        final int lengthOfIndel = 2;
+        final int locationOnReference = 10;
+        final byte[] reference = Utils.dupBytes((byte)'A', readLength);
+        final byte[] quals = Utils.dupBytes((byte)'A', readLength);
+
+
+        for ( int startOnRead = 0; startOnRead <= readLength; startOnRead++ ) {
+            for ( int basesToRead = 0; basesToRead <= readLength; basesToRead++ ) {
+                for ( final int lengthOfSoftClip : Arrays.asList(0, 1, 10) ) {
+                    for ( final int lengthOfFirstM : Arrays.asList(0, 3) ) {
+                        for ( final char middleOp : Arrays.asList('M', 'D', 'I') ) {
+                            for ( final int mismatchLocation : Arrays.asList(-1, 0, 5, 10, 15, 19) ) {
+
+                                final GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(header, "myRead", 0, locationOnReference, readLength);
+
+                                // set the read's bases and quals
+                                final byte[] readBases = reference.clone();
+                                // create the mismatch if requested (-1 means no mismatch)
+                                if ( mismatchLocation != -1 )
+                                    readBases[mismatchLocation] = (byte)'C';
+                                read.setReadBases(readBases);
+                                read.setBaseQualities(quals);
+
+                                // create the CIGAR string
+                                read.setCigarString(buildTestCigarString(middleOp, lengthOfSoftClip, lengthOfFirstM, lengthOfIndel, readLength));
+
+                                // now, determine whether or not there's a mismatch:
+                                // outside the window or inside the soft clip -> never counted;
+                                // inside an insertion's bases -> not counted either.
+                                final boolean isMismatch;
+                                if ( mismatchLocation < startOnRead || mismatchLocation >= startOnRead + basesToRead || mismatchLocation < lengthOfSoftClip ) {
+                                    isMismatch = false;
+                                } else if ( middleOp == 'M' || middleOp == 'D' || mismatchLocation < lengthOfSoftClip + lengthOfFirstM || mismatchLocation >= lengthOfSoftClip + lengthOfFirstM + lengthOfIndel ) {
+                                    isMismatch = true;
+                                } else {
+                                    isMismatch = false;
+                                }
+
+                                tests.add(new Object[]{read, locationOnReference, startOnRead, basesToRead, isMismatch});
+                            }
+                        }
+                    }
+                }
+            }
+        }
+
+        // Adding test to make sure soft-clipped reads go through the exceptions thrown at the beginning of the getMismatchCount method
+        // todo: incorporate cigars with right-tail soft-clips in the systematic tests above.
+        GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(header, "myRead", 0, 10, 20);
+        read.setReadBases(reference);
+        read.setBaseQualities(quals);
+        read.setCigarString("10S5M5S");
+        tests.add(new Object[]{read, 10, read.getAlignmentStart(), read.getReadLength(), false});
+
+        return tests.toArray(new Object[][]{});
+    }
+
+    @Test(enabled = !DEBUG, dataProvider = "MismatchCountDataProvider")
+    public void testMismatchCountData(final GATKSAMRecord read, final int refIndex, final int startOnRead, final int basesToRead, final boolean isMismatch) {
+        final byte[] reference = Utils.dupBytes((byte)'A', 100);
+        final int actual = AlignmentUtils.getMismatchCount(read, reference, refIndex, startOnRead, basesToRead).numMismatches;
+        Assert.assertEquals(actual, isMismatch ? 1 : 0, "Wrong number of mismatches detected for read " + read.getSAMString());
+    }
+
+    /**
+     * Builds a cigar of the shape [S][firstM][D|I]M (or just [S]M when
+     * middleOp=='M') whose element lengths sum to readLength in read bases.
+     * Deletions consume no read bases, so 'D' does not reduce the remainder,
+     * while soft clips, first-M and insertions do.
+     */
+    private static String buildTestCigarString(final char middleOp, final int lengthOfSoftClip, final int lengthOfFirstM, final int lengthOfIndel, final int readLength) {
+        final StringBuilder cigar = new StringBuilder();
+        int remainingLength = readLength;
+
+        // add soft clips to the beginning of the read
+        if (lengthOfSoftClip > 0 ) {
+            cigar.append(lengthOfSoftClip).append("S");
+            remainingLength -= lengthOfSoftClip;
+        }
+
+        if ( middleOp == 'M' ) {
+            cigar.append(remainingLength).append("M");
+        } else {
+            if ( lengthOfFirstM > 0 ) {
+                cigar.append(lengthOfFirstM).append("M");
+                remainingLength -= lengthOfFirstM;
+            }
+
+            if ( middleOp == 'D' ) {
+                // Deletion: reference-only, so the read-base budget is unchanged.
+                cigar.append(lengthOfIndel).append("D");
+            } else {
+                cigar.append(lengthOfIndel).append("I");
+                remainingLength -= lengthOfIndel;
+            }
+            cigar.append(remainingLength).append("M");
+        }
+
+        return cigar.toString();
+    }
+
+    ////////////////////////////////////////////////////////
+    // Test AlignmentUtils.calcAlignmentByteArrayOffset() //
+    ////////////////////////////////////////////////////////
+
+    /**
+     * For each cigar layout and read offset, computes from first principles the
+     * alignment-array offset that calcAlignmentByteArrayOffset should return:
+     * clipped bases map to 0, matches shift by the clip length, and indels
+     * shift by +/- the indel length depending on I vs D.
+     */
+    @DataProvider(name = "AlignmentByteArrayOffsetDataProvider")
+    public Object[][] makeAlignmentByteArrayOffsetDataProvider() {
+        List<Object[]> tests = new ArrayList<Object[]>();
+
+        final int readLength = 20;
+        final int lengthOfIndel = 2;
+        final int locationOnReference = 20;
+
+        for ( int offset = 0; offset < readLength; offset++ ) {
+            for ( final int lengthOfSoftClip : Arrays.asList(0, 1, 10) ) {
+                for ( final int lengthOfFirstM : Arrays.asList(0, 3) ) {
+                    for ( final char middleOp : Arrays.asList('M', 'D', 'I') ) {
+
+                        final GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(header, "myRead", 0, locationOnReference, readLength);
+                        // create the CIGAR string
+                        read.setCigarString(buildTestCigarString(middleOp, lengthOfSoftClip, lengthOfFirstM, lengthOfIndel, readLength));
+
+                        // now, determine the expected alignment offset
+                        final int expected;
+                        boolean isDeletion = false;
+                        if ( offset < lengthOfSoftClip ) {
+                            // Inside the soft clip: clamps to the array start.
+                            expected = 0;
+                        } else if ( middleOp == 'M' || offset < lengthOfSoftClip + lengthOfFirstM ) {
+                            expected = offset - lengthOfSoftClip;
+                        } else if ( offset < lengthOfSoftClip + lengthOfFirstM + lengthOfIndel ) {
+                            if ( middleOp == 'D' ) {
+                                isDeletion = true;
+                                expected = offset - lengthOfSoftClip;
+                            } else {
+                                // Inside an insertion: pinned to the last aligned position.
+                                expected = lengthOfFirstM;
+                            }
+                        } else {
+                            expected = offset - lengthOfSoftClip - (middleOp == 'I' ? lengthOfIndel : -lengthOfIndel);
+                        }
+
+                        tests.add(new Object[]{read.getCigar(), offset, expected, isDeletion, lengthOfSoftClip});
+                    }
+                }
+            }
+        }
+
+        return tests.toArray(new Object[][]{});
+    }
+
+    @Test(enabled = !DEBUG, dataProvider = "AlignmentByteArrayOffsetDataProvider")
+    public void testAlignmentByteArrayOffsetData(final Cigar cigar, final int offset, final int expectedResult, final boolean isDeletion, final int lengthOfSoftClip) {
+        final int actual = AlignmentUtils.calcAlignmentByteArrayOffset(cigar, isDeletion ? -1 : offset, isDeletion, 20, 20 + offset - lengthOfSoftClip);
+        Assert.assertEquals(actual, expectedResult, "Wrong alignment offset detected for cigar " + cigar.toString());
+    }
+
+    ////////////////////////////////////////////////////
+    // Test AlignmentUtils.readToAlignmentByteArray() //
+    ////////////////////////////////////////////////////
+
+    /**
+     * Builds cases for readToAlignmentByteArray(): 20bp reads with an optional leading
+     * soft clip, an initial match block of 0 or 3 bases, and a middle M/D/I operator of
+     * length 2.  Each case carries the expected alignment-array length plus where the
+     * indel bases start (so the test can check the marker bytes).
+     */
+    @DataProvider(name = "ReadToAlignmentByteArrayDataProvider")
+    public Object[][] makeReadToAlignmentByteArrayDataProvider() {
+        List<Object[]> tests = new ArrayList<Object[]>();
+
+        final int readLength = 20;
+        final int lengthOfIndel = 2;
+        final int locationOnReference = 20;
+
+        for ( final int lengthOfSoftClip : Arrays.asList(0, 1, 10) ) {
+            for ( final int lengthOfFirstM : Arrays.asList(0, 3) ) {
+                for ( final char middleOp : Arrays.asList('M', 'D', 'I') ) {
+
+                    final GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(header, "myRead", 0, locationOnReference, readLength);
+                    // create the CIGAR string
+                    read.setCigarString(buildTestCigarString(middleOp, lengthOfSoftClip, lengthOfFirstM, lengthOfIndel, readLength));
+
+                    // now, determine the byte array size: soft-clipped bases are dropped,
+                    // inserted bases are dropped, and deleted positions are added back
+                    final int expected = readLength - lengthOfSoftClip - (middleOp == 'I' ? lengthOfIndel : (middleOp == 'D' ? -lengthOfIndel : 0));
+                    final int indelBasesStart = middleOp != 'M' ? lengthOfFirstM : -1;  // -1 == no indel in this read
+
+                    tests.add(new Object[]{read.getCigar(), expected, middleOp, indelBasesStart, lengthOfIndel});
+                }
+            }
+        }
+
+        return tests.toArray(new Object[][]{});
+    }
+
+    /**
+     * Verifies readToAlignmentByteArray(): the returned array must have the expected
+     * length and carry the special deletion / followed-by-insertion marker bytes at the
+     * indel site; every other position keeps the read base ('A').
+     */
+    @Test(enabled = !DEBUG, dataProvider = "ReadToAlignmentByteArrayDataProvider")
+    public void testReadToAlignmentByteArrayData(final Cigar cigar, final int expectedLength, final char middleOp, final int startOfIndelBases, final int lengthOfDeletion) {
+        final byte[] read = Utils.dupBytes((byte)'A', cigar.getReadLength());
+        final byte[] alignment = AlignmentUtils.readToAlignmentByteArray(cigar, read);
+
+        Assert.assertEquals(alignment.length, expectedLength, "Wrong alignment length detected for cigar " + cigar.toString());
+
+        for ( int i = 0; i < alignment.length; i++ ) {
+            final byte expectedBase;
+            if ( middleOp == 'D' && i >= startOfIndelBases && i < startOfIndelBases + lengthOfDeletion )
+                expectedBase = PileupElement.DELETION_BASE;                 // deleted positions get the deletion marker
+            else if ( middleOp == 'I' && i == startOfIndelBases - 1 )
+                expectedBase = PileupElement.A_FOLLOWED_BY_INSERTION_BASE;  // base immediately before the insertion
+            else
+                expectedBase = (byte)'A';
+            Assert.assertEquals(alignment[i], expectedBase, "Wrong base detected at position " + i);
+        }
+    }
+
+    //////////////////////////////////////////
+    // Test AlignmentUtils.leftAlignIndel() //
+    //////////////////////////////////////////
+
+
+
+    /**
+     * Builds cases for leftAlignIndel(): indels of size 0-4 placed at every position of
+     * three references whose middle section (starting at index 16) repeats a 1-, 2-, or
+     * 3-base unit (X..., XYXY..., XYZXYZ...).  The expected left-aligned cigars come
+     * from the makeExpectedCigar1/2/3 helpers below.
+     */
+    @DataProvider(name = "LeftAlignIndelDataProvider")
+    public Object[][] makeLeftAlignIndelDataProvider() {
+        List<Object[]> tests = new ArrayList<Object[]>();
+
+        // 16 unique leading bases, then the repeat region, then unique bases again
+        final byte[] repeat1Reference = "ABCDEFGHIJKLMNOPXXXXXXXXXXABCDEFGHIJKLMNOP".getBytes();
+        final byte[] repeat2Reference = "ABCDEFGHIJKLMNOPXYXYXYXYXYABCDEFGHIJKLMNOP".getBytes();
+        final byte[] repeat3Reference = "ABCDEFGHIJKLMNOPXYZXYZXYZXYZABCDEFGHIJKLMN".getBytes();
+        final int referenceLength = repeat1Reference.length;
+
+        for ( int indelStart = 0; indelStart < repeat1Reference.length; indelStart++ ) {
+            for ( final int indelSize : Arrays.asList(0, 1, 2, 3, 4) ) {
+                for ( final char indelOp : Arrays.asList('D', 'I') ) {
+
+                    // a deletion must not run off the end of the reference
+                    if ( indelOp == 'D' && indelStart + indelSize >= repeat1Reference.length )
+                        continue;
+
+                    final int readLength = referenceLength - (indelOp == 'D' ? indelSize : -indelSize);
+
+                    // create the original CIGAR string
+                    final GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(header, "myRead", 0, 1, readLength);
+                    read.setCigarString(buildTestCigarString(indelSize == 0 ? 'M' : indelOp, 0, indelStart, indelSize, readLength));
+                    final Cigar originalCigar = read.getCigar();
+
+                    final Cigar expectedCigar1 = makeExpectedCigar1(originalCigar, indelOp, indelStart, indelSize, readLength);
+                    final byte[] readString1 = makeReadString(repeat1Reference, indelOp, indelStart, indelSize, readLength, 1);
+                    tests.add(new Object[]{originalCigar, expectedCigar1, repeat1Reference, readString1, 1});
+
+                    final Cigar expectedCigar2 = makeExpectedCigar2(originalCigar, indelOp, indelStart, indelSize, readLength);
+                    final byte[] readString2 = makeReadString(repeat2Reference, indelOp, indelStart, indelSize, readLength, 2);
+                    tests.add(new Object[]{originalCigar, expectedCigar2, repeat2Reference, readString2, 2});
+
+                    final Cigar expectedCigar3 = makeExpectedCigar3(originalCigar, indelOp, indelStart, indelSize, readLength);
+                    final byte[] readString3 = makeReadString(repeat3Reference, indelOp, indelStart, indelSize, readLength, 3);
+                    tests.add(new Object[]{originalCigar, expectedCigar3, repeat3Reference, readString3, 3});
+                }
+            }
+        }
+
+        return tests.toArray(new Object[][]{});
+    }
+
+    /**
+     * Expected left-aligned cigar against the 1-base repeat reference (10 X's at
+     * positions 16..25): any non-empty indel inside the repeat shifts to its left edge
+     * (position 16); anything outside the repeat is unchanged.
+     */
+    private Cigar makeExpectedCigar1(final Cigar originalCigar, final char indelOp, final int indelStart, final int indelSize, final int readLength) {
+        if ( indelSize == 0 || indelStart < 17 || indelStart > (26 - (indelOp == 'D' ? indelSize : 0)) )
+            return originalCigar;
+
+        final GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(header, "myRead", 0, 1, readLength);
+        read.setCigarString(buildTestCigarString(indelOp, 0, 16, indelSize, readLength));
+        return read.getCigar();
+    }
+
+    /**
+     * Expected left-aligned cigar against the 2-base repeat ("XY" x 5) reference.
+     * Indels spanning whole repeat units (size 2 or 4) shift to the repeat start (16);
+     * odd-sized insertions in phase with the repeat shift back by their own length
+     * (never past 16); everything else is unchanged.
+     */
+    private Cigar makeExpectedCigar2(final Cigar originalCigar, final char indelOp, final int indelStart, final int indelSize, final int readLength) {
+        // outside the XY-repeat region nothing can shift
+        if ( indelStart < 17 || indelStart > (26 - (indelOp == 'D' ? indelSize : 0)) )
+            return originalCigar;
+
+        final GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(header, "myRead", 0, 1, readLength);
+
+        if ( indelOp == 'I' && (indelSize == 1 || indelSize == 3) && indelStart % 2 == 1 )
+            read.setCigarString(buildTestCigarString(indelOp, 0, Math.max(indelStart - indelSize, 16), indelSize, readLength));
+        else if ( (indelSize == 2 || indelSize == 4) && (indelOp == 'D' || indelStart % 2 == 0) )
+            read.setCigarString(buildTestCigarString(indelOp, 0, 16, indelSize, readLength));
+        else
+            return originalCigar;
+
+        return read.getCigar();
+    }
+
+    /**
+     * Expected left-aligned cigar against the 3-base repeat ("XYZ" x 4) reference.
+     * Size-3 indels (one whole repeat unit) in the right phase shift to the repeat
+     * start (16); certain phase-compatible insertions shift back by their own length;
+     * everything else is unchanged.
+     */
+    private Cigar makeExpectedCigar3(final Cigar originalCigar, final char indelOp, final int indelStart, final int indelSize, final int readLength) {
+        // outside the XYZ-repeat region nothing can shift
+        if ( indelStart < 17 || indelStart > (28 - (indelOp == 'D' ? indelSize : 0)) )
+            return originalCigar;
+
+        final GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(header, "myRead", 0, 1, readLength);
+
+        if ( indelSize == 3 && (indelOp == 'D' || indelStart % 3 == 1) )
+            read.setCigarString(buildTestCigarString(indelOp, 0, 16, indelSize, readLength));
+        else if ( (indelOp == 'I' && indelSize == 4 && indelStart % 3 == 2) ||
+                (indelOp == 'I' && indelSize == 2 && indelStart % 3 == 0) ||
+                (indelOp == 'I' && indelSize == 1 && indelStart < 28 && indelStart % 3 == 2) )
+            read.setCigarString(buildTestCigarString(indelOp, 0, Math.max(indelStart - indelSize, 16), indelSize, readLength));
+        else
+            return originalCigar;
+
+        return read.getCigar();
+    }
+
+    /**
+     * Builds the read bases produced by applying the given indel to the reference:
+     * deletions drop bases, insertions splice in copies of the repeat unit
+     * (X / XY / XYZ depending on repeatLength), and size-0 indels copy the reference.
+     */
+    private static byte[] makeReadString(final byte[] reference, final char indelOp, final int indelStart, final int indelSize, final int readLength, final int repeatLength) {
+        final byte[] bases = new byte[readLength];
+
+        if ( indelSize > 0 && indelOp == 'D' ) {
+            // deletion: reference with the deleted chunk skipped
+            System.arraycopy(reference, 0, bases, 0, indelStart);
+            System.arraycopy(reference, indelStart + indelSize, bases, indelStart, readLength - indelStart);
+        } else if ( indelSize > 0 && indelOp == 'I' ) {
+            // insertion: reference prefix, then repeat-unit bases, then the rest
+            final char[] repeatChars = {'X', 'Y', 'Z'};
+            System.arraycopy(reference, 0, bases, 0, indelStart);
+            for ( int idx = 0; idx < indelSize; idx++ )
+                bases[indelStart + idx] = (byte) repeatChars[Math.min(idx % repeatLength, 2)];
+            System.arraycopy(reference, indelStart, bases, indelStart + indelSize, readLength - indelStart - indelSize);
+        } else {
+            // no indel: the read is just the reference prefix
+            System.arraycopy(reference, 0, bases, 0, readLength);
+        }
+
+        return bases;
+    }
+
+    /** Checks that leftAlignIndel() shifts each indel to the expected left-most position. */
+    @Test(enabled = !DEBUG, dataProvider = "LeftAlignIndelDataProvider")
+    public void testLeftAlignIndelData(final Cigar originalCigar, final Cigar expectedCigar, final byte[] reference, final byte[] read, final int repeatLength) {
+        final Cigar result = AlignmentUtils.leftAlignIndel(originalCigar, reference, read, 0, 0, true);
+        final String message = "Wrong left alignment detected for cigar " + originalCigar.toString()
+                + " to " + result.toString() + " but expected " + expectedCigar.toString()
+                + " with repeat length " + repeatLength;
+        Assert.assertTrue(expectedCigar.equals(result), message);
+    }
+
+    //////////////////////////////////////////
+    // Test AlignmentUtils.trimCigarByReference() //
+    //////////////////////////////////////////
+
+    /**
+     * Builds cases for trimCigarByReference(): systematic single-operator cigars with
+     * optional D/M padding on either side, cigars with an embedded insertion, and a set
+     * of hand-computed D/I cases.  Each case is {cigar, start, end, expected-cigar} in
+     * reference coordinates (inclusive).
+     */
+    @DataProvider(name = "TrimCigarData")
+    public Object[][] makeTrimCigarData() {
+        List<Object[]> tests = new ArrayList<Object[]>();
+
+        // single reference-consuming operator, optionally padded left/right
+        for ( final CigarOperator op : Arrays.asList(CigarOperator.D, CigarOperator.EQ, CigarOperator.X, CigarOperator.M) ) {
+            for ( int myLength = 1; myLength < 6; myLength++ ) {
+                // NOTE(review): start never reaches myLength - 1, so trimming that begins
+                // on the final position of the element is untested -- confirm intent.
+                for ( int start = 0; start < myLength - 1; start++ ) {
+                    for ( int end = start; end < myLength; end++ ) {
+                        final int length = end - start + 1;
+
+                        final List<CigarOperator> padOps = Arrays.asList(CigarOperator.D, CigarOperator.M);
+                        for ( final CigarOperator padOp: padOps) {
+                            for ( int leftPad = 0; leftPad < 2; leftPad++ ) {
+                                for ( int rightPad = 0; rightPad < 2; rightPad++ ) {
+                                    tests.add(new Object[]{
+                                            (leftPad > 0 ? leftPad + padOp.toString() : "") + myLength + op.toString() + (rightPad > 0 ? rightPad + padOp.toString() : ""),
+                                            start + leftPad,
+                                            end + leftPad,
+                                            length + op.toString()});
+                                }
+                            }
+                        }
+                    }
+                }
+            }
+        }
+
+        // insertions between two match blocks: the insertion itself consumes no
+        // reference, so it survives any trim whose window touches both sides
+        for ( final int leftPad : Arrays.asList(0, 1, 2, 5) ) {
+            for ( final int rightPad : Arrays.asList(0, 1, 2, 5) ) {
+                final int length = leftPad + rightPad;
+                if ( length > 0 ) {
+                    for ( final int insSize : Arrays.asList(1, 10) ) {
+                        for ( int start = 0; start <= leftPad; start++ ) {
+                            for ( int stop = leftPad; stop < length; stop++ ) {
+                                final int leftPadRemaining = leftPad - start;
+                                final int rightPadRemaining = stop - leftPad + 1;
+                                final String insC = insSize + "I";
+                                tests.add(new Object[]{
+                                        leftPad + "M" + insC + rightPad + "M",
+                                        start,
+                                        stop,
+                                        (leftPadRemaining > 0 ? leftPadRemaining + "M" : "") + insC + (rightPadRemaining > 0 ? rightPadRemaining + "M" : "")
+                                });
+                            }
+                        }
+                    }
+                }
+            }
+        }
+
+        // hand-computed deletion cases
+        tests.add(new Object[]{"3M2D4M", 0, 8, "3M2D4M"});
+        tests.add(new Object[]{"3M2D4M", 2, 8, "1M2D4M"});
+        tests.add(new Object[]{"3M2D4M", 2, 6, "1M2D2M"});
+        tests.add(new Object[]{"3M2D4M", 3, 6, "2D2M"});
+        tests.add(new Object[]{"3M2D4M", 4, 6, "1D2M"});
+        tests.add(new Object[]{"3M2D4M", 5, 6, "2M"});
+        tests.add(new Object[]{"3M2D4M", 6, 6, "1M"});
+
+        // hand-computed insertion cases
+        tests.add(new Object[]{"2M3I4M", 0, 5, "2M3I4M"});
+        tests.add(new Object[]{"2M3I4M", 1, 5, "1M3I4M"});
+        tests.add(new Object[]{"2M3I4M", 1, 4, "1M3I3M"});
+        tests.add(new Object[]{"2M3I4M", 2, 4, "3I3M"});
+        tests.add(new Object[]{"2M3I4M", 2, 3, "3I2M"});
+        tests.add(new Object[]{"2M3I4M", 2, 2, "3I1M"});
+        tests.add(new Object[]{"2M3I4M", 3, 4, "2M"});
+        tests.add(new Object[]{"2M3I4M", 3, 3, "1M"});
+        tests.add(new Object[]{"2M3I4M", 4, 4, "1M"});
+
+        // this doesn't work -- but I'm not sure it should
+        //        tests.add(new Object[]{"2M3I4M", 2, 1, "3I"});
+
+        return tests.toArray(new Object[][]{});
+    }
+
+    /** Checks trimCigarByReference() against the hand-computed expected cigars. */
+    @Test(dataProvider = "TrimCigarData", enabled = !DEBUG)
+    public void testTrimCigar(final String cigarString, final int start, final int length, final String expectedCigarString) {
+        final Cigar trimmed = AlignmentUtils.trimCigarByReference(TextCigarCodec.decode(cigarString), start, length);
+        Assert.assertEquals(trimmed, TextCigarCodec.decode(expectedCigarString));
+    }
+
+    /**
+     * Builds cases for trimCigarByBases(): here start/end are read-base coordinates
+     * (inclusive), so insertions are consumed by the trim while deletions are carried
+     * along with the neighbouring bases.
+     */
+    @DataProvider(name = "TrimCigarByBasesData")
+    public Object[][] makeTrimCigarByBasesData() {
+        List<Object[]> tests = new ArrayList<Object[]>();
+
+        tests.add(new Object[]{"2M3I4M", 0, 8, "2M3I4M"});
+        tests.add(new Object[]{"2M3I4M", 1, 8, "1M3I4M"});
+        tests.add(new Object[]{"2M3I4M", 2, 8, "3I4M"});
+        tests.add(new Object[]{"2M3I4M", 3, 8, "2I4M"});
+        tests.add(new Object[]{"2M3I4M", 4, 8, "1I4M"});
+        tests.add(new Object[]{"2M3I4M", 4, 7, "1I3M"});
+        tests.add(new Object[]{"2M3I4M", 4, 6, "1I2M"});
+        tests.add(new Object[]{"2M3I4M", 4, 5, "1I1M"});
+        tests.add(new Object[]{"2M3I4M", 4, 4, "1I"});
+        tests.add(new Object[]{"2M3I4M", 5, 5, "1M"});
+
+        // deletions stick to the surviving flanking bases
+        tests.add(new Object[]{"2M2D2I", 0, 3, "2M2D2I"});
+        tests.add(new Object[]{"2M2D2I", 1, 3, "1M2D2I"});
+        tests.add(new Object[]{"2M2D2I", 2, 3, "2D2I"});
+        tests.add(new Object[]{"2M2D2I", 3, 3, "1I"});
+        tests.add(new Object[]{"2M2D2I", 2, 2, "2D1I"});
+        tests.add(new Object[]{"2M2D2I", 1, 2, "1M2D1I"});
+        tests.add(new Object[]{"2M2D2I", 0, 1, "2M2D"});
+        tests.add(new Object[]{"2M2D2I", 1, 1, "1M2D"});
+
+        return tests.toArray(new Object[][]{});
+    }
+
+    /** Checks trimCigarByBases() against the hand-computed expected cigars. */
+    @Test(dataProvider = "TrimCigarByBasesData", enabled = !DEBUG)
+    public void testTrimCigarByBase(final String cigarString, final int start, final int length, final String expectedCigarString) {
+        final Cigar trimmed = AlignmentUtils.trimCigarByBases(TextCigarCodec.decode(cigarString), start, length);
+        Assert.assertEquals(trimmed, TextCigarCodec.decode(expectedCigarString));
+    }
+
+    //////////////////////////////////////////
+    // Test AlignmentUtils.applyCigarToCigar() //
+    //////////////////////////////////////////
+
+    /**
+     * Builds cases for applyCigarToCigar(): composing a read-to-haplotype cigar with a
+     * haplotype-to-reference cigar must give the expected read-to-reference cigar.
+     * Cases are {read->hap, hap->ref, expected read->ref}.
+     */
+    @DataProvider(name = "ApplyCigarToCigarData")
+    public Object[][] makeApplyCigarToCigarData() {
+        List<Object[]> tests = new ArrayList<Object[]>();
+
+        // identity: M composed with M is M
+        for ( int i = 1; i < 5; i++ )
+            tests.add(new Object[]{i + "M", i + "M", i + "M"});
+
+//        * ref   : ACGTAC
+//        * hap   : AC---C  - 2M3D1M
+//        * read  : AC---C  - 3M
+//        * result: AC---C => 2M3D1M
+        tests.add(new Object[]{"3M", "2M3D1M", "2M3D1M"});
+
+//        * ref   : ACxG-TA
+//        * hap   : AC-G-TA  - 2M1D3M
+//        * read  : AC-GxTA  - 3M1I2M
+//        * result: AC-GxTA => 2M1D1M1I2M
+        tests.add(new Object[]{"3M1I2M", "2M1D3M", "2M1D1M1I2M"});
+
+//        * ref   : A-CGTA
+//        * hap   : A-CGTA  - 5M
+//        * read  : AxCGTA  - 1M1I4M
+//        * result: AxCGTA => 1M1I4M
+        tests.add(new Object[]{"1M1I4M", "5M", "1M1I4M"});
+
+//        * ref   : ACGTA
+//        * hap   : ACGTA  - 5M
+//        * read  : A--TA  - 1M2D2M
+//        * result: A--TA => 1M2D2M
+        tests.add(new Object[]{"1M2D2M", "5M", "1M2D2M"});
+
+//        * ref   : AC-GTA
+//        * hap   : ACxGTA  - 2M1I3M
+//        * read  : A--GTA  - 1M2D3M
+//        * result: A--GTA => 1M1D3M
+        // NOTE(review): the data below does not match the scenario sketched above (it
+        // composes a long first cigar with 2M1I3M instead of 1M2D3M) -- confirm intent.
+        tests.add(new Object[]{"108M14D24M2M18I29M92M1000M", "2M1I3M", "2M1I3M"});
+
+        return tests.toArray(new Object[][]{});
+    }
+
+    /** Checks applyCigarToCigar(): composing read->hap with hap->ref yields the expected read->ref cigar. */
+    @Test(dataProvider = "ApplyCigarToCigarData", enabled = !DEBUG)
+    public void testApplyCigarToCigar(final String firstToSecondString, final String secondToThirdString, final String expectedCigarString) {
+        final Cigar composed = AlignmentUtils.applyCigarToCigar(
+                TextCigarCodec.decode(firstToSecondString),
+                TextCigarCodec.decode(secondToThirdString));
+        Assert.assertEquals(composed, TextCigarCodec.decode(expectedCigarString));
+    }
+
+    //////////////////////////////////////////////////////////////
+    // Test AlignmentUtils.calcFirstBaseMatchingReferenceInCigar() //
+    //////////////////////////////////////////////////////////////
+
+    /**
+     * Builds {cigar, start-on-cigar, expected-offset} triples for
+     * calcFirstBaseMatchingReferenceInCigar(): the expected value is the offset of the
+     * first base at or after the given position that matches the reference (M element).
+     */
+    @DataProvider(name = "ReadOffsetFromCigarData")
+    public Object[][] makeReadOffsetFromCigarData() {
+        List<Object[]> tests = new ArrayList<Object[]>();
+
+        // pure-match cigar: every offset maps to itself
+        final int SIZE = 10;
+        for ( int i = 0; i < SIZE; i++ ) {
+            tests.add(new Object[]{SIZE + "M", i, i});
+        }
+
+        //          0123ii45
+        // ref    : ACGT--AC
+        // hap    : AC--xxAC (2M2D2I2M)
+        // ref.pos: 01    45
+        tests.add(new Object[]{"2M2D2I2M", 0, 0});
+        tests.add(new Object[]{"2M2D2I2M", 1, 1});
+        tests.add(new Object[]{"2M2D2I2M", 2, 4});
+        tests.add(new Object[]{"2M2D2I2M", 3, 4});
+        tests.add(new Object[]{"2M2D2I2M", 4, 4});
+        tests.add(new Object[]{"2M2D2I2M", 5, 5});
+
+        // 10132723 - 10132075 - 500 = 148
+        // what's the offset of the first match after the I?
+        // 108M + 14D + 24M + 2M = 148
+        // What's the offset of the first base that is after the I?
+        // 108M + 24M + 2M + 18I = 134M + 18I = 152 - 1 = 151
+        tests.add(new Object[]{"108M14D24M2M18I29M92M", 0, 0});
+        tests.add(new Object[]{"108M14D24M2M18I29M92M", 107, 107});
+        tests.add(new Object[]{"108M14D24M2M18I29M92M", 108, 108 + 14}); // first base after the deletion
+
+        tests.add(new Object[]{"108M14D24M2M18I29M92M", 132, 132+14}); // 2 before insertion
+        tests.add(new Object[]{"108M14D24M2M18I29M92M", 133, 133+14}); // last base before insertion
+
+        // entering into the insertion
+        for ( int i = 0; i < 18; i++ ) {
+            tests.add(new Object[]{"108M14D24M2M18I29M92M", 134+i, 148}); // inside insertion
+        }
+        tests.add(new Object[]{"108M14D24M2M18I29M92M", 134+18, 148}); // first base after insertion matches at same as insertion
+        tests.add(new Object[]{"108M14D24M2M18I29M92M", 134+18+1, 149});
+        tests.add(new Object[]{"108M14D24M2M18I29M92M", 134+18+2, 150});
+
+        return tests.toArray(new Object[][]{});
+    }
+
+    /** Checks calcFirstBaseMatchingReferenceInCigar() for each (cigar, start) pair. */
+    @Test(dataProvider = "ReadOffsetFromCigarData", enabled = !DEBUG)
+    public void testReadOffsetFromCigar(final String cigarString, final int startOnCigar, final int expectedOffset) {
+        final int firstMatchingBase = AlignmentUtils.calcFirstBaseMatchingReferenceInCigar(TextCigarCodec.decode(cigarString), startOnCigar);
+        Assert.assertEquals(firstMatchingBase, expectedOffset);
+    }
+
+    //////////////////////////////////////////
+    // Test AlignmentUtils.addCigarElements() //
+    //////////////////////////////////////////
+
+    /**
+     * Builds cases for addCigarElements(): a single 10-long element placed at {@code pos}
+     * and clipped to the window [start, end].  The expected cigar has as many of the
+     * element's positions as fall inside the window, or "*" when none do.
+     */
+    @DataProvider(name = "AddCigarElementsData")
+    public Object[][] makeAddCigarElementsData() {
+        List<Object[]> tests = new ArrayList<Object[]>();
+
+        final int SIZE = 10;
+        for ( final CigarOperator op : Arrays.asList(CigarOperator.I, CigarOperator.M, CigarOperator.S, CigarOperator.EQ, CigarOperator.X)) {
+            for ( int start = 0; start < SIZE; start++ ) {
+                for ( int end = start; end < SIZE * 2; end ++ ) {
+                    for ( int pos = 0; pos < SIZE * 3; pos++ ) {
+                        // count how many of the element's SIZE positions land in [start, end]
+                        int length = 0;
+                        for ( int i = 0; i < SIZE; i++ ) length += (i+pos) >= start && (i+pos) <= end ? 1 : 0;
+                        tests.add(new Object[]{SIZE + op.toString(), pos, start, end, length > 0 ? length + op.toString() : "*"});
+                    }
+                }
+            }
+        }
+
+        return tests.toArray(new Object[][]{});
+    }
+
+    /**
+     * Verifies addCigarElements(): the returned end position must advance by the
+     * element's length, and the accumulated (consolidated) cigar must equal the
+     * expected window-clipped cigar.
+     */
+    @Test(dataProvider = "AddCigarElementsData", enabled = !DEBUG)
+    public void testAddCigarElements(final String cigarString, final int pos, final int start, final int end, final String expectedCigarString) {
+        final Cigar cigar = TextCigarCodec.decode(cigarString);
+        final CigarElement elt = cigar.getCigarElement(0);
+        final Cigar expectedCigar = TextCigarCodec.decode(expectedCigarString);
+
+        final List<CigarElement> elts = new LinkedList<CigarElement>();
+        final int actualEndPos = AlignmentUtils.addCigarElements(elts, pos, start, end, elt);
+
+        Assert.assertEquals(actualEndPos, pos + elt.getLength());
+        Assert.assertEquals(AlignmentUtils.consolidateCigar(new Cigar(elts)), expectedCigar);
+    }
+
+    /**
+     * Builds cases for getBasesCoveringRefInterval(): {bases, refStart, refEnd, cigar,
+     * expected covering bases}.  The expected value is null when the interval begins or
+     * ends inside a deletion, i.e. no read base covers that edge.
+     */
+    @DataProvider(name = "GetBasesCoveringRefIntervalData")
+    public Object[][] makeGetBasesCoveringRefIntervalData() {
+        List<Object[]> tests = new ArrayList<Object[]>();
+
+        // matches
+        // 0123
+        // ACGT
+        tests.add(new Object[]{"ACGT", 0, 3, "4M", "ACGT"});
+        tests.add(new Object[]{"ACGT", 1, 3, "4M", "CGT"});
+        tests.add(new Object[]{"ACGT", 1, 2, "4M", "CG"});
+        tests.add(new Object[]{"ACGT", 1, 1, "4M", "C"});
+
+        // deletions
+        // 012345
+        // AC--GT
+        tests.add(new Object[]{"ACGT", 0, 5, "2M2D2M", "ACGT"});
+        tests.add(new Object[]{"ACGT", 1, 5, "2M2D2M", "CGT"});
+        tests.add(new Object[]{"ACGT", 2, 5, "2M2D2M", null});   // starts inside the deletion
+        tests.add(new Object[]{"ACGT", 3, 5, "2M2D2M", null});
+        tests.add(new Object[]{"ACGT", 4, 5, "2M2D2M", "GT"});
+        tests.add(new Object[]{"ACGT", 5, 5, "2M2D2M", "T"});
+        tests.add(new Object[]{"ACGT", 0, 4, "2M2D2M", "ACG"});
+        tests.add(new Object[]{"ACGT", 0, 3, "2M2D2M", null});   // ends inside the deletion
+        tests.add(new Object[]{"ACGT", 0, 2, "2M2D2M", null});
+        tests.add(new Object[]{"ACGT", 0, 1, "2M2D2M", "AC"});
+        tests.add(new Object[]{"ACGT", 0, 0, "2M2D2M", "A"});
+
+        // insertions
+        // 01--23
+        // ACTTGT
+        tests.add(new Object[]{"ACTTGT", 0, 3, "2M2I2M", "ACTTGT"});
+        tests.add(new Object[]{"ACTTGT", 1, 3, "2M2I2M", "CTTGT"});
+        tests.add(new Object[]{"ACTTGT", 2, 3, "2M2I2M", "GT"});
+        tests.add(new Object[]{"ACTTGT", 3, 3, "2M2I2M", "T"});
+        tests.add(new Object[]{"ACTTGT", 0, 2, "2M2I2M", "ACTTG"});
+        tests.add(new Object[]{"ACTTGT", 0, 1, "2M2I2M", "AC"});
+        tests.add(new Object[]{"ACTTGT", 1, 2, "2M2I2M", "CTTG"});
+        tests.add(new Object[]{"ACTTGT", 2, 2, "2M2I2M", "G"});
+        tests.add(new Object[]{"ACTTGT", 1, 1, "2M2I2M", "C"});
+
+        // trailing insertion consumes no reference
+        tests.add(new Object[]{"ACGT", 0, 1, "2M2I", "AC"});
+        tests.add(new Object[]{"ACGT", 1, 1, "2M2I", "C"});
+        tests.add(new Object[]{"ACGT", 0, 0, "2M2I", "A"});
+
+        return tests.toArray(new Object[][]{});
+    }
+
+    @Test(dataProvider = "GetBasesCoveringRefIntervalData", enabled = true)
+    public void testGetBasesCoveringRefInterval(final String basesString, final int refStart, final int refEnd, final String cigarString, final String expected) {
+        final byte[] actualBytes = AlignmentUtils.getBasesCoveringRefInterval(refStart, refEnd, basesString.getBytes(), 0, TextCigarCodec.decode(cigarString));
+        if ( expected == null )
+            Assert.assertNull(actualBytes);
+        else
+            Assert.assertEquals(new String(actualBytes), expected);
+    }
+
+    /**
+     * Cigar strings paired with whether they start or end with an insertion or deletion.
+     * Shared by testStartsOrEndsWithInsertionOrDeletion and testRemoveTrailingDeletions.
+     */
+    @DataProvider(name = "StartsOrEndsWithInsertionOrDeletionData")
+    public Object[][] makeStartsOrEndsWithInsertionOrDeletionData() {
+        List<Object[]> tests = new ArrayList<Object[]>();
+
+        tests.add(new Object[]{"2M", false});
+        tests.add(new Object[]{"1D2M", true});
+        tests.add(new Object[]{"2M1D", true});
+        tests.add(new Object[]{"2M1I", true});
+        tests.add(new Object[]{"1I2M", true});
+        tests.add(new Object[]{"1M1I2M", false});   // internal indels don't count
+        tests.add(new Object[]{"1M1D2M", false});
+        tests.add(new Object[]{"1M1I2M1I", true});
+        tests.add(new Object[]{"1M1I2M1D", true});
+        tests.add(new Object[]{"1D1M1I2M", true});
+        tests.add(new Object[]{"1I1M1I2M", true});
+        tests.add(new Object[]{"1M1I2M1I1M", false});
+        tests.add(new Object[]{"1M1I2M1D1M", false});
+        tests.add(new Object[]{"1M1D2M1D1M", false});
+
+        return tests.toArray(new Object[][]{});
+    }
+
+    /** Checks startsOrEndsWithInsertionOrDeletion() against the expected flag. */
+    @Test(dataProvider = "StartsOrEndsWithInsertionOrDeletionData", enabled = true)
+    public void testStartsOrEndsWithInsertionOrDeletion(final String cigar, final boolean expected) {
+        final Cigar decoded = TextCigarCodec.decode(cigar);
+        final boolean result = AlignmentUtils.startsOrEndsWithInsertionOrDeletion(decoded);
+        Assert.assertEquals(result, expected);
+    }
+
+    /**
+     * Verifies removeTrailingDeletions(): the cigar must change if and only if it ends
+     * in a deletion, and the result must never itself end in a deletion.  The
+     * {@code expected} parameter comes from the shared data provider and is unused here.
+     */
+    @Test(dataProvider = "StartsOrEndsWithInsertionOrDeletionData", enabled = true)
+    public void testRemoveTrailingDeletions(final String cigar, final boolean expected) {
+
+        final Cigar originalCigar = TextCigarCodec.decode(cigar);
+        final Cigar newCigar = AlignmentUtils.removeTrailingDeletions(originalCigar);
+
+        // the operation is a no-op exactly when there is no trailing deletion to remove
+        Assert.assertEquals(originalCigar.equals(newCigar), !cigar.endsWith("D"));
+
+        // stronger check (previously missing): the result must actually be free of
+        // trailing deletions, not merely different from the input
+        if ( newCigar.numCigarElements() > 0 ) {
+            final CigarOperator lastOp = newCigar.getCigarElement(newCigar.numCigarElements() - 1).getOperator();
+            Assert.assertFalse(lastOp == CigarOperator.D, "Trailing deletion still present for input " + cigar);
+        }
+    }
+}
diff --git a/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/sam/ArtificialBAMBuilderUnitTest.java b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/sam/ArtificialBAMBuilderUnitTest.java
new file mode 100644
index 0000000..d3a7c5c
--- /dev/null
+++ b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/sam/ArtificialBAMBuilderUnitTest.java
@@ -0,0 +1,121 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.sam;
+
+import htsjdk.samtools.SAMFileReader;
+import htsjdk.samtools.SAMRecord;
+import org.broadinstitute.gatk.utils.BaseTest;
+import org.testng.Assert;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+import java.io.File;
+import java.util.Arrays;
+import java.util.Iterator;
+import java.util.LinkedList;
+import java.util.List;
+
+/**
+ * Unit tests for {@link ArtificialBAMBuilder}: verifies the builder's configuration
+ * getters, the generated in-memory reads, and round-tripping through an indexed BAM file.
+ *
+ * @author depristo (created 1/15/13)
+ */
+public class ArtificialBAMBuilderUnitTest extends BaseTest {
+    @DataProvider(name = "ArtificialBAMBuilderUnitTestProvider")
+    public Object[][] makeArtificialBAMBuilderUnitTestProvider() {
+        final List<Object[]> tests = new LinkedList<Object[]>();
+
+        final List<Integer> starts = Arrays.asList(
+                1, // very start of the chromosome
+                ArtificialBAMBuilder.BAM_SHARD_SIZE - 100, // right before the shard boundary
+                ArtificialBAMBuilder.BAM_SHARD_SIZE + 100 // right after the shard boundary
+        );
+
+        for ( final int readLength : Arrays.asList(10, 20) ) {
+            for ( final int skips : Arrays.asList(0, 1, 10) ) {
+                for ( final int start : starts ) {
+                    for ( final int nSamples : Arrays.asList(1, 2) ) {
+                        for ( final int nReadsPerLocus : Arrays.asList(1, 10) ) {
+                            for ( final int nLoci : Arrays.asList(10, 100, 1000) ) {
+                                final ArtificialBAMBuilder bamBuilder = new ArtificialBAMBuilder(nReadsPerLocus, nLoci);
+                                bamBuilder.setReadLength(readLength);
+                                bamBuilder.setSkipNLoci(skips);
+                                bamBuilder.setAlignmentStart(start);
+                                bamBuilder.createAndSetHeader(nSamples);
+                                tests.add(new Object[]{bamBuilder, readLength, skips, start, nSamples, nReadsPerLocus, nLoci});
+                            }
+                        }
+                    }
+                }
+            }
+        }
+
+        return tests.toArray(new Object[][]{});
+    }
+
+    @Test(dataProvider = "ArtificialBAMBuilderUnitTestProvider")
+    public void testBamProvider(final ArtificialBAMBuilder bamBuilder, int readLength, int skips, int start, int nSamples, int nReadsPerLocus, int nLoci) {
+        Assert.assertEquals(bamBuilder.getReadLength(), readLength);
+        Assert.assertEquals(bamBuilder.getSkipNLoci(), skips);
+        Assert.assertEquals(bamBuilder.getAlignmentStart(), start);
+        Assert.assertEquals(bamBuilder.getNSamples(), nSamples);
+        Assert.assertEquals(bamBuilder.getnReadsPerLocus(), nReadsPerLocus);
+        Assert.assertEquals(bamBuilder.getnLoci(), nLoci);
+
+        final List<GATKSAMRecord> reads = bamBuilder.makeReads();
+        Assert.assertEquals(reads.size(), bamBuilder.expectedNumberOfReads());
+        for ( final GATKSAMRecord read : reads ) {
+            assertGoodRead(read, bamBuilder);
+        }
+
+        final File bam = bamBuilder.makeTemporarilyBAMFile();
+        final SAMFileReader reader = new SAMFileReader(bam);
+        Assert.assertTrue(reader.hasIndex());
+        final Iterator<SAMRecord> bamIt = reader.iterator();
+        int nReadsFromBam = 0;
+        int lastStart = -1;
+        while ( bamIt.hasNext() ) {
+            final SAMRecord read = bamIt.next();
+            assertGoodRead(read, bamBuilder);
+            nReadsFromBam++;
+            Assert.assertTrue(read.getAlignmentStart() >= lastStart);
+            lastStart = read.getAlignmentStart();
+        }
+        Assert.assertEquals(nReadsFromBam, bamBuilder.expectedNumberOfReads());
+    }
+
+    private void assertGoodRead(final SAMRecord read, final ArtificialBAMBuilder bamBuilder) {
+        Assert.assertEquals(read.getReadLength(), bamBuilder.getReadLength());
+        Assert.assertEquals(read.getReadBases().length, bamBuilder.getReadLength());
+        Assert.assertEquals(read.getBaseQualities().length, bamBuilder.getReadLength());
+        Assert.assertTrue(read.getAlignmentStart() >= bamBuilder.getAlignmentStart());
+        Assert.assertNotNull(read.getReadGroup());
+    }
+}
+
+
diff --git a/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/sam/ArtificialPatternedSAMIteratorUnitTest.java b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/sam/ArtificialPatternedSAMIteratorUnitTest.java
new file mode 100644
index 0000000..b556c0e
--- /dev/null
+++ b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/sam/ArtificialPatternedSAMIteratorUnitTest.java
@@ -0,0 +1,122 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.sam;
+
+import org.broadinstitute.gatk.utils.BaseTest;
+import static org.testng.Assert.assertTrue;
+import static org.testng.Assert.fail;
+import org.testng.annotations.BeforeMethod;
+import org.testng.annotations.Test;
+import htsjdk.samtools.SAMFileHeader;
+import htsjdk.samtools.SAMRecord;
+
+
+/*
+ * Copyright (c) 2009 The Broad Institute
+ *
+ * Permission is hereby granted, free of charge, to any person
+ * obtaining a copy of this software and associated documentation
+ * files (the "Software"), to deal in the Software without
+ * restriction, including without limitation the rights to use,
+ * copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the
+ * Software is furnished to do so, subject to the following
+ * conditions:
+ *
+ * The above copyright notice and this permission notice shall be
+ * included in all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+ * OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+ * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+ * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+ * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+ * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+ * OTHER DEALINGS IN THE SOFTWARE.
+ */
+
+/**
+ * 
+ * @author aaron 
+ * 
+ * Class ArtificialPatternedSAMIteratorUnitTest
+ *
+ * tests ArtificialPatternedSAMIterator, making sure that if you specify in order
+ * you get reads in order, and if you specify out of order you get them out of order.  
+ */
+public class ArtificialPatternedSAMIteratorUnitTest extends BaseTest {
+
+    // our artificial patterned iterator, recreated by each test with the desired PATTERN
+    ArtificialPatternedSAMIterator iter;
+
+    // fixture: two contigs (chr1..chr2) with 100 reads each
+    private int startingChr = 1;
+    private int endingChr = 2;
+    private int readCount = 100;
+    private int DEFAULT_READ_LENGTH = ArtificialSAMUtils.DEFAULT_READ_LENGTH;
+    SAMFileHeader header;
+
+    @BeforeMethod
+    public void before() {
+        // contig length = readCount + read length so every read start fits on the contig
+        header = ArtificialSAMUtils.createArtificialSamHeader(( endingChr - startingChr ) + 1, startingChr, readCount + DEFAULT_READ_LENGTH);
+
+    }
+    /** IN_ORDER_READS must yield reads in coordinate order; any regression fails the test. */
+    @Test
+    public void testInOrder() {
+        iter = new ArtificialPatternedSAMIterator(startingChr,endingChr,readCount,0,header, ArtificialPatternedSAMIterator.PATTERN.IN_ORDER_READS);
+        if (!iter.hasNext()) {
+            fail("no reads in the ArtificialPatternedSAMIterator");
+        }
+        SAMRecord last = iter.next();
+        while (iter.hasNext()) {
+            SAMRecord rec = iter.next();
+            // out of order = not on a later contig AND start position did not advance
+            if (!(rec.getReferenceIndex() > last.getReferenceIndex()) && (rec.getAlignmentStart() <= last.getAlignmentStart())) {
+                fail("read " + rec.getReadName() + " out of order compared to last read, " + last.getReadName());
+            }
+            last = rec;
+        }
+
+    }
+    /** RANDOM_READS must yield at least one out-of-order pair, using the same predicate as above. */
+    @Test
+    public void testOutOfOrder() {
+        int outOfOrderCount = 0;
+        iter = new ArtificialPatternedSAMIterator(startingChr,endingChr,readCount,0,header, ArtificialPatternedSAMIterator.PATTERN.RANDOM_READS);
+        if (!iter.hasNext()) {
+            fail("no reads in the ArtificialPatternedSAMIterator");
+        }
+        SAMRecord last = iter.next();
+        while (iter.hasNext()) {
+            SAMRecord rec = iter.next();
+            if (!(rec.getReferenceIndex() > last.getReferenceIndex()) && (rec.getAlignmentStart() <= last.getAlignmentStart())) {
+                ++outOfOrderCount;
+            }
+            last = rec;
+        }
+        assertTrue(outOfOrderCount > 0);
+    }
+
+
+}
diff --git a/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/sam/ArtificialSAMFileWriterUnitTest.java b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/sam/ArtificialSAMFileWriterUnitTest.java
new file mode 100644
index 0000000..0df8bbf
--- /dev/null
+++ b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/sam/ArtificialSAMFileWriterUnitTest.java
@@ -0,0 +1,120 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.sam;
+
+import static org.testng.Assert.assertEquals;
+import static org.testng.Assert.assertTrue;
+import org.testng.annotations.BeforeMethod;
+import org.testng.annotations.Test;
+import org.broadinstitute.gatk.utils.BaseTest;
+import htsjdk.samtools.SAMRecord;
+import htsjdk.samtools.SAMFileHeader;
+
+import java.util.ArrayList;
+import java.util.List;
+
+
+/*
+ * Copyright (c) 2009 The Broad Institute
+ *
+ * Permission is hereby granted, free of charge, to any person
+ * obtaining a copy of this software and associated documentation
+ * files (the "Software"), to deal in the Software without
+ * restriction, including without limitation the rights to use,
+ * copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the
+ * Software is furnished to do so, subject to the following
+ * conditions:
+ *
+ * The above copyright notice and this permission notice shall be
+ * included in all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+ * OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+ * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+ * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+ * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+ * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+ * OTHER DEALINGS IN THE SOFTWARE.
+ */
+
+/**
+ * @author aaron
+ *         <p/>
+ *         Class ArtificialSAMFileWriterUnitTest
+ *         <p/>
+ *         Tests the ArtificialGATKSAMFileWriter class
+ */
+public class ArtificialSAMFileWriterUnitTest extends BaseTest {
+
+    /** the artificial sam writer under test */
+    private ArtificialGATKSAMFileWriter writer;
+    // header describing numChr artificial contigs of chrSize bases each
+    private SAMFileHeader header;
+    private final int startChr = 1;
+    private final int numChr = 2;
+    private final int chrSize = 100;
+
+    @BeforeMethod
+    public void before() {
+        // fresh writer and header per test so records never leak between tests
+        writer = new ArtificialGATKSAMFileWriter();
+        header = ArtificialSAMUtils.createArtificialSamHeader(numChr, startChr, chrSize);
+    }
+
+    /** Every alignment added must be retained: 100 adds yields 100 stored records. */
+    @Test
+    public void testBasicCount() {
+        for (int x = 1; x <= 100; x++) {
+            SAMRecord rec = ArtificialSAMUtils.createArtificialRead(header, String.valueOf(x), 1, x, ArtificialSAMUtils.DEFAULT_READ_LENGTH);
+            writer.addAlignment(rec);
+        }
+        assertEquals(writer.getRecords().size(), 100);
+
+    }
+
+    /** Records must come back in insertion order with their read names intact. */
+    @Test
+    public void testReadName() {
+        List<String> names = new ArrayList<String>();
+
+        for (int x = 1; x <= 100; x++) {
+            names.add(String.valueOf(x));
+            SAMRecord rec = ArtificialSAMUtils.createArtificialRead(header, String.valueOf(x), 1, x, ArtificialSAMUtils.DEFAULT_READ_LENGTH);
+            writer.addAlignment(rec);
+        }
+        assertEquals(writer.getRecords().size(), 100);
+
+        // check the names, in the same order they were added
+        for (int x = 0; x < 100; x++) {
+            assertTrue(names.get(x).equals(writer.getRecords().get(x).getReadName()));
+        }
+
+    }
+
+    /** close() must flip the writer's closed flag. */
+    @Test
+    public void testClose() {
+        writer.close();
+        assertTrue(writer.isClosed());
+    }
+}
diff --git a/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/sam/ArtificialSAMQueryIteratorUnitTest.java b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/sam/ArtificialSAMQueryIteratorUnitTest.java
new file mode 100644
index 0000000..19ef63f
--- /dev/null
+++ b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/sam/ArtificialSAMQueryIteratorUnitTest.java
@@ -0,0 +1,138 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.sam;
+
+import org.broadinstitute.gatk.utils.BaseTest;
+import static org.testng.Assert.assertEquals;
+import org.testng.annotations.Test;
+import htsjdk.samtools.SAMRecord;
+
+
+/*
+ * Copyright (c) 2009 The Broad Institute
+ *
+ * Permission is hereby granted, free of charge, to any person
+ * obtaining a copy of this software and associated documentation
+ * files (the "Software"), to deal in the Software without
+ * restriction, including without limitation the rights to use,
+ * copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the
+ * Software is furnished to do so, subject to the following
+ * conditions:
+ *
+ * The above copyright notice and this permission notice shall be
+ * included in all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+ * OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+ * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+ * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+ * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+ * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+ * OTHER DEALINGS IN THE SOFTWARE.
+ */
+
+/**
+ * @author aaron
+ *         <p/>
+ *         Class ArtificialSAMQueryIteratorUnitTest
+ *         <p/>
+ *         a test for the ArtificialSAMQueryIterator class.
+ */
+public class ArtificialSAMQueryIteratorUnitTest extends BaseTest {
+
+    /** Builds the standard fixture: 2 contigs with 100 reads per contig. */
+    private static ArtificialSAMQueryIterator makeIterator() {
+        return ArtificialSAMUtils.queryReadIterator(1, 2, 100);
+    }
+
+    /** Drains the iterator completely and returns the number of records it produced. */
+    private static int drain(final ArtificialSAMQueryIterator queryIterator) {
+        int total = 0;
+        for ( ; queryIterator.hasNext(); queryIterator.next() ) {
+            total++;
+        }
+        return total;
+    }
+
+    /** An end coordinate of -1 means "to the end of the contig", so all 100 chr1 reads match. */
+    @Test
+    public void testWholeChromosomeQuery() {
+        final ArtificialSAMQueryIterator queryIterator = makeIterator();
+        queryIterator.queryContained("chr1", 1, -1);
+        assertEquals(drain(queryIterator), 100);
+    }
+
+    /** A contained query only accepts reads lying entirely within [1, 50]. */
+    @Test
+    public void testContainedQueryStart() {
+        final ArtificialSAMQueryIterator queryIterator = makeIterator();
+        queryIterator.queryContained("chr1", 1, 50);
+        assertEquals(drain(queryIterator), 1);
+    }
+
+    /** An overlapping query accepts every read touching [1, 50]. */
+    @Test
+    public void testOverlappingQueryStart() {
+        final ArtificialSAMQueryIterator queryIterator = makeIterator();
+        queryIterator.queryOverlapping("chr1", 1, 50);
+        assertEquals(drain(queryIterator), 50);
+    }
+
+    /** Same contained semantics, but for an interval in the middle of the contig. */
+    @Test
+    public void testContainedQueryMiddle() {
+        final ArtificialSAMQueryIterator queryIterator = makeIterator();
+        queryIterator.queryContained("chr1", 25, 74);
+        assertEquals(drain(queryIterator), 1);
+    }
+
+    /** Same overlapping semantics, but for an interval in the middle of the contig. */
+    @Test
+    public void testOverlappingQueryMiddle() {
+        final ArtificialSAMQueryIterator queryIterator = makeIterator();
+        queryIterator.queryOverlapping("chr1", 25, 74);
+        assertEquals(drain(queryIterator), 50);
+    }
+
+    /** Querying a contig that is absent from the header must be rejected outright. */
+    @Test(expectedExceptions=IllegalArgumentException.class)
+    public void testUnknownChromosome() {
+        final ArtificialSAMQueryIterator queryIterator = makeIterator();
+        queryIterator.queryOverlapping("chr621", 25, 74);
+    }
+}
diff --git a/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/sam/ArtificialSAMUtilsUnitTest.java b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/sam/ArtificialSAMUtilsUnitTest.java
new file mode 100644
index 0000000..70d25a5
--- /dev/null
+++ b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/sam/ArtificialSAMUtilsUnitTest.java
@@ -0,0 +1,108 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.sam;
+
+import org.broadinstitute.gatk.utils.BaseTest;
+import org.broadinstitute.gatk.utils.iterators.GATKSAMIterator;
+import static org.testng.Assert.assertEquals;
+import static org.testng.Assert.assertTrue;
+import static org.testng.Assert.fail;
+import org.testng.annotations.Test;
+import htsjdk.samtools.SAMRecord;
+
+/**
+ * Unit tests for the ArtificialSAMUtils read-iterator factories: mapped-only and
+ * mapped-plus-unmapped iterators over artificial contigs.
+ *
+ * @author aaronmckenna
+ * @since Jun 3, 2009
+ */
+public class ArtificialSAMUtilsUnitTest extends BaseTest {
+
+
+    /** A mapped-read iterator over 100 contigs with 100 reads each yields 100 * 100 reads. */
+    @Test
+    public void basicReadIteratorTest() {
+        GATKSAMIterator iter = ArtificialSAMUtils.mappedReadIterator(1, 100, 100);
+        int count = 0;
+        while (iter.hasNext()) {
+            SAMRecord rec = iter.next();
+            count++;
+        }
+        assertEquals(count, 100 * 100);
+    }
+
+    /** With 10 reads per contig, the i-th read must come from contig i / 10. */
+    @Test
+    public void tenPerChromosome() {
+        GATKSAMIterator iter = ArtificialSAMUtils.mappedReadIterator(1, 100, 10);
+        int count = 0;
+        while (iter.hasNext()) {
+            SAMRecord rec = iter.next();
+
+            // integer division maps the running read count onto its contig index
+            // (the former Math.round(count / 10) was a no-op on the truncated int)
+            assertEquals(Integer.valueOf(count / 10), rec.getReferenceIndex());
+            count++;
+        }
+        assertEquals(count, 100 * 10);
+    }
+
+    /** With a single read per contig, the i-th read must come from contig i. */
+    @Test
+    public void onePerChromosome() {
+        GATKSAMIterator iter = ArtificialSAMUtils.mappedReadIterator(1, 100, 1);
+        int count = 0;
+        while (iter.hasNext()) {
+            SAMRecord rec = iter.next();
+
+            assertEquals(Integer.valueOf(count), rec.getReferenceIndex());
+            count++;
+        }
+        assertEquals(count, 100 * 1);
+    }
+
+    /** All mapped reads must be returned first, then exactly the requested unmapped reads. */
+    @Test
+    public void basicUnmappedIteratorTest() {
+        GATKSAMIterator iter = ArtificialSAMUtils.mappedAndUnmappedReadIterator(1, 100, 100, 1000);
+        int count = 0;
+        for (int x = 0; x < (100* 100); x++ ) {
+            if (!iter.hasNext()) {
+                fail ("we didn't get the expected number of reads");
+            }
+            SAMRecord rec = iter.next();
+            // mapped reads carry a non-negative reference index
+            assertTrue(rec.getReferenceIndex() >= 0);
+            count++;
+        }
+        // TestNG argument order is (actual, expected), consistent with the rest of this class
+        assertEquals(count, 100 * 100);
+
+        // now we should have 1000 unmapped reads
+        count = 0;
+        while (iter.hasNext()) {
+            SAMRecord rec = iter.next();
+            // unmapped reads have a negative (no) reference index
+            assertTrue(rec.getReferenceIndex() < 0);
+            count++;
+        }
+        assertEquals(count, 1000);
+    }
+
+}
diff --git a/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/sam/ArtificialSingleSampleReadStreamUnitTest.java b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/sam/ArtificialSingleSampleReadStreamUnitTest.java
new file mode 100644
index 0000000..5f6b36c
--- /dev/null
+++ b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/sam/ArtificialSingleSampleReadStreamUnitTest.java
@@ -0,0 +1,186 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.sam;
+
+import htsjdk.samtools.SAMFileHeader;
+import htsjdk.samtools.SAMReadGroupRecord;
+import org.broadinstitute.gatk.utils.Utils;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+import org.testng.annotations.Test;
+import org.testng.annotations.DataProvider;
+
+import org.broadinstitute.gatk.utils.BaseTest;
+
+public class ArtificialSingleSampleReadStreamUnitTest extends BaseTest {
+
+    /**
+     * One test case: a configured artificial read stream plus the analyzer that
+     * checks the stream's observed properties against its nominal configuration.
+     */
+    private static class ArtificialSingleSampleReadStreamTest extends TestDataProvider {
+        private ArtificialSingleSampleReadStream stream;
+        private ArtificialSingleSampleReadStreamAnalyzer streamAnalyzer;
+
+        public ArtificialSingleSampleReadStreamTest( ArtificialSingleSampleReadStream stream ) {
+            super(ArtificialSingleSampleReadStreamTest.class);
+
+            this.stream = stream;
+
+            // descriptive name so TestNG reports identify the exact parameter combination
+            setName(String.format("%s: numContigs=%d stacksPerContig=%d readsPerStack=%d-%d distanceBetweenStacks=%d-%d readLength=%d-%d unmappedReads=%d",
+                    getClass().getSimpleName(),
+                    stream.getNumContigs(),
+                    stream.getNumStacksPerContig(),
+                    stream.getMinReadsPerStack(),
+                    stream.getMaxReadsPerStack(),
+                    stream.getMinDistanceBetweenStacks(),
+                    stream.getMaxDistanceBetweenStacks(),
+                    stream.getMinReadLength(),
+                    stream.getMaxReadLength(),
+                    stream.getNumUnmappedReads()));
+        }
+
+        public void run() {
+            streamAnalyzer= new ArtificialSingleSampleReadStreamAnalyzer(stream);
+
+            streamAnalyzer.analyze(stream);
+
+            // Check whether the observed properties of the stream match its nominal properties
+            streamAnalyzer.validate();
+        }
+    }
+
+    @DataProvider(name = "ArtificialSingleSampleReadStreamTestDataProvider")
+    public Object[][] createArtificialSingleSampleReadStreamTests() {
+        SAMFileHeader header = ArtificialSAMUtils.createArtificialSamHeader(3, 1, 10000);
+        String readGroupID = "testReadGroup";
+        SAMReadGroupRecord readGroup = new SAMReadGroupRecord(readGroupID);
+        readGroup.setSample("testSample");
+        header.addReadGroup(readGroup);
+
+        // deterministic randomness so every run exercises the same streams
+        Utils.resetRandomGenerator();
+
+        // brute force testing! enumerate every small combination of stream parameters
+        for ( int numContigs = 0; numContigs <= 2; numContigs++ ) {
+            for ( int stacksPerContig = 0; stacksPerContig <= 2; stacksPerContig++ ) {
+                for ( int minReadsPerStack = 1; minReadsPerStack <= 2; minReadsPerStack++ ) {
+                    for ( int maxReadsPerStack = 1; maxReadsPerStack <= 3; maxReadsPerStack++ ) {
+                        for ( int minDistanceBetweenStacks = 1; minDistanceBetweenStacks <= 2; minDistanceBetweenStacks++ ) {
+                            for ( int maxDistanceBetweenStacks = 1; maxDistanceBetweenStacks <= 3; maxDistanceBetweenStacks++ ) {
+                                for ( int minReadLength = 1; minReadLength <= 2; minReadLength++ ) {
+                                    for ( int maxReadLength = 1; maxReadLength <= 3; maxReadLength++ ) {
+                                        for ( int numUnmappedReads = 0; numUnmappedReads <= 2; numUnmappedReads++ ) {
+                                            // Only test sane combinations here: min <= max for each range,
+                                            // and contigs/stacks are either both zero or both positive
+                                            if ( minReadsPerStack <= maxReadsPerStack &&
+                                                 minDistanceBetweenStacks <= maxDistanceBetweenStacks &&
+                                                 minReadLength <= maxReadLength &&
+                                                 ((numContigs > 0 && stacksPerContig > 0) || (numContigs == 0 && stacksPerContig == 0)) ) {
+
+                                                new ArtificialSingleSampleReadStreamTest(new ArtificialSingleSampleReadStream(header,
+                                                                                                                              readGroupID,
+                                                                                                                              numContigs,
+                                                                                                                              stacksPerContig,
+                                                                                                                              minReadsPerStack,
+                                                                                                                              maxReadsPerStack,
+                                                                                                                              minDistanceBetweenStacks,
+                                                                                                                              maxDistanceBetweenStacks,
+                                                                                                                              minReadLength,
+                                                                                                                              maxReadLength,
+                                                                                                                              numUnmappedReads));
+                                            }
+                                        }
+                                    }
+                                }
+                            }
+                        }
+                    }
+                }
+            }
+        }
+
+        return ArtificialSingleSampleReadStreamTest.getTests(ArtificialSingleSampleReadStreamTest.class);
+    }
+
+    @Test(dataProvider = "ArtificialSingleSampleReadStreamTestDataProvider")
+    public void testArtificialSingleSampleReadStream( ArtificialSingleSampleReadStreamTest test ) {
+        logger.warn("Running test: " + test);
+
+        // reset again so each case sees the same random sequence regardless of ordering
+        Utils.resetRandomGenerator();
+        test.run();
+    }
+
+    /**
+     * Invalid constructor arguments: each row names the violation it provokes.
+     * Column order mirrors the ArtificialSingleSampleReadStream constructor.
+     */
+    @DataProvider(name = "ArtificialSingleSampleReadStreamInvalidArgumentsTestDataProvider")
+    public Object[][] createInvalidArgumentsTests() {
+        SAMFileHeader header = ArtificialSAMUtils.createArtificialSamHeader(3, 1, 10000);
+        String readGroupID = "testReadGroup";
+        header.addReadGroup(new SAMReadGroupRecord(readGroupID));
+
+        return new Object[][] {
+            {"testNullHeader", null, readGroupID, 1, 1, 1, 2, 1, 2, 1, 2, 0},
+            {"testNullReadGroup", header, null, 1, 1, 1, 2, 1, 2, 1, 2, 0},
+            {"testInvalidReadGroup", header, "foo", 1, 1, 1, 2, 1, 2, 1, 2, 0},
+            {"testInvalidNumContigs", header, readGroupID, -1, 1, 1, 2, 1, 2, 1, 2, 0},
+            {"testInvalidNumStacksPerContig", header, readGroupID, 1, -1, 1, 2, 1, 2, 1, 2, 0},
+            {"test0ContigsNon0StacksPerContig", header, readGroupID, 0, 1, 1, 2, 1, 2, 1, 2, 0},
+            {"testNon0Contigs0StacksPerContig", header, readGroupID, 1, 0, 1, 2, 1, 2, 1, 2, 0},
+            {"testInvalidMinReadsPerStack", header, readGroupID, 1, 1, -1, 2, 1, 2, 1, 2, 0},
+            {"testInvalidMaxReadsPerStack", header, readGroupID, 1, 1, 1, -2, 1, 2, 1, 2, 0},
+            {"testInvalidMinDistanceBetweenStacks", header, readGroupID, 1, 1, 1, 2, -1, 2, 1, 2, 0},
+            {"testInvalidMaxDistanceBetweenStacks", header, readGroupID, 1, 1, 1, 2, 1, -2, 1, 2, 0},
+            {"testInvalidMinReadLength", header, readGroupID, 1, 1, 1, 2, 1, 2, -1, 2, 0},
+            {"testInvalidMaxReadLength", header, readGroupID, 1, 1, 1, 2, 1, 2, 1, -2, 0},
+            {"testInvalidReadsPerStackRange", header, readGroupID, 1, 1, 2, 1, 1, 2, 1, 2, 0},
+            {"testInvalidDistanceBetweenStacksRange", header, readGroupID, 1, 1, 1, 2, 2, 1, 1, 2, 0},
+            {"testInvalidReadLengthRange", header, readGroupID, 1, 1, 1, 2, 1, 2, 2, 1, 0},
+            {"testInvalidNumUnmappedReads", header, readGroupID, 1, 1, 1, 2, 1, 2, 1, 2, -1},
+        };
+    }
+
+    /** Each invalid-argument row must make the stream constructor throw ReviewedGATKException. */
+    @Test(dataProvider = "ArtificialSingleSampleReadStreamInvalidArgumentsTestDataProvider",
+          expectedExceptions = ReviewedGATKException.class)
+    public void testInvalidArguments( String testName,
+                                      SAMFileHeader header,
+                                      String readGroupID,
+                                      int numContigs,
+                                      int numStacksPerContig,
+                                      int minReadsPerStack,
+                                      int maxReadsPerStack,
+                                      int minDistanceBetweenStacks,
+                                      int maxDistanceBetweenStacks,
+                                      int minReadLength,
+                                      int maxReadLength,
+                                      int numUnmappedReads ) {
+
+        logger.warn("Running test: " + testName);
+
+        ArtificialSingleSampleReadStream stream = new ArtificialSingleSampleReadStream(header,
+                                                                                       readGroupID,
+                                                                                       numContigs,
+                                                                                       numStacksPerContig,
+                                                                                       minReadsPerStack,
+                                                                                       maxReadsPerStack,
+                                                                                       minDistanceBetweenStacks,
+                                                                                       maxDistanceBetweenStacks,
+                                                                                       minReadLength,
+                                                                                       maxReadLength,
+                                                                                       numUnmappedReads);
+    }
+}
diff --git a/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/sam/GATKSAMRecordUnitTest.java b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/sam/GATKSAMRecordUnitTest.java
new file mode 100644
index 0000000..470671d
--- /dev/null
+++ b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/sam/GATKSAMRecordUnitTest.java
@@ -0,0 +1,78 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.sam;
+
+import htsjdk.samtools.SAMFileHeader;
+import org.broadinstitute.gatk.utils.BaseTest;
+import org.testng.Assert;
+import org.testng.annotations.BeforeClass;
+import org.testng.annotations.Test;
+
+
+/**
+ * Unit tests for {@link GATKSAMRecord}, focused on the strandless-read support
+ * ({@code isStrandless}/{@code setIsStrandless}) layered on top of the standard
+ * SAM negative-strand flag.
+ */
+public class GATKSAMRecordUnitTest extends BaseTest {
+    GATKSAMRecord read;
+    final static String BASES = "ACTG";
+    final static String QUALS = "!+5?";
+
+    @BeforeClass
+    public void init() {
+        SAMFileHeader header = ArtificialSAMUtils.createArtificialSamHeader(1, 1, 1000);
+        read = ArtificialSAMUtils.createArtificialRead(header, "read1", 0, 1, BASES.length());
+        read.setReadUnmappedFlag(true);
+        // BASES and QUALS are already Strings; wrapping them in new String(...) was redundant.
+        read.setReadBases(BASES.getBytes());
+        read.setBaseQualityString(QUALS);
+    }
+
+    @Test
+    public void testStrandlessReads() {
+        // NOTE(review): 8 bases paired with a "6M" cigar -- presumably the factory tolerates
+        // the mismatch for these flag-only tests; confirm against ArtificialSAMUtils.
+        final byte [] bases = {'A', 'A', 'A', 'A', 'A', 'A', 'A', 'A'};
+        final byte [] quals = {20 , 20 , 20 , 20 , 20 , 20 , 20 , 20 };
+        GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(bases, quals, "6M");
+        // Reads are strand-ful by default.
+        Assert.assertEquals(read.isStrandless(), false);
+
+        // Toggling the ordinary strand flag must not make the read strandless.
+        read.setReadNegativeStrandFlag(false);
+        Assert.assertEquals(read.isStrandless(), false);
+        Assert.assertEquals(read.getReadNegativeStrandFlag(), false);
+
+        read.setReadNegativeStrandFlag(true);
+        Assert.assertEquals(read.isStrandless(), false);
+        Assert.assertEquals(read.getReadNegativeStrandFlag(), true);
+
+        // Once marked strandless, the negative-strand flag must read back as false
+        // even though it was set beforehand.
+        read.setReadNegativeStrandFlag(true);
+        read.setIsStrandless(true);
+        Assert.assertEquals(read.isStrandless(), true);
+        Assert.assertEquals(read.getReadNegativeStrandFlag(), false, "negative strand flag should return false even through its set for a strandless read");
+    }
+
+    /** Setting the strand of a strandless read is an illegal state transition. */
+    @Test(expectedExceptions = IllegalStateException.class)
+    public void testStrandlessReadsFailSetStrand() {
+        final byte [] bases = {'A', 'A', 'A', 'A', 'A', 'A', 'A', 'A'};
+        final byte [] quals = {20 , 20 , 20 , 20 , 20 , 20 , 20 , 20 };
+        GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(bases, quals, "6M");
+        read.setIsStrandless(true);
+        read.setReadNegativeStrandFlag(true);
+    }
+}
diff --git a/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/sam/ReadUtilsUnitTest.java b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/sam/ReadUtilsUnitTest.java
new file mode 100644
index 0000000..c6233f1
--- /dev/null
+++ b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/sam/ReadUtilsUnitTest.java
@@ -0,0 +1,339 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.sam;
+
+import htsjdk.samtools.reference.IndexedFastaSequenceFile;
+import htsjdk.samtools.SAMFileHeader;
+import org.broadinstitute.gatk.utils.BaseTest;
+import org.broadinstitute.gatk.utils.BaseUtils;
+import org.broadinstitute.gatk.utils.Utils;
+import org.broadinstitute.gatk.utils.fasta.CachingIndexedFastaSequenceFile;
+import org.testng.Assert;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.util.*;
+
+
+/**
+ * Unit tests for {@link ReadUtils}: adaptor-boundary computation, reverse
+ * complementing, maximum read length, reference-to-read coordinate lookups
+ * around deletions, and fragment-size well-definedness.
+ */
+public class ReadUtilsUnitTest extends BaseTest {
+    private interface GetAdaptorFunc {
+        public int getAdaptor(final GATKSAMRecord record);
+    }
+
+    /** Supplies both entry points to the adaptor boundary computation: the static utility and the record method. */
+    @DataProvider(name = "AdaptorGetter")
+    public Object[][] makeActiveRegionCutTests() {
+        final List<Object[]> tests = new LinkedList<Object[]>();
+
+        tests.add( new Object[]{ new GetAdaptorFunc() {
+            @Override public int getAdaptor(final GATKSAMRecord record) { return ReadUtils.getAdaptorBoundary(record); }
+        }});
+
+        tests.add( new Object[]{ new GetAdaptorFunc() {
+            @Override public int getAdaptor(final GATKSAMRecord record) { return record.getAdaptorBoundary(); }
+        }});
+
+        return tests.toArray(new Object[][]{});
+    }
+
+    /** Builds a proper-paired 8M read with the given insert size and mate start, for the adaptor tests. */
+    private GATKSAMRecord makeRead(final int fragmentSize, final int mateStart) {
+        final byte[] bases = {'A', 'C', 'G', 'T', 'A', 'C', 'G', 'T'};
+        final byte[] quals = {30, 30, 30, 30, 30, 30, 30, 30};
+        final String cigar = "8M";
+        GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(bases, quals, cigar);
+        read.setProperPairFlag(true);
+        read.setReadPairedFlag(true);
+        read.setMateAlignmentStart(mateStart);
+        read.setInferredInsertSize(fragmentSize);
+        return read;
+    }
+
+    @Test(dataProvider = "AdaptorGetter")
+    public void testGetAdaptorBoundary(final GetAdaptorFunc get) {
+        final int fragmentSize = 10;
+        final int mateStart = 1000;
+        final int BEFORE = mateStart - 2;
+        final int AFTER = mateStart + 2;
+        int myStart, boundary;
+        GATKSAMRecord read;
+
+        // Test case 1: positive strand, first read
+        read = makeRead(fragmentSize, mateStart);
+        myStart = BEFORE;
+        read.setAlignmentStart(myStart);
+        read.setReadNegativeStrandFlag(false);
+        read.setMateNegativeStrandFlag(true);
+        boundary = get.getAdaptor(read);
+        Assert.assertEquals(boundary, myStart + fragmentSize + 1);
+
+        // Test case 2: positive strand, second read
+        read = makeRead(fragmentSize, mateStart);
+        myStart = AFTER;
+        read.setAlignmentStart(myStart);
+        read.setReadNegativeStrandFlag(false);
+        read.setMateNegativeStrandFlag(true);
+        boundary = get.getAdaptor(read);
+        Assert.assertEquals(boundary, myStart + fragmentSize + 1);
+
+        // Test case 3: negative strand, second read
+        read = makeRead(fragmentSize, mateStart);
+        myStart = AFTER;
+        read.setAlignmentStart(myStart);
+        read.setReadNegativeStrandFlag(true);
+        read.setMateNegativeStrandFlag(false);
+        boundary = get.getAdaptor(read);
+        Assert.assertEquals(boundary, mateStart - 1);
+
+        // Test case 4: negative strand, first read
+        read = makeRead(fragmentSize, mateStart);
+        myStart = BEFORE;
+        read.setAlignmentStart(myStart);
+        read.setReadNegativeStrandFlag(true);
+        read.setMateNegativeStrandFlag(false);
+        boundary = get.getAdaptor(read);
+        Assert.assertEquals(boundary, mateStart - 1);
+
+        // Test case 5: mate is mapped to another chromosome (test both strands)
+        read = makeRead(fragmentSize, mateStart);
+        read.setInferredInsertSize(0);
+        read.setReadNegativeStrandFlag(true);
+        read.setMateNegativeStrandFlag(false);
+        boundary = get.getAdaptor(read);
+        Assert.assertEquals(boundary, ReadUtils.CANNOT_COMPUTE_ADAPTOR_BOUNDARY);
+        read.setReadNegativeStrandFlag(false);
+        read.setMateNegativeStrandFlag(true);
+        boundary = get.getAdaptor(read);
+        Assert.assertEquals(boundary, ReadUtils.CANNOT_COMPUTE_ADAPTOR_BOUNDARY);
+        read.setInferredInsertSize(10);
+
+        // Test case 6: read is unmapped
+        read = makeRead(fragmentSize, mateStart);
+        read.setReadUnmappedFlag(true);
+        boundary = get.getAdaptor(read);
+        Assert.assertEquals(boundary, ReadUtils.CANNOT_COMPUTE_ADAPTOR_BOUNDARY);
+        read.setReadUnmappedFlag(false);
+
+        // Test case 7:  reads don't overlap and look like this:
+        //    <--------|
+        //                 |------>
+        // first read:
+        read = makeRead(fragmentSize, mateStart);
+        myStart = 980;
+        read.setAlignmentStart(myStart);
+        read.setInferredInsertSize(20);
+        read.setReadNegativeStrandFlag(true);
+        boundary = get.getAdaptor(read);
+        Assert.assertEquals(boundary, ReadUtils.CANNOT_COMPUTE_ADAPTOR_BOUNDARY);
+
+        // second read:
+        read = makeRead(fragmentSize, mateStart);
+        myStart = 1000;
+        read.setAlignmentStart(myStart);
+        read.setInferredInsertSize(20);
+        read.setMateAlignmentStart(980);
+        read.setReadNegativeStrandFlag(false);
+        boundary = get.getAdaptor(read);
+        Assert.assertEquals(boundary, ReadUtils.CANNOT_COMPUTE_ADAPTOR_BOUNDARY);
+
+        // Test case 8: read doesn't have proper pair flag set
+        read = makeRead(fragmentSize, mateStart);
+        read.setReadPairedFlag(true);
+        read.setProperPairFlag(false);
+        Assert.assertEquals(get.getAdaptor(read), ReadUtils.CANNOT_COMPUTE_ADAPTOR_BOUNDARY);
+
+        // Test case 9: read and mate have same negative flag setting
+        for ( final boolean negFlag: Arrays.asList(true, false) ) {
+            read = makeRead(fragmentSize, mateStart);
+            read.setAlignmentStart(BEFORE);
+            read.setReadPairedFlag(true);
+            read.setProperPairFlag(true);
+            read.setReadNegativeStrandFlag(negFlag);
+            read.setMateNegativeStrandFlag(!negFlag);
+            Assert.assertTrue(get.getAdaptor(read) != ReadUtils.CANNOT_COMPUTE_ADAPTOR_BOUNDARY, "Get adaptor should have succeeded");
+
+            read = makeRead(fragmentSize, mateStart);
+            read.setAlignmentStart(BEFORE);
+            read.setReadPairedFlag(true);
+            read.setProperPairFlag(true);
+            read.setReadNegativeStrandFlag(negFlag);
+            read.setMateNegativeStrandFlag(negFlag);
+            Assert.assertEquals(get.getAdaptor(read), ReadUtils.CANNOT_COMPUTE_ADAPTOR_BOUNDARY, "Get adaptor should have failed for reads with bad alignment orientation");
+        }
+    }
+
+    /** Round-trip check: complementing the reverse complement reproduces the original bases. */
+    @Test (enabled = true)
+    public void testGetBasesReverseComplement() {
+        int iterations = 1000;
+        Random random = Utils.getRandomGenerator();
+        while(iterations-- > 0) {
+            final int l = random.nextInt(1000);
+            GATKSAMRecord read = GATKSAMRecord.createRandomRead(l);
+            byte [] original = read.getReadBases();
+            byte [] reconverted = new byte[l];
+            String revComp = ReadUtils.getBasesReverseComplement(read);
+            for (int i=0; i<l; i++) {
+                reconverted[l-1-i] = BaseUtils.getComplement((byte) revComp.charAt(i));
+            }
+            Assert.assertEquals(reconverted, original);
+        }
+    }
+
+    @Test (enabled = true)
+    public void testGetMaxReadLength() {
+        for( final int minLength : Arrays.asList( 5, 30, 50 ) ) {
+            for( final int maxLength : Arrays.asList( 50, 75, 100 ) ) {
+                final List<GATKSAMRecord> reads = new ArrayList<GATKSAMRecord>();
+                for( int readLength = minLength; readLength <= maxLength; readLength++ ) {
+                    reads.add( ReadUtils.createRandomRead( readLength ) );
+                }
+                Assert.assertEquals(ReadUtils.getMaxReadLength(reads), maxLength, "max length does not match");
+            }
+        }
+
+        // Edge case: an empty list must yield zero rather than fail.
+        final List<GATKSAMRecord> reads = new LinkedList<GATKSAMRecord>();
+        Assert.assertEquals(ReadUtils.getMaxReadLength(reads), 0, "Empty list should have max length of zero");
+    }
+
+    @Test (enabled = true)
+    public void testReadWithNsRefIndexInDeletion() throws FileNotFoundException {
+
+        final IndexedFastaSequenceFile seq = new CachingIndexedFastaSequenceFile(new File(b37KGReference));
+        final SAMFileHeader header = ArtificialSAMUtils.createArtificialSamHeader(seq.getSequenceDictionary());
+        final int readLength = 76;
+
+        final GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(header, "myRead", 0, 8975, readLength);
+        read.setReadBases(Utils.dupBytes((byte) 'A', readLength));
+        read.setBaseQualities(Utils.dupBytes((byte)30, readLength));
+        read.setCigarString("3M414N1D73M");
+
+        // 9392 falls inside the 1D deletion; LEFT_TAIL clipping should resolve to the base before it.
+        final int result = ReadUtils.getReadCoordinateForReferenceCoordinateUpToEndOfRead(read, 9392, ReadUtils.ClippingTail.LEFT_TAIL);
+        Assert.assertEquals(result, 2);
+    }
+
+    @Test (enabled = true)
+    public void testReadWithNsRefAfterDeletion() throws FileNotFoundException {
+
+        final IndexedFastaSequenceFile seq = new CachingIndexedFastaSequenceFile(new File(b37KGReference));
+        final SAMFileHeader header = ArtificialSAMUtils.createArtificialSamHeader(seq.getSequenceDictionary());
+        final int readLength = 76;
+
+        final GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(header, "myRead", 0, 8975, readLength);
+        read.setReadBases(Utils.dupBytes((byte) 'A', readLength));
+        read.setBaseQualities(Utils.dupBytes((byte)30, readLength));
+        read.setCigarString("3M414N1D73M");
+
+        // 9393 is the first reference base after the deletion.
+        final int result = ReadUtils.getReadCoordinateForReferenceCoordinateUpToEndOfRead(read, 9393, ReadUtils.ClippingTail.LEFT_TAIL);
+        Assert.assertEquals(result, 3);
+    }
+
+    /** Starts from one well-formed pair and perturbs a single property per case. */
+    @DataProvider(name = "HasWellDefinedFragmentSizeData")
+    public Object[][] makeHasWellDefinedFragmentSizeData() throws Exception {
+        final List<Object[]> tests = new LinkedList<Object[]>();
+
+        // setup a basic read that will work
+        final SAMFileHeader header = ArtificialSAMUtils.createArtificialSamHeader();
+        final GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(header, "read1", 0, 10, 10);
+        read.setReadPairedFlag(true);
+        read.setProperPairFlag(true);
+        read.setReadUnmappedFlag(false);
+        read.setMateUnmappedFlag(false);
+        read.setAlignmentStart(100);
+        read.setCigarString("50M");
+        read.setMateAlignmentStart(130);
+        read.setInferredInsertSize(80);
+        read.setFirstOfPairFlag(true);
+        read.setReadNegativeStrandFlag(false);
+        read.setMateNegativeStrandFlag(true);
+
+        tests.add( new Object[]{ "basic case", read.clone(), true });
+
+        {
+            final GATKSAMRecord bad1 = (GATKSAMRecord)read.clone();
+            bad1.setReadPairedFlag(false);
+            tests.add( new Object[]{ "not paired", bad1, false });
+        }
+
+        {
+            final GATKSAMRecord bad = (GATKSAMRecord)read.clone();
+            bad.setProperPairFlag(false);
+            // we currently don't require the proper pair flag to be set
+            tests.add( new Object[]{ "not proper pair", bad, true });
+//            tests.add( new Object[]{ "not proper pair", bad, false });
+        }
+
+        {
+            final GATKSAMRecord bad = (GATKSAMRecord)read.clone();
+            bad.setReadUnmappedFlag(true);
+            tests.add( new Object[]{ "read is unmapped", bad, false });
+        }
+
+        {
+            final GATKSAMRecord bad = (GATKSAMRecord)read.clone();
+            bad.setMateUnmappedFlag(true);
+            tests.add( new Object[]{ "mate is unmapped", bad, false });
+        }
+
+        {
+            final GATKSAMRecord bad = (GATKSAMRecord)read.clone();
+            bad.setMateNegativeStrandFlag(false);
+            tests.add( new Object[]{ "read and mate both on positive strand", bad, false });
+        }
+
+        {
+            final GATKSAMRecord bad = (GATKSAMRecord)read.clone();
+            bad.setReadNegativeStrandFlag(true);
+            tests.add( new Object[]{ "read and mate both on negative strand", bad, false });
+        }
+
+        {
+            final GATKSAMRecord bad = (GATKSAMRecord)read.clone();
+            bad.setInferredInsertSize(0);
+            tests.add( new Object[]{ "insert size is 0", bad, false });
+        }
+
+        {
+            final GATKSAMRecord bad = (GATKSAMRecord)read.clone();
+            bad.setAlignmentStart(1000);
+            tests.add( new Object[]{ "positive read starts after mate end", bad, false });
+        }
+
+        {
+            final GATKSAMRecord bad = (GATKSAMRecord)read.clone();
+            bad.setReadNegativeStrandFlag(true);
+            bad.setMateNegativeStrandFlag(false);
+            bad.setMateAlignmentStart(1000);
+            tests.add( new Object[]{ "negative strand read ends before mate starts", bad, false });
+        }
+
+        return tests.toArray(new Object[][]{});
+    }
+
+    // NOTE: this method must be public -- TestNG silently skips non-public @Test
+    // methods, so the original 'private' declaration meant this test never ran.
+    @Test(dataProvider = "HasWellDefinedFragmentSizeData")
+    public void testHasWellDefinedFragmentSize(final String name, final GATKSAMRecord read, final boolean expected) {
+        Assert.assertEquals(ReadUtils.hasWellDefinedFragmentSize(read), expected);
+    }
+}
diff --git a/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/smithwaterman/SmithWatermanBenchmark.java b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/smithwaterman/SmithWatermanBenchmark.java
new file mode 100644
index 0000000..f6aff7b
--- /dev/null
+++ b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/smithwaterman/SmithWatermanBenchmark.java
@@ -0,0 +1,87 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.smithwaterman;
+
+import com.google.caliper.Param;
+import com.google.caliper.SimpleBenchmark;
+import org.broadinstitute.gatk.utils.Utils;
+
+/**
+ * Caliper microbenchmark of Smith-Waterman pairwise alignment.
+ *
+ * Aligns a reference string against a haplotype that differs by two SNPs,
+ * sweeping the sizes of the middle and end regions via Caliper @Param fields.
+ */
+public class SmithWatermanBenchmark extends SimpleBenchmark {
+
+    @Param({"Original"})
+    String version; // set automatically by framework
+
+    @Param({"10", "50", "100", "500"})
+    int sizeOfMiddleRegion; // set automatically by framework
+
+    @Param({"10", "50", "100", "500"})
+    int sizeOfEndRegions; // set automatically by framework
+
+    String refString;
+    String hapString;
+
+    /** Builds ref/hap strings of the parameterized sizes differing at exactly two positions. */
+    @Override protected void setUp() {
+        final StringBuilder ref = new StringBuilder();
+        final StringBuilder hap = new StringBuilder();
+
+        ref.append(Utils.dupString('A', sizeOfEndRegions));
+        hap.append(Utils.dupString('A', sizeOfEndRegions));
+
+        // introduce a SNP
+        ref.append("X");
+        hap.append("Y");
+
+        ref.append(Utils.dupString('A', sizeOfMiddleRegion));
+        hap.append(Utils.dupString('A', sizeOfMiddleRegion));
+
+        // introduce a SNP
+        ref.append("X");
+        hap.append("Y");
+
+        ref.append(Utils.dupString('A', sizeOfEndRegions));
+        hap.append(Utils.dupString('A', sizeOfEndRegions));
+
+        refString = ref.toString();
+        hapString = hap.toString();
+    }
+
+    /** Timed by Caliper: one full alignment plus cigar extraction per repetition. */
+    public void timeSW(int rep) {
+        for ( int i = 0; i < rep; i++ ) {
+            final SmithWaterman sw;
+            // Only the "Original" implementation is supported; "Greedy" was removed.
+            if ( version.equals("Greedy") )
+                throw new IllegalArgumentException("Unsupported implementation");
+            sw = new SWPairwiseAlignment(refString.getBytes(), hapString.getBytes());
+            sw.getCigar();
+        }
+    }
+
+    public static void main(String[] args) {
+        com.google.caliper.Runner.main(SmithWatermanBenchmark.class, args);
+    }
+}
diff --git a/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/text/ListFileUtilsUnitTest.java b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/text/ListFileUtilsUnitTest.java
new file mode 100644
index 0000000..aeb1101
--- /dev/null
+++ b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/text/ListFileUtilsUnitTest.java
@@ -0,0 +1,154 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.text;
+
+import org.broadinstitute.gatk.utils.BaseTest;
+import org.broadinstitute.gatk.utils.commandline.ParsingEngine;
+import org.broadinstitute.gatk.utils.commandline.Tags;
+import org.broadinstitute.gatk.utils.sam.SAMReaderID;
+import org.testng.Assert;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+import java.io.File;
+import java.util.*;
+
+/**
+ * Unit tests for {@link ListFileUtils}: unpacking of BAM list files (blank
+ * lines and comments ignored), set unpacking, and include/exclude filtering.
+ */
+public class ListFileUtilsUnitTest extends BaseTest {
+
+    @Test
+    public void testIgnoreBlankLinesInBAMListFiles() throws Exception {
+        // List file containing an empty line and a whitespace-only line around one real entry.
+        File tempListFile = createTempListFile("testIgnoreBlankLines",
+                                               "",
+                                               publicTestDir + "exampleBAM.bam",
+                                               "         "
+                                              );
+
+        List<SAMReaderID> expectedBAMFileListAfterUnpacking = new ArrayList<SAMReaderID>();
+        expectedBAMFileListAfterUnpacking.add(new SAMReaderID(new File(publicTestDir + "exampleBAM.bam"), new Tags()));
+
+        performBAMListFileUnpackingTest(tempListFile, expectedBAMFileListAfterUnpacking);
+    }
+
+    @Test
+    public void testCommentSupportInBAMListFiles() throws Exception {
+        // Bare "#", fully commented entries, and indented comments must all be skipped.
+        File tempListFile = createTempListFile("testCommentSupport",
+                                               "#",
+                                               publicTestDir + "exampleBAM.bam",
+                                               "#" + publicTestDir + "foo.bam",
+                                               "      # " + publicTestDir + "bar.bam"
+                                              );
+
+        List<SAMReaderID> expectedBAMFileListAfterUnpacking = new ArrayList<SAMReaderID>();
+        expectedBAMFileListAfterUnpacking.add(new SAMReaderID(new File(publicTestDir + "exampleBAM.bam"), new Tags()));
+
+        performBAMListFileUnpackingTest(tempListFile, expectedBAMFileListAfterUnpacking);
+    }
+
+    @Test
+    public void testUnpackSet() throws Exception {
+        Set<String> expected = new HashSet<String>(Arrays.asList(publicTestDir + "exampleBAM.bam"));
+        Set<String> actual;
+
+        // A direct filename passes through unchanged.
+        actual = ListFileUtils.unpackSet(Arrays.asList(publicTestDir + "exampleBAM.bam"));
+        Assert.assertEquals(actual, expected);
+
+        // A list file is expanded, with comments ignored.
+        File tempListFile = createTempListFile("testUnpackSet",
+                "#",
+                publicTestDir + "exampleBAM.bam",
+                "#" + publicTestDir + "foo.bam",
+                "      # " + publicTestDir + "bar.bam"
+        );
+        actual = ListFileUtils.unpackSet(Arrays.asList(tempListFile.getAbsolutePath()));
+        Assert.assertEquals(actual, expected);
+    }
+
+    /** Cases: values, filters, exactMatch flag, expected surviving set. */
+    @DataProvider(name="includeMatchingTests")
+    public Object[][] getIncludeMatchingTests() {
+        return new Object[][] {
+                new Object[] { asSet("a", "ab", "abc"), Arrays.asList("a"), true, asSet("a") },
+                new Object[] { asSet("a", "ab", "abc"), Arrays.asList("a"), false, asSet("a", "ab", "abc") },
+                new Object[] { asSet("a", "ab", "abc"), Arrays.asList("b"), true, Collections.EMPTY_SET },
+                new Object[] { asSet("a", "ab", "abc"), Arrays.asList("b"), false, asSet("ab", "abc") },
+                new Object[] { asSet("a", "ab", "abc"), Arrays.asList("a", "b"), true, asSet("a") },
+                new Object[] { asSet("a", "ab", "abc"), Arrays.asList("a", "b"), false, asSet("a", "ab", "abc") },
+                new Object[] { asSet("a", "ab", "abc"), Arrays.asList("a", "ab"), true, asSet("a", "ab") },
+                new Object[] { asSet("a", "ab", "abc"), Arrays.asList("a", "ab"), false, asSet("a", "ab", "abc") },
+                new Object[] { asSet("a", "ab", "abc"), Arrays.asList(".*b.*"), true, Collections.EMPTY_SET },
+                new Object[] { asSet("a", "ab", "abc"), Arrays.asList(".*b.*"), false, asSet("ab", "abc") },
+                new Object[] { asSet("a", "ab", "abc"), Arrays.asList(".*"), true, Collections.EMPTY_SET },
+                new Object[] { asSet("a", "ab", "abc"), Arrays.asList(".*"), false, asSet("a", "ab", "abc") }
+        };
+    }
+
+    @Test(dataProvider = "includeMatchingTests")
+    public void testIncludeMatching(Set<String> values, Collection<String> filters, boolean exactMatch, Set<String> expected) {
+        Set<String> actual = ListFileUtils.includeMatching(values, ListFileUtils.IDENTITY_STRING_CONVERTER, filters, exactMatch);
+        Assert.assertEquals(actual, expected);
+    }
+
+    /** Mirror of the include cases: expected sets are the complements under exclusion. */
+    @DataProvider(name="excludeMatchingTests")
+    public Object[][] getExcludeMatchingTests() {
+        return new Object[][] {
+                new Object[] { asSet("a", "ab", "abc"), Arrays.asList("a"), true, asSet("ab", "abc") },
+                new Object[] { asSet("a", "ab", "abc"), Arrays.asList("a"), false, Collections.EMPTY_SET },
+                new Object[] { asSet("a", "ab", "abc"), Arrays.asList("b"), true, asSet("a", "ab", "abc") },
+                new Object[] { asSet("a", "ab", "abc"), Arrays.asList("b"), false, asSet("a") },
+                new Object[] { asSet("a", "ab", "abc"), Arrays.asList("a", "b"), true, asSet("ab", "abc") },
+                new Object[] { asSet("a", "ab", "abc"), Arrays.asList("a", "b"), false, Collections.EMPTY_SET },
+                new Object[] { asSet("a", "ab", "abc"), Arrays.asList("a", "ab"), true, asSet("abc") },
+                new Object[] { asSet("a", "ab", "abc"), Arrays.asList("a", "ab"), false, Collections.EMPTY_SET },
+                new Object[] { asSet("a", "ab", "abc"), Arrays.asList(".*b.*"), true, asSet("a", "ab", "abc") },
+                new Object[] { asSet("a", "ab", "abc"), Arrays.asList(".*b.*"), false, asSet("a") },
+                new Object[] { asSet("a", "ab", "abc"), Arrays.asList(".*"), true, asSet("a", "ab", "abc") },
+                new Object[] { asSet("a", "ab", "abc"), Arrays.asList(".*"), false, Collections.EMPTY_SET }
+        };
+    }
+
+    @Test(dataProvider = "excludeMatchingTests")
+    public void testExcludeMatching(Set<String> values, Collection<String> filters, boolean exactMatch, Set<String> expected) {
+        Set<String> actual = ListFileUtils.excludeMatching(values, ListFileUtils.IDENTITY_STRING_CONVERTER, filters, exactMatch);
+        Assert.assertEquals(actual, expected);
+    }
+
+    /** Convenience: varargs to HashSet. Safe because the array is only read. */
+    private static <T> Set<T> asSet(T... args){
+        return new HashSet<T>(Arrays.asList(args));
+    }
+
+    /** Unpacks the given list file and asserts the result matches the expected reader IDs exactly. */
+    private void performBAMListFileUnpackingTest( File tempListFile, List<SAMReaderID> expectedUnpackedFileList ) throws Exception {
+        List<String> bamFiles = new ArrayList<String>();
+        bamFiles.add(tempListFile.getAbsolutePath());
+
+        List<SAMReaderID> unpackedBAMFileList = ListFileUtils.unpackBAMFileList(bamFiles,new ParsingEngine(null));
+
+        Assert.assertEquals(unpackedBAMFileList.size(), expectedUnpackedFileList.size(),
+                            "Unpacked BAM file list contains extraneous lines");
+        Assert.assertEquals(unpackedBAMFileList, expectedUnpackedFileList,
+                            "Unpacked BAM file list does not contain correct BAM file names");
+    }
+}
diff --git a/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/text/TextFormattingUtilsUnitTest.java b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/text/TextFormattingUtilsUnitTest.java
new file mode 100644
index 0000000..b8bf04b
--- /dev/null
+++ b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/text/TextFormattingUtilsUnitTest.java
@@ -0,0 +1,89 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.text;
+
+import org.broadinstitute.gatk.utils.BaseTest;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+import org.testng.Assert;
+import org.testng.annotations.Test;
+
+import java.util.Arrays;
+import java.util.Collections;
+
+public class TextFormattingUtilsUnitTest extends BaseTest {
+    @Test(expectedExceptions = ReviewedGATKException.class)
+    public void testSplitWhiteSpaceNullLine() {
+        TextFormattingUtils.splitWhiteSpace(null);
+    }
+
+    @Test
+    public void testSplitWhiteSpace() {
+        Assert.assertEquals(TextFormattingUtils.splitWhiteSpace("foo bar baz"), new String[] { "foo", "bar", "baz" });
+        Assert.assertEquals(TextFormattingUtils.splitWhiteSpace("foo  bar  baz"), new String[] { "foo", "bar", "baz" });
+        Assert.assertEquals(TextFormattingUtils.splitWhiteSpace(" foo bar baz"), new String[] { "foo", "bar", "baz" });
+        Assert.assertEquals(TextFormattingUtils.splitWhiteSpace(" foo bar baz "), new String[] { "foo", "bar", "baz" });
+        Assert.assertEquals(TextFormattingUtils.splitWhiteSpace("foo bar baz "), new String[] { "foo", "bar", "baz" });
+        Assert.assertEquals(TextFormattingUtils.splitWhiteSpace("\tfoo\tbar\tbaz\t"), new String[]{"foo", "bar", "baz"});
+    }
+
+    @Test(expectedExceptions = ReviewedGATKException.class)
+    public void testGetWordStartsNullLine() {
+        TextFormattingUtils.getWordStarts(null);
+    }
+
+    @Test
+    public void testGetWordStarts() {
+        Assert.assertEquals(TextFormattingUtils.getWordStarts("foo bar baz"), Arrays.asList(4, 8));
+        Assert.assertEquals(TextFormattingUtils.getWordStarts("foo  bar  baz"), Arrays.asList(5, 10));
+        Assert.assertEquals(TextFormattingUtils.getWordStarts(" foo bar baz"), Arrays.asList(1, 5, 9));
+        Assert.assertEquals(TextFormattingUtils.getWordStarts(" foo bar baz "), Arrays.asList(1, 5, 9));
+        Assert.assertEquals(TextFormattingUtils.getWordStarts("foo bar baz "), Arrays.asList(4, 8));
+        Assert.assertEquals(TextFormattingUtils.getWordStarts("\tfoo\tbar\tbaz\t"), Arrays.asList(1, 5, 9));
+    }
+
+    @Test(expectedExceptions = ReviewedGATKException.class)
+    public void testSplitFixedWidthNullLine() {
+        TextFormattingUtils.splitFixedWidth(null, Collections.<Integer>emptyList());
+    }
+
+    @Test(expectedExceptions = ReviewedGATKException.class)
+    public void testSplitFixedWidthNullColumnStarts() {
+        TextFormattingUtils.splitFixedWidth("foo bar baz", null);
+    }
+
+    @Test
+    public void testSplitFixedWidth() {
+        Assert.assertEquals(TextFormattingUtils.splitFixedWidth("foo bar baz", Arrays.asList(4, 8)), new String[] { "foo", "bar", "baz" });
+        Assert.assertEquals(TextFormattingUtils.splitFixedWidth("foo  bar  baz", Arrays.asList(5, 10)), new String[] { "foo", "bar", "baz" });
+        Assert.assertEquals(TextFormattingUtils.splitFixedWidth(" foo bar baz", Arrays.asList(5, 9)), new String[] { "foo", "bar", "baz" });
+        Assert.assertEquals(TextFormattingUtils.splitFixedWidth(" foo bar baz ", Arrays.asList(5, 9)), new String[] { "foo", "bar", "baz" });
+        Assert.assertEquals(TextFormattingUtils.splitFixedWidth("foo bar baz ", Arrays.asList(4, 8)), new String[] { "foo", "bar", "baz" });
+        Assert.assertEquals(TextFormattingUtils.splitFixedWidth("\tfoo\tbar\tbaz\t", Arrays.asList(5, 9)), new String[] { "foo", "bar", "baz" });
+        Assert.assertEquals(TextFormattingUtils.splitFixedWidth("f o b r b z", Arrays.asList(4, 8)), new String[] { "f o", "b r", "b z" });
+        Assert.assertEquals(TextFormattingUtils.splitFixedWidth(" f o b r b z", Arrays.asList(4, 8)), new String[] { "f o", "b r", "b z" });
+        Assert.assertEquals(TextFormattingUtils.splitFixedWidth("  f o b r b z", Arrays.asList(4, 8)), new String[] { "f", "o b", "r b z" });
+    }
+}
diff --git a/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/threading/EfficiencyMonitoringThreadFactoryUnitTest.java b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/threading/EfficiencyMonitoringThreadFactoryUnitTest.java
new file mode 100644
index 0000000..bbd594d
--- /dev/null
+++ b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/threading/EfficiencyMonitoringThreadFactoryUnitTest.java
@@ -0,0 +1,189 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.threading;
+
+import org.apache.log4j.Priority;
+import org.broadinstitute.gatk.utils.BaseTest;
+import org.broadinstitute.gatk.utils.Utils;
+import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException;
+import org.testng.Assert;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;
+import java.util.concurrent.Callable;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.TimeUnit;
+
+/**
+ * Tests for the state monitoring thread factory.
+ */
+public class EfficiencyMonitoringThreadFactoryUnitTest extends BaseTest {
+    // the duration of the tests -- 100 ms is tolerable given the number of tests we are doing
+    private final static long THREAD_TARGET_DURATION_IN_MILLISECOND = 100000;
+    private final static int MAX_THREADS = 4;
+    final static Object GLOBAL_LOCK = new Object();
+
+    private class StateTest extends TestDataProvider {
+        private final double TOLERANCE = 0.1; // willing to tolerate a 10% error
+
+        final List<EfficiencyMonitoringThreadFactory.State> statesForThreads;
+
+        public StateTest(final List<EfficiencyMonitoringThreadFactory.State> statesForThreads) {
+            super(StateTest.class);
+            this.statesForThreads = statesForThreads;
+            setName("StateTest " + Utils.join(",", statesForThreads));
+        }
+
+        public List<EfficiencyMonitoringThreadFactory.State> getStatesForThreads() {
+            return statesForThreads;
+        }
+
+        public int getNStates() { return statesForThreads.size(); }
+
+        public double maxStatePercent(final EfficiencyMonitoringThreadFactory.State state) { return 100*(fraction(state) + TOLERANCE); }
+        public double minStatePercent(final EfficiencyMonitoringThreadFactory.State state) { return 100*(fraction(state) - TOLERANCE); }
+
+        private double fraction(final EfficiencyMonitoringThreadFactory.State state) {
+            return Collections.frequency(statesForThreads, state) / (1.0 * statesForThreads.size());
+        }
+    }
+
+    /**
+     * Test helper threading class that puts the thread into RUNNING, BLOCKED, or WAITING state as
+     * requested for input argument
+     */
+    private static class StateTestThread implements Callable<Double> {
+        private final EfficiencyMonitoringThreadFactory.State stateToImplement;
+
+        private StateTestThread(final EfficiencyMonitoringThreadFactory.State stateToImplement) {
+            this.stateToImplement = stateToImplement;
+        }
+
+        @Override
+        public Double call() throws Exception {
+            switch ( stateToImplement ) {
+                case USER_CPU:
+                    // do some work until we get to THREAD_TARGET_DURATION_IN_MILLISECOND
+                    double sum = 0.0;
+                    final long startTime = System.currentTimeMillis();
+                    for ( int i = 1; System.currentTimeMillis() - startTime < (THREAD_TARGET_DURATION_IN_MILLISECOND - 1); i++ ) {
+                        sum += Math.log10(i);
+                    }
+                    return sum;
+                case WAITING:
+                    Thread.currentThread().sleep(THREAD_TARGET_DURATION_IN_MILLISECOND);
+                    return 0.0;
+                case BLOCKING:
+                    if ( EfficiencyMonitoringThreadFactory.DEBUG ) logger.warn("Blocking...");
+                    synchronized (GLOBAL_LOCK) {
+                        // the GLOBAL_LOCK must be held by the unit test itself for this to properly block
+                        if ( EfficiencyMonitoringThreadFactory.DEBUG ) logger.warn("  ... done blocking");
+                    }
+                    return 0.0;
+                case WAITING_FOR_IO:
+                    // TODO -- implement me
+                    // shouldn't ever get here, throw an exception
+                    throw new ReviewedGATKException("WAITING_FOR_IO testing currently not implemented, until we figure out how to force a system call block");
+                default:
+                    throw new ReviewedGATKException("Unexpected thread test state " + stateToImplement);
+            }
+        }
+    }
+
+    @DataProvider(name = "StateTest")
+    public Object[][] createStateTest() {
+        for ( final int nThreads : Arrays.asList(3) ) {
+            //final List<EfficiencyMonitoringThreadFactory.State> allStates = Arrays.asList(EfficiencyMonitoringThreadFactory.State.WAITING_FOR_IO);
+            final List<EfficiencyMonitoringThreadFactory.State> allStates = Arrays.asList(EfficiencyMonitoringThreadFactory.State.USER_CPU, EfficiencyMonitoringThreadFactory.State.WAITING, EfficiencyMonitoringThreadFactory.State.BLOCKING);
+            //final List<EfficiencyMonitoringThreadFactory.State> allStates = Arrays.asList(EfficiencyMonitoringThreadFactory.State.values());
+            for (final List<EfficiencyMonitoringThreadFactory.State> states : Utils.makePermutations(allStates, nThreads, true) ) {
+                //if ( Collections.frequency(states, Thread.State.BLOCKED) > 0)
+                    new StateTest(states);
+            }
+        }
+
+        return StateTest.getTests(StateTest.class);
+    }
+
+    // NOTE this test takes an unreasonably long time to run, and so it's been disabled as these monitoring threads
+    // aren't a core GATK feature any longer.  Should be reabled if we come to care about this capability again
+    // in the future, or we can run these in parallel
+    @Test(enabled = false, dataProvider = "StateTest", timeOut = MAX_THREADS * THREAD_TARGET_DURATION_IN_MILLISECOND)
+    public void testStateTest(final StateTest test) throws InterruptedException {
+        // allows us to test blocking
+        final EfficiencyMonitoringThreadFactory factory = new EfficiencyMonitoringThreadFactory(test.getNStates());
+        final ExecutorService threadPool = Executors.newFixedThreadPool(test.getNStates(), factory);
+
+        logger.warn("Running " + test);
+        synchronized (GLOBAL_LOCK) {
+            //logger.warn("  Have lock");
+            for ( final EfficiencyMonitoringThreadFactory.State threadToRunState : test.getStatesForThreads() )
+            threadPool.submit(new StateTestThread(threadToRunState));
+
+            // lock has to be here for the whole running of the activeThreads but end before the sleep so the blocked activeThreads
+            // can block for their allotted time
+            threadPool.shutdown();
+            Thread.sleep(THREAD_TARGET_DURATION_IN_MILLISECOND);
+        }
+        //logger.warn("  Releasing lock");
+        threadPool.awaitTermination(10, TimeUnit.SECONDS);
+        //logger.warn("  done awaiting termination");
+        //logger.warn("  waiting for all activeThreads to complete");
+        factory.waitForAllThreadsToComplete();
+        //logger.warn("  done waiting for activeThreads");
+
+        // make sure we counted everything properly
+        final long totalTime = factory.getTotalTime();
+        final long minTime = (long)(THREAD_TARGET_DURATION_IN_MILLISECOND * 0.5) * test.getNStates();
+        final long maxTime = (long)(THREAD_TARGET_DURATION_IN_MILLISECOND * 1.5) * test.getNStates();
+        //logger.warn("Testing total time");
+        Assert.assertTrue(totalTime >= minTime, "Factory results not properly accumulated: totalTime = " + totalTime + " < minTime = " + minTime);
+        Assert.assertTrue(totalTime <= maxTime, "Factory results not properly accumulated: totalTime = " + totalTime + " > maxTime = " + maxTime);
+
+        for (final EfficiencyMonitoringThreadFactory.State state : EfficiencyMonitoringThreadFactory.State.values() ) {
+            final double min = test.minStatePercent(state);
+            final double max = test.maxStatePercent(state);
+            final double obs = factory.getStatePercent(state);
+//            logger.warn("  Checking " + state
+//                    + " min " + String.format("%.2f", min)
+//                    + " max " + String.format("%.2f", max)
+//                    + " obs " + String.format("%.2f", obs)
+//                    + " factor = " + factory);
+            Assert.assertTrue(obs >= min, "Too little time spent in state " + state + " obs " + obs + " min " + min);
+            Assert.assertTrue(obs <= max, "Too much time spent in state " + state + " obs " + obs + " max " + min);
+        }
+
+        // we actually ran the expected number of activeThreads
+        Assert.assertEquals(factory.getNThreadsCreated(), test.getNStates());
+
+        // should be called to ensure we don't format / NPE on output
+        factory.printUsageInformation(logger, Priority.WARN);
+    }
+}
\ No newline at end of file
diff --git a/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/threading/ThreadPoolMonitorUnitTest.java b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/threading/ThreadPoolMonitorUnitTest.java
new file mode 100644
index 0000000..ecc611f
--- /dev/null
+++ b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/threading/ThreadPoolMonitorUnitTest.java
@@ -0,0 +1,64 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.threading;
+
+import org.testng.annotations.Test;
+import org.broadinstitute.gatk.utils.BaseTest;
+
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors; 
+/**
+ * User: hanna
+ * Date: Apr 29, 2009
+ * Time: 4:30:55 PM
+ * BROAD INSTITUTE SOFTWARE COPYRIGHT NOTICE AND AGREEMENT
+ * Software and documentation are copyright 2005 by the Broad Institute.
+ * All rights are reserved.
+ *
+ * Users acknowledge that this software is supplied without any warranty or support.
+ * The Broad Institute is not responsible for its use, misuse, or
+ * functionality.
+ */
+
+/**
+ * Tests for the thread pool monitor class.
+ */
+
+public class ThreadPoolMonitorUnitTest extends BaseTest {
+    private ExecutorService threadPool = Executors.newFixedThreadPool(1);
+
+    /**
+     * Test to make sure the thread pool wait works properly. 
+     */
+    @Test(timeOut=2000)
+    public void testThreadPoolMonitor() {
+        ThreadPoolMonitor monitor = new ThreadPoolMonitor();
+        synchronized(monitor) {
+            threadPool.submit(monitor);
+            monitor.watch();
+        }
+    }
+}
diff --git a/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/variant/GATKVariantContextUtilsUnitTest.java b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/variant/GATKVariantContextUtilsUnitTest.java
new file mode 100644
index 0000000..c64c04d
--- /dev/null
+++ b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/variant/GATKVariantContextUtilsUnitTest.java
@@ -0,0 +1,1665 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.variant;
+
+import htsjdk.variant.variantcontext.*;
+import org.broadinstitute.gatk.utils.*;
+import org.broadinstitute.gatk.utils.collections.Pair;
+import org.broadinstitute.gatk.utils.fasta.CachingIndexedFastaSequenceFile;
+import org.testng.Assert;
+import org.testng.annotations.BeforeSuite;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.*;
+
+public class GATKVariantContextUtilsUnitTest extends BaseTest {
+    private final static boolean DEBUG = false;
+
+    Allele Aref, T, C, G, Cref, ATC, ATCATC;
+    Allele ATCATCT;
+    Allele ATref;
+    Allele Anoref;
+    Allele GT;
+    Allele Symbolic;
+
+    private GenomeLocParser genomeLocParser;
+
    /**
     * Builds the shared Allele fixtures and a GenomeLocParser backed by the hg18
     * reference FASTA (path supplied by BaseTest) before any tests in the suite run.
     *
     * @throws IOException if the reference FASTA cannot be opened
     */
    @BeforeSuite
    public void setup() throws IOException {
        // alleles
        Aref = Allele.create("A", true);
        Cref = Allele.create("C", true);
        T = Allele.create("T");
        C = Allele.create("C");
        G = Allele.create("G");
        ATC = Allele.create("ATC");
        ATCATC = Allele.create("ATCATC");
        ATCATCT = Allele.create("ATCATCT");
        ATref = Allele.create("AT",true);
        Anoref = Allele.create("A",false);
        GT = Allele.create("GT",false);
        Symbolic = Allele.create("<Symbolic>", false);
        genomeLocParser = new GenomeLocParser(new CachingIndexedFastaSequenceFile(new File(hg18Reference)));
    }
+
    // Builds a diploid Genotype for `sample` with the given log10 error and PL values.
    private Genotype makeG(String sample, Allele a1, Allele a2, double log10pError, int... pls) {
        return new GenotypeBuilder(sample, Arrays.asList(a1, a2)).log10PError(log10pError).PL(pls).make();
    }
+
+
    // Builds a diploid Genotype for `sample` with the given log10 error and no PLs.
    private Genotype makeG(String sample, Allele a1, Allele a2, double log10pError) {
        return new GenotypeBuilder(sample, Arrays.asList(a1, a2)).log10PError(log10pError).make();
    }
+
    // Convenience overload: site-only VariantContext with no genotypes and no filters.
    private VariantContext makeVC(String source, List<Allele> alleles) {
        return makeVC(source, alleles, null, null);
    }
+
    // Convenience overload: VariantContext with the given genotypes and no filters.
    private VariantContext makeVC(String source, List<Allele> alleles, Genotype... g1) {
        return makeVC(source, alleles, Arrays.asList(g1));
    }
+
    // Convenience overload: "." means unfiltered (null filter set); any other string
    // becomes a single-element filter set.
    private VariantContext makeVC(String source, List<Allele> alleles, String filter) {
        return makeVC(source, alleles, filter.equals(".") ? null : new HashSet<String>(Arrays.asList(filter)));
    }
+
    // Convenience overload: VariantContext with the given filter set and no genotypes.
    private VariantContext makeVC(String source, List<Allele> alleles, Set<String> filters) {
        return makeVC(source, alleles, null, filters);
    }
+
    // Convenience overload: VariantContext with the given genotype collection and no filters.
    private VariantContext makeVC(String source, List<Allele> alleles, Collection<Genotype> genotypes) {
        return makeVC(source, alleles, genotypes, null);
    }
+
    // Base factory: builds a VariantContext on contig "1" starting at position 10;
    // the stop coordinate is derived from the length of the first (reference) allele.
    private VariantContext makeVC(String source, List<Allele> alleles, Collection<Genotype> genotypes, Set<String> filters) {
        int start = 10;
        int stop = start + alleles.get(0).length() - 1; // alleles.contains(ATC) ? start + 3 : start;
        return new VariantContextBuilder(source, "1", start, stop, alleles).genotypes(genotypes).filters(filters).make();
    }
+
+    // --------------------------------------------------------------------------------
+    //
+    // Test allele merging
+    //
+    // --------------------------------------------------------------------------------
+
+    private class MergeAllelesTest extends TestDataProvider {
+        List<List<Allele>> inputs;
+        List<Allele> expected;
+
+        private MergeAllelesTest(List<Allele>... arg) {
+            super(MergeAllelesTest.class);
+            LinkedList<List<Allele>> all = new LinkedList<>(Arrays.asList(arg));
+            expected = all.pollLast();
+            inputs = all;
+        }
+
+        public String toString() {
+            return String.format("MergeAllelesTest input=%s expected=%s", inputs, expected);
+        }
+    }
    /**
     * Data provider for testMergeAlleles.  In each case the trailing list is the
     * expected merged allele set for the preceding input lists (see MergeAllelesTest).
     */
    @DataProvider(name = "mergeAlleles")
    public Object[][] mergeAllelesData() {
        // first, do no harm
        new MergeAllelesTest(Arrays.asList(Aref),
                Arrays.asList(Aref));

        new MergeAllelesTest(Arrays.asList(Aref),
                Arrays.asList(Aref),
                Arrays.asList(Aref));

        new MergeAllelesTest(Arrays.asList(Aref),
                Arrays.asList(Aref, T),
                Arrays.asList(Aref, T));

        new MergeAllelesTest(Arrays.asList(Aref, C),
                Arrays.asList(Aref, T),
                Arrays.asList(Aref, C, T));

        new MergeAllelesTest(Arrays.asList(Aref, T),
                Arrays.asList(Aref, C),
                Arrays.asList(Aref, T, C)); // in order of appearance

        new MergeAllelesTest(Arrays.asList(Aref, C, T),
                Arrays.asList(Aref, C),
                Arrays.asList(Aref, C, T));

        new MergeAllelesTest(Arrays.asList(Aref, C, T), Arrays.asList(Aref, C, T));

        new MergeAllelesTest(Arrays.asList(Aref, T, C), Arrays.asList(Aref, T, C));

        new MergeAllelesTest(Arrays.asList(Aref, T, C),
                Arrays.asList(Aref, C),
                Arrays.asList(Aref, T, C)); // in order of appearance

        new MergeAllelesTest(Arrays.asList(Aref),
                Arrays.asList(Aref, ATC),
                Arrays.asList(Aref, ATC));

        new MergeAllelesTest(Arrays.asList(Aref),
                Arrays.asList(Aref, ATC, ATCATC),
                Arrays.asList(Aref, ATC, ATCATC));

        // alleles in the order we see them
        new MergeAllelesTest(Arrays.asList(Aref, ATCATC),
                Arrays.asList(Aref, ATC, ATCATC),
                Arrays.asList(Aref, ATCATC, ATC));

        // same
        new MergeAllelesTest(Arrays.asList(Aref, ATC),
                Arrays.asList(Aref, ATCATC),
                Arrays.asList(Aref, ATC, ATCATC));

        // mixed-length reference alleles: merging requires extending alts (ATCATCT, GT)
        new MergeAllelesTest(Arrays.asList(ATref, ATC, Anoref, G),
                Arrays.asList(Aref, ATCATC, G),
                Arrays.asList(ATref, ATC, Anoref, G, ATCATCT, GT));

        return MergeAllelesTest.getTests(MergeAllelesTest.class);
    }
+
+    @Test(enabled = !DEBUG, dataProvider = "mergeAlleles")
+    public void testMergeAlleles(MergeAllelesTest cfg) {
+        final List<VariantContext> inputs = new ArrayList<VariantContext>();
+
+        int i = 0;
+        for ( final List<Allele> alleles : cfg.inputs ) {
+            final String name = "vcf" + ++i;
+            inputs.add(makeVC(name, alleles));
+        }
+
+        final List<String> priority = vcs2priority(inputs);
+
+        final VariantContext merged = GATKVariantContextUtils.simpleMerge(
+                inputs, priority,
+                GATKVariantContextUtils.FilteredRecordMergeType.KEEP_IF_ANY_UNFILTERED,
+                GATKVariantContextUtils.GenotypeMergeType.PRIORITIZE, false, false, "set", false, false);
+
+        Assert.assertEquals(merged.getAlleles().size(),cfg.expected.size());
+        Assert.assertEquals(merged.getAlleles(), cfg.expected);
+    }
+
+    // --------------------------------------------------------------------------------
+    //
+    // Test rsID merging
+    //
+    // --------------------------------------------------------------------------------
+
+    private class SimpleMergeRSIDTest extends TestDataProvider {
+        List<String> inputs;
+        String expected;
+
+        private SimpleMergeRSIDTest(String... arg) {
+            super(SimpleMergeRSIDTest.class);
+            LinkedList<String> allStrings = new LinkedList<String>(Arrays.asList(arg));
+            expected = allStrings.pollLast();
+            inputs = allStrings;
+        }
+
+        public String toString() {
+            return String.format("SimpleMergeRSIDTest vc=%s expected=%s", inputs, expected);
+        }
+    }
+
    /**
     * Data provider for testRSIDMerge.  In each case the final string is the expected
     * merged ID for the preceding input IDs ("." denotes a missing ID).
     */
    @DataProvider(name = "simplemergersiddata")
    public Object[][] createSimpleMergeRSIDData() {
        new SimpleMergeRSIDTest(".", ".");
        new SimpleMergeRSIDTest(".", ".", ".");
        new SimpleMergeRSIDTest("rs1", "rs1");
        new SimpleMergeRSIDTest("rs1", "rs1", "rs1");
        new SimpleMergeRSIDTest(".", "rs1", "rs1");
        new SimpleMergeRSIDTest("rs1", ".", "rs1");
        new SimpleMergeRSIDTest("rs1", "rs2", "rs1,rs2");
        new SimpleMergeRSIDTest("rs1", "rs2", "rs1", "rs1,rs2"); // duplicates
        new SimpleMergeRSIDTest("rs2", "rs1", "rs2,rs1");
        new SimpleMergeRSIDTest("rs2", "rs1", ".", "rs2,rs1");
        new SimpleMergeRSIDTest("rs2", ".", "rs1", "rs2,rs1");
        new SimpleMergeRSIDTest("rs1", ".", ".", "rs1");
        new SimpleMergeRSIDTest("rs1", "rs2", "rs3", "rs1,rs2,rs3");

        return SimpleMergeRSIDTest.getTests(SimpleMergeRSIDTest.class);
    }
+
+    @Test(enabled = !DEBUG, dataProvider = "simplemergersiddata")
+    public void testRSIDMerge(SimpleMergeRSIDTest cfg) {
+        VariantContext snpVC1 = makeVC("snpvc1", Arrays.asList(Aref, T));
+        final List<VariantContext> inputs = new ArrayList<VariantContext>();
+
+        for ( final String id : cfg.inputs ) {
+            inputs.add(new VariantContextBuilder(snpVC1).id(id).make());
+        }
+
+        final VariantContext merged = GATKVariantContextUtils.simpleMerge(
+                inputs, null,
+                GATKVariantContextUtils.FilteredRecordMergeType.KEEP_IF_ANY_UNFILTERED,
+                GATKVariantContextUtils.GenotypeMergeType.UNSORTED, false, false, "set", false, false);
+        Assert.assertEquals(merged.getID(), cfg.expected);
+    }
+
+    // --------------------------------------------------------------------------------
+    //
+    // Test filtered merging
+    //
+    // --------------------------------------------------------------------------------
+
    /**
     * Test case for filtered-record merging: two input VCs, the expected merged VC,
     * the merge type to apply, and the expected value of the "set" attribute.
     */
    private class MergeFilteredTest extends TestDataProvider {
        List<VariantContext> inputs;
        VariantContext expected;
        // expected value of the merged record's "set" annotation
        String setExpected;
        GATKVariantContextUtils.FilteredRecordMergeType type;


        // Convenience constructor defaulting to KEEP_IF_ANY_UNFILTERED merging.
        private MergeFilteredTest(String name, VariantContext input1, VariantContext input2, VariantContext expected, String setExpected) {
            this(name, input1, input2, expected, GATKVariantContextUtils.FilteredRecordMergeType.KEEP_IF_ANY_UNFILTERED, setExpected);
        }

        private MergeFilteredTest(String name, VariantContext input1, VariantContext input2, VariantContext expected, GATKVariantContextUtils.FilteredRecordMergeType type, String setExpected) {
            super(MergeFilteredTest.class, name);
            LinkedList<VariantContext> all = new LinkedList<VariantContext>(Arrays.asList(input1, input2));
            this.expected = expected;
            this.type = type;
            inputs = all;
            this.setExpected = setExpected;
        }

        public String toString() {
            return String.format("%s input=%s expected=%s", super.toString(), inputs, expected);
        }
    }
+
+    /**
+     * Registers the filtered-record merge cases (each MergeFilteredTest self-registers
+     * via TestDataProvider) and returns the collected test rows.
+     */
+    @DataProvider(name = "mergeFiltered")
+    public Object[][] mergeFilteredData() {
+        // both inputs PASS -> merged record is the plain intersection
+        new MergeFilteredTest("AllPass",
+                makeVC("1", Arrays.asList(Aref, T), VariantContext.PASSES_FILTERS),
+                makeVC("2", Arrays.asList(Aref, T), VariantContext.PASSES_FILTERS),
+                makeVC("3", Arrays.asList(Aref, T), VariantContext.PASSES_FILTERS),
+                GATKVariantContextUtils.MERGE_INTERSECTION);
+
+        // both inputs unfiltered ("." filter field)
+        new MergeFilteredTest("noFilters",
+                makeVC("1", Arrays.asList(Aref, T), "."),
+                makeVC("2", Arrays.asList(Aref, T), "."),
+                makeVC("3", Arrays.asList(Aref, T), "."),
+                GATKVariantContextUtils.MERGE_INTERSECTION);
+
+        // one filtered input -> "set" records which source was filtered
+        new MergeFilteredTest("oneFiltered",
+                makeVC("1", Arrays.asList(Aref, T), "."),
+                makeVC("2", Arrays.asList(Aref, T), "FAIL"),
+                makeVC("3", Arrays.asList(Aref, T), "."),
+                String.format("1-%s2", GATKVariantContextUtils.MERGE_FILTER_PREFIX));
+
+        new MergeFilteredTest("onePassOneFail",
+                makeVC("1", Arrays.asList(Aref, T), VariantContext.PASSES_FILTERS),
+                makeVC("2", Arrays.asList(Aref, T), "FAIL"),
+                makeVC("3", Arrays.asList(Aref, T), VariantContext.PASSES_FILTERS),
+                String.format("1-%s2", GATKVariantContextUtils.MERGE_FILTER_PREFIX));
+
+        // every input filtered -> merged record marked filtered-in-all
+        new MergeFilteredTest("AllFiltered",
+                makeVC("1", Arrays.asList(Aref, T), "FAIL"),
+                makeVC("2", Arrays.asList(Aref, T), "FAIL"),
+                makeVC("3", Arrays.asList(Aref, T), "FAIL"),
+                GATKVariantContextUtils.MERGE_FILTER_IN_ALL);
+
+        // test ALL vs. ANY
+        new MergeFilteredTest("FailOneUnfiltered",
+                makeVC("1", Arrays.asList(Aref, T), "FAIL"),
+                makeVC("2", Arrays.asList(Aref, T), "."),
+                makeVC("3", Arrays.asList(Aref, T), "."),
+                GATKVariantContextUtils.FilteredRecordMergeType.KEEP_IF_ANY_UNFILTERED,
+                String.format("%s1-2", GATKVariantContextUtils.MERGE_FILTER_PREFIX));
+
+        new MergeFilteredTest("OneFailAllUnfilteredArg",
+                makeVC("1", Arrays.asList(Aref, T), "FAIL"),
+                makeVC("2", Arrays.asList(Aref, T), "."),
+                makeVC("3", Arrays.asList(Aref, T), "FAIL"),
+                GATKVariantContextUtils.FilteredRecordMergeType.KEEP_IF_ALL_UNFILTERED,
+                String.format("%s1-2", GATKVariantContextUtils.MERGE_FILTER_PREFIX));
+
+        // test excluding allele in filtered record
+        new MergeFilteredTest("DontIncludeAlleleOfFilteredRecords",
+                makeVC("1", Arrays.asList(Aref, T), "."),
+                makeVC("2", Arrays.asList(Aref, T), "FAIL"),
+                makeVC("3", Arrays.asList(Aref, T), "."),
+                String.format("1-%s2", GATKVariantContextUtils.MERGE_FILTER_PREFIX));
+
+        // promotion of site from unfiltered to PASSES
+        new MergeFilteredTest("UnfilteredPlusPassIsPass",
+                makeVC("1", Arrays.asList(Aref, T), "."),
+                makeVC("2", Arrays.asList(Aref, T), VariantContext.PASSES_FILTERS),
+                makeVC("3", Arrays.asList(Aref, T), VariantContext.PASSES_FILTERS),
+                GATKVariantContextUtils.MERGE_INTERSECTION);
+
+        // reference-only inputs
+        new MergeFilteredTest("RefInAll",
+                makeVC("1", Arrays.asList(Aref), VariantContext.PASSES_FILTERS),
+                makeVC("2", Arrays.asList(Aref), VariantContext.PASSES_FILTERS),
+                makeVC("3", Arrays.asList(Aref), VariantContext.PASSES_FILTERS),
+                GATKVariantContextUtils.MERGE_REF_IN_ALL);
+
+        // only source "2" contributes the variant allele, so "set" is just "2"
+        new MergeFilteredTest("RefInOne",
+                makeVC("1", Arrays.asList(Aref), VariantContext.PASSES_FILTERS),
+                makeVC("2", Arrays.asList(Aref, T), VariantContext.PASSES_FILTERS),
+                makeVC("3", Arrays.asList(Aref, T), VariantContext.PASSES_FILTERS),
+                "2");
+
+        return MergeFilteredTest.getTests(MergeFilteredTest.class);
+    }
+
+    @Test(enabled = !DEBUG, dataProvider = "mergeFiltered")
+    public void testMergeFiltered(MergeFilteredTest cfg) {
+        // Merge the inputs in priority order, then verify alleles, the "set"
+        // annotation, and the resulting filter field against the expectation.
+        final List<String> mergePriority = vcs2priority(cfg.inputs);
+        final VariantContext result = GATKVariantContextUtils.simpleMerge(
+                cfg.inputs, mergePriority, cfg.type, GATKVariantContextUtils.GenotypeMergeType.PRIORITIZE, true, false, "set", false, false);
+
+        Assert.assertEquals(result.getAlleles(), cfg.expected.getAlleles());
+
+        Assert.assertEquals(result.getAttribute("set"), cfg.setExpected);
+
+        Assert.assertEquals(result.getFilters(), cfg.expected.getFilters());
+    }
+
+    // --------------------------------------------------------------------------------
+    //
+    // Test genotype merging
+    //
+    // --------------------------------------------------------------------------------
+
+    private class MergeGenotypesTest extends TestDataProvider {
+        List<VariantContext> inputs;   // VCs to merge, in order
+        VariantContext expected;       // expected merge result (last vararg to the ctor)
+        List<String> priority;         // genotype-merge priority, by source name
+
+        /**
+         * @param name     test-case name for reporting
+         * @param priority comma-separated source names giving genotype-merge priority
+         * @param arg      input VariantContexts followed by the expected result as the last element
+         */
+        private MergeGenotypesTest(String name, String priority, VariantContext... arg) {
+            super(MergeGenotypesTest.class, name);
+            LinkedList<VariantContext> all = new LinkedList<VariantContext>(Arrays.asList(arg));
+            // pollLast() strips the expected VC off the end, leaving only the inputs.
+            this.expected = all.pollLast();
+            inputs = all;
+            this.priority = Arrays.asList(priority.split(","));
+        }
+
+        @Override
+        public String toString() {
+            return String.format("%s input=%s expected=%s", super.toString(), inputs, expected);
+        }
+    }
+
+    /**
+     * Registers genotype-merge cases (priority ordering, partial sample overlap,
+     * no-call preservation, and PL handling) and returns the collected test rows.
+     */
+    @DataProvider(name = "mergeGenotypes")
+    public Object[][] mergeGenotypesData() {
+        new MergeGenotypesTest("TakeGenotypeByPriority-1,2", "1,2",
+                makeVC("1", Arrays.asList(Aref, T), makeG("s1", Aref, T, -1)),
+                makeVC("2", Arrays.asList(Aref, T), makeG("s1", Aref, T, -2)),
+                makeVC("3", Arrays.asList(Aref, T), makeG("s1", Aref, T, -1)));
+
+        new MergeGenotypesTest("TakeGenotypeByPriority-1,2-nocall", "1,2",
+                makeVC("1", Arrays.asList(Aref, T), makeG("s1", Allele.NO_CALL, Allele.NO_CALL, -1)),
+                makeVC("2", Arrays.asList(Aref, T), makeG("s1", Aref, T, -2)),
+                makeVC("3", Arrays.asList(Aref, T), makeG("s1", Allele.NO_CALL, Allele.NO_CALL, -1)));
+
+        new MergeGenotypesTest("TakeGenotypeByPriority-2,1", "2,1",
+                makeVC("1", Arrays.asList(Aref, T), makeG("s1", Aref, T, -1)),
+                makeVC("2", Arrays.asList(Aref, T), makeG("s1", Aref, T, -2)),
+                makeVC("3", Arrays.asList(Aref, T), makeG("s1", Aref, T, -2)));
+
+        new MergeGenotypesTest("NonOverlappingGenotypes", "1,2",
+                makeVC("1", Arrays.asList(Aref, T), makeG("s1", Aref, T, -1)),
+                makeVC("2", Arrays.asList(Aref, T), makeG("s2", Aref, T, -2)),
+                makeVC("3", Arrays.asList(Aref, T), makeG("s1", Aref, T, -1), makeG("s2", Aref, T, -2)));
+
+        new MergeGenotypesTest("PreserveNoCall", "1,2",
+                makeVC("1", Arrays.asList(Aref, T), makeG("s1", Allele.NO_CALL, Allele.NO_CALL, -1)),
+                makeVC("2", Arrays.asList(Aref, T), makeG("s2", Aref, T, -2)),
+                makeVC("3", Arrays.asList(Aref, T), makeG("s1", Allele.NO_CALL, Allele.NO_CALL, -1), makeG("s2", Aref, T, -2)));
+
+        new MergeGenotypesTest("PerserveAlleles", "1,2",
+                makeVC("1", Arrays.asList(Aref, T), makeG("s1", Aref, T, -1)),
+                makeVC("2", Arrays.asList(Aref, C), makeG("s2", Aref, C, -2)),
+                makeVC("3", Arrays.asList(Aref, T, C), makeG("s1", Aref, T, -1), makeG("s2", Aref, C, -2)));
+
+        new MergeGenotypesTest("TakeGenotypePartialOverlap-1,2", "1,2",
+                makeVC("1", Arrays.asList(Aref, T), makeG("s1", Aref, T, -1)),
+                makeVC("2", Arrays.asList(Aref, T), makeG("s1", Aref, T, -2), makeG("s3", Aref, T, -3)),
+                makeVC("3", Arrays.asList(Aref, T), makeG("s1", Aref, T, -1), makeG("s3", Aref, T, -3)));
+
+        new MergeGenotypesTest("TakeGenotypePartialOverlap-2,1", "2,1",
+                makeVC("1", Arrays.asList(Aref, T), makeG("s1", Aref, T, -1)),
+                makeVC("2", Arrays.asList(Aref, T), makeG("s1", Aref, T, -2), makeG("s3", Aref, T, -3)),
+                makeVC("3", Arrays.asList(Aref, T), makeG("s1", Aref, T, -2), makeG("s3", Aref, T, -3)));
+
+        //
+        // merging genotypes with PLs
+        //
+
+        // first, do no harm
+        new MergeGenotypesTest("OrderedPLs", "1",
+                makeVC("1", Arrays.asList(Aref, T), makeG("s1", Aref, T, -1, 1, 2, 3)),
+                makeVC("1", Arrays.asList(Aref, T), makeG("s1", Aref, T, -1, 1, 2, 3)));
+
+        // first, do no harm
+        new MergeGenotypesTest("OrderedPLs-3Alleles", "1",
+                makeVC("1", Arrays.asList(Aref, C, T), makeG("s1", Aref, T, -1, 1, 2, 3, 4, 5, 6)),
+                makeVC("1", Arrays.asList(Aref, C, T), makeG("s1", Aref, T, -1, 1, 2, 3, 4, 5, 6)));
+
+        // first, do no harm
+        new MergeGenotypesTest("OrderedPLs-3Alleles-2", "1",
+                makeVC("1", Arrays.asList(Aref, T, C), makeG("s1", Aref, T, -1, 1, 2, 3, 4, 5, 6)),
+                makeVC("1", Arrays.asList(Aref, T, C), makeG("s1", Aref, T, -1, 1, 2, 3, 4, 5, 6)));
+
+        // two samples with PLs at the same multi-allelic site; both genotypes must survive intact
+        // (renamed from a copy-pasted duplicate of "OrderedPLs-3Alleles-2" above)
+        new MergeGenotypesTest("OrderedPLs-3Alleles-2Samples", "1",
+                makeVC("1", Arrays.asList(Aref, T, C), makeG("s1", Aref, T, -1, 1, 2, 3, 4, 5, 6)),
+                makeVC("1", Arrays.asList(Aref, T, C), makeG("s2", Aref, C, -1, 1, 2, 3, 4, 5, 6)),
+                makeVC("1", Arrays.asList(Aref, T, C), makeG("s1", Aref, T, -1, 1, 2, 3, 4, 5, 6), makeG("s2", Aref, C, -1, 1, 2, 3, 4, 5, 6)));
+
+        new MergeGenotypesTest("TakeGenotypePartialOverlapWithPLs-2,1", "2,1",
+                makeVC("1", Arrays.asList(Aref, T), makeG("s1", Aref, T, -1,5,0,3)),
+                makeVC("2", Arrays.asList(Aref, T), makeG("s1", Aref, T, -2,4,0,2), makeG("s3", Aref, T, -3,3,0,2)),
+                makeVC("3", Arrays.asList(Aref, T), makeG("s1", Aref, T, -2,4,0,2), makeG("s3", Aref, T, -3,3,0,2)));
+
+        new MergeGenotypesTest("TakeGenotypePartialOverlapWithPLs-1,2", "1,2",
+                makeVC("1", Arrays.asList(Aref,ATC), makeG("s1", Aref, ATC, -1,5,0,3)),
+                makeVC("2", Arrays.asList(Aref, T), makeG("s1", Aref, T, -2,4,0,2), makeG("s3", Aref, T, -3,3,0,2)),
+                // no likelihoods on result since type changes to mixed multiallelic
+                makeVC("3", Arrays.asList(Aref, ATC, T), makeG("s1", Aref, ATC, -1), makeG("s3", Aref, T, -3)));
+
+        new MergeGenotypesTest("MultipleSamplePLsDifferentOrder", "1,2",
+                makeVC("1", Arrays.asList(Aref, C, T), makeG("s1", Aref, C, -1, 1, 2, 3, 4, 5, 6)),
+                makeVC("2", Arrays.asList(Aref, T, C), makeG("s2", Aref, T, -2, 6, 5, 4, 3, 2, 1)),
+                // no likelihoods on result since type changes to mixed multiallelic
+                makeVC("3", Arrays.asList(Aref, C, T), makeG("s1", Aref, C, -1), makeG("s2", Aref, T, -2)));
+
+        return MergeGenotypesTest.getTests(MergeGenotypesTest.class);
+    }
+
+    @Test(enabled = !DEBUG, dataProvider = "mergeGenotypes")
+    public void testMergeGenotypes(MergeGenotypesTest cfg) {
+        // Merge with the case's priority list, then check alleles and
+        // (approximate) genotype equality against the expected record.
+        final VariantContext result = GATKVariantContextUtils.simpleMerge(
+                cfg.inputs, cfg.priority, GATKVariantContextUtils.FilteredRecordMergeType.KEEP_IF_ANY_UNFILTERED,
+                GATKVariantContextUtils.GenotypeMergeType.PRIORITIZE, true, false, "set", false, false);
+
+        Assert.assertEquals(result.getAlleles(), cfg.expected.getAlleles());
+
+        assertGenotypesAreMostlyEqual(result.getGenotypes(), cfg.expected.getGenotypes());
+    }
+
+    // necessary to not overload equals for genotypes
+    /**
+     * Asserts two GenotypesContexts are equivalent on the fields these tests care about
+     * (alleles, GQ, and likelihood vectors), since we deliberately don't rely on
+     * Genotype.equals here.
+     */
+    private void assertGenotypesAreMostlyEqual(GenotypesContext actual, GenotypesContext expected) {
+        if (actual == expected) {
+            return; // same instance (covers both-null as well)
+        }
+
+        // Fixed message text: these are GenotypesContexts, not Maps.
+        if (actual == null || expected == null) {
+            Assert.fail("GenotypesContexts not equal: expected: " + expected + " and actual: " + actual);
+        }
+
+        if (actual.size() != expected.size()) {
+            Assert.fail("GenotypesContexts do not have the same size:" + actual.size() + " != " + expected.size());
+        }
+
+        for (Genotype value : actual) {
+            Genotype expectedValue = expected.get(value.getSampleName());
+            // Fail with a clear message (instead of an NPE) if the sample is absent on the expected side.
+            Assert.assertNotNull(expectedValue, "Missing expected genotype for sample " + value.getSampleName());
+
+            Assert.assertEquals(value.getAlleles(), expectedValue.getAlleles(), "Alleles in Genotype aren't equal");
+            Assert.assertEquals(value.getGQ(), expectedValue.getGQ(), "GQ values aren't equal");
+            Assert.assertEquals(value.hasLikelihoods(), expectedValue.hasLikelihoods(), "Either both have likelihoods or both not");
+            if ( value.hasLikelihoods() )
+                Assert.assertEquals(value.getLikelihoods().getAsVector(), expectedValue.getLikelihoods().getAsVector(), "Genotype likelihoods aren't equal");
+        }
+    }
+
+    @Test(enabled = !DEBUG)
+    public void testMergeGenotypesUniquify() {
+        // Two inputs share sample "s1"; UNIQUIFY should disambiguate the merged
+        // sample names by suffixing the source index ("s1.1", "s1.2").
+        final VariantContext first = makeVC("1", Arrays.asList(Aref, T), makeG("s1", Aref, T, -1));
+        final VariantContext second = makeVC("2", Arrays.asList(Aref, T), makeG("s1", Aref, T, -2));
+
+        final VariantContext merged = GATKVariantContextUtils.simpleMerge(
+                Arrays.asList(first, second), null, GATKVariantContextUtils.FilteredRecordMergeType.KEEP_IF_ANY_UNFILTERED,
+                GATKVariantContextUtils.GenotypeMergeType.UNIQUIFY, false, false, "set", false, false);
+
+        Assert.assertEquals(merged.getSampleNames(), new HashSet<>(Arrays.asList("s1.1", "s1.2")));
+    }
+
+// TODO: dead code — delete once REQUIRE_UNIQUE behavior is covered elsewhere; note the
+// commented-out call below passes one more boolean argument than the active simpleMerge calls.
+//    @Test(expectedExceptions = IllegalStateException.class)
+//    public void testMergeGenotypesRequireUnique() {
+//        final VariantContext vc1 = makeVC("1", Arrays.asList(Aref, T), makeG("s1", Aref, T, -1));
+//        final VariantContext vc2 = makeVC("2", Arrays.asList(Aref, T), makeG("s1", Aref, T, -2));
+//
+//        final VariantContext merged = VariantContextUtils.simpleMerge(
+//                Arrays.asList(vc1, vc2), null, VariantContextUtils.FilteredRecordMergeType.KEEP_IF_ANY_UNFILTERED,
+//                VariantContextUtils.GenotypeMergeType.REQUIRE_UNIQUE, false, false, "set", false, false, false);
+//    }
+
+    // --------------------------------------------------------------------------------
+    //
+    // Misc. tests
+    //
+    // --------------------------------------------------------------------------------
+
+    @Test(enabled = !DEBUG)
+    public void testAnnotationSet() {
+        for ( final boolean annotate : Arrays.asList(true, false)) {
+            for ( final String set : Arrays.asList("set", "combine", "x")) {
+                final List<String> priority = Arrays.asList("1", "2");
+                VariantContext vc1 = makeVC("1", Arrays.asList(Aref, T), VariantContext.PASSES_FILTERS);
+                VariantContext vc2 = makeVC("2", Arrays.asList(Aref, T), VariantContext.PASSES_FILTERS);
+
+                final VariantContext merged = GATKVariantContextUtils.simpleMerge(
+                        Arrays.asList(vc1, vc2), priority, GATKVariantContextUtils.FilteredRecordMergeType.KEEP_IF_ANY_UNFILTERED,
+                        GATKVariantContextUtils.GenotypeMergeType.PRIORITIZE, annotate, false, set, false, false);
+
+                if ( annotate )
+                    Assert.assertEquals(merged.getAttribute(set), GATKVariantContextUtils.MERGE_INTERSECTION);
+                else
+                    Assert.assertFalse(merged.hasAttribute(set));
+            }
+        }
+    }
+
+    /**
+     * Builds a simpleMerge priority list from the source names of the given VCs,
+     * preserving the collection's iteration order.
+     */
+    // Note: dropped the redundant 'final' modifier — private static methods cannot be overridden.
+    private static List<String> vcs2priority(final Collection<VariantContext> vcs) {
+        final List<String> priority = new ArrayList<>(vcs.size());
+
+        for ( final VariantContext vc : vcs ) {
+            priority.add(vc.getSource());
+        }
+
+        return priority;
+    }
+
+    // --------------------------------------------------------------------------------
+    //
+    // basic allele clipping test
+    //
+    // --------------------------------------------------------------------------------
+
+    private class ReverseClippingPositionTestProvider extends TestDataProvider {
+        final String ref;                                      // reference bases for the clipping computation
+        final List<Allele> alleles = new ArrayList<Allele>();  // alleles built from the ctor strings
+        final int expectedClip;                                // expected reverse-clip length
+
+        // Builds a test case from a reference string plus one string per allele.
+        private ReverseClippingPositionTestProvider(final int expectedClip, final String ref, final String... alleles) {
+            super(ReverseClippingPositionTestProvider.class);
+            this.ref = ref;
+            this.expectedClip = expectedClip;
+            for ( final String alleleString : alleles ) {
+                this.alleles.add(Allele.create(alleleString));
+            }
+        }
+
+        @Override
+        public String toString() {
+            return String.format("ref=%s allele=%s reverse clip %d", ref, alleles, expectedClip);
+        }
+    }
+
+    /**
+     * Registers reverse-clipping cases (how many trailing bases are shared by all
+     * alleles and can be clipped) and returns the collected test rows.
+     */
+    @DataProvider(name = "ReverseClippingPositionTestProvider")
+    public Object[][] makeReverseClippingPositionTestProvider() {
+        // pair clipping
+        new ReverseClippingPositionTestProvider(0, "ATT", "CCG");
+        new ReverseClippingPositionTestProvider(1, "ATT", "CCT");
+        new ReverseClippingPositionTestProvider(2, "ATT", "CTT");
+        new ReverseClippingPositionTestProvider(2, "ATT", "ATT");  // cannot completely clip allele
+
+        // triplets
+        new ReverseClippingPositionTestProvider(0, "ATT", "CTT", "CGG");
+        new ReverseClippingPositionTestProvider(1, "ATT", "CTT", "CGT"); // the T can go
+        new ReverseClippingPositionTestProvider(2, "ATT", "CTT", "CTT"); // both Ts can go
+
+        return ReverseClippingPositionTestProvider.getTests(ReverseClippingPositionTestProvider.class);
+    }
+
+    @Test(enabled = !DEBUG, dataProvider = "ReverseClippingPositionTestProvider")
+    public void testReverseClippingPositionTestProvider(ReverseClippingPositionTestProvider cfg) {
+        // The computed reverse-clip length must match the case's expectation.
+        final int clip = GATKVariantContextUtils.computeReverseClipping(cfg.alleles, cfg.ref.getBytes());
+        Assert.assertEquals(clip, cfg.expectedClip);
+    }
+
+
+    // --------------------------------------------------------------------------------
+    //
+    // test splitting into bi-allelics
+    //
+    // --------------------------------------------------------------------------------
+
+    /**
+     * Builds cases for splitting multi-allelic sites into biallelic VCs, including
+     * indel cases whose per-allele split requires re-trimming the ref allele and stop
+     * position.  Each row is {input VC, expected list of biallelic VCs}.
+     */
+    @DataProvider(name = "SplitBiallelics")
+    public Object[][] makeSplitBiallelics() throws CloneNotSupportedException {
+        List<Object[]> tests = new ArrayList<Object[]>();
+
+        final VariantContextBuilder root = new VariantContextBuilder("x", "20", 10, 10, Arrays.asList(Aref, C));
+
+        // biallelic -> biallelic
+        tests.add(new Object[]{root.make(), Arrays.asList(root.make())});
+
+        // monos -> monos
+        root.alleles(Arrays.asList(Aref));
+        tests.add(new Object[]{root.make(), Arrays.asList(root.make())});
+
+        // triallelic SNP -> two biallelic SNPs
+        root.alleles(Arrays.asList(Aref, C, T));
+        tests.add(new Object[]{root.make(),
+                Arrays.asList(
+                        root.alleles(Arrays.asList(Aref, C)).make(),
+                        root.alleles(Arrays.asList(Aref, T)).make())});
+
+        root.alleles(Arrays.asList(Aref, C, T, G));
+        tests.add(new Object[]{root.make(),
+                Arrays.asList(
+                        root.alleles(Arrays.asList(Aref, C)).make(),
+                        root.alleles(Arrays.asList(Aref, T)).make(),
+                        root.alleles(Arrays.asList(Aref, G)).make())});
+
+        // Indel alleles for the trimming cases below.  The non-ref "C" is named Cvar
+        // to avoid shadowing the class-level C allele used in the SNP cases above.
+        final Allele Cvar    = Allele.create("C");
+        final Allele CA      = Allele.create("CA");
+        final Allele CAA     = Allele.create("CAA");
+        final Allele CAAAA   = Allele.create("CAAAA");
+        final Allele CAAAAA  = Allele.create("CAAAAA");
+        final Allele Cref    = Allele.create("C", true);
+        final Allele CAref   = Allele.create("CA", true);
+        final Allele CAAref  = Allele.create("CAA", true);
+        final Allele CAAAref = Allele.create("CAAA", true);
+
+        root.alleles(Arrays.asList(Cref, CA, CAA));
+        tests.add(new Object[]{root.make(),
+                Arrays.asList(
+                        root.alleles(Arrays.asList(Cref, CA)).make(),
+                        root.alleles(Arrays.asList(Cref, CAA)).make())});
+
+        root.alleles(Arrays.asList(CAAref, Cvar, CA)).stop(12);
+        tests.add(new Object[]{root.make(),
+                Arrays.asList(
+                        root.alleles(Arrays.asList(CAAref, Cvar)).make(),
+                        root.alleles(Arrays.asList(CAref, Cvar)).stop(11).make())});
+
+        root.alleles(Arrays.asList(CAAAref, Cvar, CA, CAA)).stop(13);
+        tests.add(new Object[]{root.make(),
+                Arrays.asList(
+                        root.alleles(Arrays.asList(CAAAref, Cvar)).make(),
+                        root.alleles(Arrays.asList(CAAref, Cvar)).stop(12).make(),
+                        root.alleles(Arrays.asList(CAref, Cvar)).stop(11).make())});
+
+        // mixed insertions and deletions against the same ref allele
+        root.alleles(Arrays.asList(CAAAref, CAAAAA, CAAAA, CAA, Cvar)).stop(13);
+        tests.add(new Object[]{root.make(),
+                Arrays.asList(
+                        root.alleles(Arrays.asList(Cref, CAA)).stop(10).make(),
+                        root.alleles(Arrays.asList(Cref, CA)).stop(10).make(),
+                        root.alleles(Arrays.asList(CAref, Cvar)).stop(11).make(),
+                        root.alleles(Arrays.asList(CAAAref, Cvar)).stop(13).make())});
+
+        // tandem-repeat copy-number alleles
+        final Allele threeCopies = Allele.create("GTTTTATTTTATTTTA", true);
+        final Allele twoCopies = Allele.create("GTTTTATTTTA", true);
+        final Allele zeroCopies = Allele.create("G", false);
+        final Allele oneCopies = Allele.create("GTTTTA", false);
+        tests.add(new Object[]{root.alleles(Arrays.asList(threeCopies, zeroCopies, oneCopies)).stop(25).make(),
+                Arrays.asList(
+                        root.alleles(Arrays.asList(threeCopies, zeroCopies)).stop(25).make(),
+                        root.alleles(Arrays.asList(twoCopies, zeroCopies)).stop(20).make())});
+
+        return tests.toArray(new Object[][]{});
+    }
+
+    @Test(enabled = !DEBUG, dataProvider = "SplitBiallelics")
+    public void testSplitBiallelicsNoGenotypes(final VariantContext vc, final List<VariantContext> expectedBiallelics) {
+        // Split the site and compare piece-by-piece, in order, against the expected
+        // biallelic decomposition.
+        final List<VariantContext> observed = GATKVariantContextUtils.splitVariantContextToBiallelics(vc);
+        Assert.assertEquals(observed.size(), expectedBiallelics.size());
+        for ( int idx = 0; idx < observed.size(); idx++ ) {
+            assertVariantContextsAreEqual(observed.get(idx), expectedBiallelics.get(idx));
+        }
+    }
+
+    @Test(enabled = !DEBUG, dataProvider = "SplitBiallelics", dependsOnMethods = "testSplitBiallelicsNoGenotypes")
+    public void testSplitBiallelicsGenotypes(final VariantContext vc, final List<VariantContext> expectedBiallelics) {
+        final List<Genotype> genotypes = new ArrayList<Genotype>();
+
+        int sampleI = 0;
+        for ( final List<Allele> alleles : Utils.makePermutations(vc.getAlleles(), 2, true) ) {
+            genotypes.add(GenotypeBuilder.create("sample" + sampleI++, alleles));
+        }
+        genotypes.add(GenotypeBuilder.createMissing("missing", 2));
+
+        final VariantContext vcWithGenotypes = new VariantContextBuilder(vc).genotypes(genotypes).make();
+
+        final List<VariantContext> biallelics = GATKVariantContextUtils.splitVariantContextToBiallelics(vcWithGenotypes);
+        for ( int i = 0; i < biallelics.size(); i++ ) {
+            final VariantContext actual = biallelics.get(i);
+            Assert.assertEquals(actual.getNSamples(), vcWithGenotypes.getNSamples()); // not dropping any samples
+
+            for ( final Genotype inputGenotype : genotypes ) {
+                final Genotype actualGenotype = actual.getGenotype(inputGenotype.getSampleName());
+                Assert.assertNotNull(actualGenotype);
+                if ( ! vc.isVariant() || vc.isBiallelic() )
+                    Assert.assertEquals(actualGenotype, vcWithGenotypes.getGenotype(inputGenotype.getSampleName()));
+                else
+                    Assert.assertTrue(actualGenotype.isNoCall());
+            }
+        }
+    }
+
+    // --------------------------------------------------------------------------------
+    //
+    // Test repeats
+    //
+    // --------------------------------------------------------------------------------
+
+    private class RepeatDetectorTest extends TestDataProvider {
+        String ref;           // full reference context string handed to isTandemRepeat
+        boolean isTrueRepeat; // expected isTandemRepeat answer
+        VariantContext vc;    // variant built from the ctor's allele strings
+
+        /**
+         * @param isTrueRepeat     expected isTandemRepeat result
+         * @param ref              reference bases providing the repeat context
+         * @param refAlleleString  the reference allele
+         * @param altAlleleStrings one or more alternate alleles
+         */
+        private RepeatDetectorTest(boolean isTrueRepeat, String ref, String refAlleleString, String ... altAlleleStrings) {
+            super(RepeatDetectorTest.class);
+            this.isTrueRepeat = isTrueRepeat;
+            this.ref = ref;
+
+            List<Allele> alleles = new LinkedList<Allele>();
+            final Allele refAllele = Allele.create(refAlleleString, true);
+            alleles.add(refAllele);
+            for ( final String altString: altAlleleStrings) {
+                final Allele alt = Allele.create(altString, false);
+                alleles.add(alt);
+            }
+
+            // span is anchored at chr1:1 and covers exactly the ref allele
+            VariantContextBuilder builder = new VariantContextBuilder("test", "chr1", 1, refAllele.length(), alleles);
+            this.vc = builder.make();
+        }
+
+        @Override
+        public String toString() {
+            return String.format("%s refBases=%s trueRepeat=%b vc=%s", super.toString(), ref, isTrueRepeat, vc);
+        }
+    }
+
+    /**
+     * Registers tandem-repeat detection cases (simple, complex, and multi-allelic
+     * indels) and returns the collected test rows.
+     */
+    @DataProvider(name = "RepeatDetectorTest")
+    public Object[][] makeRepeatDetectorTest() {
+        // simple one-base repeat units
+        new RepeatDetectorTest(true,  "NAAC", "N", "NA");
+        new RepeatDetectorTest(true,  "NAAC", "NA", "N");
+        new RepeatDetectorTest(false, "NAAC", "NAA", "N");
+        new RepeatDetectorTest(false, "NAAC", "N", "NC");
+        new RepeatDetectorTest(false, "AAC", "A", "C");
+
+        // running out of ref bases => false
+        new RepeatDetectorTest(false, "NAAC", "N", "NCAGTA");
+
+        // complex repeats
+        new RepeatDetectorTest(true,  "NATATATC", "N", "NAT");
+        new RepeatDetectorTest(true,  "NATATATC", "N", "NATA");
+        new RepeatDetectorTest(true,  "NATATATC", "N", "NATAT");
+        new RepeatDetectorTest(true,  "NATATATC", "NAT", "N");
+        new RepeatDetectorTest(false, "NATATATC", "NATA", "N");
+        new RepeatDetectorTest(false, "NATATATC", "NATAT", "N");
+
+        // multi-allelic
+        new RepeatDetectorTest(true,  "NATATATC", "N", "NAT", "NATAT");
+        new RepeatDetectorTest(true,  "NATATATC", "N", "NAT", "NATA");
+        new RepeatDetectorTest(true,  "NATATATC", "NAT", "N", "NATAT");
+        new RepeatDetectorTest(true,  "NATATATC", "NAT", "N", "NATA"); // two As
+        new RepeatDetectorTest(false, "NATATATC", "NAT", "N", "NATC"); // false
+        new RepeatDetectorTest(false, "NATATATC", "NAT", "N", "NCC"); // false
+        new RepeatDetectorTest(false, "NATATATC", "NAT", "NATAT", "NCC"); // false
+
+        return RepeatDetectorTest.getTests(RepeatDetectorTest.class);
+    }
+
+    @Test(enabled = !DEBUG, dataProvider = "RepeatDetectorTest")
+    public void testRepeatDetectorTest(RepeatDetectorTest cfg) {
+        // isTandemRepeat must agree with the case's expectation for the given reference bases.
+        final boolean observed = GATKVariantContextUtils.isTandemRepeat(cfg.vc, cfg.ref.getBytes());
+        Assert.assertEquals(observed, cfg.isTrueRepeat);
+    }
+
+    /**
+     * Exercises findNumberOfRepetitions / findRepeatedSubstring and
+     * getNumTandemRepeatUnits on insertion/deletion alleles in repeat contexts.
+     * (Removed unused locals delLocStart/delLocStop; the one deletion case below
+     * passes its 10-14 coordinates inline.)
+     */
+    @Test(enabled = !DEBUG)
+    public void testRepeatAllele() {
+        Allele nullR = Allele.create("A", true);
+        Allele nullA = Allele.create("A", false);
+        Allele atc   = Allele.create("AATC", false);
+        Allele atcatc   = Allele.create("AATCATC", false);
+        Allele ccccR = Allele.create("ACCCC", true);
+        Allele cc   = Allele.create("ACC", false);
+        Allele cccccc   = Allele.create("ACCCCCC", false);
+        Allele gagaR   = Allele.create("AGAGA", true);
+        Allele gagagaga   = Allele.create("AGAGAGAGA", false);
+
+        // - / ATC [ref] deletion locus; coordinates supplied inline where used
+        String delLoc = "chr1";
+
+        // - [ref] / ATC from 20-20
+        String insLoc = "chr1";
+        int insLocStart = 20;
+        int insLocStop = 20;
+
+        Pair<List<Integer>,byte[]> result;
+        byte[] refBytes = "TATCATCATCGGA".getBytes();
+
+        Assert.assertEquals(GATKVariantContextUtils.findNumberOfRepetitions("ATG".getBytes(), "ATGATGATGATG".getBytes(), true),4);
+        Assert.assertEquals(GATKVariantContextUtils.findNumberOfRepetitions("G".getBytes(), "ATGATGATGATG".getBytes(), true),0);
+        Assert.assertEquals(GATKVariantContextUtils.findNumberOfRepetitions("T".getBytes(), "T".getBytes(), true),1);
+        Assert.assertEquals(GATKVariantContextUtils.findNumberOfRepetitions("AT".getBytes(), "ATGATGATCATG".getBytes(), true),1);
+        Assert.assertEquals(GATKVariantContextUtils.findNumberOfRepetitions("CCC".getBytes(), "CCCCCCCC".getBytes(), true),2);
+
+        Assert.assertEquals(GATKVariantContextUtils.findRepeatedSubstring("ATG".getBytes()),3);
+        Assert.assertEquals(GATKVariantContextUtils.findRepeatedSubstring("AAA".getBytes()),1);
+        Assert.assertEquals(GATKVariantContextUtils.findRepeatedSubstring("CACACAC".getBytes()),7);
+        Assert.assertEquals(GATKVariantContextUtils.findRepeatedSubstring("CACACA".getBytes()),2);
+        Assert.assertEquals(GATKVariantContextUtils.findRepeatedSubstring("CATGCATG".getBytes()),4);
+        Assert.assertEquals(GATKVariantContextUtils.findRepeatedSubstring("AATAATA".getBytes()),7);
+
+
+        // A*,ATC, context = ATC ATC ATC : (ATC)3 -> (ATC)4
+        VariantContext vc = new VariantContextBuilder("foo", insLoc, insLocStart, insLocStop, Arrays.asList(nullR,atc)).make();
+        result = GATKVariantContextUtils.getNumTandemRepeatUnits(vc, refBytes);
+        Assert.assertEquals(result.getFirst().toArray()[0],3);
+        Assert.assertEquals(result.getFirst().toArray()[1],4);
+        Assert.assertEquals(result.getSecond().length,3);
+
+        // ATC*,A,ATCATC
+        vc = new VariantContextBuilder("foo", insLoc, insLocStart, insLocStart+3, Arrays.asList(Allele.create("AATC", true),nullA,atcatc)).make();
+        result = GATKVariantContextUtils.getNumTandemRepeatUnits(vc, refBytes);
+        Assert.assertEquals(result.getFirst().toArray()[0],3);
+        Assert.assertEquals(result.getFirst().toArray()[1],2);
+        Assert.assertEquals(result.getFirst().toArray()[2],4);
+        Assert.assertEquals(result.getSecond().length,3);
+
+        // simple non-tandem deletion: CCCC*, -
+        refBytes = "TCCCCCCCCATG".getBytes();
+        vc = new VariantContextBuilder("foo", delLoc, 10, 14, Arrays.asList(ccccR,nullA)).make();
+        result = GATKVariantContextUtils.getNumTandemRepeatUnits(vc, refBytes);
+        Assert.assertEquals(result.getFirst().toArray()[0],8);
+        Assert.assertEquals(result.getFirst().toArray()[1],4);
+        Assert.assertEquals(result.getSecond().length,1);
+
+        // CCCC*,CC,-,CCCCCC, context = CCC: (C)7 -> (C)5,(C)3,(C)9
+        refBytes = "TCCCCCCCAGAGAGAG".getBytes();
+        vc = new VariantContextBuilder("foo", insLoc, insLocStart, insLocStart+4, Arrays.asList(ccccR,cc, nullA,cccccc)).make();
+        result = GATKVariantContextUtils.getNumTandemRepeatUnits(vc, refBytes);
+        Assert.assertEquals(result.getFirst().toArray()[0],7);
+        Assert.assertEquals(result.getFirst().toArray()[1],5);
+        Assert.assertEquals(result.getFirst().toArray()[2],3);
+        Assert.assertEquals(result.getFirst().toArray()[3],9);
+        Assert.assertEquals(result.getSecond().length,1);
+
+        // GAGA*,-,GAGAGAGA
+        refBytes = "TGAGAGAGAGATTT".getBytes();
+        vc = new VariantContextBuilder("foo", insLoc, insLocStart, insLocStart+4, Arrays.asList(gagaR, nullA,gagagaga)).make();
+        result = GATKVariantContextUtils.getNumTandemRepeatUnits(vc, refBytes);
+        Assert.assertEquals(result.getFirst().toArray()[0],5);
+        Assert.assertEquals(result.getFirst().toArray()[1],3);
+        Assert.assertEquals(result.getFirst().toArray()[2],7);
+        Assert.assertEquals(result.getSecond().length,2);
+    }
+
+    // --------------------------------------------------------------------------------
+    //
+    // test forward clipping
+    //
+    // --------------------------------------------------------------------------------
+
+    @DataProvider(name = "ForwardClippingData")
+    public Object[][] makeForwardClippingData() {
+        List<Object[]> tests = new ArrayList<Object[]>();
+
+        // this functionality can be adapted to provide input data for whatever you might want in your data
+        tests.add(new Object[]{Arrays.asList("A"), -1});
+        tests.add(new Object[]{Arrays.asList("<DEL>"), -1});
+        tests.add(new Object[]{Arrays.asList("A", "C"), -1});
+        tests.add(new Object[]{Arrays.asList("AC", "C"), -1});
+        tests.add(new Object[]{Arrays.asList("A", "G"), -1});
+        tests.add(new Object[]{Arrays.asList("A", "T"), -1});
+        tests.add(new Object[]{Arrays.asList("GT", "CA"), -1});
+        tests.add(new Object[]{Arrays.asList("GT", "CT"), -1});
+        tests.add(new Object[]{Arrays.asList("ACC", "AC"), 0});
+        tests.add(new Object[]{Arrays.asList("ACGC", "ACG"), 1});
+        tests.add(new Object[]{Arrays.asList("ACGC", "ACG"), 1});
+        tests.add(new Object[]{Arrays.asList("ACGC", "ACGA"), 2});
+        tests.add(new Object[]{Arrays.asList("ACGC", "AGC"), 0});
+        tests.add(new Object[]{Arrays.asList("A", "<DEL>"), -1});
+        for ( int len = 0; len < 50; len++ )
+            tests.add(new Object[]{Arrays.asList("A" + new String(Utils.dupBytes((byte)'C', len)), "C"), -1});
+
+        tests.add(new Object[]{Arrays.asList("A", "T", "C"), -1});
+        tests.add(new Object[]{Arrays.asList("AT", "AC", "AG"), 0});
+        tests.add(new Object[]{Arrays.asList("AT", "AC", "A"), -1});
+        tests.add(new Object[]{Arrays.asList("AT", "AC", "ACG"), 0});
+        tests.add(new Object[]{Arrays.asList("AC", "AC", "ACG"), 0});
+        tests.add(new Object[]{Arrays.asList("AC", "ACT", "ACG"), 0});
+        tests.add(new Object[]{Arrays.asList("ACG", "ACGT", "ACGTA"), 1});
+        tests.add(new Object[]{Arrays.asList("ACG", "ACGT", "ACGCA"), 1});
+
+        return tests.toArray(new Object[][]{});
+    }
+
+    /**
+     * Verifies computeForwardClipping returns the expected clip index for every
+     * permutation of the provided alleles, i.e. the result is order-independent.
+     */
+    @Test(enabled = !DEBUG, dataProvider = "ForwardClippingData")
+    public void testForwardClipping(final List<String> alleleStrings, final int expectedClip) {
+        final List<Allele> alleles = new LinkedList<Allele>();
+        for ( final String alleleString : alleleStrings )
+            alleles.add(Allele.create(alleleString));
+
+        // every full-length permutation must yield the same clip index
+        for ( final List<Allele> myAlleles : Utils.makePermutations(alleles, alleles.size(), false)) {
+            final int actual = GATKVariantContextUtils.computeForwardClipping(myAlleles);
+            Assert.assertEquals(actual, expectedClip);
+        }
+    }
+
+    /**
+     * Data provider for {@link #testClipAlleles}: input allele strings, the
+     * expected trimmed allele strings (same order), and how many bases are
+     * clipped from the left (used to check the shifted start position).
+     */
+    @DataProvider(name = "ClipAlleleTest")
+    public Object[][] makeClipAlleleTest() {
+        List<Object[]> tests = new ArrayList<Object[]>();
+
+        // this functionality can be adapted to provide input data for whatever you might want in your data
+        tests.add(new Object[]{Arrays.asList("ACC", "AC"), Arrays.asList("AC", "A"), 0});
+        tests.add(new Object[]{Arrays.asList("ACGC", "ACG"), Arrays.asList("GC", "G"), 2});
+        tests.add(new Object[]{Arrays.asList("ACGC", "ACGA"), Arrays.asList("C", "A"), 3});
+        tests.add(new Object[]{Arrays.asList("ACGC", "AGC"), Arrays.asList("AC", "A"), 0});
+        tests.add(new Object[]{Arrays.asList("AT", "AC", "AG"), Arrays.asList("T", "C", "G"), 1});
+        tests.add(new Object[]{Arrays.asList("AT", "AC", "ACG"), Arrays.asList("T", "C", "CG"), 1});
+        tests.add(new Object[]{Arrays.asList("AC", "ACT", "ACG"), Arrays.asList("C", "CT", "CG"), 1});
+        tests.add(new Object[]{Arrays.asList("ACG", "ACGT", "ACGTA"), Arrays.asList("G", "GT", "GTA"), 2});
+        tests.add(new Object[]{Arrays.asList("ACG", "ACGT", "ACGCA"), Arrays.asList("G", "GT", "GCA"), 2});
+
+        // trims from left and right
+        tests.add(new Object[]{Arrays.asList("ACGTT", "ACCTT"), Arrays.asList("G", "C"), 2});
+        tests.add(new Object[]{Arrays.asList("ACGTT", "ACCCTT"), Arrays.asList("G", "CC"), 2});
+        tests.add(new Object[]{Arrays.asList("ACGTT", "ACGCTT"), Arrays.asList("G", "GC"), 2});
+        tests.add(new Object[]{Arrays.asList("ATCGAGCCGTG", "AAGCCGTG"), Arrays.asList("ATCG", "A"), 0});
+
+        return tests.toArray(new Object[][]{});
+    }
+
+    /**
+     * Verifies trimAlleles (both sides) produces the expected trimmed allele
+     * strings and shifts the variant start by the number of left-clipped bases.
+     * Assumes the clipped context preserves allele order relative to the input.
+     */
+    @Test(enabled = !DEBUG, dataProvider = "ClipAlleleTest")
+    public void testClipAlleles(final List<String> alleleStrings, final List<String> expected, final int numLeftClipped) {
+        final int start = 10;
+        final VariantContext unclipped = GATKVariantContextUtils.makeFromAlleles("test", "20", start, alleleStrings);
+        final VariantContext clipped = GATKVariantContextUtils.trimAlleles(unclipped, true, true);
+
+        // left clipping moves the start coordinate forward by the clipped length
+        Assert.assertEquals(clipped.getStart(), unclipped.getStart() + numLeftClipped);
+        for ( int i = 0; i < unclipped.getAlleles().size(); i++ ) {
+            final Allele trimmed = clipped.getAlleles().get(i);
+            Assert.assertEquals(trimmed.getBaseString(), expected.get(i));
+        }
+    }
+
+    // --------------------------------------------------------------------------------
+    //
+    // test primitive allele splitting
+    //
+    // --------------------------------------------------------------------------------
+
+    /**
+     * Data provider for {@link #testPrimitiveAlleleSplitting}: a ref/alt pair,
+     * the expected number of primitive (SNP-like) splits, and the 0-based
+     * offsets of each mismatching position (null when no split is expected).
+     */
+    @DataProvider(name = "PrimitiveAlleleSplittingData")
+    public Object[][] makePrimitiveAlleleSplittingData() {
+        List<Object[]> tests = new ArrayList<>();
+
+        // no split: SNP or pure indel contexts come back unchanged
+        tests.add(new Object[]{"A", "C", 0, null});
+        tests.add(new Object[]{"A", "AC", 0, null});
+        tests.add(new Object[]{"AC", "A", 0, null});
+
+        // one split
+        tests.add(new Object[]{"ACA", "GCA", 1, Arrays.asList(0)});
+        tests.add(new Object[]{"ACA", "AGA", 1, Arrays.asList(1)});
+        tests.add(new Object[]{"ACA", "ACG", 1, Arrays.asList(2)});
+
+        // two splits
+        tests.add(new Object[]{"ACA", "GGA", 2, Arrays.asList(0, 1)});
+        tests.add(new Object[]{"ACA", "GCG", 2, Arrays.asList(0, 2)});
+        tests.add(new Object[]{"ACA", "AGG", 2, Arrays.asList(1, 2)});
+
+        // three splits
+        tests.add(new Object[]{"ACA", "GGG", 3, Arrays.asList(0, 1, 2)});
+
+        return tests.toArray(new Object[][]{});
+    }
+
+    /**
+     * Verifies splitIntoPrimitiveAlleles: an MNP-like ref/alt pair is split into
+     * one context per mismatching base (each starting at start + offset), while
+     * a non-splittable context is returned unchanged as a singleton list.
+     */
+    @Test(enabled = !DEBUG, dataProvider = "PrimitiveAlleleSplittingData")
+    public void testPrimitiveAlleleSplitting(final String ref, final String alt, final int expectedSplit, final List<Integer> variantPositions) {
+
+        final int start = 10;
+        final VariantContext vc = GATKVariantContextUtils.makeFromAlleles("test", "20", start, Arrays.asList(ref, alt));
+
+        final List<VariantContext> result = GATKVariantContextUtils.splitIntoPrimitiveAlleles(vc);
+
+        if ( expectedSplit > 0 ) {
+            Assert.assertEquals(result.size(), expectedSplit);
+            for ( int i = 0; i < variantPositions.size(); i++ ) {
+                Assert.assertEquals(result.get(i).getStart(), start + variantPositions.get(i));
+            }
+        } else {
+            // no split expected: the original context is passed through untouched
+            Assert.assertEquals(result.size(), 1);
+            Assert.assertEquals(vc, result.get(0));
+        }
+    }
+
+    // --------------------------------------------------------------------------------
+    //
+    // test allele remapping
+    //
+    // --------------------------------------------------------------------------------
+
+    /**
+     * Data provider for {@link #testAlleleRemapping}: maps the fixed original
+     * alleles A and T onto every ordered pair of standard bases, crossed with
+     * several genotype counts (including zero genotypes).
+     */
+    @DataProvider(name = "AlleleRemappingData")
+    public Object[][] makeAlleleRemappingData() {
+        List<Object[]> tests = new ArrayList<>();
+
+        final Allele originalBase1 = Allele.create((byte)'A');
+        final Allele originalBase2 = Allele.create((byte)'T');
+
+        for ( final byte base1 : BaseUtils.BASES ) {
+            for ( final byte base2 : BaseUtils.BASES ) {
+                for ( final int numGenotypes : Arrays.asList(0, 1, 2, 5) ) {
+                    // fresh map per case: A -> base1, T -> base2
+                    Map<Allele, Allele> map = new HashMap<>(2);
+                    map.put(originalBase1, Allele.create(base1));
+                    map.put(originalBase2, Allele.create(base2));
+
+                    tests.add(new Object[]{map, numGenotypes});
+                }
+            }
+        }
+
+        return tests.toArray(new Object[][]{});
+    }
+
+    /**
+     * Verifies updateGenotypesWithMappedAlleles rewrites every genotype's
+     * alleles according to the supplied allele map, preserving ploidy and
+     * per-sample allele order.
+     */
+    @Test(enabled = !DEBUG, dataProvider = "AlleleRemappingData")
+    public void testAlleleRemapping(final Map<Allele, Allele> alleleMap, final int numGenotypes) {
+
+        final GATKVariantContextUtils.AlleleMapper alleleMapper = new GATKVariantContextUtils.AlleleMapper(alleleMap);
+
+        // NOTE(review): raw ArrayList here — new ArrayList<>(alleleMap.keySet()) would avoid the unchecked warning
+        final GenotypesContext originalGC = createGenotypesContext(numGenotypes, new ArrayList(alleleMap.keySet()));
+
+        final GenotypesContext remappedGC = GATKVariantContextUtils.updateGenotypesWithMappedAlleles(originalGC, alleleMapper);
+
+        // samples are named "0".."numGenotypes-1" by createGenotypesContext
+        for ( int i = 0; i < numGenotypes; i++ ) {
+            final Genotype originalG = originalGC.get(String.format("%d", i));
+            final Genotype remappedG = remappedGC.get(String.format("%d", i));
+
+            Assert.assertEquals(originalG.getAlleles().size(), remappedG.getAlleles().size());
+            for ( int j = 0; j < originalG.getAlleles().size(); j++ )
+                Assert.assertEquals(remappedG.getAllele(j), alleleMap.get(originalG.getAllele(j)));
+        }
+    }
+
+    /**
+     * Builds a GenotypesContext of {@code numGenotypes} diploid genotypes whose
+     * alleles are drawn at random (deterministically, via the reset shared RNG)
+     * from the supplied list. Samples are named "0", "1", ...
+     * NOTE(review): only indices 0 and 1 are ever sampled (nextInt(2)), so
+     * {@code alleles} is assumed to have at least two entries — confirm callers.
+     */
+    private static GenotypesContext createGenotypesContext(final int numGenotypes, final List<Allele> alleles) {
+        // reset so every invocation produces the same pseudo-random sequence
+        Utils.resetRandomGenerator();
+        final Random random = Utils.getRandomGenerator();
+
+        final GenotypesContext gc = GenotypesContext.create();
+        for ( int i = 0; i < numGenotypes; i++ ) {
+            // choose alleles at random
+            final List<Allele> myAlleles = new ArrayList<Allele>();
+            myAlleles.add(alleles.get(random.nextInt(2)));
+            myAlleles.add(alleles.get(random.nextInt(2)));
+
+            final Genotype g = new GenotypeBuilder(String.format("%d", i)).alleles(myAlleles).make();
+            gc.add(g);
+        }
+
+        return gc;
+    }
+
+    // --------------------------------------------------------------------------------
+    //
+    // Test subsetDiploidAlleles
+    //
+    // --------------------------------------------------------------------------------
+
+    /**
+     * Data provider for {@link #testsubsetDiploidAllelesData}: each case is an
+     * input VariantContext with genotypes, the subset of alleles to keep, and
+     * the expected genotypes after subsetDiploidAlleles with PL-based assignment.
+     */
+    @DataProvider(name = "subsetDiploidAllelesData")
+    public Object[][] makesubsetDiploidAllelesData() {
+        List<Object[]> tests = new ArrayList<>();
+
+        final Allele A = Allele.create("A", true);
+        final Allele C = Allele.create("C");
+        final Allele G = Allele.create("G");
+
+        // diploid allele combinations used throughout (NOTE(review): CG is unused here)
+        final List<Allele> AA = Arrays.asList(A,A);
+        final List<Allele> AC = Arrays.asList(A,C);
+        final List<Allele> CC = Arrays.asList(C,C);
+        final List<Allele> AG = Arrays.asList(A,G);
+        final List<Allele> CG = Arrays.asList(C,G);
+        final List<Allele> GG = Arrays.asList(G,G);
+        final List<Allele> ACG = Arrays.asList(A,C,G);
+
+        final VariantContext vcBase = new VariantContextBuilder("test", "20", 10, 10, AC).make();
+
+        // biallelic genotype likelihoods (AA, AC, CC), normalized from real space
+        final double[] homRefPL = MathUtils.normalizeFromRealSpace(new double[]{0.9, 0.09, 0.01});
+        final double[] hetPL = MathUtils.normalizeFromRealSpace(new double[]{0.09, 0.9, 0.01});
+        final double[] homVarPL = MathUtils.normalizeFromRealSpace(new double[]{0.01, 0.09, 0.9});
+        final double[] uninformative = new double[]{0, 0, 0};
+
+        final Genotype base = new GenotypeBuilder("NA12878").DP(10).GQ(50).make();
+
+        // make sure we don't screw up the simple case
+        final Genotype aaGT = new GenotypeBuilder(base).alleles(AA).AD(new int[]{10,2}).PL(homRefPL).attribute(GATKVCFConstants.STRAND_COUNT_BY_SAMPLE_KEY, new int[]{5, 10, 15, 20}).GQ(8).make();
+        final Genotype acGT = new GenotypeBuilder(base).alleles(AC).AD(new int[]{10, 2}).PL(hetPL).attribute(GATKVCFConstants.STRAND_COUNT_BY_SAMPLE_KEY, new int[]{5, 10, 15, 20}).GQ(8).make();
+        final Genotype ccGT = new GenotypeBuilder(base).alleles(CC).AD(new int[]{10, 2}).PL(homVarPL).attribute(GATKVCFConstants.STRAND_COUNT_BY_SAMPLE_KEY, new int[]{5, 10, 15, 20}).GQ(8).make();
+
+        tests.add(new Object[]{new VariantContextBuilder(vcBase).genotypes(aaGT).make(), AC, Arrays.asList(new GenotypeBuilder(aaGT).make())});
+        tests.add(new Object[]{new VariantContextBuilder(vcBase).genotypes(acGT).make(), AC, Arrays.asList(new GenotypeBuilder(acGT).make())});
+        tests.add(new Object[]{new VariantContextBuilder(vcBase).genotypes(ccGT).make(), AC, Arrays.asList(new GenotypeBuilder(ccGT).make())});
+
+        // uninformative test case: flat PLs are expected to come back as an empty no-call
+        final Genotype uninformativeGT = new GenotypeBuilder(base).alleles(CC).PL(uninformative).GQ(0).make();
+        final Genotype emptyGT = new GenotypeBuilder(base).alleles(GATKVariantContextUtils.NO_CALL_ALLELES).noPL().noGQ().make();
+        tests.add(new Object[]{new VariantContextBuilder(vcBase).genotypes(uninformativeGT).make(), AC, Arrays.asList(emptyGT)});
+
+        // actually subsetting down from multiple alt values
+        // triallelic PL ordering is AA, AC, CC, AG, CG, GG
+        final double[] homRef3AllelesPL = new double[]{0, -10, -20, -30, -40, -50};
+        final double[] hetRefC3AllelesPL = new double[]{-10, 0, -20, -30, -40, -50};
+        final double[] homC3AllelesPL = new double[]{-20, -10, 0, -30, -40, -50};
+        final double[] hetRefG3AllelesPL = new double[]{-20, -10, -30, 0, -40, -50};
+        final double[] hetCG3AllelesPL = new double[]{-20, -10, -30, -40, 0, -50}; // AA, AC, CC, AG, CG, GG
+        final double[] homG3AllelesPL = new double[]{-20, -10, -30, -40, -50, 0};  // AA, AC, CC, AG, CG, GG
+        tests.add(new Object[]{
+                new VariantContextBuilder(vcBase).alleles(ACG).genotypes(new GenotypeBuilder(base).alleles(AA).PL(homRef3AllelesPL).make()).make(),
+                AC,
+                Arrays.asList(new GenotypeBuilder(base).alleles(AA).PL(new double[]{0, -10, -20}).GQ(100).make())});
+
+        tests.add(new Object[]{
+                new VariantContextBuilder(vcBase).alleles(ACG).genotypes(new GenotypeBuilder(base).alleles(AA).PL(hetRefC3AllelesPL).make()).make(),
+                AC,
+                Arrays.asList(new GenotypeBuilder(base).alleles(AC).PL(new double[]{-10, 0, -20}).GQ(100).make())});
+
+        tests.add(new Object[]{
+                new VariantContextBuilder(vcBase).alleles(ACG).genotypes(new GenotypeBuilder(base).alleles(AA).PL(homC3AllelesPL).make()).make(),
+                AC,
+                Arrays.asList(new GenotypeBuilder(base).alleles(CC).PL(new double[]{-20, -10, 0}).GQ(100).make())});
+        tests.add(new Object[]{
+                new VariantContextBuilder(vcBase).alleles(ACG).genotypes(new GenotypeBuilder(base).alleles(AA).PL(hetRefG3AllelesPL).make()).make(),
+                AG,
+                Arrays.asList(new GenotypeBuilder(base).alleles(AG).PL(new double[]{-20, 0, -50}).GQ(200).make())});
+
+        // wow, scary -- bad output but discussed with Eric and we think this is the only thing that can be done
+        tests.add(new Object[]{
+                new VariantContextBuilder(vcBase).alleles(ACG).genotypes(new GenotypeBuilder(base).alleles(AA).PL(hetCG3AllelesPL).make()).make(),
+                AG,
+                Arrays.asList(new GenotypeBuilder(base).alleles(AA).PL(new double[]{0, -20, -30}).GQ(200).make())});
+
+        tests.add(new Object[]{
+                new VariantContextBuilder(vcBase).alleles(ACG).genotypes(new GenotypeBuilder(base).alleles(AA).PL(homG3AllelesPL).make()).make(),
+                AG,
+                Arrays.asList(new GenotypeBuilder(base).alleles(GG).PL(new double[]{-20, -40, 0}).GQ(200).make())});
+
+        return tests.toArray(new Object[][]{});
+    }
+
+    /**
+     * Verifies subsetDiploidAlleles (with USE_PLS_TO_ASSIGN): the returned
+     * genotypes match the expected per-sample genotypes exactly.
+     */
+    @Test(dataProvider = "subsetDiploidAllelesData")
+    public void testsubsetDiploidAllelesData(final VariantContext inputVC,
+                                             final List<Allele> allelesToUse,
+                                             final List<Genotype> expectedGenotypes) {
+        final GenotypesContext actual = GATKVariantContextUtils.subsetDiploidAlleles(inputVC, allelesToUse, GATKVariantContextUtils.GenotypeAssignmentMethod.USE_PLS_TO_ASSIGN);
+
+        // compare by sample name so genotype ordering is irrelevant
+        Assert.assertEquals(actual.size(), expectedGenotypes.size());
+        for ( final Genotype expected : expectedGenotypes ) {
+            final Genotype actualGT = actual.get(expected.getSampleName());
+            Assert.assertNotNull(actualGT);
+            assertGenotypesAreEqual(actualGT, expected);
+        }
+    }
+
+    /**
+     * Data provider for {@link #testUpdateGenotypeAfterSubsetting}: each case is
+     * (assignment mode, likelihoods, original GT alleles, alleles to subset to,
+     * expected GT alleles) covering SET_TO_NO_CALL, USE_PLS_TO_ASSIGN, and
+     * BEST_MATCH_TO_ORIGINAL behaviors.
+     */
+    @DataProvider(name = "UpdateGenotypeAfterSubsettingData")
+    public Object[][] makeUpdateGenotypeAfterSubsettingData() {
+        List<Object[]> tests = new ArrayList<Object[]>();
+
+        final Allele A = Allele.create("A", true);
+        final Allele C = Allele.create("C");
+        final Allele G = Allele.create("G");
+
+        final List<Allele> AA = Arrays.asList(A,A);
+        final List<Allele> AC = Arrays.asList(A,C);
+        final List<Allele> CC = Arrays.asList(C,C);
+        final List<Allele> AG = Arrays.asList(A,G);
+        final List<Allele> CG = Arrays.asList(C,G);
+        final List<Allele> GG = Arrays.asList(G,G);
+        final List<Allele> ACG = Arrays.asList(A,C,G);
+        final List<List<Allele>> allSubsetAlleles = Arrays.asList(AC,AG,ACG);
+
+        // biallelic likelihoods (AA, AC, CC) in real space; normalized in the test itself
+        final double[] homRefPL = new double[]{0.9, 0.09, 0.01};
+        final double[] hetPL = new double[]{0.09, 0.9, 0.01};
+        final double[] homVarPL = new double[]{0.01, 0.09, 0.9};
+        final double[] uninformative = new double[]{0.33, 0.33, 0.33};
+        final List<double[]> allPLs = Arrays.asList(homRefPL, hetPL, homVarPL, uninformative);
+
+        // SET_TO_NO_CALL: regardless of likelihoods or target alleles, result is ./.
+        for ( final List<Allele> alleles : allSubsetAlleles ) {
+            for ( final double[] pls : allPLs ) {
+                tests.add(new Object[]{GATKVariantContextUtils.GenotypeAssignmentMethod.SET_TO_NO_CALL, pls, AA, alleles, GATKVariantContextUtils.NO_CALL_ALLELES});
+            }
+        }
+
+        // USE_PLS_TO_ASSIGN: the likelihoods alone determine the called genotype
+        for ( final List<Allele> originalGT : Arrays.asList(AA, AC, CC, AG, CG, GG) ) {
+            tests.add(new Object[]{GATKVariantContextUtils.GenotypeAssignmentMethod.USE_PLS_TO_ASSIGN, homRefPL, originalGT, AC, AA});
+            tests.add(new Object[]{GATKVariantContextUtils.GenotypeAssignmentMethod.USE_PLS_TO_ASSIGN, hetPL, originalGT, AC, AC});
+            tests.add(new Object[]{GATKVariantContextUtils.GenotypeAssignmentMethod.USE_PLS_TO_ASSIGN, homVarPL, originalGT, AC, CC});
+        // NOTE(review): disabled case below — uninformative PLs under USE_PLS_TO_ASSIGN; left as upstream had it
+//        tests.add(new Object[]{GATKVariantContextUtils.GenotypeAssignmentMethod.USE_PLS_TO_ASSIGN, uninformative, AA, AC, GATKVariantContextUtils.NO_CALL_ALLELES});
+        }
+
+        // BEST_MATCH_TO_ORIGINAL: likelihoods are ignored; keep the closest genotype expressible with the subset
+        for ( final double[] pls : allPLs ) {
+            tests.add(new Object[]{GATKVariantContextUtils.GenotypeAssignmentMethod.BEST_MATCH_TO_ORIGINAL, pls, AA, AC, AA});
+            tests.add(new Object[]{GATKVariantContextUtils.GenotypeAssignmentMethod.BEST_MATCH_TO_ORIGINAL, pls, AC, AC, AC});
+            tests.add(new Object[]{GATKVariantContextUtils.GenotypeAssignmentMethod.BEST_MATCH_TO_ORIGINAL, pls, CC, AC, CC});
+            tests.add(new Object[]{GATKVariantContextUtils.GenotypeAssignmentMethod.BEST_MATCH_TO_ORIGINAL, pls, CG, AC, AC});
+
+            tests.add(new Object[]{GATKVariantContextUtils.GenotypeAssignmentMethod.BEST_MATCH_TO_ORIGINAL, pls, AA, AG, AA});
+            tests.add(new Object[]{GATKVariantContextUtils.GenotypeAssignmentMethod.BEST_MATCH_TO_ORIGINAL, pls, AC, AG, AA});
+            tests.add(new Object[]{GATKVariantContextUtils.GenotypeAssignmentMethod.BEST_MATCH_TO_ORIGINAL, pls, CC, AG, AA});
+            tests.add(new Object[]{GATKVariantContextUtils.GenotypeAssignmentMethod.BEST_MATCH_TO_ORIGINAL, pls, CG, AG, AG});
+
+            tests.add(new Object[]{GATKVariantContextUtils.GenotypeAssignmentMethod.BEST_MATCH_TO_ORIGINAL, pls, AA, ACG, AA});
+            tests.add(new Object[]{GATKVariantContextUtils.GenotypeAssignmentMethod.BEST_MATCH_TO_ORIGINAL, pls, AC, ACG, AC});
+            tests.add(new Object[]{GATKVariantContextUtils.GenotypeAssignmentMethod.BEST_MATCH_TO_ORIGINAL, pls, CC, ACG, CC});
+            tests.add(new Object[]{GATKVariantContextUtils.GenotypeAssignmentMethod.BEST_MATCH_TO_ORIGINAL, pls, AG, ACG, AG});
+            tests.add(new Object[]{GATKVariantContextUtils.GenotypeAssignmentMethod.BEST_MATCH_TO_ORIGINAL, pls, CG, ACG, CG});
+            tests.add(new Object[]{GATKVariantContextUtils.GenotypeAssignmentMethod.BEST_MATCH_TO_ORIGINAL, pls, GG, ACG, GG});
+        }
+
+        return tests.toArray(new Object[][]{});
+    }
+
+    /**
+     * Verifies updateGenotypeAfterSubsetting assigns the expected alleles for
+     * the given assignment mode, likelihoods, and allele subset. Alleles are
+     * compared as sets, so ordering within the genotype is not asserted.
+     */
+    @Test(enabled = !DEBUG, dataProvider = "UpdateGenotypeAfterSubsettingData")
+    public void testUpdateGenotypeAfterSubsetting(final GATKVariantContextUtils.GenotypeAssignmentMethod mode,
+                                                  final double[] likelihoods,
+                                                  final List<Allele> originalGT,
+                                                  final List<Allele> allelesToUse,
+                                                  final List<Allele> expectedAlleles) {
+        final GenotypeBuilder gb = new GenotypeBuilder("test");
+        // convert real-space likelihoods to normalized log10 before the call
+        final double[] log10Likelhoods = MathUtils.normalizeFromLog10(likelihoods, true, false);
+        GATKVariantContextUtils.updateGenotypeAfterSubsetting(originalGT, gb, mode, log10Likelhoods, allelesToUse);
+        final Genotype g = gb.make();
+        Assert.assertEquals(new HashSet<>(g.getAlleles()), new HashSet<>(expectedAlleles));
+    }
+
+    /**
+     * Verifies subsetToRefOnly: for every combination of input genotypes, each
+     * genotype is rewritten to all-reference alleles of the same ploidy with
+     * AD, PL, and the per-sample strand-count attribute dropped, while DP/GQ
+     * are carried over.
+     */
+    @Test(enabled = !DEBUG)
+    public void testSubsetToRef() {
+        // maps each input genotype to the genotype expected after ref-subsetting
+        final Map<Genotype, Genotype> tests = new LinkedHashMap<>();
+
+        for ( final List<Allele> alleles : Arrays.asList(Arrays.asList(Aref), Arrays.asList(C), Arrays.asList(Aref, C), Arrays.asList(Aref, C, C) ) ) {
+            for ( final String name : Arrays.asList("test1", "test2") ) {
+                final GenotypeBuilder builder = new GenotypeBuilder(name, alleles);
+                builder.DP(10);
+                builder.GQ(30);
+                // AD/PL/SAC sized to ploidy so the input genotype is fully populated
+                builder.AD(alleles.size() == 1 ? new int[]{1} : (alleles.size() == 2 ? new int[]{1, 2} : new int[]{1, 2, 3}));
+                builder.PL(alleles.size() == 1 ? new int[]{1} : (alleles.size() == 2 ? new int[]{1, 2} : new int[]{1, 2, 3}));
+                builder.attribute(GATKVCFConstants.STRAND_COUNT_BY_SAMPLE_KEY,
+                        alleles.size() == 1 ? new int[]{1, 2}  : (alleles.size() == 2 ? new int[]{1, 2, 3, 4} : new int[]{1, 2, 3, 4, 5, 6}));
+                final List<Allele> refs = Collections.nCopies(alleles.size(), Aref);
+                tests.put(builder.make(), builder.alleles(refs).noAD().noPL().attribute(GATKVCFConstants.STRAND_COUNT_BY_SAMPLE_KEY, null).make());
+            }
+        }
+
+        // exercise subsets of 1, 2, and 3 genotypes in every permutation
+        for ( final int n : Arrays.asList(1, 2, 3) ) {
+            for ( final List<Genotype> genotypes : Utils.makePermutations(new ArrayList<>(tests.keySet()), n, false) ) {
+                final VariantContext vc = new VariantContextBuilder("test", "20", 1, 1, Arrays.asList(Aref, C)).genotypes(genotypes).make();
+                final GenotypesContext gc = GATKVariantContextUtils.subsetToRefOnly(vc, 2);
+
+                Assert.assertEquals(gc.size(), genotypes.size());
+                for ( int i = 0; i < genotypes.size(); i++ ) {
+                    assertGenotypesAreEqual(gc.get(i), tests.get(genotypes.get(i)));
+                }
+            }
+        }
+    }
+
+    // --------------------------------------------------------------------------------
+    //
+    // Test updatePLsAndAD
+    //
+    // --------------------------------------------------------------------------------
+
+    /**
+     * Data provider for {@link #testUpdatePLsAndADData}: each case is the
+     * original VariantContext (with genotypes), the selected/subset context,
+     * and the genotypes expected after updatePLsSACsAD re-derives PL, AD, and
+     * the per-sample strand counts for the reduced allele set.
+     */
+    @DataProvider(name = "updatePLsSACsAndADData")
+    public Object[][] makeUpdatePLsSACsAndADData() {
+        List<Object[]> tests = new ArrayList<>();
+
+        final Allele A = Allele.create("A", true);
+        final Allele C = Allele.create("C");
+        final Allele G = Allele.create("G");
+
+        // diploid allele combinations (NOTE(review): CC/CG/GG are only used via genotypes below)
+        final List<Allele> AA = Arrays.asList(A,A);
+        final List<Allele> AC = Arrays.asList(A,C);
+        final List<Allele> CC = Arrays.asList(C,C);
+        final List<Allele> AG = Arrays.asList(A,G);
+        final List<Allele> CG = Arrays.asList(C,G);
+        final List<Allele> GG = Arrays.asList(G,G);
+        final List<Allele> ACG = Arrays.asList(A,C,G);
+
+        final VariantContext vcBase = new VariantContextBuilder("test", "20", 10, 10, AC).make();
+
+        // biallelic likelihoods (AA, AC, CC), normalized from real space
+        final double[] homRefPL = MathUtils.normalizeFromRealSpace(new double[]{0.9, 0.09, 0.01});
+        final double[] hetPL = MathUtils.normalizeFromRealSpace(new double[]{0.09, 0.9, 0.01});
+        final double[] homVarPL = MathUtils.normalizeFromRealSpace(new double[]{0.01, 0.09, 0.9});
+        final double[] uninformative = new double[]{0, 0, 0};
+
+        final Genotype base = new GenotypeBuilder("NA12878").DP(10).GQ(100).make();
+
+        // make sure we don't screw up the simple case where no selection happens
+        final Genotype aaGT = new GenotypeBuilder(base).alleles(AA).AD(new int[]{10,2}).PL(homRefPL).attribute(GATKVCFConstants.STRAND_COUNT_BY_SAMPLE_KEY, new int[]{5, 10, 15, 20}).GQ(8).make();
+        final Genotype acGT = new GenotypeBuilder(base).alleles(AC).AD(new int[]{10, 2}).PL(hetPL).attribute(GATKVCFConstants.STRAND_COUNT_BY_SAMPLE_KEY, new int[]{5, 10, 15, 20}).GQ(8).make();
+        final Genotype ccGT = new GenotypeBuilder(base).alleles(CC).AD(new int[]{10, 2}).PL(homVarPL).attribute(GATKVCFConstants.STRAND_COUNT_BY_SAMPLE_KEY, new int[]{5, 10, 15, 20}).GQ(8).make();
+
+        tests.add(new Object[]{new VariantContextBuilder(vcBase).genotypes(aaGT).make(), new VariantContextBuilder(vcBase).alleles(AC).make(), Arrays.asList(new GenotypeBuilder(aaGT).make())});
+        tests.add(new Object[]{new VariantContextBuilder(vcBase).genotypes(acGT).make(), new VariantContextBuilder(vcBase).alleles(AC).make(), Arrays.asList(new GenotypeBuilder(acGT).make())});
+        tests.add(new Object[]{new VariantContextBuilder(vcBase).genotypes(ccGT).make(), new VariantContextBuilder(vcBase).alleles(AC).make(), Arrays.asList(new GenotypeBuilder(ccGT).make())});
+
+        // uninformative test cases: flat PLs and fully-empty genotypes pass through unchanged
+        final Genotype uninformativeGT = new GenotypeBuilder(base).alleles(CC).noAD().PL(uninformative).GQ(0).make();
+        tests.add(new Object[]{new VariantContextBuilder(vcBase).genotypes(uninformativeGT).make(), new VariantContextBuilder(vcBase).alleles(AC).make(), Arrays.asList(uninformativeGT)});
+        final Genotype emptyGT = new GenotypeBuilder(base).alleles(GATKVariantContextUtils.NO_CALL_ALLELES).noAD().noPL().noGQ().make();
+        tests.add(new Object[]{new VariantContextBuilder(vcBase).genotypes(emptyGT).make(), new VariantContextBuilder(vcBase).alleles(AC).make(), Arrays.asList(emptyGT)});
+
+        // actually subsetting down from multiple alt values
+        // triallelic PL ordering is AA, AC, CC, AG, CG, GG
+        final double[] homRef3AllelesPL = new double[]{0, -10, -20, -30, -40, -50};
+        final double[] hetRefC3AllelesPL = new double[]{-10, 0, -20, -30, -40, -50};
+        final double[] homC3AllelesPL = new double[]{-20, -10, 0, -30, -40, -50};
+        final double[] hetRefG3AllelesPL = new double[]{-20, -10, -30, 0, -40, -50};
+        final double[] hetCG3AllelesPL = new double[]{-20, -10, -30, -40, 0, -50}; // AA, AC, CC, AG, CG, GG
+        final double[] homG3AllelesPL = new double[]{-20, -10, -30, -40, -50, 0};  // AA, AC, CC, AG, CG, GG
+
+        // triallelic allele depths ordered ref, C, G
+        final int[] homRef3AllelesAD = new int[]{20, 0, 1};
+        final int[] hetRefC3AllelesAD = new int[]{10, 10, 1};
+        final int[] homC3AllelesAD = new int[]{0, 20, 1};
+        final int[] hetRefG3AllelesAD = new int[]{10, 0, 11};
+        final int[] hetCG3AllelesAD = new int[]{0, 12, 11}; // AA, AC, CC, AG, CG, GG
+        final int[] homG3AllelesAD = new int[]{0, 1, 21};  // AA, AC, CC, AG, CG, GG
+
+        // per-sample strand counts: forward/reverse pairs per allele (ref, C, G)
+        final int[] homRef3AllelesSAC = new int[]{20, 19, 0, 1, 3, 4};
+        final int[] hetRefC3AllelesSAC = new int[]{10, 9, 10, 9, 1, 1};
+        final int[] homC3AllelesSAC = new int[]{0, 0, 20, 20, 1, 1};
+        final int[] hetRefG3AllelesSAC = new int[]{10, 10, 0, 0, 11, 11};
+        final int[] hetCG3AllelesSAC = new int[]{0, 0, 12, 12, 11, 11}; // AA, AC, CC, AG, CG, GG
+        final int[] homG3AllelesSAC = new int[]{0, 0, 1, 1, 21, 21};  // AA, AC, CC, AG, CG, GG
+
+        tests.add(new Object[]{
+                new VariantContextBuilder(vcBase).alleles(ACG).genotypes(new GenotypeBuilder(base).alleles(AA).AD(homRef3AllelesAD).PL(homRef3AllelesPL).
+                        attribute(GATKVCFConstants.STRAND_COUNT_BY_SAMPLE_KEY, homRef3AllelesSAC).make()).make(),
+                        new VariantContextBuilder(vcBase).alleles(AC).make(),
+                Arrays.asList(new GenotypeBuilder(base).alleles(AA).PL(new double[]{0, -10, -20}).AD(new int[]{20, 0}).
+                        attribute(GATKVCFConstants.STRAND_COUNT_BY_SAMPLE_KEY, new int[]{20, 19, 0, 1}).GQ(100).make())});
+        tests.add(new Object[]{
+                new VariantContextBuilder(vcBase).alleles(ACG).genotypes(new GenotypeBuilder(base).alleles(AA).AD(hetRefC3AllelesAD).PL(hetRefC3AllelesPL).
+                        attribute(GATKVCFConstants.STRAND_COUNT_BY_SAMPLE_KEY, hetRefC3AllelesSAC).make()).make(),
+                new VariantContextBuilder(vcBase).alleles(AC).make(),
+                Arrays.asList(new GenotypeBuilder(base).alleles(AA).PL(new double[]{-10, 0, -20}).AD(new int[]{10, 10}).
+                        attribute(GATKVCFConstants.STRAND_COUNT_BY_SAMPLE_KEY, new int[]{10, 9, 10, 9}).GQ(100).make())});
+        tests.add(new Object[]{
+                new VariantContextBuilder(vcBase).alleles(ACG).genotypes(new GenotypeBuilder(base).alleles(AA).AD(homC3AllelesAD).PL(homC3AllelesPL).
+                        attribute(GATKVCFConstants.STRAND_COUNT_BY_SAMPLE_KEY, homC3AllelesSAC).make()).make(),
+                new VariantContextBuilder(vcBase).alleles(AC).make(),
+                Arrays.asList(new GenotypeBuilder(base).alleles(AA).PL(new double[]{-20, -10, 0}).AD(new int[]{0, 20}).
+                        attribute(GATKVCFConstants.STRAND_COUNT_BY_SAMPLE_KEY, new int[]{0, 0, 20, 20}).GQ(100).make())});
+        tests.add(new Object[]{
+                new VariantContextBuilder(vcBase).alleles(ACG).genotypes(new GenotypeBuilder(base).alleles(AA).AD(hetRefG3AllelesAD).PL(hetRefG3AllelesPL).
+                        attribute(GATKVCFConstants.STRAND_COUNT_BY_SAMPLE_KEY, hetRefG3AllelesSAC).make()).make(),
+                new VariantContextBuilder(vcBase).alleles(AG).make(),
+                Arrays.asList(new GenotypeBuilder(base).alleles(AA).PL(new double[]{-20, 0, -50}).AD(new int[]{10, 11}).
+                        attribute(GATKVCFConstants.STRAND_COUNT_BY_SAMPLE_KEY, new int[]{10, 10, 11, 11}).GQ(100).make())});
+        tests.add(new Object[]{
+                new VariantContextBuilder(vcBase).alleles(ACG).genotypes(new GenotypeBuilder(base).alleles(AA).AD(hetCG3AllelesAD).PL(hetCG3AllelesPL).
+                        attribute(GATKVCFConstants.STRAND_COUNT_BY_SAMPLE_KEY, hetCG3AllelesSAC).make()).make(),
+                new VariantContextBuilder(vcBase).alleles(AG).make(),
+                Arrays.asList(new GenotypeBuilder(base).alleles(AA).PL(new double[]{0, -20, -30}).AD(new int[]{0, 11}).
+                        attribute(GATKVCFConstants.STRAND_COUNT_BY_SAMPLE_KEY, new int[]{0, 0, 11, 11}).GQ(100).make())});
+        tests.add(new Object[]{
+                new VariantContextBuilder(vcBase).alleles(ACG).genotypes(new GenotypeBuilder(base).alleles(AA).AD(homG3AllelesAD).PL(homG3AllelesPL).
+                        attribute(GATKVCFConstants.STRAND_COUNT_BY_SAMPLE_KEY, homG3AllelesSAC).make()).make(),
+                new VariantContextBuilder(vcBase).alleles(AG).make(),
+                Arrays.asList(new GenotypeBuilder(base).alleles(AA).PL(new double[]{-20, -40, 0}).AD(new int[]{0, 21}).
+                        attribute(GATKVCFConstants.STRAND_COUNT_BY_SAMPLE_KEY, new int[]{0, 0, 21, 21}).GQ(100).make())});
+
+        return tests.toArray(new Object[][]{});
+    }
+
+    /**
+     * Verifies updatePLsSACsAD: given the selected context carrying the
+     * original genotypes, the recomputed genotypes (PL/AD/strand counts for the
+     * reduced allele set) match the expected genotypes sample-by-sample.
+     */
+    @Test(dataProvider = "updatePLsSACsAndADData")
+    public void testUpdatePLsAndADData(final VariantContext originalVC,
+                                       final VariantContext selectedVC,
+                                       final List<Genotype> expectedGenotypes) {
+        // attach the original genotypes to the selected (subset) context before updating
+        final VariantContext selectedVCwithGTs = new VariantContextBuilder(selectedVC).genotypes(originalVC.getGenotypes()).make();
+        final GenotypesContext actual = GATKVariantContextUtils.updatePLsSACsAD(selectedVCwithGTs, originalVC);
+
+        // compare by sample name so genotype ordering is irrelevant
+        Assert.assertEquals(actual.size(), expectedGenotypes.size());
+        for ( final Genotype expected : expectedGenotypes ) {
+            final Genotype actualGT = actual.get(expected.getSampleName());
+            Assert.assertNotNull(actualGT);
+            assertGenotypesAreEqual(actualGT, expected);
+        }
+    }
+
+    // --------------------------------------------------------------------------------
+    //
+    // Test methods for merging reference confidence VCs
+    //
+    // --------------------------------------------------------------------------------
+
+
+    @Test(dataProvider = "indexOfAlleleData")
+    public void testIndexOfAllele(final Allele reference, final List<Allele> altAlleles, final List<Allele> otherAlleles) {
+        final List<Allele> alleles = new ArrayList<>(altAlleles.size() + 1);
+        alleles.add(reference);
+        alleles.addAll(altAlleles);
+        final VariantContext vc = makeVC("Source", alleles);
+
+        for (int i = 0; i < alleles.size(); i++) {
+            Assert.assertEquals(GATKVariantContextUtils.indexOfAllele(vc,alleles.get(i),true,true,true),i);
+            Assert.assertEquals(GATKVariantContextUtils.indexOfAllele(vc,alleles.get(i),false,true,true),i);
+            Assert.assertEquals(GATKVariantContextUtils.indexOfAllele(vc,alleles.get(i),true,true,false),i);
+            Assert.assertEquals(GATKVariantContextUtils.indexOfAllele(vc,alleles.get(i),false,true,false),i);
+            Assert.assertEquals(GATKVariantContextUtils.indexOfAllele(vc,Allele.create(alleles.get(i),true),true,true,true),i);
+            Assert.assertEquals(GATKVariantContextUtils.indexOfAllele(vc,Allele.create(alleles.get(i),true),true,true,false),-1);
+            if (i == 0) {
+                Assert.assertEquals(GATKVariantContextUtils.indexOfAllele(vc,alleles.get(i),true,false,true),-1);
+                Assert.assertEquals(GATKVariantContextUtils.indexOfAllele(vc,alleles.get(i),false,false,true),-1);
+                Assert.assertEquals(GATKVariantContextUtils.indexOfAllele(vc,alleles.get(i),true,false,false),-1);
+                Assert.assertEquals(GATKVariantContextUtils.indexOfAllele(vc,alleles.get(i),false,false,false),-1);
+                Assert.assertEquals(GATKVariantContextUtils.indexOfAllele(vc,Allele.create(alleles.get(i).getBases(),true),false,true,true),i);
+                Assert.assertEquals(GATKVariantContextUtils.indexOfAllele(vc,Allele.create(alleles.get(i).getBases(),false),false,true,true),-1);
+            } else {
+                Assert.assertEquals(GATKVariantContextUtils.indexOfAltAllele(vc,alleles.get(i),true),i - 1);
+                Assert.assertEquals(GATKVariantContextUtils.indexOfAltAllele(vc,alleles.get(i),false), i - 1);
+                Assert.assertEquals(GATKVariantContextUtils.indexOfAltAllele(vc,Allele.create(alleles.get(i),true),true),i-1);
+                Assert.assertEquals(GATKVariantContextUtils.indexOfAltAllele(vc,Allele.create(alleles.get(i),true),false),-1);
+            }
+        }
+
+        for (final Allele other : otherAlleles) {
+            Assert.assertEquals(GATKVariantContextUtils.indexOfAllele(vc, other, true, true, true), -1);
+            Assert.assertEquals(GATKVariantContextUtils.indexOfAllele(vc,other,false,true,true),-1);
+            Assert.assertEquals(GATKVariantContextUtils.indexOfAllele(vc,other,true,true,false),-1);
+            Assert.assertEquals(GATKVariantContextUtils.indexOfAllele(vc,other,false,true,false),-1);
+            Assert.assertEquals(GATKVariantContextUtils.indexOfAllele(vc,other,true,false,true),-1);
+            Assert.assertEquals(GATKVariantContextUtils.indexOfAllele(vc,other,false,false,true),-1);
+            Assert.assertEquals(GATKVariantContextUtils.indexOfAllele(vc,other,true,false,false),-1);
+            Assert.assertEquals(GATKVariantContextUtils.indexOfAllele(vc, other, false, false, false),-1);
+        }
+    }
+
+    @DataProvider(name = "indexOfAlleleData")
+    public Iterator<Object[]> indexOfAlleleData() {
+
+        final Allele[] ALTERNATIVE_ALLELES = new Allele[] { T, C, G, ATC, ATCATC};
+
+        final int lastMask = 0x1F;
+
+        return new Iterator<Object[]>() {
+
+            int nextMask = 0;
+
+            @Override
+            public boolean hasNext() {
+                return nextMask <= lastMask;
+            }
+
+            @Override
+            public Object[] next() {
+
+                int mask = nextMask++;
+                final List<Allele> includedAlleles = new ArrayList<>(5);
+                final List<Allele> excludedAlleles = new ArrayList<>(5);
+                for (int i = 0; i < ALTERNATIVE_ALLELES.length; i++) {
+                    ((mask & 1) == 1 ? includedAlleles : excludedAlleles).add(ALTERNATIVE_ALLELES[i]);
+                    mask >>= 1;
+                }
+                return new Object[] { Aref , includedAlleles, excludedAlleles};
+            }
+
+            @Override
+            public void remove() {
+                throw new UnsupportedOperationException();
+            }
+        };
+    }
+
+    @Test(dataProvider="overlapWithData")
+    public void testOverlapsWith(final VariantContext vc, final GenomeLoc genomeLoc) {
+        final boolean expected;
+
+        if (genomeLoc.isUnmapped())
+            expected = false;
+        else if (vc.getStart() > genomeLoc.getStop())
+            expected = false;
+        else if (vc.getEnd() < genomeLoc.getStart())
+            expected = false;
+        else if (!vc.getChr().equals(genomeLoc.getContig()))
+            expected = false;
+        else
+            expected = true;
+
+        Assert.assertEquals(GATKVariantContextUtils.overlapsRegion(vc, genomeLoc), expected);
+    }
+
+
+    private final String[] OVERLAP_WITH_CHROMOSOMES =  { "chr1", "chr20" };
+    private final int[] OVERLAP_WITH_EVENT_SIZES =  { -10, -1, 0, 1, 10 }; // 0 == SNP , -X xbp deletion, +X xbp insertion.
+    private final int[] OVERLAP_WITH_EVENT_STARTS = { 10000000, 10000001,
+                                                      10000005, 10000010,
+                                                      10000009, 10000011,
+                                                      20000000 };
+
+    @DataProvider(name="overlapWithData")
+    public Object[][] overlapWithData() {
+
+        final int totalLocations = OVERLAP_WITH_CHROMOSOMES.length * OVERLAP_WITH_EVENT_SIZES.length * OVERLAP_WITH_EVENT_STARTS.length + 1;
+        final int totalEvents = OVERLAP_WITH_CHROMOSOMES.length * OVERLAP_WITH_EVENT_SIZES.length * OVERLAP_WITH_EVENT_STARTS.length;
+        final GenomeLoc[] locs = new GenomeLoc[totalLocations];
+        final VariantContext[] events = new VariantContext[totalEvents];
+
+        generateAllLocationsAndVariantContextCombinations(OVERLAP_WITH_CHROMOSOMES, OVERLAP_WITH_EVENT_SIZES,
+                OVERLAP_WITH_EVENT_STARTS, locs, events);
+
+        return generateAllParameterCombinationsForOverlapWithData(locs, events);
+    }
+
+    private Object[][] generateAllParameterCombinationsForOverlapWithData(GenomeLoc[] locs, VariantContext[] events) {
+        final List<Object[]> result = new LinkedList<>();
+        for (final GenomeLoc loc : locs)
+            for (final VariantContext event : events)
+               result.add(new Object[] { event , loc });
+
+        return result.toArray(new Object[result.size()][]);
+    }
+
+    private void generateAllLocationsAndVariantContextCombinations(final String[] chrs, final int[] eventSizes,
+                                                                   final int[] eventStarts, final GenomeLoc[] locs,
+                                                                   final VariantContext[] events) {
+        int nextIndex = 0;
+        for (final String chr : chrs )
+            for (final int size : eventSizes )
+                for (final int starts : eventStarts ) {
+                    locs[nextIndex] = genomeLocParser.createGenomeLoc(chr,starts,starts + Math.max(0,size));
+                    events[nextIndex++] = new VariantContextBuilder().source("test").loc(chr,starts,starts + Math.max(0,size)).alleles(Arrays.asList(
+                            Allele.create(randomBases(size <= 0 ? 1 : size + 1, true), true), Allele.create(randomBases(size < 0 ? -size + 1 : 1, false), false))).make();
+                }
+
+        locs[nextIndex++]  = GenomeLoc.UNMAPPED;
+    }
+
+    @Test(dataProvider = "totalPloidyData")
+    public void testTotalPloidy(final int[] ploidies, final int defaultPloidy, final int expected) {
+        final Genotype[] genotypes = new Genotype[ploidies.length];
+        final List<Allele> vcAlleles = Arrays.asList(Aref,C);
+        for (int i = 0; i < genotypes.length; i++)
+            genotypes[i] = new GenotypeBuilder().alleles(GATKVariantContextUtils.noCallAlleles(ploidies[i])).make();
+        final VariantContext vc = new VariantContextBuilder().chr("seq1").genotypes(genotypes).alleles(vcAlleles).make();
+        Assert.assertEquals(GATKVariantContextUtils.totalPloidy(vc,defaultPloidy),expected," " + defaultPloidy + " " + Arrays.toString(ploidies));
+    }
+
+    @DataProvider(name="totalPloidyData")
+    public Object[][] totalPloidyData() {
+        final Random rdn = Utils.getRandomGenerator();
+        final List<Object[]> resultList = new ArrayList<>();
+        for (int i = 0; i < 100; i++) {
+            final int sampleCount = rdn.nextInt(10);
+
+            int expected = 0;
+            final int defaultPloidy = rdn.nextInt(10) + 1;
+            final int[] plodies = new int[sampleCount];
+            for (int j = 0; j < sampleCount; j++) {
+                plodies[j] = rdn.nextInt(10);
+                expected += plodies[j] == 0 ? defaultPloidy : plodies[j];
+            }
+            resultList.add(new Object[] { plodies, defaultPloidy, expected });
+        }
+        return resultList.toArray(new Object[100][]);
+    }
+
+    private byte[] randomBases(final int length, final boolean reference) {
+        final byte[] bases = new byte[length];
+        bases[0] = (byte) (reference  ? 'A' : 'C');
+        BaseUtils.fillWithRandomBases(bases, 1, bases.length);
+        return bases;
+    }
+
+    @Test
+    public void testCreateAlleleMapping(){
+        final List<Allele> alleles = Arrays.asList(Aref,Symbolic,T);
+        final VariantContext vc = new VariantContextBuilder().chr("chr1").alleles(alleles).make();
+        Map<Allele, Allele> map = GATKVariantContextUtils.createAlleleMapping(ATref, vc, alleles);
+
+        final List<Allele> expectedAlleles = Arrays.asList(Allele.create("<Symbolic>", false), Allele.create("TT", false));
+        for ( int i = 0; i < vc.getAlternateAlleles().size(); i++ ){
+            Assert.assertEquals(map.get(vc.getAlternateAlleles().get(i)), expectedAlleles.get(i));
+        }
+    }
+
+    @Test(expectedExceptions = IllegalStateException.class)
+    public void testCreateAlleleMappingException(){
+        final List<Allele> alleles = Arrays.asList(Aref, Symbolic, T);
+        final VariantContext vc = new VariantContextBuilder().chr("chr1").alleles(alleles).make();
+        // Throws an exception if the ref allele length <= ref allele length to extend
+        Map<Allele, Allele> map = GATKVariantContextUtils.createAlleleMapping(Aref, vc, alleles);
+    }
+
+    @Test
+    public void testDetermineSACIndexesToUse(){
+        final VariantContext vc = makeVC("vc", Arrays.asList(Aref, T, C));
+        Assert.assertEquals(GATKVariantContextUtils.determineSACIndexesToUse(vc, Arrays.asList(Aref, C)), Arrays.asList(0, 1, 4, 5));
+        Assert.assertEquals(GATKVariantContextUtils.determineSACIndexesToUse(vc, Arrays.asList(G)), Arrays.asList(0, 1));
+    }
+
+    @Test
+    public void testMakeNewSACs(){
+        int[] expected = {10, 20}  ;
+        final Genotype g = new GenotypeBuilder().alleles(Arrays.asList(Allele.create("A", true), Allele.create("G"))).
+                attribute(GATKVCFConstants.STRAND_COUNT_BY_SAMPLE_KEY, new int[]{5, 10, 15, 20}).make();
+        Assert.assertEquals(GATKVariantContextUtils.makeNewSACs(g, Arrays.asList(1, 3)), expected);
+    }
+}
+
diff --git a/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/variant/VariantContextBenchmark.java b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/variant/VariantContextBenchmark.java
new file mode 100644
index 0000000..79ee956
--- /dev/null
+++ b/public/gatk-utils/src/test/java/org/broadinstitute/gatk/utils/variant/VariantContextBenchmark.java
@@ -0,0 +1,377 @@
+/*
+* Copyright 2012-2015 Broad Institute, Inc.
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package org.broadinstitute.gatk.utils.variant;
+
+import com.google.caliper.Param;
+import com.google.caliper.SimpleBenchmark;
+import htsjdk.tribble.Feature;
+import htsjdk.tribble.FeatureCodec;
+import htsjdk.variant.variantcontext.*;
+import htsjdk.variant.vcf.VCFCodec;
+
+import java.util.ArrayList;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+
+/**
+ * Caliper microbenchmark of parsing a VCF file
+ */
+public class VariantContextBenchmark extends SimpleBenchmark {
+    @Param({"/Users/depristo/Desktop/broadLocal/localData/ALL.chr20.merged_beagle_mach.20101123.snps_indels_svs.genotypes.vcf"})
+    String vcfFile;
+
+    @Param({"1000"})
+    int linesToRead; // set automatically by framework
+
+    @Param({"100"})
+    int nSamplesToTake; // set automatically by framework
+
+    @Param({"10"})
+    int dupsToMerge; // set automatically by framework
+
+    @Param
+    Operation operation; // set automatically by framework
+
+    private String INPUT_STRING;
+
+    public enum Operation {
+        READ,
+        SUBSET_TO_SAMPLES,
+        GET_TYPE,
+        GET_ID,
+        GET_GENOTYPES,
+        GET_ATTRIBUTE_STRING,
+        GET_ATTRIBUTE_INT,
+        GET_N_SAMPLES,
+        GET_GENOTYPES_FOR_SAMPLES,
+        GET_GENOTYPES_IN_ORDER_OF_NAME,
+        CALC_GENOTYPE_COUNTS,
+        MERGE
+    }
+
+    @Override protected void setUp() {
+        // TODO -- update for new tribble interface
+//        try {
+//            ReferenceSequenceFile seq = new CachingIndexedFastaSequenceFile(new File(BaseTest.b37KGReference));
+//            b37GenomeLocParser = new GenomeLocParser(seq);
+//        } catch ( FileNotFoundException e) {
+//            throw new RuntimeException(e);
+//        }
+//
+//        // read it into a String so that we don't try to benchmark IO issues
+//        try {
+//            FileInputStream s = new FileInputStream(new File(vcfFile));
+//            AsciiLineReader lineReader = new AsciiLineReader(s);
+//            int counter = 0;
+//            StringBuffer sb = new StringBuffer();
+//            while (counter++ < linesToRead ) {
+//                String line = lineReader.readLine();
+//                if ( line == null )
+//                    break;
+//                sb.append(line + "\n");
+//            }
+//            s.close();
+//            INPUT_STRING = sb.toString();
+//        } catch (IOException e) {
+//            throw new RuntimeException(e);
+//        }
+    }
+
+    private interface FunctionToBenchmark<T extends Feature> {
+        public void run(T vc);
+    }
+
+    private <T extends Feature> void runBenchmark(FeatureCodec codec, FunctionToBenchmark<T> func) {
+        // TODO -- update for new Tribble interface
+//        try {
+//            InputStream is = new ByteArrayInputStream(INPUT_STRING.getBytes());
+//            AsciiLineReader lineReader = new AsciiLineReader(is);
+//            codec.readHeader(lineReader);
+//
+//            int counter = 0;
+//            while (counter++ < linesToRead ) {
+//                String line = lineReader.readLine();
+//                if ( line == null )
+//                    break;
+//
+//                T vc = codec.decode(line);
+//                func.run(vc);
+//            }
+//        } catch (Exception e) {
+//            System.out.println("Benchmarking run failure because of " + e.getMessage());
+//        }
+    }
+
+    public void timeV14(int rep) {
+        for ( int i = 0; i < rep; i++ ) {
+            FunctionToBenchmark<VariantContext> func = getV14FunctionToBenchmark();
+            final VCFCodec codec = new VCFCodec();
+            runBenchmark(codec, func);
+        }
+    }
+
+    public FunctionToBenchmark<VariantContext> getV14FunctionToBenchmark() {
+        switch ( operation ) {
+            case READ:
+                return new FunctionToBenchmark<VariantContext>() {
+                    public void run(final VariantContext vc) {
+                        ; // empty operation
+                    }
+                };
+            case SUBSET_TO_SAMPLES:
+                return new FunctionToBenchmark<VariantContext>() {
+                    Set<String> samples;
+                    public void run(final VariantContext vc) {
+                        if ( samples == null )
+                            samples = new HashSet<>(new ArrayList<>(vc.getSampleNames()).subList(0, nSamplesToTake));
+                        VariantContext sub = vc.subContextFromSamples(samples);
+                        sub.getNSamples();
+                    }
+                };
+            case GET_TYPE:
+                return new FunctionToBenchmark<VariantContext>() {
+                    public void run(final VariantContext vc) {
+                        vc.getType();
+                    }
+                };
+            case GET_ID:
+                return new FunctionToBenchmark<VariantContext>() {
+                    public void run(final VariantContext vc) {
+                        vc.getID();
+                    }
+                };
+            case GET_GENOTYPES:
+                return new FunctionToBenchmark<VariantContext>() {
+                    public void run(final VariantContext vc) {
+                        vc.getGenotypes().size();
+                    }
+                };
+
+            case GET_GENOTYPES_FOR_SAMPLES:
+                return new FunctionToBenchmark<VariantContext>() {
+                    Set<String> samples;
+                    public void run(final VariantContext vc) {
+                        if ( samples == null )
+                            samples = new HashSet<>(new ArrayList<>(vc.getSampleNames()).subList(0, nSamplesToTake));
+                        vc.getGenotypes(samples).size();
+                    }
+                };
+
+            case GET_ATTRIBUTE_STRING:
+                return new FunctionToBenchmark<VariantContext>() {
+                    public void run(final VariantContext vc) {
+                        vc.getAttribute("AN", null);
+                    }
+                };
+
+            case GET_ATTRIBUTE_INT:
+                return new FunctionToBenchmark<VariantContext>() {
+                    public void run(final VariantContext vc) {
+                        vc.getAttributeAsInt("AC", 0);
+                    }
+                };
+
+            case GET_N_SAMPLES:
+                return new FunctionToBenchmark<VariantContext>() {
+                    public void run(final VariantContext vc) {
+                        vc.getNSamples();
+                    }
+                };
+
+            case GET_GENOTYPES_IN_ORDER_OF_NAME:
+                return new FunctionToBenchmark<VariantContext>() {
+                    public void run(final VariantContext vc) {
+                        ; // TODO - TEST IS BROKEN
+//                        int n = 0;
+//                        for ( final Genotype g: vc.getGenotypesOrderedByName() ) n++;
+                    }
+                };
+
+            case CALC_GENOTYPE_COUNTS:
+                return new FunctionToBenchmark<VariantContext>() {
+                    public void run(final VariantContext vc) {
+                        vc.getHetCount();
+                    }
+                };
+
+            case MERGE:
+                return new FunctionToBenchmark<VariantContext>() {
+                    public void run(final VariantContext vc) {
+                        List<VariantContext> toMerge = new ArrayList<>();
+
+                        for ( int i = 0; i < dupsToMerge; i++ ) {
+                            GenotypesContext gc = GenotypesContext.create(vc.getNSamples());
+                            for ( final Genotype g : vc.getGenotypes() ) {
+                                gc.add(new GenotypeBuilder(g).name(g.getSampleName()+"_"+i).make());
+                            }
+                            toMerge.add(new VariantContextBuilder(vc).genotypes(gc).make());
+                        }
+
+                        GATKVariantContextUtils.simpleMerge(toMerge, null,
+                                GATKVariantContextUtils.FilteredRecordMergeType.KEEP_IF_ANY_UNFILTERED,
+                                GATKVariantContextUtils.GenotypeMergeType.UNSORTED,
+                                true, false, "set", false, true);
+                    }
+                };
+
+            default: throw new IllegalArgumentException("Unexpected operation " + operation);
+        }
+    }
+
+    // --------------------------------------------------------------------------------
+    //
+    // V13
+    //
+    // In order to use this, you must move the v13 version from archive and uncomment
+    //
+    // git mv private/archive/java/src/org/broadinstitute/sting/utils/variantcontext/v13 public/java/test/org/broadinstitute/sting/utils/variantcontext/v13
+    //
+    // --------------------------------------------------------------------------------
+
+//    public void timeV13(int rep) {
+//        for ( int i = 0; i < rep; i++ ) {
+//            FunctionToBenchmark<htsjdk.variant.variantcontext.v13.VariantContext> func = getV13FunctionToBenchmark();
+//            FeatureCodec<htsjdk.variant.variantcontext.v13.VariantContext> codec = new htsjdk.variant.variantcontext.v13.VCFCodec();
+//            runBenchmark(codec, func);
+//        }
+//    }
+//
+//    public FunctionToBenchmark<htsjdk.variant.variantcontext.v13.VariantContext> getV13FunctionToBenchmark() {
+//        switch ( operation ) {
+//            case READ:
+//                return new FunctionToBenchmark<htsjdk.variant.variantcontext.v13.VariantContext>() {
+//                    public void run(final htsjdk.variant.variantcontext.v13.VariantContext vc) {
+//                        ; // empty operation
+//                    }
+//                };
+//            case SUBSET_TO_SAMPLES:
+//                return new FunctionToBenchmark<htsjdk.variant.variantcontext.v13.VariantContext>() {
+//                    List<String> samples;
+//                    public void run(final htsjdk.variant.variantcontext.v13.VariantContext vc) {
+//                        if ( samples == null )
+//                            samples = new ArrayList<String>(vc.getSampleNames()).subList(0, nSamplesToTake);
+//                        htsjdk.variant.variantcontext.v13.VariantContext sub = vc.subContextFromGenotypes(vc.getGenotypes(samples).values());
+//                        sub.getNSamples();
+//                    }
+//                };
+//
+//            case GET_TYPE:
+//                return new FunctionToBenchmark<htsjdk.variant.variantcontext.v13.VariantContext>() {
+//                    public void run(final htsjdk.variant.variantcontext.v13.VariantContext vc) {
+//                        vc.getType();
+//                    }
+//                };
+//            case GET_ID:
+//                return new FunctionToBenchmark<htsjdk.variant.variantcontext.v13.VariantContext>() {
+//                    public void run(final htsjdk.variant.variantcontext.v13.VariantContext vc) {
+//                        vc.getID();
+//                    }
+//                };
+//            case GET_GENOTYPES:
+//                return new FunctionToBenchmark<htsjdk.variant.variantcontext.v13.VariantContext>() {
+//                    public void run(final htsjdk.variant.variantcontext.v13.VariantContext vc) {
+//                        vc.getGenotypes().size();
+//                    }
+//                };
+//
+//            case GET_GENOTYPES_FOR_SAMPLES:
+//                return new FunctionToBenchmark<htsjdk.variant.variantcontext.v13.VariantContext>() {
+//                    Set<String> samples;
+//                    public void run(final htsjdk.variant.variantcontext.v13.VariantContext vc) {
+//                        if ( samples == null )
+//                            samples = new HashSet<String>(new ArrayList<String>(vc.getSampleNames()).subList(0, nSamplesToTake));
+//                        vc.getGenotypes(samples).size();
+//                    }
+//                };
+//
+//            case GET_ATTRIBUTE_STRING:
+//                return new FunctionToBenchmark<htsjdk.variant.variantcontext.v13.VariantContext>() {
+//                    public void run(final htsjdk.variant.variantcontext.v13.VariantContext vc) {
+//                        vc.getExtendedAttribute("AN", null);
+//                    }
+//                };
+//
+//            case GET_ATTRIBUTE_INT:
+//                return new FunctionToBenchmark<htsjdk.variant.variantcontext.v13.VariantContext>() {
+//                    public void run(final htsjdk.variant.variantcontext.v13.VariantContext vc) {
+//                        vc.getAttributeAsInt("AC", 0);
+//                    }
+//                };
+//
+//            case GET_N_SAMPLES:
+//                return new FunctionToBenchmark<htsjdk.variant.variantcontext.v13.VariantContext>() {
+//                    public void run(final htsjdk.variant.variantcontext.v13.VariantContext vc) {
+//                        vc.getNSamples();
+//                    }
+//                };
+//
+//            case GET_GENOTYPES_IN_ORDER_OF_NAME:
+//                return new FunctionToBenchmark<htsjdk.variant.variantcontext.v13.VariantContext>() {
+//                    public void run(final htsjdk.variant.variantcontext.v13.VariantContext vc) {
+//                        ; // TODO - TEST IS BROKEN
+//                        //vc.getGenotypesOrderedByName();
+//                    }
+//                };
+//
+//            case CALC_GENOTYPE_COUNTS:
+//                return new FunctionToBenchmark<htsjdk.variant.variantcontext.v13.VariantContext>() {
+//                    public void run(final htsjdk.variant.variantcontext.v13.VariantContext vc) {
+//                        vc.getHetCount();
+//                    }
+//                };
+//
+//            case MERGE:
+//                return new FunctionToBenchmark<htsjdk.variant.variantcontext.v13.VariantContext>() {
+//                    public void run(final htsjdk.variant.variantcontext.v13.VariantContext vc) {
+//                        List<htsjdk.variant.variantcontext.v13.VariantContext> toMerge = new ArrayList<htsjdk.variant.variantcontext.v13.VariantContext>();
+//
+//                        for ( int i = 0; i < dupsToMerge; i++ ) {
+//                            Map<String, htsjdk.variant.variantcontext.v13.Genotype> gc = new HashMap<String, htsjdk.variant.variantcontext.v13.Genotype>();
+//                            for ( final htsjdk.variant.variantcontext.v13.Genotype g : vc.getGenotypes().values() ) {
+//                                String name = g.getSampleName()+"_"+i;
+//                                gc.put(name, new htsjdk.variant.variantcontext.v13.Genotype(name,
+//                                        g.getAlleles(), g.getLog10PError(), g.getFilters(), g.getAttributes(), g.isPhased(), g.getLikelihoods().getAsVector()));
+//                                toMerge.add(htsjdk.variant.variantcontext.v13.VariantContext.modifyGenotypes(vc, gc));
+//                            }
+//                        }
+//
+//                        htsjdk.variant.variantcontext.v13.VariantContextUtils.simpleMerge(b37GenomeLocParser,
+//                                toMerge, null,
+//                                htsjdk.variant.variantcontext.v13.VariantContextUtils.FilteredRecordMergeType.KEEP_IF_ANY_UNFILTERED,
+//                                htsjdk.variant.variantcontext.v13.VariantContextUtils.GenotypeMergeType.UNSORTED,
+//                                true, false, "set", false, true, false);
+//                    }
+//                };
+//
+//            default: throw new IllegalArgumentException("Unexpected operation " + operation);
+//        }
+//    }
+
+    public static void main(String[] args) {
+        com.google.caliper.Runner.main(VariantContextBenchmark.class, args);
+    }
+}
diff --git a/public/gatk-utils/src/test/resources/exampleBAM.bam b/public/gatk-utils/src/test/resources/exampleBAM.bam
new file mode 100644
index 0000000..d1c9d80
Binary files /dev/null and b/public/gatk-utils/src/test/resources/exampleBAM.bam differ
diff --git a/public/gatk-utils/src/test/resources/exampleBAM.bam.bai b/public/gatk-utils/src/test/resources/exampleBAM.bam.bai
new file mode 100644
index 0000000..33beef3
Binary files /dev/null and b/public/gatk-utils/src/test/resources/exampleBAM.bam.bai differ
diff --git a/public/gatk-utils/src/test/resources/exampleBAM.simple.bai b/public/gatk-utils/src/test/resources/exampleBAM.simple.bai
new file mode 100644
index 0000000..9320bab
Binary files /dev/null and b/public/gatk-utils/src/test/resources/exampleBAM.simple.bai differ
diff --git a/public/gatk-utils/src/test/resources/exampleBAM.simple.bam b/public/gatk-utils/src/test/resources/exampleBAM.simple.bam
new file mode 100644
index 0000000..d236e79
Binary files /dev/null and b/public/gatk-utils/src/test/resources/exampleBAM.simple.bam differ
diff --git a/public/gatk-utils/src/test/resources/exampleBAM_with_unmapped.bai b/public/gatk-utils/src/test/resources/exampleBAM_with_unmapped.bai
new file mode 100644
index 0000000..fd1625e
Binary files /dev/null and b/public/gatk-utils/src/test/resources/exampleBAM_with_unmapped.bai differ
diff --git a/public/gatk-utils/src/test/resources/exampleBAM_with_unmapped.bam b/public/gatk-utils/src/test/resources/exampleBAM_with_unmapped.bam
new file mode 100644
index 0000000..e5181b3
Binary files /dev/null and b/public/gatk-utils/src/test/resources/exampleBAM_with_unmapped.bam differ
diff --git a/public/gatk-utils/src/test/resources/exampleCRAM-nobai-nocrai.cram b/public/gatk-utils/src/test/resources/exampleCRAM-nobai-nocrai.cram
new file mode 100644
index 0000000..d4db8b3
Binary files /dev/null and b/public/gatk-utils/src/test/resources/exampleCRAM-nobai-nocrai.cram differ
diff --git a/public/gatk-utils/src/test/resources/exampleCRAM-nobai-withcrai.cram b/public/gatk-utils/src/test/resources/exampleCRAM-nobai-withcrai.cram
new file mode 100644
index 0000000..d4db8b3
Binary files /dev/null and b/public/gatk-utils/src/test/resources/exampleCRAM-nobai-withcrai.cram differ
diff --git a/public/gatk-utils/src/test/resources/exampleCRAM-nobai-withcrai.cram.crai b/public/gatk-utils/src/test/resources/exampleCRAM-nobai-withcrai.cram.crai
new file mode 100644
index 0000000..c3d728b
Binary files /dev/null and b/public/gatk-utils/src/test/resources/exampleCRAM-nobai-withcrai.cram.crai differ
diff --git a/public/gatk-utils/src/test/resources/exampleCRAM.cram b/public/gatk-utils/src/test/resources/exampleCRAM.cram
new file mode 100644
index 0000000..78d606d
Binary files /dev/null and b/public/gatk-utils/src/test/resources/exampleCRAM.cram differ
diff --git a/public/gatk-utils/src/test/resources/exampleCRAM.cram.bai b/public/gatk-utils/src/test/resources/exampleCRAM.cram.bai
new file mode 100644
index 0000000..e2ca31c
Binary files /dev/null and b/public/gatk-utils/src/test/resources/exampleCRAM.cram.bai differ
diff --git a/public/gatk-utils/src/test/resources/exampleCRAM.cram.crai b/public/gatk-utils/src/test/resources/exampleCRAM.cram.crai
new file mode 100644
index 0000000..3eee8e0
Binary files /dev/null and b/public/gatk-utils/src/test/resources/exampleCRAM.cram.crai differ
diff --git a/public/gatk-engine/src/test/resources/exampleDBSNP.vcf b/public/gatk-utils/src/test/resources/exampleDBSNP.vcf
similarity index 100%
rename from public/gatk-engine/src/test/resources/exampleDBSNP.vcf
rename to public/gatk-utils/src/test/resources/exampleDBSNP.vcf
diff --git a/public/gatk-engine/src/test/resources/exampleDBSNP.vcf.idx b/public/gatk-utils/src/test/resources/exampleDBSNP.vcf.idx
similarity index 100%
rename from public/gatk-engine/src/test/resources/exampleDBSNP.vcf.idx
rename to public/gatk-utils/src/test/resources/exampleDBSNP.vcf.idx
diff --git a/public/gatk-engine/src/test/resources/exampleFASTA-3contigs.fasta b/public/gatk-utils/src/test/resources/exampleFASTA-3contigs.fasta
similarity index 100%
rename from public/gatk-engine/src/test/resources/exampleFASTA-3contigs.fasta
rename to public/gatk-utils/src/test/resources/exampleFASTA-3contigs.fasta
diff --git a/public/gatk-engine/src/test/resources/exampleFASTA-combined.fasta b/public/gatk-utils/src/test/resources/exampleFASTA-combined.fasta
similarity index 100%
rename from public/gatk-engine/src/test/resources/exampleFASTA-combined.fasta
rename to public/gatk-utils/src/test/resources/exampleFASTA-combined.fasta
diff --git a/public/gatk-engine/src/test/resources/exampleFASTA-windows.fasta b/public/gatk-utils/src/test/resources/exampleFASTA-windows.fasta
similarity index 100%
rename from public/gatk-engine/src/test/resources/exampleFASTA-windows.fasta
rename to public/gatk-utils/src/test/resources/exampleFASTA-windows.fasta
diff --git a/public/gatk-engine/src/test/resources/exampleFASTA.dict b/public/gatk-utils/src/test/resources/exampleFASTA.dict
similarity index 100%
rename from public/gatk-engine/src/test/resources/exampleFASTA.dict
rename to public/gatk-utils/src/test/resources/exampleFASTA.dict
diff --git a/public/gatk-engine/src/test/resources/exampleFASTA.fasta b/public/gatk-utils/src/test/resources/exampleFASTA.fasta
similarity index 100%
rename from public/gatk-engine/src/test/resources/exampleFASTA.fasta
rename to public/gatk-utils/src/test/resources/exampleFASTA.fasta
diff --git a/public/gatk-engine/src/test/resources/exampleFASTA.fasta.amb b/public/gatk-utils/src/test/resources/exampleFASTA.fasta.amb
similarity index 100%
rename from public/gatk-engine/src/test/resources/exampleFASTA.fasta.amb
rename to public/gatk-utils/src/test/resources/exampleFASTA.fasta.amb
diff --git a/public/gatk-engine/src/test/resources/exampleFASTA.fasta.ann b/public/gatk-utils/src/test/resources/exampleFASTA.fasta.ann
similarity index 100%
rename from public/gatk-engine/src/test/resources/exampleFASTA.fasta.ann
rename to public/gatk-utils/src/test/resources/exampleFASTA.fasta.ann
diff --git a/public/gatk-engine/src/test/resources/exampleFASTA.fasta.bwt b/public/gatk-utils/src/test/resources/exampleFASTA.fasta.bwt
similarity index 100%
rename from public/gatk-engine/src/test/resources/exampleFASTA.fasta.bwt
rename to public/gatk-utils/src/test/resources/exampleFASTA.fasta.bwt
diff --git a/public/gatk-engine/src/test/resources/exampleFASTA.fasta.fai b/public/gatk-utils/src/test/resources/exampleFASTA.fasta.fai
similarity index 100%
rename from public/gatk-engine/src/test/resources/exampleFASTA.fasta.fai
rename to public/gatk-utils/src/test/resources/exampleFASTA.fasta.fai
diff --git a/public/gatk-engine/src/test/resources/exampleFASTA.fasta.pac b/public/gatk-utils/src/test/resources/exampleFASTA.fasta.pac
similarity index 100%
rename from public/gatk-engine/src/test/resources/exampleFASTA.fasta.pac
rename to public/gatk-utils/src/test/resources/exampleFASTA.fasta.pac
diff --git a/public/gatk-engine/src/test/resources/exampleFASTA.fasta.rbwt b/public/gatk-utils/src/test/resources/exampleFASTA.fasta.rbwt
similarity index 100%
rename from public/gatk-engine/src/test/resources/exampleFASTA.fasta.rbwt
rename to public/gatk-utils/src/test/resources/exampleFASTA.fasta.rbwt
diff --git a/public/gatk-engine/src/test/resources/exampleFASTA.fasta.rpac b/public/gatk-utils/src/test/resources/exampleFASTA.fasta.rpac
similarity index 100%
rename from public/gatk-engine/src/test/resources/exampleFASTA.fasta.rpac
rename to public/gatk-utils/src/test/resources/exampleFASTA.fasta.rpac
diff --git a/public/gatk-engine/src/test/resources/exampleFASTA.fasta.rsa b/public/gatk-utils/src/test/resources/exampleFASTA.fasta.rsa
similarity index 100%
rename from public/gatk-engine/src/test/resources/exampleFASTA.fasta.rsa
rename to public/gatk-utils/src/test/resources/exampleFASTA.fasta.rsa
diff --git a/public/gatk-engine/src/test/resources/exampleFASTA.fasta.sa b/public/gatk-utils/src/test/resources/exampleFASTA.fasta.sa
similarity index 100%
rename from public/gatk-engine/src/test/resources/exampleFASTA.fasta.sa
rename to public/gatk-utils/src/test/resources/exampleFASTA.fasta.sa
diff --git a/public/gatk-engine/src/test/resources/exampleGATKReport.eval b/public/gatk-utils/src/test/resources/exampleGATKReport.eval
similarity index 100%
rename from public/gatk-engine/src/test/resources/exampleGATKReport.eval
rename to public/gatk-utils/src/test/resources/exampleGATKReport.eval
diff --git a/public/gatk-engine/src/test/resources/exampleGATKReportv1.tbl b/public/gatk-utils/src/test/resources/exampleGATKReportv1.tbl
similarity index 100%
rename from public/gatk-engine/src/test/resources/exampleGATKReportv1.tbl
rename to public/gatk-utils/src/test/resources/exampleGATKReportv1.tbl
diff --git a/public/gatk-engine/src/test/resources/exampleGATKReportv2.tbl b/public/gatk-utils/src/test/resources/exampleGATKReportv2.tbl
similarity index 100%
rename from public/gatk-engine/src/test/resources/exampleGATKReportv2.tbl
rename to public/gatk-utils/src/test/resources/exampleGATKReportv2.tbl
diff --git a/public/gatk-engine/src/test/resources/exampleGRP.grp b/public/gatk-utils/src/test/resources/exampleGRP.grp
similarity index 100%
rename from public/gatk-engine/src/test/resources/exampleGRP.grp
rename to public/gatk-utils/src/test/resources/exampleGRP.grp
diff --git a/public/gatk-engine/src/test/resources/exampleINTERVAL.intervals b/public/gatk-utils/src/test/resources/exampleINTERVAL.intervals
similarity index 100%
rename from public/gatk-engine/src/test/resources/exampleINTERVAL.intervals
rename to public/gatk-utils/src/test/resources/exampleINTERVAL.intervals
diff --git a/public/gatk-utils/src/test/resources/exampleNORG.bam b/public/gatk-utils/src/test/resources/exampleNORG.bam
new file mode 100644
index 0000000..7967d83
Binary files /dev/null and b/public/gatk-utils/src/test/resources/exampleNORG.bam differ
diff --git a/public/gatk-utils/src/test/resources/exampleNORG.bam.bai b/public/gatk-utils/src/test/resources/exampleNORG.bam.bai
new file mode 100644
index 0000000..f4536d3
Binary files /dev/null and b/public/gatk-utils/src/test/resources/exampleNORG.bam.bai differ
diff --git a/public/gatk-engine/src/test/resources/forAlleleFractionSimulation.vcf b/public/gatk-utils/src/test/resources/forAlleleFractionSimulation.vcf
similarity index 100%
rename from public/gatk-engine/src/test/resources/forAlleleFractionSimulation.vcf
rename to public/gatk-utils/src/test/resources/forAlleleFractionSimulation.vcf
diff --git a/public/gatk-engine/src/test/resources/forAlleleFractionSimulation.vcf.idx b/public/gatk-utils/src/test/resources/forAlleleFractionSimulation.vcf.idx
similarity index 100%
rename from public/gatk-engine/src/test/resources/forAlleleFractionSimulation.vcf.idx
rename to public/gatk-utils/src/test/resources/forAlleleFractionSimulation.vcf.idx
diff --git a/public/gatk-engine/src/test/resources/forLongInsert.vcf b/public/gatk-utils/src/test/resources/forLongInsert.vcf
similarity index 100%
rename from public/gatk-engine/src/test/resources/forLongInsert.vcf
rename to public/gatk-utils/src/test/resources/forLongInsert.vcf
diff --git a/public/gatk-engine/src/test/resources/forLongInsert.vcf.idx b/public/gatk-utils/src/test/resources/forLongInsert.vcf.idx
similarity index 100%
rename from public/gatk-engine/src/test/resources/forLongInsert.vcf.idx
rename to public/gatk-utils/src/test/resources/forLongInsert.vcf.idx
diff --git a/public/gatk-engine/src/test/resources/forSimulation.vcf b/public/gatk-utils/src/test/resources/forSimulation.vcf
similarity index 100%
rename from public/gatk-engine/src/test/resources/forSimulation.vcf
rename to public/gatk-utils/src/test/resources/forSimulation.vcf
diff --git a/public/gatk-engine/src/test/resources/forSimulation.vcf.idx b/public/gatk-utils/src/test/resources/forSimulation.vcf.idx
similarity index 100%
rename from public/gatk-engine/src/test/resources/forSimulation.vcf.idx
rename to public/gatk-utils/src/test/resources/forSimulation.vcf.idx
diff --git a/public/gatk-tools-public/src/test/resources/testProperties.properties b/public/gatk-utils/src/test/resources/testProperties.properties
similarity index 100%
rename from public/gatk-tools-public/src/test/resources/testProperties.properties
rename to public/gatk-utils/src/test/resources/testProperties.properties
diff --git a/public/gatk-engine/src/test/resources/testfile.sam b/public/gatk-utils/src/test/resources/testfile.sam
similarity index 100%
rename from public/gatk-engine/src/test/resources/testfile.sam
rename to public/gatk-utils/src/test/resources/testfile.sam
diff --git a/public/gsalib/pom.xml b/public/gsalib/pom.xml
index 1ce5b4d..df9e7fc 100644
--- a/public/gsalib/pom.xml
+++ b/public/gsalib/pom.xml
@@ -5,7 +5,7 @@
     <parent>
         <groupId>org.broadinstitute.gatk</groupId>
         <artifactId>gatk-aggregator</artifactId>
-        <version>3.3</version>
+        <version>3.5</version>
         <relativePath>../..</relativePath>
     </parent>
 
@@ -32,6 +32,14 @@
                         </goals>
                         <phase>${gatk.generate-resources.phase}</phase>
                         <configuration>
+                            <!--
+                            Looking at the assembly plugin 2.4(.1) source code, <updateOnly>true</updateOnly>
+                            is equivalent to plexus_archiver.setForced(false). Unlike the docs say, this
+                            does work even if the file does not exist.
+                            Also, why..? "archiver.setUseJvmChmod( configSource.isUpdateOnly() );"
+                            Appears to only affect the plexus dir/DirectoryArchiver.
+                            -->
+                            <updateOnly>true</updateOnly>
                             <appendAssemblyId>false</appendAssemblyId>
                             <descriptors>
                                 <descriptor>src/assembly/gsalib.xml</descriptor>
diff --git a/public/gsalib/src/R/DESCRIPTION b/public/gsalib/src/R/DESCRIPTION
index 229204f..a3736d0 100644
--- a/public/gsalib/src/R/DESCRIPTION
+++ b/public/gsalib/src/R/DESCRIPTION
@@ -1,13 +1,14 @@
 Package: gsalib
 Type: Package
-Title: Utility functions
-Version: 1.0
-Date: 2010-10-02
-Imports: gplots, ggplot2, png
+Title: Utility Functions For GATK
+Version: 2.2
+Date: 2015-03-17
 Author: Kiran Garimella
-Maintainer: Mauricio Carneiro <carneiro at broadinstitute.org>
-BugReports: http://gatkforums.broadinstitute.org
-Description: Utility functions for GATK NGS analyses
+Maintainer: Geraldine Van der Auwera <vdauwera at broadinstitute.org>
+Description: This package contains utility functions used by the Genome Analysis Toolkit (GATK) to load tables and plot data. The GATK is a toolkit for variant discovery in high-throughput sequencing data.
 License: MIT + file LICENSE
 LazyLoad: yes
+Packaged: 2013-07-02 07:56:13 UTC; gege
 NeedsCompilation: no
+Repository: CRAN
+Date/Publication: 2013-07-03 17:30:49
diff --git a/public/gsalib/src/R/NAMESPACE b/public/gsalib/src/R/NAMESPACE
index 0bfe475..a976c20 100644
--- a/public/gsalib/src/R/NAMESPACE
+++ b/public/gsalib/src/R/NAMESPACE
@@ -1 +1,2 @@
-exportPattern("^[^\\.]")
\ No newline at end of file
+export(gsa.read.gatkreport)
+export(gsa.reshape.concordance.table)
\ No newline at end of file
diff --git a/public/gsalib/src/R/R/gsa.error.R b/public/gsalib/src/R/R/gsa.error.R
deleted file mode 100644
index 1c6a560..0000000
--- a/public/gsalib/src/R/R/gsa.error.R
+++ /dev/null
@@ -1,12 +0,0 @@
-gsa.error <- function(message) {
-    message("");
-    gsa.message("Error: **********");
-    gsa.message(sprintf("Error: %s", message));
-    gsa.message("Error: **********");
-    message("");
-
-    traceback();
-
-    message("");
-    stop(message, call. = FALSE);
-}
diff --git a/public/gsalib/src/R/R/gsa.getargs.R b/public/gsalib/src/R/R/gsa.getargs.R
deleted file mode 100644
index 94613bf..0000000
--- a/public/gsalib/src/R/R/gsa.getargs.R
+++ /dev/null
@@ -1,116 +0,0 @@
-.gsa.getargs.usage <- function(argspec, doc) {
-    cargs = commandArgs();
-
-    usage = "Usage:";
-
-    fileIndex = grep("--file=", cargs);
-    if (length(fileIndex) > 0) {
-        progname = gsub("--file=", "", cargs[fileIndex[1]]);
-
-        usage = sprintf("Usage: Rscript %s [arguments]", progname);
-
-        if (!is.na(doc)) {
-            message(sprintf("%s: %s\n", progname, doc));
-        }
-    }
-
-    message(usage);
-
-    for (argname in names(argspec)) {
-        key = argname;
-        defaultValue = 0;
-        doc = "";
-
-        if (is.list(argspec[[argname]])) {
-            defaultValue = argspec[[argname]]$value;
-            doc = argspec[[argname]]$doc;
-        }
-
-        message(sprintf(" -%-10s\t[default: %s]\t%s", key, defaultValue, doc));
-    }
-
-    message("");
-
-    stop(call. = FALSE);
-}
-
-gsa.getargs <- function(argspec, doc = NA) {
-    argsenv = new.env();
-
-    for (argname in names(argspec)) {
-        value = 0;
-        if (is.list(argspec[[argname]])) {
-            value = argspec[[argname]]$value;
-        } else {
-            value = argspec[[argname]];
-        }
-
-        assign(argname, value, envir=argsenv);
-    }
-
-    if (interactive()) {
-        for (argname in names(argspec)) {
-            value = get(argname, envir=argsenv);
-
-            if (is.na(value) | is.null(value)) {
-                if (exists("cmdargs")) {
-                    assign(argname, cmdargs[[argname]], envir=argsenv);
-                } else {
-                    assign(argname, readline(sprintf("Please enter a value for '%s': ", argname)), envir=argsenv);
-                }
-            } else {
-                assign(argname, value, envir=argsenv);
-            }
-        }
-    } else {
-        cargs = commandArgs(TRUE);
-
-        if (length(cargs) == 0) {
-            .gsa.getargs.usage(argspec, doc);
-        }
-
-        for (i in 1:length(cargs)) {
-            if (length(grep("^-", cargs[i], ignore.case=TRUE)) > 0) {
-                key = gsub("-", "", cargs[i]);
-                value = cargs[i+1];
-
-                if (key == "h" | key == "help") {
-                    .gsa.getargs.usage(argspec, doc);
-                }
-
-                if (length(grep("^[\\d\\.e\\+\\-]+$", value, perl=TRUE, ignore.case=TRUE)) > 0) {
-                    value = as.numeric(value);
-                }
-                
-                assign(key, value, envir=argsenv);
-            }
-        }
-    }
-
-    args = as.list(argsenv);
-
-    isMissingArgs = 0;
-    missingArgs = c();
-    
-    for (arg in names(argspec)) {
-        if (is.na(args[[arg]]) | is.null(args[[arg]])) {
-            gsa.warn(sprintf("Value for required argument '-%s' was not specified", arg));
-
-            isMissingArgs = 1;
-            missingArgs = c(missingArgs, arg);
-        }
-    }
-
-    if (isMissingArgs) {
-        gsa.error(
-            paste(
-                "Missing required arguments: -",
-                paste(missingArgs, collapse=" -"),
-                ".  Specify -h or -help to this script for a list of available arguments.",
-                sep=""
-            )
-        );
-    }
-
-    args;
-}
diff --git a/public/gsalib/src/R/R/gsa.message.R b/public/gsalib/src/R/R/gsa.message.R
deleted file mode 100644
index a2b909d..0000000
--- a/public/gsalib/src/R/R/gsa.message.R
+++ /dev/null
@@ -1,3 +0,0 @@
-gsa.message <- function(message) {
-    message(sprintf("[gsalib] %s", message));
-}
diff --git a/public/gsalib/src/R/R/gsa.plot.venn.R b/public/gsalib/src/R/R/gsa.plot.venn.R
deleted file mode 100644
index b1353cc..0000000
--- a/public/gsalib/src/R/R/gsa.plot.venn.R
+++ /dev/null
@@ -1,50 +0,0 @@
-gsa.plot.venn <-
-function(a, b, c=0, a_and_b, a_and_c=0, b_and_c=0,
-                     col=c("#FF6342", "#63C6DE", "#ADDE63"),
-                     pos=c(0.20, 0.20, 0.80, 0.82),
-                     debug=0
-                    ) {
-    library(png);
-    library(graphics);
-
-    # Set up properties
-    for (i in 1:length(col)) {
-        rgbcol = col2rgb(col[i]);
-        col[i] = sprintf("%02X%02X%02X", rgbcol[1], rgbcol[2], rgbcol[3]);
-    }
-
-    chco = paste(col[1], col[2], col[3], sep=",");
-    chd = paste(a, b, c, a_and_b, a_and_c, b_and_c, sep=",");
-
-    props = c(
-        'cht=v',
-        'chs=525x525',
-        'chds=0,10000000000',
-        paste('chco=', chco, sep=""),
-        paste('chd=t:', chd, sep="")
-    );
-    proplist = paste(props[1], props[2], props[3], props[4], props[5], sep='&');
-
-    # Get the venn diagram (as a temporary file)
-    filename = tempfile("venn");
-    cmd = paste("wget -O ", filename, " 'http://chart.apis.google.com/chart?", proplist, "' > /dev/null 2>&1", sep="");
-
-    if (debug == 1) {
-        print(cmd);
-    }
-    system(cmd);
-
-    # Render the temp png file into a plotting frame
-    a = readPNG(filename);
-    
-    plot(0, 0, type="n", xaxt="n", yaxt="n", bty="n", xlim=c(0, 1), ylim=c(0, 1), xlab="", ylab="");
-    if (c == 0 || a >= b) {
-        rasterImage(a, pos[1], pos[2], pos[3], pos[4]);
-    } else {
-        rasterImage(a, 0.37+pos[1], 0.37+pos[2], 0.37+pos[3], 0.37+pos[4], angle=180);
-    }
-
-    # Clean up!
-    unlink(filename);
-}
-
diff --git a/public/gsalib/src/R/R/gsa.read.eval.R b/public/gsalib/src/R/R/gsa.read.eval.R
deleted file mode 100644
index f1d4909..0000000
--- a/public/gsalib/src/R/R/gsa.read.eval.R
+++ /dev/null
@@ -1,83 +0,0 @@
-.gsa.attemptToLoadFile <- function(filename) {
-    file = NA;
-
-    if (file.exists(filename) & file.info(filename)$size > 500) {
-        file = read.csv(filename, header=TRUE, comment.char="#");
-    }
-
-    file;
-}
-
-gsa.read.eval <-
-function(evalRoot) {
-    fileAlleleCountStats = paste(evalRoot, ".AlleleCountStats.csv", sep="");
-    fileCompOverlap = paste(evalRoot, ".Comp_Overlap.csv", sep="");
-    fileCountVariants = paste(evalRoot, ".Count_Variants.csv", sep="");
-    fileGenotypeConcordance = paste(evalRoot, ".Genotype_Concordance.csv", sep="");
-    fileMetricsByAc = paste(evalRoot, ".MetricsByAc.csv", sep="");
-    fileMetricsBySample = paste(evalRoot, ".MetricsBySample.csv", sep="");
-    fileQuality_Metrics_by_allele_count = paste(evalRoot, ".Quality_Metrics_by_allele_count.csv", sep="");
-    fileQualityScoreHistogram = paste(evalRoot, ".QualityScoreHistogram.csv", sep="");
-    fileSampleStatistics = paste(evalRoot, ".Sample_Statistics.csv", sep="");
-    fileSampleSummaryStatistics = paste(evalRoot, ".Sample_Summary_Statistics.csv", sep="");
-    fileSimpleMetricsBySample = paste(evalRoot, ".SimpleMetricsBySample.csv", sep="");
-    fileTi_slash_Tv_Variant_Evaluator = paste(evalRoot, ".Ti_slash_Tv_Variant_Evaluator.csv", sep="");
-    fileTiTvStats = paste(evalRoot, ".TiTvStats.csv", sep="");
-    fileVariant_Quality_Score = paste(evalRoot, ".Variant_Quality_Score.csv", sep="");
-
-    eval = list(
-        AlleleCountStats = NA,
-        CompOverlap = NA,
-        CountVariants = NA,
-        GenotypeConcordance = NA,
-        MetricsByAc = NA,
-        MetricsBySample = NA,
-        Quality_Metrics_by_allele_count = NA,
-        QualityScoreHistogram = NA,
-        SampleStatistics = NA,
-        SampleSummaryStatistics = NA,
-        SimpleMetricsBySample = NA,
-        TiTv = NA,
-        TiTvStats = NA,
-        Variant_Quality_Score = NA,
-
-        CallsetNames = c(),
-        CallsetOnlyNames = c(),
-        CallsetFilteredNames = c()
-    );
-
-    eval$AlleleCountStats                = .gsa.attemptToLoadFile(fileAlleleCountStats);
-    eval$CompOverlap                     = .gsa.attemptToLoadFile(fileCompOverlap);
-    eval$CountVariants                   = .gsa.attemptToLoadFile(fileCountVariants);
-    eval$GenotypeConcordance             = .gsa.attemptToLoadFile(fileGenotypeConcordance);
-    eval$MetricsByAc                     = .gsa.attemptToLoadFile(fileMetricsByAc);
-    eval$MetricsBySample                 = .gsa.attemptToLoadFile(fileMetricsBySample);
-    eval$Quality_Metrics_by_allele_count = .gsa.attemptToLoadFile(fileQuality_Metrics_by_allele_count);
-    eval$QualityScoreHistogram           = .gsa.attemptToLoadFile(fileQualityScoreHistogram);
-    eval$SampleStatistics                = .gsa.attemptToLoadFile(fileSampleStatistics);
-    eval$SampleSummaryStatistics         = .gsa.attemptToLoadFile(fileSampleSummaryStatistics);
-    eval$SimpleMetricsBySample           = .gsa.attemptToLoadFile(fileSimpleMetricsBySample);
-    eval$TiTv                            = .gsa.attemptToLoadFile(fileTi_slash_Tv_Variant_Evaluator);
-    eval$TiTvStats                       = .gsa.attemptToLoadFile(fileTiTvStats);
-    eval$Variant_Quality_Score           = .gsa.attemptToLoadFile(fileVariant_Quality_Score);
-
-    uniqueJexlExpressions = unique(eval$TiTv$jexl_expression);
-    eval$CallsetOnlyNames = as.vector(uniqueJexlExpressions[grep("FilteredIn|Intersection|none", uniqueJexlExpressions, invert=TRUE, ignore.case=TRUE)]);
-    eval$CallsetNames = as.vector(gsub("-only", "", eval$CallsetOnlyNames));
-    eval$CallsetFilteredNames = as.vector(c(
-        paste(gsub("^(\\w)", "In\\U\\1", eval$CallsetNames[1], perl=TRUE), "-Filtered", gsub("^(\\w)", "In\\U\\1", eval$CallsetNames[2], perl=TRUE), sep=""),
-        paste(gsub("^(\\w)", "In\\U\\1", eval$CallsetNames[2], perl=TRUE), "-Filtered", gsub("^(\\w)", "In\\U\\1", eval$CallsetNames[1], perl=TRUE), sep=""))
-    );
-
-    if (!(eval$CallsetFilteredNames[1] %in% unique(eval$TiTv$jexl_expression))) {
-        eval$CallsetFilteredNames[1] = paste("In", eval$CallsetNames[1], "-FilteredIn", eval$CallsetNames[2], sep="");
-    }
-
-    if (!(eval$CallsetFilteredNames[2] %in% unique(eval$TiTv$jexl_expression))) {
-        eval$CallsetFilteredNames[2] = paste("In", eval$CallsetNames[2], "-FilteredIn", eval$CallsetNames[1], sep="");
-        #eval$CallsetFilteredNames[2] = paste(gsub("^(\\w)", "In", eval$CallsetNames[2], perl=TRUE), "-Filtered", gsub("^(\\w)", "In", eval$CallsetNames[1], perl=TRUE), sep="");
-    }
-
-    eval;
-}
-
diff --git a/public/gsalib/src/R/R/gsa.read.gatkreport.R b/public/gsalib/src/R/R/gsa.read.gatkreport.R
index eba94c0..095502a 100644
--- a/public/gsalib/src/R/R/gsa.read.gatkreport.R
+++ b/public/gsalib/src/R/R/gsa.read.gatkreport.R
@@ -112,7 +112,7 @@ gsa.read.gatkreportv1 <- function(lines) {
   
   finishTable <- function() {
     if ( rowCount == 1 )
-      # good I hate R.  Work around to avoid collapsing into an unstructured vector when 
+      # Workaround to avoid collapsing into an unstructured vector when 
       # there's only 1 row
       sub <- t(as.matrix(tableRows[1:rowCount,]))
     else
diff --git a/public/gsalib/src/R/R/gsa.read.squidmetrics.R b/public/gsalib/src/R/R/gsa.read.squidmetrics.R
deleted file mode 100644
index 39fa1ad..0000000
--- a/public/gsalib/src/R/R/gsa.read.squidmetrics.R
+++ /dev/null
@@ -1,28 +0,0 @@
-gsa.read.squidmetrics = function(project, bylane = FALSE) {
-    suppressMessages(library(ROracle));
-
-    drv = dbDriver("Oracle");
-    con = dbConnect(drv, "REPORTING/REPORTING at ora01:1521/SEQPROD");
-
-    if (bylane) {
-        statement = paste("SELECT * FROM ILLUMINA_PICARD_METRICS WHERE \"Project\" = '", project, "'", sep="");
-        print(statement);
-
-        rs  = dbSendQuery(con, statement = statement);
-        d = fetch(rs, n=-1);
-        dbHasCompleted(rs);
-        dbClearResult(rs);
-    } else {
-        statement = paste("SELECT * FROM ILLUMINA_SAMPLE_STATUS_AGG WHERE \"Project\" = '", project, "'", sep="");
-        print(statement);
-
-        rs = dbSendQuery(con, statement = statement);
-        d = fetch(rs, n=-1);
-        dbHasCompleted(rs);
-        dbClearResult(rs);
-    }
-
-    oraCloseDriver(drv);
-
-    subset(d, Project == project);
-}
diff --git a/public/gsalib/src/R/R/gsa.read.vcf.R b/public/gsalib/src/R/R/gsa.read.vcf.R
deleted file mode 100644
index 5beb645..0000000
--- a/public/gsalib/src/R/R/gsa.read.vcf.R
+++ /dev/null
@@ -1,23 +0,0 @@
-gsa.read.vcf <- function(vcffile, skip=0, nrows=-1, expandGenotypeFields = FALSE) {
-    headers = readLines(vcffile, n=100);
-    headerline = headers[grep("#CHROM", headers)];
-    header = unlist(strsplit(gsub("#", "", headerline), "\t"))
-    
-    d = read.table(vcffile, header=FALSE, skip=skip, nrows=nrows, stringsAsFactors=FALSE);
-    colnames(d) = header;
-
-    if (expandGenotypeFields) {
-        columns = ncol(d);
-
-        offset = columns + 1;
-        for (sampleIndex in 10:columns) {
-            gt = unlist(lapply(strsplit(d[,sampleIndex], ":"), function(x) x[1]));
-            d[,offset] = gt;
-            colnames(d)[offset] = sprintf("%s.GT", colnames(d)[sampleIndex]);
-
-            offset = offset + 1;
-        }
-    }
-
-    return(d);
-}
diff --git a/public/gsalib/src/R/R/gsa.reshape.concordance.table.R b/public/gsalib/src/R/R/gsa.reshape.concordance.table.R
new file mode 100644
index 0000000..9b53d87
--- /dev/null
+++ b/public/gsalib/src/R/R/gsa.reshape.concordance.table.R
@@ -0,0 +1,20 @@
+gsa.reshape.concordance.table <- function(report, table.name="GenotypeConcordance_Counts", sample.name="ALL") {
+  if (!is.null(table.name)) {
+    data <- report[[table.name]]
+  }
+  if (is.null(table.name)) {
+    data <- report
+  }
+  d <- data[data$Sample==sample.name,2:(length(data[1,])-1)]
+  
+  possible.genotypes <- c('NO_CALL', 'HOM_REF', 'HET', 'HOM_VAR', 'UNAVAILABLE', 'MIXED')
+  combinations <- outer(possible.genotypes, possible.genotypes, function(a,b) {paste(a,b,sep='_')})
+  existing.combi <- matrix(combinations %in% colnames(d), nrow=length(possible.genotypes))
+  eval.genotypes <- apply(existing.combi, 1, any)
+  comp.genotypes <- apply(existing.combi, 2, any)
+  
+  m <- matrix(d, nrow=sum(eval.genotypes), byrow=T)
+  dimnames(m) <- list(EvalGenotypes=possible.genotypes[eval.genotypes],
+                      CompGenotypes=possible.genotypes[comp.genotypes])
+  m
+}
diff --git a/public/gsalib/src/R/R/gsa.variantqc.utils.R b/public/gsalib/src/R/R/gsa.variantqc.utils.R
deleted file mode 100644
index 507b336..0000000
--- a/public/gsalib/src/R/R/gsa.variantqc.utils.R
+++ /dev/null
@@ -1,246 +0,0 @@
-library(gplots)
-library(ggplot2)
-library(tools)
-
-# -------------------------------------------------------
-# Utilities for displaying multiple plots per page
-# -------------------------------------------------------
-
-distributeGraphRows <- function(graphs, heights = c()) {
-  # Viewport layout 2 graphs top to bottom with given relative heights
-  #
-  #
-  if (length(heights) == 0) {
-    heights <- rep.int(1, length(graphs))
-  }
-  heights <- heights[!is.na(graphs)]
-  graphs <- graphs[!is.na(graphs)]
-  numGraphs <- length(graphs)
-  Layout <- grid.layout(nrow = numGraphs, ncol = 1, heights=heights)
-  grid.newpage()
-  pushViewport(viewport(layout = Layout))
-  subplot <- function(x) viewport(layout.pos.row = x, layout.pos.col = 1)
-  for (i in 1:numGraphs) {
-    print(graphs[[i]], vp = subplot(i))
-  }
-}
-
-distributeLogGraph <- function(graph, xName) {
-  continuousGraph <- graph + scale_x_continuous(xName)
-  logGraph <- graph + scale_x_log10(xName) + ggtitle("")
-  distributeGraphRows(list(continuousGraph, logGraph))
-}
-
-distributePerSampleGraph <- function(perSampleGraph, distGraph, ratio=c(2,1)) {
-  distributeGraphRows(list(perSampleGraph, distGraph), ratio)
-}
-
-removeExtraStrats <- function(variantEvalDataFrame, moreToRemove=c()) {
-  # Remove the standard extra stratification columns FunctionalClass, Novelty, and others in moreToRemove from the variantEvalDataFrame
-  #
-  # Only keeps the column marked with "all" for each removed column
-  #
-  for ( toRemove in c("FunctionalClass", "Novelty", moreToRemove) ) {
-    if (toRemove %in% colnames(variantEvalDataFrame)) {
-      variantEvalDataFrame <- variantEvalDataFrame[variantEvalDataFrame[[toRemove]] == "all",]
-    }
-  }
-  variantEvalDataFrame    
-}
-
-openPDF <- function(outputPDF) {
-  # Open the outputPDF file with standard dimensions, if outputPDF is not NA
-  if ( ! is.na(outputPDF) ) {
-    pdf(outputPDF, height=8.5, width=11)
-  }
-}
-
-closePDF <- function(outputPDF) {
-  # close the outputPDF file if not NA, and try to compact the PDF if possible
-  if ( ! is.na(outputPDF) ) {
-    dev.off()
-    if (exists("compactPDF")) {
-      print("compacting PDF")
-      compactPDF(outputPDF)
-    }
-  }
-}
-
-makeRatioDataFrame <- function(ACs, num, denom, widths = NULL) {
-  if ( is.null(widths) ) widths <- rep(1, length(ACs))
-  
-  value = NULL
-  titv <- data.frame(AC=ACs, width = widths, num=num, denom = denom, ratio = num / denom)
-}
-
-.reduceACs <- function(binWidthForAC, ACs) {
-  # computes data structures necessary to reduce the full range of ACs
-  #
-  # binWidthForAC returns the number of upcoming bins that should be merged into 
-  # that AC bin.  ACs is a vector of all AC values from 0 to 2N that should be 
-  # merged together
-  #
-  # Returns a list containing the reduced ACs starts, their corresponding widths,
-  # and a map from original ACs to their new ones (1 -> 1, 2 -> 2, 3 -> 2, etc)
-  maxAC <- max(ACs)
-  newACs <- c()
-  widths <- c()
-  newACMap <- c()
-  ac <- 0
-  while ( ac < maxAC ) {
-    newACs <- c(newACs, ac)
-    width <- binWidthForAC(ac)
-    widths <- c(widths, width)
-    newACMap <- c(newACMap, rep(ac, width))
-    ac <- ac + width
-  }
-  list(ACs = newACs, widths=widths, newACMap = newACMap)
-}
-
-# geometricACs <- function(k, ACs) {
-#   nBins <- round(k * log10(max(ACs)))
-#   
-#   binWidthForAC <- function(ac) {
-#     max(ceiling(ac / nBins), 1)
-#   }
-#   
-#   return(reduceACs(binWidthForAC, ACs))
-# }
-
-reduce.AC.on.LogLinear.intervals <- function(scaleFactor, ACs) {
-  # map the full range of AC values onto a log linear scale
-  #
-  # Reduce the full AC range onto one where the width of each new AC increases at a rate of
-  # 10^scaleFactor in size with growing AC values.  This is primarily useful for accurately
-  # computing ratios or other quantities by AC that aren't well determined when the AC 
-  # values are very large
-  #
-  # Returns a list containing the reduced ACs starts, their corresponding widths,
-  # and a map from original ACs to their new ones (1 -> 1, 2 -> 2, 3 -> 2, etc)
-  maxAC <- max(ACs)
-  afs <- ACs / maxAC
-  breaks <- 10^(seq(-4, -1, scaleFactor))
-  widths <- c()
-  lastBreak <- 1
-  for ( i in length(breaks):1 ) {
-    b <- breaks[i]
-    width <- sum(afs < lastBreak & afs >= b)
-    widths <- c(widths, width)
-    lastBreak <- b
-  }
-  widths <- rev(widths)
-  
-  binWidthForAC <- function(ac) {
-    af <- ac / maxAC
-    value = 1
-    for ( i in length(breaks):1 )
-      if ( af >= breaks[i] ) {
-        value = widths[i]
-        break
-      }
-    
-    return(value)
-  }
-  
-  return(.reduceACs(binWidthForAC, ACs))
-}
-
-.remapACs <- function(remapper, k, df) {
-  newACs <- remapper(k, df$AC)
-  
-  n = length(newACs$ACs)
-  num = rep(0, n)
-  denom = rep(0, n)
-  for ( i in 1:dim(df)[1] ) {
-    rowI = df$AC == i
-    row = df[rowI,]
-    newAC = newACs$newACMap[row$AC]
-    newRowI = newACs$ACs == newAC
-    num[newRowI] = num[newRowI] + df$num[rowI]
-    denom[newRowI] = denom[newRowI] + df$denom[rowI]
-  }
-  
-  newdf <- makeRatioDataFrame(newACs$ACs, num, denom, newACs$widths )
-  newdf
-}
-
-compute.ratio.on.LogLinear.AC.intervals <- function(ACs, num, denom, scaleFactor = 0.1) {
-  df = makeRatioDataFrame(ACs, num, denom, 1)
-  return(.remapACs(reduce.AC.on.LogLinear.intervals, scaleFactor, df))
-}
-
-plotVariantQC <- function(metrics, measures, requestedStrat = "Sample", 
-                          fixHistogramX=F, anotherStrat = NULL, nObsField = "n_indels", 
-                          onSamePage=F, facetVariableOnXPerSample = F, facetVariableOnXForDist = T, 
-                          moreTitle="", note = NULL) {
-  metrics$strat = metrics[[requestedStrat]]
-  
-  otherFacet = "."
-  id.vars = c("strat", "nobs")
-  metrics$nobs <- metrics[[nObsField]]
-  
-  # keep track of the other strat and it's implied facet value
-  if (! is.null(anotherStrat)) { 
-    id.vars = c(id.vars, anotherStrat)
-    otherFacet = anotherStrat
-  }
-  
-  molten <- melt(metrics, id.vars=id.vars, measure.vars=c(measures))
-  perSampleGraph <- ggplot(data=molten, aes(x=strat, y=value, group=variable, color=variable, fill=variable))
-
-  # create the title
-  titleText=paste(paste(paste(measures, collapse=", "), "by", requestedStrat), moreTitle)
-  if ( !is.null(note) ) {
-    titleText=paste(titleText, note, sep="\n")
-  }
-  paste(titleText)
-  title <- ggtitle(titleText)
-  
-  determineFacet <- function(onX) {
-    if ( onX ) { 
-      paste(otherFacet, "~ variable")
-    } else {
-      paste("variable ~", otherFacet)
-    }
-  }
-  
-  sampleFacet = determineFacet(facetVariableOnXPerSample)
-  distFacet   = determineFacet(facetVariableOnXForDist)
-  
-  if ( requestedStrat == "Sample" ) {
-    perSampleGraph <- perSampleGraph + geom_text(aes(label=strat), size=1.5) + geom_blank() # don't display a scale
-    perSampleGraph <- perSampleGraph + scale_x_discrete("Sample (ordered by nSNPs)")
-  } else { # by AlleleCount
-    perSampleGraph <- perSampleGraph + geom_point(aes(size=log10(nobs))) #+ geom_smooth(aes(weight=log10(nobs)))
-    perSampleGraph <- perSampleGraph + scale_x_log10("AlleleCount")
-  }    
-  perSampleGraph <- perSampleGraph + ylab("Variable value") + title
-  perSampleGraph <- perSampleGraph + facet_grid(sampleFacet, scales="free")
-  
-  nValues = length(unique(molten$value))
-  if (nValues > 2) {
-    if ( requestedStrat == "Sample" ) {
-      distGraph <- ggplot(data=molten, aes(x=value, group=variable, fill=variable))
-    } else {
-      distGraph <- ggplot(data=molten, aes(x=value, group=variable, fill=variable, weight=nobs))
-    }
-    distGraph <- distGraph + geom_histogram(aes(y=..ndensity..))
-    distGraph <- distGraph + geom_density(alpha=0.5, aes(y=..scaled..))
-    distGraph <- distGraph + geom_rug(aes(y=NULL, color=variable, position="jitter"))
-    scale = "free"
-    if ( fixHistogramX ) scale = "fixed"
-    distGraph <- distGraph + facet_grid(distFacet, scales=scale)
-    distGraph <- distGraph + ylab("Relative frequency")
-    distGraph <- distGraph + xlab("Variable value (see facet for variable by color)")
-    distGraph <- distGraph + theme(axis.text.x=element_text(angle=-45)) # , legend.position="none")
-  } else {
-    distGraph <- NA
-  }
-  
-  if ( onSamePage ) {
-    suppressMessages(distributePerSampleGraph(perSampleGraph, distGraph))
-  } else {
-    suppressMessages(print(perSampleGraph))
-    suppressMessages(print(distGraph + title))
-  }
-}
diff --git a/public/gsalib/src/R/R/gsa.warn.R b/public/gsalib/src/R/R/gsa.warn.R
deleted file mode 100644
index 7ee08ce..0000000
--- a/public/gsalib/src/R/R/gsa.warn.R
+++ /dev/null
@@ -1,3 +0,0 @@
-gsa.warn <- function(message) {
-    gsa.message(sprintf("Warning: %s", message));
-}
diff --git a/public/gsalib/src/R/Read-and-delete-me b/public/gsalib/src/R/Read-and-delete-me
deleted file mode 100644
index d04323a..0000000
--- a/public/gsalib/src/R/Read-and-delete-me
+++ /dev/null
@@ -1,9 +0,0 @@
-* Edit the help file skeletons in 'man', possibly combining help files
-  for multiple functions.
-* Put any C/C++/Fortran code in 'src'.
-* If you have compiled code, add a .First.lib() function in 'R' to load
-  the shared library.
-* Run R CMD build to build the package tarball.
-* Run R CMD check to check the package tarball.
-
-Read "Writing R Extensions" for more information.
diff --git a/public/gsalib/src/R/inst/extdata/test_gatkreport.table b/public/gsalib/src/R/inst/extdata/test_gatkreport.table
new file mode 100644
index 0000000..8769126
--- /dev/null
+++ b/public/gsalib/src/R/inst/extdata/test_gatkreport.table
@@ -0,0 +1,20 @@
+#:GATKReport.v1.0:2
+#:GATKTable:true:2:9:%.18E:%.15f:;
+#:GATKTable:ErrorRatePerCycle:The error rate per sequenced position in the reads
+cycle  errorrate.61PA8.7         qualavg.61PA8.7                                         
+0      7.451835696110506E-3      25.474613284804366                                      
+1      2.362777171937477E-3      29.844949954504095                                      
+2      9.087604507451836E-4      32.875909752547310
+3      5.452562704471102E-4      34.498999090081895                                      
+4      9.087604507451836E-4      35.148316651501370                                       
+5      5.452562704471102E-4      36.072234352256190                                       
+6      5.452562704471102E-4      36.121724890829700                                        
+7      5.452562704471102E-4      36.191048034934500                                        
+8      5.452562704471102E-4      36.003457059679770                                       
+
+#:GATKTable:false:2:3:%s:%c:;
+#:GATKTable:ExampleTable:This is an old-style table
+key    column
+1:1000  T 
+1:1001  A 
+1:1002  C 
diff --git a/public/gsalib/src/R/inst/extdata/test_genconcord.table b/public/gsalib/src/R/inst/extdata/test_genconcord.table
new file mode 100644
index 0000000..8748c88
--- /dev/null
+++ b/public/gsalib/src/R/inst/extdata/test_genconcord.table
@@ -0,0 +1,30 @@
+#:GATKReport.v1.1:5
+#:GATKTable:20:2:%s:%.3f:%.3f:%.3f:%.3f:%.3f:%.3f:%.3f:%.3f:%.3f:%.3f:%.3f:%.3f:%.3f:%.3f:%.3f:%.3f:%.3f:%.3f:%.3f:;
+#:GATKTable:GenotypeConcordance_CompProportions:Per-sample concordance tables: proportions of genotypes called in comp
+Sample   NO_CALL_HOM_REF  NO_CALL_HET  NO_CALL_HOM_VAR  HOM_REF_HOM_REF  HOM_REF_HET  HOM_REF_HOM_VAR  HET_HOM_REF  HET_HET  HET_HOM_VAR  HOM_VAR_HOM_REF  HOM_VAR_HET  HOM_VAR_HOM_VAR  UNAVAILABLE_HOM_REF  UNAVAILABLE_HET  UNAVAILABLE_HOM_VAR  MIXED_HOM_REF  MIXED_HET  MIXED_HOM_VAR  Mismatching_Alleles
+ALL                0.000        0.000            0.000            0.000        0.000            0.000        0.000    0.007        0.000            0.000        0.001            0.013                0.000            0.992                0.986          0.000      0.000          0.000                0.000
+NA12878            0.000        0.000            0.000            0.000        0.000            0.000        0.000    0.007        0.000            0.000        0.001            0.013                0.000            0.992                0.986          0.000      0.000          0.000                0.000
+
+#:GATKTable:38:2:%s:%d:%d:%d:%d:%d:%d:%d:%d:%d:%d:%d:%d:%d:%d:%d:%d:%d:%d:%d:%d:%d:%d:%d:%d:%d:%d:%d:%d:%d:%d:%d:%d:%d:%d:%d:%d:%d:;
+#:GATKTable:GenotypeConcordance_Counts:Per-sample concordance tables: comparison counts
+Sample   NO_CALL_NO_CALL  NO_CALL_HOM_REF  NO_CALL_HET  NO_CALL_HOM_VAR  NO_CALL_UNAVAILABLE  NO_CALL_MIXED  HOM_REF_NO_CALL  HOM_REF_HOM_REF  HOM_REF_HET  HOM_REF_HOM_VAR  HOM_REF_UNAVAILABLE  HOM_REF_MIXED  HET_NO_CALL  HET_HOM_REF  HET_HET  HET_HOM_VAR  HET_UNAVAILABLE  HET_MIXED  HOM_VAR_NO_CALL  HOM_VAR_HOM_REF  HOM_VAR_HET  HOM_VAR_HOM_VAR  HOM_VAR_UNAVAILABLE  HOM_VAR_MIXED  UNAVAILABLE_NO_CALL  UNAVAILABLE_HOM_REF  UNAVAILABLE_HET  UNAVAILABLE_HOM_VAR  UNAVAILABLE_UNAVAILABLE  UN [...]
+ALL                    0                0            0                0                    0              0                0                0            0                0                    0              0            0            0    13463           90             3901          0                0                0         2935            18144                 4448              0                    0                    0          2053693              1326112                    11290     [...]
+NA12878                0                0            0                0                    0              0                0                0            0                0                    0              0            0            0    13463           90             3901          0                0                0         2935            18144                 4448              0                    0                    0          2053693              1326112                    11290     [...]
+
+#:GATKTable:20:2:%s:%.3f:%.3f:%.3f:%.3f:%.3f:%.3f:%.3f:%.3f:%.3f:%.3f:%.3f:%.3f:%.3f:%.3f:%.3f:%.3f:%.3f:%.3f:%.3f:;
+#:GATKTable:GenotypeConcordance_EvalProportions:Per-sample concordance tables: proportions of genotypes called in eval
+Sample   HOM_REF_NO_CALL  HOM_REF_HOM_REF  HOM_REF_HET  HOM_REF_HOM_VAR  HOM_REF_UNAVAILABLE  HOM_REF_MIXED  HET_NO_CALL  HET_HOM_REF  HET_HET  HET_HOM_VAR  HET_UNAVAILABLE  HET_MIXED  HOM_VAR_NO_CALL  HOM_VAR_HOM_REF  HOM_VAR_HET  HOM_VAR_HOM_VAR  HOM_VAR_UNAVAILABLE  HOM_VAR_MIXED  Mismatching_Alleles
+ALL                0.000            0.000        0.000            0.000                0.000          0.000        0.000        0.000    0.771        0.005            0.224      0.000            0.000            0.000        0.115            0.711                0.174          0.000                0.000
+NA06989            0.000            0.000        0.000            0.000                0.000          0.000        0.000        0.000    0.771        0.005            0.224      0.000            0.000            0.000        0.115            0.711                0.174          0.000                0.000
+
+#:GATKTable:4:2:%s:%.3f:%.3f:%.3f:;
+#:GATKTable:GenotypeConcordance_Summary:Per-sample summary statistics: NRS, NRD, and OGC
+Sample   Non-Reference Sensitivity  Non-Reference Discrepancy  Overall_Genotype_Concordance
+ALL                          0.010                      0.087                         0.913
+NA06989                      0.010                      0.087                         0.913
+
+#:GATKTable:6:1:%d:%d:%d:%d:%d:%d:;
+#:GATKTable:SiteConcordance_Summary:Site-level summary statistics
+ALLELES_MATCH  EVAL_SUPERSET_TRUTH  EVAL_SUBSET_TRUTH  ALLELES_DO_NOT_MATCH  EVAL_ONLY  TRUTH_ONLY
+        34632                    0                  0                    16       8349     3391095
+
diff --git a/public/gsalib/src/R/man/gsa.error.Rd b/public/gsalib/src/R/man/gsa.error.Rd
deleted file mode 100644
index df7c0cb..0000000
--- a/public/gsalib/src/R/man/gsa.error.Rd
+++ /dev/null
@@ -1,49 +0,0 @@
-\name{gsa.error}
-\alias{gsa.error}
-\title{
-GSA error
-}
-\description{
-Write an error message to standard out with the prefix '[gsalib] Error:', print a traceback, and exit.
-}
-\usage{
-gsa.error(message)
-}
-%- maybe also 'usage' for other objects documented here.
-\arguments{
-  \item{message}{
-The error message to write.
-}
-}
-\details{
-%%  ~~ If necessary, more details than the description above ~~
-}
-\value{
-%%  ~Describe the value returned
-%%  If it is a LIST, use
-%%  \item{comp1 }{Description of 'comp1'}
-%%  \item{comp2 }{Description of 'comp2'}
-%% ...
-}
-\references{
-%% ~put references to the literature/web site here ~
-}
-\author{
-Kiran Garimella
-}
-\note{
-%%  ~~further notes~~
-}
-
-%% ~Make other sections like Warning with \section{Warning }{....} ~
-
-\seealso{
-%% ~~objects to See Also as \code{\link{help}}, ~~~
-}
-\examples{
-gsa.error("This is a message");
-}
-% Add one or more standard keywords, see file 'KEYWORDS' in the
-% R documentation directory.
-\keyword{ ~kwd1 }
-\keyword{ ~kwd2 }% __ONLY ONE__ keyword per line
diff --git a/public/gsalib/src/R/man/gsa.getargs.Rd b/public/gsalib/src/R/man/gsa.getargs.Rd
deleted file mode 100644
index 27aa1b0..0000000
--- a/public/gsalib/src/R/man/gsa.getargs.Rd
+++ /dev/null
@@ -1,57 +0,0 @@
-\name{gsa.getargs}
-\alias{gsa.getargs}
-\title{
-Get script arguments
-}
-\description{
-Get script arguments given a list object specifying arguments and documentation.  Can be used in command-line or interactive mode.  This is helpful when developing scripts in interactive mode that will eventually become command-line programs.  If no arguments are specified or help is requested in command-line mode, the script will print out a usage statement with available arguments and exit.
-}
-\usage{
-gsa.getargs(argspec, doc = NA)
-}
-\arguments{
-  \item{argspec}{
-A list object.  Each key is an argument name.  The value is another list object with a 'value' and 'doc' keys.  For example:
-\preformatted{argspec = list(
-    arg1 = list(value=10, doc="Info for optional arg1"),
-    arg2 = list(value=NA, doc="Info for required arg2")
-);
-}
-
-If the value provided is NA, the argument is considered required and must be specified when the script is invoked.  For command-line mode, this means the argument must be specified on the command-line.  In interactive mode, there are two ways of specifying these arguments.  First, if a properly formatted list argument called 'cmdargs' is present in the current environment (i.e. the object returned by gsa.getargs() from a previous invocation), the value is taken from this object.  Otherwi [...]
-}
-
-  \item{doc}{
-An optional string succinctly documenting the purpose of the script.
-}
-}
-\details{
-Interactive scripts typically make use of hardcoded filepaths and parameter settings.  This makes testing easy, but generalization to non-interactive mode more difficult.  This utility provides a mechanism for writing scripts that work properly in both interactive and command-line modes.
-
-To use this method, specify a list with key-value pairs representing the arguments as specified above.  In command-line mode, if no arguments are specified or the user specifies '-h' or '-help' anywhere on the command string, a help message indicating available arguments, their default values, and some documentation about the argument are provided.
-}
-\value{
-Returns a list with keys matching the argspec and values representing the specified arguments.
-
-\item{arg1 }{Value for argument 1}
-\item{arg2 }{Value for argument 2}
-...etc.
-}
-\references{
-%% ~put references to the literature/web site here ~
-}
-\author{
-Kiran Garimella
-}
-\examples{
-argspec = list(
-    file    = list(value="/my/test.vcf", doc="VCF file"),
-    verbose = list(value=0,              doc="If 1, set verbose mode"),
-    test2   = list(value=2.3e9,          doc="Another argument that does stuff")
-);
-        
-cmdargs = gsa.getargs(argspec, doc="My test program");
-
-print(cmdargs$file);  # will print '[1] "/my/test.vcf"'
-}
-\keyword{ ~kwd1 }
diff --git a/public/gsalib/src/R/man/gsa.message.Rd b/public/gsalib/src/R/man/gsa.message.Rd
deleted file mode 100644
index 9752de8..0000000
--- a/public/gsalib/src/R/man/gsa.message.Rd
+++ /dev/null
@@ -1,44 +0,0 @@
-\name{gsa.message}
-\alias{gsa.message}
-\title{
-GSA message
-}
-\description{
-Write a message to standard out with the prefix '[gsalib]'.
-}
-\usage{
-gsa.message(message)
-}
-\arguments{
-  \item{message}{
-The message to write.
-}
-}
-\details{
-%%  ~~ If necessary, more details than the description above ~~
-}
-\value{
-%%  ~Describe the value returned
-%%  If it is a LIST, use
-%%  \item{comp1 }{Description of 'comp1'}
-%%  \item{comp2 }{Description of 'comp2'}
-%% ...
-}
-\references{
-%% ~put references to the literature/web site here ~
-}
-\author{
-Kiran Garimella
-}
-\note{
-%%  ~~further notes~~
-}
-
-\seealso{
-%% ~~objects to See Also as \code{\link{help}}, ~~~
-}
-\examples{
-## Write message to stdout
-gsa.message("This is a message");
-}
-\keyword{ ~kwd1 }
diff --git a/public/gsalib/src/R/man/gsa.plot.venn.Rd b/public/gsalib/src/R/man/gsa.plot.venn.Rd
deleted file mode 100644
index bf4feb5..0000000
--- a/public/gsalib/src/R/man/gsa.plot.venn.Rd
+++ /dev/null
@@ -1,75 +0,0 @@
-\name{gsa.plot.venn}
-\alias{gsa.plot.venn}
-\title{
-Plot a proportional venn diagram
-}
-\description{
-Plot a proportional venn diagram (two or three-way venns allowed)
-}
-\usage{
-gsa.plot.venn(a, b, c = 0, a_and_b, a_and_c = 0, b_and_c = 0, col = c("#FF6342", "#63C6DE", "#ADDE63"), pos = c(0.2, 0.2, 0.8, 0.82), debug = 0)
-}
-\arguments{
-  \item{a}{
-size of 'a' circle
-}
-  \item{b}{
-size of 'b' circle
-}
-  \item{c}{
-size of 'c' circle
-}
-  \item{a_and_b}{
-size of a and b overlap
-}
-  \item{a_and_c}{
-size of a and c overlap
-}
-  \item{b_and_c}{
-size of b and c overlap
-}
-  \item{col}{
-vector of colors for each venn piece
-}
-  \item{pos}{
-vector of positional elements
-}
-  \item{debug}{
-if 1, set debug mode and print useful information
-}
-}
-\details{
-Plots a two-way or three-way proportional Venn diagram.  Internally, this method uses the Google Chart API to generate the diagram, then renders it into the plot window where it can be annotated in interesting ways.
-}
-\value{
-%%  ~Describe the value returned
-%%  If it is a LIST, use
-%%  \item{comp1 }{Description of 'comp1'}
-%%  \item{comp2 }{Description of 'comp2'}
-%% ...
-}
-\references{
-}
-\author{
-Kiran Garimella
-}
-\note{
-%%  ~~further notes~~
-}
-
-%% ~Make other sections like Warning with \section{Warning }{....} ~
-
-\seealso{
-%% ~~objects to See Also as \code{\link{help}}, ~~~
-}
-\examples{
-## Plot a two-way Venn diagram
-gsa.plot.venn(1000, 750, 0, 400);
-
-## Plot a three-way Venn diagram
-gsa.plot.venn(1000, 750, 900, 400, 650, 500);
-}
-% Add one or more standard keywords, see file 'KEYWORDS' in the
-% R documentation directory.
-\keyword{ ~kwd1 }
-\keyword{ ~kwd2 }% __ONLY ONE__ keyword per line
diff --git a/public/gsalib/src/R/man/gsa.read.eval.Rd b/public/gsalib/src/R/man/gsa.read.eval.Rd
deleted file mode 100644
index 0e2baba..0000000
--- a/public/gsalib/src/R/man/gsa.read.eval.Rd
+++ /dev/null
@@ -1,111 +0,0 @@
-\name{gsa.read.eval}
-\alias{gsa.read.eval}
-\title{
-Read a VariantEval file
-}
-\description{
-Read a VariantEval file that's output in R format.
-}
-\usage{
-gsa.read.eval(evalRoot)
-}
-%- maybe also 'usage' for other objects documented here.
-\arguments{
-  \item{evalRoot}{
-%%     ~~Describe \code{evalRoot} here~~
-}
-}
-\details{
-%%  ~~ If necessary, more details than the description above ~~
-}
-\value{
-%%  ~Describe the value returned
-%%  If it is a LIST, use
-%%  \item{comp1 }{Description of 'comp1'}
-%%  \item{comp2 }{Description of 'comp2'}
-%% ...
-}
-\references{
-%% ~put references to the literature/web site here ~
-}
-\author{
-%%  ~~who you are~~
-}
-\note{
-%%  ~~further notes~~
-}
-
-%% ~Make other sections like Warning with \section{Warning }{....} ~
-
-\seealso{
-%% ~~objects to See Also as \code{\link{help}}, ~~~
-}
-\examples{
-##---- Should be DIRECTLY executable !! ----
-##-- ==>  Define data, use random,
-##--	or do  help(data=index)  for the standard data sets.
-
-## The function is currently defined as
-function(evalRoot) {
-    fileAlleleCountStats = paste(evalRoot, ".AlleleCountStats.csv", sep="");
-    fileCompOverlap = paste(evalRoot, ".Comp_Overlap.csv", sep="");
-    fileCountVariants = paste(evalRoot, ".Count_Variants.csv", sep="");
-    fileGenotypeConcordance = paste(evalRoot, ".Genotype_Concordance.csv", sep="");
-    fileMetricsByAc = paste(evalRoot, ".MetricsByAc.csv", sep="");
-    fileMetricsBySample = paste(evalRoot, ".MetricsBySample.csv", sep="");
-    fileQuality_Metrics_by_allele_count = paste(evalRoot, ".Quality_Metrics_by_allele_count.csv", sep="");
-    fileQualityScoreHistogram = paste(evalRoot, ".QualityScoreHistogram.csv", sep="");
-    fileSampleStatistics = paste(evalRoot, ".Sample_Statistics.csv", sep="");
-    fileSampleSummaryStatistics = paste(evalRoot, ".Sample_Summary_Statistics.csv", sep="");
-    fileSimpleMetricsBySample = paste(evalRoot, ".SimpleMetricsBySample.csv", sep="");
-    fileTi_slash_Tv_Variant_Evaluator = paste(evalRoot, ".Ti_slash_Tv_Variant_Evaluator.csv", sep="");
-    fileTiTvStats = paste(evalRoot, ".TiTvStats.csv", sep="");
-    fileVariant_Quality_Score = paste(evalRoot, ".Variant_Quality_Score.csv", sep="");
-
-    eval = list(
-        AlleleCountStats = NA,
-        CompOverlap = NA,
-        CountVariants = NA,
-        GenotypeConcordance = NA,
-        MetricsByAc = NA,
-        MetricsBySample = NA,
-        Quality_Metrics_by_allele_count = NA,
-        QualityScoreHistogram = NA,
-        SampleStatistics = NA,
-        SampleSummaryStatistics = NA,
-        SimpleMetricsBySample = NA,
-        TiTv = NA,
-        TiTvStats = NA,
-        Variant_Quality_Score = NA,
-
-        CallsetNames = c(),
-        CallsetOnlyNames = c(),
-        CallsetFilteredNames = c()
-    );
-
-    eval$AlleleCountStats                = .attemptToLoadFile(fileAlleleCountStats);
-    eval$CompOverlap                     = .attemptToLoadFile(fileCompOverlap);
-    eval$CountVariants                   = .attemptToLoadFile(fileCountVariants);
-    eval$GenotypeConcordance             = .attemptToLoadFile(fileGenotypeConcordance);
-    eval$MetricsByAc                     = .attemptToLoadFile(fileMetricsByAc);
-    eval$MetricsBySample                 = .attemptToLoadFile(fileMetricsBySample);
-    eval$Quality_Metrics_by_allele_count = .attemptToLoadFile(fileQuality_Metrics_by_allele_count);
-    eval$QualityScoreHistogram           = .attemptToLoadFile(fileQualityScoreHistogram);
-    eval$SampleStatistics                = .attemptToLoadFile(fileSampleStatistics);
-    eval$SampleSummaryStatistics         = .attemptToLoadFile(fileSampleSummaryStatistics);
-    eval$SimpleMetricsBySample           = .attemptToLoadFile(fileSimpleMetricsBySample);
-    eval$TiTv                            = .attemptToLoadFile(fileTi_slash_Tv_Variant_Evaluator);
-    eval$TiTvStats                       = .attemptToLoadFile(fileTiTvStats);
-    eval$Variant_Quality_Score           = .attemptToLoadFile(fileVariant_Quality_Score);
-
-    uniqueJexlExpressions = unique(eval$TiTv$jexl_expression);
-    eval$CallsetOnlyNames = as.vector(uniqueJexlExpressions[grep("FilteredIn|Intersection|none", uniqueJexlExpressions, invert=TRUE, ignore.case=TRUE)]);
-    eval$CallsetNames = as.vector(gsub("-only", "", eval$CallsetOnlyNames));
-    eval$CallsetFilteredNames = as.vector(c()); 
-    eval;
-  }
-}
-% Add one or more standard keywords, see file 'KEYWORDS' in the
-% R documentation directory.
-\keyword{ ~kwd1 }
-\keyword{ ~kwd2 }% __ONLY ONE__ keyword per line
diff --git a/public/gsalib/src/R/man/gsa.read.gatkreport.Rd b/public/gsalib/src/R/man/gsa.read.gatkreport.Rd
index 67c2c7b..8551732 100644
--- a/public/gsalib/src/R/man/gsa.read.gatkreport.Rd
+++ b/public/gsalib/src/R/man/gsa.read.gatkreport.Rd
@@ -1,10 +1,10 @@
 \name{gsa.read.gatkreport}
 \alias{gsa.read.gatkreport}
 \title{
-gsa.read.gatkreport
+Function to read in a GATKReport
 }
 \description{
-Reads a GATKReport file - a multi-table document - and loads each table as a separate data.frame object in a list.
+This function reads in data from a GATKReport. A GATKReport is a document containing multiple tables produced by the GATK. Each table is loaded as a separate data.frame object in a list.
 }
 \usage{
 gsa.read.gatkreport(filename)
@@ -15,41 +15,22 @@ The path to the GATKReport file.
 }
 }
 \details{
-The GATKReport format replaces the multi-file output format used by many GATK tools and provides a single, consolidated file format.  This format accomodates multiple tables and is still R-loadable - through this function.
-
-The file format looks like this:
-\preformatted{##:GATKReport.v0.1 TableName : The description of the table
-col1   col2                      col3
-0      0.007451835696110506      25.474613284804366
-1      0.002362777171937477      29.844949954504095
-2      9.087604507451836E-4      32.87590975254731
-3      5.452562704471102E-4      34.498999090081895
-4      9.087604507451836E-4      35.14831665150137
-}
-
+The GATKReport format replaces the multi-file output format used previously by many GATK tools and provides a single, consolidated file format.  This format accommodates multiple tables and is still R-loadable through this function. 
 }
 \value{
-Returns a list object, where each key is the TableName and the value is the data.frame object with the contents of the table.  If multiple tables with the same name exist, each one after the first will be given names of "TableName.v1", "TableName.v2", ..., "TableName.vN".
-%%  ~Describe the value returned
-%%  If it is a LIST, use
-%%  \item{comp1 }{Description of 'comp1'}
-%%  \item{comp2 }{Description of 'comp2'}
-%% ...
+Returns a LIST object, where each key is the TableName and the value is the data.frame object with the contents of the table.  If multiple tables with the same name exist, each one after the first will be given names of TableName.v1, TableName.v2, ..., TableName.vN.
 }
 \references{
-%% ~put references to the literature/web site here ~
+http://www.broadinstitute.org/gatk/guide/article?id=1244
 }
 \author{
 Kiran Garimella
 }
 \note{
-%%  ~~further notes~~
-}
-
-\seealso{
-%% ~~objects to See Also as \code{\link{help}}, ~~~
+This function accepts different versions of the GATKReport format by making internal calls to gsa.read.gatkreportv0() or gsa.read.gatkreportv1() as appropriate.
 }
 \examples{
-report = gsa.read.gatkreport("/path/to/my/output.gatkreport");
+test_file = system.file("extdata", "test_gatkreport.table", package = "gsalib");
+report = gsa.read.gatkreport(test_file);
 }
-\keyword{ ~kwd1 }
+\keyword{ manip }
diff --git a/public/gsalib/src/R/man/gsa.read.gatkreportv0.Rd b/public/gsalib/src/R/man/gsa.read.gatkreportv0.Rd
new file mode 100644
index 0000000..978a5af
--- /dev/null
+++ b/public/gsalib/src/R/man/gsa.read.gatkreportv0.Rd
@@ -0,0 +1,26 @@
+\name{gsa.read.gatkreportv0}
+\alias{gsa.read.gatkreportv0}
+\title{
+Function to read in an old-style GATKReport
+}
+\description{
+This function reads in data from a version 0.x GATKReport. It should not be called directly; instead, use gsa.read.gatkreport()
+}
+\usage{
+gsa.read.gatkreportv0(lines)
+}
+\arguments{
+  \item{lines}{
+The lines read in from the input file.
+}
+}
+\value{
+Returns a LIST object, where each key is the TableName and the value is the data.frame object with the contents of the table.  If multiple tables with the same name exist, each one after the first will be given names of TableName.v1, TableName.v2, ..., TableName.vN.
+}
+\references{
+http://www.broadinstitute.org/gatk/guide/article?id=1244
+}
+\author{
+Kiran Garimella
+}
+\keyword{ manip }
diff --git a/public/gsalib/src/R/man/gsa.read.gatkreportv1.Rd b/public/gsalib/src/R/man/gsa.read.gatkreportv1.Rd
new file mode 100644
index 0000000..b9a1add
--- /dev/null
+++ b/public/gsalib/src/R/man/gsa.read.gatkreportv1.Rd
@@ -0,0 +1,26 @@
+\name{gsa.read.gatkreportv1}
+\alias{gsa.read.gatkreportv1}
+\title{
+Function to read in a new-style GATKReport
+}
+\description{
+This function reads in data from a version 1.x GATKReport. It should not be called directly; instead, use gsa.read.gatkreport()
+}
+\usage{
+gsa.read.gatkreportv1(lines)
+}
+\arguments{
+  \item{lines}{
+The lines read in from the input file.
+}
+}
+\value{
+Returns a LIST object, where each key is the TableName and the value is the data.frame object with the contents of the table.  If multiple tables with the same name exist, each one after the first will be given names of TableName.v1, TableName.v2, ..., TableName.vN.
+}
+\references{
+http://www.broadinstitute.org/gatk/guide/article?id=1244
+}
+\author{
+Kiran Garimella
+}
+\keyword{ manip }
diff --git a/public/gsalib/src/R/man/gsa.read.squidmetrics.Rd b/public/gsalib/src/R/man/gsa.read.squidmetrics.Rd
deleted file mode 100644
index 0a8b378..0000000
--- a/public/gsalib/src/R/man/gsa.read.squidmetrics.Rd
+++ /dev/null
@@ -1,48 +0,0 @@
-\name{gsa.read.squidmetrics}
-\alias{gsa.read.squidmetrics}
-\title{
-gsa.read.squidmetrics
-}
-\description{
-Reads metrics for a specified SQUID project into a dataframe.
-}
-\usage{
-gsa.read.squidmetrics("C315")
-}
-\arguments{
-  \item{project}{
-The project for which metrics should be obtained.
-}
-  \item{bylane}{
-If TRUE, obtains per-lane metrics rather than the default per-sample metrics.
-}
-}
-\details{
-%%  ~~ If necessary, more details than the description above ~~
-}
-\value{
-%%  ~Describe the value returned
-%%  If it is a LIST, use
-%%  \item{comp1 }{Description of 'comp1'}
-%%  \item{comp2 }{Description of 'comp2'}
-%% ...
-Returns a data frame with samples (or lanes) as the row and the metric as the column.
-}
-\references{
-%% ~put references to the literature/web site here ~
-}
-\author{
-Kiran Garimella
-}
-\note{
-This method will only work within the Broad Institute internal network.
-}
-
-\seealso{
-%% ~~objects to See Also as \code{\link{help}}, ~~~
-}
-\examples{
-## Obtain metrics for project C315.
-d = gsa.read.squidmetrics("C315");
-}
-\keyword{ ~kwd1 }
diff --git a/public/gsalib/src/R/man/gsa.read.vcf.Rd b/public/gsalib/src/R/man/gsa.read.vcf.Rd
deleted file mode 100644
index cffd35e..0000000
--- a/public/gsalib/src/R/man/gsa.read.vcf.Rd
+++ /dev/null
@@ -1,53 +0,0 @@
-\name{gsa.read.vcf}
-\alias{gsa.read.vcf}
-\title{
-gsa.read.vcf
-}
-\description{
-Reads a VCF file into a table.  Optionally expands genotype columns into separate columns containing the genotype, separate from the other fields specified in the FORMAT field.
-}
-\usage{
-gsa.read.vcf(vcffile, skip=0, nrows=-1, expandGenotypeFields = FALSE)
-}
-\arguments{
-  \item{vcffile}{
-The path to the vcf file.
-}
-  \item{skip}{
-The number of lines of the data file to skip before beginning to read data.
-}
-  \item{nrows}{
-The maximum number of rows to read in.  Negative and other invalid values are ignored.
-}
-  \item{expandGenotypeFields}{
-If TRUE, adds an additional column per sample containing just the genotype.
-}
-}
-\details{
-The VCF format is the standard variant call file format used in the GATK.  This function reads that data in as a table for easy analysis.
-}
-\value{
-Returns a data.frame object, where each column corresponds to the columns in the VCF file.
-%%  ~Describe the value returned
-%%  If it is a LIST, use
-%%  \item{comp1 }{Description of 'comp1'}
-%%  \item{comp2 }{Description of 'comp2'}
-%% ...
-}
-\references{
-%% ~put references to the literature/web site here ~
-}
-\author{
-Kiran Garimella
-}
-\note{
-%%  ~~further notes~~
-}
-
-\seealso{
-%% ~~objects to See Also as \code{\link{help}}, ~~~
-}
-\examples{
-vcf = gsa.read.vcf("/path/to/my/output.vcf");
-}
-\keyword{ ~kwd1 }
diff --git a/public/gsalib/src/R/man/gsa.reshape.concordance.table.Rd b/public/gsalib/src/R/man/gsa.reshape.concordance.table.Rd
new file mode 100644
index 0000000..ba20d84
--- /dev/null
+++ b/public/gsalib/src/R/man/gsa.reshape.concordance.table.Rd
@@ -0,0 +1,48 @@
+\name{gsa.reshape.concordance.table}
+\alias{gsa.reshape.concordance.table}
+\title{
+Reshape a Concordance Table
+}
+\description{
+Given a GATKReport generated by GenotypeConcordance (as output by \code{gsa.read.gatkreport}), this function reshapes the concordance for a specified sample into a matrix with the EvalGenotypes in rows and the CompGenotypes in columns (see the documentation for GenotypeConcordance for the definition of Eval and Comp).
+}
+\usage{
+gsa.reshape.concordance.table(report, table.name="GenotypeConcordance_Counts", sample.name="ALL")
+}
+\arguments{
+  \item{report}{
+A GATKReport as output by \code{gsa.read.gatkreport}.  If \code{table.name} is \code{NULL}, \code{report} is assumed to be the vector of concordance values to reshape.
+}
+  \item{table.name}{
+The table name in the GATKReport to reshape.  Defaults to "GenotypeConcordance_Counts", but could also be one of the proportion tables ("GenotypeConcordance_EvalProportions", "GenotypeConcordance_CompProportions").  This value can also be \code{NULL}, in which case \code{report} is reshaped directly.  
+}
+  \item{sample.name}{
+The sample name within \code{table.name} to use.
+}
+}
+\value{
+Returns a two-dimensional matrix with Eval genotypes in the rows and Comp genotypes in the columns.  The genotypes themselves (\code{HOM_REF}, \code{NO_CALL}, etc) are specified in the row/col names of the matrix.
+}
+\author{
+Phillip Dexheimer
+}
+
+\seealso{
+\code{\link{gsa.read.gatkreport}}
+}
+\examples{
+test_file = system.file("extdata", "test_genconcord.table", package = "gsalib")
+report = gsa.read.gatkreport(test_file)
+gsa.reshape.concordance.table(report)
+
+## Output looks like:
+##              CompGenotypes
+##EvalGenotypes NO_CALL HOM_REF HET HOM_VAR UNAVAILABLE MIXED
+##  NO_CALL     0       0       0       0       0           0    
+##  HOM_REF     0       0       0       0       0           0    
+##  HET         0       0       13463   90      3901        0    
+##  HOM_VAR     0       0       2935    18144   4448        0    
+##  UNAVAILABLE 0       0       2053693 1326112 11290       0    
+##  MIXED       0       0       0       0       0           0  
+}
+\keyword{ manip }
diff --git a/public/gsalib/src/R/man/gsa.warn.Rd b/public/gsalib/src/R/man/gsa.warn.Rd
deleted file mode 100644
index 0b9770b..0000000
--- a/public/gsalib/src/R/man/gsa.warn.Rd
+++ /dev/null
@@ -1,46 +0,0 @@
-\name{gsa.warn}
-\alias{gsa.warn}
-\title{
-GSA warn
-}
-\description{
-Write a warning message to standard out with the prefix '[gsalib] Warning:'.
-}
-\usage{
-gsa.warn(message)
-}
-%- maybe also 'usage' for other objects documented here.
-\arguments{
-  \item{message}{
-The warning message to write.
-}
-}
-\details{
-%%  ~~ If necessary, more details than the description above ~~
-}
-\value{
-%%  ~Describe the value returned
-%%  If it is a LIST, use
-%%  \item{comp1 }{Description of 'comp1'}
-%%  \item{comp2 }{Description of 'comp2'}
-%% ...
-}
-\references{
-%% ~put references to the literature/web site here ~
-}
-\author{
-Kiran Garimella
-}
-\note{
-%%  ~~further notes~~
-}
-
-\seealso{
-%% ~~objects to See Also as \code{\link{help}}, ~~~
-}
-\examples{
-## Write message to stdout
-gsa.warn("This is a warning message");
-}
-\keyword{ ~kwd1 }
-\keyword{ ~kwd2 }% __ONLY ONE__ keyword per line
diff --git a/public/gsalib/src/R/man/gsalib-internal.Rd b/public/gsalib/src/R/man/gsalib-internal.Rd
new file mode 100644
index 0000000..3ba8709
--- /dev/null
+++ b/public/gsalib/src/R/man/gsalib-internal.Rd
@@ -0,0 +1,7 @@
+\name{gsalib-internal}
+\title{Internal gsalib objects}
+\alias{.gsa.assignGATKTableToEnvironment}
+\alias{.gsa.splitFixedWidth}
+\description{Internal gsalib objects.}
+\details{These are not to be called by the user.}
+\keyword{internal}
\ No newline at end of file
diff --git a/public/gsalib/src/R/man/gsalib-package.Rd b/public/gsalib/src/R/man/gsalib-package.Rd
index 4a49cf9..f1e0e2a 100644
--- a/public/gsalib/src/R/man/gsalib-package.Rd
+++ b/public/gsalib/src/R/man/gsalib-package.Rd
@@ -3,68 +3,32 @@
 \alias{gsalib}
 \docType{package}
 \title{
-GATK utility analysis functions
+Utility functions for GATK
 }
 \description{
-Utility functions for analyzing GATK-processed NGS data
+Utility functions for analysis of genome sequence data with the GATK
 }
 \details{
-This package contains functions for working with GATK-processed NGS data.  These functions include a command-line parser that also allows a script to be used in interactive mode (good for developing scripts that will eventually be automated), a proportional Venn diagram generator, convenience methods for parsing VariantEval output, and more.
+\tabular{ll}{
+Package: \tab gsalib\cr
+Type: \tab Package\cr
+Version: \tab 2.2\cr
+Date: \tab 2015-03-17\cr
+License: \tab MIT\cr
+LazyLoad: \tab yes\cr
+}
+This package is primarily meant to be used programmatically by GATK tools. However the gsa.read.gatkreport() function can be used to easily read in data from a GATKReport. A GATKReport is a multi-table document generated by GATK tools. 
 }
 \author{
-Genome Sequencing and Analysis Group
-
-Medical and Population Genetics Program
+Kiran Garimella
 
-Maintainer: Kiran Garimella
+Maintainer: Geraldine Van der Auwera <vdauwera at broadinstitute.org>
 }
 \references{
-GATK website: http://www.broadinstitute.org/gatk
-
-GATK documentation guide: http://www.broadinstitute.org/gatk/guide
-
-GATK help forum: http://gatkforums.broadinstitute.org
+http://www.broadinstitute.org/gatk/guide/article?id=1244
 }
+\keyword{ package }
 \examples{
-## get script arguments in interactive and non-interactive mode
-cmdargs = gsa.getargs( list(
-    requiredArg1 = list(
-        value = NA,
-        doc   = "Documentation for requiredArg1"
-    ),
-
-    optionalArg1 = list(
-        value = 3e9,
-        doc   = "Documentation for optionalArg1"
-    )
-) );
-
-## plot a proportional Venn diagram
-gsa.plot.venn(500, 250, 0, 100);
-
-## read a GATKReport file
-report = gsa.gatk.report("/path/to/my/output.gatkreport");
-
-## emit a message
-gsa.message("This is a message");
-
-## emit a warning message
-gsa.message("This is a warning message");
-
-## emit an error message
-gsa.message("This is an error message");
-
-## read the SQUID metrics for a given sequencing project (internal to the Broad only)
-s = gsa.read.squidmetrics("C427");
-
-## read command-line arguments
-cmdargs = gsa.getargs(
-    list(
-        file    = list(value="/my/test.vcf", doc="VCF file"),
-        verbose = list(value=0,              doc="If 1, set verbose mode"),
-        test2   = list(value=2.3e9,          doc="Another argument that does stuff")
-    ),
-    doc="My test program"
-);
+test_file = system.file("extdata", "test_gatkreport.table", package = "gsalib");
+report = gsa.read.gatkreport(test_file);
 }
-\keyword{ package }
diff --git a/public/gsalib/src/R/man/test_gatkreport.table.Rd b/public/gsalib/src/R/man/test_gatkreport.table.Rd
new file mode 100644
index 0000000..c064da4
--- /dev/null
+++ b/public/gsalib/src/R/man/test_gatkreport.table.Rd
@@ -0,0 +1,13 @@
+\name{gatkreport_test_v1.table}
+\docType{data}
+\alias{gatkreport_test_v1.table}
+\title{Test table for version 1.x GATKReport}
+\description{
+   This is a new-style GATKReport.
+}
+\usage{gatkreport_test_v1.table}
+\format{Text document containing multiple tables}
+\source{GSA test data, Broad Institute}
+\references{
+   http://www.broadinstitute.org/gatk/guide/article?id=1244
+}
diff --git a/public/gsalib/src/R/man/test_genconcord.table.Rd b/public/gsalib/src/R/man/test_genconcord.table.Rd
new file mode 100644
index 0000000..f567341
--- /dev/null
+++ b/public/gsalib/src/R/man/test_genconcord.table.Rd
@@ -0,0 +1,13 @@
+\name{genotype_concordance_test.table}
+\docType{data}
+\alias{genotype_concordance_test.table}
+\title{Test table for Genotype Concordance Table Reshape}
+\description{
+   This is a GATKReport output by GenotypeConcordance.
+}
+\usage{genotype_concordance_test.table}
+\format{Text document containing multiple tables}
+\source{GSA test data, Broad Institute}
+\references{
+   http://www.broadinstitute.org/gatk/guide/article?id=1244
+}
diff --git a/public/package-tests/pom.xml b/public/package-tests/pom.xml
index ce3f5cf..809f23e 100644
--- a/public/package-tests/pom.xml
+++ b/public/package-tests/pom.xml
@@ -9,7 +9,7 @@
     <parent>
         <groupId>org.broadinstitute.gatk</groupId>
         <artifactId>gatk-root</artifactId>
-        <version>3.3</version>
+        <version>3.5</version>
         <relativePath>../gatk-root</relativePath>
     </parent>
 
@@ -35,7 +35,7 @@
         </dependency>
 
         <!--
-        gatk-framework test-jar added for BaseTest
+        gatk-utils test-jar added for BaseTest
 
         TODO: Currently the <exclusion> isn't working 100%, so switched to using additionalClasspathElements
 
@@ -47,7 +47,7 @@
           http://maven.apache.org/plugins/maven-jar-plugin/usage.html#The_preferred_way
         <dependency>
             <groupId>${project.groupId}</groupId>
-            <artifactId>gatk-tools-public</artifactId>
+            <artifactId>gatk-utils</artifactId>
             <version>${project.version}</version>
             <type>test-jar</type>
             <scope>test</scope>
@@ -101,7 +101,8 @@
                             <testClassesDirectory>${gatk.packagetests.testClasses}</testClassesDirectory>
                             <!-- TODO: Using additionalClasspathElement while debugging exclusion issue above -->
                             <additionalClasspathElements>
-                                <additionalClasspathElement>${gatk.basedir}/public/gatk-tools-public/target/gatk-tools-public-${project.version}-tests.jar</additionalClasspathElement>
+                                <additionalClasspathElement>${gatk.basedir}/public/gatk-utils/target/gatk-utils-${project.version}-tests.jar</additionalClasspathElement>
+                                <additionalClasspathElement>${gatk.basedir}/public/gatk-engine/target/gatk-engine-${project.version}-tests.jar</additionalClasspathElement>
                                 <additionalClasspathElement>${gatk.basedir}/public/gatk-queue/target/gatk-queue-${project.version}-tests.jar</additionalClasspathElement>
                             </additionalClasspathElements>
                         </configuration>
@@ -143,7 +144,8 @@
                             <testClassesDirectory>${gatk.packagetests.testClasses}</testClassesDirectory>
                             <!-- TODO: Using additionalClasspathElement while debugging exclusion issue above -->
                             <additionalClasspathElements>
-                                <additionalClasspathElement>${gatk.basedir}/public/gatk-tools-public/target/gatk-tools-public-${project.version}-tests.jar</additionalClasspathElement>
+                                <additionalClasspathElement>${gatk.basedir}/public/gatk-utils/target/gatk-utils-${project.version}-tests.jar</additionalClasspathElement>
+                                <additionalClasspathElement>${gatk.basedir}/public/gatk-engine/target/gatk-engine-${project.version}-tests.jar</additionalClasspathElement>
                                 <additionalClasspathElement>${gatk.basedir}/public/gatk-queue/target/gatk-queue-${project.version}-tests.jar</additionalClasspathElement>
                             </additionalClasspathElements>
                         </configuration>
diff --git a/public/perl/liftOverVCF.pl b/public/perl/liftOverVCF.pl
deleted file mode 100755
index a942145..0000000
--- a/public/perl/liftOverVCF.pl
+++ /dev/null
@@ -1,83 +0,0 @@
-#!/usr/bin/perl -w
-
-# Runs the liftover tool on a VCF and properly handles the output
-
-use strict;
-use Getopt::Long;
-
-my $in = undef;
-my $gatk = undef;
-my $chain = undef;
-my $newRef = undef;
-my $oldRef = undef;
-my $out = undef;
-my $tmp = "/tmp";
-my $recordOriginalLocation = 0;
-GetOptions( "vcf=s" => \$in,
-	    "gatk=s" => \$gatk,
-	    "chain=s" => \$chain,
-	    "newRef=s" => \$newRef,
-	    "oldRef=s" => \$oldRef,
-            "out=s" => \$out,
-	    "tmp=s" => \$tmp,
-	    "recordOriginalLocation" => \$recordOriginalLocation);
-
-if ( !$in || !$gatk || !$chain || !$newRef || !$oldRef || !$out ) {
-    print "Usage: liftOverVCF.pl\n\t-vcf \t\t<input vcf>\n\t-gatk \t\t<path to gatk trunk>\n\t-chain \t\t<chain file>\n\t-newRef \t<path to new reference prefix; we will need newRef.dict, .fasta, and .fasta.fai>\n\t-oldRef \t<path to old reference prefix; we will need oldRef.fasta>\n\t-out \t\t<output vcf>\n\t-tmp \t\t<temp file location; defaults to /tmp>\n\t-recordOriginalLocation \t\t<Should we record what the original location was in the INFO field?; defaults to false>\n";
-    print "Example: ./liftOverVCF.pl\n\t-vcf /humgen/gsa-hpprojects/GATK/data/Comparisons/Validated/1kg_snp_validation/all_validation_batches.b36.vcf\n\t-chain b36ToHg19.broad.over.chain\n\t-out lifted.hg19.vcf\n\t-gatk /humgen/gsa-scr1/ebanks/Sting_dev\n\t-newRef /seq/references/Homo_sapiens_assembly19/v0/Homo_sapiens_assembly19\n\t-oldRef /humgen/1kg/reference/human_b36_both\n";
-    exit(1);
-}
-
-# generate a random number
-my $random_number = rand();
-my $tmp_prefix = "$tmp/$random_number";
-print "Writing temporary files to prefix: $tmp_prefix\n";
-my $unsorted_vcf = "$tmp_prefix.unsorted.vcf";
-
-# lift over the file
-print "Lifting over the vcf...";
-my $cmd = "java -jar $gatk/dist/GenomeAnalysisTK.jar -T LiftoverVariants -R $oldRef.fasta -V:variant $in -o $unsorted_vcf -chain $chain -dict $newRef.dict -U LENIENT_VCF_PROCESSING";
-if ($recordOriginalLocation) {
-  $cmd .= " -recordOriginalLocation";
-}
-system($cmd) == 0 or quit("The liftover step failed.  Please correct the necessary errors before retrying.");
-
-# we need to sort the lifted over file now
-print "\nRe-sorting the vcf...\n";
-my $sorted_vcf = "$tmp_prefix.sorted.vcf";
-open(SORTED, ">$sorted_vcf") or die "can't open $sorted_vcf: $!";
-
-# write the header
-open(UNSORTED, "< $unsorted_vcf") or die "can't open $unsorted_vcf: $!";
-my $inHeader = 1;
-while ( $inHeader == 1 ) {
-    my $line = <UNSORTED>;
-    if ( $line !~ m/^#/ ) {
-	$inHeader = 0;
-    } else {
-	print SORTED "$line";
-    }
-}
-close(UNSORTED);
-close(SORTED);
-
-$cmd = "grep \"^#\" -v $unsorted_vcf | sort -n -k2 -T $tmp | $gatk/public/perl/sortByRef.pl --tmp $tmp - $newRef.fasta.fai >> $sorted_vcf";
-system($cmd) == 0 or quit("The sorting step failed.  Please correct the necessary errors before retrying.");
-
-# Filter the VCF for bad records
-print "\nFixing/removing bad records...\n";
-$cmd = "java -jar $gatk/dist/GenomeAnalysisTK.jar -T FilterLiftedVariants -R $newRef.fasta -V:variant $sorted_vcf -o $out -U LENIENT_VCF_PROCESSING";
-system($cmd) == 0 or quit("The filtering step failed.  Please correct the necessary errors before retrying.");
-
-# clean up
-unlink $unsorted_vcf;
-unlink $sorted_vcf;
-my $sorted_index = "$sorted_vcf.idx";
-unlink $sorted_index;
-
-print "\nDone!\n";
-
-sub quit {
-    print "\n$_[0]\n";
-    exit(1);
-}
diff --git a/public/perl/sortByRef.pl b/public/perl/sortByRef.pl
deleted file mode 100755
index e177077..0000000
--- a/public/perl/sortByRef.pl
+++ /dev/null
@@ -1,127 +0,0 @@
-#!/usr/bin/perl -w
-
-use strict;
-use Getopt::Long;
-
-sub usage {
-
-    print "\nUsage:\n";
-    print "sortByRef.pl [--k POS] [--tmp dir] INPUT REF_DICT\n\n";
-
-    print " Sorts lines of the input file INFILE according\n";
-    print " to the reference contig order specified by the\n";
-    print " reference dictionary REF_DICT (.fai file).\n";
-    print " The sort is stable. If -k option is not specified,\n";
-    print " it is assumed that the contig name is the first\n";
-    print " field in each line.\n\n";
-    print "  INPUT       input file to sort. If '-' is specified, \n";
-    print "              then reads from STDIN.\n";
-    print "  REF_DICT    .fai file, or ANY file that has contigs, in the\n";
-    print "              desired soting order, as its first column.\n";
-    print "  --k POS :   contig name is in the field POS (1-based)\n";
-    print "              of input lines.\n\n";
-    print "  --tmp DIR : temp directory [default=/tmp]\n\n";
-
-    exit(1);
-}
-
-my $pos = 1;
-my $tmp = "/tmp";
-GetOptions( "k:i" => \$pos,
-	    "tmp=s" => \$tmp);
-
-$pos--;
-
-usage() if ( scalar(@ARGV) == 0 );
-
-if ( scalar(@ARGV) != 2 ) {
-    print "Wrong number of arguments\n";
-    usage();
-}
-
-my $input_file = $ARGV[0];
-my $dict_file = $ARGV[1];
-
-
-open(DICT, "< $dict_file") or die("Can not open $dict_file: $!");
-
-my %ref_order;
-
-my $n = 0;
-while ( <DICT> ) {
-    chomp;
-    my ($contig, $rest) = split '\s';
-    die("Dictionary file is probably corrupt: multiple instances of contig $contig") if ( defined $ref_order{$contig} );
-
-    $ref_order{$contig} = $n;
-    $n++;
-}
-
-close DICT;
-#we have loaded contig ordering now
-
-my $INPUT;
-if ($input_file eq "-" ) {
-    $INPUT = "STDIN";
-} else {
-    open($INPUT, "< $input_file") or die("Can not open $input_file: $!");
-}
-
-my %temp_outputs;
-
-while ( <$INPUT> ) {
-    
-    my @fields = split '\s';
-    die("Specified field position exceeds the number of fields:\n$_") 
-        if ( $pos >= scalar(@fields) );
-
-    my $contig = $fields[$pos];
-    if ( $contig =~ m/:/ ) {
-        my @loc = split(/:/, $contig);
-        # print $contig . " " . $loc[0] . "\n";
-        $contig = $loc[0]
-    }
-    chomp $contig if ( $pos == scalar(@fields) - 1 ); # if last field in line
-
-    my $order;
-    if ( defined $ref_order{$contig} ) { $order = $ref_order{$contig}; }
-    else {
-        $ref_order{$contig} = $n;
-        $order = $n; # input line has contig that was not in the dict; 
-        $n++; # this contig will go at the end of the output, 
-              # after all known contigs
-    }
-
-    my $fhandle;
-    if ( defined $temp_outputs{$order} ) { $fhandle = $temp_outputs{$order} }
-    else {
-        #print "opening $order $$ $_\n";
-        open( $fhandle, " > $tmp/sortByRef.$$.$order.tmp" ) or
-            die ( "Can not open temporary file $order: $!");
-        $temp_outputs{$order} = $fhandle;
-    }
-
-    # we got the handle to the temp file that keeps all 
-    # lines with contig $contig
-
-    print $fhandle $_; # send current line to its corresponding temp file
-}
-
-close $INPUT;
-
-foreach my $f ( values %temp_outputs ) { close $f; }
-
-# now collect back into single output stream:
-
-for ( my $i = 0 ; $i < $n ; $i++ ) {
-    # if we did not have any lines on contig $i, then there's 
-    # no temp file and nothing to do
-    next if ( ! defined $temp_outputs{$i} ) ; 
-
-    my $f; 
-    open ( $f, "< $tmp/sortByRef.$$.$i.tmp" );
-    while ( <$f> ) { print ; }
-    close $f;
-
-    unlink "$tmp/sortByRef.$$.$i.tmp";    
-}
diff --git a/public/pom.xml b/public/pom.xml
index 7533fca..18a35a3 100644
--- a/public/pom.xml
+++ b/public/pom.xml
@@ -5,7 +5,7 @@
     <parent>
         <groupId>org.broadinstitute.gatk</groupId>
         <artifactId>gatk-root</artifactId>
-        <version>3.3</version>
+        <version>3.5</version>
         <relativePath>gatk-root</relativePath>
     </parent>
 
diff --git a/public/repo/com/google/code/cofoja/cofoja/1.0-r139/cofoja-1.0-r139.jar b/public/repo/com/google/code/cofoja/cofoja/1.0-r139/cofoja-1.0-r139.jar
deleted file mode 100644
index 2cbdd38..0000000
Binary files a/public/repo/com/google/code/cofoja/cofoja/1.0-r139/cofoja-1.0-r139.jar and /dev/null differ
diff --git a/public/repo/com/google/code/cofoja/cofoja/1.0-r139/cofoja-1.0-r139.pom b/public/repo/com/google/code/cofoja/cofoja/1.0-r139/cofoja-1.0-r139.pom
deleted file mode 100644
index 5a6fb69..0000000
--- a/public/repo/com/google/code/cofoja/cofoja/1.0-r139/cofoja-1.0-r139.pom
+++ /dev/null
@@ -1,9 +0,0 @@
-<project xmlns="http://maven.apache.org/POM/4.0.0"
- xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
- xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-  <modelVersion>4.0.0</modelVersion>
-  <groupId>com.google.code.cofoja</groupId>
-  <artifactId>cofoja</artifactId>
-  <name>cofoja</name>
-  <version>1.0-r139</version>
-</project>
diff --git a/public/repo/com/google/code/cofoja/cofoja/1.2-20140817/cofoja-1.2-20140817.jar b/public/repo/com/google/code/cofoja/cofoja/1.2-20140817/cofoja-1.2-20140817.jar
new file mode 100644
index 0000000..2b105c8
Binary files /dev/null and b/public/repo/com/google/code/cofoja/cofoja/1.2-20140817/cofoja-1.2-20140817.jar differ
diff --git a/public/repo/com/google/code/cofoja/cofoja/1.2-20140817/cofoja-1.2-20140817.pom b/public/repo/com/google/code/cofoja/cofoja/1.2-20140817/cofoja-1.2-20140817.pom
new file mode 100644
index 0000000..92fbea5
--- /dev/null
+++ b/public/repo/com/google/code/cofoja/cofoja/1.2-20140817/cofoja-1.2-20140817.pom
@@ -0,0 +1,89 @@
+<project xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd" xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
+  <modelVersion>4.0.0</modelVersion>
+
+  <groupId>com.google.java.contract</groupId>
+  <artifactId>cofoja</artifactId>
+  <version>1.2-20140817</version>
+  <name>Contracts for Java</name>
+  <description>Contracts for Java is a contract programming framework for Java.</description>
+  <url>http://code.google.com/p/cofoja</url>
+
+  <issueManagement>
+    <system>code.google.com</system>
+    <url>http://code.google.com/p/cofoja/issues</url>
+  </issueManagement>
+
+  <inceptionYear>2010</inceptionYear>
+
+  <licenses>
+    <license>
+      <name>GNU Lesser General Public License, version 2.1 or later</name>
+      <url>http://www.gnu.org/licenses/lgpl-2.1.html</url>
+      <distribution>repo</distribution>
+    </license>
+  </licenses>
+
+  <scm>
+    <connection>scm:svn:http://cofoja.googlecode.com/svn/trunk/</connection>
+    <developerConnection>scm:svn:https://cofoja.googlecode.com/svn/trunk/</developerConnection>
+    <url>http://code.google.com/p/cofoja/source/browse</url>
+  </scm>
+
+  <properties>
+    <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+  </properties>
+
+    <developers>
+        <developer>
+            <id>andreasleitner</id>
+            <name>Andreas Leitner</name>
+            <email>andreasleitner at google.com</email>
+            <organization>Google</organization>
+            <organizationUrl>http://www.google.com</organizationUrl>
+            <roles>
+                <role>Developer</role>
+            </roles>
+        </developer>
+        <developer>
+            <id>davidmorgan</id>
+            <name>David Morgan</name>
+            <email>davidmorgan at google.com</email>
+            <organization>Google</organization>
+            <organizationUrl>http://www.google.com</organizationUrl>
+            <roles>
+                <role>Developer</role>
+            </roles>
+        </developer>
+        <developer>
+            <id>lenh</id>
+            <name>Nhat Minh Lê</name>
+            <email>nhat.minh.le at huoc.org</email>
+            <roles>
+                <role>Developer</role>
+            </roles>
+        </developer>
+    </developers>
+
+    <dependencies>
+        <dependency>
+            <groupId>org.ow2.asm</groupId>
+            <artifactId>asm-all</artifactId>
+            <version>5.0.4</version>
+            <scope>compile</scope>
+        </dependency>
+        <dependency>
+            <groupId>junit</groupId>
+            <artifactId>junit</artifactId>
+            <version>4.8.2</version>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>com.sun</groupId>
+            <artifactId>tools</artifactId>
+            <version>1.5.0</version>
+            <scope>system</scope>
+            <systemPath>${toolsjar}</systemPath>                                                                                 
+        </dependency>
+    </dependencies>
+
+</project>
diff --git a/public/repo/picard/picard/1.120.1579/picard-1.120.1579.jar b/public/repo/picard/picard/1.120.1579/picard-1.120.1579.jar
deleted file mode 100644
index fa3fa36..0000000
Binary files a/public/repo/picard/picard/1.120.1579/picard-1.120.1579.jar and /dev/null differ
diff --git a/public/repo/picard/picard/1.120.1579/picard-1.120.1579.pom b/public/repo/picard/picard/1.120.1579/picard-1.120.1579.pom
deleted file mode 100644
index cca9972..0000000
--- a/public/repo/picard/picard/1.120.1579/picard-1.120.1579.pom
+++ /dev/null
@@ -1,34 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-    <modelVersion>4.0.0</modelVersion>
-    <groupId>picard</groupId>
-    <artifactId>picard</artifactId>
-    <version>1.120.1579</version>
-    <name>picard</name>
-    <dependencies>
-        <dependency>
-            <groupId>samtools</groupId>
-            <artifactId>htsjdk</artifactId>
-            <version>1.120.1620</version>
-        </dependency>
-        <!-- TODO: Picard is using a custom zip with just ant's BZip2 classes. See also: http://www.kohsuke.org/bzip2 -->
-        <dependency>
-            <groupId>org.apache.ant</groupId>
-            <artifactId>ant</artifactId>
-            <version>1.8.2</version>
-            <exclusions>
-                <exclusion>
-                    <groupId>org.apache.ant</groupId>
-                    <artifactId>ant-launcher</artifactId>
-                </exclusion>
-            </exclusions>
-        </dependency>
-        <dependency>
-            <groupId>com.sun</groupId>
-            <artifactId>tools.jar</artifactId>
-            <version>1.5</version>
-            <scope>system</scope>
-            <systemPath>${java.home}/../lib/tools.jar</systemPath>
-        </dependency>
-    </dependencies>
-</project>
diff --git a/public/repo/samtools/htsjdk/1.120.1620/htsjdk-1.120.1620.jar b/public/repo/samtools/htsjdk/1.120.1620/htsjdk-1.120.1620.jar
deleted file mode 100644
index 8480ddc..0000000
Binary files a/public/repo/samtools/htsjdk/1.120.1620/htsjdk-1.120.1620.jar and /dev/null differ
diff --git a/public/repo/samtools/htsjdk/1.120.1620/htsjdk-1.120.1620.pom b/public/repo/samtools/htsjdk/1.120.1620/htsjdk-1.120.1620.pom
deleted file mode 100644
index 04ebef8..0000000
--- a/public/repo/samtools/htsjdk/1.120.1620/htsjdk-1.120.1620.pom
+++ /dev/null
@@ -1,27 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-    <modelVersion>4.0.0</modelVersion>
-    <groupId>samtools</groupId>
-    <artifactId>htsjdk</artifactId>
-    <version>1.120.1620</version>
-    <name>htsjdk</name>
-    <dependencies>
-        <dependency>
-            <groupId>org.testng</groupId>
-            <artifactId>testng</artifactId>
-            <version>5.5</version>
-            <classifier>jdk15</classifier>
-        </dependency>
-        <dependency>
-            <groupId>org.xerial.snappy</groupId>
-            <artifactId>snappy-java</artifactId>
-            <version>1.0.3-rc3</version>
-        </dependency>
-        <!-- TODO: This artifact is only in the GATK local repo, not in central, yet. -->
-        <dependency>
-            <groupId>com.google.code.cofoja</groupId>
-            <artifactId>cofoja</artifactId>
-            <version>1.0-r139</version>
-        </dependency>
-    </dependencies>
-</project>
diff --git a/public/src/main/assembly/binary-dist.xml b/public/src/main/assembly/binary-dist.xml
new file mode 100644
index 0000000..d2248c9
--- /dev/null
+++ b/public/src/main/assembly/binary-dist.xml
@@ -0,0 +1,22 @@
+<assembly xmlns="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.2" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.2 http://maven.apache.org/xsd/assembly-1.1.2.xsd">
+    <id>binary-dist</id>
+    <formats>
+        <format>tar.bz2</format>
+    </formats>
+    <includeBaseDirectory>false</includeBaseDirectory>
+    <dependencySets>
+        <dependencySet>
+            <includes>
+                <include>${groupId}:${artifactId}</include>
+            </includes>
+            <outputFileNameMapping>${gatk.binary-dist.name}.${artifact.extension}</outputFileNameMapping>
+        </dependencySet>
+        <dependencySet>
+            <outputDirectory>resources</outputDirectory>
+            <unpack>true</unpack>
+            <includes>
+                <include>org.broadinstitute.gatk:*:tar.bz2:example-resources</include>
+            </includes>
+        </dependencySet>
+    </dependencySets>
+</assembly>
diff --git a/public/src/main/scripts/shell/check_utils_engine_tools.sh b/public/src/main/scripts/shell/check_utils_engine_tools.sh
new file mode 100755
index 0000000..97a723a
--- /dev/null
+++ b/public/src/main/scripts/shell/check_utils_engine_tools.sh
@@ -0,0 +1,25 @@
+#!/bin/sh
+
+# Exit with an error if:
+# - utils contains a reference to engine or tools
+# - engine contains a reference to tools
+
+sh -c \
+    "grep -Rn \
+      -e 'org.broadinstitute.gatk.tools' \
+      -e 'org.broadinstitute.gatk.engine' \
+      */*/src/*/*/org/broadinstitute/gatk/utils | \
+      grep -v dependencyanalyzer && \
+    grep -Rn \
+      -e 'org.broadinstitute.gatk.tools' \
+      */*/src/*/*/org/broadinstitute/gatk/engine" | \
+  sed -e 's/:/:'$'\x1B\x5B\x35\x6d''/2' -e 's/$/'$'\x1B\x5B\x6d''/' | \
+  grep gatk
+
+RESULT=$?
+if [[ ${RESULT} -eq 0 ]]; then
+    echo "Fix the above errors. Do not import tools nor engine into the utils, and do not import tools into the engine." >&2
+    exit 1
+else
+    exit 0
+fi
diff --git a/settings/helpTemplates/generic.index.template.html b/settings/helpTemplates/generic.index.template.html
index bd5742f..794e50d 100644
--- a/settings/helpTemplates/generic.index.template.html
+++ b/settings/helpTemplates/generic.index.template.html
@@ -24,7 +24,7 @@
 
 <?php
 
-    include '../../../include/common.php';
+    include '../../../common/include/common.php';
     $module = modules::GATK;
     printHeader($module, "GATK | Tool Documentation Index", "Guides");
 ?>
diff --git a/settings/helpTemplates/generic.template.html b/settings/helpTemplates/generic.template.html
index d163eff..0141c86 100644
--- a/settings/helpTemplates/generic.template.html
+++ b/settings/helpTemplates/generic.template.html
@@ -24,7 +24,7 @@
 
 <?php
 
-    include '../../../include/common.php';
+    include '../../../common/include/common.php';
     $module = modules::GATK;
     printHeader($module, "GATK | Tool Documentation Index", "Guides");
 ?>
@@ -172,7 +172,7 @@
 		</h3>
 	</#if>
     <#if annotdescript?has_content >
-        <h3>Header info <br />
+        <h3>Header definition line <br />
             <small>
                 <#list annotdescript as line>
                 <li><pre>${line}</pre></li>
@@ -255,10 +255,11 @@
 			<p></p>
 		</#if>
 		<#if extradocs?size != 0>
-			<h3>Inherited arguments</h3>
-			<p>The arguments described in the entries below can be supplied to this tool to modify
-			its behavior. For example, the -L argument directs the GATK engine restricts processing
-			to specific genomic intervals (this is an Engine capability and is therefore available to all GATK walkers).</p>
+			<h3>Engine arguments</h3>
+			<p>All tools inherit arguments from the GATK Engine' "CommandLineGATK" argument collection, which can be
+				used to modify various aspects of the tool's function. For example, the -L argument directs the GATK
+				engine to restrict processing to specific genomic intervals; or the -rf argument allows you to apply
+				certain read filters to exclude some of the data from the analysis.</p>
 			<ul>
 				<#list extradocs as extradoc>
 					<li><a href="${extradoc.filename}">${extradoc.name}</a></li>

-- 
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/debian-med/gatk.git



More information about the debian-med-commit mailing list